From 1670d65082c2e07afa8eef0468bc1bec9c886e47 Mon Sep 17 00:00:00 2001
From: Thomas Hallgren
Date: Sat, 11 May 2019 08:59:06 +0200
Subject: [PATCH] Reverse dependency between lyra-operator and lyra

This commit adds a lyra build that includes the controller command to this
project, so that lyra is used as a library. This means there will be two lyra
binaries: one that has the controller command built in, and one that doesn't.
We might want to consider other options.
---
 .gitignore | 7 +-
 Gopkg.lock | 452 +-
 Gopkg.toml | 13 +
 Makefile | 98 +
 build/Dockerfile | 7 -
 cmd/lyra/main.go | 17 +
 cmd/manager/controller/controller.go | 102 +
 cmd/manager/controller/start.go | 3 +-
 cmd/manager/main.go | 10 +
 go.mod | 51 -
 go.sum | 166 -
 .../workflow/workflow_controller.go | 15 +-
 vendor/github.com/bmatcuk/doublestar/LICENSE | 22 +
 .../bmatcuk/doublestar/doublestar.go | 455 +
 .../bmatcuk/doublestar/test/b/symlink-dir | 1 +
 .../bmatcuk/doublestar/test/broken-symlink | 1 +
 .../bmatcuk/doublestar/test/working-symlink | 1 +
 vendor/github.com/hashicorp/go-hclog/LICENSE | 21 +
 .../github.com/hashicorp/go-hclog/global.go | 34 +
 .../hashicorp/go-hclog/hclogvet/buildtag.go | 91 +
 .../hashicorp/go-hclog/hclogvet/dead.go | 108 +
 .../hashicorp/go-hclog/hclogvet/hclog.go | 55 +
 .../hashicorp/go-hclog/hclogvet/main.go | 617 +
 .../hashicorp/go-hclog/hclogvet/shadow.go | 238 +
 .../go-hclog/hclogvet/testdata/log.go | 10 +
 .../hashicorp/go-hclog/hclogvet/types.go | 163 +
 .../hashicorp/go-hclog/intlogger.go | 527 +
 .../github.com/hashicorp/go-hclog/logger.go | 176 +
 .../hashicorp/go-hclog/nulllogger.go | 52 +
 .../hashicorp/go-hclog/stacktrace.go | 109 +
 .../github.com/hashicorp/go-hclog/stdlog.go | 83 +
 .../github.com/hashicorp/go-hclog/writer.go | 74 +
 vendor/github.com/hashicorp/go-plugin/LICENSE | 353 +
 .../github.com/hashicorp/go-plugin/client.go | 1015 +
 .../hashicorp/go-plugin/discover.go | 28 +
 .../github.com/hashicorp/go-plugin/error.go | 24 +
 .../basic/commons/greeter_interface.go | 62 +
 .../go-plugin/examples/basic/main.go | 62 +
 .../examples/basic/plugin/greeter_impl.go | 52 +
 .../go-plugin/examples/bidirectional/main.go | 81 +
 .../bidirectional/plugin-go-grpc/main.go | 61 +
 .../examples/bidirectional/proto/kv.pb.go | 350 +
 .../examples/bidirectional/shared/grpc.go | 103 +
 .../bidirectional/shared/interface.go | 59 +
 .../hashicorp/go-plugin/examples/grpc/main.go | 68 +
 .../examples/grpc/plugin-go-grpc/main.go | 34 +
 .../examples/grpc/plugin-go-netrpc/main.go | 31 +
 .../go-plugin/examples/grpc/proto/kv.pb.go | 229 +
 .../go-plugin/examples/grpc/shared/grpc.go | 47 +
 .../examples/grpc/shared/interface.go | 65 +
 .../go-plugin/examples/grpc/shared/rpc.go | 42 +
 .../go-plugin/examples/negotiated/main.go | 84 +
 .../examples/negotiated/plugin-go/main.go | 62 +
 .../hashicorp/go-plugin/grpc_broker.go | 457 +
 .../hashicorp/go-plugin/grpc_client.go | 111 +
 .../hashicorp/go-plugin/grpc_controller.go | 23 +
 .../hashicorp/go-plugin/grpc_server.go | 142 +
 .../go-plugin/internal/plugin/gen.go | 3 +
 .../internal/plugin/grpc_broker.pb.go | 203 +
 .../internal/plugin/grpc_controller.pb.go | 143 +
 .../hashicorp/go-plugin/log_entry.go | 73 +
 vendor/github.com/hashicorp/go-plugin/mtls.go | 73 +
 .../hashicorp/go-plugin/mux_broker.go | 204 +
 .../github.com/hashicorp/go-plugin/plugin.go | 58 +
 .../github.com/hashicorp/go-plugin/process.go | 24 +
 .../hashicorp/go-plugin/process_posix.go | 19 +
 .../hashicorp/go-plugin/process_windows.go | 29 +
 .../hashicorp/go-plugin/protocol.go | 45 +
 .../hashicorp/go-plugin/rpc_client.go | 170 +
.../hashicorp/go-plugin/rpc_server.go | 197 + .../github.com/hashicorp/go-plugin/server.go | 432 + .../hashicorp/go-plugin/server_mux.go | 31 + .../github.com/hashicorp/go-plugin/stream.go | 18 + .../hashicorp/go-plugin/test/grpc/gen.go | 3 + .../hashicorp/go-plugin/test/grpc/test.pb.go | 733 + .../github.com/hashicorp/go-plugin/testing.go | 180 + vendor/github.com/hashicorp/yamux/LICENSE | 362 + vendor/github.com/hashicorp/yamux/addr.go | 60 + vendor/github.com/hashicorp/yamux/const.go | 157 + vendor/github.com/hashicorp/yamux/mux.go | 98 + vendor/github.com/hashicorp/yamux/session.go | 653 + vendor/github.com/hashicorp/yamux/stream.go | 470 + vendor/github.com/hashicorp/yamux/util.go | 43 + .../inconshreveable/mousetrap/LICENSE | 13 + .../inconshreveable/mousetrap/trap_others.go | 15 + .../inconshreveable/mousetrap/trap_windows.go | 98 + .../mousetrap/trap_windows_1.4.go | 46 + .../github.com/leonelquinteros/gotext/LICENSE | 21 + .../leonelquinteros/gotext/gotext.go | 221 + .../leonelquinteros/gotext/helper.go | 86 + .../leonelquinteros/gotext/locale.go | 304 + .../github.com/leonelquinteros/gotext/mo.go | 472 + .../gotext/plurals/compiler.go | 429 + .../gotext/plurals/expression.go | 43 + .../leonelquinteros/gotext/plurals/math.go | 18 + .../leonelquinteros/gotext/plurals/tests.go | 104 + .../github.com/leonelquinteros/gotext/po.go | 499 + .../leonelquinteros/gotext/translation.go | 52 + .../leonelquinteros/gotext/translator.go | 59 + .../github.com/lyraproj/data-protobuf/LICENSE | 201 + .../lyraproj/data-protobuf/datapb/data.pb.go | 471 + .../lyraproj/data-protobuf/datapb/reflect.go | 156 + vendor/github.com/lyraproj/hiera/LICENSE | 201 + .../github.com/lyraproj/hiera/hiera/hiera.go | 46 + .../github.com/lyraproj/hiera/hieraapi/api.go | 124 + .../lyraproj/hiera/hieraapi/dataprovider.go | 8 + .../lyraproj/hiera/hieraapi/issues.go | 88 + .../github.com/lyraproj/hiera/hieraapi/key.go | 31 + .../lyraproj/hiera/hieraapi/location.go | 19 + .../lyraproj/hiera/hieraapi/mergestrategy.go | 17 + .../hiera/hieraapi/providercontext.go | 43 + .../lyraproj/hiera/internal/config.go | 369 + .../hiera/internal/datadigprovider.go | 94 + .../hiera/internal/datahashprovider.go | 145 + .../lyraproj/hiera/internal/deepmerge.go | 65 + .../lyraproj/hiera/internal/init.go | 111 + .../lyraproj/hiera/internal/interpolate.go | 154 + .../lyraproj/hiera/internal/invocation.go | 299 + .../github.com/lyraproj/hiera/internal/key.go | 119 + .../lyraproj/hiera/internal/location.go | 180 + .../lyraproj/hiera/internal/lookup.go | 136 + .../hiera/internal/lookupkeyprovider.go | 93 + .../lyraproj/hiera/internal/mergestrategy.go | 207 + .../lyraproj/hiera/internal/providerctx.go | 162 + .../hiera/provider/configlookupkey.go | 94 + .../lyraproj/hiera/provider/environment.go | 34 + .../lyraproj/hiera/provider/jsondata.go | 32 + .../lyraproj/hiera/provider/muxlookupkey.go | 33 + .../lyraproj/hiera/provider/yamldata.go | 26 + .../lyraproj/hiera/provider/yamllookupkey.go | 25 + vendor/github.com/lyraproj/issue/LICENSE | 201 + .../lyraproj/issue/issue/conversions.go | 227 + .../github.com/lyraproj/issue/issue/issue.go | 146 + .../github.com/lyraproj/issue/issue/label.go | 32 + .../lyraproj/issue/issue/location.go | 77 + .../github.com/lyraproj/issue/issue/printf.go | 119 + .../lyraproj/issue/issue/reported.go | 171 + .../github.com/lyraproj/issue/issue/result.go | 35 + .../lyraproj/issue/issue/severity.go | 42 + vendor/github.com/lyraproj/lyra/LICENSE | 201 + .../cmd/goplugin-example/example/start.go | 41 + 
.../lyra/cmd/goplugin-example/main.go | 7 + .../cmd/goplugin-example/resource/resource.go | 62 + .../goplugin-foobernetes/foobernetes/start.go | 33 + .../lyra/cmd/goplugin-foobernetes/main.go | 33 + .../goplugin-foobernetes/resource/instance.go | 93 + .../resource/loadbalancer.go | 86 + .../goplugin-foobernetes/resource/state.go | 53 + .../resource/webserver.go | 83 + .../lyraproj/lyra/cmd/lyra/cmd/apply.go | 44 + .../lyraproj/lyra/cmd/lyra/cmd/delete.go | 39 + .../lyraproj/lyra/cmd/lyra/cmd/embedded.go | 32 + .../lyraproj/lyra/cmd/lyra/cmd/generate.go | 40 + .../lyraproj/lyra/cmd/lyra/cmd/root.go | 77 + .../lyraproj/lyra/cmd/lyra/cmd/validate.go | 81 + .../lyraproj/lyra/cmd/lyra/cmd/version.go | 34 + .../github.com/lyraproj/lyra/cmd/lyra/main.go | 15 + .../lyraproj/lyra/cmd/lyra/ui/ui.go | 142 + .../lyra/examples/go-samples/aws/aws.go | 121 + .../go-samples/declarative/declarative.go | 69 + .../go-samples/imperative/imperative.go | 62 + .../examples/go-samples/referee/referee.go | 31 + .../go-samples/referenced/referenced.go | 16 + .../lyra/examples/go-samples/types/aws/aws.go | 8865 +++++++++ .../go-samples/types/azurerm/azurerm.go | 3978 ++++ .../go-samples/types/example/example.go | 50 + .../types/foobernetes/foobernetes.go | 60 + .../go-samples/types/github/github.go | 223 + .../go-samples/types/google/google.go | 2940 +++ .../go-samples/types/identity/identity.go | 24 + .../go-samples/types/kubernetes/kubernetes.go | 834 + .../go-samples/types/puppet/puppet.go | 28 + .../lyraproj/lyra/external/externalmodules.go | 20 + .../lyraproj/lyra/pkg/apply/apply.go | 201 + .../lyraproj/lyra/pkg/change/node.go | 184 + .../lyraproj/lyra/pkg/change/registry.go | 49 + .../lyraproj/lyra/pkg/generate/generate.go | 42 + .../github.com/lyraproj/lyra/pkg/loader/fs.go | 71 + .../lyra/pkg/loader/integrity/sha256sum.go | 27 + .../lyraproj/lyra/pkg/loader/loader.go | 81 + .../lyraproj/lyra/pkg/logger/logger.go | 49 + .../lyraproj/lyra/pkg/util/command.go | 54 + .../lyraproj/lyra/pkg/version/logo.go | 13 + .../lyraproj/lyra/pkg/version/version.go | 39 + vendor/github.com/lyraproj/pcore/LICENSE | 201 + .../lyraproj/pcore/hash/stringhash.go | 235 + .../lyraproj/pcore/loader/dependency.go | 63 + .../lyraproj/pcore/loader/filebased.go | 354 + .../lyraproj/pcore/loader/instantiate.go | 27 + .../lyraproj/pcore/loader/loader.go | 243 + .../lyraproj/pcore/loader/smartpath.go | 146 + .../lyraproj/pcore/pcore/entrypoint.go | 104 + .../lyraproj/pcore/proto/convert.go | 185 + .../lyraproj/pcore/px/collection.go | 90 + .../github.com/lyraproj/pcore/px/collector.go | 16 + .../github.com/lyraproj/pcore/px/context.go | 206 + .../github.com/lyraproj/pcore/px/equality.go | 149 + vendor/github.com/lyraproj/pcore/px/format.go | 72 + .../github.com/lyraproj/pcore/px/function.go | 126 + .../lyraproj/pcore/px/functional.go | 29 + vendor/github.com/lyraproj/pcore/px/issues.go | 306 + vendor/github.com/lyraproj/pcore/px/loader.go | 90 + .../github.com/lyraproj/pcore/px/logging.go | 166 + .../github.com/lyraproj/pcore/px/reflector.go | 125 + .../github.com/lyraproj/pcore/px/runtime.go | 20 + .../github.com/lyraproj/pcore/px/typedname.go | 84 + vendor/github.com/lyraproj/pcore/px/types.go | 338 + .../lyraproj/pcore/px/valueconsumer.go | 41 + vendor/github.com/lyraproj/pcore/px/values.go | 258 + vendor/github.com/lyraproj/pcore/px/wrap.go | 31 + .../lyraproj/pcore/pximpl/context.go | 304 + .../lyraproj/pcore/pximpl/equality.go | 123 + .../lyraproj/pcore/pximpl/function.go | 402 + .../pcore/pximpl/implementationregistry.go | 92 + 
.../lyraproj/pcore/pximpl/parameter.go | 144 + .../lyraproj/pcore/pximpl/runtime.go | 286 + .../lyraproj/pcore/pximpl/setting.go | 35 + .../pcore/pximpl/typemismatchdescriber.go | 1204 ++ .../pcore/serialization/deserializer.go | 194 + .../lyraproj/pcore/serialization/extension.go | 27 + .../pcore/serialization/jsonstreamer.go | 123 + .../pcore/serialization/jsontodata.go | 113 + .../pcore/serialization/serializer.go | 382 + .../lyraproj/pcore/threadlocal/gid.go | 98 + .../lyraproj/pcore/types/annotatable.go | 56 + .../lyraproj/pcore/types/annotatedmember.go | 113 + .../lyraproj/pcore/types/anytype.go | 73 + .../lyraproj/pcore/types/arraytype.go | 763 + .../lyraproj/pcore/types/attribute.go | 177 + .../lyraproj/pcore/types/attributesinfo.go | 62 + .../lyraproj/pcore/types/basiccollector.go | 91 + .../lyraproj/pcore/types/binarytype.go | 297 + .../lyraproj/pcore/types/booleantype.go | 314 + .../lyraproj/pcore/types/callabletype.go | 325 + .../github.com/lyraproj/pcore/types/coerce.go | 174 + .../lyraproj/pcore/types/collectiontype.go | 173 + .../lyraproj/pcore/types/commonality.go | 175 + .../lyraproj/pcore/types/constants.go | 17 + .../lyraproj/pcore/types/conversions.go | 24 + .../lyraproj/pcore/types/defaulttype.go | 107 + .../lyraproj/pcore/types/deferred.go | 158 + .../lyraproj/pcore/types/deferredtype.go | 98 + .../lyraproj/pcore/types/enumtype.go | 229 + .../lyraproj/pcore/types/floattype.go | 392 + .../github.com/lyraproj/pcore/types/format.go | 836 + .../lyraproj/pcore/types/function.go | 140 + .../lyraproj/pcore/types/hashtype.go | 1389 ++ .../lyraproj/pcore/types/inittype.go | 254 + .../lyraproj/pcore/types/integertype.go | 535 + .../lyraproj/pcore/types/iterabletype.go | 153 + .../lyraproj/pcore/types/iteratortype.go | 143 + .../github.com/lyraproj/pcore/types/lexer.go | 463 + .../lyraproj/pcore/types/liketype.go | 185 + .../lyraproj/pcore/types/notundeftype.go | 150 + .../lyraproj/pcore/types/numerictype.go | 173 + .../lyraproj/pcore/types/objecttype.go | 1407 ++ .../pcore/types/objecttypeextension.go | 258 + .../lyraproj/pcore/types/objectvalue.go | 484 + .../lyraproj/pcore/types/optionaltype.go | 150 + .../github.com/lyraproj/pcore/types/parser.go | 386 + .../lyraproj/pcore/types/patterntype.go | 181 + .../lyraproj/pcore/types/reflector.go | 464 + .../lyraproj/pcore/types/regexptype.go | 265 + .../lyraproj/pcore/types/resolver.go | 44 + .../lyraproj/pcore/types/runtimetype.go | 300 + .../lyraproj/pcore/types/scalardatatype.go | 81 + .../lyraproj/pcore/types/scalartype.go | 81 + .../lyraproj/pcore/types/semverrangetype.go | 225 + .../lyraproj/pcore/types/semvertype.go | 305 + .../lyraproj/pcore/types/sensitivetype.go | 172 + .../lyraproj/pcore/types/stringtype.go | 607 + .../lyraproj/pcore/types/structtype.go | 397 + .../lyraproj/pcore/types/taggedtype.go | 204 + .../lyraproj/pcore/types/timespantype.go | 1118 ++ .../lyraproj/pcore/types/timestamptype.go | 774 + .../lyraproj/pcore/types/tupletype.go | 380 + .../lyraproj/pcore/types/typealiastype.go | 189 + .../lyraproj/pcore/types/typedname.go | 275 + .../lyraproj/pcore/types/typeparameter.go | 39 + .../lyraproj/pcore/types/typereferencetype.go | 132 + .../github.com/lyraproj/pcore/types/types.go | 1031 ++ .../lyraproj/pcore/types/typeset.go | 695 + .../lyraproj/pcore/types/typetype.go | 154 + .../lyraproj/pcore/types/undeftype.go | 120 + .../lyraproj/pcore/types/unittype.go | 78 + .../lyraproj/pcore/types/uritype.go | 477 + .../lyraproj/pcore/types/varianttype.go | 179 + .../github.com/lyraproj/pcore/types/zinit.go | 121 + 
vendor/github.com/lyraproj/pcore/utils/pow.go | 25 + .../github.com/lyraproj/pcore/utils/reader.go | 72 + .../lyraproj/pcore/utils/strings.go | 271 + .../lyraproj/pcore/yaml/unmarshal.go | 45 + .../lyraproj/semver/semver/version.go | 324 + .../lyraproj/semver/semver/versionrange.go | 1073 ++ vendor/github.com/lyraproj/servicesdk/LICENSE | 201 + .../lyraproj/servicesdk/annotation/issues.go | 27 + .../servicesdk/annotation/relationship.go | 125 + .../servicesdk/annotation/resource.go | 269 + .../lyraproj/servicesdk/grpc/client.go | 137 + .../lyraproj/servicesdk/grpc/issues.go | 11 + .../lyraproj/servicesdk/grpc/server.go | 146 + .../servicesdk/lang/go/lyra/action.go | 170 + .../servicesdk/lang/go/lyra/collect.go | 134 + .../servicesdk/lang/go/lyra/issues.go | 27 + .../servicesdk/lang/go/lyra/reference.go | 42 + .../servicesdk/lang/go/lyra/resource.go | 124 + .../servicesdk/lang/go/lyra/stateconverter.go | 57 + .../lyraproj/servicesdk/lang/go/lyra/util.go | 135 + .../servicesdk/lang/go/lyra/workflow.go | 38 + .../lyraproj/servicesdk/lang/issues.go | 11 + .../servicesdk/lang/typegen/generator.go | 86 + .../servicesdk/lang/typegen/golang.go | 655 + .../servicesdk/lang/typegen/puppet.go | 23 + .../servicesdk/lang/typegen/typescript.go | 532 + .../lyraproj/servicesdk/service/builder.go | 453 + .../lyraproj/servicesdk/service/definition.go | 103 + .../lyraproj/servicesdk/service/error.go | 191 + .../lyraproj/servicesdk/service/issues.go | 33 + .../lyraproj/servicesdk/service/loader.go | 36 + .../lyraproj/servicesdk/service/notfound.go | 13 + .../servicesdk/service/resourcetypebuilder.go | 95 + .../lyraproj/servicesdk/service/server.go | 111 + .../lyraproj/servicesdk/service/subservice.go | 55 + .../servicesdk/serviceapi/definition.go | 25 + .../lyraproj/servicesdk/serviceapi/error.go | 35 + .../servicesdk/serviceapi/identity.go | 19 + .../servicesdk/serviceapi/invokable.go | 9 + .../servicesdk/serviceapi/metadata.go | 9 + .../lyraproj/servicesdk/serviceapi/server.go | 11 + .../servicesdk/serviceapi/stateresolver.go | 9 + .../servicesdk/servicepb/service.pb.go | 331 + .../lyraproj/servicesdk/wf/action.go | 28 + .../lyraproj/servicesdk/wf/builder.go | 410 + .../lyraproj/servicesdk/wf/condition.go | 206 + .../github.com/lyraproj/servicesdk/wf/crd.go | 34 + .../lyraproj/servicesdk/wf/issues.go | 25 + .../lyraproj/servicesdk/wf/iterator.go | 100 + .../lyraproj/servicesdk/wf/operation.go | 37 + .../lyraproj/servicesdk/wf/parser.go | 96 + .../lyraproj/servicesdk/wf/reference.go | 29 + .../lyraproj/servicesdk/wf/resource.go | 42 + .../lyraproj/servicesdk/wf/statehandler.go | 28 + .../github.com/lyraproj/servicesdk/wf/step.go | 51 + .../lyraproj/servicesdk/wf/workflow.go | 28 + vendor/github.com/lyraproj/wfe/LICENSE | 201 + vendor/github.com/lyraproj/wfe/api/issues.go | 30 + .../github.com/lyraproj/wfe/api/iterator.go | 24 + vendor/github.com/lyraproj/wfe/api/loader.go | 10 + .../github.com/lyraproj/wfe/api/resource.go | 13 + vendor/github.com/lyraproj/wfe/api/step.go | 49 + .../github.com/lyraproj/wfe/api/workflow.go | 7 + .../github.com/lyraproj/wfe/service/crud.go | 214 + .../lyraproj/wfe/service/identity.go | 91 + .../github.com/lyraproj/wfe/service/util.go | 83 + vendor/github.com/lyraproj/wfe/wfe/action.go | 60 + vendor/github.com/lyraproj/wfe/wfe/issues.go | 32 + .../github.com/lyraproj/wfe/wfe/iterator.go | 382 + vendor/github.com/lyraproj/wfe/wfe/loader.go | 219 + .../github.com/lyraproj/wfe/wfe/reference.go | 126 + .../github.com/lyraproj/wfe/wfe/resource.go | 82 + 
.../lyraproj/wfe/wfe/statehandler.go | 67 + vendor/github.com/lyraproj/wfe/wfe/step.go | 135 + .../github.com/lyraproj/wfe/wfe/workflow.go | 55 + .../lyraproj/wfe/wfe/workflowengine.go | 498 + vendor/github.com/mattn/go-colorable/LICENSE | 21 + .../go-colorable/_example/escape-seq/main.go | 16 + .../go-colorable/_example/logrus/main.go | 16 + .../mattn/go-colorable/_example/title/main.go | 14 + .../go-colorable/cmd/colorable/colorable.go | 12 + .../mattn/go-colorable/colorable_appengine.go | 29 + .../mattn/go-colorable/colorable_others.go | 30 + .../mattn/go-colorable/colorable_windows.go | 980 + .../mattn/go-colorable/noncolorable.go | 55 + vendor/github.com/mattn/go-isatty/LICENSE | 9 + vendor/github.com/mattn/go-isatty/doc.go | 2 + .../mattn/go-isatty/isatty_android.go | 23 + .../github.com/mattn/go-isatty/isatty_bsd.go | 24 + .../mattn/go-isatty/isatty_linux.go | 19 + .../mattn/go-isatty/isatty_others.go | 15 + .../mattn/go-isatty/isatty_solaris.go | 22 + .../mattn/go-isatty/isatty_windows.go | 94 + vendor/github.com/mgutz/ansi/LICENSE | 9 + vendor/github.com/mgutz/ansi/ansi.go | 285 + .../mgutz/ansi/cmd/ansi-mgutz/main.go | 135 + vendor/github.com/mgutz/ansi/doc.go | 65 + vendor/github.com/mgutz/ansi/print.go | 57 + .../mitchellh/go-testing-interface/LICENSE | 21 + .../mitchellh/go-testing-interface/testing.go | 84 + .../go-testing-interface/testing_go19.go | 108 + vendor/github.com/oklog/run/LICENSE | 201 + vendor/github.com/oklog/run/group.go | 62 + vendor/github.com/spf13/cobra/LICENSE.txt | 174 + vendor/github.com/spf13/cobra/args.go | 89 + .../spf13/cobra/bash_completions.go | 584 + vendor/github.com/spf13/cobra/cobra.go | 200 + .../github.com/spf13/cobra/cobra/cmd/add.go | 179 + .../spf13/cobra/cobra/cmd/helpers.go | 168 + .../github.com/spf13/cobra/cobra/cmd/init.go | 234 + .../spf13/cobra/cobra/cmd/license_agpl.go | 683 + .../spf13/cobra/cobra/cmd/license_apache_2.go | 238 + .../cobra/cobra/cmd/license_bsd_clause_2.go | 71 + .../cobra/cobra/cmd/license_bsd_clause_3.go | 78 + .../spf13/cobra/cobra/cmd/license_gpl_2.go | 376 + .../spf13/cobra/cobra/cmd/license_gpl_3.go | 711 + .../spf13/cobra/cobra/cmd/license_lgpl.go | 186 + .../spf13/cobra/cobra/cmd/license_mit.go | 63 + .../spf13/cobra/cobra/cmd/licenses.go | 118 + .../spf13/cobra/cobra/cmd/project.go | 200 + .../github.com/spf13/cobra/cobra/cmd/root.go | 79 + .../cobra/cobra/cmd/testdata/LICENSE.golden | 202 + vendor/github.com/spf13/cobra/cobra/main.go | 20 + vendor/github.com/spf13/cobra/command.go | 1517 ++ .../github.com/spf13/cobra/command_notwin.go | 5 + vendor/github.com/spf13/cobra/command_win.go | 20 + vendor/github.com/spf13/cobra/doc/man_docs.go | 236 + vendor/github.com/spf13/cobra/doc/md_docs.go | 159 + .../github.com/spf13/cobra/doc/rest_docs.go | 185 + vendor/github.com/spf13/cobra/doc/util.go | 51 + .../github.com/spf13/cobra/doc/yaml_docs.go | 169 + .../github.com/spf13/cobra/zsh_completions.go | 126 + .../v1/gonum/.travis/deps.d/linux/01-deps.sh | 1 + .../v1/gonum/.travis/deps.d/osx/nothing.sh | 1 + .../gonum/.travis/deps.d/windows/nothing.sh | 1 + .../.travis/run.d/linux/01-check-copyright.sh | 1 + .../.travis/run.d/linux/02-check-imports.sh | 1 + .../run.d/linux/03-check-formatting.sh | 1 + .../v1/gonum/.travis/run.d/linux/04-test.sh | 1 + .../.travis/run.d/linux/05-test-coverage.sh | 1 + .../.travis/run.d/linux/06-check-generate.sh | 1 + .../v1/gonum/.travis/run.d/osx/01-test.sh | 1 + vendor/gonum.org/v1/gonum/AUTHORS | 92 + vendor/gonum.org/v1/gonum/CONTRIBUTORS | 94 + vendor/gonum.org/v1/gonum/LICENSE 
| 23 + vendor/gonum.org/v1/gonum/blas/blas.go | 283 + .../gonum.org/v1/gonum/blas/blas32/blas32.go | 458 + vendor/gonum.org/v1/gonum/blas/blas32/conv.go | 279 + .../v1/gonum/blas/blas32/conv_symmetric.go | 155 + vendor/gonum.org/v1/gonum/blas/blas32/doc.go | 6 + .../gonum.org/v1/gonum/blas/blas64/blas64.go | 469 + vendor/gonum.org/v1/gonum/blas/blas64/conv.go | 277 + .../v1/gonum/blas/blas64/conv_symmetric.go | 153 + vendor/gonum.org/v1/gonum/blas/blas64/doc.go | 6 + .../v1/gonum/blas/cblas128/cblas128.go | 508 + .../gonum.org/v1/gonum/blas/cblas128/conv.go | 279 + .../v1/gonum/blas/cblas128/conv_hermitian.go | 155 + .../v1/gonum/blas/cblas128/conv_symmetric.go | 155 + .../gonum.org/v1/gonum/blas/cblas128/doc.go | 6 + .../v1/gonum/blas/cblas64/cblas64.go | 508 + .../gonum.org/v1/gonum/blas/cblas64/conv.go | 279 + .../v1/gonum/blas/cblas64/conv_hermitian.go | 155 + vendor/gonum.org/v1/gonum/blas/cblas64/doc.go | 6 + vendor/gonum.org/v1/gonum/blas/doc.go | 108 + vendor/gonum.org/v1/gonum/blas/gonum/dgemm.go | 314 + vendor/gonum.org/v1/gonum/blas/gonum/doc.go | 88 + .../gonum.org/v1/gonum/blas/gonum/errors.go | 35 + vendor/gonum.org/v1/gonum/blas/gonum/gemv.go | 190 + vendor/gonum.org/v1/gonum/blas/gonum/gonum.go | 58 + .../v1/gonum/blas/gonum/level1cmplx128.go | 445 + .../v1/gonum/blas/gonum/level1cmplx64.go | 467 + .../v1/gonum/blas/gonum/level1float32.go | 644 + .../gonum/blas/gonum/level1float32_dsdot.go | 53 + .../v1/gonum/blas/gonum/level1float32_sdot.go | 53 + .../gonum/blas/gonum/level1float32_sdsdot.go | 53 + .../v1/gonum/blas/gonum/level1float64.go | 620 + .../v1/gonum/blas/gonum/level1float64_ddot.go | 49 + .../v1/gonum/blas/gonum/level2cmplx128.go | 2906 +++ .../v1/gonum/blas/gonum/level2cmplx64.go | 2942 +++ .../v1/gonum/blas/gonum/level2float32.go | 2296 +++ .../v1/gonum/blas/gonum/level2float64.go | 2264 +++ .../v1/gonum/blas/gonum/level3cmplx128.go | 1715 ++ .../v1/gonum/blas/gonum/level3cmplx64.go | 1735 ++ .../v1/gonum/blas/gonum/level3float32.go | 876 + .../v1/gonum/blas/gonum/level3float64.go | 864 + vendor/gonum.org/v1/gonum/blas/gonum/sgemm.go | 318 + .../autogen_bench_level1double.go | 288 + .../v1/gonum/blas/testblas/benchsize.go | 12 + .../v1/gonum/blas/testblas/common.go | 728 + .../gonum.org/v1/gonum/blas/testblas/dgbmv.go | 153 + .../gonum.org/v1/gonum/blas/testblas/dgemm.go | 248 + .../v1/gonum/blas/testblas/dgemmbench.go | 44 + .../gonum.org/v1/gonum/blas/testblas/dgemv.go | 728 + .../gonum.org/v1/gonum/blas/testblas/dger.go | 235 + .../gonum.org/v1/gonum/blas/testblas/doc.go | 6 + .../gonum.org/v1/gonum/blas/testblas/dsbmv.go | 87 + .../gonum.org/v1/gonum/blas/testblas/dspmv.go | 77 + .../gonum.org/v1/gonum/blas/testblas/dspr.go | 75 + .../gonum.org/v1/gonum/blas/testblas/dspr2.go | 80 + .../gonum.org/v1/gonum/blas/testblas/dsymm.go | 281 + .../gonum.org/v1/gonum/blas/testblas/dsymv.go | 77 + .../gonum.org/v1/gonum/blas/testblas/dsyr.go | 76 + .../gonum.org/v1/gonum/blas/testblas/dsyr2.go | 80 + .../v1/gonum/blas/testblas/dsyr2k.go | 205 + .../gonum.org/v1/gonum/blas/testblas/dsyrk.go | 219 + .../gonum.org/v1/gonum/blas/testblas/dtbmv.go | 127 + .../gonum.org/v1/gonum/blas/testblas/dtbsv.go | 259 + .../gonum.org/v1/gonum/blas/testblas/dtpmv.go | 133 + .../gonum.org/v1/gonum/blas/testblas/dtpsv.go | 148 + .../gonum.org/v1/gonum/blas/testblas/dtrmm.go | 810 + .../gonum.org/v1/gonum/blas/testblas/dtrmv.go | 151 + .../v1/gonum/blas/testblas/dtrmvbench.go | 31 + .../gonum.org/v1/gonum/blas/testblas/dtrsm.go | 870 + .../gonum.org/v1/gonum/blas/testblas/dtrsv.go | 148 
+ .../gonum.org/v1/gonum/blas/testblas/dtxmv.go | 149 + .../v1/gonum/blas/testblas/dzasum.go | 58 + .../v1/gonum/blas/testblas/dznrm2.go | 137 + .../v1/gonum/blas/testblas/izamax.go | 47 + .../v1/gonum/blas/testblas/level1double.go | 2194 +++ .../v1/gonum/blas/testblas/level2bench.go | 91 + .../gonum.org/v1/gonum/blas/testblas/zaxpy.go | 157 + .../gonum.org/v1/gonum/blas/testblas/zcopy.go | 74 + .../gonum.org/v1/gonum/blas/testblas/zdotc.go | 139 + .../gonum.org/v1/gonum/blas/testblas/zdotu.go | 139 + .../v1/gonum/blas/testblas/zdscal.go | 119 + .../gonum.org/v1/gonum/blas/testblas/zgbmv.go | 147 + .../gonum.org/v1/gonum/blas/testblas/zgemm.go | 104 + .../gonum.org/v1/gonum/blas/testblas/zgemv.go | 344 + .../gonum.org/v1/gonum/blas/testblas/zgerc.go | 183 + .../gonum.org/v1/gonum/blas/testblas/zgeru.go | 257 + .../gonum.org/v1/gonum/blas/testblas/zhbmv.go | 138 + .../gonum.org/v1/gonum/blas/testblas/zhemm.go | 130 + .../gonum.org/v1/gonum/blas/testblas/zhemv.go | 276 + .../gonum.org/v1/gonum/blas/testblas/zher.go | 153 + .../gonum.org/v1/gonum/blas/testblas/zher2.go | 313 + .../v1/gonum/blas/testblas/zher2k.go | 177 + .../gonum.org/v1/gonum/blas/testblas/zherk.go | 158 + .../gonum.org/v1/gonum/blas/testblas/zhpmv.go | 61 + .../gonum.org/v1/gonum/blas/testblas/zhpr.go | 46 + .../gonum.org/v1/gonum/blas/testblas/zhpr2.go | 46 + .../gonum.org/v1/gonum/blas/testblas/zscal.go | 119 + .../gonum.org/v1/gonum/blas/testblas/zswap.go | 74 + .../gonum.org/v1/gonum/blas/testblas/zsymm.go | 127 + .../v1/gonum/blas/testblas/zsyr2k.go | 155 + .../gonum.org/v1/gonum/blas/testblas/zsyrk.go | 135 + .../gonum.org/v1/gonum/blas/testblas/ztbmv.go | 114 + .../gonum.org/v1/gonum/blas/testblas/ztbsv.go | 115 + .../gonum.org/v1/gonum/blas/testblas/ztpmv.go | 75 + .../gonum.org/v1/gonum/blas/testblas/ztpsv.go | 105 + .../gonum.org/v1/gonum/blas/testblas/ztrmm.go | 128 + .../gonum.org/v1/gonum/blas/testblas/ztrmv.go | 285 + .../gonum.org/v1/gonum/blas/testblas/ztrsm.go | 144 + .../gonum.org/v1/gonum/blas/testblas/ztrsv.go | 102 + vendor/gonum.org/v1/gonum/bound/bound.go | 75 + vendor/gonum.org/v1/gonum/bound/doc.go | 6 + .../v1/gonum/diff/fd/crosslaplacian.go | 186 + .../gonum.org/v1/gonum/diff/fd/derivative.go | 71 + vendor/gonum.org/v1/gonum/diff/fd/diff.go | 148 + vendor/gonum.org/v1/gonum/diff/fd/doc.go | 6 + vendor/gonum.org/v1/gonum/diff/fd/gradient.go | 145 + vendor/gonum.org/v1/gonum/diff/fd/hessian.go | 186 + vendor/gonum.org/v1/gonum/diff/fd/jacobian.go | 202 + .../gonum.org/v1/gonum/diff/fd/laplacian.go | 158 + vendor/gonum.org/v1/gonum/doc.go | 12 + vendor/gonum.org/v1/gonum/floats/doc.go | 11 + vendor/gonum.org/v1/gonum/floats/floats.go | 933 + vendor/gonum.org/v1/gonum/fourier/doc.go | 6 + vendor/gonum.org/v1/gonum/fourier/fourier.go | 260 + .../internal/fftpack/array_bounds_checks.go | 88 + .../fftpack/array_no_bounds_checks.go | 64 + .../v1/gonum/fourier/internal/fftpack/cfft.go | 652 + .../v1/gonum/fourier/internal/fftpack/cosq.go | 220 + .../v1/gonum/fourier/internal/fftpack/cost.go | 143 + .../v1/gonum/fourier/internal/fftpack/doc.go | 7 + .../v1/gonum/fourier/internal/fftpack/rfft.go | 1151 ++ .../v1/gonum/fourier/internal/fftpack/sinq.go | 179 + .../v1/gonum/fourier/internal/fftpack/sint.go | 146 + vendor/gonum.org/v1/gonum/fourier/quarter.go | 133 + vendor/gonum.org/v1/gonum/fourier/sincos.go | 112 + .../v1/gonum/graph/community/bisect.go | 249 + .../gonum.org/v1/gonum/graph/community/doc.go | 6 + .../v1/gonum/graph/community/k_communities.go | 98 + .../gonum/graph/community/louvain_common.go | 
418 + .../gonum/graph/community/louvain_directed.go | 677 + .../community/louvain_directed_multiplex.go | 917 + .../graph/community/louvain_undirected.go | 607 + .../community/louvain_undirected_multiplex.go | 845 + vendor/gonum.org/v1/gonum/graph/doc.go | 9 + .../gonum/graph/encoding/digraph6/digraph6.go | 338 + .../gonum.org/v1/gonum/graph/encoding/doc.go | 6 + .../v1/gonum/graph/encoding/dot/decode.go | 527 + .../v1/gonum/graph/encoding/dot/doc.go | 21 + .../v1/gonum/graph/encoding/dot/encode.go | 657 + .../v1/gonum/graph/encoding/encoding.go | 36 + .../v1/gonum/graph/encoding/graph6/graph6.go | 283 + .../v1/gonum/graph/encoding/graphql/decode.go | 157 + .../v1/gonum/graph/encoding/graphql/doc.go | 7 + .../gonum/graph/encoding/graphql/graphql.go | 5 + .../v1/gonum/graph/ex/fdpclust/gn.go | 282 + .../v1/gonum/graph/ex/fdpclust/main.go | 79 + .../v1/gonum/graph/flow/control_flow_lt.go | 209 + .../v1/gonum/graph/flow/control_flow_slt.go | 232 + vendor/gonum.org/v1/gonum/graph/flow/doc.go | 6 + .../graph/formats/cytoscapejs/cytoscapejs.go | 310 + .../formats/cytoscapejs/testdata/LICENSE | 21 + .../v1/gonum/graph/formats/dot/ast/ast.go | 409 + .../v1/gonum/graph/formats/dot/ast/doc.go | 7 + .../v1/gonum/graph/formats/dot/doc.go | 6 + .../v1/gonum/graph/formats/dot/dot.go | 64 + .../v1/gonum/graph/formats/dot/fuzz/fuzz.go | 39 + .../graph/formats/dot/internal/astx/astx.go | 326 + .../graph/formats/dot/internal/astx/doc.go | 7 + .../graph/formats/dot/internal/errors/doc.go | 6 + .../formats/dot/internal/errors/errors.go | 66 + .../formats/dot/internal/lexer/acttab.go | 605 + .../graph/formats/dot/internal/lexer/doc.go | 6 + .../graph/formats/dot/internal/lexer/lexer.go | 310 + .../dot/internal/lexer/transitiontable.go | 2813 +++ .../formats/dot/internal/parser/action.go | 61 + .../dot/internal/parser/actiontable.go | 2199 +++ .../graph/formats/dot/internal/parser/doc.go | 6 + .../formats/dot/internal/parser/gototable.go | 2807 +++ .../formats/dot/internal/parser/parser.go | 226 + .../dot/internal/parser/productionstable.go | 586 + .../formats/dot/internal/paste_copyright.go | 57 + .../graph/formats/dot/internal/token/doc.go | 6 + .../graph/formats/dot/internal/token/token.go | 116 + .../graph/formats/dot/internal/util/doc.go | 6 + .../formats/dot/internal/util/litconv.go | 118 + .../graph/formats/dot/internal/util/rune.go | 49 + .../v1/gonum/graph/formats/dot/sem.go | 160 + .../v1/gonum/graph/formats/gexf12/gexf.go | 304 + .../v1/gonum/graph/formats/sigmajs/sigmajs.go | 128 + .../formats/sigmajs/testdata/LICENSE.txt | 12 + vendor/gonum.org/v1/gonum/graph/graph.go | 282 + .../graph/graphs/gen/batagelj_brandes.go | 396 + .../v1/gonum/graph/graphs/gen/doc.go | 6 + .../v1/gonum/graph/graphs/gen/duplication.go | 131 + .../v1/gonum/graph/graphs/gen/gen.go | 20 + .../v1/gonum/graph/graphs/gen/holme_kim.go | 170 + .../v1/gonum/graph/graphs/gen/small_world.go | 204 + .../v1/gonum/graph/internal/linear/doc.go | 6 + .../v1/gonum/graph/internal/linear/linear.go | 73 + .../v1/gonum/graph/internal/ordered/doc.go | 6 + .../v1/gonum/graph/internal/ordered/sort.go | 93 + .../v1/gonum/graph/internal/set/doc.go | 6 + .../v1/gonum/graph/internal/set/same.go | 36 + .../graph/internal/set/same_appengine.go | 36 + .../v1/gonum/graph/internal/set/set.go | 228 + .../v1/gonum/graph/internal/uid/uid.go | 54 + .../gonum.org/v1/gonum/graph/iterator/doc.go | 9 + .../v1/gonum/graph/iterator/edges.go | 131 + .../v1/gonum/graph/iterator/lines.go | 131 + .../v1/gonum/graph/iterator/nodes.go | 125 + 
.../v1/gonum/graph/multi/directed.go | 278 + vendor/gonum.org/v1/gonum/graph/multi/doc.go | 9 + .../gonum.org/v1/gonum/graph/multi/multi.go | 130 + .../v1/gonum/graph/multi/undirected.go | 260 + .../v1/gonum/graph/multi/weighted_directed.go | 352 + .../gonum/graph/multi/weighted_undirected.go | 360 + vendor/gonum.org/v1/gonum/graph/multigraph.go | 198 + .../v1/gonum/graph/network/betweenness.go | 256 + .../v1/gonum/graph/network/diffusion.go | 212 + .../v1/gonum/graph/network/distance.go | 132 + .../gonum.org/v1/gonum/graph/network/doc.go | 6 + .../gonum.org/v1/gonum/graph/network/hits.go | 101 + .../v1/gonum/graph/network/network.go | 13 + .../gonum.org/v1/gonum/graph/network/page.go | 418 + .../gonum.org/v1/gonum/graph/nodes_edges.go | 300 + .../gonum.org/v1/gonum/graph/path/a_star.go | 151 + .../v1/gonum/graph/path/bellman_ford_moore.go | 72 + .../gonum.org/v1/gonum/graph/path/dijkstra.go | 165 + .../gonum.org/v1/gonum/graph/path/disjoint.go | 87 + vendor/gonum.org/v1/gonum/graph/path/doc.go | 6 + .../v1/gonum/graph/path/dynamic/doc.go | 6 + .../v1/gonum/graph/path/dynamic/dstarlite.go | 502 + .../v1/gonum/graph/path/floydwarshall.go | 91 + .../graph/path/internal/testgraphs/doc.go | 7 + .../graph/path/internal/testgraphs/grid.go | 303 + .../graph/path/internal/testgraphs/limited.go | 329 + .../path/internal/testgraphs/shortest.go | 654 + .../v1/gonum/graph/path/johnson_apsp.go | 199 + .../gonum.org/v1/gonum/graph/path/shortest.go | 405 + .../v1/gonum/graph/path/spanning_tree.go | 189 + .../gonum.org/v1/gonum/graph/path/weight.go | 53 + .../gonum.org/v1/gonum/graph/path/yen_ksp.go | 151 + .../graph/simple/dense_directed_matrix.go | 301 + .../graph/simple/dense_undirected_matrix.go | 268 + .../v1/gonum/graph/simple/directed.go | 235 + vendor/gonum.org/v1/gonum/graph/simple/doc.go | 9 + .../gonum.org/v1/gonum/graph/simple/simple.go | 72 + .../v1/gonum/graph/simple/undirected.go | 216 + .../gonum/graph/simple/weighted_directed.go | 279 + .../gonum/graph/simple/weighted_undirected.go | 273 + .../v1/gonum/graph/testgraph/testcases.go | 347 + .../v1/gonum/graph/testgraph/testgraph.go | 2112 +++ .../v1/gonum/graph/topo/bron_kerbosch.go | 250 + .../v1/gonum/graph/topo/clique_graph.go | 111 + vendor/gonum.org/v1/gonum/graph/topo/doc.go | 6 + .../v1/gonum/graph/topo/johnson_cycles.go | 285 + .../v1/gonum/graph/topo/non_tomita_choice.go | 9 + .../v1/gonum/graph/topo/paton_cycles.go | 83 + .../gonum.org/v1/gonum/graph/topo/tarjan.go | 199 + .../v1/gonum/graph/topo/tomita_choice.go | 9 + vendor/gonum.org/v1/gonum/graph/topo/topo.go | 68 + .../gonum.org/v1/gonum/graph/traverse/doc.go | 6 + .../v1/gonum/graph/traverse/traverse.go | 231 + vendor/gonum.org/v1/gonum/graph/undirect.go | 270 + vendor/gonum.org/v1/gonum/integrate/doc.go | 7 + .../gonum.org/v1/gonum/integrate/quad/doc.go | 7 + .../v1/gonum/integrate/quad/hermite.go | 314 + .../v1/gonum/integrate/quad/hermite_data.go | 413 + .../integrate/quad/internal/PrintGoSlice.m | 11 + .../gonum/integrate/quad/internal/genherm.m | 32 + .../gonum/integrate/quad/internal/hermpts.m | 609 + .../v1/gonum/integrate/quad/legendre.go | 385 + .../gonum.org/v1/gonum/integrate/quad/quad.go | 157 + .../v1/gonum/integrate/trapezoidal.go | 40 + .../gonum/internal/asm/c128/axpyinc_amd64.s | 134 + .../gonum/internal/asm/c128/axpyincto_amd64.s | 141 + .../internal/asm/c128/axpyunitary_amd64.s | 122 + .../internal/asm/c128/axpyunitaryto_amd64.s | 123 + .../v1/gonum/internal/asm/c128/doc.go | 6 + .../gonum/internal/asm/c128/dotcinc_amd64.s | 153 + 
.../internal/asm/c128/dotcunitary_amd64.s | 143 + .../gonum/internal/asm/c128/dotuinc_amd64.s | 141 + .../internal/asm/c128/dotuunitary_amd64.s | 130 + .../gonum/internal/asm/c128/dscalinc_amd64.s | 69 + .../internal/asm/c128/dscalunitary_amd64.s | 66 + .../v1/gonum/internal/asm/c128/scal.go | 31 + .../internal/asm/c128/scalUnitary_amd64.s | 116 + .../gonum/internal/asm/c128/scalinc_amd64.s | 121 + .../v1/gonum/internal/asm/c128/stubs_amd64.go | 96 + .../v1/gonum/internal/asm/c128/stubs_noasm.go | 163 + .../v1/gonum/internal/asm/c64/axpyinc_amd64.s | 151 + .../gonum/internal/asm/c64/axpyincto_amd64.s | 156 + .../internal/asm/c64/axpyunitary_amd64.s | 160 + .../internal/asm/c64/axpyunitaryto_amd64.s | 157 + .../v1/gonum/internal/asm/c64/conj.go | 7 + .../v1/gonum/internal/asm/c64/doc.go | 6 + .../v1/gonum/internal/asm/c64/dotcinc_amd64.s | 160 + .../internal/asm/c64/dotcunitary_amd64.s | 208 + .../v1/gonum/internal/asm/c64/dotuinc_amd64.s | 148 + .../internal/asm/c64/dotuunitary_amd64.s | 197 + .../v1/gonum/internal/asm/c64/scal.go | 79 + .../v1/gonum/internal/asm/c64/stubs_amd64.go | 68 + .../v1/gonum/internal/asm/c64/stubs_noasm.go | 113 + .../v1/gonum/internal/asm/f32/axpyinc_amd64.s | 73 + .../gonum/internal/asm/f32/axpyincto_amd64.s | 78 + .../internal/asm/f32/axpyunitary_amd64.s | 97 + .../internal/asm/f32/axpyunitaryto_amd64.s | 98 + .../v1/gonum/internal/asm/f32/ddotinc_amd64.s | 91 + .../internal/asm/f32/ddotunitary_amd64.s | 110 + .../v1/gonum/internal/asm/f32/doc.go | 6 + .../v1/gonum/internal/asm/f32/dotinc_amd64.s | 85 + .../gonum/internal/asm/f32/dotunitary_amd64.s | 106 + .../v1/gonum/internal/asm/f32/ge_amd64.go | 15 + .../v1/gonum/internal/asm/f32/ge_amd64.s | 757 + .../v1/gonum/internal/asm/f32/ge_noasm.go | 36 + .../v1/gonum/internal/asm/f32/scal.go | 55 + .../v1/gonum/internal/asm/f32/stubs_amd64.go | 68 + .../v1/gonum/internal/asm/f32/stubs_noasm.go | 113 + .../v1/gonum/internal/asm/f64/abssum_amd64.s | 82 + .../gonum/internal/asm/f64/abssuminc_amd64.s | 90 + .../v1/gonum/internal/asm/f64/add_amd64.s | 66 + .../gonum/internal/asm/f64/addconst_amd64.s | 53 + .../v1/gonum/internal/asm/f64/axpy.go | 57 + .../v1/gonum/internal/asm/f64/axpyinc_amd64.s | 142 + .../gonum/internal/asm/f64/axpyincto_amd64.s | 148 + .../internal/asm/f64/axpyunitary_amd64.s | 134 + .../internal/asm/f64/axpyunitaryto_amd64.s | 140 + .../v1/gonum/internal/asm/f64/cumprod_amd64.s | 71 + .../v1/gonum/internal/asm/f64/cumsum_amd64.s | 64 + .../v1/gonum/internal/asm/f64/div_amd64.s | 67 + .../v1/gonum/internal/asm/f64/divto_amd64.s | 73 + .../v1/gonum/internal/asm/f64/doc.go | 6 + .../v1/gonum/internal/asm/f64/dot.go | 35 + .../v1/gonum/internal/asm/f64/dot_amd64.s | 145 + .../v1/gonum/internal/asm/f64/ge_amd64.go | 22 + .../v1/gonum/internal/asm/f64/ge_noasm.go | 118 + .../v1/gonum/internal/asm/f64/gemvN_amd64.s | 685 + .../v1/gonum/internal/asm/f64/gemvT_amd64.s | 745 + .../v1/gonum/internal/asm/f64/ger_amd64.s | 591 + .../v1/gonum/internal/asm/f64/l1norm_amd64.s | 58 + .../gonum/internal/asm/f64/linfnorm_amd64.s | 57 + .../v1/gonum/internal/asm/f64/scal.go | 57 + .../v1/gonum/internal/asm/f64/scalinc_amd64.s | 113 + .../gonum/internal/asm/f64/scalincto_amd64.s | 122 + .../internal/asm/f64/scalunitary_amd64.s | 112 + .../internal/asm/f64/scalunitaryto_amd64.s | 113 + .../v1/gonum/internal/asm/f64/stubs_amd64.go | 172 + .../v1/gonum/internal/asm/f64/stubs_noasm.go | 170 + .../v1/gonum/internal/asm/f64/sum_amd64.s | 100 + .../v1/gonum/internal/cmplx64/abs.go | 14 + .../v1/gonum/internal/cmplx64/conj.go 
| 12 + .../v1/gonum/internal/cmplx64/doc.go | 7 + .../v1/gonum/internal/cmplx64/isinf.go | 25 + .../v1/gonum/internal/cmplx64/isnan.go | 29 + .../v1/gonum/internal/cmplx64/sqrt.go | 108 + .../gonum.org/v1/gonum/internal/math32/doc.go | 7 + .../v1/gonum/internal/math32/math.go | 111 + .../v1/gonum/internal/math32/signbit.go | 16 + .../v1/gonum/internal/math32/sqrt.go | 25 + .../v1/gonum/internal/math32/sqrt_amd64.go | 20 + .../v1/gonum/internal/math32/sqrt_amd64.s | 20 + vendor/gonum.org/v1/gonum/lapack/doc.go | 6 + .../gonum.org/v1/gonum/lapack/gonum/dbdsqr.go | 505 + .../gonum.org/v1/gonum/lapack/gonum/dgebak.go | 89 + .../gonum.org/v1/gonum/lapack/gonum/dgebal.go | 239 + .../gonum.org/v1/gonum/lapack/gonum/dgebd2.go | 86 + .../gonum.org/v1/gonum/lapack/gonum/dgebrd.go | 161 + .../gonum.org/v1/gonum/lapack/gonum/dgecon.go | 92 + .../gonum.org/v1/gonum/lapack/gonum/dgeev.go | 279 + .../gonum.org/v1/gonum/lapack/gonum/dgehd2.go | 97 + .../gonum.org/v1/gonum/lapack/gonum/dgehrd.go | 194 + .../gonum.org/v1/gonum/lapack/gonum/dgelq2.go | 65 + .../gonum.org/v1/gonum/lapack/gonum/dgelqf.go | 97 + .../gonum.org/v1/gonum/lapack/gonum/dgels.go | 219 + .../gonum.org/v1/gonum/lapack/gonum/dgeql2.go | 61 + .../gonum.org/v1/gonum/lapack/gonum/dgeqp3.go | 186 + .../gonum.org/v1/gonum/lapack/gonum/dgeqr2.go | 76 + .../gonum.org/v1/gonum/lapack/gonum/dgeqrf.go | 108 + .../gonum.org/v1/gonum/lapack/gonum/dgerq2.go | 68 + .../gonum.org/v1/gonum/lapack/gonum/dgerqf.go | 129 + .../gonum.org/v1/gonum/lapack/gonum/dgesvd.go | 1374 ++ .../gonum.org/v1/gonum/lapack/gonum/dgetf2.go | 84 + .../gonum.org/v1/gonum/lapack/gonum/dgetrf.go | 85 + .../gonum.org/v1/gonum/lapack/gonum/dgetri.go | 116 + .../gonum.org/v1/gonum/lapack/gonum/dgetrs.go | 72 + .../v1/gonum/lapack/gonum/dggsvd3.go | 242 + .../v1/gonum/lapack/gonum/dggsvp3.go | 281 + .../gonum.org/v1/gonum/lapack/gonum/dhseqr.go | 252 + .../gonum.org/v1/gonum/lapack/gonum/dlabrd.go | 173 + .../gonum.org/v1/gonum/lapack/gonum/dlacn2.go | 134 + .../gonum.org/v1/gonum/lapack/gonum/dlacpy.go | 59 + .../gonum.org/v1/gonum/lapack/gonum/dlae2.go | 49 + .../gonum.org/v1/gonum/lapack/gonum/dlaev2.go | 82 + .../gonum.org/v1/gonum/lapack/gonum/dlaexc.go | 269 + .../gonum.org/v1/gonum/lapack/gonum/dlags2.go | 182 + .../gonum.org/v1/gonum/lapack/gonum/dlahqr.go | 431 + .../gonum.org/v1/gonum/lapack/gonum/dlahr2.go | 195 + .../gonum.org/v1/gonum/lapack/gonum/dlaln2.go | 405 + .../gonum.org/v1/gonum/lapack/gonum/dlange.go | 89 + .../gonum.org/v1/gonum/lapack/gonum/dlanst.go | 75 + .../gonum.org/v1/gonum/lapack/gonum/dlansy.go | 132 + .../gonum.org/v1/gonum/lapack/gonum/dlantr.go | 260 + .../gonum.org/v1/gonum/lapack/gonum/dlanv2.go | 132 + .../gonum.org/v1/gonum/lapack/gonum/dlapll.go | 55 + .../gonum.org/v1/gonum/lapack/gonum/dlapmt.go | 89 + .../gonum.org/v1/gonum/lapack/gonum/dlapy2.go | 14 + .../gonum.org/v1/gonum/lapack/gonum/dlaqp2.go | 127 + .../gonum.org/v1/gonum/lapack/gonum/dlaqps.go | 244 + .../v1/gonum/lapack/gonum/dlaqr04.go | 478 + .../gonum.org/v1/gonum/lapack/gonum/dlaqr1.go | 59 + .../v1/gonum/lapack/gonum/dlaqr23.go | 415 + .../gonum.org/v1/gonum/lapack/gonum/dlaqr5.go | 644 + .../gonum.org/v1/gonum/lapack/gonum/dlarf.go | 101 + .../gonum.org/v1/gonum/lapack/gonum/dlarfb.go | 449 + .../gonum.org/v1/gonum/lapack/gonum/dlarfg.go | 71 + .../gonum.org/v1/gonum/lapack/gonum/dlarft.go | 166 + .../gonum.org/v1/gonum/lapack/gonum/dlarfx.go | 550 + .../gonum.org/v1/gonum/lapack/gonum/dlartg.go | 80 + .../gonum.org/v1/gonum/lapack/gonum/dlas2.go | 43 + 
.../gonum.org/v1/gonum/lapack/gonum/dlascl.go | 111 + .../gonum.org/v1/gonum/lapack/gonum/dlaset.go | 57 + .../gonum.org/v1/gonum/lapack/gonum/dlasq1.go | 100 + .../gonum.org/v1/gonum/lapack/gonum/dlasq2.go | 369 + .../gonum.org/v1/gonum/lapack/gonum/dlasq3.go | 172 + .../gonum.org/v1/gonum/lapack/gonum/dlasq4.go | 249 + .../gonum.org/v1/gonum/lapack/gonum/dlasq5.go | 140 + .../gonum.org/v1/gonum/lapack/gonum/dlasq6.go | 118 + .../gonum.org/v1/gonum/lapack/gonum/dlasr.go | 279 + .../gonum.org/v1/gonum/lapack/gonum/dlasrt.go | 36 + .../gonum.org/v1/gonum/lapack/gonum/dlassq.go | 41 + .../gonum.org/v1/gonum/lapack/gonum/dlasv2.go | 115 + .../gonum.org/v1/gonum/lapack/gonum/dlaswp.go | 52 + .../gonum.org/v1/gonum/lapack/gonum/dlasy2.go | 290 + .../gonum.org/v1/gonum/lapack/gonum/dlatrd.go | 165 + .../gonum.org/v1/gonum/lapack/gonum/dlatrs.go | 359 + .../gonum.org/v1/gonum/lapack/gonum/dlauu2.go | 64 + .../gonum.org/v1/gonum/lapack/gonum/dlauum.go | 81 + vendor/gonum.org/v1/gonum/lapack/gonum/doc.go | 28 + .../gonum.org/v1/gonum/lapack/gonum/dorg2l.go | 76 + .../gonum.org/v1/gonum/lapack/gonum/dorg2r.go | 75 + .../gonum.org/v1/gonum/lapack/gonum/dorgbr.go | 138 + .../gonum.org/v1/gonum/lapack/gonum/dorghr.go | 101 + .../gonum.org/v1/gonum/lapack/gonum/dorgl2.go | 71 + .../gonum.org/v1/gonum/lapack/gonum/dorglq.go | 123 + .../gonum.org/v1/gonum/lapack/gonum/dorgql.go | 136 + .../gonum.org/v1/gonum/lapack/gonum/dorgqr.go | 134 + .../gonum.org/v1/gonum/lapack/gonum/dorgtr.go | 104 + .../gonum.org/v1/gonum/lapack/gonum/dorm2r.go | 101 + .../gonum.org/v1/gonum/lapack/gonum/dormbr.go | 178 + .../gonum.org/v1/gonum/lapack/gonum/dormhr.go | 129 + .../gonum.org/v1/gonum/lapack/gonum/dorml2.go | 102 + .../gonum.org/v1/gonum/lapack/gonum/dormlq.go | 174 + .../gonum.org/v1/gonum/lapack/gonum/dormqr.go | 177 + .../gonum.org/v1/gonum/lapack/gonum/dormr2.go | 103 + .../gonum.org/v1/gonum/lapack/gonum/dpbtf2.go | 110 + .../gonum.org/v1/gonum/lapack/gonum/dpocon.go | 90 + .../gonum.org/v1/gonum/lapack/gonum/dpotf2.go | 82 + .../gonum.org/v1/gonum/lapack/gonum/dpotrf.go | 81 + .../gonum.org/v1/gonum/lapack/gonum/dpotri.go | 44 + .../gonum.org/v1/gonum/lapack/gonum/dpotrs.go | 62 + .../gonum.org/v1/gonum/lapack/gonum/drscl.go | 63 + .../gonum.org/v1/gonum/lapack/gonum/dsteqr.go | 376 + .../gonum.org/v1/gonum/lapack/gonum/dsterf.go | 285 + .../gonum.org/v1/gonum/lapack/gonum/dsyev.go | 130 + .../gonum.org/v1/gonum/lapack/gonum/dsytd2.go | 136 + .../gonum.org/v1/gonum/lapack/gonum/dsytrd.go | 172 + .../gonum.org/v1/gonum/lapack/gonum/dtgsja.go | 373 + .../gonum.org/v1/gonum/lapack/gonum/dtrcon.go | 90 + .../v1/gonum/lapack/gonum/dtrevc3.go | 885 + .../gonum.org/v1/gonum/lapack/gonum/dtrexc.go | 230 + .../gonum.org/v1/gonum/lapack/gonum/dtrti2.go | 69 + .../gonum.org/v1/gonum/lapack/gonum/dtrtri.go | 72 + .../gonum.org/v1/gonum/lapack/gonum/dtrtrs.go | 55 + .../gonum.org/v1/gonum/lapack/gonum/errors.go | 174 + .../gonum.org/v1/gonum/lapack/gonum/iladlc.go | 45 + .../gonum.org/v1/gonum/lapack/gonum/iladlr.go | 41 + .../gonum.org/v1/gonum/lapack/gonum/ilaenv.go | 387 + .../gonum.org/v1/gonum/lapack/gonum/iparmq.go | 115 + .../gonum.org/v1/gonum/lapack/gonum/lapack.go | 55 + .../internal/testdata/dlahr2test/main.go | 102 + .../internal/testdata/dlaqr5test/main.go | 186 + .../internal/testdata/dlasqtest/dcopy.f | 115 + .../internal/testdata/dlasqtest/disnan.f | 80 + .../internal/testdata/dlasqtest/dlaisnan.f | 91 + .../internal/testdata/dlasqtest/dlamch.f | 193 + .../internal/testdata/dlasqtest/dlas2.f | 183 + 
.../internal/testdata/dlasqtest/dlascl.f | 364 + .../internal/testdata/dlasqtest/dlasq1.f | 226 + .../internal/testdata/dlasqtest/dlasq2.f | 652 + .../internal/testdata/dlasqtest/dlasq3.f | 498 + .../internal/testdata/dlasqtest/dlasq4.f | 427 + .../internal/testdata/dlasqtest/dlasq5.f | 413 + .../internal/testdata/dlasqtest/dlasq6.f | 258 + .../internal/testdata/dlasqtest/dlasrt.f | 303 + .../internal/testdata/dlasqtest/ieeeck.f | 203 + .../internal/testdata/dlasqtest/ilaenv.f | 624 + .../internal/testdata/dlasqtest/iparmq.f | 322 + .../internal/testdata/dlasqtest/lsame.f | 125 + .../testdata/dlasqtest/testdlasq1.f90 | 626 + .../testdata/dlasqtest/testdlasq2.f90 | 171 + .../testdata/dlasqtest/testdlasq3.f90 | 132 + .../testdata/dlasqtest/testdlasq4.f90 | 435 + .../internal/testdata/dlasqtest/xerbla.f | 99 + .../internal/testdata/dsterftest/disnan.f | 80 + .../internal/testdata/dsterftest/dlae2.f | 185 + .../internal/testdata/dsterftest/dlaisnan.f | 91 + .../internal/testdata/dsterftest/dlamch.f | 193 + .../internal/testdata/dsterftest/dlanst.f | 186 + .../internal/testdata/dsterftest/dlapy2.f | 104 + .../internal/testdata/dsterftest/dlascl.f | 364 + .../internal/testdata/dsterftest/dlasrt.f | 303 + .../internal/testdata/dsterftest/dlassq.f | 155 + .../internal/testdata/dsterftest/dsterf.f | 448 + .../internal/testdata/dsterftest/lsame.f | 125 + .../testdata/dsterftest/testdsterf.f90 | 15 + .../internal/testdata/dsterftest/xerbla.f | 99 + .../lapack/internal/testdata/netlib/daxpy.f | 115 + .../lapack/internal/testdata/netlib/dcopy.f | 115 + .../lapack/internal/testdata/netlib/dgemm.f | 384 + .../lapack/internal/testdata/netlib/dgemv.f | 330 + .../lapack/internal/testdata/netlib/dlabad.f | 105 + .../lapack/internal/testdata/netlib/dlacpy.f | 156 + .../lapack/internal/testdata/netlib/dlahr2.f | 326 + .../lapack/internal/testdata/netlib/dlamch.f | 189 + .../lapack/internal/testdata/netlib/dlapy2.f | 104 + .../lapack/internal/testdata/netlib/dlaqr1.f | 179 + .../lapack/internal/testdata/netlib/dlaqr5.f | 921 + .../lapack/internal/testdata/netlib/dlarfg.f | 196 + .../lapack/internal/testdata/netlib/dlaset.f | 184 + .../lapack/internal/testdata/netlib/dnrm2.f | 112 + .../lapack/internal/testdata/netlib/dscal.f | 110 + .../lapack/internal/testdata/netlib/dtrmm.f | 415 + .../lapack/internal/testdata/netlib/dtrmv.f | 342 + .../lapack/internal/testdata/netlib/lsame.f | 125 + .../lapack/internal/testdata/netlib/netlib.go | 68 + .../lapack/internal/testdata/netlib/xerbla.f | 89 + vendor/gonum.org/v1/gonum/lapack/lapack.go | 213 + .../gonum.org/v1/gonum/lapack/lapack64/doc.go | 20 + .../v1/gonum/lapack/lapack64/lapack64.go | 581 + .../v1/gonum/lapack/testlapack/dbdsqr.go | 198 + .../v1/gonum/lapack/testlapack/dgebak.go | 109 + .../v1/gonum/lapack/testlapack/dgebal.go | 174 + .../v1/gonum/lapack/testlapack/dgebd2.go | 60 + .../v1/gonum/lapack/testlapack/dgebrd.go | 152 + .../v1/gonum/lapack/testlapack/dgecon.go | 96 + .../v1/gonum/lapack/testlapack/dgeev.go | 740 + .../v1/gonum/lapack/testlapack/dgeev_bench.go | 62 + .../v1/gonum/lapack/testlapack/dgehd2.go | 197 + .../v1/gonum/lapack/testlapack/dgehrd.go | 210 + .../v1/gonum/lapack/testlapack/dgelq2.go | 101 + .../v1/gonum/lapack/testlapack/dgelqf.go | 101 + .../v1/gonum/lapack/testlapack/dgels.go | 183 + .../v1/gonum/lapack/testlapack/dgeql2.go | 100 + .../v1/gonum/lapack/testlapack/dgeqp3.go | 139 + .../v1/gonum/lapack/testlapack/dgeqr2.go | 102 + .../v1/gonum/lapack/testlapack/dgeqrf.go | 102 + .../v1/gonum/lapack/testlapack/dgerq2.go | 108 + 
.../v1/gonum/lapack/testlapack/dgerqf.go | 133 + .../v1/gonum/lapack/testlapack/dgesvd.go | 371 + .../v1/gonum/lapack/testlapack/dgetf2.go | 197 + .../v1/gonum/lapack/testlapack/dgetrf.go | 66 + .../v1/gonum/lapack/testlapack/dgetri.go | 95 + .../v1/gonum/lapack/testlapack/dgetrs.go | 114 + .../v1/gonum/lapack/testlapack/dggsvd3.go | 174 + .../v1/gonum/lapack/testlapack/dggsvp3.go | 147 + .../v1/gonum/lapack/testlapack/dhseqr.go | 861 + .../v1/gonum/lapack/testlapack/dlabrd.go | 108 + .../v1/gonum/lapack/testlapack/dlacn2.go | 73 + .../v1/gonum/lapack/testlapack/dlacpy.go | 90 + .../v1/gonum/lapack/testlapack/dlae2.go | 53 + .../v1/gonum/lapack/testlapack/dlaev2.go | 47 + .../v1/gonum/lapack/testlapack/dlaexc.go | 228 + .../v1/gonum/lapack/testlapack/dlags2.go | 119 + .../v1/gonum/lapack/testlapack/dlahqr.go | 442 + .../v1/gonum/lapack/testlapack/dlahr2.go | 241 + .../v1/gonum/lapack/testlapack/dlaln2.go | 152 + .../v1/gonum/lapack/testlapack/dlange.go | 101 + .../v1/gonum/lapack/testlapack/dlanst.go | 62 + .../v1/gonum/lapack/testlapack/dlansy.go | 93 + .../v1/gonum/lapack/testlapack/dlantr.go | 90 + .../v1/gonum/lapack/testlapack/dlanv2.go | 113 + .../v1/gonum/lapack/testlapack/dlapll.go | 51 + .../v1/gonum/lapack/testlapack/dlapmt.go | 113 + .../v1/gonum/lapack/testlapack/dlapy2.go | 31 + .../v1/gonum/lapack/testlapack/dlaqp2.go | 105 + .../v1/gonum/lapack/testlapack/dlaqps.go | 103 + .../v1/gonum/lapack/testlapack/dlaqr04.go | 450 + .../v1/gonum/lapack/testlapack/dlaqr1.go | 92 + .../v1/gonum/lapack/testlapack/dlaqr23.go | 369 + .../v1/gonum/lapack/testlapack/dlaqr5.go | 215 + .../v1/gonum/lapack/testlapack/dlarf.go | 174 + .../v1/gonum/lapack/testlapack/dlarfb.go | 162 + .../v1/gonum/lapack/testlapack/dlarfg.go | 134 + .../v1/gonum/lapack/testlapack/dlarft.go | 169 + .../v1/gonum/lapack/testlapack/dlarfx.go | 90 + .../v1/gonum/lapack/testlapack/dlartg.go | 119 + .../v1/gonum/lapack/testlapack/dlas2.go | 34 + .../v1/gonum/lapack/testlapack/dlascl.go | 107 + .../v1/gonum/lapack/testlapack/dlaset.go | 77 + .../v1/gonum/lapack/testlapack/dlasq1.go | 88 + .../v1/gonum/lapack/testlapack/dlasq2.go | 715 + .../v1/gonum/lapack/testlapack/dlasq3.go | 2703 +++ .../v1/gonum/lapack/testlapack/dlasq4.go | 3101 ++++ .../v1/gonum/lapack/testlapack/dlasq5.go | 1992 ++ .../v1/gonum/lapack/testlapack/dlasr.go | 168 + .../v1/gonum/lapack/testlapack/dlasrt.go | 80 + .../v1/gonum/lapack/testlapack/dlasv2.go | 49 + .../v1/gonum/lapack/testlapack/dlaswp.go | 130 + .../v1/gonum/lapack/testlapack/dlasy2.go | 105 + .../v1/gonum/lapack/testlapack/dlatrd.go | 272 + .../v1/gonum/lapack/testlapack/dlatrs.go | 142 + .../v1/gonum/lapack/testlapack/dlauu2.go | 112 + .../v1/gonum/lapack/testlapack/dlauum.go | 30 + .../v1/gonum/lapack/testlapack/doc.go | 6 + .../v1/gonum/lapack/testlapack/dorg2l.go | 55 + .../v1/gonum/lapack/testlapack/dorg2r.go | 79 + .../v1/gonum/lapack/testlapack/dorgbr.go | 156 + .../v1/gonum/lapack/testlapack/dorghr.go | 100 + .../v1/gonum/lapack/testlapack/dorgl2.go | 66 + .../v1/gonum/lapack/testlapack/dorglq.go | 84 + .../v1/gonum/lapack/testlapack/dorgql.go | 131 + .../v1/gonum/lapack/testlapack/dorgqr.go | 84 + .../v1/gonum/lapack/testlapack/dorgtr.go | 161 + .../v1/gonum/lapack/testlapack/dorm2r.go | 140 + .../v1/gonum/lapack/testlapack/dormbr.go | 165 + .../v1/gonum/lapack/testlapack/dormhr.go | 133 + .../v1/gonum/lapack/testlapack/dorml2.go | 145 + .../v1/gonum/lapack/testlapack/dormlq.go | 131 + .../v1/gonum/lapack/testlapack/dormqr.go | 155 + .../v1/gonum/lapack/testlapack/dormr2.go | 138 
+ .../v1/gonum/lapack/testlapack/dpbtf2.go | 52 + .../v1/gonum/lapack/testlapack/dpocon.go | 158 + .../v1/gonum/lapack/testlapack/dpotf2.go | 117 + .../v1/gonum/lapack/testlapack/dpotrf.go | 136 + .../v1/gonum/lapack/testlapack/dpotri.go | 109 + .../v1/gonum/lapack/testlapack/dpotrs.go | 93 + .../v1/gonum/lapack/testlapack/drscl.go | 52 + .../v1/gonum/lapack/testlapack/dsteqr.go | 173 + .../v1/gonum/lapack/testlapack/dsterf.go | 127 + .../v1/gonum/lapack/testlapack/dsyev.go | 116 + .../v1/gonum/lapack/testlapack/dsytd2.go | 188 + .../v1/gonum/lapack/testlapack/dsytrd.go | 161 + .../v1/gonum/lapack/testlapack/dtgsja.go | 166 + .../v1/gonum/lapack/testlapack/dtrcon.go | 185 + .../v1/gonum/lapack/testlapack/dtrevc3.go | 223 + .../v1/gonum/lapack/testlapack/dtrexc.go | 220 + .../v1/gonum/lapack/testlapack/dtrti2.go | 158 + .../v1/gonum/lapack/testlapack/dtrtri.go | 90 + .../v1/gonum/lapack/testlapack/fortran.go | 33 + .../v1/gonum/lapack/testlapack/general.go | 1578 ++ .../v1/gonum/lapack/testlapack/iladlc.go | 83 + .../v1/gonum/lapack/testlapack/iladlr.go | 83 + .../v1/gonum/lapack/testlapack/matgen.go | 740 + .../gonum/lapack/testlapack/test_matrices.go | 616 + vendor/gonum.org/v1/gonum/mat/band.go | 277 + vendor/gonum.org/v1/gonum/mat/cdense.go | 168 + vendor/gonum.org/v1/gonum/mat/cholesky.go | 673 + vendor/gonum.org/v1/gonum/mat/cmatrix.go | 210 + vendor/gonum.org/v1/gonum/mat/consts.go | 15 + vendor/gonum.org/v1/gonum/mat/dense.go | 558 + .../v1/gonum/mat/dense_arithmetic.go | 886 + vendor/gonum.org/v1/gonum/mat/diagonal.go | 322 + vendor/gonum.org/v1/gonum/mat/doc.go | 169 + vendor/gonum.org/v1/gonum/mat/eigen.go | 350 + vendor/gonum.org/v1/gonum/mat/errors.go | 149 + vendor/gonum.org/v1/gonum/mat/format.go | 238 + vendor/gonum.org/v1/gonum/mat/gsvd.go | 415 + vendor/gonum.org/v1/gonum/mat/hogsvd.go | 233 + .../v1/gonum/mat/index_bound_checks.go | 348 + .../v1/gonum/mat/index_no_bound_checks.go | 359 + vendor/gonum.org/v1/gonum/mat/inner.go | 121 + vendor/gonum.org/v1/gonum/mat/io.go | 492 + vendor/gonum.org/v1/gonum/mat/lq.go | 262 + vendor/gonum.org/v1/gonum/mat/lu.go | 422 + vendor/gonum.org/v1/gonum/mat/matrix.go | 985 + vendor/gonum.org/v1/gonum/mat/offset.go | 20 + .../v1/gonum/mat/offset_appengine.go | 24 + vendor/gonum.org/v1/gonum/mat/pool.go | 236 + vendor/gonum.org/v1/gonum/mat/product.go | 193 + vendor/gonum.org/v1/gonum/mat/qr.go | 260 + vendor/gonum.org/v1/gonum/mat/shadow.go | 226 + vendor/gonum.org/v1/gonum/mat/solve.go | 140 + vendor/gonum.org/v1/gonum/mat/svd.go | 247 + vendor/gonum.org/v1/gonum/mat/symband.go | 231 + vendor/gonum.org/v1/gonum/mat/symmetric.go | 602 + vendor/gonum.org/v1/gonum/mat/triangular.go | 683 + vendor/gonum.org/v1/gonum/mat/triband.go | 367 + vendor/gonum.org/v1/gonum/mat/vector.go | 741 + vendor/gonum.org/v1/gonum/mathext/airy.go | 37 + vendor/gonum.org/v1/gonum/mathext/beta.go | 34 + vendor/gonum.org/v1/gonum/mathext/betainc.go | 29 + vendor/gonum.org/v1/gonum/mathext/digamma.go | 44 + vendor/gonum.org/v1/gonum/mathext/doc.go | 7 + .../gonum.org/v1/gonum/mathext/ell_carlson.go | 156 + .../v1/gonum/mathext/ell_complete.go | 355 + vendor/gonum.org/v1/gonum/mathext/erf.go | 91 + .../gonum.org/v1/gonum/mathext/gamma_inc.go | 50 + .../v1/gonum/mathext/gamma_inc_inv.go | 56 + .../v1/gonum/mathext/internal/amos/amos.go | 2154 +++ .../mathext/internal/amos/amoslib/d1mach.f | 97 + .../mathext/internal/amos/amoslib/dgamln.f | 189 + .../mathext/internal/amos/amoslib/fortran.go | 366 + .../mathext/internal/amos/amoslib/i1mach.f | 113 + 
.../mathext/internal/amos/amoslib/myabs.f | 5 + .../mathext/internal/amos/amoslib/myatan.f | 5 + .../mathext/internal/amos/amoslib/mycos.f | 5 + .../mathext/internal/amos/amoslib/myexp.f | 5 + .../mathext/internal/amos/amoslib/mylog.f | 5 + .../mathext/internal/amos/amoslib/mymax.f | 5 + .../mathext/internal/amos/amoslib/mymin.f | 5 + .../mathext/internal/amos/amoslib/mysin.f | 5 + .../mathext/internal/amos/amoslib/mysqrt.f | 5 + .../mathext/internal/amos/amoslib/mytan.f | 5 + .../mathext/internal/amos/amoslib/xerror.f | 22 + .../mathext/internal/amos/amoslib/zabs.f | 34 + .../mathext/internal/amos/amoslib/zacai.f | 99 + .../mathext/internal/amos/amoslib/zacon.f | 203 + .../mathext/internal/amos/amoslib/zairy.f | 395 + .../mathext/internal/amos/amoslib/zasyi.f | 169 + .../mathext/internal/amos/amoslib/zbesh.f | 348 + .../mathext/internal/amos/amoslib/zbesi.f | 269 + .../mathext/internal/amos/amoslib/zbesj.f | 266 + .../mathext/internal/amos/amoslib/zbesk.f | 281 + .../mathext/internal/amos/amoslib/zbesy.f | 244 + .../mathext/internal/amos/amoslib/zbinu.f | 110 + .../mathext/internal/amos/amoslib/zbiry.f | 364 + .../mathext/internal/amos/amoslib/zbknu.f | 568 + .../mathext/internal/amos/amoslib/zbuni.f | 174 + .../mathext/internal/amos/amoslib/zbunk.f | 35 + .../mathext/internal/amos/amoslib/zdiv.f | 19 + .../mathext/internal/amos/amoslib/zexp.f | 16 + .../mathext/internal/amos/amoslib/zkscl.f | 121 + .../mathext/internal/amos/amoslib/zlog.f | 41 + .../mathext/internal/amos/amoslib/zmlri.f | 206 + .../mathext/internal/amos/amoslib/zmlt.f | 15 + .../mathext/internal/amos/amoslib/zrati.f | 132 + .../mathext/internal/amos/amoslib/zs1s2.f | 51 + .../mathext/internal/amos/amoslib/zseri.f | 195 + .../mathext/internal/amos/amoslib/zshch.f | 22 + .../mathext/internal/amos/amoslib/zsqrt.f | 45 + .../mathext/internal/amos/amoslib/zuchk.f | 28 + .../mathext/internal/amos/amoslib/zunhj.f | 714 + .../mathext/internal/amos/amoslib/zuni1.f | 204 + .../mathext/internal/amos/amoslib/zuni2.f | 267 + .../mathext/internal/amos/amoslib/zunik.f | 211 + .../mathext/internal/amos/amoslib/zunk1.f | 426 + .../mathext/internal/amos/amoslib/zunk2.f | 505 + .../mathext/internal/amos/amoslib/zuoik.f | 194 + .../mathext/internal/amos/amoslib/zwrsk.f | 94 + .../v1/gonum/mathext/internal/amos/doc.go | 6 + .../gonum/mathext/internal/cephes/cephes.go | 28 + .../v1/gonum/mathext/internal/cephes/doc.go | 6 + .../v1/gonum/mathext/internal/cephes/igam.go | 311 + .../v1/gonum/mathext/internal/cephes/igami.go | 153 + .../gonum/mathext/internal/cephes/incbeta.go | 312 + .../v1/gonum/mathext/internal/cephes/incbi.go | 248 + .../gonum/mathext/internal/cephes/lanczos.go | 153 + .../v1/gonum/mathext/internal/cephes/ndtri.go | 150 + .../gonum/mathext/internal/cephes/polevl.go | 82 + .../v1/gonum/mathext/internal/cephes/unity.go | 170 + .../v1/gonum/mathext/internal/cephes/zeta.go | 110 + .../v1/gonum/mathext/internal/gonum/beta.go | 52 + .../v1/gonum/mathext/internal/gonum/doc.go | 7 + .../v1/gonum/mathext/internal/gonum/gonum.go | 5 + vendor/gonum.org/v1/gonum/mathext/mvgamma.go | 32 + vendor/gonum.org/v1/gonum/mathext/roots.go | 178 + vendor/gonum.org/v1/gonum/mathext/zeta.go | 20 + vendor/gonum.org/v1/gonum/num/dual/doc.go | 13 + vendor/gonum.org/v1/gonum/num/dual/dual.go | 123 + .../gonum.org/v1/gonum/num/dual/dual_fike.go | 286 + .../v1/gonum/num/dual/dual_hyperbolic.go | 165 + .../gonum.org/v1/gonum/num/dualcmplx/doc.go | 12 + .../gonum.org/v1/gonum/num/dualcmplx/dual.go | 267 + vendor/gonum.org/v1/gonum/num/dualquat/doc.go | 13 + 
.../gonum.org/v1/gonum/num/dualquat/dual.go | 169 + .../v1/gonum/num/dualquat/dual_fike.go | 152 + .../gonum.org/v1/gonum/num/hyperdual/doc.go | 14 + .../v1/gonum/num/hyperdual/hyperdual.go | 142 + .../v1/gonum/num/hyperdual/hyperdual_fike.go | 336 + .../num/hyperdual/hyperdual_hyperbolic.go | 202 + vendor/gonum.org/v1/gonum/num/quat/abs.go | 52 + vendor/gonum.org/v1/gonum/num/quat/conj.go | 23 + vendor/gonum.org/v1/gonum/num/quat/doc.go | 10 + vendor/gonum.org/v1/gonum/num/quat/exp.go | 64 + vendor/gonum.org/v1/gonum/num/quat/inf.go | 22 + vendor/gonum.org/v1/gonum/num/quat/nan.go | 26 + vendor/gonum.org/v1/gonum/num/quat/quat.go | 401 + vendor/gonum.org/v1/gonum/num/quat/trig.go | 171 + vendor/gonum.org/v1/gonum/num/quat/util.go | 26 + .../v1/gonum/optimize/backtracking.go | 82 + vendor/gonum.org/v1/gonum/optimize/bfgs.go | 192 + .../gonum.org/v1/gonum/optimize/bisection.go | 146 + vendor/gonum.org/v1/gonum/optimize/cg.go | 349 + vendor/gonum.org/v1/gonum/optimize/cmaes.go | 464 + .../v1/gonum/optimize/convex/lp/convert.go | 137 + .../v1/gonum/optimize/convex/lp/doc.go | 6 + .../v1/gonum/optimize/convex/lp/simplex.go | 640 + vendor/gonum.org/v1/gonum/optimize/doc.go | 6 + vendor/gonum.org/v1/gonum/optimize/errors.go | 80 + .../v1/gonum/optimize/functionconvergence.go | 76 + .../v1/gonum/optimize/functions/doc.go | 15 + .../v1/gonum/optimize/functions/functions.go | 1819 ++ .../v1/gonum/optimize/functions/minsurf.go | 256 + .../v1/gonum/optimize/functions/validate.go | 127 + .../v1/gonum/optimize/functions/vlse.go | 395 + .../v1/gonum/optimize/gradientdescent.go | 95 + .../v1/gonum/optimize/guessandcheck.go | 92 + .../gonum.org/v1/gonum/optimize/interfaces.go | 130 + vendor/gonum.org/v1/gonum/optimize/lbfgs.go | 199 + .../gonum.org/v1/gonum/optimize/linesearch.go | 218 + .../gonum.org/v1/gonum/optimize/listsearch.go | 123 + vendor/gonum.org/v1/gonum/optimize/local.go | 146 + .../gonum.org/v1/gonum/optimize/minimize.go | 586 + .../v1/gonum/optimize/morethuente.go | 385 + .../gonum.org/v1/gonum/optimize/neldermead.go | 349 + vendor/gonum.org/v1/gonum/optimize/newton.go | 178 + vendor/gonum.org/v1/gonum/optimize/printer.go | 106 + .../gonum.org/v1/gonum/optimize/stepsizers.go | 185 + .../v1/gonum/optimize/termination.go | 123 + vendor/gonum.org/v1/gonum/optimize/types.go | 273 + .../v1/gonum/spatial/barneshut/barneshut2.go | 249 + .../v1/gonum/spatial/barneshut/barneshut3.go | 300 + .../v1/gonum/spatial/barneshut/bounds.go | 9 + .../v1/gonum/spatial/barneshut/doc.go | 10 + .../v1/gonum/spatial/barneshut/no_bounds.go | 9 + .../gonum.org/v1/gonum/spatial/kdtree/doc.go | 8 + .../v1/gonum/spatial/kdtree/kdtree.go | 467 + .../v1/gonum/spatial/kdtree/medians.go | 106 + .../v1/gonum/spatial/kdtree/points.go | 88 + vendor/gonum.org/v1/gonum/spatial/r2/doc.go | 6 + .../gonum.org/v1/gonum/spatial/r2/vector.go | 36 + vendor/gonum.org/v1/gonum/spatial/r3/doc.go | 6 + .../gonum.org/v1/gonum/spatial/r3/vector.go | 39 + .../gonum.org/v1/gonum/stat/combin/combin.go | 298 + vendor/gonum.org/v1/gonum/stat/combin/doc.go | 7 + vendor/gonum.org/v1/gonum/stat/distmat/doc.go | 6 + .../v1/gonum/stat/distmat/general.go | 7 + .../v1/gonum/stat/distmat/wishart.go | 210 + .../v1/gonum/stat/distmv/dirichlet.go | 143 + vendor/gonum.org/v1/gonum/stat/distmv/doc.go | 6 + .../gonum.org/v1/gonum/stat/distmv/general.go | 30 + .../v1/gonum/stat/distmv/interfaces.go | 33 + .../gonum.org/v1/gonum/stat/distmv/normal.go | 391 + .../v1/gonum/stat/distmv/statdist.go | 353 + .../v1/gonum/stat/distmv/studentst.go | 354 + 
.../gonum.org/v1/gonum/stat/distmv/uniform.go | 198 + .../v1/gonum/stat/distuv/bernoulli.go | 132 + vendor/gonum.org/v1/gonum/stat/distuv/beta.go | 136 + .../v1/gonum/stat/distuv/binomial.go | 188 + .../v1/gonum/stat/distuv/categorical.go | 185 + .../v1/gonum/stat/distuv/chisquared.go | 100 + .../v1/gonum/stat/distuv/constants.go | 24 + vendor/gonum.org/v1/gonum/stat/distuv/doc.go | 6 + .../v1/gonum/stat/distuv/exponential.go | 260 + vendor/gonum.org/v1/gonum/stat/distuv/f.go | 133 + .../gonum.org/v1/gonum/stat/distuv/gamma.go | 246 + .../gonum.org/v1/gonum/stat/distuv/general.go | 24 + .../gonum.org/v1/gonum/stat/distuv/gumbel.go | 117 + .../v1/gonum/stat/distuv/interfaces.go | 22 + .../v1/gonum/stat/distuv/inversegamma.go | 123 + .../gonum.org/v1/gonum/stat/distuv/laplace.go | 259 + .../v1/gonum/stat/distuv/lognormal.go | 113 + vendor/gonum.org/v1/gonum/stat/distuv/norm.go | 254 + .../gonum.org/v1/gonum/stat/distuv/pareto.go | 122 + .../gonum.org/v1/gonum/stat/distuv/poisson.go | 138 + .../v1/gonum/stat/distuv/statdist.go | 126 + .../v1/gonum/stat/distuv/studentst.go | 161 + .../v1/gonum/stat/distuv/triangle.go | 193 + .../gonum.org/v1/gonum/stat/distuv/uniform.go | 159 + .../gonum.org/v1/gonum/stat/distuv/weibull.go | 243 + vendor/gonum.org/v1/gonum/stat/doc.go | 6 + vendor/gonum.org/v1/gonum/stat/mds/doc.go | 6 + vendor/gonum.org/v1/gonum/stat/mds/mds.go | 90 + vendor/gonum.org/v1/gonum/stat/pca_cca.go | 316 + vendor/gonum.org/v1/gonum/stat/roc.go | 125 + .../gonum.org/v1/gonum/stat/samplemv/doc.go | 7 + .../v1/gonum/stat/samplemv/halton.go | 173 + .../gonum/stat/samplemv/metropolishastings.go | 213 + .../v1/gonum/stat/samplemv/samplemv.go | 263 + .../gonum.org/v1/gonum/stat/sampleuv/doc.go | 11 + .../v1/gonum/stat/sampleuv/sample.go | 373 + .../v1/gonum/stat/sampleuv/weighted.go | 140 + .../gonum/stat/sampleuv/withoutreplacement.go | 61 + vendor/gonum.org/v1/gonum/stat/spatial/doc.go | 6 + .../v1/gonum/stat/spatial/spatial.go | 162 + vendor/gonum.org/v1/gonum/stat/stat.go | 1295 ++ vendor/gonum.org/v1/gonum/stat/statmat.go | 140 + .../v1/gonum/unit/absorbedradioactivedose.go | 71 + .../gonum.org/v1/gonum/unit/acceleration.go | 69 + vendor/gonum.org/v1/gonum/unit/angle.go | 70 + vendor/gonum.org/v1/gonum/unit/area.go | 68 + vendor/gonum.org/v1/gonum/unit/capacitance.go | 73 + vendor/gonum.org/v1/gonum/unit/charge.go | 71 + vendor/gonum.org/v1/gonum/unit/conductance.go | 73 + .../v1/gonum/unit/constant/atomicmass.go | 13 + .../v1/gonum/unit/constant/avogadro.go | 52 + .../v1/gonum/unit/constant/boltzmann.go | 55 + .../v1/gonum/unit/constant/defined_types.go | 56 + .../gonum.org/v1/gonum/unit/constant/doc.go | 12 + .../gonum/unit/constant/electricconstant.go | 55 + .../gonum/unit/constant/elementarycharge.go | 13 + .../v1/gonum/unit/constant/faraday.go | 54 + .../v1/gonum/unit/constant/finestructure.go | 12 + .../gonum/unit/constant/generate_constants.go | 334 + .../unit/constant/generate_defined_types.go | 110 + .../v1/gonum/unit/constant/gravitational.go | 54 + .../gonum/unit/constant/lightspeedinvacuum.go | 13 + .../gonum/unit/constant/magneticconstant.go | 55 + .../v1/gonum/unit/constant/planck.go | 54 + .../v1/gonum/unit/constant/standardgravity.go | 13 + vendor/gonum.org/v1/gonum/unit/current.go | 70 + vendor/gonum.org/v1/gonum/unit/dimless.go | 65 + vendor/gonum.org/v1/gonum/unit/doc.go | 118 + vendor/gonum.org/v1/gonum/unit/energy.go | 72 + .../gonum/unit/equivalentradioactivedose.go | 71 + vendor/gonum.org/v1/gonum/unit/force.go | 72 + vendor/gonum.org/v1/gonum/unit/frequency.go 
| 70 + .../gonum.org/v1/gonum/unit/generate_unit.go | 626 + vendor/gonum.org/v1/gonum/unit/inductance.go | 73 + vendor/gonum.org/v1/gonum/unit/length.go | 70 + .../v1/gonum/unit/luminousintensity.go | 70 + .../gonum.org/v1/gonum/unit/magneticflux.go | 73 + .../v1/gonum/unit/magneticfluxdensity.go | 72 + vendor/gonum.org/v1/gonum/unit/mass.go | 74 + vendor/gonum.org/v1/gonum/unit/mole.go | 70 + vendor/gonum.org/v1/gonum/unit/power.go | 72 + vendor/gonum.org/v1/gonum/unit/prefixes.go | 29 + vendor/gonum.org/v1/gonum/unit/pressure.go | 72 + .../gonum.org/v1/gonum/unit/radioactivity.go | 70 + vendor/gonum.org/v1/gonum/unit/resistance.go | 73 + vendor/gonum.org/v1/gonum/unit/temperature.go | 70 + vendor/gonum.org/v1/gonum/unit/time.go | 75 + vendor/gonum.org/v1/gonum/unit/torque.go | 72 + vendor/gonum.org/v1/gonum/unit/unittype.go | 375 + vendor/gonum.org/v1/gonum/unit/velocity.go | 69 + vendor/gonum.org/v1/gonum/unit/voltage.go | 73 + vendor/gonum.org/v1/gonum/unit/volume.go | 70 + vendor/gonum.org/v1/gonum/version.go | 48 + vendor/google.golang.org/genproto/LICENSE | 202 + .../ads/googleads/v1/common/ad_asset.pb.go | 235 + .../googleads/v1/common/ad_type_infos.pb.go | 2449 +++ .../ads/googleads/v1/common/asset_types.pb.go | 321 + .../ads/googleads/v1/common/bidding.pb.go | 878 + .../googleads/v1/common/click_location.pb.go | 132 + .../ads/googleads/v1/common/criteria.pb.go | 3275 ++++ .../criterion_category_availability.pb.go | 261 + .../v1/common/custom_parameter.pb.go | 102 + .../ads/googleads/v1/common/dates.pb.go | 101 + .../explorer_auto_optimizer_setting.pb.go | 95 + .../ads/googleads/v1/common/extensions.pb.go | 1368 ++ .../ads/googleads/v1/common/feed_common.pb.go | 103 + .../googleads/v1/common/final_app_url.pb.go | 112 + .../googleads/v1/common/frequency_cap.pb.go | 185 + .../v1/common/keyword_plan_common.pb.go | 108 + .../v1/common/matching_function.pb.go | 731 + .../ads/googleads/v1/common/metrics.pb.go | 1375 ++ .../ads/googleads/v1/common/policy.pb.go | 1207 ++ .../v1/common/real_time_bidding_setting.pb.go | 93 + .../ads/googleads/v1/common/segments.pb.go | 905 + .../ads/googleads/v1/common/simulation.pb.go | 478 + .../ads/googleads/v1/common/tag_snippet.pb.go | 131 + .../v1/common/targeting_setting.pb.go | 158 + .../ads/googleads/v1/common/text_label.pb.go | 105 + .../googleads/v1/common/url_collection.pb.go | 124 + .../ads/googleads/v1/common/user_lists.pb.go | 1405 ++ .../ads/googleads/v1/common/value.pb.go | 280 + .../googleads/v1/enums/access_reason.pb.go | 131 + .../account_budget_proposal_status.pb.go | 137 + .../enums/account_budget_proposal_type.pb.go | 127 + .../v1/enums/account_budget_status.pb.go | 122 + .../ad_customizer_placeholder_field.pb.go | 128 + .../v1/enums/ad_group_ad_rotation_mode.pb.go | 122 + .../v1/enums/ad_group_ad_status.pb.go | 124 + .../ad_group_criterion_approval_status.pb.go | 128 + .../v1/enums/ad_group_criterion_status.pb.go | 125 + .../googleads/v1/enums/ad_group_status.pb.go | 124 + .../googleads/v1/enums/ad_group_type.pb.go | 166 + .../googleads/v1/enums/ad_network_type.pb.go | 137 + .../ad_serving_optimization_status.pb.go | 138 + .../ads/googleads/v1/enums/ad_strength.pb.go | 135 + .../ads/googleads/v1/enums/ad_type.pb.go | 196 + .../enums/advertising_channel_sub_type.pb.go | 172 + .../v1/enums/advertising_channel_type.pb.go | 139 + ...iate_location_feed_relationship_type.pb.go | 118 + ...affiliate_location_placeholder_field.pb.go | 161 + .../googleads/v1/enums/age_range_type.pb.go | 142 + .../v1/enums/app_campaign_app_store.pb.go | 118 + 
..._campaign_bidding_strategy_goal_type.pb.go | 142 + .../v1/enums/app_payment_model_type.pb.go | 113 + .../v1/enums/app_placeholder_field.pb.go | 153 + .../ads/googleads/v1/enums/app_store.pb.go | 117 + .../enums/app_url_operating_system_type.pb.go | 118 + .../ads/googleads/v1/enums/asset_type.pb.go | 127 + .../v1/enums/attribution_model.pb.go | 153 + .../v1/enums/bid_modifier_source.pb.go | 119 + .../googleads/v1/enums/bidding_source.pb.go | 125 + .../v1/enums/bidding_strategy_status.pb.go | 120 + .../v1/enums/bidding_strategy_type.pb.go | 193 + .../v1/enums/billing_setup_status.pb.go | 129 + .../v1/enums/brand_safety_suitability.pb.go | 144 + .../v1/enums/budget_delivery_method.pb.go | 121 + .../googleads/v1/enums/budget_period.pb.go | 122 + .../googleads/v1/enums/budget_status.pb.go | 117 + .../ads/googleads/v1/enums/budget_type.pb.go | 136 + .../call_conversion_reporting_state.pb.go | 128 + .../v1/enums/call_placeholder_field.pb.go | 141 + .../v1/enums/callout_placeholder_field.pb.go | 114 + .../v1/enums/campaign_experiment_type.pb.go | 129 + .../v1/enums/campaign_serving_status.pb.go | 133 + .../v1/enums/campaign_shared_set_status.pb.go | 118 + .../googleads/v1/enums/campaign_status.pb.go | 122 + .../v1/enums/change_status_operation.pb.go | 123 + .../enums/change_status_resource_type.pb.go | 158 + .../ads/googleads/v1/enums/click_type.pb.go | 357 + .../v1/enums/content_label_type.pb.go | 180 + .../v1/enums/conversion_action_category.pb.go | 138 + .../conversion_action_counting_type.pb.go | 121 + .../v1/enums/conversion_action_status.pb.go | 123 + .../v1/enums/conversion_action_type.pb.go | 152 + .../v1/enums/conversion_adjustment_type.pb.go | 120 + .../conversion_attribution_event_type.pb.go | 119 + .../v1/enums/conversion_lag_bucket.pb.go | 216 + .../conversion_or_adjustment_lag_bucket.pb.go | 342 + ...n_category_channel_availability_mode.pb.go | 135 + ...on_category_locale_availability_mode.pb.go | 139 + .../criterion_system_serving_status.pb.go | 119 + .../googleads/v1/enums/criterion_type.pb.go | 249 + .../enums/custom_interest_member_type.pb.go | 118 + .../v1/enums/custom_interest_status.pb.go | 119 + .../v1/enums/custom_interest_type.pb.go | 118 + .../v1/enums/custom_placeholder_field.pb.go | 228 + .../customer_match_upload_key_type.pb.go | 126 + ...onversion_eligibility_failure_reason.pb.go | 155 + .../v1/enums/data_driven_model_status.pb.go | 134 + .../ads/googleads/v1/enums/day_of_week.pb.go | 140 + .../ads/googleads/v1/enums/device.pb.go | 126 + .../v1/enums/display_ad_format_setting.pb.go | 124 + .../enums/display_upload_product_type.pb.go | 181 + .../enums/dsa_page_feed_criterion_field.pb.go | 119 + .../enums/education_placeholder_field.pb.go | 207 + .../v1/enums/extension_setting_device.pb.go | 120 + .../googleads/v1/enums/extension_type.pb.go | 165 + .../v1/enums/external_conversion_source.pb.go | 234 + .../v1/enums/feed_attribute_type.pb.go | 168 + .../feed_item_quality_approval_status.pb.go | 121 + ...feed_item_quality_disapproval_reason.pb.go | 203 + .../googleads/v1/enums/feed_item_status.pb.go | 117 + .../v1/enums/feed_item_target_device.pb.go | 114 + .../v1/enums/feed_item_target_type.pb.go | 123 + .../enums/feed_item_validation_status.pb.go | 122 + .../googleads/v1/enums/feed_link_status.pb.go | 117 + .../enums/feed_mapping_criterion_type.pb.go | 120 + .../v1/enums/feed_mapping_status.pb.go | 117 + .../ads/googleads/v1/enums/feed_origin.pb.go | 120 + .../ads/googleads/v1/enums/feed_status.pb.go | 117 + .../v1/enums/flight_placeholder_field.pb.go | 213 + 
.../v1/enums/frequency_cap_event_type.pb.go | 118 + .../v1/enums/frequency_cap_level.pb.go | 123 + .../v1/enums/frequency_cap_time_unit.pb.go | 123 + .../ads/googleads/v1/enums/gender_type.pb.go | 122 + .../v1/enums/geo_target_constant_status.pb.go | 121 + .../v1/enums/geo_targeting_restriction.pb.go | 116 + .../v1/enums/geo_targeting_type.pb.go | 119 + .../v1/enums/google_ads_field_category.pb.go | 132 + .../v1/enums/google_ads_field_data_type.pb.go | 178 + .../v1/enums/hotel_date_selection_type.pb.go | 119 + .../v1/enums/hotel_placeholder_field.pb.go | 224 + .../v1/enums/income_range_type.pb.go | 143 + .../v1/enums/interaction_event_type.pb.go | 133 + .../googleads/v1/enums/interaction_type.pb.go | 113 + .../v1/enums/job_placeholder_field.pb.go | 209 + .../v1/enums/keyword_match_type.pb.go | 122 + .../keyword_plan_competition_level.pb.go | 127 + .../keyword_plan_forecast_interval.pb.go | 127 + .../v1/enums/keyword_plan_network.pb.go | 119 + .../ads/googleads/v1/enums/label_status.pb.go | 117 + .../legacy_app_install_ad_app_store.pb.go | 134 + .../listing_custom_attribute_index.pb.go | 131 + .../v1/enums/listing_group_type.pb.go | 120 + .../v1/enums/local_placeholder_field.pb.go | 216 + ..._extension_targeting_criterion_field.pb.go | 142 + .../enums/location_group_radius_units.pb.go | 118 + .../v1/enums/location_placeholder_field.pb.go | 147 + .../v1/enums/manager_link_status.pb.go | 133 + .../matching_function_context_type.pb.go | 119 + .../v1/enums/matching_function_operator.pb.go | 138 + .../ads/googleads/v1/enums/media_type.pb.go | 138 + .../enums/merchant_center_link_status.pb.go | 121 + .../v1/enums/message_placeholder_field.pb.go | 136 + .../ads/googleads/v1/enums/mime_type.pb.go | 165 + .../googleads/v1/enums/minute_of_hour.pb.go | 127 + .../v1/enums/mobile_device_type.pb.go | 118 + .../googleads/v1/enums/month_of_year.pb.go | 163 + .../v1/enums/mutate_job_status.pb.go | 122 + .../v1/enums/negative_geo_target_type.pb.go | 121 + ...erating_system_version_operator_type.pb.go | 122 + .../page_one_promoted_strategy_goal.pb.go | 120 + .../v1/enums/parental_status_type.pb.go | 123 + .../ads/googleads/v1/enums/payment_mode.pb.go | 128 + .../googleads/v1/enums/placeholder_type.pb.go | 217 + .../googleads/v1/enums/placement_type.pb.go | 133 + .../v1/enums/policy_approval_status.pb.go | 133 + .../v1/enums/policy_review_status.pb.go | 126 + .../v1/enums/policy_topic_entry_type.pb.go | 142 + ...idence_destination_mismatch_url_type.pb.go | 142 + ...dence_destination_not_working_device.pb.go | 131 + .../v1/enums/positive_geo_target_type.pb.go | 127 + .../v1/enums/preferred_content_type.pb.go | 114 + .../price_extension_price_qualifier.pb.go | 123 + .../v1/enums/price_extension_price_unit.pb.go | 136 + .../v1/enums/price_extension_type.pb.go | 151 + .../v1/enums/price_placeholder_field.pb.go | 385 + .../product_bidding_category_level.pb.go | 131 + .../product_bidding_category_status.pb.go | 118 + .../googleads/v1/enums/product_channel.pb.go | 117 + .../enums/product_channel_exclusivity.pb.go | 121 + .../v1/enums/product_condition.pb.go | 122 + .../v1/enums/product_type_level.pb.go | 130 + ...romotion_extension_discount_modifier.pb.go | 117 + .../enums/promotion_extension_occasion.pb.go | 282 + .../enums/promotion_placeholder_field.pb.go | 185 + .../v1/enums/proximity_radius_units.pb.go | 118 + .../v1/enums/quality_score_bucket.pb.go | 122 + .../enums/real_estate_placeholder_field.pb.go | 212 + .../v1/enums/recommendation_type.pb.go | 183 + .../search_engine_results_page_type.pb.go | 125 + 
.../v1/enums/search_term_match_type.pb.go | 132 + .../enums/search_term_targeting_status.pb.go | 129 + .../v1/enums/served_asset_field_type.pb.go | 134 + .../v1/enums/shared_set_status.pb.go | 117 + .../googleads/v1/enums/shared_set_type.pb.go | 118 + .../simulation_modification_method.pb.go | 122 + .../googleads/v1/enums/simulation_type.pb.go | 127 + .../v1/enums/sitelink_placeholder_field.pb.go | 146 + .../ads/googleads/v1/enums/slot.pb.go | 140 + .../v1/enums/spending_limit_type.pb.go | 114 + ...structured_snippet_placeholder_field.pb.go | 125 + .../enums/system_managed_entity_source.pb.go | 114 + ...arget_cpa_opt_in_recommendation_goal.pb.go | 130 + .../target_impression_share_location.pb.go | 126 + .../v1/enums/targeting_dimension.pb.go | 152 + .../ads/googleads/v1/enums/time_type.pb.go | 118 + .../v1/enums/tracking_code_page_format.pb.go | 120 + .../v1/enums/tracking_code_type.pb.go | 127 + .../v1/enums/travel_placeholder_field.pb.go | 227 + .../enums/user_interest_taxonomy_type.pb.go | 135 + .../v1/enums/user_list_access_status.pb.go | 118 + .../v1/enums/user_list_closing_reason.pb.go | 114 + .../user_list_combined_rule_operator.pb.go | 118 + .../user_list_crm_data_source_type.pb.go | 125 + .../user_list_date_rule_item_operator.pb.go | 128 + .../user_list_logical_rule_operator.pb.go | 123 + .../enums/user_list_membership_status.pb.go | 120 + .../user_list_number_rule_item_operator.pb.go | 138 + .../user_list_prepopulation_status.pb.go | 123 + .../v1/enums/user_list_rule_type.pb.go | 118 + .../v1/enums/user_list_size_range.pb.go | 186 + .../user_list_string_rule_item_operator.pb.go | 147 + .../googleads/v1/enums/user_list_type.pb.go | 138 + .../vanity_pharma_display_url_mode.pb.go | 120 + .../v1/enums/vanity_pharma_text.pb.go | 172 + .../v1/enums/webpage_condition_operand.pb.go | 133 + .../v1/enums/webpage_condition_operator.pb.go | 119 + .../account_budget_proposal_error.pb.go | 225 + .../v1/errors/ad_customizer_error.pb.go | 135 + .../ads/googleads/v1/errors/ad_error.pb.go | 758 + .../v1/errors/ad_group_ad_error.pb.go | 152 + .../errors/ad_group_bid_modifier_error.pb.go | 122 + .../v1/errors/ad_group_criterion_error.pb.go | 309 + .../googleads/v1/errors/ad_group_error.pb.go | 176 + .../v1/errors/ad_group_feed_error.pb.go | 148 + .../v1/errors/ad_parameter_error.pb.go | 120 + .../v1/errors/ad_sharing_error.pb.go | 126 + .../ads/googleads/v1/errors/adx_error.pb.go | 113 + .../ads/googleads/v1/errors/asset_error.pb.go | 136 + .../v1/errors/authentication_error.pb.go | 202 + .../v1/errors/authorization_error.pb.go | 167 + .../googleads/v1/errors/bidding_error.pb.go | 229 + .../v1/errors/bidding_strategy_error.pb.go | 138 + .../v1/errors/billing_setup_error.pb.go | 198 + .../v1/errors/campaign_budget_error.pb.go | 192 + .../v1/errors/campaign_criterion_error.pb.go | 171 + .../googleads/v1/errors/campaign_error.pb.go | 312 + .../v1/errors/campaign_feed_error.pb.go | 142 + .../v1/errors/campaign_shared_set_error.pb.go | 114 + .../v1/errors/change_status_error.pb.go | 114 + .../v1/errors/collection_size_error.pb.go | 118 + .../googleads/v1/errors/context_error.pb.go | 119 + .../v1/errors/conversion_action_error.pb.go | 157 + .../conversion_adjustment_upload_error.pb.go | 164 + .../v1/errors/conversion_upload_error.pb.go | 237 + .../v1/errors/country_code_error.pb.go | 114 + .../googleads/v1/errors/criterion_error.pb.go | 618 + .../v1/errors/custom_interest_error.pb.go | 147 + .../errors/customer_client_link_error.pb.go | 147 + .../googleads/v1/errors/customer_error.pb.go | 122 + 
.../v1/errors/customer_feed_error.pb.go | 147 + .../errors/customer_manager_link_error.pb.go | 156 + .../googleads/v1/errors/database_error.pb.go | 114 + .../ads/googleads/v1/errors/date_error.pb.go | 158 + .../v1/errors/date_range_error.pb.go | 135 + .../googleads/v1/errors/distinct_error.pb.go | 118 + .../ads/googleads/v1/errors/enum_error.pb.go | 113 + .../ads/googleads/v1/errors/errors.pb.go | 3496 ++++ .../v1/errors/extension_feed_item_error.pb.go | 343 + .../v1/errors/extension_setting_error.pb.go | 450 + .../feed_attribute_reference_error.pb.go | 125 + .../ads/googleads/v1/errors/feed_error.pb.go | 206 + .../googleads/v1/errors/feed_item_error.pb.go | 157 + .../v1/errors/feed_item_target_error.pb.go | 145 + .../errors/feed_item_validation_error.pb.go | 591 + .../v1/errors/feed_mapping_error.pb.go | 194 + .../ads/googleads/v1/errors/field_error.pb.go | 143 + .../v1/errors/field_mask_error.pb.go | 131 + .../googleads/v1/errors/function_error.pb.go | 189 + .../v1/errors/function_parsing_error.pb.go | 165 + ...geo_target_constant_suggestion_error.pb.go | 133 + .../googleads/v1/errors/header_error.pb.go | 113 + .../ads/googleads/v1/errors/id_error.pb.go | 112 + .../ads/googleads/v1/errors/image_error.pb.go | 289 + .../googleads/v1/errors/internal_error.pb.go | 125 + .../errors/keyword_plan_ad_group_error.pb.go | 122 + .../errors/keyword_plan_campaign_error.pb.go | 136 + .../v1/errors/keyword_plan_error.pb.go | 181 + .../v1/errors/keyword_plan_idea_error.pb.go | 119 + .../errors/keyword_plan_keyword_error.pb.go | 141 + .../keyword_plan_negative_keyword_error.pb.go | 111 + .../ads/googleads/v1/errors/label_error.pb.go | 158 + .../v1/errors/language_code_error.pb.go | 119 + .../v1/errors/list_operation_error.pb.go | 119 + .../v1/errors/media_bundle_error.pb.go | 219 + .../v1/errors/media_file_error.pb.go | 225 + .../v1/errors/media_upload_error.pb.go | 129 + .../v1/errors/multiplier_error.pb.go | 172 + .../googleads/v1/errors/mutate_error.pb.go | 140 + .../v1/errors/mutate_job_error.pb.go | 136 + .../errors/new_resource_creation_error.pb.go | 126 + .../googleads/v1/errors/not_empty_error.pb.go | 113 + .../ads/googleads/v1/errors/null_error.pb.go | 112 + .../operation_access_denied_error.pb.go | 158 + .../googleads/v1/errors/operator_error.pb.go | 113 + .../v1/errors/partial_failure_error.pb.go | 115 + .../v1/errors/policy_finding_error.pb.go | 120 + .../policy_validation_parameter_error.pb.go | 128 + .../v1/errors/policy_violation_error.pb.go | 113 + .../ads/googleads/v1/errors/query_error.pb.go | 373 + .../ads/googleads/v1/errors/quota_error.pb.go | 123 + .../ads/googleads/v1/errors/range_error.pb.go | 117 + .../v1/errors/recommendation_error.pb.go | 183 + .../v1/errors/region_code_error.pb.go | 113 + .../googleads/v1/errors/request_error.pb.go | 201 + .../errors/resource_access_denied_error.pb.go | 114 + .../resource_count_limit_exceeded_error.pb.go | 169 + .../googleads/v1/errors/setting_error.pb.go | 212 + .../v1/errors/shared_criterion_error.pb.go | 115 + .../v1/errors/shared_set_error.pb.go | 130 + .../v1/errors/size_limit_error.pb.go | 119 + .../v1/errors/string_format_error.pb.go | 118 + .../v1/errors/string_length_error.pb.go | 118 + .../googleads/v1/errors/url_field_error.pb.go | 363 + .../googleads/v1/errors/user_list_error.pb.go | 248 + .../youtube_video_registration_error.pb.go | 119 + .../v1/resources/account_budget.pb.go | 1055 ++ .../resources/account_budget_proposal.pb.go | 713 + .../ads/googleads/v1/resources/ad.pb.go | 884 + .../ads/googleads/v1/resources/ad_group.pb.go | 
366 + .../googleads/v1/resources/ad_group_ad.pb.go | 220 + .../v1/resources/ad_group_ad_label.pb.go | 114 + .../v1/resources/ad_group_audience_view.pb.go | 96 + .../v1/resources/ad_group_bid_modifier.pb.go | 424 + .../v1/resources/ad_group_criterion.pb.go | 1238 ++ .../resources/ad_group_criterion_label.pb.go | 116 + .../ad_group_criterion_simulation.pb.go | 256 + .../ad_group_extension_setting.pb.go | 150 + .../v1/resources/ad_group_feed.pb.go | 158 + .../v1/resources/ad_group_label.pb.go | 114 + .../v1/resources/ad_group_simulation.pb.go | 286 + .../googleads/v1/resources/ad_parameter.pb.go | 146 + .../v1/resources/ad_schedule_view.pb.go | 93 + .../v1/resources/age_range_view.pb.go | 92 + .../ads/googleads/v1/resources/asset.pb.go | 321 + .../v1/resources/bidding_strategy.pb.go | 473 + .../v1/resources/billing_setup.pb.go | 462 + .../ads/googleads/v1/resources/campaign.pb.go | 1510 ++ .../v1/resources/campaign_audience_view.pb.go | 98 + .../v1/resources/campaign_bid_modifier.pb.go | 218 + .../v1/resources/campaign_budget.pb.go | 321 + .../v1/resources/campaign_criterion.pb.go | 1106 ++ .../campaign_criterion_simulation.pb.go | 257 + .../campaign_extension_setting.pb.go | 149 + .../v1/resources/campaign_feed.pb.go | 158 + .../v1/resources/campaign_label.pb.go | 114 + .../v1/resources/campaign_shared_set.pb.go | 135 + .../v1/resources/carrier_constant.pb.go | 127 + .../v1/resources/change_status.pb.go | 234 + .../googleads/v1/resources/click_view.pb.go | 144 + .../v1/resources/conversion_action.pb.go | 404 + .../v1/resources/custom_interest.pb.go | 222 + .../ads/googleads/v1/resources/customer.pb.go | 422 + .../v1/resources/customer_client.pb.go | 132 + .../v1/resources/customer_client_link.pb.go | 143 + .../customer_extension_setting.pb.go | 134 + .../v1/resources/customer_feed.pb.go | 148 + .../v1/resources/customer_label.pb.go | 120 + .../v1/resources/customer_manager_link.pb.go | 130 + .../customer_negative_criterion.pb.go | 383 + .../v1/resources/detail_placement_view.pb.go | 155 + .../v1/resources/display_keyword_view.pb.go | 93 + .../v1/resources/domain_category.pb.go | 190 + .../dynamic_search_ads_search_term_view.pb.go | 146 + .../expanded_landing_page_view.pb.go | 109 + .../v1/resources/extension_feed_item.pb.go | 573 + .../ads/googleads/v1/resources/feed.pb.go | 685 + .../googleads/v1/resources/feed_item.pb.go | 579 + .../v1/resources/feed_item_target.pb.go | 386 + .../googleads/v1/resources/feed_mapping.pb.go | 993 + .../v1/resources/feed_placeholder_view.pb.go | 106 + .../googleads/v1/resources/gender_view.pb.go | 92 + .../v1/resources/geo_target_constant.pb.go | 161 + .../v1/resources/geographic_view.pb.go | 127 + .../v1/resources/google_ads_field.pb.go | 256 + .../v1/resources/group_placement_view.pb.go | 142 + .../v1/resources/hotel_group_view.pb.go | 92 + .../v1/resources/hotel_performance_view.pb.go | 93 + .../googleads/v1/resources/keyword_plan.pb.go | 285 + .../v1/resources/keyword_plan_ad_group.pb.go | 143 + .../v1/resources/keyword_plan_campaign.pb.go | 228 + .../v1/resources/keyword_plan_keyword.pb.go | 153 + .../keyword_plan_negative_keyword.pb.go | 143 + .../googleads/v1/resources/keyword_view.pb.go | 92 + .../ads/googleads/v1/resources/label.pb.go | 143 + .../v1/resources/landing_page_view.pb.go | 109 + .../v1/resources/language_constant.pb.go | 137 + .../v1/resources/location_view.pb.go | 93 + .../v1/resources/managed_placement_view.pb.go | 94 + .../googleads/v1/resources/media_file.pb.go | 553 + .../v1/resources/merchant_center_link.pb.go | 133 + 
.../mobile_app_category_constant.pb.go | 116 + .../v1/resources/mobile_device_constant.pb.go | 150 + .../googleads/v1/resources/mutate_job.pb.go | 245 + .../operating_system_version_constant.pb.go | 157 + .../paid_organic_search_term_view.pb.go | 109 + .../v1/resources/parental_status_view.pb.go | 93 + .../v1/resources/payments_account.pb.go | 150 + .../product_bidding_category_constant.pb.go | 176 + .../v1/resources/product_group_view.pb.go | 92 + .../v1/resources/recommendation.pb.go | 1698 ++ .../v1/resources/remarketing_action.pb.go | 134 + .../v1/resources/search_term_view.pb.go | 133 + .../v1/resources/shared_criterion.pb.go | 395 + .../googleads/v1/resources/shared_set.pb.go | 166 + .../resources/shopping_performance_view.pb.go | 97 + .../v1/resources/topic_constant.pb.go | 135 + .../googleads/v1/resources/topic_view.pb.go | 92 + .../v1/resources/user_interest.pb.go | 165 + .../googleads/v1/resources/user_list.pb.go | 544 + .../ads/googleads/v1/resources/video.pb.go | 136 + .../account_budget_proposal_service.pb.go | 534 + .../v1/services/account_budget_service.pb.go | 176 + .../services/ad_group_ad_label_service.pb.go | 544 + .../v1/services/ad_group_ad_service.pb.go | 601 + .../ad_group_audience_view_service.pb.go | 176 + .../ad_group_bid_modifier_service.pb.go | 591 + .../ad_group_criterion_label_service.pb.go | 547 + .../services/ad_group_criterion_service.pb.go | 610 + ...d_group_criterion_simulation_service.pb.go | 177 + .../ad_group_extension_setting_service.pb.go | 597 + .../v1/services/ad_group_feed_service.pb.go | 590 + .../v1/services/ad_group_label_service.pb.go | 544 + .../v1/services/ad_group_service.pb.go | 587 + .../ad_group_simulation_service.pb.go | 175 + .../v1/services/ad_parameter_service.pb.go | 590 + .../services/ad_schedule_view_service.pb.go | 175 + .../v1/services/age_range_view_service.pb.go | 175 + .../googleads/v1/services/asset_service.pb.go | 468 + .../services/bidding_strategy_service.pb.go | 591 + .../v1/services/billing_setup_service.pb.go | 502 + .../campaign_audience_view_service.pb.go | 176 + .../campaign_bid_modifier_service.pb.go | 591 + .../v1/services/campaign_budget_service.pb.go | 590 + .../services/campaign_criterion_service.pb.go | 589 + ...ampaign_criterion_simulation_service.pb.go | 177 + .../campaign_extension_setting_service.pb.go | 599 + .../v1/services/campaign_feed_service.pb.go | 590 + .../v1/services/campaign_label_service.pb.go | 544 + .../v1/services/campaign_service.pb.go | 587 + .../campaign_shared_set_service.pb.go | 542 + .../services/carrier_constant_service.pb.go | 175 + .../v1/services/change_status_service.pb.go | 175 + .../v1/services/click_view_service.pb.go | 175 + .../services/conversion_action_service.pb.go | 590 + ...conversion_adjustment_upload_service.pb.go | 764 + .../services/conversion_upload_service.pb.go | 817 + .../v1/services/custom_interest_service.pb.go | 529 + .../customer_client_link_service.pb.go | 518 + .../v1/services/customer_client_service.pb.go | 175 + .../customer_extension_setting_service.pb.go | 599 + .../v1/services/customer_feed_service.pb.go | 590 + .../v1/services/customer_label_service.pb.go | 544 + .../customer_manager_link_service.pb.go | 487 + .../customer_negative_criterion_service.pb.go | 546 + .../v1/services/customer_service.pb.go | 666 + .../detail_placement_view_service.pb.go | 176 + .../display_keyword_view_service.pb.go | 176 + .../v1/services/domain_category_service.pb.go | 176 + ..._search_ads_search_term_view_service.pb.go | 180 + 
.../expanded_landing_page_view_service.pb.go | 179 + .../extension_feed_item_service.pb.go | 562 + .../v1/services/feed_item_service.pb.go | 590 + .../services/feed_item_target_service.pb.go | 499 + .../v1/services/feed_mapping_service.pb.go | 544 + .../feed_placeholder_view_service.pb.go | 176 + .../googleads/v1/services/feed_service.pb.go | 589 + .../v1/services/gender_view_service.pb.go | 175 + .../geo_target_constant_service.pb.go | 616 + .../v1/services/geographic_view_service.pb.go | 175 + .../services/google_ads_field_service.pb.go | 345 + .../v1/services/google_ads_service.pb.go | 4322 +++++ .../group_placement_view_service.pb.go | 176 + .../services/hotel_group_view_service.pb.go | 176 + .../hotel_performance_view_service.pb.go | 176 + .../keyword_plan_ad_group_service.pb.go | 592 + .../keyword_plan_campaign_service.pb.go | 593 + .../services/keyword_plan_idea_service.pb.go | 605 + .../keyword_plan_keyword_service.pb.go | 592 + ...eyword_plan_negative_keyword_service.pb.go | 602 + .../v1/services/keyword_plan_service.pb.go | 1170 ++ .../v1/services/keyword_view_service.pb.go | 175 + .../googleads/v1/services/label_service.pb.go | 587 + .../services/landing_page_view_service.pb.go | 178 + .../services/language_constant_service.pb.go | 175 + .../v1/services/location_view_service.pb.go | 175 + .../managed_placement_view_service.pb.go | 176 + .../v1/services/media_file_service.pb.go | 512 + .../merchant_center_link_service.pb.go | 635 + ...mobile_app_category_constant_service.pb.go | 177 + .../mobile_device_constant_service.pb.go | 175 + .../v1/services/mutate_job_service.pb.go | 783 + ...ting_system_version_constant_service.pb.go | 179 + ...aid_organic_search_term_view_service.pb.go | 178 + .../parental_status_view_service.pb.go | 176 + .../services/payments_account_service.pb.go | 222 + ...ct_bidding_category_constant_service.pb.go | 179 + .../services/product_group_view_service.pb.go | 176 + .../v1/services/recommendation_service.pb.go | 1427 ++ .../services/remarketing_action_service.pb.go | 559 + .../services/search_term_view_service.pb.go | 176 + .../services/shared_criterion_service.pb.go | 542 + .../v1/services/shared_set_service.pb.go | 588 + .../shopping_performance_view_service.pb.go | 177 + .../v1/services/topic_constant_service.pb.go | 175 + .../v1/services/topic_view_service.pb.go | 175 + .../v1/services/user_interest_service.pb.go | 175 + .../v1/services/user_list_service.pb.go | 588 + .../googleads/v1/services/video_service.pb.go | 174 + .../api/annotations/annotations.pb.go | 54 + .../googleapis/api/annotations/client.pb.go | 76 + .../api/annotations/field_behavior.pb.go | 119 + .../googleapis/api/annotations/http.pb.go | 745 + .../googleapis/api/annotations/resource.pb.go | 321 + .../googleapis/api/authorization_config.pb.go | 93 + .../api/configchange/config_change.pb.go | 227 + .../api/distribution/distribution.pb.go | 714 + .../googleapis/api/experimental.pb.go | 86 + .../api/expr/v1alpha1/cel_service.pb.go | 195 + .../api/expr/v1alpha1/checked.pb.go | 1428 ++ .../expr/v1alpha1/conformance_service.pb.go | 799 + .../googleapis/api/expr/v1alpha1/eval.pb.go | 427 + .../api/expr/v1alpha1/explain.pb.go | 157 + .../googleapis/api/expr/v1alpha1/syntax.pb.go | 1572 ++ .../googleapis/api/expr/v1alpha1/value.pb.go | 708 + .../googleapis/api/expr/v1beta1/decl.pb.go | 403 + .../googleapis/api/expr/v1beta1/eval.pb.go | 470 + .../googleapis/api/expr/v1beta1/expr.pb.go | 1351 ++ .../googleapis/api/expr/v1beta1/source.pb.go | 189 + .../googleapis/api/expr/v1beta1/value.pb.go | 
708 + .../googleapis/api/httpbody/httpbody.pb.go | 142 + .../genproto/googleapis/api/label/label.pb.go | 134 + .../googleapis/api/launch_stage.pb.go | 110 + .../googleapis/api/metric/metric.pb.go | 476 + .../api/monitoredres/monitored_resource.pb.go | 288 + .../googleapis/api/serviceconfig/auth.pb.go | 446 + .../api/serviceconfig/backend.pb.go | 350 + .../api/serviceconfig/billing.pb.go | 162 + .../api/serviceconfig/consumer.pb.go | 210 + .../api/serviceconfig/context.pb.go | 206 + .../api/serviceconfig/control.pb.go | 83 + .../api/serviceconfig/documentation.pb.go | 339 + .../api/serviceconfig/endpoint.pb.go | 149 + .../googleapis/api/serviceconfig/log.pb.go | 126 + .../api/serviceconfig/logging.pb.go | 184 + .../api/serviceconfig/monitoring.pb.go | 196 + .../googleapis/api/serviceconfig/quota.pb.go | 393 + .../api/serviceconfig/service.pb.go | 405 + .../api/serviceconfig/source_info.pb.go | 85 + .../api/serviceconfig/system_parameter.pb.go | 243 + .../googleapis/api/serviceconfig/usage.pb.go | 208 + .../api/servicecontrol/v1/check_error.pb.go | 210 + .../api/servicecontrol/v1/distribution.pb.go | 528 + .../api/servicecontrol/v1/log_entry.pb.go | 293 + .../api/servicecontrol/v1/metric_value.pb.go | 386 + .../api/servicecontrol/v1/operation.pb.go | 250 + .../servicecontrol/v1/quota_controller.pb.go | 590 + .../v1/service_controller.pb.go | 658 + .../api/servicemanagement/v1/resources.pb.go | 1036 ++ .../servicemanagement/v1/servicemanager.pb.go | 2015 ++ .../appengine/legacy/audit_data.pb.go | 99 + .../appengine/logging/v1/request_log.pb.go | 616 + .../googleapis/appengine/v1/app_yaml.pb.go | 1025 ++ .../googleapis/appengine/v1/appengine.pb.go | 1767 ++ .../googleapis/appengine/v1/application.pb.go | 274 + .../googleapis/appengine/v1/audit_data.pb.go | 284 + .../googleapis/appengine/v1/deploy.pb.go | 280 + .../googleapis/appengine/v1/instance.pb.go | 298 + .../googleapis/appengine/v1/location.pb.go | 102 + .../googleapis/appengine/v1/operation.pb.go | 139 + .../googleapis/appengine/v1/service.pb.go | 220 + .../googleapis/appengine/v1/version.pb.go | 1312 ++ .../v1alpha1/embedded_assistant.pb.go | 1142 ++ .../v1alpha2/embedded_assistant.pb.go | 1775 ++ .../cluster/v1/bigtable_cluster_data.pb.go | 276 + .../cluster/v1/bigtable_cluster_service.pb.go | 484 + .../bigtable_cluster_service_messages.pb.go | 646 + .../admin/table/v1/bigtable_table_data.pb.go | 546 + .../table/v1/bigtable_table_service.pb.go | 429 + .../v1/bigtable_table_service_messages.pb.go | 606 + .../admin/v2/bigtable_instance_admin.pb.go | 2062 +++ .../admin/v2/bigtable_table_admin.pb.go | 2174 +++ .../googleapis/bigtable/admin/v2/common.pb.go | 80 + .../bigtable/admin/v2/instance.pb.go | 659 + .../googleapis/bigtable/admin/v2/table.pb.go | 820 + .../bigtable/v1/bigtable_data.pb.go | 2428 +++ .../bigtable/v1/bigtable_service.pb.go | 395 + .../v1/bigtable_service_messages.pb.go | 1039 ++ .../googleapis/bigtable/v2/bigtable.pb.go | 1532 ++ .../googleapis/bigtable/v2/data.pb.go | 2611 +++ .../googleapis/bytestream/bytestream.pb.go | 675 + .../cloud/asset/v1/asset_service.pb.go | 738 + .../googleapis/cloud/asset/v1/assets.pb.go | 361 + .../cloud/asset/v1beta1/asset_service.pb.go | 740 + .../cloud/asset/v1beta1/assets.pb.go | 362 + .../googleapis/cloud/audit/audit_log.pb.go | 389 + .../automl/v1beta1/annotation_payload.pb.go | 403 + .../cloud/automl/v1beta1/classification.pb.go | 633 + .../cloud/automl/v1beta1/column_spec.pb.go | 212 + .../cloud/automl/v1beta1/data_items.pb.go | 605 + .../cloud/automl/v1beta1/data_stats.pb.go | 920 
+ .../cloud/automl/v1beta1/data_types.pb.go | 374 + .../cloud/automl/v1beta1/dataset.pb.go | 549 + .../cloud/automl/v1beta1/detection.pb.go | 319 + .../cloud/automl/v1beta1/geometry.pb.go | 145 + .../cloud/automl/v1beta1/image.pb.go | 370 + .../googleapis/cloud/automl/v1beta1/io.pb.go | 1861 ++ .../cloud/automl/v1beta1/model.pb.go | 519 + .../automl/v1beta1/model_evaluation.pb.go | 423 + .../cloud/automl/v1beta1/operations.pb.go | 1246 ++ .../automl/v1beta1/prediction_service.pb.go | 506 + .../cloud/automl/v1beta1/ranges.pb.go | 95 + .../cloud/automl/v1beta1/regression.pb.go | 119 + .../cloud/automl/v1beta1/service.pb.go | 2785 +++ .../cloud/automl/v1beta1/table_spec.pb.go | 159 + .../cloud/automl/v1beta1/tables.pb.go | 534 + .../cloud/automl/v1beta1/temporal.pb.go | 101 + .../cloud/automl/v1beta1/text.pb.go | 269 + .../automl/v1beta1/text_extraction.pb.go | 233 + .../cloud/automl/v1beta1/text_segment.pb.go | 110 + .../cloud/automl/v1beta1/text_sentiment.pb.go | 235 + .../cloud/automl/v1beta1/translation.pb.go | 261 + .../cloud/automl/v1beta1/video.pb.go | 111 + .../datatransfer/v1/datatransfer.pb.go | 2302 +++ .../bigquery/datatransfer/v1/transfer.pb.go | 605 + .../bigquery/logging/v1/audit_data.pb.go | 3062 ++++ .../cloud/bigquery/storage/v1beta1/avro.pb.go | 138 + .../storage/v1beta1/read_options.pb.go | 105 + .../bigquery/storage/v1beta1/storage.pb.go | 1327 ++ .../storage/v1beta1/table_reference.pb.go | 151 + .../googleapis/cloud/bigquery/v2/model.pb.go | 2251 +++ .../cloud/bigquery/v2/model_reference.pb.go | 106 + .../cloud/bigquery/v2/standard_sql.pb.go | 395 + .../cloud/billing/v1/cloud_billing.pb.go | 871 + .../v1beta1/resources.pb.go | 966 + .../binaryauthorization/v1beta1/service.pb.go | 777 + .../datacatalog/v1beta1/datacatalog.pb.go | 524 + .../cloud/datacatalog/v1beta1/schema.pb.go | 168 + .../datacatalog/v1beta1/table_spec.pb.go | 173 + .../datacatalog/v1beta1/timestamps.pb.go | 106 + .../datalabeling/v1beta1/annotation.pb.go | 1992 ++ .../v1beta1/annotation_spec_set.pb.go | 173 + .../v1beta1/data_labeling_service.pb.go | 3439 ++++ .../cloud/datalabeling/v1beta1/dataset.pb.go | 2004 ++ .../v1beta1/human_annotation_config.pb.go | 858 + .../datalabeling/v1beta1/instruction.pb.go | 255 + .../datalabeling/v1beta1/operations.pb.go | 1489 ++ .../cloud/dataproc/v1/clusters.pb.go | 2157 +++ .../googleapis/cloud/dataproc/v1/jobs.pb.go | 3119 ++++ .../cloud/dataproc/v1/operations.pb.go | 274 + .../dataproc/v1/workflow_templates.pb.go | 2519 +++ .../cloud/dataproc/v1beta2/clusters.pb.go | 2358 +++ .../cloud/dataproc/v1beta2/jobs.pb.go | 3132 ++++ .../cloud/dataproc/v1beta2/operations.pb.go | 274 + .../cloud/dataproc/v1beta2/shared.pb.go | 38 + .../dataproc/v1beta2/workflow_templates.pb.go | 2541 +++ .../cloud/dialogflow/v2/agent.pb.go | 1261 ++ .../cloud/dialogflow/v2/audio_config.pb.go | 599 + .../cloud/dialogflow/v2/context.pb.go | 747 + .../cloud/dialogflow/v2/entity_type.pb.go | 1624 ++ .../cloud/dialogflow/v2/intent.pb.go | 3390 ++++ .../cloud/dialogflow/v2/session.pb.go | 1585 ++ .../dialogflow/v2/session_entity_type.pb.go | 723 + .../cloud/dialogflow/v2/webhook.pb.go | 328 + .../cloud/dialogflow/v2beta1/agent.pb.go | 1270 ++ .../dialogflow/v2beta1/audio_config.pb.go | 619 + .../cloud/dialogflow/v2beta1/context.pb.go | 777 + .../cloud/dialogflow/v2beta1/document.pb.go | 1052 ++ .../dialogflow/v2beta1/entity_type.pb.go | 1624 ++ .../cloud/dialogflow/v2beta1/intent.pb.go | 3787 ++++ .../dialogflow/v2beta1/knowledge_base.pb.go | 663 + .../cloud/dialogflow/v2beta1/session.pb.go | 
1845 ++ .../v2beta1/session_entity_type.pb.go | 749 + .../cloud/dialogflow/v2beta1/webhook.pb.go | 352 + .../cloud/functions/v1beta2/functions.pb.go | 1461 ++ .../cloud/functions/v1beta2/operations.pb.go | 148 + .../cloud/iot/v1/device_manager.pb.go | 2287 +++ .../googleapis/cloud/iot/v1/resources.pb.go | 1586 ++ .../cloud/irm/v1alpha2/incidents.pb.go | 1808 ++ .../irm/v1alpha2/incidents_service.pb.go | 3740 ++++ .../googleapis/cloud/kms/v1/resources.pb.go | 1016 + .../googleapis/cloud/kms/v1/service.pb.go | 2653 +++ .../cloud/language/v1/language_service.pb.go | 3164 ++++ .../language/v1beta1/language_service.pb.go | 2723 +++ .../language/v1beta2/language_service.pb.go | 3118 ++++ .../googleapis/cloud/location/locations.pb.go | 424 + .../googleapis/cloud/ml/v1/job_service.pb.go | 2106 +++ .../cloud/ml/v1/model_service.pb.go | 1365 ++ .../cloud/ml/v1/operation_metadata.pb.go | 191 + .../cloud/ml/v1/prediction_service.pb.go | 373 + .../cloud/ml/v1/project_service.pb.go | 229 + .../cloud/oslogin/common/common.pb.go | 232 + .../googleapis/cloud/oslogin/v1/oslogin.pb.go | 747 + .../cloud/oslogin/v1alpha/oslogin.pb.go | 746 + .../cloud/oslogin/v1beta/oslogin.pb.go | 747 + .../v1beta1/phishingprotection.pb.go | 225 + .../v1beta1/recaptchaenterprise.pb.go | 672 + .../cloud/redis/v1/cloud_redis.pb.go | 1345 ++ .../cloud/redis/v1beta1/cloud_redis.pb.go | 1266 ++ .../cloud/resourcemanager/v2/folders.pb.go | 1510 ++ .../runtimeconfig/v1beta1/resources.pb.go | 677 + .../runtimeconfig/v1beta1/runtimeconfig.pb.go | 1764 ++ .../cloud/scheduler/v1/cloudscheduler.pb.go | 911 + .../googleapis/cloud/scheduler/v1/job.pb.go | 595 + .../cloud/scheduler/v1/target.pb.go | 818 + .../scheduler/v1beta1/cloudscheduler.pb.go | 912 + .../cloud/scheduler/v1beta1/job.pb.go | 597 + .../cloud/scheduler/v1beta1/target.pb.go | 819 + .../cloud/securitycenter/v1/asset.pb.go | 310 + .../cloud/securitycenter/v1/finding.pb.go | 249 + .../v1/organization_settings.pb.go | 219 + .../v1/run_asset_discovery_response.pb.go | 143 + .../securitycenter/v1/security_marks.pb.go | 114 + .../v1/securitycenter_service.pb.go | 2777 +++ .../cloud/securitycenter/v1/source.pb.go | 124 + .../cloud/securitycenter/v1beta1/asset.pb.go | 248 + .../securitycenter/v1beta1/finding.pb.go | 246 + .../v1beta1/organization_settings.pb.go | 215 + .../v1beta1/security_marks.pb.go | 110 + .../v1beta1/securitycenter_service.pb.go | 2498 +++ .../cloud/securitycenter/v1beta1/source.pb.go | 121 + .../cloud/speech/v1/cloud_speech.pb.go | 1731 ++ .../cloud/speech/v1p1beta1/cloud_speech.pb.go | 2208 +++ .../cloud/support/common/common.pb.go | 1040 ++ .../support/v1alpha1/cloud_support.pb.go | 1052 ++ .../cloud/talent/v4beta1/application.pb.go | 418 + .../talent/v4beta1/application_service.pb.go | 639 + .../cloud/talent/v4beta1/batch.pb.go | 43 + .../cloud/talent/v4beta1/common.pb.go | 2389 +++ .../cloud/talent/v4beta1/company.pb.go | 311 + .../talent/v4beta1/company_service.pb.go | 668 + .../talent/v4beta1/completion_service.pb.go | 480 + .../cloud/talent/v4beta1/event.pb.go | 571 + .../cloud/talent/v4beta1/event_service.pb.go | 204 + .../cloud/talent/v4beta1/filters.pb.go | 1886 ++ .../cloud/talent/v4beta1/histogram.pb.go | 159 + .../googleapis/cloud/talent/v4beta1/job.pb.go | 801 + .../cloud/talent/v4beta1/job_service.pb.go | 1928 ++ .../cloud/talent/v4beta1/profile.pb.go | 2388 +++ .../talent/v4beta1/profile_service.pb.go | 1156 ++ .../cloud/talent/v4beta1/tenant.pb.go | 182 + .../cloud/talent/v4beta1/tenant_service.pb.go | 632 + .../cloud/tasks/v2/cloudtasks.pb.go 
| 1879 ++ .../googleapis/cloud/tasks/v2/queue.pb.go | 577 + .../googleapis/cloud/tasks/v2/target.pb.go | 447 + .../googleapis/cloud/tasks/v2/task.pb.go | 453 + .../cloud/tasks/v2beta2/cloudtasks.pb.go | 2604 +++ .../cloud/tasks/v2beta2/queue.pb.go | 817 + .../cloud/tasks/v2beta2/target.pb.go | 665 + .../googleapis/cloud/tasks/v2beta2/task.pb.go | 517 + .../cloud/tasks/v2beta3/cloudtasks.pb.go | 1880 ++ .../cloud/tasks/v2beta3/queue.pb.go | 656 + .../cloud/tasks/v2beta3/target.pb.go | 896 + .../googleapis/cloud/tasks/v2beta3/task.pb.go | 489 + .../cloud/texttospeech/v1/cloud_tts.pb.go | 874 + .../texttospeech/v1beta1/cloud_tts.pb.go | 869 + .../v3beta1/translation_service.pb.go | 2820 +++ .../v1/video_intelligence.pb.go | 2648 +++ .../v1beta1/video_intelligence.pb.go | 1263 ++ .../v1beta2/video_intelligence.pb.go | 1598 ++ .../v1p1beta1/video_intelligence.pb.go | 1653 ++ .../v1p2beta1/video_intelligence.pb.go | 1855 ++ .../v1p3beta1/video_intelligence.pb.go | 2852 +++ .../googleapis/cloud/vision/v1/geometry.pb.go | 264 + .../cloud/vision/v1/image_annotator.pb.go | 3223 ++++ .../cloud/vision/v1/product_search.pb.go | 321 + .../vision/v1/product_search_service.pb.go | 2944 +++ .../cloud/vision/v1/text_annotation.pb.go | 798 + .../cloud/vision/v1/web_detection.pb.go | 396 + .../cloud/vision/v1p1beta1/geometry.pb.go | 199 + .../vision/v1p1beta1/image_annotator.pb.go | 1998 ++ .../vision/v1p1beta1/text_annotation.pb.go | 791 + .../vision/v1p1beta1/web_detection.pb.go | 395 + .../cloud/vision/v1p2beta1/geometry.pb.go | 262 + .../vision/v1p2beta1/image_annotator.pb.go | 2700 +++ .../vision/v1p2beta1/text_annotation.pb.go | 798 + .../vision/v1p2beta1/web_detection.pb.go | 395 + .../cloud/vision/v1p3beta1/geometry.pb.go | 306 + .../vision/v1p3beta1/image_annotator.pb.go | 2826 +++ .../vision/v1p3beta1/product_search.pb.go | 483 + .../v1p3beta1/product_search_service.pb.go | 2968 +++ .../vision/v1p3beta1/text_annotation.pb.go | 798 + .../vision/v1p3beta1/web_detection.pb.go | 395 + .../cloud/vision/v1p4beta1/geometry.pb.go | 265 + .../vision/v1p4beta1/image_annotator.pb.go | 3215 ++++ .../vision/v1p4beta1/product_search.pb.go | 319 + .../v1p4beta1/product_search_service.pb.go | 2978 +++ .../vision/v1p4beta1/text_annotation.pb.go | 799 + .../vision/v1p4beta1/web_detection.pb.go | 397 + .../cloud/webrisk/v1beta1/webrisk.pb.go | 1222 ++ .../v1alpha/crawled_url.pb.go | 111 + .../websecurityscanner/v1alpha/finding.pb.go | 322 + .../v1alpha/finding_addon.pb.go | 257 + .../v1alpha/finding_type_stats.pb.go | 100 + .../v1alpha/scan_config.pb.go | 595 + .../websecurityscanner/v1alpha/scan_run.pb.go | 272 + .../v1alpha/web_security_scanner.pb.go | 1537 ++ .../v1beta/crawled_url.pb.go | 111 + .../websecurityscanner/v1beta/finding.pb.go | 269 + .../v1beta/finding_addon.pb.go | 414 + .../v1beta/finding_type_stats.pb.go | 99 + .../v1beta/scan_config.pb.go | 700 + .../v1beta/scan_config_error.pb.go | 344 + .../websecurityscanner/v1beta/scan_run.pb.go | 298 + .../v1beta/scan_run_error_trace.pb.go | 178 + .../v1beta/scan_run_warning_trace.pb.go | 140 + .../v1beta/web_security_scanner.pb.go | 1537 ++ .../container/v1/cluster_service.pb.go | 7105 +++++++ .../container/v1alpha1/cluster_service.pb.go | 6835 +++++++ .../container/v1beta1/cluster_service.pb.go | 9135 +++++++++ .../datastore/admin/v1/datastore_admin.pb.go | 1225 ++ .../googleapis/datastore/admin/v1/index.pb.go | 324 + .../admin/v1beta1/datastore_admin.pb.go | 900 + .../googleapis/datastore/v1/datastore.pb.go | 2116 +++ 
.../googleapis/datastore/v1/entity.pb.go | 927 + .../googleapis/datastore/v1/query.pb.go | 1258 ++ .../datastore/v1beta3/datastore.pb.go | 2123 +++ .../googleapis/datastore/v1beta3/entity.pb.go | 928 + .../googleapis/datastore/v1beta3/query.pb.go | 1259 ++ .../devtools/build/v1/build_events.pb.go | 1013 + .../devtools/build/v1/build_status.pb.go | 162 + .../build/v1/publish_build_event.pb.go | 542 + .../devtools/cloudbuild/v1/cloudbuild.pb.go | 3633 ++++ .../clouddebugger/v2/controller.pb.go | 578 + .../devtools/clouddebugger/v2/data.pb.go | 1073 ++ .../devtools/clouddebugger/v2/debugger.pb.go | 876 + .../clouderrorreporting/v1beta1/common.pb.go | 555 + .../v1beta1/error_group_service.pb.go | 261 + .../v1beta1/error_stats_service.pb.go | 1142 ++ .../v1beta1/report_errors_service.pb.go | 318 + .../devtools/cloudprofiler/v2/profiler.pb.go | 667 + .../devtools/cloudtrace/v1/trace.pb.go | 870 + .../devtools/cloudtrace/v2/trace.pb.go | 1391 ++ .../devtools/cloudtrace/v2/tracing.pb.go | 227 + .../v1/containeranalysis.pb.go | 244 + .../v1alpha1/bill_of_materials.pb.go | 383 + .../v1alpha1/containeranalysis.pb.go | 4434 +++++ .../v1alpha1/image_basis.pb.go | 439 + .../v1alpha1/package_vulnerability.pb.go | 612 + .../v1alpha1/provenance.pb.go | 989 + .../v1alpha1/source_context.pb.go | 924 + .../v1beta1/attestation/attestation.pb.go | 500 + .../v1beta1/build/build.pb.go | 287 + .../v1beta1/common/common.pb.go | 152 + .../v1beta1/containeranalysis.pb.go | 670 + .../v1beta1/deployment/deployment.pb.go | 272 + .../v1beta1/discovery/discovery.pb.go | 297 + .../v1beta1/grafeas/grafeas.pb.go | 2739 +++ .../v1beta1/image/image.pb.go | 442 + .../v1beta1/package/package.pb.go | 499 + .../v1beta1/provenance/provenance.pb.go | 592 + .../v1beta1/source/source.pb.go | 919 + .../v1beta1/vulnerability/vulnerability.pb.go | 520 + .../v1test/remote_execution.pb.go | 2807 +++ .../devtools/remoteworkers/v1test2/bots.pb.go | 942 + .../remoteworkers/v1test2/command.pb.go | 878 + .../remoteworkers/v1test2/tasks.pb.go | 608 + .../remoteworkers/v1test2/worker.pb.go | 412 + .../devtools/resultstore/v2/action.pb.go | 1438 ++ .../devtools/resultstore/v2/common.pb.go | 600 + .../resultstore/v2/configuration.pb.go | 227 + .../resultstore/v2/configured_target.pb.go | 290 + .../devtools/resultstore/v2/coverage.pb.go | 310 + .../resultstore/v2/coverage_summary.pb.go | 229 + .../devtools/resultstore/v2/file.pb.go | 298 + .../devtools/resultstore/v2/file_set.pb.go | 169 + .../devtools/resultstore/v2/invocation.pb.go | 561 + .../resultstore/v2/resultstore_download.pb.go | 2240 +++ .../v2/resultstore_file_download.pb.go | 433 + .../resultstore/v2/resultstore_upload.pb.go | 2197 +++ .../devtools/resultstore/v2/target.pb.go | 434 + .../devtools/resultstore/v2/test_suite.pb.go | 659 + .../devtools/source/v1/source_context.pb.go | 1170 ++ .../devtools/sourcerepo/v1/sourcerepo.pb.go | 776 + .../example/library/v1/library.pb.go | 1303 ++ .../connection/v1alpha1/connection_api.pb.go | 541 + .../googleapis/firestore/admin/v1/field.pb.go | 213 + .../firestore/admin/v1/firestore_admin.pb.go | 1120 ++ .../googleapis/firestore/admin/v1/index.pb.go | 446 + .../firestore/admin/v1/location.pb.go | 79 + .../firestore/admin/v1/operation.pb.go | 699 + .../admin/v1beta1/firestore_admin.pb.go | 1252 ++ .../firestore/admin/v1beta1/index.pb.go | 267 + .../firestore/admin/v1beta1/location.pb.go | 78 + .../firestore/admin/v1beta2/field.pb.go | 213 + .../admin/v1beta2/firestore_admin.pb.go | 1120 ++ .../firestore/admin/v1beta2/index.pb.go | 446 + 
.../firestore/admin/v1beta2/operation.pb.go | 699 + .../googleapis/firestore/v1/common.pb.go | 562 + .../googleapis/firestore/v1/document.pb.go | 684 + .../googleapis/firestore/v1/firestore.pb.go | 3721 ++++ .../googleapis/firestore/v1/query.pb.go | 1015 + .../googleapis/firestore/v1/write.pb.go | 973 + .../googleapis/firestore/v1beta1/common.pb.go | 563 + .../firestore/v1beta1/document.pb.go | 684 + .../firestore/v1beta1/firestore.pb.go | 3723 ++++ .../googleapis/firestore/v1beta1/query.pb.go | 1016 + .../googleapis/firestore/v1beta1/write.pb.go | 974 + .../googleapis/genomics/v1/annotations.pb.go | 2633 +++ .../googleapis/genomics/v1/cigar.pb.go | 200 + .../googleapis/genomics/v1/datasets.pb.go | 960 + .../googleapis/genomics/v1/operations.pb.go | 242 + .../googleapis/genomics/v1/position.pb.go | 108 + .../googleapis/genomics/v1/range.pb.go | 105 + .../genomics/v1/readalignment.pb.go | 446 + .../googleapis/genomics/v1/readgroup.pb.go | 350 + .../googleapis/genomics/v1/readgroupset.pb.go | 163 + .../googleapis/genomics/v1/reads.pb.go | 1752 ++ .../googleapis/genomics/v1/references.pb.go | 1092 ++ .../googleapis/genomics/v1/variants.pb.go | 3486 ++++ .../genomics/v1alpha2/pipelines.pb.go | 2304 +++ .../googleapis/home/graph/v1/device.pb.go | 316 + .../googleapis/home/graph/v1/homegraph.pb.go | 1259 ++ .../googleapis/iam/admin/v1/iam.pb.go | 3129 ++++ .../iam/credentials/v1/common.pb.go | 717 + .../iam/credentials/v1/iamcredentials.pb.go | 277 + .../googleapis/iam/v1/iam_policy.pb.go | 417 + .../iam/v1/logging/audit_data.pb.go | 90 + .../genproto/googleapis/iam/v1/policy.pb.go | 528 + .../logging/type/http_request.pb.go | 249 + .../logging/type/log_severity.pb.go | 121 + .../googleapis/logging/v2/log_entry.pb.go | 588 + .../googleapis/logging/v2/logging.pb.go | 1031 ++ .../logging/v2/logging_config.pb.go | 1557 ++ .../logging/v2/logging_metrics.pb.go | 837 + .../googleapis/longrunning/operations.pb.go | 926 + .../googleapis/monitoring/v3/alert.pb.go | 962 + .../monitoring/v3/alert_service.pb.go | 667 + .../googleapis/monitoring/v3/common.pb.go | 890 + .../monitoring/v3/dropped_labels.pb.go | 102 + .../googleapis/monitoring/v3/group.pb.go | 157 + .../monitoring/v3/group_service.pb.go | 941 + .../googleapis/monitoring/v3/metric.pb.go | 232 + .../monitoring/v3/metric_service.pb.go | 1218 ++ .../monitoring/v3/mutation_record.pb.go | 97 + .../monitoring/v3/notification.pb.go | 370 + .../monitoring/v3/notification_service.pb.go | 1309 ++ .../monitoring/v3/span_context.pb.go | 96 + .../googleapis/monitoring/v3/uptime.pb.go | 969 + .../monitoring/v3/uptime_service.pb.go | 786 + .../googleapis/privacy/dlp/v2/dlp.pb.go | 15267 ++++++++++++++++ .../googleapis/privacy/dlp/v2/storage.pb.go | 2903 +++ .../googleapis/pubsub/v1/pubsub.pb.go | 3888 ++++ .../googleapis/pubsub/v1beta2/pubsub.pb.go | 1917 ++ .../genproto/googleapis/rpc/code/code.pb.go | 246 + .../rpc/errdetails/error_details.pb.go | 765 + .../googleapis/rpc/status/status.pb.go | 159 + .../database/v1/spanner_database_admin.pb.go | 1158 ++ .../instance/v1/spanner_instance_admin.pb.go | 1568 ++ .../genproto/googleapis/spanner/v1/keys.pb.go | 454 + .../googleapis/spanner/v1/mutation.pb.go | 437 + .../googleapis/spanner/v1/query_plan.pb.go | 388 + .../googleapis/spanner/v1/result_set.pb.go | 529 + .../googleapis/spanner/v1/spanner.pb.go | 2592 +++ .../googleapis/spanner/v1/transaction.pb.go | 1101 ++ .../genproto/googleapis/spanner/v1/type.pb.go | 301 + .../storagetransfer/v1/transfer.pb.go | 807 + .../storagetransfer/v1/transfer_types.pb.go | 
1553 ++ .../streetview/publish/v1/resources.pb.go | 681 + .../streetview/publish/v1/rpcmessages.pb.go | 841 + .../publish/v1/streetview_publish.pb.go | 706 + .../type/calendarperiod/calendar_period.pb.go | 103 + .../googleapis/type/color/color.pb.go | 240 + .../genproto/googleapis/type/date/date.pb.go | 111 + .../googleapis/type/dayofweek/dayofweek.pb.go | 96 + .../genproto/googleapis/type/expr/expr.pb.go | 121 + .../googleapis/type/fraction/fraction.pb.go | 92 + .../googleapis/type/latlng/latlng.pb.go | 93 + .../googleapis/type/money/money.pb.go | 106 + .../type/postaladdress/postal_address.pb.go | 255 + .../type/quaternion/quaternion.pb.go | 165 + .../googleapis/type/timeofday/timeofday.pb.go | 115 + .../googleapis/watcher/v1/watch.pb.go | 435 + .../genproto/protobuf/api/api.pb.go | 405 + .../protobuf/field_mask/field_mask.pb.go | 280 + .../genproto/protobuf/ptype/type.pb.go | 641 + .../source_context/source_context.pb.go | 85 + vendor/google.golang.org/genproto/regen.go | 137 + vendor/google.golang.org/genproto/tools.go | 32 + vendor/google.golang.org/grpc/AUTHORS | 1 + vendor/google.golang.org/grpc/LICENSE | 202 + vendor/google.golang.org/grpc/backoff.go | 38 + vendor/google.golang.org/grpc/balancer.go | 391 + .../grpc/balancer/balancer.go | 336 + .../grpc/balancer/base/balancer.go | 178 + .../grpc/balancer/base/base.go | 64 + .../grpclb/grpc_lb_v1/load_balancer.pb.go | 839 + .../grpc/balancer/grpclb/grpclb.go | 476 + .../grpc/balancer/grpclb/grpclb_picker.go | 195 + .../balancer/grpclb/grpclb_remote_balancer.go | 341 + .../grpc/balancer/grpclb/grpclb_util.go | 209 + .../grpc/balancer/roundrobin/roundrobin.go | 83 + .../balancer/xds/edsbalancer/balancergroup.go | 348 + .../balancer/xds/edsbalancer/edsbalancer.go | 309 + .../grpc/balancer/xds/edsbalancer/util.go | 58 + .../proto/envoy/api/v2/auth/cert/cert.pb.go | 1144 ++ .../internal/proto/envoy/api/v2/cds/cds.pb.go | 1771 ++ .../circuit_breaker/circuit_breaker.pb.go | 185 + .../outlier_detection/outlier_detection.pb.go | 185 + .../envoy/api/v2/core/address/address.pb.go | 610 + .../proto/envoy/api/v2/core/base/base.pb.go | 1121 ++ .../v2/core/config_source/config_source.pb.go | 446 + .../v2/core/grpc_service/grpc_service.pb.go | 1196 ++ .../v2/core/health_check/health_check.pb.go | 998 + .../envoy/api/v2/core/protocol/protocol.pb.go | 311 + .../envoy/api/v2/discovery/discovery.pb.go | 446 + .../internal/proto/envoy/api/v2/eds/eds.pb.go | 376 + .../api/v2/endpoint/endpoint/endpoint.pb.go | 385 + .../envoy/service/discovery/v2/ads/ads.pb.go | 251 + .../proto/envoy/type/percent/percent.pb.go | 160 + .../proto/envoy/type/range/range.pb.go | 132 + .../internal/proto/validate/validate.pb.go | 3214 ++++ .../grpc/balancer/xds/xds.go | 612 + .../grpc/balancer/xds/xds_client.go | 269 + .../grpc/balancer_conn_wrappers.go | 315 + .../grpc/balancer_v1_wrapper.go | 341 + .../grpc/benchmark/benchmain/main.go | 606 + .../grpc/benchmark/benchmark.go | 436 + .../grpc/benchmark/benchresult/main.go | 133 + .../grpc/benchmark/client/main.go | 207 + .../grpc/benchmark/grpc_testing/control.pb.go | 1580 ++ .../benchmark/grpc_testing/messages.pb.go | 731 + .../benchmark/grpc_testing/payloads.pb.go | 348 + .../benchmark/grpc_testing/services.pb.go | 518 + .../grpc/benchmark/grpc_testing/stats.pb.go | 302 + .../grpc/benchmark/latency/latency.go | 315 + .../grpc/benchmark/server/main.go | 90 + .../grpc/benchmark/stats/histogram.go | 222 + .../grpc/benchmark/stats/stats.go | 303 + .../grpc/benchmark/stats/util.go | 208 + 
.../grpc/benchmark/worker/benchmark_client.go | 386 + .../grpc/benchmark/worker/benchmark_server.go | 184 + .../grpc/benchmark/worker/main.go | 230 + .../grpc_binarylog_v1/binarylog.pb.go | 900 + vendor/google.golang.org/grpc/call.go | 74 + .../channelz/grpc_channelz_v1/channelz.pb.go | 3445 ++++ .../grpc/channelz/service/func_linux.go | 105 + .../grpc/channelz/service/func_nonlinux.go | 30 + .../grpc/channelz/service/service.go | 346 + vendor/google.golang.org/grpc/clientconn.go | 1356 ++ vendor/google.golang.org/grpc/codec.go | 50 + .../grpc/codes/code_string.go | 62 + vendor/google.golang.org/grpc/codes/codes.go | 197 + .../grpc/connectivity/connectivity.go | 73 + .../grpc/credentials/alts/alts.go | 330 + .../alts/internal/authinfo/authinfo.go | 87 + .../grpc/credentials/alts/internal/common.go | 69 + .../alts/internal/conn/aeadrekey.go | 131 + .../alts/internal/conn/aes128gcm.go | 105 + .../alts/internal/conn/aes128gcmrekey.go | 116 + .../credentials/alts/internal/conn/common.go | 70 + .../credentials/alts/internal/conn/counter.go | 62 + .../credentials/alts/internal/conn/record.go | 271 + .../credentials/alts/internal/conn/utils.go | 63 + .../alts/internal/handshaker/handshaker.go | 365 + .../internal/handshaker/service/service.go | 54 + .../internal/proto/grpc_gcp/altscontext.pb.go | 151 + .../internal/proto/grpc_gcp/handshaker.pb.go | 1196 ++ .../grpc_gcp/transport_security_common.pb.go | 178 + .../alts/internal/testutil/testutil.go | 125 + .../grpc/credentials/alts/utils.go | 141 + .../grpc/credentials/credentials.go | 338 + .../grpc/credentials/google/google.go | 125 + .../grpc/credentials/internal/syscallconn.go | 61 + .../internal/syscallconn_appengine.go | 30 + .../grpc/credentials/oauth/oauth.go | 173 + .../grpc/credentials/tls13.go | 30 + vendor/google.golang.org/grpc/dialoptions.go | 532 + vendor/google.golang.org/grpc/doc.go | 24 + .../grpc/encoding/encoding.go | 118 + .../grpc/encoding/gzip/gzip.go | 117 + .../grpc/encoding/proto/proto.go | 110 + .../features/authentication/client/main.go | 86 + .../features/authentication/server/main.go | 118 + .../features/cancellation/client/main.go | 94 + .../features/cancellation/server/main.go | 78 + .../features/compression/client/main.go | 60 + .../features/compression/server/main.go | 70 + .../examples/features/deadline/client/main.go | 95 + .../examples/features/deadline/server/main.go | 128 + .../features/debugging/client/main.go | 89 + .../features/debugging/server/main.go | 86 + .../features/encryption/ALTS/client/main.go | 62 + .../features/encryption/ALTS/server/main.go | 74 + .../features/encryption/TLS/client/main.go | 66 + .../features/encryption/TLS/server/main.go | 79 + .../examples/features/errors/client/main.go | 68 + .../examples/features/errors/server/main.go | 83 + .../features/interceptor/client/main.go | 165 + .../features/interceptor/server/main.go | 170 + .../features/keepalive/client/main.go | 62 + .../features/keepalive/server/main.go | 86 + .../features/load_balancing/client/main.go | 125 + .../features/load_balancing/server/main.go | 79 + .../examples/features/metadata/client/main.go | 307 + .../examples/features/metadata/server/main.go | 207 + .../features/multiplex/client/main.go | 77 + .../features/multiplex/server/main.go | 83 + .../features/name_resolving/client/main.go | 135 + .../features/name_resolving/server/main.go | 64 + .../grpc/examples/features/proto/doc.go | 22 + .../examples/features/proto/echo/echo.pb.go | 401 + .../features/reflection/server/main.go | 87 + 
.../examples/features/wait_for_ready/main.go | 125 + .../helloworld/greeter_client/main.go | 58 + .../helloworld/greeter_server/main.go | 56 + .../helloworld/helloworld/helloworld.pb.go | 198 + .../helloworld/mock_helloworld/hw_mock.go | 48 + .../examples/route_guide/client/client.go | 192 + .../route_guide/mock_routeguide/rg_mock.go | 200 + .../route_guide/routeguide/route_guide.pb.go | 640 + .../examples/route_guide/server/server.go | 848 + .../grpc/grpclog/glogger/glogger.go | 86 + .../google.golang.org/grpc/grpclog/grpclog.go | 126 + .../google.golang.org/grpc/grpclog/logger.go | 85 + .../grpc/grpclog/loggerv2.go | 195 + .../google.golang.org/grpc/health/client.go | 107 + .../grpc/health/grpc_health_v1/health.pb.go | 327 + .../google.golang.org/grpc/health/server.go | 165 + vendor/google.golang.org/grpc/interceptor.go | 77 + .../grpc/internal/backoff/backoff.go | 78 + .../grpc/internal/balancerload/load.go | 46 + .../grpc/internal/balancerload/orca/orca.go | 84 + .../balancerload/orca/orca_v1/orca.pb.go | 293 + .../grpc/internal/binarylog/binarylog.go | 167 + .../internal/binarylog/binarylog_testutil.go | 42 + .../grpc/internal/binarylog/env_config.go | 210 + .../grpc/internal/binarylog/method_logger.go | 423 + .../grpc/internal/binarylog/sink.go | 162 + .../grpc/internal/binarylog/util.go | 41 + .../grpc/internal/channelz/funcs.go | 699 + .../grpc/internal/channelz/types.go | 702 + .../grpc/internal/channelz/types_linux.go | 53 + .../grpc/internal/channelz/types_nonlinux.go | 44 + .../grpc/internal/channelz/util_linux.go | 39 + .../grpc/internal/channelz/util_nonlinux.go | 26 + .../grpc/internal/envconfig/envconfig.go | 64 + .../grpc/internal/grpcrand/grpcrand.go | 56 + .../grpc/internal/grpcsync/event.go | 61 + .../grpc/internal/grpctest/grpctest.go | 69 + .../grpc/internal/internal.go | 54 + .../grpc/internal/leakcheck/leakcheck.go | 118 + .../grpc/internal/syscall/syscall_linux.go | 114 + .../grpc/internal/syscall/syscall_nonlinux.go | 73 + .../grpc/internal/testutils/pipe_listener.go | 96 + .../grpc/internal/transport/bdp_estimator.go | 141 + .../grpc/internal/transport/controlbuf.go | 852 + .../grpc/internal/transport/defaults.go | 49 + .../grpc/internal/transport/flowcontrol.go | 218 + .../grpc/internal/transport/handler_server.go | 430 + .../grpc/internal/transport/http2_client.go | 1397 ++ .../grpc/internal/transport/http2_server.go | 1214 ++ .../grpc/internal/transport/http_util.go | 676 + .../grpc/internal/transport/log.go | 44 + .../grpc/internal/transport/transport.go | 760 + .../grpc/interop/alts/client/client.go | 65 + .../grpc/interop/alts/server/server.go | 53 + .../grpc/interop/client/client.go | 276 + .../grpc/interop/fake_grpclb/fake_grpclb.go | 169 + .../grpc/interop/grpc_testing/test.pb.go | 1087 ++ .../interop/http2/negative_http2_client.go | 159 + .../grpc/interop/server/server.go | 78 + .../grpc/interop/test_utils.go | 804 + .../grpc/keepalive/keepalive.go | 85 + .../grpc/metadata/metadata.go | 209 + .../grpc/naming/dns_resolver.go | 293 + .../google.golang.org/grpc/naming/naming.go | 69 + vendor/google.golang.org/grpc/peer/peer.go | 51 + .../google.golang.org/grpc/picker_wrapper.go | 189 + vendor/google.golang.org/grpc/pickfirst.go | 110 + vendor/google.golang.org/grpc/proxy.go | 152 + .../grpc_reflection_v1alpha/reflection.pb.go | 939 + .../grpc/reflection/grpc_testing/proto2.pb.go | 82 + .../reflection/grpc_testing/proto2_ext.pb.go | 109 + .../reflection/grpc_testing/proto2_ext2.pb.go | 98 + .../grpc/reflection/grpc_testing/test.pb.go | 319 + 
.../reflection/grpc_testingv3/testv3.pb.go | 457 + .../grpc/reflection/serverreflection.go | 454 + .../grpc/resolver/dns/dns_resolver.go | 438 + .../grpc/resolver/manual/manual.go | 86 + .../grpc/resolver/passthrough/passthrough.go | 57 + .../grpc/resolver/resolver.go | 173 + .../grpc/resolver_conn_wrapper.go | 165 + vendor/google.golang.org/grpc/rpc_util.go | 843 + vendor/google.golang.org/grpc/server.go | 1498 ++ .../google.golang.org/grpc/service_config.go | 373 + .../grpc/stats/grpc_testing/test.pb.go | 403 + .../google.golang.org/grpc/stats/handlers.go | 63 + vendor/google.golang.org/grpc/stats/stats.go | 300 + .../google.golang.org/grpc/status/status.go | 210 + vendor/google.golang.org/grpc/stream.go | 1498 ++ .../grpc/stress/client/main.go | 337 + .../grpc/stress/grpc_testing/metrics.pb.go | 433 + .../grpc/stress/metrics_client/main.go | 82 + vendor/google.golang.org/grpc/tap/tap.go | 51 + .../grpc/test/bufconn/bufconn.go | 244 + .../grpc/test/codec_perf/perf.pb.go | 75 + .../google.golang.org/grpc/test/go_vet/vet.go | 53 + .../grpc/test/grpc_testing/test.pb.go | 949 + vendor/google.golang.org/grpc/test/race.go | 24 + .../grpc/test/rawConnWrapper.go | 387 + .../grpc/test/servertester.go | 280 + .../grpc/test/tools/tools.go | 34 + .../grpc/testdata/testdata.go | 42 + vendor/google.golang.org/grpc/trace.go | 126 + vendor/google.golang.org/grpc/version.go | 22 + vendor/k8s.io/code-generator/go.mod | 21 - vendor/k8s.io/code-generator/go.sum | 21 - 2586 files changed, 938585 insertions(+), 454 deletions(-) create mode 100644 Makefile delete mode 100644 build/Dockerfile create mode 100644 cmd/lyra/main.go create mode 100644 cmd/manager/controller/controller.go delete mode 100644 go.mod delete mode 100644 go.sum create mode 100644 vendor/github.com/bmatcuk/doublestar/LICENSE create mode 100644 vendor/github.com/bmatcuk/doublestar/doublestar.go create mode 120000 vendor/github.com/bmatcuk/doublestar/test/b/symlink-dir create mode 120000 vendor/github.com/bmatcuk/doublestar/test/broken-symlink create mode 120000 vendor/github.com/bmatcuk/doublestar/test/working-symlink create mode 100644 vendor/github.com/hashicorp/go-hclog/LICENSE create mode 100644 vendor/github.com/hashicorp/go-hclog/global.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/buildtag.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/dead.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/hclog.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/main.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/shadow.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/testdata/log.go create mode 100644 vendor/github.com/hashicorp/go-hclog/hclogvet/types.go create mode 100644 vendor/github.com/hashicorp/go-hclog/intlogger.go create mode 100644 vendor/github.com/hashicorp/go-hclog/logger.go create mode 100644 vendor/github.com/hashicorp/go-hclog/nulllogger.go create mode 100644 vendor/github.com/hashicorp/go-hclog/stacktrace.go create mode 100644 vendor/github.com/hashicorp/go-hclog/stdlog.go create mode 100644 vendor/github.com/hashicorp/go-hclog/writer.go create mode 100644 vendor/github.com/hashicorp/go-plugin/LICENSE create mode 100644 vendor/github.com/hashicorp/go-plugin/client.go create mode 100644 vendor/github.com/hashicorp/go-plugin/discover.go create mode 100644 vendor/github.com/hashicorp/go-plugin/error.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/basic/commons/greeter_interface.go create mode 100644 
vendor/github.com/hashicorp/go-plugin/examples/basic/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/basic/plugin/greeter_impl.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/bidirectional/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/bidirectional/plugin-go-grpc/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/bidirectional/proto/kv.pb.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/grpc.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/interface.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-grpc/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-netrpc/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/proto/kv.pb.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/grpc.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/interface.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/rpc.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/negotiated/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/examples/negotiated/plugin-go/main.go create mode 100644 vendor/github.com/hashicorp/go-plugin/grpc_broker.go create mode 100644 vendor/github.com/hashicorp/go-plugin/grpc_client.go create mode 100644 vendor/github.com/hashicorp/go-plugin/grpc_controller.go create mode 100644 vendor/github.com/hashicorp/go-plugin/grpc_server.go create mode 100644 vendor/github.com/hashicorp/go-plugin/internal/plugin/gen.go create mode 100644 vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_broker.pb.go create mode 100644 vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_controller.pb.go create mode 100644 vendor/github.com/hashicorp/go-plugin/log_entry.go create mode 100644 vendor/github.com/hashicorp/go-plugin/mtls.go create mode 100644 vendor/github.com/hashicorp/go-plugin/mux_broker.go create mode 100644 vendor/github.com/hashicorp/go-plugin/plugin.go create mode 100644 vendor/github.com/hashicorp/go-plugin/process.go create mode 100644 vendor/github.com/hashicorp/go-plugin/process_posix.go create mode 100644 vendor/github.com/hashicorp/go-plugin/process_windows.go create mode 100644 vendor/github.com/hashicorp/go-plugin/protocol.go create mode 100644 vendor/github.com/hashicorp/go-plugin/rpc_client.go create mode 100644 vendor/github.com/hashicorp/go-plugin/rpc_server.go create mode 100644 vendor/github.com/hashicorp/go-plugin/server.go create mode 100644 vendor/github.com/hashicorp/go-plugin/server_mux.go create mode 100644 vendor/github.com/hashicorp/go-plugin/stream.go create mode 100644 vendor/github.com/hashicorp/go-plugin/test/grpc/gen.go create mode 100644 vendor/github.com/hashicorp/go-plugin/test/grpc/test.pb.go create mode 100644 vendor/github.com/hashicorp/go-plugin/testing.go create mode 100644 vendor/github.com/hashicorp/yamux/LICENSE create mode 100644 vendor/github.com/hashicorp/yamux/addr.go create mode 100644 vendor/github.com/hashicorp/yamux/const.go create mode 100644 vendor/github.com/hashicorp/yamux/mux.go create mode 100644 vendor/github.com/hashicorp/yamux/session.go create mode 100644 vendor/github.com/hashicorp/yamux/stream.go create mode 100644 vendor/github.com/hashicorp/yamux/util.go 
create mode 100644 vendor/github.com/inconshreveable/mousetrap/LICENSE create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_others.go create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows.go create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go create mode 100644 vendor/github.com/leonelquinteros/gotext/LICENSE create mode 100644 vendor/github.com/leonelquinteros/gotext/gotext.go create mode 100644 vendor/github.com/leonelquinteros/gotext/helper.go create mode 100644 vendor/github.com/leonelquinteros/gotext/locale.go create mode 100644 vendor/github.com/leonelquinteros/gotext/mo.go create mode 100644 vendor/github.com/leonelquinteros/gotext/plurals/compiler.go create mode 100644 vendor/github.com/leonelquinteros/gotext/plurals/expression.go create mode 100644 vendor/github.com/leonelquinteros/gotext/plurals/math.go create mode 100644 vendor/github.com/leonelquinteros/gotext/plurals/tests.go create mode 100644 vendor/github.com/leonelquinteros/gotext/po.go create mode 100644 vendor/github.com/leonelquinteros/gotext/translation.go create mode 100644 vendor/github.com/leonelquinteros/gotext/translator.go create mode 100644 vendor/github.com/lyraproj/data-protobuf/LICENSE create mode 100644 vendor/github.com/lyraproj/data-protobuf/datapb/data.pb.go create mode 100644 vendor/github.com/lyraproj/data-protobuf/datapb/reflect.go create mode 100644 vendor/github.com/lyraproj/hiera/LICENSE create mode 100644 vendor/github.com/lyraproj/hiera/hiera/hiera.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/api.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/dataprovider.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/issues.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/key.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/location.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/mergestrategy.go create mode 100644 vendor/github.com/lyraproj/hiera/hieraapi/providercontext.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/config.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/datadigprovider.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/datahashprovider.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/deepmerge.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/init.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/interpolate.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/invocation.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/key.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/location.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/lookup.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/lookupkeyprovider.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/mergestrategy.go create mode 100644 vendor/github.com/lyraproj/hiera/internal/providerctx.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/configlookupkey.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/environment.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/jsondata.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/muxlookupkey.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/yamldata.go create mode 100644 vendor/github.com/lyraproj/hiera/provider/yamllookupkey.go create mode 100644 vendor/github.com/lyraproj/issue/LICENSE create mode 
100644 vendor/github.com/lyraproj/issue/issue/conversions.go create mode 100644 vendor/github.com/lyraproj/issue/issue/issue.go create mode 100644 vendor/github.com/lyraproj/issue/issue/label.go create mode 100644 vendor/github.com/lyraproj/issue/issue/location.go create mode 100644 vendor/github.com/lyraproj/issue/issue/printf.go create mode 100644 vendor/github.com/lyraproj/issue/issue/reported.go create mode 100644 vendor/github.com/lyraproj/issue/issue/result.go create mode 100644 vendor/github.com/lyraproj/issue/issue/severity.go create mode 100644 vendor/github.com/lyraproj/lyra/LICENSE create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-example/example/start.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-example/main.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-example/resource/resource.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/foobernetes/start.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/main.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/instance.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/loadbalancer.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/state.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/webserver.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/apply.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/delete.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/embedded.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/generate.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/root.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/validate.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/version.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/main.go create mode 100644 vendor/github.com/lyraproj/lyra/cmd/lyra/ui/ui.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/aws/aws.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/declarative/declarative.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/imperative/imperative.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/referee/referee.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/referenced/referenced.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/aws/aws.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/azurerm/azurerm.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/example/example.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/foobernetes/foobernetes.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/github/github.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/google/google.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/identity/identity.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/kubernetes/kubernetes.go create mode 100644 vendor/github.com/lyraproj/lyra/examples/go-samples/types/puppet/puppet.go create mode 100644 vendor/github.com/lyraproj/lyra/external/externalmodules.go create mode 100644 
vendor/github.com/lyraproj/lyra/pkg/apply/apply.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/change/node.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/change/registry.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/generate/generate.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/loader/fs.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/loader/integrity/sha256sum.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/loader/loader.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/logger/logger.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/util/command.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/version/logo.go create mode 100644 vendor/github.com/lyraproj/lyra/pkg/version/version.go create mode 100644 vendor/github.com/lyraproj/pcore/LICENSE create mode 100644 vendor/github.com/lyraproj/pcore/hash/stringhash.go create mode 100644 vendor/github.com/lyraproj/pcore/loader/dependency.go create mode 100644 vendor/github.com/lyraproj/pcore/loader/filebased.go create mode 100644 vendor/github.com/lyraproj/pcore/loader/instantiate.go create mode 100644 vendor/github.com/lyraproj/pcore/loader/loader.go create mode 100644 vendor/github.com/lyraproj/pcore/loader/smartpath.go create mode 100644 vendor/github.com/lyraproj/pcore/pcore/entrypoint.go create mode 100644 vendor/github.com/lyraproj/pcore/proto/convert.go create mode 100644 vendor/github.com/lyraproj/pcore/px/collection.go create mode 100644 vendor/github.com/lyraproj/pcore/px/collector.go create mode 100644 vendor/github.com/lyraproj/pcore/px/context.go create mode 100644 vendor/github.com/lyraproj/pcore/px/equality.go create mode 100644 vendor/github.com/lyraproj/pcore/px/format.go create mode 100644 vendor/github.com/lyraproj/pcore/px/function.go create mode 100644 vendor/github.com/lyraproj/pcore/px/functional.go create mode 100644 vendor/github.com/lyraproj/pcore/px/issues.go create mode 100644 vendor/github.com/lyraproj/pcore/px/loader.go create mode 100644 vendor/github.com/lyraproj/pcore/px/logging.go create mode 100644 vendor/github.com/lyraproj/pcore/px/reflector.go create mode 100644 vendor/github.com/lyraproj/pcore/px/runtime.go create mode 100644 vendor/github.com/lyraproj/pcore/px/typedname.go create mode 100644 vendor/github.com/lyraproj/pcore/px/types.go create mode 100644 vendor/github.com/lyraproj/pcore/px/valueconsumer.go create mode 100644 vendor/github.com/lyraproj/pcore/px/values.go create mode 100644 vendor/github.com/lyraproj/pcore/px/wrap.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/context.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/equality.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/function.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/implementationregistry.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/parameter.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/runtime.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/setting.go create mode 100644 vendor/github.com/lyraproj/pcore/pximpl/typemismatchdescriber.go create mode 100644 vendor/github.com/lyraproj/pcore/serialization/deserializer.go create mode 100644 vendor/github.com/lyraproj/pcore/serialization/extension.go create mode 100644 vendor/github.com/lyraproj/pcore/serialization/jsonstreamer.go create mode 100644 vendor/github.com/lyraproj/pcore/serialization/jsontodata.go create mode 100644 vendor/github.com/lyraproj/pcore/serialization/serializer.go create 
mode 100644 vendor/github.com/lyraproj/pcore/threadlocal/gid.go create mode 100644 vendor/github.com/lyraproj/pcore/types/annotatable.go create mode 100644 vendor/github.com/lyraproj/pcore/types/annotatedmember.go create mode 100644 vendor/github.com/lyraproj/pcore/types/anytype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/arraytype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/attribute.go create mode 100644 vendor/github.com/lyraproj/pcore/types/attributesinfo.go create mode 100644 vendor/github.com/lyraproj/pcore/types/basiccollector.go create mode 100644 vendor/github.com/lyraproj/pcore/types/binarytype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/booleantype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/callabletype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/coerce.go create mode 100644 vendor/github.com/lyraproj/pcore/types/collectiontype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/commonality.go create mode 100644 vendor/github.com/lyraproj/pcore/types/constants.go create mode 100644 vendor/github.com/lyraproj/pcore/types/conversions.go create mode 100644 vendor/github.com/lyraproj/pcore/types/defaulttype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/deferred.go create mode 100644 vendor/github.com/lyraproj/pcore/types/deferredtype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/enumtype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/floattype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/format.go create mode 100644 vendor/github.com/lyraproj/pcore/types/function.go create mode 100644 vendor/github.com/lyraproj/pcore/types/hashtype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/inittype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/integertype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/iterabletype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/iteratortype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/lexer.go create mode 100644 vendor/github.com/lyraproj/pcore/types/liketype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/notundeftype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/numerictype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/objecttype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/objecttypeextension.go create mode 100644 vendor/github.com/lyraproj/pcore/types/objectvalue.go create mode 100644 vendor/github.com/lyraproj/pcore/types/optionaltype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/parser.go create mode 100644 vendor/github.com/lyraproj/pcore/types/patterntype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/reflector.go create mode 100644 vendor/github.com/lyraproj/pcore/types/regexptype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/resolver.go create mode 100644 vendor/github.com/lyraproj/pcore/types/runtimetype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/scalardatatype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/scalartype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/semverrangetype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/semvertype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/sensitivetype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/stringtype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/structtype.go create mode 
100644 vendor/github.com/lyraproj/pcore/types/taggedtype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/timespantype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/timestamptype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/tupletype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typealiastype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typedname.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typeparameter.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typereferencetype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/types.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typeset.go create mode 100644 vendor/github.com/lyraproj/pcore/types/typetype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/undeftype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/unittype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/uritype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/varianttype.go create mode 100644 vendor/github.com/lyraproj/pcore/types/zinit.go create mode 100644 vendor/github.com/lyraproj/pcore/utils/pow.go create mode 100644 vendor/github.com/lyraproj/pcore/utils/reader.go create mode 100644 vendor/github.com/lyraproj/pcore/utils/strings.go create mode 100644 vendor/github.com/lyraproj/pcore/yaml/unmarshal.go create mode 100644 vendor/github.com/lyraproj/semver/semver/version.go create mode 100644 vendor/github.com/lyraproj/semver/semver/versionrange.go create mode 100644 vendor/github.com/lyraproj/servicesdk/LICENSE create mode 100644 vendor/github.com/lyraproj/servicesdk/annotation/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/annotation/relationship.go create mode 100644 vendor/github.com/lyraproj/servicesdk/annotation/resource.go create mode 100644 vendor/github.com/lyraproj/servicesdk/grpc/client.go create mode 100644 vendor/github.com/lyraproj/servicesdk/grpc/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/grpc/server.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/action.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/collect.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/reference.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/resource.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/stateconverter.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/util.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/go/lyra/workflow.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/typegen/generator.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/typegen/golang.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/typegen/puppet.go create mode 100644 vendor/github.com/lyraproj/servicesdk/lang/typegen/typescript.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/builder.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/definition.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/error.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/loader.go create mode 100644 
vendor/github.com/lyraproj/servicesdk/service/notfound.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/resourcetypebuilder.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/server.go create mode 100644 vendor/github.com/lyraproj/servicesdk/service/subservice.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/definition.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/error.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/identity.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/invokable.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/metadata.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/server.go create mode 100644 vendor/github.com/lyraproj/servicesdk/serviceapi/stateresolver.go create mode 100644 vendor/github.com/lyraproj/servicesdk/servicepb/service.pb.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/action.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/builder.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/condition.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/crd.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/issues.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/iterator.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/operation.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/parser.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/reference.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/resource.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/statehandler.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/step.go create mode 100644 vendor/github.com/lyraproj/servicesdk/wf/workflow.go create mode 100644 vendor/github.com/lyraproj/wfe/LICENSE create mode 100644 vendor/github.com/lyraproj/wfe/api/issues.go create mode 100644 vendor/github.com/lyraproj/wfe/api/iterator.go create mode 100644 vendor/github.com/lyraproj/wfe/api/loader.go create mode 100644 vendor/github.com/lyraproj/wfe/api/resource.go create mode 100644 vendor/github.com/lyraproj/wfe/api/step.go create mode 100644 vendor/github.com/lyraproj/wfe/api/workflow.go create mode 100644 vendor/github.com/lyraproj/wfe/service/crud.go create mode 100644 vendor/github.com/lyraproj/wfe/service/identity.go create mode 100644 vendor/github.com/lyraproj/wfe/service/util.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/action.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/issues.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/iterator.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/loader.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/reference.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/resource.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/statehandler.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/step.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/workflow.go create mode 100644 vendor/github.com/lyraproj/wfe/wfe/workflowengine.go create mode 100644 vendor/github.com/mattn/go-colorable/LICENSE create mode 100644 vendor/github.com/mattn/go-colorable/_example/escape-seq/main.go create mode 100644 vendor/github.com/mattn/go-colorable/_example/logrus/main.go create mode 100644 vendor/github.com/mattn/go-colorable/_example/title/main.go create mode 100644 
vendor/github.com/mattn/go-colorable/cmd/colorable/colorable.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_appengine.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_others.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_windows.go create mode 100644 vendor/github.com/mattn/go-colorable/noncolorable.go create mode 100644 vendor/github.com/mattn/go-isatty/LICENSE create mode 100644 vendor/github.com/mattn/go-isatty/doc.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_android.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_bsd.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_linux.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_others.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_solaris.go create mode 100644 vendor/github.com/mattn/go-isatty/isatty_windows.go create mode 100644 vendor/github.com/mgutz/ansi/LICENSE create mode 100644 vendor/github.com/mgutz/ansi/ansi.go create mode 100644 vendor/github.com/mgutz/ansi/cmd/ansi-mgutz/main.go create mode 100644 vendor/github.com/mgutz/ansi/doc.go create mode 100644 vendor/github.com/mgutz/ansi/print.go create mode 100644 vendor/github.com/mitchellh/go-testing-interface/LICENSE create mode 100644 vendor/github.com/mitchellh/go-testing-interface/testing.go create mode 100644 vendor/github.com/mitchellh/go-testing-interface/testing_go19.go create mode 100644 vendor/github.com/oklog/run/LICENSE create mode 100644 vendor/github.com/oklog/run/group.go create mode 100644 vendor/github.com/spf13/cobra/LICENSE.txt create mode 100644 vendor/github.com/spf13/cobra/args.go create mode 100644 vendor/github.com/spf13/cobra/bash_completions.go create mode 100644 vendor/github.com/spf13/cobra/cobra.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/add.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/helpers.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/init.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/licenses.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/project.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/root.go create mode 100644 vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden create mode 100644 vendor/github.com/spf13/cobra/cobra/main.go create mode 100644 vendor/github.com/spf13/cobra/command.go create mode 100644 vendor/github.com/spf13/cobra/command_notwin.go create mode 100644 vendor/github.com/spf13/cobra/command_win.go create mode 100644 vendor/github.com/spf13/cobra/doc/man_docs.go create mode 100644 vendor/github.com/spf13/cobra/doc/md_docs.go create mode 100644 vendor/github.com/spf13/cobra/doc/rest_docs.go create mode 100644 vendor/github.com/spf13/cobra/doc/util.go create mode 100644 vendor/github.com/spf13/cobra/doc/yaml_docs.go create mode 100644 
vendor/github.com/spf13/cobra/zsh_completions.go create mode 120000 vendor/gonum.org/v1/gonum/.travis/deps.d/linux/01-deps.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/deps.d/osx/nothing.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/deps.d/windows/nothing.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/01-check-copyright.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/02-check-imports.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/03-check-formatting.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/04-test.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/05-test-coverage.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/linux/06-check-generate.sh create mode 120000 vendor/gonum.org/v1/gonum/.travis/run.d/osx/01-test.sh create mode 100644 vendor/gonum.org/v1/gonum/AUTHORS create mode 100644 vendor/gonum.org/v1/gonum/CONTRIBUTORS create mode 100644 vendor/gonum.org/v1/gonum/LICENSE create mode 100644 vendor/gonum.org/v1/gonum/blas/blas.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas32/blas32.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas32/conv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas32/conv_symmetric.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas32/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas64/blas64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas64/conv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas64/conv_symmetric.go create mode 100644 vendor/gonum.org/v1/gonum/blas/blas64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas128/cblas128.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas128/conv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas128/conv_hermitian.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas128/conv_symmetric.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas128/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas64/cblas64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas64/conv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas64/conv_hermitian.go create mode 100644 vendor/gonum.org/v1/gonum/blas/cblas64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/dgemm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/errors.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/gemv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/gonum.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx128.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float32.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float32_dsdot.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdot.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdsdot.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level1float64_ddot.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx128.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level2float32.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level2float64.go create mode 
100644 vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx128.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level3float32.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/level3float64.go create mode 100644 vendor/gonum.org/v1/gonum/blas/gonum/sgemm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/benchautogen/autogen_bench_level1double.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/benchsize.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/common.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dgbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dgemm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dgemmbench.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dgemv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dger.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/doc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dspmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dspr.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dspr2.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsymm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsymv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsyr.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsyr2.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsyr2k.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dsyrk.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtbsv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtpmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtpsv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtrmm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtrmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtrmvbench.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtrsm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtrsv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dtxmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dzasum.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/dznrm2.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/izamax.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/level1double.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/level2bench.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zaxpy.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zcopy.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zdotc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zdotu.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zdscal.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zgbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zgemm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zgemv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zgerc.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zgeru.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zhbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zhemm.go create mode 100644 
vendor/gonum.org/v1/gonum/blas/testblas/zhemv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zher.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zher2.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zher2k.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zherk.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zhpmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zhpr.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zhpr2.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zscal.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zswap.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zsymm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zsyr2k.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/zsyrk.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztbmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztbsv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztpmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztpsv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztrmm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztrmv.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztrsm.go create mode 100644 vendor/gonum.org/v1/gonum/blas/testblas/ztrsv.go create mode 100644 vendor/gonum.org/v1/gonum/bound/bound.go create mode 100644 vendor/gonum.org/v1/gonum/bound/doc.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/crosslaplacian.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/derivative.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/diff.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/doc.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/gradient.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/hessian.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/jacobian.go create mode 100644 vendor/gonum.org/v1/gonum/diff/fd/laplacian.go create mode 100644 vendor/gonum.org/v1/gonum/doc.go create mode 100644 vendor/gonum.org/v1/gonum/floats/doc.go create mode 100644 vendor/gonum.org/v1/gonum/floats/floats.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/doc.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/fourier.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_bounds_checks.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_no_bounds_checks.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cfft.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cosq.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cost.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/doc.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/rfft.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sinq.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sint.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/quarter.go create mode 100644 vendor/gonum.org/v1/gonum/fourier/sincos.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/bisect.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/k_communities.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/louvain_common.go create mode 100644 
vendor/gonum.org/v1/gonum/graph/community/louvain_directed.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/louvain_directed_multiplex.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/louvain_undirected.go create mode 100644 vendor/gonum.org/v1/gonum/graph/community/louvain_undirected_multiplex.go create mode 100644 vendor/gonum.org/v1/gonum/graph/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/digraph6/digraph6.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/dot/decode.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/dot/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/dot/encode.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/encoding.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/graph6/graph6.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/graphql/decode.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/graphql/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/encoding/graphql/graphql.go create mode 100644 vendor/gonum.org/v1/gonum/graph/ex/fdpclust/gn.go create mode 100644 vendor/gonum.org/v1/gonum/graph/ex/fdpclust/main.go create mode 100644 vendor/gonum.org/v1/gonum/graph/flow/control_flow_lt.go create mode 100644 vendor/gonum.org/v1/gonum/graph/flow/control_flow_slt.go create mode 100644 vendor/gonum.org/v1/gonum/graph/flow/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/cytoscapejs.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/testdata/LICENSE create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/ast/ast.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/ast/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/dot.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/fuzz/fuzz.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/astx.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/errors.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/acttab.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/lexer.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/transitiontable.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/action.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/actiontable.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/gototable.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/parser.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/productionstable.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/paste_copyright.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/token.go create mode 
100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/litconv.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/rune.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/dot/sem.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/gexf12/gexf.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/sigmajs/sigmajs.go create mode 100644 vendor/gonum.org/v1/gonum/graph/formats/sigmajs/testdata/LICENSE.txt create mode 100644 vendor/gonum.org/v1/gonum/graph/graph.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/batagelj_brandes.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/duplication.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/gen.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/holme_kim.go create mode 100644 vendor/gonum.org/v1/gonum/graph/graphs/gen/small_world.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/linear/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/linear/linear.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/ordered/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/ordered/sort.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/set/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/set/same.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/set/same_appengine.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/set/set.go create mode 100644 vendor/gonum.org/v1/gonum/graph/internal/uid/uid.go create mode 100644 vendor/gonum.org/v1/gonum/graph/iterator/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/iterator/edges.go create mode 100644 vendor/gonum.org/v1/gonum/graph/iterator/lines.go create mode 100644 vendor/gonum.org/v1/gonum/graph/iterator/nodes.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/directed.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/multi.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/undirected.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/weighted_directed.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multi/weighted_undirected.go create mode 100644 vendor/gonum.org/v1/gonum/graph/multigraph.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/betweenness.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/diffusion.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/distance.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/hits.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/network.go create mode 100644 vendor/gonum.org/v1/gonum/graph/network/page.go create mode 100644 vendor/gonum.org/v1/gonum/graph/nodes_edges.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/a_star.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/bellman_ford_moore.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/dijkstra.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/disjoint.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/dynamic/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/dynamic/dstarlite.go create 
mode 100644 vendor/gonum.org/v1/gonum/graph/path/floydwarshall.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/grid.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/limited.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/shortest.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/johnson_apsp.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/shortest.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/spanning_tree.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/weight.go create mode 100644 vendor/gonum.org/v1/gonum/graph/path/yen_ksp.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/dense_directed_matrix.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/dense_undirected_matrix.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/directed.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/simple.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/undirected.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/weighted_directed.go create mode 100644 vendor/gonum.org/v1/gonum/graph/simple/weighted_undirected.go create mode 100644 vendor/gonum.org/v1/gonum/graph/testgraph/testcases.go create mode 100644 vendor/gonum.org/v1/gonum/graph/testgraph/testgraph.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/bron_kerbosch.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/clique_graph.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/johnson_cycles.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/non_tomita_choice.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/paton_cycles.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/tarjan.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/tomita_choice.go create mode 100644 vendor/gonum.org/v1/gonum/graph/topo/topo.go create mode 100644 vendor/gonum.org/v1/gonum/graph/traverse/doc.go create mode 100644 vendor/gonum.org/v1/gonum/graph/traverse/traverse.go create mode 100644 vendor/gonum.org/v1/gonum/graph/undirect.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/doc.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/doc.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/hermite.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/hermite_data.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/internal/PrintGoSlice.m create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/internal/genherm.m create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/internal/hermpts.m create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/legendre.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/quad/quad.go create mode 100644 vendor/gonum.org/v1/gonum/integrate/trapezoidal.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/axpyinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/axpyincto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitaryto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dotcinc_amd64.s create 
mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dotcunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dotuinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dotuunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dscalinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/dscalunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/scal.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/scalUnitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/scalinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/axpyinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/axpyincto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitaryto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/conj.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/dotcinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/dotcunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/dotuinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/dotuunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/scal.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/axpyinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/axpyincto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitaryto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/ddotinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/ddotunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/dotinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/dotunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/ge_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/scal.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/abssum_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/abssuminc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/add_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/addconst_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/axpy.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/axpyinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/axpyincto_amd64.s create mode 100644 
vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitaryto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/cumprod_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/cumsum_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/div_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/divto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/dot.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/dot_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/ge_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/ge_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/gemvN_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/gemvT_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/ger_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/l1norm_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/linfnorm_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/scal.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/scalinc_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/scalincto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitary_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitaryto_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_noasm.go create mode 100644 vendor/gonum.org/v1/gonum/internal/asm/f64/sum_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/abs.go create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/conj.go create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/isinf.go create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/isnan.go create mode 100644 vendor/gonum.org/v1/gonum/internal/cmplx64/sqrt.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/doc.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/math.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/signbit.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/sqrt.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.go create mode 100644 vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.s create mode 100644 vendor/gonum.org/v1/gonum/lapack/doc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dbdsqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgebak.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgebal.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgebd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgebrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgecon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgeev.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgehd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgehrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgelq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgelqf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgels.go create mode 100644 
vendor/gonum.org/v1/gonum/lapack/gonum/dgeql2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgeqp3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgeqr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgeqrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgerq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgerqf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgesvd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgetf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgetrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgetri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dgetrs.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dggsvd3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dggsvp3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dhseqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlabrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlacn2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlacpy.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlae2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaev2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaexc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlags2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlahqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlahr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaln2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlange.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlanst.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlansy.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlantr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlanv2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlapll.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlapmt.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlapy2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqp2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqps.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr04.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr1.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr23.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr5.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlarf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlarfb.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlarfg.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlarft.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlarfx.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlartg.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlas2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlascl.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaset.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasq1.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasq3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasq4.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasq5.go create mode 100644 
vendor/gonum.org/v1/gonum/lapack/gonum/dlasq6.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasrt.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlassq.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasv2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlaswp.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlasy2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlatrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlatrs.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlauu2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dlauum.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/doc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorg2l.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorg2r.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorgbr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorghr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorgl2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorglq.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorgql.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorgqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorgtr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorm2r.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dormbr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dormhr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dorml2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dormlq.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dormqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dormr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpbtf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpocon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpotf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpotrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpotri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dpotrs.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/drscl.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dsteqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dsterf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dsyev.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dsytd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dsytrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtgsja.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrcon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrevc3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrexc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrti2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrtri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/dtrtrs.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/errors.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/iladlc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/iladlr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/ilaenv.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/gonum/iparmq.go create mode 100644 
vendor/gonum.org/v1/gonum/lapack/gonum/lapack.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlahr2test/main.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlaqr5test/main.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dcopy.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/disnan.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlaisnan.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlamch.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlas2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlascl.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq1.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq3.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq4.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq5.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq6.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasrt.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ieeeck.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ilaenv.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/iparmq.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/lsame.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq1.f90 create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq2.f90 create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq3.f90 create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq4.f90 create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/xerbla.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/disnan.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlae2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlaisnan.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlamch.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlanst.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlapy2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlascl.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlasrt.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlassq.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dsterf.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/lsame.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/testdsterf.f90 create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/xerbla.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/daxpy.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dcopy.f 
create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemm.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemv.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlabad.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlacpy.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlahr2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlamch.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlapy2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr1.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr5.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlarfg.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaset.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dnrm2.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dscal.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmm.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmv.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/lsame.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/netlib.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/xerbla.f create mode 100644 vendor/gonum.org/v1/gonum/lapack/lapack.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/lapack64/doc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/lapack64/lapack64.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dbdsqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgebak.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgebal.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgebd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgebrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgecon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev_bench.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgehd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgehrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgelq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgelqf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgels.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeql2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqp3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgerq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgerqf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgesvd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgetf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgetri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrs.go create mode 100644 
vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvd3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvp3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dhseqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlabrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlacn2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlacpy.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlae2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaev2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaexc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlags2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlahqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlahr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaln2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlange.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlanst.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlansy.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlantr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlanv2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlapll.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlapmt.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlapy2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqp2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqps.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr04.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr1.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr23.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr5.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlarf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfb.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfg.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlarft.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfx.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlartg.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlas2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlascl.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaset.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq1.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq4.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq5.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasrt.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasv2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlaswp.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlasy2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrs.go create mode 100644 
vendor/gonum.org/v1/gonum/lapack/testlapack/dlauu2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dlauum.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/doc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2l.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2r.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorgbr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorghr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorgl2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorglq.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorgql.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorgqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorgtr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorm2r.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dormbr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dormhr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dorml2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dormlq.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dormqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dormr2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpbtf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpocon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpotf2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpotri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrs.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/drscl.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dsteqr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dsterf.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dsyev.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dsytd2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dsytrd.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtgsja.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtrcon.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtrevc3.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtrexc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtrti2.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/dtrtri.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/fortran.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/general.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/iladlc.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/iladlr.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/matgen.go create mode 100644 vendor/gonum.org/v1/gonum/lapack/testlapack/test_matrices.go create mode 100644 vendor/gonum.org/v1/gonum/mat/band.go create mode 100644 vendor/gonum.org/v1/gonum/mat/cdense.go create mode 100644 vendor/gonum.org/v1/gonum/mat/cholesky.go create mode 100644 vendor/gonum.org/v1/gonum/mat/cmatrix.go create mode 100644 vendor/gonum.org/v1/gonum/mat/consts.go create mode 100644 vendor/gonum.org/v1/gonum/mat/dense.go create mode 100644 vendor/gonum.org/v1/gonum/mat/dense_arithmetic.go create 
mode 100644 vendor/gonum.org/v1/gonum/mat/diagonal.go create mode 100644 vendor/gonum.org/v1/gonum/mat/doc.go create mode 100644 vendor/gonum.org/v1/gonum/mat/eigen.go create mode 100644 vendor/gonum.org/v1/gonum/mat/errors.go create mode 100644 vendor/gonum.org/v1/gonum/mat/format.go create mode 100644 vendor/gonum.org/v1/gonum/mat/gsvd.go create mode 100644 vendor/gonum.org/v1/gonum/mat/hogsvd.go create mode 100644 vendor/gonum.org/v1/gonum/mat/index_bound_checks.go create mode 100644 vendor/gonum.org/v1/gonum/mat/index_no_bound_checks.go create mode 100644 vendor/gonum.org/v1/gonum/mat/inner.go create mode 100644 vendor/gonum.org/v1/gonum/mat/io.go create mode 100644 vendor/gonum.org/v1/gonum/mat/lq.go create mode 100644 vendor/gonum.org/v1/gonum/mat/lu.go create mode 100644 vendor/gonum.org/v1/gonum/mat/matrix.go create mode 100644 vendor/gonum.org/v1/gonum/mat/offset.go create mode 100644 vendor/gonum.org/v1/gonum/mat/offset_appengine.go create mode 100644 vendor/gonum.org/v1/gonum/mat/pool.go create mode 100644 vendor/gonum.org/v1/gonum/mat/product.go create mode 100644 vendor/gonum.org/v1/gonum/mat/qr.go create mode 100644 vendor/gonum.org/v1/gonum/mat/shadow.go create mode 100644 vendor/gonum.org/v1/gonum/mat/solve.go create mode 100644 vendor/gonum.org/v1/gonum/mat/svd.go create mode 100644 vendor/gonum.org/v1/gonum/mat/symband.go create mode 100644 vendor/gonum.org/v1/gonum/mat/symmetric.go create mode 100644 vendor/gonum.org/v1/gonum/mat/triangular.go create mode 100644 vendor/gonum.org/v1/gonum/mat/triband.go create mode 100644 vendor/gonum.org/v1/gonum/mat/vector.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/airy.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/beta.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/betainc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/digamma.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/doc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/ell_carlson.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/ell_complete.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/erf.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/gamma_inc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/gamma_inc_inv.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amos.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/d1mach.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/dgamln.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/fortran.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/i1mach.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myabs.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myatan.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mycos.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myexp.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mylog.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymax.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymin.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysin.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysqrt.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mytan.f create mode 100644 
vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/xerror.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zabs.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacai.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacon.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zairy.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zasyi.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesh.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesi.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesj.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesk.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesy.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbinu.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbiry.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbknu.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbuni.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbunk.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zdiv.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zexp.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zkscl.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zlog.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlri.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlt.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zrati.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zs1s2.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zseri.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zshch.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zsqrt.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuchk.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunhj.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni1.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni2.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunik.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk1.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk2.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuoik.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zwrsk.f create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/amos/doc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/cephes.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/doc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/igam.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/igami.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbeta.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbi.go create mode 100644 
vendor/gonum.org/v1/gonum/mathext/internal/cephes/lanczos.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/ndtri.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/polevl.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/unity.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/cephes/zeta.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/gonum/beta.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/gonum/doc.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/internal/gonum/gonum.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/mvgamma.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/roots.go create mode 100644 vendor/gonum.org/v1/gonum/mathext/zeta.go create mode 100644 vendor/gonum.org/v1/gonum/num/dual/doc.go create mode 100644 vendor/gonum.org/v1/gonum/num/dual/dual.go create mode 100644 vendor/gonum.org/v1/gonum/num/dual/dual_fike.go create mode 100644 vendor/gonum.org/v1/gonum/num/dual/dual_hyperbolic.go create mode 100644 vendor/gonum.org/v1/gonum/num/dualcmplx/doc.go create mode 100644 vendor/gonum.org/v1/gonum/num/dualcmplx/dual.go create mode 100644 vendor/gonum.org/v1/gonum/num/dualquat/doc.go create mode 100644 vendor/gonum.org/v1/gonum/num/dualquat/dual.go create mode 100644 vendor/gonum.org/v1/gonum/num/dualquat/dual_fike.go create mode 100644 vendor/gonum.org/v1/gonum/num/hyperdual/doc.go create mode 100644 vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual.go create mode 100644 vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_fike.go create mode 100644 vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_hyperbolic.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/abs.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/conj.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/doc.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/exp.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/inf.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/nan.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/quat.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/trig.go create mode 100644 vendor/gonum.org/v1/gonum/num/quat/util.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/backtracking.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/bfgs.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/bisection.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/cg.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/cmaes.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/convex/lp/convert.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/convex/lp/doc.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/convex/lp/simplex.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/doc.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/errors.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functionconvergence.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functions/doc.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functions/functions.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functions/minsurf.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functions/validate.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/functions/vlse.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/gradientdescent.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/guessandcheck.go create mode 100644 
vendor/gonum.org/v1/gonum/optimize/interfaces.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/lbfgs.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/linesearch.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/listsearch.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/local.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/minimize.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/morethuente.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/neldermead.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/newton.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/printer.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/stepsizers.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/termination.go create mode 100644 vendor/gonum.org/v1/gonum/optimize/types.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut2.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut3.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/barneshut/bounds.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/barneshut/doc.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/barneshut/no_bounds.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/kdtree/doc.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/kdtree/kdtree.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/kdtree/medians.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/kdtree/points.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/r2/doc.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/r2/vector.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/r3/doc.go create mode 100644 vendor/gonum.org/v1/gonum/spatial/r3/vector.go create mode 100644 vendor/gonum.org/v1/gonum/stat/combin/combin.go create mode 100644 vendor/gonum.org/v1/gonum/stat/combin/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmat/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmat/general.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmat/wishart.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/dirichlet.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/general.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/interfaces.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/normal.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/statdist.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/studentst.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distmv/uniform.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/bernoulli.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/beta.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/binomial.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/categorical.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/chisquared.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/constants.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/exponential.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/f.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/gamma.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/general.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/gumbel.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/interfaces.go create mode 100644 
vendor/gonum.org/v1/gonum/stat/distuv/inversegamma.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/laplace.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/lognormal.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/norm.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/pareto.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/poisson.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/statdist.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/studentst.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/triangle.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/uniform.go create mode 100644 vendor/gonum.org/v1/gonum/stat/distuv/weibull.go create mode 100644 vendor/gonum.org/v1/gonum/stat/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/mds/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/mds/mds.go create mode 100644 vendor/gonum.org/v1/gonum/stat/pca_cca.go create mode 100644 vendor/gonum.org/v1/gonum/stat/roc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/samplemv/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/samplemv/halton.go create mode 100644 vendor/gonum.org/v1/gonum/stat/samplemv/metropolishastings.go create mode 100644 vendor/gonum.org/v1/gonum/stat/samplemv/samplemv.go create mode 100644 vendor/gonum.org/v1/gonum/stat/sampleuv/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/sampleuv/sample.go create mode 100644 vendor/gonum.org/v1/gonum/stat/sampleuv/weighted.go create mode 100644 vendor/gonum.org/v1/gonum/stat/sampleuv/withoutreplacement.go create mode 100644 vendor/gonum.org/v1/gonum/stat/spatial/doc.go create mode 100644 vendor/gonum.org/v1/gonum/stat/spatial/spatial.go create mode 100644 vendor/gonum.org/v1/gonum/stat/stat.go create mode 100644 vendor/gonum.org/v1/gonum/stat/statmat.go create mode 100644 vendor/gonum.org/v1/gonum/unit/absorbedradioactivedose.go create mode 100644 vendor/gonum.org/v1/gonum/unit/acceleration.go create mode 100644 vendor/gonum.org/v1/gonum/unit/angle.go create mode 100644 vendor/gonum.org/v1/gonum/unit/area.go create mode 100644 vendor/gonum.org/v1/gonum/unit/capacitance.go create mode 100644 vendor/gonum.org/v1/gonum/unit/charge.go create mode 100644 vendor/gonum.org/v1/gonum/unit/conductance.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/atomicmass.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/avogadro.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/boltzmann.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/defined_types.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/doc.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/electricconstant.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/elementarycharge.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/faraday.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/finestructure.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/generate_constants.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/generate_defined_types.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/gravitational.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/lightspeedinvacuum.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/magneticconstant.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/planck.go create mode 100644 vendor/gonum.org/v1/gonum/unit/constant/standardgravity.go create mode 100644 
vendor/gonum.org/v1/gonum/unit/current.go create mode 100644 vendor/gonum.org/v1/gonum/unit/dimless.go create mode 100644 vendor/gonum.org/v1/gonum/unit/doc.go create mode 100644 vendor/gonum.org/v1/gonum/unit/energy.go create mode 100644 vendor/gonum.org/v1/gonum/unit/equivalentradioactivedose.go create mode 100644 vendor/gonum.org/v1/gonum/unit/force.go create mode 100644 vendor/gonum.org/v1/gonum/unit/frequency.go create mode 100644 vendor/gonum.org/v1/gonum/unit/generate_unit.go create mode 100644 vendor/gonum.org/v1/gonum/unit/inductance.go create mode 100644 vendor/gonum.org/v1/gonum/unit/length.go create mode 100644 vendor/gonum.org/v1/gonum/unit/luminousintensity.go create mode 100644 vendor/gonum.org/v1/gonum/unit/magneticflux.go create mode 100644 vendor/gonum.org/v1/gonum/unit/magneticfluxdensity.go create mode 100644 vendor/gonum.org/v1/gonum/unit/mass.go create mode 100644 vendor/gonum.org/v1/gonum/unit/mole.go create mode 100644 vendor/gonum.org/v1/gonum/unit/power.go create mode 100644 vendor/gonum.org/v1/gonum/unit/prefixes.go create mode 100644 vendor/gonum.org/v1/gonum/unit/pressure.go create mode 100644 vendor/gonum.org/v1/gonum/unit/radioactivity.go create mode 100644 vendor/gonum.org/v1/gonum/unit/resistance.go create mode 100644 vendor/gonum.org/v1/gonum/unit/temperature.go create mode 100644 vendor/gonum.org/v1/gonum/unit/time.go create mode 100644 vendor/gonum.org/v1/gonum/unit/torque.go create mode 100644 vendor/gonum.org/v1/gonum/unit/unittype.go create mode 100644 vendor/gonum.org/v1/gonum/unit/velocity.go create mode 100644 vendor/gonum.org/v1/gonum/unit/voltage.go create mode 100644 vendor/gonum.org/v1/gonum/unit/volume.go create mode 100644 vendor/gonum.org/v1/gonum/version.go create mode 100644 vendor/google.golang.org/genproto/LICENSE create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_asset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_type_infos.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/asset_types.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/bidding.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/click_location.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criteria.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criterion_category_availability.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/custom_parameter.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/dates.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/explorer_auto_optimizer_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/extensions.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/feed_common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/final_app_url.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/frequency_cap.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/keyword_plan_common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/matching_function.pb.go create 
mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/metrics.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/policy.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/real_time_bidding_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/segments.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/simulation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/tag_snippet.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/targeting_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/text_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/url_collection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/user_lists.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/value.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/access_reason.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_customizer_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_rotation_mode.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_approval_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_network_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_serving_optimization_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_strength.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_sub_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/age_range_type.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_app_store.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_payment_model_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_store.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_url_operating_system_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/asset_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/attribution_model.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bid_modifier_source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/billing_setup_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/brand_safety_suitability.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_delivery_method.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_period.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_conversion_reporting_state.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/callout_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_experiment_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_serving_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_shared_set_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_operation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_resource_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/click_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/content_label_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_category.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_counting_type.pb.go create 
mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_adjustment_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_attribution_event_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_lag_bucket.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_channel_availability_mode.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_locale_availability_mode.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_system_serving_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_member_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_match_upload_key_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/data_driven_model_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/day_of_week.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/device.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_ad_format_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_upload_product_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/dsa_page_feed_criterion_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/education_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_setting_device.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/external_conversion_source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_attribute_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_approval_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_device.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_validation_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_link_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_criterion_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_origin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/flight_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_event_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_level.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_time_unit.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/gender_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_target_constant_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_restriction.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_category.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_data_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_date_selection_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/income_range_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_event_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/job_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_match_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_competition_level.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_forecast_interval.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_network.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/label_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/legacy_app_install_ad_app_store.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_custom_attribute_index.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_group_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/local_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_extension_targeting_criterion_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_group_radius_units.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/manager_link_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_context_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/media_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/merchant_center_link_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/message_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mime_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/minute_of_hour.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mobile_device_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/month_of_year.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mutate_job_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/negative_geo_target_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/operating_system_version_operator_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/page_one_promoted_strategy_goal.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/parental_status_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/payment_mode.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placeholder_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placement_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_approval_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_review_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_entry_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/positive_geo_target_type.pb.go create mode 
100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/preferred_content_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_qualifier.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_unit.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_level.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel_exclusivity.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_condition.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_type_level.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_discount_modifier.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_occasion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/proximity_radius_units.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/quality_score_bucket.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/real_estate_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/recommendation_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_engine_results_page_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_match_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_targeting_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/served_asset_field_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_modification_method.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/sitelink_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/slot.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/spending_limit_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/structured_snippet_placeholder_field.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/system_managed_entity_source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_impression_share_location.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/targeting_dimension.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/time_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_page_format.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/travel_placeholder_field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_interest_taxonomy_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_access_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_closing_reason.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_combined_rule_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_crm_data_source_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_date_rule_item_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_logical_rule_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_membership_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_number_rule_item_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_prepopulation_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_rule_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_size_range.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_string_rule_item_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_display_url_mode.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_text.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operand.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/account_budget_proposal_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_customizer_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_ad_error.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_bid_modifier_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_criterion_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_feed_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_parameter_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_sharing_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/adx_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/asset_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authentication_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authorization_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_strategy_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/billing_setup_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_budget_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_criterion_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_feed_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_shared_set_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/change_status_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/collection_size_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/context_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_action_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_adjustment_upload_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_upload_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/country_code_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/criterion_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/custom_interest_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_client_link_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_feed_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_manager_link_error.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/database_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_range_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/distinct_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/enum_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/errors.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_feed_item_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_setting_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_attribute_reference_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_target_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_validation_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_mapping_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_mask_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_parsing_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/geo_target_constant_suggestion_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/header_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/id_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/image_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/internal_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_ad_group_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_campaign_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_idea_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_keyword_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/label_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/language_code_error.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/list_operation_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_bundle_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_file_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_upload_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/multiplier_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_job_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/new_resource_creation_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/not_empty_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/null_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operation_access_denied_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operator_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/partial_failure_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_finding_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_validation_parameter_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_violation_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/query_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/quota_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/range_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/recommendation_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/region_code_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/request_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_access_denied_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_count_limit_exceeded_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/setting_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_criterion_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_set_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/size_limit_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_format_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_length_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/url_field_error.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/user_list_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/youtube_video_registration_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget_proposal.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_audience_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_bid_modifier.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_simulation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_extension_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_feed.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_simulation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_parameter.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_schedule_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/age_range_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/asset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/bidding_strategy.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/billing_setup.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_audience_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_bid_modifier.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_budget.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion_simulation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_extension_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_feed.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_shared_set.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/carrier_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/change_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/click_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/conversion_action.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/custom_interest.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client_link.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_extension_setting.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_feed.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_manager_link.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_negative_criterion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/detail_placement_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/display_keyword_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/domain_category.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/expanded_landing_page_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/extension_feed_item.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item_target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_mapping.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_placeholder_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/gender_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geo_target_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geographic_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/google_ads_field.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/group_placement_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_group_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_performance_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_ad_group.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_campaign.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_keyword.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_negative_keyword.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/landing_page_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/language_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/location_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/managed_placement_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/media_file.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/merchant_center_link.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_app_category_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_device_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mutate_job.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/operating_system_version_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/paid_organic_search_term_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/parental_status_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/payments_account.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/product_bidding_category_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/product_group_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/recommendation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/remarketing_action.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/search_term_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/shared_criterion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/shared_set.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/shopping_performance_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/topic_constant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/topic_view.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_interest.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_list.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/video.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_proposal_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_audience_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_bid_modifier_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_simulation_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_extension_setting_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_feed_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_simulation_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_parameter_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_schedule_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/age_range_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/asset_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/bidding_strategy_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/billing_setup_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_audience_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_bid_modifier_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_budget_service.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_simulation_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_extension_setting_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_feed_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_shared_set_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/carrier_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/change_status_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/click_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_action_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_adjustment_upload_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_upload_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/custom_interest_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_link_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_extension_setting_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_feed_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_manager_link_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_negative_criterion_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/detail_placement_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/display_keyword_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/domain_category_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/expanded_landing_page_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/extension_feed_item_service.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_target_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_mapping_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_placeholder_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/gender_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geo_target_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geographic_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_field_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/group_placement_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_group_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_performance_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_ad_group_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_campaign_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_idea_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_keyword_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_negative_keyword_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/label_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/landing_page_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/language_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/location_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/managed_placement_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/media_file_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/merchant_center_link_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_app_category_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_device_constant_service.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mutate_job_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/operating_system_version_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/paid_organic_search_term_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/parental_status_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/payments_account_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_bidding_category_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_group_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/recommendation_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/remarketing_action_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/search_term_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_criterion_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_set_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shopping_performance_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_constant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_view_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_interest_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_list_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/video_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/annotations/client.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/annotations/field_behavior.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/annotations/resource.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/authorization_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/configchange/config_change.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/distribution/distribution.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/experimental.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/cel_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/checked.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/conformance_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/eval.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/explain.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/syntax.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/value.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/decl.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/eval.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/expr.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/value.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/label/label.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/launch_stage.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/metric/metric.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/monitoredres/monitored_resource.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/auth.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/backend.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/billing.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/consumer.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/control.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/documentation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/endpoint.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/log.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/logging.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/monitoring.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/quota.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/source_info.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/system_parameter.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/serviceconfig/usage.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/check_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/distribution.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/log_entry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/metric_value.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/operation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/quota_controller.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/service_controller.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/resources.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/servicemanager.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/legacy/audit_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/logging/v1/request_log.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/app_yaml.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/appengine.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/application.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/audit_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/deploy.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/instance.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/location.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/operation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/appengine/v1/version.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1/embedded_assistant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2/embedded_assistant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service_messages.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_instance_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_table_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/instance.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/table.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service_messages.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/v2/bigtable.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bigtable/v2/data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/bytestream/bytestream.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/asset_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/assets.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/asset_service.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/assets.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/audit/audit_log.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/annotation_payload.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/classification.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/column_spec.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_items.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_stats.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_types.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/dataset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/image.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/io.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model_evaluation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/prediction_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/ranges.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/regression.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/table_spec.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/tables.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/temporal.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_extraction.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_segment.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_sentiment.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/translation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/video.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/datatransfer.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/transfer.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1/audit_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/avro.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/read_options.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/storage.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/table_reference.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model_reference.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/standard_sql.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/billing/v1/cloud_billing.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/resources.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/datacatalog.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/schema.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/table_spec.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/timestamps.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation_spec_set.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/data_labeling_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/dataset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/human_annotation_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/instruction.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/clusters.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/jobs.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/workflow_templates.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/clusters.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/jobs.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/shared.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/workflow_templates.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/agent.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/audio_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/entity_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/intent.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session.pb.go 
create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session_entity_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/webhook.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/agent.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/audio_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/document.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/entity_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/intent.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/knowledge_base.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session_entity_type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/webhook.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/functions.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/device_manager.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/resources.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/kms/v1/resources.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/kms/v1/service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/language/v1/language_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta1/language_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta2/language_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/location/locations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/job_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/model_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/operation_metadata.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/prediction_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/project_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/oslogin/common/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1/oslogin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha/oslogin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1beta/oslogin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1/phishingprotection.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/redis/v1/cloud_redis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/redis/v1beta1/cloud_redis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/resourcemanager/v2/folders.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/resources.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/runtimeconfig.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/cloudscheduler.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/job.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/cloudscheduler.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/job.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/asset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/finding.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/organization_settings.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/run_asset_discovery_response.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/security_marks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/securitycenter_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/asset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/finding.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/organization_settings.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/security_marks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/securitycenter_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/speech/v1/cloud_speech.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1/cloud_speech.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/support/common/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/support/v1alpha1/cloud_support.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/batch.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/common.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/completion_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/filters.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/histogram.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/cloudtasks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/queue.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/task.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/cloudtasks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/queue.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/task.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/cloudtasks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/queue.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/task.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1/cloud_tts.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1/cloud_tts.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/translate/v3beta1/translation_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1/video_intelligence.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1/video_intelligence.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2/video_intelligence.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1/video_intelligence.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1/video_intelligence.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1/video_intelligence.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/image_annotator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/text_annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/web_detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/image_annotator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/text_annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/web_detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/image_annotator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/text_annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/web_detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/image_annotator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/text_annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/web_detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/geometry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/image_annotator.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/text_annotation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/web_detection.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1/webrisk.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/crawled_url.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_addon.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_type_stats.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_run.pb.go create mode 
100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/web_security_scanner.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/crawled_url.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_addon.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_type_stats.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config_error.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_error_trace.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_warning_trace.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/web_security_scanner.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/container/v1/cluster_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/container/v1alpha1/cluster_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/container/v1beta1/cluster_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/datastore_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/index.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/admin/v1beta1/datastore_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1/datastore.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1/entity.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1/query.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/datastore.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/entity.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/query.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_events.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/build/v1/publish_build_event.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/cloudbuild/v1/cloudbuild.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/controller.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/debugger.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_group_service.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_stats_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/report_errors_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2/profiler.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v1/trace.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/trace.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/tracing.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1/containeranalysis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/bill_of_materials.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/containeranalysis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/image_basis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/package_vulnerability.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/provenance.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/source_context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation/attestation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build/build.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/containeranalysis.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment/deployment.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery/discovery.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas/grafeas.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image/image.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package/package.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance/provenance.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source/source.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test/remote_execution.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/bots.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/command.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/tasks.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/worker.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/action.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configuration.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configured_target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage_summary.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file_set.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/invocation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_download.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_file_download.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_upload.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/target.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/test_suite.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/source/v1/source_context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/devtools/sourcerepo/v1/sourcerepo.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/example/library/v1/library.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1/connection_api.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/firestore_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/index.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/location.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/operation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/firestore_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/index.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/location.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/field.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/firestore_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/index.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/operation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1/document.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1/firestore.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1/query.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/firestore/v1/write.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/document.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/firestore.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/query.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/write.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/home/graph/v1/device.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/home/graph/v1/homegraph.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/admin/v1/iam.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/iamcredentials.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/v1/iam_policy.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/v1/logging/audit_data.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/iam/v1/policy.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/type/http_request.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/type/log_severity.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/v2/log_entry.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/v2/logging.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/v2/logging_config.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/logging/v2/logging_metrics.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/longrunning/operations.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/common.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/dropped_labels.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/monitoring/v3/group.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/group_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/mutation_record.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/span_context.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime_service.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/dlp.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/storage.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/pubsub/v1/pubsub.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/pubsub/v1beta2/pubsub.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/rpc/code/code.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/rpc/errdetails/error_details.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/rpc/status/status.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/admin/database/v1/spanner_database_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/admin/instance/v1/spanner_instance_admin.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/keys.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/mutation.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/query_plan.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/result_set.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/spanner.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/transaction.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/spanner/v1/type.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer_types.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/resources.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/rpcmessages.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/streetview_publish.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/calendarperiod/calendar_period.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/color/color.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/date/date.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/dayofweek/dayofweek.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/expr/expr.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/fraction/fraction.pb.go create mode 100644 
vendor/google.golang.org/genproto/googleapis/type/latlng/latlng.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/money/money.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/postaladdress/postal_address.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/quaternion/quaternion.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/type/timeofday/timeofday.pb.go create mode 100644 vendor/google.golang.org/genproto/googleapis/watcher/v1/watch.pb.go create mode 100644 vendor/google.golang.org/genproto/protobuf/api/api.pb.go create mode 100644 vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go create mode 100644 vendor/google.golang.org/genproto/protobuf/ptype/type.pb.go create mode 100644 vendor/google.golang.org/genproto/protobuf/source_context/source_context.pb.go create mode 100644 vendor/google.golang.org/genproto/regen.go create mode 100644 vendor/google.golang.org/genproto/tools.go create mode 100644 vendor/google.golang.org/grpc/AUTHORS create mode 100644 vendor/google.golang.org/grpc/LICENSE create mode 100644 vendor/google.golang.org/grpc/backoff.go create mode 100644 vendor/google.golang.org/grpc/balancer.go create mode 100644 vendor/google.golang.org/grpc/balancer/balancer.go create mode 100644 vendor/google.golang.org/grpc/balancer/base/balancer.go create mode 100644 vendor/google.golang.org/grpc/balancer/base/base.go create mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/grpc_lb_v1/load_balancer.pb.go create mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go create mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/grpclb_picker.go create mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/grpclb_remote_balancer.go create mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/grpclb_util.go create mode 100644 vendor/google.golang.org/grpc/balancer/roundrobin/roundrobin.go create mode 100644 vendor/google.golang.org/grpc/balancer/xds/edsbalancer/balancergroup.go create mode 100644 vendor/google.golang.org/grpc/balancer/xds/edsbalancer/edsbalancer.go create mode 100644 vendor/google.golang.org/grpc/balancer/xds/edsbalancer/util.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/auth/cert/cert.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds/cds.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/circuit_breaker/circuit_breaker.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/outlier_detection/outlier_detection.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address/address.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base/base.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source/config_source.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/grpc_service/grpc_service.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check/health_check.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/protocol/protocol.pb.go create mode 100755 
vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery/discovery.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds/eds.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/endpoint/endpoint/endpoint.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/service/discovery/v2/ads/ads.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent/percent.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/range/range.pb.go create mode 100755 vendor/google.golang.org/grpc/balancer/xds/internal/proto/validate/validate.pb.go create mode 100644 vendor/google.golang.org/grpc/balancer/xds/xds.go create mode 100644 vendor/google.golang.org/grpc/balancer/xds/xds_client.go create mode 100644 vendor/google.golang.org/grpc/balancer_conn_wrappers.go create mode 100644 vendor/google.golang.org/grpc/balancer_v1_wrapper.go create mode 100644 vendor/google.golang.org/grpc/benchmark/benchmain/main.go create mode 100644 vendor/google.golang.org/grpc/benchmark/benchmark.go create mode 100644 vendor/google.golang.org/grpc/benchmark/benchresult/main.go create mode 100644 vendor/google.golang.org/grpc/benchmark/client/main.go create mode 100644 vendor/google.golang.org/grpc/benchmark/grpc_testing/control.pb.go create mode 100644 vendor/google.golang.org/grpc/benchmark/grpc_testing/messages.pb.go create mode 100644 vendor/google.golang.org/grpc/benchmark/grpc_testing/payloads.pb.go create mode 100644 vendor/google.golang.org/grpc/benchmark/grpc_testing/services.pb.go create mode 100644 vendor/google.golang.org/grpc/benchmark/grpc_testing/stats.pb.go create mode 100644 vendor/google.golang.org/grpc/benchmark/latency/latency.go create mode 100644 vendor/google.golang.org/grpc/benchmark/server/main.go create mode 100644 vendor/google.golang.org/grpc/benchmark/stats/histogram.go create mode 100644 vendor/google.golang.org/grpc/benchmark/stats/stats.go create mode 100644 vendor/google.golang.org/grpc/benchmark/stats/util.go create mode 100644 vendor/google.golang.org/grpc/benchmark/worker/benchmark_client.go create mode 100644 vendor/google.golang.org/grpc/benchmark/worker/benchmark_server.go create mode 100644 vendor/google.golang.org/grpc/benchmark/worker/main.go create mode 100644 vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go create mode 100644 vendor/google.golang.org/grpc/call.go create mode 100644 vendor/google.golang.org/grpc/channelz/grpc_channelz_v1/channelz.pb.go create mode 100644 vendor/google.golang.org/grpc/channelz/service/func_linux.go create mode 100644 vendor/google.golang.org/grpc/channelz/service/func_nonlinux.go create mode 100644 vendor/google.golang.org/grpc/channelz/service/service.go create mode 100644 vendor/google.golang.org/grpc/clientconn.go create mode 100644 vendor/google.golang.org/grpc/codec.go create mode 100644 vendor/google.golang.org/grpc/codes/code_string.go create mode 100644 vendor/google.golang.org/grpc/codes/codes.go create mode 100644 vendor/google.golang.org/grpc/connectivity/connectivity.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/alts.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/authinfo/authinfo.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/common.go create mode 100644 
vendor/google.golang.org/grpc/credentials/alts/internal/conn/aeadrekey.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcm.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcmrekey.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/common.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/counter.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/record.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/conn/utils.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/handshaker.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/service/service.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/altscontext.pb.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/handshaker.pb.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/transport_security_common.pb.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/internal/testutil/testutil.go create mode 100644 vendor/google.golang.org/grpc/credentials/alts/utils.go create mode 100644 vendor/google.golang.org/grpc/credentials/credentials.go create mode 100644 vendor/google.golang.org/grpc/credentials/google/google.go create mode 100644 vendor/google.golang.org/grpc/credentials/internal/syscallconn.go create mode 100644 vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go create mode 100644 vendor/google.golang.org/grpc/credentials/oauth/oauth.go create mode 100644 vendor/google.golang.org/grpc/credentials/tls13.go create mode 100644 vendor/google.golang.org/grpc/dialoptions.go create mode 100644 vendor/google.golang.org/grpc/doc.go create mode 100644 vendor/google.golang.org/grpc/encoding/encoding.go create mode 100644 vendor/google.golang.org/grpc/encoding/gzip/gzip.go create mode 100644 vendor/google.golang.org/grpc/encoding/proto/proto.go create mode 100644 vendor/google.golang.org/grpc/examples/features/authentication/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/authentication/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/cancellation/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/cancellation/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/compression/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/compression/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/deadline/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/deadline/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/debugging/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/debugging/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/encryption/ALTS/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/encryption/ALTS/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/encryption/TLS/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/encryption/TLS/server/main.go create mode 100644 
vendor/google.golang.org/grpc/examples/features/errors/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/errors/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/interceptor/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/interceptor/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/keepalive/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/keepalive/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/load_balancing/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/load_balancing/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/metadata/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/metadata/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/multiplex/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/multiplex/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/name_resolving/client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/name_resolving/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/proto/doc.go create mode 100644 vendor/google.golang.org/grpc/examples/features/proto/echo/echo.pb.go create mode 100644 vendor/google.golang.org/grpc/examples/features/reflection/server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/features/wait_for_ready/main.go create mode 100644 vendor/google.golang.org/grpc/examples/helloworld/greeter_client/main.go create mode 100644 vendor/google.golang.org/grpc/examples/helloworld/greeter_server/main.go create mode 100644 vendor/google.golang.org/grpc/examples/helloworld/helloworld/helloworld.pb.go create mode 100644 vendor/google.golang.org/grpc/examples/helloworld/mock_helloworld/hw_mock.go create mode 100644 vendor/google.golang.org/grpc/examples/route_guide/client/client.go create mode 100644 vendor/google.golang.org/grpc/examples/route_guide/mock_routeguide/rg_mock.go create mode 100644 vendor/google.golang.org/grpc/examples/route_guide/routeguide/route_guide.pb.go create mode 100644 vendor/google.golang.org/grpc/examples/route_guide/server/server.go create mode 100644 vendor/google.golang.org/grpc/grpclog/glogger/glogger.go create mode 100644 vendor/google.golang.org/grpc/grpclog/grpclog.go create mode 100644 vendor/google.golang.org/grpc/grpclog/logger.go create mode 100644 vendor/google.golang.org/grpc/grpclog/loggerv2.go create mode 100644 vendor/google.golang.org/grpc/health/client.go create mode 100644 vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go create mode 100644 vendor/google.golang.org/grpc/health/server.go create mode 100644 vendor/google.golang.org/grpc/interceptor.go create mode 100644 vendor/google.golang.org/grpc/internal/backoff/backoff.go create mode 100644 vendor/google.golang.org/grpc/internal/balancerload/load.go create mode 100644 vendor/google.golang.org/grpc/internal/balancerload/orca/orca.go create mode 100644 vendor/google.golang.org/grpc/internal/balancerload/orca/orca_v1/orca.pb.go create mode 100644 vendor/google.golang.org/grpc/internal/binarylog/binarylog.go create mode 100644 vendor/google.golang.org/grpc/internal/binarylog/binarylog_testutil.go create mode 100644 vendor/google.golang.org/grpc/internal/binarylog/env_config.go create mode 100644 
vendor/google.golang.org/grpc/internal/binarylog/method_logger.go create mode 100644 vendor/google.golang.org/grpc/internal/binarylog/sink.go create mode 100644 vendor/google.golang.org/grpc/internal/binarylog/util.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/funcs.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/types.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/types_linux.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/types_nonlinux.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/util_linux.go create mode 100644 vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go create mode 100644 vendor/google.golang.org/grpc/internal/envconfig/envconfig.go create mode 100644 vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go create mode 100644 vendor/google.golang.org/grpc/internal/grpcsync/event.go create mode 100644 vendor/google.golang.org/grpc/internal/grpctest/grpctest.go create mode 100644 vendor/google.golang.org/grpc/internal/internal.go create mode 100644 vendor/google.golang.org/grpc/internal/leakcheck/leakcheck.go create mode 100644 vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go create mode 100644 vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go create mode 100644 vendor/google.golang.org/grpc/internal/testutils/pipe_listener.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/bdp_estimator.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/controlbuf.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/defaults.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/flowcontrol.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/handler_server.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/http2_client.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/http2_server.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/http_util.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/log.go create mode 100644 vendor/google.golang.org/grpc/internal/transport/transport.go create mode 100644 vendor/google.golang.org/grpc/interop/alts/client/client.go create mode 100644 vendor/google.golang.org/grpc/interop/alts/server/server.go create mode 100644 vendor/google.golang.org/grpc/interop/client/client.go create mode 100644 vendor/google.golang.org/grpc/interop/fake_grpclb/fake_grpclb.go create mode 100644 vendor/google.golang.org/grpc/interop/grpc_testing/test.pb.go create mode 100644 vendor/google.golang.org/grpc/interop/http2/negative_http2_client.go create mode 100644 vendor/google.golang.org/grpc/interop/server/server.go create mode 100644 vendor/google.golang.org/grpc/interop/test_utils.go create mode 100644 vendor/google.golang.org/grpc/keepalive/keepalive.go create mode 100644 vendor/google.golang.org/grpc/metadata/metadata.go create mode 100644 vendor/google.golang.org/grpc/naming/dns_resolver.go create mode 100644 vendor/google.golang.org/grpc/naming/naming.go create mode 100644 vendor/google.golang.org/grpc/peer/peer.go create mode 100644 vendor/google.golang.org/grpc/picker_wrapper.go create mode 100644 vendor/google.golang.org/grpc/pickfirst.go create mode 100644 vendor/google.golang.org/grpc/proxy.go create mode 100644 vendor/google.golang.org/grpc/reflection/grpc_reflection_v1alpha/reflection.pb.go create mode 100644 
vendor/google.golang.org/grpc/reflection/grpc_testing/proto2.pb.go create mode 100644 vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext.pb.go create mode 100644 vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext2.pb.go create mode 100644 vendor/google.golang.org/grpc/reflection/grpc_testing/test.pb.go create mode 100644 vendor/google.golang.org/grpc/reflection/grpc_testingv3/testv3.pb.go create mode 100644 vendor/google.golang.org/grpc/reflection/serverreflection.go create mode 100644 vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go create mode 100644 vendor/google.golang.org/grpc/resolver/manual/manual.go create mode 100644 vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go create mode 100644 vendor/google.golang.org/grpc/resolver/resolver.go create mode 100644 vendor/google.golang.org/grpc/resolver_conn_wrapper.go create mode 100644 vendor/google.golang.org/grpc/rpc_util.go create mode 100644 vendor/google.golang.org/grpc/server.go create mode 100644 vendor/google.golang.org/grpc/service_config.go create mode 100644 vendor/google.golang.org/grpc/stats/grpc_testing/test.pb.go create mode 100644 vendor/google.golang.org/grpc/stats/handlers.go create mode 100644 vendor/google.golang.org/grpc/stats/stats.go create mode 100644 vendor/google.golang.org/grpc/status/status.go create mode 100644 vendor/google.golang.org/grpc/stream.go create mode 100644 vendor/google.golang.org/grpc/stress/client/main.go create mode 100644 vendor/google.golang.org/grpc/stress/grpc_testing/metrics.pb.go create mode 100644 vendor/google.golang.org/grpc/stress/metrics_client/main.go create mode 100644 vendor/google.golang.org/grpc/tap/tap.go create mode 100644 vendor/google.golang.org/grpc/test/bufconn/bufconn.go create mode 100644 vendor/google.golang.org/grpc/test/codec_perf/perf.pb.go create mode 100644 vendor/google.golang.org/grpc/test/go_vet/vet.go create mode 100644 vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go create mode 100644 vendor/google.golang.org/grpc/test/race.go create mode 100644 vendor/google.golang.org/grpc/test/rawConnWrapper.go create mode 100644 vendor/google.golang.org/grpc/test/servertester.go create mode 100644 vendor/google.golang.org/grpc/test/tools/tools.go create mode 100644 vendor/google.golang.org/grpc/testdata/testdata.go create mode 100644 vendor/google.golang.org/grpc/trace.go create mode 100644 vendor/google.golang.org/grpc/version.go delete mode 100644 vendor/k8s.io/code-generator/go.mod delete mode 100644 vendor/k8s.io/code-generator/go.sum diff --git a/.gitignore b/.gitignore index a54931a..ae01c86 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,5 @@ -go.mod -go.sum +/go.mod +/go.sum # Temporary Build Files build/_output @@ -78,3 +78,6 @@ tags .vscode/* .history # End of https://www.gitignore.io/api/go,vim,emacs,visualstudiocode + +### Jetbrains ### +/.idea/ diff --git a/Gopkg.lock b/Gopkg.lock index 50dc498..ea9c3e0 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -2,379 +2,463 @@ [[projects]] - digest = "1:fd1a7ca82682444a45424f6af37b1e0373f632e5a303441b111558ae8656a9b7" name = "cloud.google.com/go" packages = ["compute/metadata"] - pruneopts = "NT" revision = "0ebda48a7f143b1cce9eb37a8c1106ac762a3430" version = "v0.34.0" [[projects]] - digest = "1:d8ebbd207f3d3266d4423ce4860c9f3794956306ded6c7ba312ecc69cdfbf04c" name = "github.com/PuerkitoBio/purell" packages = ["."] - pruneopts = "NT" revision = "0bcb03f4b4d0a9428594752bd2a3b9aa0a9d4bd4" version = "v1.1.0" [[projects]] branch = "master" - digest = 
"1:8098cd40cd09879efbf12e33bcd51ead4a66006ac802cd563a66c4f3373b9727" name = "github.com/PuerkitoBio/urlesc" packages = ["."] - pruneopts = "NT" revision = "de5bf2ad457846296e2031421a34e2568e304e35" [[projects]] branch = "master" - digest = "1:c819830f4f5ef85874a90ac3cbcc96cd322c715f5c96fbe4722eacd3dafbaa07" name = "github.com/beorn7/perks" packages = ["quantile"] - pruneopts = "NT" revision = "3a771d992973f24aa725d07868b467d1ddfceafb" [[projects]] - digest = "1:4b8b5811da6970495e04d1f4e98bb89518cc3cfc3b3f456bdb876ed7b6c74049" + name = "github.com/bmatcuk/doublestar" + packages = ["."] + revision = "85a78806aa1b4707d1dbace9be592cf1ece91ab3" + version = "v1.1.1" + +[[projects]] name = "github.com/davecgh/go-spew" packages = ["spew"] - pruneopts = "NT" revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" version = "v1.1.1" [[projects]] - digest = "1:e6f888d4be8ec0f05c50e2aba83da4948b58045dee54d03be81fa74ea673302c" name = "github.com/emicklei/go-restful" packages = [ ".", - "log", + "log" ] - pruneopts = "NT" revision = "3eb9738c1697594ea6e71a7156a9bb32ed216cf0" version = "v2.8.0" [[projects]] - digest = "1:81466b4218bf6adddac2572a30ac733a9255919bc2f470b4827a317bd4ee1756" name = "github.com/ghodss/yaml" packages = ["."] - pruneopts = "NT" revision = "0ca9ea5df5451ffdf184b4428c902747c2c11cd7" version = "v1.0.0" [[projects]] branch = "master" - digest = "1:d421af4c4fe51d399667d573982d663fe1fa67020a88d3ae43466ebfe8e2b5c9" name = "github.com/go-logr/logr" packages = ["."] - pruneopts = "NT" revision = "9fb12b3b21c5415d16ac18dc5cd42c1cfdd40c4e" [[projects]] - digest = "1:340497a512995aa69c0add901d79a2096b3449d35a44a6f1f1115091a9f8c687" name = "github.com/go-logr/zapr" packages = ["."] - pruneopts = "NT" revision = "7536572e8d55209135cd5e7ccf7fce43dca217ab" version = "v0.1.0" [[projects]] - digest = "1:260f7ebefc63024c8dfe2c9f1a2935a89fa4213637a1f522f592f80c001cc441" name = "github.com/go-openapi/jsonpointer" packages = ["."] - pruneopts = "NT" revision = "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" version = "v0.18.0" [[projects]] - digest = "1:98abd61947ff5c7c6fcfec5473d02a4821ed3a2dd99a4fbfdb7925b0dd745546" name = "github.com/go-openapi/jsonreference" packages = ["."] - pruneopts = "NT" revision = "8483a886a90412cd6858df4ea3483dce9c8e35a3" version = "v0.18.0" [[projects]] - digest = "1:4da4ea0a664ba528965683d350f602d0f11464e6bb2e17aad0914723bc25d163" name = "github.com/go-openapi/spec" packages = ["."] - pruneopts = "NT" revision = "5b6cdde3200976e3ecceb2868706ee39b6aff3e4" version = "v0.18.0" [[projects]] - digest = "1:dc0f590770e5a6c70ea086232324f7b7dc4857c60eca63ab8ff78e0a5cfcdbf3" name = "github.com/go-openapi/swag" packages = ["."] - pruneopts = "NT" revision = "1d29f06aebd59ccdf11ae04aa0334ded96e2d909" version = "v0.18.0" [[projects]] - digest = "1:932970e69f16e127aa0653b8263ae588cd127fa53273e19ba44332902c9826f2" name = "github.com/gogo/protobuf" packages = [ "proto", - "sortkeys", + "sortkeys" ] - pruneopts = "NT" revision = "4cbf7e384e768b4e01799441fdf2a706a5635ae7" version = "v1.2.0" [[projects]] branch = "master" - digest = "1:e2b86e41f3d669fc36b50d31d32d22c8ac656c75aa5ea89717ce7177e134ff2a" name = "github.com/golang/glog" packages = ["."] - pruneopts = "NT" revision = "23def4e6c14b4da8ac2ed8007337bc5eb5007998" [[projects]] branch = "master" - digest = "1:aaedc94233e56ed57cdb04e3abfacc85c90c14082b62e3cdbe8ea72fc06ee035" name = "github.com/golang/groupcache" packages = ["lru"] - pruneopts = "NT" revision = "c65c006176ff7ff98bb916961c7abbc6b0afc0aa" [[projects]] - digest = 
"1:d7cb4458ea8782e6efacd8f4940796ec559c90833509c436f40c4085b98156dd" name = "github.com/golang/protobuf" packages = [ "proto", "ptypes", "ptypes/any", "ptypes/duration", - "ptypes/timestamp", + "ptypes/timestamp" ] - pruneopts = "NT" revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5" version = "v1.2.0" [[projects]] branch = "master" - digest = "1:05f95ffdfcf651bdb0f05b40b69e7f5663047f8da75c72d58728acb59b5cc107" name = "github.com/google/btree" packages = ["."] - pruneopts = "NT" revision = "4030bb1f1f0c35b30ca7009e9ebd06849dd45306" [[projects]] branch = "master" - digest = "1:52c5834e2bebac9030c97cc0798ac11c3aa8a39f098aeb419f142533da6cd3cc" name = "github.com/google/gofuzz" packages = ["."] - pruneopts = "NT" revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1" [[projects]] - digest = "1:56a1f3949ebb7fa22fa6b4e4ac0fe0f77cc4faee5b57413e6fa9199a8458faf1" name = "github.com/google/uuid" packages = ["."] - pruneopts = "NT" revision = "9b3b1e0f5f99ae461456d768e7d301a7acdaa2d8" version = "v1.1.0" [[projects]] - digest = "1:289332c13b80edfefc88397cce5266c16845dcf204fa2f6ac7e464ee4c7f6e96" name = "github.com/googleapis/gnostic" packages = [ "OpenAPIv2", "compiler", - "extensions", + "extensions" ] - pruneopts = "NT" revision = "7c663266750e7d82587642f65e60bc4083f1f84e" version = "v0.2.0" [[projects]] branch = "master" - digest = "1:97972f03fbf34ec4247ddc78ddb681389c468c020492aa32b109744a54fc0c14" name = "github.com/gregjones/httpcache" packages = [ ".", - "diskcache", + "diskcache" ] - pruneopts = "NT" revision = "c63ab54fda8f77302f8d414e19933f2b6026a089" [[projects]] - digest = "1:b42cde0e1f3c816dd57f57f7bbcf05ca40263ad96f168714c130c611fc0856a6" + name = "github.com/hashicorp/go-hclog" + packages = ["."] + revision = "d2f17ae9f9297cad64f18f15d537397e8783627b" + version = "v0.9.0" + +[[projects]] + name = "github.com/hashicorp/go-plugin" + packages = [ + ".", + "internal/plugin" + ] + revision = "52e1c4730856c1438ced7597c9b5c585a7bd06a2" + version = "v1.0.0" + +[[projects]] name = "github.com/hashicorp/golang-lru" packages = [ ".", - "simplelru", + "simplelru" ] - pruneopts = "NT" revision = "20f1fb78b0740ba8c3cb143a61e86ba5c8669768" version = "v0.5.0" [[projects]] - digest = "1:9a52adf44086cead3b384e5d0dbf7a1c1cce65e67552ee3383a8561c42a18cd3" + branch = "master" + name = "github.com/hashicorp/yamux" + packages = ["."] + revision = "2f1d1f20f75d5404f53b9edf6b53ed5505508675" + +[[projects]] name = "github.com/imdario/mergo" packages = ["."] - pruneopts = "NT" revision = "9f23e2d6bd2a77f959b2bf6acdbefd708a83a4a4" version = "v0.3.6" [[projects]] - digest = "1:1d39c063244ad17c4b18e8da1551163b6ffb52bd1640a49a8ec5c3b7bf4dbd5d" + name = "github.com/inconshreveable/mousetrap" + packages = ["."] + revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" + version = "v1.0" + +[[projects]] name = "github.com/json-iterator/go" packages = ["."] - pruneopts = "NT" revision = "1624edc4454b8682399def8740d46db5e4362ba4" version = "v1.1.5" +[[projects]] + name = "github.com/leonelquinteros/gotext" + packages = [ + ".", + "plurals" + ] + revision = "477ce49ddf8f201350f40fdb5ed80a65d155cc33" + version = "v1.4.0" + +[[projects]] + branch = "master" + name = "github.com/lyraproj/data-protobuf" + packages = ["datapb"] + revision = "a909d9e1f93b5e56ec35444550be1c34bc047ccf" + +[[projects]] + branch = "master" + name = "github.com/lyraproj/hiera" + packages = [ + "hiera", + "hieraapi", + "internal", + "provider" + ] + revision = "7e5f10f7437189172a58c4ec54a447b028f5a2c9" + +[[projects]] + branch = "master" + name = 
"github.com/lyraproj/issue" + packages = ["issue"] + revision = "8bc10230f9955cc31a9c52eec2c17af3934d1744" + +[[projects]] + branch = "remove-operator" + name = "github.com/lyraproj/lyra" + packages = [ + "cmd/lyra/cmd", + "cmd/lyra/ui", + "pkg/apply", + "pkg/generate", + "pkg/loader", + "pkg/logger", + "pkg/util", + "pkg/version" + ] + revision = "2d2a8e4200f3a6f43fb5bfda727e06e96c893537" + source = "github.com/thallgren/lyra" + +[[projects]] + branch = "master" + name = "github.com/lyraproj/pcore" + packages = [ + "hash", + "loader", + "pcore", + "proto", + "px", + "pximpl", + "serialization", + "threadlocal", + "types", + "utils", + "yaml" + ] + revision = "08ede39ae02dc6c8f233b10c7d65998f60278821" + +[[projects]] + name = "github.com/lyraproj/semver" + packages = ["semver"] + revision = "989ec48625a81ee1acf56b46de64f528edb21dbe" + version = "0.2.0" + +[[projects]] + branch = "master" + name = "github.com/lyraproj/servicesdk" + packages = [ + "annotation", + "grpc", + "lang", + "lang/go/lyra", + "lang/typegen", + "service", + "serviceapi", + "servicepb", + "wf" + ] + revision = "aa1c3c39fdcbbf36d2494007bf8fef9d4035d3b1" + +[[projects]] + branch = "master" + name = "github.com/lyraproj/wfe" + packages = [ + "api", + "service", + "wfe" + ] + revision = "84cb6813dc7bdd5a1f923550551b66cfa0c8873b" + [[projects]] branch = "master" - digest = "1:7d9fcac7f1228470c4ea0ee31cdfb662a758c44df691e39b3e76c11d3e12ba8f" name = "github.com/mailru/easyjson" packages = [ "buffer", "jlexer", - "jwriter", + "jwriter" ] - pruneopts = "NT" revision = "60711f1a8329503b04e1c88535f419d0bb440bff" [[projects]] branch = "master" - digest = "1:0e9bfc47ab9941ecc3344e580baca5deb4091177e84dd9773b48b38ec26b93d5" name = "github.com/mattbaird/jsonpatch" packages = ["."] - pruneopts = "NT" revision = "81af80346b1a01caae0cbc27fd3c1ba5b11e189f" [[projects]] - digest = "1:ea1db000388d88b31db7531c83016bef0d6db0d908a07794bfc36aca16fbf935" + name = "github.com/mattn/go-colorable" + packages = ["."] + revision = "3a70a971f94a22f2fa562ffcc7a0eb45f5daf045" + version = "v0.1.1" + +[[projects]] + name = "github.com/mattn/go-isatty" + packages = ["."] + revision = "c2a7a6ca930a4cd0bc33a3f298eb71960732a3a7" + version = "v0.0.7" + +[[projects]] name = "github.com/matttproud/golang_protobuf_extensions" packages = ["pbutil"] - pruneopts = "NT" revision = "c12348ce28de40eed0136aa2b644d0ee0650e56c" version = "v1.0.1" [[projects]] - digest = "1:2f42fa12d6911c7b7659738758631bec870b7e9b4c6be5444f963cdcfccc191f" + branch = "master" + name = "github.com/mgutz/ansi" + packages = ["."] + revision = "9520e82c474b0a04dd04f8a40959027271bab992" + +[[projects]] + name = "github.com/mitchellh/go-testing-interface" + packages = ["."] + revision = "6d0b8010fcc857872e42fc6c931227569016843c" + version = "v1.0.0" + +[[projects]] name = "github.com/modern-go/concurrent" packages = ["."] - pruneopts = "NT" revision = "bacd9c7ef1dd9b15be4a9909b8ac7a4e313eec94" version = "1.0.3" [[projects]] - digest = "1:c6aca19413b13dc59c220ad7430329e2ec454cc310bc6d8de2c7e2b93c18a0f6" name = "github.com/modern-go/reflect2" packages = ["."] - pruneopts = "NT" revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd" version = "1.0.1" +[[projects]] + name = "github.com/oklog/run" + packages = ["."] + revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39" + version = "v1.0.0" + [[projects]] branch = "master" - digest = "1:cd56674df3949c18b5f94441f54be8d55a065b51c6df094a431a6f29a97174ca" name = "github.com/operator-framework/operator-sdk" packages = [ "pkg/k8sutil", "pkg/leader", 
"pkg/ready", - "version", + "version" ] - pruneopts = "NT" revision = "429c43cb724ff059557927bbb98d21f17f2e3450" [[projects]] - digest = "1:93b1d84c5fa6d1ea52f4114c37714cddd84d5b78f151b62bb101128dd51399bf" name = "github.com/pborman/uuid" packages = ["."] - pruneopts = "NT" revision = "adf5a7427709b9deb95d29d3fa8a2bf9cfd388f1" version = "v1.2" [[projects]] branch = "master" - digest = "1:bf2ac97824a7221eb16b096aecc1c390d4c8a4e49524386aaa2e2dd215cbfb31" name = "github.com/petar/GoLLRB" packages = ["llrb"] - pruneopts = "NT" revision = "53be0d36a84c2a886ca057d34b6aa4468df9ccb4" [[projects]] - digest = "1:e4e9e026b8e4c5630205cd0208efb491b40ad40552e57f7a646bb8a46896077b" name = "github.com/peterbourgon/diskv" packages = ["."] - pruneopts = "NT" revision = "5f041e8faa004a95c88a202771f4cc3e991971e6" version = "v2.0.1" [[projects]] - digest = "1:ec2a29e3bd141038ae5c3d3a4f57db0c341fcc1d98055a607aedd683aed124ee" name = "github.com/prometheus/client_golang" packages = [ "prometheus", "prometheus/internal", - "prometheus/promhttp", + "prometheus/promhttp" ] - pruneopts = "NT" revision = "505eaef017263e299324067d40ca2c48f6a2cf50" version = "v0.9.2" [[projects]] branch = "master" - digest = "1:c2cc5049e927e2749c0d5163c9f8d924880d83e84befa732b9aad0b6be227bed" name = "github.com/prometheus/client_model" packages = ["go"] - pruneopts = "NT" revision = "f287a105a20ec685d797f65cd0ce8fbeaef42da1" [[projects]] branch = "master" - digest = "1:8e4954d40c6b7c6e3dd8277127cb6b66f9f20c0a3b18ee6f40d763c0e64ec48e" name = "github.com/prometheus/common" packages = [ "expfmt", "internal/bitbucket.org/ww/goautoneg", - "model", + "model" ] - pruneopts = "NT" revision = "2998b132700a7d019ff618c06a234b47c1f3f681" [[projects]] branch = "master" - digest = "1:42e89ce7daa7d2e08a4dc30943c359ba47c7ba93de7f6a41b974522fe02cf172" name = "github.com/prometheus/procfs" packages = [ ".", "internal/util", "nfs", - "xfs", + "xfs" ] - pruneopts = "NT" revision = "b1a0a9a36d7453ba0f62578b99712f3a6c5f82d1" [[projects]] - digest = "1:9d8420bbf131d1618bde6530af37c3799340d3762cc47210c1d9532a4c3a2779" + name = "github.com/spf13/cobra" + packages = ["."] + revision = "ef82de70bb3f60c65fb8eebacbb2d122ef517385" + version = "v0.0.3" + +[[projects]] name = "github.com/spf13/pflag" packages = ["."] - pruneopts = "NT" revision = "298182f68c66c05229eb03ac171abe6e309ee79a" version = "v1.0.3" [[projects]] - digest = "1:22f696cee54865fb8e9ff91df7b633f6b8f22037a8015253c6b6a71ca82219c7" name = "go.uber.org/atomic" packages = ["."] - pruneopts = "NT" revision = "1ea20fb1cbb1cc08cbd0d913a96dead89aa18289" version = "v1.3.2" [[projects]] - digest = "1:58ca93bdf81bac106ded02226b5395a0595d5346cdc4caa8d9c1f3a5f8f9976e" name = "go.uber.org/multierr" packages = ["."] - pruneopts = "NT" revision = "3c4937480c32f4c13a875a1829af76c98ca3d40a" version = "v1.1.0" [[projects]] - digest = "1:572fa4496563920f3e3107a2294cf2621d6cc4ffd03403fb6397b1bab9fa082a" name = "go.uber.org/zap" packages = [ ".", @@ -382,23 +466,19 @@ "internal/bufferpool", "internal/color", "internal/exit", - "zapcore", + "zapcore" ] - pruneopts = "NT" revision = "ff33455a0e382e8a81d14dd7c922020b6b5e7982" version = "v1.9.1" [[projects]] branch = "master" - digest = "1:3152b340c093c4fab5b097bae35a1b7f3c3a9c8627439a12cab4bfeeff9327b9" name = "golang.org/x/crypto" packages = ["ssh/terminal"] - pruneopts = "NT" revision = "ff983b9c42bc9fbf91556e191cc8efb585c16908" [[projects]] branch = "master" - digest = "1:ed22825de8992a1b90d6550ec7ce74745565b65ba974f92f1e7b394c5f220973" name = "golang.org/x/net" packages 
= [ "context", @@ -407,37 +487,33 @@ "http2", "http2/hpack", "idna", + "internal/timeseries", + "trace" ] - pruneopts = "NT" revision = "915654e7eabcea33ae277abbecf52f0d8b7a9fdc" [[projects]] branch = "master" - digest = "1:bdb664c89389d18d2aa69fb3b61fe5e2effc09e55b333a56e3cb071026418e33" name = "golang.org/x/oauth2" packages = [ ".", "google", "internal", "jws", - "jwt", + "jwt" ] - pruneopts = "NT" revision = "fd3eaa146cbb5c89ce187c275fb79bd3a36a5ffc" [[projects]] branch = "master" - digest = "1:458b612aa5d8b7f46210b7f37ac2a4d365a81ca972685d0ab68dfc10285a2fd9" name = "golang.org/x/sys" packages = [ "unix", - "windows", + "windows" ] - pruneopts = "NT" revision = "48ac38b7c8cbedd50b1613c0fccacfc7d88dfcdf" [[projects]] - digest = "1:8c74f97396ed63cc2ef04ebb5fc37bb032871b8fd890a25991ed40974b00cd2a" name = "golang.org/x/text" packages = [ "collate", @@ -454,23 +530,19 @@ "unicode/cldr", "unicode/norm", "unicode/rangetable", - "width", + "width" ] - pruneopts = "NT" revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" version = "v0.3.0" [[projects]] branch = "master" - digest = "1:9fdc2b55e8e0fafe4b41884091e51e77344f7dc511c5acedcfd98200003bff90" name = "golang.org/x/time" packages = ["rate"] - pruneopts = "NT" revision = "85acf8d2951cb2a3bde7632f9ff273ef0379bcbd" [[projects]] branch = "master" - digest = "1:6fe65f6df5b27ce09022caa9137b599c722914db99294d532d9ab2fbbd8ecc67" name = "golang.org/x/tools" packages = [ "go/ast/astutil", @@ -483,13 +555,48 @@ "imports", "internal/fastwalk", "internal/gopathwalk", - "internal/semver", + "internal/semver" ] - pruneopts = "NT" revision = "68c5ac90f574c3cf0e181d3cdde7cc60cb38fa9b" [[projects]] - digest = "1:902ffa11f1d8c19c12b05cabffe69e1a16608ad03a8899ebcb9c6bde295660ae" + branch = "master" + name = "gonum.org/v1/gonum" + packages = [ + "blas", + "blas/blas64", + "blas/cblas128", + "blas/gonum", + "floats", + "graph", + "graph/encoding", + "graph/encoding/dot", + "graph/formats/dot", + "graph/formats/dot/ast", + "graph/formats/dot/internal/astx", + "graph/formats/dot/internal/errors", + "graph/formats/dot/internal/lexer", + "graph/formats/dot/internal/parser", + "graph/formats/dot/internal/token", + "graph/internal/ordered", + "graph/internal/set", + "graph/internal/uid", + "graph/iterator", + "graph/simple", + "internal/asm/c128", + "internal/asm/c64", + "internal/asm/f32", + "internal/asm/f64", + "internal/cmplx64", + "internal/math32", + "lapack", + "lapack/gonum", + "lapack/lapack64", + "mat" + ] + revision = "50179cd3f3f7c0b521118fa9f625e86880fb84d0" + +[[projects]] name = "google.golang.org/appengine" packages = [ ".", @@ -501,30 +608,71 @@ "internal/modules", "internal/remote_api", "internal/urlfetch", - "urlfetch", + "urlfetch" ] - pruneopts = "NT" revision = "e9657d882bb81064595ca3b56cbe2546bbabf7b1" version = "v1.4.0" [[projects]] - digest = "1:2d1fbdc6777e5408cabeb02bf336305e724b925ff4546ded0fa8715a7267922a" + branch = "master" + name = "google.golang.org/genproto" + packages = ["googleapis/rpc/status"] + revision = "b515fa19cec88c32f305a962f34ae60068947aea" + +[[projects]] + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "binarylog/grpc_binarylog_v1", + "codes", + "connectivity", + "credentials", + "credentials/internal", + "encoding", + "encoding/proto", + "grpclog", + "health", + "health/grpc_health_v1", + "internal", + "internal/backoff", + "internal/balancerload", + "internal/binarylog", + "internal/channelz", + "internal/envconfig", + "internal/grpcrand", + 
"internal/grpcsync", + "internal/syscall", + "internal/transport", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap" + ] + revision = "25c4f928eaa6d96443009bd842389fb4fa48664e" + version = "v1.20.1" + +[[projects]] name = "gopkg.in/inf.v0" packages = ["."] - pruneopts = "NT" revision = "d2d2541c53f18d2a059457998ce2876cc8e67cbf" version = "v0.9.1" [[projects]] - digest = "1:18108594151654e9e696b27b181b953f9a90b16bf14d253dd1b397b025a1487f" name = "gopkg.in/yaml.v2" packages = ["."] - pruneopts = "NT" revision = "51d6538a90f86fe93ac480b35f37b2be17fef232" version = "v2.2.2" [[projects]] - digest = "1:b3f8152a68d73095a40fdcf329a93fc42e8eadb3305171df23fdb6b4e41a6417" name = "k8s.io/api" packages = [ "admission/v1beta1", @@ -558,13 +706,11 @@ "settings/v1alpha1", "storage/v1", "storage/v1alpha1", - "storage/v1beta1", + "storage/v1beta1" ] - pruneopts = "NT" revision = "b503174bad5991eb66f18247f52e41c3258f6348" [[projects]] - digest = "1:868de7cbaa0ecde6dc231c1529a10ae01bb05916095c0c992186e2a5cac57e79" name = "k8s.io/apimachinery" packages = [ "pkg/api/errors", @@ -609,13 +755,11 @@ "pkg/version", "pkg/watch", "third_party/forked/golang/json", - "third_party/forked/golang/reflect", + "third_party/forked/golang/reflect" ] - pruneopts = "NT" revision = "eddba98df674a16931d2d4ba75edc3a389bf633a" [[projects]] - digest = "1:00089f60de414edb1a51e63efde2480ce87c95d2cb3536ea240afe483905d736" name = "k8s.io/client-go" packages = [ "discovery", @@ -684,13 +828,11 @@ "util/integer", "util/jsonpath", "util/retry", - "util/workqueue", + "util/workqueue" ] - pruneopts = "NT" revision = "d082d5923d3cc0bfbb066ee5fbdea3d0ca79acf8" [[projects]] - digest = "1:4e2addcdbe0330f43800c1fcb905fc7a21b86415dfcca619e5c606c87257af1b" name = "k8s.io/code-generator" packages = [ "cmd/client-gen", @@ -716,14 +858,12 @@ "cmd/lister-gen/generators", "cmd/openapi-gen", "cmd/openapi-gen/args", - "pkg/util", + "pkg/util" ] - pruneopts = "T" revision = "3dcf91f64f638563e5106f21f50c31fa361c918d" [[projects]] branch = "master" - digest = "1:5edbd655d7ee65178fd5750bda9a3d3cd7fb96291937926f4969e6b2dfbc5743" name = "k8s.io/gengo" packages = [ "args", @@ -733,22 +873,18 @@ "generator", "namer", "parser", - "types", + "types" ] - pruneopts = "NT" revision = "fd15ee9cc2f77baa4f31e59e6acbf21146455073" [[projects]] - digest = "1:f3b42f307c7f49a1a7276c48d4b910db76e003220e88797f7acd41e3a9277ddf" name = "k8s.io/klog" packages = ["."] - pruneopts = "NT" revision = "a5bc97fbc634d635061f3146511332c7e313a55a" version = "v0.1.0" [[projects]] branch = "master" - digest = "1:9ac2fdede4a8304e3b00ea3b36526536339f306d0306e320fc74f6cefeead18e" name = "k8s.io/kube-openapi" packages = [ "cmd/openapi-gen/args", @@ -756,13 +892,11 @@ "pkg/generators", "pkg/generators/rules", "pkg/util/proto", - "pkg/util/sets", + "pkg/util/sets" ] - pruneopts = "NT" revision = "0317810137be915b9cf888946c6e115c1bfac693" [[projects]] - digest = "1:e03ddaf9f31bccbbb8c33eabad2c85025a95ca98905649fd744e0a54c630a064" name = "sigs.k8s.io/controller-runtime" packages = [ "pkg/cache", @@ -791,44 +925,14 @@ "pkg/source/internal", "pkg/webhook/admission", "pkg/webhook/admission/types", - "pkg/webhook/types", + "pkg/webhook/types" ] - pruneopts = "NT" revision = "c63ebda0bf4be5f0a8abd4003e4ea546032545ba" version = "v0.1.8" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - input-imports = [ - "github.com/operator-framework/operator-sdk/pkg/k8sutil", - 
"github.com/operator-framework/operator-sdk/pkg/leader", - "github.com/operator-framework/operator-sdk/pkg/ready", - "github.com/operator-framework/operator-sdk/version", - "k8s.io/api/core/v1", - "k8s.io/apimachinery/pkg/api/errors", - "k8s.io/apimachinery/pkg/apis/meta/v1", - "k8s.io/apimachinery/pkg/runtime", - "k8s.io/apimachinery/pkg/runtime/schema", - "k8s.io/client-go/plugin/pkg/client/auth/gcp", - "k8s.io/code-generator/cmd/client-gen", - "k8s.io/code-generator/cmd/conversion-gen", - "k8s.io/code-generator/cmd/deepcopy-gen", - "k8s.io/code-generator/cmd/defaulter-gen", - "k8s.io/code-generator/cmd/informer-gen", - "k8s.io/code-generator/cmd/lister-gen", - "k8s.io/code-generator/cmd/openapi-gen", - "k8s.io/gengo/args", - "sigs.k8s.io/controller-runtime/pkg/client", - "sigs.k8s.io/controller-runtime/pkg/client/config", - "sigs.k8s.io/controller-runtime/pkg/controller", - "sigs.k8s.io/controller-runtime/pkg/handler", - "sigs.k8s.io/controller-runtime/pkg/manager", - "sigs.k8s.io/controller-runtime/pkg/reconcile", - "sigs.k8s.io/controller-runtime/pkg/runtime/log", - "sigs.k8s.io/controller-runtime/pkg/runtime/scheme", - "sigs.k8s.io/controller-runtime/pkg/runtime/signals", - "sigs.k8s.io/controller-runtime/pkg/source", - ] + inputs-digest = "e43aa8f779d1145dda86b33ae9bfe1ea9c4d5c7127f85c19a1464589c2b76d5f" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 8091c71..aa6e960 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -10,6 +10,19 @@ required = [ "k8s.io/gengo/args", ] +[[override]] + name = "github.com/lyraproj/lyra" + source = "github.com/thallgren/lyra" + branch = "remove-operator" + +[[override]] + name = "github.com/lyraproj/issue" + branch = "master" + +[[override]] + name = "github.com/lyraproj/pcore" + branch = "master" + [[override]] name = "k8s.io/code-generator" # revision for tag "kubernetes-1.12.3" diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..48e370c --- /dev/null +++ b/Makefile @@ -0,0 +1,98 @@ +OS_TYPE=$(shell echo `uname`| tr '[A-Z]' '[a-z]') +ifeq ($(OS_TYPE),darwin) + OS := osx +else + OS := linux +endif + +# version 11.4 or later of go +HAS_REQUIRED_GO := $(shell go version | grep -E 'go[2-9]|go1.1[2-9]|go1.12.[4-9]') + +PACKAGE_NAME = github.com/lyraproj/lyra-operator +LDFLAGS += -X "$(PACKAGE_NAME)/pkg/version.BuildTime=$(shell date -u '+%Y-%m-%d %I:%M:%S %Z')" +LDFLAGS += -X "$(PACKAGE_NAME)/pkg/version.BuildTag=$(shell git describe --all --exact-match `git rev-parse HEAD` | grep tags | sed 's/tags\///')" +LDFLAGS += -X "$(PACKAGE_NAME)/pkg/version.BuildSHA=$(shell git rev-parse --short HEAD)" +BUILDARGS = + +PHONY+= default +default: LINTFLAGS = --fast +default: everything + +PHONY+= all +all: LDFLAGS += -s -w # Strip debug information +all: TESTFLAGS = --race +all: everything + +PHONY+= everything +everything: clean lint test lyra smoke-test + +PHONY+= docker-build +docker-build: BUILDARGS += CGO_ENABLED=0 GOOS=linux +docker-build: LDFLAGS += -extldflags "-static" +docker-build: lyra plugins + +PHONY+= shrink +shrink: + for f in build/*; do \ + upx $$f; \ + done; + +PHONY+= lyra +lyra: + @$(call build,bin/lyra,cmd/lyra/main.go) + +PHONY+= test +test: + @echo "🔘 Running unit tests... (`date '+%H:%M:%S'`)" + $(BUILDARGS) go test $(TESTFLAGS) github.com/lyraproj/lyra-operator/... + +PHONY+= clean +clean: + @echo "🔘 Cleaning build dir..." + @rm -rf build + +PHONY+= lint +lint: $(GOPATH)/bin/golangci-lint + @$(call checklint,pkg/...) + @$(call checklint,cmd/lyra/...) 
+ +PHONY+= dist-release +dist-release: + @if [ "$(OS)" != "linux" ]; \ + then \ + echo ""; \ + echo "🔴 dist-release target only supported on linux (Travis CI)"; \ + exit 1; \ + fi + + echo "🔘 Deploying release to GitHub..." + for f in build/*; do \ + echo " - $$f"; \ + tar czf $$f.tar.gz $$f; \ + sha256sum $$f.tar.gz | awk '{ print $$1 }' > $$f.tar.gz.sha256 ; \ + done; + +PHONY+= smoke-test +smoke-test: lyra + +define build + echo "🔘 Building - $(1) (`date '+%H:%M:%S'`)" + mkdir -p build/ + $(BUILDARGS) go build -ldflags '$(LDFLAGS)' -o build/$(1) $(2) + echo "✅ Build complete - $(1) (`date '+%H:%M:%S'`)" +endef + +define checklint + echo "🔘 Linting $(1) (`date '+%H:%M:%S'`)" + lint=`$(BUILDARGS) golangci-lint run $(LINTFLAGS) $(1)`; \ + if [ "$$lint" != "" ]; \ + then echo "🔴 Lint found"; echo "$$lint"; exit 1;\ + else echo "✅ Lint-free (`date '+%H:%M:%S'`)"; \ + fi +endef + +$(GOPATH)/bin/golangci-lint: + @echo "🔘 Installing golangci-lint... (`date '+%H:%M:%S'`)" + @GO111MODULE=off go get github.com/golangci/golangci-lint/cmd/golangci-lint + +.PHONY: $(PHONY) diff --git a/build/Dockerfile b/build/Dockerfile deleted file mode 100644 index 06a6cbe..0000000 --- a/build/Dockerfile +++ /dev/null @@ -1,7 +0,0 @@ -FROM alpine:3.8 - -RUN apk upgrade --update --no-cache - -USER nobody - -ADD build/_output/bin/lyra-operator /usr/local/bin/lyra-operator diff --git a/cmd/lyra/main.go b/cmd/lyra/main.go new file mode 100644 index 0000000..48ece75 --- /dev/null +++ b/cmd/lyra/main.go @@ -0,0 +1,17 @@ +package main + +import ( + "fmt" + "github.com/lyraproj/lyra-operator/cmd/manager/controller" + "github.com/lyraproj/lyra/cmd/lyra/cmd" + "os" + + _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" +) + +func main() { + if err := cmd.NewRootCmd(controller.NewControllerCmd()).Execute(); err != nil { + _, _ = fmt.Fprintln(os.Stderr, err) + os.Exit(-1) + } +} diff --git a/cmd/manager/controller/controller.go b/cmd/manager/controller/controller.go new file mode 100644 index 0000000..4ee7ec3 --- /dev/null +++ b/cmd/manager/controller/controller.go @@ -0,0 +1,102 @@ +package controller + +import ( + "fmt" + "os" + + "github.com/go-logr/logr" + hclog "github.com/hashicorp/go-hclog" + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/apply" + "github.com/lyraproj/lyra/pkg/logger" + "github.com/spf13/cobra" + logf "sigs.k8s.io/controller-runtime/pkg/runtime/log" +) + +var namespace string +var rootDir string + +// NewControllerCmd creates the command that runs Lyra as a Kubernetes controller +func NewControllerCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("controller"), + Short: gotext.Get("Run Lyra as a Kubernetes controller"), + Long: gotext.Get("Run Lyra as a Kubernetes controller"), + Run: runControllerCmd, + Args: cobra.NoArgs, + } + + cmd.Flags().StringVarP(&namespace, "namespace", "n", "default", gotext.Get("controller namespace")) + cmd.Flags().StringVarP(&rootDir, "root", "r", "", gotext.Get("path to root directory")) + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runControllerCmd(cmd *cobra.Command, args []string) { + logf.SetLogger(&hclogLogger{hcLogger: logger.Get()}) + applicator := apply.NewApplicator(rootDir, ``) + err := Start(namespace, applicator) + if err != nil { + logger.Get().Error("Failed to start controller", "err", err) + os.Exit(1) + } +} + +type hclogLogger struct { + hcLogger hclog.Logger +} + +// Info logs a non-error message with the given key/value pairs as context.
+// +// The msg argument should be used to add some constant description to +// the log line. The key/value pairs can then be used to add additional +// variable information. The key/value pairs should alternate string +// keys and arbitrary values. +func (l *hclogLogger) Info(msg string, keysAndValues ...interface{}) { + l.hcLogger.Info(msg, keysAndValues...) +} + +// Enabled tests whether this InfoLogger is enabled. For example, +// commandline flags might be used to set the logging verbosity and disable +// some info logs. +func (l *hclogLogger) Enabled() bool { + return true +} + +// Error logs an error, with the given message and key/value pairs as context. +// It functions similarly to calling Info with the "error" named value, but may +// have unique behavior, and should be preferred for logging errors (see the +// package documentation for more information). +// +// The msg field should be used to add context to any underlying error, +// while the err field should be used to attach the actual error that +// triggered this log line, if present. +func (l *hclogLogger) Error(err error, msg string, keysAndValues ...interface{}) { + l.hcLogger.Error(fmt.Sprintf("%s: %v", msg, err), keysAndValues...) +} + +// V returns an InfoLogger value for a specific verbosity level. A higher +// verbosity level means a log message is less important. It's illegal to +// pass a log level less than zero. +func (l *hclogLogger) V(level int) logr.InfoLogger { + return l +} + +// WithValues adds some key-value pairs of context to a logger. +// See Info for documentation on how key/value pairs work. +func (l *hclogLogger) WithValues(keysAndValues ...interface{}) logr.Logger { + return &hclogLogger{hcLogger: l.hcLogger.With(keysAndValues...)} +} + +// WithName adds a new element to the logger's name. +// Successive calls with WithName continue to append +// suffixes to the logger's name. It's strongly recommended +// that name segments contain only letters, digits, and hyphens +// (see the package documentation for more information). +func (l *hclogLogger) WithName(name string) logr.Logger { + return &hclogLogger{hcLogger: l.hcLogger.Named(name)} +} diff --git a/cmd/manager/controller/start.go b/cmd/manager/controller/start.go index 6f55630..4a83ce8 100644 --- a/cmd/manager/controller/start.go +++ b/cmd/manager/controller/start.go @@ -3,6 +3,7 @@ package controller import ( "context" "fmt" + "github.com/lyraproj/lyra/pkg/apply" "github.com/operator-framework/operator-sdk/pkg/leader" "github.com/operator-framework/operator-sdk/pkg/ready" @@ -14,7 +15,7 @@ import ( ) // Start the Kubernetes controller running -func Start(namespace string, applicator workflow.Applicator) error { +func Start(namespace string, applicator apply.Applicator) error { // Get a config to talk to the apiserver cfg, err := config.GetConfig() diff --git a/cmd/manager/main.go b/cmd/manager/main.go index 99f6743..1d86adf 100644 --- a/cmd/manager/main.go +++ b/cmd/manager/main.go @@ -21,10 +21,20 @@ func printVersion() { type mockApplicator struct{} +func (*mockApplicator) ApplyWorkflow(workflowName string) (exitCode int) { + // Mock Applicator is not really applying the workflow ... + return 0 +} + +func (*mockApplicator) ApplyWorkflowWithHieraData(workflowName string, data map[string]string) { + // Mock Applicator is not really applying the workflow ... +} + +func (*mockApplicator) DeleteWorkflow(workflowName string) (exitCode int) { + // Mock Applicator is not really deleting the workflow ...
+ return 0 +} + func (*mockApplicator) DeleteWorkflowWithHieraData(workflowName string, data map[string]string) { // Mock Applicator is not really doing anything ... } diff --git a/go.mod b/go.mod deleted file mode 100644 index 5230791..0000000 --- a/go.mod +++ /dev/null @@ -1,51 +0,0 @@ -module github.com/lyraproj/lyra-operator - -require ( - github.com/appscode/jsonpatch v0.0.0-20190108182946-7c0e3b262f30 // indirect - github.com/evanphx/json-patch v4.1.0+incompatible // indirect - github.com/ghodss/yaml v1.0.0 // indirect - github.com/go-logr/logr v0.1.0 - github.com/go-logr/zapr v0.1.0 // indirect - github.com/gogo/protobuf v1.2.0 // indirect - github.com/golang/groupcache v0.0.0-20181024230925-c65c006176ff // indirect - github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c // indirect - github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf // indirect - github.com/google/uuid v1.1.0 // indirect - github.com/googleapis/gnostic v0.2.0 // indirect - github.com/gregjones/httpcache v0.0.0-20181110185634-c63ab54fda8f // indirect - github.com/hashicorp/golang-lru v0.5.0 // indirect - github.com/imdario/mergo v0.3.6 // indirect - github.com/json-iterator/go v1.1.5 // indirect - github.com/mattbaird/jsonpatch v0.0.0-20171005235357-81af80346b1a // indirect - github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect - github.com/modern-go/reflect2 v1.0.1 // indirect - github.com/onsi/ginkgo v1.7.0 // indirect - github.com/onsi/gomega v1.4.3 // indirect - github.com/operator-framework/operator-sdk v0.4.0 - github.com/pborman/uuid v0.0.0-20180906182336-adf5a7427709 // indirect - github.com/peterbourgon/diskv v2.0.1+incompatible // indirect - github.com/prometheus/client_golang v0.9.2 // indirect - github.com/prometheus/client_model v0.0.0-20190109181635-f287a105a20e // indirect - github.com/prometheus/common v0.0.0-20190107103113-2998b132700a // indirect - github.com/prometheus/procfs v0.0.0-20190104112138-b1a0a9a36d74 // indirect - github.com/spf13/pflag v1.0.3 // indirect - go.uber.org/atomic v1.3.2 // indirect - go.uber.org/multierr v1.1.0 // indirect - go.uber.org/zap v1.9.1 // indirect - golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc // indirect - golang.org/x/net v0.0.0-20190110200230-915654e7eabc // indirect - golang.org/x/oauth2 v0.0.0-20190110195249-fd3eaa146cbb // indirect - golang.org/x/sys v0.0.0-20190124100055-b90733256f2e // indirect - golang.org/x/time v0.0.0-20181108054448-85acf8d2951c // indirect - gopkg.in/inf.v0 v0.9.1 // indirect - gopkg.in/yaml.v2 v2.2.2 // indirect - k8s.io/api v0.0.0-20181221193117-173ce66c1e39 - k8s.io/apiextensions-apiserver v0.0.0-20190103235604-e7617803aceb // indirect - k8s.io/apimachinery v0.0.0-20190126155707-0e6dcdd1b5ce - k8s.io/client-go v10.0.0+incompatible // indirect - k8s.io/klog v0.1.0 // indirect - k8s.io/kube-openapi v0.0.0-20181114233023-0317810137be // indirect - sigs.k8s.io/controller-runtime v0.1.10 - sigs.k8s.io/testing_frameworks v0.1.1 // indirect - sigs.k8s.io/yaml v1.1.0 // indirect -) diff --git a/go.sum b/go.sum deleted file mode 100644 index 0c33b46..0000000 --- a/go.sum +++ /dev/null @@ -1,166 +0,0 @@ -cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod 
h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/appscode/jsonpatch v0.0.0-20190108182946-7c0e3b262f30 h1:Kn3rqvbUFqSepE2OqVu0Pn1CbDw9IuMlONapol0zuwk= -github.com/appscode/jsonpatch v0.0.0-20190108182946-7c0e3b262f30/go.mod h1:4AJxUpXUhv4N+ziTvIcWWXgeorXpxPZOfk9HdEVr96M= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/evanphx/json-patch v4.0.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/evanphx/json-patch v4.1.0+incompatible h1:K1MDoo4AZ4wU0GIU/fPmtZg7VpzLjCxu+UwBD1FvwOc= -github.com/evanphx/json-patch v4.1.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= -github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logr/logr v0.1.0 h1:M1Tv3VzNlEHg6uyACnRdtrploV2P7wZqH8BoQMtz0cg= -github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= -github.com/go-logr/zapr v0.1.0 h1:h+WVe9j6HAA01niTJPA/kKH0i7e0rLZBCwauQFcRE54= -github.com/go-logr/zapr v0.1.0/go.mod h1:tabnROwaDl0UNxkVeFRbY8bwB37GwRv0P8lg6aAiEnk= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.0 h1:xU6/SpYbvkNYiptHJYEDRseDLvYE7wSqhYYNy0QSUzI= -github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/golang/groupcache v0.0.0-20181024230925-c65c006176ff h1:kOkM9whyQYodu09SJ6W3NCsHG7crFaJILQ22Gozp3lg= -github.com/golang/groupcache v0.0.0-20181024230925-c65c006176ff/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c h1:964Od4U6p2jUkFxvCydnIczKteheJEzHRToSGK3Bnlw= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf h1:+RRA9JqSOZFfKrOeqr2z77+8R2RKyh8PG66dcu1V0ck= -github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSNb993RKQDq4+1A4ia9nllfqcQFTQJedwGI= -github.com/google/uuid v1.1.0 h1:Jf4mxPC/ziBnoPIdpQdPJ9OeiomAUHLvxmPRSPH9m4s= -github.com/google/uuid v1.1.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gnostic v0.2.0 h1:l6N3VoaVzTncYYW+9yOz2LJJammFZGBO13sqgEhpy9g= -github.com/googleapis/gnostic v0.2.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= -github.com/gregjones/httpcache v0.0.0-20181110185634-c63ab54fda8f h1:ShTPMJQes6tubcjzGMODIVG5hlrCeImaBnZzKF2N8SM= -github.com/gregjones/httpcache 
v0.0.0-20181110185634-c63ab54fda8f/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= -github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/imdario/mergo v0.3.6 h1:xTNEAn+kxVO7dTZGu0CegyqKZmoWFI0rF8UxjlB2d28= -github.com/imdario/mergo v0.3.6/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= -github.com/json-iterator/go v1.1.5 h1:gL2yXlmiIo4+t+y32d4WGwOjKGYcGOuyrg46vadswDE= -github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/mattbaird/jsonpatch v0.0.0-20171005235357-81af80346b1a h1:+J2gw7Bw77w/fbK7wnNJJDKmw1IbWft2Ul5BzrG1Qm8= -github.com/mattbaird/jsonpatch v0.0.0-20171005235357-81af80346b1a/go.mod h1:M1qoD/MqPgTZIk0EWKB38wE28ACRfVcn+cU08jyArI0= -github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs= -github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU= -github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/operator-framework/operator-sdk v0.3.0 h1:MHBAwot3D4ssTwnlemupOkLNIj/Ohjfy/nF/zDRg1gA= -github.com/operator-framework/operator-sdk v0.3.0/go.mod h1:iVyukRkam5JZa8AnjYf+/G3rk7JI1+M6GsU0sq0B9NA= -github.com/operator-framework/operator-sdk v0.4.0 h1:5LKhvld7AZZaFkbA5Uvt3y/BSjXgtmjNrM1mJHY/+CI= -github.com/operator-framework/operator-sdk v0.4.0/go.mod h1:iVyukRkam5JZa8AnjYf+/G3rk7JI1+M6GsU0sq0B9NA= -github.com/pborman/uuid v0.0.0-20180906182336-adf5a7427709 h1:zNBQb37RGLmJybyMcs983HfUfpkw9OTFD9tbBfAViHE= -github.com/pborman/uuid v0.0.0-20180906182336-adf5a7427709/go.mod h1:VyrYX9gd7irzKovcSS6BIIEwPRkP2Wm2m9ufcdFSJ34= -github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI= -github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= -github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib 
v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.2 h1:awm861/B8OKDd2I/6o1dy3ra4BamzKhYOiGItCeZ740= -github.com/prometheus/client_golang v0.9.2/go.mod h1:OsXs2jCmiKlQ1lTBmv21f2mNfw4xf/QclQDMrYNZzcM= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190109181635-f287a105a20e h1:/F8S20P9KteTOlxM8k6xWtTiY+u32wemAL2/zilHKzw= -github.com/prometheus/client_model v0.0.0-20190109181635-f287a105a20e/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/common v0.0.0-20181126121408-4724e9255275/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.0.0-20190107103113-2998b132700a h1:bLKgQQEViHvsdgCwCGyyga8npETKygQ8b7c/28mJ8tw= -github.com/prometheus/common v0.0.0-20190107103113-2998b132700a/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190104112138-b1a0a9a36d74 h1:d1Xoc24yp/pXmWl2leBiBA+Tptce6cQsA+MMx/nOOcY= -github.com/prometheus/procfs v0.0.0-20190104112138-b1a0a9a36d74/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -go.uber.org/atomic v1.3.2 h1:2Oa65PReHzfn29GpvgsYwloV9AVFHPDk8tYxt2c2tr4= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/zap v1.9.1 h1:XCJQEf3W6eZaVwhRBof6ImoYGJSITeKWsyeh3HFu/5o= -go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc h1:F5tKCVGp+MUAHhKp5MZtGqAlGX3+oCsiL1Q629FL90M= -golang.org/x/crypto v0.0.0-20190103213133-ff983b9c42bc/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190110200230-915654e7eabc 
h1:Yx9JGxI1SBhVLFjpAkWMaO1TF+xyqtHLjZpvQboJGiM= -golang.org/x/net v0.0.0-20190110200230-915654e7eabc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/oauth2 v0.0.0-20190110195249-fd3eaa146cbb h1:B7Dt1cnU6RCJTD1iFYg0SDa/UpNoDxgfT+coBHUe5Ks= -golang.org/x/oauth2 v0.0.0-20190110195249-fd3eaa146cbb/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190109145017-48ac38b7c8cb h1:1w588/yEchbPNpa9sEvOcMZYbWHedwJjg4VOAdDHWHk= -golang.org/x/sys v0.0.0-20190109145017-48ac38b7c8cb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190124100055-b90733256f2e h1:3GIlrlVLfkoipSReOMNAgApI0ajnalyLa/EZHHca/XI= -golang.org/x/sys v0.0.0-20190124100055-b90733256f2e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c h1:fqgJT0MGcGpPgpWU7VRdRjuArfcOvC4AoJmILihzhDg= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= -gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -k8s.io/api v0.0.0-20181221193117-173ce66c1e39 h1:iGq7zEPXFb0IeXAQK5RiYT1SVKX/af9F9Wv0M+yudPY= -k8s.io/api v0.0.0-20181221193117-173ce66c1e39/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA= -k8s.io/apiextensions-apiserver v0.0.0-20190103235604-e7617803aceb h1:3yElwSbnV34qIVTlGgkbWwWM+wq7fw6i7EKRtLV6z58= -k8s.io/apiextensions-apiserver v0.0.0-20190103235604-e7617803aceb/go.mod 
h1:IxkesAMoaCRoLrPJdZNZUQp9NfZnzqaVzLhb2VEQzXE= -k8s.io/apimachinery v0.0.0-20190104073114-849b284f3b75 h1:dLhsGWh58R0WYgTCX6ZdaqSz2FltMZsk+ByHsUgMWRU= -k8s.io/apimachinery v0.0.0-20190104073114-849b284f3b75/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0= -k8s.io/apimachinery v0.0.0-20190126155707-0e6dcdd1b5ce h1:CzCioXa348Dzt9UuYrvxRrLGpL1ZV50QpyBHt01SYj4= -k8s.io/apimachinery v0.0.0-20190126155707-0e6dcdd1b5ce/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0= -k8s.io/client-go v10.0.0+incompatible h1:F1IqCqw7oMBzDkqlcBymRq1450wD0eNqLE9jzUrIi34= -k8s.io/client-go v10.0.0+incompatible/go.mod h1:7vJpHMYJwNQCWgzmNV+VYUl1zCObLyodBc8nIyt8L5s= -k8s.io/klog v0.1.0 h1:I5HMfc/DtuVaGR1KPwUrTc476K8NCqNBldC7H4dYEzk= -k8s.io/klog v0.1.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/kube-openapi v0.0.0-20181114233023-0317810137be h1:aWEq4nbj7HRJ0mtKYjNSk/7X28Tl6TI6FeG8gKF+r7Q= -k8s.io/kube-openapi v0.0.0-20181114233023-0317810137be/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc= -sigs.k8s.io/controller-runtime v0.1.9 h1:ZcnTZfnCGynyToVwHsqV3bMoGXwViYlnUF8kfMgghK8= -sigs.k8s.io/controller-runtime v0.1.9/go.mod h1:HFAYoOh6XMV+jKF1UjFwrknPbowfyHEHHRdJMf2jMX8= -sigs.k8s.io/controller-runtime v0.1.10 h1:amLOmcekVdnsD1uIpmgRqfTbQWJ2qxvQkcdeFhcotn4= -sigs.k8s.io/controller-runtime v0.1.10/go.mod h1:HFAYoOh6XMV+jKF1UjFwrknPbowfyHEHHRdJMf2jMX8= -sigs.k8s.io/testing_frameworks v0.1.1 h1:cP2l8fkA3O9vekpy5Ks8mmA0NW/F7yBdXf8brkWhVrs= -sigs.k8s.io/testing_frameworks v0.1.1/go.mod h1:VVBKrHmJ6Ekkfz284YKhQePcdycOzNH9qL6ht1zEr/U= -sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs= -sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/pkg/controller/workflow/workflow_controller.go b/pkg/controller/workflow/workflow_controller.go index 7eff9c7..f29e392 100644 --- a/pkg/controller/workflow/workflow_controller.go +++ b/pkg/controller/workflow/workflow_controller.go @@ -2,6 +2,7 @@ package workflow import ( "context" + "github.com/lyraproj/lyra/pkg/apply" "fmt" "github.com/go-logr/logr" "math/rand" @@ -27,22 +28,14 @@ const ( finalizerName = "workflow.finalizers.lyra.org" ) -// Applicator abstracts over workflow application and deletion -type Applicator interface { - ApplyWorkflowWithHieraData(workflowName string, data map[string]string) - - //DeleteWorkflowWithHieraData calls the delete on the workflow in lyra, meaning that resources will be destroyed, if applicable - DeleteWorkflowWithHieraData(workflowName string, data map[string]string) -} - // Add creates a new Workflow Controller and adds it to the Manager. The Manager will set fields on the Controller // and Start it when the Manager is Started. 
-func Add(mgr manager.Manager, applicator Applicator) error { +func Add(mgr manager.Manager, applicator apply.Applicator) error { return add(mgr, newReconciler(mgr, applicator)) } // newReconciler returns a new reconcile.Reconciler -func newReconciler(mgr manager.Manager, applicator Applicator) reconcile.Reconciler { +func newReconciler(mgr manager.Manager, applicator apply.Applicator) reconcile.Reconciler { return &ReconcileWorkflow{ client: mgr.GetClient(), scheme: mgr.GetScheme(), @@ -85,7 +78,7 @@ type ReconcileWorkflow struct { // that reads objects from the cache and writes to the apiserver client client.Client scheme *runtime.Scheme - applicator Applicator + applicator apply.Applicator } // Reconcile reads that state of the cluster for a Workflow object and makes changes based on the state read diff --git a/vendor/github.com/bmatcuk/doublestar/LICENSE b/vendor/github.com/bmatcuk/doublestar/LICENSE new file mode 100644 index 0000000..309c9d1 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Bob Matcuk + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/bmatcuk/doublestar/doublestar.go b/vendor/github.com/bmatcuk/doublestar/doublestar.go new file mode 100644 index 0000000..ceab4e3 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/doublestar.go @@ -0,0 +1,455 @@ +package doublestar + +import ( + "fmt" + "os" + "path" + "path/filepath" + "strings" + "unicode/utf8" +) + +var ErrBadPattern = path.ErrBadPattern + +// Split a path on the given separator, respecting escaping. +func splitPathOnSeparator(path string, separator rune) []string { + // if the separator is '\\', then we can just split... + if separator == '\\' { + return strings.Split(path, string(separator)) + } + + // otherwise, we need to be careful of situations where the separator was escaped + cnt := strings.Count(path, string(separator)) + if cnt == 0 { + return []string{path} + } + ret := make([]string, cnt+1) + pathlen := len(path) + separatorLen := utf8.RuneLen(separator) + idx := 0 + for start := 0; start < pathlen; { + end := indexRuneWithEscaping(path[start:], separator) + if end == -1 { + end = pathlen + } else { + end += start + } + ret[idx] = path[start:end] + start = end + separatorLen + idx++ + } + return ret[:idx] +} + +// Find the first index of a rune in a string, +// ignoring any times the rune is escaped using "\". 
+func indexRuneWithEscaping(s string, r rune) int { + end := strings.IndexRune(s, r) + if end == -1 { + return -1 + } + if end > 0 && s[end-1] == '\\' { + start := end + utf8.RuneLen(r) + end = indexRuneWithEscaping(s[start:], r) + if end != -1 { + end += start + } + } + return end +} + +// Match returns true if name matches the shell file name pattern. +// The pattern syntax is: +// +// pattern: +// { term } +// term: +// '*' matches any sequence of non-path-separators +// '**' matches any sequence of characters, including +// path separators. +// '?' matches any single non-path-separator character +// '[' [ '^' ] { character-range } ']' +// character class (must be non-empty) +// '{' { term } [ ',' { term } ... ] '}' +// c matches character c (c != '*', '?', '\\', '[') +// '\\' c matches character c +// +// character-range: +// c matches character c (c != '\\', '-', ']') +// '\\' c matches character c +// lo '-' hi matches character c for lo <= c <= hi +// +// Match requires pattern to match all of name, not just a substring. +// The path-separator defaults to the '/' character. The only possible +// returned error is ErrBadPattern, when pattern is malformed. +// +// Note: this is meant as a drop-in replacement for path.Match() which +// always uses '/' as the path separator. If you want to support systems +// which use a different path separator (such as Windows), what you want +// is the PathMatch() function below. +// +func Match(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, '/') +} + +// PathMatch is like Match except that it uses your system's path separator. +// For most systems, this will be '/'. However, for Windows, it would be '\\'. +// Note that for systems where the path separator is '\\', escaping is +// disabled. +// +// Note: this is meant as a drop-in replacement for filepath.Match(). +// +func PathMatch(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, os.PathSeparator) +} + +// Match returns true if name matches the shell file name pattern. +// The pattern syntax is: +// +// pattern: +// { term } +// term: +// '*' matches any sequence of non-path-separators +// '**' matches any sequence of characters, including +// path separators. +// '?' matches any single non-path-separator character +// '[' [ '^' ] { character-range } ']' +// character class (must be non-empty) +// '{' { term } [ ',' { term } ... ] '}' +// c matches character c (c != '*', '?', '\\', '[') +// '\\' c matches character c +// +// character-range: +// c matches character c (c != '\\', '-', ']') +// '\\' c matches character c, unless separator is '\\' +// lo '-' hi matches character c for lo <= c <= hi +// +// Match requires pattern to match all of name, not just a substring. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. 
+// +func matchWithSeparator(pattern, name string, separator rune) (bool, error) { + patternComponents := splitPathOnSeparator(pattern, separator) + nameComponents := splitPathOnSeparator(name, separator) + return doMatching(patternComponents, nameComponents) +} + +func doMatching(patternComponents, nameComponents []string) (matched bool, err error) { + // check for some base-cases + patternLen, nameLen := len(patternComponents), len(nameComponents) + if patternLen == 0 && nameLen == 0 { + return true, nil + } + if patternLen == 0 || nameLen == 0 { + return false, nil + } + + patIdx, nameIdx := 0, 0 + for patIdx < patternLen && nameIdx < nameLen { + if patternComponents[patIdx] == "**" { + // if our last pattern component is a doublestar, we're done - + // doublestar will match any remaining name components, if any. + if patIdx++; patIdx >= patternLen { + return true, nil + } + + // otherwise, try matching remaining components + for ; nameIdx < nameLen; nameIdx++ { + if m, _ := doMatching(patternComponents[patIdx:], nameComponents[nameIdx:]); m { + return true, nil + } + } + return false, nil + } else { + // try matching components + matched, err = matchComponent(patternComponents[patIdx], nameComponents[nameIdx]) + if !matched || err != nil { + return + } + } + patIdx++ + nameIdx++ + } + return patIdx >= patternLen && nameIdx >= nameLen, nil +} + +// Glob returns the names of all files matching pattern or nil +// if there is no matching file. The syntax of pattern is the same +// as in Match. The pattern may describe hierarchical names such as +// /usr/*/bin/ed (assuming the Separator is '/'). +// +// Glob ignores file system errors such as I/O errors reading directories. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// Your system path separator is automatically used. This means on +// systems where the separator is '\\' (Windows), escaping will be +// disabled. +// +// Note: this is meant as a drop-in replacement for filepath.Glob(). +// +func Glob(pattern string) (matches []string, err error) { + patternComponents := splitPathOnSeparator(filepath.ToSlash(pattern), '/') + if len(patternComponents) == 0 { + return nil, nil + } + + // On Windows systems, this will return the drive name ('C:'), on others, + // it will return an empty string. + volumeName := filepath.VolumeName(pattern) + + // If the first pattern component is equal to the volume name, then the + // pattern is an absolute path. 
+ if patternComponents[0] == volumeName { + return doGlob(fmt.Sprintf("%s%s", volumeName, string(os.PathSeparator)), patternComponents[1:], matches) + } + + // otherwise, it's a relative pattern + return doGlob(".", patternComponents, matches) +} + +// Perform a glob +func doGlob(basedir string, components, matches []string) (m []string, e error) { + m = matches + e = nil + + // figure out how many components we don't need to glob because they're + // just names without patterns - we'll use os.Lstat below to check if that + // path actually exists + patLen := len(components) + patIdx := 0 + for ; patIdx < patLen; patIdx++ { + if strings.IndexAny(components[patIdx], "*?[{\\") >= 0 { + break + } + } + if patIdx > 0 { + basedir = filepath.Join(basedir, filepath.Join(components[0:patIdx]...)) + } + + // Lstat will return an error if the file/directory doesn't exist + fi, err := os.Lstat(basedir) + if err != nil { + return + } + + // if there are no more components, we've found a match + if patIdx >= patLen { + m = append(m, basedir) + return + } + + // otherwise, we need to check each item in the directory... + // first, if basedir is a symlink, follow it... + if (fi.Mode() & os.ModeSymlink) != 0 { + fi, err = os.Stat(basedir) + if err != nil { + return + } + } + + // confirm it's a directory... + if !fi.IsDir() { + return + } + + // read directory + dir, err := os.Open(basedir) + if err != nil { + return + } + defer dir.Close() + + files, _ := dir.Readdir(-1) + lastComponent := (patIdx + 1) >= patLen + if components[patIdx] == "**" { + // if the current component is a doublestar, we'll try depth-first + for _, file := range files { + // if symlink, we may want to follow + if (file.Mode() & os.ModeSymlink) != 0 { + file, err = os.Stat(filepath.Join(basedir, file.Name())) + if err != nil { + continue + } + } + + if file.IsDir() { + // recurse into directories + if lastComponent { + m = append(m, filepath.Join(basedir, file.Name())) + } + m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx:], m) + } else if lastComponent { + // if the pattern's last component is a doublestar, we match filenames, too + m = append(m, filepath.Join(basedir, file.Name())) + } + } + if lastComponent { + return // we're done + } + patIdx++ + lastComponent = (patIdx + 1) >= patLen + } + + // check items in current directory and recurse + var match bool + for _, file := range files { + match, e = matchComponent(components[patIdx], file.Name()) + if e != nil { + return + } + if match { + if lastComponent { + m = append(m, filepath.Join(basedir, file.Name())) + } else { + m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx+1:], m) + } + } + } + return +} + +// Attempt to match a single pattern component with a path component +func matchComponent(pattern, name string) (bool, error) { + // check some base cases + patternLen, nameLen := len(pattern), len(name) + if patternLen == 0 && nameLen == 0 { + return true, nil + } + if patternLen == 0 { + return false, nil + } + if nameLen == 0 && pattern != "*" { + return false, nil + } + + // check for matches one rune at a time + patIdx, nameIdx := 0, 0 + for patIdx < patternLen && nameIdx < nameLen { + patRune, patAdj := utf8.DecodeRuneInString(pattern[patIdx:]) + nameRune, nameAdj := utf8.DecodeRuneInString(name[nameIdx:]) + if patRune == '\\' { + // handle escaped runes + patIdx += patAdj + patRune, patAdj = utf8.DecodeRuneInString(pattern[patIdx:]) + if patRune == utf8.RuneError { + return false, ErrBadPattern + } else if patRune == nameRune { 
+ patIdx += patAdj + nameIdx += nameAdj + } else { + return false, nil + } + } else if patRune == '*' { + // handle stars + if patIdx += patAdj; patIdx >= patternLen { + // a star at the end of a pattern will always + // match the rest of the path + return true, nil + } + + // check if we can make any matches + for ; nameIdx < nameLen; nameIdx += nameAdj { + if m, _ := matchComponent(pattern[patIdx:], name[nameIdx:]); m { + return true, nil + } + } + return false, nil + } else if patRune == '[' { + // handle character sets + patIdx += patAdj + endClass := indexRuneWithEscaping(pattern[patIdx:], ']') + if endClass == -1 { + return false, ErrBadPattern + } + endClass += patIdx + classRunes := []rune(pattern[patIdx:endClass]) + classRunesLen := len(classRunes) + if classRunesLen > 0 { + classIdx := 0 + matchClass := false + if classRunes[0] == '^' { + classIdx++ + } + for classIdx < classRunesLen { + low := classRunes[classIdx] + if low == '-' { + return false, ErrBadPattern + } + classIdx++ + if low == '\\' { + if classIdx < classRunesLen { + low = classRunes[classIdx] + classIdx++ + } else { + return false, ErrBadPattern + } + } + high := low + if classIdx < classRunesLen && classRunes[classIdx] == '-' { + // we have a range of runes + if classIdx++; classIdx >= classRunesLen { + return false, ErrBadPattern + } + high = classRunes[classIdx] + if high == '-' { + return false, ErrBadPattern + } + classIdx++ + if high == '\\' { + if classIdx < classRunesLen { + high = classRunes[classIdx] + classIdx++ + } else { + return false, ErrBadPattern + } + } + } + if low <= nameRune && nameRune <= high { + matchClass = true + } + } + if matchClass == (classRunes[0] == '^') { + return false, nil + } + } else { + return false, ErrBadPattern + } + patIdx = endClass + 1 + nameIdx += nameAdj + } else if patRune == '{' { + // handle alternatives such as {alt1,alt2,...} + patIdx += patAdj + endOptions := indexRuneWithEscaping(pattern[patIdx:], '}') + if endOptions == -1 { + return false, ErrBadPattern + } + endOptions += patIdx + options := splitPathOnSeparator(pattern[patIdx:endOptions], ',') + patIdx = endOptions + 1 + for _, o := range options { + m, e := matchComponent(o+pattern[patIdx:], name[nameIdx:]) + if e != nil { + return false, e + } + if m { + return true, nil + } + } + return false, nil + } else if patRune == '?' 
|| patRune == nameRune { + // handle single-rune wildcard + patIdx += patAdj + nameIdx += nameAdj + } else { + return false, nil + } + } + if patIdx >= patternLen && nameIdx >= nameLen { + return true, nil + } + if nameIdx >= nameLen && pattern[patIdx:] == "*" || pattern[patIdx:] == "**" { + return true, nil + } + return false, nil +} diff --git a/vendor/github.com/bmatcuk/doublestar/test/b/symlink-dir b/vendor/github.com/bmatcuk/doublestar/test/b/symlink-dir new file mode 120000 index 0000000..4a07501 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/test/b/symlink-dir @@ -0,0 +1 @@ +../axbxcxdxe/ \ No newline at end of file diff --git a/vendor/github.com/bmatcuk/doublestar/test/broken-symlink b/vendor/github.com/bmatcuk/doublestar/test/broken-symlink new file mode 120000 index 0000000..0b8ae1d --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/test/broken-symlink @@ -0,0 +1 @@ +/tmp/nonexistant-file-20160902155705 \ No newline at end of file diff --git a/vendor/github.com/bmatcuk/doublestar/test/working-symlink b/vendor/github.com/bmatcuk/doublestar/test/working-symlink new file mode 120000 index 0000000..db89c97 --- /dev/null +++ b/vendor/github.com/bmatcuk/doublestar/test/working-symlink @@ -0,0 +1 @@ +a/b \ No newline at end of file diff --git a/vendor/github.com/hashicorp/go-hclog/LICENSE b/vendor/github.com/hashicorp/go-hclog/LICENSE new file mode 100644 index 0000000..abaf1e4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 HashiCorp + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/hashicorp/go-hclog/global.go b/vendor/github.com/hashicorp/go-hclog/global.go new file mode 100644 index 0000000..e5f7f95 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/global.go @@ -0,0 +1,34 @@ +package hclog + +import ( + "sync" +) + +var ( + protect sync.Once + def Logger + + // DefaultOptions is used to create the Default logger. These are read + // only when the Default logger is created, so set them as soon as the + // process starts. + DefaultOptions = &LoggerOptions{ + Level: DefaultLevel, + Output: DefaultOutput, + } +) + +// Default returns a globally held logger. This can be a good starting +// place, and then you can use .With() and .Name() to create sub-loggers +// to be used in more specific contexts. +func Default() Logger { + protect.Do(func() { + def = New(DefaultOptions) + }) + + return def +} + +// L is a short alias for Default(). 
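+//
+// A minimal sketch of the intended use (names and values are hypothetical):
+//
+//	log := hclog.L().Named("lyra").With("component", "controller")
+//	log.Info("starting", "port", 8080)
+//	// emits something like:
+//	//   2019-05-11T09:00:00.000+0200 [INFO]  lyra: starting: component=controller port=8080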
+func L() Logger { + return Default() +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/buildtag.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/buildtag.go new file mode 100644 index 0000000..80d8f81 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/buildtag.go @@ -0,0 +1,91 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bytes" + "fmt" + "os" + "strings" + "unicode" +) + +var ( + nl = []byte("\n") + slashSlash = []byte("//") + plusBuild = []byte("+build") +) + +// checkBuildTag checks that build tags are in the correct location and well-formed. +func checkBuildTag(name string, data []byte) { + if !vet("buildtags") { + return + } + lines := bytes.SplitAfter(data, nl) + + // Determine cutpoint where +build comments are no longer valid. + // They are valid in leading // comments in the file followed by + // a blank line. + var cutoff int + for i, line := range lines { + line = bytes.TrimSpace(line) + if len(line) == 0 { + cutoff = i + continue + } + if bytes.HasPrefix(line, slashSlash) { + continue + } + break + } + + for i, line := range lines { + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, slashSlash) { + continue + } + text := bytes.TrimSpace(line[2:]) + if bytes.HasPrefix(text, plusBuild) { + fields := bytes.Fields(text) + if !bytes.Equal(fields[0], plusBuild) { + // Comment is something like +buildasdf not +build. + fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1) + setExit(1) + continue + } + if i >= cutoff { + fmt.Fprintf(os.Stderr, "%s:%d: +build comment must appear before package clause and be followed by a blank line\n", name, i+1) + setExit(1) + continue + } + // Check arguments. + Args: + for _, arg := range fields[1:] { + for _, elem := range strings.Split(string(arg), ",") { + if strings.HasPrefix(elem, "!!") { + fmt.Fprintf(os.Stderr, "%s:%d: invalid double negative in build constraint: %s\n", name, i+1, arg) + setExit(1) + break Args + } + elem = strings.TrimPrefix(elem, "!") + for _, c := range elem { + if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { + fmt.Fprintf(os.Stderr, "%s:%d: invalid non-alphanumeric build constraint: %s\n", name, i+1, arg) + setExit(1) + break Args + } + } + } + } + continue + } + // Comment with +build but not at beginning. + if bytes.Contains(line, plusBuild) && i < cutoff { + fmt.Fprintf(os.Stderr, "%s:%d: possible malformed +build comment\n", name, i+1) + setExit(1) + continue + } + } +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/dead.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/dead.go new file mode 100644 index 0000000..130f619 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/dead.go @@ -0,0 +1,108 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Simplified dead code detector. Used for skipping certain checks +// on unreachable code (for instance, shift checks on arch-specific code). + +package main + +import ( + "go/ast" + "go/constant" +) + +// updateDead puts unreachable "if" and "case" nodes into f.dead. +func (f *File) updateDead(node ast.Node) { + if f.dead[node] { + // The node is already marked as dead. 
+ return + } + + switch stmt := node.(type) { + case *ast.IfStmt: + // "if" branch is dead if its condition evaluates + // to constant false. + v := f.pkg.types[stmt.Cond].Value + if v == nil { + return + } + if !constant.BoolVal(v) { + f.setDead(stmt.Body) + return + } + f.setDead(stmt.Else) + case *ast.SwitchStmt: + // Case clause with empty switch tag is dead if it evaluates + // to constant false. + if stmt.Tag == nil { + BodyLoopBool: + for _, stmt := range stmt.Body.List { + cc := stmt.(*ast.CaseClause) + if cc.List == nil { + // Skip default case. + continue + } + for _, expr := range cc.List { + v := f.pkg.types[expr].Value + if v == nil || constant.BoolVal(v) { + continue BodyLoopBool + } + } + f.setDead(cc) + } + return + } + + // Case clause is dead if its constant value doesn't match + // the constant value from the switch tag. + // TODO: This handles integer comparisons only. + v := f.pkg.types[stmt.Tag].Value + if v == nil || v.Kind() != constant.Int { + return + } + tagN, ok := constant.Uint64Val(v) + if !ok { + return + } + BodyLoopInt: + for _, x := range stmt.Body.List { + cc := x.(*ast.CaseClause) + if cc.List == nil { + // Skip default case. + continue + } + for _, expr := range cc.List { + v := f.pkg.types[expr].Value + if v == nil { + continue BodyLoopInt + } + n, ok := constant.Uint64Val(v) + if !ok || tagN == n { + continue BodyLoopInt + } + } + f.setDead(cc) + } + } +} + +// setDead marks the node and all the children as dead. +func (f *File) setDead(node ast.Node) { + dv := deadVisitor{ + f: f, + } + ast.Walk(dv, node) +} + +type deadVisitor struct { + f *File +} + +func (dv deadVisitor) Visit(node ast.Node) ast.Visitor { + if node == nil { + return nil + } + dv.f.dead[node] = true + return dv +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/hclog.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/hclog.go new file mode 100644 index 0000000..43842e0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/hclog.go @@ -0,0 +1,55 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains the printf-checker. + +package main + +import ( + "go/ast" + "go/types" +) + +func init() { + register("hclog", + "check hclog invocations", + checkHCLog, + callExpr) +} + +var checkHCLogFunc = map[string]bool{ + "Trace": true, + "Debug": true, + "Info": true, + "Warn": true, + "Error": true, +} + +func checkHCLog(f *File, node ast.Node) { + call := node.(*ast.CallExpr) + fun, _ := call.Fun.(*ast.SelectorExpr) + typ := f.pkg.types[fun] + sig, _ := typ.Type.(*types.Signature) + if sig == nil { + return // the call is not on of the form x.f() + } + + recv := f.pkg.types[fun.X] + + if recv.Type == nil { + return + } + + if !isNamedType(recv.Type, "github.com/hashicorp/go-hclog", "Logger") { + return + } + + if _, ok := checkHCLogFunc[fun.Sel.Name]; !ok { + return + } + + if len(call.Args)%2 != 1 { + f.Badf(call.Pos(), "invalid number of log arguments to %s (%d)", fun.Sel.Name, len(call.Args)) + } +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/main.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/main.go new file mode 100644 index 0000000..c7e6ae0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/main.go @@ -0,0 +1,617 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Vet is a simple checker for static errors in Go source code. +// See doc.go for more information. +package main + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "go/ast" + "go/build" + "go/importer" + "go/parser" + "go/printer" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "strconv" + "strings" +) + +// Important! If you add flags here, make sure to update cmd/go/internal/vet/vetflag.go. + +var ( + verbose = flag.Bool("v", false, "verbose") + source = flag.Bool("source", false, "import from source instead of compiled object files") + tags = flag.String("tags", "", "space-separated list of build tags to apply when parsing") + tagList = []string{} // exploded version of tags flag; set in main + + vcfg vetConfig + mustTypecheck bool +) + +var exitCode = 0 + +// "-all" flag enables all non-experimental checks +var all = triStateFlag("all", unset, "enable all non-experimental checks") + +// Flags to control which individual checks to perform. +var report = map[string]*triState{ + // Only unusual checks are written here. + // Most checks that operate during the AST walk are added by register. + "asmdecl": triStateFlag("asmdecl", unset, "check assembly against Go declarations"), + "buildtags": triStateFlag("buildtags", unset, "check that +build tags are valid"), +} + +// experimental records the flags enabling experimental features. These must be +// requested explicitly; they are not enabled by -all. +var experimental = map[string]bool{} + +// setTrueCount record how many flags are explicitly set to true. +var setTrueCount int + +// dirsRun and filesRun indicate whether the vet is applied to directory or +// file targets. The distinction affects which checks are run. +var dirsRun, filesRun bool + +// includesNonTest indicates whether the vet is applied to non-test targets. +// Certain checks are relevant only if they touch both test and non-test files. +var includesNonTest bool + +// A triState is a boolean that knows whether it has been set to either true or false. +// It is used to identify if a flag appears; the standard boolean flag cannot +// distinguish missing from unset. It also satisfies flag.Value. +type triState int + +const ( + unset triState = iota + setTrue + setFalse +) + +func triStateFlag(name string, value triState, usage string) *triState { + flag.Var(&value, name, usage) + return &value +} + +// triState implements flag.Value, flag.Getter, and flag.boolFlag. +// They work like boolean flags: we can say vet -printf as well as vet -printf=true +func (ts *triState) Get() interface{} { + return *ts == setTrue +} + +func (ts triState) isTrue() bool { + return ts == setTrue +} + +func (ts *triState) Set(value string) error { + b, err := strconv.ParseBool(value) + if err != nil { + return err + } + if b { + *ts = setTrue + setTrueCount++ + } else { + *ts = setFalse + } + return nil +} + +func (ts *triState) String() string { + switch *ts { + case unset: + return "true" // An unset flag will be set by -all, so defaults to true. + case setTrue: + return "true" + case setFalse: + return "false" + } + panic("not reached") +} + +func (ts triState) IsBoolFlag() bool { + return true +} + +// vet tells whether to report errors for the named check, a flag name. +func vet(name string) bool { + return report[name].isTrue() +} + +// setExit sets the value for os.Exit when it is called, later. It +// remembers the highest value. 
+func setExit(err int) { + if err > exitCode { + exitCode = err + } +} + +var ( + // Each of these vars has a corresponding case in (*File).Visit. + assignStmt *ast.AssignStmt + binaryExpr *ast.BinaryExpr + callExpr *ast.CallExpr + compositeLit *ast.CompositeLit + exprStmt *ast.ExprStmt + forStmt *ast.ForStmt + funcDecl *ast.FuncDecl + funcLit *ast.FuncLit + genDecl *ast.GenDecl + interfaceType *ast.InterfaceType + rangeStmt *ast.RangeStmt + returnStmt *ast.ReturnStmt + structType *ast.StructType + + // checkers is a two-level map. + // The outer level is keyed by a nil pointer, one of the AST vars above. + // The inner level is keyed by checker name. + checkers = make(map[ast.Node]map[string]func(*File, ast.Node)) +) + +func register(name, usage string, fn func(*File, ast.Node), types ...ast.Node) { + report[name] = triStateFlag(name, unset, usage) + for _, typ := range types { + m := checkers[typ] + if m == nil { + m = make(map[string]func(*File, ast.Node)) + checkers[typ] = m + } + m[name] = fn + } +} + +// Usage is a replacement usage function for the flags package. +func Usage() { + fmt.Fprintf(os.Stderr, "Usage of vet:\n") + fmt.Fprintf(os.Stderr, "\tvet [flags] directory...\n") + fmt.Fprintf(os.Stderr, "\tvet [flags] files... # Must be a single package\n") + fmt.Fprintf(os.Stderr, "By default, -all is set and all non-experimental checks are run.\n") + fmt.Fprintf(os.Stderr, "For more information run\n") + fmt.Fprintf(os.Stderr, "\tgo doc cmd/vet\n\n") + fmt.Fprintf(os.Stderr, "Flags:\n") + flag.PrintDefaults() + os.Exit(2) +} + +// File is a wrapper for the state of a file used in the parser. +// The parse tree walkers are all methods of this type. +type File struct { + pkg *Package + fset *token.FileSet + name string + content []byte + file *ast.File + b bytes.Buffer // for use by methods + + // Parsed package "foo" when checking package "foo_test" + basePkg *Package + + // The keys are the objects that are receivers of a "String() + // string" method. The value reports whether the method has a + // pointer receiver. + // This is used by the recursiveStringer method in print.go. + stringerPtrs map[*ast.Object]bool + + // Registered checkers to run. + checkers map[ast.Node][]func(*File, ast.Node) + + // Unreachable nodes; can be ignored in shift check. + dead map[ast.Node]bool +} + +func main() { + flag.Usage = Usage + flag.Parse() + + // If any flag is set, we run only those checks requested. + // If all flag is set true or if no flags are set true, set all the non-experimental ones + // not explicitly set (in effect, set the "-all" flag). + if setTrueCount == 0 || *all == setTrue { + for name, setting := range report { + if *setting == unset && !experimental[name] { + *setting = setTrue + } + } + } + + // Accept space-separated tags because that matches + // the go command's other subcommands. + // Accept commas because go tool vet traditionally has. + tagList = strings.Fields(strings.Replace(*tags, ",", " ", -1)) + + if flag.NArg() == 0 { + Usage() + } + + // Special case for "go vet" passing an explicit configuration: + // single argument ending in vet.cfg. + // Once we have a more general mechanism for obtaining this + // information from build tools like the go command, + // vet should be changed to use it. This vet.cfg hack is an + // experiment to learn about what form that information should take. 
+ if flag.NArg() == 1 && strings.HasSuffix(flag.Arg(0), "vet.cfg") { + doPackageCfg(flag.Arg(0)) + os.Exit(exitCode) + } + + for _, name := range flag.Args() { + // Is it a directory? + fi, err := os.Stat(name) + if err != nil { + warnf("error walking tree: %s", err) + continue + } + if fi.IsDir() { + dirsRun = true + } else { + filesRun = true + if !strings.HasSuffix(name, "_test.go") { + includesNonTest = true + } + } + } + if dirsRun && filesRun { + Usage() + } + if dirsRun { + for _, name := range flag.Args() { + walkDir(name) + } + os.Exit(exitCode) + } + if doPackage(flag.Args(), nil) == nil { + warnf("no files checked") + } + os.Exit(exitCode) +} + +// prefixDirectory places the directory name on the beginning of each name in the list. +func prefixDirectory(directory string, names []string) { + if directory != "." { + for i, name := range names { + names[i] = filepath.Join(directory, name) + } + } +} + +// vetConfig is the JSON config struct prepared by the Go command. +type vetConfig struct { + Compiler string + Dir string + ImportPath string + GoFiles []string + ImportMap map[string]string + PackageFile map[string]string + + SucceedOnTypecheckFailure bool + + imp types.Importer +} + +func (v *vetConfig) Import(path string) (*types.Package, error) { + if v.imp == nil { + v.imp = importer.For(v.Compiler, v.openPackageFile) + } + if path == "unsafe" { + return v.imp.Import("unsafe") + } + p := v.ImportMap[path] + if p == "" { + return nil, fmt.Errorf("unknown import path %q", path) + } + if v.PackageFile[p] == "" { + return nil, fmt.Errorf("unknown package file for import %q", path) + } + return v.imp.Import(p) +} + +func (v *vetConfig) openPackageFile(path string) (io.ReadCloser, error) { + file := v.PackageFile[path] + if file == "" { + // Note that path here has been translated via v.ImportMap, + // unlike in the error in Import above. We prefer the error in + // Import, but it's worth diagnosing this one too, just in case. + return nil, fmt.Errorf("unknown package file for %q", path) + } + f, err := os.Open(file) + if err != nil { + return nil, err + } + return f, nil +} + +// doPackageCfg analyzes a single package described in a config file. +func doPackageCfg(cfgFile string) { + js, err := ioutil.ReadFile(cfgFile) + if err != nil { + errorf("%v", err) + } + if err := json.Unmarshal(js, &vcfg); err != nil { + errorf("parsing vet config %s: %v", cfgFile, err) + } + stdImporter = &vcfg + inittypes() + mustTypecheck = true + doPackage(vcfg.GoFiles, nil) +} + +// doPackageDir analyzes the single package found in the directory, if there is one, +// plus a test package, if there is one. +func doPackageDir(directory string) { + context := build.Default + if len(context.BuildTags) != 0 { + warnf("build tags %s previously set", context.BuildTags) + } + context.BuildTags = append(tagList, context.BuildTags...) + + pkg, err := context.ImportDir(directory, 0) + if err != nil { + // If it's just that there are no go source files, that's fine. + if _, nogo := err.(*build.NoGoError); nogo { + return + } + // Non-fatal: we are doing a recursive walk and there may be other directories. + warnf("cannot process directory %s: %s", directory, err) + return + } + var names []string + names = append(names, pkg.GoFiles...) + names = append(names, pkg.CgoFiles...) + names = append(names, pkg.TestGoFiles...) // These are also in the "foo" package. + names = append(names, pkg.SFiles...) + prefixDirectory(directory, names) + basePkg := doPackage(names, nil) + // Is there also a "foo_test" package? 
If so, do that one as well. + if len(pkg.XTestGoFiles) > 0 { + names = pkg.XTestGoFiles + prefixDirectory(directory, names) + doPackage(names, basePkg) + } +} + +type Package struct { + path string + defs map[*ast.Ident]types.Object + uses map[*ast.Ident]types.Object + selectors map[*ast.SelectorExpr]*types.Selection + types map[ast.Expr]types.TypeAndValue + spans map[types.Object]Span + files []*File + typesPkg *types.Package +} + +// doPackage analyzes the single package constructed from the named files. +// It returns the parsed Package or nil if none of the files have been checked. +func doPackage(names []string, basePkg *Package) *Package { + var files []*File + var astFiles []*ast.File + fs := token.NewFileSet() + for _, name := range names { + data, err := ioutil.ReadFile(name) + if err != nil { + // Warn but continue to next package. + warnf("%s: %s", name, err) + return nil + } + checkBuildTag(name, data) + var parsedFile *ast.File + if strings.HasSuffix(name, ".go") { + parsedFile, err = parser.ParseFile(fs, name, data, 0) + if err != nil { + warnf("%s: %s", name, err) + return nil + } + astFiles = append(astFiles, parsedFile) + } + files = append(files, &File{ + fset: fs, + content: data, + name: name, + file: parsedFile, + dead: make(map[ast.Node]bool), + }) + } + if len(astFiles) == 0 { + return nil + } + pkg := new(Package) + pkg.path = astFiles[0].Name.Name + pkg.files = files + // Type check the package. + errs := pkg.check(fs, astFiles) + if errs != nil { + if vcfg.SucceedOnTypecheckFailure { + os.Exit(0) + } + if *verbose || mustTypecheck { + for _, err := range errs { + fmt.Fprintf(os.Stderr, "%v\n", err) + } + if mustTypecheck { + // This message could be silenced, and we could just exit, + // but it might be helpful at least at first to make clear that the + // above errors are coming from vet and not the compiler + // (they often look like compiler errors, such as "declared but not used"). + errorf("typecheck failures") + } + } + } + + // Check. + chk := make(map[ast.Node][]func(*File, ast.Node)) + for typ, set := range checkers { + for name, fn := range set { + if vet(name) { + chk[typ] = append(chk[typ], fn) + } + } + } + for _, file := range files { + file.pkg = pkg + file.basePkg = basePkg + file.checkers = chk + if file.file != nil { + file.walkFile(file.name, file.file) + } + } + return pkg +} + +func visit(path string, f os.FileInfo, err error) error { + if err != nil { + warnf("walk error: %s", err) + return err + } + // One package per directory. Ignore the files themselves. + if !f.IsDir() { + return nil + } + doPackageDir(path) + return nil +} + +func (pkg *Package) hasFileWithSuffix(suffix string) bool { + for _, f := range pkg.files { + if strings.HasSuffix(f.name, suffix) { + return true + } + } + return false +} + +// walkDir recursively walks the tree looking for Go packages. +func walkDir(root string) { + filepath.Walk(root, visit) +} + +// errorf formats the error to standard error, adding program +// identification and a newline, and exits. +func errorf(format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...) + os.Exit(2) +} + +// warnf formats the error to standard error, adding program +// identification and a newline, but does not exit. +func warnf(format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, "vet: "+format+"\n", args...) + setExit(1) +} + +// Println is fmt.Println guarded by -v. +func Println(args ...interface{}) { + if !*verbose { + return + } + fmt.Println(args...) 
+} + +// Printf is fmt.Printf guarded by -v. +func Printf(format string, args ...interface{}) { + if !*verbose { + return + } + fmt.Printf(format+"\n", args...) +} + +// Bad reports an error and sets the exit code.. +func (f *File) Bad(pos token.Pos, args ...interface{}) { + f.Warn(pos, args...) + setExit(1) +} + +// Badf reports a formatted error and sets the exit code. +func (f *File) Badf(pos token.Pos, format string, args ...interface{}) { + f.Warnf(pos, format, args...) + setExit(1) +} + +// loc returns a formatted representation of the position. +func (f *File) loc(pos token.Pos) string { + if pos == token.NoPos { + return "" + } + // Do not print columns. Because the pos often points to the start of an + // expression instead of the inner part with the actual error, the + // precision can mislead. + posn := f.fset.Position(pos) + return fmt.Sprintf("%s:%d", posn.Filename, posn.Line) +} + +// locPrefix returns a formatted representation of the position for use as a line prefix. +func (f *File) locPrefix(pos token.Pos) string { + if pos == token.NoPos { + return "" + } + return fmt.Sprintf("%s: ", f.loc(pos)) +} + +// Warn reports an error but does not set the exit code. +func (f *File) Warn(pos token.Pos, args ...interface{}) { + fmt.Fprintf(os.Stderr, "%s%s", f.locPrefix(pos), fmt.Sprintln(args...)) +} + +// Warnf reports a formatted error but does not set the exit code. +func (f *File) Warnf(pos token.Pos, format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, "%s%s\n", f.locPrefix(pos), fmt.Sprintf(format, args...)) +} + +// walkFile walks the file's tree. +func (f *File) walkFile(name string, file *ast.File) { + Println("Checking file", name) + ast.Walk(f, file) +} + +// Visit implements the ast.Visitor interface. +func (f *File) Visit(node ast.Node) ast.Visitor { + f.updateDead(node) + var key ast.Node + switch node.(type) { + case *ast.AssignStmt: + key = assignStmt + case *ast.BinaryExpr: + key = binaryExpr + case *ast.CallExpr: + key = callExpr + case *ast.CompositeLit: + key = compositeLit + case *ast.ExprStmt: + key = exprStmt + case *ast.ForStmt: + key = forStmt + case *ast.FuncDecl: + key = funcDecl + case *ast.FuncLit: + key = funcLit + case *ast.GenDecl: + key = genDecl + case *ast.InterfaceType: + key = interfaceType + case *ast.RangeStmt: + key = rangeStmt + case *ast.ReturnStmt: + key = returnStmt + case *ast.StructType: + key = structType + } + for _, fn := range f.checkers[key] { + fn(f, node) + } + return f +} + +// gofmt returns a string representation of the expression. +func (f *File) gofmt(x ast.Expr) string { + f.b.Reset() + printer.Fprint(&f.b, f.fset, x) + return f.b.String() +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/shadow.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/shadow.go new file mode 100644 index 0000000..390c6d9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/shadow.go @@ -0,0 +1,238 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +This file contains the code to check for shadowed variables. +A shadowed variable is a variable declared in an inner scope +with the same name and type as a variable in an outer scope, +and where the outer variable is mentioned after the inner one +is declared. + +(This definition can be refined; the module generates too many +false positives and is not yet enabled by default.) 
+ +For example: + + func BadRead(f *os.File, buf []byte) error { + var err error + for { + n, err := f.Read(buf) // shadows the function variable 'err' + if err != nil { + break // causes return of wrong value + } + foo(buf) + } + return err + } + +*/ + +package main + +import ( + "flag" + "go/ast" + "go/token" + "go/types" +) + +var strictShadowing = flag.Bool("shadowstrict", false, "whether to be strict about shadowing; can be noisy") + +func checkShadow(f *File, node ast.Node) { + switch n := node.(type) { + case *ast.AssignStmt: + checkShadowAssignment(f, n) + case *ast.GenDecl: + checkShadowDecl(f, n) + } +} + +// Span stores the minimum range of byte positions in the file in which a +// given variable (types.Object) is mentioned. It is lexically defined: it spans +// from the beginning of its first mention to the end of its last mention. +// A variable is considered shadowed (if *strictShadowing is off) only if the +// shadowing variable is declared within the span of the shadowed variable. +// In other words, if a variable is shadowed but not used after the shadowed +// variable is declared, it is inconsequential and not worth complaining about. +// This simple check dramatically reduces the nuisance rate for the shadowing +// check, at least until something cleverer comes along. +// +// One wrinkle: A "naked return" is a silent use of a variable that the Span +// will not capture, but the compilers catch naked returns of shadowed +// variables so we don't need to. +// +// Cases this gets wrong (TODO): +// - If a for loop's continuation statement mentions a variable redeclared in +// the block, we should complain about it but don't. +// - A variable declared inside a function literal can falsely be identified +// as shadowing a variable in the outer function. +// +type Span struct { + min token.Pos + max token.Pos +} + +// contains reports whether the position is inside the span. +func (s Span) contains(pos token.Pos) bool { + return s.min <= pos && pos < s.max +} + +// growSpan expands the span for the object to contain the instance represented +// by the identifier. +func (pkg *Package) growSpan(ident *ast.Ident, obj types.Object) { + if *strictShadowing { + return // No need + } + pos := ident.Pos() + end := ident.End() + span, ok := pkg.spans[obj] + if ok { + if span.min > pos { + span.min = pos + } + if span.max < end { + span.max = end + } + } else { + span = Span{pos, end} + } + pkg.spans[obj] = span +} + +// checkShadowAssignment checks for shadowing in a short variable declaration. +func checkShadowAssignment(f *File, a *ast.AssignStmt) { + if a.Tok != token.DEFINE { + return + } + if f.idiomaticShortRedecl(a) { + return + } + for _, expr := range a.Lhs { + ident, ok := expr.(*ast.Ident) + if !ok { + f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier") + return + } + checkShadowing(f, ident) + } +} + +// idiomaticShortRedecl reports whether this short declaration can be ignored for +// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. +func (f *File) idiomaticShortRedecl(a *ast.AssignStmt) bool { + // Don't complain about deliberate redeclarations of the form + // i := i + // Such constructs are idiomatic in range loops to create a new variable + // for each iteration. Another example is + // switch n := n.(type) + if len(a.Rhs) != len(a.Lhs) { + return false + } + // We know it's an assignment, so the LHS must be all identifiers. (We check anyway.) 
+ for i, expr := range a.Lhs { + lhs, ok := expr.(*ast.Ident) + if !ok { + f.Badf(expr.Pos(), "invalid AST: short variable declaration of non-identifier") + return true // Don't do any more processing. + } + switch rhs := a.Rhs[i].(type) { + case *ast.Ident: + if lhs.Name != rhs.Name { + return false + } + case *ast.TypeAssertExpr: + if id, ok := rhs.X.(*ast.Ident); ok { + if lhs.Name != id.Name { + return false + } + } + default: + return false + } + } + return true +} + +// idiomaticRedecl reports whether this declaration spec can be ignored for +// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. +func (f *File) idiomaticRedecl(d *ast.ValueSpec) bool { + // Don't complain about deliberate redeclarations of the form + // var i, j = i, j + if len(d.Names) != len(d.Values) { + return false + } + for i, lhs := range d.Names { + if rhs, ok := d.Values[i].(*ast.Ident); ok { + if lhs.Name != rhs.Name { + return false + } + } + } + return true +} + +// checkShadowDecl checks for shadowing in a general variable declaration. +func checkShadowDecl(f *File, d *ast.GenDecl) { + if d.Tok != token.VAR { + return + } + for _, spec := range d.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + f.Badf(spec.Pos(), "invalid AST: var GenDecl not ValueSpec") + return + } + // Don't complain about deliberate redeclarations of the form + // var i = i + if f.idiomaticRedecl(valueSpec) { + return + } + for _, ident := range valueSpec.Names { + checkShadowing(f, ident) + } + } +} + +// checkShadowing checks whether the identifier shadows an identifier in an outer scope. +func checkShadowing(f *File, ident *ast.Ident) { + if ident.Name == "_" { + // Can't shadow the blank identifier. + return + } + obj := f.pkg.defs[ident] + if obj == nil { + return + } + // obj.Parent.Parent is the surrounding scope. If we can find another declaration + // starting from there, we have a shadowed identifier. + _, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos()) + if shadowed == nil { + return + } + // Don't complain if it's shadowing a universe-declared identifier; that's fine. + if shadowed.Parent() == types.Universe { + return + } + if *strictShadowing { + // The shadowed identifier must appear before this one to be an instance of shadowing. + if shadowed.Pos() > ident.Pos() { + return + } + } else { + // Don't complain if the span of validity of the shadowed identifier doesn't include + // the shadowing identifier. + span, ok := f.pkg.spans[shadowed] + if !ok { + f.Badf(ident.Pos(), "internal error: no range for %q", ident.Name) + return + } + if !span.contains(ident.Pos()) { + return + } + } + // Don't complain if the types differ: that implies the programmer really wants two different things. 
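+ // For example, an inner `n := "retry"` (a string) is not reported as
+ // shadowing an outer `var n int`; only identically typed redeclarations
+ // are flagged.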
+ if types.Identical(obj.Type(), shadowed.Type()) { + f.Badf(ident.Pos(), "declaration of %q shadows declaration at %s", obj.Name(), f.loc(shadowed.Pos())) + } +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/testdata/log.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/testdata/log.go new file mode 100644 index 0000000..8f2b0a5 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/testdata/log.go @@ -0,0 +1,10 @@ +package testdata + +import hclog "github.com/hashicorp/go-hclog" + +func badHCLog() { + l := hclog.L() + + l.Info("ok", "key", "val") + l.Info("bad", "key") +} diff --git a/vendor/github.com/hashicorp/go-hclog/hclogvet/types.go b/vendor/github.com/hashicorp/go-hclog/hclogvet/types.go new file mode 100644 index 0000000..129afa7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/hclogvet/types.go @@ -0,0 +1,163 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file contains the pieces of the tool that use typechecking from the go/types package. + +package main + +import ( + "go/ast" + "go/build" + "go/importer" + "go/token" + "go/types" + "strings" +) + +// stdImporter is the importer we use to import packages. +// It is shared so that all packages are imported by the same importer. +var stdImporter types.Importer + +var ( + errorType *types.Interface + stringerType *types.Interface // possibly nil + formatterType *types.Interface // possibly nil +) + +func inittypes() { + errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + + if typ := importType("fmt", "Stringer"); typ != nil { + stringerType = typ.Underlying().(*types.Interface) + } + if typ := importType("fmt", "Formatter"); typ != nil { + formatterType = typ.Underlying().(*types.Interface) + } +} + +// isNamedType reports whether t is the named type path.name. +func isNamedType(t types.Type, path, name string) bool { + n, ok := t.(*types.Named) + if !ok { + return false + } + obj := n.Obj() + return obj.Name() == name && isPackage(obj.Pkg(), path) +} + +// isPackage reports whether pkg has path as the canonical path, +// taking into account vendoring effects +func isPackage(pkg *types.Package, path string) bool { + if pkg == nil { + return false + } + + return pkg.Path() == path || + strings.HasSuffix(pkg.Path(), "/vendor/"+path) +} + +// importType returns the type denoted by the qualified identifier +// path.name, and adds the respective package to the imports map +// as a side effect. In case of an error, importType returns nil. +func importType(path, name string) types.Type { + pkg, err := stdImporter.Import(path) + if err != nil { + // This can happen if the package at path hasn't been compiled yet. 
+ warnf("import failed: %v", err) + return nil + } + if obj, ok := pkg.Scope().Lookup(name).(*types.TypeName); ok { + return obj.Type() + } + warnf("invalid type name %q", name) + return nil +} + +func (pkg *Package) check(fs *token.FileSet, astFiles []*ast.File) []error { + if stdImporter == nil { + if *source { + stdImporter = importer.For("source", nil) + } else { + stdImporter = importer.Default() + } + inittypes() + } + pkg.defs = make(map[*ast.Ident]types.Object) + pkg.uses = make(map[*ast.Ident]types.Object) + pkg.selectors = make(map[*ast.SelectorExpr]*types.Selection) + pkg.spans = make(map[types.Object]Span) + pkg.types = make(map[ast.Expr]types.TypeAndValue) + + var allErrors []error + config := types.Config{ + // We use the same importer for all imports to ensure that + // everybody sees identical packages for the given paths. + Importer: stdImporter, + // By providing a Config with our own error function, it will continue + // past the first error. We collect them all for printing later. + Error: func(e error) { + allErrors = append(allErrors, e) + }, + + Sizes: archSizes, + } + info := &types.Info{ + Selections: pkg.selectors, + Types: pkg.types, + Defs: pkg.defs, + Uses: pkg.uses, + } + typesPkg, err := config.Check(pkg.path, fs, astFiles, info) + if len(allErrors) == 0 && err != nil { + allErrors = append(allErrors, err) + } + pkg.typesPkg = typesPkg + // update spans + for id, obj := range pkg.defs { + pkg.growSpan(id, obj) + } + for id, obj := range pkg.uses { + pkg.growSpan(id, obj) + } + return allErrors +} + +func isConvertibleToString(typ types.Type) bool { + if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil { + // We explicitly don't want untyped nil, which is + // convertible to both of the interfaces below, as it + // would just panic anyway. + return false + } + if types.ConvertibleTo(typ, errorType) { + return true // via .Error() + } + if stringerType != nil && types.ConvertibleTo(typ, stringerType) { + return true // via .String() + } + return false +} + +// hasBasicType reports whether x's type is a types.Basic with the given kind. +func (f *File) hasBasicType(x ast.Expr, kind types.BasicKind) bool { + t := f.pkg.types[x].Type + if t != nil { + t = t.Underlying() + } + b, ok := t.(*types.Basic) + return ok && b.Kind() == kind +} + +// hasMethod reports whether the type contains a method with the given name. +// It is part of the workaround for Formatters and should be deleted when +// that workaround is no longer necessary. +// TODO: This could be better once issue 6259 is fixed. +func (f *File) hasMethod(typ types.Type, name string) bool { + // assume we have an addressable variable of type typ + obj, _, _ := types.LookupFieldOrMethod(typ, true, f.pkg.typesPkg, name) + _, ok := obj.(*types.Func) + return ok +} + +var archSizes = types.SizesFor("gc", build.Default.GOARCH) diff --git a/vendor/github.com/hashicorp/go-hclog/intlogger.go b/vendor/github.com/hashicorp/go-hclog/intlogger.go new file mode 100644 index 0000000..219656c --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/intlogger.go @@ -0,0 +1,527 @@ +package hclog + +import ( + "bytes" + "encoding" + "encoding/json" + "fmt" + "io" + "log" + "reflect" + "runtime" + "sort" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" +) + +// TimeFormat to use for logging. 
This is a version of RFC3339 that contains +// contains millisecond precision +const TimeFormat = "2006-01-02T15:04:05.000Z0700" + +// errJsonUnsupportedTypeMsg is included in log json entries, if an arg cannot be serialized to json +const errJsonUnsupportedTypeMsg = "logging contained values that don't serialize to json" + +var ( + _levelToBracket = map[Level]string{ + Debug: "[DEBUG]", + Trace: "[TRACE]", + Info: "[INFO] ", + Warn: "[WARN] ", + Error: "[ERROR]", + } +) + +// Make sure that intLogger is a Logger +var _ Logger = &intLogger{} + +// intLogger is an internal logger implementation. Internal in that it is +// defined entirely by this package. +type intLogger struct { + json bool + caller bool + name string + timeFormat string + + // This is a pointer so that it's shared by any derived loggers, since + // those derived loggers share the bufio.Writer as well. + mutex *sync.Mutex + writer *writer + level *int32 + + implied []interface{} +} + +// New returns a configured logger. +func New(opts *LoggerOptions) Logger { + if opts == nil { + opts = &LoggerOptions{} + } + + output := opts.Output + if output == nil { + output = DefaultOutput + } + + level := opts.Level + if level == NoLevel { + level = DefaultLevel + } + + mutex := opts.Mutex + if mutex == nil { + mutex = new(sync.Mutex) + } + + l := &intLogger{ + json: opts.JSONFormat, + caller: opts.IncludeLocation, + name: opts.Name, + timeFormat: TimeFormat, + mutex: mutex, + writer: newWriter(output), + level: new(int32), + } + + if opts.TimeFormat != "" { + l.timeFormat = opts.TimeFormat + } + + atomic.StoreInt32(l.level, int32(level)) + + return l +} + +// Log a message and a set of key/value pairs if the given level is at +// or more severe that the threshold configured in the Logger. +func (l *intLogger) Log(level Level, msg string, args ...interface{}) { + if level < Level(atomic.LoadInt32(l.level)) { + return + } + + t := time.Now() + + l.mutex.Lock() + defer l.mutex.Unlock() + + if l.json { + l.logJSON(t, level, msg, args...) + } else { + l.log(t, level, msg, args...) + } + + l.writer.Flush(level) +} + +// Cleanup a path by returning the last 2 segments of the path only. +func trimCallerPath(path string) string { + // lovely borrowed from zap + // nb. To make sure we trim the path correctly on Windows too, we + // counter-intuitively need to use '/' and *not* os.PathSeparator here, + // because the path given originates from Go stdlib, specifically + // runtime.Caller() which (as of Mar/17) returns forward slashes even on + // Windows. + // + // See https://github.com/golang/go/issues/3335 + // and https://github.com/golang/go/issues/18151 + // + // for discussion on the issue on Go side. + + // Find the last separator. + idx := strings.LastIndexByte(path, '/') + if idx == -1 { + return path + } + + // Find the penultimate separator. 
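+ // For example, given the (hypothetical) path "/home/dev/go/src/app/main.go":
+ // the first search found the '/' before "main.go", this one finds the '/'
+ // before "app", and the function returns "app/main.go".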
+ idx = strings.LastIndexByte(path[:idx], '/') + if idx == -1 { + return path + } + + return path[idx+1:] +} + +// Non-JSON logging format function +func (l *intLogger) log(t time.Time, level Level, msg string, args ...interface{}) { + l.writer.WriteString(t.Format(l.timeFormat)) + l.writer.WriteByte(' ') + + s, ok := _levelToBracket[level] + if ok { + l.writer.WriteString(s) + } else { + l.writer.WriteString("[?????]") + } + + if l.caller { + if _, file, line, ok := runtime.Caller(3); ok { + l.writer.WriteByte(' ') + l.writer.WriteString(trimCallerPath(file)) + l.writer.WriteByte(':') + l.writer.WriteString(strconv.Itoa(line)) + l.writer.WriteByte(':') + } + } + + l.writer.WriteByte(' ') + + if l.name != "" { + l.writer.WriteString(l.name) + l.writer.WriteString(": ") + } + + l.writer.WriteString(msg) + + args = append(l.implied, args...) + + var stacktrace CapturedStacktrace + + if args != nil && len(args) > 0 { + if len(args)%2 != 0 { + cs, ok := args[len(args)-1].(CapturedStacktrace) + if ok { + args = args[:len(args)-1] + stacktrace = cs + } else { + args = append(args, "") + } + } + + l.writer.WriteByte(':') + + FOR: + for i := 0; i < len(args); i = i + 2 { + var ( + val string + raw bool + ) + + switch st := args[i+1].(type) { + case string: + val = st + case int: + val = strconv.FormatInt(int64(st), 10) + case int64: + val = strconv.FormatInt(int64(st), 10) + case int32: + val = strconv.FormatInt(int64(st), 10) + case int16: + val = strconv.FormatInt(int64(st), 10) + case int8: + val = strconv.FormatInt(int64(st), 10) + case uint: + val = strconv.FormatUint(uint64(st), 10) + case uint64: + val = strconv.FormatUint(uint64(st), 10) + case uint32: + val = strconv.FormatUint(uint64(st), 10) + case uint16: + val = strconv.FormatUint(uint64(st), 10) + case uint8: + val = strconv.FormatUint(uint64(st), 10) + case CapturedStacktrace: + stacktrace = st + continue FOR + case Format: + val = fmt.Sprintf(st[0].(string), st[1:]...) + default: + v := reflect.ValueOf(st) + if v.Kind() == reflect.Slice { + val = l.renderSlice(v) + raw = true + } else { + val = fmt.Sprintf("%v", st) + } + } + + l.writer.WriteByte(' ') + l.writer.WriteString(args[i].(string)) + l.writer.WriteByte('=') + + if !raw && strings.ContainsAny(val, " \t\n\r") { + l.writer.WriteByte('"') + l.writer.WriteString(val) + l.writer.WriteByte('"') + } else { + l.writer.WriteString(val) + } + } + } + + l.writer.WriteString("\n") + + if stacktrace != "" { + l.writer.WriteString(string(stacktrace)) + } +} + +func (l *intLogger) renderSlice(v reflect.Value) string { + var buf bytes.Buffer + + buf.WriteRune('[') + + for i := 0; i < v.Len(); i++ { + if i > 0 { + buf.WriteString(", ") + } + + sv := v.Index(i) + + var val string + + switch sv.Kind() { + case reflect.String: + val = sv.String() + case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64: + val = strconv.FormatInt(sv.Int(), 10) + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + val = strconv.FormatUint(sv.Uint(), 10) + default: + val = fmt.Sprintf("%v", sv.Interface()) + } + + if strings.ContainsAny(val, " \t\n\r") { + buf.WriteByte('"') + buf.WriteString(val) + buf.WriteByte('"') + } else { + buf.WriteString(val) + } + } + + buf.WriteRune(']') + + return buf.String() +} + +// JSON logging function +func (l *intLogger) logJSON(t time.Time, level Level, msg string, args ...interface{}) { + vals := l.jsonMapEntry(t, level, msg) + args = append(l.implied, args...) 
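+ // The encoded entry ends up shaped roughly like (values hypothetical):
+ // {"@level":"info","@message":"starting","@module":"lyra",
+ // "@timestamp":"2019-05-11T09:00:00.000000+02:00","port":8080}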
+ + if args != nil && len(args) > 0 { + if len(args)%2 != 0 { + cs, ok := args[len(args)-1].(CapturedStacktrace) + if ok { + args = args[:len(args)-1] + vals["stacktrace"] = cs + } else { + args = append(args, "") + } + } + + for i := 0; i < len(args); i = i + 2 { + if _, ok := args[i].(string); !ok { + // As this is the logging function not much we can do here + // without injecting into logs... + continue + } + val := args[i+1] + switch sv := val.(type) { + case error: + // Check if val is of type error. If error type doesn't + // implement json.Marshaler or encoding.TextMarshaler + // then set val to err.Error() so that it gets marshaled + switch sv.(type) { + case json.Marshaler, encoding.TextMarshaler: + default: + val = sv.Error() + } + case Format: + val = fmt.Sprintf(sv[0].(string), sv[1:]...) + } + + vals[args[i].(string)] = val + } + } + + err := json.NewEncoder(l.writer).Encode(vals) + if err != nil { + if _, ok := err.(*json.UnsupportedTypeError); ok { + plainVal := l.jsonMapEntry(t, level, msg) + plainVal["@warn"] = errJsonUnsupportedTypeMsg + + json.NewEncoder(l.writer).Encode(plainVal) + } + } +} + +func (l intLogger) jsonMapEntry(t time.Time, level Level, msg string) map[string]interface{} { + vals := map[string]interface{}{ + "@message": msg, + "@timestamp": t.Format("2006-01-02T15:04:05.000000Z07:00"), + } + + var levelStr string + switch level { + case Error: + levelStr = "error" + case Warn: + levelStr = "warn" + case Info: + levelStr = "info" + case Debug: + levelStr = "debug" + case Trace: + levelStr = "trace" + default: + levelStr = "all" + } + + vals["@level"] = levelStr + + if l.name != "" { + vals["@module"] = l.name + } + + if l.caller { + if _, file, line, ok := runtime.Caller(4); ok { + vals["@caller"] = fmt.Sprintf("%s:%d", file, line) + } + } + return vals +} + +// Emit the message and args at DEBUG level +func (l *intLogger) Debug(msg string, args ...interface{}) { + l.Log(Debug, msg, args...) +} + +// Emit the message and args at TRACE level +func (l *intLogger) Trace(msg string, args ...interface{}) { + l.Log(Trace, msg, args...) +} + +// Emit the message and args at INFO level +func (l *intLogger) Info(msg string, args ...interface{}) { + l.Log(Info, msg, args...) +} + +// Emit the message and args at WARN level +func (l *intLogger) Warn(msg string, args ...interface{}) { + l.Log(Warn, msg, args...) +} + +// Emit the message and args at ERROR level +func (l *intLogger) Error(msg string, args ...interface{}) { + l.Log(Error, msg, args...) +} + +// Indicate that the logger would emit TRACE level logs +func (l *intLogger) IsTrace() bool { + return Level(atomic.LoadInt32(l.level)) == Trace +} + +// Indicate that the logger would emit DEBUG level logs +func (l *intLogger) IsDebug() bool { + return Level(atomic.LoadInt32(l.level)) <= Debug +} + +// Indicate that the logger would emit INFO level logs +func (l *intLogger) IsInfo() bool { + return Level(atomic.LoadInt32(l.level)) <= Info +} + +// Indicate that the logger would emit WARN level logs +func (l *intLogger) IsWarn() bool { + return Level(atomic.LoadInt32(l.level)) <= Warn +} + +// Indicate that the logger would emit ERROR level logs +func (l *intLogger) IsError() bool { + return Level(atomic.LoadInt32(l.level)) <= Error +} + +// Return a sub-Logger for which every emitted log message will contain +// the given key/value pairs. This is used to create a context specific +// Logger. 
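+//
+// For example (the key names are hypothetical):
+//
+//	reqLog := logger.With("request-id", id, "remote-addr", addr)
+//	reqLog.Debug("handling request") // both pairs appear on this line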
+func (l *intLogger) With(args ...interface{}) Logger { + if len(args)%2 != 0 { + panic("With() call requires paired arguments") + } + + sl := *l + + result := make(map[string]interface{}, len(l.implied)+len(args)) + keys := make([]string, 0, len(l.implied)+len(args)) + + // Read existing args, store map and key for consistent sorting + for i := 0; i < len(l.implied); i += 2 { + key := l.implied[i].(string) + keys = append(keys, key) + result[key] = l.implied[i+1] + } + // Read new args, store map and key for consistent sorting + for i := 0; i < len(args); i += 2 { + key := args[i].(string) + _, exists := result[key] + if !exists { + keys = append(keys, key) + } + result[key] = args[i+1] + } + + // Sort keys to be consistent + sort.Strings(keys) + + sl.implied = make([]interface{}, 0, len(l.implied)+len(args)) + for _, k := range keys { + sl.implied = append(sl.implied, k) + sl.implied = append(sl.implied, result[k]) + } + + return &sl +} + +// Create a new sub-Logger that a name decending from the current name. +// This is used to create a subsystem specific Logger. +func (l *intLogger) Named(name string) Logger { + sl := *l + + if sl.name != "" { + sl.name = sl.name + "." + name + } else { + sl.name = name + } + + return &sl +} + +// Create a new sub-Logger with an explicit name. This ignores the current +// name. This is used to create a standalone logger that doesn't fall +// within the normal hierarchy. +func (l *intLogger) ResetNamed(name string) Logger { + sl := *l + + sl.name = name + + return &sl +} + +// Update the logging level on-the-fly. This will affect all subloggers as +// well. +func (l *intLogger) SetLevel(level Level) { + atomic.StoreInt32(l.level, int32(level)) +} + +// Create a *log.Logger that will send it's data through this Logger. This +// allows packages that expect to be using the standard library log to actually +// use this logger. +func (l *intLogger) StandardLogger(opts *StandardLoggerOptions) *log.Logger { + if opts == nil { + opts = &StandardLoggerOptions{} + } + + return log.New(l.StandardWriter(opts), "", 0) +} + +func (l *intLogger) StandardWriter(opts *StandardLoggerOptions) io.Writer { + return &stdlogAdapter{ + log: l, + inferLevels: opts.InferLevels, + forceLevel: opts.ForceLevel, + } +} diff --git a/vendor/github.com/hashicorp/go-hclog/logger.go b/vendor/github.com/hashicorp/go-hclog/logger.go new file mode 100644 index 0000000..080ed79 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/logger.go @@ -0,0 +1,176 @@ +package hclog + +import ( + "io" + "log" + "os" + "strings" + "sync" +) + +var ( + //DefaultOutput is used as the default log output. + DefaultOutput io.Writer = os.Stderr + + // DefaultLevel is used as the default log level. + DefaultLevel = Info +) + +// Level represents a log level. +type Level int32 + +const ( + // NoLevel is a special level used to indicate that no level has been + // set and allow for a default to be used. + NoLevel Level = 0 + + // Trace is the most verbose level. Intended to be used for the tracing + // of actions in code, such as function enters/exits, etc. + Trace Level = 1 + + // Debug information for programmer lowlevel analysis. + Debug Level = 2 + + // Info information about steady state operations. + Info Level = 3 + + // Warn information about rare but handled events. + Warn Level = 4 + + // Error information about unrecoverable events. + Error Level = 5 +) + +// Format is a simple convience type for when formatting is required. 
When +// processing a value of this type, the logger automatically treats the first +// argument as a Printf formatting string and passes the rest as the values +// to be formatted. For example: L.Info(Fmt{"%d beans/day", beans}). +type Format []interface{} + +// Fmt returns a Format type. This is a convience function for creating a Format +// type. +func Fmt(str string, args ...interface{}) Format { + return append(Format{str}, args...) +} + +// LevelFromString returns a Level type for the named log level, or "NoLevel" if +// the level string is invalid. This facilitates setting the log level via +// config or environment variable by name in a predictable way. +func LevelFromString(levelStr string) Level { + // We don't care about case. Accept both "INFO" and "info". + levelStr = strings.ToLower(strings.TrimSpace(levelStr)) + switch levelStr { + case "trace": + return Trace + case "debug": + return Debug + case "info": + return Info + case "warn": + return Warn + case "error": + return Error + default: + return NoLevel + } +} + +// Logger describes the interface that must be implemeted by all loggers. +type Logger interface { + // Args are alternating key, val pairs + // keys must be strings + // vals can be any type, but display is implementation specific + // Emit a message and key/value pairs at the TRACE level + Trace(msg string, args ...interface{}) + + // Emit a message and key/value pairs at the DEBUG level + Debug(msg string, args ...interface{}) + + // Emit a message and key/value pairs at the INFO level + Info(msg string, args ...interface{}) + + // Emit a message and key/value pairs at the WARN level + Warn(msg string, args ...interface{}) + + // Emit a message and key/value pairs at the ERROR level + Error(msg string, args ...interface{}) + + // Indicate if TRACE logs would be emitted. This and the other Is* guards + // are used to elide expensive logging code based on the current level. + IsTrace() bool + + // Indicate if DEBUG logs would be emitted. This and the other Is* guards + IsDebug() bool + + // Indicate if INFO logs would be emitted. This and the other Is* guards + IsInfo() bool + + // Indicate if WARN logs would be emitted. This and the other Is* guards + IsWarn() bool + + // Indicate if ERROR logs would be emitted. This and the other Is* guards + IsError() bool + + // Creates a sublogger that will always have the given key/value pairs + With(args ...interface{}) Logger + + // Create a logger that will prepend the name string on the front of all messages. + // If the logger already has a name, the new value will be appended to the current + // name. That way, a major subsystem can use this to decorate all it's own logs + // without losing context. + Named(name string) Logger + + // Create a logger that will prepend the name string on the front of all messages. + // This sets the name of the logger to the value directly, unlike Named which honor + // the current name as well. + ResetNamed(name string) Logger + + // Updates the level. This should affect all sub-loggers as well. If an + // implementation cannot update the level on the fly, it should no-op. + SetLevel(level Level) + + // Return a value that conforms to the stdlib log.Logger interface + StandardLogger(opts *StandardLoggerOptions) *log.Logger + + // Return a value that conforms to io.Writer, which can be passed into log.SetOutput() + StandardWriter(opts *StandardLoggerOptions) io.Writer +} + +// StandardLoggerOptions can be used to configure a new standard logger. 
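The Logger interface above is what the rest of this patch programs against. A short sketch of the sub-logger and level helpers follows; the "lyra" name and the LYRA_LOG_LEVEL variable are placeholders, and the fallback-to-Info behaviour assumes hclog's usual default handling when LevelFromString returns NoLevel:

package main

import (
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

func main() {
	// LevelFromString returns NoLevel for unrecognized input, in which case
	// the constructor falls back to the package default (Info).
	root := hclog.New(&hclog.LoggerOptions{
		Name:   "lyra",
		Level:  hclog.LevelFromString(os.Getenv("LYRA_LOG_LEVEL")),
		Output: os.Stderr,
	})

	// Named appends to the current name; With pins key/value pairs that are
	// repeated on every message emitted by the sub-logger.
	wf := root.Named("workflow").With("workflow", "vpc")
	wf.Info("applying", "step", "create_subnet")

	// SetLevel also affects sub-loggers, which share the parent's level.
	root.SetLevel(hclog.Debug)

	// A Format value defers fmt.Sprintf expansion to the logger.
	wf.Debug("step finished", "elapsed", hclog.Fmt("%dms", 42))
}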
+type StandardLoggerOptions struct { + // Indicate that some minimal parsing should be done on strings to try + // and detect their level and re-emit them. + // This supports the strings like [ERROR], [ERR] [TRACE], [WARN], [INFO], + // [DEBUG] and strip it off before reapplying it. + InferLevels bool + + // ForceLevel is used to force all output from the standard logger to be at + // the specified level. Similar to InferLevels, this will strip any level + // prefix contained in the logged string before applying the forced level. + // If set, this override InferLevels. + ForceLevel Level +} + +// LoggerOptions can be used to configure a new logger. +type LoggerOptions struct { + // Name of the subsystem to prefix logs with + Name string + + // The threshold for the logger. Anything less severe is supressed + Level Level + + // Where to write the logs to. Defaults to os.Stderr if nil + Output io.Writer + + // An optional mutex pointer in case Output is shared + Mutex *sync.Mutex + + // Control if the output should be in JSON. + JSONFormat bool + + // Include file and line information in each log line + IncludeLocation bool + + // The time format to use instead of the default + TimeFormat string +} diff --git a/vendor/github.com/hashicorp/go-hclog/nulllogger.go b/vendor/github.com/hashicorp/go-hclog/nulllogger.go new file mode 100644 index 0000000..7ad6b35 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/nulllogger.go @@ -0,0 +1,52 @@ +package hclog + +import ( + "io" + "io/ioutil" + "log" +) + +// NewNullLogger instantiates a Logger for which all calls +// will succeed without doing anything. +// Useful for testing purposes. +func NewNullLogger() Logger { + return &nullLogger{} +} + +type nullLogger struct{} + +func (l *nullLogger) Trace(msg string, args ...interface{}) {} + +func (l *nullLogger) Debug(msg string, args ...interface{}) {} + +func (l *nullLogger) Info(msg string, args ...interface{}) {} + +func (l *nullLogger) Warn(msg string, args ...interface{}) {} + +func (l *nullLogger) Error(msg string, args ...interface{}) {} + +func (l *nullLogger) IsTrace() bool { return false } + +func (l *nullLogger) IsDebug() bool { return false } + +func (l *nullLogger) IsInfo() bool { return false } + +func (l *nullLogger) IsWarn() bool { return false } + +func (l *nullLogger) IsError() bool { return false } + +func (l *nullLogger) With(args ...interface{}) Logger { return l } + +func (l *nullLogger) Named(name string) Logger { return l } + +func (l *nullLogger) ResetNamed(name string) Logger { return l } + +func (l *nullLogger) SetLevel(level Level) {} + +func (l *nullLogger) StandardLogger(opts *StandardLoggerOptions) *log.Logger { + return log.New(l.StandardWriter(opts), "", log.LstdFlags) +} + +func (l *nullLogger) StandardWriter(opts *StandardLoggerOptions) io.Writer { + return ioutil.Discard +} diff --git a/vendor/github.com/hashicorp/go-hclog/stacktrace.go b/vendor/github.com/hashicorp/go-hclog/stacktrace.go new file mode 100644 index 0000000..9b27bd3 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/stacktrace.go @@ -0,0 +1,109 @@ +// Copyright (c) 2016 Uber Technologies, Inc. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package hclog + +import ( + "bytes" + "runtime" + "strconv" + "strings" + "sync" +) + +var ( + _stacktraceIgnorePrefixes = []string{ + "runtime.goexit", + "runtime.main", + } + _stacktracePool = sync.Pool{ + New: func() interface{} { + return newProgramCounters(64) + }, + } +) + +// CapturedStacktrace represents a stacktrace captured by a previous call +// to log.Stacktrace. If passed to a logging function, the stacktrace +// will be appended. +type CapturedStacktrace string + +// Stacktrace captures a stacktrace of the current goroutine and returns +// it to be passed to a logging function. +func Stacktrace() CapturedStacktrace { + return CapturedStacktrace(takeStacktrace()) +} + +func takeStacktrace() string { + programCounters := _stacktracePool.Get().(*programCounters) + defer _stacktracePool.Put(programCounters) + + var buffer bytes.Buffer + + for { + // Skip the call to runtime.Counters and takeStacktrace so that the + // program counters start at the caller of takeStacktrace. + n := runtime.Callers(2, programCounters.pcs) + if n < cap(programCounters.pcs) { + programCounters.pcs = programCounters.pcs[:n] + break + } + // Don't put the too-short counter slice back into the pool; this lets + // the pool adjust if we consistently take deep stacktraces. 
+ programCounters = newProgramCounters(len(programCounters.pcs) * 2) + } + + i := 0 + frames := runtime.CallersFrames(programCounters.pcs) + for frame, more := frames.Next(); more; frame, more = frames.Next() { + if shouldIgnoreStacktraceFunction(frame.Function) { + continue + } + if i != 0 { + buffer.WriteByte('\n') + } + i++ + buffer.WriteString(frame.Function) + buffer.WriteByte('\n') + buffer.WriteByte('\t') + buffer.WriteString(frame.File) + buffer.WriteByte(':') + buffer.WriteString(strconv.Itoa(int(frame.Line))) + } + + return buffer.String() +} + +func shouldIgnoreStacktraceFunction(function string) bool { + for _, prefix := range _stacktraceIgnorePrefixes { + if strings.HasPrefix(function, prefix) { + return true + } + } + return false +} + +type programCounters struct { + pcs []uintptr +} + +func newProgramCounters(size int) *programCounters { + return &programCounters{make([]uintptr, size)} +} diff --git a/vendor/github.com/hashicorp/go-hclog/stdlog.go b/vendor/github.com/hashicorp/go-hclog/stdlog.go new file mode 100644 index 0000000..044a469 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/stdlog.go @@ -0,0 +1,83 @@ +package hclog + +import ( + "bytes" + "strings" +) + +// Provides a io.Writer to shim the data out of *log.Logger +// and back into our Logger. This is basically the only way to +// build upon *log.Logger. +type stdlogAdapter struct { + log Logger + inferLevels bool + forceLevel Level +} + +// Take the data, infer the levels if configured, and send it through +// a regular Logger. +func (s *stdlogAdapter) Write(data []byte) (int, error) { + str := string(bytes.TrimRight(data, " \t\n")) + + if s.forceLevel != NoLevel { + // Use pickLevel to strip log levels included in the line since we are + // forcing the level + _, str := s.pickLevel(str) + + // Log at the forced level + switch s.forceLevel { + case Trace: + s.log.Trace(str) + case Debug: + s.log.Debug(str) + case Info: + s.log.Info(str) + case Warn: + s.log.Warn(str) + case Error: + s.log.Error(str) + default: + s.log.Info(str) + } + } else if s.inferLevels { + level, str := s.pickLevel(str) + switch level { + case Trace: + s.log.Trace(str) + case Debug: + s.log.Debug(str) + case Info: + s.log.Info(str) + case Warn: + s.log.Warn(str) + case Error: + s.log.Error(str) + default: + s.log.Info(str) + } + } else { + s.log.Info(str) + } + + return len(data), nil +} + +// Detect, based on conventions, what log level this is. 
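The adapter above is what StandardLogger and StandardWriter (earlier in this package) hand out, so code that only knows the stdlib *log.Logger can still be routed through hclog. A sketch of both modes, with a placeholder logger name:

package main

import (
	"log"
	"os"

	hclog "github.com/hashicorp/go-hclog"
)

func main() {
	appLogger := hclog.New(&hclog.LoggerOptions{
		Name:   "lyra",
		Level:  hclog.Debug,
		Output: os.Stderr,
	})

	// StandardLogger wraps the adapter in a *log.Logger. With InferLevels the
	// "[WARN]" prefix below is detected, stripped, and the line re-emitted at
	// WARN through appLogger.
	std := appLogger.StandardLogger(&hclog.StandardLoggerOptions{InferLevels: true})
	std.Println("[WARN] plugin directory not found, using default")

	// StandardWriter can redirect the global standard logger. ForceLevel emits
	// every line at the given level, stripping any recognized prefix first.
	log.SetFlags(0) // drop the date prefix so the "[...]" tag starts the line
	log.SetOutput(appLogger.StandardWriter(&hclog.StandardLoggerOptions{
		ForceLevel: hclog.Debug,
	}))
	log.Println("[INFO] emitted at DEBUG regardless of the prefix")
}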
+func (s *stdlogAdapter) pickLevel(str string) (Level, string) { + switch { + case strings.HasPrefix(str, "[DEBUG]"): + return Debug, strings.TrimSpace(str[7:]) + case strings.HasPrefix(str, "[TRACE]"): + return Trace, strings.TrimSpace(str[7:]) + case strings.HasPrefix(str, "[INFO]"): + return Info, strings.TrimSpace(str[6:]) + case strings.HasPrefix(str, "[WARN]"): + return Warn, strings.TrimSpace(str[7:]) + case strings.HasPrefix(str, "[ERROR]"): + return Error, strings.TrimSpace(str[7:]) + case strings.HasPrefix(str, "[ERR]"): + return Error, strings.TrimSpace(str[5:]) + default: + return Info, str + } +} diff --git a/vendor/github.com/hashicorp/go-hclog/writer.go b/vendor/github.com/hashicorp/go-hclog/writer.go new file mode 100644 index 0000000..7e8ec72 --- /dev/null +++ b/vendor/github.com/hashicorp/go-hclog/writer.go @@ -0,0 +1,74 @@ +package hclog + +import ( + "bytes" + "io" +) + +type writer struct { + b bytes.Buffer + w io.Writer +} + +func newWriter(w io.Writer) *writer { + return &writer{w: w} +} + +func (w *writer) Flush(level Level) (err error) { + if lw, ok := w.w.(LevelWriter); ok { + _, err = lw.LevelWrite(level, w.b.Bytes()) + } else { + _, err = w.w.Write(w.b.Bytes()) + } + w.b.Reset() + return err +} + +func (w *writer) Write(p []byte) (int, error) { + return w.b.Write(p) +} + +func (w *writer) WriteByte(c byte) error { + return w.b.WriteByte(c) +} + +func (w *writer) WriteString(s string) (int, error) { + return w.b.WriteString(s) +} + +// LevelWriter is the interface that wraps the LevelWrite method. +type LevelWriter interface { + LevelWrite(level Level, p []byte) (n int, err error) +} + +// LeveledWriter writes all log messages to the standard writer, +// except for log levels that are defined in the overrides map. +type LeveledWriter struct { + standard io.Writer + overrides map[Level]io.Writer +} + +// NewLeveledWriter returns an initialized LeveledWriter. +// +// standard will be used as the default writer for all log levels, +// except for log levels that are defined in the overrides map. +func NewLeveledWriter(standard io.Writer, overrides map[Level]io.Writer) *LeveledWriter { + return &LeveledWriter{ + standard: standard, + overrides: overrides, + } +} + +// Write implements io.Writer. +func (lw *LeveledWriter) Write(p []byte) (int, error) { + return lw.standard.Write(p) +} + +// LevelWrite implements LevelWriter. +func (lw *LeveledWriter) LevelWrite(level Level, p []byte) (int, error) { + w, ok := lw.overrides[level] + if !ok { + w = lw.standard + } + return w.Write(p) +} diff --git a/vendor/github.com/hashicorp/go-plugin/LICENSE b/vendor/github.com/hashicorp/go-plugin/LICENSE new file mode 100644 index 0000000..82b4de9 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/LICENSE @@ -0,0 +1,353 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. 
that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. 
for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. 
Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. 
Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. 
Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. diff --git a/vendor/github.com/hashicorp/go-plugin/client.go b/vendor/github.com/hashicorp/go-plugin/client.go new file mode 100644 index 0000000..8118b58 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/client.go @@ -0,0 +1,1015 @@ +package plugin + +import ( + "bufio" + "context" + "crypto/subtle" + "crypto/tls" + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "hash" + "io" + "io/ioutil" + "net" + "os" + "os/exec" + "path/filepath" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" + + hclog "github.com/hashicorp/go-hclog" +) + +// If this is 1, then we've called CleanupClients. This can be used +// by plugin RPC implementations to change error behavior since you +// can expected network connection errors at this point. This should be +// read by using sync/atomic. +var Killed uint32 = 0 + +// This is a slice of the "managed" clients which are cleaned up when +// calling Cleanup +var managedClients = make([]*Client, 0, 5) +var managedClientsLock sync.Mutex + +// Error types +var ( + // ErrProcessNotFound is returned when a client is instantiated to + // reattach to an existing process and it isn't found. + ErrProcessNotFound = errors.New("Reattachment process not found") + + // ErrChecksumsDoNotMatch is returned when binary's checksum doesn't match + // the one provided in the SecureConfig. + ErrChecksumsDoNotMatch = errors.New("checksums did not match") + + // ErrSecureNoChecksum is returned when an empty checksum is provided to the + // SecureConfig. + ErrSecureConfigNoChecksum = errors.New("no checksum provided") + + // ErrSecureNoHash is returned when a nil Hash object is provided to the + // SecureConfig. + ErrSecureConfigNoHash = errors.New("no hash implementation provided") + + // ErrSecureConfigAndReattach is returned when both Reattach and + // SecureConfig are set. + ErrSecureConfigAndReattach = errors.New("only one of Reattach or SecureConfig can be set") +) + +// Client handles the lifecycle of a plugin application. It launches +// plugins, connects to them, dispenses interface implementations, and handles +// killing the process. +// +// Plugin hosts should use one Client for each plugin executable. To +// dispense a plugin type, use the `Client.Client` function, and then +// cal `Dispense`. This awkward API is mostly historical but is used to split +// the client that deals with subprocess management and the client that +// does RPC management. +// +// See NewClient and ClientConfig for using a Client. 
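The two-step API described above (Client.Client for the protocol client, then Dispense) is easiest to see in a condensed version of the examples/basic host that is vendored later in this patch; the handshake values, the "greeter" plugin name, and the ./plugin/greeter path are taken from that example:

package main

import (
	"fmt"
	"log"
	"os/exec"

	plugin "github.com/hashicorp/go-plugin"
	example "github.com/hashicorp/go-plugin/examples/basic/commons"
)

func main() {
	// One Client per plugin executable; Kill stops the subprocess.
	client := plugin.NewClient(&plugin.ClientConfig{
		HandshakeConfig: plugin.HandshakeConfig{
			ProtocolVersion:  1,
			MagicCookieKey:   "BASIC_PLUGIN",
			MagicCookieValue: "hello",
		},
		Plugins: map[string]plugin.Plugin{"greeter": &example.GreeterPlugin{}},
		Cmd:     exec.Command("./plugin/greeter"),
	})
	defer client.Kill()

	// Client() starts the process (if needed) and returns the protocol client.
	rpcClient, err := client.Client()
	if err != nil {
		log.Fatal(err)
	}

	// Dispense returns the implementation registered under the given name.
	raw, err := rpcClient.Dispense("greeter")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(raw.(example.Greeter).Greet())
}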
+type Client struct { + config *ClientConfig + exited bool + l sync.Mutex + address net.Addr + process *os.Process + client ClientProtocol + protocol Protocol + logger hclog.Logger + doneCtx context.Context + ctxCancel context.CancelFunc + negotiatedVersion int + + // clientWaitGroup is used to manage the lifecycle of the plugin management + // goroutines. + clientWaitGroup sync.WaitGroup + + // processKilled is used for testing only, to flag when the process was + // forcefully killed. + processKilled bool +} + +// NegotiatedVersion returns the protocol version negotiated with the server. +// This is only valid after Start() is called. +func (c *Client) NegotiatedVersion() int { + return c.negotiatedVersion +} + +// ClientConfig is the configuration used to initialize a new +// plugin client. After being used to initialize a plugin client, +// that configuration must not be modified again. +type ClientConfig struct { + // HandshakeConfig is the configuration that must match servers. + HandshakeConfig + + // Plugins are the plugins that can be consumed. + // The implied version of this PluginSet is the Handshake.ProtocolVersion. + Plugins PluginSet + + // VersionedPlugins is a map of PluginSets for specific protocol versions. + // These can be used to negotiate a compatible version between client and + // server. If this is set, Handshake.ProtocolVersion is not required. + VersionedPlugins map[int]PluginSet + + // One of the following must be set, but not both. + // + // Cmd is the unstarted subprocess for starting the plugin. If this is + // set, then the Client starts the plugin process on its own and connects + // to it. + // + // Reattach is configuration for reattaching to an existing plugin process + // that is already running. This isn't common. + Cmd *exec.Cmd + Reattach *ReattachConfig + + // SecureConfig is configuration for verifying the integrity of the + // executable. It can not be used with Reattach. + SecureConfig *SecureConfig + + // TLSConfig is used to enable TLS on the RPC client. + TLSConfig *tls.Config + + // Managed represents if the client should be managed by the + // plugin package or not. If true, then by calling CleanupClients, + // it will automatically be cleaned up. Otherwise, the client + // user is fully responsible for making sure to Kill all plugin + // clients. By default the client is _not_ managed. + Managed bool + + // The minimum and maximum port to use for communicating with + // the subprocess. If not set, this defaults to 10,000 and 25,000 + // respectively. + MinPort, MaxPort uint + + // StartTimeout is the timeout to wait for the plugin to say it + // has started successfully. + StartTimeout time.Duration + + // If non-nil, then the stderr of the client will be written to here + // (as well as the log). This is the original os.Stderr of the subprocess. + // This isn't the output of synced stderr. + Stderr io.Writer + + // SyncStdout, SyncStderr can be set to override the + // respective os.Std* values in the plugin. Care should be taken to + // avoid races here. If these are nil, then this will automatically be + // hooked up to os.Stdin, Stdout, and Stderr, respectively. + // + // If the default values (nil) are used, then this package will not + // sync any of these streams. + SyncStdout io.Writer + SyncStderr io.Writer + + // AllowedProtocols is a list of allowed protocols. If this isn't set, + // then only netrpc is allowed. 
This is so that older go-plugin systems + // can show friendly errors if they see a plugin with an unknown + // protocol. + // + // By setting this, you can cause an error immediately on plugin start + // if an unsupported protocol is used with a good error message. + // + // If this isn't set at all (nil value), then only net/rpc is accepted. + // This is done for legacy reasons. You must explicitly opt-in to + // new protocols. + AllowedProtocols []Protocol + + // Logger is the logger that the client will used. If none is provided, + // it will default to hclog's default logger. + Logger hclog.Logger + + // AutoMTLS has the client and server automatically negotiate mTLS for + // transport authentication. This ensures that only the original client will + // be allowed to connect to the server, and all other connections will be + // rejected. The client will also refuse to connect to any server that isn't + // the original instance started by the client. + // + // In this mode of operation, the client generates a one-time use tls + // certificate, sends the public x.509 certificate to the new server, and + // the server generates a one-time use tls certificate, and sends the public + // x.509 certificate back to the client. These are used to authenticate all + // rpc connections between the client and server. + // + // Setting AutoMTLS to true implies that the server must support the + // protocol, and correctly negotiate the tls certificates, or a connection + // failure will result. + // + // The client should not set TLSConfig, nor should the server set a + // TLSProvider, because AutoMTLS implies that a new certificate and tls + // configuration will be generated at startup. + // + // You cannot Reattach to a server with this option enabled. + AutoMTLS bool +} + +// ReattachConfig is used to configure a client to reattach to an +// already-running plugin process. You can retrieve this information by +// calling ReattachConfig on Client. +type ReattachConfig struct { + Protocol Protocol + Addr net.Addr + Pid int +} + +// SecureConfig is used to configure a client to verify the integrity of an +// executable before running. It does this by verifying the checksum is +// expected. Hash is used to specify the hashing method to use when checksumming +// the file. The configuration is verified by the client by calling the +// SecureConfig.Check() function. +// +// The host process should ensure the checksum was provided by a trusted and +// authoritative source. The binary should be installed in such a way that it +// can not be modified by an unauthorized user between the time of this check +// and the time of execution. +type SecureConfig struct { + Checksum []byte + Hash hash.Hash +} + +// Check takes the filepath to an executable and returns true if the checksum of +// the file matches the checksum provided in the SecureConfig. +func (s *SecureConfig) Check(filePath string) (bool, error) { + if len(s.Checksum) == 0 { + return false, ErrSecureConfigNoChecksum + } + + if s.Hash == nil { + return false, ErrSecureConfigNoHash + } + + file, err := os.Open(filePath) + if err != nil { + return false, err + } + defer file.Close() + + _, err = io.Copy(s.Hash, file) + if err != nil { + return false, err + } + + sum := s.Hash.Sum(nil) + + return subtle.ConstantTimeCompare(sum, s.Checksum) == 1, nil +} + +// This makes sure all the managed subprocesses are killed and properly +// logged. This should be called before the parent process running the +// plugins exits. 
+// +// This must only be called _once_. +func CleanupClients() { + // Set the killed to true so that we don't get unexpected panics + atomic.StoreUint32(&Killed, 1) + + // Kill all the managed clients in parallel and use a WaitGroup + // to wait for them all to finish up. + var wg sync.WaitGroup + managedClientsLock.Lock() + for _, client := range managedClients { + wg.Add(1) + + go func(client *Client) { + client.Kill() + wg.Done() + }(client) + } + managedClientsLock.Unlock() + + wg.Wait() +} + +// Creates a new plugin client which manages the lifecycle of an external +// plugin and gets the address for the RPC connection. +// +// The client must be cleaned up at some point by calling Kill(). If +// the client is a managed client (created with NewManagedClient) you +// can just call CleanupClients at the end of your program and they will +// be properly cleaned. +func NewClient(config *ClientConfig) (c *Client) { + if config.MinPort == 0 && config.MaxPort == 0 { + config.MinPort = 10000 + config.MaxPort = 25000 + } + + if config.StartTimeout == 0 { + config.StartTimeout = 1 * time.Minute + } + + if config.Stderr == nil { + config.Stderr = ioutil.Discard + } + + if config.SyncStdout == nil { + config.SyncStdout = ioutil.Discard + } + if config.SyncStderr == nil { + config.SyncStderr = ioutil.Discard + } + + if config.AllowedProtocols == nil { + config.AllowedProtocols = []Protocol{ProtocolNetRPC} + } + + if config.Logger == nil { + config.Logger = hclog.New(&hclog.LoggerOptions{ + Output: hclog.DefaultOutput, + Level: hclog.Trace, + Name: "plugin", + }) + } + + c = &Client{ + config: config, + logger: config.Logger, + } + if config.Managed { + managedClientsLock.Lock() + managedClients = append(managedClients, c) + managedClientsLock.Unlock() + } + + return +} + +// Client returns the protocol client for this connection. +// +// Subsequent calls to this will return the same client. +func (c *Client) Client() (ClientProtocol, error) { + _, err := c.Start() + if err != nil { + return nil, err + } + + c.l.Lock() + defer c.l.Unlock() + + if c.client != nil { + return c.client, nil + } + + switch c.protocol { + case ProtocolNetRPC: + c.client, err = newRPCClient(c) + + case ProtocolGRPC: + c.client, err = newGRPCClient(c.doneCtx, c) + + default: + return nil, fmt.Errorf("unknown server protocol: %s", c.protocol) + } + + if err != nil { + c.client = nil + return nil, err + } + + return c.client, nil +} + +// Tells whether or not the underlying process has exited. +func (c *Client) Exited() bool { + c.l.Lock() + defer c.l.Unlock() + return c.exited +} + +// killed is used in tests to check if a process failed to exit gracefully, and +// needed to be killed. +func (c *Client) killed() bool { + c.l.Lock() + defer c.l.Unlock() + return c.processKilled +} + +// End the executing subprocess (if it is running) and perform any cleanup +// tasks necessary such as capturing any remaining logs and so on. +// +// This method blocks until the process successfully exits. +// +// This method can safely be called multiple times. +func (c *Client) Kill() { + // Grab a lock to read some private fields. + c.l.Lock() + process := c.process + addr := c.address + c.l.Unlock() + + // If there is no process, there is nothing to kill. + if process == nil { + return + } + + defer func() { + // Wait for the all client goroutines to finish. + c.clientWaitGroup.Wait() + + // Make sure there is no reference to the old process after it has been + // killed. 
+ c.l.Lock() + c.process = nil + c.l.Unlock() + }() + + // We need to check for address here. It is possible that the plugin + // started (process != nil) but has no address (addr == nil) if the + // plugin failed at startup. If we do have an address, we need to close + // the plugin net connections. + graceful := false + if addr != nil { + // Close the client to cleanly exit the process. + client, err := c.Client() + if err == nil { + err = client.Close() + + // If there is no error, then we attempt to wait for a graceful + // exit. If there was an error, we assume that graceful cleanup + // won't happen and just force kill. + graceful = err == nil + if err != nil { + // If there was an error just log it. We're going to force + // kill in a moment anyways. + c.logger.Warn("error closing client during Kill", "err", err) + } + } else { + c.logger.Error("client", "error", err) + } + } + + // If we're attempting a graceful exit, then we wait for a short period + // of time to allow that to happen. To wait for this we just wait on the + // doneCh which would be closed if the process exits. + if graceful { + select { + case <-c.doneCtx.Done(): + c.logger.Debug("plugin exited") + return + case <-time.After(2 * time.Second): + } + } + + // If graceful exiting failed, just kill it + c.logger.Warn("plugin failed to exit gracefully") + process.Kill() + + c.l.Lock() + c.processKilled = true + c.l.Unlock() +} + +// Starts the underlying subprocess, communicating with it to negotiate +// a port for RPC connections, and returning the address to connect via RPC. +// +// This method is safe to call multiple times. Subsequent calls have no effect. +// Once a client has been started once, it cannot be started again, even if +// it was killed. +func (c *Client) Start() (addr net.Addr, err error) { + c.l.Lock() + defer c.l.Unlock() + + if c.address != nil { + return c.address, nil + } + + // If one of cmd or reattach isn't set, then it is an error. We wrap + // this in a {} for scoping reasons, and hopeful that the escape + // analysis will pop the stack here. + { + cmdSet := c.config.Cmd != nil + attachSet := c.config.Reattach != nil + secureSet := c.config.SecureConfig != nil + if cmdSet == attachSet { + return nil, fmt.Errorf("Only one of Cmd or Reattach must be set") + } + + if secureSet && attachSet { + return nil, ErrSecureConfigAndReattach + } + } + + if c.config.Reattach != nil { + return c.reattach() + } + + if c.config.VersionedPlugins == nil { + c.config.VersionedPlugins = make(map[int]PluginSet) + } + + // handle all plugins as versioned, using the handshake config as the default. + version := int(c.config.ProtocolVersion) + + // Make sure we're not overwriting a real version 0. If ProtocolVersion was + // non-zero, then we have to just assume the user made sure that + // VersionedPlugins doesn't conflict. + if _, ok := c.config.VersionedPlugins[version]; !ok && c.config.Plugins != nil { + c.config.VersionedPlugins[version] = c.config.Plugins + } + + var versionStrings []string + for v := range c.config.VersionedPlugins { + versionStrings = append(versionStrings, strconv.Itoa(v)) + } + + env := []string{ + fmt.Sprintf("%s=%s", c.config.MagicCookieKey, c.config.MagicCookieValue), + fmt.Sprintf("PLUGIN_MIN_PORT=%d", c.config.MinPort), + fmt.Sprintf("PLUGIN_MAX_PORT=%d", c.config.MaxPort), + fmt.Sprintf("PLUGIN_PROTOCOL_VERSIONS=%s", strings.Join(versionStrings, ",")), + } + + cmd := c.config.Cmd + cmd.Env = append(cmd.Env, os.Environ()...) + cmd.Env = append(cmd.Env, env...) 
+ cmd.Stdin = os.Stdin + + cmdStdout, err := cmd.StdoutPipe() + if err != nil { + return nil, err + } + cmdStderr, err := cmd.StderrPipe() + if err != nil { + return nil, err + } + + if c.config.SecureConfig != nil { + if ok, err := c.config.SecureConfig.Check(cmd.Path); err != nil { + return nil, fmt.Errorf("error verifying checksum: %s", err) + } else if !ok { + return nil, ErrChecksumsDoNotMatch + } + } + + // Setup a temporary certificate for client/server mtls, and send the public + // certificate to the plugin. + if c.config.AutoMTLS { + c.logger.Info("configuring client automatic mTLS") + certPEM, keyPEM, err := generateCert() + if err != nil { + c.logger.Error("failed to generate client certificate", "error", err) + return nil, err + } + cert, err := tls.X509KeyPair(certPEM, keyPEM) + if err != nil { + c.logger.Error("failed to parse client certificate", "error", err) + return nil, err + } + + cmd.Env = append(cmd.Env, fmt.Sprintf("PLUGIN_CLIENT_CERT=%s", certPEM)) + + c.config.TLSConfig = &tls.Config{ + Certificates: []tls.Certificate{cert}, + ServerName: "localhost", + } + } + + c.logger.Debug("starting plugin", "path", cmd.Path, "args", cmd.Args) + err = cmd.Start() + if err != nil { + return + } + + // Set the process + c.process = cmd.Process + c.logger.Debug("plugin started", "path", cmd.Path, "pid", c.process.Pid) + + // Make sure the command is properly cleaned up if there is an error + defer func() { + r := recover() + + if err != nil || r != nil { + cmd.Process.Kill() + } + + if r != nil { + panic(r) + } + }() + + // Create a context for when we kill + c.doneCtx, c.ctxCancel = context.WithCancel(context.Background()) + + c.clientWaitGroup.Add(1) + go func() { + // ensure the context is cancelled when we're done + defer c.ctxCancel() + + defer c.clientWaitGroup.Done() + + // get the cmd info early, since the process information will be removed + // in Kill. + pid := c.process.Pid + path := cmd.Path + + // Wait for the command to end. + err := cmd.Wait() + + debugMsgArgs := []interface{}{ + "path", path, + "pid", pid, + } + if err != nil { + debugMsgArgs = append(debugMsgArgs, + []interface{}{"error", err.Error()}...) + } + + // Log and make sure to flush the logs write away + c.logger.Debug("plugin process exited", debugMsgArgs...) + os.Stderr.Sync() + + // Set that we exited, which takes a lock + c.l.Lock() + defer c.l.Unlock() + c.exited = true + }() + + // Start goroutine that logs the stderr + c.clientWaitGroup.Add(1) + // logStderr calls Done() + go c.logStderr(cmdStderr) + + // Start a goroutine that is going to be reading the lines + // out of stdout + linesCh := make(chan string) + c.clientWaitGroup.Add(1) + go func() { + defer c.clientWaitGroup.Done() + defer close(linesCh) + + scanner := bufio.NewScanner(cmdStdout) + for scanner.Scan() { + linesCh <- scanner.Text() + } + }() + + // Make sure after we exit we read the lines from stdout forever + // so they don't block since it is a pipe. + // The scanner goroutine above will close this, but track it with a wait + // group for completeness. 
+ c.clientWaitGroup.Add(1) + defer func() { + go func() { + defer c.clientWaitGroup.Done() + for range linesCh { + } + }() + }() + + // Some channels for the next step + timeout := time.After(c.config.StartTimeout) + + // Start looking for the address + c.logger.Debug("waiting for RPC address", "path", cmd.Path) + select { + case <-timeout: + err = errors.New("timeout while waiting for plugin to start") + case <-c.doneCtx.Done(): + err = errors.New("plugin exited before we could connect") + case line := <-linesCh: + // Trim the line and split by "|" in order to get the parts of + // the output. + line = strings.TrimSpace(line) + parts := strings.SplitN(line, "|", 6) + if len(parts) < 4 { + err = fmt.Errorf( + "Unrecognized remote plugin message: %s\n\n"+ + "This usually means that the plugin is either invalid or simply\n"+ + "needs to be recompiled to support the latest protocol.", line) + return + } + + // Check the core protocol. Wrapped in a {} for scoping. + { + var coreProtocol int64 + coreProtocol, err = strconv.ParseInt(parts[0], 10, 0) + if err != nil { + err = fmt.Errorf("Error parsing core protocol version: %s", err) + return + } + + if int(coreProtocol) != CoreProtocolVersion { + err = fmt.Errorf("Incompatible core API version with plugin. "+ + "Plugin version: %s, Core version: %d\n\n"+ + "To fix this, the plugin usually only needs to be recompiled.\n"+ + "Please report this to the plugin author.", parts[0], CoreProtocolVersion) + return + } + } + + // Test the API version + version, pluginSet, err := c.checkProtoVersion(parts[1]) + if err != nil { + return addr, err + } + + // set the Plugins value to the compatible set, so the version + // doesn't need to be passed through to the ClientProtocol + // implementation. + c.config.Plugins = pluginSet + c.negotiatedVersion = version + c.logger.Debug("using plugin", "version", version) + + switch parts[2] { + case "tcp": + addr, err = net.ResolveTCPAddr("tcp", parts[3]) + case "unix": + addr, err = net.ResolveUnixAddr("unix", parts[3]) + default: + err = fmt.Errorf("Unknown address type: %s", parts[3]) + } + + // If we have a server type, then record that. We default to net/rpc + // for backwards compatibility. + c.protocol = ProtocolNetRPC + if len(parts) >= 5 { + c.protocol = Protocol(parts[4]) + } + + found := false + for _, p := range c.config.AllowedProtocols { + if p == c.protocol { + found = true + break + } + } + if !found { + err = fmt.Errorf("Unsupported plugin protocol %q. Supported: %v", + c.protocol, c.config.AllowedProtocols) + return addr, err + } + + // See if we have a TLS certificate from the server. + // Checking if the length is > 50 rules out catching the unused "extra" + // data returned from some older implementations. + if len(parts) >= 6 && len(parts[5]) > 50 { + err := c.loadServerCert(parts[5]) + if err != nil { + return nil, fmt.Errorf("error parsing server cert: %s", err) + } + } + } + + c.address = addr + return +} + +// loadServerCert is used by AutoMTLS to read an x.509 cert returned by the +// server, and load it as the RootCA for the client TLSConfig. +func (c *Client) loadServerCert(cert string) error { + certPool := x509.NewCertPool() + + asn1, err := base64.RawStdEncoding.DecodeString(cert) + if err != nil { + return err + } + + x509Cert, err := x509.ParseCertificate([]byte(asn1)) + if err != nil { + return err + } + + certPool.AddCert(x509Cert) + + c.config.TLSConfig.RootCAs = certPool + return nil +} + +func (c *Client) reattach() (net.Addr, error) { + // Verify the process still exists. 
If not, then it is an error + p, err := os.FindProcess(c.config.Reattach.Pid) + if err != nil { + return nil, err + } + + // Attempt to connect to the addr since on Unix systems FindProcess + // doesn't actually return an error if it can't find the process. + conn, err := net.Dial( + c.config.Reattach.Addr.Network(), + c.config.Reattach.Addr.String()) + if err != nil { + p.Kill() + return nil, ErrProcessNotFound + } + conn.Close() + + // Create a context for when we kill + c.doneCtx, c.ctxCancel = context.WithCancel(context.Background()) + + c.clientWaitGroup.Add(1) + // Goroutine to mark exit status + go func(pid int) { + defer c.clientWaitGroup.Done() + + // ensure the context is cancelled when we're done + defer c.ctxCancel() + + // Wait for the process to die + pidWait(pid) + + // Log so we can see it + c.logger.Debug("reattached plugin process exited") + + // Mark it + c.l.Lock() + defer c.l.Unlock() + c.exited = true + }(p.Pid) + + // Set the address and process + c.address = c.config.Reattach.Addr + c.process = p + c.protocol = c.config.Reattach.Protocol + if c.protocol == "" { + // Default the protocol to net/rpc for backwards compatibility + c.protocol = ProtocolNetRPC + } + + return c.address, nil +} + +// checkProtoVersion returns the negotiated version and PluginSet. +// This returns an error if the server returned an incompatible protocol +// version, or an invalid handshake response. +func (c *Client) checkProtoVersion(protoVersion string) (int, PluginSet, error) { + serverVersion, err := strconv.Atoi(protoVersion) + if err != nil { + return 0, nil, fmt.Errorf("Error parsing protocol version %q: %s", protoVersion, err) + } + + // record these for the error message + var clientVersions []int + + // all versions, including the legacy ProtocolVersion have been added to + // the versions set + for version, plugins := range c.config.VersionedPlugins { + clientVersions = append(clientVersions, version) + + if serverVersion != version { + continue + } + return version, plugins, nil + } + + return 0, nil, fmt.Errorf("Incompatible API version with plugin. "+ + "Plugin version: %d, Client versions: %d", serverVersion, clientVersions) +} + +// ReattachConfig returns the information that must be provided to NewClient +// to reattach to the plugin process that this client started. This is +// useful for plugins that detach from their parent process. +// +// If this returns nil then the process hasn't been started yet. Please +// call Start or Client before calling this. +func (c *Client) ReattachConfig() *ReattachConfig { + c.l.Lock() + defer c.l.Unlock() + + if c.address == nil { + return nil + } + + if c.config.Cmd != nil && c.config.Cmd.Process == nil { + return nil + } + + // If we connected via reattach, just return the information as-is + if c.config.Reattach != nil { + return c.config.Reattach + } + + return &ReattachConfig{ + Protocol: c.protocol, + Addr: c.address, + Pid: c.config.Cmd.Process.Pid, + } +} + +// Protocol returns the protocol of server on the remote end. This will +// start the plugin process if it isn't already started. Errors from +// starting the plugin are surpressed and ProtocolInvalid is returned. It +// is recommended you call Start explicitly before calling Protocol to ensure +// no errors occur. 
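reattach and ReattachConfig above support hosts that restart while a plugin keeps running: the host records the ReattachConfig, and a later Client is built with Reattach instead of Cmd. An in-process sketch of the wiring, reusing the vendored greeter example types; in practice the ReattachConfig would be persisted and read back by the new host process:

package main

import (
	"log"
	"os/exec"

	plugin "github.com/hashicorp/go-plugin"
	example "github.com/hashicorp/go-plugin/examples/basic/commons"
)

var handshake = plugin.HandshakeConfig{
	ProtocolVersion:  1,
	MagicCookieKey:   "BASIC_PLUGIN",
	MagicCookieValue: "hello",
}

var pluginMap = map[string]plugin.Plugin{"greeter": &example.GreeterPlugin{}}

func main() {
	first := plugin.NewClient(&plugin.ClientConfig{
		HandshakeConfig: handshake,
		Plugins:         pluginMap,
		Cmd:             exec.Command("./plugin/greeter"),
	})
	if _, err := first.Start(); err != nil {
		log.Fatal(err)
	}

	// ReattachConfig carries the protocol, address and pid of the running
	// plugin process.
	reattach := first.ReattachConfig()

	// A later client reattaches by setting Reattach instead of Cmd; the two
	// are mutually exclusive.
	second := plugin.NewClient(&plugin.ClientConfig{
		HandshakeConfig: handshake,
		Plugins:         pluginMap,
		Reattach:        reattach,
	})
	if _, err := second.Client(); err != nil {
		log.Fatal(err)
	}
}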
+func (c *Client) Protocol() Protocol { + _, err := c.Start() + if err != nil { + return ProtocolInvalid + } + + return c.protocol +} + +func netAddrDialer(addr net.Addr) func(string, time.Duration) (net.Conn, error) { + return func(_ string, _ time.Duration) (net.Conn, error) { + // Connect to the client + conn, err := net.Dial(addr.Network(), addr.String()) + if err != nil { + return nil, err + } + if tcpConn, ok := conn.(*net.TCPConn); ok { + // Make sure to set keep alive so that the connection doesn't die + tcpConn.SetKeepAlive(true) + } + + return conn, nil + } +} + +// dialer is compatible with grpc.WithDialer and creates the connection +// to the plugin. +func (c *Client) dialer(_ string, timeout time.Duration) (net.Conn, error) { + conn, err := netAddrDialer(c.address)("", timeout) + if err != nil { + return nil, err + } + + // If we have a TLS config we wrap our connection. We only do this + // for net/rpc since gRPC uses its own mechanism for TLS. + if c.protocol == ProtocolNetRPC && c.config.TLSConfig != nil { + conn = tls.Client(conn, c.config.TLSConfig) + } + + return conn, nil +} + +var stdErrBufferSize = 64 * 1024 + +func (c *Client) logStderr(r io.Reader) { + defer c.clientWaitGroup.Done() + l := c.logger.Named(filepath.Base(c.config.Cmd.Path)) + + reader := bufio.NewReaderSize(r, stdErrBufferSize) + // continuation indicates the previous line was a prefix + continuation := false + + for { + line, isPrefix, err := reader.ReadLine() + switch { + case err == io.EOF: + return + case err != nil: + l.Error("reading plugin stderr", "error", err) + return + } + + c.config.Stderr.Write(line) + + // The line was longer than our max token size, so it's likely + // incomplete and won't unmarshal. + if isPrefix || continuation { + l.Debug(string(line)) + + // if we're finishing a continued line, add the newline back in + if !isPrefix { + c.config.Stderr.Write([]byte{'\n'}) + } + + continuation = isPrefix + continue + } + + c.config.Stderr.Write([]byte{'\n'}) + + entry, err := parseJSON(line) + // If output is not JSON format, print directly to Debug + if err != nil { + // Attempt to infer the desired log level from the commonly used + // string prefixes + switch line := string(line); { + case strings.HasPrefix("[TRACE]", line): + l.Trace(line) + case strings.HasPrefix("[DEBUG]", line): + l.Debug(line) + case strings.HasPrefix("[INFO]", line): + l.Info(line) + case strings.HasPrefix("[WARN]", line): + l.Warn(line) + case strings.HasPrefix("[ERROR]", line): + l.Error(line) + default: + l.Debug(line) + } + } else { + out := flattenKVPairs(entry.KVPairs) + + out = append(out, "timestamp", entry.Timestamp.Format(hclog.TimeFormat)) + switch hclog.LevelFromString(entry.Level) { + case hclog.Trace: + l.Trace(entry.Message, out...) + case hclog.Debug: + l.Debug(entry.Message, out...) + case hclog.Info: + l.Info(entry.Message, out...) + case hclog.Warn: + l.Warn(entry.Message, out...) + case hclog.Error: + l.Error(entry.Message, out...) + default: + // if there was no log level, it's likely this is unexpected + // json from something other than hclog, and we should output + // it verbatim. + l.Debug(string(line)) + } + } + } +} diff --git a/vendor/github.com/hashicorp/go-plugin/discover.go b/vendor/github.com/hashicorp/go-plugin/discover.go new file mode 100644 index 0000000..d22c566 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/discover.go @@ -0,0 +1,28 @@ +package plugin + +import ( + "path/filepath" +) + +// Discover discovers plugins that are in a given directory. 
+// +// The directory doesn't need to be absolute. For example, "." will work fine. +// +// This currently assumes any file matching the glob is a plugin. +// In the future this may be smarter about checking that a file is +// executable and so on. +// +// TODO: test +func Discover(glob, dir string) ([]string, error) { + var err error + + // Make the directory absolute if it isn't already + if !filepath.IsAbs(dir) { + dir, err = filepath.Abs(dir) + if err != nil { + return nil, err + } + } + + return filepath.Glob(filepath.Join(dir, glob)) +} diff --git a/vendor/github.com/hashicorp/go-plugin/error.go b/vendor/github.com/hashicorp/go-plugin/error.go new file mode 100644 index 0000000..22a7baa --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/error.go @@ -0,0 +1,24 @@ +package plugin + +// This is a type that wraps error types so that they can be messaged +// across RPC channels. Since "error" is an interface, we can't always +// gob-encode the underlying structure. This is a valid error interface +// implementer that we will push across. +type BasicError struct { + Message string +} + +// NewBasicError is used to create a BasicError. +// +// err is allowed to be nil. +func NewBasicError(err error) *BasicError { + if err == nil { + return nil + } + + return &BasicError{err.Error()} +} + +func (e *BasicError) Error() string { + return e.Message +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/basic/commons/greeter_interface.go b/vendor/github.com/hashicorp/go-plugin/examples/basic/commons/greeter_interface.go new file mode 100644 index 0000000..cc8dcf1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/basic/commons/greeter_interface.go @@ -0,0 +1,62 @@ +package example + +import ( + "net/rpc" + + "github.com/hashicorp/go-plugin" +) + +// Greeter is the interface that we're exposing as a plugin. +type Greeter interface { + Greet() string +} + +// Here is an implementation that talks over RPC +type GreeterRPC struct{ client *rpc.Client } + +func (g *GreeterRPC) Greet() string { + var resp string + err := g.client.Call("Plugin.Greet", new(interface{}), &resp) + if err != nil { + // You usually want your interfaces to return errors. If they don't, + // there isn't much other choice here. + panic(err) + } + + return resp +} + +// Here is the RPC server that GreeterRPC talks to, conforming to +// the requirements of net/rpc +type GreeterRPCServer struct { + // This is the real implementation + Impl Greeter +} + +func (s *GreeterRPCServer) Greet(args interface{}, resp *string) error { + *resp = s.Impl.Greet() + return nil +} + +// This is the implementation of plugin.Plugin so we can serve/consume this +// +// This has two methods: Server must return an RPC server for this plugin +// type. We construct a GreeterRPCServer for this. +// +// Client must return an implementation of our interface that communicates +// over an RPC client. We return GreeterRPC for this. +// +// Ignore MuxBroker. That is used to create more multiplexed streams on our +// plugin connection and is a more advanced use case. 
+type GreeterPlugin struct { + // Impl Injection + Impl Greeter +} + +func (p *GreeterPlugin) Server(*plugin.MuxBroker) (interface{}, error) { + return &GreeterRPCServer{Impl: p.Impl}, nil +} + +func (GreeterPlugin) Client(b *plugin.MuxBroker, c *rpc.Client) (interface{}, error) { + return &GreeterRPC{client: c}, nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/basic/main.go b/vendor/github.com/hashicorp/go-plugin/examples/basic/main.go new file mode 100644 index 0000000..394a3fc --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/basic/main.go @@ -0,0 +1,62 @@ +package main + +import ( + "fmt" + "log" + "os" + "os/exec" + + hclog "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/basic/commons" +) + +func main() { + // Create an hclog.Logger + logger := hclog.New(&hclog.LoggerOptions{ + Name: "plugin", + Output: os.Stdout, + Level: hclog.Debug, + }) + + // We're a host! Start by launching the plugin process. + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: handshakeConfig, + Plugins: pluginMap, + Cmd: exec.Command("./plugin/greeter"), + Logger: logger, + }) + defer client.Kill() + + // Connect via RPC + rpcClient, err := client.Client() + if err != nil { + log.Fatal(err) + } + + // Request the plugin + raw, err := rpcClient.Dispense("greeter") + if err != nil { + log.Fatal(err) + } + + // We should have a Greeter now! This feels like a normal interface + // implementation but is in fact over an RPC connection. + greeter := raw.(example.Greeter) + fmt.Println(greeter.Greet()) +} + +// handshakeConfigs are used to just do a basic handshake between +// a plugin and host. If the handshake fails, a user friendly error is shown. +// This prevents users from executing bad plugins or executing a plugin +// directory. It is a UX feature, not a security feature. +var handshakeConfig = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "BASIC_PLUGIN", + MagicCookieValue: "hello", +} + +// pluginMap is the map of plugins we can dispense. +var pluginMap = map[string]plugin.Plugin{ + "greeter": &example.GreeterPlugin{}, +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/basic/plugin/greeter_impl.go b/vendor/github.com/hashicorp/go-plugin/examples/basic/plugin/greeter_impl.go new file mode 100644 index 0000000..d60c26c --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/basic/plugin/greeter_impl.go @@ -0,0 +1,52 @@ +package main + +import ( + "os" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/basic/commons" +) + +// Here is a real implementation of Greeter +type GreeterHello struct { + logger hclog.Logger +} + +func (g *GreeterHello) Greet() string { + g.logger.Debug("message from GreeterHello.Greet") + return "Hello!" +} + +// handshakeConfigs are used to just do a basic handshake between +// a plugin and host. If the handshake fails, a user friendly error is shown. +// This prevents users from executing bad plugins or executing a plugin +// directory. It is a UX feature, not a security feature. +var handshakeConfig = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "BASIC_PLUGIN", + MagicCookieValue: "hello", +} + +func main() { + logger := hclog.New(&hclog.LoggerOptions{ + Level: hclog.Trace, + Output: os.Stderr, + JSONFormat: true, + }) + + greeter := &GreeterHello{ + logger: logger, + } + // pluginMap is the map of plugins we can dispense. 
+ var pluginMap = map[string]plugin.Plugin{ + "greeter": &example.GreeterPlugin{Impl: greeter}, + } + + logger.Debug("message from plugin", "foo", "bar") + + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: handshakeConfig, + Plugins: pluginMap, + }) +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/main.go b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/main.go new file mode 100644 index 0000000..92a1c8f --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/main.go @@ -0,0 +1,81 @@ +package main + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "strconv" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/bidirectional/shared" +) + +type addHelper struct{} + +func (*addHelper) Sum(a, b int64) (int64, error) { + return a + b, nil +} + +func main() { + // We don't want to see the plugin logs. + log.SetOutput(ioutil.Discard) + + // We're a host. Start by launching the plugin process. + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: shared.Handshake, + Plugins: shared.PluginMap, + Cmd: exec.Command("sh", "-c", os.Getenv("COUNTER_PLUGIN")), + AllowedProtocols: []plugin.Protocol{ + plugin.ProtocolNetRPC, plugin.ProtocolGRPC}, + }) + defer client.Kill() + + // Connect via RPC + rpcClient, err := client.Client() + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // Request the plugin + raw, err := rpcClient.Dispense("counter") + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // We should have a Counter store now! This feels like a normal interface + // implementation but is in fact over an RPC connection. + counter := raw.(shared.Counter) + + os.Args = os.Args[1:] + switch os.Args[0] { + case "get": + result, err := counter.Get(os.Args[1]) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + fmt.Println(result) + + case "put": + i, err := strconv.Atoi(os.Args[2]) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + err = counter.Put(os.Args[1], int64(i), &addHelper{}) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + default: + fmt.Println("Please only use 'get' or 'put'") + os.Exit(1) + } +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/plugin-go-grpc/main.go b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/plugin-go-grpc/main.go new file mode 100644 index 0000000..c6beb93 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/plugin-go-grpc/main.go @@ -0,0 +1,61 @@ +package main + +import ( + "encoding/json" + "io/ioutil" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/bidirectional/shared" +) + +// Here is a real implementation of KV that writes to a local file with +// the key name and the contents are the value of the key. 
+type Counter struct { +} + +type data struct { + Value int64 +} + +func (k *Counter) Put(key string, value int64, a shared.AddHelper) error { + v, _ := k.Get(key) + + r, err := a.Sum(v, value) + if err != nil { + return err + } + + buf, err := json.Marshal(&data{r}) + if err != nil { + return err + } + + return ioutil.WriteFile("kv_"+key, buf, 0644) +} + +func (k *Counter) Get(key string) (int64, error) { + dataRaw, err := ioutil.ReadFile("kv_" + key) + if err != nil { + return 0, err + } + + data := &data{} + err = json.Unmarshal(dataRaw, data) + if err != nil { + return 0, err + } + + return data.Value, nil +} + +func main() { + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: shared.Handshake, + Plugins: map[string]plugin.Plugin{ + "counter": &shared.CounterPlugin{Impl: &Counter{}}, + }, + + // A non-nil value here enables gRPC serving for this plugin... + GRPCServer: plugin.DefaultGRPCServer, + }) +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/proto/kv.pb.go b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/proto/kv.pb.go new file mode 100644 index 0000000..e71ec61 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/proto/kv.pb.go @@ -0,0 +1,350 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: kv.proto + +/* +Package proto is a generated protocol buffer package. + +It is generated from these files: + kv.proto + +It has these top-level messages: + GetRequest + GetResponse + PutRequest + Empty + SumRequest + SumResponse +*/ +package proto + +import proto1 "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto1.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto1.ProtoPackageIsVersion2 // please upgrade the proto package + +type GetRequest struct { + Key string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"` +} + +func (m *GetRequest) Reset() { *m = GetRequest{} } +func (m *GetRequest) String() string { return proto1.CompactTextString(m) } +func (*GetRequest) ProtoMessage() {} +func (*GetRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *GetRequest) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +type GetResponse struct { + Value int64 `protobuf:"varint,1,opt,name=value" json:"value,omitempty"` +} + +func (m *GetResponse) Reset() { *m = GetResponse{} } +func (m *GetResponse) String() string { return proto1.CompactTextString(m) } +func (*GetResponse) ProtoMessage() {} +func (*GetResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *GetResponse) GetValue() int64 { + if m != nil { + return m.Value + } + return 0 +} + +type PutRequest struct { + AddServer uint32 `protobuf:"varint,1,opt,name=add_server,json=addServer" json:"add_server,omitempty"` + Key string `protobuf:"bytes,2,opt,name=key" json:"key,omitempty"` + Value int64 `protobuf:"varint,3,opt,name=value" json:"value,omitempty"` +} + +func (m *PutRequest) Reset() { *m = PutRequest{} } +func (m *PutRequest) String() string { return proto1.CompactTextString(m) } +func (*PutRequest) ProtoMessage() {} +func (*PutRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *PutRequest) GetAddServer() uint32 { + if m != nil { + return m.AddServer + } + return 0 +} + +func (m *PutRequest) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *PutRequest) GetValue() int64 { + if m != nil { + return m.Value + } + return 0 +} + +type Empty struct { +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto1.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +type SumRequest struct { + A int64 `protobuf:"varint,1,opt,name=a" json:"a,omitempty"` + B int64 `protobuf:"varint,2,opt,name=b" json:"b,omitempty"` +} + +func (m *SumRequest) Reset() { *m = SumRequest{} } +func (m *SumRequest) String() string { return proto1.CompactTextString(m) } +func (*SumRequest) ProtoMessage() {} +func (*SumRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} } + +func (m *SumRequest) GetA() int64 { + if m != nil { + return m.A + } + return 0 +} + +func (m *SumRequest) GetB() int64 { + if m != nil { + return m.B + } + return 0 +} + +type SumResponse struct { + R int64 `protobuf:"varint,1,opt,name=r" json:"r,omitempty"` +} + +func (m *SumResponse) Reset() { *m = SumResponse{} } +func (m *SumResponse) String() string { return proto1.CompactTextString(m) } +func (*SumResponse) ProtoMessage() {} +func (*SumResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} } + +func (m *SumResponse) GetR() int64 { + if m != nil { + return m.R + } + return 0 +} + +func init() { + proto1.RegisterType((*GetRequest)(nil), "proto.GetRequest") + proto1.RegisterType((*GetResponse)(nil), "proto.GetResponse") + proto1.RegisterType((*PutRequest)(nil), "proto.PutRequest") + proto1.RegisterType((*Empty)(nil), "proto.Empty") + proto1.RegisterType((*SumRequest)(nil), "proto.SumRequest") + proto1.RegisterType((*SumResponse)(nil), "proto.SumResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for Counter service + +type CounterClient interface { + Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) + Put(ctx context.Context, in *PutRequest, opts ...grpc.CallOption) (*Empty, error) +} + +type counterClient struct { + cc *grpc.ClientConn +} + +func NewCounterClient(cc *grpc.ClientConn) CounterClient { + return &counterClient{cc} +} + +func (c *counterClient) Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) { + out := new(GetResponse) + err := grpc.Invoke(ctx, "/proto.Counter/Get", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *counterClient) Put(ctx context.Context, in *PutRequest, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := grpc.Invoke(ctx, "/proto.Counter/Put", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Server API for Counter service + +type CounterServer interface { + Get(context.Context, *GetRequest) (*GetResponse, error) + Put(context.Context, *PutRequest) (*Empty, error) +} + +func RegisterCounterServer(s *grpc.Server, srv CounterServer) { + s.RegisterService(&_Counter_serviceDesc, srv) +} + +func _Counter_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CounterServer).Get(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.Counter/Get", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CounterServer).Get(ctx, req.(*GetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Counter_Put_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PutRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CounterServer).Put(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.Counter/Put", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CounterServer).Put(ctx, req.(*PutRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Counter_serviceDesc = grpc.ServiceDesc{ + ServiceName: "proto.Counter", + HandlerType: (*CounterServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Get", + Handler: _Counter_Get_Handler, + }, + { + MethodName: "Put", + Handler: _Counter_Put_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "kv.proto", +} + +// Client API for AddHelper service + +type AddHelperClient interface { + Sum(ctx context.Context, in *SumRequest, opts ...grpc.CallOption) (*SumResponse, error) +} + +type addHelperClient struct { + cc *grpc.ClientConn +} + +func NewAddHelperClient(cc *grpc.ClientConn) AddHelperClient { + return &addHelperClient{cc} +} + +func (c *addHelperClient) Sum(ctx context.Context, in *SumRequest, opts ...grpc.CallOption) (*SumResponse, error) { + out := new(SumResponse) + err := grpc.Invoke(ctx, "/proto.AddHelper/Sum", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// Server API for AddHelper service + +type AddHelperServer interface { + Sum(context.Context, *SumRequest) (*SumResponse, error) +} + +func RegisterAddHelperServer(s *grpc.Server, srv AddHelperServer) { + s.RegisterService(&_AddHelper_serviceDesc, srv) +} + +func _AddHelper_Sum_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SumRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AddHelperServer).Sum(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.AddHelper/Sum", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AddHelperServer).Sum(ctx, req.(*SumRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AddHelper_serviceDesc = grpc.ServiceDesc{ + ServiceName: "proto.AddHelper", + HandlerType: (*AddHelperServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Sum", + Handler: _AddHelper_Sum_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "kv.proto", +} + +func init() { proto1.RegisterFile("kv.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 253 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x90, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0x89, 0x61, 0xad, 0xfb, 0xba, 0x82, 0x06, 0x0f, 0x52, 0x51, 0x24, 0x82, 0xf4, 0x20, + 0x3d, 0xd4, 0x93, 0x47, 0x11, 0xa9, 0xc7, 0x92, 0xfd, 0x00, 0x25, 0x4b, 0xe6, 0xd4, 0x6e, 0x77, + 0xcd, 0x26, 0x0b, 0xfd, 0xf6, 0xa5, 0xd9, 0x3f, 0xd9, 0x53, 0x32, 0x2f, 0x2f, 0xbf, 0x37, 0x33, + 0xb8, 0xd9, 0xb7, 0xab, 0xda, 0x56, 0xae, 0x12, 0x49, 0x38, 0xe4, 0x0b, 0xb0, 0x21, 0xa7, 0xe8, + 0xdf, 0x53, 0xe3, 0xc4, 0x1d, 0xf8, 0x9e, 0x4e, 0x8f, 0xec, 0x95, 0x2d, 0x53, 0x75, 0xb9, 0xca, + 0x37, 0xcc, 0xc3, 0x7b, 0x53, 0x57, 0xc7, 0x86, 0xc4, 0x03, 0x92, 0x56, 0x1f, 0x3c, 0x05, 0x0b, + 0x57, 0x5d, 0x21, 0x73, 0x60, 0xeb, 0x47, 0xc8, 0x33, 0xa0, 0x8d, 0xd9, 0x35, 0x64, 0x5b, 0xb2, + 0xc1, 0x78, 0xab, 0x52, 0x6d, 0x4c, 0x1e, 0x84, 0x21, 0xe3, 0x6a, 0xcc, 0x88, 0x50, 0x3e, 0x85, + 0xce, 0x90, 0xfc, 0x96, 0xb5, 0x3b, 0xc9, 0x25, 0x90, 0xfb, 0x72, 0xa0, 0x67, 0x60, 0xba, 0x4f, + 0x67, 0xfa, 0x52, 0x15, 0x01, 0xc5, 0x15, 0x2b, 0xe4, 0x13, 0xe6, 0xc1, 0xd9, 0x37, 0x9b, 0x81, + 0xd9, 0xc1, 0x6a, 0xd7, 0x3b, 0xcc, 0x7e, 0x2a, 0x7f, 0x74, 0x64, 0xc5, 0x07, 0xf8, 0x86, 0x9c, + 0xb8, 0xef, 0x56, 0xb1, 0x8a, 0x0b, 0x58, 0x88, 0xa9, 0xd4, 0x63, 0xde, 0xc1, 0xb7, 0x3e, 0xba, + 0xe3, 0xa4, 0x8b, 0xac, 0x97, 0x42, 0x9f, 0xeb, 0x2f, 0xa4, 0xdf, 0xc6, 0xfc, 0xd1, 0xa1, 0xee, + 0x22, 0x72, 0x5f, 0x8e, 0x9f, 0xe2, 0x00, 0x63, 0xc4, 0xa4, 0xd3, 0xe2, 0x3a, 0x48, 0x9f, 0xe7, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x40, 0xa3, 0x85, 0x07, 0x9f, 0x01, 0x00, 0x00, +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/grpc.go b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/grpc.go new file mode 100644 index 0000000..c2e54ba --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/grpc.go @@ -0,0 +1,103 @@ +package shared + +import ( + hclog "github.com/hashicorp/go-hclog" + plugin "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/bidirectional/proto" + "golang.org/x/net/context" + "google.golang.org/grpc" +) + +// GRPCClient is an implementation of KV that talks over RPC. 
+type GRPCClient struct { + broker *plugin.GRPCBroker + client proto.CounterClient +} + +func (m *GRPCClient) Put(key string, value int64, a AddHelper) error { + addHelperServer := &GRPCAddHelperServer{Impl: a} + + var s *grpc.Server + serverFunc := func(opts []grpc.ServerOption) *grpc.Server { + s = grpc.NewServer(opts...) + proto.RegisterAddHelperServer(s, addHelperServer) + + return s + } + + brokerID := m.broker.NextId() + go m.broker.AcceptAndServe(brokerID, serverFunc) + + _, err := m.client.Put(context.Background(), &proto.PutRequest{ + AddServer: brokerID, + Key: key, + Value: value, + }) + + s.Stop() + return err +} + +func (m *GRPCClient) Get(key string) (int64, error) { + resp, err := m.client.Get(context.Background(), &proto.GetRequest{ + Key: key, + }) + if err != nil { + return 0, err + } + + return resp.Value, nil +} + +// Here is the gRPC server that GRPCClient talks to. +type GRPCServer struct { + // This is the real implementation + Impl Counter + + broker *plugin.GRPCBroker +} + +func (m *GRPCServer) Put(ctx context.Context, req *proto.PutRequest) (*proto.Empty, error) { + conn, err := m.broker.Dial(req.AddServer) + if err != nil { + return nil, err + } + defer conn.Close() + + a := &GRPCAddHelperClient{proto.NewAddHelperClient(conn)} + return &proto.Empty{}, m.Impl.Put(req.Key, req.Value, a) +} + +func (m *GRPCServer) Get(ctx context.Context, req *proto.GetRequest) (*proto.GetResponse, error) { + v, err := m.Impl.Get(req.Key) + return &proto.GetResponse{Value: v}, err +} + +// GRPCClient is an implementation of KV that talks over RPC. +type GRPCAddHelperClient struct{ client proto.AddHelperClient } + +func (m *GRPCAddHelperClient) Sum(a, b int64) (int64, error) { + resp, err := m.client.Sum(context.Background(), &proto.SumRequest{ + A: a, + B: b, + }) + if err != nil { + hclog.Default().Info("add.Sum", "client", "start", "err", err) + return 0, err + } + return resp.R, err +} + +// Here is the gRPC server that GRPCClient talks to. +type GRPCAddHelperServer struct { + // This is the real implementation + Impl AddHelper +} + +func (m *GRPCAddHelperServer) Sum(ctx context.Context, req *proto.SumRequest) (resp *proto.SumResponse, err error) { + r, err := m.Impl.Sum(req.A, req.B) + if err != nil { + return nil, err + } + return &proto.SumResponse{R: r}, err +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/interface.go b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/interface.go new file mode 100644 index 0000000..f4ecb57 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/bidirectional/shared/interface.go @@ -0,0 +1,59 @@ +// Package shared contains shared data between the host and plugins. +package shared + +import ( + "golang.org/x/net/context" + "google.golang.org/grpc" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/bidirectional/proto" +) + +// Handshake is a common handshake that is shared by plugin and host. +var Handshake = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "BASIC_PLUGIN", + MagicCookieValue: "hello", +} + +// PluginMap is the map of plugins we can dispense. +var PluginMap = map[string]plugin.Plugin{ + "counter": &CounterPlugin{}, +} + +type AddHelper interface { + Sum(int64, int64) (int64, error) +} + +// KV is the interface that we're exposing as a plugin. 
+type Counter interface { + Put(key string, value int64, a AddHelper) error + Get(key string) (int64, error) +} + +// This is the implementation of plugin.Plugin so we can serve/consume this. +// We also implement GRPCPlugin so that this plugin can be served over +// gRPC. +type CounterPlugin struct { + plugin.NetRPCUnsupportedPlugin + // Concrete implementation, written in Go. This is only used for plugins + // that are written in Go. + Impl Counter +} + +func (p *CounterPlugin) GRPCServer(broker *plugin.GRPCBroker, s *grpc.Server) error { + proto.RegisterCounterServer(s, &GRPCServer{ + Impl: p.Impl, + broker: broker, + }) + return nil +} + +func (p *CounterPlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, c *grpc.ClientConn) (interface{}, error) { + return &GRPCClient{ + client: proto.NewCounterClient(c), + broker: broker, + }, nil +} + +var _ plugin.GRPCPlugin = &CounterPlugin{} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/main.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/main.go new file mode 100644 index 0000000..cc91d71 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/main.go @@ -0,0 +1,68 @@ +package main + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/shared" +) + +func main() { + // We don't want to see the plugin logs. + log.SetOutput(ioutil.Discard) + + // We're a host. Start by launching the plugin process. + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: shared.Handshake, + Plugins: shared.PluginMap, + Cmd: exec.Command("sh", "-c", os.Getenv("KV_PLUGIN")), + AllowedProtocols: []plugin.Protocol{ + plugin.ProtocolNetRPC, plugin.ProtocolGRPC}, + }) + defer client.Kill() + + // Connect via RPC + rpcClient, err := client.Client() + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // Request the plugin + raw, err := rpcClient.Dispense("kv_grpc") + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // We should have a KV store now! This feels like a normal interface + // implementation but is in fact over an RPC connection. + kv := raw.(shared.KV) + os.Args = os.Args[1:] + switch os.Args[0] { + case "get": + result, err := kv.Get(os.Args[1]) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + fmt.Println(string(result)) + + case "put": + err := kv.Put(os.Args[1], []byte(os.Args[2])) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + default: + fmt.Printf("Please only use 'get' or 'put', given: %q", os.Args[0]) + os.Exit(1) + } + os.Exit(0) +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-grpc/main.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-grpc/main.go new file mode 100644 index 0000000..af0d153 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-grpc/main.go @@ -0,0 +1,34 @@ +package main + +import ( + "fmt" + "io/ioutil" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/shared" +) + +// Here is a real implementation of KV that writes to a local file with +// the key name and the contents are the value of the key. 
+type KV struct{} + +func (KV) Put(key string, value []byte) error { + value = []byte(fmt.Sprintf("%s\n\nWritten from plugin-go-grpc", string(value))) + return ioutil.WriteFile("kv_"+key, value, 0644) +} + +func (KV) Get(key string) ([]byte, error) { + return ioutil.ReadFile("kv_" + key) +} + +func main() { + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: shared.Handshake, + Plugins: map[string]plugin.Plugin{ + "kv": &shared.KVGRPCPlugin{Impl: &KV{}}, + }, + + // A non-nil value here enables gRPC serving for this plugin... + GRPCServer: plugin.DefaultGRPCServer, + }) +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-netrpc/main.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-netrpc/main.go new file mode 100644 index 0000000..b3bb843 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/plugin-go-netrpc/main.go @@ -0,0 +1,31 @@ +package main + +import ( + "fmt" + "io/ioutil" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/shared" +) + +// Here is a real implementation of KV that writes to a local file with +// the key name and the contents are the value of the key. +type KV struct{} + +func (KV) Put(key string, value []byte) error { + value = []byte(fmt.Sprintf("%s\n\nWritten from plugin-go-netrpc", string(value))) + return ioutil.WriteFile("kv_"+key, value, 0644) +} + +func (KV) Get(key string) ([]byte, error) { + return ioutil.ReadFile("kv_" + key) +} + +func main() { + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: shared.Handshake, + Plugins: map[string]plugin.Plugin{ + "kv": &shared.KVPlugin{Impl: &KV{}}, + }, + }) +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/proto/kv.pb.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/proto/kv.pb.go new file mode 100644 index 0000000..6ddbdaf --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/proto/kv.pb.go @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: kv.proto + +/* +Package proto is a generated protocol buffer package. + +It is generated from these files: + kv.proto + +It has these top-level messages: + GetRequest + GetResponse + PutRequest + Empty +*/ +package proto + +import proto1 "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto1.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto1.ProtoPackageIsVersion2 // please upgrade the proto package + +type GetRequest struct { + Key string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"` +} + +func (m *GetRequest) Reset() { *m = GetRequest{} } +func (m *GetRequest) String() string { return proto1.CompactTextString(m) } +func (*GetRequest) ProtoMessage() {} +func (*GetRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *GetRequest) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +type GetResponse struct { + Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *GetResponse) Reset() { *m = GetResponse{} } +func (m *GetResponse) String() string { return proto1.CompactTextString(m) } +func (*GetResponse) ProtoMessage() {} +func (*GetResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *GetResponse) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +type PutRequest struct { + Key string `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` +} + +func (m *PutRequest) Reset() { *m = PutRequest{} } +func (m *PutRequest) String() string { return proto1.CompactTextString(m) } +func (*PutRequest) ProtoMessage() {} +func (*PutRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +func (m *PutRequest) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *PutRequest) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +type Empty struct { +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto1.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func init() { + proto1.RegisterType((*GetRequest)(nil), "proto.GetRequest") + proto1.RegisterType((*GetResponse)(nil), "proto.GetResponse") + proto1.RegisterType((*PutRequest)(nil), "proto.PutRequest") + proto1.RegisterType((*Empty)(nil), "proto.Empty") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for KV service + +type KVClient interface { + Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) + Put(ctx context.Context, in *PutRequest, opts ...grpc.CallOption) (*Empty, error) +} + +type kVClient struct { + cc *grpc.ClientConn +} + +func NewKVClient(cc *grpc.ClientConn) KVClient { + return &kVClient{cc} +} + +func (c *kVClient) Get(ctx context.Context, in *GetRequest, opts ...grpc.CallOption) (*GetResponse, error) { + out := new(GetResponse) + err := grpc.Invoke(ctx, "/proto.KV/Get", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *kVClient) Put(ctx context.Context, in *PutRequest, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := grpc.Invoke(ctx, "/proto.KV/Put", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// Server API for KV service + +type KVServer interface { + Get(context.Context, *GetRequest) (*GetResponse, error) + Put(context.Context, *PutRequest) (*Empty, error) +} + +func RegisterKVServer(s *grpc.Server, srv KVServer) { + s.RegisterService(&_KV_serviceDesc, srv) +} + +func _KV_Get_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KVServer).Get(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.KV/Get", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KVServer).Get(ctx, req.(*GetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KV_Put_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PutRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KVServer).Put(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/proto.KV/Put", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KVServer).Put(ctx, req.(*PutRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KV_serviceDesc = grpc.ServiceDesc{ + ServiceName: "proto.KV", + HandlerType: (*KVServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Get", + Handler: _KV_Get_Handler, + }, + { + MethodName: "Put", + Handler: _KV_Put_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "kv.proto", +} + +func init() { proto1.RegisterFile("kv.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 162 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0xc8, 0x2e, 0xd3, 0x2b, + 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x05, 0x53, 0x4a, 0x72, 0x5c, 0x5c, 0xee, 0xa9, 0x25, 0x41, + 0xa9, 0x85, 0xa5, 0xa9, 0xc5, 0x25, 0x42, 0x02, 0x5c, 0xcc, 0xd9, 0xa9, 0x95, 0x12, 0x8c, 0x0a, + 0x8c, 0x1a, 0x9c, 0x41, 0x20, 0xa6, 0x92, 0x32, 0x17, 0x37, 0x58, 0xbe, 0xb8, 0x20, 0x3f, 0xaf, + 0x38, 0x55, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7, 0x34, 0x15, 0xac, 0x84, 0x27, 0x08, 0xc2, + 0x51, 0x32, 0xe1, 0xe2, 0x0a, 0x28, 0xc5, 0x6d, 0x08, 0x42, 0x17, 0x13, 0xb2, 0x2e, 0x76, 0x2e, + 0x56, 0xd7, 0xdc, 0x82, 0x92, 0x4a, 0xa3, 0x28, 0x2e, 0x26, 0xef, 0x30, 0x21, 0x1d, 0x2e, 0x66, + 0xf7, 0xd4, 0x12, 0x21, 0x41, 0x88, 0xfb, 0xf4, 0x10, 0xae, 0x92, 0x12, 0x42, 0x16, 0x82, 0x3a, + 0x44, 0x8d, 0x8b, 0x39, 0xa0, 0x14, 0xa1, 0x1a, 0x61, 0xbd, 0x14, 0x0f, 0x54, 0x08, 0x6c, 0x76, + 0x12, 0x1b, 0x98, 0x63, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x06, 0x32, 0x05, 0x89, 0xf9, 0x00, + 0x00, 0x00, +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/grpc.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/grpc.go new file mode 100644 index 0000000..4b532e8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/grpc.go @@ -0,0 +1,47 @@ +package shared + +import ( + "github.com/hashicorp/go-plugin/examples/grpc/proto" + "golang.org/x/net/context" +) + +// GRPCClient is an implementation of KV that talks over RPC. 
+type GRPCClient struct{ client proto.KVClient } + +func (m *GRPCClient) Put(key string, value []byte) error { + _, err := m.client.Put(context.Background(), &proto.PutRequest{ + Key: key, + Value: value, + }) + return err +} + +func (m *GRPCClient) Get(key string) ([]byte, error) { + resp, err := m.client.Get(context.Background(), &proto.GetRequest{ + Key: key, + }) + if err != nil { + return nil, err + } + + return resp.Value, nil +} + +// Here is the gRPC server that GRPCClient talks to. +type GRPCServer struct { + // This is the real implementation + Impl KV +} + +func (m *GRPCServer) Put( + ctx context.Context, + req *proto.PutRequest) (*proto.Empty, error) { + return &proto.Empty{}, m.Impl.Put(req.Key, req.Value) +} + +func (m *GRPCServer) Get( + ctx context.Context, + req *proto.GetRequest) (*proto.GetResponse, error) { + v, err := m.Impl.Get(req.Key) + return &proto.GetResponse{Value: v}, err +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/interface.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/interface.go new file mode 100644 index 0000000..969d35d --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/interface.go @@ -0,0 +1,65 @@ +// Package shared contains shared data between the host and plugins. +package shared + +import ( + "context" + "net/rpc" + + "google.golang.org/grpc" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/proto" +) + +// Handshake is a common handshake that is shared by plugin and host. +var Handshake = plugin.HandshakeConfig{ + // This isn't required when using VersionedPlugins + ProtocolVersion: 1, + MagicCookieKey: "BASIC_PLUGIN", + MagicCookieValue: "hello", +} + +// PluginMap is the map of plugins we can dispense. +var PluginMap = map[string]plugin.Plugin{ + "kv_grpc": &KVGRPCPlugin{}, + "kv": &KVPlugin{}, +} + +// KV is the interface that we're exposing as a plugin. +type KV interface { + Put(key string, value []byte) error + Get(key string) ([]byte, error) +} + +// This is the implementation of plugin.Plugin so we can serve/consume this. +type KVPlugin struct { + // Concrete implementation, written in Go. This is only used for plugins + // that are written in Go. + Impl KV +} + +func (p *KVPlugin) Server(*plugin.MuxBroker) (interface{}, error) { + return &RPCServer{Impl: p.Impl}, nil +} + +func (*KVPlugin) Client(b *plugin.MuxBroker, c *rpc.Client) (interface{}, error) { + return &RPCClient{client: c}, nil +} + +// This is the implementation of plugin.GRPCPlugin so we can serve/consume this. +type KVGRPCPlugin struct { + // GRPCPlugin must still implement the Plugin interface + plugin.Plugin + // Concrete implementation, written in Go. This is only used for plugins + // that are written in Go. + Impl KV +} + +func (p *KVGRPCPlugin) GRPCServer(broker *plugin.GRPCBroker, s *grpc.Server) error { + proto.RegisterKVServer(s, &GRPCServer{Impl: p.Impl}) + return nil +} + +func (p *KVGRPCPlugin) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, c *grpc.ClientConn) (interface{}, error) { + return &GRPCClient{client: proto.NewKVClient(c)}, nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/rpc.go b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/rpc.go new file mode 100644 index 0000000..000e62b --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/grpc/shared/rpc.go @@ -0,0 +1,42 @@ +package shared + +import ( + "net/rpc" +) + +// RPCClient is an implementation of KV that talks over RPC. 
+type RPCClient struct{ client *rpc.Client } + +func (m *RPCClient) Put(key string, value []byte) error { + // We don't expect a response, so we can just use interface{} + var resp interface{} + + // The args are just going to be a map. A struct could be better. + return m.client.Call("Plugin.Put", map[string]interface{}{ + "key": key, + "value": value, + }, &resp) +} + +func (m *RPCClient) Get(key string) ([]byte, error) { + var resp []byte + err := m.client.Call("Plugin.Get", key, &resp) + return resp, err +} + +// Here is the RPC server that RPCClient talks to, conforming to +// the requirements of net/rpc +type RPCServer struct { + // This is the real implementation + Impl KV +} + +func (m *RPCServer) Put(args map[string]interface{}, resp *interface{}) error { + return m.Impl.Put(args["key"].(string), args["value"].([]byte)) +} + +func (m *RPCServer) Get(key string, resp *[]byte) error { + v, err := m.Impl.Get(key) + *resp = v + return err +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/negotiated/main.go b/vendor/github.com/hashicorp/go-plugin/examples/negotiated/main.go new file mode 100644 index 0000000..e396640 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/negotiated/main.go @@ -0,0 +1,84 @@ +package main + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/shared" +) + +func main() { + // We don't want to see the plugin logs. + log.SetOutput(ioutil.Discard) + + plugins := map[int]plugin.PluginSet{} + + // Both version can be supported, but switch the implementation to + // demonstrate version negoation. + switch os.Getenv("KV_PROTO") { + case "netrpc": + plugins[2] = plugin.PluginSet{ + "kv": &shared.KVPlugin{}, + } + case "grpc": + plugins[3] = plugin.PluginSet{ + "kv": &shared.KVGRPCPlugin{}, + } + default: + fmt.Println("must set KV_PROTO to netrpc or grpc") + os.Exit(1) + } + + // We're a host. Start by launching the plugin process. + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: shared.Handshake, + VersionedPlugins: plugins, + Cmd: exec.Command("./kv-plugin"), + AllowedProtocols: []plugin.Protocol{ + plugin.ProtocolNetRPC, plugin.ProtocolGRPC}, + }) + defer client.Kill() + + rpcClient, err := client.Client() + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // Request the plugin + raw, err := rpcClient.Dispense("kv") + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + // We should have a KV store now! This feels like a normal interface + // implementation but is in fact over an RPC connection. 
+ kv := raw.(shared.KV) + os.Args = os.Args[1:] + switch os.Args[0] { + case "get": + result, err := kv.Get(os.Args[1]) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + fmt.Println(string(result)) + + case "put": + err := kv.Put(os.Args[1], []byte(os.Args[2])) + if err != nil { + fmt.Println("Error:", err.Error()) + os.Exit(1) + } + + default: + fmt.Println("Please only use 'get' or 'put'") + os.Exit(1) + } +} diff --git a/vendor/github.com/hashicorp/go-plugin/examples/negotiated/plugin-go/main.go b/vendor/github.com/hashicorp/go-plugin/examples/negotiated/plugin-go/main.go new file mode 100644 index 0000000..13e5e4d --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/examples/negotiated/plugin-go/main.go @@ -0,0 +1,62 @@ +package main + +import ( + "fmt" + "io/ioutil" + + "github.com/hashicorp/go-plugin" + "github.com/hashicorp/go-plugin/examples/grpc/shared" +) + +// Here is a real implementation of KV that uses grpc and writes to a local +// file with the key name and the contents are the value of the key. +type KVGRPC struct{} + +func (KVGRPC) Put(key string, value []byte) error { + value = []byte(fmt.Sprintf("%s\n\nWritten from plugin version 3\n", string(value))) + return ioutil.WriteFile("kv_"+key, value, 0644) +} + +func (KVGRPC) Get(key string) ([]byte, error) { + d, err := ioutil.ReadFile("kv_" + key) + if err != nil { + return nil, err + } + return append(d, []byte("Read by plugin version 3\n")...), nil +} + +// Here is a real implementation of KV that writes to a local file with +// the key name and the contents are the value of the key. +type KV struct{} + +func (KV) Put(key string, value []byte) error { + value = []byte(fmt.Sprintf("%s\n\nWritten from plugin version 2\n", string(value))) + return ioutil.WriteFile("kv_"+key, value, 0644) +} + +func (KV) Get(key string) ([]byte, error) { + d, err := ioutil.ReadFile("kv_" + key) + if err != nil { + return nil, err + } + return append(d, []byte("Read by plugin version 2\n")...), nil +} + +func main() { + plugin.Serve(&plugin.ServeConfig{ + HandshakeConfig: shared.Handshake, + VersionedPlugins: map[int]plugin.PluginSet{ + // Version 2 only uses NetRPC + 2: { + "kv": &shared.KVPlugin{Impl: &KV{}}, + }, + // Version 3 only uses GRPC + 3: { + "kv": &shared.KVGRPCPlugin{Impl: &KVGRPC{}}, + }, + }, + + // A non-nil value here enables gRPC serving for this plugin... + GRPCServer: plugin.DefaultGRPCServer, + }) +} diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_broker.go b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go new file mode 100644 index 0000000..daf142d --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/grpc_broker.go @@ -0,0 +1,457 @@ +package plugin + +import ( + "context" + "crypto/tls" + "errors" + "fmt" + "log" + "net" + "sync" + "sync/atomic" + "time" + + "github.com/hashicorp/go-plugin/internal/plugin" + + "github.com/oklog/run" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" +) + +// streamer interface is used in the broker to send/receive connection +// information. +type streamer interface { + Send(*plugin.ConnInfo) error + Recv() (*plugin.ConnInfo, error) + Close() +} + +// sendErr is used to pass errors back during a send. +type sendErr struct { + i *plugin.ConnInfo + ch chan error +} + +// gRPCBrokerServer is used by the plugin to start a stream and to send +// connection information to/from the plugin. Implements GRPCBrokerServer and +// streamer interfaces. 
+type gRPCBrokerServer struct { + // send is used to send connection info to the gRPC stream. + send chan *sendErr + + // recv is used to receive connection info from the gRPC stream. + recv chan *plugin.ConnInfo + + // quit closes down the stream. + quit chan struct{} + + // o is used to ensure we close the quit channel only once. + o sync.Once +} + +func newGRPCBrokerServer() *gRPCBrokerServer { + return &gRPCBrokerServer{ + send: make(chan *sendErr), + recv: make(chan *plugin.ConnInfo), + quit: make(chan struct{}), + } +} + +// StartStream implements the GRPCBrokerServer interface and will block until +// the quit channel is closed or the context reports Done. The stream will pass +// connection information to/from the client. +func (s *gRPCBrokerServer) StartStream(stream plugin.GRPCBroker_StartStreamServer) error { + doneCh := stream.Context().Done() + defer s.Close() + + // Proccess send stream + go func() { + for { + select { + case <-doneCh: + return + case <-s.quit: + return + case se := <-s.send: + err := stream.Send(se.i) + se.ch <- err + } + } + }() + + // Process receive stream + for { + i, err := stream.Recv() + if err != nil { + return err + } + select { + case <-doneCh: + return nil + case <-s.quit: + return nil + case s.recv <- i: + } + } + + return nil +} + +// Send is used by the GRPCBroker to pass connection information into the stream +// to the client. +func (s *gRPCBrokerServer) Send(i *plugin.ConnInfo) error { + ch := make(chan error) + defer close(ch) + + select { + case <-s.quit: + return errors.New("broker closed") + case s.send <- &sendErr{ + i: i, + ch: ch, + }: + } + + return <-ch +} + +// Recv is used by the GRPCBroker to pass connection information that has been +// sent from the client from the stream to the broker. +func (s *gRPCBrokerServer) Recv() (*plugin.ConnInfo, error) { + select { + case <-s.quit: + return nil, errors.New("broker closed") + case i := <-s.recv: + return i, nil + } +} + +// Close closes the quit channel, shutting down the stream. +func (s *gRPCBrokerServer) Close() { + s.o.Do(func() { + close(s.quit) + }) +} + +// gRPCBrokerClientImpl is used by the client to start a stream and to send +// connection information to/from the client. Implements GRPCBrokerClient and +// streamer interfaces. +type gRPCBrokerClientImpl struct { + // client is the underlying GRPC client used to make calls to the server. + client plugin.GRPCBrokerClient + + // send is used to send connection info to the gRPC stream. + send chan *sendErr + + // recv is used to receive connection info from the gRPC stream. + recv chan *plugin.ConnInfo + + // quit closes down the stream. + quit chan struct{} + + // o is used to ensure we close the quit channel only once. + o sync.Once +} + +func newGRPCBrokerClient(conn *grpc.ClientConn) *gRPCBrokerClientImpl { + return &gRPCBrokerClientImpl{ + client: plugin.NewGRPCBrokerClient(conn), + send: make(chan *sendErr), + recv: make(chan *plugin.ConnInfo), + quit: make(chan struct{}), + } +} + +// StartStream implements the GRPCBrokerClient interface and will block until +// the quit channel is closed or the context reports Done. The stream will pass +// connection information to/from the plugin. 
+func (s *gRPCBrokerClientImpl) StartStream() error { + ctx, cancelFunc := context.WithCancel(context.Background()) + defer cancelFunc() + defer s.Close() + + stream, err := s.client.StartStream(ctx) + if err != nil { + return err + } + doneCh := stream.Context().Done() + + go func() { + for { + select { + case <-doneCh: + return + case <-s.quit: + return + case se := <-s.send: + err := stream.Send(se.i) + se.ch <- err + } + } + }() + + for { + i, err := stream.Recv() + if err != nil { + return err + } + select { + case <-doneCh: + return nil + case <-s.quit: + return nil + case s.recv <- i: + } + } + + return nil +} + +// Send is used by the GRPCBroker to pass connection information into the stream +// to the plugin. +func (s *gRPCBrokerClientImpl) Send(i *plugin.ConnInfo) error { + ch := make(chan error) + defer close(ch) + + select { + case <-s.quit: + return errors.New("broker closed") + case s.send <- &sendErr{ + i: i, + ch: ch, + }: + } + + return <-ch +} + +// Recv is used by the GRPCBroker to pass connection information that has been +// sent from the plugin to the broker. +func (s *gRPCBrokerClientImpl) Recv() (*plugin.ConnInfo, error) { + select { + case <-s.quit: + return nil, errors.New("broker closed") + case i := <-s.recv: + return i, nil + } +} + +// Close closes the quit channel, shutting down the stream. +func (s *gRPCBrokerClientImpl) Close() { + s.o.Do(func() { + close(s.quit) + }) +} + +// GRPCBroker is responsible for brokering connections by unique ID. +// +// It is used by plugins to create multiple gRPC connections and data +// streams between the plugin process and the host process. +// +// This allows a plugin to request a channel with a specific ID to connect to +// or accept a connection from, and the broker handles the details of +// holding these channels open while they're being negotiated. +// +// The Plugin interface has access to these for both Server and Client. +// The broker can be used by either (optionally) to reserve and connect to +// new streams. This is useful for complex args and return values, +// or anything else you might need a data stream for. +type GRPCBroker struct { + nextId uint32 + streamer streamer + streams map[uint32]*gRPCBrokerPending + tls *tls.Config + doneCh chan struct{} + o sync.Once + + sync.Mutex +} + +type gRPCBrokerPending struct { + ch chan *plugin.ConnInfo + doneCh chan struct{} +} + +func newGRPCBroker(s streamer, tls *tls.Config) *GRPCBroker { + return &GRPCBroker{ + streamer: s, + streams: make(map[uint32]*gRPCBrokerPending), + tls: tls, + doneCh: make(chan struct{}), + } +} + +// Accept accepts a connection by ID. +// +// This should not be called multiple times with the same ID at one time. +func (b *GRPCBroker) Accept(id uint32) (net.Listener, error) { + listener, err := serverListener() + if err != nil { + return nil, err + } + + err = b.streamer.Send(&plugin.ConnInfo{ + ServiceId: id, + Network: listener.Addr().Network(), + Address: listener.Addr().String(), + }) + if err != nil { + return nil, err + } + + return listener, nil +} + +// AcceptAndServe is used to accept a specific stream ID and immediately +// serve a gRPC server on that stream ID. This is used to easily serve +// complex arguments. Each AcceptAndServe call opens a new listener socket and +// sends the connection info down the stream to the dialer. Since a new +// connection is opened every call, these calls should be used sparingly. +// Multiple gRPC server implementations can be registered to a single +// AcceptAndServe call. 
+func (b *GRPCBroker) AcceptAndServe(id uint32, s func([]grpc.ServerOption) *grpc.Server) { + listener, err := b.Accept(id) + if err != nil { + log.Printf("[ERR] plugin: plugin acceptAndServe error: %s", err) + return + } + defer listener.Close() + + var opts []grpc.ServerOption + if b.tls != nil { + opts = []grpc.ServerOption{grpc.Creds(credentials.NewTLS(b.tls))} + } + + server := s(opts) + + // Here we use a run group to close this goroutine if the server is shutdown + // or the broker is shutdown. + var g run.Group + { + // Serve on the listener, if shutting down call GracefulStop. + g.Add(func() error { + return server.Serve(listener) + }, func(err error) { + server.GracefulStop() + }) + } + { + // block on the closeCh or the doneCh. If we are shutting down close the + // closeCh. + closeCh := make(chan struct{}) + g.Add(func() error { + select { + case <-b.doneCh: + case <-closeCh: + } + return nil + }, func(err error) { + close(closeCh) + }) + } + + // Block until we are done + g.Run() +} + +// Close closes the stream and all servers. +func (b *GRPCBroker) Close() error { + b.streamer.Close() + b.o.Do(func() { + close(b.doneCh) + }) + return nil +} + +// Dial opens a connection by ID. +func (b *GRPCBroker) Dial(id uint32) (conn *grpc.ClientConn, err error) { + var c *plugin.ConnInfo + + // Open the stream + p := b.getStream(id) + select { + case c = <-p.ch: + close(p.doneCh) + case <-time.After(5 * time.Second): + return nil, fmt.Errorf("timeout waiting for connection info") + } + + var addr net.Addr + switch c.Network { + case "tcp": + addr, err = net.ResolveTCPAddr("tcp", c.Address) + case "unix": + addr, err = net.ResolveUnixAddr("unix", c.Address) + default: + err = fmt.Errorf("Unknown address type: %s", c.Address) + } + if err != nil { + return nil, err + } + + return dialGRPCConn(b.tls, netAddrDialer(addr)) +} + +// NextId returns a unique ID to use next. +// +// It is possible for very long-running plugin hosts to wrap this value, +// though it would require a very large amount of calls. In practice +// we've never seen it happen. +func (m *GRPCBroker) NextId() uint32 { + return atomic.AddUint32(&m.nextId, 1) +} + +// Run starts the brokering and should be executed in a goroutine, since it +// blocks forever, or until the session closes. +// +// Uses of GRPCBroker never need to call this. It is called internally by +// the plugin host/client. +func (m *GRPCBroker) Run() { + for { + stream, err := m.streamer.Recv() + if err != nil { + // Once we receive an error, just exit + break + } + + // Initialize the waiter + p := m.getStream(stream.ServiceId) + select { + case p.ch <- stream: + default: + } + + go m.timeoutWait(stream.ServiceId, p) + } +} + +func (m *GRPCBroker) getStream(id uint32) *gRPCBrokerPending { + m.Lock() + defer m.Unlock() + + p, ok := m.streams[id] + if ok { + return p + } + + m.streams[id] = &gRPCBrokerPending{ + ch: make(chan *plugin.ConnInfo, 1), + doneCh: make(chan struct{}), + } + return m.streams[id] +} + +func (m *GRPCBroker) timeoutWait(id uint32, p *gRPCBrokerPending) { + // Wait for the stream to either be picked up and connected, or + // for a timeout. 
+ select { + case <-p.doneCh: + case <-time.After(5 * time.Second): + } + + m.Lock() + defer m.Unlock() + + // Delete the stream so no one else can grab it + delete(m.streams, id) +} diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_client.go b/vendor/github.com/hashicorp/go-plugin/grpc_client.go new file mode 100644 index 0000000..294518e --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/grpc_client.go @@ -0,0 +1,111 @@ +package plugin + +import ( + "crypto/tls" + "fmt" + "net" + "time" + + "github.com/hashicorp/go-plugin/internal/plugin" + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/health/grpc_health_v1" +) + +func dialGRPCConn(tls *tls.Config, dialer func(string, time.Duration) (net.Conn, error)) (*grpc.ClientConn, error) { + // Build dialing options. + opts := make([]grpc.DialOption, 0, 5) + + // We use a custom dialer so that we can connect over unix domain sockets. + opts = append(opts, grpc.WithDialer(dialer)) + + // Fail right away + opts = append(opts, grpc.FailOnNonTempDialError(true)) + + // If we have no TLS configuration set, we need to explicitly tell grpc + // that we're connecting with an insecure connection. + if tls == nil { + opts = append(opts, grpc.WithInsecure()) + } else { + opts = append(opts, grpc.WithTransportCredentials( + credentials.NewTLS(tls))) + } + + // Connect. Note the first parameter is unused because we use a custom + // dialer that has the state to see the address. + conn, err := grpc.Dial("unused", opts...) + if err != nil { + return nil, err + } + + return conn, nil +} + +// newGRPCClient creates a new GRPCClient. The Client argument is expected +// to be successfully started already with a lock held. +func newGRPCClient(doneCtx context.Context, c *Client) (*GRPCClient, error) { + conn, err := dialGRPCConn(c.config.TLSConfig, c.dialer) + if err != nil { + return nil, err + } + + // Start the broker. + brokerGRPCClient := newGRPCBrokerClient(conn) + broker := newGRPCBroker(brokerGRPCClient, c.config.TLSConfig) + go broker.Run() + go brokerGRPCClient.StartStream() + + cl := &GRPCClient{ + Conn: conn, + Plugins: c.config.Plugins, + doneCtx: doneCtx, + broker: broker, + controller: plugin.NewGRPCControllerClient(conn), + } + + return cl, nil +} + +// GRPCClient connects to a GRPCServer over gRPC to dispense plugin types. +type GRPCClient struct { + Conn *grpc.ClientConn + Plugins map[string]Plugin + + doneCtx context.Context + broker *GRPCBroker + + controller plugin.GRPCControllerClient +} + +// ClientProtocol impl. +func (c *GRPCClient) Close() error { + c.broker.Close() + c.controller.Shutdown(c.doneCtx, &plugin.Empty{}) + return c.Conn.Close() +} + +// ClientProtocol impl. +func (c *GRPCClient) Dispense(name string) (interface{}, error) { + raw, ok := c.Plugins[name] + if !ok { + return nil, fmt.Errorf("unknown plugin type: %s", name) + } + + p, ok := raw.(GRPCPlugin) + if !ok { + return nil, fmt.Errorf("plugin %q doesn't support gRPC", name) + } + + return p.GRPCClient(c.doneCtx, c.broker, c.Conn) +} + +// ClientProtocol impl. 
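
Dispense, shown just above, is the call a host makes once the gRPC (or net/rpc) client is up. A hedged sketch of that host side, assuming a hypothetical Greeter interface registered under the name "greeter"; plugin.NewClient and (*Client).Client come from client.go elsewhere in this vendored package.

    package example

    import (
        "log"

        plugin "github.com/hashicorp/go-plugin"
    )

    // Greeter is a hypothetical interface the host expects plugins to implement.
    type Greeter interface {
        Greet(name string) (string, error)
    }

    // dispenseGreeter launches the plugin described by cfg, dispenses the
    // "greeter" plugin and asserts it back to the shared interface. The second
    // return value kills the plugin process when the host is done with it.
    func dispenseGreeter(cfg *plugin.ClientConfig) (Greeter, func()) {
        client := plugin.NewClient(cfg)

        proto, err := client.Client() // a ClientProtocol: GRPCClient or RPCClient
        if err != nil {
            client.Kill()
            log.Fatal(err)
        }

        raw, err := proto.Dispense("greeter")
        if err != nil {
            client.Kill()
            log.Fatal(err)
        }

        return raw.(Greeter), client.Kill
    }
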
+func (c *GRPCClient) Ping() error { + client := grpc_health_v1.NewHealthClient(c.Conn) + _, err := client.Check(context.Background(), &grpc_health_v1.HealthCheckRequest{ + Service: GRPCServiceName, + }) + + return err +} diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_controller.go b/vendor/github.com/hashicorp/go-plugin/grpc_controller.go new file mode 100644 index 0000000..1a8a8e7 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/grpc_controller.go @@ -0,0 +1,23 @@ +package plugin + +import ( + "context" + + "github.com/hashicorp/go-plugin/internal/plugin" +) + +// GRPCControllerServer handles shutdown calls to terminate the server when the +// plugin client is closed. +type grpcControllerServer struct { + server *GRPCServer +} + +// Shutdown stops the grpc server. It first will attempt a graceful stop, then a +// full stop on the server. +func (s *grpcControllerServer) Shutdown(ctx context.Context, _ *plugin.Empty) (*plugin.Empty, error) { + resp := &plugin.Empty{} + + // TODO: figure out why GracefullStop doesn't work. + s.server.Stop() + return resp, nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/grpc_server.go b/vendor/github.com/hashicorp/go-plugin/grpc_server.go new file mode 100644 index 0000000..d3dbf1c --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/grpc_server.go @@ -0,0 +1,142 @@ +package plugin + +import ( + "bytes" + "crypto/tls" + "encoding/json" + "fmt" + "io" + "net" + + hclog "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin/internal/plugin" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/health" + "google.golang.org/grpc/health/grpc_health_v1" +) + +// GRPCServiceName is the name of the service that the health check should +// return as passing. +const GRPCServiceName = "plugin" + +// DefaultGRPCServer can be used with the "GRPCServer" field for Server +// as a default factory method to create a gRPC server with no extra options. +func DefaultGRPCServer(opts []grpc.ServerOption) *grpc.Server { + return grpc.NewServer(opts...) +} + +// GRPCServer is a ServerType implementation that serves plugins over +// gRPC. This allows plugins to easily be written for other languages. +// +// The GRPCServer outputs a custom configuration as a base64-encoded +// JSON structure represented by the GRPCServerConfig config structure. +type GRPCServer struct { + // Plugins are the list of plugins to serve. + Plugins map[string]Plugin + + // Server is the actual server that will accept connections. This + // will be used for plugin registration as well. + Server func([]grpc.ServerOption) *grpc.Server + + // TLS should be the TLS configuration if available. If this is nil, + // the connection will not have transport security. + TLS *tls.Config + + // DoneCh is the channel that is closed when this server has exited. + DoneCh chan struct{} + + // Stdout/StderrLis are the readers for stdout/stderr that will be copied + // to the stdout/stderr connection that is output. + Stdout io.Reader + Stderr io.Reader + + config GRPCServerConfig + server *grpc.Server + broker *GRPCBroker + + logger hclog.Logger +} + +// ServerProtocol impl. 
+func (s *GRPCServer) Init() error { + // Create our server + var opts []grpc.ServerOption + if s.TLS != nil { + opts = append(opts, grpc.Creds(credentials.NewTLS(s.TLS))) + } + s.server = s.Server(opts) + + // Register the health service + healthCheck := health.NewServer() + healthCheck.SetServingStatus( + GRPCServiceName, grpc_health_v1.HealthCheckResponse_SERVING) + grpc_health_v1.RegisterHealthServer(s.server, healthCheck) + + // Register the broker service + brokerServer := newGRPCBrokerServer() + plugin.RegisterGRPCBrokerServer(s.server, brokerServer) + s.broker = newGRPCBroker(brokerServer, s.TLS) + go s.broker.Run() + + // Register the controller + controllerServer := &grpcControllerServer{ + server: s, + } + plugin.RegisterGRPCControllerServer(s.server, controllerServer) + + // Register all our plugins onto the gRPC server. + for k, raw := range s.Plugins { + p, ok := raw.(GRPCPlugin) + if !ok { + return fmt.Errorf("%q is not a GRPC-compatible plugin", k) + } + + if err := p.GRPCServer(s.broker, s.server); err != nil { + return fmt.Errorf("error registering %q: %s", k, err) + } + } + + return nil +} + +// Stop calls Stop on the underlying grpc.Server +func (s *GRPCServer) Stop() { + s.server.Stop() +} + +// GracefulStop calls GracefulStop on the underlying grpc.Server +func (s *GRPCServer) GracefulStop() { + s.server.GracefulStop() +} + +// Config is the GRPCServerConfig encoded as JSON then base64. +func (s *GRPCServer) Config() string { + // Create a buffer that will contain our final contents + var buf bytes.Buffer + + // Wrap the base64 encoding with JSON encoding. + if err := json.NewEncoder(&buf).Encode(s.config); err != nil { + // We panic since ths shouldn't happen under any scenario. We + // carefully control the structure being encoded here and it should + // always be successful. + panic(err) + } + + return buf.String() +} + +func (s *GRPCServer) Serve(lis net.Listener) { + defer close(s.DoneCh) + err := s.server.Serve(lis) + if err != nil { + s.logger.Error("grpc server", "error", err) + } +} + +// GRPCServerConfig is the extra configuration passed along for consumers +// to facilitate using GRPC plugins. +type GRPCServerConfig struct { + StdoutAddr string `json:"stdout_addr"` + StderrAddr string `json:"stderr_addr"` +} diff --git a/vendor/github.com/hashicorp/go-plugin/internal/plugin/gen.go b/vendor/github.com/hashicorp/go-plugin/internal/plugin/gen.go new file mode 100644 index 0000000..aa2fdc8 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/internal/plugin/gen.go @@ -0,0 +1,3 @@ +//go:generate protoc -I ./ ./grpc_broker.proto ./grpc_controller.proto --go_out=plugins=grpc:. + +package plugin diff --git a/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_broker.pb.go b/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_broker.pb.go new file mode 100644 index 0000000..b6850aa --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_broker.pb.go @@ -0,0 +1,203 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc_broker.proto + +package plugin + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ConnInfo struct { + ServiceId uint32 `protobuf:"varint,1,opt,name=service_id,json=serviceId,proto3" json:"service_id,omitempty"` + Network string `protobuf:"bytes,2,opt,name=network,proto3" json:"network,omitempty"` + Address string `protobuf:"bytes,3,opt,name=address,proto3" json:"address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConnInfo) Reset() { *m = ConnInfo{} } +func (m *ConnInfo) String() string { return proto.CompactTextString(m) } +func (*ConnInfo) ProtoMessage() {} +func (*ConnInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_802e9beed3ec3b28, []int{0} +} + +func (m *ConnInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConnInfo.Unmarshal(m, b) +} +func (m *ConnInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConnInfo.Marshal(b, m, deterministic) +} +func (m *ConnInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConnInfo.Merge(m, src) +} +func (m *ConnInfo) XXX_Size() int { + return xxx_messageInfo_ConnInfo.Size(m) +} +func (m *ConnInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ConnInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ConnInfo proto.InternalMessageInfo + +func (m *ConnInfo) GetServiceId() uint32 { + if m != nil { + return m.ServiceId + } + return 0 +} + +func (m *ConnInfo) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *ConnInfo) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func init() { + proto.RegisterType((*ConnInfo)(nil), "plugin.ConnInfo") +} + +func init() { proto.RegisterFile("grpc_broker.proto", fileDescriptor_802e9beed3ec3b28) } + +var fileDescriptor_802e9beed3ec3b28 = []byte{ + // 175 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4c, 0x2f, 0x2a, 0x48, + 0x8e, 0x4f, 0x2a, 0xca, 0xcf, 0x4e, 0x2d, 0xd2, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x2b, + 0xc8, 0x29, 0x4d, 0xcf, 0xcc, 0x53, 0x8a, 0xe5, 0xe2, 0x70, 0xce, 0xcf, 0xcb, 0xf3, 0xcc, 0x4b, + 0xcb, 0x17, 0x92, 0xe5, 0xe2, 0x2a, 0x4e, 0x2d, 0x2a, 0xcb, 0x4c, 0x4e, 0x8d, 0xcf, 0x4c, 0x91, + 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0d, 0xe2, 0x84, 0x8a, 0x78, 0xa6, 0x08, 0x49, 0x70, 0xb1, 0xe7, + 0xa5, 0x96, 0x94, 0xe7, 0x17, 0x65, 0x4b, 0x30, 0x29, 0x30, 0x6a, 0x70, 0x06, 0xc1, 0xb8, 0x20, + 0x99, 0xc4, 0x94, 0x94, 0xa2, 0xd4, 0xe2, 0x62, 0x09, 0x66, 0x88, 0x0c, 0x94, 0x6b, 0xe4, 0xcc, + 0xc5, 0xe5, 0x1e, 0x14, 0xe0, 0xec, 0x04, 0xb6, 0x5a, 0xc8, 0x94, 0x8b, 0x3b, 0xb8, 0x24, 0xb1, + 0xa8, 0x24, 0xb8, 0xa4, 0x28, 0x35, 0x31, 0x57, 0x48, 0x40, 0x0f, 0xe2, 0x08, 0x3d, 0x98, 0x0b, + 0xa4, 0x30, 0x44, 0x34, 0x18, 0x0d, 0x18, 0x9d, 0x38, 0xa2, 0xa0, 0xae, 0x4d, 0x62, 0x03, 0x3b, + 0xde, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x10, 0x15, 0x39, 0x47, 0xd1, 0x00, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GRPCBrokerClient is the client API for GRPCBroker service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GRPCBrokerClient interface { + StartStream(ctx context.Context, opts ...grpc.CallOption) (GRPCBroker_StartStreamClient, error) +} + +type gRPCBrokerClient struct { + cc *grpc.ClientConn +} + +func NewGRPCBrokerClient(cc *grpc.ClientConn) GRPCBrokerClient { + return &gRPCBrokerClient{cc} +} + +func (c *gRPCBrokerClient) StartStream(ctx context.Context, opts ...grpc.CallOption) (GRPCBroker_StartStreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_GRPCBroker_serviceDesc.Streams[0], "/plugin.GRPCBroker/StartStream", opts...) + if err != nil { + return nil, err + } + x := &gRPCBrokerStartStreamClient{stream} + return x, nil +} + +type GRPCBroker_StartStreamClient interface { + Send(*ConnInfo) error + Recv() (*ConnInfo, error) + grpc.ClientStream +} + +type gRPCBrokerStartStreamClient struct { + grpc.ClientStream +} + +func (x *gRPCBrokerStartStreamClient) Send(m *ConnInfo) error { + return x.ClientStream.SendMsg(m) +} + +func (x *gRPCBrokerStartStreamClient) Recv() (*ConnInfo, error) { + m := new(ConnInfo) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// GRPCBrokerServer is the server API for GRPCBroker service. +type GRPCBrokerServer interface { + StartStream(GRPCBroker_StartStreamServer) error +} + +func RegisterGRPCBrokerServer(s *grpc.Server, srv GRPCBrokerServer) { + s.RegisterService(&_GRPCBroker_serviceDesc, srv) +} + +func _GRPCBroker_StartStream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(GRPCBrokerServer).StartStream(&gRPCBrokerStartStreamServer{stream}) +} + +type GRPCBroker_StartStreamServer interface { + Send(*ConnInfo) error + Recv() (*ConnInfo, error) + grpc.ServerStream +} + +type gRPCBrokerStartStreamServer struct { + grpc.ServerStream +} + +func (x *gRPCBrokerStartStreamServer) Send(m *ConnInfo) error { + return x.ServerStream.SendMsg(m) +} + +func (x *gRPCBrokerStartStreamServer) Recv() (*ConnInfo, error) { + m := new(ConnInfo) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _GRPCBroker_serviceDesc = grpc.ServiceDesc{ + ServiceName: "plugin.GRPCBroker", + HandlerType: (*GRPCBrokerServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StartStream", + Handler: _GRPCBroker_StartStream_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc_broker.proto", +} diff --git a/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_controller.pb.go b/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_controller.pb.go new file mode 100644 index 0000000..38b4204 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/internal/plugin/grpc_controller.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc_controller.proto + +package plugin + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_23c2c7e42feab570, []int{0} +} + +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (m *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(m, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo + +func init() { + proto.RegisterType((*Empty)(nil), "plugin.Empty") +} + +func init() { proto.RegisterFile("grpc_controller.proto", fileDescriptor_23c2c7e42feab570) } + +var fileDescriptor_23c2c7e42feab570 = []byte{ + // 108 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4d, 0x2f, 0x2a, 0x48, + 0x8e, 0x4f, 0xce, 0xcf, 0x2b, 0x29, 0xca, 0xcf, 0xc9, 0x49, 0x2d, 0xd2, 0x2b, 0x28, 0xca, 0x2f, + 0xc9, 0x17, 0x62, 0x2b, 0xc8, 0x29, 0x4d, 0xcf, 0xcc, 0x53, 0x62, 0xe7, 0x62, 0x75, 0xcd, 0x2d, + 0x28, 0xa9, 0x34, 0xb2, 0xe2, 0xe2, 0x73, 0x0f, 0x0a, 0x70, 0x76, 0x86, 0x2b, 0x14, 0xd2, 0xe0, + 0xe2, 0x08, 0xce, 0x28, 0x2d, 0x49, 0xc9, 0x2f, 0xcf, 0x13, 0xe2, 0xd5, 0x83, 0xa8, 0xd7, 0x03, + 0x2b, 0x96, 0x42, 0xe5, 0x3a, 0x71, 0x44, 0x41, 0x8d, 0x4b, 0x62, 0x03, 0x9b, 0x6e, 0x0c, 0x08, + 0x00, 0x00, 0xff, 0xff, 0xab, 0x7c, 0x27, 0xe5, 0x76, 0x00, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GRPCControllerClient is the client API for GRPCController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GRPCControllerClient interface { + Shutdown(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) +} + +type gRPCControllerClient struct { + cc *grpc.ClientConn +} + +func NewGRPCControllerClient(cc *grpc.ClientConn) GRPCControllerClient { + return &gRPCControllerClient{cc} +} + +func (c *gRPCControllerClient) Shutdown(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/plugin.GRPCController/Shutdown", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GRPCControllerServer is the server API for GRPCController service. 
+type GRPCControllerServer interface { + Shutdown(context.Context, *Empty) (*Empty, error) +} + +func RegisterGRPCControllerServer(s *grpc.Server, srv GRPCControllerServer) { + s.RegisterService(&_GRPCController_serviceDesc, srv) +} + +func _GRPCController_Shutdown_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GRPCControllerServer).Shutdown(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/plugin.GRPCController/Shutdown", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GRPCControllerServer).Shutdown(ctx, req.(*Empty)) + } + return interceptor(ctx, in, info, handler) +} + +var _GRPCController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "plugin.GRPCController", + HandlerType: (*GRPCControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Shutdown", + Handler: _GRPCController_Shutdown_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc_controller.proto", +} diff --git a/vendor/github.com/hashicorp/go-plugin/log_entry.go b/vendor/github.com/hashicorp/go-plugin/log_entry.go new file mode 100644 index 0000000..fb2ef93 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/log_entry.go @@ -0,0 +1,73 @@ +package plugin + +import ( + "encoding/json" + "time" +) + +// logEntry is the JSON payload that gets sent to Stderr from the plugin to the host +type logEntry struct { + Message string `json:"@message"` + Level string `json:"@level"` + Timestamp time.Time `json:"timestamp"` + KVPairs []*logEntryKV `json:"kv_pairs"` +} + +// logEntryKV is a key value pair within the Output payload +type logEntryKV struct { + Key string `json:"key"` + Value interface{} `json:"value"` +} + +// flattenKVPairs is used to flatten KVPair slice into []interface{} +// for hclog consumption. +func flattenKVPairs(kvs []*logEntryKV) []interface{} { + var result []interface{} + for _, kv := range kvs { + result = append(result, kv.Key) + result = append(result, kv.Value) + } + + return result +} + +// parseJSON handles parsing JSON output +func parseJSON(input []byte) (*logEntry, error) { + var raw map[string]interface{} + entry := &logEntry{} + + err := json.Unmarshal(input, &raw) + if err != nil { + return nil, err + } + + // Parse hclog-specific objects + if v, ok := raw["@message"]; ok { + entry.Message = v.(string) + delete(raw, "@message") + } + + if v, ok := raw["@level"]; ok { + entry.Level = v.(string) + delete(raw, "@level") + } + + if v, ok := raw["@timestamp"]; ok { + t, err := time.Parse("2006-01-02T15:04:05.000000Z07:00", v.(string)) + if err != nil { + return nil, err + } + entry.Timestamp = t + delete(raw, "@timestamp") + } + + // Parse dynamic KV args from the hclog payload. + for k, v := range raw { + entry.KVPairs = append(entry.KVPairs, &logEntryKV{ + Key: k, + Value: v, + }) + } + + return entry, nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/mtls.go b/vendor/github.com/hashicorp/go-plugin/mtls.go new file mode 100644 index 0000000..8895524 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/mtls.go @@ -0,0 +1,73 @@ +package plugin + +import ( + "bytes" + "crypto/ecdsa" + "crypto/elliptic" + "crypto/rand" + "crypto/x509" + "crypto/x509/pkix" + "encoding/pem" + "math/big" + "time" +) + +// generateCert generates a temporary certificate for plugin authentication. 
The +// certificate and private key are returns in PEM format. +func generateCert() (cert []byte, privateKey []byte, err error) { + key, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader) + if err != nil { + return nil, nil, err + } + + serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128) + sn, err := rand.Int(rand.Reader, serialNumberLimit) + if err != nil { + return nil, nil, err + } + + host := "localhost" + + template := &x509.Certificate{ + Subject: pkix.Name{ + CommonName: host, + Organization: []string{"HashiCorp"}, + }, + DNSNames: []string{host}, + ExtKeyUsage: []x509.ExtKeyUsage{ + x509.ExtKeyUsageClientAuth, + x509.ExtKeyUsageServerAuth, + }, + KeyUsage: x509.KeyUsageDigitalSignature | x509.KeyUsageKeyEncipherment | x509.KeyUsageKeyAgreement | x509.KeyUsageCertSign, + BasicConstraintsValid: true, + SerialNumber: sn, + NotBefore: time.Now().Add(-30 * time.Second), + NotAfter: time.Now().Add(262980 * time.Hour), + IsCA: true, + } + + der, err := x509.CreateCertificate(rand.Reader, template, template, key.Public(), key) + if err != nil { + return nil, nil, err + } + + var certOut bytes.Buffer + if err := pem.Encode(&certOut, &pem.Block{Type: "CERTIFICATE", Bytes: der}); err != nil { + return nil, nil, err + } + + keyBytes, err := x509.MarshalECPrivateKey(key) + if err != nil { + return nil, nil, err + } + + var keyOut bytes.Buffer + if err := pem.Encode(&keyOut, &pem.Block{Type: "EC PRIVATE KEY", Bytes: keyBytes}); err != nil { + return nil, nil, err + } + + cert = certOut.Bytes() + privateKey = keyOut.Bytes() + + return cert, privateKey, nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/mux_broker.go b/vendor/github.com/hashicorp/go-plugin/mux_broker.go new file mode 100644 index 0000000..01c45ad --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/mux_broker.go @@ -0,0 +1,204 @@ +package plugin + +import ( + "encoding/binary" + "fmt" + "log" + "net" + "sync" + "sync/atomic" + "time" + + "github.com/hashicorp/yamux" +) + +// MuxBroker is responsible for brokering multiplexed connections by unique ID. +// +// It is used by plugins to multiplex multiple RPC connections and data +// streams on top of a single connection between the plugin process and the +// host process. +// +// This allows a plugin to request a channel with a specific ID to connect to +// or accept a connection from, and the broker handles the details of +// holding these channels open while they're being negotiated. +// +// The Plugin interface has access to these for both Server and Client. +// The broker can be used by either (optionally) to reserve and connect to +// new multiplexed streams. This is useful for complex args and return values, +// or anything else you might need a data stream for. +type MuxBroker struct { + nextId uint32 + session *yamux.Session + streams map[uint32]*muxBrokerPending + + sync.Mutex +} + +type muxBrokerPending struct { + ch chan net.Conn + doneCh chan struct{} +} + +func newMuxBroker(s *yamux.Session) *MuxBroker { + return &MuxBroker{ + session: s, + streams: make(map[uint32]*muxBrokerPending), + } +} + +// Accept accepts a connection by ID. +// +// This should not be called multiple times with the same ID at one time. 
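
MuxBroker is the net/rpc counterpart of GRPCBroker and follows the same reserve-and-connect pattern through the Accept, AcceptAndServe, Dial and NextId methods implemented below. A rough sketch of both ends, assuming a hypothetical EchoServer receiver; AcceptAndServe registers the receiver under the fixed name "Plugin".

    package example

    import (
        "net/rpc"

        plugin "github.com/hashicorp/go-plugin"
    )

    // EchoServer is a hypothetical net/rpc receiver served over a brokered stream.
    type EchoServer struct{}

    func (EchoServer) Echo(arg string, reply *string) error {
        *reply = arg
        return nil
    }

    // serveEcho reserves a stream ID and serves EchoServer on it; the ID is then
    // passed to the peer as part of an ordinary RPC argument or reply.
    func serveEcho(broker *plugin.MuxBroker) uint32 {
        id := broker.NextId()
        go broker.AcceptAndServe(id, EchoServer{})
        return id
    }

    // dialEcho is the peer side: dial the advertised ID, wrap the raw net.Conn in
    // an *rpc.Client and call methods as "Plugin.<Method>".
    func dialEcho(broker *plugin.MuxBroker, id uint32) (string, error) {
        conn, err := broker.Dial(id)
        if err != nil {
            return "", err
        }
        client := rpc.NewClient(conn)
        defer client.Close()

        var reply string
        err = client.Call("Plugin.Echo", "ping", &reply)
        return reply, err
    }
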
+func (m *MuxBroker) Accept(id uint32) (net.Conn, error) { + var c net.Conn + p := m.getStream(id) + select { + case c = <-p.ch: + close(p.doneCh) + case <-time.After(5 * time.Second): + m.Lock() + defer m.Unlock() + delete(m.streams, id) + + return nil, fmt.Errorf("timeout waiting for accept") + } + + // Ack our connection + if err := binary.Write(c, binary.LittleEndian, id); err != nil { + c.Close() + return nil, err + } + + return c, nil +} + +// AcceptAndServe is used to accept a specific stream ID and immediately +// serve an RPC server on that stream ID. This is used to easily serve +// complex arguments. +// +// The served interface is always registered to the "Plugin" name. +func (m *MuxBroker) AcceptAndServe(id uint32, v interface{}) { + conn, err := m.Accept(id) + if err != nil { + log.Printf("[ERR] plugin: plugin acceptAndServe error: %s", err) + return + } + + serve(conn, "Plugin", v) +} + +// Close closes the connection and all sub-connections. +func (m *MuxBroker) Close() error { + return m.session.Close() +} + +// Dial opens a connection by ID. +func (m *MuxBroker) Dial(id uint32) (net.Conn, error) { + // Open the stream + stream, err := m.session.OpenStream() + if err != nil { + return nil, err + } + + // Write the stream ID onto the wire. + if err := binary.Write(stream, binary.LittleEndian, id); err != nil { + stream.Close() + return nil, err + } + + // Read the ack that we connected. Then we're off! + var ack uint32 + if err := binary.Read(stream, binary.LittleEndian, &ack); err != nil { + stream.Close() + return nil, err + } + if ack != id { + stream.Close() + return nil, fmt.Errorf("bad ack: %d (expected %d)", ack, id) + } + + return stream, nil +} + +// NextId returns a unique ID to use next. +// +// It is possible for very long-running plugin hosts to wrap this value, +// though it would require a very large amount of RPC calls. In practice +// we've never seen it happen. +func (m *MuxBroker) NextId() uint32 { + return atomic.AddUint32(&m.nextId, 1) +} + +// Run starts the brokering and should be executed in a goroutine, since it +// blocks forever, or until the session closes. +// +// Uses of MuxBroker never need to call this. It is called internally by +// the plugin host/client. +func (m *MuxBroker) Run() { + for { + stream, err := m.session.AcceptStream() + if err != nil { + // Once we receive an error, just exit + break + } + + // Read the stream ID from the stream + var id uint32 + if err := binary.Read(stream, binary.LittleEndian, &id); err != nil { + stream.Close() + continue + } + + // Initialize the waiter + p := m.getStream(id) + select { + case p.ch <- stream: + default: + } + + // Wait for a timeout + go m.timeoutWait(id, p) + } +} + +func (m *MuxBroker) getStream(id uint32) *muxBrokerPending { + m.Lock() + defer m.Unlock() + + p, ok := m.streams[id] + if ok { + return p + } + + m.streams[id] = &muxBrokerPending{ + ch: make(chan net.Conn, 1), + doneCh: make(chan struct{}), + } + return m.streams[id] +} + +func (m *MuxBroker) timeoutWait(id uint32, p *muxBrokerPending) { + // Wait for the stream to either be picked up and connected, or + // for a timeout. + timeout := false + select { + case <-p.doneCh: + case <-time.After(5 * time.Second): + timeout = true + } + + m.Lock() + defer m.Unlock() + + // Delete the stream so no one else can grab it + delete(m.streams, id) + + // If we timed out, then check if we have a channel in the buffer, + // and if so, close it. 
+ if timeout { + select { + case s := <-p.ch: + s.Close() + } + } +} diff --git a/vendor/github.com/hashicorp/go-plugin/plugin.go b/vendor/github.com/hashicorp/go-plugin/plugin.go new file mode 100644 index 0000000..79d9674 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/plugin.go @@ -0,0 +1,58 @@ +// The plugin package exposes functions and helpers for communicating to +// plugins which are implemented as standalone binary applications. +// +// plugin.Client fully manages the lifecycle of executing the application, +// connecting to it, and returning the RPC client for dispensing plugins. +// +// plugin.Serve fully manages listeners to expose an RPC server from a binary +// that plugin.Client can connect to. +package plugin + +import ( + "context" + "errors" + "net/rpc" + + "google.golang.org/grpc" +) + +// Plugin is the interface that is implemented to serve/connect to an +// inteface implementation. +type Plugin interface { + // Server should return the RPC server compatible struct to serve + // the methods that the Client calls over net/rpc. + Server(*MuxBroker) (interface{}, error) + + // Client returns an interface implementation for the plugin you're + // serving that communicates to the server end of the plugin. + Client(*MuxBroker, *rpc.Client) (interface{}, error) +} + +// GRPCPlugin is the interface that is implemented to serve/connect to +// a plugin over gRPC. +type GRPCPlugin interface { + // GRPCServer should register this plugin for serving with the + // given GRPCServer. Unlike Plugin.Server, this is only called once + // since gRPC plugins serve singletons. + GRPCServer(*GRPCBroker, *grpc.Server) error + + // GRPCClient should return the interface implementation for the plugin + // you're serving via gRPC. The provided context will be canceled by + // go-plugin in the event of the plugin process exiting. + GRPCClient(context.Context, *GRPCBroker, *grpc.ClientConn) (interface{}, error) +} + +// NetRPCUnsupportedPlugin implements Plugin but returns errors for the +// Server and Client functions. This will effectively disable support for +// net/rpc based plugins. +// +// This struct can be embedded in your struct. +type NetRPCUnsupportedPlugin struct{} + +func (p NetRPCUnsupportedPlugin) Server(*MuxBroker) (interface{}, error) { + return nil, errors.New("net/rpc plugin protocol not supported") +} + +func (p NetRPCUnsupportedPlugin) Client(*MuxBroker, *rpc.Client) (interface{}, error) { + return nil, errors.New("net/rpc plugin protocol not supported") +} diff --git a/vendor/github.com/hashicorp/go-plugin/process.go b/vendor/github.com/hashicorp/go-plugin/process.go new file mode 100644 index 0000000..88c999a --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/process.go @@ -0,0 +1,24 @@ +package plugin + +import ( + "time" +) + +// pidAlive checks whether a pid is alive. +func pidAlive(pid int) bool { + return _pidAlive(pid) +} + +// pidWait blocks for a process to exit. 
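
Before the process helpers continue below, it is worth sketching what an implementation of the Plugin interface from plugin.go typically looks like, since that is the piece a plugin author writes. Greeter is the same hypothetical interface used in the host-side sketch earlier, and "Plugin" is the registration name used by the dispense server and MuxBroker in this package.

    package example

    import (
        "net/rpc"

        plugin "github.com/hashicorp/go-plugin"
    )

    // Greeter is assumed to be the hypothetical interface defined in the earlier
    // host-side sketch.

    // GreeterRPCServer wraps a concrete Greeter so it can be served over net/rpc.
    type GreeterRPCServer struct{ Impl Greeter }

    func (s *GreeterRPCServer) Greet(name string, resp *string) error {
        out, err := s.Impl.Greet(name)
        *resp = out
        return err
    }

    // GreeterRPCClient is the proxy handed back to hosts by Dispense.
    type GreeterRPCClient struct{ client *rpc.Client }

    func (c *GreeterRPCClient) Greet(name string) (string, error) {
        var resp string
        // "Plugin" is the name the dispense server registers implementations under.
        err := c.client.Call("Plugin.Greet", name, &resp)
        return resp, err
    }

    // GreeterPlugin satisfies the Plugin interface defined in plugin.go.
    type GreeterPlugin struct{ Impl Greeter }

    func (p *GreeterPlugin) Server(*plugin.MuxBroker) (interface{}, error) {
        return &GreeterRPCServer{Impl: p.Impl}, nil
    }

    func (p *GreeterPlugin) Client(b *plugin.MuxBroker, c *rpc.Client) (interface{}, error) {
        return &GreeterRPCClient{client: c}, nil
    }
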
+func pidWait(pid int) error { + ticker := time.NewTicker(1 * time.Second) + defer ticker.Stop() + + for range ticker.C { + if !pidAlive(pid) { + break + } + } + + return nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/process_posix.go b/vendor/github.com/hashicorp/go-plugin/process_posix.go new file mode 100644 index 0000000..70ba546 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/process_posix.go @@ -0,0 +1,19 @@ +// +build !windows + +package plugin + +import ( + "os" + "syscall" +) + +// _pidAlive tests whether a process is alive or not by sending it Signal 0, +// since Go otherwise has no way to test this. +func _pidAlive(pid int) bool { + proc, err := os.FindProcess(pid) + if err == nil { + err = proc.Signal(syscall.Signal(0)) + } + + return err == nil +} diff --git a/vendor/github.com/hashicorp/go-plugin/process_windows.go b/vendor/github.com/hashicorp/go-plugin/process_windows.go new file mode 100644 index 0000000..9f7b018 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/process_windows.go @@ -0,0 +1,29 @@ +package plugin + +import ( + "syscall" +) + +const ( + // Weird name but matches the MSDN docs + exit_STILL_ACTIVE = 259 + + processDesiredAccess = syscall.STANDARD_RIGHTS_READ | + syscall.PROCESS_QUERY_INFORMATION | + syscall.SYNCHRONIZE +) + +// _pidAlive tests whether a process is alive or not +func _pidAlive(pid int) bool { + h, err := syscall.OpenProcess(processDesiredAccess, false, uint32(pid)) + if err != nil { + return false + } + + var ec uint32 + if e := syscall.GetExitCodeProcess(h, &ec); e != nil { + return false + } + + return ec == exit_STILL_ACTIVE +} diff --git a/vendor/github.com/hashicorp/go-plugin/protocol.go b/vendor/github.com/hashicorp/go-plugin/protocol.go new file mode 100644 index 0000000..0cfc19e --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/protocol.go @@ -0,0 +1,45 @@ +package plugin + +import ( + "io" + "net" +) + +// Protocol is an enum representing the types of protocols. +type Protocol string + +const ( + ProtocolInvalid Protocol = "" + ProtocolNetRPC Protocol = "netrpc" + ProtocolGRPC Protocol = "grpc" +) + +// ServerProtocol is an interface that must be implemented for new plugin +// protocols to be servers. +type ServerProtocol interface { + // Init is called once to configure and initialize the protocol, but + // not start listening. This is the point at which all validation should + // be done and errors returned. + Init() error + + // Config is extra configuration to be outputted to stdout. This will + // be automatically base64 encoded to ensure it can be parsed properly. + // This can be an empty string if additional configuration is not needed. + Config() string + + // Serve is called to serve connections on the given listener. This should + // continue until the listener is closed. + Serve(net.Listener) +} + +// ClientProtocol is an interface that must be implemented for new plugin +// protocols to be clients. +type ClientProtocol interface { + io.Closer + + // Dispense dispenses a new instance of the plugin with the given name. + Dispense(string) (interface{}, error) + + // Ping checks that the client connection is still healthy. 
+ Ping() error +} diff --git a/vendor/github.com/hashicorp/go-plugin/rpc_client.go b/vendor/github.com/hashicorp/go-plugin/rpc_client.go new file mode 100644 index 0000000..f30a4b1 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/rpc_client.go @@ -0,0 +1,170 @@ +package plugin + +import ( + "crypto/tls" + "fmt" + "io" + "net" + "net/rpc" + + "github.com/hashicorp/yamux" +) + +// RPCClient connects to an RPCServer over net/rpc to dispense plugin types. +type RPCClient struct { + broker *MuxBroker + control *rpc.Client + plugins map[string]Plugin + + // These are the streams used for the various stdout/err overrides + stdout, stderr net.Conn +} + +// newRPCClient creates a new RPCClient. The Client argument is expected +// to be successfully started already with a lock held. +func newRPCClient(c *Client) (*RPCClient, error) { + // Connect to the client + conn, err := net.Dial(c.address.Network(), c.address.String()) + if err != nil { + return nil, err + } + if tcpConn, ok := conn.(*net.TCPConn); ok { + // Make sure to set keep alive so that the connection doesn't die + tcpConn.SetKeepAlive(true) + } + + if c.config.TLSConfig != nil { + conn = tls.Client(conn, c.config.TLSConfig) + } + + // Create the actual RPC client + result, err := NewRPCClient(conn, c.config.Plugins) + if err != nil { + conn.Close() + return nil, err + } + + // Begin the stream syncing so that stdin, out, err work properly + err = result.SyncStreams( + c.config.SyncStdout, + c.config.SyncStderr) + if err != nil { + result.Close() + return nil, err + } + + return result, nil +} + +// NewRPCClient creates a client from an already-open connection-like value. +// Dial is typically used instead. +func NewRPCClient(conn io.ReadWriteCloser, plugins map[string]Plugin) (*RPCClient, error) { + // Create the yamux client so we can multiplex + mux, err := yamux.Client(conn, nil) + if err != nil { + conn.Close() + return nil, err + } + + // Connect to the control stream. + control, err := mux.Open() + if err != nil { + mux.Close() + return nil, err + } + + // Connect stdout, stderr streams + stdstream := make([]net.Conn, 2) + for i, _ := range stdstream { + stdstream[i], err = mux.Open() + if err != nil { + mux.Close() + return nil, err + } + } + + // Create the broker and start it up + broker := newMuxBroker(mux) + go broker.Run() + + // Build the client using our broker and control channel. + return &RPCClient{ + broker: broker, + control: rpc.NewClient(control), + plugins: plugins, + stdout: stdstream[0], + stderr: stdstream[1], + }, nil +} + +// SyncStreams should be called to enable syncing of stdout, +// stderr with the plugin. +// +// This will return immediately and the syncing will continue to happen +// in the background. You do not need to launch this in a goroutine itself. +// +// This should never be called multiple times. +func (c *RPCClient) SyncStreams(stdout io.Writer, stderr io.Writer) error { + go copyStream("stdout", stdout, c.stdout) + go copyStream("stderr", stderr, c.stderr) + return nil +} + +// Close closes the connection. The client is no longer usable after this +// is called. +func (c *RPCClient) Close() error { + // Call the control channel and ask it to gracefully exit. If this + // errors, then we save it so that we always return an error but we + // want to try to close the other channels anyways. 
+ var empty struct{} + returnErr := c.control.Call("Control.Quit", true, &empty) + + // Close the other streams we have + if err := c.control.Close(); err != nil { + return err + } + if err := c.stdout.Close(); err != nil { + return err + } + if err := c.stderr.Close(); err != nil { + return err + } + if err := c.broker.Close(); err != nil { + return err + } + + // Return back the error we got from Control.Quit. This is very important + // since we MUST return non-nil error if this fails so that Client.Kill + // will properly try a process.Kill. + return returnErr +} + +func (c *RPCClient) Dispense(name string) (interface{}, error) { + p, ok := c.plugins[name] + if !ok { + return nil, fmt.Errorf("unknown plugin type: %s", name) + } + + var id uint32 + if err := c.control.Call( + "Dispenser.Dispense", name, &id); err != nil { + return nil, err + } + + conn, err := c.broker.Dial(id) + if err != nil { + return nil, err + } + + return p.Client(c.broker, rpc.NewClient(conn)) +} + +// Ping pings the connection to ensure it is still alive. +// +// The error from the RPC call is returned exactly if you want to inspect +// it for further error analysis. Any error returned from here would indicate +// that the connection to the plugin is not healthy. +func (c *RPCClient) Ping() error { + var empty struct{} + return c.control.Call("Control.Ping", true, &empty) +} diff --git a/vendor/github.com/hashicorp/go-plugin/rpc_server.go b/vendor/github.com/hashicorp/go-plugin/rpc_server.go new file mode 100644 index 0000000..5bb18dd --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/rpc_server.go @@ -0,0 +1,197 @@ +package plugin + +import ( + "errors" + "fmt" + "io" + "log" + "net" + "net/rpc" + "sync" + + "github.com/hashicorp/yamux" +) + +// RPCServer listens for network connections and then dispenses interface +// implementations over net/rpc. +// +// After setting the fields below, they shouldn't be read again directly +// from the structure which may be reading/writing them concurrently. +type RPCServer struct { + Plugins map[string]Plugin + + // Stdout, Stderr are what this server will use instead of the + // normal stdin/out/err. This is because due to the multi-process nature + // of our plugin system, we can't use the normal process values so we + // make our own custom one we pipe across. + Stdout io.Reader + Stderr io.Reader + + // DoneCh should be set to a non-nil channel that will be closed + // when the control requests the RPC server to end. + DoneCh chan<- struct{} + + lock sync.Mutex +} + +// ServerProtocol impl. +func (s *RPCServer) Init() error { return nil } + +// ServerProtocol impl. +func (s *RPCServer) Config() string { return "" } + +// ServerProtocol impl. +func (s *RPCServer) Serve(lis net.Listener) { + for { + conn, err := lis.Accept() + if err != nil { + log.Printf("[ERR] plugin: plugin server: %s", err) + return + } + + go s.ServeConn(conn) + } +} + +// ServeConn runs a single connection. +// +// ServeConn blocks, serving the connection until the client hangs up. 
+func (s *RPCServer) ServeConn(conn io.ReadWriteCloser) { + // First create the yamux server to wrap this connection + mux, err := yamux.Server(conn, nil) + if err != nil { + conn.Close() + log.Printf("[ERR] plugin: error creating yamux server: %s", err) + return + } + + // Accept the control connection + control, err := mux.Accept() + if err != nil { + mux.Close() + if err != io.EOF { + log.Printf("[ERR] plugin: error accepting control connection: %s", err) + } + + return + } + + // Connect the stdstreams (in, out, err) + stdstream := make([]net.Conn, 2) + for i, _ := range stdstream { + stdstream[i], err = mux.Accept() + if err != nil { + mux.Close() + log.Printf("[ERR] plugin: accepting stream %d: %s", i, err) + return + } + } + + // Copy std streams out to the proper place + go copyStream("stdout", stdstream[0], s.Stdout) + go copyStream("stderr", stdstream[1], s.Stderr) + + // Create the broker and start it up + broker := newMuxBroker(mux) + go broker.Run() + + // Use the control connection to build the dispenser and serve the + // connection. + server := rpc.NewServer() + server.RegisterName("Control", &controlServer{ + server: s, + }) + server.RegisterName("Dispenser", &dispenseServer{ + broker: broker, + plugins: s.Plugins, + }) + server.ServeConn(control) +} + +// done is called internally by the control server to trigger the +// doneCh to close which is listened to by the main process to cleanly +// exit. +func (s *RPCServer) done() { + s.lock.Lock() + defer s.lock.Unlock() + + if s.DoneCh != nil { + close(s.DoneCh) + s.DoneCh = nil + } +} + +// dispenseServer dispenses variousinterface implementations for Terraform. +type controlServer struct { + server *RPCServer +} + +// Ping can be called to verify the connection (and likely the binary) +// is still alive to a plugin. +func (c *controlServer) Ping( + null bool, response *struct{}) error { + *response = struct{}{} + return nil +} + +func (c *controlServer) Quit( + null bool, response *struct{}) error { + // End the server + c.server.done() + + // Always return true + *response = struct{}{} + + return nil +} + +// dispenseServer dispenses variousinterface implementations for Terraform. +type dispenseServer struct { + broker *MuxBroker + plugins map[string]Plugin +} + +func (d *dispenseServer) Dispense( + name string, response *uint32) error { + // Find the function to create this implementation + p, ok := d.plugins[name] + if !ok { + return fmt.Errorf("unknown plugin type: %s", name) + } + + // Create the implementation first so we know if there is an error. + impl, err := p.Server(d.broker) + if err != nil { + // We turn the error into an errors error so that it works across RPC + return errors.New(err.Error()) + } + + // Reserve an ID for our implementation + id := d.broker.NextId() + *response = id + + // Run the rest in a goroutine since it can only happen once this RPC + // call returns. We wait for a connection for the plugin implementation + // and serve it. 
+ go func() { + conn, err := d.broker.Accept(id) + if err != nil { + log.Printf("[ERR] go-plugin: plugin dispense error: %s: %s", name, err) + return + } + + serve(conn, "Plugin", impl) + }() + + return nil +} + +func serve(conn io.ReadWriteCloser, name string, v interface{}) { + server := rpc.NewServer() + if err := server.RegisterName(name, v); err != nil { + log.Printf("[ERR] go-plugin: plugin dispense error: %s", err) + return + } + + server.ServeConn(conn) +} diff --git a/vendor/github.com/hashicorp/go-plugin/server.go b/vendor/github.com/hashicorp/go-plugin/server.go new file mode 100644 index 0000000..fc9f05a --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/server.go @@ -0,0 +1,432 @@ +package plugin + +import ( + "crypto/tls" + "crypto/x509" + "encoding/base64" + "errors" + "fmt" + "io/ioutil" + "log" + "net" + "os" + "os/signal" + "runtime" + "sort" + "strconv" + "strings" + "sync/atomic" + + "github.com/hashicorp/go-hclog" + + "google.golang.org/grpc" +) + +// CoreProtocolVersion is the ProtocolVersion of the plugin system itself. +// We will increment this whenever we change any protocol behavior. This +// will invalidate any prior plugins but will at least allow us to iterate +// on the core in a safe way. We will do our best to do this very +// infrequently. +const CoreProtocolVersion = 1 + +// HandshakeConfig is the configuration used by client and servers to +// handshake before starting a plugin connection. This is embedded by +// both ServeConfig and ClientConfig. +// +// In practice, the plugin host creates a HandshakeConfig that is exported +// and plugins then can easily consume it. +type HandshakeConfig struct { + // ProtocolVersion is the version that clients must match on to + // agree they can communicate. This should match the ProtocolVersion + // set on ClientConfig when using a plugin. + // This field is not required if VersionedPlugins are being used in the + // Client or Server configurations. + ProtocolVersion uint + + // MagicCookieKey and value are used as a very basic verification + // that a plugin is intended to be launched. This is not a security + // measure, just a UX feature. If the magic cookie doesn't match, + // we show human-friendly output. + MagicCookieKey string + MagicCookieValue string +} + +// PluginSet is a set of plugins provided to be registered in the plugin +// server. +type PluginSet map[string]Plugin + +// ServeConfig configures what sorts of plugins are served. +type ServeConfig struct { + // HandshakeConfig is the configuration that must match clients. + HandshakeConfig + + // TLSProvider is a function that returns a configured tls.Config. + TLSProvider func() (*tls.Config, error) + + // Plugins are the plugins that are served. + // The implied version of this PluginSet is the Handshake.ProtocolVersion. + Plugins PluginSet + + // VersionedPlugins is a map of PluginSets for specific protocol versions. + // These can be used to negotiate a compatible version between client and + // server. If this is set, Handshake.ProtocolVersion is not required. + VersionedPlugins map[int]PluginSet + + // GRPCServer should be non-nil to enable serving the plugins over + // gRPC. This is a function to create the server when needed with the + // given server options. The server options populated by go-plugin will + // be for TLS if set. You may modify the input slice. + // + // Note that the grpc.Server will automatically be registered with + // the gRPC health checking service. 
This is not optional since go-plugin + // relies on this to implement Ping(). + GRPCServer func([]grpc.ServerOption) *grpc.Server + + // Logger is used to pass a logger into the server. If none is provided the + // server will create a default logger. + Logger hclog.Logger +} + +// protocolVersion determines the protocol version and plugin set to be used by +// the server. In the event that there is no suitable version, the last version +// in the config is returned leaving the client to report the incompatibility. +func protocolVersion(opts *ServeConfig) (int, Protocol, PluginSet) { + protoVersion := int(opts.ProtocolVersion) + pluginSet := opts.Plugins + protoType := ProtocolNetRPC + // Check if the client sent a list of acceptable versions + var clientVersions []int + if vs := os.Getenv("PLUGIN_PROTOCOL_VERSIONS"); vs != "" { + for _, s := range strings.Split(vs, ",") { + v, err := strconv.Atoi(s) + if err != nil { + fmt.Fprintf(os.Stderr, "server sent invalid plugin version %q", s) + continue + } + clientVersions = append(clientVersions, v) + } + } + + // We want to iterate in reverse order, to ensure we match the newest + // compatible plugin version. + sort.Sort(sort.Reverse(sort.IntSlice(clientVersions))) + + // set the old un-versioned fields as if they were versioned plugins + if opts.VersionedPlugins == nil { + opts.VersionedPlugins = make(map[int]PluginSet) + } + + if pluginSet != nil { + opts.VersionedPlugins[protoVersion] = pluginSet + } + + // Sort the version to make sure we match the latest first + var versions []int + for v := range opts.VersionedPlugins { + versions = append(versions, v) + } + + sort.Sort(sort.Reverse(sort.IntSlice(versions))) + + // See if we have multiple versions of Plugins to choose from + for _, version := range versions { + // Record each version, since we guarantee that this returns valid + // values even if they are not a protocol match. + protoVersion = version + pluginSet = opts.VersionedPlugins[version] + + // If we have a configured gRPC server we should select a protocol + if opts.GRPCServer != nil { + // All plugins in a set must use the same transport, so check the first + // for the protocol type + for _, p := range pluginSet { + switch p.(type) { + case GRPCPlugin: + protoType = ProtocolGRPC + default: + protoType = ProtocolNetRPC + } + break + } + } + + for _, clientVersion := range clientVersions { + if clientVersion == protoVersion { + return protoVersion, protoType, pluginSet + } + } + } + + // Return the lowest version as the fallback. + // Since we iterated over all the versions in reverse order above, these + // values are from the lowest version number plugins (which may be from + // a combination of the Handshake.ProtocolVersion and ServeConfig.Plugins + // fields). This allows serving the oldest version of our plugins to a + // legacy client that did not send a PLUGIN_PROTOCOL_VERSIONS list. + return protoVersion, protoType, pluginSet +} + +// Serve serves the plugins given by ServeConfig. +// +// Serve doesn't return until the plugin is done being executed. Any +// errors will be outputted to os.Stderr. +// +// This is the method that plugins should call in their main() functions. +func Serve(opts *ServeConfig) { + // Validate the handshake config + if opts.MagicCookieKey == "" || opts.MagicCookieValue == "" { + fmt.Fprintf(os.Stderr, + "Misconfigured ServeConfig given to serve this plugin: no magic cookie\n"+ + "key or value was set. 
Please notify the plugin author and report\n"+ + "this as a bug.\n") + os.Exit(1) + } + + // First check the cookie + if os.Getenv(opts.MagicCookieKey) != opts.MagicCookieValue { + fmt.Fprintf(os.Stderr, + "This binary is a plugin. These are not meant to be executed directly.\n"+ + "Please execute the program that consumes these plugins, which will\n"+ + "load any plugins automatically\n") + os.Exit(1) + } + + // negotiate the version and plugins + // start with default version in the handshake config + protoVersion, protoType, pluginSet := protocolVersion(opts) + + // Logging goes to the original stderr + log.SetOutput(os.Stderr) + + logger := opts.Logger + if logger == nil { + // internal logger to os.Stderr + logger = hclog.New(&hclog.LoggerOptions{ + Level: hclog.Trace, + Output: os.Stderr, + JSONFormat: true, + }) + } + + // Create our new stdout, stderr files. These will override our built-in + // stdout/stderr so that it works across the stream boundary. + stdout_r, stdout_w, err := os.Pipe() + if err != nil { + fmt.Fprintf(os.Stderr, "Error preparing plugin: %s\n", err) + os.Exit(1) + } + stderr_r, stderr_w, err := os.Pipe() + if err != nil { + fmt.Fprintf(os.Stderr, "Error preparing plugin: %s\n", err) + os.Exit(1) + } + + // Register a listener so we can accept a connection + listener, err := serverListener() + if err != nil { + logger.Error("plugin init error", "error", err) + return + } + + // Close the listener on return. We wrap this in a func() on purpose + // because the "listener" reference may change to TLS. + defer func() { + listener.Close() + }() + + var tlsConfig *tls.Config + if opts.TLSProvider != nil { + tlsConfig, err = opts.TLSProvider() + if err != nil { + logger.Error("plugin tls init", "error", err) + return + } + } + + var serverCert string + clientCert := os.Getenv("PLUGIN_CLIENT_CERT") + // If the client is configured using AutoMTLS, the certificate will be here, + // and we need to generate our own in response. + if tlsConfig == nil && clientCert != "" { + logger.Info("configuring server automatic mTLS") + clientCertPool := x509.NewCertPool() + if !clientCertPool.AppendCertsFromPEM([]byte(clientCert)) { + logger.Error("client cert provided but failed to parse", "cert", clientCert) + } + + certPEM, keyPEM, err := generateCert() + if err != nil { + logger.Error("failed to generate client certificate", "error", err) + panic(err) + } + + cert, err := tls.X509KeyPair(certPEM, keyPEM) + if err != nil { + logger.Error("failed to parse client certificate", "error", err) + panic(err) + } + + tlsConfig = &tls.Config{ + Certificates: []tls.Certificate{cert}, + ClientAuth: tls.RequireAndVerifyClientCert, + ClientCAs: clientCertPool, + MinVersion: tls.VersionTLS12, + } + + // We send back the raw leaf cert data for the client rather than the + // PEM, since the protocol can't handle newlines. + serverCert = base64.RawStdEncoding.EncodeToString(cert.Certificate[0]) + } + + // Create the channel to tell us when we're done + doneCh := make(chan struct{}) + + // Build the server type + var server ServerProtocol + switch protoType { + case ProtocolNetRPC: + // If we have a TLS configuration then we wrap the listener + // ourselves and do it at that level. 
+ if tlsConfig != nil { + listener = tls.NewListener(listener, tlsConfig) + } + + // Create the RPC server to dispense + server = &RPCServer{ + Plugins: pluginSet, + Stdout: stdout_r, + Stderr: stderr_r, + DoneCh: doneCh, + } + + case ProtocolGRPC: + // Create the gRPC server + server = &GRPCServer{ + Plugins: pluginSet, + Server: opts.GRPCServer, + TLS: tlsConfig, + Stdout: stdout_r, + Stderr: stderr_r, + DoneCh: doneCh, + logger: logger, + } + + default: + panic("unknown server protocol: " + protoType) + } + + // Initialize the servers + if err := server.Init(); err != nil { + logger.Error("protocol init", "error", err) + return + } + + logger.Debug("plugin address", "network", listener.Addr().Network(), "address", listener.Addr().String()) + + // Output the address and service name to stdout so that the client can bring it up. + fmt.Printf("%d|%d|%s|%s|%s|%s\n", + CoreProtocolVersion, + protoVersion, + listener.Addr().Network(), + listener.Addr().String(), + protoType, + serverCert) + os.Stdout.Sync() + + // Eat the interrupts + ch := make(chan os.Signal, 1) + signal.Notify(ch, os.Interrupt) + go func() { + var count int32 = 0 + for { + <-ch + newCount := atomic.AddInt32(&count, 1) + logger.Debug("plugin received interrupt signal, ignoring", "count", newCount) + } + }() + + // Set our new out, err + os.Stdout = stdout_w + os.Stderr = stderr_w + + // Accept connections and wait for completion + go server.Serve(listener) + <-doneCh +} + +func serverListener() (net.Listener, error) { + if runtime.GOOS == "windows" { + return serverListener_tcp() + } + + return serverListener_unix() +} + +func serverListener_tcp() (net.Listener, error) { + minPort, err := strconv.ParseInt(os.Getenv("PLUGIN_MIN_PORT"), 10, 32) + if err != nil { + return nil, err + } + + maxPort, err := strconv.ParseInt(os.Getenv("PLUGIN_MAX_PORT"), 10, 32) + if err != nil { + return nil, err + } + + for port := minPort; port <= maxPort; port++ { + address := fmt.Sprintf("127.0.0.1:%d", port) + listener, err := net.Listen("tcp", address) + if err == nil { + return listener, nil + } + } + + return nil, errors.New("Couldn't bind plugin TCP listener") +} + +func serverListener_unix() (net.Listener, error) { + tf, err := ioutil.TempFile("", "plugin") + if err != nil { + return nil, err + } + path := tf.Name() + + // Close the file and remove it because it has to not exist for + // the domain socket. + if err := tf.Close(); err != nil { + return nil, err + } + if err := os.Remove(path); err != nil { + return nil, err + } + + l, err := net.Listen("unix", path) + if err != nil { + return nil, err + } + + // Wrap the listener in rmListener so that the Unix domain socket file + // is removed on close. + return &rmListener{ + Listener: l, + Path: path, + }, nil +} + +// rmListener is an implementation of net.Listener that forwards most +// calls to the listener but also removes a file as part of the close. We +// use this to cleanup the unix domain socket on close. 
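
Seen from a plugin binary, everything Serve does above reduces to a single call in main. A sketch, assuming the hypothetical Greeter and GreeterPlugin types from the earlier sketches are compiled into the same binary, and an invented magic-cookie pair that the host must mirror in its ClientConfig.

    package main

    import (
        plugin "github.com/hashicorp/go-plugin"
    )

    // handshake must match the host side; as documented above, the cookie is a UX
    // guard against running the plugin binary directly, not a security mechanism.
    var handshake = plugin.HandshakeConfig{
        ProtocolVersion:  1,
        MagicCookieKey:   "EXAMPLE_PLUGIN", // assumed key
        MagicCookieValue: "example-cookie", // assumed value
    }

    // realGreeter is the concrete, in-process implementation the plugin exports.
    type realGreeter struct{}

    func (realGreeter) Greet(name string) (string, error) { return "hello " + name, nil }

    func main() {
        plugin.Serve(&plugin.ServeConfig{
            HandshakeConfig: handshake,
            Plugins: plugin.PluginSet{
                "greeter": &GreeterPlugin{Impl: realGreeter{}},
            },
            // GRPCServer: plugin.DefaultGRPCServer, // enable to serve over gRPC
        })
    }
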
+type rmListener struct { + net.Listener + Path string +} + +func (l *rmListener) Close() error { + // Close the listener itself + if err := l.Listener.Close(); err != nil { + return err + } + + // Remove the file + return os.Remove(l.Path) +} diff --git a/vendor/github.com/hashicorp/go-plugin/server_mux.go b/vendor/github.com/hashicorp/go-plugin/server_mux.go new file mode 100644 index 0000000..033079e --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/server_mux.go @@ -0,0 +1,31 @@ +package plugin + +import ( + "fmt" + "os" +) + +// ServeMuxMap is the type that is used to configure ServeMux +type ServeMuxMap map[string]*ServeConfig + +// ServeMux is like Serve, but serves multiple types of plugins determined +// by the argument given on the command-line. +// +// This command doesn't return until the plugin is done being executed. Any +// errors are logged or output to stderr. +func ServeMux(m ServeMuxMap) { + if len(os.Args) != 2 { + fmt.Fprintf(os.Stderr, + "Invoked improperly. This is an internal command that shouldn't\n"+ + "be manually invoked.\n") + os.Exit(1) + } + + opts, ok := m[os.Args[1]] + if !ok { + fmt.Fprintf(os.Stderr, "Unknown plugin: %s\n", os.Args[1]) + os.Exit(1) + } + + Serve(opts) +} diff --git a/vendor/github.com/hashicorp/go-plugin/stream.go b/vendor/github.com/hashicorp/go-plugin/stream.go new file mode 100644 index 0000000..1d547aa --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/stream.go @@ -0,0 +1,18 @@ +package plugin + +import ( + "io" + "log" +) + +func copyStream(name string, dst io.Writer, src io.Reader) { + if src == nil { + panic(name + ": src is nil") + } + if dst == nil { + panic(name + ": dst is nil") + } + if _, err := io.Copy(dst, src); err != nil && err != io.EOF { + log.Printf("[ERR] plugin: stream copy '%s' error: %s", name, err) + } +} diff --git a/vendor/github.com/hashicorp/go-plugin/test/grpc/gen.go b/vendor/github.com/hashicorp/go-plugin/test/grpc/gen.go new file mode 100644 index 0000000..c14618a --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/test/grpc/gen.go @@ -0,0 +1,3 @@ +package grpctest + +//go:generate protoc -I ./ ./test.proto --go_out=plugins=grpc:. diff --git a/vendor/github.com/hashicorp/go-plugin/test/grpc/test.pb.go b/vendor/github.com/hashicorp/go-plugin/test/grpc/test.pb.go new file mode 100644 index 0000000..480849b --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/test/grpc/test.pb.go @@ -0,0 +1,733 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: test.proto + +package grpctest + +import ( + fmt "fmt" + proto "github.com/golang/protobuf/proto" + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" + math "math" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TestRequest struct { + Input int32 `protobuf:"varint,1,opt,name=Input,proto3" json:"Input,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestRequest) Reset() { *m = TestRequest{} } +func (m *TestRequest) String() string { return proto.CompactTextString(m) } +func (*TestRequest) ProtoMessage() {} +func (*TestRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{0} +} + +func (m *TestRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestRequest.Unmarshal(m, b) +} +func (m *TestRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestRequest.Marshal(b, m, deterministic) +} +func (m *TestRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestRequest.Merge(m, src) +} +func (m *TestRequest) XXX_Size() int { + return xxx_messageInfo_TestRequest.Size(m) +} +func (m *TestRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TestRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TestRequest proto.InternalMessageInfo + +func (m *TestRequest) GetInput() int32 { + if m != nil { + return m.Input + } + return 0 +} + +type TestResponse struct { + Output int32 `protobuf:"varint,2,opt,name=Output,proto3" json:"Output,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestResponse) Reset() { *m = TestResponse{} } +func (m *TestResponse) String() string { return proto.CompactTextString(m) } +func (*TestResponse) ProtoMessage() {} +func (*TestResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{1} +} + +func (m *TestResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestResponse.Unmarshal(m, b) +} +func (m *TestResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestResponse.Marshal(b, m, deterministic) +} +func (m *TestResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestResponse.Merge(m, src) +} +func (m *TestResponse) XXX_Size() int { + return xxx_messageInfo_TestResponse.Size(m) +} +func (m *TestResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TestResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TestResponse proto.InternalMessageInfo + +func (m *TestResponse) GetOutput() int32 { + if m != nil { + return m.Output + } + return 0 +} + +type PrintKVRequest struct { + Key string `protobuf:"bytes,1,opt,name=Key,proto3" json:"Key,omitempty"` + // Types that are valid to be assigned to Value: + // *PrintKVRequest_ValueString + // *PrintKVRequest_ValueInt + Value isPrintKVRequest_Value `protobuf_oneof:"Value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrintKVRequest) Reset() { *m = PrintKVRequest{} } +func (m *PrintKVRequest) String() string { return proto.CompactTextString(m) } +func (*PrintKVRequest) ProtoMessage() {} +func (*PrintKVRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{2} +} + +func (m *PrintKVRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrintKVRequest.Unmarshal(m, b) +} +func (m *PrintKVRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrintKVRequest.Marshal(b, m, deterministic) +} +func (m *PrintKVRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_PrintKVRequest.Merge(m, src) +} +func (m *PrintKVRequest) XXX_Size() int { + return xxx_messageInfo_PrintKVRequest.Size(m) +} +func (m *PrintKVRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PrintKVRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PrintKVRequest proto.InternalMessageInfo + +func (m *PrintKVRequest) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +type isPrintKVRequest_Value interface { + isPrintKVRequest_Value() +} + +type PrintKVRequest_ValueString struct { + ValueString string `protobuf:"bytes,2,opt,name=ValueString,proto3,oneof"` +} + +type PrintKVRequest_ValueInt struct { + ValueInt int32 `protobuf:"varint,3,opt,name=ValueInt,proto3,oneof"` +} + +func (*PrintKVRequest_ValueString) isPrintKVRequest_Value() {} + +func (*PrintKVRequest_ValueInt) isPrintKVRequest_Value() {} + +func (m *PrintKVRequest) GetValue() isPrintKVRequest_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *PrintKVRequest) GetValueString() string { + if x, ok := m.GetValue().(*PrintKVRequest_ValueString); ok { + return x.ValueString + } + return "" +} + +func (m *PrintKVRequest) GetValueInt() int32 { + if x, ok := m.GetValue().(*PrintKVRequest_ValueInt); ok { + return x.ValueInt + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PrintKVRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PrintKVRequest_OneofMarshaler, _PrintKVRequest_OneofUnmarshaler, _PrintKVRequest_OneofSizer, []interface{}{ + (*PrintKVRequest_ValueString)(nil), + (*PrintKVRequest_ValueInt)(nil), + } +} + +func _PrintKVRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PrintKVRequest) + // Value + switch x := m.Value.(type) { + case *PrintKVRequest_ValueString: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ValueString) + case *PrintKVRequest_ValueInt: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ValueInt)) + case nil: + default: + return fmt.Errorf("PrintKVRequest.Value has unexpected type %T", x) + } + return nil +} + +func _PrintKVRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PrintKVRequest) + switch tag { + case 2: // Value.ValueString + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &PrintKVRequest_ValueString{x} + return true, err + case 3: // Value.ValueInt + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &PrintKVRequest_ValueInt{int32(x)} + return true, err + default: + return false, nil + } +} + +func _PrintKVRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PrintKVRequest) + // Value + switch x := m.Value.(type) { + case *PrintKVRequest_ValueString: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ValueString))) + n += len(x.ValueString) + case *PrintKVRequest_ValueInt: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ValueInt)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type PrintKVResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrintKVResponse) Reset() { *m = PrintKVResponse{} } +func (m *PrintKVResponse) 
String() string { return proto.CompactTextString(m) } +func (*PrintKVResponse) ProtoMessage() {} +func (*PrintKVResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{3} +} + +func (m *PrintKVResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrintKVResponse.Unmarshal(m, b) +} +func (m *PrintKVResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrintKVResponse.Marshal(b, m, deterministic) +} +func (m *PrintKVResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrintKVResponse.Merge(m, src) +} +func (m *PrintKVResponse) XXX_Size() int { + return xxx_messageInfo_PrintKVResponse.Size(m) +} +func (m *PrintKVResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PrintKVResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PrintKVResponse proto.InternalMessageInfo + +type BidirectionalRequest struct { + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BidirectionalRequest) Reset() { *m = BidirectionalRequest{} } +func (m *BidirectionalRequest) String() string { return proto.CompactTextString(m) } +func (*BidirectionalRequest) ProtoMessage() {} +func (*BidirectionalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{4} +} + +func (m *BidirectionalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BidirectionalRequest.Unmarshal(m, b) +} +func (m *BidirectionalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BidirectionalRequest.Marshal(b, m, deterministic) +} +func (m *BidirectionalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BidirectionalRequest.Merge(m, src) +} +func (m *BidirectionalRequest) XXX_Size() int { + return xxx_messageInfo_BidirectionalRequest.Size(m) +} +func (m *BidirectionalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BidirectionalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BidirectionalRequest proto.InternalMessageInfo + +func (m *BidirectionalRequest) GetId() uint32 { + if m != nil { + return m.Id + } + return 0 +} + +type BidirectionalResponse struct { + Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BidirectionalResponse) Reset() { *m = BidirectionalResponse{} } +func (m *BidirectionalResponse) String() string { return proto.CompactTextString(m) } +func (*BidirectionalResponse) ProtoMessage() {} +func (*BidirectionalResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{5} +} + +func (m *BidirectionalResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BidirectionalResponse.Unmarshal(m, b) +} +func (m *BidirectionalResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BidirectionalResponse.Marshal(b, m, deterministic) +} +func (m *BidirectionalResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BidirectionalResponse.Merge(m, src) +} +func (m *BidirectionalResponse) XXX_Size() int { + return xxx_messageInfo_BidirectionalResponse.Size(m) +} +func (m *BidirectionalResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BidirectionalResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BidirectionalResponse proto.InternalMessageInfo + +func (m *BidirectionalResponse) GetId() uint32 { + 
if m != nil { + return m.Id + } + return 0 +} + +type PingRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PingRequest) Reset() { *m = PingRequest{} } +func (m *PingRequest) String() string { return proto.CompactTextString(m) } +func (*PingRequest) ProtoMessage() {} +func (*PingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{6} +} + +func (m *PingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PingRequest.Unmarshal(m, b) +} +func (m *PingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PingRequest.Marshal(b, m, deterministic) +} +func (m *PingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PingRequest.Merge(m, src) +} +func (m *PingRequest) XXX_Size() int { + return xxx_messageInfo_PingRequest.Size(m) +} +func (m *PingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PingRequest proto.InternalMessageInfo + +type PongResponse struct { + Msg string `protobuf:"bytes,1,opt,name=msg,proto3" json:"msg,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PongResponse) Reset() { *m = PongResponse{} } +func (m *PongResponse) String() string { return proto.CompactTextString(m) } +func (*PongResponse) ProtoMessage() {} +func (*PongResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c161fcfdc0c3ff1e, []int{7} +} + +func (m *PongResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PongResponse.Unmarshal(m, b) +} +func (m *PongResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PongResponse.Marshal(b, m, deterministic) +} +func (m *PongResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PongResponse.Merge(m, src) +} +func (m *PongResponse) XXX_Size() int { + return xxx_messageInfo_PongResponse.Size(m) +} +func (m *PongResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PongResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PongResponse proto.InternalMessageInfo + +func (m *PongResponse) GetMsg() string { + if m != nil { + return m.Msg + } + return "" +} + +func init() { + proto.RegisterType((*TestRequest)(nil), "grpctest.TestRequest") + proto.RegisterType((*TestResponse)(nil), "grpctest.TestResponse") + proto.RegisterType((*PrintKVRequest)(nil), "grpctest.PrintKVRequest") + proto.RegisterType((*PrintKVResponse)(nil), "grpctest.PrintKVResponse") + proto.RegisterType((*BidirectionalRequest)(nil), "grpctest.BidirectionalRequest") + proto.RegisterType((*BidirectionalResponse)(nil), "grpctest.BidirectionalResponse") + proto.RegisterType((*PingRequest)(nil), "grpctest.PingRequest") + proto.RegisterType((*PongResponse)(nil), "grpctest.PongResponse") +} + +func init() { proto.RegisterFile("test.proto", fileDescriptor_c161fcfdc0c3ff1e) } + +var fileDescriptor_c161fcfdc0c3ff1e = []byte{ + // 355 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0xcd, 0x4e, 0xc2, 0x40, + 0x14, 0x85, 0xdb, 0x02, 0x05, 0x2e, 0x3f, 0xe2, 0x04, 0x08, 0x12, 0xa3, 0x64, 0x4c, 0x90, 0x15, + 0x31, 0xb8, 0x30, 0x2e, 0x4c, 0x0c, 0xba, 0x80, 0xb0, 0x90, 0x0c, 0x86, 0x3d, 0x3f, 0x93, 0x66, + 0x12, 0x3a, 0xad, 0x9d, 0xe9, 0xc2, 0x17, 0xf1, 0x79, 0xcd, 0x0c, 0x6d, 0x19, 0x08, 0x2e, 0xdc, + 0xdd, 0x73, 0x7b, 0x72, 0xe6, 0x9e, 0x2f, 0x05, 0x90, 0x54, 0xc8, 0x61, 
0x18, 0x05, 0x32, 0x40, + 0x25, 0x2f, 0x0a, 0x37, 0x4a, 0xe3, 0x3b, 0xa8, 0x7c, 0x52, 0x21, 0x09, 0xfd, 0x8a, 0xa9, 0x90, + 0xa8, 0x09, 0x85, 0x29, 0x0f, 0x63, 0xd9, 0xb1, 0x7b, 0xf6, 0xa0, 0x40, 0xf6, 0x02, 0xf7, 0xa1, + 0xba, 0x37, 0x89, 0x30, 0xe0, 0x82, 0xa2, 0x36, 0xb8, 0x1f, 0xb1, 0x54, 0x36, 0x47, 0xdb, 0x12, + 0x85, 0x7d, 0xa8, 0xcf, 0x23, 0xc6, 0xe5, 0x6c, 0x99, 0xe6, 0x35, 0x20, 0x37, 0xa3, 0xdf, 0x3a, + 0xad, 0x4c, 0xd4, 0x88, 0x30, 0x54, 0x96, 0xab, 0x5d, 0x4c, 0x17, 0x32, 0x62, 0xdc, 0xd3, 0x01, + 0xe5, 0x89, 0x45, 0xcc, 0x25, 0xba, 0x86, 0x92, 0x96, 0x53, 0x2e, 0x3b, 0x39, 0xf5, 0xc2, 0xc4, + 0x22, 0xd9, 0x66, 0x5c, 0x84, 0x82, 0x9e, 0xf1, 0x25, 0x5c, 0x64, 0xcf, 0xed, 0x2f, 0xc3, 0x7d, + 0x68, 0x8e, 0xd9, 0x96, 0x45, 0x74, 0x23, 0x59, 0xc0, 0x57, 0xbb, 0xf4, 0x8e, 0x3a, 0x38, 0x6c, + 0xab, 0xcf, 0xa8, 0x11, 0x87, 0x6d, 0xf1, 0x3d, 0xb4, 0x4e, 0x7c, 0x49, 0xb5, 0x53, 0x63, 0x0d, + 0x2a, 0x73, 0xc6, 0xbd, 0x24, 0x07, 0xf7, 0xa0, 0x3a, 0x0f, 0x94, 0x4c, 0xec, 0x0d, 0xc8, 0xf9, + 0xc2, 0x4b, 0xfb, 0xf9, 0xc2, 0x1b, 0xfd, 0x38, 0x90, 0x57, 0xb0, 0xd0, 0x33, 0xb8, 0xef, 0x41, + 0xbc, 0xde, 0x51, 0xd4, 0x1a, 0xa6, 0xb8, 0x87, 0x06, 0xeb, 0x6e, 0xfb, 0x74, 0x9d, 0x74, 0xb0, + 0xd0, 0x2b, 0x14, 0x93, 0x62, 0xa8, 0x73, 0x30, 0x1d, 0xa3, 0xed, 0x5e, 0x9d, 0xf9, 0x92, 0x25, + 0x10, 0xa8, 0x1d, 0xf5, 0x43, 0x37, 0x07, 0xf7, 0x39, 0x40, 0xdd, 0xdb, 0x3f, 0xbf, 0x67, 0x99, + 0x2f, 0xe0, 0x2e, 0x64, 0x44, 0x57, 0xfe, 0xbf, 0x0b, 0x0d, 0xec, 0x07, 0x7b, 0xf4, 0x06, 0x25, + 0x45, 0x52, 0xe1, 0x43, 0x4f, 0x90, 0x57, 0xb3, 0x19, 0x64, 0x50, 0x36, 0x83, 0x4c, 0xda, 0xd8, + 0x5a, 0xbb, 0xfa, 0xff, 0x7d, 0xfc, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x59, 0x20, 0xc7, 0xcd, + 0x02, 0x00, 0x00, +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TestClient is the client API for Test service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TestClient interface { + Double(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (*TestResponse, error) + PrintKV(ctx context.Context, in *PrintKVRequest, opts ...grpc.CallOption) (*PrintKVResponse, error) + Bidirectional(ctx context.Context, in *BidirectionalRequest, opts ...grpc.CallOption) (*BidirectionalResponse, error) + Stream(ctx context.Context, opts ...grpc.CallOption) (Test_StreamClient, error) +} + +type testClient struct { + cc *grpc.ClientConn +} + +func NewTestClient(cc *grpc.ClientConn) TestClient { + return &testClient{cc} +} + +func (c *testClient) Double(ctx context.Context, in *TestRequest, opts ...grpc.CallOption) (*TestResponse, error) { + out := new(TestResponse) + err := c.cc.Invoke(ctx, "/grpctest.Test/Double", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testClient) PrintKV(ctx context.Context, in *PrintKVRequest, opts ...grpc.CallOption) (*PrintKVResponse, error) { + out := new(PrintKVResponse) + err := c.cc.Invoke(ctx, "/grpctest.Test/PrintKV", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *testClient) Bidirectional(ctx context.Context, in *BidirectionalRequest, opts ...grpc.CallOption) (*BidirectionalResponse, error) { + out := new(BidirectionalResponse) + err := c.cc.Invoke(ctx, "/grpctest.Test/Bidirectional", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testClient) Stream(ctx context.Context, opts ...grpc.CallOption) (Test_StreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_Test_serviceDesc.Streams[0], "/grpctest.Test/Stream", opts...) + if err != nil { + return nil, err + } + x := &testStreamClient{stream} + return x, nil +} + +type Test_StreamClient interface { + Send(*TestRequest) error + Recv() (*TestResponse, error) + grpc.ClientStream +} + +type testStreamClient struct { + grpc.ClientStream +} + +func (x *testStreamClient) Send(m *TestRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testStreamClient) Recv() (*TestResponse, error) { + m := new(TestResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TestServer is the server API for Test service. +type TestServer interface { + Double(context.Context, *TestRequest) (*TestResponse, error) + PrintKV(context.Context, *PrintKVRequest) (*PrintKVResponse, error) + Bidirectional(context.Context, *BidirectionalRequest) (*BidirectionalResponse, error) + Stream(Test_StreamServer) error +} + +func RegisterTestServer(s *grpc.Server, srv TestServer) { + s.RegisterService(&_Test_serviceDesc, srv) +} + +func _Test_Double_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TestRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServer).Double(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpctest.Test/Double", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServer).Double(ctx, req.(*TestRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Test_PrintKV_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PrintKVRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServer).PrintKV(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpctest.Test/PrintKV", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServer).PrintKV(ctx, req.(*PrintKVRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Test_Bidirectional_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BidirectionalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServer).Bidirectional(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpctest.Test/Bidirectional", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServer).Bidirectional(ctx, req.(*BidirectionalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Test_Stream_Handler(srv interface{}, stream grpc.ServerStream) error { + return 
srv.(TestServer).Stream(&testStreamServer{stream}) +} + +type Test_StreamServer interface { + Send(*TestResponse) error + Recv() (*TestRequest, error) + grpc.ServerStream +} + +type testStreamServer struct { + grpc.ServerStream +} + +func (x *testStreamServer) Send(m *TestResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testStreamServer) Recv() (*TestRequest, error) { + m := new(TestRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Test_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpctest.Test", + HandlerType: (*TestServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Double", + Handler: _Test_Double_Handler, + }, + { + MethodName: "PrintKV", + Handler: _Test_PrintKV_Handler, + }, + { + MethodName: "Bidirectional", + Handler: _Test_Bidirectional_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Stream", + Handler: _Test_Stream_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "test.proto", +} + +// PingPongClient is the client API for PingPong service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PingPongClient interface { + Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PongResponse, error) +} + +type pingPongClient struct { + cc *grpc.ClientConn +} + +func NewPingPongClient(cc *grpc.ClientConn) PingPongClient { + return &pingPongClient{cc} +} + +func (c *pingPongClient) Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PongResponse, error) { + out := new(PongResponse) + err := c.cc.Invoke(ctx, "/grpctest.PingPong/Ping", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PingPongServer is the server API for PingPong service. 
+type PingPongServer interface { + Ping(context.Context, *PingRequest) (*PongResponse, error) +} + +func RegisterPingPongServer(s *grpc.Server, srv PingPongServer) { + s.RegisterService(&_PingPong_serviceDesc, srv) +} + +func _PingPong_Ping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PingPongServer).Ping(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpctest.PingPong/Ping", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PingPongServer).Ping(ctx, req.(*PingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PingPong_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpctest.PingPong", + HandlerType: (*PingPongServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Ping", + Handler: _PingPong_Ping_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "test.proto", +} diff --git a/vendor/github.com/hashicorp/go-plugin/testing.go b/vendor/github.com/hashicorp/go-plugin/testing.go new file mode 100644 index 0000000..2cf2c26 --- /dev/null +++ b/vendor/github.com/hashicorp/go-plugin/testing.go @@ -0,0 +1,180 @@ +package plugin + +import ( + "bytes" + "context" + "io" + "net" + "net/rpc" + + "github.com/mitchellh/go-testing-interface" + hclog "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin/internal/plugin" + "google.golang.org/grpc" +) + +// TestOptions allows specifying options that can affect the behavior of the +// test functions +type TestOptions struct { + //ServerStdout causes the given value to be used in place of a blank buffer + //for RPCServer's Stdout + ServerStdout io.ReadCloser + + //ServerStderr causes the given value to be used in place of a blank buffer + //for RPCServer's Stderr + ServerStderr io.ReadCloser +} + +// The testing file contains test helpers that you can use outside of +// this package for making it easier to test plugins themselves. + +// TestConn is a helper function for returning a client and server +// net.Conn connected to each other. +func TestConn(t testing.T) (net.Conn, net.Conn) { + // Listen to any local port. This listener will be closed + // after a single connection is established. + l, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("err: %s", err) + } + + // Start a goroutine to accept our client connection + var serverConn net.Conn + doneCh := make(chan struct{}) + go func() { + defer close(doneCh) + defer l.Close() + var err error + serverConn, err = l.Accept() + if err != nil { + t.Fatalf("err: %s", err) + } + }() + + // Connect to the server + clientConn, err := net.Dial("tcp", l.Addr().String()) + if err != nil { + t.Fatalf("err: %s", err) + } + + // Wait for the server side to acknowledge it has connected + <-doneCh + + return clientConn, serverConn +} + +// TestRPCConn returns a rpc client and server connected to each other. +func TestRPCConn(t testing.T) (*rpc.Client, *rpc.Server) { + clientConn, serverConn := TestConn(t) + + server := rpc.NewServer() + go server.ServeConn(serverConn) + + client := rpc.NewClient(clientConn) + return client, server +} + +// TestPluginRPCConn returns a plugin RPC client and server that are connected +// together and configured. 
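+//
+// A minimal usage sketch (illustrative only, not from upstream): in a regular
+// test file, assuming a hypothetical Plugin implementation kvPlugin registered
+// under the name "kv", the helper would typically be used like this:
+//
+//	func TestKV(t *testing.T) {
+//		client, _ := TestPluginRPCConn(t, map[string]Plugin{"kv": &kvPlugin{}}, nil)
+//		defer client.Close()
+//		raw, err := client.Dispense("kv")
+//		if err != nil {
+//			t.Fatal(err)
+//		}
+//		_ = raw // type-assert raw to the plugin's interface and exercise it here
+//	}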
+func TestPluginRPCConn(t testing.T, ps map[string]Plugin, opts *TestOptions) (*RPCClient, *RPCServer) { + // Create two net.Conns we can use to shuttle our control connection + clientConn, serverConn := TestConn(t) + + // Start up the server + server := &RPCServer{Plugins: ps, Stdout: new(bytes.Buffer), Stderr: new(bytes.Buffer)} + if opts != nil { + if opts.ServerStdout != nil { + server.Stdout = opts.ServerStdout + } + if opts.ServerStderr != nil { + server.Stderr = opts.ServerStderr + } + } + go server.ServeConn(serverConn) + + // Connect the client to the server + client, err := NewRPCClient(clientConn, ps) + if err != nil { + t.Fatalf("err: %s", err) + } + + return client, server +} + +// TestGRPCConn returns a gRPC client conn and grpc server that are connected +// together and configured. The register function is used to register services +// prior to the Serve call. This is used to test gRPC connections. +func TestGRPCConn(t testing.T, register func(*grpc.Server)) (*grpc.ClientConn, *grpc.Server) { + // Create a listener + l, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("err: %s", err) + } + + server := grpc.NewServer() + register(server) + go server.Serve(l) + + // Connect to the server + conn, err := grpc.Dial( + l.Addr().String(), + grpc.WithBlock(), + grpc.WithInsecure()) + if err != nil { + t.Fatalf("err: %s", err) + } + + // Connection successful, close the listener + l.Close() + + return conn, server +} + +// TestPluginGRPCConn returns a plugin gRPC client and server that are connected +// together and configured. This is used to test gRPC connections. +func TestPluginGRPCConn(t testing.T, ps map[string]Plugin) (*GRPCClient, *GRPCServer) { + // Create a listener + l, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("err: %s", err) + } + + // Start up the server + server := &GRPCServer{ + Plugins: ps, + DoneCh: make(chan struct{}), + Server: DefaultGRPCServer, + Stdout: new(bytes.Buffer), + Stderr: new(bytes.Buffer), + logger: hclog.Default(), + } + if err := server.Init(); err != nil { + t.Fatalf("err: %s", err) + } + go server.Serve(l) + + // Connect to the server + conn, err := grpc.Dial( + l.Addr().String(), + grpc.WithBlock(), + grpc.WithInsecure()) + if err != nil { + t.Fatalf("err: %s", err) + } + + brokerGRPCClient := newGRPCBrokerClient(conn) + broker := newGRPCBroker(brokerGRPCClient, nil) + go broker.Run() + go brokerGRPCClient.StartStream() + + // Create the client + client := &GRPCClient{ + Conn: conn, + Plugins: ps, + broker: broker, + doneCtx: context.Background(), + controller: plugin.NewGRPCControllerClient(conn), + } + + return client, server +} diff --git a/vendor/github.com/hashicorp/yamux/LICENSE b/vendor/github.com/hashicorp/yamux/LICENSE new file mode 100644 index 0000000..f0e5c79 --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/LICENSE @@ -0,0 +1,362 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. "Contributor" + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. "Contributor Version" + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + + means Covered Software of a particular Contributor. + +1.4. 
"Covered Software" + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the terms of + a Secondary License. + +1.6. "Executable Form" + + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + + means a work that combines Covered Software with other material, in a + separate file or files, that is not Covered Software. + +1.8. "License" + + means this document. + +1.9. "Licensable" + + means having the right to grant, to the maximum extent possible, whether + at the time of the initial grant or subsequently, any and all of the + rights conveyed by this License. + +1.10. "Modifications" + + means any of the following: + + a. any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. "Patent Claims" of a Contributor + + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the License, + by the making, using, selling, offering for sale, having made, import, + or transfer of either its Contributions or its Contributor Version. + +1.12. "Secondary License" + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. "Source Code Form" + + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, "control" means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution + become effective for each Contribution on the date the Contributor first + distributes such Contribution. + +2.3. 
Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under + this License. No additional rights or licenses will be implied from the + distribution or licensing of Covered Software under this License. + Notwithstanding Section 2.1(b) above, no patent license is granted by a + Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of + its Contributions. + + This License does not grant any rights in the trademarks, service marks, + or logos of any Contributor (except as may be necessary to comply with + the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this + License (see Section 10.2) or under the terms of a Secondary License (if + permitted under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its + Contributions are its original creation(s) or it has sufficient rights to + grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under + applicable copyright doctrines of fair use, fair dealing, or other + equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under + the terms of this License. You must inform recipients that the Source + Code Form of the Covered Software is governed by the terms of this + License, and how they can obtain a copy of this License. You may not + attempt to alter or restrict the recipients' rights in the Source Code + Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter the + recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for + the Covered Software. 
If the Larger Work is a combination of Covered + Software with a work governed by one or more Secondary Licenses, and the + Covered Software is not Incompatible With Secondary Licenses, this + License permits You to additionally distribute such Covered Software + under the terms of such Secondary License(s), so that the recipient of + the Larger Work may, at their option, further distribute the Covered + Software under the terms of either this License or such Secondary + License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices + (including copyright notices, patent notices, disclaimers of warranty, or + limitations of liability) contained within the Source Code Form of the + Covered Software, except that You may alter any license notices to the + extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on + behalf of any Contributor. You must make it absolutely clear that any + such warranty, support, indemnity, or liability obligation is offered by + You alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, + judicial order, or regulation then You must: (a) comply with the terms of + this License to the maximum extent possible; and (b) describe the + limitations and the code they affect. Such description must be placed in a + text file included with all distributions of the Covered Software under + this License. Except to the extent prohibited by statute or regulation, + such description must be sufficiently detailed for a recipient of ordinary + skill to be able to understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing + basis, if such Contributor fails to notify You of the non-compliance by + some reasonable means prior to 60 days after You have come back into + compliance. Moreover, Your grants from a particular Contributor are + reinstated on an ongoing basis if such Contributor notifies You of the + non-compliance by some reasonable means, this is the first time You have + received notice of non-compliance with this License from such + Contributor, and You become compliant prior to 30 days after Your receipt + of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, + counter-claims, and cross-claims) alleging that a Contributor Version + directly or indirectly infringes any patent, then the rights granted to + You by any and all Contributors for the Covered Software under Section + 2.1 of this License shall terminate. 
+ +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an "as is" basis, + without warranty of any kind, either expressed, implied, or statutory, + including, without limitation, warranties that the Covered Software is free + of defects, merchantable, fit for a particular purpose or non-infringing. + The entire risk as to the quality and performance of the Covered Software + is with You. Should any Covered Software prove defective in any respect, + You (not any Contributor) assume the cost of any necessary servicing, + repair, or correction. This disclaimer of warranty constitutes an essential + part of this License. No use of any Covered Software is authorized under + this License except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from + such party's negligence to the extent applicable law prohibits such + limitation. Some jurisdictions do not allow the exclusion or limitation of + incidental or consequential damages, so this exclusion and limitation may + not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts + of a jurisdiction where the defendant maintains its principal place of + business and such litigation shall be governed by laws of that + jurisdiction, without reference to its conflict-of-law provisions. Nothing + in this Section shall prevent a party's ability to bring cross-claims or + counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject + matter hereof. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. Any law or regulation which provides that + the language of a contract shall be construed against the drafter shall not + be used to construe this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version + of the License under which You originally received the Covered Software, + or under the terms of any subsequent version published by the license + steward. + +10.3. 
Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a + modified version of this License if you rename the license and remove + any references to the name of the license steward (except to note that + such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary + Licenses If You choose to distribute Source Code Form that is + Incompatible With Secondary Licenses under the terms of this version of + the License, the notice described in Exhibit B of this License must be + attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, +then You may include the notice in a location (such as a LICENSE file in a +relevant directory) where a recipient would be likely to look for such a +notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice + + This Source Code Form is "Incompatible + With Secondary Licenses", as defined by + the Mozilla Public License, v. 2.0. \ No newline at end of file diff --git a/vendor/github.com/hashicorp/yamux/addr.go b/vendor/github.com/hashicorp/yamux/addr.go new file mode 100644 index 0000000..be6ebca --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/addr.go @@ -0,0 +1,60 @@ +package yamux + +import ( + "fmt" + "net" +) + +// hasAddr is used to get the address from the underlying connection +type hasAddr interface { + LocalAddr() net.Addr + RemoteAddr() net.Addr +} + +// yamuxAddr is used when we cannot get the underlying address +type yamuxAddr struct { + Addr string +} + +func (*yamuxAddr) Network() string { + return "yamux" +} + +func (y *yamuxAddr) String() string { + return fmt.Sprintf("yamux:%s", y.Addr) +} + +// Addr is used to get the address of the listener. +func (s *Session) Addr() net.Addr { + return s.LocalAddr() +} + +// LocalAddr is used to get the local address of the +// underlying connection. 
+func (s *Session) LocalAddr() net.Addr { + addr, ok := s.conn.(hasAddr) + if !ok { + return &yamuxAddr{"local"} + } + return addr.LocalAddr() +} + +// RemoteAddr is used to get the address of remote end +// of the underlying connection +func (s *Session) RemoteAddr() net.Addr { + addr, ok := s.conn.(hasAddr) + if !ok { + return &yamuxAddr{"remote"} + } + return addr.RemoteAddr() +} + +// LocalAddr returns the local address +func (s *Stream) LocalAddr() net.Addr { + return s.session.LocalAddr() +} + +// LocalAddr returns the remote address +func (s *Stream) RemoteAddr() net.Addr { + return s.session.RemoteAddr() +} diff --git a/vendor/github.com/hashicorp/yamux/const.go b/vendor/github.com/hashicorp/yamux/const.go new file mode 100644 index 0000000..4f52938 --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/const.go @@ -0,0 +1,157 @@ +package yamux + +import ( + "encoding/binary" + "fmt" +) + +var ( + // ErrInvalidVersion means we received a frame with an + // invalid version + ErrInvalidVersion = fmt.Errorf("invalid protocol version") + + // ErrInvalidMsgType means we received a frame with an + // invalid message type + ErrInvalidMsgType = fmt.Errorf("invalid msg type") + + // ErrSessionShutdown is used if there is a shutdown during + // an operation + ErrSessionShutdown = fmt.Errorf("session shutdown") + + // ErrStreamsExhausted is returned if we have no more + // stream ids to issue + ErrStreamsExhausted = fmt.Errorf("streams exhausted") + + // ErrDuplicateStream is used if a duplicate stream is + // opened inbound + ErrDuplicateStream = fmt.Errorf("duplicate stream initiated") + + // ErrReceiveWindowExceeded indicates the window was exceeded + ErrRecvWindowExceeded = fmt.Errorf("recv window exceeded") + + // ErrTimeout is used when we reach an IO deadline + ErrTimeout = fmt.Errorf("i/o deadline reached") + + // ErrStreamClosed is returned when using a closed stream + ErrStreamClosed = fmt.Errorf("stream closed") + + // ErrUnexpectedFlag is set when we get an unexpected flag + ErrUnexpectedFlag = fmt.Errorf("unexpected flag") + + // ErrRemoteGoAway is used when we get a go away from the other side + ErrRemoteGoAway = fmt.Errorf("remote end is not accepting connections") + + // ErrConnectionReset is sent if a stream is reset. This can happen + // if the backlog is exceeded, or if there was a remote GoAway. + ErrConnectionReset = fmt.Errorf("connection reset") + + // ErrConnectionWriteTimeout indicates that we hit the "safety valve" + // timeout writing to the underlying stream connection. + ErrConnectionWriteTimeout = fmt.Errorf("connection write timeout") + + // ErrKeepAliveTimeout is sent if a missed keepalive caused the stream close + ErrKeepAliveTimeout = fmt.Errorf("keepalive timeout") +) + +const ( + // protoVersion is the only version we support + protoVersion uint8 = 0 +) + +const ( + // Data is used for data frames. They are followed + // by length bytes worth of payload. + typeData uint8 = iota + + // WindowUpdate is used to change the window of + // a given stream. The length indicates the delta + // update to the window. + typeWindowUpdate + + // Ping is sent as a keep-alive or to measure + // the RTT. The StreamID and Length value are echoed + // back in the response. + typePing + + // GoAway is sent to terminate a session. The StreamID + // should be 0 and the length is an error code. + typeGoAway +) + +const ( + // SYN is sent to signal a new stream. May + // be sent with a data payload + flagSYN uint16 = 1 << iota + + // ACK is sent to acknowledge a new stream. 
May + // be sent with a data payload + flagACK + + // FIN is sent to half-close the given stream. + // May be sent with a data payload. + flagFIN + + // RST is used to hard close a given stream. + flagRST +) + +const ( + // initialStreamWindow is the initial stream window size + initialStreamWindow uint32 = 256 * 1024 +) + +const ( + // goAwayNormal is sent on a normal termination + goAwayNormal uint32 = iota + + // goAwayProtoErr sent on a protocol error + goAwayProtoErr + + // goAwayInternalErr sent on an internal error + goAwayInternalErr +) + +const ( + sizeOfVersion = 1 + sizeOfType = 1 + sizeOfFlags = 2 + sizeOfStreamID = 4 + sizeOfLength = 4 + headerSize = sizeOfVersion + sizeOfType + sizeOfFlags + + sizeOfStreamID + sizeOfLength +) + +type header []byte + +func (h header) Version() uint8 { + return h[0] +} + +func (h header) MsgType() uint8 { + return h[1] +} + +func (h header) Flags() uint16 { + return binary.BigEndian.Uint16(h[2:4]) +} + +func (h header) StreamID() uint32 { + return binary.BigEndian.Uint32(h[4:8]) +} + +func (h header) Length() uint32 { + return binary.BigEndian.Uint32(h[8:12]) +} + +func (h header) String() string { + return fmt.Sprintf("Vsn:%d Type:%d Flags:%d StreamID:%d Length:%d", + h.Version(), h.MsgType(), h.Flags(), h.StreamID(), h.Length()) +} + +func (h header) encode(msgType uint8, flags uint16, streamID uint32, length uint32) { + h[0] = protoVersion + h[1] = msgType + binary.BigEndian.PutUint16(h[2:4], flags) + binary.BigEndian.PutUint32(h[4:8], streamID) + binary.BigEndian.PutUint32(h[8:12], length) +} diff --git a/vendor/github.com/hashicorp/yamux/mux.go b/vendor/github.com/hashicorp/yamux/mux.go new file mode 100644 index 0000000..18a078c --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/mux.go @@ -0,0 +1,98 @@ +package yamux + +import ( + "fmt" + "io" + "log" + "os" + "time" +) + +// Config is used to tune the Yamux session +type Config struct { + // AcceptBacklog is used to limit how many streams may be + // waiting an accept. + AcceptBacklog int + + // EnableKeepalive is used to do a period keep alive + // messages using a ping. + EnableKeepAlive bool + + // KeepAliveInterval is how often to perform the keep alive + KeepAliveInterval time.Duration + + // ConnectionWriteTimeout is meant to be a "safety valve" timeout after + // we which will suspect a problem with the underlying connection and + // close it. This is only applied to writes, where's there's generally + // an expectation that things will move along quickly. + ConnectionWriteTimeout time.Duration + + // MaxStreamWindowSize is used to control the maximum + // window size that we allow for a stream. + MaxStreamWindowSize uint32 + + // LogOutput is used to control the log destination. Either Logger or + // LogOutput can be set, not both. + LogOutput io.Writer + + // Logger is used to pass in the logger to be used. Either Logger or + // LogOutput can be set, not both. 
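+	//
+	// Illustrative sketch only: a caller that wants its own logger clears
+	// LogOutput and sets Logger instead, e.g.
+	//
+	//	cfg := yamux.DefaultConfig()
+	//	cfg.LogOutput = nil
+	//	cfg.Logger = log.New(os.Stderr, "yamux: ", log.LstdFlags)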
+ Logger *log.Logger +} + +// DefaultConfig is used to return a default configuration +func DefaultConfig() *Config { + return &Config{ + AcceptBacklog: 256, + EnableKeepAlive: true, + KeepAliveInterval: 30 * time.Second, + ConnectionWriteTimeout: 10 * time.Second, + MaxStreamWindowSize: initialStreamWindow, + LogOutput: os.Stderr, + } +} + +// VerifyConfig is used to verify the sanity of configuration +func VerifyConfig(config *Config) error { + if config.AcceptBacklog <= 0 { + return fmt.Errorf("backlog must be positive") + } + if config.KeepAliveInterval == 0 { + return fmt.Errorf("keep-alive interval must be positive") + } + if config.MaxStreamWindowSize < initialStreamWindow { + return fmt.Errorf("MaxStreamWindowSize must be larger than %d", initialStreamWindow) + } + if config.LogOutput != nil && config.Logger != nil { + return fmt.Errorf("both Logger and LogOutput may not be set, select one") + } else if config.LogOutput == nil && config.Logger == nil { + return fmt.Errorf("one of Logger or LogOutput must be set, select one") + } + return nil +} + +// Server is used to initialize a new server-side connection. +// There must be at most one server-side connection. If a nil config is +// provided, the DefaultConfiguration will be used. +func Server(conn io.ReadWriteCloser, config *Config) (*Session, error) { + if config == nil { + config = DefaultConfig() + } + if err := VerifyConfig(config); err != nil { + return nil, err + } + return newSession(config, conn, false), nil +} + +// Client is used to initialize a new client-side connection. +// There must be at most one client-side connection. +func Client(conn io.ReadWriteCloser, config *Config) (*Session, error) { + if config == nil { + config = DefaultConfig() + } + + if err := VerifyConfig(config); err != nil { + return nil, err + } + return newSession(config, conn, true), nil +} diff --git a/vendor/github.com/hashicorp/yamux/session.go b/vendor/github.com/hashicorp/yamux/session.go new file mode 100644 index 0000000..a80ddec --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/session.go @@ -0,0 +1,653 @@ +package yamux + +import ( + "bufio" + "fmt" + "io" + "io/ioutil" + "log" + "math" + "net" + "strings" + "sync" + "sync/atomic" + "time" +) + +// Session is used to wrap a reliable ordered connection and to +// multiplex it into multiple streams. +type Session struct { + // remoteGoAway indicates the remote side does + // not want futher connections. Must be first for alignment. + remoteGoAway int32 + + // localGoAway indicates that we should stop + // accepting futher connections. Must be first for alignment. + localGoAway int32 + + // nextStreamID is the next stream we should + // send. This depends if we are a client/server. + nextStreamID uint32 + + // config holds our configuration + config *Config + + // logger is used for our logs + logger *log.Logger + + // conn is the underlying connection + conn io.ReadWriteCloser + + // bufRead is a buffered reader + bufRead *bufio.Reader + + // pings is used to track inflight pings + pings map[uint32]chan struct{} + pingID uint32 + pingLock sync.Mutex + + // streams maps a stream id to a stream, and inflight has an entry + // for any outgoing stream that has not yet been established. Both are + // protected by streamLock. + streams map[uint32]*Stream + inflight map[uint32]struct{} + streamLock sync.Mutex + + // synCh acts like a semaphore. It is sized to the AcceptBacklog which + // is assumed to be symmetric between the client and server. 
This allows + // the client to avoid exceeding the backlog and instead blocks the open. + synCh chan struct{} + + // acceptCh is used to pass ready streams to the client + acceptCh chan *Stream + + // sendCh is used to mark a stream as ready to send, + // or to send a header out directly. + sendCh chan sendReady + + // recvDoneCh is closed when recv() exits to avoid a race + // between stream registration and stream shutdown + recvDoneCh chan struct{} + + // shutdown is used to safely close a session + shutdown bool + shutdownErr error + shutdownCh chan struct{} + shutdownLock sync.Mutex +} + +// sendReady is used to either mark a stream as ready +// or to directly send a header +type sendReady struct { + Hdr []byte + Body io.Reader + Err chan error +} + +// newSession is used to construct a new session +func newSession(config *Config, conn io.ReadWriteCloser, client bool) *Session { + logger := config.Logger + if logger == nil { + logger = log.New(config.LogOutput, "", log.LstdFlags) + } + + s := &Session{ + config: config, + logger: logger, + conn: conn, + bufRead: bufio.NewReader(conn), + pings: make(map[uint32]chan struct{}), + streams: make(map[uint32]*Stream), + inflight: make(map[uint32]struct{}), + synCh: make(chan struct{}, config.AcceptBacklog), + acceptCh: make(chan *Stream, config.AcceptBacklog), + sendCh: make(chan sendReady, 64), + recvDoneCh: make(chan struct{}), + shutdownCh: make(chan struct{}), + } + if client { + s.nextStreamID = 1 + } else { + s.nextStreamID = 2 + } + go s.recv() + go s.send() + if config.EnableKeepAlive { + go s.keepalive() + } + return s +} + +// IsClosed does a safe check to see if we have shutdown +func (s *Session) IsClosed() bool { + select { + case <-s.shutdownCh: + return true + default: + return false + } +} + +// CloseChan returns a read-only channel which is closed as +// soon as the session is closed. +func (s *Session) CloseChan() <-chan struct{} { + return s.shutdownCh +} + +// NumStreams returns the number of currently open streams +func (s *Session) NumStreams() int { + s.streamLock.Lock() + num := len(s.streams) + s.streamLock.Unlock() + return num +} + +// Open is used to create a new stream as a net.Conn +func (s *Session) Open() (net.Conn, error) { + conn, err := s.OpenStream() + if err != nil { + return nil, err + } + return conn, nil +} + +// OpenStream is used to create a new stream +func (s *Session) OpenStream() (*Stream, error) { + if s.IsClosed() { + return nil, ErrSessionShutdown + } + if atomic.LoadInt32(&s.remoteGoAway) == 1 { + return nil, ErrRemoteGoAway + } + + // Block if we have too many inflight SYNs + select { + case s.synCh <- struct{}{}: + case <-s.shutdownCh: + return nil, ErrSessionShutdown + } + +GET_ID: + // Get an ID, and check for stream exhaustion + id := atomic.LoadUint32(&s.nextStreamID) + if id >= math.MaxUint32-1 { + return nil, ErrStreamsExhausted + } + if !atomic.CompareAndSwapUint32(&s.nextStreamID, id, id+2) { + goto GET_ID + } + + // Register the stream + stream := newStream(s, id, streamInit) + s.streamLock.Lock() + s.streams[id] = stream + s.inflight[id] = struct{}{} + s.streamLock.Unlock() + + // Send the window update to create + if err := stream.sendWindowUpdate(); err != nil { + select { + case <-s.synCh: + default: + s.logger.Printf("[ERR] yamux: aborted stream open without inflight syn semaphore") + } + return nil, err + } + return stream, nil +} + +// Accept is used to block until the next available stream +// is ready to be accepted. 
+func (s *Session) Accept() (net.Conn, error) { + conn, err := s.AcceptStream() + if err != nil { + return nil, err + } + return conn, err +} + +// AcceptStream is used to block until the next available stream +// is ready to be accepted. +func (s *Session) AcceptStream() (*Stream, error) { + select { + case stream := <-s.acceptCh: + if err := stream.sendWindowUpdate(); err != nil { + return nil, err + } + return stream, nil + case <-s.shutdownCh: + return nil, s.shutdownErr + } +} + +// Close is used to close the session and all streams. +// Attempts to send a GoAway before closing the connection. +func (s *Session) Close() error { + s.shutdownLock.Lock() + defer s.shutdownLock.Unlock() + + if s.shutdown { + return nil + } + s.shutdown = true + if s.shutdownErr == nil { + s.shutdownErr = ErrSessionShutdown + } + close(s.shutdownCh) + s.conn.Close() + <-s.recvDoneCh + + s.streamLock.Lock() + defer s.streamLock.Unlock() + for _, stream := range s.streams { + stream.forceClose() + } + return nil +} + +// exitErr is used to handle an error that is causing the +// session to terminate. +func (s *Session) exitErr(err error) { + s.shutdownLock.Lock() + if s.shutdownErr == nil { + s.shutdownErr = err + } + s.shutdownLock.Unlock() + s.Close() +} + +// GoAway can be used to prevent accepting further +// connections. It does not close the underlying conn. +func (s *Session) GoAway() error { + return s.waitForSend(s.goAway(goAwayNormal), nil) +} + +// goAway is used to send a goAway message +func (s *Session) goAway(reason uint32) header { + atomic.SwapInt32(&s.localGoAway, 1) + hdr := header(make([]byte, headerSize)) + hdr.encode(typeGoAway, 0, 0, reason) + return hdr +} + +// Ping is used to measure the RTT response time +func (s *Session) Ping() (time.Duration, error) { + // Get a channel for the ping + ch := make(chan struct{}) + + // Get a new ping id, mark as pending + s.pingLock.Lock() + id := s.pingID + s.pingID++ + s.pings[id] = ch + s.pingLock.Unlock() + + // Send the ping request + hdr := header(make([]byte, headerSize)) + hdr.encode(typePing, flagSYN, 0, id) + if err := s.waitForSend(hdr, nil); err != nil { + return 0, err + } + + // Wait for a response + start := time.Now() + select { + case <-ch: + case <-time.After(s.config.ConnectionWriteTimeout): + s.pingLock.Lock() + delete(s.pings, id) // Ignore it if a response comes later. + s.pingLock.Unlock() + return 0, ErrTimeout + case <-s.shutdownCh: + return 0, ErrSessionShutdown + } + + // Compute the RTT + return time.Now().Sub(start), nil +} + +// keepalive is a long running goroutine that periodically does +// a ping to keep the connection alive. +func (s *Session) keepalive() { + for { + select { + case <-time.After(s.config.KeepAliveInterval): + _, err := s.Ping() + if err != nil { + if err != ErrSessionShutdown { + s.logger.Printf("[ERR] yamux: keepalive failed: %v", err) + s.exitErr(ErrKeepAliveTimeout) + } + return + } + case <-s.shutdownCh: + return + } + } +} + +// waitForSendErr waits to send a header, checking for a potential shutdown +func (s *Session) waitForSend(hdr header, body io.Reader) error { + errCh := make(chan error, 1) + return s.waitForSendErr(hdr, body, errCh) +} + +// waitForSendErr waits to send a header with optional data, checking for a +// potential shutdown. Since there's the expectation that sends can happen +// in a timely manner, we enforce the connection write timeout here. 
+func (s *Session) waitForSendErr(hdr header, body io.Reader, errCh chan error) error { + t := timerPool.Get() + timer := t.(*time.Timer) + timer.Reset(s.config.ConnectionWriteTimeout) + defer func() { + timer.Stop() + select { + case <-timer.C: + default: + } + timerPool.Put(t) + }() + + ready := sendReady{Hdr: hdr, Body: body, Err: errCh} + select { + case s.sendCh <- ready: + case <-s.shutdownCh: + return ErrSessionShutdown + case <-timer.C: + return ErrConnectionWriteTimeout + } + + select { + case err := <-errCh: + return err + case <-s.shutdownCh: + return ErrSessionShutdown + case <-timer.C: + return ErrConnectionWriteTimeout + } +} + +// sendNoWait does a send without waiting. Since there's the expectation that +// the send happens right here, we enforce the connection write timeout if we +// can't queue the header to be sent. +func (s *Session) sendNoWait(hdr header) error { + t := timerPool.Get() + timer := t.(*time.Timer) + timer.Reset(s.config.ConnectionWriteTimeout) + defer func() { + timer.Stop() + select { + case <-timer.C: + default: + } + timerPool.Put(t) + }() + + select { + case s.sendCh <- sendReady{Hdr: hdr}: + return nil + case <-s.shutdownCh: + return ErrSessionShutdown + case <-timer.C: + return ErrConnectionWriteTimeout + } +} + +// send is a long running goroutine that sends data +func (s *Session) send() { + for { + select { + case ready := <-s.sendCh: + // Send a header if ready + if ready.Hdr != nil { + sent := 0 + for sent < len(ready.Hdr) { + n, err := s.conn.Write(ready.Hdr[sent:]) + if err != nil { + s.logger.Printf("[ERR] yamux: Failed to write header: %v", err) + asyncSendErr(ready.Err, err) + s.exitErr(err) + return + } + sent += n + } + } + + // Send data from a body if given + if ready.Body != nil { + _, err := io.Copy(s.conn, ready.Body) + if err != nil { + s.logger.Printf("[ERR] yamux: Failed to write body: %v", err) + asyncSendErr(ready.Err, err) + s.exitErr(err) + return + } + } + + // No error, successful send + asyncSendErr(ready.Err, nil) + case <-s.shutdownCh: + return + } + } +} + +// recv is a long running goroutine that accepts new data +func (s *Session) recv() { + if err := s.recvLoop(); err != nil { + s.exitErr(err) + } +} + +// Ensure that the index of the handler (typeData/typeWindowUpdate/etc) matches the message type +var ( + handlers = []func(*Session, header) error{ + typeData: (*Session).handleStreamMessage, + typeWindowUpdate: (*Session).handleStreamMessage, + typePing: (*Session).handlePing, + typeGoAway: (*Session).handleGoAway, + } +) + +// recvLoop continues to receive data until a fatal error is encountered +func (s *Session) recvLoop() error { + defer close(s.recvDoneCh) + hdr := header(make([]byte, headerSize)) + for { + // Read the header + if _, err := io.ReadFull(s.bufRead, hdr); err != nil { + if err != io.EOF && !strings.Contains(err.Error(), "closed") && !strings.Contains(err.Error(), "reset by peer") { + s.logger.Printf("[ERR] yamux: Failed to read header: %v", err) + } + return err + } + + // Verify the version + if hdr.Version() != protoVersion { + s.logger.Printf("[ERR] yamux: Invalid protocol version: %d", hdr.Version()) + return ErrInvalidVersion + } + + mt := hdr.MsgType() + if mt < typeData || mt > typeGoAway { + return ErrInvalidMsgType + } + + if err := handlers[mt](s, hdr); err != nil { + return err + } + } +} + +// handleStreamMessage handles either a data or window update frame +func (s *Session) handleStreamMessage(hdr header) error { + // Check for a new stream creation + id := hdr.StreamID() + flags := 
hdr.Flags() + if flags&flagSYN == flagSYN { + if err := s.incomingStream(id); err != nil { + return err + } + } + + // Get the stream + s.streamLock.Lock() + stream := s.streams[id] + s.streamLock.Unlock() + + // If we do not have a stream, likely we sent a RST + if stream == nil { + // Drain any data on the wire + if hdr.MsgType() == typeData && hdr.Length() > 0 { + s.logger.Printf("[WARN] yamux: Discarding data for stream: %d", id) + if _, err := io.CopyN(ioutil.Discard, s.bufRead, int64(hdr.Length())); err != nil { + s.logger.Printf("[ERR] yamux: Failed to discard data: %v", err) + return nil + } + } else { + s.logger.Printf("[WARN] yamux: frame for missing stream: %v", hdr) + } + return nil + } + + // Check if this is a window update + if hdr.MsgType() == typeWindowUpdate { + if err := stream.incrSendWindow(hdr, flags); err != nil { + if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil { + s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr) + } + return err + } + return nil + } + + // Read the new data + if err := stream.readData(hdr, flags, s.bufRead); err != nil { + if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil { + s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr) + } + return err + } + return nil +} + +// handlePing is invokde for a typePing frame +func (s *Session) handlePing(hdr header) error { + flags := hdr.Flags() + pingID := hdr.Length() + + // Check if this is a query, respond back in a separate context so we + // don't interfere with the receiving thread blocking for the write. + if flags&flagSYN == flagSYN { + go func() { + hdr := header(make([]byte, headerSize)) + hdr.encode(typePing, flagACK, 0, pingID) + if err := s.sendNoWait(hdr); err != nil { + s.logger.Printf("[WARN] yamux: failed to send ping reply: %v", err) + } + }() + return nil + } + + // Handle a response + s.pingLock.Lock() + ch := s.pings[pingID] + if ch != nil { + delete(s.pings, pingID) + close(ch) + } + s.pingLock.Unlock() + return nil +} + +// handleGoAway is invokde for a typeGoAway frame +func (s *Session) handleGoAway(hdr header) error { + code := hdr.Length() + switch code { + case goAwayNormal: + atomic.SwapInt32(&s.remoteGoAway, 1) + case goAwayProtoErr: + s.logger.Printf("[ERR] yamux: received protocol error go away") + return fmt.Errorf("yamux protocol error") + case goAwayInternalErr: + s.logger.Printf("[ERR] yamux: received internal error go away") + return fmt.Errorf("remote yamux internal error") + default: + s.logger.Printf("[ERR] yamux: received unexpected go away") + return fmt.Errorf("unexpected go away received") + } + return nil +} + +// incomingStream is used to create a new incoming stream +func (s *Session) incomingStream(id uint32) error { + // Reject immediately if we are doing a go away + if atomic.LoadInt32(&s.localGoAway) == 1 { + hdr := header(make([]byte, headerSize)) + hdr.encode(typeWindowUpdate, flagRST, id, 0) + return s.sendNoWait(hdr) + } + + // Allocate a new stream + stream := newStream(s, id, streamSYNReceived) + + s.streamLock.Lock() + defer s.streamLock.Unlock() + + // Check if stream already exists + if _, ok := s.streams[id]; ok { + s.logger.Printf("[ERR] yamux: duplicate stream declared") + if sendErr := s.sendNoWait(s.goAway(goAwayProtoErr)); sendErr != nil { + s.logger.Printf("[WARN] yamux: failed to send go away: %v", sendErr) + } + return ErrDuplicateStream + } + + // Register the stream + s.streams[id] = stream + + // Check if we've exceeded the backlog + select { + case s.acceptCh <- 
stream: + return nil + default: + // Backlog exceeded! RST the stream + s.logger.Printf("[WARN] yamux: backlog exceeded, forcing connection reset") + delete(s.streams, id) + stream.sendHdr.encode(typeWindowUpdate, flagRST, id, 0) + return s.sendNoWait(stream.sendHdr) + } +} + +// closeStream is used to close a stream once both sides have +// issued a close. If there was an in-flight SYN and the stream +// was not yet established, then this will give the credit back. +func (s *Session) closeStream(id uint32) { + s.streamLock.Lock() + if _, ok := s.inflight[id]; ok { + select { + case <-s.synCh: + default: + s.logger.Printf("[ERR] yamux: SYN tracking out of sync") + } + } + delete(s.streams, id) + s.streamLock.Unlock() +} + +// establishStream is used to mark a stream that was in the +// SYN Sent state as established. +func (s *Session) establishStream(id uint32) { + s.streamLock.Lock() + if _, ok := s.inflight[id]; ok { + delete(s.inflight, id) + } else { + s.logger.Printf("[ERR] yamux: established stream without inflight SYN (no tracking entry)") + } + select { + case <-s.synCh: + default: + s.logger.Printf("[ERR] yamux: established stream without inflight SYN (didn't have semaphore)") + } + s.streamLock.Unlock() +} diff --git a/vendor/github.com/hashicorp/yamux/stream.go b/vendor/github.com/hashicorp/yamux/stream.go new file mode 100644 index 0000000..aa23919 --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/stream.go @@ -0,0 +1,470 @@ +package yamux + +import ( + "bytes" + "io" + "sync" + "sync/atomic" + "time" +) + +type streamState int + +const ( + streamInit streamState = iota + streamSYNSent + streamSYNReceived + streamEstablished + streamLocalClose + streamRemoteClose + streamClosed + streamReset +) + +// Stream is used to represent a logical stream +// within a session. 
+type Stream struct { + recvWindow uint32 + sendWindow uint32 + + id uint32 + session *Session + + state streamState + stateLock sync.Mutex + + recvBuf *bytes.Buffer + recvLock sync.Mutex + + controlHdr header + controlErr chan error + controlHdrLock sync.Mutex + + sendHdr header + sendErr chan error + sendLock sync.Mutex + + recvNotifyCh chan struct{} + sendNotifyCh chan struct{} + + readDeadline atomic.Value // time.Time + writeDeadline atomic.Value // time.Time +} + +// newStream is used to construct a new stream within +// a given session for an ID +func newStream(session *Session, id uint32, state streamState) *Stream { + s := &Stream{ + id: id, + session: session, + state: state, + controlHdr: header(make([]byte, headerSize)), + controlErr: make(chan error, 1), + sendHdr: header(make([]byte, headerSize)), + sendErr: make(chan error, 1), + recvWindow: initialStreamWindow, + sendWindow: initialStreamWindow, + recvNotifyCh: make(chan struct{}, 1), + sendNotifyCh: make(chan struct{}, 1), + } + s.readDeadline.Store(time.Time{}) + s.writeDeadline.Store(time.Time{}) + return s +} + +// Session returns the associated stream session +func (s *Stream) Session() *Session { + return s.session +} + +// StreamID returns the ID of this stream +func (s *Stream) StreamID() uint32 { + return s.id +} + +// Read is used to read from the stream +func (s *Stream) Read(b []byte) (n int, err error) { + defer asyncNotify(s.recvNotifyCh) +START: + s.stateLock.Lock() + switch s.state { + case streamLocalClose: + fallthrough + case streamRemoteClose: + fallthrough + case streamClosed: + s.recvLock.Lock() + if s.recvBuf == nil || s.recvBuf.Len() == 0 { + s.recvLock.Unlock() + s.stateLock.Unlock() + return 0, io.EOF + } + s.recvLock.Unlock() + case streamReset: + s.stateLock.Unlock() + return 0, ErrConnectionReset + } + s.stateLock.Unlock() + + // If there is no data available, block + s.recvLock.Lock() + if s.recvBuf == nil || s.recvBuf.Len() == 0 { + s.recvLock.Unlock() + goto WAIT + } + + // Read any bytes + n, _ = s.recvBuf.Read(b) + s.recvLock.Unlock() + + // Send a window update potentially + err = s.sendWindowUpdate() + return n, err + +WAIT: + var timeout <-chan time.Time + var timer *time.Timer + readDeadline := s.readDeadline.Load().(time.Time) + if !readDeadline.IsZero() { + delay := readDeadline.Sub(time.Now()) + timer = time.NewTimer(delay) + timeout = timer.C + } + select { + case <-s.recvNotifyCh: + if timer != nil { + timer.Stop() + } + goto START + case <-timeout: + return 0, ErrTimeout + } +} + +// Write is used to write to the stream +func (s *Stream) Write(b []byte) (n int, err error) { + s.sendLock.Lock() + defer s.sendLock.Unlock() + total := 0 + for total < len(b) { + n, err := s.write(b[total:]) + total += n + if err != nil { + return total, err + } + } + return total, nil +} + +// write is used to write to the stream, may return on +// a short write. 
+func (s *Stream) write(b []byte) (n int, err error) { + var flags uint16 + var max uint32 + var body io.Reader +START: + s.stateLock.Lock() + switch s.state { + case streamLocalClose: + fallthrough + case streamClosed: + s.stateLock.Unlock() + return 0, ErrStreamClosed + case streamReset: + s.stateLock.Unlock() + return 0, ErrConnectionReset + } + s.stateLock.Unlock() + + // If there is no data available, block + window := atomic.LoadUint32(&s.sendWindow) + if window == 0 { + goto WAIT + } + + // Determine the flags if any + flags = s.sendFlags() + + // Send up to our send window + max = min(window, uint32(len(b))) + body = bytes.NewReader(b[:max]) + + // Send the header + s.sendHdr.encode(typeData, flags, s.id, max) + if err = s.session.waitForSendErr(s.sendHdr, body, s.sendErr); err != nil { + return 0, err + } + + // Reduce our send window + atomic.AddUint32(&s.sendWindow, ^uint32(max-1)) + + // Unlock + return int(max), err + +WAIT: + var timeout <-chan time.Time + writeDeadline := s.writeDeadline.Load().(time.Time) + if !writeDeadline.IsZero() { + delay := writeDeadline.Sub(time.Now()) + timeout = time.After(delay) + } + select { + case <-s.sendNotifyCh: + goto START + case <-timeout: + return 0, ErrTimeout + } + return 0, nil +} + +// sendFlags determines any flags that are appropriate +// based on the current stream state +func (s *Stream) sendFlags() uint16 { + s.stateLock.Lock() + defer s.stateLock.Unlock() + var flags uint16 + switch s.state { + case streamInit: + flags |= flagSYN + s.state = streamSYNSent + case streamSYNReceived: + flags |= flagACK + s.state = streamEstablished + } + return flags +} + +// sendWindowUpdate potentially sends a window update enabling +// further writes to take place. Must be invoked with the lock. +func (s *Stream) sendWindowUpdate() error { + s.controlHdrLock.Lock() + defer s.controlHdrLock.Unlock() + + // Determine the delta update + max := s.session.config.MaxStreamWindowSize + var bufLen uint32 + s.recvLock.Lock() + if s.recvBuf != nil { + bufLen = uint32(s.recvBuf.Len()) + } + delta := (max - bufLen) - s.recvWindow + + // Determine the flags if any + flags := s.sendFlags() + + // Check if we can omit the update + if delta < (max/2) && flags == 0 { + s.recvLock.Unlock() + return nil + } + + // Update our window + s.recvWindow += delta + s.recvLock.Unlock() + + // Send the header + s.controlHdr.encode(typeWindowUpdate, flags, s.id, delta) + if err := s.session.waitForSendErr(s.controlHdr, nil, s.controlErr); err != nil { + return err + } + return nil +} + +// sendClose is used to send a FIN +func (s *Stream) sendClose() error { + s.controlHdrLock.Lock() + defer s.controlHdrLock.Unlock() + + flags := s.sendFlags() + flags |= flagFIN + s.controlHdr.encode(typeWindowUpdate, flags, s.id, 0) + if err := s.session.waitForSendErr(s.controlHdr, nil, s.controlErr); err != nil { + return err + } + return nil +} + +// Close is used to close the stream +func (s *Stream) Close() error { + closeStream := false + s.stateLock.Lock() + switch s.state { + // Opened means we need to signal a close + case streamSYNSent: + fallthrough + case streamSYNReceived: + fallthrough + case streamEstablished: + s.state = streamLocalClose + goto SEND_CLOSE + + case streamLocalClose: + case streamRemoteClose: + s.state = streamClosed + closeStream = true + goto SEND_CLOSE + + case streamClosed: + case streamReset: + default: + panic("unhandled state") + } + s.stateLock.Unlock() + return nil +SEND_CLOSE: + s.stateLock.Unlock() + s.sendClose() + s.notifyWaiting() + if 
closeStream { + s.session.closeStream(s.id) + } + return nil +} + +// forceClose is used for when the session is exiting +func (s *Stream) forceClose() { + s.stateLock.Lock() + s.state = streamClosed + s.stateLock.Unlock() + s.notifyWaiting() +} + +// processFlags is used to update the state of the stream +// based on set flags, if any. Lock must be held +func (s *Stream) processFlags(flags uint16) error { + // Close the stream without holding the state lock + closeStream := false + defer func() { + if closeStream { + s.session.closeStream(s.id) + } + }() + + s.stateLock.Lock() + defer s.stateLock.Unlock() + if flags&flagACK == flagACK { + if s.state == streamSYNSent { + s.state = streamEstablished + } + s.session.establishStream(s.id) + } + if flags&flagFIN == flagFIN { + switch s.state { + case streamSYNSent: + fallthrough + case streamSYNReceived: + fallthrough + case streamEstablished: + s.state = streamRemoteClose + s.notifyWaiting() + case streamLocalClose: + s.state = streamClosed + closeStream = true + s.notifyWaiting() + default: + s.session.logger.Printf("[ERR] yamux: unexpected FIN flag in state %d", s.state) + return ErrUnexpectedFlag + } + } + if flags&flagRST == flagRST { + s.state = streamReset + closeStream = true + s.notifyWaiting() + } + return nil +} + +// notifyWaiting notifies all the waiting channels +func (s *Stream) notifyWaiting() { + asyncNotify(s.recvNotifyCh) + asyncNotify(s.sendNotifyCh) +} + +// incrSendWindow updates the size of our send window +func (s *Stream) incrSendWindow(hdr header, flags uint16) error { + if err := s.processFlags(flags); err != nil { + return err + } + + // Increase window, unblock a sender + atomic.AddUint32(&s.sendWindow, hdr.Length()) + asyncNotify(s.sendNotifyCh) + return nil +} + +// readData is used to handle a data frame +func (s *Stream) readData(hdr header, flags uint16, conn io.Reader) error { + if err := s.processFlags(flags); err != nil { + return err + } + + // Check that our recv window is not exceeded + length := hdr.Length() + if length == 0 { + return nil + } + + // Wrap in a limited reader + conn = &io.LimitedReader{R: conn, N: int64(length)} + + // Copy into buffer + s.recvLock.Lock() + + if length > s.recvWindow { + s.session.logger.Printf("[ERR] yamux: receive window exceeded (stream: %d, remain: %d, recv: %d)", s.id, s.recvWindow, length) + return ErrRecvWindowExceeded + } + + if s.recvBuf == nil { + // Allocate the receive buffer just-in-time to fit the full data frame. + // This way we can read in the whole packet without further allocations. + s.recvBuf = bytes.NewBuffer(make([]byte, 0, length)) + } + if _, err := io.Copy(s.recvBuf, conn); err != nil { + s.session.logger.Printf("[ERR] yamux: Failed to read stream data: %v", err) + s.recvLock.Unlock() + return err + } + + // Decrement the receive window + s.recvWindow -= length + s.recvLock.Unlock() + + // Unblock any readers + asyncNotify(s.recvNotifyCh) + return nil +} + +// SetDeadline sets the read and write deadlines +func (s *Stream) SetDeadline(t time.Time) error { + if err := s.SetReadDeadline(t); err != nil { + return err + } + if err := s.SetWriteDeadline(t); err != nil { + return err + } + return nil +} + +// SetReadDeadline sets the deadline for future Read calls. 
+func (s *Stream) SetReadDeadline(t time.Time) error { + s.readDeadline.Store(t) + return nil +} + +// SetWriteDeadline sets the deadline for future Write calls +func (s *Stream) SetWriteDeadline(t time.Time) error { + s.writeDeadline.Store(t) + return nil +} + +// Shrink is used to compact the amount of buffers utilized +// This is useful when using Yamux in a connection pool to reduce +// the idle memory utilization. +func (s *Stream) Shrink() { + s.recvLock.Lock() + if s.recvBuf != nil && s.recvBuf.Len() == 0 { + s.recvBuf = nil + } + s.recvLock.Unlock() +} diff --git a/vendor/github.com/hashicorp/yamux/util.go b/vendor/github.com/hashicorp/yamux/util.go new file mode 100644 index 0000000..8a73e92 --- /dev/null +++ b/vendor/github.com/hashicorp/yamux/util.go @@ -0,0 +1,43 @@ +package yamux + +import ( + "sync" + "time" +) + +var ( + timerPool = &sync.Pool{ + New: func() interface{} { + timer := time.NewTimer(time.Hour * 1e6) + timer.Stop() + return timer + }, + } +) + +// asyncSendErr is used to try an async send of an error +func asyncSendErr(ch chan error, err error) { + if ch == nil { + return + } + select { + case ch <- err: + default: + } +} + +// asyncNotify is used to signal a waiting goroutine +func asyncNotify(ch chan struct{}) { + select { + case ch <- struct{}{}: + default: + } +} + +// min computes the minimum of two values +func min(a, b uint32) uint32 { + if a < b { + return a + } + return b +} diff --git a/vendor/github.com/inconshreveable/mousetrap/LICENSE b/vendor/github.com/inconshreveable/mousetrap/LICENSE new file mode 100644 index 0000000..5f0d1fb --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/LICENSE @@ -0,0 +1,13 @@ +Copyright 2014 Alan Shreve + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/vendor/github.com/inconshreveable/mousetrap/trap_others.go new file mode 100644 index 0000000..9d2d8a4 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_others.go @@ -0,0 +1,15 @@ +// +build !windows + +package mousetrap + +// StartedByExplorer returns true if the program was invoked by the user +// double-clicking on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +// +// On non-Windows platforms, it always returns false. 
+func StartedByExplorer() bool { + return false +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go new file mode 100644 index 0000000..336142a --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go @@ -0,0 +1,98 @@ +// +build windows +// +build !go1.4 + +package mousetrap + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const ( + // defined by the Win32 API + th32cs_snapprocess uintptr = 0x2 +) + +var ( + kernel = syscall.MustLoadDLL("kernel32.dll") + CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot") + Process32First = kernel.MustFindProc("Process32FirstW") + Process32Next = kernel.MustFindProc("Process32NextW") +) + +// ProcessEntry32 structure defined by the Win32 API +type processEntry32 struct { + dwSize uint32 + cntUsage uint32 + th32ProcessID uint32 + th32DefaultHeapID int + th32ModuleID uint32 + cntThreads uint32 + th32ParentProcessID uint32 + pcPriClassBase int32 + dwFlags uint32 + szExeFile [syscall.MAX_PATH]uint16 +} + +func getProcessEntry(pid int) (pe *processEntry32, err error) { + snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0)) + if snapshot == uintptr(syscall.InvalidHandle) { + err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1) + return + } + defer syscall.CloseHandle(syscall.Handle(snapshot)) + + var processEntry processEntry32 + processEntry.dwSize = uint32(unsafe.Sizeof(processEntry)) + ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32First: %v", e1) + return + } + + for { + if processEntry.th32ProcessID == uint32(pid) { + pe = &processEntry + return + } + + ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32Next: %v", e1) + return + } + } +} + +func getppid() (pid int, err error) { + pe, err := getProcessEntry(os.Getpid()) + if err != nil { + return + } + + pid = int(pe.th32ParentProcessID) + return +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. 
It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + ppid, err := getppid() + if err != nil { + return false + } + + pe, err := getProcessEntry(ppid) + if err != nil { + return false + } + + name := syscall.UTF16ToString(pe.szExeFile[:]) + return name == "explorer.exe" +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go new file mode 100644 index 0000000..9a28e57 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go @@ -0,0 +1,46 @@ +// +build windows +// +build go1.4 + +package mousetrap + +import ( + "os" + "syscall" + "unsafe" +) + +func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { + snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) + if err != nil { + return nil, err + } + defer syscall.CloseHandle(snapshot) + var procEntry syscall.ProcessEntry32 + procEntry.Size = uint32(unsafe.Sizeof(procEntry)) + if err = syscall.Process32First(snapshot, &procEntry); err != nil { + return nil, err + } + for { + if procEntry.ProcessID == uint32(pid) { + return &procEntry, nil + } + err = syscall.Process32Next(snapshot, &procEntry) + if err != nil { + return nil, err + } + } +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + pe, err := getProcessEntry(os.Getppid()) + if err != nil { + return false + } + return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) +} diff --git a/vendor/github.com/leonelquinteros/gotext/LICENSE b/vendor/github.com/leonelquinteros/gotext/LICENSE new file mode 100644 index 0000000..a753ef2 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Leonel Quinteros + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/leonelquinteros/gotext/gotext.go b/vendor/github.com/leonelquinteros/gotext/gotext.go new file mode 100644 index 0000000..4cb0df5 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/gotext.go @@ -0,0 +1,221 @@ +/* +Package gotext implements GNU gettext utilities. 
+ +For quick/simple translations you can use the package level functions directly. + + import ( + "fmt" + "github.com/leonelquinteros/gotext" + ) + + func main() { + // Configure package + gotext.Configure("/path/to/locales/root/dir", "en_UK", "domain-name") + + // Translate text from default domain + fmt.Println(gotext.Get("My text on 'domain-name' domain")) + + // Translate text from a different domain without reconfigure + fmt.Println(gotext.GetD("domain2", "Another text on a different domain")) + } + +*/ +package gotext + +import ( + "encoding/gob" + "sync" +) + +// Global environment variables +type config struct { + sync.RWMutex + + // Default domain to look at when no domain is specified. Used by package level functions. + domain string + + // Language set. + language string + + // Path to library directory where all locale directories and Translation files are. + library string + + // Storage for package level methods + storage *Locale +} + +var globalConfig *config + +func init() { + // Init default configuration + globalConfig = &config{ + domain: "default", + language: "en_US", + library: "/usr/local/share/locale", + storage: nil, + } + + // Register Translator types for gob encoding + gob.Register(TranslatorEncoding{}) +} + +// loadStorage creates a new Locale object at package level based on the Global variables settings. +// It's called automatically when trying to use Get or GetD methods. +func loadStorage(force bool) { + globalConfig.Lock() + + if globalConfig.storage == nil || force { + globalConfig.storage = NewLocale(globalConfig.library, globalConfig.language) + } + + if _, ok := globalConfig.storage.Domains[globalConfig.domain]; !ok || force { + globalConfig.storage.AddDomain(globalConfig.domain) + } + globalConfig.storage.SetDomain(globalConfig.domain) + + globalConfig.Unlock() +} + +// GetDomain is the domain getter for the package configuration +func GetDomain() string { + var dom string + globalConfig.RLock() + if globalConfig.storage != nil { + dom = globalConfig.storage.GetDomain() + } + if dom == "" { + dom = globalConfig.domain + } + globalConfig.RUnlock() + + return dom +} + +// SetDomain sets the name for the domain to be used at package level. +// It reloads the corresponding Translation file. +func SetDomain(dom string) { + globalConfig.Lock() + globalConfig.domain = dom + if globalConfig.storage != nil { + globalConfig.storage.SetDomain(dom) + } + globalConfig.Unlock() + + loadStorage(true) +} + +// GetLanguage is the language getter for the package configuration +func GetLanguage() string { + globalConfig.RLock() + lang := globalConfig.language + globalConfig.RUnlock() + + return lang +} + +// SetLanguage sets the language code to be used at package level. +// It reloads the corresponding Translation file. +func SetLanguage(lang string) { + globalConfig.Lock() + globalConfig.language = SimplifiedLocale(lang) + globalConfig.Unlock() + + loadStorage(true) +} + +// GetLibrary is the library getter for the package configuration +func GetLibrary() string { + globalConfig.RLock() + lib := globalConfig.library + globalConfig.RUnlock() + + return lib +} + +// SetLibrary sets the root path for the loale directories and files to be used at package level. +// It reloads the corresponding Translation file. +func SetLibrary(lib string) { + globalConfig.Lock() + globalConfig.library = lib + globalConfig.Unlock() + + loadStorage(true) +} + +// Configure sets all configuration variables to be used at package level and reloads the corresponding Translation file. 
+// It receives the library path, language code and domain name. +// This function is recommended to be used when changing more than one setting, +// as using each setter will introduce a I/O overhead because the Translation file will be loaded after each set. +func Configure(lib, lang, dom string) { + globalConfig.Lock() + globalConfig.library = lib + globalConfig.language = SimplifiedLocale(lang) + globalConfig.domain = dom + globalConfig.Unlock() + + loadStorage(true) +} + +// Get uses the default domain globally set to return the corresponding Translation of a given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func Get(str string, vars ...interface{}) string { + return GetD(GetDomain(), str, vars...) +} + +// GetN retrieves the (N)th plural form of Translation for the given string in the default domain. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetN(str, plural string, n int, vars ...interface{}) string { + return GetND(GetDomain(), str, plural, n, vars...) +} + +// GetD returns the corresponding Translation in the given domain for a given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetD(dom, str string, vars ...interface{}) string { + return GetND(dom, str, str, 1, vars...) +} + +// GetND retrieves the (N)th plural form of Translation in the given domain for a given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetND(dom, str, plural string, n int, vars ...interface{}) string { + // Try to load default package Locale storage + loadStorage(false) + + // Return Translation + globalConfig.RLock() + tr := globalConfig.storage.GetND(dom, str, plural, n, vars...) + globalConfig.RUnlock() + + return tr +} + +// GetC uses the default domain globally set to return the corresponding Translation of the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetC(str, ctx string, vars ...interface{}) string { + return GetDC(GetDomain(), str, ctx, vars...) +} + +// GetNC retrieves the (N)th plural form of Translation for the given string in the given context in the default domain. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetNC(str, plural string, n int, ctx string, vars ...interface{}) string { + return GetNDC(GetDomain(), str, plural, n, ctx, vars...) +} + +// GetDC returns the corresponding Translation in the given domain for the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func GetDC(dom, str, ctx string, vars ...interface{}) string { + return GetNDC(dom, str, str, 1, ctx, vars...) +} + +// GetNDC retrieves the (N)th plural form of Translation in the given domain for a given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. 
+func GetNDC(dom, str, plural string, n int, ctx string, vars ...interface{}) string { + // Try to load default package Locale storage + loadStorage(false) + + // Return Translation + globalConfig.RLock() + tr := globalConfig.storage.GetNDC(dom, str, plural, n, ctx, vars...) + globalConfig.RUnlock() + + return tr +} diff --git a/vendor/github.com/leonelquinteros/gotext/helper.go b/vendor/github.com/leonelquinteros/gotext/helper.go new file mode 100644 index 0000000..ba1b62c --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/helper.go @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package gotext + +import ( + "fmt" + "regexp" + "strings" +) + +var re = regexp.MustCompile(`%\(([a-zA-Z0-9_]+)\)[.0-9]*[svTtbcdoqXxUeEfFgGp]`) + +// SimplifiedLocale simplified locale like " en_US"/"de_DE "/en_US.UTF-8/zh_CN/zh_TW/el_GR@euro/... to en_US, de_DE, zh_CN, el_GR... +func SimplifiedLocale(lang string) string { + // en_US/en_US.UTF-8/zh_CN/zh_TW/el_GR@euro/... + if idx := strings.Index(lang, ":"); idx != -1 { + lang = lang[:idx] + } + if idx := strings.Index(lang, "@"); idx != -1 { + lang = lang[:idx] + } + if idx := strings.Index(lang, "."); idx != -1 { + lang = lang[:idx] + } + return strings.TrimSpace(lang) +} + +// Printf applies text formatting only when needed to parse variables. +func Printf(str string, vars ...interface{}) string { + if len(vars) > 0 { + return fmt.Sprintf(str, vars...) + } + + return str +} + +// NPrintf support named format +// NPrintf("%(name)s is Type %(type)s", map[string]interface{}{"name": "Gotext", "type": "struct"}) +func NPrintf(format string, params map[string]interface{}) { + f, p := parseSprintf(format, params) + fmt.Printf(f, p...) +} + +// Sprintf support named format +// Sprintf("%(name)s is Type %(type)s", map[string]interface{}{"name": "Gotext", "type": "struct"}) +func Sprintf(format string, params map[string]interface{}) string { + f, p := parseSprintf(format, params) + return fmt.Sprintf(f, p...) +} + +func parseSprintf(format string, params map[string]interface{}) (string, []interface{}) { + f, n := reformatSprintf(format) + var p []interface{} + for _, v := range n { + p = append(p, params[v]) + } + return f, p +} + +func reformatSprintf(f string) (string, []string) { + m := re.FindAllStringSubmatch(f, -1) + i := re.FindAllStringSubmatchIndex(f, -1) + + ord := []string{} + for _, v := range m { + ord = append(ord, v[1]) + } + + pair := []int{0} + for _, v := range i { + pair = append(pair, v[2]-1) + pair = append(pair, v[3]+1) + } + pair = append(pair, len(f)) + plen := len(pair) + + out := "" + for n := 0; n < plen; n += 2 { + out += f[pair[n]:pair[n+1]] + } + + return out, ord +} diff --git a/vendor/github.com/leonelquinteros/gotext/locale.go b/vendor/github.com/leonelquinteros/gotext/locale.go new file mode 100644 index 0000000..195b7d5 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/locale.go @@ -0,0 +1,304 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. 
+ */ + +package gotext + +import ( + "bytes" + "encoding/gob" + "os" + "path" + "sync" +) + +/* +Locale wraps the entire i18n collection for a single language (locale) +It's used by the package functions, but it can also be used independently to handle +multiple languages at the same time by working with this object. + +Example: + + import ( + "encoding/gob" + "bytes" + "fmt" + "github.com/leonelquinteros/gotext" + ) + + func main() { + // Create Locale with library path and language code + l := gotext.NewLocale("/path/to/i18n/dir", "en_US") + + // Load domain '/path/to/i18n/dir/en_US/LC_MESSAGES/default.{po,mo}' + l.AddDomain("default") + + // Translate text from default domain + fmt.Println(l.Get("Translate this")) + + // Load different domain ('/path/to/i18n/dir/en_US/LC_MESSAGES/extras.{po,mo}') + l.AddDomain("extras") + + // Translate text from domain + fmt.Println(l.GetD("extras", "Translate this")) + } + +*/ +type Locale struct { + // Path to locale files. + path string + + // Language for this Locale + lang string + + // List of available Domains for this locale. + Domains map[string]Translator + + // First AddDomain is default Domain + defaultDomain string + + // Sync Mutex + sync.RWMutex +} + +// NewLocale creates and initializes a new Locale object for a given language. +// It receives a path for the i18n .po/.mo files directory (p) and a language code to use (l). +func NewLocale(p, l string) *Locale { + return &Locale{ + path: p, + lang: SimplifiedLocale(l), + Domains: make(map[string]Translator), + } +} + +func (l *Locale) findExt(dom, ext string) string { + filename := path.Join(l.path, l.lang, "LC_MESSAGES", dom+"."+ext) + if _, err := os.Stat(filename); err == nil { + return filename + } + + if len(l.lang) > 2 { + filename = path.Join(l.path, l.lang[:2], "LC_MESSAGES", dom+"."+ext) + if _, err := os.Stat(filename); err == nil { + return filename + } + } + + filename = path.Join(l.path, l.lang, dom+"."+ext) + if _, err := os.Stat(filename); err == nil { + return filename + } + + if len(l.lang) > 2 { + filename = path.Join(l.path, l.lang[:2], dom+"."+ext) + if _, err := os.Stat(filename); err == nil { + return filename + } + } + + return "" +} + +// AddDomain creates a new domain for a given locale object and initializes the Po object. +// If the domain exists, it gets reloaded. +func (l *Locale) AddDomain(dom string) { + var poObj Translator + + file := l.findExt(dom, "po") + if file != "" { + poObj = new(Po) + // Parse file. + poObj.ParseFile(file) + } else { + file = l.findExt(dom, "mo") + if file != "" { + poObj = new(Mo) + // Parse file. + poObj.ParseFile(file) + } else { + // fallback return if no file found with + return + } + } + + // Save new domain + l.Lock() + + if l.Domains == nil { + l.Domains = make(map[string]Translator) + } + if l.defaultDomain == "" { + l.defaultDomain = dom + } + l.Domains[dom] = poObj + + // Unlock "Save new domain" + l.Unlock() +} + +// AddTranslator takes a domain name and a Translator object to make it available in the Locale object. +func (l *Locale) AddTranslator(dom string, tr Translator) { + l.Lock() + + if l.Domains == nil { + l.Domains = make(map[string]Translator) + } + if l.defaultDomain == "" { + l.defaultDomain = dom + } + l.Domains[dom] = tr + + l.Unlock() +} + +// GetDomain is the domain getter for the package configuration +func (l *Locale) GetDomain() string { + l.RLock() + dom := l.defaultDomain + l.RUnlock() + return dom +} + +// SetDomain sets the name for the domain to be used. 
+func (l *Locale) SetDomain(dom string) { + l.Lock() + l.defaultDomain = dom + l.Unlock() +} + +// Get uses a domain "default" to return the corresponding Translation of a given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) Get(str string, vars ...interface{}) string { + return l.GetD(l.GetDomain(), str, vars...) +} + +// GetN retrieves the (N)th plural form of Translation for the given string in the "default" domain. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetN(str, plural string, n int, vars ...interface{}) string { + return l.GetND(l.GetDomain(), str, plural, n, vars...) +} + +// GetD returns the corresponding Translation in the given domain for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetD(dom, str string, vars ...interface{}) string { + return l.GetND(dom, str, str, 1, vars...) +} + +// GetND retrieves the (N)th plural form of Translation in the given domain for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetND(dom, str, plural string, n int, vars ...interface{}) string { + // Sync read + l.RLock() + defer l.RUnlock() + + if l.Domains != nil { + if _, ok := l.Domains[dom]; ok { + if l.Domains[dom] != nil { + return l.Domains[dom].GetN(str, plural, n, vars...) + } + } + } + + // Return the same we received by default + return Printf(plural, vars...) +} + +// GetC uses a domain "default" to return the corresponding Translation of the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetC(str, ctx string, vars ...interface{}) string { + return l.GetDC(l.GetDomain(), str, ctx, vars...) +} + +// GetNC retrieves the (N)th plural form of Translation for the given string in the given context in the "default" domain. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetNC(str, plural string, n int, ctx string, vars ...interface{}) string { + return l.GetNDC(l.GetDomain(), str, plural, n, ctx, vars...) +} + +// GetDC returns the corresponding Translation in the given domain for the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetDC(dom, str, ctx string, vars ...interface{}) string { + return l.GetNDC(dom, str, str, 1, ctx, vars...) +} + +// GetNDC retrieves the (N)th plural form of Translation in the given domain for the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (l *Locale) GetNDC(dom, str, plural string, n int, ctx string, vars ...interface{}) string { + // Sync read + l.RLock() + defer l.RUnlock() + + if l.Domains != nil { + if _, ok := l.Domains[dom]; ok { + if l.Domains[dom] != nil { + return l.Domains[dom].GetNC(str, plural, n, ctx, vars...) + } + } + } + + // Return the same we received by default + return Printf(plural, vars...) 
+} + +// LocaleEncoding is used as intermediary storage to encode Locale objects to Gob. +type LocaleEncoding struct { + Path string + Lang string + Domains map[string][]byte + DefaultDomain string +} + +// MarshalBinary implements encoding BinaryMarshaler interface +func (l *Locale) MarshalBinary() ([]byte, error) { + obj := new(LocaleEncoding) + obj.DefaultDomain = l.defaultDomain + obj.Domains = make(map[string][]byte) + for k, v := range l.Domains { + var err error + obj.Domains[k], err = v.MarshalBinary() + if err != nil { + return nil, err + } + } + obj.Lang = l.lang + obj.Path = l.path + + var buff bytes.Buffer + encoder := gob.NewEncoder(&buff) + err := encoder.Encode(obj) + + return buff.Bytes(), err +} + +// UnmarshalBinary implements encoding BinaryUnmarshaler interface +func (l *Locale) UnmarshalBinary(data []byte) error { + buff := bytes.NewBuffer(data) + obj := new(LocaleEncoding) + + decoder := gob.NewDecoder(buff) + err := decoder.Decode(obj) + if err != nil { + return err + } + + l.defaultDomain = obj.DefaultDomain + l.lang = obj.Lang + l.path = obj.Path + + // Decode Domains + l.Domains = make(map[string]Translator) + for k, v := range obj.Domains { + var tr TranslatorEncoding + buff := bytes.NewBuffer(v) + trDecoder := gob.NewDecoder(buff) + err := trDecoder.Decode(&tr) + if err != nil { + return err + } + + l.Domains[k] = tr.GetTranslator() + } + + return nil +} diff --git a/vendor/github.com/leonelquinteros/gotext/mo.go b/vendor/github.com/leonelquinteros/gotext/mo.go new file mode 100644 index 0000000..deb21ab --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/mo.go @@ -0,0 +1,472 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package gotext + +import ( + "bufio" + "bytes" + "encoding/binary" + "encoding/gob" + "io/ioutil" + "net/textproto" + "os" + "strconv" + "strings" + "sync" + + "github.com/leonelquinteros/gotext/plurals" +) + +const ( + // MoMagicLittleEndian encoding + MoMagicLittleEndian = 0x950412de + // MoMagicBigEndian encoding + MoMagicBigEndian = 0xde120495 + + // EotSeparator msgctxt and msgid separator + EotSeparator = "\x04" + // NulSeparator msgid and msgstr separator + NulSeparator = "\x00" +) + +/* +Mo parses the content of any MO file and provides all the Translation functions needed. +It's the base object used by all package methods. +And it's safe for concurrent use by multiple goroutines by using the sync package for locking. 
+ +Example: + + import ( + "fmt" + "github.com/leonelquinteros/gotext" + ) + + func main() { + // Create po object + po := gotext.NewMoTranslator() + + // Parse .po file + po.ParseFile("/path/to/po/file/translations.mo") + + // Get Translation + fmt.Println(po.Get("Translate this")) + } + +*/ +type Mo struct { + // Headers storage + Headers textproto.MIMEHeader + + // Language header + Language string + + // Plural-Forms header + PluralForms string + + // Parsed Plural-Forms header values + nplurals int + plural string + pluralforms plurals.Expression + + // Storage + translations map[string]*Translation + contexts map[string]map[string]*Translation + + // Sync Mutex + sync.RWMutex + + // Parsing buffers + trBuffer *Translation + ctxBuffer string +} + +// NewMoTranslator creates a new Mo object with the Translator interface +func NewMoTranslator() Translator { + return new(Mo) +} + +// ParseFile tries to read the file by its provided path (f) and parse its content as a .po file. +func (mo *Mo) ParseFile(f string) { + // Check if file exists + info, err := os.Stat(f) + if err != nil { + return + } + + // Check that isn't a directory + if info.IsDir() { + return + } + + // Parse file content + data, err := ioutil.ReadFile(f) + if err != nil { + return + } + + mo.Parse(data) +} + +// Parse loads the translations specified in the provided string (str) +func (mo *Mo) Parse(buf []byte) { + // Lock while parsing + mo.Lock() + + // Init storage + if mo.translations == nil { + mo.translations = make(map[string]*Translation) + mo.contexts = make(map[string]map[string]*Translation) + } + + r := bytes.NewReader(buf) + + var magicNumber uint32 + if err := binary.Read(r, binary.LittleEndian, &magicNumber); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + var bo binary.ByteOrder + switch magicNumber { + case MoMagicLittleEndian: + bo = binary.LittleEndian + case MoMagicBigEndian: + bo = binary.BigEndian + default: + return + // return fmt.Errorf("gettext: %v", "invalid magic number") + } + + var header struct { + MajorVersion uint16 + MinorVersion uint16 + MsgIDCount uint32 + MsgIDOffset uint32 + MsgStrOffset uint32 + HashSize uint32 + HashOffset uint32 + } + if err := binary.Read(r, bo, &header); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + if v := header.MajorVersion; v != 0 && v != 1 { + return + // return fmt.Errorf("gettext: %v", "invalid version number") + } + if v := header.MinorVersion; v != 0 && v != 1 { + return + // return fmt.Errorf("gettext: %v", "invalid version number") + } + + msgIDStart := make([]uint32, header.MsgIDCount) + msgIDLen := make([]uint32, header.MsgIDCount) + if _, err := r.Seek(int64(header.MsgIDOffset), 0); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + for i := 0; i < int(header.MsgIDCount); i++ { + if err := binary.Read(r, bo, &msgIDLen[i]); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + if err := binary.Read(r, bo, &msgIDStart[i]); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + } + + msgStrStart := make([]int32, header.MsgIDCount) + msgStrLen := make([]int32, header.MsgIDCount) + if _, err := r.Seek(int64(header.MsgStrOffset), 0); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + for i := 0; i < int(header.MsgIDCount); i++ { + if err := binary.Read(r, bo, &msgStrLen[i]); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + if err := binary.Read(r, bo, &msgStrStart[i]); err != nil { + return + // return 
fmt.Errorf("gettext: %v", err) + } + } + + for i := 0; i < int(header.MsgIDCount); i++ { + if _, err := r.Seek(int64(msgIDStart[i]), 0); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + msgIDData := make([]byte, msgIDLen[i]) + if _, err := r.Read(msgIDData); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + + if _, err := r.Seek(int64(msgStrStart[i]), 0); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + msgStrData := make([]byte, msgStrLen[i]) + if _, err := r.Read(msgStrData); err != nil { + return + // return fmt.Errorf("gettext: %v", err) + } + + if len(msgIDData) == 0 { + mo.addTranslation(msgIDData, msgStrData) + } else { + mo.addTranslation(msgIDData, msgStrData) + } + } + + // Unlock to parse headers + mo.Unlock() + + // Parse headers + mo.parseHeaders() + return + // return nil +} + +func (mo *Mo) addTranslation(msgid, msgstr []byte) { + translation := NewTranslation() + var msgctxt []byte + var msgidPlural []byte + + d := bytes.Split(msgid, []byte(EotSeparator)) + if len(d) == 1 { + msgid = d[0] + } else { + msgid, msgctxt = d[1], d[0] + } + + dd := bytes.Split(msgid, []byte(NulSeparator)) + if len(dd) > 1 { + msgid = dd[0] + dd = dd[1:] + } + + translation.ID = string(msgid) + + msgidPlural = bytes.Join(dd, []byte(NulSeparator)) + if len(msgidPlural) > 0 { + translation.PluralID = string(msgidPlural) + } + + ddd := bytes.Split(msgstr, []byte(NulSeparator)) + if len(ddd) > 0 { + for i, s := range ddd { + translation.Trs[i] = string(s) + } + } + + if len(msgctxt) > 0 { + // With context... + if _, ok := mo.contexts[string(msgctxt)]; !ok { + mo.contexts[string(msgctxt)] = make(map[string]*Translation) + } + mo.contexts[string(msgctxt)][translation.ID] = translation + } else { + mo.translations[translation.ID] = translation + } +} + +// parseHeaders retrieves data from previously parsed headers +func (mo *Mo) parseHeaders() { + // Make sure we end with 2 carriage returns. + raw := mo.Get("") + "\n\n" + + // Read + reader := bufio.NewReader(strings.NewReader(raw)) + tp := textproto.NewReader(reader) + + var err error + + // Sync Headers write. + mo.Lock() + defer mo.Unlock() + + mo.Headers, err = tp.ReadMIMEHeader() + if err != nil { + return + } + + // Get/save needed headers + mo.Language = mo.Headers.Get("Language") + mo.PluralForms = mo.Headers.Get("Plural-Forms") + + // Parse Plural-Forms formula + if mo.PluralForms == "" { + return + } + + // Split plural form header value + pfs := strings.Split(mo.PluralForms, ";") + + // Parse values + for _, i := range pfs { + vs := strings.SplitN(i, "=", 2) + if len(vs) != 2 { + continue + } + + switch strings.TrimSpace(vs[0]) { + case "nplurals": + mo.nplurals, _ = strconv.Atoi(vs[1]) + + case "plural": + mo.plural = vs[1] + + if expr, err := plurals.Compile(mo.plural); err == nil { + mo.pluralforms = expr + } + + } + } +} + +// pluralForm calculates the plural form index corresponding to n. +// Returns 0 on error +func (mo *Mo) pluralForm(n int) int { + mo.RLock() + defer mo.RUnlock() + + // Failure fallback + if mo.pluralforms == nil { + /* Use the Germanic plural rule. */ + if n == 1 { + return 0 + } + return 1 + + } + return mo.pluralforms.Eval(uint32(n)) +} + +// Get retrieves the corresponding Translation for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. 
+func (mo *Mo) Get(str string, vars ...interface{}) string { + // Sync read + mo.RLock() + defer mo.RUnlock() + + if mo.translations != nil { + if _, ok := mo.translations[str]; ok { + return Printf(mo.translations[str].Get(), vars...) + } + } + + // Return the same we received by default + return Printf(str, vars...) +} + +// GetN retrieves the (N)th plural form of Translation for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (mo *Mo) GetN(str, plural string, n int, vars ...interface{}) string { + // Sync read + mo.RLock() + defer mo.RUnlock() + + if mo.translations != nil { + if _, ok := mo.translations[str]; ok { + return Printf(mo.translations[str].GetN(mo.pluralForm(n)), vars...) + } + } + + if n == 1 { + return Printf(str, vars...) + } + return Printf(plural, vars...) +} + +// GetC retrieves the corresponding Translation for a given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (mo *Mo) GetC(str, ctx string, vars ...interface{}) string { + // Sync read + mo.RLock() + defer mo.RUnlock() + + if mo.contexts != nil { + if _, ok := mo.contexts[ctx]; ok { + if mo.contexts[ctx] != nil { + if _, ok := mo.contexts[ctx][str]; ok { + return Printf(mo.contexts[ctx][str].Get(), vars...) + } + } + } + } + + // Return the string we received by default + return Printf(str, vars...) +} + +// GetNC retrieves the (N)th plural form of Translation for the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (mo *Mo) GetNC(str, plural string, n int, ctx string, vars ...interface{}) string { + // Sync read + mo.RLock() + defer mo.RUnlock() + + if mo.contexts != nil { + if _, ok := mo.contexts[ctx]; ok { + if mo.contexts[ctx] != nil { + if _, ok := mo.contexts[ctx][str]; ok { + return Printf(mo.contexts[ctx][str].GetN(mo.pluralForm(n)), vars...) + } + } + } + } + + if n == 1 { + return Printf(str, vars...) + } + return Printf(plural, vars...) 
+} + +// MarshalBinary implements encoding.BinaryMarshaler interface +func (mo *Mo) MarshalBinary() ([]byte, error) { + obj := new(TranslatorEncoding) + obj.Headers = mo.Headers + obj.Language = mo.Language + obj.PluralForms = mo.PluralForms + obj.Nplurals = mo.nplurals + obj.Plural = mo.plural + obj.Translations = mo.translations + obj.Contexts = mo.contexts + + var buff bytes.Buffer + encoder := gob.NewEncoder(&buff) + err := encoder.Encode(obj) + + return buff.Bytes(), err +} + +// UnmarshalBinary implements encoding.BinaryUnmarshaler interface +func (mo *Mo) UnmarshalBinary(data []byte) error { + buff := bytes.NewBuffer(data) + obj := new(TranslatorEncoding) + + decoder := gob.NewDecoder(buff) + err := decoder.Decode(obj) + if err != nil { + return err + } + + mo.Headers = obj.Headers + mo.Language = obj.Language + mo.PluralForms = obj.PluralForms + mo.nplurals = obj.Nplurals + mo.plural = obj.Plural + mo.translations = obj.Translations + mo.contexts = obj.Contexts + + if expr, err := plurals.Compile(mo.plural); err == nil { + mo.pluralforms = expr + } + + return nil +} diff --git a/vendor/github.com/leonelquinteros/gotext/plurals/compiler.go b/vendor/github.com/leonelquinteros/gotext/plurals/compiler.go new file mode 100644 index 0000000..8c85017 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/plurals/compiler.go @@ -0,0 +1,429 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +/* + Package plurals is the pluralform compiler to get the correct translation id of the plural string +*/ +package plurals + +import ( + "errors" + "fmt" + "regexp" + "strconv" + "strings" +) + +type match struct { + openPos int + closePos int +} + +var pat = regexp.MustCompile(`(\?|:|\|\||&&|==|!=|>=|>|<=|<|%|\d+|n)`) + +type testToken interface { + compile(tokens []string) (test test, err error) +} + +type cmpTestBuilder func(val uint32, flipped bool) test +type logicTestBuild func(left test, right test) test + +var ternaryToken ternaryStruct + +type ternaryStruct struct{} + +func (ternaryStruct) compile(tokens []string) (expr Expression, err error) { + main, err := splitTokens(tokens, "?") + if err != nil { + return expr, err + } + test, err := compileTest(strings.Join(main.Left, "")) + if err != nil { + return expr, err + } + actions, err := splitTokens(main.Right, ":") + if err != nil { + return expr, err + } + trueAction, err := compileExpression(strings.Join(actions.Left, "")) + if err != nil { + return expr, err + } + falseAction, err := compileExpression(strings.Join(actions.Right, "")) + if err != nil { + return expr, nil + } + return ternary{ + test: test, + trueExpr: trueAction, + falseExpr: falseAction, + }, nil +} + +var constToken constValStruct + +type constValStruct struct{} + +func (constValStruct) compile(tokens []string) (expr Expression, err error) { + if len(tokens) == 0 { + return expr, errors.New("got nothing instead of constant") + } + if len(tokens) != 1 { + return expr, fmt.Errorf("invalid constant: %s", strings.Join(tokens, "")) + } + i, err := strconv.Atoi(tokens[0]) + if err != nil { + return expr, err + } + return constValue{value: i}, nil +} + +func compileLogicTest(tokens []string, sep string, builder logicTestBuild) (test test, err error) { + split, err := splitTokens(tokens, sep) + if err != nil { + return test, err + } + left, err := compileTest(strings.Join(split.Left, "")) + if err != nil { + return test, 
err + } + right, err := compileTest(strings.Join(split.Right, "")) + if err != nil { + return test, err + } + return builder(left, right), nil +} + +var orToken orStruct + +type orStruct struct{} + +func (orStruct) compile(tokens []string) (test test, err error) { + return compileLogicTest(tokens, "||", buildOr) +} +func buildOr(left test, right test) test { + return or{left: left, right: right} +} + +var andToken andStruct + +type andStruct struct{} + +func (andStruct) compile(tokens []string) (test test, err error) { + return compileLogicTest(tokens, "&&", buildAnd) +} +func buildAnd(left test, right test) test { + return and{left: left, right: right} +} + +func compileMod(tokens []string) (math math, err error) { + split, err := splitTokens(tokens, "%") + if err != nil { + return math, err + } + if len(split.Left) != 1 || split.Left[0] != "n" { + return math, errors.New("Modulus operation requires 'n' as left operand") + } + if len(split.Right) != 1 { + return math, errors.New("Modulus operation requires simple integer as right operand") + } + i, err := parseUint32(split.Right[0]) + if err != nil { + return math, err + } + return mod{value: uint32(i)}, nil +} + +func subPipe(modTokens []string, actionTokens []string, builder cmpTestBuilder, flipped bool) (test test, err error) { + modifier, err := compileMod(modTokens) + if err != nil { + return test, err + } + if len(actionTokens) != 1 { + return test, errors.New("can only get modulus of integer") + } + i, err := parseUint32(actionTokens[0]) + if err != nil { + return test, err + } + action := builder(uint32(i), flipped) + return pipe{ + modifier: modifier, + action: action, + }, nil +} + +func compileEquality(tokens []string, sep string, builder cmpTestBuilder) (test test, err error) { + split, err := splitTokens(tokens, sep) + if err != nil { + return test, err + } + if len(split.Left) == 1 && split.Left[0] == "n" { + if len(split.Right) != 1 { + return test, errors.New("test can only compare n to integers") + } + i, err := parseUint32(split.Right[0]) + if err != nil { + return test, err + } + return builder(i, false), nil + } else if len(split.Right) == 1 && split.Right[0] == "n" { + if len(split.Left) != 1 { + return test, errors.New("test can only compare n to integers") + } + i, err := parseUint32(split.Left[0]) + if err != nil { + return test, err + } + return builder(i, true), nil + } else if contains(split.Left, "n") && contains(split.Left, "%") { + return subPipe(split.Left, split.Right, builder, false) + } + return test, errors.New("equality test must have 'n' as one of the two tests") + +} + +var eqToken eqStruct + +type eqStruct struct{} + +func (eqStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, "==", buildEq) +} +func buildEq(val uint32, flipped bool) test { + return equal{value: val} +} + +var neqToken neqStruct + +type neqStruct struct{} + +func (neqStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, "!=", buildNeq) +} +func buildNeq(val uint32, flipped bool) test { + return notequal{value: val} +} + +var gtToken gtStruct + +type gtStruct struct{} + +func (gtStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, ">", buildGt) +} +func buildGt(val uint32, flipped bool) test { + return gt{value: val, flipped: flipped} +} + +var gteToken gteStruct + +type gteStruct struct{} + +func (gteStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, ">=", buildGte) +} +func 
buildGte(val uint32, flipped bool) test { + return gte{value: val, flipped: flipped} +} + +var ltToken ltStruct + +type ltStruct struct{} + +func (ltStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, "<", buildLt) +} +func buildLt(val uint32, flipped bool) test { + return lt{value: val, flipped: flipped} +} + +var lteToken lteStruct + +type lteStruct struct{} + +func (lteStruct) compile(tokens []string) (test test, err error) { + return compileEquality(tokens, "<=", buildLte) +} +func buildLte(val uint32, flipped bool) test { + return lte{value: val, flipped: flipped} +} + +type testTokenDef struct { + op string + token testToken +} + +var precedence = []testTokenDef{ + {op: "||", token: orToken}, + {op: "&&", token: andToken}, + {op: "==", token: eqToken}, + {op: "!=", token: neqToken}, + {op: ">=", token: gteToken}, + {op: ">", token: gtToken}, + {op: "<=", token: lteToken}, + {op: "<", token: ltToken}, +} + +type splitted struct { + Left []string + Right []string +} + +// Find index of token in list of tokens +func index(tokens []string, sep string) int { + for index, token := range tokens { + if token == sep { + return index + } + } + return -1 +} + +// Split a list of tokens by a token into a splitted struct holding the tokens +// before and after the token to be split by. +func splitTokens(tokens []string, sep string) (s splitted, err error) { + index := index(tokens, sep) + if index == -1 { + return s, fmt.Errorf("'%s' not found in ['%s']", sep, strings.Join(tokens, "','")) + } + return splitted{ + Left: tokens[:index], + Right: tokens[index+1:], + }, nil +} + +// Scan a string for parenthesis +func scan(s string) <-chan match { + ch := make(chan match) + go func() { + depth := 0 + opener := 0 + for index, char := range s { + switch char { + case '(': + if depth == 0 { + opener = index + } + depth++ + case ')': + depth-- + if depth == 0 { + ch <- match{ + openPos: opener, + closePos: index + 1, + } + } + } + + } + close(ch) + }() + return ch +} + +// Split the string into tokens +func split(s string) <-chan string { + ch := make(chan string) + go func() { + s = strings.Replace(s, " ", "", -1) + if !strings.Contains(s, "(") { + ch <- s + } else { + last := 0 + end := len(s) + for info := range scan(s) { + if last != info.openPos { + ch <- s[last:info.openPos] + } + ch <- s[info.openPos:info.closePos] + last = info.closePos + } + if last != end { + ch <- s[last:] + } + } + close(ch) + }() + return ch +} + +// Tokenizes a string into a list of strings, tokens grouped by parenthesis are +// not split! If the string starts with ( and ends in ), those are stripped. +func tokenize(s string) []string { + /* + TODO: Properly detect if the string starts with a ( and ends with a ) + and that those two form a matching pair. + + Eg: (foo) -> true; (foo)(bar) -> false; + */ + if s[0] == '(' && s[len(s)-1] == ')' { + s = s[1 : len(s)-1] + } + ret := []string{} + for chunk := range split(s) { + if len(chunk) != 0 { + if chunk[0] == '(' && chunk[len(chunk)-1] == ')' { + ret = append(ret, chunk) + } else { + for _, token := range pat.FindAllStringSubmatch(chunk, -1) { + ret = append(ret, token[0]) + } + } + } else { + fmt.Printf("Empty chunk in string '%s'\n", s) + } + } + return ret +} + +// Compile a string containing a plural form expression to a Expression object. 
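+//
+// Illustrative sketch: the expression below is the common Germanic/English
+// rule as it appears in a Plural-Forms header ("plural=n != 1"); any such
+// body can be compiled once and then evaluated for a count n.
+//
+//	expr, err := Compile("n != 1")
+//	if err == nil {
+//		_ = expr.Eval(1) // 0 -> singular translation index
+//		_ = expr.Eval(5) // 1 -> plural translation index
+//	}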
+func Compile(s string) (expr Expression, err error) { + if s == "0" { + return constValue{value: 0}, nil + } + if !strings.Contains(s, "?") { + s += "?1:0" + } + return compileExpression(s) +} + +// Check if a token is in a slice of strings +func contains(haystack []string, needle string) bool { + for _, s := range haystack { + if s == needle { + return true + } + } + return false +} + +// Compiles an expression (ternary or constant) +func compileExpression(s string) (expr Expression, err error) { + tokens := tokenize(s) + if contains(tokens, "?") { + return ternaryToken.compile(tokens) + } + return constToken.compile(tokens) +} + +// Compiles a test (comparison) +func compileTest(s string) (test test, err error) { + tokens := tokenize(s) + for _, tokenDef := range precedence { + if contains(tokens, tokenDef.op) { + return tokenDef.token.compile(tokens) + } + } + return test, errors.New("cannot compile") +} + +func parseUint32(s string) (ui uint32, err error) { + i, err := strconv.ParseUint(s, 10, 32) + if err != nil { + return ui, err + } + return uint32(i), nil +} diff --git a/vendor/github.com/leonelquinteros/gotext/plurals/expression.go b/vendor/github.com/leonelquinteros/gotext/plurals/expression.go new file mode 100644 index 0000000..3a2add5 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/plurals/expression.go @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package plurals + +// Expression is a plurals expression. Eval evaluates the expression for +// a given n value. Use plurals.Compile to generate Expression instances. +type Expression interface { + Eval(n uint32) int +} + +type constValue struct { + value int +} + +func (c constValue) Eval(n uint32) int { + return c.value +} + +type test interface { + test(n uint32) bool +} + +type ternary struct { + test test + trueExpr Expression + falseExpr Expression +} + +func (t ternary) Eval(n uint32) int { + if t.test.test(n) { + if t.trueExpr == nil { + return -1 + } + return t.trueExpr.Eval(n) + } + if t.falseExpr == nil { + return -1 + } + return t.falseExpr.Eval(n) +} diff --git a/vendor/github.com/leonelquinteros/gotext/plurals/math.go b/vendor/github.com/leonelquinteros/gotext/plurals/math.go new file mode 100644 index 0000000..ceaeaaf --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/plurals/math.go @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package plurals + +type math interface { + calc(n uint32) uint32 +} + +type mod struct { + value uint32 +} + +func (m mod) calc(n uint32) uint32 { + return n % m.value +} diff --git a/vendor/github.com/leonelquinteros/gotext/plurals/tests.go b/vendor/github.com/leonelquinteros/gotext/plurals/tests.go new file mode 100644 index 0000000..b459610 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/plurals/tests.go @@ -0,0 +1,104 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. 
+ */ + +package plurals + +type equal struct { + value uint32 +} + +func (e equal) test(n uint32) bool { + return n == e.value +} + +type notequal struct { + value uint32 +} + +func (e notequal) test(n uint32) bool { + return n != e.value +} + +type gt struct { + value uint32 + flipped bool +} + +func (e gt) test(n uint32) bool { + if e.flipped { + return e.value > n + } else { + return n > e.value + } +} + +type lt struct { + value uint32 + flipped bool +} + +func (e lt) test(n uint32) bool { + if e.flipped { + return e.value < n + } + return n < e.value +} + +type gte struct { + value uint32 + flipped bool +} + +func (e gte) test(n uint32) bool { + if e.flipped { + return e.value >= n + } + return n >= e.value +} + +type lte struct { + value uint32 + flipped bool +} + +func (e lte) test(n uint32) bool { + if e.flipped { + return e.value <= n + } + return n <= e.value +} + +type and struct { + left test + right test +} + +func (e and) test(n uint32) bool { + if !e.left.test(n) { + return false + } + return e.right.test(n) +} + +type or struct { + left test + right test +} + +func (e or) test(n uint32) bool { + if e.left.test(n) { + return true + } + return e.right.test(n) +} + +type pipe struct { + modifier math + action test +} + +func (e pipe) test(n uint32) bool { + return e.action.test(e.modifier.calc(n)) +} diff --git a/vendor/github.com/leonelquinteros/gotext/po.go b/vendor/github.com/leonelquinteros/gotext/po.go new file mode 100644 index 0000000..4754924 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/po.go @@ -0,0 +1,499 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package gotext + +import ( + "bufio" + "bytes" + "encoding/gob" + "io/ioutil" + "net/textproto" + "os" + "strconv" + "strings" + "sync" + + "github.com/leonelquinteros/gotext/plurals" +) + +/* +Po parses the content of any PO file and provides all the Translation functions needed. +It's the base object used by all package methods. +And it's safe for concurrent use by multiple goroutines by using the sync package for locking. + +Example: + + import ( + "fmt" + "github.com/leonelquinteros/gotext" + ) + + func main() { + // Create po object + po := gotext.NewPoTranslator() + + // Parse .po file + po.ParseFile("/path/to/po/file/translations.po") + + // Get Translation + fmt.Println(po.Get("Translate this")) + } + +*/ +type Po struct { + // Headers storage + Headers textproto.MIMEHeader + + // Language header + Language string + + // Plural-Forms header + PluralForms string + + // Parsed Plural-Forms header values + nplurals int + plural string + pluralforms plurals.Expression + + // Storage + translations map[string]*Translation + contexts map[string]map[string]*Translation + + // Sync Mutex + sync.RWMutex + + // Parsing buffers + trBuffer *Translation + ctxBuffer string +} + +type parseState int + +const ( + head parseState = iota + msgCtxt + msgID + msgIDPlural + msgStr +) + +// NewPoTranslator creates a new Po object with the Translator interface +func NewPoTranslator() Translator { + return new(Po) +} + +// ParseFile tries to read the file by its provided path (f) and parse its content as a .po file. 
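+//
+// ParseFile reads the file contents and delegates to Parse, so .po content
+// already held in memory can be fed to Parse directly. Illustrative sketch
+// with a hypothetical two-line document:
+//
+//	po := new(Po)
+//	po.Parse([]byte("msgid \"apple\"\nmsgstr \"manzana\"\n"))
+//	_ = po.Get("apple") // "manzana"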
+func (po *Po) ParseFile(f string) { + // Check if file exists + info, err := os.Stat(f) + if err != nil { + return + } + + // Check that isn't a directory + if info.IsDir() { + return + } + + // Parse file content + data, err := ioutil.ReadFile(f) + if err != nil { + return + } + + po.Parse(data) +} + +// Parse loads the translations specified in the provided string (str) +func (po *Po) Parse(buf []byte) { + // Lock while parsing + po.Lock() + + // Init storage + if po.translations == nil { + po.translations = make(map[string]*Translation) + po.contexts = make(map[string]map[string]*Translation) + } + + // Get lines + lines := strings.Split(string(buf), "\n") + + // Init buffer + po.trBuffer = NewTranslation() + po.ctxBuffer = "" + + state := head + for _, l := range lines { + // Trim spaces + l = strings.TrimSpace(l) + + // Skip invalid lines + if !po.isValidLine(l) { + continue + } + + // Buffer context and continue + if strings.HasPrefix(l, "msgctxt") { + po.parseContext(l) + state = msgCtxt + continue + } + + // Buffer msgid and continue + if strings.HasPrefix(l, "msgid") && !strings.HasPrefix(l, "msgid_plural") { + po.parseID(l) + state = msgID + continue + } + + // Check for plural form + if strings.HasPrefix(l, "msgid_plural") { + po.parsePluralID(l) + state = msgIDPlural + continue + } + + // Save Translation + if strings.HasPrefix(l, "msgstr") { + po.parseMessage(l) + state = msgStr + continue + } + + // Multi line strings and headers + if strings.HasPrefix(l, "\"") && strings.HasSuffix(l, "\"") { + po.parseString(l, state) + continue + } + } + + // Save last Translation buffer. + po.saveBuffer() + + // Unlock to parse headers + po.Unlock() + + // Parse headers + po.parseHeaders() +} + +// saveBuffer takes the context and Translation buffers +// and saves it on the translations collection +func (po *Po) saveBuffer() { + // With no context... + if po.ctxBuffer == "" { + po.translations[po.trBuffer.ID] = po.trBuffer + } else { + // With context... + if _, ok := po.contexts[po.ctxBuffer]; !ok { + po.contexts[po.ctxBuffer] = make(map[string]*Translation) + } + po.contexts[po.ctxBuffer][po.trBuffer.ID] = po.trBuffer + + // Cleanup current context buffer if needed + if po.trBuffer.ID != "" { + po.ctxBuffer = "" + } + } + + // Flush Translation buffer + po.trBuffer = NewTranslation() +} + +// parseContext takes a line starting with "msgctxt", +// saves the current Translation buffer and creates a new context. +func (po *Po) parseContext(l string) { + // Save current Translation buffer. + po.saveBuffer() + + // Buffer context + po.ctxBuffer, _ = strconv.Unquote(strings.TrimSpace(strings.TrimPrefix(l, "msgctxt"))) +} + +// parseID takes a line starting with "msgid", +// saves the current Translation and creates a new msgid buffer. +func (po *Po) parseID(l string) { + // Save current Translation buffer. + po.saveBuffer() + + // Set id + po.trBuffer.ID, _ = strconv.Unquote(strings.TrimSpace(strings.TrimPrefix(l, "msgid"))) +} + +// parsePluralID saves the plural id buffer from a line starting with "msgid_plural" +func (po *Po) parsePluralID(l string) { + po.trBuffer.PluralID, _ = strconv.Unquote(strings.TrimSpace(strings.TrimPrefix(l, "msgid_plural"))) +} + +// parseMessage takes a line starting with "msgstr" and saves it into the current buffer. 
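+//
+// Both the plain and the indexed plural forms are handled, e.g. lines such as
+// the following (illustrative):
+//
+//	msgstr "translated"
+//	msgstr[0] "singular translation"
+//	msgstr[1] "first plural translation"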
+func (po *Po) parseMessage(l string) { + l = strings.TrimSpace(strings.TrimPrefix(l, "msgstr")) + + // Check for indexed Translation forms + if strings.HasPrefix(l, "[") { + idx := strings.Index(l, "]") + if idx == -1 { + // Skip wrong index formatting + return + } + + // Parse index + i, err := strconv.Atoi(l[1:idx]) + if err != nil { + // Skip wrong index formatting + return + } + + // Parse Translation string + po.trBuffer.Trs[i], _ = strconv.Unquote(strings.TrimSpace(l[idx+1:])) + + // Loop + return + } + + // Save single Translation form under 0 index + po.trBuffer.Trs[0], _ = strconv.Unquote(l) +} + +// parseString takes a well formatted string without prefix +// and creates headers or attach multi-line strings when corresponding +func (po *Po) parseString(l string, state parseState) { + clean, _ := strconv.Unquote(l) + + switch state { + case msgStr: + // Append to last Translation found + po.trBuffer.Trs[len(po.trBuffer.Trs)-1] += clean + + case msgID: + // Multiline msgid - Append to current id + po.trBuffer.ID += clean + + case msgIDPlural: + // Multiline msgid - Append to current id + po.trBuffer.PluralID += clean + + case msgCtxt: + // Multiline context - Append to current context + po.ctxBuffer += clean + + } +} + +// isValidLine checks for line prefixes to detect valid syntax. +func (po *Po) isValidLine(l string) bool { + // Check prefix + valid := []string{ + "\"", + "msgctxt", + "msgid", + "msgid_plural", + "msgstr", + } + + for _, v := range valid { + if strings.HasPrefix(l, v) { + return true + } + } + + return false +} + +// parseHeaders retrieves data from previously parsed headers +func (po *Po) parseHeaders() { + // Make sure we end with 2 carriage returns. + raw := po.Get("") + "\n\n" + + // Read + reader := bufio.NewReader(strings.NewReader(raw)) + tp := textproto.NewReader(reader) + + var err error + + // Sync Headers write. + po.Lock() + defer po.Unlock() + + po.Headers, err = tp.ReadMIMEHeader() + if err != nil { + return + } + + // Get/save needed headers + po.Language = po.Headers.Get("Language") + po.PluralForms = po.Headers.Get("Plural-Forms") + + // Parse Plural-Forms formula + if po.PluralForms == "" { + return + } + + // Split plural form header value + pfs := strings.Split(po.PluralForms, ";") + + // Parse values + for _, i := range pfs { + vs := strings.SplitN(i, "=", 2) + if len(vs) != 2 { + continue + } + + switch strings.TrimSpace(vs[0]) { + case "nplurals": + po.nplurals, _ = strconv.Atoi(vs[1]) + + case "plural": + po.plural = vs[1] + + if expr, err := plurals.Compile(po.plural); err == nil { + po.pluralforms = expr + } + + } + } +} + +// pluralForm calculates the plural form index corresponding to n. +// Returns 0 on error +func (po *Po) pluralForm(n int) int { + po.RLock() + defer po.RUnlock() + + // Failure fallback + if po.pluralforms == nil { + /* Use the Germanic plural rule. */ + if n == 1 { + return 0 + } + return 1 + } + return po.pluralforms.Eval(uint32(n)) +} + +// Get retrieves the corresponding Translation for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (po *Po) Get(str string, vars ...interface{}) string { + // Sync read + po.RLock() + defer po.RUnlock() + + if po.translations != nil { + if _, ok := po.translations[str]; ok { + return Printf(po.translations[str].Get(), vars...) + } + } + + // Return the same we received by default + return Printf(str, vars...) 
+} + +// GetN retrieves the (N)th plural form of Translation for the given string. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (po *Po) GetN(str, plural string, n int, vars ...interface{}) string { + // Sync read + po.RLock() + defer po.RUnlock() + + if po.translations != nil { + if _, ok := po.translations[str]; ok { + return Printf(po.translations[str].GetN(po.pluralForm(n)), vars...) + } + } + + if n == 1 { + return Printf(str, vars...) + } + return Printf(plural, vars...) +} + +// GetC retrieves the corresponding Translation for a given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (po *Po) GetC(str, ctx string, vars ...interface{}) string { + // Sync read + po.RLock() + defer po.RUnlock() + + if po.contexts != nil { + if _, ok := po.contexts[ctx]; ok { + if po.contexts[ctx] != nil { + if _, ok := po.contexts[ctx][str]; ok { + return Printf(po.contexts[ctx][str].Get(), vars...) + } + } + } + } + + // Return the string we received by default + return Printf(str, vars...) +} + +// GetNC retrieves the (N)th plural form of Translation for the given string in the given context. +// Supports optional parameters (vars... interface{}) to be inserted on the formatted string using the fmt.Printf syntax. +func (po *Po) GetNC(str, plural string, n int, ctx string, vars ...interface{}) string { + // Sync read + po.RLock() + defer po.RUnlock() + + if po.contexts != nil { + if _, ok := po.contexts[ctx]; ok { + if po.contexts[ctx] != nil { + if _, ok := po.contexts[ctx][str]; ok { + return Printf(po.contexts[ctx][str].GetN(po.pluralForm(n)), vars...) + } + } + } + } + + if n == 1 { + return Printf(str, vars...) + } + return Printf(plural, vars...) +} + +// MarshalBinary implements encoding.BinaryMarshaler interface +func (po *Po) MarshalBinary() ([]byte, error) { + obj := new(TranslatorEncoding) + obj.Headers = po.Headers + obj.Language = po.Language + obj.PluralForms = po.PluralForms + obj.Nplurals = po.nplurals + obj.Plural = po.plural + obj.Translations = po.translations + obj.Contexts = po.contexts + + var buff bytes.Buffer + encoder := gob.NewEncoder(&buff) + err := encoder.Encode(obj) + + return buff.Bytes(), err +} + +// UnmarshalBinary implements encoding.BinaryUnmarshaler interface +func (po *Po) UnmarshalBinary(data []byte) error { + buff := bytes.NewBuffer(data) + obj := new(TranslatorEncoding) + + decoder := gob.NewDecoder(buff) + err := decoder.Decode(obj) + if err != nil { + return err + } + + po.Headers = obj.Headers + po.Language = obj.Language + po.PluralForms = obj.PluralForms + po.nplurals = obj.Nplurals + po.plural = obj.Plural + po.translations = obj.Translations + po.contexts = obj.Contexts + + if expr, err := plurals.Compile(po.plural); err == nil { + po.pluralforms = expr + } + + return nil +} diff --git a/vendor/github.com/leonelquinteros/gotext/translation.go b/vendor/github.com/leonelquinteros/gotext/translation.go new file mode 100644 index 0000000..bc069d4 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/translation.go @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. 
+ */ + +package gotext + +// Translation is the struct for the Translations parsed via Po or Mo files and all coming parsers +type Translation struct { + ID string + PluralID string + Trs map[int]string +} + +// NewTranslation returns the Translation object and initialized it. +func NewTranslation() *Translation { + tr := new(Translation) + tr.Trs = make(map[int]string) + + return tr +} + +// Get returns the string of the translation +func (t *Translation) Get() string { + // Look for Translation index 0 + if _, ok := t.Trs[0]; ok { + if t.Trs[0] != "" { + return t.Trs[0] + } + } + + // Return untranslated id by default + return t.ID +} + +// GetN returns the string of the plural translation +func (t *Translation) GetN(n int) string { + // Look for Translation index + if _, ok := t.Trs[n]; ok { + if t.Trs[n] != "" { + return t.Trs[n] + } + } + + // Return untranslated singular if corresponding + if n == 0 { + return t.ID + } + + // Return untranslated plural by default + return t.PluralID +} diff --git a/vendor/github.com/leonelquinteros/gotext/translator.go b/vendor/github.com/leonelquinteros/gotext/translator.go new file mode 100644 index 0000000..982a600 --- /dev/null +++ b/vendor/github.com/leonelquinteros/gotext/translator.go @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2018 DeineAgentur UG https://www.deineagentur.com. All rights reserved. + * Licensed under the MIT License. See LICENSE file in the project root for full license information. + */ + +package gotext + +import "net/textproto" + +// Translator interface is used by Locale and Po objects.Translator +// It contains all methods needed to parse translation sources and obtain corresponding translations. +// Also implements gob.GobEncoder/gob.DobDecoder interfaces to allow serialization of Locale objects. +type Translator interface { + ParseFile(f string) + Parse(buf []byte) + Get(str string, vars ...interface{}) string + GetN(str, plural string, n int, vars ...interface{}) string + GetC(str, ctx string, vars ...interface{}) string + GetNC(str, plural string, n int, ctx string, vars ...interface{}) string + + MarshalBinary() ([]byte, error) + UnmarshalBinary([]byte) error +} + +// TranslatorEncoding is used as intermediary storage to encode Translator objects to Gob. +type TranslatorEncoding struct { + // Headers storage + Headers textproto.MIMEHeader + + // Language header + Language string + + // Plural-Forms header + PluralForms string + + // Parsed Plural-Forms header values + Nplurals int + Plural string + + // Storage + Translations map[string]*Translation + Contexts map[string]map[string]*Translation +} + +// GetTranslator is used to recover a Translator object after unmarshaling the TranslatorEncoding object. +// Internally uses a Po object as it should be switcheable with Mo objects without problem. +// External Translator implementations should be able to serialize into a TranslatorEncoding object in order to unserialize into a Po-compatible object. 
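+//
+// Illustrative sketch of the decode path (it mirrors what Po.UnmarshalBinary
+// does internally; "data" is a hypothetical gob-encoded byte slice):
+//
+//	var enc TranslatorEncoding
+//	if err := gob.NewDecoder(bytes.NewBuffer(data)).Decode(&enc); err == nil {
+//		tr := enc.GetTranslator()
+//		_ = tr.Get("some msgid")
+//	}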
+func (te *TranslatorEncoding) GetTranslator() Translator { + po := new(Po) + po.Headers = te.Headers + po.Language = te.Language + po.PluralForms = te.PluralForms + po.nplurals = te.Nplurals + po.plural = te.Plural + po.translations = te.Translations + po.contexts = te.Contexts + + return po +} diff --git a/vendor/github.com/lyraproj/data-protobuf/LICENSE b/vendor/github.com/lyraproj/data-protobuf/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/data-protobuf/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/lyraproj/data-protobuf/datapb/data.pb.go b/vendor/github.com/lyraproj/data-protobuf/datapb/data.pb.go new file mode 100644 index 0000000..324a1ac --- /dev/null +++ b/vendor/github.com/lyraproj/data-protobuf/datapb/data.pb.go @@ -0,0 +1,471 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: datapb/data.proto + +/* +Package datapb is a generated protocol buffer package. + +It is generated from these files: + datapb/data.proto + +It has these top-level messages: + DataHash + DataEntry + Data + DataArray +*/ +package datapb + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +type NullValue int32 + +const ( + // Null value. + NullValue_NULL_VALUE NullValue = 0 +) + +var NullValue_name = map[int32]string{ + 0: "NULL_VALUE", +} +var NullValue_value = map[string]int32{ + "NULL_VALUE": 0, +} + +func (x NullValue) String() string { + return proto.EnumName(NullValue_name, int32(x)) +} +func (NullValue) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +// `DataHash` represents an ordered hash of Data values, consisting of fields +// which map to dynamically typed values. In some languages, `DataHash` +// might be supported by a native representation. For example, in scripting +// languages like JS, a data hash is represented as an object. +type DataHash struct { + // Ordered list of associations between strings and dynamically typed values. 
+ Entries []*DataEntry `protobuf:"bytes,1,rep,name=entries" json:"entries,omitempty"` +} + +func (m *DataHash) Reset() { *m = DataHash{} } +func (m *DataHash) String() string { return proto.CompactTextString(m) } +func (*DataHash) ProtoMessage() {} +func (*DataHash) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *DataHash) GetEntries() []*DataEntry { + if m != nil { + return m.Entries + } + return nil +} + +// `DataEntry` represents one association between a key and a value +// in the ordered `DataHash` +type DataEntry struct { + Key *Data `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"` + Value *Data `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"` +} + +func (m *DataEntry) Reset() { *m = DataEntry{} } +func (m *DataEntry) String() string { return proto.CompactTextString(m) } +func (*DataEntry) ProtoMessage() {} +func (*DataEntry) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *DataEntry) GetKey() *Data { + if m != nil { + return m.Key + } + return nil +} + +func (m *DataEntry) GetValue() *Data { + if m != nil { + return m.Value + } + return nil +} + +// `Data` represents a dynamically typed value which can be either +// null, an integer, a float, a string, a boolean, a recursive data hash, or an +// array of data. +type Data struct { + // The kind of value. + // + // Types that are valid to be assigned to Kind: + // *Data_UndefValue + // *Data_IntegerValue + // *Data_FloatValue + // *Data_StringValue + // *Data_BooleanValue + // *Data_HashValue + // *Data_ArrayValue + // *Data_BinaryValue + // *Data_Reference + Kind isData_Kind `protobuf_oneof:"kind"` +} + +func (m *Data) Reset() { *m = Data{} } +func (m *Data) String() string { return proto.CompactTextString(m) } +func (*Data) ProtoMessage() {} +func (*Data) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +type isData_Kind interface { + isData_Kind() +} + +type Data_UndefValue struct { + UndefValue NullValue `protobuf:"varint,1,opt,name=undef_value,json=undefValue,enum=puppet.datapb.NullValue,oneof"` +} +type Data_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,2,opt,name=integer_value,json=integerValue,oneof"` +} +type Data_FloatValue struct { + FloatValue float64 `protobuf:"fixed64,3,opt,name=float_value,json=floatValue,oneof"` +} +type Data_StringValue struct { + StringValue string `protobuf:"bytes,4,opt,name=string_value,json=stringValue,oneof"` +} +type Data_BooleanValue struct { + BooleanValue bool `protobuf:"varint,5,opt,name=boolean_value,json=booleanValue,oneof"` +} +type Data_HashValue struct { + HashValue *DataHash `protobuf:"bytes,6,opt,name=hash_value,json=hashValue,oneof"` +} +type Data_ArrayValue struct { + ArrayValue *DataArray `protobuf:"bytes,7,opt,name=array_value,json=arrayValue,oneof"` +} +type Data_BinaryValue struct { + BinaryValue []byte `protobuf:"bytes,8,opt,name=binary_value,json=binaryValue,proto3,oneof"` +} +type Data_Reference struct { + Reference int64 `protobuf:"varint,9,opt,name=reference,oneof"` +} + +func (*Data_UndefValue) isData_Kind() {} +func (*Data_IntegerValue) isData_Kind() {} +func (*Data_FloatValue) isData_Kind() {} +func (*Data_StringValue) isData_Kind() {} +func (*Data_BooleanValue) isData_Kind() {} +func (*Data_HashValue) isData_Kind() {} +func (*Data_ArrayValue) isData_Kind() {} +func (*Data_BinaryValue) isData_Kind() {} +func (*Data_Reference) isData_Kind() {} + +func (m *Data) GetKind() isData_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Data) 
GetUndefValue() NullValue { + if x, ok := m.GetKind().(*Data_UndefValue); ok { + return x.UndefValue + } + return NullValue_NULL_VALUE +} + +func (m *Data) GetIntegerValue() int64 { + if x, ok := m.GetKind().(*Data_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Data) GetFloatValue() float64 { + if x, ok := m.GetKind().(*Data_FloatValue); ok { + return x.FloatValue + } + return 0 +} + +func (m *Data) GetStringValue() string { + if x, ok := m.GetKind().(*Data_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Data) GetBooleanValue() bool { + if x, ok := m.GetKind().(*Data_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Data) GetHashValue() *DataHash { + if x, ok := m.GetKind().(*Data_HashValue); ok { + return x.HashValue + } + return nil +} + +func (m *Data) GetArrayValue() *DataArray { + if x, ok := m.GetKind().(*Data_ArrayValue); ok { + return x.ArrayValue + } + return nil +} + +func (m *Data) GetBinaryValue() []byte { + if x, ok := m.GetKind().(*Data_BinaryValue); ok { + return x.BinaryValue + } + return nil +} + +func (m *Data) GetReference() int64 { + if x, ok := m.GetKind().(*Data_Reference); ok { + return x.Reference + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Data) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Data_OneofMarshaler, _Data_OneofUnmarshaler, _Data_OneofSizer, []interface{}{ + (*Data_UndefValue)(nil), + (*Data_IntegerValue)(nil), + (*Data_FloatValue)(nil), + (*Data_StringValue)(nil), + (*Data_BooleanValue)(nil), + (*Data_HashValue)(nil), + (*Data_ArrayValue)(nil), + (*Data_BinaryValue)(nil), + (*Data_Reference)(nil), + } +} + +func _Data_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Data) + // kind + switch x := m.Kind.(type) { + case *Data_UndefValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.UndefValue)) + case *Data_IntegerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Data_FloatValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.FloatValue)) + case *Data_StringValue: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Data_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Data_HashValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HashValue); err != nil { + return err + } + case *Data_ArrayValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayValue); err != nil { + return err + } + case *Data_BinaryValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BinaryValue) + case *Data_Reference: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Reference)) + case nil: + default: + return fmt.Errorf("Data.Kind has unexpected type %T", x) + } + return nil +} + +func _Data_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Data) + switch tag { + case 1: // kind.undef_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Data_UndefValue{NullValue(x)} + return true, err + case 2: // kind.integer_value + if wire != proto.WireVarint { + return true, 
proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Data_IntegerValue{int64(x)} + return true, err + case 3: // kind.float_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Kind = &Data_FloatValue{math.Float64frombits(x)} + return true, err + case 4: // kind.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Data_StringValue{x} + return true, err + case 5: // kind.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Data_BooleanValue{x != 0} + return true, err + case 6: // kind.hash_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DataHash) + err := b.DecodeMessage(msg) + m.Kind = &Data_HashValue{msg} + return true, err + case 7: // kind.array_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DataArray) + err := b.DecodeMessage(msg) + m.Kind = &Data_ArrayValue{msg} + return true, err + case 8: // kind.binary_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Kind = &Data_BinaryValue{x} + return true, err + case 9: // kind.reference + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Data_Reference{int64(x)} + return true, err + default: + return false, nil + } +} + +func _Data_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Data) + // kind + switch x := m.Kind.(type) { + case *Data_UndefValue: + n += proto.SizeVarint(1<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.UndefValue)) + case *Data_IntegerValue: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Data_FloatValue: + n += proto.SizeVarint(3<<3 | proto.WireFixed64) + n += 8 + case *Data_StringValue: + n += proto.SizeVarint(4<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Data_BooleanValue: + n += proto.SizeVarint(5<<3 | proto.WireVarint) + n += 1 + case *Data_HashValue: + s := proto.Size(x.HashValue) + n += proto.SizeVarint(6<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Data_ArrayValue: + s := proto.Size(x.ArrayValue) + n += proto.SizeVarint(7<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(s)) + n += s + case *Data_BinaryValue: + n += proto.SizeVarint(8<<3 | proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.BinaryValue))) + n += len(x.BinaryValue) + case *Data_Reference: + n += proto.SizeVarint(9<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Reference)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// `DataArray` is a wrapper around a repeated field of Data values. +type DataArray struct { + // Repeated field of dynamically typed values. 
+ Values []*Data `protobuf:"bytes,1,rep,name=values" json:"values,omitempty"` +} + +func (m *DataArray) Reset() { *m = DataArray{} } +func (m *DataArray) String() string { return proto.CompactTextString(m) } +func (*DataArray) ProtoMessage() {} +func (*DataArray) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *DataArray) GetValues() []*Data { + if m != nil { + return m.Values + } + return nil +} + +func init() { + proto.RegisterType((*DataHash)(nil), "puppet.datapb.DataHash") + proto.RegisterType((*DataEntry)(nil), "puppet.datapb.DataEntry") + proto.RegisterType((*Data)(nil), "puppet.datapb.Data") + proto.RegisterType((*DataArray)(nil), "puppet.datapb.DataArray") + proto.RegisterEnum("puppet.datapb.NullValue", NullValue_name, NullValue_value) +} + +func init() { proto.RegisterFile("datapb/data.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 414 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x5d, 0x8b, 0x9b, 0x40, + 0x14, 0x86, 0x9d, 0x9a, 0x75, 0xe3, 0x31, 0xbb, 0xb4, 0xd3, 0x8b, 0x0a, 0x85, 0x62, 0x2d, 0x0b, + 0x76, 0x4b, 0x0d, 0xa4, 0x37, 0x0b, 0x0b, 0x85, 0x5d, 0xba, 0xe0, 0x45, 0xc8, 0x85, 0x90, 0x5c, + 0x14, 0x4a, 0x18, 0x93, 0x31, 0xda, 0xd8, 0x51, 0xc6, 0xb1, 0xe0, 0x8f, 0xe9, 0x7f, 0x2d, 0xf3, + 0x11, 0x43, 0xa9, 0x7b, 0x25, 0xf3, 0x9e, 0xe7, 0x3d, 0xc7, 0xf3, 0x01, 0xaf, 0xf6, 0x44, 0x90, + 0x26, 0x9b, 0xcb, 0x4f, 0xdc, 0xf0, 0x5a, 0xd4, 0xf8, 0xaa, 0xe9, 0x9a, 0x86, 0x8a, 0x58, 0x47, + 0xc2, 0xaf, 0x30, 0xfd, 0x46, 0x04, 0x49, 0x48, 0x5b, 0xe0, 0x05, 0x5c, 0x52, 0x26, 0x78, 0x49, + 0x5b, 0x1f, 0x05, 0x76, 0xe4, 0x2d, 0xfc, 0xf8, 0x1f, 0x38, 0x96, 0xe4, 0x13, 0x13, 0xbc, 0x4f, + 0x4f, 0x60, 0xf8, 0x03, 0xdc, 0x41, 0xc5, 0x37, 0x60, 0x1f, 0x69, 0xef, 0xa3, 0x00, 0x45, 0xde, + 0xe2, 0xf5, 0x88, 0x39, 0x95, 0x71, 0xfc, 0x11, 0x2e, 0x7e, 0x93, 0xaa, 0xa3, 0xfe, 0x8b, 0xe7, + 0x41, 0x4d, 0x84, 0x7f, 0x6c, 0x98, 0xc8, 0x37, 0xbe, 0x07, 0xaf, 0x63, 0x7b, 0x9a, 0x6f, 0xb5, + 0x53, 0x96, 0xb8, 0xfe, 0xef, 0xff, 0x56, 0x5d, 0x55, 0x6d, 0x64, 0x3c, 0xb1, 0x52, 0x50, 0xb8, + 0x7a, 0xe1, 0x1b, 0xb8, 0x2a, 0x99, 0xa0, 0x07, 0xca, 0xb7, 0xe7, 0xc2, 0x76, 0x62, 0xa5, 0x33, + 0x23, 0x6b, 0xec, 0x3d, 0x78, 0x79, 0x55, 0x13, 0x61, 0x20, 0x3b, 0x40, 0x11, 0x92, 0x99, 0x94, + 0xa8, 0x91, 0x0f, 0x30, 0x6b, 0x05, 0x2f, 0xd9, 0xc1, 0x30, 0x93, 0x00, 0x45, 0x6e, 0x62, 0xa5, + 0x9e, 0x56, 0x87, 0x72, 0x59, 0x5d, 0x57, 0x94, 0x30, 0x43, 0x5d, 0x04, 0x28, 0x9a, 0xca, 0x72, + 0x46, 0xd6, 0xd8, 0x1d, 0x40, 0x41, 0xda, 0xc2, 0x30, 0x8e, 0x9a, 0xc5, 0x9b, 0x91, 0x59, 0xc8, + 0xdd, 0x24, 0x56, 0xea, 0x4a, 0x58, 0x3b, 0xef, 0xc1, 0x23, 0x9c, 0x93, 0xde, 0x58, 0x2f, 0x95, + 0x75, 0x6c, 0x59, 0x0f, 0x92, 0x92, 0x2d, 0x28, 0x7c, 0x68, 0x21, 0x2b, 0x19, 0xe1, 0x27, 0xf7, + 0x34, 0x40, 0xd1, 0x4c, 0xb6, 0xa0, 0x55, 0x0d, 0xbd, 0x03, 0x97, 0xd3, 0x9c, 0x72, 0xca, 0x76, + 0xd4, 0x77, 0xcd, 0xb4, 0xce, 0xd2, 0xa3, 0x03, 0x93, 0x63, 0xc9, 0xf6, 0xe1, 0x9d, 0x5e, 0xbf, + 0xaa, 0x83, 0x3f, 0x81, 0xa3, 0x52, 0x9e, 0xce, 0x67, 0x74, 0xb1, 0x06, 0xb9, 0x7d, 0x0b, 0xee, + 0xb0, 0x2e, 0x7c, 0x0d, 0xb0, 0x5a, 0x2f, 0x97, 0xdb, 0xcd, 0xc3, 0x72, 0xfd, 0xf4, 0xd2, 0x7a, + 0xbc, 0xfd, 0x1e, 0x1d, 0x4a, 0x51, 0x74, 0x59, 0xbc, 0xab, 0x7f, 0xcd, 0xab, 0x9e, 0x93, 0x86, + 0xd7, 0x3f, 0xd5, 0x19, 0x7f, 0x56, 0x67, 0x9c, 0x75, 0xf9, 0x5c, 0x67, 0xcd, 0x1c, 0x25, 0x7c, + 0xf9, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x34, 0xe5, 0x5c, 0xec, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/lyraproj/data-protobuf/datapb/reflect.go 
b/vendor/github.com/lyraproj/data-protobuf/datapb/reflect.go new file mode 100644 index 0000000..9505b03 --- /dev/null +++ b/vendor/github.com/lyraproj/data-protobuf/datapb/reflect.go @@ -0,0 +1,156 @@ +package datapb + +import ( + "fmt" + "reflect" +) + +// ToData converts reflect.Value to a datapb.Data +func ToData(v reflect.Value) (*Data, error) { + if !v.IsValid() { + return &Data{Kind: &Data_UndefValue{}}, nil + } + + switch v.Kind() { + case reflect.Bool: + return &Data{Kind: &Data_BooleanValue{v.Bool()}}, nil + case reflect.Float32, reflect.Float64: + return &Data{Kind: &Data_FloatValue{v.Float()}}, nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return &Data{Kind: &Data_IntegerValue{v.Int()}}, nil + case reflect.String: + return &Data{Kind: &Data_StringValue{v.String()}}, nil + case reflect.Slice, reflect.Array: + cnt := v.Len() + if v.Type().Elem().Kind() == reflect.Uint8 { + // []byte + return &Data{Kind: &Data_BinaryValue{v.Bytes()}}, nil + } + els := make([]*Data, cnt) + for i := 0; i < cnt; i++ { + elem, err := ToData(v.Index(i)) + if err != nil { + return nil, err + } + els[i] = elem + } + return &Data{Kind: &Data_ArrayValue{&DataArray{els}}}, nil + case reflect.Map: + cnt := v.Len() + els := make([]*DataEntry, cnt) + for i, k := range v.MapKeys() { + key, err := ToData(k) + if err != nil { + return nil, err + } + val, err := ToData(v.MapIndex(k)) + if err != nil { + return nil, err + } + els[i] = &DataEntry{key, val} + } + return &Data{Kind: &Data_HashValue{&DataHash{els}}}, nil + case reflect.Struct: + if v.Type().String() == `reflect.Value` { + return ToData(v.Interface().(reflect.Value)) + } + case reflect.Interface: + if v.Type() == interfaceType && v.Interface() == nil { + // The interface{} nil value represents a generic nil + return &Data{Kind: &Data_UndefValue{}}, nil + } + return ToData(v.Elem()) + } + return nil, fmt.Errorf(`unable to convert a value of kind '%s' and type '%s' to Data`, v.Kind(), v.Type().Name()) +} + +var interfaceType = reflect.TypeOf([]interface{}{}).Elem() +var GenericNilValue = reflect.Zero(interfaceType) +var InvalidValue = reflect.ValueOf(nil) + +// FromData converts a datapb.Data to a reflect.Value +func FromData(v *Data) (reflect.Value, error) { + if v.Kind == nil { + return GenericNilValue, nil + } + + switch v.Kind.(type) { + case *Data_BooleanValue: + return reflect.ValueOf(v.GetBooleanValue()), nil + case *Data_FloatValue: + return reflect.ValueOf(v.GetFloatValue()), nil + case *Data_IntegerValue: + return reflect.ValueOf(v.GetIntegerValue()), nil + case *Data_StringValue: + return reflect.ValueOf(v.GetStringValue()), nil + case *Data_UndefValue: + return GenericNilValue, nil + case *Data_BinaryValue: + return reflect.ValueOf(v.GetBinaryValue()), nil + case *Data_ArrayValue: + av := v.GetArrayValue().GetValues() + vals := make([]reflect.Value, len(av)) + var et reflect.Type = nil + for i, elem := range av { + rv, err := FromData(elem) + if err != nil { + return InvalidValue, err + } + rt := rv.Type() + if et == nil { + et = rt + } else if et != rt { + et = nil + } + vals[i] = rv + } + if et == nil { + et = interfaceType + } + return reflect.Append(reflect.MakeSlice(reflect.SliceOf(et), 0, len(vals)), vals...), nil + case *Data_HashValue: + av := v.GetHashValue().Entries + vals := make([]reflect.Value, len(av)) + keys := make([]reflect.Value, len(av)) + var kType reflect.Type = nil + var vType reflect.Type = nil + for i, elem := range av { + rv, err := FromData(elem.Key) + if err != nil { + return 
InvalidValue, err + } + keys[i] = rv + rt := rv.Type() + if kType == nil { + kType = rt + } else if kType != rt { + kType = nil + } + + rv, err = FromData(elem.Value) + if err != nil { + return InvalidValue, err + } + vals[i] = rv + rt = rv.Type() + if vType == nil { + vType = rt + } else if vType != rt { + vType = nil + } + } + if kType == nil { + kType = interfaceType + } + if vType == nil { + vType = interfaceType + } + hash := reflect.MakeMapWithSize(reflect.MapOf(kType, vType), len(vals)) + for i, k := range keys { + hash.SetMapIndex(k, vals[i]) + } + return hash, nil + default: + return InvalidValue, fmt.Errorf(`unable to convert a value of type '%T' to reflect.Value`, v.Kind) + } +} diff --git a/vendor/github.com/lyraproj/hiera/LICENSE b/vendor/github.com/lyraproj/hiera/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/lyraproj/hiera/hiera/hiera.go b/vendor/github.com/lyraproj/hiera/hiera/hiera.go new file mode 100644 index 0000000..82cb53f --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hiera/hiera.go @@ -0,0 +1,46 @@ +package hiera + +import ( + "context" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/hiera/internal" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" +) + +func NewInvocation(c px.Context, scope px.Keyed) hieraapi.Invocation { + return internal.NewInvocation(c, scope) +} + +func Lookup(ic hieraapi.Invocation, name string, dflt px.Value, options map[string]px.Value) px.Value { + return internal.Lookup(ic, name, dflt, options) +} + +// TryWithParent is like px.TryWithParent but enables lookup +func TryWithParent(parent context.Context, tp hieraapi.LookupKey, options map[string]px.Value, consumer func(px.Context) error) error { + return pcore.TryWithParent(parent, func(c px.Context) error { + internal.InitContext(c, tp, options) + return consumer(c) + }) +} + +// DoWithParent is like px.DoWithParent but enables lookup +func DoWithParent(parent context.Context, tp hieraapi.LookupKey, options map[string]px.Value, consumer func(px.Context)) { + pcore.DoWithParent(parent, func(c px.Context) { + internal.InitContext(c, tp, options) + consumer(c) + }) +} + +func Lookup2( + ic hieraapi.Invocation, + names []string, + valueType px.Type, + defaultValue px.Value, + override px.OrderedMap, + defaultValuesHash px.OrderedMap, + options map[string]px.Value, + block px.Lambda) px.Value { + return internal.Lookup2(ic, names, valueType, defaultValue, override, defaultValuesHash, options, block) +} diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/api.go b/vendor/github.com/lyraproj/hiera/hieraapi/api.go new file mode 100644 index 0000000..db1c3fe --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/api.go @@ -0,0 +1,124 @@ +package hieraapi + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type Kind string + +// HieraRoot is an option key that can be used to change the default root which is the current working directory +const HieraRoot = `Hiera::Root` + +// HieraConfigFileName is an option that can be used to change the default file name 'hiera.yaml' +const HieraConfigFileName = `Hiera::ConfigFileName` + +// HieraConfig is an option 
that can be used to change absolute path of the hiera config. When specified, the +// HieraRoot and HieraConfigFileName will not have any effect. +const HieraConfig = `Hiera::Config` + +const KindDataDig = Kind(`data_dig`) +const KindDataHash = Kind(`data_hash`) +const KindLookupKey = Kind(`lookup_key`) + +var FunctionKeys = []string{string(KindDataDig), string(KindDataHash), string(KindLookupKey)} + +var LocationKeys = []string{string(LcPath), `paths`, string(LcGlob), `globs`, string(LcUri), `uris`, string(LcMappedPaths)} + +var ReservedOptionKeys = []string{string(LcPath), string(LcUri)} + +type Function interface { + Kind() Kind + Name() string + Resolve(ic Invocation) (Function, bool) +} + +type Entry interface { + Copy(Config) Entry + Options() px.OrderedMap + DataDir() string + Function() Function +} + +type HierarchyEntry interface { + Entry + Name() string + Resolve(ic Invocation, defaults Entry) HierarchyEntry + CreateProvider() DataProvider + Locations() []Location +} + +type Config interface { + // Root returns the directory holding this Config + Root() string + + // Path is the full path to this Config + Path() string + + // Defaults returns the Defaults entry + Defaults() Entry + + // Hierarchy returns the configuration hierarchy slice + Hierarchy() []HierarchyEntry + + // DefaultHierarchy returns the default hierarchy slice + DefaultHierarchy() []HierarchyEntry + + // Resolve resolves this instance into a ResolveHierarchy. Resolving means creating the proper + // DataProviders for all Hierarchy entries + Resolve(ic Invocation) ResolvedConfig +} + +type ResolvedConfig interface { + // Config returns the original Config that the receiver was created from + Config() Config + + // Hierarchy returns the DataProvider slice + Hierarchy() []DataProvider + + // DefaultHierarchy returns the DataProvider slice for the configured default_hierarchy. + // The slice will be empty if no such hierarchy has been defined. + DefaultHierarchy() []DataProvider + + // LookupOptions returns the resolved lookup_options value for the given key or nil + // if no such options exists + LookupOptions(key Key) map[string]px.Value +} + +// An Invocation keeps track of one specific lookup invocation implements a guard against +// endless recursion +type Invocation interface { + px.Context + + Config() ResolvedConfig + + DoWithScope(scope px.Keyed, doer px.Doer) + + // Call doer and while it is executing, don't reveal any found values in logs + DoRedacted(doer px.Doer) + + // Execute the given function 'f' in an explanation context named by 'n' + WithExplanationContext(n string, f func()) + + // Explain will add the message returned by the given function to the + // lookup explainer. 
The method will only get called when the explanation + // support is enabled + Explain(messageProducer func() string) + + WithKey(key Key, value px.Producer) px.Value + WithDataProvider(dh DataProvider, value px.Producer) px.Value + WithLocation(loc Location, value px.Producer) px.Value + ReportLocationNotFound() + ReportFound(value px.Value) + ReportNotFound() +} + +// NotFound is the error that Hiera will panic with when a value cannot be found and no default +// value has been defined +var NotFound issue.Reported + +type DataDig func(ic ProviderContext, key Key, options map[string]px.Value) px.Value + +type DataHash func(ic ProviderContext, options map[string]px.Value) px.OrderedMap + +type LookupKey func(ic ProviderContext, key string, options map[string]px.Value) px.Value diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/dataprovider.go b/vendor/github.com/lyraproj/hiera/hieraapi/dataprovider.go new file mode 100644 index 0000000..b768c3f --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/dataprovider.go @@ -0,0 +1,8 @@ +package hieraapi + +import "github.com/lyraproj/pcore/px" + +type DataProvider interface { + UncheckedLookup(key Key, invocation Invocation, merge MergeStrategy) px.Value + FullName() string +} diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/issues.go b/vendor/github.com/lyraproj/hiera/hieraapi/issues.go new file mode 100644 index 0000000..ea4381f --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/issues.go @@ -0,0 +1,88 @@ +package hieraapi + +import ( + "fmt" + "strings" + + "github.com/lyraproj/issue/issue" +) + +const ( + DigMismatch = `HIERA_DIG_MISMATCH` + EmptyKeySegment = `HIERA_EMPTY_KEY_SEGMENT` + EndlessRecursion = `HIERA_ENDLESS_RECURSION` + FirstKeySegmentInt = `HIERA_FIRST_KEY_SEGMENT_INT` + HierarchyNameMultiplyDefined = `HIERA_HIERARCHY_NAME_MULTIPLY_DEFINED` + InterpolationAliasNotEntireString = `HIERA_INTERPOLATION_ALIAS_NOT_ENTIRE_STRING` + InterpolationMethodSyntaxNotAllowed = `HIERA_INTERPOLATION_METHOD_SYNTAX_NOT_ALLOWED` + JsonNotHash = `HIERA_JSON_NOT_HASH` + KeyNotFound = `HIERA_KEY_NOT_FOUND` + MissingDataProviderFunction = `HIERA_MISSING_DATA_PROVIDER_FUNCTION` + MissingRequiredOption = `HIERA_MISSING_REQUIRED_OPTION` + MultipleDataProviderFunctions = `HIERA_MULTIPLE_DATA_PROVIDER_FUNCTIONS` + MultipleLocationSpecs = `HIERA_MULTIPLE_LOCATION_SPECS` + NameNotFound = `HIERA_NAME_NOT_FOUND` + NotAnyNameFound = `HIERA_NOT_ANY_NAME_FOUND` + NotInitialized = `HIERA_NOT_INITIALIZED` + OptionReservedByHiera = `HIERA_OPTION_RESERVED_BY_HIERA` + UnterminatedQuote = `HIERA_UNTERMINATED_QUOTE` + UnknownInterpolationMethod = `HIERA_UNKNOWN_INTERPOLATION_METHOD` + UnknownMergeStrategy = `HIERA_UNKNOWN_MERGE_STRATEGY` + YamlNotHash = `HIERA_YAML_NOT_HASH` +) + +func joinNames(v interface{}) string { + if names, ok := v.([]string); ok { + return strings.Join(names, `, `) + } + return fmt.Sprintf("%v", v) +} + +func init() { + issue.Hard(DigMismatch, + `lookup() Got %{type} when a hash-like object was expected to access value using '%{segment}' from key '%{key}'`) + + issue.Hard(EmptyKeySegment, `lookup() key '%{key}' contains an empty segment`) + + issue.Hard2(EndlessRecursion, `Recursive lookup detected in [%{name_stack}]`, issue.HF{`name_stack`: joinNames}) + + issue.Hard(FirstKeySegmentInt, `lookup() key '%{key}' first segment cannot be an index`) + + issue.Hard(HierarchyNameMultiplyDefined, `Hierarchy name '%{name}' defined more than once`) + + issue.Hard(InterpolationAliasNotEntireString, `'alias' interpolation is only 
permitted if the expression is equal to the entire string`) + + issue.Hard(InterpolationMethodSyntaxNotAllowed, `Interpolation using method syntax is not allowed in this context`) + + issue.Hard(JsonNotHash, `File '%{path}' does not contain a JSON object`) + + issue.Hard(KeyNotFound, `key not found`) + + issue.Hard2(MissingDataProviderFunction, `One of %{keys} must be defined in hierarchy '%{name}'`, + issue.HF{`keys`: joinNames}) + + issue.Hard(MissingRequiredOption, `Missing required provider option '%{option}'`) + + issue.Hard2(MultipleDataProviderFunctions, `Only one of %{keys} can be defined in hierarchy '%{name}'`, + issue.HF{`keys`: joinNames}) + + issue.Hard2(MultipleLocationSpecs, `Only one of %{keys} can be defined in hierarchy '%{name}'`, + issue.HF{`keys`: joinNames}) + + issue.Hard(NameNotFound, `lookup() did not find a value for the name '%{name}'`) + + issue.Hard2(NotAnyNameFound, `lookup() did not find a value for any of the names [%{name_list}]`, + issue.HF{`name_list`: joinNames}) + + issue.Hard(NotInitialized, `Given px.Context is not initialized with Hiera`) + + issue.Hard(OptionReservedByHiera, `Option key '%{key}' used in hierarchy '%{name}' is reserved by Hiera`) + + issue.Hard(UnknownInterpolationMethod, `Unknown interpolation method '%{name}'`) + + issue.Hard(UnknownMergeStrategy, `Unknown merge strategy '%{name}'`) + + issue.Hard(UnterminatedQuote, `Unterminated quote in key '%{key}'`) + + issue.Hard(YamlNotHash, `File '%{path}' does not contain a YAML hash`) +} diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/key.go b/vendor/github.com/lyraproj/hiera/hieraapi/key.go new file mode 100644 index 0000000..be542f8 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/key.go @@ -0,0 +1,31 @@ +package hieraapi + +import ( + "fmt" + + "github.com/lyraproj/pcore/px" +) + +// A Key is a parsed version of the possibly dot-separated key to lookup. The +// parts of a key will be strings or integers +type Key interface { + fmt.Stringer + + // Return the result of using this key to dig into the given value. Nil is returned + // unless the dig was a success + Dig(px.Value) px.Value + + // Bury is the opposite of Dig. It returns the value that represents what would be found + // using the root of this key. If this key has one part, the value itself is returned, otherwise + // a nested chain of single entry hashes is returned. + Bury(px.Value) px.Value + + // Return the parts of this key. Each part is either a string or an int value + Parts() []interface{} + + // Return the root key, i.e. the first part. 
+ Root() string +} + +// NewKey parses the given string into a Key +var NewKey func(str string) Key diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/location.go b/vendor/github.com/lyraproj/hiera/hieraapi/location.go new file mode 100644 index 0000000..713a21b --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/location.go @@ -0,0 +1,19 @@ +package hieraapi + +import "fmt" + +type LocationKind string + +const LcPath = LocationKind(`path`) +const LcUri = LocationKind(`uri`) +const LcGlob = LocationKind(`glob`) +const LcMappedPaths = LocationKind(`mapped_paths`) + +type Location interface { + fmt.Stringer + Kind() LocationKind + Exist() bool + Resolve(ic Invocation, dataDir string) []Location + Original() string + Resolved() string +} diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/mergestrategy.go b/vendor/github.com/lyraproj/hiera/hieraapi/mergestrategy.go new file mode 100644 index 0000000..e2b6578 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/mergestrategy.go @@ -0,0 +1,17 @@ +package hieraapi + +import ( + "github.com/lyraproj/pcore/px" +) + +// GetMergeStrategy returns the MergeStrategy that corresponds to the given name. The +// options argument is only applicable to deep merge +var GetMergeStrategy func(name string, options map[string]px.Value) MergeStrategy + +// MergeStrategy is responsible for merging or prioritizing the result of several lookups into one. +type MergeStrategy interface { + // Lookup performs a series of lookups for each variant found in the given variants slice. The actual + // lookup value is returned by the given value function which will be called at least once. The argument to + // the value function will be an element of the variants slice. + Lookup(variants interface{}, invocation Invocation, value func(location interface{}) px.Value) px.Value +} diff --git a/vendor/github.com/lyraproj/hiera/hieraapi/providercontext.go b/vendor/github.com/lyraproj/hiera/hieraapi/providercontext.go new file mode 100644 index 0000000..075e201 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/hieraapi/providercontext.go @@ -0,0 +1,43 @@ +package hieraapi + +import ( + "github.com/lyraproj/pcore/px" +) + +// A Context provides a local cache and utility functions to a provider function +type ProviderContext interface { + px.PuppetObject + px.CallableObject + + // NotFound should be called by a function to indicate that a specified key + // was not found. This is different from returning an Undef since undef is + // a valid value for a key. + // + // This method will panic with an internal value that is recovered by the + // Lookup logic. There is no return from this method. + NotFound() + + // Explain will add the message returned by the given function to the + // lookup explainer. 
The method will only get called when the explanation + // support is enabled + Explain(messageProducer func() string) + + // Cache adds the given key - value association to the cache + Cache(key string, value px.Value) px.Value + + // CacheAll adds all key - value associations in the given hash to the cache + CacheAll(hash px.OrderedMap) + + // CachedEntry returns the value for the given key together with + // a boolean to indicate if the value was found or not + CachedValue(key string) (px.Value, bool) + + // CachedEntries calls the consumer with each entry in the cache + CachedEntries(consumer px.BiConsumer) + + // Interpolate resolves interpolations in the given value and returns the result + Interpolate(value px.Value) px.Value + + // Invocation returns the active invocation. + Invocation() Invocation +} diff --git a/vendor/github.com/lyraproj/hiera/internal/config.go b/vendor/github.com/lyraproj/hiera/internal/config.go new file mode 100644 index 0000000..04d95c9 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/config.go @@ -0,0 +1,369 @@ +package internal + +import ( + "fmt" + "path/filepath" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" + "github.com/lyraproj/pcore/yaml" + + // Ensure that pcore is initialized + _ "github.com/lyraproj/pcore/pcore" +) + +type function struct { + kind hieraapi.Kind + name string +} + +func (f *function) Kind() hieraapi.Kind { + return f.kind +} + +func (f *function) Name() string { + return f.name +} + +func (f *function) Resolve(ic hieraapi.Invocation) (hieraapi.Function, bool) { + if n, changed := interpolateString(ic, f.name, false); changed { + return &function{f.kind, n.String()}, true + } + return f, false +} + +type entry struct { + cfg *hieraCfg + dataDir string + options px.OrderedMap + function hieraapi.Function +} + +func (e *entry) Options() px.OrderedMap { + return e.options +} + +func (e *entry) DataDir() string { + return e.dataDir +} + +func (e *entry) Function() hieraapi.Function { + return e.function +} + +func (e *entry) initialize(ic hieraapi.Invocation, name string, entryHash *types.Hash) { + entryHash.EachPair(func(k, v px.Value) { + ks := k.String() + if ks == `options` { + e.options = v.(*types.Hash) + e.options.EachKey(func(optKey px.Value) { + if utils.ContainsString(hieraapi.ReservedOptionKeys, optKey.String()) { + panic(px.Error(hieraapi.OptionReservedByHiera, issue.H{`key`: optKey.String(), `name`: name})) + } + }) + } else if utils.ContainsString(hieraapi.FunctionKeys, ks) { + if e.function != nil { + panic(px.Error(hieraapi.MultipleDataProviderFunctions, issue.H{`keys`: hieraapi.FunctionKeys, `name`: name})) + } + e.function = &function{hieraapi.Kind(ks), v.String()} + } + }) +} + +func (e *entry) Copy(cfg hieraapi.Config) hieraapi.Entry { + c := *e + c.cfg = cfg.(*hieraCfg) + return &c +} + +type hieraEntry struct { + entry + name string + locations []hieraapi.Location +} + +func (e *hieraEntry) Copy(cfg hieraapi.Config) hieraapi.Entry { + c := *e + c.cfg = cfg.(*hieraCfg) + return &c +} + +func (e *hieraEntry) Name() string { + return e.name +} + +func (e *hieraEntry) Locations() []hieraapi.Location { + return e.locations +} + +func (e *hieraEntry) CreateProvider() hieraapi.DataProvider { + switch e.function.Kind() { + case hieraapi.KindDataHash: + return newDataHashProvider(e) + case hieraapi.KindDataDig: + return newDataDigProvider(e) + default: + return 
newLookupKeyProvider(e) + } +} + +func (e *hieraEntry) Resolve(ic hieraapi.Invocation, defaults hieraapi.Entry) hieraapi.HierarchyEntry { + // Resolve interpolated strings and locations + ce := *e + + if ce.function == nil { + ce.function = defaults.Function() + } else if f, fc := ce.function.Resolve(ic); fc { + ce.function = f + } + + if ce.function == nil { + panic(px.Error(hieraapi.MissingDataProviderFunction, issue.H{`keys`: hieraapi.FunctionKeys, `name`: e.name})) + } + + if ce.dataDir == `` { + ce.dataDir = defaults.DataDir() + } else { + if d, dc := interpolateString(ic, ce.dataDir, false); dc { + ce.dataDir = d.String() + } + } + + if ce.options == nil { + ce.options = defaults.Options() + } else if ce.options.Len() > 0 { + if o, oc := doInterpolate(ic, ce.options, false); oc { + ce.options = o.(*types.Hash) + } + } + + dataRoot := filepath.Join(e.cfg.root, ce.dataDir) + if ce.locations != nil { + ne := make([]hieraapi.Location, 0, len(ce.locations)) + for _, l := range ce.locations { + ne = append(ne, l.Resolve(ic, dataRoot)...) + } + ce.locations = ne + } + + return &ce +} + +type hieraCfg struct { + root string + path string + defaults hieraapi.Entry + hierarchy []hieraapi.HierarchyEntry + defaultHierarchy []hieraapi.HierarchyEntry +} + +func NewConfig(ic hieraapi.Invocation, configPath string) hieraapi.Config { + b, ok := types.BinaryFromFile2(configPath) + if !ok { + dc := &hieraCfg{ + root: filepath.Dir(configPath), + path: ``, + defaultHierarchy: []hieraapi.HierarchyEntry{}, + } + dc.defaults = dc.makeDefaultConfig() + dc.hierarchy = dc.makeDefaultHierarchy() + return dc + } + + cfgType := ic.ParseType(`Hiera::Config`) + yv := yaml.Unmarshal(ic, b.Bytes()) + + return createConfig(ic, configPath, px.AssertInstance(func() string { + return fmt.Sprintf(`The Lookup Configuration at '%s'`, configPath) + }, cfgType, yv).(*types.Hash)) +} + +func createConfig(ic hieraapi.Invocation, path string, hash *types.Hash) hieraapi.Config { + cfg := &hieraCfg{root: filepath.Dir(path), path: path} + + if dv, ok := hash.Get4(`defaults`); ok { + cfg.defaults = cfg.createDefaultsEntry(ic, dv.(*types.Hash)) + } else { + cfg.defaults = cfg.makeDefaultConfig() + } + + if hv, ok := hash.Get4(`hierarchy`); ok { + cfg.hierarchy = cfg.createHierarchy(ic, hv.(*types.Array)) + } else { + cfg.hierarchy = cfg.makeDefaultHierarchy() + } + + if hv, ok := hash.Get4(`default_hierarchy`); ok { + cfg.defaultHierarchy = cfg.createHierarchy(ic, hv.(*types.Array)) + } + + return cfg +} + +func (hc *hieraCfg) makeDefaultConfig() *entry { + return &entry{cfg: hc, dataDir: `data`, function: &function{kind: hieraapi.KindDataHash, name: `yaml_data`}} +} + +func (hc *hieraCfg) makeDefaultHierarchy() []hieraapi.HierarchyEntry { + return []hieraapi.HierarchyEntry{ + // The lyra default behavior is to look for a /data.yaml. Hiera root is the current directory. 
+ &hieraEntry{entry: entry{cfg: hc, dataDir: `.`}, name: `Root`, locations: []hieraapi.Location{&path{original: `data.yaml`}}}, + // Hiera proper default behavior is to look for /data/common.yaml + &hieraEntry{entry: entry{cfg: hc}, name: `Common`, locations: []hieraapi.Location{&path{original: `common.yaml`}}}} +} + +func (hc *hieraCfg) Resolve(ic hieraapi.Invocation) (cfg hieraapi.ResolvedConfig) { + r := &resolvedConfig{config: hc} + r.Resolve(ic) + cfg = r + + ms := hieraapi.GetMergeStrategy(`deep`, nil) + k := newKey(`lookup_options`) + v := ms.Lookup(r.Hierarchy(), ic, func(prv interface{}) px.Value { + pr := prv.(hieraapi.DataProvider) + return pr.UncheckedLookup(k, ic, ms) + }) + if lm, ok := v.(px.OrderedMap); ok { + lo := make(map[string]map[string]px.Value, lm.Len()) + lm.EachPair(func(k, v px.Value) { + if km, ok := v.(px.OrderedMap); ok { + ko := make(map[string]px.Value, km.Len()) + lo[k.String()] = ko + km.EachPair(func(k, v px.Value) { + ko[k.String()] = v + }) + } + }) + r.lookupOptions = lo + } + return r +} + +func (hc *hieraCfg) Hierarchy() []hieraapi.HierarchyEntry { + return hc.hierarchy +} + +func (hc *hieraCfg) DefaultHierarchy() []hieraapi.HierarchyEntry { + return hc.defaultHierarchy +} + +func (hc *hieraCfg) Root() string { + return hc.root +} + +func (hc *hieraCfg) Path() string { + return hc.path +} + +func (hc *hieraCfg) Defaults() hieraapi.Entry { + return hc.defaults +} + +func (hc *hieraCfg) CreateProviders(ic hieraapi.Invocation, hierarchy []hieraapi.HierarchyEntry) []hieraapi.DataProvider { + providers := make([]hieraapi.DataProvider, len(hierarchy)) + var defaults hieraapi.Entry + if hdf, ok := hc.defaults.(*hieraEntry); ok { + defaults = hdf.Resolve(ic, nil) + } else { + defaults = hc.defaults.Copy(hc) + } + for i, he := range hierarchy { + providers[i] = he.(*hieraEntry).Resolve(ic, defaults).CreateProvider() + } + return providers +} + +func (hc *hieraCfg) createHierarchy(ic hieraapi.Invocation, hierarchy *types.Array) []hieraapi.HierarchyEntry { + entries := make([]hieraapi.HierarchyEntry, 0, hierarchy.Len()) + uniqueNames := make(map[string]bool, hierarchy.Len()) + hierarchy.Each(func(hv px.Value) { + hh := hv.(*types.Hash) + name := hh.Get5(`name`, px.EmptyString).String() + if uniqueNames[name] { + panic(px.Error(hieraapi.HierarchyNameMultiplyDefined, issue.H{`name`: name})) + } + uniqueNames[name] = true + entries = append(entries, hc.createHierarchyEntry(ic, name, hh)) + }) + return entries +} + +func (hc *hieraCfg) createDefaultsEntry(ic hieraapi.Invocation, entryHash *types.Hash) hieraapi.Entry { + defaults := &entry{cfg: hc} + defaults.initialize(ic, `defaults`, entryHash) + return defaults +} + +func (hc *hieraCfg) createHierarchyEntry(ic hieraapi.Invocation, name string, entryHash *types.Hash) hieraapi.HierarchyEntry { + entry := &hieraEntry{entry: entry{cfg: hc}, name: name} + entry.initialize(ic, name, entryHash) + entryHash.EachPair(func(k, v px.Value) { + ks := k.String() + if ks == `data_dir` { + entry.dataDir = v.String() + } + if utils.ContainsString(hieraapi.LocationKeys, ks) { + if entry.locations != nil { + panic(px.Error(hieraapi.MultipleLocationSpecs, issue.H{`keys`: hieraapi.LocationKeys, `name`: name})) + } + switch ks { + case `path`: + entry.locations = []hieraapi.Location{&path{original: v.String()}} + case `paths`: + a := v.(*types.Array) + entry.locations = make([]hieraapi.Location, 0, a.Len()) + a.Each(func(p px.Value) { entry.locations = append(entry.locations, &path{original: p.String()}) }) + case `glob`: + 
entry.locations = []hieraapi.Location{&glob{v.String()}} + case `globs`: + a := v.(*types.Array) + entry.locations = make([]hieraapi.Location, 0, a.Len()) + a.Each(func(p px.Value) { entry.locations = append(entry.locations, &glob{p.String()}) }) + case `uri`: + entry.locations = []hieraapi.Location{&uri{original: v.String()}} + case `uris`: + a := v.(*types.Array) + entry.locations = make([]hieraapi.Location, 0, a.Len()) + a.Each(func(p px.Value) { entry.locations = append(entry.locations, &uri{original: p.String()}) }) + default: // Mapped paths + a := v.(*types.Array) + entry.locations = []hieraapi.Location{&mappedPaths{a.At(0).String(), a.At(1).String(), a.At(2).String()}} + } + } + }) + return entry +} + +type resolvedConfig struct { + config *hieraCfg + providers []hieraapi.DataProvider + defaultProviders []hieraapi.DataProvider + lookupOptions map[string]map[string]px.Value +} + +func (r *resolvedConfig) Config() hieraapi.Config { + return r.config +} + +func (r *resolvedConfig) Hierarchy() []hieraapi.DataProvider { + return r.providers +} + +func (r *resolvedConfig) DefaultHierarchy() []hieraapi.DataProvider { + return r.defaultProviders +} + +func (r *resolvedConfig) LookupOptions(key hieraapi.Key) map[string]px.Value { + if r.lookupOptions != nil { + return r.lookupOptions[key.Root()] + } + return nil +} + +func (r *resolvedConfig) Resolve(ic hieraapi.Invocation) { + r.providers = r.config.CreateProviders(ic, r.config.Hierarchy()) + r.defaultProviders = r.config.CreateProviders(ic, r.config.DefaultHierarchy()) +} diff --git a/vendor/github.com/lyraproj/hiera/internal/datadigprovider.go b/vendor/github.com/lyraproj/hiera/internal/datadigprovider.go new file mode 100644 index 0000000..834a4b4 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/datadigprovider.go @@ -0,0 +1,94 @@ +package internal + +import ( + "fmt" + "sync" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type DataDigProvider struct { + function hieraapi.Function + locations []hieraapi.Location + providerFunc hieraapi.DataDig + hashes *sync.Map +} + +func (dh *DataDigProvider) UncheckedLookup(key hieraapi.Key, invocation hieraapi.Invocation, merge hieraapi.MergeStrategy) px.Value { + return invocation.WithDataProvider(dh, func() px.Value { + return merge.Lookup(dh.locations, invocation, func(location interface{}) px.Value { + return dh.invokeWithLocation(invocation, location.(hieraapi.Location), key) + }) + }) +} + +func (dh *DataDigProvider) invokeWithLocation(invocation hieraapi.Invocation, location hieraapi.Location, key hieraapi.Key) px.Value { + var v px.Value + if location == nil { + v = dh.lookupKey(invocation, nil, key) + } else { + v = invocation.WithLocation(location, func() px.Value { + if location.Exist() { + return dh.lookupKey(invocation, location, key) + } + invocation.ReportLocationNotFound() + return nil + }) + } + if v != nil { + v = key.Bury(v) + } + return v +} + +func (dh *DataDigProvider) lookupKey(ic hieraapi.Invocation, location hieraapi.Location, key hieraapi.Key) px.Value { + cacheKey := `` + opts := NoOptions + if location != nil { + cacheKey = location.Resolved() + opts = map[string]px.Value{`path`: types.WrapString(cacheKey)} + } + + cache, _ := dh.hashes.LoadOrStore(cacheKey, &sync.Map{}) + value := dh.providerFunction(ic)(newProviderContext(ic, cache.(*sync.Map)), key, opts) + if value != nil { + ic.ReportFound(value) + } + return value +} + +func (dh *DataDigProvider) providerFunction(ic 
hieraapi.Invocation) (pf hieraapi.DataDig) { + if dh.providerFunc == nil { + n := dh.function.Name() + // Load lookup provider function using the standard loader + if f, ok := px.Load(ic, px.NewTypedName(px.NsFunction, n)); ok { + dh.providerFunc = func(pc hieraapi.ProviderContext, key hieraapi.Key, options map[string]px.Value) px.Value { + defer catchNotFound() + return f.(px.Function).Call(ic, nil, []px.Value{pc, px.Wrap(ic, key.Parts()), px.Wrap(ic, options)}...) + } + } else { + ic.Explain(func() string { + return fmt.Sprintf(`unresolved function '%s'`, n) + }) + dh.providerFunc = func(pc hieraapi.ProviderContext, key hieraapi.Key, options map[string]px.Value) px.Value { + return nil + } + } + } + return dh.providerFunc +} + +func (dh *DataDigProvider) FullName() string { + return fmt.Sprintf(`data_dig function '%s'`, dh.function.Name()) +} + +func newDataDigProvider(he hieraapi.HierarchyEntry) hieraapi.DataProvider { + ls := he.Locations() + return &DataDigProvider{ + function: he.Function(), + locations: ls, + hashes: &sync.Map{}, + } +} diff --git a/vendor/github.com/lyraproj/hiera/internal/datahashprovider.go b/vendor/github.com/lyraproj/hiera/internal/datahashprovider.go new file mode 100644 index 0000000..3534588 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/datahashprovider.go @@ -0,0 +1,145 @@ +package internal + +import ( + "fmt" + "sync" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/hiera/provider" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type DataHashProvider struct { + function hieraapi.Function + locations []hieraapi.Location + providerFunc hieraapi.DataHash + hashes map[string]px.OrderedMap + hashesLock sync.RWMutex +} + +func (dh *DataHashProvider) UncheckedLookup(key hieraapi.Key, invocation hieraapi.Invocation, merge hieraapi.MergeStrategy) px.Value { + return invocation.WithDataProvider(dh, func() px.Value { + return merge.Lookup(dh.locations, invocation, func(location interface{}) px.Value { + return dh.invokeWithLocation(invocation, location.(hieraapi.Location), key.Root()) + }) + }) +} + +func (dh *DataHashProvider) invokeWithLocation(invocation hieraapi.Invocation, location hieraapi.Location, root string) px.Value { + if location == nil { + return dh.lookupKey(invocation, nil, root) + } + return invocation.WithLocation(location, func() px.Value { + if location.Exist() { + return dh.lookupKey(invocation, location, root) + } + invocation.ReportLocationNotFound() + return nil + }) +} + +func (dh *DataHashProvider) lookupKey(invocation hieraapi.Invocation, location hieraapi.Location, root string) px.Value { + if value := dh.dataValue(invocation, location, root); value != nil { + invocation.ReportFound(value) + return value + } + return nil +} + +func (dh *DataHashProvider) dataValue(ic hieraapi.Invocation, location hieraapi.Location, root string) px.Value { + hash := dh.dataHash(ic, location) + value, found := hash.Get4(root) + if !found { + return nil + } + + pfx := func() string { + msg := fmt.Sprintf(`Value for key '%s' in hash returned from %s`, root, dh.FullName()) + if location != nil { + msg = fmt.Sprintf(`%s, when using location '%s'`, msg, location) + } + return msg + } + + value = px.AssertInstance(pfx, types.DefaultRichDataType(), value) + return Interpolate(ic, value, true) +} + +func (dh *DataHashProvider) providerFunction(ic hieraapi.Invocation) (pf hieraapi.DataHash) { + if dh.providerFunc == nil { + n := dh.function.Name() + if n == `yaml_data` { + // Shortcut. 
No need to go through pcore calling mechanism + dh.providerFunc = provider.YamlData + return dh.providerFunc + } + if n == `json_data` { + // Shortcut. No need to go through pcore calling mechanism + dh.providerFunc = provider.JsonData + return dh.providerFunc + } + + // Load lookup provider function using the standard loader + if f, ok := px.Load(ic, px.NewTypedName(px.NsFunction, dh.function.Name())); ok { + dh.providerFunc = func(pc hieraapi.ProviderContext, options map[string]px.Value) (value px.OrderedMap) { + value = px.EmptyMap + defer catchNotFound() + v := f.(px.Function).Call(ic, nil, []px.Value{pc, px.Wrap(ic, options)}...) + if dv, ok := v.(px.OrderedMap); ok { + value = dv + } + return + } + } else { + ic.Explain(func() string { + return fmt.Sprintf(`unresolved function '%s'`, dh.function.Name()) + }) + dh.providerFunc = func(pc hieraapi.ProviderContext, options map[string]px.Value) px.OrderedMap { + return px.EmptyMap + } + } + } + return dh.providerFunc +} + +func (dh *DataHashProvider) dataHash(ic hieraapi.Invocation, location hieraapi.Location) (hash px.OrderedMap) { + key := `` + opts := NoOptions + if location != nil { + key = location.Resolved() + opts = map[string]px.Value{`path`: types.WrapString(key)} + } + + var ok bool + dh.hashesLock.RLock() + hash, ok = dh.hashes[key] + dh.hashesLock.RUnlock() + if ok { + return + } + + dh.hashesLock.Lock() + defer dh.hashesLock.Unlock() + + if hash, ok = dh.hashes[key]; ok { + return hash + } + hash = dh.providerFunction(ic)(newProviderContext(ic, &sync.Map{}), opts) + dh.hashes[key] = hash + return +} + +func (dh *DataHashProvider) FullName() string { + return fmt.Sprintf(`data_hash function '%s'`, dh.function.Name()) +} + +func newDataHashProvider(he hieraapi.HierarchyEntry) hieraapi.DataProvider { + ls := he.Locations() + return &DataHashProvider{ + function: he.Function(), + locations: ls, + hashes: make(map[string]px.OrderedMap, len(ls)), + } +} diff --git a/vendor/github.com/lyraproj/hiera/internal/deepmerge.go b/vendor/github.com/lyraproj/hiera/internal/deepmerge.go new file mode 100644 index 0000000..8567179 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/deepmerge.go @@ -0,0 +1,65 @@ +package internal + +import ( + "github.com/lyraproj/pcore/types" + + "github.com/lyraproj/pcore/px" +) + +// DeepMerge will merge the values 'a' and 'b' if both values are hashes or both values are +// arrays. When this is not the case, no merge takes place and the 'a' argument is returned. +// The second bool return value true if a merge took place and false when the first argument +// is returned. +// +// When both values are hashes, DeepMerge is called recursively entries with identical keys. +// When both values are arrays, the merge creates a union of the unique elements from the two arrays. +// No recursive merge takes place for the array elements. 
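(Illustrative sketch, not part of the vendored file: the merge semantics described in the comment above, exercised from a hypothetical helper inside this same package. It uses only identifiers that appear elsewhere in this patch, i.e. pcore.DoWithParent, px.Wrap, DeepMerge and NoOptions; the exact shape of the values produced by px.Wrap for plain Go maps and slices is an assumption based on how it is used in the surrounding code.)

package internal

import (
	"context"
	"fmt"

	"github.com/lyraproj/pcore/pcore"
	"github.com/lyraproj/pcore/px"
)

// deepMergeSketch shows the documented behavior: two hashes are merged,
// entries with identical keys are merged recursively, and array values
// become the union of their unique elements.
func deepMergeSketch() {
	pcore.DoWithParent(context.Background(), func(c px.Context) {
		a := px.Wrap(c, map[string]interface{}{`colors`: []string{`red`}, `size`: 1})
		b := px.Wrap(c, map[string]interface{}{`colors`: []string{`blue`}, `shape`: `round`})
		merged, changed := DeepMerge(a, b, NoOptions)
		// changed is true because both values are hashes; merged is expected to
		// contain colors => [red, blue], size => 1, shape => round.
		fmt.Println(changed, merged)
	})
}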
+func DeepMerge(a, b px.Value, options map[string]px.Value) (px.Value, bool) { + switch a := a.(type) { + case *types.Hash: + if hb, ok := b.(*types.Hash); ok { + es := make([]*types.HashEntry, 0, a.Len()+hb.Len()) + mergeHappened := false + a.Each(func(ev px.Value) { + e := ev.(*types.HashEntry) + if bv, ok := hb.Get(e.Key()); ok { + if m, mh := DeepMerge(e.Value(), bv, options); mh { + es = append(es, types.WrapHashEntry(e.Key(), m)) + mergeHappened = true + return + } + } + es = append(es, e) + }) + hb.Each(func(ev px.Value) { + e := ev.(*types.HashEntry) + if !a.IncludesKey(e.Key()) { + mergeHappened = true + es = append(es, e) + } + }) + if mergeHappened { + return types.WrapHash(es), true + } + } + + case *types.Array: + if ab, ok := b.(*types.Array); ok && ab.Len() > 0 { + if a.Len() == 0 { + return ab, true + } + es := a.AppendTo(make([]px.Value, 0, a.Len()+ab.Len())) + mergeHappened := false + ab.Each(func(e px.Value) { + if !a.Any(func(v px.Value) bool { return v.Equals(e, nil) }) { + es = append(es, e) + mergeHappened = true + } + }) + if mergeHappened { + return types.WrapValues(es), true + } + } + } + return a, false +} diff --git a/vendor/github.com/lyraproj/hiera/internal/init.go b/vendor/github.com/lyraproj/hiera/internal/init.go new file mode 100644 index 0000000..c4ee7ae --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/init.go @@ -0,0 +1,111 @@ +package internal + +import ( + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/types" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" +) + +var NoOptions = map[string]px.Value{} + +func init() { + px.RegisterResolvableType(px.NewNamedType(`Hiera`, `TypeSet[{ + pcore_version => '1.0.0', + version => '5.0.0', + types => { + Options => Hash[Pattern[/\A[A-Za-z](:?[0-9A-Za-z_-]*[0-9A-Za-z])?\z/], Data], + Defaults => Struct[{ + Optional[options] => Options, + Optional[data_dig] => String[1], + Optional[data_hash] => String[1], + Optional[lookup_key] => String[1], + Optional[data_dir] => String[1], + }], + Entry => Struct[{ + name => String[1], + Optional[options] => Options, + Optional[data_dig] => String[1], + Optional[data_hash] => String[1], + Optional[lookup_key] => String[1], + Optional[data_dir] => String[1], + Optional[path] => String[1], + Optional[paths] => Array[String[1], 1], + Optional[glob] => String[1], + Optional[globs] => Array[String[1], 1], + Optional[uri] => String[1], + Optional[uris] => Array[String[1], 1], + Optional[mapped_paths] => Array[String[1], 3, 3], + }], + Config => Struct[{ + version => Integer[5, 5], + Optional[defaults] => Defaults, + Optional[hierarchy] => Array[Entry], + Optional[default_hierarchy] => Array[Entry] + }] + } + }]`).(px.ResolvableType)) + + pcore.DefineSetting(`hiera_config`, types.DefaultStringType(), nil) + + hieraapi.NotFound = px.Error(hieraapi.KeyNotFound, issue.NoArgs) + + hieraapi.NewKey = newKey +} + +func Lookup(ic hieraapi.Invocation, name string, dflt px.Value, options map[string]px.Value) px.Value { + return Lookup2(ic, []string{name}, types.DefaultAnyType(), dflt, px.EmptyMap, px.EmptyMap, options, nil) +} + +func Lookup2( + ic hieraapi.Invocation, + names []string, + valueType px.Type, + defaultValue px.Value, + override px.OrderedMap, + defaultValuesHash px.OrderedMap, + options map[string]px.Value, + block px.Lambda) px.Value { + if override == nil { + override = px.EmptyMap + } + if defaultValuesHash == nil { + defaultValuesHash = px.EmptyMap + } + + if options == nil { + options = 
NoOptions + } + + for _, name := range names { + if ov, ok := override.Get4(name); ok { + return ov + } + key := newKey(name) + v := ic.WithKey(key, func() px.Value { + return ic.(*invocation).lookupViaCache(key, options) + }) + if v != nil { + return v + } + } + + if defaultValuesHash.Len() > 0 { + for _, name := range names { + if dv, ok := defaultValuesHash.Get4(name); ok { + return dv + } + } + } + + if defaultValue == nil { + // nil (as opposed to UNDEF) means that no default was provided. + if len(names) == 1 { + panic(px.Error(hieraapi.NameNotFound, issue.H{`name`: names[0]})) + } + panic(px.Error(hieraapi.NotAnyNameFound, issue.H{`name_list`: names})) + } + return defaultValue +} diff --git a/vendor/github.com/lyraproj/hiera/internal/interpolate.go b/vendor/github.com/lyraproj/hiera/internal/interpolate.go new file mode 100644 index 0000000..e596239 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/interpolate.go @@ -0,0 +1,154 @@ +package internal + +import ( + "regexp" + "strings" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +var iplPattern = regexp.MustCompile(`%{[^}]*}`) +var emptyInterpolations = map[string]bool{ + ``: true, + `::`: true, + `""`: true, + "''": true, + `"::"`: true, + "'::'": true, +} + +// Interpolate resolves interpolations in the given value and returns the result +func Interpolate(ic hieraapi.Invocation, value px.Value, allowMethods bool) px.Value { + result, _ := doInterpolate(ic, value, allowMethods) + return result +} + +func doInterpolate(ic hieraapi.Invocation, value px.Value, allowMethods bool) (px.Value, bool) { + if s, ok := value.(px.StringValue); ok { + return interpolateString(ic, s.String(), allowMethods) + } + if a, ok := value.(*types.Array); ok { + cp := a.AppendTo(make([]px.Value, 0, a.Len())) + changed := false + for i, e := range cp { + v, c := doInterpolate(ic, e, allowMethods) + if c { + changed = true + cp[i] = v + } + } + if changed { + a = types.WrapValues(cp) + } + return a, changed + } + if h, ok := value.(*types.Hash); ok { + cp := h.AppendEntriesTo(make([]*types.HashEntry, 0, h.Len())) + changed := false + for i, e := range cp { + k, kc := doInterpolate(ic, e.Key(), allowMethods) + v, vc := doInterpolate(ic, e.Value(), allowMethods) + if kc || vc { + changed = true + cp[i] = types.WrapHashEntry(k, v) + } + } + if changed { + h = types.WrapHash(cp) + } + return h, changed + } + return value, false +} + +const scopeMethod = 1 +const aliasMethod = 2 +const lookupMethod = 3 +const literalMethod = 4 + +var methodMatch = regexp.MustCompile(`^(\w+)\((?:["]([^"]+)["]|[']([^']+)['])\)$`) + +func getMethodAndData(expr string, allowMethods bool) (int, string) { + if groups := methodMatch.FindStringSubmatch(expr); groups != nil { + if !allowMethods { + panic(px.Error(hieraapi.InterpolationMethodSyntaxNotAllowed, issue.NoArgs)) + } + data := groups[2] + if data == `` { + data = groups[3] + } + switch groups[1] { + case `alias`: + return aliasMethod, data + case `hiera`, `lookup`: + return lookupMethod, data + case `literal`: + return literalMethod, data + case `scope`: + return scopeMethod, data + default: + panic(px.Error(hieraapi.UnknownInterpolationMethod, issue.H{`name`: groups[1]})) + } + } + return scopeMethod, expr +} + +func interpolateString(ic hieraapi.Invocation, str string, allowMethods bool) (result px.Value, changed bool) { + changed = false + if !strings.Contains(str, `%{`) { + result = types.WrapString(str) + 
return + } + str = iplPattern.ReplaceAllStringFunc(str, func(match string) string { + expr := strings.TrimSpace(match[2 : len(match)-1]) + if emptyInterpolations[expr] { + return `` + } + var methodKey int + methodKey, expr = getMethodAndData(expr, allowMethods) + if methodKey == aliasMethod && match != str { + panic(px.Error(hieraapi.InterpolationAliasNotEntireString, issue.NoArgs)) + } + + switch methodKey { + case literalMethod: + return expr + case scopeMethod: + key := newKey(expr) + if val, ok := ic.Scope().Get(types.WrapString(key.Root())); ok { + val, _ = doInterpolate(ic, val, allowMethods) + val = key.Dig(val) + if val == nil { + return `` + } + return val.String() + } + return `` + default: + val := Lookup(ic, expr, px.Undef, nil) + if methodKey == aliasMethod { + result = val + return `` + } + return val.String() + } + }) + changed = true + if result == nil { + result = types.WrapString(str) + } + return + +} + +func resolveInScope(ic hieraapi.Invocation, expr string, allowMethods bool) px.Value { + key := newKey(expr) + if val, ok := ic.Scope().Get(types.WrapString(key.Root())); ok { + val, _ = doInterpolate(ic, val, allowMethods) + return key.Dig(val) + } + return nil +} diff --git a/vendor/github.com/lyraproj/hiera/internal/invocation.go b/vendor/github.com/lyraproj/hiera/internal/invocation.go new file mode 100644 index 0000000..969d41a --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/invocation.go @@ -0,0 +1,299 @@ +package internal + +import ( + "os" + "path/filepath" + "sync" + + "github.com/lyraproj/hiera/hieraapi" + + "github.com/lyraproj/hiera/provider" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" +) + +const hieraCacheKey = `Hiera::Cache` +const hieraTopProviderKey = `Hiera::TopProvider` +const hieraGlobalOptionsKey = `Hiera::GlobalOptions` +const hieraTopProviderCacheKey = `Hiera::TopProvider::Cache` + +const hieraConfigsPrefix = `HieraConfig:` +const hieraLockPrefix = `HieraLock:` + +type invocation struct { + px.Context + nameStack []string + configPath string + scope px.Keyed + + expCtx string + provider hieraapi.DataProvider + location hieraapi.Location + redacted bool +} + +// InitContext initializes the given context with the Hiera cache. The context initialized +// with this method determines the life-cycle of that cache. 
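+// A minimal usage sketch (illustrative only, not part of the vendored file): the
+// pcore context c is assumed to come from the embedding application, and the
+// HieraRoot value and lookup key are invented for the example.
+//
+//	InitContext(c, nil, map[string]px.Value{
+//		hieraapi.HieraRoot: types.WrapString(`/some/config/dir`),
+//	})
+//	ic := NewInvocation(c, c.Scope())
+//	v := Lookup(ic, `some::key`, nil, NoOptions)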
+func InitContext(c px.Context, topProvider hieraapi.LookupKey, options map[string]px.Value) { + c.Set(hieraCacheKey, &sync.Map{}) + if topProvider == nil { + topProvider = provider.ConfigLookupKey + } + c.Set(hieraTopProviderKey, topProvider) + c.Set(hieraTopProviderCacheKey, &sync.Map{}) + + if options == nil { + options = make(map[string]px.Value) + } + c.Set(hieraGlobalOptionsKey, options) + + _, ok := options[hieraapi.HieraConfig] + if !ok { + var hieraRoot string + r, ok := options[hieraapi.HieraRoot] + if ok { + hieraRoot = r.String() + } else { + var err error + if hieraRoot, err = os.Getwd(); err != nil { + panic(err) + } + } + + var fileName string + if r, ok = options[hieraapi.HieraConfigFileName]; ok { + fileName = r.String() + } else { + fileName = `hiera.yaml` + } + options[hieraapi.HieraConfig] = types.WrapString(filepath.Join(hieraRoot, fileName)) + } +} + +func NewInvocation(c px.Context, scope px.Keyed) hieraapi.Invocation { + return &invocation{Context: c, nameStack: []string{}, scope: scope, configPath: globalOptions(c)[hieraapi.HieraConfig].String()} +} + +func (ic *invocation) topProvider() hieraapi.LookupKey { + if v, ok := ic.Get(hieraTopProviderKey); ok { + var tp hieraapi.LookupKey + if tp, ok = v.(hieraapi.LookupKey); ok { + return tp + } + } + panic(px.Error(hieraapi.NotInitialized, issue.NoArgs)) +} + +func (ic *invocation) topProviderCache() *sync.Map { + if v, ok := ic.Get(hieraTopProviderCacheKey); ok { + var tc *sync.Map + if tc, ok = v.(*sync.Map); ok { + return tc + } + } + panic(px.Error(hieraapi.NotInitialized, issue.NoArgs)) +} + +func globalOptions(c px.Context) map[string]px.Value { + if v, ok := c.Get(hieraGlobalOptionsKey); ok { + var g map[string]px.Value + if g, ok = v.(map[string]px.Value); ok { + return g + } + } + panic(px.Error(hieraapi.NotInitialized, issue.NoArgs)) +} + +func (ic *invocation) sharedCache() *sync.Map { + if v, ok := ic.Get(hieraCacheKey); ok { + var sh *sync.Map + if sh, ok = v.(*sync.Map); ok { + return sh + } + } + panic(px.Error(hieraapi.NotInitialized, issue.NoArgs)) +} + +func (ic *invocation) Config() (conf hieraapi.ResolvedConfig) { + sc := ic.sharedCache() + cp := hieraConfigsPrefix + ic.configPath + if val, ok := sc.Load(cp); ok { + conf = val.(hieraapi.ResolvedConfig) + return + } + + lc := hieraLockPrefix + ic.configPath + myLock := sync.RWMutex{} + myLock.Lock() + defer myLock.Unlock() + + if lv, loaded := sc.LoadOrStore(lc, &myLock); loaded { + // Only the one storing thread should proceed and create the configuration. This thread + // awaits the completion of that creation by waiting for the loaded mutex. 
+ lock := lv.(*sync.RWMutex) + lock.RLock() + val, _ := sc.Load(cp) + lock.RUnlock() + conf = val.(hieraapi.ResolvedConfig) + } else { + conf = NewConfig(ic, ic.configPath).Resolve(ic) + sc.Store(cp, conf) + } + return +} + +func (ic *invocation) lookupViaCache(key hieraapi.Key, options map[string]px.Value) px.Value { + rootKey := key.Root() + + sc := ic.sharedCache() + if val, ok := sc.Load(rootKey); ok { + return key.Dig(val.(px.Value)) + } + + globalOptions := globalOptions(ic) + if len(options) == 0 { + options = globalOptions + } else if len(globalOptions) > 0 { + no := make(map[string]px.Value, len(options)+len(globalOptions)) + for k, v := range globalOptions { + no[k] = v + } + for k, v := range options { + no[k] = v + } + options = no + } + v := ic.topProvider()(newProviderContext(ic, ic.topProviderCache()), rootKey, options) + if v != nil { + v := Interpolate(ic, v, true) + sc.Store(rootKey, v) + return key.Dig(v) + } + return nil +} + +func (ic *invocation) WithKey(key hieraapi.Key, actor px.Producer) px.Value { + if utils.ContainsString(ic.nameStack, key.String()) { + panic(px.Error(hieraapi.EndlessRecursion, issue.H{`name_stack`: ic.nameStack})) + } + ic.nameStack = append(ic.nameStack, key.String()) + defer func() { + ic.nameStack = ic.nameStack[:len(ic.nameStack)-1] + }() + return actor() +} + +func (ic *invocation) DoRedacted(doer px.Doer) { + if ic.redacted { + doer() + } else { + defer func() { + ic.redacted = false + }() + ic.redacted = true + doer() + } +} + +func (ic *invocation) DoWithScope(scope px.Keyed, doer px.Doer) { + sc := ic.scope + ic.scope = scope + doer() + ic.scope = sc +} + +func (ic *invocation) Scope() px.Keyed { + return ic.scope +} + +func (ic *invocation) WithDataProvider(p hieraapi.DataProvider, actor px.Producer) px.Value { + saveProv := ic.provider + ic.provider = p + defer func() { + ic.provider = saveProv + }() + return actor() +} + +func (ic *invocation) WithLocation(loc hieraapi.Location, actor px.Producer) px.Value { + saveLoc := ic.location + ic.location = loc + defer func() { + ic.location = saveLoc + }() + return actor() +} + +func (ic *invocation) ReportLocationNotFound() { + lg := hclog.Default() + if lg.IsDebug() { + lg.Debug(`location not found`, ic.debugArgs()...) + } +} + +func (ic *invocation) ReportFound(value px.Value) { + lg := hclog.Default() + if lg.IsDebug() { + var vs string + if ic.redacted { + // Value hasn't been assembled yet so it's not yet converted to a Sensitive + vs = `value redacted` + } else { + vs = value.String() + } + lg.Debug(`value found`, append(ic.debugArgs(), `value`, vs)...) + } +} + +func (ic *invocation) ReportNotFound() { + lg := hclog.Default() + if lg.IsDebug() { + lg.Debug(`key not found`, append(ic.debugArgs())...) 
+ } +} + +func (ic *invocation) NotFound() { + panic(hieraapi.NotFound) +} + +func (ic *invocation) Explain(messageProducer func() string) { + lg := hclog.Default() + if lg.IsDebug() { + lg.Debug(messageProducer()) + } +} + +func (ic *invocation) WithExplanationContext(n string, f func()) { + saveExpCtx := ic.expCtx + defer func() { + ic.expCtx = saveExpCtx + }() + ic.expCtx = n + // TODO: Add explanation support + f() +} + +func (ic *invocation) debugArgs() []interface{} { + args := make([]interface{}, 0, 4) + if len(ic.nameStack) > 0 { + args = append(args, `key`) + args = append(args, ic.nameStack[len(ic.nameStack)-1]) + } + if ic.expCtx != `` { + args = append(args, `context`) + args = append(args, ic.expCtx) + } + if ic.provider != nil { + args = append(args, `provider`) + args = append(args, ic.provider.FullName()) + } + if ic.location != nil { + args = append(args, `location`) + args = append(args, ic.location.String()) + } + return args +} diff --git a/vendor/github.com/lyraproj/hiera/internal/key.go b/vendor/github.com/lyraproj/hiera/internal/key.go new file mode 100644 index 0000000..83de4cb --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/key.go @@ -0,0 +1,119 @@ +package internal + +import ( + "bytes" + "strconv" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type key struct { + orig string + parts []interface{} +} + +func newKey(str string) hieraapi.Key { + b := bytes.NewBufferString(``) + return &key{str, parseUnquoted(b, str, str, []interface{}{})} +} + +func (k *key) Dig(v px.Value) px.Value { + var ok bool + var ix int + for i := 1; i < len(k.parts); i++ { + p := k.parts[i] + switch vc := v.(type) { + case *types.Array: + if ix, ok = p.(int); ok { + if ix >= 0 && ix < vc.Len() { + v = vc.At(ix) + continue + } + return nil + } + case *types.Hash: + var kx px.Value + if ix, ok = p.(int); ok { + kx = types.WrapInteger(int64(ix)) + } else { + kx = types.WrapString(p.(string)) + } + if v, ok = vc.Get(kx); ok { + continue + } + return nil + } + panic(px.Error(hieraapi.DigMismatch, issue.H{`type`: px.GenericValueType(v), `segment`: p, `key`: k.orig})) + } + return v +} + +func (k *key) Bury(value px.Value) px.Value { + for i := len(k.parts) - 1; i > 0; i-- { + p := k.parts[i] + var kx px.Value + if ix, ok := p.(int); ok { + kx = types.WrapInteger(int64(ix)) + } else { + kx = types.WrapString(p.(string)) + } + value = types.WrapHash([]*types.HashEntry{types.WrapHashEntry(kx, value)}) + } + return value +} + +func (k *key) Parts() []interface{} { + return k.parts +} + +func (k *key) String() string { + return k.orig +} + +func (k *key) Root() string { + return k.parts[0].(string) +} + +func parseUnquoted(b *bytes.Buffer, key, part string, parts []interface{}) []interface{} { + mungedPart := func(ix int, part string) interface{} { + if i, err := strconv.ParseInt(part, 10, 32); err == nil { + if ix == 0 { + panic(px.Error(hieraapi.FirstKeySegmentInt, issue.H{`key`: key})) + } + return int(i) + } + if part == `` { + panic(px.Error(hieraapi.EmptyKeySegment, issue.H{`key`: key})) + } + return part + } + + for i, c := range part { + switch c { + case '\'', '"': + return parseQuoted(b, c, key, part[i+1:], parts) + case '.': + parts = append(parts, mungedPart(len(parts), b.String())) + b.Reset() + default: + b.WriteRune(c) + } + } + return append(parts, mungedPart(len(parts), b.String())) +} + +func parseQuoted(b *bytes.Buffer, q rune, key, part string, parts []interface{}) 
[]interface{} { + for i, c := range part { + if c == q { + if i == len(part)-1 { + return append(parts, b.String()) + } + return parseUnquoted(b, key, part[i+1:], parts) + } + b.WriteRune(c) + } + panic(px.Error(hieraapi.UnterminatedQuote, issue.H{`key`: key})) +} diff --git a/vendor/github.com/lyraproj/hiera/internal/location.go b/vendor/github.com/lyraproj/hiera/internal/location.go new file mode 100644 index 0000000..4a0bba9 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/location.go @@ -0,0 +1,180 @@ +package internal + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/bmatcuk/doublestar" + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type path struct { + original string + resolved string + exist bool +} + +func (p *path) Exist() bool { + return p.exist +} + +func (p *path) Kind() hieraapi.LocationKind { + return hieraapi.LcPath +} + +func (p *path) String() string { + return fmt.Sprintf("path{ original:%s, resolved:%s, exist:%v}", p.original, p.resolved, p.exist) +} + +func (p *path) Resolve(ic hieraapi.Invocation, dataDir string) []hieraapi.Location { + r, _ := interpolateString(ic, p.original, false) + rp := filepath.Join(dataDir, r.String()) + _, err := os.Stat(rp) + return []hieraapi.Location{&path{p.original, rp, err == nil}} +} + +func (p *path) Original() string { + return p.original +} + +func (p *path) Resolved() string { + return p.resolved +} + +type glob struct { + pattern string +} + +func (g *glob) Exist() bool { + return false +} + +func (g *glob) Kind() hieraapi.LocationKind { + return hieraapi.LcGlob +} + +func (g *glob) String() string { + return fmt.Sprintf("glob{pattern:%s}", g.pattern) +} + +func (g *glob) Original() string { + return g.pattern +} + +func (g *glob) Resolve(ic hieraapi.Invocation, dataDir string) []hieraapi.Location { + r, _ := interpolateString(ic, g.pattern, false) + rp := filepath.Join(dataDir, r.String()) + matches, _ := doublestar.Glob(rp) + ls := make([]hieraapi.Location, len(matches)) + for i, m := range matches { + ls[i] = &path{g.pattern, m, true} + } + return ls +} + +func (g *glob) Resolved() string { + // This should never happen. 
+ panic(fmt.Errorf(`resolved requested on a glob`)) +} + +type uri struct { + original string + resolved string +} + +func (u *uri) Exist() bool { + return true +} + +func (u *uri) Kind() hieraapi.LocationKind { + return hieraapi.LcUri +} + +func (u *uri) String() string { + return fmt.Sprintf("uri{original:%s, resolved:%s", u.original, u.resolved) +} + +func (u *uri) Original() string { + return u.original +} + +func (u *uri) Resolve(ic hieraapi.Invocation, dataDir string) []hieraapi.Location { + r, _ := interpolateString(ic, u.original, false) + return []hieraapi.Location{&uri{u.original, r.String()}} +} + +func (u *uri) Resolved() string { + return u.resolved +} + +type mappedPaths struct { + // Name of variable that contains an array of strings + sourceVar string + + // Variable name to use when resolving template + key string + + // Template containing interpolation of the key + template string +} + +func (m *mappedPaths) Exist() bool { + return false +} + +func (m *mappedPaths) Kind() hieraapi.LocationKind { + return hieraapi.LcMappedPaths +} + +func (m *mappedPaths) Original() string { + return m.String() +} + +func (m *mappedPaths) String() string { + return fmt.Sprintf("mapped_paths{sourceVar:%s, key:%s, template:%s}", m.sourceVar, m.key, m.template) +} + +type scopeWithVar struct { + s px.Keyed + k px.Value + v px.Value +} + +func (s *scopeWithVar) Get(key px.Value) (px.Value, bool) { + if s.k.Equals(key, nil) { + return s.v, true + } + return s.s.Get(key) +} + +func (m *mappedPaths) Resolve(ic hieraapi.Invocation, dataDir string) []hieraapi.Location { + var mappedVars px.List + v := resolveInScope(ic, m.sourceVar, false) + switch v := v.(type) { + case px.StringValue: + mappedVars = types.SingletonArray(v) + case px.List: + mappedVars = v + default: + return []hieraapi.Location{} + } + paths := make([]hieraapi.Location, mappedVars.Len()) + + mappedVars.EachWithIndex(func(mv px.Value, i int) { + ic.DoWithScope(&scopeWithVar{ic.Scope(), types.WrapString(m.key), mv}, func() { + r, _ := interpolateString(ic, m.template, false) + rp := filepath.Join(dataDir, r.String()) + _, err := os.Stat(rp) + paths[i] = &path{m.template, rp, err == nil} + }) + }) + return paths +} + +func (m *mappedPaths) Resolved() string { + // This should never happen. 
+ panic(fmt.Errorf(`resolved requested on mapped paths`)) +} diff --git a/vendor/github.com/lyraproj/hiera/internal/lookup.go b/vendor/github.com/lyraproj/hiera/internal/lookup.go new file mode 100644 index 0000000..a3b10c5 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/lookup.go @@ -0,0 +1,136 @@ +package internal + +import ( + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +func luNames(nameOrNames px.Value) (names []string) { + if ar, ok := nameOrNames.(*types.Array); ok { + names = make([]string, ar.Len()) + ar.EachWithIndex(func(v px.Value, i int) { + names[i] = v.String() + }) + } else { + names = []string{nameOrNames.String()} + } + return +} + +func mergeType(nameOrHash px.Value) (merge map[string]px.Value) { + if hs, ok := nameOrHash.(*types.Hash); ok { + merge = make(map[string]px.Value, hs.Len()) + hs.EachPair(func(k, v px.Value) { merge[k.String()] = v }) + } else if nameOrHash == px.Undef { + merge = NoOptions + } else { + merge = map[string]px.Value{`merge`: nameOrHash} + } + return +} + +func init() { + px.NewGoFunction2(`lookup`, + func(l px.LocalTypes) { + l.Type(`NameType`, `Variant[String, Array[String]]`) + l.Type(`ValueType`, `Type`) + l.Type(`DefaultValueType`, `Any`) + l.Type(`MergeType`, `Variant[String[1], Hash[String, Scalar]]`) + l.Type(`BlockType`, `Callable[NameType]`) + l.Type(`OptionsWithName`, `Struct[{ + name => NameType, + value_type => Optional[ValueType], + default_value => Optional[DefaultValueType], + override => Optional[Hash[String,Any]], + default_values_hash => Optional[Hash[String,Any]], + merge => Optional[MergeType] + }]`) + l.Type(`OptionsWithoutName`, `Struct[{ + value_type => Optional[ValueType], + default_value => Optional[DefaultValueType], + override => Optional[Hash[String,Any]], + default_values_hash => Optional[Hash[String,Any]], + merge => Optional[MergeType] + }]`) + }, + + func(d px.Dispatch) { + d.Param(`NameType`) + d.OptionalParam(`ValueType`) + d.OptionalParam(`MergeType`) + d.Function(func(c px.Context, args []px.Value) px.Value { + vt := px.Type(types.DefaultAnyType()) + var options map[string]px.Value + argc := len(args) + if argc > 1 { + vt = args[1].(px.Type) + if argc > 2 { + options = mergeType(args[2]) + } + } + return Lookup2(NewInvocation(c, c.Scope()), luNames(args[0]), vt, nil, nil, nil, options, nil) + }) + }, + + func(d px.Dispatch) { + d.Param(`NameType`) + d.Param(`Optional[ValueType]`) + d.Param(`Optional[MergeType]`) + d.Param(`DefaultValueType`) + d.Function(func(c px.Context, args []px.Value) px.Value { + vt := px.Type(types.DefaultAnyType()) + if arg := args[1]; arg != px.Undef { + vt = arg.(px.Type) + } + options := mergeType(args[2]) + return Lookup2(NewInvocation(c, c.Scope()), luNames(args[0]), vt, args[3], nil, nil, options, nil) + }) + }, + + func(d px.Dispatch) { + d.Param(`NameType`) + d.OptionalParam(`ValueType`) + d.OptionalParam(`MergeType`) + d.Block(`BlockType`) + d.Function2(func(c px.Context, args []px.Value, block px.Lambda) px.Value { + vt := px.Type(types.DefaultAnyType()) + if arg := args[1]; arg != px.Undef { + vt = arg.(px.Type) + } + options := mergeType(args[2]) + return Lookup2(NewInvocation(c, c.Scope()), luNames(args[0]), vt, nil, nil, nil, options, block) + }) + }, + + func(d px.Dispatch) { + d.Param(`OptionsWithName`) + d.OptionalBlock(`BlockType`) + d.Function2(func(c px.Context, args []px.Value, block px.Lambda) px.Value { + hash := args[0].(*types.Hash) + names := luNames(hash.Get5(`name`, px.Undef)) + dflt := hash.Get5(`default_value`, 
nil) + vt := hash.Get5(`value_type`, types.DefaultAnyType()).(px.Type) + override := hash.Get5(`override`, px.EmptyMap).(px.OrderedMap) + dfltHash := hash.Get5(`default_values_hash`, px.EmptyMap).(px.OrderedMap) + options := mergeType(hash.Get5(`merge`, px.Undef)) + return Lookup2(NewInvocation(c, c.Scope()), names, vt, dflt, override, dfltHash, options, block) + }) + }, + + func(d px.Dispatch) { + d.Param(`NameType`) + d.Param(`OptionsWithoutName`) + d.OptionalBlock(`BlockType`) + d.Function2(func(c px.Context, args []px.Value, block px.Lambda) px.Value { + names := luNames(args[0]) + hash := args[1].(*types.Hash) + dflt := hash.Get5(`default_value`, nil) + vt := hash.Get5(`value_type`, types.DefaultAnyType()).(px.Type) + override := hash.Get5(`override`, px.EmptyMap).(px.OrderedMap) + dfltHash := hash.Get5(`default_values_hash`, px.EmptyMap).(px.OrderedMap) + options := mergeType(hash.Get5(`merge`, px.Undef)) + return Lookup2(NewInvocation(c, c.Scope()), names, vt, dflt, override, dfltHash, options, block) + }) + }, + ) +} diff --git a/vendor/github.com/lyraproj/hiera/internal/lookupkeyprovider.go b/vendor/github.com/lyraproj/hiera/internal/lookupkeyprovider.go new file mode 100644 index 0000000..b3ab483 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/lookupkeyprovider.go @@ -0,0 +1,93 @@ +package internal + +import ( + "fmt" + "sync" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/hiera/provider" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type LookupKeyProvider struct { + function hieraapi.Function + locations []hieraapi.Location + providerFunc hieraapi.LookupKey + hashes *sync.Map +} + +func (dh *LookupKeyProvider) UncheckedLookup(key hieraapi.Key, invocation hieraapi.Invocation, merge hieraapi.MergeStrategy) px.Value { + return invocation.WithDataProvider(dh, func() px.Value { + return merge.Lookup(dh.locations, invocation, func(location interface{}) px.Value { + return dh.invokeWithLocation(invocation, location.(hieraapi.Location), key.Root()) + }) + }) +} + +func (dh *LookupKeyProvider) invokeWithLocation(invocation hieraapi.Invocation, location hieraapi.Location, root string) px.Value { + if location == nil { + return dh.lookupKey(invocation, nil, root) + } + return invocation.WithLocation(location, func() px.Value { + if location.Exist() { + return dh.lookupKey(invocation, location, root) + } + invocation.ReportLocationNotFound() + return nil + }) +} + +func (dh *LookupKeyProvider) lookupKey(ic hieraapi.Invocation, location hieraapi.Location, root string) px.Value { + key := `` + opts := NoOptions + if location != nil { + key = location.Resolved() + opts = map[string]px.Value{`path`: types.WrapString(key)} + } + + cache, _ := dh.hashes.LoadOrStore(key, &sync.Map{}) + value := dh.providerFunction(ic)(newProviderContext(ic, cache.(*sync.Map)), root, opts) + if value != nil { + ic.ReportFound(value) + } + return value +} + +func (dh *LookupKeyProvider) providerFunction(ic hieraapi.Invocation) (pf hieraapi.LookupKey) { + if dh.providerFunc == nil { + n := dh.function.Name() + if n == `environment` { + dh.providerFunc = provider.Environment + } + // Load lookup provider function using the standard loader + if f, ok := px.Load(ic, px.NewTypedName(px.NsFunction, n)); ok { + dh.providerFunc = func(pc hieraapi.ProviderContext, key string, options map[string]px.Value) px.Value { + defer catchNotFound() + return f.(px.Function).Call(ic, nil, []px.Value{pc, types.WrapString(key), px.Wrap(ic, options)}...) 
+ } + } else { + ic.Explain(func() string { + return fmt.Sprintf(`unresolved function '%s'`, n) + }) + dh.providerFunc = func(pc hieraapi.ProviderContext, key string, options map[string]px.Value) px.Value { + return nil + } + } + } + return dh.providerFunc +} + +func (dh *LookupKeyProvider) FullName() string { + return fmt.Sprintf(`lookup_key function '%s'`, dh.function.Name()) +} + +func newLookupKeyProvider(he hieraapi.HierarchyEntry) hieraapi.DataProvider { + ls := he.Locations() + return &LookupKeyProvider{ + function: he.Function(), + locations: ls, + hashes: &sync.Map{}, + } +} diff --git a/vendor/github.com/lyraproj/hiera/internal/mergestrategy.go b/vendor/github.com/lyraproj/hiera/internal/mergestrategy.go new file mode 100644 index 0000000..ed42281 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/mergestrategy.go @@ -0,0 +1,207 @@ +package internal + +import ( + "reflect" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/types" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/px" +) + +func init() { + hieraapi.GetMergeStrategy = getMergeStrategy +} + +func getMergeStrategy(n string, opts map[string]px.Value) hieraapi.MergeStrategy { + switch n { + case `first`: + return &firstFound{} + case `unique`: + return &unique{} + case `hash`: + return &hashMerge{} + case `deep`: + return &deepMerge{opts} + default: + panic(px.Error(hieraapi.UnknownMergeStrategy, issue.H{`name`: n})) + } +} + +type merger interface { + issue.Labeled + hieraapi.MergeStrategy + + merge(a, b px.Value) px.Value + + mergeSingle(v reflect.Value, vf func(l interface{}) px.Value) px.Value + + convertValue(v px.Value) px.Value +} + +type deepMerge struct{ opts map[string]px.Value } + +type hashMerge struct{} + +type firstFound struct{} + +type unique struct{} + +func doLookup(s merger, vs interface{}, ic hieraapi.Invocation, vf func(l interface{}) px.Value) px.Value { + vsr := reflect.ValueOf(vs) + if vsr.Kind() != reflect.Slice { + return nil + } + top := vsr.Len() + switch top { + case 0: + return nil + case 1: + return s.mergeSingle(vsr.Index(0), vf) + default: + var memo px.Value + ic.WithExplanationContext(s.Label(), func() { + for idx := 0; idx < top; idx++ { + v := variantLookup(vsr.Index(idx), vf) + if v != nil { + if memo == nil { + memo = s.convertValue(v) + } else { + memo = s.merge(memo, v) + } + } + } + if memo != nil { + ic.ReportFound(memo) + } + }) + return memo + } +} + +func variantLookup(v reflect.Value, vf func(l interface{}) px.Value) px.Value { + if v.CanInterface() { + return vf(v.Interface()) + } + return nil +} + +func (d *firstFound) Label() string { + return `first found strategy` +} + +func (d *firstFound) Lookup(vs interface{}, ic hieraapi.Invocation, f func(location interface{}) px.Value) px.Value { + vsr := reflect.ValueOf(vs) + if vsr.Kind() != reflect.Slice { + return nil + } + top := vsr.Len() + switch top { + case 0: + return nil + case 1: + return variantLookup(vsr.Index(0), f) + default: + var v px.Value + ic.WithExplanationContext(d.Label(), func() { + for idx := 0; idx < top; idx++ { + v = variantLookup(vsr.Index(idx), f) + if v != nil { + break + } + } + if v != nil { + ic.ReportFound(v) + } + }) + return v + } +} + +func (d *firstFound) mergeSingle(v reflect.Value, vf func(l interface{}) px.Value) px.Value { + return variantLookup(v, vf) +} + +func (d *firstFound) convertValue(v px.Value) px.Value { + return v +} + +func (d *firstFound) merge(a, b px.Value) px.Value { + return a +} + +func (d *unique) Label() string { + 
return `unique merge strategy` +} + +func (d *unique) Lookup(vs interface{}, ic hieraapi.Invocation, f func(location interface{}) px.Value) px.Value { + return doLookup(d, vs, ic, f) +} + +func (d *unique) mergeSingle(rv reflect.Value, vf func(l interface{}) px.Value) px.Value { + v := variantLookup(rv, vf) + if av, ok := v.(*types.Array); ok { + return av.Flatten().Unique() + } + return v +} + +func (d *unique) convertValue(v px.Value) px.Value { + if av, ok := v.(*types.Array); ok { + return av.Flatten() + } + return types.WrapValues([]px.Value{v}) +} + +func (d *unique) merge(a, b px.Value) px.Value { + return d.convertValue(a).(px.List).AddAll(d.convertValue(b).(px.List)).Unique() +} + +func (d *deepMerge) Label() string { + return `deep merge strategy` +} + +func (d *deepMerge) Lookup(vs interface{}, ic hieraapi.Invocation, f func(location interface{}) px.Value) px.Value { + return doLookup(d, vs, ic, f) +} + +func (d *deepMerge) mergeSingle(v reflect.Value, vf func(l interface{}) px.Value) px.Value { + return variantLookup(v, vf) +} + +func (d *deepMerge) convertValue(v px.Value) px.Value { + return v +} + +func (d *deepMerge) merge(a, b px.Value) px.Value { + v, _ := DeepMerge(a, b, d.opts) + return v +} + +func (d *hashMerge) Label() string { + return `hash merge strategy` +} + +func (d *hashMerge) Lookup(vs interface{}, ic hieraapi.Invocation, f func(location interface{}) px.Value) px.Value { + return doLookup(d, vs, ic, f) +} + +func (d *hashMerge) mergeSingle(v reflect.Value, vf func(l interface{}) px.Value) px.Value { + return variantLookup(v, vf) +} + +func (d *hashMerge) convertValue(v px.Value) px.Value { + return v +} + +func (d *hashMerge) merge(a, b px.Value) px.Value { + if ah, ok := a.(*types.Hash); ok { + var bh *types.Hash + if bh, ok = b.(*types.Hash); ok { + return bh.Merge(ah) + } + } + return a +} diff --git a/vendor/github.com/lyraproj/hiera/internal/providerctx.go b/vendor/github.com/lyraproj/hiera/internal/providerctx.go new file mode 100644 index 0000000..03636e9 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/internal/providerctx.go @@ -0,0 +1,162 @@ +package internal + +import ( + "io" + "sync" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +var ContextType px.ObjectType + +func init() { + ContextType = px.NewObjectType(`Hiera::Context`, `{ + attributes => { + environment_name => { + type => String[1], + kind => derived + }, + module_name => { + type => Optional[String[1]], + kind => derived + } + }, + functions => { + not_found => Callable[[0,0], Undef], + explain => Callable[[Callable[0, 0]], Undef], + interpolate => Callable[1, 1], + cache => Callable[[Scalar, Any], Any], + cache_all => Callable[[Hash[Scalar, Any]], Undef], + cache_has_key => Callable[[Scalar], Boolean], + cached_value => Callable[[Scalar], Any], + cached_entries => Variant[ + Callable[[Callable[1,1]], Undef], + Callable[[Callable[2,2]], Undef], + Callable[[0, 0], Iterable[Tuple[Scalar, Any]]]], + cached_file_data => Callable[String,Optional[Callable[1,1]]], + } + }`) +} + +type providerCtx struct { + invocation hieraapi.Invocation + cache *sync.Map +} + +func (c *providerCtx) Interpolate(value px.Value) px.Value { + return Interpolate(c.invocation, value, true) +} + +func newProviderContext(c hieraapi.Invocation, cache *sync.Map) hieraapi.ProviderContext { + // TODO: Cache should be specific to a provider identity determined by the providers position in + // the configured hierarchy + return &providerCtx{invocation: 
c, cache: cache} +} + +func (c *providerCtx) Call(ctx px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (result px.Value, ok bool) { + result = px.Undef + ok = true + switch method.Name() { + case `cache`: + result = c.Cache(args[0].String(), args[1]) + case `cache_all`: + c.CacheAll(args[0].(px.OrderedMap)) + case `cached_value`: + if v, ok := c.CachedValue(args[0].String()); ok { + result = v + } + case `cached_entries`: + c.CachedEntries(func(k, v px.Value) { block.Call(ctx, nil, k, v) }) + case `explain`: + c.Explain(func() string { return block.Call(ctx, nil).String() }) + case `not_found`: + c.NotFound() + default: + result = nil + ok = false + } + return result, ok +} + +func (c *providerCtx) String() string { + return px.ToString(c) +} + +func (c *providerCtx) Equals(other interface{}, guard px.Guard) bool { + return c == other +} + +func (c *providerCtx) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + types.ObjectToString(c, s, b, g) +} + +func (c *providerCtx) PType() px.Type { + return ContextType +} + +func (c *providerCtx) Get(key string) (value px.Value, ok bool) { + switch key { + case `environment_name`, `module_name`: + return px.Undef, true + } + return nil, false +} + +func (c *providerCtx) InitHash() px.OrderedMap { + return px.EmptyMap +} + +func (c *providerCtx) NotFound() { + panic(hieraapi.NotFound) +} + +func (c *providerCtx) Explain(messageProducer func() string) { + c.invocation.Explain(messageProducer) +} + +func (c *providerCtx) Cache(key string, value px.Value) px.Value { + old, loaded := c.cache.LoadOrStore(key, value) + if loaded { + // Replace old value + c.cache.Store(key, value) + } else { + old = px.Undef + } + return old.(px.Value) +} + +func (c *providerCtx) CacheAll(hash px.OrderedMap) { + hash.EachPair(func(k, v px.Value) { + c.cache.Store(k.String(), v) + }) +} + +func (c *providerCtx) CachedValue(key string) (px.Value, bool) { + if v, ok := c.cache.Load(key); ok { + return v.(px.Value), true + } + return nil, false +} + +func (c *providerCtx) CachedEntries(consumer px.BiConsumer) { + ic := c.invocation + c.cache.Range(func(k, v interface{}) bool { + consumer(px.Wrap(ic, k), px.Wrap(ic, v)) + return true + }) +} + +func (c *providerCtx) Invocation() hieraapi.Invocation { + return c.invocation +} + +func catchNotFound() { + if r := recover(); r != nil { + // lookup.NotFound is ok. It just means that there was no lookup_options + if r != hieraapi.NotFound { + panic(r) + } + } +} diff --git a/vendor/github.com/lyraproj/hiera/provider/configlookupkey.go b/vendor/github.com/lyraproj/hiera/provider/configlookupkey.go new file mode 100644 index 0000000..8f9de7b --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/configlookupkey.go @@ -0,0 +1,94 @@ +package provider + +import ( + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +var first = types.WrapString(`first`) + +// ConfigLookupKey performs a lookup based on a hierarchy of providers that has been specified +// in a yaml based configuration stored on disk. 
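+// Illustrative sketch of a call (the key and merge strategy are assumptions made
+// for the example; when no merge option is given and lookup_options provides
+// none, the `first` strategy is used):
+//
+//	opts := map[string]px.Value{`merge`: types.WrapString(`deep`)}
+//	v := ConfigLookupKey(pc, `some::key`, opts)
+//
+// A merge option given as a hash is also accepted; its `strategy` entry selects
+// the strategy and the remaining entries are passed on as merge options.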
+func ConfigLookupKey(pc hieraapi.ProviderContext, key string, options map[string]px.Value) px.Value { + ic := pc.Invocation() + cfg := ic.Config() + k := hieraapi.NewKey(key) + lo := cfg.LookupOptions(k) + merge, ok := options[`merge`] + if !ok { + if lo == nil { + merge = first + } else { + merge, ok = lo[`merge`] + if !ok { + merge = first + } + } + } + + var mh px.OrderedMap + var mergeOpts map[string]px.Value + if mh, ok = merge.(px.OrderedMap); ok { + merge = mh.Get5(`strategy`, first) + mergeOpts = make(map[string]px.Value, mh.Len()) + mh.EachPair(func(k, v px.Value) { + ks := k.String() + if ks != `strategy` { + mergeOpts[ks] = v + } + }) + } + + redacted := false + + var convertToType px.Type + var convertToArgs []px.Value + if lo != nil { + ts := `` + if ct, ok := lo[`convert_to`]; ok { + if cm, ok := ct.(*types.Array); ok { + // First arg must be a type. The rest is arguments + switch cm.Len() { + case 0: + // Obviously bogus + case 1: + ts = cm.At(0).String() + default: + ts = cm.At(0).String() + convertToArgs = cm.Slice(1, cm.Len()).AppendTo(make([]px.Value, 0, cm.Len()-1)) + } + } else { + ts = ct.String() + } + } + if ts != `` { + convertToType = ic.ParseType(ts) + redacted = ts == `Sensitive` + } + } + + var v px.Value + hf := func() { + ms := hieraapi.GetMergeStrategy(merge.String(), mergeOpts) + v = ms.Lookup(cfg.Hierarchy(), ic, func(prv interface{}) px.Value { + pr := prv.(hieraapi.DataProvider) + return pr.UncheckedLookup(k, ic, ms) + }) + } + + if redacted { + ic.DoRedacted(hf) + } else { + hf() + } + + if v != nil && convertToType != nil { + av := []px.Value{v} + if convertToArgs != nil { + av = append(av, convertToArgs...) + } + v = px.New(ic, convertToType, av...) + } + return v +} diff --git a/vendor/github.com/lyraproj/hiera/provider/environment.go b/vendor/github.com/lyraproj/hiera/provider/environment.go new file mode 100644 index 0000000..a43c2ef --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/environment.go @@ -0,0 +1,34 @@ +package provider + +import ( + "os" + "strings" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +// Environment is a LookupKey function that performs a lookup in the current environment. The key can either be just +// "env" in which case all current environment variables will be returned as an OrderedMap, or +// prefixed with "env::" in which case the rest of the key is interpreted as the environment variable +// to look for. 
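+// Illustrative calls following the description above (results are shown as
+// comments; the variable name HOME is just an example):
+//
+//	Environment(pc, `env`, nil)       // OrderedMap of all environment variables
+//	Environment(pc, `env::HOME`, nil) // value of $HOME as a String, or nil if unset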
+func Environment(_ hieraapi.ProviderContext, key string, _ map[string]px.Value) px.Value { + if key == `env` { + env := os.Environ() + em := make([]*types.HashEntry, len(env)) + for _, ev := range env { + if ei := strings.IndexRune(ev, '='); ei > 0 { + em = append(em, types.WrapHashEntry2(ev[:ei], types.WrapString(ev[ei+1:]))) + } + } + return types.WrapHash(em) + } + if strings.HasSuffix(key, `env::`) { + // Rest of key is name of environment + if v, ok := os.LookupEnv(key[5:]); ok { + return types.WrapString(v) + } + } + return nil +} diff --git a/vendor/github.com/lyraproj/hiera/provider/jsondata.go b/vendor/github.com/lyraproj/hiera/provider/jsondata.go new file mode 100644 index 0000000..5b64cd2 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/jsondata.go @@ -0,0 +1,32 @@ +package provider + +import ( + "bytes" + + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/serialization" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +func JsonData(_ hieraapi.ProviderContext, options map[string]px.Value) px.OrderedMap { + pv, ok := options[`path`] + if !ok { + panic(px.Error(hieraapi.MissingRequiredOption, issue.H{`option`: `path`})) + } + path := pv.String() + var bin *types.Binary + if bin, ok = types.BinaryFromFile2(path); ok { + rdr := bytes.NewBuffer(bin.Bytes()) + vc := px.NewCollector() + serialization.JsonToData(path, rdr, vc) + v := vc.Value() + if data, ok := v.(px.OrderedMap); ok { + return data + } + panic(px.Error(hieraapi.JsonNotHash, issue.H{`path`: path})) + } + return px.EmptyMap +} diff --git a/vendor/github.com/lyraproj/hiera/provider/muxlookupkey.go b/vendor/github.com/lyraproj/hiera/provider/muxlookupkey.go new file mode 100644 index 0000000..a01d22f --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/muxlookupkey.go @@ -0,0 +1,33 @@ +package provider + +import ( + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +const LookupProvidersKey = `hiera::lookup::providers` + +// MuxLookup performs a lookup using all LookupKey function slice registered under the LookupProviderKey key +// in the given options map. The lookups are performed in the order the functions appear in the +// slice. The first found value is returned. +// +// The intended use for this function is when a very simplistic way of configuring Hiera is desired that +// requires no configuration files. 
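+// Illustrative sketch of the configuration-free setup (not part of the vendored
+// file): the provider slice below and the use of types.WrapRuntime to wrap it as
+// a RuntimeValue are assumptions made for the example.
+//
+//	providers := []hieraapi.LookupKey{Environment, YamlLookupKey}
+//	opts := map[string]px.Value{
+//		LookupProvidersKey: types.WrapRuntime(providers),
+//	}
+//	v := MuxLookupKey(pc, `some::key`, opts)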
+func MuxLookupKey(c hieraapi.ProviderContext, key string, options map[string]px.Value) px.Value { + if pv, ok := options[LookupProvidersKey]; ok { + var rpv *types.RuntimeValue + if rpv, ok = pv.(*types.RuntimeValue); ok { + var pvs []hieraapi.LookupKey + if pvs, ok = rpv.Interface().([]hieraapi.LookupKey); ok { + for _, lk := range pvs { + var result px.Value + if result = lk(c, key, options); result != nil { + return result + } + } + } + } + } + return nil +} diff --git a/vendor/github.com/lyraproj/hiera/provider/yamldata.go b/vendor/github.com/lyraproj/hiera/provider/yamldata.go new file mode 100644 index 0000000..2b28002 --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/yamldata.go @@ -0,0 +1,26 @@ +package provider + +import ( + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/yaml" +) + +func YamlData(ctx hieraapi.ProviderContext, options map[string]px.Value) px.OrderedMap { + pv, ok := options[`path`] + if !ok { + panic(px.Error(hieraapi.MissingRequiredOption, issue.H{`option`: `path`})) + } + path := pv.String() + var bin *types.Binary + if bin, ok = types.BinaryFromFile2(path); ok { + v := yaml.Unmarshal(ctx.Invocation(), bin.Bytes()) + if data, ok := v.(px.OrderedMap); ok { + return data + } + panic(px.Error(hieraapi.YamlNotHash, issue.H{`path`: path})) + } + return px.EmptyMap +} diff --git a/vendor/github.com/lyraproj/hiera/provider/yamllookupkey.go b/vendor/github.com/lyraproj/hiera/provider/yamllookupkey.go new file mode 100644 index 0000000..fefbefb --- /dev/null +++ b/vendor/github.com/lyraproj/hiera/provider/yamllookupkey.go @@ -0,0 +1,25 @@ +package provider + +import ( + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/pcore/px" +) + +var YamlDataKey = `yaml::data` + +// YamlLookupKey is a LookupKey function that uses the YamlData DataHash function to find the data and caches the result. +// It is mainly intended for testing purposes but can also be used as a complete replacement of a Configured hiera +// setup. +func YamlLookupKey(c hieraapi.ProviderContext, key string, options map[string]px.Value) px.Value { + data, ok := c.CachedValue(YamlDataKey) + if !ok { + data = YamlData(c, options) + c.Cache(YamlDataKey, data) + } + hash, _ := data.(px.OrderedMap) + v, ok := hash.Get4(key) + if !ok { + v = nil + } + return v +} diff --git a/vendor/github.com/lyraproj/issue/LICENSE b/vendor/github.com/lyraproj/issue/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/lyraproj/issue/issue/conversions.go b/vendor/github.com/lyraproj/issue/issue/conversions.go new file mode 100644 index 0000000..f54fff9 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/conversions.go @@ -0,0 +1,227 @@ +package issue + +import ( + "bytes" + "fmt" + "math" + "unicode" +) + +// AnOrA returns the non capitalized article for the label of the given argument +func AnOrA(e interface{}) string { + label := Label(e) + return fmt.Sprintf(`%s %s`, Article(label), label) +} + +// UcAnOrA returns the capitalized article for the label of the given argument +func UcAnOrA(e interface{}) string { + label := Label(e) + return fmt.Sprintf(`%s %s`, ArticleUc(label), label) +} + +// CamelToSnakeCase converts a camel cased name like "NameIsBob" to +// its corresponding snake cased "name_is_bob" +func CamelToSnakeCase(name string) string { + b := bytes.NewBufferString(``) + leadIn := true + mlUpper := false + var p rune = -1 + for _, c := range name { + if leadIn && c == '_' { + b.WriteByte('_') + continue + } + r := c + if unicode.IsUpper(r) { + mlUpper = unicode.IsUpper(p) + if !(leadIn || p == '_' || mlUpper) { + b.WriteByte('_') + } + r = unicode.ToLower(r) + } else if mlUpper { + mlUpper = false + if !(leadIn || r == '_') { + b.WriteByte('_') + } + } + b.WriteRune(r) + p = c + leadIn = false + } + return b.String() +} + +// FirstToLower ensures that the first character in a name like "NameIsBob" is lowercase. Leading +//// underscore characters are left as is. +func FirstToLower(name string) string { + b := bytes.NewBufferString(``) + + firstChar := true + for _, c := range name { + if c == '_' { + // Don't alter firstChar status + b.WriteRune(c) + continue + } + if firstChar { + c = unicode.ToLower(c) + firstChar = false + } + b.WriteRune(c) + } + return b.String() +} + +// SnakeToCamelCase converts a snake cased name like "name_is_bob" to +// its corresponding camel cased "NameIsBob" +func SnakeToCamelCase(name string) string { + return snakeToCamelCase(name, true) +} + +// SnakeToCamelCaseDC converts a snake cased name like "name_is_bob" to +// its corresponding camel cased de-capitalized name "nameIsBob". Leading +// underscore characters are left as is. +func SnakeToCamelCaseDC(name string) string { + return snakeToCamelCase(name, false) +} + +func snakeToCamelCase(name string, nextUpper bool) string { + b := bytes.NewBufferString(``) + + nonUnderscorePrefix := true + for _, c := range name { + if c == '_' { + if nonUnderscorePrefix { + b.WriteRune(c) + } else { + nextUpper = true + } + continue + } + if nextUpper { + c = unicode.ToUpper(c) + nextUpper = false + } + b.WriteRune(c) + nonUnderscorePrefix = false + } + return b.String() +} + +// Article returns the non capitalized article for the given string +func Article(s string) string { + if s == `` { + return `a` + } + switch s[0] { + case 'A', 'E', 'I', 'O', 'U', 'Y', 'a', 'e', 'i', 'o', 'u', 'y': + return `an` + default: + return `a` + } +} + +// ArticleUc returns the capitalized article for the given string +func ArticleUc(s string) string { + if s == `` { + return `A` + } + switch s[0] { + case 'A', 'E', 'I', 'O', 'U', 'Y', 'a', 'e', 'i', 'o', 'u', 'y': + return `An` + default: + return `A` + } +} + +// JoinErrors joins a set of errors into a string using newline separators. 
The argument +// can be a Result, a Reported, an error, a string, or a slice of Reported, error, or string, +func JoinErrors(e interface{}) string { + b := bytes.NewBufferString(``) + switch e := e.(type) { + case Result: + for _, err := range e.Issues() { + b.WriteString("\n") + err.ErrorTo(b) + } + case []Reported: + for _, err := range e { + b.WriteString("\n") + err.ErrorTo(b) + } + case []error: + for _, err := range e { + b.WriteString("\n") + b.WriteString(err.Error()) + } + case []string: + for _, err := range e { + b.WriteString("\n") + b.WriteString(err) + } + case Reported: + e.ErrorTo(b) + case error: + b.WriteString(e.Error()) + case string: + b.WriteString(e) + } + return b.String() +} + +// Unindent determines the maximum indent that can be stripped by looking at leading whitespace on all lines. Lines that +// consists entirely of whitespace are not included in the computation. +// Strips first line if it's empty, then strips the computed indent from all lines and returns the result. +// +func Unindent(str string) string { + minIndent := computeIndent(str) + if minIndent == 0 { + return str + } + r := bytes.NewBufferString(str) + b := bytes.NewBufferString("") + first := true + for { + line, err := r.ReadString('\n') + if first { + first = false + if line == "\n" { + continue + } + } + if len(line) > minIndent { + b.WriteString(line[minIndent:]) + } else if err == nil { + b.WriteByte('\n') + } else { + break + } + } + return b.String() +} + +func computeIndent(str string) int { + minIndent := math.MaxInt64 + r := bytes.NewBufferString(str) + for minIndent > 0 { + line, err := r.ReadString('\n') + ll := len(line) + + for wsCount := 0; wsCount < minIndent && wsCount < ll; wsCount++ { + c := line[wsCount] + if c != ' ' && c != '\t' { + if c != '\n' { + minIndent = wsCount + } + break + } + } + if err != nil { + break + } + } + if minIndent == math.MaxInt64 { + minIndent = 0 + } + return minIndent +} diff --git a/vendor/github.com/lyraproj/issue/issue/issue.go b/vendor/github.com/lyraproj/issue/issue/issue.go new file mode 100644 index 0000000..d04baa9 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/issue.go @@ -0,0 +1,146 @@ +package issue + +import ( + "bytes" + "fmt" +) + +var NoArgs = H{} + +// A Code is a unique string representation of an issue. It should be all uppercase +// and words must be separated by underscores, not spaces. +// Since all issues live in the same global namespace, it's recommended that the +// code is prefixed with a package name. +// +// Example: +// +// const EVAL_UNKNOWN_FUNCTION = `EVAL_UNKNOWN_FUNCTION` +// +type Code string + +// An ArgFormatter function, provided to the Issue constructors via the HF map, is +// responsible for formatting a named argument in the format string before the final +// formatting takes place. +// +// Typical formatters are AnOrA or UcAnOrA. Both will prefix the named argument +// with an article. The difference between the two is that UcAnOrA uses a capitalized +// article. +type ArgFormatter func(value interface{}) string + +// A HF is used for passing ArgFormatters to an Issue constructor +type HF map[string]ArgFormatter + +// An Issue is a formal description of a warning or an error. 
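+// A minimal sketch of declaring and formatting an issue (the code and message
+// text are invented for the example):
+//
+//	var exampleIssue = Hard(`EXAMPLE_FAILURE`, `something went wrong`)
+//	b := bytes.NewBufferString(``)
+//	exampleIssue.Format(b, NoArgs)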
+type Issue interface {
+	// ArgFormatters returns the argument formatters or nil if no such
+	// formatters exist
+	ArgFormatters() HF
+
+	// Code returns the issue code
+	Code() Code
+
+	// Format uses the receiver's format string and the given arguments to
+	// write a string onto the given buffer
+	Format(b *bytes.Buffer, arguments H)
+
+	// IsDemotable returns false for soft issues and true for hard issues
+	IsDemotable() bool
+
+	// MessageFormat returns the format used when formatting the receiver
+	MessageFormat() string
+}
+
+type issue struct {
+	code          Code
+	messageFormat string
+	argFormats    HF
+	demotable     bool
+}
+
+var issues = map[Code]*issue{}
+
+// Hard creates a non-demotable Issue with the given code and messageFormat
+func Hard(code Code, messageFormat string) Issue {
+	return addIssue(code, messageFormat, false, nil)
+}
+
+// Hard2 creates a non-demotable Issue with the given code, messageFormat, and
+// argFormatters map.
+func Hard2(code Code, messageFormat string, argFormatters HF) Issue {
+	return addIssue(code, messageFormat, false, argFormatters)
+}
+
+// Soft creates a demotable Issue with the given code and messageFormat
+func Soft(code Code, messageFormat string) Issue {
+	return addIssue(code, messageFormat, true, nil)
+}
+
+// Soft2 creates a demotable Issue with the given code, messageFormat, and
+// argFormatters map.
+func Soft2(code Code, messageFormat string, argFormats HF) Issue {
+	return addIssue(code, messageFormat, true, argFormats)
+}
+
+func (issue *issue) ArgFormatters() HF {
+	return issue.argFormats
+}
+
+func (issue *issue) Code() Code {
+	return issue.code
+}
+
+func (issue *issue) IsDemotable() bool {
+	return issue.demotable
+}
+
+func (issue *issue) MessageFormat() string {
+	return issue.messageFormat
+}
+
+func (issue *issue) Format(b *bytes.Buffer, arguments H) {
+	var args H
+	af := issue.ArgFormatters()
+	if af != nil {
+		args = make(H, len(arguments))
+		for k, v := range arguments {
+			if a, ok := af[k]; ok {
+				v = a(v)
+			}
+			args[k] = v
+		}
+	} else {
+		args = arguments
+	}
+	_, _ = MapFprintf(b, issue.MessageFormat(), args)
+}
+
+// Returns the Issue for a Code.
Will panic if the given code does not represent +// an existing issue +func ForCode(code Code) Issue { + if dsc, ok := issues[code]; ok { + return dsc + } + panic(fmt.Sprintf("internal error: no issue found for issue code '%s'", code)) +} + +// Returns the Issue for a Code together with a bool indicating if the issue was +// found or not +func ForCode2(code Code) (dsc Issue, ok bool) { + dsc, ok = issues[code] + return +} + +func addIssue(code Code, messageFormat string, demotable bool, argFormats HF) Issue { + dsc := &issue{code, messageFormat, argFormats, demotable} + issues[code] = dsc + return dsc +} + +func withIssues(example func()) { + savedIssues := issues + defer func() { + issues = savedIssues + }() + issues = map[Code]*issue{} + example() +} diff --git a/vendor/github.com/lyraproj/issue/issue/label.go b/vendor/github.com/lyraproj/issue/issue/label.go new file mode 100644 index 0000000..0fdc416 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/label.go @@ -0,0 +1,32 @@ +package issue + +import ( + "fmt" +) + +// A Labeled is an object that has a label in the form of a string +type Labeled interface { + // Returns a very brief description of this expression suitable to use in error messages + Label() string +} + +// A Named is an object that has a name in the form of a string +type Named interface { + Name() string +} + +// Label returns the Label for a Labeled argument, the Name for a Named argument, a string +// verbatim, or the resulting text from doing a Sprintf with "value of type %T" for other +// types of arguments. +func Label(e interface{}) string { + if l, ok := e.(Labeled); ok { + return l.Label() + } + if n, ok := e.(Named); ok { + return n.Name() + } + if s, ok := e.(string); ok { + return s + } + return fmt.Sprintf(`value of type %T`, e) +} diff --git a/vendor/github.com/lyraproj/issue/issue/location.go b/vendor/github.com/lyraproj/issue/issue/location.go new file mode 100644 index 0000000..adb793a --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/location.go @@ -0,0 +1,77 @@ +package issue + +import ( + "bytes" + "strconv" +) + +type Location interface { + File() string + + Line() int + + // Position on line + Pos() int +} + +type Located interface { + Location() Location +} + +type location struct { + file string + line int + pos int +} + +func NewLocation(file string, line, pos int) Location { + return &location{file, line, pos} +} + +func (l *location) File() string { + return l.file +} + +func (l *location) Line() int { + return l.line +} + +func (l *location) Pos() int { + return l.pos +} + +func LocationString(location Location) string { + b := bytes.NewBufferString(``) + appendLocation(b, location) + return b.String() +} + +func appendLocation(b *bytes.Buffer, location Location) { + if location == nil { + return + } + file := location.File() + line := location.Line() + if file == `` && line <= 0 { + return + } + + pos := location.Pos() + b.WriteByte('(') + if file != `` { + b.WriteString(`file: `) + b.WriteString(file) + if line > 0 { + b.WriteString(`, `) + } + } + if line > 0 { + b.WriteString(`line: `) + b.WriteString(strconv.Itoa(line)) + if pos > 0 { + b.WriteString(`, column: `) + b.WriteString(strconv.Itoa(pos)) + } + } + b.WriteByte(')') +} diff --git a/vendor/github.com/lyraproj/issue/issue/printf.go b/vendor/github.com/lyraproj/issue/issue/printf.go new file mode 100644 index 0000000..6d0d7a8 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/printf.go @@ -0,0 +1,119 @@ +package issue + +import ( + "bytes" + "fmt" + "io" + 
"unicode/utf8" +) + +type H map[string]interface{} + +type stringReader struct { + i int + text string +} + +func (r *stringReader) next() rune { + if r.i >= len(r.text) { + return 0 + } + c := rune(r.text[r.i]) + if c < utf8.RuneSelf { + r.i++ + return c + } + c, size := utf8.DecodeRuneInString(r.text[r.i:]) + if c == utf8.RuneError { + panic(`Invalid unicode in string`) + } + r.i += size + return c +} + +// MapSprintf calls MapFprintf with a string Buffer and returns string that is output to that buffer +func MapSprintf(formatString string, args H) string { + b := bytes.NewBufferString(``) + _, err := MapFprintf(b, formatString, args) + if err != nil { + panic(err) + } + return b.String() +} + +// MapFprintf is like fmt.Fprintf but it allows named arguments and it assumes a map as the arguments +// that follow the format string. +// +// The notation %{name} maps to the 'name' key of the map and uses the default format (%v) +// The notation %2.2s maps to the 'name' key of the map and uses the %2.2s format. +func MapFprintf(writer io.Writer, formatString string, args H) (int, error) { + posFormatString, argCount, expectedArgs := extractNamesAndLocations(formatString) + posArgs := make([]interface{}, argCount) + for k, v := range expectedArgs { + var a interface{} + if arg, ok := args[k]; ok { + a = arg + } else { + a = fmt.Sprintf(`%%!{%s}(MISSING)`, k) + } + for _, pos := range v { + posArgs[pos] = a + } + } + return fmt.Fprintf(writer, posFormatString, posArgs...) +} + +func extractNamesAndLocations(formatString string) (string, int, map[string][]int) { + b := bytes.NewBufferString(``) + rdr := stringReader{0, formatString} + locations := make(map[string][]int, 8) + c := rdr.next() + location := 0 + for c != 0 { + b.WriteRune(c) + if c != '%' { + c = rdr.next() + continue + } + c = rdr.next() + if c != '{' && c != '<' { + if c != '%' { + panic(fmt.Sprintf(`keyed formats cannot be combined with other %% formats at position %d in string '%s'`, + rdr.i, formatString)) + } + b.WriteRune(c) + c = rdr.next() + continue + } + ec := '}' + bc := c + if bc == '<' { + ec = '>' + } + s := rdr.i + c = rdr.next() + for c != 0 && c != ec { + c = rdr.next() + } + if c == 0 { + panic(fmt.Sprintf(`unterminated %%%c at position %d in string '%s'`, bc, s-2, formatString)) + } + e := rdr.i - 1 + if s == e { + panic(fmt.Sprintf(`empty %%%c%c at position %d in string '%s'`, bc, ec, s-2, formatString)) + } + key := formatString[s:e] + if ps, ok := locations[key]; ok { + locations[key] = append(ps, location) + } else { + locations[key] = []int{location} + } + location++ + if bc == '{' { + // %{} constructs uses default format specifier whereas %<> uses whatever was specified + b.WriteByte('v') + } + c = rdr.next() + } + return b.String(), location, locations +} diff --git a/vendor/github.com/lyraproj/issue/issue/reported.go b/vendor/github.com/lyraproj/issue/issue/reported.go new file mode 100644 index 0000000..4d96277 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/reported.go @@ -0,0 +1,171 @@ +package issue + +import ( + "bytes" + "runtime" + "strconv" +) + +// A Reported instance contains information of an issue such as an +// error or a warning. It contains an Issue and arguments needed to +// format that issue. It also contains the location where the issue +// was reported. 
+type Reported interface {
+	// Argument returns the argument for the given key or nil if no
+	// such argument exists
+	Argument(key string) interface{}
+
+	// Code returns the issue code
+	Code() Code
+
+	// Error produces a string from the receiver's issue and arguments
+	Error() string
+
+	// ErrorTo produces a string from the receiver's issue and arguments
+	// and writes it to the given buffer
+	ErrorTo(*bytes.Buffer)
+
+	// Location returns the location where the issue was reported
+	Location() Location
+
+	// OffsetByLocation returns a copy of the receiver where the location
+	// is offset by the given location. This is useful when the original
+	// source is embedded in another file.
+	OffsetByLocation(location Location) Reported
+
+	// Severity returns the severity
+	Severity() Severity
+
+	// String is an alias for Error
+	String() string
+}
+
+type reported struct {
+	issueCode Code
+	severity  Severity
+	args      H
+	location  Location
+	stack     []runtime.Frame
+}
+
+var includeStacktrace = false
+
+// IncludeStacktrace can be set to true to get all Reported to include a stacktrace.
+func IncludeStacktrace(flag bool) {
+	includeStacktrace = flag
+}
+
+// NewReported creates a new instance of the Reported error with a given Code, Severity, and argument hash. The
+// locOrSkip must either be nil, a Location, or an int denoting the number of frames to skip in a stacktrace,
+// counting from the caller of NewReported.
+func NewReported(code Code, severity Severity, args H, locOrSkip interface{}) Reported {
+	var location Location
+	skip := 0
+	switch locOrSkip := locOrSkip.(type) {
+	case int:
+		skip = locOrSkip
+	case Location:
+		location = locOrSkip
+	}
+
+	skip += 2 // Always skip runtime.Callers and this function
+	r := &reported{code, severity, args, location, nil}
+	if includeStacktrace {
+		// Ask runtime.Callers for up to 100 pcs, including runtime.Callers itself.
+		pc := make([]uintptr, 100)
+		n := runtime.Callers(skip, pc)
+		if n > 0 {
+			pc = pc[:n] // pass only valid pcs to runtime.CallersFrames
+			frames := runtime.CallersFrames(pc)
+			stack := make([]runtime.Frame, 0, n)
+
+			// Loop to get frames.
+			// A fixed number of pcs can expand to an indefinite number of Frames.
+ for { + if frame, more := frames.Next(); more { + stack = append(stack, frame) + } else { + break + } + } + r.stack = stack + } + } + + if r.location == nil { + if r.stack == nil { + // Use first caller we can find with regards to given skip and use it + // as the location + for { + // Start by decrementing to even out the different interpretations of skip between runtime.Caller + // and runtime.Callers + skip-- + if _, f, l, ok := runtime.Caller(skip); ok { + r.location = NewLocation(f, l, 0) + break + } + } + } else { + // Set location to first stack entry + tf := r.stack[0] + r.location = NewLocation(tf.File, tf.Line, 0) + } + } + return r +} + +func (ri *reported) Argument(key string) interface{} { + return ri.args[key] +} + +func (ri *reported) OffsetByLocation(location Location) Reported { + loc := ri.location + if loc == nil { + loc = location + } else { + loc = NewLocation(location.File(), location.Line()+loc.Line(), location.Pos()) + } + return &reported{ri.issueCode, ri.severity, ri.args, loc, ri.stack} +} + +func (ri *reported) Error() (str string) { + b := bytes.NewBufferString(``) + ri.ErrorTo(b) + return b.String() +} + +func (ri *reported) ErrorTo(b *bytes.Buffer) { + ForCode(ri.issueCode).Format(b, ri.args) + if ri.stack != nil { + for _, f := range ri.stack { + b.WriteString("\n at ") + b.WriteString(f.File) + b.WriteByte(':') + b.WriteString(strconv.Itoa(f.Line)) + if f.Function != `` { + b.WriteString(" (") + b.WriteString(f.Function) + b.WriteByte(')') + } + } + } else if ri.location != nil { + b.WriteByte(' ') + appendLocation(b, ri.location) + } +} + +func (ri *reported) Location() Location { + return ri.location +} + +func (ri *reported) String() (str string) { + return ri.Error() +} + +func (ri *reported) Code() Code { + return ri.issueCode +} + +func (ri *reported) Severity() Severity { + return ri.severity +} diff --git a/vendor/github.com/lyraproj/issue/issue/result.go b/vendor/github.com/lyraproj/issue/issue/result.go new file mode 100644 index 0000000..93c34f3 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/result.go @@ -0,0 +1,35 @@ +package issue + +type ( + Result interface { + // Error returns true if errors where found or false if this result + // contains only warnings. + Error() bool + + // Issues returns all errors and warnings. 
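For reference, a minimal sketch of how the pieces of the issue package fit together: an Issue is registered with Hard, a Reported is created with NewReported using the %{name} placeholder syntax handled by MapFprintf, and NewResult (defined in result.go just below, together with the SeverityError constant in severity.go) groups Reported values. The sketch is not part of the vendored patch, and the issue code and message are invented for illustration.

    package main

    import (
    	"fmt"

    	"github.com/lyraproj/issue/issue"
    )

    // exampleUnknownThing is a hypothetical issue code used only for this sketch.
    const exampleUnknownThing = issue.Code(`EXAMPLE_UNKNOWN_THING`)

    func init() {
    	// Register a non-demotable (hard) issue whose message uses a named %{thing} placeholder.
    	issue.Hard(exampleUnknownThing, `cannot find %{thing}`)
    }

    func main() {
    	// A nil locOrSkip makes NewReported derive the location from its caller.
    	r := issue.NewReported(exampleUnknownThing, issue.SeverityError, issue.H{`thing`: `widget`}, nil)
    	fmt.Println(r.Error()) // cannot find widget (file: ..., line: ...)

    	// NewResult groups reported issues; Error is true when any of them has SeverityError.
    	res := issue.NewResult([]issue.Reported{r})
    	fmt.Println(res.Error()) // true
    }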
An empty slice is returned + // when no errors or warnings were generated + Issues() []Reported + } + + result struct { + issues []Reported + } +) + +// NewResult creates a Result from a slice of Reported +func NewResult(issues []Reported) Result { + return &result{issues} +} + +func (pr *result) Error() bool { + for _, i := range pr.issues { + if i.Severity() == SeverityError { + return true + } + } + return false +} + +func (pr *result) Issues() []Reported { + return pr.issues +} diff --git a/vendor/github.com/lyraproj/issue/issue/severity.go b/vendor/github.com/lyraproj/issue/issue/severity.go new file mode 100644 index 0000000..6645914 --- /dev/null +++ b/vendor/github.com/lyraproj/issue/issue/severity.go @@ -0,0 +1,42 @@ +package issue + +import ( + "fmt" +) + +// this would be an enum in most other languages +const ( + SeverityIgnore = Severity(1) + SeverityDeprecation = Severity(2) + SeverityWarning = Severity(3) + SeverityError = Severity(4) +) + +// Severity used in reported issues +type Severity int + +// String returns the severity in lowercase string form +func (severity Severity) String() string { + switch severity { + case SeverityIgnore: + return `ignore` + case SeverityDeprecation: + return `deprecation` + case SeverityWarning: + return `warning` + case SeverityError: + return `error` + default: + panic(fmt.Sprintf(`Illegal severity level: %d`, severity)) + } +} + +// AssertValid checks that the given severity is one of the recognized severities +func (severity Severity) AssertValid() { + switch severity { + case SeverityIgnore, SeverityDeprecation, SeverityWarning, SeverityError: + return + default: + panic(fmt.Sprintf(`Illegal severity level: %d`, severity)) + } +} diff --git a/vendor/github.com/lyraproj/lyra/LICENSE b/vendor/github.com/lyraproj/lyra/LICENSE new file mode 100644 index 0000000..f49a4e1 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
\ No newline at end of file diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/example/start.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/example/start.go new file mode 100644 index 0000000..0858801 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/example/start.go @@ -0,0 +1,41 @@ +package example + +import ( + "github.com/lyraproj/lyra/cmd/goplugin-example/resource" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/annotation" + "github.com/lyraproj/servicesdk/grpc" + "github.com/lyraproj/servicesdk/service" +) + +// Start the example plugin running +func Start() { + pcore.Do(func(c px.Context) { + s := Server(c) + grpc.Serve(c, s) + }) +} + +// Server returns the built server to be served +func Server(c px.Context) *service.Server { + + sb := service.NewServiceBuilder(c, `Example`) + evs := sb.RegisterTypes("Example", + resource.Person{}, + resource.Address{}) + sb.RegisterHandler("Example::PersonHandler", &resource.PersonHandler{}, evs[0]) + + sb.RegisterTypes("Example", + sb.BuildResource(&resource.OwnerRes{}, func(rtb service.ResourceTypeBuilder) { + rtb.ProvidedAttributes(`id`) + rtb.AddRelationship(`mine`, `Example::ContainedRes`, annotation.KindContained, annotation.CardinalityMany, ``, []string{`id`, `ownerId`}) + }), + sb.BuildResource(&resource.ContainedRes{}, func(rtb service.ResourceTypeBuilder) { + rtb.ProvidedAttributes(`id`) + rtb.AddRelationship(`owner`, `Example::OwnerRes`, annotation.KindContainer, annotation.CardinalityOne, ``, []string{`ownerId`, `id`}) + }), + ) + + return sb.Server() +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/main.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/main.go new file mode 100644 index 0000000..23a484e --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/main.go @@ -0,0 +1,7 @@ +package main + +import "github.com/lyraproj/lyra/cmd/goplugin-example/example" + +func main() { + example.Start() +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/resource/resource.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/resource/resource.go new file mode 100644 index 0000000..4b5d3a4 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-example/resource/resource.go @@ -0,0 +1,62 @@ +package resource + +import ( + "github.com/hashicorp/go-hclog" +) + +// Person represents a human +type Person struct { + Name string `puppet:"type=>String, value=>''"` + Age int64 `puppet:"type=>Integer, value=>0"` + Human bool `puppet:"type=>Boolean, value=>false"` + Address *Address `puppet:"type=>Optional[Example::Address], value=>undef"` +} + +// Address type +type Address struct { + LineOne string `puppet:"type=>String, value=>''"` +} + +//OwnerRes type to show parent in parent-child relationships +type OwnerRes struct { + Id *string + Phone string +} + +//ContainedRes type to show child in parent-child relationships +type ContainedRes struct { + Id *string + OwnerId string + Stuff string +} + +// PersonHandler is used to perform CRUD operations on a Person resource +type PersonHandler struct{} + +// Create a new person resource +func (*PersonHandler) Create(desiredState *Person) (*Person, string, error) { + hclog.Default().Debug("Creating person", "desiredState", desiredState) + return desiredState, "12345", nil +} + +// Read an existing person resource +func (*PersonHandler) Read(externalID string) (*Person, error) { + hclog.Default().Debug("Reading person", "externalID", 
externalID) + return &Person{ + Name: "Alice", + Age: 32, + }, nil +} + +// Update an existing persn resource +func (*PersonHandler) Update(externalID string, desiredState *Person) *Person { + hclog.Default().Debug("Updating person", "externalID", externalID, "desiredState", desiredState) + desiredState.Age = 33 + return desiredState +} + +// Delete an existing person resource +func (*PersonHandler) Delete(externalID string) error { + hclog.Default().Debug("Deleting person:", "externalID", externalID) + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/foobernetes/start.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/foobernetes/start.go new file mode 100644 index 0000000..0bfa703 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/foobernetes/start.go @@ -0,0 +1,33 @@ +package foobernetes + +import ( + "github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/grpc" + "github.com/lyraproj/servicesdk/service" +) + +// Start the Foobernetes example plugin running +func Start() { + pcore.Do(func(c px.Context) { + s := Server(c) + grpc.Serve(c, s) + }) +} + +// Server returns the built server to be served +func Server(c px.Context) *service.Server { + sb := service.NewServiceBuilder(c, "Foobernetes") + + evs := sb.RegisterTypes("Foobernetes", resource.LoadBalancer{}) + sb.RegisterHandler("Foobernetes::LoadBalancerHandler", &resource.LoadBalancerHandler{}, evs[0]) + + evs = sb.RegisterTypes("Foobernetes", resource.WebServer{}) + sb.RegisterHandler("Foobernetes::WebServerHandler", &resource.WebServerHandler{}, evs[0]) + + evs = sb.RegisterTypes("Foobernetes", resource.Instance{}) + sb.RegisterHandler("Foobernetes::InstanceHandler", &resource.InstanceHandler{}, evs[0]) + + return sb.Server() +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/main.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/main.go new file mode 100644 index 0000000..2630310 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/main.go @@ -0,0 +1,33 @@ +package main + +import ( + "os" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/lyra/cmd/goplugin-foobernetes/foobernetes" +) + +func init() { + // Tell issue reporting to amend all errors with a stack trace + if hclog.DefaultOptions.Level <= hclog.Debug { + issue.IncludeStacktrace(true) + } + // Configuring hclog like this allows Lyra to handle log levels automatically + hclog.DefaultOptions = &hclog.LoggerOptions{ + Name: "Foobernetes", + Level: hclog.LevelFromString(os.Getenv("LYRA_LOG_LEVEL")), + JSONFormat: true, + IncludeLocation: false, + Output: os.Stderr, + } +} + +func main() { + log := hclog.Default() + log.Debug("This is an example debug message") + log.Info("This is an example info message") + // log.Warn("This is an example warn message") + // log.Error("This is an example error message") + foobernetes.Start() +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/instance.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/instance.go new file mode 100644 index 0000000..fa8a87e --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/instance.go @@ -0,0 +1,93 @@ +package resource + +import ( + "fmt" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/serviceapi" +) + +var ip int + +// 
Instance is a virtual machine on which the app or database can be deployed +type Instance struct { + InstanceID *string + InstanceIP *string + Location *string + Image string + Config *map[string]string + Cpus int + Memory string +} + +// InstanceHandler is used to perform CRUD operations on a Instance resource +type InstanceHandler struct{} + +// Create a new resource +func (*InstanceHandler) Create(desiredState *Instance) (*Instance, string, error) { + hclog.Default().Debug("Creating Instance", "desiredState", desiredState) + + // The cloud creates the resource and allocates an ID that can be used to read it in the future + d := loadFakeCloudData() + defer saveFakeCloudData(d) + cloudAllocatedID := fmt.Sprintf("i-%d", randomInt()) + d.Instances[cloudAllocatedID] = desiredState + + // Update the desired state with values provided by the cloud + ip++ + ipAddress := fmt.Sprintf("10.0.0.%d", ip) + desiredState.InstanceID = &cloudAllocatedID + desiredState.InstanceIP = &ipAddress + + return desiredState, cloudAllocatedID, nil +} + +// Read an existing resource +func (*InstanceHandler) Read(externalID string) (*Instance, error) { + hclog.Default().Debug("Reading Instance", "externalID", externalID) + + // Read the actual state of the resource from the cloud + // The external ID passed here is the same one that is returned at creation time + d := loadFakeCloudData() + actualState, ok := d.Instances[externalID] + if !ok { + return nil, serviceapi.NotFound("Instance", externalID) + } + + return actualState, nil +} + +// Update an existing resource +func (*InstanceHandler) Update(externalID string, desiredState *Instance) (*Instance, error) { + hclog.Default().Debug("Updating Instance", "externalID", externalID, "desiredState", desiredState) + + // The cloud updates the resource based on its ID + // Update is not allowed to change the external ID + d := loadFakeCloudData() + defer saveFakeCloudData(d) + + // Update the desired state with values provided by the cloud + actualState, ok := d.Instances[externalID] + if !ok { + return nil, serviceapi.NotFound("Instance", externalID) + } + desiredState.InstanceID = actualState.InstanceID + desiredState.InstanceIP = actualState.InstanceIP + d.Instances[externalID] = desiredState + + return desiredState, nil +} + +// Delete an existing resource +func (*InstanceHandler) Delete(externalID string) error { + hclog.Default().Debug("Deleting Instance:", "externalID", externalID) + + // The cloud deletes the resource based on its ID + d := loadFakeCloudData() + if _, ok := d.Instances[externalID]; ok { + defer saveFakeCloudData(d) + delete(d.Instances, externalID) + } + + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/loadbalancer.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/loadbalancer.go new file mode 100644 index 0000000..32ad88f --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/loadbalancer.go @@ -0,0 +1,86 @@ +package resource + +import ( + "fmt" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/serviceapi" +) + +// LoadBalancer distributes traffic to web servers +type LoadBalancer struct { + LoadBalancerID *string + LoadBalancerIP *string + Location *string + Replica *bool + WebServerIDs []string + Tags *map[string]string +} + +// LoadBalancerHandler is used to perform CRUD operations on a LoadBalancer resource +type LoadBalancerHandler struct{} + +// Create a new resource +func (*LoadBalancerHandler) Create(desiredState 
*LoadBalancer) (*LoadBalancer, string, error) { + hclog.Default().Debug("Creating LoadBalancer", "desiredState", desiredState) + + // The cloud creates the resource and allocates an ID that can be used to read it in the future + d := loadFakeCloudData() + defer saveFakeCloudData(d) + cloudAllocatedID := fmt.Sprintf("lb-%d", randomInt()) + d.LoadBalancers[cloudAllocatedID] = desiredState + + // Update the desired state with values provided by the cloud + desiredState.LoadBalancerID = &cloudAllocatedID + + return desiredState, cloudAllocatedID, nil +} + +// Read an existing resource +func (*LoadBalancerHandler) Read(externalID string) (*LoadBalancer, error) { + hclog.Default().Debug("Reading LoadBalancer", "externalID", externalID) + + // Read the actual state of the resource from the cloud + // The external ID passed here is the same one that is returned at creation time + d := loadFakeCloudData() + actualState, ok := d.LoadBalancers[externalID] + if !ok { + return nil, serviceapi.NotFound("LoadBalancer", externalID) + } + + return actualState, nil +} + +// Update an existing resource +func (*LoadBalancerHandler) Update(externalID string, desiredState *LoadBalancer) (*LoadBalancer, error) { + hclog.Default().Debug("Updating LoadBalancer", "externalID", externalID, "desiredState", desiredState) + + // The cloud updates the resource based on its ID + // Update is not allowed to change the external ID + d := loadFakeCloudData() + defer saveFakeCloudData(d) + + // Update the desired state with values provided by the cloud + actualState, ok := d.LoadBalancers[externalID] + if !ok { + return nil, serviceapi.NotFound("LoadBalancer", externalID) + } + desiredState.LoadBalancerID = actualState.LoadBalancerID + d.LoadBalancers[externalID] = desiredState + + return desiredState, nil +} + +// Delete an existing resource +func (*LoadBalancerHandler) Delete(externalID string) error { + hclog.Default().Debug("Deleting LoadBalancer:", "externalID", externalID) + + // The cloud deletes the resource based on its ID + d := loadFakeCloudData() + if _, ok := d.LoadBalancers[externalID]; ok { + defer saveFakeCloudData(d) + delete(d.LoadBalancers, externalID) + } + + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/state.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/state.go new file mode 100644 index 0000000..32c3037 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/state.go @@ -0,0 +1,53 @@ +package resource + +import ( + "encoding/json" + "io/ioutil" + "math/rand" + "os" + "time" +) + +type deployment struct { + LoadBalancers map[string]*LoadBalancer + WebServers map[string]*WebServer + Instances map[string]*Instance +} + +var filename = "deployment.json" + +func loadFakeCloudData() deployment { + var d deployment + bs, err := ioutil.ReadFile(filename) + if err != nil { + if os.IsNotExist(err) { + d.LoadBalancers = make(map[string]*LoadBalancer) + d.WebServers = make(map[string]*WebServer) + d.Instances = make(map[string]*Instance) + return d + } + panic(err) + } + err = json.Unmarshal(bs, &d) + if err != nil { + panic(err) + } + return d +} + +func saveFakeCloudData(d deployment) { + bs, err := json.MarshalIndent(&d, "", " ") + if err != nil { + panic(err) + } + err = ioutil.WriteFile(filename, bs, os.ModePerm) + if err != nil { + panic(err) + } +} + +func randomInt() int { + s1 := rand.NewSource(time.Now().UnixNano()) + r1 := rand.New(s1) + return r1.Int() +} diff --git 
a/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/webserver.go b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/webserver.go new file mode 100644 index 0000000..0f440b9 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/goplugin-foobernetes/resource/webserver.go @@ -0,0 +1,83 @@ +package resource + +import ( + "fmt" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/serviceapi" +) + +// WebServer handles HTTP requests targetting the app +type WebServer struct { + WebServerID *string + Port int + AppServers []string +} + +// WebServerHandler is used to perform CRUD operations on a WebServer resource +type WebServerHandler struct{} + +// Create a new resource +func (*WebServerHandler) Create(desiredState *WebServer) (*WebServer, string, error) { + hclog.Default().Debug("Creating WebServer", "desiredState", desiredState) + + // The cloud creates the resource and allocates an ID that can be used to read it in the future + d := loadFakeCloudData() + defer saveFakeCloudData(d) + cloudAllocatedID := fmt.Sprintf("ws-%d", randomInt()) + d.WebServers[cloudAllocatedID] = desiredState + + // Update the desired state with values provided by the cloud + desiredState.WebServerID = &cloudAllocatedID + + return desiredState, cloudAllocatedID, nil +} + +// Read an existing resource +func (*WebServerHandler) Read(externalID string) (*WebServer, error) { + hclog.Default().Debug("Reading WebServer", "externalID", externalID) + + // Read the actual state of the resource from the cloud + // The external ID passed here is the same one that is returned at creation time + d := loadFakeCloudData() + actualState, ok := d.WebServers[externalID] + if !ok { + return nil, serviceapi.NotFound("WebServer", externalID) + } + + return actualState, nil +} + +// Update an existing resource +func (*WebServerHandler) Update(externalID string, desiredState *WebServer) (*WebServer, error) { + hclog.Default().Debug("Updating WebServer", "externalID", externalID, "desiredState", desiredState) + + // The cloud updates the resource based on its ID + // Update is not allowed to change the external ID + d := loadFakeCloudData() + defer saveFakeCloudData(d) + + // Update the desired state with values provided by the cloud + actualState, ok := d.WebServers[externalID] + if !ok { + return nil, serviceapi.NotFound("WebServer", externalID) + } + desiredState.WebServerID = actualState.WebServerID + d.WebServers[externalID] = desiredState + + return desiredState, nil +} + +// Delete an existing resource +func (*WebServerHandler) Delete(externalID string) error { + hclog.Default().Debug("Deleting WebServer:", "externalID", externalID) + + // The cloud deletes the resource based on its ID + d := loadFakeCloudData() + if _, ok := d.WebServers[externalID]; ok { + defer saveFakeCloudData(d) + delete(d.WebServers, externalID) + } + + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/apply.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/apply.go new file mode 100644 index 0000000..9857e01 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/apply.go @@ -0,0 +1,44 @@ +package cmd + +import ( + "os" + + "github.com/lyraproj/lyra/pkg/apply" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/spf13/cobra" + + // Ensure that types created by the go lyra package are loaded + _ "github.com/lyraproj/servicesdk/lang/go/lyra" +) + +var homeDir string + +// NewApplyCmd returns the apply subcommand used to evaluate and apply 
steps. //TODO: (JD) Does 'apply' even make sense for what this does now? +func NewApplyCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("apply "), + Short: gotext.Get("Execute a Lyra workflow"), + Long: gotext.Get("Execute a Lyra workflow"), + Example: gotext.Get("\n # Execute a workflow\n lyra apply sample\n\n # Execute a workflow using external variable data\n lyra apply sample --data data.yaml"), + Run: runApplyCmd, + Args: cobra.ExactArgs(1), + } + + cmd.Flags().StringVarP(&homeDir, "root", "r", "", gotext.Get("path to root directory")) + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runApplyCmd(cmd *cobra.Command, args []string) { + applicator := apply.NewApplicator(homeDir, dlvConfig) + workflowName := args[0] + exitCode := applicator.ApplyWorkflow(workflowName) + if exitCode != 0 { + os.Exit(exitCode) + } +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/delete.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/delete.go new file mode 100644 index 0000000..f462b41 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/delete.go @@ -0,0 +1,39 @@ +package cmd + +import ( + "os" + + "github.com/lyraproj/lyra/pkg/apply" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/spf13/cobra" +) + +// NewDeleteCmd returns the delete subcommand used to delete steps. +func NewDeleteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("delete "), + Short: gotext.Get("Delete the resources created by a Lyra workflow"), + Long: gotext.Get("Delete the resources created by a Lyra workflow"), + Example: gotext.Get("\n # Delete the resources created by a Lyra workflow\n lyra delete sample\n"), + Run: runDeleteCmd, + Args: cobra.ExactArgs(1), + } + + cmd.Flags().StringVarP(&homeDir, "root", "r", "", gotext.Get("path to root directory")) + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runDeleteCmd(cmd *cobra.Command, args []string) { + applicator := apply.NewApplicator(homeDir, dlvConfig) + workflowName := args[0] + exitCode := applicator.DeleteWorkflow(workflowName) + if exitCode != 0 { + os.Exit(exitCode) + } +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/embedded.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/embedded.go new file mode 100644 index 0000000..3829b8e --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/embedded.go @@ -0,0 +1,32 @@ +package cmd + +import ( + "os" + + "github.com/lyraproj/lyra/pkg/logger" + "github.com/spf13/cobra" +) + +// EmbeddedPluginCmd runs embedded plugins +func EmbeddedPluginCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "plugin", + Hidden: true, + Run: startPlugin, + Args: cobra.ExactArgs(1), + } + + cmd.SetHelpTemplate(cmd.HelpTemplate()) + + return cmd +} + +func startPlugin(cmd *cobra.Command, args []string) { + name := args[0] + switch name { + // Embedded plugins go here + default: + logger.Get().Error("Unknown embedded plugin", "name", name) + os.Exit(1) + } +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/generate.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/generate.go new file mode 100644 index 0000000..b698c2f --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/generate.go @@ -0,0 +1,40 @@ +package cmd + +import ( + "os" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/generate" + "github.com/spf13/cobra" +) + +var 
targetDirectory = `` + +//NewGenerateCmd generates typesets in the languge of choice +func NewGenerateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("generate "), + Short: gotext.Get("Generate all typesets in the target language"), + Long: gotext.Get("Generate all typesets in the target language"), + Example: gotext.Get("\n # Generate all typesets in typescript\n lyra generate typescript\n"), + Run: runGenerateCmd, + Args: cobra.ExactArgs(1), + } + + cmd.Flags().StringVarP(&homeDir, "root", "r", "", gotext.Get("path to root directory")) + cmd.Flags().StringVarP(&targetDirectory, "target-directory", "t", "", gotext.Get("path to target directory")) + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runGenerateCmd(cmd *cobra.Command, args []string) { + exitCode := generate.Generate(args[0], targetDirectory) + if exitCode == 0 { + ui.ShowMessage("Generation complete") + } + os.Exit(exitCode) +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/root.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/root.go new file mode 100644 index 0000000..0e8a936 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/root.go @@ -0,0 +1,77 @@ +package cmd + +import ( + "fmt" + "os" + + "github.com/lyraproj/issue/issue" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/logger" + "github.com/lyraproj/lyra/pkg/version" + "github.com/mgutz/ansi" + "github.com/spf13/cobra" +) + +var ( + debug bool + loglevel string + dlvConfig string +) + +// NewRootCmd returns the root command +func NewRootCmd(subCommands ...*cobra.Command) *cobra.Command { + + gotext.Configure("locales", "en_US", "default") + + cmd := &cobra.Command{ + Use: gotext.Get("lyra "), + Short: gotext.Get("Lyra - Provision and manage cloud native infrastructure"), + Long: gotext.Get("Lyra - Provision and manage cloud native infrastructure.\n Find more information at: https://github.com/lyraproj/lyra"), + Run: runHelp, + PersistentPreRun: initialiseTool, + Version: fmt.Sprintf("%v", version.Get()), + } + + cmd.PersistentFlags().BoolVar(&debug, "debug", false, gotext.Get("Sets log level to debug")) + cmd.PersistentFlags().StringVar(&loglevel, "loglevel", "", gotext.Get("Set log level to error, warn, info or debug")) + cmd.PersistentFlags().StringVar(&dlvConfig, "dlv", "", gotext.Get("Tell remote process to stop and listen for debugger to attach")) + + cmd.SetHelpTemplate(ansi.Blue + version.LogoFiglet + ansi.Reset + ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + cmd.AddCommand( + NewVersionCmd(), + NewApplyCmd(), + NewDeleteCmd(), + // NewValidateCmd(), + NewGenerateCmd(), + // cmd.AddCommand(EmbeddedPluginCmd()) + ) + if len(subCommands) > 0 { + cmd.AddCommand(subCommands...) 
+ } + + return cmd +} + +func runHelp(cmd *cobra.Command, args []string) { + err := cmd.Help() + if err != nil { + panic(err) + } +} + +func initialiseTool(cmd *cobra.Command, args []string) { + if debug { + loglevel = "debug" + issue.IncludeStacktrace(true) + } + spec := logger.Spec{ + Name: "lyra", + Level: loglevel, + Output: os.Stderr, + } + logger.Initialise(spec) +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/validate.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/validate.go new file mode 100644 index 0000000..2b85179 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/validate.go @@ -0,0 +1,81 @@ +package cmd + +import ( + "os" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/logger" + "github.com/spf13/cobra" +) + +// NewValidateCmd returns the validate subcommand used to syntactically validate manifests. +func NewValidateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("validate "), + Short: gotext.Get("Validate a single workflow file"), + Long: gotext.Get("Validate a single workflow file"), + Example: gotext.Get("\n lyra validate plugins/example.pp"), + Run: runValidate, + Args: cobra.ExactArgs(1), + } + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runValidate(cmd *cobra.Command, args []string) { + log := logger.Get() + + log.Debug("validating manifest") + + err := validateFile(args[0]) + + if err != nil { + switch err.(type) { + case issue.Reported: + ui.ValidationFailure(err) + default: + ui.ValidationError(err) + } + os.Exit(1) + } + ui.ValidationSuccess() +} + +func validateFile(path string) error { + log := logger.Get() + + log.Debug("validating manifest") + + log.Debug("reading manifest", "path", path) + // manifest, err := ioutil.ReadFile(path) + // if err != nil { + // return fmt.Errorf("unable to read manifest: %v", err) + // } + + // lang, err := getLanguage(path) + // if err != nil { + // return err + // } + + // err = pcore.DoWithParent(pcore.RootContext(), func(c px.Context) error { + // var err error + // switch lang { + // case "Puppet": + // log.Debug(" dsl") + // c.ParseAndValidate(path, string(manifest), false) + // case "YAML": + // log.Debug("validating yaml") + // yaml2ast.YamlToAST(c, path, manifest) + // default: + // return fmt.Errorf("Unable to determine manifest language") + // } + // return err + // }) + + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/version.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/version.go new file mode 100644 index 0000000..996bad3 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/cmd/version.go @@ -0,0 +1,34 @@ +package cmd + +import ( + "fmt" + + "github.com/leonelquinteros/gotext" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/version" + "github.com/spf13/cobra" +) + +// NewVersionCmd returns the version subcommand +func NewVersionCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: gotext.Get("version"), + Short: gotext.Get("Show the current Lyra version"), + Long: gotext.Get("Show the current Lyra version"), + Run: runVersion, + } + + cmd.SetHelpTemplate(ui.HelpTemplate) + cmd.SetUsageTemplate(ui.UsageTemplate) + + return cmd +} + +func runVersion(cmd *cobra.Command, args []string) { + fmt.Printf("%v\n", prettyPrintVersion()) +} + +func prettyPrintVersion() string { + v := version.Get() + return 
fmt.Sprintf("Tag:\t\t%s\nCommit:\t\t%s\nBuildTime:\t%s", v.BuildTag, v.BuildSHA, v.BuildTime) +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/main.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/main.go new file mode 100644 index 0000000..20b7945 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/main.go @@ -0,0 +1,15 @@ +package main + +import ( + "fmt" + "os" + + "github.com/lyraproj/lyra/cmd/lyra/cmd" +) + +func main() { + if err := cmd.NewRootCmd().Execute(); err != nil { + _, _ = fmt.Fprintln(os.Stderr, err) + os.Exit(-1) + } +} diff --git a/vendor/github.com/lyraproj/lyra/cmd/lyra/ui/ui.go b/vendor/github.com/lyraproj/lyra/cmd/lyra/ui/ui.go new file mode 100644 index 0000000..d9cf8f5 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/cmd/lyra/ui/ui.go @@ -0,0 +1,142 @@ +package ui + +import ( + "bufio" + "fmt" + "log" + "os" + "strings" + "time" + + "github.com/mgutz/ansi" +) + +func init() { + // Probably should just not use the log package, but ¯\_(ツ)_/¯ + log.SetFlags(log.Flags() &^ (log.Ldate | log.Ltime)) + // FIXME: These messages should be suppressed for +} + +// Message prepends messages about what we are going to do +// with colour and an informative label +func Message(kind string, message interface{}) { + switch kind { + case "resource": + log.Println(ansi.Green+"[set resource]"+ansi.Reset, message) + // Generic error + case "error": + log.Println(ansi.Red+"[error]"+ansi.Reset, message) + default: + log.Println(message) + } +} + +// ShowMessage prints an attractive message to STDOUT +func ShowMessage(params ...string) { + var action = "" + var msg = "" + if len(params) > 1 { + msg = params[1] + } + if len(params) > 0 { + action = params[0] + } + log.Println("\n"+ansi.Green+"▸ "+action+ansi.Reset, msg+"\n") +} + +// ShowMessage prints an attractive message to STDOUT +func ShowError(params ...string) { + var action = "" + var msg = "" + if len(params) > 1 { + msg = params[1] + } + if len(params) > 0 { + action = params[0] + } + log.Println("\n"+ansi.Red+"▸ "+action+ansi.Reset, msg+"\n") +} + +// AskForConfirmation presents a blocking choice to users +func AskForConfirmation(s string) bool { + // Quiet implies yes. This might not be the right choice. + reader := bufio.NewReader(os.Stdin) + + for { + fmt.Printf("%s [y/n]: ", s) + + response, err := reader.ReadString('\n') + if err != nil { + log.Fatal(err) + } + + response = strings.ToLower(strings.TrimSpace(response)) + + if response == "y" || response == "yes" { + return true + } else if response == "n" || response == "no" { + return false + } + } +} + +// Delay artifically slows down execution +func Delay(durationMs int) { + time.Sleep(time.Duration(durationMs) * time.Millisecond) +} + +// ValidationFailure pretty prints a validation failure message +func ValidationFailure(err error) { + fmt.Fprintln(os.Stderr, ansi.Red+"▸ Manifest Invalid "+ansi.Reset+err.Error()) +} + +// ValidationSuccess pretty prints a validation success message +func ValidationSuccess() { + fmt.Fprintln(os.Stderr, ansi.Green+"▸ Manifest Valid "+ansi.Reset) +} + +// ValidationError pretty prints a validation error message +func ValidationError(err error) { + fmt.Fprintln(os.Stderr, ansi.Red+"▸ Error validating manifest "+ansi.Reset+err.Error()) +} + +// HelpTemplate is helpful +// Inspired by https://github.com/kubernetes/kompose/blob/master/cmd/convert.go +// Remember ALL the whitespace is significant! 
+// TODO: Externalise this whole thing +var HelpTemplate = `Description: + {{rpad .Long 10}} + +Usage:{{if .Runnable}}{{if .HasAvailableFlags}} + {{appendIfNotPresent .UseLine "[flags]"}}{{else}}{{.UseLine}}{{end}}{{end}}{{if gt .Aliases 0}} + +Aliases: + {{.NameAndAliases}}{{end}}{{if .HasExample }} + +Examples: + {{ .Example }}{{end}}{{ if .HasAvailableSubCommands}} + +Available Commands:{{range .Commands}}{{if .IsAvailableCommand}} + {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{ if .HasAvailableLocalFlags}} + +Flags: +{{.LocalFlags.FlagUsages | trimRightSpace}}{{end}}{{ if .HasAvailableInheritedFlags}} + +Global Flags: +{{.InheritedFlags.FlagUsages | trimRightSpace}}{{end}}{{if .HasHelpSubCommands}} + +Additional help topics:{{range .Commands}}{{if .IsHelpCommand}} +{{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}} +` + +// UsageTemplate is similar to HelpTemplate, but sticks to brief usage. +// Remember ALL the whitespace is significant! +// TODO: Externalise this whole thing +var UsageTemplate = `Usage:{{if .Runnable}}{{if .HasAvailableFlags}} + {{appendIfNotPresent .UseLine "[flags]"}}{{else}}{{.UseLine}}{{end}}{{end}}{{ if .HasAvailableLocalFlags}} + +Flags: +{{.LocalFlags.FlagUsages | trimRightSpace}}{{end}} + +See '{{.CommandPath}} --help' for more help and examples. +` diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/aws/aws.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/aws/aws.go new file mode 100644 index 0000000..fc9bf5f --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/aws/aws.go @@ -0,0 +1,121 @@ +package main + +import ( + "fmt" + + "github.com/lyraproj/lyra/examples/go-samples/types/aws" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/lang/go/lyra" +) + +func main() { + lyra.Serve(`aws_go`, func(c px.Context) { aws.InitTypes(c) }, &lyra.Workflow{ + Parameters: struct { + Tags map[string]string `lookup:"aws.tags"` + }{}, + Return: struct{ VpcId string }{}, + + Steps: map[string]lyra.Step{ + `lyraIamRole`: &lyra.Resource{ + State: func() *aws.IamRole { + return &aws.IamRole{ + Name: lyra.StringPtr(`lyra-iam-role`), + AssumeRolePolicy: `{ + "Version": "2012-10-17", + "Statement": [ + { + "Action": "sts:AssumeRoleWithSAML", + "Effect": "Allow", + "Condition": { + "StringEquals": { + "SAML:aud": "https://signin.aws.amazon.com/saml" + } + }, + "Principal": { + "Federated": "arn:aws:iam::1234567890:saml-provider/myidp" + } + } + ]}`} + }}, + + `vpc`: &lyra.Resource{ + Return: struct{ VpcId string }{}, + State: func(input struct{ Tags map[string]string }) *aws.Vpc { + return &aws.Vpc{ + CidrBlock: `192.168.0.0/16`, + Tags: &input.Tags} + }}, + + `routeTable`: &lyra.Resource{ + State: func(input struct{ VpcId string }) *aws.RouteTable { + return &aws.RouteTable{ + VpcId: input.VpcId, + Tags: &map[string]string{ + `Name`: `lyra-routetable`, + `created_by`: `lyra`}} + }}, + + `securityGroup`: &lyra.Resource{ + State: func(input struct{ VpcId string }) *aws.SecurityGroup { + return &aws.SecurityGroup{ + Name: lyra.StringPtr(`lyra`), + VpcId: &input.VpcId, + Ingress: &[]aws.GroupEgressIngress{ + { + FromPort: 0, + ToPort: 0, + Protocol: `-1`, + CidrBlocks: &[]string{`0.0.0.0/0`}, + }}, + Egress: &[]aws.GroupEgressIngress{ + { + FromPort: 0, + ToPort: 0, + Protocol: `-1`, + CidrBlocks: &[]string{`0.0.0.0/0`}}}} + }}, + + // The 'subnets' workflow runs 2 times and creates a Subnet with an Instance + `subnets`: &lyra.Collect{ + Times: 2, + As: `Index`, + Return: `SubnetIds`, + + 
Step: &lyra.Workflow{ + Parameters: struct { + VpcId string // Import VpcId from parent workflow + Index int // Index is the collector variable + }{}, + Return: struct{ SubnetId string }{}, + Steps: map[string]lyra.Step{ + `subnet`: &lyra.Resource{ + Return: struct{ SubnetId string }{}, + State: func(input struct { + VpcId string + Index int + }) *aws.Subnet { + return &aws.Subnet{ + VpcId: input.VpcId, + CidrBlock: fmt.Sprintf(`192.168.%d.0/24`, input.Index), + Tags: &map[string]string{ + `Name`: fmt.Sprintf(`lyra-subnet-%d`, input.Index), + `created_by`: `lyra`}} + }}, + + `instance`: &lyra.Resource{ + State: func(input struct { + SubnetId string + Index int + }) *aws.Instance { + return &aws.Instance{ + InstanceType: `t2.nano`, + Ami: `ami-f90a4880`, + SubnetId: &input.SubnetId, + Tags: &map[string]string{ + `Name`: fmt.Sprintf(`lyra-instance-%d`, input.Index), + `created_by`: `lyra`}} + }}, + }}, + }}, + }) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/declarative/declarative.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/declarative/declarative.go new file mode 100644 index 0000000..43b7e65 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/declarative/declarative.go @@ -0,0 +1,69 @@ +package main + +import ( + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/lyra/examples/go-samples/types/foobernetes" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/lang/go/lyra" + "github.com/mgutz/ansi" +) + +func main() { + // Workflow input is from Hiera and a constant (declared by annotations in the In struct). + lyra.Serve(`declarative_go`, func(c px.Context) { foobernetes.InitTypes(c) }, &lyra.Workflow{ + Steps: map[string]lyra.Step{ + // The Resources here uses anonymous structs to describe input and output. They can of + // course also use named structs. 
+ + // Order is determined by input/output dependencies + `webserver`: &lyra.Resource{ + Return: struct{ WebServerID string }{}, + State: func(input struct{ AppServerID string }) *foobernetes.WebServer { + return &foobernetes.WebServer{ + Port: 8080, + AppServers: []string{input.AppServerID}} + }}, + + `appserver`: &lyra.Resource{ + Return: struct { + AppServerID string `alias:"InstanceID"` + }{}, + State: func(input struct{ DatabaseID string }) *foobernetes.Instance { + return &foobernetes.Instance{ + Location: lyra.StringPtr(`eu1`), + Image: `lyra::application`, + Config: &map[string]string{ + `name`: `appserver1xxx`, + `databaseID`: input.DatabaseID}, + Cpus: 4, + Memory: `8G`} + }}, + + `database`: &lyra.Resource{ + Return: struct { + DatabaseID string `alias:"InstanceID"` + }{}, + State: func() *foobernetes.Instance { + return &foobernetes.Instance{ + Location: lyra.StringPtr(`eu1`), + Image: `lyra::database`, + Cpus: 16, + Memory: `64G`} + }}, + + `notifyAppServer`: &lyra.Action{ + Do: func(input struct{ AppServerID string }) { + hclog.Default().Info(ansi.Green+"The AppServer is now created!"+ansi.Reset, "ID", input.AppServerID) + }}, + + `notifyDatabase`: &lyra.Action{ + Do: func(input struct{ DatabaseID string }) { + hclog.Default().Info(ansi.Green+"The Database is now created!"+ansi.Reset, "ID", input.DatabaseID) + }}, + + `notifyWebServer`: &lyra.Action{ + Do: func(input struct{ WebServerID string }) { + hclog.Default().Info(ansi.Green+"The WebServer is now created!"+ansi.Reset, "ID", input.WebServerID) + }}, + }}) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/imperative/imperative.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/imperative/imperative.go new file mode 100644 index 0000000..94a61f2 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/imperative/imperative.go @@ -0,0 +1,62 @@ +package main + +import ( + "fmt" + "strconv" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/lang/go/lyra" +) + +func main() { + type out struct { + F string + } + + // Workflow input is from Hiera and a constant (declared by annotations in the In struct). + lyra.Serve(`imperative_go`, nil, &lyra.Workflow{ + Parameters: struct { + A string `lookup:"golang.hello"` + B int `value:"5"` + }{}, + Return: out{}, + Steps: map[string]lyra.Step{ + + // The Actions here uses anonymous structs to describe input and output. They can of + // course also use named structs. + + // First and second will execute in parallel + `first`: &lyra.Collect{ + // The step will be executed five times in parallel and the result will be collected + // into an output variable named 'C' which is a slice with five elements of the output from + // the producer. 
+ Times: 5, + As: struct{ Idx int }{}, + Return: `C`, + + Step: &lyra.Action{ + Do: func(input struct { + A string + Idx int + }) struct{ C string } { + hclog.Default().Info("first", "A", input.A, "Idx", input.Idx) + return struct{ C string }{input.A + ` #` + strconv.Itoa(input.Idx)} + }}}, + + `second`: &lyra.Action{ + Do: func(input struct{ B int }) struct{ D int } { + hclog.Default().Info("second", "B", input.B) + return struct{ D int }{input.B} + }}, + + // Third takes input from first and second + `third`: &lyra.Action{ + Do: func(input struct { + C []string + D int + }) out { + hclog.Default().Info("third", "C", input.C, "D", input.D) + return out{fmt.Sprintf("%s - %d", strings.Join(input.C, `, `), input.D)} + }}}}) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/referee/referee.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/referee/referee.go new file mode 100644 index 0000000..8dfb9b3 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/referee/referee.go @@ -0,0 +1,31 @@ +package main + +import ( + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/lang/go/lyra" +) + +func main() { + lyra.Serve(`referee_go`, nil, &lyra.Workflow{ + Steps: map[string]lyra.Step{ + `prolog`: &lyra.Action{ + Do: func() struct{ A string } { + hclog.Default().Info(`prolog`) + return struct{ A string }{`value from prolog`} + }}, + + `referenced`: &lyra.Reference{ + Parameters: struct { + A string `alias:"P"` + }{}, + Return: struct { + B string `alias:"X"` + }{}, + StepName: `referenced_go`}, + + `epilog`: &lyra.Action{ + Do: func(input struct{ B string }) { + hclog.Default().Info(`epilog`, `B`, input.B) + }, + }}}) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/referenced/referenced.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/referenced/referenced.go new file mode 100644 index 0000000..a9b6295 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/referenced/referenced.go @@ -0,0 +1,16 @@ +package main + +import ( + "fmt" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/servicesdk/lang/go/lyra" +) + +func main() { + lyra.Serve(`referenced_go`, nil, &lyra.Action{ + Do: func(input struct{ P string }) struct{ X string } { + hclog.Default().Info(`referenced`, `P`, input.P) + return struct{ X string }{fmt.Sprintf(`value from referenced + %s`, input.P)} + }}) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/aws/aws.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/aws/aws.go new file mode 100644 index 0000000..db81044 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/aws/aws.go @@ -0,0 +1,8865 @@ +// this file is generated +package aws + +import ( + "fmt" + "reflect" + "time" + + "github.com/lyraproj/pcore/px" +) + +type AcmCertificate struct { + AcmCertificateId *string `puppet:"name=>'acm_certificate_id'"` + Arn *string + CertificateBody *string `puppet:"name=>'certificate_body'"` + CertificateChain *string `puppet:"name=>'certificate_chain'"` + DomainName *string `puppet:"name=>'domain_name'"` + DomainValidationOptions *[]ValidationOptions `puppet:"name=>'domain_validation_options'"` + PrivateKey *string `puppet:"name=>'private_key'"` + SubjectAlternativeNames *[]string `puppet:"name=>'subject_alternative_names'"` + Tags *map[string]string + ValidationEmails *[]string `puppet:"name=>'validation_emails'"` + ValidationMethod *string `puppet:"name=>'validation_method'"` +} + +type AcmCertificateValidation struct { + 
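The generated file that begins here follows one convention throughout: each resource is a flat Go struct, value-typed fields appear to be the required attributes, pointer-typed fields the optional or computed ones, and the puppet:"name=>'...'" tag maps the Go field to the snake_case attribute name used on the manifest side. A minimal sketch of how such a type is consumed from the Go SDK, reusing aws.Vpc and the lyra.Resource pattern from the sample workflow above (the workflow name vpc_only_go is illustrative):

package main

import (
	"github.com/lyraproj/lyra/examples/go-samples/types/aws"
	"github.com/lyraproj/pcore/px"
	"github.com/lyraproj/servicesdk/lang/go/lyra"
)

func main() {
	lyra.Serve(`vpc_only_go`, func(c px.Context) { aws.InitTypes(c) }, &lyra.Workflow{
		Steps: map[string]lyra.Step{
			// CidrBlock is a plain string field, while the optional Tags
			// attribute is pointer-typed and therefore set by address.
			`vpc`: &lyra.Resource{
				Return: struct{ VpcId string }{},
				State: func() *aws.Vpc {
					return &aws.Vpc{
						CidrBlock: `10.0.0.0/16`,
						Tags:      &map[string]string{`created_by`: `lyra`}}
				}},
		}})
}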
CertificateArn string `puppet:"name=>'certificate_arn'"` + AcmCertificateValidationId *string `puppet:"name=>'acm_certificate_validation_id'"` + ValidationRecordFqdns *[]string `puppet:"name=>'validation_record_fqdns'"` +} + +type AcmpcaCertificateAuthority struct { + AcmpcaCertificateAuthorityId *string `puppet:"name=>'acmpca_certificate_authority_id'"` + Arn *string + Certificate *string + CertificateAuthorityConfiguration *AuthorityConfiguration `puppet:"name=>'certificate_authority_configuration'"` + CertificateChain *string `puppet:"name=>'certificate_chain'"` + CertificateSigningRequest *string `puppet:"name=>'certificate_signing_request'"` + Enabled *bool + NotAfter *string `puppet:"name=>'not_after'"` + NotBefore *string `puppet:"name=>'not_before'"` + RevocationConfiguration *RevocationConfiguration `puppet:"name=>'revocation_configuration'"` + Serial *string + Status *string + Tags *map[string]string + Type *string +} + +type Alb struct { + AlbId *string `puppet:"name=>'alb_id'"` + AccessLogs *Logs `puppet:"name=>'access_logs'"` + Arn *string + ArnSuffix *string `puppet:"name=>'arn_suffix'"` + DnsName *string `puppet:"name=>'dns_name'"` + EnableCrossZoneLoadBalancing *bool `puppet:"name=>'enable_cross_zone_load_balancing'"` + EnableDeletionProtection *bool `puppet:"name=>'enable_deletion_protection'"` + EnableHttp2 *bool `puppet:"name=>'enable_http2'"` + IdleTimeout *int64 `puppet:"name=>'idle_timeout'"` + Internal *bool + IpAddressType *string `puppet:"name=>'ip_address_type'"` + LoadBalancerType *string `puppet:"name=>'load_balancer_type'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SubnetMapping *[]Mapping `puppet:"name=>'subnet_mapping'"` + Subnets *[]string + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` + ZoneId *string `puppet:"name=>'zone_id'"` +} + +type AlbListener struct { + DefaultAction []DefaultAction `puppet:"name=>'default_action'"` + LoadBalancerArn string `puppet:"name=>'load_balancer_arn'"` + Port int64 + AlbListenerId *string `puppet:"name=>'alb_listener_id'"` + Arn *string + CertificateArn *string `puppet:"name=>'certificate_arn'"` + Protocol *string + SslPolicy *string `puppet:"name=>'ssl_policy'"` +} + +type AlbListenerCertificate struct { + CertificateArn string `puppet:"name=>'certificate_arn'"` + ListenerArn string `puppet:"name=>'listener_arn'"` + AlbListenerCertificateId *string `puppet:"name=>'alb_listener_certificate_id'"` +} + +type AlbListenerRule struct { + Action []DefaultAction + Condition []Condition + ListenerArn string `puppet:"name=>'listener_arn'"` + AlbListenerRuleId *string `puppet:"name=>'alb_listener_rule_id'"` + Arn *string + Priority *int64 +} + +type AlbTargetGroup struct { + AlbTargetGroupId *string `puppet:"name=>'alb_target_group_id'"` + Arn *string + ArnSuffix *string `puppet:"name=>'arn_suffix'"` + DeregistrationDelay *int64 `puppet:"name=>'deregistration_delay'"` + HealthCheck *Check `puppet:"name=>'health_check'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Port *int64 + Protocol *string + ProxyProtocolV2 *bool `puppet:"name=>'proxy_protocol_v2'"` + SlowStart *int64 `puppet:"name=>'slow_start'"` + Stickiness *Stickiness + Tags *map[string]string + TargetType *string `puppet:"name=>'target_type'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type AlbTargetGroupAttachment struct { + TargetGroupArn string `puppet:"name=>'target_group_arn'"` + TargetId string `puppet:"name=>'target_id'"` + 
AlbTargetGroupAttachmentId *string `puppet:"name=>'alb_target_group_attachment_id'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + Port *int64 +} + +type Ami struct { + Name string + AmiId *string `puppet:"name=>'ami_id'"` + Architecture *string + Description *string + EbsBlockDevice *[]AmiEbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EnaSupport *bool `puppet:"name=>'ena_support'"` + EphemeralBlockDevice *[]Device `puppet:"name=>'ephemeral_block_device'"` + ImageLocation *string `puppet:"name=>'image_location'"` + KernelId *string `puppet:"name=>'kernel_id'"` + ManageEbsSnapshots *bool `puppet:"name=>'manage_ebs_snapshots'"` + RamdiskId *string `puppet:"name=>'ramdisk_id'"` + RootDeviceName *string `puppet:"name=>'root_device_name'"` + RootSnapshotId *string `puppet:"name=>'root_snapshot_id'"` + SriovNetSupport *string `puppet:"name=>'sriov_net_support'"` + Tags *map[string]string + VirtualizationType *string `puppet:"name=>'virtualization_type'"` +} + +type AmiCopy struct { + Name string + SourceAmiId string `puppet:"name=>'source_ami_id'"` + SourceAmiRegion string `puppet:"name=>'source_ami_region'"` + AmiCopyId *string `puppet:"name=>'ami_copy_id'"` + Architecture *string + Description *string + EbsBlockDevice *[]EbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EnaSupport *bool `puppet:"name=>'ena_support'"` + Encrypted *bool + EphemeralBlockDevice *[]EphemeralBlockDevice `puppet:"name=>'ephemeral_block_device'"` + ImageLocation *string `puppet:"name=>'image_location'"` + KernelId *string `puppet:"name=>'kernel_id'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + ManageEbsSnapshots *bool `puppet:"name=>'manage_ebs_snapshots'"` + RamdiskId *string `puppet:"name=>'ramdisk_id'"` + RootDeviceName *string `puppet:"name=>'root_device_name'"` + RootSnapshotId *string `puppet:"name=>'root_snapshot_id'"` + SriovNetSupport *string `puppet:"name=>'sriov_net_support'"` + Tags *map[string]string + VirtualizationType *string `puppet:"name=>'virtualization_type'"` +} + +type AmiFromInstance struct { + Name string + SourceInstanceId string `puppet:"name=>'source_instance_id'"` + AmiFromInstanceId *string `puppet:"name=>'ami_from_instance_id'"` + Architecture *string + Description *string + EbsBlockDevice *[]EbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EnaSupport *bool `puppet:"name=>'ena_support'"` + EphemeralBlockDevice *[]EphemeralBlockDevice `puppet:"name=>'ephemeral_block_device'"` + ImageLocation *string `puppet:"name=>'image_location'"` + KernelId *string `puppet:"name=>'kernel_id'"` + ManageEbsSnapshots *bool `puppet:"name=>'manage_ebs_snapshots'"` + RamdiskId *string `puppet:"name=>'ramdisk_id'"` + RootDeviceName *string `puppet:"name=>'root_device_name'"` + RootSnapshotId *string `puppet:"name=>'root_snapshot_id'"` + SnapshotWithoutReboot *bool `puppet:"name=>'snapshot_without_reboot'"` + SriovNetSupport *string `puppet:"name=>'sriov_net_support'"` + Tags *map[string]string + VirtualizationType *string `puppet:"name=>'virtualization_type'"` +} + +type AmiLaunchPermission struct { + AccountId string `puppet:"name=>'account_id'"` + ImageId string `puppet:"name=>'image_id'"` + AmiLaunchPermissionId *string `puppet:"name=>'ami_launch_permission_id'"` +} + +type ApiGatewayAccount struct { + ApiGatewayAccountId *string `puppet:"name=>'api_gateway_account_id'"` + CloudwatchRoleArn *string `puppet:"name=>'cloudwatch_role_arn'"` + ThrottleSettings *Settings `puppet:"name=>'throttle_settings'"` +} + +type ApiGatewayApiKey struct { + Name string + 
ApiGatewayApiKeyId *string `puppet:"name=>'api_gateway_api_key_id'"` + CreatedDate *string `puppet:"name=>'created_date'"` + Description *string + Enabled *bool + LastUpdatedDate *string `puppet:"name=>'last_updated_date'"` + Value *string +} + +type ApiGatewayAuthorizer struct { + Name string + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayAuthorizerId *string `puppet:"name=>'api_gateway_authorizer_id'"` + AuthorizerCredentials *string `puppet:"name=>'authorizer_credentials'"` + AuthorizerResultTtlInSeconds *int64 `puppet:"name=>'authorizer_result_ttl_in_seconds'"` + AuthorizerUri *string `puppet:"name=>'authorizer_uri'"` + IdentitySource *string `puppet:"name=>'identity_source'"` + IdentityValidationExpression *string `puppet:"name=>'identity_validation_expression'"` + ProviderArns *[]string `puppet:"name=>'provider_arns'"` + Type *string +} + +type ApiGatewayBasePathMapping struct { + ApiId string `puppet:"name=>'api_id'"` + DomainName string `puppet:"name=>'domain_name'"` + ApiGatewayBasePathMappingId *string `puppet:"name=>'api_gateway_base_path_mapping_id'"` + BasePath *string `puppet:"name=>'base_path'"` + StageName *string `puppet:"name=>'stage_name'"` +} + +type ApiGatewayClientCertificate struct { + ApiGatewayClientCertificateId *string `puppet:"name=>'api_gateway_client_certificate_id'"` + CreatedDate *string `puppet:"name=>'created_date'"` + Description *string + ExpirationDate *string `puppet:"name=>'expiration_date'"` + PemEncodedCertificate *string `puppet:"name=>'pem_encoded_certificate'"` +} + +type ApiGatewayDeployment struct { + RestApiId string `puppet:"name=>'rest_api_id'"` + StageName string `puppet:"name=>'stage_name'"` + ApiGatewayDeploymentId *string `puppet:"name=>'api_gateway_deployment_id'"` + CreatedDate *string `puppet:"name=>'created_date'"` + Description *string + ExecutionArn *string `puppet:"name=>'execution_arn'"` + InvokeUrl *string `puppet:"name=>'invoke_url'"` + StageDescription *string `puppet:"name=>'stage_description'"` + Variables *map[string]string +} + +type ApiGatewayDocumentationPart struct { + Properties string + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayDocumentationPartId *string `puppet:"name=>'api_gateway_documentation_part_id'"` + Location *Location +} + +type ApiGatewayDocumentationVersion struct { + RestApiId string `puppet:"name=>'rest_api_id'"` + Version string + ApiGatewayDocumentationVersionId *string `puppet:"name=>'api_gateway_documentation_version_id'"` + Description *string +} + +type ApiGatewayDomainName struct { + DomainName string `puppet:"name=>'domain_name'"` + ApiGatewayDomainNameId *string `puppet:"name=>'api_gateway_domain_name_id'"` + CertificateArn *string `puppet:"name=>'certificate_arn'"` + CertificateBody *string `puppet:"name=>'certificate_body'"` + CertificateChain *string `puppet:"name=>'certificate_chain'"` + CertificateName *string `puppet:"name=>'certificate_name'"` + CertificatePrivateKey *string `puppet:"name=>'certificate_private_key'"` + CertificateUploadDate *string `puppet:"name=>'certificate_upload_date'"` + CloudfrontDomainName *string `puppet:"name=>'cloudfront_domain_name'"` + CloudfrontZoneId *string `puppet:"name=>'cloudfront_zone_id'"` + EndpointConfiguration *EndpointConfiguration `puppet:"name=>'endpoint_configuration'"` + RegionalCertificateArn *string `puppet:"name=>'regional_certificate_arn'"` + RegionalCertificateName *string `puppet:"name=>'regional_certificate_name'"` + RegionalDomainName *string `puppet:"name=>'regional_domain_name'"` + RegionalZoneId 
*string `puppet:"name=>'regional_zone_id'"` +} + +type ApiGatewayGatewayResponse struct { + ResponseType string `puppet:"name=>'response_type'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayGatewayResponseId *string `puppet:"name=>'api_gateway_gateway_response_id'"` + ResponseParameters *map[string]string `puppet:"name=>'response_parameters'"` + ResponseTemplates *map[string]string `puppet:"name=>'response_templates'"` + StatusCode *string `puppet:"name=>'status_code'"` +} + +type ApiGatewayIntegration struct { + HttpMethod string `puppet:"name=>'http_method'"` + ResourceId string `puppet:"name=>'resource_id'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + Type string + ApiGatewayIntegrationId *string `puppet:"name=>'api_gateway_integration_id'"` + CacheKeyParameters *[]string `puppet:"name=>'cache_key_parameters'"` + CacheNamespace *string `puppet:"name=>'cache_namespace'"` + ConnectionId *string `puppet:"name=>'connection_id'"` + ConnectionType *string `puppet:"name=>'connection_type'"` + ContentHandling *string `puppet:"name=>'content_handling'"` + Credentials *string + IntegrationHttpMethod *string `puppet:"name=>'integration_http_method'"` + PassthroughBehavior *string `puppet:"name=>'passthrough_behavior'"` + RequestParameters *map[string]string `puppet:"name=>'request_parameters'"` + RequestTemplates *map[string]string `puppet:"name=>'request_templates'"` + TimeoutMilliseconds *int64 `puppet:"name=>'timeout_milliseconds'"` + Uri *string +} + +type ApiGatewayIntegrationResponse struct { + HttpMethod string `puppet:"name=>'http_method'"` + ResourceId string `puppet:"name=>'resource_id'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + StatusCode string `puppet:"name=>'status_code'"` + ApiGatewayIntegrationResponseId *string `puppet:"name=>'api_gateway_integration_response_id'"` + ContentHandling *string `puppet:"name=>'content_handling'"` + ResponseParameters *map[string]string `puppet:"name=>'response_parameters'"` + ResponseTemplates *map[string]string `puppet:"name=>'response_templates'"` + SelectionPattern *string `puppet:"name=>'selection_pattern'"` +} + +type ApiGatewayMethod struct { + Authorization string + HttpMethod string `puppet:"name=>'http_method'"` + ResourceId string `puppet:"name=>'resource_id'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayMethodId *string `puppet:"name=>'api_gateway_method_id'"` + ApiKeyRequired *bool `puppet:"name=>'api_key_required'"` + AuthorizationScopes *[]string `puppet:"name=>'authorization_scopes'"` + AuthorizerId *string `puppet:"name=>'authorizer_id'"` + RequestModels *map[string]string `puppet:"name=>'request_models'"` + RequestParameters *map[string]bool `puppet:"name=>'request_parameters'"` + RequestValidatorId *string `puppet:"name=>'request_validator_id'"` +} + +type ApiGatewayMethodResponse struct { + HttpMethod string `puppet:"name=>'http_method'"` + ResourceId string `puppet:"name=>'resource_id'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + StatusCode string `puppet:"name=>'status_code'"` + ApiGatewayMethodResponseId *string `puppet:"name=>'api_gateway_method_response_id'"` + ResponseModels *map[string]string `puppet:"name=>'response_models'"` + ResponseParameters *map[string]bool `puppet:"name=>'response_parameters'"` +} + +type ApiGatewayMethodSettings struct { + MethodPath string `puppet:"name=>'method_path'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + StageName string `puppet:"name=>'stage_name'"` + ApiGatewayMethodSettingsId *string 
`puppet:"name=>'api_gateway_method_settings_id'"` + Settings *SettingsSettings +} + +type ApiGatewayModel struct { + ContentType string `puppet:"name=>'content_type'"` + Name string + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayModelId *string `puppet:"name=>'api_gateway_model_id'"` + Description *string + Schema *string +} + +type ApiGatewayRequestValidator struct { + Name string + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayRequestValidatorId *string `puppet:"name=>'api_gateway_request_validator_id'"` + ValidateRequestBody *bool `puppet:"name=>'validate_request_body'"` + ValidateRequestParameters *bool `puppet:"name=>'validate_request_parameters'"` +} + +type ApiGatewayResource struct { + ParentId string `puppet:"name=>'parent_id'"` + PathPart string `puppet:"name=>'path_part'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + ApiGatewayResourceId *string `puppet:"name=>'api_gateway_resource_id'"` + Path *string +} + +type ApiGatewayRestApi struct { + Name string + ApiGatewayRestApiId *string `puppet:"name=>'api_gateway_rest_api_id'"` + ApiKeySource *string `puppet:"name=>'api_key_source'"` + BinaryMediaTypes *[]string `puppet:"name=>'binary_media_types'"` + Body *string + CreatedDate *string `puppet:"name=>'created_date'"` + Description *string + EndpointConfiguration *EndpointConfiguration `puppet:"name=>'endpoint_configuration'"` + ExecutionArn *string `puppet:"name=>'execution_arn'"` + MinimumCompressionSize *int64 `puppet:"name=>'minimum_compression_size'"` + Policy *string + RootResourceId *string `puppet:"name=>'root_resource_id'"` +} + +type ApiGatewayStage struct { + DeploymentId string `puppet:"name=>'deployment_id'"` + RestApiId string `puppet:"name=>'rest_api_id'"` + StageName string `puppet:"name=>'stage_name'"` + ApiGatewayStageId *string `puppet:"name=>'api_gateway_stage_id'"` + AccessLogSettings *LogSettings `puppet:"name=>'access_log_settings'"` + CacheClusterEnabled *bool `puppet:"name=>'cache_cluster_enabled'"` + CacheClusterSize *string `puppet:"name=>'cache_cluster_size'"` + ClientCertificateId *string `puppet:"name=>'client_certificate_id'"` + Description *string + DocumentationVersion *string `puppet:"name=>'documentation_version'"` + ExecutionArn *string `puppet:"name=>'execution_arn'"` + InvokeUrl *string `puppet:"name=>'invoke_url'"` + Tags *map[string]string + Variables *map[string]string + XrayTracingEnabled *bool `puppet:"name=>'xray_tracing_enabled'"` +} + +type ApiGatewayUsagePlan struct { + Name string + ApiGatewayUsagePlanId *string `puppet:"name=>'api_gateway_usage_plan_id'"` + ApiStages *[]Stages `puppet:"name=>'api_stages'"` + Description *string + ProductCode *string `puppet:"name=>'product_code'"` + QuotaSettings *QuotaSettings `puppet:"name=>'quota_settings'"` + ThrottleSettings *ThrottleSettings `puppet:"name=>'throttle_settings'"` +} + +type ApiGatewayUsagePlanKey struct { + KeyId string `puppet:"name=>'key_id'"` + KeyType string `puppet:"name=>'key_type'"` + UsagePlanId string `puppet:"name=>'usage_plan_id'"` + ApiGatewayUsagePlanKeyId *string `puppet:"name=>'api_gateway_usage_plan_key_id'"` + Name *string + Value *string +} + +type ApiGatewayVpcLink struct { + Name string + TargetArns []string `puppet:"name=>'target_arns'"` + ApiGatewayVpcLinkId *string `puppet:"name=>'api_gateway_vpc_link_id'"` + Description *string +} + +type AppCookieStickinessPolicy struct { + CookieName string `puppet:"name=>'cookie_name'"` + LbPort int64 `puppet:"name=>'lb_port'"` + LoadBalancer string `puppet:"name=>'load_balancer'"` + 
Name string + AppCookieStickinessPolicyId *string `puppet:"name=>'app_cookie_stickiness_policy_id'"` +} + +type AppautoscalingPolicy struct { + Name string + ResourceId string `puppet:"name=>'resource_id'"` + ScalableDimension string `puppet:"name=>'scalable_dimension'"` + ServiceNamespace string `puppet:"name=>'service_namespace'"` + AppautoscalingPolicyId *string `puppet:"name=>'appautoscaling_policy_id'"` + Alarms *[]string + Arn *string + PolicyType *string `puppet:"name=>'policy_type'"` + StepScalingPolicyConfiguration *[]PolicyConfiguration `puppet:"name=>'step_scaling_policy_configuration'"` + TargetTrackingScalingPolicyConfiguration *ScalingPolicyConfiguration `puppet:"name=>'target_tracking_scaling_policy_configuration'"` +} + +type AppautoscalingScheduledAction struct { + Name string + ResourceId string `puppet:"name=>'resource_id'"` + ServiceNamespace string `puppet:"name=>'service_namespace'"` + AppautoscalingScheduledActionId *string `puppet:"name=>'appautoscaling_scheduled_action_id'"` + Arn *string + EndTime *string `puppet:"name=>'end_time'"` + ScalableDimension *string `puppet:"name=>'scalable_dimension'"` + ScalableTargetAction *TargetAction `puppet:"name=>'scalable_target_action'"` + Schedule *string + StartTime *string `puppet:"name=>'start_time'"` +} + +type AppautoscalingTarget struct { + MaxCapacity int64 `puppet:"name=>'max_capacity'"` + MinCapacity int64 `puppet:"name=>'min_capacity'"` + ResourceId string `puppet:"name=>'resource_id'"` + ScalableDimension string `puppet:"name=>'scalable_dimension'"` + ServiceNamespace string `puppet:"name=>'service_namespace'"` + AppautoscalingTargetId *string `puppet:"name=>'appautoscaling_target_id'"` + RoleArn *string `puppet:"name=>'role_arn'"` +} + +type AppmeshMesh struct { + Name string + AppmeshMeshId *string `puppet:"name=>'appmesh_mesh_id'"` + Arn *string + CreatedDate *string `puppet:"name=>'created_date'"` + LastUpdatedDate *string `puppet:"name=>'last_updated_date'"` +} + +type AppmeshRoute struct { + MeshName string `puppet:"name=>'mesh_name'"` + Name string + Spec Spec + VirtualRouterName string `puppet:"name=>'virtual_router_name'"` + AppmeshRouteId *string `puppet:"name=>'appmesh_route_id'"` + Arn *string + CreatedDate *string `puppet:"name=>'created_date'"` + LastUpdatedDate *string `puppet:"name=>'last_updated_date'"` +} + +type AppmeshVirtualNode struct { + MeshName string `puppet:"name=>'mesh_name'"` + Name string + Spec NodeSpec + AppmeshVirtualNodeId *string `puppet:"name=>'appmesh_virtual_node_id'"` + Arn *string + CreatedDate *string `puppet:"name=>'created_date'"` + LastUpdatedDate *string `puppet:"name=>'last_updated_date'"` +} + +type AppmeshVirtualRouter struct { + MeshName string `puppet:"name=>'mesh_name'"` + Name string + Spec RouterSpec + AppmeshVirtualRouterId *string `puppet:"name=>'appmesh_virtual_router_id'"` + Arn *string + CreatedDate *string `puppet:"name=>'created_date'"` + LastUpdatedDate *string `puppet:"name=>'last_updated_date'"` +} + +type AppsyncApiKey struct { + ApiId string `puppet:"name=>'api_id'"` + AppsyncApiKeyId *string `puppet:"name=>'appsync_api_key_id'"` + Description *string + Expires *time.Time + Key *string +} + +type AppsyncDatasource struct { + ApiId string `puppet:"name=>'api_id'"` + Name string + Type string + AppsyncDatasourceId *string `puppet:"name=>'appsync_datasource_id'"` + Arn *string + Description *string + DynamodbConfig *DynamodbConfig `puppet:"name=>'dynamodb_config'"` + ElasticsearchConfig *ElasticsearchConfig `puppet:"name=>'elasticsearch_config'"` + 
HttpConfig *HttpConfig `puppet:"name=>'http_config'"` + LambdaConfig *ConfigLambda `puppet:"name=>'lambda_config'"` + ServiceRoleArn *string `puppet:"name=>'service_role_arn'"` +} + +type AppsyncGraphqlApi struct { + AuthenticationType string `puppet:"name=>'authentication_type'"` + Name string + AppsyncGraphqlApiId *string `puppet:"name=>'appsync_graphql_api_id'"` + Arn *string + LogConfig *LogConfig `puppet:"name=>'log_config'"` + OpenidConnectConfig *ConnectConfig `puppet:"name=>'openid_connect_config'"` + Uris *map[string]string + UserPoolConfig *PoolConfig `puppet:"name=>'user_pool_config'"` +} + +type AthenaDatabase struct { + Bucket string + Name string + AthenaDatabaseId *string `puppet:"name=>'athena_database_id'"` + EncryptionConfiguration *EncryptionConfiguration `puppet:"name=>'encryption_configuration'"` + ForceDestroy *bool `puppet:"name=>'force_destroy'"` +} + +type AthenaNamedQuery struct { + Database string + Name string + Query string + AthenaNamedQueryId *string `puppet:"name=>'athena_named_query_id'"` + Description *string +} + +type AutoscalingAttachment struct { + AutoscalingGroupName string `puppet:"name=>'autoscaling_group_name'"` + AutoscalingAttachmentId *string `puppet:"name=>'autoscaling_attachment_id'"` + AlbTargetGroupArn *string `puppet:"name=>'alb_target_group_arn'"` + Elb *string +} + +type AutoscalingGroup struct { + MaxSize int64 `puppet:"name=>'max_size'"` + MinSize int64 `puppet:"name=>'min_size'"` + AutoscalingGroupId *string `puppet:"name=>'autoscaling_group_id'"` + Arn *string + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + DefaultCooldown *int64 `puppet:"name=>'default_cooldown'"` + DesiredCapacity *int64 `puppet:"name=>'desired_capacity'"` + EnabledMetrics *[]string `puppet:"name=>'enabled_metrics'"` + ForceDelete *bool `puppet:"name=>'force_delete'"` + HealthCheckGracePeriod *int64 `puppet:"name=>'health_check_grace_period'"` + HealthCheckType *string `puppet:"name=>'health_check_type'"` + InitialLifecycleHook *[]Hook `puppet:"name=>'initial_lifecycle_hook'"` + LaunchConfiguration *string `puppet:"name=>'launch_configuration'"` + LaunchTemplate *Template `puppet:"name=>'launch_template'"` + LoadBalancers *[]string `puppet:"name=>'load_balancers'"` + MetricsGranularity *string `puppet:"name=>'metrics_granularity'"` + MinElbCapacity *int64 `puppet:"name=>'min_elb_capacity'"` + MixedInstancesPolicy *Policy `puppet:"name=>'mixed_instances_policy'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + PlacementGroup *string `puppet:"name=>'placement_group'"` + ProtectFromScaleIn *bool `puppet:"name=>'protect_from_scale_in'"` + ServiceLinkedRoleArn *string `puppet:"name=>'service_linked_role_arn'"` + SuspendedProcesses *[]string `puppet:"name=>'suspended_processes'"` + Tag *[]Tag + Tags *[]map[string]string + TargetGroupArns *[]string `puppet:"name=>'target_group_arns'"` + TerminationPolicies *[]string `puppet:"name=>'termination_policies'"` + VpcZoneIdentifier *[]string `puppet:"name=>'vpc_zone_identifier'"` + WaitForCapacityTimeout *string `puppet:"name=>'wait_for_capacity_timeout'"` + WaitForElbCapacity *int64 `puppet:"name=>'wait_for_elb_capacity'"` +} + +type AutoscalingLifecycleHook struct { + AutoscalingGroupName string `puppet:"name=>'autoscaling_group_name'"` + LifecycleTransition string `puppet:"name=>'lifecycle_transition'"` + Name string + AutoscalingLifecycleHookId *string `puppet:"name=>'autoscaling_lifecycle_hook_id'"` + DefaultResult *string `puppet:"name=>'default_result'"` + HeartbeatTimeout *int64 
`puppet:"name=>'heartbeat_timeout'"` + NotificationMetadata *string `puppet:"name=>'notification_metadata'"` + NotificationTargetArn *string `puppet:"name=>'notification_target_arn'"` + RoleArn *string `puppet:"name=>'role_arn'"` +} + +type AutoscalingNotification struct { + GroupNames []string `puppet:"name=>'group_names'"` + Notifications []string + TopicArn string `puppet:"name=>'topic_arn'"` + AutoscalingNotificationId *string `puppet:"name=>'autoscaling_notification_id'"` +} + +type AutoscalingPolicy struct { + AutoscalingGroupName string `puppet:"name=>'autoscaling_group_name'"` + Name string + AutoscalingPolicyId *string `puppet:"name=>'autoscaling_policy_id'"` + AdjustmentType *string `puppet:"name=>'adjustment_type'"` + Arn *string + Cooldown *int64 + EstimatedInstanceWarmup *int64 `puppet:"name=>'estimated_instance_warmup'"` + MetricAggregationType *string `puppet:"name=>'metric_aggregation_type'"` + MinAdjustmentMagnitude *int64 `puppet:"name=>'min_adjustment_magnitude'"` + PolicyType *string `puppet:"name=>'policy_type'"` + ScalingAdjustment *int64 `puppet:"name=>'scaling_adjustment'"` + StepAdjustment *[]Adjustment `puppet:"name=>'step_adjustment'"` + TargetTrackingConfiguration *TrackingConfiguration `puppet:"name=>'target_tracking_configuration'"` +} + +type AutoscalingSchedule struct { + AutoscalingGroupName string `puppet:"name=>'autoscaling_group_name'"` + ScheduledActionName string `puppet:"name=>'scheduled_action_name'"` + AutoscalingScheduleId *string `puppet:"name=>'autoscaling_schedule_id'"` + Arn *string + DesiredCapacity *int64 `puppet:"name=>'desired_capacity'"` + EndTime *string `puppet:"name=>'end_time'"` + MaxSize *int64 `puppet:"name=>'max_size'"` + MinSize *int64 `puppet:"name=>'min_size'"` + Recurrence *string + StartTime *string `puppet:"name=>'start_time'"` +} + +type BatchComputeEnvironment struct { + ComputeEnvironmentName string `puppet:"name=>'compute_environment_name'"` + ServiceRole string `puppet:"name=>'service_role'"` + Type string + BatchComputeEnvironmentId *string `puppet:"name=>'batch_compute_environment_id'"` + Arn *string + ComputeResources *Resources `puppet:"name=>'compute_resources'"` + EcsClusterArn *string `puppet:"name=>'ecs_cluster_arn'"` + State *string + Status *string + StatusReason *string `puppet:"name=>'status_reason'"` +} + +type BatchJobDefinition struct { + Name string + Type string + BatchJobDefinitionId *string `puppet:"name=>'batch_job_definition_id'"` + Arn *string + ContainerProperties *string `puppet:"name=>'container_properties'"` + Parameters *map[string]string + RetryStrategy *Strategy `puppet:"name=>'retry_strategy'"` + Revision *int64 + Timeout *Timeout +} + +type BatchJobQueue struct { + ComputeEnvironments []string `puppet:"name=>'compute_environments'"` + Name string + Priority int64 + State string + BatchJobQueueId *string `puppet:"name=>'batch_job_queue_id'"` + Arn *string +} + +type BudgetsBudget struct { + BudgetType string `puppet:"name=>'budget_type'"` + LimitAmount string `puppet:"name=>'limit_amount'"` + LimitUnit string `puppet:"name=>'limit_unit'"` + TimePeriodStart string `puppet:"name=>'time_period_start'"` + TimeUnit string `puppet:"name=>'time_unit'"` + BudgetsBudgetId *string `puppet:"name=>'budgets_budget_id'"` + AccountId *string `puppet:"name=>'account_id'"` + CostFilters *map[string]string `puppet:"name=>'cost_filters'"` + CostTypes *Types `puppet:"name=>'cost_types'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + TimePeriodEnd *string `puppet:"name=>'time_period_end'"` 
+} + +type Cloud9EnvironmentEc2 struct { + InstanceType string `puppet:"name=>'instance_type'"` + Name string + Cloud9EnvironmentEc2Id *string `puppet:"name=>'cloud9_environment_ec2_id'"` + Arn *string + AutomaticStopTimeMinutes *int64 `puppet:"name=>'automatic_stop_time_minutes'"` + Description *string + OwnerArn *string `puppet:"name=>'owner_arn'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Type *string +} + +type CloudformationStack struct { + Name string + CloudformationStackId *string `puppet:"name=>'cloudformation_stack_id'"` + Capabilities *[]string + DisableRollback *bool `puppet:"name=>'disable_rollback'"` + IamRoleArn *string `puppet:"name=>'iam_role_arn'"` + NotificationArns *[]string `puppet:"name=>'notification_arns'"` + OnFailure *string `puppet:"name=>'on_failure'"` + Outputs *map[string]string + Parameters *map[string]string + PolicyBody *string `puppet:"name=>'policy_body'"` + PolicyUrl *string `puppet:"name=>'policy_url'"` + Tags *map[string]string + TemplateBody *string `puppet:"name=>'template_body'"` + TemplateUrl *string `puppet:"name=>'template_url'"` + TimeoutInMinutes *int64 `puppet:"name=>'timeout_in_minutes'"` +} + +type CloudfrontDistribution struct { + Enabled bool + Origin []Origin + CloudfrontDistributionId *string `puppet:"name=>'cloudfront_distribution_id'"` + ActiveTrustedSigners *map[string]string `puppet:"name=>'active_trusted_signers'"` + Aliases *[]string + Arn *string + CallerReference *string `puppet:"name=>'caller_reference'"` + Comment *string + CustomErrorResponse *[]ErrorResponse `puppet:"name=>'custom_error_response'"` + DefaultCacheBehavior *Behavior `puppet:"name=>'default_cache_behavior'"` + DefaultRootObject *string `puppet:"name=>'default_root_object'"` + DomainName *string `puppet:"name=>'domain_name'"` + Etag *string + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` + HttpVersion *string `puppet:"name=>'http_version'"` + InProgressValidationBatches *int64 `puppet:"name=>'in_progress_validation_batches'"` + IsIpv6Enabled *bool `puppet:"name=>'is_ipv6_enabled'"` + LastModifiedTime *string `puppet:"name=>'last_modified_time'"` + LoggingConfig *LoggingConfig `puppet:"name=>'logging_config'"` + OrderedCacheBehavior *[]CacheBehavior `puppet:"name=>'ordered_cache_behavior'"` + PriceClass *string `puppet:"name=>'price_class'"` + Restrictions *Restrictions + RetainOnDelete *bool `puppet:"name=>'retain_on_delete'"` + Status *string + Tags *map[string]string + ViewerCertificate *Certificate `puppet:"name=>'viewer_certificate'"` + WebAclId *string `puppet:"name=>'web_acl_id'"` +} + +type CloudfrontOriginAccessIdentity struct { + CloudfrontOriginAccessIdentityId *string `puppet:"name=>'cloudfront_origin_access_identity_id'"` + CallerReference *string `puppet:"name=>'caller_reference'"` + CloudfrontAccessIdentityPath *string `puppet:"name=>'cloudfront_access_identity_path'"` + Comment *string + Etag *string + IamArn *string `puppet:"name=>'iam_arn'"` + S3CanonicalUserId *string `puppet:"name=>'s3_canonical_user_id'"` +} + +type CloudfrontPublicKey struct { + EncodedKey string `puppet:"name=>'encoded_key'"` + CloudfrontPublicKeyId *string `puppet:"name=>'cloudfront_public_key_id'"` + CallerReference *string `puppet:"name=>'caller_reference'"` + Comment *string + Etag *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type CloudhsmV2Cluster struct { + HsmType string `puppet:"name=>'hsm_type'"` + SubnetIds []string `puppet:"name=>'subnet_ids'"` + CloudhsmV2ClusterId *string 
`puppet:"name=>'cloudhsm_v2_cluster_id'"` + ClusterCertificates *Certificates `puppet:"name=>'cluster_certificates'"` + ClusterId *string `puppet:"name=>'cluster_id'"` + ClusterState *string `puppet:"name=>'cluster_state'"` + SecurityGroupId *string `puppet:"name=>'security_group_id'"` + SourceBackupIdentifier *string `puppet:"name=>'source_backup_identifier'"` + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type CloudhsmV2Hsm struct { + ClusterId string `puppet:"name=>'cluster_id'"` + CloudhsmV2HsmId *string `puppet:"name=>'cloudhsm_v2_hsm_id'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + HsmEniId *string `puppet:"name=>'hsm_eni_id'"` + HsmId *string `puppet:"name=>'hsm_id'"` + HsmState *string `puppet:"name=>'hsm_state'"` + IpAddress *string `puppet:"name=>'ip_address'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type Cloudtrail struct { + Name string + S3BucketName string `puppet:"name=>'s3_bucket_name'"` + CloudtrailId *string `puppet:"name=>'cloudtrail_id'"` + Arn *string + CloudWatchLogsGroupArn *string `puppet:"name=>'cloud_watch_logs_group_arn'"` + CloudWatchLogsRoleArn *string `puppet:"name=>'cloud_watch_logs_role_arn'"` + EnableLogFileValidation *bool `puppet:"name=>'enable_log_file_validation'"` + EnableLogging *bool `puppet:"name=>'enable_logging'"` + EventSelector *[]Selector `puppet:"name=>'event_selector'"` + HomeRegion *string `puppet:"name=>'home_region'"` + IncludeGlobalServiceEvents *bool `puppet:"name=>'include_global_service_events'"` + IsMultiRegionTrail *bool `puppet:"name=>'is_multi_region_trail'"` + IsOrganizationTrail *bool `puppet:"name=>'is_organization_trail'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + S3KeyPrefix *string `puppet:"name=>'s3_key_prefix'"` + SnsTopicName *string `puppet:"name=>'sns_topic_name'"` + Tags *map[string]string +} + +type CloudwatchDashboard struct { + DashboardBody string `puppet:"name=>'dashboard_body'"` + DashboardName string `puppet:"name=>'dashboard_name'"` + CloudwatchDashboardId *string `puppet:"name=>'cloudwatch_dashboard_id'"` + DashboardArn *string `puppet:"name=>'dashboard_arn'"` +} + +type CloudwatchEventPermission struct { + Principal string + StatementId string `puppet:"name=>'statement_id'"` + CloudwatchEventPermissionId *string `puppet:"name=>'cloudwatch_event_permission_id'"` + Action *string + Condition *PermissionCondition +} + +type CloudwatchEventRule struct { + CloudwatchEventRuleId *string `puppet:"name=>'cloudwatch_event_rule_id'"` + Arn *string + Description *string + EventPattern *string `puppet:"name=>'event_pattern'"` + IsEnabled *bool `puppet:"name=>'is_enabled'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + RoleArn *string `puppet:"name=>'role_arn'"` + ScheduleExpression *string `puppet:"name=>'schedule_expression'"` +} + +type CloudwatchEventTarget struct { + Arn string + Rule string + CloudwatchEventTargetId *string `puppet:"name=>'cloudwatch_event_target_id'"` + BatchTarget *BatchTarget `puppet:"name=>'batch_target'"` + EcsTarget *EcsTarget `puppet:"name=>'ecs_target'"` + Input *string + InputPath *string `puppet:"name=>'input_path'"` + InputTransformer *Transformer `puppet:"name=>'input_transformer'"` + KinesisTarget *KinesisTarget `puppet:"name=>'kinesis_target'"` + RoleArn *string `puppet:"name=>'role_arn'"` + RunCommandTargets *[]Targets `puppet:"name=>'run_command_targets'"` + SqsTarget *SqsTarget `puppet:"name=>'sqs_target'"` + TargetId *string `puppet:"name=>'target_id'"` +} + +type CloudwatchLogDestination 
struct { + Name string + RoleArn string `puppet:"name=>'role_arn'"` + TargetArn string `puppet:"name=>'target_arn'"` + CloudwatchLogDestinationId *string `puppet:"name=>'cloudwatch_log_destination_id'"` + Arn *string +} + +type CloudwatchLogDestinationPolicy struct { + AccessPolicy string `puppet:"name=>'access_policy'"` + DestinationName string `puppet:"name=>'destination_name'"` + CloudwatchLogDestinationPolicyId *string `puppet:"name=>'cloudwatch_log_destination_policy_id'"` +} + +type CloudwatchLogGroup struct { + CloudwatchLogGroupId *string `puppet:"name=>'cloudwatch_log_group_id'"` + Arn *string + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + RetentionInDays *int64 `puppet:"name=>'retention_in_days'"` + Tags *map[string]string +} + +type CloudwatchLogMetricFilter struct { + LogGroupName string `puppet:"name=>'log_group_name'"` + Name string + Pattern string + CloudwatchLogMetricFilterId *string `puppet:"name=>'cloudwatch_log_metric_filter_id'"` + MetricTransformation *Transformation `puppet:"name=>'metric_transformation'"` +} + +type CloudwatchLogResourcePolicy struct { + PolicyDocument string `puppet:"name=>'policy_document'"` + PolicyName string `puppet:"name=>'policy_name'"` + CloudwatchLogResourcePolicyId *string `puppet:"name=>'cloudwatch_log_resource_policy_id'"` +} + +type CloudwatchLogStream struct { + LogGroupName string `puppet:"name=>'log_group_name'"` + Name string + CloudwatchLogStreamId *string `puppet:"name=>'cloudwatch_log_stream_id'"` + Arn *string +} + +type CloudwatchLogSubscriptionFilter struct { + DestinationArn string `puppet:"name=>'destination_arn'"` + FilterPattern string `puppet:"name=>'filter_pattern'"` + LogGroupName string `puppet:"name=>'log_group_name'"` + Name string + CloudwatchLogSubscriptionFilterId *string `puppet:"name=>'cloudwatch_log_subscription_filter_id'"` + Distribution *string + RoleArn *string `puppet:"name=>'role_arn'"` +} + +type CloudwatchMetricAlarm struct { + AlarmName string `puppet:"name=>'alarm_name'"` + ComparisonOperator string `puppet:"name=>'comparison_operator'"` + EvaluationPeriods int64 `puppet:"name=>'evaluation_periods'"` + MetricName string `puppet:"name=>'metric_name'"` + Namespace string + Period int64 + Threshold float64 + CloudwatchMetricAlarmId *string `puppet:"name=>'cloudwatch_metric_alarm_id'"` + ActionsEnabled *bool `puppet:"name=>'actions_enabled'"` + AlarmActions *[]string `puppet:"name=>'alarm_actions'"` + AlarmDescription *string `puppet:"name=>'alarm_description'"` + Arn *string + DatapointsToAlarm *int64 `puppet:"name=>'datapoints_to_alarm'"` + Dimensions *map[string]string + EvaluateLowSampleCountPercentiles *string `puppet:"name=>'evaluate_low_sample_count_percentiles'"` + ExtendedStatistic *string `puppet:"name=>'extended_statistic'"` + InsufficientDataActions *[]string `puppet:"name=>'insufficient_data_actions'"` + OkActions *[]string `puppet:"name=>'ok_actions'"` + Statistic *string + TreatMissingData *string `puppet:"name=>'treat_missing_data'"` + Unit *string +} + +type CodebuildProject struct { + Name string + ServiceRole string `puppet:"name=>'service_role'"` + CodebuildProjectId *string `puppet:"name=>'codebuild_project_id'"` + Arn *string + Artifacts *Artifacts + BadgeEnabled *bool `puppet:"name=>'badge_enabled'"` + BadgeUrl *string `puppet:"name=>'badge_url'"` + BuildTimeout *int64 `puppet:"name=>'build_timeout'"` + Cache *Cache + Description *string + EncryptionKey *string `puppet:"name=>'encryption_key'"` + Environment 
*Environment + SecondaryArtifacts *[]SecondaryArtifacts `puppet:"name=>'secondary_artifacts'"` + SecondarySources *[]Sources `puppet:"name=>'secondary_sources'"` + Source *Source + Tags *map[string]string + VpcConfig *VpcConfig `puppet:"name=>'vpc_config'"` +} + +type CodebuildWebhook struct { + ProjectName string `puppet:"name=>'project_name'"` + CodebuildWebhookId *string `puppet:"name=>'codebuild_webhook_id'"` + BranchFilter *string `puppet:"name=>'branch_filter'"` + PayloadUrl *string `puppet:"name=>'payload_url'"` + Secret *string + Url *string +} + +type CodecommitRepository struct { + RepositoryName string `puppet:"name=>'repository_name'"` + CodecommitRepositoryId *string `puppet:"name=>'codecommit_repository_id'"` + Arn *string + CloneUrlHttp *string `puppet:"name=>'clone_url_http'"` + CloneUrlSsh *string `puppet:"name=>'clone_url_ssh'"` + DefaultBranch *string `puppet:"name=>'default_branch'"` + Description *string + RepositoryId *string `puppet:"name=>'repository_id'"` +} + +type CodecommitTrigger struct { + RepositoryName string `puppet:"name=>'repository_name'"` + Trigger []Trigger + CodecommitTriggerId *string `puppet:"name=>'codecommit_trigger_id'"` + ConfigurationId *string `puppet:"name=>'configuration_id'"` +} + +type CodedeployApp struct { + Name string + CodedeployAppId *string `puppet:"name=>'codedeploy_app_id'"` + ComputePlatform *string `puppet:"name=>'compute_platform'"` + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type CodedeployDeploymentConfig struct { + DeploymentConfigName string `puppet:"name=>'deployment_config_name'"` + CodedeployDeploymentConfigId *string `puppet:"name=>'codedeploy_deployment_config_id'"` + ComputePlatform *string `puppet:"name=>'compute_platform'"` + DeploymentConfigId *string `puppet:"name=>'deployment_config_id'"` + MinimumHealthyHosts *Hosts `puppet:"name=>'minimum_healthy_hosts'"` + TrafficRoutingConfig *RoutingConfig `puppet:"name=>'traffic_routing_config'"` +} + +type CodedeployDeploymentGroup struct { + AppName string `puppet:"name=>'app_name'"` + DeploymentGroupName string `puppet:"name=>'deployment_group_name'"` + ServiceRoleArn string `puppet:"name=>'service_role_arn'"` + CodedeployDeploymentGroupId *string `puppet:"name=>'codedeploy_deployment_group_id'"` + AlarmConfiguration *AlarmConfiguration `puppet:"name=>'alarm_configuration'"` + AutoRollbackConfiguration *RollbackConfiguration `puppet:"name=>'auto_rollback_configuration'"` + AutoscalingGroups *[]string `puppet:"name=>'autoscaling_groups'"` + BlueGreenDeploymentConfig *DeploymentConfig `puppet:"name=>'blue_green_deployment_config'"` + DeploymentConfigName *string `puppet:"name=>'deployment_config_name'"` + DeploymentStyle *Style `puppet:"name=>'deployment_style'"` + Ec2TagFilter *[]Filter `puppet:"name=>'ec2_tag_filter'"` + Ec2TagSet *[]Set `puppet:"name=>'ec2_tag_set'"` + EcsService *Service `puppet:"name=>'ecs_service'"` + LoadBalancerInfo *BalancerInfo `puppet:"name=>'load_balancer_info'"` + OnPremisesInstanceTagFilter *[]Filter `puppet:"name=>'on_premises_instance_tag_filter'"` + TriggerConfiguration *[]TriggerConfiguration `puppet:"name=>'trigger_configuration'"` +} + +type Codepipeline struct { + Name string + RoleArn string `puppet:"name=>'role_arn'"` + Stage []Stage + CodepipelineId *string `puppet:"name=>'codepipeline_id'"` + Arn *string + ArtifactStore *Store `puppet:"name=>'artifact_store'"` +} + +type CodepipelineWebhook struct { + Authentication string + Filter []WebhookFilter + Name string + TargetAction string `puppet:"name=>'target_action'"` + 
TargetPipeline string `puppet:"name=>'target_pipeline'"` + CodepipelineWebhookId *string `puppet:"name=>'codepipeline_webhook_id'"` + AuthenticationConfiguration *AuthenticationConfiguration `puppet:"name=>'authentication_configuration'"` + Url *string +} + +type CognitoIdentityPool struct { + IdentityPoolName string `puppet:"name=>'identity_pool_name'"` + CognitoIdentityPoolId *string `puppet:"name=>'cognito_identity_pool_id'"` + AllowUnauthenticatedIdentities *bool `puppet:"name=>'allow_unauthenticated_identities'"` + Arn *string + CognitoIdentityProviders *[]Providers `puppet:"name=>'cognito_identity_providers'"` + DeveloperProviderName *string `puppet:"name=>'developer_provider_name'"` + OpenidConnectProviderArns *[]string `puppet:"name=>'openid_connect_provider_arns'"` + SamlProviderArns *[]string `puppet:"name=>'saml_provider_arns'"` + SupportedLoginProviders *map[string]string `puppet:"name=>'supported_login_providers'"` +} + +type CognitoIdentityPoolRolesAttachment struct { + IdentityPoolId string `puppet:"name=>'identity_pool_id'"` + Roles map[string]Roles + CognitoIdentityPoolRolesAttachmentId *string `puppet:"name=>'cognito_identity_pool_roles_attachment_id'"` + RoleMapping *[]RoleMapping `puppet:"name=>'role_mapping'"` +} + +type CognitoIdentityProvider struct { + ProviderDetails map[string]string `puppet:"name=>'provider_details'"` + ProviderName string `puppet:"name=>'provider_name'"` + ProviderType string `puppet:"name=>'provider_type'"` + UserPoolId string `puppet:"name=>'user_pool_id'"` + CognitoIdentityProviderId *string `puppet:"name=>'cognito_identity_provider_id'"` + AttributeMapping *map[string]string `puppet:"name=>'attribute_mapping'"` + IdpIdentifiers *[]string `puppet:"name=>'idp_identifiers'"` +} + +type CognitoResourceServer struct { + Identifier string + Name string + UserPoolId string `puppet:"name=>'user_pool_id'"` + CognitoResourceServerId *string `puppet:"name=>'cognito_resource_server_id'"` + Scope *[]Scope + ScopeIdentifiers *[]string `puppet:"name=>'scope_identifiers'"` +} + +type CognitoUserGroup struct { + Name string + UserPoolId string `puppet:"name=>'user_pool_id'"` + CognitoUserGroupId *string `puppet:"name=>'cognito_user_group_id'"` + Description *string + Precedence *int64 + RoleArn *string `puppet:"name=>'role_arn'"` +} + +type CognitoUserPool struct { + Name string + CognitoUserPoolId *string `puppet:"name=>'cognito_user_pool_id'"` + AdminCreateUserConfig *UserConfig `puppet:"name=>'admin_create_user_config'"` + AliasAttributes *[]string `puppet:"name=>'alias_attributes'"` + Arn *string + AutoVerifiedAttributes *[]string `puppet:"name=>'auto_verified_attributes'"` + CreationDate *string `puppet:"name=>'creation_date'"` + DeviceConfiguration *DeviceConfiguration `puppet:"name=>'device_configuration'"` + EmailConfiguration *EmailConfiguration `puppet:"name=>'email_configuration'"` + EmailVerificationMessage *string `puppet:"name=>'email_verification_message'"` + EmailVerificationSubject *string `puppet:"name=>'email_verification_subject'"` + Endpoint *string + LambdaConfig *LambdaConfig `puppet:"name=>'lambda_config'"` + LastModifiedDate *string `puppet:"name=>'last_modified_date'"` + MfaConfiguration *string `puppet:"name=>'mfa_configuration'"` + PasswordPolicy *PasswordPolicy `puppet:"name=>'password_policy'"` + Schema *[]PoolSchema + SmsAuthenticationMessage *string `puppet:"name=>'sms_authentication_message'"` + SmsConfiguration *SmsConfiguration `puppet:"name=>'sms_configuration'"` + SmsVerificationMessage *string 
`puppet:"name=>'sms_verification_message'"` + Tags *map[string]string + UsernameAttributes *[]string `puppet:"name=>'username_attributes'"` + VerificationMessageTemplate *VerificationMessageTemplate `puppet:"name=>'verification_message_template'"` +} + +type CognitoUserPoolClient struct { + Name string + UserPoolId string `puppet:"name=>'user_pool_id'"` + CognitoUserPoolClientId *string `puppet:"name=>'cognito_user_pool_client_id'"` + AllowedOauthFlows *[]string `puppet:"name=>'allowed_oauth_flows'"` + AllowedOauthFlowsUserPoolClient *bool `puppet:"name=>'allowed_oauth_flows_user_pool_client'"` + AllowedOauthScopes *[]string `puppet:"name=>'allowed_oauth_scopes'"` + CallbackUrls *[]string `puppet:"name=>'callback_urls'"` + ClientSecret *string `puppet:"name=>'client_secret'"` + DefaultRedirectUri *string `puppet:"name=>'default_redirect_uri'"` + ExplicitAuthFlows *[]string `puppet:"name=>'explicit_auth_flows'"` + GenerateSecret *bool `puppet:"name=>'generate_secret'"` + LogoutUrls *[]string `puppet:"name=>'logout_urls'"` + ReadAttributes *[]string `puppet:"name=>'read_attributes'"` + RefreshTokenValidity *int64 `puppet:"name=>'refresh_token_validity'"` + SupportedIdentityProviders *[]string `puppet:"name=>'supported_identity_providers'"` + WriteAttributes *[]string `puppet:"name=>'write_attributes'"` +} + +type CognitoUserPoolDomain struct { + Domain string + UserPoolId string `puppet:"name=>'user_pool_id'"` + CognitoUserPoolDomainId *string `puppet:"name=>'cognito_user_pool_domain_id'"` + AwsAccountId *string `puppet:"name=>'aws_account_id'"` + CertificateArn *string `puppet:"name=>'certificate_arn'"` + CloudfrontDistributionArn *string `puppet:"name=>'cloudfront_distribution_arn'"` + S3Bucket *string `puppet:"name=>'s3_bucket'"` + Version *string +} + +type ConfigAggregateAuthorization struct { + AccountId string `puppet:"name=>'account_id'"` + Region string + ConfigAggregateAuthorizationId *string `puppet:"name=>'config_aggregate_authorization_id'"` + Arn *string +} + +type ConfigConfigRule struct { + Name string + ConfigConfigRuleId *string `puppet:"name=>'config_config_rule_id'"` + Arn *string + Description *string + InputParameters *string `puppet:"name=>'input_parameters'"` + MaximumExecutionFrequency *string `puppet:"name=>'maximum_execution_frequency'"` + RuleId *string `puppet:"name=>'rule_id'"` + Scope *RuleScope + Source *RuleSource +} + +type ConfigConfigurationAggregator struct { + Name string + ConfigConfigurationAggregatorId *string `puppet:"name=>'config_configuration_aggregator_id'"` + AccountAggregationSource *AggregationSource `puppet:"name=>'account_aggregation_source'"` + Arn *string + OrganizationAggregationSource *OrganizationAggregationSource `puppet:"name=>'organization_aggregation_source'"` +} + +type ConfigConfigurationRecorder struct { + RoleArn string `puppet:"name=>'role_arn'"` + ConfigConfigurationRecorderId *string `puppet:"name=>'config_configuration_recorder_id'"` + Name *string + RecordingGroup *RecordingGroup `puppet:"name=>'recording_group'"` +} + +type ConfigConfigurationRecorderStatus struct { + IsEnabled bool `puppet:"name=>'is_enabled'"` + Name string + ConfigConfigurationRecorderStatusId *string `puppet:"name=>'config_configuration_recorder_status_id'"` +} + +type ConfigDeliveryChannel struct { + S3BucketName string `puppet:"name=>'s3_bucket_name'"` + ConfigDeliveryChannelId *string `puppet:"name=>'config_delivery_channel_id'"` + Name *string + S3KeyPrefix *string `puppet:"name=>'s3_key_prefix'"` + SnapshotDeliveryProperties *Properties 
`puppet:"name=>'snapshot_delivery_properties'"` + SnsTopicArn *string `puppet:"name=>'sns_topic_arn'"` +} + +type CustomerGateway struct { + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + IpAddress string `puppet:"name=>'ip_address'"` + Type string + CustomerGatewayId *string `puppet:"name=>'customer_gateway_id'"` + Tags *map[string]string +} + +type DatasyncAgent struct { + DatasyncAgentId *string `puppet:"name=>'datasync_agent_id'"` + ActivationKey *string `puppet:"name=>'activation_key'"` + Arn *string + IpAddress *string `puppet:"name=>'ip_address'"` + Name *string + Tags *map[string]string +} + +type DatasyncLocationEfs struct { + EfsFileSystemArn string `puppet:"name=>'efs_file_system_arn'"` + DatasyncLocationEfsId *string `puppet:"name=>'datasync_location_efs_id'"` + Arn *string + Ec2Config *Ec2Config `puppet:"name=>'ec2_config'"` + Subdirectory *string + Tags *map[string]string + Uri *string +} + +type DatasyncLocationNfs struct { + ServerHostname string `puppet:"name=>'server_hostname'"` + Subdirectory string + DatasyncLocationNfsId *string `puppet:"name=>'datasync_location_nfs_id'"` + Arn *string + OnPremConfig *PremConfig `puppet:"name=>'on_prem_config'"` + Tags *map[string]string + Uri *string +} + +type DatasyncLocationS3 struct { + S3BucketArn string `puppet:"name=>'s3_bucket_arn'"` + Subdirectory string + DatasyncLocationS3Id *string `puppet:"name=>'datasync_location_s3_id'"` + Arn *string + S3Config *S3Config `puppet:"name=>'s3_config'"` + Tags *map[string]string + Uri *string +} + +type DatasyncTask struct { + DestinationLocationArn string `puppet:"name=>'destination_location_arn'"` + SourceLocationArn string `puppet:"name=>'source_location_arn'"` + DatasyncTaskId *string `puppet:"name=>'datasync_task_id'"` + Arn *string + CloudwatchLogGroupArn *string `puppet:"name=>'cloudwatch_log_group_arn'"` + Name *string + Options *TaskOptions + Tags *map[string]string +} + +type DaxCluster struct { + ClusterName string `puppet:"name=>'cluster_name'"` + IamRoleArn string `puppet:"name=>'iam_role_arn'"` + NodeType string `puppet:"name=>'node_type'"` + ReplicationFactor int64 `puppet:"name=>'replication_factor'"` + DaxClusterId *string `puppet:"name=>'dax_cluster_id'"` + Arn *string + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + ClusterAddress *string `puppet:"name=>'cluster_address'"` + ConfigurationEndpoint *string `puppet:"name=>'configuration_endpoint'"` + Description *string + MaintenanceWindow *string `puppet:"name=>'maintenance_window'"` + Nodes *[]Nodes + NotificationTopicArn *string `puppet:"name=>'notification_topic_arn'"` + ParameterGroupName *string `puppet:"name=>'parameter_group_name'"` + Port *int64 + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + ServerSideEncryption *Encryption `puppet:"name=>'server_side_encryption'"` + SubnetGroupName *string `puppet:"name=>'subnet_group_name'"` + Tags *map[string]string +} + +type DaxParameterGroup struct { + Name string + DaxParameterGroupId *string `puppet:"name=>'dax_parameter_group_id'"` + Description *string + Parameters *[]GroupParameter +} + +type DaxSubnetGroup struct { + Name string + SubnetIds []string `puppet:"name=>'subnet_ids'"` + DaxSubnetGroupId *string `puppet:"name=>'dax_subnet_group_id'"` + Description *string + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type DbClusterSnapshot struct { + DbClusterIdentifier string `puppet:"name=>'db_cluster_identifier'"` + DbClusterSnapshotIdentifier string `puppet:"name=>'db_cluster_snapshot_identifier'"` + DbClusterSnapshotId *string 
`puppet:"name=>'db_cluster_snapshot_id'"` + AllocatedStorage *int64 `puppet:"name=>'allocated_storage'"` + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + DbClusterSnapshotArn *string `puppet:"name=>'db_cluster_snapshot_arn'"` + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + LicenseModel *string `puppet:"name=>'license_model'"` + Port *int64 + SnapshotType *string `puppet:"name=>'snapshot_type'"` + SourceDbClusterSnapshotArn *string `puppet:"name=>'source_db_cluster_snapshot_arn'"` + Status *string + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type DbEventSubscription struct { + SnsTopic string `puppet:"name=>'sns_topic'"` + DbEventSubscriptionId *string `puppet:"name=>'db_event_subscription_id'"` + Arn *string + CustomerAwsId *string `puppet:"name=>'customer_aws_id'"` + Enabled *bool + EventCategories *[]string `puppet:"name=>'event_categories'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + SourceIds *[]string `puppet:"name=>'source_ids'"` + SourceType *string `puppet:"name=>'source_type'"` + Tags *map[string]string +} + +type DbInstance struct { + InstanceClass string `puppet:"name=>'instance_class'"` + DbInstanceId *string `puppet:"name=>'db_instance_id'"` + Address *string + AllocatedStorage *int64 `puppet:"name=>'allocated_storage'"` + AllowMajorVersionUpgrade *bool `puppet:"name=>'allow_major_version_upgrade'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + BackupRetentionPeriod *int64 `puppet:"name=>'backup_retention_period'"` + BackupWindow *string `puppet:"name=>'backup_window'"` + CaCertIdentifier *string `puppet:"name=>'ca_cert_identifier'"` + CharacterSetName *string `puppet:"name=>'character_set_name'"` + CopyTagsToSnapshot *bool `puppet:"name=>'copy_tags_to_snapshot'"` + DbSubnetGroupName *string `puppet:"name=>'db_subnet_group_name'"` + DeletionProtection *bool `puppet:"name=>'deletion_protection'"` + Domain *string + DomainIamRoleName *string `puppet:"name=>'domain_iam_role_name'"` + EnabledCloudwatchLogsExports *[]string `puppet:"name=>'enabled_cloudwatch_logs_exports'"` + Endpoint *string + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + FinalSnapshotIdentifier *string `puppet:"name=>'final_snapshot_identifier'"` + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` + IamDatabaseAuthenticationEnabled *bool `puppet:"name=>'iam_database_authentication_enabled'"` + Identifier *string + IdentifierPrefix *string `puppet:"name=>'identifier_prefix'"` + Iops *int64 + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + LicenseModel *string `puppet:"name=>'license_model'"` + MaintenanceWindow *string `puppet:"name=>'maintenance_window'"` + MonitoringInterval *int64 `puppet:"name=>'monitoring_interval'"` + MonitoringRoleArn *string `puppet:"name=>'monitoring_role_arn'"` + MultiAz *bool `puppet:"name=>'multi_az'"` + Name *string + OptionGroupName *string `puppet:"name=>'option_group_name'"` + ParameterGroupName *string `puppet:"name=>'parameter_group_name'"` + Password *string + Port *int64 + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + Replicas *[]string + ReplicateSourceDb *string `puppet:"name=>'replicate_source_db'"` + ResourceId *string `puppet:"name=>'resource_id'"` + S3Import 
*Import `puppet:"name=>'s3_import'"`
+ SecurityGroupNames *[]string `puppet:"name=>'security_group_names'"`
+ SkipFinalSnapshot *bool `puppet:"name=>'skip_final_snapshot'"`
+ SnapshotIdentifier *string `puppet:"name=>'snapshot_identifier'"`
+ Status *string
+ StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"`
+ StorageType *string `puppet:"name=>'storage_type'"`
+ Tags *map[string]string
+ Timezone *string
+ Username *string
+ VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"`
+}
+
+type DbOptionGroup struct {
+ EngineName string `puppet:"name=>'engine_name'"`
+ MajorEngineVersion string `puppet:"name=>'major_engine_version'"`
+ DbOptionGroupId *string `puppet:"name=>'db_option_group_id'"`
+ Arn *string
+ Name *string
+ NamePrefix *string `puppet:"name=>'name_prefix'"`
+ Option *[]GroupOption
+ OptionGroupDescription *string `puppet:"name=>'option_group_description'"`
+ Tags *map[string]string
+}
+
+type DbParameterGroup struct {
+ Family string
+ DbParameterGroupId *string `puppet:"name=>'db_parameter_group_id'"`
+ Arn *string
+ Description *string
+ Name *string
+ NamePrefix *string `puppet:"name=>'name_prefix'"`
+ Parameter *[]Parameter
+ Tags *map[string]string
+}
+
+type DbSecurityGroup struct {
+ Ingress []Ingress
+ Name string
+ DbSecurityGroupId *string `puppet:"name=>'db_security_group_id'"`
+ Arn *string
+ Description *string
+ Tags *map[string]string
+}
+
+type DbSnapshot struct {
+ DbInstanceIdentifier string `puppet:"name=>'db_instance_identifier'"`
+ DbSnapshotIdentifier string `puppet:"name=>'db_snapshot_identifier'"`
+ DbSnapshotId *string `puppet:"name=>'db_snapshot_id'"`
+ AllocatedStorage *int64 `puppet:"name=>'allocated_storage'"`
+ AvailabilityZone *string `puppet:"name=>'availability_zone'"`
+ DbSnapshotArn *string `puppet:"name=>'db_snapshot_arn'"`
+ Encrypted *bool
+ Engine *string
+ EngineVersion *string `puppet:"name=>'engine_version'"`
+ Iops *int64
+ KmsKeyId *string `puppet:"name=>'kms_key_id'"`
+ LicenseModel *string `puppet:"name=>'license_model'"`
+ OptionGroupName *string `puppet:"name=>'option_group_name'"`
+ Port *int64
+ SnapshotType *string `puppet:"name=>'snapshot_type'"`
+ SourceDbSnapshotIdentifier *string `puppet:"name=>'source_db_snapshot_identifier'"`
+ SourceRegion *string `puppet:"name=>'source_region'"`
+ Status *string
+ StorageType *string `puppet:"name=>'storage_type'"`
+ Tags *map[string]string
+ VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type DbSubnetGroup struct {
+ SubnetIds []string `puppet:"name=>'subnet_ids'"`
+ DbSubnetGroupId *string `puppet:"name=>'db_subnet_group_id'"`
+ Arn *string
+ Description *string
+ Name *string
+ NamePrefix *string `puppet:"name=>'name_prefix'"`
+ Tags *map[string]string
+}
+
+type DefaultNetworkAcl struct {
+ DefaultNetworkAclId string `puppet:"name=>'default_network_acl_id'"`
+ DefaultNetworkAclLyraId *string `puppet:"name=>'default_network_acl_lyra_id'"`
+ Egress *[]EgressIngress
+ Ingress *[]EgressIngress
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ SubnetIds *[]string `puppet:"name=>'subnet_ids'"`
+ Tags *map[string]string
+ VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type DefaultRouteTable struct {
+ DefaultRouteTableId string `puppet:"name=>'default_route_table_id'"`
+ DefaultRouteTableLyraId *string `puppet:"name=>'default_route_table_lyra_id'"`
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ PropagatingVgws *[]string `puppet:"name=>'propagating_vgws'"`
+ Route *[]TableRoute
+ Tags *map[string]string
+ VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type DefaultSecurityGroup struct {
+ DefaultSecurityGroupId *string `puppet:"name=>'default_security_group_id'"`
+ Arn *string
+ Egress *[]GroupEgressIngress
+ Ingress *[]GroupEgressIngress
+ Name *string
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ RevokeRulesOnDelete *bool `puppet:"name=>'revoke_rules_on_delete'"`
+ Tags *map[string]string
+ VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type DefaultSubnet struct {
+ AvailabilityZone string `puppet:"name=>'availability_zone'"`
+ DefaultSubnetId *string `puppet:"name=>'default_subnet_id'"`
+ Arn *string
+ AssignIpv6AddressOnCreation *bool `puppet:"name=>'assign_ipv6_address_on_creation'"`
+ AvailabilityZoneId *string `puppet:"name=>'availability_zone_id'"`
+ CidrBlock *string `puppet:"name=>'cidr_block'"`
+ Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"`
+ Ipv6CidrBlockAssociationId *string `puppet:"name=>'ipv6_cidr_block_association_id'"`
+ MapPublicIpOnLaunch *bool `puppet:"name=>'map_public_ip_on_launch'"`
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ Tags *map[string]string
+ VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type DefaultVpc struct {
+ DefaultVpcId *string `puppet:"name=>'default_vpc_id'"`
+ Arn *string
+ AssignGeneratedIpv6CidrBlock *bool `puppet:"name=>'assign_generated_ipv6_cidr_block'"`
+ CidrBlock *string `puppet:"name=>'cidr_block'"`
+ DefaultNetworkAclId *string `puppet:"name=>'default_network_acl_id'"`
+ DefaultRouteTableId *string `puppet:"name=>'default_route_table_id'"`
+ DefaultSecurityGroupId *string `puppet:"name=>'default_security_group_id'"`
+ DhcpOptionsId *string `puppet:"name=>'dhcp_options_id'"`
+ EnableClassiclink *bool `puppet:"name=>'enable_classiclink'"`
+ EnableClassiclinkDnsSupport *bool `puppet:"name=>'enable_classiclink_dns_support'"`
+ EnableDnsHostnames *bool `puppet:"name=>'enable_dns_hostnames'"`
+ EnableDnsSupport *bool `puppet:"name=>'enable_dns_support'"`
+ InstanceTenancy *string `puppet:"name=>'instance_tenancy'"`
+ Ipv6AssociationId *string `puppet:"name=>'ipv6_association_id'"`
+ Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"`
+ MainRouteTableId *string `puppet:"name=>'main_route_table_id'"`
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ Tags *map[string]string
+}
+
+type DefaultVpcDhcpOptions struct {
+ DefaultVpcDhcpOptionsId *string `puppet:"name=>'default_vpc_dhcp_options_id'"`
+ DomainName *string `puppet:"name=>'domain_name'"`
+ DomainNameServers *string `puppet:"name=>'domain_name_servers'"`
+ NetbiosNameServers *[]string `puppet:"name=>'netbios_name_servers'"`
+ NetbiosNodeType *string `puppet:"name=>'netbios_node_type'"`
+ NtpServers *string `puppet:"name=>'ntp_servers'"`
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ Tags *map[string]string
+}
+
+type DevicefarmProject struct {
+ Name string
+ DevicefarmProjectId *string `puppet:"name=>'devicefarm_project_id'"`
+ Arn *string
+}
+
+type DirectoryServiceConditionalForwarder struct {
+ DirectoryId string `puppet:"name=>'directory_id'"`
+ DnsIps []string `puppet:"name=>'dns_ips'"`
+ RemoteDomainName string `puppet:"name=>'remote_domain_name'"`
+ DirectoryServiceConditionalForwarderId *string `puppet:"name=>'directory_service_conditional_forwarder_id'"`
+}
+
+type DirectoryServiceDirectory struct {
+ Name string
+ Password string
+ DirectoryServiceDirectoryId *string `puppet:"name=>'directory_service_directory_id'"`
+ AccessUrl *string `puppet:"name=>'access_url'"`
+ Alias *string
+ ConnectSettings *ConnectSettings `puppet:"name=>'connect_settings'"`
+ Description *string
+ DnsIpAddresses *[]string
`puppet:"name=>'dns_ip_addresses'"` + Edition *string + EnableSso *bool `puppet:"name=>'enable_sso'"` + SecurityGroupId *string `puppet:"name=>'security_group_id'"` + ShortName *string `puppet:"name=>'short_name'"` + Size *string + Tags *map[string]string + Type *string + VpcSettings *VpcSettings `puppet:"name=>'vpc_settings'"` +} + +type DlmLifecyclePolicy struct { + Description string + ExecutionRoleArn string `puppet:"name=>'execution_role_arn'"` + DlmLifecyclePolicyId *string `puppet:"name=>'dlm_lifecycle_policy_id'"` + PolicyDetails *Details `puppet:"name=>'policy_details'"` + State *string +} + +type DmsCertificate struct { + CertificateId string `puppet:"name=>'certificate_id'"` + DmsCertificateId *string `puppet:"name=>'dms_certificate_id'"` + CertificateArn *string `puppet:"name=>'certificate_arn'"` + CertificatePem *string `puppet:"name=>'certificate_pem'"` + CertificateWallet *string `puppet:"name=>'certificate_wallet'"` +} + +type DmsEndpoint struct { + EndpointId string `puppet:"name=>'endpoint_id'"` + EndpointType string `puppet:"name=>'endpoint_type'"` + EngineName string `puppet:"name=>'engine_name'"` + DmsEndpointId *string `puppet:"name=>'dms_endpoint_id'"` + CertificateArn *string `puppet:"name=>'certificate_arn'"` + DatabaseName *string `puppet:"name=>'database_name'"` + EndpointArn *string `puppet:"name=>'endpoint_arn'"` + ExtraConnectionAttributes *string `puppet:"name=>'extra_connection_attributes'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + MongodbSettings *MongodbSettings `puppet:"name=>'mongodb_settings'"` + Password *string + Port *int64 + S3Settings *S3Settings `puppet:"name=>'s3_settings'"` + ServerName *string `puppet:"name=>'server_name'"` + ServiceAccessRole *string `puppet:"name=>'service_access_role'"` + SslMode *string `puppet:"name=>'ssl_mode'"` + Tags *map[string]string + Username *string +} + +type DmsReplicationInstance struct { + ReplicationInstanceClass string `puppet:"name=>'replication_instance_class'"` + ReplicationInstanceId string `puppet:"name=>'replication_instance_id'"` + DmsReplicationInstanceId *string `puppet:"name=>'dms_replication_instance_id'"` + AllocatedStorage *int64 `puppet:"name=>'allocated_storage'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + EngineVersion *string `puppet:"name=>'engine_version'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + MultiAz *bool `puppet:"name=>'multi_az'"` + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + ReplicationInstanceArn *string `puppet:"name=>'replication_instance_arn'"` + ReplicationInstancePrivateIps *[]string `puppet:"name=>'replication_instance_private_ips'"` + ReplicationInstancePublicIps *[]string `puppet:"name=>'replication_instance_public_ips'"` + ReplicationSubnetGroupId *string `puppet:"name=>'replication_subnet_group_id'"` + Tags *map[string]string + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type DmsReplicationSubnetGroup struct { + ReplicationSubnetGroupDescription string `puppet:"name=>'replication_subnet_group_description'"` + ReplicationSubnetGroupId string `puppet:"name=>'replication_subnet_group_id'"` + SubnetIds []string `puppet:"name=>'subnet_ids'"` + DmsReplicationSubnetGroupId *string `puppet:"name=>'dms_replication_subnet_group_id'"` + 
ReplicationSubnetGroupArn *string `puppet:"name=>'replication_subnet_group_arn'"` + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type DmsReplicationTask struct { + MigrationType string `puppet:"name=>'migration_type'"` + ReplicationInstanceArn string `puppet:"name=>'replication_instance_arn'"` + ReplicationTaskId string `puppet:"name=>'replication_task_id'"` + SourceEndpointArn string `puppet:"name=>'source_endpoint_arn'"` + TableMappings string `puppet:"name=>'table_mappings'"` + TargetEndpointArn string `puppet:"name=>'target_endpoint_arn'"` + DmsReplicationTaskId *string `puppet:"name=>'dms_replication_task_id'"` + CdcStartTime *string `puppet:"name=>'cdc_start_time'"` + ReplicationTaskArn *string `puppet:"name=>'replication_task_arn'"` + ReplicationTaskSettings *string `puppet:"name=>'replication_task_settings'"` + Tags *map[string]string +} + +type DocdbClusterParameterGroup struct { + Family string + DocdbClusterParameterGroupId *string `puppet:"name=>'docdb_cluster_parameter_group_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Parameter *[]Parameter + Tags *map[string]string +} + +type DocdbSubnetGroup struct { + SubnetIds []string `puppet:"name=>'subnet_ids'"` + DocdbSubnetGroupId *string `puppet:"name=>'docdb_subnet_group_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Tags *map[string]string +} + +type DxBgpPeer struct { + AddressFamily string `puppet:"name=>'address_family'"` + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + VirtualInterfaceId string `puppet:"name=>'virtual_interface_id'"` + DxBgpPeerId *string `puppet:"name=>'dx_bgp_peer_id'"` + AmazonAddress *string `puppet:"name=>'amazon_address'"` + BgpAuthKey *string `puppet:"name=>'bgp_auth_key'"` + BgpStatus *string `puppet:"name=>'bgp_status'"` + CustomerAddress *string `puppet:"name=>'customer_address'"` +} + +type DxConnection struct { + Bandwidth string + Location string + Name string + DxConnectionId *string `puppet:"name=>'dx_connection_id'"` + Arn *string + JumboFrameCapable *bool `puppet:"name=>'jumbo_frame_capable'"` + Tags *map[string]string +} + +type DxConnectionAssociation struct { + ConnectionId string `puppet:"name=>'connection_id'"` + LagId string `puppet:"name=>'lag_id'"` + DxConnectionAssociationId *string `puppet:"name=>'dx_connection_association_id'"` +} + +type DxGateway struct { + AmazonSideAsn string `puppet:"name=>'amazon_side_asn'"` + Name string + DxGatewayId *string `puppet:"name=>'dx_gateway_id'"` +} + +type DxGatewayAssociation struct { + DxGatewayId string `puppet:"name=>'dx_gateway_id'"` + VpnGatewayId string `puppet:"name=>'vpn_gateway_id'"` + DxGatewayAssociationId *string `puppet:"name=>'dx_gateway_association_id'"` +} + +type DxHostedPrivateVirtualInterface struct { + AddressFamily string `puppet:"name=>'address_family'"` + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + ConnectionId string `puppet:"name=>'connection_id'"` + Name string + OwnerAccountId string `puppet:"name=>'owner_account_id'"` + Vlan int64 + DxHostedPrivateVirtualInterfaceId *string `puppet:"name=>'dx_hosted_private_virtual_interface_id'"` + AmazonAddress *string `puppet:"name=>'amazon_address'"` + Arn *string + BgpAuthKey *string `puppet:"name=>'bgp_auth_key'"` + CustomerAddress *string `puppet:"name=>'customer_address'"` + JumboFrameCapable *bool `puppet:"name=>'jumbo_frame_capable'"` + Mtu *int64 +} + +type DxHostedPrivateVirtualInterfaceAccepter struct { + VirtualInterfaceId 
string `puppet:"name=>'virtual_interface_id'"` + DxHostedPrivateVirtualInterfaceAccepterId *string `puppet:"name=>'dx_hosted_private_virtual_interface_accepter_id'"` + Arn *string + DxGatewayId *string `puppet:"name=>'dx_gateway_id'"` + Tags *map[string]string + VpnGatewayId *string `puppet:"name=>'vpn_gateway_id'"` +} + +type DxHostedPublicVirtualInterface struct { + AddressFamily string `puppet:"name=>'address_family'"` + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + ConnectionId string `puppet:"name=>'connection_id'"` + Name string + OwnerAccountId string `puppet:"name=>'owner_account_id'"` + RouteFilterPrefixes []string `puppet:"name=>'route_filter_prefixes'"` + Vlan int64 + DxHostedPublicVirtualInterfaceId *string `puppet:"name=>'dx_hosted_public_virtual_interface_id'"` + AmazonAddress *string `puppet:"name=>'amazon_address'"` + Arn *string + BgpAuthKey *string `puppet:"name=>'bgp_auth_key'"` + CustomerAddress *string `puppet:"name=>'customer_address'"` +} + +type DxHostedPublicVirtualInterfaceAccepter struct { + VirtualInterfaceId string `puppet:"name=>'virtual_interface_id'"` + DxHostedPublicVirtualInterfaceAccepterId *string `puppet:"name=>'dx_hosted_public_virtual_interface_accepter_id'"` + Arn *string + Tags *map[string]string +} + +type DxLag struct { + ConnectionsBandwidth string `puppet:"name=>'connections_bandwidth'"` + Location string + Name string + DxLagId *string `puppet:"name=>'dx_lag_id'"` + Arn *string + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + Tags *map[string]string +} + +type DxPrivateVirtualInterface struct { + AddressFamily string `puppet:"name=>'address_family'"` + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + ConnectionId string `puppet:"name=>'connection_id'"` + Name string + Vlan int64 + DxPrivateVirtualInterfaceId *string `puppet:"name=>'dx_private_virtual_interface_id'"` + AmazonAddress *string `puppet:"name=>'amazon_address'"` + Arn *string + BgpAuthKey *string `puppet:"name=>'bgp_auth_key'"` + CustomerAddress *string `puppet:"name=>'customer_address'"` + DxGatewayId *string `puppet:"name=>'dx_gateway_id'"` + JumboFrameCapable *bool `puppet:"name=>'jumbo_frame_capable'"` + Mtu *int64 + Tags *map[string]string + VpnGatewayId *string `puppet:"name=>'vpn_gateway_id'"` +} + +type DxPublicVirtualInterface struct { + AddressFamily string `puppet:"name=>'address_family'"` + BgpAsn int64 `puppet:"name=>'bgp_asn'"` + ConnectionId string `puppet:"name=>'connection_id'"` + Name string + RouteFilterPrefixes []string `puppet:"name=>'route_filter_prefixes'"` + Vlan int64 + DxPublicVirtualInterfaceId *string `puppet:"name=>'dx_public_virtual_interface_id'"` + AmazonAddress *string `puppet:"name=>'amazon_address'"` + Arn *string + BgpAuthKey *string `puppet:"name=>'bgp_auth_key'"` + CustomerAddress *string `puppet:"name=>'customer_address'"` + Tags *map[string]string +} + +type DynamodbGlobalTable struct { + Name string + Replica []Replica + DynamodbGlobalTableId *string `puppet:"name=>'dynamodb_global_table_id'"` + Arn *string +} + +type DynamodbTable struct { + Attribute []Attribute + HashKey string `puppet:"name=>'hash_key'"` + Name string + DynamodbTableId *string `puppet:"name=>'dynamodb_table_id'"` + Arn *string + BillingMode *string `puppet:"name=>'billing_mode'"` + GlobalSecondaryIndex *[]Index `puppet:"name=>'global_secondary_index'"` + LocalSecondaryIndex *[]SecondaryIndex `puppet:"name=>'local_secondary_index'"` + PointInTimeRecovery *Encryption `puppet:"name=>'point_in_time_recovery'"` + RangeKey *string `puppet:"name=>'range_key'"` + ReadCapacity *int64 
`puppet:"name=>'read_capacity'"` + ServerSideEncryption *Encryption `puppet:"name=>'server_side_encryption'"` + StreamArn *string `puppet:"name=>'stream_arn'"` + StreamEnabled *bool `puppet:"name=>'stream_enabled'"` + StreamLabel *string `puppet:"name=>'stream_label'"` + StreamViewType *string `puppet:"name=>'stream_view_type'"` + Tags *map[string]string + Ttl *Ttl + WriteCapacity *int64 `puppet:"name=>'write_capacity'"` +} + +type DynamodbTableItem struct { + HashKey string `puppet:"name=>'hash_key'"` + Item string + TableName string `puppet:"name=>'table_name'"` + DynamodbTableItemId *string `puppet:"name=>'dynamodb_table_item_id'"` + RangeKey *string `puppet:"name=>'range_key'"` +} + +type EbsSnapshot struct { + VolumeId string `puppet:"name=>'volume_id'"` + EbsSnapshotId *string `puppet:"name=>'ebs_snapshot_id'"` + DataEncryptionKeyId *string `puppet:"name=>'data_encryption_key_id'"` + Description *string + Encrypted *bool + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + OwnerAlias *string `puppet:"name=>'owner_alias'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string + VolumeSize *int64 `puppet:"name=>'volume_size'"` +} + +type EbsSnapshotCopy struct { + SourceRegion string `puppet:"name=>'source_region'"` + SourceSnapshotId string `puppet:"name=>'source_snapshot_id'"` + EbsSnapshotCopyId *string `puppet:"name=>'ebs_snapshot_copy_id'"` + DataEncryptionKeyId *string `puppet:"name=>'data_encryption_key_id'"` + Description *string + Encrypted *bool + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + OwnerAlias *string `puppet:"name=>'owner_alias'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string + VolumeId *string `puppet:"name=>'volume_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` +} + +type EbsVolume struct { + AvailabilityZone string `puppet:"name=>'availability_zone'"` + EbsVolumeId *string `puppet:"name=>'ebs_volume_id'"` + Arn *string + Encrypted *bool + Iops *int64 + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Size *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + Tags *map[string]string + Type *string +} + +type Ec2CapacityReservation struct { + AvailabilityZone string `puppet:"name=>'availability_zone'"` + InstanceCount int64 `puppet:"name=>'instance_count'"` + InstancePlatform string `puppet:"name=>'instance_platform'"` + InstanceType string `puppet:"name=>'instance_type'"` + Ec2CapacityReservationId *string `puppet:"name=>'ec2_capacity_reservation_id'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + EndDate *time.Time `puppet:"name=>'end_date'"` + EndDateType *string `puppet:"name=>'end_date_type'"` + EphemeralStorage *bool `puppet:"name=>'ephemeral_storage'"` + InstanceMatchCriteria *string `puppet:"name=>'instance_match_criteria'"` + Tags *map[string]string + Tenancy *string +} + +type Ec2Fleet struct { + LaunchTemplateConfig TemplateConfig `puppet:"name=>'launch_template_config'"` + TargetCapacitySpecification CapacitySpecification `puppet:"name=>'target_capacity_specification'"` + Ec2FleetId *string `puppet:"name=>'ec2_fleet_id'"` + ExcessCapacityTerminationPolicy *string `puppet:"name=>'excess_capacity_termination_policy'"` + OnDemandOptions *DemandOptions `puppet:"name=>'on_demand_options'"` + ReplaceUnhealthyInstances *bool `puppet:"name=>'replace_unhealthy_instances'"` + SpotOptions *SpotOptions `puppet:"name=>'spot_options'"` + Tags *map[string]string + TerminateInstances *bool `puppet:"name=>'terminate_instances'"` + TerminateInstancesWithExpiration *bool 
`puppet:"name=>'terminate_instances_with_expiration'"`
+ Type *string
+}
+
+type Ec2TransitGateway struct {
+ Ec2TransitGatewayId *string `puppet:"name=>'ec2_transit_gateway_id'"`
+ AmazonSideAsn *int64 `puppet:"name=>'amazon_side_asn'"`
+ Arn *string
+ AssociationDefaultRouteTableId *string `puppet:"name=>'association_default_route_table_id'"`
+ AutoAcceptSharedAttachments *string `puppet:"name=>'auto_accept_shared_attachments'"`
+ DefaultRouteTableAssociation *string `puppet:"name=>'default_route_table_association'"`
+ DefaultRouteTablePropagation *string `puppet:"name=>'default_route_table_propagation'"`
+ Description *string
+ DnsSupport *string `puppet:"name=>'dns_support'"`
+ OwnerId *string `puppet:"name=>'owner_id'"`
+ PropagationDefaultRouteTableId *string `puppet:"name=>'propagation_default_route_table_id'"`
+ Tags *map[string]string
+ VpnEcmpSupport *string `puppet:"name=>'vpn_ecmp_support'"`
+}
+
+type Ec2TransitGatewayRoute struct {
+ DestinationCidrBlock string `puppet:"name=>'destination_cidr_block'"`
+ TransitGatewayAttachmentId string `puppet:"name=>'transit_gateway_attachment_id'"`
+ TransitGatewayRouteTableId string `puppet:"name=>'transit_gateway_route_table_id'"`
+ Ec2TransitGatewayRouteId *string `puppet:"name=>'ec2_transit_gateway_route_id'"`
+}
+
+type Ec2TransitGatewayRouteTable struct {
+ TransitGatewayId string `puppet:"name=>'transit_gateway_id'"`
+ Ec2TransitGatewayRouteTableId *string `puppet:"name=>'ec2_transit_gateway_route_table_id'"`
+ DefaultAssociationRouteTable *bool `puppet:"name=>'default_association_route_table'"`
+ DefaultPropagationRouteTable *bool `puppet:"name=>'default_propagation_route_table'"`
+ Tags *map[string]string
+}
+
+type Ec2TransitGatewayRouteTableAssociation struct {
+ TransitGatewayAttachmentId string `puppet:"name=>'transit_gateway_attachment_id'"`
+ TransitGatewayRouteTableId string `puppet:"name=>'transit_gateway_route_table_id'"`
+ Ec2TransitGatewayRouteTableAssociationId *string `puppet:"name=>'ec2_transit_gateway_route_table_association_id'"`
+ ResourceId *string `puppet:"name=>'resource_id'"`
+ ResourceType *string `puppet:"name=>'resource_type'"`
+}
+
+type Ec2TransitGatewayRouteTablePropagation struct {
+ TransitGatewayAttachmentId string `puppet:"name=>'transit_gateway_attachment_id'"`
+ TransitGatewayRouteTableId string `puppet:"name=>'transit_gateway_route_table_id'"`
+ Ec2TransitGatewayRouteTablePropagationId *string `puppet:"name=>'ec2_transit_gateway_route_table_propagation_id'"`
+ ResourceId *string `puppet:"name=>'resource_id'"`
+ ResourceType *string `puppet:"name=>'resource_type'"`
+}
+
+type Ec2TransitGatewayVpcAttachment struct {
+ SubnetIds []string `puppet:"name=>'subnet_ids'"`
+ TransitGatewayId string `puppet:"name=>'transit_gateway_id'"`
+ VpcId string `puppet:"name=>'vpc_id'"`
+ Ec2TransitGatewayVpcAttachmentId *string `puppet:"name=>'ec2_transit_gateway_vpc_attachment_id'"`
+ DnsSupport *string `puppet:"name=>'dns_support'"`
+ Ipv6Support *string `puppet:"name=>'ipv6_support'"`
+ Tags *map[string]string
+ TransitGatewayDefaultRouteTableAssociation *bool `puppet:"name=>'transit_gateway_default_route_table_association'"`
+ TransitGatewayDefaultRouteTablePropagation *bool `puppet:"name=>'transit_gateway_default_route_table_propagation'"`
+ VpcOwnerId *string `puppet:"name=>'vpc_owner_id'"`
+}
+
+type EcrLifecyclePolicy struct {
+ Policy string
+ Repository string
+ EcrLifecyclePolicyId *string `puppet:"name=>'ecr_lifecycle_policy_id'"`
+ RegistryId *string `puppet:"name=>'registry_id'"`
+}
+
+type EcrRepository struct {
+ Name string
+ EcrRepositoryId *string `puppet:"name=>'ecr_repository_id'"`
+ Arn *string
+ RegistryId *string `puppet:"name=>'registry_id'"`
+ RepositoryUrl *string `puppet:"name=>'repository_url'"`
+ Tags *map[string]string
+}
+
+type EcrRepositoryPolicy struct {
+ Policy string
+ Repository string
+ EcrRepositoryPolicyId *string `puppet:"name=>'ecr_repository_policy_id'"`
+ RegistryId *string `puppet:"name=>'registry_id'"`
+}
+
+type EcsCluster struct {
+ Name string
+ EcsClusterId *string `puppet:"name=>'ecs_cluster_id'"`
+ Arn *string
+ Tags *map[string]string
+}
+
+type EcsService struct {
+ Name string
+ TaskDefinition string `puppet:"name=>'task_definition'"`
+ EcsServiceId *string `puppet:"name=>'ecs_service_id'"`
+ Cluster *string
+ DeploymentController *Action `puppet:"name=>'deployment_controller'"`
+ DeploymentMaximumPercent *int64 `puppet:"name=>'deployment_maximum_percent'"`
+ DeploymentMinimumHealthyPercent *int64 `puppet:"name=>'deployment_minimum_healthy_percent'"`
+ DesiredCount *int64 `puppet:"name=>'desired_count'"`
+ EnableEcsManagedTags *bool `puppet:"name=>'enable_ecs_managed_tags'"`
+ HealthCheckGracePeriodSeconds *int64 `puppet:"name=>'health_check_grace_period_seconds'"`
+ IamRole *string `puppet:"name=>'iam_role'"`
+ LaunchType *string `puppet:"name=>'launch_type'"`
+ LoadBalancer *Balancer `puppet:"name=>'load_balancer'"`
+ NetworkConfiguration *NetworkConfiguration `puppet:"name=>'network_configuration'"`
+ OrderedPlacementStrategy *[]PlacementStrategy `puppet:"name=>'ordered_placement_strategy'"`
+ PlacementConstraints *[]Constraints `puppet:"name=>'placement_constraints'"`
+ PlatformVersion *string `puppet:"name=>'platform_version'"`
+ PropagateTags *string `puppet:"name=>'propagate_tags'"`
+ SchedulingStrategy *string `puppet:"name=>'scheduling_strategy'"`
+ ServiceRegistries *Registries `puppet:"name=>'service_registries'"`
+ Tags *map[string]string
+}
+
+type EcsTaskDefinition struct {
+ ContainerDefinitions string `puppet:"name=>'container_definitions'"`
+ Family string
+ EcsTaskDefinitionId *string `puppet:"name=>'ecs_task_definition_id'"`
+ Arn *string
+ Cpu *string
+ ExecutionRoleArn *string `puppet:"name=>'execution_role_arn'"`
+ IpcMode *string `puppet:"name=>'ipc_mode'"`
+ Memory *string
+ NetworkMode *string `puppet:"name=>'network_mode'"`
+ PidMode *string `puppet:"name=>'pid_mode'"`
+ PlacementConstraints *[]Constraints `puppet:"name=>'placement_constraints'"`
+ RequiresCompatibilities *[]string `puppet:"name=>'requires_compatibilities'"`
+ Revision *int64
+ Tags *map[string]string
+ TaskRoleArn *string `puppet:"name=>'task_role_arn'"`
+ Volume *[]DefinitionVolume
+}
+
+type EfsFileSystem struct {
+ EfsFileSystemId *string `puppet:"name=>'efs_file_system_id'"`
+ Arn *string
+ CreationToken *string `puppet:"name=>'creation_token'"`
+ DnsName *string `puppet:"name=>'dns_name'"`
+ Encrypted *bool
+ KmsKeyId *string `puppet:"name=>'kms_key_id'"`
+ PerformanceMode *string `puppet:"name=>'performance_mode'"`
+ ProvisionedThroughputInMibps *float64 `puppet:"name=>'provisioned_throughput_in_mibps'"`
+ Tags *map[string]string
+ ThroughputMode *string `puppet:"name=>'throughput_mode'"`
+}
+
+type EfsMountTarget struct {
+ FileSystemId string `puppet:"name=>'file_system_id'"`
+ SubnetId string `puppet:"name=>'subnet_id'"`
+ EfsMountTargetId *string `puppet:"name=>'efs_mount_target_id'"`
+ DnsName *string `puppet:"name=>'dns_name'"`
+ FileSystemArn *string `puppet:"name=>'file_system_arn'"`
+ IpAddress *string
`puppet:"name=>'ip_address'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` +} + +type EgressOnlyInternetGateway struct { + VpcId string `puppet:"name=>'vpc_id'"` + EgressOnlyInternetGatewayId *string `puppet:"name=>'egress_only_internet_gateway_id'"` +} + +type Eip struct { + EipId *string `puppet:"name=>'eip_id'"` + AllocationId *string `puppet:"name=>'allocation_id'"` + AssociateWithPrivateIp *string `puppet:"name=>'associate_with_private_ip'"` + AssociationId *string `puppet:"name=>'association_id'"` + Domain *string + Instance *string + NetworkInterface *string `puppet:"name=>'network_interface'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PublicIp *string `puppet:"name=>'public_ip'"` + PublicIpv4Pool *string `puppet:"name=>'public_ipv4_pool'"` + Tags *map[string]string + Vpc *bool +} + +type EipAssociation struct { + EipAssociationId *string `puppet:"name=>'eip_association_id'"` + AllocationId *string `puppet:"name=>'allocation_id'"` + AllowReassociation *bool `puppet:"name=>'allow_reassociation'"` + InstanceId *string `puppet:"name=>'instance_id'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PublicIp *string `puppet:"name=>'public_ip'"` +} + +type EksCluster struct { + Name string + RoleArn string `puppet:"name=>'role_arn'"` + VpcConfig ClusterVpcConfig `puppet:"name=>'vpc_config'"` + EksClusterId *string `puppet:"name=>'eks_cluster_id'"` + Arn *string + CertificateAuthority *Authority `puppet:"name=>'certificate_authority'"` + CreatedAt *string `puppet:"name=>'created_at'"` + Endpoint *string + PlatformVersion *string `puppet:"name=>'platform_version'"` + Version *string +} + +type ElasticBeanstalkApplication struct { + Name string + ElasticBeanstalkApplicationId *string `puppet:"name=>'elastic_beanstalk_application_id'"` + AppversionLifecycle *Lifecycle `puppet:"name=>'appversion_lifecycle'"` + Description *string +} + +type ElasticBeanstalkApplicationVersion struct { + Application string + Bucket string + Key string + Name string + ElasticBeanstalkApplicationVersionId *string `puppet:"name=>'elastic_beanstalk_application_version_id'"` + Description *string + ForceDelete *bool `puppet:"name=>'force_delete'"` +} + +type ElasticBeanstalkConfigurationTemplate struct { + Application string + Name string + ElasticBeanstalkConfigurationTemplateId *string `puppet:"name=>'elastic_beanstalk_configuration_template_id'"` + Description *string + EnvironmentId *string `puppet:"name=>'environment_id'"` + Setting *[]Setting + SolutionStackName *string `puppet:"name=>'solution_stack_name'"` +} + +type ElasticBeanstalkEnvironment struct { + Application string + Name string + ElasticBeanstalkEnvironmentId *string `puppet:"name=>'elastic_beanstalk_environment_id'"` + AllSettings *[]Setting `puppet:"name=>'all_settings'"` + Arn *string + AutoscalingGroups *[]string `puppet:"name=>'autoscaling_groups'"` + Cname *string + CnamePrefix *string `puppet:"name=>'cname_prefix'"` + Description *string + Instances *[]string + LaunchConfigurations *[]string `puppet:"name=>'launch_configurations'"` + LoadBalancers *[]string `puppet:"name=>'load_balancers'"` + PlatformArn *string `puppet:"name=>'platform_arn'"` + PollInterval *string `puppet:"name=>'poll_interval'"` + Queues *[]string + Setting *[]Setting + SolutionStackName *string `puppet:"name=>'solution_stack_name'"` + Tags *map[string]string + TemplateName *string 
`puppet:"name=>'template_name'"` + Tier *string + Triggers *[]string + VersionLabel *string `puppet:"name=>'version_label'"` + WaitForReadyTimeout *string `puppet:"name=>'wait_for_ready_timeout'"` +} + +type ElasticacheCluster struct { + ClusterId string `puppet:"name=>'cluster_id'"` + ElasticacheClusterId *string `puppet:"name=>'elasticache_cluster_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + AzMode *string `puppet:"name=>'az_mode'"` + CacheNodes *[]Nodes `puppet:"name=>'cache_nodes'"` + ClusterAddress *string `puppet:"name=>'cluster_address'"` + ConfigurationEndpoint *string `puppet:"name=>'configuration_endpoint'"` + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + MaintenanceWindow *string `puppet:"name=>'maintenance_window'"` + NodeType *string `puppet:"name=>'node_type'"` + NotificationTopicArn *string `puppet:"name=>'notification_topic_arn'"` + NumCacheNodes *int64 `puppet:"name=>'num_cache_nodes'"` + ParameterGroupName *string `puppet:"name=>'parameter_group_name'"` + Port *int64 + PreferredAvailabilityZones *[]string `puppet:"name=>'preferred_availability_zones'"` + ReplicationGroupId *string `puppet:"name=>'replication_group_id'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + SecurityGroupNames *[]string `puppet:"name=>'security_group_names'"` + SnapshotArns *[]string `puppet:"name=>'snapshot_arns'"` + SnapshotName *string `puppet:"name=>'snapshot_name'"` + SnapshotRetentionLimit *int64 `puppet:"name=>'snapshot_retention_limit'"` + SnapshotWindow *string `puppet:"name=>'snapshot_window'"` + SubnetGroupName *string `puppet:"name=>'subnet_group_name'"` + Tags *map[string]string +} + +type ElasticacheParameterGroup struct { + Family string + Name string + ElasticacheParameterGroupId *string `puppet:"name=>'elasticache_parameter_group_id'"` + Description *string + Parameter *[]GroupParameter +} + +type ElasticacheReplicationGroup struct { + ReplicationGroupDescription string `puppet:"name=>'replication_group_description'"` + ReplicationGroupId string `puppet:"name=>'replication_group_id'"` + ElasticacheReplicationGroupId *string `puppet:"name=>'elasticache_replication_group_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + AtRestEncryptionEnabled *bool `puppet:"name=>'at_rest_encryption_enabled'"` + AuthToken *string `puppet:"name=>'auth_token'"` + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + AutomaticFailoverEnabled *bool `puppet:"name=>'automatic_failover_enabled'"` + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + ClusterMode *Mode `puppet:"name=>'cluster_mode'"` + ConfigurationEndpointAddress *string `puppet:"name=>'configuration_endpoint_address'"` + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + MaintenanceWindow *string `puppet:"name=>'maintenance_window'"` + MemberClusters *[]string `puppet:"name=>'member_clusters'"` + NodeType *string `puppet:"name=>'node_type'"` + NotificationTopicArn *string `puppet:"name=>'notification_topic_arn'"` + NumberCacheClusters *int64 `puppet:"name=>'number_cache_clusters'"` + ParameterGroupName *string `puppet:"name=>'parameter_group_name'"` + Port *int64 + PrimaryEndpointAddress *string `puppet:"name=>'primary_endpoint_address'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + SecurityGroupNames *[]string `puppet:"name=>'security_group_names'"` + SnapshotArns *[]string 
`puppet:"name=>'snapshot_arns'"` + SnapshotName *string `puppet:"name=>'snapshot_name'"` + SnapshotRetentionLimit *int64 `puppet:"name=>'snapshot_retention_limit'"` + SnapshotWindow *string `puppet:"name=>'snapshot_window'"` + SubnetGroupName *string `puppet:"name=>'subnet_group_name'"` + Tags *map[string]string + TransitEncryptionEnabled *bool `puppet:"name=>'transit_encryption_enabled'"` +} + +type ElasticacheSecurityGroup struct { + Name string + SecurityGroupNames []string `puppet:"name=>'security_group_names'"` + ElasticacheSecurityGroupId *string `puppet:"name=>'elasticache_security_group_id'"` + Description *string +} + +type ElasticacheSubnetGroup struct { + Name string + SubnetIds []string `puppet:"name=>'subnet_ids'"` + ElasticacheSubnetGroupId *string `puppet:"name=>'elasticache_subnet_group_id'"` + Description *string +} + +type ElasticsearchDomain struct { + DomainName string `puppet:"name=>'domain_name'"` + ElasticsearchDomainId *string `puppet:"name=>'elasticsearch_domain_id'"` + AccessPolicies *string `puppet:"name=>'access_policies'"` + AdvancedOptions *map[string]string `puppet:"name=>'advanced_options'"` + Arn *string + ClusterConfig *ClusterConfig `puppet:"name=>'cluster_config'"` + CognitoOptions *CognitoOptions `puppet:"name=>'cognito_options'"` + DomainId *string `puppet:"name=>'domain_id'"` + EbsOptions *EbsOptions `puppet:"name=>'ebs_options'"` + ElasticsearchVersion *string `puppet:"name=>'elasticsearch_version'"` + EncryptAtRest *Rest `puppet:"name=>'encrypt_at_rest'"` + Endpoint *string + KibanaEndpoint *string `puppet:"name=>'kibana_endpoint'"` + LogPublishingOptions *[]PublishingOptions `puppet:"name=>'log_publishing_options'"` + NodeToNodeEncryption *Encryption `puppet:"name=>'node_to_node_encryption'"` + SnapshotOptions *SnapshotOptions `puppet:"name=>'snapshot_options'"` + Tags *map[string]string + VpcOptions *VpcOptions `puppet:"name=>'vpc_options'"` +} + +type ElasticsearchDomainPolicy struct { + AccessPolicies string `puppet:"name=>'access_policies'"` + DomainName string `puppet:"name=>'domain_name'"` + ElasticsearchDomainPolicyId *string `puppet:"name=>'elasticsearch_domain_policy_id'"` +} + +type ElastictranscoderPipeline struct { + InputBucket string `puppet:"name=>'input_bucket'"` + Role string + ElastictranscoderPipelineId *string `puppet:"name=>'elastictranscoder_pipeline_id'"` + Arn *string + AwsKmsKeyArn *string `puppet:"name=>'aws_kms_key_arn'"` + ContentConfig *Config `puppet:"name=>'content_config'"` + ContentConfigPermissions *[]Permissions `puppet:"name=>'content_config_permissions'"` + Name *string + Notifications *Notifications + OutputBucket *string `puppet:"name=>'output_bucket'"` + ThumbnailConfig *Config `puppet:"name=>'thumbnail_config'"` + ThumbnailConfigPermissions *[]Permissions `puppet:"name=>'thumbnail_config_permissions'"` +} + +type ElastictranscoderPreset struct { + Container string + ElastictranscoderPresetId *string `puppet:"name=>'elastictranscoder_preset_id'"` + Arn *string + Audio *Audio + AudioCodecOptions *CodecOptions `puppet:"name=>'audio_codec_options'"` + Description *string + Name *string + Thumbnails *Thumbnails + Type *string + Video *Video + VideoCodecOptions *map[string]string `puppet:"name=>'video_codec_options'"` + VideoWatermarks *[]Watermarks `puppet:"name=>'video_watermarks'"` +} + +type Elb struct { + Listener []ElbListener + ElbId *string `puppet:"name=>'elb_id'"` + AccessLogs *AccessLogs `puppet:"name=>'access_logs'"` + Arn *string + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + 
ConnectionDraining *bool `puppet:"name=>'connection_draining'"` + ConnectionDrainingTimeout *int64 `puppet:"name=>'connection_draining_timeout'"` + CrossZoneLoadBalancing *bool `puppet:"name=>'cross_zone_load_balancing'"` + DnsName *string `puppet:"name=>'dns_name'"` + HealthCheck *HealthCheck `puppet:"name=>'health_check'"` + IdleTimeout *int64 `puppet:"name=>'idle_timeout'"` + Instances *[]string + Internal *bool + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SourceSecurityGroup *string `puppet:"name=>'source_security_group'"` + SourceSecurityGroupId *string `puppet:"name=>'source_security_group_id'"` + Subnets *[]string + Tags *map[string]string + ZoneId *string `puppet:"name=>'zone_id'"` +} + +type ElbAttachment struct { + Elb string + Instance string + ElbAttachmentId *string `puppet:"name=>'elb_attachment_id'"` +} + +type EmrCluster struct { + Name string + ReleaseLabel string `puppet:"name=>'release_label'"` + ServiceRole string `puppet:"name=>'service_role'"` + EmrClusterId *string `puppet:"name=>'emr_cluster_id'"` + AdditionalInfo *string `puppet:"name=>'additional_info'"` + Applications *[]string + AutoscalingRole *string `puppet:"name=>'autoscaling_role'"` + BootstrapAction *[]BootstrapAction `puppet:"name=>'bootstrap_action'"` + ClusterState *string `puppet:"name=>'cluster_state'"` + Configurations *string + ConfigurationsJson *string `puppet:"name=>'configurations_json'"` + CoreInstanceCount *int64 `puppet:"name=>'core_instance_count'"` + CoreInstanceType *string `puppet:"name=>'core_instance_type'"` + CustomAmiId *string `puppet:"name=>'custom_ami_id'"` + EbsRootVolumeSize *int64 `puppet:"name=>'ebs_root_volume_size'"` + Ec2Attributes *Attributes `puppet:"name=>'ec2_attributes'"` + InstanceGroup *[]InstanceGroup `puppet:"name=>'instance_group'"` + KeepJobFlowAliveWhenNoSteps *bool `puppet:"name=>'keep_job_flow_alive_when_no_steps'"` + KerberosAttributes *KerberosAttributes `puppet:"name=>'kerberos_attributes'"` + LogUri *string `puppet:"name=>'log_uri'"` + MasterInstanceType *string `puppet:"name=>'master_instance_type'"` + MasterPublicDns *string `puppet:"name=>'master_public_dns'"` + ScaleDownBehavior *string `puppet:"name=>'scale_down_behavior'"` + SecurityConfiguration *string `puppet:"name=>'security_configuration'"` + Step *[]ClusterStep + Tags *map[string]string + TerminationProtection *bool `puppet:"name=>'termination_protection'"` + VisibleToAllUsers *bool `puppet:"name=>'visible_to_all_users'"` +} + +type EmrInstanceGroup struct { + ClusterId string `puppet:"name=>'cluster_id'"` + InstanceType string `puppet:"name=>'instance_type'"` + EmrInstanceGroupId *string `puppet:"name=>'emr_instance_group_id'"` + EbsConfig *[]GroupEbsConfig `puppet:"name=>'ebs_config'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + InstanceCount *int64 `puppet:"name=>'instance_count'"` + Name *string + RunningInstanceCount *int64 `puppet:"name=>'running_instance_count'"` + Status *string +} + +type EmrSecurityConfiguration struct { + Configuration string + EmrSecurityConfigurationId *string `puppet:"name=>'emr_security_configuration_id'"` + CreationDate *string `puppet:"name=>'creation_date'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type FlowLog struct { + TrafficType string `puppet:"name=>'traffic_type'"` + FlowLogId *string `puppet:"name=>'flow_log_id'"` + EniId *string `puppet:"name=>'eni_id'"` + IamRoleArn *string `puppet:"name=>'iam_role_arn'"` + LogDestination 
*string `puppet:"name=>'log_destination'"` + LogDestinationType *string `puppet:"name=>'log_destination_type'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type GameliftAlias struct { + Name string + GameliftAliasId *string `puppet:"name=>'gamelift_alias_id'"` + Arn *string + Description *string + RoutingStrategy *RoutingStrategy `puppet:"name=>'routing_strategy'"` +} + +type GameliftBuild struct { + Name string + OperatingSystem string `puppet:"name=>'operating_system'"` + GameliftBuildId *string `puppet:"name=>'gamelift_build_id'"` + StorageLocation *StorageLocation `puppet:"name=>'storage_location'"` + Version *string +} + +type GameliftFleet struct { + BuildId string `puppet:"name=>'build_id'"` + Ec2InstanceType string `puppet:"name=>'ec2_instance_type'"` + Name string + GameliftFleetId *string `puppet:"name=>'gamelift_fleet_id'"` + Arn *string + Description *string + Ec2InboundPermission *[]Permission `puppet:"name=>'ec2_inbound_permission'"` + LogPaths *[]string `puppet:"name=>'log_paths'"` + MetricGroups *[]string `puppet:"name=>'metric_groups'"` + NewGameSessionProtectionPolicy *string `puppet:"name=>'new_game_session_protection_policy'"` + OperatingSystem *string `puppet:"name=>'operating_system'"` + ResourceCreationLimitPolicy *LimitPolicy `puppet:"name=>'resource_creation_limit_policy'"` + RuntimeConfiguration *RuntimeConfiguration `puppet:"name=>'runtime_configuration'"` +} + +type GameliftGameSessionQueue struct { + Name string + GameliftGameSessionQueueId *string `puppet:"name=>'gamelift_game_session_queue_id'"` + Arn *string + Destinations *[]string + PlayerLatencyPolicy *[]LatencyPolicy `puppet:"name=>'player_latency_policy'"` + TimeoutInSeconds *int64 `puppet:"name=>'timeout_in_seconds'"` +} + +type GenericHandler struct { +} + +type GlacierVault struct { + Name string + GlacierVaultId *string `puppet:"name=>'glacier_vault_id'"` + AccessPolicy *string `puppet:"name=>'access_policy'"` + Arn *string + Location *string + Notification *[]Notification + Tags *map[string]string +} + +type GlacierVaultLock struct { + CompleteLock bool `puppet:"name=>'complete_lock'"` + Policy string + VaultName string `puppet:"name=>'vault_name'"` + GlacierVaultLockId *string `puppet:"name=>'glacier_vault_lock_id'"` + IgnoreDeletionError *bool `puppet:"name=>'ignore_deletion_error'"` +} + +type GlobalacceleratorAccelerator struct { + Name string + GlobalacceleratorAcceleratorId *string `puppet:"name=>'globalaccelerator_accelerator_id'"` + Attributes *AcceleratorAttributes + Enabled *bool + IpAddressType *string `puppet:"name=>'ip_address_type'"` + IpSets *[]Sets `puppet:"name=>'ip_sets'"` +} + +type GlueCatalogDatabase struct { + Name string + GlueCatalogDatabaseId *string `puppet:"name=>'glue_catalog_database_id'"` + CatalogId *string `puppet:"name=>'catalog_id'"` + Description *string + LocationUri *string `puppet:"name=>'location_uri'"` + Parameters *map[string]string +} + +type GlueCatalogTable struct { + DatabaseName string `puppet:"name=>'database_name'"` + Name string + GlueCatalogTableId *string `puppet:"name=>'glue_catalog_table_id'"` + CatalogId *string `puppet:"name=>'catalog_id'"` + Description *string + Owner *string + Parameters *map[string]string + PartitionKeys *[]KeysColumns `puppet:"name=>'partition_keys'"` + Retention *int64 + StorageDescriptor *Descriptor `puppet:"name=>'storage_descriptor'"` + TableType *string `puppet:"name=>'table_type'"` + ViewExpandedText *string `puppet:"name=>'view_expanded_text'"` + ViewOriginalText 
*string `puppet:"name=>'view_original_text'"` +} + +type GlueClassifier struct { + Name string + GlueClassifierId *string `puppet:"name=>'glue_classifier_id'"` + GrokClassifier *Classifier `puppet:"name=>'grok_classifier'"` + JsonClassifier *JsonClassifier `puppet:"name=>'json_classifier'"` + XmlClassifier *XmlClassifier `puppet:"name=>'xml_classifier'"` +} + +type GlueConnection struct { + ConnectionProperties map[string]string `puppet:"name=>'connection_properties'"` + Name string + GlueConnectionId *string `puppet:"name=>'glue_connection_id'"` + CatalogId *string `puppet:"name=>'catalog_id'"` + ConnectionType *string `puppet:"name=>'connection_type'"` + Description *string + MatchCriteria *[]string `puppet:"name=>'match_criteria'"` + PhysicalConnectionRequirements *Requirements `puppet:"name=>'physical_connection_requirements'"` +} + +type GlueCrawler struct { + DatabaseName string `puppet:"name=>'database_name'"` + Name string + Role string + GlueCrawlerId *string `puppet:"name=>'glue_crawler_id'"` + Classifiers *[]string + Configuration *string + Description *string + DynamodbTarget *[]DynamodbTarget `puppet:"name=>'dynamodb_target'"` + JdbcTarget *[]JdbcTarget `puppet:"name=>'jdbc_target'"` + S3Target *[]S3Target `puppet:"name=>'s3_target'"` + Schedule *string + SchemaChangePolicy *ChangePolicy `puppet:"name=>'schema_change_policy'"` + SecurityConfiguration *string `puppet:"name=>'security_configuration'"` + TablePrefix *string `puppet:"name=>'table_prefix'"` +} + +type GlueJob struct { + Name string + RoleArn string `puppet:"name=>'role_arn'"` + GlueJobId *string `puppet:"name=>'glue_job_id'"` + AllocatedCapacity *int64 `puppet:"name=>'allocated_capacity'"` + Command *Command + Connections *[]string + DefaultArguments *map[string]string `puppet:"name=>'default_arguments'"` + Description *string + ExecutionProperty *Property `puppet:"name=>'execution_property'"` + MaxRetries *int64 `puppet:"name=>'max_retries'"` + SecurityConfiguration *string `puppet:"name=>'security_configuration'"` + Timeout *int64 +} + +type GlueSecurityConfiguration struct { + Name string + GlueSecurityConfigurationId *string `puppet:"name=>'glue_security_configuration_id'"` + EncryptionConfiguration *ConfigurationEncryptionConfiguration `puppet:"name=>'encryption_configuration'"` +} + +type GlueTrigger struct { + Actions []Actions + Name string + Type string + GlueTriggerId *string `puppet:"name=>'glue_trigger_id'"` + Description *string + Enabled *bool + Predicate *Predicate + Schedule *string +} + +type GuarddutyDetector struct { + GuarddutyDetectorId *string `puppet:"name=>'guardduty_detector_id'"` + AccountId *string `puppet:"name=>'account_id'"` + Enable *bool + FindingPublishingFrequency *string `puppet:"name=>'finding_publishing_frequency'"` +} + +type GuarddutyIpset struct { + Activate bool + DetectorId string `puppet:"name=>'detector_id'"` + Format string + Location string + Name string + GuarddutyIpsetId *string `puppet:"name=>'guardduty_ipset_id'"` +} + +type GuarddutyMember struct { + AccountId string `puppet:"name=>'account_id'"` + DetectorId string `puppet:"name=>'detector_id'"` + Email string + GuarddutyMemberId *string `puppet:"name=>'guardduty_member_id'"` + DisableEmailNotification *bool `puppet:"name=>'disable_email_notification'"` + InvitationMessage *string `puppet:"name=>'invitation_message'"` + Invite *bool + RelationshipStatus *string `puppet:"name=>'relationship_status'"` +} + +type GuarddutyThreatintelset struct { + Activate bool + DetectorId string `puppet:"name=>'detector_id'"` + 
Format string + Location string + Name string + GuarddutyThreatintelsetId *string `puppet:"name=>'guardduty_threatintelset_id'"` +} + +type IamAccessKey struct { + User string + IamAccessKeyId *string `puppet:"name=>'iam_access_key_id'"` + EncryptedSecret *string `puppet:"name=>'encrypted_secret'"` + KeyFingerprint *string `puppet:"name=>'key_fingerprint'"` + PgpKey *string `puppet:"name=>'pgp_key'"` + SesSmtpPassword *string `puppet:"name=>'ses_smtp_password'"` + Status *string +} + +type IamAccountAlias struct { + AccountAlias string `puppet:"name=>'account_alias'"` + IamAccountAliasId *string `puppet:"name=>'iam_account_alias_id'"` +} + +type IamAccountPasswordPolicy struct { + IamAccountPasswordPolicyId *string `puppet:"name=>'iam_account_password_policy_id'"` + AllowUsersToChangePassword *bool `puppet:"name=>'allow_users_to_change_password'"` + ExpirePasswords *bool `puppet:"name=>'expire_passwords'"` + HardExpiry *bool `puppet:"name=>'hard_expiry'"` + MaxPasswordAge *int64 `puppet:"name=>'max_password_age'"` + MinimumPasswordLength *int64 `puppet:"name=>'minimum_password_length'"` + PasswordReusePrevention *int64 `puppet:"name=>'password_reuse_prevention'"` + RequireLowercaseCharacters *bool `puppet:"name=>'require_lowercase_characters'"` + RequireNumbers *bool `puppet:"name=>'require_numbers'"` + RequireSymbols *bool `puppet:"name=>'require_symbols'"` + RequireUppercaseCharacters *bool `puppet:"name=>'require_uppercase_characters'"` +} + +type IamGroup struct { + Name string + IamGroupId *string `puppet:"name=>'iam_group_id'"` + Arn *string + Path *string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type IamGroupMembership struct { + Group string + Name string + Users []string + IamGroupMembershipId *string `puppet:"name=>'iam_group_membership_id'"` +} + +type IamGroupPolicy struct { + Group string + Policy string + IamGroupPolicyId *string `puppet:"name=>'iam_group_policy_id'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type IamGroupPolicyAttachment struct { + Group string + PolicyArn string `puppet:"name=>'policy_arn'"` + IamGroupPolicyAttachmentId *string `puppet:"name=>'iam_group_policy_attachment_id'"` +} + +type IamInstanceProfile struct { + IamInstanceProfileId *string `puppet:"name=>'iam_instance_profile_id'"` + Arn *string + CreateDate *string `puppet:"name=>'create_date'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Path *string + Role *string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type IamOpenidConnectProvider struct { + ClientIdList []string `puppet:"name=>'client_id_list'"` + ThumbprintList []string `puppet:"name=>'thumbprint_list'"` + Url string + IamOpenidConnectProviderId *string `puppet:"name=>'iam_openid_connect_provider_id'"` + Arn *string +} + +type IamPolicy struct { + Policy string + IamPolicyId *string `puppet:"name=>'iam_policy_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Path *string +} + +type IamPolicyAttachment struct { + Name string + PolicyArn string `puppet:"name=>'policy_arn'"` + IamPolicyAttachmentId *string `puppet:"name=>'iam_policy_attachment_id'"` + Groups *[]string + Roles *[]string + Users *[]string +} + +type IamRole struct { + AssumeRolePolicy string `puppet:"name=>'assume_role_policy'"` + IamRoleId *string `puppet:"name=>'iam_role_id'"` + Arn *string + CreateDate *string `puppet:"name=>'create_date'"` + Description *string + ForceDetachPolicies *bool `puppet:"name=>'force_detach_policies'"` + 
MaxSessionDuration *int64 `puppet:"name=>'max_session_duration'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Path *string + PermissionsBoundary *string `puppet:"name=>'permissions_boundary'"` + Tags *map[string]string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type IamRolePolicy struct { + Policy string + Role string + IamRolePolicyId *string `puppet:"name=>'iam_role_policy_id'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type IamRolePolicyAttachment struct { + PolicyArn string `puppet:"name=>'policy_arn'"` + Role string + IamRolePolicyAttachmentId *string `puppet:"name=>'iam_role_policy_attachment_id'"` +} + +type IamSamlProvider struct { + Name string + SamlMetadataDocument string `puppet:"name=>'saml_metadata_document'"` + IamSamlProviderId *string `puppet:"name=>'iam_saml_provider_id'"` + Arn *string + ValidUntil *string `puppet:"name=>'valid_until'"` +} + +type IamServerCertificate struct { + CertificateBody string `puppet:"name=>'certificate_body'"` + PrivateKey string `puppet:"name=>'private_key'"` + IamServerCertificateId *string `puppet:"name=>'iam_server_certificate_id'"` + Arn *string + CertificateChain *string `puppet:"name=>'certificate_chain'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Path *string +} + +type IamServiceLinkedRole struct { + AwsServiceName string `puppet:"name=>'aws_service_name'"` + IamServiceLinkedRoleId *string `puppet:"name=>'iam_service_linked_role_id'"` + Arn *string + CreateDate *string `puppet:"name=>'create_date'"` + CustomSuffix *string `puppet:"name=>'custom_suffix'"` + Description *string + Name *string + Path *string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type IamUser struct { + Name string + IamUserId *string `puppet:"name=>'iam_user_id'"` + Arn *string + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + Path *string + PermissionsBoundary *string `puppet:"name=>'permissions_boundary'"` + Tags *map[string]string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type IamUserGroupMembership struct { + Groups []string + User string + IamUserGroupMembershipId *string `puppet:"name=>'iam_user_group_membership_id'"` +} + +type IamUserLoginProfile struct { + PgpKey string `puppet:"name=>'pgp_key'"` + User string + IamUserLoginProfileId *string `puppet:"name=>'iam_user_login_profile_id'"` + EncryptedPassword *string `puppet:"name=>'encrypted_password'"` + KeyFingerprint *string `puppet:"name=>'key_fingerprint'"` + PasswordLength *int64 `puppet:"name=>'password_length'"` + PasswordResetRequired *bool `puppet:"name=>'password_reset_required'"` +} + +type IamUserPolicy struct { + Policy string + User string + IamUserPolicyId *string `puppet:"name=>'iam_user_policy_id'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type IamUserPolicyAttachment struct { + PolicyArn string `puppet:"name=>'policy_arn'"` + User string + IamUserPolicyAttachmentId *string `puppet:"name=>'iam_user_policy_attachment_id'"` +} + +type IamUserSshKey struct { + Encoding string + PublicKey string `puppet:"name=>'public_key'"` + Username string + IamUserSshKeyId *string `puppet:"name=>'iam_user_ssh_key_id'"` + Fingerprint *string + SshPublicKeyId *string `puppet:"name=>'ssh_public_key_id'"` + Status *string +} + +type InspectorAssessmentTarget struct { + Name string + InspectorAssessmentTargetId *string `puppet:"name=>'inspector_assessment_target_id'"` + Arn *string + ResourceGroupArn *string `puppet:"name=>'resource_group_arn'"` +} + +type 
InspectorAssessmentTemplate struct { + Duration int64 + Name string + RulesPackageArns []string `puppet:"name=>'rules_package_arns'"` + TargetArn string `puppet:"name=>'target_arn'"` + InspectorAssessmentTemplateId *string `puppet:"name=>'inspector_assessment_template_id'"` + Arn *string +} + +type InspectorResourceGroup struct { + Tags map[string]string + InspectorResourceGroupId *string `puppet:"name=>'inspector_resource_group_id'"` + Arn *string +} + +type Instance struct { + Ami string + InstanceType string `puppet:"name=>'instance_type'"` + InstanceId *string `puppet:"name=>'instance_id'"` + Arn *string + AssociatePublicIpAddress *bool `puppet:"name=>'associate_public_ip_address'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + CpuCoreCount *int64 `puppet:"name=>'cpu_core_count'"` + CpuThreadsPerCore *int64 `puppet:"name=>'cpu_threads_per_core'"` + CreditSpecification *Specification `puppet:"name=>'credit_specification'"` + DisableApiTermination *bool `puppet:"name=>'disable_api_termination'"` + EbsBlockDevice *[]RequestEbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + EphemeralBlockDevice *[]InstanceEphemeralBlockDevice `puppet:"name=>'ephemeral_block_device'"` + GetPasswordData *bool `puppet:"name=>'get_password_data'"` + HostId *string `puppet:"name=>'host_id'"` + IamInstanceProfile *string `puppet:"name=>'iam_instance_profile'"` + InstanceInitiatedShutdownBehavior *string `puppet:"name=>'instance_initiated_shutdown_behavior'"` + InstanceState *string `puppet:"name=>'instance_state'"` + Ipv6AddressCount *int64 `puppet:"name=>'ipv6_address_count'"` + Ipv6Addresses *[]string `puppet:"name=>'ipv6_addresses'"` + KeyName *string `puppet:"name=>'key_name'"` + Monitoring *bool + NetworkInterface *[]Interface `puppet:"name=>'network_interface'"` + PasswordData *string `puppet:"name=>'password_data'"` + PlacementGroup *string `puppet:"name=>'placement_group'"` + PrimaryNetworkInterfaceId *string `puppet:"name=>'primary_network_interface_id'"` + PrivateDns *string `puppet:"name=>'private_dns'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PublicDns *string `puppet:"name=>'public_dns'"` + PublicIp *string `puppet:"name=>'public_ip'"` + RootBlockDevice *RootBlockDevice `puppet:"name=>'root_block_device'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SourceDestCheck *bool `puppet:"name=>'source_dest_check'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Tags *map[string]string + Tenancy *string + UserData *string `puppet:"name=>'user_data'"` + UserDataBase64 *string `puppet:"name=>'user_data_base64'"` + VolumeTags *map[string]string `puppet:"name=>'volume_tags'"` + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type InternetGateway struct { + InternetGatewayId *string `puppet:"name=>'internet_gateway_id'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type IotCertificate struct { + Active bool + Csr string + IotCertificateId *string `puppet:"name=>'iot_certificate_id'"` + Arn *string +} + +type IotPolicy struct { + Name string + Policy string + IotPolicyId *string `puppet:"name=>'iot_policy_id'"` + Arn *string + DefaultVersionId *string `puppet:"name=>'default_version_id'"` +} + +type IotPolicyAttachment struct { + Policy string + Target string + IotPolicyAttachmentId *string `puppet:"name=>'iot_policy_attachment_id'"` +} + +type IotThing struct { + Name string + IotThingId *string 
`puppet:"name=>'iot_thing_id'"` + Arn *string + Attributes *map[string]string + DefaultClientId *string `puppet:"name=>'default_client_id'"` + ThingTypeName *string `puppet:"name=>'thing_type_name'"` + Version *int64 +} + +type IotThingPrincipalAttachment struct { + Principal string + Thing string + IotThingPrincipalAttachmentId *string `puppet:"name=>'iot_thing_principal_attachment_id'"` +} + +type IotThingType struct { + Name string + IotThingTypeId *string `puppet:"name=>'iot_thing_type_id'"` + Arn *string + Deprecated *bool + Properties *TypeProperties +} + +type IotTopicRule struct { + Enabled bool + Name string + Sql string + SqlVersion string `puppet:"name=>'sql_version'"` + IotTopicRuleId *string `puppet:"name=>'iot_topic_rule_id'"` + Arn *string + CloudwatchAlarm *[]Alarm `puppet:"name=>'cloudwatch_alarm'"` + CloudwatchMetric *[]Metric `puppet:"name=>'cloudwatch_metric'"` + Description *string + Dynamodb *[]Dynamodb + Elasticsearch *[]Elasticsearch + Firehose *[]Firehose + Kinesis *[]Kinesis + Lambda *[]ConfigLambda + Republish *[]Republish + S3 *[]RuleS3 + Sns *[]Sns + Sqs *[]Sqs +} + +type KeyPair struct { + PublicKey string `puppet:"name=>'public_key'"` + KeyPairId *string `puppet:"name=>'key_pair_id'"` + Fingerprint *string + KeyName *string `puppet:"name=>'key_name'"` + KeyNamePrefix *string `puppet:"name=>'key_name_prefix'"` +} + +type KinesisAnalyticsApplication struct { + Name string + KinesisAnalyticsApplicationId *string `puppet:"name=>'kinesis_analytics_application_id'"` + Arn *string + CloudwatchLoggingOptions *LoggingOptions `puppet:"name=>'cloudwatch_logging_options'"` + Code *string + CreateTimestamp *string `puppet:"name=>'create_timestamp'"` + Description *string + Inputs *Inputs + LastUpdateTimestamp *string `puppet:"name=>'last_update_timestamp'"` + Outputs *[]Outputs + ReferenceDataSources *DataSources `puppet:"name=>'reference_data_sources'"` + Status *string + Version *int64 +} + +type KinesisFirehoseDeliveryStream struct { + Destination string + Name string + KinesisFirehoseDeliveryStreamId *string `puppet:"name=>'kinesis_firehose_delivery_stream_id'"` + Arn *string + DestinationId *string `puppet:"name=>'destination_id'"` + ElasticsearchConfiguration *ElasticsearchConfiguration `puppet:"name=>'elasticsearch_configuration'"` + ExtendedS3Configuration *S3Configuration `puppet:"name=>'extended_s3_configuration'"` + KinesisSourceConfiguration *SourceConfiguration `puppet:"name=>'kinesis_source_configuration'"` + RedshiftConfiguration *RedshiftConfiguration `puppet:"name=>'redshift_configuration'"` + S3Configuration *BackupConfiguration `puppet:"name=>'s3_configuration'"` + SplunkConfiguration *SplunkConfiguration `puppet:"name=>'splunk_configuration'"` + Tags *map[string]string + VersionId *string `puppet:"name=>'version_id'"` +} + +type KinesisStream struct { + Name string + ShardCount int64 `puppet:"name=>'shard_count'"` + KinesisStreamId *string `puppet:"name=>'kinesis_stream_id'"` + Arn *string + EncryptionType *string `puppet:"name=>'encryption_type'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + RetentionPeriod *int64 `puppet:"name=>'retention_period'"` + ShardLevelMetrics *[]string `puppet:"name=>'shard_level_metrics'"` + Tags *map[string]string +} + +type KmsAlias struct { + TargetKeyId string `puppet:"name=>'target_key_id'"` + KmsAliasId *string `puppet:"name=>'kms_alias_id'"` + Arn *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + TargetKeyArn *string `puppet:"name=>'target_key_arn'"` +} + +type KmsGrant struct { + 
GranteePrincipal string `puppet:"name=>'grantee_principal'"` + KeyId string `puppet:"name=>'key_id'"` + Operations []string + KmsGrantId *string `puppet:"name=>'kms_grant_id'"` + Constraints *[]GrantConstraints + GrantCreationTokens *[]string `puppet:"name=>'grant_creation_tokens'"` + GrantId *string `puppet:"name=>'grant_id'"` + GrantToken *string `puppet:"name=>'grant_token'"` + Name *string + RetireOnDelete *bool `puppet:"name=>'retire_on_delete'"` + RetiringPrincipal *string `puppet:"name=>'retiring_principal'"` +} + +type KmsKey struct { + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Arn *string + DeletionWindowInDays *int64 `puppet:"name=>'deletion_window_in_days'"` + Description *string + EnableKeyRotation *bool `puppet:"name=>'enable_key_rotation'"` + IsEnabled *bool `puppet:"name=>'is_enabled'"` + KeyId *string `puppet:"name=>'key_id'"` + KeyUsage *string `puppet:"name=>'key_usage'"` + Policy *string + Tags *map[string]string +} + +type LambdaAlias struct { + FunctionName string `puppet:"name=>'function_name'"` + FunctionVersion string `puppet:"name=>'function_version'"` + Name string + LambdaAliasId *string `puppet:"name=>'lambda_alias_id'"` + Arn *string + Description *string + InvokeArn *string `puppet:"name=>'invoke_arn'"` + RoutingConfig *AliasRoutingConfig `puppet:"name=>'routing_config'"` +} + +type LambdaEventSourceMapping struct { + EventSourceArn string `puppet:"name=>'event_source_arn'"` + FunctionName string `puppet:"name=>'function_name'"` + LambdaEventSourceMappingId *string `puppet:"name=>'lambda_event_source_mapping_id'"` + BatchSize *int64 `puppet:"name=>'batch_size'"` + Enabled *bool + FunctionArn *string `puppet:"name=>'function_arn'"` + LastModified *string `puppet:"name=>'last_modified'"` + LastProcessingResult *string `puppet:"name=>'last_processing_result'"` + StartingPosition *string `puppet:"name=>'starting_position'"` + StartingPositionTimestamp *time.Time `puppet:"name=>'starting_position_timestamp'"` + State *string + StateTransitionReason *string `puppet:"name=>'state_transition_reason'"` + Uuid *string +} + +type LambdaFunction struct { + FunctionName string `puppet:"name=>'function_name'"` + Handler string + Role string + Runtime string + LambdaFunctionId *string `puppet:"name=>'lambda_function_id'"` + Arn *string + DeadLetterConfig *LetterConfig `puppet:"name=>'dead_letter_config'"` + Description *string + Environment *FunctionEnvironment + Filename *string + InvokeArn *string `puppet:"name=>'invoke_arn'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + LastModified *string `puppet:"name=>'last_modified'"` + Layers *[]string + MemorySize *int64 `puppet:"name=>'memory_size'"` + Publish *bool + QualifiedArn *string `puppet:"name=>'qualified_arn'"` + ReservedConcurrentExecutions *int64 `puppet:"name=>'reserved_concurrent_executions'"` + S3Bucket *string `puppet:"name=>'s3_bucket'"` + S3Key *string `puppet:"name=>'s3_key'"` + S3ObjectVersion *string `puppet:"name=>'s3_object_version'"` + SourceCodeHash *string `puppet:"name=>'source_code_hash'"` + SourceCodeSize *int64 `puppet:"name=>'source_code_size'"` + Tags *map[string]string + Timeout *int64 + TracingConfig *TracingConfig `puppet:"name=>'tracing_config'"` + Version *string + VpcConfig *FunctionVpcConfig `puppet:"name=>'vpc_config'"` +} + +type LambdaLayerVersion struct { + LayerName string `puppet:"name=>'layer_name'"` + LambdaLayerVersionId *string `puppet:"name=>'lambda_layer_version_id'"` + Arn *string + CompatibleRuntimes *[]string `puppet:"name=>'compatible_runtimes'"` + CreatedDate 
*string `puppet:"name=>'created_date'"` + Description *string + Filename *string + LayerArn *string `puppet:"name=>'layer_arn'"` + LicenseInfo *string `puppet:"name=>'license_info'"` + S3Bucket *string `puppet:"name=>'s3_bucket'"` + S3Key *string `puppet:"name=>'s3_key'"` + S3ObjectVersion *string `puppet:"name=>'s3_object_version'"` + SourceCodeHash *string `puppet:"name=>'source_code_hash'"` + SourceCodeSize *int64 `puppet:"name=>'source_code_size'"` + Version *string +} + +type LambdaPermission struct { + Action string + FunctionName string `puppet:"name=>'function_name'"` + Principal string + LambdaPermissionId *string `puppet:"name=>'lambda_permission_id'"` + EventSourceToken *string `puppet:"name=>'event_source_token'"` + Qualifier *string + SourceAccount *string `puppet:"name=>'source_account'"` + SourceArn *string `puppet:"name=>'source_arn'"` + StatementId *string `puppet:"name=>'statement_id'"` + StatementIdPrefix *string `puppet:"name=>'statement_id_prefix'"` +} + +type LaunchConfiguration struct { + ImageId string `puppet:"name=>'image_id'"` + InstanceType string `puppet:"name=>'instance_type'"` + LaunchConfigurationId *string `puppet:"name=>'launch_configuration_id'"` + AssociatePublicIpAddress *bool `puppet:"name=>'associate_public_ip_address'"` + EbsBlockDevice *[]ConfigurationEbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + EnableMonitoring *bool `puppet:"name=>'enable_monitoring'"` + EphemeralBlockDevice *[]Device `puppet:"name=>'ephemeral_block_device'"` + IamInstanceProfile *string `puppet:"name=>'iam_instance_profile'"` + KeyName *string `puppet:"name=>'key_name'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + PlacementTenancy *string `puppet:"name=>'placement_tenancy'"` + RootBlockDevice *BlockDevice `puppet:"name=>'root_block_device'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SpotPrice *string `puppet:"name=>'spot_price'"` + UserData *string `puppet:"name=>'user_data'"` + UserDataBase64 *string `puppet:"name=>'user_data_base64'"` + VpcClassicLinkId *string `puppet:"name=>'vpc_classic_link_id'"` + VpcClassicLinkSecurityGroups *[]string `puppet:"name=>'vpc_classic_link_security_groups'"` +} + +type LaunchTemplate struct { + LaunchTemplateId *string `puppet:"name=>'launch_template_id'"` + Arn *string + BlockDeviceMappings *[]Mappings `puppet:"name=>'block_device_mappings'"` + CapacityReservationSpecification *ReservationSpecification `puppet:"name=>'capacity_reservation_specification'"` + CreditSpecification *Specification `puppet:"name=>'credit_specification'"` + DefaultVersion *int64 `puppet:"name=>'default_version'"` + Description *string + DisableApiTermination *bool `puppet:"name=>'disable_api_termination'"` + EbsOptimized *string `puppet:"name=>'ebs_optimized'"` + ElasticGpuSpecifications *[]Action `puppet:"name=>'elastic_gpu_specifications'"` + IamInstanceProfile *Profile `puppet:"name=>'iam_instance_profile'"` + ImageId *string `puppet:"name=>'image_id'"` + InstanceInitiatedShutdownBehavior *string `puppet:"name=>'instance_initiated_shutdown_behavior'"` + InstanceMarketOptions *MarketOptions `puppet:"name=>'instance_market_options'"` + InstanceType *string `puppet:"name=>'instance_type'"` + KernelId *string `puppet:"name=>'kernel_id'"` + KeyName *string `puppet:"name=>'key_name'"` + LatestVersion *int64 `puppet:"name=>'latest_version'"` + LicenseSpecification *[]LicenseSpecification `puppet:"name=>'license_specification'"` + Monitoring *Monitoring + Name 
*string + NamePrefix *string `puppet:"name=>'name_prefix'"` + NetworkInterfaces *[]Interfaces `puppet:"name=>'network_interfaces'"` + Placement *Placement + RamDiskId *string `puppet:"name=>'ram_disk_id'"` + SecurityGroupNames *[]string `puppet:"name=>'security_group_names'"` + TagSpecifications *[]Specifications `puppet:"name=>'tag_specifications'"` + Tags *map[string]string + UserData *string `puppet:"name=>'user_data'"` + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type Lb struct { + LbId *string `puppet:"name=>'lb_id'"` + AccessLogs *Logs `puppet:"name=>'access_logs'"` + Arn *string + ArnSuffix *string `puppet:"name=>'arn_suffix'"` + DnsName *string `puppet:"name=>'dns_name'"` + EnableCrossZoneLoadBalancing *bool `puppet:"name=>'enable_cross_zone_load_balancing'"` + EnableDeletionProtection *bool `puppet:"name=>'enable_deletion_protection'"` + EnableHttp2 *bool `puppet:"name=>'enable_http2'"` + IdleTimeout *int64 `puppet:"name=>'idle_timeout'"` + Internal *bool + IpAddressType *string `puppet:"name=>'ip_address_type'"` + LoadBalancerType *string `puppet:"name=>'load_balancer_type'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SubnetMapping *[]Mapping `puppet:"name=>'subnet_mapping'"` + Subnets *[]string + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` + ZoneId *string `puppet:"name=>'zone_id'"` +} + +type LbCookieStickinessPolicy struct { + LbPort int64 `puppet:"name=>'lb_port'"` + LoadBalancer string `puppet:"name=>'load_balancer'"` + Name string + LbCookieStickinessPolicyId *string `puppet:"name=>'lb_cookie_stickiness_policy_id'"` + CookieExpirationPeriod *int64 `puppet:"name=>'cookie_expiration_period'"` +} + +type LbListener struct { + DefaultAction []DefaultAction `puppet:"name=>'default_action'"` + LoadBalancerArn string `puppet:"name=>'load_balancer_arn'"` + Port int64 + LbListenerId *string `puppet:"name=>'lb_listener_id'"` + Arn *string + CertificateArn *string `puppet:"name=>'certificate_arn'"` + Protocol *string + SslPolicy *string `puppet:"name=>'ssl_policy'"` +} + +type LbListenerCertificate struct { + CertificateArn string `puppet:"name=>'certificate_arn'"` + ListenerArn string `puppet:"name=>'listener_arn'"` + LbListenerCertificateId *string `puppet:"name=>'lb_listener_certificate_id'"` +} + +type LbListenerRule struct { + Action []DefaultAction + Condition []Condition + ListenerArn string `puppet:"name=>'listener_arn'"` + LbListenerRuleId *string `puppet:"name=>'lb_listener_rule_id'"` + Arn *string + Priority *int64 +} + +type LbSslNegotiationPolicy struct { + LbPort int64 `puppet:"name=>'lb_port'"` + LoadBalancer string `puppet:"name=>'load_balancer'"` + Name string + LbSslNegotiationPolicyId *string `puppet:"name=>'lb_ssl_negotiation_policy_id'"` + Attribute *[]GroupParameter +} + +type LbTargetGroup struct { + LbTargetGroupId *string `puppet:"name=>'lb_target_group_id'"` + Arn *string + ArnSuffix *string `puppet:"name=>'arn_suffix'"` + DeregistrationDelay *int64 `puppet:"name=>'deregistration_delay'"` + HealthCheck *Check `puppet:"name=>'health_check'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Port *int64 + Protocol *string + ProxyProtocolV2 *bool `puppet:"name=>'proxy_protocol_v2'"` + SlowStart *int64 `puppet:"name=>'slow_start'"` + Stickiness *Stickiness + Tags *map[string]string + TargetType *string `puppet:"name=>'target_type'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type 
LbTargetGroupAttachment struct { + TargetGroupArn string `puppet:"name=>'target_group_arn'"` + TargetId string `puppet:"name=>'target_id'"` + LbTargetGroupAttachmentId *string `puppet:"name=>'lb_target_group_attachment_id'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + Port *int64 +} + +type LicensemanagerAssociation struct { + LicenseConfigurationArn string `puppet:"name=>'license_configuration_arn'"` + ResourceArn string `puppet:"name=>'resource_arn'"` + LicensemanagerAssociationId *string `puppet:"name=>'licensemanager_association_id'"` +} + +type LicensemanagerLicenseConfiguration struct { + LicenseCountingType string `puppet:"name=>'license_counting_type'"` + Name string + LicensemanagerLicenseConfigurationId *string `puppet:"name=>'licensemanager_license_configuration_id'"` + Description *string + LicenseCount *int64 `puppet:"name=>'license_count'"` + LicenseCountHardLimit *bool `puppet:"name=>'license_count_hard_limit'"` + LicenseRules *[]string `puppet:"name=>'license_rules'"` + Tags *map[string]string +} + +type LightsailDomain struct { + DomainName string `puppet:"name=>'domain_name'"` + LightsailDomainId *string `puppet:"name=>'lightsail_domain_id'"` + Arn *string +} + +type LightsailInstance struct { + AvailabilityZone string `puppet:"name=>'availability_zone'"` + BlueprintId string `puppet:"name=>'blueprint_id'"` + BundleId string `puppet:"name=>'bundle_id'"` + Name string + LightsailInstanceId *string `puppet:"name=>'lightsail_instance_id'"` + Arn *string + CpuCount *int64 `puppet:"name=>'cpu_count'"` + CreatedAt *string `puppet:"name=>'created_at'"` + Ipv6Address *string `puppet:"name=>'ipv6_address'"` + IsStaticIp *bool `puppet:"name=>'is_static_ip'"` + KeyPairName *string `puppet:"name=>'key_pair_name'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PublicIpAddress *string `puppet:"name=>'public_ip_address'"` + RamSize *int64 `puppet:"name=>'ram_size'"` + UserData *string `puppet:"name=>'user_data'"` + Username *string +} + +type LightsailKeyPair struct { + LightsailKeyPairId *string `puppet:"name=>'lightsail_key_pair_id'"` + Arn *string + EncryptedFingerprint *string `puppet:"name=>'encrypted_fingerprint'"` + EncryptedPrivateKey *string `puppet:"name=>'encrypted_private_key'"` + Fingerprint *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + PgpKey *string `puppet:"name=>'pgp_key'"` + PrivateKey *string `puppet:"name=>'private_key'"` + PublicKey *string `puppet:"name=>'public_key'"` +} + +type LightsailStaticIp struct { + Name string + LightsailStaticIpId *string `puppet:"name=>'lightsail_static_ip_id'"` + Arn *string + IpAddress *string `puppet:"name=>'ip_address'"` + SupportCode *string `puppet:"name=>'support_code'"` +} + +type LightsailStaticIpAttachment struct { + InstanceName string `puppet:"name=>'instance_name'"` + StaticIpName string `puppet:"name=>'static_ip_name'"` + LightsailStaticIpAttachmentId *string `puppet:"name=>'lightsail_static_ip_attachment_id'"` +} + +type LoadBalancerBackendServerPolicy struct { + InstancePort int64 `puppet:"name=>'instance_port'"` + LoadBalancerName string `puppet:"name=>'load_balancer_name'"` + LoadBalancerBackendServerPolicyId *string `puppet:"name=>'load_balancer_backend_server_policy_id'"` + PolicyNames *[]string `puppet:"name=>'policy_names'"` +} + +type LoadBalancerListenerPolicy struct { + LoadBalancerName string `puppet:"name=>'load_balancer_name'"` + LoadBalancerPort int64 `puppet:"name=>'load_balancer_port'"` + LoadBalancerListenerPolicyId *string 
`puppet:"name=>'load_balancer_listener_policy_id'"` + PolicyNames *[]string `puppet:"name=>'policy_names'"` +} + +type LoadBalancerPolicy struct { + LoadBalancerName string `puppet:"name=>'load_balancer_name'"` + PolicyName string `puppet:"name=>'policy_name'"` + PolicyTypeName string `puppet:"name=>'policy_type_name'"` + LoadBalancerPolicyId *string `puppet:"name=>'load_balancer_policy_id'"` + PolicyAttribute *[]PolicyAttribute `puppet:"name=>'policy_attribute'"` +} + +type MacieMemberAccountAssociation struct { + MemberAccountId string `puppet:"name=>'member_account_id'"` + MacieMemberAccountAssociationId *string `puppet:"name=>'macie_member_account_association_id'"` +} + +type MacieS3BucketAssociation struct { + BucketName string `puppet:"name=>'bucket_name'"` + MacieS3BucketAssociationId *string `puppet:"name=>'macie_s3_bucket_association_id'"` + ClassificationType *Type `puppet:"name=>'classification_type'"` + MemberAccountId *string `puppet:"name=>'member_account_id'"` + Prefix *string +} + +type MainRouteTableAssociation struct { + RouteTableId string `puppet:"name=>'route_table_id'"` + VpcId string `puppet:"name=>'vpc_id'"` + MainRouteTableAssociationId *string `puppet:"name=>'main_route_table_association_id'"` + OriginalRouteTableId *string `puppet:"name=>'original_route_table_id'"` +} + +type MediaPackageChannel struct { + ChannelId string `puppet:"name=>'channel_id'"` + MediaPackageChannelId *string `puppet:"name=>'media_package_channel_id'"` + Arn *string + Description *string + HlsIngest *[]Ingest `puppet:"name=>'hls_ingest'"` +} + +type MediaStoreContainer struct { + Name string + MediaStoreContainerId *string `puppet:"name=>'media_store_container_id'"` + Arn *string + Endpoint *string +} + +type MediaStoreContainerPolicy struct { + ContainerName string `puppet:"name=>'container_name'"` + Policy string + MediaStoreContainerPolicyId *string `puppet:"name=>'media_store_container_policy_id'"` +} + +type MqBroker struct { + BrokerName string `puppet:"name=>'broker_name'"` + EngineType string `puppet:"name=>'engine_type'"` + EngineVersion string `puppet:"name=>'engine_version'"` + HostInstanceType string `puppet:"name=>'host_instance_type'"` + SecurityGroups []string `puppet:"name=>'security_groups'"` + User []User + MqBrokerId *string `puppet:"name=>'mq_broker_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + Configuration *BrokerConfiguration + DeploymentMode *string `puppet:"name=>'deployment_mode'"` + Instances *[]Instances + Logs *BrokerLogs + MaintenanceWindowStartTime *Time `puppet:"name=>'maintenance_window_start_time'"` + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + SubnetIds *[]string `puppet:"name=>'subnet_ids'"` + Tags *map[string]string +} + +type MqConfiguration struct { + Data string + EngineType string `puppet:"name=>'engine_type'"` + EngineVersion string `puppet:"name=>'engine_version'"` + Name string + MqConfigurationId *string `puppet:"name=>'mq_configuration_id'"` + Arn *string + Description *string + LatestRevision *int64 `puppet:"name=>'latest_revision'"` + Tags *map[string]string +} + +type NatGateway struct { + AllocationId string `puppet:"name=>'allocation_id'"` + SubnetId string `puppet:"name=>'subnet_id'"` + NatGatewayId *string `puppet:"name=>'nat_gateway_id'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PublicIp *string 
`puppet:"name=>'public_ip'"` + Tags *map[string]string +} + +type NeptuneCluster struct { + NeptuneClusterId *string `puppet:"name=>'neptune_cluster_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + BackupRetentionPeriod *int64 `puppet:"name=>'backup_retention_period'"` + ClusterIdentifier *string `puppet:"name=>'cluster_identifier'"` + ClusterIdentifierPrefix *string `puppet:"name=>'cluster_identifier_prefix'"` + ClusterMembers *[]string `puppet:"name=>'cluster_members'"` + ClusterResourceId *string `puppet:"name=>'cluster_resource_id'"` + Endpoint *string + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + FinalSnapshotIdentifier *string `puppet:"name=>'final_snapshot_identifier'"` + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` + IamDatabaseAuthenticationEnabled *bool `puppet:"name=>'iam_database_authentication_enabled'"` + IamRoles *[]string `puppet:"name=>'iam_roles'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + NeptuneClusterParameterGroupName *string `puppet:"name=>'neptune_cluster_parameter_group_name'"` + NeptuneSubnetGroupName *string `puppet:"name=>'neptune_subnet_group_name'"` + Port *int64 + PreferredBackupWindow *string `puppet:"name=>'preferred_backup_window'"` + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + ReaderEndpoint *string `puppet:"name=>'reader_endpoint'"` + ReplicationSourceIdentifier *string `puppet:"name=>'replication_source_identifier'"` + SkipFinalSnapshot *bool `puppet:"name=>'skip_final_snapshot'"` + SnapshotIdentifier *string `puppet:"name=>'snapshot_identifier'"` + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + Tags *map[string]string + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type NeptuneClusterInstance struct { + ClusterIdentifier string `puppet:"name=>'cluster_identifier'"` + InstanceClass string `puppet:"name=>'instance_class'"` + NeptuneClusterInstanceId *string `puppet:"name=>'neptune_cluster_instance_id'"` + Address *string + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + DbiResourceId *string `puppet:"name=>'dbi_resource_id'"` + Endpoint *string + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + Identifier *string + IdentifierPrefix *string `puppet:"name=>'identifier_prefix'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + NeptuneParameterGroupName *string `puppet:"name=>'neptune_parameter_group_name'"` + NeptuneSubnetGroupName *string `puppet:"name=>'neptune_subnet_group_name'"` + Port *int64 + PreferredBackupWindow *string `puppet:"name=>'preferred_backup_window'"` + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + PromotionTier *int64 `puppet:"name=>'promotion_tier'"` + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + Tags *map[string]string + Writer *bool +} + +type NeptuneClusterParameterGroup struct { + Family string + NeptuneClusterParameterGroupId *string `puppet:"name=>'neptune_cluster_parameter_group_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Parameter *[]Parameter + Tags *map[string]string +} + +type NeptuneClusterSnapshot 
struct { + DbClusterIdentifier string `puppet:"name=>'db_cluster_identifier'"` + DbClusterSnapshotIdentifier string `puppet:"name=>'db_cluster_snapshot_identifier'"` + NeptuneClusterSnapshotId *string `puppet:"name=>'neptune_cluster_snapshot_id'"` + AllocatedStorage *int64 `puppet:"name=>'allocated_storage'"` + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + DbClusterSnapshotArn *string `puppet:"name=>'db_cluster_snapshot_arn'"` + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + LicenseModel *string `puppet:"name=>'license_model'"` + Port *int64 + SnapshotType *string `puppet:"name=>'snapshot_type'"` + SourceDbClusterSnapshotArn *string `puppet:"name=>'source_db_cluster_snapshot_arn'"` + Status *string + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type NeptuneEventSubscription struct { + SnsTopicArn string `puppet:"name=>'sns_topic_arn'"` + NeptuneEventSubscriptionId *string `puppet:"name=>'neptune_event_subscription_id'"` + Arn *string + CustomerAwsId *string `puppet:"name=>'customer_aws_id'"` + Enabled *bool + EventCategories *[]string `puppet:"name=>'event_categories'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + SourceIds *[]string `puppet:"name=>'source_ids'"` + SourceType *string `puppet:"name=>'source_type'"` + Tags *map[string]string +} + +type NeptuneParameterGroup struct { + Family string + Name string + NeptuneParameterGroupId *string `puppet:"name=>'neptune_parameter_group_id'"` + Arn *string + Description *string + Parameter *[]Parameter + Tags *map[string]string +} + +type NeptuneSubnetGroup struct { + SubnetIds []string `puppet:"name=>'subnet_ids'"` + NeptuneSubnetGroupId *string `puppet:"name=>'neptune_subnet_group_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Tags *map[string]string +} + +type NetworkAcl struct { + VpcId string `puppet:"name=>'vpc_id'"` + NetworkAclId *string `puppet:"name=>'network_acl_id'"` + Egress *[]EgressIngress + Ingress *[]EgressIngress + OwnerId *string `puppet:"name=>'owner_id'"` + SubnetIds *[]string `puppet:"name=>'subnet_ids'"` + Tags *map[string]string +} + +type NetworkAclRule struct { + NetworkAclId string `puppet:"name=>'network_acl_id'"` + Protocol string + RuleAction string `puppet:"name=>'rule_action'"` + RuleNumber int64 `puppet:"name=>'rule_number'"` + NetworkAclRuleId *string `puppet:"name=>'network_acl_rule_id'"` + CidrBlock *string `puppet:"name=>'cidr_block'"` + Egress *bool + FromPort *int64 `puppet:"name=>'from_port'"` + IcmpCode *string `puppet:"name=>'icmp_code'"` + IcmpType *string `puppet:"name=>'icmp_type'"` + Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"` + ToPort *int64 `puppet:"name=>'to_port'"` +} + +type NetworkInterface struct { + SubnetId string `puppet:"name=>'subnet_id'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + Attachment *[]Attachment + Description *string + PrivateDnsName *string `puppet:"name=>'private_dns_name'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PrivateIps *[]string `puppet:"name=>'private_ips'"` + PrivateIpsCount *int64 `puppet:"name=>'private_ips_count'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SourceDestCheck *bool `puppet:"name=>'source_dest_check'"` + Tags *map[string]string +} + +type NetworkInterfaceAttachment struct { + DeviceIndex int64 `puppet:"name=>'device_index'"` + 
InstanceId string `puppet:"name=>'instance_id'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + NetworkInterfaceAttachmentId *string `puppet:"name=>'network_interface_attachment_id'"` + AttachmentId *string `puppet:"name=>'attachment_id'"` + Status *string +} + +type NetworkInterfaceSgAttachment struct { + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + SecurityGroupId string `puppet:"name=>'security_group_id'"` + NetworkInterfaceSgAttachmentId *string `puppet:"name=>'network_interface_sg_attachment_id'"` +} + +type OpsworksApplication struct { + Name string + StackId string `puppet:"name=>'stack_id'"` + Type string + OpsworksApplicationId *string `puppet:"name=>'opsworks_application_id'"` + AppSource *[]AppSource `puppet:"name=>'app_source'"` + AutoBundleOnDeploy *string `puppet:"name=>'auto_bundle_on_deploy'"` + AwsFlowRubySettings *string `puppet:"name=>'aws_flow_ruby_settings'"` + DataSourceArn *string `puppet:"name=>'data_source_arn'"` + DataSourceDatabaseName *string `puppet:"name=>'data_source_database_name'"` + DataSourceType *string `puppet:"name=>'data_source_type'"` + Description *string + DocumentRoot *string `puppet:"name=>'document_root'"` + Domains *[]string + EnableSsl *bool `puppet:"name=>'enable_ssl'"` + Environment *[]ApplicationEnvironment + RailsEnv *string `puppet:"name=>'rails_env'"` + ShortName *string `puppet:"name=>'short_name'"` + SslConfiguration *[]SslConfiguration `puppet:"name=>'ssl_configuration'"` +} + +type OpsworksCustomLayer struct { + Name string + ShortName string `puppet:"name=>'short_name'"` + StackId string `puppet:"name=>'stack_id'"` + OpsworksCustomLayerId *string `puppet:"name=>'opsworks_custom_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksGangliaLayer struct { + Password string + StackId string `puppet:"name=>'stack_id'"` + OpsworksGangliaLayerId *string `puppet:"name=>'opsworks_ganglia_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string 
`puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + SystemPackages *[]string `puppet:"name=>'system_packages'"` + Url *string + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` + Username *string +} + +type OpsworksHaproxyLayer struct { + StackId string `puppet:"name=>'stack_id'"` + StatsPassword string `puppet:"name=>'stats_password'"` + OpsworksHaproxyLayerId *string `puppet:"name=>'opsworks_haproxy_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + HealthcheckMethod *string `puppet:"name=>'healthcheck_method'"` + HealthcheckUrl *string `puppet:"name=>'healthcheck_url'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + StatsEnabled *bool `puppet:"name=>'stats_enabled'"` + StatsUrl *string `puppet:"name=>'stats_url'"` + StatsUser *string `puppet:"name=>'stats_user'"` + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksInstance struct { + LayerIds []string `puppet:"name=>'layer_ids'"` + StackId string `puppet:"name=>'stack_id'"` + OpsworksInstanceId *string `puppet:"name=>'opsworks_instance_id'"` + AgentVersion *string `puppet:"name=>'agent_version'"` + AmiId *string `puppet:"name=>'ami_id'"` + Architecture *string + AutoScalingType *string `puppet:"name=>'auto_scaling_type'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + CreatedAt *string `puppet:"name=>'created_at'"` + DeleteEbs *bool `puppet:"name=>'delete_ebs'"` + DeleteEip *bool `puppet:"name=>'delete_eip'"` + EbsBlockDevice *[]InstanceEbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + Ec2InstanceId *string `puppet:"name=>'ec2_instance_id'"` + EcsClusterArn *string 
`puppet:"name=>'ecs_cluster_arn'"` + ElasticIp *string `puppet:"name=>'elastic_ip'"` + EphemeralBlockDevice *[]Device `puppet:"name=>'ephemeral_block_device'"` + Hostname *string + InfrastructureClass *string `puppet:"name=>'infrastructure_class'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceProfileArn *string `puppet:"name=>'instance_profile_arn'"` + InstanceType *string `puppet:"name=>'instance_type'"` + LastServiceErrorId *string `puppet:"name=>'last_service_error_id'"` + Os *string + Platform *string + PrivateDns *string `puppet:"name=>'private_dns'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PublicDns *string `puppet:"name=>'public_dns'"` + PublicIp *string `puppet:"name=>'public_ip'"` + RegisteredBy *string `puppet:"name=>'registered_by'"` + ReportedAgentVersion *string `puppet:"name=>'reported_agent_version'"` + ReportedOsFamily *string `puppet:"name=>'reported_os_family'"` + ReportedOsName *string `puppet:"name=>'reported_os_name'"` + ReportedOsVersion *string `puppet:"name=>'reported_os_version'"` + RootBlockDevice *[]BlockDevice `puppet:"name=>'root_block_device'"` + RootDeviceType *string `puppet:"name=>'root_device_type'"` + RootDeviceVolumeId *string `puppet:"name=>'root_device_volume_id'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + SshHostDsaKeyFingerprint *string `puppet:"name=>'ssh_host_dsa_key_fingerprint'"` + SshHostRsaKeyFingerprint *string `puppet:"name=>'ssh_host_rsa_key_fingerprint'"` + SshKeyName *string `puppet:"name=>'ssh_key_name'"` + State *string + Status *string + SubnetId *string `puppet:"name=>'subnet_id'"` + Tenancy *string + VirtualizationType *string `puppet:"name=>'virtualization_type'"` +} + +type OpsworksJavaAppLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksJavaAppLayerId *string `puppet:"name=>'opsworks_java_app_layer_id'"` + AppServer *string `puppet:"name=>'app_server'"` + AppServerVersion *string `puppet:"name=>'app_server_version'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + JvmOptions *string `puppet:"name=>'jvm_options'"` + JvmType *string `puppet:"name=>'jvm_type'"` + JvmVersion *string `puppet:"name=>'jvm_version'"` + Name *string + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksMemcachedLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksMemcachedLayerId 
*string `puppet:"name=>'opsworks_memcached_layer_id'"` + AllocatedMemory *int64 `puppet:"name=>'allocated_memory'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksMysqlLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksMysqlLayerId *string `puppet:"name=>'opsworks_mysql_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + RootPassword *string `puppet:"name=>'root_password'"` + RootPasswordOnAllInstances *bool `puppet:"name=>'root_password_on_all_instances'"` + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksNodejsAppLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksNodejsAppLayerId *string `puppet:"name=>'opsworks_nodejs_app_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string 
`puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + NodejsVersion *string `puppet:"name=>'nodejs_version'"` + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksPermission struct { + UserArn string `puppet:"name=>'user_arn'"` + OpsworksPermissionId *string `puppet:"name=>'opsworks_permission_id'"` + AllowSsh *bool `puppet:"name=>'allow_ssh'"` + AllowSudo *bool `puppet:"name=>'allow_sudo'"` + Level *string + StackId *string `puppet:"name=>'stack_id'"` +} + +type OpsworksPhpAppLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksPhpAppLayerId *string `puppet:"name=>'opsworks_php_app_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksRailsAppLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksRailsAppLayerId *string `puppet:"name=>'opsworks_rails_app_layer_id'"` + AppServer *string `puppet:"name=>'app_server'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + BundlerVersion *string `puppet:"name=>'bundler_version'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string 
`puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string `puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + ManageBundler *bool `puppet:"name=>'manage_bundler'"` + Name *string + PassengerVersion *string `puppet:"name=>'passenger_version'"` + RubyVersion *string `puppet:"name=>'ruby_version'"` + RubygemsVersion *string `puppet:"name=>'rubygems_version'"` + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksRdsDbInstance struct { + DbPassword string `puppet:"name=>'db_password'"` + DbUser string `puppet:"name=>'db_user'"` + RdsDbInstanceArn string `puppet:"name=>'rds_db_instance_arn'"` + StackId string `puppet:"name=>'stack_id'"` + OpsworksRdsDbInstanceId *string `puppet:"name=>'opsworks_rds_db_instance_id'"` +} + +type OpsworksStack struct { + DefaultInstanceProfileArn string `puppet:"name=>'default_instance_profile_arn'"` + Name string + Region string + ServiceRoleArn string `puppet:"name=>'service_role_arn'"` + OpsworksStackId *string `puppet:"name=>'opsworks_stack_id'"` + AgentVersion *string `puppet:"name=>'agent_version'"` + Arn *string + BerkshelfVersion *string `puppet:"name=>'berkshelf_version'"` + Color *string + ConfigurationManagerName *string `puppet:"name=>'configuration_manager_name'"` + ConfigurationManagerVersion *string `puppet:"name=>'configuration_manager_version'"` + CustomCookbooksSource *[]CookbooksSource `puppet:"name=>'custom_cookbooks_source'"` + CustomJson *string `puppet:"name=>'custom_json'"` + DefaultAvailabilityZone *string `puppet:"name=>'default_availability_zone'"` + DefaultOs *string `puppet:"name=>'default_os'"` + DefaultRootDeviceType *string `puppet:"name=>'default_root_device_type'"` + DefaultSshKeyName *string `puppet:"name=>'default_ssh_key_name'"` + DefaultSubnetId *string `puppet:"name=>'default_subnet_id'"` + HostnameTheme *string `puppet:"name=>'hostname_theme'"` + ManageBerkshelf *bool `puppet:"name=>'manage_berkshelf'"` + StackEndpoint *string `puppet:"name=>'stack_endpoint'"` + Tags *map[string]string + UseCustomCookbooks *bool `puppet:"name=>'use_custom_cookbooks'"` + UseOpsworksSecurityGroups *bool `puppet:"name=>'use_opsworks_security_groups'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type OpsworksStaticWebLayer struct { + StackId string `puppet:"name=>'stack_id'"` + OpsworksStaticWebLayerId *string `puppet:"name=>'opsworks_static_web_layer_id'"` + AutoAssignElasticIps *bool `puppet:"name=>'auto_assign_elastic_ips'"` + AutoAssignPublicIps *bool `puppet:"name=>'auto_assign_public_ips'"` + AutoHealing *bool `puppet:"name=>'auto_healing'"` + CustomConfigureRecipes *[]string `puppet:"name=>'custom_configure_recipes'"` + CustomDeployRecipes *[]string `puppet:"name=>'custom_deploy_recipes'"` + CustomInstanceProfileArn *string `puppet:"name=>'custom_instance_profile_arn'"` + CustomJson *string `puppet:"name=>'custom_json'"` + CustomSecurityGroupIds *[]string `puppet:"name=>'custom_security_group_ids'"` + CustomSetupRecipes *[]string 
`puppet:"name=>'custom_setup_recipes'"` + CustomShutdownRecipes *[]string `puppet:"name=>'custom_shutdown_recipes'"` + CustomUndeployRecipes *[]string `puppet:"name=>'custom_undeploy_recipes'"` + DrainElbOnShutdown *bool `puppet:"name=>'drain_elb_on_shutdown'"` + EbsVolume *[]Volume `puppet:"name=>'ebs_volume'"` + ElasticLoadBalancer *string `puppet:"name=>'elastic_load_balancer'"` + InstallUpdatesOnBoot *bool `puppet:"name=>'install_updates_on_boot'"` + InstanceShutdownTimeout *int64 `puppet:"name=>'instance_shutdown_timeout'"` + Name *string + SystemPackages *[]string `puppet:"name=>'system_packages'"` + UseEbsOptimizedInstances *bool `puppet:"name=>'use_ebs_optimized_instances'"` +} + +type OpsworksUserProfile struct { + SshUsername string `puppet:"name=>'ssh_username'"` + UserArn string `puppet:"name=>'user_arn'"` + OpsworksUserProfileId *string `puppet:"name=>'opsworks_user_profile_id'"` + AllowSelfManagement *bool `puppet:"name=>'allow_self_management'"` + SshPublicKey *string `puppet:"name=>'ssh_public_key'"` +} + +type OrganizationsAccount struct { + Email string + Name string + OrganizationsAccountId *string `puppet:"name=>'organizations_account_id'"` + Arn *string + IamUserAccessToBilling *string `puppet:"name=>'iam_user_access_to_billing'"` + JoinedMethod *string `puppet:"name=>'joined_method'"` + JoinedTimestamp *string `puppet:"name=>'joined_timestamp'"` + RoleName *string `puppet:"name=>'role_name'"` + Status *string +} + +type OrganizationsOrganization struct { + OrganizationsOrganizationId *string `puppet:"name=>'organizations_organization_id'"` + Arn *string + AwsServiceAccessPrincipals *[]string `puppet:"name=>'aws_service_access_principals'"` + FeatureSet *string `puppet:"name=>'feature_set'"` + MasterAccountArn *string `puppet:"name=>'master_account_arn'"` + MasterAccountEmail *string `puppet:"name=>'master_account_email'"` + MasterAccountId *string `puppet:"name=>'master_account_id'"` +} + +type OrganizationsPolicy struct { + Content string + Name string + OrganizationsPolicyId *string `puppet:"name=>'organizations_policy_id'"` + Arn *string + Description *string + Type *string +} + +type OrganizationsPolicyAttachment struct { + PolicyId string `puppet:"name=>'policy_id'"` + TargetId string `puppet:"name=>'target_id'"` + OrganizationsPolicyAttachmentId *string `puppet:"name=>'organizations_policy_attachment_id'"` +} + +type PinpointAdmChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + ClientId string `puppet:"name=>'client_id'"` + ClientSecret string `puppet:"name=>'client_secret'"` + PinpointAdmChannelId *string `puppet:"name=>'pinpoint_adm_channel_id'"` + Enabled *bool +} + +type PinpointApnsChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + PinpointApnsChannelId *string `puppet:"name=>'pinpoint_apns_channel_id'"` + BundleId *string `puppet:"name=>'bundle_id'"` + Certificate *string + DefaultAuthenticationMethod *string `puppet:"name=>'default_authentication_method'"` + Enabled *bool + PrivateKey *string `puppet:"name=>'private_key'"` + TeamId *string `puppet:"name=>'team_id'"` + TokenKey *string `puppet:"name=>'token_key'"` + TokenKeyId *string `puppet:"name=>'token_key_id'"` +} + +type PinpointApnsSandboxChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + PinpointApnsSandboxChannelId *string `puppet:"name=>'pinpoint_apns_sandbox_channel_id'"` + BundleId *string `puppet:"name=>'bundle_id'"` + Certificate *string + DefaultAuthenticationMethod *string 
`puppet:"name=>'default_authentication_method'"` + Enabled *bool + PrivateKey *string `puppet:"name=>'private_key'"` + TeamId *string `puppet:"name=>'team_id'"` + TokenKey *string `puppet:"name=>'token_key'"` + TokenKeyId *string `puppet:"name=>'token_key_id'"` +} + +type PinpointApnsVoipChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + PinpointApnsVoipChannelId *string `puppet:"name=>'pinpoint_apns_voip_channel_id'"` + BundleId *string `puppet:"name=>'bundle_id'"` + Certificate *string + DefaultAuthenticationMethod *string `puppet:"name=>'default_authentication_method'"` + Enabled *bool + PrivateKey *string `puppet:"name=>'private_key'"` + TeamId *string `puppet:"name=>'team_id'"` + TokenKey *string `puppet:"name=>'token_key'"` + TokenKeyId *string `puppet:"name=>'token_key_id'"` +} + +type PinpointApnsVoipSandboxChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + PinpointApnsVoipSandboxChannelId *string `puppet:"name=>'pinpoint_apns_voip_sandbox_channel_id'"` + BundleId *string `puppet:"name=>'bundle_id'"` + Certificate *string + DefaultAuthenticationMethod *string `puppet:"name=>'default_authentication_method'"` + Enabled *bool + PrivateKey *string `puppet:"name=>'private_key'"` + TeamId *string `puppet:"name=>'team_id'"` + TokenKey *string `puppet:"name=>'token_key'"` + TokenKeyId *string `puppet:"name=>'token_key_id'"` +} + +type PinpointApp struct { + PinpointAppId *string `puppet:"name=>'pinpoint_app_id'"` + ApplicationId *string `puppet:"name=>'application_id'"` + CampaignHook *CampaignHook `puppet:"name=>'campaign_hook'"` + Limits *Limits + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + QuietTime *QuietTime `puppet:"name=>'quiet_time'"` +} + +type PinpointBaiduChannel struct { + ApiKey string `puppet:"name=>'api_key'"` + ApplicationId string `puppet:"name=>'application_id'"` + SecretKey string `puppet:"name=>'secret_key'"` + PinpointBaiduChannelId *string `puppet:"name=>'pinpoint_baidu_channel_id'"` + Enabled *bool +} + +type PinpointEmailChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + FromAddress string `puppet:"name=>'from_address'"` + Identity string + RoleArn string `puppet:"name=>'role_arn'"` + PinpointEmailChannelId *string `puppet:"name=>'pinpoint_email_channel_id'"` + Enabled *bool + MessagesPerSecond *int64 `puppet:"name=>'messages_per_second'"` +} + +type PinpointEventStream struct { + ApplicationId string `puppet:"name=>'application_id'"` + DestinationStreamArn string `puppet:"name=>'destination_stream_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + PinpointEventStreamId *string `puppet:"name=>'pinpoint_event_stream_id'"` +} + +type PinpointGcmChannel struct { + ApiKey string `puppet:"name=>'api_key'"` + ApplicationId string `puppet:"name=>'application_id'"` + PinpointGcmChannelId *string `puppet:"name=>'pinpoint_gcm_channel_id'"` + Enabled *bool +} + +type PinpointSmsChannel struct { + ApplicationId string `puppet:"name=>'application_id'"` + PinpointSmsChannelId *string `puppet:"name=>'pinpoint_sms_channel_id'"` + Enabled *bool + PromotionalMessagesPerSecond *int64 `puppet:"name=>'promotional_messages_per_second'"` + SenderId *string `puppet:"name=>'sender_id'"` + ShortCode *string `puppet:"name=>'short_code'"` + TransactionalMessagesPerSecond *int64 `puppet:"name=>'transactional_messages_per_second'"` +} + +type PlacementGroup struct { + Name string + Strategy string + PlacementGroupId *string `puppet:"name=>'placement_group_id'"` +} + +type ProxyProtocolPolicy 
struct { + InstancePorts []string `puppet:"name=>'instance_ports'"` + LoadBalancer string `puppet:"name=>'load_balancer'"` + ProxyProtocolPolicyId *string `puppet:"name=>'proxy_protocol_policy_id'"` +} + +type RamResourceShare struct { + Name string + RamResourceShareId *string `puppet:"name=>'ram_resource_share_id'"` + AllowExternalPrincipals *bool `puppet:"name=>'allow_external_principals'"` + Tags *map[string]string +} + +type RdsCluster struct { + RdsClusterId *string `puppet:"name=>'rds_cluster_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + BacktrackWindow *int64 `puppet:"name=>'backtrack_window'"` + BackupRetentionPeriod *int64 `puppet:"name=>'backup_retention_period'"` + ClusterIdentifier *string `puppet:"name=>'cluster_identifier'"` + ClusterIdentifierPrefix *string `puppet:"name=>'cluster_identifier_prefix'"` + ClusterMembers *[]string `puppet:"name=>'cluster_members'"` + ClusterResourceId *string `puppet:"name=>'cluster_resource_id'"` + DatabaseName *string `puppet:"name=>'database_name'"` + DbClusterParameterGroupName *string `puppet:"name=>'db_cluster_parameter_group_name'"` + DbSubnetGroupName *string `puppet:"name=>'db_subnet_group_name'"` + DeletionProtection *bool `puppet:"name=>'deletion_protection'"` + EnabledCloudwatchLogsExports *[]string `puppet:"name=>'enabled_cloudwatch_logs_exports'"` + Endpoint *string + Engine *string + EngineMode *string `puppet:"name=>'engine_mode'"` + EngineVersion *string `puppet:"name=>'engine_version'"` + FinalSnapshotIdentifier *string `puppet:"name=>'final_snapshot_identifier'"` + GlobalClusterIdentifier *string `puppet:"name=>'global_cluster_identifier'"` + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` + IamDatabaseAuthenticationEnabled *bool `puppet:"name=>'iam_database_authentication_enabled'"` + IamRoles *[]string `puppet:"name=>'iam_roles'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + MasterPassword *string `puppet:"name=>'master_password'"` + MasterUsername *string `puppet:"name=>'master_username'"` + Port *int64 + PreferredBackupWindow *string `puppet:"name=>'preferred_backup_window'"` + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + ReaderEndpoint *string `puppet:"name=>'reader_endpoint'"` + ReplicationSourceIdentifier *string `puppet:"name=>'replication_source_identifier'"` + S3Import *Import `puppet:"name=>'s3_import'"` + ScalingConfiguration *ScalingConfiguration `puppet:"name=>'scaling_configuration'"` + SkipFinalSnapshot *bool `puppet:"name=>'skip_final_snapshot'"` + SnapshotIdentifier *string `puppet:"name=>'snapshot_identifier'"` + SourceRegion *string `puppet:"name=>'source_region'"` + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + Tags *map[string]string + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type RdsClusterEndpoint struct { + ClusterEndpointIdentifier string `puppet:"name=>'cluster_endpoint_identifier'"` + ClusterIdentifier string `puppet:"name=>'cluster_identifier'"` + CustomEndpointType string `puppet:"name=>'custom_endpoint_type'"` + RdsClusterEndpointId *string `puppet:"name=>'rds_cluster_endpoint_id'"` + Arn *string + Endpoint *string + ExcludedMembers *[]string `puppet:"name=>'excluded_members'"` + StaticMembers *[]string `puppet:"name=>'static_members'"` +} + +type RdsClusterInstance struct { + ClusterIdentifier string `puppet:"name=>'cluster_identifier'"` + InstanceClass string 
`puppet:"name=>'instance_class'"` + RdsClusterInstanceId *string `puppet:"name=>'rds_cluster_instance_id'"` + ApplyImmediately *bool `puppet:"name=>'apply_immediately'"` + Arn *string + AutoMinorVersionUpgrade *bool `puppet:"name=>'auto_minor_version_upgrade'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + CopyTagsToSnapshot *bool `puppet:"name=>'copy_tags_to_snapshot'"` + DbParameterGroupName *string `puppet:"name=>'db_parameter_group_name'"` + DbSubnetGroupName *string `puppet:"name=>'db_subnet_group_name'"` + DbiResourceId *string `puppet:"name=>'dbi_resource_id'"` + Endpoint *string + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + Identifier *string + IdentifierPrefix *string `puppet:"name=>'identifier_prefix'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + MonitoringInterval *int64 `puppet:"name=>'monitoring_interval'"` + MonitoringRoleArn *string `puppet:"name=>'monitoring_role_arn'"` + PerformanceInsightsEnabled *bool `puppet:"name=>'performance_insights_enabled'"` + PerformanceInsightsKmsKeyId *string `puppet:"name=>'performance_insights_kms_key_id'"` + Port *int64 + PreferredBackupWindow *string `puppet:"name=>'preferred_backup_window'"` + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + PromotionTier *int64 `puppet:"name=>'promotion_tier'"` + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` + Tags *map[string]string + Writer *bool +} + +type RdsClusterParameterGroup struct { + Family string + RdsClusterParameterGroupId *string `puppet:"name=>'rds_cluster_parameter_group_id'"` + Arn *string + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Parameter *[]Parameter + Tags *map[string]string +} + +type RdsGlobalCluster struct { + GlobalClusterIdentifier string `puppet:"name=>'global_cluster_identifier'"` + RdsGlobalClusterId *string `puppet:"name=>'rds_global_cluster_id'"` + Arn *string + DatabaseName *string `puppet:"name=>'database_name'"` + DeletionProtection *bool `puppet:"name=>'deletion_protection'"` + Engine *string + EngineVersion *string `puppet:"name=>'engine_version'"` + GlobalClusterResourceId *string `puppet:"name=>'global_cluster_resource_id'"` + StorageEncrypted *bool `puppet:"name=>'storage_encrypted'"` +} + +type RedshiftCluster struct { + ClusterIdentifier string `puppet:"name=>'cluster_identifier'"` + NodeType string `puppet:"name=>'node_type'"` + RedshiftClusterId *string `puppet:"name=>'redshift_cluster_id'"` + AllowVersionUpgrade *bool `puppet:"name=>'allow_version_upgrade'"` + AutomatedSnapshotRetentionPeriod *int64 `puppet:"name=>'automated_snapshot_retention_period'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + ClusterParameterGroupName *string `puppet:"name=>'cluster_parameter_group_name'"` + ClusterPublicKey *string `puppet:"name=>'cluster_public_key'"` + ClusterRevisionNumber *string `puppet:"name=>'cluster_revision_number'"` + ClusterSecurityGroups *[]string `puppet:"name=>'cluster_security_groups'"` + ClusterSubnetGroupName *string `puppet:"name=>'cluster_subnet_group_name'"` + ClusterType *string `puppet:"name=>'cluster_type'"` + ClusterVersion *string `puppet:"name=>'cluster_version'"` + DatabaseName *string `puppet:"name=>'database_name'"` + DnsName *string `puppet:"name=>'dns_name'"` + ElasticIp *string `puppet:"name=>'elastic_ip'"` + Encrypted *bool + Endpoint *string + EnhancedVpcRouting *bool 
`puppet:"name=>'enhanced_vpc_routing'"` + FinalSnapshotIdentifier *string `puppet:"name=>'final_snapshot_identifier'"` + IamRoles *[]string `puppet:"name=>'iam_roles'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Logging *Logging + MasterPassword *string `puppet:"name=>'master_password'"` + MasterUsername *string `puppet:"name=>'master_username'"` + NumberOfNodes *int64 `puppet:"name=>'number_of_nodes'"` + OwnerAccount *string `puppet:"name=>'owner_account'"` + Port *int64 + PreferredMaintenanceWindow *string `puppet:"name=>'preferred_maintenance_window'"` + PubliclyAccessible *bool `puppet:"name=>'publicly_accessible'"` + SkipFinalSnapshot *bool `puppet:"name=>'skip_final_snapshot'"` + SnapshotClusterIdentifier *string `puppet:"name=>'snapshot_cluster_identifier'"` + SnapshotCopy *Copy `puppet:"name=>'snapshot_copy'"` + SnapshotIdentifier *string `puppet:"name=>'snapshot_identifier'"` + Tags *map[string]string + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` +} + +type RedshiftEventSubscription struct { + Name string + SnsTopicArn string `puppet:"name=>'sns_topic_arn'"` + RedshiftEventSubscriptionId *string `puppet:"name=>'redshift_event_subscription_id'"` + CustomerAwsId *string `puppet:"name=>'customer_aws_id'"` + Enabled *bool + EventCategories *[]string `puppet:"name=>'event_categories'"` + Severity *string + SourceIds *[]string `puppet:"name=>'source_ids'"` + SourceType *string `puppet:"name=>'source_type'"` + Status *string + Tags *map[string]string +} + +type RedshiftParameterGroup struct { + Family string + Name string + RedshiftParameterGroupId *string `puppet:"name=>'redshift_parameter_group_id'"` + Description *string + Parameter *[]GroupParameter +} + +type RedshiftSecurityGroup struct { + Ingress []GroupIngress + Name string + RedshiftSecurityGroupId *string `puppet:"name=>'redshift_security_group_id'"` + Description *string +} + +type RedshiftSnapshotCopyGrant struct { + SnapshotCopyGrantName string `puppet:"name=>'snapshot_copy_grant_name'"` + RedshiftSnapshotCopyGrantId *string `puppet:"name=>'redshift_snapshot_copy_grant_id'"` + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Tags *map[string]string +} + +type RedshiftSubnetGroup struct { + Name string + SubnetIds []string `puppet:"name=>'subnet_ids'"` + RedshiftSubnetGroupId *string `puppet:"name=>'redshift_subnet_group_id'"` + Description *string + Tags *map[string]string +} + +type ResourcegroupsGroup struct { + Name string + ResourceQuery Query `puppet:"name=>'resource_query'"` + ResourcegroupsGroupId *string `puppet:"name=>'resourcegroups_group_id'"` + Arn *string + Description *string +} + +type Route struct { + RouteTableId string `puppet:"name=>'route_table_id'"` + RouteId *string `puppet:"name=>'route_id'"` + DestinationCidrBlock *string `puppet:"name=>'destination_cidr_block'"` + DestinationIpv6CidrBlock *string `puppet:"name=>'destination_ipv6_cidr_block'"` + DestinationPrefixListId *string `puppet:"name=>'destination_prefix_list_id'"` + EgressOnlyGatewayId *string `puppet:"name=>'egress_only_gateway_id'"` + GatewayId *string `puppet:"name=>'gateway_id'"` + InstanceId *string `puppet:"name=>'instance_id'"` + InstanceOwnerId *string `puppet:"name=>'instance_owner_id'"` + NatGatewayId *string `puppet:"name=>'nat_gateway_id'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + Origin *string + State *string + TransitGatewayId *string `puppet:"name=>'transit_gateway_id'"` + VpcPeeringConnectionId *string `puppet:"name=>'vpc_peering_connection_id'"` +} + +type 
Route53DelegationSet struct { + Route53DelegationSetId *string `puppet:"name=>'route53_delegation_set_id'"` + NameServers *[]string `puppet:"name=>'name_servers'"` + ReferenceName *string `puppet:"name=>'reference_name'"` +} + +type Route53HealthCheck struct { + Type string + Route53HealthCheckId *string `puppet:"name=>'route53_health_check_id'"` + ChildHealthThreshold *int64 `puppet:"name=>'child_health_threshold'"` + ChildHealthchecks *[]string `puppet:"name=>'child_healthchecks'"` + CloudwatchAlarmName *string `puppet:"name=>'cloudwatch_alarm_name'"` + CloudwatchAlarmRegion *string `puppet:"name=>'cloudwatch_alarm_region'"` + EnableSni *bool `puppet:"name=>'enable_sni'"` + FailureThreshold *int64 `puppet:"name=>'failure_threshold'"` + Fqdn *string + InsufficientDataHealthStatus *string `puppet:"name=>'insufficient_data_health_status'"` + InvertHealthcheck *bool `puppet:"name=>'invert_healthcheck'"` + IpAddress *string `puppet:"name=>'ip_address'"` + MeasureLatency *bool `puppet:"name=>'measure_latency'"` + Port *int64 + ReferenceName *string `puppet:"name=>'reference_name'"` + Regions *[]string + RequestInterval *int64 `puppet:"name=>'request_interval'"` + ResourcePath *string `puppet:"name=>'resource_path'"` + SearchString *string `puppet:"name=>'search_string'"` + Tags *map[string]string +} + +type Route53QueryLog struct { + CloudwatchLogGroupArn string `puppet:"name=>'cloudwatch_log_group_arn'"` + ZoneId string `puppet:"name=>'zone_id'"` + Route53QueryLogId *string `puppet:"name=>'route53_query_log_id'"` +} + +type Route53Record struct { + Name string + Type string + ZoneId string `puppet:"name=>'zone_id'"` + Route53RecordId *string `puppet:"name=>'route53_record_id'"` + Alias *[]Alias + AllowOverwrite *bool `puppet:"name=>'allow_overwrite'"` + FailoverRoutingPolicy *[]Action `puppet:"name=>'failover_routing_policy'"` + Fqdn *string + GeolocationRoutingPolicy *[]RoutingPolicy `puppet:"name=>'geolocation_routing_policy'"` + HealthCheckId *string `puppet:"name=>'health_check_id'"` + LatencyRoutingPolicy *[]LatencyRoutingPolicy `puppet:"name=>'latency_routing_policy'"` + MultivalueAnswerRoutingPolicy *bool `puppet:"name=>'multivalue_answer_routing_policy'"` + Records *[]string + SetIdentifier *string `puppet:"name=>'set_identifier'"` + Ttl *int64 + WeightedRoutingPolicy *[]WeightedRoutingPolicy `puppet:"name=>'weighted_routing_policy'"` +} + +type Route53Zone struct { + Name string + Route53ZoneId *string `puppet:"name=>'route53_zone_id'"` + Comment *string + DelegationSetId *string `puppet:"name=>'delegation_set_id'"` + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + NameServers *[]string `puppet:"name=>'name_servers'"` + Tags *map[string]string + Vpc *[]ZoneVpc + ZoneId *string `puppet:"name=>'zone_id'"` +} + +type Route53ZoneAssociation struct { + VpcId string `puppet:"name=>'vpc_id'"` + ZoneId string `puppet:"name=>'zone_id'"` + Route53ZoneAssociationId *string `puppet:"name=>'route53_zone_association_id'"` + VpcRegion *string `puppet:"name=>'vpc_region'"` +} + +type RouteTable struct { + VpcId string `puppet:"name=>'vpc_id'"` + RouteTableId *string `puppet:"name=>'route_table_id'"` + OwnerId *string `puppet:"name=>'owner_id'"` + PropagatingVgws *[]string `puppet:"name=>'propagating_vgws'"` + Route *[]TableRoute + Tags *map[string]string +} + +type RouteTableAssociation struct { + RouteTableId string `puppet:"name=>'route_table_id'"` + SubnetId string `puppet:"name=>'subnet_id'"` + RouteTableAssociationId *string `puppet:"name=>'route_table_association_id'"` +} + +type 
S3AccountPublicAccessBlock struct { + S3AccountPublicAccessBlockId *string `puppet:"name=>'s3_account_public_access_block_id'"` + AccountId *string `puppet:"name=>'account_id'"` + BlockPublicAcls *bool `puppet:"name=>'block_public_acls'"` + BlockPublicPolicy *bool `puppet:"name=>'block_public_policy'"` + IgnorePublicAcls *bool `puppet:"name=>'ignore_public_acls'"` + RestrictPublicBuckets *bool `puppet:"name=>'restrict_public_buckets'"` +} + +type S3Bucket struct { + S3BucketId *string `puppet:"name=>'s3_bucket_id'"` + AccelerationStatus *string `puppet:"name=>'acceleration_status'"` + Acl *string + Arn *string + Bucket *string + BucketDomainName *string `puppet:"name=>'bucket_domain_name'"` + BucketPrefix *string `puppet:"name=>'bucket_prefix'"` + BucketRegionalDomainName *string `puppet:"name=>'bucket_regional_domain_name'"` + CorsRule *[]CorsRule `puppet:"name=>'cors_rule'"` + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` + LifecycleRule *[]LifecycleRule `puppet:"name=>'lifecycle_rule'"` + Logging *[]BucketLogging + ObjectLockConfiguration *LockConfiguration `puppet:"name=>'object_lock_configuration'"` + Policy *string + Region *string + ReplicationConfiguration *ReplicationConfiguration `puppet:"name=>'replication_configuration'"` + RequestPayer *string `puppet:"name=>'request_payer'"` + ServerSideEncryptionConfiguration *SideEncryptionConfiguration `puppet:"name=>'server_side_encryption_configuration'"` + Tags *map[string]string + Versioning *Versioning + Website *Website + WebsiteDomain *string `puppet:"name=>'website_domain'"` + WebsiteEndpoint *string `puppet:"name=>'website_endpoint'"` +} + +type S3BucketInventory struct { + Bucket string + Destination InventoryDestination + IncludedObjectVersions string `puppet:"name=>'included_object_versions'"` + Name string + Schedule InventorySchedule + S3BucketInventoryId *string `puppet:"name=>'s3_bucket_inventory_id'"` + Enabled *bool + Filter *InventoryFilter + OptionalFields *[]string `puppet:"name=>'optional_fields'"` +} + +type S3BucketMetric struct { + Bucket string + Name string + S3BucketMetricId *string `puppet:"name=>'s3_bucket_metric_id'"` + Filter *MetricFilter +} + +type S3BucketNotification struct { + Bucket string + S3BucketNotificationId *string `puppet:"name=>'s3_bucket_notification_id'"` + LambdaFunction *[]Function `puppet:"name=>'lambda_function'"` + Queue *[]Queue + Topic *[]Topic +} + +type S3BucketObject struct { + Bucket string + Key string + S3BucketObjectId *string `puppet:"name=>'s3_bucket_object_id'"` + Acl *string + CacheControl *string `puppet:"name=>'cache_control'"` + Content *string + ContentBase64 *string `puppet:"name=>'content_base64'"` + ContentDisposition *string `puppet:"name=>'content_disposition'"` + ContentEncoding *string `puppet:"name=>'content_encoding'"` + ContentLanguage *string `puppet:"name=>'content_language'"` + ContentType *string `puppet:"name=>'content_type'"` + Etag *string + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + ServerSideEncryption *string `puppet:"name=>'server_side_encryption'"` + Source *string + StorageClass *string `puppet:"name=>'storage_class'"` + Tags *map[string]string + VersionId *string `puppet:"name=>'version_id'"` + WebsiteRedirect *string `puppet:"name=>'website_redirect'"` +} + +type S3BucketPolicy struct { + Bucket string + Policy string + S3BucketPolicyId *string `puppet:"name=>'s3_bucket_policy_id'"` +} + +type S3BucketPublicAccessBlock struct { + Bucket string + S3BucketPublicAccessBlockId 
*string `puppet:"name=>'s3_bucket_public_access_block_id'"` + BlockPublicAcls *bool `puppet:"name=>'block_public_acls'"` + BlockPublicPolicy *bool `puppet:"name=>'block_public_policy'"` + IgnorePublicAcls *bool `puppet:"name=>'ignore_public_acls'"` + RestrictPublicBuckets *bool `puppet:"name=>'restrict_public_buckets'"` +} + +type SagemakerNotebookInstance struct { + InstanceType string `puppet:"name=>'instance_type'"` + Name string + RoleArn string `puppet:"name=>'role_arn'"` + SagemakerNotebookInstanceId *string `puppet:"name=>'sagemaker_notebook_instance_id'"` + Arn *string + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Tags *map[string]string +} + +type SecretsmanagerSecret struct { + SecretsmanagerSecretId *string `puppet:"name=>'secretsmanager_secret_id'"` + Arn *string + Description *string + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Policy *string + RecoveryWindowInDays *int64 `puppet:"name=>'recovery_window_in_days'"` + RotationEnabled *bool `puppet:"name=>'rotation_enabled'"` + RotationLambdaArn *string `puppet:"name=>'rotation_lambda_arn'"` + RotationRules *RotationRules `puppet:"name=>'rotation_rules'"` + Tags *map[string]string +} + +type SecretsmanagerSecretVersion struct { + SecretId string `puppet:"name=>'secret_id'"` + SecretsmanagerSecretVersionId *string `puppet:"name=>'secretsmanager_secret_version_id'"` + Arn *string + SecretBinary *string `puppet:"name=>'secret_binary'"` + SecretString *string `puppet:"name=>'secret_string'"` + VersionId *string `puppet:"name=>'version_id'"` + VersionStages *[]string `puppet:"name=>'version_stages'"` +} + +type SecurityGroup struct { + SecurityGroupId *string `puppet:"name=>'security_group_id'"` + Arn *string + Description *string + Egress *[]GroupEgressIngress + Ingress *[]GroupEgressIngress + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + OwnerId *string `puppet:"name=>'owner_id'"` + RevokeRulesOnDelete *bool `puppet:"name=>'revoke_rules_on_delete'"` + Tags *map[string]string + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type SecurityGroupRule struct { + FromPort int64 `puppet:"name=>'from_port'"` + Protocol string + SecurityGroupId string `puppet:"name=>'security_group_id'"` + ToPort int64 `puppet:"name=>'to_port'"` + Type string + SecurityGroupRuleId *string `puppet:"name=>'security_group_rule_id'"` + CidrBlocks *[]string `puppet:"name=>'cidr_blocks'"` + Description *string + Ipv6CidrBlocks *[]string `puppet:"name=>'ipv6_cidr_blocks'"` + PrefixListIds *[]string `puppet:"name=>'prefix_list_ids'"` + Self *bool + SourceSecurityGroupId *string `puppet:"name=>'source_security_group_id'"` +} + +type SecurityhubAccount struct { + SecurityhubAccountId *string `puppet:"name=>'securityhub_account_id'"` +} + +type SecurityhubProductSubscription struct { + ProductArn string `puppet:"name=>'product_arn'"` + SecurityhubProductSubscriptionId *string `puppet:"name=>'securityhub_product_subscription_id'"` + Arn *string +} + +type SecurityhubStandardsSubscription struct { + StandardsArn string `puppet:"name=>'standards_arn'"` + SecurityhubStandardsSubscriptionId *string `puppet:"name=>'securityhub_standards_subscription_id'"` +} + +type ServiceDiscoveryHttpNamespace struct { + Name string + ServiceDiscoveryHttpNamespaceId *string `puppet:"name=>'service_discovery_http_namespace_id'"` + Arn *string + Description *string +} + +type 
ServiceDiscoveryPrivateDnsNamespace struct { + Name string + Vpc string + ServiceDiscoveryPrivateDnsNamespaceId *string `puppet:"name=>'service_discovery_private_dns_namespace_id'"` + Arn *string + Description *string + HostedZone *string `puppet:"name=>'hosted_zone'"` +} + +type ServiceDiscoveryPublicDnsNamespace struct { + Name string + ServiceDiscoveryPublicDnsNamespaceId *string `puppet:"name=>'service_discovery_public_dns_namespace_id'"` + Arn *string + Description *string + HostedZone *string `puppet:"name=>'hosted_zone'"` +} + +type ServiceDiscoveryService struct { + Name string + ServiceDiscoveryServiceId *string `puppet:"name=>'service_discovery_service_id'"` + Arn *string + Description *string + DnsConfig *DnsConfig `puppet:"name=>'dns_config'"` + HealthCheckConfig *CheckConfig `puppet:"name=>'health_check_config'"` + HealthCheckCustomConfig *CustomConfig `puppet:"name=>'health_check_custom_config'"` +} + +type ServicecatalogPortfolio struct { + Name string + ServicecatalogPortfolioId *string `puppet:"name=>'servicecatalog_portfolio_id'"` + Arn *string + CreatedTime *string `puppet:"name=>'created_time'"` + Description *string + ProviderName *string `puppet:"name=>'provider_name'"` + Tags *map[string]string +} + +type SesActiveReceiptRuleSet struct { + RuleSetName string `puppet:"name=>'rule_set_name'"` + SesActiveReceiptRuleSetId *string `puppet:"name=>'ses_active_receipt_rule_set_id'"` +} + +type SesConfigurationSet struct { + Name string + SesConfigurationSetId *string `puppet:"name=>'ses_configuration_set_id'"` +} + +type SesDomainDkim struct { + Domain string + SesDomainDkimId *string `puppet:"name=>'ses_domain_dkim_id'"` + DkimTokens *[]string `puppet:"name=>'dkim_tokens'"` +} + +type SesDomainIdentity struct { + Domain string + SesDomainIdentityId *string `puppet:"name=>'ses_domain_identity_id'"` + Arn *string + VerificationToken *string `puppet:"name=>'verification_token'"` +} + +type SesDomainIdentityVerification struct { + Domain string + SesDomainIdentityVerificationId *string `puppet:"name=>'ses_domain_identity_verification_id'"` + Arn *string +} + +type SesDomainMailFrom struct { + Domain string + MailFromDomain string `puppet:"name=>'mail_from_domain'"` + SesDomainMailFromId *string `puppet:"name=>'ses_domain_mail_from_id'"` + BehaviorOnMxFailure *string `puppet:"name=>'behavior_on_mx_failure'"` +} + +type SesEventDestination struct { + ConfigurationSetName string `puppet:"name=>'configuration_set_name'"` + MatchingTypes []string `puppet:"name=>'matching_types'"` + Name string + SesEventDestinationId *string `puppet:"name=>'ses_event_destination_id'"` + CloudwatchDestination *[]CloudwatchDestination `puppet:"name=>'cloudwatch_destination'"` + Enabled *bool + KinesisDestination *KinesisDestination `puppet:"name=>'kinesis_destination'"` + SnsDestination *SnsDestination `puppet:"name=>'sns_destination'"` +} + +type SesIdentityNotificationTopic struct { + Identity string + NotificationType string `puppet:"name=>'notification_type'"` + SesIdentityNotificationTopicId *string `puppet:"name=>'ses_identity_notification_topic_id'"` + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type SesReceiptFilter struct { + Cidr string + Name string + Policy string + SesReceiptFilterId *string `puppet:"name=>'ses_receipt_filter_id'"` +} + +type SesReceiptRule struct { + Name string + RuleSetName string `puppet:"name=>'rule_set_name'"` + SesReceiptRuleId *string `puppet:"name=>'ses_receipt_rule_id'"` + AddHeaderAction *[]HeaderAction `puppet:"name=>'add_header_action'"` + After 
*string + BounceAction *[]BounceAction `puppet:"name=>'bounce_action'"` + Enabled *bool + LambdaAction *[]LambdaAction `puppet:"name=>'lambda_action'"` + Recipients *[]string + S3Action *[]S3Action `puppet:"name=>'s3_action'"` + ScanEnabled *bool `puppet:"name=>'scan_enabled'"` + SnsAction *[]SnsAction `puppet:"name=>'sns_action'"` + StopAction *[]StopAction `puppet:"name=>'stop_action'"` + TlsPolicy *string `puppet:"name=>'tls_policy'"` + WorkmailAction *[]WorkmailAction `puppet:"name=>'workmail_action'"` +} + +type SesReceiptRuleSet struct { + RuleSetName string `puppet:"name=>'rule_set_name'"` + SesReceiptRuleSetId *string `puppet:"name=>'ses_receipt_rule_set_id'"` +} + +type SesTemplate struct { + Name string + SesTemplateId *string `puppet:"name=>'ses_template_id'"` + Html *string + Subject *string + Text *string +} + +type SfnActivity struct { + Name string + SfnActivityId *string `puppet:"name=>'sfn_activity_id'"` + CreationDate *string `puppet:"name=>'creation_date'"` + Tags *map[string]string +} + +type SfnStateMachine struct { + Definition string + Name string + RoleArn string `puppet:"name=>'role_arn'"` + SfnStateMachineId *string `puppet:"name=>'sfn_state_machine_id'"` + CreationDate *string `puppet:"name=>'creation_date'"` + Status *string + Tags *map[string]string +} + +type SimpledbDomain struct { + Name string + SimpledbDomainId *string `puppet:"name=>'simpledb_domain_id'"` +} + +type SnapshotCreateVolumePermission struct { + AccountId string `puppet:"name=>'account_id'"` + SnapshotId string `puppet:"name=>'snapshot_id'"` + SnapshotCreateVolumePermissionId *string `puppet:"name=>'snapshot_create_volume_permission_id'"` +} + +type SnsPlatformApplication struct { + Name string + Platform string + PlatformCredential string `puppet:"name=>'platform_credential'"` + SnsPlatformApplicationId *string `puppet:"name=>'sns_platform_application_id'"` + Arn *string + EventDeliveryFailureTopicArn *string `puppet:"name=>'event_delivery_failure_topic_arn'"` + EventEndpointCreatedTopicArn *string `puppet:"name=>'event_endpoint_created_topic_arn'"` + EventEndpointDeletedTopicArn *string `puppet:"name=>'event_endpoint_deleted_topic_arn'"` + EventEndpointUpdatedTopicArn *string `puppet:"name=>'event_endpoint_updated_topic_arn'"` + FailureFeedbackRoleArn *string `puppet:"name=>'failure_feedback_role_arn'"` + PlatformPrincipal *string `puppet:"name=>'platform_principal'"` + SuccessFeedbackRoleArn *string `puppet:"name=>'success_feedback_role_arn'"` + SuccessFeedbackSampleRate *string `puppet:"name=>'success_feedback_sample_rate'"` +} + +type SnsSmsPreferences struct { + SnsSmsPreferencesId *string `puppet:"name=>'sns_sms_preferences_id'"` + DefaultSenderId *string `puppet:"name=>'default_sender_id'"` + DefaultSmsType *string `puppet:"name=>'default_sms_type'"` + DeliveryStatusIamRoleArn *string `puppet:"name=>'delivery_status_iam_role_arn'"` + DeliveryStatusSuccessSamplingRate *string `puppet:"name=>'delivery_status_success_sampling_rate'"` + MonthlySpendLimit *string `puppet:"name=>'monthly_spend_limit'"` + UsageReportS3Bucket *string `puppet:"name=>'usage_report_s3_bucket'"` +} + +type SnsTopic struct { + SnsTopicId *string `puppet:"name=>'sns_topic_id'"` + ApplicationFailureFeedbackRoleArn *string `puppet:"name=>'application_failure_feedback_role_arn'"` + ApplicationSuccessFeedbackRoleArn *string `puppet:"name=>'application_success_feedback_role_arn'"` + ApplicationSuccessFeedbackSampleRate *int64 `puppet:"name=>'application_success_feedback_sample_rate'"` + Arn *string + DeliveryPolicy 
*string `puppet:"name=>'delivery_policy'"` + DisplayName *string `puppet:"name=>'display_name'"` + HttpFailureFeedbackRoleArn *string `puppet:"name=>'http_failure_feedback_role_arn'"` + HttpSuccessFeedbackRoleArn *string `puppet:"name=>'http_success_feedback_role_arn'"` + HttpSuccessFeedbackSampleRate *int64 `puppet:"name=>'http_success_feedback_sample_rate'"` + KmsMasterKeyId *string `puppet:"name=>'kms_master_key_id'"` + LambdaFailureFeedbackRoleArn *string `puppet:"name=>'lambda_failure_feedback_role_arn'"` + LambdaSuccessFeedbackRoleArn *string `puppet:"name=>'lambda_success_feedback_role_arn'"` + LambdaSuccessFeedbackSampleRate *int64 `puppet:"name=>'lambda_success_feedback_sample_rate'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Policy *string + SqsFailureFeedbackRoleArn *string `puppet:"name=>'sqs_failure_feedback_role_arn'"` + SqsSuccessFeedbackRoleArn *string `puppet:"name=>'sqs_success_feedback_role_arn'"` + SqsSuccessFeedbackSampleRate *int64 `puppet:"name=>'sqs_success_feedback_sample_rate'"` +} + +type SnsTopicPolicy struct { + Arn string + Policy string + SnsTopicPolicyId *string `puppet:"name=>'sns_topic_policy_id'"` +} + +type SnsTopicSubscription struct { + Endpoint string + Protocol string + TopicArn string `puppet:"name=>'topic_arn'"` + SnsTopicSubscriptionId *string `puppet:"name=>'sns_topic_subscription_id'"` + Arn *string + ConfirmationTimeoutInMinutes *int64 `puppet:"name=>'confirmation_timeout_in_minutes'"` + DeliveryPolicy *string `puppet:"name=>'delivery_policy'"` + EndpointAutoConfirms *bool `puppet:"name=>'endpoint_auto_confirms'"` + FilterPolicy *string `puppet:"name=>'filter_policy'"` + RawMessageDelivery *bool `puppet:"name=>'raw_message_delivery'"` +} + +type SpotDatafeedSubscription struct { + Bucket string + SpotDatafeedSubscriptionId *string `puppet:"name=>'spot_datafeed_subscription_id'"` + Prefix *string +} + +type SpotFleetRequest struct { + IamFleetRole string `puppet:"name=>'iam_fleet_role'"` + LaunchSpecification []LaunchSpecification `puppet:"name=>'launch_specification'"` + TargetCapacity int64 `puppet:"name=>'target_capacity'"` + SpotFleetRequestId *string `puppet:"name=>'spot_fleet_request_id'"` + AllocationStrategy *string `puppet:"name=>'allocation_strategy'"` + ClientToken *string `puppet:"name=>'client_token'"` + ExcessCapacityTerminationPolicy *string `puppet:"name=>'excess_capacity_termination_policy'"` + FleetType *string `puppet:"name=>'fleet_type'"` + InstanceInterruptionBehaviour *string `puppet:"name=>'instance_interruption_behaviour'"` + InstancePoolsToUseCount *int64 `puppet:"name=>'instance_pools_to_use_count'"` + LoadBalancers *[]string `puppet:"name=>'load_balancers'"` + ReplaceUnhealthyInstances *bool `puppet:"name=>'replace_unhealthy_instances'"` + SpotPrice *string `puppet:"name=>'spot_price'"` + SpotRequestState *string `puppet:"name=>'spot_request_state'"` + TargetGroupArns *[]string `puppet:"name=>'target_group_arns'"` + TerminateInstancesWithExpiration *bool `puppet:"name=>'terminate_instances_with_expiration'"` + ValidFrom *time.Time `puppet:"name=>'valid_from'"` + ValidUntil *time.Time `puppet:"name=>'valid_until'"` + WaitForFulfillment *bool `puppet:"name=>'wait_for_fulfillment'"` +} + +type SpotInstanceRequest struct { + Ami string + InstanceType string `puppet:"name=>'instance_type'"` + SpotInstanceRequestId *string `puppet:"name=>'spot_instance_request_id'"` + Arn *string + AssociatePublicIpAddress *bool `puppet:"name=>'associate_public_ip_address'"` + AvailabilityZone *string 
`puppet:"name=>'availability_zone'"` + BlockDurationMinutes *int64 `puppet:"name=>'block_duration_minutes'"` + CpuCoreCount *int64 `puppet:"name=>'cpu_core_count'"` + CpuThreadsPerCore *int64 `puppet:"name=>'cpu_threads_per_core'"` + CreditSpecification *Specification `puppet:"name=>'credit_specification'"` + DisableApiTermination *bool `puppet:"name=>'disable_api_termination'"` + EbsBlockDevice *[]RequestEbsBlockDevice `puppet:"name=>'ebs_block_device'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + EphemeralBlockDevice *[]InstanceEphemeralBlockDevice `puppet:"name=>'ephemeral_block_device'"` + GetPasswordData *bool `puppet:"name=>'get_password_data'"` + HostId *string `puppet:"name=>'host_id'"` + IamInstanceProfile *string `puppet:"name=>'iam_instance_profile'"` + InstanceInitiatedShutdownBehavior *string `puppet:"name=>'instance_initiated_shutdown_behavior'"` + InstanceInterruptionBehaviour *string `puppet:"name=>'instance_interruption_behaviour'"` + InstanceState *string `puppet:"name=>'instance_state'"` + Ipv6AddressCount *int64 `puppet:"name=>'ipv6_address_count'"` + Ipv6Addresses *[]string `puppet:"name=>'ipv6_addresses'"` + KeyName *string `puppet:"name=>'key_name'"` + LaunchGroup *string `puppet:"name=>'launch_group'"` + Monitoring *bool + NetworkInterface *[]Interface `puppet:"name=>'network_interface'"` + PasswordData *string `puppet:"name=>'password_data'"` + PlacementGroup *string `puppet:"name=>'placement_group'"` + PrimaryNetworkInterfaceId *string `puppet:"name=>'primary_network_interface_id'"` + PrivateDns *string `puppet:"name=>'private_dns'"` + PrivateIp *string `puppet:"name=>'private_ip'"` + PublicDns *string `puppet:"name=>'public_dns'"` + PublicIp *string `puppet:"name=>'public_ip'"` + RootBlockDevice *RootBlockDevice `puppet:"name=>'root_block_device'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SourceDestCheck *bool `puppet:"name=>'source_dest_check'"` + SpotBidStatus *string `puppet:"name=>'spot_bid_status'"` + SpotInstanceId *string `puppet:"name=>'spot_instance_id'"` + SpotPrice *string `puppet:"name=>'spot_price'"` + SpotRequestState *string `puppet:"name=>'spot_request_state'"` + SpotType *string `puppet:"name=>'spot_type'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Tags *map[string]string + Tenancy *string + UserData *string `puppet:"name=>'user_data'"` + UserDataBase64 *string `puppet:"name=>'user_data_base64'"` + ValidFrom *time.Time `puppet:"name=>'valid_from'"` + ValidUntil *time.Time `puppet:"name=>'valid_until'"` + VolumeTags *map[string]string `puppet:"name=>'volume_tags'"` + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` + WaitForFulfillment *bool `puppet:"name=>'wait_for_fulfillment'"` +} + +type SqsQueue struct { + SqsQueueId *string `puppet:"name=>'sqs_queue_id'"` + Arn *string + ContentBasedDeduplication *bool `puppet:"name=>'content_based_deduplication'"` + DelaySeconds *int64 `puppet:"name=>'delay_seconds'"` + FifoQueue *bool `puppet:"name=>'fifo_queue'"` + KmsDataKeyReusePeriodSeconds *int64 `puppet:"name=>'kms_data_key_reuse_period_seconds'"` + KmsMasterKeyId *string `puppet:"name=>'kms_master_key_id'"` + MaxMessageSize *int64 `puppet:"name=>'max_message_size'"` + MessageRetentionSeconds *int64 `puppet:"name=>'message_retention_seconds'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + Policy *string + ReceiveWaitTimeSeconds *int64 `puppet:"name=>'receive_wait_time_seconds'"` + RedrivePolicy *string `puppet:"name=>'redrive_policy'"` + Tags *map[string]string + 
VisibilityTimeoutSeconds *int64 `puppet:"name=>'visibility_timeout_seconds'"` +} + +type SqsQueuePolicy struct { + Policy string + QueueUrl string `puppet:"name=>'queue_url'"` + SqsQueuePolicyId *string `puppet:"name=>'sqs_queue_policy_id'"` +} + +type SsmActivation struct { + IamRole string `puppet:"name=>'iam_role'"` + SsmActivationId *string `puppet:"name=>'ssm_activation_id'"` + ActivationCode *string `puppet:"name=>'activation_code'"` + Description *string + ExpirationDate *time.Time `puppet:"name=>'expiration_date'"` + Expired *string + Name *string + RegistrationCount *int64 `puppet:"name=>'registration_count'"` + RegistrationLimit *int64 `puppet:"name=>'registration_limit'"` +} + +type SsmAssociation struct { + Name string + SsmAssociationId *string `puppet:"name=>'ssm_association_id'"` + AssociationId *string `puppet:"name=>'association_id'"` + AssociationName *string `puppet:"name=>'association_name'"` + DocumentVersion *string `puppet:"name=>'document_version'"` + InstanceId *string `puppet:"name=>'instance_id'"` + OutputLocation *OutputLocation `puppet:"name=>'output_location'"` + Parameters *map[string]string + ScheduleExpression *string `puppet:"name=>'schedule_expression'"` + Targets *[]Targets +} + +type SsmDocument struct { + Content string + DocumentType string `puppet:"name=>'document_type'"` + Name string + SsmDocumentId *string `puppet:"name=>'ssm_document_id'"` + Arn *string + CreatedDate *string `puppet:"name=>'created_date'"` + DefaultVersion *string `puppet:"name=>'default_version'"` + Description *string + DocumentFormat *string `puppet:"name=>'document_format'"` + Hash *string + HashType *string `puppet:"name=>'hash_type'"` + LatestVersion *string `puppet:"name=>'latest_version'"` + Owner *string + Parameter *[]DocumentParameter + Permissions *map[string]DocumentPermissions + PlatformTypes *[]string `puppet:"name=>'platform_types'"` + SchemaVersion *string `puppet:"name=>'schema_version'"` + Status *string + Tags *map[string]string +} + +type SsmMaintenanceWindow struct { + Cutoff int64 + Duration int64 + Name string + Schedule string + SsmMaintenanceWindowId *string `puppet:"name=>'ssm_maintenance_window_id'"` + AllowUnassociatedTargets *bool `puppet:"name=>'allow_unassociated_targets'"` + Enabled *bool + EndDate *string `puppet:"name=>'end_date'"` + ScheduleTimezone *string `puppet:"name=>'schedule_timezone'"` + StartDate *string `puppet:"name=>'start_date'"` +} + +type SsmMaintenanceWindowTarget struct { + ResourceType string `puppet:"name=>'resource_type'"` + Targets []Targets + WindowId string `puppet:"name=>'window_id'"` + SsmMaintenanceWindowTargetId *string `puppet:"name=>'ssm_maintenance_window_target_id'"` + OwnerInformation *string `puppet:"name=>'owner_information'"` +} + +type SsmMaintenanceWindowTask struct { + MaxConcurrency string `puppet:"name=>'max_concurrency'"` + MaxErrors string `puppet:"name=>'max_errors'"` + ServiceRoleArn string `puppet:"name=>'service_role_arn'"` + Targets []Targets + TaskArn string `puppet:"name=>'task_arn'"` + TaskType string `puppet:"name=>'task_type'"` + WindowId string `puppet:"name=>'window_id'"` + SsmMaintenanceWindowTaskId *string `puppet:"name=>'ssm_maintenance_window_task_id'"` + Description *string + LoggingInfo *LoggingInfo `puppet:"name=>'logging_info'"` + Name *string + Priority *int64 + TaskParameters *[]TaskParameters `puppet:"name=>'task_parameters'"` +} + +type SsmParameter struct { + Name string + Type string + Value string + SsmParameterId *string `puppet:"name=>'ssm_parameter_id'"` + AllowedPattern 
*string `puppet:"name=>'allowed_pattern'"` + Arn *string + Description *string + KeyId *string `puppet:"name=>'key_id'"` + Overwrite *bool + Tags *map[string]string +} + +type SsmPatchBaseline struct { + Name string + SsmPatchBaselineId *string `puppet:"name=>'ssm_patch_baseline_id'"` + ApprovalRule *[]ApprovalRule `puppet:"name=>'approval_rule'"` + ApprovedPatches *[]string `puppet:"name=>'approved_patches'"` + ApprovedPatchesComplianceLevel *string `puppet:"name=>'approved_patches_compliance_level'"` + Description *string + GlobalFilter *[]Targets `puppet:"name=>'global_filter'"` + OperatingSystem *string `puppet:"name=>'operating_system'"` + RejectedPatches *[]string `puppet:"name=>'rejected_patches'"` +} + +type SsmPatchGroup struct { + BaselineId string `puppet:"name=>'baseline_id'"` + PatchGroup string `puppet:"name=>'patch_group'"` + SsmPatchGroupId *string `puppet:"name=>'ssm_patch_group_id'"` +} + +type SsmResourceDataSync struct { + Name string + SsmResourceDataSyncId *string `puppet:"name=>'ssm_resource_data_sync_id'"` + S3Destination *S3Destination `puppet:"name=>'s3_destination'"` +} + +type StoragegatewayCache struct { + DiskId string `puppet:"name=>'disk_id'"` + GatewayArn string `puppet:"name=>'gateway_arn'"` + StoragegatewayCacheId *string `puppet:"name=>'storagegateway_cache_id'"` +} + +type StoragegatewayCachedIscsiVolume struct { + GatewayArn string `puppet:"name=>'gateway_arn'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + TargetName string `puppet:"name=>'target_name'"` + VolumeSizeInBytes int64 `puppet:"name=>'volume_size_in_bytes'"` + StoragegatewayCachedIscsiVolumeId *string `puppet:"name=>'storagegateway_cached_iscsi_volume_id'"` + Arn *string + ChapEnabled *bool `puppet:"name=>'chap_enabled'"` + LunNumber *int64 `puppet:"name=>'lun_number'"` + NetworkInterfacePort *int64 `puppet:"name=>'network_interface_port'"` + SnapshotId *string `puppet:"name=>'snapshot_id'"` + SourceVolumeArn *string `puppet:"name=>'source_volume_arn'"` + TargetArn *string `puppet:"name=>'target_arn'"` + VolumeArn *string `puppet:"name=>'volume_arn'"` + VolumeId *string `puppet:"name=>'volume_id'"` +} + +type StoragegatewayGateway struct { + GatewayName string `puppet:"name=>'gateway_name'"` + GatewayTimezone string `puppet:"name=>'gateway_timezone'"` + StoragegatewayGatewayId *string `puppet:"name=>'storagegateway_gateway_id'"` + ActivationKey *string `puppet:"name=>'activation_key'"` + Arn *string + GatewayId *string `puppet:"name=>'gateway_id'"` + GatewayIpAddress *string `puppet:"name=>'gateway_ip_address'"` + GatewayType *string `puppet:"name=>'gateway_type'"` + MediumChangerType *string `puppet:"name=>'medium_changer_type'"` + SmbActiveDirectorySettings *DirectorySettings `puppet:"name=>'smb_active_directory_settings'"` + SmbGuestPassword *string `puppet:"name=>'smb_guest_password'"` + TapeDriveType *string `puppet:"name=>'tape_drive_type'"` +} + +type StoragegatewayNfsFileShare struct { + ClientList []string `puppet:"name=>'client_list'"` + GatewayArn string `puppet:"name=>'gateway_arn'"` + LocationArn string `puppet:"name=>'location_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + StoragegatewayNfsFileShareId *string `puppet:"name=>'storagegateway_nfs_file_share_id'"` + Arn *string + DefaultStorageClass *string `puppet:"name=>'default_storage_class'"` + FileshareId *string `puppet:"name=>'fileshare_id'"` + GuessMimeTypeEnabled *bool `puppet:"name=>'guess_mime_type_enabled'"` + KmsEncrypted *bool `puppet:"name=>'kms_encrypted'"` + KmsKeyArn *string 
`puppet:"name=>'kms_key_arn'"` + NfsFileShareDefaults *Defaults `puppet:"name=>'nfs_file_share_defaults'"` + ObjectAcl *string `puppet:"name=>'object_acl'"` + ReadOnly *bool `puppet:"name=>'read_only'"` + RequesterPays *bool `puppet:"name=>'requester_pays'"` + Squash *string +} + +type StoragegatewaySmbFileShare struct { + GatewayArn string `puppet:"name=>'gateway_arn'"` + LocationArn string `puppet:"name=>'location_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + StoragegatewaySmbFileShareId *string `puppet:"name=>'storagegateway_smb_file_share_id'"` + Arn *string + Authentication *string + DefaultStorageClass *string `puppet:"name=>'default_storage_class'"` + FileshareId *string `puppet:"name=>'fileshare_id'"` + GuessMimeTypeEnabled *bool `puppet:"name=>'guess_mime_type_enabled'"` + InvalidUserList *[]string `puppet:"name=>'invalid_user_list'"` + KmsEncrypted *bool `puppet:"name=>'kms_encrypted'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + ObjectAcl *string `puppet:"name=>'object_acl'"` + ReadOnly *bool `puppet:"name=>'read_only'"` + RequesterPays *bool `puppet:"name=>'requester_pays'"` + ValidUserList *[]string `puppet:"name=>'valid_user_list'"` +} + +type StoragegatewayUploadBuffer struct { + DiskId string `puppet:"name=>'disk_id'"` + GatewayArn string `puppet:"name=>'gateway_arn'"` + StoragegatewayUploadBufferId *string `puppet:"name=>'storagegateway_upload_buffer_id'"` +} + +type StoragegatewayWorkingStorage struct { + DiskId string `puppet:"name=>'disk_id'"` + GatewayArn string `puppet:"name=>'gateway_arn'"` + StoragegatewayWorkingStorageId *string `puppet:"name=>'storagegateway_working_storage_id'"` +} + +type Subnet struct { + CidrBlock string `puppet:"name=>'cidr_block'"` + VpcId string `puppet:"name=>'vpc_id'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Arn *string + AssignIpv6AddressOnCreation *bool `puppet:"name=>'assign_ipv6_address_on_creation'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + AvailabilityZoneId *string `puppet:"name=>'availability_zone_id'"` + Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"` + Ipv6CidrBlockAssociationId *string `puppet:"name=>'ipv6_cidr_block_association_id'"` + MapPublicIpOnLaunch *bool `puppet:"name=>'map_public_ip_on_launch'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string +} + +type SwfDomain struct { + WorkflowExecutionRetentionPeriodInDays string `puppet:"name=>'workflow_execution_retention_period_in_days'"` + SwfDomainId *string `puppet:"name=>'swf_domain_id'"` + Description *string + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` +} + +type TransferServer struct { + TransferServerId *string `puppet:"name=>'transfer_server_id'"` + Arn *string + Endpoint *string + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + IdentityProviderType *string `puppet:"name=>'identity_provider_type'"` + InvocationRole *string `puppet:"name=>'invocation_role'"` + LoggingRole *string `puppet:"name=>'logging_role'"` + Tags *map[string]string + Url *string +} + +type TransferSshKey struct { + Body string + ServerId string `puppet:"name=>'server_id'"` + UserName string `puppet:"name=>'user_name'"` + TransferSshKeyId *string `puppet:"name=>'transfer_ssh_key_id'"` +} + +type TransferUser struct { + Role string + ServerId string `puppet:"name=>'server_id'"` + UserName string `puppet:"name=>'user_name'"` + TransferUserId *string `puppet:"name=>'transfer_user_id'"` + Arn *string + HomeDirectory *string `puppet:"name=>'home_directory'"` + Policy *string + Tags 
*map[string]string +} + +type VolumeAttachment struct { + DeviceName string `puppet:"name=>'device_name'"` + InstanceId string `puppet:"name=>'instance_id'"` + VolumeId string `puppet:"name=>'volume_id'"` + VolumeAttachmentId *string `puppet:"name=>'volume_attachment_id'"` + ForceDetach *bool `puppet:"name=>'force_detach'"` + SkipDestroy *bool `puppet:"name=>'skip_destroy'"` +} + +type Vpc struct { + CidrBlock string `puppet:"name=>'cidr_block'"` + VpcId *string `puppet:"name=>'vpc_id'"` + Arn *string + AssignGeneratedIpv6CidrBlock *bool `puppet:"name=>'assign_generated_ipv6_cidr_block'"` + DefaultNetworkAclId *string `puppet:"name=>'default_network_acl_id'"` + DefaultRouteTableId *string `puppet:"name=>'default_route_table_id'"` + DefaultSecurityGroupId *string `puppet:"name=>'default_security_group_id'"` + DhcpOptionsId *string `puppet:"name=>'dhcp_options_id'"` + EnableClassiclink *bool `puppet:"name=>'enable_classiclink'"` + EnableClassiclinkDnsSupport *bool `puppet:"name=>'enable_classiclink_dns_support'"` + EnableDnsHostnames *bool `puppet:"name=>'enable_dns_hostnames'"` + EnableDnsSupport *bool `puppet:"name=>'enable_dns_support'"` + InstanceTenancy *string `puppet:"name=>'instance_tenancy'"` + Ipv6AssociationId *string `puppet:"name=>'ipv6_association_id'"` + Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"` + MainRouteTableId *string `puppet:"name=>'main_route_table_id'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string +} + +type VpcDhcpOptions struct { + VpcDhcpOptionsId *string `puppet:"name=>'vpc_dhcp_options_id'"` + DomainName *string `puppet:"name=>'domain_name'"` + DomainNameServers *[]string `puppet:"name=>'domain_name_servers'"` + NetbiosNameServers *[]string `puppet:"name=>'netbios_name_servers'"` + NetbiosNodeType *string `puppet:"name=>'netbios_node_type'"` + NtpServers *[]string `puppet:"name=>'ntp_servers'"` + OwnerId *string `puppet:"name=>'owner_id'"` + Tags *map[string]string +} + +type VpcDhcpOptionsAssociation struct { + DhcpOptionsId string `puppet:"name=>'dhcp_options_id'"` + VpcId string `puppet:"name=>'vpc_id'"` + VpcDhcpOptionsAssociationId *string `puppet:"name=>'vpc_dhcp_options_association_id'"` +} + +type VpcEndpoint struct { + ServiceName string `puppet:"name=>'service_name'"` + VpcId string `puppet:"name=>'vpc_id'"` + VpcEndpointId *string `puppet:"name=>'vpc_endpoint_id'"` + AutoAccept *bool `puppet:"name=>'auto_accept'"` + CidrBlocks *[]string `puppet:"name=>'cidr_blocks'"` + DnsEntry *[]Entry `puppet:"name=>'dns_entry'"` + NetworkInterfaceIds *[]string `puppet:"name=>'network_interface_ids'"` + Policy *string + PrefixListId *string `puppet:"name=>'prefix_list_id'"` + PrivateDnsEnabled *bool `puppet:"name=>'private_dns_enabled'"` + RouteTableIds *[]string `puppet:"name=>'route_table_ids'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + State *string + SubnetIds *[]string `puppet:"name=>'subnet_ids'"` + VpcEndpointType *string `puppet:"name=>'vpc_endpoint_type'"` +} + +type VpcEndpointConnectionNotification struct { + ConnectionEvents []string `puppet:"name=>'connection_events'"` + ConnectionNotificationArn string `puppet:"name=>'connection_notification_arn'"` + VpcEndpointConnectionNotificationId *string `puppet:"name=>'vpc_endpoint_connection_notification_id'"` + NotificationType *string `puppet:"name=>'notification_type'"` + State *string + VpcEndpointId *string `puppet:"name=>'vpc_endpoint_id'"` + VpcEndpointServiceId *string `puppet:"name=>'vpc_endpoint_service_id'"` +} + +type 
VpcEndpointRouteTableAssociation struct {
+  RouteTableId string `puppet:"name=>'route_table_id'"`
+  VpcEndpointId string `puppet:"name=>'vpc_endpoint_id'"`
+  VpcEndpointRouteTableAssociationId *string `puppet:"name=>'vpc_endpoint_route_table_association_id'"`
+}
+
+type VpcEndpointService struct {
+  AcceptanceRequired bool `puppet:"name=>'acceptance_required'"`
+  NetworkLoadBalancerArns []string `puppet:"name=>'network_load_balancer_arns'"`
+  VpcEndpointServiceId *string `puppet:"name=>'vpc_endpoint_service_id'"`
+  AllowedPrincipals *[]string `puppet:"name=>'allowed_principals'"`
+  AvailabilityZones *[]string `puppet:"name=>'availability_zones'"`
+  BaseEndpointDnsNames *[]string `puppet:"name=>'base_endpoint_dns_names'"`
+  PrivateDnsName *string `puppet:"name=>'private_dns_name'"`
+  ServiceName *string `puppet:"name=>'service_name'"`
+  ServiceType *string `puppet:"name=>'service_type'"`
+  State *string
+}
+
+type VpcEndpointServiceAllowedPrincipal struct {
+  PrincipalArn string `puppet:"name=>'principal_arn'"`
+  VpcEndpointServiceId string `puppet:"name=>'vpc_endpoint_service_id'"`
+  VpcEndpointServiceAllowedPrincipalId *string `puppet:"name=>'vpc_endpoint_service_allowed_principal_id'"`
+}
+
+type VpcEndpointSubnetAssociation struct {
+  SubnetId string `puppet:"name=>'subnet_id'"`
+  VpcEndpointId string `puppet:"name=>'vpc_endpoint_id'"`
+  VpcEndpointSubnetAssociationId *string `puppet:"name=>'vpc_endpoint_subnet_association_id'"`
+}
+
+type VpcIpv4CidrBlockAssociation struct {
+  CidrBlock string `puppet:"name=>'cidr_block'"`
+  VpcId string `puppet:"name=>'vpc_id'"`
+  VpcIpv4CidrBlockAssociationId *string `puppet:"name=>'vpc_ipv4_cidr_block_association_id'"`
+}
+
+type VpcPeeringConnection struct {
+  PeerVpcId string `puppet:"name=>'peer_vpc_id'"`
+  VpcId string `puppet:"name=>'vpc_id'"`
+  VpcPeeringConnectionId *string `puppet:"name=>'vpc_peering_connection_id'"`
+  AcceptStatus *string `puppet:"name=>'accept_status'"`
+  Accepter *AccepterRequester
+  AutoAccept *bool `puppet:"name=>'auto_accept'"`
+  PeerOwnerId *string `puppet:"name=>'peer_owner_id'"`
+  PeerRegion *string `puppet:"name=>'peer_region'"`
+  Requester *AccepterRequester
+  Tags *map[string]string
+}
+
+type VpcPeeringConnectionAccepter struct {
+  VpcPeeringConnectionId string `puppet:"name=>'vpc_peering_connection_id'"`
+  VpcPeeringConnectionAccepterId *string `puppet:"name=>'vpc_peering_connection_accepter_id'"`
+  AcceptStatus *string `puppet:"name=>'accept_status'"`
+  Accepter *AccepterRequester
+  AutoAccept *bool `puppet:"name=>'auto_accept'"`
+  PeerOwnerId *string `puppet:"name=>'peer_owner_id'"`
+  PeerRegion *string `puppet:"name=>'peer_region'"`
+  PeerVpcId *string `puppet:"name=>'peer_vpc_id'"`
+  Requester *AccepterRequester
+  Tags *map[string]string
+  VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type VpcPeeringConnectionOptions struct {
+  VpcPeeringConnectionId string `puppet:"name=>'vpc_peering_connection_id'"`
+  VpcPeeringConnectionOptionsId *string `puppet:"name=>'vpc_peering_connection_options_id'"`
+  Accepter *AccepterRequester
+  Requester *AccepterRequester
+}
+
+type VpnConnection struct {
+  CustomerGatewayId string `puppet:"name=>'customer_gateway_id'"`
+  Type string
+  VpnConnectionId *string `puppet:"name=>'vpn_connection_id'"`
+  CustomerGatewayConfiguration *string `puppet:"name=>'customer_gateway_configuration'"`
+  Routes *[]Routes
+  StaticRoutesOnly *bool `puppet:"name=>'static_routes_only'"`
+  Tags *map[string]string
+  TransitGatewayId *string `puppet:"name=>'transit_gateway_id'"`
+  Tunnel1Address *string `puppet:"name=>'tunnel1_address'"`
+  Tunnel1BgpAsn *string `puppet:"name=>'tunnel1_bgp_asn'"`
+  Tunnel1BgpHoldtime *int64 `puppet:"name=>'tunnel1_bgp_holdtime'"`
+  Tunnel1CgwInsideAddress *string `puppet:"name=>'tunnel1_cgw_inside_address'"`
+  Tunnel1InsideCidr *string `puppet:"name=>'tunnel1_inside_cidr'"`
+  Tunnel1PresharedKey *string `puppet:"name=>'tunnel1_preshared_key'"`
+  Tunnel1VgwInsideAddress *string `puppet:"name=>'tunnel1_vgw_inside_address'"`
+  Tunnel2Address *string `puppet:"name=>'tunnel2_address'"`
+  Tunnel2BgpAsn *string `puppet:"name=>'tunnel2_bgp_asn'"`
+  Tunnel2BgpHoldtime *int64 `puppet:"name=>'tunnel2_bgp_holdtime'"`
+  Tunnel2CgwInsideAddress *string `puppet:"name=>'tunnel2_cgw_inside_address'"`
+  Tunnel2InsideCidr *string `puppet:"name=>'tunnel2_inside_cidr'"`
+  Tunnel2PresharedKey *string `puppet:"name=>'tunnel2_preshared_key'"`
+  Tunnel2VgwInsideAddress *string `puppet:"name=>'tunnel2_vgw_inside_address'"`
+  VgwTelemetry *[]Telemetry `puppet:"name=>'vgw_telemetry'"`
+  VpnGatewayId *string `puppet:"name=>'vpn_gateway_id'"`
+}
+
+type VpnConnectionRoute struct {
+  DestinationCidrBlock string `puppet:"name=>'destination_cidr_block'"`
+  VpnConnectionId string `puppet:"name=>'vpn_connection_id'"`
+  VpnConnectionRouteId *string `puppet:"name=>'vpn_connection_route_id'"`
+}
+
+type VpnGateway struct {
+  VpnGatewayId *string `puppet:"name=>'vpn_gateway_id'"`
+  AmazonSideAsn *string `puppet:"name=>'amazon_side_asn'"`
+  AvailabilityZone *string `puppet:"name=>'availability_zone'"`
+  Tags *map[string]string
+  VpcId *string `puppet:"name=>'vpc_id'"`
+}
+
+type VpnGatewayAttachment struct {
+  VpcId string `puppet:"name=>'vpc_id'"`
+  VpnGatewayId string `puppet:"name=>'vpn_gateway_id'"`
+  VpnGatewayAttachmentId *string `puppet:"name=>'vpn_gateway_attachment_id'"`
+}
+
+type VpnGatewayRoutePropagation struct {
+  RouteTableId string `puppet:"name=>'route_table_id'"`
+  VpnGatewayId string `puppet:"name=>'vpn_gateway_id'"`
+  VpnGatewayRoutePropagationId *string `puppet:"name=>'vpn_gateway_route_propagation_id'"`
+}
+
+type WafByteMatchSet struct {
+  Name string
+  WafByteMatchSetId *string `puppet:"name=>'waf_byte_match_set_id'"`
+  ByteMatchTuples *[]Tuples `puppet:"name=>'byte_match_tuples'"`
+}
+
+type WafGeoMatchSet struct {
+  Name string
+  WafGeoMatchSetId *string `puppet:"name=>'waf_geo_match_set_id'"`
+  GeoMatchConstraint *[]Constraint `puppet:"name=>'geo_match_constraint'"`
+}
+
+type WafIpset struct {
+  Name string
+  WafIpsetId *string `puppet:"name=>'waf_ipset_id'"`
+  Arn *string
+  IpSetDescriptors *[]Constraint `puppet:"name=>'ip_set_descriptors'"`
+}
+
+type WafRateBasedRule struct {
+  MetricName string `puppet:"name=>'metric_name'"`
+  Name string
+  RateKey string `puppet:"name=>'rate_key'"`
+  RateLimit int64 `puppet:"name=>'rate_limit'"`
+  WafRateBasedRuleId *string `puppet:"name=>'waf_rate_based_rule_id'"`
+  Predicates *[]PredicatesPredicate
+}
+
+type WafRegexMatchSet struct {
+  Name string
+  WafRegexMatchSetId *string `puppet:"name=>'waf_regex_match_set_id'"`
+  RegexMatchTuple *[]Tuple `puppet:"name=>'regex_match_tuple'"`
+}
+
+type WafRegexPatternSet struct {
+  Name string
+  WafRegexPatternSetId *string `puppet:"name=>'waf_regex_pattern_set_id'"`
+  RegexPatternStrings *[]string `puppet:"name=>'regex_pattern_strings'"`
+}
+
+type WafRule struct {
+  MetricName string `puppet:"name=>'metric_name'"`
+  Name string
+  WafRuleId *string `puppet:"name=>'waf_rule_id'"`
+  Predicates *[]PredicatesPredicate
+}
+
+type WafRuleGroup struct {
+  MetricName string
`puppet:"name=>'metric_name'"` + Name string + WafRuleGroupId *string `puppet:"name=>'waf_rule_group_id'"` + ActivatedRule *[]Rule `puppet:"name=>'activated_rule'"` +} + +type WafSizeConstraintSet struct { + Name string + WafSizeConstraintSetId *string `puppet:"name=>'waf_size_constraint_set_id'"` + SizeConstraints *[]SizeConstraints `puppet:"name=>'size_constraints'"` +} + +type WafSqlInjectionMatchSet struct { + Name string + WafSqlInjectionMatchSetId *string `puppet:"name=>'waf_sql_injection_match_set_id'"` + SqlInjectionMatchTuples *[]TuplesTuple `puppet:"name=>'sql_injection_match_tuples'"` +} + +type WafWebAcl struct { + MetricName string `puppet:"name=>'metric_name'"` + Name string + WafWebAclId *string `puppet:"name=>'waf_web_acl_id'"` + DefaultAction *Action `puppet:"name=>'default_action'"` + Rules *[]RuleRules +} + +type WafXssMatchSet struct { + Name string + WafXssMatchSetId *string `puppet:"name=>'waf_xss_match_set_id'"` + XssMatchTuples *[]TuplesTuple `puppet:"name=>'xss_match_tuples'"` +} + +type WafregionalByteMatchSet struct { + Name string + WafregionalByteMatchSetId *string `puppet:"name=>'wafregional_byte_match_set_id'"` + ByteMatchTuples *[]Tuples `puppet:"name=>'byte_match_tuples'"` +} + +type WafregionalGeoMatchSet struct { + Name string + WafregionalGeoMatchSetId *string `puppet:"name=>'wafregional_geo_match_set_id'"` + GeoMatchConstraint *[]Constraint `puppet:"name=>'geo_match_constraint'"` +} + +type WafregionalIpset struct { + Name string + WafregionalIpsetId *string `puppet:"name=>'wafregional_ipset_id'"` + Arn *string + IpSetDescriptor *[]Constraint `puppet:"name=>'ip_set_descriptor'"` +} + +type WafregionalRateBasedRule struct { + MetricName string `puppet:"name=>'metric_name'"` + Name string + RateKey string `puppet:"name=>'rate_key'"` + RateLimit int64 `puppet:"name=>'rate_limit'"` + WafregionalRateBasedRuleId *string `puppet:"name=>'wafregional_rate_based_rule_id'"` + Predicate *[]PredicatesPredicate +} + +type WafregionalRegexMatchSet struct { + Name string + WafregionalRegexMatchSetId *string `puppet:"name=>'wafregional_regex_match_set_id'"` + RegexMatchTuple *[]Tuple `puppet:"name=>'regex_match_tuple'"` +} + +type WafregionalRegexPatternSet struct { + Name string + WafregionalRegexPatternSetId *string `puppet:"name=>'wafregional_regex_pattern_set_id'"` + RegexPatternStrings *[]string `puppet:"name=>'regex_pattern_strings'"` +} + +type WafregionalRule struct { + MetricName string `puppet:"name=>'metric_name'"` + Name string + WafregionalRuleId *string `puppet:"name=>'wafregional_rule_id'"` + Predicate *[]PredicatesPredicate +} + +type WafregionalRuleGroup struct { + MetricName string `puppet:"name=>'metric_name'"` + Name string + WafregionalRuleGroupId *string `puppet:"name=>'wafregional_rule_group_id'"` + ActivatedRule *[]Rule `puppet:"name=>'activated_rule'"` +} + +type WafregionalSizeConstraintSet struct { + Name string + WafregionalSizeConstraintSetId *string `puppet:"name=>'wafregional_size_constraint_set_id'"` + SizeConstraints *[]SizeConstraints `puppet:"name=>'size_constraints'"` +} + +type WafregionalSqlInjectionMatchSet struct { + Name string + WafregionalSqlInjectionMatchSetId *string `puppet:"name=>'wafregional_sql_injection_match_set_id'"` + SqlInjectionMatchTuple *[]TuplesTuple `puppet:"name=>'sql_injection_match_tuple'"` +} + +type WafregionalWebAcl struct { + MetricName string `puppet:"name=>'metric_name'"` + Name string + WafregionalWebAclId *string `puppet:"name=>'wafregional_web_acl_id'"` + DefaultAction *Action 
`puppet:"name=>'default_action'"` + Rule *[]RuleRules +} + +type WafregionalWebAclAssociation struct { + ResourceArn string `puppet:"name=>'resource_arn'"` + WebAclId string `puppet:"name=>'web_acl_id'"` + WafregionalWebAclAssociationId *string `puppet:"name=>'wafregional_web_acl_association_id'"` +} + +type WafregionalXssMatchSet struct { + Name string + WafregionalXssMatchSetId *string `puppet:"name=>'wafregional_xss_match_set_id'"` + XssMatchTuple *[]TuplesTuple `puppet:"name=>'xss_match_tuple'"` +} + +type AcceleratorAttributes struct { + FlowLogsEnabled *bool `puppet:"name=>'flow_logs_enabled'"` + FlowLogsS3Bucket *string `puppet:"name=>'flow_logs_s3_bucket'"` + FlowLogsS3Prefix *string `puppet:"name=>'flow_logs_s3_prefix'"` +} + +type AccepterRequester struct { + AllowClassicLinkToRemoteVpc *bool `puppet:"name=>'allow_classic_link_to_remote_vpc'"` + AllowRemoteVpcDnsResolution *bool `puppet:"name=>'allow_remote_vpc_dns_resolution'"` + AllowVpcToRemoteClassicLink *bool `puppet:"name=>'allow_vpc_to_remote_classic_link'"` +} + +type AccessLogs struct { + Bucket string + BucketPrefix *string `puppet:"name=>'bucket_prefix'"` + Enabled *bool + Interval *int64 +} + +type Action struct { + Type *string +} + +type Actions struct { + JobName string `puppet:"name=>'job_name'"` + Arguments *map[string]string + Timeout *int64 +} + +type Adjustment struct { + ScalingAdjustment int64 `puppet:"name=>'scaling_adjustment'"` + MetricIntervalLowerBound *string `puppet:"name=>'metric_interval_lower_bound'"` + MetricIntervalUpperBound *string `puppet:"name=>'metric_interval_upper_bound'"` +} + +type AggregationSource struct { + AccountIds []string `puppet:"name=>'account_ids'"` + AllRegions *bool `puppet:"name=>'all_regions'"` + Regions *[]string +} + +type Alarm struct { + AlarmName string `puppet:"name=>'alarm_name'"` + RoleArn string `puppet:"name=>'role_arn'"` + StateReason string `puppet:"name=>'state_reason'"` + StateValue string `puppet:"name=>'state_value'"` +} + +type AlarmConfiguration struct { + Alarms *[]string + Enabled *bool + IgnorePollAlarmFailure *bool `puppet:"name=>'ignore_poll_alarm_failure'"` +} + +type Alias struct { + EvaluateTargetHealth bool `puppet:"name=>'evaluate_target_health'"` + Name string + ZoneId string `puppet:"name=>'zone_id'"` +} + +type AliasRoutingConfig struct { + AdditionalVersionWeights *map[string]float64 `puppet:"name=>'additional_version_weights'"` +} + +type AmiEbsBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Encrypted *bool + Iops *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type AppSource struct { + Type string + Password *string + Revision *string + SshKey *string `puppet:"name=>'ssh_key'"` + Url *string + Username *string +} + +type ApplicationEnvironment struct { + Key string + Value string + Secure *bool +} + +type ApprovalRule struct { + ApproveAfterDays int64 `puppet:"name=>'approve_after_days'"` + PatchFilter []Targets `puppet:"name=>'patch_filter'"` + ComplianceLevel *string `puppet:"name=>'compliance_level'"` + EnableNonSecurity *bool `puppet:"name=>'enable_non_security'"` +} + +type Artifacts struct { + Type string + EncryptionDisabled *bool `puppet:"name=>'encryption_disabled'"` + Location *string + Name *string + NamespaceType *string `puppet:"name=>'namespace_type'"` + Packaging *string + Path *string +} + +type Association 
struct { + EventType string `puppet:"name=>'event_type'"` + LambdaArn string `puppet:"name=>'lambda_arn'"` + IncludeBody *bool `puppet:"name=>'include_body'"` +} + +type Attachment struct { + DeviceIndex int64 `puppet:"name=>'device_index'"` + Instance string + AttachmentId *string `puppet:"name=>'attachment_id'"` +} + +type Attribute struct { + Name string + Type string +} + +type AttributeConstraints struct { + MaxValue *string `puppet:"name=>'max_value'"` + MinValue *string `puppet:"name=>'min_value'"` +} + +type Attributes struct { + InstanceProfile string `puppet:"name=>'instance_profile'"` + AdditionalMasterSecurityGroups *string `puppet:"name=>'additional_master_security_groups'"` + AdditionalSlaveSecurityGroups *string `puppet:"name=>'additional_slave_security_groups'"` + EmrManagedMasterSecurityGroup *string `puppet:"name=>'emr_managed_master_security_group'"` + EmrManagedSlaveSecurityGroup *string `puppet:"name=>'emr_managed_slave_security_group'"` + KeyName *string `puppet:"name=>'key_name'"` + ServiceAccessSecurityGroup *string `puppet:"name=>'service_access_security_group'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type Audio struct { + AudioPackingMode *string `puppet:"name=>'audio_packing_mode'"` + BitRate *string `puppet:"name=>'bit_rate'"` + Channels *string + Codec *string + SampleRate *string `puppet:"name=>'sample_rate'"` +} + +type Auth struct { + Type string + Resource *string +} + +type AuthenticationConfiguration struct { + AllowedIpRange *string `puppet:"name=>'allowed_ip_range'"` + SecretToken *string `puppet:"name=>'secret_token'"` +} + +type Authority struct { + Data *string +} + +type AuthorityConfiguration struct { + KeyAlgorithm string `puppet:"name=>'key_algorithm'"` + SigningAlgorithm string `puppet:"name=>'signing_algorithm'"` + Subject *Subject +} + +type BackupConfiguration struct { + BucketArn string `puppet:"name=>'bucket_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + BufferInterval *int64 `puppet:"name=>'buffer_interval'"` + BufferSize *int64 `puppet:"name=>'buffer_size'"` + CloudwatchLoggingOptions *Options `puppet:"name=>'cloudwatch_logging_options'"` + CompressionFormat *string `puppet:"name=>'compression_format'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + Prefix *string +} + +type Balancer struct { + ContainerName string `puppet:"name=>'container_name'"` + ContainerPort int64 `puppet:"name=>'container_port'"` + ElbName *string `puppet:"name=>'elb_name'"` + TargetGroupArn *string `puppet:"name=>'target_group_arn'"` +} + +type BalancerInfo struct { + ElbInfo *[]Info `puppet:"name=>'elb_info'"` + TargetGroupInfo *[]Info `puppet:"name=>'target_group_info'"` + TargetGroupPairInfo *PairInfo `puppet:"name=>'target_group_pair_info'"` +} + +type BatchTarget struct { + JobDefinition string `puppet:"name=>'job_definition'"` + JobName string `puppet:"name=>'job_name'"` + ArraySize *int64 `puppet:"name=>'array_size'"` + JobAttempts *int64 `puppet:"name=>'job_attempts'"` +} + +type Behavior struct { + AllowedMethods []string `puppet:"name=>'allowed_methods'"` + CachedMethods []string `puppet:"name=>'cached_methods'"` + TargetOriginId string `puppet:"name=>'target_origin_id'"` + ViewerProtocolPolicy string `puppet:"name=>'viewer_protocol_policy'"` + Compress *bool + DefaultTtl *int64 `puppet:"name=>'default_ttl'"` + FieldLevelEncryptionId *string `puppet:"name=>'field_level_encryption_id'"` + ForwardedValues *Values `puppet:"name=>'forwarded_values'"` + LambdaFunctionAssociation *[]Association 
`puppet:"name=>'lambda_function_association'"` + MaxTtl *int64 `puppet:"name=>'max_ttl'"` + MinTtl *int64 `puppet:"name=>'min_ttl'"` + SmoothStreaming *bool `puppet:"name=>'smooth_streaming'"` + TrustedSigners *[]string `puppet:"name=>'trusted_signers'"` +} + +type BlockDevice struct { + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Iops *int64 + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type BookmarksEncryption struct { + JobBookmarksEncryptionMode *string `puppet:"name=>'job_bookmarks_encryption_mode'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` +} + +type BootstrapAction struct { + Name string + Path string + Args *[]string +} + +type BounceAction struct { + Message string + Position int64 + Sender string + SmtpReplyCode string `puppet:"name=>'smtp_reply_code'"` + StatusCode *string `puppet:"name=>'status_code'"` + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type BrokerConfiguration struct { + Id *string + Revision *int64 +} + +type BrokerLogs struct { + Audit *bool + General *bool +} + +type Bucket struct { + BucketArn string `puppet:"name=>'bucket_arn'"` + Format string + AccountId *string `puppet:"name=>'account_id'"` + Encryption *BucketEncryption + Prefix *string +} + +type BucketEncryption struct { + SseKms *Kms `puppet:"name=>'sse_kms'"` + SseS3 *SseS3 `puppet:"name=>'sse_s3'"` +} + +type BucketLogging struct { + TargetBucket string `puppet:"name=>'target_bucket'"` + TargetPrefix *string `puppet:"name=>'target_prefix'"` +} + +type Cache struct { + Location *string + Type *string +} + +type CacheBehavior struct { + AllowedMethods []string `puppet:"name=>'allowed_methods'"` + CachedMethods []string `puppet:"name=>'cached_methods'"` + PathPattern string `puppet:"name=>'path_pattern'"` + TargetOriginId string `puppet:"name=>'target_origin_id'"` + ViewerProtocolPolicy string `puppet:"name=>'viewer_protocol_policy'"` + Compress *bool + DefaultTtl *int64 `puppet:"name=>'default_ttl'"` + FieldLevelEncryptionId *string `puppet:"name=>'field_level_encryption_id'"` + ForwardedValues *Values `puppet:"name=>'forwarded_values'"` + LambdaFunctionAssociation *[]Association `puppet:"name=>'lambda_function_association'"` + MaxTtl *int64 `puppet:"name=>'max_ttl'"` + MinTtl *int64 `puppet:"name=>'min_ttl'"` + SmoothStreaming *bool `puppet:"name=>'smooth_streaming'"` + TrustedSigners *[]string `puppet:"name=>'trusted_signers'"` +} + +type CampaignHook struct { + LambdaFunctionName *string `puppet:"name=>'lambda_function_name'"` + Mode *string + WebUrl *string `puppet:"name=>'web_url'"` +} + +type CanaryLinear struct { + Interval *int64 + Percentage *int64 +} + +type CapacitySpecification struct { + DefaultTargetCapacityType string `puppet:"name=>'default_target_capacity_type'"` + TotalTargetCapacity int64 `puppet:"name=>'total_target_capacity'"` + OnDemandTargetCapacity *int64 `puppet:"name=>'on_demand_target_capacity'"` + SpotTargetCapacity *int64 `puppet:"name=>'spot_target_capacity'"` +} + +type Certificate struct { + AcmCertificateArn *string `puppet:"name=>'acm_certificate_arn'"` + CloudfrontDefaultCertificate *bool `puppet:"name=>'cloudfront_default_certificate'"` + IamCertificateId *string `puppet:"name=>'iam_certificate_id'"` + MinimumProtocolVersion *string `puppet:"name=>'minimum_protocol_version'"` + SslSupportMethod *string `puppet:"name=>'ssl_support_method'"` +} + +type Certificates struct { + AwsHardwareCertificate *string `puppet:"name=>'aws_hardware_certificate'"` + 
ClusterCertificate *string `puppet:"name=>'cluster_certificate'"` + ClusterCsr *string `puppet:"name=>'cluster_csr'"` + HsmCertificate *string `puppet:"name=>'hsm_certificate'"` + ManufacturerHardwareCertificate *string `puppet:"name=>'manufacturer_hardware_certificate'"` +} + +type ChangePolicy struct { + DeleteBehavior *string `puppet:"name=>'delete_behavior'"` + UpdateBehavior *string `puppet:"name=>'update_behavior'"` +} + +type Check struct { + HealthyThreshold *int64 `puppet:"name=>'healthy_threshold'"` + Interval *int64 + Matcher *string + Path *string + Port *string + Protocol *string + Timeout *int64 + UnhealthyThreshold *int64 `puppet:"name=>'unhealthy_threshold'"` +} + +type CheckConfig struct { + FailureThreshold *int64 `puppet:"name=>'failure_threshold'"` + ResourcePath *string `puppet:"name=>'resource_path'"` + Type *string +} + +type Classifier struct { + Classification string + GrokPattern string `puppet:"name=>'grok_pattern'"` + CustomPatterns *string `puppet:"name=>'custom_patterns'"` +} + +type CloudwatchDestination struct { + DefaultValue string `puppet:"name=>'default_value'"` + DimensionName string `puppet:"name=>'dimension_name'"` + ValueSource string `puppet:"name=>'value_source'"` +} + +type CloudwatchEncryption struct { + CloudwatchEncryptionMode *string `puppet:"name=>'cloudwatch_encryption_mode'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` +} + +type ClusterConfig struct { + DedicatedMasterCount *int64 `puppet:"name=>'dedicated_master_count'"` + DedicatedMasterEnabled *bool `puppet:"name=>'dedicated_master_enabled'"` + DedicatedMasterType *string `puppet:"name=>'dedicated_master_type'"` + InstanceCount *int64 `puppet:"name=>'instance_count'"` + InstanceType *string `puppet:"name=>'instance_type'"` + ZoneAwarenessEnabled *bool `puppet:"name=>'zone_awareness_enabled'"` +} + +type ClusterStep struct { + ActionOnFailure string `puppet:"name=>'action_on_failure'"` + Name string + HadoopJarStep *Step `puppet:"name=>'hadoop_jar_step'"` +} + +type ClusterVpcConfig struct { + SubnetIds []string `puppet:"name=>'subnet_ids'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type CodecOptions struct { + BitDepth *string `puppet:"name=>'bit_depth'"` + BitOrder *string `puppet:"name=>'bit_order'"` + Profile *string + Signed *string +} + +type Cognito struct { + UserPoolArn string `puppet:"name=>'user_pool_arn'"` + UserPoolClientId string `puppet:"name=>'user_pool_client_id'"` + UserPoolDomain string `puppet:"name=>'user_pool_domain'"` + AuthenticationRequestExtraParams *map[string]string `puppet:"name=>'authentication_request_extra_params'"` + OnUnauthenticatedRequest *string `puppet:"name=>'on_unauthenticated_request'"` + Scope *string + SessionCookieName *string `puppet:"name=>'session_cookie_name'"` + SessionTimeout *int64 `puppet:"name=>'session_timeout'"` +} + +type CognitoOptions struct { + IdentityPoolId string `puppet:"name=>'identity_pool_id'"` + RoleArn string `puppet:"name=>'role_arn'"` + UserPoolId string `puppet:"name=>'user_pool_id'"` + Enabled *bool +} + +type Columns struct { + Name string + SqlType string `puppet:"name=>'sql_type'"` + Mapping *string +} + +type Command struct { + ScriptLocation string `puppet:"name=>'script_location'"` + Name *string +} + +type Condition struct { + Field *string + Values *[]string +} + +type Conditions struct { + JobName string `puppet:"name=>'job_name'"` + State string + LogicalOperator *string `puppet:"name=>'logical_operator'"` +} + +type Config 
struct { + Bucket *string + StorageClass *string `puppet:"name=>'storage_class'"` +} + +type ConfigLambda struct { + FunctionArn string `puppet:"name=>'function_arn'"` +} + +type ConfigOverride struct { + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + InstanceType *string `puppet:"name=>'instance_type'"` + MaxPrice *string `puppet:"name=>'max_price'"` + Priority *float64 + SubnetId *string `puppet:"name=>'subnet_id'"` + WeightedCapacity *float64 `puppet:"name=>'weighted_capacity'"` +} + +type Configuration struct { + Enabled *bool + Processors *[]Processors +} + +type ConfigurationCustomizedMetricSpecification struct { + MetricName string `puppet:"name=>'metric_name'"` + Namespace string + Statistic string + MetricDimension *[]GroupParameter `puppet:"name=>'metric_dimension'"` + Unit *string +} + +type ConfigurationEbsBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Encrypted *bool + Iops *int64 + NoDevice *bool `puppet:"name=>'no_device'"` + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type ConfigurationEncryptionConfiguration struct { + CloudwatchEncryption *CloudwatchEncryption `puppet:"name=>'cloudwatch_encryption'"` + JobBookmarksEncryption *BookmarksEncryption `puppet:"name=>'job_bookmarks_encryption'"` + S3Encryption *S3Encryption `puppet:"name=>'s3_encryption'"` +} + +type ConfigurationRule struct { + DefaultRetention Retention `puppet:"name=>'default_retention'"` +} + +type ConnectConfig struct { + Issuer string + AuthTtl *int64 `puppet:"name=>'auth_ttl'"` + ClientId *string `puppet:"name=>'client_id'"` + IatTtl *int64 `puppet:"name=>'iat_ttl'"` +} + +type ConnectSettings struct { + CustomerDnsIps []string `puppet:"name=>'customer_dns_ips'"` + CustomerUsername string `puppet:"name=>'customer_username'"` + SubnetIds []string `puppet:"name=>'subnet_ids'"` + VpcId string `puppet:"name=>'vpc_id'"` +} + +type Constraint struct { + Type string + Value string +} + +type Constraints struct { + Type string + Expression *string +} + +type ConversionConfiguration struct { + Enabled *bool + InputFormatConfiguration *FormatConfiguration `puppet:"name=>'input_format_configuration'"` + OutputFormatConfiguration *OutputFormatConfiguration `puppet:"name=>'output_format_configuration'"` + SchemaConfiguration *SchemaConfiguration `puppet:"name=>'schema_configuration'"` +} + +type CookbooksSource struct { + Type string + Url string + Password *string + Revision *string + SshKey *string `puppet:"name=>'ssh_key'"` + Username *string +} + +type Cookies struct { + Forward string + WhitelistedNames *[]string `puppet:"name=>'whitelisted_names'"` +} + +type Copy struct { + DestinationRegion string `puppet:"name=>'destination_region'"` + GrantName *string `puppet:"name=>'grant_name'"` + RetentionPeriod *int64 `puppet:"name=>'retention_period'"` +} + +type CorsRule struct { + AllowedMethods []string `puppet:"name=>'allowed_methods'"` + AllowedOrigins []string `puppet:"name=>'allowed_origins'"` + AllowedHeaders *[]string `puppet:"name=>'allowed_headers'"` + ExposeHeaders *[]string `puppet:"name=>'expose_headers'"` + MaxAgeSeconds *int64 `puppet:"name=>'max_age_seconds'"` +} + +type CreateRule struct { + Interval int64 + IntervalUnit *string `puppet:"name=>'interval_unit'"` + Times *[]string +} + +type Criteria struct { + SseKmsEncryptedObjects *Encryption 
`puppet:"name=>'sse_kms_encrypted_objects'"` +} + +type CrlConfiguration struct { + ExpirationInDays int64 `puppet:"name=>'expiration_in_days'"` + CustomCname *string `puppet:"name=>'custom_cname'"` + Enabled *bool + S3BucketName *string `puppet:"name=>'s3_bucket_name'"` +} + +type Csv struct { + RecordColumnDelimiter string `puppet:"name=>'record_column_delimiter'"` + RecordRowDelimiter string `puppet:"name=>'record_row_delimiter'"` +} + +type CustomConfig struct { + FailureThreshold *int64 `puppet:"name=>'failure_threshold'"` +} + +type CustomizedMetricSpecification struct { + MetricName string `puppet:"name=>'metric_name'"` + Namespace string + Statistic string + Dimensions *[]GroupParameter + Unit *string +} + +type DataSources struct { + TableName string `puppet:"name=>'table_name'"` + Id *string + S3 *SourcesS3 + Schema *Schema +} + +type DeInfo struct { + Name *string + Parameters *map[string]string + SerializationLibrary *string `puppet:"name=>'serialization_library'"` +} + +type Default struct { + SseAlgorithm string `puppet:"name=>'sse_algorithm'"` + KmsMasterKeyId *string `puppet:"name=>'kms_master_key_id'"` +} + +type DefaultAction struct { + Type string + AuthenticateCognito *Cognito `puppet:"name=>'authenticate_cognito'"` + AuthenticateOidc *Oidc `puppet:"name=>'authenticate_oidc'"` + FixedResponse *Response `puppet:"name=>'fixed_response'"` + Order *int64 + Redirect *Redirect + TargetGroupArn *string `puppet:"name=>'target_group_arn'"` +} + +type Defaults struct { + DirectoryMode *string `puppet:"name=>'directory_mode'"` + FileMode *string `puppet:"name=>'file_mode'"` + GroupId *int64 `puppet:"name=>'group_id'"` + OwnerId *int64 `puppet:"name=>'owner_id'"` +} + +type DefinitionVolume struct { + Name string + DockerVolumeConfiguration *VolumeConfiguration `puppet:"name=>'docker_volume_configuration'"` + HostPath *string `puppet:"name=>'host_path'"` +} + +type DemandOptions struct { + AllocationStrategy *string `puppet:"name=>'allocation_strategy'"` +} + +type DeploymentConfig struct { + DeploymentReadyOption *Option `puppet:"name=>'deployment_ready_option'"` + GreenFleetProvisioningOption *ProvisioningOption `puppet:"name=>'green_fleet_provisioning_option'"` + TerminateBlueInstancesOnDeploymentSuccess *Success `puppet:"name=>'terminate_blue_instances_on_deployment_success'"` +} + +type Descriptor struct { + BucketColumns *[]string `puppet:"name=>'bucket_columns'"` + Columns *[]KeysColumns + Compressed *bool + InputFormat *string `puppet:"name=>'input_format'"` + Location *string + NumberOfBuckets *int64 `puppet:"name=>'number_of_buckets'"` + OutputFormat *string `puppet:"name=>'output_format'"` + Parameters *map[string]string + SerDeInfo *DeInfo `puppet:"name=>'ser_de_info'"` + SkewedInfo *SkewedInfo `puppet:"name=>'skewed_info'"` + SortColumns *[]SortColumns `puppet:"name=>'sort_columns'"` + StoredAsSubDirectories *bool `puppet:"name=>'stored_as_sub_directories'"` +} + +type Deserializer struct { + HiveJsonSerDe *SerDe `puppet:"name=>'hive_json_ser_de'"` + OpenXJsonSerDe *JsonSerDe `puppet:"name=>'open_x_json_ser_de'"` +} + +type Destination struct { + Bucket string + AccessControlTranslation *Translation `puppet:"name=>'access_control_translation'"` + AccountId *string `puppet:"name=>'account_id'"` + ReplicaKmsKeyId *string `puppet:"name=>'replica_kms_key_id'"` + StorageClass *string `puppet:"name=>'storage_class'"` +} + +type Detail struct { + EventSource *string `puppet:"name=>'event_source'"` + MaximumExecutionFrequency *string 
`puppet:"name=>'maximum_execution_frequency'"` + MessageType *string `puppet:"name=>'message_type'"` +} + +type Details struct { + ResourceTypes []string `puppet:"name=>'resource_types'"` + Schedule []Schedule + TargetTags map[string]string `puppet:"name=>'target_tags'"` +} + +type Device struct { + DeviceName string `puppet:"name=>'device_name'"` + VirtualName string `puppet:"name=>'virtual_name'"` +} + +type DeviceConfiguration struct { + ChallengeRequiredOnNewDevice *bool `puppet:"name=>'challenge_required_on_new_device'"` + DeviceOnlyRememberedOnUserPrompt *bool `puppet:"name=>'device_only_remembered_on_user_prompt'"` +} + +type DirectorySettings struct { + DomainName string `puppet:"name=>'domain_name'"` + Password string + Username string +} + +type Discovery struct { + Dns Dns +} + +type Distribution struct { + OnDemandAllocationStrategy *string `puppet:"name=>'on_demand_allocation_strategy'"` + OnDemandBaseCapacity *int64 `puppet:"name=>'on_demand_base_capacity'"` + OnDemandPercentageAboveBaseCapacity *int64 `puppet:"name=>'on_demand_percentage_above_base_capacity'"` + SpotAllocationStrategy *string `puppet:"name=>'spot_allocation_strategy'"` + SpotInstancePools *int64 `puppet:"name=>'spot_instance_pools'"` + SpotMaxPrice *string `puppet:"name=>'spot_max_price'"` +} + +type Dns struct { + ServiceName string `puppet:"name=>'service_name'"` +} + +type DnsConfig struct { + DnsRecords []Records `puppet:"name=>'dns_records'"` + NamespaceId string `puppet:"name=>'namespace_id'"` + RoutingPolicy *string `puppet:"name=>'routing_policy'"` +} + +type DocumentParameter struct { + DefaultValue *string `puppet:"name=>'default_value'"` + Description *string + Name *string + Type *string +} + +type DocumentPermissions struct { + AccountIds string `puppet:"name=>'account_ids'"` + Type string +} + +type Dynamodb struct { + HashKeyField string `puppet:"name=>'hash_key_field'"` + HashKeyValue string `puppet:"name=>'hash_key_value'"` + RangeKeyField string `puppet:"name=>'range_key_field'"` + RangeKeyValue string `puppet:"name=>'range_key_value'"` + RoleArn string `puppet:"name=>'role_arn'"` + TableName string `puppet:"name=>'table_name'"` + HashKeyType *string `puppet:"name=>'hash_key_type'"` + PayloadField *string `puppet:"name=>'payload_field'"` + RangeKeyType *string `puppet:"name=>'range_key_type'"` +} + +type DynamodbConfig struct { + TableName string `puppet:"name=>'table_name'"` + Region *string + UseCallerCredentials *bool `puppet:"name=>'use_caller_credentials'"` +} + +type DynamodbTarget struct { + Path string +} + +type Ebs struct { + DeleteOnTermination *string `puppet:"name=>'delete_on_termination'"` + Encrypted *string + Iops *int64 + KmsKeyId *string `puppet:"name=>'kms_key_id'"` + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type EbsBlockDevice struct { + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + DeviceName *string `puppet:"name=>'device_name'"` + Encrypted *bool + Iops *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type EbsConfig struct { + Size int64 + Type string + Iops *int64 + VolumesPerInstance *int64 `puppet:"name=>'volumes_per_instance'"` +} + +type EbsOptions struct { + EbsEnabled bool `puppet:"name=>'ebs_enabled'"` + Iops *int64 + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string 
`puppet:"name=>'volume_type'"` +} + +type Ec2Config struct { + SecurityGroupArns []string `puppet:"name=>'security_group_arns'"` + SubnetArn string `puppet:"name=>'subnet_arn'"` +} + +type EcsTarget struct { + TaskDefinitionArn string `puppet:"name=>'task_definition_arn'"` + Group *string + LaunchType *string `puppet:"name=>'launch_type'"` + NetworkConfiguration *NetworkConfiguration `puppet:"name=>'network_configuration'"` + PlatformVersion *string `puppet:"name=>'platform_version'"` + TaskCount *int64 `puppet:"name=>'task_count'"` +} + +type EgressIngress struct { + Action string + FromPort int64 `puppet:"name=>'from_port'"` + Protocol string + RuleNo int64 `puppet:"name=>'rule_no'"` + ToPort int64 `puppet:"name=>'to_port'"` + CidrBlock *string `puppet:"name=>'cidr_block'"` + IcmpCode *int64 `puppet:"name=>'icmp_code'"` + IcmpType *int64 `puppet:"name=>'icmp_type'"` + Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"` +} + +type Elasticsearch struct { + Endpoint string + Id string + Index string + RoleArn string `puppet:"name=>'role_arn'"` + Type string +} + +type ElasticsearchConfig struct { + Endpoint string + Region *string +} + +type ElasticsearchConfiguration struct { + DomainArn string `puppet:"name=>'domain_arn'"` + IndexName string `puppet:"name=>'index_name'"` + RoleArn string `puppet:"name=>'role_arn'"` + BufferingInterval *int64 `puppet:"name=>'buffering_interval'"` + BufferingSize *int64 `puppet:"name=>'buffering_size'"` + CloudwatchLoggingOptions *Options `puppet:"name=>'cloudwatch_logging_options'"` + IndexRotationPeriod *string `puppet:"name=>'index_rotation_period'"` + ProcessingConfiguration *Configuration `puppet:"name=>'processing_configuration'"` + RetryDuration *int64 `puppet:"name=>'retry_duration'"` + S3BackupMode *string `puppet:"name=>'s3_backup_mode'"` + TypeName *string `puppet:"name=>'type_name'"` +} + +type ElbListener struct { + InstancePort int64 `puppet:"name=>'instance_port'"` + InstanceProtocol string `puppet:"name=>'instance_protocol'"` + LbPort int64 `puppet:"name=>'lb_port'"` + LbProtocol string `puppet:"name=>'lb_protocol'"` + SslCertificateId *string `puppet:"name=>'ssl_certificate_id'"` +} + +type EmailConfiguration struct { + ReplyToEmailAddress *string `puppet:"name=>'reply_to_email_address'"` + SourceArn *string `puppet:"name=>'source_arn'"` +} + +type Encryption struct { + Enabled *bool +} + +type EncryptionConfiguration struct { + EncryptionOption string `puppet:"name=>'encryption_option'"` + KmsKey *string `puppet:"name=>'kms_key'"` +} + +type EncryptionConfigurationRule struct { + ApplyServerSideEncryptionByDefault *Default `puppet:"name=>'apply_server_side_encryption_by_default'"` +} + +type EndpointConfiguration struct { + Types []string +} + +type Endpoints struct { + Password *string + Url *string + Username *string +} + +type Entry struct { + DnsName *string `puppet:"name=>'dns_name'"` + HostedZoneId *string `puppet:"name=>'hosted_zone_id'"` +} + +type Environment struct { + ComputeType string `puppet:"name=>'compute_type'"` + Image string + Type string + Certificate *string + EnvironmentVariable *[]Variable `puppet:"name=>'environment_variable'"` + PrivilegedMode *bool `puppet:"name=>'privileged_mode'"` +} + +type EphemeralBlockDevice struct { + DeviceName *string `puppet:"name=>'device_name'"` + VirtualName *string `puppet:"name=>'virtual_name'"` +} + +type ErrorResponse struct { + ErrorCode int64 `puppet:"name=>'error_code'"` + ErrorCachingMinTtl *int64 `puppet:"name=>'error_caching_min_ttl'"` + ResponseCode *int64 
`puppet:"name=>'response_code'"` + ResponsePagePath *string `puppet:"name=>'response_page_path'"` +} + +type Expiration struct { + Date *string + Days *int64 + ExpiredObjectDeleteMarker *bool `puppet:"name=>'expired_object_delete_marker'"` +} + +type Filter struct { + Key *string + Type *string + Value *string +} + +type Firehose struct { + DeliveryStreamName string `puppet:"name=>'delivery_stream_name'"` + RoleArn string `puppet:"name=>'role_arn'"` + Separator *string +} + +type FirehoseStreamLambda struct { + ResourceArn string `puppet:"name=>'resource_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` +} + +type Format struct { + MappingParameters *MappingParameters `puppet:"name=>'mapping_parameters'"` + RecordFormatType *string `puppet:"name=>'record_format_type'"` +} + +type FormatConfiguration struct { + Deserializer *Deserializer +} + +type Function struct { + Events []string + FilterPrefix *string `puppet:"name=>'filter_prefix'"` + FilterSuffix *string `puppet:"name=>'filter_suffix'"` + Id *string + LambdaFunctionArn *string `puppet:"name=>'lambda_function_arn'"` +} + +type FunctionEnvironment struct { + Variables *map[string]string +} + +type FunctionVpcConfig struct { + SecurityGroupIds []string `puppet:"name=>'security_group_ids'"` + SubnetIds []string `puppet:"name=>'subnet_ids'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type GrantConstraints struct { + EncryptionContextEquals *map[string]string `puppet:"name=>'encryption_context_equals'"` + EncryptionContextSubset *map[string]string `puppet:"name=>'encryption_context_subset'"` +} + +type Group struct { + Name string +} + +type GroupEbsConfig struct { + Size int64 + Type string + Iops *int64 + VolumesPerInstance *int64 `puppet:"name=>'volumes_per_instance'"` +} + +type GroupEgressIngress struct { + FromPort int64 `puppet:"name=>'from_port'"` + Protocol string + ToPort int64 `puppet:"name=>'to_port'"` + CidrBlocks *[]string `puppet:"name=>'cidr_blocks'"` + Description *string + Ipv6CidrBlocks *[]string `puppet:"name=>'ipv6_cidr_blocks'"` + PrefixListIds *[]string `puppet:"name=>'prefix_list_ids'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + Self *bool +} + +type GroupIngress struct { + Cidr *string + SecurityGroupName *string `puppet:"name=>'security_group_name'"` + SecurityGroupOwnerId *string `puppet:"name=>'security_group_owner_id'"` +} + +type GroupOption struct { + OptionName string `puppet:"name=>'option_name'"` + DbSecurityGroupMemberships *[]string `puppet:"name=>'db_security_group_memberships'"` + OptionSettings *[]GroupParameter `puppet:"name=>'option_settings'"` + Port *int64 + Version *string + VpcSecurityGroupMemberships *[]string `puppet:"name=>'vpc_security_group_memberships'"` +} + +type GroupParameter struct { + Name string + Value string +} + +type HeaderAction struct { + HeaderName string `puppet:"name=>'header_name'"` + HeaderValue string `puppet:"name=>'header_value'"` + Position int64 +} + +type HealthCheck struct { + HealthyThreshold int64 `puppet:"name=>'healthy_threshold'"` + Interval int64 + Target string + Timeout int64 + UnhealthyThreshold int64 `puppet:"name=>'unhealthy_threshold'"` +} + +type Hook struct { + LifecycleTransition string `puppet:"name=>'lifecycle_transition'"` + Name string + DefaultResult *string `puppet:"name=>'default_result'"` + HeartbeatTimeout *int64 `puppet:"name=>'heartbeat_timeout'"` + NotificationMetadata *string `puppet:"name=>'notification_metadata'"` + NotificationTargetArn *string `puppet:"name=>'notification_target_arn'"` + RoleArn *string 
`puppet:"name=>'role_arn'"` +} + +type Hosts struct { + Type *string + Value *int64 +} + +type HttpConfig struct { + Endpoint string +} + +type HttpRoute struct { + Action RouteAction + Match RouteMatch +} + +type Import struct { + BucketName string `puppet:"name=>'bucket_name'"` + IngestionRole string `puppet:"name=>'ingestion_role'"` + SourceEngine string `puppet:"name=>'source_engine'"` + SourceEngineVersion string `puppet:"name=>'source_engine_version'"` + BucketPrefix *string `puppet:"name=>'bucket_prefix'"` +} + +type Index struct { + HashKey string `puppet:"name=>'hash_key'"` + Name string + ProjectionType string `puppet:"name=>'projection_type'"` + NonKeyAttributes *[]string `puppet:"name=>'non_key_attributes'"` + RangeKey *string `puppet:"name=>'range_key'"` + ReadCapacity *int64 `puppet:"name=>'read_capacity'"` + WriteCapacity *int64 `puppet:"name=>'write_capacity'"` +} + +type Info struct { + Name *string +} + +type Ingest struct { + IngestEndpoints *[]Endpoints `puppet:"name=>'ingest_endpoints'"` +} + +type Ingress struct { + Cidr *string + SecurityGroupId *string `puppet:"name=>'security_group_id'"` + SecurityGroupName *string `puppet:"name=>'security_group_name'"` + SecurityGroupOwnerId *string `puppet:"name=>'security_group_owner_id'"` +} + +type Inputs struct { + NamePrefix string `puppet:"name=>'name_prefix'"` + Id *string + KinesisFirehose *FirehoseStreamLambda `puppet:"name=>'kinesis_firehose'"` + KinesisStream *FirehoseStreamLambda `puppet:"name=>'kinesis_stream'"` + Parallelism *RuleParallelism + ProcessingConfiguration *ProcessingConfiguration `puppet:"name=>'processing_configuration'"` + Schema *Schema + StartingPositionConfiguration *[]PositionConfiguration `puppet:"name=>'starting_position_configuration'"` + StreamNames *[]string `puppet:"name=>'stream_names'"` +} + +type InstanceEbsBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Iops *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type InstanceEphemeralBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + NoDevice *bool `puppet:"name=>'no_device'"` + VirtualName *string `puppet:"name=>'virtual_name'"` +} + +type InstanceGroup struct { + InstanceRole string `puppet:"name=>'instance_role'"` + InstanceType string `puppet:"name=>'instance_type'"` + AutoscalingPolicy *string `puppet:"name=>'autoscaling_policy'"` + BidPrice *string `puppet:"name=>'bid_price'"` + EbsConfig *[]EbsConfig `puppet:"name=>'ebs_config'"` + Id *string + InstanceCount *int64 `puppet:"name=>'instance_count'"` + Name *string +} + +type Instances struct { + ConsoleUrl *string `puppet:"name=>'console_url'"` + Endpoints *[]string + IpAddress *string `puppet:"name=>'ip_address'"` +} + +type Interface struct { + DeviceIndex int64 `puppet:"name=>'device_index'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` +} + +type Interfaces struct { + AssociatePublicIpAddress *bool `puppet:"name=>'associate_public_ip_address'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Description *string + DeviceIndex *int64 `puppet:"name=>'device_index'"` + Ipv4AddressCount *int64 `puppet:"name=>'ipv4_address_count'"` + Ipv4Addresses *[]string `puppet:"name=>'ipv4_addresses'"` + Ipv6AddressCount *int64 
`puppet:"name=>'ipv6_address_count'"` + Ipv6Addresses *[]string `puppet:"name=>'ipv6_addresses'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type InventoryDestination struct { + Bucket Bucket +} + +type InventoryFilter struct { + Prefix *string +} + +type InventorySchedule struct { + Frequency string +} + +type JdbcTarget struct { + ConnectionName string `puppet:"name=>'connection_name'"` + Path string + Exclusions *[]string +} + +type Json struct { + RecordRowPath string `puppet:"name=>'record_row_path'"` +} + +type JsonClassifier struct { + JsonPath string `puppet:"name=>'json_path'"` +} + +type JsonSerDe struct { + CaseInsensitive *bool `puppet:"name=>'case_insensitive'"` + ColumnToJsonKeyMappings *map[string]string `puppet:"name=>'column_to_json_key_mappings'"` + ConvertDotsInJsonKeysToUnderscores *bool `puppet:"name=>'convert_dots_in_json_keys_to_underscores'"` +} + +type KerberosAttributes struct { + KdcAdminPassword string `puppet:"name=>'kdc_admin_password'"` + Realm string + AdDomainJoinPassword *string `puppet:"name=>'ad_domain_join_password'"` + AdDomainJoinUser *string `puppet:"name=>'ad_domain_join_user'"` + CrossRealmTrustPrincipalPassword *string `puppet:"name=>'cross_realm_trust_principal_password'"` +} + +type Key struct { + Id string + Type string +} + +type KeysColumns struct { + Name string + Comment *string + Type *string +} + +type Kinesis struct { + RoleArn string `puppet:"name=>'role_arn'"` + StreamName string `puppet:"name=>'stream_name'"` + PartitionKey *string `puppet:"name=>'partition_key'"` +} + +type KinesisDestination struct { + RoleArn string `puppet:"name=>'role_arn'"` + StreamArn string `puppet:"name=>'stream_arn'"` +} + +type KinesisTarget struct { + PartitionKeyPath *string `puppet:"name=>'partition_key_path'"` +} + +type Kms struct { + KeyId string `puppet:"name=>'key_id'"` +} + +type LambdaAction struct { + FunctionArn string `puppet:"name=>'function_arn'"` + Position int64 + InvocationType *string `puppet:"name=>'invocation_type'"` + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type LambdaConfig struct { + CreateAuthChallenge *string `puppet:"name=>'create_auth_challenge'"` + CustomMessage *string `puppet:"name=>'custom_message'"` + DefineAuthChallenge *string `puppet:"name=>'define_auth_challenge'"` + PostAuthentication *string `puppet:"name=>'post_authentication'"` + PostConfirmation *string `puppet:"name=>'post_confirmation'"` + PreAuthentication *string `puppet:"name=>'pre_authentication'"` + PreSignUp *string `puppet:"name=>'pre_sign_up'"` + PreTokenGeneration *string `puppet:"name=>'pre_token_generation'"` + UserMigration *string `puppet:"name=>'user_migration'"` + VerifyAuthChallengeResponse *string `puppet:"name=>'verify_auth_challenge_response'"` +} + +type LatencyPolicy struct { + MaximumIndividualPlayerLatencyMilliseconds int64 `puppet:"name=>'maximum_individual_player_latency_milliseconds'"` + PolicyDurationSeconds *int64 `puppet:"name=>'policy_duration_seconds'"` +} + +type LatencyRoutingPolicy struct { + Region string +} + +type LaunchSpecification struct { + Ami string + InstanceType string `puppet:"name=>'instance_type'"` + AssociatePublicIpAddress *bool `puppet:"name=>'associate_public_ip_address'"` + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + EbsBlockDevice *[]SpecificationEbsBlockDevice 
`puppet:"name=>'ebs_block_device'"` + EbsOptimized *bool `puppet:"name=>'ebs_optimized'"` + EphemeralBlockDevice *[]Device `puppet:"name=>'ephemeral_block_device'"` + IamInstanceProfile *string `puppet:"name=>'iam_instance_profile'"` + IamInstanceProfileArn *string `puppet:"name=>'iam_instance_profile_arn'"` + KeyName *string `puppet:"name=>'key_name'"` + Monitoring *bool + PlacementGroup *string `puppet:"name=>'placement_group'"` + PlacementTenancy *string `puppet:"name=>'placement_tenancy'"` + RootBlockDevice *[]BlockDevice `puppet:"name=>'root_block_device'"` + SpotPrice *string `puppet:"name=>'spot_price'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Tags *map[string]string + UserData *string `puppet:"name=>'user_data'"` + VpcSecurityGroupIds *[]string `puppet:"name=>'vpc_security_group_ids'"` + WeightedCapacity *string `puppet:"name=>'weighted_capacity'"` +} + +type LetterConfig struct { + TargetArn string `puppet:"name=>'target_arn'"` +} + +type LicenseSpecification struct { + LicenseConfigurationArn string `puppet:"name=>'license_configuration_arn'"` +} + +type Lifecycle struct { + ServiceRole string `puppet:"name=>'service_role'"` + DeleteSourceFromS3 *bool `puppet:"name=>'delete_source_from_s3'"` + MaxAgeInDays *int64 `puppet:"name=>'max_age_in_days'"` + MaxCount *int64 `puppet:"name=>'max_count'"` +} + +type LifecycleRule struct { + Enabled bool + AbortIncompleteMultipartUploadDays *int64 `puppet:"name=>'abort_incomplete_multipart_upload_days'"` + Expiration *[]Expiration + Id *string + NoncurrentVersionExpiration *[]VersionExpiration `puppet:"name=>'noncurrent_version_expiration'"` + NoncurrentVersionTransition *[]Transition `puppet:"name=>'noncurrent_version_transition'"` + Prefix *string + Tags *map[string]string + Transition *[]RuleTransition +} + +type LimitPolicy struct { + NewGameSessionsPerCreator *int64 `puppet:"name=>'new_game_sessions_per_creator'"` + PolicyPeriodInMinutes *int64 `puppet:"name=>'policy_period_in_minutes'"` +} + +type Limits struct { + Daily *int64 + MaximumDuration *int64 `puppet:"name=>'maximum_duration'"` + MessagesPerSecond *int64 `puppet:"name=>'messages_per_second'"` + Total *int64 +} + +type Listener struct { + PortMapping PortMapping `puppet:"name=>'port_mapping'"` +} + +type Location struct { + Type string + Method *string + Name *string + Path *string + StatusCode *string `puppet:"name=>'status_code'"` +} + +type LockConfiguration struct { + ObjectLockEnabled string `puppet:"name=>'object_lock_enabled'"` + Rule *ConfigurationRule +} + +type LogConfig struct { + CloudwatchLogsRoleArn string `puppet:"name=>'cloudwatch_logs_role_arn'"` + FieldLogLevel string `puppet:"name=>'field_log_level'"` +} + +type LogSettings struct { + DestinationArn string `puppet:"name=>'destination_arn'"` + Format string +} + +type Logging struct { + Enable bool + BucketName *string `puppet:"name=>'bucket_name'"` + S3KeyPrefix *string `puppet:"name=>'s3_key_prefix'"` +} + +type LoggingConfig struct { + Bucket string + IncludeCookies *bool `puppet:"name=>'include_cookies'"` + Prefix *string +} + +type LoggingInfo struct { + S3BucketName string `puppet:"name=>'s3_bucket_name'"` + S3Region string `puppet:"name=>'s3_region'"` + S3BucketPrefix *string `puppet:"name=>'s3_bucket_prefix'"` +} + +type LoggingOptions struct { + LogStreamArn string `puppet:"name=>'log_stream_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + Id *string +} + +type Logs struct { + Bucket string + Enabled *bool + Prefix *string +} + +type Mapping struct { + SubnetId string 
`puppet:"name=>'subnet_id'"` + AllocationId *string `puppet:"name=>'allocation_id'"` +} + +type MappingParameters struct { + Csv *Csv + Json *Json +} + +type MappingRule struct { + Claim string + MatchType string `puppet:"name=>'match_type'"` + RoleArn string `puppet:"name=>'role_arn'"` + Value string +} + +type Mappings struct { + DeviceName *string `puppet:"name=>'device_name'"` + Ebs *Ebs + NoDevice *string `puppet:"name=>'no_device'"` + VirtualName *string `puppet:"name=>'virtual_name'"` +} + +type MarketOptions struct { + MarketType *string `puppet:"name=>'market_type'"` + SpotOptions *OptionsSpotOptions `puppet:"name=>'spot_options'"` +} + +type Match struct { + Type string + Data *string +} + +type MessageTemplate struct { + EmailMessage *string `puppet:"name=>'email_message'"` + EmailSubject *string `puppet:"name=>'email_subject'"` + SmsMessage *string `puppet:"name=>'sms_message'"` +} + +type Metric struct { + MetricName string `puppet:"name=>'metric_name'"` + MetricNamespace string `puppet:"name=>'metric_namespace'"` + MetricUnit string `puppet:"name=>'metric_unit'"` + MetricValue string `puppet:"name=>'metric_value'"` + RoleArn string `puppet:"name=>'role_arn'"` + MetricTimestamp *string `puppet:"name=>'metric_timestamp'"` +} + +type MetricFilter struct { + Prefix *string + Tags *map[string]string +} + +type MetricSpecification struct { + PredefinedMetricType string `puppet:"name=>'predefined_metric_type'"` + ResourceLabel *string `puppet:"name=>'resource_label'"` +} + +type Mode struct { + NumNodeGroups int64 `puppet:"name=>'num_node_groups'"` + ReplicasPerNodeGroup int64 `puppet:"name=>'replicas_per_node_group'"` +} + +type MongodbSettings struct { + AuthMechanism *string `puppet:"name=>'auth_mechanism'"` + AuthSource *string `puppet:"name=>'auth_source'"` + AuthType *string `puppet:"name=>'auth_type'"` + DocsToInvestigate *string `puppet:"name=>'docs_to_investigate'"` + ExtractDocId *string `puppet:"name=>'extract_doc_id'"` + NestingLevel *string `puppet:"name=>'nesting_level'"` +} + +type Monitoring struct { + Enabled *bool +} + +type NetworkConfiguration struct { + Subnets []string + AssignPublicIp *bool `puppet:"name=>'assign_public_ip'"` + SecurityGroups *[]string `puppet:"name=>'security_groups'"` +} + +type NodeSpec struct { + Backends *[]string + Listener *Listener + ServiceDiscovery *Discovery `puppet:"name=>'service_discovery'"` +} + +type Nodes struct { + Address *string + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + Id *string + Port *int64 +} + +type Notification struct { + Events []string + SnsTopic string `puppet:"name=>'sns_topic'"` +} + +type Notifications struct { + Completed *string + Error *string + Progressing *string + Warning *string +} + +type Oidc struct { + AuthorizationEndpoint string `puppet:"name=>'authorization_endpoint'"` + ClientId string `puppet:"name=>'client_id'"` + ClientSecret string `puppet:"name=>'client_secret'"` + Issuer string + TokenEndpoint string `puppet:"name=>'token_endpoint'"` + UserInfoEndpoint string `puppet:"name=>'user_info_endpoint'"` + AuthenticationRequestExtraParams *map[string]string `puppet:"name=>'authentication_request_extra_params'"` + OnUnauthenticatedRequest *string `puppet:"name=>'on_unauthenticated_request'"` + Scope *string + SessionCookieName *string `puppet:"name=>'session_cookie_name'"` + SessionTimeout *int64 `puppet:"name=>'session_timeout'"` +} + +type Option struct { + ActionOnTimeout *string `puppet:"name=>'action_on_timeout'"` + WaitTimeInMinutes *int64 
`puppet:"name=>'wait_time_in_minutes'"` +} + +type Options struct { + Enabled *bool + LogGroupName *string `puppet:"name=>'log_group_name'"` + LogStreamName *string `puppet:"name=>'log_stream_name'"` +} + +type OptionsSpotOptions struct { + BlockDurationMinutes *int64 `puppet:"name=>'block_duration_minutes'"` + InstanceInterruptionBehavior *string `puppet:"name=>'instance_interruption_behavior'"` + MaxPrice *string `puppet:"name=>'max_price'"` + SpotInstanceType *string `puppet:"name=>'spot_instance_type'"` + ValidUntil *time.Time `puppet:"name=>'valid_until'"` +} + +type OrcSerDe struct { + BlockSizeBytes *int64 `puppet:"name=>'block_size_bytes'"` + BloomFilterColumns *[]string `puppet:"name=>'bloom_filter_columns'"` + BloomFilterFalsePositiveProbability *float64 `puppet:"name=>'bloom_filter_false_positive_probability'"` + Compression *string + DictionaryKeyThreshold *float64 `puppet:"name=>'dictionary_key_threshold'"` + EnablePadding *bool `puppet:"name=>'enable_padding'"` + FormatVersion *string `puppet:"name=>'format_version'"` + PaddingTolerance *float64 `puppet:"name=>'padding_tolerance'"` + RowIndexStride *int64 `puppet:"name=>'row_index_stride'"` + StripeSizeBytes *int64 `puppet:"name=>'stripe_size_bytes'"` +} + +type OrganizationAggregationSource struct { + RoleArn string `puppet:"name=>'role_arn'"` + AllRegions *bool `puppet:"name=>'all_regions'"` + Regions *[]string +} + +type Origin struct { + DomainName string `puppet:"name=>'domain_name'"` + OriginId string `puppet:"name=>'origin_id'"` + CustomHeader *[]GroupParameter `puppet:"name=>'custom_header'"` + CustomOriginConfig *OriginConfig `puppet:"name=>'custom_origin_config'"` + OriginPath *string `puppet:"name=>'origin_path'"` + S3OriginConfig *S3OriginConfig `puppet:"name=>'s3_origin_config'"` +} + +type OriginConfig struct { + HttpPort int64 `puppet:"name=>'http_port'"` + HttpsPort int64 `puppet:"name=>'https_port'"` + OriginProtocolPolicy string `puppet:"name=>'origin_protocol_policy'"` + OriginSslProtocols []string `puppet:"name=>'origin_ssl_protocols'"` + OriginKeepaliveTimeout *int64 `puppet:"name=>'origin_keepalive_timeout'"` + OriginReadTimeout *int64 `puppet:"name=>'origin_read_timeout'"` +} + +type OutputFormatConfiguration struct { + Serializer *Serializer +} + +type OutputLocation struct { + S3BucketName string `puppet:"name=>'s3_bucket_name'"` + S3KeyPrefix *string `puppet:"name=>'s3_key_prefix'"` +} + +type Outputs struct { + Name string + Id *string + KinesisFirehose *FirehoseStreamLambda `puppet:"name=>'kinesis_firehose'"` + KinesisStream *FirehoseStreamLambda `puppet:"name=>'kinesis_stream'"` + Lambda *FirehoseStreamLambda + Schema *OutputsSchema +} + +type OutputsSchema struct { + RecordFormatType *string `puppet:"name=>'record_format_type'"` +} + +type Override struct { + InstanceType *string `puppet:"name=>'instance_type'"` +} + +type PairInfo struct { + ProdTrafficRoute TrafficRoute `puppet:"name=>'prod_traffic_route'"` + TargetGroup []Group `puppet:"name=>'target_group'"` + TestTrafficRoute *TrafficRoute `puppet:"name=>'test_traffic_route'"` +} + +type Parameter struct { + Name string + Value string + ApplyMethod *string `puppet:"name=>'apply_method'"` +} + +type Parameters struct { + ParameterName string `puppet:"name=>'parameter_name'"` + ParameterValue string `puppet:"name=>'parameter_value'"` +} + +type ParquetSerDe struct { + BlockSizeBytes *int64 `puppet:"name=>'block_size_bytes'"` + Compression *string + EnableDictionaryCompression *bool `puppet:"name=>'enable_dictionary_compression'"` + 
MaxPaddingBytes *int64 `puppet:"name=>'max_padding_bytes'"` + PageSizeBytes *int64 `puppet:"name=>'page_size_bytes'"` + WriterVersion *string `puppet:"name=>'writer_version'"` +} + +type PasswordPolicy struct { + MinimumLength *int64 `puppet:"name=>'minimum_length'"` + RequireLowercase *bool `puppet:"name=>'require_lowercase'"` + RequireNumbers *bool `puppet:"name=>'require_numbers'"` + RequireSymbols *bool `puppet:"name=>'require_symbols'"` + RequireUppercase *bool `puppet:"name=>'require_uppercase'"` +} + +type Permission struct { + FromPort int64 `puppet:"name=>'from_port'"` + IpRange string `puppet:"name=>'ip_range'"` + Protocol string + ToPort int64 `puppet:"name=>'to_port'"` +} + +type PermissionCondition struct { + Key string + Type string + Value string +} + +type Permissions struct { + Access *[]string + Grantee *string + GranteeType *string `puppet:"name=>'grantee_type'"` +} + +type Placement struct { + Affinity *string + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + GroupName *string `puppet:"name=>'group_name'"` + HostId *string `puppet:"name=>'host_id'"` + SpreadDomain *string `puppet:"name=>'spread_domain'"` + Tenancy *string +} + +type PlacementStrategy struct { + Type string + Field *string +} + +type Policy struct { + LaunchTemplate PolicyLaunchTemplate `puppet:"name=>'launch_template'"` + InstancesDistribution *Distribution `puppet:"name=>'instances_distribution'"` +} + +type PolicyAttribute struct { + Name *string + Value *string +} + +type PolicyConfiguration struct { + AdjustmentType *string `puppet:"name=>'adjustment_type'"` + Cooldown *int64 + MetricAggregationType *string `puppet:"name=>'metric_aggregation_type'"` + MinAdjustmentMagnitude *int64 `puppet:"name=>'min_adjustment_magnitude'"` + StepAdjustment *[]Adjustment `puppet:"name=>'step_adjustment'"` +} + +type PolicyLaunchTemplate struct { + LaunchTemplateSpecification TemplateSpecification `puppet:"name=>'launch_template_specification'"` + Override *[]Override +} + +type PoolConfig struct { + DefaultAction string `puppet:"name=>'default_action'"` + UserPoolId string `puppet:"name=>'user_pool_id'"` + AppIdClientRegex *string `puppet:"name=>'app_id_client_regex'"` + AwsRegion *string `puppet:"name=>'aws_region'"` +} + +type PoolSchema struct { + AttributeDataType string `puppet:"name=>'attribute_data_type'"` + Name string + DeveloperOnlyAttribute *bool `puppet:"name=>'developer_only_attribute'"` + Mutable *bool + NumberAttributeConstraints *AttributeConstraints `puppet:"name=>'number_attribute_constraints'"` + Required *bool + StringAttributeConstraints *StringAttributeConstraints `puppet:"name=>'string_attribute_constraints'"` +} + +type PortMapping struct { + Port int64 + Protocol string +} + +type PositionConfiguration struct { + StartingPosition *string `puppet:"name=>'starting_position'"` +} + +type Predicate struct { + Conditions []Conditions + Logical *string +} + +type PredicatesPredicate struct { + DataId string `puppet:"name=>'data_id'"` + Negated bool + Type string +} + +type PremConfig struct { + AgentArns []string `puppet:"name=>'agent_arns'"` +} + +type Process struct { + ConcurrentExecutions int64 `puppet:"name=>'concurrent_executions'"` + LaunchPath string `puppet:"name=>'launch_path'"` + Parameters *string +} + +type ProcessingConfiguration struct { + Lambda *FirehoseStreamLambda +} + +type Processors struct { + Type string + Parameters *[]Parameters +} + +type Profile struct { + Arn *string + Name *string +} + +type Properties struct { + DeliveryFrequency *string 
`puppet:"name=>'delivery_frequency'"` +} + +type Property struct { + MaxConcurrentRuns *int64 `puppet:"name=>'max_concurrent_runs'"` +} + +type Providers struct { + ClientId *string `puppet:"name=>'client_id'"` + ProviderName *string `puppet:"name=>'provider_name'"` + ServerSideTokenCheck *bool `puppet:"name=>'server_side_token_check'"` +} + +type ProvisioningOption struct { + Action *string +} + +type PublishingOptions struct { + CloudwatchLogGroupArn string `puppet:"name=>'cloudwatch_log_group_arn'"` + LogType string `puppet:"name=>'log_type'"` + Enabled *bool +} + +type Query struct { + Query string + Type *string +} + +type Queue struct { + Events []string + QueueArn string `puppet:"name=>'queue_arn'"` + FilterPrefix *string `puppet:"name=>'filter_prefix'"` + FilterSuffix *string `puppet:"name=>'filter_suffix'"` + Id *string +} + +type QuietTime struct { + End *string + Start *string +} + +type QuotaSettings struct { + Limit int64 + Period string + Offset *int64 +} + +type RecordingGroup struct { + AllSupported *bool `puppet:"name=>'all_supported'"` + IncludeGlobalResourceTypes *bool `puppet:"name=>'include_global_resource_types'"` + ResourceTypes *[]string `puppet:"name=>'resource_types'"` +} + +type Records struct { + Ttl int64 + Type string +} + +type Redirect struct { + StatusCode string `puppet:"name=>'status_code'"` + Host *string + Path *string + Port *string + Protocol *string + Query *string +} + +type RedshiftConfiguration struct { + ClusterJdbcurl string `puppet:"name=>'cluster_jdbcurl'"` + DataTableName string `puppet:"name=>'data_table_name'"` + Password string + RoleArn string `puppet:"name=>'role_arn'"` + Username string + CloudwatchLoggingOptions *Options `puppet:"name=>'cloudwatch_logging_options'"` + CopyOptions *string `puppet:"name=>'copy_options'"` + DataTableColumns *string `puppet:"name=>'data_table_columns'"` + ProcessingConfiguration *Configuration `puppet:"name=>'processing_configuration'"` + RetryDuration *int64 `puppet:"name=>'retry_duration'"` + S3BackupConfiguration *BackupConfiguration `puppet:"name=>'s3_backup_configuration'"` + S3BackupMode *string `puppet:"name=>'s3_backup_mode'"` +} + +type Registries struct { + RegistryArn string `puppet:"name=>'registry_arn'"` + ContainerName *string `puppet:"name=>'container_name'"` + ContainerPort *int64 `puppet:"name=>'container_port'"` + Port *int64 +} + +type Replica struct { + RegionName string `puppet:"name=>'region_name'"` +} + +type ReplicationConfiguration struct { + Role string + Rules []Rules +} + +type Republish struct { + RoleArn string `puppet:"name=>'role_arn'"` + Topic string +} + +type RequestEbsBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Encrypted *bool + Iops *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeId *string `puppet:"name=>'volume_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type Requirements struct { + AvailabilityZone *string `puppet:"name=>'availability_zone'"` + SecurityGroupIdList *[]string `puppet:"name=>'security_group_id_list'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type ReservationSpecification struct { + CapacityReservationPreference *string `puppet:"name=>'capacity_reservation_preference'"` + CapacityReservationTarget *ReservationTarget `puppet:"name=>'capacity_reservation_target'"` +} + +type ReservationTarget struct { + CapacityReservationId *string 
`puppet:"name=>'capacity_reservation_id'"` +} + +type Resource struct { + Type string + Values []string +} + +type Resources struct { + InstanceRole string `puppet:"name=>'instance_role'"` + InstanceType []string `puppet:"name=>'instance_type'"` + MaxVcpus int64 `puppet:"name=>'max_vcpus'"` + MinVcpus int64 `puppet:"name=>'min_vcpus'"` + SecurityGroupIds []string `puppet:"name=>'security_group_ids'"` + Subnets []string + Type string + BidPercentage *int64 `puppet:"name=>'bid_percentage'"` + DesiredVcpus *int64 `puppet:"name=>'desired_vcpus'"` + Ec2KeyPair *string `puppet:"name=>'ec2_key_pair'"` + ImageId *string `puppet:"name=>'image_id'"` + SpotIamFleetRole *string `puppet:"name=>'spot_iam_fleet_role'"` + Tags *map[string]string +} + +type Response struct { + ContentType string `puppet:"name=>'content_type'"` + MessageBody *string `puppet:"name=>'message_body'"` + StatusCode *string `puppet:"name=>'status_code'"` +} + +type Rest struct { + Enabled bool + KmsKeyId *string `puppet:"name=>'kms_key_id'"` +} + +type Restriction struct { + RestrictionType string `puppet:"name=>'restriction_type'"` + Locations *[]string +} + +type Restrictions struct { + GeoRestriction *Restriction `puppet:"name=>'geo_restriction'"` +} + +type Retention struct { + Mode string + Days *int64 + Years *int64 +} + +type RevocationConfiguration struct { + CrlConfiguration *CrlConfiguration `puppet:"name=>'crl_configuration'"` +} + +type RoleMapping struct { + IdentityProvider string `puppet:"name=>'identity_provider'"` + Type string + AmbiguousRoleResolution *string `puppet:"name=>'ambiguous_role_resolution'"` + MappingRule *[]MappingRule `puppet:"name=>'mapping_rule'"` +} + +type Roles struct { + Authenticated *string + Unauthenticated *string +} + +type RollbackConfiguration struct { + Enabled *bool + Events *[]string +} + +type RootBlockDevice struct { + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Iops *int64 + VolumeId *string `puppet:"name=>'volume_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type RotationRules struct { + AutomaticallyAfterDays int64 `puppet:"name=>'automatically_after_days'"` +} + +type RouteAction struct { + WeightedTarget []Target `puppet:"name=>'weighted_target'"` +} + +type RouteMatch struct { + Prefix string +} + +type RouterSpec struct { + ServiceNames []string `puppet:"name=>'service_names'"` +} + +type Routes struct { + DestinationCidrBlock *string `puppet:"name=>'destination_cidr_block'"` + Source *string + State *string +} + +type RoutingConfig struct { + TimeBasedCanary *CanaryLinear `puppet:"name=>'time_based_canary'"` + TimeBasedLinear *CanaryLinear `puppet:"name=>'time_based_linear'"` + Type *string +} + +type RoutingPolicy struct { + Continent *string + Country *string + Subdivision *string +} + +type RoutingStrategy struct { + Type string + FleetId *string `puppet:"name=>'fleet_id'"` + Message *string +} + +type Rule struct { + Priority int64 + RuleId string `puppet:"name=>'rule_id'"` + Action *Action + Type *string +} + +type RuleParallelism struct { + Count int64 +} + +type RuleRules struct { + Priority int64 + RuleId string `puppet:"name=>'rule_id'"` + Action *Action + OverrideAction *Action `puppet:"name=>'override_action'"` + Type *string +} + +type RuleS3 struct { + BucketName string `puppet:"name=>'bucket_name'"` + Key string + RoleArn string `puppet:"name=>'role_arn'"` +} + +type RuleScope struct { + ComplianceResourceId *string `puppet:"name=>'compliance_resource_id'"` + 
ComplianceResourceTypes *[]string `puppet:"name=>'compliance_resource_types'"` + TagKey *string `puppet:"name=>'tag_key'"` + TagValue *string `puppet:"name=>'tag_value'"` +} + +type RuleSource struct { + Owner string + SourceIdentifier string `puppet:"name=>'source_identifier'"` + SourceDetail *[]Detail `puppet:"name=>'source_detail'"` +} + +type RuleTransition struct { + StorageClass string `puppet:"name=>'storage_class'"` + Date *string + Days *int64 +} + +type Rules struct { + Destination Destination + Status string + Filter *MetricFilter + Id *string + Prefix *string + Priority *int64 + SourceSelectionCriteria *Criteria `puppet:"name=>'source_selection_criteria'"` +} + +type RuntimeConfiguration struct { + GameSessionActivationTimeoutSeconds *int64 `puppet:"name=>'game_session_activation_timeout_seconds'"` + MaxConcurrentGameSessionActivations *int64 `puppet:"name=>'max_concurrent_game_session_activations'"` + ServerProcess *[]Process `puppet:"name=>'server_process'"` +} + +type S3Action struct { + BucketName string `puppet:"name=>'bucket_name'"` + Position int64 + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + ObjectKeyPrefix *string `puppet:"name=>'object_key_prefix'"` + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type S3Config struct { + BucketAccessRoleArn string `puppet:"name=>'bucket_access_role_arn'"` +} + +type S3Configuration struct { + BucketArn string `puppet:"name=>'bucket_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` + BufferInterval *int64 `puppet:"name=>'buffer_interval'"` + BufferSize *int64 `puppet:"name=>'buffer_size'"` + CloudwatchLoggingOptions *Options `puppet:"name=>'cloudwatch_logging_options'"` + CompressionFormat *string `puppet:"name=>'compression_format'"` + DataFormatConversionConfiguration *ConversionConfiguration `puppet:"name=>'data_format_conversion_configuration'"` + ErrorOutputPrefix *string `puppet:"name=>'error_output_prefix'"` + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + Prefix *string + ProcessingConfiguration *Configuration `puppet:"name=>'processing_configuration'"` + S3BackupConfiguration *BackupConfiguration `puppet:"name=>'s3_backup_configuration'"` + S3BackupMode *string `puppet:"name=>'s3_backup_mode'"` +} + +type S3Destination struct { + BucketName string `puppet:"name=>'bucket_name'"` + Region string + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + Prefix *string + SyncFormat *string `puppet:"name=>'sync_format'"` +} + +type S3Encryption struct { + KmsKeyArn *string `puppet:"name=>'kms_key_arn'"` + S3EncryptionMode *string `puppet:"name=>'s3_encryption_mode'"` +} + +type S3OriginConfig struct { + OriginAccessIdentity string `puppet:"name=>'origin_access_identity'"` +} + +type S3Settings struct { + BucketFolder *string `puppet:"name=>'bucket_folder'"` + BucketName *string `puppet:"name=>'bucket_name'"` + CompressionType *string `puppet:"name=>'compression_type'"` + CsvDelimiter *string `puppet:"name=>'csv_delimiter'"` + CsvRowDelimiter *string `puppet:"name=>'csv_row_delimiter'"` + ExternalTableDefinition *string `puppet:"name=>'external_table_definition'"` + ServiceAccessRoleArn *string `puppet:"name=>'service_access_role_arn'"` +} + +type S3Target struct { + Path string + Exclusions *[]string +} + +type ScalingConfiguration struct { + AutoPause *bool `puppet:"name=>'auto_pause'"` + MaxCapacity *int64 `puppet:"name=>'max_capacity'"` + MinCapacity *int64 `puppet:"name=>'min_capacity'"` + SecondsUntilAutoPause *int64 `puppet:"name=>'seconds_until_auto_pause'"` +} + +type ScalingPolicyConfiguration struct { + 
TargetValue float64 `puppet:"name=>'target_value'"` + CustomizedMetricSpecification *CustomizedMetricSpecification `puppet:"name=>'customized_metric_specification'"` + DisableScaleIn *bool `puppet:"name=>'disable_scale_in'"` + PredefinedMetricSpecification *MetricSpecification `puppet:"name=>'predefined_metric_specification'"` + ScaleInCooldown *int64 `puppet:"name=>'scale_in_cooldown'"` + ScaleOutCooldown *int64 `puppet:"name=>'scale_out_cooldown'"` +} + +type Schedule struct { + Name string + CopyTags *bool `puppet:"name=>'copy_tags'"` + CreateRule *CreateRule `puppet:"name=>'create_rule'"` + RetainRule *RuleParallelism `puppet:"name=>'retain_rule'"` + TagsToAdd *map[string]string `puppet:"name=>'tags_to_add'"` +} + +type Schema struct { + RecordColumns []Columns `puppet:"name=>'record_columns'"` + RecordEncoding *string `puppet:"name=>'record_encoding'"` + RecordFormat *Format `puppet:"name=>'record_format'"` +} + +type SchemaConfiguration struct { + DatabaseName string `puppet:"name=>'database_name'"` + RoleArn string `puppet:"name=>'role_arn'"` + TableName string `puppet:"name=>'table_name'"` + CatalogId *string `puppet:"name=>'catalog_id'"` + Region *string + VersionId *string `puppet:"name=>'version_id'"` +} + +type Scope struct { + ScopeDescription string `puppet:"name=>'scope_description'"` + ScopeName string `puppet:"name=>'scope_name'"` +} + +type SecondaryArtifacts struct { + ArtifactIdentifier string `puppet:"name=>'artifact_identifier'"` + Type string + EncryptionDisabled *bool `puppet:"name=>'encryption_disabled'"` + Location *string + Name *string + NamespaceType *string `puppet:"name=>'namespace_type'"` + Packaging *string + Path *string +} + +type SecondaryIndex struct { + Name string + ProjectionType string `puppet:"name=>'projection_type'"` + RangeKey string `puppet:"name=>'range_key'"` + NonKeyAttributes *[]string `puppet:"name=>'non_key_attributes'"` +} + +type Selector struct { + DataResource *[]Resource `puppet:"name=>'data_resource'"` + IncludeManagementEvents *bool `puppet:"name=>'include_management_events'"` + ReadWriteType *string `puppet:"name=>'read_write_type'"` +} + +type SerDe struct { + TimestampFormats *[]string `puppet:"name=>'timestamp_formats'"` +} + +type Serializer struct { + OrcSerDe *OrcSerDe `puppet:"name=>'orc_ser_de'"` + ParquetSerDe *ParquetSerDe `puppet:"name=>'parquet_ser_de'"` +} + +type Service struct { + ClusterName string `puppet:"name=>'cluster_name'"` + ServiceName string `puppet:"name=>'service_name'"` +} + +type Set struct { + Ec2TagFilter *[]Filter `puppet:"name=>'ec2_tag_filter'"` +} + +type Sets struct { + IpAddresses *[]string `puppet:"name=>'ip_addresses'"` + IpFamily *string `puppet:"name=>'ip_family'"` +} + +type Setting struct { + Name string + Namespace string + Value string + Resource *string +} + +type Settings struct { + BurstLimit *int64 `puppet:"name=>'burst_limit'"` + RateLimit *float64 `puppet:"name=>'rate_limit'"` +} + +type SettingsSettings struct { + CacheDataEncrypted *bool `puppet:"name=>'cache_data_encrypted'"` + CacheTtlInSeconds *int64 `puppet:"name=>'cache_ttl_in_seconds'"` + CachingEnabled *bool `puppet:"name=>'caching_enabled'"` + DataTraceEnabled *bool `puppet:"name=>'data_trace_enabled'"` + LoggingLevel *string `puppet:"name=>'logging_level'"` + MetricsEnabled *bool `puppet:"name=>'metrics_enabled'"` + RequireAuthorizationForCacheControl *bool `puppet:"name=>'require_authorization_for_cache_control'"` + ThrottlingBurstLimit *int64 `puppet:"name=>'throttling_burst_limit'"` + ThrottlingRateLimit *float64 
`puppet:"name=>'throttling_rate_limit'"` + UnauthorizedCacheControlHeaderStrategy *string `puppet:"name=>'unauthorized_cache_control_header_strategy'"` +} + +type SideEncryptionConfiguration struct { + Rule *EncryptionConfigurationRule +} + +type SizeConstraints struct { + ComparisonOperator string `puppet:"name=>'comparison_operator'"` + Size int64 + TextTransformation string `puppet:"name=>'text_transformation'"` + FieldToMatch *Match `puppet:"name=>'field_to_match'"` +} + +type SkewedInfo struct { + SkewedColumnNames *[]string `puppet:"name=>'skewed_column_names'"` + SkewedColumnValueLocationMaps *map[string]string `puppet:"name=>'skewed_column_value_location_maps'"` + SkewedColumnValues *[]string `puppet:"name=>'skewed_column_values'"` +} + +type SmsConfiguration struct { + ExternalId string `puppet:"name=>'external_id'"` + SnsCallerArn string `puppet:"name=>'sns_caller_arn'"` +} + +type SnapshotOptions struct { + AutomatedSnapshotStartHour int64 `puppet:"name=>'automated_snapshot_start_hour'"` +} + +type Sns struct { + RoleArn string `puppet:"name=>'role_arn'"` + TargetArn string `puppet:"name=>'target_arn'"` + MessageFormat *string `puppet:"name=>'message_format'"` +} + +type SnsAction struct { + Position int64 + TopicArn string `puppet:"name=>'topic_arn'"` +} + +type SnsDestination struct { + TopicArn string `puppet:"name=>'topic_arn'"` +} + +type SortColumns struct { + Column string + SortOrder int64 `puppet:"name=>'sort_order'"` +} + +type Source struct { + Type string + Auth *[]Auth + Buildspec *string + GitCloneDepth *int64 `puppet:"name=>'git_clone_depth'"` + InsecureSsl *bool `puppet:"name=>'insecure_ssl'"` + Location *string + ReportBuildStatus *bool `puppet:"name=>'report_build_status'"` +} + +type SourceConfiguration struct { + KinesisStreamArn string `puppet:"name=>'kinesis_stream_arn'"` + RoleArn string `puppet:"name=>'role_arn'"` +} + +type Sources struct { + SourceIdentifier string `puppet:"name=>'source_identifier'"` + Type string + Auth *[]Auth + Buildspec *string + GitCloneDepth *int64 `puppet:"name=>'git_clone_depth'"` + InsecureSsl *bool `puppet:"name=>'insecure_ssl'"` + Location *string + ReportBuildStatus *bool `puppet:"name=>'report_build_status'"` +} + +type SourcesS3 struct { + BucketArn string `puppet:"name=>'bucket_arn'"` + FileKey string `puppet:"name=>'file_key'"` + RoleArn string `puppet:"name=>'role_arn'"` +} + +type Spec struct { + HttpRoute *HttpRoute `puppet:"name=>'http_route'"` +} + +type Specification struct { + CpuCredits *string `puppet:"name=>'cpu_credits'"` +} + +type SpecificationEbsBlockDevice struct { + DeviceName string `puppet:"name=>'device_name'"` + DeleteOnTermination *bool `puppet:"name=>'delete_on_termination'"` + Encrypted *bool + Iops *int64 + SnapshotId *string `puppet:"name=>'snapshot_id'"` + VolumeSize *int64 `puppet:"name=>'volume_size'"` + VolumeType *string `puppet:"name=>'volume_type'"` +} + +type Specifications struct { + ResourceType *string `puppet:"name=>'resource_type'"` + Tags *map[string]string +} + +type SplunkConfiguration struct { + HecEndpoint string `puppet:"name=>'hec_endpoint'"` + HecToken string `puppet:"name=>'hec_token'"` + CloudwatchLoggingOptions *Options `puppet:"name=>'cloudwatch_logging_options'"` + HecAcknowledgmentTimeout *int64 `puppet:"name=>'hec_acknowledgment_timeout'"` + HecEndpointType *string `puppet:"name=>'hec_endpoint_type'"` + ProcessingConfiguration *Configuration `puppet:"name=>'processing_configuration'"` + RetryDuration *int64 `puppet:"name=>'retry_duration'"` + S3BackupMode *string 
`puppet:"name=>'s3_backup_mode'"` +} + +type SpotOptions struct { + AllocationStrategy *string `puppet:"name=>'allocation_strategy'"` + InstanceInterruptionBehavior *string `puppet:"name=>'instance_interruption_behavior'"` + InstancePoolsToUseCount *int64 `puppet:"name=>'instance_pools_to_use_count'"` +} + +type Sqs struct { + QueueUrl string `puppet:"name=>'queue_url'"` + RoleArn string `puppet:"name=>'role_arn'"` + UseBase64 bool `puppet:"name=>'use_base64'"` +} + +type SqsTarget struct { + MessageGroupId *string `puppet:"name=>'message_group_id'"` +} + +type SseS3 struct { +} + +type SslConfiguration struct { + Certificate string + PrivateKey string `puppet:"name=>'private_key'"` + Chain *string +} + +type Stage struct { + Action []StageAction + Name string +} + +type StageAction struct { + Category string + Name string + Owner string + Provider string + Version string + Configuration *map[string]string + InputArtifacts *[]string `puppet:"name=>'input_artifacts'"` + OutputArtifacts *[]string `puppet:"name=>'output_artifacts'"` + RoleArn *string `puppet:"name=>'role_arn'"` + RunOrder *int64 `puppet:"name=>'run_order'"` +} + +type Stages struct { + ApiId string `puppet:"name=>'api_id'"` + Stage string +} + +type Step struct { + Jar string + Args *[]string + MainClass *string `puppet:"name=>'main_class'"` + Properties *map[string]string +} + +type Stickiness struct { + Type string + CookieDuration *int64 `puppet:"name=>'cookie_duration'"` + Enabled *bool +} + +type StopAction struct { + Position int64 + Scope string + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type StorageLocation struct { + Bucket string + Key string + RoleArn string `puppet:"name=>'role_arn'"` +} + +type Store struct { + Location string + Type string + EncryptionKey *Key `puppet:"name=>'encryption_key'"` +} + +type Strategy struct { + Attempts *int64 +} + +type StringAttributeConstraints struct { + MaxLength *string `puppet:"name=>'max_length'"` + MinLength *string `puppet:"name=>'min_length'"` +} + +type Style struct { + DeploymentOption *string `puppet:"name=>'deployment_option'"` + DeploymentType *string `puppet:"name=>'deployment_type'"` +} + +type Subject struct { + CommonName *string `puppet:"name=>'common_name'"` + Country *string + DistinguishedNameQualifier *string `puppet:"name=>'distinguished_name_qualifier'"` + GenerationQualifier *string `puppet:"name=>'generation_qualifier'"` + GivenName *string `puppet:"name=>'given_name'"` + Initials *string + Locality *string + Organization *string + OrganizationalUnit *string `puppet:"name=>'organizational_unit'"` + Pseudonym *string + State *string + Surname *string + Title *string +} + +type Success struct { + Action *string + TerminationWaitTimeInMinutes *int64 `puppet:"name=>'termination_wait_time_in_minutes'"` +} + +type TableRoute struct { + CidrBlock *string `puppet:"name=>'cidr_block'"` + EgressOnlyGatewayId *string `puppet:"name=>'egress_only_gateway_id'"` + GatewayId *string `puppet:"name=>'gateway_id'"` + InstanceId *string `puppet:"name=>'instance_id'"` + Ipv6CidrBlock *string `puppet:"name=>'ipv6_cidr_block'"` + NatGatewayId *string `puppet:"name=>'nat_gateway_id'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + TransitGatewayId *string `puppet:"name=>'transit_gateway_id'"` + VpcPeeringConnectionId *string `puppet:"name=>'vpc_peering_connection_id'"` +} + +type Tag struct { + Key string + PropagateAtLaunch bool `puppet:"name=>'propagate_at_launch'"` + Value string +} + +type Target struct { + VirtualNode string 
`puppet:"name=>'virtual_node'"` + Weight int64 +} + +type TargetAction struct { + MaxCapacity *int64 `puppet:"name=>'max_capacity'"` + MinCapacity *int64 `puppet:"name=>'min_capacity'"` +} + +type Targets struct { + Key string + Values []string +} + +type TaskOptions struct { + Atime *string + BytesPerSecond *int64 `puppet:"name=>'bytes_per_second'"` + Gid *string + Mtime *string + PosixPermissions *string `puppet:"name=>'posix_permissions'"` + PreserveDeletedFiles *string `puppet:"name=>'preserve_deleted_files'"` + PreserveDevices *string `puppet:"name=>'preserve_devices'"` + Uid *string + VerifyMode *string `puppet:"name=>'verify_mode'"` +} + +type TaskParameters struct { + Name string + Values []string +} + +type Telemetry struct { + AcceptedRouteCount *int64 `puppet:"name=>'accepted_route_count'"` + LastStatusChange *string `puppet:"name=>'last_status_change'"` + OutsideIpAddress *string `puppet:"name=>'outside_ip_address'"` + Status *string + StatusMessage *string `puppet:"name=>'status_message'"` +} + +type Template struct { + Id *string + Name *string + Version *string +} + +type TemplateConfig struct { + LaunchTemplateSpecification TemplateSpecification `puppet:"name=>'launch_template_specification'"` + Override *[]ConfigOverride +} + +type TemplateSpecification struct { + LaunchTemplateId *string `puppet:"name=>'launch_template_id'"` + LaunchTemplateName *string `puppet:"name=>'launch_template_name'"` + Version *string +} + +type ThrottleSettings struct { + BurstLimit *int64 `puppet:"name=>'burst_limit'"` + RateLimit *float64 `puppet:"name=>'rate_limit'"` +} + +type Thumbnails struct { + AspectRatio *string `puppet:"name=>'aspect_ratio'"` + Format *string + Interval *string + MaxHeight *string `puppet:"name=>'max_height'"` + MaxWidth *string `puppet:"name=>'max_width'"` + PaddingPolicy *string `puppet:"name=>'padding_policy'"` + Resolution *string + SizingPolicy *string `puppet:"name=>'sizing_policy'"` +} + +type Time struct { + DayOfWeek string `puppet:"name=>'day_of_week'"` + TimeOfDay string `puppet:"name=>'time_of_day'"` + TimeZone string `puppet:"name=>'time_zone'"` +} + +type Timeout struct { + AttemptDurationSeconds *int64 `puppet:"name=>'attempt_duration_seconds'"` +} + +type Topic struct { + Events []string + TopicArn string `puppet:"name=>'topic_arn'"` + FilterPrefix *string `puppet:"name=>'filter_prefix'"` + FilterSuffix *string `puppet:"name=>'filter_suffix'"` + Id *string +} + +type TracingConfig struct { + Mode string +} + +type TrackingConfiguration struct { + TargetValue float64 `puppet:"name=>'target_value'"` + CustomizedMetricSpecification *ConfigurationCustomizedMetricSpecification `puppet:"name=>'customized_metric_specification'"` + DisableScaleIn *bool `puppet:"name=>'disable_scale_in'"` + PredefinedMetricSpecification *MetricSpecification `puppet:"name=>'predefined_metric_specification'"` +} + +type TrafficRoute struct { + ListenerArns []string `puppet:"name=>'listener_arns'"` +} + +type Transformation struct { + Name string + Namespace string + Value string + DefaultValue *string `puppet:"name=>'default_value'"` +} + +type Transformer struct { + InputTemplate string `puppet:"name=>'input_template'"` + InputPaths *map[string]string `puppet:"name=>'input_paths'"` +} + +type Transition struct { + StorageClass string `puppet:"name=>'storage_class'"` + Days *int64 +} + +type Translation struct { + Owner string +} + +type Trigger struct { + DestinationArn string `puppet:"name=>'destination_arn'"` + Events []string + Name string + Branches *[]string + CustomData 
*string `puppet:"name=>'custom_data'"` +} + +type TriggerConfiguration struct { + TriggerEvents []string `puppet:"name=>'trigger_events'"` + TriggerName string `puppet:"name=>'trigger_name'"` + TriggerTargetArn string `puppet:"name=>'trigger_target_arn'"` +} + +type Ttl struct { + AttributeName string `puppet:"name=>'attribute_name'"` + Enabled bool +} + +type Tuple struct { + RegexPatternSetId string `puppet:"name=>'regex_pattern_set_id'"` + TextTransformation string `puppet:"name=>'text_transformation'"` + FieldToMatch *Match `puppet:"name=>'field_to_match'"` +} + +type Tuples struct { + PositionalConstraint string `puppet:"name=>'positional_constraint'"` + TextTransformation string `puppet:"name=>'text_transformation'"` + FieldToMatch *Match `puppet:"name=>'field_to_match'"` + TargetString *string `puppet:"name=>'target_string'"` +} + +type TuplesTuple struct { + TextTransformation string `puppet:"name=>'text_transformation'"` + FieldToMatch *Match `puppet:"name=>'field_to_match'"` +} + +type Type struct { + Continuous *string + OneTime *string `puppet:"name=>'one_time'"` +} + +type TypeProperties struct { + Description *string + SearchableAttributes *[]string `puppet:"name=>'searchable_attributes'"` +} + +type Types struct { + IncludeCredit *bool `puppet:"name=>'include_credit'"` + IncludeDiscount *bool `puppet:"name=>'include_discount'"` + IncludeOtherSubscription *bool `puppet:"name=>'include_other_subscription'"` + IncludeRecurring *bool `puppet:"name=>'include_recurring'"` + IncludeRefund *bool `puppet:"name=>'include_refund'"` + IncludeSubscription *bool `puppet:"name=>'include_subscription'"` + IncludeSupport *bool `puppet:"name=>'include_support'"` + IncludeTax *bool `puppet:"name=>'include_tax'"` + IncludeUpfront *bool `puppet:"name=>'include_upfront'"` + UseAmortized *bool `puppet:"name=>'use_amortized'"` + UseBlended *bool `puppet:"name=>'use_blended'"` +} + +type User struct { + Password string + Username string + ConsoleAccess *bool `puppet:"name=>'console_access'"` + Groups *[]string +} + +type UserConfig struct { + AllowAdminCreateUserOnly *bool `puppet:"name=>'allow_admin_create_user_only'"` + InviteMessageTemplate *MessageTemplate `puppet:"name=>'invite_message_template'"` + UnusedAccountValidityDays *int64 `puppet:"name=>'unused_account_validity_days'"` +} + +type ValidationOptions struct { + DomainName *string `puppet:"name=>'domain_name'"` + ResourceRecordName *string `puppet:"name=>'resource_record_name'"` + ResourceRecordType *string `puppet:"name=>'resource_record_type'"` + ResourceRecordValue *string `puppet:"name=>'resource_record_value'"` +} + +type Values struct { + QueryString bool `puppet:"name=>'query_string'"` + Cookies *Cookies + Headers *[]string + QueryStringCacheKeys *[]string `puppet:"name=>'query_string_cache_keys'"` +} + +type Variable struct { + Name string + Value string + Type *string +} + +type VerificationMessageTemplate struct { + DefaultEmailOption *string `puppet:"name=>'default_email_option'"` + EmailMessage *string `puppet:"name=>'email_message'"` + EmailMessageByLink *string `puppet:"name=>'email_message_by_link'"` + EmailSubject *string `puppet:"name=>'email_subject'"` + EmailSubjectByLink *string `puppet:"name=>'email_subject_by_link'"` + SmsMessage *string `puppet:"name=>'sms_message'"` +} + +type VersionExpiration struct { + Days *int64 +} + +type Versioning struct { + Enabled *bool + MfaDelete *bool `puppet:"name=>'mfa_delete'"` +} + +type Video struct { + AspectRatio *string `puppet:"name=>'aspect_ratio'"` + BitRate *string 
`puppet:"name=>'bit_rate'"` + Codec *string + DisplayAspectRatio *string `puppet:"name=>'display_aspect_ratio'"` + FixedGop *string `puppet:"name=>'fixed_gop'"` + FrameRate *string `puppet:"name=>'frame_rate'"` + KeyframesMaxDist *string `puppet:"name=>'keyframes_max_dist'"` + MaxFrameRate *string `puppet:"name=>'max_frame_rate'"` + MaxHeight *string `puppet:"name=>'max_height'"` + MaxWidth *string `puppet:"name=>'max_width'"` + PaddingPolicy *string `puppet:"name=>'padding_policy'"` + Resolution *string + SizingPolicy *string `puppet:"name=>'sizing_policy'"` +} + +type Volume struct { + MountPoint string `puppet:"name=>'mount_point'"` + NumberOfDisks int64 `puppet:"name=>'number_of_disks'"` + Size int64 + Iops *int64 + RaidLevel *string `puppet:"name=>'raid_level'"` + Type *string +} + +type VolumeConfiguration struct { + Autoprovision *bool + Driver *string + DriverOpts *map[string]string `puppet:"name=>'driver_opts'"` + Labels *map[string]string + Scope *string +} + +type VpcConfig struct { + SecurityGroupIds []string `puppet:"name=>'security_group_ids'"` + Subnets []string + VpcId string `puppet:"name=>'vpc_id'"` +} + +type VpcOptions struct { + AvailabilityZones *[]string `puppet:"name=>'availability_zones'"` + SecurityGroupIds *[]string `puppet:"name=>'security_group_ids'"` + SubnetIds *[]string `puppet:"name=>'subnet_ids'"` + VpcId *string `puppet:"name=>'vpc_id'"` +} + +type VpcSettings struct { + SubnetIds []string `puppet:"name=>'subnet_ids'"` + VpcId string `puppet:"name=>'vpc_id'"` +} + +type Watermarks struct { + HorizontalAlign *string `puppet:"name=>'horizontal_align'"` + HorizontalOffset *string `puppet:"name=>'horizontal_offset'"` + Id *string + MaxHeight *string `puppet:"name=>'max_height'"` + MaxWidth *string `puppet:"name=>'max_width'"` + Opacity *string + SizingPolicy *string `puppet:"name=>'sizing_policy'"` + Target *string + VerticalAlign *string `puppet:"name=>'vertical_align'"` + VerticalOffset *string `puppet:"name=>'vertical_offset'"` +} + +type WebhookFilter struct { + JsonPath string `puppet:"name=>'json_path'"` + MatchEquals string `puppet:"name=>'match_equals'"` +} + +type Website struct { + ErrorDocument *string `puppet:"name=>'error_document'"` + IndexDocument *string `puppet:"name=>'index_document'"` + RedirectAllRequestsTo *string `puppet:"name=>'redirect_all_requests_to'"` + RoutingRules *string `puppet:"name=>'routing_rules'"` +} + +type WeightedRoutingPolicy struct { + Weight int64 +} + +type WorkmailAction struct { + OrganizationArn string `puppet:"name=>'organization_arn'"` + Position int64 + TopicArn *string `puppet:"name=>'topic_arn'"` +} + +type XmlClassifier struct { + Classification string + RowTag string `puppet:"name=>'row_tag'"` +} + +type ZoneVpc struct { + VpcId string `puppet:"name=>'vpc_id'"` + VpcRegion *string `puppet:"name=>'vpc_region'"` +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Aws::Acm_certificate"), reflect.TypeOf(&AcmCertificate{})) + ir.RegisterType(load("Aws::Acm_certificate_validation"), reflect.TypeOf(&AcmCertificateValidation{})) + ir.RegisterType(load("Aws::Acmpca_certificate_authority"), reflect.TypeOf(&AcmpcaCertificateAuthority{})) + ir.RegisterType(load("Aws::Alb"), reflect.TypeOf(&Alb{})) + ir.RegisterType(load("Aws::Alb_listener"), reflect.TypeOf(&AlbListener{})) + 
ir.RegisterType(load("Aws::Alb_listener_certificate"), reflect.TypeOf(&AlbListenerCertificate{})) + ir.RegisterType(load("Aws::Alb_listener_rule"), reflect.TypeOf(&AlbListenerRule{})) + ir.RegisterType(load("Aws::Alb_target_group"), reflect.TypeOf(&AlbTargetGroup{})) + ir.RegisterType(load("Aws::Alb_target_group_attachment"), reflect.TypeOf(&AlbTargetGroupAttachment{})) + ir.RegisterType(load("Aws::Ami"), reflect.TypeOf(&Ami{})) + ir.RegisterType(load("Aws::Ami_copy"), reflect.TypeOf(&AmiCopy{})) + ir.RegisterType(load("Aws::Ami_from_instance"), reflect.TypeOf(&AmiFromInstance{})) + ir.RegisterType(load("Aws::Ami_launch_permission"), reflect.TypeOf(&AmiLaunchPermission{})) + ir.RegisterType(load("Aws::Api_gateway_account"), reflect.TypeOf(&ApiGatewayAccount{})) + ir.RegisterType(load("Aws::Api_gateway_api_key"), reflect.TypeOf(&ApiGatewayApiKey{})) + ir.RegisterType(load("Aws::Api_gateway_authorizer"), reflect.TypeOf(&ApiGatewayAuthorizer{})) + ir.RegisterType(load("Aws::Api_gateway_base_path_mapping"), reflect.TypeOf(&ApiGatewayBasePathMapping{})) + ir.RegisterType(load("Aws::Api_gateway_client_certificate"), reflect.TypeOf(&ApiGatewayClientCertificate{})) + ir.RegisterType(load("Aws::Api_gateway_deployment"), reflect.TypeOf(&ApiGatewayDeployment{})) + ir.RegisterType(load("Aws::Api_gateway_documentation_part"), reflect.TypeOf(&ApiGatewayDocumentationPart{})) + ir.RegisterType(load("Aws::Api_gateway_documentation_version"), reflect.TypeOf(&ApiGatewayDocumentationVersion{})) + ir.RegisterType(load("Aws::Api_gateway_domain_name"), reflect.TypeOf(&ApiGatewayDomainName{})) + ir.RegisterType(load("Aws::Api_gateway_gateway_response"), reflect.TypeOf(&ApiGatewayGatewayResponse{})) + ir.RegisterType(load("Aws::Api_gateway_integration"), reflect.TypeOf(&ApiGatewayIntegration{})) + ir.RegisterType(load("Aws::Api_gateway_integration_response"), reflect.TypeOf(&ApiGatewayIntegrationResponse{})) + ir.RegisterType(load("Aws::Api_gateway_method"), reflect.TypeOf(&ApiGatewayMethod{})) + ir.RegisterType(load("Aws::Api_gateway_method_response"), reflect.TypeOf(&ApiGatewayMethodResponse{})) + ir.RegisterType(load("Aws::Api_gateway_method_settings"), reflect.TypeOf(&ApiGatewayMethodSettings{})) + ir.RegisterType(load("Aws::Api_gateway_model"), reflect.TypeOf(&ApiGatewayModel{})) + ir.RegisterType(load("Aws::Api_gateway_request_validator"), reflect.TypeOf(&ApiGatewayRequestValidator{})) + ir.RegisterType(load("Aws::Api_gateway_resource"), reflect.TypeOf(&ApiGatewayResource{})) + ir.RegisterType(load("Aws::Api_gateway_rest_api"), reflect.TypeOf(&ApiGatewayRestApi{})) + ir.RegisterType(load("Aws::Api_gateway_stage"), reflect.TypeOf(&ApiGatewayStage{})) + ir.RegisterType(load("Aws::Api_gateway_usage_plan"), reflect.TypeOf(&ApiGatewayUsagePlan{})) + ir.RegisterType(load("Aws::Api_gateway_usage_plan_key"), reflect.TypeOf(&ApiGatewayUsagePlanKey{})) + ir.RegisterType(load("Aws::Api_gateway_vpc_link"), reflect.TypeOf(&ApiGatewayVpcLink{})) + ir.RegisterType(load("Aws::App_cookie_stickiness_policy"), reflect.TypeOf(&AppCookieStickinessPolicy{})) + ir.RegisterType(load("Aws::Appautoscaling_policy"), reflect.TypeOf(&AppautoscalingPolicy{})) + ir.RegisterType(load("Aws::Appautoscaling_scheduled_action"), reflect.TypeOf(&AppautoscalingScheduledAction{})) + ir.RegisterType(load("Aws::Appautoscaling_target"), reflect.TypeOf(&AppautoscalingTarget{})) + ir.RegisterType(load("Aws::Appmesh_mesh"), reflect.TypeOf(&AppmeshMesh{})) + ir.RegisterType(load("Aws::Appmesh_route"), reflect.TypeOf(&AppmeshRoute{})) + 
ir.RegisterType(load("Aws::Appmesh_virtual_node"), reflect.TypeOf(&AppmeshVirtualNode{})) + ir.RegisterType(load("Aws::Appmesh_virtual_router"), reflect.TypeOf(&AppmeshVirtualRouter{})) + ir.RegisterType(load("Aws::Appsync_api_key"), reflect.TypeOf(&AppsyncApiKey{})) + ir.RegisterType(load("Aws::Appsync_datasource"), reflect.TypeOf(&AppsyncDatasource{})) + ir.RegisterType(load("Aws::Appsync_graphql_api"), reflect.TypeOf(&AppsyncGraphqlApi{})) + ir.RegisterType(load("Aws::Athena_database"), reflect.TypeOf(&AthenaDatabase{})) + ir.RegisterType(load("Aws::Athena_named_query"), reflect.TypeOf(&AthenaNamedQuery{})) + ir.RegisterType(load("Aws::Autoscaling_attachment"), reflect.TypeOf(&AutoscalingAttachment{})) + ir.RegisterType(load("Aws::Autoscaling_group"), reflect.TypeOf(&AutoscalingGroup{})) + ir.RegisterType(load("Aws::Autoscaling_lifecycle_hook"), reflect.TypeOf(&AutoscalingLifecycleHook{})) + ir.RegisterType(load("Aws::Autoscaling_notification"), reflect.TypeOf(&AutoscalingNotification{})) + ir.RegisterType(load("Aws::Autoscaling_policy"), reflect.TypeOf(&AutoscalingPolicy{})) + ir.RegisterType(load("Aws::Autoscaling_schedule"), reflect.TypeOf(&AutoscalingSchedule{})) + ir.RegisterType(load("Aws::Batch_compute_environment"), reflect.TypeOf(&BatchComputeEnvironment{})) + ir.RegisterType(load("Aws::Batch_job_definition"), reflect.TypeOf(&BatchJobDefinition{})) + ir.RegisterType(load("Aws::Batch_job_queue"), reflect.TypeOf(&BatchJobQueue{})) + ir.RegisterType(load("Aws::Budgets_budget"), reflect.TypeOf(&BudgetsBudget{})) + ir.RegisterType(load("Aws::Cloud9_environment_ec2"), reflect.TypeOf(&Cloud9EnvironmentEc2{})) + ir.RegisterType(load("Aws::Cloudformation_stack"), reflect.TypeOf(&CloudformationStack{})) + ir.RegisterType(load("Aws::Cloudfront_distribution"), reflect.TypeOf(&CloudfrontDistribution{})) + ir.RegisterType(load("Aws::Cloudfront_origin_access_identity"), reflect.TypeOf(&CloudfrontOriginAccessIdentity{})) + ir.RegisterType(load("Aws::Cloudfront_public_key"), reflect.TypeOf(&CloudfrontPublicKey{})) + ir.RegisterType(load("Aws::Cloudhsm_v2_cluster"), reflect.TypeOf(&CloudhsmV2Cluster{})) + ir.RegisterType(load("Aws::Cloudhsm_v2_hsm"), reflect.TypeOf(&CloudhsmV2Hsm{})) + ir.RegisterType(load("Aws::Cloudtrail"), reflect.TypeOf(&Cloudtrail{})) + ir.RegisterType(load("Aws::Cloudwatch_dashboard"), reflect.TypeOf(&CloudwatchDashboard{})) + ir.RegisterType(load("Aws::Cloudwatch_event_permission"), reflect.TypeOf(&CloudwatchEventPermission{})) + ir.RegisterType(load("Aws::Cloudwatch_event_rule"), reflect.TypeOf(&CloudwatchEventRule{})) + ir.RegisterType(load("Aws::Cloudwatch_event_target"), reflect.TypeOf(&CloudwatchEventTarget{})) + ir.RegisterType(load("Aws::Cloudwatch_log_destination"), reflect.TypeOf(&CloudwatchLogDestination{})) + ir.RegisterType(load("Aws::Cloudwatch_log_destination_policy"), reflect.TypeOf(&CloudwatchLogDestinationPolicy{})) + ir.RegisterType(load("Aws::Cloudwatch_log_group"), reflect.TypeOf(&CloudwatchLogGroup{})) + ir.RegisterType(load("Aws::Cloudwatch_log_metric_filter"), reflect.TypeOf(&CloudwatchLogMetricFilter{})) + ir.RegisterType(load("Aws::Cloudwatch_log_resource_policy"), reflect.TypeOf(&CloudwatchLogResourcePolicy{})) + ir.RegisterType(load("Aws::Cloudwatch_log_stream"), reflect.TypeOf(&CloudwatchLogStream{})) + ir.RegisterType(load("Aws::Cloudwatch_log_subscription_filter"), reflect.TypeOf(&CloudwatchLogSubscriptionFilter{})) + ir.RegisterType(load("Aws::Cloudwatch_metric_alarm"), reflect.TypeOf(&CloudwatchMetricAlarm{})) + 
ir.RegisterType(load("Aws::Codebuild_project"), reflect.TypeOf(&CodebuildProject{})) + ir.RegisterType(load("Aws::Codebuild_webhook"), reflect.TypeOf(&CodebuildWebhook{})) + ir.RegisterType(load("Aws::Codecommit_repository"), reflect.TypeOf(&CodecommitRepository{})) + ir.RegisterType(load("Aws::Codecommit_trigger"), reflect.TypeOf(&CodecommitTrigger{})) + ir.RegisterType(load("Aws::Codedeploy_app"), reflect.TypeOf(&CodedeployApp{})) + ir.RegisterType(load("Aws::Codedeploy_deployment_config"), reflect.TypeOf(&CodedeployDeploymentConfig{})) + ir.RegisterType(load("Aws::Codedeploy_deployment_group"), reflect.TypeOf(&CodedeployDeploymentGroup{})) + ir.RegisterType(load("Aws::Codepipeline"), reflect.TypeOf(&Codepipeline{})) + ir.RegisterType(load("Aws::Codepipeline_webhook"), reflect.TypeOf(&CodepipelineWebhook{})) + ir.RegisterType(load("Aws::Cognito_identity_pool"), reflect.TypeOf(&CognitoIdentityPool{})) + ir.RegisterType(load("Aws::Cognito_identity_pool_roles_attachment"), reflect.TypeOf(&CognitoIdentityPoolRolesAttachment{})) + ir.RegisterType(load("Aws::Cognito_identity_provider"), reflect.TypeOf(&CognitoIdentityProvider{})) + ir.RegisterType(load("Aws::Cognito_resource_server"), reflect.TypeOf(&CognitoResourceServer{})) + ir.RegisterType(load("Aws::Cognito_user_group"), reflect.TypeOf(&CognitoUserGroup{})) + ir.RegisterType(load("Aws::Cognito_user_pool"), reflect.TypeOf(&CognitoUserPool{})) + ir.RegisterType(load("Aws::Cognito_user_pool_client"), reflect.TypeOf(&CognitoUserPoolClient{})) + ir.RegisterType(load("Aws::Cognito_user_pool_domain"), reflect.TypeOf(&CognitoUserPoolDomain{})) + ir.RegisterType(load("Aws::Config_aggregate_authorization"), reflect.TypeOf(&ConfigAggregateAuthorization{})) + ir.RegisterType(load("Aws::Config_config_rule"), reflect.TypeOf(&ConfigConfigRule{})) + ir.RegisterType(load("Aws::Config_configuration_aggregator"), reflect.TypeOf(&ConfigConfigurationAggregator{})) + ir.RegisterType(load("Aws::Config_configuration_recorder"), reflect.TypeOf(&ConfigConfigurationRecorder{})) + ir.RegisterType(load("Aws::Config_configuration_recorder_status"), reflect.TypeOf(&ConfigConfigurationRecorderStatus{})) + ir.RegisterType(load("Aws::Config_delivery_channel"), reflect.TypeOf(&ConfigDeliveryChannel{})) + ir.RegisterType(load("Aws::Customer_gateway"), reflect.TypeOf(&CustomerGateway{})) + ir.RegisterType(load("Aws::Datasync_agent"), reflect.TypeOf(&DatasyncAgent{})) + ir.RegisterType(load("Aws::Datasync_location_efs"), reflect.TypeOf(&DatasyncLocationEfs{})) + ir.RegisterType(load("Aws::Datasync_location_nfs"), reflect.TypeOf(&DatasyncLocationNfs{})) + ir.RegisterType(load("Aws::Datasync_location_s3"), reflect.TypeOf(&DatasyncLocationS3{})) + ir.RegisterType(load("Aws::Datasync_task"), reflect.TypeOf(&DatasyncTask{})) + ir.RegisterType(load("Aws::Dax_cluster"), reflect.TypeOf(&DaxCluster{})) + ir.RegisterType(load("Aws::Dax_parameter_group"), reflect.TypeOf(&DaxParameterGroup{})) + ir.RegisterType(load("Aws::Dax_subnet_group"), reflect.TypeOf(&DaxSubnetGroup{})) + ir.RegisterType(load("Aws::Db_cluster_snapshot"), reflect.TypeOf(&DbClusterSnapshot{})) + ir.RegisterType(load("Aws::Db_event_subscription"), reflect.TypeOf(&DbEventSubscription{})) + ir.RegisterType(load("Aws::Db_instance"), reflect.TypeOf(&DbInstance{})) + ir.RegisterType(load("Aws::Db_option_group"), reflect.TypeOf(&DbOptionGroup{})) + ir.RegisterType(load("Aws::Db_parameter_group"), reflect.TypeOf(&DbParameterGroup{})) + ir.RegisterType(load("Aws::Db_security_group"), reflect.TypeOf(&DbSecurityGroup{})) + 
ir.RegisterType(load("Aws::Db_snapshot"), reflect.TypeOf(&DbSnapshot{})) + ir.RegisterType(load("Aws::Db_subnet_group"), reflect.TypeOf(&DbSubnetGroup{})) + ir.RegisterType(load("Aws::Default_network_acl"), reflect.TypeOf(&DefaultNetworkAcl{})) + ir.RegisterType(load("Aws::Default_route_table"), reflect.TypeOf(&DefaultRouteTable{})) + ir.RegisterType(load("Aws::Default_security_group"), reflect.TypeOf(&DefaultSecurityGroup{})) + ir.RegisterType(load("Aws::Default_subnet"), reflect.TypeOf(&DefaultSubnet{})) + ir.RegisterType(load("Aws::Default_vpc"), reflect.TypeOf(&DefaultVpc{})) + ir.RegisterType(load("Aws::Default_vpc_dhcp_options"), reflect.TypeOf(&DefaultVpcDhcpOptions{})) + ir.RegisterType(load("Aws::Devicefarm_project"), reflect.TypeOf(&DevicefarmProject{})) + ir.RegisterType(load("Aws::Directory_service_conditional_forwarder"), reflect.TypeOf(&DirectoryServiceConditionalForwarder{})) + ir.RegisterType(load("Aws::Directory_service_directory"), reflect.TypeOf(&DirectoryServiceDirectory{})) + ir.RegisterType(load("Aws::Dlm_lifecycle_policy"), reflect.TypeOf(&DlmLifecyclePolicy{})) + ir.RegisterType(load("Aws::Dms_certificate"), reflect.TypeOf(&DmsCertificate{})) + ir.RegisterType(load("Aws::Dms_endpoint"), reflect.TypeOf(&DmsEndpoint{})) + ir.RegisterType(load("Aws::Dms_replication_instance"), reflect.TypeOf(&DmsReplicationInstance{})) + ir.RegisterType(load("Aws::Dms_replication_subnet_group"), reflect.TypeOf(&DmsReplicationSubnetGroup{})) + ir.RegisterType(load("Aws::Dms_replication_task"), reflect.TypeOf(&DmsReplicationTask{})) + ir.RegisterType(load("Aws::Docdb_cluster_parameter_group"), reflect.TypeOf(&DocdbClusterParameterGroup{})) + ir.RegisterType(load("Aws::Docdb_subnet_group"), reflect.TypeOf(&DocdbSubnetGroup{})) + ir.RegisterType(load("Aws::Dx_bgp_peer"), reflect.TypeOf(&DxBgpPeer{})) + ir.RegisterType(load("Aws::Dx_connection"), reflect.TypeOf(&DxConnection{})) + ir.RegisterType(load("Aws::Dx_connection_association"), reflect.TypeOf(&DxConnectionAssociation{})) + ir.RegisterType(load("Aws::Dx_gateway"), reflect.TypeOf(&DxGateway{})) + ir.RegisterType(load("Aws::Dx_gateway_association"), reflect.TypeOf(&DxGatewayAssociation{})) + ir.RegisterType(load("Aws::Dx_hosted_private_virtual_interface"), reflect.TypeOf(&DxHostedPrivateVirtualInterface{})) + ir.RegisterType(load("Aws::Dx_hosted_private_virtual_interface_accepter"), reflect.TypeOf(&DxHostedPrivateVirtualInterfaceAccepter{})) + ir.RegisterType(load("Aws::Dx_hosted_public_virtual_interface"), reflect.TypeOf(&DxHostedPublicVirtualInterface{})) + ir.RegisterType(load("Aws::Dx_hosted_public_virtual_interface_accepter"), reflect.TypeOf(&DxHostedPublicVirtualInterfaceAccepter{})) + ir.RegisterType(load("Aws::Dx_lag"), reflect.TypeOf(&DxLag{})) + ir.RegisterType(load("Aws::Dx_private_virtual_interface"), reflect.TypeOf(&DxPrivateVirtualInterface{})) + ir.RegisterType(load("Aws::Dx_public_virtual_interface"), reflect.TypeOf(&DxPublicVirtualInterface{})) + ir.RegisterType(load("Aws::Dynamodb_global_table"), reflect.TypeOf(&DynamodbGlobalTable{})) + ir.RegisterType(load("Aws::Dynamodb_table"), reflect.TypeOf(&DynamodbTable{})) + ir.RegisterType(load("Aws::Dynamodb_table_item"), reflect.TypeOf(&DynamodbTableItem{})) + ir.RegisterType(load("Aws::Ebs_snapshot"), reflect.TypeOf(&EbsSnapshot{})) + ir.RegisterType(load("Aws::Ebs_snapshot_copy"), reflect.TypeOf(&EbsSnapshotCopy{})) + ir.RegisterType(load("Aws::Ebs_volume"), reflect.TypeOf(&EbsVolume{})) + ir.RegisterType(load("Aws::Ec2_capacity_reservation"), 
reflect.TypeOf(&Ec2CapacityReservation{})) + ir.RegisterType(load("Aws::Ec2_fleet"), reflect.TypeOf(&Ec2Fleet{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway"), reflect.TypeOf(&Ec2TransitGateway{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway_route"), reflect.TypeOf(&Ec2TransitGatewayRoute{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway_route_table"), reflect.TypeOf(&Ec2TransitGatewayRouteTable{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway_route_table_association"), reflect.TypeOf(&Ec2TransitGatewayRouteTableAssociation{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway_route_table_propagation"), reflect.TypeOf(&Ec2TransitGatewayRouteTablePropagation{})) + ir.RegisterType(load("Aws::Ec2_transit_gateway_vpc_attachment"), reflect.TypeOf(&Ec2TransitGatewayVpcAttachment{})) + ir.RegisterType(load("Aws::Ecr_lifecycle_policy"), reflect.TypeOf(&EcrLifecyclePolicy{})) + ir.RegisterType(load("Aws::Ecr_repository"), reflect.TypeOf(&EcrRepository{})) + ir.RegisterType(load("Aws::Ecr_repository_policy"), reflect.TypeOf(&EcrRepositoryPolicy{})) + ir.RegisterType(load("Aws::Ecs_cluster"), reflect.TypeOf(&EcsCluster{})) + ir.RegisterType(load("Aws::Ecs_service"), reflect.TypeOf(&EcsService{})) + ir.RegisterType(load("Aws::Ecs_task_definition"), reflect.TypeOf(&EcsTaskDefinition{})) + ir.RegisterType(load("Aws::Efs_file_system"), reflect.TypeOf(&EfsFileSystem{})) + ir.RegisterType(load("Aws::Efs_mount_target"), reflect.TypeOf(&EfsMountTarget{})) + ir.RegisterType(load("Aws::Egress_only_internet_gateway"), reflect.TypeOf(&EgressOnlyInternetGateway{})) + ir.RegisterType(load("Aws::Eip"), reflect.TypeOf(&Eip{})) + ir.RegisterType(load("Aws::Eip_association"), reflect.TypeOf(&EipAssociation{})) + ir.RegisterType(load("Aws::Eks_cluster"), reflect.TypeOf(&EksCluster{})) + ir.RegisterType(load("Aws::Elastic_beanstalk_application"), reflect.TypeOf(&ElasticBeanstalkApplication{})) + ir.RegisterType(load("Aws::Elastic_beanstalk_application_version"), reflect.TypeOf(&ElasticBeanstalkApplicationVersion{})) + ir.RegisterType(load("Aws::Elastic_beanstalk_configuration_template"), reflect.TypeOf(&ElasticBeanstalkConfigurationTemplate{})) + ir.RegisterType(load("Aws::Elastic_beanstalk_environment"), reflect.TypeOf(&ElasticBeanstalkEnvironment{})) + ir.RegisterType(load("Aws::Elasticache_cluster"), reflect.TypeOf(&ElasticacheCluster{})) + ir.RegisterType(load("Aws::Elasticache_parameter_group"), reflect.TypeOf(&ElasticacheParameterGroup{})) + ir.RegisterType(load("Aws::Elasticache_replication_group"), reflect.TypeOf(&ElasticacheReplicationGroup{})) + ir.RegisterType(load("Aws::Elasticache_security_group"), reflect.TypeOf(&ElasticacheSecurityGroup{})) + ir.RegisterType(load("Aws::Elasticache_subnet_group"), reflect.TypeOf(&ElasticacheSubnetGroup{})) + ir.RegisterType(load("Aws::Elasticsearch_domain"), reflect.TypeOf(&ElasticsearchDomain{})) + ir.RegisterType(load("Aws::Elasticsearch_domain_policy"), reflect.TypeOf(&ElasticsearchDomainPolicy{})) + ir.RegisterType(load("Aws::Elastictranscoder_pipeline"), reflect.TypeOf(&ElastictranscoderPipeline{})) + ir.RegisterType(load("Aws::Elastictranscoder_preset"), reflect.TypeOf(&ElastictranscoderPreset{})) + ir.RegisterType(load("Aws::Elb"), reflect.TypeOf(&Elb{})) + ir.RegisterType(load("Aws::Elb_attachment"), reflect.TypeOf(&ElbAttachment{})) + ir.RegisterType(load("Aws::Emr_cluster"), reflect.TypeOf(&EmrCluster{})) + ir.RegisterType(load("Aws::Emr_instance_group"), reflect.TypeOf(&EmrInstanceGroup{})) + 
ir.RegisterType(load("Aws::Emr_security_configuration"), reflect.TypeOf(&EmrSecurityConfiguration{})) + ir.RegisterType(load("Aws::Flow_log"), reflect.TypeOf(&FlowLog{})) + ir.RegisterType(load("Aws::Gamelift_alias"), reflect.TypeOf(&GameliftAlias{})) + ir.RegisterType(load("Aws::Gamelift_build"), reflect.TypeOf(&GameliftBuild{})) + ir.RegisterType(load("Aws::Gamelift_fleet"), reflect.TypeOf(&GameliftFleet{})) + ir.RegisterType(load("Aws::Gamelift_game_session_queue"), reflect.TypeOf(&GameliftGameSessionQueue{})) + ir.RegisterType(load("Aws::GenericHandler"), reflect.TypeOf(&GenericHandler{})) + ir.RegisterType(load("Aws::Glacier_vault"), reflect.TypeOf(&GlacierVault{})) + ir.RegisterType(load("Aws::Glacier_vault_lock"), reflect.TypeOf(&GlacierVaultLock{})) + ir.RegisterType(load("Aws::Globalaccelerator_accelerator"), reflect.TypeOf(&GlobalacceleratorAccelerator{})) + ir.RegisterType(load("Aws::Glue_catalog_database"), reflect.TypeOf(&GlueCatalogDatabase{})) + ir.RegisterType(load("Aws::Glue_catalog_table"), reflect.TypeOf(&GlueCatalogTable{})) + ir.RegisterType(load("Aws::Glue_classifier"), reflect.TypeOf(&GlueClassifier{})) + ir.RegisterType(load("Aws::Glue_connection"), reflect.TypeOf(&GlueConnection{})) + ir.RegisterType(load("Aws::Glue_crawler"), reflect.TypeOf(&GlueCrawler{})) + ir.RegisterType(load("Aws::Glue_job"), reflect.TypeOf(&GlueJob{})) + ir.RegisterType(load("Aws::Glue_security_configuration"), reflect.TypeOf(&GlueSecurityConfiguration{})) + ir.RegisterType(load("Aws::Glue_trigger"), reflect.TypeOf(&GlueTrigger{})) + ir.RegisterType(load("Aws::Guardduty_detector"), reflect.TypeOf(&GuarddutyDetector{})) + ir.RegisterType(load("Aws::Guardduty_ipset"), reflect.TypeOf(&GuarddutyIpset{})) + ir.RegisterType(load("Aws::Guardduty_member"), reflect.TypeOf(&GuarddutyMember{})) + ir.RegisterType(load("Aws::Guardduty_threatintelset"), reflect.TypeOf(&GuarddutyThreatintelset{})) + ir.RegisterType(load("Aws::Iam_access_key"), reflect.TypeOf(&IamAccessKey{})) + ir.RegisterType(load("Aws::Iam_account_alias"), reflect.TypeOf(&IamAccountAlias{})) + ir.RegisterType(load("Aws::Iam_account_password_policy"), reflect.TypeOf(&IamAccountPasswordPolicy{})) + ir.RegisterType(load("Aws::Iam_group"), reflect.TypeOf(&IamGroup{})) + ir.RegisterType(load("Aws::Iam_group_membership"), reflect.TypeOf(&IamGroupMembership{})) + ir.RegisterType(load("Aws::Iam_group_policy"), reflect.TypeOf(&IamGroupPolicy{})) + ir.RegisterType(load("Aws::Iam_group_policy_attachment"), reflect.TypeOf(&IamGroupPolicyAttachment{})) + ir.RegisterType(load("Aws::Iam_instance_profile"), reflect.TypeOf(&IamInstanceProfile{})) + ir.RegisterType(load("Aws::Iam_openid_connect_provider"), reflect.TypeOf(&IamOpenidConnectProvider{})) + ir.RegisterType(load("Aws::Iam_policy"), reflect.TypeOf(&IamPolicy{})) + ir.RegisterType(load("Aws::Iam_policy_attachment"), reflect.TypeOf(&IamPolicyAttachment{})) + ir.RegisterType(load("Aws::Iam_role"), reflect.TypeOf(&IamRole{})) + ir.RegisterType(load("Aws::Iam_role_policy"), reflect.TypeOf(&IamRolePolicy{})) + ir.RegisterType(load("Aws::Iam_role_policy_attachment"), reflect.TypeOf(&IamRolePolicyAttachment{})) + ir.RegisterType(load("Aws::Iam_saml_provider"), reflect.TypeOf(&IamSamlProvider{})) + ir.RegisterType(load("Aws::Iam_server_certificate"), reflect.TypeOf(&IamServerCertificate{})) + ir.RegisterType(load("Aws::Iam_service_linked_role"), reflect.TypeOf(&IamServiceLinkedRole{})) + ir.RegisterType(load("Aws::Iam_user"), reflect.TypeOf(&IamUser{})) + 
ir.RegisterType(load("Aws::Iam_user_group_membership"), reflect.TypeOf(&IamUserGroupMembership{})) + ir.RegisterType(load("Aws::Iam_user_login_profile"), reflect.TypeOf(&IamUserLoginProfile{})) + ir.RegisterType(load("Aws::Iam_user_policy"), reflect.TypeOf(&IamUserPolicy{})) + ir.RegisterType(load("Aws::Iam_user_policy_attachment"), reflect.TypeOf(&IamUserPolicyAttachment{})) + ir.RegisterType(load("Aws::Iam_user_ssh_key"), reflect.TypeOf(&IamUserSshKey{})) + ir.RegisterType(load("Aws::Inspector_assessment_target"), reflect.TypeOf(&InspectorAssessmentTarget{})) + ir.RegisterType(load("Aws::Inspector_assessment_template"), reflect.TypeOf(&InspectorAssessmentTemplate{})) + ir.RegisterType(load("Aws::Inspector_resource_group"), reflect.TypeOf(&InspectorResourceGroup{})) + ir.RegisterType(load("Aws::Instance"), reflect.TypeOf(&Instance{})) + ir.RegisterType(load("Aws::Internet_gateway"), reflect.TypeOf(&InternetGateway{})) + ir.RegisterType(load("Aws::Iot_certificate"), reflect.TypeOf(&IotCertificate{})) + ir.RegisterType(load("Aws::Iot_policy"), reflect.TypeOf(&IotPolicy{})) + ir.RegisterType(load("Aws::Iot_policy_attachment"), reflect.TypeOf(&IotPolicyAttachment{})) + ir.RegisterType(load("Aws::Iot_thing"), reflect.TypeOf(&IotThing{})) + ir.RegisterType(load("Aws::Iot_thing_principal_attachment"), reflect.TypeOf(&IotThingPrincipalAttachment{})) + ir.RegisterType(load("Aws::Iot_thing_type"), reflect.TypeOf(&IotThingType{})) + ir.RegisterType(load("Aws::Iot_topic_rule"), reflect.TypeOf(&IotTopicRule{})) + ir.RegisterType(load("Aws::Key_pair"), reflect.TypeOf(&KeyPair{})) + ir.RegisterType(load("Aws::Kinesis_analytics_application"), reflect.TypeOf(&KinesisAnalyticsApplication{})) + ir.RegisterType(load("Aws::Kinesis_firehose_delivery_stream"), reflect.TypeOf(&KinesisFirehoseDeliveryStream{})) + ir.RegisterType(load("Aws::Kinesis_stream"), reflect.TypeOf(&KinesisStream{})) + ir.RegisterType(load("Aws::Kms_alias"), reflect.TypeOf(&KmsAlias{})) + ir.RegisterType(load("Aws::Kms_grant"), reflect.TypeOf(&KmsGrant{})) + ir.RegisterType(load("Aws::Kms_key"), reflect.TypeOf(&KmsKey{})) + ir.RegisterType(load("Aws::Lambda_alias"), reflect.TypeOf(&LambdaAlias{})) + ir.RegisterType(load("Aws::Lambda_event_source_mapping"), reflect.TypeOf(&LambdaEventSourceMapping{})) + ir.RegisterType(load("Aws::Lambda_function"), reflect.TypeOf(&LambdaFunction{})) + ir.RegisterType(load("Aws::Lambda_layer_version"), reflect.TypeOf(&LambdaLayerVersion{})) + ir.RegisterType(load("Aws::Lambda_permission"), reflect.TypeOf(&LambdaPermission{})) + ir.RegisterType(load("Aws::Launch_configuration"), reflect.TypeOf(&LaunchConfiguration{})) + ir.RegisterType(load("Aws::Launch_template"), reflect.TypeOf(&LaunchTemplate{})) + ir.RegisterType(load("Aws::Lb"), reflect.TypeOf(&Lb{})) + ir.RegisterType(load("Aws::Lb_cookie_stickiness_policy"), reflect.TypeOf(&LbCookieStickinessPolicy{})) + ir.RegisterType(load("Aws::Lb_listener"), reflect.TypeOf(&LbListener{})) + ir.RegisterType(load("Aws::Lb_listener_certificate"), reflect.TypeOf(&LbListenerCertificate{})) + ir.RegisterType(load("Aws::Lb_listener_rule"), reflect.TypeOf(&LbListenerRule{})) + ir.RegisterType(load("Aws::Lb_ssl_negotiation_policy"), reflect.TypeOf(&LbSslNegotiationPolicy{})) + ir.RegisterType(load("Aws::Lb_target_group"), reflect.TypeOf(&LbTargetGroup{})) + ir.RegisterType(load("Aws::Lb_target_group_attachment"), reflect.TypeOf(&LbTargetGroupAttachment{})) + ir.RegisterType(load("Aws::Licensemanager_association"), reflect.TypeOf(&LicensemanagerAssociation{})) + 
ir.RegisterType(load("Aws::Licensemanager_license_configuration"), reflect.TypeOf(&LicensemanagerLicenseConfiguration{})) + ir.RegisterType(load("Aws::Lightsail_domain"), reflect.TypeOf(&LightsailDomain{})) + ir.RegisterType(load("Aws::Lightsail_instance"), reflect.TypeOf(&LightsailInstance{})) + ir.RegisterType(load("Aws::Lightsail_key_pair"), reflect.TypeOf(&LightsailKeyPair{})) + ir.RegisterType(load("Aws::Lightsail_static_ip"), reflect.TypeOf(&LightsailStaticIp{})) + ir.RegisterType(load("Aws::Lightsail_static_ip_attachment"), reflect.TypeOf(&LightsailStaticIpAttachment{})) + ir.RegisterType(load("Aws::Load_balancer_backend_server_policy"), reflect.TypeOf(&LoadBalancerBackendServerPolicy{})) + ir.RegisterType(load("Aws::Load_balancer_listener_policy"), reflect.TypeOf(&LoadBalancerListenerPolicy{})) + ir.RegisterType(load("Aws::Load_balancer_policy"), reflect.TypeOf(&LoadBalancerPolicy{})) + ir.RegisterType(load("Aws::Macie_member_account_association"), reflect.TypeOf(&MacieMemberAccountAssociation{})) + ir.RegisterType(load("Aws::Macie_s3_bucket_association"), reflect.TypeOf(&MacieS3BucketAssociation{})) + ir.RegisterType(load("Aws::Main_route_table_association"), reflect.TypeOf(&MainRouteTableAssociation{})) + ir.RegisterType(load("Aws::Media_package_channel"), reflect.TypeOf(&MediaPackageChannel{})) + ir.RegisterType(load("Aws::Media_store_container"), reflect.TypeOf(&MediaStoreContainer{})) + ir.RegisterType(load("Aws::Media_store_container_policy"), reflect.TypeOf(&MediaStoreContainerPolicy{})) + ir.RegisterType(load("Aws::Mq_broker"), reflect.TypeOf(&MqBroker{})) + ir.RegisterType(load("Aws::Mq_configuration"), reflect.TypeOf(&MqConfiguration{})) + ir.RegisterType(load("Aws::Nat_gateway"), reflect.TypeOf(&NatGateway{})) + ir.RegisterType(load("Aws::Neptune_cluster"), reflect.TypeOf(&NeptuneCluster{})) + ir.RegisterType(load("Aws::Neptune_cluster_instance"), reflect.TypeOf(&NeptuneClusterInstance{})) + ir.RegisterType(load("Aws::Neptune_cluster_parameter_group"), reflect.TypeOf(&NeptuneClusterParameterGroup{})) + ir.RegisterType(load("Aws::Neptune_cluster_snapshot"), reflect.TypeOf(&NeptuneClusterSnapshot{})) + ir.RegisterType(load("Aws::Neptune_event_subscription"), reflect.TypeOf(&NeptuneEventSubscription{})) + ir.RegisterType(load("Aws::Neptune_parameter_group"), reflect.TypeOf(&NeptuneParameterGroup{})) + ir.RegisterType(load("Aws::Neptune_subnet_group"), reflect.TypeOf(&NeptuneSubnetGroup{})) + ir.RegisterType(load("Aws::Network_acl"), reflect.TypeOf(&NetworkAcl{})) + ir.RegisterType(load("Aws::Network_acl_rule"), reflect.TypeOf(&NetworkAclRule{})) + ir.RegisterType(load("Aws::Network_interface"), reflect.TypeOf(&NetworkInterface{})) + ir.RegisterType(load("Aws::Network_interface_attachment"), reflect.TypeOf(&NetworkInterfaceAttachment{})) + ir.RegisterType(load("Aws::Network_interface_sg_attachment"), reflect.TypeOf(&NetworkInterfaceSgAttachment{})) + ir.RegisterType(load("Aws::Opsworks_application"), reflect.TypeOf(&OpsworksApplication{})) + ir.RegisterType(load("Aws::Opsworks_custom_layer"), reflect.TypeOf(&OpsworksCustomLayer{})) + ir.RegisterType(load("Aws::Opsworks_ganglia_layer"), reflect.TypeOf(&OpsworksGangliaLayer{})) + ir.RegisterType(load("Aws::Opsworks_haproxy_layer"), reflect.TypeOf(&OpsworksHaproxyLayer{})) + ir.RegisterType(load("Aws::Opsworks_instance"), reflect.TypeOf(&OpsworksInstance{})) + ir.RegisterType(load("Aws::Opsworks_java_app_layer"), reflect.TypeOf(&OpsworksJavaAppLayer{})) + ir.RegisterType(load("Aws::Opsworks_memcached_layer"), 
reflect.TypeOf(&OpsworksMemcachedLayer{})) + ir.RegisterType(load("Aws::Opsworks_mysql_layer"), reflect.TypeOf(&OpsworksMysqlLayer{})) + ir.RegisterType(load("Aws::Opsworks_nodejs_app_layer"), reflect.TypeOf(&OpsworksNodejsAppLayer{})) + ir.RegisterType(load("Aws::Opsworks_permission"), reflect.TypeOf(&OpsworksPermission{})) + ir.RegisterType(load("Aws::Opsworks_php_app_layer"), reflect.TypeOf(&OpsworksPhpAppLayer{})) + ir.RegisterType(load("Aws::Opsworks_rails_app_layer"), reflect.TypeOf(&OpsworksRailsAppLayer{})) + ir.RegisterType(load("Aws::Opsworks_rds_db_instance"), reflect.TypeOf(&OpsworksRdsDbInstance{})) + ir.RegisterType(load("Aws::Opsworks_stack"), reflect.TypeOf(&OpsworksStack{})) + ir.RegisterType(load("Aws::Opsworks_static_web_layer"), reflect.TypeOf(&OpsworksStaticWebLayer{})) + ir.RegisterType(load("Aws::Opsworks_user_profile"), reflect.TypeOf(&OpsworksUserProfile{})) + ir.RegisterType(load("Aws::Organizations_account"), reflect.TypeOf(&OrganizationsAccount{})) + ir.RegisterType(load("Aws::Organizations_organization"), reflect.TypeOf(&OrganizationsOrganization{})) + ir.RegisterType(load("Aws::Organizations_policy"), reflect.TypeOf(&OrganizationsPolicy{})) + ir.RegisterType(load("Aws::Organizations_policy_attachment"), reflect.TypeOf(&OrganizationsPolicyAttachment{})) + ir.RegisterType(load("Aws::Pinpoint_adm_channel"), reflect.TypeOf(&PinpointAdmChannel{})) + ir.RegisterType(load("Aws::Pinpoint_apns_channel"), reflect.TypeOf(&PinpointApnsChannel{})) + ir.RegisterType(load("Aws::Pinpoint_apns_sandbox_channel"), reflect.TypeOf(&PinpointApnsSandboxChannel{})) + ir.RegisterType(load("Aws::Pinpoint_apns_voip_channel"), reflect.TypeOf(&PinpointApnsVoipChannel{})) + ir.RegisterType(load("Aws::Pinpoint_apns_voip_sandbox_channel"), reflect.TypeOf(&PinpointApnsVoipSandboxChannel{})) + ir.RegisterType(load("Aws::Pinpoint_app"), reflect.TypeOf(&PinpointApp{})) + ir.RegisterType(load("Aws::Pinpoint_baidu_channel"), reflect.TypeOf(&PinpointBaiduChannel{})) + ir.RegisterType(load("Aws::Pinpoint_email_channel"), reflect.TypeOf(&PinpointEmailChannel{})) + ir.RegisterType(load("Aws::Pinpoint_event_stream"), reflect.TypeOf(&PinpointEventStream{})) + ir.RegisterType(load("Aws::Pinpoint_gcm_channel"), reflect.TypeOf(&PinpointGcmChannel{})) + ir.RegisterType(load("Aws::Pinpoint_sms_channel"), reflect.TypeOf(&PinpointSmsChannel{})) + ir.RegisterType(load("Aws::Placement_group"), reflect.TypeOf(&PlacementGroup{})) + ir.RegisterType(load("Aws::Proxy_protocol_policy"), reflect.TypeOf(&ProxyProtocolPolicy{})) + ir.RegisterType(load("Aws::Ram_resource_share"), reflect.TypeOf(&RamResourceShare{})) + ir.RegisterType(load("Aws::Rds_cluster"), reflect.TypeOf(&RdsCluster{})) + ir.RegisterType(load("Aws::Rds_cluster_endpoint"), reflect.TypeOf(&RdsClusterEndpoint{})) + ir.RegisterType(load("Aws::Rds_cluster_instance"), reflect.TypeOf(&RdsClusterInstance{})) + ir.RegisterType(load("Aws::Rds_cluster_parameter_group"), reflect.TypeOf(&RdsClusterParameterGroup{})) + ir.RegisterType(load("Aws::Rds_global_cluster"), reflect.TypeOf(&RdsGlobalCluster{})) + ir.RegisterType(load("Aws::Redshift_cluster"), reflect.TypeOf(&RedshiftCluster{})) + ir.RegisterType(load("Aws::Redshift_event_subscription"), reflect.TypeOf(&RedshiftEventSubscription{})) + ir.RegisterType(load("Aws::Redshift_parameter_group"), reflect.TypeOf(&RedshiftParameterGroup{})) + ir.RegisterType(load("Aws::Redshift_security_group"), reflect.TypeOf(&RedshiftSecurityGroup{})) + ir.RegisterType(load("Aws::Redshift_snapshot_copy_grant"), 
reflect.TypeOf(&RedshiftSnapshotCopyGrant{})) + ir.RegisterType(load("Aws::Redshift_subnet_group"), reflect.TypeOf(&RedshiftSubnetGroup{})) + ir.RegisterType(load("Aws::Resourcegroups_group"), reflect.TypeOf(&ResourcegroupsGroup{})) + ir.RegisterType(load("Aws::Route"), reflect.TypeOf(&Route{})) + ir.RegisterType(load("Aws::Route53_delegation_set"), reflect.TypeOf(&Route53DelegationSet{})) + ir.RegisterType(load("Aws::Route53_health_check"), reflect.TypeOf(&Route53HealthCheck{})) + ir.RegisterType(load("Aws::Route53_query_log"), reflect.TypeOf(&Route53QueryLog{})) + ir.RegisterType(load("Aws::Route53_record"), reflect.TypeOf(&Route53Record{})) + ir.RegisterType(load("Aws::Route53_zone"), reflect.TypeOf(&Route53Zone{})) + ir.RegisterType(load("Aws::Route53_zone_association"), reflect.TypeOf(&Route53ZoneAssociation{})) + ir.RegisterType(load("Aws::Route_table"), reflect.TypeOf(&RouteTable{})) + ir.RegisterType(load("Aws::Route_table_association"), reflect.TypeOf(&RouteTableAssociation{})) + ir.RegisterType(load("Aws::S3_account_public_access_block"), reflect.TypeOf(&S3AccountPublicAccessBlock{})) + ir.RegisterType(load("Aws::S3_bucket"), reflect.TypeOf(&S3Bucket{})) + ir.RegisterType(load("Aws::S3_bucket_inventory"), reflect.TypeOf(&S3BucketInventory{})) + ir.RegisterType(load("Aws::S3_bucket_metric"), reflect.TypeOf(&S3BucketMetric{})) + ir.RegisterType(load("Aws::S3_bucket_notification"), reflect.TypeOf(&S3BucketNotification{})) + ir.RegisterType(load("Aws::S3_bucket_object"), reflect.TypeOf(&S3BucketObject{})) + ir.RegisterType(load("Aws::S3_bucket_policy"), reflect.TypeOf(&S3BucketPolicy{})) + ir.RegisterType(load("Aws::S3_bucket_public_access_block"), reflect.TypeOf(&S3BucketPublicAccessBlock{})) + ir.RegisterType(load("Aws::Sagemaker_notebook_instance"), reflect.TypeOf(&SagemakerNotebookInstance{})) + ir.RegisterType(load("Aws::Secretsmanager_secret"), reflect.TypeOf(&SecretsmanagerSecret{})) + ir.RegisterType(load("Aws::Secretsmanager_secret_version"), reflect.TypeOf(&SecretsmanagerSecretVersion{})) + ir.RegisterType(load("Aws::Security_group"), reflect.TypeOf(&SecurityGroup{})) + ir.RegisterType(load("Aws::Security_group_rule"), reflect.TypeOf(&SecurityGroupRule{})) + ir.RegisterType(load("Aws::Securityhub_account"), reflect.TypeOf(&SecurityhubAccount{})) + ir.RegisterType(load("Aws::Securityhub_product_subscription"), reflect.TypeOf(&SecurityhubProductSubscription{})) + ir.RegisterType(load("Aws::Securityhub_standards_subscription"), reflect.TypeOf(&SecurityhubStandardsSubscription{})) + ir.RegisterType(load("Aws::Service_discovery_http_namespace"), reflect.TypeOf(&ServiceDiscoveryHttpNamespace{})) + ir.RegisterType(load("Aws::Service_discovery_private_dns_namespace"), reflect.TypeOf(&ServiceDiscoveryPrivateDnsNamespace{})) + ir.RegisterType(load("Aws::Service_discovery_public_dns_namespace"), reflect.TypeOf(&ServiceDiscoveryPublicDnsNamespace{})) + ir.RegisterType(load("Aws::Service_discovery_service"), reflect.TypeOf(&ServiceDiscoveryService{})) + ir.RegisterType(load("Aws::Servicecatalog_portfolio"), reflect.TypeOf(&ServicecatalogPortfolio{})) + ir.RegisterType(load("Aws::Ses_active_receipt_rule_set"), reflect.TypeOf(&SesActiveReceiptRuleSet{})) + ir.RegisterType(load("Aws::Ses_configuration_set"), reflect.TypeOf(&SesConfigurationSet{})) + ir.RegisterType(load("Aws::Ses_domain_dkim"), reflect.TypeOf(&SesDomainDkim{})) + ir.RegisterType(load("Aws::Ses_domain_identity"), reflect.TypeOf(&SesDomainIdentity{})) + ir.RegisterType(load("Aws::Ses_domain_identity_verification"), 
reflect.TypeOf(&SesDomainIdentityVerification{})) + ir.RegisterType(load("Aws::Ses_domain_mail_from"), reflect.TypeOf(&SesDomainMailFrom{})) + ir.RegisterType(load("Aws::Ses_event_destination"), reflect.TypeOf(&SesEventDestination{})) + ir.RegisterType(load("Aws::Ses_identity_notification_topic"), reflect.TypeOf(&SesIdentityNotificationTopic{})) + ir.RegisterType(load("Aws::Ses_receipt_filter"), reflect.TypeOf(&SesReceiptFilter{})) + ir.RegisterType(load("Aws::Ses_receipt_rule"), reflect.TypeOf(&SesReceiptRule{})) + ir.RegisterType(load("Aws::Ses_receipt_rule_set"), reflect.TypeOf(&SesReceiptRuleSet{})) + ir.RegisterType(load("Aws::Ses_template"), reflect.TypeOf(&SesTemplate{})) + ir.RegisterType(load("Aws::Sfn_activity"), reflect.TypeOf(&SfnActivity{})) + ir.RegisterType(load("Aws::Sfn_state_machine"), reflect.TypeOf(&SfnStateMachine{})) + ir.RegisterType(load("Aws::Simpledb_domain"), reflect.TypeOf(&SimpledbDomain{})) + ir.RegisterType(load("Aws::Snapshot_create_volume_permission"), reflect.TypeOf(&SnapshotCreateVolumePermission{})) + ir.RegisterType(load("Aws::Sns_platform_application"), reflect.TypeOf(&SnsPlatformApplication{})) + ir.RegisterType(load("Aws::Sns_sms_preferences"), reflect.TypeOf(&SnsSmsPreferences{})) + ir.RegisterType(load("Aws::Sns_topic"), reflect.TypeOf(&SnsTopic{})) + ir.RegisterType(load("Aws::Sns_topic_policy"), reflect.TypeOf(&SnsTopicPolicy{})) + ir.RegisterType(load("Aws::Sns_topic_subscription"), reflect.TypeOf(&SnsTopicSubscription{})) + ir.RegisterType(load("Aws::Spot_datafeed_subscription"), reflect.TypeOf(&SpotDatafeedSubscription{})) + ir.RegisterType(load("Aws::Spot_fleet_request"), reflect.TypeOf(&SpotFleetRequest{})) + ir.RegisterType(load("Aws::Spot_instance_request"), reflect.TypeOf(&SpotInstanceRequest{})) + ir.RegisterType(load("Aws::Sqs_queue"), reflect.TypeOf(&SqsQueue{})) + ir.RegisterType(load("Aws::Sqs_queue_policy"), reflect.TypeOf(&SqsQueuePolicy{})) + ir.RegisterType(load("Aws::Ssm_activation"), reflect.TypeOf(&SsmActivation{})) + ir.RegisterType(load("Aws::Ssm_association"), reflect.TypeOf(&SsmAssociation{})) + ir.RegisterType(load("Aws::Ssm_document"), reflect.TypeOf(&SsmDocument{})) + ir.RegisterType(load("Aws::Ssm_maintenance_window"), reflect.TypeOf(&SsmMaintenanceWindow{})) + ir.RegisterType(load("Aws::Ssm_maintenance_window_target"), reflect.TypeOf(&SsmMaintenanceWindowTarget{})) + ir.RegisterType(load("Aws::Ssm_maintenance_window_task"), reflect.TypeOf(&SsmMaintenanceWindowTask{})) + ir.RegisterType(load("Aws::Ssm_parameter"), reflect.TypeOf(&SsmParameter{})) + ir.RegisterType(load("Aws::Ssm_patch_baseline"), reflect.TypeOf(&SsmPatchBaseline{})) + ir.RegisterType(load("Aws::Ssm_patch_group"), reflect.TypeOf(&SsmPatchGroup{})) + ir.RegisterType(load("Aws::Ssm_resource_data_sync"), reflect.TypeOf(&SsmResourceDataSync{})) + ir.RegisterType(load("Aws::Storagegateway_cache"), reflect.TypeOf(&StoragegatewayCache{})) + ir.RegisterType(load("Aws::Storagegateway_cached_iscsi_volume"), reflect.TypeOf(&StoragegatewayCachedIscsiVolume{})) + ir.RegisterType(load("Aws::Storagegateway_gateway"), reflect.TypeOf(&StoragegatewayGateway{})) + ir.RegisterType(load("Aws::Storagegateway_nfs_file_share"), reflect.TypeOf(&StoragegatewayNfsFileShare{})) + ir.RegisterType(load("Aws::Storagegateway_smb_file_share"), reflect.TypeOf(&StoragegatewaySmbFileShare{})) + ir.RegisterType(load("Aws::Storagegateway_upload_buffer"), reflect.TypeOf(&StoragegatewayUploadBuffer{})) + ir.RegisterType(load("Aws::Storagegateway_working_storage"), 
reflect.TypeOf(&StoragegatewayWorkingStorage{})) + ir.RegisterType(load("Aws::Subnet"), reflect.TypeOf(&Subnet{})) + ir.RegisterType(load("Aws::Swf_domain"), reflect.TypeOf(&SwfDomain{})) + ir.RegisterType(load("Aws::Transfer_server"), reflect.TypeOf(&TransferServer{})) + ir.RegisterType(load("Aws::Transfer_ssh_key"), reflect.TypeOf(&TransferSshKey{})) + ir.RegisterType(load("Aws::Transfer_user"), reflect.TypeOf(&TransferUser{})) + ir.RegisterType(load("Aws::Volume_attachment"), reflect.TypeOf(&VolumeAttachment{})) + ir.RegisterType(load("Aws::Vpc"), reflect.TypeOf(&Vpc{})) + ir.RegisterType(load("Aws::Vpc_dhcp_options"), reflect.TypeOf(&VpcDhcpOptions{})) + ir.RegisterType(load("Aws::Vpc_dhcp_options_association"), reflect.TypeOf(&VpcDhcpOptionsAssociation{})) + ir.RegisterType(load("Aws::Vpc_endpoint"), reflect.TypeOf(&VpcEndpoint{})) + ir.RegisterType(load("Aws::Vpc_endpoint_connection_notification"), reflect.TypeOf(&VpcEndpointConnectionNotification{})) + ir.RegisterType(load("Aws::Vpc_endpoint_route_table_association"), reflect.TypeOf(&VpcEndpointRouteTableAssociation{})) + ir.RegisterType(load("Aws::Vpc_endpoint_service"), reflect.TypeOf(&VpcEndpointService{})) + ir.RegisterType(load("Aws::Vpc_endpoint_service_allowed_principal"), reflect.TypeOf(&VpcEndpointServiceAllowedPrincipal{})) + ir.RegisterType(load("Aws::Vpc_endpoint_subnet_association"), reflect.TypeOf(&VpcEndpointSubnetAssociation{})) + ir.RegisterType(load("Aws::Vpc_ipv4_cidr_block_association"), reflect.TypeOf(&VpcIpv4CidrBlockAssociation{})) + ir.RegisterType(load("Aws::Vpc_peering_connection"), reflect.TypeOf(&VpcPeeringConnection{})) + ir.RegisterType(load("Aws::Vpc_peering_connection_accepter"), reflect.TypeOf(&VpcPeeringConnectionAccepter{})) + ir.RegisterType(load("Aws::Vpc_peering_connection_options"), reflect.TypeOf(&VpcPeeringConnectionOptions{})) + ir.RegisterType(load("Aws::Vpn_connection"), reflect.TypeOf(&VpnConnection{})) + ir.RegisterType(load("Aws::Vpn_connection_route"), reflect.TypeOf(&VpnConnectionRoute{})) + ir.RegisterType(load("Aws::Vpn_gateway"), reflect.TypeOf(&VpnGateway{})) + ir.RegisterType(load("Aws::Vpn_gateway_attachment"), reflect.TypeOf(&VpnGatewayAttachment{})) + ir.RegisterType(load("Aws::Vpn_gateway_route_propagation"), reflect.TypeOf(&VpnGatewayRoutePropagation{})) + ir.RegisterType(load("Aws::Waf_byte_match_set"), reflect.TypeOf(&WafByteMatchSet{})) + ir.RegisterType(load("Aws::Waf_geo_match_set"), reflect.TypeOf(&WafGeoMatchSet{})) + ir.RegisterType(load("Aws::Waf_ipset"), reflect.TypeOf(&WafIpset{})) + ir.RegisterType(load("Aws::Waf_rate_based_rule"), reflect.TypeOf(&WafRateBasedRule{})) + ir.RegisterType(load("Aws::Waf_regex_match_set"), reflect.TypeOf(&WafRegexMatchSet{})) + ir.RegisterType(load("Aws::Waf_regex_pattern_set"), reflect.TypeOf(&WafRegexPatternSet{})) + ir.RegisterType(load("Aws::Waf_rule"), reflect.TypeOf(&WafRule{})) + ir.RegisterType(load("Aws::Waf_rule_group"), reflect.TypeOf(&WafRuleGroup{})) + ir.RegisterType(load("Aws::Waf_size_constraint_set"), reflect.TypeOf(&WafSizeConstraintSet{})) + ir.RegisterType(load("Aws::Waf_sql_injection_match_set"), reflect.TypeOf(&WafSqlInjectionMatchSet{})) + ir.RegisterType(load("Aws::Waf_web_acl"), reflect.TypeOf(&WafWebAcl{})) + ir.RegisterType(load("Aws::Waf_xss_match_set"), reflect.TypeOf(&WafXssMatchSet{})) + ir.RegisterType(load("Aws::Wafregional_byte_match_set"), reflect.TypeOf(&WafregionalByteMatchSet{})) + ir.RegisterType(load("Aws::Wafregional_geo_match_set"), reflect.TypeOf(&WafregionalGeoMatchSet{})) + 
ir.RegisterType(load("Aws::Wafregional_ipset"), reflect.TypeOf(&WafregionalIpset{}))
+ ir.RegisterType(load("Aws::Wafregional_rate_based_rule"), reflect.TypeOf(&WafregionalRateBasedRule{}))
+ ir.RegisterType(load("Aws::Wafregional_regex_match_set"), reflect.TypeOf(&WafregionalRegexMatchSet{}))
+ ir.RegisterType(load("Aws::Wafregional_regex_pattern_set"), reflect.TypeOf(&WafregionalRegexPatternSet{}))
+ ir.RegisterType(load("Aws::Wafregional_rule"), reflect.TypeOf(&WafregionalRule{}))
+ ir.RegisterType(load("Aws::Wafregional_rule_group"), reflect.TypeOf(&WafregionalRuleGroup{}))
+ ir.RegisterType(load("Aws::Wafregional_size_constraint_set"), reflect.TypeOf(&WafregionalSizeConstraintSet{}))
+ ir.RegisterType(load("Aws::Wafregional_sql_injection_match_set"), reflect.TypeOf(&WafregionalSqlInjectionMatchSet{}))
+ ir.RegisterType(load("Aws::Wafregional_web_acl"), reflect.TypeOf(&WafregionalWebAcl{}))
+ ir.RegisterType(load("Aws::Wafregional_web_acl_association"), reflect.TypeOf(&WafregionalWebAclAssociation{}))
+ ir.RegisterType(load("Aws::Wafregional_xss_match_set"), reflect.TypeOf(&WafregionalXssMatchSet{}))
+}
diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/azurerm/azurerm.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/azurerm/azurerm.go
new file mode 100644
index 0000000..750e16e
--- /dev/null
+++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/azurerm/azurerm.go
@@ -0,0 +1,3978 @@
+// this file is generated
+package azurerm
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/lyraproj/pcore/px"
+)
+
+type ApiManagement struct {
+ Location string
+ Name string
+ PublisherEmail string `puppet:"name=>'publisher_email'"`
+ PublisherName string `puppet:"name=>'publisher_name'"`
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ ApiManagementId *string `puppet:"name=>'api_management_id'"`
+ AdditionalLocation *Location `puppet:"name=>'additional_location'"`
+ Certificate *[]ManagementCertificate
+ GatewayRegionalUrl *string `puppet:"name=>'gateway_regional_url'"`
+ GatewayUrl *string `puppet:"name=>'gateway_url'"`
+ HostnameConfiguration *Configuration `puppet:"name=>'hostname_configuration'"`
+ Identity *Identity
+ ManagementApiUrl *string `puppet:"name=>'management_api_url'"`
+ NotificationSenderEmail *string `puppet:"name=>'notification_sender_email'"`
+ PortalUrl *string `puppet:"name=>'portal_url'"`
+ PublicIpAddresses *[]string `puppet:"name=>'public_ip_addresses'"`
+ ScmUrl *string `puppet:"name=>'scm_url'"`
+ Security *Security
+ Sku *ManagementSku
+ Tags *map[string]string
+}
+
+type AppService struct {
+ AppServicePlanId string `puppet:"name=>'app_service_plan_id'"`
+ Location string
+ Name string
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ AppServiceId *string `puppet:"name=>'app_service_id'"`
+ AppSettings *map[string]string `puppet:"name=>'app_settings'"`
+ ClientAffinityEnabled *bool `puppet:"name=>'client_affinity_enabled'"`
+ ConnectionString *[]String `puppet:"name=>'connection_string'"`
+ DefaultSiteHostname *string `puppet:"name=>'default_site_hostname'"`
+ Enabled *bool
+ HttpsOnly *bool `puppet:"name=>'https_only'"`
+ Identity *Identity
+ OutboundIpAddresses *string `puppet:"name=>'outbound_ip_addresses'"`
+ PossibleOutboundIpAddresses *string `puppet:"name=>'possible_outbound_ip_addresses'"`
+ SiteConfig *Config `puppet:"name=>'site_config'"`
+ SiteCredential *Credential `puppet:"name=>'site_credential'"`
+ SourceControl *Control `puppet:"name=>'source_control'"`
+ Tags *map[string]string
+}
+
+type AppServiceActiveSlot struct {
+ AppServiceName string `puppet:"name=>'app_service_name'"`
+ AppServiceSlotName string `puppet:"name=>'app_service_slot_name'"`
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ AppServiceActiveSlotId *string `puppet:"name=>'app_service_active_slot_id'"`
+}
+
+type AppServiceCustomHostnameBinding struct {
+ AppServiceName string `puppet:"name=>'app_service_name'"`
+ Hostname string
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ AppServiceCustomHostnameBindingId *string `puppet:"name=>'app_service_custom_hostname_binding_id'"`
+}
+
+type AppServicePlan struct {
+ Location string
+ Name string
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ AppServicePlanId *string `puppet:"name=>'app_service_plan_id'"`
+ AppServiceEnvironmentId *string `puppet:"name=>'app_service_environment_id'"`
+ Kind *string
+ MaximumNumberOfWorkers *int64 `puppet:"name=>'maximum_number_of_workers'"`
+ PerSiteScaling *bool `puppet:"name=>'per_site_scaling'"`
+ Reserved *bool
+ Sku *PlanSku
+ Tags *map[string]string
+}
+
+type AppServiceSlot struct {
+ AppServiceName string `puppet:"name=>'app_service_name'"`
+ AppServicePlanId string `puppet:"name=>'app_service_plan_id'"`
+ Location string
+ Name string
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ AppServiceSlotId *string `puppet:"name=>'app_service_slot_id'"`
+ AppSettings *map[string]string `puppet:"name=>'app_settings'"`
+ ClientAffinityEnabled *bool `puppet:"name=>'client_affinity_enabled'"`
+ ConnectionString *[]String `puppet:"name=>'connection_string'"`
+ DefaultSiteHostname *string `puppet:"name=>'default_site_hostname'"`
+ Enabled *bool
+ HttpsOnly *bool `puppet:"name=>'https_only'"`
+ Identity *Identity
+ SiteConfig *Config `puppet:"name=>'site_config'"`
+ Tags *map[string]string
+}
+
+type ApplicationGateway struct {
+ BackendAddressPool []Pool `puppet:"name=>'backend_address_pool'"`
+ BackendHttpSettings []HttpSettings `puppet:"name=>'backend_http_settings'"`
+ FrontendIpConfiguration []IpConfiguration `puppet:"name=>'frontend_ip_configuration'"`
+ FrontendPort []Port `puppet:"name=>'frontend_port'"`
+ GatewayIpConfiguration []GatewayIpConfiguration `puppet:"name=>'gateway_ip_configuration'"`
+ HttpListener []Listener `puppet:"name=>'http_listener'"`
+ Location string
+ Name string
+ RequestRoutingRule []RoutingRule `puppet:"name=>'request_routing_rule'"`
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ ApplicationGatewayId *string `puppet:"name=>'application_gateway_id'"`
+ AuthenticationCertificate *[]GatewayAuthenticationCertificate `puppet:"name=>'authentication_certificate'"`
+ DisabledSslProtocols *[]string `puppet:"name=>'disabled_ssl_protocols'"`
+ Probe *[]Probe
+ Sku *IothubSku
+ SslCertificate *[]SslCertificate `puppet:"name=>'ssl_certificate'"`
+ Tags *map[string]string
+ UrlPathMap *[]Map `puppet:"name=>'url_path_map'"`
+ WafConfiguration *WafConfiguration `puppet:"name=>'waf_configuration'"`
+}
+
+type ApplicationInsights struct {
+ ApplicationType string `puppet:"name=>'application_type'"`
+ Location string
+ Name string
+ ResourceGroupName string `puppet:"name=>'resource_group_name'"`
+ ApplicationInsightsId *string `puppet:"name=>'application_insights_id'"`
+ AppId *string `puppet:"name=>'app_id'"`
+ InstrumentationKey *string `puppet:"name=>'instrumentation_key'"`
+ Tags *map[string]string
+}
+
+type ApplicationInsightsApiKey struct {
+ ApplicationInsightsId string
`puppet:"name=>'application_insights_id'"` + Name string + ApplicationInsightsApiKeyId *string `puppet:"name=>'application_insights_api_key_id'"` + ApiKey *string `puppet:"name=>'api_key'"` + ReadPermissions *[]string `puppet:"name=>'read_permissions'"` + WritePermissions *[]string `puppet:"name=>'write_permissions'"` +} + +type ApplicationSecurityGroup struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ApplicationSecurityGroupId *string `puppet:"name=>'application_security_group_id'"` + Tags *map[string]string +} + +type AutomationAccount struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AutomationAccountId *string `puppet:"name=>'automation_account_id'"` + DscPrimaryAccessKey *string `puppet:"name=>'dsc_primary_access_key'"` + DscSecondaryAccessKey *string `puppet:"name=>'dsc_secondary_access_key'"` + DscServerEndpoint *string `puppet:"name=>'dsc_server_endpoint'"` + Sku *Sku + Tags *map[string]string +} + +type AutomationCredential struct { + AccountName string `puppet:"name=>'account_name'"` + Name string + Password string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Username string + AutomationCredentialId *string `puppet:"name=>'automation_credential_id'"` + Description *string +} + +type AutomationDscConfiguration struct { + AutomationAccountName string `puppet:"name=>'automation_account_name'"` + ContentEmbedded string `puppet:"name=>'content_embedded'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AutomationDscConfigurationId *string `puppet:"name=>'automation_dsc_configuration_id'"` + Description *string + LogVerbose *bool `puppet:"name=>'log_verbose'"` + State *string +} + +type AutomationDscNodeconfiguration struct { + AutomationAccountName string `puppet:"name=>'automation_account_name'"` + ContentEmbedded string `puppet:"name=>'content_embedded'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AutomationDscNodeconfigurationId *string `puppet:"name=>'automation_dsc_nodeconfiguration_id'"` + ConfigurationName *string `puppet:"name=>'configuration_name'"` +} + +type AutomationModule struct { + AutomationAccountName string `puppet:"name=>'automation_account_name'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AutomationModuleId *string `puppet:"name=>'automation_module_id'"` + ModuleLink *Link `puppet:"name=>'module_link'"` +} + +type AutomationRunbook struct { + AccountName string `puppet:"name=>'account_name'"` + Location string + LogProgress bool `puppet:"name=>'log_progress'"` + LogVerbose bool `puppet:"name=>'log_verbose'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + RunbookType string `puppet:"name=>'runbook_type'"` + AutomationRunbookId *string `puppet:"name=>'automation_runbook_id'"` + Content *string + Description *string + PublishContentLink *ContentLink `puppet:"name=>'publish_content_link'"` + Tags *map[string]string +} + +type AutomationSchedule struct { + Frequency string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AutomationScheduleId *string `puppet:"name=>'automation_schedule_id'"` + AutomationAccountName *string `puppet:"name=>'automation_account_name'"` + Description *string + ExpiryTime *string `puppet:"name=>'expiry_time'"` + Interval *int64 + MonthDays *[]int64 `puppet:"name=>'month_days'"` + MonthlyOccurrence 
*[]OccurrenceOccurrences `puppet:"name=>'monthly_occurrence'"` + StartTime *string `puppet:"name=>'start_time'"` + Timezone *string + WeekDays *[]string `puppet:"name=>'week_days'"` +} + +type AutoscaleSetting struct { + Location string + Name string + Profile []SettingProfile + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TargetResourceId string `puppet:"name=>'target_resource_id'"` + AutoscaleSettingId *string `puppet:"name=>'autoscale_setting_id'"` + Enabled *bool + Notification *Notification + Tags *map[string]string +} + +type AvailabilitySet struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + AvailabilitySetId *string `puppet:"name=>'availability_set_id'"` + Managed *bool + PlatformFaultDomainCount *int64 `puppet:"name=>'platform_fault_domain_count'"` + PlatformUpdateDomainCount *int64 `puppet:"name=>'platform_update_domain_count'"` + Tags *map[string]string +} + +type AzureadApplication struct { + Name string + AzureadApplicationId *string `puppet:"name=>'azuread_application_id'"` + ApplicationId *string `puppet:"name=>'application_id'"` + AvailableToOtherTenants *bool `puppet:"name=>'available_to_other_tenants'"` + Homepage *string + IdentifierUris *[]string `puppet:"name=>'identifier_uris'"` + Oauth2AllowImplicitFlow *bool `puppet:"name=>'oauth2_allow_implicit_flow'"` + ReplyUrls *[]string `puppet:"name=>'reply_urls'"` +} + +type AzureadServicePrincipal struct { + ApplicationId string `puppet:"name=>'application_id'"` + AzureadServicePrincipalId *string `puppet:"name=>'azuread_service_principal_id'"` + DisplayName *string `puppet:"name=>'display_name'"` +} + +type AzureadServicePrincipalPassword struct { + EndDate string `puppet:"name=>'end_date'"` + ServicePrincipalId string `puppet:"name=>'service_principal_id'"` + Value string + AzureadServicePrincipalPasswordId *string `puppet:"name=>'azuread_service_principal_password_id'"` + KeyId *string `puppet:"name=>'key_id'"` + StartDate *string `puppet:"name=>'start_date'"` +} + +type BatchAccount struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + BatchAccountId *string `puppet:"name=>'batch_account_id'"` + PoolAllocationMode *string `puppet:"name=>'pool_allocation_mode'"` + StorageAccountId *string `puppet:"name=>'storage_account_id'"` + Tags *map[string]string +} + +type BatchPool struct { + AccountName string `puppet:"name=>'account_name'"` + Name string + NodeAgentSkuId string `puppet:"name=>'node_agent_sku_id'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + VmSize string `puppet:"name=>'vm_size'"` + BatchPoolId *string `puppet:"name=>'batch_pool_id'"` + AutoScale *Scale `puppet:"name=>'auto_scale'"` + DisplayName *string `puppet:"name=>'display_name'"` + FixedScale *FixedScale `puppet:"name=>'fixed_scale'"` + StartTask *Task `puppet:"name=>'start_task'"` + StopPendingResizeOperation *bool `puppet:"name=>'stop_pending_resize_operation'"` + StorageImageReference *StorageImageReference `puppet:"name=>'storage_image_reference'"` +} + +type CdnEndpoint struct { + Location string + Name string + Origin []Origin + ProfileName string `puppet:"name=>'profile_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + CdnEndpointId *string `puppet:"name=>'cdn_endpoint_id'"` + ContentTypesToCompress *[]string `puppet:"name=>'content_types_to_compress'"` + GeoFilter *[]Filter `puppet:"name=>'geo_filter'"` + HostName *string `puppet:"name=>'host_name'"` + 
IsCompressionEnabled *bool `puppet:"name=>'is_compression_enabled'"` + IsHttpAllowed *bool `puppet:"name=>'is_http_allowed'"` + IsHttpsAllowed *bool `puppet:"name=>'is_https_allowed'"` + OptimizationType *string `puppet:"name=>'optimization_type'"` + OriginHostHeader *string `puppet:"name=>'origin_host_header'"` + OriginPath *string `puppet:"name=>'origin_path'"` + ProbePath *string `puppet:"name=>'probe_path'"` + QuerystringCachingBehaviour *string `puppet:"name=>'querystring_caching_behaviour'"` + Tags *map[string]string +} + +type CdnProfile struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + CdnProfileId *string `puppet:"name=>'cdn_profile_id'"` + Tags *map[string]string +} + +type CognitiveAccount struct { + Kind string + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + CognitiveAccountId *string `puppet:"name=>'cognitive_account_id'"` + Endpoint *string + Sku *AccountSku + Tags *map[string]string +} + +type ContainerGroup struct { + Container []Container + Location string + Name string + OsType string `puppet:"name=>'os_type'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ContainerGroupId *string `puppet:"name=>'container_group_id'"` + DnsNameLabel *string `puppet:"name=>'dns_name_label'"` + Fqdn *string + ImageRegistryCredential *[]RegistryCredential `puppet:"name=>'image_registry_credential'"` + IpAddress *string `puppet:"name=>'ip_address'"` + IpAddressType *string `puppet:"name=>'ip_address_type'"` + RestartPolicy *string `puppet:"name=>'restart_policy'"` + Tags *map[string]string +} + +type ContainerRegistry struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ContainerRegistryId *string `puppet:"name=>'container_registry_id'"` + AdminEnabled *bool `puppet:"name=>'admin_enabled'"` + AdminPassword *string `puppet:"name=>'admin_password'"` + AdminUsername *string `puppet:"name=>'admin_username'"` + GeoreplicationLocations *[]string `puppet:"name=>'georeplication_locations'"` + LoginServer *string `puppet:"name=>'login_server'"` + Sku *string + StorageAccountId *string `puppet:"name=>'storage_account_id'"` + Tags *map[string]string +} + +type ContainerService struct { + Location string + Name string + OrchestrationPlatform string `puppet:"name=>'orchestration_platform'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ContainerServiceId *string `puppet:"name=>'container_service_id'"` + AgentPoolProfile *PoolProfile `puppet:"name=>'agent_pool_profile'"` + DiagnosticsProfile *DiagnosticsProfile `puppet:"name=>'diagnostics_profile'"` + LinuxProfile *LinuxProfile `puppet:"name=>'linux_profile'"` + MasterProfile *MasterProfile `puppet:"name=>'master_profile'"` + ServicePrincipal *Principal `puppet:"name=>'service_principal'"` + Tags *map[string]string +} + +type CosmosdbAccount struct { + Location string + Name string + OfferType string `puppet:"name=>'offer_type'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + CosmosdbAccountId *string `puppet:"name=>'cosmosdb_account_id'"` + Capabilities *[]Sku + ConnectionStrings *[]string `puppet:"name=>'connection_strings'"` + ConsistencyPolicy *ConsistencyPolicy `puppet:"name=>'consistency_policy'"` + EnableAutomaticFailover *bool `puppet:"name=>'enable_automatic_failover'"` + EnableMultipleWriteLocations *bool `puppet:"name=>'enable_multiple_write_locations'"` + Endpoint *string + GeoLocation *[]GeoLocation 
`puppet:"name=>'geo_location'"` + IpRangeFilter *string `puppet:"name=>'ip_range_filter'"` + IsVirtualNetworkFilterEnabled *bool `puppet:"name=>'is_virtual_network_filter_enabled'"` + Kind *string + PrimaryMasterKey *string `puppet:"name=>'primary_master_key'"` + PrimaryReadonlyMasterKey *string `puppet:"name=>'primary_readonly_master_key'"` + ReadEndpoints *[]string `puppet:"name=>'read_endpoints'"` + SecondaryMasterKey *string `puppet:"name=>'secondary_master_key'"` + SecondaryReadonlyMasterKey *string `puppet:"name=>'secondary_readonly_master_key'"` + Tags *map[string]string + VirtualNetworkRule *[]NetworkRule `puppet:"name=>'virtual_network_rule'"` + WriteEndpoints *[]string `puppet:"name=>'write_endpoints'"` +} + +type DataLakeAnalyticsAccount struct { + DefaultStoreAccountName string `puppet:"name=>'default_store_account_name'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + DataLakeAnalyticsAccountId *string `puppet:"name=>'data_lake_analytics_account_id'"` + Tags *map[string]string + Tier *string +} + +type DataLakeAnalyticsFirewallRule struct { + AccountName string `puppet:"name=>'account_name'"` + EndIpAddress string `puppet:"name=>'end_ip_address'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StartIpAddress string `puppet:"name=>'start_ip_address'"` + DataLakeAnalyticsFirewallRuleId *string `puppet:"name=>'data_lake_analytics_firewall_rule_id'"` +} + +type DataLakeStore struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + DataLakeStoreId *string `puppet:"name=>'data_lake_store_id'"` + EncryptionState *string `puppet:"name=>'encryption_state'"` + EncryptionType *string `puppet:"name=>'encryption_type'"` + Endpoint *string + FirewallAllowAzureIps *string `puppet:"name=>'firewall_allow_azure_ips'"` + FirewallState *string `puppet:"name=>'firewall_state'"` + Tags *map[string]string + Tier *string +} + +type DataLakeStoreFile struct { + AccountName string `puppet:"name=>'account_name'"` + LocalFilePath string `puppet:"name=>'local_file_path'"` + RemoteFilePath string `puppet:"name=>'remote_file_path'"` + DataLakeStoreFileId *string `puppet:"name=>'data_lake_store_file_id'"` +} + +type DataLakeStoreFirewallRule struct { + AccountName string `puppet:"name=>'account_name'"` + EndIpAddress string `puppet:"name=>'end_ip_address'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StartIpAddress string `puppet:"name=>'start_ip_address'"` + DataLakeStoreFirewallRuleId *string `puppet:"name=>'data_lake_store_firewall_rule_id'"` +} + +type DatabricksWorkspace struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + DatabricksWorkspaceId *string `puppet:"name=>'databricks_workspace_id'"` + ManagedResourceGroupId *string `puppet:"name=>'managed_resource_group_id'"` + ManagedResourceGroupName *string `puppet:"name=>'managed_resource_group_name'"` + Tags *map[string]string +} + +type DevTestLab struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + DevTestLabId *string `puppet:"name=>'dev_test_lab_id'"` + ArtifactsStorageAccountId *string `puppet:"name=>'artifacts_storage_account_id'"` + DefaultPremiumStorageAccountId *string `puppet:"name=>'default_premium_storage_account_id'"` + DefaultStorageAccountId *string `puppet:"name=>'default_storage_account_id'"` + KeyVaultId *string 
`puppet:"name=>'key_vault_id'"` + PremiumDataDiskStorageAccountId *string `puppet:"name=>'premium_data_disk_storage_account_id'"` + StorageType *string `puppet:"name=>'storage_type'"` + Tags *map[string]string + UniqueIdentifier *string `puppet:"name=>'unique_identifier'"` +} + +type DevTestLinuxVirtualMachine struct { + LabName string `puppet:"name=>'lab_name'"` + LabSubnetName string `puppet:"name=>'lab_subnet_name'"` + LabVirtualNetworkId string `puppet:"name=>'lab_virtual_network_id'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Size string + StorageType string `puppet:"name=>'storage_type'"` + Username string + DevTestLinuxVirtualMachineId *string `puppet:"name=>'dev_test_linux_virtual_machine_id'"` + AllowClaim *bool `puppet:"name=>'allow_claim'"` + DisallowPublicIpAddress *bool `puppet:"name=>'disallow_public_ip_address'"` + Fqdn *string + GalleryImageReference *Reference `puppet:"name=>'gallery_image_reference'"` + InboundNatRule *[]Rule `puppet:"name=>'inbound_nat_rule'"` + Notes *string + Password *string + SshKey *string `puppet:"name=>'ssh_key'"` + Tags *map[string]string + UniqueIdentifier *string `puppet:"name=>'unique_identifier'"` +} + +type DevTestPolicy struct { + EvaluatorType string `puppet:"name=>'evaluator_type'"` + LabName string `puppet:"name=>'lab_name'"` + Name string + PolicySetName string `puppet:"name=>'policy_set_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Threshold string + DevTestPolicyId *string `puppet:"name=>'dev_test_policy_id'"` + Description *string + FactData *string `puppet:"name=>'fact_data'"` + Tags *map[string]string +} + +type DevTestVirtualNetwork struct { + LabName string `puppet:"name=>'lab_name'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + DevTestVirtualNetworkId *string `puppet:"name=>'dev_test_virtual_network_id'"` + Description *string + Subnet *NetworkSubnet + Tags *map[string]string + UniqueIdentifier *string `puppet:"name=>'unique_identifier'"` +} + +type DevTestWindowsVirtualMachine struct { + LabName string `puppet:"name=>'lab_name'"` + LabSubnetName string `puppet:"name=>'lab_subnet_name'"` + LabVirtualNetworkId string `puppet:"name=>'lab_virtual_network_id'"` + Location string + Name string + Password string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Size string + StorageType string `puppet:"name=>'storage_type'"` + Username string + DevTestWindowsVirtualMachineId *string `puppet:"name=>'dev_test_windows_virtual_machine_id'"` + AllowClaim *bool `puppet:"name=>'allow_claim'"` + DisallowPublicIpAddress *bool `puppet:"name=>'disallow_public_ip_address'"` + Fqdn *string + GalleryImageReference *Reference `puppet:"name=>'gallery_image_reference'"` + InboundNatRule *[]Rule `puppet:"name=>'inbound_nat_rule'"` + Notes *string + Tags *map[string]string + UniqueIdentifier *string `puppet:"name=>'unique_identifier'"` +} + +type DevspaceController struct { + HostSuffix string `puppet:"name=>'host_suffix'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TargetContainerHostCredentialsBase64 string `puppet:"name=>'target_container_host_credentials_base64'"` + TargetContainerHostResourceId string `puppet:"name=>'target_container_host_resource_id'"` + DevspaceControllerId *string `puppet:"name=>'devspace_controller_id'"` + DataPlaneFqdn *string `puppet:"name=>'data_plane_fqdn'"` + Sku *AccountSku + Tags *map[string]string +} + +type DnsARecord 
struct { + Name string + Records []string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsARecordId *string `puppet:"name=>'dns_a_record_id'"` + Tags *map[string]string +} + +type DnsAaaaRecord struct { + Name string + Records []string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsAaaaRecordId *string `puppet:"name=>'dns_aaaa_record_id'"` + Tags *map[string]string +} + +type DnsCaaRecord struct { + Name string + Record []Record + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsCaaRecordId *string `puppet:"name=>'dns_caa_record_id'"` + Tags *map[string]string +} + +type DnsCnameRecord struct { + Name string + Record string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsCnameRecordId *string `puppet:"name=>'dns_cname_record_id'"` + Tags *map[string]string +} + +type DnsMxRecord struct { + Name string + Record []RecordRecord + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsMxRecordId *string `puppet:"name=>'dns_mx_record_id'"` + Tags *map[string]string +} + +type DnsNsRecord struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsNsRecordId *string `puppet:"name=>'dns_ns_record_id'"` + Records *[]string + Tags *map[string]string +} + +type DnsPtrRecord struct { + Name string + Records []string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsPtrRecordId *string `puppet:"name=>'dns_ptr_record_id'"` + Tags *map[string]string +} + +type DnsSrvRecord struct { + Name string + Record []SrvRecordRecord + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsSrvRecordId *string `puppet:"name=>'dns_srv_record_id'"` + Tags *map[string]string +} + +type DnsTxtRecord struct { + Name string + Record []TxtRecordRecord + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Ttl int64 + ZoneName string `puppet:"name=>'zone_name'"` + DnsTxtRecordId *string `puppet:"name=>'dns_txt_record_id'"` + Tags *map[string]string +} + +type DnsZone struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + DnsZoneId *string `puppet:"name=>'dns_zone_id'"` + MaxNumberOfRecordSets *int64 `puppet:"name=>'max_number_of_record_sets'"` + NameServers *[]string `puppet:"name=>'name_servers'"` + NumberOfRecordSets *int64 `puppet:"name=>'number_of_record_sets'"` + RegistrationVirtualNetworkIds *[]string `puppet:"name=>'registration_virtual_network_ids'"` + ResolutionVirtualNetworkIds *[]string `puppet:"name=>'resolution_virtual_network_ids'"` + Tags *map[string]string + ZoneType *string `puppet:"name=>'zone_type'"` +} + +type EventgridTopic struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + EventgridTopicId *string `puppet:"name=>'eventgrid_topic_id'"` + Endpoint *string + PrimaryAccessKey *string `puppet:"name=>'primary_access_key'"` + SecondaryAccessKey *string `puppet:"name=>'secondary_access_key'"` + Tags *map[string]string +} + +type Eventhub struct { + MessageRetention int64 
`puppet:"name=>'message_retention'"` + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + PartitionCount int64 `puppet:"name=>'partition_count'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + EventhubId *string `puppet:"name=>'eventhub_id'"` + CaptureDescription *Description `puppet:"name=>'capture_description'"` + PartitionIds *[]string `puppet:"name=>'partition_ids'"` +} + +type EventhubAuthorizationRule struct { + EventhubName string `puppet:"name=>'eventhub_name'"` + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + EventhubAuthorizationRuleId *string `puppet:"name=>'eventhub_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Send *bool +} + +type EventhubConsumerGroup struct { + EventhubName string `puppet:"name=>'eventhub_name'"` + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + EventhubConsumerGroupId *string `puppet:"name=>'eventhub_consumer_group_id'"` + UserMetadata *string `puppet:"name=>'user_metadata'"` +} + +type EventhubNamespace struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + EventhubNamespaceId *string `puppet:"name=>'eventhub_namespace_id'"` + AutoInflateEnabled *bool `puppet:"name=>'auto_inflate_enabled'"` + Capacity *int64 + DefaultPrimaryConnectionString *string `puppet:"name=>'default_primary_connection_string'"` + DefaultPrimaryKey *string `puppet:"name=>'default_primary_key'"` + DefaultSecondaryConnectionString *string `puppet:"name=>'default_secondary_connection_string'"` + DefaultSecondaryKey *string `puppet:"name=>'default_secondary_key'"` + KafkaEnabled *bool `puppet:"name=>'kafka_enabled'"` + MaximumThroughputUnits *int64 `puppet:"name=>'maximum_throughput_units'"` + Tags *map[string]string +} + +type EventhubNamespaceAuthorizationRule struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + EventhubNamespaceAuthorizationRuleId *string `puppet:"name=>'eventhub_namespace_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Send *bool +} + +type ExpressRouteCircuit struct { + BandwidthInMbps int64 `puppet:"name=>'bandwidth_in_mbps'"` + Location string + Name string + PeeringLocation string `puppet:"name=>'peering_location'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServiceProviderName string `puppet:"name=>'service_provider_name'"` + ExpressRouteCircuitId *string `puppet:"name=>'express_route_circuit_id'"` + AllowClassicOperations *bool `puppet:"name=>'allow_classic_operations'"` + ServiceKey *string `puppet:"name=>'service_key'"` + ServiceProviderProvisioningState *string `puppet:"name=>'service_provider_provisioning_state'"` + Sku *CircuitSku + Tags *map[string]string +} + +type ExpressRouteCircuitAuthorization struct { 
+ ExpressRouteCircuitName string `puppet:"name=>'express_route_circuit_name'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ExpressRouteCircuitAuthorizationId *string `puppet:"name=>'express_route_circuit_authorization_id'"` + AuthorizationKey *string `puppet:"name=>'authorization_key'"` + AuthorizationUseStatus *string `puppet:"name=>'authorization_use_status'"` +} + +type ExpressRouteCircuitPeering struct { + ExpressRouteCircuitName string `puppet:"name=>'express_route_circuit_name'"` + PeeringType string `puppet:"name=>'peering_type'"` + PrimaryPeerAddressPrefix string `puppet:"name=>'primary_peer_address_prefix'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SecondaryPeerAddressPrefix string `puppet:"name=>'secondary_peer_address_prefix'"` + VlanId int64 `puppet:"name=>'vlan_id'"` + ExpressRouteCircuitPeeringId *string `puppet:"name=>'express_route_circuit_peering_id'"` + AzureAsn *int64 `puppet:"name=>'azure_asn'"` + MicrosoftPeeringConfig *PeeringConfig `puppet:"name=>'microsoft_peering_config'"` + PeerAsn *int64 `puppet:"name=>'peer_asn'"` + PrimaryAzurePort *string `puppet:"name=>'primary_azure_port'"` + SecondaryAzurePort *string `puppet:"name=>'secondary_azure_port'"` + SharedKey *string `puppet:"name=>'shared_key'"` +} + +type Firewall struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + FirewallId *string `puppet:"name=>'firewall_id'"` + IpConfiguration *FirewallIpConfiguration `puppet:"name=>'ip_configuration'"` + Tags *map[string]string +} + +type FirewallApplicationRuleCollection struct { + Action string + AzureFirewallName string `puppet:"name=>'azure_firewall_name'"` + Name string + Priority int64 + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Rule []CollectionRule + FirewallApplicationRuleCollectionId *string `puppet:"name=>'firewall_application_rule_collection_id'"` +} + +type FirewallNetworkRuleCollection struct { + Action string + AzureFirewallName string `puppet:"name=>'azure_firewall_name'"` + Name string + Priority int64 + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Rule []RuleCollectionRule + FirewallNetworkRuleCollectionId *string `puppet:"name=>'firewall_network_rule_collection_id'"` +} + +type FunctionApp struct { + AppServicePlanId string `puppet:"name=>'app_service_plan_id'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageConnectionString string `puppet:"name=>'storage_connection_string'"` + FunctionAppId *string `puppet:"name=>'function_app_id'"` + AppSettings *map[string]string `puppet:"name=>'app_settings'"` + ClientAffinityEnabled *bool `puppet:"name=>'client_affinity_enabled'"` + ConnectionString *[]String `puppet:"name=>'connection_string'"` + DefaultHostname *string `puppet:"name=>'default_hostname'"` + EnableBuiltinLogging *bool `puppet:"name=>'enable_builtin_logging'"` + Enabled *bool + HttpsOnly *bool `puppet:"name=>'https_only'"` + Identity *Identity + OutboundIpAddresses *string `puppet:"name=>'outbound_ip_addresses'"` + SiteConfig *SiteConfig `puppet:"name=>'site_config'"` + SiteCredential *Credential `puppet:"name=>'site_credential'"` + Tags *map[string]string + Version *string +} + +type GenericHandler struct { +} + +type Image struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ImageId *string `puppet:"name=>'image_id'"` + DataDisk *[]Disk `puppet:"name=>'data_disk'"` + 
OsDisk *OsDisk `puppet:"name=>'os_disk'"` + SourceVirtualMachineId *string `puppet:"name=>'source_virtual_machine_id'"` + Tags *map[string]string +} + +type Iothub struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + IothubId *string `puppet:"name=>'iothub_id'"` + Endpoint *[]Endpoint + EventHubEventsEndpoint *string `puppet:"name=>'event_hub_events_endpoint'"` + EventHubEventsPath *string `puppet:"name=>'event_hub_events_path'"` + EventHubOperationsEndpoint *string `puppet:"name=>'event_hub_operations_endpoint'"` + EventHubOperationsPath *string `puppet:"name=>'event_hub_operations_path'"` + Hostname *string + Route *[]IothubRoute + SharedAccessPolicy *[]AccessPolicy `puppet:"name=>'shared_access_policy'"` + Sku *IothubSku + Tags *map[string]string + Type *string +} + +type IothubConsumerGroup struct { + EventhubEndpointName string `puppet:"name=>'eventhub_endpoint_name'"` + IothubName string `puppet:"name=>'iothub_name'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + IothubConsumerGroupId *string `puppet:"name=>'iothub_consumer_group_id'"` +} + +type KeyVault struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku []Sku + TenantId string `puppet:"name=>'tenant_id'"` + KeyVaultId *string `puppet:"name=>'key_vault_id'"` + AccessPolicy *[]VaultAccessPolicy `puppet:"name=>'access_policy'"` + EnabledForDeployment *bool `puppet:"name=>'enabled_for_deployment'"` + EnabledForDiskEncryption *bool `puppet:"name=>'enabled_for_disk_encryption'"` + EnabledForTemplateDeployment *bool `puppet:"name=>'enabled_for_template_deployment'"` + NetworkAcls *Acls `puppet:"name=>'network_acls'"` + Tags *map[string]string + VaultUri *string `puppet:"name=>'vault_uri'"` +} + +type KeyVaultAccessPolicy struct { + ObjectId string `puppet:"name=>'object_id'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TenantId string `puppet:"name=>'tenant_id'"` + VaultName string `puppet:"name=>'vault_name'"` + KeyVaultAccessPolicyId *string `puppet:"name=>'key_vault_access_policy_id'"` + ApplicationId *string `puppet:"name=>'application_id'"` + CertificatePermissions *[]string `puppet:"name=>'certificate_permissions'"` + KeyPermissions *[]string `puppet:"name=>'key_permissions'"` + SecretPermissions *[]string `puppet:"name=>'secret_permissions'"` +} + +type KeyVaultCertificate struct { + Name string + VaultUri string `puppet:"name=>'vault_uri'"` + KeyVaultCertificateId *string `puppet:"name=>'key_vault_certificate_id'"` + Certificate *CertificateCertificate + CertificateData *string `puppet:"name=>'certificate_data'"` + CertificatePolicy *CertificatePolicy `puppet:"name=>'certificate_policy'"` + SecretId *string `puppet:"name=>'secret_id'"` + Tags *map[string]string + Thumbprint *string + Version *string +} + +type KeyVaultKey struct { + KeyOpts []string `puppet:"name=>'key_opts'"` + KeySize int64 `puppet:"name=>'key_size'"` + KeyType string `puppet:"name=>'key_type'"` + Name string + VaultUri string `puppet:"name=>'vault_uri'"` + KeyVaultKeyId *string `puppet:"name=>'key_vault_key_id'"` + E *string + N *string + Tags *map[string]string + Version *string +} + +type KeyVaultSecret struct { + Name string + Value string + VaultUri string `puppet:"name=>'vault_uri'"` + KeyVaultSecretId *string `puppet:"name=>'key_vault_secret_id'"` + ContentType *string `puppet:"name=>'content_type'"` + Tags *map[string]string + Version *string +} + +type KubernetesCluster struct { 
+ DnsPrefix string `puppet:"name=>'dns_prefix'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + KubernetesClusterId *string `puppet:"name=>'kubernetes_cluster_id'"` + AddonProfile *AddonProfile `puppet:"name=>'addon_profile'"` + AgentPoolProfile *AgentPoolProfile `puppet:"name=>'agent_pool_profile'"` + Fqdn *string + KubeAdminConfig *KubeConfig `puppet:"name=>'kube_admin_config'"` + KubeAdminConfigRaw *string `puppet:"name=>'kube_admin_config_raw'"` + KubeConfig *KubeConfig `puppet:"name=>'kube_config'"` + KubeConfigRaw *string `puppet:"name=>'kube_config_raw'"` + KubernetesVersion *string `puppet:"name=>'kubernetes_version'"` + LinuxProfile *ClusterLinuxProfile `puppet:"name=>'linux_profile'"` + NetworkProfile *NetworkProfile `puppet:"name=>'network_profile'"` + NodeResourceGroup *string `puppet:"name=>'node_resource_group'"` + RoleBasedAccessControl *AccessControl `puppet:"name=>'role_based_access_control'"` + ServicePrincipal *Principal `puppet:"name=>'service_principal'"` + Tags *map[string]string +} + +type Lb struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbId *string `puppet:"name=>'lb_id'"` + FrontendIpConfiguration *[]FrontendIpConfiguration `puppet:"name=>'frontend_ip_configuration'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PrivateIpAddresses *[]string `puppet:"name=>'private_ip_addresses'"` + Sku *string + Tags *map[string]string +} + +type LbBackendAddressPool struct { + LoadbalancerId string `puppet:"name=>'loadbalancer_id'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbBackendAddressPoolId *string `puppet:"name=>'lb_backend_address_pool_id'"` + BackendIpConfigurations *[]string `puppet:"name=>'backend_ip_configurations'"` + LoadBalancingRules *[]string `puppet:"name=>'load_balancing_rules'"` +} + +type LbNatPool struct { + BackendPort int64 `puppet:"name=>'backend_port'"` + FrontendIpConfigurationName string `puppet:"name=>'frontend_ip_configuration_name'"` + FrontendPortEnd int64 `puppet:"name=>'frontend_port_end'"` + FrontendPortStart int64 `puppet:"name=>'frontend_port_start'"` + LoadbalancerId string `puppet:"name=>'loadbalancer_id'"` + Name string + Protocol string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbNatPoolId *string `puppet:"name=>'lb_nat_pool_id'"` + FrontendIpConfigurationId *string `puppet:"name=>'frontend_ip_configuration_id'"` +} + +type LbNatRule struct { + BackendPort int64 `puppet:"name=>'backend_port'"` + FrontendIpConfigurationName string `puppet:"name=>'frontend_ip_configuration_name'"` + FrontendPort int64 `puppet:"name=>'frontend_port'"` + LoadbalancerId string `puppet:"name=>'loadbalancer_id'"` + Name string + Protocol string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbNatRuleId *string `puppet:"name=>'lb_nat_rule_id'"` + BackendIpConfigurationId *string `puppet:"name=>'backend_ip_configuration_id'"` + EnableFloatingIp *bool `puppet:"name=>'enable_floating_ip'"` + FrontendIpConfigurationId *string `puppet:"name=>'frontend_ip_configuration_id'"` +} + +type LbProbe struct { + LoadbalancerId string `puppet:"name=>'loadbalancer_id'"` + Name string + Port int64 + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbProbeId *string `puppet:"name=>'lb_probe_id'"` + IntervalInSeconds *int64 `puppet:"name=>'interval_in_seconds'"` + LoadBalancerRules *[]string `puppet:"name=>'load_balancer_rules'"` + 
NumberOfProbes *int64 `puppet:"name=>'number_of_probes'"` + Protocol *string + RequestPath *string `puppet:"name=>'request_path'"` +} + +type LbRule struct { + BackendPort int64 `puppet:"name=>'backend_port'"` + FrontendIpConfigurationName string `puppet:"name=>'frontend_ip_configuration_name'"` + FrontendPort int64 `puppet:"name=>'frontend_port'"` + LoadbalancerId string `puppet:"name=>'loadbalancer_id'"` + Name string + Protocol string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LbRuleId *string `puppet:"name=>'lb_rule_id'"` + BackendAddressPoolId *string `puppet:"name=>'backend_address_pool_id'"` + EnableFloatingIp *bool `puppet:"name=>'enable_floating_ip'"` + FrontendIpConfigurationId *string `puppet:"name=>'frontend_ip_configuration_id'"` + IdleTimeoutInMinutes *int64 `puppet:"name=>'idle_timeout_in_minutes'"` + LoadDistribution *string `puppet:"name=>'load_distribution'"` + ProbeId *string `puppet:"name=>'probe_id'"` +} + +type LocalNetworkGateway struct { + AddressSpace []string `puppet:"name=>'address_space'"` + GatewayAddress string `puppet:"name=>'gateway_address'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LocalNetworkGatewayId *string `puppet:"name=>'local_network_gateway_id'"` + BgpSettings *BgpSettings `puppet:"name=>'bgp_settings'"` + Tags *map[string]string +} + +type LogAnalyticsSolution struct { + Location string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SolutionName string `puppet:"name=>'solution_name'"` + WorkspaceName string `puppet:"name=>'workspace_name'"` + WorkspaceResourceId string `puppet:"name=>'workspace_resource_id'"` + LogAnalyticsSolutionId *string `puppet:"name=>'log_analytics_solution_id'"` + Plan *SolutionPlan +} + +type LogAnalyticsWorkspace struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + LogAnalyticsWorkspaceId *string `puppet:"name=>'log_analytics_workspace_id'"` + PortalUrl *string `puppet:"name=>'portal_url'"` + PrimarySharedKey *string `puppet:"name=>'primary_shared_key'"` + RetentionInDays *int64 `puppet:"name=>'retention_in_days'"` + SecondarySharedKey *string `puppet:"name=>'secondary_shared_key'"` + Tags *map[string]string + WorkspaceId *string `puppet:"name=>'workspace_id'"` +} + +type LogAnalyticsWorkspaceLinkedService struct { + LinkedServiceProperties map[string]ServiceProperties `puppet:"name=>'linked_service_properties'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + WorkspaceName string `puppet:"name=>'workspace_name'"` + LogAnalyticsWorkspaceLinkedServiceId *string `puppet:"name=>'log_analytics_workspace_linked_service_id'"` + LinkedServiceName *string `puppet:"name=>'linked_service_name'"` + Name *string + Tags *map[string]string +} + +type LogicAppActionCustom struct { + Body string + LogicAppId string `puppet:"name=>'logic_app_id'"` + Name string + LogicAppActionCustomId *string `puppet:"name=>'logic_app_action_custom_id'"` +} + +type LogicAppActionHttp struct { + LogicAppId string `puppet:"name=>'logic_app_id'"` + Method string + Name string + Uri string + LogicAppActionHttpId *string `puppet:"name=>'logic_app_action_http_id'"` + Body *string + Headers *map[string]string +} + +type LogicAppTriggerCustom struct { + Body string + LogicAppId string `puppet:"name=>'logic_app_id'"` + Name string + LogicAppTriggerCustomId *string `puppet:"name=>'logic_app_trigger_custom_id'"` +} + +type LogicAppTriggerHttpRequest struct { + LogicAppId 
string `puppet:"name=>'logic_app_id'"` + Name string + Schema string + LogicAppTriggerHttpRequestId *string `puppet:"name=>'logic_app_trigger_http_request_id'"` + Method *string + RelativePath *string `puppet:"name=>'relative_path'"` +} + +type LogicAppTriggerRecurrence struct { + Frequency string + Interval int64 + LogicAppId string `puppet:"name=>'logic_app_id'"` + Name string + LogicAppTriggerRecurrenceId *string `puppet:"name=>'logic_app_trigger_recurrence_id'"` +} + +type LogicAppWorkflow struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + LogicAppWorkflowId *string `puppet:"name=>'logic_app_workflow_id'"` + AccessEndpoint *string `puppet:"name=>'access_endpoint'"` + Parameters *map[string]string + Tags *map[string]string + WorkflowSchema *string `puppet:"name=>'workflow_schema'"` + WorkflowVersion *string `puppet:"name=>'workflow_version'"` +} + +type ManagedDisk struct { + CreateOption string `puppet:"name=>'create_option'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountType string `puppet:"name=>'storage_account_type'"` + ManagedDiskId *string `puppet:"name=>'managed_disk_id'"` + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + EncryptionSettings *Settings `puppet:"name=>'encryption_settings'"` + ImageReferenceId *string `puppet:"name=>'image_reference_id'"` + OsType *string `puppet:"name=>'os_type'"` + SourceResourceId *string `puppet:"name=>'source_resource_id'"` + SourceUri *string `puppet:"name=>'source_uri'"` + Tags *map[string]string + Zones *[]string +} + +type ManagementGroup struct { + ManagementGroupId *string `puppet:"name=>'management_group_id'"` + DisplayName *string `puppet:"name=>'display_name'"` + GroupId *string `puppet:"name=>'group_id'"` + ParentManagementGroupId *string `puppet:"name=>'parent_management_group_id'"` + SubscriptionIds *[]string `puppet:"name=>'subscription_ids'"` +} + +type ManagementLock struct { + LockLevel string `puppet:"name=>'lock_level'"` + Name string + Scope string + ManagementLockId *string `puppet:"name=>'management_lock_id'"` + Notes *string +} + +type MariadbDatabase struct { + Charset string + Collation string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + MariadbDatabaseId *string `puppet:"name=>'mariadb_database_id'"` +} + +type MariadbServer struct { + AdministratorLogin string `puppet:"name=>'administrator_login'"` + AdministratorLoginPassword string `puppet:"name=>'administrator_login_password'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SslEnforcement string `puppet:"name=>'ssl_enforcement'"` + Version string + MariadbServerId *string `puppet:"name=>'mariadb_server_id'"` + Fqdn *string + Sku *ServerSku + StorageProfile *Profile `puppet:"name=>'storage_profile'"` + Tags *map[string]string +} + +type MetricAlertrule struct { + Aggregation string + Location string + MetricName string `puppet:"name=>'metric_name'"` + Name string + Operator string + Period string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ResourceId string `puppet:"name=>'resource_id'"` + Threshold float64 + MetricAlertruleId *string `puppet:"name=>'metric_alertrule_id'"` + Description *string + EmailAction *EmailAction `puppet:"name=>'email_action'"` + Enabled *bool + Tags *map[string]string + WebhookAction *WebhookAction `puppet:"name=>'webhook_action'"` +} + +type 
MonitorActionGroup struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ShortName string `puppet:"name=>'short_name'"` + MonitorActionGroupId *string `puppet:"name=>'monitor_action_group_id'"` + EmailReceiver *[]Receiver `puppet:"name=>'email_receiver'"` + Enabled *bool + SmsReceiver *[]SmsReceiver `puppet:"name=>'sms_receiver'"` + Tags *map[string]string + WebhookReceiver *[]WebhookReceiver `puppet:"name=>'webhook_receiver'"` +} + +type MonitorActivityLogAlert struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Scopes []string + MonitorActivityLogAlertId *string `puppet:"name=>'monitor_activity_log_alert_id'"` + Action *[]Action + Criteria *Criteria + Description *string + Enabled *bool + Tags *map[string]string +} + +type MonitorDiagnosticSetting struct { + Name string + TargetResourceId string `puppet:"name=>'target_resource_id'"` + MonitorDiagnosticSettingId *string `puppet:"name=>'monitor_diagnostic_setting_id'"` + EventhubAuthorizationRuleId *string `puppet:"name=>'eventhub_authorization_rule_id'"` + EventhubName *string `puppet:"name=>'eventhub_name'"` + Log *[]LogMetric + LogAnalyticsWorkspaceId *string `puppet:"name=>'log_analytics_workspace_id'"` + Metric *[]LogMetric + StorageAccountId *string `puppet:"name=>'storage_account_id'"` +} + +type MonitorLogProfile struct { + Categories []string + Locations []string + Name string + MonitorLogProfileId *string `puppet:"name=>'monitor_log_profile_id'"` + RetentionPolicy *RetentionPolicy `puppet:"name=>'retention_policy'"` + ServicebusRuleId *string `puppet:"name=>'servicebus_rule_id'"` + StorageAccountId *string `puppet:"name=>'storage_account_id'"` +} + +type MonitorMetricAlert struct { + Criteria []AlertCriteria + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Scopes []string + MonitorMetricAlertId *string `puppet:"name=>'monitor_metric_alert_id'"` + Action *[]Action + AutoMitigate *bool `puppet:"name=>'auto_mitigate'"` + Description *string + Enabled *bool + Frequency *string + Severity *int64 + Tags *map[string]string + WindowSize *string `puppet:"name=>'window_size'"` +} + +type MssqlElasticpool struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + MssqlElasticpoolId *string `puppet:"name=>'mssql_elasticpool_id'"` + MaxSizeBytes *int64 `puppet:"name=>'max_size_bytes'"` + PerDatabaseSettings *DatabaseSettings `puppet:"name=>'per_database_settings'"` + Sku *ElasticpoolSku + Tags *map[string]string + ZoneRedundant *bool `puppet:"name=>'zone_redundant'"` +} + +type MysqlConfiguration struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + Value string + MysqlConfigurationId *string `puppet:"name=>'mysql_configuration_id'"` +} + +type MysqlDatabase struct { + Charset string + Collation string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + MysqlDatabaseId *string `puppet:"name=>'mysql_database_id'"` +} + +type MysqlFirewallRule struct { + EndIpAddress string `puppet:"name=>'end_ip_address'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + StartIpAddress string `puppet:"name=>'start_ip_address'"` + MysqlFirewallRuleId *string `puppet:"name=>'mysql_firewall_rule_id'"` +} + 
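All of the generated types in this file follow the same shape: required attributes are plain value fields, optional attributes are pointer fields, and a `puppet:"name=>'…'"` tag carries the snake_case attribute name wherever it differs from a simple lowercasing of the Go field name. As a reading aid only, here is a minimal sketch of how such a tag could be inspected with Go's standard reflect package; it is not Lyra's actual tag parser, and the fallback-to-lowercased-field-name rule and the attributeName helper are assumptions made for illustration.

package main

import (
	"fmt"
	"reflect"
	"strings"
)

// MysqlFirewallRule mirrors one of the generated types above.
type MysqlFirewallRule struct {
	EndIpAddress        string  `puppet:"name=>'end_ip_address'"`
	Name                string
	ResourceGroupName   string  `puppet:"name=>'resource_group_name'"`
	ServerName          string  `puppet:"name=>'server_name'"`
	StartIpAddress      string  `puppet:"name=>'start_ip_address'"`
	MysqlFirewallRuleId *string `puppet:"name=>'mysql_firewall_rule_id'"`
}

// attributeName extracts the name=>'…' value from the puppet tag and falls
// back to the lowercased Go field name (an assumed convention for untagged,
// single-word fields such as Name or Location).
func attributeName(f reflect.StructField) string {
	tag := f.Tag.Get("puppet")
	const prefix, suffix = "name=>'", "'"
	if strings.HasPrefix(tag, prefix) && strings.HasSuffix(tag, suffix) {
		return strings.TrimSuffix(strings.TrimPrefix(tag, prefix), suffix)
	}
	return strings.ToLower(f.Name)
}

func main() {
	t := reflect.TypeOf(MysqlFirewallRule{})
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		// Pointer-typed fields correspond to optional attributes in these generated types.
		optional := f.Type.Kind() == reflect.Ptr
		fmt.Printf("%-30s optional=%v\n", attributeName(f), optional)
	}
}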
+type MysqlServer struct { + AdministratorLogin string `puppet:"name=>'administrator_login'"` + AdministratorLoginPassword string `puppet:"name=>'administrator_login_password'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SslEnforcement string `puppet:"name=>'ssl_enforcement'"` + Version string + MysqlServerId *string `puppet:"name=>'mysql_server_id'"` + Fqdn *string + Sku *ServerSku + StorageProfile *Profile `puppet:"name=>'storage_profile'"` + Tags *map[string]string +} + +type MysqlVirtualNetworkRule struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + SubnetId string `puppet:"name=>'subnet_id'"` + MysqlVirtualNetworkRuleId *string `puppet:"name=>'mysql_virtual_network_rule_id'"` +} + +type NetworkInterface struct { + IpConfiguration []InterfaceIpConfiguration `puppet:"name=>'ip_configuration'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NetworkInterfaceId *string `puppet:"name=>'network_interface_id'"` + AppliedDnsServers *[]string `puppet:"name=>'applied_dns_servers'"` + DnsServers *[]string `puppet:"name=>'dns_servers'"` + EnableAcceleratedNetworking *bool `puppet:"name=>'enable_accelerated_networking'"` + EnableIpForwarding *bool `puppet:"name=>'enable_ip_forwarding'"` + InternalDnsNameLabel *string `puppet:"name=>'internal_dns_name_label'"` + MacAddress *string `puppet:"name=>'mac_address'"` + NetworkSecurityGroupId *string `puppet:"name=>'network_security_group_id'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PrivateIpAddresses *[]string `puppet:"name=>'private_ip_addresses'"` + Tags *map[string]string + VirtualMachineId *string `puppet:"name=>'virtual_machine_id'"` +} + +type NetworkInterfaceApplicationGatewayBackendAddressPoolAssociation struct { + BackendAddressPoolId string `puppet:"name=>'backend_address_pool_id'"` + IpConfigurationName string `puppet:"name=>'ip_configuration_name'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + NetworkInterfaceApplicationGatewayBackendAddressPoolAssociationId *string `puppet:"name=>'network_interface_application_gateway_backend_address_pool_association_id'"` +} + +type NetworkInterfaceBackendAddressPoolAssociation struct { + BackendAddressPoolId string `puppet:"name=>'backend_address_pool_id'"` + IpConfigurationName string `puppet:"name=>'ip_configuration_name'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + NetworkInterfaceBackendAddressPoolAssociationId *string `puppet:"name=>'network_interface_backend_address_pool_association_id'"` +} + +type NetworkInterfaceNatRuleAssociation struct { + IpConfigurationName string `puppet:"name=>'ip_configuration_name'"` + NatRuleId string `puppet:"name=>'nat_rule_id'"` + NetworkInterfaceId string `puppet:"name=>'network_interface_id'"` + NetworkInterfaceNatRuleAssociationId *string `puppet:"name=>'network_interface_nat_rule_association_id'"` +} + +type NetworkSecurityGroup struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NetworkSecurityGroupId *string `puppet:"name=>'network_security_group_id'"` + SecurityRule *[]SecurityRule `puppet:"name=>'security_rule'"` + Tags *map[string]string +} + +type NetworkSecurityRule struct { + Access string + Direction string + Name string + NetworkSecurityGroupName string `puppet:"name=>'network_security_group_name'"` + Priority int64 + 
Protocol string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NetworkSecurityRuleId *string `puppet:"name=>'network_security_rule_id'"` + Description *string + DestinationAddressPrefix *string `puppet:"name=>'destination_address_prefix'"` + DestinationAddressPrefixes *[]string `puppet:"name=>'destination_address_prefixes'"` + DestinationApplicationSecurityGroupIds *[]string `puppet:"name=>'destination_application_security_group_ids'"` + DestinationPortRange *string `puppet:"name=>'destination_port_range'"` + DestinationPortRanges *[]string `puppet:"name=>'destination_port_ranges'"` + SourceAddressPrefix *string `puppet:"name=>'source_address_prefix'"` + SourceAddressPrefixes *[]string `puppet:"name=>'source_address_prefixes'"` + SourceApplicationSecurityGroupIds *[]string `puppet:"name=>'source_application_security_group_ids'"` + SourcePortRange *string `puppet:"name=>'source_port_range'"` + SourcePortRanges *[]string `puppet:"name=>'source_port_ranges'"` +} + +type NetworkWatcher struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NetworkWatcherId *string `puppet:"name=>'network_watcher_id'"` + Tags *map[string]string +} + +type NotificationHub struct { + Location string + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NotificationHubId *string `puppet:"name=>'notification_hub_id'"` + ApnsCredential *ApnsCredential `puppet:"name=>'apns_credential'"` + GcmCredential *GcmCredential `puppet:"name=>'gcm_credential'"` +} + +type NotificationHubAuthorizationRule struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + NotificationHubName string `puppet:"name=>'notification_hub_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NotificationHubAuthorizationRuleId *string `puppet:"name=>'notification_hub_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryAccessKey *string `puppet:"name=>'primary_access_key'"` + SecondaryAccessKey *string `puppet:"name=>'secondary_access_key'"` + Send *bool +} + +type NotificationHubNamespace struct { + Location string + Name string + NamespaceType string `puppet:"name=>'namespace_type'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + NotificationHubNamespaceId *string `puppet:"name=>'notification_hub_namespace_id'"` + Enabled *bool + ServicebusEndpoint *string `puppet:"name=>'servicebus_endpoint'"` + Sku *Sku +} + +type PacketCapture struct { + Name string + NetworkWatcherName string `puppet:"name=>'network_watcher_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TargetResourceId string `puppet:"name=>'target_resource_id'"` + PacketCaptureId *string `puppet:"name=>'packet_capture_id'"` + Filter *[]CaptureFilter + MaximumBytesPerPacket *int64 `puppet:"name=>'maximum_bytes_per_packet'"` + MaximumBytesPerSession *int64 `puppet:"name=>'maximum_bytes_per_session'"` + MaximumCaptureDuration *int64 `puppet:"name=>'maximum_capture_duration'"` + StorageLocation *StorageLocation `puppet:"name=>'storage_location'"` +} + +type PolicyAssignment struct { + Name string + PolicyDefinitionId string `puppet:"name=>'policy_definition_id'"` + Scope string + PolicyAssignmentId *string `puppet:"name=>'policy_assignment_id'"` + Description *string + DisplayName *string `puppet:"name=>'display_name'"` + Identity *AssignmentIdentity + Location *string + NotScopes *[]string `puppet:"name=>'not_scopes'"` + 
Parameters *string +} + +type PolicyDefinition struct { + DisplayName string `puppet:"name=>'display_name'"` + Mode string + Name string + PolicyType string `puppet:"name=>'policy_type'"` + PolicyDefinitionId *string `puppet:"name=>'policy_definition_id'"` + Description *string + ManagementGroupId *string `puppet:"name=>'management_group_id'"` + Metadata *string + Parameters *string + PolicyRule *string `puppet:"name=>'policy_rule'"` +} + +type PolicySetDefinition struct { + DisplayName string `puppet:"name=>'display_name'"` + Name string + PolicyType string `puppet:"name=>'policy_type'"` + PolicySetDefinitionId *string `puppet:"name=>'policy_set_definition_id'"` + Description *string + ManagementGroupId *string `puppet:"name=>'management_group_id'"` + Metadata *string + Parameters *string + PolicyDefinitions *string `puppet:"name=>'policy_definitions'"` +} + +type PostgresqlConfiguration struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + Value string + PostgresqlConfigurationId *string `puppet:"name=>'postgresql_configuration_id'"` +} + +type PostgresqlDatabase struct { + Charset string + Collation string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + PostgresqlDatabaseId *string `puppet:"name=>'postgresql_database_id'"` +} + +type PostgresqlFirewallRule struct { + EndIpAddress string `puppet:"name=>'end_ip_address'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + StartIpAddress string `puppet:"name=>'start_ip_address'"` + PostgresqlFirewallRuleId *string `puppet:"name=>'postgresql_firewall_rule_id'"` +} + +type PostgresqlServer struct { + AdministratorLogin string `puppet:"name=>'administrator_login'"` + AdministratorLoginPassword string `puppet:"name=>'administrator_login_password'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SslEnforcement string `puppet:"name=>'ssl_enforcement'"` + Version string + PostgresqlServerId *string `puppet:"name=>'postgresql_server_id'"` + Fqdn *string + Sku *ServerSku + StorageProfile *Profile `puppet:"name=>'storage_profile'"` + Tags *map[string]string +} + +type PostgresqlVirtualNetworkRule struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + SubnetId string `puppet:"name=>'subnet_id'"` + PostgresqlVirtualNetworkRuleId *string `puppet:"name=>'postgresql_virtual_network_rule_id'"` + IgnoreMissingVnetServiceEndpoint *bool `puppet:"name=>'ignore_missing_vnet_service_endpoint'"` +} + +type PublicIp struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + PublicIpId *string `puppet:"name=>'public_ip_id'"` + AllocationMethod *string `puppet:"name=>'allocation_method'"` + DomainNameLabel *string `puppet:"name=>'domain_name_label'"` + Fqdn *string + IdleTimeoutInMinutes *int64 `puppet:"name=>'idle_timeout_in_minutes'"` + IpAddress *string `puppet:"name=>'ip_address'"` + IpVersion *string `puppet:"name=>'ip_version'"` + ReverseFqdn *string `puppet:"name=>'reverse_fqdn'"` + Sku *string + Tags *map[string]string + Zones *[]string +} + +type RecoveryServicesProtectedVm struct { + BackupPolicyId string `puppet:"name=>'backup_policy_id'"` + RecoveryVaultName string `puppet:"name=>'recovery_vault_name'"` + 
ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SourceVmId string `puppet:"name=>'source_vm_id'"` + RecoveryServicesProtectedVmId *string `puppet:"name=>'recovery_services_protected_vm_id'"` + Tags *map[string]string +} + +type RecoveryServicesProtectionPolicyVm struct { + Name string + RecoveryVaultName string `puppet:"name=>'recovery_vault_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + RecoveryServicesProtectionPolicyVmId *string `puppet:"name=>'recovery_services_protection_policy_vm_id'"` + Backup *Backup + RetentionDaily *Daily `puppet:"name=>'retention_daily'"` + RetentionMonthly *Monthly `puppet:"name=>'retention_monthly'"` + RetentionWeekly *Weekly `puppet:"name=>'retention_weekly'"` + RetentionYearly *Yearly `puppet:"name=>'retention_yearly'"` + Tags *map[string]string + Timezone *string +} + +type RecoveryServicesVault struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + RecoveryServicesVaultId *string `puppet:"name=>'recovery_services_vault_id'"` + Tags *map[string]string +} + +type RedisCache struct { + Capacity int64 + Family string + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SkuName string `puppet:"name=>'sku_name'"` + RedisCacheId *string `puppet:"name=>'redis_cache_id'"` + EnableNonSslPort *bool `puppet:"name=>'enable_non_ssl_port'"` + Hostname *string + PatchSchedule *[]Schedule `puppet:"name=>'patch_schedule'"` + Port *int64 + PrimaryAccessKey *string `puppet:"name=>'primary_access_key'"` + PrivateStaticIpAddress *string `puppet:"name=>'private_static_ip_address'"` + RedisConfiguration *RedisConfiguration `puppet:"name=>'redis_configuration'"` + SecondaryAccessKey *string `puppet:"name=>'secondary_access_key'"` + ShardCount *int64 `puppet:"name=>'shard_count'"` + SslPort *int64 `puppet:"name=>'ssl_port'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Tags *map[string]string + Zones *[]string +} + +type RedisFirewallRule struct { + EndIp string `puppet:"name=>'end_ip'"` + Name string + RedisCacheName string `puppet:"name=>'redis_cache_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StartIp string `puppet:"name=>'start_ip'"` + RedisFirewallRuleId *string `puppet:"name=>'redis_firewall_rule_id'"` +} + +type RelayNamespace struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + RelayNamespaceId *string `puppet:"name=>'relay_namespace_id'"` + MetricId *string `puppet:"name=>'metric_id'"` + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Sku *Sku + Tags *map[string]string +} + +type ResourceGroup struct { + Location string + Name string + ResourceGroupId *string `puppet:"name=>'resource_group_id'"` + Tags *map[string]string +} + +type RoleAssignment struct { + PrincipalId string `puppet:"name=>'principal_id'"` + Scope string + RoleAssignmentId *string `puppet:"name=>'role_assignment_id'"` + Name *string + RoleDefinitionId *string `puppet:"name=>'role_definition_id'"` + RoleDefinitionName *string `puppet:"name=>'role_definition_name'"` +} + +type RoleDefinition struct { + AssignableScopes []string `puppet:"name=>'assignable_scopes'"` + Name string + Permissions []Permissions + Scope string + 
RoleDefinitionLyraId *string `puppet:"name=>'role_definition_lyra_id'"` + Description *string + RoleDefinitionId *string `puppet:"name=>'role_definition_id'"` +} + +type Route struct { + AddressPrefix string `puppet:"name=>'address_prefix'"` + Name string + NextHopType string `puppet:"name=>'next_hop_type'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + RouteTableName string `puppet:"name=>'route_table_name'"` + RouteId *string `puppet:"name=>'route_id'"` + NextHopInIpAddress *string `puppet:"name=>'next_hop_in_ip_address'"` +} + +type RouteTable struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + RouteTableId *string `puppet:"name=>'route_table_id'"` + DisableBgpRoutePropagation *bool `puppet:"name=>'disable_bgp_route_propagation'"` + Route *[]TableRoute + Subnets *[]string + Tags *map[string]string +} + +type SchedulerJob struct { + JobCollectionName string `puppet:"name=>'job_collection_name'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SchedulerJobId *string `puppet:"name=>'scheduler_job_id'"` + ActionStorageQueue *Queue `puppet:"name=>'action_storage_queue'"` + ActionWeb *Web `puppet:"name=>'action_web'"` + ErrorActionStorageQueue *Queue `puppet:"name=>'error_action_storage_queue'"` + ErrorActionWeb *Web `puppet:"name=>'error_action_web'"` + Recurrence *JobRecurrence + Retry *Retry + StartTime *string `puppet:"name=>'start_time'"` + State *string +} + +type SchedulerJobCollection struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + SchedulerJobCollectionId *string `puppet:"name=>'scheduler_job_collection_id'"` + Quota *Quota + State *string + Tags *map[string]string +} + +type SearchService struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + SearchServiceId *string `puppet:"name=>'search_service_id'"` + PartitionCount *int64 `puppet:"name=>'partition_count'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + ReplicaCount *int64 `puppet:"name=>'replica_count'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Tags *map[string]string +} + +type SecurityCenterContact struct { + AlertNotifications bool `puppet:"name=>'alert_notifications'"` + AlertsToAdmins bool `puppet:"name=>'alerts_to_admins'"` + Email string + Phone string + SecurityCenterContactId *string `puppet:"name=>'security_center_contact_id'"` +} + +type SecurityCenterSubscriptionPricing struct { + Tier string + SecurityCenterSubscriptionPricingId *string `puppet:"name=>'security_center_subscription_pricing_id'"` +} + +type SecurityCenterWorkspace struct { + Scope string + WorkspaceId string `puppet:"name=>'workspace_id'"` + SecurityCenterWorkspaceId *string `puppet:"name=>'security_center_workspace_id'"` +} + +type ServiceFabricCluster struct { + Location string + ManagementEndpoint string `puppet:"name=>'management_endpoint'"` + Name string + NodeType []Type `puppet:"name=>'node_type'"` + ReliabilityLevel string `puppet:"name=>'reliability_level'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + UpgradeMode string `puppet:"name=>'upgrade_mode'"` + VmImage string `puppet:"name=>'vm_image'"` + ServiceFabricClusterId *string `puppet:"name=>'service_fabric_cluster_id'"` + AddOnFeatures *[]string `puppet:"name=>'add_on_features'"` + AzureActiveDirectory *AzureActiveDirectory `puppet:"name=>'azure_active_directory'"` + Certificate 
*ClusterCertificate + ClientCertificateThumbprint *[]Thumbprint `puppet:"name=>'client_certificate_thumbprint'"` + ClusterCodeVersion *string `puppet:"name=>'cluster_code_version'"` + ClusterEndpoint *string `puppet:"name=>'cluster_endpoint'"` + DiagnosticsConfig *DiagnosticsConfig `puppet:"name=>'diagnostics_config'"` + FabricSettings *[]FabricSettings `puppet:"name=>'fabric_settings'"` + ReverseProxyCertificate *ClusterCertificate `puppet:"name=>'reverse_proxy_certificate'"` + Tags *map[string]string +} + +type ServicebusNamespace struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + ServicebusNamespaceId *string `puppet:"name=>'servicebus_namespace_id'"` + Capacity *int64 + DefaultPrimaryConnectionString *string `puppet:"name=>'default_primary_connection_string'"` + DefaultPrimaryKey *string `puppet:"name=>'default_primary_key'"` + DefaultSecondaryConnectionString *string `puppet:"name=>'default_secondary_connection_string'"` + DefaultSecondaryKey *string `puppet:"name=>'default_secondary_key'"` + Tags *map[string]string +} + +type ServicebusNamespaceAuthorizationRule struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServicebusNamespaceAuthorizationRuleId *string `puppet:"name=>'servicebus_namespace_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Send *bool +} + +type ServicebusQueue struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServicebusQueueId *string `puppet:"name=>'servicebus_queue_id'"` + AutoDeleteOnIdle *string `puppet:"name=>'auto_delete_on_idle'"` + DeadLetteringOnMessageExpiration *bool `puppet:"name=>'dead_lettering_on_message_expiration'"` + DefaultMessageTtl *string `puppet:"name=>'default_message_ttl'"` + DuplicateDetectionHistoryTimeWindow *string `puppet:"name=>'duplicate_detection_history_time_window'"` + EnableExpress *bool `puppet:"name=>'enable_express'"` + EnablePartitioning *bool `puppet:"name=>'enable_partitioning'"` + LockDuration *string `puppet:"name=>'lock_duration'"` + MaxDeliveryCount *int64 `puppet:"name=>'max_delivery_count'"` + MaxSizeInMegabytes *int64 `puppet:"name=>'max_size_in_megabytes'"` + RequiresDuplicateDetection *bool `puppet:"name=>'requires_duplicate_detection'"` + RequiresSession *bool `puppet:"name=>'requires_session'"` +} + +type ServicebusQueueAuthorizationRule struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + QueueName string `puppet:"name=>'queue_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServicebusQueueAuthorizationRuleId *string `puppet:"name=>'servicebus_queue_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Send *bool +} + +type ServicebusSubscription struct { + MaxDeliveryCount int64 `puppet:"name=>'max_delivery_count'"` + Name string + NamespaceName string 
`puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TopicName string `puppet:"name=>'topic_name'"` + ServicebusSubscriptionId *string `puppet:"name=>'servicebus_subscription_id'"` + AutoDeleteOnIdle *string `puppet:"name=>'auto_delete_on_idle'"` + DeadLetteringOnMessageExpiration *bool `puppet:"name=>'dead_lettering_on_message_expiration'"` + DefaultMessageTtl *string `puppet:"name=>'default_message_ttl'"` + EnableBatchedOperations *bool `puppet:"name=>'enable_batched_operations'"` + ForwardTo *string `puppet:"name=>'forward_to'"` + LockDuration *string `puppet:"name=>'lock_duration'"` + RequiresSession *bool `puppet:"name=>'requires_session'"` +} + +type ServicebusSubscriptionRule struct { + FilterType string `puppet:"name=>'filter_type'"` + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SubscriptionName string `puppet:"name=>'subscription_name'"` + TopicName string `puppet:"name=>'topic_name'"` + ServicebusSubscriptionRuleId *string `puppet:"name=>'servicebus_subscription_rule_id'"` + Action *string + CorrelationFilter *CorrelationFilter `puppet:"name=>'correlation_filter'"` + SqlFilter *string `puppet:"name=>'sql_filter'"` +} + +type ServicebusTopic struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServicebusTopicId *string `puppet:"name=>'servicebus_topic_id'"` + AutoDeleteOnIdle *string `puppet:"name=>'auto_delete_on_idle'"` + DefaultMessageTtl *string `puppet:"name=>'default_message_ttl'"` + DuplicateDetectionHistoryTimeWindow *string `puppet:"name=>'duplicate_detection_history_time_window'"` + EnableBatchedOperations *bool `puppet:"name=>'enable_batched_operations'"` + EnableExpress *bool `puppet:"name=>'enable_express'"` + EnablePartitioning *bool `puppet:"name=>'enable_partitioning'"` + MaxSizeInMegabytes *int64 `puppet:"name=>'max_size_in_megabytes'"` + RequiresDuplicateDetection *bool `puppet:"name=>'requires_duplicate_detection'"` + Status *string + SupportOrdering *bool `puppet:"name=>'support_ordering'"` +} + +type ServicebusTopicAuthorizationRule struct { + Name string + NamespaceName string `puppet:"name=>'namespace_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TopicName string `puppet:"name=>'topic_name'"` + ServicebusTopicAuthorizationRuleId *string `puppet:"name=>'servicebus_topic_authorization_rule_id'"` + Listen *bool + Manage *bool + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` + Send *bool +} + +type SharedImage struct { + GalleryName string `puppet:"name=>'gallery_name'"` + Location string + Name string + OsType string `puppet:"name=>'os_type'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SharedImageId *string `puppet:"name=>'shared_image_id'"` + Description *string + Eula *string + Identifier *Identifier + PrivacyStatementUri *string `puppet:"name=>'privacy_statement_uri'"` + ReleaseNoteUri *string `puppet:"name=>'release_note_uri'"` + Tags *map[string]string +} + +type SharedImageGallery struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SharedImageGalleryId *string 
`puppet:"name=>'shared_image_gallery_id'"` + Description *string + Tags *map[string]string + UniqueName *string `puppet:"name=>'unique_name'"` +} + +type SharedImageVersion struct { + GalleryName string `puppet:"name=>'gallery_name'"` + ImageName string `puppet:"name=>'image_name'"` + Location string + ManagedImageId string `puppet:"name=>'managed_image_id'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TargetRegion []Region `puppet:"name=>'target_region'"` + SharedImageVersionId *string `puppet:"name=>'shared_image_version_id'"` + ExcludeFromLatest *bool `puppet:"name=>'exclude_from_latest'"` + Tags *map[string]string +} + +type SignalrService struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SignalrServiceId *string `puppet:"name=>'signalr_service_id'"` + Hostname *string + IpAddress *string `puppet:"name=>'ip_address'"` + PublicPort *int64 `puppet:"name=>'public_port'"` + ServerPort *int64 `puppet:"name=>'server_port'"` + Sku *ManagementSku + Tags *map[string]string +} + +type Snapshot struct { + CreateOption string `puppet:"name=>'create_option'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + SnapshotId *string `puppet:"name=>'snapshot_id'"` + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + EncryptionSettings *Settings `puppet:"name=>'encryption_settings'"` + SourceResourceId *string `puppet:"name=>'source_resource_id'"` + SourceUri *string `puppet:"name=>'source_uri'"` + StorageAccountId *string `puppet:"name=>'storage_account_id'"` + Tags *map[string]string +} + +type SqlActiveDirectoryAdministrator struct { + Login string + ObjectId string `puppet:"name=>'object_id'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + TenantId string `puppet:"name=>'tenant_id'"` + SqlActiveDirectoryAdministratorId *string `puppet:"name=>'sql_active_directory_administrator_id'"` +} + +type SqlDatabase struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + SqlDatabaseId *string `puppet:"name=>'sql_database_id'"` + Collation *string + CreateMode *string `puppet:"name=>'create_mode'"` + CreationDate *string `puppet:"name=>'creation_date'"` + DefaultSecondaryLocation *string `puppet:"name=>'default_secondary_location'"` + Edition *string + ElasticPoolName *string `puppet:"name=>'elastic_pool_name'"` + Encryption *string + Import *Import + MaxSizeBytes *string `puppet:"name=>'max_size_bytes'"` + RequestedServiceObjectiveId *string `puppet:"name=>'requested_service_objective_id'"` + RequestedServiceObjectiveName *string `puppet:"name=>'requested_service_objective_name'"` + RestorePointInTime *string `puppet:"name=>'restore_point_in_time'"` + SourceDatabaseDeletionDate *string `puppet:"name=>'source_database_deletion_date'"` + SourceDatabaseId *string `puppet:"name=>'source_database_id'"` + Tags *map[string]string + ThreatDetectionPolicy *DetectionPolicy `puppet:"name=>'threat_detection_policy'"` +} + +type SqlElasticpool struct { + Dtu int64 + Edition string + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + SqlElasticpoolId *string `puppet:"name=>'sql_elasticpool_id'"` + CreationDate *string `puppet:"name=>'creation_date'"` + DbDtuMax *int64 `puppet:"name=>'db_dtu_max'"` + DbDtuMin 
*int64 `puppet:"name=>'db_dtu_min'"` + PoolSize *int64 `puppet:"name=>'pool_size'"` + Tags *map[string]string +} + +type SqlFirewallRule struct { + EndIpAddress string `puppet:"name=>'end_ip_address'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + StartIpAddress string `puppet:"name=>'start_ip_address'"` + SqlFirewallRuleId *string `puppet:"name=>'sql_firewall_rule_id'"` +} + +type SqlServer struct { + AdministratorLogin string `puppet:"name=>'administrator_login'"` + AdministratorLoginPassword string `puppet:"name=>'administrator_login_password'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Version string + SqlServerId *string `puppet:"name=>'sql_server_id'"` + FullyQualifiedDomainName *string `puppet:"name=>'fully_qualified_domain_name'"` + Tags *map[string]string +} + +type SqlVirtualNetworkRule struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + ServerName string `puppet:"name=>'server_name'"` + SubnetId string `puppet:"name=>'subnet_id'"` + SqlVirtualNetworkRuleId *string `puppet:"name=>'sql_virtual_network_rule_id'"` + IgnoreMissingVnetServiceEndpoint *bool `puppet:"name=>'ignore_missing_vnet_service_endpoint'"` +} + +type StorageAccount struct { + AccountReplicationType string `puppet:"name=>'account_replication_type'"` + AccountTier string `puppet:"name=>'account_tier'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountId *string `puppet:"name=>'storage_account_id'"` + AccessTier *string `puppet:"name=>'access_tier'"` + AccountEncryptionSource *string `puppet:"name=>'account_encryption_source'"` + AccountKind *string `puppet:"name=>'account_kind'"` + CustomDomain *Domain `puppet:"name=>'custom_domain'"` + EnableBlobEncryption *bool `puppet:"name=>'enable_blob_encryption'"` + EnableFileEncryption *bool `puppet:"name=>'enable_file_encryption'"` + EnableHttpsTrafficOnly *bool `puppet:"name=>'enable_https_traffic_only'"` + Identity *Identity + NetworkRules *Rules `puppet:"name=>'network_rules'"` + PrimaryAccessKey *string `puppet:"name=>'primary_access_key'"` + PrimaryBlobConnectionString *string `puppet:"name=>'primary_blob_connection_string'"` + PrimaryBlobEndpoint *string `puppet:"name=>'primary_blob_endpoint'"` + PrimaryConnectionString *string `puppet:"name=>'primary_connection_string'"` + PrimaryFileEndpoint *string `puppet:"name=>'primary_file_endpoint'"` + PrimaryLocation *string `puppet:"name=>'primary_location'"` + PrimaryQueueEndpoint *string `puppet:"name=>'primary_queue_endpoint'"` + PrimaryTableEndpoint *string `puppet:"name=>'primary_table_endpoint'"` + SecondaryAccessKey *string `puppet:"name=>'secondary_access_key'"` + SecondaryBlobConnectionString *string `puppet:"name=>'secondary_blob_connection_string'"` + SecondaryBlobEndpoint *string `puppet:"name=>'secondary_blob_endpoint'"` + SecondaryConnectionString *string `puppet:"name=>'secondary_connection_string'"` + SecondaryLocation *string `puppet:"name=>'secondary_location'"` + SecondaryQueueEndpoint *string `puppet:"name=>'secondary_queue_endpoint'"` + SecondaryTableEndpoint *string `puppet:"name=>'secondary_table_endpoint'"` + Tags *map[string]string +} + +type StorageBlob struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageContainerName string 
`puppet:"name=>'storage_container_name'"` + StorageBlobId *string `puppet:"name=>'storage_blob_id'"` + Attempts *int64 + ContentType *string `puppet:"name=>'content_type'"` + Parallelism *int64 + Size *int64 + Source *string + SourceUri *string `puppet:"name=>'source_uri'"` + Type *string + Url *string +} + +type StorageContainer struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageContainerId *string `puppet:"name=>'storage_container_id'"` + ContainerAccessType *string `puppet:"name=>'container_access_type'"` + Properties *map[string]string +} + +type StorageQueue struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageQueueId *string `puppet:"name=>'storage_queue_id'"` +} + +type StorageShare struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageShareId *string `puppet:"name=>'storage_share_id'"` + Quota *int64 + Url *string +} + +type StorageTable struct { + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageTableId *string `puppet:"name=>'storage_table_id'"` +} + +type Subnet struct { + AddressPrefix string `puppet:"name=>'address_prefix'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + VirtualNetworkName string `puppet:"name=>'virtual_network_name'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Delegation *[]SubnetDelegation + IpConfigurations *[]string `puppet:"name=>'ip_configurations'"` + ServiceEndpoints *[]string `puppet:"name=>'service_endpoints'"` +} + +type SubnetNetworkSecurityGroupAssociation struct { + NetworkSecurityGroupId string `puppet:"name=>'network_security_group_id'"` + SubnetId string `puppet:"name=>'subnet_id'"` + SubnetNetworkSecurityGroupAssociationId *string `puppet:"name=>'subnet_network_security_group_association_id'"` +} + +type SubnetRouteTableAssociation struct { + RouteTableId string `puppet:"name=>'route_table_id'"` + SubnetId string `puppet:"name=>'subnet_id'"` + SubnetRouteTableAssociationId *string `puppet:"name=>'subnet_route_table_association_id'"` +} + +type TemplateDeployment struct { + DeploymentMode string `puppet:"name=>'deployment_mode'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TemplateDeploymentId *string `puppet:"name=>'template_deployment_id'"` + Outputs *map[string]string + Parameters *map[string]string + ParametersBody *string `puppet:"name=>'parameters_body'"` + TemplateBody *string `puppet:"name=>'template_body'"` +} + +type TrafficManagerEndpoint struct { + Name string + ProfileName string `puppet:"name=>'profile_name'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Type string + TrafficManagerEndpointId *string `puppet:"name=>'traffic_manager_endpoint_id'"` + EndpointLocation *string `puppet:"name=>'endpoint_location'"` + EndpointMonitorStatus *string `puppet:"name=>'endpoint_monitor_status'"` + EndpointStatus *string `puppet:"name=>'endpoint_status'"` + GeoMappings *[]string `puppet:"name=>'geo_mappings'"` + MinChildEndpoints *int64 `puppet:"name=>'min_child_endpoints'"` + Priority *int64 + Target *string + TargetResourceId *string `puppet:"name=>'target_resource_id'"` + Weight *int64 +} + +type 
TrafficManagerProfile struct { + DnsConfig []DnsConfig `puppet:"name=>'dns_config'"` + MonitorConfig []MonitorConfig `puppet:"name=>'monitor_config'"` + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + TrafficRoutingMethod string `puppet:"name=>'traffic_routing_method'"` + TrafficManagerProfileId *string `puppet:"name=>'traffic_manager_profile_id'"` + Fqdn *string + ProfileStatus *string `puppet:"name=>'profile_status'"` + Tags *map[string]string +} + +type UserAssignedIdentity struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + UserAssignedIdentityId *string `puppet:"name=>'user_assigned_identity_id'"` + ClientId *string `puppet:"name=>'client_id'"` + PrincipalId *string `puppet:"name=>'principal_id'"` + Tags *map[string]string +} + +type VirtualMachine struct { + Location string + Name string + NetworkInterfaceIds []string `puppet:"name=>'network_interface_ids'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + VmSize string `puppet:"name=>'vm_size'"` + VirtualMachineId *string `puppet:"name=>'virtual_machine_id'"` + AvailabilitySetId *string `puppet:"name=>'availability_set_id'"` + BootDiagnostics *Diagnostics `puppet:"name=>'boot_diagnostics'"` + DeleteDataDisksOnTermination *bool `puppet:"name=>'delete_data_disks_on_termination'"` + DeleteOsDiskOnTermination *bool `puppet:"name=>'delete_os_disk_on_termination'"` + Identity *MachineIdentity + LicenseType *string `puppet:"name=>'license_type'"` + OsProfile *OsProfile `puppet:"name=>'os_profile'"` + OsProfileLinuxConfig *LinuxConfig `puppet:"name=>'os_profile_linux_config'"` + OsProfileSecrets *[]Secrets `puppet:"name=>'os_profile_secrets'"` + OsProfileWindowsConfig *WindowsConfig `puppet:"name=>'os_profile_windows_config'"` + Plan *Plan + PrimaryNetworkInterfaceId *string `puppet:"name=>'primary_network_interface_id'"` + StorageDataDisk *[]DataDisk `puppet:"name=>'storage_data_disk'"` + StorageImageReference *ImageReference `puppet:"name=>'storage_image_reference'"` + StorageOsDisk *StorageOsDisk `puppet:"name=>'storage_os_disk'"` + Tags *map[string]string + Zones *[]string +} + +type VirtualMachineDataDiskAttachment struct { + Caching string + Lun int64 + ManagedDiskId string `puppet:"name=>'managed_disk_id'"` + VirtualMachineId string `puppet:"name=>'virtual_machine_id'"` + VirtualMachineDataDiskAttachmentId *string `puppet:"name=>'virtual_machine_data_disk_attachment_id'"` + CreateOption *string `puppet:"name=>'create_option'"` + WriteAcceleratorEnabled *bool `puppet:"name=>'write_accelerator_enabled'"` +} + +type VirtualMachineExtension struct { + Location string + Name string + Publisher string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Type string + TypeHandlerVersion string `puppet:"name=>'type_handler_version'"` + VirtualMachineName string `puppet:"name=>'virtual_machine_name'"` + VirtualMachineExtensionId *string `puppet:"name=>'virtual_machine_extension_id'"` + AutoUpgradeMinorVersion *bool `puppet:"name=>'auto_upgrade_minor_version'"` + ProtectedSettings *string `puppet:"name=>'protected_settings'"` + Settings *string + Tags *map[string]string +} + +type VirtualMachineScaleSet struct { + Location string + Name string + NetworkProfile []SetNetworkProfile `puppet:"name=>'network_profile'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + UpgradePolicyMode string `puppet:"name=>'upgrade_policy_mode'"` + VirtualMachineScaleSetId *string 
`puppet:"name=>'virtual_machine_scale_set_id'"` + AutomaticOsUpgrade *bool `puppet:"name=>'automatic_os_upgrade'"` + BootDiagnostics *Diagnostics `puppet:"name=>'boot_diagnostics'"` + EvictionPolicy *string `puppet:"name=>'eviction_policy'"` + Extension *[]Extension + HealthProbeId *string `puppet:"name=>'health_probe_id'"` + Identity *MachineIdentity + LicenseType *string `puppet:"name=>'license_type'"` + OsProfile *SetOsProfile `puppet:"name=>'os_profile'"` + OsProfileLinuxConfig *ProfileLinuxConfig `puppet:"name=>'os_profile_linux_config'"` + OsProfileSecrets *[]Secrets `puppet:"name=>'os_profile_secrets'"` + OsProfileWindowsConfig *ProfileWindowsConfig `puppet:"name=>'os_profile_windows_config'"` + Overprovision *bool + Plan *Plan + Priority *string + RollingUpgradePolicy *UpgradePolicy `puppet:"name=>'rolling_upgrade_policy'"` + SinglePlacementGroup *bool `puppet:"name=>'single_placement_group'"` + Sku *SetSku + StorageProfileDataDisk *[]ProfileDataDisk `puppet:"name=>'storage_profile_data_disk'"` + StorageProfileImageReference *ImageReference `puppet:"name=>'storage_profile_image_reference'"` + StorageProfileOsDisk *ProfileOsDisk `puppet:"name=>'storage_profile_os_disk'"` + Tags *map[string]string + Zones *[]string +} + +type VirtualNetwork struct { + AddressSpace []string `puppet:"name=>'address_space'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + VirtualNetworkId *string `puppet:"name=>'virtual_network_id'"` + DnsServers *[]string `puppet:"name=>'dns_servers'"` + Subnet *[]VirtualNetworkSubnet + Tags *map[string]string +} + +type VirtualNetworkGateway struct { + IpConfiguration []NetworkGatewayIpConfiguration `puppet:"name=>'ip_configuration'"` + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Sku string + Type string + VirtualNetworkGatewayId *string `puppet:"name=>'virtual_network_gateway_id'"` + ActiveActive *bool `puppet:"name=>'active_active'"` + BgpSettings *GatewayBgpSettings `puppet:"name=>'bgp_settings'"` + DefaultLocalNetworkGatewayId *string `puppet:"name=>'default_local_network_gateway_id'"` + EnableBgp *bool `puppet:"name=>'enable_bgp'"` + Tags *map[string]string + VpnClientConfiguration *ClientConfiguration `puppet:"name=>'vpn_client_configuration'"` + VpnType *string `puppet:"name=>'vpn_type'"` +} + +type VirtualNetworkGatewayConnection struct { + Location string + Name string + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + Type string + VirtualNetworkGatewayId string `puppet:"name=>'virtual_network_gateway_id'"` + VirtualNetworkGatewayConnectionId *string `puppet:"name=>'virtual_network_gateway_connection_id'"` + AuthorizationKey *string `puppet:"name=>'authorization_key'"` + EnableBgp *bool `puppet:"name=>'enable_bgp'"` + ExpressRouteCircuitId *string `puppet:"name=>'express_route_circuit_id'"` + IpsecPolicy *IpsecPolicy `puppet:"name=>'ipsec_policy'"` + LocalNetworkGatewayId *string `puppet:"name=>'local_network_gateway_id'"` + PeerVirtualNetworkGatewayId *string `puppet:"name=>'peer_virtual_network_gateway_id'"` + RoutingWeight *int64 `puppet:"name=>'routing_weight'"` + SharedKey *string `puppet:"name=>'shared_key'"` + Tags *map[string]string + UsePolicyBasedTrafficSelectors *bool `puppet:"name=>'use_policy_based_traffic_selectors'"` +} + +type VirtualNetworkPeering struct { + Name string + RemoteVirtualNetworkId string `puppet:"name=>'remote_virtual_network_id'"` + ResourceGroupName string `puppet:"name=>'resource_group_name'"` + 
VirtualNetworkName string `puppet:"name=>'virtual_network_name'"` + VirtualNetworkPeeringId *string `puppet:"name=>'virtual_network_peering_id'"` + AllowForwardedTraffic *bool `puppet:"name=>'allow_forwarded_traffic'"` + AllowGatewayTransit *bool `puppet:"name=>'allow_gateway_transit'"` + AllowVirtualNetworkAccess *bool `puppet:"name=>'allow_virtual_network_access'"` + UseRemoteGateways *bool `puppet:"name=>'use_remote_gateways'"` +} + +type AccessControl struct { + Enabled bool + AzureActiveDirectory *ActiveDirectory `puppet:"name=>'azure_active_directory'"` +} + +type AccessPolicy struct { + KeyName *string `puppet:"name=>'key_name'"` + Permissions *string + PrimaryKey *string `puppet:"name=>'primary_key'"` + SecondaryKey *string `puppet:"name=>'secondary_key'"` +} + +type AccountSku struct { + Name string + Tier string +} + +type Acls struct { + Bypass string + DefaultAction string `puppet:"name=>'default_action'"` + IpRules *[]string `puppet:"name=>'ip_rules'"` + VirtualNetworkSubnetIds *[]string `puppet:"name=>'virtual_network_subnet_ids'"` +} + +type Action struct { + ActionGroupId string `puppet:"name=>'action_group_id'"` + WebhookProperties *map[string]string `puppet:"name=>'webhook_properties'"` +} + +type ActionAction struct { + ActionType string `puppet:"name=>'action_type'"` +} + +type ActionTrigger struct { + DaysBeforeExpiry *int64 `puppet:"name=>'days_before_expiry'"` + LifetimePercentage *int64 `puppet:"name=>'lifetime_percentage'"` +} + +type ActiveDirectory struct { + ClientAppId string `puppet:"name=>'client_app_id'"` + ServerAppId string `puppet:"name=>'server_app_id'"` + ServerAppSecret string `puppet:"name=>'server_app_secret'"` + TenantId *string `puppet:"name=>'tenant_id'"` +} + +type AddonProfile struct { + AciConnectorLinux *Linux `puppet:"name=>'aci_connector_linux'"` + HttpApplicationRouting *Routing `puppet:"name=>'http_application_routing'"` + OmsAgent *Agent `puppet:"name=>'oms_agent'"` +} + +type AddressConfiguration struct { + DomainNameLabel string `puppet:"name=>'domain_name_label'"` + IdleTimeout int64 `puppet:"name=>'idle_timeout'"` + Name string +} + +type Agent struct { + Enabled bool + LogAnalyticsWorkspaceId string `puppet:"name=>'log_analytics_workspace_id'"` +} + +type AgentPoolProfile struct { + Name string + VmSize string `puppet:"name=>'vm_size'"` + Count *int64 + MaxPods *int64 `puppet:"name=>'max_pods'"` + OsDiskSizeGb *int64 `puppet:"name=>'os_disk_size_gb'"` + OsType *string `puppet:"name=>'os_type'"` + VnetSubnetId *string `puppet:"name=>'vnet_subnet_id'"` +} + +type AlertCriteria struct { + Aggregation string + MetricName string `puppet:"name=>'metric_name'"` + MetricNamespace string `puppet:"name=>'metric_namespace'"` + Operator string + Threshold float64 + Dimension *[]Dimension +} + +type ApnsCredential struct { + ApplicationMode string `puppet:"name=>'application_mode'"` + BundleId string `puppet:"name=>'bundle_id'"` + KeyId string `puppet:"name=>'key_id'"` + TeamId string `puppet:"name=>'team_id'"` + Token string +} + +type AssignmentIdentity struct { + PrincipalId *string `puppet:"name=>'principal_id'"` + TenantId *string `puppet:"name=>'tenant_id'"` + Type *string +} + +type AuthenticationCertificate struct { + Name string + Id *string +} + +type AzureActiveDirectory struct { + ClientApplicationId string `puppet:"name=>'client_application_id'"` + ClusterApplicationId string `puppet:"name=>'cluster_application_id'"` + TenantId string `puppet:"name=>'tenant_id'"` +} + +type Backup struct { + Frequency string + Time string + Weekdays 
*[]string +} + +type Basic struct { + Password string + Username string +} + +type BgpSettings struct { + Asn int64 + BgpPeeringAddress string `puppet:"name=>'bgp_peering_address'"` + PeerWeight *int64 `puppet:"name=>'peer_weight'"` +} + +type Capacity struct { + Default int64 + Maximum int64 + Minimum int64 +} + +type CaptureFilter struct { + Protocol string + LocalIpAddress *string `puppet:"name=>'local_ip_address'"` + LocalPort *string `puppet:"name=>'local_port'"` + RemoteIpAddress *string `puppet:"name=>'remote_ip_address'"` + RemotePort *string `puppet:"name=>'remote_port'"` +} + +type Certificate struct { + Password string + Pfx string + Expiration *string + SubjectName *string `puppet:"name=>'subject_name'"` + Thumbprint *string +} + +type CertificateCertificate struct { + Contents string + Password *string +} + +type CertificatePolicy struct { + IssuerParameters *Sku `puppet:"name=>'issuer_parameters'"` + KeyProperties *Properties `puppet:"name=>'key_properties'"` + LifetimeAction *[]LifetimeAction `puppet:"name=>'lifetime_action'"` + SecretProperties *SecretProperties `puppet:"name=>'secret_properties'"` + X509CertificateProperties *CertificateProperties `puppet:"name=>'x509_certificate_properties'"` +} + +type CertificateProperties struct { + KeyUsage []string `puppet:"name=>'key_usage'"` + Subject string + ValidityInMonths int64 `puppet:"name=>'validity_in_months'"` + ExtendedKeyUsage *[]string `puppet:"name=>'extended_key_usage'"` + SubjectAlternativeNames *Names `puppet:"name=>'subject_alternative_names'"` +} + +type Certificates struct { + CertificateUrl string `puppet:"name=>'certificate_url'"` + CertificateStore *string `puppet:"name=>'certificate_store'"` +} + +type CircuitSku struct { + Family string + Tier string +} + +type ClientConfiguration struct { + AddressSpace []string `puppet:"name=>'address_space'"` + RadiusServerAddress *string `puppet:"name=>'radius_server_address'"` + RadiusServerSecret *string `puppet:"name=>'radius_server_secret'"` + RevokedCertificate *[]RevokedCertificate `puppet:"name=>'revoked_certificate'"` + RootCertificate *[]RootCertificate `puppet:"name=>'root_certificate'"` + VpnClientProtocols *[]string `puppet:"name=>'vpn_client_protocols'"` +} + +type ClusterCertificate struct { + Thumbprint string + X509StoreName string `puppet:"name=>'x509_store_name'"` + ThumbprintSecondary *string `puppet:"name=>'thumbprint_secondary'"` +} + +type ClusterLinuxProfile struct { + AdminUsername string `puppet:"name=>'admin_username'"` + SshKey []Key `puppet:"name=>'ssh_key'"` +} + +type CollectionRule struct { + Name string + SourceAddresses []string `puppet:"name=>'source_addresses'"` + Description *string + FqdnTags *[]string `puppet:"name=>'fqdn_tags'"` + Protocol *[]Protocol + TargetFqdns *[]string `puppet:"name=>'target_fqdns'"` +} + +type Config struct { + AlwaysOn *bool `puppet:"name=>'always_on'"` + AppCommandLine *string `puppet:"name=>'app_command_line'"` + DefaultDocuments *[]string `puppet:"name=>'default_documents'"` + DotnetFrameworkVersion *string `puppet:"name=>'dotnet_framework_version'"` + FtpsState *string `puppet:"name=>'ftps_state'"` + Http2Enabled *bool `puppet:"name=>'http2_enabled'"` + IpRestriction *[]Restriction `puppet:"name=>'ip_restriction'"` + JavaContainer *string `puppet:"name=>'java_container'"` + JavaContainerVersion *string `puppet:"name=>'java_container_version'"` + JavaVersion *string `puppet:"name=>'java_version'"` + LinuxFxVersion *string `puppet:"name=>'linux_fx_version'"` + LocalMysqlEnabled *bool 
`puppet:"name=>'local_mysql_enabled'"` + ManagedPipelineMode *string `puppet:"name=>'managed_pipeline_mode'"` + MinTlsVersion *string `puppet:"name=>'min_tls_version'"` + PhpVersion *string `puppet:"name=>'php_version'"` + PythonVersion *string `puppet:"name=>'python_version'"` + RemoteDebuggingEnabled *bool `puppet:"name=>'remote_debugging_enabled'"` + RemoteDebuggingVersion *string `puppet:"name=>'remote_debugging_version'"` + ScmType *string `puppet:"name=>'scm_type'"` + Use32BitWorkerProcess *bool `puppet:"name=>'use_32_bit_worker_process'"` + VirtualNetworkName *string `puppet:"name=>'virtual_network_name'"` + WebsocketsEnabled *bool `puppet:"name=>'websockets_enabled'"` +} + +type Configuration struct { + Management *[]ScmManagementPortal + Portal *[]ScmManagementPortal + Proxy *[]Proxy + Scm *[]ScmManagementPortal +} + +type ConsistencyPolicy struct { + ConsistencyLevel string `puppet:"name=>'consistency_level'"` + MaxIntervalInSeconds *int64 `puppet:"name=>'max_interval_in_seconds'"` + MaxStalenessPrefix *int64 `puppet:"name=>'max_staleness_prefix'"` +} + +type Container struct { + Cpu float64 + Image string + Memory float64 + Name string + Commands *[]string + EnvironmentVariables *map[string]string `puppet:"name=>'environment_variables'"` + Ports *[]ContainerPorts + SecureEnvironmentVariables *map[string]string `puppet:"name=>'secure_environment_variables'"` + Volume *[]Volume +} + +type ContainerPorts struct { + Port *int64 + Protocol *string +} + +type ContentLink struct { + Uri string + Hash *Hash + Version *string +} + +type Control struct { + Branch *string + RepoUrl *string `puppet:"name=>'repo_url'"` +} + +type CorrelationFilter struct { + ContentType *string `puppet:"name=>'content_type'"` + CorrelationId *string `puppet:"name=>'correlation_id'"` + Label *string + MessageId *string `puppet:"name=>'message_id'"` + ReplyTo *string `puppet:"name=>'reply_to'"` + ReplyToSessionId *string `puppet:"name=>'reply_to_session_id'"` + SessionId *string `puppet:"name=>'session_id'"` + To *string +} + +type Credential struct { + Password *string + Username *string +} + +type Criteria struct { + Category string + Caller *string + Level *string + OperationName *string `puppet:"name=>'operation_name'"` + ResourceGroup *string `puppet:"name=>'resource_group'"` + ResourceId *string `puppet:"name=>'resource_id'"` + ResourceProvider *string `puppet:"name=>'resource_provider'"` + ResourceType *string `puppet:"name=>'resource_type'"` + Status *string + SubStatus *string `puppet:"name=>'sub_status'"` +} + +type Daily struct { + Count int64 +} + +type DataDisk struct { + CreateOption string `puppet:"name=>'create_option'"` + Lun int64 + Name string + Caching *string + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + ManagedDiskId *string `puppet:"name=>'managed_disk_id'"` + ManagedDiskType *string `puppet:"name=>'managed_disk_type'"` + VhdUri *string `puppet:"name=>'vhd_uri'"` + WriteAcceleratorEnabled *bool `puppet:"name=>'write_accelerator_enabled'"` +} + +type DatabaseSettings struct { + MaxCapacity float64 `puppet:"name=>'max_capacity'"` + MinCapacity float64 `puppet:"name=>'min_capacity'"` +} + +type Date struct { + End string + Start string + Timezone *string +} + +type Delegation struct { + Name string + Actions *[]string +} + +type Description struct { + Enabled bool + Encoding string + Destination *Destination + IntervalInSeconds *int64 `puppet:"name=>'interval_in_seconds'"` + SizeLimitInBytes *int64 `puppet:"name=>'size_limit_in_bytes'"` +} + +type Destination struct { + 
ArchiveNameFormat string `puppet:"name=>'archive_name_format'"` + BlobContainerName string `puppet:"name=>'blob_container_name'"` + Name string + StorageAccountId string `puppet:"name=>'storage_account_id'"` +} + +type DetectionPolicy struct { + DisabledAlerts *[]string `puppet:"name=>'disabled_alerts'"` + EmailAccountAdmins *string `puppet:"name=>'email_account_admins'"` + EmailAddresses *[]string `puppet:"name=>'email_addresses'"` + RetentionDays *int64 `puppet:"name=>'retention_days'"` + State *string + StorageAccountAccessKey *string `puppet:"name=>'storage_account_access_key'"` + StorageEndpoint *string `puppet:"name=>'storage_endpoint'"` + UseServerDefault *string `puppet:"name=>'use_server_default'"` +} + +type Diagnostics struct { + Enabled bool + StorageUri string `puppet:"name=>'storage_uri'"` +} + +type DiagnosticsConfig struct { + BlobEndpoint string `puppet:"name=>'blob_endpoint'"` + ProtectedAccountKeyName string `puppet:"name=>'protected_account_key_name'"` + QueueEndpoint string `puppet:"name=>'queue_endpoint'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + TableEndpoint string `puppet:"name=>'table_endpoint'"` +} + +type DiagnosticsProfile struct { + Enabled bool + StorageUri *string `puppet:"name=>'storage_uri'"` +} + +type Dimension struct { + Name string + Operator string + Values []string +} + +type Directory struct { + ClientId string `puppet:"name=>'client_id'"` + Secret string + TenantId string `puppet:"name=>'tenant_id'"` + Audience *string +} + +type Disk struct { + BlobUri *string `puppet:"name=>'blob_uri'"` + Caching *string + Lun *int64 + ManagedDiskId *string `puppet:"name=>'managed_disk_id'"` + SizeGb *int64 `puppet:"name=>'size_gb'"` +} + +type DnsConfig struct { + RelativeName string `puppet:"name=>'relative_name'"` + Ttl int64 +} + +type DnsSettings struct { + DnsServers []string `puppet:"name=>'dns_servers'"` +} + +type Domain struct { + Name string + UseSubdomain *bool `puppet:"name=>'use_subdomain'"` +} + +type ElasticpoolSku struct { + Capacity int64 + Name string + Tier string + Family *string +} + +type Email struct { + CustomEmails *[]string `puppet:"name=>'custom_emails'"` + SendToSubscriptionAdministrator *bool `puppet:"name=>'send_to_subscription_administrator'"` + SendToSubscriptionCoAdministrator *bool `puppet:"name=>'send_to_subscription_co_administrator'"` +} + +type EmailAction struct { + CustomEmails *[]string `puppet:"name=>'custom_emails'"` + SendToServiceOwners *bool `puppet:"name=>'send_to_service_owners'"` +} + +type EncryptionKey struct { + SecretUrl string `puppet:"name=>'secret_url'"` + SourceVaultId string `puppet:"name=>'source_vault_id'"` +} + +type Endpoint struct { + ConnectionString string `puppet:"name=>'connection_string'"` + Name string + Type string + BatchFrequencyInSeconds *int64 `puppet:"name=>'batch_frequency_in_seconds'"` + ContainerName *string `puppet:"name=>'container_name'"` + Encoding *string + FileNameFormat *string `puppet:"name=>'file_name_format'"` + MaxChunkSizeInBytes *int64 `puppet:"name=>'max_chunk_size_in_bytes'"` +} + +type Extension struct { + Name string + Publisher string + Type string + TypeHandlerVersion string `puppet:"name=>'type_handler_version'"` + AutoUpgradeMinorVersion *bool `puppet:"name=>'auto_upgrade_minor_version'"` + ProtectedSettings *string `puppet:"name=>'protected_settings'"` + Settings *string +} + +type FabricSettings struct { + Name string + Parameters *map[string]string +} + +type Filter struct { + Action string + CountryCodes []string 
`puppet:"name=>'country_codes'"` + RelativePath string `puppet:"name=>'relative_path'"` +} + +type FirewallIpConfiguration struct { + Name string + SubnetId string `puppet:"name=>'subnet_id'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PublicIpAddressId *string `puppet:"name=>'public_ip_address_id'"` +} + +type FixedScale struct { + ResizeTimeout *string `puppet:"name=>'resize_timeout'"` + TargetDedicatedNodes *int64 `puppet:"name=>'target_dedicated_nodes'"` + TargetLowPriorityNodes *int64 `puppet:"name=>'target_low_priority_nodes'"` +} + +type FrontendIpConfiguration struct { + Name string + InboundNatRules *[]string `puppet:"name=>'inbound_nat_rules'"` + LoadBalancerRules *[]string `puppet:"name=>'load_balancer_rules'"` + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PrivateIpAddressAllocation *string `puppet:"name=>'private_ip_address_allocation'"` + PublicIpAddressId *string `puppet:"name=>'public_ip_address_id'"` + SubnetId *string `puppet:"name=>'subnet_id'"` + Zones *[]string +} + +type GatewayAuthenticationCertificate struct { + Data string + Name string + Id *string +} + +type GatewayBgpSettings struct { + Asn *int64 + PeerWeight *int64 `puppet:"name=>'peer_weight'"` + PeeringAddress *string `puppet:"name=>'peering_address'"` +} + +type GatewayIpConfiguration struct { + Name string + SubnetId string `puppet:"name=>'subnet_id'"` + Id *string +} + +type GcmCredential struct { + ApiKey string `puppet:"name=>'api_key'"` +} + +type GeoLocation struct { + FailoverPriority int64 `puppet:"name=>'failover_priority'"` + Location string + Id *string + Prefix *string +} + +type Hash struct { + Algorithm string + Value string +} + +type HttpSettings struct { + CookieBasedAffinity string `puppet:"name=>'cookie_based_affinity'"` + Name string + Port int64 + Protocol string + AuthenticationCertificate *[]AuthenticationCertificate `puppet:"name=>'authentication_certificate'"` + Id *string + ProbeId *string `puppet:"name=>'probe_id'"` + ProbeName *string `puppet:"name=>'probe_name'"` + RequestTimeout *int64 `puppet:"name=>'request_timeout'"` +} + +type Identifier struct { + Offer string + Publisher string + Sku string +} + +type Identity struct { + Type string + PrincipalId *string `puppet:"name=>'principal_id'"` + TenantId *string `puppet:"name=>'tenant_id'"` +} + +type ImageReference struct { + Id *string + Offer *string + Publisher *string + Sku *string + Version *string +} + +type Import struct { + AdministratorLogin string `puppet:"name=>'administrator_login'"` + AdministratorLoginPassword string `puppet:"name=>'administrator_login_password'"` + AuthenticationType string `puppet:"name=>'authentication_type'"` + StorageKey string `puppet:"name=>'storage_key'"` + StorageKeyType string `puppet:"name=>'storage_key_type'"` + StorageUri string `puppet:"name=>'storage_uri'"` + OperationMode *string `puppet:"name=>'operation_mode'"` +} + +type InterfaceIpConfiguration struct { + Name string + PrivateIpAddressAllocation string `puppet:"name=>'private_ip_address_allocation'"` + ApplicationSecurityGroupIds *[]string `puppet:"name=>'application_security_group_ids'"` + Primary *bool + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PrivateIpAddressVersion *string `puppet:"name=>'private_ip_address_version'"` + PublicIpAddressId *string `puppet:"name=>'public_ip_address_id'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type IothubRoute struct { + Enabled bool + EndpointNames []string `puppet:"name=>'endpoint_names'"` + Name string + 
Source string + Condition *string +} + +type IothubSku struct { + Capacity int64 + Name string + Tier string +} + +type IpConfiguration struct { + Name string + Id *string + PrivateIpAddress *string `puppet:"name=>'private_ip_address'"` + PrivateIpAddressAllocation *string `puppet:"name=>'private_ip_address_allocation'"` + PublicIpAddressId *string `puppet:"name=>'public_ip_address_id'"` + SubnetId *string `puppet:"name=>'subnet_id'"` +} + +type IpsecPolicy struct { + DhGroup string `puppet:"name=>'dh_group'"` + IkeEncryption string `puppet:"name=>'ike_encryption'"` + IkeIntegrity string `puppet:"name=>'ike_integrity'"` + IpsecEncryption string `puppet:"name=>'ipsec_encryption'"` + IpsecIntegrity string `puppet:"name=>'ipsec_integrity'"` + PfsGroup string `puppet:"name=>'pfs_group'"` + SaDatasize *int64 `puppet:"name=>'sa_datasize'"` + SaLifetime *int64 `puppet:"name=>'sa_lifetime'"` +} + +type JobRecurrence struct { + Frequency string + Count *int64 + EndTime *string `puppet:"name=>'end_time'"` + Hours *[]int64 + Interval *int64 + Minutes *[]int64 + MonthDays *[]int64 `puppet:"name=>'month_days'"` + MonthlyOccurrences *[]OccurrenceOccurrences `puppet:"name=>'monthly_occurrences'"` + WeekDays *[]string `puppet:"name=>'week_days'"` +} + +type Key struct { + KeyData string `puppet:"name=>'key_data'"` +} + +type KeyEncryptionKey struct { + KeyUrl string `puppet:"name=>'key_url'"` + SourceVaultId string `puppet:"name=>'source_vault_id'"` +} + +type Keys struct { + KeyData string `puppet:"name=>'key_data'"` + Path string +} + +type KubeConfig struct { + ClientCertificate *string `puppet:"name=>'client_certificate'"` + ClientKey *string `puppet:"name=>'client_key'"` + ClusterCaCertificate *string `puppet:"name=>'cluster_ca_certificate'"` + Host *string + Password *string + Username *string +} + +type LifetimeAction struct { + Action *ActionAction + Trigger *ActionTrigger +} + +type Link struct { + Uri string + Hash *Hash +} + +type Linux struct { + Enabled bool + SubnetName string `puppet:"name=>'subnet_name'"` +} + +type LinuxConfig struct { + DisablePasswordAuthentication bool `puppet:"name=>'disable_password_authentication'"` + SshKeys *[]Keys `puppet:"name=>'ssh_keys'"` +} + +type LinuxProfile struct { + AdminUsername string `puppet:"name=>'admin_username'"` + SshKey *Key `puppet:"name=>'ssh_key'"` +} + +type Listener struct { + FrontendIpConfigurationName string `puppet:"name=>'frontend_ip_configuration_name'"` + FrontendPortName string `puppet:"name=>'frontend_port_name'"` + Name string + Protocol string + FrontendIpConfigurationId *string `puppet:"name=>'frontend_ip_configuration_id'"` + FrontendPortId *string `puppet:"name=>'frontend_port_id'"` + HostName *string `puppet:"name=>'host_name'"` + Id *string + RequireSni *bool `puppet:"name=>'require_sni'"` + SslCertificateId *string `puppet:"name=>'ssl_certificate_id'"` + SslCertificateName *string `puppet:"name=>'ssl_certificate_name'"` +} + +type Location struct { + Location string + GatewayRegionalUrl *string `puppet:"name=>'gateway_regional_url'"` + PublicIpAddresses *[]string `puppet:"name=>'public_ip_addresses'"` +} + +type LogMetric struct { + Category string + Enabled *bool + RetentionPolicy *Policy `puppet:"name=>'retention_policy'"` +} + +type MachineIdentity struct { + Type string + IdentityIds *[]string `puppet:"name=>'identity_ids'"` + PrincipalId *string `puppet:"name=>'principal_id'"` +} + +type ManagementCertificate struct { + CertificatePassword string `puppet:"name=>'certificate_password'"` + EncodedCertificate string 
`puppet:"name=>'encoded_certificate'"` + StoreName string `puppet:"name=>'store_name'"` +} + +type ManagementSku struct { + Capacity int64 + Name string +} + +type Map struct { + DefaultBackendAddressPoolName string `puppet:"name=>'default_backend_address_pool_name'"` + DefaultBackendHttpSettingsName string `puppet:"name=>'default_backend_http_settings_name'"` + Name string + PathRule []PathRule `puppet:"name=>'path_rule'"` + DefaultBackendAddressPoolId *string `puppet:"name=>'default_backend_address_pool_id'"` + DefaultBackendHttpSettingsId *string `puppet:"name=>'default_backend_http_settings_id'"` + Id *string +} + +type MasterProfile struct { + DnsPrefix string `puppet:"name=>'dns_prefix'"` + Count *int64 + Fqdn *string +} + +type Match struct { + Body *string + StatusCode *[]string `puppet:"name=>'status_code'"` +} + +type MonitorConfig struct { + Port int64 + Protocol string + Path *string +} + +type Monthly struct { + Count int64 + Weekdays []string + Weeks []string +} + +type Names struct { + DnsNames *[]string `puppet:"name=>'dns_names'"` + Emails *[]string + Upns *[]string +} + +type NetworkGatewayIpConfiguration struct { + SubnetId string `puppet:"name=>'subnet_id'"` + Name *string + PrivateIpAddressAllocation *string `puppet:"name=>'private_ip_address_allocation'"` + PublicIpAddressId *string `puppet:"name=>'public_ip_address_id'"` +} + +type NetworkProfile struct { + NetworkPlugin string `puppet:"name=>'network_plugin'"` + DnsServiceIp *string `puppet:"name=>'dns_service_ip'"` + DockerBridgeCidr *string `puppet:"name=>'docker_bridge_cidr'"` + PodCidr *string `puppet:"name=>'pod_cidr'"` + ServiceCidr *string `puppet:"name=>'service_cidr'"` +} + +type NetworkRule struct { + Id string +} + +type NetworkSubnet struct { + Name *string + UseInVirtualMachineCreation *string `puppet:"name=>'use_in_virtual_machine_creation'"` + UsePublicIpAddress *string `puppet:"name=>'use_public_ip_address'"` +} + +type Notification struct { + Email *Email + Webhook *[]WebhookAction +} + +type OccurrenceOccurrences struct { + Day string + Occurrence int64 +} + +type Origin struct { + HostName string `puppet:"name=>'host_name'"` + Name string + HttpPort *int64 `puppet:"name=>'http_port'"` + HttpsPort *int64 `puppet:"name=>'https_port'"` +} + +type OsDisk struct { + BlobUri *string `puppet:"name=>'blob_uri'"` + Caching *string + ManagedDiskId *string `puppet:"name=>'managed_disk_id'"` + OsState *string `puppet:"name=>'os_state'"` + OsType *string `puppet:"name=>'os_type'"` + SizeGb *int64 `puppet:"name=>'size_gb'"` +} + +type OsProfile struct { + AdminUsername string `puppet:"name=>'admin_username'"` + ComputerName string `puppet:"name=>'computer_name'"` + AdminPassword *string `puppet:"name=>'admin_password'"` + CustomData *string `puppet:"name=>'custom_data'"` +} + +type PathRule struct { + BackendAddressPoolName string `puppet:"name=>'backend_address_pool_name'"` + BackendHttpSettingsName string `puppet:"name=>'backend_http_settings_name'"` + Name string + Paths []string + BackendAddressPoolId *string `puppet:"name=>'backend_address_pool_id'"` + BackendHttpSettingsId *string `puppet:"name=>'backend_http_settings_id'"` + Id *string +} + +type PeeringConfig struct { + AdvertisedPublicPrefixes []string `puppet:"name=>'advertised_public_prefixes'"` +} + +type Permissions struct { + Actions *[]string + DataActions *[]string `puppet:"name=>'data_actions'"` + NotActions *[]string `puppet:"name=>'not_actions'"` + NotDataActions *[]string `puppet:"name=>'not_data_actions'"` +} + +type Plan struct { + Name 
string + Product string + Publisher string +} + +type PlanSku struct { + Size string + Tier string + Capacity *int64 +} + +type Policy struct { + Enabled bool + Days *int64 +} + +type Pool struct { + Name string + FqdnList *[]string `puppet:"name=>'fqdn_list'"` + Id *string + IpAddressList *[]string `puppet:"name=>'ip_address_list'"` +} + +type PoolProfile struct { + DnsPrefix string `puppet:"name=>'dns_prefix'"` + Name string + VmSize string `puppet:"name=>'vm_size'"` + Count *int64 + Fqdn *string +} + +type Port struct { + Name string + Port int64 + Id *string +} + +type Ports struct { + EndPort int64 `puppet:"name=>'end_port'"` + StartPort int64 `puppet:"name=>'start_port'"` +} + +type Principal struct { + ClientId string `puppet:"name=>'client_id'"` + ClientSecret string `puppet:"name=>'client_secret'"` +} + +type Probe struct { + Host string + Interval int64 + Name string + Path string + Protocol string + Timeout int64 + UnhealthyThreshold int64 `puppet:"name=>'unhealthy_threshold'"` + Id *string + Match *Match + MinimumServers *int64 `puppet:"name=>'minimum_servers'"` +} + +type Profile struct { + StorageMb int64 `puppet:"name=>'storage_mb'"` + BackupRetentionDays *int64 `puppet:"name=>'backup_retention_days'"` + GeoRedundantBackup *string `puppet:"name=>'geo_redundant_backup'"` +} + +type ProfileDataDisk struct { + CreateOption string `puppet:"name=>'create_option'"` + Lun int64 + Caching *string + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + ManagedDiskType *string `puppet:"name=>'managed_disk_type'"` +} + +type ProfileIpConfiguration struct { + Name string + Primary bool + SubnetId string `puppet:"name=>'subnet_id'"` + ApplicationGatewayBackendAddressPoolIds *[]string `puppet:"name=>'application_gateway_backend_address_pool_ids'"` + ApplicationSecurityGroupIds *[]string `puppet:"name=>'application_security_group_ids'"` + LoadBalancerBackendAddressPoolIds *[]string `puppet:"name=>'load_balancer_backend_address_pool_ids'"` + LoadBalancerInboundNatRulesIds *[]string `puppet:"name=>'load_balancer_inbound_nat_rules_ids'"` + PublicIpAddressConfiguration *AddressConfiguration `puppet:"name=>'public_ip_address_configuration'"` +} + +type ProfileLinuxConfig struct { + DisablePasswordAuthentication *bool `puppet:"name=>'disable_password_authentication'"` + SshKeys *[]SshKeys `puppet:"name=>'ssh_keys'"` +} + +type ProfileOsDisk struct { + CreateOption string `puppet:"name=>'create_option'"` + Caching *string + Image *string + ManagedDiskType *string `puppet:"name=>'managed_disk_type'"` + Name *string + OsType *string `puppet:"name=>'os_type'"` + VhdContainers *[]string `puppet:"name=>'vhd_containers'"` +} + +type ProfileRule struct { + MetricTrigger *Trigger `puppet:"name=>'metric_trigger'"` + ScaleAction *ScaleAction `puppet:"name=>'scale_action'"` +} + +type ProfileWindowsConfig struct { + AdditionalUnattendConfig *[]UnattendConfig `puppet:"name=>'additional_unattend_config'"` + EnableAutomaticUpgrades *bool `puppet:"name=>'enable_automatic_upgrades'"` + ProvisionVmAgent *bool `puppet:"name=>'provision_vm_agent'"` + Winrm *[]Winrm +} + +type Properties struct { + Exportable bool + KeySize int64 `puppet:"name=>'key_size'"` + KeyType string `puppet:"name=>'key_type'"` + ReuseKey bool `puppet:"name=>'reuse_key'"` +} + +type Protocol struct { + Type string + Port *int64 +} + +type Proxy struct { + HostName string `puppet:"name=>'host_name'"` + Certificate *string + CertificatePassword *string `puppet:"name=>'certificate_password'"` + DefaultSslBinding *bool 
`puppet:"name=>'default_ssl_binding'"` + KeyVaultId *string `puppet:"name=>'key_vault_id'"` + NegotiateClientCertificate *bool `puppet:"name=>'negotiate_client_certificate'"` +} + +type Queue struct { + Message string + SasToken string `puppet:"name=>'sas_token'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + StorageQueueName string `puppet:"name=>'storage_queue_name'"` +} + +type Quota struct { + MaxRecurrenceFrequency string `puppet:"name=>'max_recurrence_frequency'"` + MaxJobCount *int64 `puppet:"name=>'max_job_count'"` + MaxRecurrenceInterval *int64 `puppet:"name=>'max_recurrence_interval'"` +} + +type Receiver struct { + EmailAddress string `puppet:"name=>'email_address'"` + Name string +} + +type Record struct { + Flags int64 + Tag string + Value string +} + +type RecordRecord struct { + Exchange string + Preference string +} + +type Recurrence struct { + Days []string + Hours []int64 + Minutes []int64 + Timezone *string +} + +type RedisConfiguration struct { + Maxclients *int64 + MaxmemoryDelta *int64 `puppet:"name=>'maxmemory_delta'"` + MaxmemoryPolicy *string `puppet:"name=>'maxmemory_policy'"` + MaxmemoryReserved *int64 `puppet:"name=>'maxmemory_reserved'"` + NotifyKeyspaceEvents *string `puppet:"name=>'notify_keyspace_events'"` + RdbBackupEnabled *bool `puppet:"name=>'rdb_backup_enabled'"` + RdbBackupFrequency *int64 `puppet:"name=>'rdb_backup_frequency'"` + RdbBackupMaxSnapshotCount *int64 `puppet:"name=>'rdb_backup_max_snapshot_count'"` + RdbStorageConnectionString *string `puppet:"name=>'rdb_storage_connection_string'"` +} + +type Reference struct { + Offer string + Publisher string + Sku string + Version string +} + +type Region struct { + Name string + RegionalReplicaCount int64 `puppet:"name=>'regional_replica_count'"` +} + +type RegistryCredential struct { + Password string + Server string + Username string +} + +type Restriction struct { + IpAddress string `puppet:"name=>'ip_address'"` + SubnetMask *string `puppet:"name=>'subnet_mask'"` +} + +type RetentionPolicy struct { + Enabled bool + Days *int64 +} + +type Retry struct { + Count *int64 + Interval *string +} + +type RevokedCertificate struct { + Name string + Thumbprint string +} + +type RootCertificate struct { + Name string + PublicCertData string `puppet:"name=>'public_cert_data'"` +} + +type Routing struct { + Enabled bool + HttpApplicationRoutingZoneName *string `puppet:"name=>'http_application_routing_zone_name'"` +} + +type RoutingRule struct { + HttpListenerName string `puppet:"name=>'http_listener_name'"` + Name string + RuleType string `puppet:"name=>'rule_type'"` + BackendAddressPoolId *string `puppet:"name=>'backend_address_pool_id'"` + BackendAddressPoolName *string `puppet:"name=>'backend_address_pool_name'"` + BackendHttpSettingsId *string `puppet:"name=>'backend_http_settings_id'"` + BackendHttpSettingsName *string `puppet:"name=>'backend_http_settings_name'"` + HttpListenerId *string `puppet:"name=>'http_listener_id'"` + Id *string + UrlPathMapId *string `puppet:"name=>'url_path_map_id'"` + UrlPathMapName *string `puppet:"name=>'url_path_map_name'"` +} + +type Rule struct { + BackendPort int64 `puppet:"name=>'backend_port'"` + Protocol string + FrontendPort *int64 `puppet:"name=>'frontend_port'"` +} + +type RuleCollectionRule struct { + DestinationAddresses []string `puppet:"name=>'destination_addresses'"` + DestinationPorts []string `puppet:"name=>'destination_ports'"` + Name string + Protocols []string + SourceAddresses []string `puppet:"name=>'source_addresses'"` + 
Description *string +} + +type Rules struct { + Bypass *[]string + IpRules *[]string `puppet:"name=>'ip_rules'"` + VirtualNetworkSubnetIds *[]string `puppet:"name=>'virtual_network_subnet_ids'"` +} + +type Scale struct { + Formula string + EvaluationInterval *string `puppet:"name=>'evaluation_interval'"` +} + +type ScaleAction struct { + Cooldown string + Direction string + Type string + Value int64 +} + +type Schedule struct { + DayOfWeek string `puppet:"name=>'day_of_week'"` + StartHourUtc *int64 `puppet:"name=>'start_hour_utc'"` +} + +type ScmManagementPortal struct { + HostName string `puppet:"name=>'host_name'"` + Certificate *string + CertificatePassword *string `puppet:"name=>'certificate_password'"` + KeyVaultId *string `puppet:"name=>'key_vault_id'"` + NegotiateClientCertificate *bool `puppet:"name=>'negotiate_client_certificate'"` +} + +type SecretProperties struct { + ContentType string `puppet:"name=>'content_type'"` +} + +type Secrets struct { + SourceVaultId string `puppet:"name=>'source_vault_id'"` + VaultCertificates *[]Certificates `puppet:"name=>'vault_certificates'"` +} + +type Security struct { + DisableBackendSsl30 *bool `puppet:"name=>'disable_backend_ssl30'"` + DisableBackendTls10 *bool `puppet:"name=>'disable_backend_tls10'"` + DisableBackendTls11 *bool `puppet:"name=>'disable_backend_tls11'"` + DisableFrontendSsl30 *bool `puppet:"name=>'disable_frontend_ssl30'"` + DisableFrontendTls10 *bool `puppet:"name=>'disable_frontend_tls10'"` + DisableFrontendTls11 *bool `puppet:"name=>'disable_frontend_tls11'"` + DisableTripleDesChipers *bool `puppet:"name=>'disable_triple_des_chipers'"` +} + +type SecurityRule struct { + Access string + Direction string + Name string + Priority int64 + Protocol string + Description *string + DestinationAddressPrefix *string `puppet:"name=>'destination_address_prefix'"` + DestinationAddressPrefixes *[]string `puppet:"name=>'destination_address_prefixes'"` + DestinationApplicationSecurityGroupIds *[]string `puppet:"name=>'destination_application_security_group_ids'"` + DestinationPortRange *string `puppet:"name=>'destination_port_range'"` + DestinationPortRanges *[]string `puppet:"name=>'destination_port_ranges'"` + SourceAddressPrefix *string `puppet:"name=>'source_address_prefix'"` + SourceAddressPrefixes *[]string `puppet:"name=>'source_address_prefixes'"` + SourceApplicationSecurityGroupIds *[]string `puppet:"name=>'source_application_security_group_ids'"` + SourcePortRange *string `puppet:"name=>'source_port_range'"` + SourcePortRanges *[]string `puppet:"name=>'source_port_ranges'"` +} + +type ServerSku struct { + Capacity int64 + Family string + Name string + Tier string +} + +type ServiceProperties struct { + ResourceId string `puppet:"name=>'resource_id'"` +} + +type SetNetworkProfile struct { + IpConfiguration []ProfileIpConfiguration `puppet:"name=>'ip_configuration'"` + Name string + Primary bool + AcceleratedNetworking *bool `puppet:"name=>'accelerated_networking'"` + DnsSettings *DnsSettings `puppet:"name=>'dns_settings'"` + IpForwarding *bool `puppet:"name=>'ip_forwarding'"` + NetworkSecurityGroupId *string `puppet:"name=>'network_security_group_id'"` +} + +type SetOsProfile struct { + AdminUsername string `puppet:"name=>'admin_username'"` + ComputerNamePrefix string `puppet:"name=>'computer_name_prefix'"` + AdminPassword *string `puppet:"name=>'admin_password'"` + CustomData *string `puppet:"name=>'custom_data'"` +} + +type SetSku struct { + Capacity int64 + Name string + Tier *string +} + +type SettingProfile struct { + Name 
string + Capacity *Capacity + FixedDate *Date `puppet:"name=>'fixed_date'"` + Recurrence *Recurrence + Rule *[]ProfileRule +} + +type Settings struct { + Enabled bool + DiskEncryptionKey *EncryptionKey `puppet:"name=>'disk_encryption_key'"` + KeyEncryptionKey *KeyEncryptionKey `puppet:"name=>'key_encryption_key'"` +} + +type SiteConfig struct { + AlwaysOn *bool `puppet:"name=>'always_on'"` + Use32BitWorkerProcess *bool `puppet:"name=>'use_32_bit_worker_process'"` + WebsocketsEnabled *bool `puppet:"name=>'websockets_enabled'"` +} + +type Sku struct { + Name *string +} + +type SmsReceiver struct { + CountryCode string `puppet:"name=>'country_code'"` + Name string + PhoneNumber string `puppet:"name=>'phone_number'"` +} + +type SolutionPlan struct { + Product string + Publisher string + Name *string + PromotionCode *string `puppet:"name=>'promotion_code'"` +} + +type SrvRecordRecord struct { + Port int64 + Priority int64 + Target string + Weight int64 +} + +type SshKeys struct { + Path string + KeyData *string `puppet:"name=>'key_data'"` +} + +type SslCertificate struct { + Data string + Name string + Password string + Id *string + PublicCertData *string `puppet:"name=>'public_cert_data'"` +} + +type StorageImageReference struct { + Offer string + Publisher string + Sku string + Version string + Id *string +} + +type StorageLocation struct { + FilePath *string `puppet:"name=>'file_path'"` + StorageAccountId *string `puppet:"name=>'storage_account_id'"` + StoragePath *string `puppet:"name=>'storage_path'"` +} + +type StorageOsDisk struct { + CreateOption string `puppet:"name=>'create_option'"` + Name string + Caching *string + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + ImageUri *string `puppet:"name=>'image_uri'"` + ManagedDiskId *string `puppet:"name=>'managed_disk_id'"` + ManagedDiskType *string `puppet:"name=>'managed_disk_type'"` + OsType *string `puppet:"name=>'os_type'"` + VhdUri *string `puppet:"name=>'vhd_uri'"` + WriteAcceleratorEnabled *bool `puppet:"name=>'write_accelerator_enabled'"` +} + +type String struct { + Name string + Type string + Value string +} + +type SubnetDelegation struct { + Name string + ServiceDelegation *Delegation `puppet:"name=>'service_delegation'"` +} + +type TableRoute struct { + AddressPrefix string `puppet:"name=>'address_prefix'"` + Name string + NextHopType string `puppet:"name=>'next_hop_type'"` + NextHopInIpAddress *string `puppet:"name=>'next_hop_in_ip_address'"` +} + +type Task struct { + CommandLine string `puppet:"name=>'command_line'"` + Environment *map[string]string + MaxTaskRetryCount *int64 `puppet:"name=>'max_task_retry_count'"` + UserIdentity *UserIdentity `puppet:"name=>'user_identity'"` + WaitForSuccess *bool `puppet:"name=>'wait_for_success'"` +} + +type Thumbprint struct { + IsAdmin bool `puppet:"name=>'is_admin'"` + Thumbprint string +} + +type Trigger struct { + MetricName string `puppet:"name=>'metric_name'"` + MetricResourceId string `puppet:"name=>'metric_resource_id'"` + Operator string + Statistic string + Threshold float64 + TimeAggregation string `puppet:"name=>'time_aggregation'"` + TimeGrain string `puppet:"name=>'time_grain'"` + TimeWindow string `puppet:"name=>'time_window'"` +} + +type TxtRecordRecord struct { + Value string +} + +type Type struct { + ClientEndpointPort int64 `puppet:"name=>'client_endpoint_port'"` + HttpEndpointPort int64 `puppet:"name=>'http_endpoint_port'"` + InstanceCount int64 `puppet:"name=>'instance_count'"` + IsPrimary bool `puppet:"name=>'is_primary'"` + Name string + ApplicationPorts 
*Ports `puppet:"name=>'application_ports'"` + DurabilityLevel *string `puppet:"name=>'durability_level'"` + EphemeralPorts *Ports `puppet:"name=>'ephemeral_ports'"` + ReverseProxyEndpointPort *int64 `puppet:"name=>'reverse_proxy_endpoint_port'"` +} + +type UnattendConfig struct { + Component string + Content string + Pass string + SettingName string `puppet:"name=>'setting_name'"` +} + +type UpgradePolicy struct { + MaxBatchInstancePercent *int64 `puppet:"name=>'max_batch_instance_percent'"` + MaxUnhealthyInstancePercent *int64 `puppet:"name=>'max_unhealthy_instance_percent'"` + MaxUnhealthyUpgradedInstancePercent *int64 `puppet:"name=>'max_unhealthy_upgraded_instance_percent'"` + PauseTimeBetweenBatches *string `puppet:"name=>'pause_time_between_batches'"` +} + +type User struct { + ElevationLevel *string `puppet:"name=>'elevation_level'"` + Scope *string +} + +type UserIdentity struct { + AutoUser *User `puppet:"name=>'auto_user'"` + UserName *string `puppet:"name=>'user_name'"` +} + +type VaultAccessPolicy struct { + ObjectId string `puppet:"name=>'object_id'"` + TenantId string `puppet:"name=>'tenant_id'"` + ApplicationId *string `puppet:"name=>'application_id'"` + CertificatePermissions *[]string `puppet:"name=>'certificate_permissions'"` + KeyPermissions *[]string `puppet:"name=>'key_permissions'"` + SecretPermissions *[]string `puppet:"name=>'secret_permissions'"` +} + +type VirtualNetworkSubnet struct { + AddressPrefix string `puppet:"name=>'address_prefix'"` + Name string + Id *string + SecurityGroup *string `puppet:"name=>'security_group'"` +} + +type Volume struct { + MountPath string `puppet:"name=>'mount_path'"` + Name string + ShareName string `puppet:"name=>'share_name'"` + StorageAccountKey string `puppet:"name=>'storage_account_key'"` + StorageAccountName string `puppet:"name=>'storage_account_name'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type WafConfiguration struct { + Enabled bool + FirewallMode string `puppet:"name=>'firewall_mode'"` + RuleSetVersion string `puppet:"name=>'rule_set_version'"` + RuleSetType *string `puppet:"name=>'rule_set_type'"` +} + +type Web struct { + Method string + Url string + AuthenticationActiveDirectory *Directory `puppet:"name=>'authentication_active_directory'"` + AuthenticationBasic *Basic `puppet:"name=>'authentication_basic'"` + AuthenticationCertificate *Certificate `puppet:"name=>'authentication_certificate'"` + Body *string + Headers *map[string]string +} + +type WebhookAction struct { + ServiceUri string `puppet:"name=>'service_uri'"` + Properties *map[string]string +} + +type WebhookReceiver struct { + Name string + ServiceUri string `puppet:"name=>'service_uri'"` +} + +type Weekly struct { + Count int64 + Weekdays []string +} + +type WindowsConfig struct { + AdditionalUnattendConfig *[]UnattendConfig `puppet:"name=>'additional_unattend_config'"` + EnableAutomaticUpgrades *bool `puppet:"name=>'enable_automatic_upgrades'"` + ProvisionVmAgent *bool `puppet:"name=>'provision_vm_agent'"` + Timezone *string + Winrm *[]Winrm +} + +type Winrm struct { + Protocol string + CertificateUrl *string `puppet:"name=>'certificate_url'"` +} + +type Yearly struct { + Count int64 + Months []string + Weekdays []string + Weeks []string +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("AzureRM::Api_management"), 
reflect.TypeOf(&ApiManagement{})) + ir.RegisterType(load("AzureRM::App_service"), reflect.TypeOf(&AppService{})) + ir.RegisterType(load("AzureRM::App_service_active_slot"), reflect.TypeOf(&AppServiceActiveSlot{})) + ir.RegisterType(load("AzureRM::App_service_custom_hostname_binding"), reflect.TypeOf(&AppServiceCustomHostnameBinding{})) + ir.RegisterType(load("AzureRM::App_service_plan"), reflect.TypeOf(&AppServicePlan{})) + ir.RegisterType(load("AzureRM::App_service_slot"), reflect.TypeOf(&AppServiceSlot{})) + ir.RegisterType(load("AzureRM::Application_gateway"), reflect.TypeOf(&ApplicationGateway{})) + ir.RegisterType(load("AzureRM::Application_insights"), reflect.TypeOf(&ApplicationInsights{})) + ir.RegisterType(load("AzureRM::Application_insights_api_key"), reflect.TypeOf(&ApplicationInsightsApiKey{})) + ir.RegisterType(load("AzureRM::Application_security_group"), reflect.TypeOf(&ApplicationSecurityGroup{})) + ir.RegisterType(load("AzureRM::Automation_account"), reflect.TypeOf(&AutomationAccount{})) + ir.RegisterType(load("AzureRM::Automation_credential"), reflect.TypeOf(&AutomationCredential{})) + ir.RegisterType(load("AzureRM::Automation_dsc_configuration"), reflect.TypeOf(&AutomationDscConfiguration{})) + ir.RegisterType(load("AzureRM::Automation_dsc_nodeconfiguration"), reflect.TypeOf(&AutomationDscNodeconfiguration{})) + ir.RegisterType(load("AzureRM::Automation_module"), reflect.TypeOf(&AutomationModule{})) + ir.RegisterType(load("AzureRM::Automation_runbook"), reflect.TypeOf(&AutomationRunbook{})) + ir.RegisterType(load("AzureRM::Automation_schedule"), reflect.TypeOf(&AutomationSchedule{})) + ir.RegisterType(load("AzureRM::Autoscale_setting"), reflect.TypeOf(&AutoscaleSetting{})) + ir.RegisterType(load("AzureRM::Availability_set"), reflect.TypeOf(&AvailabilitySet{})) + ir.RegisterType(load("AzureRM::Azuread_application"), reflect.TypeOf(&AzureadApplication{})) + ir.RegisterType(load("AzureRM::Azuread_service_principal"), reflect.TypeOf(&AzureadServicePrincipal{})) + ir.RegisterType(load("AzureRM::Azuread_service_principal_password"), reflect.TypeOf(&AzureadServicePrincipalPassword{})) + ir.RegisterType(load("AzureRM::Batch_account"), reflect.TypeOf(&BatchAccount{})) + ir.RegisterType(load("AzureRM::Batch_pool"), reflect.TypeOf(&BatchPool{})) + ir.RegisterType(load("AzureRM::Cdn_endpoint"), reflect.TypeOf(&CdnEndpoint{})) + ir.RegisterType(load("AzureRM::Cdn_profile"), reflect.TypeOf(&CdnProfile{})) + ir.RegisterType(load("AzureRM::Cognitive_account"), reflect.TypeOf(&CognitiveAccount{})) + ir.RegisterType(load("AzureRM::Container_group"), reflect.TypeOf(&ContainerGroup{})) + ir.RegisterType(load("AzureRM::Container_registry"), reflect.TypeOf(&ContainerRegistry{})) + ir.RegisterType(load("AzureRM::Container_service"), reflect.TypeOf(&ContainerService{})) + ir.RegisterType(load("AzureRM::Cosmosdb_account"), reflect.TypeOf(&CosmosdbAccount{})) + ir.RegisterType(load("AzureRM::Data_lake_analytics_account"), reflect.TypeOf(&DataLakeAnalyticsAccount{})) + ir.RegisterType(load("AzureRM::Data_lake_analytics_firewall_rule"), reflect.TypeOf(&DataLakeAnalyticsFirewallRule{})) + ir.RegisterType(load("AzureRM::Data_lake_store"), reflect.TypeOf(&DataLakeStore{})) + ir.RegisterType(load("AzureRM::Data_lake_store_file"), reflect.TypeOf(&DataLakeStoreFile{})) + ir.RegisterType(load("AzureRM::Data_lake_store_firewall_rule"), reflect.TypeOf(&DataLakeStoreFirewallRule{})) + ir.RegisterType(load("AzureRM::Databricks_workspace"), reflect.TypeOf(&DatabricksWorkspace{})) + 
ir.RegisterType(load("AzureRM::Dev_test_lab"), reflect.TypeOf(&DevTestLab{})) + ir.RegisterType(load("AzureRM::Dev_test_linux_virtual_machine"), reflect.TypeOf(&DevTestLinuxVirtualMachine{})) + ir.RegisterType(load("AzureRM::Dev_test_policy"), reflect.TypeOf(&DevTestPolicy{})) + ir.RegisterType(load("AzureRM::Dev_test_virtual_network"), reflect.TypeOf(&DevTestVirtualNetwork{})) + ir.RegisterType(load("AzureRM::Dev_test_windows_virtual_machine"), reflect.TypeOf(&DevTestWindowsVirtualMachine{})) + ir.RegisterType(load("AzureRM::Devspace_controller"), reflect.TypeOf(&DevspaceController{})) + ir.RegisterType(load("AzureRM::Dns_a_record"), reflect.TypeOf(&DnsARecord{})) + ir.RegisterType(load("AzureRM::Dns_aaaa_record"), reflect.TypeOf(&DnsAaaaRecord{})) + ir.RegisterType(load("AzureRM::Dns_caa_record"), reflect.TypeOf(&DnsCaaRecord{})) + ir.RegisterType(load("AzureRM::Dns_cname_record"), reflect.TypeOf(&DnsCnameRecord{})) + ir.RegisterType(load("AzureRM::Dns_mx_record"), reflect.TypeOf(&DnsMxRecord{})) + ir.RegisterType(load("AzureRM::Dns_ns_record"), reflect.TypeOf(&DnsNsRecord{})) + ir.RegisterType(load("AzureRM::Dns_ptr_record"), reflect.TypeOf(&DnsPtrRecord{})) + ir.RegisterType(load("AzureRM::Dns_srv_record"), reflect.TypeOf(&DnsSrvRecord{})) + ir.RegisterType(load("AzureRM::Dns_txt_record"), reflect.TypeOf(&DnsTxtRecord{})) + ir.RegisterType(load("AzureRM::Dns_zone"), reflect.TypeOf(&DnsZone{})) + ir.RegisterType(load("AzureRM::Eventgrid_topic"), reflect.TypeOf(&EventgridTopic{})) + ir.RegisterType(load("AzureRM::Eventhub"), reflect.TypeOf(&Eventhub{})) + ir.RegisterType(load("AzureRM::Eventhub_authorization_rule"), reflect.TypeOf(&EventhubAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Eventhub_consumer_group"), reflect.TypeOf(&EventhubConsumerGroup{})) + ir.RegisterType(load("AzureRM::Eventhub_namespace"), reflect.TypeOf(&EventhubNamespace{})) + ir.RegisterType(load("AzureRM::Eventhub_namespace_authorization_rule"), reflect.TypeOf(&EventhubNamespaceAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Express_route_circuit"), reflect.TypeOf(&ExpressRouteCircuit{})) + ir.RegisterType(load("AzureRM::Express_route_circuit_authorization"), reflect.TypeOf(&ExpressRouteCircuitAuthorization{})) + ir.RegisterType(load("AzureRM::Express_route_circuit_peering"), reflect.TypeOf(&ExpressRouteCircuitPeering{})) + ir.RegisterType(load("AzureRM::Firewall"), reflect.TypeOf(&Firewall{})) + ir.RegisterType(load("AzureRM::Firewall_application_rule_collection"), reflect.TypeOf(&FirewallApplicationRuleCollection{})) + ir.RegisterType(load("AzureRM::Firewall_network_rule_collection"), reflect.TypeOf(&FirewallNetworkRuleCollection{})) + ir.RegisterType(load("AzureRM::Function_app"), reflect.TypeOf(&FunctionApp{})) + ir.RegisterType(load("AzureRM::GenericHandler"), reflect.TypeOf(&GenericHandler{})) + ir.RegisterType(load("AzureRM::Image"), reflect.TypeOf(&Image{})) + ir.RegisterType(load("AzureRM::Iothub"), reflect.TypeOf(&Iothub{})) + ir.RegisterType(load("AzureRM::Iothub_consumer_group"), reflect.TypeOf(&IothubConsumerGroup{})) + ir.RegisterType(load("AzureRM::Key_vault"), reflect.TypeOf(&KeyVault{})) + ir.RegisterType(load("AzureRM::Key_vault_access_policy"), reflect.TypeOf(&KeyVaultAccessPolicy{})) + ir.RegisterType(load("AzureRM::Key_vault_certificate"), reflect.TypeOf(&KeyVaultCertificate{})) + ir.RegisterType(load("AzureRM::Key_vault_key"), reflect.TypeOf(&KeyVaultKey{})) + ir.RegisterType(load("AzureRM::Key_vault_secret"), reflect.TypeOf(&KeyVaultSecret{})) + 
ir.RegisterType(load("AzureRM::Kubernetes_cluster"), reflect.TypeOf(&KubernetesCluster{})) + ir.RegisterType(load("AzureRM::Lb"), reflect.TypeOf(&Lb{})) + ir.RegisterType(load("AzureRM::Lb_backend_address_pool"), reflect.TypeOf(&LbBackendAddressPool{})) + ir.RegisterType(load("AzureRM::Lb_nat_pool"), reflect.TypeOf(&LbNatPool{})) + ir.RegisterType(load("AzureRM::Lb_nat_rule"), reflect.TypeOf(&LbNatRule{})) + ir.RegisterType(load("AzureRM::Lb_probe"), reflect.TypeOf(&LbProbe{})) + ir.RegisterType(load("AzureRM::Lb_rule"), reflect.TypeOf(&LbRule{})) + ir.RegisterType(load("AzureRM::Local_network_gateway"), reflect.TypeOf(&LocalNetworkGateway{})) + ir.RegisterType(load("AzureRM::Log_analytics_solution"), reflect.TypeOf(&LogAnalyticsSolution{})) + ir.RegisterType(load("AzureRM::Log_analytics_workspace"), reflect.TypeOf(&LogAnalyticsWorkspace{})) + ir.RegisterType(load("AzureRM::Log_analytics_workspace_linked_service"), reflect.TypeOf(&LogAnalyticsWorkspaceLinkedService{})) + ir.RegisterType(load("AzureRM::Logic_app_action_custom"), reflect.TypeOf(&LogicAppActionCustom{})) + ir.RegisterType(load("AzureRM::Logic_app_action_http"), reflect.TypeOf(&LogicAppActionHttp{})) + ir.RegisterType(load("AzureRM::Logic_app_trigger_custom"), reflect.TypeOf(&LogicAppTriggerCustom{})) + ir.RegisterType(load("AzureRM::Logic_app_trigger_http_request"), reflect.TypeOf(&LogicAppTriggerHttpRequest{})) + ir.RegisterType(load("AzureRM::Logic_app_trigger_recurrence"), reflect.TypeOf(&LogicAppTriggerRecurrence{})) + ir.RegisterType(load("AzureRM::Logic_app_workflow"), reflect.TypeOf(&LogicAppWorkflow{})) + ir.RegisterType(load("AzureRM::Managed_disk"), reflect.TypeOf(&ManagedDisk{})) + ir.RegisterType(load("AzureRM::Management_group"), reflect.TypeOf(&ManagementGroup{})) + ir.RegisterType(load("AzureRM::Management_lock"), reflect.TypeOf(&ManagementLock{})) + ir.RegisterType(load("AzureRM::Mariadb_database"), reflect.TypeOf(&MariadbDatabase{})) + ir.RegisterType(load("AzureRM::Mariadb_server"), reflect.TypeOf(&MariadbServer{})) + ir.RegisterType(load("AzureRM::Metric_alertrule"), reflect.TypeOf(&MetricAlertrule{})) + ir.RegisterType(load("AzureRM::Monitor_action_group"), reflect.TypeOf(&MonitorActionGroup{})) + ir.RegisterType(load("AzureRM::Monitor_activity_log_alert"), reflect.TypeOf(&MonitorActivityLogAlert{})) + ir.RegisterType(load("AzureRM::Monitor_diagnostic_setting"), reflect.TypeOf(&MonitorDiagnosticSetting{})) + ir.RegisterType(load("AzureRM::Monitor_log_profile"), reflect.TypeOf(&MonitorLogProfile{})) + ir.RegisterType(load("AzureRM::Monitor_metric_alert"), reflect.TypeOf(&MonitorMetricAlert{})) + ir.RegisterType(load("AzureRM::Mssql_elasticpool"), reflect.TypeOf(&MssqlElasticpool{})) + ir.RegisterType(load("AzureRM::Mysql_configuration"), reflect.TypeOf(&MysqlConfiguration{})) + ir.RegisterType(load("AzureRM::Mysql_database"), reflect.TypeOf(&MysqlDatabase{})) + ir.RegisterType(load("AzureRM::Mysql_firewall_rule"), reflect.TypeOf(&MysqlFirewallRule{})) + ir.RegisterType(load("AzureRM::Mysql_server"), reflect.TypeOf(&MysqlServer{})) + ir.RegisterType(load("AzureRM::Mysql_virtual_network_rule"), reflect.TypeOf(&MysqlVirtualNetworkRule{})) + ir.RegisterType(load("AzureRM::Network_interface"), reflect.TypeOf(&NetworkInterface{})) + ir.RegisterType(load("AzureRM::Network_interface_application_gateway_backend_address_pool_association"), reflect.TypeOf(&NetworkInterfaceApplicationGatewayBackendAddressPoolAssociation{})) + ir.RegisterType(load("AzureRM::Network_interface_backend_address_pool_association"), 
reflect.TypeOf(&NetworkInterfaceBackendAddressPoolAssociation{})) + ir.RegisterType(load("AzureRM::Network_interface_nat_rule_association"), reflect.TypeOf(&NetworkInterfaceNatRuleAssociation{})) + ir.RegisterType(load("AzureRM::Network_security_group"), reflect.TypeOf(&NetworkSecurityGroup{})) + ir.RegisterType(load("AzureRM::Network_security_rule"), reflect.TypeOf(&NetworkSecurityRule{})) + ir.RegisterType(load("AzureRM::Network_watcher"), reflect.TypeOf(&NetworkWatcher{})) + ir.RegisterType(load("AzureRM::Notification_hub"), reflect.TypeOf(&NotificationHub{})) + ir.RegisterType(load("AzureRM::Notification_hub_authorization_rule"), reflect.TypeOf(&NotificationHubAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Notification_hub_namespace"), reflect.TypeOf(&NotificationHubNamespace{})) + ir.RegisterType(load("AzureRM::Packet_capture"), reflect.TypeOf(&PacketCapture{})) + ir.RegisterType(load("AzureRM::Policy_assignment"), reflect.TypeOf(&PolicyAssignment{})) + ir.RegisterType(load("AzureRM::Policy_definition"), reflect.TypeOf(&PolicyDefinition{})) + ir.RegisterType(load("AzureRM::Policy_set_definition"), reflect.TypeOf(&PolicySetDefinition{})) + ir.RegisterType(load("AzureRM::Postgresql_configuration"), reflect.TypeOf(&PostgresqlConfiguration{})) + ir.RegisterType(load("AzureRM::Postgresql_database"), reflect.TypeOf(&PostgresqlDatabase{})) + ir.RegisterType(load("AzureRM::Postgresql_firewall_rule"), reflect.TypeOf(&PostgresqlFirewallRule{})) + ir.RegisterType(load("AzureRM::Postgresql_server"), reflect.TypeOf(&PostgresqlServer{})) + ir.RegisterType(load("AzureRM::Postgresql_virtual_network_rule"), reflect.TypeOf(&PostgresqlVirtualNetworkRule{})) + ir.RegisterType(load("AzureRM::Public_ip"), reflect.TypeOf(&PublicIp{})) + ir.RegisterType(load("AzureRM::Recovery_services_protected_vm"), reflect.TypeOf(&RecoveryServicesProtectedVm{})) + ir.RegisterType(load("AzureRM::Recovery_services_protection_policy_vm"), reflect.TypeOf(&RecoveryServicesProtectionPolicyVm{})) + ir.RegisterType(load("AzureRM::Recovery_services_vault"), reflect.TypeOf(&RecoveryServicesVault{})) + ir.RegisterType(load("AzureRM::Redis_cache"), reflect.TypeOf(&RedisCache{})) + ir.RegisterType(load("AzureRM::Redis_firewall_rule"), reflect.TypeOf(&RedisFirewallRule{})) + ir.RegisterType(load("AzureRM::Relay_namespace"), reflect.TypeOf(&RelayNamespace{})) + ir.RegisterType(load("AzureRM::Resource_group"), reflect.TypeOf(&ResourceGroup{})) + ir.RegisterType(load("AzureRM::Role_assignment"), reflect.TypeOf(&RoleAssignment{})) + ir.RegisterType(load("AzureRM::Role_definition"), reflect.TypeOf(&RoleDefinition{})) + ir.RegisterType(load("AzureRM::Route"), reflect.TypeOf(&Route{})) + ir.RegisterType(load("AzureRM::Route_table"), reflect.TypeOf(&RouteTable{})) + ir.RegisterType(load("AzureRM::Scheduler_job"), reflect.TypeOf(&SchedulerJob{})) + ir.RegisterType(load("AzureRM::Scheduler_job_collection"), reflect.TypeOf(&SchedulerJobCollection{})) + ir.RegisterType(load("AzureRM::Search_service"), reflect.TypeOf(&SearchService{})) + ir.RegisterType(load("AzureRM::Security_center_contact"), reflect.TypeOf(&SecurityCenterContact{})) + ir.RegisterType(load("AzureRM::Security_center_subscription_pricing"), reflect.TypeOf(&SecurityCenterSubscriptionPricing{})) + ir.RegisterType(load("AzureRM::Security_center_workspace"), reflect.TypeOf(&SecurityCenterWorkspace{})) + ir.RegisterType(load("AzureRM::Service_fabric_cluster"), reflect.TypeOf(&ServiceFabricCluster{})) + ir.RegisterType(load("AzureRM::Servicebus_namespace"), 
reflect.TypeOf(&ServicebusNamespace{})) + ir.RegisterType(load("AzureRM::Servicebus_namespace_authorization_rule"), reflect.TypeOf(&ServicebusNamespaceAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Servicebus_queue"), reflect.TypeOf(&ServicebusQueue{})) + ir.RegisterType(load("AzureRM::Servicebus_queue_authorization_rule"), reflect.TypeOf(&ServicebusQueueAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Servicebus_subscription"), reflect.TypeOf(&ServicebusSubscription{})) + ir.RegisterType(load("AzureRM::Servicebus_subscription_rule"), reflect.TypeOf(&ServicebusSubscriptionRule{})) + ir.RegisterType(load("AzureRM::Servicebus_topic"), reflect.TypeOf(&ServicebusTopic{})) + ir.RegisterType(load("AzureRM::Servicebus_topic_authorization_rule"), reflect.TypeOf(&ServicebusTopicAuthorizationRule{})) + ir.RegisterType(load("AzureRM::Shared_image"), reflect.TypeOf(&SharedImage{})) + ir.RegisterType(load("AzureRM::Shared_image_gallery"), reflect.TypeOf(&SharedImageGallery{})) + ir.RegisterType(load("AzureRM::Shared_image_version"), reflect.TypeOf(&SharedImageVersion{})) + ir.RegisterType(load("AzureRM::Signalr_service"), reflect.TypeOf(&SignalrService{})) + ir.RegisterType(load("AzureRM::Snapshot"), reflect.TypeOf(&Snapshot{})) + ir.RegisterType(load("AzureRM::Sql_active_directory_administrator"), reflect.TypeOf(&SqlActiveDirectoryAdministrator{})) + ir.RegisterType(load("AzureRM::Sql_database"), reflect.TypeOf(&SqlDatabase{})) + ir.RegisterType(load("AzureRM::Sql_elasticpool"), reflect.TypeOf(&SqlElasticpool{})) + ir.RegisterType(load("AzureRM::Sql_firewall_rule"), reflect.TypeOf(&SqlFirewallRule{})) + ir.RegisterType(load("AzureRM::Sql_server"), reflect.TypeOf(&SqlServer{})) + ir.RegisterType(load("AzureRM::Sql_virtual_network_rule"), reflect.TypeOf(&SqlVirtualNetworkRule{})) + ir.RegisterType(load("AzureRM::Storage_account"), reflect.TypeOf(&StorageAccount{})) + ir.RegisterType(load("AzureRM::Storage_blob"), reflect.TypeOf(&StorageBlob{})) + ir.RegisterType(load("AzureRM::Storage_container"), reflect.TypeOf(&StorageContainer{})) + ir.RegisterType(load("AzureRM::Storage_queue"), reflect.TypeOf(&StorageQueue{})) + ir.RegisterType(load("AzureRM::Storage_share"), reflect.TypeOf(&StorageShare{})) + ir.RegisterType(load("AzureRM::Storage_table"), reflect.TypeOf(&StorageTable{})) + ir.RegisterType(load("AzureRM::Subnet"), reflect.TypeOf(&Subnet{})) + ir.RegisterType(load("AzureRM::Subnet_network_security_group_association"), reflect.TypeOf(&SubnetNetworkSecurityGroupAssociation{})) + ir.RegisterType(load("AzureRM::Subnet_route_table_association"), reflect.TypeOf(&SubnetRouteTableAssociation{})) + ir.RegisterType(load("AzureRM::Template_deployment"), reflect.TypeOf(&TemplateDeployment{})) + ir.RegisterType(load("AzureRM::Traffic_manager_endpoint"), reflect.TypeOf(&TrafficManagerEndpoint{})) + ir.RegisterType(load("AzureRM::Traffic_manager_profile"), reflect.TypeOf(&TrafficManagerProfile{})) + ir.RegisterType(load("AzureRM::User_assigned_identity"), reflect.TypeOf(&UserAssignedIdentity{})) + ir.RegisterType(load("AzureRM::Virtual_machine"), reflect.TypeOf(&VirtualMachine{})) + ir.RegisterType(load("AzureRM::Virtual_machine_data_disk_attachment"), reflect.TypeOf(&VirtualMachineDataDiskAttachment{})) + ir.RegisterType(load("AzureRM::Virtual_machine_extension"), reflect.TypeOf(&VirtualMachineExtension{})) + ir.RegisterType(load("AzureRM::Virtual_machine_scale_set"), reflect.TypeOf(&VirtualMachineScaleSet{})) + ir.RegisterType(load("AzureRM::Virtual_network"), reflect.TypeOf(&VirtualNetwork{})) + 
ir.RegisterType(load("AzureRM::Virtual_network_gateway"), reflect.TypeOf(&VirtualNetworkGateway{})) + ir.RegisterType(load("AzureRM::Virtual_network_gateway_connection"), reflect.TypeOf(&VirtualNetworkGatewayConnection{})) + ir.RegisterType(load("AzureRM::Virtual_network_peering"), reflect.TypeOf(&VirtualNetworkPeering{})) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/example/example.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/example/example.go new file mode 100644 index 0000000..c9c4f04 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/example/example.go @@ -0,0 +1,50 @@ +// this file is generated +package example + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type Address struct { + LineOne *string +} + +type ContainedRes struct { + OwnerId string + Stuff string + Id *string +} + +type OwnerRes struct { + Phone string + Id *string +} + +type Person struct { + Name *string + Age *int64 + Human *bool + Address *Address +} + +type PersonHandler struct { +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Example::Address"), reflect.TypeOf(&Address{})) + ir.RegisterType(load("Example::ContainedRes"), reflect.TypeOf(&ContainedRes{})) + ir.RegisterType(load("Example::OwnerRes"), reflect.TypeOf(&OwnerRes{})) + ir.RegisterType(load("Example::Person"), reflect.TypeOf(&Person{})) + ir.RegisterType(load("Example::PersonHandler"), reflect.TypeOf(&PersonHandler{})) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/foobernetes/foobernetes.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/foobernetes/foobernetes.go new file mode 100644 index 0000000..cad456a --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/foobernetes/foobernetes.go @@ -0,0 +1,60 @@ +// this file is generated +package foobernetes + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type Instance struct { + Image string + Cpus int64 + Memory string + InstanceID *string + InstanceIP *string + Location *string + Config *map[string]string +} + +type InstanceHandler struct { +} + +type LoadBalancer struct { + WebServerIDs []string + LoadBalancerID *string + LoadBalancerIP *string + Location *string + Replica *bool + Tags *map[string]string +} + +type LoadBalancerHandler struct { +} + +type WebServer struct { + Port int64 + AppServers []string + WebServerID *string +} + +type WebServerHandler struct { +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Foobernetes::Instance"), reflect.TypeOf(&Instance{})) + ir.RegisterType(load("Foobernetes::InstanceHandler"), reflect.TypeOf(&InstanceHandler{})) + ir.RegisterType(load("Foobernetes::LoadBalancer"), reflect.TypeOf(&LoadBalancer{})) + ir.RegisterType(load("Foobernetes::LoadBalancerHandler"), reflect.TypeOf(&LoadBalancerHandler{})) + ir.RegisterType(load("Foobernetes::WebServer"), reflect.TypeOf(&WebServer{})) + ir.RegisterType(load("Foobernetes::WebServerHandler"), reflect.TypeOf(&WebServerHandler{})) +} diff --git 
a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/github/github.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/github/github.go new file mode 100644 index 0000000..722d2f6 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/github/github.go @@ -0,0 +1,223 @@ +// this file is generated +package github + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type BranchProtection struct { + Branch string + Repository string + BranchProtectionId *string `puppet:"name=>'branch_protection_id'"` + EnforceAdmins *bool `puppet:"name=>'enforce_admins'"` + Etag *string + RequiredPullRequestReviews *Reviews `puppet:"name=>'required_pull_request_reviews'"` + RequiredStatusChecks *Checks `puppet:"name=>'required_status_checks'"` + Restrictions *Restrictions +} + +type GenericHandler struct { +} + +type IssueLabel struct { + Color string + Name string + Repository string + IssueLabelId *string `puppet:"name=>'issue_label_id'"` + Description *string + Etag *string + Url *string +} + +type Membership struct { + Username string + MembershipId *string `puppet:"name=>'membership_id'"` + Etag *string + Role *string +} + +type OrganizationProject struct { + Name string + OrganizationProjectId *string `puppet:"name=>'organization_project_id'"` + Body *string + Etag *string + Url *string +} + +type OrganizationWebhook struct { + Events []string + Name string + OrganizationWebhookId *string `puppet:"name=>'organization_webhook_id'"` + Active *bool + Configuration *Configuration + Etag *string + Url *string +} + +type ProjectColumn struct { + Name string + ProjectId string `puppet:"name=>'project_id'"` + ProjectColumnId *string `puppet:"name=>'project_column_id'"` + Etag *string +} + +type Repository struct { + Name string + RepositoryId *string `puppet:"name=>'repository_id'"` + AllowMergeCommit *bool `puppet:"name=>'allow_merge_commit'"` + AllowRebaseMerge *bool `puppet:"name=>'allow_rebase_merge'"` + AllowSquashMerge *bool `puppet:"name=>'allow_squash_merge'"` + Archived *bool + AutoInit *bool `puppet:"name=>'auto_init'"` + DefaultBranch *string `puppet:"name=>'default_branch'"` + Description *string + Etag *string + FullName *string `puppet:"name=>'full_name'"` + GitCloneUrl *string `puppet:"name=>'git_clone_url'"` + GitignoreTemplate *string `puppet:"name=>'gitignore_template'"` + HasDownloads *bool `puppet:"name=>'has_downloads'"` + HasIssues *bool `puppet:"name=>'has_issues'"` + HasProjects *bool `puppet:"name=>'has_projects'"` + HasWiki *bool `puppet:"name=>'has_wiki'"` + HomepageUrl *string `puppet:"name=>'homepage_url'"` + HtmlUrl *string `puppet:"name=>'html_url'"` + HttpCloneUrl *string `puppet:"name=>'http_clone_url'"` + LicenseTemplate *string `puppet:"name=>'license_template'"` + Private *bool + SshCloneUrl *string `puppet:"name=>'ssh_clone_url'"` + SvnUrl *string `puppet:"name=>'svn_url'"` + Topics *[]string +} + +type RepositoryCollaborator struct { + Repository string + Username string + RepositoryCollaboratorId *string `puppet:"name=>'repository_collaborator_id'"` + Permission *string +} + +type RepositoryDeployKey struct { + Key string + Repository string + Title string + RepositoryDeployKeyId *string `puppet:"name=>'repository_deploy_key_id'"` + Etag *string + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type RepositoryProject struct { + Name string + Repository string + RepositoryProjectId *string `puppet:"name=>'repository_project_id'"` + Body *string + Etag *string + Url *string +} + +type RepositoryWebhook struct { + 
Events []string + Name string + Repository string + RepositoryWebhookId *string `puppet:"name=>'repository_webhook_id'"` + Active *bool + Configuration *Configuration + Etag *string + Url *string +} + +type Team struct { + Name string + TeamId *string `puppet:"name=>'team_id'"` + Description *string + Etag *string + LdapDn *string `puppet:"name=>'ldap_dn'"` + ParentTeamId *int64 `puppet:"name=>'parent_team_id'"` + Privacy *string + Slug *string +} + +type TeamMembership struct { + TeamId string `puppet:"name=>'team_id'"` + Username string + TeamMembershipId *string `puppet:"name=>'team_membership_id'"` + Etag *string + Role *string +} + +type TeamRepository struct { + Repository string + TeamId string `puppet:"name=>'team_id'"` + TeamRepositoryId *string `puppet:"name=>'team_repository_id'"` + Etag *string + Permission *string +} + +type UserGpgKey struct { + ArmoredPublicKey string `puppet:"name=>'armored_public_key'"` + UserGpgKeyId *string `puppet:"name=>'user_gpg_key_id'"` + Etag *string + KeyId *string `puppet:"name=>'key_id'"` +} + +type UserSshKey struct { + Key string + Title string + UserSshKeyId *string `puppet:"name=>'user_ssh_key_id'"` + Etag *string + Url *string +} + +type Checks struct { + Contexts *[]string + Strict *bool +} + +type Configuration struct { + Url string + ContentType *string `puppet:"name=>'content_type'"` + InsecureSsl *string `puppet:"name=>'insecure_ssl'"` + Secret *string +} + +type Restrictions struct { + Teams *[]string + Users *[]string +} + +type Reviews struct { + DismissStaleReviews *bool `puppet:"name=>'dismiss_stale_reviews'"` + DismissalTeams *[]string `puppet:"name=>'dismissal_teams'"` + DismissalUsers *[]string `puppet:"name=>'dismissal_users'"` + RequireCodeOwnerReviews *bool `puppet:"name=>'require_code_owner_reviews'"` +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("GitHub::Branch_protection"), reflect.TypeOf(&BranchProtection{})) + ir.RegisterType(load("GitHub::GenericHandler"), reflect.TypeOf(&GenericHandler{})) + ir.RegisterType(load("GitHub::Issue_label"), reflect.TypeOf(&IssueLabel{})) + ir.RegisterType(load("GitHub::Membership"), reflect.TypeOf(&Membership{})) + ir.RegisterType(load("GitHub::Organization_project"), reflect.TypeOf(&OrganizationProject{})) + ir.RegisterType(load("GitHub::Organization_webhook"), reflect.TypeOf(&OrganizationWebhook{})) + ir.RegisterType(load("GitHub::Project_column"), reflect.TypeOf(&ProjectColumn{})) + ir.RegisterType(load("GitHub::Repository"), reflect.TypeOf(&Repository{})) + ir.RegisterType(load("GitHub::Repository_collaborator"), reflect.TypeOf(&RepositoryCollaborator{})) + ir.RegisterType(load("GitHub::Repository_deploy_key"), reflect.TypeOf(&RepositoryDeployKey{})) + ir.RegisterType(load("GitHub::Repository_project"), reflect.TypeOf(&RepositoryProject{})) + ir.RegisterType(load("GitHub::Repository_webhook"), reflect.TypeOf(&RepositoryWebhook{})) + ir.RegisterType(load("GitHub::Team"), reflect.TypeOf(&Team{})) + ir.RegisterType(load("GitHub::Team_membership"), reflect.TypeOf(&TeamMembership{})) + ir.RegisterType(load("GitHub::Team_repository"), reflect.TypeOf(&TeamRepository{})) + ir.RegisterType(load("GitHub::User_gpg_key"), reflect.TypeOf(&UserGpgKey{})) + ir.RegisterType(load("GitHub::User_ssh_key"), reflect.TypeOf(&UserSshKey{})) +} diff --git 
a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/google/google.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/google/google.go new file mode 100644 index 0000000..a00d610 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/google/google.go @@ -0,0 +1,2940 @@ +// this file is generated +package google + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type AppEngineApplication struct { + LocationId string `puppet:"name=>'location_id'"` + AppEngineApplicationId *string `puppet:"name=>'app_engine_application_id'"` + AuthDomain *string `puppet:"name=>'auth_domain'"` + CodeBucket *string `puppet:"name=>'code_bucket'"` + DefaultBucket *string `puppet:"name=>'default_bucket'"` + DefaultHostname *string `puppet:"name=>'default_hostname'"` + FeatureSettings *Settings `puppet:"name=>'feature_settings'"` + GcrDomain *string `puppet:"name=>'gcr_domain'"` + Name *string + Project *string + ServingStatus *string `puppet:"name=>'serving_status'"` + UrlDispatchRule *[]Rule `puppet:"name=>'url_dispatch_rule'"` +} + +type BigqueryDataset struct { + DatasetId string `puppet:"name=>'dataset_id'"` + BigqueryDatasetId *string `puppet:"name=>'bigquery_dataset_id'"` + Access *[]Access + CreationTime *int64 `puppet:"name=>'creation_time'"` + DefaultTableExpirationMs *int64 `puppet:"name=>'default_table_expiration_ms'"` + Description *string + Etag *string + FriendlyName *string `puppet:"name=>'friendly_name'"` + Labels *map[string]string + LastModifiedTime *int64 `puppet:"name=>'last_modified_time'"` + Location *string + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type BigqueryTable struct { + DatasetId string `puppet:"name=>'dataset_id'"` + TableId string `puppet:"name=>'table_id'"` + BigqueryTableId *string `puppet:"name=>'bigquery_table_id'"` + CreationTime *int64 `puppet:"name=>'creation_time'"` + Description *string + Etag *string + ExpirationTime *int64 `puppet:"name=>'expiration_time'"` + FriendlyName *string `puppet:"name=>'friendly_name'"` + Labels *map[string]string + LastModifiedTime *int64 `puppet:"name=>'last_modified_time'"` + Location *string + NumBytes *int64 `puppet:"name=>'num_bytes'"` + NumLongTermBytes *int64 `puppet:"name=>'num_long_term_bytes'"` + NumRows *int64 `puppet:"name=>'num_rows'"` + Project *string + Schema *string + SelfLink *string `puppet:"name=>'self_link'"` + TimePartitioning *Partitioning `puppet:"name=>'time_partitioning'"` + Type *string + View *TableView +} + +type BigtableInstance struct { + Name string + BigtableInstanceId *string `puppet:"name=>'bigtable_instance_id'"` + Cluster *Cluster + DisplayName *string `puppet:"name=>'display_name'"` + InstanceType *string `puppet:"name=>'instance_type'"` + Project *string +} + +type BigtableTable struct { + InstanceName string `puppet:"name=>'instance_name'"` + Name string + BigtableTableId *string `puppet:"name=>'bigtable_table_id'"` + Project *string + SplitKeys *[]string `puppet:"name=>'split_keys'"` +} + +type BillingAccountIamBinding struct { + BillingAccountId string `puppet:"name=>'billing_account_id'"` + Members []string + Role string + BillingAccountIamBindingId *string `puppet:"name=>'billing_account_iam_binding_id'"` + Etag *string +} + +type BillingAccountIamMember struct { + BillingAccountId string `puppet:"name=>'billing_account_id'"` + Member string + Role string + BillingAccountIamMemberId *string `puppet:"name=>'billing_account_iam_member_id'"` + Etag *string +} + +type BillingAccountIamPolicy struct { + 
BillingAccountId string `puppet:"name=>'billing_account_id'"` + PolicyData string `puppet:"name=>'policy_data'"` + BillingAccountIamPolicyId *string `puppet:"name=>'billing_account_iam_policy_id'"` + Etag *string +} + +type BinaryAuthorizationAttestor struct { + Name string + BinaryAuthorizationAttestorId *string `puppet:"name=>'binary_authorization_attestor_id'"` + AttestationAuthorityNote *Note `puppet:"name=>'attestation_authority_note'"` + Description *string + Project *string +} + +type BinaryAuthorizationPolicy struct { + BinaryAuthorizationPolicyId *string `puppet:"name=>'binary_authorization_policy_id'"` + AdmissionWhitelistPatterns *[]Patterns `puppet:"name=>'admission_whitelist_patterns'"` + ClusterAdmissionRules *[]Rules `puppet:"name=>'cluster_admission_rules'"` + DefaultAdmissionRule *AdmissionRule `puppet:"name=>'default_admission_rule'"` + Description *string + Project *string +} + +type CloudbuildTrigger struct { + CloudbuildTriggerId *string `puppet:"name=>'cloudbuild_trigger_id'"` + Build *Build + Description *string + Filename *string + Project *string + Substitutions *map[string]string + TriggerTemplate *Template `puppet:"name=>'trigger_template'"` +} + +type CloudfunctionsFunction struct { + Name string + SourceArchiveBucket string `puppet:"name=>'source_archive_bucket'"` + SourceArchiveObject string `puppet:"name=>'source_archive_object'"` + CloudfunctionsFunctionId *string `puppet:"name=>'cloudfunctions_function_id'"` + AvailableMemoryMb *int64 `puppet:"name=>'available_memory_mb'"` + Description *string + EntryPoint *string `puppet:"name=>'entry_point'"` + EnvironmentVariables *map[string]string `puppet:"name=>'environment_variables'"` + EventTrigger *EventTrigger `puppet:"name=>'event_trigger'"` + HttpsTriggerUrl *string `puppet:"name=>'https_trigger_url'"` + Labels *map[string]string + Project *string + Region *string + Runtime *string + Timeout *int64 + TriggerHttp *bool `puppet:"name=>'trigger_http'"` +} + +type CloudiotRegistry struct { + Name string + CloudiotRegistryId *string `puppet:"name=>'cloudiot_registry_id'"` + Credentials *[]Credentials + EventNotificationConfig *map[string]NotificationConfig `puppet:"name=>'event_notification_config'"` + HttpConfig *map[string]HttpConfig `puppet:"name=>'http_config'"` + MqttConfig *map[string]MqttConfig `puppet:"name=>'mqtt_config'"` + Project *string + Region *string + StateNotificationConfig *map[string]NotificationConfig `puppet:"name=>'state_notification_config'"` +} + +type ComposerEnvironment struct { + Name string + ComposerEnvironmentId *string `puppet:"name=>'composer_environment_id'"` + Config *EnvironmentConfig + Labels *map[string]string + Project *string + Region *string +} + +type ComputeAddress struct { + Name string + ComputeAddressId *string `puppet:"name=>'compute_address_id'"` + Address *string + AddressType *string `puppet:"name=>'address_type'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + NetworkTier *string `puppet:"name=>'network_tier'"` + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` + Subnetwork *string + Users *[]string +} + +type ComputeAttachedDisk struct { + Disk string + Instance string + ComputeAttachedDiskId *string `puppet:"name=>'compute_attached_disk_id'"` + DeviceName *string `puppet:"name=>'device_name'"` + Mode *string + Project *string + Zone *string +} + +type ComputeAutoscaler struct { + Name string + Target string + ComputeAutoscalerId *string `puppet:"name=>'compute_autoscaler_id'"` + 
AutoscalingPolicy *AutoscalingPolicy `puppet:"name=>'autoscaling_policy'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + Zone *string +} + +type ComputeBackendBucket struct { + BucketName string `puppet:"name=>'bucket_name'"` + Name string + ComputeBackendBucketId *string `puppet:"name=>'compute_backend_bucket_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + EnableCdn *bool `puppet:"name=>'enable_cdn'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeBackendService struct { + HealthChecks []string `puppet:"name=>'health_checks'"` + Name string + ComputeBackendServiceId *string `puppet:"name=>'compute_backend_service_id'"` + Backend *[]Backend + CdnPolicy *CdnPolicy `puppet:"name=>'cdn_policy'"` + ConnectionDrainingTimeoutSec *int64 `puppet:"name=>'connection_draining_timeout_sec'"` + Description *string + EnableCdn *bool `puppet:"name=>'enable_cdn'"` + Fingerprint *string + Iap *Iap + PortName *string `puppet:"name=>'port_name'"` + Project *string + Protocol *string + SecurityPolicy *string `puppet:"name=>'security_policy'"` + SelfLink *string `puppet:"name=>'self_link'"` + SessionAffinity *string `puppet:"name=>'session_affinity'"` + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` +} + +type ComputeDisk struct { + Name string + ComputeDiskId *string `puppet:"name=>'compute_disk_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + DiskEncryptionKey *Key `puppet:"name=>'disk_encryption_key'"` + Image *string + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + LastAttachTimestamp *string `puppet:"name=>'last_attach_timestamp'"` + LastDetachTimestamp *string `puppet:"name=>'last_detach_timestamp'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + Size *int64 + Snapshot *string + SourceImageEncryptionKey *Key `puppet:"name=>'source_image_encryption_key'"` + SourceImageId *string `puppet:"name=>'source_image_id'"` + SourceSnapshotEncryptionKey *Key `puppet:"name=>'source_snapshot_encryption_key'"` + SourceSnapshotId *string `puppet:"name=>'source_snapshot_id'"` + Type *string + Users *[]string + Zone *string +} + +type ComputeFirewall struct { + Name string + Network string + ComputeFirewallId *string `puppet:"name=>'compute_firewall_id'"` + Allow *[]FirewallAllowDeny + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Deny *[]FirewallAllowDeny + Description *string + DestinationRanges *[]string `puppet:"name=>'destination_ranges'"` + Direction *string + Disabled *bool + Priority *int64 + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + SourceRanges *[]string `puppet:"name=>'source_ranges'"` + SourceServiceAccounts *[]string `puppet:"name=>'source_service_accounts'"` + SourceTags *[]string `puppet:"name=>'source_tags'"` + TargetServiceAccounts *[]string `puppet:"name=>'target_service_accounts'"` + TargetTags *[]string `puppet:"name=>'target_tags'"` +} + +type ComputeForwardingRule struct { + Name string + ComputeForwardingRuleId *string `puppet:"name=>'compute_forwarding_rule_id'"` + BackendService *string `puppet:"name=>'backend_service'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + IpAddress *string `puppet:"name=>'ip_address'"` + IpProtocol *string `puppet:"name=>'ip_protocol'"` + IpVersion *string 
`puppet:"name=>'ip_version'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + LoadBalancingScheme *string `puppet:"name=>'load_balancing_scheme'"` + Network *string + NetworkTier *string `puppet:"name=>'network_tier'"` + PortRange *string `puppet:"name=>'port_range'"` + Ports *[]string + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` + Subnetwork *string + Target *string +} + +type ComputeGlobalAddress struct { + Name string + ComputeGlobalAddressId *string `puppet:"name=>'compute_global_address_id'"` + Address *string + AddressType *string `puppet:"name=>'address_type'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + IpVersion *string `puppet:"name=>'ip_version'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeGlobalForwardingRule struct { + Name string + Target string + ComputeGlobalForwardingRuleId *string `puppet:"name=>'compute_global_forwarding_rule_id'"` + Description *string + IpAddress *string `puppet:"name=>'ip_address'"` + IpProtocol *string `puppet:"name=>'ip_protocol'"` + IpVersion *string `puppet:"name=>'ip_version'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + PortRange *string `puppet:"name=>'port_range'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeHealthCheck struct { + Name string + ComputeHealthCheckId *string `puppet:"name=>'compute_health_check_id'"` + CheckIntervalSec *int64 `puppet:"name=>'check_interval_sec'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + HealthyThreshold *int64 `puppet:"name=>'healthy_threshold'"` + HttpHealthCheck *Check `puppet:"name=>'http_health_check'"` + HttpsHealthCheck *Check `puppet:"name=>'https_health_check'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + SslHealthCheck *HealthCheck `puppet:"name=>'ssl_health_check'"` + TcpHealthCheck *HealthCheck `puppet:"name=>'tcp_health_check'"` + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` + Type *string + UnhealthyThreshold *int64 `puppet:"name=>'unhealthy_threshold'"` +} + +type ComputeHttpHealthCheck struct { + Name string + ComputeHttpHealthCheckId *string `puppet:"name=>'compute_http_health_check_id'"` + CheckIntervalSec *int64 `puppet:"name=>'check_interval_sec'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + HealthyThreshold *int64 `puppet:"name=>'healthy_threshold'"` + Host *string + Port *int64 + Project *string + RequestPath *string `puppet:"name=>'request_path'"` + SelfLink *string `puppet:"name=>'self_link'"` + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` + UnhealthyThreshold *int64 `puppet:"name=>'unhealthy_threshold'"` +} + +type ComputeHttpsHealthCheck struct { + Name string + ComputeHttpsHealthCheckId *string `puppet:"name=>'compute_https_health_check_id'"` + CheckIntervalSec *int64 `puppet:"name=>'check_interval_sec'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + HealthyThreshold *int64 `puppet:"name=>'healthy_threshold'"` + Host *string + Port *int64 + Project *string + RequestPath *string `puppet:"name=>'request_path'"` + SelfLink *string `puppet:"name=>'self_link'"` + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` + UnhealthyThreshold *int64 `puppet:"name=>'unhealthy_threshold'"` +} + +type ComputeImage struct { + Name string + ComputeImageId *string 
`puppet:"name=>'compute_image_id'"` + Description *string + Family *string + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + Licenses *[]string + Project *string + RawDisk *Disk `puppet:"name=>'raw_disk'"` + SelfLink *string `puppet:"name=>'self_link'"` + SourceDisk *string `puppet:"name=>'source_disk'"` +} + +type ComputeInstance struct { + MachineType string `puppet:"name=>'machine_type'"` + Name string + NetworkInterface []Interface `puppet:"name=>'network_interface'"` + ComputeInstanceId *string `puppet:"name=>'compute_instance_id'"` + AllowStoppingForUpdate *bool `puppet:"name=>'allow_stopping_for_update'"` + AttachedDisk *[]AttachedDisk `puppet:"name=>'attached_disk'"` + BootDisk *BootDisk `puppet:"name=>'boot_disk'"` + CanIpForward *bool `puppet:"name=>'can_ip_forward'"` + CpuPlatform *string `puppet:"name=>'cpu_platform'"` + DeletionProtection *bool `puppet:"name=>'deletion_protection'"` + Description *string + GuestAccelerator *[]Accelerator `puppet:"name=>'guest_accelerator'"` + InstanceId *string `puppet:"name=>'instance_id'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + Metadata *map[string]string + MetadataFingerprint *string `puppet:"name=>'metadata_fingerprint'"` + MetadataStartupScript *string `puppet:"name=>'metadata_startup_script'"` + MinCpuPlatform *string `puppet:"name=>'min_cpu_platform'"` + Project *string + Scheduling *Scheduling + ScratchDisk *[]ScratchDisk `puppet:"name=>'scratch_disk'"` + SelfLink *string `puppet:"name=>'self_link'"` + ServiceAccount *Account `puppet:"name=>'service_account'"` + Tags *[]string + TagsFingerprint *string `puppet:"name=>'tags_fingerprint'"` + Zone *string +} + +type ComputeInstanceFromTemplate struct { + Name string + SourceInstanceTemplate string `puppet:"name=>'source_instance_template'"` + ComputeInstanceFromTemplateId *string `puppet:"name=>'compute_instance_from_template_id'"` + AllowStoppingForUpdate *bool `puppet:"name=>'allow_stopping_for_update'"` + AttachedDisk *[]TemplateAttachedDisk `puppet:"name=>'attached_disk'"` + BootDisk *TemplateBootDisk `puppet:"name=>'boot_disk'"` + CanIpForward *bool `puppet:"name=>'can_ip_forward'"` + CpuPlatform *string `puppet:"name=>'cpu_platform'"` + DeletionProtection *bool `puppet:"name=>'deletion_protection'"` + Description *string + GuestAccelerator *[]Accelerator `puppet:"name=>'guest_accelerator'"` + InstanceId *string `puppet:"name=>'instance_id'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + MachineType *string `puppet:"name=>'machine_type'"` + Metadata *map[string]string + MetadataFingerprint *string `puppet:"name=>'metadata_fingerprint'"` + MetadataStartupScript *string `puppet:"name=>'metadata_startup_script'"` + MinCpuPlatform *string `puppet:"name=>'min_cpu_platform'"` + NetworkInterface *[]Interface `puppet:"name=>'network_interface'"` + Project *string + Scheduling *TemplateScheduling + ScratchDisk *[]TemplateScratchDisk `puppet:"name=>'scratch_disk'"` + SelfLink *string `puppet:"name=>'self_link'"` + ServiceAccount *Account `puppet:"name=>'service_account'"` + Tags *[]string + TagsFingerprint *string `puppet:"name=>'tags_fingerprint'"` + Zone *string +} + +type ComputeInstanceGroup struct { + Name string + ComputeInstanceGroupId *string `puppet:"name=>'compute_instance_group_id'"` + Description *string + Instances *[]string + NamedPort *[]Port `puppet:"name=>'named_port'"` + Network *string + Project *string + SelfLink *string 
`puppet:"name=>'self_link'"` + Size *int64 + Zone *string +} + +type ComputeInstanceGroupManager struct { + BaseInstanceName string `puppet:"name=>'base_instance_name'"` + Name string + ComputeInstanceGroupManagerId *string `puppet:"name=>'compute_instance_group_manager_id'"` + Description *string + Fingerprint *string + InstanceGroup *string `puppet:"name=>'instance_group'"` + InstanceTemplate *string `puppet:"name=>'instance_template'"` + NamedPort *[]Port `puppet:"name=>'named_port'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + TargetPools *[]string `puppet:"name=>'target_pools'"` + TargetSize *int64 `puppet:"name=>'target_size'"` + UpdateStrategy *string `puppet:"name=>'update_strategy'"` + WaitForInstances *bool `puppet:"name=>'wait_for_instances'"` + Zone *string +} + +type ComputeInstanceTemplate struct { + Disk []TemplateDisk + MachineType string `puppet:"name=>'machine_type'"` + ComputeInstanceTemplateId *string `puppet:"name=>'compute_instance_template_id'"` + CanIpForward *bool `puppet:"name=>'can_ip_forward'"` + Description *string + GuestAccelerator *[]Accelerator `puppet:"name=>'guest_accelerator'"` + InstanceDescription *string `puppet:"name=>'instance_description'"` + Labels *map[string]string + Metadata *map[string]string + MetadataFingerprint *string `puppet:"name=>'metadata_fingerprint'"` + MetadataStartupScript *string `puppet:"name=>'metadata_startup_script'"` + MinCpuPlatform *string `puppet:"name=>'min_cpu_platform'"` + Name *string + NamePrefix *string `puppet:"name=>'name_prefix'"` + NetworkInterface *[]NetworkInterface `puppet:"name=>'network_interface'"` + Project *string + Region *string + Scheduling *[]Scheduling + SelfLink *string `puppet:"name=>'self_link'"` + ServiceAccount *Account `puppet:"name=>'service_account'"` + Tags *[]string + TagsFingerprint *string `puppet:"name=>'tags_fingerprint'"` +} + +type ComputeInterconnectAttachment struct { + Interconnect string + Name string + Router string + ComputeInterconnectAttachmentId *string `puppet:"name=>'compute_interconnect_attachment_id'"` + CloudRouterIpAddress *string `puppet:"name=>'cloud_router_ip_address'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + CustomerRouterIpAddress *string `puppet:"name=>'customer_router_ip_address'"` + Description *string + GoogleReferenceId *string `puppet:"name=>'google_reference_id'"` + PrivateInterconnectInfo *Info `puppet:"name=>'private_interconnect_info'"` + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeNetwork struct { + Name string + ComputeNetworkId *string `puppet:"name=>'compute_network_id'"` + AutoCreateSubnetworks *bool `puppet:"name=>'auto_create_subnetworks'"` + Description *string + GatewayIpv4 *string `puppet:"name=>'gateway_ipv4'"` + Project *string + RoutingMode *string `puppet:"name=>'routing_mode'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeNetworkPeering struct { + Name string + Network string + PeerNetwork string `puppet:"name=>'peer_network'"` + ComputeNetworkPeeringId *string `puppet:"name=>'compute_network_peering_id'"` + AutoCreateRoutes *bool `puppet:"name=>'auto_create_routes'"` + State *string + StateDetails *string `puppet:"name=>'state_details'"` +} + +type ComputeProjectMetadata struct { + Metadata map[string]string + ComputeProjectMetadataId *string `puppet:"name=>'compute_project_metadata_id'"` + Project *string +} + +type ComputeProjectMetadataItem struct { + Key string + Value string + ComputeProjectMetadataItemId 
*string `puppet:"name=>'compute_project_metadata_item_id'"` + Project *string +} + +type ComputeRegionAutoscaler struct { + Name string + Target string + ComputeRegionAutoscalerId *string `puppet:"name=>'compute_region_autoscaler_id'"` + AutoscalingPolicy *AutoscalingPolicy `puppet:"name=>'autoscaling_policy'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeRegionBackendService struct { + HealthChecks []string `puppet:"name=>'health_checks'"` + Name string + ComputeRegionBackendServiceId *string `puppet:"name=>'compute_region_backend_service_id'"` + Backend *[]ServiceBackend + ConnectionDrainingTimeoutSec *int64 `puppet:"name=>'connection_draining_timeout_sec'"` + Description *string + Fingerprint *string + Project *string + Protocol *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` + SessionAffinity *string `puppet:"name=>'session_affinity'"` + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` +} + +type ComputeRegionDisk struct { + Name string + ReplicaZones []string `puppet:"name=>'replica_zones'"` + ComputeRegionDiskId *string `puppet:"name=>'compute_region_disk_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + DiskEncryptionKey *Key `puppet:"name=>'disk_encryption_key'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + LastAttachTimestamp *string `puppet:"name=>'last_attach_timestamp'"` + LastDetachTimestamp *string `puppet:"name=>'last_detach_timestamp'"` + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` + Size *int64 + Snapshot *string + SourceSnapshotEncryptionKey *Key `puppet:"name=>'source_snapshot_encryption_key'"` + SourceSnapshotId *string `puppet:"name=>'source_snapshot_id'"` + Type *string + Users *[]string +} + +type ComputeRegionInstanceGroupManager struct { + BaseInstanceName string `puppet:"name=>'base_instance_name'"` + Name string + Region string + ComputeRegionInstanceGroupManagerId *string `puppet:"name=>'compute_region_instance_group_manager_id'"` + Description *string + DistributionPolicyZones *[]string `puppet:"name=>'distribution_policy_zones'"` + Fingerprint *string + InstanceGroup *string `puppet:"name=>'instance_group'"` + InstanceTemplate *string `puppet:"name=>'instance_template'"` + NamedPort *[]Port `puppet:"name=>'named_port'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + TargetPools *[]string `puppet:"name=>'target_pools'"` + TargetSize *int64 `puppet:"name=>'target_size'"` + WaitForInstances *bool `puppet:"name=>'wait_for_instances'"` +} + +type ComputeRoute struct { + DestRange string `puppet:"name=>'dest_range'"` + Name string + Network string + ComputeRouteId *string `puppet:"name=>'compute_route_id'"` + Description *string + NextHopGateway *string `puppet:"name=>'next_hop_gateway'"` + NextHopInstance *string `puppet:"name=>'next_hop_instance'"` + NextHopInstanceZone *string `puppet:"name=>'next_hop_instance_zone'"` + NextHopIp *string `puppet:"name=>'next_hop_ip'"` + NextHopNetwork *string `puppet:"name=>'next_hop_network'"` + NextHopVpnTunnel *string `puppet:"name=>'next_hop_vpn_tunnel'"` + Priority *int64 + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + Tags *[]string +} + +type ComputeRouter struct { + Name string + Network string + ComputeRouterId *string `puppet:"name=>'compute_router_id'"` + Bgp *Bgp + CreationTimestamp 
*string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeRouterInterface struct { + Name string + Router string + VpnTunnel string `puppet:"name=>'vpn_tunnel'"` + ComputeRouterInterfaceId *string `puppet:"name=>'compute_router_interface_id'"` + IpRange *string `puppet:"name=>'ip_range'"` + Project *string + Region *string +} + +type ComputeRouterNat struct { + Name string + NatIpAllocateOption string `puppet:"name=>'nat_ip_allocate_option'"` + Router string + ComputeRouterNatId *string `puppet:"name=>'compute_router_nat_id'"` + IcmpIdleTimeoutSec *int64 `puppet:"name=>'icmp_idle_timeout_sec'"` + MinPortsPerVm *int64 `puppet:"name=>'min_ports_per_vm'"` + NatIps *[]string `puppet:"name=>'nat_ips'"` + Project *string + Region *string + SourceSubnetworkIpRangesToNat *string `puppet:"name=>'source_subnetwork_ip_ranges_to_nat'"` + Subnetwork *[]Subnetwork + TcpEstablishedIdleTimeoutSec *int64 `puppet:"name=>'tcp_established_idle_timeout_sec'"` + TcpTransitoryIdleTimeoutSec *int64 `puppet:"name=>'tcp_transitory_idle_timeout_sec'"` + UdpIdleTimeoutSec *int64 `puppet:"name=>'udp_idle_timeout_sec'"` +} + +type ComputeRouterPeer struct { + Interface string + Name string + PeerAsn int64 `puppet:"name=>'peer_asn'"` + Router string + ComputeRouterPeerId *string `puppet:"name=>'compute_router_peer_id'"` + AdvertisedRoutePriority *int64 `puppet:"name=>'advertised_route_priority'"` + IpAddress *string `puppet:"name=>'ip_address'"` + PeerIpAddress *string `puppet:"name=>'peer_ip_address'"` + Project *string + Region *string +} + +type ComputeSecurityPolicy struct { + Name string + ComputeSecurityPolicyId *string `puppet:"name=>'compute_security_policy_id'"` + Description *string + Fingerprint *string + Project *string + Rule *[]PolicyRule + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeSharedVpcHostProject struct { + Project string + ComputeSharedVpcHostProjectId *string `puppet:"name=>'compute_shared_vpc_host_project_id'"` +} + +type ComputeSharedVpcServiceProject struct { + HostProject string `puppet:"name=>'host_project'"` + ServiceProject string `puppet:"name=>'service_project'"` + ComputeSharedVpcServiceProjectId *string `puppet:"name=>'compute_shared_vpc_service_project_id'"` +} + +type ComputeSnapshot struct { + Name string + SourceDisk string `puppet:"name=>'source_disk'"` + ComputeSnapshotId *string `puppet:"name=>'compute_snapshot_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + Licenses *[]string + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + SnapshotEncryptionKey *Key `puppet:"name=>'snapshot_encryption_key'"` + SnapshotId *int64 `puppet:"name=>'snapshot_id'"` + SourceDiskEncryptionKey *DiskEncryptionKey `puppet:"name=>'source_disk_encryption_key'"` + SourceDiskLink *string `puppet:"name=>'source_disk_link'"` + StorageBytes *int64 `puppet:"name=>'storage_bytes'"` + Zone *string +} + +type ComputeSslCertificate struct { + Certificate string + PrivateKey string `puppet:"name=>'private_key'"` + ComputeSslCertificateId *string `puppet:"name=>'compute_ssl_certificate_id'"` + CertificateId *int64 `puppet:"name=>'certificate_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Name *string + NamePrefix *string 
`puppet:"name=>'name_prefix'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeSslPolicy struct { + Name string + ComputeSslPolicyId *string `puppet:"name=>'compute_ssl_policy_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + CustomFeatures *[]string `puppet:"name=>'custom_features'"` + Description *string + EnabledFeatures *[]string `puppet:"name=>'enabled_features'"` + Fingerprint *string + MinTlsVersion *string `puppet:"name=>'min_tls_version'"` + Profile *string + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeSubnetwork struct { + IpCidrRange string `puppet:"name=>'ip_cidr_range'"` + Name string + Network string + ComputeSubnetworkId *string `puppet:"name=>'compute_subnetwork_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + EnableFlowLogs *bool `puppet:"name=>'enable_flow_logs'"` + Fingerprint *string + GatewayAddress *string `puppet:"name=>'gateway_address'"` + PrivateIpGoogleAccess *bool `puppet:"name=>'private_ip_google_access'"` + Project *string + Region *string + SecondaryIpRange *[]IpRange `puppet:"name=>'secondary_ip_range'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeSubnetworkIamBinding struct { + Members []string + Role string + ComputeSubnetworkIamBindingId *string `puppet:"name=>'compute_subnetwork_iam_binding_id'"` + Etag *string +} + +type ComputeSubnetworkIamMember struct { + Member string + Role string + ComputeSubnetworkIamMemberId *string `puppet:"name=>'compute_subnetwork_iam_member_id'"` + Etag *string +} + +type ComputeSubnetworkIamPolicy struct { + PolicyData string `puppet:"name=>'policy_data'"` + ComputeSubnetworkIamPolicyId *string `puppet:"name=>'compute_subnetwork_iam_policy_id'"` + Etag *string +} + +type ComputeTargetHttpProxy struct { + Name string + UrlMap string `puppet:"name=>'url_map'"` + ComputeTargetHttpProxyId *string `puppet:"name=>'compute_target_http_proxy_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + ProxyId *int64 `puppet:"name=>'proxy_id'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeTargetHttpsProxy struct { + Name string + SslCertificates []string `puppet:"name=>'ssl_certificates'"` + UrlMap string `puppet:"name=>'url_map'"` + ComputeTargetHttpsProxyId *string `puppet:"name=>'compute_target_https_proxy_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + ProxyId *int64 `puppet:"name=>'proxy_id'"` + QuicOverride *string `puppet:"name=>'quic_override'"` + SelfLink *string `puppet:"name=>'self_link'"` + SslPolicy *string `puppet:"name=>'ssl_policy'"` +} + +type ComputeTargetPool struct { + Name string + ComputeTargetPoolId *string `puppet:"name=>'compute_target_pool_id'"` + BackupPool *string `puppet:"name=>'backup_pool'"` + Description *string + FailoverRatio *float64 `puppet:"name=>'failover_ratio'"` + HealthChecks *[]string `puppet:"name=>'health_checks'"` + Instances *[]string + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` + SessionAffinity *string `puppet:"name=>'session_affinity'"` +} + +type ComputeTargetSslProxy struct { + BackendService string `puppet:"name=>'backend_service'"` + Name string + SslCertificates []string `puppet:"name=>'ssl_certificates'"` + ComputeTargetSslProxyId *string `puppet:"name=>'compute_target_ssl_proxy_id'"` + CreationTimestamp *string 
`puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + ProxyHeader *string `puppet:"name=>'proxy_header'"` + ProxyId *int64 `puppet:"name=>'proxy_id'"` + SelfLink *string `puppet:"name=>'self_link'"` + SslPolicy *string `puppet:"name=>'ssl_policy'"` +} + +type ComputeTargetTcpProxy struct { + BackendService string `puppet:"name=>'backend_service'"` + Name string + ComputeTargetTcpProxyId *string `puppet:"name=>'compute_target_tcp_proxy_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + ProxyHeader *string `puppet:"name=>'proxy_header'"` + ProxyId *int64 `puppet:"name=>'proxy_id'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeUrlMap struct { + DefaultService string `puppet:"name=>'default_service'"` + Name string + ComputeUrlMapId *string `puppet:"name=>'compute_url_map_id'"` + Description *string + Fingerprint *string + HostRule *[]HostRule `puppet:"name=>'host_rule'"` + MapId *string `puppet:"name=>'map_id'"` + PathMatcher *[]Matcher `puppet:"name=>'path_matcher'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + Test *[]Test +} + +type ComputeVpnGateway struct { + Name string + Network string + ComputeVpnGatewayId *string `puppet:"name=>'compute_vpn_gateway_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + Project *string + Region *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type ComputeVpnTunnel struct { + Name string + PeerIp string `puppet:"name=>'peer_ip'"` + SharedSecret string `puppet:"name=>'shared_secret'"` + TargetVpnGateway string `puppet:"name=>'target_vpn_gateway'"` + ComputeVpnTunnelId *string `puppet:"name=>'compute_vpn_tunnel_id'"` + CreationTimestamp *string `puppet:"name=>'creation_timestamp'"` + Description *string + DetailedStatus *string `puppet:"name=>'detailed_status'"` + IkeVersion *int64 `puppet:"name=>'ike_version'"` + LabelFingerprint *string `puppet:"name=>'label_fingerprint'"` + Labels *map[string]string + LocalTrafficSelector *[]string `puppet:"name=>'local_traffic_selector'"` + Project *string + Region *string + RemoteTrafficSelector *[]string `puppet:"name=>'remote_traffic_selector'"` + Router *string + SelfLink *string `puppet:"name=>'self_link'"` + SharedSecretHash *string `puppet:"name=>'shared_secret_hash'"` +} + +type ContainerAnalysisNote struct { + Name string + ContainerAnalysisNoteId *string `puppet:"name=>'container_analysis_note_id'"` + AttestationAuthority *Authority `puppet:"name=>'attestation_authority'"` + Project *string +} + +type ContainerCluster struct { + Name string + ContainerClusterId *string `puppet:"name=>'container_cluster_id'"` + AdditionalZones *[]string `puppet:"name=>'additional_zones'"` + AddonsConfig *AddonsConfig `puppet:"name=>'addons_config'"` + ClusterIpv4Cidr *string `puppet:"name=>'cluster_ipv4_cidr'"` + Description *string + EnableKubernetesAlpha *bool `puppet:"name=>'enable_kubernetes_alpha'"` + EnableLegacyAbac *bool `puppet:"name=>'enable_legacy_abac'"` + Endpoint *string + InitialNodeCount *int64 `puppet:"name=>'initial_node_count'"` + InstanceGroupUrls *[]string `puppet:"name=>'instance_group_urls'"` + IpAllocationPolicy *AllocationPolicy `puppet:"name=>'ip_allocation_policy'"` + LoggingService *string `puppet:"name=>'logging_service'"` + MaintenancePolicy *MaintenancePolicy `puppet:"name=>'maintenance_policy'"` + MasterAuth *Auth `puppet:"name=>'master_auth'"` + MasterAuthorizedNetworksConfig *NetworksConfig 
`puppet:"name=>'master_authorized_networks_config'"` + MasterVersion *string `puppet:"name=>'master_version'"` + MinMasterVersion *string `puppet:"name=>'min_master_version'"` + MonitoringService *string `puppet:"name=>'monitoring_service'"` + Network *string + NetworkPolicy *NetworkPolicy `puppet:"name=>'network_policy'"` + NodeConfig *NodeConfig `puppet:"name=>'node_config'"` + NodePool *[]Pool `puppet:"name=>'node_pool'"` + NodeVersion *string `puppet:"name=>'node_version'"` + PrivateClusterConfig *ClusterConfig `puppet:"name=>'private_cluster_config'"` + Project *string + Region *string + RemoveDefaultNodePool *bool `puppet:"name=>'remove_default_node_pool'"` + ResourceLabels *map[string]string `puppet:"name=>'resource_labels'"` + Subnetwork *string + Zone *string +} + +type ContainerNodePool struct { + Cluster string + ContainerNodePoolId *string `puppet:"name=>'container_node_pool_id'"` + Autoscaling *Autoscaling + InitialNodeCount *int64 `puppet:"name=>'initial_node_count'"` + InstanceGroupUrls *[]string `puppet:"name=>'instance_group_urls'"` + Management *Management + Name *string + NodeConfig *NodeConfig `puppet:"name=>'node_config'"` + NodeCount *int64 `puppet:"name=>'node_count'"` + Project *string + Region *string + Version *string + Zone *string +} + +type DataflowJob struct { + Name string + TempGcsLocation string `puppet:"name=>'temp_gcs_location'"` + TemplateGcsPath string `puppet:"name=>'template_gcs_path'"` + DataflowJobId *string `puppet:"name=>'dataflow_job_id'"` + MaxWorkers *int64 `puppet:"name=>'max_workers'"` + OnDelete *string `puppet:"name=>'on_delete'"` + Parameters *map[string]string + Project *string + Region *string + State *string + Zone *string +} + +type DataprocCluster struct { + Name string + DataprocClusterId *string `puppet:"name=>'dataproc_cluster_id'"` + ClusterConfig *ClusterClusterConfig `puppet:"name=>'cluster_config'"` + Labels *map[string]string + Project *string + Region *string +} + +type DataprocJob struct { + DataprocJobId *string `puppet:"name=>'dataproc_job_id'"` + DriverControlsFilesUri *string `puppet:"name=>'driver_controls_files_uri'"` + DriverOutputResourceUri *string `puppet:"name=>'driver_output_resource_uri'"` + ForceDelete *bool `puppet:"name=>'force_delete'"` + HadoopConfig *HadoopConfig `puppet:"name=>'hadoop_config'"` + HiveConfig *HiveConfig `puppet:"name=>'hive_config'"` + Labels *map[string]string + PigConfig *PigConfig `puppet:"name=>'pig_config'"` + Placement *Placement + Project *string + PysparkConfig *PysparkConfig `puppet:"name=>'pyspark_config'"` + Reference *Reference + Region *string + Scheduling *JobScheduling + SparkConfig *HadoopConfig `puppet:"name=>'spark_config'"` + SparksqlConfig *SparksqlConfig `puppet:"name=>'sparksql_config'"` + Status *Status +} + +type DnsManagedZone struct { + DnsName string `puppet:"name=>'dns_name'"` + Name string + DnsManagedZoneId *string `puppet:"name=>'dns_managed_zone_id'"` + Description *string + Labels *map[string]string + NameServers *[]string `puppet:"name=>'name_servers'"` + Project *string +} + +type DnsRecordSet struct { + ManagedZone string `puppet:"name=>'managed_zone'"` + Name string + Rrdatas []string + Ttl int64 + Type string + DnsRecordSetId *string `puppet:"name=>'dns_record_set_id'"` + Project *string +} + +type EndpointsService struct { + ServiceName string `puppet:"name=>'service_name'"` + EndpointsServiceId *string `puppet:"name=>'endpoints_service_id'"` + Apis *[]Apis + ConfigId *string `puppet:"name=>'config_id'"` + DnsAddress *string 
`puppet:"name=>'dns_address'"` + Endpoints *[]Endpoints + GrpcConfig *string `puppet:"name=>'grpc_config'"` + OpenapiConfig *string `puppet:"name=>'openapi_config'"` + Project *string + ProtocOutputBase64 *string `puppet:"name=>'protoc_output_base64'"` +} + +type FilestoreInstance struct { + Name string + Networks []Networks + Tier string + Zone string + FilestoreInstanceId *string `puppet:"name=>'filestore_instance_id'"` + CreateTime *string `puppet:"name=>'create_time'"` + Description *string + Etag *string + FileShares *Shares `puppet:"name=>'file_shares'"` + Labels *map[string]string + Project *string +} + +type Folder struct { + DisplayName string `puppet:"name=>'display_name'"` + Parent string + FolderId *string `puppet:"name=>'folder_id'"` + CreateTime *string `puppet:"name=>'create_time'"` + LifecycleState *string `puppet:"name=>'lifecycle_state'"` + Name *string +} + +type FolderIamBinding struct { + Folder string + Members []string + Role string + FolderIamBindingId *string `puppet:"name=>'folder_iam_binding_id'"` + Etag *string +} + +type FolderIamMember struct { + Folder string + Member string + Role string + FolderIamMemberId *string `puppet:"name=>'folder_iam_member_id'"` + Etag *string +} + +type FolderIamPolicy struct { + Folder string + PolicyData string `puppet:"name=>'policy_data'"` + FolderIamPolicyId *string `puppet:"name=>'folder_iam_policy_id'"` + Etag *string +} + +type FolderOrganizationPolicy struct { + Constraint string + Folder string + FolderOrganizationPolicyId *string `puppet:"name=>'folder_organization_policy_id'"` + BooleanPolicy *Policy `puppet:"name=>'boolean_policy'"` + Etag *string + ListPolicy *ListPolicy `puppet:"name=>'list_policy'"` + RestorePolicy *RestorePolicy `puppet:"name=>'restore_policy'"` + UpdateTime *string `puppet:"name=>'update_time'"` + Version *int64 +} + +type GenericHandler struct { +} + +type KmsCryptoKey struct { + KeyRing string `puppet:"name=>'key_ring'"` + Name string + KmsCryptoKeyId *string `puppet:"name=>'kms_crypto_key_id'"` + RotationPeriod *string `puppet:"name=>'rotation_period'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type KmsCryptoKeyIamBinding struct { + CryptoKeyId string `puppet:"name=>'crypto_key_id'"` + Members []string + Role string + KmsCryptoKeyIamBindingId *string `puppet:"name=>'kms_crypto_key_iam_binding_id'"` + Etag *string +} + +type KmsCryptoKeyIamMember struct { + CryptoKeyId string `puppet:"name=>'crypto_key_id'"` + Member string + Role string + KmsCryptoKeyIamMemberId *string `puppet:"name=>'kms_crypto_key_iam_member_id'"` + Etag *string +} + +type KmsKeyRing struct { + Location string + Name string + KmsKeyRingId *string `puppet:"name=>'kms_key_ring_id'"` + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type KmsKeyRingIamBinding struct { + KeyRingId string `puppet:"name=>'key_ring_id'"` + Members []string + Role string + KmsKeyRingIamBindingId *string `puppet:"name=>'kms_key_ring_iam_binding_id'"` + Etag *string +} + +type KmsKeyRingIamMember struct { + KeyRingId string `puppet:"name=>'key_ring_id'"` + Member string + Role string + KmsKeyRingIamMemberId *string `puppet:"name=>'kms_key_ring_iam_member_id'"` + Etag *string +} + +type KmsKeyRingIamPolicy struct { + KeyRingId string `puppet:"name=>'key_ring_id'"` + PolicyData string `puppet:"name=>'policy_data'"` + KmsKeyRingIamPolicyId *string `puppet:"name=>'kms_key_ring_iam_policy_id'"` + Etag *string +} + +type LoggingBillingAccountExclusion struct { + BillingAccount string `puppet:"name=>'billing_account'"` + 
Filter string + Name string + LoggingBillingAccountExclusionId *string `puppet:"name=>'logging_billing_account_exclusion_id'"` + Description *string + Disabled *bool +} + +type LoggingBillingAccountSink struct { + BillingAccount string `puppet:"name=>'billing_account'"` + Destination string + Name string + LoggingBillingAccountSinkId *string `puppet:"name=>'logging_billing_account_sink_id'"` + Filter *string + WriterIdentity *string `puppet:"name=>'writer_identity'"` +} + +type LoggingFolderExclusion struct { + Filter string + Folder string + Name string + LoggingFolderExclusionId *string `puppet:"name=>'logging_folder_exclusion_id'"` + Description *string + Disabled *bool +} + +type LoggingFolderSink struct { + Destination string + Folder string + Name string + LoggingFolderSinkId *string `puppet:"name=>'logging_folder_sink_id'"` + Filter *string + IncludeChildren *bool `puppet:"name=>'include_children'"` + WriterIdentity *string `puppet:"name=>'writer_identity'"` +} + +type LoggingOrganizationExclusion struct { + Filter string + Name string + OrgId string `puppet:"name=>'org_id'"` + LoggingOrganizationExclusionId *string `puppet:"name=>'logging_organization_exclusion_id'"` + Description *string + Disabled *bool +} + +type LoggingOrganizationSink struct { + Destination string + Name string + OrgId string `puppet:"name=>'org_id'"` + LoggingOrganizationSinkId *string `puppet:"name=>'logging_organization_sink_id'"` + Filter *string + IncludeChildren *bool `puppet:"name=>'include_children'"` + WriterIdentity *string `puppet:"name=>'writer_identity'"` +} + +type LoggingProjectExclusion struct { + Filter string + Name string + LoggingProjectExclusionId *string `puppet:"name=>'logging_project_exclusion_id'"` + Description *string + Disabled *bool + Project *string +} + +type LoggingProjectSink struct { + Destination string + Name string + LoggingProjectSinkId *string `puppet:"name=>'logging_project_sink_id'"` + Filter *string + Project *string + UniqueWriterIdentity *bool `puppet:"name=>'unique_writer_identity'"` + WriterIdentity *string `puppet:"name=>'writer_identity'"` +} + +type MonitoringAlertPolicy struct { + Combiner string + Conditions []Conditions + DisplayName string `puppet:"name=>'display_name'"` + Enabled bool + MonitoringAlertPolicyId *string `puppet:"name=>'monitoring_alert_policy_id'"` + CreationRecord *Record `puppet:"name=>'creation_record'"` + Labels *[]string + Name *string + NotificationChannels *[]string `puppet:"name=>'notification_channels'"` + Project *string +} + +type MonitoringGroup struct { + DisplayName string `puppet:"name=>'display_name'"` + Filter string + MonitoringGroupId *string `puppet:"name=>'monitoring_group_id'"` + IsCluster *bool `puppet:"name=>'is_cluster'"` + Name *string + ParentName *string `puppet:"name=>'parent_name'"` + Project *string +} + +type MonitoringNotificationChannel struct { + DisplayName string `puppet:"name=>'display_name'"` + Type string + MonitoringNotificationChannelId *string `puppet:"name=>'monitoring_notification_channel_id'"` + Description *string + Enabled *bool + Labels *map[string]string + Name *string + Project *string + UserLabels *map[string]string `puppet:"name=>'user_labels'"` + VerificationStatus *string `puppet:"name=>'verification_status'"` +} + +type MonitoringUptimeCheckConfig struct { + DisplayName string `puppet:"name=>'display_name'"` + Timeout string + MonitoringUptimeCheckConfigId *string `puppet:"name=>'monitoring_uptime_check_config_id'"` + ContentMatchers *[]Matchers `puppet:"name=>'content_matchers'"` + 
HttpCheck *HttpCheck `puppet:"name=>'http_check'"` + InternalCheckers *[]Checkers `puppet:"name=>'internal_checkers'"` + IsInternal *bool `puppet:"name=>'is_internal'"` + MonitoredResource *Resource `puppet:"name=>'monitored_resource'"` + Name *string + Period *string + Project *string + ResourceGroup *Group `puppet:"name=>'resource_group'"` + SelectedRegions *[]string `puppet:"name=>'selected_regions'"` + TcpCheck *TcpCheck `puppet:"name=>'tcp_check'"` +} + +type OrganizationIamBinding struct { + Members []string + OrgId string `puppet:"name=>'org_id'"` + Role string + OrganizationIamBindingId *string `puppet:"name=>'organization_iam_binding_id'"` + Etag *string +} + +type OrganizationIamCustomRole struct { + OrgId string `puppet:"name=>'org_id'"` + Permissions []string + RoleId string `puppet:"name=>'role_id'"` + Title string + OrganizationIamCustomRoleId *string `puppet:"name=>'organization_iam_custom_role_id'"` + Description *string + Stage *string +} + +type OrganizationIamMember struct { + Member string + OrgId string `puppet:"name=>'org_id'"` + Role string + OrganizationIamMemberId *string `puppet:"name=>'organization_iam_member_id'"` + Etag *string +} + +type OrganizationIamPolicy struct { + OrgId string `puppet:"name=>'org_id'"` + PolicyData string `puppet:"name=>'policy_data'"` + OrganizationIamPolicyId *string `puppet:"name=>'organization_iam_policy_id'"` + Etag *string +} + +type OrganizationPolicy struct { + Constraint string + OrgId string `puppet:"name=>'org_id'"` + OrganizationPolicyId *string `puppet:"name=>'organization_policy_id'"` + BooleanPolicy *Policy `puppet:"name=>'boolean_policy'"` + Etag *string + ListPolicy *ListPolicy `puppet:"name=>'list_policy'"` + RestorePolicy *RestorePolicy `puppet:"name=>'restore_policy'"` + UpdateTime *string `puppet:"name=>'update_time'"` + Version *int64 +} + +type Project struct { + Name string + ProjectId string `puppet:"name=>'project_id'"` + ProjectLyraId *string `puppet:"name=>'project_lyra_id'"` + AutoCreateNetwork *bool `puppet:"name=>'auto_create_network'"` + BillingAccount *string `puppet:"name=>'billing_account'"` + FolderId *string `puppet:"name=>'folder_id'"` + Labels *map[string]string + Number *string + OrgId *string `puppet:"name=>'org_id'"` + SkipDelete *bool `puppet:"name=>'skip_delete'"` +} + +type ProjectIamBinding struct { + Members []string + Role string + ProjectIamBindingId *string `puppet:"name=>'project_iam_binding_id'"` + Etag *string + Project *string +} + +type ProjectIamCustomRole struct { + Permissions []string + RoleId string `puppet:"name=>'role_id'"` + Title string + ProjectIamCustomRoleId *string `puppet:"name=>'project_iam_custom_role_id'"` + Description *string + Project *string + Stage *string +} + +type ProjectIamMember struct { + Member string + Role string + ProjectIamMemberId *string `puppet:"name=>'project_iam_member_id'"` + Etag *string + Project *string +} + +type ProjectIamPolicy struct { + PolicyData string `puppet:"name=>'policy_data'"` + ProjectIamPolicyId *string `puppet:"name=>'project_iam_policy_id'"` + Etag *string + Project *string +} + +type ProjectOrganizationPolicy struct { + Constraint string + Project string + ProjectOrganizationPolicyId *string `puppet:"name=>'project_organization_policy_id'"` + BooleanPolicy *Policy `puppet:"name=>'boolean_policy'"` + Etag *string + ListPolicy *ListPolicy `puppet:"name=>'list_policy'"` + RestorePolicy *RestorePolicy `puppet:"name=>'restore_policy'"` + UpdateTime *string `puppet:"name=>'update_time'"` + Version *int64 +} + +type ProjectService 
struct { + Service string + ProjectServiceId *string `puppet:"name=>'project_service_id'"` + DisableOnDestroy *bool `puppet:"name=>'disable_on_destroy'"` + Project *string +} + +type ProjectServices struct { + Services []string + ProjectServicesId *string `puppet:"name=>'project_services_id'"` + DisableOnDestroy *bool `puppet:"name=>'disable_on_destroy'"` + Project *string +} + +type ProjectUsageExportBucket struct { + BucketName string `puppet:"name=>'bucket_name'"` + ProjectUsageExportBucketId *string `puppet:"name=>'project_usage_export_bucket_id'"` + Prefix *string + Project *string +} + +type PubsubSubscription struct { + Name string + Topic string + PubsubSubscriptionId *string `puppet:"name=>'pubsub_subscription_id'"` + AckDeadlineSeconds *int64 `puppet:"name=>'ack_deadline_seconds'"` + Path *string + Project *string + PushConfig *PushConfig `puppet:"name=>'push_config'"` +} + +type PubsubSubscriptionIamBinding struct { + Members []string + Role string + Subscription string + PubsubSubscriptionIamBindingId *string `puppet:"name=>'pubsub_subscription_iam_binding_id'"` + Etag *string + Project *string +} + +type PubsubSubscriptionIamMember struct { + Member string + Role string + Subscription string + PubsubSubscriptionIamMemberId *string `puppet:"name=>'pubsub_subscription_iam_member_id'"` + Etag *string + Project *string +} + +type PubsubSubscriptionIamPolicy struct { + PolicyData string `puppet:"name=>'policy_data'"` + Subscription string + PubsubSubscriptionIamPolicyId *string `puppet:"name=>'pubsub_subscription_iam_policy_id'"` + Etag *string + Project *string +} + +type PubsubTopic struct { + Name string + PubsubTopicId *string `puppet:"name=>'pubsub_topic_id'"` + Project *string +} + +type PubsubTopicIamBinding struct { + Members []string + Role string + Topic string + PubsubTopicIamBindingId *string `puppet:"name=>'pubsub_topic_iam_binding_id'"` + Etag *string + Project *string +} + +type PubsubTopicIamMember struct { + Member string + Role string + Topic string + PubsubTopicIamMemberId *string `puppet:"name=>'pubsub_topic_iam_member_id'"` + Etag *string + Project *string +} + +type PubsubTopicIamPolicy struct { + PolicyData string `puppet:"name=>'policy_data'"` + Topic string + PubsubTopicIamPolicyId *string `puppet:"name=>'pubsub_topic_iam_policy_id'"` + Etag *string + Project *string +} + +type RedisInstance struct { + MemorySizeGb int64 `puppet:"name=>'memory_size_gb'"` + Name string + RedisInstanceId *string `puppet:"name=>'redis_instance_id'"` + AlternativeLocationId *string `puppet:"name=>'alternative_location_id'"` + AuthorizedNetwork *string `puppet:"name=>'authorized_network'"` + CreateTime *string `puppet:"name=>'create_time'"` + CurrentLocationId *string `puppet:"name=>'current_location_id'"` + DisplayName *string `puppet:"name=>'display_name'"` + Host *string + Labels *map[string]string + LocationId *string `puppet:"name=>'location_id'"` + Port *int64 + Project *string + RedisConfigs *map[string]string `puppet:"name=>'redis_configs'"` + RedisVersion *string `puppet:"name=>'redis_version'"` + Region *string + ReservedIpRange *string `puppet:"name=>'reserved_ip_range'"` + Tier *string +} + +type ResourceManagerLien struct { + Origin string + Parent string + Reason string + Restrictions []string + ResourceManagerLienId *string `puppet:"name=>'resource_manager_lien_id'"` + CreateTime *string `puppet:"name=>'create_time'"` + Name *string +} + +type RuntimeconfigConfig struct { + Name string + RuntimeconfigConfigId *string `puppet:"name=>'runtimeconfig_config_id'"` + 
Description *string + Project *string +} + +type RuntimeconfigVariable struct { + Name string + Parent string + RuntimeconfigVariableId *string `puppet:"name=>'runtimeconfig_variable_id'"` + Project *string + Text *string + UpdateTime *string `puppet:"name=>'update_time'"` + Value *string +} + +type ServiceAccount struct { + AccountId string `puppet:"name=>'account_id'"` + ServiceAccountId *string `puppet:"name=>'service_account_id'"` + DisplayName *string `puppet:"name=>'display_name'"` + Email *string + Name *string + Project *string + UniqueId *string `puppet:"name=>'unique_id'"` +} + +type ServiceAccountIamBinding struct { + Members []string + Role string + ServiceAccountId string `puppet:"name=>'service_account_id'"` + ServiceAccountIamBindingId *string `puppet:"name=>'service_account_iam_binding_id'"` + Etag *string +} + +type ServiceAccountIamMember struct { + Member string + Role string + ServiceAccountId string `puppet:"name=>'service_account_id'"` + ServiceAccountIamMemberId *string `puppet:"name=>'service_account_iam_member_id'"` + Etag *string +} + +type ServiceAccountIamPolicy struct { + PolicyData string `puppet:"name=>'policy_data'"` + ServiceAccountId string `puppet:"name=>'service_account_id'"` + ServiceAccountIamPolicyId *string `puppet:"name=>'service_account_iam_policy_id'"` + Etag *string +} + +type ServiceAccountKey struct { + ServiceAccountId string `puppet:"name=>'service_account_id'"` + ServiceAccountKeyId *string `puppet:"name=>'service_account_key_id'"` + KeyAlgorithm *string `puppet:"name=>'key_algorithm'"` + Name *string + PgpKey *string `puppet:"name=>'pgp_key'"` + PrivateKey *string `puppet:"name=>'private_key'"` + PrivateKeyEncrypted *string `puppet:"name=>'private_key_encrypted'"` + PrivateKeyFingerprint *string `puppet:"name=>'private_key_fingerprint'"` + PrivateKeyType *string `puppet:"name=>'private_key_type'"` + PublicKey *string `puppet:"name=>'public_key'"` + PublicKeyType *string `puppet:"name=>'public_key_type'"` + ValidAfter *string `puppet:"name=>'valid_after'"` + ValidBefore *string `puppet:"name=>'valid_before'"` +} + +type SourcerepoRepository struct { + Name string + SourcerepoRepositoryId *string `puppet:"name=>'sourcerepo_repository_id'"` + Project *string + Size *int64 + Url *string +} + +type SpannerDatabase struct { + Instance string + Name string + SpannerDatabaseId *string `puppet:"name=>'spanner_database_id'"` + Ddl *[]string + Project *string + State *string +} + +type SpannerDatabaseIamBinding struct { + Database string + Instance string + Members []string + Role string + SpannerDatabaseIamBindingId *string `puppet:"name=>'spanner_database_iam_binding_id'"` + Etag *string + Project *string +} + +type SpannerDatabaseIamMember struct { + Database string + Instance string + Member string + Role string + SpannerDatabaseIamMemberId *string `puppet:"name=>'spanner_database_iam_member_id'"` + Etag *string + Project *string +} + +type SpannerDatabaseIamPolicy struct { + Database string + Instance string + PolicyData string `puppet:"name=>'policy_data'"` + SpannerDatabaseIamPolicyId *string `puppet:"name=>'spanner_database_iam_policy_id'"` + Etag *string + Project *string +} + +type SpannerInstance struct { + Config string + DisplayName string `puppet:"name=>'display_name'"` + SpannerInstanceId *string `puppet:"name=>'spanner_instance_id'"` + Labels *map[string]string + Name *string + NumNodes *int64 `puppet:"name=>'num_nodes'"` + Project *string + State *string +} + +type SpannerInstanceIamBinding struct { + Instance string + Members 
[]string + Role string + SpannerInstanceIamBindingId *string `puppet:"name=>'spanner_instance_iam_binding_id'"` + Etag *string + Project *string +} + +type SpannerInstanceIamMember struct { + Instance string + Member string + Role string + SpannerInstanceIamMemberId *string `puppet:"name=>'spanner_instance_iam_member_id'"` + Etag *string + Project *string +} + +type SpannerInstanceIamPolicy struct { + Instance string + PolicyData string `puppet:"name=>'policy_data'"` + SpannerInstanceIamPolicyId *string `puppet:"name=>'spanner_instance_iam_policy_id'"` + Etag *string + Project *string +} + +type SqlDatabase struct { + Instance string + Name string + SqlDatabaseId *string `puppet:"name=>'sql_database_id'"` + Charset *string + Collation *string + Project *string + SelfLink *string `puppet:"name=>'self_link'"` +} + +type SqlDatabaseInstance struct { + SqlDatabaseInstanceId *string `puppet:"name=>'sql_database_instance_id'"` + ConnectionName *string `puppet:"name=>'connection_name'"` + DatabaseVersion *string `puppet:"name=>'database_version'"` + FirstIpAddress *string `puppet:"name=>'first_ip_address'"` + IpAddress *[]Address `puppet:"name=>'ip_address'"` + MasterInstanceName *string `puppet:"name=>'master_instance_name'"` + Name *string + Project *string + Region *string + ReplicaConfiguration *Configuration `puppet:"name=>'replica_configuration'"` + SelfLink *string `puppet:"name=>'self_link'"` + ServerCaCert *Cert `puppet:"name=>'server_ca_cert'"` + ServiceAccountEmailAddress *string `puppet:"name=>'service_account_email_address'"` + Settings *InstanceSettings +} + +type SqlSslCert struct { + CommonName string `puppet:"name=>'common_name'"` + Instance string + SqlSslCertId *string `puppet:"name=>'sql_ssl_cert_id'"` + Cert *string + CertSerialNumber *string `puppet:"name=>'cert_serial_number'"` + CreateTime *string `puppet:"name=>'create_time'"` + ExpirationTime *string `puppet:"name=>'expiration_time'"` + PrivateKey *string `puppet:"name=>'private_key'"` + ServerCaCert *string `puppet:"name=>'server_ca_cert'"` + Sha1Fingerprint *string `puppet:"name=>'sha1_fingerprint'"` +} + +type SqlUser struct { + Instance string + Name string + SqlUserId *string `puppet:"name=>'sql_user_id'"` + Host *string + Password *string + Project *string +} + +type StorageBucket struct { + Name string + StorageBucketId *string `puppet:"name=>'storage_bucket_id'"` + Cors *[]Cors + Encryption *Encryption + ForceDestroy *bool `puppet:"name=>'force_destroy'"` + Labels *map[string]string + LifecycleRule *[]LifecycleRule `puppet:"name=>'lifecycle_rule'"` + Location *string + Logging *Logging + Project *string + SelfLink *string `puppet:"name=>'self_link'"` + StorageClass *string `puppet:"name=>'storage_class'"` + Url *string + Versioning *Versioning + Website *[]Website +} + +type StorageBucketAcl struct { + Bucket string + StorageBucketAclId *string `puppet:"name=>'storage_bucket_acl_id'"` + DefaultAcl *string `puppet:"name=>'default_acl'"` + PredefinedAcl *string `puppet:"name=>'predefined_acl'"` + RoleEntity *[]string `puppet:"name=>'role_entity'"` +} + +type StorageBucketIamBinding struct { + Bucket string + Members []string + Role string + StorageBucketIamBindingId *string `puppet:"name=>'storage_bucket_iam_binding_id'"` + Etag *string +} + +type StorageBucketIamMember struct { + Bucket string + Member string + Role string + StorageBucketIamMemberId *string `puppet:"name=>'storage_bucket_iam_member_id'"` + Etag *string +} + +type StorageBucketIamPolicy struct { + Bucket string + PolicyData string 
`puppet:"name=>'policy_data'"` + StorageBucketIamPolicyId *string `puppet:"name=>'storage_bucket_iam_policy_id'"` + Etag *string +} + +type StorageBucketObject struct { + Bucket string + Name string + StorageBucketObjectId *string `puppet:"name=>'storage_bucket_object_id'"` + CacheControl *string `puppet:"name=>'cache_control'"` + Content *string + ContentDisposition *string `puppet:"name=>'content_disposition'"` + ContentEncoding *string `puppet:"name=>'content_encoding'"` + ContentLanguage *string `puppet:"name=>'content_language'"` + ContentType *string `puppet:"name=>'content_type'"` + Crc32c *string + DetectMd5hash *string `puppet:"name=>'detect_md5hash'"` + Md5hash *string + Source *string + StorageClass *string `puppet:"name=>'storage_class'"` +} + +type StorageDefaultObjectAccessControl struct { + Bucket string + Entity string + Role string + StorageDefaultObjectAccessControlId *string `puppet:"name=>'storage_default_object_access_control_id'"` + Domain *string + Email *string + EntityId *string `puppet:"name=>'entity_id'"` + Generation *int64 + Object *string + ProjectTeam *Team `puppet:"name=>'project_team'"` +} + +type StorageDefaultObjectAcl struct { + Bucket string + StorageDefaultObjectAclId *string `puppet:"name=>'storage_default_object_acl_id'"` + RoleEntity *[]string `puppet:"name=>'role_entity'"` +} + +type StorageNotification struct { + Bucket string + PayloadFormat string `puppet:"name=>'payload_format'"` + Topic string + StorageNotificationId *string `puppet:"name=>'storage_notification_id'"` + CustomAttributes *map[string]string `puppet:"name=>'custom_attributes'"` + EventTypes *[]string `puppet:"name=>'event_types'"` + ObjectNamePrefix *string `puppet:"name=>'object_name_prefix'"` + SelfLink *string `puppet:"name=>'self_link'"` +} + +type StorageObjectAccessControl struct { + Bucket string + Entity string + Object string + Role string + StorageObjectAccessControlId *string `puppet:"name=>'storage_object_access_control_id'"` + Domain *string + Email *string + EntityId *string `puppet:"name=>'entity_id'"` + Generation *int64 + ProjectTeam *Team `puppet:"name=>'project_team'"` +} + +type StorageObjectAcl struct { + Bucket string + Object string + StorageObjectAclId *string `puppet:"name=>'storage_object_acl_id'"` + PredefinedAcl *string `puppet:"name=>'predefined_acl'"` + RoleEntity *[]string `puppet:"name=>'role_entity'"` +} + +type Absent struct { + Duration string + Aggregations *[]Aggregations + Filter *string + Trigger *Trigger +} + +type Accelerator struct { + Count int64 + Type string +} + +type Access struct { + Domain *string + GroupByEmail *string `puppet:"name=>'group_by_email'"` + Role *string + SpecialGroup *string `puppet:"name=>'special_group'"` + UserByEmail *string `puppet:"name=>'user_by_email'"` + View *View +} + +type AccessConfig struct { + NatIp *string `puppet:"name=>'nat_ip'"` + NetworkTier *string `puppet:"name=>'network_tier'"` + PublicPtrDomainName *string `puppet:"name=>'public_ptr_domain_name'"` +} + +type Account struct { + Scopes []string + Email *string +} + +type Action struct { + Script string + TimeoutSec *int64 `puppet:"name=>'timeout_sec'"` +} + +type AddonsConfig struct { + HorizontalPodAutoscaling *AutoscalingBalancingDashboardConfig `puppet:"name=>'horizontal_pod_autoscaling'"` + HttpLoadBalancing *AutoscalingBalancingDashboardConfig `puppet:"name=>'http_load_balancing'"` + KubernetesDashboard *AutoscalingBalancingDashboardConfig `puppet:"name=>'kubernetes_dashboard'"` + NetworkPolicyConfig *AutoscalingBalancingDashboardConfig 
`puppet:"name=>'network_policy_config'"` +} + +type Address struct { + IpAddress *string `puppet:"name=>'ip_address'"` + TimeToRetire *string `puppet:"name=>'time_to_retire'"` +} + +type AdmissionRule struct { + EnforcementMode string `puppet:"name=>'enforcement_mode'"` + EvaluationMode string `puppet:"name=>'evaluation_mode'"` + RequireAttestationsBy *[]string `puppet:"name=>'require_attestations_by'"` +} + +type Aggregations struct { + AlignmentPeriod *string `puppet:"name=>'alignment_period'"` + CrossSeriesReducer *string `puppet:"name=>'cross_series_reducer'"` + GroupByFields *[]string `puppet:"name=>'group_by_fields'"` + PerSeriesAligner *string `puppet:"name=>'per_series_aligner'"` +} + +type AllocationPolicy struct { + ClusterIpv4CidrBlock *string `puppet:"name=>'cluster_ipv4_cidr_block'"` + ClusterSecondaryRangeName *string `puppet:"name=>'cluster_secondary_range_name'"` + CreateSubnetwork *bool `puppet:"name=>'create_subnetwork'"` + ServicesIpv4CidrBlock *string `puppet:"name=>'services_ipv4_cidr_block'"` + ServicesSecondaryRangeName *string `puppet:"name=>'services_secondary_range_name'"` + SubnetworkName *string `puppet:"name=>'subnetwork_name'"` +} + +type AllowDeny struct { + All *bool + Values *[]string +} + +type Apis struct { + Methods *[]Methods + Name *string + Syntax *string + Version *string +} + +type AttachedDisk struct { + Source string + DeviceName *string `puppet:"name=>'device_name'"` + DiskEncryptionKeyRaw *string `puppet:"name=>'disk_encryption_key_raw'"` + DiskEncryptionKeySha256 *string `puppet:"name=>'disk_encryption_key_sha256'"` + Mode *string +} + +type Auth struct { + Password string + Username string + ClientCertificate *string `puppet:"name=>'client_certificate'"` + ClientCertificateConfig *CertificateConfig `puppet:"name=>'client_certificate_config'"` + ClientKey *string `puppet:"name=>'client_key'"` + ClusterCaCertificate *string `puppet:"name=>'cluster_ca_certificate'"` +} + +type AuthInfo struct { + Password *string + Username *string +} + +type Authority struct { + Hint *Hint +} + +type AuthorizedNetworks struct { + ExpirationTime *string `puppet:"name=>'expiration_time'"` + Name *string + Value *string +} + +type Autoscaling struct { + MaxNodeCount int64 `puppet:"name=>'max_node_count'"` + MinNodeCount int64 `puppet:"name=>'min_node_count'"` +} + +type AutoscalingBalancingDashboardConfig struct { + Disabled *bool +} + +type AutoscalingPolicy struct { + MaxReplicas int64 `puppet:"name=>'max_replicas'"` + MinReplicas int64 `puppet:"name=>'min_replicas'"` + CooldownPeriod *int64 `puppet:"name=>'cooldown_period'"` + CpuUtilization *Utilization `puppet:"name=>'cpu_utilization'"` + LoadBalancingUtilization *Utilization `puppet:"name=>'load_balancing_utilization'"` + Metric *[]Metric +} + +type Backend struct { + BalancingMode *string `puppet:"name=>'balancing_mode'"` + CapacityScaler *float64 `puppet:"name=>'capacity_scaler'"` + Description *string + Group *string + MaxConnections *int64 `puppet:"name=>'max_connections'"` + MaxConnectionsPerInstance *int64 `puppet:"name=>'max_connections_per_instance'"` + MaxRate *int64 `puppet:"name=>'max_rate'"` + MaxRatePerInstance *float64 `puppet:"name=>'max_rate_per_instance'"` + MaxUtilization *float64 `puppet:"name=>'max_utilization'"` +} + +type BackupConfiguration struct { + BinaryLogEnabled *bool `puppet:"name=>'binary_log_enabled'"` + Enabled *bool + StartTime *string `puppet:"name=>'start_time'"` +} + +type Bgp struct { + Asn int64 + AdvertiseMode *string `puppet:"name=>'advertise_mode'"` + AdvertisedGroups 
*[]string `puppet:"name=>'advertised_groups'"` + AdvertisedIpRanges *[]Ranges `puppet:"name=>'advertised_ip_ranges'"` +} + +type Blocks struct { + CidrBlock string `puppet:"name=>'cidr_block'"` + DisplayName *string `puppet:"name=>'display_name'"` +} + +type BootDisk struct { + AutoDelete *bool `puppet:"name=>'auto_delete'"` + DeviceName *string `puppet:"name=>'device_name'"` + DiskEncryptionKeyRaw *string `puppet:"name=>'disk_encryption_key_raw'"` + DiskEncryptionKeySha256 *string `puppet:"name=>'disk_encryption_key_sha256'"` + InitializeParams *Params `puppet:"name=>'initialize_params'"` + Source *string +} + +type Build struct { + Images *[]string + Step *[]Step + Tags *[]string +} + +type CdnPolicy struct { + CacheKeyPolicy *KeyPolicy `puppet:"name=>'cache_key_policy'"` +} + +type Cert struct { + Cert *string + CommonName *string `puppet:"name=>'common_name'"` + CreateTime *string `puppet:"name=>'create_time'"` + ExpirationTime *string `puppet:"name=>'expiration_time'"` + Sha1Fingerprint *string `puppet:"name=>'sha1_fingerprint'"` +} + +type Certificate struct { + Certificate string + Format string +} + +type CertificateConfig struct { + IssueClientCertificate bool `puppet:"name=>'issue_client_certificate'"` +} + +type Check struct { + Host *string + Port *int64 + ProxyHeader *string `puppet:"name=>'proxy_header'"` + RequestPath *string `puppet:"name=>'request_path'"` + Response *string +} + +type Checkers struct { + DisplayName *string `puppet:"name=>'display_name'"` + GcpZone *string `puppet:"name=>'gcp_zone'"` + Name *string + Network *string + PeerProjectId *string `puppet:"name=>'peer_project_id'"` +} + +type Cluster struct { + ClusterId *string `puppet:"name=>'cluster_id'"` + NumNodes *int64 `puppet:"name=>'num_nodes'"` + StorageType *string `puppet:"name=>'storage_type'"` + Zone *string +} + +type ClusterClusterConfig struct { + Bucket *string + GceClusterConfig *GceClusterConfig `puppet:"name=>'gce_cluster_config'"` + InitializationAction *[]Action `puppet:"name=>'initialization_action'"` + MasterConfig *MasterConfig `puppet:"name=>'master_config'"` + PreemptibleWorkerConfig *WorkerConfig `puppet:"name=>'preemptible_worker_config'"` + SoftwareConfig *ConfigSoftwareConfig `puppet:"name=>'software_config'"` + StagingBucket *string `puppet:"name=>'staging_bucket'"` + WorkerConfig *MasterConfig `puppet:"name=>'worker_config'"` +} + +type ClusterConfig struct { + EnablePrivateEndpoint *bool `puppet:"name=>'enable_private_endpoint'"` + EnablePrivateNodes *bool `puppet:"name=>'enable_private_nodes'"` + MasterIpv4CidrBlock *string `puppet:"name=>'master_ipv4_cidr_block'"` + PrivateEndpoint *string `puppet:"name=>'private_endpoint'"` + PublicEndpoint *string `puppet:"name=>'public_endpoint'"` +} + +type Condition struct { + Age *int64 + CreatedBefore *string `puppet:"name=>'created_before'"` + IsLive *bool `puppet:"name=>'is_live'"` + MatchesStorageClass *[]string `puppet:"name=>'matches_storage_class'"` + NumNewerVersions *int64 `puppet:"name=>'num_newer_versions'"` +} + +type Conditions struct { + DisplayName string `puppet:"name=>'display_name'"` + ConditionAbsent *Absent `puppet:"name=>'condition_absent'"` + ConditionThreshold *Threshold `puppet:"name=>'condition_threshold'"` + Name *string +} + +type Config struct { + DriverLogLevels *map[string]string `puppet:"name=>'driver_log_levels'"` +} + +type ConfigDiskConfig struct { + BootDiskSizeGb *int64 `puppet:"name=>'boot_disk_size_gb'"` +} + +type ConfigNodeConfig struct { + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + 
MachineType *string `puppet:"name=>'machine_type'"` + Network *string + OauthScopes *[]string `puppet:"name=>'oauth_scopes'"` + ServiceAccount *string `puppet:"name=>'service_account'"` + Subnetwork *string + Tags *[]string + Zone *string +} + +type ConfigSoftwareConfig struct { + ImageVersion *string `puppet:"name=>'image_version'"` + OverrideProperties *map[string]string `puppet:"name=>'override_properties'"` + Properties *map[string]string +} + +type Configuration struct { + CaCertificate *string `puppet:"name=>'ca_certificate'"` + ClientCertificate *string `puppet:"name=>'client_certificate'"` + ClientKey *string `puppet:"name=>'client_key'"` + ConnectRetryInterval *int64 `puppet:"name=>'connect_retry_interval'"` + DumpFilePath *string `puppet:"name=>'dump_file_path'"` + FailoverTarget *bool `puppet:"name=>'failover_target'"` + MasterHeartbeatPeriod *int64 `puppet:"name=>'master_heartbeat_period'"` + Password *string + SslCipher *string `puppet:"name=>'ssl_cipher'"` + Username *string + VerifyServerCertificate *bool `puppet:"name=>'verify_server_certificate'"` +} + +type Cors struct { + MaxAgeSeconds *int64 `puppet:"name=>'max_age_seconds'"` + Method *[]string + Origin *[]string + ResponseHeader *[]string `puppet:"name=>'response_header'"` +} + +type Credentials struct { + PublicKeyCertificate *map[string]Certificate `puppet:"name=>'public_key_certificate'"` +} + +type Disk struct { + Source string + ContainerType *string `puppet:"name=>'container_type'"` + Sha1 *string +} + +type DiskConfig struct { + BootDiskSizeGb *int64 `puppet:"name=>'boot_disk_size_gb'"` + BootDiskType *string `puppet:"name=>'boot_disk_type'"` + NumLocalSsds *int64 `puppet:"name=>'num_local_ssds'"` +} + +type DiskEncryptionKey struct { + RawKey *string `puppet:"name=>'raw_key'"` +} + +type Encryption struct { + DefaultKmsKeyName string `puppet:"name=>'default_kms_key_name'"` +} + +type EncryptionKey struct { + KmsKeySelfLink *string `puppet:"name=>'kms_key_self_link'"` +} + +type Endpoints struct { + Address *string + Name *string +} + +type EnvironmentConfig struct { + AirflowUri *string `puppet:"name=>'airflow_uri'"` + DagGcsPrefix *string `puppet:"name=>'dag_gcs_prefix'"` + GkeCluster *string `puppet:"name=>'gke_cluster'"` + NodeConfig *ConfigNodeConfig `puppet:"name=>'node_config'"` + NodeCount *int64 `puppet:"name=>'node_count'"` + SoftwareConfig *SoftwareConfig `puppet:"name=>'software_config'"` +} + +type EventTrigger struct { + EventType string `puppet:"name=>'event_type'"` + Resource string + FailurePolicy *FailurePolicy `puppet:"name=>'failure_policy'"` +} + +type FailurePolicy struct { + Retry bool +} + +type FirewallAllowDeny struct { + Protocol string + Ports *[]string +} + +type Flags struct { + Name *string + Value *string +} + +type GceClusterConfig struct { + InternalIpOnly *bool `puppet:"name=>'internal_ip_only'"` + Metadata *map[string]string + Network *string + ServiceAccount *string `puppet:"name=>'service_account'"` + ServiceAccountScopes *[]string `puppet:"name=>'service_account_scopes'"` + Subnetwork *string + Tags *[]string + Zone *string +} + +type Group struct { + GroupId *string `puppet:"name=>'group_id'"` + ResourceType *string `puppet:"name=>'resource_type'"` +} + +type HadoopConfig struct { + ArchiveUris *[]string `puppet:"name=>'archive_uris'"` + Args *[]string + FileUris *[]string `puppet:"name=>'file_uris'"` + JarFileUris *[]string `puppet:"name=>'jar_file_uris'"` + LoggingConfig *Config `puppet:"name=>'logging_config'"` + MainClass *string `puppet:"name=>'main_class'"` + 
MainJarFileUri *string `puppet:"name=>'main_jar_file_uri'"` + Properties *map[string]string +} + +type HealthCheck struct { + Port *int64 + ProxyHeader *string `puppet:"name=>'proxy_header'"` + Request *string + Response *string +} + +type Hint struct { + HumanReadableName string `puppet:"name=>'human_readable_name'"` +} + +type HiveConfig struct { + ContinueOnFailure *bool `puppet:"name=>'continue_on_failure'"` + JarFileUris *[]string `puppet:"name=>'jar_file_uris'"` + Properties *map[string]string + QueryFileUri *string `puppet:"name=>'query_file_uri'"` + QueryList *[]string `puppet:"name=>'query_list'"` + ScriptVariables *map[string]string `puppet:"name=>'script_variables'"` +} + +type HostRule struct { + Hosts []string + PathMatcher string `puppet:"name=>'path_matcher'"` + Description *string +} + +type HttpCheck struct { + AuthInfo *AuthInfo `puppet:"name=>'auth_info'"` + Headers *map[string]string + MaskHeaders *bool `puppet:"name=>'mask_headers'"` + Path *string + Port *int64 + UseSsl *bool `puppet:"name=>'use_ssl'"` +} + +type HttpConfig struct { + HttpEnabledState string `puppet:"name=>'http_enabled_state'"` +} + +type Iap struct { + Oauth2ClientId string `puppet:"name=>'oauth2_client_id'"` + Oauth2ClientSecret string `puppet:"name=>'oauth2_client_secret'"` +} + +type Info struct { + Tag8021q *int64 +} + +type InstanceSettings struct { + Tier string + ActivationPolicy *string `puppet:"name=>'activation_policy'"` + AuthorizedGaeApplications *[]string `puppet:"name=>'authorized_gae_applications'"` + AvailabilityType *string `puppet:"name=>'availability_type'"` + BackupConfiguration *BackupConfiguration `puppet:"name=>'backup_configuration'"` + CrashSafeReplication *bool `puppet:"name=>'crash_safe_replication'"` + DatabaseFlags *[]Flags `puppet:"name=>'database_flags'"` + DiskAutoresize *bool `puppet:"name=>'disk_autoresize'"` + DiskSize *int64 `puppet:"name=>'disk_size'"` + DiskType *string `puppet:"name=>'disk_type'"` + IpConfiguration *IpConfiguration `puppet:"name=>'ip_configuration'"` + LocationPreference *Preference `puppet:"name=>'location_preference'"` + MaintenanceWindow *MaintenanceWindow `puppet:"name=>'maintenance_window'"` + PricingPlan *string `puppet:"name=>'pricing_plan'"` + ReplicationType *string `puppet:"name=>'replication_type'"` + UserLabels *map[string]string `puppet:"name=>'user_labels'"` + Version *int64 +} + +type Interface struct { + AccessConfig *[]AccessConfig `puppet:"name=>'access_config'"` + AliasIpRange *[]Range `puppet:"name=>'alias_ip_range'"` + Name *string + Network *string + NetworkIp *string `puppet:"name=>'network_ip'"` + Subnetwork *string + SubnetworkProject *string `puppet:"name=>'subnetwork_project'"` +} + +type InterfaceAccessConfig struct { + NatIp *string `puppet:"name=>'nat_ip'"` + NetworkTier *string `puppet:"name=>'network_tier'"` +} + +type IpConfiguration struct { + AuthorizedNetworks *[]AuthorizedNetworks `puppet:"name=>'authorized_networks'"` + Ipv4Enabled *bool `puppet:"name=>'ipv4_enabled'"` + PrivateNetwork *string `puppet:"name=>'private_network'"` + RequireSsl *bool `puppet:"name=>'require_ssl'"` +} + +type IpRange struct { + IpCidrRange string `puppet:"name=>'ip_cidr_range'"` + RangeName string `puppet:"name=>'range_name'"` +} + +type JobScheduling struct { + MaxFailuresPerHour *int64 `puppet:"name=>'max_failures_per_hour'"` +} + +type Key struct { + RawKey *string `puppet:"name=>'raw_key'"` + Sha256 *string +} + +type KeyPolicy struct { + IncludeHost *bool `puppet:"name=>'include_host'"` + IncludeProtocol *bool 
`puppet:"name=>'include_protocol'"` + IncludeQueryString *bool `puppet:"name=>'include_query_string'"` + QueryStringBlacklist *[]string `puppet:"name=>'query_string_blacklist'"` + QueryStringWhitelist *[]string `puppet:"name=>'query_string_whitelist'"` +} + +type Keys struct { + AsciiArmoredPgpPublicKey string `puppet:"name=>'ascii_armored_pgp_public_key'"` + Comment *string + Id *string +} + +type LifecycleRule struct { + Action RuleAction + Condition Condition +} + +type ListPolicy struct { + Allow *AllowDeny + Deny *AllowDeny + SuggestedValue *string `puppet:"name=>'suggested_value'"` +} + +type Logging struct { + LogBucket string `puppet:"name=>'log_bucket'"` + LogObjectPrefix *string `puppet:"name=>'log_object_prefix'"` +} + +type MaintenancePolicy struct { + DailyMaintenanceWindow *Window `puppet:"name=>'daily_maintenance_window'"` +} + +type MaintenanceWindow struct { + Day *int64 + Hour *int64 + UpdateTrack *string `puppet:"name=>'update_track'"` +} + +type Management struct { + AutoRepair *bool `puppet:"name=>'auto_repair'"` + AutoUpgrade *bool `puppet:"name=>'auto_upgrade'"` +} + +type MasterConfig struct { + DiskConfig *DiskConfig `puppet:"name=>'disk_config'"` + InstanceNames *[]string `puppet:"name=>'instance_names'"` + MachineType *string `puppet:"name=>'machine_type'"` + NumInstances *int64 `puppet:"name=>'num_instances'"` +} + +type Match struct { + VersionedExpr string `puppet:"name=>'versioned_expr'"` + Config *MatchConfig +} + +type MatchConfig struct { + SrcIpRanges []string `puppet:"name=>'src_ip_ranges'"` +} + +type Matcher struct { + DefaultService string `puppet:"name=>'default_service'"` + Name string + Description *string + PathRule *[]PathRule `puppet:"name=>'path_rule'"` +} + +type Matchers struct { + Content *string +} + +type Methods struct { + Name *string + RequestType *string `puppet:"name=>'request_type'"` + ResponseType *string `puppet:"name=>'response_type'"` + Syntax *string +} + +type Metric struct { + Name string + Target float64 + Type string +} + +type MqttConfig struct { + MqttEnabledState string `puppet:"name=>'mqtt_enabled_state'"` +} + +type NetworkInterface struct { + AccessConfig *[]InterfaceAccessConfig `puppet:"name=>'access_config'"` + AliasIpRange *[]Range `puppet:"name=>'alias_ip_range'"` + Network *string + NetworkIp *string `puppet:"name=>'network_ip'"` + Subnetwork *string + SubnetworkProject *string `puppet:"name=>'subnetwork_project'"` +} + +type NetworkPolicy struct { + Enabled *bool + Provider *string +} + +type Networks struct { + Modes []string + Network string + IpAddresses *[]string `puppet:"name=>'ip_addresses'"` + ReservedIpRange *string `puppet:"name=>'reserved_ip_range'"` +} + +type NetworksConfig struct { + CidrBlocks *[]Blocks `puppet:"name=>'cidr_blocks'"` +} + +type NodeConfig struct { + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + DiskType *string `puppet:"name=>'disk_type'"` + GuestAccelerator *[]Accelerator `puppet:"name=>'guest_accelerator'"` + ImageType *string `puppet:"name=>'image_type'"` + Labels *map[string]string + LocalSsdCount *int64 `puppet:"name=>'local_ssd_count'"` + MachineType *string `puppet:"name=>'machine_type'"` + Metadata *map[string]string + MinCpuPlatform *string `puppet:"name=>'min_cpu_platform'"` + OauthScopes *[]string `puppet:"name=>'oauth_scopes'"` + Preemptible *bool + ServiceAccount *string `puppet:"name=>'service_account'"` + Tags *[]string +} + +type Note struct { + NoteReference string `puppet:"name=>'note_reference'"` + DelegationServiceAccountEmail *string 
`puppet:"name=>'delegation_service_account_email'"` + PublicKeys *[]Keys `puppet:"name=>'public_keys'"` +} + +type NotificationConfig struct { + PubsubTopicName string `puppet:"name=>'pubsub_topic_name'"` +} + +type Params struct { + Image *string + Size *int64 + Type *string +} + +type Partitioning struct { + Type string + ExpirationMs *int64 `puppet:"name=>'expiration_ms'"` + Field *string +} + +type PathRule struct { + Paths []string + Service string +} + +type Patterns struct { + NamePattern *string `puppet:"name=>'name_pattern'"` +} + +type PigConfig struct { + ContinueOnFailure *bool `puppet:"name=>'continue_on_failure'"` + JarFileUris *[]string `puppet:"name=>'jar_file_uris'"` + LoggingConfig *Config `puppet:"name=>'logging_config'"` + Properties *map[string]string + QueryFileUri *string `puppet:"name=>'query_file_uri'"` + QueryList *[]string `puppet:"name=>'query_list'"` + ScriptVariables *map[string]string `puppet:"name=>'script_variables'"` +} + +type Placement struct { + ClusterName string `puppet:"name=>'cluster_name'"` + ClusterUuid *string `puppet:"name=>'cluster_uuid'"` +} + +type Policy struct { + Enforced bool +} + +type PolicyRule struct { + Action string + Priority int64 + Description *string + Match *Match + Preview *bool +} + +type Pool struct { + Autoscaling *Autoscaling + InitialNodeCount *int64 `puppet:"name=>'initial_node_count'"` + InstanceGroupUrls *[]string `puppet:"name=>'instance_group_urls'"` + Management *Management + Name *string + NodeConfig *NodeConfig `puppet:"name=>'node_config'"` + NodeCount *int64 `puppet:"name=>'node_count'"` + Version *string +} + +type Port struct { + Name string + Port int64 +} + +type Preference struct { + FollowGaeApplication *string `puppet:"name=>'follow_gae_application'"` + Zone *string +} + +type PushConfig struct { + PushEndpoint string `puppet:"name=>'push_endpoint'"` + Attributes *map[string]string +} + +type PysparkConfig struct { + MainPythonFileUri string `puppet:"name=>'main_python_file_uri'"` + ArchiveUris *[]string `puppet:"name=>'archive_uris'"` + Args *[]string + FileUris *[]string `puppet:"name=>'file_uris'"` + JarFileUris *[]string `puppet:"name=>'jar_file_uris'"` + LoggingConfig *Config `puppet:"name=>'logging_config'"` + Properties *map[string]string + PythonFileUris *[]string `puppet:"name=>'python_file_uris'"` +} + +type Range struct { + IpCidrRange string `puppet:"name=>'ip_cidr_range'"` + SubnetworkRangeName *string `puppet:"name=>'subnetwork_range_name'"` +} + +type Ranges struct { + Description *string + Range *string +} + +type Record struct { + MutateTime *string `puppet:"name=>'mutate_time'"` + MutatedBy *string `puppet:"name=>'mutated_by'"` +} + +type Reference struct { + JobId *string `puppet:"name=>'job_id'"` +} + +type Resource struct { + Labels map[string]string + Type string +} + +type RestorePolicy struct { + Default bool +} + +type Rule struct { + Domain *string + Path *string + Service *string +} + +type RuleAction struct { + Type string + StorageClass *string `puppet:"name=>'storage_class'"` +} + +type Rules struct { + Cluster string + EnforcementMode *string `puppet:"name=>'enforcement_mode'"` + EvaluationMode *string `puppet:"name=>'evaluation_mode'"` + RequireAttestationsBy *[]string `puppet:"name=>'require_attestations_by'"` +} + +type Scheduling struct { + AutomaticRestart *bool `puppet:"name=>'automatic_restart'"` + OnHostMaintenance *string `puppet:"name=>'on_host_maintenance'"` + Preemptible *bool +} + +type ScratchDisk struct { + Interface *string +} + +type ServiceBackend struct { 
+ Description *string + Group *string +} + +type Settings struct { + SplitHealthChecks *bool `puppet:"name=>'split_health_checks'"` +} + +type Shares struct { + CapacityGb int64 `puppet:"name=>'capacity_gb'"` + Name string +} + +type SoftwareConfig struct { + AirflowConfigOverrides *map[string]string `puppet:"name=>'airflow_config_overrides'"` + EnvVariables *map[string]string `puppet:"name=>'env_variables'"` + ImageVersion *string `puppet:"name=>'image_version'"` + PypiPackages *map[string]string `puppet:"name=>'pypi_packages'"` +} + +type SparksqlConfig struct { + JarFileUris *[]string `puppet:"name=>'jar_file_uris'"` + LoggingConfig *Config `puppet:"name=>'logging_config'"` + Properties *map[string]string + QueryFileUri *string `puppet:"name=>'query_file_uri'"` + QueryList *[]string `puppet:"name=>'query_list'"` + ScriptVariables *map[string]string `puppet:"name=>'script_variables'"` +} + +type Status struct { + Details *string + State *string + StateStartTime *string `puppet:"name=>'state_start_time'"` + Substate *string +} + +type Step struct { + Args *string + Name *string +} + +type Subnetwork struct { + Name string + SecondaryIpRangeNames *[]string `puppet:"name=>'secondary_ip_range_names'"` + SourceIpRangesToNat *[]string `puppet:"name=>'source_ip_ranges_to_nat'"` +} + +type TableView struct { + Query string + UseLegacySql *bool `puppet:"name=>'use_legacy_sql'"` +} + +type TcpCheck struct { + Port int64 +} + +type Team struct { + ProjectNumber *string `puppet:"name=>'project_number'"` + Team *string +} + +type Template struct { + BranchName *string `puppet:"name=>'branch_name'"` + CommitSha *string `puppet:"name=>'commit_sha'"` + Dir *string + Project *string + RepoName *string `puppet:"name=>'repo_name'"` + TagName *string `puppet:"name=>'tag_name'"` +} + +type TemplateAttachedDisk struct { + Source string + DeviceName *string `puppet:"name=>'device_name'"` + DiskEncryptionKeyRaw *string `puppet:"name=>'disk_encryption_key_raw'"` + DiskEncryptionKeySha256 *string `puppet:"name=>'disk_encryption_key_sha256'"` + Mode *string +} + +type TemplateBootDisk struct { + AutoDelete *bool `puppet:"name=>'auto_delete'"` + DeviceName *string `puppet:"name=>'device_name'"` + DiskEncryptionKeyRaw *string `puppet:"name=>'disk_encryption_key_raw'"` + DiskEncryptionKeySha256 *string `puppet:"name=>'disk_encryption_key_sha256'"` + InitializeParams *Params `puppet:"name=>'initialize_params'"` + Source *string +} + +type TemplateDisk struct { + AutoDelete *bool `puppet:"name=>'auto_delete'"` + Boot *bool + DeviceName *string `puppet:"name=>'device_name'"` + DiskEncryptionKey *EncryptionKey `puppet:"name=>'disk_encryption_key'"` + DiskName *string `puppet:"name=>'disk_name'"` + DiskSizeGb *int64 `puppet:"name=>'disk_size_gb'"` + DiskType *string `puppet:"name=>'disk_type'"` + Interface *string + Mode *string + Source *string + SourceImage *string `puppet:"name=>'source_image'"` + Type *string +} + +type TemplateScheduling struct { + AutomaticRestart *bool `puppet:"name=>'automatic_restart'"` + OnHostMaintenance *string `puppet:"name=>'on_host_maintenance'"` + Preemptible *bool +} + +type TemplateScratchDisk struct { + Interface *string +} + +type Test struct { + Host string + Path string + Service string + Description *string +} + +type Threshold struct { + Comparison string + Duration string + Aggregations *[]Aggregations + DenominatorAggregations *[]Aggregations `puppet:"name=>'denominator_aggregations'"` + DenominatorFilter *string `puppet:"name=>'denominator_filter'"` + Filter *string + 
ThresholdValue *float64 `puppet:"name=>'threshold_value'"` + Trigger *Trigger +} + +type Trigger struct { + Count *int64 + Percent *float64 +} + +type Utilization struct { + Target float64 +} + +type Versioning struct { + Enabled *bool +} + +type View struct { + DatasetId string `puppet:"name=>'dataset_id'"` + ProjectId string `puppet:"name=>'project_id'"` + TableId string `puppet:"name=>'table_id'"` +} + +type Website struct { + MainPageSuffix *string `puppet:"name=>'main_page_suffix'"` + NotFoundPage *string `puppet:"name=>'not_found_page'"` +} + +type Window struct { + StartTime string `puppet:"name=>'start_time'"` + Duration *string +} + +type WorkerConfig struct { + DiskConfig *ConfigDiskConfig `puppet:"name=>'disk_config'"` + InstanceNames *[]string `puppet:"name=>'instance_names'"` + NumInstances *int64 `puppet:"name=>'num_instances'"` +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Google::App_engine_application"), reflect.TypeOf(&AppEngineApplication{})) + ir.RegisterType(load("Google::Bigquery_dataset"), reflect.TypeOf(&BigqueryDataset{})) + ir.RegisterType(load("Google::Bigquery_table"), reflect.TypeOf(&BigqueryTable{})) + ir.RegisterType(load("Google::Bigtable_instance"), reflect.TypeOf(&BigtableInstance{})) + ir.RegisterType(load("Google::Bigtable_table"), reflect.TypeOf(&BigtableTable{})) + ir.RegisterType(load("Google::Billing_account_iam_binding"), reflect.TypeOf(&BillingAccountIamBinding{})) + ir.RegisterType(load("Google::Billing_account_iam_member"), reflect.TypeOf(&BillingAccountIamMember{})) + ir.RegisterType(load("Google::Billing_account_iam_policy"), reflect.TypeOf(&BillingAccountIamPolicy{})) + ir.RegisterType(load("Google::Binary_authorization_attestor"), reflect.TypeOf(&BinaryAuthorizationAttestor{})) + ir.RegisterType(load("Google::Binary_authorization_policy"), reflect.TypeOf(&BinaryAuthorizationPolicy{})) + ir.RegisterType(load("Google::Cloudbuild_trigger"), reflect.TypeOf(&CloudbuildTrigger{})) + ir.RegisterType(load("Google::Cloudfunctions_function"), reflect.TypeOf(&CloudfunctionsFunction{})) + ir.RegisterType(load("Google::Cloudiot_registry"), reflect.TypeOf(&CloudiotRegistry{})) + ir.RegisterType(load("Google::Composer_environment"), reflect.TypeOf(&ComposerEnvironment{})) + ir.RegisterType(load("Google::Compute_address"), reflect.TypeOf(&ComputeAddress{})) + ir.RegisterType(load("Google::Compute_attached_disk"), reflect.TypeOf(&ComputeAttachedDisk{})) + ir.RegisterType(load("Google::Compute_autoscaler"), reflect.TypeOf(&ComputeAutoscaler{})) + ir.RegisterType(load("Google::Compute_backend_bucket"), reflect.TypeOf(&ComputeBackendBucket{})) + ir.RegisterType(load("Google::Compute_backend_service"), reflect.TypeOf(&ComputeBackendService{})) + ir.RegisterType(load("Google::Compute_disk"), reflect.TypeOf(&ComputeDisk{})) + ir.RegisterType(load("Google::Compute_firewall"), reflect.TypeOf(&ComputeFirewall{})) + ir.RegisterType(load("Google::Compute_forwarding_rule"), reflect.TypeOf(&ComputeForwardingRule{})) + ir.RegisterType(load("Google::Compute_global_address"), reflect.TypeOf(&ComputeGlobalAddress{})) + ir.RegisterType(load("Google::Compute_global_forwarding_rule"), reflect.TypeOf(&ComputeGlobalForwardingRule{})) + ir.RegisterType(load("Google::Compute_health_check"), reflect.TypeOf(&ComputeHealthCheck{})) + 
ir.RegisterType(load("Google::Compute_http_health_check"), reflect.TypeOf(&ComputeHttpHealthCheck{})) + ir.RegisterType(load("Google::Compute_https_health_check"), reflect.TypeOf(&ComputeHttpsHealthCheck{})) + ir.RegisterType(load("Google::Compute_image"), reflect.TypeOf(&ComputeImage{})) + ir.RegisterType(load("Google::Compute_instance"), reflect.TypeOf(&ComputeInstance{})) + ir.RegisterType(load("Google::Compute_instance_from_template"), reflect.TypeOf(&ComputeInstanceFromTemplate{})) + ir.RegisterType(load("Google::Compute_instance_group"), reflect.TypeOf(&ComputeInstanceGroup{})) + ir.RegisterType(load("Google::Compute_instance_group_manager"), reflect.TypeOf(&ComputeInstanceGroupManager{})) + ir.RegisterType(load("Google::Compute_instance_template"), reflect.TypeOf(&ComputeInstanceTemplate{})) + ir.RegisterType(load("Google::Compute_interconnect_attachment"), reflect.TypeOf(&ComputeInterconnectAttachment{})) + ir.RegisterType(load("Google::Compute_network"), reflect.TypeOf(&ComputeNetwork{})) + ir.RegisterType(load("Google::Compute_network_peering"), reflect.TypeOf(&ComputeNetworkPeering{})) + ir.RegisterType(load("Google::Compute_project_metadata"), reflect.TypeOf(&ComputeProjectMetadata{})) + ir.RegisterType(load("Google::Compute_project_metadata_item"), reflect.TypeOf(&ComputeProjectMetadataItem{})) + ir.RegisterType(load("Google::Compute_region_autoscaler"), reflect.TypeOf(&ComputeRegionAutoscaler{})) + ir.RegisterType(load("Google::Compute_region_backend_service"), reflect.TypeOf(&ComputeRegionBackendService{})) + ir.RegisterType(load("Google::Compute_region_disk"), reflect.TypeOf(&ComputeRegionDisk{})) + ir.RegisterType(load("Google::Compute_region_instance_group_manager"), reflect.TypeOf(&ComputeRegionInstanceGroupManager{})) + ir.RegisterType(load("Google::Compute_route"), reflect.TypeOf(&ComputeRoute{})) + ir.RegisterType(load("Google::Compute_router"), reflect.TypeOf(&ComputeRouter{})) + ir.RegisterType(load("Google::Compute_router_interface"), reflect.TypeOf(&ComputeRouterInterface{})) + ir.RegisterType(load("Google::Compute_router_nat"), reflect.TypeOf(&ComputeRouterNat{})) + ir.RegisterType(load("Google::Compute_router_peer"), reflect.TypeOf(&ComputeRouterPeer{})) + ir.RegisterType(load("Google::Compute_security_policy"), reflect.TypeOf(&ComputeSecurityPolicy{})) + ir.RegisterType(load("Google::Compute_shared_vpc_host_project"), reflect.TypeOf(&ComputeSharedVpcHostProject{})) + ir.RegisterType(load("Google::Compute_shared_vpc_service_project"), reflect.TypeOf(&ComputeSharedVpcServiceProject{})) + ir.RegisterType(load("Google::Compute_snapshot"), reflect.TypeOf(&ComputeSnapshot{})) + ir.RegisterType(load("Google::Compute_ssl_certificate"), reflect.TypeOf(&ComputeSslCertificate{})) + ir.RegisterType(load("Google::Compute_ssl_policy"), reflect.TypeOf(&ComputeSslPolicy{})) + ir.RegisterType(load("Google::Compute_subnetwork"), reflect.TypeOf(&ComputeSubnetwork{})) + ir.RegisterType(load("Google::Compute_subnetwork_iam_binding"), reflect.TypeOf(&ComputeSubnetworkIamBinding{})) + ir.RegisterType(load("Google::Compute_subnetwork_iam_member"), reflect.TypeOf(&ComputeSubnetworkIamMember{})) + ir.RegisterType(load("Google::Compute_subnetwork_iam_policy"), reflect.TypeOf(&ComputeSubnetworkIamPolicy{})) + ir.RegisterType(load("Google::Compute_target_http_proxy"), reflect.TypeOf(&ComputeTargetHttpProxy{})) + ir.RegisterType(load("Google::Compute_target_https_proxy"), reflect.TypeOf(&ComputeTargetHttpsProxy{})) + ir.RegisterType(load("Google::Compute_target_pool"), 
reflect.TypeOf(&ComputeTargetPool{})) + ir.RegisterType(load("Google::Compute_target_ssl_proxy"), reflect.TypeOf(&ComputeTargetSslProxy{})) + ir.RegisterType(load("Google::Compute_target_tcp_proxy"), reflect.TypeOf(&ComputeTargetTcpProxy{})) + ir.RegisterType(load("Google::Compute_url_map"), reflect.TypeOf(&ComputeUrlMap{})) + ir.RegisterType(load("Google::Compute_vpn_gateway"), reflect.TypeOf(&ComputeVpnGateway{})) + ir.RegisterType(load("Google::Compute_vpn_tunnel"), reflect.TypeOf(&ComputeVpnTunnel{})) + ir.RegisterType(load("Google::Container_analysis_note"), reflect.TypeOf(&ContainerAnalysisNote{})) + ir.RegisterType(load("Google::Container_cluster"), reflect.TypeOf(&ContainerCluster{})) + ir.RegisterType(load("Google::Container_node_pool"), reflect.TypeOf(&ContainerNodePool{})) + ir.RegisterType(load("Google::Dataflow_job"), reflect.TypeOf(&DataflowJob{})) + ir.RegisterType(load("Google::Dataproc_cluster"), reflect.TypeOf(&DataprocCluster{})) + ir.RegisterType(load("Google::Dataproc_job"), reflect.TypeOf(&DataprocJob{})) + ir.RegisterType(load("Google::Dns_managed_zone"), reflect.TypeOf(&DnsManagedZone{})) + ir.RegisterType(load("Google::Dns_record_set"), reflect.TypeOf(&DnsRecordSet{})) + ir.RegisterType(load("Google::Endpoints_service"), reflect.TypeOf(&EndpointsService{})) + ir.RegisterType(load("Google::Filestore_instance"), reflect.TypeOf(&FilestoreInstance{})) + ir.RegisterType(load("Google::Folder"), reflect.TypeOf(&Folder{})) + ir.RegisterType(load("Google::Folder_iam_binding"), reflect.TypeOf(&FolderIamBinding{})) + ir.RegisterType(load("Google::Folder_iam_member"), reflect.TypeOf(&FolderIamMember{})) + ir.RegisterType(load("Google::Folder_iam_policy"), reflect.TypeOf(&FolderIamPolicy{})) + ir.RegisterType(load("Google::Folder_organization_policy"), reflect.TypeOf(&FolderOrganizationPolicy{})) + ir.RegisterType(load("Google::GenericHandler"), reflect.TypeOf(&GenericHandler{})) + ir.RegisterType(load("Google::Kms_crypto_key"), reflect.TypeOf(&KmsCryptoKey{})) + ir.RegisterType(load("Google::Kms_crypto_key_iam_binding"), reflect.TypeOf(&KmsCryptoKeyIamBinding{})) + ir.RegisterType(load("Google::Kms_crypto_key_iam_member"), reflect.TypeOf(&KmsCryptoKeyIamMember{})) + ir.RegisterType(load("Google::Kms_key_ring"), reflect.TypeOf(&KmsKeyRing{})) + ir.RegisterType(load("Google::Kms_key_ring_iam_binding"), reflect.TypeOf(&KmsKeyRingIamBinding{})) + ir.RegisterType(load("Google::Kms_key_ring_iam_member"), reflect.TypeOf(&KmsKeyRingIamMember{})) + ir.RegisterType(load("Google::Kms_key_ring_iam_policy"), reflect.TypeOf(&KmsKeyRingIamPolicy{})) + ir.RegisterType(load("Google::Logging_billing_account_exclusion"), reflect.TypeOf(&LoggingBillingAccountExclusion{})) + ir.RegisterType(load("Google::Logging_billing_account_sink"), reflect.TypeOf(&LoggingBillingAccountSink{})) + ir.RegisterType(load("Google::Logging_folder_exclusion"), reflect.TypeOf(&LoggingFolderExclusion{})) + ir.RegisterType(load("Google::Logging_folder_sink"), reflect.TypeOf(&LoggingFolderSink{})) + ir.RegisterType(load("Google::Logging_organization_exclusion"), reflect.TypeOf(&LoggingOrganizationExclusion{})) + ir.RegisterType(load("Google::Logging_organization_sink"), reflect.TypeOf(&LoggingOrganizationSink{})) + ir.RegisterType(load("Google::Logging_project_exclusion"), reflect.TypeOf(&LoggingProjectExclusion{})) + ir.RegisterType(load("Google::Logging_project_sink"), reflect.TypeOf(&LoggingProjectSink{})) + ir.RegisterType(load("Google::Monitoring_alert_policy"), reflect.TypeOf(&MonitoringAlertPolicy{})) + 
ir.RegisterType(load("Google::Monitoring_group"), reflect.TypeOf(&MonitoringGroup{})) + ir.RegisterType(load("Google::Monitoring_notification_channel"), reflect.TypeOf(&MonitoringNotificationChannel{})) + ir.RegisterType(load("Google::Monitoring_uptime_check_config"), reflect.TypeOf(&MonitoringUptimeCheckConfig{})) + ir.RegisterType(load("Google::Organization_iam_binding"), reflect.TypeOf(&OrganizationIamBinding{})) + ir.RegisterType(load("Google::Organization_iam_custom_role"), reflect.TypeOf(&OrganizationIamCustomRole{})) + ir.RegisterType(load("Google::Organization_iam_member"), reflect.TypeOf(&OrganizationIamMember{})) + ir.RegisterType(load("Google::Organization_iam_policy"), reflect.TypeOf(&OrganizationIamPolicy{})) + ir.RegisterType(load("Google::Organization_policy"), reflect.TypeOf(&OrganizationPolicy{})) + ir.RegisterType(load("Google::Project"), reflect.TypeOf(&Project{})) + ir.RegisterType(load("Google::Project_iam_binding"), reflect.TypeOf(&ProjectIamBinding{})) + ir.RegisterType(load("Google::Project_iam_custom_role"), reflect.TypeOf(&ProjectIamCustomRole{})) + ir.RegisterType(load("Google::Project_iam_member"), reflect.TypeOf(&ProjectIamMember{})) + ir.RegisterType(load("Google::Project_iam_policy"), reflect.TypeOf(&ProjectIamPolicy{})) + ir.RegisterType(load("Google::Project_organization_policy"), reflect.TypeOf(&ProjectOrganizationPolicy{})) + ir.RegisterType(load("Google::Project_service"), reflect.TypeOf(&ProjectService{})) + ir.RegisterType(load("Google::Project_services"), reflect.TypeOf(&ProjectServices{})) + ir.RegisterType(load("Google::Project_usage_export_bucket"), reflect.TypeOf(&ProjectUsageExportBucket{})) + ir.RegisterType(load("Google::Pubsub_subscription"), reflect.TypeOf(&PubsubSubscription{})) + ir.RegisterType(load("Google::Pubsub_subscription_iam_binding"), reflect.TypeOf(&PubsubSubscriptionIamBinding{})) + ir.RegisterType(load("Google::Pubsub_subscription_iam_member"), reflect.TypeOf(&PubsubSubscriptionIamMember{})) + ir.RegisterType(load("Google::Pubsub_subscription_iam_policy"), reflect.TypeOf(&PubsubSubscriptionIamPolicy{})) + ir.RegisterType(load("Google::Pubsub_topic"), reflect.TypeOf(&PubsubTopic{})) + ir.RegisterType(load("Google::Pubsub_topic_iam_binding"), reflect.TypeOf(&PubsubTopicIamBinding{})) + ir.RegisterType(load("Google::Pubsub_topic_iam_member"), reflect.TypeOf(&PubsubTopicIamMember{})) + ir.RegisterType(load("Google::Pubsub_topic_iam_policy"), reflect.TypeOf(&PubsubTopicIamPolicy{})) + ir.RegisterType(load("Google::Redis_instance"), reflect.TypeOf(&RedisInstance{})) + ir.RegisterType(load("Google::Resource_manager_lien"), reflect.TypeOf(&ResourceManagerLien{})) + ir.RegisterType(load("Google::Runtimeconfig_config"), reflect.TypeOf(&RuntimeconfigConfig{})) + ir.RegisterType(load("Google::Runtimeconfig_variable"), reflect.TypeOf(&RuntimeconfigVariable{})) + ir.RegisterType(load("Google::Service_account"), reflect.TypeOf(&ServiceAccount{})) + ir.RegisterType(load("Google::Service_account_iam_binding"), reflect.TypeOf(&ServiceAccountIamBinding{})) + ir.RegisterType(load("Google::Service_account_iam_member"), reflect.TypeOf(&ServiceAccountIamMember{})) + ir.RegisterType(load("Google::Service_account_iam_policy"), reflect.TypeOf(&ServiceAccountIamPolicy{})) + ir.RegisterType(load("Google::Service_account_key"), reflect.TypeOf(&ServiceAccountKey{})) + ir.RegisterType(load("Google::Sourcerepo_repository"), reflect.TypeOf(&SourcerepoRepository{})) + ir.RegisterType(load("Google::Spanner_database"), reflect.TypeOf(&SpannerDatabase{})) + 
ir.RegisterType(load("Google::Spanner_database_iam_binding"), reflect.TypeOf(&SpannerDatabaseIamBinding{})) + ir.RegisterType(load("Google::Spanner_database_iam_member"), reflect.TypeOf(&SpannerDatabaseIamMember{})) + ir.RegisterType(load("Google::Spanner_database_iam_policy"), reflect.TypeOf(&SpannerDatabaseIamPolicy{})) + ir.RegisterType(load("Google::Spanner_instance"), reflect.TypeOf(&SpannerInstance{})) + ir.RegisterType(load("Google::Spanner_instance_iam_binding"), reflect.TypeOf(&SpannerInstanceIamBinding{})) + ir.RegisterType(load("Google::Spanner_instance_iam_member"), reflect.TypeOf(&SpannerInstanceIamMember{})) + ir.RegisterType(load("Google::Spanner_instance_iam_policy"), reflect.TypeOf(&SpannerInstanceIamPolicy{})) + ir.RegisterType(load("Google::Sql_database"), reflect.TypeOf(&SqlDatabase{})) + ir.RegisterType(load("Google::Sql_database_instance"), reflect.TypeOf(&SqlDatabaseInstance{})) + ir.RegisterType(load("Google::Sql_ssl_cert"), reflect.TypeOf(&SqlSslCert{})) + ir.RegisterType(load("Google::Sql_user"), reflect.TypeOf(&SqlUser{})) + ir.RegisterType(load("Google::Storage_bucket"), reflect.TypeOf(&StorageBucket{})) + ir.RegisterType(load("Google::Storage_bucket_acl"), reflect.TypeOf(&StorageBucketAcl{})) + ir.RegisterType(load("Google::Storage_bucket_iam_binding"), reflect.TypeOf(&StorageBucketIamBinding{})) + ir.RegisterType(load("Google::Storage_bucket_iam_member"), reflect.TypeOf(&StorageBucketIamMember{})) + ir.RegisterType(load("Google::Storage_bucket_iam_policy"), reflect.TypeOf(&StorageBucketIamPolicy{})) + ir.RegisterType(load("Google::Storage_bucket_object"), reflect.TypeOf(&StorageBucketObject{})) + ir.RegisterType(load("Google::Storage_default_object_access_control"), reflect.TypeOf(&StorageDefaultObjectAccessControl{})) + ir.RegisterType(load("Google::Storage_default_object_acl"), reflect.TypeOf(&StorageDefaultObjectAcl{})) + ir.RegisterType(load("Google::Storage_notification"), reflect.TypeOf(&StorageNotification{})) + ir.RegisterType(load("Google::Storage_object_access_control"), reflect.TypeOf(&StorageObjectAccessControl{})) + ir.RegisterType(load("Google::Storage_object_acl"), reflect.TypeOf(&StorageObjectAcl{})) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/identity/identity.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/identity/identity.go new file mode 100644 index 0000000..b538751 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/identity/identity.go @@ -0,0 +1,24 @@ +// this file is generated +package identity + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type Service struct { +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Identity::Service"), reflect.TypeOf(&Service{})) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/kubernetes/kubernetes.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/kubernetes/kubernetes.go new file mode 100644 index 0000000..26aa3d0 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/kubernetes/kubernetes.go @@ -0,0 +1,834 @@ +// this file is generated +package kubernetes + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type ClusterRoleBinding struct { + RoleRef map[string]RoleRef `puppet:"name=>'role_ref'"` + Subject 
[]Subject + ClusterRoleBindingId *string `puppet:"name=>'cluster_role_binding_id'"` + Metadata *VolumeMetadata +} + +type ConfigMap struct { + ConfigMapId *string `puppet:"name=>'config_map_id'"` + Data *map[string]string + Metadata *Metadata +} + +type Deployment struct { + DeploymentId *string `puppet:"name=>'deployment_id'"` + Metadata *Metadata + Spec *DeploymentSpec +} + +type GenericHandler struct { +} + +type HorizontalPodAutoscaler struct { + HorizontalPodAutoscalerId *string `puppet:"name=>'horizontal_pod_autoscaler_id'"` + Metadata *Metadata + Spec *AutoscalerSpec +} + +type LimitRange struct { + LimitRangeId *string `puppet:"name=>'limit_range_id'"` + Metadata *Metadata + Spec *RangeSpec +} + +type Namespace struct { + NamespaceId *string `puppet:"name=>'namespace_id'"` + Metadata *NamespaceMetadata +} + +type NetworkPolicy struct { + NetworkPolicyId *string `puppet:"name=>'network_policy_id'"` + Metadata *Metadata + Spec *PolicySpec +} + +type PersistentVolume struct { + Spec []VolumeSpec + PersistentVolumeId *string `puppet:"name=>'persistent_volume_id'"` + Metadata *VolumeMetadata +} + +type PersistentVolumeClaim struct { + PersistentVolumeClaimId *string `puppet:"name=>'persistent_volume_claim_id'"` + Metadata *Metadata + Spec *ClaimSpec + WaitUntilBound *bool `puppet:"name=>'wait_until_bound'"` +} + +type Pod struct { + PodId *string `puppet:"name=>'pod_id'"` + Metadata *Metadata + Spec *Spec +} + +type ReplicationController struct { + ReplicationControllerId *string `puppet:"name=>'replication_controller_id'"` + Metadata *Metadata + Spec *ControllerSpec +} + +type ResourceQuota struct { + ResourceQuotaId *string `puppet:"name=>'resource_quota_id'"` + Metadata *Metadata + Spec *QuotaSpec +} + +type Role struct { + Rule []Rule + RoleId *string `puppet:"name=>'role_id'"` + Metadata *Metadata +} + +type RoleBinding struct { + RoleRef map[string]RoleRef `puppet:"name=>'role_ref'"` + Subject []Subject + RoleBindingId *string `puppet:"name=>'role_binding_id'"` + Metadata *BindingMetadata +} + +type Secret struct { + SecretId *string `puppet:"name=>'secret_id'"` + Data *map[string]string + Metadata *Metadata + Type *string +} + +type Service struct { + ServiceId *string `puppet:"name=>'service_id'"` + LoadBalancerIngress *[]BalancerIngress `puppet:"name=>'load_balancer_ingress'"` + Metadata *Metadata + Spec *ServiceSpec +} + +type ServiceAccount struct { + ServiceAccountId *string `puppet:"name=>'service_account_id'"` + AutomountServiceAccountToken *bool `puppet:"name=>'automount_service_account_token'"` + DefaultSecretName *string `puppet:"name=>'default_secret_name'"` + ImagePullSecret *[]SecretRef `puppet:"name=>'image_pull_secret'"` + Metadata *Metadata + Secret *[]SecretRef +} + +type StatefulSet struct { + Spec SetSpec + StatefulSetId *string `puppet:"name=>'stateful_set_id'"` + Metadata *Metadata +} + +type StorageClass struct { + StorageProvisioner string `puppet:"name=>'storage_provisioner'"` + StorageClassId *string `puppet:"name=>'storage_class_id'"` + Metadata *NamespaceMetadata + Parameters *map[string]string + ReclaimPolicy *string `puppet:"name=>'reclaim_policy'"` + VolumeBindingMode *string `puppet:"name=>'volume_binding_mode'"` +} + +type Affinity struct { + Required *Required +} + +type Api struct { + DefaultMode *int64 `puppet:"name=>'default_mode'"` + Items *[]ApiItems +} + +type ApiItems struct { + Path string + FieldRef *FieldRef `puppet:"name=>'field_ref'"` + Mode *int64 + ResourceFieldRef *ItemsResourceFieldRef `puppet:"name=>'resource_field_ref'"` +} + 
+type AutoscalerSpec struct { + MaxReplicas int64 `puppet:"name=>'max_replicas'"` + MinReplicas *int64 `puppet:"name=>'min_replicas'"` + ScaleTargetRef *TargetRef `puppet:"name=>'scale_target_ref'"` + TargetCpuUtilizationPercentage *int64 `puppet:"name=>'target_cpu_utilization_percentage'"` +} + +type BalancerIngress struct { + Hostname *string + Ip *string +} + +type BindingMetadata struct { + Annotations *map[string]string + Generation *int64 + Labels *map[string]string + Name *string + Namespace *string + ResourceVersion *string `puppet:"name=>'resource_version'"` + SelfLink *string `puppet:"name=>'self_link'"` + Uid *string +} + +type Block struct { + Cidr *string + Except *[]string +} + +type Capabilities struct { + Add *[]string + Drop *[]string +} + +type CephFs struct { + Monitors []string + Path *string + ReadOnly *bool `puppet:"name=>'read_only'"` + SecretFile *string `puppet:"name=>'secret_file'"` + SecretRef *SecretRef `puppet:"name=>'secret_ref'"` + User *string +} + +type Cinder struct { + VolumeId string `puppet:"name=>'volume_id'"` + FsType *string `puppet:"name=>'fs_type'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Claim struct { + ClaimName *string `puppet:"name=>'claim_name'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type ClaimSpec struct { + AccessModes []string `puppet:"name=>'access_modes'"` + Resources *SpecResources + Selector *Selector + StorageClassName *string `puppet:"name=>'storage_class_name'"` + VolumeName *string `puppet:"name=>'volume_name'"` +} + +type ClaimTemplate struct { + Metadata *Metadata + Spec *ClaimSpec +} + +type Container struct { + Name string + Args *[]string + Command *[]string + Env *[]Env + EnvFrom *[]EnvFrom `puppet:"name=>'env_from'"` + Image *string + ImagePullPolicy *string `puppet:"name=>'image_pull_policy'"` + Lifecycle *Lifecycle + LivenessProbe *Probe `puppet:"name=>'liveness_probe'"` + Port *[]Port + ReadinessProbe *Probe `puppet:"name=>'readiness_probe'"` + Resources *Resources + SecurityContext *Context `puppet:"name=>'security_context'"` + Stdin *bool + StdinOnce *bool `puppet:"name=>'stdin_once'"` + TerminationMessagePath *string `puppet:"name=>'termination_message_path'"` + Tty *bool + VolumeMount *[]Mount `puppet:"name=>'volume_mount'"` + WorkingDir *string `puppet:"name=>'working_dir'"` +} + +type Context struct { + AllowPrivilegeEscalation *bool `puppet:"name=>'allow_privilege_escalation'"` + Capabilities *Capabilities + Privileged *bool + ReadOnlyRootFilesystem *bool `puppet:"name=>'read_only_root_filesystem'"` + RunAsNonRoot *bool `puppet:"name=>'run_as_non_root'"` + RunAsUser *int64 `puppet:"name=>'run_as_user'"` + SeLinuxOptions *Options `puppet:"name=>'se_linux_options'"` +} + +type ControllerSpec struct { + Selector map[string]string + MinReadySeconds *int64 `puppet:"name=>'min_ready_seconds'"` + Replicas *int64 + Template *Template +} + +type DeploymentSpec struct { + MinReadySeconds *int64 `puppet:"name=>'min_ready_seconds'"` + Paused *bool + ProgressDeadlineSeconds *int64 `puppet:"name=>'progress_deadline_seconds'"` + Replicas *int64 + RevisionHistoryLimit *int64 `puppet:"name=>'revision_history_limit'"` + Selector *Selector + Strategy *Strategy + Template *Template +} + +type Dir struct { + Medium *string +} + +type Disk struct { + CachingMode string `puppet:"name=>'caching_mode'"` + DataDiskUri string `puppet:"name=>'data_disk_uri'"` + DiskName string `puppet:"name=>'disk_name'"` + FsType *string `puppet:"name=>'fs_type'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Egress 
struct { + Ports *[]Ports + To *[]ToFrom +} + +type Env struct { + Name string + Value *string + ValueFrom *From `puppet:"name=>'value_from'"` +} + +type EnvFrom struct { + ConfigMapRef *FromSecretRef `puppet:"name=>'config_map_ref'"` + Prefix *string + SecretRef *FromSecretRef `puppet:"name=>'secret_ref'"` +} + +type Exec struct { + Command *[]string +} + +type Expressions struct { + Key *string + Operator *string + Values *[]string +} + +type FieldRef struct { + ApiVersion *string `puppet:"name=>'api_version'"` + FieldPath *string `puppet:"name=>'field_path'"` +} + +type File struct { + SecretName string `puppet:"name=>'secret_name'"` + ShareName string `puppet:"name=>'share_name'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Flocker struct { + DatasetName *string `puppet:"name=>'dataset_name'"` + DatasetUuid *string `puppet:"name=>'dataset_uuid'"` +} + +type From struct { + ConfigMapKeyRef *Ref `puppet:"name=>'config_map_key_ref'"` + FieldRef *FieldRef `puppet:"name=>'field_ref'"` + ResourceFieldRef *ResourceFieldRef `puppet:"name=>'resource_field_ref'"` + SecretKeyRef *Ref `puppet:"name=>'secret_key_ref'"` +} + +type FromSecretRef struct { + Name string + Optional *bool +} + +type Get struct { + Host *string + HttpHeader *[]Header `puppet:"name=>'http_header'"` + Path *string + Port *string + Scheme *string +} + +type Glusterfs struct { + EndpointsName string `puppet:"name=>'endpoints_name'"` + Path string + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Header struct { + Name *string + Value *string +} + +type Ingress struct { + From *[]ToFrom + Ports *[]Ports +} + +type Iscsi struct { + Iqn string + TargetPortal string `puppet:"name=>'target_portal'"` + FsType *string `puppet:"name=>'fs_type'"` + IscsiInterface *string `puppet:"name=>'iscsi_interface'"` + Lun *int64 + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Items struct { + Key *string + Mode *int64 + Path *string +} + +type ItemsResourceFieldRef struct { + ContainerName string `puppet:"name=>'container_name'"` + Resource string + Quantity *string +} + +type Lifecycle struct { + PostStart *[]StartStop `puppet:"name=>'post_start'"` + PreStop *[]StartStop `puppet:"name=>'pre_stop'"` +} + +type Limit struct { + Default *map[string]string + DefaultRequest *map[string]string `puppet:"name=>'default_request'"` + Max *map[string]string + MaxLimitRequestRatio *map[string]string `puppet:"name=>'max_limit_request_ratio'"` + Min *map[string]string + Type *string +} + +type Map struct { + DefaultMode *int64 `puppet:"name=>'default_mode'"` + Items *[]Items + Name *string +} + +type Metadata struct { + Annotations *map[string]string + GenerateName *string `puppet:"name=>'generate_name'"` + Generation *int64 + Labels *map[string]string + Name *string + Namespace *string + ResourceVersion *string `puppet:"name=>'resource_version'"` + SelfLink *string `puppet:"name=>'self_link'"` + Uid *string +} + +type Mount struct { + MountPath string `puppet:"name=>'mount_path'"` + Name string + ReadOnly *bool `puppet:"name=>'read_only'"` + SubPath *string `puppet:"name=>'sub_path'"` +} + +type NamespaceMetadata struct { + Annotations *map[string]string + GenerateName *string `puppet:"name=>'generate_name'"` + Generation *int64 + Labels *map[string]string + Name *string + ResourceVersion *string `puppet:"name=>'resource_version'"` + SelfLink *string `puppet:"name=>'self_link'"` + Uid *string +} + +type Nfs struct { + Path string + Server string + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Options struct { + Level 
*string + Role *string + Type *string + User *string +} + +type PathLocal struct { + Path *string +} + +type PersistentDisk struct { + PdName string `puppet:"name=>'pd_name'"` + FsType *string `puppet:"name=>'fs_type'"` + Partition *int64 + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type PhotonPersistentDisk struct { + PdId string `puppet:"name=>'pd_id'"` + FsType *string `puppet:"name=>'fs_type'"` +} + +type PolicySpec struct { + PolicyTypes []string `puppet:"name=>'policy_types'"` + Egress *[]Egress + Ingress *[]Ingress + PodSelector *Selector `puppet:"name=>'pod_selector'"` +} + +type Port struct { + ContainerPort int64 `puppet:"name=>'container_port'"` + HostIp *string `puppet:"name=>'host_ip'"` + HostPort *int64 `puppet:"name=>'host_port'"` + Name *string + Protocol *string +} + +type Ports struct { + Port *string + Protocol *string +} + +type Probe struct { + Exec *Exec + FailureThreshold *int64 `puppet:"name=>'failure_threshold'"` + HttpGet *Get `puppet:"name=>'http_get'"` + InitialDelaySeconds *int64 `puppet:"name=>'initial_delay_seconds'"` + PeriodSeconds *int64 `puppet:"name=>'period_seconds'"` + SuccessThreshold *int64 `puppet:"name=>'success_threshold'"` + TcpSocket *[]Socket `puppet:"name=>'tcp_socket'"` + TimeoutSeconds *int64 `puppet:"name=>'timeout_seconds'"` +} + +type Quobyte struct { + Registry string + Volume string + Group *string + ReadOnly *bool `puppet:"name=>'read_only'"` + User *string +} + +type QuotaSpec struct { + Hard *map[string]string + Scopes *[]string +} + +type RangeSpec struct { + Limit *[]Limit +} + +type Rbd struct { + CephMonitors []string `puppet:"name=>'ceph_monitors'"` + RbdImage string `puppet:"name=>'rbd_image'"` + FsType *string `puppet:"name=>'fs_type'"` + Keyring *string + RadosUser *string `puppet:"name=>'rados_user'"` + RbdPool *string `puppet:"name=>'rbd_pool'"` + ReadOnly *bool `puppet:"name=>'read_only'"` + SecretRef *SecretRef `puppet:"name=>'secret_ref'"` +} + +type Ref struct { + Key *string + Name *string +} + +type Repo struct { + Directory *string + Repository *string + Revision *string +} + +type RequestsLimits struct { + Cpu *string + Memory *string +} + +type Required struct { + NodeSelectorTerm *[]Term `puppet:"name=>'node_selector_term'"` +} + +type ResourceFieldRef struct { + Resource string + ContainerName *string `puppet:"name=>'container_name'"` +} + +type Resources struct { + Limits *RequestsLimits + Requests *RequestsLimits +} + +type RoleRef struct { + Name string + ApiGroup *string `puppet:"name=>'api_group'"` + Kind *string +} + +type RollingUpdate struct { + Partition *int64 +} + +type Rule struct { + ApiGroups []string `puppet:"name=>'api_groups'"` + Resources []string + Verbs []string + ResourceNames *[]string `puppet:"name=>'resource_names'"` +} + +type SecretRef struct { + Name *string +} + +type Secrets struct { + Name string +} + +type SecurityContext struct { + FsGroup *int64 `puppet:"name=>'fs_group'"` + RunAsNonRoot *bool `puppet:"name=>'run_as_non_root'"` + RunAsUser *int64 `puppet:"name=>'run_as_user'"` + SeLinuxOptions *Options `puppet:"name=>'se_linux_options'"` + SupplementalGroups *[]int64 `puppet:"name=>'supplemental_groups'"` +} + +type Selector struct { + MatchExpressions *[]Expressions `puppet:"name=>'match_expressions'"` + MatchLabels *map[string]string `puppet:"name=>'match_labels'"` +} + +type ServiceSpec struct { + ClusterIp *string `puppet:"name=>'cluster_ip'"` + ExternalIps *[]string `puppet:"name=>'external_ips'"` + ExternalName *string `puppet:"name=>'external_name'"` + LoadBalancerIp 
*string `puppet:"name=>'load_balancer_ip'"` + LoadBalancerSourceRanges *[]string `puppet:"name=>'load_balancer_source_ranges'"` + Port *[]SpecPort + Selector *map[string]string + SessionAffinity *string `puppet:"name=>'session_affinity'"` + Type *string +} + +type SetSpec struct { + ServiceName string `puppet:"name=>'service_name'"` + PodManagementPolicy *string `puppet:"name=>'pod_management_policy'"` + Replicas *int64 + RevisionHistoryLimit *int64 `puppet:"name=>'revision_history_limit'"` + Selector *Selector + Template *SpecTemplate + UpdateStrategy *[]UpdateStrategy `puppet:"name=>'update_strategy'"` + VolumeClaimTemplate *[]ClaimTemplate `puppet:"name=>'volume_claim_template'"` +} + +type Socket struct { + Port string +} + +type Source struct { + AwsElasticBlockStore *Store `puppet:"name=>'aws_elastic_block_store'"` + AzureDisk *Disk `puppet:"name=>'azure_disk'"` + AzureFile *File `puppet:"name=>'azure_file'"` + CephFs *CephFs `puppet:"name=>'ceph_fs'"` + Cinder *Cinder + Fc *VolumeFc + FlexVolume *Volume `puppet:"name=>'flex_volume'"` + Flocker *Flocker + GcePersistentDisk *PersistentDisk `puppet:"name=>'gce_persistent_disk'"` + Glusterfs *Glusterfs + HostPath *PathLocal `puppet:"name=>'host_path'"` + Iscsi *Iscsi + Local *PathLocal + Nfs *Nfs + PhotonPersistentDisk *PhotonPersistentDisk `puppet:"name=>'photon_persistent_disk'"` + Quobyte *Quobyte + Rbd *Rbd + VsphereVolume *VsphereVolume `puppet:"name=>'vsphere_volume'"` +} + +type Spec struct { + ActiveDeadlineSeconds *int64 `puppet:"name=>'active_deadline_seconds'"` + Container *[]Container + DnsPolicy *string `puppet:"name=>'dns_policy'"` + HostIpc *bool `puppet:"name=>'host_ipc'"` + HostNetwork *bool `puppet:"name=>'host_network'"` + HostPid *bool `puppet:"name=>'host_pid'"` + Hostname *string + ImagePullSecrets *[]Secrets `puppet:"name=>'image_pull_secrets'"` + InitContainer *[]Container `puppet:"name=>'init_container'"` + NodeName *string `puppet:"name=>'node_name'"` + NodeSelector *map[string]string `puppet:"name=>'node_selector'"` + RestartPolicy *string `puppet:"name=>'restart_policy'"` + SecurityContext *SecurityContext `puppet:"name=>'security_context'"` + ServiceAccountName *string `puppet:"name=>'service_account_name'"` + Subdomain *string + TerminationGracePeriodSeconds *int64 `puppet:"name=>'termination_grace_period_seconds'"` + Volume *[]SpecVolume +} + +type SpecPort struct { + Port int64 + Name *string + NodePort *int64 `puppet:"name=>'node_port'"` + Protocol *string + TargetPort *string `puppet:"name=>'target_port'"` +} + +type SpecResources struct { + Limits *map[string]string + Requests *map[string]string +} + +type SpecTemplate struct { + Metadata *NamespaceMetadata + Spec *Spec +} + +type SpecVolume struct { + AwsElasticBlockStore *Store `puppet:"name=>'aws_elastic_block_store'"` + AzureDisk *Disk `puppet:"name=>'azure_disk'"` + AzureFile *File `puppet:"name=>'azure_file'"` + CephFs *CephFs `puppet:"name=>'ceph_fs'"` + Cinder *Cinder + ConfigMap *Map `puppet:"name=>'config_map'"` + DownwardApi *Api `puppet:"name=>'downward_api'"` + EmptyDir *Dir `puppet:"name=>'empty_dir'"` + Fc *VolumeFc + FlexVolume *Volume `puppet:"name=>'flex_volume'"` + Flocker *Flocker + GcePersistentDisk *PersistentDisk `puppet:"name=>'gce_persistent_disk'"` + GitRepo *Repo `puppet:"name=>'git_repo'"` + Glusterfs *Glusterfs + HostPath *PathLocal `puppet:"name=>'host_path'"` + Iscsi *Iscsi + Local *PathLocal + Name *string + Nfs *Nfs + PersistentVolumeClaim *Claim `puppet:"name=>'persistent_volume_claim'"` + PhotonPersistentDisk 
*PhotonPersistentDisk `puppet:"name=>'photon_persistent_disk'"` + Quobyte *Quobyte + Rbd *Rbd + Secret *VolumeSecret + VsphereVolume *VsphereVolume `puppet:"name=>'vsphere_volume'"` +} + +type StartStop struct { + Exec *Exec + HttpGet *Get `puppet:"name=>'http_get'"` + TcpSocket *[]Socket `puppet:"name=>'tcp_socket'"` +} + +type Store struct { + VolumeId string `puppet:"name=>'volume_id'"` + FsType *string `puppet:"name=>'fs_type'"` + Partition *int64 + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type Strategy struct { + RollingUpdate *Update `puppet:"name=>'rolling_update'"` + Type *string +} + +type Subject struct { + Kind string + Name string + ApiGroup *string `puppet:"name=>'api_group'"` + Namespace *string +} + +type TargetRef struct { + Kind string + Name string + ApiVersion *string `puppet:"name=>'api_version'"` +} + +type Template struct { + Metadata *Metadata + Spec *Spec +} + +type Term struct { + MatchExpressions *[]Expressions `puppet:"name=>'match_expressions'"` + MatchFields *[]Expressions `puppet:"name=>'match_fields'"` +} + +type ToFrom struct { + IpBlock *Block `puppet:"name=>'ip_block'"` + NamespaceSelector *Selector `puppet:"name=>'namespace_selector'"` + PodSelector *Selector `puppet:"name=>'pod_selector'"` +} + +type Update struct { + MaxSurge *string `puppet:"name=>'max_surge'"` + MaxUnavailable *string `puppet:"name=>'max_unavailable'"` +} + +type UpdateStrategy struct { + RollingUpdate *[]RollingUpdate `puppet:"name=>'rolling_update'"` + Type *string +} + +type Volume struct { + Driver string + FsType *string `puppet:"name=>'fs_type'"` + Options *map[string]string + ReadOnly *bool `puppet:"name=>'read_only'"` + SecretRef *SecretRef `puppet:"name=>'secret_ref'"` +} + +type VolumeFc struct { + Lun int64 + TargetWwNs []string `puppet:"name=>'target_ww_ns'"` + FsType *string `puppet:"name=>'fs_type'"` + ReadOnly *bool `puppet:"name=>'read_only'"` +} + +type VolumeMetadata struct { + Annotations *map[string]string + Generation *int64 + Labels *map[string]string + Name *string + ResourceVersion *string `puppet:"name=>'resource_version'"` + SelfLink *string `puppet:"name=>'self_link'"` + Uid *string +} + +type VolumeSecret struct { + DefaultMode *int64 `puppet:"name=>'default_mode'"` + Items *[]Items + Optional *bool + SecretName *string `puppet:"name=>'secret_name'"` +} + +type VolumeSpec struct { + AccessModes []string `puppet:"name=>'access_modes'"` + Capacity map[string]string + NodeAffinity *Affinity `puppet:"name=>'node_affinity'"` + PersistentVolumeReclaimPolicy *string `puppet:"name=>'persistent_volume_reclaim_policy'"` + PersistentVolumeSource *Source `puppet:"name=>'persistent_volume_source'"` + StorageClassName *string `puppet:"name=>'storage_class_name'"` +} + +type VsphereVolume struct { + VolumePath string `puppet:"name=>'volume_path'"` + FsType *string `puppet:"name=>'fs_type'"` +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Kubernetes::Cluster_role_binding"), reflect.TypeOf(&ClusterRoleBinding{})) + ir.RegisterType(load("Kubernetes::Config_map"), reflect.TypeOf(&ConfigMap{})) + ir.RegisterType(load("Kubernetes::Deployment"), reflect.TypeOf(&Deployment{})) + ir.RegisterType(load("Kubernetes::GenericHandler"), reflect.TypeOf(&GenericHandler{})) + ir.RegisterType(load("Kubernetes::Horizontal_pod_autoscaler"), 
reflect.TypeOf(&HorizontalPodAutoscaler{})) + ir.RegisterType(load("Kubernetes::Limit_range"), reflect.TypeOf(&LimitRange{})) + ir.RegisterType(load("Kubernetes::Namespace"), reflect.TypeOf(&Namespace{})) + ir.RegisterType(load("Kubernetes::Network_policy"), reflect.TypeOf(&NetworkPolicy{})) + ir.RegisterType(load("Kubernetes::Persistent_volume"), reflect.TypeOf(&PersistentVolume{})) + ir.RegisterType(load("Kubernetes::Persistent_volume_claim"), reflect.TypeOf(&PersistentVolumeClaim{})) + ir.RegisterType(load("Kubernetes::Pod"), reflect.TypeOf(&Pod{})) + ir.RegisterType(load("Kubernetes::Replication_controller"), reflect.TypeOf(&ReplicationController{})) + ir.RegisterType(load("Kubernetes::Resource_quota"), reflect.TypeOf(&ResourceQuota{})) + ir.RegisterType(load("Kubernetes::Role"), reflect.TypeOf(&Role{})) + ir.RegisterType(load("Kubernetes::Role_binding"), reflect.TypeOf(&RoleBinding{})) + ir.RegisterType(load("Kubernetes::Secret"), reflect.TypeOf(&Secret{})) + ir.RegisterType(load("Kubernetes::Service"), reflect.TypeOf(&Service{})) + ir.RegisterType(load("Kubernetes::Service_account"), reflect.TypeOf(&ServiceAccount{})) + ir.RegisterType(load("Kubernetes::Stateful_set"), reflect.TypeOf(&StatefulSet{})) + ir.RegisterType(load("Kubernetes::Storage_class"), reflect.TypeOf(&StorageClass{})) +} diff --git a/vendor/github.com/lyraproj/lyra/examples/go-samples/types/puppet/puppet.go b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/puppet/puppet.go new file mode 100644 index 0000000..d08c580 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/examples/go-samples/types/puppet/puppet.go @@ -0,0 +1,28 @@ +// this file is generated +package puppet + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type ManifestLoader struct { +} + +type Service struct { +} + +func InitTypes(c px.Context) { + load := func(n string) px.Type { + if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok { + return v.(px.Type) + } + panic(fmt.Errorf("unable to load Type '%s'", n)) + } + + ir := c.ImplementationRegistry() + ir.RegisterType(load("Puppet::ManifestLoader"), reflect.TypeOf(&ManifestLoader{})) + ir.RegisterType(load("Puppet::Service"), reflect.TypeOf(&Service{})) +} diff --git a/vendor/github.com/lyraproj/lyra/external/externalmodules.go b/vendor/github.com/lyraproj/lyra/external/externalmodules.go new file mode 100644 index 0000000..5c9521b --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/external/externalmodules.go @@ -0,0 +1,20 @@ +// This file contains imports of external plug-in modules so that they are retained in the +// go.mod file when running "go mod tidy". The external package should never be built +// or referenced. +// +// NOTE: This is a Q&D solution to get the current Makefile/go.mod combo functional without +// too much hassle. We should investigate other ways to get all plug-in binaries into a +// common place. 
+package external + +import ( + + // Identity service + _ "github.com/lyraproj/identity/identity" + + // Puppet DSL service + _ "github.com/lyraproj/puppet-workflow/puppetwf" + + // Terraform bridge + _ "github.com/lyraproj/terraform-bridge/pkg/bridge" +) diff --git a/vendor/github.com/lyraproj/lyra/pkg/apply/apply.go b/vendor/github.com/lyraproj/lyra/pkg/apply/apply.go new file mode 100644 index 0000000..7cab0bf --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/apply/apply.go @@ -0,0 +1,201 @@ +package apply + +import ( + "bytes" + "context" + "fmt" + "os" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/hiera/hiera" + "github.com/lyraproj/hiera/hieraapi" + "github.com/lyraproj/hiera/provider" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/loader" + "github.com/lyraproj/lyra/pkg/logger" + "github.com/lyraproj/lyra/pkg/util" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" + "github.com/lyraproj/wfe/wfe" +) + +// Applicator abstracts over workflow application and deletion +type Applicator interface { + ApplyWorkflow(workflowName string) (exitCode int) + + DeleteWorkflow(workflowName string) (exitCode int) + + ApplyWorkflowWithHieraData(workflowName string, data map[string]string) + + //DeleteWorkflowWithHieraData calls the delete on the workflow in lyra, meaning that resources will be destroyed, if applicable + DeleteWorkflowWithHieraData(workflowName string, data map[string]string) +} + +// applicator is used to apply workflows +type applicator struct { + homeDir string + dlvConfig string +} + +func NewApplicator(homeDir, dlvConfig string) Applicator { + return &applicator{homeDir, dlvConfig} +} + +// ApplyWorkflowWithHieraData will apply the named workflow with the supplied hiera data +func (a *applicator) ApplyWorkflowWithHieraData(workflowName string, hieraData map[string]string) { + a.applyWithHieraData(workflowName, hieraData, wf.Upsert) +} + +func (a *applicator) applyWithHieraData(workflowName string, hieraData map[string]string, intent wf.Operation) { + m := convertToDeepMap(hieraData) + hclog.Default().Debug("converted map to hiera data", "m", m) + v := px.Wrap(nil, m).(px.OrderedMap) + tp := func(ic hieraapi.ProviderContext, key string, _ map[string]px.Value) px.Value { + if v, ok := v.Get4(key); ok { + return v + } + return nil + } + hiera.DoWithParent(context.Background(), tp, nil, a.applyWithContext(workflowName, intent)) +} + +//convertToDeepMap converts a map[string]string with entries like {k:"aws.tags.created_by", v:"user@company.com"} +//to a recursive map[string]interface{} with a key created_by nested under a key tags nested under a key aws +//e.g. 
output map[aws:map[tags:map[created_by:person@company.com lifetime:2hrs] hello:hi]] +func convertToDeepMap(hieraData map[string]string) map[string]interface{} { + output := make(map[string]interface{}) + for k, v := range hieraData { + current := output + tokens := strings.Split(k, ".") + len := len(tokens) + for index, token := range tokens { + if index == len-1 { + current[token] = v + } else { + if _, ok := current[token]; !ok { + current[token] = make(map[string]interface{}) + } + current = current[token].(map[string]interface{}) + } + } + } + return output +} + +// ApplyWorkflow will apply the named workflow getting hiera data from file +func (a *applicator) ApplyWorkflow(workflowName string) (exitCode int) { + return a.applyWorkflow(workflowName, wf.Upsert) +} + +// DeleteWorkflow will delete resources persisted by the named workflow +func (a *applicator) DeleteWorkflow(workflowName string) (exitCode int) { + return a.applyWorkflow(workflowName, wf.Delete) +} + +// DeleteWorkflowWithHieraData will delete the named workflow with the supplied hiera data +func (a *applicator) DeleteWorkflowWithHieraData(workflowName string, hieraData map[string]string) { + a.applyWithHieraData(workflowName, hieraData, wf.Delete) +} + +func (a *applicator) applyWorkflow(workflowName string, intent wf.Operation) (exitCode int) { + if a.homeDir != `` { + if err := os.Chdir(a.homeDir); err != nil { + ui.Message("error", fmt.Errorf("Unable to change directory to '%s'", a.homeDir)) + return 1 + } + } + + lookupOptions := map[string]px.Value{ + provider.LookupProvidersKey: types.WrapRuntime([]hieraapi.LookupKey{provider.ConfigLookupKey, provider.Environment})} + + return util.RunCommand(func() int { + hiera.DoWithParent(context.Background(), provider.MuxLookupKey, lookupOptions, a.applyWithContext(workflowName, intent)) + return 0 + }) +} + +func (a *applicator) applyWithContext(workflowName string, intent wf.Operation) func(px.Context) { + return func(c px.Context) { + logger := logger.Get() + c.DoWithLoader(loader.New(c.Loader()), func() { + a.parseDlvConfig(c) + if intent == wf.Delete { + logger.Debug("calling delete") + delete(c, workflowName) + ui.ShowMessage("delete done:", workflowName) + logger.Debug("delete finished") + } else { + apply(c, workflowName, px.EmptyMap, intent) // TODO: Perhaps provide top-level parameters from command line args + } + }) + } +} + +func (a *applicator) parseDlvConfig(c px.Context) { + cfg := strings.TrimSpace(a.dlvConfig) + if cfg == `` { + return + } + + // config must be a string or a hash. 
The former must be quoted unless it already is + switch cfg[0] { + case '{', '"', '\'': + default: + b := bytes.NewBufferString(``) + utils.PuppetQuote(b, cfg) + cfg = b.String() + } + dc, err := types.Parse(cfg) + if err != nil { + panic(util.CmdError(fmt.Sprintf("Unable to parse --dlv option '%s': %s", cfg, err.Error()))) + } + // Pass DlvConfig on to the plugin loader + c.Set(api.LyraDlvConfigKey, dc) +} + +func loadStep(c px.Context, stepID string) api.Step { + def, ok := px.Load(c, px.NewTypedName(px.NsDefinition, stepID)) + if !ok { + panic(util.CmdError(fmt.Sprintf("Unable to find definition for step %s", stepID))) + } + return wfe.CreateStep(c, def.(serviceapi.Definition)) +} + +func delete(c px.Context, stepID string) { + log := logger.Get() + log.Debug("deleting", "stepID", stepID) + + // Nothing in the workflow will be in the new era so all is deleted + service.StartEra(c) + service.SweepAndGC(c, loadStep(c, stepID).Identifier()+"/") +} + +func apply(c px.Context, stepID string, parameters px.OrderedMap, intent wf.Operation) { + log := logger.Get() + + log.Debug("configuring scope") + c.Set(service.StepContextKey, px.SingletonMap(`operation`, types.WrapInteger(int64(intent)))) + + log.Debug("applying", "stepID", stepID) + service.StartEra(c) + a := loadStep(c, stepID) + defer func() { + if r := recover(); r != nil { + log.Error(`apply failed`, `Error`, r) + ui.ShowError(`apply failed`, stepID) + } + gcPrefix := a.Identifier() + "/" + log.Debug("garbage collecting", "prefix", gcPrefix) + service.SweepAndGC(c, gcPrefix) + }() + + result := a.Run(c, px.Wrap(c, parameters).(px.OrderedMap)) + log.Debug("apply done", "result", result) + ui.ShowMessage("apply done:", stepID) +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/change/node.go b/vendor/github.com/lyraproj/lyra/pkg/change/node.go new file mode 100644 index 0000000..db1821b --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/change/node.go @@ -0,0 +1,184 @@ +package change + +import ( + "errors" + "fmt" + "log" + "reflect" +) + +// Node in the object graph which if present should result in the HandlerFn being invoked +// where HandlerFn takes a resource objects current and desired states, with the provider +// responsible for testing equality and subsequent handling of any required changes to the +// resource. +// +// For example, say you have the following types: +// +// type Car struct { +// Make string +// Age int +// Fuel Fuel +// } +// +// type Fuel struct { +// Make string +// Unleaded bool +// } +// +// You can create a change node with handler function for the Unleaded field in a Car object +// as follows. +// +// handler := func(have, want interface{}, path *change.Node) error { +// fmt.Printf("Handle the change here") +// } +// node := NewNode().Field("Fuel").Field("Unleaded").Handler(handler) +// +// The handler function can be a closure to include additional objects such as API clients, +// look at the digitalocean package for an example of this. +type Node struct { + Path []NodeDetail + HandlerFn func(from, to interface{}, path *Node) error +} + +// NodeDetail describes an individual node in the path to the target Node above, navigating +// the object graph via struct fields, map keys or slice/array indices. 
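+//
+// For the Car example above, the path built with Field("Fuel").Field("Unleaded") consists of
+// two "field" NodeDetails. GetValue (defined below) can follow such a path to read the target
+// value; a purely illustrative sketch:
+//
+//   car := Car{Make: "Example", Fuel: Fuel{Unleaded: true}}
+//   var unleaded bool
+//   err := GetValue(car, &unleaded, node) // unleaded is now true, err is nil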
+type NodeDetail struct { + Field string + Index int + Key interface{} + Type string +} + +// NewNode initialises a Node object, with the Path and HandlerFn configured using a builder +// pattern +func NewNode() *Node { + n := &Node{ + Path: []NodeDetail{}, + } + return n +} + +// Field adds a struct field to the Node path, identified by the name of the struct +func (n *Node) Field(name string) *Node { + n.Path = append(n.Path, NodeDetail{ + Field: name, + Type: "field", + }) + return n +} + +// Index adds a specific slice/array index to the Node path +func (n *Node) Index(id int) *Node { + n.Path = append(n.Path, NodeDetail{ + Index: id, + Type: "index", + }) + return n +} + +// Key adds a key value to the Node path, keys are type dependent +func (n *Node) Key(key interface{}) *Node { + n.Path = append(n.Path, NodeDetail{ + Key: key, + Type: "key", + }) + return n +} + +// Handler adds a HandlerFn to the Node which will be invoked when the Handle() +// method is called +func (n *Node) Handler(f func(from, to interface{}, path *Node) error) *Node { + // TODO Validate the path here + n.HandlerFn = f + return n +} + +// Handle invokes the stored HandlerFn with the current and desired resource states +func (n *Node) Handle(from, to interface{}) error { + if n.HandlerFn == nil { + return errors.New("no handler function set") + } + return n.HandlerFn(from, to, n) +} + +// GetValue attempts to extract a value from sources object graph using the supplied +// path, returning the found value by reference in v +// v must be the same type as the node pointed to by the supplied path +func GetValue(source, v interface{}, path *Node) (err error) { + + defer func() { + if r := recover(); r != nil { + var ok bool + err, ok = r.(error) + if !ok { + err = fmt.Errorf("%v", r) + } + } + }() + + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr || rv.IsNil() { + return errors.New("value v must be a pointer") + } + + if path == nil { + return errors.New("no node path provided") + } + if source == nil { + return errors.New("provided source is nil") + } + + sourceVal := reflect.ValueOf(source) + curVal := sourceVal + + for _, node := range path.Path { + switch node.Type { + case "field": + // dereference pointer + if curVal.Kind() == reflect.Ptr { + curVal = curVal.Elem() + } + + if curVal.Kind() != reflect.Struct { + return fmt.Errorf("expected a struct, got a %s", curVal.Kind()) + } + curVal = curVal.FieldByName(node.Field) + if !curVal.IsValid() { + return fmt.Errorf("field does not exist at path: %s", node.Field) + } + continue + case "index": + if curVal.Kind() != reflect.Slice && curVal.Kind() != reflect.Array { + return fmt.Errorf("expected a slice or index, got a %s", curVal.Kind()) + } + if node.Index > (curVal.Len() - 1) { + return fmt.Errorf("index out of range") + } + curVal = curVal.Index(node.Index) + continue + case "key": + if curVal.Kind() != reflect.Map { + return fmt.Errorf("expected a map, got a %s", curVal.Kind()) + } + + // Check that key is assignable to the maps key type + key := reflect.ValueOf(node.Key) + curVal = curVal.MapIndex(key) + continue + default: + log.Fatalf("unhandled path node: %#v", node) + } + } + + vv := reflect.ValueOf(v) + if !vv.Elem().CanSet() { + return fmt.Errorf("unable to set return value") + } + vv.Elem().Set(curVal) + return nil +} + +// Equals checks if the from and to objects are equal using reflect.DeepEquals +func Equals(from, to interface{}) bool { + return reflect.DeepEqual(from, to) +} diff --git 
a/vendor/github.com/lyraproj/lyra/pkg/change/registry.go b/vendor/github.com/lyraproj/lyra/pkg/change/registry.go new file mode 100644 index 0000000..06d23fb --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/change/registry.go @@ -0,0 +1,49 @@ +package change + +import ( + "sync" + + "github.com/hashicorp/go-hclog" +) + +// Registry stores a list of change nodes to be evaluated +type Registry struct { + sync.Mutex + nodes []*Node + logger hclog.Logger +} + +// NewRegistry initialises an empty change Registry with a default logger +func NewRegistry() *Registry { + r := &Registry{ + nodes: []*Node{}, + logger: hclog.New(hclog.DefaultOptions), + } + return r +} + +// SetLogger overrides the default logger +func (r *Registry) SetLogger(logger hclog.Logger) { + r.logger = logger +} + +// Add a new change node to the Registry +func (r *Registry) Add(n *Node) { + r.Lock() + r.nodes = append(r.nodes, n) + r.Unlock() +} + +// HandleChanges invokes the change handler for all stored change nodes +func (r *Registry) HandleChanges(from, to interface{}) error { + r.Lock() + defer r.Unlock() + for _, node := range r.nodes { + r.logger.Debug("handling change", "node", node.Path) + if err := node.Handle(from, to); err != nil { + r.logger.Error("error handling change, halting processing", "error", err) + return err + } + } + return nil +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/generate/generate.go b/vendor/github.com/lyraproj/lyra/pkg/generate/generate.go new file mode 100644 index 0000000..f68ddd6 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/generate/generate.go @@ -0,0 +1,42 @@ +package generate + +import ( + "github.com/lyraproj/lyra/pkg/loader" + "github.com/lyraproj/lyra/pkg/util" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/lang/typegen" + "github.com/lyraproj/servicesdk/serviceapi" +) + +// Generate generates typeset files in the given language for all types exported from known services +// into the given targetDirectory. If the targetDirectory is the empty string, it will default to +// plugins/types relative to the current working directory. 
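+//
+// A minimal sketch of a call site; the "go" language name and the explicit target directory
+// are illustrative assumptions only:
+//
+//   exitCode := generate.Generate("go", "plugins/types")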
+func Generate(language, targetDirectory string) int { + return util.RunCommand(func() int { + pcore.Do(func(c px.Context) { + generator := typegen.GetGenerator(language) + + c.DoWithLoader(loader.New(c.Loader()), func() { + loader.LoadPlugins(c) + + sNames := c.Loader().Discover(c, func(tn px.TypedName) bool { + return tn.Namespace() == px.NsService + }) + + if targetDirectory == `` { + targetDirectory = "types" + } + for _, sName := range sNames { + if v, ok := px.Load(c, sName); ok { + typeSet, _ := v.(serviceapi.Service).Metadata(c) + if typeSet != nil && typeSet.Types().Len() > 0 { + generator.GenerateTypes(typeSet, targetDirectory) + } + } + } + }) + }) + return 0 + }) +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/loader/fs.go b/vendor/github.com/lyraproj/lyra/pkg/loader/fs.go new file mode 100644 index 0000000..dba6999 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/loader/fs.go @@ -0,0 +1,71 @@ +package loader + +// var ( +// // ToolDir is the home directory for the tool +// ToolDir string +// // DefaultPluginDir is where plugins are stored +// DefaultPluginDir string +// ) + +// const ( +// // ToolDirEnvVar can be used to override the default user directory +// ToolDirEnvVar = "LYRA_DIR" +// // DefaultPluginDirEnvVar can be used to override the default plugin directory +// DefaultPluginDirEnvVar = "LYRA_DEFAULT_PLUGINS_DIR" +// ) + +// // InitializeDirs ensures all directories have been created +// func initializeDirs() error { +// var err error +// user, err := user.Current() +// if err != nil { +// return err +// } + +// if ToolDir, err = initialiseDir(ToolDirEnvVar, filepath.Join(user.HomeDir, ".lyra")); err != nil { +// return err +// } +// if DefaultPluginDir, err = initialiseDir(DefaultPluginDirEnvVar, filepath.Join(ToolDir, "plugins")); err != nil { +// return err +// } +// return nil +// } + +// func initialiseDir(envvar, defaultpath string) (string, error) { +// path := defaultpath +// if v, ok := os.LookupEnv(envvar); ok { +// path = v +// } + +// return path, ensureDir(path) +// } + +// // Find returns paths to files matching the pattern relative to the +// // supplied directory +// func find(dir, pattern string) ([]string, error) { +// stat, err := os.Stat(dir) +// if err != nil { +// return nil, err +// } +// if !stat.IsDir() { +// return nil, fmt.Errorf("not a directory") +// } +// return filepath.Glob(filepath.Join(dir, pattern)) +// } + +// func ensureDir(path string) error { +// log := logger.Get() +// log.Debug("ensure directory exists", "path", path) +// info, err := os.Stat(path) + +// if os.IsNotExist(err) { +// log.Debug("create directory", "path", path) +// return os.MkdirAll(path, 0755) +// } + +// if info.Mode().IsRegular() { +// return fmt.Errorf("file exists at path") +// } + +// return nil +// } diff --git a/vendor/github.com/lyraproj/lyra/pkg/loader/integrity/sha256sum.go b/vendor/github.com/lyraproj/lyra/pkg/loader/integrity/sha256sum.go new file mode 100644 index 0000000..912019c --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/loader/integrity/sha256sum.go @@ -0,0 +1,27 @@ +package integrity + +import ( + "crypto/sha256" + "encoding/hex" + "io" + "os" +) + +// Sha256sum returns a sha256sum string for the supplied io.Reader +func Sha256sum(r io.Reader) (string, error) { + h := sha256.New() + if _, err := io.Copy(h, r); err != nil { + return "", err + } + return hex.EncodeToString(h.Sum(nil)), nil +} + +// Sha256sumFile returns a sha256sum string for file at the supplied path +func Sha256sumFile(path string) (string, error) { + f, 
err := os.Open(path) + if err != nil { + return "", err + } + defer f.Close() + return Sha256sum(f) +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/loader/loader.go b/vendor/github.com/lyraproj/lyra/pkg/loader/loader.go new file mode 100644 index 0000000..64d4a08 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/loader/loader.go @@ -0,0 +1,81 @@ +package loader + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/wfe/api" +) + +var defaultPluginsPath []string +var defaultWorkflowsPath []string + +func init() { + executable, err := os.Executable() + if err != nil { + panic(fmt.Sprintf("failed to determine the path of the executable: %v", err)) + } + executable, err = filepath.EvalSymlinks(executable) + if err != nil { + panic(fmt.Sprintf("failed to eval symlinks on the executable: %s %v", executable, err)) + } + executableParentDir := filepath.Dir(filepath.Dir(executable)) + // Load plugins from: + // - WORKING_DIR/build/goplugins (to support both plugin and lyra development) + // - EXECUTABLE_DIR/../goplugins (to support running the built binary) + defaultPluginsPath = []string{"build", executableParentDir} + // Load workflows from: + // - WORKING_DIR/workflows + // - EXECUTABLE_DIR/../workflows (to support brew and running build\lyra irrespective of working dir) + defaultWorkflowsPath = []string{".", executableParentDir} +} + +// New creates a new federated loader instance +func New(parentLoader px.Loader) px.Loader { + var loaders []px.ModuleLoader + for _, pluginPathElement := range defaultPluginsPath { + loaders = append(loaders, px.NewFileBasedLoader(parentLoader, pluginPathElement, "", api.GoPluginPath)) + } + for _, workflowsPathElement := range defaultWorkflowsPath { + loaders = append(loaders, px.NewFileBasedLoader(parentLoader, workflowsPathElement, "", api.LyraLinkPath, api.YamlManifestPath, api.PpManifestPath)) + } + return px.NewDependencyLoader(loaders) +} + +// LoadPlugins loads all known goplugins +func LoadPlugins(c px.Context) { + for _, plugin := range findFiles("goplugins/*", defaultPluginsPath) { + px.Load(c, px.NewTypedName(px.NsService, filepath.Base(plugin))) + } +} + +func findFiles(glob string, pluginsPath []string) []string { + files := []string{} + logger := hclog.Default() + for _, pluginDir := range pluginsPath { + stat, err := os.Stat(pluginDir) + if err != nil { + if !os.IsNotExist(err) { + logger.Error("failed to read plugin directory", "pluginDir", pluginDir, "err", err) + } + continue + } + if !stat.IsDir() { + logger.Error("not a plugin directory", "pluginDir", pluginDir, "err", err) + continue + } + fullGlob := filepath.Join(pluginDir, glob) + fs, err := filepath.Glob(fullGlob) + if err != nil { + logger.Error("failed to read plugins from dir", "pluginDir", pluginDir, "err", err) + continue + } + + files = append(files, fs...) 
+ logger.Debug(fmt.Sprintf("found %d files", len(fs))) + } + return files +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/logger/logger.go b/vendor/github.com/lyraproj/lyra/pkg/logger/logger.go new file mode 100644 index 0000000..99debe4 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/logger/logger.go @@ -0,0 +1,49 @@ +package logger + +import ( + "io" + "sync" + + "github.com/lyraproj/issue/issue" + + hclog "github.com/hashicorp/go-hclog" +) + +var logger hclog.Logger +var once sync.Once + +// Spec describes the logger to be created +type Spec struct { + Name string + Level string + Output io.Writer + JSON bool + IncludeLocation bool +} + +// Get returns the initialised Logger +func Get() hclog.Logger { + return logger +} + +// Initialise the Logger +func Initialise(spec Spec) hclog.Logger { + once.Do(func() { + hclog.DefaultOptions = &hclog.LoggerOptions{ + Name: spec.Name, + Level: hclog.Warn, + JSONFormat: spec.JSON, + IncludeLocation: spec.IncludeLocation, + } + if len(spec.Level) > 0 { + hclog.DefaultOptions.Level = hclog.LevelFromString(spec.Level) + } + if spec.Output != nil { + hclog.DefaultOptions.Output = spec.Output + } + l := hclog.Default() + issue.IncludeStacktrace(l.IsDebug()) + logger = l + }) + return logger +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/util/command.go b/vendor/github.com/lyraproj/lyra/pkg/util/command.go new file mode 100644 index 0000000..7ff3ef3 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/util/command.go @@ -0,0 +1,54 @@ +package util + +import ( + "os" + "os/signal" + "syscall" + + "github.com/hashicorp/go-plugin" + "github.com/lyraproj/lyra/cmd/lyra/ui" + "github.com/lyraproj/lyra/pkg/logger" +) + +type CmdError string + +func (e CmdError) Error() string { + return string(e) +} + +// RunCommand calls the given cmdFunc returns its exit value. It ensures that: +// +// A CmdError is logged using an ui.Message (changes exitCode to 1) +// SIGINT and SIGTERM are handled (changes exitCode to 1) +// All started plugins are cleaned up +func RunCommand(cmdFunc func() int) (exitCode int) { + sgs := make(chan os.Signal, 1) + done := make(chan bool, 1) + + // Spawn signal handler routine. It will get called explicitly by the deferred func + // below this one unless it is called when a signal is trapped. 
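+ // On normal completion (or after a recovered panic) the deferred func sends SIGUSR1 on this
+ // same channel, so the plugin cleanup in the handler still runs; since SIGUSR1 is our own
+ // signal it leaves exitCode untouched, whereas a trapped SIGINT or SIGTERM sets it to 1.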
+ go func() { + sig := <-sgs + plugin.CleanupClients() + logger.Get().Debug("all plugins cleaned up") + if sig != syscall.SIGUSR1 { + exitCode = 1 + } + done <- true + }() + signal.Notify(sgs, syscall.SIGINT, syscall.SIGTERM) + + defer func() { + if e := recover(); e != nil { + exitCode = 1 + if err, ok := e.(CmdError); ok { + ui.Message("error", err) + } else { + ui.Message("fatal", e) + } + } + sgs <- syscall.SIGUSR1 // Our own + <-done + }() + return cmdFunc() +} diff --git a/vendor/github.com/lyraproj/lyra/pkg/version/logo.go b/vendor/github.com/lyraproj/lyra/pkg/version/logo.go new file mode 100644 index 0000000..3fd7b47 --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/version/logo.go @@ -0,0 +1,13 @@ +package version + +// LogoFiglet is an ASCII art logo +var LogoFiglet = ` _ +| | +| | _ _ _ __ __ _ +| | | | | | '__/ _` + "`" + ` | +| |___| |_| | | | (_| | +\_____/\__, |_| \__,_| + __/ | + |___/ + +` diff --git a/vendor/github.com/lyraproj/lyra/pkg/version/version.go b/vendor/github.com/lyraproj/lyra/pkg/version/version.go new file mode 100644 index 0000000..0e558cb --- /dev/null +++ b/vendor/github.com/lyraproj/lyra/pkg/version/version.go @@ -0,0 +1,39 @@ +package version + +import "fmt" + +var ( + // BuildTag set at build time, empty if not a tagged version + BuildTag string + // BuildTime set at build time + BuildTime string + // BuildSHA set at build time + BuildSHA string +) + +// Version set at buildtime +type Version struct { + BuildTag string + BuildTime string + BuildSHA string +} + +// Get the structured version +func Get() Version { + tag := BuildTag + if len(BuildTag) == 0 { + tag = "dirty" + } + + v := Version{ + BuildTag: tag, + BuildTime: BuildTime, + BuildSHA: BuildSHA, + } + return v +} + +// String returns a simplified version string consisting of - +func (v Version) String() string { + return fmt.Sprintf("%s-%s", v.BuildSHA, v.BuildTag) +} diff --git a/vendor/github.com/lyraproj/pcore/LICENSE b/vendor/github.com/lyraproj/pcore/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/lyraproj/pcore/hash/stringhash.go b/vendor/github.com/lyraproj/pcore/hash/stringhash.go new file mode 100644 index 0000000..b4873d8 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/hash/stringhash.go @@ -0,0 +1,235 @@ +package hash + +import ( + "fmt" + + "github.com/lyraproj/pcore/px" +) + +// Mutable and order preserving hash with string keys and arbitrary values. 
Used, among other things, by the +// Object type to store parameters, attributes, and functions + +type ( + stringEntry struct { + key string + value interface{} + } + + StringHash struct { + entries []*stringEntry + index map[string]int + frozen bool + } + + frozenError struct { + key string + } +) + +var EmptyStringHash = &StringHash{[]*stringEntry{}, map[string]int{}, true} + +func (f *frozenError) Error() string { + return fmt.Sprintf("attempt to add, modify, or delete key '%s' in a frozen StringHash", f.key) +} + +// NewStringHash returns an empty *StringHash initialized with given capacity +func NewStringHash(capacity int) *StringHash { + return &StringHash{make([]*stringEntry, 0, capacity), make(map[string]int, capacity), false} +} + +// Copy returns a shallow copy of this hash, i.e. each key and value is not cloned +func (h *StringHash) Copy() *StringHash { + entries := make([]*stringEntry, len(h.entries)) + for i, e := range h.entries { + entries[i] = &stringEntry{e.key, e.value} + } + index := make(map[string]int, len(h.index)) + for k, v := range h.index { + index[k] = v + } + return &StringHash{entries, index, false} +} + +// EachKey calls the given consumer function once for each key in this hash +func (h *StringHash) EachKey(consumer func(key string)) { + for _, e := range h.entries { + consumer(e.key) + } +} + +// AllPair calls the given function once for each key/value pair in this hash. Return +// true if all invocations returned true. False otherwise. +// The method returns true if the hash i empty. +func (h *StringHash) AllPair(f func(key string, value interface{}) bool) bool { + for _, e := range h.entries { + if !f(e.key, e.value) { + return false + } + } + return true +} + +// AnyPair calls the given function once for each key/value pair in this hash. Return +// true when an invocation returns true. False otherwise. +// The method returns false if the hash i empty. +func (h *StringHash) AnyPair(f func(key string, value interface{}) bool) bool { + for _, e := range h.entries { + if f(e.key, e.value) { + return true + } + } + return false +} + +// EachPair calls the given consumer function once for each key/value pair in this hash +func (h *StringHash) EachPair(consumer func(key string, value interface{})) { + for _, e := range h.entries { + consumer(e.key, e.value) + } +} + +// EachValue calls the given consumer function once for each value in this hash +func (h *StringHash) EachValue(consumer func(value interface{})) { + for _, e := range h.entries { + consumer(e.value) + } +} + +// Equals compares two hashes for equality. 
Hashes are considered equal if the have +// the same size and contains the same key/value associations irrespective of order +func (h *StringHash) Equals(other interface{}, g px.Guard) bool { + oh, ok := other.(*StringHash) + if !ok || len(h.entries) != len(oh.entries) { + return false + } + + for _, e := range h.entries { + oi, ok := oh.index[e.key] + if !(ok && px.Equals(e.value, oh.entries[oi].value, g)) { + return false + } + } + return true +} + +// Freeze prevents further changes to the hash +func (h *StringHash) Freeze() { + h.frozen = true +} + +// Get returns a value from the hash or the given default if no value was found +func (h *StringHash) Get(key string, dflt interface{}) interface{} { + if p, ok := h.index[key]; ok { + return h.entries[p].value + } + return dflt +} + +// Get2 returns a value from the hash or the value returned by given default function if no value was found +func (h *StringHash) Get2(key string, dflt func() interface{}) interface{} { + if p, ok := h.index[key]; ok { + return h.entries[p].value + } + return dflt() +} + +// Get3 returns a value from the hash or nil together with a boolean to indicate if the key was present or not +func (h *StringHash) Get3(key string) (interface{}, bool) { + if p, ok := h.index[key]; ok { + return h.entries[p].value, true + } + return nil, false +} + +// Delete the entry for the given key from the hash. Returns the old value or nil if not found +func (h *StringHash) Delete(key string) (oldValue interface{}) { + if h.frozen { + panic(frozenError{key}) + } + index := h.index + oldValue = nil + if p, ok := index[key]; ok { + oldValue = h.entries[p].value + delete(h.index, key) + for k, v := range index { + if v > p { + index[k] = p - 1 + } + } + ne := make([]*stringEntry, len(h.entries)-1) + for i, e := range h.entries { + if i < p { + ne[i] = e + } else if i > p { + ne[i-1] = e + } + } + h.entries = ne + } + return +} + +// Includes returns true if the hash contains the given key +func (h *StringHash) Includes(key string) bool { + _, ok := h.index[key] + return ok +} + +// IsEmpty returns true if the hash has no entries +func (h *StringHash) IsEmpty() bool { + return len(h.entries) == 0 +} + +// Keys returns the keys of the hash in the order that they were first entered +func (h *StringHash) Keys() []string { + keys := make([]string, len(h.entries)) + for i, e := range h.entries { + keys[i] = e.key + } + return keys +} + +// Merge this hash with the other hash giving the other precedence. A new hash is returned +func (h *StringHash) Merge(other *StringHash) (merged *StringHash) { + merged = h.Copy() + merged.PutAll(other) + return +} + +// Put adds a new key/value association to the hash or replace the value of an existing association +func (h *StringHash) Put(key string, value interface{}) (oldValue interface{}) { + if h.frozen { + panic(frozenError{key}) + } + if p, ok := h.index[key]; ok { + e := h.entries[p] + oldValue = e.value + e.value = value + } else { + oldValue = nil + h.index[key] = len(h.entries) + h.entries = append(h.entries, &stringEntry{key, value}) + } + return +} + +// PutAll merges this hash with the other hash giving the other precedence. 
A new hash is returned +func (h *StringHash) PutAll(other *StringHash) { + for _, e := range other.entries { + h.Put(e.key, e.value) + } +} + +// Len returns the number of entries in the hash +func (h *StringHash) Len() int { + return len(h.entries) +} + +// Values returns the values of the hash in the order that their respective keys were first entered +func (h *StringHash) Values() []interface{} { + values := make([]interface{}, len(h.entries)) + for i, e := range h.entries { + values[i] = e.value + } + return values +} diff --git a/vendor/github.com/lyraproj/pcore/loader/dependency.go b/vendor/github.com/lyraproj/pcore/loader/dependency.go new file mode 100644 index 0000000..530ab42 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/loader/dependency.go @@ -0,0 +1,63 @@ +package loader + +import "github.com/lyraproj/pcore/px" + +type dependencyLoader struct { + basicLoader + loaders []px.ModuleLoader + index map[string]px.ModuleLoader +} + +func newDependencyLoader(loaders []px.ModuleLoader) px.Loader { + index := make(map[string]px.ModuleLoader, len(loaders)) + for _, ml := range loaders { + n := ml.ModuleName() + if n != `` { + index[n] = ml + } + } + return &dependencyLoader{ + basicLoader: basicLoader{namedEntries: make(map[string]px.LoaderEntry, 32)}, + loaders: loaders, + index: index} +} + +func init() { + px.NewDependencyLoader = newDependencyLoader +} + +func (l *dependencyLoader) LoadEntry(c px.Context, name px.TypedName) px.LoaderEntry { + entry := l.basicLoader.LoadEntry(c, name) + if entry == nil { + entry = l.find(c, name) + if entry == nil { + entry = &loaderEntry{nil, nil} + } + l.SetEntry(name, entry) + } + return entry +} + +func (l *dependencyLoader) LoaderFor(moduleName string) px.ModuleLoader { + return l.index[moduleName] +} + +func (l *dependencyLoader) find(c px.Context, name px.TypedName) px.LoaderEntry { + if len(l.index) > 0 && name.IsQualified() { + // Explicit loader for given name takes precedence + if ml, ok := l.index[name.Parts()[0]]; ok { + return ml.LoadEntry(c, name) + } + } + + for _, ml := range l.loaders { + e := ml.LoadEntry(c, name) + if !(e == nil || e.Value() == nil) { + return e + } + } + + // Recursion or parallel go routines might have set the entry now. Returning + // nil here might therefore be a lie. 
+ return l.basicLoader.LoadEntry(c, name) +} diff --git a/vendor/github.com/lyraproj/pcore/loader/filebased.go b/vendor/github.com/lyraproj/pcore/loader/filebased.go new file mode 100644 index 0000000..c181aa4 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/loader/filebased.go @@ -0,0 +1,354 @@ +package loader + +import ( + "io/ioutil" + "os" + "path/filepath" + "sort" + "strings" + "sync" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + ContentProvidingLoader interface { + px.Loader + + GetContent(c px.Context, path string) []byte + } + + fileBasedLoader struct { + parentedLoader + path string + moduleName string + paths map[px.Namespace][]SmartPath + index map[string][]string + locks map[string]*sync.Mutex + locksLock sync.Mutex + } + + SmartPathFactory func(loader px.ModuleLoader, moduleNameRelative bool) SmartPath +) + +var SmartPathFactories map[px.PathType]SmartPathFactory = map[px.PathType]SmartPathFactory{ + px.PuppetDataTypePath: newPuppetTypePath, +} + +func init() { + px.NewFileBasedLoader = newFileBasedLoader +} + +func newFileBasedLoader(parent px.Loader, path, moduleName string, lds ...px.PathType) px.ModuleLoader { + paths := make(map[px.Namespace][]SmartPath, len(lds)) + loader := &fileBasedLoader{ + parentedLoader: parentedLoader{ + basicLoader: basicLoader{namedEntries: make(map[string]px.LoaderEntry, 64)}, + parent: parent}, + path: path, + moduleName: moduleName, + paths: paths, + locks: make(map[string]*sync.Mutex)} + + for _, p := range lds { + path := loader.newSmartPath(p, !(moduleName == `` || moduleName == `environment`)) + for _, ns := range path.Namespaces() { + if sa, ok := paths[ns]; ok { + paths[ns] = append(sa, path) + } else { + paths[ns] = []SmartPath{path} + } + } + } + return loader +} + +func (l *fileBasedLoader) newSmartPath(pathType px.PathType, moduleNameRelative bool) SmartPath { + if f, ok := SmartPathFactories[pathType]; ok { + return f(l, moduleNameRelative) + } + panic(px.Error(px.IllegalArgument, issue.H{`function`: `newSmartPath`, `index`: 1, `arg`: pathType})) +} + +func newPuppetTypePath(loader px.ModuleLoader, moduleNameRelative bool) SmartPath { + return NewSmartPath(`types`, `.pp`, loader, []px.Namespace{px.NsType}, moduleNameRelative, false, InstantiatePuppetType) +} + +func (l *fileBasedLoader) LoadEntry(c px.Context, name px.TypedName) px.LoaderEntry { + entry := l.parentedLoader.LoadEntry(c, name) + if entry != nil { + return entry + } + + if name.Namespace() == px.NsConstructor || name.Namespace() == px.NsAllocator { + // Process internal. Never found in file system + return nil + } + + entry = l.GetEntry(name) + if entry != nil { + return entry + } + + entry = l.find(c, name) + if entry == nil { + entry = &loaderEntry{nil, nil} + l.SetEntry(name, entry) + } + return entry +} + +func (l *fileBasedLoader) ModuleName() string { + return l.moduleName +} + +func (l *fileBasedLoader) Path() string { + return l.path +} + +func (l *fileBasedLoader) isGlobal() bool { + return l.moduleName == `` || l.moduleName == `environment` +} + +func (l *fileBasedLoader) find(c px.Context, name px.TypedName) px.LoaderEntry { + if name.IsQualified() { + // The name is in a name space. + if l.moduleName != `` && l.moduleName != name.Parts()[0] { + // Then entity cannot possible be in this module unless the name starts with the module name. 
+ // Note: If "module" represents a "global component", the module_name is empty and cannot match which is + // ok since such a "module" cannot have namespaced content). + return nil + } + if name.Namespace() == px.NsTask && len(name.Parts()) > 2 { + // Subdirectories beneath the tasks directory are currently not recognized + return nil + } + } else { + // The name is in the global name space. + switch name.Namespace() { + case px.NsFunction: + // Can be defined in module using a global name. No action required + case px.NsType: + if !l.isGlobal() { + // Global name must be the name of the module + if l.moduleName != name.Parts()[0] { + // Global name must be the name of the module + return nil + } + + // Look for special 'init_typeset' TypeSet + origins, smartPath := l.findExistingPath(px.NewTypedName2(name.Namespace(), `init_typeset`, l.NameAuthority())) + if smartPath == nil { + return nil + } + smartPath.Instantiator()(c, l, name, origins) + entry := l.GetEntry(name) + if entry != nil { + if _, ok := entry.Value().(px.TypeSet); ok { + return entry + } + } + panic(px.Error(px.NotExpectedTypeset, issue.H{`source`: origins[0], `name`: utils.CapitalizeSegment(l.moduleName)})) + } + default: + if !l.isGlobal() { + // Global name must be the name of the module + if l.moduleName != name.Parts()[0] { + // Global name must be the name of the module + return nil + } + + // Look for special 'init' file + origins, smartPath := l.findExistingPath(px.NewTypedName2(name.Namespace(), `init`, l.NameAuthority())) + if smartPath == nil { + return nil + } + return l.instantiate(c, smartPath, name, origins) + } + } + } + + origins, smartPath := l.findExistingPath(name) + if smartPath != nil { + return l.instantiate(c, smartPath, name, origins) + } + + if !name.IsQualified() { + return nil + } + + // Search using parent name. If a parent is found, load it and check if that load fulfilled the + // request of the qualified name + tsName := name.Parent() + for tsName != nil { + tse := l.GetEntry(tsName) + if tse == nil { + tse = l.find(c, tsName) + if tse != nil && tse.Value() != nil { + if ts, ok := tse.Value().(px.TypeSet); ok { + c.DoWithLoader(l, func() { + ts.(px.ResolvableType).Resolve(c) + }) + } + } + te := l.GetEntry(name) + if te != nil { + return te + } + } + tsName = tsName.Parent() + } + return nil +} + +func (l *fileBasedLoader) findExistingPath(name px.TypedName) (origins []string, smartPath SmartPath) { + l.lock.Lock() + defer l.lock.Unlock() + + if paths, ok := l.paths[name.Namespace()]; ok { + for _, sm := range paths { + l.ensureIndexed(sm) + if paths, ok := l.index[name.MapKey()]; ok { + return paths, sm + } + } + } + return nil, nil +} + +func (l *fileBasedLoader) ensureAllIndexed() { + l.lock.Lock() + defer l.lock.Unlock() + + for _, paths := range l.paths { + for _, sm := range paths { + l.ensureIndexed(sm) + } + } +} + +func (l *fileBasedLoader) ensureIndexed(sp SmartPath) { + if !sp.Indexed() { + sp.SetIndexed() + l.addToIndex(sp) + } +} + +func (l *fileBasedLoader) instantiate(c px.Context, smartPath SmartPath, name px.TypedName, origins []string) px.LoaderEntry { + rn := name + + // The name of the thing to instantiate must be based on the first namespace when the SmartPath supports more than one + ns := smartPath.Namespaces() + if len(ns) > 1 && name.Namespace() != ns[0] { + name = px.NewTypedName2(ns[0], name.Name(), name.Authority()) + } + + // Lock the on the name. 
Several instantiations of different names must be allowed to execute in parallel + var nameLock *sync.Mutex + l.locksLock.Lock() + if lk, ok := l.locks[name.MapKey()]; ok { + nameLock = lk + } else { + nameLock = &sync.Mutex{} + l.locks[name.MapKey()] = nameLock + } + l.locksLock.Unlock() + + nameLock.Lock() + defer func() { + nameLock.Unlock() + l.locksLock.Lock() + delete(l.locks, name.MapKey()) + l.locksLock.Unlock() + }() + + if l.GetEntry(name) == nil { + // Make absolutely sure that we don't recurse into instantiate again + l.SetEntry(name, px.NewLoaderEntry(nil, nil)) + smartPath.Instantiator()(c, l, name, origins) + } + return l.GetEntry(rn) +} + +func (l *fileBasedLoader) Discover(c px.Context, predicate func(px.TypedName) bool) []px.TypedName { + l.ensureAllIndexed() + found := l.parent.Discover(c, predicate) + added := false + for k := range l.index { + tn := px.TypedNameFromMapKey(k) + if !l.parent.HasEntry(tn) { + if predicate(tn) { + found = append(found, tn) + added = true + } + } + } + if added { + sort.Slice(found, func(i, j int) bool { return found[i].MapKey() < found[j].MapKey() }) + } + return found +} + +func (l *fileBasedLoader) GetContent(c px.Context, path string) []byte { + content, err := ioutil.ReadFile(path) + if err != nil { + panic(px.Error(px.UnableToReadFile, issue.H{`path`: path, `detail`: err.Error()})) + } + return content +} + +func (l *fileBasedLoader) HasEntry(name px.TypedName) bool { + if l.parent.HasEntry(name) { + return true + } + + if paths, ok := l.paths[name.Namespace()]; ok { + for _, sm := range paths { + l.ensureIndexed(sm) + if _, ok := l.index[name.MapKey()]; ok { + return true + } + } + } + return false +} + +func (l *fileBasedLoader) addToIndex(smartPath SmartPath) { + if l.index == nil { + l.index = make(map[string][]string, 64) + } + ext := smartPath.Extension() + noExtension := ext == `` + + generic := smartPath.GenericPath() + err := filepath.Walk(generic, func(path string, info os.FileInfo, err error) error { + if err != nil { + if strings.Contains(err.Error(), `no such file or directory`) { + // A missing path is OK + err = nil + } + return err + } + if !info.IsDir() { + if noExtension || strings.HasSuffix(path, ext) { + rel, err := filepath.Rel(generic, path) + if err == nil { + for _, tn := range smartPath.TypedNames(l.NameAuthority(), rel) { + if paths, ok := l.index[tn.MapKey()]; ok { + l.index[tn.MapKey()] = append(paths, path) + } else { + l.index[tn.MapKey()] = []string{path} + } + } + } + } + } + return nil + }) + + if err != nil { + panic(px.Error(px.Failure, issue.H{`message`: err.Error()})) + } +} diff --git a/vendor/github.com/lyraproj/pcore/loader/instantiate.go b/vendor/github.com/lyraproj/pcore/loader/instantiate.go new file mode 100644 index 0000000..3b08f63 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/loader/instantiate.go @@ -0,0 +1,27 @@ +package loader + +import ( + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +// InstantiatePuppetType reads the contents a puppet manifest file and parses it using +// the types.Parse() function. 
+func InstantiatePuppetType(ctx px.Context, loader ContentProvidingLoader, tn px.TypedName, sources []string) { + content := string(loader.GetContent(ctx, sources[0])) + dt, err := types.Parse(content) + if err != nil { + panic(err) + } + if nt, ok := dt.(px.Type); ok { + if !strings.EqualFold(tn.Name(), nt.Name()) { + panic(px.Error(px.WrongDefinition, issue.H{`source`: sources[0], `type`: px.NsType, `expected`: tn.Name(), `actual`: nt.Name()})) + } + px.AddTypes(ctx, nt) + } else { + px.AddTypes(ctx, types.NamedType(tn.Authority(), tn.Name(), dt)) + } +} diff --git a/vendor/github.com/lyraproj/pcore/loader/loader.go b/vendor/github.com/lyraproj/pcore/loader/loader.go new file mode 100644 index 0000000..4c4a3e3 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/loader/loader.go @@ -0,0 +1,243 @@ +package loader + +import ( + "bytes" + "sort" + "sync" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type ( + loaderEntry struct { + value interface{} + origin issue.Location + } + + basicLoader struct { + lock sync.RWMutex + namedEntries map[string]px.LoaderEntry + } + + parentedLoader struct { + basicLoader + parent px.Loader + } + + typeSetLoader struct { + parentedLoader + typeSet px.TypeSet + } +) + +var StaticLoader = &basicLoader{namedEntries: make(map[string]px.LoaderEntry, 64)} + +func init() { + sh := StaticLoader.namedEntries + types.EachCoreType(func(t px.Type) { + sh[px.NewTypedName(px.NsType, t.Name()).MapKey()] = &loaderEntry{t, nil} + }) + + px.StaticLoader = func() px.Loader { + return StaticLoader + } + + px.NewParentedLoader = func(parent px.Loader) px.DefiningLoader { + return &parentedLoader{basicLoader{namedEntries: make(map[string]px.LoaderEntry, 64)}, parent} + } + + px.NewTypeSetLoader = func(parent px.Loader, typeSet px.Type) px.TypeSetLoader { + return &typeSetLoader{parentedLoader{basicLoader{namedEntries: make(map[string]px.LoaderEntry, 64)}, parent}, typeSet.(px.TypeSet)} + } + + px.NewLoaderEntry = func(value interface{}, origin issue.Location) px.LoaderEntry { + return &loaderEntry{value, origin} + } + + px.Load = load +} + +func (e *loaderEntry) Origin() issue.Location { + return e.origin +} + +func (e *loaderEntry) Value() interface{} { + return e.value +} + +func load(c px.Context, name px.TypedName) (interface{}, bool) { + l := c.Loader() + if name.Authority() != l.NameAuthority() { + return nil, false + } + entry := l.LoadEntry(c, name) + if entry == nil { + if dl, ok := l.(px.DefiningLoader); ok { + dl.SetEntry(name, &loaderEntry{nil, nil}) + } + return nil, false + } + if entry.Value() == nil { + return nil, false + } + return entry.Value(), true +} + +func (l *basicLoader) Discover(c px.Context, predicate func(tn px.TypedName) bool) []px.TypedName { + found := make([]px.TypedName, 0) + for k := range l.namedEntries { + tn := px.TypedNameFromMapKey(k) + if predicate(tn) { + found = append(found, tn) + } + } + sort.Slice(found, func(i, j int) bool { return found[i].MapKey() < found[j].MapKey() }) + return found +} + +func (l *basicLoader) LoadEntry(c px.Context, name px.TypedName) px.LoaderEntry { + return l.GetEntry(name) +} + +func (l *basicLoader) GetEntry(name px.TypedName) px.LoaderEntry { + l.lock.RLock() + v := l.namedEntries[name.MapKey()] + l.lock.RUnlock() + return v +} + +func (l *basicLoader) HasEntry(name px.TypedName) bool { + l.lock.RLock() + e, found := l.namedEntries[name.MapKey()] + l.lock.RUnlock() + return found && e.Value() != nil +} + +func (l *basicLoader) 
SetEntry(name px.TypedName, entry px.LoaderEntry) px.LoaderEntry { + l.lock.Lock() + defer l.lock.Unlock() + + if old, ok := l.namedEntries[name.MapKey()]; ok { + ov := old.Value() + if ov == nil { + *old.(*loaderEntry) = *entry.(*loaderEntry) + return old + } + nv := entry.Value() + if ov == nv { + return old + } + if ea, ok := ov.(px.Equality); ok && ea.Equals(nv, nil) { + return old + } + + if lt, ok := old.Value().(px.Type); ok { + ob := bytes.NewBufferString(``) + lt.ToString(ob, px.PrettyExpanded, nil) + nb := bytes.NewBufferString(``) + nv.(px.Type).ToString(nb, px.PrettyExpanded, nil) + panic(px.Error(px.AttemptToRedefineType, issue.H{`name`: name, `old`: ob.String(), `new`: nb.String()})) + } + panic(px.Error(px.AttemptToRedefine, issue.H{`name`: name})) + } + l.namedEntries[name.MapKey()] = entry + return entry +} + +func (l *basicLoader) NameAuthority() px.URI { + return px.RuntimeNameAuthority +} + +func (l *parentedLoader) Discover(c px.Context, predicate func(tn px.TypedName) bool) []px.TypedName { + found := l.parent.Discover(c, predicate) + added := false + for k := range l.namedEntries { + tn := px.TypedNameFromMapKey(k) + if !l.parent.HasEntry(tn) { + if predicate(tn) { + found = append(found, tn) + added = true + } + } + } + if added { + sort.Slice(found, func(i, j int) bool { return found[i].MapKey() < found[j].MapKey() }) + } + return found +} + +func (l *parentedLoader) HasEntry(name px.TypedName) bool { + return l.parent.HasEntry(name) || l.basicLoader.HasEntry(name) +} + +func (l *parentedLoader) LoadEntry(c px.Context, name px.TypedName) px.LoaderEntry { + entry := l.parent.LoadEntry(c, name) + if entry == nil || entry.Value() == nil { + entry = l.basicLoader.LoadEntry(c, name) + } + return entry +} + +func (l *parentedLoader) NameAuthority() px.URI { + return l.parent.NameAuthority() +} + +func (l *parentedLoader) Parent() px.Loader { + return l.parent +} + +func (l *typeSetLoader) Discover(c px.Context, predicate func(tn px.TypedName) bool) []px.TypedName { + found := make([]px.TypedName, 0) + ts := l.typeSet.Types() + ts.EachKey(func(v px.Value) { + tn := v.(px.TypedName) + if predicate(tn) { + found = append(found, tn) + } + }) + + pf := l.parentedLoader.Discover(c, func(tn px.TypedName) bool { return !ts.IncludesKey(tn) && predicate(tn) }) + if len(pf) > 0 { + found = append(found, pf...) 
+ sort.Slice(found, func(i, j int) bool { return found[i].MapKey() < found[j].MapKey() }) + } + return found +} + +func (l *typeSetLoader) HasEntry(name px.TypedName) bool { + if _, ok := l.typeSet.GetType(name); ok { + return true + } + if l.parentedLoader.HasEntry(name) { + return true + } + if child, ok := name.RelativeTo(l.typeSet.TypedName()); ok { + return l.HasEntry(child) + } + return false +} + +func (l *typeSetLoader) LoadEntry(c px.Context, name px.TypedName) px.LoaderEntry { + if tp, ok := l.typeSet.GetType(name); ok { + return &loaderEntry{tp, nil} + } + entry := l.parentedLoader.LoadEntry(c, name) + if entry == nil { + if child, ok := name.RelativeTo(l.typeSet.TypedName()); ok { + return l.LoadEntry(c, child) + } + entry = &loaderEntry{nil, nil} + l.parentedLoader.SetEntry(name, entry) + } + return entry +} + +func (l *typeSetLoader) SetEntry(name px.TypedName, entry px.LoaderEntry) px.LoaderEntry { + return l.parent.(px.DefiningLoader).SetEntry(name, entry) +} + +func (l *typeSetLoader) TypeSet() px.Type { + return l.typeSet +} diff --git a/vendor/github.com/lyraproj/pcore/loader/smartpath.go b/vendor/github.com/lyraproj/pcore/loader/smartpath.go new file mode 100644 index 0000000..912083d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/loader/smartpath.go @@ -0,0 +1,146 @@ +package loader + +import ( + "path/filepath" + "regexp" + "strings" + + "github.com/lyraproj/pcore/px" +) + +type ( + Instantiator func(ctx px.Context, loader ContentProvidingLoader, tn px.TypedName, sources []string) + + SmartPath interface { + Loader() px.Loader + GenericPath() string + EffectivePath(name px.TypedName) string + Extension() string + RelativePath() string + Namespaces() []px.Namespace + IsMatchMany() bool + PreferredOrigin(i []string) string + TypedNames(nameAuthority px.URI, relativePath string) []px.TypedName + Instantiator() Instantiator + Indexed() bool + SetIndexed() + } + + smartPath struct { + relativePath string + loader px.ModuleLoader + namespaces []px.Namespace + extension string + + // Paths are not supposed to contain module name + moduleNameRelative bool + matchMany bool + instantiator Instantiator + indexed bool + } +) + +func NewSmartPath(relativePath, extension string, + loader px.ModuleLoader, namespaces []px.Namespace, moduleNameRelative, + matchMany bool, instantiator Instantiator) SmartPath { + return &smartPath{relativePath: relativePath, extension: extension, + loader: loader, namespaces: namespaces, moduleNameRelative: moduleNameRelative, + matchMany: matchMany, instantiator: instantiator, indexed: false} +} + +func (p *smartPath) Indexed() bool { + return p.indexed +} + +func (p *smartPath) SetIndexed() { + p.indexed = true +} + +func (p *smartPath) Loader() px.Loader { + return p.loader +} + +func (p *smartPath) EffectivePath(name px.TypedName) string { + nameParts := name.Parts() + if p.moduleNameRelative { + if len(nameParts) < 2 || nameParts[0] != p.loader.ModuleName() { + return `` + } + nameParts = nameParts[1:] + } + + parts := make([]string, 0, len(nameParts)+2) + parts = append(parts, p.loader.Path()) // system, environment, or module root + if p.relativePath != `` { + parts = append(parts, p.relativePath) + } + parts = append(parts, nameParts...) + return filepath.Join(parts...) 
+ p.extension +} + +func (p *smartPath) GenericPath() string { + parts := make([]string, 0) + parts = append(parts, p.loader.Path()) // system, environment, or module root + if p.relativePath != `` { + parts = append(parts, p.relativePath) + } + return filepath.Join(parts...) +} + +func (p *smartPath) Namespaces() []px.Namespace { + return p.namespaces +} + +func (p *smartPath) Extension() string { + return p.extension +} + +func (p *smartPath) RelativePath() string { + return p.relativePath +} + +func (p *smartPath) IsMatchMany() bool { + return p.matchMany +} + +func (p *smartPath) PreferredOrigin(origins []string) string { + if len(origins) == 1 { + return origins[0] + } + if p.namespaces[0] == px.NsTask { + // Prefer .json file if present + for _, origin := range origins { + if strings.HasSuffix(origin, `.json`) { + return origin + } + } + } + return origins[0] +} + +var dropExtension = regexp.MustCompile(`\.[^\\/]*\z`) + +func (p *smartPath) TypedNames(nameAuthority px.URI, relativePath string) []px.TypedName { + parts := strings.Split(relativePath, `/`) + l := len(parts) - 1 + s := parts[l] + if p.extension == `` { + s = dropExtension.ReplaceAllLiteralString(s, ``) + } else { + s = s[:len(s)-len(p.extension)] + } + parts[l] = s + + if p.moduleNameRelative && !(len(parts) == 1 && (s == `init` || s == `init_typeset`)) { + parts = append([]string{p.loader.ModuleName()}, parts...) + } + ts := make([]px.TypedName, len(p.namespaces)) + for i, n := range p.namespaces { + ts[i] = px.NewTypedName2(n, strings.Join(parts, `::`), nameAuthority) + } + return ts +} + +func (p *smartPath) Instantiator() Instantiator { + return p.instantiator +} diff --git a/vendor/github.com/lyraproj/pcore/pcore/entrypoint.go b/vendor/github.com/lyraproj/pcore/pcore/entrypoint.go new file mode 100644 index 0000000..33bfd00 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pcore/entrypoint.go @@ -0,0 +1,104 @@ +package pcore + +import ( + "context" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/pximpl" +) + +// DefineSetting defines a new setting with a given valueType and default +// value. +func DefineSetting(key string, valueType px.Type, dflt px.Value) { + pximpl.InitializeRuntime().DefineSetting(key, valueType, dflt) +} + +// Do executes a given function with an initialized Context instance. +// +// The Context will be parented by the Go context returned by context.Background() +func Do(f func(c px.Context)) { + pximpl.InitializeRuntime().Do(f) +} + +// DoWithParent executes a given function with an initialized Context instance. +// +// The context will be parented by the given Go context +func DoWithParent(parentCtx context.Context, actor func(px.Context)) { + pximpl.InitializeRuntime().DoWithParent(parentCtx, actor) +} + +// EnvironmentLoader returns the loader that finds things declared +// in the environment and its modules. This loader is parented +// by the SystemLoader +func EnvironmentLoader() px.Loader { + return pximpl.InitializeRuntime().EnvironmentLoader() +} + +// Get returns a setting or calls the given defaultProducer +// function if the setting does not exist +func Get(key string, defaultProducer px.Producer) px.Value { + return pximpl.InitializeRuntime().Get(key, defaultProducer) +} + +// Loader returns a loader for module. 
+func Loader(key string) px.Loader { + return pximpl.InitializeRuntime().Loader(key) +} + +func NewContext(loader px.Loader, logger px.Logger) px.Context { + return pximpl.NewContext(loader, logger) +} + +// Logger returns the logger that this instance was created with +func Logger() px.Logger { + return pximpl.InitializeRuntime().Logger() +} + +// Reset clears all settings and loaders, except the static loader +func Reset() { + pximpl.InitializeRuntime().Reset() +} + +// RootContext returns a new Context that is parented by the context.Background() +// and is initialized with a loader that is parented by the EnvironmentLoader. +func RootContext() px.Context { + return pximpl.InitializeRuntime().RootContext() +} + +// Set changes a setting +func Set(key string, value px.Value) { + pximpl.InitializeRuntime().Set(key, value) +} + +// SetLogger changes the logger +func SetLogger(logger px.Logger) { + pximpl.InitializeRuntime().SetLogger(logger) +} + +// SystemLoader returns the loader that finds all built-ins. It's parented +// by a static loader. +func SystemLoader() px.Loader { + return pximpl.InitializeRuntime().SystemLoader() +} + +// Try executes a given function with an initialized Context instance. If an error occurs, +// it is caught and returned. The error returned from the given function is returned when +// no other error is caught. +// +// The Context will be parented by the Go context returned by context.Background() +func Try(actor func(px.Context) error) (err error) { + return pximpl.InitializeRuntime().Try(actor) +} + +// TryWithParent executes a given function with an initialized Context instance. If an error occurs, +// it is caught and returned. The error returned from the given function is returned when no other +// error is caught +// +// The context will be parented by the given Go context +func TryWithParent(parentCtx context.Context, actor func(px.Context) error) (err error) { + return pximpl.InitializeRuntime().TryWithParent(parentCtx, actor) +} + +func WithParent(parent context.Context, loader px.Loader, logger px.Logger, ir px.ImplementationRegistry) px.Context { + return pximpl.WithParent(parent, loader, logger, ir) +} diff --git a/vendor/github.com/lyraproj/pcore/proto/convert.go b/vendor/github.com/lyraproj/pcore/proto/convert.go new file mode 100644 index 0000000..d414070 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/proto/convert.go @@ -0,0 +1,185 @@ +package proto + +import ( + "github.com/lyraproj/data-protobuf/datapb" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +// A Consumer consumes values and produces a datapb.Data +type Consumer interface { + px.ValueConsumer + + // Value returns the created value. Must not be called until the consumption + // of values is complete. 
+ Value() *datapb.Data +} + +type protoConsumer struct { + stack [][]*datapb.Data +} + +// NewProtoConsumer creates a new Consumer +func NewProtoConsumer() Consumer { + return &protoConsumer{stack: make([][]*datapb.Data, 1, 8)} +} + +func (pc *protoConsumer) CanDoBinary() bool { + return true +} + +func (pc *protoConsumer) CanDoComplexKeys() bool { + return true +} + +func (pc *protoConsumer) StringDedupThreshold() int { + return 0 +} + +func (pc *protoConsumer) AddArray(cap int, doer px.Doer) { + top := len(pc.stack) + pc.stack = append(pc.stack, make([]*datapb.Data, 0, cap)) + doer() + els := pc.stack[top] + pc.stack = pc.stack[0:top] + pc.add(&datapb.Data{Kind: &datapb.Data_ArrayValue{ArrayValue: &datapb.DataArray{Values: els}}}) +} + +func (pc *protoConsumer) AddHash(cap int, doer px.Doer) { + top := len(pc.stack) + pc.stack = append(pc.stack, make([]*datapb.Data, 0, cap*2)) + doer() + els := pc.stack[top] + pc.stack = pc.stack[0:top] + + top = len(els) + vs := make([]*datapb.DataEntry, top/2) + for i := 0; i < top; i += 2 { + vs[i/2] = &datapb.DataEntry{Key: els[i], Value: els[i+1]} + } + pc.add(&datapb.Data{Kind: &datapb.Data_HashValue{HashValue: &datapb.DataHash{Entries: vs}}}) +} + +func (pc *protoConsumer) Add(v px.Value) { + pc.add(ToPBData(v)) +} + +func (pc *protoConsumer) AddRef(ref int) { + pc.add(&datapb.Data{Kind: &datapb.Data_Reference{Reference: int64(ref)}}) +} + +func (pc *protoConsumer) Value() *datapb.Data { + bs := pc.stack[0] + if len(bs) > 0 { + return bs[0] + } + return nil +} + +func (pc *protoConsumer) add(value *datapb.Data) { + top := len(pc.stack) - 1 + pc.stack[top] = append(pc.stack[top], value) +} + +func ToPBData(v px.Value) (value *datapb.Data) { + switch v := v.(type) { + case px.Boolean: + value = &datapb.Data{Kind: &datapb.Data_BooleanValue{BooleanValue: v.Bool()}} + case px.Float: + value = &datapb.Data{Kind: &datapb.Data_FloatValue{FloatValue: v.Float()}} + case px.Integer: + value = &datapb.Data{Kind: &datapb.Data_IntegerValue{IntegerValue: v.Int()}} + case px.StringValue: + value = &datapb.Data{Kind: &datapb.Data_StringValue{StringValue: v.String()}} + case *types.UndefValue: + value = &datapb.Data{Kind: &datapb.Data_UndefValue{}} + case *types.Array: + vs := make([]*datapb.Data, v.Len()) + v.EachWithIndex(func(elem px.Value, i int) { + vs[i] = ToPBData(elem) + }) + value = &datapb.Data{Kind: &datapb.Data_ArrayValue{ArrayValue: &datapb.DataArray{Values: vs}}} + case *types.Hash: + vs := make([]*datapb.DataEntry, v.Len()) + v.EachWithIndex(func(elem px.Value, i int) { + entry := elem.(*types.HashEntry) + vs[i] = &datapb.DataEntry{Key: ToPBData(entry.Key()), Value: ToPBData(entry.Value())} + }) + value = &datapb.Data{Kind: &datapb.Data_HashValue{HashValue: &datapb.DataHash{Entries: vs}}} + case *types.Binary: + value = &datapb.Data{Kind: &datapb.Data_BinaryValue{BinaryValue: v.Bytes()}} + default: + value = &datapb.Data{Kind: &datapb.Data_UndefValue{}} + } + return +} + +// ConsumePBData converts a datapb.Data into stream of values that are sent to a +// serialization.ValueConsumer. 
+func ConsumePBData(v *datapb.Data, consumer px.ValueConsumer) { + switch v.Kind.(type) { + case *datapb.Data_BooleanValue: + consumer.Add(types.WrapBoolean(v.GetBooleanValue())) + case *datapb.Data_FloatValue: + consumer.Add(types.WrapFloat(v.GetFloatValue())) + case *datapb.Data_IntegerValue: + consumer.Add(types.WrapInteger(v.GetIntegerValue())) + case *datapb.Data_StringValue: + consumer.Add(types.WrapString(v.GetStringValue())) + case *datapb.Data_UndefValue: + consumer.Add(px.Undef) + case *datapb.Data_ArrayValue: + av := v.GetArrayValue().GetValues() + consumer.AddArray(len(av), func() { + for _, elem := range av { + ConsumePBData(elem, consumer) + } + }) + case *datapb.Data_HashValue: + av := v.GetHashValue().Entries + consumer.AddHash(len(av), func() { + for _, val := range av { + ConsumePBData(val.Key, consumer) + ConsumePBData(val.Value, consumer) + } + }) + case *datapb.Data_BinaryValue: + consumer.Add(types.WrapBinary(v.GetBinaryValue())) + case *datapb.Data_Reference: + consumer.AddRef(int(v.GetReference())) + default: + consumer.Add(px.Undef) + } +} + +func FromPBData(v *datapb.Data) (value px.Value) { + switch v.Kind.(type) { + case *datapb.Data_BooleanValue: + value = types.WrapBoolean(v.GetBooleanValue()) + case *datapb.Data_FloatValue: + value = types.WrapFloat(v.GetFloatValue()) + case *datapb.Data_IntegerValue: + value = types.WrapInteger(v.GetIntegerValue()) + case *datapb.Data_StringValue: + value = types.WrapString(v.GetStringValue()) + case *datapb.Data_UndefValue: + value = px.Undef + case *datapb.Data_ArrayValue: + av := v.GetArrayValue().GetValues() + vs := make([]px.Value, len(av)) + for i, elem := range av { + vs[i] = FromPBData(elem) + } + value = types.WrapValues(vs) + case *datapb.Data_HashValue: + av := v.GetHashValue().Entries + vs := make([]*types.HashEntry, len(av)) + for i, val := range av { + vs[i] = types.WrapHashEntry(FromPBData(val.Key), FromPBData(val.Value)) + } + value = types.WrapHash(vs) + default: + value = px.Undef + } + return +} diff --git a/vendor/github.com/lyraproj/pcore/px/collection.go b/vendor/github.com/lyraproj/pcore/px/collection.go new file mode 100644 index 0000000..03f1fcc --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/collection.go @@ -0,0 +1,90 @@ +package px + +// ToArray returns a List consisting of the given elements +var ToArray func(elements []Value) List + +// All passes each of the given elements to the given predicate. It returns true if the predicate +// never returns false. +func All(elements []Value, predicate Predicate) bool { + for _, elem := range elements { + if !predicate(elem) { + return false + } + } + return true +} + +// Any passes each of the given elements to the given predicate until the predicate returns true. The +// function then returns true. The function returns false when no predicate returns true. +func Any(elements []Value, predicate Predicate) bool { + for _, elem := range elements { + if predicate(elem) { + return true + } + } + return false +} + +// Each passes each of the given elements to the given consumer +func Each(elements []Value, consumer Consumer) { + for _, elem := range elements { + consumer(elem) + } +} + +// Find passes each of the given elements to the given predicate and returns the first element for +// which the predicate returns true together with a boolean true. The function returns nil, false +// when no predicate returns true. 
+func Find(elements []Value, predicate Predicate) (Value, bool) { + for _, elem := range elements { + if predicate(elem) { + return elem, true + } + } + return nil, false +} + +// Map passes each of the given elements to the given mapper and builds a new slice from the +// mapper return values. The new slice is returned. +func Map(elements []Value, mapper Mapper) []Value { + result := make([]Value, len(elements)) + for idx, elem := range elements { + result[idx] = mapper(elem) + } + return result +} + +// Reduce combines all elements of the given slice by applying a binary operation. For each +// element in teh slice, the reductor is passed an accumulator value (memo) and the element. +// The result becomes the new value for the memo. At the end of iteration, the final memo +// is returned. +func Reduce(elements []Value, memo Value, reductor BiMapper) Value { + for _, elem := range elements { + memo = reductor(memo, elem) + } + return memo +} + +// Select passes each of the given elements to the given predicate and returns a new slice +// containing those elements for which predicate returned true. +func Select(elements []Value, predicate Predicate) []Value { + result := make([]Value, 0, 8) + for _, elem := range elements { + if predicate(elem) { + result = append(result, elem) + } + } + return result +} + +// Select passes each of the given elements to the given predicate and returns a new slice +// containing those elements for which predicate returned false. +func Reject(elements []Value, predicate Predicate) []Value { + result := make([]Value, 0, 8) + for _, elem := range elements { + if !predicate(elem) { + result = append(result, elem) + } + } + return result +} diff --git a/vendor/github.com/lyraproj/pcore/px/collector.go b/vendor/github.com/lyraproj/pcore/px/collector.go new file mode 100644 index 0000000..000aaec --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/collector.go @@ -0,0 +1,16 @@ +package px + +// A Collector receives streaming events and produces an Value +type Collector interface { + ValueConsumer + + // PopLast pops the last value from the BasicCollector and returns it + PopLast() Value + + // Value returns the created value. Must not be called until the consumption + // of values is complete. + Value() Value +} + +// NewCollector returns a new Collector instance +var NewCollector func() Collector diff --git a/vendor/github.com/lyraproj/pcore/px/context.go b/vendor/github.com/lyraproj/pcore/px/context.go new file mode 100644 index 0000000..73cc84d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/context.go @@ -0,0 +1,206 @@ +package px + +import ( + "context" + "reflect" + "runtime" + + "github.com/lyraproj/pcore/threadlocal" + + "github.com/lyraproj/issue/issue" +) + +const PuppetContextKey = `puppet.context` + +type VariableState int + +const NotFound = VariableState(0) +const Global = VariableState(1) +const Local = VariableState(2) + +// VariableStates is implemented by an evaluation scope that wishes to differentiate between +// local and global states. The difference is significant for modules like Hiera that relies +// on that global variables remain unchanged during evaluation. +type VariableStates interface { + // State returns NotFound, Global, or Local for the given name. + State(name string) VariableState +} + +// An Context holds all state during evaluation. Since it contains the stack, each +// thread of execution must use a context of its own. It's expected that multiple +// contexts share common parents for scope and loaders. 
+// +type Context interface { + context.Context + + // Delete deletes the given key from the context variable map + Delete(key string) + + // DefiningLoader returns a Loader that can receive new definitions + DefiningLoader() DefiningLoader + + // DoWithLoader assigns the given loader to the receiver and calls the doer. The original loader is + // restored before this call returns. + DoWithLoader(loader Loader, doer Doer) + + // Error creates a Reported with the given issue code, location, and arguments + // Typical use is to panic with the returned value + Error(location issue.Location, issueCode issue.Code, args issue.H) issue.Reported + + // Fail creates a Reported with the EVAL_FAILURE issue code, location from stack top, + // and the given message + // Typical use is to panic with the returned value + Fail(message string) issue.Reported + + // Fork a new context from this context. The fork will have the same scope, + // loaders, and logger as this context. The stack and the map of context variables will + // be shallow copied + Fork() Context + + // Get returns the context variable with the given key together with a bool to indicate + // if the key was found + Get(key string) (interface{}, bool) + + // ImplementationRegistry returns the registry that holds mappings between Type and reflect.Type + ImplementationRegistry() ImplementationRegistry + + // Loader returns the loader of the receiver. + Loader() Loader + + // Logger returns the logger of the receiver. This will be the same logger as the + // logger of the evaluator. + Logger() Logger + + // ParseTypeValue parses and evaluates the given Value into a Type. It will panic with + // an issue.Reported unless the parsing was successful and the result is evaluates + // to a Type + ParseTypeValue(str Value) Type + + // ParseType parses and evaluates the given string into a Type. It will panic with + // an issue.Reported unless the parsing was successful and the result is evaluates + // to a Type + ParseType(str string) Type + + // Reflector returns a Reflector capable of converting to and from reflected values + // and types + Reflector() Reflector + + // Scope returns an optional variable scope. This method is intended to be used when + // the context is used as an evaluation context. The method returns an empty OrderedMap + // by default. + Scope() Keyed + + // Set adds or replaces the context variable for the given key with the given value + Set(key string, value interface{}) + + // Permanently change the loader of this context + SetLoader(loader Loader) + + // Stack returns the full stack. The returned value must not be modified. + Stack() []issue.Location + + // StackPop pops the last pushed location from the stack + StackPop() + + // StackPush pushes a location onto the stack. The location is typically the + // currently evaluated expression. + StackPush(location issue.Location) + + // StackTop returns the top of the stack + StackTop() issue.Location +} + +// AddTypes Makes the given types known to the loader appointed by the Context +func AddTypes(c Context, types ...Type) { + l := c.DefiningLoader() + rts := make([]ResolvableType, 0, len(types)) + tss := make([]TypeSet, 0, 4) + for _, t := range types { + // A TypeSet should not be added until it is resolved since it uses its own + // loader for the resolution. 
+ if ts, ok := t.(TypeSet); ok { + tss = append(tss, ts) + } else { + l.SetEntry(NewTypedName(NsType, t.Name()), NewLoaderEntry(t, nil)) + } + if rt, ok := t.(ResolvableType); ok { + rts = append(rts, rt) + } + } + ResolveTypes(c, rts...) + for _, ts := range tss { + l.SetEntry(NewTypedName(NsType, ts.Name()), NewLoaderEntry(ts, nil)) + } +} + +// Call calls a function known to the loader of the Context with arguments and an optional block. +func Call(c Context, name string, args []Value, block Lambda) Value { + tn := NewTypedName2(`function`, name, c.Loader().NameAuthority()) + if f, ok := Load(c, tn); ok { + return f.(Function).Call(c, block, args...) + } + panic(issue.NewReported(UnknownFunction, issue.SeverityError, issue.H{`name`: tn.String()}, c.StackTop())) +} + +// DoWithContext sets the given context to be the current context of the executing Go routine, calls +// the actor, and ensures that the old Context is restored once that call ends normally by panic. +func DoWithContext(ctx Context, actor func(Context)) { + if saveCtx, ok := threadlocal.Get(PuppetContextKey); ok { + defer func() { + threadlocal.Set(PuppetContextKey, saveCtx) + }() + } else { + threadlocal.Init() + } + threadlocal.Set(PuppetContextKey, ctx) + actor(ctx) +} + +// CurrentContext returns the current runtime context or panics if no such context has been assigned +func CurrentContext() Context { + if ctx, ok := threadlocal.Get(PuppetContextKey); ok { + return ctx.(Context) + } + panic(issue.NewReported(NoCurrentContext, issue.SeverityError, issue.NoArgs, 0)) +} + +// Fork calls the given function in a new go routine. The given context is forked and becomes +// the CurrentContext for that routine. +func Fork(c Context, doer ContextDoer) { + go func() { + defer threadlocal.Cleanup() + threadlocal.Init() + cf := c.Fork() + threadlocal.Set(PuppetContextKey, cf) + doer(cf) + }() +} + +// Go calls the given function in a new go routine. The CurrentContext is forked and becomes +// the CurrentContext for that routine. +func Go(f ContextDoer) { + Fork(CurrentContext(), f) +} + +// StackTop returns the top of the stack contained in the current context or a location determined +// as the Go function that was the caller of the caller of this function, i.e. runtime.Caller(2). +func StackTop() issue.Location { + if ctx, ok := threadlocal.Get(PuppetContextKey); ok { + return ctx.(Context).StackTop() + } + _, file, line, _ := runtime.Caller(2) + return issue.NewLocation(file, line, 0) +} + +// ResolveResolvables resolves types, constructions, or functions that has been recently added by +// init() functions +var ResolveResolvables func(c Context) + +// Resolve +var ResolveTypes func(c Context, types ...ResolvableType) + +// Wrap converts the given value into a Value +var Wrap func(c Context, v interface{}) Value + +// WrapReflected converts the given reflect.Value into a Value +var WrapReflected func(c Context, v reflect.Value) Value diff --git a/vendor/github.com/lyraproj/pcore/px/equality.go b/vendor/github.com/lyraproj/pcore/px/equality.go new file mode 100644 index 0000000..251d662 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/equality.go @@ -0,0 +1,149 @@ +package px + +import "reflect" + +type ( + visit struct { + a1 interface{} + a2 interface{} + } + + // Guard helps tracking endless recursion. The comparison algorithm assumes that all checks in progress + // are true when it encounters them again. Visited comparisons are stored in a map + // indexed by visit. 
+ // + // (algorithm copied from golang reflect/deepequal.go) + Guard map[visit]bool + + // Equality is implemented by values that can be compared for equality with other values + Equality interface { + // Returns true if the receiver is equal to the given value, false otherwise. + Equals(value interface{}, guard Guard) bool + } +) + +// Seen returns true if the combination of the given values has been seen by this guard and if not, registers +// the combination so that true is returned the next time the same values are given. +func (g Guard) Seen(a, b interface{}) bool { + v := visit{a, b} + if _, ok := g[v]; ok { + return true + } + g[v] = true + return false +} + +// Equals will compare two values for equality. If the first value implements the Equality interface, then +// the that interface is used. If the first value is a primitive, then the primitive will be compared using +// ==. The default behavior is to delegate to reflect.DeepEqual. +// +// The comparison is optionally guarded from endless recursion by passing a Guard instance +func Equals(a interface{}, b interface{}, g Guard) bool { + switch a := a.(type) { + case nil: + return b == nil + case Equality: + return a.(Equality).Equals(b, g) + case bool: + bs, ok := b.(bool) + return ok && a == bs + case int: + bs, ok := b.(int) + return ok && a == bs + case int64: + bs, ok := b.(int64) + return ok && a == bs + case float64: + bs, ok := b.(float64) + return ok && a == bs + case string: + bs, ok := b.(string) + return ok && a == bs + default: + return reflect.DeepEqual(a, b) + } +} + +// IncludesAll returns true if the given slice a contains all values +// in the given slice b. +func IncludesAll(a interface{}, b interface{}, g Guard) bool { + ra := reflect.ValueOf(a) + la := ra.Len() + rb := reflect.ValueOf(b) + lb := rb.Len() + for ia := 0; ia < la; ia++ { + v := ra.Index(ia).Interface() + found := false + for ib := 0; ib < lb; ib++ { + ov := rb.Index(ib).Interface() + if Equals(ov, v, g) { + found = true + break + } + } + if !found { + return false + } + } + return true +} + +// IndexFrom returns the index in the given slice of the first occurrence of an +// element for which Equals with the given value starting the comparison at the +// given startPos. The value of -1 is returned when no such element is found. +func IndexFrom(slice interface{}, value interface{}, startPos int, g Guard) int { + ra := reflect.ValueOf(slice) + la := ra.Len() + for idx := startPos; idx < la; idx++ { + if Equals(ra.Index(idx).Interface(), value, g) { + return idx + } + } + return -1 +} + +// ReverseIndexFrom returns the index in the given slice of the last occurrence of an +// element for which Equals with the given value between position zero and the given +// endPos. If endPos is -1 it will be set to the length of the slice. +// +// The value of -1 is returned when no such element is found. 
+func ReverseIndexFrom(slice interface{}, value interface{}, endPos int, g Guard) int { + ra := reflect.ValueOf(slice) + top := ra.Len() + idx := top - 1 + if endPos >= 0 && endPos < idx { + idx = endPos + } + for ; idx >= 0; idx-- { + if Equals(ra.Index(idx).Interface(), value, g) { + return idx + } + } + return -1 +} + +// PuppetEquals is like Equals but: +// int and float values with same value are considered equal +// string comparisons are case insensitive +var PuppetEquals func(a, b Value) bool + +// PuppetMatch tests if the LHS matches the RHS pattern expression and returns a Boolean result +// +// When the RHS is a Type: +// +// the match is true if the LHS is an instance of the type +// No match variables are set in this case. +// +// When the RHS is a SemVerRange: +// +// the match is true if the LHS is a SemVer, and the version is within the range +// the match is true if the LHS is a String representing a SemVer, and the version is within the range +// an error is raised if the LHS is neither a String with a valid SemVer representation, nor a SemVer. +// otherwise the result is false (not in range). +// +// When the RHS is not a Type: +// +// If the RHS evaluates to a String a new Regular Expression is created with the string value as its pattern. +// If the RHS is not a Regexp (after string conversion) an error is raised. +// If the LHS is not a String an error is raised. (Note, Numeric values are not converted to String automatically because of unknown radix). +var PuppetMatch func(lhs, rhs Value) bool diff --git a/vendor/github.com/lyraproj/pcore/px/format.go b/vendor/github.com/lyraproj/pcore/px/format.go new file mode 100644 index 0000000..cd5d30b --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/format.go @@ -0,0 +1,72 @@ +package px + +import ( + "io" + "regexp" +) + +type ( + FormatMap OrderedMap + + Format interface { + Value + HasStringFlags() bool + ApplyStringFlags(b io.Writer, str string, quoted bool) + Width() int + Precision() int + FormatChar() byte + Plus() byte + IsAlt() bool + IsLeft() bool + IsZeroPad() bool + LeftDelimiter() byte + ContainerFormats() FormatMap + Separator(dflt string) string + Separator2(dflt string) string + OrigFormat() string + ReplaceFormatChar(c byte) Format + WithoutWidth() Format + } + + Indentation interface { + Breaks() bool + Increase(indenting bool) Indentation + IsFirst() bool + IsIndenting() bool + Indenting(indenting bool) Indentation + Level() int + Padding() string + Subsequent() Indentation + } + + FormatContext interface { + Indentation() Indentation + FormatMap() FormatMap + Property(key string) (string, bool) + Properties() map[string]string + SetProperty(key, value string) + Subsequent() FormatContext + UnsupportedFormat(t Type, supportedFormats string, actualFormat Format) error + WithProperties(properties map[string]string) FormatContext + } +) + +var FormatPattern = regexp.MustCompile(`\A%([\s\[+#0{<(|-]*)([1-9][0-9]*)?(?:\.([0-9]+))?([a-zA-Z])\z`) + +var DefaultFormat Format +var DefaultFormatContext FormatContext +var Pretty FormatContext +var PrettyExpanded FormatContext + +var NewFormat func(format string) Format +var NewIndentation func(indenting bool, level int) Indentation +var NewFormatContext func(t Type, format Format, indentation Indentation) FormatContext +var NewFormatContext2 func(indentation Indentation, formatMap FormatMap, properties map[string]string) FormatContext +var NewFormatContext3 func(value Value, format Value) (FormatContext, error) + +func GetFormat(f FormatMap, t Type) Format { + if 
v, ok := f.Find(func(ev Value) bool { return IsAssignable(ev.(MapEntry).Key().(Type), t) }); ok { + return v.(MapEntry).Value().(Format) + } + return DefaultFormat +} diff --git a/vendor/github.com/lyraproj/pcore/px/function.go b/vendor/github.com/lyraproj/pcore/px/function.go new file mode 100644 index 0000000..583861d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/function.go @@ -0,0 +1,126 @@ +package px + +type ( + InvokableValue interface { + Value + + Call(c Context, block Lambda, args ...Value) Value + } + + Parameter interface { + Value + + Name() string + + Type() Type + + HasValue() bool + + Value() Value + + CapturesRest() bool + } + + Lambda interface { + InvokableValue + + Parameters() []Parameter + + Signature() Signature + } + + Function interface { + InvokableValue + + Dispatchers() []Lambda + + Name() string + } + + ResolvableFunction interface { + Name() string + Resolve(c Context) Function + } + + DispatchFunction func(c Context, args []Value) Value + + DispatchFunctionWithBlock func(c Context, args []Value, block Lambda) Value + + LocalTypes interface { + Type(name string, decl string) + Type2(name string, tp Type) + } + + // Dispatch is a builder to build function dispatchers (Lambdas) + Dispatch interface { + // Name returns the name of the owner function + Name() string + + Param(typeString string) + Param2(puppetType Type) + + OptionalParam(typeString string) + OptionalParam2(puppetType Type) + + RepeatedParam(typeString string) + RepeatedParam2(puppetType Type) + + RequiredRepeatedParam(typeString string) + RequiredRepeatedParam2(puppetType Type) + + Block(typeString string) + Block2(puppetType Type) + + OptionalBlock(typeString string) + OptionalBlock2(puppetType Type) + + Returns(typeString string) + Returns2(puppetType Type) + + Function(f DispatchFunction) + Function2(f DispatchFunctionWithBlock) + } + + Signature interface { + Type + + CallableWith(args []Value, block Lambda) bool + + ParametersType() Type + + ReturnType() Type + + // BlockType returns a Callable, Optional[Callable], or nil to denote if a + // block is required, optional, or invalid + BlockType() Type + + // BlockName will typically return the string "block" + BlockName() string + + // ParameterNames returns the names of the parameters. Will return the strings "1", "2", etc. + // for unnamed parameters. 
+ ParameterNames() []string + } + + DispatchCreator func(db Dispatch) + + LocalTypesCreator func(lt LocalTypes) +) + +var BuildFunction func(name string, localTypes LocalTypesCreator, creators []DispatchCreator) ResolvableFunction + +var NewGoFunction func(name string, creators ...DispatchCreator) + +var NewGoFunction2 func(name string, localTypes LocalTypesCreator, creators ...DispatchCreator) + +var NewGoConstructor func(typeName string, creators ...DispatchCreator) + +var MakeGoAllocator func(allocFunc DispatchFunction) Lambda + +var NewGoConstructor2 func(typeName string, localTypes LocalTypesCreator, creators ...DispatchCreator) + +var NewParameter func(name string, typ Type, value Value, capturesRest bool) Parameter + +var MakeGoConstructor func(typeName string, creators ...DispatchCreator) ResolvableFunction + +var MakeGoConstructor2 func(typeName string, localTypes LocalTypesCreator, creators ...DispatchCreator) ResolvableFunction diff --git a/vendor/github.com/lyraproj/pcore/px/functional.go b/vendor/github.com/lyraproj/pcore/px/functional.go new file mode 100644 index 0000000..607cd4e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/functional.go @@ -0,0 +1,29 @@ +package px + +type ( + Doer func() + + ContextDoer func(c Context) + + Consumer func(value Value) + + EntryMapper func(value MapEntry) MapEntry + + IndexedConsumer func(value Value, index int) + + SliceConsumer func(value List) + + Mapper func(value Value) Value + + Predicate func(value Value) bool + + Producer func() Value + + TypeMapper func(value Type) Value + + BiConsumer func(v1 Value, v2 Value) + + BiPredicate func(v1 Value, v2 Value) bool + + BiMapper func(v1 Value, v2 Value) Value +) diff --git a/vendor/github.com/lyraproj/pcore/px/issues.go b/vendor/github.com/lyraproj/pcore/px/issues.go new file mode 100644 index 0000000..4327aba --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/issues.go @@ -0,0 +1,306 @@ +package px + +import "github.com/lyraproj/issue/issue" + +const ( + AttemptToRedefine = `PCORE_ATTEMPT_TO_REDEFINE` + AttemptToRedefineType = `PCORE_ATTEMPT_TO_REDEFINE_TYPE` + AttemptToSetUnsettable = `PCORE_ATTEMPT_TO_SET_UNSETTABLE` + AttemptToSetWrongKind = `PCORE_ATTEMPT_TO_SET_WRONG_KIND` + AttributeHasNoValue = `PCORE_ATTRIBUTE_HAS_NO_VALUE` + AttributeNotFound = `PCORE_ATTRIBUTE_NOT_FOUND` + BadTypeString = `PCORE_BAD_TYPE_STRING` + BothConstantAndAttribute = `PCORE_BOTH_CONSTANT_AND_ATTRIBUTE` + ConstantRequiresValue = `PCORE_CONSTANT_REQUIRES_VALUE` + ConstantWithFinal = `PCORE_CONSTANT_WITH_FINAL` + CtorNotFound = `PCORE_CTOR_NOT_FOUND` + DuplicateKey = `PCORE_DUPLICATE_KEY` + EmptyTypeParameterList = `PCORE_EMPTY_TYPE_PARAMETER_LIST` + EqualityAttributeNotFound = `PCORE_EQUALITY_ATTRIBUTE_NOT_FOUND` + EqualityNotAttribute = `PCORE_EQUALITY_NOT_ATTRIBUTE` + EqualityOnConstant = `PCORE_EQUALITY_ON_CONSTANT` + EqualityRedefined = `PCORE_EQUALITY_REDEFINED` + Failure = `PCORE_FAILURE` + FileNotFound = `PCORE_FILE_NOT_FOUND` + FileReadDenied = `PCORE_FILE_READ_DENIED` + GoFunctionError = `PCORE_GO_FUNCTION_ERROR` + GoRuntimeTypeWithoutGoType = `PCORE_GO_RUNTIME_TYPE_WITHOUT_GO_TYPE` + IllegalArgument = `PCORE_ILLEGAL_ARGUMENT` + IllegalArguments = `PCORE_ILLEGAL_ARGUMENTS` + IllegalArgumentCount = `PCORE_ILLEGAL_ARGUMENT_COUNT` + IllegalArgumentType = `PCORE_ILLEGAL_ARGUMENT_TYPE` + IllegalKindValueCombination = `PCORE_ILLEGAL_KIND_VALUE_COMBINATION` + IllegalObjectInheritance = `PCORE_ILLEGAL_OBJECT_INHERITANCE` + ImplAlreadyRegistered = `PCORE_IMPL_ALREADY_REGISTERED` + 
InstanceDoesNotRespond = `PCORE_INSTANCE_DOES_NOT_RESPOND` + ImpossibleOptional = `PCORE_IMPOSSIBLE_OPTIONAL` + InvalidCharactersInName = `PCORE_INVALID_CHARACTERS_IN_NAME` + InvalidJson = `PCORE_INVALID_JSON` + InvalidRegexp = `PCORE_INVALID_REGEXP` + InvalidSourceForGet = `PCORE_INVALID_SOURCE_FOR_GET` + InvalidSourceForSet = `PCORE_INVALID_SOURCE_FOR_SET` + InvalidStringFormatSpec = `PCORE_INVALID_STRING_FORMAT_SPEC` + InvalidStringFormatDelimiter = `PCORE_INVALID_STRING_FORMAT_DELIMITER` + InvalidStringFormatRepeatedFlag = `PCORE_INVALID_STRING_FORMAT_REPEATED_FLAG` + InvalidTimezone = `PCORE_INVALID_TIMEZONE` + InvalidTypedNameMapKey = `PCORE_INVALID_TYPED_NAME_MAP_KEY` + InvalidUri = `PCORE_INVALID_URI` + InvalidVersion = `PCORE_INVALID_VERSION` + InvalidVersionRange = `PCORE_INVALID_VERSION_RANGE` + IsDirectory = `PCORE_IS_DIRECTORY` + MatchNotRegexp = `PCORE_MATCH_NOT_REGEXP` + MatchNotString = `PCORE_MATCH_NOT_STRING` + MemberNameConflict = `PCORE_MEMBER_NAME_CONFLICT` + MissingRequiredAttribute = `PCORE_MISSING_REQUIRED_ATTRIBUTE` + MissingTypeParameter = `PCORE_MISSING_TYPE_PARAMETER` + NilArrayElement = `NIL_ARRAY_ELEMENT` + NilHashKey = `NIL_HASH_KEY` + NilHashValue = `NIL_HASH_VALUE` + NoAttributeReader = `PCORE_NO_ATTRIBUTE_READER` + NoCurrentContext = `PCORE_NO_CURRENT_CONTEXT` + NoDefinition = `PCORE_NO_DEFINITION` + NotExpectedTypeset = `PCORE_NOT_EXPECTED_TYPESET` + NotInteger = `PCORE_NOT_INTEGER` + NotParameterizedType = `PCORE_NOT_PARAMETERIZED_TYPE` + NotSemver = `PCORE_NOT_SEMVER` + NotSupportedByGoTimeLayout = `PCORE_NOT_SUPPORTED_BY_GO_TIME_LAYOUT` + ObjectInheritsSelf = `PCORE_OBJECT_INHERITS_SELF` + OverrideMemberMismatch = `PCORE_OVERRIDE_MEMBER_MISMATCH` + OverrideTypeMismatch = `PCORE_OVERRIDE_TYPE_MISMATCH` + OverriddenNotFound = `PCORE_OVERRIDDEN_NOT_FOUND` + OverrideOfFinal = `PCORE_OVERRIDE_OF_FINAL` + OverrideIsMissing = `PCORE_OVERRIDE_IS_MISSING` + ParseError = `PCORE_PARSE_ERROR` + SerializationAttributeNotFound = `PCORE_SERIALIZATION_ATTRIBUTE_NOT_FOUND` + SerializationNotAttribute = `PCORE_SERIALIZATION_NOT_ATTRIBUTE` + SerializationBadKind = `PCORE_SERIALIZATION_BAD_KIND` + SerializationDefaultConvertedToString = `PCORE_SERIALIZATION_DEFAULT_CONVERTED_TO_STRING` + SerializationRequiredAfterOptional = `PCORE_SERIALIZATION_REQUIRED_AFTER_OPTIONAL` + SerializationUnknownConvertedToString = `PCORE_SERIALIZATION_UNKNOWN_CONVERTED_TO_STRING` + TimespanBadFormatSpec = `PCORE_TIMESPAN_BAD_FORMAT_SPEC` + CannotBeParsed = `PCORE_TIMESPAN_CANNOT_BE_PARSED` + TimespanFormatSpecNotHigher = `PCORE_TIMESPAN_FORMAT_SPEC_NOT_HIGHER` + TimestampCannotBeParsed = `PCORE_TIMESTAMP_CANNOT_BE_PARSED` + TimestampTzAmbiguity = `PCORE_TIMESTAMP_TZ_AMBIGUITY` + TypeMismatch = `PCORE_TYPE_MISMATCH` + TypesetAliasCollides = `PCORE_TYPESET_ALIAS_COLLIDES` + TypesetMissingNameAuthority = `PCORE_TYPESET_MISSING_NAME_AUTHORITY` + TypesetReferenceBadType = `PCORE_TYPESET_REFERENCE_BAD_TYPE` + TypesetReferenceDuplicate = `PCORE_TYPESET_REFERENCE_DUPLICATE` + TypesetReferenceMismatch = `PCORE_TYPESET_REFERENCE_MISMATCH` + TypesetReferenceOverlap = `PCORE_TYPESET_REFERENCE_OVERLAP` + TypesetReferenceUnresolved = `PCORE_TYPESET_REFERENCE_UNRESOLVED` + UnableToDeserializeType = `PCORE_UNABLE_TO_DESERIALIZE_TYPE` + UnableToDeserializeValue = `PCORE_UNABLE_TO_DESERIALIZE_VALUE` + UnableToReadFile = `PCORE_UNABLE_TO_READ_FILE` + UnhandledPcoreVersion = `PCORE_UNHANDLED_PCORE_VERSION` + UnknownFunction = `PCORE_UNKNOWN_FUNCTION` + UnknownVariable = `PCORE_UNKNOWN_VARIABLE` + 
UnreflectableType = `PCORE_UNREFLECTABLE_TYPE` + UnreflectableValue = `PCORE_UNREFLECTABLE_VALUE` + UnresolvedType = `PCORE_UNRESOLVED_TYPE` + UnresolvedTypeOf = `PCORE_UNRESOLVED_TYPE_OF` + UnsupportedStringFormat = `PCORE_UNSUPPORTED_STRING_FORMAT` + WrongDefinition = `PCORE_WRONG_DEFINITION` +) + +func init() { + issue.Hard(AttemptToRedefine, `attempt to redefine %{name}`) + + issue.Hard(AttemptToRedefineType, "attempt to redefine type %{name}. Old:\n%{old}\nNew:\n%{new}\n") + + issue.Hard(AttemptToSetUnsettable, `attempt to set a value of kind %{kind} in an unsettable reflect.Value`) + + issue.Hard(AttemptToSetWrongKind, `attempt to assign a value of kind %{expected} to a reflect.Value of kind %{actual}`) + + issue.Hard(AttributeHasNoValue, `%{label} has no value`) + + issue.Hard2(AttributeNotFound, `%{type} has no attribute named %{name}`, issue.HF{`type`: issue.UcAnOrA}) + + issue.Hard(BadTypeString, `%{label} type string '%{string}' cannot be parsed into a data type: %{detail}`) + + issue.Hard(BothConstantAndAttribute, `attribute %{label}[%{key}] is defined as both a constant and an attribute`) + + issue.Hard(ConstantRequiresValue, `%{label} of kind 'constant' requires a value`) + + issue.Hard(CtorNotFound, `Unable to load the constructor for data type '%{type}'`) + + // TRANSLATOR 'final => false' is puppet syntax and should not be translated + issue.Hard(ConstantWithFinal, `%{label} of kind 'constant' cannot be combined with final => false`) + + issue.Hard(DuplicateKey, `The key '%{key}' is declared more than once`) + + issue.Hard(EmptyTypeParameterList, `The %{label}-Type cannot be parameterized using an empty parameter list`) + + issue.Hard(EqualityAttributeNotFound, `%{label} equality is referencing non existent attribute '%{attribute}'`) + + issue.Hard(EqualityNotAttribute, `{label} equality is referencing %{attribute}. Only attribute references are allowed`) + + issue.Hard(EqualityOnConstant, `%{label} equality is referencing constant %{attribute}.`) + + issue.Hard(EqualityRedefined, `%{label} equality is referencing %{attribute} which is included in equality of %{including_parent}`) + + issue.Hard(Failure, `%{message}`) + + issue.Hard(FileNotFound, `File '%{path}' does not exist`) + + issue.Hard(FileReadDenied, `Insufficient permissions to read '%{path}'`) + + issue.Hard(GoFunctionError, `Go function %{name} returned error '%{error}'`) + + issue.Hard(GoRuntimeTypeWithoutGoType, `Attempt to create a Runtime['go', '%{name}'] without providing a Go type`) + + issue.Hard(IllegalArgument, `invalid argument for function %{function}, argument %{index}: %{arg}`) + + issue.Hard(IllegalArgumentCount, `invalid argument count for function %{function}. expected '%{expected}', got %{actual}`) + + issue.Hard(IllegalArguments, `invalid arguments for function %{function}: %{message}`) + + issue.Hard(IllegalArgumentType, `invalid argument type for function %{function}, argument %{index}. expected '%{expected}', got %{actual}`) + + issue.Hard(IllegalKindValueCombination, `%{label} of kind '%{kind}' cannot be combined with an attribute value`) + + issue.Hard(IllegalObjectInheritance, `An Object can only inherit another Object or alias thereof. The %{label} inherits from a %{type}.`) + + issue.Hard(ImplAlreadyRegistered, `The type %{type} is already present in the implementation registry`) + + issue.Hard(IsDirectory, `The path '%{path}' is a directory`) + + issue.Hard(ImpossibleOptional, `The field %{name} cannot have the type %{type}. 
Optional attributes must be pointers`) + + issue.Hard(InstanceDoesNotRespond, `An instance of %{type} does not respond to %{message}`) + + issue.Hard(InvalidCharactersInName, `Name '%{name} contains invalid characters. Must start with letter and only contain letters, digits, and underscore'`) + + issue.Hard(InvalidJson, `Unable to parse JSON from '%{path}': %{detail}`) + + issue.Hard(InvalidRegexp, `Cannot compile regular expression '%{pattern}': %{detail}`) + + issue.Hard2(InvalidSourceForGet, `Cannot create a reflect.Value from %{type}`, issue.HF{`type`: issue.AnOrA}) + + issue.Hard2(InvalidSourceForSet, `Cannot set a reflect.Value from %{type}`, issue.HF{`type`: issue.AnOrA}) + + issue.Hard(InvalidStringFormatSpec, `The string format '%{format}' is not a valid format on the form '%%.'`) + + issue.Hard(InvalidStringFormatDelimiter, `Only one of the delimiters [ { ( < | can be given in the string format flags, got '%c'`) + + issue.Hard(InvalidStringFormatRepeatedFlag, `The same flag can only be used once in a string format, got '%{format}'`) + + issue.Hard(InvalidTimezone, `Unable to load timezone '%{zone}': %{detail}`) + + issue.Hard(InvalidTypedNameMapKey, `The key '%{mapKey}' does not represent a valid TypedName`) + + issue.Hard(InvalidVersion, `Cannot parse a semantic version from string '%{str}': '%{detail}'`) + + issue.Hard(InvalidVersionRange, `Cannot parse a semantic version range from string '%{str}': '%{detail}'`) + + issue.Hard(InvalidUri, `Cannot parse an URI from string '%{str}': '%{detail}'`) + + issue.Hard(MatchNotRegexp, `Can not convert right match operand to a regular expression. Caused by '%{detail}'`) + + issue.Hard2(MatchNotString, `"Left match operand must result in a String value. Got %{left}`, issue.HF{`left`: issue.AnOrA}) + + issue.Hard(MemberNameConflict, `%{label} conflicts with attribute with the same name`) + + issue.Hard(MissingRequiredAttribute, `%{label} requires a value but none was provided`) + + issue.Hard(MissingTypeParameter, `'%{name}' is not a known type parameter for %{label}-Type`) + + issue.Hard(ObjectInheritsSelf, `The Object type '%{label}' inherits from itself`) + + issue.Hard(NilArrayElement, `Attempt to create array with nil element at index %{index}`) + + issue.Hard(NilHashKey, `Attempt to create hash with nil key`) + + issue.Hard(NilHashValue, `Attempt to create hash with nil value for key '%{key}'`) + + issue.Hard(NoAttributeReader, `No attribute reader is implemented for %{label}`) + + issue.Hard(NoCurrentContext, `There is no current evaluation context`) + + issue.Hard(NoDefinition, `The code loaded from %{source} does not define the %{type} '%{name}`) + + issue.Hard(NotInteger, `The value '%{value}' cannot be converted to an Integer`) + + issue.Hard(NotExpectedTypeset, `The code loaded from %{source} does not define the TypeSet %{name}'`) + + issue.Hard2(NotParameterizedType, `%{type} is not a parameterized type`, + issue.HF{`type`: issue.UcAnOrA}) + + issue.Hard(NotSemver, `The value cannot be converted to semantic version. 
Caused by '%{detail}'`) + + issue.Hard(NotSupportedByGoTimeLayout, `The format specifier '%{format_specifier}' "%{description}" can not be converted to a Go Time Layout`) + + issue.Hard(OverrideMemberMismatch, `%{member} attempts to override %{label}`) + + issue.Hard(OverriddenNotFound, `expected %{label} to override an inherited %{feature_type}, but no such %{feature_type} was found`) + + // TRANSLATOR 'override => true' is a puppet syntax and should not be translated + issue.Hard(OverrideIsMissing, `%{member} attempts to override %{label} without having override => true`) + + issue.Hard(OverrideOfFinal, `%{member} attempts to override final %{label}`) + + issue.Hard(ParseError, `Unable to parse %{language}. Detail: %{detail}`) + + issue.Hard(SerializationAttributeNotFound, `%{label} serialization is referencing non existent attribute '%{attribute}'`) + + issue.Hard(SerializationNotAttribute, `{label} serialization is referencing %{attribute}. Only attribute references are allowed`) + + issue.Hard(SerializationBadKind, `%{label} equality is referencing {kind} %{attribute}.`) + + issue.Hard(SerializationDefaultConvertedToString, `%{path} contains the special value default. It will be converted to the String 'default'`) + + issue.Hard2(SerializationUnknownConvertedToString, `%{path} contains %{klass} value. It will be converted to the String '%{value}'`, issue.HF{`klass`: issue.AnOrA}) + + issue.Hard(SerializationRequiredAfterOptional, `%{label} serialization is referencing required %{required} after optional %{optional}. Optional attributes must be last`) + + issue.Hard(TimespanBadFormatSpec, `Bad format specifier '%{expression}' in '%{format}', at position %{position}`) + + issue.Hard(CannotBeParsed, `Unable to parse Timespan '%{str}' using any of the formats %{formats}`) + + issue.Hard(TimespanFormatSpecNotHigher, `Format specifiers %L and %N denotes fractions and must be used together with a specifier of higher magnitude`) + + issue.Hard(TimestampCannotBeParsed, `Unable to parse Timestamp '%{str}' using any of the formats %{formats}`) + + issue.Hard(TimestampTzAmbiguity, `Parsed timezone '%{parsed}' conflicts with provided timezone argument %{given}`) + + issue.Hard(TypeMismatch, `Type mismatch: %{detail}`) + + issue.Hard(TypesetAliasCollides, `TypeSet '%{name}' references a TypeSet using alias '%{ref_alias}'. The alias collides with the name of a declared type`) + + issue.Hard(TypesetMissingNameAuthority, `No 'name_authority' is declared in TypeSet '%{name}' and it cannot be inferred`) + + issue.Hard(TypesetReferenceBadType, `TypeSet '%{name}' reference to TypeSet named %{ref_name} resoles to a %{type_name}`) + + issue.Hard(TypesetReferenceDuplicate, `TypeSet '%{name}' references a TypeSet using alias '%{ref_alias}' more than once`) + + issue.Hard(TypesetReferenceMismatch, `TypeSet '%{name}' reference to TypeSet named %{ref_name} resolves to an incompatible version. 
Expected %{version_range}, got %{version`) + + issue.Hard(TypesetReferenceOverlap, `TypeSet '%{name}' references TypeSet '%{ref_na}/%{ref_name}' more than once using overlapping version ranges`) + + issue.Hard(TypesetReferenceUnresolved, `TypeSet '%{name}' reference to TypeSet '%{ref_name}' cannot be resolved`) + + issue.Hard(UnableToDeserializeType, `Unable to deserialize a data type from hash %{hash}`) + + issue.Hard2(UnableToDeserializeValue, `Unable to deserialize an instance of %{type} from %{arg_type}`, issue.HF{`arg_type`: issue.AnOrA}) + + issue.Hard(UnableToReadFile, `Unable to read file '%{path}': %{detail}`) + + issue.Hard(UnhandledPcoreVersion, `The pcore version for TypeSet '%{name}' is not understood by this runtime. Expected range %{expected_range}, got %{pcore_version}`) + + issue.Hard(UnknownFunction, `Unknown function: '%{name}'`) + + issue.Hard(UnknownVariable, `Unknown variable: '$%{name}'`) + + issue.Hard(UnreflectableType, `Unable to create a pcore.Type from value of type '%{type}'`) + + issue.Hard(UnreflectableValue, `Unable to create a reflect.Value from value of type '%{type}'`) + + issue.Hard(UnresolvedType, `Reference to unresolved type '%{typeString}'`) + + issue.Hard(UnresolvedTypeOf, `Unable to resolve attribute '%{navigation}' of type '%{type}'`) + + issue.Hard(UnsupportedStringFormat, `Illegal format '%c' specified for value of %{type} type - expected one of the characters '%{supported_formats}'`) + + issue.Hard(WrongDefinition, `The code loaded from %{source} produced %{type} with the wrong name, expected %{expected}, actual %{actual}`) +} diff --git a/vendor/github.com/lyraproj/pcore/px/loader.go b/vendor/github.com/lyraproj/pcore/px/loader.go new file mode 100644 index 0000000..954bcd3 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/loader.go @@ -0,0 +1,90 @@ +package px + +import ( + "regexp" + + "github.com/lyraproj/issue/issue" +) + +type ( + PathType string + + LoaderEntry interface { + Value() interface{} + + Origin() issue.Location + } + + Loader interface { + // LoadEntry returns the requested entry or nil if no such entry can be found + LoadEntry(c Context, name TypedName) LoaderEntry + + // NameAuthority returns the name authority + NameAuthority() URI + + // Discover iterates over all entries accessible to this loader and its parents + // and returns a slice of all entries for which the provided function returns + // true + Discover(c Context, predicate func(tn TypedName) bool) []TypedName + + // HasEntry returns true if this loader has an entry that maps to the give name + // The status of the entry is determined without actually loading it. 
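The init function above binds each PCORE_* code to a message template. A minimal sketch, not part of the vendored sources, of raising one of those codes through px.Error (defined later in px/wrap.go in this patch); the detail text is invented for illustration.

package main

import (
	"fmt"

	"github.com/lyraproj/issue/issue"
	"github.com/lyraproj/pcore/px"
)

func main() {
	// px.Error builds an issue.Reported from a registered code; the template
	// registered for TypeMismatch above fills in the %{detail} argument.
	err := px.Error(px.TypeMismatch, issue.H{`detail`: `expected Integer, got String`})
	fmt.Println(err.String())
}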
+ HasEntry(name TypedName) bool + } + + DefiningLoader interface { + Loader + + SetEntry(name TypedName, entry LoaderEntry) LoaderEntry + } + + ParentedLoader interface { + Loader + + // Parent returns the parent loader + Parent() Loader + } + + ModuleLoader interface { + Loader + + Path() string + + ModuleName() string + } + + DependencyLoader interface { + Loader + + LoaderFor(key string) ModuleLoader + } + + TypeSetLoader interface { + Loader + + TypeSet() Type + } +) + +const ( + PuppetDataTypePath = PathType(`puppetDataType`) + PuppetFunctionPath = PathType(`puppetFunction`) + PlanPath = PathType(`plan`) + TaskPath = PathType(`task`) +) + +var moduleNameRX = regexp.MustCompile(`^[a-z][a-z0-9_]*$`) + +func IsValidModuleName(moduleName string) bool { + return moduleNameRX.MatchString(moduleName) +} + +var Load func(c Context, name TypedName) (interface{}, bool) +var NewLoaderEntry func(value interface{}, origin issue.Location) LoaderEntry +var StaticLoader func() Loader +var NewParentedLoader func(parent Loader) DefiningLoader +var NewFileBasedLoader func(parent Loader, path, moduleName string, pathTypes ...PathType) ModuleLoader +var NewDependencyLoader func(depLoaders []ModuleLoader) Loader +var RegisterGoFunction func(function ResolvableFunction) +var RegisterResolvableType func(rt ResolvableType) +var NewTypeSetLoader func(parent Loader, typeSet Type) TypeSetLoader diff --git a/vendor/github.com/lyraproj/pcore/px/logging.go b/vendor/github.com/lyraproj/pcore/px/logging.go new file mode 100644 index 0000000..423d532 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/logging.go @@ -0,0 +1,166 @@ +package px + +import ( + "bytes" + "fmt" + "io" + "os" + + "github.com/lyraproj/pcore/utils" + + "github.com/lyraproj/issue/issue" +) + +type ( + LogLevel string + + Logger interface { + Log(level LogLevel, args ...Value) + + Logf(level LogLevel, format string, args ...interface{}) + + LogIssue(issue issue.Reported) + } + + stdlog struct { + } + + LogEntry interface { + Level() LogLevel + Message() string + } + + ArrayLogger struct { + entries []LogEntry + } + + ReportedEntry struct { + issue issue.Reported + } + + TextEntry struct { + level LogLevel + message string + } +) + +const ( + ALERT = LogLevel(`alert`) + CRIT = LogLevel(`crit`) + DEBUG = LogLevel(`debug`) + EMERG = LogLevel(`emerg`) + ERR = LogLevel(`err`) + INFO = LogLevel(`info`) + NOTICE = LogLevel(`notice`) + WARNING = LogLevel(`warning`) + IGNORE = LogLevel(``) +) + +var LogLevels = []LogLevel{ALERT, CRIT, DEBUG, EMERG, ERR, INFO, NOTICE, WARNING} + +func (l LogLevel) Severity() issue.Severity { + switch l { + case CRIT, EMERG, ERR: + return issue.SeverityError + case ALERT, WARNING: + return issue.SeverityWarning + default: + return issue.SeverityIgnore + } +} + +func NewStdLogger() Logger { + return &stdlog{} +} + +func (l *stdlog) Log(level LogLevel, args ...Value) { + w := l.writerFor(level) + utils.Fprintf(w, `%s: `, level) + for _, arg := range args { + ToString3(arg, w) + } + utils.Fprintln(w) +} + +func (l *stdlog) Logf(level LogLevel, format string, args ...interface{}) { + w := l.writerFor(level) + utils.Fprintf(w, `%s: `, level) + utils.Fprintf(w, format, args...) 
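A short usage sketch, not part of the vendored files, combining the module name check from loader.go with the standard logger declared here; writerFor (just below) sends debug, info, and notice to stdout and everything else to stderr.

package main

import "github.com/lyraproj/pcore/px"

func main() {
	logger := px.NewStdLogger()
	// Module names must match ^[a-z][a-z0-9_]*$ according to moduleNameRX.
	if !px.IsValidModuleName(`My-Module`) {
		logger.Logf(px.WARNING, "'%s' is not a valid module name", `My-Module`)
	}
}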
+ utils.Fprintln(w) +} + +func (l *stdlog) writerFor(level LogLevel) io.Writer { + switch level { + case DEBUG, INFO, NOTICE: + return os.Stdout + default: + return os.Stderr + } +} + +func (l *stdlog) LogIssue(issue issue.Reported) { + utils.Fprintln(os.Stderr, issue.String()) +} + +func NewArrayLogger() *ArrayLogger { + return &ArrayLogger{make([]LogEntry, 0, 16)} +} + +func (l *ArrayLogger) Entries(level LogLevel) []LogEntry { + result := make([]LogEntry, 0, 8) + for _, entry := range l.entries { + if entry.Level() == level { + result = append(result, entry) + } + } + return result +} + +func (l *ArrayLogger) Log(level LogLevel, args ...Value) { + w := bytes.NewBufferString(``) + for _, arg := range args { + ToString3(arg, w) + } + l.entries = append(l.entries, &TextEntry{level, w.String()}) +} + +func (l *ArrayLogger) Logf(level LogLevel, format string, args ...interface{}) { + l.entries = append(l.entries, &TextEntry{level, fmt.Sprintf(format, args...)}) +} + +func (l *ArrayLogger) LogIssue(i issue.Reported) { + if i.Severity() != issue.SeverityIgnore { + l.entries = append(l.entries, &ReportedEntry{i}) + } +} + +func (te *TextEntry) Level() LogLevel { + return te.level +} + +func (te *TextEntry) Message() string { + return te.message +} + +func (re *ReportedEntry) Level() LogLevel { + return LogLevelFromSeverity(re.issue.Severity()) +} + +func (re *ReportedEntry) Message() string { + return re.issue.String() +} + +func (re *ReportedEntry) Issue() issue.Reported { + return re.issue +} + +func LogLevelFromSeverity(severity issue.Severity) LogLevel { + switch severity { + case issue.SeverityError: + return ERR + case issue.SeverityWarning, issue.SeverityDeprecation: + return WARNING + default: + return IGNORE + } +} diff --git a/vendor/github.com/lyraproj/pcore/px/reflector.go b/vendor/github.com/lyraproj/pcore/px/reflector.go new file mode 100644 index 0000000..c348949 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/reflector.go @@ -0,0 +1,125 @@ +package px + +import ( + "reflect" + + "github.com/lyraproj/semver/semver" +) + +// A ReflectedType is implemented by PTypes that can have a potential to +// present themselves as a reflect.Type +type ReflectedType interface { + Type + + // ReflectType returns the reflect.Type that corresponds to the receiver + // if possible + ReflectType(c Context) (reflect.Type, bool) +} + +// A Reflected is a value that can reflect itself into a given reflect.Value +type Reflected interface { + Reflect(c Context) reflect.Value + + ReflectTo(c Context, value reflect.Value) +} + +// An ImplementationRegistry contains mappings between ObjectType and reflect.Type +type ImplementationRegistry interface { + // RegisterType registers the mapping between the given Type and reflect.Type + RegisterType(t Type, r reflect.Type) + + // TypeToReflected returns the reflect.Type for the given Type + TypeToReflected(t Type) (reflect.Type, bool) + + // ReflectedToType returns the Type name for the given reflect.Type + ReflectedToType(t reflect.Type) (Type, bool) + + // ReflectedNameToType returns the Type for the given Go Type name + ReflectedNameToType(name string) (Type, bool) +} + +// A AnnotatedType represent a reflect.Type with fields that may have 'puppet' tag overrides. +type AnnotatedType interface { + // Type returns the reflect.Type + Type() reflect.Type + + // Annotations returns a map of annotations where each key is an Annotation type and the + // associated value is an instance of Init[]. 
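The ArrayLogger collects entries in memory instead of writing them, which makes it convenient in tests. An illustrative sketch with made-up messages:

package main

import (
	"fmt"

	"github.com/lyraproj/pcore/px"
)

func main() {
	l := px.NewArrayLogger()
	l.Logf(px.WARNING, "disk usage at %d%%", 93)
	l.Logf(px.INFO, "still running")

	// Entries filters the collected entries by level.
	for _, e := range l.Entries(px.WARNING) {
		fmt.Println(e.Message())
	}
}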
+ Annotations() OrderedMap + + // Tags returns a map, keyed by field names, containing values that are the + // 'puppet' tag parsed into an OrderedMap. The map is merged with possible + // overrides given when the AnnotatedType instance was created + Tags() map[string]OrderedMap + + // Tags returns a map, keyed by field names, containing a map of all tags besides + // the 'puppet' tag + OtherTags() map[string]map[string]string +} + +// NewTaggedType returns a new instance of a AnnotatedType +var NewTaggedType func(reflect.Type, map[string]string) AnnotatedType + +// NewAnnotatedType returns a new instance of a AnnotatedType that is annotated +var NewAnnotatedType func(reflect.Type, map[string]string, OrderedMap) AnnotatedType + +// A Reflector deals with conversions between Value and reflect.Value and +// between Type and reflect.Type +type Reflector interface { + // FieldName returns the puppet name for the given field. The puppet name is + // either picked from the 'puppet' tag of the field or the result of + // munging the field name through utils.FirstToLower + FieldName(f *reflect.StructField) string + + // FunctionDeclFromReflect creates a function declaration suitable for inclusion in an ObjectType initialization + // hash. + FunctionDeclFromReflect(name string, mt reflect.Type, fromInterface bool) OrderedMap + + // Reflect returns the reflected value of the native value held + // by the given src + Reflect(src Value) reflect.Value + + // Reflect2 returns the reflected value of given type from the native value held + // by the given src + Reflect2(src Value, rt reflect.Type) reflect.Value + + // ReflectFieldTags reflects the name, type, and value from a reflect.StructField + // using the field tags and the optionally given puppetTag + ReflectFieldTags(f *reflect.StructField, puppetTag OrderedMap, otherTags map[string]string) (name string, decl OrderedMap) + + // ReflectTo assigns the native value of src to dest + ReflectTo(src Value, dest reflect.Value) + + // ReflectType returns the reflected type of the given Type if possible. Only + // PTypes that represent a value can be represented as a reflected type. Types + // like Any, Default, Unit, or Variant have no reflected type representation + ReflectType(src Type) (reflect.Type, bool) + + // InitializerFromTagged creates an Object initializer hash based on the given reflected type. + InitializerFromTagged(typeName string, parent Type, rType AnnotatedType) OrderedMap + + // TypeFromReflect creates an ObjectType based on the given reflected type. + // The new type is automatically added to the ImplementationRegistry registered to + // the Context from where the Reflector was obtained. + TypeFromReflect(typeName string, parent Type, rType reflect.Type) ObjectType + + // TypeFromTagged creates an Object type based on the given reflected type. + // The new type is automatically added to the ImplementationRegistry registered to + // the Context from where the Reflector was obtained. + TypeFromTagged(typeName string, parent Type, rType AnnotatedType, rcFunc Doer) ObjectType + + // TypeSetFromReflect creates a TypeSet based on the given reflected types The new types are automatically + // added to the ImplementationRegistry registered to the Context from where the Reflector was obtained. + // The aliases map maps the names of the reflected types to the unqualified name of the created types. 
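A sketch of the reflection flow described above, assuming a px.Context whose Reflector() is backed by the pximpl package further down in this patch. The Person struct and the 'My::Person' type name are hypothetical.

package example

import (
	"reflect"

	"github.com/lyraproj/pcore/px"
)

// Person is a hypothetical Go struct to be exposed as a pcore ObjectType.
type Person struct {
	Name string
	Age  int64
}

// registerPerson creates an ObjectType from the reflected struct; the new type
// is added to the ImplementationRegistry of the given context.
func registerPerson(c px.Context) px.ObjectType {
	return c.Reflector().TypeFromReflect(`My::Person`, nil, reflect.TypeOf(&Person{}))
}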
+ // The aliases map may be nil and if present, may map only a subset of the reflected type names + TypeSetFromReflect(typeSetName string, version semver.Version, aliases map[string]string, rTypes ...reflect.Type) TypeSet + + // TagHash returns the parsed and evaluated hash from the 'puppet' tag + TagHash(f *reflect.StructField) (OrderedMap, bool) + + // Fields returns all fields of the given reflected type or an empty slice if no fields exists. + Fields(t reflect.Type) []reflect.StructField + + // Methods returns all methods of the given reflected type or an empty slice if no methods exists. + Methods(ptr reflect.Type) []reflect.Method +} diff --git a/vendor/github.com/lyraproj/pcore/px/runtime.go b/vendor/github.com/lyraproj/pcore/px/runtime.go new file mode 100644 index 0000000..24ad49f --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/runtime.go @@ -0,0 +1,20 @@ +package px + +import ( + "github.com/lyraproj/semver/semver" +) + +type ( + URI string +) + +const ( + KeyPcoreUri = `pcore_uri` + KeyPcoreVersion = `pcore_version` + + RuntimeNameAuthority = URI(`http://puppet.com/2016.1/runtime`) + PcoreUri = URI(`http://puppet.com/2016.1/pcore`) +) + +var PcoreVersion, _ = semver.NewVersion3(1, 0, 0, ``, ``) +var ParsablePcoreVersions, _ = semver.ParseVersionRange(`1.x`) diff --git a/vendor/github.com/lyraproj/pcore/px/typedname.go b/vendor/github.com/lyraproj/pcore/px/typedname.go new file mode 100644 index 0000000..970dc8f --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/typedname.go @@ -0,0 +1,84 @@ +package px + +import ( + "github.com/lyraproj/issue/issue" +) + +type Namespace string + +// Identifier TypedName namespaces. Used by a service to identify what the type of entity a loader +// will look for. + +// NsType denotes a type in the Puppet type system +const NsType = Namespace(`type`) + +// NsFunction denotes a callable function +const NsFunction = Namespace(`function`) + +// NsInterface denotes an entity that must have an "interface" property that appoints +// an object type which in turn contains a declaration of the methods that the interface +// implements. +const NsInterface = Namespace(`interface`) + +// NsDefinition denotes an entity that describes something that is provided by a remote service. Examples +// of such entities are callable API's and steps that can participate in a workflow. +const NsDefinition = Namespace(`definition`) + +// NsHandler denotes a handler for a state in a workflow +const NsHandler = Namespace(`handler`) + +// NsService denotes a remote service +const NsService = Namespace(`service`) + +// NsStep denotes a Puppet DSL specific step (not to be confused with a Definition of an step) +const NsStep = Namespace(`step`) + +// Here in case of future Bolt integration with the Evaluator +const NsPlan = Namespace(`plan`) +const NsTask = Namespace(`task`) + +// For internal use only + +// NsAllocator returns a function capable of allocating an instance of an object +// without initializing its content +const NsAllocator = Namespace(`allocator`) + +// NsConstructor denotes a function that both allocates an initializes an object based +// on parameter values +const NsConstructor = Namespace(`constructor`) + +type TypedName interface { + PuppetObject + issue.Named + + IsParent(n TypedName) bool + + IsQualified() bool + + MapKey() string + + Authority() URI + + Namespace() Namespace + + Parts() []string + + // PartsList returns the parts as a List + PartsList() List + + // Child returns the typed name with its leading segment stripped off, e.g. 
+ // A::B::C returns B::C + Child() TypedName + + // Parent returns the typed name with its final segment stripped off, e.g. + // A::B::C returns A::B + Parent() TypedName + + RelativeTo(parent TypedName) (TypedName, bool) +} + +var NewTypedName func(namespace Namespace, name string) TypedName +var NewTypedName2 func(namespace Namespace, name string, nameAuthority URI) TypedName + +// TypedNameFromMapKey recreates a TypedName from a given MapKey that was produced by a TypedName +var TypedNameFromMapKey func(mapKey string) TypedName diff --git a/vendor/github.com/lyraproj/pcore/px/types.go b/vendor/github.com/lyraproj/pcore/px/types.go new file mode 100644 index 0000000..6680c42 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/types.go @@ -0,0 +1,338 @@ +package px + +import ( + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/semver/semver" +) + +type ( + Visitor func(t Type) + + Type interface { + Value + + IsInstance(o Value, g Guard) bool + + IsAssignable(t Type, g Guard) bool + + MetaType() ObjectType + + Name() string + + Accept(visitor Visitor, g Guard) + } + + SizedType interface { + Type + + Size() Type + } + + StringType interface { + SizedType + + Value() *string + } + + Creatable interface { + Constructor(c Context) Function + } + + Newable interface { + New(c Context, args []Value) Value + } + + ResolvableType interface { + Name() string + + Resolve(c Context) Type + } + + ParameterizedType interface { + Type + + Default() Type + + // Parameters returns the parameters that is needed in order to recreate + // an instance of the parameterized type. + Parameters() []Value + } + + SerializeAsString interface { + // CanSerializeAsString responds true if this instance and all its nested + // instances can serialize as string + CanSerializeAsString() bool + + // SerializationString returns the string that the type of the instance can use + // to recreate the instance + SerializationString() string + } + + Annotation interface { + Validate(Context, Annotatable) + } + + Annotatable interface { + Annotations(c Context) OrderedMap + } + + CallableMember interface { + Call(c Context, receiver Value, block Lambda, args []Value) Value + } + + CallableGoMember interface { + // CallGo calls a member on a struct pointer with the given arguments + CallGo(c Context, receiver interface{}, args ...interface{}) []interface{} + + // CallGoReflected is like Call but using reflected arguments and return value. The + // first argument is the receiver + CallGoReflected(c Context, args []reflect.Value) []reflect.Value + } + + TypeWithCallableMembers interface { + // Member returns an attribute reader or other function and true, or nil and false if no such member exists + Member(name string) (CallableMember, bool) + } + + AnnotatedMember interface { + Annotatable + Equality + CallableMember + + Name() string + + Label() string + + FeatureType() string + + Container() ObjectType + + Type() Type + + Override() bool + + Final() bool + + InitHash() OrderedMap + + Accept(v Visitor, g Guard) + + CallableType() Type + } + + AttributeKind string + + TagsAnnotation interface { + PuppetObject + + Tag(key string) string + + Tags() OrderedMap + } + + Attribute interface { + AnnotatedMember + Kind() AttributeKind + + // Get returns this attributes value in the given instance + Get(instance Value) Value + + // HasValue returns true if a value has been defined for this attribute. 
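A sketch of building and navigating typed names. NewTypedName is a package-level function variable, so this assumes the pcore runtime has already been initialized and assigned it; the name itself is an arbitrary example.

package example

import (
	"fmt"

	"github.com/lyraproj/pcore/px"
)

func describeTypedName() {
	tn := px.NewTypedName(px.NsType, `Foo::Bar::Baz`)
	fmt.Println(tn.IsQualified())   // true, the name has more than one segment
	fmt.Println(tn.Parent().Name()) // Foo::Bar
	fmt.Println(tn.Child().Name())  // Bar::Baz
	fmt.Println(tn.MapKey())        // key suitable for TypedNameFromMapKey
}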
+ HasValue() bool + + // Default returns true if the given value equals the default value for this attribute + Default(value Value) bool + + // Value returns the value of this attribute, or raises an error if no value has been defined. + Value() Value + + // GoName Returns the name of the struct field that this attribute maps to when applicable or + // an empty string. + GoName() string + + // Tags returns the TagAnnotation for this attribute or nil if the attribute has no tags. + Tags(Context) TagsAnnotation + } + + ObjFunc interface { + AnnotatedMember + + // GoName Returns the name of the struct field that this attribute maps to when applicable or + // an empty string. + GoName() string + + // ReturnsError returns true if the underlying method returns an error instance in case of + // failure. Such errors must be converted to panics by the caller + ReturnsError() bool + } + + AttributesInfo interface { + NameToPos() map[string]int + + Attributes() []Attribute + + EqualityAttributeIndex() []int + + RequiredCount() int + + PositionalFromHash(hash OrderedMap) []Value + } + + ObjectType interface { + Annotatable + ParameterizedType + TypeWithCallableMembers + Creatable + + HasHashConstructor() bool + + Functions(includeParent bool) []ObjFunc + + // Returns the Go reflect.Type that this type was reflected from, if any. + // + GoType() reflect.Type + + // IsInterface returns true for non parameterized types that contains only methods + IsInterface() bool + + IsMetaType() bool + + IsParameterized() bool + + // Implements returns true the receiver implements all methods of ObjectType + Implements(ObjectType, Guard) bool + + AttributesInfo() AttributesInfo + + // FromReflectedValue creates a new instance of the receiver type + // and initializes that instance from the given src + FromReflectedValue(c Context, src reflect.Value) PuppetObject + + // Parent returns the type that this type inherits from or nil if + // the type doesn't have a parent + Parent() Type + + // ToReflectedValue copies values from src to dest. The src argument + // must be an instance of the receiver. The dest argument must be + // a reflected struct. The src must be able to deliver a value to + // each of the exported fields in dest. + // + // Puppets name convention stipulates lower case names using + // underscores to separate words. The Go conversion is to use + // camel cased names. ReflectValueTo will convert camel cased names + // into names with underscores. + ToReflectedValue(c Context, src PuppetObject, dest reflect.Value) + } + + TypeSet interface { + ParameterizedType + + // GetType returns the given type from the receiver together with + // a flag indicating success or failure + GetType(typedName TypedName) (Type, bool) + + // GetType2 is like GetType but uses a string to identify the type + GetType2(name string) (Type, bool) + + // Authority returns the name authority of the receiver + NameAuthority() URI + + // TypedName returns the name of this type set as a TypedName + TypedName() TypedName + + // Types returns a hash of all types contained in this set. 
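ToReflectedValue and FromReflectedValue copy between PuppetObject instances and Go structs, translating Puppet's underscore names to camel-cased field names. A minimal sketch, assuming the object's PType() is an ObjectType and dest points to a struct with matching exported fields:

package example

import (
	"reflect"

	"github.com/lyraproj/pcore/px"
)

// toGo copies the attributes of a PuppetObject into the struct that dest
// points to, using the object's own ObjectType for the field mapping.
func toGo(c px.Context, obj px.PuppetObject, dest interface{}) {
	ot := obj.PType().(px.ObjectType)
	ot.ToReflectedValue(c, obj, reflect.ValueOf(dest).Elem())
}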
The keys + // in this hash are relative to the receiver name + Types() OrderedMap + + // Version returns the version of the receiver + Version() semver.Version + } + + TypeWithContainedType interface { + Type + + ContainedType() Type + } + + // Generalizable implemented by all parameterized types that have type parameters + Generalizable interface { + ParameterizedType + Generic() Type + } +) + +var CommonType func(a Type, b Type) Type + +var GenericType func(t Type) Type + +var IsInstance func(puppetType Type, value Value) bool + +// IsAssignable answers if t is assignable to this type +var IsAssignable func(puppetType Type, other Type) bool + +var Generalize func(t Type) Type + +var Normalize func(t Type) Type + +var DefaultFor func(t Type) Type + +func AssertType(pfx interface{}, expected, actual Type) Type { + if !IsAssignable(expected, actual) { + panic(TypeMismatchError(pfx, expected, actual)) + } + return actual +} + +func AssertInstance(pfx interface{}, expected Type, value Value) Value { + if !IsInstance(expected, value) { + panic(MismatchError(pfx, expected, value)) + } + return value +} + +func MismatchError(pfx interface{}, expected Type, value Value) issue.Reported { + return Error(TypeMismatch, issue.H{`detail`: DescribeMismatch(getPrefix(pfx), expected, DetailedValueType(value))}) +} + +func TypeMismatchError(pfx interface{}, expected Type, actual Type) issue.Reported { + return Error(TypeMismatch, issue.H{`detail`: DescribeMismatch(getPrefix(pfx), expected, actual)}) +} + +// New creates a new instance of type t +var New func(c Context, receiver Value, args ...Value) Value + +// New creates a new instance of type t and calls the block with the created instance. It +// returns the value returned from the block +func NewWithBlock(c Context, receiver Value, args []Value, block Lambda) Value { + r := New(c, receiver, args...) + if block != nil { + r = block.Call(c, nil, r) + } + return r +} + +var DescribeSignatures func(signatures []Signature, argsTuple Type, block Lambda) string + +var DescribeMismatch func(pfx string, expected Type, actual Type) string + +var NewGoType func(name string, zeroValue interface{}) ObjectType + +var NewGoObjectType func(name string, rType reflect.Type, typeDecl string, creators ...DispatchFunction) ObjectType + +var NewNamedType func(name, typeDecl string) Type + +var NewObjectType func(name, typeDecl string, creators ...DispatchFunction) ObjectType + +var WrapReflectedType func(c Context, rt reflect.Type) (Type, error) + +func getPrefix(pfx interface{}) string { + name := `` + if s, ok := pfx.(string); ok { + name = s + } else if f, ok := pfx.(func() string); ok { + name = f() + } + return name +} diff --git a/vendor/github.com/lyraproj/pcore/px/valueconsumer.go b/vendor/github.com/lyraproj/pcore/px/valueconsumer.go new file mode 100644 index 0000000..e752c7d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/valueconsumer.go @@ -0,0 +1,41 @@ +package px + +// A ValueConsumer is used by a Data streaming mechanism that maintains a reference index +// which is increased by one for each value that it streams. The reference index +// originates from zero. +type ValueConsumer interface { + // CanDoBinary returns true if the value can handle binary efficiently. This tells + // the Serializer to pass BinaryValue verbatim to Add + CanDoBinary() bool + + // CanComplexKeys() returns true if complex values can be used as keys. 
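AssertInstance and AssertType panic with a PCORE_TYPE_MISMATCH Reported when the check fails, so callers typically recover at an evaluation boundary. A small wrapper sketch with illustrative names:

package example

import "github.com/lyraproj/pcore/px"

// requireInstance returns v unchanged when it is an instance of expected and
// otherwise panics with an issue.Reported carrying PCORE_TYPE_MISMATCH.
func requireInstance(expected px.Type, v px.Value) px.Value {
	return px.AssertInstance(`example`, expected, v)
}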
If this + // method returns false, all keys must be strings + CanDoComplexKeys() bool + + // StringDedupThreshold returns the preferred threshold for dedup of strings. Strings + // shorter than this threshold will not be subjected to de-duplication. + StringDedupThreshold() int + + // AddArray starts a new array, calls the doer function, and then ends the Array. + // + // The callers reference index is increased by one. + AddArray(len int, doer Doer) + + // AddHash starts a new hash., calls the doer function, and then ends the Hash. + // + // The callers reference index is increased by one. + AddHash(len int, doer Doer) + + // Add adds the next value. + // + // Calls following a StartArray will add elements to the Array + // + // Calls following a StartHash will first add a key, then a value. This + // repeats until End or StartArray is called. + // + // The callers reference index is increased by one. + Add(element Value) + + // Add a reference to a previously added afterElement, hash, or array. + AddRef(ref int) +} diff --git a/vendor/github.com/lyraproj/pcore/px/values.go b/vendor/github.com/lyraproj/pcore/px/values.go new file mode 100644 index 0000000..9f90015 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/values.go @@ -0,0 +1,258 @@ +package px + +import ( + "bytes" + "fmt" + "io" + "regexp" +) + +type ( + RDetect map[interface{}]bool + + Value interface { + fmt.Stringer + Equality + ToString(bld io.Writer, format FormatContext, g RDetect) + PType() Type + } + + // Comparator returns true when a is less than b. + Comparator func(a, b Value) bool + + Object interface { + Value + Initialize(c Context, arguments []Value) + InitFromHash(c Context, hash OrderedMap) + } + + ReadableObject interface { + Get(key string) (value Value, ok bool) + } + + // CallableObject is implemented by PuppetObjects that have functions + CallableObject interface { + Call(c Context, method ObjFunc, args []Value, block Lambda) (result Value, ok bool) + } + + PuppetObject interface { + Value + ReadableObject + + InitHash() OrderedMap + } + + DetailedTypeValue interface { + Value + DetailedType() Type + } + + Sized interface { + Value + Len() int + IsEmpty() bool + } + + InterfaceValue interface { + Value + Interface() interface{} + } + + Arrayable interface { + AsArray() List + } + + Indexed interface { + Sized + At(index int) Value + ElementType() Type + IsHashStyle() bool + } + + IteratorValue interface { + Value + Arrayable + ElementType() Type + Next() (Value, bool) + } + + // List represents an Array. The iterative methods will not catch break exceptions. If + // // that is desired, then use an Iterator instead. 
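A minimal ValueConsumer sketch, not part of the vendored sources, that only counts what it is fed; it assumes px.Doer is a plain func(), which is how the interface above uses it. A real consumer would serialize the values and honor the reference index.

package example

import "github.com/lyraproj/pcore/px"

// countingConsumer counts every Add, AddArray, AddHash, and AddRef call.
type countingConsumer struct {
	count int
}

// Compile-time check that the sketch satisfies the interface.
var _ px.ValueConsumer = &countingConsumer{}

func (cc *countingConsumer) CanDoBinary() bool         { return false }
func (cc *countingConsumer) CanDoComplexKeys() bool    { return false }
func (cc *countingConsumer) StringDedupThreshold() int { return 0 }

func (cc *countingConsumer) AddArray(n int, doer px.Doer) {
	cc.count++ // the array itself occupies one reference index
	doer()     // the doer adds the n elements
}

func (cc *countingConsumer) AddHash(n int, doer px.Doer) {
	cc.count++
	doer() // the doer adds the n key/value pairs
}

func (cc *countingConsumer) Add(element px.Value) { cc.count++ }
func (cc *countingConsumer) AddRef(ref int)       { cc.count++ }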
+ List interface { + Indexed + Add(Value) List + AddAll(List) List + All(predicate Predicate) bool + Any(predicate Predicate) bool + AppendTo(slice []Value) []Value + Delete(Value) List + DeleteAll(List) List + Each(Consumer) + EachSlice(int, SliceConsumer) + EachWithIndex(consumer IndexedConsumer) + Find(predicate Predicate) (Value, bool) + Flatten() List + Map(mapper Mapper) List + Select(predicate Predicate) List + Slice(i int, j int) List + Reduce(redactor BiMapper) Value + Reduce2(initialValue Value, redactor BiMapper) Value + Reject(predicate Predicate) List + Unique() List + } + + SortableList interface { + List + Sort(comparator Comparator) List + } + + HashKey string + + HashKeyValue interface { + ToKey() HashKey + } + + StreamHashKeyValue interface { + ToKey(b *bytes.Buffer) + } + + MapEntry interface { + Value + Key() Value + Value() Value + } + + Keyed interface { + Get(key Value) (Value, bool) + } + + // OrderedMap represents a Hash. The iterative methods will not catch break exceptions. If + // that is desired, then use an Iterator instead. + OrderedMap interface { + List + Keyed + AllPairs(BiPredicate) bool + AnyPair(BiPredicate) bool + AllKeysAreStrings() bool + Entries() List + EachKey(Consumer) + EachPair(BiConsumer) + EachValue(Consumer) + + Get2(key Value, dflt Value) Value + Get3(key Value, dflt Producer) Value + Get4(key string) (Value, bool) + Get5(key string, dflt Value) Value + Get6(key string, dflt Producer) Value + + // GetEntry returns the entry that represents the mapping between + // the given key and its value + GetEntry(key string) (entry MapEntry, found bool) + + // GetEntryFold retuns the first entry that has a key which, in string form + // equals the given key using case insensitive comparison + GetEntryFold(key string) (entry MapEntry, found bool) + + IncludesKey(o Value) bool + + IncludesKey2(o string) bool + + Keys() List + + // MapEntries returns a new OrderedMap with both keys and values + // converted using the given mapper function + MapEntries(mapper EntryMapper) OrderedMap + + // MapValues returns a new OrderedMap with the exact same keys as + // before but where each value has been converted using the given + // mapper function + MapValues(mapper Mapper) OrderedMap + + Merge(OrderedMap) OrderedMap + + Values() List + SelectPairs(BiPredicate) OrderedMap + RejectPairs(BiPredicate) OrderedMap + } + + Number interface { + Value + Int() int64 + Float() float64 + } + + Boolean interface { + Value + Bool() bool + } + + Integer interface { + Number + Abs() int64 + } + + Float interface { + Number + Abs() float64 + } + + StringValue interface { + List + Split(pattern *regexp.Regexp) List + ToLower() StringValue + ToUpper() StringValue + EqualsIgnoreCase(Value) bool + } +) + +var EmptyArray List +var EmptyMap OrderedMap +var EmptyString Value +var EmptyValues []Value +var Undef Value + +var SingletonMap func(string, Value) OrderedMap +var DetailedValueType func(value Value) Type +var GenericValueType func(value Value) Type +var ToKey func(value Value) HashKey +var IsTruthy func(tv Value) bool + +var ToInt func(v Value) (int64, bool) +var ToFloat func(v Value) (float64, bool) + +// StringElements returns a slice containing each element in the given list as a string +func StringElements(l List) []string { + ss := make([]string, l.Len()) + l.EachWithIndex(func(e Value, i int) { + ss[i] = e.String() + }) + return ss +} + +func ToString(t Value) string { + return ToString2(t, DefaultFormatContext) +} + +func ToPrettyString(t Value) string { + return 
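A small sketch of walking a List with the same IndexedConsumer shape that StringElements (just below) uses, pretty-printing each element via the ToString helpers:

package example

import (
	"fmt"

	"github.com/lyraproj/pcore/px"
)

// dump prints every element of the list together with its index.
func dump(l px.List) {
	l.EachWithIndex(func(e px.Value, i int) {
		fmt.Printf("%d: %s\n", i, px.ToString(e))
	})
}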
ToString2(t, Pretty) +} + +func ToString2(t Value, format FormatContext) string { + bld := bytes.NewBufferString(``) + t.ToString(bld, format, nil) + return bld.String() +} + +func ToString3(t Value, writer io.Writer) { + ToString4(t, DefaultFormatContext, writer) +} + +func ToString4(t Value, format FormatContext, writer io.Writer) { + t.ToString(writer, format, nil) +} + +func CopyValues(src []Value) []Value { + dst := make([]Value, len(src)) + copy(dst, src) + return dst +} diff --git a/vendor/github.com/lyraproj/pcore/px/wrap.go b/vendor/github.com/lyraproj/pcore/px/wrap.go new file mode 100644 index 0000000..f32a2c7 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/px/wrap.go @@ -0,0 +1,31 @@ +package px + +import ( + "github.com/lyraproj/issue/issue" +) + +// LogWarning logs a warning to the logger of the current context +func LogWarning(issueCode issue.Code, args issue.H) { + CurrentContext().Logger().LogIssue(Warning(issueCode, args)) +} + +// Error creates a Reported with the given issue code, location from stack top, and arguments +// Typical use is to panic with the returned value +func Error(issueCode issue.Code, args issue.H) issue.Reported { + return issue.NewReported(issueCode, issue.SeverityError, args, 1) +} + +// Error2 creates a Reported with the given issue code, location from stack top, and arguments +// Typical use is to panic with the returned value +func Error2(location issue.Location, issueCode issue.Code, args issue.H) issue.Reported { + return issue.NewReported(issueCode, issue.SeverityError, args, location) +} + +// Warning creates a Reported with the given issue code, location from stack top, and arguments +// and logs it on the currently active logger +func Warning(issueCode issue.Code, args issue.H) issue.Reported { + c := CurrentContext() + ri := issue.NewReported(issueCode, issue.SeverityWarning, args, 1) + c.Logger().LogIssue(ri) + return ri +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/context.go b/vendor/github.com/lyraproj/pcore/pximpl/context.go new file mode 100644 index 0000000..0c4bad5 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/context.go @@ -0,0 +1,304 @@ +package pximpl + +import ( + "context" + "fmt" + "sync" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type ( + pxContext struct { + context.Context + loader px.Loader + logger px.Logger + stack []issue.Location + implRegistry px.ImplementationRegistry + vars map[string]interface{} + } + + systemLocation struct{} +) + +func (systemLocation) File() string { + return `` +} + +func (systemLocation) Line() int { + return 0 +} + +func (systemLocation) Pos() int { + return 0 +} + +var resolvableFunctions = make([]px.ResolvableFunction, 0, 16) +var resolvableFunctionsLock sync.Mutex + +func init() { + px.RegisterGoFunction = func(function px.ResolvableFunction) { + resolvableFunctionsLock.Lock() + resolvableFunctions = append(resolvableFunctions, function) + resolvableFunctionsLock.Unlock() + } + + px.ResolveResolvables = resolveResolvables + px.ResolveTypes = resolveTypes +} + +func NewContext(loader px.Loader, logger px.Logger) px.Context { + return WithParent(context.Background(), loader, logger, newImplementationRegistry()) +} + +func WithParent(parent context.Context, loader px.Loader, logger px.Logger, ir px.ImplementationRegistry) px.Context { + var c *pxContext + ir = newParentedImplementationRegistry(ir) + if cp, ok := parent.(pxContext); ok { + c = cp.clone() + c.Context = parent + c.loader = loader 
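A sketch of creating an evaluation context with the pximpl constructor above. px.StaticLoader is a function variable that the pcore runtime assigns during initialization, so this assumes that initialization has already taken place.

package example

import (
	"github.com/lyraproj/pcore/px"
	"github.com/lyraproj/pcore/pximpl"
)

// newLoggingContext pairs the runtime's static loader with a stdout/stderr
// logger in a fresh context that has its own implementation registry.
func newLoggingContext() px.Context {
	return pximpl.NewContext(px.StaticLoader(), px.NewStdLogger())
}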
+ c.logger = logger + } else { + c = &pxContext{Context: parent, loader: loader, logger: logger, stack: make([]issue.Location, 0, 8), implRegistry: ir} + } + return c +} + +func (c *pxContext) DefiningLoader() px.DefiningLoader { + l := c.loader + for { + if dl, ok := l.(px.DefiningLoader); ok { + return dl + } + if pl, ok := l.(px.ParentedLoader); ok { + l = pl.Parent() + continue + } + panic(`No defining loader found in context`) + } +} + +func (c *pxContext) Delete(key string) { + if c.vars != nil { + delete(c.vars, key) + } +} + +func (c *pxContext) DoWithLoader(loader px.Loader, doer px.Doer) { + saveLoader := c.loader + defer func() { + c.loader = saveLoader + }() + c.loader = loader + doer() +} + +func (c *pxContext) Error(location issue.Location, issueCode issue.Code, args issue.H) issue.Reported { + if location == nil { + location = c.StackTop() + } + return issue.NewReported(issueCode, issue.SeverityError, args, location) +} + +func (c *pxContext) Fork() px.Context { + s := make([]issue.Location, len(c.stack)) + copy(s, c.stack) + clone := c.clone() + clone.loader = px.NewParentedLoader(clone.loader) + clone.implRegistry = newParentedImplementationRegistry(clone.implRegistry) + clone.stack = s + + if c.vars != nil { + cv := make(map[string]interface{}, len(c.vars)) + for k, v := range c.vars { + cv[k] = v + } + clone.vars = cv + } + return clone +} + +func (c *pxContext) Fail(message string) issue.Reported { + return c.Error(nil, px.Failure, issue.H{`message`: message}) +} + +func (c *pxContext) Get(key string) (interface{}, bool) { + if c.vars != nil { + if v, ok := c.vars[key]; ok { + return v, true + } + } + return nil, false +} + +func (c *pxContext) ImplementationRegistry() px.ImplementationRegistry { + return c.implRegistry +} + +func (c *pxContext) Loader() px.Loader { + return c.loader +} + +func (c *pxContext) Logger() px.Logger { + return c.logger +} + +func (c *pxContext) ParseTypeValue(typeString px.Value) px.Type { + if sv, ok := typeString.(px.StringValue); ok { + return c.ParseType(sv.String()) + } + panic(px.Error(px.IllegalArgumentType, issue.H{`function`: `ParseTypeValue`, `index`: 0, `expected`: `String`, `actual`: px.DetailedValueType(typeString).String()})) +} + +func (c *pxContext) ParseType(str string) px.Type { + t, err := types.Parse(str) + if err != nil { + panic(err) + } + if pt, ok := t.(px.ResolvableType); ok { + return pt.Resolve(c) + } + panic(fmt.Errorf(`expression "%s" does no resolve to a Type`, str)) +} + +func (c *pxContext) Reflector() px.Reflector { + return types.NewReflector(c) +} + +func resolveResolvables(c px.Context) { + l := c.Loader().(px.DefiningLoader) + ts := types.PopDeclaredTypes() + for _, rt := range ts { + l.SetEntry(px.NewTypedName(px.NsType, rt.Name()), px.NewLoaderEntry(rt, nil)) + } + + for _, mp := range types.PopDeclaredMappings() { + c.ImplementationRegistry().RegisterType(mp.T, mp.R) + } + + resolveTypes(c, ts...) 
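ParseType resolves a type expression against the context's loader and panics with an issue.Reported when the string cannot be parsed or resolved. A one-line sketch with an arbitrary type expression, assuming ParseType is exposed on px.Context the way pxContext implements it here:

package example

import "github.com/lyraproj/pcore/px"

// parseMapType resolves the type string into a px.Type.
func parseMapType(c px.Context) px.Type {
	return c.ParseType(`Hash[String, Integer]`)
}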
+ + ctors := types.PopDeclaredConstructors() + for _, ct := range ctors { + rf := px.BuildFunction(ct.Name, ct.LocalTypes, ct.Creators) + l.SetEntry(px.NewTypedName(px.NsConstructor, rf.Name()), px.NewLoaderEntry(rf.Resolve(c), nil)) + } + + fs := popDeclaredGoFunctions() + for _, rf := range fs { + l.SetEntry(px.NewTypedName(px.NsFunction, rf.Name()), px.NewLoaderEntry(rf.Resolve(c), nil)) + } +} + +func (c *pxContext) Scope() px.Keyed { + return px.EmptyMap +} + +func (c *pxContext) Set(key string, value interface{}) { + if c.vars == nil { + c.vars = map[string]interface{}{key: value} + } else { + c.vars[key] = value + } +} + +func (c *pxContext) SetLoader(loader px.Loader) { + c.loader = loader +} + +func (c *pxContext) Stack() []issue.Location { + return c.stack +} + +func (c *pxContext) StackPop() { + c.stack = c.stack[:len(c.stack)-1] +} + +func (c *pxContext) StackPush(location issue.Location) { + c.stack = append(c.stack, location) +} + +func (c *pxContext) StackTop() issue.Location { + s := len(c.stack) + if s == 0 { + return &systemLocation{} + } + return c.stack[s-1] +} + +// clone a new context from this context which is an exact copy except for the parent +// of the clone which is set to the original. It is used internally by Fork +func (c *pxContext) clone() *pxContext { + clone := &pxContext{} + *clone = *c + clone.Context = c + return clone +} + +func resolveTypes(c px.Context, types ...px.ResolvableType) { + l := c.DefiningLoader() + typeSets := make([]px.TypeSet, 0) + allAnnotated := make([]px.Annotatable, 0, len(types)) + for _, rt := range types { + switch t := rt.Resolve(c).(type) { + case px.TypeSet: + typeSets = append(typeSets, t) + case px.ObjectType: + if ctor := t.Constructor(c); ctor != nil { + l.SetEntry(px.NewTypedName(px.NsConstructor, t.Name()), px.NewLoaderEntry(ctor, nil)) + } + allAnnotated = append(allAnnotated, t) + case px.Annotatable: + allAnnotated = append(allAnnotated, t) + } + } + + for _, ts := range typeSets { + allAnnotated = resolveTypeSet(c, l, ts, allAnnotated) + } + + // Validate type annotations + for _, a := range allAnnotated { + a.Annotations(c).EachValue(func(v px.Value) { + v.(px.Annotation).Validate(c, a) + }) + } +} + +func resolveTypeSet(c px.Context, l px.DefiningLoader, ts px.TypeSet, allAnnotated []px.Annotatable) []px.Annotatable { + ts.Types().EachValue(func(tv px.Value) { + t := tv.(px.Type) + if tsc, ok := t.(px.TypeSet); ok { + allAnnotated = resolveTypeSet(c, l, tsc, allAnnotated) + } + // Types already known to the loader might have been added to a TypeSet. When that + // happens, we don't want them added again. 
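Set, Get, and Delete give a context a small bag of ad hoc values that Fork copies into child contexts. A tiny sketch, assuming these methods are part of the px.Context interface as they are of pxContext; the key name is made up.

package example

import "github.com/lyraproj/pcore/px"

// markDryRun stores a flag on the context for later consumers.
func markDryRun(c px.Context) {
	c.Set(`dry_run`, true)
}

// dryRun reads the previously stored flag, defaulting to false.
func dryRun(c px.Context) bool {
	if v, ok := c.Get(`dry_run`); ok {
		return v.(bool)
	}
	return false
}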
+ tn := px.NewTypedName(px.NsType, t.Name()) + le := l.LoadEntry(c, tn) + if le == nil || le.Value() == nil { + if a, ok := t.(px.Annotatable); ok { + allAnnotated = append(allAnnotated, a) + } + l.SetEntry(tn, px.NewLoaderEntry(t, nil)) + if ot, ok := t.(px.ObjectType); ok { + if ctor := ot.Constructor(c); ctor != nil { + l.SetEntry(px.NewTypedName(px.NsConstructor, t.Name()), px.NewLoaderEntry(ctor, nil)) + } + } + } + }) + return allAnnotated +} + +func popDeclaredGoFunctions() (fs []px.ResolvableFunction) { + resolvableFunctionsLock.Lock() + fs = resolvableFunctions + if len(fs) > 0 { + resolvableFunctions = make([]px.ResolvableFunction, 0, 16) + } + resolvableFunctionsLock.Unlock() + return +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/equality.go b/vendor/github.com/lyraproj/pcore/pximpl/equality.go new file mode 100644 index 0000000..ca4c3fb --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/equality.go @@ -0,0 +1,123 @@ +package pximpl + +import ( + "fmt" + "regexp" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/semver/semver" +) + +// PuppetEquals is like Equals but: +// int and float values with same value are considered equal +// string comparisons are case insensitive +// +func init() { + px.PuppetMatch = match + + px.PuppetEquals = equals +} + +func equals(a px.Value, b px.Value) bool { + switch a := a.(type) { + case px.StringValue: + return a.EqualsIgnoreCase(b) + case px.Integer: + lhs := a.Int() + switch b := b.(type) { + case px.Integer: + return lhs == b.Int() + case px.Number: + return float64(lhs) == b.Float() + } + return false + case px.Float: + lhs := a.Float() + if rhv, ok := b.(px.Number); ok { + return lhs == rhv.Float() + } + return false + case *types.Array: + if rhs, ok := b.(*types.Array); ok { + if a.Len() == rhs.Len() { + idx := 0 + return a.All(func(el px.Value) bool { + eq := px.PuppetEquals(el, rhs.At(idx)) + idx++ + return eq + }) + } + } + return false + case *types.Hash: + if rhs, ok := b.(*types.Hash); ok { + if a.Len() == rhs.Len() { + return a.AllPairs(func(key, value px.Value) bool { + rhsValue, ok := rhs.Get(key) + return ok && px.PuppetEquals(value, rhsValue) + }) + } + } + return false + case px.Equality: + return a.Equals(b, nil) + default: + return px.Equals(a, b, nil) + } +} + +// PuppetMatch implements the Puppet =~ semantics +func match(a px.Value, b px.Value) bool { + result := false + switch b := b.(type) { + case px.Type: + result = px.IsInstance(b, a) + + case px.StringValue, *types.Regexp: + var rx *regexp.Regexp + if s, ok := b.(px.StringValue); ok { + var err error + rx, err = regexp.Compile(s.String()) + if err != nil { + panic(px.Error(px.MatchNotRegexp, issue.H{`detail`: err.Error()})) + } + } else { + rx = b.(*types.Regexp).Regexp() + } + + sv, ok := a.(px.StringValue) + if !ok { + panic(px.Error(px.MatchNotString, issue.H{`left`: a.PType()})) + } + if group := rx.FindStringSubmatch(sv.String()); group != nil { + result = true + } + + case *types.SemVer, *types.SemVerRange: + var version semver.Version + + if v, ok := a.(*types.SemVer); ok { + version = v.Version() + } else if s, ok := a.(px.StringValue); ok { + var err error + version, err = semver.ParseVersion(s.String()) + if err != nil { + panic(px.Error(px.NotSemver, issue.H{`detail`: err.Error()})) + } + } else { + panic(px.Error(px.NotSemver, + issue.H{`detail`: fmt.Sprintf(`A value of type %s cannot be converted to a SemVer`, a.PType().String())})) + } + if lv, ok := 
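The equals and match functions above are what px.PuppetEquals and px.PuppetMatch are assigned to in init, implementing Puppet's == and =~ operators. A sketch of calling them, assuming this pximpl package has been initialized so the variables are non-nil:

package example

import "github.com/lyraproj/pcore/px"

// looselyEqual applies Puppet's == semantics: strings compare case
// insensitively and Integer/Float compare by numeric value.
func looselyEqual(a, b px.Value) bool {
	return px.PuppetEquals(a, b)
}

// matches applies Puppet's =~ semantics: type membership, regexp matching, or
// SemVer range inclusion depending on the right-hand operand.
func matches(left, right px.Value) bool {
	return px.PuppetMatch(left, right)
}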
b.(*types.SemVer); ok { + result = lv.Version().Equals(version) + } else { + result = b.(*types.SemVerRange).VersionRange().Includes(version) + } + + default: + result = px.PuppetEquals(b, a) + } + return result +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/function.go b/vendor/github.com/lyraproj/pcore/pximpl/function.go new file mode 100644 index 0000000..d7de41a --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/function.go @@ -0,0 +1,402 @@ +package pximpl + +import ( + "fmt" + "io" + "math" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/utils" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type ( + typeDecl struct { + name string + decl string + tp px.Type + } + + functionBuilder struct { + name string + localTypeBuilder *localTypeBuilder + dispatchers []*dispatchBuilder + } + + localTypeBuilder struct { + localTypes []*typeDecl + } + + dispatchBuilder struct { + fb *functionBuilder + min int64 + max int64 + types []px.Type + blockType px.Type + optionalBlock bool + returnType px.Type + function px.DispatchFunction + function2 px.DispatchFunctionWithBlock + } + + goFunction struct { + name string + dispatchers []px.Lambda + } + + lambda struct { + signature *types.CallableType + } + + goLambda struct { + lambda + function px.DispatchFunction + } + + goLambdaWithBlock struct { + lambda + function px.DispatchFunctionWithBlock + } +) + +func parametersFromSignature(s px.Signature) []px.Parameter { + paramNames := s.ParameterNames() + count := len(paramNames) + tuple := s.ParametersType().(*types.TupleType) + tz := tuple.Size() + capture := -1 + if tz.Max() > int64(count) { + capture = count - 1 + } + paramTypes := s.ParametersType().(*types.TupleType).Types() + ps := make([]px.Parameter, len(paramNames)) + for i, paramName := range paramNames { + ps[i] = newParameter(paramName, paramTypes[i], nil, i == capture) + } + return ps +} + +func (l *lambda) Equals(other interface{}, guard px.Guard) bool { + if ol, ok := other.(*lambda); ok { + return l.signature.Equals(ol.signature, guard) + } + return false +} + +func (l *lambda) String() string { + return `lambda` +} + +func (l *lambda) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + utils.WriteString(bld, `lambda`) +} + +func (l *lambda) PType() px.Type { + return l.signature +} + +func (l *lambda) Signature() px.Signature { + return l.signature +} + +func (l *goLambda) Call(c px.Context, block px.Lambda, args ...px.Value) px.Value { + return l.function(c, args) +} + +func (l *goLambda) Parameters() []px.Parameter { + return parametersFromSignature(l.signature) +} + +func (l *goLambdaWithBlock) Call(c px.Context, block px.Lambda, args ...px.Value) px.Value { + return l.function(c, args, block) +} + +func (l *goLambdaWithBlock) Parameters() []px.Parameter { + return parametersFromSignature(l.signature) +} + +var emptyTypeBuilder = &localTypeBuilder{[]*typeDecl{}} + +func buildFunction(name string, localTypes px.LocalTypesCreator, creators []px.DispatchCreator) px.ResolvableFunction { + lt := emptyTypeBuilder + if localTypes != nil { + lt = &localTypeBuilder{make([]*typeDecl, 0, 8)} + localTypes(lt) + } + + fb := &functionBuilder{name: name, localTypeBuilder: lt, dispatchers: make([]*dispatchBuilder, len(creators))} + dbs := fb.dispatchers + fb.dispatchers = dbs + for idx, creator := range creators { + dbs[idx] = fb.newDispatchBuilder() + creator(dbs[idx]) + } + return fb +} + +func (fb *functionBuilder) newDispatchBuilder() *dispatchBuilder { + return 
&dispatchBuilder{fb: fb, types: make([]px.Type, 0, 8), min: 0, max: 0, optionalBlock: false, blockType: nil, returnType: nil} +} + +func (fb *functionBuilder) Name() string { + return fb.name +} + +func (fb *functionBuilder) Resolve(c px.Context) px.Function { + ds := make([]px.Lambda, len(fb.dispatchers)) + + if tl := len(fb.localTypeBuilder.localTypes); tl > 0 { + localLoader := px.NewParentedLoader(c.Loader()) + c.DoWithLoader(localLoader, func() { + te := make([]px.Type, 0, tl) + for _, td := range fb.localTypeBuilder.localTypes { + if td.tp == nil { + v, err := types.Parse(td.decl) + if err != nil { + panic(err) + } + if dt, ok := v.(*types.DeferredType); ok { + te = append(te, types.NamedType(px.RuntimeNameAuthority, td.name, dt)) + } + } else { + localLoader.SetEntry(px.NewTypedName(px.NsType, td.name), px.NewLoaderEntry(td.tp, nil)) + } + } + + if len(te) > 0 { + px.AddTypes(c, te...) + } + for i, d := range fb.dispatchers { + ds[i] = d.createDispatch(c) + } + }) + } else { + for i, d := range fb.dispatchers { + ds[i] = d.createDispatch(c) + } + } + return &goFunction{fb.name, ds} +} + +func (tb *localTypeBuilder) Type(name string, decl string) { + tb.localTypes = append(tb.localTypes, &typeDecl{name, decl, nil}) +} + +func (tb *localTypeBuilder) Type2(name string, tp px.Type) { + tb.localTypes = append(tb.localTypes, &typeDecl{name, ``, tp}) +} + +func (db *dispatchBuilder) createDispatch(c px.Context) px.Lambda { + for idx, tp := range db.types { + if trt, ok := tp.(*types.TypeReferenceType); ok { + db.types[idx] = c.ParseType(trt.TypeString()) + } + } + if r, ok := db.blockType.(*types.TypeReferenceType); ok { + db.blockType = c.ParseType(r.TypeString()) + } + if db.optionalBlock { + db.blockType = types.NewOptionalType(db.blockType) + } + if r, ok := db.returnType.(*types.TypeReferenceType); ok { + db.returnType = c.ParseType(r.TypeString()) + } + if db.function2 == nil { + return &goLambda{lambda{types.NewCallableType(types.NewTupleType(db.types, types.NewIntegerType(db.min, db.max)), db.returnType, nil)}, db.function} + } + return &goLambdaWithBlock{lambda{types.NewCallableType(types.NewTupleType(db.types, types.NewIntegerType(db.min, db.max)), db.returnType, db.blockType)}, db.function2} +} + +func (db *dispatchBuilder) Name() string { + return db.fb.name +} + +func (db *dispatchBuilder) Param(tp string) { + db.Param2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) Param2(tp px.Type) { + db.assertNotAfterRepeated() + if db.min < db.max { + panic(`Required parameters must not come after optional parameters in a dispatch`) + } + db.types = append(db.types, tp) + db.min++ + db.max++ +} + +func (db *dispatchBuilder) OptionalParam(tp string) { + db.OptionalParam2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) OptionalParam2(tp px.Type) { + db.assertNotAfterRepeated() + db.types = append(db.types, tp) + db.max++ +} + +func (db *dispatchBuilder) RepeatedParam(tp string) { + db.RepeatedParam2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) RepeatedParam2(tp px.Type) { + db.assertNotAfterRepeated() + db.types = append(db.types, tp) + db.max = math.MaxInt64 +} + +func (db *dispatchBuilder) RequiredRepeatedParam(tp string) { + db.RequiredRepeatedParam2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) RequiredRepeatedParam2(tp px.Type) { + db.assertNotAfterRepeated() + db.types = append(db.types, tp) + db.min++ + db.max = math.MaxInt64 +} + +func (db *dispatchBuilder) Block(tp string) { + 
db.Block2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) Block2(tp px.Type) { + if db.returnType != nil { + panic(`Block specified more than once`) + } + db.blockType = tp +} + +func (db *dispatchBuilder) OptionalBlock(tp string) { + db.OptionalBlock2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) OptionalBlock2(tp px.Type) { + db.Block2(tp) + db.optionalBlock = true +} + +func (db *dispatchBuilder) Returns(tp string) { + db.Returns2(types.NewTypeReferenceType(tp)) +} + +func (db *dispatchBuilder) Returns2(tp px.Type) { + if db.returnType != nil { + panic(`Returns specified more than once`) + } + db.returnType = tp +} + +func (db *dispatchBuilder) Function(df px.DispatchFunction) { + if _, ok := db.blockType.(*types.CallableType); ok { + panic(`Dispatch requires a block. Use FunctionWithBlock`) + } + db.function = df +} + +func (db *dispatchBuilder) Function2(df px.DispatchFunctionWithBlock) { + if db.blockType == nil { + panic(`Dispatch does not expect a block. Use Function instead of FunctionWithBlock`) + } + db.function2 = df +} + +func (db *dispatchBuilder) assertNotAfterRepeated() { + if db.max == math.MaxInt64 { + panic(`Repeated parameters can only occur last in a dispatch`) + } +} + +func (f *goFunction) Call(c px.Context, block px.Lambda, args ...px.Value) px.Value { + for _, d := range f.dispatchers { + if d.Signature().CallableWith(args, block) { + return d.Call(c, block, args...) + } + } + panic(px.Error(px.IllegalArguments, issue.H{`function`: f.name, `message`: px.DescribeSignatures(signatures(f.dispatchers), types.WrapValues(args).DetailedType(), block)})) +} + +func signatures(lambdas []px.Lambda) []px.Signature { + s := make([]px.Signature, len(lambdas)) + for i, l := range lambdas { + s[i] = l.Signature() + } + return s +} + +func (f *goFunction) Dispatchers() []px.Lambda { + return f.dispatchers +} + +func (f *goFunction) Name() string { + return f.name +} + +func (f *goFunction) Equals(other interface{}, g px.Guard) bool { + dc := len(f.dispatchers) + if of, ok := other.(*goFunction); ok && f.name == of.name && dc == len(of.dispatchers) { + for i := 0; i < dc; i++ { + if !f.dispatchers[i].Equals(of.dispatchers[i], g) { + return false + } + } + return true + } + return false +} + +func (f *goFunction) String() string { + return fmt.Sprintf(`function %s`, f.name) +} + +func (f *goFunction) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + utils.WriteString(bld, `function `) + utils.WriteString(bld, f.name) +} + +func (f *goFunction) PType() px.Type { + top := len(f.dispatchers) + variants := make([]px.Type, top) + for idx := 0; idx < top; idx++ { + variants[idx] = f.dispatchers[idx].PType() + } + return types.NewVariantType(variants...) 
+} + +func init() { + px.BuildFunction = buildFunction + + px.NewGoFunction = func(name string, creators ...px.DispatchCreator) { + px.RegisterGoFunction(buildFunction(name, nil, creators)) + } + + px.NewGoFunction2 = func(name string, localTypes px.LocalTypesCreator, creators ...px.DispatchCreator) { + px.RegisterGoFunction(buildFunction(name, localTypes, creators)) + } + + px.MakeGoAllocator = func(allocFunc px.DispatchFunction) px.Lambda { + return &goLambda{lambda{types.NewCallableType(types.EmptyTupleType(), nil, nil)}, allocFunc} + } + + px.MakeGoConstructor = func(typeName string, creators ...px.DispatchCreator) px.ResolvableFunction { + return buildFunction(typeName, nil, creators) + } + + px.MakeGoConstructor2 = func(typeName string, localTypes px.LocalTypesCreator, creators ...px.DispatchCreator) px.ResolvableFunction { + return buildFunction(typeName, localTypes, creators) + } + + px.NewGoFunction(`new`, + func(d px.Dispatch) { + d.Param(`Variant[Type,String]`) + d.RepeatedParam(`Any`) + d.OptionalBlock(`Callable[1,1]`) + d.Function2(func(c px.Context, args []px.Value, block px.Lambda) px.Value { + return px.NewWithBlock(c, args[0], args[1:], block) + }) + }, + ) +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/implementationregistry.go b/vendor/github.com/lyraproj/pcore/pximpl/implementationregistry.go new file mode 100644 index 0000000..7c964c1 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/implementationregistry.go @@ -0,0 +1,92 @@ +package pximpl + +import ( + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type implRegistry struct { + reflectToObjectType map[string]px.Type + objectTypeToReflect map[string]reflect.Type +} + +type parentedImplRegistry struct { + px.ImplementationRegistry + implRegistry +} + +func newImplementationRegistry() px.ImplementationRegistry { + return &implRegistry{make(map[string]px.Type, 7), make(map[string]reflect.Type, 7)} +} + +func newParentedImplementationRegistry(parent px.ImplementationRegistry) px.ImplementationRegistry { + return &parentedImplRegistry{parent, implRegistry{make(map[string]px.Type, 7), make(map[string]reflect.Type, 7)}} +} + +func (ir *implRegistry) RegisterType(t px.Type, r reflect.Type) { + r = types.NormalizeType(r) + r = assertUnregistered(ir, t, r) + ir.addTypeMapping(t, r) +} + +func (ir *implRegistry) TypeToReflected(t px.Type) (reflect.Type, bool) { + rt, ok := ir.objectTypeToReflect[t.Name()] + return rt, ok +} + +func (ir *implRegistry) ReflectedNameToType(tn string) (px.Type, bool) { + pt, ok := ir.reflectToObjectType[tn] + return pt, ok +} + +func (ir *implRegistry) ReflectedToType(t reflect.Type) (px.Type, bool) { + return ir.ReflectedNameToType(types.NormalizeType(t).String()) +} + +func (ir *implRegistry) addTypeMapping(t px.Type, r reflect.Type) { + ir.objectTypeToReflect[t.Name()] = r + ir.reflectToObjectType[r.String()] = t +} + +func (pr *parentedImplRegistry) RegisterType(t px.Type, r reflect.Type) { + r = types.NormalizeType(r) + r = assertUnregistered(pr, t, r) + pr.addTypeMapping(t, r) +} + +func (pr *parentedImplRegistry) TypeToReflected(t px.Type) (reflect.Type, bool) { + rt, ok := pr.ImplementationRegistry.TypeToReflected(t) + if !ok { + rt, ok = pr.implRegistry.TypeToReflected(t) + } + return rt, ok +} + +func (pr *parentedImplRegistry) ReflectedNameToType(tn string) (px.Type, bool) { + pt, ok := pr.ImplementationRegistry.ReflectedNameToType(tn) + if !ok { + pt, ok = 
pr.implRegistry.ReflectedNameToType(tn) + } + return pt, ok +} + +func (pr *parentedImplRegistry) ReflectedToType(t reflect.Type) (px.Type, bool) { + return pr.ReflectedNameToType(types.NormalizeType(t).String()) +} + +func assertUnregistered(ir px.ImplementationRegistry, t px.Type, r reflect.Type) reflect.Type { + if rt, ok := ir.TypeToReflected(t); ok { + if r.String() != rt.String() { + panic(px.Error(px.ImplAlreadyRegistered, issue.H{`type`: t})) + } + } + if tn, ok := ir.ReflectedToType(r); ok { + if tn != t { + panic(px.Error(px.ImplAlreadyRegistered, issue.H{`type`: r.String()})) + } + } + return r +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/parameter.go b/vendor/github.com/lyraproj/pcore/pximpl/parameter.go new file mode 100644 index 0000000..bd62017 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/parameter.go @@ -0,0 +1,144 @@ +package pximpl + +import ( + "io" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type parameter struct { + name string + typ px.Type + value px.Value + captures bool +} + +func init() { + px.NewParameter = newParameter +} + +func newParameter(name string, typ px.Type, value px.Value, capturesRest bool) px.Parameter { + return ¶meter{name, typ, value, capturesRest} +} + +func (p *parameter) HasValue() bool { + return p.value != nil +} + +func (p *parameter) Name() string { + return p.name +} + +func (p *parameter) Value() px.Value { + return p.value +} + +func (p *parameter) Type() px.Type { + return p.typ +} + +func (p *parameter) CapturesRest() bool { + return p.captures +} + +func (p *parameter) Get(key string) (value px.Value, ok bool) { + switch key { + case `name`: + return types.WrapString(p.name), true + case `type`: + return p.typ, true + case `value`: + return p.Value(), true + case `has_value`: + return types.WrapBoolean(p.value != nil), true + case `captures_rest`: + return types.WrapBoolean(p.captures), true + } + return nil, false +} + +func (p *parameter) InitHash() px.OrderedMap { + es := make([]*types.HashEntry, 0, 3) + es = append(es, types.WrapHashEntry2(`name`, types.WrapString(p.name))) + es = append(es, types.WrapHashEntry2(`type`, p.typ)) + if p.value != nil { + es = append(es, types.WrapHashEntry2(`value`, p.value)) + } + if p.value == px.Undef { + es = append(es, types.WrapHashEntry2(`has_value`, types.BooleanTrue)) + } + if p.captures { + es = append(es, types.WrapHashEntry2(`captures_rest`, types.BooleanTrue)) + } + return types.WrapHash(es) +} + +var ParameterMetaType px.Type + +func (p *parameter) Equals(other interface{}, guard px.Guard) bool { + return p == other +} + +func (p *parameter) String() string { + return px.ToString(p) +} + +func (p *parameter) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + types.ObjectToString(p, format, bld, g) +} + +func (p *parameter) PType() px.Type { + return ParameterMetaType +} + +func init() { + ParameterMetaType = px.NewObjectType(`Parameter`, `{ + attributes => { + 'name' => String, + 'type' => Type, + 'has_value' => { type => Boolean, value => false }, + 'value' => { type => Variant[Deferred,Data], value => undef }, + 'captures_rest' => { type => Boolean, value => false }, + } + }`, func(ctx px.Context, args []px.Value) px.Value { + n := args[0].String() + t := args[1].(px.Type) + h := false + if len(args) > 2 { + h = args[2].(px.Boolean).Bool() + } + var v px.Value + if len(args) > 3 { + v = args[3] + } + c := false + if len(args) > 4 { + c = args[4].(px.Boolean).Bool() + } + if h && v == nil { + v = px.Undef + } + 
return newParameter(n, t, v, c) + }, func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*types.Hash) + n := h.Get5(`name`, px.EmptyString).String() + t := h.Get5(`type`, types.DefaultDataType()).(px.Type) + var v px.Value + if x, ok := h.Get4(`value`); ok { + v = x + } + hv := false + if x, ok := h.Get4(`has_value`); ok { + hv = x.(px.Boolean).Bool() + } + c := false + if x, ok := h.Get4(`captures_rest`); ok { + c = x.(px.Boolean).Bool() + } + if hv && v == nil { + v = px.Undef + } + return newParameter(n, t, v, c) + }) +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/runtime.go b/vendor/github.com/lyraproj/pcore/pximpl/runtime.go new file mode 100644 index 0000000..ea6f16c --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/runtime.go @@ -0,0 +1,286 @@ +package pximpl + +import ( + "context" + "fmt" + "io/ioutil" + "path/filepath" + "sync" + + "github.com/lyraproj/pcore/loader" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/threadlocal" + "github.com/lyraproj/pcore/types" +) + +type ( + rt struct { + lock sync.RWMutex + logger px.Logger + systemLoader px.Loader + environmentLoader px.Loader + settings map[string]*setting + } + + // RuntimeAPI is the interface to the runtime. The runtime should normally not be + // accessed using the instance. Instead, use the exported functions in the + // runtime package. + RuntimeAPI interface { + // Reset clears all settings and loaders, except the static loaders + Reset() + + // SystemLoader returns the loader that finds all built-ins. It's parented + // by a static loader. + SystemLoader() px.Loader + + // EnvironmentLoader returns the loader that finds things declared + // in the environment and its modules. This loader is parented + // by the SystemLoader + EnvironmentLoader() px.Loader + + // Loader returns a loader for module. + Loader(moduleName string) px.Loader + + // Logger returns the logger that this instance was created with + Logger() px.Logger + + // RootContext returns a new Context that is parented by the context.Background() + // and is initialized with a loader that is parented by the EnvironmentLoader. + // + RootContext() px.Context + + // Get returns a setting or calls the given defaultProducer + // function if the setting does not exist + Get(key string, defaultProducer px.Producer) px.Value + + // Set changes a setting + Set(key string, value px.Value) + + // SetLogger changes the logger + SetLogger(px.Logger) + + // Do executes a given function with an initialized Context instance. + // + // The Context will be parented by the Go context returned by context.Background() + Do(func(px.Context)) + + // DoWithParent executes a given function with an initialized Context instance. + // + // The context will be parented by the given Go context + DoWithParent(context.Context, func(px.Context)) + + // Try executes a given function with an initialized Context instance. If an error occurs, + // it is caught and returned. The error returned from the given function is returned when + // no other error is caught. + // + // The Context will be parented by the Go context returned by context.Background() + Try(func(px.Context) error) error + + // TryWithParent executes a given function with an initialized Context instance. If an error occurs, + // it is caught and returned. 
The error returned from the given function is returned when no other + // error is caught + // + // The context will be parented by the given Go context + TryWithParent(context.Context, func(px.Context) error) error + + // DefineSetting defines a new setting with a given valueType and default + // value. + DefineSetting(key string, valueType px.Type, dflt px.Value) + } +) + +var staticLock sync.Mutex +var pcoreRuntime = &rt{settings: make(map[string]*setting, 32)} +var topImplRegistry px.ImplementationRegistry + +func init() { + pcoreRuntime.DefineSetting(`environment`, types.DefaultStringType(), types.WrapString(`production`)) + pcoreRuntime.DefineSetting(`environmentpath`, types.DefaultStringType(), nil) + pcoreRuntime.DefineSetting(`module_path`, types.DefaultStringType(), nil) + pcoreRuntime.DefineSetting(`strict`, types.NewEnumType([]string{`off`, `warning`, `error`}, true), types.WrapString(`warning`)) + pcoreRuntime.DefineSetting(`tasks`, types.DefaultBooleanType(), types.WrapBoolean(false)) + pcoreRuntime.DefineSetting(`workflow`, types.DefaultBooleanType(), types.WrapBoolean(false)) +} + +func InitializeRuntime() RuntimeAPI { + // First call initializes the static loader. There can be only one since it receives + // most of its contents from Go init() functions + staticLock.Lock() + defer staticLock.Unlock() + + if pcoreRuntime.logger != nil { + return pcoreRuntime + } + + pcoreRuntime.logger = px.NewStdLogger() + + px.RegisterResolvableType(types.NewTypeAliasType(`Pcore::MemberName`, nil, types.TypeMemberName)) + px.RegisterResolvableType(types.NewTypeAliasType(`Pcore::SimpleTypeName`, nil, types.TypeSimpleTypeName)) + px.RegisterResolvableType(types.NewTypeAliasType(`Pcore::typeName`, nil, types.TypeTypeName)) + px.RegisterResolvableType(types.NewTypeAliasType(`Pcore::QRef`, nil, types.TypeQualifiedReference)) + + c := NewContext(loader.StaticLoader, pcoreRuntime.logger) + px.ResolveResolvables(c) + topImplRegistry = c.ImplementationRegistry() + return pcoreRuntime +} + +func (p *rt) Reset() { + p.lock.Lock() + p.systemLoader = nil + p.environmentLoader = nil + for _, s := range p.settings { + s.reset() + } + p.lock.Unlock() +} + +func (p *rt) SetLogger(logger px.Logger) { + p.logger = logger +} + +func (p *rt) SystemLoader() px.Loader { + p.lock.Lock() + p.ensureSystemLoader() + p.lock.Unlock() + return p.systemLoader +} + +// not exported, provides unprotected access to shared object +func (p *rt) ensureSystemLoader() px.Loader { + if p.systemLoader == nil { + p.systemLoader = px.NewParentedLoader(loader.StaticLoader) + } + return p.systemLoader +} + +func (p *rt) EnvironmentLoader() px.Loader { + p.lock.Lock() + defer p.lock.Unlock() + + if p.environmentLoader == nil { + p.ensureSystemLoader() + envLoader := p.systemLoader // TODO: Add proper environment loader + s := p.settings[`module_path`] + if s.isSet() { + mds := make([]px.ModuleLoader, 0) + modulesPath := s.get().String() + fis, err := ioutil.ReadDir(modulesPath) + if err == nil { + lds := []px.PathType{px.PuppetFunctionPath, px.PuppetDataTypePath, px.PlanPath, px.TaskPath} + for _, fi := range fis { + if fi.IsDir() && px.IsValidModuleName(fi.Name()) { + ml := px.NewFileBasedLoader(envLoader, filepath.Join(modulesPath, fi.Name()), fi.Name(), lds...) 
+ mds = append(mds, ml) + } + } + } + if len(mds) > 0 { + envLoader = px.NewDependencyLoader(mds) + } + } + p.environmentLoader = envLoader + } + return p.environmentLoader +} + +func (p *rt) Loader(key string) px.Loader { + envLoader := p.EnvironmentLoader() + if key == `` { + return envLoader + } + if dp, ok := envLoader.(px.DependencyLoader); ok { + return dp.LoaderFor(key) + } + return nil +} + +func (p *rt) DefineSetting(key string, valueType px.Type, dflt px.Value) { + s := &setting{name: key, valueType: valueType, defaultValue: dflt} + if dflt != nil { + s.set(dflt) + } + p.lock.Lock() + p.settings[key] = s + p.lock.Unlock() +} + +func (p *rt) Get(key string, defaultProducer px.Producer) px.Value { + p.lock.RLock() + v, ok := p.settings[key] + p.lock.RUnlock() + + if ok { + if v.isSet() { + return v.get() + } + if defaultProducer == nil { + return px.Undef + } + return defaultProducer() + } + panic(fmt.Sprintf(`Attempt to access unknown setting '%s'`, key)) +} + +func (p *rt) Logger() px.Logger { + return p.logger +} + +func (p *rt) RootContext() px.Context { + InitializeRuntime() + c := WithParent(context.Background(), p.EnvironmentLoader(), p.logger, topImplRegistry) + threadlocal.Init() + threadlocal.Set(px.PuppetContextKey, c) + px.ResolveResolvables(c) + return c +} + +func (p *rt) Do(actor func(px.Context)) { + p.DoWithParent(p.RootContext(), actor) +} + +func (p *rt) DoWithParent(parentCtx context.Context, actor func(px.Context)) { + InitializeRuntime() + if ec, ok := parentCtx.(px.Context); ok { + ctx := ec.Fork() + px.DoWithContext(ctx, actor) + } else { + ctx := WithParent(parentCtx, p.EnvironmentLoader(), p.logger, topImplRegistry) + px.DoWithContext(ctx, func(ctx px.Context) { + px.ResolveResolvables(ctx) + actor(ctx) + }) + } +} + +func (p *rt) Try(actor func(px.Context) error) (err error) { + return p.TryWithParent(p.RootContext(), actor) +} + +func (p *rt) TryWithParent(parentCtx context.Context, actor func(px.Context) error) (err error) { + defer func() { + if r := recover(); r != nil { + if ri, ok := r.(error); ok { + err = ri + } else { + panic(r) + } + } + }() + p.DoWithParent(parentCtx, func(c px.Context) { + err = actor(c) + }) + return +} + +func (p *rt) Set(key string, value px.Value) { + p.lock.RLock() + v, ok := p.settings[key] + p.lock.RUnlock() + + if ok { + v.set(value) + return + } + panic(fmt.Sprintf(`Attempt to assign unknown setting '%s'`, key)) +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/setting.go b/vendor/github.com/lyraproj/pcore/pximpl/setting.go new file mode 100644 index 0000000..f26c56c --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/pximpl/setting.go @@ -0,0 +1,35 @@ +package pximpl + +import ( + "fmt" + + "github.com/lyraproj/pcore/px" +) + +type ( + setting struct { + name string + value px.Value + defaultValue px.Value + valueType px.Type + } +) + +func (s *setting) get() px.Value { + return s.value +} + +func (s *setting) reset() { + s.value = s.defaultValue +} + +func (s *setting) set(value px.Value) { + if !px.IsInstance(s.valueType, value) { + panic(px.DescribeMismatch(fmt.Sprintf(`Setting '%s'`, s.name), s.valueType, px.DetailedValueType(value))) + } + s.value = value +} + +func (s *setting) isSet() bool { + return s.value != nil // As opposed to UNDEF which is a proper value +} diff --git a/vendor/github.com/lyraproj/pcore/pximpl/typemismatchdescriber.go b/vendor/github.com/lyraproj/pcore/pximpl/typemismatchdescriber.go new file mode 100644 index 0000000..fe516b2 --- /dev/null +++ 
b/vendor/github.com/lyraproj/pcore/pximpl/typemismatchdescriber.go @@ -0,0 +1,1204 @@ +package pximpl + +import ( + "bytes" + "fmt" + "math" + "reflect" + "strconv" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" +) + +type ( + pathElement struct { + key string + pathType pathType + } + + pathType string + + mismatchClass string + + mismatch interface { + canonicalPath() []*pathElement + path() []*pathElement + pathString() string + setPath(path []*pathElement) + text() string + class() mismatchClass + equals(other mismatch) bool + } + + expectedActualMismatch interface { + mismatch + actual() px.Type + expected() px.Type + setExpected(expected px.Type) + } + + sizeMismatch interface { + expectedActualMismatch + from() int64 + to() int64 + } + + sizeMismatchFunc func(path []*pathElement, expected *types.IntegerType, actual *types.IntegerType) mismatch + + basicMismatch struct { + p []*pathElement + } + + unexpectedBlock struct{ basicMismatch } + + missingRequiredBlock struct{ basicMismatch } + + keyMismatch struct { + basicMismatch + key string + } + + missingKey struct{ keyMismatch } + extraneousKey struct{ keyMismatch } + unresolvedTypeReference struct{ keyMismatch } + + basicEAMismatch struct { + basicMismatch + actualType px.Type + expectedType px.Type + } + + typeMismatch struct{ basicEAMismatch } + patternMismatch struct{ typeMismatch } + basicSizeMismatch struct{ basicEAMismatch } + countMismatch struct{ basicSizeMismatch } +) + +var NoMismatch []mismatch + +const ( + subject = pathType(``) + entry = pathType(`entry`) + entryKey = pathType(`key of entry`) + _parameter = pathType(`parameter`) + _return = pathType(`return`) + block = pathType(`block`) + index = pathType(`index`) + variant = pathType(`variant`) + signature = pathType(`signature`) + + countMismatchClass = mismatchClass(`countMismatch`) + missingKeyClass = mismatchClass(`missingKey`) + missingRequiredBlockClass = mismatchClass(`missingRequiredBlock`) + extraneousKeyClass = mismatchClass(`extraneousKey`) + patternMismatchClass = mismatchClass(`patternMismatch`) + sizeMismatchClass = mismatchClass(`sizeMismatch`) + typeMismatchClass = mismatchClass(`typeMismatch`) + unexpectedBlockClass = mismatchClass(`unexpectedBlock`) + unresolvedTypeReferenceClass = mismatchClass(`unresolvedTypeReference`) +) + +func (p pathType) String(key string) string { + if p == subject || p == signature { + return key + } + if p == block && key == `block` { + return key + } + if p == _parameter && utils.IsDecimalInteger(key) { + return fmt.Sprintf("parameter %s", key) + } + return fmt.Sprintf("%s '%s'", string(p), key) +} + +func (pe *pathElement) String() string { + return pe.pathType.String(pe.key) +} + +func copyMismatch(m mismatch) mismatch { + orig := reflect.Indirect(reflect.ValueOf(m)) + c := reflect.New(orig.Type()) + c.Elem().Set(orig) + return c.Interface().(mismatch) +} + +func withPath(m mismatch, path []*pathElement) mismatch { + m = copyMismatch(m) + m.setPath(path) + return m +} + +func chopPath(m mismatch, index int) mismatch { + p := m.path() + if index >= len(p) { + return m + } + cp := make([]*pathElement, 0) + for i, pe := range p { + if i != index { + cp = append(cp, pe) + } + } + return withPath(m, cp) +} + +func mergeMismatch(m mismatch, o mismatch, path []*pathElement) mismatch { + m = withPath(m, path) + switch m.(type) { + case *typeMismatch: + et := m.(*typeMismatch) + if ot, ok := o.(*typeMismatch); ok { + if ev, 
ok := et.expectedType.(*types.VariantType); ok { + if ov, ok := ot.expectedType.(*types.VariantType); ok { + ts := make([]px.Type, 0, len(ev.Types())+len(ov.Types())) + ts = append(ts, ev.Types()...) + ts = append(ts, ov.Types()...) + et.setExpected(types.NewVariantType(types.UniqueTypes(ts)...)) + } else { + et.setExpected(types.NewVariantType(types.UniqueTypes(types.CopyAppend(ev.Types(), ot.expectedType))...)) + } + } else { + if ov, ok := ot.expectedType.(*types.VariantType); ok { + ts := make([]px.Type, 0, len(ov.Types())+1) + ts = append(ts, et.expectedType) + ts = append(ts, ov.Types()...) + et.setExpected(types.NewVariantType(types.UniqueTypes(ts)...)) + } else { + if !et.expectedType.Equals(ot.expectedType, nil) { + et.setExpected(types.NewVariantType(et.expectedType, ot.expectedType)) + } + } + } + } + case sizeMismatch: + esm := m.(sizeMismatch) + if osm, ok := o.(sizeMismatch); ok { + min := esm.from() + if min > osm.from() { + min = osm.from() + } + max := esm.to() + if max < osm.to() { + max = osm.to() + } + esm.setExpected(types.NewIntegerType(min, max)) + } + case expectedActualMismatch: + eam := m.(expectedActualMismatch) + if oam, ok := o.(expectedActualMismatch); ok { + eam.setExpected(oam.expected()) + } + } + return m +} + +func joinPath(path []*pathElement) string { + s := make([]string, len(path)) + for i, p := range path { + s[i] = p.String() + } + return strings.Join(s, ` `) +} + +func formatMismatch(m mismatch) string { + p := m.path() + variant := `` + position := `` + if len(p) > 0 { + f := p[0] + if f.pathType == signature { + variant = fmt.Sprintf(` %s`, f.String()) + p = p[1:] + } + if len(p) > 0 { + position = fmt.Sprintf(` %s`, joinPath(p)) + } + } + return message(m, variant, position) +} + +func message(m mismatch, variant string, position string) string { + if variant == `` && position == `` { + return m.text() + } + return fmt.Sprintf("%s%s %s", variant, position, m.text()) +} + +func (m *basicMismatch) canonicalPath() []*pathElement { + result := make([]*pathElement, 0) + for _, p := range m.p { + if p.pathType != variant && p.pathType != signature { + result = append(result, p) + } + } + return result +} + +func (m *basicMismatch) class() mismatchClass { + return `` +} + +func (m *basicMismatch) path() []*pathElement { + return m.p +} + +func (m *basicMismatch) setPath(path []*pathElement) { + m.p = path +} + +func (m *basicMismatch) pathString() string { + return joinPath(m.p) +} + +func (m *basicMismatch) text() string { + return `` +} + +func (m *basicMismatch) equals(other mismatch) bool { + return m.class() == other.class() && pathEquals(m.p, other.path()) +} + +func newUnexpectedBlock(path []*pathElement) mismatch { + return &unexpectedBlock{basicMismatch{p: path}} +} + +func (*unexpectedBlock) class() mismatchClass { + return unexpectedBlockClass +} + +func (*unexpectedBlock) text() string { + return `does not expect a block` +} + +func newMissingRequiredBlock(path []*pathElement) mismatch { + return &missingRequiredBlock{basicMismatch{p: path}} +} + +func (*missingRequiredBlock) class() mismatchClass { + return missingRequiredBlockClass +} + +func (*missingRequiredBlock) text() string { + return `expects a block` +} + +func (m *keyMismatch) equals(other mismatch) bool { + if om, ok := other.(*keyMismatch); ok && pathEquals(m.p, other.path()) { + return m.key == om.key + } + return false +} + +func newMissingKey(path []*pathElement, key string) mismatch { + return &missingKey{keyMismatch{basicMismatch{p: path}, key}} +} + +func (*missingKey) 
class() mismatchClass { + return missingKeyClass +} + +func (m *missingKey) text() string { + return fmt.Sprintf(`expects a value for key '%s'`, m.key) +} + +func newExtraneousKey(path []*pathElement, key string) mismatch { + return &extraneousKey{keyMismatch{basicMismatch{p: path}, key}} +} + +func (*extraneousKey) class() mismatchClass { + return extraneousKeyClass +} + +func (m *extraneousKey) text() string { + return fmt.Sprintf(`unrecognized key '%s'`, m.key) +} + +func newUnresolvedTypeReference(path []*pathElement, key string) mismatch { + return &unresolvedTypeReference{keyMismatch{basicMismatch{p: path}, key}} +} + +func (*unresolvedTypeReference) class() mismatchClass { + return unresolvedTypeReferenceClass +} + +func (m *unresolvedTypeReference) text() string { + return fmt.Sprintf(`references an unresolved type '%s'`, m.key) +} + +func (ea *basicEAMismatch) equals(other mismatch) bool { + if om, ok := other.(*basicEAMismatch); ok && pathEquals(ea.p, other.path()) { + return ea.expectedType == om.expectedType && ea.actualType == om.actualType + } + return false +} + +func (ea *basicEAMismatch) expected() px.Type { + return ea.expectedType +} + +func (ea *basicEAMismatch) actual() px.Type { + return ea.actualType +} + +func (ea *basicEAMismatch) setExpected(expected px.Type) { + ea.expectedType = expected +} + +func newTypeMismatch(path []*pathElement, expected px.Type, actual px.Type) mismatch { + return &typeMismatch{basicEAMismatch{basicMismatch{p: path}, actual, expected}} +} + +func (*typeMismatch) class() mismatchClass { + return typeMismatchClass +} + +func (tm *typeMismatch) text() string { + e := tm.expectedType + a := tm.actualType + multi := false + optional := false + if opt, ok := e.(*types.OptionalType); ok { + e = opt.ContainedType() + optional = true + } + var as, es string + if vt, ok := e.(*types.VariantType); ok { + el := vt.Types() + els := make([]string, len(el)) + if reportDetailed(el, a) { + as = detailedToActualToS(el, a) + for i, e := range el { + els[i] = shortName(e) + } + } else { + for i, e := range el { + els[i] = shortName(e) + } + as = shortName(a) + } + if optional { + els = append([]string{`Undef`}, els...) 
+ } + switch len(els) { + case 1: + es = els[0] + case 2: + es = fmt.Sprintf(`%s or %s`, els[0], els[1]) + multi = true + default: + es = fmt.Sprintf(`%s, or %s`, strings.Join(els[0:len(els)-1], `, `), els[len(els)-1]) + multi = true + } + } else { + el := []px.Type{e} + if reportDetailed(el, a) { + as = detailedToActualToS(el, a) + es = px.ToString2(e, types.Expanded) + } else { + as = shortName(a) + es = shortName(e) + } + } + + if multi { + return fmt.Sprintf(`expects a value of type %s, got %s`, es, as) + } + return fmt.Sprintf(`expects %s %s value, got %s`, issue.Article(es), es, as) +} + +func shortName(t px.Type) string { + if tc, ok := t.(px.TypeWithContainedType); ok && !(tc.ContainedType() == nil || tc.ContainedType() == types.DefaultAnyType()) { + return fmt.Sprintf("%s[%s]", t.Name(), tc.ContainedType().Name()) + } + if t.Name() == `` { + return `Object` + } + return t.Name() +} + +func detailedToActualToS(es []px.Type, a px.Type) string { + es = allResolved(es) + if alwaysFullyDetailed(es, a) || anyAssignable(es, px.Generalize(a)) { + if as, ok := a.(px.StringType); ok && as.Value() != nil { + b := bytes.NewBufferString(``) + utils.PuppetQuote(b, *as.Value()) + return b.String() + } + return px.ToString2(a, types.Expanded) + } + return a.Name() +} + +func anyAssignable(es []px.Type, a px.Type) bool { + for _, e := range es { + if px.IsAssignable(e, a) { + return true + } + } + return false +} + +func alwaysFullyDetailed(es []px.Type, a px.Type) bool { + for _, e := range es { + if px.Generalize(e).Equals(px.Generalize(a), nil) { + return true + } + if _, ok := e.(*types.TypeAliasType); ok { + return true + } + if _, ok := a.(*types.TypeAliasType); ok { + return true + } + if specialization(e, a) { + return true + } + } + return false +} + +func specialization(e px.Type, a px.Type) (result bool) { + switch e.(type) { + case *types.InitType: + result = true + case px.StringType: + as, ok := a.(px.StringType) + result = ok && as.Value() != nil + case *types.StructType: + _, result = a.(*types.HashType) + case *types.TupleType: + _, result = a.(*types.ArrayType) + case px.ObjectType: + _, result = a.(*types.StructType) + default: + result = false + } + return +} + +func allResolved(es []px.Type) []px.Type { + rs := make([]px.Type, len(es)) + for i, e := range es { + if ea, ok := e.(*types.TypeAliasType); ok { + e = ea.ResolvedType() + } + rs[i] = e + } + return rs +} + +func reportDetailed(e []px.Type, a px.Type) bool { + return alwaysFullyDetailed(e, a) || assignableToDefault(e, a) +} + +func assignableToDefault(es []px.Type, a px.Type) bool { + for _, e := range es { + if ea, ok := e.(*types.TypeAliasType); ok { + e = ea.ResolvedType() + } + if px.IsAssignable(px.DefaultFor(e), a) { + return true + } + } + return false +} + +func newPatternMismatch(path []*pathElement, expected px.Type, actual px.Type) mismatch { + return &patternMismatch{ + typeMismatch{ + basicEAMismatch{ + basicMismatch{p: path}, actual, expected}}} +} + +func (*patternMismatch) class() mismatchClass { + return patternMismatchClass +} + +func (m *patternMismatch) text() string { + e := m.expectedType + valuePfx := `` + if oe, ok := e.(*types.OptionalType); ok { + e = oe.ContainedType() + valuePfx = `an undef value or ` + } + return fmt.Sprintf(`expects %sa match for %s, got %s`, + valuePfx, px.ToString2(e, types.Expanded), m.actualString()) +} + +func (m *patternMismatch) actualString() string { + a := m.actualType + if as, ok := a.(px.StringType); ok && as.Value() != nil { + return fmt.Sprintf(`'%s'`, 
*as.Value()) + } + return shortName(a) +} + +func newSizeMismatch(path []*pathElement, expected *types.IntegerType, actual *types.IntegerType) mismatch { + return &basicSizeMismatch{ + basicEAMismatch{ + basicMismatch{p: path}, actual, expected}} +} + +func (*basicSizeMismatch) class() mismatchClass { + return sizeMismatchClass +} + +func (m *basicSizeMismatch) from() int64 { + return m.expectedType.(*types.IntegerType).Min() +} + +func (m *basicSizeMismatch) to() int64 { + return m.expectedType.(*types.IntegerType).Max() +} + +func (m *basicSizeMismatch) text() string { + return fmt.Sprintf(`expects size to be %s, got %s`, + rangeToS(m.expectedType.(*types.IntegerType), `0`), + rangeToS(m.actualType.(*types.IntegerType), `0`)) +} + +func rangeToS(rng *types.IntegerType, zeroString string) string { + if rng.Min() == rng.Max() { + if rng.Min() == 0 { + return zeroString + } + return strconv.FormatInt(rng.Min(), 10) + } else if rng.Min() == 0 { + if rng.Max() == math.MaxInt64 { + return `unbounded` + } + return fmt.Sprintf(`at most %d`, rng.Max()) + } else if rng.Max() == math.MaxInt64 { + return fmt.Sprintf(`at least %d`, rng.Min()) + } else { + return fmt.Sprintf("between %d and %d", rng.Min(), rng.Max()) + } +} + +func newCountMismatch(path []*pathElement, expected *types.IntegerType, actual *types.IntegerType) mismatch { + return &countMismatch{basicSizeMismatch{ + basicEAMismatch{ + basicMismatch{p: path}, actual, expected}}} +} + +func (*countMismatch) class() mismatchClass { + return countMismatchClass +} + +func (tm *countMismatch) text() string { + ei := tm.expectedType.(*types.IntegerType) + suffix := `s` + if ei.Min() == 1 && (ei.Max() == 1 || ei.Max() == math.MaxInt64) || ei.Min() == 0 && ei.Max() == 1 { + suffix = `` + } + + return fmt.Sprintf(`expects %s argument%s, got %s`, + rangeToS(ei, `no`), suffix, + rangeToS(tm.actualType.(*types.IntegerType), `none`)) +} + +func describeOptionalType(expected *types.OptionalType, original, actual px.Type, path []*pathElement) []mismatch { + if _, ok := actual.(*types.UndefType); ok { + return NoMismatch + } + if _, ok := original.(*types.TypeAliasType); !ok { + // If the original expectation is an alias, it must now track the optional type instead + original = expected + } + return internalDescribe(expected.ContainedType(), original, actual, path) +} + +func describeEnumType(expected *types.EnumType, original, actual px.Type, path []*pathElement) []mismatch { + if px.IsAssignable(expected, actual) { + return []mismatch{} + } + return []mismatch{newPatternMismatch(path, original, actual)} +} + +func describeInitType(expected *types.InitType, actual px.Type, path []*pathElement) []mismatch { + if px.IsAssignable(expected, actual) { + return []mismatch{} + } + + ds := make([]mismatch, 0, 4) + ix := 0 + at := types.NewTupleType([]px.Type{actual}, nil) + expected.EachSignature(func(sg px.Signature) { + ds = append(ds, describeSignatureArguments(sg, at, append(path, &pathElement{strconv.Itoa(ix), signature}))...) 
+ }) + return ds +} + +func describePatternType(expected *types.PatternType, original, actual px.Type, path []*pathElement) []mismatch { + if px.IsAssignable(expected, actual) { + return NoMismatch + } + return []mismatch{newPatternMismatch(path, original, actual)} +} + +func describeTypeAliasType(expected *types.TypeAliasType, actual px.Type, path []*pathElement) []mismatch { + return internalDescribe(px.Normalize(expected.ResolvedType()), expected, actual, path) +} + +func describeArrayType(expected *types.ArrayType, original, actual px.Type, path []*pathElement) []mismatch { + descriptions := make([]mismatch, 0, 4) + et := expected.ElementType() + if ta, ok := actual.(*types.TupleType); ok { + if px.IsAssignable(expected.Size(), ta.Size()) { + for ax, at := range ta.Types() { + if !px.IsAssignable(et, at) { + descriptions = append(descriptions, internalDescribe(et, et, at, pathWith(path, &pathElement{strconv.Itoa(ax), index}))...) + } + } + } else { + descriptions = append(descriptions, newSizeMismatch(path, expected.Size(), ta.Size())) + } + } else if aa, ok := actual.(*types.ArrayType); ok { + if !px.IsAssignable(expected, aa) { + if px.IsAssignable(expected.Size(), aa.Size()) { + descriptions = append(descriptions, newTypeMismatch(path, original, types.NewArrayType(aa.ElementType(), nil))) + } else { + descriptions = append(descriptions, newSizeMismatch(path, expected.Size(), aa.Size())) + } + } + } else { + descriptions = append(descriptions, newTypeMismatch(path, original, actual)) + } + return descriptions +} + +func describeHashType(expected *types.HashType, original, actual px.Type, path []*pathElement) []mismatch { + descriptions := make([]mismatch, 0, 4) + kt := expected.KeyType() + vt := expected.ValueType() + if sa, ok := actual.(*types.StructType); ok { + if px.IsAssignable(expected.Size(), sa.Size()) { + for _, al := range sa.Elements() { + descriptions = append(descriptions, internalDescribe(kt, kt, al.Key(), pathWith(path, &pathElement{al.Name(), entryKey}))...) + descriptions = append(descriptions, internalDescribe(vt, vt, al.Value(), pathWith(path, &pathElement{al.Name(), entry}))...) + } + } else { + descriptions = append(descriptions, newSizeMismatch(path, expected.Size(), sa.Size())) + } + } else if ha, ok := actual.(*types.HashType); ok { + if !px.IsAssignable(expected, ha) { + if px.IsAssignable(expected.Size(), ha.Size()) { + descriptions = append(descriptions, newTypeMismatch(path, original, types.NewHashType(ha.KeyType(), ha.ValueType(), nil))) + } else { + descriptions = append(descriptions, newSizeMismatch(path, expected.Size(), ha.Size())) + } + } + } else { + descriptions = append(descriptions, newTypeMismatch(path, original, actual)) + } + return descriptions +} + +func describeStructType(expected *types.StructType, original, actual px.Type, path []*pathElement) []mismatch { + descriptions := make([]mismatch, 0, 4) + if sa, ok := actual.(*types.StructType); ok { + h2 := sa.HashedMembersCloned() + for _, e1 := range expected.Elements() { + key := e1.Name() + e2, ok := h2[key] + if ok { + delete(h2, key) + ek := e1.ActualKeyType() + descriptions = append(descriptions, internalDescribe(ek, ek, e2.ActualKeyType(), pathWith(path, &pathElement{key, entryKey}))...) + descriptions = append(descriptions, internalDescribe(e1.Value(), e1.Value(), e2.Value(), pathWith(path, &pathElement{key, entry}))...) 
+ } else { + if !e1.Optional() { + descriptions = append(descriptions, newMissingKey(path, e1.Name())) + } + } + } + for key := range h2 { + descriptions = append(descriptions, newExtraneousKey(path, key)) + } + } else if ha, ok := actual.(*types.HashType); ok { + if !px.IsAssignable(expected, ha) { + if px.IsAssignable(expected.Size(), ha.Size()) { + descriptions = append(descriptions, newTypeMismatch(path, original, types.NewHashType(ha.KeyType(), ha.ValueType(), nil))) + } else { + descriptions = append(descriptions, newSizeMismatch(path, expected.Size(), ha.Size())) + } + } + } else { + descriptions = append(descriptions, newTypeMismatch(path, original, actual)) + } + return descriptions +} + +func describeTupleType(expected *types.TupleType, original, actual px.Type, path []*pathElement) []mismatch { + return describeTuple(expected, original, actual, path, newCountMismatch) +} + +func describeArgumentTuple(expected *types.TupleType, actual px.Type, path []*pathElement) []mismatch { + return describeTuple(expected, expected, actual, path, newCountMismatch) +} + +func describeTuple(expected *types.TupleType, original, actual px.Type, path []*pathElement, sm sizeMismatchFunc) []mismatch { + if aa, ok := actual.(*types.ArrayType); ok { + if len(expected.Types()) == 0 || px.IsAssignable(expected, aa) { + return NoMismatch + } + t2Entry := aa.ElementType() + if t2Entry == types.DefaultAnyType() { + // Array of anything can not be assigned (unless tuple is tuple of anything) - this case + // was handled at the top of this method. + return []mismatch{newTypeMismatch(path, original, actual)} + } + + if !px.IsAssignable(expected.Size(), aa.Size()) { + return []mismatch{sm(path, expected.Size(), aa.Size())} + } + + descriptions := make([]mismatch, 0, 4) + for ex, et := range expected.Types() { + descriptions = append(descriptions, internalDescribe(et, et, aa.ElementType(), + pathWith(path, &pathElement{strconv.Itoa(ex), index}))...) + } + return descriptions + } + + if at, ok := actual.(*types.TupleType); ok { + if expected.Equals(actual, nil) || px.IsAssignable(expected, at) { + return NoMismatch + } + + if !px.IsAssignable(expected.Size(), at.Size()) { + return []mismatch{sm(path, expected.Size(), at.Size())} + } + + exl := len(expected.Types()) + if exl == 0 { + return NoMismatch + } + + descriptions := make([]mismatch, 0, 4) + for ax, at := range at.Types() { + if ax >= exl { + ex := exl - 1 + ext := expected.Types()[ex] + descriptions = append(descriptions, internalDescribe(ext, ext, at, + pathWith(path, &pathElement{strconv.Itoa(ax), index}))...) 
+ } + } + return descriptions + } + + return []mismatch{newTypeMismatch(path, original, actual)} +} + +func pathEquals(a, b []*pathElement) bool { + n := len(a) + if n != len(b) { + return false + } + for i := 0; i < n; i++ { + if *(a[i]) != *(b[i]) { + return false + } + } + return true +} + +func pathWith(path []*pathElement, elem *pathElement) []*pathElement { + top := len(path) + pc := make([]*pathElement, top+1) + copy(pc, path) + pc[top] = elem + return pc +} + +func describeCallableType(expected *types.CallableType, original, actual px.Type, path []*pathElement) []mismatch { + if ca, ok := actual.(*types.CallableType); ok { + ep := expected.ParametersType() + paramErrors := NoMismatch + if ep != nil { + ap := ca.ParametersType() + paramErrors = describeArgumentTuple(ep.(*types.TupleType), types.NilAs(types.DefaultTupleType(), ap), path) + } + if len(paramErrors) == 0 { + er := expected.ReturnType() + ar := types.NilAs(types.DefaultAnyType(), ca.ReturnType()) + if er == nil || px.IsAssignable(er, ar) { + eb := expected.BlockType() + ab := ca.BlockType() + if eb == nil || px.IsAssignable(eb, types.NilAs(types.DefaultUndefType(), ab)) { + return NoMismatch + } + if ab == nil { + return []mismatch{newMissingRequiredBlock(path)} + } + return []mismatch{newTypeMismatch(pathWith(path, &pathElement{``, block}), eb, ab)} + } + return []mismatch{newTypeMismatch(pathWith(path, &pathElement{``, _return}), er, ar)} + } + return paramErrors + } + return []mismatch{newTypeMismatch(path, original, actual)} +} + +func describeAnyType(expected px.Type, original, actual px.Type, path []*pathElement) []mismatch { + if px.IsAssignable(expected, actual) { + return NoMismatch + } + return []mismatch{newTypeMismatch(path, original, actual)} +} + +func describe(expected px.Type, actual px.Type, path []*pathElement) []mismatch { + var unresolved *types.TypeReferenceType + expected.Accept(func(t px.Type) { + if unresolved == nil { + if ur, ok := t.(*types.TypeReferenceType); ok { + unresolved = ur + } + } + }, nil) + + if unresolved != nil { + return []mismatch{newUnresolvedTypeReference(path, unresolved.TypeString())} + } + return internalDescribe(px.Normalize(expected), expected, actual, path) +} + +func internalDescribe(expected px.Type, original, actual px.Type, path []*pathElement) []mismatch { + switch expected := expected.(type) { + case *types.VariantType: + return describeVariantType(expected, original, actual, path) + case *types.StructType: + return describeStructType(expected, original, actual, path) + case *types.HashType: + return describeHashType(expected, original, actual, path) + case *types.TupleType: + return describeTupleType(expected, original, actual, path) + case *types.ArrayType: + return describeArrayType(expected, original, actual, path) + case *types.CallableType: + return describeCallableType(expected, original, actual, path) + case *types.OptionalType: + return describeOptionalType(expected, original, actual, path) + case *types.PatternType: + return describePatternType(expected, original, actual, path) + case *types.EnumType: + return describeEnumType(expected, original, actual, path) + case *types.InitType: + return describeInitType(expected, actual, path) + case *types.TypeAliasType: + return describeTypeAliasType(expected, actual, path) + default: + return describeAnyType(expected, original, actual, path) + } +} + +func describeVariantType(expected *types.VariantType, original, actual px.Type, path []*pathElement) []mismatch { + vs := make([]mismatch, 0, len(expected.Types())) 
+ ts := expected.Types() + if _, ok := original.(*types.OptionalType); ok { + ts = types.CopyAppend(ts, types.DefaultUndefType()) + } + + for ex, vt := range ts { + if px.IsAssignable(vt, actual) { + return NoMismatch + } + d := internalDescribe(vt, vt, actual, pathWith(path, &pathElement{strconv.Itoa(ex), variant})) + vs = append(vs, d...) + } + + ds := mergeDescriptions(len(path), sizeMismatchClass, vs) + if _, ok := original.(*types.TypeAliasType); ok && len(ds) == 1 { + // All variants failed in this alias so we report it as a mismatch on the alias + // rather than reporting individual failures of the variants + ds = []mismatch{newTypeMismatch(path, original, actual)} + } + return ds +} + +func mergeDescriptions(varyingPathPosition int, sm mismatchClass, descriptions []mismatch) []mismatch { + n := len(descriptions) + if n == 0 { + return NoMismatch + } + + for _, mClass := range []mismatchClass{sm, missingRequiredBlockClass, unexpectedBlockClass, typeMismatchClass} { + mismatches := make([]mismatch, 0, 4) + for _, desc := range descriptions { + if desc.class() == mClass { + mismatches = append(mismatches, desc) + } + } + if len(mismatches) == n { + // If they all have the same canonical path, then we can compact this into one + prev := mismatches[0] + for idx := 1; idx < n; idx++ { + curr := mismatches[idx] + if pathEquals(prev.canonicalPath(), curr.canonicalPath()) { + prev = mergeMismatch(prev, curr, prev.path()) + } else { + prev = nil + break + } + } + if prev != nil { + // Report the generic mismatch and skip the rest + descriptions = []mismatch{prev} + break + } + } + } + descriptions = unique(descriptions) + if len(descriptions) == 1 { + descriptions = []mismatch{chopPath(descriptions[0], varyingPathPosition)} + } + return descriptions +} + +func unique(v []mismatch) []mismatch { + u := make([]mismatch, 0, len(v)) +next: + for _, m := range v { + for _, x := range u { + if m == x { + break next + } + } + u = append(u, m) + } + return u +} + +func init() { + px.DescribeSignatures = describeSignatures + + px.DescribeMismatch = func(name string, expected, actual px.Type) string { + result := describe(expected, actual, []*pathElement{{fmt.Sprintf("function %s:", name), subject}}) + switch len(result) { + case 0: + return `` + case 1: + return formatMismatch(result[0]) + default: + rs := make([]string, len(result)) + for i, r := range result { + rs[i] = formatMismatch(r) + } + return strings.Join(rs, "\n") + } + } +} + +func describeSignatures(signatures []px.Signature, argsTuple px.Type, block px.Lambda) string { + errorArrays := make([][]mismatch, len(signatures)) + allSet := true + + ne := 0 + for ix, sg := range signatures { + ae := describeSignatureArguments(sg, argsTuple, []*pathElement{{strconv.Itoa(ix), signature}}) + errorArrays[ix] = ae + if len(ae) == 0 { + allSet = false + } else { + ne++ + } + } + + // Skip block checks if all signatures have argument errors + if !allSet { + blockArrays := make([][]mismatch, len(signatures)) + bcCount := 0 + for ix, sg := range signatures { + ae := describeSignatureBlock(sg, block, []*pathElement{{strconv.Itoa(ix), signature}}) + blockArrays[ix] = ae + if len(ae) > 0 { + bcCount++ + } + } + if bcCount == len(blockArrays) { + // Skip argument errors when all alternatives have block errors + errorArrays = blockArrays + } else if bcCount > 0 { + // Merge errors giving argument errors precedence over block errors + for ix, ea := range errorArrays { + if len(ea) == 0 { + errorArrays[ix] = blockArrays[ix] + } + } + } + } + if len(errorArrays) 
== 0 { + return `` + } + + if ne > 1 { + // If the argsTuple is of size one and the argument is a Struct, then skip the positional + // signature since that output just decreases readability. + isStruct := false + switch args := argsTuple.(type) { + case *types.TupleType: + if len(args.Types()) == 1 { + _, isStruct = args.Types()[0].(*types.StructType) + } + case *types.ArrayType: + aSize := args.Size() + if aSize.Max() == 1 { + _, isStruct = args.ElementType().(*types.StructType) + } + } + + if isStruct { + structArg := -1 + for ix, sg := range signatures { + ae := errorArrays[ix] + if len(ae) > 0 { + paramsTuple := sg.ParametersType().(*types.TupleType) + tps := paramsTuple.Types() + if len(tps) >= 1 && paramsTuple.Size().Min() <= 1 { + if _, ok := tps[0].(*types.StructType); ok { + if structArg >= 0 { + // Multiple struct args. Break out + structArg = -1 + break + } + structArg = ix + } + } + } + } + if structArg >= 0 { + // Strip other errors + errorArrays = errorArrays[structArg : structArg+1] + signatures = signatures[structArg : structArg+1] + } + } + } + + errors := make([]mismatch, 0) + for _, ea := range errorArrays { + errors = append(errors, ea...) + } + + errors = mergeDescriptions(0, countMismatchClass, errors) + if len(errors) == 1 { + return formatMismatch(errors[0]) + } + + var result []string + if len(signatures) == 1 { + result = []string{fmt.Sprintf(`expects (%s)`, signatureString(signatures[0]))} + for _, e := range errorArrays[0] { + result = append(result, fmt.Sprintf(` rejected:%s`, formatMismatch(chopPath(e, 0)))) + } + } else { + result = []string{`expects one of:`} + for ix, sg := range signatures { + result = append(result, fmt.Sprintf(` (%s)`, signatureString(sg))) + for _, e := range errorArrays[ix] { + result = append(result, fmt.Sprintf(` rejected:%s`, formatMismatch(chopPath(e, 0)))) + } + } + } + return strings.Join(result, "\n") +} + +func describeSignatureArguments(signature px.Signature, args px.Type, path []*pathElement) []mismatch { + paramsTuple := signature.ParametersType().(*types.TupleType) + eSize := paramsTuple.Size() + + var aSize *types.IntegerType + var aTypes []px.Type + switch args := args.(type) { + case *types.TupleType: + aSize = args.Size() + aTypes = args.Types() + case *types.ArrayType: + aSize = args.Size() + n := int(aSize.Min()) + aTypes = make([]px.Type, n) + for i := 0; i < n; i++ { + aTypes[i] = args.ElementType() + } + } + if px.IsAssignable(eSize, aSize) { + eTypes := paramsTuple.Types() + eLast := len(eTypes) - 1 + eNames := signature.ParameterNames() + for ax, aType := range aTypes { + ex := ax + if ex > eLast { + ex = eLast + } + eType := eTypes[ex] + if !px.IsAssignable(eType, aType) { + descriptions := describe(eType, aType, pathWith(path, &pathElement{eNames[ex], _parameter})) + if len(descriptions) > 0 { + return descriptions + } + } + } + return NoMismatch + } + return []mismatch{newCountMismatch(path, eSize, aSize)} +} + +func describeSignatureBlock(signature px.Signature, aBlock px.Lambda, path []*pathElement) []mismatch { + eBlock := signature.BlockType() + if aBlock == nil { + if eBlock == nil || px.IsAssignable(eBlock, types.DefaultUndefType()) { + return NoMismatch + } + return []mismatch{newMissingRequiredBlock(path)} + } + + if eBlock == nil { + return []mismatch{newUnexpectedBlock(path)} + } + return describe(eBlock, aBlock.Signature(), pathWith(path, &pathElement{signature.BlockName(), block})) +} + +func signatureString(signature px.Signature) string { + tuple := signature.ParametersType().(*types.TupleType) 
+ size := tuple.Size() + if size.Min() == 0 && size.Max() == 0 { + return `` + } + + names := signature.ParameterNames() + ts := tuple.Types() + limit := len(ts) + results := make([]string, 0, limit) + for ix, t := range ts { + indicator := `` + if size.Max() == math.MaxInt64 && ix == limit-1 { + // Last is a repeated_param. + indicator = `*` + if size.Min() == int64(len(names)) { + indicator = `+` + } + } else if optional(ix, size.Max()) { + indicator = `?` + if ot, ok := t.(*types.OptionalType); ok { + t = ot.ContainedType() + } + } + results = append(results, fmt.Sprintf(`%s %s%s`, t.String(), names[ix], indicator)) + } + + block := signature.BlockType() + if block != nil { + if ob, ok := block.(*types.OptionalType); ok { + block = ob.ContainedType() + } + results = append(results, fmt.Sprintf(`%s %s`, block.String(), signature.BlockName())) + } + return strings.Join(results, `, `) +} + +func optional(index int, requiredCount int64) bool { + count := int64(index + 1) + return count > requiredCount +} diff --git a/vendor/github.com/lyraproj/pcore/serialization/deserializer.go b/vendor/github.com/lyraproj/pcore/serialization/deserializer.go new file mode 100644 index 0000000..dc9d287 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/serialization/deserializer.go @@ -0,0 +1,194 @@ +package serialization + +import ( + "bytes" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +type dsContext struct { + types.BasicCollector + allowUnresolved bool + context px.Context + newTypes []px.Type + value px.Value + converted map[px.Value]px.Value +} + +// NewDeserializer creates a new Collector that consumes input and creates a RichData Value +func NewDeserializer(ctx px.Context, options px.OrderedMap) px.Collector { + ds := &dsContext{ + context: ctx, + newTypes: make([]px.Type, 0, 11), + converted: make(map[px.Value]px.Value, 11), + allowUnresolved: options.Get5(`allow_unresolved`, types.BooleanFalse).(px.Boolean).Bool()} + ds.Init() + return ds +} + +func (ds *dsContext) Value() px.Value { + if ds.value == nil { + ds.value = ds.convert(ds.BasicCollector.Value()) + px.AddTypes(ds.context, ds.newTypes...) + } + return ds.value +} + +func (ds *dsContext) convert(value px.Value) px.Value { + if cv, ok := ds.converted[value]; ok { + return cv + } + + if hash, ok := value.(*types.Hash); ok { + if hash.AllKeysAreStrings() { + if pcoreType, ok := hash.Get4(PcoreTypeKey); ok { + switch pcoreType.String() { + case PcoreTypeHash: + return ds.convertHash(hash) + case PcoreTypeSensitive: + return ds.convertSensitive(hash) + case PcoreTypeDefault: + return types.WrapDefault() + default: + v := ds.convertOther(hash, pcoreType) + switch v.(type) { + case px.ObjectType, px.TypeSet, *types.TypeAliasType: + // Ensure that type is made known to current loader + rt := v.(px.ResolvableType) + n := rt.Name() + if n == `` { + // Anonymous type. Just resolve. 
+ return rt.Resolve(ds.context) + } + // Duplicates can be found here if serialization was made with dedupLevel NoDedup + for _, nt := range ds.newTypes { + if n == nt.Name() { + return nt + } + } + tn := px.NewTypedName(px.NsType, n) + if lt, ok := px.Load(ds.context, tn); ok { + t := rt.Resolve(ds.context) + if t.Equals(lt, nil) { + return lt.(px.Value) + } + ob := bytes.NewBufferString(``) + lt.(px.Type).ToString(ob, px.PrettyExpanded, nil) + nb := bytes.NewBufferString(``) + t.(px.Type).ToString(nb, px.PrettyExpanded, nil) + panic(px.Error(px.AttemptToRedefineType, issue.H{`name`: tn, `old`: ob.String(), `new`: nb.String()})) + } + ds.newTypes = append(ds.newTypes, v.(px.Type)) + } + return v + } + } + } + + return types.BuildHash(hash.Len(), func(h *types.Hash, entries []*types.HashEntry) []*types.HashEntry { + ds.converted[value] = h + hash.EachPair(func(k, v px.Value) { + entries = append(entries, types.WrapHashEntry(ds.convert(k), ds.convert(v))) + }) + return entries + }) + } + + if array, ok := value.(*types.Array); ok { + return types.BuildArray(array.Len(), func(a *types.Array, elements []px.Value) []px.Value { + ds.converted[value] = a + array.Each(func(v px.Value) { elements = append(elements, ds.convert(v)) }) + return elements + }) + } + return value +} + +func (ds *dsContext) convertHash(hv px.OrderedMap) px.Value { + value := hv.Get5(PcoreValueKey, px.EmptyArray).(px.List) + return types.BuildHash(value.Len(), func(hash *types.Hash, entries []*types.HashEntry) []*types.HashEntry { + ds.converted[hv] = hash + for idx := 0; idx < value.Len(); idx += 2 { + entries = append(entries, types.WrapHashEntry(ds.convert(value.At(idx)), ds.convert(value.At(idx+1)))) + } + return entries + }) +} + +func (ds *dsContext) convertSensitive(hash px.OrderedMap) px.Value { + cv := types.WrapSensitive(ds.convert(hash.Get5(PcoreValueKey, px.Undef))) + ds.converted[hash] = cv + return cv +} + +func (ds *dsContext) convertOther(hash px.OrderedMap, typeValue px.Value) px.Value { + value := hash.Get6(PcoreValueKey, func() px.Value { + return hash.RejectPairs(func(k, v px.Value) bool { + if s, ok := k.(px.StringValue); ok { + return s.String() == PcoreTypeKey + } + return false + }) + }) + if typeHash, ok := typeValue.(*types.Hash); ok { + typ := ds.convert(typeHash) + if _, ok := typ.(*types.Hash); ok { + if !ds.allowUnresolved { + panic(px.Error(px.UnableToDeserializeType, issue.H{`hash`: typ.String()})) + } + return hash + } + return ds.pcoreTypeHashToValue(typ.(px.Type), hash, value) + } + typ := ds.context.ParseTypeValue(typeValue) + if tr, ok := typ.(*types.TypeReferenceType); ok { + if !ds.allowUnresolved { + panic(px.Error(px.UnresolvedType, issue.H{`typeString`: tr.String()})) + } + return hash + } + return ds.pcoreTypeHashToValue(typ.(px.Type), hash, value) +} + +func (ds *dsContext) pcoreTypeHashToValue(typ px.Type, key, value px.Value) px.Value { + var ov px.Value + if hash, ok := value.(*types.Hash); ok { + args := ds.convert(hash) + + if ov, ok = ds.allocate(typ); ok { + ds.converted[key] = ov + ov.(px.Object).InitFromHash(ds.context, args.(*types.Hash)) + return ov + } + + if ot, ok := typ.(px.ObjectType); ok { + if ot.HasHashConstructor() { + ov = px.New(ds.context, typ, args) + } else { + ov = px.New(ds.context, typ, ot.AttributesInfo().PositionalFromHash(args.(*types.Hash))...) 
+ } + } else { + ov = px.New(ds.context, typ, args) + } + } else { + if str, ok := value.(px.StringValue); ok { + ov = px.New(ds.context, typ, str) + } else { + panic(px.Error(px.UnableToDeserializeValue, issue.H{`type`: typ.Name(), `arg_type`: value.PType().Name()})) + } + } + ds.converted[key] = ov + return ov +} + +func (ds *dsContext) allocate(typ px.Type) (px.Object, bool) { + if allocator, ok := px.Load(ds.context, px.NewTypedName(px.NsAllocator, typ.Name())); ok { + return allocator.(px.Lambda).Call(nil, nil).(px.Object), true + } + if ot, ok := typ.(px.ObjectType); ok && ot.Name() == `Pcore::ObjectType` { + return types.AllocObjectType(), true + } + return nil, false +} diff --git a/vendor/github.com/lyraproj/pcore/serialization/extension.go b/vendor/github.com/lyraproj/pcore/serialization/extension.go new file mode 100644 index 0000000..fbd094a --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/serialization/extension.go @@ -0,0 +1,27 @@ +package serialization + +const ( + // PcoreTypeKey is the key used to signify the type of a serialized value + PcoreTypeKey = `__ptype` + + // PcoreValueKey is used when the value can be represented as, and recreated from, a single string that can + // be passed to a `from_string` method or an array of values that can be passed to the default + // initializer method. + PcoreValueKey = `__pvalue` + + // PcoreRefKey is the key used to signify the ordinal number of a previously serialized value. The + // value is always an integer + PcoreRefKey = `__pref` + + // PcoreTypeBinary is used for binaries serialized using base64 + PcoreTypeBinary = `Binary` + + // PcoreTypeHash is used for hashes that contain keys that are not of type String + PcoreTypeHash = `Hash` + + // PcoreTypeSensitive is the type key used for sensitive values + PcoreTypeSensitive = `Sensitive` + + // PcoreTypeDefault is the type key used for Default + PcoreTypeDefault = `Default` +) diff --git a/vendor/github.com/lyraproj/pcore/serialization/jsonstreamer.go b/vendor/github.com/lyraproj/pcore/serialization/jsonstreamer.go new file mode 100644 index 0000000..16ad7fc --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/serialization/jsonstreamer.go @@ -0,0 +1,123 @@ +package serialization + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/lyraproj/pcore/pcore" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +// NewJsonStreamer creates a new streamer that will produce JSON when +// receiving values +func NewJsonStreamer(out io.Writer) px.ValueConsumer { + return &jsonStreamer{out, firstInArray} +} + +type jsonStreamer struct { + out io.Writer + state int +} + +// DataToJson streams the given value to a Json ValueConsumer using a +// Serializer. 
This function is deprecated Use a Serializer directly with +// NewJsonStreamer +func DataToJson(value px.Value, out io.Writer) { + he := make([]*types.HashEntry, 0, 2) + he = append(he, types.WrapHashEntry2(`rich_data`, types.BooleanFalse)) + NewSerializer(pcore.RootContext(), types.WrapHash(he)).Convert(value, NewJsonStreamer(out)) + assertOk(out.Write([]byte("\n"))) +} + +func (j *jsonStreamer) AddArray(len int, doer px.Doer) { + j.delimit(func() { + j.state = firstInArray + assertOk(j.out.Write([]byte{'['})) + doer() + assertOk(j.out.Write([]byte{']'})) + }) +} + +func (j *jsonStreamer) AddHash(len int, doer px.Doer) { + j.delimit(func() { + assertOk(j.out.Write([]byte{'{'})) + j.state = firstInObject + doer() + assertOk(j.out.Write([]byte{'}'})) + }) +} + +func (j *jsonStreamer) Add(element px.Value) { + j.delimit(func() { + j.write(element) + }) +} + +func (j *jsonStreamer) AddRef(ref int) { + j.delimit(func() { + assertOk(fmt.Fprintf(j.out, `{"%s":%d}`, PcoreRefKey, ref)) + }) +} + +func (j *jsonStreamer) CanDoBinary() bool { + return false +} + +func (j *jsonStreamer) CanDoComplexKeys() bool { + return false +} + +func (j *jsonStreamer) StringDedupThreshold() int { + return 20 +} + +func (j *jsonStreamer) delimit(doer px.Doer) { + switch j.state { + case firstInArray: + doer() + j.state = afterElement + case firstInObject: + doer() + j.state = afterKey + case afterKey: + assertOk(j.out.Write([]byte{':'})) + doer() + j.state = afterValue + case afterValue: + assertOk(j.out.Write([]byte{','})) + doer() + j.state = afterKey + default: // Element + assertOk(j.out.Write([]byte{','})) + doer() + } +} + +func (j *jsonStreamer) write(e px.Value) { + var v []byte + var err error + switch e := e.(type) { + case px.StringValue: + v, err = json.Marshal(e.String()) + case px.Float: + v, err = json.Marshal(e.Float()) + case px.Integer: + v, err = json.Marshal(e.Int()) + case px.Boolean: + v, err = json.Marshal(e.Bool()) + default: + v = []byte(`null`) + } + assertOk(0, err) + assertOk(j.out.Write(v)) +} + +func assertOk(_ int, err error) { + if err != nil { + panic(px.Error(px.Failure, issue.H{`message`: err})) + } +} diff --git a/vendor/github.com/lyraproj/pcore/serialization/jsontodata.go b/vendor/github.com/lyraproj/pcore/serialization/jsontodata.go new file mode 100644 index 0000000..91afc7e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/serialization/jsontodata.go @@ -0,0 +1,113 @@ +package serialization + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +const firstInArray = 0 +const firstInObject = 1 +const afterElement = 2 +const afterValue = 3 +const afterKey = 4 + +// JsonToData reads JSON from the given reader and streams the values to the +// given ValueConsumer +func JsonToData(path string, in io.Reader, consumer px.ValueConsumer) { + defer func() { + if r := recover(); r != nil { + panic(px.Error(px.InvalidJson, issue.H{`path`: path, `detail`: r})) + } + }() + d := json.NewDecoder(in) + d.UseNumber() + jsonValues(consumer, d) +} + +func jsonValues(c px.ValueConsumer, d *json.Decoder) { + for { + t, err := d.Token() + if err == io.EOF { + return + } + if err != nil { + panic(err) + } + if dl, ok := t.(json.Delim); ok { + ds := dl.String() + if ds == `}` || ds == `]` { + return + } + if ds == `{` { + t = nil + if d.More() { + t, err = d.Token() + if err != nil { + panic(err) + } + if ds, ok = t.(string); ok && ds == PcoreRefKey && d.More() { + t, err = d.Token() + if err != 
nil { + panic(err) + } + var n int64 + n, err = t.(json.Number).Int64() + if err != nil { + panic(err) + } + // Consume end delimiter + t, err = d.Token() + if err != nil { + panic(err) + } + if dl, ok = t.(json.Delim); ok && dl.String() == `}` { + c.AddRef(int(n)) + } else { + panic(fmt.Errorf("invalid token %T %v", t, t)) + } + continue + } + c.AddHash(8, func() { + addValue(c, t) + jsonValues(c, d) + }) + } else { + c.AddHash(8, func() { + jsonValues(c, d) + }) + } + } else { + c.AddArray(8, func() { + jsonValues(c, d) + }) + } + } else { + addValue(c, t) + } + } +} + +func addValue(c px.ValueConsumer, t json.Token) { + switch t := t.(type) { + case bool: + c.Add(types.WrapBoolean(t)) + case float64: + c.Add(types.WrapFloat(t)) + case json.Number: + if i, err := t.Int64(); err == nil { + c.Add(types.WrapInteger(i)) + } else { + f, _ := t.Float64() + c.Add(types.WrapFloat(f)) + } + case string: + c.Add(types.WrapString(t)) + case nil: + c.Add(px.Undef) + } +} diff --git a/vendor/github.com/lyraproj/pcore/serialization/serializer.go b/vendor/github.com/lyraproj/pcore/serialization/serializer.go new file mode 100644 index 0000000..dd2c6d4 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/serialization/serializer.go @@ -0,0 +1,382 @@ +package serialization + +import ( + "bytes" + "fmt" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" +) + +const NoDedup = 0 +const NoKeyDedup = 1 +const MaxDedup = 2 + +// Serializer is a re-entrant fully configured serializer that streams the given +// value to the given consumer. +type Serializer interface { + // Convert the given RichData value to a series of Data values streamed to the + // given consumer. + Convert(value px.Value, consumer px.ValueConsumer) +} + +type rdSerializer struct { + context px.Context + richData bool + messagePrefix string + dedupLevel int +} + +type context struct { + config *rdSerializer + values map[px.Value]int + path []px.Value + refIndex int + dedupLevel int + consumer px.ValueConsumer +} + +// NewSerializer returns a new Serializer +func NewSerializer(ctx px.Context, options px.OrderedMap) Serializer { + t := &rdSerializer{context: ctx} + t.richData = options.Get5(`rich_data`, types.BooleanTrue).(px.Boolean).Bool() + t.messagePrefix = options.Get5(`message_prefix`, px.EmptyString).String() + if !options.Get5(`local_reference`, types.BooleanTrue).(px.Boolean).Bool() { + // local_reference explicitly set to false + t.dedupLevel = NoDedup + } else { + t.dedupLevel = int(options.Get5(`dedup_level`, types.WrapInteger(MaxDedup)).(px.Integer).Int()) + } + return t +} + +var typeKey = types.WrapString(PcoreTypeKey) +var valueKey = types.WrapString(PcoreValueKey) +var defaultType = types.WrapString(PcoreTypeDefault) +var binaryType = types.WrapString(PcoreTypeBinary) +var sensitiveType = types.WrapString(PcoreTypeSensitive) +var hashKey = types.WrapString(PcoreTypeHash) + +func (t *rdSerializer) Convert(value px.Value, consumer px.ValueConsumer) { + c := context{config: t, values: make(map[px.Value]int, 63), refIndex: 0, consumer: consumer, path: make([]px.Value, 0, 16), dedupLevel: t.dedupLevel} + if c.dedupLevel >= MaxDedup && !consumer.CanDoComplexKeys() { + c.dedupLevel = NoKeyDedup + } + c.toData(1, value) +} + +func (sc *context) pathToString() string { + s := bytes.NewBufferString(sc.config.messagePrefix) + for _, v := range sc.path { + if s.Len() > 0 { + s.WriteByte('/') + } + if v == nil { + s.WriteString(`null`) + } else if 
px.IsInstance(types.DefaultScalarType(), v) { + v.ToString(s, types.Program, nil) + } else { + s.WriteString(issue.Label(s)) + } + } + return s.String() +} + +func (sc *context) toData(level int, value px.Value) { + if value == nil { + sc.addData(px.Undef) + return + } + + switch value := value.(type) { + case *types.UndefValue, px.Integer, px.Float, px.Boolean: + // Never dedup + sc.addData(value) + case px.StringValue: + // Dedup only if length exceeds stringThreshold + key := value.String() + if sc.dedupLevel >= level && len(key) >= sc.consumer.StringDedupThreshold() { + sc.process(value, func() { + sc.addData(value) + }) + } else { + sc.addData(value) + } + case *types.DefaultValue: + if sc.config.richData { + sc.addHash(1, func() { + sc.toData(2, typeKey) + sc.toData(1, defaultType) + }) + } else { + px.LogWarning(px.SerializationDefaultConvertedToString, issue.H{`path`: sc.pathToString()}) + sc.toData(1, types.WrapString(`default`)) + } + case *types.Hash: + if sc.consumer.CanDoComplexKeys() || value.AllKeysAreStrings() { + sc.process(value, func() { + sc.addHash(value.Len(), func() { + value.EachPair(func(key, elem px.Value) { + sc.toData(2, key) + sc.withPath(key, func() { sc.toData(1, elem) }) + }) + }) + }) + } else { + sc.nonStringKeyedHashToData(value) + } + case *types.Array: + sc.process(value, func() { + sc.addArray(value.Len(), func() { + value.EachWithIndex(func(elem px.Value, idx int) { + sc.withPath(types.WrapInteger(int64(idx)), func() { sc.toData(1, elem) }) + }) + }) + }) + case *types.Sensitive: + sc.process(value, func() { + if sc.config.richData { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.toData(1, sensitiveType) + sc.toData(2, valueKey) + sc.withPath(valueKey, func() { sc.toData(1, value.Unwrap()) }) + }) + } else { + sc.unknownToStringWithWarning(level, value) + } + }) + case *types.Binary: + sc.process(value, func() { + if sc.consumer.CanDoBinary() { + sc.addData(value) + } else { + if sc.config.richData { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.toData(1, binaryType) + sc.toData(2, valueKey) + sc.toData(1, types.WrapString(value.SerializationString())) + }) + } else { + sc.unknownToStringWithWarning(level, value) + } + } + }) + default: + if sc.config.richData { + sc.valueToDataHash(value) + } else { + sc.unknownToStringWithWarning(1, value) + } + } +} + +func (sc *context) unknownToStringWithWarning(level int, value px.Value) { + var klass string + var s string + if rt, ok := value.(*types.RuntimeValue); ok { + s = fmt.Sprintf(`%v`, rt.Interface()) + klass = rt.PType().(*types.RuntimeType).Name() + } else { + s = value.String() + klass = value.PType().Name() + } + px.LogWarning(px.SerializationUnknownConvertedToString, issue.H{`path`: sc.pathToString(), `klass`: klass, `value`: s}) + sc.toData(level, types.WrapString(s)) +} + +func (sc *context) withPath(p px.Value, doer px.Doer) { + sc.path = append(sc.path, p) + doer() + sc.path = sc.path[0 : len(sc.path)-1] +} + +func (sc *context) process(value px.Value, doer px.Doer) { + if sc.dedupLevel == NoDedup { + doer() + return + } + + if ref, ok := sc.values[value]; ok { + sc.consumer.AddRef(ref) + } else { + sc.values[value] = sc.refIndex + doer() + } +} + +func (sc *context) nonStringKeyedHashToData(hash px.OrderedMap) { + if sc.config.richData { + sc.toKeyExtendedHash(hash) + return + } + sc.process(hash, func() { + sc.addHash(hash.Len(), func() { + hash.EachPair(func(key, elem px.Value) { + if s, ok := key.(px.StringValue); ok { + sc.toData(2, s) + } else { + 
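+ // Without rich data, a non-string key cannot be preserved; it is stringified with a warning.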
sc.unknownToStringWithWarning(2, key) + } + sc.withPath(key, func() { sc.toData(1, elem) }) + }) + }) + }) +} + +func (sc *context) addArray(len int, doer px.Doer) { + sc.refIndex++ + sc.consumer.AddArray(len, doer) +} + +func (sc *context) addHash(len int, doer px.Doer) { + sc.refIndex++ + sc.consumer.AddHash(len, doer) +} + +func (sc *context) addData(v px.Value) { + sc.refIndex++ + sc.consumer.Add(v) +} + +func (sc *context) valueToDataHash(value px.Value) { + if _, ok := value.(*types.RuntimeValue); ok { + sc.unknownToStringWithWarning(1, value) + return + } + + switch value := value.(type) { + case *types.TypeAliasType: + if sc.isKnownType(value.Name()) { + sc.process(value, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.toData(2, types.WrapString(`Type`)) + sc.toData(2, valueKey) + sc.toData(1, types.WrapString(value.Name())) + }) + }) + return + } + case px.ObjectType: + tv := value.(px.ObjectType) + if sc.isKnownType(tv.Name()) { + sc.process(value, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.toData(2, types.WrapString(`Type`)) + sc.toData(2, valueKey) + sc.toData(1, types.WrapString(tv.String())) + }) + }) + return + } + } + + vt := value.PType() + if tx, ok := value.(px.Type); ok { + if ss, ok := value.(px.SerializeAsString); ok && ss.CanSerializeAsString() { + sc.process(value, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.withPath(typeKey, func() { sc.pcoreTypeToData(vt) }) + sc.toData(2, valueKey) + sc.toData(1, types.WrapString(ss.SerializationString())) + }) + }) + return + } + vt = tx.MetaType() + } + + if ss, ok := value.(px.SerializeAsString); ok && ss.CanSerializeAsString() { + sc.process(value, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.withPath(typeKey, func() { sc.pcoreTypeToData(vt) }) + sc.toData(2, valueKey) + sc.toData(1, types.WrapString(ss.SerializationString())) + }) + }) + return + } + + if po, ok := value.(px.PuppetObject); ok { + sc.process(value, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.withPath(typeKey, func() { sc.pcoreTypeToData(vt) }) + po.InitHash().EachPair(func(k, v px.Value) { + sc.toData(2, k) // No need to convert key. 
It's always a string + sc.withPath(k, func() { sc.toData(1, v) }) + }) + }) + }) + return + } + + if ot, ok := vt.(px.ObjectType); ok { + sc.process(value, func() { + ai := ot.AttributesInfo() + attrs := ai.Attributes() + args := make([]px.Value, len(attrs)) + for i, a := range attrs { + args[i] = a.Get(value) + } + + for i := len(args) - 1; i >= ai.RequiredCount(); i-- { + if !attrs[i].Default(args[i]) { + break + } + args = args[:i] + } + sc.addHash(1+len(args), func() { + sc.toData(2, typeKey) + sc.withPath(typeKey, func() { sc.pcoreTypeToData(vt) }) + for i, a := range args { + k := types.WrapString(attrs[i].Name()) + sc.toData(2, k) + sc.withPath(k, func() { sc.toData(1, a) }) + } + }) + }) + return + } + sc.unknownToStringWithWarning(1, value) +} + +func (sc *context) isKnownType(typeName string) bool { + if strings.HasPrefix(typeName, `Runtime::`) { + return true + } + _, found := px.Load(sc.config.context, px.NewTypedName(px.NsType, typeName)) + return found +} + +func (sc *context) pcoreTypeToData(pcoreType px.Type) { + typeName := pcoreType.Name() + if sc.isKnownType(typeName) { + sc.toData(1, types.WrapString(typeName)) + } else { + sc.toData(1, pcoreType) + } +} + +func (sc *context) toKeyExtendedHash(hash px.OrderedMap) { + sc.process(hash, func() { + sc.addHash(2, func() { + sc.toData(2, typeKey) + sc.toData(1, hashKey) + sc.toData(2, valueKey) + sc.addArray(hash.Len()*2, func() { + hash.EachPair(func(key, value px.Value) { + sc.toData(1, key) + sc.withPath(key, func() { sc.toData(1, value) }) + }) + }) + }) + }) +} diff --git a/vendor/github.com/lyraproj/pcore/threadlocal/gid.go b/vendor/github.com/lyraproj/pcore/threadlocal/gid.go new file mode 100644 index 0000000..d083aaa --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/threadlocal/gid.go @@ -0,0 +1,98 @@ +package threadlocal + +import ( + "fmt" + "runtime" + "sync" +) + +// Getgid returns the ID of the current Go routine +// +// The solution here is not the fastest in the world but it gets the job done +// in a portable way. 
For a faster ways to do this that involves assembler code +// for different platforms, please look at: https://github.com/huandu/go-tls +func getg() int64 { + const prefixLen = 10 // Length of prefix "goroutine " + var buf [64]byte + + l := runtime.Stack(buf[:64], false) + n := int64(0) + for i := prefixLen; i < l; i++ { + d := buf[i] + if d < 0x30 || d > 0x39 { + break + } + n = n*10 + int64(d-0x30) + } + if n == 0 { + panic(fmt.Errorf(`unable to retrieve id of current go routine`)) + } + return n +} + +var tlsLock sync.RWMutex +var tls = make(map[int64]map[string]interface{}, 7) + +// Init initializes a go routine local storage for the current go routine +func Init() { + gid := getg() + ls := make(map[string]interface{}) + tlsLock.Lock() + tls[gid] = ls + tlsLock.Unlock() +} + +// Cleanup deletes the local storage for the current go routine +func Cleanup() { + gid := getg() + tlsLock.Lock() + delete(tls, gid) + tlsLock.Unlock() +} + +// Get returns a variable from the local storage of the current go routine +func Get(key string) (interface{}, bool) { + gid := getg() + tlsLock.RLock() + ls, ok := tls[gid] + tlsLock.RUnlock() + var found interface{} + if ok { + found, ok = ls[key] + } + return found, ok +} + +// Go executes the given function in a go routine and ensures that the local +// storage is initialized before the function is called and deleted before +// after the function returns or panics +func Go(f func()) { + go func() { + defer Cleanup() + Init() + f() + }() +} + +// Delete deletes a variable from the local storage of the current go routine +func Delete(key string) { + gid := getg() + tlsLock.RLock() + ls, ok := tls[gid] + tlsLock.RUnlock() + if ok { + delete(ls, key) + } +} + +// Set adds or replaces a variable to the local storage of the current go routine +func Set(key string, value interface{}) { + gid := getg() + tlsLock.RLock() + ls, ok := tls[gid] + tlsLock.RUnlock() + if !ok { + panic(`thread local not initialized for current go routine`) + } + ls[key] = value +} diff --git a/vendor/github.com/lyraproj/pcore/types/annotatable.go b/vendor/github.com/lyraproj/pcore/types/annotatable.go new file mode 100644 index 0000000..753d258 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/annotatable.go @@ -0,0 +1,56 @@ +package types + +import ( + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" +) + +var annotationTypeDefault = &objectType{ + annotatable: annotatable{annotations: emptyMap}, + hashKey: px.HashKey("\x00tAnnotation"), + name: `Annotation`, + parameters: hash.EmptyStringHash, + attributes: hash.EmptyStringHash, + functions: hash.EmptyStringHash, + equalityIncludeType: true, + equality: nil} + +func DefaultAnnotationType() px.Type { + return annotationTypeDefault +} + +var typeAnnotations = NewHashType(NewTypeType(annotationTypeDefault), DefaultHashType(), nil) + +type annotatable struct { + annotations *Hash + resolvedAnnotations *Hash +} + +func (a *annotatable) Annotations(c px.Context) px.OrderedMap { + if a.resolvedAnnotations == nil { + ah := a.annotations + if ah.IsEmpty() { + a.resolvedAnnotations = emptyMap + } else { + as := make([]*HashEntry, 0, ah.Len()) + ah.EachPair(func(k, v px.Value) { + at := k.(px.ObjectType) + as = append(as, WrapHashEntry(k, px.New(c, at, v))) + }) + a.resolvedAnnotations = WrapHash(as) + } + } + return a.resolvedAnnotations +} + +func (a *annotatable) initialize(initHash *Hash) { + a.annotations = hashArg(initHash, keyAnnotations) +} + +func (a *annotatable) initHash() *hash.StringHash { + h := 
hash.NewStringHash(5) + if a.annotations.Len() > 0 { + h.Put(keyAnnotations, a.annotations) + } + return h +} diff --git a/vendor/github.com/lyraproj/pcore/types/annotatedmember.go b/vendor/github.com/lyraproj/pcore/types/annotatedmember.go new file mode 100644 index 0000000..6ed3ae1 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/annotatedmember.go @@ -0,0 +1,113 @@ +package types + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" +) + +type annotatedMember struct { + annotatable + name string + container *objectType + typ px.Type + override bool + final bool +} + +func (a *annotatedMember) initialize(c px.Context, memberType, name string, container *objectType, initHash *Hash) { + a.annotatable.initialize(initHash) + a.name = name + a.container = container + typ := initHash.Get5(keyType, nil) + if tn, ok := typ.(stringValue); ok { + a.typ = container.parseAttributeType(c, memberType, name, tn) + } else { + // Unchecked because type is guaranteed by earlier type assertion on the hash + a.typ = typ.(px.Type) + } + a.override = boolArg(initHash, keyOverride, false) + a.final = boolArg(initHash, keyFinal, false) +} + +func (a *annotatedMember) Accept(v px.Visitor, g px.Guard) { + a.typ.Accept(v, g) + visitAnnotations(a.annotations, v, g) +} + +func (a *annotatedMember) Call(c px.Context, receiver px.Value, block px.Lambda, args []px.Value) px.Value { + // TODO: + panic("implement me") +} + +func (a *annotatedMember) Name() string { + return a.name +} + +func (a *annotatedMember) Container() px.ObjectType { + return a.container +} + +func (a *annotatedMember) Type() px.Type { + return a.typ +} + +func (a *annotatedMember) Override() bool { + return a.override +} + +func (a *annotatedMember) initHash() *hash.StringHash { + h := a.annotatable.initHash() + h.Put(keyType, a.typ) + if a.final { + h.Put(keyFinal, BooleanTrue) + } + if a.override { + h.Put(keyOverride, BooleanTrue) + } + return h +} + +func (a *annotatedMember) Final() bool { + return a.final +} + +// Checks if the this _member_ overrides an inherited member, and if so, that this member is declared with +// override = true and that the inherited member accepts to be overridden by this member. +func assertOverride(a px.AnnotatedMember, parentMembers *hash.StringHash) { + parentMember, _ := parentMembers.Get(a.Name(), nil).(px.AnnotatedMember) + if parentMember == nil { + if a.Override() { + panic(px.Error(px.OverriddenNotFound, issue.H{`label`: a.Label(), `feature_type`: a.FeatureType()})) + } + } else { + assertCanBeOverridden(parentMember, a) + } +} + +func assertCanBeOverridden(a px.AnnotatedMember, member px.AnnotatedMember) { + if a.FeatureType() != member.FeatureType() { + panic(px.Error(px.OverrideMemberMismatch, issue.H{`member`: member.Label(), `label`: a.Label()})) + } + if a.Final() { + aa, ok := a.(px.Attribute) + if !(ok && aa.Kind() == constant && member.(px.Attribute).Kind() == constant) { + panic(px.Error(px.OverrideOfFinal, issue.H{`member`: member.Label(), `label`: a.Label()})) + } + } + if !member.Override() { + panic(px.Error(px.OverrideIsMissing, issue.H{`member`: member.Label(), `label`: a.Label()})) + } + if !px.IsAssignable(a.Type(), member.Type()) { + panic(px.Error(px.OverrideTypeMismatch, issue.H{`member`: member.Label(), `label`: a.Label()})) + } +} + +// Visit the keys of an annotations map. 
All keys are known to be types +func visitAnnotations(a *Hash, v px.Visitor, g px.Guard) { + if a != nil { + a.EachKey(func(key px.Value) { + key.(px.Type).Accept(v, g) + }) + } +} diff --git a/vendor/github.com/lyraproj/pcore/types/anytype.go b/vendor/github.com/lyraproj/pcore/types/anytype.go new file mode 100644 index 0000000..73e18ed --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/anytype.go @@ -0,0 +1,73 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type AnyType struct{} + +var AnyMetaType px.ObjectType + +func init() { + px.NewCollector = NewCollector + px.NewTypedName = NewTypedName + px.NewTypedName2 = newTypedName2 + px.TypedNameFromMapKey = typedNameFromMapKey + + AnyMetaType = newObjectType(`Pcore::AnyType`, `{}`, func(ctx px.Context, args []px.Value) px.Value { + return DefaultAnyType() + }) +} + +func DefaultAnyType() *AnyType { + return anyTypeDefault +} + +func (t *AnyType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *AnyType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*AnyType) + return ok +} + +func (t *AnyType) IsAssignable(o px.Type, g px.Guard) bool { + return true +} + +func (t *AnyType) IsInstance(v px.Value, g px.Guard) bool { + return true +} + +func (t *AnyType) MetaType() px.ObjectType { + return AnyMetaType +} + +func (t *AnyType) Name() string { + return `Any` +} + +func (t *AnyType) CanSerializeAsString() bool { + return true +} + +func (t *AnyType) SerializationString() string { + return `Any` +} + +func (t *AnyType) String() string { + return `Any` +} + +func (t *AnyType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *AnyType) PType() px.Type { + return &TypeType{t} +} + +var anyTypeDefault = &AnyType{} diff --git a/vendor/github.com/lyraproj/pcore/types/arraytype.go b/vendor/github.com/lyraproj/pcore/types/arraytype.go new file mode 100644 index 0000000..d352b0a --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/arraytype.go @@ -0,0 +1,763 @@ +package types + +import ( + "bytes" + "io" + "math" + "reflect" + "sort" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + ArrayType struct { + size *IntegerType + typ px.Type + } + + Array struct { + reducedType *ArrayType + detailedType px.Type + elements []px.Value + } +) + +var ArrayMetaType px.ObjectType + +func init() { + ArrayMetaType = newObjectType(`Pcore::ArrayType`, + `Pcore::CollectionType { + attributes => { + 'element_type' => { type => Type, value => Any } + }, + serialization => [ 'element_type', 'size_type' ] +}`, func(ctx px.Context, args []px.Value) px.Value { + return newArrayType2(args...) 
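+ // The second constructor below builds the type from an init hash with element_type and size_type keys.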
+ }, + func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*Hash) + et := h.Get5(`element_type`, DefaultAnyType()) + st := h.Get5(`size_type`, PositiveIntegerType()) + return newArrayType2(et, st) + }) + + newGoConstructor3([]string{`Array`, `Tuple`}, nil, + func(d px.Dispatch) { + d.Param(`Variant[Array,Hash,Binary,Iterable]`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + switch arg := args[0].(type) { + case *Array: + if len(args) > 1 && args[1].(booleanValue).Bool() { + // Wrapped + return WrapValues(args[:1]) + } + return arg + default: + return arg.(px.Arrayable).AsArray() + } + }) + }, + ) +} + +func DefaultArrayType() *ArrayType { + return arrayTypeDefault +} + +func EmptyArrayType() *ArrayType { + return arrayTypeEmpty +} + +func NewArrayType(element px.Type, rng *IntegerType) *ArrayType { + if element == nil { + element = anyTypeDefault + } + if rng == nil { + rng = IntegerTypePositive + } + if *rng == *IntegerTypePositive && element == anyTypeDefault { + return DefaultArrayType() + } + if *rng == *IntegerTypeZero && element == unitTypeDefault { + return EmptyArrayType() + } + return &ArrayType{rng, element} +} + +func newArrayType2(args ...px.Value) *ArrayType { + argc := len(args) + if argc == 0 { + return DefaultArrayType() + } + + offset := 0 + element, ok := args[0].(px.Type) + if ok { + offset++ + } else { + element = DefaultAnyType() + } + + var rng *IntegerType + switch argc - offset { + case 0: + rng = IntegerTypePositive + case 1: + sizeArg := args[offset] + if rng, ok = sizeArg.(*IntegerType); !ok { + var sz int64 + sz, ok = toInt(sizeArg) + if !ok { + panic(illegalArgumentType(`Array[]`, offset, `Variant[Integer, Type[Integer]]`, sizeArg)) + } + rng = NewIntegerType(sz, math.MaxInt64) + } + case 2: + var min, max int64 + arg := args[offset] + if min, ok = toInt(arg); !ok { + if _, ok = arg.(*DefaultValue); !ok { + panic(illegalArgumentType(`Array[]`, offset, `Integer`, arg)) + } + min = 0 + } + offset++ + arg = args[offset] + if max, ok = toInt(args[offset]); !ok { + if _, ok = arg.(*DefaultValue); !ok { + panic(illegalArgumentType(`Array[]`, offset, `Integer`, arg)) + } + max = math.MaxInt64 + } + rng = NewIntegerType(min, max) + default: + panic(illegalArgumentCount(`Array[]`, `0 - 3`, argc)) + } + return NewArrayType(element, rng) +} + +func (t *ArrayType) ElementType() px.Type { + return t.typ +} + +func (t *ArrayType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.size.Accept(v, g) + t.typ.Accept(v, g) +} + +func (t *ArrayType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*ArrayType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *ArrayType) Generic() px.Type { + if t.typ == anyTypeDefault { + return arrayTypeDefault + } + return NewArrayType(px.Generalize(t.typ), nil) +} + +func (t *ArrayType) Get(key string) (value px.Value, ok bool) { + switch key { + case `element_type`: + return t.typ, true + case `size_type`: + return t.size, true + } + return nil, false +} + +func (t *ArrayType) Default() px.Type { + return arrayTypeDefault +} + +func (t *ArrayType) IsAssignable(o px.Type, g px.Guard) bool { + switch o := o.(type) { + case *ArrayType: + return t.size.IsAssignable(o.size, g) && GuardedIsAssignable(t.typ, o.typ, g) + case *TupleType: + return t.size.IsAssignable(o.givenOrActualSize, g) && allAssignableTo(o.types, t.typ, g) + default: + return false + } +} + +func (t *ArrayType) IsInstance(v px.Value, g px.Guard) bool { + iv, ok := v.(*Array) + if !ok { + return 
false + } + + osz := iv.Len() + if !t.size.IsInstance3(osz) { + return false + } + + if t.typ == anyTypeDefault { + return true + } + + for idx := 0; idx < osz; idx++ { + if !GuardedIsInstance(t.typ, iv.At(idx), g) { + return false + } + } + return true +} + +func (t *ArrayType) MetaType() px.ObjectType { + return ArrayMetaType +} + +func (t *ArrayType) Name() string { + return `Array` +} + +func (t *ArrayType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *ArrayType) Size() *IntegerType { + return t.size +} + +func (t *ArrayType) String() string { + return px.ToString2(t, None) +} + +func (t *ArrayType) PType() px.Type { + return &TypeType{t} +} + +func (t *ArrayType) Parameters() []px.Value { + if t.typ.Equals(unitTypeDefault, nil) && *t.size == *IntegerTypeZero { + return t.size.SizeParameters() + } + + params := make([]px.Value, 0) + if !t.typ.Equals(DefaultAnyType(), nil) { + params = append(params, t.typ) + } + if *t.size != *IntegerTypePositive { + params = append(params, t.size.SizeParameters()...) + } + return params +} + +func (t *ArrayType) ReflectType(c px.Context) (reflect.Type, bool) { + if et, ok := ReflectType(c, t.ElementType()); ok { + return reflect.SliceOf(et), true + } + return nil, false +} + +func (t *ArrayType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *ArrayType) SerializationString() string { + return t.String() +} + +func (t *ArrayType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +var arrayTypeDefault = &ArrayType{IntegerTypePositive, anyTypeDefault} +var arrayTypeEmpty = &ArrayType{IntegerTypeZero, unitTypeDefault} + +func BuildArray(len int, bld func(*Array, []px.Value) []px.Value) *Array { + ar := &Array{elements: make([]px.Value, 0, len)} + ar.elements = bld(ar, ar.elements) + return ar +} + +func SingletonArray(element px.Value) *Array { + return &Array{elements: []px.Value{element}} +} + +func WrapTypes(elements []px.Type) *Array { + els := make([]px.Value, len(elements)) + for i, e := range elements { + if e == nil { + panic(px.Error(px.NilArrayElement, issue.H{`index`: i})) + } + els[i] = e + } + return &Array{elements: els} +} + +func WrapValues(elements []px.Value) *Array { + for i, e := range elements { + if e == nil { + panic(px.Error(px.NilArrayElement, issue.H{`index`: i})) + } + } + return &Array{elements: elements} +} + +func WrapInterfaces(c px.Context, elements []interface{}) *Array { + els := make([]px.Value, len(elements)) + for i, e := range elements { + els[i] = wrap(c, e) + } + return &Array{elements: els} +} + +func WrapInts(ints []int) *Array { + els := make([]px.Value, len(ints)) + for i, e := range ints { + els[i] = integerValue(int64(e)) + } + return &Array{elements: els} +} + +func WrapStrings(strings []string) *Array { + els := make([]px.Value, len(strings)) + for i, e := range strings { + els[i] = stringValue(e) + } + return &Array{elements: els} +} + +func WrapArray3(iv px.List) *Array { + if ar, ok := iv.(*Array); ok { + return ar + } + return WrapValues(iv.AppendTo(make([]px.Value, 0, iv.Len()))) +} + +func (av *Array) Add(ov px.Value) px.List { + return WrapValues(append(av.elements, ov)) +} + +func (av *Array) AddAll(ov px.List) px.List { + if ar, ok := ov.(*Array); ok { + return WrapValues(append(av.elements, ar.elements...)) + } + + aLen := len(av.elements) + sLen := aLen + ov.Len() + el := make([]px.Value, sLen) + copy(el, av.elements) + for idx := aLen; idx < sLen; idx++ { + el[idx] = ov.At(idx - aLen) 
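+ // Elements of ov are placed after the receiver's own elements, hence the aLen offset.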
+ } + return WrapValues(el) +} + +func (av *Array) All(predicate px.Predicate) bool { + return px.All(av.elements, predicate) +} + +func (av *Array) Any(predicate px.Predicate) bool { + return px.Any(av.elements, predicate) +} + +func (av *Array) AppendTo(slice []px.Value) []px.Value { + return append(slice, av.elements...) +} + +func (av *Array) At(i int) px.Value { + if i >= 0 && i < len(av.elements) { + return av.elements[i] + } + return undef +} + +func (av *Array) Delete(ov px.Value) px.List { + return av.Reject(func(elem px.Value) bool { + return elem.Equals(ov, nil) + }) +} + +func (av *Array) DeleteAll(ov px.List) px.List { + return av.Reject(func(elem px.Value) bool { + return ov.Any(func(oe px.Value) bool { + return elem.Equals(oe, nil) + }) + }) +} + +func (av *Array) DetailedType() px.Type { + return av.privateDetailedType() +} + +func (av *Array) Each(consumer px.Consumer) { + for _, e := range av.elements { + consumer(e) + } +} + +func (av *Array) EachWithIndex(consumer px.IndexedConsumer) { + for i, e := range av.elements { + consumer(e, i) + } +} + +func (av *Array) EachSlice(n int, consumer px.SliceConsumer) { + top := len(av.elements) + for i := 0; i < top; i += n { + e := i + n + if e > top { + e = top + } + consumer(WrapValues(av.elements[i:e])) + } +} + +func (av *Array) ElementType() px.Type { + return av.PType().(*ArrayType).ElementType() +} + +func (av *Array) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*Array); ok { + if top := len(av.elements); top == len(ov.elements) { + for idx := 0; idx < top; idx++ { + if !av.elements[idx].Equals(ov.elements[idx], g) { + return false + } + } + return true + } + } + if len(av.elements) == 2 { + if he, ok := o.(*HashEntry); ok { + return av.elements[0].Equals(he.key, g) && av.elements[1].Equals(he.value, g) + } + } + return false +} + +func (av *Array) Find(predicate px.Predicate) (px.Value, bool) { + return px.Find(av.elements, predicate) +} + +func (av *Array) Flatten() px.List { + for _, e := range av.elements { + switch e.(type) { + case *Array, *HashEntry: + return WrapValues(flattenElements(av.elements, make([]px.Value, 0, len(av.elements)*2))) + } + } + return av +} + +func flattenElements(elements, receiver []px.Value) []px.Value { + for _, e := range elements { + switch e := e.(type) { + case *Array: + receiver = flattenElements(e.elements, receiver) + case *HashEntry: + receiver = flattenElements([]px.Value{e.key, e.value}, receiver) + default: + receiver = append(receiver, e) + } + } + return receiver +} + +func (av *Array) IsEmpty() bool { + return len(av.elements) == 0 +} + +func (av *Array) IsHashStyle() bool { + return false +} + +func (av *Array) Len() int { + return len(av.elements) +} + +func (av *Array) Map(mapper px.Mapper) px.List { + return WrapValues(px.Map(av.elements, mapper)) +} + +func (av *Array) Reduce(redactor px.BiMapper) px.Value { + if av.IsEmpty() { + return undef + } + return reduceSlice(av.elements[1:], av.At(0), redactor) +} + +func (av *Array) Reduce2(initialValue px.Value, redactor px.BiMapper) px.Value { + return reduceSlice(av.elements, initialValue, redactor) +} + +func (av *Array) Reflect(c px.Context) reflect.Value { + at, ok := ReflectType(c, av.PType()) + if !ok { + at = reflect.TypeOf([]interface{}{}) + } + s := reflect.MakeSlice(at, av.Len(), av.Len()) + rf := c.Reflector() + for i, e := range av.elements { + rf.ReflectTo(e, s.Index(i)) + } + return s +} + +func (av *Array) ReflectTo(c px.Context, value reflect.Value) { + vt := value.Type() + ptr := vt.Kind() == 
reflect.Ptr + if ptr { + vt = vt.Elem() + } + s := reflect.MakeSlice(vt, av.Len(), av.Len()) + rf := c.Reflector() + for i, e := range av.elements { + rf.ReflectTo(e, s.Index(i)) + } + if ptr { + // The created slice cannot be addressed. A pointer to it is necessary + x := reflect.New(s.Type()) + x.Elem().Set(s) + s = x + } + value.Set(s) +} + +func (av *Array) Reject(predicate px.Predicate) px.List { + return WrapValues(px.Reject(av.elements, predicate)) +} + +func (av *Array) Select(predicate px.Predicate) px.List { + return WrapValues(px.Select(av.elements, predicate)) +} + +func (av *Array) Slice(i int, j int) px.List { + return WrapValues(av.elements[i:j]) +} + +type arraySorter struct { + values []px.Value + comparator px.Comparator +} + +func (s *arraySorter) Len() int { + return len(s.values) +} + +func (s *arraySorter) Less(i, j int) bool { + vs := s.values + return s.comparator(vs[i], vs[j]) +} + +func (s *arraySorter) Swap(i, j int) { + vs := s.values + v := vs[i] + vs[i] = vs[j] + vs[j] = v +} + +func (av *Array) Sort(comparator px.Comparator) px.List { + s := &arraySorter{make([]px.Value, len(av.elements)), comparator} + copy(s.values, av.elements) + sort.Sort(s) + return WrapValues(s.values) +} + +func (av *Array) String() string { + return px.ToString2(av, None) +} + +func (av *Array) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + av.ToString2(b, s, px.GetFormat(s.FormatMap(), av.PType()), '[', g) +} + +func (av *Array) ToString2(b io.Writer, s px.FormatContext, f px.Format, delim byte, g px.RDetect) { + if g == nil { + g = make(px.RDetect) + } else if g[av] { + utils.WriteString(b, ``) + return + } + g[av] = true + + switch f.FormatChar() { + case 'a', 's', 'p': + default: + panic(s.UnsupportedFormat(av.PType(), `asp`, f)) + } + indent := s.Indentation() + indent = indent.Indenting(f.IsAlt() || indent.IsIndenting()) + + if indent.Breaks() { + utils.WriteString(b, "\n") + utils.WriteString(b, indent.Padding()) + } + + var delims [2]byte + if f.LeftDelimiter() == 0 { + delims = delimiterPairs[delim] + } else { + delims = delimiterPairs[f.LeftDelimiter()] + } + if delims[0] != 0 { + utils.WriteByte(b, delims[0]) + } + + top := len(av.elements) + if top > 0 { + mapped := make([]string, top) + arrayOrHash := make([]bool, top) + childrenIndent := indent.Increase(f.IsAlt()) + + cf := f.ContainerFormats() + if cf == nil { + cf = DefaultContainerFormats + } + for idx, v := range av.elements { + arrayOrHash[idx] = isContainer(v, s) + mapped[idx] = childToString(v, childrenIndent.Subsequent(), s, cf, g) + } + + szBreak := false + if f.IsAlt() && f.Width() >= 0 { + widest := 0 + for idx, ah := range arrayOrHash { + if ah { + widest = 0 + } else { + widest += len(mapped[idx]) + if widest > f.Width() { + szBreak = true + break + } + } + } + } + + sep := f.Separator(`,`) + for idx, ah := range arrayOrHash { + if childrenIndent.IsFirst() { + childrenIndent = childrenIndent.Subsequent() + // if breaking, indent first element by one + if szBreak && !ah { + utils.WriteString(b, ` `) + } + } else { + utils.WriteString(b, sep) + // if break on each (and breaking will not occur because next is an array or hash) + // or, if indenting, and previous was an array or hash, then break and continue on next line + // indented. 
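+ // (szBreak is set above when the accumulated element width exceeds the format's width limit)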
+ if !ah && (szBreak || f.IsAlt() && arrayOrHash[idx-1]) { + utils.WriteString(b, "\n") + utils.WriteString(b, childrenIndent.Padding()) + } else if !(f.IsAlt() && ah) { + utils.WriteString(b, ` `) + } + } + utils.WriteString(b, mapped[idx]) + } + } + if delims[1] != 0 { + utils.WriteByte(b, delims[1]) + } + delete(g, av) +} + +func (av *Array) Unique() px.List { + top := len(av.elements) + if top < 2 { + return av + } + + result := make([]px.Value, 0, top) + exists := make(map[px.HashKey]bool, top) + for _, v := range av.elements { + key := px.ToKey(v) + if !exists[key] { + exists[key] = true + result = append(result, v) + } + } + if len(result) == len(av.elements) { + return av + } + return WrapValues(result) +} + +func childToString(child px.Value, indent px.Indentation, parentCtx px.FormatContext, cf px.FormatMap, g px.RDetect) string { + var childrenCtx px.FormatContext + if isContainer(child, parentCtx) { + childrenCtx = newFormatContext2(indent, parentCtx.FormatMap(), parentCtx.Properties()) + } else { + childrenCtx = newFormatContext2(indent, cf, parentCtx.Properties()) + } + b := bytes.NewBufferString(``) + child.ToString(b, childrenCtx, g) + return b.String() +} + +func isContainer(child px.Value, s px.FormatContext) bool { + switch child.(type) { + case *Array, *Hash: + return true + case px.ObjectType, px.TypeSet: + if ex, ok := s.Property(`expanded`); ok && ex == `true` { + return true + } + return false + case px.PuppetObject: + return true + default: + return false + } +} + +func (av *Array) PType() px.Type { + return av.privateReducedType() +} + +func (av *Array) privateDetailedType() px.Type { + if av.detailedType == nil { + if len(av.elements) == 0 { + av.detailedType = av.privateReducedType() + } else { + types := make([]px.Type, len(av.elements)) + av.detailedType = NewTupleType(types, nil) + for idx := range types { + types[idx] = DefaultAnyType() + } + for idx, element := range av.elements { + types[idx] = px.DetailedValueType(element) + } + } + } + return av.detailedType +} + +func (av *Array) privateReducedType() *ArrayType { + if av.reducedType == nil { + top := len(av.elements) + if top == 0 { + av.reducedType = EmptyArrayType() + } else { + av.reducedType = NewArrayType(DefaultAnyType(), NewIntegerType(int64(top), int64(top))) + elemType := av.elements[0].PType() + for idx := 1; idx < top; idx++ { + elemType = commonType(elemType, av.elements[idx].PType()) + } + av.reducedType.typ = elemType + } + } + return av.reducedType +} + +func reduceSlice(slice []px.Value, initialValue px.Value, redactor px.BiMapper) px.Value { + memo := initialValue + for _, v := range slice { + memo = redactor(memo, v) + } + return memo +} diff --git a/vendor/github.com/lyraproj/pcore/types/attribute.go b/vendor/github.com/lyraproj/pcore/types/attribute.go new file mode 100644 index 0000000..208e02b --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/attribute.go @@ -0,0 +1,177 @@ +package types + +import ( + "fmt" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" +) + +const KeyGoName = `go_name` + +var typeAttributeKind = NewEnumType([]string{string(constant), string(derived), string(givenOrDerived), string(reference)}, false) + +var typeAttribute = NewStructType([]*StructElement{ + newStructElement2(keyType, NewVariantType(DefaultTypeType(), TypeTypeName)), + NewStructElement(newOptionalType3(keyFinal), DefaultBooleanType()), + NewStructElement(newOptionalType3(keyOverride), DefaultBooleanType()), + 
NewStructElement(newOptionalType3(keyKind), typeAttributeKind), + NewStructElement(newOptionalType3(keyValue), DefaultAnyType()), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations), + NewStructElement(newOptionalType3(KeyGoName), DefaultStringType()), +}) + +var typeAttributeCallable = newCallableType2(NewIntegerType(0, 0)) + +type attribute struct { + annotatedMember + kind px.AttributeKind + value px.Value + goName string +} + +func newAttribute(c px.Context, name string, container *objectType, initHash *Hash) px.Attribute { + a := &attribute{} + a.initialize(c, name, container, initHash) + return a +} + +func (a *attribute) initialize(c px.Context, name string, container *objectType, initHash *Hash) { + px.AssertInstance(func() string { return fmt.Sprintf(`initializer for attribute %s[%s]`, container.Label(), name) }, typeAttribute, initHash) + a.annotatedMember.initialize(c, `attribute`, name, container, initHash) + a.kind = px.AttributeKind(stringArg(initHash, keyKind, ``)) + if a.kind == constant { // final is implied + if initHash.IncludesKey2(keyFinal) && !a.final { + panic(px.Error(px.ConstantWithFinal, issue.H{`label`: a.Label()})) + } + a.final = true + } + v := initHash.Get5(keyValue, nil) + if v != nil { + if a.kind == derived || a.kind == givenOrDerived { + panic(px.Error(px.IllegalKindValueCombination, issue.H{`label`: a.Label(), `kind`: a.kind})) + } + if _, ok := v.(*DefaultValue); ok || px.IsInstance(a.typ, v) { + a.value = v + } else { + panic(px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeMismatch(a.Label(), a.typ, px.DetailedValueType(v))})) + } + } else { + if a.kind == constant { + panic(px.Error(px.ConstantRequiresValue, issue.H{`label`: a.Label()})) + } + if a.kind == givenOrDerived { + // Type is always optional + if !px.IsInstance(a.typ, undef) { + a.typ = NewOptionalType(a.typ) + } + } + if _, ok := a.typ.(*OptionalType); ok { + a.value = undef // Optional attributes have an implicit value of undef + } else { + a.value = nil // Not to be confused with undef + } + } + if gn, ok := initHash.Get4(KeyGoName); ok { + a.goName = gn.String() + } +} + +func (a *attribute) Call(c px.Context, receiver px.Value, block px.Lambda, args []px.Value) px.Value { + if block == nil && len(args) == 0 { + return a.Get(receiver) + } + types := make([]px.Value, len(args)) + for i, a := range args { + types[i] = a.PType() + } + panic(px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeSignatures( + []px.Signature{a.CallableType().(*CallableType)}, newTupleType2(types...), block)})) +} + +func (a *attribute) Default(value px.Value) bool { + return a.value != nil && a.value.Equals(value, nil) +} + +func (a *attribute) GoName() string { + return a.goName +} + +func (a *attribute) Tags(c px.Context) px.TagsAnnotation { + if ta, ok := a.Annotations(c).Get(TagsAnnotationType); ok { + return ta.(px.TagsAnnotation) + } + return nil +} + +func (a *attribute) Kind() px.AttributeKind { + return a.kind +} + +func (a *attribute) HasValue() bool { + return a.value != nil +} + +func (a *attribute) initHash() *hash.StringHash { + h := a.annotatedMember.initHash() + if a.kind != defaultKind { + h.Put(keyKind, stringValue(string(a.kind))) + } + if a.value != nil { + opt := a.value.Equals(undef, nil) + if opt { + _, opt = a.typ.(*OptionalType) + } + if !opt { + h.Put(keyValue, a.value) + } + } + return h +} + +func (a *attribute) InitHash() px.OrderedMap { + return WrapStringPValue(a.initHash()) +} + +func (a *attribute) Value() px.Value { + if a.value == nil { + 
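+ // A nil value (as opposed to undef) means that no value was declared for this attribute.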
panic(px.Error(px.AttributeHasNoValue, issue.H{`label`: a.Label()})) + } + return a.value +} + +func (a *attribute) FeatureType() string { + return `attribute` +} + +func (a *attribute) Get(instance px.Value) px.Value { + if a.kind == constant { + return a.value + } + if v, ok := a.container.GetValue(a.name, instance); ok { + return v + } + panic(px.Error(px.NoAttributeReader, issue.H{`label`: a.Label()})) +} + +func (a *attribute) Label() string { + return fmt.Sprintf(`attribute %s[%s]`, a.container.Label(), a.Name()) +} + +func (a *attribute) Equals(other interface{}, g px.Guard) bool { + if oa, ok := other.(*attribute); ok { + return a.kind == oa.kind && a.override == oa.override && a.name == oa.name && a.final == oa.final && a.typ.Equals(oa.typ, g) + } + return false +} + +func (a *attribute) CallableType() px.Type { + return typeAttributeCallable +} + +func newTypeParameter(c px.Context, name string, container *objectType, initHash *Hash) px.Attribute { + t := &typeParameter{} + t.initialize(c, name, container, initHash) + return t +} diff --git a/vendor/github.com/lyraproj/pcore/types/attributesinfo.go b/vendor/github.com/lyraproj/pcore/types/attributesinfo.go new file mode 100644 index 0000000..97b800b --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/attributesinfo.go @@ -0,0 +1,62 @@ +package types + +import "github.com/lyraproj/pcore/px" + +type attributesInfo struct { + nameToPos map[string]int + attributes []px.Attribute + equalityAttributeIndexes []int + requiredCount int +} + +func newAttributesInfo(attributes []px.Attribute, requiredCount int, equality []string) *attributesInfo { + nameToPos := make(map[string]int, len(attributes)) + posToName := make(map[int]string, len(attributes)) + for ix, at := range attributes { + nameToPos[at.Name()] = ix + posToName[ix] = at.Name() + } + + ei := make([]int, len(equality)) + for ix, e := range equality { + ei[ix] = nameToPos[e] + } + + return &attributesInfo{attributes: attributes, nameToPos: nameToPos, equalityAttributeIndexes: ei, requiredCount: requiredCount} +} + +func (ai *attributesInfo) NameToPos() map[string]int { + return ai.nameToPos +} + +func (ai *attributesInfo) Attributes() []px.Attribute { + return ai.attributes +} + +func (ai *attributesInfo) EqualityAttributeIndex() []int { + return ai.equalityAttributeIndexes +} + +func (ai *attributesInfo) RequiredCount() int { + return ai.requiredCount +} + +func (ai *attributesInfo) PositionalFromHash(hash px.OrderedMap) []px.Value { + nameToPos := ai.NameToPos() + va := make([]px.Value, len(nameToPos)) + + hash.EachPair(func(k px.Value, v px.Value) { + if ix, ok := nameToPos[k.String()]; ok { + va[ix] = v + } + }) + attrs := ai.Attributes() + fillValueSlice(va, attrs) + for i := len(va) - 1; i >= ai.RequiredCount(); i-- { + if !attrs[i].Default(va[i]) { + break + } + va = va[:i] + } + return va +} diff --git a/vendor/github.com/lyraproj/pcore/types/basiccollector.go b/vendor/github.com/lyraproj/pcore/types/basiccollector.go new file mode 100644 index 0000000..14fcd46 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/basiccollector.go @@ -0,0 +1,91 @@ +package types + +import "github.com/lyraproj/pcore/px" + +// A BasicCollector is an extendable basic implementation of the Collector interface +type BasicCollector struct { + values []px.Value + stack [][]px.Value +} + +// NewCollector returns a new Collector instance +func NewCollector() px.Collector { + hm := &BasicCollector{} + hm.Init() + return hm +} + +func (hm *BasicCollector) Init() { + hm.values = 
make([]px.Value, 0, 64) + hm.stack = make([][]px.Value, 1, 8) + hm.stack[0] = make([]px.Value, 0, 1) +} + +func (hm *BasicCollector) AddArray(cap int, doer px.Doer) { + BuildArray(cap, func(ar *Array, elements []px.Value) []px.Value { + hm.Add(ar) + top := len(hm.stack) + hm.stack = append(hm.stack, elements) + doer() + st := hm.stack[top] + hm.stack = hm.stack[0:top] + return st + }) +} + +func (hm *BasicCollector) AddHash(cap int, doer px.Doer) { + BuildHash(cap, func(ar *Hash, entries []*HashEntry) []*HashEntry { + hm.Add(ar) + top := len(hm.stack) + hm.stack = append(hm.stack, make([]px.Value, 0, cap*2)) + doer() + st := hm.stack[top] + hm.stack = hm.stack[0:top] + + top = len(st) + for i := 0; i < top; i += 2 { + entries = append(entries, WrapHashEntry(st[i], st[i+1])) + } + return entries + }) +} + +func (hm *BasicCollector) Add(element px.Value) { + top := len(hm.stack) - 1 + hm.stack[top] = append(hm.stack[top], element) + hm.values = append(hm.values, element) +} + +func (hm *BasicCollector) AddRef(ref int) { + top := len(hm.stack) - 1 + hm.stack[top] = append(hm.stack[top], hm.values[ref]) +} + +func (hm *BasicCollector) CanDoBinary() bool { + return true +} + +func (hm *BasicCollector) CanDoComplexKeys() bool { + return true +} + +func (hm *BasicCollector) PopLast() px.Value { + top := len(hm.stack) - 1 + st := hm.stack[top] + l := len(st) - 1 + if l >= 0 { + v := st[l] + hm.stack[top] = st[:l] + hm.values = hm.values[:len(hm.values)-1] + return v + } + return nil +} + +func (hm *BasicCollector) StringDedupThreshold() int { + return 0 +} + +func (hm *BasicCollector) Value() px.Value { + return hm.stack[0][0] +} diff --git a/vendor/github.com/lyraproj/pcore/types/binarytype.go b/vendor/github.com/lyraproj/pcore/types/binarytype.go new file mode 100644 index 0000000..7534110 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/binarytype.go @@ -0,0 +1,297 @@ +package types + +import ( + "bytes" + "encoding/base64" + "fmt" + "io" + "io/ioutil" + "os" + "reflect" + "unicode/utf8" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var binaryTypeDefault = &BinaryType{} + +var BinaryMetaType px.ObjectType + +func init() { + BinaryMetaType = newObjectType(`Pcore::BinaryType`, `Pcore::AnyType{}`, func(ctx px.Context, args []px.Value) px.Value { + return DefaultBinaryType() + }) + + newGoConstructor2(`Binary`, + func(t px.LocalTypes) { + t.Type(`ByteInteger`, `Integer[0,255]`) + t.Type(`Encoding`, `Enum['%b', '%u', '%B', '%s', '%r']`) + t.Type(`StringHash`, `Struct[value => String, format => Optional[Encoding]]`) + t.Type(`ArrayHash`, `Struct[value => Array[ByteInteger]]`) + }, + + func(d px.Dispatch) { + d.Param(`String`) + d.OptionalParam(`Encoding`) + d.Function(func(c px.Context, args []px.Value) px.Value { + str := args[0].String() + f := `%B` + if len(args) > 1 { + f = args[1].String() + } + return BinaryFromString(str, f) + }) + }, + + func(d px.Dispatch) { + d.Param(`Array[ByteInteger]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return BinaryFromArray(args[0].(px.List)) + }) + }, + + func(d px.Dispatch) { + d.Param(`StringHash`) + d.Function(func(c px.Context, args []px.Value) px.Value { + hv := args[0].(px.OrderedMap) + return BinaryFromString(hv.Get5(`value`, px.Undef).String(), hv.Get5(`format`, px.Undef).String()) + }) + }, + + func(d px.Dispatch) { + d.Param(`ArrayHash`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return BinaryFromArray(args[0].(px.List)) + }) + }, + ) +} + +type ( + BinaryType struct{} + + 
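+ // BinaryType carries no parameters; every Binary value shares the same default type instance.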
// Binary keeps only the value because the type is known and not parameterized + Binary struct { + bytes []byte + } +) + +func DefaultBinaryType() *BinaryType { + return binaryTypeDefault +} + +func (t *BinaryType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *BinaryType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*BinaryType) + return ok +} + +func (t *BinaryType) IsAssignable(o px.Type, g px.Guard) bool { + _, ok := o.(*BinaryType) + return ok +} + +func (t *BinaryType) IsInstance(o px.Value, g px.Guard) bool { + _, ok := o.(*Binary) + return ok +} + +func (t *BinaryType) MetaType() px.ObjectType { + return BinaryMetaType +} + +func (t *BinaryType) Name() string { + return `Binary` +} + +func (t *BinaryType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf([]byte{}), true +} + +func (t *BinaryType) CanSerializeAsString() bool { + return true +} + +func (t *BinaryType) SerializationString() string { + return t.String() +} + +func (t *BinaryType) String() string { + return `Binary` +} + +func (t *BinaryType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *BinaryType) PType() px.Type { + return &TypeType{t} +} + +func WrapBinary(val []byte) *Binary { + return &Binary{val} +} + +// BinaryFromFile opens file appointed by the given path for reading and returns +// its contents as a Binary. The function will panic with an issue.Reported unless +// the operation succeeds. +func BinaryFromFile(path string) *Binary { + if bf, ok := BinaryFromFile2(path); ok { + return bf + } + panic(px.Error(px.FileNotFound, issue.H{`path`: path})) +} + +// BinaryFromFile2 opens file appointed by the given path for reading and returns +// its contents as a Binary together with a boolean indicating if the file was +// found or not. +// +// The function will only return false if the given file does not exist. It will panic +// with an issue.Reported on all other errors. +func BinaryFromFile2(path string) (*Binary, bool) { + bs, err := ioutil.ReadFile(path) + if err != nil { + stat, statErr := os.Stat(path) + if statErr != nil { + if os.IsNotExist(statErr) { + return nil, false + } + if os.IsPermission(statErr) { + panic(px.Error(px.FileReadDenied, issue.H{`path`: path})) + } + } else { + if stat.IsDir() { + panic(px.Error(px.IsDirectory, issue.H{`path`: path})) + } + } + panic(px.Error(px.Failure, issue.H{`message`: err.Error()})) + } + return WrapBinary(bs), true +} + +func BinaryFromString(str string, f string) *Binary { + var bs []byte + var err error + + switch f { + case `%b`: + bs, err = base64.StdEncoding.DecodeString(str) + case `%u`: + bs, err = base64.URLEncoding.DecodeString(str) + case `%B`: + bs, err = base64.StdEncoding.Strict().DecodeString(str) + case `%s`: + if !utf8.ValidString(str) { + panic(illegalArgument(`BinaryFromString`, 0, `The given string is not valid utf8. 
Cannot create a Binary UTF-8 representation`)) + } + bs = []byte(str) + case `%r`: + bs = []byte(str) + default: + panic(illegalArgument(`BinaryFromString`, 1, `unsupported format specifier`)) + } + if err == nil { + return WrapBinary(bs) + } + panic(illegalArgument(`BinaryFromString`, 0, err.Error())) +} + +func BinaryFromArray(array px.List) *Binary { + top := array.Len() + result := make([]byte, top) + for idx := 0; idx < top; idx++ { + if v, ok := toInt(array.At(idx)); ok && 0 <= v && v <= 255 { + result[idx] = byte(v) + continue + } + panic(illegalArgument(`BinaryFromString`, 0, `The given array is not all integers between 0 and 255`)) + } + return WrapBinary(result) +} + +func (bv *Binary) AsArray() px.List { + vs := make([]px.Value, len(bv.bytes)) + for i, b := range bv.bytes { + vs[i] = integerValue(int64(b)) + } + return WrapValues(vs) +} + +func (bv *Binary) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*Binary); ok { + return bytes.Equal(bv.bytes, ov.bytes) + } + return false +} + +func (bv *Binary) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(bv.bytes) +} + +func (bv *Binary) ReflectTo(c px.Context, value reflect.Value) { + switch value.Type().Elem().Kind() { + case reflect.Int8, reflect.Uint8: + value.SetBytes(bv.bytes) + case reflect.Interface: + value.Set(reflect.ValueOf(bv.bytes)) + default: + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: `[]byte`, `actual`: fmt.Sprintf(`[]%s`, value.Kind())})) + } +} + +func (bv *Binary) CanSerializeAsString() bool { + return true +} + +func (bv *Binary) SerializationString() string { + return base64.StdEncoding.Strict().EncodeToString(bv.bytes) +} + +func (bv *Binary) String() string { + return px.ToString2(bv, None) +} + +func (bv *Binary) ToKey(b *bytes.Buffer) { + b.WriteByte(0) + b.WriteByte(HkBinary) + b.Write(bv.bytes) +} + +func (bv *Binary) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), bv.PType()) + var str string + switch f.FormatChar() { + case 's': + if !utf8.Valid(bv.bytes) { + panic(px.Error(px.Failure, issue.H{`message`: `binary data is not valid UTF-8`})) + } + str = string(bv.bytes) + case 'p': + str = `Binary('` + base64.StdEncoding.EncodeToString(bv.bytes) + `')` + case 'b': + str = base64.StdEncoding.EncodeToString(bv.bytes) + "\n" + case 'B': + str = base64.StdEncoding.Strict().EncodeToString(bv.bytes) + case 'u': + str = base64.URLEncoding.EncodeToString(bv.bytes) + case 't': + str = `Binary` + case 'T': + str = `BINARY` + default: + panic(s.UnsupportedFormat(bv.PType(), `bButTsp`, f)) + } + f.ApplyStringFlags(b, str, f.IsAlt()) +} + +func (bv *Binary) PType() px.Type { + return DefaultBinaryType() +} + +func (bv *Binary) Bytes() []byte { + return bv.bytes +} diff --git a/vendor/github.com/lyraproj/pcore/types/booleantype.go b/vendor/github.com/lyraproj/pcore/types/booleantype.go new file mode 100644 index 0000000..d4288cd --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/booleantype.go @@ -0,0 +1,314 @@ +package types + +import ( + "io" + "reflect" + "strings" + + "github.com/lyraproj/pcore/px" +) + +var BooleanFalse = booleanValue(false) +var BooleanTrue = booleanValue(true) + +type ( + BooleanType struct { + value int // -1 == unset, 0 == false, 1 == true + } + + // booleanValue represents bool as a pcore.Value + booleanValue bool +) + +var booleanTypeDefault = &BooleanType{-1} + +var BooleanMetaType px.ObjectType + +func init() { + BooleanMetaType = newObjectType(`Pcore::BooleanType`, `Pcore::ScalarDataType { + 
attributes => { + value => { type => Optional[Boolean], value => undef } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newBooleanType2(args...) + }) + + newGoConstructor(`Boolean`, + func(d px.Dispatch) { + d.Param(`Variant[Integer, Float, Boolean, Enum['false','true','yes','no','y','n',true]]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + switch arg := args[0].(type) { + case integerValue: + if arg == 0 { + return BooleanFalse + } + return BooleanTrue + case floatValue: + if arg == 0.0 { + return BooleanFalse + } + return BooleanTrue + case booleanValue: + return arg + default: + switch strings.ToLower(arg.String()) { + case `false`, `no`, `n`: + return BooleanFalse + default: + return BooleanTrue + } + } + }) + }, + ) +} + +func DefaultBooleanType() *BooleanType { + return booleanTypeDefault +} + +func NewBooleanType(value bool) *BooleanType { + n := 0 + if value { + n = 1 + } + return &BooleanType{n} +} + +func newBooleanType2(args ...px.Value) *BooleanType { + switch len(args) { + case 0: + return DefaultBooleanType() + case 1: + if bv, ok := args[0].(booleanValue); ok { + return NewBooleanType(bool(bv)) + } + panic(illegalArgumentType(`Boolean[]`, 0, `Boolean`, args[0])) + default: + panic(illegalArgumentCount(`Boolean[]`, `0 or 1`, len(args))) + } +} + +func (t *BooleanType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *BooleanType) Default() px.Type { + return booleanTypeDefault +} + +func (t *BooleanType) Generic() px.Type { + return booleanTypeDefault +} + +func (t *BooleanType) Equals(o interface{}, g px.Guard) bool { + if bo, ok := o.(*BooleanType); ok { + return t.value == bo.value + } + return false +} + +func (t *BooleanType) Get(key string) (px.Value, bool) { + switch key { + case `value`: + switch t.value { + case 0: + return BooleanFalse, true + case 1: + return BooleanTrue, true + default: + return px.Undef, true + } + default: + return nil, false + } +} + +func (t *BooleanType) MetaType() px.ObjectType { + return BooleanMetaType +} + +func (t *BooleanType) Name() string { + return `Boolean` +} + +func (t *BooleanType) String() string { + switch t.value { + case 0: + return `Boolean[false]` + case 1: + return `Boolean[true]` + default: + return `Boolean` + } +} + +func (t *BooleanType) IsAssignable(o px.Type, g px.Guard) bool { + if bo, ok := o.(*BooleanType); ok { + return t.value == -1 || t.value == bo.value + } + return false +} + +func (t *BooleanType) IsInstance(o px.Value, g px.Guard) bool { + if bo, ok := o.(booleanValue); ok { + return t.value == -1 || bool(bo) == (t.value == 1) + } + return false +} + +func (t *BooleanType) Parameters() []px.Value { + if t.value == -1 { + return px.EmptyValues + } + return []px.Value{booleanValue(t.value == 1)} +} + +func (t *BooleanType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(true), true +} + +func (t *BooleanType) CanSerializeAsString() bool { + return true +} + +func (t *BooleanType) SerializationString() string { + return t.String() +} + +func (t *BooleanType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *BooleanType) PType() px.Type { + return &TypeType{t} +} + +func WrapBoolean(val bool) px.Boolean { + if val { + return BooleanTrue + } + return BooleanFalse +} + +func (bv booleanValue) Bool() bool { + return bool(bv) +} + +func (bv booleanValue) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(booleanValue); ok { + return bv == ov + } + return false +} + +func (bv booleanValue) 
Float() float64 { + if bv { + return float64(1.0) + } + return float64(0.0) +} + +func (bv booleanValue) Int() int64 { + if bv { + return int64(1) + } + return int64(0) +} + +func (bv booleanValue) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(bool(bv)) +} + +var theTrue = true +var theFalse = false +var theTruePtr = &theTrue +var theFalsePtr = &theFalse + +var reflectTrue = reflect.ValueOf(theTrue) +var reflectFalse = reflect.ValueOf(theFalse) +var reflectTruePtr = reflect.ValueOf(theTruePtr) +var reflectFalsePtr = reflect.ValueOf(theFalsePtr) + +func (bv booleanValue) ReflectTo(c px.Context, value reflect.Value) { + if value.Kind() == reflect.Interface { + if bv { + value.Set(reflectTrue) + } else { + value.Set(reflectFalse) + } + } else if value.Kind() == reflect.Ptr { + if bv { + value.Set(reflectTruePtr) + } else { + value.Set(reflectFalsePtr) + } + } else { + value.SetBool(bool(bv)) + } +} + +func (bv booleanValue) CanSerializeAsString() bool { + return true +} + +func (bv booleanValue) SerializationString() string { + return bv.String() +} + +func (bv booleanValue) String() string { + if bv { + return `true` + } + return `false` +} + +func (bv booleanValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), bv.PType()) + switch f.FormatChar() { + case 't': + f.ApplyStringFlags(b, bv.stringVal(f.IsAlt(), `true`, `false`), false) + case 'T': + f.ApplyStringFlags(b, bv.stringVal(f.IsAlt(), `True`, `False`), false) + case 'y': + f.ApplyStringFlags(b, bv.stringVal(f.IsAlt(), `yes`, `no`), false) + case 'Y': + f.ApplyStringFlags(b, bv.stringVal(f.IsAlt(), `Yes`, `No`), false) + case 'd', 'x', 'X', 'o', 'b', 'B': + integerValue(bv.Int()).ToString(b, px.NewFormatContext(DefaultIntegerType(), f, s.Indentation()), g) + case 'e', 'E', 'f', 'g', 'G', 'a', 'A': + floatValue(bv.Float()).ToString(b, px.NewFormatContext(DefaultFloatType(), f, s.Indentation()), g) + case 's', 'p': + f.ApplyStringFlags(b, bv.stringVal(false, `true`, `false`), false) + default: + panic(s.UnsupportedFormat(bv.PType(), `tTyYdxXobBeEfgGaAsp`, f)) + } +} + +func (bv booleanValue) stringVal(alt bool, yes string, no string) string { + str := no + if bv { + str = yes + } + if alt { + str = str[:1] + } + return str +} + +var hkTrue = px.HashKey([]byte{1, HkBoolean, 1}) +var hkFalse = px.HashKey([]byte{1, HkBoolean, 0}) + +func (bv booleanValue) ToKey() px.HashKey { + if bv { + return hkTrue + } + return hkFalse +} + +func (bv booleanValue) PType() px.Type { + if bv { + return &BooleanType{1} + } + return &BooleanType{0} +} diff --git a/vendor/github.com/lyraproj/pcore/types/callabletype.go b/vendor/github.com/lyraproj/pcore/types/callabletype.go new file mode 100644 index 0000000..41df42f --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/callabletype.go @@ -0,0 +1,325 @@ +package types + +import ( + "io" + "strconv" + + "github.com/lyraproj/pcore/px" +) + +type CallableType struct { + paramsType px.Type + returnType px.Type + blockType px.Type // Callable or Optional[Callable] +} + +var CallableMetaType px.ObjectType + +func init() { + CallableMetaType = newObjectType(`Pcore::CallableType`, + `Pcore::AnyType { + attributes => { + param_types => { + type => Optional[Type[Tuple]], + value => undef + }, + block_type => { + type => Optional[Type[Callable]], + value => undef + }, + return_type => { + type => Optional[Type], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newCallableType2(args...) 
+ }) +} + +func DefaultCallableType() *CallableType { + return callableTypeDefault +} + +func NewCallableType(paramsType px.Type, returnType px.Type, blockType px.Type) *CallableType { + return &CallableType{paramsType, returnType, blockType} +} + +func newCallableType2(args ...px.Value) *CallableType { + return newCallableType3(WrapValues(args)) +} + +func newCallableType3(args px.List) *CallableType { + argc := args.Len() + if argc == 0 { + return DefaultCallableType() + } + + first := args.At(0) + if tv, ok := first.(*TupleType); ok { + var returnType px.Type + var blockType px.Type + if argc > 1 { + blockType, ok = args.At(1).(px.Type) + if argc > 2 { + returnType, ok = args.At(2).(px.Type) + } + } + if ok { + return &CallableType{tv, returnType, blockType} + } + } + + var ( + rt px.Type + block px.Type + ok bool + ) + + if argc == 1 || argc == 2 { + // check for [[params, block], return] + var iv px.List + if iv, ok = first.(px.List); ok { + if argc == 2 { + if rt, ok = args.At(1).(px.Type); !ok { + panic(illegalArgumentType(`Callable[]`, 1, `Type`, args.At(1))) + } + } + argc = iv.Len() + args = iv + } + } + + last := args.At(argc - 1) + block, ok = last.(*CallableType) + if !ok { + block = nil + var ob *OptionalType + if ob, ok = last.(*OptionalType); ok { + if _, ok = ob.typ.(*CallableType); ok { + block = ob + } + } + } + if ok { + argc-- + args = args.Slice(0, argc) + } + return NewCallableType(tupleFromArgs(true, args), rt, block) +} + +func (t *CallableType) BlockType() px.Type { + if t.blockType == nil { + return nil // Return untyped nil + } + return t.blockType +} + +func (t *CallableType) CallableWith(args []px.Value, block px.Lambda) bool { + if block != nil { + cb := t.blockType + switch ca := cb.(type) { + case nil: + return false + case *OptionalType: + cb = ca.ContainedType() + } + if block.PType() == nil { + return false + } + if !isAssignable(block.PType(), cb) { + return false + } + } else if t.blockType != nil && !isAssignable(t.blockType, anyTypeDefault) { + // Required block but non provided + return false + } + if pt, ok := t.paramsType.(*TupleType); ok { + return pt.IsInstance3(args, nil) + } + return true +} + +func (t *CallableType) Accept(v px.Visitor, g px.Guard) { + v(t) + if t.paramsType != nil { + t.paramsType.Accept(v, g) + } + if t.blockType != nil { + t.blockType.Accept(v, g) + } + if t.returnType != nil { + t.returnType.Accept(v, g) + } +} + +func (t *CallableType) BlockName() string { + return `block` +} + +func (t *CallableType) CanSerializeAsString() bool { + return canSerializeAsString(t.paramsType) && canSerializeAsString(t.blockType) && canSerializeAsString(t.returnType) +} + +func (t *CallableType) SerializationString() string { + return t.String() +} + +func (t *CallableType) Default() px.Type { + return callableTypeDefault +} + +func (t *CallableType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*CallableType) + return ok +} + +func (t *CallableType) Generic() px.Type { + return callableTypeDefault +} + +func (t *CallableType) Get(key string) (px.Value, bool) { + switch key { + case `param_types`: + if t.paramsType == nil { + return px.Undef, true + } + return t.paramsType, true + case `return_type`: + if t.returnType == nil { + return px.Undef, true + } + return t.returnType, true + case `block_type`: + if t.blockType == nil { + return px.Undef, true + } + return t.blockType, true + default: + return nil, false + } +} + +func (t *CallableType) IsAssignable(o px.Type, g px.Guard) bool { + oc, ok := o.(*CallableType) + if !ok { + 
return false + } + if t.returnType == nil && t.paramsType == nil && t.blockType == nil { + return true + } + + if t.returnType != nil { + or := oc.returnType + if or == nil { + or = anyTypeDefault + } + if !isAssignable(t.returnType, or) { + return false + } + } + + // NOTE: these tests are made in reverse as it is calling the callable that is constrained + // (it's lower bound), not its upper bound + if oc.paramsType != nil && (t.paramsType == nil || !isAssignable(oc.paramsType, t.paramsType)) { + return false + } + + if t.blockType == nil { + return oc.blockType == nil + } + if oc.blockType == nil { + return false + } + return isAssignable(oc.blockType, t.blockType) +} + +func (t *CallableType) IsInstance(o px.Value, g px.Guard) bool { + if l, ok := o.(px.Lambda); ok { + return isAssignable(t, l.PType()) + } + // TODO: Maybe check Go func using reflection + return false +} + +func (t *CallableType) MetaType() px.ObjectType { + return CallableMetaType +} + +func (t *CallableType) Name() string { + return `Callable` +} + +func (t *CallableType) ParameterNames() []string { + if pt, ok := t.paramsType.(*TupleType); ok { + n := len(pt.types) + r := make([]string, 0, n) + for i := 0; i < n; { + i++ + r = append(r, strconv.Itoa(i)) + } + return r + } + return []string{} +} + +func (t *CallableType) Parameters() (params []px.Value) { + if *t == *callableTypeDefault { + return px.EmptyValues + } + if pt, ok := t.paramsType.(*TupleType); ok { + tupleParams := pt.Parameters() + if len(tupleParams) == 0 { + params = make([]px.Value, 0) + } else { + params = px.Select(tupleParams, func(p px.Value) bool { _, ok := p.(*UnitType); return !ok }) + } + } else { + params = make([]px.Value, 0) + } + if t.blockType != nil { + params = append(params, t.blockType) + } + if t.returnType != nil { + params = []px.Value{WrapValues(params), t.returnType} + } + return params +} + +func (t *CallableType) ParametersType() px.Type { + if t.paramsType == nil { + return nil // Return untyped nil + } + return t.paramsType +} + +func (t *CallableType) Resolve(c px.Context) px.Type { + if t.paramsType != nil { + t.paramsType = resolve(c, t.paramsType).(*TupleType) + } + if t.returnType != nil { + t.returnType = resolve(c, t.returnType) + } + if t.blockType != nil { + t.blockType = resolve(c, t.blockType) + } + return t +} + +func (t *CallableType) ReturnType() px.Type { + return t.returnType +} + +func (t *CallableType) String() string { + return px.ToString2(t, None) +} + +func (t *CallableType) PType() px.Type { + return &TypeType{t} +} + +func (t *CallableType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +var callableTypeDefault = &CallableType{paramsType: nil, blockType: nil, returnType: nil} diff --git a/vendor/github.com/lyraproj/pcore/types/coerce.go b/vendor/github.com/lyraproj/pcore/types/coerce.go new file mode 100644 index 0000000..368922e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/coerce.go @@ -0,0 +1,174 @@ +package types + +import ( + "strings" + + "github.com/lyraproj/pcore/px" +) + +// CoerceTo will deep coerce the given value into an instance of the given type t. The coercion will +// recurse down into hashes, arrays, and objects and take key, value, and attribute types into account. +// +// The label is used in potential type assertion errors. It should indicate what it is that is being +// coerced. 
+func CoerceTo(c px.Context, label string, typ px.Type, value px.Value) (result px.Value) { + return coerceTo(c, []string{label}, typ, value) +} + +type path []string + +func (p path) with(n string) path { + np := make(path, len(p)+1) + copy(np, p) + np[len(p)] = n + return np +} + +// CanCoerce responds true iv the given value can be coerced into an instance of the given type. +func CanCoerce(typ px.Type, value px.Value) bool { + if typ.IsInstance(value, nil) { + return true + } + + if opt, ok := typ.(*OptionalType); ok { + typ = opt.ContainedType() + } + + switch t := typ.(type) { + case *ArrayType: + et := t.ElementType() + if oa, ok := value.(*Array); ok { + return oa.All(func(e px.Value) bool { return CanCoerce(et, e) }) + } + return CanCoerce(et, value) + case *HashType: + kt := t.KeyType() + vt := t.ValueType() + if oh, ok := value.(*Hash); ok { + return oh.All(func(v px.Value) bool { + e := v.(px.MapEntry) + if CanCoerce(kt, e.Key()) { + return CanCoerce(vt, e.Value()) + } + return false + }) + } + return false + case *StructType: + hm := t.HashedMembers() + if oh, ok := value.(*Hash); ok { + return oh.All(func(v px.Value) bool { + e := v.(px.MapEntry) + var s px.StringValue + if s, ok = e.Key().(px.StringValue); ok { + var se *StructElement + if se, ok = hm[s.String()]; ok { + return CanCoerce(se.Value(), e.Value()) + } + } + return false + }) + } + return false + case px.ObjectType: + ai := t.AttributesInfo() + switch o := value.(type) { + case *Array: + for i, ca := range ai.Attributes() { + if i >= o.Len() { + return i <= ai.RequiredCount() + } + if !CanCoerce(ca.Type(), o.At(i)) { + return false + } + } + case *Hash: + for _, ca := range ai.Attributes() { + e, ok := o.GetEntry(ca.Name()) + if !ok { + if !ca.HasValue() { + return false + } + } else if !CanCoerce(ca.Type(), e.Value()) { + return false + } + } + default: + return false + } + return true + case *InitType: + // Should have answered true to IsInstance above + return false + } + return NewInitType(typ, emptyArray).IsInstance(value, nil) +} + +func coerceTo(c px.Context, path path, typ px.Type, value px.Value) px.Value { + if typ.IsInstance(value, nil) { + return value + } + + if opt, ok := typ.(*OptionalType); ok { + typ = opt.ContainedType() + } + + labelFunc := func() string { return strings.Join(path, `/`) } + + switch t := typ.(type) { + case *ArrayType: + et := t.ElementType() + ep := path.with(`[]`) + if oa, ok := value.(*Array); ok { + value = oa.Map(func(e px.Value) px.Value { return coerceTo(c, ep, et, e) }) + if t.Size().IsInstance3(value.(*Array).Len()) { + return value + } + } + panic(px.MismatchError(labelFunc, t, value)) + case *HashType: + kt := t.KeyType() + vt := t.ValueType() + kp := path.with(`key`) + if oh, ok := value.(*Hash); ok { + value = oh.MapEntries(func(e px.MapEntry) px.MapEntry { + kv := coerceTo(c, kp, kt, e.Key()) + return WrapHashEntry(kv, coerceTo(c, path.with(kv.String()), vt, e.Value())) + }) + if !t.Size().IsInstance3(value.(*Hash).Len()) { + panic(px.MismatchError(labelFunc, t, value)) + } + return value + } + panic(px.MismatchError(labelFunc, t, value)) + case *StructType: + hm := t.HashedMembers() + if oh, ok := value.(*Hash); ok { + value = oh.MapEntries(func(e px.MapEntry) px.MapEntry { + var s px.StringValue + if s, ok = e.Key().(px.StringValue); ok { + var se *StructElement + if se, ok = hm[s.String()]; ok { + return WrapHashEntry(s, coerceTo(c, path.with(s.String()), se.Value(), e.Value())) + } + } + return e + }) + return px.AssertInstance(labelFunc, t, value) + } + 
panic(px.MismatchError(labelFunc, t, value)) + case px.ObjectType: + ai := t.AttributesInfo() + if oh, ok := value.(*Hash); ok { + el := make([]*HashEntry, 0, oh.Len()) + for _, ca := range ai.Attributes() { + if e, ok := oh.GetEntry(ca.Name()); ok { + el = append(el, WrapHashEntry(e.Key(), coerceTo(c, []string{ca.Label()}, ca.Type(), e.Value()))) + } + } + return newInstance(c, t, oh.Merge(WrapHash(el))) + } + panic(px.MismatchError(labelFunc, t, value)) + } + return newInstance(c, typ, value) +} diff --git a/vendor/github.com/lyraproj/pcore/types/collectiontype.go b/vendor/github.com/lyraproj/pcore/types/collectiontype.go new file mode 100644 index 0000000..4f4dc46 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/collectiontype.go @@ -0,0 +1,173 @@ +package types + +import ( + "io" + "math" + + "github.com/lyraproj/pcore/px" +) + +type CollectionType struct { + size *IntegerType +} + +var CollectionMetaType px.ObjectType + +func init() { + CollectionMetaType = newObjectType(`Pcore::CollectionType`, `Pcore::AnyType { + attributes => { + 'size_type' => { type => Type[Integer], value => Integer[0] } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newCollectionType2(args...) + }) +} + +func DefaultCollectionType() *CollectionType { + return collectionTypeDefault +} + +func NewCollectionType(size *IntegerType) *CollectionType { + if size == nil || *size == *IntegerTypePositive { + return DefaultCollectionType() + } + return &CollectionType{size} +} + +func newCollectionType2(args ...px.Value) *CollectionType { + switch len(args) { + case 0: + return DefaultCollectionType() + case 1: + arg := args[0] + size, ok := arg.(*IntegerType) + if !ok { + sz, ok := toInt(arg) + if !ok { + if _, ok := arg.(*DefaultValue); !ok { + panic(illegalArgumentType(`Collection[]`, 0, `Variant[Integer, Default, Type[Integer]]`, arg)) + } + sz = 0 + } + size = NewIntegerType(sz, math.MaxInt64) + } + return NewCollectionType(size) + case 2: + arg := args[0] + min, ok := toInt(arg) + if !ok { + if _, ok := arg.(*DefaultValue); !ok { + panic(illegalArgumentType(`Collection[]`, 0, `Variant[Integer, Default]`, arg)) + } + min = 0 + } + arg = args[1] + max, ok := toInt(arg) + if !ok { + if _, ok := arg.(*DefaultValue); !ok { + panic(illegalArgumentType(`Collection[]`, 1, `Variant[Integer, Default]`, arg)) + } + max = math.MaxInt64 + } + return NewCollectionType(NewIntegerType(min, max)) + default: + panic(illegalArgumentCount(`Collection[]`, `0 - 2`, len(args))) + } +} + +func (t *CollectionType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.size.Accept(v, g) +} + +func (t *CollectionType) Default() px.Type { + return collectionTypeDefault +} + +func (t *CollectionType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*CollectionType); ok { + return t.size.Equals(ot.size, g) + } + return false +} + +func (t *CollectionType) Generic() px.Type { + return collectionTypeDefault +} + +func (t *CollectionType) Get(key string) (px.Value, bool) { + switch key { + case `size_type`: + if t.size == nil { + return px.Undef, true + } + return t.size, true + default: + return nil, false + } +} + +func (t *CollectionType) IsAssignable(o px.Type, g px.Guard) bool { + var osz *IntegerType + switch o := o.(type) { + case *CollectionType: + osz = o.size + case *ArrayType: + osz = o.size + case *HashType: + osz = o.size + case *TupleType: + osz = o.givenOrActualSize + case *StructType: + n := int64(len(o.elements)) + osz = NewIntegerType(n, n) + default: + return false + } + return 
t.size.IsAssignable(osz, g) +} + +func (t *CollectionType) IsInstance(o px.Value, g px.Guard) bool { + return t.IsAssignable(o.PType(), g) +} + +func (t *CollectionType) MetaType() px.ObjectType { + return CollectionMetaType +} + +func (t *CollectionType) Name() string { + return `Collection` +} + +func (t *CollectionType) Parameters() []px.Value { + if *t.size == *IntegerTypePositive { + return px.EmptyValues + } + return t.size.SizeParameters() +} + +func (t *CollectionType) CanSerializeAsString() bool { + return true +} + +func (t *CollectionType) SerializationString() string { + return t.String() +} + +func (t *CollectionType) Size() *IntegerType { + return t.size +} + +func (t *CollectionType) String() string { + return px.ToString2(t, None) +} + +func (t *CollectionType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *CollectionType) PType() px.Type { + return &TypeType{t} +} + +var collectionTypeDefault = &CollectionType{IntegerTypePositive} diff --git a/vendor/github.com/lyraproj/pcore/types/commonality.go b/vendor/github.com/lyraproj/pcore/types/commonality.go new file mode 100644 index 0000000..8d7ea11 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/commonality.go @@ -0,0 +1,175 @@ +package types + +import ( + "math" + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +// CommonType returns a type that both a and b are assignable to +func commonType(a px.Type, b px.Type) px.Type { + if isAssignable(a, b) { + return a + } + if isAssignable(b, a) { + return a + } + + // Deal with mergable string types + switch a.(type) { + case *EnumType: + switch b.(type) { + case *vcStringType: + str := b.(*vcStringType).value + ea := a.(*EnumType) + return NewEnumType(utils.Unique(append(ea.values, str)), ea.caseInsensitive) + + case px.StringType: + return DefaultStringType() + + case *EnumType: + ea := a.(*EnumType) + eb := b.(*EnumType) + return NewEnumType(utils.Unique(append(ea.values, eb.values...)), ea.caseInsensitive || eb.caseInsensitive) + } + + case *scStringType: + switch b.(type) { + case *scStringType: + as := a.(*scStringType) + bs := b.(*scStringType) + return NewStringType(commonType(as.Size(), bs.Size()).(*IntegerType), ``) + + case px.StringType, *EnumType: + return DefaultStringType() + } + + case *vcStringType: + switch b.(type) { + case *vcStringType: + as := a.(*vcStringType) + bs := b.(*vcStringType) + return NewEnumType([]string{as.value, bs.value}, false) + + case px.StringType: + return DefaultStringType() + + case *EnumType: + return commonType(b, a) + } + } + + // Deal with mergable types same type + if reflect.TypeOf(a) == reflect.TypeOf(b) { + switch a.(type) { + case *ArrayType: + aa := a.(*ArrayType) + ba := b.(*ArrayType) + return NewArrayType(commonType(aa.typ, ba.typ), commonType(aa.size, ba.size).(*IntegerType)) + + case *FloatType: + af := a.(*FloatType) + bf := b.(*FloatType) + return NewFloatType(math.Min(af.min, bf.min), math.Max(af.max, bf.max)) + + case *IntegerType: + ai := a.(*IntegerType) + bi := b.(*IntegerType) + min := ai.min + if bi.min < min { + min = bi.min + } + max := ai.max + if bi.max > max { + max = bi.max + } + return NewIntegerType(min, max) + + case *IterableType: + an := a.(*IterableType) + bn := b.(*IterableType) + return NewIterableType(commonType(an.ElementType(), bn.ElementType())) + + case *IteratorType: + an := a.(*IteratorType) + bn := b.(*IteratorType) + return NewIteratorType(commonType(an.ElementType(), bn.ElementType())) + + 
case *NotUndefType: + an := a.(*NotUndefType) + bn := b.(*NotUndefType) + return NewNotUndefType(commonType(an.ContainedType(), bn.ContainedType())) + + case *PatternType: + ap := a.(*PatternType) + bp := b.(*PatternType) + return NewPatternType(UniqueRegexps(append(ap.regexps, bp.regexps...))) + + case *RuntimeType: + ar := a.(*RuntimeType) + br := b.(*RuntimeType) + if ar.runtime == br.runtime { + return NewRuntimeType(ar.runtime, ``, nil) + } + return DefaultRuntimeType() + + case *TupleType: + at := a.(*TupleType) + bt := b.(*TupleType) + return NewArrayType(commonType(at.CommonElementType(), bt.CommonElementType()), commonType(at.Size(), bt.Size()).(*IntegerType)) + + case *TypeType: + at := a.(*TypeType) + bt := b.(*TypeType) + return NewTypeType(commonType(at.ContainedType(), bt.ContainedType())) + + case *VariantType: + ap := a.(*VariantType) + bp := b.(*VariantType) + return NewVariantType(UniqueTypes(append(ap.Types(), bp.Types()...))...) + } + } + + if isCommonNumeric(a, b) { + return numericTypeDefault + } + if isCommonScalarData(a, b) { + return scalarDataTypeDefault + } + if isCommonScalar(a, b) { + return scalarTypeDefault + } + if isCommonData(a, b) { + return dataTypeDefault + } + if isCommonRichData(a, b) { + return richDataTypeDefault + } + return anyTypeDefault +} + +func isCommonNumeric(a px.Type, b px.Type) bool { + return isAssignable(numericTypeDefault, a) && isAssignable(numericTypeDefault, b) +} + +func isCommonScalarData(a px.Type, b px.Type) bool { + return isAssignable(scalarDataTypeDefault, a) && isAssignable(scalarDataTypeDefault, b) +} + +func isCommonScalar(a px.Type, b px.Type) bool { + return isAssignable(scalarTypeDefault, a) && isAssignable(scalarTypeDefault, b) +} + +func isCommonData(a px.Type, b px.Type) bool { + return isAssignable(dataTypeDefault, a) && isAssignable(dataTypeDefault, b) +} + +func isCommonRichData(a px.Type, b px.Type) bool { + return isAssignable(richDataTypeDefault, a) && isAssignable(richDataTypeDefault, b) +} + +func init() { + px.CommonType = commonType +} diff --git a/vendor/github.com/lyraproj/pcore/types/constants.go b/vendor/github.com/lyraproj/pcore/types/constants.go new file mode 100644 index 0000000..d5737a9 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/constants.go @@ -0,0 +1,17 @@ +package types + +import ( + "github.com/lyraproj/pcore/px" +) + +var emptyArray = WrapValues([]px.Value{}) +var emptyMap = WrapHash([]*HashEntry{}) +var emptyString = stringValue(``) +var undef = WrapUndef() + +func init() { + px.EmptyArray = emptyArray + px.EmptyMap = emptyMap + px.EmptyString = emptyString + px.Undef = undef +} diff --git a/vendor/github.com/lyraproj/pcore/types/conversions.go b/vendor/github.com/lyraproj/pcore/types/conversions.go new file mode 100644 index 0000000..50f5fdf --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/conversions.go @@ -0,0 +1,24 @@ +package types + +import ( + "github.com/lyraproj/pcore/px" +) + +func toFloat(v px.Value) (float64, bool) { + if iv, ok := v.(floatValue); ok { + return float64(iv), true + } + return 0.0, false +} + +func toInt(v px.Value) (int64, bool) { + if iv, ok := v.(integerValue); ok { + return int64(iv), true + } + return 0, false +} + +func init() { + px.ToInt = toInt + px.ToFloat = toFloat +} diff --git a/vendor/github.com/lyraproj/pcore/types/defaulttype.go b/vendor/github.com/lyraproj/pcore/types/defaulttype.go new file mode 100644 index 0000000..c32241d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/defaulttype.go @@ -0,0 +1,107 @@ 
+package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type ( + DefaultType struct{} + + // DefaultValue is an empty struct because both type and value are known + DefaultValue struct{} +) + +var defaultTypeDefault = &DefaultType{} + +var DefaultMetaType px.ObjectType + +func init() { + DefaultMetaType = newObjectType(`Pcore::DefaultType`, `Pcore::AnyType{}`, func(ctx px.Context, args []px.Value) px.Value { + return DefaultDefaultType() + }) +} + +func DefaultDefaultType() *DefaultType { + return defaultTypeDefault +} + +func (t *DefaultType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *DefaultType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*DefaultType) + return ok +} + +func (t *DefaultType) IsAssignable(o px.Type, g px.Guard) bool { + return o == defaultTypeDefault +} + +func (t *DefaultType) IsInstance(o px.Value, g px.Guard) bool { + _, ok := o.(*DefaultValue) + return ok +} + +func (t *DefaultType) MetaType() px.ObjectType { + return DefaultMetaType +} + +func (t *DefaultType) Name() string { + return `Default` +} + +func (t *DefaultType) CanSerializeAsString() bool { + return true +} + +func (t *DefaultType) SerializationString() string { + return t.String() +} + +func (t *DefaultType) String() string { + return px.ToString2(t, None) +} + +func (t *DefaultType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *DefaultType) PType() px.Type { + return &TypeType{t} +} + +func WrapDefault() *DefaultValue { + return &DefaultValue{} +} + +func (dv *DefaultValue) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*DefaultValue) + return ok +} + +func (dv *DefaultValue) ToKey() px.HashKey { + return px.HashKey([]byte{1, HkDefault}) +} + +func (dv *DefaultValue) String() string { + return `default` +} + +func (dv *DefaultValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), dv.PType()) + switch f.FormatChar() { + case 'd', 's', 'p': + f.ApplyStringFlags(b, `default`, false) + case 'D': + f.ApplyStringFlags(b, `Default`, false) + default: + panic(s.UnsupportedFormat(dv.PType(), `dDsp`, f)) + } +} + +func (dv *DefaultValue) PType() px.Type { + return DefaultDefaultType() +} diff --git a/vendor/github.com/lyraproj/pcore/types/deferred.go b/vendor/github.com/lyraproj/pcore/types/deferred.go new file mode 100644 index 0000000..7599be0 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/deferred.go @@ -0,0 +1,158 @@ +package types + +import ( + "io" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/px" +) + +var DeferredMetaType px.ObjectType + +func init() { + DeferredMetaType = newObjectType(`Deferred`, `{ + attributes => { + # Fully qualified name of the function + name => { type => Pattern[/\A[$]?[a-z][0-9A-Za-z_]*(?:::[a-z][0-9A-Za-z_]*)*\z/] }, + arguments => { type => Optional[Array[Any]], value => undef}, + }}`, + func(ctx px.Context, args []px.Value) px.Value { + return newDeferred2(args...) 
+ }, + func(ctx px.Context, args []px.Value) px.Value { + return newDeferredFromHash(args[0].(*Hash)) + }) +} + +type Deferred interface { + px.Value + + Name() string + + Arguments() *Array + + Resolve(c px.Context, scope px.Keyed) px.Value +} + +type deferred struct { + name string + arguments *Array +} + +func NewDeferred(name string, arguments ...px.Value) *deferred { + return &deferred{name, WrapValues(arguments)} +} + +func newDeferred2(args ...px.Value) *deferred { + argc := len(args) + if argc < 1 || argc > 2 { + panic(illegalArgumentCount(`Deferred[]`, `1 - 2`, argc)) + } + if name, ok := args[0].(stringValue); ok { + if argc == 1 { + return &deferred{string(name), emptyArray} + } + if as, ok := args[1].(*Array); ok { + return &deferred{string(name), as} + } + panic(illegalArgumentType(`deferred[]`, 1, `Array`, args[1])) + } + panic(illegalArgumentType(`deferred[]`, 0, `String`, args[0])) +} + +func newDeferredFromHash(hash *Hash) *deferred { + name := hash.Get5(`name`, px.EmptyString).String() + arguments := hash.Get5(`arguments`, px.EmptyArray).(*Array) + return &deferred{name, arguments} +} + +func (e *deferred) Name() string { + return e.name +} + +func (e *deferred) Arguments() *Array { + return e.arguments +} + +func (e *deferred) String() string { + return px.ToString(e) +} + +func (e *deferred) Equals(other interface{}, guard px.Guard) bool { + if o, ok := other.(*deferred); ok { + return e.name == o.name && + px.Equals(e.arguments, o.arguments, guard) + } + return false +} + +func (e *deferred) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + ObjectToString(e, s, b, g) +} + +func (e *deferred) PType() px.Type { + return DeferredMetaType +} + +func (e *deferred) Get(key string) (value px.Value, ok bool) { + switch key { + case `name`: + return stringValue(e.name), true + case `arguments`: + return e.arguments, true + } + return nil, false +} + +func (e *deferred) InitHash() px.OrderedMap { + return WrapHash([]*HashEntry{WrapHashEntry2(`name`, stringValue(e.name)), WrapHashEntry2(`arguments`, e.arguments)}) +} + +func (e *deferred) Resolve(c px.Context, scope px.Keyed) px.Value { + fn := e.Name() + da := e.Arguments() + + var args []px.Value + if fn[0] == '$' { + vn := fn[1:] + vv, ok := scope.Get(stringValue(vn)) + if !ok { + panic(px.Error(px.UnknownVariable, issue.H{`name`: vn})) + } + if da.Len() == 0 { + // No point digging with zero arguments + return vv + } + fn = `dig` + args = append(make([]px.Value, 0, 1+da.Len()), vv) + } else { + args = make([]px.Value, 0, da.Len()) + } + args = da.AppendTo(args) + for i, a := range args { + args[i] = ResolveDeferred(c, a, scope) + } + return px.Call(c, fn, args, nil) +} + +// ResolveDeferred will resolve all occurrences of a DeferredValue in its +// given argument. Array and Hash arguments will be resolved recursively. 
+func ResolveDeferred(c px.Context, a px.Value, scope px.Keyed) px.Value { + switch a := a.(type) { + case Deferred: + return a.Resolve(c, scope) + case *DeferredType: + return a.Resolve(c) + case *Array: + return a.Map(func(v px.Value) px.Value { + return ResolveDeferred(c, v, scope) + }) + case *Hash: + return a.MapEntries(func(v px.MapEntry) px.MapEntry { + return WrapHashEntry(ResolveDeferred(c, v.Key(), scope), ResolveDeferred(c, v.Value(), scope)) + }) + default: + return a + } +} diff --git a/vendor/github.com/lyraproj/pcore/types/deferredtype.go b/vendor/github.com/lyraproj/pcore/types/deferredtype.go new file mode 100644 index 0000000..a6bb5fd --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/deferredtype.go @@ -0,0 +1,98 @@ +package types + +import ( + "io" + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +var deferredMetaType px.ObjectType + +func init() { + deferredMetaType = newGoObjectType(`DeferredType`, reflect.TypeOf(&DeferredType{}), `{}`) +} + +type DeferredType struct { + tn string + params []px.Value + resolved px.Type +} + +func NewDeferredType(name string, params ...px.Value) *DeferredType { + return &DeferredType{tn: name, params: params} +} + +func (dt *DeferredType) String() string { + return px.ToString(dt) +} + +func (dt *DeferredType) Equals(other interface{}, guard px.Guard) bool { + if ot, ok := other.(*DeferredType); ok { + return dt.tn == ot.tn + } + return false +} + +func (dt *DeferredType) ToString(bld io.Writer, s px.FormatContext, g px.RDetect) { + utils.WriteString(bld, `DeferredType(`) + utils.WriteString(bld, dt.tn) + if dt.params != nil { + utils.WriteString(bld, `, `) + WrapValues(dt.params).ToString(bld, s.Subsequent(), g) + } + utils.WriteByte(bld, ')') +} + +func (dt *DeferredType) PType() px.Type { + return deferredMetaType +} + +func (dt *DeferredType) Name() string { + return dt.tn +} + +func (dt *DeferredType) Resolve(c px.Context) px.Type { + if dt.resolved == nil { + if dt.params != nil { + if dt.Name() == `TypeSet` && len(dt.params) == 1 { + if ih, ok := dt.params[0].(px.OrderedMap); ok { + dt.resolved = newTypeSetType2(ih, c.Loader()) + } + } else { + ar := resolveValue(c, WrapValues(dt.params)).(*Array) + dt.resolved = ResolveWithParams(c, dt.tn, ar.AppendTo(make([]px.Value, 0, ar.Len()))) + } + } else { + dt.resolved = Resolve(c, dt.tn) + } + } + return dt.resolved +} + +func (dt *DeferredType) Parameters() []px.Value { + return dt.params +} + +func resolveValue(c px.Context, v px.Value) (rv px.Value) { + switch v := v.(type) { + case *DeferredType: + rv = v.Resolve(c) + case Deferred: + rv = v.Resolve(c, emptyMap) + case *Array: + rv = v.Map(func(e px.Value) px.Value { return resolveValue(c, e) }) + case *HashEntry: + rv = resolveEntry(c, v) + case *Hash: + rv = v.MapEntries(func(he px.MapEntry) px.MapEntry { return resolveEntry(c, he) }) + default: + rv = v + } + return +} + +func resolveEntry(c px.Context, he px.MapEntry) px.MapEntry { + return WrapHashEntry(resolveValue(c, he.Key()), resolveValue(c, he.Value())) +} diff --git a/vendor/github.com/lyraproj/pcore/types/enumtype.go b/vendor/github.com/lyraproj/pcore/types/enumtype.go new file mode 100644 index 0000000..cec1e07 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/enumtype.go @@ -0,0 +1,229 @@ +package types + +import ( + "io" + "reflect" + "strings" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type EnumType struct { + caseInsensitive bool + values []string +} + +var EnumMetaType 
px.ObjectType + +func init() { + EnumMetaType = newObjectType(`Pcore::EnumType`, + `Pcore::ScalarDataType { + attributes => { + values => Array[String[1]], + case_insensitive => { + type => Boolean, + value => false + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newEnumType2(args...) + }) +} + +func DefaultEnumType() *EnumType { + return enumTypeDefault +} + +func NewEnumType(enums []string, caseInsensitive bool) *EnumType { + if caseInsensitive { + top := len(enums) + if top > 0 { + lce := make([]string, top) + for i, v := range enums { + lce[i] = strings.ToLower(v) + } + enums = lce + } + } + return &EnumType{caseInsensitive, enums} +} + +func newEnumType2(args ...px.Value) *EnumType { + return newEnumType3(WrapValues(args)) +} + +func newEnumType3(args px.List) *EnumType { + if args.Len() == 0 { + return DefaultEnumType() + } + var enums []string + top := args.Len() + caseInsensitive := false + first := args.At(0) + if top == 1 { + switch first := first.(type) { + case stringValue: + enums = []string{first.String()} + case *Array: + return newEnumType3(first) + default: + panic(illegalArgumentType(`Enum[]`, 0, `String or Array[String]`, args.At(0))) + } + } else { + if ar, ok := first.(*Array); ok { + enumArgs := ar.AppendTo(make([]px.Value, 0, ar.Len()+top-1)) + for i := 1; i < top; i++ { + enumArgs = append(enumArgs, args.At(i)) + } + if len(enumArgs) == 0 { + return DefaultEnumType() + } + args = WrapValues(enumArgs) + top = args.Len() + } + + enums = make([]string, top) + args.EachWithIndex(func(arg px.Value, idx int) { + str, ok := arg.(stringValue) + if !ok { + if ci, ok := arg.(booleanValue); ok && idx == top-1 { + caseInsensitive = ci.Bool() + return + } + panic(illegalArgumentType(`Enum[]`, idx, `String`, arg)) + } + enums[idx] = string(str) + }) + } + return NewEnumType(enums, caseInsensitive) +} + +func (t *EnumType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *EnumType) Default() px.Type { + return enumTypeDefault +} + +func (t *EnumType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*EnumType); ok { + return t.caseInsensitive == ot.caseInsensitive && len(t.values) == len(ot.values) && utils.ContainsAllStrings(t.values, ot.values) + } + return false +} + +func (t *EnumType) Generic() px.Type { + return enumTypeDefault +} + +func (t *EnumType) Get(key string) (px.Value, bool) { + switch key { + case `values`: + return WrapValues(t.enums()), true + case `case_insensitive`: + return booleanValue(t.caseInsensitive), true + default: + return nil, false + } +} + +func (t *EnumType) IsAssignable(o px.Type, g px.Guard) bool { + if len(t.values) == 0 { + switch o.(type) { + case *stringType, *vcStringType, *scStringType, *EnumType, *PatternType: + return true + } + return false + } + + if st, ok := o.(*vcStringType); ok { + return px.IsInstance(t, stringValue(st.value)) + } + + if en, ok := o.(*EnumType); ok { + oEnums := en.values + if len(oEnums) > 0 && (t.caseInsensitive || !en.caseInsensitive) { + for _, v := range en.values { + if !px.IsInstance(t, stringValue(v)) { + return false + } + } + return true + } + } + return false +} + +func (t *EnumType) IsInstance(o px.Value, g px.Guard) bool { + if str, ok := o.(stringValue); ok { + if len(t.values) == 0 { + return true + } + s := string(str) + if t.caseInsensitive { + s = strings.ToLower(s) + } + for _, v := range t.values { + if v == s { + return true + } + } + } + return false +} + +func (t *EnumType) MetaType() px.ObjectType { + return EnumMetaType +} + +func (t *EnumType) Name() 
string { + return `Enum` +} + +func (t *EnumType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(`x`), true +} + +func (t *EnumType) String() string { + return px.ToString2(t, None) +} + +func (t *EnumType) Parameters() []px.Value { + result := t.enums() + if t.caseInsensitive { + result = append(result, BooleanTrue) + } + return result +} + +func (t *EnumType) CanSerializeAsString() bool { + return true +} + +func (t *EnumType) SerializationString() string { + return t.String() +} + +func (t *EnumType) ToString(b io.Writer, f px.FormatContext, g px.RDetect) { + TypeToString(t, b, f, g) +} + +func (t *EnumType) PType() px.Type { + return &TypeType{t} +} + +func (t *EnumType) enums() []px.Value { + top := len(t.values) + if top == 0 { + return px.EmptyValues + } + v := make([]px.Value, top) + for idx, e := range t.values { + v[idx] = stringValue(e) + } + return v +} + +var enumTypeDefault = &EnumType{false, []string{}} diff --git a/vendor/github.com/lyraproj/pcore/types/floattype.go b/vendor/github.com/lyraproj/pcore/types/floattype.go new file mode 100644 index 0000000..4734770 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/floattype.go @@ -0,0 +1,392 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "math" + "reflect" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type ( + FloatType struct { + min float64 + max float64 + } + + // floatValue represents float64 as a pcore.Value + floatValue float64 +) + +var floatTypeDefault = &FloatType{-math.MaxFloat64, math.MaxFloat64} +var floatType32 = &FloatType{-math.MaxFloat32, math.MaxFloat32} + +var FloatMetaType px.ObjectType + +func init() { + FloatMetaType = newObjectType(`Pcore::FloatType`, + `Pcore::NumericType { + attributes => { + from => { type => Optional[Float], value => undef }, + to => { type => Optional[Float], value => undef } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newFloatType2(args...) 
+ }) + + newGoConstructor2(`Float`, + func(t px.LocalTypes) { + t.Type(`Convertible`, `Variant[Numeric, Boolean, Pattern[/`+FloatPattern+`/], Timespan, Timestamp]`) + t.Type(`NamedArgs`, `Struct[{from => Convertible, Optional[abs] => Boolean}]`) + }, + + func(d px.Dispatch) { + d.Param(`Convertible`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return numberFromPositionalArgs(args, false) + }) + }, + + func(d px.Dispatch) { + d.Param(`NamedArgs`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return numberFromNamedArgs(args, false) + }) + }, + ) +} + +func DefaultFloatType() *FloatType { + return floatTypeDefault +} + +func NewFloatType(min float64, max float64) *FloatType { + if min == -math.MaxFloat64 && max == math.MaxFloat64 { + return DefaultFloatType() + } + if min > max { + panic(illegalArguments(`Float[]`, `min is not allowed to be greater than max`)) + } + return &FloatType{min, max} +} + +func newFloatType2(limits ...px.Value) *FloatType { + argc := len(limits) + if argc == 0 { + return floatTypeDefault + } + min, ok := toFloat(limits[0]) + if !ok { + if _, ok = limits[0].(*DefaultValue); !ok { + panic(illegalArgumentType(`Float[]`, 0, `Float`, limits[0])) + } + min = -math.MaxFloat64 + } + + var max float64 + switch argc { + case 1: + max = math.MaxFloat64 + case 2: + if max, ok = toFloat(limits[1]); !ok { + if _, ok = limits[1].(*DefaultValue); !ok { + panic(illegalArgumentType(`Float[]`, 1, `Float`, limits[1])) + } + max = math.MaxFloat64 + } + default: + panic(illegalArgumentCount(`Float`, `0 - 2`, len(limits))) + } + return NewFloatType(min, max) +} + +func (t *FloatType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *FloatType) Default() px.Type { + return floatTypeDefault +} + +func (t *FloatType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*FloatType); ok { + return t.min == ot.min && t.max == ot.max + } + return false +} + +func (t *FloatType) Generic() px.Type { + return floatTypeDefault +} + +func (t *FloatType) Get(key string) (px.Value, bool) { + switch key { + case `from`: + v := px.Undef + if t.min != -math.MaxFloat64 { + v = floatValue(t.min) + } + return v, true + case `to`: + v := px.Undef + if t.max != math.MaxFloat64 { + v = floatValue(t.max) + } + return v, true + default: + return nil, false + } +} + +func (t *FloatType) IsAssignable(o px.Type, g px.Guard) bool { + if ft, ok := o.(*FloatType); ok { + return t.min <= ft.min && t.max >= ft.max + } + return false +} + +func (t *FloatType) IsInstance(o px.Value, g px.Guard) bool { + if n, ok := toFloat(o); ok { + return t.min <= n && n <= t.max + } + return false +} + +func (t *FloatType) MetaType() px.ObjectType { + return FloatMetaType +} + +func (t *FloatType) Min() float64 { + return t.min +} + +func (t *FloatType) Max() float64 { + return t.max +} + +func (t *FloatType) Name() string { + return `Float` +} + +func (t *FloatType) Parameters() []px.Value { + if t.min == -math.MaxFloat64 { + if t.max == math.MaxFloat64 { + return px.EmptyValues + } + return []px.Value{WrapDefault(), floatValue(t.max)} + } + if t.max == math.MaxFloat64 { + return []px.Value{floatValue(t.min)} + } + return []px.Value{floatValue(t.min), floatValue(t.max)} +} + +func (t *FloatType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(float64(0.0)), true +} + +func (t *FloatType) CanSerializeAsString() bool { + return true +} + +func (t *FloatType) SerializationString() string { + return t.String() +} + +func (t *FloatType) 
String() string { + return px.ToString2(t, None) +} + +func (t *FloatType) IsUnbounded() bool { + return t.min == -math.MaxFloat64 && t.max == math.MaxFloat64 +} + +func (t *FloatType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *FloatType) PType() px.Type { + return &TypeType{t} +} + +func WrapFloat(val float64) px.Float { + return floatValue(val) +} + +func (fv floatValue) Abs() float64 { + f := float64(fv) + if f < 0 { + return -f + } + return f +} + +func (fv floatValue) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(floatValue); ok { + return fv == ov + } + return false +} + +func (fv floatValue) Float() float64 { + return float64(fv) +} + +func (fv floatValue) Int() int64 { + return int64(fv.Float()) +} + +func (fv floatValue) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(fv.Float()) +} + +func (fv floatValue) ReflectTo(c px.Context, value reflect.Value) { + switch value.Kind() { + case reflect.Float64, reflect.Float32: + value.SetFloat(float64(fv)) + return + case reflect.Interface: + value.Set(reflect.ValueOf(float64(fv))) + return + case reflect.Ptr: + switch value.Type().Elem().Kind() { + case reflect.Float64: + f := float64(fv) + value.Set(reflect.ValueOf(&f)) + return + case reflect.Float32: + f32 := float32(fv) + value.Set(reflect.ValueOf(&f32)) + return + } + } + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: `Float`, `actual`: value.Kind().String()})) +} + +func (fv floatValue) String() string { + return fmt.Sprintf(`%v`, fv.Float()) +} + +func (fv floatValue) ToKey(b *bytes.Buffer) { + n := math.Float64bits(float64(fv)) + b.WriteByte(1) + b.WriteByte(HkFloat) + b.WriteByte(byte(n >> 56)) + b.WriteByte(byte(n >> 48)) + b.WriteByte(byte(n >> 40)) + b.WriteByte(byte(n >> 32)) + b.WriteByte(byte(n >> 24)) + b.WriteByte(byte(n >> 16)) + b.WriteByte(byte(n >> 8)) + b.WriteByte(byte(n)) +} + +var defaultFormatP = newFormat(`%g`) +var defaultFormatS = newFormat(`%#g`) + +func (fv floatValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), fv.PType()) + switch f.FormatChar() { + case 'd', 'x', 'X', 'o', 'b', 'B': + integerValue(fv.Int()).ToString(b, px.NewFormatContext(DefaultIntegerType(), f, s.Indentation()), g) + case 'p': + f.ApplyStringFlags(b, floatGFormat(defaultFormatP, float64(fv)), false) + case 'e', 'E', 'f': + _, err := fmt.Fprintf(b, f.OrigFormat(), float64(fv)) + if err != nil { + panic(err) + } + case 'g', 'G': + _, err := io.WriteString(b, floatGFormat(f, float64(fv))) + if err != nil { + panic(err) + } + case 's': + f.ApplyStringFlags(b, floatGFormat(defaultFormatS, float64(fv)), f.IsAlt()) + case 'a', 'A': + // TODO: Implement this or list as limitation? + panic(s.UnsupportedFormat(fv.PType(), `dxXobBeEfgGaAsp`, f)) + default: + panic(s.UnsupportedFormat(fv.PType(), `dxXobBeEfgGaAsp`, f)) + } +} + +func floatGFormat(f px.Format, value float64) string { + str := fmt.Sprintf(f.WithoutWidth().OrigFormat(), value) + sc := byte('e') + if f.FormatChar() == 'G' { + sc = 'E' + } + if strings.IndexByte(str, sc) >= 0 { + // Scientific notation in use. + return str + } + + // Go might strip both trailing zeroes and decimal point when using '%g'. 
The + // decimal point and trailing zeroes are restored here + totLen := len(str) + prc := f.Precision() + if prc < 0 && !f.IsAlt() { + prc = 6 + } + + dotIndex := strings.IndexByte(str, '.') + missing := 0 + if prc >= 0 { + if dotIndex >= 0 { + missing = prc - (totLen - 1) + } else { + missing = prc - totLen + if missing == 0 { + // Impossible to add a fraction part. Force scientific notation + return fmt.Sprintf(f.ReplaceFormatChar(sc).OrigFormat(), value) + } + } + } + + b := bytes.NewBufferString(``) + + padByte := byte(' ') + if f.IsZeroPad() { + padByte = '0' + } + pad := 0 + if f.Width() > 0 { + pad = f.Width() - (totLen + missing + 1) + } + + if !f.IsLeft() { + for ; pad > 0; pad-- { + b.WriteByte(padByte) + } + } + + b.WriteString(str) + if dotIndex < 0 { + b.WriteByte('.') + if missing == 0 { + b.WriteByte('0') + } + } + for missing > 0 { + b.WriteByte('0') + missing-- + } + + if f.IsLeft() { + for ; pad > 0; pad-- { + b.WriteByte(padByte) + } + } + return b.String() +} + +func (fv floatValue) PType() px.Type { + f := float64(fv) + return &FloatType{f, f} +} diff --git a/vendor/github.com/lyraproj/pcore/types/format.go b/vendor/github.com/lyraproj/pcore/types/format.go new file mode 100644 index 0000000..7eb6f64 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/format.go @@ -0,0 +1,836 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "sort" + "strconv" + "strings" + "unicode/utf8" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + format struct { + alt bool + left bool + zeroPad bool + formatChar byte + plus byte + precision int + width int + leftDelimiter byte + separator string + separator2 string + origFmt string + containerFormats px.FormatMap + } + + formatContext struct { + indentation px.Indentation + formatMap px.FormatMap + properties map[string]string + } + + indentation struct { + first bool + indenting bool + level int + padding string + } +) + +func (f *format) Equals(other interface{}, guard px.Guard) bool { + if of, ok := other.(*format); ok { + return f.alt == of.alt && + f.left == of.left && + f.zeroPad == of.zeroPad && + f.formatChar == of.formatChar && + f.plus == of.plus && + f.precision == of.precision && + f.width == of.width && + f.leftDelimiter == of.leftDelimiter && + f.separator == of.separator && + f.separator2 == of.separator2 && + f.origFmt == of.origFmt && + f.containerFormats.Equals(of.containerFormats, nil) + } + return false +} + +func (f *format) String() string { + return f.origFmt +} + +func (f *format) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + utils.WriteString(bld, f.origFmt) +} + +func (f *format) PType() px.Type { + return WrapRuntime(f).PType() +} + +var DefaultProgramFormat = simpleFormat('p') + +var DefaultAnyFormat = simpleFormat('s') + +var PrettyProgramFormat = newFormat(`%#p`) +var PrettyContainerFormats = px.FormatMap(WrapHash([]*HashEntry{WrapHashEntry(DefaultAnyType(), PrettyProgramFormat)})) +var PrettyArrayFormat = basicAltFormat('a', `,`, '[', PrettyContainerFormats) +var PrettyHashFormat = basicAltFormat('h', ` => `, '{', PrettyContainerFormats) +var PrettyObjectFormat = basicAltFormat('p', ` => `, '(', PrettyContainerFormats) + +var PrettyIndentation = newIndentation(true, 0) + +func init() { + DefaultArrayFormat.(*format).containerFormats = DefaultContainerFormats + DefaultHashFormat.(*format).containerFormats = DefaultContainerFormats + DefaultObjectFormat.(*format).containerFormats = DefaultContainerFormats + 
DefaultArrayContainerFormat.(*format).containerFormats = DefaultContainerFormats + DefaultHashContainerFormat.(*format).containerFormats = DefaultContainerFormats + DefaultObjectContainerFormat.(*format).containerFormats = DefaultContainerFormats + PrettyProgramFormat.(*format).containerFormats = PrettyContainerFormats + + px.DefaultFormat = DefaultAnyFormat + px.DefaultFormatContext = None + px.Pretty = newFormatContext2(PrettyIndentation, px.FormatMap(WrapHash([]*HashEntry{ + WrapHashEntry(DefaultObjectType(), PrettyObjectFormat), + WrapHashEntry(DefaultTypeType(), PrettyObjectFormat), + WrapHashEntry(DefaultFloatType(), simpleFormat('f')), + WrapHashEntry(DefaultNumericType(), simpleFormat('d')), + WrapHashEntry(DefaultStringType(), PrettyProgramFormat), + WrapHashEntry(DefaultUriType(), PrettyProgramFormat), + WrapHashEntry(DefaultSemVerType(), PrettyProgramFormat), + WrapHashEntry(DefaultSemVerRangeType(), PrettyProgramFormat), + WrapHashEntry(DefaultTimestampType(), PrettyProgramFormat), + WrapHashEntry(DefaultTimespanType(), PrettyProgramFormat), + WrapHashEntry(DefaultArrayType(), PrettyArrayFormat), + WrapHashEntry(DefaultHashType(), PrettyHashFormat), + WrapHashEntry(DefaultBinaryType(), simpleFormat('B')), + WrapHashEntry(DefaultAnyType(), DefaultAnyFormat), + })), map[string]string{}) + + px.NewFormatContext = newFormatContext + px.NewFormatContext2 = newFormatContext2 + px.NewFormatContext3 = newFormatContext3 + px.NewIndentation = newIndentation + px.NewFormat = newFormat + + px.PrettyExpanded = px.Pretty.WithProperties(map[string]string{`expanded`: `true`}) +} + +var DefaultArrayFormat = basicFormat('a', `,`, '[', nil) +var DefaultHashFormat = basicFormat('h', ` => `, '{', nil) +var DefaultObjectFormat = basicFormat('p', ` => `, '(', nil) + +var DefaultArrayContainerFormat = basicFormat('p', `,`, '[', nil) +var DefaultHashContainerFormat = basicFormat('p', ` => `, '{', nil) +var DefaultObjectContainerFormat = basicFormat('p', ` => `, '(', nil) + +var DefaultIndentation = newIndentation(false, 0) + +var DefaultFormats = px.FormatMap(WrapHash([]*HashEntry{ + WrapHashEntry(DefaultObjectType(), DefaultObjectFormat), + WrapHashEntry(DefaultTypeType(), DefaultObjectFormat), + WrapHashEntry(DefaultFloatType(), simpleFormat('f')), + WrapHashEntry(DefaultNumericType(), simpleFormat('d')), + WrapHashEntry(DefaultArrayType(), DefaultArrayFormat), + WrapHashEntry(DefaultHashType(), DefaultHashFormat), + WrapHashEntry(DefaultBinaryType(), simpleFormat('B')), + WrapHashEntry(DefaultAnyType(), DefaultAnyFormat), +})) + +var DefaultContainerFormats = px.FormatMap(WrapHash([]*HashEntry{ + WrapHashEntry(DefaultObjectType(), DefaultObjectContainerFormat), + WrapHashEntry(DefaultTypeType(), DefaultObjectContainerFormat), + WrapHashEntry(DefaultFloatType(), DefaultProgramFormat), + WrapHashEntry(DefaultNumericType(), DefaultProgramFormat), + WrapHashEntry(DefaultArrayType(), DefaultArrayContainerFormat), + WrapHashEntry(DefaultHashType(), DefaultHashContainerFormat), + WrapHashEntry(DefaultBinaryType(), DefaultProgramFormat), + WrapHashEntry(DefaultAnyType(), DefaultProgramFormat), +})) + +var delimiters = []byte{'[', '{', '(', '<', '|'} +var delimiterPairs = map[byte][2]byte{ + '[': {'[', ']'}, + '{': {'{', '}'}, + '(': {'(', ')'}, + '<': {'<', '>'}, + '|': {'|', '|'}, + ' ': {0, 0}, + 0: {'[', ']'}, +} + +var None = newFormatContext2(DefaultIndentation, DefaultFormats, nil) + +var Expanded = newFormatContext2(DefaultIndentation, DefaultFormats, map[string]string{`expanded`: `true`}) + +var 
Program = newFormatContext2(DefaultIndentation, px.FormatMap(singleMap(DefaultAnyType(), DefaultObjectFormat)), nil) + +func newFormatContext(t px.Type, format px.Format, indentation px.Indentation) px.FormatContext { + return &formatContext{indentation, WrapHash([]*HashEntry{WrapHashEntry(t, format)}), nil} +} + +func newFormatContext2(indentation px.Indentation, formatMap px.FormatMap, properties map[string]string) px.FormatContext { + return &formatContext{indentation, formatMap, properties} +} + +var typeStringFormat = NewVariantType(DefaultStringType(), DefaultDefaultType(), DefaultHashType()) + +func newFormatContext3(value px.Value, format px.Value) (context px.FormatContext, err error) { + px.AssertInstance(`String format`, typeStringFormat, format) + + defer func() { + if r := recover(); r != nil { + var ok bool + if err, ok = r.(issue.Reported); !ok { + panic(r) + } + } + }() + + switch format.(type) { + case stringValue: + context = px.NewFormatContext(value.PType(), newFormat(format.String()), DefaultIndentation) + case *DefaultValue: + context = px.DefaultFormatContext + default: + context = newFormatContext2(DefaultIndentation, mergeFormats(DefaultFormats, NewFormatMap(format.(*Hash))), nil) + } + return +} + +func mergeFormats(lower px.FormatMap, higher px.FormatMap) px.FormatMap { + if lower == nil || lower.Len() == 0 { + return higher + } + if higher == nil || higher.Len() == 0 { + return lower + } + + higherKeys := higher.Keys() + normLower := WrapHash2(lower.Entries().Reject(func(lev px.Value) bool { + le := lev.(*HashEntry) + return higherKeys.Any(func(hk px.Value) bool { + return !hk.Equals(le.Key(), nil) && px.IsAssignable(hk.(px.Type), le.Key().(px.Type)) + }) + })) + + merged := make([]*HashEntry, 0, 8) + normLower.Keys().AddAll(higherKeys).Unique().Each(func(k px.Value) { + if low, ok := normLower.Get(k); ok { + if high, ok := higher.Get(k); ok { + merged = append(merged, WrapHashEntry(k, merge(low.(px.Format), high.(px.Format)))) + } else { + merged = append(merged, WrapHashEntry(k, low)) + } + } else { + if high, ok := higher.Get(k); ok { + merged = append(merged, WrapHashEntry(k, high)) + } + } + }) + + sort.Slice(merged, func(ax, bx int) bool { + a := merged[ax].Key().(px.Type) + b := merged[bx].Key().(px.Type) + if a.Equals(b, nil) { + return false + } + ab := px.IsAssignable(b, a) + ba := px.IsAssignable(a, b) + if ab && !ba { + return true + } + if !ab && ba { + return false + } + ra := typeRank(a) + rb := typeRank(b) + if ra < rb { + return true + } + if ra > rb { + return false + } + return strings.Compare(a.String(), b.String()) < 0 + }) + return px.FormatMap(WrapHash(merged)) +} + +func merge(low px.Format, high px.Format) px.Format { + sep := high.Separator(NoString) + if sep == NoString { + sep = low.Separator(NoString) + } + sep2 := high.Separator2(NoString) + if sep2 == NoString { + sep2 = low.Separator2(NoString) + } + + return &format{ + origFmt: high.OrigFormat(), + alt: high.IsAlt(), + leftDelimiter: high.LeftDelimiter(), + formatChar: high.FormatChar(), + zeroPad: high.IsZeroPad(), + precision: high.Precision(), + left: high.IsLeft(), + plus: high.Plus(), + width: high.Width(), + separator2: sep2, + separator: sep, + containerFormats: mergeFormats(low.ContainerFormats(), high.ContainerFormats()), + } +} + +func typeRank(pt px.Type) int { + switch pt.(type) { + case *NumericType, *IntegerType, *FloatType: + return 13 + case *stringType, *vcStringType, *scStringType: + return 12 + case *EnumType: + return 11 + case *PatternType: + return 10 + case 
*ArrayType: + return 4 + case *TupleType: + return 3 + case *HashType: + return 2 + case *StructType: + return 1 + } + return 0 +} + +var typeStringFormatTypeHash = NewHashType(DefaultTypeType(), NewVariantType(DefaultStringType(), DefaultHashType()), nil) + +func NewFormatMap(h *Hash) px.FormatMap { + px.AssertInstance(`String format type hash`, typeStringFormatTypeHash, h) + result := make([]*HashEntry, h.Len()) + h.EachWithIndex(func(elem px.Value, idx int) { + entry := elem.(*HashEntry) + pt := entry.Key().(px.Type) + v := entry.Value() + if s, ok := v.(stringValue); ok { + result[idx] = WrapHashEntry(pt, newFormat(s.String())) + } else { + result[idx] = WrapHashEntry(pt, FormatFromHash(v.(*Hash))) + } + }) + return px.FormatMap(WrapHash(result)) +} + +var typeStringFormatHash = NewStructType([]*StructElement{ + newStructElement2(`format`, DefaultStringType()), + NewStructElement(newOptionalType3(`separator`), DefaultStringType()), + NewStructElement(newOptionalType3(`separator2`), DefaultStringType()), + NewStructElement(newOptionalType3(`string_formats`), DefaultHashType()), +}) + +func FormatFromHash(h *Hash) px.Format { + px.AssertInstance(`String format hash`, typeStringFormatHash, h) + + stringArg := func(key string, required bool) string { + v := h.Get5(key, undef) + switch v.(type) { + case stringValue: + return v.String() + default: + return NoString + } + } + + var cf px.FormatMap + if v := h.Get5(`string_formats`, undef); !v.Equals(undef, nil) { + cf = NewFormatMap(v.(*Hash)) + } + return parseFormat(stringArg(`format`, true), stringArg(`separator`, false), stringArg(`separator2`, false), cf) +} + +func (c *formatContext) Indentation() px.Indentation { + return c.indentation +} + +func (c *formatContext) FormatMap() px.FormatMap { + return c.formatMap +} + +func (c *formatContext) Property(key string) (string, bool) { + if c.properties != nil { + pv, ok := c.properties[key] + return pv, ok + } + return ``, false +} + +func (c *formatContext) Properties() map[string]string { + return c.properties +} + +func (c *formatContext) SetProperty(key, value string) { + if c.properties == nil { + c.properties = map[string]string{key: value} + } else { + c.properties[key] = value + } +} + +func (c *formatContext) Subsequent() px.FormatContext { + si := c.Indentation() + if si.Breaks() { + // Never break between the type and the start array marker + return newFormatContext2(newIndentation(si.IsIndenting(), si.Level()), c.FormatMap(), c.Properties()) + } + return c +} + +func (c *formatContext) UnsupportedFormat(t px.Type, supportedFormats string, actualFormat px.Format) error { + return px.Error(px.UnsupportedStringFormat, issue.H{`format`: actualFormat.FormatChar(), `type`: t.Name(), `supported_formats`: supportedFormats}) +} + +func (c *formatContext) WithProperties(properties map[string]string) px.FormatContext { + if c.properties != nil { + merged := make(map[string]string, len(c.properties)+len(properties)) + for k, v := range c.properties { + merged[k] = v + } + for k, v := range properties { + merged[k] = v + } + properties = merged + } + return newFormatContext2(c.indentation, c.formatMap, properties) +} + +func newIndentation(indenting bool, level int) px.Indentation { + return newIndentation2(true, indenting, level) +} + +func newIndentation2(first bool, indenting bool, level int) px.Indentation { + return &indentation{first, indenting, level, strings.Repeat(` `, level)} +} + +func (i *indentation) Breaks() bool { + return i.indenting && i.level > 0 && !i.first +} + +func (i 
*indentation) Level() int { + return i.level +} + +func (i *indentation) Increase(indenting bool) px.Indentation { + return newIndentation2(true, indenting, i.level+1) +} + +func (i *indentation) Indenting(indenting bool) px.Indentation { + if i.indenting == indenting { + return i + } + return &indentation{i.first, indenting, i.level, i.padding} +} + +func (i *indentation) IsFirst() bool { + return i.first +} + +func (i *indentation) IsIndenting() bool { + return i.indenting +} + +func (i *indentation) Padding() string { + return i.padding +} + +func (i *indentation) Subsequent() px.Indentation { + if i.first { + return &indentation{false, i.indenting, i.level, i.padding} + } + return i +} + +// NewFormat parses a format string into a Format +func newFormat(format string) px.Format { + return parseFormat(format, NoString, NoString, nil) +} + +func simpleFormat(formatChar byte) px.Format { + return basicFormat(formatChar, NoString, '[', nil) +} + +func basicFormat(formatChar byte, sep2 string, leftDelimiter byte, containerFormats px.FormatMap) px.Format { + return &format{ + formatChar: formatChar, + precision: -1, + width: -1, + origFmt: `%` + string(formatChar), + separator: `,`, + separator2: sep2, + leftDelimiter: leftDelimiter, + containerFormats: containerFormats, + } +} + +func basicAltFormat(formatChar byte, sep2 string, leftDelimiter byte, containerFormats px.FormatMap) px.Format { + return &format{ + formatChar: formatChar, + alt: true, + precision: -1, + width: -1, + origFmt: `%` + string(formatChar), + separator: `,`, + separator2: sep2, + leftDelimiter: leftDelimiter, + containerFormats: containerFormats, + } +} + +func parseFormat(origFmt string, separator string, separator2 string, containerFormats px.FormatMap) px.Format { + group := px.FormatPattern.FindStringSubmatch(origFmt) + if group == nil { + panic(px.Error(px.InvalidStringFormatSpec, issue.H{`format`: origFmt})) + } + + flags := group[1] + + plus := byte(0) + if hasDelimOnce(flags, origFmt, ' ') { + plus = ' ' + } else if hasDelimOnce(flags, origFmt, '+') { + plus = '+' + } + + foundDelim := byte(0) + for _, delim := range delimiters { + if hasDelimOnce(flags, origFmt, delim) { + if foundDelim != 0 { + panic(px.Error(px.InvalidStringFormatDelimiter, issue.H{`delimiter`: foundDelim})) + } + foundDelim = delim + } + } + + if foundDelim == 0 && plus == ' ' { + foundDelim = plus + } + + width := -1 + prc := -1 + if tmp := group[2]; tmp != `` { + width, _ = strconv.Atoi(tmp) + } + if tmp := group[3]; tmp != `` { + prc, _ = strconv.Atoi(tmp) + } + return &format{ + origFmt: origFmt, + formatChar: group[4][0], + left: hasDelimOnce(flags, origFmt, '-'), + alt: hasDelimOnce(flags, origFmt, '#'), + zeroPad: hasDelimOnce(flags, origFmt, '0'), + plus: plus, + leftDelimiter: foundDelim, + width: width, + precision: prc, + separator: separator, + separator2: separator2, + containerFormats: containerFormats, + } +} + +func (f *format) unParse() string { + b := bytes.NewBufferString(`%`) + if f.zeroPad { + b.Write([]byte{'0'}) + } + if f.plus != 0 { + b.Write([]byte{f.plus}) + } + if f.left { + b.Write([]byte{'-'}) + } + if f.leftDelimiter != 0 && f.leftDelimiter != f.plus { + b.Write([]byte{f.leftDelimiter}) + } + if f.width >= 0 { + b.WriteString(strconv.Itoa(f.width)) + } + if f.precision >= 0 { + b.Write([]byte{'.'}) + b.WriteString(strconv.Itoa(f.precision)) + } + if f.alt { + b.Write([]byte{'#'}) + } + b.Write([]byte{f.formatChar}) + return b.String() +} + +func hasDelimOnce(flags string, format string, delim byte) bool { + 
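+	// hasDelimOnce reports whether delim occurs in flags. If the same flag appears
+	// more than once it panics with InvalidStringFormatRepeatedFlag, so a given
+	// delimiter or flag can be used at most once per format string.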
found := false + for _, b := range flags { + if byte(b) == delim { + if found { + panic(px.Error(px.InvalidStringFormatRepeatedFlag, issue.H{`format`: format})) + } + found = true + } + } + return found +} + +func (f *format) HasStringFlags() bool { + return f.left || f.width >= 0 || f.precision >= 0 +} + +func (f *format) ApplyStringFlags(b io.Writer, str string, quoted bool) { + if f.HasStringFlags() { + bld := bytes.NewBufferString(``) + if quoted { + utils.PuppetQuote(bld, str) + str = bld.String() + bld.Truncate(0) + } + bld.WriteByte('%') + if f.IsLeft() { + bld.WriteByte('-') + } + if f.Width() >= 0 { + utils.WriteString(bld, strconv.Itoa(f.Width())) + } + if f.Precision() >= 0 { + utils.WriteByte(bld, '.') + utils.WriteString(bld, strconv.Itoa(f.Precision())) + } + bld.WriteByte('s') + utils.Fprintf(b, bld.String(), str) + } else { + if quoted { + utils.PuppetQuote(b, str) + } else { + utils.WriteString(b, str) + } + } +} + +func (f *format) Width() int { + return f.width +} + +func (f *format) Precision() int { + return f.precision +} + +func (f *format) FormatChar() byte { + return f.formatChar +} + +func (f *format) Plus() byte { + return f.plus +} + +func (f *format) IsAlt() bool { + return f.alt +} + +func (f *format) IsLeft() bool { + return f.left +} + +func (f *format) IsZeroPad() bool { + return f.zeroPad +} + +func (f *format) LeftDelimiter() byte { + return f.leftDelimiter +} + +func (f *format) ContainerFormats() px.FormatMap { + return f.containerFormats +} + +func (f *format) Separator(dflt string) string { + if f.separator == NoString { + return dflt + } + return f.separator +} + +func (f *format) Separator2(dflt string) string { + if f.separator2 == NoString { + return dflt + } + return f.separator2 +} + +func (f *format) OrigFormat() string { + return f.origFmt +} + +func (f *format) ReplaceFormatChar(c byte) px.Format { + nf := &format{} + *nf = *f + nf.formatChar = c + nf.origFmt = nf.unParse() + return nf +} + +func (f *format) WithoutWidth() px.Format { + nf := &format{} + *nf = *f + nf.width = -1 + nf.left = false + nf.zeroPad = false + nf.alt = false + nf.origFmt = nf.unParse() + return nf +} + +type stringReader struct { + i int + text string +} + +func (r *stringReader) Next() (rune, bool) { + if r.i >= len(r.text) { + return 0, false + } + c := rune(r.text[r.i]) + if c < utf8.RuneSelf { + r.i++ + return c, true + } + c, size := utf8.DecodeRuneInString(r.text[r.i:]) + if c == utf8.RuneError { + panic(`invalid unicode character`) + } + r.i += size + return c, true +} + +// PuppetSprintf is like fmt.Fprintf but using named arguments accessed with %{key} formatting instructions +// and using Puppet StringFormatter for evaluating formatting specifications +func PuppetSprintf(s string, args ...px.Value) string { + buf := bytes.NewBufferString(``) + fprintf(buf, `sprintf`, s, args...) + return buf.String() +} + +// PuppetFprintf is like fmt.Fprintf but using named arguments accessed with %{key} formatting instructions +// and using Puppet StringFormatter for evaluating formatting specifications +func PuppetFprintf(buf io.Writer, s string, args ...px.Value) { + fprintf(buf, `fprintf`, s, args...) +} + +func fprintf(buf io.Writer, callerName string, s string, args ...px.Value) { + // Transform the map into a slice of values and a map that maps a key to the position + // of its value in the slice. 
+ // Transform all %{key} to %[pos] + var c rune + var ok bool + rdr := &stringReader{0, s} + + consumeAndApplyPattern := func(v px.Value) { + f := bytes.NewBufferString(`%`) + for ok { + f.WriteRune(c) + if 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' { + c, ok = rdr.Next() + break + } + c, ok = rdr.Next() + } + ctx, err := px.NewFormatContext3(v, stringValue(f.String())) + if err != nil { + panic(illegalArgument(callerName, 1, err.Error())) + } + px.ToString4(v, ctx, buf) + } + + var hashArg *Hash + + pos := 0 + top := len(args) + c, ok = rdr.Next() +nextChar: + for ok { + if c != '%' { + utils.WriteRune(buf, c) + c, ok = rdr.Next() + continue + } + + c, ok = rdr.Next() + if c == '%' { + // %% means % verbatim + utils.WriteRune(buf, c) + c, ok = rdr.Next() + continue + } + + // Both % and %{key} are allowed + e := rune(0) + if c == '{' { + e = '}' + } else if c == '<' { + e = '>' + } + + if e == 0 { + // This is a positional argument. It is allowed but there can only be one (for the + // hash as a whole) + if hashArg != nil { + panic(illegalArguments(callerName, `keyed and positional format specifications cannot be mixed`)) + } + if pos >= top { + panic(illegalArguments(callerName, `unbalanced format versus arguments`)) + } + consumeAndApplyPattern(args[pos]) + pos++ + continue + } + + if pos > 0 { + panic(illegalArguments(callerName, `keyed and positional format specifications cannot be mixed`)) + } + + if hashArg == nil { + if top == 1 { + hashArg, _ = args[0].(*Hash) + } + if hashArg == nil { + panic(illegalArguments(callerName, `keyed format specifications requires one hash argument`)) + } + } + + b := c + keyStart := rdr.i + c, ok = rdr.Next() + for ok { + if c == e { + keyEnd := rdr.i - 1 // Safe since '}' is below RuneSelf + key := s[keyStart:keyEnd] + if value, keyFound := hashArg.Get(stringValue(key)); keyFound { + c, ok = rdr.Next() + if b == '{' { + px.ToString4(value, None, buf) + } else { + consumeAndApplyPattern(value) + } + continue nextChar + } + panic(illegalArgument(callerName, 1, fmt.Sprintf("key%c%s%c not found", b, key, c))) + } + c, ok = rdr.Next() + } + panic(illegalArguments(callerName, fmt.Sprintf(`unterminated %%%c`, b))) + } +} diff --git a/vendor/github.com/lyraproj/pcore/types/function.go b/vendor/github.com/lyraproj/pcore/types/function.go new file mode 100644 index 0000000..d39617e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/function.go @@ -0,0 +1,140 @@ +package types + +import ( + "fmt" + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var typeFunctionType = NewTypeType(DefaultCallableType()) + +const keyReturnsError = `returns_error` + +var typeFunction = NewStructType([]*StructElement{ + newStructElement2(keyType, typeFunctionType), + NewStructElement(newOptionalType3(keyFinal), DefaultBooleanType()), + NewStructElement(newOptionalType3(keyOverride), DefaultBooleanType()), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations), + NewStructElement(newOptionalType3(KeyGoName), DefaultStringType()), + NewStructElement(newOptionalType3(keyReturnsError), NewBooleanType(true)), +}) + +type function struct { + annotatedMember + returnsError bool + goName string +} + +func newFunction(c px.Context, name string, container *objectType, initHash *Hash) px.ObjFunc { + f := &function{} + f.initialize(c, name, container, initHash) + return f +} + +func (f *function) initialize(c px.Context, name string, container *objectType, initHash *Hash) { + px.AssertInstance(func() string { return 
fmt.Sprintf(`initializer function for %s[%s]`, container.Label(), name) }, typeFunction, initHash) + f.annotatedMember.initialize(c, `function`, name, container, initHash) + if gn, ok := initHash.Get4(KeyGoName); ok { + f.goName = gn.String() + } + if re, ok := initHash.Get4(keyReturnsError); ok { + f.returnsError = re.(booleanValue).Bool() + } +} + +func (f *function) Call(c px.Context, receiver px.Value, block px.Lambda, args []px.Value) px.Value { + if f.CallableType().(*CallableType).CallableWith(args, block) { + if co, ok := receiver.(px.CallableObject); ok { + if result, ok := co.Call(c, f, args, block); ok { + return result + } + } + + panic(px.Error(px.InstanceDoesNotRespond, issue.H{`type`: receiver.PType(), `message`: f.name})) + } + types := make([]px.Value, len(args)) + for i, a := range args { + types[i] = a.PType() + } + panic(px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeSignatures( + []px.Signature{f.CallableType().(*CallableType)}, newTupleType2(types...), block)})) +} + +func (f *function) CallGo(c px.Context, receiver interface{}, args ...interface{}) []interface{} { + rfArgs := make([]reflect.Value, 1+len(args)) + rfArgs[0] = reflect.ValueOf(receiver) + for i, arg := range args { + rfArgs[i+1] = reflect.ValueOf(arg) + } + result := f.CallGoReflected(c, rfArgs) + rs := make([]interface{}, len(result)) + for i, ret := range result { + if ret.IsValid() { + rs[i] = ret.Interface() + } + } + return rs +} + +func (f *function) CallGoReflected(c px.Context, args []reflect.Value) []reflect.Value { + rt := args[0].Type() + m, ok := rt.MethodByName(f.goName) + if !ok { + panic(px.Error(px.InstanceDoesNotRespond, issue.H{`type`: rt.String(), `message`: f.goName})) + } + + mt := m.Type + pc := mt.NumIn() + if pc != len(args) { + panic(px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeSignatures( + []px.Signature{f.CallableType().(*CallableType)}, NewTupleType([]px.Type{}, NewIntegerType(int64(pc-1), int64(pc-1))), nil)})) + } + result := m.Func.Call(args) + oc := mt.NumOut() + + if f.ReturnsError() { + oc-- + err := result[oc].Interface() + if err != nil { + if re, ok := err.(issue.Reported); ok { + panic(re) + } + panic(px.Error(px.GoFunctionError, issue.H{`name`: f.goName, `error`: err})) + } + result = result[:oc] + } + return result +} + +func (f *function) GoName() string { + return f.goName +} + +func (f *function) ReturnsError() bool { + return f.returnsError +} + +func (f *function) Equals(other interface{}, g px.Guard) bool { + if of, ok := other.(*function); ok { + return f.override == of.override && f.name == of.name && f.final == of.final && f.typ.Equals(of.typ, g) + } + return false +} + +func (f *function) FeatureType() string { + return `function` +} + +func (f *function) Label() string { + return fmt.Sprintf(`function %s[%s]`, f.container.Label(), f.Name()) +} + +func (f *function) CallableType() px.Type { + return f.typ.(*CallableType) +} + +func (f *function) InitHash() px.OrderedMap { + return WrapStringPValue(f.initHash()) +} diff --git a/vendor/github.com/lyraproj/pcore/types/hashtype.go b/vendor/github.com/lyraproj/pcore/types/hashtype.go new file mode 100644 index 0000000..ba1d772 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/hashtype.go @@ -0,0 +1,1389 @@ +package types + +import ( + "fmt" + "io" + "math" + "reflect" + "sort" + "strings" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + HashType struct { + size *IntegerType 
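+		// keyType and valueType constrain the keys and values of instances; the size
+		// type above constrains the number of entries.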
+ keyType px.Type + valueType px.Type + } + + HashEntry struct { + key px.Value + value px.Value + } + + Hash struct { + reducedType *HashType + detailedType px.Type + entries []*HashEntry + index map[px.HashKey]int + } + + MutableHashValue struct { + Hash + } +) + +var hashTypeEmpty = &HashType{IntegerTypeZero, unitTypeDefault, unitTypeDefault} +var hashTypeDefault = &HashType{IntegerTypePositive, anyTypeDefault, anyTypeDefault} + +var HashMetaType px.ObjectType + +func init() { + HashMetaType = newObjectType(`Pcore::HashType`, + `Pcore::CollectionType { + attributes => { + key_type => { + type => Optional[Type], + value => Any + }, + value_type => { + type => Optional[Type], + value => Any + }, + }, + serialization => [ 'key_type', 'value_type', 'size_type' ] +}`, func(ctx px.Context, args []px.Value) px.Value { + return newHashType2(args...) + }, + func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*Hash) + kt := h.Get5(`key_type`, DefaultAnyType()) + vt := h.Get5(`value_type`, DefaultAnyType()) + st := h.Get5(`size_type`, PositiveIntegerType()) + return newHashType2(kt, vt, st) + }) + + newGoConstructor3([]string{`Hash`, `Struct`}, + func(t px.LocalTypes) { + t.Type(`KeyValueArray`, `Array[Tuple[Any,Any],1]`) + t.Type(`TreeArray`, `Array[Tuple[Array,Any],1]`) + t.Type(`NewHashOption`, `Enum[tree, hash_tree]`) + }, + + func(d px.Dispatch) { + d.Param(`TreeArray`) + d.OptionalParam(`NewHashOption`) + d.Function(func(c px.Context, args []px.Value) px.Value { + if len(args) < 2 { + return WrapHashFromArray(args[0].(*Array)) + } + allHashes := args[1].String() == `hash_tree` + result := NewMutableHash() + args[0].(*Array).Each(func(entry px.Value) { + tpl := entry.(*Array) + path := tpl.At(0).(*Array) + value := tpl.At(1) + if path.IsEmpty() { + // root node (index [] was included - values merge into the result) + // An array must be changed to a hash first as this is the root + // (Cannot return an array from a Hash.new) + if av, ok := value.(*Array); ok { + result.PutAll(IndexedFromArray(av)) + } else { + if hv, ok := value.(px.OrderedMap); ok { + result.PutAll(hv) + } + } + } else { + r := path.Slice(0, path.Len()-1).Reduce2(result, func(memo, idx px.Value) px.Value { + if hv, ok := memo.(*MutableHashValue); ok { + return hv.Get3(idx, func() px.Value { + x := NewMutableHash() + hv.Put(idx, x) + return x + }) + } + if av, ok := memo.(px.List); ok { + if ix, ok := idx.(integerValue); ok { + return av.At(int(ix)) + } + } + return undef + }) + if hr, ok := r.(*MutableHashValue); ok { + if allHashes { + if av, ok := value.(*Array); ok { + value = IndexedFromArray(av) + } + } + hr.Put(path.At(path.Len()-1), value) + } + } + }) + return &result.Hash + }) + }, + + func(d px.Dispatch) { + d.Param(`KeyValueArray`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return WrapHashFromArray(args[0].(*Array)) + }) + }, + + func(d px.Dispatch) { + d.Param(`Iterable`) + d.Function(func(c px.Context, args []px.Value) px.Value { + switch arg := args[0].(type) { + case *Array: + return WrapHashFromArray(arg) + case *Hash: + return arg + default: + return WrapHashFromArray(arg.(px.Arrayable).AsArray().(*Array)) + } + }) + }, + ) + px.SingletonMap = singletonMap +} + +func DefaultHashType() *HashType { + return hashTypeDefault +} + +func EmptyHashType() *HashType { + return hashTypeEmpty +} + +func NewHashType(keyType px.Type, valueType px.Type, rng *IntegerType) *HashType { + if rng == nil { + rng = IntegerTypePositive + } + if keyType == nil { + keyType = anyTypeDefault + } + if 
valueType == nil { + valueType = anyTypeDefault + } + if keyType == anyTypeDefault && valueType == anyTypeDefault && rng == IntegerTypePositive { + return DefaultHashType() + } + return &HashType{rng, keyType, valueType} +} + +func newHashType2(args ...px.Value) *HashType { + argc := len(args) + if argc == 0 { + return hashTypeDefault + } + + if argc == 1 || argc > 4 { + panic(illegalArgumentCount(`Hash[]`, `0, 2, or 3`, argc)) + } + + offset := 0 + var valueType px.Type + keyType, ok := args[0].(px.Type) + if ok { + valueType, ok = args[1].(px.Type) + if !ok { + panic(illegalArgumentType(`Hash[]`, 1, `Type`, args[1])) + } + offset += 2 + } else { + keyType = DefaultAnyType() + valueType = DefaultAnyType() + } + + var rng *IntegerType + switch argc - offset { + case 0: + rng = IntegerTypePositive + case 1: + sizeArg := args[offset] + if rng, ok = sizeArg.(*IntegerType); !ok { + var sz int64 + if sz, ok = toInt(sizeArg); !ok { + panic(illegalArgumentType(`Hash[]`, offset, `Integer or Type[Integer]`, args[2])) + } + rng = NewIntegerType(sz, math.MaxInt64) + } + case 2: + var min, max int64 + if min, ok = toInt(args[offset]); !ok { + panic(illegalArgumentType(`Hash[]`, offset, `Integer`, args[offset])) + } + if max, ok = toInt(args[offset+1]); !ok { + panic(illegalArgumentType(`Hash[]`, offset+1, `Integer`, args[offset+1])) + } + if min == 0 && max == 0 && offset == 0 { + return hashTypeEmpty + } + rng = NewIntegerType(min, max) + } + return NewHashType(keyType, valueType, rng) +} + +func (t *HashType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.size.Accept(v, g) + t.keyType.Accept(v, g) + t.valueType.Accept(v, g) +} + +func (t *HashType) Default() px.Type { + return hashTypeDefault +} + +func (t *HashType) EntryType() px.Type { + return NewTupleType([]px.Type{t.keyType, t.valueType}, nil) +} + +func (t *HashType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*HashType); ok { + return t.size.Equals(ot.size, g) && t.keyType.Equals(ot.keyType, g) && t.valueType.Equals(ot.valueType, g) + } + return false +} + +func (t *HashType) Generic() px.Type { + return NewHashType(px.GenericType(t.keyType), px.GenericType(t.valueType), nil) +} + +func (t *HashType) Get(key string) (value px.Value, ok bool) { + switch key { + case `key_type`: + return t.keyType, true + case `value_type`: + return t.valueType, true + case `size_type`: + return t.size, true + } + return nil, false +} + +func (t *HashType) IsAssignable(o px.Type, g px.Guard) bool { + switch o := o.(type) { + case *HashType: + if t.size.min == 0 && o == hashTypeEmpty { + return true + } + return t.size.IsAssignable(o.size, g) && GuardedIsAssignable(t.keyType, o.keyType, g) && GuardedIsAssignable(t.valueType, o.valueType, g) + case *StructType: + if !t.size.IsInstance3(len(o.elements)) { + return false + } + for _, element := range o.elements { + if !(GuardedIsAssignable(t.keyType, element.ActualKeyType(), g) && GuardedIsAssignable(t.valueType, element.value, g)) { + return false + } + } + return true + default: + return false + } +} + +func (t *HashType) IsInstance(o px.Value, g px.Guard) bool { + if v, ok := o.(*Hash); ok && t.size.IsInstance3(v.Len()) { + for _, entry := range v.entries { + if !(GuardedIsInstance(t.keyType, entry.key, g) && GuardedIsInstance(t.valueType, entry.value, g)) { + return false + } + } + return true + } + return false +} + +func (t *HashType) KeyType() px.Type { + return t.keyType +} + +func (t *HashType) MetaType() px.ObjectType { + return HashMetaType +} + +func (t *HashType) Name() string { + return 
`Hash` +} + +func (t *HashType) Parameters() []px.Value { + if *t == *hashTypeDefault { + return px.EmptyValues + } + if *t == *hashTypeEmpty { + return []px.Value{ZERO, ZERO} + } + params := make([]px.Value, 0, 4) + params = append(params, t.keyType) + params = append(params, t.valueType) + if *t.size != *IntegerTypePositive { + params = append(params, t.size.SizeParameters()...) + } + return params +} + +func (t *HashType) ReflectType(c px.Context) (reflect.Type, bool) { + if kt, ok := ReflectType(c, t.keyType); ok { + if vt, ok := ReflectType(c, t.valueType); ok { + return reflect.MapOf(kt, vt), true + } + } + return nil, false +} + +func (t *HashType) Resolve(c px.Context) px.Type { + t.keyType = resolve(c, t.keyType) + t.valueType = resolve(c, t.valueType) + return t +} + +func (t *HashType) CanSerializeAsString() bool { + return canSerializeAsString(t.keyType) && canSerializeAsString(t.valueType) +} + +func (t *HashType) SerializationString() string { + return t.String() +} + +func (t *HashType) Size() *IntegerType { + return t.size +} + +func (t *HashType) String() string { + return px.ToString2(t, None) +} + +func (t *HashType) ValueType() px.Type { + return t.valueType +} + +func (t *HashType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *HashType) PType() px.Type { + return &TypeType{t} +} + +func WrapHashEntry(key px.Value, value px.Value) *HashEntry { + if key == nil { + panic(px.Error(px.NilHashKey, issue.NoArgs)) + } + if value == nil { + panic(px.Error(px.NilHashValue, issue.H{`key`: key})) + } + return &HashEntry{key, value} +} + +func WrapHashEntry2(key string, value px.Value) *HashEntry { + if value == nil { + panic(px.Error(px.NilHashValue, issue.H{`key`: key})) + } + return &HashEntry{stringValue(key), value} +} + +func (he *HashEntry) Add(v px.Value) px.List { + panic(`Operation not supported`) +} + +func (he *HashEntry) AddAll(v px.List) px.List { + panic(`Operation not supported`) +} + +func (he *HashEntry) All(predicate px.Predicate) bool { + return predicate(he.key) && predicate(he.value) +} + +func (he *HashEntry) Any(predicate px.Predicate) bool { + return predicate(he.key) || predicate(he.value) +} + +func (he *HashEntry) AppendTo(slice []px.Value) []px.Value { + return append(slice, he.key, he.value) +} + +func (he *HashEntry) At(i int) px.Value { + switch i { + case 0: + return he.key + case 1: + return he.value + default: + return undef + } +} + +func (he *HashEntry) Delete(v px.Value) px.List { + panic(`Operation not supported`) +} + +func (he *HashEntry) DeleteAll(v px.List) px.List { + panic(`Operation not supported`) +} + +func (he *HashEntry) DetailedType() px.Type { + return NewTupleType([]px.Type{px.DetailedValueType(he.key), px.DetailedValueType(he.value)}, NewIntegerType(2, 2)) +} + +func (he *HashEntry) Each(consumer px.Consumer) { + consumer(he.key) + consumer(he.value) +} + +func (he *HashEntry) EachSlice(n int, consumer px.SliceConsumer) { + if n == 1 { + consumer(SingletonArray(he.key)) + consumer(SingletonArray(he.value)) + } else if n >= 2 { + consumer(he) + } +} + +func (he *HashEntry) EachWithIndex(consumer px.IndexedConsumer) { + consumer(he.key, 0) + consumer(he.value, 1) +} + +func (he *HashEntry) ElementType() px.Type { + return commonType(he.key.PType(), he.value.PType()) +} + +func (he *HashEntry) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*HashEntry); ok { + return he.key.Equals(ov.key, g) && he.value.Equals(ov.value, g) + } + if iv, ok := o.(*Array); ok && iv.Len() 
== 2 { + return he.key.Equals(iv.At(0), g) && he.value.Equals(iv.At(1), g) + } + return false +} + +func (he *HashEntry) Find(predicate px.Predicate) (px.Value, bool) { + if predicate(he.key) { + return he.key, true + } + if predicate(he.value) { + return he.value, true + } + return nil, false +} + +func (he *HashEntry) Flatten() px.List { + return he.AsArray().Flatten() +} + +func (he *HashEntry) IsEmpty() bool { + return false +} + +func (he *HashEntry) IsHashStyle() bool { + return false +} + +func (he *HashEntry) AsArray() px.List { + return WrapValues([]px.Value{he.key, he.value}) +} + +func (he *HashEntry) Key() px.Value { + return he.key +} + +func (he *HashEntry) Len() int { + return 2 +} + +func (he *HashEntry) Map(mapper px.Mapper) px.List { + return WrapValues([]px.Value{mapper(he.key), mapper(he.value)}) +} + +func (he *HashEntry) Select(predicate px.Predicate) px.List { + if predicate(he.key) { + if predicate(he.value) { + return he + } + return SingletonArray(he.key) + } + if predicate(he.value) { + return SingletonArray(he.value) + } + return px.EmptyArray +} + +func (he *HashEntry) Slice(i int, j int) px.List { + if i > 1 || i >= j { + return px.EmptyArray + } + if i == 1 { + return SingletonArray(he.value) + } + if j == 1 { + return SingletonArray(he.key) + } + return he +} + +func (he *HashEntry) Reduce(redactor px.BiMapper) px.Value { + return redactor(he.key, he.value) +} + +func (he *HashEntry) Reduce2(initialValue px.Value, redactor px.BiMapper) px.Value { + return redactor(redactor(initialValue, he.key), he.value) +} + +func (he *HashEntry) Reject(predicate px.Predicate) px.List { + if predicate(he.key) { + if predicate(he.value) { + return px.EmptyArray + } + return SingletonArray(he.value) + } + if predicate(he.value) { + return SingletonArray(he.key) + } + return he +} + +func (he *HashEntry) PType() px.Type { + return NewArrayType(commonType(he.key.PType(), he.value.PType()), NewIntegerType(2, 2)) +} + +func (he *HashEntry) Unique() px.List { + if he.key.Equals(he.value, nil) { + return SingletonArray(he.key) + } + return he +} + +func (he *HashEntry) Value() px.Value { + return he.value +} + +func (he *HashEntry) String() string { + return px.ToString2(he, None) +} + +func (he *HashEntry) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + WrapValues([]px.Value{he.key, he.value}).ToString(b, s, g) +} + +func BuildHash(len int, bld func(*Hash, []*HashEntry) []*HashEntry) *Hash { + h := &Hash{entries: make([]*HashEntry, 0, len)} + h.entries = bld(h, h.entries) + return h +} + +func WrapHash(entries []*HashEntry) *Hash { + return &Hash{entries: entries} +} + +func WrapHash2(entries px.List) *Hash { + hvEntries := make([]*HashEntry, entries.Len()) + entries.EachWithIndex(func(entry px.Value, idx int) { + hvEntries[idx] = entry.(*HashEntry) + }) + return &Hash{entries: hvEntries} +} + +// WrapStringToTypeMap builds an ordered map from adds all entries in the given map +func WrapStringToTypeMap(hash map[string]px.Type) *Hash { + hvEntries := make([]*HashEntry, len(hash)) + i := 0 + for k, v := range hash { + hvEntries[i] = WrapHashEntry(stringValue(k), v) + i++ + } + return sortedMap(hvEntries) +} + +// WrapStringToValueMap builds an ordered map from adds all entries in the given map +func WrapStringToValueMap(hash map[string]px.Value) *Hash { + hvEntries := make([]*HashEntry, len(hash)) + i := 0 + for k, v := range hash { + hvEntries[i] = WrapHashEntry(stringValue(k), v) + i++ + } + return sortedMap(hvEntries) +} + +// WrapStringToInterfaceMap does not 
preserve order since order is undefined in a Go map +func WrapStringToInterfaceMap(c px.Context, hash map[string]interface{}) *Hash { + hvEntries := make([]*HashEntry, len(hash)) + i := 0 + for k, v := range hash { + hvEntries[i] = WrapHashEntry2(k, wrap(c, v)) + i++ + } + return sortedMap(hvEntries) +} + +// WrapStringToStringMap does not preserve order since order is undefined in a Go map +func WrapStringToStringMap(hash map[string]string) *Hash { + hvEntries := make([]*HashEntry, len(hash)) + i := 0 + for k, v := range hash { + hvEntries[i] = WrapHashEntry2(k, stringValue(v)) + i++ + } + return sortedMap(hvEntries) +} + +func sortedMap(hvEntries []*HashEntry) *Hash { + // map order is undefined (and changes from one run to another) so entries must + // be sorted to get a predictable order + sort.Slice(hvEntries, func(i, j int) bool { + return hvEntries[i].key.String() < hvEntries[j].key.String() + }) + return &Hash{entries: hvEntries} +} + +func WrapStringPValue(hash *hash.StringHash) *Hash { + hvEntries := make([]*HashEntry, hash.Len()) + i := 0 + hash.EachPair(func(k string, v interface{}) { + hvEntries[i] = WrapHashEntry2(k, v.(px.Value)) + i++ + }) + return &Hash{entries: hvEntries} +} + +func WrapHashFromArray(a *Array) *Hash { + top := a.Len() + switch a.PType().(*ArrayType).ElementType().(type) { + case *ArrayType: + // Array of arrays. Assume that each nested array is [key, value] + entries := make([]*HashEntry, top) + a.EachWithIndex(func(pair px.Value, idx int) { + pairArr := pair.(px.List) + if pairArr.Len() != 2 { + panic(illegalArguments(`Hash`, fmt.Sprintf(`hash entry array must have 2 elements, got %d`, pairArr.Len()))) + } + entries[idx] = WrapHashEntry(pairArr.At(0), pairArr.At(1)) + }) + return WrapHash(entries) + default: + if (top % 2) != 0 { + panic(illegalArguments(`Hash`, `odd number of arguments in Array`)) + } + entries := make([]*HashEntry, top/2) + idx := 0 + a.EachSlice(2, func(slice px.List) { + entries[idx] = WrapHashEntry(slice.At(0), slice.At(1)) + idx++ + }) + return WrapHash(entries) + } +} + +func IndexedFromArray(a *Array) *Hash { + top := a.Len() + entries := make([]*HashEntry, top) + a.EachWithIndex(func(v px.Value, idx int) { + entries[idx] = WrapHashEntry(integerValue(int64(idx)), v) + }) + return WrapHash(entries) +} + +func singleMap(key, value px.Value) *Hash { + return &Hash{entries: []*HashEntry{WrapHashEntry(key, value)}} +} + +func singletonMap(key string, value px.Value) px.OrderedMap { + return &Hash{entries: []*HashEntry{WrapHashEntry2(key, value)}} +} + +func (hv *Hash) Add(v px.Value) px.List { + switch v := v.(type) { + case *HashEntry: + return hv.Merge(WrapHash([]*HashEntry{v})) + case *Array: + if v.Len() == 2 { + return hv.Merge(WrapHash([]*HashEntry{WrapHashEntry(v.At(0), v.At(1))})) + } + } + panic(`Operation not supported`) +} + +func (hv *Hash) AddAll(v px.List) px.List { + switch v := v.(type) { + case *Hash: + return hv.Merge(v) + case *Array: + return hv.Merge(WrapHashFromArray(v)) + } + panic(`Operation not supported`) +} + +func (hv *Hash) All(predicate px.Predicate) bool { + for _, e := range hv.entries { + if !predicate(e) { + return false + } + } + return true +} + +func (hv *Hash) AllPairs(predicate px.BiPredicate) bool { + for _, e := range hv.entries { + if !predicate(e.key, e.value) { + return false + } + } + return true +} + +func (hv *Hash) AllKeysAreStrings() bool { + for _, e := range hv.entries { + if _, ok := e.key.(stringValue); !ok { + return false + } + } + return true +} + +func (hv *Hash) Any(predicate 
px.Predicate) bool { + for _, e := range hv.entries { + if predicate(e) { + return true + } + } + return false +} + +func (hv *Hash) AnyPair(predicate px.BiPredicate) bool { + for _, e := range hv.entries { + if predicate(e.key, e.value) { + return true + } + } + return false +} + +func (hv *Hash) AppendEntriesTo(entries []*HashEntry) []*HashEntry { + return append(entries, hv.entries...) +} + +func (hv *Hash) AppendTo(slice []px.Value) []px.Value { + for _, e := range hv.entries { + slice = append(slice, e) + } + return slice +} + +func (hv *Hash) AsArray() px.List { + values := make([]px.Value, len(hv.entries)) + for idx, entry := range hv.entries { + values[idx] = WrapValues([]px.Value{entry.key, entry.value}) + } + return WrapValues(values) +} + +func (hv *Hash) At(i int) px.Value { + if i >= 0 && i < len(hv.entries) { + return hv.entries[i] + } + return undef +} + +func (hv *Hash) Delete(key px.Value) px.List { + if idx, ok := hv.valueIndex()[px.ToKey(key)]; ok { + return WrapHash(append(hv.entries[:idx], hv.entries[idx+1:]...)) + } + return hv +} + +func (hv *Hash) DeleteAll(keys px.List) px.List { + entries := hv.entries + valueIndex := hv.valueIndex() + keys.Each(func(key px.Value) { + if idx, ok := valueIndex[px.ToKey(key)]; ok { + entries = append(hv.entries[:idx], hv.entries[idx+1:]...) + } + }) + if len(hv.entries) == len(entries) { + return hv + } + return WrapHash(entries) +} + +func (hv *Hash) DetailedType() px.Type { + return hv.privateDetailedType() +} + +func (hv *Hash) ElementType() px.Type { + return hv.PType().(*HashType).EntryType() +} + +func (hv *Hash) Entries() px.List { + return hv +} + +func (hv *Hash) Each(consumer px.Consumer) { + for _, e := range hv.entries { + consumer(e) + } +} + +func (hv *Hash) EachSlice(n int, consumer px.SliceConsumer) { + top := len(hv.entries) + for i := 0; i < top; i += n { + e := i + n + if e > top { + e = top + } + consumer(WrapValues(ValueSlice(hv.entries[i:e]))) + } +} + +func (hv *Hash) EachKey(consumer px.Consumer) { + for _, e := range hv.entries { + consumer(e.key) + } +} + +func (hv *Hash) Find(predicate px.Predicate) (px.Value, bool) { + for _, e := range hv.entries { + if predicate(e) { + return e, true + } + } + return nil, false +} + +func (hv *Hash) Flatten() px.List { + els := make([]px.Value, 0, len(hv.entries)*2) + for _, he := range hv.entries { + els = append(els, he.key, he.value) + } + return WrapValues(els).Flatten() +} + +func (hv *Hash) Map(mapper px.Mapper) px.List { + mapped := make([]px.Value, len(hv.entries)) + for i, e := range hv.entries { + mapped[i] = mapper(e) + } + return WrapValues(mapped) +} + +func (hv *Hash) MapEntries(mapper px.EntryMapper) px.OrderedMap { + mapped := make([]*HashEntry, len(hv.entries)) + for i, e := range hv.entries { + mapped[i] = mapper(e).(*HashEntry) + } + return WrapHash(mapped) +} + +func (hv *Hash) MapValues(mapper px.Mapper) px.OrderedMap { + mapped := make([]*HashEntry, len(hv.entries)) + for i, e := range hv.entries { + mapped[i] = WrapHashEntry(e.key, mapper(e.value)) + } + return WrapHash(mapped) +} + +func (hv *Hash) Select(predicate px.Predicate) px.List { + selected := make([]*HashEntry, 0) + for _, e := range hv.entries { + if predicate(e) { + selected = append(selected, e) + } + } + return WrapHash(selected) +} + +func (hv *Hash) SelectPairs(predicate px.BiPredicate) px.OrderedMap { + selected := make([]*HashEntry, 0) + for _, e := range hv.entries { + if predicate(e.key, e.value) { + selected = append(selected, e) + } + } + return WrapHash(selected) +} + +func 
(hv *Hash) Reflect(c px.Context) reflect.Value { + ht, ok := ReflectType(c, hv.PType()) + if !ok { + ht = reflect.TypeOf(map[interface{}]interface{}{}) + } + + keyType := ht.Key() + valueType := ht.Elem() + m := reflect.MakeMapWithSize(ht, hv.Len()) + rf := c.Reflector() + for _, e := range hv.entries { + m.SetMapIndex(rf.Reflect2(e.key, keyType), rf.Reflect2(e.value, valueType)) + } + return m +} + +func (hv *Hash) ReflectTo(c px.Context, value reflect.Value) { + ht := value.Type() + ptr := ht.Kind() == reflect.Ptr + if ptr { + ht = ht.Elem() + } + if ht.Kind() == reflect.Interface { + var ok bool + if ht, ok = ReflectType(c, hv.PType()); !ok { + ht = reflect.TypeOf(map[interface{}]interface{}{}) + } + } + keyType := ht.Key() + valueType := ht.Elem() + m := reflect.MakeMapWithSize(ht, hv.Len()) + rf := c.Reflector() + for _, e := range hv.entries { + m.SetMapIndex(rf.Reflect2(e.key, keyType), rf.Reflect2(e.value, valueType)) + } + if ptr { + // The created map cannot be addressed. A pointer to it is necessary + x := reflect.New(m.Type()) + x.Elem().Set(m) + m = x + } + value.Set(m) +} + +func (hv *Hash) Reduce(redactor px.BiMapper) px.Value { + if hv.IsEmpty() { + return undef + } + return reduceEntries(hv.entries[1:], hv.At(0), redactor) +} + +func (hv *Hash) Reduce2(initialValue px.Value, redactor px.BiMapper) px.Value { + return reduceEntries(hv.entries, initialValue, redactor) +} + +func (hv *Hash) Reject(predicate px.Predicate) px.List { + selected := make([]*HashEntry, 0) + for _, e := range hv.entries { + if !predicate(e) { + selected = append(selected, e) + } + } + return WrapHash(selected) +} + +func (hv *Hash) RejectPairs(predicate px.BiPredicate) px.OrderedMap { + selected := make([]*HashEntry, 0) + for _, e := range hv.entries { + if !predicate(e.key, e.value) { + selected = append(selected, e) + } + } + return WrapHash(selected) +} + +func (hv *Hash) EachPair(consumer px.BiConsumer) { + for _, e := range hv.entries { + consumer(e.key, e.value) + } +} + +func (hv *Hash) EachValue(consumer px.Consumer) { + for _, e := range hv.entries { + consumer(e.value) + } +} + +func (hv *Hash) EachWithIndex(consumer px.IndexedConsumer) { + for i, e := range hv.entries { + consumer(e, i) + } +} + +func (hv *Hash) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*Hash); ok { + if top := len(hv.entries); top == len(ov.entries) { + ovIndex := ov.valueIndex() + for key, idx := range hv.valueIndex() { + var ovIdx int + if ovIdx, ok = ovIndex[key]; !(ok && hv.entries[idx].Equals(ov.entries[ovIdx], g)) { + return false + } + } + return true + } + } + return false +} + +func (hv *Hash) Get(key px.Value) (px.Value, bool) { + return hv.get(px.ToKey(key)) +} + +func (hv *Hash) Get2(key px.Value, dflt px.Value) px.Value { + return hv.get2(px.ToKey(key), dflt) +} + +func (hv *Hash) Get3(key px.Value, dflt px.Producer) px.Value { + return hv.get3(px.ToKey(key), dflt) +} + +func (hv *Hash) Get4(key string) (px.Value, bool) { + return hv.get(px.HashKey(key)) +} + +func (hv *Hash) Get5(key string, dflt px.Value) px.Value { + return hv.get2(px.HashKey(key), dflt) +} + +func (hv *Hash) Get6(key string, dflt px.Producer) px.Value { + return hv.get3(px.HashKey(key), dflt) +} + +func (hv *Hash) GetEntry(key string) (px.MapEntry, bool) { + if pos, ok := hv.valueIndex()[px.HashKey(key)]; ok { + return hv.entries[pos], true + } + return nil, false +} + +func (hv *Hash) GetEntryFold(key string) (px.MapEntry, bool) { + for _, e := range hv.entries { + if strings.EqualFold(e.key.String(), key) { + return e, 
true + } + } + return nil, false +} + +func (hv *Hash) get(key px.HashKey) (px.Value, bool) { + if pos, ok := hv.valueIndex()[key]; ok { + return hv.entries[pos].value, true + } + return undef, false +} + +func (hv *Hash) get2(key px.HashKey, dflt px.Value) px.Value { + if pos, ok := hv.valueIndex()[key]; ok { + return hv.entries[pos].value + } + return dflt +} + +func (hv *Hash) get3(key px.HashKey, dflt px.Producer) px.Value { + if pos, ok := hv.valueIndex()[key]; ok { + return hv.entries[pos].value + } + return dflt() +} + +func (hv *Hash) IncludesKey(o px.Value) bool { + _, ok := hv.valueIndex()[px.ToKey(o)] + return ok +} + +func (hv *Hash) IncludesKey2(key string) bool { + _, ok := hv.valueIndex()[px.HashKey(key)] + return ok +} + +func (hv *Hash) IsEmpty() bool { + return len(hv.entries) == 0 +} + +func (hv *Hash) IsHashStyle() bool { + return true +} + +func (hv *Hash) Keys() px.List { + keys := make([]px.Value, len(hv.entries)) + for idx, entry := range hv.entries { + keys[idx] = entry.key + } + return WrapValues(keys) +} + +func (hv *Hash) Len() int { + return len(hv.entries) +} + +func (hv *Hash) Merge(o px.OrderedMap) px.OrderedMap { + return WrapHash(hv.mergeEntries(o)) +} + +func (hv *Hash) mergeEntries(o px.OrderedMap) []*HashEntry { + oh := o.(*Hash) + index := hv.valueIndex() + selfLen := len(hv.entries) + all := make([]*HashEntry, selfLen, selfLen+len(oh.entries)) + copy(all, hv.entries) + for _, entry := range oh.entries { + if idx, ok := index[px.ToKey(entry.key)]; ok { + all[idx] = entry + } else { + all = append(all, entry) + } + } + return all +} + +func (hv *Hash) Slice(i int, j int) px.List { + return WrapHash(hv.entries[i:j]) +} + +type hashSorter struct { + entries []*HashEntry + comparator px.Comparator +} + +func (s *hashSorter) Len() int { + return len(s.entries) +} + +func (s *hashSorter) Less(i, j int) bool { + vs := s.entries + return s.comparator(vs[i].key, vs[j].key) +} + +func (s *hashSorter) Swap(i, j int) { + vs := s.entries + v := vs[i] + vs[i] = vs[j] + vs[j] = v +} + +// Sort reorders the associations of this hash by applying the comparator +// to the keys +func (hv *Hash) Sort(comparator px.Comparator) px.List { + s := &hashSorter{make([]*HashEntry, len(hv.entries)), comparator} + copy(s.entries, hv.entries) + sort.Sort(s) + return WrapHash(s.entries) +} + +func (hv *Hash) String() string { + return px.ToString2(hv, None) +} + +func (hv *Hash) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + hv.ToString2(b, s, px.GetFormat(s.FormatMap(), hv.PType()), '{', g) +} + +func (hv *Hash) ToString2(b io.Writer, s px.FormatContext, f px.Format, delim byte, g px.RDetect) { + if g == nil { + g = make(px.RDetect) + } else if g[hv] { + utils.WriteString(b, ``) + return + } + g[hv] = true + + switch f.FormatChar() { + case 'a': + WrapArray3(hv).ToString(b, s, g) + return + case 'h', 's', 'p': + indent := s.Indentation() + indent = indent.Indenting(f.IsAlt() || indent.IsIndenting()) + + if indent.Breaks() && delim != '(' { + utils.WriteString(b, "\n") + utils.WriteString(b, indent.Padding()) + } + + var delims [2]byte + if delim == '(' || f.LeftDelimiter() == 0 { + delims = delimiterPairs[delim] + } else { + delims = delimiterPairs[f.LeftDelimiter()] + } + if delims[0] != 0 { + utils.WriteByte(b, delims[0]) + } + + if f.IsAlt() { + utils.WriteString(b, "\n") + } + + top := len(hv.entries) + if top > 0 { + sep := f.Separator(`,`) + assoc := f.Separator2(` => `) + cf := f.ContainerFormats() + if cf == nil { + cf = DefaultContainerFormats + } + if 
f.IsAlt() { + sep += "\n" + } else { + sep += ` ` + } + + childrenIndent := indent.Increase(f.IsAlt()) + padding := `` + if f.IsAlt() { + padding = childrenIndent.Padding() + } + + last := top - 1 + for idx, entry := range hv.entries { + k := entry.Key() + utils.WriteString(b, padding) + if isContainer(k, s) { + k.ToString(b, px.NewFormatContext2(childrenIndent, s.FormatMap(), s.Properties()), g) + } else { + k.ToString(b, px.NewFormatContext2(childrenIndent, cf, s.Properties()), g) + } + v := entry.Value() + utils.WriteString(b, assoc) + if isContainer(v, s) { + v.ToString(b, px.NewFormatContext2(childrenIndent, s.FormatMap(), s.Properties()), g) + } else { + if v == nil { + panic(`not good`) + } + v.ToString(b, px.NewFormatContext2(childrenIndent, cf, s.Properties()), g) + } + if idx < last { + utils.WriteString(b, sep) + } + } + } + + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, indent.Padding()) + } + if delims[1] != 0 { + utils.WriteByte(b, delims[1]) + } + default: + panic(s.UnsupportedFormat(hv.PType(), `hasp`, f)) + } + delete(g, hv) +} + +func (hv *Hash) PType() px.Type { + return hv.privateReducedType() +} + +// Unique on a Hash will always return self since the keys of a hash are unique +func (hv *Hash) Unique() px.List { + return hv +} + +func (hv *Hash) Values() px.List { + values := make([]px.Value, len(hv.entries)) + for idx, entry := range hv.entries { + values[idx] = entry.value + } + return WrapValues(values) +} + +func (hv *Hash) privateDetailedType() px.Type { + if hv.detailedType == nil { + top := len(hv.entries) + if top == 0 { + hv.detailedType = hv.privateReducedType() + return hv.detailedType + } + + structEntries := make([]*StructElement, top) + for idx, entry := range hv.entries { + if ks, ok := entry.key.(stringValue); ok { + structEntries[idx] = NewStructElement(ks, DefaultAnyType()) + continue + } + + // Struct type cannot be used unless all keys are strings + hv.detailedType = hv.privateReducedType() + return hv.detailedType + } + hv.detailedType = NewStructType(structEntries) + + for _, entry := range hv.entries { + if sv, ok := entry.key.(stringValue); !ok || len(string(sv)) == 0 { + firstEntry := hv.entries[0] + commonKeyType := px.DetailedValueType(firstEntry.key) + commonValueType := px.DetailedValueType(firstEntry.value) + for idx := 1; idx < top; idx++ { + entry := hv.entries[idx] + commonKeyType = commonType(commonKeyType, px.DetailedValueType(entry.key)) + commonValueType = commonType(commonValueType, px.DetailedValueType(entry.value)) + } + sz := int64(len(hv.entries)) + hv.detailedType = NewHashType(commonKeyType, commonValueType, NewIntegerType(sz, sz)) + return hv.detailedType + } + } + + for idx, entry := range hv.entries { + structEntries[idx] = NewStructElement(entry.key, px.DetailedValueType(entry.value)) + } + } + return hv.detailedType +} + +func (hv *Hash) privateReducedType() px.Type { + if hv.reducedType == nil { + top := len(hv.entries) + if top == 0 { + hv.reducedType = EmptyHashType() + } else { + sz := int64(top) + ht := NewHashType(DefaultAnyType(), DefaultAnyType(), NewIntegerType(sz, sz)) + hv.reducedType = ht + firstEntry := hv.entries[0] + commonKeyType := firstEntry.key.PType() + commonValueType := firstEntry.value.PType() + for idx := 1; idx < top; idx++ { + entry := hv.entries[idx] + commonKeyType = commonType(commonKeyType, entry.key.PType()) + commonValueType = commonType(commonValueType, entry.value.PType()) + } + ht.keyType = commonKeyType + ht.valueType = commonValueType + } + } + return 
hv.reducedType +} + +func (hv *Hash) valueIndex() map[px.HashKey]int { + if hv.index == nil { + result := make(map[px.HashKey]int, len(hv.entries)) + for idx, entry := range hv.entries { + result[px.ToKey(entry.key)] = idx + } + hv.index = result + } + return hv.index +} + +func NewMutableHash() *MutableHashValue { + return &MutableHashValue{Hash{entries: make([]*HashEntry, 0, 7)}} +} + +// PutAll merges the given hash into this hash (mutates the hash). The method +// is not thread safe +func (hv *MutableHashValue) PutAll(o px.OrderedMap) { + hv.entries = hv.mergeEntries(o) + hv.detailedType = nil + hv.index = nil +} + +// Put adds or replaces the given key/value association in this hash +func (hv *MutableHashValue) Put(key, value px.Value) { + hv.PutAll(WrapHash([]*HashEntry{{key, value}})) +} + +func reduceEntries(slice []*HashEntry, initialValue px.Value, redactor px.BiMapper) px.Value { + memo := initialValue + for _, v := range slice { + memo = redactor(memo, v) + } + return memo +} diff --git a/vendor/github.com/lyraproj/pcore/types/inittype.go b/vendor/github.com/lyraproj/pcore/types/inittype.go new file mode 100644 index 0000000..8e8b635 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/inittype.go @@ -0,0 +1,254 @@ +package types + +import ( + "io" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type InitType struct { + typ px.Type + initArgs *Array + ctor px.Function +} + +var InitMetaType px.ObjectType + +func init() { + InitMetaType = newObjectType(`Pcore::Init`, `Pcore::AnyType { + attributes => { + type => { type => Optional[Type], value => undef }, + init_args => { type => Array, value => [] } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newInitType2(args...) + }) +} + +func DefaultInitType() *InitType { + return initTypeDefault +} + +func NewInitType(typ px.Value, args px.Value) *InitType { + tp, ok := typ.(px.Type) + if !ok { + tp = nil + } + aa, ok := args.(*Array) + if !ok { + aa = emptyArray + } + if typ == nil && aa.Len() == 0 { + return initTypeDefault + } + return &InitType{typ: tp, initArgs: aa} +} + +func newInitType2(args ...px.Value) *InitType { + switch len(args) { + case 0: + return DefaultInitType() + case 1: + return NewInitType(args[0], nil) + default: + return NewInitType(args[0], WrapValues(args[1:])) + } +} + +func (t *InitType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *InitType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *InitType) SerializationString() string { + return t.String() +} + +func (t *InitType) Default() px.Type { + return initTypeDefault +} + +func (t *InitType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*InitType); ok { + return t == ot || px.Equals(t.typ, ot.typ, g) && px.Equals(t.initArgs, ot.initArgs, g) + } + return false +} + +func (t *InitType) Get(key string) (px.Value, bool) { + switch key { + case `type`: + if t.typ == nil { + return undef, true + } + return t.typ, true + case `init_args`: + return t.initArgs, true + default: + return nil, false + } +} + +func (t *InitType) EachSignature(doer func(signature px.Signature)) { + t.assertInitialized() + if t.ctor != nil { + for _, lambda := range t.ctor.Dispatchers() { + doer(lambda.Signature()) + } + } +} + +func (t *InitType) anySignature(doer func(signature px.Signature) bool) bool { + t.assertInitialized() + if t.ctor != nil { + for _, lambda := range t.ctor.Dispatchers() { + if doer(lambda.Signature()) { + return true + } + } + } + 
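+	// Either no constructor has been resolved for the contained type yet, or none
+	// of its dispatcher signatures satisfied the predicate.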
return false +} + +// IsAssignable answers the question if a value of the given type can be used when +// instantiating an instance of the contained type +func (t *InitType) IsAssignable(o px.Type, g px.Guard) bool { + if t.typ == nil { + return richDataTypeDefault.IsAssignable(o, g) + } + + if !t.initArgs.IsEmpty() { + ts := append(make([]px.Type, 0, t.initArgs.Len()+1), o) + t.initArgs.Each(func(v px.Value) { ts = append(ts, v.PType()) }) + tp := NewTupleType(ts, nil) + return t.anySignature(func(s px.Signature) bool { return s.IsAssignable(tp, g) }) + } + + // First test if the given value matches a single value constructor + tp := NewTupleType([]px.Type{o}, nil) + return t.anySignature(func(s px.Signature) bool { return s.IsAssignable(tp, g) }) +} + +// IsInstance answers the question if the given value can be used when +// instantiating an instance of the contained type +func (t *InitType) IsInstance(o px.Value, g px.Guard) bool { + if t.typ == nil { + return richDataTypeDefault.IsInstance(o, g) + } + + if !t.initArgs.IsEmpty() { + // The init arguments must be combined with the given value in an array. Here, it doesn't + // matter if the given value is an array or not. It must match as a single value regardless. + vs := append(make([]px.Value, 0, t.initArgs.Len()+1), o) + vs = t.initArgs.AppendTo(vs) + return t.anySignature(func(s px.Signature) bool { return s.CallableWith(vs, nil) }) + } + + // First test if the given value matches a single value constructor. + vs := []px.Value{o} + if t.anySignature(func(s px.Signature) bool { return s.CallableWith(vs, nil) }) { + return true + } + + // If the given value is an array, expand it and check if it matches. + if a, ok := o.(*Array); ok { + vs = a.AppendTo(make([]px.Value, 0, a.Len())) + return t.anySignature(func(s px.Signature) bool { return s.CallableWith(vs, nil) }) + } + return false +} + +func (t *InitType) MetaType() px.ObjectType { + return InitMetaType +} + +func (t *InitType) Name() string { + return `Init` +} + +func (t *InitType) New(c px.Context, args []px.Value) px.Value { + t.Resolve(c) + if t.ctor == nil { + panic(px.Error(px.InstanceDoesNotRespond, issue.H{`type`: t, `message`: `new`})) + } + + if !t.initArgs.IsEmpty() { + // The init arguments must be combined with the given value in an array. Here, it doesn't + // matter if the given value is an array or not. It must match as a single value regardless. + vs := append(make([]px.Value, 0, t.initArgs.Len()+len(args)), args...) + vs = t.initArgs.AppendTo(vs) + return t.ctor.Call(c, nil, vs...) + } + + // First test if the given value matches a single value constructor. + if t.anySignature(func(s px.Signature) bool { return s.CallableWith(args, nil) }) { + return t.ctor.Call(c, nil, args...) + } + + // If the given value is an array, expand it and check if it matches. + if len(args) == 1 { + arg := args[0] + if a, ok := arg.(*Array); ok { + vs := a.AppendTo(make([]px.Value, 0, a.Len())) + return t.ctor.Call(c, nil, vs...) + } + } + + // Provoke argument error + return t.ctor.Call(c, nil, args...) 
+} + +func (t *InitType) Resolve(c px.Context) px.Type { + if t.typ != nil && t.ctor == nil { + if ctor, ok := px.Load(c, NewTypedName(px.NsConstructor, t.typ.Name())); ok { + t.ctor = ctor.(px.Function) + } else { + panic(px.Error(px.CtorNotFound, issue.H{`type`: t.typ.Name()})) + } + } + return t +} + +func (t *InitType) String() string { + return px.ToString2(t, None) +} + +func (t *InitType) Parameters() []px.Value { + t.assertInitialized() + if t.initArgs.Len() == 0 { + if t.typ == nil { + return px.EmptyValues + } + return []px.Value{t.typ} + } + ps := []px.Value{undef, t.initArgs} + if t.typ != nil { + ps[1] = t.typ + } + return ps +} + +func (t *InitType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *InitType) Type() px.Type { + return t.typ +} + +func (t *InitType) PType() px.Type { + return &TypeType{t} +} + +func (t *InitType) assertInitialized() { + if t.typ != nil && t.ctor == nil { + t.Resolve(px.CurrentContext()) + } +} + +var initTypeDefault = &InitType{typ: nil, initArgs: emptyArray} diff --git a/vendor/github.com/lyraproj/pcore/types/integertype.go b/vendor/github.com/lyraproj/pcore/types/integertype.go new file mode 100644 index 0000000..9514cfc --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/integertype.go @@ -0,0 +1,535 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "math" + "strconv" + + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type ( + IntegerType struct { + min int64 + max int64 + } + + // integerValue represents int64 as a pcore.Value + integerValue int64 +) + +var IntegerTypePositive = &IntegerType{0, math.MaxInt64} +var IntegerTypeZero = &IntegerType{0, 0} +var IntegerTypeOne = &IntegerType{1, 1} + +var ZERO = integerValue(0) + +var integerTypeDefault = &IntegerType{math.MinInt64, math.MaxInt64} +var integerType8 = &IntegerType{math.MinInt8, math.MaxInt8} +var integerType16 = &IntegerType{math.MinInt16, math.MaxInt16} +var integerType32 = &IntegerType{math.MinInt32, math.MaxInt32} +var integerTypeU8 = &IntegerType{0, math.MaxUint8} +var integerTypeU16 = &IntegerType{0, math.MaxUint16} +var integerTypeU32 = &IntegerType{0, math.MaxUint32} +var integerTypeU64 = IntegerTypePositive // MaxUInt64 isn't supported at this time + +var IntegerMetaType px.ObjectType + +func init() { + IntegerMetaType = newObjectType(`Pcore::IntegerType`, + `Pcore::NumericType { + attributes => { + from => { type => Optional[Integer], value => undef }, + to => { type => Optional[Integer], value => undef } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newIntegerType2(args...) 
+ }) + + newGoConstructor2(`Integer`, + func(t px.LocalTypes) { + t.Type(`Radix`, `Variant[Default, Integer[2,2], Integer[8,8], Integer[10,10], Integer[16,16]]`) + t.Type(`Convertible`, `Variant[Numeric, Boolean, Pattern[/`+IntegerPattern+`/], Timespan, Timestamp]`) + t.Type(`NamedArgs`, `Struct[{from => Convertible, Optional[radix] => Radix, Optional[abs] => Boolean}]`) + }, + + func(d px.Dispatch) { + d.Param(`Convertible`) + d.OptionalParam(`Radix`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + r := 10 + abs := false + if len(args) > 1 { + if radix, ok := args[1].(integerValue); ok { + r = int(radix) + } + if len(args) > 2 { + abs = args[2].(booleanValue).Bool() + } + } + n := intFromConvertible(args[0], r) + if abs && n < 0 { + n = -n + } + return integerValue(n) + }) + }, + + func(d px.Dispatch) { + d.Param(`NamedArgs`) + d.Function(func(c px.Context, args []px.Value) px.Value { + h := args[0].(*Hash) + r := 10 + abs := false + if rx, ok := h.Get4(`radix`); ok { + if radix, ok := rx.(integerValue); ok { + r = int(radix) + } + } + if ab, ok := h.Get4(`abs`); ok { + abs = ab.(booleanValue).Bool() + } + n := intFromConvertible(h.Get5(`from`, undef), r) + if abs && n < 0 { + n = -n + } + return integerValue(n) + }) + }, + ) +} + +func intFromConvertible(from px.Value, radix int) int64 { + switch from := from.(type) { + case integerValue: + return from.Int() + case floatValue: + return from.Int() + case *Timestamp: + return from.Int() + case Timespan: + return from.Int() + case booleanValue: + return from.Int() + default: + i, err := strconv.ParseInt(from.String(), radix, 64) + if err == nil { + return i + } + panic(px.Error(px.NotInteger, issue.H{`value`: from})) + } +} + +func DefaultIntegerType() *IntegerType { + return integerTypeDefault +} + +func PositiveIntegerType() *IntegerType { + return IntegerTypePositive +} + +func NewIntegerType(min int64, max int64) *IntegerType { + if min == math.MinInt64 { + if max == math.MaxInt64 { + return DefaultIntegerType() + } + } else if min == 0 { + if max == math.MaxInt64 { + return PositiveIntegerType() + } else if max == 0 { + return IntegerTypeZero + } + } else if min == 1 && max == 1 { + return IntegerTypeOne + } + if min > max { + panic(illegalArguments(`Integer[]`, `min is not allowed to be greater than max`)) + } + return &IntegerType{min, max} +} + +func newIntegerType2(limits ...px.Value) *IntegerType { + argc := len(limits) + if argc == 0 { + return integerTypeDefault + } + min, ok := toInt(limits[0]) + if !ok { + if _, ok = limits[0].(*DefaultValue); !ok { + panic(illegalArgumentType(`Integer[]`, 0, `Integer`, limits[0])) + } + min = math.MinInt64 + } + + var max int64 + switch len(limits) { + case 1: + max = math.MaxInt64 + case 2: + max, ok = toInt(limits[1]) + if !ok { + if _, ok = limits[1].(*DefaultValue); !ok { + panic(illegalArgumentType(`Integer[]`, 1, `Integer`, limits[1])) + } + max = math.MaxInt64 + } + default: + panic(illegalArgumentCount(`Integer[]`, `0 - 2`, len(limits))) + } + return NewIntegerType(min, max) +} + +func (t *IntegerType) Default() px.Type { + return integerTypeDefault +} + +func (t *IntegerType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *IntegerType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*IntegerType); ok { + return t.min == ot.min && t.max == ot.max + } + return false +} + +func (t *IntegerType) Generic() px.Type { + return integerTypeDefault +} + +func (t *IntegerType) Get(key string) (px.Value, bool) { + switch key { + case 
`from`: + v := px.Undef + if t.min != math.MinInt64 { + v = integerValue(t.min) + } + return v, true + case `to`: + v := px.Undef + if t.max != math.MaxInt64 { + v = integerValue(t.max) + } + return v, true + default: + return nil, false + } +} + +func (t *IntegerType) IsAssignable(o px.Type, g px.Guard) bool { + if it, ok := o.(*IntegerType); ok { + return t.min <= it.min && t.max >= it.max + } + return false +} + +func (t *IntegerType) IsInstance(o px.Value, g px.Guard) bool { + if n, ok := toInt(o); ok { + return t.IsInstance2(n) + } + return false +} + +func (t *IntegerType) IsInstance2(n int64) bool { + return t.min <= n && n <= t.max +} + +func (t *IntegerType) IsInstance3(n int) bool { + return t.IsInstance2(int64(n)) +} + +func (t *IntegerType) IsUnbounded() bool { + return t.min == math.MinInt64 && t.max == math.MaxInt64 +} + +func (t *IntegerType) Min() int64 { + return t.min +} + +func (t *IntegerType) Max() int64 { + return t.max +} + +func (t *IntegerType) MetaType() px.ObjectType { + return IntegerMetaType +} + +func (t *IntegerType) Name() string { + return `Integer` +} + +func (t *IntegerType) Parameters() []px.Value { + if t.min == math.MinInt64 { + if t.max == math.MaxInt64 { + return px.EmptyValues + } + return []px.Value{WrapDefault(), integerValue(t.max)} + } + if t.max == math.MaxInt64 { + return []px.Value{integerValue(t.min)} + } + return []px.Value{integerValue(t.min), integerValue(t.max)} +} + +func (t *IntegerType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(int64(0)), true +} + +func (t *IntegerType) SizeParameters() []px.Value { + params := make([]px.Value, 2) + params[0] = integerValue(t.min) + if t.max == math.MaxInt64 { + params[1] = WrapDefault() + } else { + params[1] = integerValue(t.max) + } + return params +} + +func (t *IntegerType) CanSerializeAsString() bool { + return true +} + +func (t *IntegerType) SerializationString() string { + return t.String() +} + +func (t *IntegerType) String() string { + return px.ToString2(t, None) +} + +func (t *IntegerType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *IntegerType) PType() px.Type { + return &TypeType{t} +} + +func WrapInteger(val int64) px.Integer { + return integerValue(val) +} + +func (iv integerValue) Abs() int64 { + if iv < 0 { + return -int64(iv) + } + return int64(iv) +} + +func (iv integerValue) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(integerValue); ok { + return iv == ov + } + return false +} + +func (iv integerValue) Float() float64 { + return float64(iv) +} + +func (iv integerValue) Int() int64 { + return int64(iv) +} + +func (iv integerValue) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(int64(iv)) +} + +func (iv integerValue) ReflectTo(c px.Context, value reflect.Value) { + if !value.CanSet() { + panic(px.Error(px.AttemptToSetUnsettable, issue.H{`kind`: reflect.Int.String()})) + } + ok := true + switch value.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + value.SetInt(int64(iv)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + value.SetUint(uint64(iv)) + case reflect.Interface: + value.Set(reflect.ValueOf(int64(iv))) + case reflect.Ptr: + switch value.Type().Elem().Kind() { + case reflect.Int64: + v := int64(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Int: + v := int(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Int8: + v := int8(iv) + value.Set(reflect.ValueOf(&v)) + case 
reflect.Int16: + v := int16(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Int32: + v := int32(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Uint: + v := uint(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Uint8: + v := uint8(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Uint16: + v := uint16(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Uint32: + v := uint32(iv) + value.Set(reflect.ValueOf(&v)) + case reflect.Uint64: + v := uint64(iv) + value.Set(reflect.ValueOf(&v)) + default: + ok = false + } + default: + ok = false + } + if !ok { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: reflect.Int.String(), `actual`: value.Kind().String()})) + } +} + +func (iv integerValue) String() string { + return fmt.Sprintf(`%d`, int64(iv)) +} + +func (iv integerValue) ToKey(b *bytes.Buffer) { + n := int64(iv) + b.WriteByte(1) + b.WriteByte(HkInteger) + b.WriteByte(byte(n >> 56)) + b.WriteByte(byte(n >> 48)) + b.WriteByte(byte(n >> 40)) + b.WriteByte(byte(n >> 32)) + b.WriteByte(byte(n >> 24)) + b.WriteByte(byte(n >> 16)) + b.WriteByte(byte(n >> 8)) + b.WriteByte(byte(n)) +} + +func (iv integerValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), iv.PType()) + var err error + switch f.FormatChar() { + case 'x', 'X', 'o', 'd': + _, err = fmt.Fprintf(b, f.OrigFormat(), int64(iv)) + case 'p', 'b', 'B': + longVal := int64(iv) + intString := strconv.FormatInt(longVal, integerRadix(f.FormatChar())) + totWidth := 0 + if f.Width() > 0 { + totWidth = f.Width() + } + numWidth := 0 + if f.Precision() > 0 { + numWidth = f.Precision() + } + + if numWidth > 0 && numWidth < len(intString) && f.FormatChar() == 'p' { + intString = intString[:numWidth] + } + + zeroPad := numWidth - len(intString) + + pfx := `` + if f.IsAlt() && longVal != 0 && !(f.FormatChar() == 'o' && zeroPad > 0) { + pfx = integerPrefixRadix(f.FormatChar()) + } + computedFieldWidth := len(pfx) + intMax(numWidth, len(intString)) + + for spacePad := totWidth - computedFieldWidth; spacePad > 0; spacePad-- { + _, err = b.Write([]byte{' '}) + if err != nil { + break + } + } + if err != nil { + break + } + + _, err = io.WriteString(b, pfx) + if err != nil { + break + } + if zeroPad > 0 { + padChar := []byte{'0'} + if f.FormatChar() == 'p' { + padChar = []byte{' '} + } + for ; zeroPad > 0; zeroPad-- { + _, err = b.Write(padChar) + if err != nil { + break + } + } + } + if err == nil { + _, err = io.WriteString(b, intString) + } + case 'e', 'E', 'f', 'g', 'G', 'a', 'A': + floatValue(iv.Float()).ToString(b, px.NewFormatContext(DefaultFloatType(), f, s.Indentation()), g) + case 'c': + bld := bytes.NewBufferString(``) + bld.WriteRune(rune(int64(iv))) + f.ApplyStringFlags(b, bld.String(), f.IsAlt()) + case 's': + f.ApplyStringFlags(b, strconv.Itoa(int(int64(iv))), f.IsAlt()) + default: + //noinspection SpellCheckingInspection + panic(s.UnsupportedFormat(iv.PType(), `dxXobBeEfgGaAspc`, f)) + } + if err != nil { + panic(err) + } +} + +func intMax(a int, b int) int { + if a > b { + return a + } + return b +} + +func integerRadix(c byte) int { + switch c { + case 'b', 'B': + return 2 + case 'o': + return 8 + case 'x', 'X': + return 16 + default: + return 10 + } +} + +func integerPrefixRadix(c byte) string { + switch c { + case 'x': + return `0x` + case 'X': + return `0X` + case 'o': + return `0` + case 'b': + return `0b` + case 'B': + return `0B` + default: + return `` + } +} + +func (iv integerValue) PType() px.Type { + v := int64(iv) + return &IntegerType{v, v} +} diff --git 
a/vendor/github.com/lyraproj/pcore/types/iterabletype.go b/vendor/github.com/lyraproj/pcore/types/iterabletype.go new file mode 100644 index 0000000..b595e4b --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/iterabletype.go @@ -0,0 +1,153 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type IterableType struct { + typ px.Type +} + +var IterableMetaType px.ObjectType + +func init() { + IterableMetaType = newObjectType(`Pcore::IterableType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } + }`, func(ctx px.Context, args []px.Value) px.Value { + return newIterableType2(args...) + }) +} + +func DefaultIterableType() *IterableType { + return iterableTypeDefault +} + +func NewIterableType(elementType px.Type) *IterableType { + if elementType == nil || elementType == anyTypeDefault { + return DefaultIterableType() + } + return &IterableType{elementType} +} + +func newIterableType2(args ...px.Value) *IterableType { + switch len(args) { + case 0: + return DefaultIterableType() + case 1: + containedType, ok := args[0].(px.Type) + if !ok { + panic(illegalArgumentType(`Iterable[]`, 0, `Type`, args[0])) + } + return NewIterableType(containedType) + default: + panic(illegalArgumentCount(`Iterable[]`, `0 - 1`, len(args))) + } +} + +func (t *IterableType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *IterableType) Default() px.Type { + return iterableTypeDefault +} + +func (t *IterableType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*IterableType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *IterableType) Generic() px.Type { + return NewIterableType(px.GenericType(t.typ)) +} + +func (t *IterableType) Get(key string) (value px.Value, ok bool) { + switch key { + case `type`: + return t.typ, true + } + return nil, false +} + +func (t *IterableType) IsAssignable(o px.Type, g px.Guard) bool { + var et px.Type + switch o := o.(type) { + case *ArrayType: + et = o.ElementType() + case *BinaryType: + et = NewIntegerType(0, 255) + case *HashType: + et = o.EntryType() + case *stringType, *vcStringType, *scStringType: + et = OneCharStringType + case *TupleType: + return allAssignableTo(o.types, t.typ, g) + default: + return false + } + return GuardedIsAssignable(t.typ, et, g) +} + +func (t *IterableType) IsInstance(o px.Value, g px.Guard) bool { + if iv, ok := o.(px.Indexed); ok { + return GuardedIsAssignable(t.typ, iv.ElementType(), g) + } + return false +} + +func (t *IterableType) MetaType() px.ObjectType { + return IterableMetaType +} + +func (t *IterableType) Name() string { + return `Iterable` +} + +func (t *IterableType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + return []px.Value{t.typ} +} + +func (t *IterableType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *IterableType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *IterableType) SerializationString() string { + return t.String() +} + +func (t *IterableType) String() string { + return px.ToString2(t, None) +} + +func (t *IterableType) ElementType() px.Type { + return t.typ +} + +func (t *IterableType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *IterableType) PType() px.Type { + return &TypeType{t} +} + +var iterableTypeDefault = &IterableType{typ: DefaultAnyType()} diff --git 
a/vendor/github.com/lyraproj/pcore/types/iteratortype.go b/vendor/github.com/lyraproj/pcore/types/iteratortype.go new file mode 100644 index 0000000..f2c0625 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/iteratortype.go @@ -0,0 +1,143 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type ( + IteratorType struct { + typ px.Type + } +) + +var iteratorTypeDefault = &IteratorType{typ: DefaultAnyType()} + +var IteratorMetaType px.ObjectType + +func init() { + IteratorMetaType = newObjectType(`Pcore::IteratorType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } + }`, func(ctx px.Context, args []px.Value) px.Value { + return newIteratorType2(args...) + }) +} + +func DefaultIteratorType() *IteratorType { + return iteratorTypeDefault +} + +func NewIteratorType(elementType px.Type) *IteratorType { + if elementType == nil || elementType == anyTypeDefault { + return DefaultIteratorType() + } + return &IteratorType{elementType} +} + +func newIteratorType2(args ...px.Value) *IteratorType { + switch len(args) { + case 0: + return DefaultIteratorType() + case 1: + containedType, ok := args[0].(px.Type) + if !ok { + panic(illegalArgumentType(`Iterator[]`, 0, `Type`, args[0])) + } + return NewIteratorType(containedType) + default: + panic(illegalArgumentCount(`Iterator[]`, `0 - 1`, len(args))) + } +} + +func (t *IteratorType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *IteratorType) Default() px.Type { + return iteratorTypeDefault +} + +func (t *IteratorType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*IteratorType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *IteratorType) Generic() px.Type { + return NewIteratorType(px.GenericType(t.typ)) +} + +func (t *IteratorType) Get(key string) (value px.Value, ok bool) { + switch key { + case `type`: + return t.typ, true + } + return nil, false +} + +func (t *IteratorType) IsAssignable(o px.Type, g px.Guard) bool { + if it, ok := o.(*IteratorType); ok { + return GuardedIsAssignable(t.typ, it.typ, g) + } + return false +} + +func (t *IteratorType) IsInstance(o px.Value, g px.Guard) bool { + if it, ok := o.(px.IteratorValue); ok { + return GuardedIsInstance(t.typ, it.ElementType(), g) + } + return false +} + +func (t *IteratorType) MetaType() px.ObjectType { + return IteratorMetaType +} + +func (t *IteratorType) Name() string { + return `Iterator` +} + +func (t *IteratorType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + return []px.Value{t.typ} +} + +func (t *IteratorType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *IteratorType) SerializationString() string { + return t.String() +} + +func (t *IteratorType) String() string { + return px.ToString2(t, None) +} + +func (t *IteratorType) ElementType() px.Type { + return t.typ +} + +func (t *IteratorType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *IteratorType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *IteratorType) PType() px.Type { + return &TypeType{t} +} diff --git a/vendor/github.com/lyraproj/pcore/types/lexer.go b/vendor/github.com/lyraproj/pcore/types/lexer.go new file mode 100644 index 0000000..c4915e8 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/lexer.go @@ -0,0 +1,463 @@ +package types + +import ( + "bytes" + "errors" + "fmt" + "unicode" + 
"unicode/utf8" + + "github.com/lyraproj/pcore/utils" +) + +type tokenType int + +const ( + end = iota + name + identifier + integer + float + regexpLiteral + stringLiteral + leftBracket + rightBracket + leftCurlyBrace + rightCurlyBrace + leftParen + rightParen + comma + dot + rocket + equal +) + +func (t tokenType) String() (s string) { + switch t { + case end: + s = "end" + case name: + s = "name" + case identifier: + s = "identifier" + case integer: + s = "integer" + case float: + s = "float" + case regexpLiteral: + s = "regexp" + case stringLiteral: + s = "string" + case leftBracket: + s = "leftBracket" + case rightBracket: + s = "rightBracket" + case leftCurlyBrace: + s = "leftCurlyBrace" + case rightCurlyBrace: + s = "rightCurlyBrace" + case leftParen: + s = "leftParen" + case rightParen: + s = "rightParen" + case comma: + s = "comma" + case dot: + s = "dot" + case rocket: + s = "rocket" + case equal: + s = "equal" + default: + s = "*UNKNOWN TOKEN*" + } + return +} + +type token struct { + s string + i tokenType +} + +func (t token) String() string { + return fmt.Sprintf("%s: '%s'", t.i.String(), t.s) +} + +func badToken(r rune) error { + return fmt.Errorf("unexpected character '%c'", r) +} + +func scan(sr *utils.StringReader, tf func(t token) error) (err error) { + buf := bytes.NewBufferString(``) + for { + r := sr.Next() + if r == utf8.RuneError { + return errors.New("unicode error") + } + if r == 0 { + break + } + + switch r { + case ' ', '\t', '\n': + case '\'', '"': + if err = consumeString(sr, r, buf); err != nil { + return err + } + if err = tf(token{buf.String(), stringLiteral}); err != nil { + return err + } + buf.Reset() + case '/': + if err = consumeRegexp(sr, buf); err != nil { + return err + } + if err = tf(token{buf.String(), regexpLiteral}); err != nil { + return err + } + buf.Reset() + case '#': + if err = consumeLineComment(sr); err != nil { + return err + } + case '{': + if err = tf(token{string(r), leftCurlyBrace}); err != nil { + return err + } + case '}': + if err = tf(token{string(r), rightCurlyBrace}); err != nil { + return err + } + case '[': + if err = tf(token{string(r), leftBracket}); err != nil { + return err + } + case ']': + if err = tf(token{string(r), rightBracket}); err != nil { + return err + } + case '(': + if err = tf(token{string(r), leftParen}); err != nil { + return err + } + case ')': + if err = tf(token{string(r), rightParen}); err != nil { + return err + } + case ',': + if err = tf(token{string(r), comma}); err != nil { + return err + } + case '.': + if err = tf(token{string(r), dot}); err != nil { + return err + } + case '=': + r = sr.Peek() + if r == '>' { + sr.Next() + if err = tf(token{`=>`, rocket}); err != nil { + return err + } + } else { + if err = tf(token{string(r), equal}); err != nil { + return err + } + } + continue + case '-', '+': + n := sr.Next() + if n >= '0' && n <= '9' { + var tkn tokenType + buf.WriteRune(r) + tkn, err = consumeNumber(sr, n, buf, integer) + if err != nil { + return err + } + if err = tf(token{buf.String(), tkn}); err != nil { + return err + } + buf.Reset() + continue + } + return badToken(r) + default: + var tkn tokenType + if r >= '0' && r <= '9' { + tkn, err = consumeNumber(sr, r, buf, integer) + } else if r >= 'A' && r <= 'Z' { + err = consumeTypeName(sr, r, buf) + tkn = name + } else if r >= 'a' && r <= 'z' { + err = consumeIdentifier(sr, r, buf) + tkn = identifier + } else { + return badToken(r) + } + if err != nil { + return err + } + if err = tf(token{buf.String(), tkn}); err != nil { + return err + 
} + buf.Reset() + } + } + if err = tf(token{``, end}); err != nil { + return err + } + return nil +} + +func consumeLineComment(sr *utils.StringReader) error { + for { + switch sr.Next() { + case 0, '\n': + return nil + case utf8.RuneError: + return errors.New("unicode error") + } + } +} + +func consumeUnsignedInteger(sr *utils.StringReader, buf *bytes.Buffer) error { + for { + r := sr.Peek() + switch r { + case utf8.RuneError: + return errors.New("unicode error") + case 0: + return nil + case '.': + return badToken(r) + default: + if r >= '0' && r <= '9' { + sr.Next() + buf.WriteRune(r) + continue + } + if unicode.IsLetter(r) { + sr.Next() + return badToken(r) + } + return nil + } + } +} + +func consumeExponent(sr *utils.StringReader, buf *bytes.Buffer) error { + for { + r := sr.Next() + switch r { + case 0: + return errors.New("unexpected end") + case '+', '-': + buf.WriteRune(r) + r = sr.Next() + fallthrough + default: + if r >= '0' && r <= '9' { + buf.WriteRune(r) + return consumeUnsignedInteger(sr, buf) + } + return badToken(r) + } + } +} + +func consumeHexInteger(sr *utils.StringReader, buf *bytes.Buffer) error { + for { + r := sr.Peek() + switch r { + case 0: + return nil + default: + if r >= '0' && r <= '9' || r >= 'A' && r <= 'F' || r >= 'a' && r <= 'f' { + sr.Next() + buf.WriteRune(r) + continue + } + return nil + } + } +} + +func consumeNumber(sr *utils.StringReader, start rune, buf *bytes.Buffer, t tokenType) (tokenType, error) { + buf.WriteRune(start) + firstZero := t != float && start == '0' + for { + r := sr.Peek() + switch r { + case 0: + return 0, nil + case '0': + sr.Next() + buf.WriteRune(r) + continue + case 'e', 'E': + sr.Next() + buf.WriteRune(r) + err := consumeExponent(sr, buf) + return float, err + case 'x', 'X': + if firstZero { + sr.Next() + buf.WriteRune(r) + r = sr.Next() + if r >= '0' && r <= '9' || r >= 'A' && r <= 'F' || r >= 'a' && r <= 'f' { + buf.WriteRune(r) + err := consumeHexInteger(sr, buf) + return t, err + } + } + return t, badToken(r) + case '.': + if t == float { + return t, badToken(r) + } + sr.Next() + buf.WriteRune(r) + r = sr.Next() + if r >= '0' && r <= '9' { + return consumeNumber(sr, r, buf, float) + } + return t, badToken(r) + default: + if r >= '0' && r <= '9' { + sr.Next() + buf.WriteRune(r) + continue + } + return t, nil + } + } +} + +func consumeRegexp(sr *utils.StringReader, buf *bytes.Buffer) error { + for { + r := sr.Next() + switch r { + case utf8.RuneError: + return badToken(r) + case '/': + return nil + case '\\': + r = sr.Next() + switch r { + case 0: + return errors.New("unterminated regexp") + case utf8.RuneError: + return badToken(r) + case '/': // Escape is removed + default: + buf.WriteByte('\\') + } + buf.WriteRune(r) + case 0, '\n': + return errors.New("unterminated regexp") + default: + buf.WriteRune(r) + } + } +} + +func consumeString(sr *utils.StringReader, end rune, buf *bytes.Buffer) error { + for { + r := sr.Next() + if r == end { + return nil + } + switch r { + case 0: + return errors.New("unterminated string") + case utf8.RuneError: + return badToken(r) + case '\\': + r := sr.Next() + switch r { + case 0: + return errors.New("unterminated string") + case utf8.RuneError: + return badToken(r) + case 'n': + r = '\n' + case 'r': + r = '\r' + case 't': + r = '\t' + case '\\': + default: + if r != end { + return fmt.Errorf("illegal escape '\\%c'", r) + } + } + buf.WriteRune(r) + case '\n': + return errors.New("unterminated string") + default: + buf.WriteRune(r) + } + } +} + +func consumeIdentifier(sr 
*utils.StringReader, start rune, buf *bytes.Buffer) error { + buf.WriteRune(start) + for { + r := sr.Peek() + switch r { + case 0: + return nil + case ':': + sr.Next() + buf.WriteRune(r) + r = sr.Next() + if r == ':' { + buf.WriteRune(r) + r = sr.Next() + if r >= 'a' && r <= 'z' || r == '_' { + buf.WriteRune(r) + continue + } + } + return badToken(r) + default: + if r == '_' || r >= '0' && r <= '9' || r >= 'A' && r <= 'Z' || r >= 'a' && r <= 'z' { + sr.Next() + buf.WriteRune(r) + continue + } + return nil + } + } +} + +func consumeTypeName(sr *utils.StringReader, start rune, buf *bytes.Buffer) error { + buf.WriteRune(start) + for { + r := sr.Peek() + switch r { + case 0: + return nil + case ':': + sr.Next() + buf.WriteRune(r) + r = sr.Next() + if r == ':' { + buf.WriteRune(r) + r = sr.Next() + if r >= 'A' && r <= 'Z' { + buf.WriteRune(r) + continue + } + } + return badToken(r) + default: + if r == '_' || r >= '0' && r <= '9' || r >= 'A' && r <= 'Z' || r >= 'a' && r <= 'z' { + sr.Next() + buf.WriteRune(r) + continue + } + return nil + } + } +} diff --git a/vendor/github.com/lyraproj/pcore/types/liketype.go b/vendor/github.com/lyraproj/pcore/types/liketype.go new file mode 100644 index 0000000..20c7323 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/liketype.go @@ -0,0 +1,185 @@ +package types + +import ( + "io" + + "strconv" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type LikeType struct { + baseType px.Type + resolved px.Type + navigation string +} + +var LikeMetaType px.ObjectType + +func init() { + LikeMetaType = newObjectType(`Pcore::Like`, + `Pcore::AnyType { + attributes => { + base_type => Type, + navigation => String[1] + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newLikeType2(args...) 
+ }) +} + +func DefaultLikeType() *LikeType { + return typeOfTypeDefault +} + +func NewLikeType(baseType px.Type, navigation string) *LikeType { + return &LikeType{baseType: baseType, navigation: navigation} +} + +func newLikeType2(args ...px.Value) *LikeType { + switch len(args) { + case 0: + return DefaultLikeType() + case 2: + if tp, ok := args[0].(px.Type); ok { + if an, ok := args[1].(stringValue); ok { + return NewLikeType(tp, string(an)) + } else { + panic(illegalArgumentType(`Like[]`, 1, `String`, args[1])) + } + } else { + panic(illegalArgumentType(`Like[]`, 0, `Type`, args[1])) + } + default: + panic(illegalArgumentCount(`Like[]`, `0 or 2`, len(args))) + } +} + +func (t *LikeType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.baseType.Accept(v, g) +} + +func (t *LikeType) Default() px.Type { + return typeOfTypeDefault +} + +func (t *LikeType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*LikeType); ok { + return t.navigation == ot.navigation && t.baseType.Equals(ot.baseType, g) + } + return false +} + +func (t *LikeType) Get(key string) (px.Value, bool) { + switch key { + case `base_type`: + return t.baseType, true + case `navigation`: + return stringValue(t.navigation), true + default: + return nil, false + } +} + +func (t *LikeType) IsAssignable(o px.Type, g px.Guard) bool { + return t.Resolve(nil).IsAssignable(o, g) +} + +func (t *LikeType) IsInstance(o px.Value, g px.Guard) bool { + return t.Resolve(nil).IsInstance(o, g) +} + +func (t *LikeType) MetaType() px.ObjectType { + return LikeMetaType +} + +func (t *LikeType) Name() string { + return `Like` +} + +func (t *LikeType) String() string { + return px.ToString2(t, None) +} + +func (t *LikeType) Parameters() []px.Value { + if *t == *typeOfTypeDefault { + return px.EmptyValues + } + return []px.Value{t.baseType, stringValue(t.navigation)} +} + +func (t *LikeType) Resolve(c px.Context) px.Type { + if t.resolved != nil { + return t.resolved + } + bt := t.baseType + bv := bt.(px.Value) + var ok bool + for _, part := range strings.Split(t.navigation, `.`) { + if c, bv, ok = navigate(c, bv, part); !ok { + panic(px.Error(px.UnresolvedTypeOf, issue.H{`type`: t.baseType, `navigation`: t.navigation})) + } + } + if bt, ok = bv.(px.Type); ok { + t.resolved = bt + return bt + } + panic(px.Error(px.UnresolvedTypeOf, issue.H{`type`: t.baseType, `navigation`: t.navigation})) +} + +func (t *LikeType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *LikeType) PType() px.Type { + return &TypeType{t} +} + +func navigate(c px.Context, value px.Value, member string) (px.Context, px.Value, bool) { + if typ, ok := value.(px.Type); ok { + if po, ok := typ.(px.TypeWithCallableMembers); ok { + if m, ok := po.Member(member); ok { + if a, ok := m.(px.Attribute); ok { + return c, a.Type(), true + } + if f, ok := m.(px.Function); ok { + return c, f.PType().(*CallableType).ReturnType(), true + } + } + } else if st, ok := typ.(*StructType); ok { + if m, ok := st.HashedMembers()[member]; ok { + return c, m.Value(), true + } + } else if tt, ok := typ.(*TupleType); ok { + if n, err := strconv.ParseInt(member, 0, 64); err == nil { + if et, ok := tt.At(int(n)).(px.Type); ok { + return c, et, true + } + } + } else if ta, ok := typ.(*TypeAliasType); ok { + return navigate(c, ta.ResolvedType(), member) + } else { + if m, ok := typ.MetaType().Member(member); ok { + if c == nil { + c = px.CurrentContext() + } + return c, m.Call(c, typ, nil, []px.Value{}), true + } + } + } else { + if po, ok := 
value.PType().(px.TypeWithCallableMembers); ok { + if m, ok := po.Member(member); ok { + if c == nil { + c = px.CurrentContext() + } + return c, m.Call(c, value, nil, []px.Value{}), true + } + } + } + return c, nil, false +} + +var typeOfTypeDefault = &LikeType{baseType: DefaultAnyType()} diff --git a/vendor/github.com/lyraproj/pcore/types/notundeftype.go b/vendor/github.com/lyraproj/pcore/types/notundeftype.go new file mode 100644 index 0000000..cbd6b83 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/notundeftype.go @@ -0,0 +1,150 @@ +package types + +import ( + "io" + + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type NotUndefType struct { + typ px.Type +} + +var NotUndefMetaType px.ObjectType + +func init() { + NotUndefMetaType = newObjectType(`Pcore::NotUndefType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } + }`, func(ctx px.Context, args []px.Value) px.Value { + return newNotUndefType2(args...) + }) +} + +func DefaultNotUndefType() *NotUndefType { + return notUndefTypeDefault +} + +func NewNotUndefType(containedType px.Type) *NotUndefType { + if containedType == nil || containedType == anyTypeDefault { + return DefaultNotUndefType() + } + return &NotUndefType{containedType} +} + +func newNotUndefType2(args ...px.Value) *NotUndefType { + switch len(args) { + case 0: + return DefaultNotUndefType() + case 1: + if containedType, ok := args[0].(px.Type); ok { + return NewNotUndefType(containedType) + } + if containedType, ok := args[0].(stringValue); ok { + return newNotUndefType3(string(containedType)) + } + panic(illegalArgumentType(`NotUndef[]`, 0, `Variant[Type,String]`, args[0])) + default: + panic(illegalArgumentCount(`NotUndef[]`, `0 - 1`, len(args))) + } +} + +func newNotUndefType3(str string) *NotUndefType { + return &NotUndefType{NewStringType(nil, str)} +} + +func (t *NotUndefType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *NotUndefType) ContainedType() px.Type { + return t.typ +} + +func (t *NotUndefType) Default() px.Type { + return notUndefTypeDefault +} + +func (t *NotUndefType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*NotUndefType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *NotUndefType) Generic() px.Type { + return NewNotUndefType(px.GenericType(t.typ)) +} + +func (t *NotUndefType) Get(key string) (value px.Value, ok bool) { + switch key { + case `type`: + return t.typ, true + } + return nil, false +} + +func (t *NotUndefType) IsAssignable(o px.Type, g px.Guard) bool { + return !GuardedIsAssignable(o, undefTypeDefault, g) && GuardedIsAssignable(t.typ, o, g) +} + +func (t *NotUndefType) IsInstance(o px.Value, g px.Guard) bool { + return o != undef && GuardedIsInstance(t.typ, o, g) +} + +func (t *NotUndefType) MetaType() px.ObjectType { + return NotUndefMetaType +} + +func (t *NotUndefType) Name() string { + return `NotUndef` +} + +func (t *NotUndefType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + if str, ok := t.typ.(*vcStringType); ok && str.value != `` { + return []px.Value{stringValue(str.value)} + } + return []px.Value{t.typ} +} + +func (t *NotUndefType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *NotUndefType) ReflectType(c px.Context) (reflect.Type, bool) { + return ReflectType(c, t.typ) +} + +func (t *NotUndefType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *NotUndefType) 
SerializationString() string { + return t.String() +} + +func (t *NotUndefType) String() string { + return px.ToString2(t, None) +} + +func (t *NotUndefType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *NotUndefType) PType() px.Type { + return &TypeType{t} +} + +var notUndefTypeDefault = &NotUndefType{typ: anyTypeDefault} diff --git a/vendor/github.com/lyraproj/pcore/types/numerictype.go b/vendor/github.com/lyraproj/pcore/types/numerictype.go new file mode 100644 index 0000000..441d89e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/numerictype.go @@ -0,0 +1,173 @@ +package types + +import ( + "io" + + "fmt" + "reflect" + "strconv" + + "github.com/lyraproj/pcore/px" +) + +type NumericType struct{} + +var numericTypeDefault = &NumericType{} + +var NumericMetaType px.ObjectType + +func init() { + NumericMetaType = newObjectType(`Pcore::NumericType`, `Pcore::ScalarDataType {}`, func(ctx px.Context, args []px.Value) px.Value { + return DefaultNumericType() + }) + + newGoConstructor2(`Numeric`, + func(t px.LocalTypes) { + t.Type(`Convertible`, `Variant[Numeric, Boolean, Pattern[/`+FloatPattern+`/], Timespan, Timestamp]`) + t.Type(`NamedArgs`, `Struct[from => Convertible, Optional[abs] => Boolean]`) + }, + + func(d px.Dispatch) { + d.Param(`NamedArgs`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return numberFromNamedArgs(args, true) + }) + }, + + func(d px.Dispatch) { + d.Param(`Convertible`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return numberFromPositionalArgs(args, true) + }) + }, + ) +} + +func numberFromPositionalArgs(args []px.Value, tryInt bool) px.Number { + n := fromConvertible(args[0], tryInt) + if len(args) > 1 && args[1].(booleanValue).Bool() { + if i, ok := n.(integerValue); ok { + n = integerValue(i.Abs()) + } else { + n = floatValue(n.(floatValue).Abs()) + } + } + return n +} + +func numberFromNamedArgs(args []px.Value, tryInt bool) px.Number { + h := args[0].(*Hash) + n := fromConvertible(h.Get5(`from`, px.Undef), tryInt) + a := h.Get5(`abs`, nil) + if a != nil && a.(booleanValue).Bool() { + if i, ok := n.(integerValue); ok { + n = integerValue(i.Abs()) + } else { + n = floatValue(n.(floatValue).Abs()) + } + } + return n +} + +func DefaultNumericType() *NumericType { + return numericTypeDefault +} + +func (t *NumericType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *NumericType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*NumericType) + return ok +} + +func (t *NumericType) IsAssignable(o px.Type, g px.Guard) bool { + switch o.(type) { + case *IntegerType, *FloatType: + return true + default: + return false + } +} + +func (t *NumericType) IsInstance(o px.Value, g px.Guard) bool { + switch o.PType().(type) { + case *FloatType, *IntegerType: + return true + default: + return false + } +} + +func (t *NumericType) MetaType() px.ObjectType { + return NumericMetaType +} + +func (t *NumericType) Name() string { + return `Numeric` +} + +func (t *NumericType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(float64(0.0)), true +} + +func (t *NumericType) CanSerializeAsString() bool { + return true +} + +func (t *NumericType) SerializationString() string { + return t.String() +} + +func (t *NumericType) String() string { + return px.ToString2(t, None) +} + +func (t *NumericType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *NumericType) PType() px.Type { 
+ return &TypeType{t} +} + +func fromConvertible(c px.Value, allowInt bool) px.Number { + switch c := c.(type) { + case integerValue: + if allowInt { + return c + } + return floatValue(c.Float()) + case *Timestamp: + return floatValue(c.Float()) + case Timespan: + return floatValue(c.Float()) + case booleanValue: + if allowInt { + return integerValue(c.Int()) + } + return floatValue(c.Float()) + case px.Number: + return c + case stringValue: + s := c.String() + if allowInt { + if i, err := strconv.ParseInt(s, 0, 64); err == nil { + return integerValue(i) + } + } + if f, err := strconv.ParseFloat(s, 64); err == nil { + return floatValue(f) + } + if allowInt { + if len(s) > 2 && s[0] == '0' && (s[1] == 'b' || s[1] == 'B') { + if i, err := strconv.ParseInt(s[2:], 2, 64); err == nil { + return integerValue(i) + } + } + } + } + panic(illegalArguments(`Numeric`, fmt.Sprintf(`Value of type %s cannot be converted to an Number`, c.PType().String()))) +} diff --git a/vendor/github.com/lyraproj/pcore/types/objecttype.go b/vendor/github.com/lyraproj/pcore/types/objecttype.go new file mode 100644 index 0000000..f71687d --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/objecttype.go @@ -0,0 +1,1407 @@ +package types + +import ( + "fmt" + "io" + "reflect" + "regexp" + "runtime" + "sync/atomic" + + "bytes" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +// TODO: Implement Type.CanCoerce(Value) +// Object.IsInstance is true if value is a Hash matching the attribute requirements +// where each entry value can be coerced into the attribute´s type +var ObjectMetaType px.ObjectType + +func init() { + oneArgCtor := func(ctx px.Context, args []px.Value) px.Value { + return newObjectType2(ctx, args...) 
+ } + ObjectMetaType = MakeObjectType(`Pcore::ObjectType`, AnyMetaType, + WrapStringToValueMap(map[string]px.Value{ + `attributes`: singletonMap(`_pcore_init_hash`, TypeObjectInitHash)}), true, + oneArgCtor, oneArgCtor) +} + +const ( + keyAnnotations = `annotations` + keyAttributes = `attributes` + keyConstants = `constants` + keyEquality = `equality` + keyEqualityIncludeType = `equality_include_type` + keyFinal = `final` + keyFunctions = `functions` + keyKind = `kind` + keyName = `name` + keyOverride = `override` + keyParent = `parent` + keySerialization = `serialization` + keyType = `type` + keyTypeParameters = `type_parameters` + keyValue = `value` + + defaultKind = px.AttributeKind(``) + constant = px.AttributeKind(`constant`) + derived = px.AttributeKind(`derived`) + givenOrDerived = px.AttributeKind(`given_or_derived`) + reference = px.AttributeKind(`reference`) +) + +var TypeNamePattern = regexp.MustCompile(`\A[A-Z][\w]*(?:::[A-Z][\w]*)*\z`) + +var TypeTypeName = NewPatternType([]*RegexpType{NewRegexpTypeR(TypeNamePattern)}) + +var MemberNamePattern = regexp.MustCompile(`\A[a-z_]\w*\z`) + +var TypeMemberName = newPatternType2(NewRegexpTypeR(MemberNamePattern)) + +var TypeMemberNames = newArrayType2(TypeMemberName) +var TypeParameters = NewHashType(TypeMemberName, DefaultNotUndefType(), nil) +var TypeAttributes = NewHashType(TypeMemberName, DefaultNotUndefType(), nil) +var TypeConstants = NewHashType(TypeMemberName, DefaultAnyType(), nil) +var TypeFunctions = NewHashType(newVariantType2(TypeMemberName, newPatternType2(NewRegexpTypeR(regexp.MustCompile(`^\[]$`)))), DefaultNotUndefType(), nil) +var TypeEquality = newVariantType2(TypeMemberName, TypeMemberNames) + +var TypeObjectInitHash = NewStructType([]*StructElement{ + NewStructElement(newOptionalType3(keyName), TypeTypeName), + NewStructElement(newOptionalType3(keyParent), NewVariantType(DefaultTypeType(), TypeTypeName)), + NewStructElement(newOptionalType3(keyTypeParameters), TypeParameters), + NewStructElement(newOptionalType3(keyAttributes), TypeAttributes), + NewStructElement(newOptionalType3(keyConstants), TypeConstants), + NewStructElement(newOptionalType3(keyFunctions), TypeFunctions), + NewStructElement(newOptionalType3(keyEquality), TypeEquality), + NewStructElement(newOptionalType3(keyEqualityIncludeType), DefaultBooleanType()), + NewStructElement(newOptionalType3(keyEquality), TypeEquality), + NewStructElement(newOptionalType3(keySerialization), TypeMemberNames), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations), +}) + +type objectType struct { + annotatable + hashKey px.HashKey + name string + parent px.Type + creators []px.DispatchFunction + parameters *hash.StringHash // map doesn't preserve order + attributes *hash.StringHash + functions *hash.StringHash + equality []string + equalityIncludeType bool + serialization []string + loader px.Loader + initHashExpression interface{} // Expression, *Hash, or Go zero value + attrInfo *attributesInfo + ctor px.Function + goType px.AnnotatedType + isInterface bool + initType *StructType +} + +func (t *objectType) ReflectType(c px.Context) (reflect.Type, bool) { + if t.goType != nil { + return t.goType.Type(), true + } + return c.ImplementationRegistry().TypeToReflected(t) +} + +func ObjectToString(o px.PuppetObject, s px.FormatContext, b io.Writer, g px.RDetect) { + indent := s.Indentation() + if indent.Breaks() { + utils.WriteString(b, "\n") + utils.WriteString(b, indent.Padding()) + } + n := o.PType().Name() + ih := o.InitHash().(*Hash) + if n == `` { + // 
Anonymous objects can't be written in constructor call form. They are instead + // written as a Hash + ih.ToString(b, s, g) + } else { + utils.WriteString(b, n) + ih.ToString2(b, s, px.GetFormat(s.FormatMap(), o.PType()), '(', g) + } +} + +var objectTypeDefault = &objectType{ + annotatable: annotatable{annotations: emptyMap}, + name: `Object`, + hashKey: px.HashKey("\x00tObject"), + parameters: hash.EmptyStringHash, + attributes: hash.EmptyStringHash, + functions: hash.EmptyStringHash, + equalityIncludeType: true} + +func DefaultObjectType() *objectType { + return objectTypeDefault +} + +var objectId = int64(0) + +func AllocObjectType() *objectType { + return &objectType{ + annotatable: annotatable{annotations: emptyMap}, + hashKey: px.HashKey(fmt.Sprintf("\x00tObject%d", atomic.AddInt64(&objectId, 1))), + parameters: hash.EmptyStringHash, + attributes: hash.EmptyStringHash, + functions: hash.EmptyStringHash, + equalityIncludeType: true} +} + +func BuildObjectType(name string, parent px.Type, hashProducer func(px.ObjectType) px.OrderedMap) px.ObjectType { + obj := AllocObjectType() + obj.name = name + obj.parent = parent + obj.initHashExpression = hashProducer(obj) + return obj +} + +func (t *objectType) Initialize(c px.Context, args []px.Value) { + if len(args) == 1 { + if om, ok := args[0].(px.OrderedMap); ok { + t.InitFromHash(c, om) + return + } + } + panic(px.Error(px.Failure, issue.H{`message`: `internal error when creating an Object data type`})) +} + +func (t *objectType) Accept(v px.Visitor, g px.Guard) { + if g == nil { + g = make(px.Guard) + } + if g.Seen(t, nil) { + return + } + v(t) + visitAnnotations(t.annotations, v, g) + if t.parent != nil { + t.parent.Accept(v, g) + } + t.parameters.EachValue(func(p interface{}) { p.(px.AnnotatedMember).Accept(v, g) }) + t.attributes.EachValue(func(a interface{}) { a.(px.AnnotatedMember).Accept(v, g) }) + t.functions.EachValue(func(f interface{}) { f.(px.AnnotatedMember).Accept(v, g) }) +} + +func (t *objectType) AttributesInfo() px.AttributesInfo { + return t.attrInfo +} + +func (t *objectType) Constructor(c px.Context) px.Function { + if t.ctor == nil { + t.createNewFunction(c) + } + return t.ctor +} + +func (t *objectType) Default() px.Type { + return objectTypeDefault +} + +func (t *objectType) EachAttribute(includeParent bool, consumer func(attr px.Attribute)) { + if includeParent && t.parent != nil { + t.resolvedParent().EachAttribute(includeParent, consumer) + } + t.attributes.EachValue(func(a interface{}) { consumer(a.(px.Attribute)) }) +} + +func (t *objectType) EqualityAttributes() *hash.StringHash { + eqa := make([]string, 0, 8) + tp := t + for tp != nil { + if tp.equality != nil { + eqa = append(eqa, tp.equality...) + } + tp = tp.resolvedParent() + } + attrs := hash.NewStringHash(len(eqa)) + for _, an := range eqa { + attrs.Put(an, t.GetAttribute(an)) + } + return attrs +} + +func (t *objectType) Equals(other interface{}, guard px.Guard) bool { + if t == other { + return true + } + ot, ok := other.(*objectType) + if !ok { + return false + } + if t.initHashExpression != nil || ot.initHashExpression != nil { + // Not yet resolved. 
+ return false + } + if t.name != ot.name { + return false + } + if t.equalityIncludeType != ot.equalityIncludeType { + return false + } + + pa := t.resolvedParent() + pb := ot.resolvedParent() + if pa == nil { + if pb != nil { + return false + } + } else { + if pb == nil { + return false + } + if !pa.Equals(pb, guard) { + return false + } + } + if guard == nil { + guard = make(px.Guard) + } + if guard.Seen(t, ot) { + return true + } + return t.attributes.Equals(ot.attributes, guard) && + t.functions.Equals(ot.functions, guard) && + t.parameters.Equals(ot.parameters, guard) && + px.Equals(t.equality, ot.equality, guard) && + px.Equals(t.serialization, ot.serialization, guard) +} + +func (t *objectType) FromReflectedValue(c px.Context, src reflect.Value) px.PuppetObject { + if t.goType != nil { + return NewReflectedValue(t, src).(px.PuppetObject) + } + if src.Kind() == reflect.Ptr { + src = src.Elem() + } + entries := t.appendAttributeValues(c, make([]*HashEntry, 0), &src) + return px.New(c, t, WrapHash(entries)).(px.PuppetObject) +} + +func (t *objectType) Get(key string) (value px.Value, ok bool) { + if key == `_pcore_init_hash` { + return t.InitHash(), true + } + return nil, false +} + +func (t *objectType) GetAttribute(name string) px.Attribute { + a, _ := t.attributes.Get2(name, func() interface{} { + p := t.resolvedParent() + if p != nil { + return p.GetAttribute(name) + } + return nil + }).(px.Attribute) + return a +} + +func (t *objectType) GetFunction(name string) px.Function { + f, _ := t.functions.Get2(name, func() interface{} { + p := t.resolvedParent() + if p != nil { + return p.GetFunction(name) + } + return nil + }).(px.Function) + return f +} + +func (t *objectType) GetValue(key string, o px.Value) (value px.Value, ok bool) { + if pu, ok := o.(px.ReadableObject); ok { + return pu.Get(key) + } + return nil, false +} + +func (t *objectType) GoType() reflect.Type { + if t.goType != nil { + return t.goType.Type() + } + return nil +} + +func (t *objectType) HasHashConstructor() bool { + return t.creators == nil || len(t.creators) == 2 +} + +func (t *objectType) parseAttributeType(c px.Context, receiverType, receiver string, typeString px.StringValue) px.Type { + defer func() { + if r := recover(); r != nil { + if err, ok := r.(error); ok { + var label string + if receiverType == `` { + label = fmt.Sprintf(`%s.%s`, t.Label(), receiver) + } else { + label = fmt.Sprintf(`%s %s[%s]`, receiverType, t.Label(), receiver) + } + panic(px.Error(px.BadTypeString, + issue.H{ + `string`: typeString, + `label`: label, + `detail`: err.Error()})) + } + panic(r) + } + }() + return c.ParseType(typeString.String()) +} + +func (t *objectType) InitFromHash(c px.Context, initHash px.OrderedMap) { + px.AssertInstance(`object initializer`, TypeObjectInitHash, initHash) + t.parameters = hash.EmptyStringHash + t.attributes = hash.EmptyStringHash + t.functions = hash.EmptyStringHash + t.name = stringArg(initHash, keyName, t.name) + + if t.parent == nil { + if pt, ok := initHash.Get4(keyParent); ok { + switch pt := pt.(type) { + case stringValue: + t.parent = t.parseAttributeType(c, ``, `parent`, pt) + case px.ResolvableType: + t.parent = pt.Resolve(c) + default: + t.parent = pt.(px.Type) + } + } + } + + parentMembers := hash.EmptyStringHash + parentTypeParams := hash.EmptyStringHash + var parentObjectType *objectType + + if t.parent != nil { + t.checkSelfRecursion(c, t) + parentObjectType = t.resolvedParent() + parentMembers = parentObjectType.members(true) + parentTypeParams = 
parentObjectType.typeParameters(true) + } + + typeParameters := hashArg(initHash, keyTypeParameters) + if !typeParameters.IsEmpty() { + parameters := hash.NewStringHash(typeParameters.Len()) + typeParameters.EachPair(func(k, v px.Value) { + key := k.String() + var paramType px.Type + var paramValue px.Value + if ph, ok := v.(*Hash); ok { + px.AssertInstance( + func() string { return fmt.Sprintf(`type_parameter %s[%s]`, t.Label(), key) }, + TypeTypeParameter, ph) + paramType = typeArg(ph, keyType, DefaultTypeType()) + paramValue = ph.Get5(keyValue, nil) + } else { + if tn, ok := v.(stringValue); ok { + // Type name. Load the type. + paramType = t.parseAttributeType(c, `type_parameter`, key, tn) + } else { + paramType = px.AssertInstance( + func() string { return fmt.Sprintf(`type_parameter %s[%s]`, t.Label(), key) }, + DefaultTypeType(), v).(px.Type) + } + paramValue = nil + } + if _, ok := paramType.(*OptionalType); !ok { + paramType = NewOptionalType(paramType) + } + param := newTypeParameter(c, key, t, WrapStringToInterfaceMap(c, issue.H{ + keyType: paramType, + keyValue: paramValue})) + assertOverride(param, parentTypeParams) + parameters.Put(key, param) + }) + parameters.Freeze() + t.parameters = parameters + } + + constants := hashArg(initHash, keyConstants) + attributes := hashArg(initHash, keyAttributes) + attrSpecs := hash.NewStringHash(constants.Len() + attributes.Len()) + attributes.EachPair(func(k, v px.Value) { + attrSpecs.Put(k.String(), v) + }) + + if !constants.IsEmpty() { + constants.EachPair(func(k, v px.Value) { + key := k.String() + if attrSpecs.Includes(key) { + panic(px.Error(px.BothConstantAndAttribute, issue.H{`label`: t.Label(), `key`: key})) + } + value := v.(px.Value) + attrSpec := issue.H{ + keyType: px.Generalize(value.PType()), + keyValue: value, + keyKind: constant} + attrSpec[keyOverride] = parentMembers.Includes(key) + attrSpecs.Put(key, WrapStringToInterfaceMap(c, attrSpec)) + }) + } + + if !attrSpecs.IsEmpty() { + ah := hash.NewStringHash(attrSpecs.Len()) + attrSpecs.EachPair(func(key string, ifv interface{}) { + value := ifv.(px.Value) + attrSpec, ok := value.(*Hash) + if !ok { + var attrType px.Type + if tn, ok := value.(stringValue); ok { + // Type name. Load the type. 
+ attrType = t.parseAttributeType(c, `attribute`, key, tn) + } else { + attrType = px.AssertInstance( + func() string { return fmt.Sprintf(`attribute %s[%s]`, t.Label(), key) }, + DefaultTypeType(), value).(px.Type) + } + h := issue.H{keyType: attrType} + if _, ok = attrType.(*OptionalType); ok { + h[keyValue] = px.Undef + } + attrSpec = WrapStringToInterfaceMap(c, h) + } + attr := newAttribute(c, key, t, attrSpec) + assertOverride(attr, parentMembers) + ah.Put(key, attr) + }) + ah.Freeze() + t.attributes = ah + } + isInterface := t.attributes.IsEmpty() && (parentObjectType == nil || parentObjectType.isInterface) + + if t.goType != nil && t.attributes.IsEmpty() { + if pt, ok := PrimitivePType(t.goType.Type()); ok { + t.isInterface = false + + // Create the special attribute that holds the primitive value that is + // reflectable to/from the the go type + attrs := make([]*HashEntry, 2) + attrs[0] = WrapHashEntry2(keyType, pt) + attrs[1] = WrapHashEntry2(KeyGoName, stringValue(keyValue)) + ah := hash.NewStringHash(1) + ah.Put(keyValue, newAttribute(c, keyValue, t, WrapHash(attrs))) + ah.Freeze() + t.attributes = ah + } + } + + funcSpecs := hashArg(initHash, keyFunctions) + if funcSpecs.IsEmpty() { + if isInterface && parentObjectType == nil { + isInterface = false + } + } else { + functions := hash.NewStringHash(funcSpecs.Len()) + funcSpecs.EachPair(func(key, value px.Value) { + if attributes.IncludesKey(key) { + panic(px.Error(px.MemberNameConflict, issue.H{`label`: fmt.Sprintf(`function %s[%s]`, t.Label(), key)})) + } + funcSpec, ok := value.(*Hash) + if !ok { + var funcType px.Type + if tn, ok := value.(stringValue); ok { + // Type name. Load the type. + funcType = t.parseAttributeType(c, `function`, key.String(), tn) + } else { + funcType = px.AssertInstance( + func() string { return fmt.Sprintf(`function %s[%s]`, t.Label(), key) }, + typeFunctionType, value).(px.Type) + } + funcSpec = WrapStringToInterfaceMap(c, issue.H{keyType: funcType}) + } + fnc := newFunction(c, key.String(), t, funcSpec) + assertOverride(fnc, parentMembers) + functions.Put(key.String(), fnc) + }) + functions.Freeze() + t.functions = functions + } + t.equalityIncludeType = boolArg(initHash, keyEqualityIncludeType, true) + + var equality []string + eq := initHash.Get5(keyEquality, nil) + if es, ok := eq.(stringValue); ok { + equality = []string{string(es)} + } else if ea, ok := eq.(*Array); ok { + equality = make([]string, ea.Len()) + } else { + equality = nil + } + for _, attrName := range equality { + mbr := t.attributes.Get2(attrName, func() interface{} { + return t.functions.Get2(attrName, func() interface{} { + return parentMembers.Get(attrName, nil) + }) + }) + attr, ok := mbr.(px.Attribute) + + if !ok { + if mbr == nil { + panic(px.Error(px.EqualityAttributeNotFound, issue.H{`label`: t.Label(), `attribute`: attrName})) + } + panic(px.Error(px.EqualityNotAttribute, issue.H{`label`: t.Label(), `member`: mbr.(px.AnnotatedMember).Label()})) + } + if attr.Kind() == constant { + panic(px.Error(px.EqualityOnConstant, issue.H{`label`: t.Label(), `attribute`: mbr.(px.AnnotatedMember).Label()})) + } + // Assert that attribute is not already include by parent equality + if ok && parentObjectType != nil && parentObjectType.EqualityAttributes().Includes(attrName) { + includingParent := t.findEqualityDefiner(attrName) + panic(px.Error(px.EqualityRedefined, issue.H{`label`: t.Label(), `attribute`: attr.Label(), `including_parent`: includingParent})) + } + } + t.equality = equality + + se, ok := 
initHash.Get5(keySerialization, nil).(*Array) + if ok { + serialization := make([]string, se.Len()) + var optFound px.Attribute + se.EachWithIndex(func(elem px.Value, i int) { + attrName := elem.String() + mbr := t.attributes.Get2(attrName, func() interface{} { + return t.functions.Get2(attrName, func() interface{} { + return parentMembers.Get(attrName, nil) + }) + }) + attr, ok := mbr.(px.Attribute) + + if !ok { + if mbr == nil { + panic(px.Error(px.SerializationAttributeNotFound, issue.H{`label`: t.Label(), `attribute`: attrName})) + } + panic(px.Error(px.SerializationNotAttribute, issue.H{`label`: t.Label(), `member`: mbr.(px.AnnotatedMember).Label()})) + } + if attr.Kind() == constant || attr.Kind() == derived { + panic(px.Error(px.SerializationBadKind, issue.H{`label`: t.Label(), `kind`: attr.Kind(), `attribute`: attr.Label()})) + } + if attr.Kind() == givenOrDerived || attr.HasValue() { + optFound = attr + } else if optFound != nil { + panic(px.Error(px.SerializationRequiredAfterOptional, issue.H{`label`: t.Label(), `required`: attr.Label(), `optional`: optFound.Label()})) + } + serialization[i] = attrName + }) + t.serialization = serialization + } + + t.isInterface = isInterface + t.attrInfo = t.createAttributesInfo() + t.annotatable.initialize(initHash.(*Hash)) + t.loader = c.Loader() +} + +func (t *objectType) Implements(ifd px.ObjectType, g px.Guard) bool { + if !ifd.IsInterface() { + return false + } + + for _, f := range ifd.Functions(true) { + m, ok := t.Member(f.Name()) + if !ok { + return false + } + mf, ok := m.(px.ObjFunc) + if !ok { + return false + } + if !f.Type().Equals(mf.Type(), g) { + return false + } + } + return true +} + +func (t *objectType) InitHash() px.OrderedMap { + return WrapStringPValue(t.initHash(true)) +} + +func (t *objectType) IsInterface() bool { + return t.isInterface +} + +func (t *objectType) IsAssignable(o px.Type, g px.Guard) bool { + var ot *objectType + switch o := o.(type) { + case *objectType: + ot = o + case *objectTypeExtension: + ot = o.baseType + default: + return false + } + + if t == DefaultObjectType() { + return true + } + + if t.isInterface { + return ot.Implements(t, g) + } + + if t == DefaultObjectType() || t.Equals(ot, g) { + return true + } + if ot.parent != nil { + return t.IsAssignable(ot.parent, g) + } + return false +} + +func (t *objectType) IsInstance(o px.Value, g px.Guard) bool { + if po, ok := o.(px.Type); ok { + return isAssignable(t, po.MetaType()) + } + return isAssignable(t, o.PType()) +} + +func (t *objectType) IsParameterized() bool { + if !t.parameters.IsEmpty() { + return true + } + p := t.resolvedParent() + if p != nil { + return p.IsParameterized() + } + return false +} + +func (t *objectType) IsMetaType() bool { + return px.IsAssignable(AnyMetaType, t) +} + +func (t *objectType) Label() string { + if t.name == `` { + return `Object` + } + return t.name +} + +func (t *objectType) Member(name string) (px.CallableMember, bool) { + mbr := t.attributes.Get2(name, func() interface{} { + return t.functions.Get2(name, func() interface{} { + if t.parent == nil { + return nil + } + pm, _ := t.resolvedParent().Member(name) + return pm + }) + }) + if mbr == nil { + return nil, false + } + return mbr.(px.CallableMember), true +} + +func (t *objectType) MetaType() px.ObjectType { + return ObjectMetaType +} + +func (t *objectType) Name() string { + return t.name +} + +func (t *objectType) Parameters() []px.Value { + return t.Parameters2(true) +} + +func (t *objectType) Parameters2(includeName bool) []px.Value { + if t == 
objectTypeDefault { + return px.EmptyValues + } + return []px.Value{WrapStringPValue(t.initHash(includeName))} +} + +func (t *objectType) Parent() px.Type { + return t.parent +} + +func (t *objectType) Resolve(c px.Context) px.Type { + if t.initHashExpression == nil { + return t + } + + ihe := t.initHashExpression + t.initHashExpression = nil + + if prt, ok := t.parent.(px.ResolvableType); ok { + t.parent = resolveTypeRefs(c, prt).(px.Type) + } + + var initHash px.OrderedMap + switch ihe := ihe.(type) { + case px.OrderedMap: + initHash = resolveTypeRefs(c, ihe).(px.OrderedMap) + case *taggedType: + t.goType = ihe + initHash = c.Reflector().InitializerFromTagged(t.name, t.parent, ihe) + c.ImplementationRegistry().RegisterType(t, ihe.Type()) + default: + tg := px.NewTaggedType(reflect.TypeOf(ihe), nil) + t.goType = tg + initHash = c.Reflector().InitializerFromTagged(t.name, t.parent, tg) + c.ImplementationRegistry().RegisterType(t, tg.Type()) + } + t.InitFromHash(c, initHash) + return t +} + +func (t *objectType) CanSerializeAsString() bool { + return t == objectTypeDefault +} + +func (t *objectType) SerializationString() string { + return t.String() +} + +func (t *objectType) String() string { + return px.ToString(t) +} + +func (t *objectType) ToKey() px.HashKey { + return t.hashKey +} + +func (t *objectType) ToReflectedValue(c px.Context, src px.PuppetObject, dest reflect.Value) { + dt := dest.Type() + rf := c.Reflector() + fs := rf.Fields(dt) + for i, field := range fs { + f := dest.Field(i) + if field.Anonymous && i == 0 && t.parent != nil { + t.resolvedParent().ToReflectedValue(c, src, f) + continue + } + an := rf.FieldName(&field) + if av, ok := src.Get(an); ok { + rf.ReflectTo(av, f) + continue + } + panic(px.Error(px.AttributeNotFound, issue.H{`name`: an})) + } +} + +func (t *objectType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), t.PType()) + switch f.FormatChar() { + case 's', 'p': + quoted := f.IsAlt() && f.FormatChar() == 's' + if quoted || f.HasStringFlags() { + bld := bytes.NewBufferString(``) + t.basicTypeToString(bld, f, s, g) + f.ApplyStringFlags(b, bld.String(), quoted) + } else { + t.basicTypeToString(b, f, s, g) + } + default: + panic(s.UnsupportedFormat(t.PType(), `sp`, f)) + } +} + +func (t *objectType) basicTypeToString(b io.Writer, f px.Format, s px.FormatContext, g px.RDetect) { + + if t.Equals(DefaultObjectType(), nil) { + utils.WriteString(b, `Object`) + return + } + + typeSetParent := false + typeSetName, inTypeSet := s.Property(`typeSet`) + if tp, ok := s.Property(`typeSetParent`); ok && tp == `true` { + s = s.WithProperties(map[string]string{`typeSetParent`: `false`}) + typeSetParent = true + } + + if ex, ok := s.Property(`expanded`); !(ok && ex == `true`) { + name := t.Name() + if name != `` { + if inTypeSet { + name = stripTypeSetName(typeSetName, name) + } + utils.WriteString(b, name) + return + } + } + + // Avoid nested expansions + s = s.WithProperties(map[string]string{`expanded`: `false`}) + + indent1 := s.Indentation() + indent2 := indent1.Increase(f.IsAlt()) + indent3 := indent2.Increase(f.IsAlt()) + padding1 := `` + padding2 := `` + padding3 := `` + if f.IsAlt() { + padding1 = indent1.Padding() + padding2 = indent2.Padding() + padding3 = indent3.Padding() + } + + cf := f.ContainerFormats() + if cf == nil { + cf = DefaultContainerFormats + } + + ctx2 := px.NewFormatContext2(indent2, s.FormatMap(), s.Properties()) + cti2 := px.NewFormatContext2(indent2, cf, s.Properties()) + ctx3 := 
px.NewFormatContext2(indent3, s.FormatMap(), s.Properties()) + + if inTypeSet { + if t.parent != nil { + utils.WriteString(b, stripTypeSetName(typeSetName, t.parent.Name())) + } + } + if !typeSetParent { + utils.WriteString(b, `Object[`) + } + utils.WriteString(b, `{`) + + first2 := true + ih := t.initHash(!inTypeSet) + for _, key := range ih.Keys() { + if inTypeSet && key == `parent` { + continue + } + + value := ih.Get(key, nil).(px.Value) + if first2 { + first2 = false + } else { + utils.WriteString(b, `,`) + if !f.IsAlt() { + utils.WriteString(b, ` `) + } + } + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding2) + } + utils.WriteString(b, key) + utils.WriteString(b, ` => `) + switch key { + case `attributes`, `functions`: + // The keys should not be quoted in this hash + utils.WriteString(b, `{`) + first3 := true + value.(*Hash).EachPair(func(name, typ px.Value) { + if first3 { + first3 = false + } else { + utils.WriteString(b, `,`) + if !f.IsAlt() { + utils.WriteString(b, ` `) + } + } + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding3) + } + utils.PuppetQuote(b, name.String()) + utils.WriteString(b, ` => `) + typ.ToString(b, ctx3, g) + }) + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding2) + } + utils.WriteString(b, "}") + default: + cx := cti2 + if isContainer(value, s) { + cx = ctx2 + } + value.ToString(b, cx, g) + } + } + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding1) + } + utils.WriteString(b, "}") + if !typeSetParent { + utils.WriteString(b, "]") + } +} + +func (t *objectType) PType() px.Type { + return &TypeType{t} +} + +func (t *objectType) appendAttributeValues(c px.Context, entries []*HashEntry, src *reflect.Value) []*HashEntry { + dt := src.Type() + fs := Fields(dt) + + for i, field := range fs { + sf := src.Field(i) + if sf.Kind() == reflect.Ptr { + sf = sf.Elem() + } + if field.Anonymous && i == 0 && t.parent != nil { + entries = t.resolvedParent().appendAttributeValues(c, entries, &sf) + } else { + if sf.IsValid() { + switch sf.Kind() { + case reflect.Slice, reflect.Map, reflect.Interface, reflect.Ptr: + if sf.IsNil() { + continue + } + } + entries = append(entries, WrapHashEntry2(FieldName(&field), wrap(c, sf))) + } + } + } + return entries +} + +func (t *objectType) checkSelfRecursion(c px.Context, originator *objectType) { + if t.parent != nil { + op := t.resolvedParent() + if op.Equals(originator, nil) { + panic(px.Error(px.ObjectInheritsSelf, issue.H{`label`: originator.Label()})) + } + op.checkSelfRecursion(c, originator) + } +} + +func (t *objectType) collectAttributes(includeParent bool, collector *hash.StringHash) { + if includeParent && t.parent != nil { + t.resolvedParent().collectAttributes(true, collector) + } + collector.PutAll(t.attributes) +} + +func (t *objectType) Functions(includeParent bool) []px.ObjFunc { + collector := hash.NewStringHash(7) + t.collectFunctions(includeParent, collector) + vs := collector.Values() + fs := make([]px.ObjFunc, len(vs)) + for i, v := range vs { + fs[i] = v.(px.ObjFunc) + } + return fs +} + +func (t *objectType) collectFunctions(includeParent bool, collector *hash.StringHash) { + if includeParent && t.parent != nil { + t.resolvedParent().collectFunctions(true, collector) + } + collector.PutAll(t.functions) +} + +func (t *objectType) collectMembers(includeParent bool, collector *hash.StringHash) { + if includeParent && t.parent != nil { + t.resolvedParent().collectMembers(true, collector) + } + 
collector.PutAll(t.attributes) + collector.PutAll(t.functions) +} + +func (t *objectType) collectParameters(includeParent bool, collector *hash.StringHash) { + if includeParent && t.parent != nil { + t.resolvedParent().collectParameters(true, collector) + } + collector.PutAll(t.parameters) +} + +func (t *objectType) createAttributesInfo() *attributesInfo { + attrs := make([]px.Attribute, 0) + nonOptSize := 0 + if t.serialization == nil { + optAttrs := make([]px.Attribute, 0) + t.EachAttribute(true, func(attr px.Attribute) { + switch attr.Kind() { + case constant, derived: + case givenOrDerived: + optAttrs = append(optAttrs, attr) + default: + if attr.HasValue() { + optAttrs = append(optAttrs, attr) + } else { + attrs = append(attrs, attr) + } + } + }) + nonOptSize = len(attrs) + attrs = append(attrs, optAttrs...) + } else { + atMap := hash.NewStringHash(15) + t.collectAttributes(true, atMap) + for _, key := range t.serialization { + attr := atMap.Get(key, nil).(px.Attribute) + if attr.HasValue() { + nonOptSize++ + } + attrs = append(attrs, attr) + } + } + return newAttributesInfo(attrs, nonOptSize, t.EqualityAttributes().Keys()) +} + +func (t *objectType) createInitType() *StructType { + if t.initType == nil { + ai := t.attrInfo + if ai == nil { + // Default Object. It has no attributes + t.initType = NewStructType([]*StructElement{}) + return t.initType + } + attrs := ai.attributes + elements := make([]*StructElement, len(attrs)) + t.initType = NewStructType(elements) + for i, attr := range attrs { + at := typeAndInit(attr.Type()) + switch attr.Kind() { + case constant, derived: + case givenOrDerived: + elements[i] = NewStructElement(newOptionalType3(attr.Name()), at) + default: + var key px.Type + if attr.HasValue() { + key = newOptionalType3(attr.Name()) + } else { + key = NewStringType(nil, attr.Name()) + } + elements[i] = NewStructElement(key, at) + } + } + } + return t.initType +} + +func (t *objectType) createNewFunction(c px.Context) { + pi := t.AttributesInfo() + + var functions []px.DispatchFunction + if t.creators != nil { + functions = t.creators + if functions[0] == nil { + // Specific instruction not to create a constructor + return + } + } else { + if t.name != `` { + var dl px.DefiningLoader + if t.loader == nil { + dl = c.DefiningLoader() + } else { + dl = t.loader.(px.DefiningLoader) + } + tn := px.NewTypedName(px.NsAllocator, t.name) + le := dl.LoadEntry(c, tn) + if le == nil || le.Value() == nil { + dl.SetEntry(tn, px.NewLoaderEntry(px.MakeGoAllocator(func(ctx px.Context, args []px.Value) px.Value { + return AllocObjectValue(t) + }), nil)) + } + } + + functions = []px.DispatchFunction{ + // Positional argument creator + func(c px.Context, args []px.Value) px.Value { + return NewObjectValue(c, t, args) + }, + // Named argument creator + func(c px.Context, args []px.Value) px.Value { + ai := t.AttributesInfo() + if oh, ok := args[0].(*Hash); ok { + el := make([]*HashEntry, 0, oh.Len()) + for _, ca := range ai.Attributes() { + if e, ok := oh.GetEntry(ca.Name()); ok { + el = append(el, WrapHashEntry(e.Key(), coerceTo(c, []string{ca.Label()}, ca.Type(), e.Value()))) + } + } + return newObjectValue2(c, t, oh.Merge(WrapHash(el)).(*Hash)) + } + panic(px.MismatchError(t.Label(), t, args[0])) + }} + } + + paCreator := func(d px.Dispatch) { + for i, attr := range pi.Attributes() { + at := attr.Type() + if ot, ok := at.(px.ObjectType); ok { + at = typeAndInit(ot) + } + switch attr.Kind() { + case constant, derived: + case givenOrDerived: + d.OptionalParam2(at) + default: + if i >= 
pi.RequiredCount() { + d.OptionalParam2(at) + } else { + d.Param2(at) + } + } + } + d.Function(functions[0]) + } + + var creators []px.DispatchCreator + if len(functions) > 1 { + // A named argument constructor exists. Place it first. + creators = []px.DispatchCreator{func(d px.Dispatch) { + d.Param2(t.createInitType()) + d.Function(functions[1]) + }, paCreator} + } else { + creators = []px.DispatchCreator{paCreator} + } + + t.ctor = px.MakeGoConstructor(t.name, creators...).Resolve(c) +} + +func typeAndInit(t px.Type) px.Type { + switch t := t.(type) { + case *objectType: + return NewVariantType(t, t.createInitType()) + case *HashType: + return NewHashType(typeAndInit(t.keyType), typeAndInit(t.valueType), t.size) + case *ArrayType: + return NewArrayType(typeAndInit(t.ElementType()), t.size) + case *StructType: + ses := make([]*StructElement, len(t.elements)) + for i, se := range t.elements { + ses[i] = &StructElement{key: se.key, name: se.name, value: typeAndInit(se.value)} + } + return NewStructType(ses) + case *TupleType: + tts := make([]px.Type, len(t.types)) + for i, tt := range t.types { + tts[i] = typeAndInit(tt) + } + return &TupleType{types: tts, size: t.size, givenOrActualSize: t.givenOrActualSize} + case *VariantType: + tts := make([]px.Type, len(t.types)) + for i, tt := range t.types { + tts[i] = typeAndInit(tt) + } + return &VariantType{types: tts} + case *OptionalType: + return &OptionalType{typ: typeAndInit(t.typ)} + case *NotUndefType: + return &OptionalType{typ: typeAndInit(t.typ)} + default: + return t + } +} + +func (t *objectType) findEqualityDefiner(attrName string) *objectType { + tp := t + for tp != nil { + p := tp.resolvedParent() + if p == nil || !p.EqualityAttributes().Includes(attrName) { + return tp + } + tp = p + } + return nil +} + +func (t *objectType) initHash(includeName bool) *hash.StringHash { + h := t.annotatable.initHash() + if includeName && t.name != `` && t.name != `Object` { + h.Put(keyName, stringValue(t.name)) + } + if t.parent != nil { + h.Put(keyParent, t.parent) + } + if !t.parameters.IsEmpty() { + h.Put(keyTypeParameters, compressedMembersHash(t.parameters)) + } + if !t.attributes.IsEmpty() { + // Divide attributes into constants and others + constants := make([]*HashEntry, 0) + others := hash.NewStringHash(5) + t.attributes.EachPair(func(key string, value interface{}) { + a := value.(px.Attribute) + if a.Kind() == constant && px.Equals(a.Type(), px.Generalize(a.Value().PType()), nil) { + constants = append(constants, WrapHashEntry2(key, a.Value())) + } else { + others.Put(key, a) + } + if !others.IsEmpty() { + h.Put(keyAttributes, compressedMembersHash(others)) + } + if len(constants) > 0 { + h.Put(keyConstants, WrapHash(constants)) + } + }) + } + if !t.functions.IsEmpty() { + h.Put(keyFunctions, compressedMembersHash(t.functions)) + } + if t.equality != nil { + ev := make([]px.Value, len(t.equality)) + for i, e := range t.equality { + ev[i] = stringValue(e) + } + h.Put(keyEquality, WrapValues(ev)) + } + + if !t.equalityIncludeType { + h.Put(keyEqualityIncludeType, BooleanFalse) + } + + if t.serialization != nil { + sv := make([]px.Value, len(t.serialization)) + for i, s := range t.serialization { + sv[i] = stringValue(s) + } + h.Put(keySerialization, WrapValues(sv)) + } + return h +} + +func (t *objectType) members(includeParent bool) *hash.StringHash { + collector := hash.NewStringHash(7) + t.collectMembers(includeParent, collector) + return collector +} + +func (t *objectType) resolvedParent() *objectType { + tp := t.parent + for { + 
		switch at := tp.(type) {
+		case nil:
+			return nil
+		case *objectType:
+			return at
+		case *TypeAliasType:
+			tp = at.resolvedType
+		default:
+			panic(px.Error(px.IllegalObjectInheritance, issue.H{`label`: t.Label(), `type`: tp.PType().String()}))
+		}
+	}
+}
+
+// setCreators takes one or two arguments. The first function is for positional arguments, the second
+// for named arguments (which expects exactly one argument, a Hash).
+func (t *objectType) setCreators(creators ...px.DispatchFunction) {
+	t.creators = creators
+}
+
+func (t *objectType) typeParameters(includeParent bool) *hash.StringHash {
+	c := hash.NewStringHash(5)
+	t.collectParameters(includeParent, c)
+	return c
+}
+
+func compressedMembersHash(mh *hash.StringHash) *Hash {
+	he := make([]*HashEntry, 0, mh.Len())
+	mh.EachPair(func(key string, value interface{}) {
+		fh := value.(px.AnnotatedMember).InitHash()
+		if fh.Len() == 1 {
+			tp := fh.Get5(keyType, nil)
+			if tp != nil {
+				he = append(he, WrapHashEntry2(key, tp))
+				return
+			}
+		}
+		he = append(he, WrapHashEntry2(key, fh))
+	})
+	return WrapHash(he)
+}
+
+func resolveTypeRefs(c px.Context, v interface{}) px.Value {
+	switch v := v.(type) {
+	case *Hash:
+		he := make([]*HashEntry, v.Len())
+		i := 0
+		v.EachPair(func(key, value px.Value) {
+			he[i] = WrapHashEntry(
+				resolveTypeRefs(c, key), resolveTypeRefs(c, value))
+			i++
+		})
+		return WrapHash(he)
+	case *Array:
+		ae := make([]px.Value, v.Len())
+		i := 0
+		v.Each(func(value px.Value) {
+			ae[i] = resolveTypeRefs(c, value)
+			i++
+		})
+		return WrapValues(ae)
+	case px.ResolvableType:
+		return v.Resolve(c)
+	default:
+		return v.(px.Value)
+	}
+}
+
+func newObjectType(name, typeDecl string, creators ...px.DispatchFunction) px.ObjectType {
+	ta, err := Parse(typeDecl)
+	if err != nil {
+		_, fileName, fileLine, _ := runtime.Caller(1)
+		panic(convertReported(err, fileName, fileLine))
+	}
+	if h, ok := ta.(*Hash); ok {
+		// "type = {}"
+		return MakeObjectType(name, nil, h, true, creators...)
+	}
+	if dt, ok := ta.(*DeferredType); ok {
+		ps := dt.Parameters()
+		if len(ps) == 1 {
+			if h, ok := ps[0].(*Hash); ok {
+				var p px.Type
+				if pn := dt.Name(); pn != `TypeSet` && pn != `Object` {
+					p = NewTypeReferenceType(pn)
+				}
+				return MakeObjectType(name, p, h, true, creators...)
+			}
+		}
+	}
+	panic(px.Error(px.NoDefinition, issue.H{`source`: ``, `type`: px.NsType, `name`: name}))
+}
+
+func newObjectType2(c px.Context, args ...px.Value) *objectType {
+	argc := len(args)
+	switch argc {
+	case 0:
+		return DefaultObjectType()
+	case 1:
+		arg := args[0]
+		if initHash, ok := arg.(*Hash); ok {
+			if initHash.IsEmpty() {
+				return DefaultObjectType()
+			}
+			obj := AllocObjectType()
+			obj.InitFromHash(c, initHash)
+			obj.loader = c.Loader()
+			return obj
+		}
+		panic(illegalArgumentType(`Object[]`, 0, `Hash[String,Any]`, arg.PType()))
+	default:
+		panic(illegalArgumentCount(`Object[]`, `1`, argc))
+	}
+}
+
+// MakeObjectType creates a new object type and optionally registers it as a resolvable to be picked up for resolution
+// on the next call to px.ResolveResolvables if the given register flag is true. This flag should only be set to true
+// when the call stems from a static init() function where no context is available.
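The register flag mentioned in the comment above is what the package-level metatype declarations elsewhere in this vendored file rely on: a static init() declares the type, and it is resolved later when a context runs px.ResolveResolvables. A minimal sketch of that pattern, assuming code inside this same types package; the type name My::Thing and its attributes are invented for illustration, and the definition of MakeObjectType itself follows below.

	// Hypothetical declaration; newObjectType parses the string, then calls
	// MakeObjectType with register=true so the type is picked up on the next
	// px.ResolveResolvables call rather than being resolved immediately.
	var myThingType = newObjectType(`My::Thing`,
		`{ attributes => {
			name => String,
			size => Optional[Integer]
		} }`)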
+func MakeObjectType(name string, parent px.Type, initHash px.OrderedMap, register bool, creators ...px.DispatchFunction) *objectType { + if name == `` { + name = initHash.Get5(`name`, emptyString).String() + } + obj := AllocObjectType() + obj.name = name + obj.initHashExpression = initHash + obj.parent = parent + obj.setCreators(creators...) + if register { + registerResolvableType(obj) + } + return obj +} + +func newGoObjectType(name string, rType reflect.Type, typeDecl string, creators ...px.DispatchFunction) px.ObjectType { + t := newObjectType(name, typeDecl, creators...) + registerMapping(t, rType) + return t +} diff --git a/vendor/github.com/lyraproj/pcore/types/objecttypeextension.go b/vendor/github.com/lyraproj/pcore/types/objecttypeextension.go new file mode 100644 index 0000000..ccd78d3 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/objecttypeextension.go @@ -0,0 +1,258 @@ +package types + +import ( + "io" + + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" +) + +type objectTypeExtension struct { + baseType *objectType + parameters *hash.StringHash +} + +var ObjectTypeExtensionMetaType px.ObjectType + +func init() { + ObjectTypeExtensionMetaType = newObjectType(`Pcore::ObjectTypeExtensionType`, + `Pcore::AnyType { + attributes => { + base_type => Type, + init_parameters => Array + } + }`) +} + +func NewObjectTypeExtension(c px.Context, baseType px.ObjectType, initParameters []px.Value) *objectTypeExtension { + o := &objectTypeExtension{} + o.initialize(c, baseType.(*objectType), initParameters) + return o +} + +func (te *objectTypeExtension) Accept(v px.Visitor, g px.Guard) { + v(te) + te.baseType.Accept(v, g) +} + +func (te *objectTypeExtension) Annotations(c px.Context) px.OrderedMap { + return te.baseType.Annotations(c) +} + +func (te *objectTypeExtension) Constructor(c px.Context) px.Function { + return te.baseType.Constructor(c) +} + +func (te *objectTypeExtension) Default() px.Type { + return te.baseType.Default() +} + +func (te *objectTypeExtension) Equals(other interface{}, g px.Guard) bool { + op, ok := other.(*objectTypeExtension) + return ok && te.baseType.Equals(op.baseType, g) && te.parameters.Equals(op.parameters, g) +} + +func (te *objectTypeExtension) Functions(includeParent bool) []px.ObjFunc { + return te.baseType.Functions(includeParent) +} + +func (te *objectTypeExtension) Generalize() px.Type { + return te.baseType +} + +func (te *objectTypeExtension) Get(key string) (px.Value, bool) { + return te.baseType.Get(key) +} + +func (te *objectTypeExtension) GoType() reflect.Type { + return te.baseType.GoType() +} + +func (te *objectTypeExtension) HasHashConstructor() bool { + return te.baseType.HasHashConstructor() +} + +func (te *objectTypeExtension) Implements(ifd px.ObjectType, g px.Guard) bool { + return te.baseType.Implements(ifd, g) +} + +func (te *objectTypeExtension) IsInterface() bool { + return false +} + +func (te *objectTypeExtension) IsAssignable(t px.Type, g px.Guard) bool { + if ote, ok := t.(*objectTypeExtension); ok { + return te.baseType.IsAssignable(ote.baseType, g) && te.testAssignable(ote.parameters, g) + } + if ot, ok := t.(*objectType); ok { + return te.baseType.IsAssignable(ot, g) && te.testAssignable(hash.EmptyStringHash, g) + } + return false +} + +func (te *objectTypeExtension) IsMetaType() bool { + return te.baseType.IsMetaType() +} + +func (te *objectTypeExtension) IsParameterized() bool { + return true +} + +func (te *objectTypeExtension) IsInstance(v 
px.Value, g px.Guard) bool { + return te.baseType.IsInstance(v, g) && te.testInstance(v, g) +} + +func (te *objectTypeExtension) Member(name string) (px.CallableMember, bool) { + return te.baseType.Member(name) +} + +func (te *objectTypeExtension) MetaType() px.ObjectType { + return ObjectTypeExtensionMetaType +} + +func (te *objectTypeExtension) Name() string { + return te.baseType.Name() +} + +func (te *objectTypeExtension) Parameters() []px.Value { + pts := te.baseType.typeParameters(true) + n := pts.Len() + if n > 2 { + return []px.Value{WrapStringPValue(te.parameters)} + } + params := make([]px.Value, 0, n) + top := 0 + idx := 0 + pts.EachKey(func(k string) { + v, ok := te.parameters.Get3(k) + if ok { + top = idx + 1 + } else { + v = WrapDefault() + } + params = append(params, v.(px.Value)) + idx++ + }) + return params[:top] +} + +func (te *objectTypeExtension) FromReflectedValue(c px.Context, src reflect.Value) px.PuppetObject { + return te.baseType.FromReflectedValue(c, src) +} + +func (te *objectTypeExtension) Parent() px.Type { + return te.baseType.Parent() +} + +func (te *objectTypeExtension) ToReflectedValue(c px.Context, src px.PuppetObject, dest reflect.Value) { + te.baseType.ToReflectedValue(c, src, dest) +} + +func (te *objectTypeExtension) String() string { + return px.ToString2(te, None) +} + +func (te *objectTypeExtension) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + TypeToString(te, bld, format, g) +} + +func (te *objectTypeExtension) PType() px.Type { + return &TypeType{te} +} + +func (te *objectTypeExtension) initialize(c px.Context, baseType *objectType, initParameters []px.Value) { + pts := baseType.typeParameters(true) + pvs := pts.Values() + if pts.IsEmpty() { + panic(px.Error(px.NotParameterizedType, issue.H{`type`: baseType.Label()})) + } + te.baseType = baseType + namedArgs := false + if len(initParameters) == 1 { + _, namedArgs = initParameters[0].(*Hash) + } + + if namedArgs { + namedArgs = pts.Len() >= 1 && !px.IsInstance(pvs[0].(*typeParameter).Type(), initParameters[0]) + } + + checkParam := func(tp *typeParameter, v px.Value) px.Value { + return px.AssertInstance(func() string { return tp.Label() }, tp.Type(), v) + } + + byName := hash.NewStringHash(pts.Len()) + if namedArgs { + h := initParameters[0].(*Hash) + h.EachPair(func(k, pv px.Value) { + pn := k.String() + tp := pts.Get(pn, nil) + if tp == nil { + panic(px.Error(px.MissingTypeParameter, issue.H{`name`: pn, `label`: baseType.Label()})) + } + if !pv.Equals(WrapDefault(), nil) { + byName.Put(pn, checkParam(tp.(*typeParameter), pv)) + } + }) + } else { + for idx, t := range pvs { + if idx < len(initParameters) { + tp := t.(*typeParameter) + pv := initParameters[idx] + if !pv.Equals(WrapDefault(), nil) { + byName.Put(tp.Name(), checkParam(tp, pv)) + } + } + } + } + if byName.IsEmpty() { + panic(px.Error(px.EmptyTypeParameterList, issue.H{`label`: baseType.Label()})) + } + te.parameters = byName +} + +func (te *objectTypeExtension) AttributesInfo() px.AttributesInfo { + return te.baseType.AttributesInfo() +} + +// Checks that the given `paramValues` hash contains all keys present in the `parameters` of +// this instance and that each keyed value is a match for the given parameter. The match is done +// using case expression semantics. +// +// This method is only called when a given type is found to be assignable to the base type of +// this extension. 
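To make the per-parameter matching that the comment above describes concrete, here is a rough sketch, not part of the patch: it assumes a px.Context c and a base px.ObjectType whose declaration contains a type_parameters section with a single size parameter; those names are assumptions for illustration only.

	// Parameterize the base type positionally and by name; per initialize()
	// above, both forms end up with the same parameters hash (size => 4).
	byPos := NewObjectTypeExtension(c, base, []px.Value{WrapInteger(4)})
	byName := NewObjectTypeExtension(c, base, []px.Value{
		WrapStringToInterfaceMap(c, issue.H{`size`: 4}),
	})

	// testAssignable (below) then compares extensions parameter by parameter
	// using case-expression semantics, so equal parameter values match.
	ok := byPos.IsAssignable(byName, nil) // expected to be true: 4 matches 4
	_ = ok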
+func (te *objectTypeExtension) testAssignable(paramValues *hash.StringHash, g px.Guard) bool { + // Default implementation performs case expression style matching of all parameter values + // provided that the value exist (this should always be the case, since all defaults have + // been assigned at this point) + return te.parameters.AllPair(func(key string, v1 interface{}) bool { + if v2, ok := paramValues.Get3(key); ok { + a := v2.(px.Value) + b := v1.(px.Value) + if px.PuppetMatch(a, b) { + return true + } + if at, ok := a.(px.Type); ok { + if bt, ok := b.(px.Type); ok { + return px.IsAssignable(bt, at) + } + } + } + return false + }) +} + +// Checks that the given instance `o` has one attribute for each key present in the `parameters` of +// this instance and that each attribute value is a match for the given parameter. The match is done +// using case expression semantics. +// +// This method is only called when the given value is found to be an instance of the base type of +// this extension. +func (te *objectTypeExtension) testInstance(o px.Value, g px.Guard) bool { + return te.parameters.AllPair(func(key string, v1 interface{}) bool { + v2, ok := te.baseType.GetValue(key, o) + return ok && px.PuppetMatch(v2, v1.(px.Value)) + }) +} diff --git a/vendor/github.com/lyraproj/pcore/types/objectvalue.go b/vendor/github.com/lyraproj/pcore/types/objectvalue.go new file mode 100644 index 0000000..7e537be --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/objectvalue.go @@ -0,0 +1,484 @@ +package types + +import ( + "fmt" + "io" + "reflect" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type typedObject struct { + typ px.ObjectType +} + +func (o *typedObject) PType() px.Type { + return o.typ +} + +func (o *typedObject) valuesFromHash(c px.Context, hash px.OrderedMap) []px.Value { + typ := o.typ.(*objectType) + va := typ.AttributesInfo().PositionalFromHash(hash) + if len(va) > 0 && typ.IsParameterized() { + params := make([]*HashEntry, 0) + typ.typeParameters(true).EachPair(func(k string, v interface{}) { + if pv, ok := hash.Get4(k); ok && px.IsInstance(v.(*typeParameter).typ, pv) { + params = append(params, WrapHashEntry2(k, pv)) + } + }) + if len(params) > 0 { + o.typ = NewObjectTypeExtension(c, typ, []px.Value{WrapHash(params)}) + } + } + return va +} + +type attributeSlice struct { + typedObject + values []px.Value +} + +func AllocObjectValue(typ px.ObjectType) px.Object { + if typ.IsMetaType() { + return AllocObjectType() + } + if rf := typ.GoType(); rf != nil { + if rf.Kind() == reflect.Ptr && rf.Elem().Kind() == reflect.Struct { + rf = rf.Elem() + } + return &reflectedObject{typedObject{typ}, reflect.New(rf).Elem()} + } + return &attributeSlice{typedObject{typ}, px.EmptyValues} +} + +func NewReflectedValue(typ px.ObjectType, value reflect.Value) px.Object { + if value.Kind() == reflect.Func { + return &reflectedFunc{typedObject{typ}, value} + } + return &reflectedObject{typedObject{typ}, value} +} + +func NewObjectValue(c px.Context, typ px.ObjectType, values []px.Value) (ov px.Object) { + ov = AllocObjectValue(typ) + ov.Initialize(c, values) + return ov +} + +func newObjectValue2(c px.Context, typ px.ObjectType, hash *Hash) (ov px.Object) { + ov = AllocObjectValue(typ) + ov.InitFromHash(c, hash) + return ov +} + +func (o *attributeSlice) Reflect(c px.Context) reflect.Value { + ot := o.PType().(px.ReflectedType) + if v, ok := ot.ReflectType(c); ok { + rv := reflect.New(v.Elem()) + o.ReflectTo(c, rv.Elem()) + return rv + } + 
panic(px.Error(px.UnreflectableValue, issue.H{`type`: o.PType()})) +} + +func (o *attributeSlice) ReflectTo(c px.Context, value reflect.Value) { + o.typ.ToReflectedValue(c, o, value) +} + +func (o *attributeSlice) Initialize(c px.Context, values []px.Value) { + if len(values) > 0 && o.typ.IsParameterized() { + o.InitFromHash(c, makeValueHash(o.typ.AttributesInfo(), values)) + return + } + fillValueSlice(values, o.typ.AttributesInfo().Attributes()) + o.values = values +} + +func (o *attributeSlice) InitFromHash(c px.Context, hash px.OrderedMap) { + o.values = o.valuesFromHash(c, hash) +} + +// Ensure that all entries in the value slice that are nil receive default values from the given attributes +func fillValueSlice(values []px.Value, attrs []px.Attribute) { + for ix, v := range values { + if v == nil { + at := attrs[ix] + if at.Kind() == givenOrDerived { + values[ix] = undef + } else { + if !at.HasValue() { + panic(px.Error(px.MissingRequiredAttribute, issue.H{`label`: at.Label()})) + } + values[ix] = at.Value() + } + } + } +} + +func (o *attributeSlice) Get(key string) (px.Value, bool) { + pi := o.typ.AttributesInfo() + if idx, ok := pi.NameToPos()[key]; ok { + if idx < len(o.values) { + return o.values[idx], ok + } + a := pi.Attributes()[idx] + if a.Kind() == givenOrDerived { + return undef, true + } + return a.Value(), ok + } + return nil, false +} + +func (o *attributeSlice) Call(c px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (result px.Value, ok bool) { + if v, ok := px.Load(c, NewTypedName(px.NsFunction, strings.ToLower(o.typ.Name())+`::`+method.Name())); ok { + if f, ok := v.(px.Function); ok { + return f.Call(c, block, args...), true + } + } + return nil, false +} + +func (o *attributeSlice) Equals(other interface{}, g px.Guard) bool { + if ov, ok := other.(*attributeSlice); ok { + return o.typ.Equals(ov.typ, g) && px.Equals(o.values, ov.values, g) + } + return false +} + +func (o *attributeSlice) String() string { + return px.ToString(o) +} + +func (o *attributeSlice) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + ObjectToString(o, s, b, g) +} + +func (o *attributeSlice) InitHash() px.OrderedMap { + return makeValueHash(o.typ.AttributesInfo(), o.values) +} + +// Turn a positional argument list into a hash. The hash will exclude all values +// that are equal to the default value of the corresponding attribute +func makeValueHash(pi px.AttributesInfo, values []px.Value) *Hash { + at := pi.Attributes() + entries := make([]*HashEntry, 0, len(at)) + for i, v := range values { + attr := at[i] + if !(attr.HasValue() && v.Equals(attr.Value(), nil) || attr.Kind() == givenOrDerived && v.Equals(undef, nil)) { + entries = append(entries, WrapHashEntry2(attr.Name(), v)) + } + } + return WrapHash(entries) +} + +type reflectedObject struct { + typedObject + value reflect.Value +} + +func (o *reflectedObject) Call(c px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (result px.Value, ok bool) { + m, ok := o.value.Type().MethodByName(method.GoName()) + if !ok { + return nil, false + } + + mt := m.Type + rf := c.Reflector() + var vat reflect.Type + + // argc, the number of arguments + the mandatory call receiver + argc := len(args) + 1 + + // number of expected arguments + top := mt.NumIn() + last := top - 1 + + if mt.IsVariadic() { + if argc < last { + // Must be at least expected number of arguments minus one (variadic can have a zero count) + panic(fmt.Errorf("argument count error. 
Expected at least %d, got %d", last, argc)) + } + + // Slice big enough to hold all variadics + vat = mt.In(last).Elem() + } else { + if top != argc { + panic(fmt.Errorf("argument count error. Expected %d, got %d", top, argc)) + } + } + + rfArgs := make([]reflect.Value, argc) + rfArgs[0] = o.value + + for i, arg := range args { + pn := i + 1 + var tp reflect.Type + if pn >= last && vat != nil { + tp = vat + } else { + tp = mt.In(pn) + } + av := reflect.New(tp).Elem() + rf.ReflectTo(arg, av) + rfArgs[pn] = av + } + + rr := method.(px.CallableGoMember).CallGoReflected(c, rfArgs) + + switch len(rr) { + case 0: + return undef, true + case 1: + r := rr[0] + if r.IsValid() { + return wrapReflected(c, r), true + } else { + return undef, true + } + default: + rs := make([]px.Value, len(rr)) + for i, r := range rr { + if r.IsValid() { + rs[i] = wrapReflected(c, r) + } else { + rs[i] = undef + } + } + return WrapValues(rs), true + } +} + +func (o *reflectedObject) Reflect(c px.Context) reflect.Value { + return o.value +} + +func (o *reflectedObject) ReflectTo(c px.Context, value reflect.Value) { + if o.value.Kind() == reflect.Struct && value.Kind() == reflect.Ptr { + value.Set(o.value.Addr()) + } else { + value.Set(o.value) + } +} + +func (o *reflectedObject) Initialize(c px.Context, values []px.Value) { + if len(values) > 0 && o.typ.IsParameterized() { + o.InitFromHash(c, makeValueHash(o.typ.AttributesInfo(), values)) + return + } + pi := o.typ.AttributesInfo() + attrs := pi.Attributes() + if len(attrs) > 0 { + attrs := pi.Attributes() + fillValueSlice(values, attrs) + o.setValues(c, values) + } else if len(values) == 1 { + values[0].(px.Reflected).ReflectTo(c, o.value) + } +} + +func (o *reflectedObject) InitFromHash(c px.Context, hash px.OrderedMap) { + o.setValues(c, o.valuesFromHash(c, hash)) +} + +func (o *reflectedObject) setValues(c px.Context, values []px.Value) { + attrs := o.typ.AttributesInfo().Attributes() + rf := c.Reflector() + if len(attrs) == 1 && attrs[0].GoName() == keyValue { + rf.ReflectTo(values[0], o.value) + } else { + oe := o.structVal() + for i, a := range attrs { + var v px.Value + if i < len(values) { + v = values[i] + } else { + if a.HasValue() { + v = a.Value() + } else { + v = undef + } + } + rf.ReflectTo(v, oe.FieldByName(a.GoName())) + } + } +} + +func (o *reflectedObject) Get(key string) (px.Value, bool) { + pi := o.typ.AttributesInfo() + if idx, ok := pi.NameToPos()[key]; ok { + attr := pi.Attributes()[idx] + if attr.GoName() == keyValue { + return WrapPrimitive(o.value) + } + rf := o.structVal().FieldByName(attr.GoName()) + if rf.IsValid() { + return wrap(nil, rf), true + } + a := pi.Attributes()[idx] + if a.Kind() == givenOrDerived { + return undef, true + } + return a.Value(), ok + } + return nil, false +} + +func (o *reflectedObject) Equals(other interface{}, g px.Guard) bool { + if o == other { + return true + } + if ov, ok := other.(*reflectedObject); ok { + for _, a := range o.typ.AttributesInfo().Attributes() { + if !a.Get(o).Equals(a.Get(ov), g) { + return false + } + } + return true + } + return false +} + +func (o *reflectedObject) String() string { + return px.ToString(o) +} + +func (o *reflectedObject) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + ObjectToString(o, s, b, g) +} + +func (o *reflectedObject) InitHash() px.OrderedMap { + pi := o.typ.AttributesInfo() + at := pi.Attributes() + nc := len(at) + if nc == 0 { + return px.EmptyMap + } + + if nc == 1 { + attr := at[0] + if attr.GoName() == keyValue { + pv, _ := WrapPrimitive(o.value) 
+ return singletonMap(`value`, pv) + } + } + + entries := make([]*HashEntry, 0, nc) + oe := o.structVal() + c := px.CurrentContext() + for _, attr := range pi.Attributes() { + gn := attr.GoName() + if gn != `` { + v := wrapReflected(c, oe.FieldByName(gn)) + if !(attr.HasValue() && v.Equals(attr.Value(), nil) || attr.Kind() == givenOrDerived && v.Equals(undef, nil)) { + entries = append(entries, WrapHashEntry2(attr.Name(), v)) + } + } + } + return WrapHash(entries) +} + +func (o *reflectedObject) structVal() reflect.Value { + oe := o.value + if oe.Kind() == reflect.Ptr { + oe = oe.Elem() + } + return oe +} + +type reflectedFunc struct { + typedObject + function reflect.Value +} + +func (o *reflectedFunc) Call(c px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (result px.Value, ok bool) { + mt := o.function.Type() + rf := c.Reflector() + rfArgs := make([]reflect.Value, len(args)) + for i, arg := range args { + av := reflect.New(mt.In(i)).Elem() + rf.ReflectTo(arg, av) + rfArgs[i] = av + } + + pc := mt.NumIn() + if pc != len(args) { + panic(px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeSignatures( + []px.Signature{method.CallableType().(*CallableType)}, NewTupleType([]px.Type{}, NewIntegerType(int64(pc-1), int64(pc-1))), nil)})) + } + rr := o.function.Call(rfArgs) + + oc := mt.NumOut() + + if method.ReturnsError() { + oc-- + err := rr[oc].Interface() + if err != nil { + if re, ok := err.(issue.Reported); ok { + panic(re) + } + panic(px.Error(px.GoFunctionError, issue.H{`name`: mt.Name(), `error`: err})) + } + rr = rr[:oc] + } + + switch len(rr) { + case 0: + return undef, true + case 1: + r := rr[0] + if r.IsValid() { + return wrap(c, r), true + } else { + return undef, true + } + default: + rs := make([]px.Value, len(rr)) + for i, r := range rr { + if r.IsValid() { + rs[i] = wrap(c, r) + } else { + rs[i] = undef + } + } + return WrapValues(rs), true + } +} + +func (o *reflectedFunc) Reflect(c px.Context) reflect.Value { + return o.function +} + +func (o *reflectedFunc) ReflectTo(c px.Context, value reflect.Value) { + value.Set(o.function) +} + +func (o *reflectedFunc) Initialize(c px.Context, arguments []px.Value) { +} + +func (o *reflectedFunc) InitFromHash(c px.Context, hash px.OrderedMap) { +} + +func (o *reflectedFunc) Get(key string) (px.Value, bool) { + return nil, false +} + +func (o *reflectedFunc) Equals(other interface{}, g px.Guard) bool { + if ov, ok := other.(*reflectedFunc); ok { + return o.typ.Equals(ov.typ, g) && o.function == ov.function + } + return false +} + +func (o *reflectedFunc) String() string { + return px.ToString(o) +} + +func (o *reflectedFunc) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + ObjectToString(o, s, b, g) +} + +func (o *reflectedFunc) InitHash() px.OrderedMap { + return px.EmptyMap +} diff --git a/vendor/github.com/lyraproj/pcore/types/optionaltype.go b/vendor/github.com/lyraproj/pcore/types/optionaltype.go new file mode 100644 index 0000000..1e1ba46 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/optionaltype.go @@ -0,0 +1,150 @@ +package types + +import ( + "io" + + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type OptionalType struct { + typ px.Type +} + +var OptionalMetaType px.ObjectType + +func init() { + OptionalMetaType = newObjectType(`Pcore::OptionalType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newOptionalType2(args...) 
+ }) +} + +func DefaultOptionalType() *OptionalType { + return optionalTypeDefault +} + +func NewOptionalType(containedType px.Type) *OptionalType { + if containedType == nil || containedType == anyTypeDefault { + return DefaultOptionalType() + } + return &OptionalType{containedType} +} + +func newOptionalType2(args ...px.Value) *OptionalType { + switch len(args) { + case 0: + return DefaultOptionalType() + case 1: + if containedType, ok := args[0].(px.Type); ok { + return NewOptionalType(containedType) + } + if containedType, ok := args[0].(stringValue); ok { + return newOptionalType3(string(containedType)) + } + panic(illegalArgumentType(`Optional[]`, 0, `Variant[Type,String]`, args[0])) + default: + panic(illegalArgumentCount(`Optional[]`, `0 - 1`, len(args))) + } +} + +func newOptionalType3(str string) *OptionalType { + return &OptionalType{NewStringType(nil, str)} +} + +func (t *OptionalType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *OptionalType) ContainedType() px.Type { + return t.typ +} + +func (t *OptionalType) Default() px.Type { + return optionalTypeDefault +} + +func (t *OptionalType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*OptionalType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *OptionalType) Generic() px.Type { + return NewOptionalType(px.GenericType(t.typ)) +} + +func (t *OptionalType) Get(key string) (value px.Value, ok bool) { + switch key { + case `type`: + return t.typ, true + } + return nil, false +} + +func (t *OptionalType) IsAssignable(o px.Type, g px.Guard) bool { + return GuardedIsAssignable(o, undefTypeDefault, g) || GuardedIsAssignable(t.typ, o, g) +} + +func (t *OptionalType) IsInstance(o px.Value, g px.Guard) bool { + return o == undef || GuardedIsInstance(t.typ, o, g) +} + +func (t *OptionalType) MetaType() px.ObjectType { + return OptionalMetaType +} + +func (t *OptionalType) Name() string { + return `Optional` +} + +func (t *OptionalType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + if str, ok := t.typ.(*vcStringType); ok && str.value != `` { + return []px.Value{stringValue(str.value)} + } + return []px.Value{t.typ} +} + +func (t *OptionalType) ReflectType(c px.Context) (reflect.Type, bool) { + return ReflectType(c, t.typ) +} + +func (t *OptionalType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *OptionalType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *OptionalType) SerializationString() string { + return t.String() +} + +func (t *OptionalType) String() string { + return px.ToString2(t, None) +} + +func (t *OptionalType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *OptionalType) PType() px.Type { + return &TypeType{t} +} + +var optionalTypeDefault = &OptionalType{typ: anyTypeDefault} diff --git a/vendor/github.com/lyraproj/pcore/types/parser.go b/vendor/github.com/lyraproj/pcore/types/parser.go new file mode 100644 index 0000000..1c95f7b --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/parser.go @@ -0,0 +1,386 @@ +package types + +import ( + "errors" + "fmt" + "strconv" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +// States: +const ( + exElement = 0 // Expect value literal + exParam = 1 // Expect value literal + exKey = 2 // Expect exKey or end of hash + exValue = 3 // Expect value + exEntryValue = 4 // Expect value + 
exPEntryValue = 5 + exRocket = 6 // Expect rocket + exListComma = 7 // Expect comma or end of array + exParamsComma = 8 // Expect comma or end of parameter list + exHashComma = 9 // Expect comma or end of hash +) + +var breakCollection = errors.New(`bc`) + +var breakScan = errors.New(`b`) + +const ParseError = `PARSE_ERROR` + +func init() { + issue.Hard(ParseError, `%{message}`) +} + +func expect(state int) (s string) { + switch state { + case exElement, exParam: + s = `literal` + case exKey: + s = `entry key` + case exValue, exEntryValue: + s = `entry value` + case exRocket: + s = `'=>'` + case exListComma: + s = `one of ',' or ']'` + case exParamsComma: + s = `one of ',' or ')'` + case exHashComma: + s = `one of ',' or '}'` + } + return +} + +func Parse(s string) (px.Value, error) { + d := NewCollector() + + sr := utils.NewStringReader(s) + var sf func(t token) error + var tp px.Value + state := exElement + arrayHash := false + + badSyntax := func(t token) error { + var ts string + if t.i == 0 { + ts = `EOF` + } else { + ts = t.s + } + return fmt.Errorf(`expected %s, got '%s'`, expect(state), ts) + } + + sf = func(t token) (err error) { + if tp != nil { + if t.i == leftBracket || t.i == leftParen || t.i == leftCurlyBrace { + // Revert state to previous + if state == exListComma { + state = exElement + } else if state == exParamsComma { + state = exParam + } else if state == exHashComma { + state = exValue + } else if state == exRocket { + state = exKey + } + } else { + d.Add(tp) + tp = nil + } + } + switch t.i { + case end: + if state != exListComma { + err = errors.New(`unexpected end of input`) + } + if arrayHash { + he := d.PopLast().(px.MapEntry) + d.Add(singleMap(he.Key(), he.Value())) + } + case rightCurlyBrace: + if state != exHashComma && state != exKey { + err = badSyntax(t) + } else { + err = breakCollection + } + case rightBracket: + if state != exListComma && state != exElement { + err = badSyntax(t) + } else { + err = breakCollection + } + case rightParen: + if state != exParamsComma && state != exElement { + err = badSyntax(t) + } else { + err = breakCollection + } + case rocket: + if state == exRocket { + state = exValue + } else if state == exListComma { + // Entry + state = exEntryValue + arrayHash = true + } else if state == exParamsComma { + // Entry + state = exPEntryValue + arrayHash = true + } else { + err = badSyntax(t) + } + case comma: + if state == exListComma { + state = exElement + } else if state == exParamsComma { + state = exParam + } else if state == exHashComma { + state = exKey + } else { + err = badSyntax(t) + } + default: + entry := state == exEntryValue || state == exPEntryValue + switch state { + case exElement, exEntryValue: + state = exListComma + case exParam, exPEntryValue: + state = exParamsComma + case exKey: + state = exRocket + case exValue: + state = exHashComma + default: + err = badSyntax(t) + } + switch t.i { + case leftCurlyBrace: + stp := tp + sv := state + state = exKey + tp = nil + d.AddHash(0, func() { err = scan(sr, sf) }) + if err != breakCollection { + break + } + err = nil + state = sv + if stp == nil { + break + } + ps := []px.Value{d.PopLast()} + if he, ok := stp.(*HashEntry); ok { + he.Value().(*DeferredType).params = ps + } else { + stp.(*DeferredType).params = ps + } + d.Add(stp) + case leftBracket: + stp := tp + saveSt := state + saveAh := arrayHash + arrayHash = false + state = exElement + tp = nil + d.AddArray(0, func() { err = scan(sr, sf) }) + if arrayHash { + d.Add(fixArrayHash(d.PopLast().(*Array))) + } + arrayHash = 
saveAh + if err != breakCollection { + if err == nil { + state = exListComma + return badSyntax(token{i: end}) + } + break + } + err = nil + state = saveSt + if stp == nil { + break + } + ll := d.PopLast().(*Array) + var dp *DeferredType + if he, ok := stp.(*HashEntry); ok { + dp = he.Value().(*DeferredType) + } else { + dp = stp.(*DeferredType) + } + dp.params = ll.AppendTo(make([]px.Value, 0, ll.Len())) + d.Add(stp) + case leftParen: + stp := tp + sv := state + saveAh := arrayHash + arrayHash = false + state = exParam + tp = nil + d.AddArray(0, func() { err = scan(sr, sf) }) + if arrayHash { + d.Add(fixArrayHash(d.PopLast().(*Array))) + } + arrayHash = saveAh + if err != breakCollection { + if err == nil { + state = exParamsComma + return badSyntax(token{i: end}) + } + break + } + err = nil + state = sv + if stp == nil { + break + } + ll := d.PopLast().(*Array) + if he, ok := stp.(*HashEntry); ok { + dt := he.Value().(*DeferredType).tn + if dt != `Deferred` { + params := append(make([]px.Value, 0, ll.Len()+1), WrapString(dt)) + stp = WrapHashEntry(he.Key(), NewDeferred(`new`, ll.AppendTo(params)...)) + } else { + params := ll.Slice(1, ll.Len()).AppendTo(make([]px.Value, 0, ll.Len()-1)) + stp = WrapHashEntry(he.Key(), NewDeferred(ll.At(0).String(), params...)) + } + } else { + dt := stp.(*DeferredType).tn + if dt != `Deferred` { + params := append(make([]px.Value, 0, ll.Len()+1), WrapString(dt)) + stp = NewDeferred(`new`, ll.AppendTo(params)...) + } else { + params := ll.Slice(1, ll.Len()).AppendTo(make([]px.Value, 0, ll.Len()-1)) + stp = NewDeferred(ll.At(0).String(), params...) + } + } + d.Add(stp) + case integer: + var i int64 + i, err = strconv.ParseInt(t.s, 0, 64) + if err == nil { + d.Add(WrapInteger(i)) + } + case float: + var f float64 + f, err = strconv.ParseFloat(t.s, 64) + if err == nil { + d.Add(WrapFloat(f)) + } + case identifier: + switch t.s { + case `true`: + d.Add(BooleanTrue) + case `false`: + d.Add(BooleanFalse) + case `default`: + d.Add(WrapDefault()) + case `undef`: + d.Add(undef) + default: + d.Add(WrapString(t.s)) + } + case stringLiteral: + d.Add(WrapString(t.s)) + case regexpLiteral: + d.Add(WrapRegexp(t.s)) + case name: + tp = &DeferredType{tn: t.s} + } + if err == nil && entry { + // Concatenate last two values to a HashEntry + if tp != nil { + tp = WrapHashEntry(d.PopLast(), tp) + } else { + v := d.PopLast() + d.Add(WrapHashEntry(d.PopLast(), v)) + } + } + } + return err + } + + // Initial lex receiver deals with alias syntax "type X = " + typeName := `` + initial := func(t token) (err error) { + if t.i == identifier && t.s == `type` { + err = scan(sr, func(t token) (err error) { + switch t.i { + case name: + typeName = t.s + err = scan(sr, func(t token) (err error) { + if t.i == equal { + err = scan(sr, sf) + if err == nil { + err = breakScan + } + } else { + err = badSyntax(t) + } + return + }) + case comma: + state = exElement + d.Add(WrapString(`type`)) + err = scan(sr, sf) + case rocket: + state = exEntryValue + d.Add(WrapString(`type`)) + err = scan(sr, sf) + default: + err = badSyntax(t) + } + if err == nil { + err = breakScan + } + return + }) + } else { + err = sf(t) + if err == nil && state != end { + err = scan(sr, sf) + } + } + if err == nil { + err = breakScan + } + return + } + + err := scan(sr, initial) + if err != nil && err != breakScan { + return nil, px.Error2(issue.NewLocation(``, sr.Line(), sr.Column()), ParseError, issue.H{`message`: err.Error()}) + } + dv := d.Value() + if typeName != `` { + dv = NamedType(px.RuntimeNameAuthority, 
typeName, dv) + } + return dv, nil +} + +func fixArrayHash(av *Array) *Array { + es := make([]px.Value, 0, av.Len()) + + // Array may contain hash entries that must be concatenated into a single hash + var en []*HashEntry + av.Each(func(v px.Value) { + if he, ok := v.(*HashEntry); ok { + if en == nil { + en = []*HashEntry{he} + } else { + en = append(en, he) + } + } else { + if en != nil { + es = append(es, WrapHash(en)) + en = nil + } + es = append(es, v) + } + }) + if en != nil { + es = append(es, WrapHash(en)) + } + return WrapValues(es) +} diff --git a/vendor/github.com/lyraproj/pcore/types/patterntype.go b/vendor/github.com/lyraproj/pcore/types/patterntype.go new file mode 100644 index 0000000..a10edbf --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/patterntype.go @@ -0,0 +1,181 @@ +package types + +import ( + "io" + + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type PatternType struct { + regexps []*RegexpType +} + +var PatternMetaType px.ObjectType + +func init() { + PatternMetaType = newObjectType(`Pcore::PatternType`, + `Pcore::ScalarDataType { + attributes => { + patterns => Array[Regexp] + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newPatternType2(args...) + }) +} + +func DefaultPatternType() *PatternType { + return patternTypeDefault +} + +func NewPatternType(regexps []*RegexpType) *PatternType { + return &PatternType{regexps} +} + +func newPatternType2(regexps ...px.Value) *PatternType { + return newPatternType3(WrapValues(regexps)) +} + +func newPatternType3(regexps px.List) *PatternType { + + cnt := regexps.Len() + switch cnt { + case 0: + return DefaultPatternType() + case 1: + if av, ok := regexps.At(0).(*Array); ok { + return newPatternType3(av) + } + } + + rs := make([]*RegexpType, cnt) + regexps.EachWithIndex(func(arg px.Value, idx int) { + switch arg := arg.(type) { + case *RegexpType: + rs[idx] = arg + case *Regexp: + rs[idx] = arg.PType().(*RegexpType) + case stringValue: + rs[idx] = newRegexpType2(arg) + default: + panic(illegalArgumentType(`Pattern[]`, idx, `Type[Regexp], Regexp, or String`, arg)) + } + }) + return NewPatternType(rs) +} + +func (t *PatternType) Accept(v px.Visitor, g px.Guard) { + v(t) + for _, rx := range t.regexps { + rx.Accept(v, g) + } +} + +func (t *PatternType) Default() px.Type { + return patternTypeDefault +} + +func (t *PatternType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*PatternType); ok { + return len(t.regexps) == len(ot.regexps) && px.IncludesAll(t.regexps, ot.regexps, g) + } + return false +} + +func (t *PatternType) Get(key string) (value px.Value, ok bool) { + switch key { + case `patterns`: + return WrapValues(t.Parameters()), true + } + return nil, false +} + +func (t *PatternType) IsAssignable(o px.Type, g px.Guard) bool { + if _, ok := o.(*PatternType); ok { + return len(t.regexps) == 0 + } + + if _, ok := o.(*stringType); ok { + if len(t.regexps) == 0 { + return true + } + } + + if vc, ok := o.(*vcStringType); ok { + if len(t.regexps) == 0 { + return true + } + str := vc.value + return utils.MatchesString(MapToRegexps(t.regexps), str) + } + + if et, ok := o.(*EnumType); ok { + if len(t.regexps) == 0 { + return true + } + enums := et.values + return len(enums) > 0 && utils.MatchesAllStrings(MapToRegexps(t.regexps), enums) + } + return false +} + +func (t *PatternType) IsInstance(o px.Value, g px.Guard) bool { + str, ok := o.(stringValue) + return ok && (len(t.regexps) == 0 || utils.MatchesString(MapToRegexps(t.regexps), string(str))) 
+} + +func (t *PatternType) MetaType() px.ObjectType { + return PatternMetaType +} + +func (t *PatternType) Name() string { + return `Pattern` +} + +func (t *PatternType) Parameters() []px.Value { + top := len(t.regexps) + if top == 0 { + return px.EmptyValues + } + rxs := make([]px.Value, top) + for idx, rx := range t.regexps { + rxs[idx] = WrapRegexp2(rx.pattern) + } + return rxs +} + +func (t *PatternType) Patterns() *Array { + rxs := make([]px.Value, len(t.regexps)) + for idx, rx := range t.regexps { + rxs[idx] = rx + } + return WrapValues(rxs) +} + +func (t *PatternType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(`x`), true +} + +func (t *PatternType) CanSerializeAsString() bool { + return true +} + +func (t *PatternType) SerializationString() string { + return t.String() +} + +func (t *PatternType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *PatternType) String() string { + return px.ToString2(t, None) +} + +func (t *PatternType) PType() px.Type { + return &TypeType{t} +} + +var patternTypeDefault = &PatternType{[]*RegexpType{}} diff --git a/vendor/github.com/lyraproj/pcore/types/reflector.go b/vendor/github.com/lyraproj/pcore/types/reflector.go new file mode 100644 index 0000000..1e3ccdf --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/reflector.go @@ -0,0 +1,464 @@ +package types + +import ( + "math" + "reflect" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/semver/semver" +) + +const tagName = "puppet" + +type reflector struct { + c px.Context +} + +var pValueType = reflect.TypeOf((*px.Value)(nil)).Elem() + +func NewReflector(c px.Context) px.Reflector { + return &reflector{c} +} + +func Methods(t reflect.Type) []reflect.Method { + if t.Kind() == reflect.Ptr { + // Pointer may have methods + if t.NumMethod() == 0 { + t = t.Elem() + } + } + nm := t.NumMethod() + ms := make([]reflect.Method, nm) + for i := 0; i < nm; i++ { + ms[i] = t.Method(i) + } + return ms +} + +func Fields(t reflect.Type) []reflect.StructField { + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + nf := 0 + if t.Kind() == reflect.Struct { + nf = t.NumField() + } + fs := make([]reflect.StructField, nf) + for i := 0; i < nf; i++ { + fs[i] = t.Field(i) + } + return fs +} + +// NormalizeType ensures that pointers to interface is converted to interface and that struct is converted to +// pointer to struct +func NormalizeType(rt reflect.Type) reflect.Type { + switch rt.Kind() { + case reflect.Struct: + rt = reflect.PtrTo(rt) + case reflect.Ptr: + re := rt.Elem() + if re.Kind() == reflect.Interface { + rt = re + } + } + return rt +} + +func (r *reflector) Methods(t reflect.Type) []reflect.Method { + return Methods(t) +} + +func (r *reflector) Fields(t reflect.Type) []reflect.StructField { + return Fields(t) +} + +func (r *reflector) FieldName(f *reflect.StructField) string { + return FieldName(f) +} + +func FieldName(f *reflect.StructField) string { + if tagHash, ok := TagHash(f); ok { + if nv, ok := tagHash.Get4(`name`); ok { + return nv.String() + } + } + return issue.FirstToLower(f.Name) +} + +func (r *reflector) Reflect(src px.Value) reflect.Value { + if sn, ok := src.(px.Reflected); ok { + return sn.Reflect(r.c) + } + panic(px.Error(px.UnreflectableValue, issue.H{`type`: src.PType()})) +} + +func (r *reflector) Reflect2(src px.Value, rt reflect.Type) reflect.Value { + if rt != nil && rt.Kind() == reflect.Interface && rt.AssignableTo(pValueType) { + sv := 
reflect.ValueOf(src) + if sv.Type().AssignableTo(rt) { + return sv + } + } + v := reflect.New(rt).Elem() + r.ReflectTo(src, v) + return v +} + +// ReflectTo assigns the native value of src to dest +func (r *reflector) ReflectTo(src px.Value, dest reflect.Value) { + dt := dest.Type() + assertSettable(&dest) + if dt.Kind() == reflect.Interface && dt.AssignableTo(pValueType) { + sv := reflect.ValueOf(src) + if !sv.Type().AssignableTo(dt) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: sv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(sv) + } else { + switch src := src.(type) { + case px.Reflected: + if dt.Kind() == reflect.Interface && dt.Name() == `` { + // Destination is an interface{}, derive type from source + dest.Set(src.Reflect(r.c)) + } else { + src.ReflectTo(r.c, dest) + } + case px.PuppetObject: + src.PType().(px.ObjectType).ToReflectedValue(r.c, src, dest) + default: + panic(px.Error(px.InvalidSourceForSet, issue.H{`type`: src.PType()})) + } + } +} + +func (r *reflector) ReflectType(src px.Type) (reflect.Type, bool) { + return ReflectType(r.c, src) +} + +func ReflectType(c px.Context, src px.Type) (reflect.Type, bool) { + if sn, ok := src.(px.ReflectedType); ok { + return sn.ReflectType(c) + } + return nil, false +} + +func (r *reflector) TagHash(f *reflect.StructField) (px.OrderedMap, bool) { + return TagHash(f) +} + +func TagHash(f *reflect.StructField) (px.OrderedMap, bool) { + return ParseTagHash(f.Tag.Get(tagName)) +} + +func ParseTagHash(tag string) (px.OrderedMap, bool) { + if tag != `` { + tagExpr, err := Parse(`{` + tag + `}`) + if err == nil { + return tagExpr.(px.OrderedMap), true + } + } + return nil, false +} + +var errorType = reflect.TypeOf((*error)(nil)).Elem() + +func (r *reflector) FunctionDeclFromReflect(name string, mt reflect.Type, withReceiver bool) px.OrderedMap { + returnsError := false + var rt px.Type + var err error + oc := mt.NumOut() + switch oc { + case 0: + rt = DefaultAnyType() + case 1: + ot := mt.Out(0) + if ot.AssignableTo(errorType) { + returnsError = true + } else { + rt, err = wrapReflectedType(r.c, mt.Out(0)) + if err != nil { + panic(err) + } + } + case 2: + rt, err = wrapReflectedType(r.c, mt.Out(0)) + if err != nil { + panic(err) + } + ot := mt.Out(1) + if ot.AssignableTo(errorType) { + returnsError = true + } else { + var rt2 px.Type + rt2, err = wrapReflectedType(r.c, mt.Out(1)) + if err != nil { + panic(err) + } + rt = NewTupleType([]px.Type{rt, rt2}, nil) + } + default: + ot := mt.Out(oc - 1) + if ot.AssignableTo(errorType) { + returnsError = true + oc = oc - 1 + } + ts := make([]px.Type, oc) + for i := 0; i < oc; i++ { + ts[i], err = wrapReflectedType(r.c, mt.Out(i)) + if err != nil { + panic(err) + } + } + rt = NewTupleType(ts, nil) + } + + var pt *TupleType + pc := mt.NumIn() + ix := 0 + if withReceiver { + // First argument is the receiver itself + ix = 1 + } + + if pc == ix { + pt = EmptyTupleType() + } else { + ps := make([]px.Type, pc-ix) + for p := ix; p < pc; p++ { + ps[p-ix], err = wrapReflectedType(r.c, mt.In(p)) + if err != nil { + panic(err) + } + } + var sz *IntegerType + if mt.IsVariadic() { + last := pc - ix - 1 + ps[last] = ps[last].(*ArrayType).ElementType() + sz = NewIntegerType(int64(last), math.MaxInt64) + } + pt = NewTupleType(ps, sz) + } + ne := 2 + if returnsError { + ne++ + } + ds := make([]*HashEntry, ne) + ds[0] = WrapHashEntry2(keyType, NewCallableType(pt, rt, nil)) + ds[1] = WrapHashEntry2(KeyGoName, stringValue(name)) + if returnsError { + ds[2] = 
WrapHashEntry2(keyReturnsError, BooleanTrue) + } + return WrapHash(ds) +} + +func (r *reflector) InitializerFromTagged(typeName string, parent px.Type, tg px.AnnotatedType) px.OrderedMap { + rf := tg.Type() + ie := make([]*HashEntry, 0, 2) + if rf.Kind() == reflect.Func { + fn := rf.Name() + if fn == `` { + fn = `do` + } + ie = append(ie, WrapHashEntry2(keyFunctions, singletonMap(`do`, r.FunctionDeclFromReflect(fn, rf, false)))) + } else { + tags := tg.Tags() + otherTags := tg.OtherTags() + fs := r.Fields(rf) + nf := len(fs) + var pt reflect.Type + + if nf > 0 { + es := make([]*HashEntry, 0, nf) + for i, f := range fs { + if i == 0 && f.Anonymous { + // Parent + pt = reflect.PtrTo(f.Type) + continue + } + if f.PkgPath != `` { + // Unexported + continue + } + + name, decl := r.ReflectFieldTags(&f, tags[f.Name], otherTags[f.Name]) + es = append(es, WrapHashEntry2(name, decl)) + } + ie = append(ie, WrapHashEntry2(keyAttributes, WrapHash(es))) + } + + ms := r.Methods(rf) + nm := len(ms) + if nm > 0 { + es := make([]*HashEntry, 0, nm) + for _, m := range ms { + if m.PkgPath != `` { + // Not exported struct method + continue + } + + if pt != nil { + if _, ok := pt.MethodByName(m.Name); ok { + // Redeclaration's of parent method are not included + continue + } + } + es = append(es, WrapHashEntry2(issue.FirstToLower(m.Name), r.FunctionDeclFromReflect(m.Name, m.Type, rf.Kind() != reflect.Interface))) + } + ie = append(ie, WrapHashEntry2(keyFunctions, WrapHash(es))) + } + } + ats := tg.Annotations() + if ats != nil && !ats.IsEmpty() { + ie = append(ie, WrapHashEntry2(keyAnnotations, ats)) + } + return WrapHash(ie) +} + +func (r *reflector) TypeFromReflect(typeName string, parent px.Type, rf reflect.Type) px.ObjectType { + return r.TypeFromTagged(typeName, parent, px.NewTaggedType(rf, nil), nil) +} + +func (r *reflector) TypeFromTagged(typeName string, parent px.Type, tg px.AnnotatedType, rcFunc px.Doer) px.ObjectType { + return BuildObjectType(typeName, parent, func(obj px.ObjectType) px.OrderedMap { + obj.(*objectType).goType = tg + + r.c.ImplementationRegistry().RegisterType(obj, tg.Type()) + if rcFunc != nil { + rcFunc() + } + return r.InitializerFromTagged(typeName, parent, tg) + }) +} + +func (r *reflector) ReflectFieldTags(f *reflect.StructField, fh px.OrderedMap, otherTags map[string]string) (name string, decl px.OrderedMap) { + as := make([]*HashEntry, 0) + var val px.Value + var typ px.Type + + if fh != nil { + if v, ok := fh.Get4(keyName); ok { + name = v.String() + } + if v, ok := fh.GetEntry(keyKind); ok { + as = append(as, v.(*HashEntry)) + } + if v, ok := fh.GetEntry(keyValue); ok { + val = v.Value() + as = append(as, v.(*HashEntry)) + } + if v, ok := fh.Get4(keyType); ok { + switch v := v.(type) { + case *DeferredType: + typ = v.Resolve(r.c) + case px.Type: + typ = v + } + } + } + + if typ == nil { + var err error + if typ, err = px.WrapReflectedType(r.c, f.Type); err != nil { + panic(err) + } + } + + optional := typ.IsInstance(px.Undef, nil) + if optional { + if val == nil { + // If no value is declared and the type is declared as optional, then + // value is an implicit undef + as = append(as, WrapHashEntry2(keyValue, undef)) + } + } else { + if val != nil && val.Equals(undef, nil) { + // Convenience. If a value is declared as being undef, then ensure that + // type accepts undef + typ = NewOptionalType(typ) + optional = true + } + } + + if optional { + switch f.Type.Kind() { + case reflect.Ptr, reflect.Interface: + // OK. 
Can be nil + default: + // The field will always have a value (the Go zero value), so it cannot be nil. + panic(px.Error(px.ImpossibleOptional, issue.H{`name`: f.Name, `type`: typ.String()})) + } + } + + as = append(as, WrapHashEntry2(keyType, typ)) + as = append(as, WrapHashEntry2(KeyGoName, stringValue(f.Name))) + if len(otherTags) > 0 { + as = append(as, WrapHashEntry2(keyAnnotations, singleMap(TagsAnnotationType, WrapStringToStringMap(otherTags)))) + } + if name == `` { + name = issue.FirstToLower(f.Name) + } + return name, WrapHash(as) +} + +func (r *reflector) TypeSetFromReflect(typeSetName string, version semver.Version, aliases map[string]string, rTypes ...reflect.Type) px.TypeSet { + types := make([]*HashEntry, 0) + prefix := typeSetName + `::` + for _, rt := range rTypes { + var parent px.Type + fs := r.Fields(rt) + nf := len(fs) + if nf > 0 { + f := fs[0] + if f.Anonymous && f.Type.Kind() == reflect.Struct { + parent = NewTypeReferenceType(typeName(prefix, aliases, f.Type)) + } + } + name := typeName(prefix, aliases, rt) + types = append(types, WrapHashEntry2( + name[strings.LastIndex(name, `::`)+2:], + r.TypeFromReflect(name, parent, rt))) + } + + es := make([]*HashEntry, 0) + es = append(es, WrapHashEntry2(px.KeyPcoreUri, stringValue(string(px.PcoreUri)))) + es = append(es, WrapHashEntry2(px.KeyPcoreVersion, WrapSemVer(px.PcoreVersion))) + es = append(es, WrapHashEntry2(KeyVersion, WrapSemVer(version))) + es = append(es, WrapHashEntry2(KeyTypes, WrapHash(types))) + return NewTypeSet(px.RuntimeNameAuthority, typeSetName, WrapHash(es)) +} + +func ParentType(t reflect.Type) reflect.Type { + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + if t.Kind() == reflect.Struct && t.NumField() > 0 { + f := t.Field(0) + if f.Anonymous && f.Type.Kind() == reflect.Struct { + return f.Type + } + } + return nil +} + +func typeName(prefix string, aliases map[string]string, rt reflect.Type) string { + if rt.Kind() == reflect.Ptr { + // Pointers have no names + rt = rt.Elem() + } + name := rt.Name() + if aliases != nil { + if alias, ok := aliases[name]; ok { + name = alias + } + } + return prefix + name +} + +func assertSettable(value *reflect.Value) { + if !value.CanSet() { + panic(px.Error(px.AttemptToSetUnsettable, issue.H{`kind`: value.Type().String()})) + } +} diff --git a/vendor/github.com/lyraproj/pcore/types/regexptype.go b/vendor/github.com/lyraproj/pcore/types/regexptype.go new file mode 100644 index 0000000..961c5c9 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/regexptype.go @@ -0,0 +1,265 @@ +package types + +import ( + "bytes" + "io" + "reflect" + "regexp" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + RegexpType struct { + pattern *regexp.Regexp + } + + // Regexp represents RegexpType as a value + Regexp RegexpType +) + +var regexpTypeDefault = &RegexpType{pattern: regexp.MustCompile(``)} + +var RegexpMetaType px.ObjectType + +func init() { + RegexpMetaType = newObjectType(`Pcore::RegexpType`, + `Pcore::ScalarType { + attributes => { + pattern => { + type => Variant[Undef,String,Regexp], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newRegexpType2(args...) 
+ }) + + newGoConstructor(`Regexp`, + func(d px.Dispatch) { + d.Param(`Variant[String,Regexp]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + arg := args[0] + if s, ok := arg.(*Regexp); ok { + return s + } + return WrapRegexp(arg.String()) + }) + }, + ) +} + +func DefaultRegexpType() *RegexpType { + return regexpTypeDefault +} + +func NewRegexpType(patternString string) *RegexpType { + if patternString == `` { + return DefaultRegexpType() + } + pattern, err := regexp.Compile(patternString) + if err != nil { + panic(px.Error(px.InvalidRegexp, issue.H{`pattern`: patternString, `detail`: err.Error()})) + } + return &RegexpType{pattern: pattern} +} + +func NewRegexpTypeR(pattern *regexp.Regexp) *RegexpType { + if pattern.String() == `` { + return DefaultRegexpType() + } + return &RegexpType{pattern: pattern} +} + +func newRegexpType2(args ...px.Value) *RegexpType { + switch len(args) { + case 0: + return regexpTypeDefault + case 1: + rx := args[0] + if str, ok := rx.(stringValue); ok { + return NewRegexpType(string(str)) + } + if rt, ok := rx.(*Regexp); ok { + return rt.PType().(*RegexpType) + } + panic(illegalArgumentType(`Regexp[]`, 0, `Variant[Regexp,String]`, args[0])) + default: + panic(illegalArgumentCount(`Regexp[]`, `0 - 1`, len(args))) + } +} + +func (t *RegexpType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *RegexpType) Default() px.Type { + return regexpTypeDefault +} + +func (t *RegexpType) Equals(o interface{}, g px.Guard) bool { + ot, ok := o.(*RegexpType) + return ok && t.pattern.String() == ot.pattern.String() +} + +func (t *RegexpType) Get(key string) (value px.Value, ok bool) { + switch key { + case `pattern`: + if t.String() == `` { + return undef, true + } + return stringValue(t.pattern.String()), true + } + return nil, false +} + +func (t *RegexpType) IsAssignable(o px.Type, g px.Guard) bool { + rx, ok := o.(*RegexpType) + return ok && (t.pattern.String() == `` || t.pattern.String() == rx.PatternString()) +} + +func (t *RegexpType) IsInstance(o px.Value, g px.Guard) bool { + rx, ok := o.(*Regexp) + return ok && (t.pattern.String() == `` || t.pattern.String() == rx.PatternString()) +} + +func (t *RegexpType) MetaType() px.ObjectType { + return RegexpMetaType +} + +func (t *RegexpType) Name() string { + return `Regexp` +} + +func (t *RegexpType) Parameters() []px.Value { + if t.pattern.String() == `` { + return px.EmptyValues + } + return []px.Value{WrapRegexp2(t.pattern)} +} + +func (t *RegexpType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(regexpTypeDefault.pattern), true +} + +func (t *RegexpType) PatternString() string { + return t.pattern.String() +} + +func (t *RegexpType) Regexp() *regexp.Regexp { + return t.pattern +} + +func (t *RegexpType) CanSerializeAsString() bool { + return true +} + +func (t *RegexpType) SerializationString() string { + return t.String() +} + +func (t *RegexpType) String() string { + return px.ToString2(t, None) +} + +func (t *RegexpType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *RegexpType) PType() px.Type { + return &TypeType{t} +} + +func MapToRegexps(regexpTypes []*RegexpType) []*regexp.Regexp { + top := len(regexpTypes) + result := make([]*regexp.Regexp, top) + for idx := 0; idx < top; idx++ { + result[idx] = regexpTypes[idx].Regexp() + } + return result +} + +func UniqueRegexps(regexpTypes []*RegexpType) []*RegexpType { + top := len(regexpTypes) + if top < 2 { + return regexpTypes + } + + result := make([]*RegexpType, 0, 
top) + exists := make(map[string]bool, top) + for _, regexpType := range regexpTypes { + key := regexpType.String() + if !exists[key] { + exists[key] = true + result = append(result, regexpType) + } + } + return result +} + +func WrapRegexp(str string) *Regexp { + pattern, err := regexp.Compile(str) + if err != nil { + panic(px.Error(px.InvalidRegexp, issue.H{`pattern`: str, `detail`: err.Error()})) + } + return &Regexp{pattern} +} + +func WrapRegexp2(pattern *regexp.Regexp) *Regexp { + return &Regexp{pattern} +} + +func (r *Regexp) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*Regexp); ok { + return r.pattern.String() == ov.pattern.String() + } + return false +} + +func (r *Regexp) Match(s string) []string { + return r.pattern.FindStringSubmatch(s) +} + +func (r *Regexp) Regexp() *regexp.Regexp { + return r.pattern +} + +func (r *Regexp) PatternString() string { + return r.pattern.String() +} + +func (r *Regexp) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(r.pattern) +} + +func (r *Regexp) ReflectTo(c px.Context, dest reflect.Value) { + rv := r.Reflect(c) + if rv.Kind() == reflect.Ptr && dest.Kind() != reflect.Ptr { + rv = rv.Elem() + } + if !rv.Type().AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: rv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(rv) +} + +func (r *Regexp) String() string { + return px.ToString2(r, None) +} + +func (r *Regexp) ToKey(b *bytes.Buffer) { + b.WriteByte(1) + b.WriteByte(HkRegexp) + b.Write([]byte(r.pattern.String())) +} + +func (r *Regexp) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + utils.RegexpQuote(b, r.pattern.String()) +} + +func (r *Regexp) PType() px.Type { + rt := RegexpType(*r) + return &rt +} diff --git a/vendor/github.com/lyraproj/pcore/types/resolver.go b/vendor/github.com/lyraproj/pcore/types/resolver.go new file mode 100644 index 0000000..b22fdd9 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/resolver.go @@ -0,0 +1,44 @@ +package types + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var coreTypes map[string]px.Type + +func Resolve(c px.Context, tn string) px.Type { + pt := coreTypes[tn] + if pt != nil { + return pt + } + return loadType(c, tn) +} + +func ResolveWithParams(c px.Context, name string, args []px.Value) px.Type { + t := Resolve(c, name) + if oo, ok := t.(px.ObjectType); ok && oo.IsParameterized() { + return NewObjectTypeExtension(c, oo, args) + } + if pt, ok := t.(px.ParameterizedType); ok { + mt := pt.MetaType().(*objectType) + if mt.creators != nil { + if posCtor := mt.creators[0]; posCtor != nil { + return posCtor(c, args).(px.Type) + } + } + } + panic(px.Error(px.NotParameterizedType, issue.H{`type`: name})) +} + +func loadType(c px.Context, name string) px.Type { + if c == nil { + return nil + } + tn := newTypedName2(px.NsType, name, c.Loader().NameAuthority()) + found, ok := px.Load(c, tn) + if ok { + return found.(px.Type) + } + return NewTypeReferenceType(name) +} diff --git a/vendor/github.com/lyraproj/pcore/types/runtimetype.go b/vendor/github.com/lyraproj/pcore/types/runtimetype.go new file mode 100644 index 0000000..2a49caa --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/runtimetype.go @@ -0,0 +1,300 @@ +package types + +import ( + "fmt" + "io" + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + RuntimeType struct { + name string + runtime string + pattern *RegexpType + 
goType reflect.Type + } + + // RuntimeValue Captures values of all types unknown to Puppet + RuntimeValue struct { + puppetType *RuntimeType + value interface{} + } +) + +var runtimeTypeDefault = &RuntimeType{``, ``, nil, nil} + +var RuntimeMetaType px.ObjectType + +func init() { + RuntimeMetaType = newObjectType(`Pcore::RuntimeType`, + `Pcore::AnyType { + attributes => { + runtime => { + type => Optional[String[1]], + value => undef + }, + name_or_pattern => { + type => Variant[Undef,String[1],Tuple[Regexp,String[1]]], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newRuntimeType2(args...) + }) +} + +func DefaultRuntimeType() *RuntimeType { + return runtimeTypeDefault +} + +func NewRuntimeType(runtimeName string, name string, pattern *RegexpType) *RuntimeType { + if runtimeName == `` && name == `` && pattern == nil { + return DefaultRuntimeType() + } + if runtimeName == `go` && name != `` { + panic(px.Error(px.GoRuntimeTypeWithoutGoType, issue.H{`name`: name})) + } + return &RuntimeType{runtime: runtimeName, name: name, pattern: pattern} +} + +func newRuntimeType2(args ...px.Value) *RuntimeType { + top := len(args) + if top > 3 { + panic(illegalArgumentCount(`Runtime[]`, `0 - 3`, len(args))) + } + if top == 0 { + return DefaultRuntimeType() + } + + runtimeName, ok := args[0].(stringValue) + if !ok { + panic(illegalArgumentType(`Runtime[]`, 0, `String`, args[0])) + } + + var pattern *RegexpType + var name px.Value + if top == 1 { + name = px.EmptyString + } else { + var rv stringValue + rv, ok = args[1].(stringValue) + if !ok { + panic(illegalArgumentType(`Runtime[]`, 1, `String`, args[1])) + } + name = rv + + if top == 2 { + pattern = nil + } else { + pattern, ok = args[2].(*RegexpType) + if !ok { + panic(illegalArgumentType(`Runtime[]`, 2, `Type[Regexp]`, args[2])) + } + } + } + return NewRuntimeType(string(runtimeName), name.String(), pattern) +} + +// NewGoRuntimeType creates a Go runtime by extracting the element type of the given value. 
+// +// A Go interface must be registered by passing a Pointer to a nil of the interface so to +// create an Runtime for the interface Foo, use NewGoRuntimeType((*Foo)(nil)) +func NewGoRuntimeType(value interface{}) *RuntimeType { + goType := reflect.TypeOf(value) + if goType.Kind() == reflect.Ptr && goType.Elem().Kind() == reflect.Interface { + goType = goType.Elem() + } + return &RuntimeType{runtime: `go`, name: goType.String(), goType: goType} +} + +func (t *RuntimeType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *RuntimeType) Default() px.Type { + return runtimeTypeDefault +} + +func (t *RuntimeType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*RuntimeType); ok && t.runtime == ot.runtime && t.name == ot.name { + if t.pattern == nil { + return ot.pattern == nil + } + return t.pattern.Equals(ot.pattern, g) + } + return false +} + +func (t *RuntimeType) Generic() px.Type { + return runtimeTypeDefault +} + +func (t *RuntimeType) Get(key string) (px.Value, bool) { + switch key { + case `runtime`: + if t.runtime == `` { + return undef, true + } + return stringValue(t.runtime), true + case `name_or_pattern`: + if t.pattern != nil { + return t.pattern, true + } + if t.name != `` { + return stringValue(t.name), true + } + return undef, true + default: + return nil, false + } +} + +func (t *RuntimeType) IsAssignable(o px.Type, g px.Guard) bool { + if rt, ok := o.(*RuntimeType); ok { + if t.goType != nil && rt.goType != nil { + return rt.goType.AssignableTo(t.goType) + } + if t.runtime == `` { + return true + } + if t.runtime != rt.runtime { + return false + } + if t.name == `` { + return true + } + if t.pattern != nil { + return t.name == rt.name && rt.pattern != nil && t.pattern.pattern.String() == rt.pattern.pattern.String() + } + if t.name == rt.name { + return true + } + } + return false +} + +func (t *RuntimeType) IsInstance(o px.Value, g px.Guard) bool { + rt, ok := o.(*RuntimeValue) + if !ok { + return false + } + if t.goType != nil { + return reflect.ValueOf(rt.Interface()).Type().AssignableTo(t.goType) + } + if t.runtime == `` { + return true + } + if o == nil || t.runtime != `go` || t.pattern != nil { + return false + } + if t.name == `` { + return true + } + return t.name == fmt.Sprintf(`%T`, rt.Interface()) +} + +func (t *RuntimeType) MetaType() px.ObjectType { + return RuntimeMetaType +} + +func (t *RuntimeType) Name() string { + return `Runtime` +} + +func (t *RuntimeType) Parameters() []px.Value { + if t.runtime == `` { + return px.EmptyValues + } + ps := make([]px.Value, 0, 2) + ps = append(ps, stringValue(t.runtime)) + if t.name != `` { + ps = append(ps, stringValue(t.name)) + } + if t.pattern != nil { + ps = append(ps, t.pattern) + } + return ps +} + +func (t *RuntimeType) ReflectType(c px.Context) (reflect.Type, bool) { + return t.goType, t.goType != nil +} + +func (t *RuntimeType) CanSerializeAsString() bool { + return true +} + +func (t *RuntimeType) SerializationString() string { + return t.String() +} + +func (t *RuntimeType) String() string { + return px.ToString2(t, None) +} + +func (t *RuntimeType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *RuntimeType) PType() px.Type { + return &TypeType{t} +} + +func WrapRuntime(value interface{}) *RuntimeValue { + goType := reflect.TypeOf(value) + return &RuntimeValue{&RuntimeType{runtime: `go`, name: goType.String(), goType: goType}, value} +} + +func (rv *RuntimeValue) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*RuntimeValue); ok { 
+ var re px.Equality + if re, ok = rv.value.(px.Equality); ok { + var oe px.Equality + if oe, ok = ov.value.(px.Equality); ok { + return re.Equals(oe, g) + } + return false + } + return reflect.DeepEqual(rv.value, ov.value) + } + return false +} + +func (rv *RuntimeValue) Reflect(c px.Context) reflect.Value { + gt := rv.puppetType.goType + if gt == nil { + panic(px.Error(px.InvalidSourceForGet, issue.H{`type`: rv.PType().String()})) + } + return reflect.ValueOf(rv.value) +} + +func (rv *RuntimeValue) ReflectTo(c px.Context, dest reflect.Value) { + gt := rv.puppetType.goType + if gt == nil { + panic(px.Error(px.InvalidSourceForGet, issue.H{`type`: rv.PType().String()})) + } + if !gt.AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: gt.String(), `actual`: dest.Type().String()})) + } + dest.Set(reflect.ValueOf(rv.value)) +} + +func (rv *RuntimeValue) String() string { + return px.ToString2(rv, None) +} + +func (rv *RuntimeValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + utils.PuppetQuote(b, fmt.Sprintf(`%v`, rv.value)) +} + +func (rv *RuntimeValue) PType() px.Type { + return rv.puppetType +} + +func (rv *RuntimeValue) Interface() interface{} { + return rv.value +} diff --git a/vendor/github.com/lyraproj/pcore/types/scalardatatype.go b/vendor/github.com/lyraproj/pcore/types/scalardatatype.go new file mode 100644 index 0000000..1edb113 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/scalardatatype.go @@ -0,0 +1,81 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type ScalarDataType struct{} + +var ScalarDataMetaType px.ObjectType + +func init() { + ScalarDataMetaType = newObjectType(`Pcore::ScalarDataType`, `Pcore::ScalarType{}`, + func(ctx px.Context, args []px.Value) px.Value { + return DefaultScalarDataType() + }) +} + +func DefaultScalarDataType() *ScalarDataType { + return scalarDataTypeDefault +} + +func (t *ScalarDataType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *ScalarDataType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*ScalarDataType) + return ok +} + +func (t *ScalarDataType) IsAssignable(o px.Type, g px.Guard) bool { + switch o.(type) { + case *ScalarDataType: + return true + default: + return GuardedIsAssignable(stringTypeDefault, o, g) || + GuardedIsAssignable(integerTypeDefault, o, g) || + GuardedIsAssignable(booleanTypeDefault, o, g) || + GuardedIsAssignable(floatTypeDefault, o, g) + } +} + +func (t *ScalarDataType) IsInstance(o px.Value, g px.Guard) bool { + switch o.(type) { + case booleanValue, floatValue, integerValue, stringValue: + return true + } + return false +} + +func (t *ScalarDataType) MetaType() px.ObjectType { + return ScalarDataMetaType +} + +func (t *ScalarDataType) Name() string { + return `ScalarData` +} + +func (t *ScalarDataType) CanSerializeAsString() bool { + return true +} + +func (t *ScalarDataType) SerializationString() string { + return t.String() +} + +func (t *ScalarDataType) String() string { + return `ScalarData` +} + +func (t *ScalarDataType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *ScalarDataType) PType() px.Type { + return &TypeType{t} +} + +var scalarDataTypeDefault = &ScalarDataType{} diff --git a/vendor/github.com/lyraproj/pcore/types/scalartype.go b/vendor/github.com/lyraproj/pcore/types/scalartype.go new file mode 100644 index 0000000..6e8d231 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/scalartype.go @@ -0,0 +1,81 @@ +package types + +import ( 
+ "io" + + "github.com/lyraproj/pcore/px" +) + +type ScalarType struct{} + +var ScalarMetaType px.ObjectType + +func init() { + ScalarMetaType = newObjectType(`Pcore::ScalarType`, `Pcore::AnyType{}`, + func(ctx px.Context, args []px.Value) px.Value { + return DefaultScalarType() + }) +} + +func DefaultScalarType() *ScalarType { + return scalarTypeDefault +} + +func (t *ScalarType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *ScalarType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*ScalarType) + return ok +} + +func (t *ScalarType) IsAssignable(o px.Type, g px.Guard) bool { + switch o.(type) { + case *ScalarType, *ScalarDataType: + return true + default: + return GuardedIsAssignable(stringTypeDefault, o, g) || + GuardedIsAssignable(numericTypeDefault, o, g) || + GuardedIsAssignable(booleanTypeDefault, o, g) || + GuardedIsAssignable(regexpTypeDefault, o, g) + } +} + +func (t *ScalarType) IsInstance(o px.Value, g px.Guard) bool { + switch o.(type) { + case stringValue, integerValue, floatValue, booleanValue, Timespan, *Timestamp, *SemVer, *Regexp: + return true + } + return false +} + +func (t *ScalarType) MetaType() px.ObjectType { + return ScalarMetaType +} + +func (t *ScalarType) Name() string { + return `Scalar` +} + +func (t *ScalarType) CanSerializeAsString() bool { + return true +} + +func (t *ScalarType) SerializationString() string { + return t.String() +} + +func (t *ScalarType) String() string { + return `Scalar` +} + +func (t *ScalarType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *ScalarType) PType() px.Type { + return &TypeType{t} +} + +var scalarTypeDefault = &ScalarType{} diff --git a/vendor/github.com/lyraproj/pcore/types/semverrangetype.go b/vendor/github.com/lyraproj/pcore/types/semverrangetype.go new file mode 100644 index 0000000..b6978d6 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/semverrangetype.go @@ -0,0 +1,225 @@ +package types + +import ( + "bytes" + "io" + + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" + "github.com/lyraproj/semver/semver" +) + +type ( + SemVerRangeType struct{} + + SemVerRange struct { + rng semver.VersionRange + } +) + +var semVerRangeTypeDefault = &SemVerRangeType{} + +var SemVerRangeMetaType px.ObjectType + +func init() { + SemVerRangeMetaType = newObjectType(`Pcore::SemVerRangeType`, `Pcore::AnyType {}`, + func(ctx px.Context, args []px.Value) px.Value { + return DefaultSemVerRangeType() + }) + + newGoConstructor2(`SemVerRange`, + func(t px.LocalTypes) { + t.Type(`SemVerRangeString`, `String[1]`) + t.Type(`SemVerRangeHash`, `Struct[min=>Variant[Default,SemVer],Optional[max]=>Variant[Default,SemVer],Optional[exclude_max]=>Boolean]`) + }, + + func(d px.Dispatch) { + d.Param(`SemVerRangeString`) + d.Function(func(c px.Context, args []px.Value) px.Value { + v, err := semver.ParseVersionRange(args[0].String()) + if err != nil { + panic(illegalArgument(`SemVerRange`, 0, err.Error())) + } + return WrapSemVerRange(v) + }) + }, + + func(d px.Dispatch) { + d.Param(`Variant[Default,SemVer]`) + d.Param(`Variant[Default,SemVer]`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + var start semver.Version + if _, ok := args[0].(*DefaultValue); ok { + start = semver.Min + } else { + start = args[0].(*SemVer).Version() + } + var end semver.Version + if _, ok := args[1].(*DefaultValue); ok { + end = semver.Max + } else { + end = args[1].(*SemVer).Version() + 
} + excludeEnd := false + if len(args) > 2 { + excludeEnd = args[2].(booleanValue).Bool() + } + return WrapSemVerRange(semver.FromVersions(start, false, end, excludeEnd)) + }) + }, + + func(d px.Dispatch) { + d.Param(`SemVerRangeHash`) + d.Function(func(c px.Context, args []px.Value) px.Value { + hash := args[0].(*Hash) + start := hash.Get5(`min`, nil).(*SemVer).Version() + + var end semver.Version + ev := hash.Get5(`max`, nil) + if ev == nil { + end = semver.Max + } else { + end = ev.(*SemVer).Version() + } + + excludeEnd := false + ev = hash.Get5(`excludeMax`, nil) + if ev != nil { + excludeEnd = ev.(booleanValue).Bool() + } + return WrapSemVerRange(semver.FromVersions(start, false, end, excludeEnd)) + }) + }, + ) +} + +func DefaultSemVerRangeType() *SemVerRangeType { + return semVerRangeTypeDefault +} + +func (t *SemVerRangeType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *SemVerRangeType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*SemVerRangeType) + return ok +} + +func (t *SemVerRangeType) MetaType() px.ObjectType { + return SemVerRangeMetaType +} + +func (t *SemVerRangeType) Name() string { + return `SemVerRange` +} + +func (t *SemVerRangeType) CanSerializeAsString() bool { + return true +} + +func (t *SemVerRangeType) SerializationString() string { + return t.String() +} + +func (t *SemVerRangeType) String() string { + return `SemVerRange` +} + +func (t *SemVerRangeType) IsAssignable(o px.Type, g px.Guard) bool { + _, ok := o.(*SemVerRangeType) + return ok +} + +func (t *SemVerRangeType) IsInstance(o px.Value, g px.Guard) bool { + _, ok := o.(*SemVerRange) + return ok +} + +func (t *SemVerRangeType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(semver.MatchAll), true +} + +func (t *SemVerRangeType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *SemVerRangeType) PType() px.Type { + return &TypeType{t} +} + +func WrapSemVerRange(val semver.VersionRange) *SemVerRange { + return &SemVerRange{val} +} + +func (bv *SemVerRange) VersionRange() semver.VersionRange { + return bv.rng +} + +func (bv *SemVerRange) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*SemVerRange); ok { + return bv.rng.Equals(ov.rng) + } + return false +} + +func (bv *SemVerRange) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(bv.rng) +} + +func (bv *SemVerRange) ReflectTo(c px.Context, dest reflect.Value) { + rv := bv.Reflect(c) + if !rv.Type().AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: rv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(rv) +} + +func (bv *SemVerRange) CanSerializeAsString() bool { + return true +} + +func (bv *SemVerRange) SerializationString() string { + return bv.String() +} + +func (bv *SemVerRange) String() string { + return px.ToString2(bv, None) +} + +func (bv *SemVerRange) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), bv.PType()) + vr := bv.rng + switch f.FormatChar() { + case 'p': + utils.WriteString(b, `SemVerRange(`) + if f.IsAlt() { + utils.PuppetQuote(b, vr.NormalizedString()) + } else { + utils.PuppetQuote(b, vr.String()) + } + utils.WriteString(b, `)`) + case 's': + if f.IsAlt() { + vr.ToNormalizedString(b) + } else { + vr.ToString(b) + } + default: + panic(s.UnsupportedFormat(bv.PType(), `ps`, f)) + } +} + +func (bv *SemVerRange) ToKey(b *bytes.Buffer) { + b.WriteByte(1) + b.WriteByte(HkVersionRange) + bv.rng.ToString(b) +} + +func (bv 
*SemVerRange) PType() px.Type { + return DefaultSemVerRangeType() +} diff --git a/vendor/github.com/lyraproj/pcore/types/semvertype.go b/vendor/github.com/lyraproj/pcore/types/semvertype.go new file mode 100644 index 0000000..a082ae4 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/semvertype.go @@ -0,0 +1,305 @@ +package types + +import ( + "bytes" + "io" + + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" + "github.com/lyraproj/semver/semver" +) + +type ( + SemVerType struct { + vRange semver.VersionRange + } + + SemVer SemVerType +) + +var semVerTypeDefault = &SemVerType{semver.MatchAll} + +var SemVerMetaType px.ObjectType + +func init() { + SemVerMetaType = newObjectType(`Pcore::SemVerType`, `Pcore::ScalarType { + attributes => { + ranges => { + type => Array[Variant[SemVerRange,String[1]]], + value => [] + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newSemVerType2(args...) + }) + + newGoConstructor2(`SemVer`, + func(t px.LocalTypes) { + t.Type(`PositiveInteger`, `Integer[0,default]`) + t.Type(`SemVerQualifier`, `Pattern[/\A[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*\z/]`) + t.Type(`SemVerString`, `String[1]`) + t.Type(`SemVerHash`, `Struct[major=>PositiveInteger,minor=>PositiveInteger,patch=>PositiveInteger,Optional[prerelease]=>SemVerQualifier,Optional[build]=>SemVerQualifier]`) + }, + + func(d px.Dispatch) { + d.Param(`SemVerString`) + d.Function(func(c px.Context, args []px.Value) px.Value { + v, err := semver.ParseVersion(args[0].String()) + if err != nil { + panic(illegalArgument(`SemVer`, 0, err.Error())) + } + return WrapSemVer(v) + }) + }, + + func(d px.Dispatch) { + d.Param(`PositiveInteger`) + d.Param(`PositiveInteger`) + d.Param(`PositiveInteger`) + d.OptionalParam(`SemVerQualifier`) + d.OptionalParam(`SemVerQualifier`) + d.Function(func(c px.Context, args []px.Value) px.Value { + argc := len(args) + major := args[0].(integerValue).Int() + minor := args[1].(integerValue).Int() + patch := args[2].(integerValue).Int() + preRelease := `` + build := `` + if argc > 3 { + preRelease = args[3].String() + if argc > 4 { + build = args[4].String() + } + } + v, err := semver.NewVersion3(int(major), int(minor), int(patch), preRelease, build) + if err != nil { + panic(illegalArguments(`SemVer`, err.Error())) + } + return WrapSemVer(v) + }) + }, + + func(d px.Dispatch) { + d.Param(`SemVerHash`) + d.Function(func(c px.Context, args []px.Value) px.Value { + hash := args[0].(*Hash) + major := hash.Get5(`major`, ZERO).(integerValue).Int() + minor := hash.Get5(`minor`, ZERO).(integerValue).Int() + patch := hash.Get5(`patch`, ZERO).(integerValue).Int() + preRelease := `` + build := `` + ev := hash.Get5(`prerelease`, nil) + if ev != nil { + preRelease = ev.String() + } + ev = hash.Get5(`build`, nil) + if ev != nil { + build = ev.String() + } + v, err := semver.NewVersion3(int(major), int(minor), int(patch), preRelease, build) + if err != nil { + panic(illegalArguments(`SemVer`, err.Error())) + } + return WrapSemVer(v) + }) + }, + ) +} + +func DefaultSemVerType() *SemVerType { + return semVerTypeDefault +} + +func NewSemVerType(vr semver.VersionRange) *SemVerType { + if vr.Equals(semver.MatchAll) { + return DefaultSemVerType() + } + return &SemVerType{vr} +} + +func newSemVerType2(limits ...px.Value) *SemVerType { + return newSemVerType3(WrapValues(limits)) +} + +func newSemVerType3(limits px.List) *SemVerType { + argc := limits.Len() + if argc == 0 { + return DefaultSemVerType() + } + + if argc == 1 { 
+ if ranges, ok := limits.At(0).(px.List); ok { + return newSemVerType3(ranges) + } + } + + var finalRange semver.VersionRange + limits.EachWithIndex(func(arg px.Value, idx int) { + var rng semver.VersionRange + str, ok := arg.(stringValue) + if ok { + var err error + rng, err = semver.ParseVersionRange(string(str)) + if err != nil { + panic(illegalArgument(`SemVer[]`, idx, err.Error())) + } + } else { + rv, ok := arg.(*SemVerRange) + if !ok { + panic(illegalArgumentType(`SemVer[]`, idx, `Variant[String,SemVerRange]`, arg)) + } + rng = rv.VersionRange() + } + if finalRange == nil { + finalRange = rng + } else { + finalRange = finalRange.Merge(rng) + } + }) + return NewSemVerType(finalRange) +} + +func (t *SemVerType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *SemVerType) Default() px.Type { + return semVerTypeDefault +} + +func (t *SemVerType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*SemVerType) + return ok +} + +func (t *SemVerType) Get(key string) (px.Value, bool) { + switch key { + case `ranges`: + return WrapValues(t.Parameters()), true + default: + return nil, false + } +} + +func (t *SemVerType) MetaType() px.ObjectType { + return SemVerMetaType +} + +func (t *SemVerType) Name() string { + return `SemVer` +} + +func (t *SemVerType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(semver.Max), true +} + +func (t *SemVerType) CanSerializeAsString() bool { + return true +} + +func (t *SemVerType) SerializationString() string { + return t.String() +} + +func (t *SemVerType) String() string { + return px.ToString2(t, None) +} + +func (t *SemVerType) IsAssignable(o px.Type, g px.Guard) bool { + if vt, ok := o.(*SemVerType); ok { + return vt.vRange.IsAsRestrictiveAs(t.vRange) + } + return false +} + +func (t *SemVerType) IsInstance(o px.Value, g px.Guard) bool { + if v, ok := o.(*SemVer); ok { + return t.vRange.Includes(v.Version()) + } + return false +} + +func (t *SemVerType) Parameters() []px.Value { + if t.vRange.Equals(semver.MatchAll) { + return px.EmptyValues + } + return []px.Value{stringValue(t.vRange.String())} +} + +func (t *SemVerType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *SemVerType) PType() px.Type { + return &TypeType{t} +} + +func WrapSemVer(val semver.Version) *SemVer { + return (*SemVer)(NewSemVerType(semver.ExactVersionRange(val))) +} + +func (v *SemVer) Version() semver.Version { + return v.vRange.StartVersion() +} + +func (v *SemVer) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*SemVer); ok { + return v.Version().Equals(ov.Version()) + } + return false +} + +func (v *SemVer) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(v.Version()) +} + +func (v *SemVer) ReflectTo(c px.Context, dest reflect.Value) { + rv := v.Reflect(c) + if !rv.Type().AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: rv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(rv) +} + +func (v *SemVer) CanSerializeAsString() bool { + return true +} + +func (v *SemVer) SerializationString() string { + return v.String() +} + +func (v *SemVer) String() string { + if v.vRange == nil || v.vRange.StartVersion() == nil { + return `0.0.0-` + } + return v.Version().String() +} + +func (v *SemVer) ToKey(b *bytes.Buffer) { + b.WriteByte(1) + b.WriteByte(HkVersion) + v.Version().ToString(b) +} + +func (v *SemVer) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), v.PType()) + val 
:= v.Version().String() + switch f.FormatChar() { + case 's': + f.ApplyStringFlags(b, val, f.IsAlt()) + case 'p': + utils.WriteString(b, `SemVer(`) + utils.PuppetQuote(b, val) + utils.WriteByte(b, ')') + default: + panic(s.UnsupportedFormat(v.PType(), `sp`, f)) + } +} + +func (v *SemVer) PType() px.Type { + return (*SemVerType)(v) +} diff --git a/vendor/github.com/lyraproj/pcore/types/sensitivetype.go b/vendor/github.com/lyraproj/pcore/types/sensitivetype.go new file mode 100644 index 0000000..a2bd76c --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/sensitivetype.go @@ -0,0 +1,172 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/utils" + + "github.com/lyraproj/pcore/px" +) + +var sensitiveTypeDefault = &SensitiveType{typ: anyTypeDefault} + +type ( + SensitiveType struct { + typ px.Type + } + + Sensitive struct { + value px.Value + } +) + +var SensitiveMetaType px.ObjectType + +func init() { + SensitiveMetaType = newObjectType(`Pcore::SensitiveType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newSensitiveType2(args...) + }) + + newGoConstructor(`Sensitive`, + func(d px.Dispatch) { + d.Param(`Any`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return WrapSensitive(args[0]) + }) + }) +} + +func DefaultSensitiveType() *SensitiveType { + return sensitiveTypeDefault +} + +func NewSensitiveType(containedType px.Type) *SensitiveType { + if containedType == nil || containedType == anyTypeDefault { + return DefaultSensitiveType() + } + return &SensitiveType{containedType} +} + +func newSensitiveType2(args ...px.Value) *SensitiveType { + switch len(args) { + case 0: + return DefaultSensitiveType() + case 1: + if containedType, ok := args[0].(px.Type); ok { + return NewSensitiveType(containedType) + } + panic(illegalArgumentType(`Sensitive[]`, 0, `Type`, args[0])) + default: + panic(illegalArgumentCount(`Sensitive[]`, `0 or 1`, len(args))) + } +} + +func (t *SensitiveType) ContainedType() px.Type { + return t.typ +} + +func (t *SensitiveType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *SensitiveType) Default() px.Type { + return DefaultSensitiveType() +} + +func (t *SensitiveType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*SensitiveType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *SensitiveType) Generic() px.Type { + return NewSensitiveType(px.GenericType(t.typ)) +} + +func (t *SensitiveType) IsAssignable(o px.Type, g px.Guard) bool { + if ot, ok := o.(*SensitiveType); ok { + return GuardedIsAssignable(t.typ, ot.typ, g) + } + return false +} + +func (t *SensitiveType) IsInstance(o px.Value, g px.Guard) bool { + if sv, ok := o.(*Sensitive); ok { + return GuardedIsInstance(t.typ, sv.Unwrap(), g) + } + return false +} + +func (t *SensitiveType) MetaType() px.ObjectType { + return SensitiveMetaType +} + +func (t *SensitiveType) Name() string { + return `Sensitive` +} + +func (t *SensitiveType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + return []px.Value{t.typ} +} + +func (t *SensitiveType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *SensitiveType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *SensitiveType) SerializationString() string { + return t.String() +} + +func (t *SensitiveType) String() string { + return px.ToString2(t, None) +} + 
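+// Usage sketch: a minimal illustration of the Sensitive wrapper defined in
+// this file (only exported names declared in this package are used). The
+// wrapped value is hidden from string output and never reports equality,
+// while Unwrap returns the original value:
+//
+//	s := WrapSensitive(WrapString(`secret`))
+//	s.Unwrap()       // the wrapped px.Value
+//	s.Equals(s, nil) // false; Sensitive.Equals below always returns false
+//	px.ToString(s)   // redacted output, see Sensitive.ToString below
+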
+func (t *SensitiveType) PType() px.Type { + return &TypeType{t} +} + +func (t *SensitiveType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func WrapSensitive(val px.Value) *Sensitive { + return &Sensitive{val} +} + +func (s *Sensitive) Equals(o interface{}, g px.Guard) bool { + return false +} + +func (s *Sensitive) String() string { + return px.ToString2(s, None) +} + +func (s *Sensitive) ToString(b io.Writer, f px.FormatContext, g px.RDetect) { + utils.WriteString(b, `Sensitive [value redacted]`) +} + +func (s *Sensitive) PType() px.Type { + return NewSensitiveType(s.Unwrap().PType()) +} + +func (s *Sensitive) Unwrap() px.Value { + return s.value +} diff --git a/vendor/github.com/lyraproj/pcore/types/stringtype.go b/vendor/github.com/lyraproj/pcore/types/stringtype.go new file mode 100644 index 0000000..b6d6fe5 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/stringtype.go @@ -0,0 +1,607 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "math" + "strings" + + "reflect" + "regexp" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + // String that is unconstrained + stringType struct{} + + // String constrained to content + vcStringType struct { + stringType + value string + } + + // String constrained by length of string + scStringType struct { + stringType + size *IntegerType + } + + // stringValue represents string as a pcore.Value + stringValue string +) + +var stringTypeDefault = &stringType{} +var stringTypeNotEmpty = &scStringType{size: NewIntegerType(1, math.MaxInt64)} + +var StringMetaType px.ObjectType + +func init() { + StringMetaType = newObjectType(`Pcore::StringType`, `Pcore::ScalarDataType { + attributes => { + size_type_or_value => { + type => Variant[Undef,String,Type[Integer]], + value => undef + }, + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newStringType2(args...) 
+ }) + + newGoConstructor2(`String`, + func(t px.LocalTypes) { + t.Type2(`Format`, NewPatternType([]*RegexpType{NewRegexpTypeR(px.FormatPattern)})) + t.Type(`ContainerFormat`, `Struct[{ + Optional[format] => Format, + Optional[separator] => String, + Optional[separator2] => String, + Optional[string_formats] => Hash[Type, Format] + }]`) + t.Type(`TypeMap`, `Hash[Type, Variant[Format, ContainerFormat]]`) + t.Type(`Formats`, `Variant[Default, String[1], TypeMap]`) + }, + + func(d px.Dispatch) { + d.Param(`Any`) + d.OptionalParam(`Formats`) + d.Function(func(c px.Context, args []px.Value) px.Value { + f := None + if len(args) > 1 { + var err error + f, err = px.NewFormatContext3(args[0], args[1]) + if err != nil { + panic(illegalArgument(`String`, 1, err.Error())) + } + } + return stringValue(px.ToString2(args[0], f)) + }) + }, + ) +} + +func DefaultStringType() *stringType { + return stringTypeDefault +} + +func NewStringType(rng *IntegerType, s string) px.Type { + if s == `` { + if rng == nil || *rng == *IntegerTypePositive { + return DefaultStringType() + } + return &scStringType{size: rng} + } + return &vcStringType{value: s} +} + +func newStringType2(args ...px.Value) px.Type { + var rng *IntegerType + var ok bool + switch len(args) { + case 0: + return DefaultStringType() + case 1: + var value stringValue + if value, ok = args[0].(stringValue); ok { + return NewStringType(nil, string(value)) + } + rng, ok = args[0].(*IntegerType) + if !ok { + var min int64 + min, ok = toInt(args[0]) + if !ok { + panic(illegalArgumentType(`String[]`, 0, `String, Integer or Type[Integer]`, args[0])) + } + rng = NewIntegerType(min, math.MaxInt64) + } + case 2: + var min, max int64 + min, ok = toInt(args[0]) + if !ok { + panic(illegalArgumentType(`String[]`, 0, `Integer`, args[0])) + } + max, ok = toInt(args[1]) + if !ok { + panic(illegalArgumentType(`String[]`, 1, `Integer`, args[1])) + } + rng = NewIntegerType(min, max) + default: + panic(illegalArgumentCount(`String[]`, `0 - 2`, len(args))) + } + return NewStringType(rng, ``) +} + +func (t *stringType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *scStringType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.size.Accept(v, g) +} + +func (t *stringType) Default() px.Type { + return stringTypeDefault +} + +func (t *stringType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*stringType) + return ok +} + +func (t *scStringType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*scStringType); ok { + return t.size.Equals(ot.size, g) + } + return false +} + +func (t *vcStringType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*vcStringType); ok { + return t.value == ot.value + } + return false +} + +func (t *stringType) Get(key string) (value px.Value, ok bool) { + switch key { + case `size_type_or_value`: + return IntegerTypePositive, true + } + return nil, false +} + +func (t *scStringType) Get(key string) (value px.Value, ok bool) { + switch key { + case `size_type_or_value`: + return t.size, true + } + return nil, false +} + +func (t *vcStringType) Get(key string) (value px.Value, ok bool) { + switch key { + case `size_type_or_value`: + return stringValue(t.value), true + } + return nil, false +} + +func (t *stringType) IsAssignable(o px.Type, g px.Guard) bool { + switch o.(type) { + case *stringType, *scStringType, *vcStringType, *EnumType, *PatternType: + return true + } + return false +} + +func (t *scStringType) IsAssignable(o px.Type, g px.Guard) bool { + switch o := o.(type) { + case *vcStringType: + return 
t.size.IsInstance3(len(o.value)) + case *scStringType: + return t.size.IsAssignable(o.size, g) + case *EnumType: + for _, str := range o.values { + if !t.size.IsInstance3(len(string(str))) { + return false + } + } + return true + } + return false +} + +func (t *vcStringType) IsAssignable(o px.Type, g px.Guard) bool { + if st, ok := o.(*vcStringType); ok { + return t.value == st.value + } + return false +} + +func (t *stringType) IsInstance(o px.Value, g px.Guard) bool { + _, ok := o.(stringValue) + return ok +} + +func (t *scStringType) IsInstance(o px.Value, g px.Guard) bool { + str, ok := o.(stringValue) + return ok && t.size.IsInstance3(len(string(str))) +} + +func (t *vcStringType) IsInstance(o px.Value, g px.Guard) bool { + str, ok := o.(stringValue) + return ok && t.value == string(str) +} + +func (t *stringType) MetaType() px.ObjectType { + return StringMetaType +} + +func (t *stringType) Name() string { + return `String` +} + +func (t *stringType) Parameters() []px.Value { + return px.EmptyValues +} + +func (t *scStringType) Parameters() []px.Value { + return t.size.Parameters() +} + +func (t *stringType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(`x`), true +} + +func (t *stringType) CanSerializeAsString() bool { + return true +} + +func (t *stringType) SerializationString() string { + return t.String() +} + +func (t *stringType) String() string { + return px.ToString2(t, None) +} + +func (t *scStringType) String() string { + return px.ToString2(t, None) +} + +func (t *vcStringType) String() string { + return px.ToString2(t, None) +} + +func (t *stringType) Size() px.Type { + return IntegerTypePositive +} + +func (t *scStringType) Size() px.Type { + return t.size +} + +func (t *stringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *scStringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *vcStringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *stringType) PType() px.Type { + return &TypeType{t} +} + +func (t *stringType) Value() *string { + return nil +} + +func (t *vcStringType) Value() *string { + return &t.value +} + +func WrapString(str string) px.StringValue { + return stringValue(str) +} + +func (sv stringValue) Add(v px.Value) px.List { + if ov, ok := v.(stringValue); ok { + return stringValue(string(sv) + string(ov)) + } + panic(fmt.Sprintf(`No auto conversion from %s to String`, v.PType().String())) +} + +var OneCharStringType = NewStringType(NewIntegerType(1, 1), ``) + +func (sv stringValue) AddAll(tv px.List) px.List { + s := bytes.NewBufferString(sv.String()) + tv.Each(func(e px.Value) { + ev, ok := e.(stringValue) + if !ok { + panic(fmt.Sprintf(`No auto conversion from %s to String`, e.PType().String())) + } + s.WriteString(string(ev)) + }) + return stringValue(s.String()) +} + +func (sv stringValue) All(predicate px.Predicate) bool { + for _, c := range sv.String() { + if !predicate(stringValue(string(c))) { + return false + } + } + return true +} + +func (sv stringValue) Any(predicate px.Predicate) bool { + for _, c := range sv.String() { + if predicate(stringValue(string(c))) { + return true + } + } + return false +} + +func (sv stringValue) AppendTo(slice []px.Value) []px.Value { + for _, c := range sv.String() { + slice = append(slice, stringValue(string(c))) + } + return slice +} + +func (sv stringValue) AsArray() px.List { + return WrapValues(sv.Elements()) +} + 
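+// Note on the px.List methods implemented for stringValue: the string is
+// treated as a sequence of one-character strings (ElementType returns
+// OneCharStringType). A minimal sketch, assuming a stringValue from this
+// package:
+//
+//	var s stringValue = `abc`
+//	s.Len()         // 3
+//	s.At(1)         // stringValue(`b`)
+//	s.ElementType() // OneCharStringType
+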
+func (sv stringValue) At(i int) px.Value { + if i >= 0 && i < len(sv.String()) { + return stringValue(sv.String()[i : i+1]) + } + return undef +} + +func (sv stringValue) Delete(v px.Value) px.List { + panic(`Operation not supported`) +} + +func (sv stringValue) DeleteAll(tv px.List) px.List { + panic(`Operation not supported`) +} + +func (sv stringValue) Elements() []px.Value { + str := sv.String() + top := len(str) + el := make([]px.Value, top) + for idx, c := range str { + el[idx] = stringValue(string(c)) + } + return el +} + +func (sv stringValue) Each(consumer px.Consumer) { + for _, c := range sv.String() { + consumer(stringValue(string(c))) + } +} + +func (sv stringValue) EachSlice(n int, consumer px.SliceConsumer) { + s := sv.String() + top := len(s) + for i := 0; i < top; i += n { + e := i + n + if e > top { + e = top + } + consumer(stringValue(s[i:e])) + } +} + +func (sv stringValue) EachWithIndex(consumer px.IndexedConsumer) { + for i, c := range sv.String() { + consumer(stringValue(string(c)), i) + } +} + +func (sv stringValue) ElementType() px.Type { + return OneCharStringType +} + +func (sv stringValue) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(stringValue); ok { + return string(sv) == string(ov) + } + return false +} + +func (sv stringValue) EqualsIgnoreCase(o px.Value) bool { + if os, ok := o.(stringValue); ok { + return strings.EqualFold(string(sv), string(os)) + } + return false +} + +func (sv stringValue) Find(predicate px.Predicate) (px.Value, bool) { + for _, c := range string(sv) { + e := stringValue(string(c)) + if predicate(e) { + return e, true + } + } + return nil, false +} + +func (sv stringValue) Flatten() px.List { + return sv +} + +func (sv stringValue) IsEmpty() bool { + return sv.Len() == 0 +} + +func (sv stringValue) IsHashStyle() bool { + return false +} + +func (sv stringValue) Len() int { + return len(sv) +} + +func (sv stringValue) Map(mapper px.Mapper) px.List { + s := sv.String() + mapped := make([]px.Value, len(s)) + for i, c := range s { + mapped[i] = mapper(stringValue(string(c))) + } + return WrapValues(mapped) +} + +func (sv stringValue) Reduce(redactor px.BiMapper) px.Value { + s := sv.String() + if len(s) == 0 { + return undef + } + return reduceString(s[1:], sv.At(0), redactor) +} + +func (sv stringValue) Reduce2(initialValue px.Value, redactor px.BiMapper) px.Value { + return reduceString(sv.String(), initialValue, redactor) +} + +func (sv stringValue) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(sv.String()) +} + +func (sv stringValue) ReflectTo(c px.Context, value reflect.Value) { + switch value.Kind() { + case reflect.Interface: + value.Set(sv.Reflect(c)) + case reflect.Ptr: + s := string(sv) + value.Set(reflect.ValueOf(&s)) + default: + value.SetString(string(sv)) + } +} + +func (sv stringValue) Reject(predicate px.Predicate) px.List { + selected := bytes.NewBufferString(``) + for _, c := range sv.String() { + if !predicate(stringValue(string(c))) { + selected.WriteRune(c) + } + } + return stringValue(selected.String()) +} + +func (sv stringValue) Select(predicate px.Predicate) px.List { + selected := bytes.NewBufferString(``) + for _, c := range sv.String() { + if predicate(stringValue(string(c))) { + selected.WriteRune(c) + } + } + return stringValue(selected.String()) +} + +func (sv stringValue) Slice(i int, j int) px.List { + return stringValue(sv.String()[i:j]) +} + +func (sv stringValue) Split(pattern *regexp.Regexp) px.List { + parts := pattern.Split(sv.String(), -1) + result := make([]px.Value, 
len(parts)) + for i, s := range parts { + result[i] = stringValue(s) + } + return WrapValues(result) +} + +func (sv stringValue) String() string { + return string(sv) +} + +func (sv stringValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), sv.PType()) + val := string(sv) + switch f.FormatChar() { + case 's': + _, err := fmt.Fprintf(b, f.OrigFormat(), val) + if err != nil { + panic(err) + } + case 'p': + f.ApplyStringFlags(b, val, true) + case 'c': + val = utils.CapitalizeSegment(val) + f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt()) + case 'C': + val = utils.CapitalizeSegments(val) + f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt()) + case 'u': + val = strings.ToUpper(val) + f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt()) + case 'd': + val = strings.ToLower(val) + f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt()) + case 't': + val = strings.TrimSpace(val) + f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt()) + default: + //noinspection SpellCheckingInspection + panic(s.UnsupportedFormat(sv.PType(), `cCudspt`, f)) + } +} + +func (sv stringValue) ToKey() px.HashKey { + return px.HashKey(sv.String()) +} + +func (sv stringValue) ToLower() px.StringValue { + return stringValue(strings.ToLower(string(sv))) +} + +func (sv stringValue) ToUpper() px.StringValue { + return stringValue(strings.ToUpper(string(sv))) +} + +func (sv stringValue) PType() px.Type { + return &vcStringType{value: string(sv)} +} + +func (sv stringValue) Unique() px.List { + s := sv.String() + top := len(s) + if top < 2 { + return sv + } + + result := bytes.NewBufferString(``) + exists := make(map[rune]bool, top) + for _, c := range s { + if !exists[c] { + exists[c] = true + result.WriteRune(c) + } + } + if result.Len() == len(s) { + return sv + } + return stringValue(result.String()) +} + +func reduceString(slice string, initialValue px.Value, redactor px.BiMapper) px.Value { + memo := initialValue + for _, v := range slice { + memo = redactor(memo, stringValue(string(v))) + } + return memo +} diff --git a/vendor/github.com/lyraproj/pcore/types/structtype.go b/vendor/github.com/lyraproj/pcore/types/structtype.go new file mode 100644 index 0000000..42b76ca --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/structtype.go @@ -0,0 +1,397 @@ +package types + +import ( + "io" + "sync" + + "github.com/lyraproj/pcore/utils" + + "github.com/lyraproj/pcore/px" +) + +type ( + StructElement struct { + name string + key px.Type + value px.Type + } + + StructType struct { + lock sync.Mutex + elements []*StructElement + hashedMembers map[string]*StructElement + } +) + +var StructElementMeta px.Type + +var StructMetaType px.ObjectType + +func init() { + StructElementMeta = newObjectType(`Pcore::StructElement`, + `{ + attributes => { + key_type => Type, + value_type => Type + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return NewStructElement(args[0], args[1].(px.Type)) + }) + + StructMetaType = newObjectType(`Pcore::StructType`, + `Pcore::AnyType { + attributes => { + elements => Array[Pcore::StructElement] + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newStructType2(args...) 
+ }) + + // Go constructor for Struct instances is registered by HashType +} + +func NewStructElement(key px.Value, value px.Type) *StructElement { + + var ( + name string + keyType px.Type + ) + + switch key := key.(type) { + case stringValue: + keyType = key.PType() + if isAssignable(value, DefaultUndefType()) { + keyType = NewOptionalType(keyType) + } + name = string(key) + case *vcStringType: + name = key.value + keyType = key + case *OptionalType: + if strType, ok := key.typ.(*vcStringType); ok { + name = strType.value + keyType = key + } + } + + if keyType == nil || name == `` { + panic(illegalArgumentType(`StructElement`, 0, `Variant[String[1], Type[String[1]], , Type[Optional[String[1]]]]`, key)) + } + return &StructElement{name, keyType, value} +} + +func newStructElement2(key string, value px.Type) *StructElement { + return NewStructElement(stringValue(key), value) +} + +func DefaultStructType() *StructType { + return structTypeDefault +} + +func NewStructType(elements []*StructElement) *StructType { + if len(elements) == 0 { + return DefaultStructType() + } + return &StructType{elements: elements} +} + +func newStructType2(args ...px.Value) *StructType { + switch len(args) { + case 0: + return DefaultStructType() + case 1: + arg := args[0] + if ar, ok := arg.(*Array); ok { + return newStructType2(ar.AppendTo(make([]px.Value, 0, ar.Len()))...) + } + hash, ok := arg.(px.OrderedMap) + if !ok { + panic(illegalArgumentType(`Struct[]`, 0, `Hash[Variant[String[1], Optional[String[1]]], Type]`, arg)) + } + top := hash.Len() + es := make([]*StructElement, top) + hash.EachWithIndex(func(v px.Value, idx int) { + e := v.(*HashEntry) + vt, ok := e.Value().(px.Type) + if !ok { + panic(illegalArgumentType(`StructElement`, 1, `Type`, v)) + } + es[idx] = NewStructElement(e.Key(), vt) + }) + return NewStructType(es) + default: + panic(illegalArgumentCount(`Struct`, `0 - 1`, len(args))) + } +} + +func (s *StructElement) Accept(v px.Visitor, g px.Guard) { + s.key.Accept(v, g) + s.value.Accept(v, g) +} + +func (s *StructElement) ActualKeyType() px.Type { + if ot, ok := s.key.(*OptionalType); ok { + return ot.typ + } + return s.key +} + +func (s *StructElement) Equals(o interface{}, g px.Guard) bool { + if ose, ok := o.(*StructElement); ok { + return s.key.Equals(ose.key, g) && s.value.Equals(ose.value, g) + } + return false +} + +func (s *StructElement) String() string { + return px.ToString(s) +} + +func (s *StructElement) PType() px.Type { + return StructElementMeta +} + +func (s *StructElement) Key() px.Type { + return s.key +} + +func (s *StructElement) Name() string { + return s.name +} + +func (s *StructElement) Optional() bool { + _, ok := s.key.(*OptionalType) + return ok +} + +func (s *StructElement) resolve(c px.Context) { + s.key = resolve(c, s.key) + s.value = resolve(c, s.value) +} + +func (s *StructElement) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + optionalValue := isAssignable(s.value, undefTypeDefault) + if _, ok := s.key.(*OptionalType); ok { + if optionalValue { + utils.WriteString(bld, s.name) + } else { + s.key.ToString(bld, format, g) + } + } else { + if optionalValue { + NewNotUndefType(s.key).ToString(bld, format, g) + } else { + utils.WriteString(bld, s.name) + } + } + utils.WriteString(bld, ` => `) + s.value.ToString(bld, format, g) +} + +func (s *StructElement) Value() px.Type { + return s.value +} + +func (t *StructType) Accept(v px.Visitor, g px.Guard) { + v(t) + for _, element := range t.elements { + element.Accept(v, g) + } +} + +func (t 
*StructType) Default() px.Type { + return structTypeDefault +} + +func (t *StructType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*StructType); ok && len(t.elements) == len(ot.elements) { + for idx, element := range t.elements { + if !element.Equals(ot.elements[idx], g) { + return false + } + } + return true + } + return false +} + +func (t *StructType) Generic() px.Type { + al := make([]*StructElement, len(t.elements)) + for idx, e := range t.elements { + al[idx] = &StructElement{e.name, px.GenericType(e.key), px.GenericType(e.value)} + } + return NewStructType(al) +} + +func (t *StructType) Elements() []*StructElement { + return t.elements +} + +func (t *StructType) Get(key string) (value px.Value, ok bool) { + switch key { + case `elements`: + els := make([]px.Value, len(t.elements)) + for i, e := range t.elements { + els[i] = e + } + return WrapValues(els), true + } + return nil, false +} + +func (t *StructType) HashedMembers() map[string]*StructElement { + t.lock.Lock() + if t.hashedMembers == nil { + t.hashedMembers = t.HashedMembersCloned() + } + t.lock.Unlock() + return t.hashedMembers +} + +func (t *StructType) HashedMembersCloned() map[string]*StructElement { + hashedMembers := make(map[string]*StructElement, len(t.elements)) + for _, elem := range t.elements { + hashedMembers[elem.name] = elem + } + return hashedMembers +} + +func (t *StructType) IsAssignable(o px.Type, g px.Guard) bool { + switch o := o.(type) { + case *StructType: + hm := o.HashedMembers() + matched := 0 + for _, e1 := range t.elements { + e2 := hm[e1.name] + if e2 == nil { + if !GuardedIsAssignable(e1.key, undefTypeDefault, g) { + return false + } + } else { + if !(GuardedIsAssignable(e1.key, e2.key, g) && GuardedIsAssignable(e1.value, e2.value, g)) { + return false + } + matched++ + } + } + return matched == len(hm) + case *HashType: + required := 0 + for _, e := range t.elements { + if !GuardedIsAssignable(e.key, undefTypeDefault, g) { + if !GuardedIsAssignable(e.value, o.valueType, g) { + return false + } + required++ + } + } + if required > 0 && !GuardedIsAssignable(stringTypeDefault, o.keyType, g) { + return false + } + return GuardedIsAssignable(NewIntegerType(int64(required), int64(len(t.elements))), o.size, g) + default: + return false + } +} + +func (t *StructType) IsInstance(o px.Value, g px.Guard) bool { + ov, ok := o.(*Hash) + if !ok { + return false + } + matched := 0 + for _, element := range t.elements { + key := element.name + v, ok := ov.Get(stringValue(key)) + if !ok { + if !GuardedIsAssignable(element.key, undefTypeDefault, g) { + return false + } + } else { + if !GuardedIsInstance(element.value, v, g) { + return false + } + matched++ + } + } + return matched == ov.Len() +} + +func (t *StructType) MetaType() px.ObjectType { + return StructMetaType +} + +func (t *StructType) Name() string { + return `Struct` +} + +func (t *StructType) Parameters() []px.Value { + top := len(t.elements) + if top == 0 { + return px.EmptyValues + } + entries := make([]*HashEntry, top) + for idx, s := range t.elements { + optionalValue := isAssignable(s.value, undefTypeDefault) + var key px.Value + if _, ok := s.key.(*OptionalType); ok { + if optionalValue { + key = stringValue(s.name) + } else { + key = s.key + } + } else { + if optionalValue { + key = NewNotUndefType(s.key) + } else { + key = stringValue(s.name) + } + } + entries[idx] = WrapHashEntry(key, s.value) + } + return []px.Value{WrapHash(entries)} +} + +func (t *StructType) Resolve(c px.Context) px.Type { + for _, e := range t.elements 
{ + e.resolve(c) + } + return t +} + +func (t *StructType) CanSerializeAsString() bool { + for _, v := range t.elements { + if !(canSerializeAsString(v.key) && canSerializeAsString(v.value)) { + return false + } + } + return true +} + +func (t *StructType) SerializationString() string { + return t.String() +} + +func (t *StructType) Size() *IntegerType { + required := 0 + for _, e := range t.elements { + if !GuardedIsAssignable(e.key, undefTypeDefault, nil) { + required++ + } + } + return NewIntegerType(int64(required), int64(len(t.elements))) +} + +func (t *StructType) String() string { + return px.ToString2(t, None) +} + +func (t *StructType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *StructType) PType() px.Type { + return &TypeType{t} +} + +var structTypeDefault = &StructType{elements: []*StructElement{}} diff --git a/vendor/github.com/lyraproj/pcore/types/taggedtype.go b/vendor/github.com/lyraproj/pcore/types/taggedtype.go new file mode 100644 index 0000000..871cc94 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/taggedtype.go @@ -0,0 +1,204 @@ +package types + +import ( + "bytes" + "io" + "reflect" + + "github.com/lyraproj/pcore/px" +) + +type taggedType struct { + typ reflect.Type + puppetTags map[string]string + annotations px.OrderedMap + tags map[string]map[string]string + parsedPuppetTags map[string]px.OrderedMap +} + +var TagsAnnotationType px.ObjectType + +func init() { + px.NewTaggedType = func(typ reflect.Type, puppetTags map[string]string) px.AnnotatedType { + tt := &taggedType{typ: typ, puppetTags: puppetTags, annotations: emptyMap} + tt.initTags() + return tt + } + + px.NewAnnotatedType = func(typ reflect.Type, puppetTags map[string]string, annotations px.OrderedMap) px.AnnotatedType { + tt := &taggedType{typ: typ, puppetTags: puppetTags, annotations: annotations} + tt.initTags() + return tt + } + + TagsAnnotationType = newGoObjectType(`TagsAnnotation`, reflect.TypeOf((*px.TagsAnnotation)(nil)).Elem(), `Annotation{ + attributes => { + # Arbitrary data used by custom implementations + tags => Hash[String,String] + } + }`, + func(ctx px.Context, args []px.Value) px.Value { + return NewTagsAnnotation(args[0].(px.OrderedMap)) + }, + func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*Hash) + return NewTagsAnnotation(h.Get5(`tags`, px.EmptyMap).(px.OrderedMap)) + }, + ) +} + +type tagsAnnotation struct { + tags px.OrderedMap +} + +func NewTagsAnnotation(tags px.OrderedMap) px.TagsAnnotation { + return &tagsAnnotation{tags} +} + +func (c *tagsAnnotation) Equals(value interface{}, guard px.Guard) bool { + if oc, ok := value.(*tagsAnnotation); ok { + return c.tags.Equals(oc.tags, guard) + } + return false +} + +func (c *tagsAnnotation) PType() px.Type { + return TagsAnnotationType +} + +func (c *tagsAnnotation) String() string { + return px.ToString(c) +} + +func (c *tagsAnnotation) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + ObjectToString(c, format, bld, g) +} + +func (c *tagsAnnotation) Get(key string) (value px.Value, ok bool) { + if key == `tags` { + return c.tags, true + } + return nil, false +} + +func (c *tagsAnnotation) InitHash() px.OrderedMap { + return px.SingletonMap(`tags`, c.tags) +} + +func (c *tagsAnnotation) Tag(key string) string { + if t, ok := c.tags.Get4(key); ok { + return t.String() + } + return `` +} + +func (c *tagsAnnotation) Tags() px.OrderedMap { + return c.tags +} + +func (r *tagsAnnotation) Validate(c px.Context, annotatedType px.Annotatable) { +} 
+ +func (tg *taggedType) Annotations() px.OrderedMap { + return tg.annotations +} + +func (tg *taggedType) Type() reflect.Type { + return tg.typ +} + +func (tg *taggedType) Tags() map[string]px.OrderedMap { + return tg.parsedPuppetTags +} + +func (tg *taggedType) OtherTags() map[string]map[string]string { + return tg.tags +} + +func (tg *taggedType) initTags() { + fs := Fields(tg.typ) + nf := len(fs) + tags := make(map[string]map[string]string, 7) + puppet := make(map[string]string) + if nf > 0 { + for i, f := range fs { + if i == 0 && f.Anonymous { + // Parent + continue + } + if f.PkgPath != `` { + // Unexported + continue + } + ft := ParseTags(string(f.Tag)) + if p, ok := ft[`puppet`]; ok { + puppet[f.Name] = p + delete(ft, `puppet`) + } + if len(ft) > 0 { + tags[f.Name] = ft + } + } + } + if tg.puppetTags != nil && len(tg.puppetTags) > 0 { + for k, v := range tg.puppetTags { + puppet[k] = v + } + } + pt := make(map[string]px.OrderedMap, len(puppet)) + for k, v := range puppet { + if h, ok := ParseTagHash(v); ok { + pt[k] = h + } + } + tg.parsedPuppetTags = pt + tg.tags = tags +} + +func ParseTags(tag string) map[string]string { + result := make(map[string]string) + for tag != "" { + // Skip leading space. + i := 0 + for i < len(tag) && tag[i] == ' ' { + i++ + } + tag = tag[i:] + tagLen := len(tag) + if tagLen == 0 { + break + } + + var c rune + for i, c = range tag { + if c < ' ' || c == ':' || c == '"' || c == 0x7f { + break + } + } + if i == 0 || i+1 >= tagLen || c != ':' || tag[i+1] != '"' { + break + } + name := string(tag[:i]) + tag = tag[i+2:] // Skip ':' and leading '"' + esc := false + tb := bytes.NewBufferString(``) + for i, c = range tag { + if esc { + tb.WriteRune(c) + esc = false + } else if c == '\\' { + esc = true + } else if c == '"' { + break + } else { + tb.WriteRune(c) + } + } + if esc || c != '"' { + break + } + result[name] = tb.String() + tag = tag[i+1:] + } + return result +} diff --git a/vendor/github.com/lyraproj/pcore/types/timespantype.go b/vendor/github.com/lyraproj/pcore/types/timespantype.go new file mode 100644 index 0000000..3f512a0 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/timespantype.go @@ -0,0 +1,1118 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "math" + "time" + + "reflect" + "regexp" + "strconv" + "sync" + "unicode" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +type ( + TimespanType struct { + min time.Duration + max time.Duration + } + + // Timespan represents time.Duration as an pcore.Value + Timespan time.Duration +) + +const ( + NsecsPerUsec = 1000 + NsecsPerMsec = NsecsPerUsec * 1000 + NsecsPerSec = NsecsPerMsec * 1000 + NsecsPerMin = NsecsPerSec * 60 + NsecsPerHour = NsecsPerMin * 60 + NsecsPerDay = NsecsPerHour * 24 + + KeyString = `string` + KeyFormat = `format` + KeyNegative = `negative` + KeyDays = `days` + KeyHours = `hours` + KeyMinutes = `minutes` + KeySeconds = `seconds` + KeyMilliseconds = `milliseconds` + KeyMicroseconds = `microseconds` + KeyNanoseconds = `nanoseconds` +) + +var TimespanMin = time.Duration(math.MinInt64) +var TimespanMax = time.Duration(math.MaxInt64) + +var timespanTypeDefault = &TimespanType{TimespanMin, TimespanMax} + +var TimespanMetaType px.ObjectType +var DefaultTimespanFormatParser *TimespanFormatParser + +func init() { + tp := NewTimespanFormatParser() + DefaultTimespanFormats = []*TimespanFormat{ + tp.ParseFormat(`%D-%H:%M:%S.%-N`), + tp.ParseFormat(`%H:%M:%S.%-N`), + tp.ParseFormat(`%M:%S.%-N`), + 
tp.ParseFormat(`%S.%-N`), + tp.ParseFormat(`%D-%H:%M:%S`), + tp.ParseFormat(`%H:%M:%S`), + tp.ParseFormat(`%D-%H:%M`), + tp.ParseFormat(`%S`), + } + DefaultTimespanFormatParser = tp + + TimespanMetaType = newObjectType(`Pcore::TimespanType`, + `Pcore::ScalarType{ + attributes => { + from => { type => Optional[Timespan], value => undef }, + to => { type => Optional[Timespan], value => undef } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTimespanType2(args...) + }) + + newGoConstructor2(`Timespan`, + func(t px.LocalTypes) { + t.Type(`Formats`, `Variant[String[2],Array[String[2], 1]]`) + }, + + func(d px.Dispatch) { + d.Param(`Variant[Integer,Float]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + arg := args[0] + if i, ok := arg.(integerValue); ok { + return WrapTimespan(time.Duration(i * NsecsPerSec)) + } + return WrapTimespan(time.Duration(arg.(floatValue) * NsecsPerSec)) + }) + }, + + func(d px.Dispatch) { + d.Param(`String[1]`) + d.OptionalParam(`Formats`) + d.Function(func(c px.Context, args []px.Value) px.Value { + formats := DefaultTimespanFormats + if len(args) > 1 { + formats = toTimespanFormats(args[1]) + } + + return ParseTimespan(args[0].String(), formats) + }) + }, + + func(d px.Dispatch) { + d.Param(`Integer`) + d.Param(`Integer`) + d.Param(`Integer`) + d.Param(`Integer`) + d.OptionalParam(`Integer`) + d.OptionalParam(`Integer`) + d.OptionalParam(`Integer`) + d.Function(func(c px.Context, args []px.Value) px.Value { + days := args[0].(integerValue).Int() + hours := args[1].(integerValue).Int() + minutes := args[2].(integerValue).Int() + seconds := args[3].(integerValue).Int() + argc := len(args) + var milliseconds, microseconds, nanoseconds int64 + if argc > 4 { + milliseconds = args[4].(integerValue).Int() + if argc > 5 { + microseconds = args[5].(integerValue).Int() + if argc > 6 { + nanoseconds = args[6].(integerValue).Int() + } + } + } + return WrapTimespan(fromFields(false, days, hours, minutes, seconds, milliseconds, microseconds, nanoseconds)) + }) + }, + + func(d px.Dispatch) { + d.Param(`Struct[string => String[1], Optional[format] => Formats]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + hash := args[0].(*Hash) + str := hash.Get5(`string`, emptyString) + formats := toTimespanFormats(hash.Get5(`format`, undef)) + return ParseTimespan(str.String(), formats) + }) + }, + + func(d px.Dispatch) { + d.Param(`Struct[Optional[negative] => Boolean, + Optional[days] => Integer, + Optional[hours] => Integer, + Optional[minutes] => Integer, + Optional[seconds] => Integer, + Optional[milliseconds] => Integer, + Optional[microseconds] => Integer, + Optional[nanoseconds] => Integer]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return WrapTimespan(fromFieldsHash(args[0].(*Hash))) + }) + }) +} + +func DefaultTimespanType() *TimespanType { + return timespanTypeDefault +} + +func NewTimespanType(min time.Duration, max time.Duration) *TimespanType { + return &TimespanType{min, max} +} + +func newTimespanType2(args ...px.Value) *TimespanType { + argc := len(args) + if argc > 2 { + panic(illegalArgumentCount(`Timespan[]`, `0 or 2`, argc)) + } + if argc == 0 { + return timespanTypeDefault + } + convertArg := func(args []px.Value, argNo int) time.Duration { + arg := args[argNo] + var ( + t time.Duration + ok bool + ) + switch arg := arg.(type) { + case *Timespan: + t, ok = arg.Duration(), true + case *Hash: + t, ok = fromHash(arg) + case stringValue: + t, ok = parseDuration(arg.String(), DefaultTimespanFormats) + 
case integerValue: + t, ok = time.Duration(arg*1000000000), true + case floatValue: + t, ok = time.Duration(arg*1000000000.0), true + case *DefaultValue: + if argNo == 0 { + t, ok = TimespanMin, true + } else { + t, ok = TimespanMax, true + } + default: + t, ok = time.Duration(0), false + } + if ok { + return t + } + panic(illegalArgumentType(`Timestamp[]`, 0, `Variant[Hash,String,Integer,Float,Default]`, args[0])) + } + + min := convertArg(args, 0) + if argc == 2 { + return &TimespanType{min, convertArg(args, 1)} + } else { + return &TimespanType{min, TimespanMax} + } +} + +func (t *TimespanType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *TimespanType) Default() px.Type { + return timespanTypeDefault +} + +func (t *TimespanType) Equals(other interface{}, guard px.Guard) bool { + if ot, ok := other.(*TimespanType); ok { + return t.min == ot.min && t.max == ot.max + } + return false +} + +func (t *TimespanType) Get(key string) (px.Value, bool) { + switch key { + case `from`: + v := px.Undef + if t.min != TimespanMin { + v = WrapTimespan(t.min) + } + return v, true + case `to`: + v := px.Undef + if t.max != TimespanMax { + v = WrapTimespan(t.max) + } + return v, true + default: + return nil, false + } +} + +func (t *TimespanType) MetaType() px.ObjectType { + return TimespanMetaType +} + +func (t *TimespanType) Parameters() []px.Value { + if t.max == math.MaxInt64 { + if t.min == math.MinInt64 { + return px.EmptyValues + } + return []px.Value{stringValue(t.min.String())} + } + if t.min == math.MinInt64 { + return []px.Value{WrapDefault(), stringValue(t.max.String())} + } + return []px.Value{stringValue(t.min.String()), stringValue(t.max.String())} +} + +func (t *TimespanType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(time.Duration(0)), true +} + +func (t *TimespanType) CanSerializeAsString() bool { + return true +} + +func (t *TimespanType) SerializationString() string { + return t.String() +} + +func (t *TimespanType) String() string { + return px.ToString2(t, None) +} + +func (t *TimespanType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *TimespanType) PType() px.Type { + return &TypeType{t} +} + +func (t *TimespanType) IsInstance(o px.Value, g px.Guard) bool { + return t.IsAssignable(o.PType(), g) +} + +func (t *TimespanType) IsAssignable(o px.Type, g px.Guard) bool { + if ot, ok := o.(*TimespanType); ok { + return t.min <= ot.min && t.max >= ot.max + } + return false +} + +func (t *TimespanType) Name() string { + return `Timespan` +} + +func WrapTimespan(val time.Duration) Timespan { + return Timespan(val) +} + +func ParseTimespan(str string, formats []*TimespanFormat) Timespan { + if d, ok := parseDuration(str, formats); ok { + return WrapTimespan(d) + } + fs := bytes.NewBufferString(``) + for i, f := range formats { + if i > 0 { + fs.WriteByte(',') + } + fs.WriteString(f.fmt) + } + panic(px.Error(px.CannotBeParsed, issue.H{`str`: str, `formats`: fs.String()})) +} + +func fromFields(negative bool, days, hours, minutes, seconds, milliseconds, microseconds, nanoseconds int64) time.Duration { + ns := (((((days*24+hours)*60+minutes)*60+seconds)*1000+milliseconds)*1000+microseconds)*1000 + nanoseconds + if negative { + ns = -ns + } + return time.Duration(ns) +} + +func fromFieldsHash(hash *Hash) time.Duration { + intArg := func(key string) int64 { + if v, ok := hash.Get4(key); ok { + if i, ok := v.(integerValue); ok { + return int64(i) + } + } + return 0 + } + boolArg := func(key string) bool { 
+ if v, ok := hash.Get4(key); ok { + if b, ok := v.(booleanValue); ok { + return b.Bool() + } + } + return false + } + return fromFields( + boolArg(KeyNegative), + intArg(KeyDays), + intArg(KeyHours), + intArg(KeyMinutes), + intArg(KeySeconds), + intArg(KeyMilliseconds), + intArg(KeyMicroseconds), + intArg(KeyNanoseconds)) +} + +func fromStringHash(hash *Hash) (time.Duration, bool) { + str := hash.Get5(KeyString, emptyString) + fmtStrings := hash.Get5(KeyFormat, nil) + var formats []*TimespanFormat + if fmtStrings == nil { + formats = DefaultTimespanFormats + } else { + if fs, ok := fmtStrings.(stringValue); ok { + formats = []*TimespanFormat{DefaultTimespanFormatParser.ParseFormat(string(fs))} + } else { + if fsa, ok := fmtStrings.(*Array); ok { + formats = make([]*TimespanFormat, fsa.Len()) + fsa.EachWithIndex(func(fs px.Value, i int) { + formats[i] = DefaultTimespanFormatParser.ParseFormat(fs.String()) + }) + } + } + } + return parseDuration(str.String(), formats) +} + +func fromHash(hash *Hash) (time.Duration, bool) { + if hash.IncludesKey2(KeyString) { + return fromStringHash(hash) + } + return fromFieldsHash(hash), true +} + +func parseDuration(str string, formats []*TimespanFormat) (time.Duration, bool) { + for _, f := range formats { + if ts, ok := f.parse(str); ok { + return ts, true + } + } + return 0, false +} + +func (tv Timespan) Abs() px.Number { + if tv < 0 { + return Timespan(-tv) + } + return tv +} + +// Days returns a positive integer denoting the number of days +func (tv Timespan) Days() int64 { + return tv.totalDays() +} + +func (tv Timespan) Duration() time.Duration { + return time.Duration(tv) +} + +// Hours returns a positive integer, 0 - 23 denoting hours of day +func (tv Timespan) Hours() int64 { + return tv.totalHours() % 24 +} + +func (tv Timespan) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(Timespan); ok { + return tv.Int() == ov.Int() + } + return false +} + +// Float returns the number of seconds with fraction +func (tv Timespan) Float() float64 { + return float64(tv.totalNanoseconds()) / float64(NsecsPerSec) +} + +func (tv Timespan) Format(format string) string { + return DefaultTimespanFormatParser.ParseFormat(format).format(tv) +} + +// Int returns the total number of seconds +func (tv Timespan) Int() int64 { + return tv.totalSeconds() +} + +// Minutes returns a positive integer, 0 - 59 denoting minutes of hour +func (tv Timespan) Minutes() int64 { + return tv.totalMinutes() % 60 +} + +func (tv Timespan) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf(time.Duration(tv)) +} + +func (tv Timespan) ReflectTo(c px.Context, dest reflect.Value) { + rv := tv.Reflect(c) + if !rv.Type().AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: rv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(rv) +} + +// Seconds returns a positive integer, 0 - 59 denoting seconds of minute +func (tv Timespan) Seconds() int64 { + return tv.totalSeconds() % 60 +} + +// Milliseconds returns a positive integer, 0 - 999 denoting milliseconds of second +func (tv Timespan) Milliseconds() int64 { + return tv.totalMilliseconds() % 1000 +} + +func (tv Timespan) CanSerializeAsString() bool { + return true +} + +func (tv Timespan) SerializationString() string { + return tv.String() +} + +func (tv Timespan) String() string { + return fmt.Sprintf(`%d`, tv.Int()) +} + +func (tv Timespan) ToKey(b *bytes.Buffer) { + n := tv.Int() + b.WriteByte(1) + b.WriteByte(HkTimespan) + b.WriteByte(byte(n >> 56)) + b.WriteByte(byte(n 
>> 48)) + b.WriteByte(byte(n >> 40)) + b.WriteByte(byte(n >> 32)) + b.WriteByte(byte(n >> 24)) + b.WriteByte(byte(n >> 16)) + b.WriteByte(byte(n >> 8)) + b.WriteByte(byte(n)) +} + +func (tv Timespan) totalDays() int64 { + return time.Duration(tv).Nanoseconds() / NsecsPerDay +} + +func (tv Timespan) totalHours() int64 { + return time.Duration(tv).Nanoseconds() / NsecsPerHour +} + +func (tv Timespan) totalMinutes() int64 { + return time.Duration(tv).Nanoseconds() / NsecsPerMin +} + +func (tv Timespan) totalSeconds() int64 { + return time.Duration(tv).Nanoseconds() / NsecsPerSec +} + +func (tv Timespan) totalMilliseconds() int64 { + return time.Duration(tv).Nanoseconds() / NsecsPerMsec +} + +func (tv Timespan) totalNanoseconds() int64 { + return time.Duration(tv).Nanoseconds() +} + +func (tv Timespan) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + DefaultTimespanFormats[0].format2(b, tv) +} + +func (tv Timespan) PType() px.Type { + t := time.Duration(tv) + return &TimespanType{t, t} +} + +type ( + TimespanFormat struct { + rx *regexp.Regexp + fmt string + segments []segment + } + + TimespanFormatParser struct { + lock sync.Mutex + formats map[string]*TimespanFormat + } + + segment interface { + appendRegexp(buffer *bytes.Buffer) + + appendTo(buffer io.Writer, ts Timespan) + + multiplier() int + + nanoseconds(group string, multiplier int) int64 + + ordinal() int + + setUseTotal() + } + + literalSegment struct { + literal string + } + + valueSegment struct { + useTotal bool + padChar rune + width int + defaultWidth int + format string + } + + daySegment struct { + valueSegment + } + + hourSegment struct { + valueSegment + } + + minuteSegment struct { + valueSegment + } + + secondSegment struct { + valueSegment + } + + fragmentSegment struct { + valueSegment + } + + millisecondSegment struct { + fragmentSegment + } + + nanosecondSegment struct { + fragmentSegment + } +) + +const ( + nsecMax = 0 + msecMax = 1 + secMax = 2 + minMax = 3 + hourMax = 4 + dayMax = 5 + + // States used by the #internal_parser function + + stateLiteral = 0 // expects literal or '%' + statePad = 1 // expects pad, width, or format character + stateWidth = 2 // expects width, or format character +) + +var trimTrailingZeroes = regexp.MustCompile(`\A([0-9]+?)0*\z`) +var digitsOnly = regexp.MustCompile(`\A[0-9]+\z`) +var DefaultTimespanFormats []*TimespanFormat + +func NewTimespanFormatParser() *TimespanFormatParser { + return &TimespanFormatParser{formats: make(map[string]*TimespanFormat, 17)} +} + +func (p *TimespanFormatParser) ParseFormat(format string) *TimespanFormat { + p.lock.Lock() + defer p.lock.Unlock() + + if f, ok := p.formats[format]; ok { + return f + } + f := p.parse(format) + p.formats[format] = f + return f +} + +func (p *TimespanFormatParser) parse(str string) *TimespanFormat { + bld := make([]segment, 0, 7) + highest := -1 + state := stateLiteral + padChar := '0' + width := -1 + formatStart := 0 + + for pos, c := range str { + if state == stateLiteral { + if c == '%' { + state = statePad + formatStart = pos + padChar = '0' + width = -1 + } else { + bld = appendLiteral(bld, c) + } + continue + } + + switch c { + case '%': + bld = appendLiteral(bld, c) + state = stateLiteral + case '-': + if state != statePad { + panic(badFormatSpecifier(str, formatStart, pos)) + } + padChar = 0 + state = stateWidth + case '_': + if state != statePad { + panic(badFormatSpecifier(str, formatStart, pos)) + } + padChar = ' ' + state = stateWidth + case 'D': + highest = dayMax + bld = append(bld, 
newDaySegment(padChar, width)) + state = stateLiteral + case 'H': + if highest < hourMax { + highest = hourMax + } + bld = append(bld, newHourSegment(padChar, width)) + state = stateLiteral + case 'M': + if highest < minMax { + highest = minMax + } + bld = append(bld, newMinuteSegment(padChar, width)) + state = stateLiteral + case 'S': + if highest < secMax { + highest = secMax + } + bld = append(bld, newSecondSegment(padChar, width)) + state = stateLiteral + case 'L': + if highest < msecMax { + highest = msecMax + } + bld = append(bld, newMillisecondSegment(padChar, width)) + state = stateLiteral + case 'N': + if highest < nsecMax { + highest = nsecMax + } + bld = append(bld, newNanosecondSegment(padChar, width)) + state = stateLiteral + default: + if c < '0' || c > '9' { + panic(badFormatSpecifier(str, formatStart, pos)) + } + if state == statePad && c == '0' { + padChar = '0' + } else { + n := int(c) - 0x30 + if width == -1 { + width = n + } else { + width = width*10 + n + } + } + state = stateWidth + } + } + + if state != stateLiteral { + panic(badFormatSpecifier(str, formatStart, len(str))) + } + + if highest != -1 { + for _, s := range bld { + if s.ordinal() == highest { + s.setUseTotal() + } + } + } + return newTimespanFormat(str, bld) +} + +func appendLiteral(bld []segment, c rune) []segment { + s := string(c) + lastIdx := len(bld) - 1 + if lastIdx >= 0 { + if li, ok := bld[lastIdx].(*literalSegment); ok { + li.literal += s + return bld + } + } + return append(bld, newLiteralSegment(s)) +} + +func badFormatSpecifier(str string, start, pos int) issue.Reported { + return px.Error(px.TimespanBadFormatSpec, issue.H{`expression`: str[start:pos], `format`: str, `position`: pos}) +} + +func newTimespanFormat(format string, segments []segment) *TimespanFormat { + return &TimespanFormat{fmt: format, segments: segments} +} + +func (f *TimespanFormat) format(ts Timespan) string { + b := bytes.NewBufferString(``) + f.format2(b, ts) + return b.String() +} + +func (f *TimespanFormat) format2(b io.Writer, ts Timespan) { + for _, s := range f.segments { + s.appendTo(b, ts) + } +} + +func (f *TimespanFormat) parse(str string) (time.Duration, bool) { + md := f.regexp().FindStringSubmatch(str) + if md == nil { + return 0, false + } + nanoseconds := int64(0) + for idx, group := range md[1:] { + segment := f.segments[idx] + if _, ok := segment.(*literalSegment); ok { + continue + } + in := 0 + for i, c := range group { + if !unicode.IsSpace(c) { + break + } + in = i + } + if in > 0 { + group = group[in:] + } + if !digitsOnly.MatchString(group) { + return 0, false + } + nanoseconds += segment.nanoseconds(group, segment.multiplier()) + } + return time.Duration(nanoseconds), true +} + +func (f *TimespanFormat) regexp() *regexp.Regexp { + if f.rx == nil { + b := bytes.NewBufferString(`\A-?`) + for _, s := range f.segments { + s.appendRegexp(b) + } + b.WriteString(`\z`) + rx, err := regexp.Compile(b.String()) + if err != nil { + panic(`Internal error while compiling Timespan format regexp: ` + err.Error()) + } + f.rx = rx + } + return f.rx +} + +func newLiteralSegment(literal string) segment { + return &literalSegment{literal} +} + +func (s *literalSegment) appendRegexp(buffer *bytes.Buffer) { + buffer.WriteByte('(') + buffer.WriteString(regexp.QuoteMeta(s.literal)) + buffer.WriteByte(')') +} + +func (s *literalSegment) appendTo(buffer io.Writer, ts Timespan) { + _, err := io.WriteString(buffer, s.literal) + if err != nil { + panic(err) + } +} + +func (s *literalSegment) multiplier() int { + return 0 +} + 
+func (s *literalSegment) nanoseconds(group string, multiplier int) int64 { + return 0 +} + +func (s *literalSegment) ordinal() int { + return -1 +} + +func (s *literalSegment) setUseTotal() {} + +func (s *valueSegment) initialize(padChar rune, width int, defaultWidth int) { + s.useTotal = false + s.padChar = padChar + s.width = width + s.defaultWidth = defaultWidth +} + +func (s *valueSegment) appendRegexp(buffer *bytes.Buffer) { + var err error + if s.width < 0 { + switch s.padChar { + case 0, '0': + if s.useTotal { + buffer.WriteString(`([0-9]+)`) + } else { + _, err = fmt.Fprintf(buffer, `([0-9]{1,%d})`, s.defaultWidth) + } + default: + if s.useTotal { + buffer.WriteString(`\s*([0-9]+)`) + } else { + _, err = fmt.Fprintf(buffer, `([0-9\\s]{1,%d})`, s.defaultWidth) + } + } + } else { + switch s.padChar { + case 0: + _, err = fmt.Fprintf(buffer, `([0-9]{1,%d})`, s.width) + case '0': + _, err = fmt.Fprintf(buffer, `([0-9]{%d})`, s.width) + default: + _, err = fmt.Fprintf(buffer, `([0-9\\s]{%d})`, s.width) + } + } + if err != nil { + panic(err) + } +} + +func (s *valueSegment) appendValue(buffer io.Writer, n int64) { + _, err := fmt.Fprintf(buffer, s.format, n) + if err != nil { + panic(err) + } +} + +func (s *valueSegment) createFormat() string { + if s.padChar == 0 { + return `%d` + } + w := s.width + if w < 0 { + w = s.defaultWidth + } + if s.padChar == ' ' { + return fmt.Sprintf(`%%%dd`, w) + } + return fmt.Sprintf(`%%%c%dd`, s.padChar, w) +} + +func (s *valueSegment) nanoseconds(group string, multiplier int) int64 { + ns, err := strconv.ParseInt(group, 10, 64) + if err != nil { + ns = 0 + } + return ns * int64(multiplier) +} + +func (s *valueSegment) setUseTotal() { + s.useTotal = true +} + +func newDaySegment(padChar rune, width int) segment { + s := &daySegment{} + s.initialize(padChar, width, 1) + s.format = s.createFormat() + return s +} + +func (s *daySegment) appendTo(buffer io.Writer, ts Timespan) { + s.appendValue(buffer, ts.Days()) +} + +func (s *daySegment) multiplier() int { + return NsecsPerDay +} + +func (s *daySegment) ordinal() int { + return dayMax +} + +func newHourSegment(padChar rune, width int) segment { + s := &hourSegment{} + s.initialize(padChar, width, 2) + s.format = s.createFormat() + return s +} + +func (s *hourSegment) appendTo(buffer io.Writer, ts Timespan) { + var v int64 + if s.useTotal { + v = ts.totalHours() + } else { + v = ts.Hours() + } + s.appendValue(buffer, v) +} + +func (s *hourSegment) multiplier() int { + return NsecsPerHour +} + +func (s *hourSegment) ordinal() int { + return hourMax +} + +func newMinuteSegment(padChar rune, width int) segment { + s := &minuteSegment{} + s.initialize(padChar, width, 2) + s.format = s.createFormat() + return s +} + +func (s *minuteSegment) appendTo(buffer io.Writer, ts Timespan) { + var v int64 + if s.useTotal { + v = ts.totalMinutes() + } else { + v = ts.Minutes() + } + s.appendValue(buffer, v) +} + +func (s *minuteSegment) multiplier() int { + return NsecsPerMin +} + +func (s *minuteSegment) ordinal() int { + return minMax +} + +func newSecondSegment(padChar rune, width int) segment { + s := &secondSegment{} + s.initialize(padChar, width, 2) + s.format = s.createFormat() + return s +} + +func (s *secondSegment) appendTo(buffer io.Writer, ts Timespan) { + var v int64 + if s.useTotal { + v = ts.totalSeconds() + } else { + v = ts.Seconds() + } + s.appendValue(buffer, v) +} + +func (s *secondSegment) multiplier() int { + return NsecsPerSec +} + +func (s *secondSegment) ordinal() int { + return secMax +} + +func 
(s *fragmentSegment) appendValue(buffer io.Writer, n int64) { + if !(s.useTotal || s.padChar == '0') { + n, _ = strconv.ParseInt(trimTrailingZeroes.ReplaceAllString(strconv.FormatInt(n, 10), `$1`), 10, 64) + } + s.valueSegment.appendValue(buffer, n) +} + +func (s *fragmentSegment) createFormat() string { + if s.padChar == 0 { + return `%d` + } + w := s.width + if w < 0 { + w = s.defaultWidth + } + return fmt.Sprintf(`%%-%dd`, w) +} + +func (s *fragmentSegment) nanoseconds(group string, multiplier int) int64 { + if s.useTotal { + panic(px.Error(px.TimespanFormatSpecNotHigher, issue.NoArgs)) + } + n := s.valueSegment.nanoseconds(group, multiplier) + p := int64(9 - len(group)) + if p <= 0 { + return n + } + return utils.Int64Pow(n*10, p) +} + +func newMillisecondSegment(padChar rune, width int) segment { + s := &millisecondSegment{} + s.initialize(padChar, width, 3) + s.format = s.createFormat() + return s +} + +func (s *millisecondSegment) appendTo(buffer io.Writer, ts Timespan) { + var v int64 + if s.useTotal { + v = ts.totalMilliseconds() + } else { + v = ts.Milliseconds() + } + s.appendValue(buffer, v) +} + +func (s *millisecondSegment) multiplier() int { + return NsecsPerMsec +} + +func (s *millisecondSegment) ordinal() int { + return msecMax +} + +func newNanosecondSegment(padChar rune, width int) segment { + s := &nanosecondSegment{} + s.initialize(padChar, width, 9) + s.format = s.createFormat() + return s +} + +func (s *nanosecondSegment) appendTo(buffer io.Writer, ts Timespan) { + v := ts.totalNanoseconds() + w := s.width + if w < 0 { + w = s.defaultWidth + } + if w < 9 { + // Truncate digits to the right, i.e. let %6N reflect microseconds + v /= utils.Int64Pow(10, int64(9-w)) + if !s.useTotal { + v %= utils.Int64Pow(10, int64(w)) + } + } else { + if !s.useTotal { + v %= NsecsPerSec + } + } + s.appendValue(buffer, v) +} + +func (s *nanosecondSegment) multiplier() int { + w := s.width + if w < 0 { + w = s.defaultWidth + } + if w < 9 { + return int(utils.Int64Pow(10, int64(9-w))) + } + return 1 +} + +func (s *nanosecondSegment) ordinal() int { + return nsecMax +} + +func toTimespanFormats(f px.Value) []*TimespanFormat { + fs := DefaultTimespanFormats + switch f := f.(type) { + case *Array: + fs = make([]*TimespanFormat, f.Len()) + f.EachWithIndex(func(f px.Value, i int) { + fs[i] = DefaultTimespanFormatParser.ParseFormat(f.String()) + }) + case stringValue: + fs = []*TimespanFormat{DefaultTimespanFormatParser.ParseFormat(f.String())} + } + return fs +} diff --git a/vendor/github.com/lyraproj/pcore/types/timestamptype.go b/vendor/github.com/lyraproj/pcore/types/timestamptype.go new file mode 100644 index 0000000..1f1d5f2 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/timestamptype.go @@ -0,0 +1,774 @@ +package types + +import ( + "bytes" + "io" + "math" + "time" + + "reflect" + "sync" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type ( + TimestampType struct { + min time.Time + max time.Time + } + + // Timestamp represents TimestampType as a value + Timestamp time.Time +) + +// MAX_UNIX_SECS is an offset of 62135596800 seconds to sec that +// represents the number of seconds from 1970-01-01:00:00:00 UTC. This offset +// must be retracted from the MaxInt64 value in order for it to end up +// as that value internally. 
+const MaxUnixSecs = math.MaxInt64 - 62135596800 + +var MinTime = time.Time{} +var MaxTime = time.Unix(MaxUnixSecs, 999999999) +var timestampTypeDefault = &TimestampType{MinTime, MaxTime} + +var TimestampMetaType px.ObjectType + +var DefaultTimestampFormatsWoTz []*TimestampFormat +var DefaultTimestampFormats []*TimestampFormat +var DefaultTimestampFormatParser *TimestampFormatParser + +func init() { + TimestampMetaType = newObjectType(`Pcore::TimestampType`, + `Pcore::ScalarType { + attributes => { + from => { type => Optional[Timestamp], value => undef }, + to => { type => Optional[Timestamp], value => undef } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTimestampType2(args...) + }) + + tp := NewTimestampFormatParser() + DefaultTimestampFormatParser = tp + + DefaultTimestampFormatsWoTz = []*TimestampFormat{ + tp.ParseFormat(`%FT%T.%N`), + tp.ParseFormat(`%FT%T`), + tp.ParseFormat(`%F %T.%N`), + tp.ParseFormat(`%F %T`), + tp.ParseFormat(`%F`), + } + + DefaultTimestampFormats = []*TimestampFormat{ + tp.ParseFormat(`%FT%T.%N %Z`), + tp.ParseFormat(`%FT%T %Z`), + tp.ParseFormat(`%F %T.%N %Z`), + &TimestampFormat{layout: time.RFC3339}, + tp.ParseFormat(`%F %T %Z`), + tp.ParseFormat(`%F %Z`), + } + DefaultTimestampFormats = append(DefaultTimestampFormats, DefaultTimestampFormatsWoTz...) + + newGoConstructor2(`Timestamp`, + + func(t px.LocalTypes) { + t.Type(`Formats`, `Variant[String[2],Array[String[2], 1]]`) + }, + + func(d px.Dispatch) { + d.Function(func(c px.Context, args []px.Value) px.Value { + return WrapTimestamp(time.Now()) + }) + }, + + func(d px.Dispatch) { + d.Param(`Variant[Integer,Float]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + arg := args[0] + if i, ok := arg.(integerValue); ok { + return WrapTimestamp(time.Unix(int64(i), 0)) + } + s, f := math.Modf(float64(arg.(floatValue))) + return WrapTimestamp(time.Unix(int64(s), int64(f*1000000000.0))) + }) + }, + + func(d px.Dispatch) { + d.Param(`String[1]`) + d.OptionalParam(`Formats`) + d.OptionalParam(`String[1]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + formats := DefaultTimestampFormats + tz := `` + if len(args) > 1 { + formats = toTimestampFormats(args[1]) + if len(args) > 2 { + tz = args[2].String() + } + } + return ParseTimestamp(args[0].String(), formats, tz) + }) + }, + + func(d px.Dispatch) { + d.Param(`Struct[string => String[1],Optional[format] => Formats,Optional[timezone] => String[1]]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + hash := args[0].(*Hash) + str := hash.Get5(`string`, emptyString).String() + formats := toTimestampFormats(hash.Get5(`format`, px.Undef)) + tz := hash.Get5(`timezone`, emptyString).String() + return ParseTimestamp(str, formats, tz) + }) + }) +} + +func DefaultTimestampType() *TimestampType { + return timestampTypeDefault +} + +func NewTimestampType(min time.Time, max time.Time) *TimestampType { + return &TimestampType{min, max} +} + +func TimeFromHash(hash *Hash) (time.Time, bool) { + str := hash.Get5(`string`, emptyString).String() + formats := toTimestampFormats(hash.Get5(`format`, undef)) + tz := hash.Get5(`timezone`, emptyString).String() + return parseTime(str, formats, tz) +} + +func TimeFromString(value string) time.Time { + return time.Time(*ParseTimestamp(value, DefaultTimestampFormats, ``)) +} + +func newTimestampType2(args ...px.Value) *TimestampType { + argc := len(args) + if argc > 2 { + panic(illegalArgumentCount(`Timestamp[]`, `0 or 2`, argc)) + } + if argc == 0 { + return timestampTypeDefault + } 
+ convertArg := func(args []px.Value, argNo int) time.Time { + arg := args[argNo] + var ( + t time.Time + ok bool + ) + switch arg := arg.(type) { + case *Timestamp: + t, ok = time.Time(*arg), true + case *Hash: + t, ok = TimeFromHash(arg) + case stringValue: + t, ok = TimeFromString(arg.String()), true + case integerValue: + t, ok = time.Unix(int64(arg), 0), true + case floatValue: + s, f := math.Modf(float64(arg)) + t, ok = time.Unix(int64(s), int64(f*1000000000.0)), true + case *DefaultValue: + if argNo == 0 { + t, ok = time.Time{}, true + } else { + t, ok = time.Unix(MaxUnixSecs, 999999999), true + } + default: + t, ok = time.Time{}, false + } + if ok { + return t + } + panic(illegalArgumentType(`Timestamp[]`, 0, `Variant[Hash,String,Integer,Float,Default]`, args[0])) + } + + min := convertArg(args, 0) + if argc == 2 { + return &TimestampType{min, convertArg(args, 1)} + } else { + return &TimestampType{min, MaxTime} + } +} + +func (t *TimestampType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *TimestampType) Default() px.Type { + return timestampTypeDefault +} + +func (t *TimestampType) Equals(other interface{}, guard px.Guard) bool { + if ot, ok := other.(*TimestampType); ok { + return t.min.Equal(ot.min) && t.max.Equal(ot.max) + } + return false +} + +func (t *TimestampType) Get(key string) (px.Value, bool) { + switch key { + case `from`: + v := px.Undef + if t.min != MinTime { + v = WrapTimestamp(t.min) + } + return v, true + case `to`: + v := px.Undef + if t.max != MaxTime { + v = WrapTimestamp(t.max) + } + return v, true + default: + return nil, false + } +} + +func (t *TimestampType) IsInstance(o px.Value, g px.Guard) bool { + return t.IsAssignable(o.PType(), g) +} + +func (t *TimestampType) IsAssignable(o px.Type, g px.Guard) bool { + if ot, ok := o.(*TimestampType); ok { + return (t.min.Before(ot.min) || t.min.Equal(ot.min)) && (t.max.After(ot.max) || t.max.Equal(ot.max)) + } + return false +} + +func (t *TimestampType) MetaType() px.ObjectType { + return TimestampMetaType +} + +func (t *TimestampType) Parameters() []px.Value { + if t.max.Equal(MaxTime) { + if t.min.Equal(MinTime) { + return px.EmptyValues + } + return []px.Value{stringValue(t.min.String())} + } + if t.min.Equal(MinTime) { + return []px.Value{WrapDefault(), stringValue(t.max.String())} + } + return []px.Value{stringValue(t.min.String()), stringValue(t.max.String())} +} + +func (t *TimestampType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.TypeOf(time.Time{}), true +} + +func (t *TimestampType) CanSerializeAsString() bool { + return true +} + +func (t *TimestampType) SerializationString() string { + return t.String() +} + +func (t *TimestampType) String() string { + return px.ToString2(t, None) +} + +func (t *TimestampType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *TimestampType) PType() px.Type { + return &TypeType{t} +} + +func (t *TimestampType) Name() string { + return `Timestamp` +} + +func WrapTimestamp(time time.Time) *Timestamp { + return (*Timestamp)(&time) +} + +func ParseTimestamp(str string, formats []*TimestampFormat, tz string) *Timestamp { + if t, ok := parseTime(str, formats, tz); ok { + return WrapTimestamp(t) + } + fs := bytes.NewBufferString(``) + for i, f := range formats { + if i > 0 { + fs.WriteByte(',') + } + fs.WriteString(f.format) + } + panic(px.Error(px.TimestampCannotBeParsed, issue.H{`str`: str, `formats`: fs.String()})) +} + +func parseTime(str string, formats []*TimestampFormat, tz string) 
(time.Time, bool) { + usedTz := tz + if usedTz == `` { + usedTz = `UTC` + } + loc := loadLocation(usedTz) + + for _, f := range formats { + ts, err := time.ParseInLocation(f.layout, str, loc) + if err == nil { + if usedTz != ts.Location().String() { + if tz != `` { + panic(px.Error(px.TimestampTzAmbiguity, issue.H{`parsed`: ts.Location().String(), `given`: tz})) + } + // Golang does really weird things when the string contains a timezone that isn't equal + // to the given timezone. Instead of loading the given zone, it creates a new location + // with a similarly named zone but with offset 0. It doesn't matter whether Parse or ParseInLocation + // is used; both have the same weird behavior. For this reason, a new loadLocation is performed + // here, followed by a reparse. + loc, _ = time.LoadLocation(ts.Location().String()) + ts, err = time.ParseInLocation(f.layout, str, loc) + if err != nil { + continue + } + } + return ts.UTC(), true + } + } + return time.Time{}, false +} + +func loadLocation(tz string) *time.Location { + loc, err := time.LoadLocation(tz) + if err != nil { + panic(px.Error(px.InvalidTimezone, issue.H{`zone`: tz, `detail`: err.Error()})) + } + return loc +} + +func (tv *Timestamp) Equals(o interface{}, g px.Guard) bool { + if ov, ok := o.(*Timestamp); ok { + return tv.Int() == ov.Int() + } + return false +} + +func (tv *Timestamp) Float() float64 { + t := (*time.Time)(tv) + y := t.Year() + // Timestamps that represent a date before the year 1678 or after 2262 cannot + // be represented as nanoseconds in an int64. + if 1678 < y && y < 2262 { + return float64(float64(t.UnixNano()) / 1000000000.0) + } + // Fall back to microsecond precision + us := t.Unix()*1000000 + int64(t.Nanosecond())/1000 + return float64(us) / 1000000.0 +} + +func (tv *Timestamp) Format(format string) string { + return DefaultTimestampFormatParser.ParseFormat(format).Format(tv) +} + +func (tv *Timestamp) Format2(format, tz string) string { + return DefaultTimestampFormatParser.ParseFormat(format).Format2(tv, tz) +} + +func (tv *Timestamp) Reflect(c px.Context) reflect.Value { + return reflect.ValueOf((time.Time)(*tv)) +} + +func (tv *Timestamp) ReflectTo(c px.Context, dest reflect.Value) { + rv := tv.Reflect(c) + if !rv.Type().AssignableTo(dest.Type()) { + panic(px.Error(px.AttemptToSetWrongKind, issue.H{`expected`: rv.Type().String(), `actual`: dest.Type().String()})) + } + dest.Set(rv) +} + +func (tv *Timestamp) Time() time.Time { + return time.Time(*tv) +} + +func (tv *Timestamp) Int() int64 { + return (*time.Time)(tv).Unix() +} + +func (tv *Timestamp) CanSerializeAsString() bool { + return true +} + +func (tv *Timestamp) SerializationString() string { + return tv.String() +} + +func (tv *Timestamp) String() string { + return px.ToString2(tv, None) +} + +func (tv *Timestamp) ToKey(b *bytes.Buffer) { + t := (*time.Time)(tv) + b.WriteByte(1) + b.WriteByte(HkTimestamp) + n := t.Unix() + b.WriteByte(byte(n >> 56)) + b.WriteByte(byte(n >> 48)) + b.WriteByte(byte(n >> 40)) + b.WriteByte(byte(n >> 32)) + b.WriteByte(byte(n >> 24)) + b.WriteByte(byte(n >> 16)) + b.WriteByte(byte(n >> 8)) + b.WriteByte(byte(n)) + n = int64(t.Nanosecond()) + b.WriteByte(byte(n >> 56)) + b.WriteByte(byte(n >> 48)) + b.WriteByte(byte(n >> 40)) + b.WriteByte(byte(n >> 32)) + b.WriteByte(byte(n >> 24)) + b.WriteByte(byte(n >> 16)) + b.WriteByte(byte(n >> 8)) + b.WriteByte(byte(n)) +} + +func (tv *Timestamp) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + _, err := io.WriteString(b, 
(*time.Time)(tv).Format(DefaultTimestampFormats[0].layout)) + if err != nil { + panic(err) + } +} + +func (tv *Timestamp) PType() px.Type { + t := time.Time(*tv) + return &TimestampType{t, t} +} + +const ( + // Strings recognized as golang "layout" elements + + loLongMonth = `January` + loMonth = `Jan` + loLongWeekDay = `Monday` + loWeekDay = `Mon` + loTZ = `MST` + loZeroMonth = `01` + loZeroDay = `02` + loZeroHour12 = `03` + loZeroMinute = `04` + loZeroSecond = `05` + loYear = `06` + loHour = `15` + loNumMonth = `1` + loLongYear = `2006` + loDay = `2` + loUnderDay = `_2` + loHour12 = `3` + loMinute = `4` + loSecond = `5` + loPM = `PM` + loPm = `pm` + + loNumColonSecondsTZ = `-07:00:00` + loNumTZ = `-0700` + loNumColonTZ = `-07:00` + loNumShortTZ = `-07` + + /* Potential future additions + loISO8601SecondsTZ = `Z070000` + loISO8601ColonSecondsTZ = `Z07:00:00` + loISO8601TZ = `Z0700` + loISO8601ColonTZ = `Z07:00` + loISO8601ShortTz = `Z07` + loFracSecond0 = `.000` + loFracSecond9 = `.999` + */ +) + +type ( + TimestampFormat struct { + format string + layout string + } + + TimestampFormatParser struct { + lock sync.Mutex + formats map[string]*TimestampFormat + } +) + +func NewTimestampFormatParser() *TimestampFormatParser { + return &TimestampFormatParser{formats: make(map[string]*TimestampFormat, 17)} +} + +func (p *TimestampFormatParser) ParseFormat(format string) *TimestampFormat { + p.lock.Lock() + defer p.lock.Unlock() + + if fmt, ok := p.formats[format]; ok { + return fmt + } + bld := bytes.NewBufferString(``) + strftimeToLayout(bld, format) + fmt := &TimestampFormat{format, bld.String()} + p.formats[format] = fmt + return fmt +} + +func (f *TimestampFormat) Format(t *Timestamp) string { + return (*time.Time)(t).Format(f.layout) +} + +func (f *TimestampFormat) Format2(t *Timestamp, tz string) string { + return (*time.Time)(t).In(loadLocation(tz)).Format(f.layout) +} + +func strftimeToLayout(bld *bytes.Buffer, str string) { + state := stateLiteral + colons := 0 + padChar := '0' + width := -1 + formatStart := 0 + upper := false + + for pos, c := range str { + if state == stateLiteral { + if c == '%' { + state = statePad + formatStart = pos + padChar = '0' + width = -1 + upper = false + colons = 0 + } else { + bld.WriteRune(c) + } + continue + } + + switch c { + case '-': + if state != statePad { + panic(badFormatSpecifier(str, formatStart, pos)) + } + padChar = 0 + state = stateWidth + case '^': + if state != statePad { + panic(badFormatSpecifier(str, formatStart, pos)) + } + upper = true + case '_': + if state != statePad { + panic(badFormatSpecifier(str, formatStart, pos)) + } + padChar = ' ' + state = stateWidth + case 'Y': + bld.WriteString(loLongYear) + state = stateLiteral + case 'y': + bld.WriteString(loYear) + state = stateLiteral + case 'C': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `century`)) + case 'm': + switch padChar { + case 0: + bld.WriteString(loNumMonth) + case '0': + bld.WriteString(loZeroMonth) + case ' ': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `space padded month`)) + } + state = stateLiteral + case 'B': + if upper { + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `upper cased month`)) + } + bld.WriteString(loLongMonth) + state = stateLiteral + case 'b', 'h': + if upper { + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `upper cased short month`)) + } + bld.WriteString(loMonth) + state = stateLiteral + case 'd': + switch padChar { + case 0: + bld.WriteString(loDay) + case '0': + bld.WriteString(loZeroDay) + case 
' ': + bld.WriteString(loUnderDay) + } + state = stateLiteral + case 'e': + bld.WriteString(loUnderDay) + state = stateLiteral + case 'j': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `year of the day`)) + case 'H': + switch padChar { + case ' ': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `blank padded 24 hour`)) + case 0: + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `short 24 hour`)) + default: + bld.WriteString(loHour) + } + state = stateLiteral + case 'k': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `blank padded 24 hour`)) + case 'I': + bld.WriteString(loZeroHour12) + state = stateLiteral + case 'l': + bld.WriteString(loHour12) + state = stateLiteral + case 'P': + bld.WriteString(loPm) + state = stateLiteral + case 'p': + bld.WriteString(loPM) + state = stateLiteral + case 'M': + switch padChar { + case ' ': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `blank padded minute`)) + case 0: + bld.WriteString(loMinute) + default: + bld.WriteString(loZeroMinute) + } + state = stateLiteral + case 'S': + switch padChar { + case ' ': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `blank padded second`)) + case 0: + bld.WriteString(loSecond) + default: + bld.WriteString(loZeroSecond) + } + state = stateLiteral + case 'L': + if formatStart == 0 || str[formatStart-1] != '.' { + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `fraction not preceded by dot in format`)) + } + if padChar == '0' { + bld.WriteString(`000`) + } else { + bld.WriteString(`999`) + } + state = stateLiteral + case 'N': + if formatStart == 0 || str[formatStart-1] != '.' { + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `fraction not preceded by dot in format`)) + } + digit := byte('9') + if padChar == '0' { + digit = '0' + } + w := width + if width == -1 { + w = 9 + } + for i := 0; i < w; i++ { + bld.WriteByte(digit) + } + state = stateLiteral + case 'z': + switch colons { + case 0: + bld.WriteString(loNumTZ) + case 1: + bld.WriteString(loNumColonTZ) + case 2: + bld.WriteString(loNumColonSecondsTZ) + default: + // Not entirely correct since loosely defined num TZ not supported in Go + bld.WriteString(loNumShortTZ) + } + state = stateLiteral + case 'Z': + bld.WriteString(loTZ) + state = stateLiteral + case 'A': + bld.WriteString(loLongWeekDay) + state = stateLiteral + case 'a': + bld.WriteString(loWeekDay) + state = stateLiteral + case 'u', 'w': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `numeric week day`)) + case 'G', 'g': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `week based year`)) + case 'V': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `week number of the based year`)) + case 's': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `seconds since epoch`)) + case 'Q': + panic(notSupportedByGoTimeLayout(str, formatStart, pos, `milliseconds since epoch`)) + case 't': + bld.WriteString("\t") + state = stateLiteral + case 'n': + bld.WriteString("\n") + state = stateLiteral + case '%': + bld.WriteByte('%') + state = stateLiteral + case 'c': + strftimeToLayout(bld, `%a %b %-d %T %Y`) + state = stateLiteral + case 'D', 'x': + strftimeToLayout(bld, `%m/%d/%y`) + state = stateLiteral + case 'F': + strftimeToLayout(bld, `%Y-%m-%d`) + state = stateLiteral + case 'r': + strftimeToLayout(bld, `%I:%M:%S %p`) + state = stateLiteral + case 'R': + strftimeToLayout(bld, `%H:%M`) + state = stateLiteral + case 'X', 'T': + strftimeToLayout(bld, `%H:%M:%S`) + state = stateLiteral + case '+': + 
strftimeToLayout(bld, `%a %b %-d %H:%M:%S %Z %Y`) + state = stateLiteral + default: + if c < '0' || c > '9' { + panic(badFormatSpecifier(str, formatStart, pos)) + } + if state == statePad && c == '0' { + padChar = '0' + } else { + n := int(c) - 0x30 + if width == -1 { + width = n + } else { + width = width*10 + n + } + } + state = stateWidth + } + } + + if state != stateLiteral { + panic(badFormatSpecifier(str, formatStart, len(str))) + } +} + +func notSupportedByGoTimeLayout(str string, start, pos int, description string) issue.Reported { + return px.Error(px.NotSupportedByGoTimeLayout, issue.H{`format_specifier`: str[start : pos+1], `description`: description}) +} + +func toTimestampFormats(fmt px.Value) []*TimestampFormat { + formats := DefaultTimestampFormats + switch fmt := fmt.(type) { + case *Array: + formats = make([]*TimestampFormat, fmt.Len()) + fmt.EachWithIndex(func(f px.Value, i int) { + formats[i] = DefaultTimestampFormatParser.ParseFormat(f.String()) + }) + case stringValue: + formats = []*TimestampFormat{DefaultTimestampFormatParser.ParseFormat(fmt.String())} + } + return formats +} diff --git a/vendor/github.com/lyraproj/pcore/types/tupletype.go b/vendor/github.com/lyraproj/pcore/types/tupletype.go new file mode 100644 index 0000000..b5233c6 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/tupletype.go @@ -0,0 +1,380 @@ +package types + +import ( + "io" + "math" + + "github.com/lyraproj/pcore/px" +) + +type TupleType struct { + size *IntegerType + givenOrActualSize *IntegerType + types []px.Type +} + +var TupleMetaType px.ObjectType + +func init() { + TupleMetaType = newObjectType(`Pcore::TupleType`, + `Pcore::AnyType { + attributes => { + types => Array[Type], + size_type => { + type => Optional[Type[Integer]], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTupleType2(args...) + }) + + // Go constructor for Tuple instances is registered by ArrayType +} + +func DefaultTupleType() *TupleType { + return tupleTypeDefault +} + +func EmptyTupleType() *TupleType { + return tupleTypeEmpty +} + +func NewTupleType(types []px.Type, size *IntegerType) *TupleType { + var givenOrActualSize *IntegerType + sz := int64(len(types)) + if size == nil { + givenOrActualSize = NewIntegerType(sz, sz) + } else { + if sz == 0 { + if *size == *IntegerTypePositive { + return DefaultTupleType() + } + if *size == *IntegerTypeZero { + return EmptyTupleType() + } + } + givenOrActualSize = size + } + return &TupleType{size, givenOrActualSize, types} +} + +func newTupleType2(args ...px.Value) *TupleType { + return tupleFromArgs(false, WrapValues(args)) +} + +func tupleFromArgs(callable bool, args px.List) *TupleType { + argc := args.Len() + if argc == 0 { + return tupleTypeDefault + } + + if argc == 1 || argc == 2 { + if ar, ok := args.At(0).(*Array); ok { + tupleArgs := ar.AppendTo(make([]px.Value, 0, ar.Len()+argc-1)) + if argc == 2 { + tupleArgs = append(tupleArgs, args.At(1).(*IntegerType).Parameters()...) 
+ } + args = WrapValues(tupleArgs) + argc = len(tupleArgs) + } + } + + var rng, givenOrActualRng *IntegerType + var ok bool + var min int64 + + last := args.At(argc - 1) + max := int64(-1) + if _, ok = last.(*DefaultValue); ok { + max = math.MaxInt64 + } else if n, ok := toInt(last); ok { + max = n + } + if max >= 0 { + if argc == 1 { + rng = NewIntegerType(min, math.MaxInt64) + argc = 0 + } else { + if min, ok = toInt(args.At(argc - 2)); ok { + rng = NewIntegerType(min, max) + argc -= 2 + } else { + argc-- + rng = NewIntegerType(max, int64(argc)) + } + } + givenOrActualRng = rng + } else { + rng = nil + givenOrActualRng = NewIntegerType(int64(argc), int64(argc)) + } + + if argc == 0 { + if rng != nil && *rng == *IntegerTypeZero { + return tupleTypeEmpty + } + if callable { + return &TupleType{rng, rng, []px.Type{DefaultUnitType()}} + } + if rng != nil && *rng == *IntegerTypePositive { + return tupleTypeDefault + } + return &TupleType{rng, rng, []px.Type{}} + } + + var tupleTypes []px.Type + ok = false + var failIdx int + if argc == 1 { + // One arg can be either array of types or a type + tupleTypes, failIdx = toTypes(args.Slice(0, 1)) + ok = failIdx < 0 + } + + if !ok { + tupleTypes, failIdx = toTypes(args.Slice(0, argc)) + if failIdx >= 0 { + name := `Tuple[]` + if callable { + name = `Callable[]` + } + panic(illegalArgumentType(name, failIdx, `Type`, args.At(failIdx))) + } + } + return &TupleType{rng, givenOrActualRng, tupleTypes} +} + +func (t *TupleType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.size.Accept(v, g) + for _, c := range t.types { + c.Accept(v, g) + } +} + +func (t *TupleType) At(i int) px.Value { + if i >= 0 { + if i < len(t.types) { + return t.types[i] + } + if int64(i) < t.givenOrActualSize.max { + return t.types[len(t.types)-1] + } + } + return undef +} + +func (t *TupleType) CommonElementType() px.Type { + top := len(t.types) + if top == 0 { + return anyTypeDefault + } + cet := t.types[0] + for idx := 1; idx < top; idx++ { + cet = commonType(cet, t.types[idx]) + } + return cet +} + +func (t *TupleType) Default() px.Type { + return tupleTypeDefault +} + +func (t *TupleType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*TupleType); ok && len(t.types) == len(ot.types) && px.Equals(t.size, ot.size, g) { + for idx, col := range t.types { + if !col.Equals(ot.types[idx], g) { + return false + } + } + return true + } + return false +} + +func (t *TupleType) Generic() px.Type { + return NewTupleType(alterTypes(t.types, generalize), t.size) +} + +func (t *TupleType) Get(key string) (value px.Value, ok bool) { + switch key { + case `types`: + tps := make([]px.Value, len(t.types)) + for i, t := range t.types { + tps[i] = t + } + return WrapValues(tps), true + case `size_type`: + if t.size == nil { + return undef, true + } + return t.size, true + } + return nil, false +} + +func (t *TupleType) IsAssignable(o px.Type, g px.Guard) bool { + switch o := o.(type) { + case *ArrayType: + if !GuardedIsInstance(t.givenOrActualSize, integerValue(o.size.Min()), g) { + return false + } + top := len(t.types) + if top == 0 { + return true + } + elemType := o.typ + for idx := 0; idx < top; idx++ { + if !GuardedIsAssignable(t.types[idx], elemType, g) { + return false + } + } + return true + + case *TupleType: + if !(t.size == nil || GuardedIsInstance(t.size, integerValue(o.givenOrActualSize.Min()), g)) { + return false + } + + if len(t.types) > 0 { + top := len(o.types) + if top == 0 { + return t.givenOrActualSize.min == 0 + } + + last := len(t.types) - 1 + for idx := 0; idx < 
top; idx++ { + myIdx := idx + if myIdx > last { + myIdx = last + } + if !GuardedIsAssignable(t.types[myIdx], o.types[idx], g) { + return false + } + } + } + return true + + default: + return false + } +} + +func (t *TupleType) IsInstance(v px.Value, g px.Guard) bool { + if iv, ok := v.(*Array); ok { + return t.IsInstance2(iv, g) + } + return false +} + +func (t *TupleType) IsInstance2(vs px.List, g px.Guard) bool { + osz := vs.Len() + if !t.givenOrActualSize.IsInstance3(osz) { + return false + } + + last := len(t.types) - 1 + if last < 0 { + return true + } + + tdx := 0 + for idx := 0; idx < osz; idx++ { + if !GuardedIsInstance(t.types[tdx], vs.At(idx), g) { + return false + } + if tdx < last { + tdx++ + } + } + return true +} + +func (t *TupleType) IsInstance3(vs []px.Value, g px.Guard) bool { + osz := len(vs) + if !t.givenOrActualSize.IsInstance3(osz) { + return false + } + + last := len(t.types) - 1 + if last < 0 { + return true + } + + tdx := 0 + for idx := 0; idx < osz; idx++ { + if !GuardedIsInstance(t.types[tdx], vs[idx], g) { + return false + } + if tdx < last { + tdx++ + } + } + return true +} + +func (t *TupleType) MetaType() px.ObjectType { + return TupleMetaType +} + +func (t *TupleType) Name() string { + return `Tuple` +} + +func (t *TupleType) Resolve(c px.Context) px.Type { + rts := make([]px.Type, len(t.types)) + for i, ts := range t.types { + rts[i] = resolve(c, ts) + } + t.types = rts + return t +} + +func (t *TupleType) CanSerializeAsString() bool { + for _, v := range t.types { + if !canSerializeAsString(v) { + return false + } + } + return true +} + +func (t *TupleType) SerializationString() string { + return t.String() +} + +func (t *TupleType) Size() *IntegerType { + return t.givenOrActualSize +} + +func (t *TupleType) String() string { + return px.ToString2(t, None) +} + +func (t *TupleType) Parameters() []px.Value { + top := len(t.types) + params := make([]px.Value, 0, top+2) + for _, c := range t.types { + params = append(params, c) + } + if !(t.size == nil || top == 0 && *t.size == *IntegerTypePositive) { + params = append(params, t.size.SizeParameters()...) + } + return params +} + +func (t *TupleType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *TupleType) PType() px.Type { + return &TypeType{t} +} + +func (t *TupleType) Types() []px.Type { + return t.types +} + +var tupleTypeDefault = &TupleType{IntegerTypePositive, IntegerTypePositive, []px.Type{}} +var tupleTypeEmpty = &TupleType{IntegerTypeZero, IntegerTypeZero, []px.Type{}} diff --git a/vendor/github.com/lyraproj/pcore/types/typealiastype.go b/vendor/github.com/lyraproj/pcore/types/typealiastype.go new file mode 100644 index 0000000..ea81f91 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typealiastype.go @@ -0,0 +1,189 @@ +package types + +import ( + "fmt" + "io" + + "github.com/lyraproj/pcore/utils" + + "github.com/lyraproj/pcore/px" +) + +type TypeAliasType struct { + name string + typeExpression *DeferredType + resolvedType px.Type + loader px.Loader +} + +var TypeAliasMetaType px.ObjectType + +func init() { + TypeAliasMetaType = newObjectType(`Pcore::TypeAlias`, + `Pcore::AnyType { + attributes => { + name => String[1], + resolved_type => { + type => Optional[Type], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTypeAliasType2(args...) 
+ }) +} + +func DefaultTypeAliasType() *TypeAliasType { + return typeAliasTypeDefault +} + +// NewTypeAliasType creates a new TypeAliasType from a name and a typeExpression which +// must either be a *DeferredType, a parser.Expression, or nil. If it is nil, the +// resolved Type must be given. +func NewTypeAliasType(name string, typeExpression *DeferredType, resolvedType px.Type) *TypeAliasType { + return &TypeAliasType{name, typeExpression, resolvedType, nil} +} + +func newTypeAliasType2(args ...px.Value) *TypeAliasType { + switch len(args) { + case 0: + return DefaultTypeAliasType() + case 2: + name, ok := args[0].(stringValue) + if !ok { + panic(illegalArgumentType(`TypeAlias`, 0, `String`, args[0])) + } + var pt px.Type + if pt, ok = args[1].(px.Type); ok { + return NewTypeAliasType(string(name), nil, pt) + } + if dt, ok := args[1].(*DeferredType); ok { + return NewTypeAliasType(string(name), dt, nil) + } + panic(illegalArgumentType(`TypeAlias[]`, 1, `Type or Expression`, args[1])) + default: + panic(illegalArgumentCount(`TypeAlias[]`, `0 or 2`, len(args))) + } +} + +func (t *TypeAliasType) Accept(v px.Visitor, g px.Guard) { + if g == nil { + g = make(px.Guard) + } + if g.Seen(t, nil) { + return + } + v(t) + t.resolvedType.Accept(v, g) +} + +func (t *TypeAliasType) Default() px.Type { + return typeAliasTypeDefault +} + +func (t *TypeAliasType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*TypeAliasType); ok && t.name == ot.name { + if g == nil { + g = make(px.Guard) + } + if g.Seen(t, ot) { + return true + } + tr := t.resolvedType + otr := ot.resolvedType + return tr.Equals(otr, g) + } + return false +} + +func (t *TypeAliasType) Get(key string) (px.Value, bool) { + switch key { + case `name`: + return stringValue(t.name), true + case `resolved_type`: + return t.resolvedType, true + default: + return nil, false + } +} + +func (t *TypeAliasType) Loader() px.Loader { + return t.loader +} + +func (t *TypeAliasType) IsAssignable(o px.Type, g px.Guard) bool { + if g == nil { + g = make(px.Guard) + } + if g.Seen(t, o) { + return true + } + return GuardedIsAssignable(t.ResolvedType(), o, g) +} + +func (t *TypeAliasType) IsInstance(o px.Value, g px.Guard) bool { + if g == nil { + g = make(px.Guard) + } + if g.Seen(t, o) { + return true + } + return GuardedIsInstance(t.ResolvedType(), o, g) +} + +func (t *TypeAliasType) MetaType() px.ObjectType { + return TypeAliasMetaType +} + +func (t *TypeAliasType) Name() string { + return t.name +} + +func (t *TypeAliasType) Resolve(c px.Context) px.Type { + if t.resolvedType == nil { + t.resolvedType = t.typeExpression.Resolve(c) + t.loader = c.Loader() + } + return t +} + +func (t *TypeAliasType) ResolvedType() px.Type { + if t.resolvedType == nil { + panic(fmt.Sprintf("Reference to unresolved type '%s'", t.name)) + } + return t.resolvedType +} + +func (t *TypeAliasType) String() string { + return px.ToString2(t, Expanded) +} + +func (t *TypeAliasType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), t.PType()) + if t.name == `UnresolvedAlias` { + utils.WriteString(b, `TypeAlias`) + } else { + utils.WriteString(b, t.name) + if !(f.IsAlt() && f.FormatChar() == 'b') { + return + } + if g == nil { + g = make(px.RDetect) + } else if g[t] { + utils.WriteString(b, ``) + return + } + g[t] = true + utils.WriteString(b, ` = `) + + // TODO: Need to be adjusted when included in TypeSet + t.resolvedType.ToString(b, s, g) + delete(g, t) + } +} + +func (t *TypeAliasType) PType() px.Type { + return &TypeType{t} 
+} + +var typeAliasTypeDefault = &TypeAliasType{`UnresolvedAlias`, nil, defaultTypeDefault, nil} diff --git a/vendor/github.com/lyraproj/pcore/types/typedname.go b/vendor/github.com/lyraproj/pcore/types/typedname.go new file mode 100644 index 0000000..d441c0a --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typedname.go @@ -0,0 +1,275 @@ +package types + +import ( + "io" + "regexp" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type typedName struct { + namespace px.Namespace + authority px.URI + name string + canonical string + parts []string +} + +var TypedNameMetaType px.Type + +func init() { + TypedNameMetaType = newObjectType(`TypedName`, `{ + attributes => { + 'namespace' => String, + 'name' => String, + 'authority' => { type => Optional[URI], value => undef }, + 'parts' => { type => Array[String], kind => derived }, + 'is_qualified' => { type => Boolean, kind => derived }, + 'child' => { type => Optional[TypedName], kind => derived }, + 'parent' => { type => Optional[TypedName], kind => derived } + }, + functions => { + 'is_parent' => Callable[[TypedName],Boolean], + 'relative_to' => Callable[[TypedName],Optional[TypedName]] + } + }`, func(ctx px.Context, args []px.Value) px.Value { + ns := px.Namespace(args[0].String()) + n := args[1].String() + if len(args) > 2 { + return newTypedName2(ns, n, px.URI(args[2].(*UriValue).String())) + } + return NewTypedName(ns, n) + }, func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*Hash) + ns := px.Namespace(h.Get5(`namespace`, px.EmptyString).String()) + n := h.Get5(`name`, px.EmptyString).String() + if x, ok := h.Get4(`authority`); ok { + return newTypedName2(ns, n, px.URI(x.(*UriValue).String())) + } + return NewTypedName(ns, n) + }) +} + +func (t *typedName) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + ObjectToString(t, format, bld, g) +} + +func (t *typedName) PType() px.Type { + return TypedNameMetaType +} + +func (t *typedName) Call(c px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (result px.Value, ok bool) { + switch method.Name() { + case `is_parent`: + return booleanValue(t.IsParent(args[0].(px.TypedName))), true + case `relative_to`: + if r, ok := t.RelativeTo(args[0].(px.TypedName)); ok { + return r, true + } + return undef, true + } + return nil, false +} + +func (t *typedName) Get(key string) (value px.Value, ok bool) { + switch key { + case `namespace`: + return stringValue(string(t.namespace)), true + case `authority`: + if t.authority == px.RuntimeNameAuthority { + return px.Undef, true + } + return WrapURI2(string(t.authority)), true + case `name`: + return stringValue(t.Name()), true + case `parts`: + return t.PartsList(), true + case `is_qualified`: + return booleanValue(t.IsQualified()), true + case `parent`: + p := t.Parent() + if p == nil { + return undef, true + } + return p, true + case `child`: + p := t.Child() + if p == nil { + return undef, true + } + return p, true + } + return nil, false +} + +func (t *typedName) InitHash() px.OrderedMap { + es := make([]*HashEntry, 0, 3) + es = append(es, WrapHashEntry2(`namespace`, stringValue(string(t.Namespace())))) + es = append(es, WrapHashEntry2(`name`, stringValue(t.Name()))) + if t.authority != px.RuntimeNameAuthority { + es = append(es, WrapHashEntry2(`authority`, WrapURI2(string(t.authority)))) + } + return WrapHash(es) +} + +func NewTypedName(namespace px.Namespace, name string) px.TypedName { + return newTypedName2(namespace, name, px.RuntimeNameAuthority) +} + 
+var allowedCharacters = regexp.MustCompile(`\A[A-Za-z][0-9A-Z_a-z]*\z`) + +func newTypedName2(namespace px.Namespace, name string, nameAuthority px.URI) px.TypedName { + tn := typedName{} + tn.namespace = namespace + tn.authority = nameAuthority + tn.name = strings.TrimPrefix(name, `::`) + return &tn +} + +func typedNameFromMapKey(mapKey string) px.TypedName { + if i := strings.LastIndexByte(mapKey, '/'); i > 0 { + pfx := mapKey[:i] + name := mapKey[i+1:] + if i = strings.LastIndexByte(pfx, '/'); i > 0 { + return newTypedName2(px.Namespace(pfx[i+1:]), name, px.URI(pfx[:i])) + } + } + panic(px.Error(px.InvalidTypedNameMapKey, issue.H{`mapKey`: mapKey})) +} + +func (t *typedName) Child() px.TypedName { + if !t.IsQualified() { + return nil + } + return t.child(1) +} + +func (t *typedName) child(stripCount int) px.TypedName { + name := t.name + sx := 0 + for i := 0; i < stripCount; i++ { + sx = strings.Index(name, `::`) + if sx < 0 { + return nil + } + name = name[sx+2:] + } + + tn := &typedName{ + namespace: t.namespace, + authority: t.authority, + name: name} + + if t.canonical != `` { + pfxLen := len(t.authority) + len(t.namespace) + 2 + diff := len(t.name) - len(name) + tn.canonical = t.canonical[:pfxLen] + t.canonical[pfxLen+diff:] + } + if t.parts != nil { + tn.parts = t.parts[stripCount:] + } + return tn +} + +func (t *typedName) Parent() px.TypedName { + lx := strings.LastIndex(t.name, `::`) + if lx < 0 { + return nil + } + tn := &typedName{ + namespace: t.namespace, + authority: t.authority, + name: t.name[:lx]} + + if t.canonical != `` { + pfxLen := len(t.authority) + len(t.namespace) + 2 + tn.canonical = t.canonical[:pfxLen+lx] + } + if t.parts != nil { + tn.parts = t.parts[:len(t.parts)-1] + } + return tn +} + +func (t *typedName) Equals(other interface{}, g px.Guard) bool { + if tn, ok := other.(px.TypedName); ok { + return t.MapKey() == tn.MapKey() + } + return false +} + +func (t *typedName) Name() string { + return t.name +} + +func (t *typedName) IsParent(o px.TypedName) bool { + tps := t.Parts() + ops := o.Parts() + top := len(tps) + if top < len(ops) { + for idx := 0; idx < top; idx++ { + if tps[idx] != ops[idx] { + return false + } + } + return true + } + return false +} + +func (t *typedName) RelativeTo(parent px.TypedName) (px.TypedName, bool) { + if parent.IsParent(t) { + return t.child(len(parent.Parts())), true + } + return nil, false +} + +func (t *typedName) IsQualified() bool { + if t.parts == nil { + return strings.Contains(t.name, `::`) + } + return len(t.parts) > 1 +} + +func (t *typedName) MapKey() string { + if t.canonical == `` { + t.canonical = strings.ToLower(string(t.authority) + `/` + string(t.namespace) + `/` + t.name) + } + return t.canonical +} + +func (t *typedName) Parts() []string { + if t.parts == nil { + parts := strings.Split(strings.ToLower(t.name), `::`) + for _, part := range parts { + if !allowedCharacters.MatchString(part) { + panic(px.Error(px.InvalidCharactersInName, issue.H{`name`: t.name})) + } + } + t.parts = parts + } + return t.parts +} + +func (t *typedName) PartsList() px.List { + parts := t.Parts() + es := make([]px.Value, len(parts)) + for i, p := range parts { + es[i] = stringValue(p) + } + return WrapValues(es) +} + +func (t *typedName) String() string { + return px.ToString(t) +} + +func (t *typedName) Namespace() px.Namespace { + return t.namespace +} + +func (t *typedName) Authority() px.URI { + return t.authority +} diff --git a/vendor/github.com/lyraproj/pcore/types/typeparameter.go 
b/vendor/github.com/lyraproj/pcore/types/typeparameter.go new file mode 100644 index 0000000..c7ce323 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typeparameter.go @@ -0,0 +1,39 @@ +package types + +import ( + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" +) + +type typeParameter struct { + attribute +} + +var TypeTypeParameter = NewStructType([]*StructElement{ + newStructElement2(keyType, DefaultTypeType()), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations), +}) + +func (t *typeParameter) initHash() *hash.StringHash { + h := t.attribute.initHash() + h.Put(keyType, h.Get(keyType, nil).(*TypeType).PType()) + if v, ok := h.Get3(keyValue); ok && v.(px.Value).Equals(undef, nil) { + h.Delete(keyValue) + } + return h +} + +func (t *typeParameter) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*typeParameter); ok { + return t.attribute.Equals(&ot.attribute, g) + } + return false +} + +func (t *typeParameter) InitHash() px.OrderedMap { + return WrapStringPValue(t.initHash()) +} + +func (t *typeParameter) FeatureType() string { + return `type_parameter` +} diff --git a/vendor/github.com/lyraproj/pcore/types/typereferencetype.go b/vendor/github.com/lyraproj/pcore/types/typereferencetype.go new file mode 100644 index 0000000..6f834ea --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typereferencetype.go @@ -0,0 +1,132 @@ +package types + +import ( + "io" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type TypeReferenceType struct { + typeString string +} + +var TypeReferenceMetaType px.ObjectType + +func init() { + TypeReferenceMetaType = newObjectType(`Pcore::TypeReference`, + `Pcore::AnyType { + attributes => { + type_string => String[1] + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTypeReferenceType2(args...) 
+ }) +} + +func DefaultTypeReferenceType() *TypeReferenceType { + return typeReferenceTypeDefault +} + +func NewTypeReferenceType(typeString string) *TypeReferenceType { + return &TypeReferenceType{typeString} +} + +func newTypeReferenceType2(args ...px.Value) *TypeReferenceType { + switch len(args) { + case 0: + return DefaultTypeReferenceType() + case 1: + if str, ok := args[0].(stringValue); ok { + return &TypeReferenceType{string(str)} + } + panic(illegalArgumentType(`TypeReference[]`, 0, `String`, args[0])) + default: + panic(illegalArgumentCount(`TypeReference[]`, `0 - 1`, len(args))) + } +} + +func (t *TypeReferenceType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *TypeReferenceType) Default() px.Type { + return typeReferenceTypeDefault +} + +func (t *TypeReferenceType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*TypeReferenceType); ok { + return t.typeString == ot.typeString + } + return false +} + +func (t *TypeReferenceType) Get(key string) (px.Value, bool) { + switch key { + case `type_string`: + return stringValue(t.typeString), true + default: + return nil, false + } +} + +func (t *TypeReferenceType) IsAssignable(o px.Type, g px.Guard) bool { + tr, ok := o.(*TypeReferenceType) + return ok && t.typeString == tr.typeString +} + +func (t *TypeReferenceType) IsInstance(o px.Value, g px.Guard) bool { + return false +} + +func (t *TypeReferenceType) MetaType() px.ObjectType { + return TypeReferenceMetaType +} + +func (t *TypeReferenceType) Name() string { + return `TypeReference` +} + +func (t *TypeReferenceType) CanSerializeAsString() bool { + return true +} + +func (t *TypeReferenceType) SerializationString() string { + return t.String() +} + +func (t *TypeReferenceType) String() string { + return px.ToString2(t, None) +} + +func (t *TypeReferenceType) Parameters() []px.Value { + if *t == *typeReferenceTypeDefault { + return px.EmptyValues + } + return []px.Value{stringValue(t.typeString)} +} + +func (t *TypeReferenceType) Resolve(c px.Context) px.Type { + r := c.ParseType(t.typeString) + if rt, ok := r.(px.ResolvableType); ok { + if tr, ok := rt.(*TypeReferenceType); ok && t.typeString == tr.typeString { + panic(px.Error(px.UnresolvedType, issue.H{`typeString`: t.typeString})) + } + r = rt.Resolve(c) + } + return r +} + +func (t *TypeReferenceType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *TypeReferenceType) PType() px.Type { + return &TypeType{t} +} + +func (t *TypeReferenceType) TypeString() string { + return t.typeString +} + +var typeReferenceTypeDefault = &TypeReferenceType{`UnresolvedReference`} diff --git a/vendor/github.com/lyraproj/pcore/types/types.go b/vendor/github.com/lyraproj/pcore/types/types.go new file mode 100644 index 0000000..adafa15 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/types.go @@ -0,0 +1,1031 @@ +package types + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "reflect" + "regexp" + "runtime" + "sort" + "sync" + "time" + + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/semver/semver" +) + +const ( + NoString = "\x00" + + HkBinary = byte('B') + HkBoolean = byte('b') + HkDefault = byte('d') + HkFloat = byte('f') + HkInteger = byte('i') + HkRegexp = byte('r') + HkTimespan = byte('D') + HkTimestamp = byte('T') + HkType = byte('t') + HkUndef = byte('u') + HkUri = byte('U') + HkVersion = byte('v') + HkVersionRange = byte('R') + + IntegerHex = `(?:0[xX][0-9A-Fa-f]+)` + IntegerOct = `(?:0[0-7]+)` + 
IntegerBin = `(?:0[bB][01]+)` + IntegerDec = `(?:0|[1-9]\d*)` + SignPrefix = `[+-]?\s*` + + OptionalFraction = `(?:\.\d+)?` + OptionalExponent = `(?:[eE]-?\d+)?` + FloatDec = `(?:` + IntegerDec + OptionalFraction + OptionalExponent + `)` + + IntegerPattern = `\A` + SignPrefix + `(?:` + IntegerDec + `|` + IntegerHex + `|` + IntegerOct + `|` + IntegerBin + `)\z` + FloatPattern = `\A` + SignPrefix + `(?:` + FloatDec + `|` + IntegerHex + `|` + IntegerOct + `|` + IntegerBin + `)\z` +) + +// isInstance answers if value is an instance of the given puppetType +func isInstance(puppetType px.Type, value px.Value) bool { + return GuardedIsInstance(puppetType, value, nil) +} + +// isAssignable answers if t is assignable to this type +func isAssignable(puppetType px.Type, other px.Type) bool { + return GuardedIsAssignable(puppetType, other, nil) +} + +func generalize(a px.Type) px.Type { + if g, ok := a.(px.Generalizable); ok { + return g.Generic() + } + if g, ok := a.(px.ParameterizedType); ok { + return g.Default() + } + return a +} + +func defaultFor(t px.Type) px.Type { + if g, ok := t.(px.ParameterizedType); ok { + return g.Default() + } + return t +} + +func normalize(t px.Type) px.Type { + // TODO: Implement for ParameterizedType + return t +} + +func resolve(c px.Context, t px.Type) px.Type { + if rt, ok := t.(px.ResolvableType); ok { + return rt.Resolve(c) + } + return t +} + +func EachCoreType(fc func(t px.Type)) { + keys := make([]string, len(coreTypes)) + i := 0 + for key := range coreTypes { + keys[i] = key + i++ + } + sort.Strings(keys) + for _, key := range keys { + fc(coreTypes[key]) + } +} + +func GuardedIsInstance(a px.Type, v px.Value, g px.Guard) bool { + return a.IsInstance(v, g) +} + +func GuardedIsAssignable(a px.Type, b px.Type, g px.Guard) bool { + if a == b || a == anyTypeDefault { + return true + } + switch b := b.(type) { + case nil: + return false + case *UnitType: + return true + case *NotUndefType: + nt := b.typ + if !GuardedIsAssignable(nt, undefTypeDefault, g) { + return GuardedIsAssignable(a, nt, g) + } + case *OptionalType: + if GuardedIsAssignable(a, undefTypeDefault, g) { + ot := b.typ + return ot == nil || GuardedIsAssignable(a, ot, g) + } + return false + case *TypeAliasType: + return GuardedIsAssignable(a, b.resolvedType, g) + case *VariantType: + return b.allAssignableTo(a, g) + } + return a.IsAssignable(b, g) +} + +func UniqueTypes(types []px.Type) []px.Type { + top := len(types) + if top < 2 { + return types + } + + result := make([]px.Type, 0, top) + exists := make(map[px.HashKey]bool, top) + for _, t := range types { + key := px.ToKey(t) + if !exists[key] { + exists[key] = true + result = append(result, t) + } + } + return result +} + +// ValueSlice convert a slice of values that implement the pcore.Value interface to []pcore.Value. 
The +// method will panic if the given argument is not a slice or array, or if not all +// elements implement the pcore.Value interface +func ValueSlice(slice interface{}) []px.Value { + sv := reflect.ValueOf(slice) + top := sv.Len() + result := make([]px.Value, top) + for idx := 0; idx < top; idx++ { + result[idx] = sv.Index(idx).Interface().(px.Value) + } + return result +} + +func UniqueValues(values []px.Value) []px.Value { + top := len(values) + if top < 2 { + return values + } + + result := make([]px.Value, 0, top) + exists := make(map[px.HashKey]bool, top) + for _, v := range values { + key := px.ToKey(v) + if !exists[key] { + exists[key] = true + result = append(result, v) + } + } + return result +} + +func illegalArgument(name string, index int, arg string) issue.Reported { + return issue.NewReported(px.IllegalArgument, issue.SeverityError, issue.H{`function`: name, `index`: index, `arg`: arg}, 1) +} + +func illegalArguments(name string, message string) issue.Reported { + return issue.NewReported(px.IllegalArguments, issue.SeverityError, issue.H{`function`: name, `message`: message}, 1) +} + +func illegalArgumentType(name string, index int, expected string, actual px.Value) issue.Reported { + return issue.NewReported(px.IllegalArgumentType, issue.SeverityError, issue.H{`function`: name, `index`: index, `expected`: expected, `actual`: px.DetailedValueType(actual).String()}, 1) +} + +func illegalArgumentCount(name string, expected string, actual int) issue.Reported { + return issue.NewReported(px.IllegalArgumentCount, issue.SeverityError, issue.H{`function`: name, `expected`: expected, `actual`: actual}, 1) +} + +func TypeToString(t px.Type, b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), t.PType()) + switch f.FormatChar() { + case 's', 'p': + quoted := f.IsAlt() && f.FormatChar() == 's' + if quoted || f.HasStringFlags() { + bld := bytes.NewBufferString(``) + basicTypeToString(t, bld, s, g) + f.ApplyStringFlags(b, bld.String(), quoted) + } else { + basicTypeToString(t, b, s, g) + } + default: + panic(s.UnsupportedFormat(t.PType(), `sp`, f)) + } +} + +func basicTypeToString(t px.Type, b io.Writer, s px.FormatContext, g px.RDetect) { + name := t.Name() + if tp, ok := s.Property(`typeSetParent`); ok && tp == `true` { + s = s.WithProperties(map[string]string{`typeSetParent`: `false`}) + } + + if ex, ok := s.Property(`expanded`); !(ok && ex == `true`) { + switch t.(type) { + case *TypeAliasType: + if ts, ok := s.Property(`typeSet`); ok { + name = stripTypeSetName(ts, name) + } + _, err := io.WriteString(b, name) + if err != nil { + panic(err) + } + return + } + } + _, err := io.WriteString(b, name) + if err != nil { + panic(err) + } + if pt, ok := t.(px.ParameterizedType); ok { + params := pt.Parameters() + if len(params) > 0 { + WrapValues(params).ToString(b, s.Subsequent(), g) + } + } +} + +func stripTypeSetName(tsName, name string) string { + tsName = tsName + `::` + if strings.HasPrefix(name, tsName) { + // Strip name and two colons + return name[len(tsName):] + } + return name +} + +type alterFunc func(t px.Type) px.Type + +func alterTypes(types []px.Type, function alterFunc) []px.Type { + al := make([]px.Type, len(types)) + for idx, t := range types { + al[idx] = function(t) + } + return al +} + +func toTypes(types px.List) ([]px.Type, int) { + top := types.Len() + if top == 1 { + if a, ok := types.At(0).(px.List); ok { + if _, ok = a.(stringValue); !ok { + ts, f := toTypes(a) + if f >= 0 { + return nil, 0 + } + return ts, 0 + } + } + } + result 
:= make([]px.Type, 0, top) + if types.All(func(t px.Value) bool { + if pt, ok := t.(px.Type); ok { + result = append(result, pt) + return true + } + return false + }) { + return result, -1 + } + return nil, 0 +} + +func DefaultDataType() *TypeAliasType { + return dataTypeDefault +} + +func DefaultRichDataType() *TypeAliasType { + return richDataTypeDefault +} + +func NilAs(dflt, t px.Type) px.Type { + if t == nil { + t = dflt + } + return t +} + +func CopyAppend(types []px.Type, t px.Type) []px.Type { + top := len(types) + tc := make([]px.Type, top+1) + copy(tc, types) + tc[top] = t + return tc +} + +var dataArrayTypeDefault = &ArrayType{IntegerTypePositive, &TypeReferenceType{`Data`}} +var dataHashTypeDefault = &HashType{IntegerTypePositive, stringTypeDefault, &TypeReferenceType{`Data`}} +var dataTypeDefault = &TypeAliasType{name: `Data`, resolvedType: &VariantType{[]px.Type{scalarDataTypeDefault, undefTypeDefault, dataArrayTypeDefault, dataHashTypeDefault}}} + +var richKeyTypeDefault = &VariantType{[]px.Type{stringTypeDefault, numericTypeDefault}} +var richDataArrayTypeDefault = &ArrayType{IntegerTypePositive, &TypeReferenceType{`RichData`}} +var richDataHashTypeDefault = &HashType{IntegerTypePositive, richKeyTypeDefault, &TypeReferenceType{`RichData`}} +var richDataTypeDefault = &TypeAliasType{`RichData`, nil, &VariantType{ + []px.Type{scalarTypeDefault, + binaryTypeDefault, + defaultTypeDefault, + objectTypeDefault, + typeTypeDefault, + typeSetTypeDefault, + undefTypeDefault, + richDataArrayTypeDefault, + richDataHashTypeDefault}}, nil} + +type Mapping struct { + T px.Type + R reflect.Type +} + +var resolvableTypes = make([]px.ResolvableType, 0, 16) +var resolvableMappings = make([]Mapping, 0, 16) +var resolvableTypesLock sync.Mutex + +type BuildFunctionArgs struct { + Name string + LocalTypes px.LocalTypesCreator + Creators []px.DispatchCreator +} + +var constructorsDecls = make([]*BuildFunctionArgs, 0, 16) + +func init() { + // "resolve" the dataType and richDataType + dataArrayTypeDefault.typ = dataTypeDefault + dataHashTypeDefault.valueType = dataTypeDefault + richDataArrayTypeDefault.typ = richDataTypeDefault + richDataHashTypeDefault.valueType = richDataTypeDefault + + px.DefaultFor = defaultFor + px.Generalize = generalize + px.Normalize = normalize + px.IsAssignable = isAssignable + px.IsInstance = isInstance + px.New = newInstance + + px.DetailedValueType = func(value px.Value) px.Type { + if dt, ok := value.(px.DetailedTypeValue); ok { + return dt.DetailedType() + } + return value.PType() + } + + px.GenericType = func(t px.Type) px.Type { + if g, ok := t.(px.Generalizable); ok { + return g.Generic() + } + return t + } + + px.GenericValueType = func(value px.Value) px.Type { + return px.GenericType(value.PType()) + } + + px.ToArray = func(elements []px.Value) px.List { + return WrapValues(elements) + } + + px.ToKey = func(value px.Value) px.HashKey { + if hk, ok := value.(px.HashKeyValue); ok { + return hk.ToKey() + } + b := bytes.NewBuffer([]byte{}) + appendKey(b, value) + return px.HashKey(b.String()) + } + + px.IsTruthy = func(tv px.Value) bool { + switch tv := tv.(type) { + case *UndefValue: + return false + case booleanValue: + return tv.Bool() + default: + return true + } + } + + px.NewObjectType = newObjectType + px.NewGoObjectType = newGoObjectType + px.NewNamedType = newNamedType + px.NewGoType = newGoType + px.RegisterResolvableType = registerResolvableType + px.NewGoConstructor = newGoConstructor + px.NewGoConstructor2 = newGoConstructor2 + px.Wrap = wrap + 
px.WrapReflected = wrapReflected + px.WrapReflectedType = wrapReflectedType +} + +func canSerializeAsString(t px.Type) bool { + if t == nil { + // true because nil members will not participate + return true + } + if st, ok := t.(px.SerializeAsString); ok { + return st.CanSerializeAsString() + } + return false +} + +// New creates a new instance of type t +func newInstance(c px.Context, receiver px.Value, args ...px.Value) px.Value { + var name string + typ, ok := receiver.(px.Type) + if ok { + name = typ.Name() + } else { + // Type might be in string form + _, ok = receiver.(stringValue) + if !ok { + // Only types or names of types can be used + panic(px.Error(px.InstanceDoesNotRespond, issue.H{`type`: receiver.PType(), `message`: `new`})) + } + + name = receiver.String() + var t interface{} + if t, ok = px.Load(c, NewTypedName(px.NsType, name)); ok { + typ = t.(px.Type) + } + } + + if nb, ok := typ.(px.Newable); ok { + return nb.New(c, args) + } + + var ctor px.Function + var ct px.Creatable + ct, ok = typ.(px.Creatable) + if ok { + ctor = ct.Constructor(c) + } + + if ctor == nil { + tn := NewTypedName(px.NsConstructor, name) + if t, ok := px.Load(c, tn); ok { + ctor = t.(px.Function) + } + } + + if ctor == nil { + panic(px.Error(px.InstanceDoesNotRespond, issue.H{`type`: name, `message`: `new`})) + } + + r := ctor.(px.Function).Call(c, nil, args...) + if typ != nil { + px.AssertInstance(`new`, typ, r) + } + return r +} + +func newGoConstructor(typeName string, creators ...px.DispatchCreator) { + registerGoConstructor(&BuildFunctionArgs{typeName, nil, creators}) +} + +func newGoConstructor2(typeName string, localTypes px.LocalTypesCreator, creators ...px.DispatchCreator) { + registerGoConstructor(&BuildFunctionArgs{typeName, localTypes, creators}) +} + +func newGoConstructor3(typeNames []string, localTypes px.LocalTypesCreator, creators ...px.DispatchCreator) { + for _, tn := range typeNames { + registerGoConstructor(&BuildFunctionArgs{tn, localTypes, creators}) + } +} + +func PopDeclaredTypes() (types []px.ResolvableType) { + resolvableTypesLock.Lock() + types = resolvableTypes + if len(types) > 0 { + resolvableTypes = make([]px.ResolvableType, 0, 16) + } + resolvableTypesLock.Unlock() + return +} + +func PopDeclaredMappings() (types []Mapping) { + resolvableTypesLock.Lock() + types = resolvableMappings + if len(types) > 0 { + resolvableMappings = make([]Mapping, 0, 16) + } + resolvableTypesLock.Unlock() + return +} + +func PopDeclaredConstructors() (ctorDecls []*BuildFunctionArgs) { + resolvableTypesLock.Lock() + ctorDecls = constructorsDecls + if len(ctorDecls) > 0 { + constructorsDecls = make([]*BuildFunctionArgs, 0, 16) + } + resolvableTypesLock.Unlock() + return +} + +func registerGoConstructor(ctorDecl *BuildFunctionArgs) { + resolvableTypesLock.Lock() + constructorsDecls = append(constructorsDecls, ctorDecl) + resolvableTypesLock.Unlock() +} + +func newGoType(name string, zeroValue interface{}) px.ObjectType { + obj := AllocObjectType() + obj.name = name + obj.initHashExpression = zeroValue + registerResolvableType(obj) + return obj +} + +func registerResolvableType(tp px.ResolvableType) { + resolvableTypesLock.Lock() + resolvableTypes = append(resolvableTypes, tp) + resolvableTypesLock.Unlock() +} + +func registerMapping(t px.Type, r reflect.Type) { + resolvableTypesLock.Lock() + resolvableMappings = append(resolvableMappings, Mapping{t, r}) + resolvableTypesLock.Unlock() +} + +func appendKey(b *bytes.Buffer, v px.Value) { + if hk, ok := v.(px.StreamHashKeyValue); ok { + 
hk.ToKey(b) + } else if pt, ok := v.(px.Type); ok { + b.WriteByte(1) + b.WriteByte(HkType) + b.Write([]byte(pt.Name())) + if ppt, ok := pt.(px.ParameterizedType); ok { + for _, p := range ppt.Parameters() { + appendTypeParamKey(b, p) + } + } + } else if hk, ok := v.(px.HashKeyValue); ok { + b.Write([]byte(hk.ToKey())) + } else { + panic(illegalArgumentType(`ToKey`, 0, `value used as hash key`, v)) + } +} + +// Special hash key generation for type parameters which might be hashes +// using string keys +func appendTypeParamKey(b *bytes.Buffer, v px.Value) { + if h, ok := v.(*Hash); ok { + b.WriteByte(2) + h.EachPair(func(k, v px.Value) { + b.Write([]byte(k.String())) + b.WriteByte(3) + appendTypeParamKey(b, v) + }) + } else { + appendKey(b, v) + } +} + +func wrap(c px.Context, v interface{}) (pv px.Value) { + switch v := v.(type) { + case nil: + pv = undef + case px.Value: + pv = v + case string: + pv = stringValue(v) + case int8: + pv = integerValue(int64(v)) + case int16: + pv = integerValue(int64(v)) + case int32: + pv = integerValue(int64(v)) + case int64: + pv = integerValue(v) + case byte: + pv = integerValue(int64(v)) + case int: + pv = integerValue(int64(v)) + case float64: + pv = floatValue(v) + case bool: + pv = booleanValue(v) + case *regexp.Regexp: + pv = WrapRegexp2(v) + case []byte: + pv = WrapBinary(v) + case semver.Version: + pv = WrapSemVer(v) + case semver.VersionRange: + pv = WrapSemVerRange(v) + case time.Duration: + pv = WrapTimespan(v) + case time.Time: + pv = WrapTimestamp(v) + case []int: + pv = WrapInts(v) + case []string: + pv = WrapStrings(v) + case []px.Value: + pv = WrapValues(v) + case []px.Type: + pv = WrapTypes(v) + case []interface{}: + return WrapInterfaces(c, v) + case map[string]interface{}: + pv = WrapStringToInterfaceMap(c, v) + case map[string]string: + pv = WrapStringToStringMap(v) + case map[string]px.Value: + pv = WrapStringToValueMap(v) + case map[string]px.Type: + pv = WrapStringToTypeMap(v) + case json.Number: + if i, err := v.Int64(); err == nil { + pv = integerValue(i) + } else { + f, _ := v.Float64() + pv = floatValue(f) + } + case reflect.Value: + pv = wrapReflected(c, v) + case reflect.Type: + var err error + if pv, err = wrapReflectedType(c, v); err != nil { + panic(err) + } + default: + // Can still be an alias, slice, or map in which case reflection conversion will work + pv = wrapReflected(c, reflect.ValueOf(v)) + } + return pv +} + +func wrapReflected(c px.Context, vr reflect.Value) (pv px.Value) { + if c == nil { + c = px.CurrentContext() + } + + // Invalid shouldn't happen, but needs a check + if !vr.IsValid() { + return undef + } + + vi := vr + + // Check for nil + switch vr.Kind() { + case reflect.Ptr, reflect.Slice, reflect.Array, reflect.Map, reflect.Interface: + if vr.IsNil() { + return undef + } + + if vi.Kind() == reflect.Interface { + // Need implementation here. 
+ vi = vi.Elem() + } + } + + vt := vr.Type() + if _, ok := wellKnown[vt]; ok { + iv := vr.Interface() + if pv, ok = iv.(px.Value); ok { + return + } + // A well-known that isn't an pcore.Value just yet + return wrap(c, iv) + } + + if t, ok := loadFromImplRegistry(c, vi.Type()); ok { + if pt, ok := t.(px.ObjectType); ok { + pv = pt.FromReflectedValue(c, vi) + return + } + } + + pv, ok := WrapPrimitive(vr) + if ok { + return pv + } + + switch vt.Kind() { + case reflect.Slice, reflect.Array: + top := vr.Len() + els := make([]px.Value, top) + for i := 0; i < top; i++ { + els[i] = wrap(c, interfaceOrNil(vr.Index(i))) + } + pv = WrapValues(els) + case reflect.Map: + keys := vr.MapKeys() + els := make([]*HashEntry, len(keys)) + for i, k := range keys { + els[i] = WrapHashEntry(wrap(c, interfaceOrNil(k)), wrap(c, interfaceOrNil(vr.MapIndex(k)))) + } + pv = sortedMap(els) + case reflect.Ptr: + pv = wrapReflected(c, vr.Elem()) + case reflect.Struct: + // Unknown struct. Map this to a Hash + nf := vt.NumField() + els := make([]*HashEntry, 0, nf) + for i := 0; i < nf; i++ { + vv := vr.Field(i) + switch vv.Kind() { + case reflect.Slice, reflect.Map, reflect.Interface, reflect.Ptr: + if vv.IsNil() { + continue + } + } + ft := vt.Field(i) + els = append(els, WrapHashEntry2(FieldName(&ft), wrap(c, interfaceOrNil(vv)))) + } + pv = sortedMap(els) + default: + if vr.IsValid() && vr.CanInterface() { + ix := vr.Interface() + pv, ok = ix.(px.Value) + if ok { + return pv + } + pv = WrapRuntime(vr.Interface()) + } else { + pv = undef + } + } + return pv +} + +func WrapPrimitive(vr reflect.Value) (pv px.Value, ok bool) { + ok = true + switch vr.Kind() { + case reflect.String: + pv = stringValue(vr.String()) + case reflect.Int, reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8: + pv = integerValue(vr.Int()) + case reflect.Uint, reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8: + pv = integerValue(int64(vr.Uint())) // Possible loss for very large numbers + case reflect.Bool: + pv = booleanValue(vr.Bool()) + case reflect.Float64, reflect.Float32: + pv = floatValue(vr.Float()) + default: + ok = false + } + return +} + +func loadFromImplRegistry(c px.Context, vt reflect.Type) (px.Type, bool) { + if t, ok := c.ImplementationRegistry().ReflectedToType(vt); ok { + return t, true + } + return nil, false +} + +var evalValueType = reflect.TypeOf((*px.Value)(nil)).Elem() +var evalTypeType = reflect.TypeOf((*px.Type)(nil)).Elem() +var evalObjectTypeType = reflect.TypeOf((*px.ObjectType)(nil)).Elem() +var evalTypeSetType = reflect.TypeOf((*px.TypeSet)(nil)).Elem() + +var wellKnown map[reflect.Type]px.Type + +func wrapReflectedType(c px.Context, vt reflect.Type) (pt px.Type, err error) { + if c == nil { + c = px.CurrentContext() + } + + var ok bool + if pt, ok = wellKnown[vt]; ok { + return + } + + kind := vt.Kind() + if pt, ok = loadFromImplRegistry(c, vt); ok { + if kind == reflect.Ptr { + pt = NewOptionalType(pt) + } + return + } + + var t px.Type + switch kind { + case reflect.Slice, reflect.Array: + if t, err = wrapReflectedType(c, vt.Elem()); err == nil { + pt = NewArrayType(t, nil) + } + case reflect.Map: + if t, err = wrapReflectedType(c, vt.Key()); err == nil { + var v px.Type + if v, err = wrapReflectedType(c, vt.Elem()); err == nil { + pt = NewHashType(t, v, nil) + } + } + case reflect.Ptr: + if t, err = wrapReflectedType(c, vt.Elem()); err == nil { + pt = NewOptionalType(t) + } + default: + pt, ok = primitivePTypes[vt.Kind()] + if !ok { + err = px.Error(px.UnreflectableType, issue.H{`type`: 
vt.String()}) + } + } + return +} + +var primitivePTypes map[reflect.Kind]px.Type + +func PrimitivePType(vt reflect.Type) (pt px.Type, ok bool) { + pt, ok = primitivePTypes[vt.Kind()] + return +} + +func interfaceOrNil(vr reflect.Value) interface{} { + if vr.CanInterface() { + return vr.Interface() + } + return nil +} + +func newNamedType(name, typeDecl string) px.Type { + dt, err := Parse(typeDecl) + if err != nil { + _, fileName, fileLine, _ := runtime.Caller(1) + err = convertReported(err, fileName, fileLine) + panic(err) + } + return NamedType(px.RuntimeNameAuthority, name, dt) +} + +func convertReported(err error, fileName string, lineOffset int) error { + if ri, ok := err.(issue.Reported); ok { + return ri.OffsetByLocation(issue.NewLocation(fileName, lineOffset, 0)) + } + return err +} + +func NamedType(na px.URI, name string, value px.Value) px.Type { + var ta px.Type + if na == `` { + na = px.RuntimeNameAuthority + } + if dt, ok := value.(*DeferredType); ok { + if len(dt.params) == 1 { + if hash, ok := dt.params[0].(px.OrderedMap); ok && dt.Name() != `Struct` { + if dt.Name() == `Object` { + ta = createMetaType2(na, name, dt.Name(), extractParentName2(hash), hash) + } else if dt.Name() == `TypeSet` { + ta = createMetaType2(na, name, dt.Name(), ``, hash) + } else { + ta = createMetaType2(na, name, `Object`, dt.Name(), hash) + } + } + } + if ta == nil { + ta = NewTypeAliasType(name, dt, nil) + } + } else if h, ok := value.(px.OrderedMap); ok { + ta = createMetaType2(na, name, `Object`, ``, h) + } else { + panic(fmt.Sprintf(`cannot create object from a %s`, dt.String())) + } + return ta +} + +func extractParentName2(hash px.OrderedMap) string { + if p, ok := hash.Get4(keyParent); ok { + if dt, ok := p.(*DeferredType); ok { + return dt.Name() + } + if s, ok := p.(px.StringValue); ok { + return s.String() + } + } + return `` +} + +func createMetaType2(na px.URI, name string, typeName string, parentName string, hash px.OrderedMap) px.Type { + if parentName == `` { + switch typeName { + case `Object`: + return MakeObjectType(name, nil, hash, false) + default: + return NewTypeSet(na, name, hash) + } + } + + return MakeObjectType(name, NewTypeReferenceType(parentName), hash, false) +} + +func argError(key string, e px.Type, a px.Value) issue.Reported { + return px.Error(px.TypeMismatch, issue.H{`detail`: px.DescribeMismatch(`property '`+key+`'`, e, a.PType())}) +} + +func typeArg(hash px.OrderedMap, key string, d px.Type) px.Type { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if t, ok := v.(px.Type); ok { + return t + } + panic(argError(key, DefaultTypeType(), v)) +} + +func hashArg(hash px.OrderedMap, key string) *Hash { + v := hash.Get5(key, nil) + if v == nil { + return emptyMap + } + if t, ok := v.(*Hash); ok { + return t + } + panic(argError(key, DefaultHashType(), v)) +} + +func boolArg(hash px.OrderedMap, key string, d bool) bool { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if t, ok := v.(booleanValue); ok { + return t.Bool() + } + panic(argError(key, DefaultBooleanType(), v)) +} + +type LazyType interface { + LazyIsInstance(v px.Value, g px.Guard) int +} + +func LazyIsInstance(a px.Type, b px.Value, g px.Guard) int { + if lt, ok := a.(LazyType); ok { + return lt.LazyIsInstance(b, g) + } + if a.IsInstance(b, g) { + return 1 + } + return -1 +} + +func stringArg(hash px.OrderedMap, key string, d string) string { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if t, ok := v.(stringValue); ok { + return string(t) + } + panic(argError(key, 
DefaultStringType(), v)) +} + +func uriArg(hash px.OrderedMap, key string, d px.URI) px.URI { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if t, ok := v.(stringValue); ok { + str := string(t) + if _, err := ParseURI2(str, true); err != nil { + panic(px.Error(px.InvalidUri, issue.H{`str`: str, `detail`: err.Error()})) + } + return px.URI(str) + } + if t, ok := v.(*UriValue); ok { + return px.URI(t.URL().String()) + } + panic(argError(key, DefaultUriType(), v)) +} + +func versionArg(hash px.OrderedMap, key string, d semver.Version) semver.Version { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if s, ok := v.(stringValue); ok { + sv, err := semver.ParseVersion(string(s)) + if err != nil { + panic(px.Error(px.InvalidVersion, issue.H{`str`: string(s), `detail`: err.Error()})) + } + return sv + } + if sv, ok := v.(*SemVer); ok { + return sv.Version() + } + panic(argError(key, DefaultSemVerType(), v)) +} + +func versionRangeArg(hash px.OrderedMap, key string, d semver.VersionRange) semver.VersionRange { + v := hash.Get5(key, nil) + if v == nil { + return d + } + if s, ok := v.(stringValue); ok { + sr, err := semver.ParseVersionRange(string(s)) + if err != nil { + panic(px.Error(px.InvalidVersionRange, issue.H{`str`: string(s), `detail`: err.Error()})) + } + return sr + } + if sv, ok := v.(*SemVerRange); ok { + return sv.VersionRange() + } + panic(argError(key, DefaultSemVerType(), v)) +} diff --git a/vendor/github.com/lyraproj/pcore/types/typeset.go b/vendor/github.com/lyraproj/pcore/types/typeset.go new file mode 100644 index 0000000..7e6e0ad --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typeset.go @@ -0,0 +1,695 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "math" + "strings" + "sync/atomic" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/hash" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" + "github.com/lyraproj/semver/semver" +) + +const ( + KeyNameAuthority = `name_authority` + KeyReferences = `references` + KeyTypes = `types` + KeyVersion = `version` + KeyVersionRange = `version_range` +) + +var TypeSetMetaType px.ObjectType + +var TypeSimpleTypeName = NewPatternType([]*RegexpType{NewRegexpType(`\A[A-Z]\w*\z`)}) +var TypeQualifiedReference = NewPatternType([]*RegexpType{NewRegexpType(`\A[A-Z][\w]*(?:::[A-Z][\w]*)*\z`)}) + +var typeStringOrVersion = NewVariantType(stringTypeNotEmpty, DefaultSemVerType()) +var typeStringOrRange = NewVariantType(stringTypeNotEmpty, DefaultSemVerRangeType()) +var typeStringOrUri = NewVariantType(stringTypeNotEmpty, DefaultUriType()) + +var typeTypeReferenceInit = NewStructType([]*StructElement{ + newStructElement2(keyName, TypeQualifiedReference), + newStructElement2(KeyVersionRange, typeStringOrRange), + NewStructElement(newOptionalType3(KeyNameAuthority), typeStringOrUri), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations)}) + +var typeTypesetInit = NewStructType([]*StructElement{ + NewStructElement(newOptionalType3(px.KeyPcoreUri), typeStringOrUri), + newStructElement2(px.KeyPcoreVersion, typeStringOrVersion), + NewStructElement(newOptionalType3(KeyNameAuthority), typeStringOrUri), + NewStructElement(newOptionalType3(keyName), TypeTypeName), + NewStructElement(newOptionalType3(KeyVersion), typeStringOrVersion), + NewStructElement(newOptionalType3(KeyTypes), + NewHashType(TypeSimpleTypeName, + NewVariantType(DefaultTypeType(), TypeObjectInitHash), NewIntegerType(1, math.MaxInt64))), + NewStructElement(newOptionalType3(KeyReferences), + 
NewHashType(TypeSimpleTypeName, typeTypeReferenceInit, NewIntegerType(1, math.MaxInt64))), + NewStructElement(newOptionalType3(keyAnnotations), typeAnnotations)}) + +func init() { + oneArgCtor := func(ctx px.Context, args []px.Value) px.Value { + return newTypeSetType2(args[0].(*Hash), ctx.Loader()) + } + TypeSetMetaType = MakeObjectType(`Pcore::TypeSet`, AnyMetaType, + WrapStringToValueMap(map[string]px.Value{ + `attributes`: singletonMap(`_pcore_init_hash`, typeTypesetInit)}), true, + // Hash constructor is equal to the positional arguments constructor + oneArgCtor, oneArgCtor) +} + +type ( + typeSetReference struct { + annotatable + name string + owner *typeSet + nameAuthority px.URI + versionRange semver.VersionRange + typeSet *typeSet + } + + typeSet struct { + annotatable + hashKey px.HashKey + dcToCcMap map[string]string + name string + nameAuthority px.URI + pcoreURI px.URI + pcoreVersion semver.Version + version semver.Version + typedName px.TypedName + types *Hash + references map[string]*typeSetReference + loader px.Loader + deferredInit px.OrderedMap + } +) + +func newTypeSetReference(t *typeSet, ref *Hash) *typeSetReference { + r := &typeSetReference{ + owner: t, + nameAuthority: uriArg(ref, KeyNameAuthority, t.nameAuthority), + name: stringArg(ref, keyName, ``), + versionRange: versionRangeArg(ref, KeyVersionRange, nil), + } + r.annotatable.initialize(ref) + return r +} + +func (r *typeSetReference) initHash() *hash.StringHash { + h := r.annotatable.initHash() + if r.nameAuthority != r.owner.nameAuthority { + h.Put(KeyNameAuthority, stringValue(string(r.nameAuthority))) + } + h.Put(keyName, stringValue(r.name)) + h.Put(KeyVersionRange, WrapSemVerRange(r.versionRange)) + return h +} + +func (r *typeSetReference) Equals(other interface{}, g px.Guard) bool { + if or, ok := other.(*typeSetReference); ok { + return r.name == or.name && r.nameAuthority == or.nameAuthority && r.versionRange == or.versionRange && r.typeSet.Equals(or.typeSet, g) + } + return false +} + +func (r *typeSetReference) resolve(c px.Context) { + tn := px.NewTypedName2(px.NsType, r.name, r.nameAuthority) + loadedEntry := c.Loader().LoadEntry(c, tn) + if loadedEntry != nil { + if ts, ok := loadedEntry.Value().(*typeSet); ok { + ts = ts.Resolve(c).(*typeSet) + if r.versionRange.Includes(ts.version) { + r.typeSet = ts + return + } + panic(px.Error(px.TypesetReferenceMismatch, issue.H{`name`: r.owner.name, `ref_name`: r.name, `version_range`: r.versionRange, `actual`: ts.version})) + } + } + var v interface{} + if loadedEntry != nil { + v = loadedEntry.Value() + } + if v == nil { + panic(px.Error(px.TypesetReferenceUnresolved, issue.H{`name`: r.owner.name, `ref_name`: r.name})) + } + var typeName string + if vt, ok := v.(px.Type); ok { + typeName = vt.Name() + } else if vv, ok := v.(px.Value); ok { + typeName = vv.PType().Name() + } else { + typeName = fmt.Sprintf("%T", v) + } + panic(px.Error(px.TypesetReferenceBadType, issue.H{`name`: r.owner.name, `ref_name`: r.name, `type_name`: typeName})) +} + +var typeSetTypeDefault = &typeSet{ + name: `TypeSet`, + nameAuthority: px.RuntimeNameAuthority, + pcoreURI: px.PcoreUri, + pcoreVersion: px.PcoreVersion, + version: semver.Zero, +} + +var typeSetId = int64(0) + +func AllocTypeSetType() *typeSet { + return &typeSet{ + annotatable: annotatable{annotations: emptyMap}, + dcToCcMap: make(map[string]string, 17), + hashKey: px.HashKey(fmt.Sprintf("\x00tTypeSet%d", atomic.AddInt64(&typeSetId, 1))), + types: emptyMap, + references: map[string]*typeSetReference{}, + } +} + 
+func (t *typeSet) Initialize(c px.Context, args []px.Value) { + if len(args) == 1 { + if h, ok := args[0].(px.OrderedMap); ok { + t.InitFromHash(c, h) + return + } + } + panic(px.Error(px.Failure, issue.H{`message`: `internal error when creating an TypeSet data type`})) +} + +func NewTypeSet(na px.URI, name string, initHash px.OrderedMap) px.TypeSet { + obj := AllocTypeSetType() + obj.nameAuthority = na + if name == `` { + if initHash.IsEmpty() { + return DefaultTypeSetType().(*typeSet) + } + name = initHash.Get5(keyName, emptyString).String() + } + obj.name = name + obj.deferredInit = initHash + return obj +} + +func newTypeSetType2(initHash px.OrderedMap, loader px.Loader) px.TypeSet { + obj := NewTypeSet(loader.NameAuthority(), ``, initHash).(*typeSet) + obj.loader = loader + return obj +} + +func DefaultTypeSetType() px.TypeSet { + return typeSetTypeDefault +} + +func (t *typeSet) Annotations() *Hash { + return t.annotations +} + +func (t *typeSet) Accept(v px.Visitor, g px.Guard) { + v(t) + // TODO: Visit typeset members +} + +func (t *typeSet) Default() px.Type { + return typeSetTypeDefault +} + +func (t *typeSet) Equals(other interface{}, guard px.Guard) bool { + if ot, ok := other.(*typeSet); ok { + return t.name == ot.name && t.nameAuthority == ot.nameAuthority && t.pcoreURI == ot.pcoreURI && t.pcoreVersion.Equals(ot.pcoreVersion) && t.version.Equals(ot.version) + } + return false +} + +func (t *typeSet) Generic() px.Type { + return DefaultTypeSetType() +} + +func (t *typeSet) InitFromHash(c px.Context, initHash px.OrderedMap) { + px.AssertInstance(`typeset initializer`, typeTypesetInit, initHash) + t.name = stringArg(initHash, keyName, t.name) + t.nameAuthority = uriArg(initHash, KeyNameAuthority, t.nameAuthority) + + t.pcoreVersion = versionArg(initHash, px.KeyPcoreVersion, nil) + if !px.ParsablePcoreVersions.Includes(t.pcoreVersion) { + panic(px.Error(px.UnhandledPcoreVersion, + issue.H{`name`: t.name, `expected_range`: px.ParsablePcoreVersions, `pcore_version`: t.pcoreVersion})) + } + t.pcoreURI = uriArg(initHash, px.KeyPcoreUri, ``) + t.version = versionArg(initHash, KeyVersion, nil) + t.types = hashArg(initHash, KeyTypes) + t.types.EachKey(func(kv px.Value) { + key := kv.String() + t.dcToCcMap[strings.ToLower(key)] = key + }) + + refs := hashArg(initHash, KeyReferences) + if !refs.IsEmpty() { + refMap := make(map[string]*typeSetReference, 7) + rootMap := make(map[px.URI]map[string][]semver.VersionRange, 7) + refs.EachPair(func(k, v px.Value) { + refAlias := k.String() + + if t.types.IncludesKey(k) { + panic(px.Error(px.TypesetAliasCollides, + issue.H{`name`: t.name, `ref_alias`: refAlias})) + } + + if _, ok := refMap[refAlias]; ok { + panic(px.Error(px.TypesetReferenceDuplicate, + issue.H{`name`: t.name, `ref_alias`: refAlias})) + } + + ref := newTypeSetReference(t, v.(*Hash)) + refName := ref.name + refNA := ref.nameAuthority + naRoots, found := rootMap[refNA] + if !found { + naRoots = make(map[string][]semver.VersionRange, 3) + rootMap[refNA] = naRoots + } + + if ranges, found := naRoots[refName]; found { + for _, rng := range ranges { + if rng.Intersection(ref.versionRange) != nil { + panic(px.Error(px.TypesetReferenceOverlap, + issue.H{`name`: t.name, `ref_na`: refNA, `ref_name`: refName})) + } + } + naRoots[refName] = append(ranges, ref.versionRange) + } else { + naRoots[refName] = []semver.VersionRange{ref.versionRange} + } + + refMap[refAlias] = ref + t.dcToCcMap[strings.ToLower(refAlias)] = refAlias + }) + t.references = refMap + } + 
t.annotatable.initialize(initHash.(*Hash)) +} + +func (t *typeSet) Get(key string) (value px.Value, ok bool) { + switch key { + case px.KeyPcoreUri: + if t.pcoreURI == `` { + return undef, true + } + return WrapURI2(string(t.pcoreURI)), true + case px.KeyPcoreVersion: + return WrapSemVer(t.pcoreVersion), true + case KeyNameAuthority: + if t.nameAuthority == `` { + return undef, true + } + return WrapURI2(string(t.nameAuthority)), true + case keyName: + return stringValue(t.name), true + case KeyVersion: + if t.version == nil { + return undef, true + } + return WrapSemVer(t.version), true + case KeyTypes: + return t.types, true + case KeyReferences: + return t.referencesHash(), true + } + return nil, false +} + +func (t *typeSet) GetType(typedName px.TypedName) (px.Type, bool) { + if !(typedName.Namespace() == px.NsType && typedName.Authority() == t.nameAuthority) { + return nil, false + } + + segments := typedName.Parts() + first := segments[0] + if len(segments) == 1 { + if found, ok := t.GetType2(first); ok { + return found, true + } + } + + if len(t.references) == 0 { + return nil, false + } + + tsRef, ok := t.references[first] + if !ok { + tsRef, ok = t.references[t.dcToCcMap[first]] + if !ok { + return nil, false + } + } + + typeSet := tsRef.typeSet + switch len(segments) { + case 1: + return typeSet, true + case 2: + return typeSet.GetType2(segments[1]) + default: + return typeSet.GetType(typedName.Child()) + } +} + +func (t *typeSet) GetType2(name string) (px.Type, bool) { + v := t.types.Get6(name, func() px.Value { + return t.types.Get5(t.dcToCcMap[name], nil) + }) + if found, ok := v.(px.Type); ok { + return found, true + } + return nil, false +} + +func (t *typeSet) InitHash() px.OrderedMap { + return WrapStringPValue(t.initHash()) +} + +func (t *typeSet) IsInstance(o px.Value, g px.Guard) bool { + return t.IsAssignable(o.PType(), g) +} + +func (t *typeSet) IsAssignable(other px.Type, g px.Guard) bool { + if ot, ok := other.(*typeSet); ok { + return t.Equals(typeSetTypeDefault, g) || t.Equals(ot, g) + } + return false +} + +func (t *typeSet) MetaType() px.ObjectType { + return TypeSetMetaType +} + +func (t *typeSet) Name() string { + return t.name +} + +func (t *typeSet) NameAuthority() px.URI { + return t.nameAuthority +} + +func (t *typeSet) TypedName() px.TypedName { + return t.typedName +} + +func (t *typeSet) Parameters() []px.Value { + if t.Equals(typeSetTypeDefault, nil) { + return px.EmptyValues + } + return []px.Value{t.InitHash()} +} + +func (t *typeSet) Resolve(c px.Context) px.Type { + ihe := t.deferredInit + if ihe == nil { + return t + } + + t.deferredInit = nil + initHash := t.resolveDeferred(c, ihe) + t.loader = c.Loader() + t.InitFromHash(c, initHash) + t.typedName = px.NewTypedName2(px.NsType, t.name, t.nameAuthority) + + for _, ref := range t.references { + ref.resolve(c) + } + c.DoWithLoader(px.NewTypeSetLoader(c.Loader(), t), func() { + if t.nameAuthority == `` { + t.nameAuthority = t.resolveNameAuthority(initHash, c, nil) + } + t.types = t.types.MapValues(func(tp px.Value) px.Value { + if rtp, ok := tp.(px.ResolvableType); ok { + return rtp.Resolve(c) + } + return tp + }).(*Hash) + }) + return t +} + +func (t *typeSet) Types() px.OrderedMap { + return t.types +} + +func (t *typeSet) Version() semver.Version { + return t.version +} + +func (t *typeSet) String() string { + return px.ToString2(t, Expanded) +} + +func (t *typeSet) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), t.PType()) + switch f.FormatChar() { + 
case 's', 'p': + quoted := f.IsAlt() && f.FormatChar() == 's' + if quoted || f.HasStringFlags() { + bld := bytes.NewBufferString(``) + t.basicTypeToString(bld, f, s, g) + f.ApplyStringFlags(b, bld.String(), quoted) + } else { + t.basicTypeToString(b, f, s, g) + } + default: + panic(s.UnsupportedFormat(t.PType(), `sp`, f)) + } +} + +func (t *typeSet) PType() px.Type { + return &TypeType{t} +} + +func (t *typeSet) basicTypeToString(b io.Writer, f px.Format, s px.FormatContext, g px.RDetect) { + if t.Equals(DefaultTypeSetType(), nil) { + utils.WriteString(b, `TypeSet`) + return + } + + if ex, ok := s.Property(`expanded`); !(ok && ex == `true`) { + name := t.Name() + if ts, ok := s.Property(`typeSet`); ok { + name = stripTypeSetName(ts, name) + } + utils.WriteString(b, name) + return + } + s = s.WithProperties(map[string]string{`typeSet`: t.Name(), `typeSetParent`: `true`}) + + utils.WriteString(b, `TypeSet[{`) + indent1 := s.Indentation() + indent2 := indent1.Increase(f.IsAlt()) + indent3 := indent2.Increase(f.IsAlt()) + padding1 := `` + padding2 := `` + padding3 := `` + if f.IsAlt() { + padding1 = indent1.Padding() + padding2 = indent2.Padding() + padding3 = indent3.Padding() + } + + cf := f.ContainerFormats() + if cf == nil { + cf = DefaultContainerFormats + } + + ctx2 := px.NewFormatContext2(indent2, s.FormatMap(), s.Properties()) + cti2 := px.NewFormatContext2(indent2, cf, s.Properties()) + ctx3 := px.NewFormatContext2(indent3, s.FormatMap(), s.Properties()) + + first2 := true + t.initHash().EachPair(func(key string, vi interface{}) { + if first2 { + first2 = false + } else { + utils.WriteString(b, `,`) + if !f.IsAlt() { + utils.WriteString(b, ` `) + } + } + value := vi.(px.Value) + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding2) + } + utils.WriteString(b, key) + utils.WriteString(b, ` => `) + switch key { + case `pcore_uri`, `pcore_version`, `name_authority`, `version`: + utils.PuppetQuote(b, value.String()) + case `types`, `references`: + // The keys should not be quoted in this hash + utils.WriteString(b, `{`) + first3 := true + value.(*Hash).EachPair(func(typeName, typ px.Value) { + if first3 { + first3 = false + } else { + utils.WriteString(b, `,`) + if !f.IsAlt() { + utils.WriteString(b, ` `) + } + } + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding3) + } + utils.WriteString(b, typeName.String()) + utils.WriteString(b, ` => `) + typ.ToString(b, ctx3, g) + }) + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding2) + } + utils.WriteString(b, "}") + default: + cx := cti2 + if isContainer(value, s) { + cx = ctx2 + } + value.ToString(b, cx, g) + } + }) + if f.IsAlt() { + utils.WriteString(b, "\n") + utils.WriteString(b, padding1) + } + utils.WriteString(b, "}]") +} + +func (t *typeSet) initHash() *hash.StringHash { + h := t.annotatable.initHash() + if t.pcoreURI != `` { + h.Put(px.KeyPcoreUri, WrapURI2(string(t.pcoreURI))) + } + h.Put(px.KeyPcoreVersion, WrapSemVer(t.pcoreVersion)) + if t.nameAuthority != `` { + h.Put(KeyNameAuthority, WrapURI2(string(t.nameAuthority))) + } + h.Put(keyName, stringValue(t.name)) + if t.version != nil { + h.Put(KeyVersion, WrapSemVer(t.version)) + } + if !t.types.IsEmpty() { + h.Put(KeyTypes, t.types) + } + if len(t.references) > 0 { + h.Put(KeyReferences, t.referencesHash()) + } + return h +} + +func (t *typeSet) referencesHash() *Hash { + if len(t.references) == 0 { + return emptyMap + } + entries := make([]*HashEntry, len(t.references)) + idx := 0 + for key, tr := range 
t.references { + entries[idx] = WrapHashEntry2(key, WrapStringPValue(tr.initHash())) + idx++ + } + return WrapHash(entries) +} + +func (t *typeSet) resolveDeferred(c px.Context, lh px.OrderedMap) *Hash { + entries := make([]*HashEntry, 0) + types := hash.NewStringHash(16) + + var typesHash *Hash + + lh.Each(func(v px.Value) { + le := v.(px.MapEntry) + key := le.Key().String() + if key == KeyTypes || key == KeyReferences { + if key == KeyTypes { + typesHash = emptyMap + } + + // Avoid resolving qualified references into types + if vh, ok := le.Value().(px.OrderedMap); ok { + xes := make([]*HashEntry, 0) + vh.Each(func(v px.Value) { + he := v.(px.MapEntry) + var name string + if qr, ok := he.Key().(*DeferredType); ok && len(qr.Parameters()) == 0 { + name = qr.Name() + } else if tr, ok := he.Key().(*TypeReferenceType); ok { + name = tr.typeString + } else { + name = resolveValue(c, he.Key()).String() + } + if key == KeyTypes { + // Defer type resolution until all types are known + types.Put(name, he.Value()) + } else { + xes = append(xes, WrapHashEntry2(name, resolveValue(c, he.Value()))) + } + }) + if key == KeyReferences { + entries = append(entries, WrapHashEntry2(key, WrapHash(xes))) + } + } else { + // Probably a bogus entry, will cause type error further on + entries = append(entries, resolveEntry(c, le).(*HashEntry)) + if key == KeyTypes { + typesHash = nil + } + } + } else { + entries = append(entries, resolveEntry(c, le).(*HashEntry)) + } + }) + + result := WrapHash(entries) + nameAuth := t.resolveNameAuthority(result, c, nil) + if !types.IsEmpty() { + es := make([]*HashEntry, 0, types.Len()) + types.EachPair(func(typeName string, value interface{}) { + fullName := fmt.Sprintf(`%s::%s`, t.name, typeName) + var tp px.Type + if tv, ok := value.(px.Type); ok { + tp = tv + } else { + tp = NamedType(nameAuth, fullName, value.(px.Value)) + } + es = append(es, WrapHashEntry2(typeName, tp)) + }) + typesHash = WrapHash(es) + } + if typesHash != nil { + result = WrapHash(append(entries, WrapHashEntry2(KeyTypes, typesHash))) + } + return result +} + +func (t *typeSet) resolveNameAuthority(hash px.OrderedMap, c px.Context, location issue.Location) px.URI { + nameAuth := t.nameAuthority + if nameAuth == `` { + nameAuth = uriArg(hash, KeyNameAuthority, ``) + if nameAuth == `` { + nameAuth = c.Loader().NameAuthority() + } + } + if nameAuth == `` { + n := t.name + if n == `` { + n = stringArg(hash, keyName, ``) + } + var err error + if location != nil { + err = px.Error2(location, px.TypesetMissingNameAuthority, issue.H{`name`: n}) + } else { + err = px.Error(px.TypesetMissingNameAuthority, issue.H{`name`: n}) + } + panic(err) + } + return nameAuth +} diff --git a/vendor/github.com/lyraproj/pcore/types/typetype.go b/vendor/github.com/lyraproj/pcore/types/typetype.go new file mode 100644 index 0000000..b9ce724 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/typetype.go @@ -0,0 +1,154 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type TypeType struct { + typ px.Type +} + +var typeTypeDefault = &TypeType{typ: anyTypeDefault} + +var TypeMetaType px.ObjectType + +func init() { + TypeMetaType = newObjectType(`Pcore::TypeType`, + `Pcore::AnyType { + attributes => { + type => { + type => Optional[Type], + value => Any + }, + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newTypeType2(args...) 
+ }) + + newGoConstructor(`Type`, + func(d px.Dispatch) { + d.Param(`String`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return c.ParseTypeValue(args[0]) + }) + }, + func(d px.Dispatch) { + d.Param2(TypeObjectInitHash) + d.Function(func(c px.Context, args []px.Value) px.Value { + return MakeObjectType(``, nil, args[0].(px.OrderedMap), false).Resolve(c) + }) + }) +} + +func DefaultTypeType() *TypeType { + return typeTypeDefault +} + +func NewTypeType(containedType px.Type) *TypeType { + if containedType == nil || containedType == anyTypeDefault { + return DefaultTypeType() + } + return &TypeType{containedType} +} + +func newTypeType2(args ...px.Value) *TypeType { + switch len(args) { + case 0: + return DefaultTypeType() + case 1: + if containedType, ok := args[0].(px.Type); ok { + return NewTypeType(containedType) + } + panic(illegalArgumentType(`Type[]`, 0, `Type`, args[0])) + default: + panic(illegalArgumentCount(`Type[]`, `0 or 1`, len(args))) + } +} + +func (t *TypeType) ContainedType() px.Type { + return t.typ +} + +func (t *TypeType) Accept(v px.Visitor, g px.Guard) { + v(t) + t.typ.Accept(v, g) +} + +func (t *TypeType) Default() px.Type { + return typeTypeDefault +} + +func (t *TypeType) Equals(o interface{}, g px.Guard) bool { + if ot, ok := o.(*TypeType); ok { + return t.typ.Equals(ot.typ, g) + } + return false +} + +func (t *TypeType) Generic() px.Type { + return NewTypeType(px.GenericType(t.typ)) +} + +func (t *TypeType) Get(key string) (value px.Value, ok bool) { + switch key { + case `type`: + return t.typ, true + } + return nil, false +} + +func (t *TypeType) IsAssignable(o px.Type, g px.Guard) bool { + if ot, ok := o.(*TypeType); ok { + return GuardedIsAssignable(t.typ, ot.typ, g) + } + return false +} + +func (t *TypeType) IsInstance(o px.Value, g px.Guard) bool { + if ot, ok := o.(px.Type); ok { + return GuardedIsAssignable(t.typ, ot, g) + } + return false +} + +func (t *TypeType) MetaType() px.ObjectType { + return TypeMetaType +} + +func (t *TypeType) Name() string { + return `Type` +} + +func (t *TypeType) Parameters() []px.Value { + if t.typ == DefaultAnyType() { + return px.EmptyValues + } + return []px.Value{t.typ} +} + +func (t *TypeType) Resolve(c px.Context) px.Type { + t.typ = resolve(c, t.typ) + return t +} + +func (t *TypeType) CanSerializeAsString() bool { + return canSerializeAsString(t.typ) +} + +func (t *TypeType) SerializationString() string { + return t.String() +} + +func (t *TypeType) String() string { + return px.ToString2(t, None) +} + +func (t *TypeType) PType() px.Type { + return &TypeType{t} +} + +func (t *TypeType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} diff --git a/vendor/github.com/lyraproj/pcore/types/undeftype.go b/vendor/github.com/lyraproj/pcore/types/undeftype.go new file mode 100644 index 0000000..b9700e2 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/undeftype.go @@ -0,0 +1,120 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/utils" + + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type ( + UndefType struct{} + + // UndefValue is an empty struct because both type and value are known + UndefValue struct{} +) + +var undefTypeDefault = &UndefType{} + +var UndefMetaType px.ObjectType + +func init() { + UndefMetaType = newObjectType(`Pcore::UndefType`, `Pcore::AnyType{}`, + func(ctx px.Context, args []px.Value) px.Value { + return DefaultUndefType() + }) +} + +func DefaultUndefType() *UndefType { + return 
undefTypeDefault +} + +func (t *UndefType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *UndefType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*UndefType) + return ok +} + +func (t *UndefType) IsAssignable(o px.Type, g px.Guard) bool { + _, ok := o.(*UndefType) + return ok +} + +func (t *UndefType) IsInstance(o px.Value, g px.Guard) bool { + return o == undef +} + +func (t *UndefType) MetaType() px.ObjectType { + return UndefMetaType +} + +func (t *UndefType) Name() string { + return `Undef` +} + +func (t *UndefType) ReflectType(c px.Context) (reflect.Type, bool) { + return reflect.Value{}.Type(), true +} + +func (t *UndefType) CanSerializeAsString() bool { + return true +} + +func (t *UndefType) SerializationString() string { + return t.String() +} + +func (t *UndefType) String() string { + return `Undef` +} + +func (t *UndefType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *UndefType) PType() px.Type { + return &TypeType{t} +} + +func WrapUndef() *UndefValue { + return &UndefValue{} +} + +func (uv *UndefValue) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*UndefValue) + return ok +} + +func (uv *UndefValue) Reflect(c px.Context) reflect.Value { + return reflect.Value{} +} + +func (uv *UndefValue) ReflectTo(c px.Context, value reflect.Value) { + if !value.CanSet() { + panic(px.Error(px.AttemptToSetUnsettable, issue.H{`kind`: value.Kind().String()})) + } + value.Set(reflect.Zero(value.Type())) +} + +func (uv *UndefValue) String() string { + return `undef` +} + +func (uv *UndefValue) ToKey() px.HashKey { + return px.HashKey([]byte{1, HkUndef}) +} + +func (uv *UndefValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + utils.WriteString(b, `undef`) +} + +func (uv *UndefValue) PType() px.Type { + return DefaultUndefType() +} diff --git a/vendor/github.com/lyraproj/pcore/types/unittype.go b/vendor/github.com/lyraproj/pcore/types/unittype.go new file mode 100644 index 0000000..72100cd --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/unittype.go @@ -0,0 +1,78 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type UnitType struct{} + +var UnitMetaType px.ObjectType + +func init() { + UnitMetaType = newObjectType(`Pcore::UnitType`, `Pcore::AnyType{}`, + func(ctx px.Context, args []px.Value) px.Value { + return DefaultUnitType() + }) + + newGoConstructor(`Unit`, + func(d px.Dispatch) { + d.Param(`Any`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return args[0] + }) + }, + ) +} + +func DefaultUnitType() *UnitType { + return unitTypeDefault +} + +func (t *UnitType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *UnitType) Equals(o interface{}, g px.Guard) bool { + _, ok := o.(*UnitType) + return ok +} + +func (t *UnitType) IsAssignable(o px.Type, g px.Guard) bool { + return true +} + +func (t *UnitType) IsInstance(o px.Value, g px.Guard) bool { + return true +} + +func (t *UnitType) MetaType() px.ObjectType { + return UnitMetaType +} + +func (t *UnitType) Name() string { + return `Unit` +} + +func (t *UnitType) CanSerializeAsString() bool { + return true +} + +func (t *UnitType) SerializationString() string { + return t.String() +} + +func (t *UnitType) String() string { + return `Unit` +} + +func (t *UnitType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *UnitType) PType() px.Type { + return &TypeType{t} +} + +var unitTypeDefault = &UnitType{} diff --git 
a/vendor/github.com/lyraproj/pcore/types/uritype.go b/vendor/github.com/lyraproj/pcore/types/uritype.go new file mode 100644 index 0000000..51b7e37 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/uritype.go @@ -0,0 +1,477 @@ +package types + +import ( + "bytes" + "fmt" + "io" + "net/url" + "strconv" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/utils" +) + +var uriTypeDefault = &UriType{undef} + +var URIMetaType px.ObjectType + +var members = map[string]uriMemberFunc{ + `scheme`: func(uri *url.URL) px.Value { + if uri.Scheme != `` { + return stringValue(strings.ToLower(uri.Scheme)) + } + return undef + }, + `userinfo`: func(uri *url.URL) px.Value { + if uri.User != nil { + return stringValue(uri.User.String()) + } + return undef + }, + `host`: func(uri *url.URL) px.Value { + if uri.Host != `` { + h := uri.Host + colon := strings.IndexByte(h, ':') + if colon >= 0 { + h = h[:colon] + } + return stringValue(strings.ToLower(h)) + } + return undef + }, + `port`: func(uri *url.URL) px.Value { + port := uri.Port() + if port != `` { + if pn, err := strconv.Atoi(port); err == nil { + return integerValue(int64(pn)) + } + } + return undef + }, + `path`: func(uri *url.URL) px.Value { + if uri.Path != `` { + return stringValue(uri.Path) + } + return undef + }, + `query`: func(uri *url.URL) px.Value { + if uri.RawQuery != `` { + return stringValue(uri.RawQuery) + } + return undef + }, + `fragment`: func(uri *url.URL) px.Value { + if uri.Fragment != `` { + return stringValue(uri.Fragment) + } + return undef + }, + `opaque`: func(uri *url.URL) px.Value { + if uri.Opaque != `` { + return stringValue(uri.Opaque) + } + return undef + }, +} + +func init() { + registerResolvableType( + newNamedType(`Pcore::URIStringParam`, `Variant[String[1],Regexp,Type[Pattern],Type[Enum],Type[NotUndef],Type[Undef]]`).(px.ResolvableType)) + registerResolvableType( + newNamedType(`Pcore::URIIntParam`, `Variant[Integer[0],Type[NotUndef],Type[Undef]]`).(px.ResolvableType)) + + URIMetaType = newObjectType(`Pcore::URIType`, + `Pcore::AnyType{ + attributes => { + parameters => { + type => Variant[Undef, String[1], URI, Struct[ + Optional['scheme'] => URIStringParam, + Optional['userinfo'] => URIStringParam, + Optional['host'] => URIStringParam, + Optional['port'] => URIIntParam, + Optional['path'] => URIStringParam, + Optional['query'] => URIStringParam, + Optional['fragment'] => URIStringParam, + Optional['opaque'] => URIStringParam, + ]], + value => undef + } + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newUriType2(args...) 
+ }) + + newGoConstructor(`URI`, + func(d px.Dispatch) { + d.Param(`String[1]`) + d.OptionalParam(`Boolean`) + d.Function(func(c px.Context, args []px.Value) px.Value { + strict := false + str := args[0].String() + if len(args) > 1 { + strict = args[1].(booleanValue).Bool() + } + u, err := ParseURI2(str, strict) + if err != nil { + panic(px.Error(px.InvalidUri, issue.H{`str`: str, `detail`: err.Error()})) + } + return WrapURI(u) + }) + }, + + func(d px.Dispatch) { + d.Param(`Hash[String[1],ScalarData]`) + d.Function(func(c px.Context, args []px.Value) px.Value { + return WrapURI(URIFromHash(args[0].(*Hash))) + }) + }) +} + +type ( + UriType struct { + parameters interface{} // string, URL, or hash + } + + UriValue struct { + UriType + } + + uriMemberFunc func(*url.URL) px.Value + + uriMember struct { + memberFunc uriMemberFunc + } +) + +func (um *uriMember) Call(c px.Context, receiver px.Value, block px.Lambda, args []px.Value) px.Value { + return um.memberFunc(receiver.(*UriValue).URL()) +} + +func DefaultUriType() *UriType { + return uriTypeDefault +} + +func NewUriType(uri *url.URL) *UriType { + return &UriType{uri} +} + +func newUriType3(parameters *Hash) *UriType { + if parameters.IsEmpty() { + return uriTypeDefault + } + return &UriType{parameters} +} + +func newUriType2(args ...px.Value) *UriType { + switch len(args) { + case 0: + return DefaultUriType() + case 1: + if str, ok := args[0].(stringValue); ok { + return NewUriType(ParseURI(string(str))) + } + if uri, ok := args[0].(*UriValue); ok { + return NewUriType(uri.URL()) + } + if hash, ok := args[0].(*Hash); ok { + return newUriType3(hash) + } + panic(illegalArgumentType(`URI[]`, 0, `Variant[URI, Hash]`, args[0])) + default: + panic(illegalArgumentCount(`URI[]`, `0 or 1`, len(args))) + } +} + +// ParseURI parses a string into a uri.URL and panics with an issue code if the parse fails +func ParseURI(str string) *url.URL { + uri, err := url.Parse(str) + if err != nil { + panic(px.Error(px.InvalidUri, issue.H{`str`: str, `detail`: err.Error()})) + } + return uri +} + +func ParseURI2(str string, strict bool) (*url.URL, error) { + if strict { + return url.ParseRequestURI(str) + } + return url.Parse(str) +} + +func URIFromHash(hash *Hash) *url.URL { + uri := &url.URL{} + if scheme, ok := hash.Get4(`scheme`); ok { + uri.Scheme = scheme.String() + } + if user, ok := hash.Get4(`userinfo`); ok { + us := user.String() + colon := strings.IndexByte(us, ':') + if colon >= 0 { + uri.User = url.UserPassword(us[:colon], us[colon+1:]) + } else { + uri.User = url.User(us) + } + } + if host, ok := hash.Get4(`host`); ok { + uri.Host = host.String() + } + if port, ok := hash.Get4(`port`); ok { + uri.Host = fmt.Sprintf(`%s:%d`, uri.Host, port.(integerValue).Int()) + } + if path, ok := hash.Get4(`path`); ok { + uri.Path = path.String() + } + if query, ok := hash.Get4(`query`); ok { + uri.RawQuery = query.String() + } + if fragment, ok := hash.Get4(`fragment`); ok { + uri.Fragment = fragment.String() + } + if opaque, ok := hash.Get4(`opaque`); ok { + uri.Opaque = opaque.String() + } + return uri +} + +func (t *UriType) Accept(v px.Visitor, g px.Guard) { + v(t) +} + +func (t *UriType) Default() px.Type { + return uriTypeDefault +} + +func (t *UriType) Equals(other interface{}, g px.Guard) bool { + if ot, ok := other.(*UriType); ok { + switch t.parameters.(type) { + case *UndefValue: + return undef.Equals(ot.parameters, g) + case *Hash: + return t.parameters.(*Hash).Equals(ot.paramsAsHash(), g) + default: + if undef.Equals(ot.parameters, g) { + return 
false + } + if u, ok := ot.parameters.(*url.URL); ok { + return px.Equals(t.parameters, u, g) + } + return t.paramsAsHash().Equals(ot.paramsAsHash(), g) + } + } + return false +} + +func (t *UriType) Get(key string) (value px.Value, ok bool) { + if key == `parameters` { + switch t.parameters.(type) { + case *UndefValue: + return undef, true + case *Hash: + return t.parameters.(*Hash), true + default: + return urlToHash(t.parameters.(*url.URL)), true + } + } + return nil, false +} + +func (t *UriType) IsAssignable(other px.Type, g px.Guard) bool { + if ot, ok := other.(*UriType); ok { + switch t.parameters.(type) { + case *UndefValue: + return true + default: + oParams := ot.paramsAsHash() + return t.paramsAsHash().AllPairs(func(k, b px.Value) bool { + if a, ok := oParams.Get(k); ok { + if at, ok := a.(px.Type); ok { + bt, ok := b.(px.Type) + return ok && isAssignable(bt, at) + } + return px.PuppetMatch(a, b) + } + return false + }) + } + } + return false +} + +func (t *UriType) IsInstance(other px.Value, g px.Guard) bool { + if ov, ok := other.(*UriValue); ok { + switch t.parameters.(type) { + case *UndefValue: + return true + default: + ovUri := ov.URL() + return t.paramsAsHash().AllPairs(func(k, v px.Value) bool { + return px.PuppetMatch(v, getURLField(ovUri, k.String())) + }) + } + } + return false +} + +func (t *UriType) Member(name string) (px.CallableMember, bool) { + if member, ok := members[name]; ok { + return &uriMember{member}, true + } + return nil, false +} + +func (t *UriType) MetaType() px.ObjectType { + return URIMetaType +} + +func (t *UriType) Name() string { + return `URI` +} + +func (t *UriType) Parameters() []px.Value { + switch t.parameters.(type) { + case *UndefValue: + return px.EmptyValues + case *Hash: + return []px.Value{t.parameters.(*Hash)} + default: + return []px.Value{urlToHash(t.parameters.(*url.URL))} + } +} + +func (t *UriType) CanSerializeAsString() bool { + return true +} + +func (t *UriType) SerializationString() string { + return t.String() +} + +func (t *UriType) String() string { + return px.ToString2(t, None) +} + +func (t *UriType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *UriType) PType() px.Type { + return &TypeType{t} +} + +func (t *UriType) paramsAsHash() *Hash { + switch t.parameters.(type) { + case *UndefValue: + return emptyMap + case *Hash: + return t.parameters.(*Hash) + default: + return urlToHash(t.parameters.(*url.URL)) + } +} + +func urlToHash(uri *url.URL) *Hash { + entries := make([]*HashEntry, 0, 8) + if uri.Scheme != `` { + entries = append(entries, WrapHashEntry2(`scheme`, stringValue(strings.ToLower(uri.Scheme)))) + } + if uri.User != nil { + entries = append(entries, WrapHashEntry2(`userinfo`, stringValue(uri.User.String()))) + } + if uri.Host != `` { + h := uri.Host + colon := strings.IndexByte(h, ':') + if colon >= 0 { + entries = append(entries, WrapHashEntry2(`host`, stringValue(strings.ToLower(h[:colon])))) + if p, err := strconv.Atoi(uri.Port()); err == nil { + entries = append(entries, WrapHashEntry2(`port`, integerValue(int64(p)))) + } + } else { + entries = append(entries, WrapHashEntry2(`host`, stringValue(strings.ToLower(h)))) + } + } + if uri.Path != `` { + entries = append(entries, WrapHashEntry2(`path`, stringValue(uri.Path))) + } + if uri.RawQuery != `` { + entries = append(entries, WrapHashEntry2(`query`, stringValue(uri.RawQuery))) + } + if uri.Fragment != `` { + entries = append(entries, WrapHashEntry2(`fragment`, stringValue(uri.Fragment))) + } + if 
uri.Opaque != `` { + entries = append(entries, WrapHashEntry2(`opaque`, stringValue(uri.Opaque))) + } + return WrapHash(entries) +} + +func getURLField(uri *url.URL, key string) px.Value { + if member, ok := members[key]; ok { + return member(uri) + } + return undef +} + +func WrapURI(uri *url.URL) *UriValue { + return &UriValue{UriType{uri}} +} + +func WrapURI2(str string) *UriValue { + return WrapURI(ParseURI(str)) +} + +func (u *UriValue) Equals(other interface{}, guard px.Guard) bool { + if ou, ok := other.(*UriValue); ok { + return *u.URL() == *ou.URL() + } + return false +} + +func (u *UriValue) Get(key string) (px.Value, bool) { + if member, ok := members[key]; ok { + return member(u.URL()), true + } + return undef, false +} + +func (u *UriValue) CanSerializeAsString() bool { + return true +} + +func (u *UriValue) SerializationString() string { + return u.String() +} + +func (u *UriValue) String() string { + return px.ToString(u) +} + +func (u *UriValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + f := px.GetFormat(s.FormatMap(), u.PType()) + val := u.URL().String() + switch f.FormatChar() { + case 's': + f.ApplyStringFlags(b, val, f.IsAlt()) + case 'p': + utils.WriteString(b, `URI(`) + utils.PuppetQuote(b, val) + utils.WriteByte(b, ')') + default: + panic(s.UnsupportedFormat(u.PType(), `sp`, f)) + } +} + +func (u *UriValue) ToKey(b *bytes.Buffer) { + b.WriteByte(1) + b.WriteByte(HkUri) + b.Write([]byte(u.URL().String())) +} + +func (u *UriValue) PType() px.Type { + return &u.UriType +} + +func (u *UriValue) URL() *url.URL { + return u.parameters.(*url.URL) +} diff --git a/vendor/github.com/lyraproj/pcore/types/varianttype.go b/vendor/github.com/lyraproj/pcore/types/varianttype.go new file mode 100644 index 0000000..bc741a6 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/varianttype.go @@ -0,0 +1,179 @@ +package types + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type VariantType struct { + types []px.Type +} + +var VariantMetaType px.ObjectType + +func init() { + VariantMetaType = newObjectType(`Pcore::VariantType`, + `Pcore::AnyType { + attributes => { + types => Array[Type] + } +}`, func(ctx px.Context, args []px.Value) px.Value { + return newVariantType2(args...) 
+ }) +} + +func DefaultVariantType() *VariantType { + return variantTypeDefault +} + +func NewVariantType(types ...px.Type) px.Type { + switch len(types) { + case 0: + return DefaultVariantType() + case 1: + return types[0] + default: + return &VariantType{types} + } +} + +func newVariantType2(args ...px.Value) px.Type { + return newVariantType3(WrapValues(args)) +} + +func newVariantType3(args px.List) px.Type { + var variants []px.Type + var failIdx int + + switch args.Len() { + case 0: + return DefaultVariantType() + case 1: + first := args.At(0) + switch first := first.(type) { + case px.Type: + return first + case *Array: + return newVariantType3(first) + default: + panic(illegalArgumentType(`Variant[]`, 0, `Type or Array[Type]`, args.At(0))) + } + default: + variants, failIdx = toTypes(args) + if failIdx >= 0 { + panic(illegalArgumentType(`Variant[]`, failIdx, `Type`, args.At(failIdx))) + } + } + return &VariantType{variants} +} + +func (t *VariantType) Accept(v px.Visitor, g px.Guard) { + v(t) + for _, c := range t.types { + c.Accept(v, g) + } +} + +func (t *VariantType) Equals(o interface{}, g px.Guard) bool { + ot, ok := o.(*VariantType) + return ok && len(t.types) == len(ot.types) && px.IncludesAll(t.types, ot.types, g) +} + +func (t *VariantType) Generic() px.Type { + return &VariantType{UniqueTypes(alterTypes(t.types, generalize))} +} + +func (t *VariantType) Default() px.Type { + return variantTypeDefault +} + +func (t *VariantType) IsAssignable(o px.Type, g px.Guard) bool { + for _, v := range t.types { + if GuardedIsAssignable(v, o, g) { + return true + } + } + return false +} + +func (t *VariantType) IsInstance(o px.Value, g px.Guard) bool { + for _, v := range t.types { + if GuardedIsInstance(v, o, g) { + return true + } + } + return false +} + +func (t *VariantType) MetaType() px.ObjectType { + return VariantMetaType +} + +func (t *VariantType) Name() string { + return `Variant` +} + +func (t *VariantType) Parameters() []px.Value { + if len(t.types) == 0 { + return px.EmptyValues + } + ps := make([]px.Value, len(t.types)) + for idx, t := range t.types { + ps[idx] = t + } + return ps +} + +func (t *VariantType) Resolve(c px.Context) px.Type { + rts := make([]px.Type, len(t.types)) + for i, ts := range t.types { + rts[i] = resolve(c, ts) + } + t.types = rts + return t +} + +func (t *VariantType) CanSerializeAsString() bool { + for _, v := range t.types { + if !canSerializeAsString(v) { + return false + } + } + return true +} + +func (t *VariantType) SerializationString() string { + return t.String() +} + +func (t *VariantType) String() string { + return px.ToString2(t, None) +} + +func (t *VariantType) Types() []px.Type { + return t.types +} + +func (t *VariantType) allAssignableTo(o px.Type, g px.Guard) bool { + return allAssignableTo(t.types, o, g) +} + +func (t *VariantType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + TypeToString(t, b, s, g) +} + +func (t *VariantType) PType() px.Type { + return &TypeType{t} +} + +var variantTypeDefault = &VariantType{types: []px.Type{}} + +func allAssignableTo(types []px.Type, o px.Type, g px.Guard) bool { + for _, v := range types { + if !GuardedIsAssignable(o, v, g) { + return false + } + } + return true +} diff --git a/vendor/github.com/lyraproj/pcore/types/zinit.go b/vendor/github.com/lyraproj/pcore/types/zinit.go new file mode 100644 index 0000000..0abd2b7 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/types/zinit.go @@ -0,0 +1,121 @@ +package types + +import ( + "reflect" + "regexp" + "time" + + 
"github.com/lyraproj/pcore/px" +) + +// This init function must be run last in the type package. Hence the file name +func init() { + primitivePTypes = map[reflect.Kind]px.Type{ + reflect.String: DefaultStringType(), + reflect.Int: DefaultIntegerType(), + reflect.Int8: integerType8, + reflect.Int16: integerType16, + reflect.Int32: integerType32, + reflect.Int64: DefaultIntegerType(), + reflect.Uint: integerTypeU64, + reflect.Uint8: integerTypeU8, + reflect.Uint16: integerTypeU16, + reflect.Uint32: integerTypeU32, + reflect.Uint64: integerTypeU64, + reflect.Float32: floatType32, + reflect.Float64: DefaultFloatType(), + reflect.Bool: DefaultBooleanType(), + } + + coreTypes = map[string]px.Type{ + `Annotation`: DefaultAnnotationType(), + `Any`: DefaultAnyType(), + `Array`: DefaultArrayType(), + `Binary`: DefaultBinaryType(), + `Boolean`: DefaultBooleanType(), + `Callable`: DefaultCallableType(), + `Collection`: DefaultCollectionType(), + `Data`: DefaultDataType(), + `Default`: DefaultDefaultType(), + `Enum`: DefaultEnumType(), + `Float`: DefaultFloatType(), + `Hash`: DefaultHashType(), + `Init`: DefaultInitType(), + `Integer`: DefaultIntegerType(), + `Iterable`: DefaultIterableType(), + `Iterator`: DefaultIteratorType(), + `Like`: DefaultLikeType(), + `Notundef`: DefaultNotUndefType(), + `NotUndef`: DefaultNotUndefType(), + `Numeric`: DefaultNumericType(), + `Optional`: DefaultOptionalType(), + `Object`: DefaultObjectType(), + `Pattern`: DefaultPatternType(), + `Regexp`: DefaultRegexpType(), + `RegExp`: DefaultRegexpType(), + `Richdata`: DefaultRichDataType(), + `RichData`: DefaultRichDataType(), + `Runtime`: DefaultRuntimeType(), + `Scalardata`: DefaultScalarDataType(), + `ScalarData`: DefaultScalarDataType(), + `Scalar`: DefaultScalarType(), + `Semver`: DefaultSemVerType(), + `SemVer`: DefaultSemVerType(), + `Semverrange`: DefaultSemVerRangeType(), + `SemverRange`: DefaultSemVerRangeType(), + `SemVerRange`: DefaultSemVerRangeType(), + `Sensitive`: DefaultSensitiveType(), + `String`: DefaultStringType(), + `Struct`: DefaultStructType(), + `Timespan`: DefaultTimespanType(), + `TimeSpan`: DefaultTimespanType(), + `Timestamp`: DefaultTimestampType(), + `TimeStamp`: DefaultTimestampType(), + `Tuple`: DefaultTupleType(), + `Type`: DefaultTypeType(), + `Typealias`: DefaultTypeAliasType(), + `TypeAlias`: DefaultTypeAliasType(), + `Typereference`: DefaultTypeReferenceType(), + `TypeReference`: DefaultTypeReferenceType(), + `Typeset`: DefaultTypeSetType(), + `TypeSet`: DefaultTypeSetType(), + `Undef`: DefaultUndefType(), + `Unit`: DefaultUnitType(), + `Uri`: DefaultUriType(), + `URI`: DefaultUriType(), + `Variant`: DefaultVariantType(), + } + + wellKnown = map[reflect.Type]px.Type{ + reflect.TypeOf(&Array{}): DefaultArrayType(), + reflect.TypeOf((*px.List)(nil)).Elem(): DefaultArrayType(), + reflect.TypeOf(&Binary{}): DefaultBinaryType(), + reflect.TypeOf(floatValue(0.0)): DefaultFloatType(), + reflect.TypeOf((*px.Float)(nil)).Elem(): DefaultFloatType(), + reflect.TypeOf(&Hash{}): DefaultHashType(), + reflect.TypeOf((*px.OrderedMap)(nil)).Elem(): DefaultHashType(), + reflect.TypeOf(integerValue(0)): DefaultIntegerType(), + reflect.TypeOf((*px.Integer)(nil)).Elem(): DefaultIntegerType(), + reflect.TypeOf(®exp.Regexp{}): DefaultRegexpType(), + reflect.TypeOf(&Regexp{}): DefaultRegexpType(), + reflect.TypeOf(&SemVer{}): DefaultSemVerType(), + reflect.TypeOf(&Sensitive{}): DefaultSensitiveType(), + reflect.TypeOf(stringValue(``)): DefaultStringType(), + reflect.TypeOf((*px.StringValue)(nil)).Elem(): 
DefaultStringType(), + reflect.TypeOf(Timespan(0)): DefaultTimespanType(), + reflect.TypeOf(time.Duration(0)): DefaultTimespanType(), + reflect.TypeOf(time.Time{}): DefaultTimestampType(), + reflect.TypeOf(&Timestamp{}): DefaultTimestampType(), + evalValueType: DefaultAnyType(), + reflect.TypeOf((*px.PuppetObject)(nil)).Elem(): DefaultObjectType(), + reflect.TypeOf((*px.Object)(nil)).Elem(): DefaultObjectType(), + evalObjectTypeType: ObjectMetaType, + reflect.TypeOf(&TypeType{}): DefaultTypeType(), + evalTypeType: DefaultTypeType(), + reflect.TypeOf(&typeSet{}): TypeSetMetaType, + evalTypeSetType: TypeSetMetaType, + reflect.TypeOf((*px.TypedName)(nil)).Elem(): TypedNameMetaType, + reflect.TypeOf(&UndefValue{}): DefaultUndefType(), + reflect.TypeOf(&UriValue{}): DefaultUriType(), + } +} diff --git a/vendor/github.com/lyraproj/pcore/utils/pow.go b/vendor/github.com/lyraproj/pcore/utils/pow.go new file mode 100644 index 0000000..799d038 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/utils/pow.go @@ -0,0 +1,25 @@ +package utils + +func Int64Pow(base, exp int64) int64 { + if base == 0 || exp <= 0 { + return 0 + } + if base == 1 { + return 1 + } + result := int64(1) + if base < 0 { + base = -base + if exp&1 == 1 { + result = -result + } + } + for exp > 0 { + if exp&1 == 1 { + result *= base + } + base *= base + exp >>= 1 + } + return result +} diff --git a/vendor/github.com/lyraproj/pcore/utils/reader.go b/vendor/github.com/lyraproj/pcore/utils/reader.go new file mode 100644 index 0000000..21a531a --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/utils/reader.go @@ -0,0 +1,72 @@ +package utils + +import ( + "unicode/utf8" +) + +type StringReader struct { + p int + l int + c int + s string +} + +func NewStringReader(s string) *StringReader { + return &StringReader{p: 0, l: 1, c: 0, s: s} +} + +func (r *StringReader) Next() rune { + if r.p >= len(r.s) { + if r.p == len(r.s) { + r.p++ + r.c++ + } + return 0 + } + c := rune(r.s[r.p]) + if c < utf8.RuneSelf { + r.p++ + if c == '\n' { + r.l++ + r.c = 1 + } + r.c++ + } else { + var size int + c, size = utf8.DecodeRuneInString(r.s[r.p:]) + if c != utf8.RuneError { + r.p += size + r.c++ + } + } + return c +} + +func (r *StringReader) Peek() rune { + if r.p >= len(r.s) { + return 0 + } + c := rune(r.s[r.p]) + if c >= utf8.RuneSelf { + c, _ = utf8.DecodeRuneInString(r.s[r.p:]) + } + return c +} + +func (r *StringReader) Column() int { + return r.c +} + +func (r *StringReader) Line() int { + return r.l +} + +func (r *StringReader) Pos() int { + return r.p +} + +func (r *StringReader) Rewind() { + r.p = 0 + r.l = 1 + r.c = 1 +} diff --git a/vendor/github.com/lyraproj/pcore/utils/strings.go b/vendor/github.com/lyraproj/pcore/utils/strings.go new file mode 100644 index 0000000..6b19fd7 --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/utils/strings.go @@ -0,0 +1,271 @@ +package utils + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strings" + "unicode/utf8" +) + +func AllStrings(strings []string, predicate func(str string) bool) bool { + for _, v := range strings { + if !predicate(v) { + return false + } + } + return true +} + +// ContainsString returns true if strings contains str +func ContainsString(strings []string, str string) bool { + if str != `` { + for _, v := range strings { + if v == str { + return true + } + } + } + return false +} + +// ContainsAllStrings returns true if strings contains all entries in other +func ContainsAllStrings(strings []string, other []string) bool { + for _, str := range other { + if !ContainsString(strings, 
str) { + return false + } + } + return true +} + +// IsDecimalInteger returns true if the string represents a base 10 integer +func IsDecimalInteger(s string) bool { + if len(s) > 0 { + for _, c := range s { + if c < '0' || c > '9' { + return false + } + } + return true + } + return false +} + +// MatchesString returns true if at least one of the regexps matches str +func MatchesString(regexps []*regexp.Regexp, str string) bool { + if str != `` { + for _, v := range regexps { + if v.MatchString(str) { + return true + } + } + } + return false +} + +// MatchesAllStrings returns true if all strings are matched by at least one of the regexps +func MatchesAllStrings(regexps []*regexp.Regexp, strings []string) bool { + for _, str := range strings { + if !MatchesString(regexps, str) { + return false + } + } + return true +} + +// Unique creates a new slice where all duplicate strings in the given slice have been removed. Order is retained +func Unique(strings []string) []string { + top := len(strings) + if top < 2 { + return strings + } + exists := make(map[string]bool, top) + result := make([]string, 0, top) + + for _, v := range strings { + if !exists[v] { + exists[v] = true + result = append(result, v) + } + } + return result +} + +func CapitalizeSegment(segment string) string { + b := bytes.NewBufferString(``) + capitalizeSegment(b, segment) + return b.String() +} + +func capitalizeSegment(b *bytes.Buffer, segment string) { + _, s := utf8.DecodeRuneInString(segment) + if s > 0 { + if s == len(segment) { + b.WriteString(strings.ToUpper(segment)) + } else { + b.WriteString(strings.ToUpper(segment[:s])) + b.WriteString(strings.ToLower(segment[s:])) + } + } +} + +var ColonSplit = regexp.MustCompile(`::`) + +func CapitalizeSegments(segment string) string { + segments := ColonSplit.Split(segment, -1) + top := len(segments) + if top > 0 { + b := bytes.NewBufferString(``) + capitalizeSegment(b, segments[0]) + for idx := 1; idx < top; idx++ { + b.WriteString(`::`) + capitalizeSegment(b, segments[idx]) + } + return b.String() + } + return `` +} + +func RegexpQuote(b io.Writer, str string) { + WriteByte(b, '/') + for _, c := range str { + switch c { + case '\t': + WriteString(b, `\t`) + case '\n': + WriteString(b, `\n`) + case '\r': + WriteString(b, `\r`) + case '/': + WriteString(b, `\/`) + case '\\': + WriteString(b, `\\`) + default: + if c < 0x20 { + _, err := fmt.Fprintf(b, `\u{%X}`, c) + if err != nil { + panic(err) + } + } else { + WriteRune(b, c) + } + } + } + WriteByte(b, '/') +} + +func PuppetQuote(w io.Writer, str string) { + r := NewStringReader(str) + b, ok := w.(*bytes.Buffer) + if !ok { + b = bytes.NewBufferString(``) + defer func() { + WriteString(w, b.String()) + }() + } + begin := b.Len() + + WriteByte(b, '\'') + escaped := false + for c := r.Next(); c != 0; c = r.Next() { + if c < 0x20 { + r.Rewind() + b.Truncate(begin) + puppetDoubleQuote(r, b) + return + } + + if escaped { + WriteByte(b, '\\') + WriteRune(b, c) + escaped = false + continue + } + + switch c { + case '\'': + WriteString(b, `\'`) + case '\\': + escaped = true + default: + WriteRune(b, c) + } + } + if escaped { + WriteByte(b, '\\') + } + WriteByte(b, '\'') +} + +func puppetDoubleQuote(r *StringReader, b io.Writer) { + WriteByte(b, '"') + for c := r.Next(); c != 0; c = r.Next() { + switch c { + case '\t': + WriteString(b, `\t`) + case '\n': + WriteString(b, `\n`) + case '\r': + WriteString(b, `\r`) + case '"': + WriteString(b, `\"`) + case '\\': + WriteString(b, `\\`) + case '$': + WriteString(b, `\$`) + default: + if c < 
0x20 { + _, err := fmt.Fprintf(b, `\u{%X}`, c) + if err != nil { + panic(err) + } + } else { + WriteRune(b, c) + } + } + } + WriteByte(b, '"') +} + +func Fprintf(b io.Writer, format string, args ...interface{}) { + _, err := fmt.Fprintf(b, format, args...) + if err != nil { + panic(err) + } +} + +func Fprintln(b io.Writer, args ...interface{}) { + _, err := fmt.Fprintln(b, args...) + if err != nil { + panic(err) + } +} + +func WriteByte(b io.Writer, v byte) { + _, err := b.Write([]byte{v}) + if err != nil { + panic(err) + } +} + +func WriteRune(b io.Writer, v rune) { + if v < utf8.RuneSelf { + WriteByte(b, byte(v)) + } else { + buf := make([]byte, utf8.UTFMax) + n := utf8.EncodeRune(buf, v) + _, err := b.Write(buf[:n]) + if err != nil { + panic(err) + } + } +} + +func WriteString(b io.Writer, s string) { + _, err := io.WriteString(b, s) + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/lyraproj/pcore/yaml/unmarshal.go b/vendor/github.com/lyraproj/pcore/yaml/unmarshal.go new file mode 100644 index 0000000..9da291e --- /dev/null +++ b/vendor/github.com/lyraproj/pcore/yaml/unmarshal.go @@ -0,0 +1,45 @@ +package yaml + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + ym "gopkg.in/yaml.v2" +) + +func Unmarshal(c px.Context, data []byte) px.Value { + ms := make(ym.MapSlice, 0) + err := ym.Unmarshal([]byte(data), &ms) + if err != nil { + var itm interface{} + err2 := ym.Unmarshal([]byte(data), &itm) + if err2 != nil { + panic(px.Error(px.ParseError, issue.H{`language`: `YAML`, `detail`: err.Error()})) + } + return wrapValue(c, itm) + } + return wrapSlice(c, ms) +} + +func wrapSlice(c px.Context, ms ym.MapSlice) px.Value { + es := make([]*types.HashEntry, len(ms)) + for i, me := range ms { + es[i] = types.WrapHashEntry(wrapValue(c, me.Key), wrapValue(c, me.Value)) + } + return types.WrapHash(es) +} + +func wrapValue(c px.Context, v interface{}) px.Value { + switch v := v.(type) { + case ym.MapSlice: + return wrapSlice(c, v) + case []interface{}: + vs := make([]px.Value, len(v)) + for i, y := range v { + vs[i] = wrapValue(c, y) + } + return types.WrapValues(vs) + default: + return px.Wrap(c, v) + } +} diff --git a/vendor/github.com/lyraproj/semver/semver/version.go b/vendor/github.com/lyraproj/semver/semver/version.go new file mode 100644 index 0000000..0be1fa1 --- /dev/null +++ b/vendor/github.com/lyraproj/semver/semver/version.go @@ -0,0 +1,324 @@ +package semver + +import ( + "bytes" + "fmt" + "io" + "math" + "regexp" + "strconv" + "strings" +) + +// A Version represents a version as specified in "Semantic Versioning 2.0". The document +// can be found at https://semver.org +type Version interface { + fmt.Stringer + + // CompareTo compares the receiver to another version. Return zero if the versions are equal, + // a negative integer if the receiver is less than the given version, and a positive + // integer if the receiver is greater than the given version. + // + // The build suffix is not included in the comparison. + CompareTo(Version) int + + // Equals tests if the receiver is equal to another version. + // + // In contrast to CompareTo, this method will include build prefixes in the comparison. + Equals(other Version) bool + + // TripletEquals returns true if the major, minor, and patch numbers are equal. + TripletEquals(ov Version) bool + + // IsStable returns true when the version has no pre-release suffix. 
+ IsStable() bool + + // Major returns the major version number + Major() int + + // Minor returns the minor version number + Minor() int + + // Patch returns the patch version number + Patch() int + + // PreRelease returns the pre-release suffix + PreRelease() string + + // Build returns the build suffix + Build() string + + // NextPatch returns a copy of this version where the patch number is + // incremented by one and the pre-release and build suffixes are stripped + // off. + NextPatch() Version + + // ToStable returns a copy of this version where the pre-release and build + // suffixes are stripped off. + ToStable() Version + + // ToString writes the string representation of this version onto the given + // Writer. + ToString(io.Writer) +} + +type version struct { + major int + minor int + patch int + preRelease []interface{} + build []interface{} +} + +var minPrereleases []interface{} + +var vPRPart = `(?:0|[1-9][0-9]*|[0-9]*[A-Za-z-]+[0-9A-Za-z-]*)` +var vPRParts = vPRPart + `(?:\.` + vPRPart + `)*` +var vPart = `[0-9A-Za-z-]+` +var vParts = vPart + `(?:\.` + vPart + `)*` +var vPrerelase = `(?:-(` + vPRParts + `))?` +var vBuild = `(?:\+(` + vParts + `))?` +var vQualifier = vPrerelase + vBuild +var vNR = `(0|[1-9][0-9]*)` + +var vPRPartsPattern = regexp.MustCompile(`\A` + vPRParts + `\z`) +var vPartsPattern = regexp.MustCompile(`\A` + vParts + `\z`) + +var Max Version = &version{math.MaxInt64, math.MaxInt64, math.MaxInt64, nil, nil} +var Min = &version{0, 0, 0, minPrereleases, nil} +var Zero = &version{0, 0, 0, nil, nil} +var VersionPattern = regexp.MustCompile(`\A` + vNR + `\.` + vNR + `\.` + vNR + vQualifier + `\z`) + +func NewVersion(major, minor, patch int) (Version, error) { + return NewVersion3(major, minor, patch, ``, ``) +} + +func NewVersion2(major, minor, patch int, preRelease string) (Version, error) { + return NewVersion3(major, minor, patch, preRelease, ``) +} + +func NewVersion3(major, minor, patch int, preRelease string, build string) (Version, error) { + if major < 0 || minor < 0 || patch < 0 { + return nil, fmt.Errorf(`negative numbers not accepted in version`) + } + ps, err := splitParts(`pre-release`, preRelease, true) + if err != nil { + return nil, err + } + bs, err := splitParts(`build`, build, false) + if err != nil { + return nil, err + } + return &version{major, minor, patch, ps, bs}, nil +} + +func MustParseVersion(str string) Version { + v, err := ParseVersion(str) + if err != nil { + panic(err) + } + return v +} + +func ParseVersion(str string) (version Version, err error) { + if group := VersionPattern.FindStringSubmatch(str); group != nil { + major, _ := strconv.Atoi(group[1]) + minor, _ := strconv.Atoi(group[2]) + patch, _ := strconv.Atoi(group[3]) + return NewVersion3(major, minor, patch, group[4], group[5]) + } + return nil, fmt.Errorf(`the string '%s' does not represent a valid semantic version`, str) +} + +func (v *version) Build() string { + if v.build == nil { + return `` + } + bld := bytes.NewBufferString(``) + writeParts(v.build, bld) + return bld.String() +} + +func (v *version) CompareTo(other Version) int { + o := other.(*version) + cmp := v.major - o.major + if cmp == 0 { + cmp = v.minor - o.minor + if cmp == 0 { + cmp = v.patch - o.patch + if cmp == 0 { + cmp = comparePreReleases(v.preRelease, o.preRelease) + } + } + } + return cmp +} + +func (v *version) Equals(other Version) bool { + ov := other.(*version) + return v.tripletEquals(ov) && equalSegments(v.preRelease, ov.preRelease) && equalSegments(v.build, ov.build) +} + +func (v 
*version) IsStable() bool { + return v.preRelease == nil +} + +func (v *version) Major() int { + return v.major +} + +func (v *version) Minor() int { + return v.minor +} + +func (v *version) NextPatch() Version { + return &version{v.major, v.minor, v.patch + 1, nil, nil} +} + +func (v *version) Patch() int { + return v.patch +} + +func (v *version) PreRelease() string { + if v.preRelease == nil { + return `` + } + bld := bytes.NewBufferString(``) + writeParts(v.preRelease, bld) + return bld.String() +} + +func (v *version) String() string { + bld := bytes.NewBufferString(``) + v.ToString(bld) + return bld.String() +} + +func (v *version) ToStable() Version { + return &version{v.major, v.minor, v.patch, nil, v.build} +} + +func (v *version) ToString(bld io.Writer) { + fmt.Fprintf(bld, `%d.%d.%d`, v.major, v.minor, v.patch) + if v.preRelease != nil { + bld.Write([]byte(`-`)) + writeParts(v.preRelease, bld) + } + if v.build != nil { + bld.Write([]byte(`+`)) + writeParts(v.build, bld) + } +} + +func (v *version) TripletEquals(other Version) bool { + return v.tripletEquals(other.(*version)) +} + +func (v *version) tripletEquals(ov *version) bool { + return v.major == ov.major && v.minor == ov.minor && v.patch == ov.patch +} + +func writeParts(parts []interface{}, bld io.Writer) { + top := len(parts) + if top > 0 { + fmt.Fprintf(bld, `%v`, parts[0]) + for idx := 1; idx < top; idx++ { + bld.Write([]byte(`.`)) + fmt.Fprintf(bld, `%v`, parts[idx]) + } + } +} + +func comparePreReleases(p1, p2 []interface{}) int { + if p1 == nil { + if p2 == nil { + return 0 + } + return 1 + } + if p2 == nil { + return -1 + } + + p1Size := len(p1) + p2Size := len(p2) + commonMax := p1Size + if p1Size > p2Size { + commonMax = p2Size + } + for idx := 0; idx < commonMax; idx++ { + v1 := p1[idx] + v2 := p2[idx] + if i1, ok := v1.(int); ok { + if i2, ok := v2.(int); ok { + cmp := i1 - i2 + if cmp != 0 { + return cmp + } + continue + } + return -1 + } + + if _, ok := v2.(int); ok { + return 1 + } + + cmp := strings.Compare(v1.(string), v2.(string)) + if cmp != 0 { + return cmp + } + } + return p1Size - p2Size +} + +func equalSegments(a, b []interface{}) bool { + if a == nil { + if b == nil { + return true + } + return false + } + top := len(a) + if b == nil || top != len(b) { + return false + } + for idx := 0; idx < top; idx++ { + if a[idx] != b[idx] { + return false + } + } + return true +} + +func mungePart(part string) interface{} { + if i, err := strconv.ParseInt(part, 10, 64); err == nil { + return int(i) + } + return part +} + +func splitParts(tag, str string, stringToInt bool) ([]interface{}, error) { + if str == `` { + return nil, nil + } + + pattern := vPartsPattern + if stringToInt { + pattern = vPRPartsPattern + } + if !pattern.MatchString(str) { + return nil, fmt.Errorf(`Illegal characters in %s`, tag) + } + + parts := strings.Split(str, `.`) + result := make([]interface{}, len(parts)) + for idx, sp := range parts { + if stringToInt { + result[idx] = mungePart(sp) + } else { + result[idx] = sp + } + } + return result, nil +} diff --git a/vendor/github.com/lyraproj/semver/semver/versionrange.go b/vendor/github.com/lyraproj/semver/semver/versionrange.go new file mode 100644 index 0000000..db5941d --- /dev/null +++ b/vendor/github.com/lyraproj/semver/semver/versionrange.go @@ -0,0 +1,1073 @@ +package semver + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strconv" +) + +// A VersionRange represents a range of semantic versions. It conforms to the specification +// used for npm. 
See https://docs.npmjs.com/misc/semver for a full description +type VersionRange interface { + fmt.Stringer + // EndVersion returns the ending version in the range if that is possible to determine, or nil otherwise + EndVersion() Version + + // Equals compares the receiver to another range and returns true if the ranges are equal + Equals(VersionRange) bool + + // Includes returns true if the given version is included in the receiver range + Includes(v Version) bool + + // Intersection returns a new range that is the intersection of the receiver and the given range + Intersection(other VersionRange) VersionRange + + // IsAsRestrictiveAs returns true if the receiver is equally or more restrictive than the given range + IsAsRestrictiveAs(other VersionRange) bool + + // IsExcludeEnd returns true unless the end version is included in the range + IsExcludeEnd() bool + + // IsExcludeStart returns true unless the start version is included in the range + IsExcludeStart() bool + + // Merge returns a new range that will include all versions included by the receiver + // plus all versions included by the given range + Merge(or VersionRange) VersionRange + + // NormalizedString returns the canonical string representation of this range. E.g. + // + // "2.x" normalized becomes ">=2.0.0 <3.0.0" + NormalizedString() string + + // StartVersion returns the starting version in the range if that is possible to determine, or nil otherwise + StartVersion() Version + + // ToNormalizedString writes the normalized string onto the given Writer + ToNormalizedString(bld io.Writer) + + // ToString writes the string representation of this range onto the given writer + ToString(bld io.Writer) +} + +type abstractRange interface { + asLowerBound() abstractRange + asUpperBound() abstractRange + equals(or abstractRange) bool + includes(v Version) bool + isAbove(v Version) bool + isBelow(v Version) bool + isExcludeStart() bool + isExcludeEnd() bool + isLowerBound() bool + isUpperBound() bool + start() Version + end() Version + testPrerelease(v Version) bool + ToString(bld io.Writer) + } + +type simpleRange struct { + Version + } + +type startEndRange struct { + startCompare abstractRange + endCompare abstractRange + } + +type eqRange struct { + simpleRange + } + +type gtRange struct { + simpleRange + } + +type gtEqRange struct { + simpleRange + } + +type ltRange struct { + simpleRange + } + +type ltEqRange struct { + simpleRange + } + +type versionRange struct { + originalString string + ranges []abstractRange +} + + +var nr = `0|[1-9][0-9]*` +var xr = `(x|X|\*|` + nr + `)` + +var part = `(?:[0-9A-Za-z-]+)` +var parts = part + `(?:\.` + part + `)*` +var qualifier = `(?:-(` + parts + `))?(?:\+(` + parts + `))?` + +var partial = xr + `(?:\.` + xr + `(?:\.` + xr + qualifier + `)?)?` + +var simple = `([<>=~^]|<=|>=|~>|~=)?(?:` + partial + `)` +var simplePattern = regexp.MustCompile(`\A` + simple + `\z`) + +var orSplit = regexp.MustCompile(`\s*\|\|\s*`) +var simpleSplit = regexp.MustCompile(`\s+`) + +var opWsPattern = regexp.MustCompile(`([><=~^])(?:\s+|\s*v)`) + +var hyphen = `(?:` + partial + `)\s+-\s+(?:` + partial + `)` +var hyphenPattern = regexp.MustCompile(`\A` + hyphen + `\z`) + +var highestLb = &gtRange{simpleRange{Max}} +var lowestLb = &gtEqRange{simpleRange{Min}} +var lowestUb = &ltRange{simpleRange{Min}} + +var MatchAll VersionRange = &versionRange{`*`, []abstractRange{lowestLb}} +var MatchNone VersionRange = &versionRange{`<0.0.0`, []abstractRange{lowestUb}} + +func ExactVersionRange(v Version) VersionRange { + return 
&versionRange{``, []abstractRange{&eqRange{simpleRange{v}}}} +} + +func FromVersions(start Version, excludeStart bool, end Version, excludeEnd bool) VersionRange { + var as abstractRange + if excludeStart { + as = >Range{simpleRange{start}} + } else { + as = >EqRange{simpleRange{start}} + } + var ae abstractRange + if excludeEnd { + ae = <Range{simpleRange{end}} + } else { + ae = <EqRange{simpleRange{end}} + } + return newVersionRange(``, []abstractRange{as, ae}) +} + +func MustParseVersionRange(str string) VersionRange { + v, err := ParseVersionRange(str) + if err != nil { + panic(err) + } + return v +} + +func ParseVersionRange(vr string) (result VersionRange, err error) { + if vr == `` { + return nil, nil + } + + vr = opWsPattern.ReplaceAllString(vr, `$1`) + rangeStrings := orSplit.Split(vr, -1) + ranges := make([]abstractRange, 0, len(rangeStrings)) + if len(rangeStrings) == 0 { + return nil, fmt.Errorf(`'%s' is not a valid version range`, vr) + } + for _, rangeStr := range rangeStrings { + if rangeStr == `` { + ranges = append(ranges, lowestLb) + continue + } + + if m := hyphenPattern.FindStringSubmatch(rangeStr); m != nil { + e1, err := createGtEqRange(m, 1) + if err != nil { + return nil, err + } + e2, err := createGtEqRange(m, 6) + if err != nil { + return nil, err + } + ranges = append(ranges, intersection(e1, e2)) + continue + } + + var simpleRange abstractRange + for _, simple := range simpleSplit.Split(rangeStr, -1) { + m := simplePattern.FindStringSubmatch(simple) + if m == nil { + return nil, fmt.Errorf(`'%s' is not a valid version range`, simple) + } + var rng abstractRange + var err error + switch m[1] { + case `~`, `~>`: + rng, err = createTildeRange(m, 2) + case `^`: + rng, err = createCaretRange(m, 2) + case `>`: + rng, err = createGtRange(m, 2) + case `>=`: + rng, err = createGtEqRange(m, 2) + case `<`: + rng, err = createLtRange(m, 2) + case `<=`: + rng, err = createLtEqRange(m, 2) + default: + rng, err = createXRange(m, 2) + } + if err != nil { + return nil, err + } + if simpleRange == nil { + simpleRange = rng + } else { + simpleRange = intersection(simpleRange, rng) + } + } + if simpleRange != nil { + ranges = append(ranges, simpleRange) + } + } + return newVersionRange(vr, ranges), nil +} + +func (r *versionRange) EndVersion() Version { + if len(r.ranges) == 1 { + return r.ranges[0].end() + } + return nil +} + +func (r *versionRange) Equals(other VersionRange) bool { + or := other.(*versionRange) + top := len(r.ranges) + if top != len(or.ranges) { + return false + } + for idx, ar := range r.ranges { + if !ar.equals(or.ranges[idx]) { + return false + } + } + return true +} + +func (r *versionRange) Includes(v Version) bool { + if v != nil { + for _, ar := range r.ranges { + if ar.includes(v) && (v.IsStable() || ar.testPrerelease(v)) { + return true + } + } + } + return false +} + +func (r *versionRange) Intersection(other VersionRange) VersionRange { + if other != nil { + or := other.(*versionRange) + iscs := make([]abstractRange, 0) + for _, ar := range r.ranges { + for _, ao := range or.ranges { + is := intersection(ar, ao) + if is != nil { + iscs = append(iscs, is) + } + } + } + if len(iscs) > 0 { + return newVersionRange(``, iscs) + } + } + return nil +} + +func (r *versionRange) IsAsRestrictiveAs(other VersionRange) bool { +arNext: + for _, ar := range r.ranges { + for _, ao := range other.(*versionRange).ranges { + is := intersection(ar, ao) + if is != nil && asRestrictedAs(ar, ao) { + continue arNext + } + } + return false + } + return true +} + +func (r 
*versionRange) IsExcludeEnd() bool { + if len(r.ranges) == 1 { + return r.ranges[0].isExcludeEnd() + } + return false +} + +func (r *versionRange) IsExcludeStart() bool { + if len(r.ranges) == 1 { + return r.ranges[0].isExcludeStart() + } + return false +} + +func (r *versionRange) Merge(or VersionRange) VersionRange { + return newVersionRange(``, append(r.ranges, or.(*versionRange).ranges...)) +} + +func (r *versionRange) NormalizedString() string { + bld := bytes.NewBufferString(``) + r.ToNormalizedString(bld) + return bld.String() +} + +func (r *versionRange) StartVersion() Version { + if len(r.ranges) == 1 { + return r.ranges[0].start() + } + return nil +} + +func (r *versionRange) String() string { + bld := bytes.NewBufferString(``) + r.ToString(bld) + return bld.String() +} + +func (r *versionRange) ToNormalizedString(bld io.Writer) { + top := len(r.ranges) + r.ranges[0].ToString(bld) + for idx := 1; idx < top; idx++ { + io.WriteString(bld, ` || `) + r.ranges[idx].ToString(bld) + } +} + +func (r *versionRange) ToString(bld io.Writer) { + if r.originalString == `` { + r.ToNormalizedString(bld) + } else { + io.WriteString(bld, r.originalString) + } +} + +func newVersionRange(vr string, ranges []abstractRange) VersionRange { + mergeHappened := true + for len(ranges) > 1 && mergeHappened { + mergeHappened = false + result := make([]abstractRange, 0) + for len(ranges) > 1 { + unmerged := make([]abstractRange, 0) + ln := len(ranges) - 1 + x := ranges[ln] + ranges = ranges[:ln] + for _, y := range ranges { + merged := union(x, y) + if merged == nil { + unmerged = append(unmerged, y) + } else { + mergeHappened = true + x = merged + } + } + result = append([]abstractRange{x}, result...) + ranges = unmerged + } + if len(ranges) > 0 { + result = append(ranges, result...) 
+ } + ranges = result + } + if len(ranges) == 0 { + return MatchNone + } + return &versionRange{vr, ranges} +} + +func createGtEqRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestLb, nil + } + startInMatcher++ + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + minor = 0 + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + patch = 0 + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + return >EqRange{simpleRange{v}}, nil +} + +func createGtRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestLb, nil + } + startInMatcher++ + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return >EqRange{simpleRange{&version{major + 1, 0, 0, nil, nil}}}, nil + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return >EqRange{simpleRange{&version{major, minor + 1, 0, nil, nil}}}, nil + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + return >Range{simpleRange{v}}, nil +} + +func createLtEqRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestUb, nil + } + startInMatcher++ + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return <Range{simpleRange{&version{major + 1, 0, 0, nil, nil}}}, nil + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return <Range{simpleRange{&version{major, minor + 1, 0, nil, nil}}}, nil + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + return <EqRange{simpleRange{v}}, nil +} + +func createLtRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestUb, nil + } + startInMatcher++ + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + minor = 0 + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + patch = 0 + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + return <Range{simpleRange{v}}, nil +} + +func createTildeRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + return allowPatchUpdates(rxGroup, startInMatcher, true) +} + +func createCaretRange(rxGroup []string, 
startInMatcher int) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestLb, nil + } + if major == 0 { + return allowPatchUpdates(rxGroup, startInMatcher, true) + } + startInMatcher++ + return allowMinorUpdates(rxGroup, major, startInMatcher) +} + +func createXRange(rxGroup []string, startInMatcher int) (abstractRange, error) { + return allowPatchUpdates(rxGroup, startInMatcher, false) +} + +func allowPatchUpdates(rxGroup []string, startInMatcher int, tildeOrCaret bool) (abstractRange, error) { + major, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return lowestLb, nil + } + startInMatcher++ + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return &startEndRange{ + >EqRange{simpleRange{&version{major, 0, 0, nil, nil}}}, + <Range{simpleRange{&version{major + 1, 0, 0, nil, nil}}}}, nil + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + return &startEndRange{ + >EqRange{simpleRange{&version{major, minor, 0, nil, nil}}}, + <Range{simpleRange{&version{major, minor + 1, 0, nil, nil}}}}, nil + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + if tildeOrCaret { + return &startEndRange{ + >EqRange{simpleRange{v}}, + <Range{simpleRange{&version{major, minor + 1, 0, nil, nil}}}}, nil + } + return &eqRange{simpleRange{v}}, nil +} + +func allowMinorUpdates(rxGroup []string, major int, startInMatcher int) (abstractRange, error) { + minor, ok, err := xDigit(rxGroup[startInMatcher]) + if !ok { + minor = 0 + } + startInMatcher++ + patch, ok, err := xDigit(rxGroup[startInMatcher]) + if err != nil { + return nil, err + } + if !ok { + patch = 0 + } + startInMatcher++ + preRelease := rxGroup[startInMatcher] + startInMatcher++ + build := rxGroup[startInMatcher] + v, err := NewVersion3(major, minor, patch, preRelease, build) + if err != nil { + return nil, err + } + return &startEndRange{ + >EqRange{simpleRange{v}}, + <Range{simpleRange{&version{major + 1, 0, 0, nil, nil}}}}, nil +} + +func xDigit(str string) (int, bool, error) { + if str == `` || str == `x` || str == `X` || str == `*` { + return 0, false, nil + } + if i, err := strconv.ParseInt(str, 10, 64); err == nil { + return int(i), true, nil + } + return 0, false, fmt.Errorf(`illegal version triplet`) +} + +func isOverlap(ra, rb abstractRange) bool { + cmp := ra.start().CompareTo(rb.end()) + if cmp < 0 || cmp == 0 && !(ra.isExcludeStart() || rb.isExcludeEnd()) { + cmp := rb.start().CompareTo(ra.end()) + return cmp < 0 || cmp == 0 && !(rb.isExcludeStart() || ra.isExcludeEnd()) + } + return false +} + +func asRestrictedAs(ra, vr abstractRange) bool { + cmp := vr.start().CompareTo(ra.start()) + if cmp > 0 || (cmp == 0 && !ra.isExcludeStart() && vr.isExcludeStart()) { + return false + } + + cmp = vr.end().CompareTo(ra.end()) + return !(cmp < 0 || (cmp == 0 && !ra.isExcludeEnd() && vr.isExcludeEnd())) +} + +func intersection(ra, rb abstractRange) abstractRange { + cmp := ra.start().CompareTo(rb.end()) + if cmp > 0 { + return nil + } + + if cmp == 0 { + if ra.isExcludeStart() || rb.isExcludeEnd() { + return nil + } + return &eqRange{simpleRange{ra.start()}} + } + + cmp = rb.start().CompareTo(ra.end()) + if cmp > 0 { + return 
nil + } + + if cmp == 0 { + if rb.isExcludeStart() || ra.isExcludeEnd() { + return nil + } + return &eqRange{simpleRange{rb.start()}} + } + + cmp = ra.start().CompareTo(rb.start()) + var start abstractRange + if cmp < 0 { + start = rb + } else if cmp > 0 { + start = ra + } else if ra.isExcludeStart() { + start = ra + } else { + start = rb + } + + cmp = ra.end().CompareTo(rb.end()) + var end abstractRange + if cmp > 0 { + end = rb + } else if cmp < 0 { + end = ra + } else if ra.isExcludeEnd() { + end = ra + } else { + end = rb + } + + if !end.isUpperBound() { + return start + } + + if !start.isLowerBound() { + return end + } + + return &startEndRange{start.asLowerBound(), end.asUpperBound()} +} + +func fromTo(ra, rb abstractRange) abstractRange { + var startR abstractRange + if ra.isExcludeStart() { + startR = >Range{simpleRange{ra.start()}} + } else { + startR = >EqRange{simpleRange{ra.start()}} + } + var endR abstractRange + if rb.isExcludeEnd() { + endR = <Range{simpleRange{rb.end()}} + } else { + endR = <EqRange{simpleRange{rb.end()}} + } + return &startEndRange{startR, endR} +} + +func union(ra, rb abstractRange) abstractRange { + if ra.includes(rb.start()) || rb.includes(ra.start()) { + var start Version + var excludeStart bool + cmp := ra.start().CompareTo(rb.start()) + if cmp < 0 { + start = ra.start() + excludeStart = ra.isExcludeStart() + } else if cmp > 0 { + start = rb.start() + excludeStart = rb.isExcludeStart() + } else { + start = ra.start() + excludeStart = ra.isExcludeStart() && rb.isExcludeStart() + } + + var end Version + var excludeEnd bool + cmp = ra.end().CompareTo(rb.end()) + if cmp > 0 { + end = ra.end() + excludeEnd = ra.isExcludeEnd() + } else if cmp < 0 { + end = rb.end() + excludeEnd = rb.isExcludeEnd() + } else { + end = ra.end() + excludeEnd = ra.isExcludeEnd() && rb.isExcludeEnd() + } + + var startR abstractRange + if excludeStart { + startR = >Range{simpleRange{start}} + } else { + startR = >EqRange{simpleRange{start}} + } + var endR abstractRange + if excludeEnd { + endR = <Range{simpleRange{end}} + } else { + endR = <EqRange{simpleRange{end}} + } + return &startEndRange{startR, endR} + } + if ra.isExcludeStart() && rb.isExcludeStart() && ra.start().CompareTo(rb.start()) == 0 { + return fromTo(ra, rb) + } + if ra.isExcludeEnd() && !rb.isExcludeStart() && ra.end().CompareTo(rb.start()) == 0 { + return fromTo(ra, rb) + } + if rb.isExcludeEnd() && !ra.isExcludeStart() && rb.end().CompareTo(ra.start()) == 0 { + return fromTo(rb, ra) + } + if !ra.isExcludeEnd() && !rb.isExcludeStart() && ra.end().NextPatch().CompareTo(rb.start()) == 0 { + return fromTo(ra, rb) + } + if !rb.isExcludeEnd() && !ra.isExcludeStart() && rb.end().NextPatch().CompareTo(ra.start()) == 0 { + return fromTo(rb, ra) + } + return nil +} + +func (r *startEndRange) asLowerBound() abstractRange { + return r.startCompare +} + +func (r *startEndRange) asUpperBound() abstractRange { + return r.endCompare +} + +func (r *startEndRange) equals(o abstractRange) bool { + if or, ok := o.(*startEndRange); ok { + return r.startCompare.equals(or.startCompare) && r.endCompare.equals(or.endCompare) + } + return false +} + +func (r *startEndRange) includes(v Version) bool { + return r.startCompare.includes(v) && r.endCompare.includes(v) +} + +func (r *startEndRange) isAbove(v Version) bool { + return r.startCompare.isAbove(v) +} + +func (r *startEndRange) isBelow(v Version) bool { + return r.endCompare.isBelow(v) +} + +func (r *startEndRange) isExcludeStart() bool { + return r.startCompare.isExcludeStart() +} + 
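A minimal usage sketch of the VersionRange API declared earlier in this file (illustrative only, not part of the vendored sources; the import path is assumed from the vendor directory, and NewVersion3 is assumed to take major, minor and patch ints plus preRelease and build strings, matching the calls further down in this file):

package main

import (
	"fmt"

	"github.com/lyraproj/semver/semver"
)

func main() {
	// "2.x" is normalized to an explicit bounded range, as the NormalizedString docs describe.
	vr := semver.MustParseVersionRange(`2.x`)
	fmt.Println(vr.NormalizedString()) // >=2.0.0 <3.0.0

	stable, _ := semver.NewVersion3(2, 1, 0, ``, ``)
	pre, _ := semver.NewVersion3(2, 1, 0, `alpha`, ``)

	// A stable version inside the bounds is included.
	fmt.Println(vr.Includes(stable)) // true

	// A pre-release only matches when the range itself names a pre-release with the
	// same major.minor.patch triplet (see Includes and testPrerelease), mirroring npm.
	fmt.Println(vr.Includes(pre)) // false

	// Intersection narrows the range; the result prints in normalized form.
	narrower := semver.MustParseVersionRange(`>=2.3.0`)
	fmt.Println(vr.Intersection(narrower).NormalizedString()) // >=2.3.0 <3.0.0
}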
+func (r *startEndRange) isExcludeEnd() bool { + return r.endCompare.isExcludeEnd() +} + +func (r *startEndRange) isLowerBound() bool { + return r.startCompare.isLowerBound() +} + +func (r *startEndRange) isUpperBound() bool { + return r.endCompare.isUpperBound() +} + +func (r *startEndRange) start() Version { + return r.startCompare.start() +} + +func (r *startEndRange) end() Version { + return r.endCompare.end() +} + +func (r *startEndRange) testPrerelease(v Version) bool { + return r.startCompare.testPrerelease(v) || r.endCompare.testPrerelease(v) +} + +func (r *startEndRange) ToString(bld io.Writer) { + r.startCompare.ToString(bld) + bld.Write([]byte(` `)) + r.endCompare.ToString(bld) +} + +func (r *simpleRange) asLowerBound() abstractRange { + return highestLb +} + +func (r *simpleRange) asUpperBound() abstractRange { + return lowestUb +} + +func (r *simpleRange) isAbove(v Version) bool { + return false +} + +func (r *simpleRange) isBelow(v Version) bool { + return false +} + +func (r *simpleRange) isExcludeStart() bool { + return false +} + +func (r *simpleRange) isExcludeEnd() bool { + return false +} + +func (r *simpleRange) isLowerBound() bool { + return false +} + +func (r *simpleRange) isUpperBound() bool { + return false +} + +func (r *simpleRange) start() Version { + return Min +} + +func (r *simpleRange) end() Version { + return Max +} + +func (r *simpleRange) testPrerelease(v Version) bool { + return !r.IsStable() && r.TripletEquals(v) +} + +// Equals +func (r *eqRange) asLowerBound() abstractRange { + return r +} + +func (r *eqRange) asUpperBound() abstractRange { + return r +} + +func (r *eqRange) equals(o abstractRange) bool { + if or, ok := o.(*eqRange); ok { + return r.Equals(or.Version) + } + return false +} + +func (r *eqRange) includes(v Version) bool { + return r.CompareTo(v) == 0 +} + +func (r *eqRange) isAbove(v Version) bool { + return r.CompareTo(v) > 0 +} + +func (r *eqRange) isBelow(v Version) bool { + return r.CompareTo(v) < 0 +} + +func (r *eqRange) isLowerBound() bool { + return !r.Equals(Min) +} + +func (r *eqRange) isUpperBound() bool { + return !r.Equals(Max) +} + +func (r *eqRange) start() Version { + return r.Version +} + +func (r *eqRange) end() Version { + return r.Version +} + +// GreaterEquals +func (r *gtEqRange) asLowerBound() abstractRange { + return r +} + +func (r *gtEqRange) equals(o abstractRange) bool { + if or, ok := o.(*gtEqRange); ok { + return r.Equals(or.Version) + } + return false +} + +func (r *gtEqRange) includes(v Version) bool { + return r.CompareTo(v) <= 0 +} + +func (r *gtEqRange) isAbove(v Version) bool { + return r.CompareTo(v) > 0 +} + +func (r *gtEqRange) isLowerBound() bool { + return !r.Equals(Min) +} + +func (r *gtEqRange) start() Version { + return r.Version +} + +func (r *gtEqRange) ToString(bld io.Writer) { + bld.Write([]byte(`>=`)) + r.Version.ToString(bld) +} + +// Greater +func (r *gtRange) asLowerBound() abstractRange { + return r +} + +func (r *gtRange) equals(o abstractRange) bool { + if or, ok := o.(*gtRange); ok { + return r.Equals(or.Version) + } + return false +} + +func (r *gtRange) includes(v Version) bool { + return r.CompareTo(v) < 0 +} + +func (r *gtRange) isAbove(v Version) bool { + if r.IsStable() { + v = v.ToStable() + } + return r.CompareTo(v) >= 0 +} + +func (r *gtRange) isExcludeStart() bool { + return true +} + +func (r *gtRange) isLowerBound() bool { + return true +} + +func (r *gtRange) start() Version { + return r.Version +} + +func (r *gtRange) ToString(bld io.Writer) { + 
bld.Write([]byte(`>`)) + r.Version.ToString(bld) +} + +// Less Equal +func (r *ltEqRange) asUpperBound() abstractRange { + return r +} + +func (r *ltEqRange) equals(o abstractRange) bool { + if or, ok := o.(*ltEqRange); ok { + return r.Equals(or.Version) + } + return false +} + +func (r *ltEqRange) includes(v Version) bool { + return r.CompareTo(v) >= 0 +} + +func (r *ltEqRange) isBelow(v Version) bool { + return r.CompareTo(v) < 0 +} + +func (r *ltEqRange) isUpperBound() bool { + return !r.Equals(Max) +} + +func (r *ltEqRange) end() Version { + return r.Version +} + +func (r *ltEqRange) ToString(bld io.Writer) { + bld.Write([]byte(`<=`)) + r.Version.ToString(bld) +} + +// Less +func (r *ltRange) asUpperBound() abstractRange { + return r +} + +func (r *ltRange) equals(o abstractRange) bool { + if or, ok := o.(*ltRange); ok { + return r.Equals(or.Version) + } + return false +} + +func (r *ltRange) includes(v Version) bool { + return r.CompareTo(v) > 0 +} + +func (r *ltRange) isBelow(v Version) bool { + if r.IsStable() { + v = v.ToStable() + } + return r.CompareTo(v) <= 0 +} + +func (r *ltRange) isUpperBound() bool { + return true +} + +func (r *ltRange) end() Version { + return r.Version +} + +func (r *ltRange) ToString(bld io.Writer) { + bld.Write([]byte(`<`)) + r.Version.ToString(bld) +} diff --git a/vendor/github.com/lyraproj/servicesdk/LICENSE b/vendor/github.com/lyraproj/servicesdk/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/lyraproj/servicesdk/annotation/issues.go b/vendor/github.com/lyraproj/servicesdk/annotation/issues.go new file mode 100644 index 0000000..8c19b82 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/annotation/issues.go @@ -0,0 +1,27 @@ +package annotation + +import "github.com/lyraproj/issue/issue" + +const ( + AnnotatedIsNotObject = `RA_ANNOTATED_IS_NOT_OBJECT` + AttributeNotFound = `RA_ATTRIBUTE_NOT_FOUND` + ProvidedAttributeIsRequired = `RA_PROVIDED_ATTRIBUTE_IS_REQUIRED` + RelationshipKeysUnevenNumber = `RA_RELATIONSHIP_KEYS_UNEVEN_NUMBER` + RelationshipTypeIsNotObject = `RA_RELATIONSHIP_TYPE_IS_NOT_OBJECT` + NoResourceAnnotation = `RA_NO_RESOURCE_ANNOTATION` + MultipleCounterparts = `RA_MULTIPLE_COUNTERPARTS` + CounterpartNotFound = `RA_COUNTERPART_NOT_FOUND` + ContainedMoreThanOnce = `RA_CONTAINED_MORE_THAN_ONCE` +) + +func init() { + issue.Hard2(AnnotatedIsNotObject, `annotated %{type} is not an Object`, issue.HF{`attr`: issue.Label}) + issue.Hard2(AttributeNotFound, `%{type} has no attribute named %{name}`, issue.HF{`type`: issue.Label}) + issue.Hard2(ProvidedAttributeIsRequired, `provided attribute %{attr} cannot be required`, issue.HF{`attr`: issue.Label}) + issue.Hard2(RelationshipKeysUnevenNumber, `relationship type %{type} has an uneven number of keys`, issue.HF{`type`: issue.Label}) + issue.Hard2(RelationshipTypeIsNotObject, `relationship type %{type} is not an Object`, issue.HF{`type`: issue.Label}) + issue.Hard2(NoResourceAnnotation, `relationship type %{type} has no Resource annotation`, issue.HF{`type`: issue.Label}) + issue.Hard2(MultipleCounterparts, `relationship type %{type} has multiple matching counterparts for relation %{name}`, issue.HF{`type`: issue.Label}) + issue.Hard2(CounterpartNotFound, `relationship type %{type} has no matching counterpart for relation %{name}`, issue.HF{`type`: issue.Label}) + issue.Hard2(ContainedMoreThanOnce, `the type %{type} has more than one relationship of kind 'contained'`, issue.HF{`type`: issue.Label}) +} diff --git a/vendor/github.com/lyraproj/servicesdk/annotation/relationship.go b/vendor/github.com/lyraproj/servicesdk/annotation/relationship.go new file mode 100644 index 0000000..10dd5cd --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/annotation/relationship.go @@ -0,0 +1,125 @@ +package annotation + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var RelationshipType px.ObjectType + +const KindContained = `contained` +const KindContainer = `container` +const KindConsumer = `consumer` +const KindProvider = `provider` + +const CardinalityOne = `one` +const CardinalityMany = `many` +const CardinalityZeroOrOne = `zeroOrOne` + +type Relationship struct { + Type px.Type + Kind string `puppet:"type => Enum[contained, container, consumer, provider]"` + Cardinality string `puppet:"type => Enum[one, many, zeroOrOne]"` + Keys []string `puppet:"type => Array[Pcore::MemberName]"` + ReverseName *string `puppet:"type => Pcore::MemberName, value => undef"` +} + +func init() { + RelationshipType = px.NewGoType(`Lyra::Relationship`, Relationship{}) +} + +func (r *Relationship) Validate(c px.Context, typ px.ObjectType, name string) { + at, ok := r.Type.(px.ObjectType) + if !ok { + panic(px.Error(RelationshipTypeIsNotObject, issue.H{`type`: r.Type})) + } + + nk := len(r.Keys) + if nk%2 != 0 { + panic(px.Error(RelationshipKeysUnevenNumber, issue.H{`type`: r.Type})) + } + + for i := 0; i < nk; i += 2 { + assertAttribute(typ, r.Keys[i]) + assertAttribute(at, 
r.Keys[i+1]) + } + + var rs Resource + ra, ok := at.Annotations(c).Get(ResourceType) + if ok { + rs, ok = ra.(Resource) + } + if !ok { + panic(px.Error(NoResourceAnnotation, issue.H{`type`: r.Type})) + } + + var cr, v *Relationship + cs := rs.Relationships() + if r.ReverseName != nil { + if v, ok = cs[*r.ReverseName]; ok && v.IsCounterpartOf(name, typ, r) { + cr = v + } + } else { + for _, v = range cs { + if v.IsCounterpartOf(name, typ, r) { + if cr != nil { + panic(px.Error(MultipleCounterparts, issue.H{`type`: r.Type, `name`: name})) + } + cr = v + } + } + } + if cr == nil { + panic(px.Error(CounterpartNotFound, issue.H{`type`: r.Type, `name`: name})) + } +} + +func (r *Relationship) IsCounterpartOf(name string, typ px.ObjectType, o *Relationship) (match bool) { + switch r.Kind { + case KindContained: + match = o.Kind == KindContainer + case KindContainer: + match = o.Kind == KindContained + case KindConsumer: + match = o.Kind == KindProvider + case KindProvider: + match = o.Kind == KindConsumer + default: + match = false + } + + if match { + switch r.Cardinality { + case CardinalityMany: + match = o.Cardinality != CardinalityMany + case CardinalityOne: + match = o.Cardinality != CardinalityOne + case CardinalityZeroOrOne: + default: + match = false + } + } + + if match && r.ReverseName != nil { + match = name == *r.ReverseName + } + + if match { + nk := len(r.Keys) + match = nk == len(o.Keys) + if match { + // Must match in reverse + nk-- + for i, k := range r.Keys { + if k != o.Keys[nk-i] { + match = false + break + } + } + } + } + if match { + match = r.Type.Equals(typ, nil) + } + return +} diff --git a/vendor/github.com/lyraproj/servicesdk/annotation/resource.go b/vendor/github.com/lyraproj/servicesdk/annotation/resource.go new file mode 100644 index 0000000..74617ed --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/annotation/resource.go @@ -0,0 +1,269 @@ +package annotation + +import ( + "io" + "reflect" + "sort" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" +) + +var ResourceType px.ObjectType + +func init() { + ResourceType = px.NewGoObjectType(`Lyra::Resource`, reflect.TypeOf((*Resource)(nil)).Elem(), `Annotation{ + attributes => { + # immutableAttributes lists the names of the attributes that cannot be + # changed. If a difference is detected between the desired state and the + # actual state that involves immutable attributes, then the resource must + # be deleted and recreated in order to reach the desired state. + immutableAttributes => Optional[Array[Pcore::MemberName]], + + # providedAttributes lists the names of the attributes that originates + # from the resource provider and shouldn't be used in comparison between + # desired state an actual state. + providedAttributes => Optional[Array[Pcore::MemberName]], + + # relationships describe how the annotated resource type relates to + # other resource types. 
+ relationships => Optional[Hash[Pcore::MemberName, Init[Lyra::Relationship]]] + } + }`, + + func(ctx px.Context, args []px.Value) px.Value { + switch len(args) { + case 0: + return NewResource(ctx, nil, nil, nil) + case 1: + return NewResource(ctx, args[0], nil, nil) + case 2: + return NewResource(ctx, args[0], args[1], nil) + default: + return NewResource(ctx, args[0], args[1], args[3]) + } + }, + + func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*types.Hash) + return NewResource(ctx, h.Get5(`immutableAttributes`, px.Undef), h.Get5(`providedAttributes`, px.Undef), h.Get5(`relationships`, px.Undef)) + }) +} + +type Resource interface { + px.PuppetObject + + // Changed returns two booleans. + // + // The first boolean is true when the value of an attribute differs between the desired and actual + // state. Attributes listed in the providedAttributes array, for which the desired value is the + // default, are exempt from the comparison. + // + // The second boolean is true when the first is true and the attribute in question is listed in the + // immutableAttributes array. + Changed(x, y px.PuppetObject) (bool, bool) + + ImmutableAttributes() []string + + ProvidedAttributes() []string + + Relationships() map[string]*Relationship +} + +type resource struct { + immutableAttributes []string + providedAttributes []string + relationships map[string]*Relationship +} + +func NewResource(ctx px.Context, immutableAttributes, providedAttributes px.Value, relationships px.Value) Resource { + r := &resource{} + + stringsOrNil := func(v px.Value) []string { + if a, ok := v.(*types.Array); ok { + sa := px.StringElements(a) + if len(sa) > 0 { + return sa + } + } + return nil + } + + r.immutableAttributes = stringsOrNil(immutableAttributes) + r.providedAttributes = stringsOrNil(providedAttributes) + if rs, ok := relationships.(px.OrderedMap); ok { + rls := make(map[string]*Relationship, rs.Len()) + rs.EachPair(func(k, v px.Value) { + rv := px.New(ctx, RelationshipType, v).(px.Reflected).Reflect(ctx) + rls[k.String()] = rv.Addr().Interface().(*Relationship) + }) + r.relationships = rls + } + return r +} + +func (r *resource) ImmutableAttributes() []string { + return r.immutableAttributes +} + +func (r *resource) ImmutableAttributesList() px.Value { + if r.immutableAttributes == nil { + return px.Undef + } + return types.WrapStrings(r.immutableAttributes) +} + +func (r *resource) ProvidedAttributes() []string { + return r.providedAttributes +} + +func (r *resource) ProvidedAttributesList() px.Value { + if r.providedAttributes == nil { + return px.Undef + } + return types.WrapStrings(r.providedAttributes) +} + +func (r *resource) Relationships() map[string]*Relationship { + return r.relationships +} + +func (r *resource) RelationshipsMap() px.Value { + if r.relationships == nil { + return px.Undef + } + es := make([]*types.HashEntry, len(r.relationships)) + for k, v := range r.relationships { + es = append(es, types.WrapHashEntry2(k, types.NewReflectedValue(RelationshipType, reflect.ValueOf(v)))) + } + // Sort by key to get predictable order + sort.Slice(es, func(i, j int) bool { return es[i].Key().String() < es[j].Key().String() }) + return types.WrapHash(es) +} + +func (r *resource) Validate(c px.Context, annotatedType px.Annotatable) { + ot, ok := annotatedType.(px.ObjectType) + if !ok { + panic(px.Error(AnnotatedIsNotObject, issue.H{`type`: annotatedType})) + } + if r.relationships != nil { + isContained := false + for k, v := range r.relationships { + v.Validate(c, ot, k) + if v.Kind == 
KindContained { + if isContained { + panic(px.Error(ContainedMoreThanOnce, issue.H{`type`: ot})) + } + isContained = true + } + } + } + if r.immutableAttributes != nil { + for _, p := range r.immutableAttributes { + assertAttribute(ot, p) + } + } + if r.providedAttributes != nil { + for _, p := range r.providedAttributes { + a := assertAttribute(ot, p) + if a.HasValue() { + continue + } + panic(px.Error(ProvidedAttributeIsRequired, issue.H{`attr`: a})) + } + } +} + +// Changed returns two booleans. +// +// The first boolean is true when the value of an attribute differs between the desired and actual +// state. Attributes listed in the providedAttributes array, for which the desired value is the +// default, are exempt from the comparison. +// +// The second boolean is true when the first is true and the attribute in question is listed in the +// immutableAttributes array. +func (r *resource) Changed(desired, actual px.PuppetObject) (bool, bool) { + typ := desired.PType().(px.ObjectType) + for _, a := range typ.AttributesInfo().Attributes() { + dv := a.Get(desired) + if r.isProvided(a.Name()) && a.Default(dv) { + continue + } + av := a.Get(actual) + if !dv.Equals(av, nil) { + log := hclog.Default() + if r.isImmutable(a.Name()) { + log.Debug("immutable attribute mismatch", "attribute", a.Label(), "desired", dv, "actual", av) + return true, true + } + log.Debug("mutable attribute mismatch", "attribute", a.Label(), "desired", dv, "actual", av) + return true, false + } + } + return false, false +} + +func (r *resource) String() string { + return px.ToString(r) +} + +func (r *resource) Equals(other interface{}, guard px.Guard) bool { + if or, ok := other.(*resource); ok { + return px.Equals(r.providedAttributes, or.providedAttributes, guard) && px.Equals(r.relationships, or.relationships, guard) + } + return false +} + +func (r *resource) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + types.ObjectToString(r, format, bld, g) +} + +func (r *resource) PType() px.Type { + return ResourceType +} + +func (r *resource) Get(key string) (value px.Value, ok bool) { + switch key { + case `immutableAttributes`: + return r.ImmutableAttributesList(), true + case `providedAttributes`: + return r.ProvidedAttributesList(), true + case `relationships`: + return r.RelationshipsMap(), true + } + return nil, false +} + +func (r *resource) InitHash() px.OrderedMap { + es := make([]*types.HashEntry, 3) + if r.immutableAttributes != nil { + es = append(es, types.WrapHashEntry2(`immutableAttributes`, r.ImmutableAttributesList())) + } + if r.providedAttributes != nil { + es = append(es, types.WrapHashEntry2(`providedAttributes`, r.ProvidedAttributesList())) + } + if r.relationships != nil { + es = append(es, types.WrapHashEntry2(`relationships`, r.RelationshipsMap())) + } + return types.WrapHash(es) +} + +func assertAttribute(ot px.ObjectType, n string) (a px.Attribute) { + if m, ok := ot.Member(n); ok { + if a, ok = m.(px.Attribute); ok { + return + } + } + panic(px.Error(AttributeNotFound, issue.H{`type`: ot, `name`: n})) +} + +func (r *resource) isProvided(name string) bool { + return r.providedAttributes != nil && utils.ContainsString(r.providedAttributes, name) +} + +func (r *resource) isImmutable(name string) bool { + return r.immutableAttributes != nil && utils.ContainsString(r.immutableAttributes, name) +} diff --git a/vendor/github.com/lyraproj/servicesdk/grpc/client.go b/vendor/github.com/lyraproj/servicesdk/grpc/client.go new file mode 100644 index 0000000..c5972f9 --- /dev/null +++ 
b/vendor/github.com/lyraproj/servicesdk/grpc/client.go @@ -0,0 +1,137 @@ +package grpc + +import ( + "context" + "fmt" + "os/exec" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/servicepb" + "google.golang.org/grpc" + + // Ensure that service is initialized + _ "github.com/lyraproj/servicesdk/service" +) + +var handshake = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "PLUGIN_MAGIC_COOKIE", + MagicCookieValue: "7468697320697320616e20616d617a696e67206d6167696320636f6f6b69652c206e6f6d206e6f6d206e6f6d", +} + +type PluginClient struct { + plugin.NetRPCUnsupportedPlugin +} + +func (a *PluginClient) GRPCServer(*plugin.GRPCBroker, *grpc.Server) error { + return fmt.Errorf(`%T has no server implementation for rpc`, a) +} + +func (a *PluginClient) GRPCClient(ctx context.Context, broker *plugin.GRPCBroker, clientConn *grpc.ClientConn) (interface{}, error) { + return &Client{client: servicepb.NewDefinitionServiceClient(clientConn)}, nil +} + +type Client struct { + client servicepb.DefinitionServiceClient +} + +func (c *Client) Identifier(ctx px.Context) px.TypedName { + rr, err := c.client.Identity(ctx, &servicepb.EmptyRequest{}) + if err != nil { + panic(err) + } + return FromDataPB(ctx, rr).(px.TypedName) +} + +func (c *Client) Invoke(ctx px.Context, identifier, name string, arguments ...px.Value) px.Value { + rq := servicepb.InvokeRequest{ + Identifier: identifier, + Method: name, + Arguments: ToDataPB(ctx, types.WrapValues(arguments)), + } + rr, err := c.client.Invoke(ctx, &rq) + if err != nil { + panic(err) + } + result := FromDataPB(ctx, rr) + if eo, ok := result.(serviceapi.ErrorObject); ok { + panic(px.Error(InvocationError, issue.H{`identifier`: identifier, `name`: name, `code`: eo.IssueCode(), `message`: eo.Message()})) + } + return result +} + +func (c *Client) Metadata(ctx px.Context) (typeSet px.TypeSet, definitions []serviceapi.Definition) { + rr, err := c.client.Metadata(ctx, &servicepb.EmptyRequest{}) + if err != nil { + panic(err) + } + if ts := rr.GetTypeset(); ts != nil { + typeSet = FromDataPB(ctx, rr.GetTypeset()).(px.TypeSet) + } + ds := FromDataPB(ctx, rr.GetDefinitions()).(px.List) + definitions = make([]serviceapi.Definition, ds.Len()) + ds.EachWithIndex(func(d px.Value, i int) { definitions[i] = d.(serviceapi.Definition) }) + return +} + +func (c *Client) State(ctx px.Context, identifier string, parameters px.OrderedMap) px.PuppetObject { + rq := servicepb.StateRequest{Identifier: identifier, Parameters: ToDataPB(ctx, parameters)} + rr, err := c.client.State(ctx, &rq) + if err != nil { + panic(err) + } + return FromDataPB(ctx, rr).(px.PuppetObject) +} + +// Load ... 
+func Load(cmd *exec.Cmd, logger hclog.Logger) (serviceapi.Service, error) { + if logger == nil { + logger = hclog.Default() + } + + level := "warn" + switch { + case logger.IsTrace(): + level = "trace" + case logger.IsDebug(): + level = "debug" + case logger.IsInfo(): + level = "info" + case logger.IsWarn(): + level = "warn" + case logger.IsError(): + level = "error" + } + cmd.Env = append(cmd.Env, fmt.Sprintf("LYRA_LOG_LEVEL=%s", level)) + + client := plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: handshake, + Plugins: map[string]plugin.Plugin{ + "server": &PluginClient{}, + }, + Managed: true, + Cmd: cmd, + Logger: logger, + AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC}, + }) + + grpcClient, err := client.Client() + if err != nil { + hclog.Default().Error("error creating GRPC client", "error", err) + return nil, err + } + + // Request the plugin + pluginName := "server" + raw, err := grpcClient.Dispense(pluginName) + if err != nil { + hclog.Default().Error("error dispensing plugin", "plugin", pluginName, "error", err) + return nil, err + } + return raw.(serviceapi.Service), nil +} diff --git a/vendor/github.com/lyraproj/servicesdk/grpc/issues.go b/vendor/github.com/lyraproj/servicesdk/grpc/issues.go new file mode 100644 index 0000000..5a42d82 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/grpc/issues.go @@ -0,0 +1,11 @@ +package grpc + +import "github.com/lyraproj/issue/issue" + +const ( + InvocationError = `WF_INVOCATION_ERROR` +) + +func init() { + issue.Hard(InvocationError, `invocation of %{identifier} %{name} failed: %{code} %{message}`) +} diff --git a/vendor/github.com/lyraproj/servicesdk/grpc/server.go b/vendor/github.com/lyraproj/servicesdk/grpc/server.go new file mode 100644 index 0000000..c641885 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/grpc/server.go @@ -0,0 +1,146 @@ +package grpc + +import ( + "fmt" + "net/rpc" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-plugin" + "github.com/lyraproj/data-protobuf/datapb" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/proto" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/serialization" + "github.com/lyraproj/pcore/threadlocal" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/servicepb" + "golang.org/x/net/context" + "google.golang.org/grpc" +) + +type Server struct { + ctx px.Context + impl serviceapi.Service +} + +func (s *Server) Server(*plugin.MuxBroker) (interface{}, error) { + return nil, fmt.Errorf(`%T has no server implementation for rpc`, s) +} + +func (s *Server) Client(*plugin.MuxBroker, *rpc.Client) (interface{}, error) { + return nil, fmt.Errorf(`%T has no RPC client implementation for rpc`, s) +} + +func (s *Server) GRPCServer(broker *plugin.GRPCBroker, impl *grpc.Server) error { + servicepb.RegisterDefinitionServiceServer(impl, s) + return nil +} + +func (s *Server) GRPCClient(context.Context, *plugin.GRPCBroker, *grpc.ClientConn) (interface{}, error) { + return nil, fmt.Errorf(`%T has no client implementation for rpc`, s) +} + +func (s *Server) Do(doer func(c px.Context)) (publicErr *datapb.Data, err error) { + c := s.ctx.Fork() + defer func() { + if x := recover(); x != nil { + if e, ok := x.(error); ok { + err = e + if e, ok := x.(issue.Reported); ok { + publicErr = ToDataPB(c, serviceapi.ErrorFromReported(c, e)) + } + } else { + err = fmt.Errorf(`%+v`, e) + } + } + }() + threadlocal.Init() + 
threadlocal.Set(px.PuppetContextKey, c) + doer(c) + return nil, nil +} + +func (s *Server) Identity(context.Context, *servicepb.EmptyRequest) (result *datapb.Data, err error) { + _, err = s.Do(func(c px.Context) { + result = ToDataPB(c, s.impl.Identifier(c)) + }) + return +} + +func (s *Server) Invoke(_ context.Context, r *servicepb.InvokeRequest) (result *datapb.Data, err error) { + var publicErr *datapb.Data + publicErr, err = s.Do(func(c px.Context) { + wrappedArgs := FromDataPB(c, r.Arguments) + arguments := wrappedArgs.(*types.Array).AppendTo([]px.Value{}) + rrr := s.impl.Invoke( + c, + r.Identifier, + r.Method, + arguments...) + result = ToDataPB(c, rrr) + }) + if publicErr != nil { + result = publicErr + err = nil + } + return +} + +func (s *Server) Metadata(_ context.Context, r *servicepb.EmptyRequest) (result *servicepb.MetadataResponse, err error) { + _, err = s.Do(func(c px.Context) { + ts, ds := s.impl.Metadata(c) + vs := make([]px.Value, len(ds)) + for i, d := range ds { + vs[i] = d + } + result = &servicepb.MetadataResponse{Typeset: ToDataPB(c, ts), Definitions: ToDataPB(c, types.WrapValues(vs))} + }) + return +} + +func (s *Server) State(_ context.Context, r *servicepb.StateRequest) (result *datapb.Data, err error) { + _, err = s.Do(func(c px.Context) { + result = ToDataPB(c, s.impl.State(c, r.Identifier, FromDataPB(c, r.Parameters).(px.OrderedMap))) + }) + return +} + +func ToDataPB(c px.Context, v px.Value) (data *datapb.Data) { + if v == nil { + return + } + c.DoWithLoader(pcore.SystemLoader(), func() { + pc := proto.NewProtoConsumer() + serialization.NewSerializer(c, px.EmptyMap).Convert(v, pc) + data = pc.Value() + }) + return +} + +func FromDataPB(c px.Context, d *datapb.Data) px.Value { + if d == nil { + return nil + } + ds := serialization.NewDeserializer(c, px.EmptyMap) + proto.ConsumePBData(d, ds) + return ds.Value() +} + +// Serve the supplied Server as a go-plugin +func Serve(c px.Context, s serviceapi.Service) { + logger := hclog.Default() + cfg := &plugin.ServeConfig{ + HandshakeConfig: handshake, + Plugins: map[string]plugin.Plugin{ + "server": &Server{ctx: c, impl: s}, + }, + GRPCServer: plugin.DefaultGRPCServer, + Logger: logger, + } + name := s.Identifier(c).Name() + logger.Debug("Starting to serve", "name", name) + plugin.Serve(cfg) + logger.Debug("Done serving", "name", name) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/action.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/action.go new file mode 100644 index 0000000..99ca30b --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/action.go @@ -0,0 +1,170 @@ +package lyra + +import ( + "io" + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/wf" +) + +// Action is an imperative workflow step +type Action struct { + // When is a Condition in string form. Can be left empty + When string + + // Do is the actual function that is executed by this action. + // + // The function can take one optional parameter which must be a struct or pointer to a struct. The exported fields of + // that struct becomes the Parameters of the action. + // + // The function can return zero, one, or two values. If one value is returned, that value can be either an error, a + // struct, or a pointer to a struct. If two values are returned, the first value must be struct or a pointer to a + // struct and the second must be an error. 
The exported fields of a returned struct becomes the returns of the action. + Do interface{} +} + +func (a *Action) Resolve(c px.Context, n string) wf.Step { + fv := reflect.ValueOf(a.Do) + ft := fv.Type() + if ft.Kind() != reflect.Func { + panic(px.Error(NotActionFunction, issue.H{`name`: n, `type`: ft.String()})) + } + + var parameters, returns []px.Parameter + inc := ft.NumIn() + if ft.IsVariadic() || inc > 1 { + panic(badFunction(n, ft)) + } + + oc := ft.NumOut() + returnsError := false + switch oc { + case 0: + // OK. Function can evaluate and not produce anything + case 1: + // Return type must be an error or a struct + returnsError = ft.Out(0).AssignableTo(errorInterface) + if !returnsError { + returns = paramsFromStruct(c, ft.Out(0), nil) + } + case 2: + // First return type must be a struct, second must be an error + returnsError = ft.Out(1).AssignableTo(errorInterface) + if !returnsError { + panic(badFunction(n, ft)) + } + returns = paramsFromStruct(c, ft.Out(0), nil) + default: + panic(badFunction(n, ft)) + } + + if inc == 1 { + parameters = paramsFromStruct(c, ft.In(0), nil) + } + + return wf.MakeAction(n, wf.Parse(a.When), parameters, returns, &goAction{returnsError: returnsError, doer: fv}) +} + +type goAction struct { + doer reflect.Value + returnsError bool +} + +var goActionType px.ObjectType + +func init() { + goActionType = px.NewGoObjectType(`Lyra::Action`, reflect.TypeOf(&goAction{}), `{ + functions => { + do => Callable[[Hash[String,RichData]], Hash[String,RichData]] + } + }`) +} + +// Call checks if the method is 'do' and then converts the single argument OrderedMap into the go struct required by the +// go function, calls the function, and then converts the returned go struct into an OrderedMap which is returned. +// Call will return nil, false for any other method than 'do' +func (a *goAction) Call(ctx px.Context, method px.ObjFunc, args []px.Value, block px.Lambda) (px.Value, bool) { + if method.Name() != `do` { + return nil, false + } + fvType := a.doer.Type() + + parameters := args[0].(px.OrderedMap) + params := make([]reflect.Value, 0) + if fvType.NumIn() > 0 { + inType := fvType.In(0) + params = append(params, reflectParameters(ctx, inType, parameters)) + } + + result := a.doer.Call(params) + var re, rs reflect.Value + switch len(result) { + case 1: + rs = result[0] + if a.returnsError { + re = result[0] + } + case 2: + rs = result[0] + if a.returnsError { + re = result[1] + } + } + + if re.IsValid() && re.Type().AssignableTo(errorInterface) { + panic(rs.Interface()) + } + + if !rs.IsValid() { + return px.EmptyMap, true + } + + rt := rs.Type() + if rt.Kind() == reflect.Ptr { + rt = rt.Elem() + rs = rs.Elem() + } + if rt.Kind() != reflect.Struct { + panic(px.Error(NotStruct, issue.H{`type`: rt.String()})) + } + fc := rt.NumField() + entries := make([]*types.HashEntry, fc) + for i := 0; i < fc; i++ { + ft := rt.Field(i) + v := rs.Field(i) + n := issue.FirstToLower(ft.Name) + if v.IsValid() { + entries[i] = types.WrapHashEntry2(n, px.Wrap(ctx, v)) + } else { + entries[i] = types.WrapHashEntry2(n, px.Undef) + } + } + return types.WrapHash(entries), true +} + +func (a *goAction) String() string { + return px.ToString(a) +} + +func (a *goAction) Equals(value interface{}, guard px.Guard) bool { + return a == value +} + +func (a *goAction) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + types.ObjectToString(a, format, bld, g) +} + +func (a *goAction) PType() px.Type { + return goActionType +} + +func (a *goAction) Get(key string) (value px.Value, ok 
bool) { + return nil, false +} + +func (a *goAction) InitHash() px.OrderedMap { + return px.EmptyMap +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/collect.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/collect.go new file mode 100644 index 0000000..d37585d --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/collect.go @@ -0,0 +1,134 @@ +package lyra + +import ( + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/wf" +) + +// Collect is an step that applies another step repeatedly in parallel and +// collects the results into a slice returns variable. +type Collect struct { + // When is a Condition in string form. Can be left empty + When string + + // Times denotes an iteration that will happen given number of times. It is mutually exclusive + // to Each + // + // The value must be either a literal integer or the zero value of a struct with one field of + // integer type that becomes an parameters variable of the step + Times interface{} + + // Each denotes the values to iterate over. It is mutually exclusive to Times. + // + // The value must be either a literal slice or the zero value of a struct with one field of + // slice type that becomes an parameters variable of the step + Each interface{} + + // As is the variable or variables that is the parameters of each iteration. The producer + // must declare these variables as parameters. It must be either a single string, a slice + // of strings, or the zero value of a struct. + As interface{} + + // Return is the name of the slice that represents the collected data (the returns of this + // step). The element type this slice is the returns type of the producer. Can be left empty + // in which case the returns name is the same as the leaf name of the collect step. 
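	// For illustration, a hedged sketch of a complete Collect declaration; the inner
	// Resource step and its newInstance state function are assumptions and not part of
	// this package (the inner step is expected to declare `index` among its parameters):
	//
	//     &Collect{
	//         Times:  3,
	//         As:     "index",
	//         Return: "instances",
	//         Step:   &Resource{State: newInstance},
	//     }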
+ Return string + + // Step gets applied once for each iteration + Step Step +} + +func (e *Collect) Resolve(c px.Context, n string) wf.Step { + var v px.Value + var style wf.IterationStyle + if e.Times != nil { + if e.Each != nil { + panic(px.Error(MutuallyExclusiveFields, issue.H{`fields`: []string{`Times`, `Each`}})) + } + v = value(c, e.Times) + style = wf.IterationStyleTimes + } else if e.Each == nil { + panic(px.Error(RequireOneOfFields, issue.H{`fields`: []string{`Times`, `Each`}})) + } else { + v = value(c, e.Times) + style = wf.IterationStyleEach + } + + if e.Step == nil { + panic(px.Error(MissingRequiredField, issue.H{`type`: `Collect`, `name`: `Producer`})) + } + + if e.As == nil { + panic(px.Error(MissingRequiredField, issue.H{`type`: `Collect`, `name`: `As`})) + } + + return wf.MakeIterator( + n, wf.Parse(e.When), nil, nil, style, e.Step.Resolve(c, n), v, asParams(c, e.As), issue.FirstToLower(e.Return)) +} + +// value is like px.Wrap but transforms single element zero element structs into parameters +func value(c px.Context, uv interface{}) px.Value { + rv := reflect.ValueOf(uv) + switch rv.Kind() { + case reflect.Ptr: + e := rv.Elem() + if e.Len() == 1 && !e.Field(0).IsValid() { + return paramFromStruct(c, e) + } + case reflect.Struct: + if rv.Len() == 1 && !rv.Field(0).IsValid() { + return paramFromStruct(c, rv) + } + case reflect.Slice: + l := rv.Len() + es := make([]px.Value, l) + for i := 0; i < l; i++ { + es[i] = value(c, rv.Index(i)) + } + return types.WrapValues(es) + case reflect.Map: + ks := rv.MapKeys() + l := len(ks) + es := make([]*types.HashEntry, l) + for i, k := range ks { + es[i] = types.WrapHashEntry(value(c, k), value(c, rv.MapIndex(k))) + } + return types.WrapHash(es) + } + return px.Wrap(c, uv) +} + +func paramsFromString(n string) []px.Parameter { + return []px.Parameter{paramFromString(n)} +} + +func asParams(c px.Context, ns interface{}) []px.Parameter { + switch ns := ns.(type) { + case string: + return paramsFromString(ns) + case []string: + ps := make([]px.Parameter, len(ns)) + for i, n := range ns { + ps[i] = paramFromString(n) + } + return ps + default: + return paramsFromStruct(c, reflect.TypeOf(ns), nil) + } +} + +func paramFromString(n string) px.Parameter { + return px.NewParameter(issue.FirstToLower(n), types.DefaultAnyType(), nil, false) +} + +func paramFromStruct(c px.Context, s reflect.Value) px.Parameter { + params := paramsFromStruct(c, s.Type(), nil) + if len(params) != 1 { + panic(px.Error(NotOneStructField, issue.H{`type`: s.Type().String()})) + } + return params[0] +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/issues.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/issues.go new file mode 100644 index 0000000..48d01f9 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/issues.go @@ -0,0 +1,27 @@ +package lyra + +import ( + "github.com/lyraproj/issue/issue" +) + +const ( + BadFunction = `WF_BAD_FUNCTION` + MissingRequiredField = `WF_MISSING_STEP_NAME` + MutuallyExclusiveFields = `WF_MUTUALLY_EXCLUSIVE_FIELDS` + NotActionFunction = `WF_NOT_STATE_FUNCTION` + NotOneStructField = `WF_NOT_ONE_STRUCT_FIELD` + NotStateFunction = `WF_NOT_STATE_FUNCTION` + NotStruct = `WF_NOT_STRUCT` + RequireOneOfFields = `WF_REQUIRE_ONE_OF_FIELDS` +) + +func init() { + issue.Hard(BadFunction, `the go func %{name} has invalid signature: %{type}`) + issue.Hard(MissingRequiredField, `missing required field %{type}.%{name}`) + issue.Hard2(MutuallyExclusiveFields, `only one of the %{fields} can have a value`, 
issue.HF{`fields`: issue.JoinErrors}) + issue.Hard(NotOneStructField, `struct describing parameter must have exactly one field, got %{type}`) + issue.Hard(NotActionFunction, `expected action %{name} function to be a go func, got %{type}`) + issue.Hard(NotStateFunction, `expected resource %{name} state function to be a go func, got %{type}`) + issue.Hard(NotStruct, `%{name} argument must be a go struct or a pointer to a go struct, got '%{type}'`) + issue.Hard2(RequireOneOfFields, `one of the %{fields} must have a value`, issue.HF{`fields`: issue.JoinErrors}) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/reference.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/reference.go new file mode 100644 index 0000000..a6306c4 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/reference.go @@ -0,0 +1,42 @@ +package lyra + +import ( + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/wf" +) + +// Reference is a reference to an external loadable step. +type Reference struct { + // When is a Condition in string form. Can be left empty + When string + + // Parameters is an optional zero value of a struct or a pointer to a struct. The exported fields + // of that struct defines the parameters of the reference step. + // + // Parameters on a reference should mainly be used to rename the parameters of the referenced step + // using parameter aliases. + // + // If the struct is not provided, then the parameters of the referenced step will be used verbatim. + Parameters interface{} + + // Return on a reference should mainly be used to rename the values returned from the referenced step. + Return interface{} + + // StepName is the name of the referenced step + StepName string +} + +func (r *Reference) Resolve(c px.Context, n string) wf.Step { + var parameters, returns []px.Parameter + if r.Parameters != nil { + parameters = paramsFromStruct(c, reflect.TypeOf(r.Parameters), issue.FirstToLower) + } + if r.Return != nil { + returns = paramsFromStruct(c, reflect.TypeOf(r.Return), issue.FirstToLower) + } + return wf.MakeReference( + n, wf.Parse(r.When), parameters, returns, r.StepName) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/resource.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/resource.go new file mode 100644 index 0000000..c5b86c5 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/resource.go @@ -0,0 +1,124 @@ +package lyra + +import ( + "reflect" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/wf" +) + +// Resource represents a declarative workflow step +type Resource struct { + // When is a Condition in string form. Can be left empty + When string + + // ExternalId can be set to the external ID of an existing resource. The resource will then + // not be managed by Lyra. + ExternalId string + + // Return is an optional zero value of a struct or a pointer to a struct. The exported fields + // of that struct defines the returns of the resource step + Return interface{} + + // State is a function that produces the desired state of the resource. + // + // The function can take one optional parameter which must be a struct or pointer to a struct. The exported fields of + // that struct becomes the Parameters of the action. + // + // The function can return one or two values. The first value must be a pointer to a struct. 
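	// (For example, assuming a hypothetical Instance type that is registered with the
	// implementation registry, a state function could have the signature
	// func(p *struct{ Size int64 }) (*Instance, error).)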
That struct represents + // the resource type. If an optional second value is returned, it must be of type error. + State interface{} +} + +func (r *Resource) Resolve(c px.Context, n string) wf.Step { + fv := reflect.ValueOf(r.State) + ft := fv.Type() + if ft.Kind() != reflect.Func { + panic(px.Error(NotStateFunction, issue.H{`name`: n, `type`: ft.String()})) + } + + // Derive the resource type from the state function return value + var rt reflect.Type + oc := ft.NumOut() + returnsError := false + switch oc { + case 0: + // Function must return a resource evaluate and not produce anything + panic(badFunction(n, ft)) + case 1: + // Return type must a struct + rt = ft.Out(0) + case 2: + // First return type must be a struct, second must be an error + returnsError = ft.Out(1).AssignableTo(errorInterface) + if !returnsError { + panic(badFunction(n, ft)) + } + rt = ft.Out(0) + } + + var t px.Type + var ot px.ObjectType + + ok := false + if rt != nil { + switch rt.Kind() { + case reflect.Ptr: + if rt.Elem().Kind() == reflect.Struct { + t, ok = c.ImplementationRegistry().ReflectedToType(rt) + } + case reflect.Struct: + rt = reflect.PtrTo(rt) + t, ok = c.ImplementationRegistry().ReflectedToType(rt) + } + if ok { + ot, ok = t.(px.ObjectType) + } + } + if !ok { + panic(badFunction(n, ft)) + } + + var parameters, returns []px.Parameter + + // Create return parameters from the Returns struct + if r.Return != nil { + ov := reflect.ValueOf(r.Return) + out := ov.Type() + if out.Kind() == reflect.Ptr { + out = out.Elem() + } + if out.Kind() != reflect.Struct { + panic(px.Error(NotStruct, issue.H{`name`: n, `type`: out.String()})) + } + returns = paramsFromStruct(c, out, func(name string) string { + // Check if alias maps to a field. If it does, then the puppet name of + // that field must be used instead + for _, a := range ot.AttributesInfo().Attributes() { + if strings.EqualFold(a.Name(), name) || strings.EqualFold(a.GoName(), name) { + return a.Name() + } + } + for _, f := range types.Fields(rt) { + if strings.EqualFold(f.Name, name) { + return types.FieldName(&f) + } + } + panic(px.Error(px.AttributeNotFound, issue.H{`type`: ot.Name(), `name`: name})) + }) + } + + // Create Parameters parameters from the state function struct parameter + inc := ft.NumIn() + if ft.IsVariadic() || inc > 1 { + panic(badFunction(n, ft)) + } + if inc == 1 { + parameters = paramsFromStruct(c, ft.In(0), nil) + } + + return wf.MakeResource(n, wf.Parse(r.When), parameters, returns, r.ExternalId, newGoState(ot, fv, returnsError)) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/stateconverter.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/stateconverter.go new file mode 100644 index 0000000..71d2a2f --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/stateconverter.go @@ -0,0 +1,57 @@ +package lyra + +import ( + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/wf" +) + +type goState struct { + resourceType px.ObjectType + stateFunc reflect.Value + returnsError bool +} + +func (s *goState) Type() px.ObjectType { + return s.resourceType +} + +func (s *goState) State() interface{} { + return s.stateFunc +} + +func newGoState(resourceType px.ObjectType, stateFunc reflect.Value, returnsError bool) *goState { + return &goState{resourceType, stateFunc, returnsError} +} + +func (s *goState) call(c px.Context, parameters px.OrderedMap) px.PuppetObject { + fv := s.stateFunc + fvType := fv.Type() + var params []reflect.Value + if fvType.NumIn() == 1 { + params = 
[]reflect.Value{reflectParameters(c, fvType.In(0), parameters)} + } + result := fv.Call(params) + var re, rs reflect.Value + switch len(result) { + case 1: + rs = result[0] + if s.returnsError { + re = result[0] + } + case 2: + rs = result[0] + if s.returnsError { + re = result[1] + } + } + if re.IsValid() && re.Type().AssignableTo(errorInterface) { + panic(rs.Interface()) + } + return px.WrapReflected(c, rs).(px.PuppetObject) +} + +func StateConverter(c px.Context, state wf.State, parameters px.OrderedMap) px.PuppetObject { + return state.(*goState).call(c, parameters) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/util.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/util.go new file mode 100644 index 0000000..c330145 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/util.go @@ -0,0 +1,135 @@ +// Package lyra provides struct types that implement the Step interface. The structs can be used to declare +// a complete Lyra workflow in Golang +package lyra + +import ( + "os" + "reflect" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/pcore" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/grpc" + "github.com/lyraproj/servicesdk/service" + "github.com/lyraproj/servicesdk/wf" +) + +// Step is implemented by Action, Resource, and Workflow +type Step interface { + // Resolve resolves the step internals using the given Context + Resolve(c px.Context, pn string) wf.Step +} + +// Serve initializes the grpc plugin mechanism, resolves the given Step, and serves it up to the Lyra client. The +// given init function can be used to initialize a resource type package. +func Serve(n string, init func(c px.Context), a Step) { + // Configuring hclog like this allows Lyra to handle log levels automatically + hclog.DefaultOptions = &hclog.LoggerOptions{ + Name: "Go", + Level: hclog.LevelFromString(os.Getenv("LYRA_LOG_LEVEL")), + JSONFormat: true, + IncludeLocation: false, + Output: os.Stderr, + } + // Tell issue reporting to amend all errors with a stack trace. + issue.IncludeStacktrace(hclog.DefaultOptions.Level <= hclog.Debug) + + pcore.Do(func(c px.Context) { + c.DoWithLoader(service.FederatedLoader(c.Loader()), func() { + if init != nil { + init(c) + } + sb := service.NewServiceBuilder(c, `Step::Service::`+strings.Title(n)) + sb.RegisterStateConverter(StateConverter) + sb.RegisterStep(a.Resolve(c, n)) + grpc.Serve(c, sb.Server()) + }) + }) +} + +// StringPtr returns a pointer to the given string. 
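// For example, a hypothetical returns struct can preset one of its fields with
// Name: StringPtr("my-instance").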
Useful when a pointer to a literal string is needed +func StringPtr(s string) *string { + return &s +} + +var errorInterface = reflect.TypeOf((*error)(nil)).Elem() + +func reflectParameters(ctx px.Context, param reflect.Type, parameters px.OrderedMap) reflect.Value { + ptr := param.Kind() == reflect.Ptr + if ptr { + param = param.Elem() + } + in := reflect.New(param).Elem() + t := in.NumField() + r := ctx.Reflector() + for i := 0; i < t; i++ { + pn := issue.FirstToLower(param.Field(i).Name) + r.ReflectTo(parameters.Get5(pn, px.Undef), in.Field(i)) + } + if ptr { + in = in.Addr() + } + return in +} + +func badFunction(name string, typ reflect.Type) error { + return px.Error(BadFunction, issue.H{`name`: name, `type`: typ.String()}) +} + +func ParametersFromGoStruct(c px.Context, v interface{}) []px.Parameter { + if v == nil { + return nil + } + return paramsFromStruct(c, reflect.TypeOf(v), nil) +} + +func paramsFromStruct(c px.Context, s reflect.Type, nameMapper func(string) string) []px.Parameter { + if s.Kind() == reflect.Ptr { + s = s.Elem() + } + if s.Kind() != reflect.Struct { + panic(px.Error(NotStruct, issue.H{`type`: s.String()})) + } + av, _ := c.Reflector().InitializerFromTagged(`Tmp`, nil, px.NewTaggedType(s, nil)).Get4(`attributes`) + attrs := av.(px.OrderedMap) + + outCount := attrs.Len() + params := make([]px.Parameter, 0, outCount) + var value px.Value + attrs.EachPair(func(k, v px.Value) { + ad := v.(px.OrderedMap) + tp := ad.Get5(`type`, types.DefaultAnyType()).(px.Type) + an := k.String() + alias := an + if v, ok := ad.Get4(`value`); ok { + value = v + } else { + if an, ok := ad.Get4(`annotations`); ok { + if tags, ok := an.(px.OrderedMap).Get(types.TagsAnnotationType); ok { + tm := tags.(px.OrderedMap) + if nameMapper == nil { + if v, ok := tm.Get4(`value`); ok { + value = types.CoerceTo(c, `value annotation`, tp, v) + } else if v, ok := tm.Get4(`lookup`); ok { + value = types.NewDeferred(`lookup`, v) + } + } else if v, ok := tm.Get4(`alias`); ok { + alias = v.String() + } + } + } + } + + if nameMapper != nil { + alias = nameMapper(alias) + if alias != an { + value = types.WrapString(alias) + } + } + params = append(params, px.NewParameter(an, tp, value, false)) + }) + return params +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/workflow.go b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/workflow.go new file mode 100644 index 0000000..08fdd89 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/go/lyra/workflow.go @@ -0,0 +1,38 @@ +package lyra + +import ( + "sort" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/wf" +) + +// Workflow groups several steps into one step. Dependencies between the steps are determined +// by their parameters and returns declarations +type Workflow struct { + // When is a Condition in string form. Can be left empty + When string + + // Parameters is an optional zero value of a struct or a pointer to a struct. The exported fields + // of that struct defines the parameters of the workflow step + Parameters interface{} + + // Return is an optional zero value of a struct or a pointer to a struct. 
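	// For illustration, a hedged sketch of a plugin main function that declares a
	// workflow with Parameters, Return, and a single Resource step and serves it via
	// Serve; the makeVpc state function and the initTypes callback are assumptions,
	// not part of this package:
	//
	//     import "github.com/lyraproj/servicesdk/lang/go/lyra"
	//
	//     func main() {
	//         lyra.Serve("myWorkflow", initTypes, &lyra.Workflow{
	//             Parameters: &struct{ Region string }{},
	//             Return:     &struct{ VpcId string }{},
	//             Steps: map[string]lyra.Step{
	//                 "vpc": &lyra.Resource{State: makeVpc},
	//             },
	//         })
	//     }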
The exported fields + // of that struct defines the returns of the workflow step + Return interface{} + + // Steps is the slice of steps that are executed by this workflow + Steps map[string]Step +} + +func (w *Workflow) Resolve(c px.Context, n string) wf.Step { + as := make([]wf.Step, 0, len(w.Steps)) + for k, a := range w.Steps { + as = append(as, a.Resolve(c, n+`::`+k)) + } + sort.Slice(as, func(i, j int) bool { + return as[i].Name() < as[j].Name() + }) + return wf.MakeWorkflow( + n, wf.Parse(w.When), ParametersFromGoStruct(c, w.Parameters), ParametersFromGoStruct(c, w.Return), as) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/issues.go b/vendor/github.com/lyraproj/servicesdk/lang/issues.go new file mode 100644 index 0000000..d0045ce --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/issues.go @@ -0,0 +1,11 @@ +package lang + +import "github.com/lyraproj/issue/issue" + +const ( + UnsupportedLanguage = `WF_UNSUPPORTED_LANGUAGE` +) + +func init() { + issue.Hard(UnsupportedLanguage, `language %{language} not supported. Choose one of %{supportedLanguages}"`) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/typegen/generator.go b/vendor/github.com/lyraproj/servicesdk/lang/typegen/generator.go new file mode 100644 index 0000000..7ab1bfd --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/typegen/generator.go @@ -0,0 +1,86 @@ +package typegen + +import ( + "bufio" + "io" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/lang" +) + +// The Generator interface is implemented by generators that can transform Pcore types +// to types in some specific language. +type Generator interface { + // GenerateTypes produces types in some language for all types in the given TypeSet and writes + // them to a file under the given directory + GenerateTypes(ts px.TypeSet, directory string) + + // GenerateType produces a type in some language and writes it to a file under the + // given directory + GenerateType(t px.Type, directory string) +} + +// All known language generators +var generators = map[string]Generator{ + "go": &goGeneratorFactory{}, + "puppet": &puppetGenerator{}, + "typescript": &tsGeneratorFactory{}, +} + +func GetGenerator(language string) Generator { + generator, ok := generators[strings.ToLower(language)] + if !ok { + sl := make([]string, 0, len(generators)) + for l := range generators { + sl = append(sl, l) + } + sort.Strings(sl) + panic(px.Error(lang.UnsupportedLanguage, + issue.H{`language`: language, `supportedLanguages`: strings.Join(sl, `, `)})) + } + return generator +} + +func writeByte(w io.Writer, b byte) { + _, err := w.Write([]byte{b}) + if err != nil { + panic(err) + } +} + +func write(w io.Writer, s string) { + _, err := io.WriteString(w, s) + if err != nil { + panic(err) + } +} + +func typeFile(typ px.Type, directory, extension string) string { + tsp := strings.Split(typ.Name(), `::`) + return filepath.Join(directory, filepath.Join(tsp...)) + extension +} + +func typeToStream(directory string, gen func(io.Writer)) { + err := os.MkdirAll(filepath.Dir(directory), os.ModePerm) + if err != nil { + panic(err) + } + f, err := os.Create(directory) + if err != nil { + panic(err) + } + //noinspection ALL + defer f.Close() + + b := bufio.NewWriter(f) + gen(b) + err = b.Flush() + if err != nil { + panic(err) + } +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/typegen/golang.go 
b/vendor/github.com/lyraproj/servicesdk/lang/typegen/golang.go new file mode 100644 index 0000000..f6309cd --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/typegen/golang.go @@ -0,0 +1,655 @@ +package typegen + +import ( + "bytes" + "fmt" + "go/format" + "io" + "path/filepath" + "sort" + "strings" + + "github.com/lyraproj/issue/issue" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/wf" +) + +type goGeneratorFactory struct { +} + +func (gf *goGeneratorFactory) GenerateTypes(typeSet px.TypeSet, directory string) { + tss := make([]px.TypeSet, 0) + tts := make([]px.Type, 0) + typeSet.Types().EachValue(func(t px.Value) { + if ts, ok := t.(px.TypeSet); ok { + tss = append(tss, ts) + } else { + tts = append(tts, t.(px.Type)) + } + }) + + for _, ts := range tss { + gf.GenerateTypes(ts, directory) + } + + if len(tts) > 0 { + pkg := strings.ToLower(wf.LeafName(typeSet.Name())) + formattedTypeToStream(typeSet, directory, pkg, func(g *goGenerator, b *bytes.Buffer) { + for _, t := range tts { + b.WriteByte('\n') + g.generateType(g.goTypeName(t), t, 0, b) + } + }) + } +} + +func (g *goGenerator) goTypeName(t px.Type) string { + n := wf.LeafName(t.Name()) + if g.useCamelCase { + n = issue.SnakeToCamelCase(n) + } + return n +} + +func (gf *goGeneratorFactory) GenerateType(typ px.Type, directory string) { + sg := strings.Split(typ.Name(), `::`) + pkg := `` + if len(sg) > 1 { + pkg = strings.ToLower(sg[len(sg)-2]) + } + formattedTypeToStream(typ, directory, pkg, func(g *goGenerator, b *bytes.Buffer) { + b.WriteByte('\n') + g.generateType(g.goTypeName(typ), typ, 0, b) + }) +} + +func formattedTypeToStream(t px.Type, directory string, pkg string, f func(g *goGenerator, b *bytes.Buffer)) { + tsp := append(strings.Split(strings.ToLower(t.Name()), `::`), pkg) + directory = filepath.Join(directory, filepath.Join(tsp...)) + `.go` + typeToStream(directory, func(w io.Writer) { + g := makeGoGenerator(pkg) + g.findAnonymousTypes(t, nil) + g.nameAnonymousTypes() + b := bytes.NewBufferString("// this file is generated\n") + b.WriteString(`package `) + b.WriteString(pkg) + b.WriteString("\n\nimport (") + newLine(1, b) + b.WriteString(`"fmt"`) + newLine(1, b) + b.WriteString(`"reflect"`) + if g.includeRegexp { + newLine(1, b) + b.WriteString(`"regexp"`) + } + if g.includeTime { + newLine(1, b) + b.WriteString(`"time"`) + } + newLine(0, b) + newLine(1, b) + b.WriteString(`"github.com/lyraproj/pcore/px"`) + if g.includeSemver { + newLine(1, b) + b.WriteString(`"github.com/lyraproj/semver/semver"`) + } + newLine(0, b) + b.WriteByte(')') + f(g, b) + b.WriteByte('\n') + g.writeAnonymousTypes(b) + g.writeInit(b) + _, err := w.Write(g.formatCode(b.Bytes())) + if err != nil { + panic(err) + } + }) +} + +type goGenerator struct { + anonTypes []*anonType + allTypes map[string]px.Type + anonNames map[string]bool + pkg string + useCamelCase bool + includeTime bool + includeRegexp bool + includeSemver bool +} + +func makeGoGenerator(pkg string) *goGenerator { + return &goGenerator{ + pkg: pkg, + anonTypes: make([]*anonType, 0, 50), + allTypes: make(map[string]px.Type, 100), + anonNames: make(map[string]bool, 100), + useCamelCase: true, + } +} + +type nameSeg struct { + n string // The segment + w int // How many paths that uses this segment at the same position +} + +func (n *nameSeg) String() string { + return fmt.Sprintf(`%s(%d)`, n.n, n.w) +} + +type anonType struct { + t px.Type // The anonymous type (a Struct or an Object + ps [][]string // All paths that lead 
up to this type, in reverse + cc bool // true when generator resorted to concatenate two first names in each path + n string // Generated name +} + +func (a *anonType) String() string { + return fmt.Sprintf(`%v, %s`, a.ps, a.n) +} + +func (a *anonType) mostCommonPath() []*nameSeg { + mc := a.findCommonPath(false) + if len(mc) == 0 && !a.cc { + mc = a.findCommonPath(true) + } + return mc +} + +// findCommonPath attempts to find a path common to all paths that leads to this +// anonType. If no such path is found, the most common segment at each position +// is used to form the path. If concatAlt is true, then if no most common segment +// is fount, then segments that are found to have equal number of occurrences for a +// specific path position can be concatenated to resolve the conflict. +func (a *anonType) findCommonPath(concatAlt bool) (result []*nameSeg) { + for s := 0; ; s++ { + maxCount := 0 + maxName := `` + counts := make(map[string]int, len(a.ps)) + for _, pe := range a.ps { + if s >= len(pe) { + return result + } + n := pe[s] + v, ok := counts[n] + if ok { + v++ + } else { + v = 1 + } + counts[n] = v + if v > maxCount { + maxCount = v + maxName = n + } + } + + if len(counts) > 1 { + // Check that maxCount is unique + if concatAlt { + sb := bytes.NewBufferString(``) + for n, mx := range counts { + if mx == maxCount { + sb.WriteString(n) + } + } + maxName = sb.String() + } else { + mxFound := false + for _, mx := range counts { + if mx == maxCount { + if mxFound { + return result + } + mxFound = true + } + } + } + } + result = append(result, &nameSeg{maxName, maxCount}) + } +} + +func (a *anonType) desiredName(allNames map[string]px.Type) (string, int) { + mc := a.mostCommonPath() + b := bytes.NewBufferString(``) + for _, ns := range mc { + s := b.String() + b.Reset() + b.WriteString(ns.n) + b.WriteString(s) + s = b.String() + if len(s) > 2 { + if _, ok := allNames[s]; !ok { + return s, ns.w + } + } + } + return ``, 0 +} + +// useFirstUniqueName sorts all paths to the anonymous type, shortest path +// first, and then makes an attempt to form a unique name from each path, starting +// with segment 0, then concatenating with segment 1, then 2, etc. The first name +// that is formed that doesn't already exist in the allNames map, is assigned as +// the name of the entry and added to the allNames map. +// +// This method should only be used as the last resort. 
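//
// For example, given a reversed path [Tags Instance] and an allNames map that already
// contains the name Tags, the first candidate Tags is rejected and the next candidate,
// InstanceTags, becomes the generated name.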
+func (a *anonType) useFirstUniqueName(allNames map[string]px.Type) bool { + b := bytes.NewBufferString(``) + ps := a.ps + sort.Slice(ps, func(i, j int) bool { + return len(ps[i]) < len(ps[j]) + }) + + for _, p := range ps { + b.Reset() + for _, n := range p { + s := b.String() + b.Reset() + b.WriteString(n) + b.WriteString(s) + s = b.String() + if len(s) > 2 { + if _, ok := allNames[s]; !ok { + allNames[s] = a.t + a.n = s + return true + } + } + } + } + return false +} + +func (g *goGenerator) appendTypeWithPath(t px.Type, p []string) { + l := len(p) + rp := make([]string, 0, l) + for i := l - 1; i >= 0; i-- { + vs := strings.Split(p[i], `_`) + for vi := len(vs) - 1; vi >= 0; vi-- { + rp = append(rp, strings.Title(vs[vi])) + } + } + for _, ot := range g.anonTypes { + if t.Equals(ot.t, nil) { + ot.ps = append(ot.ps, rp) + return + } + } + g.anonTypes = append(g.anonTypes, &anonType{t, [][]string{rp}, false, ``}) +} + +func (g *goGenerator) anonymousName(t px.Type) string { + for _, ot := range g.anonTypes { + if t.Equals(ot.t, nil) { + return ot.n + } + } + panic(fmt.Errorf(`unable to find generated name for anonymous type %s`, px.ToPrettyString(t))) +} + +func (g *goGenerator) nameAnonymousTypes() { + stuffHappened := true + as := g.anonTypes + for stuffHappened { + stuffHappened = false + + // Build map of desired names and what weigh that each anonymous type + // have for the desired name. + nwsMap := make(map[string]map[int][]*anonType, len(as)) + keys := make([]string, 0, len(as)) + for _, a := range as { + if a.n != `` { + continue + } + + cn, weight := a.desiredName(g.allTypes) + if weight == 0 { + // No name produced. We'll deal with it later + continue + } + if nws, ok := nwsMap[cn]; ok { + if ns, ok := nws[weight]; ok { + nws[weight] = append(ns, a) + } else { + nws[weight] = []*anonType{a} + } + } else { + nwsMap[cn] = map[int][]*anonType{weight: {a}} + keys = append(keys, cn) + } + } + + // Assign names to all entries that only have one weight + sort.Slice(keys, func(i, j int) bool { + return keys[i] < keys[j] + }) + for _, n := range keys { + nws := nwsMap[n] + var a *anonType + if len(nws) == 1 { + // Must use range to pick the one and only element + for _, ns := range nws { + // First entry wins the name in case of a weight conflict + a = ns[0] + break + } + } else { + // Find type with heaviest weight + maxWeight := 0 + for w, ns := range nws { + if w > maxWeight { + // First entry wins the name in case of a weight conflict + a = ns[0] + maxWeight = w + } + } + } + if a != nil { + a.n = n + g.allTypes[n] = a.t + stuffHappened = true + } + } + + if stuffHappened { + // Above code must be reiterated + continue + } + + for _, a := range as { + // Modify all entries by concatenating the two first entries in each + // path of the anonType. This is a one time operation on each entry + if a.n == `` && !a.cc { + allConcat := true + for _, p := range a.ps { + if len(p) < 2 { + allConcat = false + break + } + } + if allConcat { + for i, p := range a.ps { + a.ps[i] = append([]string{p[1] + p[0]}, p[2:]...) + } + a.cc = true + stuffHappened = true + } + } + } + + if stuffHappened { + // Above code must be reiterated + continue + } + + // As a last resort, try making a unique name from one of the paths. 
+ for _, a := range as { + if a.n == `` { + if a.useFirstUniqueName(g.allTypes) { + stuffHappened = true + } + } + } + } + + sort.Slice(as, func(i, j int) bool { + a := as[i] + b := as[j] + if a.n != `` { + if b.n != `` { + return a.n < b.n + } + return true + } + if b.n != `` { + return false + } + return strings.Join(a.ps[0], `/`) < strings.Join(b.ps[0], `/`) + }) + + for _, a := range as { + if a.n == `` { + panic(fmt.Errorf(`unable to generate name for %s`, a)) + } + g.anonNames[a.n] = true + } +} + +func (g *goGenerator) findAnonymousTypes(t px.Type, p []string) { + switch t := t.(type) { + case px.TypeSet: + t.Types().EachPair(func(k, v px.Value) { + g.findAnonymousTypes(v.(px.Type), append(p, k.String())) + }) + case px.ObjectType: + n := t.Name() + if n != `` { + n := g.goTypeName(t) + if _, ok := g.allTypes[n]; ok { + break + } + g.allTypes[n] = t + } + for _, a := range t.AttributesInfo().Attributes() { + g.findAnonymousTypes(a.Type(), append(p, a.Name())) + } + if t.Name() == `` { + g.appendTypeWithPath(t, p) + } + case px.TypeWithContainedType: + g.findAnonymousTypes(t.ContainedType(), p) + case *types.ArrayType: + g.findAnonymousTypes(t.ElementType(), p) + case *types.HashType: + g.findAnonymousTypes(t.KeyType(), p) + g.findAnonymousTypes(t.ValueType(), p) + case *types.StructType: + for _, se := range t.Elements() { + g.findAnonymousTypes(se.Key(), p) + g.findAnonymousTypes(se.Value(), append(p, se.Name())) + g.appendTypeWithPath(t, p) + } + case *types.TupleType: + for _, vt := range t.Types() { + g.findAnonymousTypes(vt, p) + } + case *types.VariantType: + for _, vt := range t.Types() { + g.findAnonymousTypes(vt, p) + } + case *types.RegexpType: + g.includeRegexp = true + case *types.SemVerType: + g.includeSemver = true + case *types.SemVerRangeType: + g.includeSemver = true + case *types.TimestampType: + g.includeTime = true + case *types.TimespanType: + g.includeTime = true + } + // TODO: Include packages of Object types from other TypeSets +} + +func (g *goGenerator) generateType(name string, t px.Type, i int, b *bytes.Buffer) { + switch t := t.(type) { + case px.ObjectType: + g.generateObjectType(name, t, 0, b) + case *types.StructType: + g.generateStructType(name, t, 0, b) + default: + b.WriteString(`type `) + b.WriteString(name) + b.WriteString(` = `) + g.writeType(t, b) + } +} + +func (g *goGenerator) generateObjectType(name string, t px.ObjectType, indent int, b *bytes.Buffer) { + newLine(indent, b) + b.WriteString("type ") + b.WriteString(name) + b.WriteString(` struct {`) + indent++ + for _, a := range t.AttributesInfo().Attributes() { + newLine(indent, b) + n := a.Name() + if g.useCamelCase { + n = issue.SnakeToCamelCase(n) + } else { + n = strings.Title(n) + } + b.WriteString(n) + b.WriteByte(' ') + if a.HasValue() { + b.WriteByte('*') + } + g.writeRequiredType(a.Type(), b) + if g.useCamelCase && issue.FirstToLower(n) != a.Name() { + b.WriteString(" `puppet:\"name=>'") + b.WriteString(a.Name()) + b.WriteString("'\"`") + } + } + indent-- + newLine(indent, b) + b.WriteString(`}`) +} + +func (g *goGenerator) generateStructType(name string, t *types.StructType, indent int, b *bytes.Buffer) { + b.WriteString("\ntype ") + b.WriteString(name) + b.WriteString(` struct {`) + indent++ + for _, e := range t.Elements() { + newLine(indent, b) + b.WriteString(strings.Title(e.Name())) + b.WriteString(` `) + g.writeType(e.Value(), b) + } + indent-- + newLine(indent, b) + b.WriteString(`}`) +} + +func (g *goGenerator) writeAnonymousTypes(b *bytes.Buffer) { + for _, a := range 
g.anonTypes { + b.WriteByte('\n') + g.generateType(a.n, a.t, 0, b) + } +} + +func (g *goGenerator) writeInit(b *bytes.Buffer) { + indent := 0 + b.WriteString("\nfunc InitTypes(c px.Context) {") + indent++ + rts := make([]px.ObjectType, 0, len(g.allTypes)-len(g.anonNames)) + for k, t := range g.allTypes { + if _, ok := g.anonNames[k]; ok { + continue + } + if rt, ok := t.(px.ObjectType); ok { + rts = append(rts, rt) + } + } + sort.Slice(rts, func(i, j int) bool { + return rts[i].Name() < rts[j].Name() + }) + + newLine(indent, b) + b.WriteString(`load := func(n string) px.Type {`) + indent++ + newLine(indent, b) + b.WriteString(`if v, ok := px.Load(c, px.NewTypedName(px.NsType, n)); ok {`) + newLine(indent+1, b) + b.WriteString(`return v.(px.Type)`) + newLine(indent, b) + b.WriteByte('}') + newLine(indent, b) + b.WriteString(`panic(fmt.Errorf("unable to load Type '%s'", n))`) + indent-- + newLine(indent, b) + b.WriteString("}\n") + newLine(indent, b) + b.WriteString(`ir := c.ImplementationRegistry()`) + for _, rt := range rts { + newLine(indent, b) + b.WriteString(`ir.RegisterType(load("`) + b.WriteString(rt.Name()) + b.WriteString(`"), reflect.TypeOf(&`) + b.WriteString(g.goTypeName(rt)) + b.WriteString(`{}))`) + } + + indent-- + newLine(indent, b) + b.WriteString(`}`) +} + +func (g *goGenerator) writeType(t px.Type, b *bytes.Buffer) { + if ot, ok := t.(*types.OptionalType); ok { + b.WriteByte('*') + t = ot.ContainedType() + } + g.writeRequiredType(t, b) +} + +func (g *goGenerator) writeRequiredType(t px.Type, b *bytes.Buffer) { + switch t := t.(type) { + case *types.OptionalType: + g.writeRequiredType(t.ContainedType(), b) + case *types.ArrayType: + b.WriteString("[]") + g.writeType(t.ElementType(), b) + case *types.TupleType: + b.WriteString("[]") + g.writeType(t.CommonElementType(), b) + case *types.HashType: + b.WriteString("map[") + g.writeType(t.KeyType(), b) + b.WriteByte(']') + g.writeType(t.ValueType(), b) + case *types.StructType: + b.WriteString(g.anonymousName(t)) + case px.ObjectType: + n := t.Name() + if n == `` { + n = g.anonymousName(t) + } else { + n = g.goTypeName(t) + } + b.WriteString(n) + case *types.BooleanType: + b.WriteString("bool") + case *types.IntegerType: + b.WriteString("int64") + case *types.FloatType: + b.WriteString("float64") + case px.StringType, *types.EnumType, *types.PatternType: + b.WriteString("string") + case *types.RegexpType: + b.WriteString("regexp.Regexp") + case *types.SemVerType: + b.WriteString("semver.Version") + case *types.SemVerRangeType: + b.WriteString("semver.VersionRange") + case *types.TimestampType: + b.WriteString("time.Time") + case *types.TimespanType: + b.WriteString("time.Duration") + default: + panic(fmt.Errorf("don't know how to generate Go type from: %s", px.ToPrettyString(t))) + } +} + +// formatCode reformats the code as `go fmt` would +func (g *goGenerator) formatCode(code []byte) []byte { + src, err := format.Source(code) + if err != nil { + panic(fmt.Errorf("unexpected error running format.Source: %s", err.Error())) + } + return src +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/typegen/puppet.go b/vendor/github.com/lyraproj/servicesdk/lang/typegen/puppet.go new file mode 100644 index 0000000..5785287 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/typegen/puppet.go @@ -0,0 +1,23 @@ +package typegen + +import ( + "io" + + "github.com/lyraproj/pcore/px" +) + +type puppetGenerator struct{} + +func (g *puppetGenerator) GenerateTypes(typeSet px.TypeSet, directory string) { + g.GenerateType(typeSet, 
directory) +} + +func (g *puppetGenerator) GenerateType(typ px.Type, directory string) { + typeToStream(typeFile(typ, directory, `.pp`), func(b io.Writer) { + write(b, "# this file is generated\ntype ") + write(b, typ.Name()) + write(b, " = ") + typ.ToString(b, px.PrettyExpanded, nil) + write(b, "\n") + }) +} diff --git a/vendor/github.com/lyraproj/servicesdk/lang/typegen/typescript.go b/vendor/github.com/lyraproj/servicesdk/lang/typegen/typescript.go new file mode 100644 index 0000000..4eaade5 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/lang/typegen/typescript.go @@ -0,0 +1,532 @@ +package typegen + +import ( + "bytes" + "io" + "sort" + "strconv" + "strings" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/utils" +) + +type tsGeneratorFactory struct { +} + +type tsGenerator struct { + anonIfds map[string]string + ns []string + useIfds bool +} + +func (gf *tsGeneratorFactory) GenerateTypes(typeSet px.TypeSet, directory string) { + tss := make([]px.TypeSet, 0) + tts := make([]px.Type, 0) + typeSet.Types().EachValue(func(t px.Value) { + if ts, ok := t.(px.TypeSet); ok { + tss = append(tss, ts) + } else { + tts = append(tts, t.(px.Type)) + } + }) + + for _, ts := range tss { + gf.GenerateTypes(ts, directory) + } + + if len(tts) > 0 { + typeToStream(typeFile(typeSet, directory, `.ts`), func(b io.Writer) { + write(b, "// this file is generated\n") + write(b, "import {PcoreValue, Value} from 'lyra-workflow';\n") + g := &tsGenerator{make(map[string]string), strings.Split(typeSet.Name(), `::`), true} + for _, t := range tts { + g.generateType(t, 0, b) + } + g.writeAnonIfds(b) + }) + } +} + +func (gf *tsGeneratorFactory) GenerateType(typ px.Type, directory string) { + typeToStream(typeFile(typ, directory, `.ts`), func(b io.Writer) { + write(b, "// this file is generated\n") + write(b, "import {PcoreValue, Value} from 'lyra-workflow';\n") + g := &tsGenerator{make(map[string]string), namespace(typ.Name()), true} + g.generateType(typ, 0, b) + g.writeAnonIfds(b) + }) +} + +func (g *tsGenerator) writeAnonIfds(b io.Writer) { + ac := len(g.anonIfds) + if !g.useIfds || ac == 0 { + return + } + + names := make([]string, 0, ac) + rev := make(map[string]string, ac) + for sign, name := range g.anonIfds { + names = append(names, name) + rev[name] = sign + } + sort.Slice(names, func(i, j int) bool { + ii, _ := strconv.Atoi(names[i][4:]) + ij, _ := strconv.Atoi(names[j][4:]) + return ii < ij + }) + for _, name := range names { + write(b, "\ninterface ") + write(b, name) + write(b, ` `) + write(b, rev[name]) + } +} + +// GenerateType produces a TypeScript type for the given Type and appends it to +// the given buffer. 
+func (g *tsGenerator) generateType(t px.Type, indent int, bld io.Writer) { + if _, ok := t.(px.TypeSet); ok { + return + } + + if pt, ok := t.(px.ObjectType); ok { + newLine(indent, bld) + write(bld, `export class `) + write(bld, nsName(g.ns, pt.Name())) + if ppt, ok := pt.Parent().(px.ObjectType); ok { + write(bld, ` extends `) + write(bld, nsName(g.ns, ppt.Name())) + } else { + write(bld, ` implements PcoreValue`) + } + write(bld, ` {`) + indent += 2 + ai := pt.AttributesInfo() + allAttrs, thisAttrs, superAttrs := g.toTsAttrs(pt, ai.Attributes(), indent) + g.appendFields(thisAttrs, indent, bld) + if len(thisAttrs) > 0 { + writeByte(bld, '\n') + } + if len(allAttrs) > 0 { + g.appendConstructor(allAttrs, thisAttrs, superAttrs, indent, bld) + writeByte(bld, '\n') + } + hasSuper := len(superAttrs) > 0 + if len(thisAttrs) > 0 || !hasSuper { + g.appendPValueGetter(hasSuper, thisAttrs, indent, bld) + writeByte(bld, '\n') + } + g.appendPTypeGetter(pt.Name(), indent, bld) + indent -= 2 + newLine(indent, bld) + write(bld, "}\n") + } else { + g.appendTsType(t, indent, bld) + } +} + +// ToTsType converts the given pType to a string representation of a TypeScript type. The given +// pType can not be a TypeSet. +func (g *tsGenerator) ToTsType(pType px.Type) string { + return g.toTsType(pType, 0) +} + +func (g *tsGenerator) toTsType(pType px.Type, indent int) string { + bld := bytes.NewBufferString(``) + g.appendTsType(pType, indent, bld) + return bld.String() +} + +type tsAttribute struct { + tsName string + name string + typ string + value *string +} + +var keywords = map[string]bool{ + // The following keywords are reserved and cannot be used as an Identifier: + `arguments`: true, + `break`: true, + `case`: true, + `catch`: true, + `class`: true, + `const`: true, + `continue`: true, + `debugger`: true, + `default`: true, + `delete`: true, + `do`: true, + `else`: true, + `enum`: true, + `export`: true, + `extends`: true, + `false`: true, + `finally`: true, + `for`: true, + `function`: true, + `if`: true, + `import`: true, + `in`: true, + `instanceof`: true, + `new`: true, + `null`: true, + `return`: true, + `super`: true, + `switch`: true, + `this`: true, + `throw`: true, + `true`: true, + `try`: true, + `typeof`: true, + `var`: true, + `void`: true, + `while`: true, + `with`: true, + + // The following keywords cannot be used as identifiers in strict mode code, but are otherwise not restricted: + `implements`: true, + `interface`: true, + `let`: true, + `package`: true, + `private`: true, + `protected`: true, + `public`: true, + `static`: true, + `yield`: true, + + // The following keywords cannot be used as user defined type names, but are otherwise not restricted: + `any`: true, + `boolean`: true, + `number`: true, + `string`: true, + `symbol`: true, +} + +func (g *tsGenerator) toTsAttrs( + t px.ObjectType, attrs []px.Attribute, indent int) (allAttrs, thisAttrs, superAttrs []*tsAttribute) { + allAttrs = make([]*tsAttribute, len(attrs)) + superAttrs = make([]*tsAttribute, 0) + thisAttrs = make([]*tsAttribute, 0) + for i, attr := range attrs { + n := attr.Name() + tsn := n + if keywords[n] { + tsn = n + `_` + } + tsAttr := &tsAttribute{tsName: tsn, name: n, typ: g.toTsType(attr.Type(), indent)} + if attr.HasValue() { + tsAttr.value = g.toTsValue(attr.Value()) + } + if attr.Container() == t { + thisAttrs = append(thisAttrs, tsAttr) + } else { + superAttrs = append(superAttrs, tsAttr) + } + allAttrs[i] = tsAttr + } + return +} + +func (g *tsGenerator) appendFields(thisAttrs []*tsAttribute, indent 
int, bld io.Writer) { + for _, attr := range thisAttrs { + newLine(indent, bld) + write(bld, `readonly `) + write(bld, attr.tsName) + write(bld, `: `) + write(bld, attr.typ) + write(bld, `;`) + } +} + +func (g *tsGenerator) appendConstructor(allAttrs, thisAttrs, superAttrs []*tsAttribute, indent int, bld io.Writer) { + newLine(indent, bld) + write(bld, `constructor(`) + g.appendParameters(allAttrs, indent, bld) + write(bld, `) {`) + indent += 2 + if len(superAttrs) > 0 { + newLine(indent, bld) + write(bld, `super({`) + for i, attr := range superAttrs { + if i > 0 { + write(bld, `, `) + } + write(bld, attr.tsName) + write(bld, `: `) + write(bld, attr.tsName) + } + write(bld, `});`) + } + for _, attr := range thisAttrs { + newLine(indent, bld) + write(bld, `this.`) + write(bld, attr.tsName) + write(bld, ` = `) + write(bld, attr.tsName) + writeByte(bld, ';') + } + indent -= 2 + newLine(indent, bld) + writeByte(bld, '}') +} + +func (g *tsGenerator) appendPValueGetter(hasSuper bool, thisAttrs []*tsAttribute, indent int, bld io.Writer) { + newLine(indent, bld) + write(bld, `__pvalue(): {[s: string]: Value} {`) + indent += 2 + newLine(indent, bld) + if len(thisAttrs) == 0 { + if hasSuper { + write(bld, `return super.__pvalue();`) + } else { + write(bld, `return {};`) + } + } else { + if hasSuper { + write(bld, `const ih = super.__pvalue();`) + } else { + write(bld, `const ih: {[s: string]: Value} = {};`) + } + for _, attr := range thisAttrs { + newLine(indent, bld) + if attr.value != nil { + write(bld, `if (this.`) + write(bld, attr.tsName) + write(bld, ` !== `) + write(bld, *attr.value) + write(bld, `) {`) + indent += 2 + newLine(indent, bld) + } + write(bld, `ih['`) + write(bld, attr.name) + write(bld, `'] = this.`) + write(bld, attr.tsName) + write(bld, `;`) + if attr.value != nil { + indent -= 2 + newLine(indent, bld) + write(bld, `}`) + } + } + newLine(indent, bld) + write(bld, `return ih;`) + } + indent -= 2 + newLine(indent, bld) + writeByte(bld, '}') +} + +func (g *tsGenerator) appendPTypeGetter(name string, indent int, bld io.Writer) { + newLine(indent, bld) + write(bld, `__ptype(): string {`) + indent += 2 + newLine(indent, bld) + write(bld, `return '`) + write(bld, name) + write(bld, `';`) + indent -= 2 + newLine(indent, bld) + writeByte(bld, '}') +} + +func (g *tsGenerator) appendParameters(params []*tsAttribute, indent int, bld io.Writer) { + indent += 2 + write(bld, `{`) + last := len(params) - 1 + for i, attr := range params { + newLine(indent, bld) + write(bld, attr.tsName) + if attr.value != nil { + write(bld, ` = `) + write(bld, *attr.value) + } + if i < last { + write(bld, `,`) + } + } + indent -= 2 + newLine(indent, bld) + write(bld, `}: {`) + indent += 2 + + for i, attr := range params { + newLine(indent, bld) + write(bld, attr.tsName) + if attr.value != nil { + writeByte(bld, '?') + } + write(bld, `: `) + write(bld, attr.typ) + if i < last { + writeByte(bld, ',') + } + } + + indent -= 2 + newLine(indent, bld) + write(bld, `}`) +} + +func (g *tsGenerator) toTsValue(value px.Value) *string { + bld := bytes.NewBufferString(``) + g.appendTsValue(value, bld) + s := bld.String() + return &s +} + +func (g *tsGenerator) appendTsValue(value px.Value, bld io.Writer) { + switch value := value.(type) { + case *types.UndefValue: + write(bld, `null`) + case px.StringValue: + utils.PuppetQuote(bld, value.String()) + case px.Boolean, px.Integer, px.Float: + write(bld, value.String()) + case *types.Array: + writeByte(bld, '[') + value.EachWithIndex(func(e px.Value, i int) { + if i > 0 { + 
write(bld, `, `) + } + g.appendTsValue(e, bld) + }) + writeByte(bld, ']') + case *types.Hash: + writeByte(bld, '{') + value.EachWithIndex(func(e px.Value, i int) { + ev := e.(*types.HashEntry) + if i > 0 { + write(bld, `, `) + } + utils.PuppetQuote(bld, ev.Key().String()) + write(bld, `: `) + g.appendTsValue(ev.Value(), bld) + }) + writeByte(bld, '}') + } +} + +func (g *tsGenerator) appendTsType(pType px.Type, indent int, bld io.Writer) { + switch pType := pType.(type) { + case *types.BooleanType: + write(bld, `boolean`) + case *types.IntegerType, *types.FloatType: + write(bld, `number`) + case px.StringType: + write(bld, `string`) + case *types.OptionalType: + g.appendTsType(pType.ContainedType(), indent, bld) + write(bld, `|null`) + case *types.ArrayType: + et := pType.ElementType() + switch et.(type) { + case *types.ArrayType, *types.EnumType, *types.HashType, *types.OptionalType, *types.VariantType: + write(bld, `Array<`) + g.appendTsType(et, indent, bld) + write(bld, `>`) + default: + g.appendTsType(et, indent, bld) + write(bld, `[]`) + } + case *types.VariantType: + for i, v := range pType.Types() { + if i > 0 { + write(bld, `|`) + } + g.appendTsType(v, indent, bld) + } + case *types.HashType: + write(bld, `{[s: `) + g.appendTsType(pType.KeyType(), indent, bld) + write(bld, `]: `) + g.appendTsType(pType.ValueType(), indent, bld) + write(bld, `}`) + case *types.EnumType: + for i, s := range pType.Parameters() { + if i > 0 { + write(bld, `|`) + } + g.appendTsValue(s, bld) + } + case *types.TypeAliasType: + write(bld, nsName(g.ns, pType.Name())) + case px.ObjectType: + if pType.Name() == `` { + write(bld, g.makeAnonymousType(pType, indent)) + } else { + write(bld, nsName(g.ns, pType.Name())) + } + } +} + +func (g *tsGenerator) makeAnonymousType(t px.ObjectType, indent int) string { + bld := bytes.NewBufferString(``) + if g.useIfds { + indent = 0 + } + write(bld, `{`) + indent += 2 + allAttrs, _, _ := g.toTsAttrs(t, t.AttributesInfo().Attributes(), indent) + for i, a := range allAttrs { + if i > 0 { + write(bld, `,`) + } + newLine(indent, bld) + write(bld, a.tsName) + if a.value != nil { + write(bld, `?`) + } + write(bld, `: `) + write(bld, a.typ) + } + indent -= 2 + newLine(indent, bld) + write(bld, `}`) + sign := bld.String() + if !g.useIfds { + return sign + } + if prev, ok := g.anonIfds[sign]; ok { + return prev + } + n := `Anon` + strconv.Itoa(len(g.anonIfds)) + g.anonIfds[sign] = n + return n +} + +func newLine(indent int, bld io.Writer) { + writeByte(bld, '\n') + for n := 0; n < indent; n++ { + writeByte(bld, ' ') + } +} + +func namespace(name string) []string { + parts := strings.Split(name, `::`) + return parts[:len(parts)-1] +} + +func nsName(ns []string, name string) string { + parts := strings.Split(name, `::`) + if isParent(ns, parts) { + return strings.Join(parts[len(ns):], `.`) + } + return strings.Join(parts, `.`) +} + +func isParent(ns, n []string) bool { + top := len(ns) + if top < len(n) { + for idx := 0; idx < top; idx++ { + if n[idx] != ns[idx] { + return false + } + } + return true + } + return false +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/builder.go b/vendor/github.com/lyraproj/servicesdk/service/builder.go new file mode 100644 index 0000000..2e4ce7f --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/builder.go @@ -0,0 +1,453 @@ +package service + +import ( + "reflect" + "sort" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + 
"github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" +) + +type Builder struct { + ctx px.Context + serviceId px.TypedName + stateConverter wf.StateConverter + types map[string]px.Type + handlerFor map[string]px.Type + steps map[string]serviceapi.Definition + callables map[string]reflect.Value + actionApis map[string]bool + states map[string]wf.State + callableObjects map[string]px.PuppetObject +} + +func NewServiceBuilder(ctx px.Context, serviceName string) *Builder { + return &Builder{ + ctx: ctx, + serviceId: px.NewTypedName(px.NsService, assertTypeName(serviceName)), + callables: make(map[string]reflect.Value), + callableObjects: make(map[string]px.PuppetObject), + handlerFor: make(map[string]px.Type), + steps: make(map[string]serviceapi.Definition), + types: make(map[string]px.Type), + actionApis: make(map[string]bool), + states: make(map[string]wf.State)} +} + +func assertTypeName(name string) string { + if types.TypeNamePattern.MatchString(name) { + return name + } + panic(px.Error(IllegalTypeName, issue.H{`name`: name})) +} + +func (ds *Builder) RegisterStateConverter(sf wf.StateConverter) { + ds.stateConverter = sf +} + +// RegisterAPI registers a struct as an invokable. The callable instance given as the argument becomes the +// actual receiver the calls. +func (ds *Builder) RegisterAPI(name string, callable interface{}) { + name = assertTypeName(name) + if po, ok := callable.(px.PuppetObject); ok { + ds.callableObjects[name] = po + } else { + rv := reflect.ValueOf(callable) + rt := rv.Type() + _, ok := ds.ctx.ImplementationRegistry().ReflectedToType(rt) + if !ok { + pt := ds.ctx.Reflector().TypeFromReflect(name, nil, rt) + ds.registerType(name, pt) + } + ds.registerCallable(name, rv) + } +} + +// RegisterAPIType registers a the type of a struct as an invokable type. The struct should be a zero +// value. This method must be used to ensure that all type info is present for callable instances added to an +// already created service +func (ds *Builder) RegisterApiType(name string, callable interface{}) { + name = assertTypeName(name) + rv := reflect.ValueOf(callable) + rt := rv.Type() + pt, ok := ds.ctx.ImplementationRegistry().ReflectedToType(rt) + if !ok { + pt = ds.ctx.Reflector().TypeFromReflect(name, nil, rt) + } + if _, ok := ds.types[name]; !ok { + ds.registerType(name, pt) + } +} + +// RegisterState registers the unresolved state of a resource. +func (ds *Builder) RegisterState(name string, state wf.State) { + ds.states[name] = state +} + +func (ds *Builder) BuildResource(goType interface{}, bld func(f ResourceTypeBuilder)) px.AnnotatedType { + rb := &rtBuilder{ctx: ds.ctx} + bld(rb) + return rb.Build(goType) +} + +// RegisterHandler registers a callable struct as an invokable capable of handling a state described using +// px.Type. The callable instance given as the argument becomes the actual receiver the calls. +func (ds *Builder) RegisterHandler(name string, callable interface{}, stateType px.Type) { + ds.RegisterAPI(name, callable) + ds.types[stateType.Name()] = stateType + ds.handlerFor[name] = stateType +} + +// RegisterTypes registers arbitrary Go types to the TypeSet exported by this service. +// +// A value is typically a pointer to the zero value of a struct. The name of the generated type for +// that struct will be the struct name prefixed by the service ID. 
+func (ds *Builder) RegisterTypes(namespace string, values ...interface{}) []px.Type { + ts := make([]px.Type, len(values)) + for i, v := range values { + switch v := v.(type) { + case px.Type: + ds.types[v.Name()] = v + ts[i] = v + case px.AnnotatedType: + ts[i] = ds.registerReflectedType(namespace, v) + case reflect.Type: + ts[i] = ds.registerReflectedType(namespace, px.NewTaggedType(v, nil)) + case reflect.Value: + ts[i] = ds.registerReflectedType(namespace, px.NewTaggedType(v.Type(), nil)) + default: + ts[i] = ds.registerReflectedType(namespace, px.NewTaggedType(reflect.TypeOf(v), nil)) + } + } + return ts +} + +func (ds *Builder) registerReflectedType(namespace string, tg px.AnnotatedType) px.Type { + typ := tg.Type() + if typ.Kind() == reflect.Ptr { + el := typ.Elem() + if el.Kind() != reflect.Interface { + typ = el + } + } + + parent := types.ParentType(typ) + var pt px.Type + if parent != nil { + pt = ds.registerReflectedType(namespace, px.NewTaggedType(parent, nil)) + } + + name := namespace + `::` + typ.Name() + et, ok := ds.types[name] + if ok { + // Type is already registered + return et + } + + var registerFieldType func(ft reflect.Type) + registerFieldType = func(ft reflect.Type) { + switch ft.Kind() { + case reflect.Slice, reflect.Ptr, reflect.Array, reflect.Map: + registerFieldType(ft.Elem()) + case reflect.Struct: + if ft == parent { + break + } + // Register type unless it's already registered + if _, err := px.WrapReflectedType(ds.ctx, ft); err != nil { + ds.registerReflectedType(namespace, px.NewAnnotatedType(ft, nil, nil)) + } + } + } + + et = ds.ctx.Reflector().TypeFromTagged(name, pt, tg, func() { + // Register nested types unless already known to the implementation registry + nf := typ.NumField() + for i := 0; i < nf; i++ { + f := typ.Field(i) + if f.PkgPath == `` { + // Exported + registerFieldType(f.Type) + } + } + }) + ds.types[name] = et + return et +} + +// RegisterStep registers an step +func (ds *Builder) RegisterStep(step wf.Step) { + name := step.Name() + if _, found := ds.steps[name]; found { + panic(px.Error(AlreadyRegistered, issue.H{`namespace`: px.NsDefinition, `identifier`: name})) + } + ds.steps[name] = ds.createStepDefinition(step) +} + +func (ds *Builder) registerCallable(name string, callable reflect.Value) { + if _, found := ds.callables[name]; found { + panic(px.Error(AlreadyRegistered, issue.H{`namespace`: px.NsInterface, `identifier`: name})) + } + ds.callables[name] = callable +} + +func (ds *Builder) RegisterType(typ px.Type) { + ds.registerType(typ.Name(), typ) +} + +func (ds *Builder) registerType(name string, typ px.Type) { + if _, found := ds.types[name]; found { + panic(px.Error(AlreadyRegistered, issue.H{`namespace`: px.NsType, `identifier`: name})) + } + ds.types[name] = typ +} + +func (ds *Builder) createStepDefinition(step wf.Step) serviceapi.Definition { + props := make([]*types.HashEntry, 0, 5) + + if parameters := paramsAsList(step.Parameters()); parameters != nil { + props = append(props, types.WrapHashEntry2(`parameters`, parameters)) + } + if returns := paramsAsList(step.Returns()); returns != nil { + props = append(props, types.WrapHashEntry2(`returns`, returns)) + } + if step.When() != wf.Always { + props = append(props, types.WrapHashEntry2(`when`, types.WrapString(step.When().String()))) + } + + name := step.Name() + var style string + switch step := step.(type) { + case wf.Workflow: + style = `workflow` + props = append(props, types.WrapHashEntry2(`steps`, ds.stepsAsList(step.Steps()))) + case wf.Resource: + style = 
`resource` + state := step.State() + extId := step.ExternalId() + ds.RegisterState(name, state) + props = append(props, types.WrapHashEntry2(`resourceType`, state.Type())) + if extId != `` { + props = append(props, types.WrapHashEntry2(`externalId`, types.WrapString(extId))) + } + case wf.StateHandler: + style = `stateHandler` + tn := strings.Title(name) + api := step.Interface() + ds.RegisterAPI(tn, api) + var ifd px.Type + if po, ok := api.(px.PuppetObject); ok { + ifd = po.PType() + } else { + ifd = ds.types[tn] + } + props = append(props, types.WrapHashEntry2(`interface`, ifd)) + case wf.Action: + style = `action` + tn := strings.Title(name) + api := step.Function() + ds.RegisterAPI(tn, api) + ds.actionApis[tn] = true + var ifd px.Type + if po, ok := api.(px.PuppetObject); ok { + ifd = po.PType() + } else { + ifd, ok = ds.types[tn] + if !ok { + ifd, _ = ds.ctx.ImplementationRegistry().ReflectedToType(reflect.TypeOf(api)) + } + } + props = append(props, types.WrapHashEntry2(`interface`, ifd)) + case wf.Iterator: + style = `iterator` + props = append(props, types.WrapHashEntry2(`iterationStyle`, types.WrapString(step.IterationStyle().String()))) + props = append(props, types.WrapHashEntry2(`over`, step.Over())) + vars := step.Variables() + if len(vars) > 0 { + props = append(props, types.WrapHashEntry2(`variables`, paramsAsList(vars))) + } + if step.Into() != `` { + props = append(props, types.WrapHashEntry2(`into`, types.WrapString(step.Into()))) + } + props = append(props, types.WrapHashEntry2(`producer`, ds.createStepDefinition(step.Producer()))) + case wf.Reference: + style = `reference` + props = append(props, types.WrapHashEntry2(`reference`, types.WrapString(step.Reference()))) + } + props = append(props, types.WrapHashEntry2(`style`, types.WrapString(style))) + return serviceapi.NewDefinition(px.NewTypedName(px.NsDefinition, name), ds.serviceId, types.WrapHash(props)) +} + +func paramsAsList(params []px.Parameter) px.List { + np := len(params) + if np == 0 { + return nil + } + ps := make([]px.Value, np) + for i, p := range params { + ps[i] = p + } + return types.WrapValues(ps) +} + +func (ds *Builder) stepsAsList(steps []wf.Step) px.List { + as := make([]px.Value, len(steps)) + for i, a := range steps { + as[i] = ds.createStepDefinition(a) + } + return types.WrapValues(as) +} + +func CreateTypeSet(ts map[string]px.Type) px.TypeSet { + result := make(map[string]interface{}) + for k, t := range ts { + addName(strings.Split(k, `::`), result, t) + } + + if len(result) != 1 { + panic(px.Error(NoCommonNamespace, issue.NoArgs)) + } + +next: + for { + // If the value below is a map of size 1, then move that map up + for k, v := range result { + if sm, ok := v.(map[string]interface{}); ok && len(sm) == 1 { + delete(result, k) + for sk, sv := range sm { + result[k+`::`+sk] = sv + } + continue next + } + break next + } + } + t := makeType(``, result) + if ts, ok := t.(px.TypeSet); ok { + return ts + } + + sgs := strings.Split(t.Name(), `::`) + tsn := strings.Join(sgs[:len(sgs)-1], `::`) + tn := sgs[len(sgs)-1] + es := make([]*types.HashEntry, 0) + es = append(es, types.WrapHashEntry2(px.KeyPcoreUri, types.WrapString(string(px.PcoreUri)))) + es = append(es, types.WrapHashEntry2(px.KeyPcoreVersion, types.WrapSemVer(px.PcoreVersion))) + es = append(es, types.WrapHashEntry2(types.KeyVersion, types.WrapSemVer(ServerVersion))) + es = append(es, types.WrapHashEntry2(types.KeyTypes, px.SingletonMap(tn, t))) + return types.NewTypeSet(px.RuntimeNameAuthority, tsn, types.WrapHash(es)) +} + +func 
makeType(name string, tree map[string]interface{}) px.Type { + rl := len(tree) + ts := make(map[string]px.Type, rl) + for k, v := range tree { + var t px.Type + if x, ok := v.(px.Type); ok { + t = x + } else { + var tn string + if name == `` { + tn = k + } else { + tn = name + `::` + k + } + t = makeType(tn, v.(map[string]interface{})) + } + if rl == 1 { + return t + } + ts[k] = t + } + es := make([]*types.HashEntry, 0) + es = append(es, types.WrapHashEntry2(px.KeyPcoreUri, types.WrapString(string(px.PcoreUri)))) + es = append(es, types.WrapHashEntry2(px.KeyPcoreVersion, types.WrapSemVer(px.PcoreVersion))) + es = append(es, types.WrapHashEntry2(types.KeyVersion, types.WrapSemVer(ServerVersion))) + es = append(es, types.WrapHashEntry2(types.KeyTypes, types.WrapStringToTypeMap(ts))) + return types.NewTypeSet(px.RuntimeNameAuthority, name, types.WrapHash(es)) +} + +func addName(ks []string, tree map[string]interface{}, t px.Type) { + kl := len(ks) + k0 := ks[0] + if sn, ok := tree[k0]; ok { + if sm, ok := sn.(map[string]interface{}); ok { + if kl > 1 { + addName(ks[1:], sm, t) + return + } + } + panic(`type/typeset clash`) + } + if kl > 1 { + sm := make(map[string]interface{}) + tree[k0] = sm + addName(ks[1:], sm, t) + } else { + tree[k0] = t + } +} + +func (ds *Builder) Server() *Server { + var ts px.TypeSet + if len(ds.types) > 0 { + ts = CreateTypeSet(ds.types) + px.AddTypes(ds.ctx, ts) + } + + defs := make([]px.Value, 0, len(ds.callables)+len(ds.steps)) + + callableStyle := types.WrapString(`callable`) + // Create invokable definitions for callable handlers + for k, v := range ds.callables { + if _, ok := ds.actionApis[k]; ok { + continue + } + props := make([]*types.HashEntry, 0, 2) + if pt, ok := ds.ctx.ImplementationRegistry().ReflectedToType(v.Type()); ok { + props = append(props, types.WrapHashEntry2(`interface`, pt)) + } + + props = append(props, types.WrapHashEntry2(`style`, callableStyle)) + if stateType, ok := ds.handlerFor[k]; ok { + props = append(props, types.WrapHashEntry2(`handlerFor`, stateType)) + } + defs = append(defs, serviceapi.NewDefinition(px.NewTypedName(px.NsDefinition, k), ds.serviceId, types.WrapHash(props))) + } + + for k, po := range ds.callableObjects { + if _, ok := ds.actionApis[k]; ok { + continue + } + props := make([]*types.HashEntry, 0, 2) + props = append(props, types.WrapHashEntry2(`interface`, po.PType())) + props = append(props, types.WrapHashEntry2(`style`, callableStyle)) + if stateType, ok := ds.handlerFor[k]; ok { + props = append(props, types.WrapHashEntry2(`handlerFor`, stateType)) + } + defs = append(defs, serviceapi.NewDefinition(px.NewTypedName(px.NsDefinition, k), ds.serviceId, types.WrapHash(props))) + } + + // Add registered steps + for _, a := range ds.steps { + defs = append(defs, a) + } + sort.Slice(defs, func(i, j int) bool { + return defs[i].(serviceapi.Definition).Identifier().Name() < defs[j].(serviceapi.Definition).Identifier().Name() + }) + + callables := make(map[string]px.Value, len(ds.callables)+len(ds.callableObjects)) + for k, v := range ds.callables { + callables[k] = px.WrapReflected(ds.ctx, v) + } + + for k, po := range ds.callableObjects { + callables[k] = po + } + + return &Server{context: ds.ctx, id: ds.serviceId, typeSet: ts, metadata: types.WrapValues(defs), stateConverter: ds.stateConverter, callables: callables, states: ds.states} +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/definition.go b/vendor/github.com/lyraproj/servicesdk/service/definition.go new file mode 100644 index 0000000..7fa222e --- 
/dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/definition.go @@ -0,0 +1,103 @@ +package service + +import ( + "fmt" + "io" + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" +) + +func init() { + serviceapi.DefinitionMetaType = px.NewGoObjectType(`Service::Definition`, reflect.TypeOf((*serviceapi.Definition)(nil)).Elem(), `{ + attributes => { + identifier => TypedName, + serviceId => TypedName, + properties => Hash[String,RichData] + } + }`, + + func(ctx px.Context, args []px.Value) px.Value { + identifier := args[0].(px.TypedName) + serviceId := args[1].(px.TypedName) + properties := args[2].(px.OrderedMap) + return newDefinition(identifier, serviceId, properties) + }, + + func(ctx px.Context, args []px.Value) px.Value { + h := args[0].(*types.Hash) + identifier := h.Get5(`identifier`, px.Undef).(px.TypedName) + serviceId := h.Get5(`serviceId`, px.Undef).(px.TypedName) + properties := h.Get5(`properties`, px.EmptyMap).(px.OrderedMap) + return newDefinition(identifier, serviceId, properties) + }) + + serviceapi.NewDefinition = newDefinition +} + +func newDefinition(identifier, serviceId px.TypedName, properties px.OrderedMap) serviceapi.Definition { + return &definition{identifier, serviceId, properties} +} + +type definition struct { + identifier px.TypedName + serviceId px.TypedName + properties px.OrderedMap +} + +func (d *definition) Label() string { + return fmt.Sprintf(`%s/%s`, d.serviceId.Name(), d.identifier.Name()) +} + +func (d *definition) Get(key string) (value px.Value, ok bool) { + switch key { + case `identifier`: + return d.identifier, true + case `serviceId`: + return d.serviceId, true + case `properties`: + return d.properties, true + } + return nil, false +} + +func (d *definition) InitHash() px.OrderedMap { + es := make([]*types.HashEntry, 0, 3) + es = append(es, types.WrapHashEntry2(`identifier`, d.identifier)) + es = append(es, types.WrapHashEntry2(`serviceId`, d.serviceId)) + es = append(es, types.WrapHashEntry2(`properties`, d.properties)) + return types.WrapHash(es) +} + +func (d *definition) Equals(other interface{}, g px.Guard) bool { + if o, ok := other.(*definition); ok { + return d.identifier.Equals(o.identifier, g) && d.serviceId.Equals(o.serviceId, g) && d.properties.Equals(o.properties, g) + } + return false +} + +func (d *definition) Identifier() px.TypedName { + return d.identifier +} + +func (d *definition) ServiceId() px.TypedName { + return d.serviceId +} + +func (d *definition) Properties() px.OrderedMap { + return d.properties +} + +func (d *definition) String() string { + return px.ToString(d) +} + +func (d *definition) ToString(bld io.Writer, format px.FormatContext, g px.RDetect) { + types.ObjectToString(d, format, bld, g) +} + +func (d *definition) PType() px.Type { + return serviceapi.DefinitionMetaType +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/error.go b/vendor/github.com/lyraproj/servicesdk/service/error.go new file mode 100644 index 0000000..01317b3 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/error.go @@ -0,0 +1,191 @@ +package service + +import ( + "io" + "reflect" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" +) + +var ErrorMetaType px.ObjectType + +func init() { + ErrorMetaType = px.NewGoObjectType(`Error`, reflect.TypeOf((*serviceapi.ErrorObject)(nil)).Elem(), `{ + type_parameters => { + kind => 
Optional[Variant[String,Regexp,Type[Enum],Type[Pattern],Type[NotUndef],Type[Undef]]], + issue_code => Optional[Variant[String,Regexp,Type[Enum],Type[Pattern],Type[NotUndef],Type[Undef]]] + }, + attributes => { + message => String[1], + kind => { type => Optional[String[1]], value => undef }, + issue_code => { type => Optional[String[1]], value => undef }, + partial_result => { type => Data, value => undef }, + details => { type => Optional[Hash[String[1],Data]], value => undef }, + }}`, + func(ctx px.Context, args []px.Value) px.Value { + return newError2(ctx, args...) + }, + func(ctx px.Context, args []px.Value) px.Value { + return newErrorFromHash(ctx, args[0].(px.OrderedMap)) + }) + + serviceapi.NewError = newError + serviceapi.ErrorFromReported = errorFromReported +} + +type errorObj struct { + typ px.Type + message string + kind string + issueCode string + partialResult px.Value + details px.OrderedMap +} + +func newError2(c px.Context, args ...px.Value) serviceapi.ErrorObject { + argc := len(args) + ev := &errorObj{partialResult: px.Undef, details: px.EmptyMap} + ev.message = args[0].String() + if argc > 1 { + ev.kind = args[1].String() + if argc > 2 { + ev.issueCode = args[2].String() + if argc > 3 { + ev.partialResult = args[3] + if argc > 4 { + ev.details = args[4].(px.OrderedMap) + } + } + } + } + ev.initType(c) + return ev +} + +func newError(c px.Context, message, kind, issueCode string, partialResult px.Value, details px.OrderedMap) serviceapi.ErrorObject { + if partialResult == nil { + partialResult = px.Undef + } + if details == nil { + details = px.EmptyMap + } + ev := &errorObj{message: message, kind: kind, issueCode: issueCode, partialResult: partialResult, details: details} + ev.initType(c) + return ev +} + +func errorFromReported(c px.Context, err issue.Reported) serviceapi.ErrorObject { + ev := &errorObj{partialResult: px.Undef, details: px.EmptyMap} + ev.message = err.Error() + ev.kind = `PUPPET_ERROR` + ev.issueCode = string(err.Code()) + if loc := err.Location(); loc != nil { + ev.details = px.SingletonMap(`location`, types.WrapString(issue.LocationString(loc))) + } + ev.initType(c) + return ev +} + +func newErrorFromHash(c px.Context, hash px.OrderedMap) serviceapi.ErrorObject { + ev := &errorObj{} + ev.message = hash.Get5(`message`, px.EmptyString).String() + ev.kind = hash.Get5(`kind`, px.EmptyString).String() + ev.issueCode = hash.Get5(`issue_code`, px.EmptyString).String() + ev.partialResult = hash.Get5(`partial_result`, px.Undef) + ev.details = hash.Get5(`details`, px.EmptyMap).(px.OrderedMap) + ev.initType(c) + return ev +} + +func (e *errorObj) Details() px.OrderedMap { + return e.details +} + +func (e *errorObj) IssueCode() string { + return e.issueCode +} + +func (e *errorObj) Kind() string { + return e.kind +} + +func (e *errorObj) Message() string { + return e.message +} + +func (e *errorObj) PartialResult() px.Value { + return e.partialResult +} + +func (e *errorObj) String() string { + return px.ToString(e) +} + +func (e *errorObj) Equals(other interface{}, guard px.Guard) bool { + if o, ok := other.(*errorObj); ok { + return e.message == o.message && e.kind == o.kind && e.issueCode == o.issueCode && + px.Equals(e.partialResult, o.partialResult, guard) && + px.Equals(e.details, o.details, guard) + } + return false +} + +func (e *errorObj) ToString(b io.Writer, s px.FormatContext, g px.RDetect) { + types.ObjectToString(e, s, b, g) +} + +func (e *errorObj) PType() px.Type { + return e.typ +} + +func (e *errorObj) Get(key string) (value px.Value, ok bool) 
{ + switch key { + case `message`: + return types.WrapString(e.message), true + case `kind`: + return types.WrapString(e.kind), true + case `issue_code`: + return types.WrapString(e.issueCode), true + case `partial_result`: + return e.partialResult, true + case `details`: + return e.details, true + default: + return nil, false + } +} + +func (e *errorObj) InitHash() px.OrderedMap { + entries := []*types.HashEntry{types.WrapHashEntry2(`message`, types.WrapString(e.message))} + if e.kind != `` { + entries = append(entries, types.WrapHashEntry2(`kind`, types.WrapString(e.kind))) + } + if e.issueCode != `` { + entries = append(entries, types.WrapHashEntry2(`issue_code`, types.WrapString(e.issueCode))) + } + if !e.partialResult.Equals(px.Undef, nil) { + entries = append(entries, types.WrapHashEntry2(`partial_result`, e.partialResult)) + } + if !e.details.Equals(px.EmptyMap, nil) { + entries = append(entries, types.WrapHashEntry2(`details`, e.details)) + } + return types.WrapHash(entries) +} + +func (e *errorObj) initType(c px.Context) { + if e.kind == `` && e.issueCode == `` { + e.typ = ErrorMetaType + } else { + params := make([]*types.HashEntry, 0) + if e.kind != `` { + params = append(params, types.WrapHashEntry2(`kind`, types.WrapString(e.kind))) + } + if e.issueCode != `` { + params = append(params, types.WrapHashEntry2(`issue_code`, types.WrapString(e.issueCode))) + } + e.typ = types.NewObjectTypeExtension(c, ErrorMetaType, []px.Value{types.WrapHash(params)}) + } +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/issues.go b/vendor/github.com/lyraproj/servicesdk/service/issues.go new file mode 100644 index 0000000..4d0345b --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/issues.go @@ -0,0 +1,33 @@ +package service + +import "github.com/lyraproj/issue/issue" + +const ( + AlreadyRegistered = `WF_ALREADY_REGISTERED` + ApiTypeNotRegistered = `WF_API_TYPE_NOT_REGISTERED` + IllegalTypeName = `WF_ILLEGAL_TYPE_NAME` + NoCommonNamespace = `WF_NO_COMMON_NAMESPACE` + NoSuchApi = `WF_NO_SUCH_API` + NoSuchMethod = `WF_NO_SUCH_METHOD` + NoSuchState = `WF_NO_SUCH_STATE` + NotFound = `WF_NOT_FOUND` + NotFunc = `WF_NOT_FUNC` + NotPuppetObject = `WF_NOT_PUPPET_OBJECT` + NoStateConverter = `WF_NO_STATE_CONVERTER` + TypeNameClash = `WF_TYPE_NAME_CLASH` +) + +func init() { + issue.Hard(AlreadyRegistered, `the %{namespace} %{identifier} API has already been registered`) + issue.Hard(ApiTypeNotRegistered, `the Go type %{type} has not been registered as an API type`) + issue.Hard(IllegalTypeName, `name must be segments starting with an uppercase letter joined with'::'. Got: '%{name}'`) + issue.Hard(NoCommonNamespace, `registered types share no common namespace`) + issue.Hard(NoSuchApi, `the '%{api}' API does not exist`) + issue.Hard(NoSuchMethod, `the '%{api}' API does not have a method named %{method}`) + issue.Hard(NoSuchState, `state '%{name}' not found`) + issue.Hard(NoStateConverter, `no state converter has been registered`) + issue.Hard(NotFound, `%{typeName} resource with external id '%{extId}' does not exist`) + issue.Hard(NotFunc, `attempt to register a function '%{name}' as a %{type}. 
Expected a func'`) + issue.Hard(NotPuppetObject, `expected resource to produce an Object, got '%{actual}'`) + issue.Hard(TypeNameClash, `attempt to register '%{goType}' using both '%{oldType}' and '%{newType}'`) +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/loader.go b/vendor/github.com/lyraproj/servicesdk/service/loader.go new file mode 100644 index 0000000..b9ebb59 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/loader.go @@ -0,0 +1,36 @@ +package service + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/lyraproj/pcore/px" +) + +var defaultWorkflowsPath []string + +func init() { + executable, err := os.Executable() + if err != nil { + panic(fmt.Errorf("failed to determine the path of the executable: %s", err.Error())) + } + executable, err = filepath.EvalSymlinks(executable) + if err != nil { + panic(fmt.Errorf("failed to eval symlinks on the executable: %s %s", executable, err.Error())) + } + executableParentDir := filepath.Dir(filepath.Dir(executable)) + // Load workflows from: + // - WORKING_DIR/workflows + // - EXECUTABLE_DIR/../workflows (to support brew and running build\lyra irrespective of working dir) + defaultWorkflowsPath = []string{".", executableParentDir} +} + +// New creates a new federated loader instance +func FederatedLoader(parentLoader px.Loader) px.Loader { + var loaders []px.ModuleLoader + for _, workflowsPathElement := range defaultWorkflowsPath { + loaders = append(loaders, px.NewFileBasedLoader(parentLoader, workflowsPathElement, "", px.PuppetDataTypePath)) + } + return px.NewDependencyLoader(loaders) +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/notfound.go b/vendor/github.com/lyraproj/servicesdk/service/notfound.go new file mode 100644 index 0000000..ca1f1fc --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/notfound.go @@ -0,0 +1,13 @@ +package service + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/serviceapi" +) + +func init() { + serviceapi.NotFound = func(typeName, extId string) error { + return px.Error(NotFound, issue.H{`typeName`: typeName, `extId`: extId}) + } +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/resourcetypebuilder.go b/vendor/github.com/lyraproj/servicesdk/service/resourcetypebuilder.go new file mode 100644 index 0000000..f16dfbb --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/resourcetypebuilder.go @@ -0,0 +1,95 @@ +package service + +import ( + "reflect" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/annotation" +) + +type ResourceTypeBuilder interface { + AddRelationship(name, to, kind, cardinality, reverseName string, keys []string) + ImmutableAttributes(names ...string) + ProvidedAttributes(names ...string) + Tags(tags map[string]string) + Build(goType interface{}) px.AnnotatedType +} + +type rtBuilder struct { + ctx px.Context + relationships []*types.HashEntry + immutableAttrs []string + providedAttrs []string + tags map[string]string +} + +func (rb *rtBuilder) AddRelationship(name, to, kind, cardinality, reverseName string, keys []string) { + ln := 4 + if reverseName != `` { + ln++ + } + es := make([]*types.HashEntry, ln) + es[0] = types.WrapHashEntry2(`type`, types.NewTypeReferenceType(to)) + es[1] = types.WrapHashEntry2(`kind`, types.WrapString(kind)) + es[2] = types.WrapHashEntry2(`cardinality`, types.WrapString(cardinality)) + es[3] = types.WrapHashEntry2(`keys`, types.WrapStrings(keys)) + if 
reverseName != `` { + es[4] = types.WrapHashEntry2(`reverseName`, types.WrapString(reverseName)) + } + rb.relationships = append(rb.relationships, types.WrapHashEntry2(name, types.WrapHash(es))) +} + +func (rb *rtBuilder) ImmutableAttributes(names ...string) { + if rb.immutableAttrs == nil { + rb.immutableAttrs = names + } else { + rb.immutableAttrs = append(rb.immutableAttrs, names...) + } +} + +func (rb *rtBuilder) ProvidedAttributes(names ...string) { + if rb.providedAttrs == nil { + rb.providedAttrs = names + } else { + rb.providedAttrs = append(rb.providedAttrs, names...) + } +} + +func (rb *rtBuilder) Tags(tags map[string]string) { + if rb.tags == nil { + rb.tags = tags + } else { + for k, v := range tags { + rb.tags[k] = v + } + } +} + +func (rb *rtBuilder) Build(goType interface{}) px.AnnotatedType { + var rt reflect.Type + switch goType := goType.(type) { + case reflect.Type: + rt = goType + case reflect.Value: + rt = goType.Type() + default: + rt = reflect.TypeOf(goType) + } + + annotations := px.EmptyMap + if rb.immutableAttrs != nil || rb.providedAttrs != nil || rb.relationships != nil { + as := make([]*types.HashEntry, 0, 3) + if rb.immutableAttrs != nil { + as = append(as, types.WrapHashEntry2(`immutableAttributes`, types.WrapStrings(rb.immutableAttrs))) + } + if rb.providedAttrs != nil { + as = append(as, types.WrapHashEntry2(`providedAttributes`, types.WrapStrings(rb.providedAttrs))) + } + if rb.relationships != nil { + as = append(as, types.WrapHashEntry2(`relationships`, types.WrapHash(rb.relationships))) + } + annotations = types.WrapHash([]*types.HashEntry{types.WrapHashEntry(annotation.ResourceType, types.WrapHash(as))}) + } + return px.NewAnnotatedType(rt, rb.tags, annotations) +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/server.go b/vendor/github.com/lyraproj/servicesdk/service/server.go new file mode 100644 index 0000000..04c9fea --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/server.go @@ -0,0 +1,111 @@ +package service + +import ( + "reflect" + "strings" + "sync" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/semver/semver" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" +) + +var ServerVersion = semver.MustParseVersion(`0.1.0`) + +type Server struct { + context px.Context + id px.TypedName + lock sync.RWMutex + typeSet px.TypeSet + metadata px.List + stateConverter wf.StateConverter + states map[string]wf.State + callables map[string]px.Value +} + +func (s *Server) AddApi(name string, callable interface{}) serviceapi.Definition { + rv := reflect.ValueOf(callable) + rt := rv.Type() + pt, ok := s.context.ImplementationRegistry().ReflectedToType(rt) + if !ok { + panic(px.Error(ApiTypeNotRegistered, issue.H{`type`: rt.Name()})) + } + + s.lock.RLock() + _, found := s.callables[name] + s.lock.RUnlock() + + if found { + panic(px.Error(AlreadyRegistered, issue.H{`namespace`: px.NsInterface, `identifier`: name})) + } + + props := make([]*types.HashEntry, 0, 2) + props = append(props, types.WrapHashEntry2(`interface`, pt)) + props = append(props, types.WrapHashEntry2(`style`, types.WrapString(`callable`))) + def := serviceapi.NewDefinition(px.NewTypedName(px.NsDefinition, name), s.id, types.WrapHash(props)) + + nmd := s.metadata.Add(def) + cls := px.WrapReflected(s.context, rv) + + s.lock.Lock() + s.callables[name] = cls + s.metadata = nmd + s.lock.Unlock() + + return def +} + +func (s 
*Server) State(c px.Context, name string, parameters px.OrderedMap) px.PuppetObject { + if s.stateConverter != nil { + s.lock.RLock() + st, ok := s.states[name] + s.lock.RUnlock() + if ok { + return s.stateConverter(c, st, parameters) + } + panic(px.Error(NoSuchState, issue.H{`name`: name})) + } + panic(px.Error(NoStateConverter, issue.H{`name`: name})) +} + +func (s *Server) Identifier(px.Context) px.TypedName { + return s.id +} + +func (s *Server) Invoke(c px.Context, api, name string, arguments ...px.Value) (result px.Value) { + s.lock.RLock() + api = strings.Title(api) + iv, ok := s.callables[api] + s.lock.RUnlock() + if ok { + if m, ok := iv.PType().(px.TypeWithCallableMembers).Member(name); ok { + defer func() { + if x := recover(); x != nil { + hclog.Default().Error(`Invoke failed`, `error`, x) + if err, ok := x.(issue.Reported); ok && string(err.Code()) == px.GoFunctionError { + result = serviceapi.ErrorFromReported(c, err) + return + } + panic(x) + } + }() + hclog.Default().Debug(`Invoke`, `api`, api, `name`, name) + result = m.Call(c, iv, nil, arguments) + return + } + panic(px.Error(NoSuchMethod, issue.H{`api`: api, `method`: name})) + } + panic(px.Error(NoSuchApi, issue.H{`api`: api})) +} + +func (s *Server) Metadata(px.Context) (typeSet px.TypeSet, definitions []serviceapi.Definition) { + ds := make([]serviceapi.Definition, s.metadata.Len()) + s.lock.RLock() + s.metadata.EachWithIndex(func(v px.Value, i int) { ds[i] = v.(serviceapi.Definition) }) + s.lock.RUnlock() + return s.typeSet, ds +} diff --git a/vendor/github.com/lyraproj/servicesdk/service/subservice.go b/vendor/github.com/lyraproj/servicesdk/service/subservice.go new file mode 100644 index 0000000..b6010d0 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/service/subservice.go @@ -0,0 +1,55 @@ +package service + +import ( + "fmt" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" +) + +type subService struct { + def serviceapi.Definition +} + +func NewSubService(def serviceapi.Definition) serviceapi.Service { + return &subService{def} +} + +func (s *subService) Parent(c px.Context) serviceapi.Service { + x, ok := px.Load(c, s.def.ServiceId()) + if !ok { + panic(fmt.Errorf("failed to load %s", s.def.ServiceId())) + } + return x.(serviceapi.Service) +} + +func (s *subService) Invoke(c px.Context, identifier, name string, arguments ...px.Value) px.Value { + args := make([]px.Value, 2, 2+len(arguments)) + args[0] = types.WrapString(identifier) + args[1] = types.WrapString(name) + args = append(args, arguments...) + return s.Parent(c).Invoke(c, s.def.Identifier().Name(), "invoke", args...) 
+} + +func (s *subService) Metadata(c px.Context) (typeSet px.TypeSet, definitions []serviceapi.Definition) { + v := s.Parent(c).Invoke(c, s.def.Identifier().Name(), "metadata").(px.List) + if ts, ok := v.At(0).(px.TypeSet); ok { + typeSet = ts + } + if dl, ok := v.At(1).(px.List); ok { + definitions = make([]serviceapi.Definition, dl.Len()) + dl.EachWithIndex(func(d px.Value, i int) { + definitions[i] = d.(serviceapi.Definition) + }) + } + return +} + +func (s *subService) State(c px.Context, name string, parameters px.OrderedMap) px.PuppetObject { + return s.Parent(c).Invoke(c, s.def.Identifier().Name(), "state", types.WrapString(name), parameters).(px.PuppetObject) +} + +func (s *subService) Identifier(px.Context) px.TypedName { + return px.NewTypedName(px.NsService, s.def.Identifier().Name()) +} diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/definition.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/definition.go new file mode 100644 index 0000000..0011ee5 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/definition.go @@ -0,0 +1,25 @@ +package serviceapi + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var DefinitionMetaType px.Type + +type Definition interface { + px.Value + issue.Labeled + + // Identifier returns a TypedName that uniquely identifies the step within the service. + Identifier() px.TypedName + + // ServiceId is the identifier of the service + ServiceId() px.TypedName + + // Properties is an ordered map of properties of this definition. Will be of type + // Hash[Pattern[/\A[a-z][A-Za-z]+\z/],RichData] + Properties() px.OrderedMap +} + +var NewDefinition func(identity, serviceId px.TypedName, properties px.OrderedMap) Definition diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/error.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/error.go new file mode 100644 index 0000000..8226b7a --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/error.go @@ -0,0 +1,35 @@ +package serviceapi + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type ErrorObject interface { + px.PuppetObject + + // Kind returns the error kind + Kind() string + + // Message returns the error message + Message() string + + // IssueCode returns the issue code + IssueCode() string + + // PartialResult returns the optional partial result. It returns + // pcore.UNDEF if no partial result exists + PartialResult() px.Value + + // Details returns the optional details. It returns + // an empty map when no details exist + Details() px.OrderedMap +} + +var ErrorFromReported func(c px.Context, err issue.Reported) ErrorObject + +var NewError func(c px.Context, message, kind, issueCode string, partialResult px.Value, details px.OrderedMap) ErrorObject + +// NotFound returns the special NotFound error which is recognized by the Lyra workflow engine. It should +// be used when requests are made to read, update, or delete a resource with an external id that no longer exists. 
+var NotFound func(typeName, extId string) error diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/identity.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/identity.go new file mode 100644 index 0000000..1d75031 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/identity.go @@ -0,0 +1,19 @@ +package serviceapi + +import "github.com/lyraproj/pcore/px" + +// Identity defines the API for services that track mappings between internal and external IDs +type Identity interface { + BumpEra() error + ReadEra() (era int64, err error) + Associate(internalID string, externalID string) error + GetExternal(internalID string) (string, error) + GetInternal(externalID string) (string, error) + PurgeExternal(externalID string) error + PurgeInternal(internalID string) error + RemoveExternal(externalID string) error + RemoveInternal(internalID string) error + Search(internalIDPrefix string) (px.List, error) + Sweep(internalIDPrefix string) error + Garbage(internalIDPrefix string) (px.List, error) +} diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/invokable.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/invokable.go new file mode 100644 index 0000000..2273460 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/invokable.go @@ -0,0 +1,9 @@ +package serviceapi + +import "github.com/lyraproj/pcore/px" + +type Invokable interface { + // Invoke will call a method with the given name on the object identified by the given + // identifier and return the result. + Invoke(c px.Context, identifier, name string, arguments ...px.Value) px.Value +} diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/metadata.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/metadata.go new file mode 100644 index 0000000..f3b88d9 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/metadata.go @@ -0,0 +1,9 @@ +package serviceapi + +import ( + "github.com/lyraproj/pcore/px" +) + +type Metadata interface { + Metadata(px.Context) (typeSet px.TypeSet, definitions []Definition) +} diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/server.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/server.go new file mode 100644 index 0000000..7ade1ed --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/server.go @@ -0,0 +1,11 @@ +package serviceapi + +import "github.com/lyraproj/pcore/px" + +type Service interface { + Invokable + Metadata + StateResolver + + Identifier(px.Context) px.TypedName +} diff --git a/vendor/github.com/lyraproj/servicesdk/serviceapi/stateresolver.go b/vendor/github.com/lyraproj/servicesdk/serviceapi/stateresolver.go new file mode 100644 index 0000000..8576100 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/serviceapi/stateresolver.go @@ -0,0 +1,9 @@ +package serviceapi + +import "github.com/lyraproj/pcore/px" + +type StateResolver interface { + // State looks up a state that has been previously registered with the given name, + // resolves it using the given parameters, and returns the created state object. + State(c px.Context, name string, parameters px.OrderedMap) px.PuppetObject +} diff --git a/vendor/github.com/lyraproj/servicesdk/servicepb/service.pb.go b/vendor/github.com/lyraproj/servicesdk/servicepb/service.pb.go new file mode 100644 index 0000000..6e97bb8 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/servicepb/service.pb.go @@ -0,0 +1,331 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: servicepb/service.proto + +/* +Package servicepb is a generated protocol buffer package. + +It is generated from these files: + servicepb/service.proto + +It has these top-level messages: + MetadataResponse + InvokeRequest + EmptyRequest + StateRequest +*/ +package servicepb + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import puppet_datapb "github.com/lyraproj/data-protobuf/datapb" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type MetadataResponse struct { + Typeset *puppet_datapb.Data `protobuf:"bytes,1,opt,name=typeset" json:"typeset,omitempty"` + Definitions *puppet_datapb.Data `protobuf:"bytes,2,opt,name=definitions" json:"definitions,omitempty"` +} + +func (m *MetadataResponse) Reset() { *m = MetadataResponse{} } +func (m *MetadataResponse) String() string { return proto.CompactTextString(m) } +func (*MetadataResponse) ProtoMessage() {} +func (*MetadataResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *MetadataResponse) GetTypeset() *puppet_datapb.Data { + if m != nil { + return m.Typeset + } + return nil +} + +func (m *MetadataResponse) GetDefinitions() *puppet_datapb.Data { + if m != nil { + return m.Definitions + } + return nil +} + +type InvokeRequest struct { + Identifier string `protobuf:"bytes,1,opt,name=identifier" json:"identifier,omitempty"` + Method string `protobuf:"bytes,2,opt,name=method" json:"method,omitempty"` + Arguments *puppet_datapb.Data `protobuf:"bytes,3,opt,name=arguments" json:"arguments,omitempty"` +} + +func (m *InvokeRequest) Reset() { *m = InvokeRequest{} } +func (m *InvokeRequest) String() string { return proto.CompactTextString(m) } +func (*InvokeRequest) ProtoMessage() {} +func (*InvokeRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *InvokeRequest) GetIdentifier() string { + if m != nil { + return m.Identifier + } + return "" +} + +func (m *InvokeRequest) GetMethod() string { + if m != nil { + return m.Method + } + return "" +} + +func (m *InvokeRequest) GetArguments() *puppet_datapb.Data { + if m != nil { + return m.Arguments + } + return nil +} + +type EmptyRequest struct { +} + +func (m *EmptyRequest) Reset() { *m = EmptyRequest{} } +func (m *EmptyRequest) String() string { return proto.CompactTextString(m) } +func (*EmptyRequest) ProtoMessage() {} +func (*EmptyRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} } + +type StateRequest struct { + Identifier string `protobuf:"bytes,1,opt,name=identifier" json:"identifier,omitempty"` + Parameters *puppet_datapb.Data `protobuf:"bytes,2,opt,name=parameters" json:"parameters,omitempty"` +} + +func (m *StateRequest) Reset() { *m = StateRequest{} } +func (m *StateRequest) String() string { return proto.CompactTextString(m) } +func (*StateRequest) ProtoMessage() {} +func (*StateRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} } + +func (m *StateRequest) GetIdentifier() string { + if m != nil { + return m.Identifier + } + return "" +} + 
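// Editor's illustrative sketch; not part of the generated file or of this patch. It shows
// roughly how the DefinitionService client generated further down might be called. The dial
// target, the insecure transport option, and the helper name are assumptions for the example.
func exampleFetchMetadata(addr string) (*MetadataResponse, error) {
	conn, err := grpc.Dial(addr, grpc.WithInsecure())
	if err != nil {
		return nil, err
	}
	defer conn.Close()

	client := NewDefinitionServiceClient(conn)
	// Metadata returns the service's type set and step definitions, both carried
	// as puppet_datapb.Data values inside the MetadataResponse message.
	return client.Metadata(context.Background(), &EmptyRequest{})
}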
+func (m *StateRequest) GetParameters() *puppet_datapb.Data { + if m != nil { + return m.Parameters + } + return nil +} + +func init() { + proto.RegisterType((*MetadataResponse)(nil), "puppet.service.MetadataResponse") + proto.RegisterType((*InvokeRequest)(nil), "puppet.service.InvokeRequest") + proto.RegisterType((*EmptyRequest)(nil), "puppet.service.EmptyRequest") + proto.RegisterType((*StateRequest)(nil), "puppet.service.StateRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Client API for DefinitionService service + +type DefinitionServiceClient interface { + Identity(ctx context.Context, in *EmptyRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) + Invoke(ctx context.Context, in *InvokeRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) + Metadata(ctx context.Context, in *EmptyRequest, opts ...grpc.CallOption) (*MetadataResponse, error) + State(ctx context.Context, in *StateRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) +} + +type definitionServiceClient struct { + cc *grpc.ClientConn +} + +func NewDefinitionServiceClient(cc *grpc.ClientConn) DefinitionServiceClient { + return &definitionServiceClient{cc} +} + +func (c *definitionServiceClient) Identity(ctx context.Context, in *EmptyRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) { + out := new(puppet_datapb.Data) + err := grpc.Invoke(ctx, "/puppet.service.DefinitionService/Identity", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *definitionServiceClient) Invoke(ctx context.Context, in *InvokeRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) { + out := new(puppet_datapb.Data) + err := grpc.Invoke(ctx, "/puppet.service.DefinitionService/Invoke", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *definitionServiceClient) Metadata(ctx context.Context, in *EmptyRequest, opts ...grpc.CallOption) (*MetadataResponse, error) { + out := new(MetadataResponse) + err := grpc.Invoke(ctx, "/puppet.service.DefinitionService/Metadata", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *definitionServiceClient) State(ctx context.Context, in *StateRequest, opts ...grpc.CallOption) (*puppet_datapb.Data, error) { + out := new(puppet_datapb.Data) + err := grpc.Invoke(ctx, "/puppet.service.DefinitionService/State", in, out, c.cc, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// Server API for DefinitionService service + +type DefinitionServiceServer interface { + Identity(context.Context, *EmptyRequest) (*puppet_datapb.Data, error) + Invoke(context.Context, *InvokeRequest) (*puppet_datapb.Data, error) + Metadata(context.Context, *EmptyRequest) (*MetadataResponse, error) + State(context.Context, *StateRequest) (*puppet_datapb.Data, error) +} + +func RegisterDefinitionServiceServer(s *grpc.Server, srv DefinitionServiceServer) { + s.RegisterService(&_DefinitionService_serviceDesc, srv) +} + +func _DefinitionService_Identity_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EmptyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DefinitionServiceServer).Identity(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/puppet.service.DefinitionService/Identity", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DefinitionServiceServer).Identity(ctx, req.(*EmptyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DefinitionService_Invoke_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InvokeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DefinitionServiceServer).Invoke(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/puppet.service.DefinitionService/Invoke", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DefinitionServiceServer).Invoke(ctx, req.(*InvokeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DefinitionService_Metadata_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EmptyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DefinitionServiceServer).Metadata(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/puppet.service.DefinitionService/Metadata", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DefinitionServiceServer).Metadata(ctx, req.(*EmptyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DefinitionService_State_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DefinitionServiceServer).State(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/puppet.service.DefinitionService/State", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DefinitionServiceServer).State(ctx, req.(*StateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DefinitionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "puppet.service.DefinitionService", + HandlerType: (*DefinitionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Identity", + Handler: _DefinitionService_Identity_Handler, + }, + { + MethodName: "Invoke", + Handler: _DefinitionService_Invoke_Handler, 
+ }, + { + MethodName: "Metadata", + Handler: _DefinitionService_Metadata_Handler, + }, + { + MethodName: "State", + Handler: _DefinitionService_State_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "servicepb/service.proto", +} + +func init() { proto.RegisterFile("servicepb/service.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 347 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x41, 0x4f, 0xf2, 0x40, + 0x10, 0x05, 0xbe, 0xc0, 0x07, 0x03, 0x1f, 0xf9, 0x58, 0x13, 0x25, 0x44, 0x0d, 0xe9, 0x49, 0x0f, + 0x6c, 0x23, 0xc6, 0x9b, 0xd1, 0xc4, 0xe0, 0x81, 0x44, 0x2f, 0xe5, 0xa4, 0xb7, 0x96, 0x0e, 0xb0, + 0xc1, 0xee, 0xae, 0xdd, 0x29, 0x11, 0x7f, 0x8d, 0x3f, 0xd5, 0xd0, 0x16, 0x2c, 0x68, 0xd5, 0x53, + 0xdb, 0xe9, 0x9b, 0xf7, 0xde, 0xbc, 0x19, 0x38, 0x30, 0x18, 0x2e, 0xc4, 0x18, 0xb5, 0x67, 0xa7, + 0x6f, 0x5c, 0x87, 0x8a, 0x14, 0x6b, 0xea, 0x48, 0x6b, 0x24, 0x9e, 0x56, 0x3b, 0x2d, 0xdf, 0x25, + 0x57, 0x7b, 0xf6, 0xea, 0x91, 0x40, 0xac, 0x17, 0xf8, 0x7f, 0x8f, 0xe4, 0xae, 0x2a, 0x0e, 0x1a, + 0xad, 0xa4, 0x41, 0xd6, 0x83, 0xbf, 0xb4, 0xd4, 0x68, 0x90, 0xda, 0xc5, 0x6e, 0xf1, 0xa4, 0xde, + 0xdf, 0xe3, 0x29, 0x51, 0xd2, 0xcf, 0x07, 0x2b, 0xf4, 0x1a, 0xc3, 0x2e, 0xa0, 0xee, 0xe3, 0x44, + 0x48, 0x41, 0x42, 0x49, 0xd3, 0x2e, 0xe5, 0xb7, 0x64, 0x71, 0xd6, 0x2b, 0xfc, 0x1b, 0xca, 0x85, + 0x9a, 0xa3, 0x83, 0xcf, 0x11, 0x1a, 0x62, 0xc7, 0x00, 0xc2, 0x47, 0x49, 0x62, 0x22, 0x30, 0x8c, + 0x95, 0x6b, 0x4e, 0xa6, 0xc2, 0xf6, 0xa1, 0x12, 0x20, 0xcd, 0x94, 0x1f, 0x4b, 0xd4, 0x9c, 0xf4, + 0x8b, 0x9d, 0x41, 0xcd, 0x0d, 0xa7, 0x51, 0x80, 0x92, 0x4c, 0xfb, 0x4f, 0xbe, 0xfa, 0x07, 0xca, + 0x6a, 0x42, 0xe3, 0x36, 0xd0, 0xb4, 0x4c, 0xa5, 0xad, 0x07, 0x68, 0x8c, 0xc8, 0xa5, 0x5f, 0x5b, + 0x39, 0x85, 0xb2, 0x90, 0x3a, 0xa2, 0xef, 0x86, 0x4d, 0x10, 0xfd, 0xb7, 0x12, 0xb4, 0x06, 0x9b, + 0xb1, 0x47, 0xc9, 0x26, 0xd8, 0x35, 0x54, 0x87, 0x31, 0x1d, 0x2d, 0xd9, 0x21, 0xdf, 0x5e, 0x13, + 0xcf, 0x5a, 0xeb, 0x7c, 0xc5, 0x6d, 0x15, 0xd8, 0x15, 0x54, 0x92, 0xf4, 0xd8, 0xd1, 0x6e, 0xfb, + 0x56, 0xaa, 0x79, 0xfd, 0x77, 0x50, 0x5d, 0xef, 0xfd, 0x07, 0x03, 0xdd, 0xdd, 0xbf, 0xbb, 0xf7, + 0x62, 0x15, 0xd8, 0x25, 0x94, 0xe3, 0xfc, 0x3e, 0x53, 0x65, 0x63, 0xcd, 0xf1, 0x72, 0x63, 0x3f, + 0xf6, 0xa6, 0x82, 0x66, 0x91, 0xc7, 0xc7, 0x2a, 0xb0, 0x13, 0xc8, 0x93, 0xeb, 0x19, 0x7b, 0xaa, + 0x7a, 0x29, 0x8f, 0xf1, 0xe7, 0xf6, 0xe6, 0xca, 0xbd, 0x4a, 0x7c, 0xbb, 0xe7, 0xef, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x2f, 0xcf, 0xf2, 0x36, 0xf9, 0x02, 0x00, 0x00, +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/action.go b/vendor/github.com/lyraproj/servicesdk/wf/action.go new file mode 100644 index 0000000..e2113d3 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/action.go @@ -0,0 +1,28 @@ +package wf + +import ( + "github.com/lyraproj/pcore/px" +) + +type Action interface { + Step + + Function() interface{} +} + +type action struct { + step + function interface{} +} + +func MakeAction(name string, when Condition, parameters, returns []px.Parameter, function interface{}) Action { + return &action{step{name, when, parameters, returns}, function} +} + +func (s *action) Label() string { + return `action ` + s.name +} + +func (s *action) Function() interface{} { + return s.function +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/builder.go b/vendor/github.com/lyraproj/servicesdk/wf/builder.go new file mode 100644 index 0000000..95e7492 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/builder.go @@ -0,0 +1,410 @@ +package wf + 
+import ( + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var noParams = make([]px.Parameter, 0) + +func LeafName(name string) string { + names := strings.Split(name, `::`) + return names[len(names)-1] +} + +type Builder interface { + Context() px.Context + Build() Step + Name(string) + When(string) + Parameters(...px.Parameter) + Returns(...px.Parameter) + QualifyName(childName string) string + GetParameters() []px.Parameter + GetName() string + Parameter(name, typeName string) px.Parameter +} + +type ChildBuilder interface { + Builder + StateHandler(func(StateHandlerBuilder)) + Resource(func(ResourceBuilder)) + Workflow(func(WorkflowBuilder)) + Action(func(ActionBuilder)) + Reference(func(ReferenceBuilder)) + AddChild(Builder) + Iterator(func(IteratorBuilder)) +} + +type APIBuilder interface { + Builder + API(interface{}) +} + +type StateHandlerBuilder interface { + Builder + API(interface{}) +} + +type IteratorBuilder interface { + ChildBuilder + Style(IterationStyle) + Over(px.Value) + Variables(...px.Parameter) + Into(into string) +} + +type ResourceBuilder interface { + Builder + ExternalId(extId string) + State(state State) + StateStruct(state interface{}) +} + +type ActionBuilder interface { + Builder + Doer(interface{}) +} + +type ReferenceBuilder interface { + Builder + ReferenceTo(string) +} + +type WorkflowBuilder interface { + ChildBuilder +} + +func NewStateHandler(ctx px.Context, bf func(StateHandlerBuilder)) StateHandler { + bld := &stateHandlerBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}} + bf(bld) + return bld.Build().(StateHandler) +} + +func NewIterator(ctx px.Context, bf func(IteratorBuilder)) Iterator { + bld := &iteratorBuilder{childBuilder: childBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}}} + bf(bld) + return bld.Build().(Iterator) +} + +func NewResource(ctx px.Context, bf func(ResourceBuilder)) Resource { + bld := &resourceBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}} + bf(bld) + return bld.Build().(Resource) +} + +func NewAction(ctx px.Context, bf func(ActionBuilder)) Action { + bld := &actionBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}} + bf(bld) + return bld.Build().(Action) +} + +func NewReference(ctx px.Context, bf func(ReferenceBuilder)) Reference { + bld := &referenceBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}} + bf(bld) + return bld.Build().(Reference) +} + +func NewWorkflow(ctx px.Context, bf func(WorkflowBuilder)) Workflow { + bld := &workflowBuilder{childBuilder: childBuilder{builder: builder{ctx: ctx, when: Always, parameters: noParams, returns: noParams}}} + bf(bld) + return bld.Build().(Workflow) +} + +type builder struct { + ctx px.Context + name string + when Condition + parameters []px.Parameter + returns []px.Parameter + parent Builder +} + +func (b *builder) Context() px.Context { + return b.ctx +} + +func (b *builder) Name(n string) { + b.name = n +} + +func (b *builder) When(w string) { + if w == `` { + b.when = Always + } else { + b.when = Parse(w) + } +} + +func (b *builder) validate() { + if b.name == `` { + panic(px.Error(StepNoName, issue.NoArgs)) + } +} + +func (b *builder) Parameter(name, typeName string) px.Parameter { + return px.NewParameter(name, b.ctx.ParseType(typeName), nil, false) +} + +func (b *builder) GetParameters() []px.Parameter { + return b.parameters +} + 
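// Editor's illustrative sketch; not part of this patch. It shows roughly how the builder
// callbacks defined in this file can be composed into a workflow with one child action step.
// The step and parameter names and the trivial action function are assumptions for the example.
func exampleBuildWorkflow(ctx px.Context) Workflow {
	return NewWorkflow(ctx, func(b WorkflowBuilder) {
		b.Name(`Example`)
		b.Parameters(b.Parameter(`message`, `String`))
		b.Action(func(a ActionBuilder) {
			a.Name(`echo`)
			a.Parameters(a.Parameter(`message`, `String`))
			// A real Doer would normally be a function whose signature matches the
			// declared parameters and returns of the step.
			a.Doer(func(message string) {})
		})
	})
}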
+func (b *builder) QualifyName(childName string) string { + return b.GetName() + `::` + childName +} + +func (b *builder) GetName() string { + if b.parent != nil { + return b.parent.QualifyName(b.name) + } + return b.name +} + +func (b *builder) Parameters(parameters ...px.Parameter) { + if len(b.parameters) == 0 { + b.parameters = parameters + } else { + b.parameters = append(b.parameters, parameters...) + } +} + +func (b *builder) Returns(returns ...px.Parameter) { + if len(b.returns) == 0 { + b.returns = returns + } else { + b.returns = append(b.returns, returns...) + } +} + +type stateHandlerBuilder struct { + builder + api interface{} +} + +func (b *stateHandlerBuilder) API(c interface{}) { + b.api = c +} + +func (b *stateHandlerBuilder) Build() Step { + b.validate() + return MakeStateHandler(b.GetName(), b.when, b.parameters, b.returns, b.api) +} + +type childBuilder struct { + builder + children []Step +} + +func stateHandlerChild(b ChildBuilder, bld func(b StateHandlerBuilder)) { + ab := &stateHandlerBuilder{builder: builder{parent: b, ctx: b.Context(), when: Always, parameters: noParams, returns: noParams}} + bld(ab) + b.AddChild(ab) +} + +func resourceChild(b ChildBuilder, bld func(b ResourceBuilder)) { + ab := &resourceBuilder{builder: builder{parent: b, ctx: b.Context(), when: Always, parameters: noParams, returns: noParams}} + bld(ab) + b.AddChild(ab) +} + +func workflowChild(b ChildBuilder, bld func(b WorkflowBuilder)) { + ab := &workflowBuilder{childBuilder: childBuilder{builder: builder{parent: b, ctx: b.Context(), when: Always, parameters: noParams, returns: noParams}}} + bld(ab) + b.AddChild(ab) +} + +func actionChild(b ChildBuilder, bld func(b ActionBuilder)) { + ab := &actionBuilder{builder: builder{parent: b, ctx: b.Context(), when: Always, parameters: noParams, returns: noParams}} + bld(ab) + b.AddChild(ab) +} + +func referenceChild(b ChildBuilder, bld func(b ReferenceBuilder)) { + ab := &referenceBuilder{builder: builder{parent: b, ctx: b.Context(), when: Always, parameters: noParams, returns: noParams}} + bld(ab) + b.AddChild(ab) +} + +func (b *childBuilder) AddChild(child Builder) { + b.children = append(b.children, child.Build()) +} + +type iteratorBuilder struct { + childBuilder + style IterationStyle + over px.Value + variables []px.Parameter + into string +} + +func (b *iteratorBuilder) StateHandler(bld func(b StateHandlerBuilder)) { + stateHandlerChild(b, bld) +} + +func (b *iteratorBuilder) Resource(bld func(b ResourceBuilder)) { + resourceChild(b, bld) +} + +func (b *iteratorBuilder) Workflow(bld func(b WorkflowBuilder)) { + workflowChild(b, bld) +} + +func (b *iteratorBuilder) Action(bld func(b ActionBuilder)) { + actionChild(b, bld) +} + +func (b *iteratorBuilder) Reference(bld func(b ReferenceBuilder)) { + referenceChild(b, bld) +} + +func (b *iteratorBuilder) Iterator(bld func(b IteratorBuilder)) { + ab := &iteratorBuilder{childBuilder: childBuilder{builder: builder{parent: b, ctx: b.ctx, when: Always, parameters: noParams, returns: noParams}}} + bld(ab) + b.AddChild(ab) +} + +func (b *iteratorBuilder) GetName() string { + if b.name == `` { + if len(b.children) != 1 { + panic(`ouch`) + } + return b.children[0].Name() + } + return b.parent.QualifyName(b.name) +} + +func (b *iteratorBuilder) QualifyName(childName string) string { + if b.parent == nil { + return childName + } + return b.parent.QualifyName(childName) +} + +func (b *iteratorBuilder) Style(style IterationStyle) { + b.style = style +} + +func (b *iteratorBuilder) Over(over px.Value) { + b.over 
= over +} + +func (b *iteratorBuilder) Into(into string) { + b.into = into +} + +func (b *iteratorBuilder) Variables(variables ...px.Parameter) { + if len(b.variables) == 0 { + b.variables = variables + } else { + b.variables = append(b.variables, variables...) + } +} + +func (b *iteratorBuilder) Build() Step { + b.validate() + return MakeIterator(b.GetName(), b.when, b.parameters, b.returns, b.style, b.children[0], b.over, b.variables, b.into) +} + +func (b *iteratorBuilder) validate() { + if len(b.children) != 1 { + panic(px.Error(IteratorNotOneStep, issue.NoArgs)) + } +} + +type resourceBuilder struct { + builder + state State + extId string +} + +func (b *resourceBuilder) Build() Step { + b.validate() + return MakeResource(b.GetName(), b.when, b.parameters, b.returns, b.extId, b.state) +} + +func (b *resourceBuilder) State(state State) { + b.state = state +} + +func (b *resourceBuilder) ExternalId(extId string) { + b.extId = extId +} + +// RegisterState registers a struct as a state. The state type is inferred from the +// struct +func (b *resourceBuilder) StateStruct(state interface{}) { + /* TODO: Fix this b.state = newGoState(pt.(px.ObjectType), rv) + rv := reflect.ValueOf(state) + rt := rv.Type() + pt, ok := b.ctx.ImplementationRegistry().ReflectedToType(rt) + if !ok { + pt = b.ctx.Reflector().TypeFromReflect(b.GetName(), nil, rt) + } + */ +} + +type workflowBuilder struct { + childBuilder +} + +func (b *workflowBuilder) Build() Step { + b.validate() + return MakeWorkflow(b.GetName(), b.when, b.parameters, b.returns, b.children) +} + +func (b *workflowBuilder) StateHandler(bld func(b StateHandlerBuilder)) { + stateHandlerChild(b, bld) +} + +func (b *workflowBuilder) Resource(bld func(b ResourceBuilder)) { + resourceChild(b, bld) +} + +func (b *workflowBuilder) Workflow(bld func(b WorkflowBuilder)) { + workflowChild(b, bld) +} + +func (b *workflowBuilder) Action(bld func(b ActionBuilder)) { + actionChild(b, bld) +} + +func (b *workflowBuilder) Reference(bld func(b ReferenceBuilder)) { + referenceChild(b, bld) +} + +func (b *workflowBuilder) Iterator(bld func(b IteratorBuilder)) { + ab := &iteratorBuilder{childBuilder: childBuilder{builder: builder{parent: b, ctx: b.ctx, when: Always, parameters: noParams, returns: noParams}}} + bld(ab) + b.AddChild(ab) +} + +type actionBuilder struct { + builder + function interface{} +} + +func (b *actionBuilder) Build() Step { + b.validate() + return MakeAction(b.GetName(), b.when, b.parameters, b.returns, b.function) +} + +func (b *actionBuilder) Doer(d interface{}) { + b.function = d +} + +type referenceBuilder struct { + builder + referencedStep string +} + +func (b *referenceBuilder) Build() Step { + b.validate() + return MakeReference(b.GetName(), b.when, b.parameters, b.returns, b.referencedStep) +} + +func (b *referenceBuilder) ReferenceTo(referencedStep string) { + b.referencedStep = referencedStep +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/condition.go b/vendor/github.com/lyraproj/servicesdk/wf/condition.go new file mode 100644 index 0000000..3e907a8 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/condition.go @@ -0,0 +1,206 @@ +package wf + +import ( + "bytes" + "fmt" + "sort" + + "github.com/lyraproj/pcore/px" +) + +type boolean bool + +const Always = boolean(true) +const Never = boolean(false) + +// A Condition evaluates to true or false depending on its given parameters +type Condition interface { + fmt.Stringer + + // Precedence returns the operator precedence for this Condition + Precedence() int + + // 
IsTrue returns true if the given parameters satisfies the condition, false otherwise + IsTrue(parameters px.OrderedMap) bool + + // Returns all names in use by this condition and its nested conditions. The returned + // slice is guaranteed to be unique and sorted alphabetically + Names() []string +} + +// Boolean returns that Condition that yields the given boolean +func Boolean(v bool) Condition { + return boolean(v) +} + +func (b boolean) String() string { + if b { + return `true` + } + return `false` +} + +func (b boolean) Precedence() int { + return 5 +} + +func (b boolean) IsTrue(parameters px.OrderedMap) bool { + return bool(b) +} + +func (b boolean) Names() []string { + return []string{} +} + +type truthy string + +// Truthy returns a Condition that yields true when the variable +// named by the given name contains a truthy value (i.e. not undef or false) +func Truthy(name string) Condition { + return truthy(name) +} + +func (v truthy) IsTrue(parameters px.OrderedMap) bool { + value, ok := parameters.Get4(string(v)) + return ok && px.IsTruthy(value) +} + +func (v truthy) Names() []string { + return []string{string(v)} +} + +func (v truthy) Precedence() int { + return 4 +} + +func (v truthy) String() string { + return string(v) +} + +// Not returns a Condition that yields true when the given condition +// yields false +func Not(condition Condition) Condition { + return ¬{condition} +} + +type not struct { + condition Condition +} + +func (n *not) IsTrue(parameters px.OrderedMap) bool { + return !n.condition.IsTrue(parameters) +} + +func (n *not) Names() []string { + return n.condition.Names() +} + +func (n *not) Precedence() int { + return 3 +} + +func (n *not) String() string { + b := bytes.NewBufferString(`!`) + emitContained(n.condition, n.Precedence(), b) + return b.String() +} + +type and struct { + conditions []Condition +} + +// And returns a Condition that yields true when all given conditions +// yield true +func And(conditions []Condition) Condition { + return &and{conditions} +} + +func (a *and) IsTrue(parameters px.OrderedMap) bool { + for _, condition := range a.conditions { + if !condition.IsTrue(parameters) { + return false + } + } + return true +} + +func (a *and) Names() []string { + return mergeNames(a.conditions) +} + +func (a *and) Precedence() int { + return 2 +} + +func (a *and) String() string { + return concat(a.conditions, a.Precedence(), `and`) +} + +// Or returns a Condition that yields true when at least one of the given conditions +// yield true +func Or(conditions []Condition) Condition { + return &or{conditions} +} + +type or struct { + conditions []Condition +} + +func (o *or) IsTrue(parameters px.OrderedMap) bool { + for _, condition := range o.conditions { + if condition.IsTrue(parameters) { + return true + } + } + return false +} + +func (o *or) Names() []string { + return mergeNames(o.conditions) +} + +func (o *or) Precedence() int { + return 1 +} + +func (o *or) String() string { + return concat(o.conditions, o.Precedence(), `or`) +} + +func mergeNames(conditions []Condition) []string { + h := make(map[string]bool) + for _, c := range conditions { + for _, n := range c.Names() { + h[n] = true + } + } + names := make([]string, 0, len(h)) + for n := range h { + names = append(names, n) + } + sort.Strings(names) + return names +} + +func concat(conditions []Condition, precedence int, op string) string { + b := bytes.NewBufferString(``) + for i, c := range conditions { + if i > 0 { + b.WriteByte(' ') + b.WriteString(op) + b.WriteByte(' ') + } + 
emitContained(c, precedence, b) + } + return b.String() +} + +func emitContained(c Condition, p int, b *bytes.Buffer) { + if p > c.Precedence() { + b.WriteByte('(') + b.WriteString(c.String()) + b.WriteByte(')') + } else { + b.WriteString(c.String()) + } +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/crd.go b/vendor/github.com/lyraproj/servicesdk/wf/crd.go new file mode 100644 index 0000000..82badb4 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/crd.go @@ -0,0 +1,34 @@ +package wf + +import "github.com/lyraproj/pcore/px" + +type ErrorConstant string + +func (e ErrorConstant) Error() string { + return string(e) +} + +// Error returned by Read, Delete, and Update when the requested state isn't found +const NotFound = ErrorConstant(`not found`) + +type CRD interface { + // Create creates the desired state and returns a possibly amended version of that state + // together with the externalId by which the state can henceforth be identified. + Create(state px.OrderedMap) (px.OrderedMap, string, error) + + // Read reads and returns the current state identified by the given externalId. The error NotFound + // is returned when no state can be found. + Read(externalId string) (px.OrderedMap, error) + + // Delete deletes the state identified by the given externalId. The error NotFound is returned when + // no state can be found. + Delete(externalId string) error +} + +type CRUD interface { + CRD + + // Update updates the state identified by the given externalId to a new state and returns a possibly + // amended version of that state. The error NotFound is returned when no state can be found. + Update(externalId string, state px.OrderedMap) (px.OrderedMap, error) +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/issues.go b/vendor/github.com/lyraproj/servicesdk/wf/issues.go new file mode 100644 index 0000000..7c68f14 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/issues.go @@ -0,0 +1,25 @@ +package wf + +import "github.com/lyraproj/issue/issue" + +const ( + ConditionSyntaxError = `WF_CONDITION_SYNTAX_ERROR` + ConditionMissingRp = `WF_CONDITION_MISSING_RP` + ConditionInvalidName = `WF_CONDITION_INVALID_NAME` + ConditionUnexpectedEnd = `WF_CONDITION_UNEXPECTED_END` + IllegalIterationStyle = `WF_ILLEGAL_ITERATION_STYLE` + IllegalOperation = `WF_ILLEGAL_OPERATION` + StepNoName = `WF_STEP_NO_NAME` + IteratorNotOneStep = `WF_ITERATOR_NOT_ONE_STEP` +) + +func init() { + issue.Hard(ConditionSyntaxError, `syntax error in condition '%{text}' at position %{pos}`) + issue.Hard(ConditionMissingRp, `expected right parenthesis in condition '%{text}' at position %{pos}`) + issue.Hard(ConditionInvalidName, `invalid name '%{name}' in condition '%{text}' at position %{pos}`) + issue.Hard(ConditionUnexpectedEnd, `unexpected end of condition '%{text}' at position %{pos}`) + issue.Hard(IllegalIterationStyle, `no such iteration style '%{style}'`) + issue.Hard(IllegalOperation, `no such operation '%{operation}'`) + issue.Hard(StepNoName, `an step must have a name`) + issue.Hard(IteratorNotOneStep, `an iterator must have exactly one step`) +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/iterator.go b/vendor/github.com/lyraproj/servicesdk/wf/iterator.go new file mode 100644 index 0000000..7bc541c --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/iterator.go @@ -0,0 +1,100 @@ +package wf + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type IterationStyle int + +const IterationStyleEach = 1 +const IterationStyleEachPair = 2 
+const IterationStyleRange = 3 +const IterationStyleTimes = 4 + +func (is IterationStyle) String() string { + switch is { + case IterationStyleEach: + return `each` + case IterationStyleEachPair: + return `eachPair` + case IterationStyleRange: + return `range` + case IterationStyleTimes: + return `times` + default: + return `unknown iteration style` + } +} + +func NewIterationStyle(style string) IterationStyle { + switch style { + case `each`: + return IterationStyleEach + case `eachPair`: + return IterationStyleEachPair + case `range`: + return IterationStyleRange + case `times`: + return IterationStyleTimes + } + panic(px.Error(IllegalIterationStyle, issue.H{`style`: style})) +} + +type Iterator interface { + Step + + // IterationStyle returns the style of iterator, times, range, each, or eachPair. + IterationStyle() IterationStyle + + // Producer returns the Step that will be invoked once for each iteration + Producer() Step + + // Over returns what this iterator will iterate over. + Over() px.Value + + // Variables returns the variables that this iterator will produce for each iteration. These + // variables will be removed from the declared parameters set when the final requirements + // for the step are computed. + Variables() []px.Parameter + + // Into names the returns from the iteration + Into() string +} + +type iterator struct { + step + style IterationStyle + producer Step + over px.Value + variables []px.Parameter + into string +} + +func MakeIterator(name string, when Condition, parameters, returns []px.Parameter, style IterationStyle, producer Step, over px.Value, variables []px.Parameter, into string) Iterator { + return &iterator{step{name, when, parameters, returns}, style, producer, over, variables, into} +} + +func (it *iterator) Label() string { + return `iterator ` + it.name +} + +func (it *iterator) IterationStyle() IterationStyle { + return it.style +} + +func (it *iterator) Producer() Step { + return it.producer +} + +func (it *iterator) Over() px.Value { + return it.over +} + +func (it *iterator) Into() string { + return it.into +} + +func (it *iterator) Variables() []px.Parameter { + return it.variables +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/operation.go b/vendor/github.com/lyraproj/servicesdk/wf/operation.go new file mode 100644 index 0000000..9bba1f9 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/operation.go @@ -0,0 +1,37 @@ +package wf + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +type Operation int + +const Read Operation = 1 +const Delete Operation = 2 +const Upsert Operation = 3 + +func (is Operation) String() string { + switch is { + case Read: + return `read` + case Delete: + return `delete` + case Upsert: + return `upsert` + default: + return `unknown operation` + } +} + +func NewOperation(operation string) Operation { + switch operation { + case `read`: + return Read + case `delete`: + return Delete + case `upsert`: + return Upsert + } + panic(px.Error(IllegalOperation, issue.H{`operation`: operation})) +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/parser.go b/vendor/github.com/lyraproj/servicesdk/wf/parser.go new file mode 100644 index 0000000..acfbe72 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/parser.go @@ -0,0 +1,96 @@ +package wf + +import ( + "regexp" + "strings" + "text/scanner" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +var namePattern = regexp.MustCompile(`\A[a-z][a-zA-Z0-9_]*\z`) + +type parser struct { + str 
string + scn scanner.Scanner +} + +func Parse(str string) Condition { + if str == `` { + return Always + } + p := &parser{} + p.str = str + p.scn.Init(strings.NewReader(str)) + c, r := p.parseOr() + if r != scanner.EOF { + panic(px.Error(ConditionSyntaxError, issue.H{`text`: p.str, `pos`: p.scn.Offset})) + } + return c +} + +func (p *parser) parseOr() (Condition, rune) { + es := make([]Condition, 0) + for { + lh, r := p.parseAnd() + es = append(es, lh) + if p.scn.TokenText() != `or` { + if len(es) == 1 { + return es[0], r + } + return Or(es), r + } + } +} + +func (p *parser) parseAnd() (Condition, rune) { + es := make([]Condition, 0) + for { + lh, r := p.parseUnary() + es = append(es, lh) + if p.scn.TokenText() != `and` { + if len(es) == 1 { + return es[0], r + } + return And(es), r + } + } +} + +func (p *parser) parseUnary() (c Condition, r rune) { + r = p.scn.Scan() + if r == '!' { + c, r = p.parseAtom(p.scn.Scan()) + return Not(c), r + } + return p.parseAtom(r) +} + +func (p *parser) parseAtom(r rune) (Condition, rune) { + if r == scanner.EOF { + panic(px.Error(ConditionUnexpectedEnd, issue.H{`text`: p.str, `pos`: p.scn.Offset})) + } + + if r == '(' { + var c Condition + c, r = p.parseOr() + if r != ')' { + panic(px.Error(ConditionMissingRp, issue.H{`text`: p.str, `pos`: p.scn.Offset})) + } + return c, p.scn.Scan() + } + w := p.scn.TokenText() + if namePattern.MatchString(w) { + r = p.scn.Scan() + switch w { + case `true`: + return Always, r + case `false`: + return Never, r + default: + return Truthy(w), r + } + } + panic(px.Error(ConditionInvalidName, issue.H{`name`: w, `text`: p.str, `pos`: p.scn.Offset})) +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/reference.go b/vendor/github.com/lyraproj/servicesdk/wf/reference.go new file mode 100644 index 0000000..a080c04 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/reference.go @@ -0,0 +1,29 @@ +package wf + +import ( + "github.com/lyraproj/pcore/px" +) + +type Reference interface { + Step + + // Reference is the name of the activity that is referenced + Reference() string +} + +type reference struct { + step + referencedStep string +} + +func MakeReference(name string, when Condition, input, output []px.Parameter, referencedStep string) Reference { + return &reference{step{name, when, input, output}, referencedStep} +} + +func (s *reference) Label() string { + return `reference ` + s.name +} + +func (s *reference) Reference() string { + return s.referencedStep +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/resource.go b/vendor/github.com/lyraproj/servicesdk/wf/resource.go new file mode 100644 index 0000000..b034aef --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/resource.go @@ -0,0 +1,42 @@ +package wf + +import ( + "github.com/lyraproj/pcore/px" +) + +type State interface { + Type() px.ObjectType + State() interface{} +} + +type StateConverter func(ctx px.Context, state State, parameters px.OrderedMap) px.PuppetObject + +type Resource interface { + Step + + ExternalId() string + + State() State +} + +type resource struct { + step + state State + extId string +} + +func MakeResource(name string, when Condition, parameters, returns []px.Parameter, extId string, state State) Resource { + return &resource{step{name, when, parameters, returns}, state, extId} +} + +func (r *resource) ExternalId() string { + return r.extId +} + +func (r *resource) Label() string { + return `resource ` + r.name +} + +func (r *resource) State() State { + return r.state +} diff --git 
a/vendor/github.com/lyraproj/servicesdk/wf/statehandler.go b/vendor/github.com/lyraproj/servicesdk/wf/statehandler.go new file mode 100644 index 0000000..f0ce6b2 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/statehandler.go @@ -0,0 +1,28 @@ +package wf + +import ( + "github.com/lyraproj/pcore/px" +) + +type StateHandler interface { + Step + + Interface() interface{} +} + +type stateHandler struct { + step + api interface{} +} + +func MakeStateHandler(name string, when Condition, parameters, returns []px.Parameter, api interface{}) StateHandler { + return &stateHandler{step{name, when, parameters, returns}, api} +} + +func (a *stateHandler) Label() string { + return `stateHandler ` + a.name +} + +func (a *stateHandler) Interface() interface{} { + return a.api +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/step.go b/vendor/github.com/lyraproj/servicesdk/wf/step.go new file mode 100644 index 0000000..10928c2 --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/step.go @@ -0,0 +1,51 @@ +package wf + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" +) + +// An Step of a Workflow. The workflow is an Step in itself and can be used in +// another Workflow. +type Step interface { + issue.Labeled + + // When returns an optional Condition that controls whether or not this step participates + // in the workflow. + When() Condition + + // Name returns the fully qualified name of the Step + Name() string + + // Parameters returns the parameters requirements for the Step + Parameters() []px.Parameter + + // Returns returns the definition of that this Step will produce + Returns() []px.Parameter +} + +type step struct { + name string + when Condition + parameters []px.Parameter + returns []px.Parameter +} + +func (a *step) When() Condition { + return a.when +} + +func (a *step) Name() string { + return a.name +} + +func (a *step) Parameters() []px.Parameter { + return a.parameters +} + +func (a *step) Returns() []px.Parameter { + return a.returns +} + +func (a *step) Resolve(px.Context) { +} diff --git a/vendor/github.com/lyraproj/servicesdk/wf/workflow.go b/vendor/github.com/lyraproj/servicesdk/wf/workflow.go new file mode 100644 index 0000000..1513d2a --- /dev/null +++ b/vendor/github.com/lyraproj/servicesdk/wf/workflow.go @@ -0,0 +1,28 @@ +package wf + +import ( + "github.com/lyraproj/pcore/px" +) + +type Workflow interface { + Step + + Steps() []Step +} + +type workflow struct { + step + steps []Step +} + +func MakeWorkflow(name string, when Condition, parameters, returns []px.Parameter, steps []Step) Workflow { + return &workflow{step{name, when, parameters, returns}, steps} +} + +func (w *workflow) Label() string { + return `workflow ` + w.name +} + +func (w *workflow) Steps() []Step { + return w.steps +} diff --git a/vendor/github.com/lyraproj/wfe/LICENSE b/vendor/github.com/lyraproj/wfe/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
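The condition and parser files added above (vendor/github.com/lyraproj/servicesdk/wf/condition.go and parser.go) form a small boolean DSL over workflow parameters: conditions can be composed with And/Or/Not/Truthy or parsed from text, and are evaluated against a px.OrderedMap. A minimal usage sketch follows; it is illustrative only, not part of the vendored files, and assumes the lyraproj packages resolve under the same import paths used throughout this patch.

package main

import (
	"fmt"

	"github.com/lyraproj/pcore/types"
	"github.com/lyraproj/servicesdk/wf"
)

func main() {
	// Build a condition programmatically: true when 'primary' is truthy and 'disabled' is not.
	c := wf.And([]wf.Condition{wf.Truthy(`primary`), wf.Not(wf.Truthy(`disabled`))})

	// The equivalent condition can also be obtained by parsing its textual form.
	p := wf.Parse(`primary and !disabled`)

	// Parameters are passed as a px.OrderedMap; only 'primary' is set here.
	params := types.WrapHash([]*types.HashEntry{
		types.WrapHashEntry2(`primary`, types.WrapString(`yes`)),
	})

	fmt.Println(c.String())       // primary and !disabled
	fmt.Println(c.IsTrue(params)) // true: 'primary' is truthy, 'disabled' is unset
	fmt.Println(p.IsTrue(params)) // true
	fmt.Println(p.Names())        // [disabled primary]
}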
diff --git a/vendor/github.com/lyraproj/wfe/api/issues.go b/vendor/github.com/lyraproj/wfe/api/issues.go new file mode 100644 index 0000000..fb23315 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/issues.go @@ -0,0 +1,30 @@ +package api + +import "github.com/lyraproj/issue/issue" + +const ( + FailedToLoadPlugin = `WF_FAILED_TO_LOAD_PLUGIN` + LyraLinkNoMap = `WF_LYRA_LINK_NO_MAP` + LyraLinkNoExe = `WF_LYRA_LINK_NO_EXE` + NoSuchAttribute = `WF_NO_SUCH_ATTRIBUTE` + NoSuchReferencedValue = `WF_NO_SUCH_REFERENCED_VALUE` + NoStepContext = `WF_NO_STEP_CONTEXT` + MissingRequiredProperty = `WF_MISSING_REQUIRED_PROPERTY` + MultipleErrors = `WF_MULTIPLE_ERRORS` + UnableToLoadRequired = `WF_UNABLE_TO_LOAD_REQUIRED` + UnableToDetermineExternalId = `WF_UNABLE_TO_DETERMINE_EXTERNAL_ID` +) + +func init() { + issue.Hard(FailedToLoadPlugin, `error while loading plugin executable '%{executable}': %{message}`) + issue.Hard(LyraLinkNoMap, `Lyra Link did not contain a YAML map`) + issue.Hard(LyraLinkNoExe, `Lyra Link did not contain a valid 'executable' entry`) + issue.Hard2(NoSuchReferencedValue, `referenced %{activity} has no %{valueType} named '%{name}'`, + issue.HF{`activity`: issue.Label}) + issue.Hard2(NoSuchAttribute, `%{step} has no attribute named '%{name}'`, issue.HF{`step`: issue.Label}) + issue.Hard(NoStepContext, `no step context was found in current scope`) + issue.Hard(MissingRequiredProperty, `definition %{service} %{definition} is missing required property '%{key}'`) + issue.Hard2(MultipleErrors, `multiple errors: %{errors}`, issue.HF{`errors`: issue.JoinErrors}) + issue.Hard(UnableToLoadRequired, `unable to load required %{namespace} '%{name}'`) + issue.Hard(UnableToDetermineExternalId, `unable to determine external ID for %{style} '%{id}'`) +} diff --git a/vendor/github.com/lyraproj/wfe/api/iterator.go b/vendor/github.com/lyraproj/wfe/api/iterator.go new file mode 100644 index 0000000..39043b3 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/iterator.go @@ -0,0 +1,24 @@ +package api + +import ( + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/wf" +) + +type Iterator interface { + Step + + // Style returns the style of iterator, times, range, each, or eachPair. + IterationStyle() wf.IterationStyle + + // Producer returns the Step that will be invoked once for each iteration + Producer() Step + + // Over returns what this iterator will iterate over + Over() px.Value + + // Variables returns the variables that this iterator will produce for each iteration. These + // variables will be removed from the declared parameters set when the final requirements + // for the step are computed. 
+ Variables() []px.Parameter +} diff --git a/vendor/github.com/lyraproj/wfe/api/loader.go b/vendor/github.com/lyraproj/wfe/api/loader.go new file mode 100644 index 0000000..ffad49a --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/loader.go @@ -0,0 +1,10 @@ +package api + +import "github.com/lyraproj/pcore/px" + +var LyraLinkPath = px.PathType(`lyralink`) +var GoPluginPath = px.PathType(`goplugin`) +var PpManifestPath = px.PathType(`ppmanifest`) +var YamlManifestPath = px.PathType(`yamlmanifest`) + +const LyraDlvConfigKey = `Lyra::DlvConfig` diff --git a/vendor/github.com/lyraproj/wfe/api/resource.go b/vendor/github.com/lyraproj/wfe/api/resource.go new file mode 100644 index 0000000..341a807 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/resource.go @@ -0,0 +1,13 @@ +package api + +import "github.com/lyraproj/pcore/px" + +type Resource interface { + Step + + Type() px.ObjectType + + HandlerId() px.TypedName + + ExtId() px.Value +} diff --git a/vendor/github.com/lyraproj/wfe/api/step.go b/vendor/github.com/lyraproj/wfe/api/step.go new file mode 100644 index 0000000..420a70f --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/step.go @@ -0,0 +1,49 @@ +package api + +import ( + "net/url" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/wf" +) + +// An Step of a Workflow. The workflow is an Step in itself and can be used in +// another Workflow. +type Step interface { + issue.Labeled + + // When returns an optional Condition that controls whether or not this step participates + // in the workflow. + When() wf.Condition + + // Identifier returns a string that uniquely identifies the step within a resource. The string + // is guaranteed to remain stable across invocations provided that no step names, resource types + // or iterator parameters changes within the parent chain of this Step. + Identifier() string + + // IdParams returns optional URL parameter values that becomes part of the Identifier + IdParams() url.Values + + // The Id of the service that provides this step + ServiceId() px.TypedName + + // Returns a copy of this Step with index set to the given value + WithIndex(index int) Step + + // Style returns the step style, 'workflow', 'resource', 'stateHandler', or 'action'. + Style() string + + // Name returns the fully qualified name of the Step + Name() string + + // Parameters returns the parameters requirements for the Step + Parameters() []px.Parameter + + // Returns returns the definition of that this Step will produce + Returns() []px.Parameter + + // Run will execute this Step. The given parameters must match the declared Parameters. It will return + // a value that corresponds to the Returns declaration. 
+ Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap +} diff --git a/vendor/github.com/lyraproj/wfe/api/workflow.go b/vendor/github.com/lyraproj/wfe/api/workflow.go new file mode 100644 index 0000000..89faf89 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/api/workflow.go @@ -0,0 +1,7 @@ +package api + +type Workflow interface { + Step + + Steps() []Step +} diff --git a/vendor/github.com/lyraproj/wfe/service/crud.go b/vendor/github.com/lyraproj/wfe/service/crud.go new file mode 100644 index 0000000..69b8a03 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/service/crud.go @@ -0,0 +1,214 @@ +package service + +import ( + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/annotation" + "github.com/lyraproj/servicesdk/grpc" + "github.com/lyraproj/servicesdk/service" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" +) + +func StartEra(c px.Context) { + getIdentity(c).bumpEra(c) +} + +// SweepAndGC performs a sweep of the Identity store, retrieves all garbage, and +// then tells the handler for each garbage entry to delete the resource. The entry +// is then purged from the Identity store +func SweepAndGC(c px.Context, prefix string) { + identity := getIdentity(c) + log := hclog.Default() + log.Debug("GC Sweep", "prefix", prefix) + identity.sweep(c, prefix) + log.Debug("GC Collect garbage", "prefix", prefix) + gl := identity.garbage(c, prefix) + ng := gl.Len() + log.Debug("GC Collect garbage", "prefix", prefix, "count", ng) + rs := make([]px.List, ng) + + // Store in reverse order + ng-- + gl.EachWithIndex(func(t px.Value, i int) { + rs[ng-i] = t.(px.List) + }) + + for _, l := range rs { + uri := types.ParseURI(l.At(0).String()) + hid := uri.Query().Get(`hid`) + handlerDef := GetHandler(c, px.NewTypedName(px.NsHandler, hid)) + handler := GetService(c, handlerDef.ServiceId()) + + extId := l.At(1) + log.Debug("GC delete", "prefix", prefix, "intId", uri.String(), "extId", extId) + handler.Invoke(c, handlerDef.Identifier().Name(), `delete`, extId) + identity.purgeExternal(c, extId) + } +} + +func readOrNotFound(c px.Context, handler serviceapi.Service, hn string, extId px.Value, identity *identity) px.Value { + defer func() { + if r := recover(); r != nil { + if e, ok := r.(issue.Reported); ok && e.Code() == grpc.InvocationError { + if e.Argument(`code`) == service.NotFound { + // Not found by remote. Purge the extId and return nil. 
+ hclog.Default().Debug("Removing obsolete extId from Identity service", "extId", extId) + identity.purgeExternal(c, extId) + return + } + } + panic(r) + } + }() + + hclog.Default().Debug("Read state", "extId", extId) + return handler.Invoke(c, hn, `read`, extId) +} +func ApplyState(c px.Context, resource api.Resource, parameters px.OrderedMap) px.OrderedMap { + ac := StepContext(c) + op := GetOperation(ac) + + log := hclog.Default() + handlerDef := GetHandler(c, resource.HandlerId()) + crd := GetProperty(handlerDef, `interface`, types.NewTypeType(types.DefaultObjectType())).(px.ObjectType) + identity := getIdentity(c) + handler := GetService(c, handlerDef.ServiceId()) + + intId := types.WrapString(resource.Identifier()) + extId := resource.ExtId() + explicitExtId := extId != nil + if !explicitExtId { + // external id must exist in order to do a read or delete + extId = identity.getExternal(c, intId, op == wf.Read || op == wf.Delete) + log.Debug("GetExternal", "intId", intId, "extId", extId) + } + + var result px.PuppetObject + hn := handlerDef.Identifier().Name() + switch op { + case wf.Read: + if extId == nil { + return px.EmptyMap + } + rt := readOrNotFound(c, handler, hn, extId, identity) + if rt == nil { + return px.EmptyMap + } + result = px.AssertInstance(handlerDef.Label, resource.Type(), rt).(px.PuppetObject) + + case wf.Upsert: + if explicitExtId { + // An explicit externalId is for resources not managed by us. Only possible action + // here is a read + rt := readOrNotFound(c, handler, hn, extId, identity) + if rt == nil { + // False positive from the Identity service + return px.EmptyMap + } + result = px.AssertInstance(handlerDef.Label, resource.Type(), rt).(px.PuppetObject) + break + } + + desiredState := GetService(c, resource.ServiceId()).State(c, resource.Name(), parameters) + if extId != nil { + // Read current state and check if an update is needed + rt := readOrNotFound(c, handler, hn, extId, identity) + if rt == nil { + // False positive from the Identity service + extId = nil + } else { + result = px.AssertInstance(handlerDef.Label, resource.Type(), rt).(px.PuppetObject) + } + } + + if extId == nil { + // Nothing exists yet. Create a new instance + log.Debug("Create state", "intId", intId) + rt := handler.Invoke(c, hn, `create`, desiredState).(px.List) + result = px.AssertInstance(handlerDef.Label, resource.Type(), rt.At(0)).(px.PuppetObject) + extId = rt.At(1) + log.Debug("Associate state", "intId", intId, "extId", extId) + identity.associate(c, intId, extId) + break + } + + var updateNeeded, recreateNeeded bool + if a, ok := resource.Type().Annotations(c).Get(annotation.ResourceType); ok { + ra := a.(annotation.Resource) + updateNeeded, recreateNeeded = ra.Changed(desiredState, result) + } else { + updateNeeded = !desiredState.Equals(result, nil) + recreateNeeded = false + } + + if updateNeeded { + if !recreateNeeded { + // Update existing content. If an update method exists, call it. 
If not, then fall back + // to delete + create + if _, ok := crd.Member(`update`); ok { + log.Debug("Update state", "extId", extId) + result = px.AssertInstance(handlerDef.Label, resource.Type(), handler.Invoke(c, hn, `update`, extId, desiredState)).(px.PuppetObject) + break + } + } + + // Rely on that deletion happens by means of GC at end of run + log.Debug("Remove external", "extId", extId) + identity.removeExternal(c, extId) + + log.Debug("Create state", "intId", intId) + rt := handler.Invoke(c, hn, `create`, desiredState) + rl := rt.(px.List) + result = px.AssertInstance(handlerDef.Label, resource.Type(), rl.At(0)).(px.PuppetObject) + extId = rl.At(1) + log.Debug("Associate state", "intId", intId, "extId", extId) + identity.associate(c, intId, extId) + } + default: + panic(px.Error(wf.IllegalOperation, issue.H{`operation`: op})) + } + + switch op { + case wf.Read, wf.Upsert: + returns := resource.Returns() + entries := make([]*types.HashEntry, len(returns)) + for i, o := range returns { + entries[i] = getValue(o, resource, result) + } + return types.WrapHash(entries) + } + return px.EmptyMap +} + +func getValue(p px.Parameter, r api.Resource, o px.PuppetObject) *types.HashEntry { + n := p.Name() + a := n + if p.HasValue() { + v := p.Value() + if a, ok := v.(*types.Array); ok { + // Build hash from multiple attributes + entries := make([]*types.HashEntry, a.Len()) + a.EachWithIndex(func(e px.Value, i int) { + a := e.String() + if v, ok := o.Get(a); ok { + entries[i] = types.WrapHashEntry(e, v) + } else { + panic(px.Error(api.NoSuchAttribute, issue.H{`step`: r, `name`: a})) + } + }) + return types.WrapHashEntry2(n, types.WrapHash(entries)) + } + + if s, ok := v.(px.StringValue); ok { + a = s.String() + } + } + if v, ok := o.Get(a); ok { + return types.WrapHashEntry2(n, v) + } + panic(px.Error(api.NoSuchAttribute, issue.H{`step`: r, `name`: a})) +} diff --git a/vendor/github.com/lyraproj/wfe/service/identity.go b/vendor/github.com/lyraproj/wfe/service/identity.go new file mode 100644 index 0000000..24f6497 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/service/identity.go @@ -0,0 +1,91 @@ +package service + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/wfe/api" +) + +type identity struct { + id string + invokable serviceapi.Invokable +} + +func (i *identity) associate(c px.Context, internalID, externalID px.Value) { + i.invokable.Invoke(c, i.id, `associate`, internalID, externalID) +} + +func (i *identity) bumpEra(c px.Context) { + i.invokable.Invoke(c, i.id, `bumpEra`) +} + +func (i *identity) garbage(c px.Context, prefix string) px.List { + result := i.invokable.Invoke(c, i.id, `garbage`, types.WrapString(prefix)) + if l, ok := result.(px.List); ok { + return l + } + return nil +} + +func (i *identity) getExternal(c px.Context, internalId px.Value, required bool) px.Value { + result := i.invokable.Invoke(c, i.id, `getExternal`, internalId) + if id, ok := result.(px.StringValue); ok && id.String() != `` { + return id + } + if required { + panic(px.Error(api.UnableToDetermineExternalId, issue.H{`id`: internalId})) + } + return nil +} + +func (i *identity) sweep(c px.Context, prefix string) { + i.invokable.Invoke(c, i.id, `sweep`, types.WrapString(prefix)) +} + +func (i *identity) purgeExternal(c px.Context, externalID px.Value) { + i.invokable.Invoke(c, i.id, `purgeExternal`, externalID) +} + +func (i *identity) removeExternal(c px.Context, 
externalID px.Value) { + i.invokable.Invoke(c, i.id, `removeExternal`, externalID) +} + +/* +func (i *identity) search(c px.Context, prefix string) px.List { + result := i.invokable.Invoke(c, i.id, `search`, types.WrapString(prefix)) + if l, ok := result.(px.List); ok { + return l + } + return nil +} + +func (i *identity) exists(c px.Context, internalId px.Value) bool { + result := i.invokable.Invoke(c, i.id, `getExternal`, internalId).(px.List) + return result.At(1).(px.Boolean).Bool() +} + +func (i *identity) getInternal(c px.Context, externalID px.Value) (px.Value, bool) { + result := i.invokable.Invoke(c, i.id, `getInternal`, externalID) + if id, ok := result.(px.StringValue); ok && id.String() != `` { + return id, ok + } + return nil, false +} + +func (i *identity) purgeInternal(c px.Context, internalID px.Value) { + i.invokable.Invoke(c, i.id, `purgeInternal`, internalID) +} + +func (i *identity) removeInternal(c px.Context, internalID px.Value) { + i.invokable.Invoke(c, i.id, `removeInternal`, internalID) +} +*/ + +var IdentityId = px.NewTypedName(px.NsDefinition, "Identity::Service") + +func getIdentity(c px.Context) *identity { + d := GetDefinition(c, IdentityId) + return &identity{d.Identifier().Name(), GetService(c, d.ServiceId())} +} diff --git a/vendor/github.com/lyraproj/wfe/service/util.go b/vendor/github.com/lyraproj/wfe/service/util.go new file mode 100644 index 0000000..9856dce --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/service/util.go @@ -0,0 +1,83 @@ +package service + +import ( + "fmt" + "strings" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" +) + +const StepContextKey = `step::context` + +func StepContext(c px.Context) px.OrderedMap { + if ac, ok := c.Get(StepContextKey); ok { + return px.AssertInstance(`invalid step context`, types.DefaultHashType(), ac.(px.Value)).(px.OrderedMap) + } + panic(px.Error(api.NoStepContext, issue.NoArgs)) +} + +func GetOperation(ac px.OrderedMap) wf.Operation { + if op, ok := ac.Get4(`operation`); ok { + return wf.Operation(op.(px.Integer).Int()) + } + return wf.Read +} + +func GetService(c px.Context, serviceId px.TypedName) serviceapi.Service { + if serviceId.Namespace() == px.NsService { + if sm, ok := px.Load(c, serviceId); ok { + return sm.(serviceapi.Service) + } + } + panic(px.Error(api.UnableToLoadRequired, issue.H{`namespace`: string(px.NsService), `name`: serviceId.String()})) +} + +func GetDefinition(c px.Context, definitionId px.TypedName) serviceapi.Definition { + if definitionId.Namespace() == px.NsDefinition { + if sm, ok := px.Load(c, definitionId); ok { + return sm.(serviceapi.Definition) + } + } + panic(px.Error(api.UnableToLoadRequired, issue.H{`namespace`: string(px.NsDefinition), `name`: definitionId.String()})) +} + +func GetHandler(c px.Context, handlerId px.TypedName) serviceapi.Definition { + if handlerId.Namespace() == px.NsHandler { + if sm, ok := px.Load(c, handlerId); ok { + return sm.(serviceapi.Definition) + } + } + panic(px.Error(api.UnableToLoadRequired, issue.H{`namespace`: string(px.NsHandler), `name`: handlerId.String()})) +} + +func GetStringProperty(def serviceapi.Definition, key string) string { + return GetProperty(def, key, types.DefaultStringType()).String() +} + +func GetProperty(def serviceapi.Definition, key string, typ px.Type) px.Value { + if prop, ok := def.Properties().Get4(key); ok { + return 
px.AssertInstance(func() string { + return fmt.Sprintf(`%s %s, property %s`, def.ServiceId(), def.Identifier(), key) + }, typ, prop) + } + panic(px.Error(api.MissingRequiredProperty, issue.H{`service`: def.ServiceId(), `definition`: def.Identifier(), `key`: key})) +} + +func GetOptionalProperty(def serviceapi.Definition, key string, typ px.Type) (px.Value, bool) { + if prop, ok := def.Properties().Get4(key); ok { + return px.AssertInstance(func() string { + return fmt.Sprintf(`%s %s, property %s`, def.ServiceId(), def.Identifier(), key) + }, typ, prop), true + } + return nil, false +} + +func LeafName(name string) string { + names := strings.Split(name, `::`) + return names[len(names)-1] +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/action.go b/vendor/github.com/lyraproj/wfe/wfe/action.go new file mode 100644 index 0000000..a05565e --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/action.go @@ -0,0 +1,60 @@ +package wfe + +import ( + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/wfe/api" +) + +type action struct { + Step + api px.ObjectType +} + +/* TODO: Add type check using expectedType +var ioType = types.NewHashType(types.DefaultStringType(), types.DefaultRichDataType(), nil) +var expectedType = types.NewCallableType( + types.NewTupleType([]px.Type{ioType}, nil), ioType, nil) +*/ + +func Action(def serviceapi.Definition) api.Step { + a := &action{} + a.Init(def) + return a +} + +func (s *action) Init(d serviceapi.Definition) { + s.Step.Init(d) + if i, ok := d.Properties().Get4(`interface`); ok { + s.api = i.(px.ObjectType) + } +} + +func (s *action) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + service := s.GetService(ctx) + hclog.Default().Debug(`executing action`, `name`, s.name) + result := service.Invoke(ctx, s.Name(), `do`, parameters) + if m, ok := result.(px.OrderedMap); ok { + return m + } + panic(result.String()) +} + +func (s *action) Label() string { + return StepLabel(s) +} + +func (a *action) Identifier() string { + return StepId(a) +} + +func (s *action) Style() string { + return `action` +} + +func (a *action) WithIndex(index int) api.Step { + ac := *a // Copy by value + ac.setIndex(index) + return &ac +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/issues.go b/vendor/github.com/lyraproj/wfe/wfe/issues.go new file mode 100644 index 0000000..655c5e8 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/issues.go @@ -0,0 +1,32 @@ +package wfe + +import "github.com/lyraproj/issue/issue" + +const ( + GraphDotMarshal = `WF_GRAPH_DOT_MARSHAL` + AlreadyDefined = `WF_ALREADY_DEFINED` + MultipleProducersOfValue = `WF_MULTIPLE_PRODUCERS_OF_VALUE` + NoProducerOfValue = `WF_NO_PRODUCER_OF_VALUE` + IterationStepWrongParameters = `WF_ITERATION_STEP_WRONG_PARAMETERS` + IterationStepWrongReturns = `WF_ITERATION_STEP_WRONG_OUTPUT` + IterationParameterInvalidCount = `WF_ITERATION_PARAMETER_INVALID_COUNT` + IterationParameterWrongType = `WF_ITERATION_PARAMETER_WRONG_TYPE` + IterationVariableInvalidCount = `WF_ITERATION_VARIABLE_INVALID_COUNT` + ParameterUnresolved = `WF_PARAMETER_UNRESOLVED` + TooManyGuards = `WF_TOO_MANY_GUARDS` +) + +func init() { + issue.Hard(GraphDotMarshal, `error while marshalling graph to dot: %{detail}`) + issue.Hard2(AlreadyDefined, `%{step} is already defined`, issue.HF{`step`: issue.Label}) + issue.Hard2(MultipleProducersOfValue, `both %{step1} and %{step2} returns the value '%{value}'`, issue.HF{`step1`: issue.Label, `step2`: issue.Label}) + 
issue.Hard2(NoProducerOfValue, `%{step} value '%{value}' is never produced`, issue.HF{`step`: issue.Label}) + issue.Hard2(IterationStepWrongParameters, `%{iterator} parameters must consume returns produced by the iterator`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(IterationStepWrongReturns, `%{iterator} returns must consist of a 'key' and a 'value'`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(IterationParameterInvalidCount, `%{iterator} wrong number of parameters. Expected %{expected}, actual %{actual}`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(IterationParameterInvalidCount, `%{iterator} wrong number of parameters. Expected %{expected}, actual %{actual}`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(IterationParameterWrongType, `%{iterator} parameter %{parameter} if of wrong type. Expected %{expected}, actual %{actual}`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(IterationVariableInvalidCount, `%{iterator}, wrong number of variables. Expected %{expected}, actual %{actual}`, issue.HF{`iterator`: issue.Label}) + issue.Hard2(TooManyGuards, `%{step} is too complex. Expected %{max} guards maximum. Have %{actual}`, issue.HF{`step`: issue.Label}) + issue.Hard2(ParameterUnresolved, `%{step}, parameter %{parameter} cannot be resolved`, issue.HF{`step`: issue.Label}) +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/iterator.go b/vendor/github.com/lyraproj/wfe/wfe/iterator.go new file mode 100644 index 0000000..ac62cd1 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/iterator.go @@ -0,0 +1,382 @@ +package wfe + +import ( + "fmt" + "sync/atomic" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +const maxParallel = 100 + +type iterator struct { + api.Step + over px.Value + variables []px.Parameter + resultName string +} + +func Iterator(c px.Context, def serviceapi.Definition) api.Step { + over := def.Properties().Get5(`over`, px.Undef) + variables := getParameters(`variable`, def.Properties()) + if len(variables) == 0 { + variables = getParameters(`variables`, def.Properties()) + } + style := wf.NewIterationStyle(service.GetStringProperty(def, `iterationStyle`)) + step := CreateStep(c, service.GetProperty(def, `producer`, serviceapi.DefinitionMetaType).(serviceapi.Definition)) + var resultName string + if into, ok := def.Properties().Get4(`into`); ok { + resultName = into.String() + } else { + resultName = wf.LeafName(def.Identifier().Name()) + } + switch style { + case wf.IterationStyleEach: + return NewEach(step, resultName, over, variables) + case wf.IterationStyleEachPair: + return NewEachPair(step, resultName, over, variables) + case wf.IterationStyleRange: + return NewRange(step, resultName, over, variables) + case wf.IterationStyleTimes: + return NewTimes(step, resultName, over, variables) + default: + panic(px.Error(wf.IllegalIterationStyle, issue.H{`style`: style.String()})) + } +} + +func (it *iterator) IterationStyle() wf.IterationStyle { + panic("implement me") +} + +func (it *iterator) Over() px.Value { + return it.over +} + +func (it *iterator) Producer() api.Step { + return it.Step +} + +// Parameters returns the Parameters declared for the stateHandler + Over() and - Variables +func (it *iterator) Parameters() []px.Parameter { + parameters := it.Producer().Parameters() + all := make([]px.Parameter, 0, 
len(parameters)-len(it.variables)) +nextParameters: + for _, in := range parameters { + for _, v := range it.variables { + if in.Name() == v.Name() { + continue nextParameters + } + } + all = append(all, in) + } + return all +} + +// Returns returns one parameter named after the step. It is always an Array. Each +// entry of that array is an element that reflects the returns from the iterated +// step. +// +// An step that only returns one element will produce an array of such elements +// An step that produces multiple elements will produce an array where each +// element is a hash +func (it *iterator) Returns() []px.Parameter { + returns := it.Producer().Returns() + var vt px.Type + if len(returns) == 1 { + vt = returns[0].Type() + } else { + se := make([]*types.StructElement, len(returns)) + for i, p := range returns { + se[i] = types.NewStructElement(types.WrapString(p.Name()), p.Type()) + } + vt = types.NewStructType(se) + } + return []px.Parameter{ + px.NewParameter(it.resultName, types.NewArrayType(vt, nil), nil, false)} +} + +func (it *iterator) Variables() []px.Parameter { + return it.variables +} + +func (it *iterator) iterate(ctx px.Context, vars px.OrderedMap, start, end int64, iterFunc func(int) px.OrderedMap) px.OrderedMap { + + done := make(chan bool) + count := end - start + numWorkers := int(count) + if count > maxParallel { + numWorkers = maxParallel + } + + els := make([]px.Value, count) + jobs := make(chan int64) + for i := 0; i < numWorkers; i++ { + px.Fork(ctx, func(fc px.Context) { + for ix := range jobs { + func() { + defer func() { + if atomic.AddInt64(&count, -1) <= 0 { + close(jobs) + done <- true + } + }() + p := it.Producer() + parameters := iterFunc(int(ix)) + ip := p.WithIndex(int(ix)) + result := ip.Run(fc, parameters) + v := px.Undef + if len(p.Returns()) == 1 { + if result.Len() > 0 { + v = result.At(0).(px.MapEntry).Value() + } + } else { + v = result + } + els[ix-start] = v + }() + } + }) + } + + for i := start; i < end; i++ { + jobs <- i + } + <-done + return px.SingletonMap(it.resultName, types.WrapValues(els)) +} + +func resolveParameters(c px.Context, it api.Iterator, parameters px.OrderedMap) (px.Value, px.OrderedMap) { + // Resolve the parameters that acts as parameters to the iteration. 
+ vars := make([]*types.HashEntry, 0, len(it.Parameters())) + + // Strip parameters intended for the iterator from the list intended for the step that will be called by the iterator + for _, ap := range it.Producer().Parameters() { + if ev, ok := parameters.GetEntry(ap.Name()); ok { + vars = append(vars, ev.(*types.HashEntry)) + } + } + return types.ResolveDeferred(c, it.Over(), c.Scope()), types.WrapHash(vars) +} + +func Validate(it api.Iterator) { + a := it.Producer() + + // Ensure that parameters contains returns produced by the iterator + is := a.Parameters() + vs := it.Variables() +nextVar: + for _, v := range vs { + for _, i := range is { + if i.Name() == v.Name() { + continue nextVar + } + } + panic(px.Error(IterationStepWrongParameters, issue.H{`iterator`: it})) + } +} + +func assertInt(t api.Iterator, arg px.Value, name string) int64 { + iv, ok := arg.(px.Integer) + if !ok { + panic(px.Error(IterationParameterWrongType, issue.H{ + `iterator`: t, `parameter`: name, `expected`: `Integer`, `actual`: arg.PType()})) + } + return iv.Int() +} + +func assertRange(t api.Iterator, arg px.Value) (int64, int64) { + a, ok := arg.(*types.Array) + if !(ok && a.Len() == 2) { + panic(px.Error(IterationParameterWrongType, issue.H{ + `iterator`: t, `parameter`: `over`, `expected`: `Array`, `actual`: arg.PType()})) + } + return assertInt(t, a.At(0), `over[0]`), assertInt(t, a.At(1), `over[1]`) +} + +func assertList(t api.Iterator, arg px.Value) px.List { + if a, ok := arg.(*types.Array); ok { + return a + } + panic(px.Error(IterationParameterWrongType, issue.H{ + `iterator`: t, `parameter`: `over`, `expected`: `Array`, `actual`: arg.PType()})) +} + +func assertMap(t api.Iterator, arg px.Value) px.OrderedMap { + if h, ok := arg.(px.OrderedMap); ok { + return h + } + panic(px.Error(IterationParameterWrongType, issue.H{ + `iterator`: t, `parameter`: `over`, `expected`: `Hash`, `actual`: arg.PType()})) +} + +func iterLabel(it api.Iterator) string { + return fmt.Sprintf(`%s %s iteration`, it.Style(), StepLabel(it)) +} + +type each struct { + iterator +} + +func NewEach(step api.Step, name string, over px.Value, variables []px.Parameter) api.Iterator { + it := &each{iterator{step, over, variables, name}} + Validate(it) + return it +} + +func (t *each) Label() string { + return iterLabel(t) +} + +func (t *each) IterationStyle() wf.IterationStyle { + return wf.IterationStyleEach +} + +func (t *each) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + over, vars := resolveParameters(ctx, t, parameters) + list := assertList(t, over) + return t.iterate(ctx, vars, 0, int64(list.Len()), func(ix int) px.OrderedMap { + vs := t.Variables() + nv := len(vs) + el := list.At(ix) + switch nv { + case 0: + // Do nothing + case 1: + parameters = parameters.Merge(px.SingletonMap(vs[0].Name(), el)) + default: + es := make([]*types.HashEntry, 0, len(vs)) + switch el := el.(type) { + case *types.HashEntry: + // Map key and value to first two positions + es = append(es, types.WrapHashEntry2(vs[0].Name(), el.Key()), types.WrapHashEntry2(vs[1].Name(), el.Value())) + case *types.Array: + // Map as many as possible by index + el.EachWithIndex(func(e px.Value, i int) { + if i < nv { + es = append(es, types.WrapHashEntry2(vs[i].Name(), e)) + } + }) + case *types.Hash: + // Map as many as possible by name + for _, p := range vs { + if v, ok := el.Get4(p.Name()); ok { + es = append(es, types.WrapHashEntry2(p.Name(), v)) + } + } + case px.PuppetObject: + // Map as many as possible by name + pt := el.PType().(px.ObjectType) + 
for _, p := range vs { + if v, ok := pt.Member(p.Name()); ok { + if a, ok := v.(px.Attribute); ok { + es = append(es, types.WrapHashEntry2(p.Name(), a.Get(el))) + } + } + } + default: + es = append(es, types.WrapHashEntry2(vs[0].Name(), el)) + } + + if len(es) > 0 { + parameters = parameters.Merge(types.WrapHash(es)) + } + } + return parameters + }) +} + +type eachPair struct { + iterator +} + +func NewEachPair(step api.Step, name string, over px.Value, variables []px.Parameter) api.Iterator { + it := &eachPair{iterator{step, over, variables, name}} + Validate(it) + return it +} + +func (t *eachPair) Label() string { + return iterLabel(t) +} + +func (t *eachPair) IterationStyle() wf.IterationStyle { + return wf.IterationStyleEachPair +} + +func (t *eachPair) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + over, vars := resolveParameters(ctx, t, parameters) + mp := assertMap(t, over) + p0 := t.Variables()[0].Name() + p1 := t.Variables()[1].Name() + return t.iterate(ctx, vars, 0, int64(mp.Len()), func(ix int) px.OrderedMap { + entry := mp.At(ix).(px.MapEntry) + ke := types.WrapHashEntry2(p0, entry.Key()) + ve := types.WrapHashEntry2(p1, entry.Value()) + return vars.Merge(types.WrapHash([]*types.HashEntry{ke, ve})) + }) +} + +type times struct { + iterator +} + +func NewTimes(step api.Step, name string, over px.Value, variables []px.Parameter) api.Iterator { + it := &times{iterator{step, over, variables, name}} + Validate(it) + return it +} + +func (t *times) Label() string { + return iterLabel(t) +} + +func (t *times) IterationStyle() wf.IterationStyle { + return wf.IterationStyleTimes +} + +func (t *times) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + over, vars := resolveParameters(ctx, t, parameters) + return t.iterate(ctx, vars, 0, assertInt(t, over, `over`), func(ix int) px.OrderedMap { + vs := t.Variables() + if len(vs) > 0 { + parameters = parameters.Merge(px.SingletonMap(t.Variables()[0].Name(), types.WrapInteger(int64(ix)))) + } + return parameters + }) +} + +type itRange struct { + iterator +} + +func NewRange(step api.Step, name string, over px.Value, variables []px.Parameter) api.Iterator { + it := &itRange{iterator{step, over, variables, name}} + Validate(it) + return it +} + +func (t *itRange) Label() string { + return iterLabel(t) +} + +func (t *itRange) IterationStyle() wf.IterationStyle { + return wf.IterationStyleRange +} + +func (t *itRange) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + over, vars := resolveParameters(ctx, t, parameters) + from, to := assertRange(t, over) + return t.iterate(ctx, vars, from, to, func(ix int) px.OrderedMap { + vs := t.Variables() + if len(vs) > 0 { + parameters = parameters.Merge(px.SingletonMap(t.Variables()[0].Name(), types.WrapInteger(int64(ix)))) + } + return parameters + }) +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/loader.go b/vendor/github.com/lyraproj/wfe/wfe/loader.go new file mode 100644 index 0000000..ddca6e8 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/loader.go @@ -0,0 +1,219 @@ +package wfe + +import ( + "os" + "os/exec" + "path/filepath" + "regexp" + "sync" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/loader" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/pcore/yaml" + "github.com/lyraproj/servicesdk/grpc" + "github.com/lyraproj/servicesdk/service" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/wfe/api" + wfs "github.com/lyraproj/wfe/service"
+) + +func init() { + loader.SmartPathFactories[api.LyraLinkPath] = newLyraLinkPath + loader.SmartPathFactories[api.GoPluginPath] = newGoPluginPath + loader.SmartPathFactories[api.PpManifestPath] = newPpManifestPath + loader.SmartPathFactories[api.YamlManifestPath] = newYamlManifestPath +} + +func newGoPluginPath(ml px.ModuleLoader, moduleNameRelative bool) loader.SmartPath { + return loader.NewSmartPath(`goplugins`, ``, ml, []px.Namespace{px.NsService, px.NsHandler, px.NsDefinition}, moduleNameRelative, false, instantiateGoPlugin) +} + +func instantiateGoPlugin(c px.Context, l loader.ContentProvidingLoader, tn px.TypedName, sources []string) { + loadPluginMetadata(c, l.(px.DefiningLoader), sources[0]) +} + +func newLyraLinkPath(ml px.ModuleLoader, moduleNameRelative bool) loader.SmartPath { + return loader.NewSmartPath(`workflows`, `.ll`, ml, []px.Namespace{px.NsDefinition}, moduleNameRelative, false, instantiateLyraLink) +} + +func instantiateLyraLink(c px.Context, l loader.ContentProvidingLoader, tn px.TypedName, sources []string) { + dl := hclog.Default() + lf := sources[0] + dl.Debug("reading Lyra Link", "file", lf) + bts := types.BinaryFromFile(lf) + link, ok := yaml.Unmarshal(c, bts.Bytes()).(px.OrderedMap) + if !ok { + panic(px.Error2(issue.NewLocation(lf, 0, 0), api.LyraLinkNoMap, issue.NoArgs)) + } + exe := `` + if v, ok := link.Get4(`executable`); ok { + if s, ok := v.(px.StringValue); ok { + exe = s.String() + } + } + if exe == `` { + panic(px.Error2(issue.NewLocation(lf, 0, 0), api.LyraLinkNoExe, issue.NoArgs)) + } + exe = os.ExpandEnv(exe) + args := []string{} + if v, ok := link.Get4(`arguments`); ok { + // Accepts array of strings or a string + if a, ok := v.(*types.Array); ok { + args = make([]string, a.Len()) + a.EachWithIndex(func(s px.Value, i int) { args[i] = os.ExpandEnv(s.String()) }) + } else if s, ok := v.(px.StringValue); ok { + args = []string{os.ExpandEnv(s.String())} + } + } + loadPluginMetadata(c, l.(px.DefiningLoader), exe, args...) 
+} + +func newYamlManifestPath(ml px.ModuleLoader, moduleNameRelative bool) loader.SmartPath { + return loader.NewSmartPath(`workflows`, `.yaml`, ml, []px.Namespace{px.NsDefinition}, moduleNameRelative, false, instantiateYaml) +} + +func instantiateYaml(c px.Context, l loader.ContentProvidingLoader, tn px.TypedName, sources []string) { + // No actual difference until the plugins puppet-workflow and yaml-workflow become separated + instantiatePp(c, l, tn, sources) +} + +func newPpManifestPath(ml px.ModuleLoader, moduleNameRelative bool) loader.SmartPath { + return loader.NewSmartPath(`workflows`, `.pp`, ml, []px.Namespace{px.NsDefinition}, moduleNameRelative, false, instantiatePp) +} + +func instantiatePp(c px.Context, l loader.ContentProvidingLoader, tn px.TypedName, sources []string) { + ppServer := wfs.GetService(c, px.NewTypedName(px.NsService, `Puppet`)) + lg := hclog.Default() + f := sources[0] + lg.Debug("loading manifest", "file", f) + def := ppServer.Invoke( + c, `Puppet::ManifestLoader`, `loadManifest`, + types.WrapString(filepath.Dir(filepath.Dir(f))), // Search for 'workflows/../types' + types.WrapString(f)).(serviceapi.Definition) + sa := service.NewSubService(def) + dl := l.(px.DefiningLoader) + dl.SetEntry(sa.Identifier(c), px.NewLoaderEntry(sa, nil)) + loadMetadata(c, dl, ``, nil, sa) +} + +var once sync.Once +var dlvConfigType px.Type + +const dlvListenDefault = `localhost:2345` +const dlvBinaryDefault = `dlv` +const dlvApiVersionDefault = `2` + +func convertToDebug(c px.Context, dc px.Value, cmd string, cmdArgs []string) (string, []string) { + once.Do(func() { + dlvConfigType = c.ParseType( + `Variant[ + String[1], + Struct[ + process => Variant[String[1], Hash[String[1],String[1]]], + Optional[binary] => String[1], + Optional[api] => Integer[1]]]`) + }) + + px.AssertInstance(`dlv configuration`, dlvConfigType, dc) + lg := hclog.Default() + match := func(v px.Value) bool { + s := v.String() + pm, err := regexp.Compile(s) + if err != nil { + lg.Error(`Unable to compile dlv configuration process match: `, `pattern`, s, `error`, err.Error()) + } + if pm.MatchString(cmd) { + lg.Debug(`dlv configuration process match`, `pattern`, s, `process`, cmd) + return true + } + return false + } + + found := false + listen := dlvListenDefault + api := dlvApiVersionDefault + dlv := dlvBinaryDefault + if p, ok := dc.(px.StringValue); ok { + // Config is just a string. The string then denotes the process pattern + found = match(p) + } else { + dch := dc.(px.OrderedMap) + pe, _ := dch.Get4(`process`) + if p, ok := pe.(px.StringValue); ok { + found = match(p) + } else { + // Key is process pattern, value is listen address. First match wins + pe.(px.OrderedMap).Find(func(v px.Value) bool { + e := v.(px.MapEntry) + found = match(e.Key()) + if found { + listen = e.Value().String() + } + return found + }) + } + + if found { + if a, ok := dch.Get4(`api`); ok { + api = a.String() + } + if binary, ok := dch.Get4(`binary`); ok { + dlv = binary.String() + } + } + } + + if found { + lg.Info(`starting plugin for debugging`, `dlv`, dlv, `command`, cmd, `listen`, listen) + cmdArgs = append([]string{`exec`, cmd, `--headless`, `--listen`, listen, `--log-dest`, `2`, `--api-version`, api}, cmdArgs...) + cmd = dlv + } + return cmd, cmdArgs +} + +func loadPluginMetadata(c px.Context, dl px.DefiningLoader, cmd string, cmdArgs ...string) { + if dc, ok := c.Get(api.LyraDlvConfigKey); ok { + cmd, cmdArgs = convertToDebug(c, dc.(px.Value), cmd, cmdArgs) + } + serviceCmd := exec.CommandContext(c, cmd, cmdArgs...) 
+ service, err := grpc.Load(serviceCmd, nil) + if err != nil { + panic(px.Error(api.FailedToLoadPlugin, issue.H{`executable`: cmd, `message`: err.Error()})) + } + + lg := hclog.Default() + ti := service.Identifier(c) + lg.Debug("loaded executable", "plugin", ti) + + dl.SetEntry(ti, px.NewLoaderEntry(service, nil)) + + lg.Debug("loading metadata", "plugin", cmd) + loadMetadata(c, dl, cmd, cmdArgs, service) + lg.Debug("done loading metadata", "plugin", cmd) +} + +func loadMetadata(c px.Context, l px.DefiningLoader, cmd string, cmdArgs []string, service serviceapi.Service) { + ts, defs := service.Metadata(c) + + lg := hclog.Default() + if ts != nil { + lg.Debug(`loaded TypeSet`, `name`, ts.Name(), `count`, ts.Types().Len()) + } + + if len(defs) > 0 { + lg.Debug(`loaded Definitions`, `count`, len(defs)) + + // Register definitions + for _, def := range defs { + le := px.NewLoaderEntry(def, nil) + l.SetEntry(def.Identifier(), le) + if handlerFor, ok := def.Properties().Get4(`handlerFor`); ok { + hn := px.NewTypedName(px.NsHandler, handlerFor.(issue.Named).Name()) + l.SetEntry(hn, le) + } + } + } +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/reference.go b/vendor/github.com/lyraproj/wfe/wfe/reference.go new file mode 100644 index 0000000..7fdb632 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/reference.go @@ -0,0 +1,126 @@ +package wfe + +import ( + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +type reference struct { + Step + ra api.Step +} + +func Reference(c px.Context, def serviceapi.Definition) api.Step { + r := &reference{} + r.Init(def) + reference := service.GetStringProperty(def, `reference`) + hclog.Default().Debug(`resolving activity reference`, `name`, r.name, `reference`, reference) + r.ra = CreateStep(c, service.GetDefinition(c, px.NewTypedName(px.NsDefinition, reference))) + return r +} + +func (r *reference) Identifier() string { + return StepId(r) +} + +func (r *reference) Parameters() []px.Parameter { + var input []px.Parameter + if len(r.parameters) == 0 { + input = r.ra.Parameters() + } else { + // Return a copy with the alias stripped of. Input parameters with values (alias is a value) + // doesn't pick up a new value from the workflow. 
+ input = make([]px.Parameter, len(r.parameters)) + for i, p := range r.parameters { + input[i] = px.NewParameter(p.Name(), p.Type(), nil, false) + } + } + return input +} + +func (r *reference) Returns() []px.Parameter { + output := r.returns + if len(output) == 0 { + output = r.ra.Returns() + } + return output +} + +func (r *reference) When() wf.Condition { + when := r.when + if when == nil { + when = r.ra.When() + } else { + if r.ra.When() != nil { + when = wf.And([]wf.Condition{when, r.ra.When()}) + } + } + return when +} + +func (r *reference) Run(ctx px.Context, input px.OrderedMap) px.OrderedMap { + return r.mapOutput(r.ra.Run(ctx, r.mapInput(input))) +} + +func (r *reference) Label() string { + return StepLabel(r) +} + +func (r *reference) Style() string { + return `reference` +} + +func (r *reference) mapInput(input px.OrderedMap) px.OrderedMap { + ips := r.parameters + if len(ips) == 0 { + ips = r.ra.Parameters() + if len(ips) == 0 { + return input + } + } + return input.MapEntries(func(entry px.MapEntry) px.MapEntry { + key := entry.Key() + kn := key.String() + for _, p := range ips { + if p.Name() == kn { + if p.HasValue() { + if alias, ok := p.Value().(px.StringValue); ok { + entry = types.WrapHashEntry(alias, entry.Value()) + } + } + break + } + } + return entry + }) +} + +func (r *reference) mapOutput(output px.OrderedMap) px.OrderedMap { + ops := r.Returns() + if len(ops) == 0 { + return output + } + return output.MapEntries(func(entry px.MapEntry) px.MapEntry { + key := entry.Key() + for _, p := range ops { + if p.HasValue() { + if alias, ok := p.Value().(px.StringValue); ok && alias.Equals(key, nil) { + entry = types.WrapHashEntry2(p.Name(), entry.Value()) + break + } + } + } + return entry + }) +} + +func (r *reference) WithIndex(index int) api.Step { + rc := *r // Copy by value + rc.setIndex(index) + return &rc +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/resource.go b/vendor/github.com/lyraproj/wfe/wfe/resource.go new file mode 100644 index 0000000..326fdc3 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/resource.go @@ -0,0 +1,82 @@ +package wfe + +import ( + "net/url" + + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +type resource struct { + Step + typ px.ObjectType + handler px.TypedName + extId px.Value +} + +func (r *resource) Type() px.ObjectType { + return r.typ +} + +func (r *resource) HandlerId() px.TypedName { + return r.handler +} + +func (r *resource) ExtId() px.Value { + return r.extId +} + +func Resource(c px.Context, def serviceapi.Definition) api.Step { + r := &resource{} + r.Init(def) + if eid, ok := service.GetOptionalProperty(def, `externalId`, types.DefaultStringType()); ok { + r.extId = eid + } + + rt := service.GetProperty(def, `resourceType`, types.DefaultTypeType()).(px.Type) + if rs, ok := rt.(px.ResolvableType); ok { + // Ensure that the handler for the resource type is loaded prior to attempting + // the resolve. 
+ if tr, ok := rs.(*types.TypeReferenceType); ok && types.TypeNamePattern.MatchString(tr.TypeString()) { + if _, ok = px.Load(c, px.NewTypedName(px.NsHandler, tr.TypeString())); ok { + rt = rs.Resolve(c) + } + } + } + r.typ = px.AssertType(func() string { return "property resourceType of step " + def.Identifier().Name() }, + types.DefaultObjectType(), rt).(px.ObjectType) + r.handler = px.NewTypedName(px.NsHandler, r.typ.Name()) + return r +} + +func (r *resource) Identifier() string { + return StepId(r) +} + +func (r *resource) IdParams() url.Values { + vs := r.Step.IdParams() + vs.Add(`rt`, r.typ.Name()) + vs.Add(`hid`, r.HandlerId().Name()) + return vs +} + +func (r *resource) Run(c px.Context, parameters px.OrderedMap) px.OrderedMap { + return service.ApplyState(c, r, parameters) +} + +func (r *resource) Label() string { + return StepLabel(r) +} + +func (r *resource) Style() string { + return `resource` +} + +func (r *resource) WithIndex(index int) api.Step { + rc := *r // Copy by value + rc.setIndex(index) + return &rc +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/statehandler.go b/vendor/github.com/lyraproj/wfe/wfe/statehandler.go new file mode 100644 index 0000000..5133e47 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/statehandler.go @@ -0,0 +1,67 @@ +package wfe + +import ( + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +type stateHandler struct { + Step + typ px.ObjectType +} + +func StateHandler(def serviceapi.Definition) api.Step { + a := &stateHandler{} + a.Init(def) + return a +} + +func (a *stateHandler) Init(def serviceapi.Definition) { + a.Step.Init(def) + // TODO: Type validation. The typ must be an ObjectType implementing read, upsert, and delete. 
+ a.typ = service.GetProperty(def, `interface`, types.NewTypeType(types.DefaultObjectType())).(px.ObjectType) +} + +func (a *stateHandler) Run(c px.Context, parameters px.OrderedMap) px.OrderedMap { + ac := service.StepContext(c) + op := service.GetOperation(ac) + invokable := a.GetService(c) + + switch op { + case wf.Read: + return invokable.Invoke(c, a.name, `read`, parameters).(px.OrderedMap) + + case wf.Upsert: + return invokable.Invoke(c, a.name, `upsert`, parameters).(px.OrderedMap) + + case wf.Delete: + invokable.Invoke(c, a.name, `delete`, parameters) + return px.EmptyMap + default: + panic(px.Error(wf.IllegalOperation, issue.H{`operation`: op})) + } +} + +func (a *stateHandler) Identifier() string { + return StepId(a) +} + +func (a *stateHandler) Label() string { + return StepLabel(a) +} + +func (a *stateHandler) Style() string { + return `stateHandler` +} + +func (a *stateHandler) WithIndex(index int) api.Step { + ac := stateHandler{} + ac = *a // Copy by value + ac.setIndex(index) + return &ac +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/step.go b/vendor/github.com/lyraproj/wfe/wfe/step.go new file mode 100644 index 0000000..44ab29c --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/step.go @@ -0,0 +1,135 @@ +package wfe + +import ( + "bytes" + "fmt" + "net/url" + "strconv" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +type Step struct { + serviceId px.TypedName + name string + when wf.Condition + parameters []px.Parameter + returns []px.Parameter + index int +} + +func CreateStep(c px.Context, def serviceapi.Definition) api.Step { + hclog.Default().Debug(`creating step`, `style`, service.GetStringProperty(def, `style`)) + + switch service.GetStringProperty(def, `style`) { + case `stateHandler`: + return StateHandler(def) + case `iterator`: + return Iterator(c, def) + case `resource`: + return Resource(c, def) + case `workflow`: + return Workflow(c, def) + case `action`: + return Action(def) + case `reference`: + return Reference(c, def) + } + return nil +} + +func (a *Step) GetService(c px.Context) serviceapi.Service { + return service.GetService(c, a.serviceId) +} + +func (a *Step) ServiceId() px.TypedName { + return a.serviceId +} + +func StepLabel(a api.Step) string { + return fmt.Sprintf(`%s '%s'`, a.Style(), a.Name()) +} + +func StepId(a api.Step) string { + b := bytes.NewBufferString(`lyra://puppet.com`) + for _, s := range strings.Split(a.Name(), `::`) { + b.WriteByte('/') + b.WriteString(url.PathEscape(s)) + } + vs := a.IdParams() + if len(vs) > 0 { + b.WriteByte('?') + b.WriteString(vs.Encode()) + } + return b.String() +} + +func (a *Step) When() wf.Condition { + return a.when +} + +func (a *Step) Name() string { + return a.name +} + +func (a *Step) Parameters() []px.Parameter { + return a.parameters +} + +func (a *Step) Returns() []px.Parameter { + return a.returns +} + +func (a *Step) Init(def serviceapi.Definition) { + a.index = -1 + a.serviceId = def.ServiceId() + a.name = def.Identifier().Name() + props := def.Properties() + a.parameters = getParameters(`parameters`, props) + a.returns = getParameters(`returns`, props) + if wh, ok := props.Get4(`when`); ok { + a.when = wh.(wf.Condition) + } else { + a.when = wf.Always + } +} + +func getParameters(key string, props px.OrderedMap) []px.Parameter 
{ + if parameters, ok := props.Get4(key); ok { + ia := parameters.(px.List) + is := make([]px.Parameter, ia.Len()) + ia.EachWithIndex(func(iv px.Value, idx int) { is[idx] = iv.(px.Parameter) }) + return is + } + return []px.Parameter{} +} + +func (a *Step) IdParams() url.Values { + if a.index >= 0 { + return url.Values{`index`: {strconv.Itoa(a.index)}} + } + return url.Values{} +} + +func ResolveParameters(ctx px.Context, a api.Step, parameters px.OrderedMap, p px.Parameter) px.Value { + if !p.HasValue() { + if v, ok := parameters.Get4(p.Name()); ok { + return v + } + panic(px.Error(ParameterUnresolved, issue.H{`step`: a, `parameter`: p.Name()})) + } + return types.ResolveDeferred(ctx, p.Value(), parameters) +} + +// setIndex must only be called after a direct cloning operation on the instance, i.e. from WithIndex() +func (a *Step) setIndex(index int) { + a.index = index +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/workflow.go b/vendor/github.com/lyraproj/wfe/wfe/workflow.go new file mode 100644 index 0000000..cc54537 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/workflow.go @@ -0,0 +1,55 @@ +package wfe + +import ( + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/serviceapi" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" +) + +type workflow struct { + Step + + steps []api.Step +} + +var DefinitionListType = types.NewArrayType(serviceapi.DefinitionMetaType, nil) + +func (w *workflow) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + wf := NewWorkflowEngine(w) + wf.Validate() + return wf.Run(ctx, parameters) +} + +func (w *workflow) Identifier() string { + return StepId(w) +} + +func (w *workflow) Label() string { + return StepLabel(w) +} + +func (w *workflow) Style() string { + return `workflow` +} + +func (w *workflow) WithIndex(index int) api.Step { + wc := *w // Copy by value + wc.setIndex(index) + return &wc +} + +func Workflow(c px.Context, def serviceapi.Definition) api.Workflow { + wf := &workflow{} + wf.Init(def) + steps := service.GetProperty(def, `steps`, DefinitionListType).(px.List) + as := make([]api.Step, steps.Len()) + steps.EachWithIndex(func(v px.Value, i int) { as[i] = CreateStep(c, v.(serviceapi.Definition)) }) + wf.steps = as + return wf +} + +func (w *workflow) Steps() []api.Step { + return w.steps +} diff --git a/vendor/github.com/lyraproj/wfe/wfe/workflowengine.go b/vendor/github.com/lyraproj/wfe/wfe/workflowengine.go new file mode 100644 index 0000000..d5ce931 --- /dev/null +++ b/vendor/github.com/lyraproj/wfe/wfe/workflowengine.go @@ -0,0 +1,498 @@ +package wfe + +import ( + "bytes" + "errors" + "fmt" + "math" + "sort" + "sync" + "sync/atomic" + + "github.com/lyraproj/issue/issue" + "github.com/lyraproj/pcore/px" + "github.com/lyraproj/pcore/types" + "github.com/lyraproj/servicesdk/wf" + "github.com/lyraproj/wfe/api" + "github.com/lyraproj/wfe/service" + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/encoding" + "gonum.org/v1/gonum/graph/encoding/dot" + "gonum.org/v1/gonum/graph/simple" +) + +type WorkflowEngine interface { + Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap + + BuildInvertedGraph(ctx px.Context, existsFunc func(string) bool) + + GraphAsDot() []byte + + // Validate ensures that all consumed values have a corresponding producer and that only + // one producer exists for each produced value. 
+ Validate() +} + +type serverStep struct { + api.Step + graph.Node + resolved chan bool +} + +func appendParameterNames(params []px.Parameter, b *bytes.Buffer) { + for i, p := range params { + if i > 0 { + b.WriteByte(',') + } + b.WriteString(p.Name()) + } +} + +func (a *serverStep) Attributes() []encoding.Attribute { + b := bytes.NewBufferString(`"`) + b.WriteString(service.LeafName(a.Name())) + b.WriteByte('{') + b.WriteString("\nparameters:[") + appendParameterNames(a.Parameters(), b) + b.WriteString("],\nreturns:[") + appendParameterNames(a.Returns(), b) + b.WriteString(`]}"`) + return []encoding.Attribute{{Key: "label", Value: b.String()}} +} + +func (a *serverStep) DOTID() string { + return service.LeafName(a.Name()) +} + +func (a *serverStep) SetResolved() { + close(a.resolved) +} + +func (a *serverStep) Resolved() <-chan bool { + return a.resolved +} + +type workflowEngine struct { + api.Workflow + runLatchLock sync.Mutex + valuesLock sync.RWMutex + runLatch map[int64]bool + values map[string]px.Value + inbox chan *serverStep + jobCounter int32 + done chan bool + graph *simple.DirectedGraph + errors []error +} + +func NewWorkflowEngine(workflow api.Workflow) WorkflowEngine { + as := &workflowEngine{ + Workflow: workflow, + runLatch: make(map[int64]bool), + graph: simple.NewDirectedGraph(), + inbox: make(chan *serverStep, 20), + done: make(chan bool)} + + for _, a := range workflow.Steps() { + as.addStep(a) + } + return as +} + +func (s *workflowEngine) addStep(na api.Step) { + // Check that no other step is a producer of the same values + ni := s.graph.Nodes() + if ni != nil { + for ni.Next() { + a := ni.Node().(api.Step) + if a.Name() == na.Name() { + panic(px.Error(AlreadyDefined, issue.H{`name`: na.Name()})) + } + } + } + a := &serverStep{Step: na, Node: s.graph.NewNode(), resolved: make(chan bool)} + s.graph.AddNode(a) +} + +// maxGuards control how many possible variations there can be of the workflow graph. The +// actual number is 2 to the power maxGuards. +const maxGuards = 8 + +func (s *workflowEngine) GraphAsDot() []byte { + de, err := dot.Marshal(s.graph, s.Name(), ``, ` `) + if err != nil { + panic(px.Error(GraphDotMarshal, issue.H{`detail`: err.Error()})) + } + return de +} + +func (s *workflowEngine) BuildInvertedGraph(c px.Context, existsFunc func(string) bool) { + g := s.graph + ni := g.Nodes() + if ni == nil { + return + } + + ei := g.Edges() + for ei.Next() { + e := ei.Edge() + g.RemoveEdge(e.From().ID(), e.To().ID()) + } + + // Add workflow as the producer of parameters with values. 
+ vp := make(valueProducers, ni.Len()*5) + vp.add(s, s.Parameters()) + for ni.Next() { + fa := ni.Node().(*serverStep) + if fa.When() == wf.Always || existsFunc(fa.Identifier()) { + vp.add(fa, fa.Returns()) + } + } + + ni.Reset() + for ni.Next() { + fa := ni.Node().(*serverStep) + if fa.When() == wf.Always || existsFunc(fa.Identifier()) { + ds := s.dependents(fa, vp) + for _, dep := range ds { + g.SetEdge(g.NewEdge(fa, dep.(graph.Node))) + } + } + } +} + +func (s *workflowEngine) Validate() { + // Build a map that associates a produced value with the producer of that value + guards := make(map[string]bool) + + ni := s.graph.Nodes() + if ni == nil { + return + } + + for ni.Next() { + for _, g := range ni.Node().(*serverStep).When().Names() { + guards[g] = false + } + } + + gc := uint(len(guards)) + if gc > 0 { + maxVariations := int(math.Pow(2.0, float64(gc))) + if gc > maxGuards { + panic(px.Error(TooManyGuards, issue.H{`step`: s, `max`: maxGuards, `count`: gc})) + } + + guardNames := make([]string, 0, gc) + for n := range guards { + guardNames = append(guardNames, n) + } + sort.Strings(guardNames) + + // Check all variations for validity with respect to parameters and returns + for bitmap := 0; bitmap <= maxVariations; bitmap++ { + es := make([]*types.HashEntry, gc) + for i := uint(0); i < gc; i++ { + es[i] = types.WrapHashEntry2(guardNames[i], types.WrapBoolean(bitmap&(1< 1 { + panic(px.Error(MultipleProducersOfValue, issue.H{`step1`: v[0], `step2`: v[1], `value`: k})) + } + } + for _, param := range a.Returns() { + if _, found := vp[param.Name()]; found { + continue + } + panic(px.Error(NoProducerOfValue, issue.H{`step`: a, `value`: param.Name()})) + } +} + +func (vp valueProducers) validateParameters(a api.Step) { + var checkDep = func(name string) { + if _, found := vp[name]; !found { + panic(px.Error(NoProducerOfValue, issue.H{`step`: a, `value`: name})) + } + } + for _, name := range a.When().Names() { + checkDep(name) + } + for _, param := range a.Parameters() { + if !param.HasValue() { + checkDep(param.Name()) + } + } +} + +func (s *workflowEngine) Run(ctx px.Context, parameters px.OrderedMap) px.OrderedMap { + s.values = make(map[string]px.Value, 37) + parameters.EachPair(func(k, v px.Value) { + s.values[k.String()] = v + }) + + // Run all nodes that can run, i.e. 
root nodes + ni := s.graph.Nodes() + if ni == nil || ni.Len() == 0 { + return nil + } + + for _, param := range s.Workflow.Parameters() { + s.values[param.Name()] = s.resolveParameter(ctx, s.Workflow, param) + } + + for w := 1; w <= 5; w++ { + px.Fork(ctx, func(cf px.Context) { s.stepWorker(cf, w) }) + } + for ni.Next() { + n := ni.Node() + if s.graph.To(n.ID()).Len() == 0 { + s.scheduleStep(n.(*serverStep)) + } + } + <-s.done + + if s.errors != nil { + var err error + if len(s.errors) == 1 { + err = s.errors[0] + } else { + err = px.Error(api.MultipleErrors, issue.H{`errors`: s.errors}) + } + panic(err) + } + + entries := make([]*types.HashEntry, len(s.Returns())) + for i, out := range s.Returns() { + n := out.Name() + entries[i] = types.WrapHashEntry2(n, s.values[n]) + } + return types.WrapHash(entries) +} + +func (s *workflowEngine) DumpVariables() { + names := make([]string, 0, len(s.values)) + for n := range s.values { + names = append(names, n) + } + sort.Strings(names) + for _, n := range names { + fmt.Printf("%s = %v\n", n, s.values[n]) + } +} + +func (s *workflowEngine) dependents(a api.Step, vp valueProducers) []api.Step { + + dam := make(map[string]api.Step) + var addDeps = func(name string) { + if ds, found := vp[name]; found { + for _, d := range ds { + if d != s { // Workflow itself only has external dependencies + dam[d.Name()] = d + } + } + return + } + panic(px.Error(NoProducerOfValue, issue.H{`step`: a, `value`: name})) + } + +nextName: + for _, name := range a.When().Names() { + for _, param := range a.Parameters() { + if name == param.Name() { + continue nextName + } + } + addDeps(name) + } + for _, param := range a.Parameters() { + if !param.HasValue() { + addDeps(param.Name()) + } + } + + da := make([]api.Step, 0, len(dam)) + for _, vp := range dam { + da = append(da, vp) + } + + // Ensure that steps are sorted by name + sort.Slice(da, func(i, j int) bool { + return da[i].Name() < da[j].Name() + }) + return da +} + +// This function represents a worker that spawns steps +func (s *workflowEngine) stepWorker(ctx px.Context, id int) { + for a := range s.inbox { + s.runStep(ctx, a) + } +} + +func (s *workflowEngine) runStep(ctx px.Context, a *serverStep) { + defer func() { + r := recover() + if r != nil { + var err error + switch r := r.(type) { + case error: + err = r + case string: + err = errors.New(r) + case fmt.Stringer: + err = errors.New(r.String()) + default: + err = fmt.Errorf("%v", r) + } + s.runLatchLock.Lock() + if s.errors == nil { + s.errors = []error{err} + } else { + s.errors = append(s.errors, err) + } + s.runLatchLock.Unlock() + } + if atomic.AddInt32(&s.jobCounter, -1) <= 0 { + close(s.inbox) + close(s.done) + } + }() + + s.runLatchLock.Lock() + if s.errors != nil || s.runLatch[a.ID()] { + s.runLatchLock.Unlock() + return + } + s.runLatch[a.ID()] = true + s.runLatchLock.Unlock() + + s.waitForEdgesTo(a) + + params := a.Parameters() + entries := make([]*types.HashEntry, len(params)) + for i, param := range params { + entries[i] = types.WrapHashEntry2(param.Name(), s.resolveParameter(ctx, a, param)) + } + args := types.WrapHash(entries) + + result := a.Run(ctx, args).(px.OrderedMap) + if result != nil && result.Len() > 0 { + s.valuesLock.Lock() + result.EachPair(func(k, v px.Value) { + s.values[k.String()] = v + }) + s.valuesLock.Unlock() + } + a.SetResolved() + + // Schedule all steps that are dependent on this step. Since a node can be + // dependent on several steps, it might be scheduled several times. It will + // however only run once. 
This is controlled by the runLatch. + ni := s.graph.From(a.ID()) + for ni.Next() { + s.scheduleStep(ni.Node().(*serverStep)) + } +} + +func (s *workflowEngine) resolveParameter(ctx px.Context, step api.Step, param px.Parameter) px.Value { + n := param.Name() + if !param.HasValue() { + s.valuesLock.RLock() + v, ok := s.values[n] + s.valuesLock.RUnlock() + if ok { + return v + } + panic(px.Error(NoProducerOfValue, issue.H{`step`: step, `value`: n})) + } + return types.ResolveDeferred(ctx, param.Value(), ctx.Scope()) +} + +// Ensure that all nodes that has an edge to this node have been +// fully resolved. +func (s *workflowEngine) waitForEdgesTo(a *serverStep) { + parents := s.graph.To(a.ID()) + for parents.Next() { + <-parents.Node().(*serverStep).Resolved() + } +} + +func (s *workflowEngine) scheduleStep(a *serverStep) { + atomic.AddInt32(&s.jobCounter, 1) + s.inbox <- a +} diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE new file mode 100644 index 0000000..91b5cef --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/mattn/go-colorable/_example/escape-seq/main.go b/vendor/github.com/mattn/go-colorable/_example/escape-seq/main.go new file mode 100644 index 0000000..8cbcb90 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/_example/escape-seq/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "bufio" + "fmt" + + "github.com/mattn/go-colorable" +) + +func main() { + stdOut := bufio.NewWriter(colorable.NewColorableStdout()) + + fmt.Fprint(stdOut, "\x1B[3GMove to 3rd Column\n") + fmt.Fprint(stdOut, "\x1B[1;2HMove to 2nd Column on 1st Line\n") + stdOut.Flush() +} diff --git a/vendor/github.com/mattn/go-colorable/_example/logrus/main.go b/vendor/github.com/mattn/go-colorable/_example/logrus/main.go new file mode 100644 index 0000000..c569164 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/_example/logrus/main.go @@ -0,0 +1,16 @@ +package main + +import ( + "github.com/mattn/go-colorable" + "github.com/sirupsen/logrus" +) + +func main() { + logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true}) + logrus.SetOutput(colorable.NewColorableStdout()) + + logrus.Info("succeeded") + logrus.Warn("not correct") + logrus.Error("something error") + logrus.Fatal("panic") +} diff --git a/vendor/github.com/mattn/go-colorable/_example/title/main.go b/vendor/github.com/mattn/go-colorable/_example/title/main.go new file mode 100644 index 0000000..e208870 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/_example/title/main.go @@ -0,0 +1,14 @@ +package main + +import ( + "fmt" + "os" + . "github.com/mattn/go-colorable" +) + +func main() { + out := NewColorableStdout() + fmt.Fprint(out, "\x1B]0;TITLE Changed\007(See title and hit any key)") + var c [1]byte + os.Stdin.Read(c[:]) +} diff --git a/vendor/github.com/mattn/go-colorable/cmd/colorable/colorable.go b/vendor/github.com/mattn/go-colorable/cmd/colorable/colorable.go new file mode 100644 index 0000000..8790477 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/cmd/colorable/colorable.go @@ -0,0 +1,12 @@ +package main + +import ( + "io" + "os" + + "github.com/mattn/go-colorable" +) + +func main() { + io.Copy(colorable.NewColorableStdout(), os.Stdin) +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go new file mode 100644 index 0000000..1f28d77 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go @@ -0,0 +1,29 @@ +// +build appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable return new instance of Writer which handle escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout return new instance of Writer which handle escape sequence for stdout. +func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr return new instance of Writer which handle escape sequence for stderr. 
+func NewColorableStderr() io.Writer { + return os.Stderr +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go new file mode 100644 index 0000000..887f203 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_others.go @@ -0,0 +1,30 @@ +// +build !windows +// +build !appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable return new instance of Writer which handle escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout return new instance of Writer which handle escape sequence for stdout. +func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr return new instance of Writer which handle escape sequence for stderr. +func NewColorableStderr() io.Writer { + return os.Stderr +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go new file mode 100644 index 0000000..404e10c --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go @@ -0,0 +1,980 @@ +// +build windows +// +build !appengine + +package colorable + +import ( + "bytes" + "io" + "math" + "os" + "strconv" + "strings" + "syscall" + "unsafe" + + "github.com/mattn/go-isatty" +) + +const ( + foregroundBlue = 0x1 + foregroundGreen = 0x2 + foregroundRed = 0x4 + foregroundIntensity = 0x8 + foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity) + backgroundBlue = 0x10 + backgroundGreen = 0x20 + backgroundRed = 0x40 + backgroundIntensity = 0x80 + backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity) +) + +const ( + genericRead = 0x80000000 + genericWrite = 0x40000000 +) + +const ( + consoleTextmodeBuffer = 0x1 +) + +type wchar uint16 +type short int16 +type dword uint32 +type word uint16 + +type coord struct { + x short + y short +} + +type smallRect struct { + left short + top short + right short + bottom short +} + +type consoleScreenBufferInfo struct { + size coord + cursorPosition coord + attributes word + window smallRect + maximumWindowSize coord +} + +type consoleCursorInfo struct { + size dword + visible int32 +} + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") + procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute") + procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition") + procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW") + procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute") + procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo") + procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo") + procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW") + procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer") +) + +// Writer provide colorable Writer to the console +type Writer struct { + out io.Writer + handle syscall.Handle + althandle syscall.Handle + oldattr word + oldpos coord + rest bytes.Buffer +} + +// NewColorable return new instance of Writer which handle escape sequence from File. 
+func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + if isatty.IsTerminal(file.Fd()) { + var csbi consoleScreenBufferInfo + handle := syscall.Handle(file.Fd()) + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}} + } + return file +} + +// NewColorableStdout return new instance of Writer which handle escape sequence for stdout. +func NewColorableStdout() io.Writer { + return NewColorable(os.Stdout) +} + +// NewColorableStderr return new instance of Writer which handle escape sequence for stderr. +func NewColorableStderr() io.Writer { + return NewColorable(os.Stderr) +} + +var color256 = map[int]int{ + 0: 0x000000, + 1: 0x800000, + 2: 0x008000, + 3: 0x808000, + 4: 0x000080, + 5: 0x800080, + 6: 0x008080, + 7: 0xc0c0c0, + 8: 0x808080, + 9: 0xff0000, + 10: 0x00ff00, + 11: 0xffff00, + 12: 0x0000ff, + 13: 0xff00ff, + 14: 0x00ffff, + 15: 0xffffff, + 16: 0x000000, + 17: 0x00005f, + 18: 0x000087, + 19: 0x0000af, + 20: 0x0000d7, + 21: 0x0000ff, + 22: 0x005f00, + 23: 0x005f5f, + 24: 0x005f87, + 25: 0x005faf, + 26: 0x005fd7, + 27: 0x005fff, + 28: 0x008700, + 29: 0x00875f, + 30: 0x008787, + 31: 0x0087af, + 32: 0x0087d7, + 33: 0x0087ff, + 34: 0x00af00, + 35: 0x00af5f, + 36: 0x00af87, + 37: 0x00afaf, + 38: 0x00afd7, + 39: 0x00afff, + 40: 0x00d700, + 41: 0x00d75f, + 42: 0x00d787, + 43: 0x00d7af, + 44: 0x00d7d7, + 45: 0x00d7ff, + 46: 0x00ff00, + 47: 0x00ff5f, + 48: 0x00ff87, + 49: 0x00ffaf, + 50: 0x00ffd7, + 51: 0x00ffff, + 52: 0x5f0000, + 53: 0x5f005f, + 54: 0x5f0087, + 55: 0x5f00af, + 56: 0x5f00d7, + 57: 0x5f00ff, + 58: 0x5f5f00, + 59: 0x5f5f5f, + 60: 0x5f5f87, + 61: 0x5f5faf, + 62: 0x5f5fd7, + 63: 0x5f5fff, + 64: 0x5f8700, + 65: 0x5f875f, + 66: 0x5f8787, + 67: 0x5f87af, + 68: 0x5f87d7, + 69: 0x5f87ff, + 70: 0x5faf00, + 71: 0x5faf5f, + 72: 0x5faf87, + 73: 0x5fafaf, + 74: 0x5fafd7, + 75: 0x5fafff, + 76: 0x5fd700, + 77: 0x5fd75f, + 78: 0x5fd787, + 79: 0x5fd7af, + 80: 0x5fd7d7, + 81: 0x5fd7ff, + 82: 0x5fff00, + 83: 0x5fff5f, + 84: 0x5fff87, + 85: 0x5fffaf, + 86: 0x5fffd7, + 87: 0x5fffff, + 88: 0x870000, + 89: 0x87005f, + 90: 0x870087, + 91: 0x8700af, + 92: 0x8700d7, + 93: 0x8700ff, + 94: 0x875f00, + 95: 0x875f5f, + 96: 0x875f87, + 97: 0x875faf, + 98: 0x875fd7, + 99: 0x875fff, + 100: 0x878700, + 101: 0x87875f, + 102: 0x878787, + 103: 0x8787af, + 104: 0x8787d7, + 105: 0x8787ff, + 106: 0x87af00, + 107: 0x87af5f, + 108: 0x87af87, + 109: 0x87afaf, + 110: 0x87afd7, + 111: 0x87afff, + 112: 0x87d700, + 113: 0x87d75f, + 114: 0x87d787, + 115: 0x87d7af, + 116: 0x87d7d7, + 117: 0x87d7ff, + 118: 0x87ff00, + 119: 0x87ff5f, + 120: 0x87ff87, + 121: 0x87ffaf, + 122: 0x87ffd7, + 123: 0x87ffff, + 124: 0xaf0000, + 125: 0xaf005f, + 126: 0xaf0087, + 127: 0xaf00af, + 128: 0xaf00d7, + 129: 0xaf00ff, + 130: 0xaf5f00, + 131: 0xaf5f5f, + 132: 0xaf5f87, + 133: 0xaf5faf, + 134: 0xaf5fd7, + 135: 0xaf5fff, + 136: 0xaf8700, + 137: 0xaf875f, + 138: 0xaf8787, + 139: 0xaf87af, + 140: 0xaf87d7, + 141: 0xaf87ff, + 142: 0xafaf00, + 143: 0xafaf5f, + 144: 0xafaf87, + 145: 0xafafaf, + 146: 0xafafd7, + 147: 0xafafff, + 148: 0xafd700, + 149: 0xafd75f, + 150: 0xafd787, + 151: 0xafd7af, + 152: 0xafd7d7, + 153: 0xafd7ff, + 154: 0xafff00, + 155: 0xafff5f, + 156: 0xafff87, + 157: 0xafffaf, + 158: 0xafffd7, + 159: 0xafffff, + 160: 0xd70000, + 161: 0xd7005f, + 162: 0xd70087, + 163: 0xd700af, + 164: 0xd700d7, + 165: 0xd700ff, + 166: 0xd75f00, + 167: 
0xd75f5f, + 168: 0xd75f87, + 169: 0xd75faf, + 170: 0xd75fd7, + 171: 0xd75fff, + 172: 0xd78700, + 173: 0xd7875f, + 174: 0xd78787, + 175: 0xd787af, + 176: 0xd787d7, + 177: 0xd787ff, + 178: 0xd7af00, + 179: 0xd7af5f, + 180: 0xd7af87, + 181: 0xd7afaf, + 182: 0xd7afd7, + 183: 0xd7afff, + 184: 0xd7d700, + 185: 0xd7d75f, + 186: 0xd7d787, + 187: 0xd7d7af, + 188: 0xd7d7d7, + 189: 0xd7d7ff, + 190: 0xd7ff00, + 191: 0xd7ff5f, + 192: 0xd7ff87, + 193: 0xd7ffaf, + 194: 0xd7ffd7, + 195: 0xd7ffff, + 196: 0xff0000, + 197: 0xff005f, + 198: 0xff0087, + 199: 0xff00af, + 200: 0xff00d7, + 201: 0xff00ff, + 202: 0xff5f00, + 203: 0xff5f5f, + 204: 0xff5f87, + 205: 0xff5faf, + 206: 0xff5fd7, + 207: 0xff5fff, + 208: 0xff8700, + 209: 0xff875f, + 210: 0xff8787, + 211: 0xff87af, + 212: 0xff87d7, + 213: 0xff87ff, + 214: 0xffaf00, + 215: 0xffaf5f, + 216: 0xffaf87, + 217: 0xffafaf, + 218: 0xffafd7, + 219: 0xffafff, + 220: 0xffd700, + 221: 0xffd75f, + 222: 0xffd787, + 223: 0xffd7af, + 224: 0xffd7d7, + 225: 0xffd7ff, + 226: 0xffff00, + 227: 0xffff5f, + 228: 0xffff87, + 229: 0xffffaf, + 230: 0xffffd7, + 231: 0xffffff, + 232: 0x080808, + 233: 0x121212, + 234: 0x1c1c1c, + 235: 0x262626, + 236: 0x303030, + 237: 0x3a3a3a, + 238: 0x444444, + 239: 0x4e4e4e, + 240: 0x585858, + 241: 0x626262, + 242: 0x6c6c6c, + 243: 0x767676, + 244: 0x808080, + 245: 0x8a8a8a, + 246: 0x949494, + 247: 0x9e9e9e, + 248: 0xa8a8a8, + 249: 0xb2b2b2, + 250: 0xbcbcbc, + 251: 0xc6c6c6, + 252: 0xd0d0d0, + 253: 0xdadada, + 254: 0xe4e4e4, + 255: 0xeeeeee, +} + +// `\033]0;TITLESTR\007` +func doTitleSequence(er *bytes.Reader) error { + var c byte + var err error + + c, err = er.ReadByte() + if err != nil { + return err + } + if c != '0' && c != '2' { + return nil + } + c, err = er.ReadByte() + if err != nil { + return err + } + if c != ';' { + return nil + } + title := make([]byte, 0, 80) + for { + c, err = er.ReadByte() + if err != nil { + return err + } + if c == 0x07 || c == '\n' { + break + } + title = append(title, c) + } + if len(title) > 0 { + title8, err := syscall.UTF16PtrFromString(string(title)) + if err == nil { + procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8))) + } + } + return nil +} + +// Write write data on console +func (w *Writer) Write(data []byte) (n int, err error) { + var csbi consoleScreenBufferInfo + procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) + + handle := w.handle + + var er *bytes.Reader + if w.rest.Len() > 0 { + var rest bytes.Buffer + w.rest.WriteTo(&rest) + w.rest.Reset() + rest.Write(data) + er = bytes.NewReader(rest.Bytes()) + } else { + er = bytes.NewReader(data) + } + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + break loop + } + + switch c2 { + case '>': + continue + case ']': + w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 { + break loop + } + er = bytes.NewReader(w.rest.Bytes()[2:]) + err := doTitleSequence(er) + if err != nil { + break loop + } + w.rest.Reset() + continue + // https://github.com/mattn/go-colorable/issues/27 + case '7': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + continue + case '8': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + continue + case 0x5b: + // execute part after switch + default: + continue + } + + 
w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + + var buf bytes.Buffer + var m byte + for i, c := range w.rest.Bytes()[2:] { + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + m = c + er = bytes.NewReader(w.rest.Bytes()[2+i+1:]) + w.rest.Reset() + break + } + buf.Write([]byte(string(c))) + } + if m == 0 { + break loop + } + + switch m { + case 'A': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'B': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'C': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'D': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x -= short(n) + if csbi.cursorPosition.x < 0 { + csbi.cursorPosition.x = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'E': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'F': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'G': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = short(n - 1) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'H', 'f': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + if buf.Len() > 0 { + token := strings.Split(buf.String(), ";") + switch len(token) { + case 1: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + csbi.cursorPosition.y = short(n1 - 1) + case 2: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + n2, err := strconv.Atoi(token[1]) + if err != nil { + continue + } + csbi.cursorPosition.x = short(n2 - 1) + csbi.cursorPosition.y = short(n1 - 1) + } + } else { + csbi.cursorPosition.y = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'J': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + var count, written dword + var cursor coord + 
procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'K': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + var cursor coord + var count, written dword + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'm': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + attr := csbi.attributes + cs := buf.String() + if cs == "" { + procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr)) + continue + } + token := strings.Split(cs, ";") + for i := 0; i < len(token); i++ { + ns := token[i] + if n, err = strconv.Atoi(ns); err == nil { + switch { + case n == 0 || n == 100: + attr = w.oldattr + case 1 <= n && n <= 5: + attr |= foregroundIntensity + case n == 7: + attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) + case n == 22 || n == 25: + attr |= foregroundIntensity + case n == 27: + attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) + case 30 <= n && n <= 37: + attr &= backgroundMask + if (n-30)&1 != 0 { + attr |= foregroundRed + } + if (n-30)&2 != 0 { + attr |= foregroundGreen + } + if (n-30)&4 != 0 { + attr |= foregroundBlue + } + case n == 38: // set foreground color. 
+ if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256foreAttr == nil { + n256setup() + } + attr &= backgroundMask + attr |= n256foreAttr[n256] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= foregroundRed + } + if g > 127 { + attr |= foregroundGreen + } + if b > 127 { + attr |= foregroundBlue + } + } else { + attr = attr & (w.oldattr & backgroundMask) + } + case n == 39: // reset foreground color. + attr &= backgroundMask + attr |= w.oldattr & foregroundMask + case 40 <= n && n <= 47: + attr &= foregroundMask + if (n-40)&1 != 0 { + attr |= backgroundRed + } + if (n-40)&2 != 0 { + attr |= backgroundGreen + } + if (n-40)&4 != 0 { + attr |= backgroundBlue + } + case n == 48: // set background color. + if i < len(token)-2 && token[i+1] == "5" { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256backAttr == nil { + n256setup() + } + attr &= foregroundMask + attr |= n256backAttr[n256] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= backgroundRed + } + if g > 127 { + attr |= backgroundGreen + } + if b > 127 { + attr |= backgroundBlue + } + } else { + attr = attr & (w.oldattr & foregroundMask) + } + case n == 49: // reset foreground color. + attr &= foregroundMask + attr |= w.oldattr & backgroundMask + case 90 <= n && n <= 97: + attr = (attr & backgroundMask) + attr |= foregroundIntensity + if (n-90)&1 != 0 { + attr |= foregroundRed + } + if (n-90)&2 != 0 { + attr |= foregroundGreen + } + if (n-90)&4 != 0 { + attr |= foregroundBlue + } + case 100 <= n && n <= 107: + attr = (attr & foregroundMask) + attr |= backgroundIntensity + if (n-100)&1 != 0 { + attr |= backgroundRed + } + if (n-100)&2 != 0 { + attr |= backgroundGreen + } + if (n-100)&4 != 0 { + attr |= backgroundBlue + } + } + procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr)) + } + } + case 'h': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle == 0 { + h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0) + w.althandle = syscall.Handle(h) + if w.althandle != 0 { + handle = w.althandle + } + } + } + case 'l': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle != 0 { + syscall.CloseHandle(w.althandle) + w.althandle = 0 + handle = w.handle + } + } + case 's': + 
procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + case 'u': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + } + } + + return len(data), nil +} + +type consoleColor struct { + rgb int + red bool + green bool + blue bool + intensity bool +} + +func (c consoleColor) foregroundAttr() (attr word) { + if c.red { + attr |= foregroundRed + } + if c.green { + attr |= foregroundGreen + } + if c.blue { + attr |= foregroundBlue + } + if c.intensity { + attr |= foregroundIntensity + } + return +} + +func (c consoleColor) backgroundAttr() (attr word) { + if c.red { + attr |= backgroundRed + } + if c.green { + attr |= backgroundGreen + } + if c.blue { + attr |= backgroundBlue + } + if c.intensity { + attr |= backgroundIntensity + } + return +} + +var color16 = []consoleColor{ + {0x000000, false, false, false, false}, + {0x000080, false, false, true, false}, + {0x008000, false, true, false, false}, + {0x008080, false, true, true, false}, + {0x800000, true, false, false, false}, + {0x800080, true, false, true, false}, + {0x808000, true, true, false, false}, + {0xc0c0c0, true, true, true, false}, + {0x808080, false, false, false, true}, + {0x0000ff, false, false, true, true}, + {0x00ff00, false, true, false, true}, + {0x00ffff, false, true, true, true}, + {0xff0000, true, false, false, true}, + {0xff00ff, true, false, true, true}, + {0xffff00, true, true, false, true}, + {0xffffff, true, true, true, true}, +} + +type hsv struct { + h, s, v float32 +} + +func (a hsv) dist(b hsv) float32 { + dh := a.h - b.h + switch { + case dh > 0.5: + dh = 1 - dh + case dh < -0.5: + dh = -1 - dh + } + ds := a.s - b.s + dv := a.v - b.v + return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv))) +} + +func toHSV(rgb int) hsv { + r, g, b := float32((rgb&0xFF0000)>>16)/256.0, + float32((rgb&0x00FF00)>>8)/256.0, + float32(rgb&0x0000FF)/256.0 + min, max := minmax3f(r, g, b) + h := max - min + if h > 0 { + if max == r { + h = (g - b) / h + if h < 0 { + h += 6 + } + } else if max == g { + h = 2 + (b-r)/h + } else { + h = 4 + (r-g)/h + } + } + h /= 6.0 + s := max - min + if max != 0 { + s /= max + } + v := max + return hsv{h: h, s: s, v: v} +} + +type hsvTable []hsv + +func toHSVTable(rgbTable []consoleColor) hsvTable { + t := make(hsvTable, len(rgbTable)) + for i, c := range rgbTable { + t[i] = toHSV(c.rgb) + } + return t +} + +func (t hsvTable) find(rgb int) consoleColor { + hsv := toHSV(rgb) + n := 7 + l := float32(5.0) + for i, p := range t { + d := hsv.dist(p) + if d < l { + l, n = d, i + } + } + return color16[n] +} + +func minmax3f(a, b, c float32) (min, max float32) { + if a < b { + if b < c { + return a, c + } else if a < c { + return a, b + } else { + return c, b + } + } else { + if a < c { + return b, c + } else if b < c { + return b, a + } else { + return c, a + } + } +} + +var n256foreAttr []word +var n256backAttr []word + +func n256setup() { + n256foreAttr = make([]word, 256) + n256backAttr = make([]word, 256) + t := toHSVTable(color16) + for i, rgb := range color256 { + c := t.find(rgb) + n256foreAttr[i] = c.foregroundAttr() + n256backAttr[i] = c.backgroundAttr() + } +} diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go new file mode 100644 index 0000000..9721e16 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/noncolorable.go @@ -0,0 +1,55 @@ +package colorable + +import ( + "bytes" + "io" +) + +// NonColorable hold writer 
but remove escape sequence. +type NonColorable struct { + out io.Writer +} + +// NewNonColorable return new instance of Writer which remove escape sequence from Writer. +func NewNonColorable(w io.Writer) io.Writer { + return &NonColorable{out: w} +} + +// Write write data on console +func (w *NonColorable) Write(data []byte) (n int, err error) { + er := bytes.NewReader(data) + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + break loop + } + if c2 != 0x5b { + continue + } + + var buf bytes.Buffer + for { + c, err := er.ReadByte() + if err != nil { + break loop + } + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + break + } + buf.Write([]byte(string(c))) + } + } + + return len(data), nil +} diff --git a/vendor/github.com/mattn/go-isatty/LICENSE b/vendor/github.com/mattn/go-isatty/LICENSE new file mode 100644 index 0000000..65dc692 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/LICENSE @@ -0,0 +1,9 @@ +Copyright (c) Yasuhiro MATSUMOTO + +MIT License (Expat) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/mattn/go-isatty/doc.go b/vendor/github.com/mattn/go-isatty/doc.go new file mode 100644 index 0000000..17d4f90 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/doc.go @@ -0,0 +1,2 @@ +// Package isatty implements interface to isatty +package isatty diff --git a/vendor/github.com/mattn/go-isatty/isatty_android.go b/vendor/github.com/mattn/go-isatty/isatty_android.go new file mode 100644 index 0000000..d3567cb --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_android.go @@ -0,0 +1,23 @@ +// +build android + +package isatty + +import ( + "syscall" + "unsafe" +) + +const ioctlReadTermios = syscall.TCGETS + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + var termios syscall.Termios + _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) + return err == 0 +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. 
+func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go new file mode 100644 index 0000000..07e9303 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go @@ -0,0 +1,24 @@ +// +build darwin freebsd openbsd netbsd dragonfly +// +build !appengine + +package isatty + +import ( + "syscall" + "unsafe" +) + +const ioctlReadTermios = syscall.TIOCGETA + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + var termios syscall.Termios + _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) + return err == 0 +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_linux.go b/vendor/github.com/mattn/go-isatty/isatty_linux.go new file mode 100644 index 0000000..4f8af46 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_linux.go @@ -0,0 +1,19 @@ +// +build linux +// +build !appengine +// +build !android + +package isatty + +import "golang.org/x/sys/unix" + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS) + return err == nil +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go new file mode 100644 index 0000000..f02849c --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_others.go @@ -0,0 +1,15 @@ +// +build appengine js + +package isatty + +// IsTerminal returns true if the file descriptor is terminal which +// is always false on js and appengine classic which is a sandboxed PaaS. +func IsTerminal(fd uintptr) bool { + return false +} + +// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. +func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_solaris.go b/vendor/github.com/mattn/go-isatty/isatty_solaris.go new file mode 100644 index 0000000..bdd5c79 --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_solaris.go @@ -0,0 +1,22 @@ +// +build solaris +// +build !appengine + +package isatty + +import ( + "golang.org/x/sys/unix" +) + +// IsTerminal returns true if the given file descriptor is a terminal. +// see: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libbc/libc/gen/common/isatty.c +func IsTerminal(fd uintptr) bool { + var termio unix.Termio + err := unix.IoctlSetTermio(int(fd), unix.TCGETA, &termio) + return err == nil +} + +// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2 +// terminal. This is also always false on this environment. 
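An illustrative sketch, not part of the vendored sources, of how go-isatty and go-colorable are commonly combined: probe the file descriptor first, then pick a writer that translates ANSI sequences (a real console), passes them through (a Cygwin/MSYS2 pty), or strips them (pipes and files). NewColorableStdout comes from the colorable package vendored earlier in this patch.

package main

import (
	"fmt"
	"io"
	"os"

	"github.com/mattn/go-colorable"
	"github.com/mattn/go-isatty"
)

func main() {
	var out io.Writer
	fd := os.Stdout.Fd()
	switch {
	case isatty.IsTerminal(fd):
		// Real terminal: on Windows this writer translates ANSI sequences
		// into console API calls; elsewhere it is a plain pass-through.
		out = colorable.NewColorableStdout()
	case isatty.IsCygwinTerminal(fd):
		// A Cygwin/MSYS2 pty looks like a pipe but understands ANSI directly.
		out = os.Stdout
	default:
		// Pipe or file: strip escape sequences entirely.
		out = colorable.NewNonColorable(os.Stdout)
	}
	fmt.Fprintln(out, "\x1b[31mred when supported\x1b[0m")
}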
+func IsCygwinTerminal(fd uintptr) bool { + return false +} diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go new file mode 100644 index 0000000..af51cbc --- /dev/null +++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go @@ -0,0 +1,94 @@ +// +build windows +// +build !appengine + +package isatty + +import ( + "strings" + "syscall" + "unicode/utf16" + "unsafe" +) + +const ( + fileNameInfo uintptr = 2 + fileTypePipe = 3 +) + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + procGetConsoleMode = kernel32.NewProc("GetConsoleMode") + procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx") + procGetFileType = kernel32.NewProc("GetFileType") +) + +func init() { + // Check if GetFileInformationByHandleEx is available. + if procGetFileInformationByHandleEx.Find() != nil { + procGetFileInformationByHandleEx = nil + } +} + +// IsTerminal return true if the file descriptor is terminal. +func IsTerminal(fd uintptr) bool { + var st uint32 + r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&st)), 0) + return r != 0 && e == 0 +} + +// Check pipe name is used for cygwin/msys2 pty. +// Cygwin/MSYS2 PTY has a name like: +// \{cygwin,msys}-XXXXXXXXXXXXXXXX-ptyN-{from,to}-master +func isCygwinPipeName(name string) bool { + token := strings.Split(name, "-") + if len(token) < 5 { + return false + } + + if token[0] != `\msys` && token[0] != `\cygwin` { + return false + } + + if token[1] == "" { + return false + } + + if !strings.HasPrefix(token[2], "pty") { + return false + } + + if token[3] != `from` && token[3] != `to` { + return false + } + + if token[4] != "master" { + return false + } + + return true +} + +// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2 +// terminal. +func IsCygwinTerminal(fd uintptr) bool { + if procGetFileInformationByHandleEx == nil { + return false + } + + // Cygwin/msys's pty is a pipe. + ft, _, e := syscall.Syscall(procGetFileType.Addr(), 1, fd, 0, 0) + if ft != fileTypePipe || e != 0 { + return false + } + + var buf [2 + syscall.MAX_PATH]uint16 + r, _, e := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(), + 4, fd, fileNameInfo, uintptr(unsafe.Pointer(&buf)), + uintptr(len(buf)*2), 0, 0) + if r == 0 || e != 0 { + return false + } + + l := *(*uint32)(unsafe.Pointer(&buf)) + return isCygwinPipeName(string(utf16.Decode(buf[2 : 2+l/2]))) +} diff --git a/vendor/github.com/mgutz/ansi/LICENSE b/vendor/github.com/mgutz/ansi/LICENSE new file mode 100644 index 0000000..06ce0c3 --- /dev/null +++ b/vendor/github.com/mgutz/ansi/LICENSE @@ -0,0 +1,9 @@ +The MIT License (MIT) +Copyright (c) 2013 Mario L. Gutierrez + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/vendor/github.com/mgutz/ansi/ansi.go b/vendor/github.com/mgutz/ansi/ansi.go new file mode 100644 index 0000000..dc04136 --- /dev/null +++ b/vendor/github.com/mgutz/ansi/ansi.go @@ -0,0 +1,285 @@ +package ansi + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +const ( + black = iota + red + green + yellow + blue + magenta + cyan + white + defaultt = 9 + + normalIntensityFG = 30 + highIntensityFG = 90 + normalIntensityBG = 40 + highIntensityBG = 100 + + start = "\033[" + bold = "1;" + blink = "5;" + underline = "4;" + inverse = "7;" + strikethrough = "9;" + + // Reset is the ANSI reset escape sequence + Reset = "\033[0m" + // DefaultBG is the default background + DefaultBG = "\033[49m" + // DefaultFG is the default foreground + DefaultFG = "\033[39m" +) + +// Black FG +var Black string + +// Red FG +var Red string + +// Green FG +var Green string + +// Yellow FG +var Yellow string + +// Blue FG +var Blue string + +// Magenta FG +var Magenta string + +// Cyan FG +var Cyan string + +// White FG +var White string + +// LightBlack FG +var LightBlack string + +// LightRed FG +var LightRed string + +// LightGreen FG +var LightGreen string + +// LightYellow FG +var LightYellow string + +// LightBlue FG +var LightBlue string + +// LightMagenta FG +var LightMagenta string + +// LightCyan FG +var LightCyan string + +// LightWhite FG +var LightWhite string + +var ( + plain = false + // Colors maps common color names to their ANSI color code. + Colors = map[string]int{ + "black": black, + "red": red, + "green": green, + "yellow": yellow, + "blue": blue, + "magenta": magenta, + "cyan": cyan, + "white": white, + "default": defaultt, + } +) + +func init() { + for i := 0; i < 256; i++ { + Colors[strconv.Itoa(i)] = i + } + + Black = ColorCode("black") + Red = ColorCode("red") + Green = ColorCode("green") + Yellow = ColorCode("yellow") + Blue = ColorCode("blue") + Magenta = ColorCode("magenta") + Cyan = ColorCode("cyan") + White = ColorCode("white") + LightBlack = ColorCode("black+h") + LightRed = ColorCode("red+h") + LightGreen = ColorCode("green+h") + LightYellow = ColorCode("yellow+h") + LightBlue = ColorCode("blue+h") + LightMagenta = ColorCode("magenta+h") + LightCyan = ColorCode("cyan+h") + LightWhite = ColorCode("white+h") +} + +// ColorCode returns the ANSI color color code for style. +func ColorCode(style string) string { + return colorCode(style).String() +} + +// Gets the ANSI color code for a style. 
+func colorCode(style string) *bytes.Buffer { + buf := bytes.NewBufferString("") + if plain || style == "" { + return buf + } + if style == "reset" { + buf.WriteString(Reset) + return buf + } else if style == "off" { + return buf + } + + foregroundBackground := strings.Split(style, ":") + foreground := strings.Split(foregroundBackground[0], "+") + fgKey := foreground[0] + fg := Colors[fgKey] + fgStyle := "" + if len(foreground) > 1 { + fgStyle = foreground[1] + } + + bg, bgStyle := "", "" + + if len(foregroundBackground) > 1 { + background := strings.Split(foregroundBackground[1], "+") + bg = background[0] + if len(background) > 1 { + bgStyle = background[1] + } + } + + buf.WriteString(start) + base := normalIntensityFG + if len(fgStyle) > 0 { + if strings.Contains(fgStyle, "b") { + buf.WriteString(bold) + } + if strings.Contains(fgStyle, "B") { + buf.WriteString(blink) + } + if strings.Contains(fgStyle, "u") { + buf.WriteString(underline) + } + if strings.Contains(fgStyle, "i") { + buf.WriteString(inverse) + } + if strings.Contains(fgStyle, "s") { + buf.WriteString(strikethrough) + } + if strings.Contains(fgStyle, "h") { + base = highIntensityFG + } + } + + // if 256-color + n, err := strconv.Atoi(fgKey) + if err == nil { + fmt.Fprintf(buf, "38;5;%d;", n) + } else { + fmt.Fprintf(buf, "%d;", base+fg) + } + + base = normalIntensityBG + if len(bg) > 0 { + if strings.Contains(bgStyle, "h") { + base = highIntensityBG + } + // if 256-color + n, err := strconv.Atoi(bg) + if err == nil { + fmt.Fprintf(buf, "48;5;%d;", n) + } else { + fmt.Fprintf(buf, "%d;", base+Colors[bg]) + } + } + + // remove last ";" + buf.Truncate(buf.Len() - 1) + buf.WriteRune('m') + return buf +} + +// Color colors a string based on the ANSI color code for style. +func Color(s, style string) string { + if plain || len(style) < 1 { + return s + } + buf := colorCode(style) + buf.WriteString(s) + buf.WriteString(Reset) + return buf.String() +} + +// ColorFunc creates a closure to avoid computation ANSI color code. +func ColorFunc(style string) func(string) string { + if style == "" { + return func(s string) string { + return s + } + } + color := ColorCode(style) + return func(s string) string { + if plain || s == "" { + return s + } + buf := bytes.NewBufferString(color) + buf.WriteString(s) + buf.WriteString(Reset) + result := buf.String() + return result + } +} + +// DisableColors disables ANSI color codes. The default is false (colors are on). 
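A small usage sketch (illustrative, not from the vendored file) of the API defined above: Color colors one string, ColorFunc caches the compiled escape code in a closure, and DisableColors makes both return their input unchanged.

package main

import (
	"fmt"

	"github.com/mgutz/ansi"
)

func main() {
	// "red+b:white" compiles to "\x1b[1;31;47m": bold, red foreground,
	// white background.
	fmt.Println(ansi.Color("warning", "red+b:white"))

	// ColorFunc pays the style-parsing cost once and reuses the cached code.
	alert := ansi.ColorFunc("yellow+h")
	fmt.Println(alert("disk almost full"))

	// With colors disabled, both calls return the input unchanged.
	ansi.DisableColors(true)
	fmt.Println(alert("plain text"))
}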
+func DisableColors(disable bool) { + plain = disable + if plain { + Black = "" + Red = "" + Green = "" + Yellow = "" + Blue = "" + Magenta = "" + Cyan = "" + White = "" + LightBlack = "" + LightRed = "" + LightGreen = "" + LightYellow = "" + LightBlue = "" + LightMagenta = "" + LightCyan = "" + LightWhite = "" + } else { + Black = ColorCode("black") + Red = ColorCode("red") + Green = ColorCode("green") + Yellow = ColorCode("yellow") + Blue = ColorCode("blue") + Magenta = ColorCode("magenta") + Cyan = ColorCode("cyan") + White = ColorCode("white") + LightBlack = ColorCode("black+h") + LightRed = ColorCode("red+h") + LightGreen = ColorCode("green+h") + LightYellow = ColorCode("yellow+h") + LightBlue = ColorCode("blue+h") + LightMagenta = ColorCode("magenta+h") + LightCyan = ColorCode("cyan+h") + LightWhite = ColorCode("white+h") + } +} diff --git a/vendor/github.com/mgutz/ansi/cmd/ansi-mgutz/main.go b/vendor/github.com/mgutz/ansi/cmd/ansi-mgutz/main.go new file mode 100644 index 0000000..736b45d --- /dev/null +++ b/vendor/github.com/mgutz/ansi/cmd/ansi-mgutz/main.go @@ -0,0 +1,135 @@ +package main + +import ( + "fmt" + "sort" + "strconv" + + "github.com/mattn/go-colorable" + "github.com/mgutz/ansi" +) + +func main() { + printColors() + print256Colors() + printConstants() +} + +func pad(s string, length int) string { + for len(s) < length { + s += " " + } + return s +} + +func padColor(s string, styles []string) string { + buffer := "" + for _, style := range styles { + buffer += ansi.Color(pad(s+style, 20), s+style) + } + return buffer +} + +func printPlain() { + ansi.DisableColors(true) + bgColors := []string{ + "", + ":black", + ":red", + ":green", + ":yellow", + ":blue", + ":magenta", + ":cyan", + ":white", + } + for fg := range ansi.Colors { + for _, bg := range bgColors { + println(padColor(fg, []string{"" + bg, "+b" + bg, "+bh" + bg, "+u" + bg})) + println(padColor(fg, []string{"+uh" + bg, "+B" + bg, "+Bb" + bg /* backgrounds */, "" + bg + "+h"})) + println(padColor(fg, []string{"+b" + bg + "+h", "+bh" + bg + "+h", "+u" + bg + "+h", "+uh" + bg + "+h"})) + } + } +} + +func printColors() { + ansi.DisableColors(false) + stdout := colorable.NewColorableStdout() + + bgColors := []string{ + "", + ":black", + ":red", + ":green", + ":yellow", + ":blue", + ":magenta", + ":cyan", + ":white", + } + + keys := []string{} + for fg := range ansi.Colors { + _, err := strconv.Atoi(fg) + if err != nil { + keys = append(keys, fg) + } + } + sort.Strings(keys) + + for _, fg := range keys { + for _, bg := range bgColors { + fmt.Fprintln(stdout, padColor(fg, []string{"" + bg, "+b" + bg, "+bh" + bg, "+u" + bg})) + fmt.Fprintln(stdout, padColor(fg, []string{"+uh" + bg, "+B" + bg, "+Bb" + bg /* backgrounds */, "" + bg + "+h", "+s" + bg})) + fmt.Fprintln(stdout, padColor(fg, []string{"+b" + bg + "+h", "+bh" + bg + "+h", "+u" + bg + "+h", "+uh" + bg + "+h"})) + } + } +} + +func print256Colors() { + ansi.DisableColors(false) + stdout := colorable.NewColorableStdout() + + bgColors := []string{""} + for i := 0; i < 256; i++ { + key := fmt.Sprintf(":%d", i) + bgColors = append(bgColors, key) + } + + keys := []string{} + for fg := range ansi.Colors { + n, err := strconv.Atoi(fg) + if err == nil { + keys = append(keys, fmt.Sprintf("%3d", n)) + } + } + sort.Strings(keys) + + for _, fg := range keys { + for _, bg := range bgColors { + fmt.Fprintln(stdout, padColor(fg, []string{"" + bg, "+b" + bg, "+u" + bg})) + fmt.Fprintln(stdout, padColor(fg, []string{"+B" + bg, "+Bb" + bg, "+s" + bg})) + } + } +} + +func 
printConstants() { + stdout := colorable.NewColorableStdout() + fmt.Fprintln(stdout, ansi.DefaultFG, "ansi.DefaultFG", ansi.Reset) + fmt.Fprintln(stdout, ansi.Black, "ansi.Black", ansi.Reset) + fmt.Fprintln(stdout, ansi.Red, "ansi.Red", ansi.Reset) + fmt.Fprintln(stdout, ansi.Green, "ansi.Green", ansi.Reset) + fmt.Fprintln(stdout, ansi.Yellow, "ansi.Yellow", ansi.Reset) + fmt.Fprintln(stdout, ansi.Blue, "ansi.Blue", ansi.Reset) + fmt.Fprintln(stdout, ansi.Magenta, "ansi.Magenta", ansi.Reset) + fmt.Fprintln(stdout, ansi.Cyan, "ansi.Cyan", ansi.Reset) + fmt.Fprintln(stdout, ansi.White, "ansi.White", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightBlack, "ansi.LightBlack", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightRed, "ansi.LightRed", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightGreen, "ansi.LightGreen", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightYellow, "ansi.LightYellow", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightBlue, "ansi.LightBlue", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightMagenta, "ansi.LightMagenta", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightCyan, "ansi.LightCyan", ansi.Reset) + fmt.Fprintln(stdout, ansi.LightWhite, "ansi.LightWhite", ansi.Reset) +} diff --git a/vendor/github.com/mgutz/ansi/doc.go b/vendor/github.com/mgutz/ansi/doc.go new file mode 100644 index 0000000..43c217e --- /dev/null +++ b/vendor/github.com/mgutz/ansi/doc.go @@ -0,0 +1,65 @@ +/* +Package ansi is a small, fast library to create ANSI colored strings and codes. + +Installation + + # this installs the color viewer and the package + go get -u github.com/mgutz/ansi/cmd/ansi-mgutz + +Example + + // colorize a string, SLOW + msg := ansi.Color("foo", "red+b:white") + + // create a closure to avoid recalculating ANSI code compilation + phosphorize := ansi.ColorFunc("green+h:black") + msg = phosphorize("Bring back the 80s!") + msg2 := phospohorize("Look, I'm a CRT!") + + // cache escape codes and build strings manually + lime := ansi.ColorCode("green+h:black") + reset := ansi.ColorCode("reset") + + fmt.Println(lime, "Bring back the 80s!", reset) + +Other examples + + Color(s, "red") // red + Color(s, "red+b") // red bold + Color(s, "red+B") // red blinking + Color(s, "red+u") // red underline + Color(s, "red+bh") // red bold bright + Color(s, "red:white") // red on white + Color(s, "red+b:white+h") // red bold on white bright + Color(s, "red+B:white+h") // red blink on white bright + +To view color combinations, from terminal + + ansi-mgutz + +Style format + + "foregroundColor+attributes:backgroundColor+attributes" + +Colors + + black + red + green + yellow + blue + magenta + cyan + white + +Attributes + + b = bold foreground + B = Blink foreground + u = underline foreground + h = high intensity (bright) foreground, background + i = inverse + +Wikipedia ANSI escape codes [Colors](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) +*/ +package ansi diff --git a/vendor/github.com/mgutz/ansi/print.go b/vendor/github.com/mgutz/ansi/print.go new file mode 100644 index 0000000..806f436 --- /dev/null +++ b/vendor/github.com/mgutz/ansi/print.go @@ -0,0 +1,57 @@ +package ansi + +import ( + "fmt" + "sort" + + colorable "github.com/mattn/go-colorable" +) + +// PrintStyles prints all style combinations to the terminal. 
+func PrintStyles() { + // for compatibility with Windows, not needed for *nix + stdout := colorable.NewColorableStdout() + + bgColors := []string{ + "", + ":black", + ":red", + ":green", + ":yellow", + ":blue", + ":magenta", + ":cyan", + ":white", + } + + keys := make([]string, 0, len(Colors)) + for k := range Colors { + keys = append(keys, k) + } + + sort.Sort(sort.StringSlice(keys)) + + for _, fg := range keys { + for _, bg := range bgColors { + fmt.Fprintln(stdout, padColor(fg, []string{"" + bg, "+b" + bg, "+bh" + bg, "+u" + bg})) + fmt.Fprintln(stdout, padColor(fg, []string{"+s" + bg, "+i" + bg})) + fmt.Fprintln(stdout, padColor(fg, []string{"+uh" + bg, "+B" + bg, "+Bb" + bg /* backgrounds */, "" + bg + "+h"})) + fmt.Fprintln(stdout, padColor(fg, []string{"+b" + bg + "+h", "+bh" + bg + "+h", "+u" + bg + "+h", "+uh" + bg + "+h"})) + } + } +} + +func pad(s string, length int) string { + for len(s) < length { + s += " " + } + return s +} + +func padColor(color string, styles []string) string { + buffer := "" + for _, style := range styles { + buffer += Color(pad(color+style, 20), color+style) + } + return buffer +} diff --git a/vendor/github.com/mitchellh/go-testing-interface/LICENSE b/vendor/github.com/mitchellh/go-testing-interface/LICENSE new file mode 100644 index 0000000..a3866a2 --- /dev/null +++ b/vendor/github.com/mitchellh/go-testing-interface/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Mitchell Hashimoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mitchellh/go-testing-interface/testing.go b/vendor/github.com/mitchellh/go-testing-interface/testing.go new file mode 100644 index 0000000..204afb4 --- /dev/null +++ b/vendor/github.com/mitchellh/go-testing-interface/testing.go @@ -0,0 +1,84 @@ +// +build !go1.9 + +package testing + +import ( + "fmt" + "log" +) + +// T is the interface that mimics the standard library *testing.T. +// +// In unit tests you can just pass a *testing.T struct. At runtime, outside +// of tests, you can pass in a RuntimeT struct from this package. 
+type T interface { + Error(args ...interface{}) + Errorf(format string, args ...interface{}) + Fail() + FailNow() + Failed() bool + Fatal(args ...interface{}) + Fatalf(format string, args ...interface{}) + Log(args ...interface{}) + Logf(format string, args ...interface{}) + Name() string + Skip(args ...interface{}) + SkipNow() + Skipf(format string, args ...interface{}) + Skipped() bool +} + +// RuntimeT implements T and can be instantiated and run at runtime to +// mimic *testing.T behavior. Unlike *testing.T, this will simply panic +// for calls to Fatal. For calls to Error, you'll have to check the errors +// list to determine whether to exit yourself. Name and Skip methods are +// unimplemented noops. +type RuntimeT struct { + failed bool +} + +func (t *RuntimeT) Error(args ...interface{}) { + log.Println(fmt.Sprintln(args...)) + t.Fail() +} + +func (t *RuntimeT) Errorf(format string, args ...interface{}) { + log.Println(fmt.Sprintf(format, args...)) + t.Fail() +} + +func (t *RuntimeT) Fatal(args ...interface{}) { + log.Println(fmt.Sprintln(args...)) + t.FailNow() +} + +func (t *RuntimeT) Fatalf(format string, args ...interface{}) { + log.Println(fmt.Sprintf(format, args...)) + t.FailNow() +} + +func (t *RuntimeT) Fail() { + t.failed = true +} + +func (t *RuntimeT) FailNow() { + panic("testing.T failed, see logs for output (if any)") +} + +func (t *RuntimeT) Failed() bool { + return t.failed +} + +func (t *RuntimeT) Log(args ...interface{}) { + log.Println(fmt.Sprintln(args...)) +} + +func (t *RuntimeT) Logf(format string, args ...interface{}) { + log.Println(fmt.Sprintf(format, args...)) +} + +func (t *RuntimeT) Name() string { return "" } +func (t *RuntimeT) Skip(args ...interface{}) {} +func (t *RuntimeT) SkipNow() {} +func (t *RuntimeT) Skipf(format string, args ...interface{}) {} +func (t *RuntimeT) Skipped() bool { return false } diff --git a/vendor/github.com/mitchellh/go-testing-interface/testing_go19.go b/vendor/github.com/mitchellh/go-testing-interface/testing_go19.go new file mode 100644 index 0000000..31b42ca --- /dev/null +++ b/vendor/github.com/mitchellh/go-testing-interface/testing_go19.go @@ -0,0 +1,108 @@ +// +build go1.9 + +// NOTE: This is a temporary copy of testing.go for Go 1.9 with the addition +// of "Helper" to the T interface. Go 1.9 at the time of typing is in RC +// and is set for release shortly. We'll support this on master as the default +// as soon as 1.9 is released. + +package testing + +import ( + "fmt" + "log" +) + +// T is the interface that mimics the standard library *testing.T. +// +// In unit tests you can just pass a *testing.T struct. At runtime, outside +// of tests, you can pass in a RuntimeT struct from this package. +type T interface { + Error(args ...interface{}) + Errorf(format string, args ...interface{}) + Fail() + FailNow() + Failed() bool + Fatal(args ...interface{}) + Fatalf(format string, args ...interface{}) + Log(args ...interface{}) + Logf(format string, args ...interface{}) + Name() string + Skip(args ...interface{}) + SkipNow() + Skipf(format string, args ...interface{}) + Skipped() bool + Helper() +} + +// RuntimeT implements T and can be instantiated and run at runtime to +// mimic *testing.T behavior. Unlike *testing.T, this will simply panic +// for calls to Fatal. For calls to Error, you'll have to check the errors +// list to determine whether to exit yourself. 
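A brief sketch of the pattern this interface enables (illustrative; requireNonEmpty is a hypothetical helper): the same check can receive a *testing.T inside a test or a *RuntimeT at runtime.

package main

import (
	"log"

	testing "github.com/mitchellh/go-testing-interface"
)

// requireNonEmpty is a hypothetical helper: it accepts anything satisfying
// the T interface, so callers pass *testing.T in tests or *RuntimeT here.
func requireNonEmpty(t testing.T, name, value string) {
	if value == "" {
		t.Errorf("%s must not be empty", name)
	}
}

func main() {
	rt := &testing.RuntimeT{}
	requireNonEmpty(rt, "host", "")
	// RuntimeT records Error calls instead of aborting; Fatal would panic.
	if rt.Failed() {
		log.Println("validation failed")
	}
}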
+type RuntimeT struct { + skipped bool + failed bool +} + +func (t *RuntimeT) Error(args ...interface{}) { + log.Println(fmt.Sprintln(args...)) + t.Fail() +} + +func (t *RuntimeT) Errorf(format string, args ...interface{}) { + log.Printf(format, args...) + t.Fail() +} + +func (t *RuntimeT) Fail() { + t.failed = true +} + +func (t *RuntimeT) FailNow() { + panic("testing.T failed, see logs for output (if any)") +} + +func (t *RuntimeT) Failed() bool { + return t.failed +} + +func (t *RuntimeT) Fatal(args ...interface{}) { + log.Print(args...) + t.FailNow() +} + +func (t *RuntimeT) Fatalf(format string, args ...interface{}) { + log.Printf(format, args...) + t.FailNow() +} + +func (t *RuntimeT) Log(args ...interface{}) { + log.Println(fmt.Sprintln(args...)) +} + +func (t *RuntimeT) Logf(format string, args ...interface{}) { + log.Println(fmt.Sprintf(format, args...)) +} + +func (t *RuntimeT) Name() string { + return "" +} + +func (t *RuntimeT) Skip(args ...interface{}) { + log.Print(args...) + t.SkipNow() +} + +func (t *RuntimeT) SkipNow() { + t.skipped = true +} + +func (t *RuntimeT) Skipf(format string, args ...interface{}) { + log.Printf(format, args...) + t.SkipNow() +} + +func (t *RuntimeT) Skipped() bool { + return t.skipped +} + +func (t *RuntimeT) Helper() {} diff --git a/vendor/github.com/oklog/run/LICENSE b/vendor/github.com/oklog/run/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/vendor/github.com/oklog/run/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/oklog/run/group.go b/vendor/github.com/oklog/run/group.go new file mode 100644 index 0000000..832d47d --- /dev/null +++ b/vendor/github.com/oklog/run/group.go @@ -0,0 +1,62 @@ +// Package run implements an actor-runner with deterministic teardown. It is +// somewhat similar to package errgroup, except it does not require actor +// goroutines to understand context semantics. This makes it suitable for use in +// more circumstances; for example, goroutines which are handling connections +// from net.Listeners, or scanning input from a closable io.Reader. +package run + +// Group collects actors (functions) and runs them concurrently. +// When one actor (function) returns, all actors are interrupted. +// The zero value of a Group is useful. +type Group struct { + actors []actor +} + +// Add an actor (function) to the group. Each actor must be pre-emptable by an +// interrupt function. That is, if interrupt is invoked, execute should return. 
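To make the Add/Run contract concrete, an illustrative sketch (not part of the vendored file; the HTTP server and signal watcher are hypothetical actors): whichever actor returns first causes every other actor's interrupt to run, and Run returns that first error.

package main

import (
	"context"
	"log"
	"net/http"
	"os"
	"os/signal"

	"github.com/oklog/run"
)

func main() {
	var g run.Group

	// Actor 1: an HTTP server. interrupt shuts the server down so that
	// execute (ListenAndServe) returns.
	srv := &http.Server{Addr: ":8080"}
	g.Add(func() error {
		return srv.ListenAndServe()
	}, func(error) {
		_ = srv.Shutdown(context.Background())
	})

	// Actor 2: wait for SIGINT. interrupt unblocks the channel receive.
	quit := make(chan os.Signal, 1)
	signal.Notify(quit, os.Interrupt)
	done := make(chan struct{})
	g.Add(func() error {
		select {
		case <-quit:
		case <-done:
		}
		return nil
	}, func(error) {
		close(done)
	})

	// Run blocks until the first actor returns, interrupts the rest, and
	// reports that first error.
	log.Println("exit:", g.Run())
}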
+// Also, it must be safe to call interrupt even after execute has returned. +// +// The first actor (function) to return interrupts all running actors. +// The error is passed to the interrupt functions, and is returned by Run. +func (g *Group) Add(execute func() error, interrupt func(error)) { + g.actors = append(g.actors, actor{execute, interrupt}) +} + +// Run all actors (functions) concurrently. +// When the first actor returns, all others are interrupted. +// Run only returns when all actors have exited. +// Run returns the error returned by the first exiting actor. +func (g *Group) Run() error { + if len(g.actors) == 0 { + return nil + } + + // Run each actor. + errors := make(chan error, len(g.actors)) + for _, a := range g.actors { + go func(a actor) { + errors <- a.execute() + }(a) + } + + // Wait for the first actor to stop. + err := <-errors + + // Signal all actors to stop. + for _, a := range g.actors { + a.interrupt(err) + } + + // Wait for all actors to stop. + for i := 1; i < cap(errors); i++ { + <-errors + } + + // Return the original error. + return err +} + +type actor struct { + execute func() error + interrupt func(error) +} diff --git a/vendor/github.com/spf13/cobra/LICENSE.txt b/vendor/github.com/spf13/cobra/LICENSE.txt new file mode 100644 index 0000000..298f0e2 --- /dev/null +++ b/vendor/github.com/spf13/cobra/LICENSE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go new file mode 100644 index 0000000..a5d8a92 --- /dev/null +++ b/vendor/github.com/spf13/cobra/args.go @@ -0,0 +1,89 @@ +package cobra + +import ( + "fmt" +) + +type PositionalArgs func(cmd *Command, args []string) error + +// Legacy arg validation has the following behaviour: +// - root commands with no subcommands can take arbitrary arguments +// - root commands with subcommands will do subcommand validity checking +// - subcommands will always accept arbitrary arguments +func legacyArgs(cmd *Command, args []string) error { + // no subcommand, always take args + if !cmd.HasSubCommands() { + return nil + } + + // root command with subcommands, do subcommand checking. + if !cmd.HasParent() && len(args) > 0 { + return fmt.Errorf("unknown command %q for %q%s", args[0], cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + return nil +} + +// NoArgs returns an error if any args are included. +func NoArgs(cmd *Command, args []string) error { + if len(args) > 0 { + return fmt.Errorf("unknown command %q for %q", args[0], cmd.CommandPath()) + } + return nil +} + +// OnlyValidArgs returns an error if any args are not in the list of ValidArgs. +func OnlyValidArgs(cmd *Command, args []string) error { + if len(cmd.ValidArgs) > 0 { + for _, v := range args { + if !stringInSlice(v, cmd.ValidArgs) { + return fmt.Errorf("invalid argument %q for %q%s", v, cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + } + } + return nil +} + +// ArbitraryArgs never returns an error. +func ArbitraryArgs(cmd *Command, args []string) error { + return nil +} + +// MinimumNArgs returns an error if there is not at least N args. +func MinimumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < n { + return fmt.Errorf("requires at least %d arg(s), only received %d", n, len(args)) + } + return nil + } +} + +// MaximumNArgs returns an error if there are more than N args. 
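For orientation, a sketch of how these validators are wired into a command through the Args field (illustrative; the greet command is hypothetical, and cobra.Command/Execute come from the rest of this vendored package).

package main

import (
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical command: requires at least one NAME argument.
	cmd := &cobra.Command{
		Use:  "greet NAME...",
		Args: cobra.MinimumNArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			for _, name := range args {
				fmt.Println("hello,", name)
			}
			return nil
		},
	}
	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}
}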
+func MaximumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) > n { + return fmt.Errorf("accepts at most %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// ExactArgs returns an error if there are not exactly n args. +func ExactArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) != n { + return fmt.Errorf("accepts %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// RangeArgs returns an error if the number of args is not within the expected range. +func RangeArgs(min int, max int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < min || len(args) > max { + return fmt.Errorf("accepts between %d and %d arg(s), received %d", min, max, len(args)) + } + return nil + } +} diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go new file mode 100644 index 0000000..8fa8f48 --- /dev/null +++ b/vendor/github.com/spf13/cobra/bash_completions.go @@ -0,0 +1,584 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "sort" + "strings" + + "github.com/spf13/pflag" +) + +// Annotations for Bash completion. +const ( + BashCompFilenameExt = "cobra_annotation_bash_completion_filename_extensions" + BashCompCustom = "cobra_annotation_bash_completion_custom" + BashCompOneRequiredFlag = "cobra_annotation_bash_completion_one_required_flag" + BashCompSubdirsInDir = "cobra_annotation_bash_completion_subdirs_in_dir" +) + +func writePreamble(buf *bytes.Buffer, name string) { + buf.WriteString(fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name)) + buf.WriteString(fmt.Sprintf(` +__%[1]s_debug() +{ + if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then + echo "$*" >> "${BASH_COMP_DEBUG_FILE}" + fi +} + +# Homebrew on Macs have version 1.3 of bash-completion which doesn't include +# _init_completion. This is a very minimal version of that function. 
+__%[1]s_init_completion() +{ + COMPREPLY=() + _get_comp_words_by_ref "$@" cur prev words cword +} + +__%[1]s_index_of_word() +{ + local w word=$1 + shift + index=0 + for w in "$@"; do + [[ $w = "$word" ]] && return + index=$((index+1)) + done + index=-1 +} + +__%[1]s_contains_word() +{ + local w word=$1; shift + for w in "$@"; do + [[ $w = "$word" ]] && return + done + return 1 +} + +__%[1]s_handle_reply() +{ + __%[1]s_debug "${FUNCNAME[0]}" + case $cur in + -*) + if [[ $(type -t compopt) = "builtin" ]]; then + compopt -o nospace + fi + local allflags + if [ ${#must_have_one_flag[@]} -ne 0 ]; then + allflags=("${must_have_one_flag[@]}") + else + allflags=("${flags[*]} ${two_word_flags[*]}") + fi + COMPREPLY=( $(compgen -W "${allflags[*]}" -- "$cur") ) + if [[ $(type -t compopt) = "builtin" ]]; then + [[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace + fi + + # complete after --flag=abc + if [[ $cur == *=* ]]; then + if [[ $(type -t compopt) = "builtin" ]]; then + compopt +o nospace + fi + + local index flag + flag="${cur%%=*}" + __%[1]s_index_of_word "${flag}" "${flags_with_completion[@]}" + COMPREPLY=() + if [[ ${index} -ge 0 ]]; then + PREFIX="" + cur="${cur#*=}" + ${flags_completion[${index}]} + if [ -n "${ZSH_VERSION}" ]; then + # zsh completion needs --flag= prefix + eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )" + fi + fi + fi + return 0; + ;; + esac + + # check if we are handling a flag with special work handling + local index + __%[1]s_index_of_word "${prev}" "${flags_with_completion[@]}" + if [[ ${index} -ge 0 ]]; then + ${flags_completion[${index}]} + return + fi + + # we are parsing a flag and don't have a special handler, no completion + if [[ ${cur} != "${words[cword]}" ]]; then + return + fi + + local completions + completions=("${commands[@]}") + if [[ ${#must_have_one_noun[@]} -ne 0 ]]; then + completions=("${must_have_one_noun[@]}") + fi + if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then + completions+=("${must_have_one_flag[@]}") + fi + COMPREPLY=( $(compgen -W "${completions[*]}" -- "$cur") ) + + if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then + COMPREPLY=( $(compgen -W "${noun_aliases[*]}" -- "$cur") ) + fi + + if [[ ${#COMPREPLY[@]} -eq 0 ]]; then + declare -F __custom_func >/dev/null && __custom_func + fi + + # available in bash-completion >= 2, not always present on macOS + if declare -F __ltrim_colon_completions >/dev/null; then + __ltrim_colon_completions "$cur" + fi + + # If there is only 1 completion and it is a flag with an = it will be completed + # but we don't want a space after the = + if [[ "${#COMPREPLY[@]}" -eq "1" ]] && [[ $(type -t compopt) = "builtin" ]] && [[ "${COMPREPLY[0]}" == --*= ]]; then + compopt -o nospace + fi +} + +# The arguments should be in the form "ext1|ext2|extn" +__%[1]s_handle_filename_extension_flag() +{ + local ext="$1" + _filedir "@(${ext})" +} + +__%[1]s_handle_subdirs_in_dir_flag() +{ + local dir="$1" + pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 +} + +__%[1]s_handle_flag() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + # if a command required a flag, and we found it, unset must_have_one_flag() + local flagname=${words[c]} + local flagvalue + # if the word contained an = + if [[ ${words[c]} == *"="* ]]; then + flagvalue=${flagname#*=} # take in as flagvalue after the = + flagname=${flagname%%=*} # strip everything after the = + flagname="${flagname}=" # but put the = back + fi + __%[1]s_debug "${FUNCNAME[0]}: looking 
for ${flagname}" + if __%[1]s_contains_word "${flagname}" "${must_have_one_flag[@]}"; then + must_have_one_flag=() + fi + + # if you set a flag which only applies to this command, don't show subcommands + if __%[1]s_contains_word "${flagname}" "${local_nonpersistent_flags[@]}"; then + commands=() + fi + + # keep flag value with flagname as flaghash + # flaghash variable is an associative array which is only supported in bash > 3. + if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + if [ -n "${flagvalue}" ] ; then + flaghash[${flagname}]=${flagvalue} + elif [ -n "${words[ $((c+1)) ]}" ] ; then + flaghash[${flagname}]=${words[ $((c+1)) ]} + else + flaghash[${flagname}]="true" # pad "true" for bool flag + fi + fi + + # skip the argument to a two word flag + if __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then + c=$((c+1)) + # if we are looking for a flags value, don't show commands + if [[ $c -eq $cword ]]; then + commands=() + fi + fi + + c=$((c+1)) + +} + +__%[1]s_handle_noun() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + if __%[1]s_contains_word "${words[c]}" "${must_have_one_noun[@]}"; then + must_have_one_noun=() + elif __%[1]s_contains_word "${words[c]}" "${noun_aliases[@]}"; then + must_have_one_noun=() + fi + + nouns+=("${words[c]}") + c=$((c+1)) +} + +__%[1]s_handle_command() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + local next_command + if [[ -n ${last_command} ]]; then + next_command="_${last_command}_${words[c]//:/__}" + else + if [[ $c -eq 0 ]]; then + next_command="_%[1]s_root_command" + else + next_command="_${words[c]//:/__}" + fi + fi + c=$((c+1)) + __%[1]s_debug "${FUNCNAME[0]}: looking for ${next_command}" + declare -F "$next_command" >/dev/null && $next_command +} + +__%[1]s_handle_word() +{ + if [[ $c -ge $cword ]]; then + __%[1]s_handle_reply + return + fi + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + if [[ "${words[c]}" == -* ]]; then + __%[1]s_handle_flag + elif __%[1]s_contains_word "${words[c]}" "${commands[@]}"; then + __%[1]s_handle_command + elif [[ $c -eq 0 ]]; then + __%[1]s_handle_command + elif __%[1]s_contains_word "${words[c]}" "${command_aliases[@]}"; then + # aliashash variable is an associative array which is only supported in bash > 3. 
+ if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + words[c]=${aliashash[${words[c]}]} + __%[1]s_handle_command + else + __%[1]s_handle_noun + fi + else + __%[1]s_handle_noun + fi + __%[1]s_handle_word +} + +`, name)) +} + +func writePostscript(buf *bytes.Buffer, name string) { + name = strings.Replace(name, ":", "__", -1) + buf.WriteString(fmt.Sprintf("__start_%s()\n", name)) + buf.WriteString(fmt.Sprintf(`{ + local cur prev words cword + declare -A flaghash 2>/dev/null || : + declare -A aliashash 2>/dev/null || : + if declare -F _init_completion >/dev/null 2>&1; then + _init_completion -s || return + else + __%[1]s_init_completion -n "=" || return + fi + + local c=0 + local flags=() + local two_word_flags=() + local local_nonpersistent_flags=() + local flags_with_completion=() + local flags_completion=() + local commands=("%[1]s") + local must_have_one_flag=() + local must_have_one_noun=() + local last_command + local nouns=() + + __%[1]s_handle_word +} + +`, name)) + buf.WriteString(fmt.Sprintf(`if [[ $(type -t compopt) = "builtin" ]]; then + complete -o default -F __start_%s %s +else + complete -o default -o nospace -F __start_%s %s +fi + +`, name, name, name, name)) + buf.WriteString("# ex: ts=4 sw=4 et filetype=sh\n") +} + +func writeCommands(buf *bytes.Buffer, cmd *Command) { + buf.WriteString(" commands=()\n") + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c == cmd.helpCommand { + continue + } + buf.WriteString(fmt.Sprintf(" commands+=(%q)\n", c.Name())) + writeCmdAliases(buf, c) + } + buf.WriteString("\n") +} + +func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]string, cmd *Command) { + for key, value := range annotations { + switch key { + case BashCompFilenameExt: + buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + + var ext string + if len(value) > 0 { + ext = fmt.Sprintf("__%s_handle_filename_extension_flag ", cmd.Root().Name()) + strings.Join(value, "|") + } else { + ext = "_filedir" + } + buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext)) + case BashCompCustom: + buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + if len(value) > 0 { + handlers := strings.Join(value, "; ") + buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", handlers)) + } else { + buf.WriteString(" flags_completion+=(:)\n") + } + case BashCompSubdirsInDir: + buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + + var ext string + if len(value) == 1 { + ext = fmt.Sprintf("__%s_handle_subdirs_in_dir_flag ", cmd.Root().Name()) + value[0] + } else { + ext = "_filedir -d" + } + buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext)) + } + } +} + +func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { + name := flag.Shorthand + format := " " + if len(flag.NoOptDefVal) == 0 { + format += "two_word_" + } + format += "flags+=(\"-%s\")\n" + buf.WriteString(fmt.Sprintf(format, name)) + writeFlagHandler(buf, "-"+name, flag.Annotations, cmd) +} + +func writeFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { + name := flag.Name + format := " flags+=(\"--%s" + if len(flag.NoOptDefVal) == 0 { + format += "=" + } + format += "\")\n" + buf.WriteString(fmt.Sprintf(format, name)) + writeFlagHandler(buf, "--"+name, flag.Annotations, cmd) +} + +func writeLocalNonPersistentFlag(buf *bytes.Buffer, flag *pflag.Flag) { + name := flag.Name + format := " local_nonpersistent_flags+=(\"--%s" + if len(flag.NoOptDefVal) == 0 { + format += "=" + } + format += 
"\")\n" + buf.WriteString(fmt.Sprintf(format, name)) +} + +func writeFlags(buf *bytes.Buffer, cmd *Command) { + buf.WriteString(` flags=() + two_word_flags=() + local_nonpersistent_flags=() + flags_with_completion=() + flags_completion=() + +`) + localNonPersistentFlags := cmd.LocalNonPersistentFlags() + cmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + writeFlag(buf, flag, cmd) + if len(flag.Shorthand) > 0 { + writeShortFlag(buf, flag, cmd) + } + if localNonPersistentFlags.Lookup(flag.Name) != nil { + writeLocalNonPersistentFlag(buf, flag) + } + }) + cmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + writeFlag(buf, flag, cmd) + if len(flag.Shorthand) > 0 { + writeShortFlag(buf, flag, cmd) + } + }) + + buf.WriteString("\n") +} + +func writeRequiredFlag(buf *bytes.Buffer, cmd *Command) { + buf.WriteString(" must_have_one_flag=()\n") + flags := cmd.NonInheritedFlags() + flags.VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + for key := range flag.Annotations { + switch key { + case BashCompOneRequiredFlag: + format := " must_have_one_flag+=(\"--%s" + if flag.Value.Type() != "bool" { + format += "=" + } + format += "\")\n" + buf.WriteString(fmt.Sprintf(format, flag.Name)) + + if len(flag.Shorthand) > 0 { + buf.WriteString(fmt.Sprintf(" must_have_one_flag+=(\"-%s\")\n", flag.Shorthand)) + } + } + } + }) +} + +func writeRequiredNouns(buf *bytes.Buffer, cmd *Command) { + buf.WriteString(" must_have_one_noun=()\n") + sort.Sort(sort.StringSlice(cmd.ValidArgs)) + for _, value := range cmd.ValidArgs { + buf.WriteString(fmt.Sprintf(" must_have_one_noun+=(%q)\n", value)) + } +} + +func writeCmdAliases(buf *bytes.Buffer, cmd *Command) { + if len(cmd.Aliases) == 0 { + return + } + + sort.Sort(sort.StringSlice(cmd.Aliases)) + + buf.WriteString(fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n")) + for _, value := range cmd.Aliases { + buf.WriteString(fmt.Sprintf(" command_aliases+=(%q)\n", value)) + buf.WriteString(fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name())) + } + buf.WriteString(` fi`) + buf.WriteString("\n") +} +func writeArgAliases(buf *bytes.Buffer, cmd *Command) { + buf.WriteString(" noun_aliases=()\n") + sort.Sort(sort.StringSlice(cmd.ArgAliases)) + for _, value := range cmd.ArgAliases { + buf.WriteString(fmt.Sprintf(" noun_aliases+=(%q)\n", value)) + } +} + +func gen(buf *bytes.Buffer, cmd *Command) { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c == cmd.helpCommand { + continue + } + gen(buf, c) + } + commandName := cmd.CommandPath() + commandName = strings.Replace(commandName, " ", "_", -1) + commandName = strings.Replace(commandName, ":", "__", -1) + + if cmd.Root() == cmd { + buf.WriteString(fmt.Sprintf("_%s_root_command()\n{\n", commandName)) + } else { + buf.WriteString(fmt.Sprintf("_%s()\n{\n", commandName)) + } + + buf.WriteString(fmt.Sprintf(" last_command=%q\n", commandName)) + buf.WriteString("\n") + buf.WriteString(" command_aliases=()\n") + buf.WriteString("\n") + + writeCommands(buf, cmd) + writeFlags(buf, cmd) + writeRequiredFlag(buf, cmd) + writeRequiredNouns(buf, cmd) + writeArgAliases(buf, cmd) + buf.WriteString("}\n\n") +} + +// GenBashCompletion generates bash completion file and writes to the passed writer. 
+func (c *Command) GenBashCompletion(w io.Writer) error { + buf := new(bytes.Buffer) + writePreamble(buf, c.Name()) + if len(c.BashCompletionFunction) > 0 { + buf.WriteString(c.BashCompletionFunction + "\n") + } + gen(buf, c) + writePostscript(buf, c.Name()) + + _, err := buf.WriteTo(w) + return err +} + +func nonCompletableFlag(flag *pflag.Flag) bool { + return flag.Hidden || len(flag.Deprecated) > 0 +} + +// GenBashCompletionFile generates bash completion file. +func (c *Command) GenBashCompletionFile(filename string) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenBashCompletion(outFile) +} + +// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkFlagRequired(name string) error { + return MarkFlagRequired(c.Flags(), name) +} + +// MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag if it exists, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkPersistentFlagRequired(name string) error { + return MarkFlagRequired(c.PersistentFlags(), name) +} + +// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, +// and causes your command to report an error if invoked without the flag. +func MarkFlagRequired(flags *pflag.FlagSet, name string) error { + return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"}) +} + +// MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag, if it exists. +// Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. +func (c *Command) MarkFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.Flags(), name, extensions...) +} + +// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. +// Generated bash autocompletion will call the bash function f for the flag. +func (c *Command) MarkFlagCustom(name string, f string) error { + return MarkFlagCustom(c.Flags(), name, f) +} + +// MarkPersistentFlagFilename adds the BashCompFilenameExt annotation to the named persistent flag, if it exists. +// Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. +func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.PersistentFlags(), name, extensions...) +} + +// MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag in the flag set, if it exists. +// Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. +func MarkFlagFilename(flags *pflag.FlagSet, name string, extensions ...string) error { + return flags.SetAnnotation(name, BashCompFilenameExt, extensions) +} + +// MarkFlagCustom adds the BashCompCustom annotation to the named flag in the flag set, if it exists. +// Generated bash autocompletion will call the bash function f for the flag. 
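Aside, an illustrative sketch (not part of the vendored file) of how an application ties these helpers together. The `lyra-demo` command, its `--config` flag, and the output filename are hypothetical; `MarkFlagRequired`, `MarkFlagFilename`, and `GenBashCompletionFile` are the functions defined in this file.

    package main

    import (
    	"log"

    	"github.com/spf13/cobra"
    )

    func main() {
    	// Hypothetical root command with a single flag.
    	root := &cobra.Command{Use: "lyra-demo"}
    	root.Flags().String("config", "", "path to a config file")

    	// Annotate the flag so the generated script treats it as required
    	// and completes it as a *.yaml / *.yml filename.
    	if err := root.MarkFlagRequired("config"); err != nil {
    		log.Fatal(err)
    	}
    	if err := root.MarkFlagFilename("config", "yaml", "yml"); err != nil {
    		log.Fatal(err)
    	}

    	// Emit the bash completion script; source it from ~/.bashrc to use it.
    	if err := root.GenBashCompletionFile("lyra-demo-completion.sh"); err != nil {
    		log.Fatal(err)
    	}
    }
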
+func MarkFlagCustom(flags *pflag.FlagSet, name string, f string) error { + return flags.SetAnnotation(name, BashCompCustom, []string{f}) +} diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go new file mode 100644 index 0000000..7010fd1 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra.go @@ -0,0 +1,200 @@ +// Copyright © 2013 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Commands similar to git, go tools and other modern CLI tools +// inspired by go, go-Commander, gh and subcommand + +package cobra + +import ( + "fmt" + "io" + "reflect" + "strconv" + "strings" + "text/template" + "unicode" +) + +var templateFuncs = template.FuncMap{ + "trim": strings.TrimSpace, + "trimRightSpace": trimRightSpace, + "trimTrailingWhitespaces": trimRightSpace, + "appendIfNotPresent": appendIfNotPresent, + "rpad": rpad, + "gt": Gt, + "eq": Eq, +} + +var initializers []func() + +// EnablePrefixMatching allows to set automatic prefix matching. Automatic prefix matching can be a dangerous thing +// to automatically enable in CLI tools. +// Set this to true to enable it. +var EnablePrefixMatching = false + +// EnableCommandSorting controls sorting of the slice of commands, which is turned on by default. +// To disable sorting, set it to false. +var EnableCommandSorting = true + +// MousetrapHelpText enables an information splash screen on Windows +// if the CLI is started from explorer.exe. +// To disable the mousetrap, just set this variable to blank string (""). +// Works only on Microsoft Windows. +var MousetrapHelpText string = `This is a command line tool. + +You need to open cmd.exe and run it from there. +` + +// AddTemplateFunc adds a template function that's available to Usage and Help +// template generation. +func AddTemplateFunc(name string, tmplFunc interface{}) { + templateFuncs[name] = tmplFunc +} + +// AddTemplateFuncs adds multiple template functions that are available to Usage and +// Help template generation. +func AddTemplateFuncs(tmplFuncs template.FuncMap) { + for k, v := range tmplFuncs { + templateFuncs[k] = v + } +} + +// OnInitialize sets the passed functions to be run when each command's +// Execute method is called. +func OnInitialize(y ...func()) { + initializers = append(initializers, y...) +} + +// FIXME Gt is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// Gt takes two types and checks whether the first type is greater than the second. In case of types Arrays, Chans, +// Maps and Slices, Gt will compare their lengths. Ints are compared directly while strings are first parsed as +// ints and then compared. 
+func Gt(a interface{}, b interface{}) bool { + var left, right int64 + av := reflect.ValueOf(a) + + switch av.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + left = int64(av.Len()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + left = av.Int() + case reflect.String: + left, _ = strconv.ParseInt(av.String(), 10, 64) + } + + bv := reflect.ValueOf(b) + + switch bv.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + right = int64(bv.Len()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + right = bv.Int() + case reflect.String: + right, _ = strconv.ParseInt(bv.String(), 10, 64) + } + + return left > right +} + +// FIXME Eq is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// Eq takes two types and checks whether they are equal. Supported types are int and string. Unsupported types will panic. +func Eq(a interface{}, b interface{}) bool { + av := reflect.ValueOf(a) + bv := reflect.ValueOf(b) + + switch av.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + panic("Eq called on unsupported type") + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return av.Int() == bv.Int() + case reflect.String: + return av.String() == bv.String() + } + return false +} + +func trimRightSpace(s string) string { + return strings.TrimRightFunc(s, unicode.IsSpace) +} + +// FIXME appendIfNotPresent is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// appendIfNotPresent will append stringToAppend to the end of s, but only if it's not yet present in s. +func appendIfNotPresent(s, stringToAppend string) string { + if strings.Contains(s, stringToAppend) { + return s + } + return s + " " + stringToAppend +} + +// rpad adds padding to the right of a string. +func rpad(s string, padding int) string { + template := fmt.Sprintf("%%-%ds", padding) + return fmt.Sprintf(template, s) +} + +// tmpl executes the given template text on data, writing the result to w. +func tmpl(w io.Writer, text string, data interface{}) error { + t := template.New("top") + t.Funcs(templateFuncs) + template.Must(t.Parse(text)) + return t.Execute(w, data) +} + +// ld compares two strings and returns the levenshtein distance between them. +func ld(s, t string, ignoreCase bool) int { + if ignoreCase { + s = strings.ToLower(s) + t = strings.ToLower(t) + } + d := make([][]int, len(s)+1) + for i := range d { + d[i] = make([]int, len(t)+1) + } + for i := range d { + d[i][0] = i + } + for j := range d[0] { + d[0][j] = j + } + for j := 1; j <= len(t); j++ { + for i := 1; i <= len(s); i++ { + if s[i-1] == t[j-1] { + d[i][j] = d[i-1][j-1] + } else { + min := d[i-1][j] + if d[i][j-1] < min { + min = d[i][j-1] + } + if d[i-1][j-1] < min { + min = d[i-1][j-1] + } + d[i][j] = min + 1 + } + } + + } + return d[len(s)][len(t)] +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/add.go b/vendor/github.com/spf13/cobra/cobra/cmd/add.go new file mode 100644 index 0000000..fb22096 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/add.go @@ -0,0 +1,179 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "fmt" + "os" + "path/filepath" + "unicode" + + "github.com/spf13/cobra" +) + +func init() { + addCmd.Flags().StringVarP(&packageName, "package", "t", "", "target package name (e.g. github.com/spf13/hugo)") + addCmd.Flags().StringVarP(&parentName, "parent", "p", "rootCmd", "variable name of parent command for this command") +} + +var packageName, parentName string + +var addCmd = &cobra.Command{ + Use: "add [command name]", + Aliases: []string{"command"}, + Short: "Add a command to a Cobra Application", + Long: `Add (cobra add) will create a new command, with a license and +the appropriate structure for a Cobra-based CLI application, +and register it to its parent (default rootCmd). + +If you want your command to be public, pass in the command name +with an initial uppercase letter. + +Example: cobra add server -> resulting in a new cmd/server.go`, + + Run: func(cmd *cobra.Command, args []string) { + if len(args) < 1 { + er("add needs a name for the command") + } + + var project *Project + if packageName != "" { + project = NewProject(packageName) + } else { + wd, err := os.Getwd() + if err != nil { + er(err) + } + project = NewProjectFromPath(wd) + } + + cmdName := validateCmdName(args[0]) + cmdPath := filepath.Join(project.CmdPath(), cmdName+".go") + createCmdFile(project.License(), cmdPath, cmdName) + + fmt.Fprintln(cmd.OutOrStdout(), cmdName, "created at", cmdPath) + }, +} + +// validateCmdName returns source without any dashes and underscore. +// If there will be dash or underscore, next letter will be uppered. +// It supports only ASCII (1-byte character) strings. +// https://github.com/spf13/cobra/issues/269 +func validateCmdName(source string) string { + i := 0 + l := len(source) + // The output is initialized on demand, then first dash or underscore + // occurs. + var output string + + for i < l { + if source[i] == '-' || source[i] == '_' { + if output == "" { + output = source[:i] + } + + // If it's last rune and it's dash or underscore, + // don't add it output and break the loop. + if i == l-1 { + break + } + + // If next character is dash or underscore, + // just skip the current character. + if source[i+1] == '-' || source[i+1] == '_' { + i++ + continue + } + + // If the current character is dash or underscore, + // upper next letter and add to output. + output += string(unicode.ToUpper(rune(source[i+1]))) + // We know, what source[i] is dash or underscore and source[i+1] is + // uppered character, so make i = i+2. + i += 2 + continue + } + + // If the current character isn't dash or underscore, + // just add it. + if output != "" { + output += string(source[i]) + } + i++ + } + + if output == "" { + return source // source is initially valid name. 
+ } + return output +} + +func createCmdFile(license License, path, cmdName string) { + template := `{{comment .copyright}} +{{if .license}}{{comment .license}}{{end}} + +package {{.cmdPackage}} + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +// {{.cmdName}}Cmd represents the {{.cmdName}} command +var {{.cmdName}}Cmd = &cobra.Command{ + Use: "{{.cmdName}}", + Short: "A brief description of your command", + Long: ` + "`" + `A longer description that spans multiple lines and likely contains examples +and usage of using your command. For example: + +Cobra is a CLI library for Go that empowers applications. +This application is a tool to generate the needed files +to quickly create a Cobra application.` + "`" + `, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("{{.cmdName}} called") + }, +} + +func init() { + {{.parentName}}.AddCommand({{.cmdName}}Cmd) + + // Here you will define your flags and configuration settings. + + // Cobra supports Persistent Flags which will work for this command + // and all subcommands, e.g.: + // {{.cmdName}}Cmd.PersistentFlags().String("foo", "", "A help for foo") + + // Cobra supports local flags which will only run when this command + // is called directly, e.g.: + // {{.cmdName}}Cmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") +} +` + + data := make(map[string]interface{}) + data["copyright"] = copyrightLine() + data["license"] = license.Header + data["cmdPackage"] = filepath.Base(filepath.Dir(path)) // last dir of path + data["parentName"] = parentName + data["cmdName"] = cmdName + + cmdScript, err := executeTemplate(template, data) + if err != nil { + er(err) + } + err = writeStringToFile(path, cmdScript) + if err != nil { + er(err) + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go b/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go new file mode 100644 index 0000000..cd94b3e --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/helpers.go @@ -0,0 +1,168 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "bytes" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "strings" + "text/template" +) + +var srcPaths []string + +func init() { + // Initialize srcPaths. + envGoPath := os.Getenv("GOPATH") + goPaths := filepath.SplitList(envGoPath) + if len(goPaths) == 0 { + // Adapted from https://github.com/Masterminds/glide/pull/798/files. + // As of Go 1.8 the GOPATH is no longer required to be set. Instead there + // is a default value. If there is no GOPATH check for the default value. + // Note, checking the GOPATH first to avoid invoking the go toolchain if + // possible. 
+ + goExecutable := os.Getenv("COBRA_GO_EXECUTABLE") + if len(goExecutable) <= 0 { + goExecutable = "go" + } + + out, err := exec.Command(goExecutable, "env", "GOPATH").Output() + if err != nil { + er(err) + } + + toolchainGoPath := strings.TrimSpace(string(out)) + goPaths = filepath.SplitList(toolchainGoPath) + if len(goPaths) == 0 { + er("$GOPATH is not set") + } + } + srcPaths = make([]string, 0, len(goPaths)) + for _, goPath := range goPaths { + srcPaths = append(srcPaths, filepath.Join(goPath, "src")) + } +} + +func er(msg interface{}) { + fmt.Println("Error:", msg) + os.Exit(1) +} + +// isEmpty checks if a given path is empty. +// Hidden files in path are ignored. +func isEmpty(path string) bool { + fi, err := os.Stat(path) + if err != nil { + er(err) + } + + if !fi.IsDir() { + return fi.Size() == 0 + } + + f, err := os.Open(path) + if err != nil { + er(err) + } + defer f.Close() + + names, err := f.Readdirnames(-1) + if err != nil && err != io.EOF { + er(err) + } + + for _, name := range names { + if len(name) > 0 && name[0] != '.' { + return false + } + } + return true +} + +// exists checks if a file or directory exists. +func exists(path string) bool { + if path == "" { + return false + } + _, err := os.Stat(path) + if err == nil { + return true + } + if !os.IsNotExist(err) { + er(err) + } + return false +} + +func executeTemplate(tmplStr string, data interface{}) (string, error) { + tmpl, err := template.New("").Funcs(template.FuncMap{"comment": commentifyString}).Parse(tmplStr) + if err != nil { + return "", err + } + + buf := new(bytes.Buffer) + err = tmpl.Execute(buf, data) + return buf.String(), err +} + +func writeStringToFile(path string, s string) error { + return writeToFile(path, strings.NewReader(s)) +} + +// writeToFile writes r to file with path only +// if file/directory on given path doesn't exist. +func writeToFile(path string, r io.Reader) error { + if exists(path) { + return fmt.Errorf("%v already exists", path) + } + + dir := filepath.Dir(path) + if dir != "" { + if err := os.MkdirAll(dir, 0777); err != nil { + return err + } + } + + file, err := os.Create(path) + if err != nil { + return err + } + defer file.Close() + + _, err = io.Copy(file, r) + return err +} + +// commentfyString comments every line of in. +func commentifyString(in string) string { + var newlines []string + lines := strings.Split(in, "\n") + for _, line := range lines { + if strings.HasPrefix(line, "//") { + newlines = append(newlines, line) + } else { + if line == "" { + newlines = append(newlines, "//") + } else { + newlines = append(newlines, "// "+line) + } + } + } + return strings.Join(newlines, "\n") +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/init.go b/vendor/github.com/spf13/cobra/cobra/cmd/init.go new file mode 100644 index 0000000..d65e6c8 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/init.go @@ -0,0 +1,234 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cmd + +import ( + "fmt" + "os" + "path" + "path/filepath" + + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var initCmd = &cobra.Command{ + Use: "init [name]", + Aliases: []string{"initialize", "initialise", "create"}, + Short: "Initialize a Cobra Application", + Long: `Initialize (cobra init) will create a new application, with a license +and the appropriate structure for a Cobra-based CLI application. + + * If a name is provided, it will be created in the current directory; + * If no name is provided, the current directory will be assumed; + * If a relative path is provided, it will be created inside $GOPATH + (e.g. github.com/spf13/hugo); + * If an absolute path is provided, it will be created; + * If the directory already exists but is empty, it will be used. + +Init will not use an existing directory with contents.`, + + Run: func(cmd *cobra.Command, args []string) { + wd, err := os.Getwd() + if err != nil { + er(err) + } + + var project *Project + if len(args) == 0 { + project = NewProjectFromPath(wd) + } else if len(args) == 1 { + arg := args[0] + if arg[0] == '.' { + arg = filepath.Join(wd, arg) + } + if filepath.IsAbs(arg) { + project = NewProjectFromPath(arg) + } else { + project = NewProject(arg) + } + } else { + er("please provide only one argument") + } + + initializeProject(project) + + fmt.Fprintln(cmd.OutOrStdout(), `Your Cobra application is ready at +`+project.AbsPath()+` + +Give it a try by going there and running `+"`go run main.go`."+` +Add commands to it by running `+"`cobra add [cmdname]`.") + }, +} + +func initializeProject(project *Project) { + if !exists(project.AbsPath()) { // If path doesn't yet exist, create it + err := os.MkdirAll(project.AbsPath(), os.ModePerm) + if err != nil { + er(err) + } + } else if !isEmpty(project.AbsPath()) { // If path exists and is not empty don't use it + er("Cobra will not create a new project in a non empty directory: " + project.AbsPath()) + } + + // We have a directory and it's empty. Time to initialize it. + createLicenseFile(project.License(), project.AbsPath()) + createMainFile(project) + createRootCmdFile(project) +} + +func createLicenseFile(license License, path string) { + data := make(map[string]interface{}) + data["copyright"] = copyrightLine() + + // Generate license template from text and data. + text, err := executeTemplate(license.Text, data) + if err != nil { + er(err) + } + + // Write license text to LICENSE file. 
+ err = writeStringToFile(filepath.Join(path, "LICENSE"), text) + if err != nil { + er(err) + } +} + +func createMainFile(project *Project) { + mainTemplate := `{{ comment .copyright }} +{{if .license}}{{ comment .license }}{{end}} + +package main + +import "{{ .importpath }}" + +func main() { + cmd.Execute() +} +` + data := make(map[string]interface{}) + data["copyright"] = copyrightLine() + data["license"] = project.License().Header + data["importpath"] = path.Join(project.Name(), filepath.Base(project.CmdPath())) + + mainScript, err := executeTemplate(mainTemplate, data) + if err != nil { + er(err) + } + + err = writeStringToFile(filepath.Join(project.AbsPath(), "main.go"), mainScript) + if err != nil { + er(err) + } +} + +func createRootCmdFile(project *Project) { + template := `{{comment .copyright}} +{{if .license}}{{comment .license}}{{end}} + +package cmd + +import ( + "fmt" + "os" +{{if .viper}} + homedir "github.com/mitchellh/go-homedir"{{end}} + "github.com/spf13/cobra"{{if .viper}} + "github.com/spf13/viper"{{end}} +){{if .viper}} + +var cfgFile string{{end}} + +// rootCmd represents the base command when called without any subcommands +var rootCmd = &cobra.Command{ + Use: "{{.appName}}", + Short: "A brief description of your application", + Long: ` + "`" + `A longer description that spans multiple lines and likely contains +examples and usage of using your application. For example: + +Cobra is a CLI library for Go that empowers applications. +This application is a tool to generate the needed files +to quickly create a Cobra application.` + "`" + `, + // Uncomment the following line if your bare application + // has an action associated with it: + // Run: func(cmd *cobra.Command, args []string) { }, +} + +// Execute adds all child commands to the root command and sets flags appropriately. +// This is called by main.main(). It only needs to happen once to the rootCmd. +func Execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Println(err) + os.Exit(1) + } +} + +func init() { {{- if .viper}} + cobra.OnInitialize(initConfig) +{{end}} + // Here you will define your flags and configuration settings. + // Cobra supports persistent flags, which, if defined here, + // will be global for your application.{{ if .viper }} + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ else }} + // rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.{{ .appName }}.yaml)"){{ end }} + + // Cobra also supports local flags, which will only run + // when this action is called directly. + rootCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle") +}{{ if .viper }} + +// initConfig reads in config file and ENV variables if set. +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := homedir.Dir() + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + // Search config in home directory with name ".{{ .appName }}" (without extension). + viper.AddConfigPath(home) + viper.SetConfigName(".{{ .appName }}") + } + + viper.AutomaticEnv() // read in environment variables that match + + // If a config file is found, read it in. 
+ if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +}{{ end }} +` + + data := make(map[string]interface{}) + data["copyright"] = copyrightLine() + data["viper"] = viper.GetBool("useViper") + data["license"] = project.License().Header + data["appName"] = path.Base(project.Name()) + + rootCmdScript, err := executeTemplate(template, data) + if err != nil { + er(err) + } + + err = writeStringToFile(filepath.Join(project.CmdPath(), "root.go"), rootCmdScript) + if err != nil { + er(err) + } + +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go new file mode 100644 index 0000000..bc22e97 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_agpl.go @@ -0,0 +1,683 @@ +package cmd + +func initAgpl() { + Licenses["agpl"] = License{ + Name: "GNU Affero General Public License", + PossibleMatches: []string{"agpl", "affero gpl", "gnu agpl"}, + Header: ` +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Affero General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Affero General Public License for more details. + +You should have received a copy of the GNU Affero General Public License +along with this program. If not, see .`, + Text: ` GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. + + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. 
+The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. 
+ + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+ + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go new file mode 100644 index 0000000..38393d5 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_apache_2.go @@ -0,0 +1,238 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initApache2() { + Licenses["apache"] = License{ + Name: "Apache 2.0", + PossibleMatches: []string{"apache", "apache20", "apache 2.0", "apache2.0", "apache-2.0"}, + Header: ` +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License.`, + Text: ` + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go new file mode 100644 index 0000000..4a847e0 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_2.go @@ -0,0 +1,71 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initBsdClause2() { + Licenses["freebsd"] = License{ + Name: "Simplified BSD License", + PossibleMatches: []string{"freebsd", "simpbsd", "simple bsd", "2-clause bsd", + "2 clause bsd", "simplified bsd license"}, + Header: `All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE.`, + Text: `{{ .copyright }} +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go new file mode 100644 index 0000000..c7476b3 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_bsd_clause_3.go @@ -0,0 +1,78 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initBsdClause3() { + Licenses["bsd"] = License{ + Name: "NewBSD", + PossibleMatches: []string{"bsd", "newbsd", "3 clause bsd", "3-clause bsd"}, + Header: `All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE.`, + Text: `{{ .copyright }} +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go new file mode 100644 index 0000000..03e05b3 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_2.go @@ -0,0 +1,376 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initGpl2() { + Licenses["gpl2"] = License{ + Name: "GNU General Public License 2.0", + PossibleMatches: []string{"gpl2", "gnu gpl2", "gplv2"}, + Header: ` +This program is free software; you can redistribute it and/or +modify it under the terms of the GNU General Public License +as published by the Free Software Foundation; either version 2 +of the License, or (at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with this program. If not, see .`, + Text: ` GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. 
+ + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. 
You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) 
+ +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. 
+ +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type 'show c' for details. + +The hypothetical commands 'show w' and 'show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than 'show w' and 'show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + 'Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. 
+`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go new file mode 100644 index 0000000..ce07679 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_gpl_3.go @@ -0,0 +1,711 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initGpl3() { + Licenses["gpl3"] = License{ + Name: "GNU General Public License 3.0", + PossibleMatches: []string{"gpl3", "gplv3", "gpl", "gnu gpl3", "gnu gpl"}, + Header: ` +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with this program. If not, see .`, + Text: ` GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. 
And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type 'show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type 'show c' for details. + +The hypothetical commands 'show w' and 'show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go new file mode 100644 index 0000000..0f8b96c --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_lgpl.go @@ -0,0 +1,186 @@ +package cmd + +func initLgpl() { + Licenses["lgpl"] = License{ + Name: "GNU Lesser General Public License", + PossibleMatches: []string{"lgpl", "lesser gpl", "gnu lgpl"}, + Header: ` +This program is free software: you can redistribute it and/or modify +it under the terms of the GNU Lesser General Public License as published by +the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU Lesser General Public License for more details. + +You should have received a copy of the GNU Lesser General Public License +along with this program. 
If not, see .`, + Text: ` GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. 
+ + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. 
If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library.`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go b/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go new file mode 100644 index 0000000..bd2d0c4 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/license_mit.go @@ -0,0 +1,63 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +func initMit() { + Licenses["mit"] = License{ + Name: "MIT License", + PossibleMatches: []string{"mit"}, + Header: ` +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.`, + Text: `The MIT License (MIT) + +{{ .copyright }} + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. +`, + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go b/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go new file mode 100644 index 0000000..a070134 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/licenses.go @@ -0,0 +1,118 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Parts inspired by https://github.com/ryanuber/go-license + +package cmd + +import ( + "strings" + "time" + + "github.com/spf13/viper" +) + +// Licenses contains all possible licenses a user can choose from. +var Licenses = make(map[string]License) + +// License represents a software license agreement, containing the Name of +// the license, its possible matches (on the command line as given to cobra), +// the header to be used with each file on the file's creating, and the text +// of the license +type License struct { + Name string // The type of license in use + PossibleMatches []string // Similar names to guess + Text string // License text data + Header string // License header for source files +} + +func init() { + // Allows a user to not use a license. + Licenses["none"] = License{"None", []string{"none", "false"}, "", ""} + + initApache2() + initMit() + initBsdClause3() + initBsdClause2() + initGpl2() + initGpl3() + initLgpl() + initAgpl() +} + +// getLicense returns license specified by user in flag or in config. +// If user didn't specify the license, it returns Apache License 2.0. +// +// TODO: Inspect project for existing license +func getLicense() License { + // If explicitly flagged, use that. 
+ if userLicense != "" { + return findLicense(userLicense) + } + + // If user wants to have custom license, use that. + if viper.IsSet("license.header") || viper.IsSet("license.text") { + return License{Header: viper.GetString("license.header"), + Text: viper.GetString("license.text")} + } + + // If user wants to have built-in license, use that. + if viper.IsSet("license") { + return findLicense(viper.GetString("license")) + } + + // If user didn't set any license, use Apache 2.0 by default. + return Licenses["apache"] +} + +func copyrightLine() string { + author := viper.GetString("author") + + year := viper.GetString("year") // For tests. + if year == "" { + year = time.Now().Format("2006") + } + + return "Copyright © " + year + " " + author +} + +// findLicense looks for License object of built-in licenses. +// If it didn't find license, then the app will be terminated and +// error will be printed. +func findLicense(name string) License { + found := matchLicense(name) + if found == "" { + er("unknown license: " + name) + } + return Licenses[found] +} + +// matchLicense compares the given a license name +// to PossibleMatches of all built-in licenses. +// It returns blank string, if name is blank string or it didn't find +// then appropriate match to name. +func matchLicense(name string) string { + if name == "" { + return "" + } + + for key, lic := range Licenses { + for _, match := range lic.PossibleMatches { + if strings.EqualFold(name, match) { + return key + } + } + } + + return "" +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/project.go b/vendor/github.com/spf13/cobra/cobra/cmd/project.go new file mode 100644 index 0000000..7ddb825 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/project.go @@ -0,0 +1,200 @@ +package cmd + +import ( + "os" + "path/filepath" + "runtime" + "strings" +) + +// Project contains name, license and paths to projects. +type Project struct { + absPath string + cmdPath string + srcPath string + license License + name string +} + +// NewProject returns Project with specified project name. +func NewProject(projectName string) *Project { + if projectName == "" { + er("can't create project with blank name") + } + + p := new(Project) + p.name = projectName + + // 1. Find already created protect. + p.absPath = findPackage(projectName) + + // 2. If there are no created project with this path, and user is in GOPATH, + // then use GOPATH/src/projectName. + if p.absPath == "" { + wd, err := os.Getwd() + if err != nil { + er(err) + } + for _, srcPath := range srcPaths { + goPath := filepath.Dir(srcPath) + if filepathHasPrefix(wd, goPath) { + p.absPath = filepath.Join(srcPath, projectName) + break + } + } + } + + // 3. If user is not in GOPATH, then use (first GOPATH)/src/projectName. + if p.absPath == "" { + p.absPath = filepath.Join(srcPaths[0], projectName) + } + + return p +} + +// findPackage returns full path to existing go package in GOPATHs. +func findPackage(packageName string) string { + if packageName == "" { + return "" + } + + for _, srcPath := range srcPaths { + packagePath := filepath.Join(srcPath, packageName) + if exists(packagePath) { + return packagePath + } + } + + return "" +} + +// NewProjectFromPath returns Project with specified absolute path to +// package. +func NewProjectFromPath(absPath string) *Project { + if absPath == "" { + er("can't create project: absPath can't be blank") + } + if !filepath.IsAbs(absPath) { + er("can't create project: absPath is not absolute") + } + + // If absPath is symlink, use its destination. 
+ fi, err := os.Lstat(absPath) + if err != nil { + er("can't read path info: " + err.Error()) + } + if fi.Mode()&os.ModeSymlink != 0 { + path, err := os.Readlink(absPath) + if err != nil { + er("can't read the destination of symlink: " + err.Error()) + } + absPath = path + } + + p := new(Project) + p.absPath = strings.TrimSuffix(absPath, findCmdDir(absPath)) + p.name = filepath.ToSlash(trimSrcPath(p.absPath, p.SrcPath())) + return p +} + +// trimSrcPath trims at the beginning of absPath the srcPath. +func trimSrcPath(absPath, srcPath string) string { + relPath, err := filepath.Rel(srcPath, absPath) + if err != nil { + er(err) + } + return relPath +} + +// License returns the License object of project. +func (p *Project) License() License { + if p.license.Text == "" && p.license.Name != "None" { + p.license = getLicense() + } + return p.license +} + +// Name returns the name of project, e.g. "github.com/spf13/cobra" +func (p Project) Name() string { + return p.name +} + +// CmdPath returns absolute path to directory, where all commands are located. +func (p *Project) CmdPath() string { + if p.absPath == "" { + return "" + } + if p.cmdPath == "" { + p.cmdPath = filepath.Join(p.absPath, findCmdDir(p.absPath)) + } + return p.cmdPath +} + +// findCmdDir checks if base of absPath is cmd dir and returns it or +// looks for existing cmd dir in absPath. +func findCmdDir(absPath string) string { + if !exists(absPath) || isEmpty(absPath) { + return "cmd" + } + + if isCmdDir(absPath) { + return filepath.Base(absPath) + } + + files, _ := filepath.Glob(filepath.Join(absPath, "c*")) + for _, file := range files { + if isCmdDir(file) { + return filepath.Base(file) + } + } + + return "cmd" +} + +// isCmdDir checks if base of name is one of cmdDir. +func isCmdDir(name string) bool { + name = filepath.Base(name) + for _, cmdDir := range []string{"cmd", "cmds", "command", "commands"} { + if name == cmdDir { + return true + } + } + return false +} + +// AbsPath returns absolute path of project. +func (p Project) AbsPath() string { + return p.absPath +} + +// SrcPath returns absolute path to $GOPATH/src where project is located. +func (p *Project) SrcPath() string { + if p.srcPath != "" { + return p.srcPath + } + if p.absPath == "" { + p.srcPath = srcPaths[0] + return p.srcPath + } + + for _, srcPath := range srcPaths { + if filepathHasPrefix(p.absPath, srcPath) { + p.srcPath = srcPath + break + } + } + + return p.srcPath +} + +func filepathHasPrefix(path string, prefix string) bool { + if len(path) <= len(prefix) { + return false + } + if runtime.GOOS == "windows" { + // Paths in windows are case-insensitive. + return strings.EqualFold(path[0:len(prefix)], prefix) + } + return path[0:len(prefix)] == prefix + +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/root.go b/vendor/github.com/spf13/cobra/cobra/cmd/root.go new file mode 100644 index 0000000..19568f9 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/root.go @@ -0,0 +1,79 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "fmt" + + homedir "github.com/mitchellh/go-homedir" + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var ( + // Used for flags. + cfgFile, userLicense string + + rootCmd = &cobra.Command{ + Use: "cobra", + Short: "A generator for Cobra based Applications", + Long: `Cobra is a CLI library for Go that empowers applications. +This application is a tool to generate the needed files +to quickly create a Cobra application.`, + } +) + +// Execute executes the root command. +func Execute() { + rootCmd.Execute() +} + +func init() { + cobra.OnInitialize(initConfig) + + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") + rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution") + rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project") + rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) + viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) + viper.SetDefault("author", "NAME HERE ") + viper.SetDefault("license", "apache") + + rootCmd.AddCommand(addCmd) + rootCmd.AddCommand(initCmd) +} + +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := homedir.Dir() + if err != nil { + er(err) + } + + // Search config in home directory with name ".cobra" (without extension). + viper.AddConfigPath(home) + viper.SetConfigName(".cobra") + } + + viper.AutomaticEnv() + + if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +} diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/spf13/cobra/cobra/main.go b/vendor/github.com/spf13/cobra/cobra/main.go new file mode 100644 index 0000000..c3a9d9c --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra/main.go @@ -0,0 +1,20 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package main + +import "github.com/spf13/cobra/cobra/cmd" + +func main() { + cmd.Execute() +} diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go new file mode 100644 index 0000000..34d1bf3 --- /dev/null +++ b/vendor/github.com/spf13/cobra/command.go @@ -0,0 +1,1517 @@ +// Copyright © 2013 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package cobra is a commander providing a simple interface to create powerful modern CLI interfaces. +// In addition to providing an interface, Cobra simultaneously provides a controller to organize your application code. +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + + flag "github.com/spf13/pflag" +) + +// FParseErrWhitelist configures Flag parse errors to be ignored +type FParseErrWhitelist flag.ParseErrorsWhitelist + +// Command is just that, a command for your application. +// E.g. 'go run ...' - 'run' is the command. Cobra requires +// you to define the usage and description as part of your command +// definition to ensure usability. +type Command struct { + // Use is the one-line usage message. + Use string + + // Aliases is an array of aliases that can be used instead of the first word in Use. + Aliases []string + + // SuggestFor is an array of command names for which this command will be suggested - + // similar to aliases but only suggests. + SuggestFor []string + + // Short is the short description shown in the 'help' output. + Short string + + // Long is the long message shown in the 'help ' output. + Long string + + // Example is examples of how to use the command. + Example string + + // ValidArgs is list of all valid non-flag arguments that are accepted in bash completions + ValidArgs []string + + // Expected arguments + Args PositionalArgs + + // ArgAliases is List of aliases for ValidArgs. + // These are not suggested to the user in the bash completion, + // but accepted if entered manually. + ArgAliases []string + + // BashCompletionFunction is custom functions used by the bash autocompletion generator. + BashCompletionFunction string + + // Deprecated defines, if this command is deprecated and should print this string when used. + Deprecated string + + // Hidden defines, if this command is hidden and should NOT show up in the list of available commands. + Hidden bool + + // Annotations are key/value pairs that can be used by applications to identify or + // group commands. + Annotations map[string]string + + // Version defines the version for this command. If this value is non-empty and the command does not + // define a "version" flag, a "version" boolean flag will be added to the command and, if specified, + // will print content of the "Version" variable. + Version string + + // The *Run functions are executed in the following order: + // * PersistentPreRun() + // * PreRun() + // * Run() + // * PostRun() + // * PersistentPostRun() + // All functions get the same args, the arguments after the command name. 
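+	// Only the nearest command in the chain, starting from the command being run,
+	// that defines a Persistent*Run hook has that hook executed; hooks defined on
+	// commands further up the chain are skipped.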
+ // + // PersistentPreRun: children of this command will inherit and execute. + PersistentPreRun func(cmd *Command, args []string) + // PersistentPreRunE: PersistentPreRun but returns an error. + PersistentPreRunE func(cmd *Command, args []string) error + // PreRun: children of this command will not inherit. + PreRun func(cmd *Command, args []string) + // PreRunE: PreRun but returns an error. + PreRunE func(cmd *Command, args []string) error + // Run: Typically the actual work function. Most commands will only implement this. + Run func(cmd *Command, args []string) + // RunE: Run but returns an error. + RunE func(cmd *Command, args []string) error + // PostRun: run after the Run command. + PostRun func(cmd *Command, args []string) + // PostRunE: PostRun but returns an error. + PostRunE func(cmd *Command, args []string) error + // PersistentPostRun: children of this command will inherit and execute after PostRun. + PersistentPostRun func(cmd *Command, args []string) + // PersistentPostRunE: PersistentPostRun but returns an error. + PersistentPostRunE func(cmd *Command, args []string) error + + // SilenceErrors is an option to quiet errors down stream. + SilenceErrors bool + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage bool + + // DisableFlagParsing disables the flag parsing. + // If this is true all flags will be passed to the command as arguments. + DisableFlagParsing bool + + // DisableAutoGenTag defines, if gen tag ("Auto generated by spf13/cobra...") + // will be printed by generating docs for this command. + DisableAutoGenTag bool + + // DisableFlagsInUseLine will disable the addition of [flags] to the usage + // line of a command when printing help or generating docs + DisableFlagsInUseLine bool + + // DisableSuggestions disables the suggestions based on Levenshtein distance + // that go along with 'unknown command' messages. + DisableSuggestions bool + // SuggestionsMinimumDistance defines minimum levenshtein distance to display suggestions. + // Must be > 0. + SuggestionsMinimumDistance int + + // TraverseChildren parses flags on all parents before executing child command. + TraverseChildren bool + + //FParseErrWhitelist flag parse errors to be ignored + FParseErrWhitelist FParseErrWhitelist + + // commands is the list of commands supported by this program. + commands []*Command + // parent is a parent command for this command. + parent *Command + // Max lengths of commands' string lengths for use in padding. + commandsMaxUseLen int + commandsMaxCommandPathLen int + commandsMaxNameLen int + // commandsAreSorted defines, if command slice are sorted or not. + commandsAreSorted bool + // commandCalledAs is the name or alias value used to call this command. + commandCalledAs struct { + name string + called bool + } + + // args is actual args parsed from flags. + args []string + // flagErrorBuf contains all error messages from pflag. + flagErrorBuf *bytes.Buffer + // flags is full set of flags. + flags *flag.FlagSet + // pflags contains persistent flags. + pflags *flag.FlagSet + // lflags contains local flags. + lflags *flag.FlagSet + // iflags contains inherited flags. + iflags *flag.FlagSet + // parentsPflags is all persistent flags of cmd's parents. + parentsPflags *flag.FlagSet + // globNormFunc is the global normalization function + // that we can use on every pflag set and children commands + globNormFunc func(f *flag.FlagSet, name string) flag.NormalizedName + + // output is an output writer defined by user. 
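+	// When nil, writes fall back to the parent command's writer and ultimately to
+	// os.Stdout or os.Stderr, depending on whether OutOrStdout or OutOrStderr is used.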
+ output io.Writer + // usageFunc is usage func defined by user. + usageFunc func(*Command) error + // usageTemplate is usage template defined by user. + usageTemplate string + // flagErrorFunc is func defined by user and it's called when the parsing of + // flags returns an error. + flagErrorFunc func(*Command, error) error + // helpTemplate is help template defined by user. + helpTemplate string + // helpFunc is help func defined by user. + helpFunc func(*Command, []string) + // helpCommand is command with usage 'help'. If it's not defined by user, + // cobra uses default help command. + helpCommand *Command + // versionTemplate is the version template defined by user. + versionTemplate string +} + +// SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden +// particularly useful when testing. +func (c *Command) SetArgs(a []string) { + c.args = a +} + +// SetOutput sets the destination for usage and error messages. +// If output is nil, os.Stderr is used. +func (c *Command) SetOutput(output io.Writer) { + c.output = output +} + +// SetUsageFunc sets usage function. Usage can be defined by application. +func (c *Command) SetUsageFunc(f func(*Command) error) { + c.usageFunc = f +} + +// SetUsageTemplate sets usage template. Can be defined by Application. +func (c *Command) SetUsageTemplate(s string) { + c.usageTemplate = s +} + +// SetFlagErrorFunc sets a function to generate an error when flag parsing +// fails. +func (c *Command) SetFlagErrorFunc(f func(*Command, error) error) { + c.flagErrorFunc = f +} + +// SetHelpFunc sets help function. Can be defined by Application. +func (c *Command) SetHelpFunc(f func(*Command, []string)) { + c.helpFunc = f +} + +// SetHelpCommand sets help command. +func (c *Command) SetHelpCommand(cmd *Command) { + c.helpCommand = cmd +} + +// SetHelpTemplate sets help template to be used. Application can use it to set custom template. +func (c *Command) SetHelpTemplate(s string) { + c.helpTemplate = s +} + +// SetVersionTemplate sets version template to be used. Application can use it to set custom template. +func (c *Command) SetVersionTemplate(s string) { + c.versionTemplate = s +} + +// SetGlobalNormalizationFunc sets a normalization function to all flag sets and also to child commands. +// The user should not have a cyclic dependency on commands. +func (c *Command) SetGlobalNormalizationFunc(n func(f *flag.FlagSet, name string) flag.NormalizedName) { + c.Flags().SetNormalizeFunc(n) + c.PersistentFlags().SetNormalizeFunc(n) + c.globNormFunc = n + + for _, command := range c.commands { + command.SetGlobalNormalizationFunc(n) + } +} + +// OutOrStdout returns output to stdout. +func (c *Command) OutOrStdout() io.Writer { + return c.getOut(os.Stdout) +} + +// OutOrStderr returns output to stderr +func (c *Command) OutOrStderr() io.Writer { + return c.getOut(os.Stderr) +} + +func (c *Command) getOut(def io.Writer) io.Writer { + if c.output != nil { + return c.output + } + if c.HasParent() { + return c.parent.getOut(def) + } + return def +} + +// UsageFunc returns either the function set by SetUsageFunc for this command +// or a parent, or it returns a default usage function. 
+func (c *Command) UsageFunc() (f func(*Command) error) { + if c.usageFunc != nil { + return c.usageFunc + } + if c.HasParent() { + return c.Parent().UsageFunc() + } + return func(c *Command) error { + c.mergePersistentFlags() + err := tmpl(c.OutOrStderr(), c.UsageTemplate(), c) + if err != nil { + c.Println(err) + } + return err + } +} + +// Usage puts out the usage for the command. +// Used when a user provides invalid input. +// Can be defined by user by overriding UsageFunc. +func (c *Command) Usage() error { + return c.UsageFunc()(c) +} + +// HelpFunc returns either the function set by SetHelpFunc for this command +// or a parent, or it returns a function with default help behavior. +func (c *Command) HelpFunc() func(*Command, []string) { + if c.helpFunc != nil { + return c.helpFunc + } + if c.HasParent() { + return c.Parent().HelpFunc() + } + return func(c *Command, a []string) { + c.mergePersistentFlags() + err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c) + if err != nil { + c.Println(err) + } + } +} + +// Help puts out the help for the command. +// Used when a user calls help [command]. +// Can be defined by user by overriding HelpFunc. +func (c *Command) Help() error { + c.HelpFunc()(c, []string{}) + return nil +} + +// UsageString return usage string. +func (c *Command) UsageString() string { + tmpOutput := c.output + bb := new(bytes.Buffer) + c.SetOutput(bb) + c.Usage() + c.output = tmpOutput + return bb.String() +} + +// FlagErrorFunc returns either the function set by SetFlagErrorFunc for this +// command or a parent, or it returns a function which returns the original +// error. +func (c *Command) FlagErrorFunc() (f func(*Command, error) error) { + if c.flagErrorFunc != nil { + return c.flagErrorFunc + } + + if c.HasParent() { + return c.parent.FlagErrorFunc() + } + return func(c *Command, err error) error { + return err + } +} + +var minUsagePadding = 25 + +// UsagePadding return padding for the usage. +func (c *Command) UsagePadding() int { + if c.parent == nil || minUsagePadding > c.parent.commandsMaxUseLen { + return minUsagePadding + } + return c.parent.commandsMaxUseLen +} + +var minCommandPathPadding = 11 + +// CommandPathPadding return padding for the command path. +func (c *Command) CommandPathPadding() int { + if c.parent == nil || minCommandPathPadding > c.parent.commandsMaxCommandPathLen { + return minCommandPathPadding + } + return c.parent.commandsMaxCommandPathLen +} + +var minNamePadding = 11 + +// NamePadding returns padding for the name. +func (c *Command) NamePadding() int { + if c.parent == nil || minNamePadding > c.parent.commandsMaxNameLen { + return minNamePadding + } + return c.parent.commandsMaxNameLen +} + +// UsageTemplate returns usage template for the command. 
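+// The template is executed with the Command itself as its data, so custom
+// templates can reference fields and methods such as .UseLine, .Example and .Commands.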
+func (c *Command) UsageTemplate() string { + if c.usageTemplate != "" { + return c.usageTemplate + } + + if c.HasParent() { + return c.parent.UsageTemplate() + } + return `Usage:{{if .Runnable}} + {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}} + {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}} + +Aliases: + {{.NameAndAliases}}{{end}}{{if .HasExample}} + +Examples: +{{.Example}}{{end}}{{if .HasAvailableSubCommands}} + +Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}} + {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}} + +Flags: +{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}} + +Global Flags: +{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}} + +Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}} + {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}} + +Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}} +` +} + +// HelpTemplate return help template for the command. +func (c *Command) HelpTemplate() string { + if c.helpTemplate != "" { + return c.helpTemplate + } + + if c.HasParent() { + return c.parent.HelpTemplate() + } + return `{{with (or .Long .Short)}}{{. | trimTrailingWhitespaces}} + +{{end}}{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}` +} + +// VersionTemplate return version template for the command. +func (c *Command) VersionTemplate() string { + if c.versionTemplate != "" { + return c.versionTemplate + } + + if c.HasParent() { + return c.parent.VersionTemplate() + } + return `{{with .Name}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}} +` +} + +func hasNoOptDefVal(name string, fs *flag.FlagSet) bool { + flag := fs.Lookup(name) + if flag == nil { + return false + } + return flag.NoOptDefVal != "" +} + +func shortHasNoOptDefVal(name string, fs *flag.FlagSet) bool { + if len(name) == 0 { + return false + } + + flag := fs.ShorthandLookup(name[:1]) + if flag == nil { + return false + } + return flag.NoOptDefVal != "" +} + +func stripFlags(args []string, c *Command) []string { + if len(args) == 0 { + return args + } + c.mergePersistentFlags() + + commands := []string{} + flags := c.Flags() + +Loop: + for len(args) > 0 { + s := args[0] + args = args[1:] + switch { + case s == "--": + // "--" terminates the flags + break Loop + case strings.HasPrefix(s, "--") && !strings.Contains(s, "=") && !hasNoOptDefVal(s[2:], flags): + // If '--flag arg' then + // delete arg from args. + fallthrough // (do the same as below) + case strings.HasPrefix(s, "-") && !strings.Contains(s, "=") && len(s) == 2 && !shortHasNoOptDefVal(s[1:], flags): + // If '-f arg' then + // delete 'arg' from args or break the loop if len(args) <= 1. + if len(args) <= 1 { + break Loop + } else { + args = args[1:] + continue + } + case s != "" && !strings.HasPrefix(s, "-"): + commands = append(commands, s) + } + } + + return commands +} + +// argsMinusFirstX removes only the first x from args. Otherwise, commands that look like +// openshift admin policy add-role-to-user admin my-user, lose the admin argument (arg[4]). +func argsMinusFirstX(args []string, x string) []string { + for i, y := range args { + if x == y { + ret := []string{} + ret = append(ret, args[:i]...) + ret = append(ret, args[i+1:]...) 
+ return ret + } + } + return args +} + +func isFlagArg(arg string) bool { + return ((len(arg) >= 3 && arg[1] == '-') || + (len(arg) >= 2 && arg[0] == '-' && arg[1] != '-')) +} + +// Find the target command given the args and command tree +// Meant to be run on the highest node. Only searches down. +func (c *Command) Find(args []string) (*Command, []string, error) { + var innerfind func(*Command, []string) (*Command, []string) + + innerfind = func(c *Command, innerArgs []string) (*Command, []string) { + argsWOflags := stripFlags(innerArgs, c) + if len(argsWOflags) == 0 { + return c, innerArgs + } + nextSubCmd := argsWOflags[0] + + cmd := c.findNext(nextSubCmd) + if cmd != nil { + return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd)) + } + return c, innerArgs + } + + commandFound, a := innerfind(c, args) + if commandFound.Args == nil { + return commandFound, a, legacyArgs(commandFound, stripFlags(a, commandFound)) + } + return commandFound, a, nil +} + +func (c *Command) findSuggestions(arg string) string { + if c.DisableSuggestions { + return "" + } + if c.SuggestionsMinimumDistance <= 0 { + c.SuggestionsMinimumDistance = 2 + } + suggestionsString := "" + if suggestions := c.SuggestionsFor(arg); len(suggestions) > 0 { + suggestionsString += "\n\nDid you mean this?\n" + for _, s := range suggestions { + suggestionsString += fmt.Sprintf("\t%v\n", s) + } + } + return suggestionsString +} + +func (c *Command) findNext(next string) *Command { + matches := make([]*Command, 0) + for _, cmd := range c.commands { + if cmd.Name() == next || cmd.HasAlias(next) { + cmd.commandCalledAs.name = next + return cmd + } + if EnablePrefixMatching && cmd.hasNameOrAliasPrefix(next) { + matches = append(matches, cmd) + } + } + + if len(matches) == 1 { + return matches[0] + } + + return nil +} + +// Traverse the command tree to find the command, and parse args for +// each parent. +func (c *Command) Traverse(args []string) (*Command, []string, error) { + flags := []string{} + inFlag := false + + for i, arg := range args { + switch { + // A long flag with a space separated value + case strings.HasPrefix(arg, "--") && !strings.Contains(arg, "="): + // TODO: this isn't quite right, we should really check ahead for 'true' or 'false' + inFlag = !hasNoOptDefVal(arg[2:], c.Flags()) + flags = append(flags, arg) + continue + // A short flag with a space separated value + case strings.HasPrefix(arg, "-") && !strings.Contains(arg, "=") && len(arg) == 2 && !shortHasNoOptDefVal(arg[1:], c.Flags()): + inFlag = true + flags = append(flags, arg) + continue + // The value for a flag + case inFlag: + inFlag = false + flags = append(flags, arg) + continue + // A flag without a value, or with an `=` separated value + case isFlagArg(arg): + flags = append(flags, arg) + continue + } + + cmd := c.findNext(arg) + if cmd == nil { + return c, args, nil + } + + if err := c.ParseFlags(flags); err != nil { + return nil, args, err + } + return cmd.Traverse(args[i+1:]) + } + return c, args, nil +} + +// SuggestionsFor provides suggestions for the typedName. 
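+// A command name is suggested when typedName is within SuggestionsMinimumDistance
+// of it by Levenshtein distance, when typedName is a case-insensitive prefix of it,
+// or when typedName matches one of that command's SuggestFor entries.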
+func (c *Command) SuggestionsFor(typedName string) []string { + suggestions := []string{} + for _, cmd := range c.commands { + if cmd.IsAvailableCommand() { + levenshteinDistance := ld(typedName, cmd.Name(), true) + suggestByLevenshtein := levenshteinDistance <= c.SuggestionsMinimumDistance + suggestByPrefix := strings.HasPrefix(strings.ToLower(cmd.Name()), strings.ToLower(typedName)) + if suggestByLevenshtein || suggestByPrefix { + suggestions = append(suggestions, cmd.Name()) + } + for _, explicitSuggestion := range cmd.SuggestFor { + if strings.EqualFold(typedName, explicitSuggestion) { + suggestions = append(suggestions, cmd.Name()) + } + } + } + } + return suggestions +} + +// VisitParents visits all parents of the command and invokes fn on each parent. +func (c *Command) VisitParents(fn func(*Command)) { + if c.HasParent() { + fn(c.Parent()) + c.Parent().VisitParents(fn) + } +} + +// Root finds root command. +func (c *Command) Root() *Command { + if c.HasParent() { + return c.Parent().Root() + } + return c +} + +// ArgsLenAtDash will return the length of c.Flags().Args at the moment +// when a -- was found during args parsing. +func (c *Command) ArgsLenAtDash() int { + return c.Flags().ArgsLenAtDash() +} + +func (c *Command) execute(a []string) (err error) { + if c == nil { + return fmt.Errorf("Called Execute() on a nil Command") + } + + if len(c.Deprecated) > 0 { + c.Printf("Command %q is deprecated, %s\n", c.Name(), c.Deprecated) + } + + // initialize help and version flag at the last point possible to allow for user + // overriding + c.InitDefaultHelpFlag() + c.InitDefaultVersionFlag() + + err = c.ParseFlags(a) + if err != nil { + return c.FlagErrorFunc()(c, err) + } + + // If help is called, regardless of other flags, return we want help. + // Also say we need help if the command isn't runnable. + helpVal, err := c.Flags().GetBool("help") + if err != nil { + // should be impossible to get here as we always declare a help + // flag in InitDefaultHelpFlag() + c.Println("\"help\" flag declared as non-bool. Please correct your code") + return err + } + + if helpVal { + return flag.ErrHelp + } + + // for back-compat, only add version flag behavior if version is defined + if c.Version != "" { + versionVal, err := c.Flags().GetBool("version") + if err != nil { + c.Println("\"version\" flag declared as non-bool. 
Please correct your code") + return err + } + if versionVal { + err := tmpl(c.OutOrStdout(), c.VersionTemplate(), c) + if err != nil { + c.Println(err) + } + return err + } + } + + if !c.Runnable() { + return flag.ErrHelp + } + + c.preRun() + + argWoFlags := c.Flags().Args() + if c.DisableFlagParsing { + argWoFlags = a + } + + if err := c.ValidateArgs(argWoFlags); err != nil { + return err + } + + for p := c; p != nil; p = p.Parent() { + if p.PersistentPreRunE != nil { + if err := p.PersistentPreRunE(c, argWoFlags); err != nil { + return err + } + break + } else if p.PersistentPreRun != nil { + p.PersistentPreRun(c, argWoFlags) + break + } + } + if c.PreRunE != nil { + if err := c.PreRunE(c, argWoFlags); err != nil { + return err + } + } else if c.PreRun != nil { + c.PreRun(c, argWoFlags) + } + + if err := c.validateRequiredFlags(); err != nil { + return err + } + if c.RunE != nil { + if err := c.RunE(c, argWoFlags); err != nil { + return err + } + } else { + c.Run(c, argWoFlags) + } + if c.PostRunE != nil { + if err := c.PostRunE(c, argWoFlags); err != nil { + return err + } + } else if c.PostRun != nil { + c.PostRun(c, argWoFlags) + } + for p := c; p != nil; p = p.Parent() { + if p.PersistentPostRunE != nil { + if err := p.PersistentPostRunE(c, argWoFlags); err != nil { + return err + } + break + } else if p.PersistentPostRun != nil { + p.PersistentPostRun(c, argWoFlags) + break + } + } + + return nil +} + +func (c *Command) preRun() { + for _, x := range initializers { + x() + } +} + +// Execute uses the args (os.Args[1:] by default) +// and run through the command tree finding appropriate matches +// for commands and then corresponding flags. +func (c *Command) Execute() error { + _, err := c.ExecuteC() + return err +} + +// ExecuteC executes the command. 
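+// Illustrative sketch of typical use:
+//
+//	root := &Command{Use: "app"}
+//	sub := &Command{Use: "sub", Run: func(cmd *Command, args []string) {}}
+//	root.AddCommand(sub)
+//	root.SetArgs([]string{"sub"})
+//	cmd, err := root.ExecuteC() // cmd == sub, err == nil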
+func (c *Command) ExecuteC() (cmd *Command, err error) { + // Regardless of what command execute is called on, run on Root only + if c.HasParent() { + return c.Root().ExecuteC() + } + + // windows hook + if preExecHookFn != nil { + preExecHookFn(c) + } + + // initialize help as the last point possible to allow for user + // overriding + c.InitDefaultHelpCmd() + + var args []string + + // Workaround FAIL with "go test -v" or "cobra.test -test.v", see #155 + if c.args == nil && filepath.Base(os.Args[0]) != "cobra.test" { + args = os.Args[1:] + } else { + args = c.args + } + + var flags []string + if c.TraverseChildren { + cmd, flags, err = c.Traverse(args) + } else { + cmd, flags, err = c.Find(args) + } + if err != nil { + // If found parse to a subcommand and then failed, talk about the subcommand + if cmd != nil { + c = cmd + } + if !c.SilenceErrors { + c.Println("Error:", err.Error()) + c.Printf("Run '%v --help' for usage.\n", c.CommandPath()) + } + return c, err + } + + cmd.commandCalledAs.called = true + if cmd.commandCalledAs.name == "" { + cmd.commandCalledAs.name = cmd.Name() + } + + err = cmd.execute(flags) + if err != nil { + // Always show help if requested, even if SilenceErrors is in + // effect + if err == flag.ErrHelp { + cmd.HelpFunc()(cmd, args) + return cmd, nil + } + + // If root command has SilentErrors flagged, + // all subcommands should respect it + if !cmd.SilenceErrors && !c.SilenceErrors { + c.Println("Error:", err.Error()) + } + + // If root command has SilentUsage flagged, + // all subcommands should respect it + if !cmd.SilenceUsage && !c.SilenceUsage { + c.Println(cmd.UsageString()) + } + } + return cmd, err +} + +func (c *Command) ValidateArgs(args []string) error { + if c.Args == nil { + return nil + } + return c.Args(c, args) +} + +func (c *Command) validateRequiredFlags() error { + flags := c.Flags() + missingFlagNames := []string{} + flags.VisitAll(func(pflag *flag.Flag) { + requiredAnnotation, found := pflag.Annotations[BashCompOneRequiredFlag] + if !found { + return + } + if (requiredAnnotation[0] == "true") && !pflag.Changed { + missingFlagNames = append(missingFlagNames, pflag.Name) + } + }) + + if len(missingFlagNames) > 0 { + return fmt.Errorf(`required flag(s) "%s" not set`, strings.Join(missingFlagNames, `", "`)) + } + return nil +} + +// InitDefaultHelpFlag adds default help flag to c. +// It is called automatically by executing the c or by calling help and usage. +// If c already has help flag, it will do nothing. +func (c *Command) InitDefaultHelpFlag() { + c.mergePersistentFlags() + if c.Flags().Lookup("help") == nil { + usage := "help for " + if c.Name() == "" { + usage += "this command" + } else { + usage += c.Name() + } + c.Flags().BoolP("help", "h", false, usage) + } +} + +// InitDefaultVersionFlag adds default version flag to c. +// It is called automatically by executing the c. +// If c already has a version flag, it will do nothing. +// If c.Version is empty, it will do nothing. +func (c *Command) InitDefaultVersionFlag() { + if c.Version == "" { + return + } + + c.mergePersistentFlags() + if c.Flags().Lookup("version") == nil { + usage := "version for " + if c.Name() == "" { + usage += "this command" + } else { + usage += c.Name() + } + c.Flags().Bool("version", false, usage) + } +} + +// InitDefaultHelpCmd adds default help command to c. +// It is called automatically by executing the c or by calling help and usage. +// If c already has help command or c has no subcommands, it will do nothing. 
+func (c *Command) InitDefaultHelpCmd() { + if !c.HasSubCommands() { + return + } + + if c.helpCommand == nil { + c.helpCommand = &Command{ + Use: "help [command]", + Short: "Help about any command", + Long: `Help provides help for any command in the application. +Simply type ` + c.Name() + ` help [path to command] for full details.`, + + Run: func(c *Command, args []string) { + cmd, _, e := c.Root().Find(args) + if cmd == nil || e != nil { + c.Printf("Unknown help topic %#q\n", args) + c.Root().Usage() + } else { + cmd.InitDefaultHelpFlag() // make possible 'help' flag to be shown + cmd.Help() + } + }, + } + } + c.RemoveCommand(c.helpCommand) + c.AddCommand(c.helpCommand) +} + +// ResetCommands delete parent, subcommand and help command from c. +func (c *Command) ResetCommands() { + c.parent = nil + c.commands = nil + c.helpCommand = nil + c.parentsPflags = nil +} + +// Sorts commands by their names. +type commandSorterByName []*Command + +func (c commandSorterByName) Len() int { return len(c) } +func (c commandSorterByName) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c commandSorterByName) Less(i, j int) bool { return c[i].Name() < c[j].Name() } + +// Commands returns a sorted slice of child commands. +func (c *Command) Commands() []*Command { + // do not sort commands if it already sorted or sorting was disabled + if EnableCommandSorting && !c.commandsAreSorted { + sort.Sort(commandSorterByName(c.commands)) + c.commandsAreSorted = true + } + return c.commands +} + +// AddCommand adds one or more commands to this parent command. +func (c *Command) AddCommand(cmds ...*Command) { + for i, x := range cmds { + if cmds[i] == c { + panic("Command can't be a child of itself") + } + cmds[i].parent = c + // update max lengths + usageLen := len(x.Use) + if usageLen > c.commandsMaxUseLen { + c.commandsMaxUseLen = usageLen + } + commandPathLen := len(x.CommandPath()) + if commandPathLen > c.commandsMaxCommandPathLen { + c.commandsMaxCommandPathLen = commandPathLen + } + nameLen := len(x.Name()) + if nameLen > c.commandsMaxNameLen { + c.commandsMaxNameLen = nameLen + } + // If global normalization function exists, update all children + if c.globNormFunc != nil { + x.SetGlobalNormalizationFunc(c.globNormFunc) + } + c.commands = append(c.commands, x) + c.commandsAreSorted = false + } +} + +// RemoveCommand removes one or more commands from a parent command. +func (c *Command) RemoveCommand(cmds ...*Command) { + commands := []*Command{} +main: + for _, command := range c.commands { + for _, cmd := range cmds { + if command == cmd { + command.parent = nil + continue main + } + } + commands = append(commands, command) + } + c.commands = commands + // recompute all lengths + c.commandsMaxUseLen = 0 + c.commandsMaxCommandPathLen = 0 + c.commandsMaxNameLen = 0 + for _, command := range c.commands { + usageLen := len(command.Use) + if usageLen > c.commandsMaxUseLen { + c.commandsMaxUseLen = usageLen + } + commandPathLen := len(command.CommandPath()) + if commandPathLen > c.commandsMaxCommandPathLen { + c.commandsMaxCommandPathLen = commandPathLen + } + nameLen := len(command.Name()) + if nameLen > c.commandsMaxNameLen { + c.commandsMaxNameLen = nameLen + } + } +} + +// Print is a convenience method to Print to the defined output, fallback to Stderr if not set. +func (c *Command) Print(i ...interface{}) { + fmt.Fprint(c.OutOrStderr(), i...) +} + +// Println is a convenience method to Println to the defined output, fallback to Stderr if not set. 
+func (c *Command) Println(i ...interface{}) { + c.Print(fmt.Sprintln(i...)) +} + +// Printf is a convenience method to Printf to the defined output, fallback to Stderr if not set. +func (c *Command) Printf(format string, i ...interface{}) { + c.Print(fmt.Sprintf(format, i...)) +} + +// CommandPath returns the full path to this command. +func (c *Command) CommandPath() string { + if c.HasParent() { + return c.Parent().CommandPath() + " " + c.Name() + } + return c.Name() +} + +// UseLine puts out the full usage for a given command (including parents). +func (c *Command) UseLine() string { + var useline string + if c.HasParent() { + useline = c.parent.CommandPath() + " " + c.Use + } else { + useline = c.Use + } + if c.DisableFlagsInUseLine { + return useline + } + if c.HasAvailableFlags() && !strings.Contains(useline, "[flags]") { + useline += " [flags]" + } + return useline +} + +// DebugFlags used to determine which flags have been assigned to which commands +// and which persist. +func (c *Command) DebugFlags() { + c.Println("DebugFlags called on", c.Name()) + var debugflags func(*Command) + + debugflags = func(x *Command) { + if x.HasFlags() || x.HasPersistentFlags() { + c.Println(x.Name()) + } + if x.HasFlags() { + x.flags.VisitAll(func(f *flag.Flag) { + if x.HasPersistentFlags() && x.persistentFlag(f.Name) != nil { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [LP]") + } else { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [L]") + } + }) + } + if x.HasPersistentFlags() { + x.pflags.VisitAll(func(f *flag.Flag) { + if x.HasFlags() { + if x.flags.Lookup(f.Name) == nil { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [P]") + } + } else { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [P]") + } + }) + } + c.Println(x.flagErrorBuf) + if x.HasSubCommands() { + for _, y := range x.commands { + debugflags(y) + } + } + } + + debugflags(c) +} + +// Name returns the command's name: the first word in the use line. +func (c *Command) Name() string { + name := c.Use + i := strings.Index(name, " ") + if i >= 0 { + name = name[:i] + } + return name +} + +// HasAlias determines if a given string is an alias of the command. +func (c *Command) HasAlias(s string) bool { + for _, a := range c.Aliases { + if a == s { + return true + } + } + return false +} + +// CalledAs returns the command name or alias that was used to invoke +// this command or an empty string if the command has not been called. +func (c *Command) CalledAs() string { + if c.commandCalledAs.called { + return c.commandCalledAs.name + } + return "" +} + +// hasNameOrAliasPrefix returns true if the Name or any of aliases start +// with prefix +func (c *Command) hasNameOrAliasPrefix(prefix string) bool { + if strings.HasPrefix(c.Name(), prefix) { + c.commandCalledAs.name = c.Name() + return true + } + for _, alias := range c.Aliases { + if strings.HasPrefix(alias, prefix) { + c.commandCalledAs.name = alias + return true + } + } + return false +} + +// NameAndAliases returns a list of the command name and all aliases +func (c *Command) NameAndAliases() string { + return strings.Join(append([]string{c.Name()}, c.Aliases...), ", ") +} + +// HasExample determines if the command has example. +func (c *Command) HasExample() bool { + return len(c.Example) > 0 +} + +// Runnable determines if the command is itself runnable. 
+func (c *Command) Runnable() bool { + return c.Run != nil || c.RunE != nil +} + +// HasSubCommands determines if the command has children commands. +func (c *Command) HasSubCommands() bool { + return len(c.commands) > 0 +} + +// IsAvailableCommand determines if a command is available as a non-help command +// (this includes all non deprecated/hidden commands). +func (c *Command) IsAvailableCommand() bool { + if len(c.Deprecated) != 0 || c.Hidden { + return false + } + + if c.HasParent() && c.Parent().helpCommand == c { + return false + } + + if c.Runnable() || c.HasAvailableSubCommands() { + return true + } + + return false +} + +// IsAdditionalHelpTopicCommand determines if a command is an additional +// help topic command; additional help topic command is determined by the +// fact that it is NOT runnable/hidden/deprecated, and has no sub commands that +// are runnable/hidden/deprecated. +// Concrete example: https://github.com/spf13/cobra/issues/393#issuecomment-282741924. +func (c *Command) IsAdditionalHelpTopicCommand() bool { + // if a command is runnable, deprecated, or hidden it is not a 'help' command + if c.Runnable() || len(c.Deprecated) != 0 || c.Hidden { + return false + } + + // if any non-help sub commands are found, the command is not a 'help' command + for _, sub := range c.commands { + if !sub.IsAdditionalHelpTopicCommand() { + return false + } + } + + // the command either has no sub commands, or no non-help sub commands + return true +} + +// HasHelpSubCommands determines if a command has any available 'help' sub commands +// that need to be shown in the usage/help default template under 'additional help +// topics'. +func (c *Command) HasHelpSubCommands() bool { + // return true on the first found available 'help' sub command + for _, sub := range c.commands { + if sub.IsAdditionalHelpTopicCommand() { + return true + } + } + + // the command either has no sub commands, or no available 'help' sub commands + return false +} + +// HasAvailableSubCommands determines if a command has available sub commands that +// need to be shown in the usage/help default template under 'available commands'. +func (c *Command) HasAvailableSubCommands() bool { + // return true on the first found available (non deprecated/help/hidden) + // sub command + for _, sub := range c.commands { + if sub.IsAvailableCommand() { + return true + } + } + + // the command either has no sub commands, or no available (non deprecated/help/hidden) + // sub commands + return false +} + +// HasParent determines if the command is a child command. +func (c *Command) HasParent() bool { + return c.parent != nil +} + +// GlobalNormalizationFunc returns the global normalization function or nil if it doesn't exist. +func (c *Command) GlobalNormalizationFunc() func(f *flag.FlagSet, name string) flag.NormalizedName { + return c.globNormFunc +} + +// Flags returns the complete FlagSet that applies +// to this command (local and persistent declared here and by all parents). +func (c *Command) Flags() *flag.FlagSet { + if c.flags == nil { + c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.flags.SetOutput(c.flagErrorBuf) + } + + return c.flags +} + +// LocalNonPersistentFlags are flags specific to this command which will NOT persist to subcommands. 
+func (c *Command) LocalNonPersistentFlags() *flag.FlagSet { + persistentFlags := c.PersistentFlags() + + out := flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.LocalFlags().VisitAll(func(f *flag.Flag) { + if persistentFlags.Lookup(f.Name) == nil { + out.AddFlag(f) + } + }) + return out +} + +// LocalFlags returns the local FlagSet specifically set in the current command. +func (c *Command) LocalFlags() *flag.FlagSet { + c.mergePersistentFlags() + + if c.lflags == nil { + c.lflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.lflags.SetOutput(c.flagErrorBuf) + } + c.lflags.SortFlags = c.Flags().SortFlags + if c.globNormFunc != nil { + c.lflags.SetNormalizeFunc(c.globNormFunc) + } + + addToLocal := func(f *flag.Flag) { + if c.lflags.Lookup(f.Name) == nil && c.parentsPflags.Lookup(f.Name) == nil { + c.lflags.AddFlag(f) + } + } + c.Flags().VisitAll(addToLocal) + c.PersistentFlags().VisitAll(addToLocal) + return c.lflags +} + +// InheritedFlags returns all flags which were inherited from parents commands. +func (c *Command) InheritedFlags() *flag.FlagSet { + c.mergePersistentFlags() + + if c.iflags == nil { + c.iflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.iflags.SetOutput(c.flagErrorBuf) + } + + local := c.LocalFlags() + if c.globNormFunc != nil { + c.iflags.SetNormalizeFunc(c.globNormFunc) + } + + c.parentsPflags.VisitAll(func(f *flag.Flag) { + if c.iflags.Lookup(f.Name) == nil && local.Lookup(f.Name) == nil { + c.iflags.AddFlag(f) + } + }) + return c.iflags +} + +// NonInheritedFlags returns all flags which were not inherited from parent commands. +func (c *Command) NonInheritedFlags() *flag.FlagSet { + return c.LocalFlags() +} + +// PersistentFlags returns the persistent FlagSet specifically set in the current command. +func (c *Command) PersistentFlags() *flag.FlagSet { + if c.pflags == nil { + c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.pflags.SetOutput(c.flagErrorBuf) + } + return c.pflags +} + +// ResetFlags deletes all flags from command. +func (c *Command) ResetFlags() { + c.flagErrorBuf = new(bytes.Buffer) + c.flagErrorBuf.Reset() + c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.flags.SetOutput(c.flagErrorBuf) + c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.pflags.SetOutput(c.flagErrorBuf) + + c.lflags = nil + c.iflags = nil + c.parentsPflags = nil +} + +// HasFlags checks if the command contains any flags (local plus persistent from the entire structure). +func (c *Command) HasFlags() bool { + return c.Flags().HasFlags() +} + +// HasPersistentFlags checks if the command contains persistent flags. +func (c *Command) HasPersistentFlags() bool { + return c.PersistentFlags().HasFlags() +} + +// HasLocalFlags checks if the command has flags specifically declared locally. +func (c *Command) HasLocalFlags() bool { + return c.LocalFlags().HasFlags() +} + +// HasInheritedFlags checks if the command has flags inherited from its parent command. +func (c *Command) HasInheritedFlags() bool { + return c.InheritedFlags().HasFlags() +} + +// HasAvailableFlags checks if the command contains any flags (local plus persistent from the entire +// structure) which are not hidden or deprecated. 
+func (c *Command) HasAvailableFlags() bool { + return c.Flags().HasAvailableFlags() +} + +// HasAvailablePersistentFlags checks if the command contains persistent flags which are not hidden or deprecated. +func (c *Command) HasAvailablePersistentFlags() bool { + return c.PersistentFlags().HasAvailableFlags() +} + +// HasAvailableLocalFlags checks if the command has flags specifically declared locally which are not hidden +// or deprecated. +func (c *Command) HasAvailableLocalFlags() bool { + return c.LocalFlags().HasAvailableFlags() +} + +// HasAvailableInheritedFlags checks if the command has flags inherited from its parent command which are +// not hidden or deprecated. +func (c *Command) HasAvailableInheritedFlags() bool { + return c.InheritedFlags().HasAvailableFlags() +} + +// Flag climbs up the command tree looking for matching flag. +func (c *Command) Flag(name string) (flag *flag.Flag) { + flag = c.Flags().Lookup(name) + + if flag == nil { + flag = c.persistentFlag(name) + } + + return +} + +// Recursively find matching persistent flag. +func (c *Command) persistentFlag(name string) (flag *flag.Flag) { + if c.HasPersistentFlags() { + flag = c.PersistentFlags().Lookup(name) + } + + if flag == nil { + c.updateParentsPflags() + flag = c.parentsPflags.Lookup(name) + } + return +} + +// ParseFlags parses persistent flag tree and local flags. +func (c *Command) ParseFlags(args []string) error { + if c.DisableFlagParsing { + return nil + } + + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + beforeErrorBufLen := c.flagErrorBuf.Len() + c.mergePersistentFlags() + + //do it here after merging all flags and just before parse + c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist) + + err := c.Flags().Parse(args) + // Print warnings if they occurred (e.g. deprecated flag messages). + if c.flagErrorBuf.Len()-beforeErrorBufLen > 0 && err == nil { + c.Print(c.flagErrorBuf.String()) + } + + return err +} + +// Parent returns a commands parent command. +func (c *Command) Parent() *Command { + return c.parent +} + +// mergePersistentFlags merges c.PersistentFlags() to c.Flags() +// and adds missing persistent flags of all parents. +func (c *Command) mergePersistentFlags() { + c.updateParentsPflags() + c.Flags().AddFlagSet(c.PersistentFlags()) + c.Flags().AddFlagSet(c.parentsPflags) +} + +// updateParentsPflags updates c.parentsPflags by adding +// new persistent flags of all parents. +// If c.parentsPflags == nil, it makes new. 
+func (c *Command) updateParentsPflags() { + if c.parentsPflags == nil { + c.parentsPflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.parentsPflags.SetOutput(c.flagErrorBuf) + c.parentsPflags.SortFlags = false + } + + if c.globNormFunc != nil { + c.parentsPflags.SetNormalizeFunc(c.globNormFunc) + } + + c.Root().PersistentFlags().AddFlagSet(flag.CommandLine) + + c.VisitParents(func(parent *Command) { + c.parentsPflags.AddFlagSet(parent.PersistentFlags()) + }) +} diff --git a/vendor/github.com/spf13/cobra/command_notwin.go b/vendor/github.com/spf13/cobra/command_notwin.go new file mode 100644 index 0000000..6159c1c --- /dev/null +++ b/vendor/github.com/spf13/cobra/command_notwin.go @@ -0,0 +1,5 @@ +// +build !windows + +package cobra + +var preExecHookFn func(*Command) diff --git a/vendor/github.com/spf13/cobra/command_win.go b/vendor/github.com/spf13/cobra/command_win.go new file mode 100644 index 0000000..edec728 --- /dev/null +++ b/vendor/github.com/spf13/cobra/command_win.go @@ -0,0 +1,20 @@ +// +build windows + +package cobra + +import ( + "os" + "time" + + "github.com/inconshreveable/mousetrap" +) + +var preExecHookFn = preExecHook + +func preExecHook(c *Command) { + if MousetrapHelpText != "" && mousetrap.StartedByExplorer() { + c.Print(MousetrapHelpText) + time.Sleep(5 * time.Second) + os.Exit(1) + } +} diff --git a/vendor/github.com/spf13/cobra/doc/man_docs.go b/vendor/github.com/spf13/cobra/doc/man_docs.go new file mode 100644 index 0000000..baa4811 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/man_docs.go @@ -0,0 +1,236 @@ +// Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/cpuguy83/go-md2man/md2man" + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +// GenManTree will generate a man page for this command and all descendants +// in the directory given. The header may be nil. This function may not work +// correctly if your command names have `-` in them. If you have `cmd` with two +// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third` +// it is undefined which help output will be in the file `cmd-sub-third.1`. +func GenManTree(cmd *cobra.Command, header *GenManHeader, dir string) error { + return GenManTreeFromOpts(cmd, GenManTreeOptions{ + Header: header, + Path: dir, + CommandSeparator: "-", + }) +} + +// GenManTreeFromOpts generates a man page for the command and all descendants. +// The pages are written to the opts.Path directory. 
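// A minimal sketch (not from the vendored sources) of driving the man-page generator
// defined in this file; the command names, header values and the "./man" output
// directory are illustrative assumptions.
package main

import (
	"log"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	root := &cobra.Command{Use: "app", Short: "example application"}
	sub := &cobra.Command{Use: "sub", Short: "a subcommand", Run: func(*cobra.Command, []string) {}}
	root.AddCommand(sub)

	header := &doc.GenManHeader{Title: "APP", Section: "1"}

	// Writes one roff file per command under ./man, e.g. app.1 and app-sub.1;
	// GenManTree joins subcommand names with "-" via CommandSeparator.
	if err := doc.GenManTree(root, header, "./man"); err != nil {
		log.Fatal(err)
	}
}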
+func GenManTreeFromOpts(cmd *cobra.Command, opts GenManTreeOptions) error { + header := opts.Header + if header == nil { + header = &GenManHeader{} + } + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenManTreeFromOpts(c, opts); err != nil { + return err + } + } + section := "1" + if header.Section != "" { + section = header.Section + } + + separator := "_" + if opts.CommandSeparator != "" { + separator = opts.CommandSeparator + } + basename := strings.Replace(cmd.CommandPath(), " ", separator, -1) + filename := filepath.Join(opts.Path, basename+"."+section) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + headerCopy := *header + return GenMan(cmd, &headerCopy, f) +} + +// GenManTreeOptions is the options for generating the man pages. +// Used only in GenManTreeFromOpts. +type GenManTreeOptions struct { + Header *GenManHeader + Path string + CommandSeparator string +} + +// GenManHeader is a lot like the .TH header at the start of man pages. These +// include the title, section, date, source, and manual. We will use the +// current time if Date if unset and will use "Auto generated by spf13/cobra" +// if the Source is unset. +type GenManHeader struct { + Title string + Section string + Date *time.Time + date string + Source string + Manual string +} + +// GenMan will generate a man page for the given command and write it to +// w. The header argument may be nil, however obviously w may not. +func GenMan(cmd *cobra.Command, header *GenManHeader, w io.Writer) error { + if header == nil { + header = &GenManHeader{} + } + fillHeader(header, cmd.CommandPath()) + + b := genMan(cmd, header) + _, err := w.Write(md2man.Render(b)) + return err +} + +func fillHeader(header *GenManHeader, name string) { + if header.Title == "" { + header.Title = strings.ToUpper(strings.Replace(name, " ", "\\-", -1)) + } + if header.Section == "" { + header.Section = "1" + } + if header.Date == nil { + now := time.Now() + header.Date = &now + } + header.date = (*header.Date).Format("Jan 2006") + if header.Source == "" { + header.Source = "Auto generated by spf13/cobra" + } +} + +func manPreamble(buf *bytes.Buffer, header *GenManHeader, cmd *cobra.Command, dashedName string) { + description := cmd.Long + if len(description) == 0 { + description = cmd.Short + } + + buf.WriteString(fmt.Sprintf(`%% %s(%s)%s +%% %s +%% %s +# NAME +`, header.Title, header.Section, header.date, header.Source, header.Manual)) + buf.WriteString(fmt.Sprintf("%s \\- %s\n\n", dashedName, cmd.Short)) + buf.WriteString("# SYNOPSIS\n") + buf.WriteString(fmt.Sprintf("**%s**\n\n", cmd.UseLine())) + buf.WriteString("# DESCRIPTION\n") + buf.WriteString(description + "\n\n") +} + +func manPrintFlags(buf *bytes.Buffer, flags *pflag.FlagSet) { + flags.VisitAll(func(flag *pflag.Flag) { + if len(flag.Deprecated) > 0 || flag.Hidden { + return + } + format := "" + if len(flag.Shorthand) > 0 && len(flag.ShorthandDeprecated) == 0 { + format = fmt.Sprintf("**-%s**, **--%s**", flag.Shorthand, flag.Name) + } else { + format = fmt.Sprintf("**--%s**", flag.Name) + } + if len(flag.NoOptDefVal) > 0 { + format += "[" + } + if flag.Value.Type() == "string" { + // put quotes on the value + format += "=%q" + } else { + format += "=%s" + } + if len(flag.NoOptDefVal) > 0 { + format += "]" + } + format += "\n\t%s\n\n" + buf.WriteString(fmt.Sprintf(format, flag.DefValue, flag.Usage)) + }) +} + +func manPrintOptions(buf *bytes.Buffer, command 
*cobra.Command) { + flags := command.NonInheritedFlags() + if flags.HasAvailableFlags() { + buf.WriteString("# OPTIONS\n") + manPrintFlags(buf, flags) + buf.WriteString("\n") + } + flags = command.InheritedFlags() + if flags.HasAvailableFlags() { + buf.WriteString("# OPTIONS INHERITED FROM PARENT COMMANDS\n") + manPrintFlags(buf, flags) + buf.WriteString("\n") + } +} + +func genMan(cmd *cobra.Command, header *GenManHeader) []byte { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + // something like `rootcmd-subcmd1-subcmd2` + dashCommandName := strings.Replace(cmd.CommandPath(), " ", "-", -1) + + buf := new(bytes.Buffer) + + manPreamble(buf, header, cmd, dashCommandName) + manPrintOptions(buf, cmd) + if len(cmd.Example) > 0 { + buf.WriteString("# EXAMPLE\n") + buf.WriteString(fmt.Sprintf("```\n%s\n```\n", cmd.Example)) + } + if hasSeeAlso(cmd) { + buf.WriteString("# SEE ALSO\n") + seealsos := make([]string, 0) + if cmd.HasParent() { + parentPath := cmd.Parent().CommandPath() + dashParentPath := strings.Replace(parentPath, " ", "-", -1) + seealso := fmt.Sprintf("**%s(%s)**", dashParentPath, header.Section) + seealsos = append(seealsos, seealso) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + children := cmd.Commands() + sort.Sort(byName(children)) + for _, c := range children { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + seealso := fmt.Sprintf("**%s-%s(%s)**", dashCommandName, c.Name(), header.Section) + seealsos = append(seealsos, seealso) + } + buf.WriteString(strings.Join(seealsos, ", ") + "\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString(fmt.Sprintf("# HISTORY\n%s Auto generated by spf13/cobra\n", header.Date.Format("2-Jan-2006"))) + } + return buf.Bytes() +} diff --git a/vendor/github.com/spf13/cobra/doc/md_docs.go b/vendor/github.com/spf13/cobra/doc/md_docs.go new file mode 100644 index 0000000..d76f6d5 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/md_docs.go @@ -0,0 +1,159 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/spf13/cobra" +) + +func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("### Options\n\n```\n") + flags.PrintDefaults() + buf.WriteString("```\n\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("### Options inherited from parent commands\n\n```\n") + parentFlags.PrintDefaults() + buf.WriteString("```\n\n") + } + return nil +} + +// GenMarkdown creates markdown output. 
+func GenMarkdown(cmd *cobra.Command, w io.Writer) error { + return GenMarkdownCustom(cmd, w, func(s string) string { return s }) +} + +// GenMarkdownCustom creates custom markdown output. +func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + short := cmd.Short + long := cmd.Long + if len(long) == 0 { + long = short + } + + buf.WriteString("## " + name + "\n\n") + buf.WriteString(short + "\n\n") + buf.WriteString("### Synopsis\n\n") + buf.WriteString(long + "\n\n") + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("### Examples\n\n") + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.Example)) + } + + if err := printOptions(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("### SEE ALSO\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + link := pname + ".md" + link = strings.Replace(link, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", pname, linkHandler(link), parent.Short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + link := cname + ".md" + link = strings.Replace(link, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", cname, linkHandler(link), child.Short)) + } + buf.WriteString("\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString("###### Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "\n") + } + _, err := buf.WriteTo(w) + return err +} + +// GenMarkdownTree will generate a markdown page for this command and all +// descendants in the directory given. The header may be nil. +// This function may not work correctly if your command names have `-` in them. +// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. +func GenMarkdownTree(cmd *cobra.Command, dir string) error { + identity := func(s string) string { return s } + emptyStr := func(s string) string { return "" } + return GenMarkdownTreeCustom(cmd, dir, emptyStr, identity) +} + +// GenMarkdownTreeCustom is the the same as GenMarkdownTree, but +// with custom filePrepender and linkHandler. 
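// A minimal sketch (not from the vendored sources) of the markdown generator above;
// GenReSTTree and GenYamlTree, which appear later in this patch, follow the same
// tree-walking pattern. The command names and the "./docs" directory are illustrative.
package main

import (
	"log"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	root := &cobra.Command{Use: "app", Short: "example application"}
	sub := &cobra.Command{Use: "sub", Short: "a subcommand", Run: func(*cobra.Command, []string) {}}
	root.AddCommand(sub)

	// Produces app.md and app_sub.md, each with Synopsis, Options and SEE ALSO sections.
	if err := doc.GenMarkdownTree(root, "./docs"); err != nil {
		log.Fatal(err)
	}
}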
+func GenMarkdownTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenMarkdownTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".md" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenMarkdownCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs.go b/vendor/github.com/spf13/cobra/doc/rest_docs.go new file mode 100644 index 0000000..051d8dc --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/rest_docs.go @@ -0,0 +1,185 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/spf13/cobra" +) + +func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("Options\n") + buf.WriteString("~~~~~~~\n\n::\n\n") + flags.PrintDefaults() + buf.WriteString("\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("Options inherited from parent commands\n") + buf.WriteString("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n::\n\n") + parentFlags.PrintDefaults() + buf.WriteString("\n") + } + return nil +} + +// linkHandler for default ReST hyperlink markup +func defaultLinkHandler(name, ref string) string { + return fmt.Sprintf("`%s <%s.rst>`_", name, ref) +} + +// GenReST creates reStructured Text output. +func GenReST(cmd *cobra.Command, w io.Writer) error { + return GenReSTCustom(cmd, w, defaultLinkHandler) +} + +// GenReSTCustom creates custom reStructured Text output. +func GenReSTCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string, string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + short := cmd.Short + long := cmd.Long + if len(long) == 0 { + long = short + } + ref := strings.Replace(name, " ", "_", -1) + + buf.WriteString(".. 
_" + ref + ":\n\n") + buf.WriteString(name + "\n") + buf.WriteString(strings.Repeat("-", len(name)) + "\n\n") + buf.WriteString(short + "\n\n") + buf.WriteString("Synopsis\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString("\n" + long + "\n\n") + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("::\n\n %s\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("Examples\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString(fmt.Sprintf("::\n\n%s\n\n", indentString(cmd.Example, " "))) + } + + if err := printOptionsReST(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("SEE ALSO\n") + buf.WriteString("~~~~~~~~\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + ref = strings.Replace(pname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(pname, ref), parent.Short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + ref = strings.Replace(cname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(cname, ref), child.Short)) + } + buf.WriteString("\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString("*Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "*\n") + } + _, err := buf.WriteTo(w) + return err +} + +// GenReSTTree will generate a ReST page for this command and all +// descendants in the directory given. +// This function may not work correctly if your command names have `-` in them. +// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. +func GenReSTTree(cmd *cobra.Command, dir string) error { + emptyStr := func(s string) string { return "" } + return GenReSTTreeCustom(cmd, dir, emptyStr, defaultLinkHandler) +} + +// GenReSTTreeCustom is the the same as GenReSTTree, but +// with custom filePrepender and linkHandler. +func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenReSTTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".rst" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenReSTCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +// adapted from: https://github.com/kr/text/blob/main/indent.go +func indentString(s, p string) string { + var res []byte + b := []byte(s) + prefix := []byte(p) + bol := true + for _, c := range b { + if bol && c != '\n' { + res = append(res, prefix...) 
+ } + res = append(res, c) + bol = c == '\n' + } + return string(res) +} diff --git a/vendor/github.com/spf13/cobra/doc/util.go b/vendor/github.com/spf13/cobra/doc/util.go new file mode 100644 index 0000000..8d3dbec --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/util.go @@ -0,0 +1,51 @@ +// Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "strings" + + "github.com/spf13/cobra" +) + +// Test to see if we have a reason to print See Also information in docs +// Basically this is a test for a parent commend or a subcommand which is +// both not deprecated and not the autogenerated help command. +func hasSeeAlso(cmd *cobra.Command) bool { + if cmd.HasParent() { + return true + } + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + return true + } + return false +} + +// Temporary workaround for yaml lib generating incorrect yaml with long strings +// that do not contain \n. +func forceMultiLine(s string) string { + if len(s) > 60 && !strings.Contains(s, "\n") { + s = s + "\n" + } + return s +} + +type byName []*cobra.Command + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } diff --git a/vendor/github.com/spf13/cobra/doc/yaml_docs.go b/vendor/github.com/spf13/cobra/doc/yaml_docs.go new file mode 100644 index 0000000..ea00af0 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/yaml_docs.go @@ -0,0 +1,169 @@ +// Copyright 2016 French Ben. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "gopkg.in/yaml.v2" +) + +type cmdOption struct { + Name string + Shorthand string `yaml:",omitempty"` + DefaultValue string `yaml:"default_value,omitempty"` + Usage string `yaml:",omitempty"` +} + +type cmdDoc struct { + Name string + Synopsis string `yaml:",omitempty"` + Description string `yaml:",omitempty"` + Options []cmdOption `yaml:",omitempty"` + InheritedOptions []cmdOption `yaml:"inherited_options,omitempty"` + Example string `yaml:",omitempty"` + SeeAlso []string `yaml:"see_also,omitempty"` +} + +// GenYamlTree creates yaml structured ref files for this command and all descendants +// in the directory given. 
This function may not work +// correctly if your command names have `-` in them. If you have `cmd` with two +// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third` +// it is undefined which help output will be in the file `cmd-sub-third.1`. +func GenYamlTree(cmd *cobra.Command, dir string) error { + identity := func(s string) string { return s } + emptyStr := func(s string) string { return "" } + return GenYamlTreeCustom(cmd, dir, emptyStr, identity) +} + +// GenYamlTreeCustom creates yaml structured ref files. +func GenYamlTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenYamlTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".yaml" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenYamlCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +// GenYaml creates yaml output. +func GenYaml(cmd *cobra.Command, w io.Writer) error { + return GenYamlCustom(cmd, w, func(s string) string { return s }) +} + +// GenYamlCustom creates custom yaml output. +func GenYamlCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + yamlDoc := cmdDoc{} + yamlDoc.Name = cmd.CommandPath() + + yamlDoc.Synopsis = forceMultiLine(cmd.Short) + yamlDoc.Description = forceMultiLine(cmd.Long) + + if len(cmd.Example) > 0 { + yamlDoc.Example = cmd.Example + } + + flags := cmd.NonInheritedFlags() + if flags.HasFlags() { + yamlDoc.Options = genFlagResult(flags) + } + flags = cmd.InheritedFlags() + if flags.HasFlags() { + yamlDoc.InheritedOptions = genFlagResult(flags) + } + + if hasSeeAlso(cmd) { + result := []string{} + if cmd.HasParent() { + parent := cmd.Parent() + result = append(result, parent.CommandPath()+" - "+parent.Short) + } + children := cmd.Commands() + sort.Sort(byName(children)) + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + result = append(result, child.Name()+" - "+child.Short) + } + yamlDoc.SeeAlso = result + } + + final, err := yaml.Marshal(&yamlDoc) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + if _, err := w.Write(final); err != nil { + return err + } + return nil +} + +func genFlagResult(flags *pflag.FlagSet) []cmdOption { + var result []cmdOption + + flags.VisitAll(func(flag *pflag.Flag) { + // Todo, when we mark a shorthand is deprecated, but specify an empty message. + // The flag.ShorthandDeprecated is empty as the shorthand is deprecated. + // Using len(flag.ShorthandDeprecated) > 0 can't handle this, others are ok. 
+ if !(len(flag.ShorthandDeprecated) > 0) && len(flag.Shorthand) > 0 { + opt := cmdOption{ + flag.Name, + flag.Shorthand, + flag.DefValue, + forceMultiLine(flag.Usage), + } + result = append(result, opt) + } else { + opt := cmdOption{ + Name: flag.Name, + DefaultValue: forceMultiLine(flag.DefValue), + Usage: forceMultiLine(flag.Usage), + } + result = append(result, opt) + } + }) + + return result +} diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go new file mode 100644 index 0000000..889c22e --- /dev/null +++ b/vendor/github.com/spf13/cobra/zsh_completions.go @@ -0,0 +1,126 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" +) + +// GenZshCompletionFile generates zsh completion file. +func (c *Command) GenZshCompletionFile(filename string) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenZshCompletion(outFile) +} + +// GenZshCompletion generates a zsh completion file and writes to the passed writer. +func (c *Command) GenZshCompletion(w io.Writer) error { + buf := new(bytes.Buffer) + + writeHeader(buf, c) + maxDepth := maxDepth(c) + writeLevelMapping(buf, maxDepth) + writeLevelCases(buf, maxDepth, c) + + _, err := buf.WriteTo(w) + return err +} + +func writeHeader(w io.Writer, cmd *Command) { + fmt.Fprintf(w, "#compdef %s\n\n", cmd.Name()) +} + +func maxDepth(c *Command) int { + if len(c.Commands()) == 0 { + return 0 + } + maxDepthSub := 0 + for _, s := range c.Commands() { + subDepth := maxDepth(s) + if subDepth > maxDepthSub { + maxDepthSub = subDepth + } + } + return 1 + maxDepthSub +} + +func writeLevelMapping(w io.Writer, numLevels int) { + fmt.Fprintln(w, `_arguments \`) + for i := 1; i <= numLevels; i++ { + fmt.Fprintf(w, ` '%d: :->level%d' \`, i, i) + fmt.Fprintln(w) + } + fmt.Fprintf(w, ` '%d: :%s'`, numLevels+1, "_files") + fmt.Fprintln(w) +} + +func writeLevelCases(w io.Writer, maxDepth int, root *Command) { + fmt.Fprintln(w, "case $state in") + defer fmt.Fprintln(w, "esac") + + for i := 1; i <= maxDepth; i++ { + fmt.Fprintf(w, " level%d)\n", i) + writeLevel(w, root, i) + fmt.Fprintln(w, " ;;") + } + fmt.Fprintln(w, " *)") + fmt.Fprintln(w, " _arguments '*: :_files'") + fmt.Fprintln(w, " ;;") +} + +func writeLevel(w io.Writer, root *Command, i int) { + fmt.Fprintf(w, " case $words[%d] in\n", i) + defer fmt.Fprintln(w, " esac") + + commands := filterByLevel(root, i) + byParent := groupByParent(commands) + + for p, c := range byParent { + names := names(c) + fmt.Fprintf(w, " %s)\n", p) + fmt.Fprintf(w, " _arguments '%d: :(%s)'\n", i, strings.Join(names, " ")) + fmt.Fprintln(w, " ;;") + } + fmt.Fprintln(w, " *)") + fmt.Fprintln(w, " _arguments '*: :_files'") + fmt.Fprintln(w, " ;;") + +} + +func filterByLevel(c *Command, l int) []*Command { + cs := make([]*Command, 0) + if l == 0 { + cs = append(cs, c) + return cs + } + for _, s := range c.Commands() { + cs = append(cs, filterByLevel(s, l-1)...) 
+ } + return cs +} + +func groupByParent(commands []*Command) map[string][]*Command { + m := make(map[string][]*Command) + for _, c := range commands { + parent := c.Parent() + if parent == nil { + continue + } + m[parent.Name()] = append(m[parent.Name()], c) + } + return m +} + +func names(commands []*Command) []string { + ns := make([]string, len(commands)) + for i, c := range commands { + ns[i] = c.Name() + } + return ns +} diff --git a/vendor/gonum.org/v1/gonum/.travis/deps.d/linux/01-deps.sh b/vendor/gonum.org/v1/gonum/.travis/deps.d/linux/01-deps.sh new file mode 120000 index 0000000..920836a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/deps.d/linux/01-deps.sh @@ -0,0 +1 @@ +../../script.d/deps.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/deps.d/osx/nothing.sh b/vendor/gonum.org/v1/gonum/.travis/deps.d/osx/nothing.sh new file mode 120000 index 0000000..18b1728 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/deps.d/osx/nothing.sh @@ -0,0 +1 @@ +../../script.d/nothing.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/deps.d/windows/nothing.sh b/vendor/gonum.org/v1/gonum/.travis/deps.d/windows/nothing.sh new file mode 120000 index 0000000..18b1728 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/deps.d/windows/nothing.sh @@ -0,0 +1 @@ +../../script.d/nothing.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/01-check-copyright.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/01-check-copyright.sh new file mode 120000 index 0000000..31835de --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/01-check-copyright.sh @@ -0,0 +1 @@ +../../script.d/check-copyright.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/02-check-imports.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/02-check-imports.sh new file mode 120000 index 0000000..2eb0ad5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/02-check-imports.sh @@ -0,0 +1 @@ +../../script.d/check-imports.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/03-check-formatting.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/03-check-formatting.sh new file mode 120000 index 0000000..e82d24d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/03-check-formatting.sh @@ -0,0 +1 @@ +../../script.d/check-formatting.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/04-test.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/04-test.sh new file mode 120000 index 0000000..b86e940 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/04-test.sh @@ -0,0 +1 @@ +../../script.d/test.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/05-test-coverage.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/05-test-coverage.sh new file mode 120000 index 0000000..3557f72 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/05-test-coverage.sh @@ -0,0 +1 @@ +../../script.d/test-coverage.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/.travis/run.d/linux/06-check-generate.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/06-check-generate.sh new file mode 120000 index 0000000..3131460 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/linux/06-check-generate.sh @@ -0,0 +1 @@ +../../script.d/check-generate.sh \ No newline at end of file diff --git 
a/vendor/gonum.org/v1/gonum/.travis/run.d/osx/01-test.sh b/vendor/gonum.org/v1/gonum/.travis/run.d/osx/01-test.sh new file mode 120000 index 0000000..b86e940 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/.travis/run.d/osx/01-test.sh @@ -0,0 +1 @@ +../../script.d/test.sh \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/AUTHORS b/vendor/gonum.org/v1/gonum/AUTHORS new file mode 100644 index 0000000..7a60a67 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/AUTHORS @@ -0,0 +1,92 @@ +# This is the official list of gonum authors for copyright purposes. +# This file is distinct from the CONTRIBUTORS files. +# See the latter for an explanation. + +# Names should be added to this file as +# Name or Organization +# The email address is not required for organizations. + +# Please keep the list sorted. + +Alexander Egurnov +Bill Gray +Bill Noon +Brendan Tracey +Brent Pedersen +Chad Kunde +Chih-Wei Chang +Chris Tessum +Christophe Meessen +Clayton Northey +Dan Kortschak +Daniel Fireman +David Samborski +Davor Kapsa +DeepMind Technologies +Delaney Gillilan +Dezmond Goff +Dong-hee Na +Egon Elbre +Ekaterina Efimova +Ethan Burns +Evert Lammerts +Facundo Gaich +Fazlul Shahriar +Francesc Campoy +Google Inc +Gustaf Johansson +Iakov Davydov +Igor Mikushkin +Iskander Sharipov +Jalem Raj Rohit +James Bell +James Bowman +James Holmes <32bitkid@gmail.com> +Janne Snabb +Jeff Juozapaitis +Jeremy Atkinson +Jonas Kahler +Jonas Schulze +Jonathan J Lawlor +Jonathan Reiter +Jonathan Schroeder +Joseph Watson +Josh Wilson +Julien Roland +Kai Trukenmüller +Kent English +Kevin C. Zimmerman +Kirill Motkov +Konstantin Shaposhnikov +Leonid Kneller +Lyron Winderbaum +Martin Diz +Matthieu Di Mercurio +Max Halford +MinJae Kwon +Nick Potts +Olivier Wulveryck +Or Rikon +Pontus Melke +Renée French +Rishi Desai +Robin Eklind +Sam Zaydel +Samuel Kelemen +Saran Ahluwalia +Scott Holden +Sebastien Binet +Shawn Smith +source{d} +Spencer Lyon +Steve McCoy +Taesu Pyo +Takeshi Yoneda +The University of Adelaide +The University of Minnesota +The University of Washington +Thomas Berg +Tobin Harding +Vincent Thiery +Vladimír Chalupecký +Yevgeniy Vahlis diff --git a/vendor/gonum.org/v1/gonum/CONTRIBUTORS b/vendor/gonum.org/v1/gonum/CONTRIBUTORS new file mode 100644 index 0000000..1734ca4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/CONTRIBUTORS @@ -0,0 +1,94 @@ +# This is the official list of people who can contribute +# (and typically have contributed) code to the gonum +# repository. +# +# The AUTHORS file lists the copyright holders; this file +# lists people. For example, Google employees would be listed here +# but not in AUTHORS, because Google would hold the copyright. +# +# When adding J Random Contributor's name to this file, +# either J's name or J's organization's name should be +# added to the AUTHORS file. +# +# Names should be added to this file like so: +# Name +# +# Please keep the list sorted. 
+ +Alexander Egurnov +Andrew Brampton +Bill Gray +Bill Noon +Brendan Tracey +Brent Pedersen +Chad Kunde +Chih-Wei Chang +Chris Tessum +Christophe Meessen +Clayton Northey +Dan Kortschak +Daniel Fireman +David Samborski +Davor Kapsa +Delaney Gillilan +Dezmond Goff +Dong-hee Na +Egon Elbre +Ekaterina Efimova +Ethan Burns +Evert Lammerts +Facundo Gaich +Fazlul Shahriar +Francesc Campoy +Gustaf Johansson +Iakov Davydov +Igor Mikushkin +Iskander Sharipov +Jalem Raj Rohit +James Bell +James Bowman +James Holmes <32bitkid@gmail.com> +Janne Snabb +Jeff Juozapaitis +Jeremy Atkinson +Jonas Kahler +Jonas Schulze +Jonathan J Lawlor +Jonathan Reiter +Jonathan Schroeder +Joseph Watson +Josh Wilson +Julien Roland +Kai Trukenmüller +Kent English +Kevin C. Zimmerman +Kirill Motkov +Konstantin Shaposhnikov +Leonid Kneller +Lyron Winderbaum +Martin Diz +Matthieu Di Mercurio +Max Halford +MinJae Kwon +Nick Potts +Olivier Wulveryck +Or Rikon +Pontus Melke +Renée French +Rishi Desai +Robin Eklind +Sam Zaydel +Samuel Kelemen +Saran Ahluwalia +Scott Holden +Sebastien Binet +Shawn Smith +Spencer Lyon +Steve McCoy +Taesu Pyo +Takeshi Yoneda +Thomas Berg +Tobin Harding +Vincent Thiery +Vladimír Chalupecký +Yevgeniy Vahlis diff --git a/vendor/gonum.org/v1/gonum/LICENSE b/vendor/gonum.org/v1/gonum/LICENSE new file mode 100644 index 0000000..5f1c3f9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/LICENSE @@ -0,0 +1,23 @@ +Copyright ©2013 The Gonum Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the gonum project nor the names of its authors and + contributors may be used to endorse or promote products derived from this + software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/blas/blas.go b/vendor/gonum.org/v1/gonum/blas/blas.go new file mode 100644 index 0000000..9b933e3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas.go @@ -0,0 +1,283 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate ./conversions.bash + +package blas + +// Flag constants indicate Givens transformation H matrix state. 
+type Flag int + +const ( + Identity Flag = -2 // H is the identity matrix; no rotation is needed. + Rescaling Flag = -1 // H specifies rescaling. + OffDiagonal Flag = 0 // Off-diagonal elements of H are non-unit. + Diagonal Flag = 1 // Diagonal elements of H are non-unit. +) + +// SrotmParams contains Givens transformation parameters returned +// by the Float32 Srotm method. +type SrotmParams struct { + Flag + H [4]float32 // Column-major 2 by 2 matrix. +} + +// DrotmParams contains Givens transformation parameters returned +// by the Float64 Drotm method. +type DrotmParams struct { + Flag + H [4]float64 // Column-major 2 by 2 matrix. +} + +// Transpose specifies the transposition operation of a matrix. +type Transpose byte + +const ( + NoTrans Transpose = 'N' + Trans Transpose = 'T' + ConjTrans Transpose = 'C' +) + +// Uplo specifies whether a matrix is upper or lower triangular. +type Uplo byte + +const ( + Upper Uplo = 'U' + Lower Uplo = 'L' + All Uplo = 'A' +) + +// Diag specifies whether a matrix is unit triangular. +type Diag byte + +const ( + NonUnit Diag = 'N' + Unit Diag = 'U' +) + +// Side specifies from which side a multiplication operation is performed. +type Side byte + +const ( + Left Side = 'L' + Right Side = 'R' +) + +// Float32 implements the single precision real BLAS routines. +type Float32 interface { + Float32Level1 + Float32Level2 + Float32Level3 +} + +// Float32Level1 implements the single precision real BLAS Level 1 routines. +type Float32Level1 interface { + Sdsdot(n int, alpha float32, x []float32, incX int, y []float32, incY int) float32 + Dsdot(n int, x []float32, incX int, y []float32, incY int) float64 + Sdot(n int, x []float32, incX int, y []float32, incY int) float32 + Snrm2(n int, x []float32, incX int) float32 + Sasum(n int, x []float32, incX int) float32 + Isamax(n int, x []float32, incX int) int + Sswap(n int, x []float32, incX int, y []float32, incY int) + Scopy(n int, x []float32, incX int, y []float32, incY int) + Saxpy(n int, alpha float32, x []float32, incX int, y []float32, incY int) + Srotg(a, b float32) (c, s, r, z float32) + Srotmg(d1, d2, b1, b2 float32) (p SrotmParams, rd1, rd2, rb1 float32) + Srot(n int, x []float32, incX int, y []float32, incY int, c, s float32) + Srotm(n int, x []float32, incX int, y []float32, incY int, p SrotmParams) + Sscal(n int, alpha float32, x []float32, incX int) +} + +// Float32Level2 implements the single precision real BLAS Level 2 routines. 
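// A small sketch (not from the vendored sources) of how the Flag constants and
// SrotmParams defined above travel from Srotmg to Srotm. It assumes gonum.Implementation
// from gonum.org/v1/gonum/blas/gonum as the concrete blas.Float32, which is what the
// blas32 wrapper later in this patch uses by default; the vector values are illustrative.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/gonum"
)

func main() {
	var impl blas.Float32 = gonum.Implementation{}

	x := []float32{1, 2, 3}
	y := []float32{4, 5, 6}

	// Srotmg packs the 2x2 H matrix into SrotmParams; the embedded Flag records
	// which parts of H are non-trivial (Identity, Rescaling, OffDiagonal, Diagonal).
	p, _, _, _ := impl.Srotmg(1, 1, x[0], y[0])
	if p.Flag != blas.Identity {
		impl.Srotm(len(x), x, 1, y, 1, p)
	}
	fmt.Println(p.Flag, x, y)
}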
+type Float32Level2 interface { + Sgemv(tA Transpose, m, n int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) + Sgbmv(tA Transpose, m, n, kL, kU int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) + Strmv(ul Uplo, tA Transpose, d Diag, n int, a []float32, lda int, x []float32, incX int) + Stbmv(ul Uplo, tA Transpose, d Diag, n, k int, a []float32, lda int, x []float32, incX int) + Stpmv(ul Uplo, tA Transpose, d Diag, n int, ap []float32, x []float32, incX int) + Strsv(ul Uplo, tA Transpose, d Diag, n int, a []float32, lda int, x []float32, incX int) + Stbsv(ul Uplo, tA Transpose, d Diag, n, k int, a []float32, lda int, x []float32, incX int) + Stpsv(ul Uplo, tA Transpose, d Diag, n int, ap []float32, x []float32, incX int) + Ssymv(ul Uplo, n int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) + Ssbmv(ul Uplo, n, k int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) + Sspmv(ul Uplo, n int, alpha float32, ap []float32, x []float32, incX int, beta float32, y []float32, incY int) + Sger(m, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32, lda int) + Ssyr(ul Uplo, n int, alpha float32, x []float32, incX int, a []float32, lda int) + Sspr(ul Uplo, n int, alpha float32, x []float32, incX int, ap []float32) + Ssyr2(ul Uplo, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32, lda int) + Sspr2(ul Uplo, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32) +} + +// Float32Level3 implements the single precision real BLAS Level 3 routines. +type Float32Level3 interface { + Sgemm(tA, tB Transpose, m, n, k int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) + Ssymm(s Side, ul Uplo, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) + Ssyrk(ul Uplo, t Transpose, n, k int, alpha float32, a []float32, lda int, beta float32, c []float32, ldc int) + Ssyr2k(ul Uplo, t Transpose, n, k int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) + Strmm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int) + Strsm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int) +} + +// Float64 implements the single precision real BLAS routines. +type Float64 interface { + Float64Level1 + Float64Level2 + Float64Level3 +} + +// Float64Level1 implements the double precision real BLAS Level 1 routines. 
+type Float64Level1 interface { + Ddot(n int, x []float64, incX int, y []float64, incY int) float64 + Dnrm2(n int, x []float64, incX int) float64 + Dasum(n int, x []float64, incX int) float64 + Idamax(n int, x []float64, incX int) int + Dswap(n int, x []float64, incX int, y []float64, incY int) + Dcopy(n int, x []float64, incX int, y []float64, incY int) + Daxpy(n int, alpha float64, x []float64, incX int, y []float64, incY int) + Drotg(a, b float64) (c, s, r, z float64) + Drotmg(d1, d2, b1, b2 float64) (p DrotmParams, rd1, rd2, rb1 float64) + Drot(n int, x []float64, incX int, y []float64, incY int, c float64, s float64) + Drotm(n int, x []float64, incX int, y []float64, incY int, p DrotmParams) + Dscal(n int, alpha float64, x []float64, incX int) +} + +// Float64Level2 implements the double precision real BLAS Level 2 routines. +type Float64Level2 interface { + Dgemv(tA Transpose, m, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) + Dgbmv(tA Transpose, m, n, kL, kU int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) + Dtrmv(ul Uplo, tA Transpose, d Diag, n int, a []float64, lda int, x []float64, incX int) + Dtbmv(ul Uplo, tA Transpose, d Diag, n, k int, a []float64, lda int, x []float64, incX int) + Dtpmv(ul Uplo, tA Transpose, d Diag, n int, ap []float64, x []float64, incX int) + Dtrsv(ul Uplo, tA Transpose, d Diag, n int, a []float64, lda int, x []float64, incX int) + Dtbsv(ul Uplo, tA Transpose, d Diag, n, k int, a []float64, lda int, x []float64, incX int) + Dtpsv(ul Uplo, tA Transpose, d Diag, n int, ap []float64, x []float64, incX int) + Dsymv(ul Uplo, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) + Dsbmv(ul Uplo, n, k int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) + Dspmv(ul Uplo, n int, alpha float64, ap []float64, x []float64, incX int, beta float64, y []float64, incY int) + Dger(m, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) + Dsyr(ul Uplo, n int, alpha float64, x []float64, incX int, a []float64, lda int) + Dspr(ul Uplo, n int, alpha float64, x []float64, incX int, ap []float64) + Dsyr2(ul Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) + Dspr2(ul Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64) +} + +// Float64Level3 implements the double precision real BLAS Level 3 routines. +type Float64Level3 interface { + Dgemm(tA, tB Transpose, m, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) + Dsymm(s Side, ul Uplo, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) + Dsyrk(ul Uplo, t Transpose, n, k int, alpha float64, a []float64, lda int, beta float64, c []float64, ldc int) + Dsyr2k(ul Uplo, t Transpose, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) + Dtrmm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int) + Dtrsm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int) +} + +// Complex64 implements the single precision complex BLAS routines. 
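// A minimal sketch (not from the vendored sources) of calling the Float64 Level 1
// routines listed above directly through the interface; using gonum.Implementation as
// the concrete type is an assumption consistent with the blas32 wrapper later in this
// patch, and the vector values are illustrative.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/gonum"
)

func main() {
	var impl blas.Float64 = gonum.Implementation{}

	x := []float64{1, 2, 3}
	y := []float64{4, 5, 6}

	dot := impl.Ddot(len(x), x, 1, y, 1) // 1*4 + 2*5 + 3*6 = 32
	impl.Daxpy(len(x), 2, x, 1, y, 1)    // y[i] += 2*x[i] -> {6, 9, 12}
	nrm := impl.Dnrm2(len(x), x, 1)      // sqrt(1 + 4 + 9)

	fmt.Println(dot, y, nrm)
}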
+type Complex64 interface { + Complex64Level1 + Complex64Level2 + Complex64Level3 +} + +// Complex64Level1 implements the single precision complex BLAS Level 1 routines. +type Complex64Level1 interface { + Cdotu(n int, x []complex64, incX int, y []complex64, incY int) (dotu complex64) + Cdotc(n int, x []complex64, incX int, y []complex64, incY int) (dotc complex64) + Scnrm2(n int, x []complex64, incX int) float32 + Scasum(n int, x []complex64, incX int) float32 + Icamax(n int, x []complex64, incX int) int + Cswap(n int, x []complex64, incX int, y []complex64, incY int) + Ccopy(n int, x []complex64, incX int, y []complex64, incY int) + Caxpy(n int, alpha complex64, x []complex64, incX int, y []complex64, incY int) + Cscal(n int, alpha complex64, x []complex64, incX int) + Csscal(n int, alpha float32, x []complex64, incX int) +} + +// Complex64Level2 implements the single precision complex BLAS routines Level 2 routines. +type Complex64Level2 interface { + Cgemv(tA Transpose, m, n int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) + Cgbmv(tA Transpose, m, n, kL, kU int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) + Ctrmv(ul Uplo, tA Transpose, d Diag, n int, a []complex64, lda int, x []complex64, incX int) + Ctbmv(ul Uplo, tA Transpose, d Diag, n, k int, a []complex64, lda int, x []complex64, incX int) + Ctpmv(ul Uplo, tA Transpose, d Diag, n int, ap []complex64, x []complex64, incX int) + Ctrsv(ul Uplo, tA Transpose, d Diag, n int, a []complex64, lda int, x []complex64, incX int) + Ctbsv(ul Uplo, tA Transpose, d Diag, n, k int, a []complex64, lda int, x []complex64, incX int) + Ctpsv(ul Uplo, tA Transpose, d Diag, n int, ap []complex64, x []complex64, incX int) + Chemv(ul Uplo, n int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) + Chbmv(ul Uplo, n, k int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) + Chpmv(ul Uplo, n int, alpha complex64, ap []complex64, x []complex64, incX int, beta complex64, y []complex64, incY int) + Cgeru(m, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) + Cgerc(m, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) + Cher(ul Uplo, n int, alpha float32, x []complex64, incX int, a []complex64, lda int) + Chpr(ul Uplo, n int, alpha float32, x []complex64, incX int, a []complex64) + Cher2(ul Uplo, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) + Chpr2(ul Uplo, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, ap []complex64) +} + +// Complex64Level3 implements the single precision complex BLAS Level 3 routines. 
+type Complex64Level3 interface { + Cgemm(tA, tB Transpose, m, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) + Csymm(s Side, ul Uplo, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) + Csyrk(ul Uplo, t Transpose, n, k int, alpha complex64, a []complex64, lda int, beta complex64, c []complex64, ldc int) + Csyr2k(ul Uplo, t Transpose, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) + Ctrmm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int) + Ctrsm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int) + Chemm(s Side, ul Uplo, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) + Cherk(ul Uplo, t Transpose, n, k int, alpha float32, a []complex64, lda int, beta float32, c []complex64, ldc int) + Cher2k(ul Uplo, t Transpose, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta float32, c []complex64, ldc int) +} + +// Complex128 implements the double precision complex BLAS routines. +type Complex128 interface { + Complex128Level1 + Complex128Level2 + Complex128Level3 +} + +// Complex128Level1 implements the double precision complex BLAS Level 1 routines. +type Complex128Level1 interface { + Zdotu(n int, x []complex128, incX int, y []complex128, incY int) (dotu complex128) + Zdotc(n int, x []complex128, incX int, y []complex128, incY int) (dotc complex128) + Dznrm2(n int, x []complex128, incX int) float64 + Dzasum(n int, x []complex128, incX int) float64 + Izamax(n int, x []complex128, incX int) int + Zswap(n int, x []complex128, incX int, y []complex128, incY int) + Zcopy(n int, x []complex128, incX int, y []complex128, incY int) + Zaxpy(n int, alpha complex128, x []complex128, incX int, y []complex128, incY int) + Zscal(n int, alpha complex128, x []complex128, incX int) + Zdscal(n int, alpha float64, x []complex128, incX int) +} + +// Complex128Level2 implements the double precision complex BLAS Level 2 routines. 
+type Complex128Level2 interface { + Zgemv(tA Transpose, m, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) + Zgbmv(tA Transpose, m, n int, kL int, kU int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) + Ztrmv(ul Uplo, tA Transpose, d Diag, n int, a []complex128, lda int, x []complex128, incX int) + Ztbmv(ul Uplo, tA Transpose, d Diag, n, k int, a []complex128, lda int, x []complex128, incX int) + Ztpmv(ul Uplo, tA Transpose, d Diag, n int, ap []complex128, x []complex128, incX int) + Ztrsv(ul Uplo, tA Transpose, d Diag, n int, a []complex128, lda int, x []complex128, incX int) + Ztbsv(ul Uplo, tA Transpose, d Diag, n, k int, a []complex128, lda int, x []complex128, incX int) + Ztpsv(ul Uplo, tA Transpose, d Diag, n int, ap []complex128, x []complex128, incX int) + Zhemv(ul Uplo, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) + Zhbmv(ul Uplo, n, k int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) + Zhpmv(ul Uplo, n int, alpha complex128, ap []complex128, x []complex128, incX int, beta complex128, y []complex128, incY int) + Zgeru(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) + Zgerc(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) + Zher(ul Uplo, n int, alpha float64, x []complex128, incX int, a []complex128, lda int) + Zhpr(ul Uplo, n int, alpha float64, x []complex128, incX int, a []complex128) + Zher2(ul Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) + Zhpr2(ul Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, ap []complex128) +} + +// Complex128Level3 implements the double precision complex BLAS Level 3 routines. 
+type Complex128Level3 interface { + Zgemm(tA, tB Transpose, m, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) + Zsymm(s Side, ul Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) + Zsyrk(ul Uplo, t Transpose, n, k int, alpha complex128, a []complex128, lda int, beta complex128, c []complex128, ldc int) + Zsyr2k(ul Uplo, t Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) + Ztrmm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) + Ztrsm(s Side, ul Uplo, tA Transpose, d Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) + Zhemm(s Side, ul Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) + Zherk(ul Uplo, t Transpose, n, k int, alpha float64, a []complex128, lda int, beta float64, c []complex128, ldc int) + Zher2k(ul Uplo, t Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta float64, c []complex128, ldc int) +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas32/blas32.go b/vendor/gonum.org/v1/gonum/blas/blas32/blas32.go new file mode 100644 index 0000000..926a5ac --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas32/blas32.go @@ -0,0 +1,458 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas32 + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/gonum" +) + +var blas32 blas.Float32 = gonum.Implementation{} + +// Use sets the BLAS float32 implementation to be used by subsequent BLAS calls. +// The default implementation is +// gonum.org/v1/gonum/blas/gonum.Implementation. +func Use(b blas.Float32) { + blas32 = b +} + +// Implementation returns the current BLAS float32 implementation. +// +// Implementation allows direct calls to the current the BLAS float32 implementation +// giving finer control of parameters. +func Implementation() blas.Float32 { + return blas32 +} + +// Vector represents a vector with an associated element increment. +type Vector struct { + Inc int + Data []float32 +} + +// General represents a matrix using the conventional storage scheme. +type General struct { + Rows, Cols int + Stride int + Data []float32 +} + +// Band represents a band matrix using the band storage scheme. +type Band struct { + Rows, Cols int + KL, KU int + Stride int + Data []float32 +} + +// Triangular represents a triangular matrix using the conventional storage scheme. +type Triangular struct { + N int + Stride int + Data []float32 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularBand represents a triangular matrix using the band storage scheme. +type TriangularBand struct { + N, K int + Stride int + Data []float32 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularPacked represents a triangular matrix using the packed storage scheme. +type TriangularPacked struct { + N int + Data []float32 + Uplo blas.Uplo + Diag blas.Diag +} + +// Symmetric represents a symmetric matrix using the conventional storage scheme. +type Symmetric struct { + N int + Stride int + Data []float32 + Uplo blas.Uplo +} + +// SymmetricBand represents a symmetric matrix using the band storage scheme. 
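// A small sketch (not from the vendored sources) of the storage conventions behind the
// types above: General holds row-major data with Stride >= Cols, and Symmetric only
// reads the triangle named by Uplo. The concrete values are illustrative assumptions.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// A 2x3 dense matrix; element (i,j) lives at Data[i*Stride+j].
	a := blas32.General{
		Rows: 2, Cols: 3, Stride: 3,
		Data: []float32{
			1, 2, 3,
			4, 5, 6,
		},
	}

	// A 2x2 symmetric matrix; with Uplo == blas.Upper only the upper triangle
	// (1, 2 and 4 here) is referenced, so the 0 below the diagonal is never read.
	s := blas32.Symmetric{
		N: 2, Stride: 2, Uplo: blas.Upper,
		Data: []float32{
			1, 2,
			0, 4,
		},
	}

	fmt.Println(a.Data[1*a.Stride+2]) // 6: row 1, column 2
	fmt.Println(s.Uplo == blas.Upper) // true
}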
+type SymmetricBand struct { + N, K int + Stride int + Data []float32 + Uplo blas.Uplo +} + +// SymmetricPacked represents a symmetric matrix using the packed storage scheme. +type SymmetricPacked struct { + N int + Data []float32 + Uplo blas.Uplo +} + +// Level 1 + +const negInc = "blas32: negative vector increment" + +// Dot computes the dot product of the two vectors: +// \sum_i x[i]*y[i]. +func Dot(n int, x, y Vector) float32 { + return blas32.Sdot(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// DDot computes the dot product of the two vectors: +// \sum_i x[i]*y[i]. +func DDot(n int, x, y Vector) float64 { + return blas32.Dsdot(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// SDDot computes the dot product of the two vectors adding a constant: +// alpha + \sum_i x[i]*y[i]. +func SDDot(n int, alpha float32, x, y Vector) float32 { + return blas32.Sdsdot(n, alpha, x.Data, x.Inc, y.Data, y.Inc) +} + +// Nrm2 computes the Euclidean norm of the vector x: +// sqrt(\sum_i x[i]*x[i]). +// +// Nrm2 will panic if the vector increment is negative. +func Nrm2(n int, x Vector) float32 { + if x.Inc < 0 { + panic(negInc) + } + return blas32.Snrm2(n, x.Data, x.Inc) +} + +// Asum computes the sum of the absolute values of the elements of x: +// \sum_i |x[i]|. +// +// Asum will panic if the vector increment is negative. +func Asum(n int, x Vector) float32 { + if x.Inc < 0 { + panic(negInc) + } + return blas32.Sasum(n, x.Data, x.Inc) +} + +// Iamax returns the index of an element of x with the largest absolute value. +// If there are multiple such indices the earliest is returned. +// Iamax returns -1 if n == 0. +// +// Iamax will panic if the vector increment is negative. +func Iamax(n int, x Vector) int { + if x.Inc < 0 { + panic(negInc) + } + return blas32.Isamax(n, x.Data, x.Inc) +} + +// Swap exchanges the elements of the two vectors: +// x[i], y[i] = y[i], x[i] for all i. +func Swap(n int, x, y Vector) { + blas32.Sswap(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Copy copies the elements of x into the elements of y: +// y[i] = x[i] for all i. +func Copy(n int, x, y Vector) { + blas32.Scopy(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Axpy adds x scaled by alpha to y: +// y[i] += alpha*x[i] for all i. +func Axpy(n int, alpha float32, x, y Vector) { + blas32.Saxpy(n, alpha, x.Data, x.Inc, y.Data, y.Inc) +} + +// Rotg computes the parameters of a Givens plane rotation so that +// ⎡ c s⎤ ⎡a⎤ ⎡r⎤ +// ⎣-s c⎦ * ⎣b⎦ = ⎣0⎦ +// where a and b are the Cartesian coordinates of a given point. +// c, s, and r are defined as +// r = ±Sqrt(a^2 + b^2), +// c = a/r, the cosine of the rotation angle, +// s = a/r, the sine of the rotation angle, +// and z is defined such that +// if |a| > |b|, z = s, +// otherwise if c != 0, z = 1/c, +// otherwise z = 1. +func Rotg(a, b float32) (c, s, r, z float32) { + return blas32.Srotg(a, b) +} + +// Rotmg computes the modified Givens rotation. See +// http://www.netlib.org/lapack/explore-html/df/deb/drotmg_8f.html +// for more details. +func Rotmg(d1, d2, b1, b2 float32) (p blas.SrotmParams, rd1, rd2, rb1 float32) { + return blas32.Srotmg(d1, d2, b1, b2) +} + +// Rot applies a plane transformation to n points represented by the vectors x +// and y: +// x[i] = c*x[i] + s*y[i], +// y[i] = -s*x[i] + c*y[i], for all i. +func Rot(n int, x, y Vector, c, s float32) { + blas32.Srot(n, x.Data, x.Inc, y.Data, y.Inc, c, s) +} + +// Rotm applies the modified Givens rotation to n points represented by the +// vectors x and y. 
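The float32 Level 1 wrappers above still take the element count n explicitly, and Inc selects every Inc-th element of Data. A short sketch, assuming the vendored blas32 package; the data values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// x walks Data with stride 2, so it sees the elements 1, 3, 5.
	x := blas32.Vector{Inc: 2, Data: []float32{1, 0, 3, 0, 5}}
	y := blas32.Vector{Inc: 1, Data: []float32{10, 20, 30}}

	n := 3
	fmt.Println(blas32.Dot(n, x, y)) // 1*10 + 3*20 + 5*30 = 220
	fmt.Println(blas32.Nrm2(n, x))   // sqrt(1 + 9 + 25) ≈ 5.916
	blas32.Axpy(n, 2, x, y)          // y[i] += 2*x[i]
	fmt.Println(y.Data)              // [12 26 40]
}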
+func Rotm(n int, x, y Vector, p blas.SrotmParams) { + blas32.Srotm(n, x.Data, x.Inc, y.Data, y.Inc, p) +} + +// Scal scales the vector x by alpha: +// x[i] *= alpha for all i. +// +// Scal will panic if the vector increment is negative. +func Scal(n int, alpha float32, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + blas32.Sscal(n, alpha, x.Data, x.Inc) +} + +// Level 2 + +// Gemv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans or blas.ConjTrans, +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func Gemv(t blas.Transpose, alpha float32, a General, x Vector, beta float32, y Vector) { + blas32.Sgemv(t, a.Rows, a.Cols, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Gbmv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans or blas.ConjTrans, +// where A is an m×n band matrix, x and y are vectors, and alpha and beta are scalars. +func Gbmv(t blas.Transpose, alpha float32, a Band, x Vector, beta float32, y Vector) { + blas32.Sgbmv(t, a.Rows, a.Cols, a.KL, a.KU, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Trmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix, and x is a vector. +func Trmv(t blas.Transpose, a Triangular, x Vector) { + blas32.Strmv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +func Tbmv(t blas.Transpose, a TriangularBand, x Vector) { + blas32.Stbmv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x is a vector. +func Tpmv(t blas.Transpose, a TriangularPacked, x Vector) { + blas32.Stpmv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Trsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Trsv(t blas.Transpose, a Triangular, x Vector) { + blas32.Strsv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular band matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tbsv(t blas.Transpose, a TriangularBand, x Vector) { + blas32.Stbsv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x and b are +// vectors. 
+// +// At entry to the function, x contains the values of b, and the result is +// stored in place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tpsv(t blas.Transpose, a TriangularPacked, x Vector) { + blas32.Stpsv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Symv computes +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric matrix, x and y are vectors, and alpha and +// beta are scalars. +func Symv(alpha float32, a Symmetric, x Vector, beta float32, y Vector) { + blas32.Ssymv(a.Uplo, a.N, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Sbmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric band matrix, x and y are vectors, and alpha +// and beta are scalars. +func Sbmv(alpha float32, a SymmetricBand, x Vector, beta float32, y Vector) { + blas32.Ssbmv(a.Uplo, a.N, a.K, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Spmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. +func Spmv(alpha float32, a SymmetricPacked, x Vector, beta float32, y Vector) { + blas32.Sspmv(a.Uplo, a.N, alpha, a.Data, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Ger performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Ger(alpha float32, x, y Vector, a General) { + blas32.Sger(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Syr performs a rank-1 update +// A += alpha * x * x^T, +// where A is an n×n symmetric matrix, x is a vector, and alpha is a scalar. +func Syr(alpha float32, x Vector, a Symmetric) { + blas32.Ssyr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data, a.Stride) +} + +// Spr performs the rank-1 update +// A += alpha * x * x^T, +// where A is an n×n symmetric matrix in packed format, x is a vector, and +// alpha is a scalar. +func Spr(alpha float32, x Vector, a SymmetricPacked) { + blas32.Sspr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data) +} + +// Syr2 performs a rank-2 update +// A += alpha * x * y^T + alpha * y * x^T, +// where A is a symmetric n×n matrix, x and y are vectors, and alpha is a scalar. +func Syr2(alpha float32, x, y Vector, a Symmetric) { + blas32.Ssyr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Spr2 performs a rank-2 update +// A += alpha * x * y^T + alpha * y * x^T, +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha is a scalar. +func Spr2(alpha float32, x, y Vector, a SymmetricPacked) { + blas32.Sspr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data) +} + +// Level 3 + +// Gemm computes +// C = alpha * A * B + beta * C, +// where A, B, and C are dense matrices, and alpha and beta are scalars. +// tA and tB specify whether A or B are transposed. 
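The in-place convention documented for Trsv, Tbsv and Tpsv above means the right-hand side is overwritten by the solution. A sketch of a lower-triangular solve, again assuming the vendored blas32 API; the matrix and right-hand side are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// L is 3×3 lower triangular with a non-unit diagonal; the strictly
	// upper part of Data is never referenced.
	l := blas32.Triangular{
		N: 3, Stride: 3, Uplo: blas.Lower, Diag: blas.NonUnit,
		Data: []float32{
			2, 0, 0,
			1, 3, 0,
			4, 5, 6,
		},
	}
	// x holds b on entry and the solution of L*x = b on return.
	x := blas32.Vector{Inc: 1, Data: []float32{2, 7, 32}}
	blas32.Trsv(blas.NoTrans, l, x)
	fmt.Println(x.Data) // [1 2 3]
}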
+func Gemm(tA, tB blas.Transpose, alpha float32, a, b General, beta float32, c General) { + var m, n, k int + if tA == blas.NoTrans { + m, k = a.Rows, a.Cols + } else { + m, k = a.Cols, a.Rows + } + if tB == blas.NoTrans { + n = b.Cols + } else { + n = b.Rows + } + blas32.Sgemm(tA, tB, m, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Symm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and +// alpha is a scalar. +func Symm(s blas.Side, alpha float32, a Symmetric, b General, beta float32, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + blas32.Ssymm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Syrk performs a symmetric rank-k update +// C = alpha * A * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * A + beta * C, if t == blas.Trans or blas.ConjTrans, +// where C is an n×n symmetric matrix, A is an n×k matrix if t == blas.NoTrans and +// a k×n matrix otherwise, and alpha and beta are scalars. +func Syrk(t blas.Transpose, alpha float32, a General, beta float32, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + blas32.Ssyrk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Syr2k performs a symmetric rank-2k update +// C = alpha * A * B^T + alpha * B * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * B + alpha * B^T * A + beta * C, if t == blas.Trans or blas.ConjTrans, +// where C is an n×n symmetric matrix, A and B are n×k matrices if t == NoTrans +// and k×n matrices otherwise, and alpha and beta are scalars. +func Syr2k(t blas.Transpose, alpha float32, a, b General, beta float32, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + blas32.Ssyr2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Trmm performs +// B = alpha * A * B, if tA == blas.NoTrans and s == blas.Left, +// B = alpha * A^T * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Left, +// B = alpha * B * A, if tA == blas.NoTrans and s == blas.Right, +// B = alpha * B * A^T, if tA == blas.Trans or blas.ConjTrans, and s == blas.Right, +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is +// a scalar. +func Trmm(s blas.Side, tA blas.Transpose, alpha float32, a Triangular, b General) { + blas32.Strmm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Trsm solves +// A * X = alpha * B, if tA == blas.NoTrans and s == blas.Left, +// A^T * X = alpha * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Left, +// X * A = alpha * B, if tA == blas.NoTrans and s == blas.Right, +// X * A^T = alpha * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Right, +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and +// alpha is a scalar. +// +// At entry to the function, X contains the values of B, and the result is +// stored in-place into X. +// +// No check is made that A is invertible. 
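Gemm above derives m, n and k from the descriptors and the transpose flags, so the caller only has to supply matrices with consistent shapes. A sketch computing C = A*B^T with the vendored blas32 API; the data is illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	a := blas32.General{Rows: 2, Cols: 3, Stride: 3, Data: []float32{
		1, 2, 3,
		4, 5, 6,
	}}
	// b is stored as 2×3 but used as B^T (3×2) below.
	b := blas32.General{Rows: 2, Cols: 3, Stride: 3, Data: []float32{
		1, 0, 1,
		0, 1, 1,
	}}
	c := blas32.General{Rows: 2, Cols: 2, Stride: 2, Data: make([]float32, 4)}

	// C = 1*A*B^T + 0*C; m=2 and k=3 come from A, n=2 from B^T.
	blas32.Gemm(blas.NoTrans, blas.Trans, 1, a, b, 0, c)
	fmt.Println(c.Data) // [4 5 10 11]
}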
+func Trsm(s blas.Side, tA blas.Transpose, alpha float32, a Triangular, b General) { + blas32.Strsm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas32/conv.go b/vendor/gonum.org/v1/gonum/blas/blas32/conv.go new file mode 100644 index 0000000..aadca23 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas32/conv.go @@ -0,0 +1,279 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas32 + +import "gonum.org/v1/gonum/blas" + +// GeneralCols represents a matrix using the conventional column-major storage scheme. +type GeneralCols General + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t GeneralCols) From(a General) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas32: mismatched dimension") + } + if len(t.Data) < (t.Cols-1)*t.Stride+t.Rows { + panic("blas32: short data slice") + } + for i := 0; i < a.Rows; i++ { + for j, v := range a.Data[i*a.Stride : i*a.Stride+a.Cols] { + t.Data[i+j*t.Stride] = v + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t General) From(a GeneralCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas32: mismatched dimension") + } + if len(t.Data) < (t.Rows-1)*t.Stride+t.Cols { + panic("blas32: short data slice") + } + for j := 0; j < a.Cols; j++ { + for i, v := range a.Data[j*a.Stride : j*a.Stride+a.Rows] { + t.Data[i*t.Stride+j] = v + } + } +} + +// TriangularCols represents a matrix using the conventional column-major storage scheme. +type TriangularCols Triangular + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. +func (t TriangularCols) From(a Triangular) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas32: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. 
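The conv.go helpers above translate between the row-major layout used by the rest of the package and column-major layout, swapping the role of Stride: row-major stores element (i, j) at Data[i*Stride+j], column-major at Data[i+j*Stride]. A round-trip sketch with the vendored blas32 types; the values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// 2×3 row-major source.
	rm := blas32.General{Rows: 2, Cols: 3, Stride: 3, Data: []float32{
		1, 2, 3,
		4, 5, 6,
	}}
	// Column-major destination: Stride is now the column length (Rows).
	cm := blas32.GeneralCols{Rows: 2, Cols: 3, Stride: 2, Data: make([]float32, 6)}

	cm.From(rm)
	fmt.Println(cm.Data) // [1 4 2 5 3 6], columns stored contiguously

	back := blas32.General{Rows: 2, Cols: 3, Stride: 3, Data: make([]float32, 6)}
	back.From(cm)
	fmt.Println(back.Data) // [1 2 3 4 5 6]
}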
+func (t Triangular) From(a TriangularCols) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas32: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// BandCols represents a matrix using the band column-major storage scheme. +type BandCols Band + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t BandCols) From(a Band) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas32: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("blas32: short stride for destination") + } + for i := 0; i < a.Rows; i++ { + for j := max(0, i-a.KL); j < min(i+a.KU+1, a.Cols); j++ { + t.Data[i+t.KU-j+j*t.Stride] = a.Data[j+a.KL-i+i*a.Stride] + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t Band) From(a BandCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas32: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("blas32: short stride for destination") + } + for j := 0; j < a.Cols; j++ { + for i := max(0, j-a.KU); i < min(j+a.KL+1, a.Rows); i++ { + t.Data[j+a.KL-i+i*a.Stride] = a.Data[i+t.KU-j+j*t.Stride] + } + } +} + +// TriangularBandCols represents a symmetric matrix using the band column-major storage scheme. +type TriangularBandCols TriangularBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t TriangularBandCols) From(a TriangularBand) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.K != a.K { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas32: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas32: mismatched BLAS diag") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
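The band conversions above encode the storage rule used throughout the package: in row-major band storage an element (i, j) with -KL ≤ j-i ≤ KU is kept at Data[j+KL-i+i*Stride], so each row stores only its KL+KU+1 in-band entries. A sketch multiplying by a tridiagonal matrix with the vendored blas32 API; the values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// 4×4 tridiagonal matrix with 2 on the diagonal and 1 on both
	// off-diagonals; pad marks slots that are never read.
	const pad = 0
	t := blas32.Band{
		Rows: 4, Cols: 4, KL: 1, KU: 1, Stride: 3,
		Data: []float32{
			pad, 2, 1,
			1, 2, 1,
			1, 2, 1,
			1, 2, pad,
		},
	}
	x := blas32.Vector{Inc: 1, Data: []float32{1, 1, 1, 1}}
	y := blas32.Vector{Inc: 1, Data: make([]float32, 4)}

	// y = T*x: each interior row sums to 4, the end rows to 3.
	blas32.Gbmv(blas.NoTrans, 1, t, x, 0, y)
	fmt.Println(y.Data) // [3 4 4 3]
}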
+func (t TriangularBand) From(a TriangularBandCols) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.K != a.K { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas32: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas32: mismatched BLAS diag") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas32/conv_symmetric.go b/vendor/gonum.org/v1/gonum/blas/blas32/conv_symmetric.go new file mode 100644 index 0000000..162f3fc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas32/conv_symmetric.go @@ -0,0 +1,155 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas32 + +import "gonum.org/v1/gonum/blas" + +// SymmetricCols represents a matrix using the conventional column-major storage scheme. +type SymmetricCols Symmetric + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t SymmetricCols) From(a Symmetric) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t Symmetric) From(a SymmetricCols) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// SymmetricBandCols represents a symmetric matrix using the band column-major storage scheme. +type SymmetricBandCols SymmetricBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
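For the Symmetric types only the triangle selected by Uplo is stored and referenced; the opposite triangle of Data may hold anything. A short Symv sketch with the vendored blas32 API; the values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas32"
)

func main() {
	// Only the upper triangle is meaningful; the zeros below the
	// diagonal are ignored by routines that honour Uplo.
	s := blas32.Symmetric{
		N: 3, Stride: 3, Uplo: blas.Upper,
		Data: []float32{
			4, 1, 2,
			0, 5, 3,
			0, 0, 6,
		},
	}
	x := blas32.Vector{Inc: 1, Data: []float32{1, 1, 1}}
	y := blas32.Vector{Inc: 1, Data: make([]float32, 3)}

	// y = S*x with the full symmetric matrix implied by the upper triangle.
	blas32.Symv(1, s, x, 0, y)
	fmt.Println(y.Data) // [7 9 11]
}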
+func (t SymmetricBandCols) From(a SymmetricBand) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.K != a.K { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas32: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t SymmetricBand) From(a SymmetricBandCols) { + if t.N != a.N { + panic("blas32: mismatched dimension") + } + if t.K != a.K { + panic("blas32: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas32: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas32: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas32: mismatched BLAS uplo") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas32: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas32/doc.go b/vendor/gonum.org/v1/gonum/blas/blas32/doc.go new file mode 100644 index 0000000..68680f3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas32/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package blas32 provides a simple interface to the float32 BLAS API. +package blas32 // import "gonum.org/v1/gonum/blas/blas32" diff --git a/vendor/gonum.org/v1/gonum/blas/blas64/blas64.go b/vendor/gonum.org/v1/gonum/blas/blas64/blas64.go new file mode 100644 index 0000000..5519838 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas64/blas64.go @@ -0,0 +1,469 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas64 + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/gonum" +) + +var blas64 blas.Float64 = gonum.Implementation{} + +// Use sets the BLAS float64 implementation to be used by subsequent BLAS calls. +// The default implementation is +// gonum.org/v1/gonum/blas/gonum.Implementation. +func Use(b blas.Float64) { + blas64 = b +} + +// Implementation returns the current BLAS float64 implementation. +// +// Implementation allows direct calls to the current the BLAS float64 implementation +// giving finer control of parameters. +func Implementation() blas.Float64 { + return blas64 +} + +// Vector represents a vector with an associated element increment. +type Vector struct { + N int + Data []float64 + Inc int +} + +// General represents a matrix using the conventional storage scheme. +type General struct { + Rows, Cols int + Data []float64 + Stride int +} + +// Band represents a band matrix using the band storage scheme. 
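Unlike the float32 wrappers earlier in this patch, the blas64 Vector above carries its own length N, and the Level 1 wrappers check that operand lengths agree instead of taking n as an argument. A sketch against the vendored blas64 API; the values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas/blas64"
)

func main() {
	x := blas64.Vector{N: 3, Inc: 1, Data: []float64{1, 2, 3}}
	y := blas64.Vector{N: 3, Inc: 1, Data: []float64{4, 5, 6}}

	fmt.Println(blas64.Dot(x, y)) // 1*4 + 2*5 + 3*6 = 32

	blas64.Axpy(0.5, x, y) // y[i] += 0.5*x[i]
	fmt.Println(y.Data)    // [4.5 6 7.5]

	// A mismatched N panics with "blas64: vector length mismatch".
}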
+type Band struct { + Rows, Cols int + KL, KU int + Data []float64 + Stride int +} + +// Triangular represents a triangular matrix using the conventional storage scheme. +type Triangular struct { + Uplo blas.Uplo + Diag blas.Diag + N int + Data []float64 + Stride int +} + +// TriangularBand represents a triangular matrix using the band storage scheme. +type TriangularBand struct { + Uplo blas.Uplo + Diag blas.Diag + N, K int + Data []float64 + Stride int +} + +// TriangularPacked represents a triangular matrix using the packed storage scheme. +type TriangularPacked struct { + Uplo blas.Uplo + Diag blas.Diag + N int + Data []float64 +} + +// Symmetric represents a symmetric matrix using the conventional storage scheme. +type Symmetric struct { + Uplo blas.Uplo + N int + Data []float64 + Stride int +} + +// SymmetricBand represents a symmetric matrix using the band storage scheme. +type SymmetricBand struct { + Uplo blas.Uplo + N, K int + Data []float64 + Stride int +} + +// SymmetricPacked represents a symmetric matrix using the packed storage scheme. +type SymmetricPacked struct { + Uplo blas.Uplo + N int + Data []float64 +} + +// Level 1 + +const ( + negInc = "blas64: negative vector increment" + badLength = "blas64: vector length mismatch" +) + +// Dot computes the dot product of the two vectors: +// \sum_i x[i]*y[i]. +func Dot(x, y Vector) float64 { + if x.N != y.N { + panic(badLength) + } + return blas64.Ddot(x.N, x.Data, x.Inc, y.Data, y.Inc) +} + +// Nrm2 computes the Euclidean norm of the vector x: +// sqrt(\sum_i x[i]*x[i]). +// +// Nrm2 will panic if the vector increment is negative. +func Nrm2(x Vector) float64 { + if x.Inc < 0 { + panic(negInc) + } + return blas64.Dnrm2(x.N, x.Data, x.Inc) +} + +// Asum computes the sum of the absolute values of the elements of x: +// \sum_i |x[i]|. +// +// Asum will panic if the vector increment is negative. +func Asum(x Vector) float64 { + if x.Inc < 0 { + panic(negInc) + } + return blas64.Dasum(x.N, x.Data, x.Inc) +} + +// Iamax returns the index of an element of x with the largest absolute value. +// If there are multiple such indices the earliest is returned. +// Iamax returns -1 if n == 0. +// +// Iamax will panic if the vector increment is negative. +func Iamax(x Vector) int { + if x.Inc < 0 { + panic(negInc) + } + return blas64.Idamax(x.N, x.Data, x.Inc) +} + +// Swap exchanges the elements of the two vectors: +// x[i], y[i] = y[i], x[i] for all i. +func Swap(x, y Vector) { + if x.N != y.N { + panic(badLength) + } + blas64.Dswap(x.N, x.Data, x.Inc, y.Data, y.Inc) +} + +// Copy copies the elements of x into the elements of y: +// y[i] = x[i] for all i. +// Copy requires that the lengths of x and y match and will panic otherwise. +func Copy(x, y Vector) { + if x.N != y.N { + panic(badLength) + } + blas64.Dcopy(x.N, x.Data, x.Inc, y.Data, y.Inc) +} + +// Axpy adds x scaled by alpha to y: +// y[i] += alpha*x[i] for all i. +func Axpy(alpha float64, x, y Vector) { + if x.N != y.N { + panic(badLength) + } + blas64.Daxpy(x.N, alpha, x.Data, x.Inc, y.Data, y.Inc) +} + +// Rotg computes the parameters of a Givens plane rotation so that +// ⎡ c s⎤ ⎡a⎤ ⎡r⎤ +// ⎣-s c⎦ * ⎣b⎦ = ⎣0⎦ +// where a and b are the Cartesian coordinates of a given point. +// c, s, and r are defined as +// r = ±Sqrt(a^2 + b^2), +// c = a/r, the cosine of the rotation angle, +// s = a/r, the sine of the rotation angle, +// and z is defined such that +// if |a| > |b|, z = s, +// otherwise if c != 0, z = 1/c, +// otherwise z = 1. 
+func Rotg(a, b float64) (c, s, r, z float64) { + return blas64.Drotg(a, b) +} + +// Rotmg computes the modified Givens rotation. See +// http://www.netlib.org/lapack/explore-html/df/deb/drotmg_8f.html +// for more details. +func Rotmg(d1, d2, b1, b2 float64) (p blas.DrotmParams, rd1, rd2, rb1 float64) { + return blas64.Drotmg(d1, d2, b1, b2) +} + +// Rot applies a plane transformation to n points represented by the vectors x +// and y: +// x[i] = c*x[i] + s*y[i], +// y[i] = -s*x[i] + c*y[i], for all i. +func Rot(x, y Vector, c, s float64) { + if x.N != y.N { + panic(badLength) + } + blas64.Drot(x.N, x.Data, x.Inc, y.Data, y.Inc, c, s) +} + +// Rotm applies the modified Givens rotation to n points represented by the +// vectors x and y. +func Rotm(x, y Vector, p blas.DrotmParams) { + if x.N != y.N { + panic(badLength) + } + blas64.Drotm(x.N, x.Data, x.Inc, y.Data, y.Inc, p) +} + +// Scal scales the vector x by alpha: +// x[i] *= alpha for all i. +// +// Scal will panic if the vector increment is negative. +func Scal(alpha float64, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + blas64.Dscal(x.N, alpha, x.Data, x.Inc) +} + +// Level 2 + +// Gemv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans or blas.ConjTrans, +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func Gemv(t blas.Transpose, alpha float64, a General, x Vector, beta float64, y Vector) { + blas64.Dgemv(t, a.Rows, a.Cols, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Gbmv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans or blas.ConjTrans, +// where A is an m×n band matrix, x and y are vectors, and alpha and beta are scalars. +func Gbmv(t blas.Transpose, alpha float64, a Band, x Vector, beta float64, y Vector) { + blas64.Dgbmv(t, a.Rows, a.Cols, a.KL, a.KU, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Trmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix, and x is a vector. +func Trmv(t blas.Transpose, a Triangular, x Vector) { + blas64.Dtrmv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +func Tbmv(t blas.Transpose, a TriangularBand, x Vector) { + blas64.Dtbmv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x is a vector. +func Tpmv(t blas.Transpose, a TriangularPacked, x Vector) { + blas64.Dtpmv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Trsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. 
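Rotg above returns the parameters of the Givens rotation that zeroes the second coordinate: with r = ±sqrt(a^2 + b^2), the cosine is c = a/r and the sine is s = b/r. Rot then applies that rotation to every pair (x[i], y[i]). A sketch with the vendored blas64 API; the values are illustrative and the printed results are approximate in floating point:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas/blas64"
)

func main() {
	// Build the rotation that maps the point (3, 4) onto (5, 0).
	c, s, r, _ := blas64.Rotg(3, 4)
	fmt.Println(c, s, r) // ≈ 0.6 0.8 5

	// Apply the same rotation to every (x[i], y[i]) pair.
	x := blas64.Vector{N: 2, Inc: 1, Data: []float64{3, 1}}
	y := blas64.Vector{N: 2, Inc: 1, Data: []float64{4, 0}}
	blas64.Rot(x, y, c, s)
	fmt.Println(x.Data) // ≈ [5 0.6]
	fmt.Println(y.Data) // ≈ [0 -0.8]
}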
+func Trsv(t blas.Transpose, a Triangular, x Vector) { + blas64.Dtrsv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular band matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tbsv(t blas.Transpose, a TriangularBand, x Vector) { + blas64.Dtbsv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans or blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x and b are +// vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tpsv(t blas.Transpose, a TriangularPacked, x Vector) { + blas64.Dtpsv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Symv computes +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric matrix, x and y are vectors, and alpha and +// beta are scalars. +func Symv(alpha float64, a Symmetric, x Vector, beta float64, y Vector) { + blas64.Dsymv(a.Uplo, a.N, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Sbmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric band matrix, x and y are vectors, and alpha +// and beta are scalars. +func Sbmv(alpha float64, a SymmetricBand, x Vector, beta float64, y Vector) { + blas64.Dsbmv(a.Uplo, a.N, a.K, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Spmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. +func Spmv(alpha float64, a SymmetricPacked, x Vector, beta float64, y Vector) { + blas64.Dspmv(a.Uplo, a.N, alpha, a.Data, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Ger performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Ger(alpha float64, x, y Vector, a General) { + blas64.Dger(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Syr performs a rank-1 update +// A += alpha * x * x^T, +// where A is an n×n symmetric matrix, x is a vector, and alpha is a scalar. +func Syr(alpha float64, x Vector, a Symmetric) { + blas64.Dsyr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data, a.Stride) +} + +// Spr performs the rank-1 update +// A += alpha * x * x^T, +// where A is an n×n symmetric matrix in packed format, x is a vector, and +// alpha is a scalar. +func Spr(alpha float64, x Vector, a SymmetricPacked) { + blas64.Dspr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data) +} + +// Syr2 performs a rank-2 update +// A += alpha * x * y^T + alpha * y * x^T, +// where A is a symmetric n×n matrix, x and y are vectors, and alpha is a scalar. 
+func Syr2(alpha float64, x, y Vector, a Symmetric) { + blas64.Dsyr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Spr2 performs a rank-2 update +// A += alpha * x * y^T + alpha * y * x^T, +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha is a scalar. +func Spr2(alpha float64, x, y Vector, a SymmetricPacked) { + blas64.Dspr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data) +} + +// Level 3 + +// Gemm computes +// C = alpha * A * B + beta * C, +// where A, B, and C are dense matrices, and alpha and beta are scalars. +// tA and tB specify whether A or B are transposed. +func Gemm(tA, tB blas.Transpose, alpha float64, a, b General, beta float64, c General) { + var m, n, k int + if tA == blas.NoTrans { + m, k = a.Rows, a.Cols + } else { + m, k = a.Cols, a.Rows + } + if tB == blas.NoTrans { + n = b.Cols + } else { + n = b.Rows + } + blas64.Dgemm(tA, tB, m, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Symm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and +// alpha is a scalar. +func Symm(s blas.Side, alpha float64, a Symmetric, b General, beta float64, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + blas64.Dsymm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Syrk performs a symmetric rank-k update +// C = alpha * A * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * A + beta * C, if t == blas.Trans or blas.ConjTrans, +// where C is an n×n symmetric matrix, A is an n×k matrix if t == blas.NoTrans and +// a k×n matrix otherwise, and alpha and beta are scalars. +func Syrk(t blas.Transpose, alpha float64, a General, beta float64, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + blas64.Dsyrk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Syr2k performs a symmetric rank-2k update +// C = alpha * A * B^T + alpha * B * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * B + alpha * B^T * A + beta * C, if t == blas.Trans or blas.ConjTrans, +// where C is an n×n symmetric matrix, A and B are n×k matrices if t == NoTrans +// and k×n matrices otherwise, and alpha and beta are scalars. +func Syr2k(t blas.Transpose, alpha float64, a, b General, beta float64, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + blas64.Dsyr2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Trmm performs +// B = alpha * A * B, if tA == blas.NoTrans and s == blas.Left, +// B = alpha * A^T * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Left, +// B = alpha * B * A, if tA == blas.NoTrans and s == blas.Right, +// B = alpha * B * A^T, if tA == blas.Trans or blas.ConjTrans, and s == blas.Right, +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is +// a scalar. 
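Syrk above writes only the Uplo triangle of C, which is what makes it cheaper than a general Gemm for products that are symmetric by construction. A sketch computing the Gram matrix A^T*A with the vendored blas64 API; the data is illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas64"
)

func main() {
	// A is 3×2, so A^T*A is 2×2.
	a := blas64.General{Rows: 3, Cols: 2, Stride: 2, Data: []float64{
		1, 0,
		1, 1,
		0, 2,
	}}
	c := blas64.Symmetric{N: 2, Stride: 2, Uplo: blas.Upper, Data: make([]float64, 4)}

	// C = 1*A^T*A + 0*C; only the upper triangle of c.Data is written.
	blas64.Syrk(blas.Trans, 1, a, 0, c)
	fmt.Println(c.Data) // [2 1 0 5]: the (1,0) slot is left untouched
}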
+func Trmm(s blas.Side, tA blas.Transpose, alpha float64, a Triangular, b General) { + blas64.Dtrmm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Trsm solves +// A * X = alpha * B, if tA == blas.NoTrans and s == blas.Left, +// A^T * X = alpha * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Left, +// X * A = alpha * B, if tA == blas.NoTrans and s == blas.Right, +// X * A^T = alpha * B, if tA == blas.Trans or blas.ConjTrans, and s == blas.Right, +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and +// alpha is a scalar. +// +// At entry to the function, X contains the values of B, and the result is +// stored in-place into X. +// +// No check is made that A is invertible. +func Trsm(s blas.Side, tA blas.Transpose, alpha float64, a Triangular, b General) { + blas64.Dtrsm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas64/conv.go b/vendor/gonum.org/v1/gonum/blas/blas64/conv.go new file mode 100644 index 0000000..882fd8a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas64/conv.go @@ -0,0 +1,277 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas64 + +import "gonum.org/v1/gonum/blas" + +// GeneralCols represents a matrix using the conventional column-major storage scheme. +type GeneralCols General + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t GeneralCols) From(a General) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas64: mismatched dimension") + } + if len(t.Data) < (t.Cols-1)*t.Stride+t.Rows { + panic("blas64: short data slice") + } + for i := 0; i < a.Rows; i++ { + for j, v := range a.Data[i*a.Stride : i*a.Stride+a.Cols] { + t.Data[i+j*t.Stride] = v + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t General) From(a GeneralCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas64: mismatched dimension") + } + if len(t.Data) < (t.Rows-1)*t.Stride+t.Cols { + panic("blas64: short data slice") + } + for j := 0; j < a.Cols; j++ { + for i, v := range a.Data[j*a.Stride : j*a.Stride+a.Rows] { + t.Data[i*t.Stride+j] = v + } + } +} + +// TriangularCols represents a matrix using the conventional column-major storage scheme. +type TriangularCols Triangular + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. +func (t TriangularCols) From(a Triangular) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas64: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. 
The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. +func (t Triangular) From(a TriangularCols) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas64: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// BandCols represents a matrix using the band column-major storage scheme. +type BandCols Band + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t BandCols) From(a Band) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas64: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("blas64: short stride for destination") + } + for i := 0; i < a.Rows; i++ { + for j := max(0, i-a.KL); j < min(i+a.KU+1, a.Cols); j++ { + t.Data[i+t.KU-j+j*t.Stride] = a.Data[j+a.KL-i+i*a.Stride] + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t Band) From(a BandCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("blas64: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("blas64: short stride for destination") + } + for j := 0; j < a.Cols; j++ { + for i := max(0, j-a.KU); i < min(j+a.KL+1, a.Rows); i++ { + t.Data[j+a.KL-i+i*a.Stride] = a.Data[i+t.KU-j+j*t.Stride] + } + } +} + +// TriangularBandCols represents a symmetric matrix using the band column-major storage scheme. +type TriangularBandCols TriangularBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t TriangularBandCols) From(a TriangularBand) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.K != a.K { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas64: mismatched BLAS diag") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. 
The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t TriangularBand) From(a TriangularBandCols) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.K != a.K { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("blas64: mismatched BLAS diag") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas64/conv_symmetric.go b/vendor/gonum.org/v1/gonum/blas/blas64/conv_symmetric.go new file mode 100644 index 0000000..5146f1a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas64/conv_symmetric.go @@ -0,0 +1,153 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package blas64 + +import "gonum.org/v1/gonum/blas" + +// SymmetricCols represents a matrix using the conventional column-major storage scheme. +type SymmetricCols Symmetric + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t SymmetricCols) From(a Symmetric) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t Symmetric) From(a SymmetricCols) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// SymmetricBandCols represents a symmetric matrix using the band column-major storage scheme. +type SymmetricBandCols SymmetricBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
+func (t SymmetricBandCols) From(a SymmetricBand) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.K != a.K { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t SymmetricBand) From(a SymmetricBandCols) { + if t.N != a.N { + panic("blas64: mismatched dimension") + } + if t.K != a.K { + panic("blas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("blas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("blas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("blas64: mismatched BLAS uplo") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("blas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} diff --git a/vendor/gonum.org/v1/gonum/blas/blas64/doc.go b/vendor/gonum.org/v1/gonum/blas/blas64/doc.go new file mode 100644 index 0000000..7410cee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/blas64/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package blas64 provides a simple interface to the float64 BLAS API. +package blas64 // import "gonum.org/v1/gonum/blas/blas64" diff --git a/vendor/gonum.org/v1/gonum/blas/cblas128/cblas128.go b/vendor/gonum.org/v1/gonum/blas/cblas128/cblas128.go new file mode 100644 index 0000000..1205da8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas128/cblas128.go @@ -0,0 +1,508 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas128 + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/gonum" +) + +var cblas128 blas.Complex128 = gonum.Implementation{} + +// Use sets the BLAS complex128 implementation to be used by subsequent BLAS calls. +// The default implementation is +// gonum.org/v1/gonum/blas/gonum.Implementation. +func Use(b blas.Complex128) { + cblas128 = b +} + +// Implementation returns the current BLAS complex128 implementation. +// +// Implementation allows direct calls to the current the BLAS complex128 implementation +// giving finer control of parameters. +func Implementation() blas.Complex128 { + return cblas128 +} + +// Vector represents a vector with an associated element increment. +type Vector struct { + Inc int + Data []complex128 +} + +// General represents a matrix using the conventional storage scheme. 
+type General struct { + Rows, Cols int + Stride int + Data []complex128 +} + +// Band represents a band matrix using the band storage scheme. +type Band struct { + Rows, Cols int + KL, KU int + Stride int + Data []complex128 +} + +// Triangular represents a triangular matrix using the conventional storage scheme. +type Triangular struct { + N int + Stride int + Data []complex128 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularBand represents a triangular matrix using the band storage scheme. +type TriangularBand struct { + N, K int + Stride int + Data []complex128 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularPacked represents a triangular matrix using the packed storage scheme. +type TriangularPacked struct { + N int + Data []complex128 + Uplo blas.Uplo + Diag blas.Diag +} + +// Symmetric represents a symmetric matrix using the conventional storage scheme. +type Symmetric struct { + N int + Stride int + Data []complex128 + Uplo blas.Uplo +} + +// SymmetricBand represents a symmetric matrix using the band storage scheme. +type SymmetricBand struct { + N, K int + Stride int + Data []complex128 + Uplo blas.Uplo +} + +// SymmetricPacked represents a symmetric matrix using the packed storage scheme. +type SymmetricPacked struct { + N int + Data []complex128 + Uplo blas.Uplo +} + +// Hermitian represents an Hermitian matrix using the conventional storage scheme. +type Hermitian Symmetric + +// HermitianBand represents an Hermitian matrix using the band storage scheme. +type HermitianBand SymmetricBand + +// HermitianPacked represents an Hermitian matrix using the packed storage scheme. +type HermitianPacked SymmetricPacked + +// Level 1 + +const negInc = "cblas128: negative vector increment" + +// Dotu computes the dot product of the two vectors without +// complex conjugation: +// x^T * y. +func Dotu(n int, x, y Vector) complex128 { + return cblas128.Zdotu(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Dotc computes the dot product of the two vectors with +// complex conjugation: +// x^H * y. +func Dotc(n int, x, y Vector) complex128 { + return cblas128.Zdotc(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Nrm2 computes the Euclidean norm of the vector x: +// sqrt(\sum_i x[i] * x[i]). +// +// Nrm2 will panic if the vector increment is negative. +func Nrm2(n int, x Vector) float64 { + if x.Inc < 0 { + panic(negInc) + } + return cblas128.Dznrm2(n, x.Data, x.Inc) +} + +// Asum computes the sum of magnitudes of the real and imaginary parts of +// elements of the vector x: +// \sum_i (|Re x[i]| + |Im x[i]|). +// +// Asum will panic if the vector increment is negative. +func Asum(n int, x Vector) float64 { + if x.Inc < 0 { + panic(negInc) + } + return cblas128.Dzasum(n, x.Data, x.Inc) +} + +// Iamax returns the index of an element of x with the largest sum of +// magnitudes of the real and imaginary parts (|Re x[i]|+|Im x[i]|). +// If there are multiple such indices, the earliest is returned. +// +// Iamax returns -1 if n == 0. +// +// Iamax will panic if the vector increment is negative. +func Iamax(n int, x Vector) int { + if x.Inc < 0 { + panic(negInc) + } + return cblas128.Izamax(n, x.Data, x.Inc) +} + +// Swap exchanges the elements of two vectors: +// x[i], y[i] = y[i], x[i] for all i. +func Swap(n int, x, y Vector) { + cblas128.Zswap(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Copy copies the elements of x into the elements of y: +// y[i] = x[i] for all i. 
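The two complex dot products above differ only in conjugation: Dotu forms x^T * y, while Dotc conjugates x first, x^H * y, which is the variant that yields a real, non-negative value for a vector against itself. A sketch with the vendored cblas128 API; the values are illustrative:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas/cblas128"
)

func main() {
	x := cblas128.Vector{Inc: 1, Data: []complex128{1 + 1i, 2 - 1i}}
	y := cblas128.Vector{Inc: 1, Data: []complex128{3, 1i}}

	fmt.Println(cblas128.Dotu(2, x, y)) // (1+1i)*3 + (2-1i)*1i = (4+5i)
	fmt.Println(cblas128.Dotc(2, x, y)) // (1-1i)*3 + (2+1i)*1i = (2-1i)

	// x against itself: Dotc gives |x|^2 as a real value.
	fmt.Println(cblas128.Dotc(2, x, x)) // (7+0i)
}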
+func Copy(n int, x, y Vector) { + cblas128.Zcopy(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Axpy computes +// y = alpha * x + y, +// where x and y are vectors, and alpha is a scalar. +func Axpy(n int, alpha complex128, x, y Vector) { + cblas128.Zaxpy(n, alpha, x.Data, x.Inc, y.Data, y.Inc) +} + +// Scal computes +// x = alpha * x, +// where x is a vector, and alpha is a scalar. +// +// Scal will panic if the vector increment is negative. +func Scal(n int, alpha complex128, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + cblas128.Zscal(n, alpha, x.Data, x.Inc) +} + +// Dscal computes +// x = alpha * x, +// where x is a vector, and alpha is a real scalar. +// +// Dscal will panic if the vector increment is negative. +func Dscal(n int, alpha float64, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + cblas128.Zdscal(n, alpha, x.Data, x.Inc) +} + +// Level 2 + +// Gemv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans, +// y = alpha * A^H * x + beta * y, if t == blas.ConjTrans, +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are +// scalars. +func Gemv(t blas.Transpose, alpha complex128, a General, x Vector, beta complex128, y Vector) { + cblas128.Zgemv(t, a.Rows, a.Cols, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Gbmv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans, +// y = alpha * A^H * x + beta * y, if t == blas.ConjTrans, +// where A is an m×n band matrix, x and y are vectors, and alpha and beta are +// scalars. +func Gbmv(t blas.Transpose, alpha complex128, a Band, x Vector, beta complex128, y Vector) { + cblas128.Zgbmv(t, a.Rows, a.Cols, a.KL, a.KU, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Trmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular matrix, and x is a vector. +func Trmv(t blas.Transpose, a Triangular, x Vector) { + cblas128.Ztrmv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +func Tbmv(t blas.Transpose, a TriangularBand, x Vector) { + cblas128.Ztbmv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x is a vector. +func Tpmv(t blas.Transpose, a TriangularPacked, x Vector) { + cblas128.Ztpmv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Trsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular matrix and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. 
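+//
+// Editor's illustrative sketch (not part of the upstream gonum documentation),
+// assuming the row-major Triangular layout used by this package: solving a
+// 2×2 upper-triangular system in place.
+//
+//	a := Triangular{
+//		N: 2, Stride: 2,
+//		Uplo: blas.Upper, Diag: blas.NonUnit,
+//		Data: []complex128{
+//			2, 1,
+//			0, 4,
+//		},
+//	}
+//	x := Vector{Inc: 1, Data: []complex128{5, 8}} // x holds b on entry.
+//	Trsv(blas.NoTrans, a, x)                      // x.Data is now {1.5, 2}.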
+func Trsv(t blas.Transpose, a Triangular, x Vector) { + cblas128.Ztrsv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tbsv(t blas.Transpose, a TriangularBand, x Vector) { + cblas128.Ztbsv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular matrix in packed format and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tpsv(t blas.Transpose, a TriangularPacked, x Vector) { + cblas128.Ztpsv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Hemv computes +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian matrix, x and y are vectors, and alpha and +// beta are scalars. +func Hemv(alpha complex128, a Hermitian, x Vector, beta complex128, y Vector) { + cblas128.Zhemv(a.Uplo, a.N, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Hbmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian band matrix, x and y are vectors, and alpha +// and beta are scalars. +func Hbmv(alpha complex128, a HermitianBand, x Vector, beta complex128, y Vector) { + cblas128.Zhbmv(a.Uplo, a.N, a.K, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Hpmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. +func Hpmv(alpha complex128, a HermitianPacked, x Vector, beta complex128, y Vector) { + cblas128.Zhpmv(a.Uplo, a.N, alpha, a.Data, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Geru performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Geru(alpha complex128, x, y Vector, a General) { + cblas128.Zgeru(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Gerc performs a rank-1 update +// A += alpha * x * y^H, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Gerc(alpha complex128, x, y Vector, a General) { + cblas128.Zgerc(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Her performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n Hermitian matrix, x and y are vectors, and alpha is a scalar. +func Her(alpha float64, x Vector, a Hermitian) { + cblas128.Zher(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data, a.Stride) +} + +// Hpr performs a rank-1 update +// A += alpha * x * x^H, +// where A is an n×n Hermitian matrix in packed format, x is a vector, and +// alpha is a scalar. 
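+//
+// Editor's illustrative sketch (not part of the upstream gonum documentation),
+// assuming the row-major packed layout in which the stored upper triangle of
+// a 2×2 matrix occupies Data[0:3] in the order (0,0), (0,1), (1,1):
+//
+//	x := Vector{Inc: 1, Data: []complex128{1, 1i}}
+//	a := HermitianPacked{N: 2, Uplo: blas.Upper, Data: make([]complex128, 3)}
+//	Hpr(1, x, a) // a.Data is now {1, -1i, 1}.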
+func Hpr(alpha float64, x Vector, a HermitianPacked) { + cblas128.Zhpr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data) +} + +// Her2 performs a rank-2 update +// A += alpha * x * y^H + conj(alpha) * y * x^H, +// where A is an n×n Hermitian matrix, x and y are vectors, and alpha is a scalar. +func Her2(alpha complex128, x, y Vector, a Hermitian) { + cblas128.Zher2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Hpr2 performs a rank-2 update +// A += alpha * x * y^H + conj(alpha) * y * x^H, +// where A is an n×n Hermitian matrix in packed format, x and y are vectors, +// and alpha is a scalar. +func Hpr2(alpha complex128, x, y Vector, a HermitianPacked) { + cblas128.Zhpr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data) +} + +// Level 3 + +// Gemm computes +// C = alpha * A * B + beta * C, +// where A, B, and C are dense matrices, and alpha and beta are scalars. +// tA and tB specify whether A or B are transposed or conjugated. +func Gemm(tA, tB blas.Transpose, alpha complex128, a, b General, beta complex128, c General) { + var m, n, k int + if tA == blas.NoTrans { + m, k = a.Rows, a.Cols + } else { + m, k = a.Cols, a.Rows + } + if tB == blas.NoTrans { + n = b.Cols + } else { + n = b.Rows + } + cblas128.Zgemm(tA, tB, m, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Symm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and +// alpha and beta are scalars. +func Symm(s blas.Side, alpha complex128, a Symmetric, b General, beta complex128, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + cblas128.Zsymm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Syrk performs a symmetric rank-k update +// C = alpha * A * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * A + beta * C, if t == blas.Trans, +// where C is an n×n symmetric matrix, A is an n×k matrix if t == blas.NoTrans +// and a k×n matrix otherwise, and alpha and beta are scalars. +func Syrk(t blas.Transpose, alpha complex128, a General, beta complex128, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas128.Zsyrk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Syr2k performs a symmetric rank-2k update +// C = alpha * A * B^T + alpha * B * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * B + alpha * B^T * A + beta * C, if t == blas.Trans, +// where C is an n×n symmetric matrix, A and B are n×k matrices if +// t == blas.NoTrans and k×n otherwise, and alpha and beta are scalars. 
+func Syr2k(t blas.Transpose, alpha complex128, a, b General, beta complex128, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas128.Zsyr2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Trmm performs +// B = alpha * A * B, if tA == blas.NoTrans and s == blas.Left, +// B = alpha * A^T * B, if tA == blas.Trans and s == blas.Left, +// B = alpha * A^H * B, if tA == blas.ConjTrans and s == blas.Left, +// B = alpha * B * A, if tA == blas.NoTrans and s == blas.Right, +// B = alpha * B * A^T, if tA == blas.Trans and s == blas.Right, +// B = alpha * B * A^H, if tA == blas.ConjTrans and s == blas.Right, +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is +// a scalar. +func Trmm(s blas.Side, tA blas.Transpose, alpha complex128, a Triangular, b General) { + cblas128.Ztrmm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Trsm solves +// A * X = alpha * B, if tA == blas.NoTrans and s == blas.Left, +// A^T * X = alpha * B, if tA == blas.Trans and s == blas.Left, +// A^H * X = alpha * B, if tA == blas.ConjTrans and s == blas.Left, +// X * A = alpha * B, if tA == blas.NoTrans and s == blas.Right, +// X * A^T = alpha * B, if tA == blas.Trans and s == blas.Right, +// X * A^H = alpha * B, if tA == blas.ConjTrans and s == blas.Right, +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and +// alpha is a scalar. +// +// At entry to the function, b contains the values of B, and the result is +// stored in-place into b. +// +// No check is made that A is invertible. +func Trsm(s blas.Side, tA blas.Transpose, alpha complex128, a Triangular, b General) { + cblas128.Ztrsm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Hemm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m Hermitian matrix, B and C are m×n matrices, and +// alpha and beta are scalars. +func Hemm(s blas.Side, alpha complex128, a Hermitian, b General, beta complex128, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + cblas128.Zhemm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Herk performs the Hermitian rank-k update +// C = alpha * A * A^H + beta*C, if t == blas.NoTrans, +// C = alpha * A^H * A + beta*C, if t == blas.ConjTrans, +// where C is an n×n Hermitian matrix, A is an n×k matrix if t == blas.NoTrans +// and a k×n matrix otherwise, and alpha and beta are scalars. +func Herk(t blas.Transpose, alpha float64, a General, beta float64, c Hermitian) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas128.Zherk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Her2k performs the Hermitian rank-2k update +// C = alpha * A * B^H + conj(alpha) * B * A^H + beta * C, if t == blas.NoTrans, +// C = alpha * A^H * B + conj(alpha) * B^H * A + beta * C, if t == blas.ConjTrans, +// where C is an n×n Hermitian matrix, A and B are n×k matrices if t == NoTrans +// and k×n matrices otherwise, and alpha and beta are scalars. 
+func Her2k(t blas.Transpose, alpha complex128, a, b General, beta float64, c Hermitian) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas128.Zher2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas128/conv.go b/vendor/gonum.org/v1/gonum/blas/cblas128/conv.go new file mode 100644 index 0000000..93e3cd2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas128/conv.go @@ -0,0 +1,279 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas128 + +import "gonum.org/v1/gonum/blas" + +// GeneralCols represents a matrix using the conventional column-major storage scheme. +type GeneralCols General + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t GeneralCols) From(a General) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas128: mismatched dimension") + } + if len(t.Data) < (t.Cols-1)*t.Stride+t.Rows { + panic("cblas128: short data slice") + } + for i := 0; i < a.Rows; i++ { + for j, v := range a.Data[i*a.Stride : i*a.Stride+a.Cols] { + t.Data[i+j*t.Stride] = v + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t General) From(a GeneralCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas128: mismatched dimension") + } + if len(t.Data) < (t.Rows-1)*t.Stride+t.Cols { + panic("cblas128: short data slice") + } + for j := 0; j < a.Cols; j++ { + for i, v := range a.Data[j*a.Stride : j*a.Stride+a.Rows] { + t.Data[i*t.Stride+j] = v + } + } +} + +// TriangularCols represents a matrix using the conventional column-major storage scheme. +type TriangularCols Triangular + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. +func (t TriangularCols) From(a Triangular) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas128: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. 
+func (t Triangular) From(a TriangularCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas128: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// BandCols represents a matrix using the band column-major storage scheme. +type BandCols Band + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t BandCols) From(a Band) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas128: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("cblas128: short stride for destination") + } + for i := 0; i < a.Rows; i++ { + for j := max(0, i-a.KL); j < min(i+a.KU+1, a.Cols); j++ { + t.Data[i+t.KU-j+j*t.Stride] = a.Data[j+a.KL-i+i*a.Stride] + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t Band) From(a BandCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas128: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("cblas128: short stride for destination") + } + for j := 0; j < a.Cols; j++ { + for i := max(0, j-a.KU); i < min(j+a.KL+1, a.Rows); i++ { + t.Data[j+a.KL-i+i*a.Stride] = a.Data[i+t.KU-j+j*t.Stride] + } + } +} + +// TriangularBandCols represents a symmetric matrix using the band column-major storage scheme. +type TriangularBandCols TriangularBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t TriangularBandCols) From(a TriangularBand) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas128: mismatched BLAS diag") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
+func (t TriangularBand) From(a TriangularBandCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas128: mismatched BLAS diag") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas128/conv_hermitian.go b/vendor/gonum.org/v1/gonum/blas/cblas128/conv_hermitian.go new file mode 100644 index 0000000..51c3a57 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas128/conv_hermitian.go @@ -0,0 +1,155 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas128 + +import "gonum.org/v1/gonum/blas" + +// HermitianCols represents a matrix using the conventional column-major storage scheme. +type HermitianCols Hermitian + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t HermitianCols) From(a Hermitian) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t Hermitian) From(a HermitianCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// HermitianBandCols represents an Hermitian matrix using the band column-major storage scheme. +type HermitianBandCols HermitianBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
+func (t HermitianBandCols) From(a HermitianBand) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t HermitianBand) From(a HermitianBandCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas128/conv_symmetric.go b/vendor/gonum.org/v1/gonum/blas/cblas128/conv_symmetric.go new file mode 100644 index 0000000..f1bf40c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas128/conv_symmetric.go @@ -0,0 +1,155 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas128 + +import "gonum.org/v1/gonum/blas" + +// SymmetricCols represents a matrix using the conventional column-major storage scheme. +type SymmetricCols Symmetric + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t SymmetricCols) From(a Symmetric) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. 
+func (t Symmetric) From(a SymmetricCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// SymmetricBandCols represents a symmetric matrix using the band column-major storage scheme. +type SymmetricBandCols SymmetricBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t SymmetricBandCols) From(a SymmetricBand) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t SymmetricBand) From(a SymmetricBandCols) { + if t.N != a.N { + panic("cblas128: mismatched dimension") + } + if t.K != a.K { + panic("cblas128: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas128: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas128: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas128: mismatched BLAS uplo") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas128: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas128/doc.go b/vendor/gonum.org/v1/gonum/blas/cblas128/doc.go new file mode 100644 index 0000000..09719b1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas128/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cblas128 provides a simple interface to the complex128 BLAS API. +package cblas128 // import "gonum.org/v1/gonum/blas/cblas128" diff --git a/vendor/gonum.org/v1/gonum/blas/cblas64/cblas64.go b/vendor/gonum.org/v1/gonum/blas/cblas64/cblas64.go new file mode 100644 index 0000000..042a7da --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas64/cblas64.go @@ -0,0 +1,508 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cblas64 + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/gonum" +) + +var cblas64 blas.Complex64 = gonum.Implementation{} + +// Use sets the BLAS complex64 implementation to be used by subsequent BLAS calls. +// The default implementation is +// gonum.org/v1/gonum/blas/gonum.Implementation. +func Use(b blas.Complex64) { + cblas64 = b +} + +// Implementation returns the current BLAS complex64 implementation. +// +// Implementation allows direct calls to the current the BLAS complex64 implementation +// giving finer control of parameters. +func Implementation() blas.Complex64 { + return cblas64 +} + +// Vector represents a vector with an associated element increment. +type Vector struct { + Inc int + Data []complex64 +} + +// General represents a matrix using the conventional storage scheme. +type General struct { + Rows, Cols int + Stride int + Data []complex64 +} + +// Band represents a band matrix using the band storage scheme. +type Band struct { + Rows, Cols int + KL, KU int + Stride int + Data []complex64 +} + +// Triangular represents a triangular matrix using the conventional storage scheme. +type Triangular struct { + N int + Stride int + Data []complex64 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularBand represents a triangular matrix using the band storage scheme. +type TriangularBand struct { + N, K int + Stride int + Data []complex64 + Uplo blas.Uplo + Diag blas.Diag +} + +// TriangularPacked represents a triangular matrix using the packed storage scheme. +type TriangularPacked struct { + N int + Data []complex64 + Uplo blas.Uplo + Diag blas.Diag +} + +// Symmetric represents a symmetric matrix using the conventional storage scheme. +type Symmetric struct { + N int + Stride int + Data []complex64 + Uplo blas.Uplo +} + +// SymmetricBand represents a symmetric matrix using the band storage scheme. +type SymmetricBand struct { + N, K int + Stride int + Data []complex64 + Uplo blas.Uplo +} + +// SymmetricPacked represents a symmetric matrix using the packed storage scheme. +type SymmetricPacked struct { + N int + Data []complex64 + Uplo blas.Uplo +} + +// Hermitian represents an Hermitian matrix using the conventional storage scheme. +type Hermitian Symmetric + +// HermitianBand represents an Hermitian matrix using the band storage scheme. +type HermitianBand SymmetricBand + +// HermitianPacked represents an Hermitian matrix using the packed storage scheme. +type HermitianPacked SymmetricPacked + +// Level 1 + +const negInc = "cblas64: negative vector increment" + +// Dotu computes the dot product of the two vectors without +// complex conjugation: +// x^T * y +func Dotu(n int, x, y Vector) complex64 { + return cblas64.Cdotu(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Dotc computes the dot product of the two vectors with +// complex conjugation: +// x^H * y. +func Dotc(n int, x, y Vector) complex64 { + return cblas64.Cdotc(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Nrm2 computes the Euclidean norm of the vector x: +// sqrt(\sum_i x[i] * x[i]). +// +// Nrm2 will panic if the vector increment is negative. +func Nrm2(n int, x Vector) float32 { + if x.Inc < 0 { + panic(negInc) + } + return cblas64.Scnrm2(n, x.Data, x.Inc) +} + +// Asum computes the sum of magnitudes of the real and imaginary parts of +// elements of the vector x: +// \sum_i (|Re x[i]| + |Im x[i]|). +// +// Asum will panic if the vector increment is negative. 
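+//
+// Editor's illustrative sketch (not part of the upstream gonum documentation):
+//
+//	x := Vector{Inc: 1, Data: []complex64{3 + 4i, -1 - 2i}}
+//	s := Asum(2, x) // s == (|3| + |4|) + (|-1| + |-2|) == 10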
+func Asum(n int, x Vector) float32 { + if x.Inc < 0 { + panic(negInc) + } + return cblas64.Scasum(n, x.Data, x.Inc) +} + +// Iamax returns the index of an element of x with the largest sum of +// magnitudes of the real and imaginary parts (|Re x[i]|+|Im x[i]|). +// If there are multiple such indices, the earliest is returned. +// +// Iamax returns -1 if n == 0. +// +// Iamax will panic if the vector increment is negative. +func Iamax(n int, x Vector) int { + if x.Inc < 0 { + panic(negInc) + } + return cblas64.Icamax(n, x.Data, x.Inc) +} + +// Swap exchanges the elements of two vectors: +// x[i], y[i] = y[i], x[i] for all i. +func Swap(n int, x, y Vector) { + cblas64.Cswap(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Copy copies the elements of x into the elements of y: +// y[i] = x[i] for all i. +func Copy(n int, x, y Vector) { + cblas64.Ccopy(n, x.Data, x.Inc, y.Data, y.Inc) +} + +// Axpy computes +// y = alpha * x + y, +// where x and y are vectors, and alpha is a scalar. +func Axpy(n int, alpha complex64, x, y Vector) { + cblas64.Caxpy(n, alpha, x.Data, x.Inc, y.Data, y.Inc) +} + +// Scal computes +// x = alpha * x, +// where x is a vector, and alpha is a scalar. +// +// Scal will panic if the vector increment is negative. +func Scal(n int, alpha complex64, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + cblas64.Cscal(n, alpha, x.Data, x.Inc) +} + +// Dscal computes +// x = alpha * x, +// where x is a vector, and alpha is a real scalar. +// +// Dscal will panic if the vector increment is negative. +func Dscal(n int, alpha float32, x Vector) { + if x.Inc < 0 { + panic(negInc) + } + cblas64.Csscal(n, alpha, x.Data, x.Inc) +} + +// Level 2 + +// Gemv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans, +// y = alpha * A^H * x + beta * y, if t == blas.ConjTrans, +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are +// scalars. +func Gemv(t blas.Transpose, alpha complex64, a General, x Vector, beta complex64, y Vector) { + cblas64.Cgemv(t, a.Rows, a.Cols, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Gbmv computes +// y = alpha * A * x + beta * y, if t == blas.NoTrans, +// y = alpha * A^T * x + beta * y, if t == blas.Trans, +// y = alpha * A^H * x + beta * y, if t == blas.ConjTrans, +// where A is an m×n band matrix, x and y are vectors, and alpha and beta are +// scalars. +func Gbmv(t blas.Transpose, alpha complex64, a Band, x Vector, beta complex64, y Vector) { + cblas64.Cgbmv(t, a.Rows, a.Cols, a.KL, a.KU, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Trmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular matrix, and x is a vector. +func Trmv(t blas.Transpose, a Triangular, x Vector) { + cblas64.Ctrmv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +func Tbmv(t blas.Transpose, a TriangularBand, x Vector) { + cblas64.Ctbmv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpmv computes +// x = A * x, if t == blas.NoTrans, +// x = A^T * x, if t == blas.Trans, +// x = A^H * x, if t == blas.ConjTrans, +// where A is an n×n triangular matrix in packed format, and x is a vector. 
+func Tpmv(t blas.Transpose, a TriangularPacked, x Vector) { + cblas64.Ctpmv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Trsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular matrix and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Trsv(t blas.Transpose, a Triangular, x Vector) { + cblas64.Ctrsv(a.Uplo, t, a.Diag, a.N, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tbsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular band matrix, and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tbsv(t blas.Transpose, a TriangularBand, x Vector) { + cblas64.Ctbsv(a.Uplo, t, a.Diag, a.N, a.K, a.Data, a.Stride, x.Data, x.Inc) +} + +// Tpsv solves +// A * x = b, if t == blas.NoTrans, +// A^T * x = b, if t == blas.Trans, +// A^H * x = b, if t == blas.ConjTrans, +// where A is an n×n triangular matrix in packed format and x is a vector. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func Tpsv(t blas.Transpose, a TriangularPacked, x Vector) { + cblas64.Ctpsv(a.Uplo, t, a.Diag, a.N, a.Data, x.Data, x.Inc) +} + +// Hemv computes +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian matrix, x and y are vectors, and alpha and +// beta are scalars. +func Hemv(alpha complex64, a Hermitian, x Vector, beta complex64, y Vector) { + cblas64.Chemv(a.Uplo, a.N, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Hbmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian band matrix, x and y are vectors, and alpha +// and beta are scalars. +func Hbmv(alpha complex64, a HermitianBand, x Vector, beta complex64, y Vector) { + cblas64.Chbmv(a.Uplo, a.N, a.K, alpha, a.Data, a.Stride, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Hpmv performs +// y = alpha * A * x + beta * y, +// where A is an n×n Hermitian matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. +func Hpmv(alpha complex64, a HermitianPacked, x Vector, beta complex64, y Vector) { + cblas64.Chpmv(a.Uplo, a.N, alpha, a.Data, x.Data, x.Inc, beta, y.Data, y.Inc) +} + +// Geru performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Geru(alpha complex64, x, y Vector, a General) { + cblas64.Cgeru(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Gerc performs a rank-1 update +// A += alpha * x * y^H, +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. 
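+//
+// Editor's illustrative sketch (not part of the upstream gonum documentation),
+// assuming the row-major General layout used by this package: a rank-1 update
+// of a 2×2 zero matrix.
+//
+//	x := Vector{Inc: 1, Data: []complex64{1, 1i}}
+//	y := Vector{Inc: 1, Data: []complex64{1, 2}}
+//	a := General{Rows: 2, Cols: 2, Stride: 2, Data: make([]complex64, 4)}
+//	Gerc(1, x, y, a) // a.Data is now {1, 2, 1i, 2i}.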
+func Gerc(alpha complex64, x, y Vector, a General) { + cblas64.Cgerc(a.Rows, a.Cols, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Her performs a rank-1 update +// A += alpha * x * y^T, +// where A is an m×n Hermitian matrix, x and y are vectors, and alpha is a scalar. +func Her(alpha float32, x Vector, a Hermitian) { + cblas64.Cher(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data, a.Stride) +} + +// Hpr performs a rank-1 update +// A += alpha * x * x^H, +// where A is an n×n Hermitian matrix in packed format, x is a vector, and +// alpha is a scalar. +func Hpr(alpha float32, x Vector, a HermitianPacked) { + cblas64.Chpr(a.Uplo, a.N, alpha, x.Data, x.Inc, a.Data) +} + +// Her2 performs a rank-2 update +// A += alpha * x * y^H + conj(alpha) * y * x^H, +// where A is an n×n Hermitian matrix, x and y are vectors, and alpha is a scalar. +func Her2(alpha complex64, x, y Vector, a Hermitian) { + cblas64.Cher2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data, a.Stride) +} + +// Hpr2 performs a rank-2 update +// A += alpha * x * y^H + conj(alpha) * y * x^H, +// where A is an n×n Hermitian matrix in packed format, x and y are vectors, +// and alpha is a scalar. +func Hpr2(alpha complex64, x, y Vector, a HermitianPacked) { + cblas64.Chpr2(a.Uplo, a.N, alpha, x.Data, x.Inc, y.Data, y.Inc, a.Data) +} + +// Level 3 + +// Gemm computes +// C = alpha * A * B + beta * C, +// where A, B, and C are dense matrices, and alpha and beta are scalars. +// tA and tB specify whether A or B are transposed or conjugated. +func Gemm(tA, tB blas.Transpose, alpha complex64, a, b General, beta complex64, c General) { + var m, n, k int + if tA == blas.NoTrans { + m, k = a.Rows, a.Cols + } else { + m, k = a.Cols, a.Rows + } + if tB == blas.NoTrans { + n = b.Cols + } else { + n = b.Rows + } + cblas64.Cgemm(tA, tB, m, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Symm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and +// alpha and beta are scalars. +func Symm(s blas.Side, alpha complex64, a Symmetric, b General, beta complex64, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + cblas64.Csymm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Syrk performs a symmetric rank-k update +// C = alpha * A * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * A + beta * C, if t == blas.Trans, +// where C is an n×n symmetric matrix, A is an n×k matrix if t == blas.NoTrans +// and a k×n matrix otherwise, and alpha and beta are scalars. +func Syrk(t blas.Transpose, alpha complex64, a General, beta complex64, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas64.Csyrk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Syr2k performs a symmetric rank-2k update +// C = alpha * A * B^T + alpha * B * A^T + beta * C, if t == blas.NoTrans, +// C = alpha * A^T * B + alpha * B^T * A + beta * C, if t == blas.Trans, +// where C is an n×n symmetric matrix, A and B are n×k matrices if +// t == blas.NoTrans and k×n otherwise, and alpha and beta are scalars. 
+func Syr2k(t blas.Transpose, alpha complex64, a, b General, beta complex64, c Symmetric) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas64.Csyr2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Trmm performs +// B = alpha * A * B, if tA == blas.NoTrans and s == blas.Left, +// B = alpha * A^T * B, if tA == blas.Trans and s == blas.Left, +// B = alpha * A^H * B, if tA == blas.ConjTrans and s == blas.Left, +// B = alpha * B * A, if tA == blas.NoTrans and s == blas.Right, +// B = alpha * B * A^T, if tA == blas.Trans and s == blas.Right, +// B = alpha * B * A^H, if tA == blas.ConjTrans and s == blas.Right, +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is +// a scalar. +func Trmm(s blas.Side, tA blas.Transpose, alpha complex64, a Triangular, b General) { + cblas64.Ctrmm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Trsm solves +// A * X = alpha * B, if tA == blas.NoTrans and s == blas.Left, +// A^T * X = alpha * B, if tA == blas.Trans and s == blas.Left, +// A^H * X = alpha * B, if tA == blas.ConjTrans and s == blas.Left, +// X * A = alpha * B, if tA == blas.NoTrans and s == blas.Right, +// X * A^T = alpha * B, if tA == blas.Trans and s == blas.Right, +// X * A^H = alpha * B, if tA == blas.ConjTrans and s == blas.Right, +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and +// alpha is a scalar. +// +// At entry to the function, b contains the values of B, and the result is +// stored in-place into b. +// +// No check is made that A is invertible. +func Trsm(s blas.Side, tA blas.Transpose, alpha complex64, a Triangular, b General) { + cblas64.Ctrsm(s, a.Uplo, tA, a.Diag, b.Rows, b.Cols, alpha, a.Data, a.Stride, b.Data, b.Stride) +} + +// Hemm performs +// C = alpha * A * B + beta * C, if s == blas.Left, +// C = alpha * B * A + beta * C, if s == blas.Right, +// where A is an n×n or m×m Hermitian matrix, B and C are m×n matrices, and +// alpha and beta are scalars. +func Hemm(s blas.Side, alpha complex64, a Hermitian, b General, beta complex64, c General) { + var m, n int + if s == blas.Left { + m, n = a.N, b.Cols + } else { + m, n = b.Rows, a.N + } + cblas64.Chemm(s, a.Uplo, m, n, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} + +// Herk performs the Hermitian rank-k update +// C = alpha * A * A^H + beta*C, if t == blas.NoTrans, +// C = alpha * A^H * A + beta*C, if t == blas.ConjTrans, +// where C is an n×n Hermitian matrix, A is an n×k matrix if t == blas.NoTrans +// and a k×n matrix otherwise, and alpha and beta are scalars. +func Herk(t blas.Transpose, alpha float32, a General, beta float32, c Hermitian) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas64.Cherk(c.Uplo, t, n, k, alpha, a.Data, a.Stride, beta, c.Data, c.Stride) +} + +// Her2k performs the Hermitian rank-2k update +// C = alpha * A * B^H + conj(alpha) * B * A^H + beta * C, if t == blas.NoTrans, +// C = alpha * A^H * B + conj(alpha) * B^H * A + beta * C, if t == blas.ConjTrans, +// where C is an n×n Hermitian matrix, A and B are n×k matrices if t == NoTrans +// and k×n matrices otherwise, and alpha and beta are scalars. 
+func Her2k(t blas.Transpose, alpha complex64, a, b General, beta float32, c Hermitian) { + var n, k int + if t == blas.NoTrans { + n, k = a.Rows, a.Cols + } else { + n, k = a.Cols, a.Rows + } + cblas64.Cher2k(c.Uplo, t, n, k, alpha, a.Data, a.Stride, b.Data, b.Stride, beta, c.Data, c.Stride) +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas64/conv.go b/vendor/gonum.org/v1/gonum/blas/cblas64/conv.go new file mode 100644 index 0000000..4ba5148 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas64/conv.go @@ -0,0 +1,279 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas64 + +import "gonum.org/v1/gonum/blas" + +// GeneralCols represents a matrix using the conventional column-major storage scheme. +type GeneralCols General + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t GeneralCols) From(a General) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas64: mismatched dimension") + } + if len(t.Data) < (t.Cols-1)*t.Stride+t.Rows { + panic("cblas64: short data slice") + } + for i := 0; i < a.Rows; i++ { + for j, v := range a.Data[i*a.Stride : i*a.Stride+a.Cols] { + t.Data[i+j*t.Stride] = v + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions as a and have adequate backing +// data storage. +func (t General) From(a GeneralCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas64: mismatched dimension") + } + if len(t.Data) < (t.Rows-1)*t.Stride+t.Cols { + panic("cblas64: short data slice") + } + for j := 0; j < a.Cols; j++ { + for i, v := range a.Data[j*a.Stride : j*a.Stride+a.Rows] { + t.Data[i*t.Stride+j] = v + } + } +} + +// TriangularCols represents a matrix using the conventional column-major storage scheme. +type TriangularCols Triangular + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. +func (t TriangularCols) From(a Triangular) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas64: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, uplo and diag as a and have +// adequate backing data storage. 
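+//
+// Editor's illustrative sketch (not part of the upstream gonum documentation):
+// converting a 2×2 upper-triangular matrix from column-major to row-major
+// storage; entries outside the stored triangle are assumed to be left
+// untouched.
+//
+//	src := TriangularCols{
+//		N: 2, Stride: 2, Uplo: blas.Upper, Diag: blas.NonUnit,
+//		Data: []complex64{2, 0, 1, 4}, // column-major; index 1 lies below the diagonal
+//	}
+//	dst := Triangular{
+//		N: 2, Stride: 2, Uplo: blas.Upper, Diag: blas.NonUnit,
+//		Data: make([]complex64, 4),
+//	}
+//	dst.From(src) // dst.Data is now {2, 1, 0, 4} in row-major order.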
+func (t Triangular) From(a TriangularCols) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas64: mismatched BLAS diag") + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.All: + for i := 0; i < a.N; i++ { + for j := 0; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// BandCols represents a matrix using the band column-major storage scheme. +type BandCols Band + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t BandCols) From(a Band) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas64: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("cblas64: short stride for destination") + } + for i := 0; i < a.Rows; i++ { + for j := max(0, i-a.KL); j < min(i+a.KU+1, a.Cols); j++ { + t.Data[i+t.KU-j+j*t.Stride] = a.Data[j+a.KL-i+i*a.Stride] + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and bandwidth as a and have +// adequate backing data storage. +func (t Band) From(a BandCols) { + if t.Rows != a.Rows || t.Cols != a.Cols { + panic("cblas64: mismatched dimension") + } + if t.KL != a.KL || t.KU != a.KU { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.KL+a.KU+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.KL+t.KU+1 { + panic("cblas64: short stride for destination") + } + for j := 0; j < a.Cols; j++ { + for i := max(0, j-a.KU); i < min(j+a.KL+1, a.Rows); i++ { + t.Data[j+a.KL-i+i*a.Stride] = a.Data[i+t.KU-j+j*t.Stride] + } + } +} + +// TriangularBandCols represents a symmetric matrix using the band column-major storage scheme. +type TriangularBandCols TriangularBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t TriangularBandCols) From(a TriangularBand) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.K != a.K { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas64: mismatched BLAS diag") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
+func (t TriangularBand) From(a TriangularBandCols) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.K != a.K { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + if t.Diag != a.Diag { + panic("cblas64: mismatched BLAS diag") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas64/conv_hermitian.go b/vendor/gonum.org/v1/gonum/blas/cblas64/conv_hermitian.go new file mode 100644 index 0000000..13a9e9e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas64/conv_hermitian.go @@ -0,0 +1,155 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cblas64 + +import "gonum.org/v1/gonum/blas" + +// HermitianCols represents a matrix using the conventional column-major storage scheme. +type HermitianCols Hermitian + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t HermitianCols) From(a Hermitian) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i+j*t.Stride] = a.Data[i*a.Stride+j] + } + } + } +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions and uplo as a and have adequate +// backing data storage. +func (t Hermitian) From(a HermitianCols) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + case blas.Lower: + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + t.Data[i*t.Stride+j] = a.Data[i+j*a.Stride] + } + } + } +} + +// HermitianBandCols represents an Hermitian matrix using the band column-major storage scheme. +type HermitianBandCols HermitianBand + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. 
+func (t HermitianBandCols) From(a HermitianBand) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.K != a.K { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + dst := BandCols{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := Band{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} + +// From fills the receiver with elements from a. The receiver +// must have the same dimensions, bandwidth and uplo as a and +// have adequate backing data storage. +func (t HermitianBand) From(a HermitianBandCols) { + if t.N != a.N { + panic("cblas64: mismatched dimension") + } + if t.K != a.K { + panic("cblas64: mismatched bandwidth") + } + if a.Stride < a.K+1 { + panic("cblas64: short stride for source") + } + if t.Stride < t.K+1 { + panic("cblas64: short stride for destination") + } + if t.Uplo != a.Uplo { + panic("cblas64: mismatched BLAS uplo") + } + dst := Band{ + Rows: t.N, Cols: t.N, + Stride: t.Stride, + Data: t.Data, + } + src := BandCols{ + Rows: a.N, Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } + switch a.Uplo { + default: + panic("cblas64: bad BLAS uplo") + case blas.Upper: + dst.KU = t.K + src.KU = a.K + case blas.Lower: + dst.KL = t.K + src.KL = a.K + } + dst.From(src) +} diff --git a/vendor/gonum.org/v1/gonum/blas/cblas64/doc.go b/vendor/gonum.org/v1/gonum/blas/cblas64/doc.go new file mode 100644 index 0000000..2a11ccd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/cblas64/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cblas64 provides a simple interface to the complex64 BLAS API. +package cblas64 // import "gonum.org/v1/gonum/blas/cblas64" diff --git a/vendor/gonum.org/v1/gonum/blas/doc.go b/vendor/gonum.org/v1/gonum/blas/doc.go new file mode 100644 index 0000000..ea4b16c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/doc.go @@ -0,0 +1,108 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package blas provides interfaces for the BLAS linear algebra standard. + +All methods must perform appropriate parameter checking and panic if +provided parameters that do not conform to the requirements specified +by the BLAS standard. + +Quick Reference Guide to the BLAS from http://www.netlib.org/lapack/lug/node145.html + +This version is modified to remove the "order" option. All matrix operations are +on row-order matrices. 
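+
+(Editor's note, an illustrative addition rather than part of the upstream
+reference: in the tables below a row such as "_gemm ... S, D, C, Z" means that
+the typed interfaces in this package declare Sgemm, Dgemm, Cgemm and Zgemm.
+For example, the float64 variant is assumed to carry the signature
+
+ Dgemm(tA, tB Transpose, m, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int)
+
+on the Float64 interface, with lda, ldb and ldc the row strides of a, b and c.)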
+ +Level 1 BLAS + + dim scalar vector vector scalars 5-element prefixes + struct + + _rotg ( a, b ) S, D + _rotmg( d1, d2, a, b ) S, D + _rot ( n, x, incX, y, incY, c, s ) S, D + _rotm ( n, x, incX, y, incY, param ) S, D + _swap ( n, x, incX, y, incY ) S, D, C, Z + _scal ( n, alpha, x, incX ) S, D, C, Z, Cs, Zd + _copy ( n, x, incX, y, incY ) S, D, C, Z + _axpy ( n, alpha, x, incX, y, incY ) S, D, C, Z + _dot ( n, x, incX, y, incY ) S, D, Ds + _dotu ( n, x, incX, y, incY ) C, Z + _dotc ( n, x, incX, y, incY ) C, Z + __dot ( n, alpha, x, incX, y, incY ) Sds + _nrm2 ( n, x, incX ) S, D, Sc, Dz + _asum ( n, x, incX ) S, D, Sc, Dz + I_amax( n, x, incX ) s, d, c, z + +Level 2 BLAS + + options dim b-width scalar matrix vector scalar vector prefixes + + _gemv ( trans, m, n, alpha, a, lda, x, incX, beta, y, incY ) S, D, C, Z + _gbmv ( trans, m, n, kL, kU, alpha, a, lda, x, incX, beta, y, incY ) S, D, C, Z + _hemv ( uplo, n, alpha, a, lda, x, incX, beta, y, incY ) C, Z + _hbmv ( uplo, n, k, alpha, a, lda, x, incX, beta, y, incY ) C, Z + _hpmv ( uplo, n, alpha, ap, x, incX, beta, y, incY ) C, Z + _symv ( uplo, n, alpha, a, lda, x, incX, beta, y, incY ) S, D + _sbmv ( uplo, n, k, alpha, a, lda, x, incX, beta, y, incY ) S, D + _spmv ( uplo, n, alpha, ap, x, incX, beta, y, incY ) S, D + _trmv ( uplo, trans, diag, n, a, lda, x, incX ) S, D, C, Z + _tbmv ( uplo, trans, diag, n, k, a, lda, x, incX ) S, D, C, Z + _tpmv ( uplo, trans, diag, n, ap, x, incX ) S, D, C, Z + _trsv ( uplo, trans, diag, n, a, lda, x, incX ) S, D, C, Z + _tbsv ( uplo, trans, diag, n, k, a, lda, x, incX ) S, D, C, Z + _tpsv ( uplo, trans, diag, n, ap, x, incX ) S, D, C, Z + + options dim scalar vector vector matrix prefixes + + _ger ( m, n, alpha, x, incX, y, incY, a, lda ) S, D + _geru ( m, n, alpha, x, incX, y, incY, a, lda ) C, Z + _gerc ( m, n, alpha, x, incX, y, incY, a, lda ) C, Z + _her ( uplo, n, alpha, x, incX, a, lda ) C, Z + _hpr ( uplo, n, alpha, x, incX, ap ) C, Z + _her2 ( uplo, n, alpha, x, incX, y, incY, a, lda ) C, Z + _hpr2 ( uplo, n, alpha, x, incX, y, incY, ap ) C, Z + _syr ( uplo, n, alpha, x, incX, a, lda ) S, D + _spr ( uplo, n, alpha, x, incX, ap ) S, D + _syr2 ( uplo, n, alpha, x, incX, y, incY, a, lda ) S, D + _spr2 ( uplo, n, alpha, x, incX, y, incY, ap ) S, D + +Level 3 BLAS + + options dim scalar matrix matrix scalar matrix prefixes + + _gemm ( transA, transB, m, n, k, alpha, a, lda, b, ldb, beta, c, ldc ) S, D, C, Z + _symm ( side, uplo, m, n, alpha, a, lda, b, ldb, beta, c, ldc ) S, D, C, Z + _hemm ( side, uplo, m, n, alpha, a, lda, b, ldb, beta, c, ldc ) C, Z + _syrk ( uplo, trans, n, k, alpha, a, lda, beta, c, ldc ) S, D, C, Z + _herk ( uplo, trans, n, k, alpha, a, lda, beta, c, ldc ) C, Z + _syr2k( uplo, trans, n, k, alpha, a, lda, b, ldb, beta, c, ldc ) S, D, C, Z + _her2k( uplo, trans, n, k, alpha, a, lda, b, ldb, beta, c, ldc ) C, Z + _trmm ( side, uplo, transA, diag, m, n, alpha, a, lda, b, ldb ) S, D, C, Z + _trsm ( side, uplo, transA, diag, m, n, alpha, a, lda, b, ldb ) S, D, C, Z + +Meaning of prefixes + + S - float32 C - complex64 + D - float64 Z - complex128 + +Matrix types + + GE - GEneral GB - General Band + SY - SYmmetric SB - Symmetric Band SP - Symmetric Packed + HE - HErmitian HB - Hermitian Band HP - Hermitian Packed + TR - TRiangular TB - Triangular Band TP - Triangular Packed + +Options + + trans = NoTrans, Trans, ConjTrans + uplo = Upper, Lower + diag = Nonunit, Unit + side = Left, Right (A or op(A) on the left, or A or op(A) on the right) + +For real matrices, Trans and ConjTrans 
have the same meaning. +For Hermitian matrices, trans = Trans is not allowed. +For complex symmetric matrices, trans = ConjTrans is not allowed. +*/ +package blas // import "gonum.org/v1/gonum/blas" diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/dgemm.go b/vendor/gonum.org/v1/gonum/blas/gonum/dgemm.go new file mode 100644 index 0000000..ec3fcc6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/dgemm.go @@ -0,0 +1,314 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "runtime" + "sync" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f64" +) + +// Dgemm performs one of the matrix-matrix operations +// C = alpha * A * B + beta * C +// C = alpha * A^T * B + beta * C +// C = alpha * A * B^T + beta * C +// C = alpha * A^T * B^T + beta * C +// where A is an m×k or k×m dense matrix, B is an n×k or k×n dense matrix, C is +// an m×n matrix, and alpha and beta are scalars. tA and tB specify whether A or +// B are transposed. +func (Implementation) Dgemm(tA, tB blas.Transpose, m, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) { + switch tA { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch tB { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + aTrans := tA == blas.Trans || tA == blas.ConjTrans + if aTrans { + if lda < max(1, m) { + panic(badLdA) + } + } else { + if lda < max(1, k) { + panic(badLdA) + } + } + bTrans := tB == blas.Trans || tB == blas.ConjTrans + if bTrans { + if ldb < max(1, k) { + panic(badLdB) + } + } else { + if ldb < max(1, n) { + panic(badLdB) + } + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if aTrans { + if len(a) < (k-1)*lda+m { + panic(shortA) + } + } else { + if len(a) < (m-1)*lda+k { + panic(shortA) + } + } + if bTrans { + if len(b) < (n-1)*ldb+k { + panic(shortB) + } + } else { + if len(b) < (k-1)*ldb+n { + panic(shortB) + } + } + if len(c) < (m-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + // scale c + if beta != 1 { + if beta == 0 { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + } + } + + dgemmParallel(aTrans, bTrans, m, n, k, a, lda, b, ldb, c, ldc, alpha) +} + +func dgemmParallel(aTrans, bTrans bool, m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + // dgemmParallel computes a parallel matrix multiplication by partitioning + // a and b into sub-blocks, and updating c with the multiplication of the sub-block + // In all cases, + // A = [ A_11 A_12 ... A_1j + // A_21 A_22 ... A_2j + // ... + // A_i1 A_i2 ... A_ij] + // + // and same for B. All of the submatrix sizes are blockSize×blockSize except + // at the edges. + // + // In all cases, there is one dimension for each matrix along which + // C must be updated sequentially. 
+ // Cij = \sum_k Aik Bki, (A * B) + // Cij = \sum_k Aki Bkj, (A^T * B) + // Cij = \sum_k Aik Bjk, (A * B^T) + // Cij = \sum_k Aki Bjk, (A^T * B^T) + // + // This code computes one {i, j} block sequentially along the k dimension, + // and computes all of the {i, j} blocks concurrently. This + // partitioning allows Cij to be updated in-place without race-conditions. + // Instead of launching a goroutine for each possible concurrent computation, + // a number of worker goroutines are created and channels are used to pass + // available and completed cases. + // + // http://alexkr.com/docs/matrixmult.pdf is a good reference on matrix-matrix + // multiplies, though this code does not copy matrices to attempt to eliminate + // cache misses. + + maxKLen := k + parBlocks := blocks(m, blockSize) * blocks(n, blockSize) + if parBlocks < minParBlock { + // The matrix multiplication is small in the dimensions where it can be + // computed concurrently. Just do it in serial. + dgemmSerial(aTrans, bTrans, m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + } + + nWorkers := runtime.GOMAXPROCS(0) + if parBlocks < nWorkers { + nWorkers = parBlocks + } + // There is a tradeoff between the workers having to wait for work + // and a large buffer making operations slow. + buf := buffMul * nWorkers + if buf > parBlocks { + buf = parBlocks + } + + sendChan := make(chan subMul, buf) + + // Launch workers. A worker receives an {i, j} submatrix of c, and computes + // A_ik B_ki (or the transposed version) storing the result in c_ij. When the + // channel is finally closed, it signals to the waitgroup that it has finished + // computing. + var wg sync.WaitGroup + for i := 0; i < nWorkers; i++ { + wg.Add(1) + go func() { + defer wg.Done() + for sub := range sendChan { + i := sub.i + j := sub.j + leni := blockSize + if i+leni > m { + leni = m - i + } + lenj := blockSize + if j+lenj > n { + lenj = n - j + } + + cSub := sliceView64(c, ldc, i, j, leni, lenj) + + // Compute A_ik B_kj for all k + for k := 0; k < maxKLen; k += blockSize { + lenk := blockSize + if k+lenk > maxKLen { + lenk = maxKLen - k + } + var aSub, bSub []float64 + if aTrans { + aSub = sliceView64(a, lda, k, i, lenk, leni) + } else { + aSub = sliceView64(a, lda, i, k, leni, lenk) + } + if bTrans { + bSub = sliceView64(b, ldb, j, k, lenj, lenk) + } else { + bSub = sliceView64(b, ldb, k, j, lenk, lenj) + } + dgemmSerial(aTrans, bTrans, leni, lenj, lenk, aSub, lda, bSub, ldb, cSub, ldc, alpha) + } + } + }() + } + + // Send out all of the {i, j} subblocks for computation. 
+ for i := 0; i < m; i += blockSize { + for j := 0; j < n; j += blockSize { + sendChan <- subMul{ + i: i, + j: j, + } + } + } + close(sendChan) + wg.Wait() +} + +// dgemmSerial is serial matrix multiply +func dgemmSerial(aTrans, bTrans bool, m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + switch { + case !aTrans && !bTrans: + dgemmSerialNotNot(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case aTrans && !bTrans: + dgemmSerialTransNot(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case !aTrans && bTrans: + dgemmSerialNotTrans(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case aTrans && bTrans: + dgemmSerialTransTrans(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + default: + panic("unreachable") + } +} + +// dgemmSerial where neither a nor b are transposed +func dgemmSerialNotNot(m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for l, v := range a[i*lda : i*lda+k] { + tmp := alpha * v + if tmp != 0 { + f64.AxpyUnitary(tmp, b[l*ldb:l*ldb+n], ctmp) + } + } + } +} + +// dgemmSerial where neither a is transposed and b is not +func dgemmSerialTransNot(m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for l := 0; l < k; l++ { + btmp := b[l*ldb : l*ldb+n] + for i, v := range a[l*lda : l*lda+m] { + tmp := alpha * v + if tmp != 0 { + ctmp := c[i*ldc : i*ldc+n] + f64.AxpyUnitary(tmp, btmp, ctmp) + } + } + } +} + +// dgemmSerial where neither a is not transposed and b is +func dgemmSerialNotTrans(m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for i := 0; i < m; i++ { + atmp := a[i*lda : i*lda+k] + ctmp := c[i*ldc : i*ldc+n] + for j := 0; j < n; j++ { + ctmp[j] += alpha * f64.DotUnitary(atmp, b[j*ldb:j*ldb+k]) + } + } +} + +// dgemmSerial where both are transposed +func dgemmSerialTransTrans(m, n, k int, a []float64, lda int, b []float64, ldb int, c []float64, ldc int, alpha float64) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for l := 0; l < k; l++ { + for i, v := range a[l*lda : l*lda+m] { + tmp := alpha * v + if tmp != 0 { + ctmp := c[i*ldc : i*ldc+n] + f64.AxpyInc(tmp, b[l:], ctmp, uintptr(n), uintptr(ldb), 1, 0, 0) + } + } + } +} + +func sliceView64(a []float64, lda, i, j, r, c int) []float64 { + return a[i*lda+j : (i+r-1)*lda+j+c] +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/doc.go b/vendor/gonum.org/v1/gonum/blas/gonum/doc.go new file mode 100644 index 0000000..3f4b6c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/doc.go @@ -0,0 +1,88 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Ensure changes made to blas/native are reflected in blas/cgo where relevant. + +/* +Package gonum is a Go implementation of the BLAS API. 
This implementation +panics when the input arguments are invalid as per the standard, for example +if a vector increment is zero. Note that the treatment of NaN values +is not specified, and differs among the BLAS implementations. +gonum.org/v1/gonum/blas/blas64 provides helpful wrapper functions to the BLAS +interface. The rest of this text describes the layout of the data for the input types. + +Note that in the function documentation, x[i] refers to the i^th element +of the vector, which will be different from the i^th element of the slice if +incX != 1. + +See http://www.netlib.org/lapack/explore-html/d4/de1/_l_i_c_e_n_s_e_source.html +for more license information. + +Vector arguments are effectively strided slices. They have two input arguments, +a number of elements, n, and an increment, incX. The increment specifies the +distance between elements of the vector. The actual Go slice may be longer +than necessary. +The increment may be positive or negative, except in functions with only +a single vector argument where the increment may only be positive. If the increment +is negative, s[0] is the last element in the slice. Note that this is not the same +as counting backward from the end of the slice, as len(s) may be longer than +necessary. So, for example, if n = 5 and incX = 3, the elements of s are + [0 * * 1 * * 2 * * 3 * * 4 * * * ...] +where ∗ elements are never accessed. If incX = -3, the same elements are +accessed, just in reverse order (4, 3, 2, 1, 0). + +Dense matrices are specified by a number of rows, a number of columns, and a stride. +The stride specifies the number of entries in the slice between the first element +of successive rows. The stride must be at least as large as the number of columns +but may be longer. + [a00 ... a0n a0* ... a1stride-1 a21 ... amn am* ... amstride-1] +Thus, dense[i*ld + j] refers to the {i, j}th element of the matrix. + +Symmetric and triangular matrices (non-packed) are stored identically to Dense, +except that only elements in one triangle of the matrix are accessed. + +Packed symmetric and packed triangular matrices are laid out with the entries +condensed such that all of the unreferenced elements are removed. So, the upper triangular +matrix + [ + 1 2 3 + 0 4 5 + 0 0 6 + ] +and the lower-triangular matrix + [ + 1 0 0 + 2 3 0 + 4 5 6 + ] +will both be compacted as [1 2 3 4 5 6]. The (i, j) element of the original +dense matrix can be found at element i*n - (i-1)*i/2 + j for upper triangular, +and at element i * (i+1) /2 + j for lower triangular. + +Banded matrices are laid out in a compact format, constructed by removing the +zeros in the rows and aligning the diagonals. For example, the matrix + [ + 1 2 3 0 0 0 + 4 5 6 7 0 0 + 0 8 9 10 11 0 + 0 0 12 13 14 15 + 0 0 0 16 17 18 + 0 0 0 0 19 20 + ] + +implicitly becomes (∗ entries are never accessed) + [ + * 1 2 3 + 4 5 6 7 + 8 9 10 11 + 12 13 14 15 + 16 17 18 * + 19 20 * * + ] +which is given to the BLAS routine as [∗ 1 2 3 4 ...]. + +See http://www.crest.iu.edu/research/mtl/reference/html/banded.html +for more information +*/ +package gonum // import "gonum.org/v1/gonum/blas/gonum" diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/errors.go b/vendor/gonum.org/v1/gonum/blas/gonum/errors.go new file mode 100644 index 0000000..e98575d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/errors.go @@ -0,0 +1,35 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
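The Dgemm routine added earlier in this patch follows the row-major layout described above, with dense[i*ld+j] addressing and lda/ldb/ldc at least the number of columns. A minimal, illustrative sketch, not part of the vendored files, with made-up matrices:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas"
        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // C = 1*A*B + 0*C with A 2×3, B 3×2 and C 2×2, all row-major.
        a := []float64{
            1, 2, 3,
            4, 5, 6,
        }
        b := []float64{
            7, 8,
            9, 10,
            11, 12,
        }
        c := make([]float64, 4)

        impl.Dgemm(blas.NoTrans, blas.NoTrans, 2, 2, 3, 1, a, 3, b, 2, 0, c, 2)
        fmt.Println(c) // [58 64 139 154]
    }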
+ +package gonum + +// Panic strings used during parameter checks. +// This list is duplicated in netlib/blas/netlib. Keep in sync. +const ( + zeroIncX = "blas: zero x index increment" + zeroIncY = "blas: zero y index increment" + + mLT0 = "blas: m < 0" + nLT0 = "blas: n < 0" + kLT0 = "blas: k < 0" + kLLT0 = "blas: kL < 0" + kULT0 = "blas: kU < 0" + + badUplo = "blas: illegal triangle" + badTranspose = "blas: illegal transpose" + badDiag = "blas: illegal diagonal" + badSide = "blas: illegal side" + badFlag = "blas: illegal rotm flag" + + badLdA = "blas: bad leading dimension of A" + badLdB = "blas: bad leading dimension of B" + badLdC = "blas: bad leading dimension of C" + + shortX = "blas: insufficient length of x" + shortY = "blas: insufficient length of y" + shortAP = "blas: insufficient length of ap" + shortA = "blas: insufficient length of a" + shortB = "blas: insufficient length of b" + shortC = "blas: insufficient length of c" +) diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/gemv.go b/vendor/gonum.org/v1/gonum/blas/gonum/gemv.go new file mode 100644 index 0000000..9b9a1be --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/gemv.go @@ -0,0 +1,190 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f32" + "gonum.org/v1/gonum/internal/asm/f64" +) + +// TODO(Kunde21): Merge these methods back into level2double/level2single when Sgemv assembly kernels are merged into f32. + +// Dgemv computes +// y = alpha * A * x + beta * y if tA = blas.NoTrans +// y = alpha * A^T * x + beta * y if tA = blas.Trans or blas.ConjTrans +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func (Implementation) Dgemv(tA blas.Transpose, m, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) { + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + // Set up indexes + lenX := m + lenY := n + if tA == blas.NoTrans { + lenX = n + lenY = m + } + + // Quick return if possible + if m == 0 || n == 0 { + return + } + + if (incX > 0 && (lenX-1)*incX >= len(x)) || (incX < 0 && (1-lenX)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (lenY-1)*incY >= len(y)) || (incY < 0 && (1-lenY)*incY >= len(y)) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + // First form y = beta * y + if incY > 0 { + Implementation{}.Dscal(lenY, beta, y, incY) + } else { + Implementation{}.Dscal(lenY, beta, y, -incY) + } + return + } + + // Form y = alpha * A * x + y + if tA == blas.NoTrans { + f64.GemvN(uintptr(m), uintptr(n), alpha, a, uintptr(lda), x, uintptr(incX), beta, y, uintptr(incY)) + return + } + // Cases where a is transposed. + f64.GemvT(uintptr(m), uintptr(n), alpha, a, uintptr(lda), x, uintptr(incX), beta, y, uintptr(incY)) +} + +// Sgemv computes +// y = alpha * A * x + beta * y if tA = blas.NoTrans +// y = alpha * A^T * x + beta * y if tA = blas.Trans or blas.ConjTrans +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. 
+// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sgemv(tA blas.Transpose, m, n int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) { + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // Set up indexes + lenX := m + lenY := n + if tA == blas.NoTrans { + lenX = n + lenY = m + } + if (incX > 0 && (lenX-1)*incX >= len(x)) || (incX < 0 && (1-lenX)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (lenY-1)*incY >= len(y)) || (incY < 0 && (1-lenY)*incY >= len(y)) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // First form y = beta * y + if incY > 0 { + Implementation{}.Sscal(lenY, beta, y, incY) + } else { + Implementation{}.Sscal(lenY, beta, y, -incY) + } + + if alpha == 0 { + return + } + + var kx, ky int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if incY < 0 { + ky = -(lenY - 1) * incY + } + + // Form y = alpha * A * x + y + if tA == blas.NoTrans { + if incX == 1 && incY == 1 { + for i := 0; i < m; i++ { + y[i] += alpha * f32.DotUnitary(a[lda*i:lda*i+n], x[:n]) + } + return + } + iy := ky + for i := 0; i < m; i++ { + y[iy] += alpha * f32.DotInc(x, a[lda*i:lda*i+n], uintptr(n), uintptr(incX), 1, uintptr(kx), 0) + iy += incY + } + return + } + // Cases where a is transposed. + if incX == 1 && incY == 1 { + for i := 0; i < m; i++ { + tmp := alpha * x[i] + if tmp != 0 { + f32.AxpyUnitaryTo(y, tmp, a[lda*i:lda*i+n], y[:n]) + } + } + return + } + ix := kx + for i := 0; i < m; i++ { + tmp := alpha * x[ix] + if tmp != 0 { + f32.AxpyInc(tmp, a[lda*i:lda*i+n], y, uintptr(n), 1, uintptr(incY), 0, uintptr(ky)) + } + ix += incX + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/gonum.go b/vendor/gonum.org/v1/gonum/blas/gonum/gonum.go new file mode 100644 index 0000000..8ab8d43 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/gonum.go @@ -0,0 +1,58 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate ./single_precision.bash + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/internal/math32" +) + +type Implementation struct{} + +// [SD]gemm behavior constants. These are kept here to keep them out of the +// way during single precision code genration. +const ( + blockSize = 64 // b x b matrix + minParBlock = 4 // minimum number of blocks needed to go parallel + buffMul = 4 // how big is the buffer relative to the number of workers +) + +// subMul is a common type shared by [SD]gemm. +type subMul struct { + i, j int // index of block +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func min(a, b int) int { + if a > b { + return b + } + return a +} + +// blocks returns the number of divisions of the dimension length with the given +// block size. +func blocks(dim, bsize int) int { + return (dim + bsize - 1) / bsize +} + +// dcabs1 returns |real(z)|+|imag(z)|. +func dcabs1(z complex128) float64 { + return math.Abs(real(z)) + math.Abs(imag(z)) +} + +// scabs1 returns |real(z)|+|imag(z)|. 
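The gemv kernels above use the same row-major convention, and the transpose option reuses the same storage and leading dimension. An illustrative sketch (not part of the vendored files), using made-up data:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas"
        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // A is 2×3, row-major with lda = 3.
        a := []float64{
            1, 2, 3,
            4, 5, 6,
        }

        // y = A * x
        x := []float64{1, 1, 1}
        y := make([]float64, 2)
        impl.Dgemv(blas.NoTrans, 2, 3, 1, a, 3, x, 1, 0, y, 1)
        fmt.Println(y) // [6 15]

        // yt = A^T * xt reuses the same slice and leading dimension.
        xt := []float64{1, 1}
        yt := make([]float64, 3)
        impl.Dgemv(blas.Trans, 2, 3, 1, a, 3, xt, 1, 0, yt, 1)
        fmt.Println(yt) // [5 7 9]
    }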
+func scabs1(z complex64) float32 { + return math32.Abs(real(z)) + math32.Abs(imag(z)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx128.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx128.go new file mode 100644 index 0000000..e37bf44 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx128.go @@ -0,0 +1,445 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c128" +) + +var _ blas.Complex128Level1 = Implementation{} + +// Dzasum returns the sum of the absolute values of the elements of x +// \sum_i |Re(x[i])| + |Im(x[i])| +// Dzasum returns 0 if incX is negative. +func (Implementation) Dzasum(n int, x []complex128, incX int) float64 { + if n < 0 { + panic(nLT0) + } + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + var sum float64 + if incX == 1 { + if len(x) < n { + panic(shortX) + } + for _, v := range x[:n] { + sum += dcabs1(v) + } + return sum + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + for i := 0; i < n; i++ { + v := x[i*incX] + sum += dcabs1(v) + } + return sum +} + +// Dznrm2 computes the Euclidean norm of the complex vector x, +// ‖x‖_2 = sqrt(\sum_i x[i] * conj(x[i])). +// This function returns 0 if incX is negative. +func (Implementation) Dznrm2(n int, x []complex128, incX int) float64 { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if n < 1 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + var ( + scale float64 + ssq float64 = 1 + ) + if incX == 1 { + for _, v := range x[:n] { + re, im := math.Abs(real(v)), math.Abs(imag(v)) + if re != 0 { + if re > scale { + ssq = 1 + ssq*(scale/re)*(scale/re) + scale = re + } else { + ssq += (re / scale) * (re / scale) + } + } + if im != 0 { + if im > scale { + ssq = 1 + ssq*(scale/im)*(scale/im) + scale = im + } else { + ssq += (im / scale) * (im / scale) + } + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(ssq) + } + for ix := 0; ix < n*incX; ix += incX { + re, im := math.Abs(real(x[ix])), math.Abs(imag(x[ix])) + if re != 0 { + if re > scale { + ssq = 1 + ssq*(scale/re)*(scale/re) + scale = re + } else { + ssq += (re / scale) * (re / scale) + } + } + if im != 0 { + if im > scale { + ssq = 1 + ssq*(scale/im)*(scale/im) + scale = im + } else { + ssq += (im / scale) * (im / scale) + } + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(ssq) +} + +// Izamax returns the index of the first element of x having largest |Re(·)|+|Im(·)|. +// Izamax returns -1 if n is 0 or incX is negative. +func (Implementation) Izamax(n int, x []complex128, incX int) int { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + // Return invalid index. + return -1 + } + if n < 1 { + if n == 0 { + // Return invalid index. 
+ return -1 + } + panic(nLT0) + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + idx := 0 + max := dcabs1(x[0]) + if incX == 1 { + for i, v := range x[1:n] { + absV := dcabs1(v) + if absV > max { + max = absV + idx = i + 1 + } + } + return idx + } + ix := incX + for i := 1; i < n; i++ { + absV := dcabs1(x[ix]) + if absV > max { + max = absV + idx = i + } + ix += incX + } + return idx +} + +// Zaxpy adds alpha times x to y: +// y[i] += alpha * x[i] for all i +func (Implementation) Zaxpy(n int, alpha complex128, x []complex128, incX int, y []complex128, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if alpha == 0 { + return + } + if incX == 1 && incY == 1 { + c128.AxpyUnitary(alpha, x[:n], y[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (1 - n) * incX + } + if incY < 0 { + iy = (1 - n) * incY + } + c128.AxpyInc(alpha, x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Zcopy copies the vector x to vector y. +func (Implementation) Zcopy(n int, x []complex128, incX int, y []complex128, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if incX == 1 && incY == 1 { + copy(y[:n], x[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + y[iy] = x[ix] + ix += incX + iy += incY + } +} + +// Zdotc computes the dot product +// x^H · y +// of two complex vectors x and y. +func (Implementation) Zdotc(n int, x []complex128, incX int, y []complex128, incY int) complex128 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return c128.DotcUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || (n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || (n-1)*incY >= len(y) { + panic(shortY) + } + return c128.DotcInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Zdotu computes the dot product +// x^T · y +// of two complex vectors x and y. 
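Zdotc and Zdotu differ only in whether x is conjugated before the product; a small sketch (illustrative, not part of the vendored files):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        x := []complex128{1 + 2i, 3 - 1i}
        y := []complex128{2 - 1i, 1 + 1i}

        // Unconjugated product x^T · y.
        fmt.Println(impl.Zdotu(2, x, 1, y, 1)) // (8+5i)
        // Conjugated product x^H · y.
        fmt.Println(impl.Zdotc(2, x, 1, y, 1)) // (2-1i)
    }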
+func (Implementation) Zdotu(n int, x []complex128, incX int, y []complex128, incY int) complex128 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return c128.DotuUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || (n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || (n-1)*incY >= len(y) { + panic(shortY) + } + return c128.DotuInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Zdscal scales the vector x by a real scalar alpha. +// Zdscal has no effect if incX < 0. +func (Implementation) Zdscal(n int, alpha float64, x []complex128, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + x = x[:n] + for i, v := range x { + x[i] = complex(alpha*real(v), alpha*imag(v)) + } + return + } + for ix := 0; ix < n*incX; ix += incX { + v := x[ix] + x[ix] = complex(alpha*real(v), alpha*imag(v)) + } +} + +// Zscal scales the vector x by a complex scalar alpha. +// Zscal has no effect if incX < 0. +func (Implementation) Zscal(n int, alpha complex128, x []complex128, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + c128.ScalUnitary(alpha, x[:n]) + return + } + c128.ScalInc(alpha, x, uintptr(n), uintptr(incX)) +} + +// Zswap exchanges the elements of two complex vectors x and y. +func (Implementation) Zswap(n int, x []complex128, incX int, y []complex128, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, v := range x { + x[i], y[i] = y[i], v + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + x[ix], y[iy] = y[iy], x[ix] + ix += incX + iy += incY + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx64.go new file mode 100644 index 0000000..ba192ea --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1cmplx64.go @@ -0,0 +1,467 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
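The increment arguments of the scaling routines above follow the strided-vector convention from the package documentation: n counts logical elements and incX is the distance between them in the slice. A sketch with made-up data (not part of the vendored files):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // With incX = 2, only every second element is scaled, and n counts
        // the strided elements rather than the slice length.
        x := []complex128{1, 10, 2, 20, 3, 30}
        impl.Zscal(3, 2i, x, 2)
        fmt.Println(x) // [(0+2i) (10+0i) (0+4i) (20+0i) (0+6i) (30+0i)]
    }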
+ +package gonum + +import ( + math "gonum.org/v1/gonum/internal/math32" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c64" +) + +var _ blas.Complex64Level1 = Implementation{} + +// Scasum returns the sum of the absolute values of the elements of x +// \sum_i |Re(x[i])| + |Im(x[i])| +// Scasum returns 0 if incX is negative. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Scasum(n int, x []complex64, incX int) float32 { + if n < 0 { + panic(nLT0) + } + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + var sum float32 + if incX == 1 { + if len(x) < n { + panic(shortX) + } + for _, v := range x[:n] { + sum += scabs1(v) + } + return sum + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + for i := 0; i < n; i++ { + v := x[i*incX] + sum += scabs1(v) + } + return sum +} + +// Scnrm2 computes the Euclidean norm of the complex vector x, +// ‖x‖_2 = sqrt(\sum_i x[i] * conj(x[i])). +// This function returns 0 if incX is negative. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Scnrm2(n int, x []complex64, incX int) float32 { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if n < 1 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + var ( + scale float32 + ssq float32 = 1 + ) + if incX == 1 { + for _, v := range x[:n] { + re, im := math.Abs(real(v)), math.Abs(imag(v)) + if re != 0 { + if re > scale { + ssq = 1 + ssq*(scale/re)*(scale/re) + scale = re + } else { + ssq += (re / scale) * (re / scale) + } + } + if im != 0 { + if im > scale { + ssq = 1 + ssq*(scale/im)*(scale/im) + scale = im + } else { + ssq += (im / scale) * (im / scale) + } + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(ssq) + } + for ix := 0; ix < n*incX; ix += incX { + re, im := math.Abs(real(x[ix])), math.Abs(imag(x[ix])) + if re != 0 { + if re > scale { + ssq = 1 + ssq*(scale/re)*(scale/re) + scale = re + } else { + ssq += (re / scale) * (re / scale) + } + } + if im != 0 { + if im > scale { + ssq = 1 + ssq*(scale/im)*(scale/im) + scale = im + } else { + ssq += (im / scale) * (im / scale) + } + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(ssq) +} + +// Icamax returns the index of the first element of x having largest |Re(·)|+|Im(·)|. +// Icamax returns -1 if n is 0 or incX is negative. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Icamax(n int, x []complex64, incX int) int { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + // Return invalid index. + return -1 + } + if n < 1 { + if n == 0 { + // Return invalid index. + return -1 + } + panic(nLT0) + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + idx := 0 + max := scabs1(x[0]) + if incX == 1 { + for i, v := range x[1:n] { + absV := scabs1(v) + if absV > max { + max = absV + idx = i + 1 + } + } + return idx + } + ix := incX + for i := 1; i < n; i++ { + absV := scabs1(x[ix]) + if absV > max { + max = absV + idx = i + } + ix += incX + } + return idx +} + +// Caxpy adds alpha times x to y: +// y[i] += alpha * x[i] for all i +// +// Complex64 implementations are autogenerated and not directly tested. 
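Icamax, like Izamax above, ranks elements by |Re|+|Im| rather than by the complex modulus, so the two orderings can disagree. A small sketch (illustrative only, not part of the vendored files):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // 5+0i has the larger modulus (5 vs ≈4.24), but 3+3i wins under the
        // |Re|+|Im| measure (6 vs 5), so index 1 is returned.
        x := []complex64{5 + 0i, 3 + 3i}
        fmt.Println(impl.Icamax(2, x, 1)) // 1
    }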
+func (Implementation) Caxpy(n int, alpha complex64, x []complex64, incX int, y []complex64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if alpha == 0 { + return + } + if incX == 1 && incY == 1 { + c64.AxpyUnitary(alpha, x[:n], y[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (1 - n) * incX + } + if incY < 0 { + iy = (1 - n) * incY + } + c64.AxpyInc(alpha, x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Ccopy copies the vector x to vector y. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ccopy(n int, x []complex64, incX int, y []complex64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if incX == 1 && incY == 1 { + copy(y[:n], x[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + y[iy] = x[ix] + ix += incX + iy += incY + } +} + +// Cdotc computes the dot product +// x^H · y +// of two complex vectors x and y. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cdotc(n int, x []complex64, incX int, y []complex64, incY int) complex64 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return c64.DotcUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || (n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || (n-1)*incY >= len(y) { + panic(shortY) + } + return c64.DotcInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Cdotu computes the dot product +// x^T · y +// of two complex vectors x and y. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cdotu(n int, x []complex64, incX int, y []complex64, incY int) complex64 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return c64.DotuUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || (n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || (n-1)*incY >= len(y) { + panic(shortY) + } + return c64.DotuInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Csscal scales the vector x by a real scalar alpha. +// Csscal has no effect if incX < 0. +// +// Complex64 implementations are autogenerated and not directly tested. 
+func (Implementation) Csscal(n int, alpha float32, x []complex64, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + x = x[:n] + for i, v := range x { + x[i] = complex(alpha*real(v), alpha*imag(v)) + } + return + } + for ix := 0; ix < n*incX; ix += incX { + v := x[ix] + x[ix] = complex(alpha*real(v), alpha*imag(v)) + } +} + +// Cscal scales the vector x by a complex scalar alpha. +// Cscal has no effect if incX < 0. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cscal(n int, alpha complex64, x []complex64, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + c64.ScalUnitary(alpha, x[:n]) + return + } + c64.ScalInc(alpha, x, uintptr(n), uintptr(incX)) +} + +// Cswap exchanges the elements of two complex vectors x and y. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cswap(n int, x []complex64, incX int, y []complex64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && (n-1)*incX >= len(x)) || (incX < 0 && (1-n)*incX >= len(x)) { + panic(shortX) + } + if (incY > 0 && (n-1)*incY >= len(y)) || (incY < 0 && (1-n)*incY >= len(y)) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, v := range x { + x[i], y[i] = y[i], v + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + x[ix], y[iy] = y[iy], x[ix] + ix += incX + iy += incY + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float32.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32.go new file mode 100644 index 0000000..ee82083 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32.go @@ -0,0 +1,644 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + math "gonum.org/v1/gonum/internal/math32" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f32" +) + +var _ blas.Float32Level1 = Implementation{} + +// Snrm2 computes the Euclidean norm of a vector, +// sqrt(\sum_i x[i] * x[i]). +// This function returns 0 if incX is negative. +// +// Float32 implementations are autogenerated and not directly tested. 
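The scaled accumulation used by these nrm2 routines keeps the norm finite even when the individual squares overflow the floating-point range. A sketch with made-up values (not part of the vendored files; the printed value is approximate):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // 3e19² and 4e19² both overflow float32, yet the norm 5e19 does not.
        x := []float32{3e19, 4e19}
        fmt.Println(impl.Snrm2(2, x, 1)) // ≈ 5e+19
    }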
+func (Implementation) Snrm2(n int, x []float32, incX int) float32 { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if n < 2 { + if n == 1 { + return math.Abs(x[0]) + } + if n == 0 { + return 0 + } + panic(nLT0) + } + var ( + scale float32 = 0 + sumSquares float32 = 1 + ) + if incX == 1 { + x = x[:n] + for _, v := range x { + if v == 0 { + continue + } + absxi := math.Abs(v) + if math.IsNaN(absxi) { + return math.NaN() + } + if scale < absxi { + sumSquares = 1 + sumSquares*(scale/absxi)*(scale/absxi) + scale = absxi + } else { + sumSquares = sumSquares + (absxi/scale)*(absxi/scale) + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(sumSquares) + } + for ix := 0; ix < n*incX; ix += incX { + val := x[ix] + if val == 0 { + continue + } + absxi := math.Abs(val) + if math.IsNaN(absxi) { + return math.NaN() + } + if scale < absxi { + sumSquares = 1 + sumSquares*(scale/absxi)*(scale/absxi) + scale = absxi + } else { + sumSquares = sumSquares + (absxi/scale)*(absxi/scale) + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(sumSquares) +} + +// Sasum computes the sum of the absolute values of the elements of x. +// \sum_i |x[i]| +// Sasum returns 0 if incX is negative. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sasum(n int, x []float32, incX int) float32 { + var sum float32 + if n < 0 { + panic(nLT0) + } + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if incX == 1 { + x = x[:n] + for _, v := range x { + sum += math.Abs(v) + } + return sum + } + for i := 0; i < n; i++ { + sum += math.Abs(x[i*incX]) + } + return sum +} + +// Isamax returns the index of an element of x with the largest absolute value. +// If there are multiple such indices the earliest is returned. +// Isamax returns -1 if n == 0. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Isamax(n int, x []float32, incX int) int { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return -1 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if n < 2 { + if n == 1 { + return 0 + } + if n == 0 { + return -1 // Netlib returns invalid index when n == 0. + } + panic(nLT0) + } + idx := 0 + max := math.Abs(x[0]) + if incX == 1 { + for i, v := range x[:n] { + absV := math.Abs(v) + if absV > max { + max = absV + idx = i + } + } + return idx + } + ix := incX + for i := 1; i < n; i++ { + v := x[ix] + absV := math.Abs(v) + if absV > max { + max = absV + idx = i + } + ix += incX + } + return idx +} + +// Sswap exchanges the elements of two vectors. +// x[i], y[i] = y[i], x[i] for all i +// +// Float32 implementations are autogenerated and not directly tested. 
+func (Implementation) Sswap(n int, x []float32, incX int, y []float32, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, v := range x { + x[i], y[i] = y[i], v + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + x[ix], y[iy] = y[iy], x[ix] + ix += incX + iy += incY + } +} + +// Scopy copies the elements of x into the elements of y. +// y[i] = x[i] for all i +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Scopy(n int, x []float32, incX int, y []float32, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + copy(y[:n], x[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + y[iy] = x[ix] + ix += incX + iy += incY + } +} + +// Saxpy adds alpha times x to y +// y[i] += alpha * x[i] for all i +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Saxpy(n int, alpha float32, x []float32, incX int, y []float32, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if alpha == 0 { + return + } + if incX == 1 && incY == 1 { + f32.AxpyUnitary(alpha, x[:n], y[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + f32.AxpyInc(alpha, x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Srotg computes the plane rotation +// _ _ _ _ _ _ +// | c s | | a | | r | +// | -s c | * | b | = | 0 | +// ‾ ‾ ‾ ‾ ‾ ‾ +// where +// r = ±√(a^2 + b^2) +// c = a/r, the cosine of the plane rotation +// s = b/r, the sine of the plane rotation +// +// NOTE: There is a discrepancy between the reference implementation and the BLAS +// technical manual regarding the sign for r when a or b are zero. +// Srotg agrees with the definition in the manual and other +// common BLAS implementations. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Srotg(a, b float32) (c, s, r, z float32) { + if b == 0 && a == 0 { + return 1, 0, a, 0 + } + absA := math.Abs(a) + absB := math.Abs(b) + aGTb := absA > absB + r = math.Hypot(a, b) + if aGTb { + r = math.Copysign(r, a) + } else { + r = math.Copysign(r, b) + } + c = a / r + s = b / r + if aGTb { + z = s + } else if c != 0 { // r == 0 case handled above + z = 1 / c + } else { + z = 1 + } + return +} + +// Srotmg computes the modified Givens rotation. 
See +// http://www.netlib.org/lapack/explore-html/df/deb/drotmg_8f.html +// for more details. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Srotmg(d1, d2, x1, y1 float32) (p blas.SrotmParams, rd1, rd2, rx1 float32) { + // The implementation of Drotmg used here is taken from Hopkins 1997 + // Appendix A: https://doi.org/10.1145/289251.289253 + // with the exception of the gam constants below. + + const ( + gam = 4096.0 + gamsq = gam * gam + rgamsq = 1.0 / gamsq + ) + + if d1 < 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + if d2 == 0 || y1 == 0 { + p.Flag = blas.Identity + return p, d1, d2, x1 + } + + var h11, h12, h21, h22 float32 + if (d1 == 0 || x1 == 0) && d2 > 0 { + p.Flag = blas.Diagonal + h12 = 1 + h21 = -1 + x1 = y1 + d1, d2 = d2, d1 + } else { + p2 := d2 * y1 + p1 := d1 * x1 + q2 := p2 * y1 + q1 := p1 * x1 + if math.Abs(q1) > math.Abs(q2) { + p.Flag = blas.OffDiagonal + h11 = 1 + h22 = 1 + h21 = -y1 / x1 + h12 = p2 / p1 + u := 1 - h12*h21 + if u <= 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + d1 /= u + d2 /= u + x1 *= u + } else { + if q2 < 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + p.Flag = blas.Diagonal + h21 = -1 + h12 = 1 + h11 = p1 / p2 + h22 = x1 / y1 + u := 1 + h11*h22 + d1, d2 = d2/u, d1/u + x1 = y1 * u + } + } + + for d1 <= rgamsq && d1 != 0 { + p.Flag = blas.Rescaling + d1 = (d1 * gam) * gam + x1 /= gam + h11 /= gam + h12 /= gam + } + for d1 > gamsq { + p.Flag = blas.Rescaling + d1 = (d1 / gam) / gam + x1 *= gam + h11 *= gam + h12 *= gam + } + + for math.Abs(d2) <= rgamsq && d2 != 0 { + p.Flag = blas.Rescaling + d2 = (d2 * gam) * gam + h21 /= gam + h22 /= gam + } + for math.Abs(d2) > gamsq { + p.Flag = blas.Rescaling + d2 = (d2 / gam) / gam + h21 *= gam + h22 *= gam + } + + switch p.Flag { + case blas.Diagonal: + p.H = [4]float32{0: h11, 3: h22} + case blas.OffDiagonal: + p.H = [4]float32{1: h21, 2: h12} + case blas.Rescaling: + p.H = [4]float32{h11, h21, h12, h22} + default: + panic(badFlag) + } + + return p, d1, d2, x1 +} + +// Srot applies a plane transformation. +// x[i] = c * x[i] + s * y[i] +// y[i] = c * y[i] - s * x[i] +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Srot(n int, x []float32, incX int, y []float32, incY int, c float32, s float32) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = c*vx+s*vy, c*vy-s*vx + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = c*vx+s*vy, c*vy-s*vx + ix += incX + iy += incY + } +} + +// Srotm applies the modified Givens rotation to the 2×n matrix. +// +// Float32 implementations are autogenerated and not directly tested. 
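Srotg and Srot work together: Srotg builds the plane rotation that zeroes the second component of (a, b), and Srot applies it element-wise to a pair of vectors. An illustrative sketch (not part of the vendored files):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        // For (a, b) = (3, 4): r = 5, c = 3/5, s = 4/5.
        c, s, r, _ := impl.Srotg(3, 4)
        fmt.Println(c, s, r) // 0.6 0.8 5

        // Applying the rotation maps (3, 4) to approximately (5, 0).
        x := []float32{3}
        y := []float32{4}
        impl.Srot(1, x, 1, y, 1, c, s)
        fmt.Println(x[0], y[0])
    }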
+func (Implementation) Srotm(n int, x []float32, incX int, y []float32, incY int, p blas.SrotmParams) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + if p.Flag == blas.Identity { + return + } + + switch p.Flag { + case blas.Rescaling: + h11 := p.H[0] + h12 := p.H[2] + h21 := p.H[1] + h22 := p.H[3] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx*h11+vy*h12, vx*h21+vy*h22 + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx*h11+vy*h12, vx*h21+vy*h22 + ix += incX + iy += incY + } + case blas.OffDiagonal: + h12 := p.H[2] + h21 := p.H[1] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx+vy*h12, vx*h21+vy + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx+vy*h12, vx*h21+vy + ix += incX + iy += incY + } + case blas.Diagonal: + h11 := p.H[0] + h22 := p.H[3] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx*h11+vy, -vx+vy*h22 + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx*h11+vy, -vx+vy*h22 + ix += incX + iy += incY + } + } +} + +// Sscal scales x by alpha. +// x[i] *= alpha +// Sscal has no effect if incX < 0. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sscal(n int, alpha float32, x []float32, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + f32.ScalUnitary(alpha, x[:n]) + return + } + f32.ScalInc(alpha, x, uintptr(n), uintptr(incX)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_dsdot.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_dsdot.go new file mode 100644 index 0000000..089e0d8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_dsdot.go @@ -0,0 +1,53 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/internal/asm/f32" +) + +// Dsdot computes the dot product of the two vectors +// \sum_i x[i]*y[i] +// +// Float32 implementations are autogenerated and not directly tested. 
+func (Implementation) Dsdot(n int, x []float32, incX int, y []float32, incY int) float64 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return f32.DdotUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || ix+(n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || iy+(n-1)*incY >= len(y) { + panic(shortY) + } + return f32.DdotInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdot.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdot.go new file mode 100644 index 0000000..41c3e79 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdot.go @@ -0,0 +1,53 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/internal/asm/f32" +) + +// Sdot computes the dot product of the two vectors +// \sum_i x[i]*y[i] +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sdot(n int, x []float32, incX int, y []float32, incY int) float32 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return f32.DotUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || ix+(n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || iy+(n-1)*incY >= len(y) { + panic(shortY) + } + return f32.DotInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdsdot.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdsdot.go new file mode 100644 index 0000000..69dd8aa --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float32_sdsdot.go @@ -0,0 +1,53 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/internal/asm/f32" +) + +// Sdsdot computes the dot product of the two vectors plus a constant +// alpha + \sum_i x[i]*y[i] +// +// Float32 implementations are autogenerated and not directly tested. 
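Sdot accumulates in float32 while Dsdot accumulates in float64, which is the reason both exist; a sketch where the difference can show up (illustrative, not part of the vendored files):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/blas/gonum"
    )

    func main() {
        var impl gonum.Implementation

        x := []float32{1e8, 1, -1e8}
        y := []float32{1, 1, 1}

        // In float32, 1e8 + 1 rounds back to 1e8, so the 1 can be lost.
        fmt.Println(impl.Sdot(3, x, 1, y, 1)) // likely 0
        // The float64 accumulation keeps it.
        fmt.Println(impl.Dsdot(3, x, 1, y, 1)) // 1
    }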
+func (Implementation) Sdsdot(n int, alpha float32, x []float32, incX int, y []float32, incY int) float32 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return alpha + float32(f32.DdotUnitary(x[:n], y[:n])) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || ix+(n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || iy+(n-1)*incY >= len(y) { + panic(shortY) + } + return alpha + float32(f32.DdotInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy))) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float64.go new file mode 100644 index 0000000..2e8ed54 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float64.go @@ -0,0 +1,620 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f64" +) + +var _ blas.Float64Level1 = Implementation{} + +// Dnrm2 computes the Euclidean norm of a vector, +// sqrt(\sum_i x[i] * x[i]). +// This function returns 0 if incX is negative. +func (Implementation) Dnrm2(n int, x []float64, incX int) float64 { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if n < 2 { + if n == 1 { + return math.Abs(x[0]) + } + if n == 0 { + return 0 + } + panic(nLT0) + } + var ( + scale float64 = 0 + sumSquares float64 = 1 + ) + if incX == 1 { + x = x[:n] + for _, v := range x { + if v == 0 { + continue + } + absxi := math.Abs(v) + if math.IsNaN(absxi) { + return math.NaN() + } + if scale < absxi { + sumSquares = 1 + sumSquares*(scale/absxi)*(scale/absxi) + scale = absxi + } else { + sumSquares = sumSquares + (absxi/scale)*(absxi/scale) + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(sumSquares) + } + for ix := 0; ix < n*incX; ix += incX { + val := x[ix] + if val == 0 { + continue + } + absxi := math.Abs(val) + if math.IsNaN(absxi) { + return math.NaN() + } + if scale < absxi { + sumSquares = 1 + sumSquares*(scale/absxi)*(scale/absxi) + scale = absxi + } else { + sumSquares = sumSquares + (absxi/scale)*(absxi/scale) + } + } + if math.IsInf(scale, 1) { + return math.Inf(1) + } + return scale * math.Sqrt(sumSquares) +} + +// Dasum computes the sum of the absolute values of the elements of x. +// \sum_i |x[i]| +// Dasum returns 0 if incX is negative. +func (Implementation) Dasum(n int, x []float64, incX int) float64 { + var sum float64 + if n < 0 { + panic(nLT0) + } + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return 0 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if incX == 1 { + x = x[:n] + for _, v := range x { + sum += math.Abs(v) + } + return sum + } + for i := 0; i < n; i++ { + sum += math.Abs(x[i*incX]) + } + return sum +} + +// Idamax returns the index of an element of x with the largest absolute value. +// If there are multiple such indices the earliest is returned. +// Idamax returns -1 if n == 0. 
+func (Implementation) Idamax(n int, x []float64, incX int) int { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return -1 + } + if len(x) <= (n-1)*incX { + panic(shortX) + } + if n < 2 { + if n == 1 { + return 0 + } + if n == 0 { + return -1 // Netlib returns invalid index when n == 0. + } + panic(nLT0) + } + idx := 0 + max := math.Abs(x[0]) + if incX == 1 { + for i, v := range x[:n] { + absV := math.Abs(v) + if absV > max { + max = absV + idx = i + } + } + return idx + } + ix := incX + for i := 1; i < n; i++ { + v := x[ix] + absV := math.Abs(v) + if absV > max { + max = absV + idx = i + } + ix += incX + } + return idx +} + +// Dswap exchanges the elements of two vectors. +// x[i], y[i] = y[i], x[i] for all i +func (Implementation) Dswap(n int, x []float64, incX int, y []float64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, v := range x { + x[i], y[i] = y[i], v + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + x[ix], y[iy] = y[iy], x[ix] + ix += incX + iy += incY + } +} + +// Dcopy copies the elements of x into the elements of y. +// y[i] = x[i] for all i +func (Implementation) Dcopy(n int, x []float64, incX int, y []float64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + copy(y[:n], x[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + y[iy] = x[ix] + ix += incX + iy += incY + } +} + +// Daxpy adds alpha times x to y +// y[i] += alpha * x[i] for all i +func (Implementation) Daxpy(n int, alpha float64, x []float64, incX int, y []float64, incY int) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if alpha == 0 { + return + } + if incX == 1 && incY == 1 { + f64.AxpyUnitary(alpha, x[:n], y[:n]) + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + f64.AxpyInc(alpha, x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} + +// Drotg computes the plane rotation +// _ _ _ _ _ _ +// | c s | | a | | r | +// | -s c | * | b | = | 0 | +// ‾ ‾ ‾ ‾ ‾ ‾ +// where +// r = ±√(a^2 + b^2) +// c = a/r, the cosine of the plane rotation +// s = b/r, the sine of the plane rotation +// +// NOTE: There is a discrepancy between the reference implementation and the BLAS +// technical manual regarding the sign for r when a or b are zero. +// Drotg agrees with the definition in the manual and other +// common BLAS implementations. 
+func (Implementation) Drotg(a, b float64) (c, s, r, z float64) { + if b == 0 && a == 0 { + return 1, 0, a, 0 + } + absA := math.Abs(a) + absB := math.Abs(b) + aGTb := absA > absB + r = math.Hypot(a, b) + if aGTb { + r = math.Copysign(r, a) + } else { + r = math.Copysign(r, b) + } + c = a / r + s = b / r + if aGTb { + z = s + } else if c != 0 { // r == 0 case handled above + z = 1 / c + } else { + z = 1 + } + return +} + +// Drotmg computes the modified Givens rotation. See +// http://www.netlib.org/lapack/explore-html/df/deb/drotmg_8f.html +// for more details. +func (Implementation) Drotmg(d1, d2, x1, y1 float64) (p blas.DrotmParams, rd1, rd2, rx1 float64) { + // The implementation of Drotmg used here is taken from Hopkins 1997 + // Appendix A: https://doi.org/10.1145/289251.289253 + // with the exception of the gam constants below. + + const ( + gam = 4096.0 + gamsq = gam * gam + rgamsq = 1.0 / gamsq + ) + + if d1 < 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + if d2 == 0 || y1 == 0 { + p.Flag = blas.Identity + return p, d1, d2, x1 + } + + var h11, h12, h21, h22 float64 + if (d1 == 0 || x1 == 0) && d2 > 0 { + p.Flag = blas.Diagonal + h12 = 1 + h21 = -1 + x1 = y1 + d1, d2 = d2, d1 + } else { + p2 := d2 * y1 + p1 := d1 * x1 + q2 := p2 * y1 + q1 := p1 * x1 + if math.Abs(q1) > math.Abs(q2) { + p.Flag = blas.OffDiagonal + h11 = 1 + h22 = 1 + h21 = -y1 / x1 + h12 = p2 / p1 + u := 1 - h12*h21 + if u <= 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + d1 /= u + d2 /= u + x1 *= u + } else { + if q2 < 0 { + p.Flag = blas.Rescaling // Error state. + return p, 0, 0, 0 + } + + p.Flag = blas.Diagonal + h21 = -1 + h12 = 1 + h11 = p1 / p2 + h22 = x1 / y1 + u := 1 + h11*h22 + d1, d2 = d2/u, d1/u + x1 = y1 * u + } + } + + for d1 <= rgamsq && d1 != 0 { + p.Flag = blas.Rescaling + d1 = (d1 * gam) * gam + x1 /= gam + h11 /= gam + h12 /= gam + } + for d1 > gamsq { + p.Flag = blas.Rescaling + d1 = (d1 / gam) / gam + x1 *= gam + h11 *= gam + h12 *= gam + } + + for math.Abs(d2) <= rgamsq && d2 != 0 { + p.Flag = blas.Rescaling + d2 = (d2 * gam) * gam + h21 /= gam + h22 /= gam + } + for math.Abs(d2) > gamsq { + p.Flag = blas.Rescaling + d2 = (d2 / gam) / gam + h21 *= gam + h22 *= gam + } + + switch p.Flag { + case blas.Diagonal: + p.H = [4]float64{0: h11, 3: h22} + case blas.OffDiagonal: + p.H = [4]float64{1: h21, 2: h12} + case blas.Rescaling: + p.H = [4]float64{h11, h21, h12, h22} + default: + panic(badFlag) + } + + return p, d1, d2, x1 +} + +// Drot applies a plane transformation. +// x[i] = c * x[i] + s * y[i] +// y[i] = c * y[i] - s * x[i] +func (Implementation) Drot(n int, x []float64, incX int, y []float64, incY int, c float64, s float64) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = c*vx+s*vy, c*vy-s*vx + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = c*vx+s*vy, c*vy-s*vx + ix += incX + iy += incY + } +} + +// Drotm applies the modified Givens rotation to the 2×n matrix. 
+func (Implementation) Drotm(n int, x []float64, incX int, y []float64, incY int, p blas.DrotmParams) { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return + } + panic(nLT0) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + if p.Flag == blas.Identity { + return + } + + switch p.Flag { + case blas.Rescaling: + h11 := p.H[0] + h12 := p.H[2] + h21 := p.H[1] + h22 := p.H[3] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx*h11+vy*h12, vx*h21+vy*h22 + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx*h11+vy*h12, vx*h21+vy*h22 + ix += incX + iy += incY + } + case blas.OffDiagonal: + h12 := p.H[2] + h21 := p.H[1] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx+vy*h12, vx*h21+vy + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx+vy*h12, vx*h21+vy + ix += incX + iy += incY + } + case blas.Diagonal: + h11 := p.H[0] + h22 := p.H[3] + if incX == 1 && incY == 1 { + x = x[:n] + for i, vx := range x { + vy := y[i] + x[i], y[i] = vx*h11+vy, -vx+vy*h22 + } + return + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + for i := 0; i < n; i++ { + vx := x[ix] + vy := y[iy] + x[ix], y[iy] = vx*h11+vy, -vx+vy*h22 + ix += incX + iy += incY + } + } +} + +// Dscal scales x by alpha. +// x[i] *= alpha +// Dscal has no effect if incX < 0. +func (Implementation) Dscal(n int, alpha float64, x []float64, incX int) { + if incX < 1 { + if incX == 0 { + panic(zeroIncX) + } + return + } + if n < 1 { + if n == 0 { + return + } + panic(nLT0) + } + if (n-1)*incX >= len(x) { + panic(shortX) + } + if alpha == 0 { + if incX == 1 { + x = x[:n] + for i := range x { + x[i] = 0 + } + return + } + for ix := 0; ix < n*incX; ix += incX { + x[ix] = 0 + } + return + } + if incX == 1 { + f64.ScalUnitary(alpha, x[:n]) + return + } + f64.ScalInc(alpha, x, uintptr(n), uintptr(incX)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level1float64_ddot.go b/vendor/gonum.org/v1/gonum/blas/gonum/level1float64_ddot.go new file mode 100644 index 0000000..be87ba1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level1float64_ddot.go @@ -0,0 +1,49 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gonum + +import ( + "gonum.org/v1/gonum/internal/asm/f64" +) + +// Ddot computes the dot product of the two vectors +// \sum_i x[i]*y[i] +func (Implementation) Ddot(n int, x []float64, incX int, y []float64, incY int) float64 { + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + if n <= 0 { + if n == 0 { + return 0 + } + panic(nLT0) + } + if incX == 1 && incY == 1 { + if len(x) < n { + panic(shortX) + } + if len(y) < n { + panic(shortY) + } + return f64.DotUnitary(x[:n], y[:n]) + } + var ix, iy int + if incX < 0 { + ix = (-n + 1) * incX + } + if incY < 0 { + iy = (-n + 1) * incY + } + if ix >= len(x) || ix+(n-1)*incX >= len(x) { + panic(shortX) + } + if iy >= len(y) || iy+(n-1)*incY >= len(y) { + panic(shortY) + } + return f64.DotInc(x, y, uintptr(n), uintptr(incX), uintptr(incY), uintptr(ix), uintptr(iy)) +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx128.go b/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx128.go new file mode 100644 index 0000000..03ee328 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx128.go @@ -0,0 +1,2906 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math/cmplx" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c128" +) + +var _ blas.Complex128Level2 = Implementation{} + +// Zgbmv performs one of the matrix-vector operations +// y = alpha * A * x + beta * y if trans = blas.NoTrans +// y = alpha * A^T * x + beta * y if trans = blas.Trans +// y = alpha * A^H * x + beta * y if trans = blas.ConjTrans +// where alpha and beta are scalars, x and y are vectors, and A is an m×n band matrix +// with kL sub-diagonals and kU super-diagonals. +func (Implementation) Zgbmv(trans blas.Transpose, m, n, kL, kU int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if kL < 0 { + panic(kLLT0) + } + if kU < 0 { + panic(kULT0) + } + if lda < kL+kU+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(min(m, n+kL)-1)+kL+kU+1 { + panic(shortA) + } + var lenX, lenY int + if trans == blas.NoTrans { + lenX, lenY = n, m + } else { + lenX, lenY = m, n + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx int + if incX < 0 { + kx = (1 - lenX) * incX + } + var ky int + if incY < 0 { + ky = (1 - lenY) * incY + } + + // Form y = beta*y. 
+ if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + c128.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + c128.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + c128.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + nRow := min(m, n+kL) + nCol := kL + 1 + kU + switch trans { + case blas.NoTrans: + iy := ky + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) + xtmp := x[off : off+u-l] + var sum complex128 + for j, v := range aRow { + sum += xtmp[j] * v + } + y[iy] += alpha * sum + iy += incY + } + } else { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incX + jx := kx + var sum complex128 + for _, v := range aRow { + sum += x[off+jx] * v + jx += incX + } + y[iy] += alpha * sum + iy += incY + } + } + case blas.Trans: + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[i] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * v + jy += incY + } + } + } else { + ix := kx + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[ix] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * v + jy += incY + } + ix += incX + } + } + case blas.ConjTrans: + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[i] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + } + } else { + ix := kx + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[ix] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + ix += incX + } + } + } +} + +// Zgemv performs one of the matrix-vector operations +// y = alpha * A * x + beta * y if trans = blas.NoTrans +// y = alpha * A^T * x + beta * y if trans = blas.Trans +// y = alpha * A^H * x + beta * y if trans = blas.ConjTrans +// where alpha and beta are scalars, x and y are vectors, and A is an m×n dense matrix. +func (Implementation) Zgemv(trans blas.Transpose, m, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ var lenX, lenY int + if trans == blas.NoTrans { + lenX = n + lenY = m + } else { + lenX = m + lenY = n + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx int + if incX < 0 { + kx = (1 - lenX) * incX + } + var ky int + if incY < 0 { + ky = (1 - lenY) * incY + } + + // Form y = beta*y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + c128.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + c128.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + c128.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + switch trans { + default: + // Form y = alpha*A*x + y. + iy := ky + if incX == 1 { + for i := 0; i < m; i++ { + y[iy] += alpha * c128.DotuUnitary(a[i*lda:i*lda+n], x[:n]) + iy += incY + } + return + } + for i := 0; i < m; i++ { + y[iy] += alpha * c128.DotuInc(a[i*lda:i*lda+n], x, uintptr(n), 1, uintptr(incX), 0, uintptr(kx)) + iy += incY + } + return + + case blas.Trans: + // Form y = alpha*A^T*x + y. + ix := kx + if incY == 1 { + for i := 0; i < m; i++ { + c128.AxpyUnitary(alpha*x[ix], a[i*lda:i*lda+n], y[:n]) + ix += incX + } + return + } + for i := 0; i < m; i++ { + c128.AxpyInc(alpha*x[ix], a[i*lda:i*lda+n], y, uintptr(n), 1, uintptr(incY), 0, uintptr(ky)) + ix += incX + } + return + + case blas.ConjTrans: + // Form y = alpha*A^H*x + y. + ix := kx + if incY == 1 { + for i := 0; i < m; i++ { + tmp := alpha * x[ix] + for j := 0; j < n; j++ { + y[j] += tmp * cmplx.Conj(a[i*lda+j]) + } + ix += incX + } + return + } + for i := 0; i < m; i++ { + tmp := alpha * x[ix] + jy := ky + for j := 0; j < n; j++ { + y[jy] += tmp * cmplx.Conj(a[i*lda+j]) + jy += incY + } + ix += incX + } + return + } +} + +// Zgerc performs the rank-one operation +// A += alpha * x * y^H +// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector, +// and y is an n element vector. +func (Implementation) Zgerc(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + var kx, jy int + if incX < 0 { + kx = (1 - m) * incX + } + if incY < 0 { + jy = (1 - n) * incY + } + for j := 0; j < n; j++ { + if y[jy] != 0 { + tmp := alpha * cmplx.Conj(y[jy]) + c128.AxpyInc(tmp, x, a[j:], uintptr(m), uintptr(incX), uintptr(lda), uintptr(kx), 0) + } + jy += incY + } +} + +// Zgeru performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector, +// and y is an n element vector. +func (Implementation) Zgeru(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var kx int + if incX < 0 { + kx = (1 - m) * incX + } + if incY == 1 { + for i := 0; i < m; i++ { + if x[kx] != 0 { + tmp := alpha * x[kx] + c128.AxpyUnitary(tmp, y[:n], a[i*lda:i*lda+n]) + } + kx += incX + } + return + } + var jy int + if incY < 0 { + jy = (1 - n) * incY + } + for i := 0; i < m; i++ { + if x[kx] != 0 { + tmp := alpha * x[kx] + c128.AxpyInc(tmp, y, a[i*lda:i*lda+n], uintptr(n), uintptr(incY), 1, uintptr(jy), 0) + } + kx += incX + } +} + +// Zhbmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian band matrix with k super-diagonals. The imaginary parts of +// the diagonal elements of A are ignored and assumed to be zero. +func (Implementation) Zhbmv(uplo blas.Uplo, n, k int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. 
+ if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] = beta * y[iy] + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through a. + switch uplo { + case blas.Upper: + iy := ky + if incX == 1 { + for i := 0; i < n; i++ { + aRow := a[i*lda:] + alphaxi := alpha * x[i] + sum := alphaxi * complex(real(aRow[0]), 0) + u := min(k+1, n-i) + jy := incY + for j := 1; j < u; j++ { + v := aRow[j] + sum += alpha * x[i+j] * v + y[iy+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + y[iy] += sum + iy += incY + } + } else { + ix := kx + for i := 0; i < n; i++ { + aRow := a[i*lda:] + alphaxi := alpha * x[ix] + sum := alphaxi * complex(real(aRow[0]), 0) + u := min(k+1, n-i) + jx := incX + jy := incY + for j := 1; j < u; j++ { + v := aRow[j] + sum += alpha * x[ix+jx] * v + y[iy+jy] += alphaxi * cmplx.Conj(v) + jx += incX + jy += incY + } + y[iy] += sum + ix += incX + iy += incY + } + } + case blas.Lower: + iy := ky + if incX == 1 { + for i := 0; i < n; i++ { + l := max(0, k-i) + alphaxi := alpha * x[i] + jy := l * incY + aRow := a[i*lda:] + for j := l; j < k; j++ { + v := aRow[j] + y[iy] += alpha * v * x[i-k+j] + y[iy-k*incY+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + y[iy] += alphaxi * complex(real(aRow[k]), 0) + iy += incY + } + } else { + ix := kx + for i := 0; i < n; i++ { + l := max(0, k-i) + alphaxi := alpha * x[ix] + jx := l * incX + jy := l * incY + aRow := a[i*lda:] + for j := l; j < k; j++ { + v := aRow[j] + y[iy] += alpha * v * x[ix-k*incX+jx] + y[iy-k*incY+jy] += alphaxi * cmplx.Conj(v) + jx += incX + jy += incY + } + y[iy] += alphaxi * complex(real(aRow[k]), 0) + ix += incX + iy += incY + } + } + } +} + +// Zhemv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian matrix. The imaginary parts of the diagonal elements of A are +// ignored and assumed to be zero. +func (Implementation) Zhemv(uplo blas.Uplo, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. 
+ if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] = beta * y[iy] + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through + // the triangular part of A. + + if uplo == blas.Upper { + // Form y when A is stored in upper triangle. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex128 + for j := i + 1; j < n; j++ { + y[j] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[j] + } + aii := complex(real(a[i*lda+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex128 + jx := ix + jy := iy + for j := i + 1; j < n; j++ { + jx += incX + jy += incY + y[jy] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[jx] + } + aii := complex(real(a[i*lda+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + } + } + return + } + + // Form y when A is stored in lower triangle. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex128 + for j := 0; j < i; j++ { + y[j] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[j] + } + aii := complex(real(a[i*lda+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex128 + jx := kx + jy := ky + for j := 0; j < i; j++ { + y[jy] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[jx] + jx += incX + jy += incY + } + aii := complex(real(a[i*lda+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + } + } +} + +// Zher performs the Hermitian rank-one operation +// A += alpha * x * x^H +// where A is an n×n Hermitian matrix, alpha is a real scalar, and x is an n +// element vector. On entry, the imaginary parts of the diagonal elements of A +// are ignored and assumed to be zero, on return they will be set to zero. +func (Implementation) Zher(uplo blas.Uplo, n int, alpha float64, x []complex128, incX int, a []complex128, lda int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 { + tmp := complex(alpha*real(x[i]), alpha*imag(x[i])) + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii+xtmp, 0) + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[j]) + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + + ix := kx + for i := 0; i < n; i++ { + if x[ix] != 0 { + tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix])) + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii+xtmp, 0) + jx := ix + incX + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + } + return + } + + if incX == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 { + tmp := complex(alpha*real(x[i]), alpha*imag(x[i])) + for j := 0; j < i; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[j]) + } + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii+xtmp, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + + ix := kx + for i := 0; i < n; i++ { + if x[ix] != 0 { + tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix])) + jx := kx + for j := 0; j < i; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii+xtmp, 0) + + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + } +} + +// Zher2 performs the Hermitian rank-two operation +// A += alpha * x * y^H + conj(alpha) * y * x^H +// where alpha is a scalar, x and y are n element vectors and A is an n×n +// Hermitian matrix. On entry, the imaginary parts of the diagonal elements are +// ignored and assumed to be zero. On return they will be set to zero. +func (Implementation) Zher2(uplo blas.Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + var kx, ky int + var ix, iy int + if incX != 1 || incY != 1 { + if incX < 0 { + kx = (1 - n) * incX + } + if incY < 0 { + ky = (1 - n) * incY + } + ix = kx + iy = ky + } + if uplo == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii, 0) + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii, 0) + jx := ix + incX + jy := iy + incY + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + iy += incY + } + return + } + + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + for j := 0; j < i; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + } + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + jx := kx + jy := ky + for j := 0; j < i; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + iy += incY + } +} + +// Zhpmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian matrix in packed form. The imaginary parts of the diagonal +// elements of A are ignored and assumed to be zero. +func (Implementation) Zhpmv(uplo blas.Uplo, n int, alpha complex128, ap []complex128, x []complex128, incX int, beta complex128, y []complex128, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. 
+ if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] *= beta + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form y when ap contains the upper triangle. + // Here, kk points to the current diagonal element in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + y[i] += tmp1 * complex(real(ap[kk]), 0) + var tmp2 complex128 + k := kk + 1 + for j := i + 1; j < n; j++ { + y[j] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[j] + k++ + } + y[i] += alpha * tmp2 + kk += n - i + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + y[iy] += tmp1 * complex(real(ap[kk]), 0) + var tmp2 complex128 + jx := ix + jy := iy + for k := kk + 1; k < kk+n-i; k++ { + jx += incX + jy += incY + y[jy] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[jx] + } + y[iy] += alpha * tmp2 + ix += incX + iy += incY + kk += n - i + } + } + return + } + + // Form y when ap contains the lower triangle. + // Here, kk points to the beginning of current row in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex128 + k := kk + for j := 0; j < i; j++ { + y[j] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[j] + k++ + } + aii := complex(real(ap[kk+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + kk += i + 1 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex128 + jx := kx + jy := ky + for k := kk; k < kk+i; k++ { + y[jy] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[jx] + jx += incX + jy += incY + } + aii := complex(real(ap[kk+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + kk += i + 1 + } + } +} + +// Zhpr performs the Hermitian rank-1 operation +// A += alpha * x * x^H +// where alpha is a real scalar, x is a vector, and A is an n×n hermitian matrix +// in packed form. On entry, the imaginary parts of the diagonal elements are +// assumed to be zero, and on return they are set to zero. +func (Implementation) Zhpr(uplo blas.Uplo, n int, alpha float64, x []complex128, incX int, ap []complex128) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form A when upper triangle is stored in AP. + // Here, kk points to the current diagonal element in ap. 
+ if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if xi != 0 { + aii := real(ap[kk]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk] = complex(aii, 0) + + tmp := complex(alpha, 0) * xi + a := ap[kk+1 : kk+n-i] + x := x[i+1 : n] + for j, v := range x { + a[j] += tmp * cmplx.Conj(v) + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + kk += n - i + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if xi != 0 { + aii := real(ap[kk]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk] = complex(aii, 0) + + tmp := complex(alpha, 0) * xi + jx := ix + incX + a := ap[kk+1 : kk+n-i] + for k := range a { + a[k] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + ix += incX + kk += n - i + } + } + return + } + + // Form A when lower triangle is stored in AP. + // Here, kk points to the beginning of current row in ap. + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if xi != 0 { + tmp := complex(alpha, 0) * xi + a := ap[kk : kk+i] + for j, v := range x[:i] { + a[j] += tmp * cmplx.Conj(v) + } + + aii := real(ap[kk+i]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if xi != 0 { + tmp := complex(alpha, 0) * xi + a := ap[kk : kk+i] + jx := kx + for k := range a { + a[k] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + + aii := real(ap[kk+i]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + ix += incX + kk += i + 1 + } + } +} + +// Zhpr2 performs the Hermitian rank-2 operation +// A += alpha * x * y^H + conj(alpha) * y * x^H +// where alpha is a complex scalar, x and y are n element vectors, and A is an +// n×n Hermitian matrix, supplied in packed form. On entry, the imaginary parts +// of the diagonal elements are assumed to be zero, and on return they are set to zero. +func (Implementation) Zhpr2(uplo blas.Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, ap []complex128) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + // Set up start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form A when upper triangle is stored in AP. + // Here, kk points to the current diagonal element in ap. 
+ if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + aii := real(ap[kk]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + ap[kk] = complex(aii, 0) + k := kk + 1 + for j := i + 1; j < n; j++ { + ap[k] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + k++ + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + kk += n - i + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + aii := real(ap[kk]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + ap[kk] = complex(aii, 0) + jx := ix + incX + jy := iy + incY + for k := kk + 1; k < kk+n-i; k++ { + ap[k] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + ix += incX + iy += incY + kk += n - i + } + } + return + } + + // Form A when lower triangle is stored in AP. + // Here, kk points to the beginning of current row in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + k := kk + for j := 0; j < i; j++ { + ap[k] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + k++ + } + aii := real(ap[kk+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + kk += i + 1 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + jx := kx + jy := ky + for k := kk; k < kk+i; k++ { + ap[k] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + aii := real(ap[kk+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + ix += incX + iy += incY + kk += i + 1 + } + } +} + +// Ztbmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is an n element vector and A is an n×n triangular band matrix, with +// (k+1) diagonals. +func (Implementation) Ztbmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, a []complex128, lda int, x []complex128, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. 
+ var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + switch trans { + case blas.NoTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if diag == blas.NonUnit { + xi *= a[i*lda] + } + kk := min(k, n-i-1) + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + xi += x[i+j+1] * aij + } + x[i] = xi + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if diag == blas.NonUnit { + xi *= a[i*lda] + } + kk := min(k, n-i-1) + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + xi += x[jx] * aij + jx += incX + } + x[ix] = xi + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + xi *= a[i*lda+k] + } + kk := min(k, i) + for j, aij := range a[i*lda+k-kk : i*lda+k] { + xi += x[i-kk+j] * aij + } + x[i] = xi + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + xi *= a[i*lda+k] + } + kk := min(k, i) + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + xi += x[jx] * aij + jx += incX + } + x[ix] = xi + ix -= incX + } + } + } + case blas.Trans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+j+1] += xi * aij + } + if diag == blas.NonUnit { + x[i] *= a[i*lda] + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + jx := ix + incX + xi := x[ix] + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] += xi * aij + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda] + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] += xi * aij + } + if diag == blas.NonUnit { + x[i] *= a[i*lda+k] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + jx := ix - kk*incX + xi := x[ix] + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] += xi * aij + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda+k] + } + ix += incX + } + } + } + case blas.ConjTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+j+1] += xi * cmplx.Conj(aij) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + jx := ix + incX + xi := x[ix] + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] += xi * cmplx.Conj(aij) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda]) + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] += xi * cmplx.Conj(aij) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+k]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + jx := ix - kk*incX + xi := x[ix] + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] += xi * cmplx.Conj(aij) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+k]) + } + ix += incX + } + } + } + } +} + +// Ztbsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular band matrix +// with 
(k+1) diagonals. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Ztbsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, a []complex128, lda int, x []complex128, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + switch trans { + case blas.NoTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + var sum complex128 + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + sum += x[i+1+j] * aij + } + x[i] -= sum + if diag == blas.NonUnit { + x[i] /= a[i*lda] + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + var sum complex128 + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + sum += x[jx] * aij + jx += incX + } + x[ix] -= sum + if diag == blas.NonUnit { + x[ix] /= a[i*lda] + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + var sum complex128 + for j, aij := range a[i*lda+k-kk : i*lda+k] { + sum += x[i-kk+j] * aij + } + x[i] -= sum + if diag == blas.NonUnit { + x[i] /= a[i*lda+k] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + var sum complex128 + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + sum += x[jx] * aij + jx += incX + } + x[ix] -= sum + if diag == blas.NonUnit { + x[ix] /= a[i*lda+k] + } + ix += incX + } + } + } + case blas.Trans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] /= a[i*lda] + } + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+1+j] -= xi * aij + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] /= a[i*lda] + } + kk := min(k, n-i-1) + xi := x[ix] + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] -= xi * aij + jx += incX + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] /= a[i*lda+k] + } + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] -= xi * aij + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] /= a[i*lda+k] + } + kk := min(k, i) + xi := x[ix] + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] -= xi * aij + jx += incX + } + ix -= incX + } + } + } + case blas.ConjTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] /= cmplx.Conj(a[i*lda]) + } + kk := min(k, n-i-1) + xi := x[i] + for j, aij := 
range a[i*lda+1 : i*lda+kk+1] { + x[i+1+j] -= xi * cmplx.Conj(aij) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] /= cmplx.Conj(a[i*lda]) + } + kk := min(k, n-i-1) + xi := x[ix] + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] -= xi * cmplx.Conj(aij) + jx += incX + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] /= cmplx.Conj(a[i*lda+k]) + } + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] -= xi * cmplx.Conj(aij) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] /= cmplx.Conj(a[i*lda+k]) + } + kk := min(k, i) + xi := x[ix] + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] -= xi * cmplx.Conj(aij) + jx += incX + } + ix -= incX + } + } + } + } +} + +// Ztpmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is an n element vector and A is an n×n triangular matrix, supplied in +// packed form. +func (Implementation) Ztpmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex128, x []complex128, incX int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = A*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i := range x { + if diag == blas.NonUnit { + x[i] *= ap[kk] + } + if n-i-1 > 0 { + x[i] += c128.DotuUnitary(ap[kk+1:kk+n-i], x[i+1:]) + } + kk += n - i + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] *= ap[kk] + } + if n-i-1 > 0 { + x[ix] += c128.DotuInc(ap[kk+1:kk+n-i], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix += incX + kk += n - i + } + } + } else { + // kk points to the beginning of current row in ap. + kk := n*(n+1)/2 - n + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] *= ap[kk+i] + } + if i > 0 { + x[i] += c128.DotuUnitary(ap[kk:kk+i], x[:i]) + } + kk -= i + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] *= ap[kk+i] + } + if i > 0 { + x[ix] += c128.DotuInc(ap[kk:kk+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + ix -= incX + kk -= i + } + } + } + return + } + + if trans == blas.Trans { + // Form x = A^T*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. 
+ kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= ap[kk] + } + if n-i-1 > 0 { + c128.AxpyUnitary(xi, ap[kk+1:kk+n-i], x[i+1:n]) + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= ap[kk] + } + if n-i-1 > 0 { + c128.AxpyInc(xi, ap[kk+1:kk+n-i], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + // kk points to the beginning of current row in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i := range x { + if i > 0 { + c128.AxpyUnitary(x[i], ap[kk:kk+i], x[:i]) + } + if diag == blas.NonUnit { + x[i] *= ap[kk+i] + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + c128.AxpyInc(x[ix], ap[kk:kk+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + if diag == blas.NonUnit { + x[ix] *= ap[kk+i] + } + ix += incX + kk += i + 1 + } + } + } + return + } + + // Form x = A^H*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. + kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(ap[kk]) + } + k := kk + 1 + for j := i + 1; j < n; j++ { + x[j] += xi * cmplx.Conj(ap[k]) + k++ + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(ap[kk]) + } + jx := ix + incX + k := kk + 1 + for j := i + 1; j < n; j++ { + x[jx] += xi * cmplx.Conj(ap[k]) + jx += incX + k++ + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + // kk points to the beginning of current row in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i, xi := range x { + for j := 0; j < i; j++ { + x[j] += xi * cmplx.Conj(ap[kk+j]) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(ap[kk+i]) + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + jx := kx + for j := 0; j < i; j++ { + x[jx] += xi * cmplx.Conj(ap[kk+j]) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(ap[kk+i]) + } + ix += incX + kk += i + 1 + } + } + } +} + +// Ztpsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular matrix in +// packed form. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Ztpsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex128, x []complex128, incX int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. 
+ var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through ap. + + if trans == blas.NoTrans { + // Form x = inv(A)*x. + if uplo == blas.Upper { + kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + aii := ap[kk] + if n-i-1 > 0 { + x[i] -= c128.DotuUnitary(x[i+1:n], ap[kk+1:kk+n-i]) + } + if diag == blas.NonUnit { + x[i] /= aii + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + aii := ap[kk] + if n-i-1 > 0 { + x[ix] -= c128.DotuInc(x, ap[kk+1:kk+n-i], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + } + if diag == blas.NonUnit { + x[ix] /= aii + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + kk := 0 + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + x[i] -= c128.DotuUnitary(x[:i], ap[kk:kk+i]) + } + if diag == blas.NonUnit { + x[i] /= ap[kk+i] + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + x[ix] -= c128.DotuInc(x, ap[kk:kk+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + } + if diag == blas.NonUnit { + x[ix] /= ap[kk+i] + } + ix += incX + kk += i + 1 + } + } + } + return + } + + if trans == blas.Trans { + // Form x = inv(A^T)*x. + if uplo == blas.Upper { + kk := 0 + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= ap[kk] + } + if n-j-1 > 0 { + c128.AxpyUnitary(-x[j], ap[kk+1:kk+n-j], x[j+1:n]) + } + kk += n - j + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= ap[kk] + } + if n-j-1 > 0 { + c128.AxpyInc(-x[jx], ap[kk+1:kk+n-j], x, uintptr(n-j-1), 1, uintptr(incX), 0, uintptr(jx+incX)) + } + jx += incX + kk += n - j + } + } + } else { + kk := n*(n+1)/2 - n + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= ap[kk+j] + } + if j > 0 { + c128.AxpyUnitary(-x[j], ap[kk:kk+j], x[:j]) + } + kk -= j + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= ap[kk+j] + } + if j > 0 { + c128.AxpyInc(-x[jx], ap[kk:kk+j], x, uintptr(j), 1, uintptr(incX), 0, uintptr(kx)) + } + jx -= incX + kk -= j + } + } + } + return + } + + // Form x = inv(A^H)*x. + if uplo == blas.Upper { + kk := 0 + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(ap[kk]) + } + xj := x[j] + k := kk + 1 + for i := j + 1; i < n; i++ { + x[i] -= xj * cmplx.Conj(ap[k]) + k++ + } + kk += n - j + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(ap[kk]) + } + xj := x[jx] + ix := jx + incX + k := kk + 1 + for i := j + 1; i < n; i++ { + x[ix] -= xj * cmplx.Conj(ap[k]) + ix += incX + k++ + } + jx += incX + kk += n - j + } + } + } else { + kk := n*(n+1)/2 - n + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(ap[kk+j]) + } + xj := x[j] + for i := 0; i < j; i++ { + x[i] -= xj * cmplx.Conj(ap[kk+i]) + } + kk -= j + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(ap[kk+j]) + } + xj := x[jx] + ix := kx + for i := 0; i < j; i++ { + x[ix] -= xj * cmplx.Conj(ap[kk+i]) + ix += incX + } + jx -= incX + kk -= j + } + } + } +} + +// Ztrmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is a vector, and A is an n×n triangular matrix. 
+func (Implementation) Ztrmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex128, lda int, x []complex128, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = A*x. + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if n-i-1 > 0 { + x[i] += c128.DotuUnitary(a[i*lda+i+1:i*lda+n], x[i+1:n]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if n-i-1 > 0 { + x[ix] += c128.DotuInc(a[i*lda+i+1:i*lda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if i > 0 { + x[i] += c128.DotuUnitary(a[i*lda:i*lda+i], x[:i]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if i > 0 { + x[ix] += c128.DotuInc(a[i*lda:i*lda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + ix -= incX + } + } + } + return + } + + if trans == blas.Trans { + // Form x = A^T*x. + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if n-i-1 > 0 { + c128.AxpyUnitary(xi, a[i*lda+i+1:i*lda+n], x[i+1:n]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if n-i-1 > 0 { + c128.AxpyInc(xi, a[i*lda+i+1:i*lda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + c128.AxpyUnitary(x[i], a[i*lda:i*lda+i], x[:i]) + } + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + c128.AxpyInc(x[ix], a[i*lda:i*lda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + ix += incX + } + } + } + return + } + + // Form x = A^H*x. 
+ if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+i]) + } + for j := i + 1; j < n; j++ { + x[j] += xi * cmplx.Conj(a[i*lda+j]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+i]) + } + jx := ix + incX + for j := i + 1; j < n; j++ { + x[jx] += xi * cmplx.Conj(a[i*lda+j]) + jx += incX + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + x[j] += x[i] * cmplx.Conj(a[i*lda+j]) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+i]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + jx := kx + for j := 0; j < i; j++ { + x[jx] += x[ix] * cmplx.Conj(a[i*lda+j]) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+i]) + } + ix += incX + } + } + } +} + +// Ztrsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular matrix. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Ztrsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex128, lda int, x []complex128, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = inv(A)*x. + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + aii := a[i*lda+i] + if n-i-1 > 0 { + x[i] -= c128.DotuUnitary(x[i+1:n], a[i*lda+i+1:i*lda+n]) + } + if diag == blas.NonUnit { + x[i] /= aii + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + aii := a[i*lda+i] + if n-i-1 > 0 { + x[ix] -= c128.DotuInc(x, a[i*lda+i+1:i*lda+n], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + } + if diag == blas.NonUnit { + x[ix] /= aii + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + x[i] -= c128.DotuUnitary(x[:i], a[i*lda:i*lda+i]) + } + if diag == blas.NonUnit { + x[i] /= a[i*lda+i] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + x[ix] -= c128.DotuInc(x, a[i*lda:i*lda+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + } + if diag == blas.NonUnit { + x[ix] /= a[i*lda+i] + } + ix += incX + } + } + } + return + } + + if trans == blas.Trans { + // Form x = inv(A^T)*x. 
+ if uplo == blas.Upper { + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= a[j*lda+j] + } + if n-j-1 > 0 { + c128.AxpyUnitary(-x[j], a[j*lda+j+1:j*lda+n], x[j+1:n]) + } + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= a[j*lda+j] + } + if n-j-1 > 0 { + c128.AxpyInc(-x[jx], a[j*lda+j+1:j*lda+n], x, uintptr(n-j-1), 1, uintptr(incX), 0, uintptr(jx+incX)) + } + jx += incX + } + } + } else { + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= a[j*lda+j] + } + xj := x[j] + if j > 0 { + c128.AxpyUnitary(-xj, a[j*lda:j*lda+j], x[:j]) + } + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= a[j*lda+j] + } + if j > 0 { + c128.AxpyInc(-x[jx], a[j*lda:j*lda+j], x, uintptr(j), 1, uintptr(incX), 0, uintptr(kx)) + } + jx -= incX + } + } + } + return + } + + // Form x = inv(A^H)*x. + if uplo == blas.Upper { + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[j] + for i := j + 1; i < n; i++ { + x[i] -= xj * cmplx.Conj(a[j*lda+i]) + } + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[jx] + ix := jx + incX + for i := j + 1; i < n; i++ { + x[ix] -= xj * cmplx.Conj(a[j*lda+i]) + ix += incX + } + jx += incX + } + } + } else { + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[j] + for i := 0; i < j; i++ { + x[i] -= xj * cmplx.Conj(a[j*lda+i]) + } + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[jx] + ix := kx + for i := 0; i < j; i++ { + x[ix] -= xj * cmplx.Conj(a[j*lda+i]) + ix += incX + } + jx -= incX + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx64.go new file mode 100644 index 0000000..10faf8f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level2cmplx64.go @@ -0,0 +1,2942 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + cmplx "gonum.org/v1/gonum/internal/cmplx64" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c64" +) + +var _ blas.Complex64Level2 = Implementation{} + +// Cgbmv performs one of the matrix-vector operations +// y = alpha * A * x + beta * y if trans = blas.NoTrans +// y = alpha * A^T * x + beta * y if trans = blas.Trans +// y = alpha * A^H * x + beta * y if trans = blas.ConjTrans +// where alpha and beta are scalars, x and y are vectors, and A is an m×n band matrix +// with kL sub-diagonals and kU super-diagonals. +// +// Complex64 implementations are autogenerated and not directly tested. 
+func (Implementation) Cgbmv(trans blas.Transpose, m, n, kL, kU int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if kL < 0 { + panic(kLLT0) + } + if kU < 0 { + panic(kULT0) + } + if lda < kL+kU+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(min(m, n+kL)-1)+kL+kU+1 { + panic(shortA) + } + var lenX, lenY int + if trans == blas.NoTrans { + lenX, lenY = n, m + } else { + lenX, lenY = m, n + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx int + if incX < 0 { + kx = (1 - lenX) * incX + } + var ky int + if incY < 0 { + ky = (1 - lenY) * incY + } + + // Form y = beta*y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + c64.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + c64.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + c64.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + nRow := min(m, n+kL) + nCol := kL + 1 + kU + switch trans { + case blas.NoTrans: + iy := ky + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) + xtmp := x[off : off+u-l] + var sum complex64 + for j, v := range aRow { + sum += xtmp[j] * v + } + y[iy] += alpha * sum + iy += incY + } + } else { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incX + jx := kx + var sum complex64 + for _, v := range aRow { + sum += x[off+jx] * v + jx += incX + } + y[iy] += alpha * sum + iy += incY + } + } + case blas.Trans: + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[i] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * v + jy += incY + } + } + } else { + ix := kx + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[ix] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * v + jy += incY + } + ix += incX + } + } + case blas.ConjTrans: + if incX == 1 { + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[i] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + } + } else { + ix := kx + for i := 0; i < nRow; i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + aRow := a[i*lda+l : i*lda+u] + off := max(0, i-kL) * incY + alphaxi := alpha * x[ix] + jy := ky + for _, v := range aRow { + y[off+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + ix += incX + } + } + } +} + +// Cgemv performs 
one of the matrix-vector operations +// y = alpha * A * x + beta * y if trans = blas.NoTrans +// y = alpha * A^T * x + beta * y if trans = blas.Trans +// y = alpha * A^H * x + beta * y if trans = blas.ConjTrans +// where alpha and beta are scalars, x and y are vectors, and A is an m×n dense matrix. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cgemv(trans blas.Transpose, m, n int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + var lenX, lenY int + if trans == blas.NoTrans { + lenX = n + lenY = m + } else { + lenX = m + lenY = n + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx int + if incX < 0 { + kx = (1 - lenX) * incX + } + var ky int + if incY < 0 { + ky = (1 - lenY) * incY + } + + // Form y = beta*y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + c64.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + c64.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + c64.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + switch trans { + default: + // Form y = alpha*A*x + y. + iy := ky + if incX == 1 { + for i := 0; i < m; i++ { + y[iy] += alpha * c64.DotuUnitary(a[i*lda:i*lda+n], x[:n]) + iy += incY + } + return + } + for i := 0; i < m; i++ { + y[iy] += alpha * c64.DotuInc(a[i*lda:i*lda+n], x, uintptr(n), 1, uintptr(incX), 0, uintptr(kx)) + iy += incY + } + return + + case blas.Trans: + // Form y = alpha*A^T*x + y. + ix := kx + if incY == 1 { + for i := 0; i < m; i++ { + c64.AxpyUnitary(alpha*x[ix], a[i*lda:i*lda+n], y[:n]) + ix += incX + } + return + } + for i := 0; i < m; i++ { + c64.AxpyInc(alpha*x[ix], a[i*lda:i*lda+n], y, uintptr(n), 1, uintptr(incY), 0, uintptr(ky)) + ix += incX + } + return + + case blas.ConjTrans: + // Form y = alpha*A^H*x + y. + ix := kx + if incY == 1 { + for i := 0; i < m; i++ { + tmp := alpha * x[ix] + for j := 0; j < n; j++ { + y[j] += tmp * cmplx.Conj(a[i*lda+j]) + } + ix += incX + } + return + } + for i := 0; i < m; i++ { + tmp := alpha * x[ix] + jy := ky + for j := 0; j < n; j++ { + y[jy] += tmp * cmplx.Conj(a[i*lda+j]) + jy += incY + } + ix += incX + } + return + } +} + +// Cgerc performs the rank-one operation +// A += alpha * x * y^H +// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector, +// and y is an n element vector. +// +// Complex64 implementations are autogenerated and not directly tested. 
+func (Implementation) Cgerc(m, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var kx, jy int + if incX < 0 { + kx = (1 - m) * incX + } + if incY < 0 { + jy = (1 - n) * incY + } + for j := 0; j < n; j++ { + if y[jy] != 0 { + tmp := alpha * cmplx.Conj(y[jy]) + c64.AxpyInc(tmp, x, a[j:], uintptr(m), uintptr(incX), uintptr(lda), uintptr(kx), 0) + } + jy += incY + } +} + +// Cgeru performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, alpha is a scalar, x is an m element vector, +// and y is an n element vector. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cgeru(m, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var kx int + if incX < 0 { + kx = (1 - m) * incX + } + if incY == 1 { + for i := 0; i < m; i++ { + if x[kx] != 0 { + tmp := alpha * x[kx] + c64.AxpyUnitary(tmp, y[:n], a[i*lda:i*lda+n]) + } + kx += incX + } + return + } + var jy int + if incY < 0 { + jy = (1 - n) * incY + } + for i := 0; i < m; i++ { + if x[kx] != 0 { + tmp := alpha * x[kx] + c64.AxpyInc(tmp, y, a[i*lda:i*lda+n], uintptr(n), uintptr(incY), 1, uintptr(jy), 0) + } + kx += incX + } +} + +// Chbmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian band matrix with k super-diagonals. The imaginary parts of +// the diagonal elements of A are ignored and assumed to be zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chbmv(uplo blas.Uplo, n, k int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] = beta * y[iy] + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through a. + switch uplo { + case blas.Upper: + iy := ky + if incX == 1 { + for i := 0; i < n; i++ { + aRow := a[i*lda:] + alphaxi := alpha * x[i] + sum := alphaxi * complex(real(aRow[0]), 0) + u := min(k+1, n-i) + jy := incY + for j := 1; j < u; j++ { + v := aRow[j] + sum += alpha * x[i+j] * v + y[iy+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + y[iy] += sum + iy += incY + } + } else { + ix := kx + for i := 0; i < n; i++ { + aRow := a[i*lda:] + alphaxi := alpha * x[ix] + sum := alphaxi * complex(real(aRow[0]), 0) + u := min(k+1, n-i) + jx := incX + jy := incY + for j := 1; j < u; j++ { + v := aRow[j] + sum += alpha * x[ix+jx] * v + y[iy+jy] += alphaxi * cmplx.Conj(v) + jx += incX + jy += incY + } + y[iy] += sum + ix += incX + iy += incY + } + } + case blas.Lower: + iy := ky + if incX == 1 { + for i := 0; i < n; i++ { + l := max(0, k-i) + alphaxi := alpha * x[i] + jy := l * incY + aRow := a[i*lda:] + for j := l; j < k; j++ { + v := aRow[j] + y[iy] += alpha * v * x[i-k+j] + y[iy-k*incY+jy] += alphaxi * cmplx.Conj(v) + jy += incY + } + y[iy] += alphaxi * complex(real(aRow[k]), 0) + iy += incY + } + } else { + ix := kx + for i := 0; i < n; i++ { + l := max(0, k-i) + alphaxi := alpha * x[ix] + jx := l * incX + jy := l * incY + aRow := a[i*lda:] + for j := l; j < k; j++ { + v := aRow[j] + y[iy] += alpha * v * x[ix-k*incX+jx] + y[iy-k*incY+jy] += alphaxi * cmplx.Conj(v) + jx += incX + jy += incY + } + y[iy] += alphaxi * complex(real(aRow[k]), 0) + ix += incX + iy += incY + } + } + } +} + +// Chemv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian matrix. The imaginary parts of the diagonal elements of A are +// ignored and assumed to be zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chemv(uplo blas.Uplo, n int, alpha complex64, a []complex64, lda int, x []complex64, incX int, beta complex64, y []complex64, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] = beta * y[iy] + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through + // the triangular part of A. + + if uplo == blas.Upper { + // Form y when A is stored in upper triangle. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex64 + for j := i + 1; j < n; j++ { + y[j] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[j] + } + aii := complex(real(a[i*lda+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex64 + jx := ix + jy := iy + for j := i + 1; j < n; j++ { + jx += incX + jy += incY + y[jy] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[jx] + } + aii := complex(real(a[i*lda+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + } + } + return + } + + // Form y when A is stored in lower triangle. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex64 + for j := 0; j < i; j++ { + y[j] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[j] + } + aii := complex(real(a[i*lda+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex64 + jx := kx + jy := ky + for j := 0; j < i; j++ { + y[jy] += tmp1 * cmplx.Conj(a[i*lda+j]) + tmp2 += a[i*lda+j] * x[jx] + jx += incX + jy += incY + } + aii := complex(real(a[i*lda+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + } + } +} + +// Cher performs the Hermitian rank-one operation +// A += alpha * x * x^H +// where A is an n×n Hermitian matrix, alpha is a real scalar, and x is an n +// element vector. On entry, the imaginary parts of the diagonal elements of A +// are ignored and assumed to be zero, on return they will be set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cher(uplo blas.Uplo, n int, alpha float32, x []complex64, incX int, a []complex64, lda int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 { + tmp := complex(alpha*real(x[i]), alpha*imag(x[i])) + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii+xtmp, 0) + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[j]) + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + + ix := kx + for i := 0; i < n; i++ { + if x[ix] != 0 { + tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix])) + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii+xtmp, 0) + jx := ix + incX + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + } + return + } + + if incX == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 { + tmp := complex(alpha*real(x[i]), alpha*imag(x[i])) + for j := 0; j < i; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[j]) + } + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii+xtmp, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + + ix := kx + for i := 0; i < n; i++ { + if x[ix] != 0 { + tmp := complex(alpha*real(x[ix]), alpha*imag(x[ix])) + jx := kx + for j := 0; j < i; j++ { + a[i*lda+j] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + aii := real(a[i*lda+i]) + xtmp := real(tmp * cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii+xtmp, 0) + + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + } +} + +// Cher2 performs the Hermitian rank-two operation +// A += alpha * x * y^H + conj(alpha) * y * x^H +// where alpha is a scalar, x and y are n element vectors and A is an n×n +// Hermitian matrix. On entry, the imaginary parts of the diagonal elements are +// ignored and assumed to be zero. On return they will be set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cher2(uplo blas.Uplo, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, a []complex64, lda int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + var kx, ky int + var ix, iy int + if incX != 1 || incY != 1 { + if incX < 0 { + kx = (1 - n) * incX + } + if incY < 0 { + ky = (1 - n) * incY + } + ix = kx + iy = ky + } + if uplo == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii, 0) + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii, 0) + jx := ix + incX + jy := iy + incY + for j := i + 1; j < n; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + iy += incY + } + return + } + + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + for j := 0; j < i; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + } + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + a[i*lda+i] = complex(aii, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + } + return + } + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + jx := kx + jy := ky + for j := 0; j < i; j++ { + a[i*lda+j] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + aii := real(a[i*lda+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + a[i*lda+i] = complex(aii, 0) + } else { + aii := real(a[i*lda+i]) + a[i*lda+i] = complex(aii, 0) + } + ix += incX + iy += incY + } +} + +// Chpmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where alpha and beta are scalars, x and y are vectors, and A is an n×n +// Hermitian matrix in packed form. The imaginary parts of the diagonal +// elements of A are ignored and assumed to be zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chpmv(uplo blas.Uplo, n int, alpha complex64, ap []complex64, x []complex64, incX int, beta complex64, y []complex64, incY int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up the start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // Form y = beta*y. 
+ if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + for i, v := range y[:n] { + y[i] = beta * v + } + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + for i := 0; i < n; i++ { + y[iy] *= beta + iy += incY + } + } + } + } + + if alpha == 0 { + return + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form y when ap contains the upper triangle. + // Here, kk points to the current diagonal element in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + y[i] += tmp1 * complex(real(ap[kk]), 0) + var tmp2 complex64 + k := kk + 1 + for j := i + 1; j < n; j++ { + y[j] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[j] + k++ + } + y[i] += alpha * tmp2 + kk += n - i + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + y[iy] += tmp1 * complex(real(ap[kk]), 0) + var tmp2 complex64 + jx := ix + jy := iy + for k := kk + 1; k < kk+n-i; k++ { + jx += incX + jy += incY + y[jy] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[jx] + } + y[iy] += alpha * tmp2 + ix += incX + iy += incY + kk += n - i + } + } + return + } + + // Form y when ap contains the lower triangle. + // Here, kk points to the beginning of current row in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + tmp1 := alpha * x[i] + var tmp2 complex64 + k := kk + for j := 0; j < i; j++ { + y[j] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[j] + k++ + } + aii := complex(real(ap[kk+i]), 0) + y[i] += tmp1*aii + alpha*tmp2 + kk += i + 1 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + tmp1 := alpha * x[ix] + var tmp2 complex64 + jx := kx + jy := ky + for k := kk; k < kk+i; k++ { + y[jy] += tmp1 * cmplx.Conj(ap[k]) + tmp2 += ap[k] * x[jx] + jx += incX + jy += incY + } + aii := complex(real(ap[kk+i]), 0) + y[iy] += tmp1*aii + alpha*tmp2 + ix += incX + iy += incY + kk += i + 1 + } + } +} + +// Chpr performs the Hermitian rank-1 operation +// A += alpha * x * x^H +// where alpha is a real scalar, x is a vector, and A is an n×n hermitian matrix +// in packed form. On entry, the imaginary parts of the diagonal elements are +// assumed to be zero, and on return they are set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chpr(uplo blas.Uplo, n int, alpha float32, x []complex64, incX int, ap []complex64) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form A when upper triangle is stored in AP. + // Here, kk points to the current diagonal element in ap. 
+ if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if xi != 0 { + aii := real(ap[kk]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk] = complex(aii, 0) + + tmp := complex(alpha, 0) * xi + a := ap[kk+1 : kk+n-i] + x := x[i+1 : n] + for j, v := range x { + a[j] += tmp * cmplx.Conj(v) + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + kk += n - i + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if xi != 0 { + aii := real(ap[kk]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk] = complex(aii, 0) + + tmp := complex(alpha, 0) * xi + jx := ix + incX + a := ap[kk+1 : kk+n-i] + for k := range a { + a[k] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + ix += incX + kk += n - i + } + } + return + } + + // Form A when lower triangle is stored in AP. + // Here, kk points to the beginning of current row in ap. + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if xi != 0 { + tmp := complex(alpha, 0) * xi + a := ap[kk : kk+i] + for j, v := range x[:i] { + a[j] += tmp * cmplx.Conj(v) + } + + aii := real(ap[kk+i]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if xi != 0 { + tmp := complex(alpha, 0) * xi + a := ap[kk : kk+i] + jx := kx + for k := range a { + a[k] += tmp * cmplx.Conj(x[jx]) + jx += incX + } + + aii := real(ap[kk+i]) + alpha*real(cmplx.Conj(xi)*xi) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + ix += incX + kk += i + 1 + } + } +} + +// Chpr2 performs the Hermitian rank-2 operation +// A += alpha * x * y^H + conj(alpha) * y * x^H +// where alpha is a complex scalar, x and y are n element vectors, and A is an +// n×n Hermitian matrix, supplied in packed form. On entry, the imaginary parts +// of the diagonal elements are assumed to be zero, and on return they are set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chpr2(uplo blas.Uplo, n int, alpha complex64, x []complex64, incX int, y []complex64, incY int, ap []complex64) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + // Set up start indices in X and Y. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + var ky int + if incY < 0 { + ky = (1 - n) * incY + } + + // The elements of A are accessed sequentially with one pass through ap. + + var kk int + if uplo == blas.Upper { + // Form A when upper triangle is stored in AP. + // Here, kk points to the current diagonal element in ap. 
+ if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + aii := real(ap[kk]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + ap[kk] = complex(aii, 0) + k := kk + 1 + for j := i + 1; j < n; j++ { + ap[k] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + k++ + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + kk += n - i + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + aii := real(ap[kk]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + ap[kk] = complex(aii, 0) + jx := ix + incX + jy := iy + incY + for k := kk + 1; k < kk+n-i; k++ { + ap[k] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + } else { + ap[kk] = complex(real(ap[kk]), 0) + } + ix += incX + iy += incY + kk += n - i + } + } + return + } + + // Form A when lower triangle is stored in AP. + // Here, kk points to the beginning of current row in ap. + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + if x[i] != 0 || y[i] != 0 { + tmp1 := alpha * x[i] + tmp2 := cmplx.Conj(alpha) * y[i] + k := kk + for j := 0; j < i; j++ { + ap[k] += tmp1*cmplx.Conj(y[j]) + tmp2*cmplx.Conj(x[j]) + k++ + } + aii := real(ap[kk+i]) + real(tmp1*cmplx.Conj(y[i])) + real(tmp2*cmplx.Conj(x[i])) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + kk += i + 1 + } + } else { + ix := kx + iy := ky + for i := 0; i < n; i++ { + if x[ix] != 0 || y[iy] != 0 { + tmp1 := alpha * x[ix] + tmp2 := cmplx.Conj(alpha) * y[iy] + jx := kx + jy := ky + for k := kk; k < kk+i; k++ { + ap[k] += tmp1*cmplx.Conj(y[jy]) + tmp2*cmplx.Conj(x[jx]) + jx += incX + jy += incY + } + aii := real(ap[kk+i]) + real(tmp1*cmplx.Conj(y[iy])) + real(tmp2*cmplx.Conj(x[ix])) + ap[kk+i] = complex(aii, 0) + } else { + ap[kk+i] = complex(real(ap[kk+i]), 0) + } + ix += incX + iy += incY + kk += i + 1 + } + } +} + +// Ctbmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is an n element vector and A is an n×n triangular band matrix, with +// (k+1) diagonals. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctbmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, a []complex64, lda int, x []complex64, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. 
+ var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + switch trans { + case blas.NoTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if diag == blas.NonUnit { + xi *= a[i*lda] + } + kk := min(k, n-i-1) + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + xi += x[i+j+1] * aij + } + x[i] = xi + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + if diag == blas.NonUnit { + xi *= a[i*lda] + } + kk := min(k, n-i-1) + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + xi += x[jx] * aij + jx += incX + } + x[ix] = xi + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + xi *= a[i*lda+k] + } + kk := min(k, i) + for j, aij := range a[i*lda+k-kk : i*lda+k] { + xi += x[i-kk+j] * aij + } + x[i] = xi + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + xi *= a[i*lda+k] + } + kk := min(k, i) + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + xi += x[jx] * aij + jx += incX + } + x[ix] = xi + ix -= incX + } + } + } + case blas.Trans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+j+1] += xi * aij + } + if diag == blas.NonUnit { + x[i] *= a[i*lda] + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + jx := ix + incX + xi := x[ix] + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] += xi * aij + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda] + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] += xi * aij + } + if diag == blas.NonUnit { + x[i] *= a[i*lda+k] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + jx := ix - kk*incX + xi := x[ix] + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] += xi * aij + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda+k] + } + ix += incX + } + } + } + case blas.ConjTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+j+1] += xi * cmplx.Conj(aij) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + jx := ix + incX + xi := x[ix] + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] += xi * cmplx.Conj(aij) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda]) + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] += xi * cmplx.Conj(aij) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+k]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + jx := ix - kk*incX + xi := x[ix] + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] += xi * cmplx.Conj(aij) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+k]) + } + ix += incX + } + } + } + } +} + +// Ctbsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular band matrix +// with 
(k+1) diagonals. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctbsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, a []complex64, lda int, x []complex64, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + switch trans { + case blas.NoTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + var sum complex64 + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + sum += x[i+1+j] * aij + } + x[i] -= sum + if diag == blas.NonUnit { + x[i] /= a[i*lda] + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + kk := min(k, n-i-1) + var sum complex64 + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + sum += x[jx] * aij + jx += incX + } + x[ix] -= sum + if diag == blas.NonUnit { + x[ix] /= a[i*lda] + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + kk := min(k, i) + var sum complex64 + for j, aij := range a[i*lda+k-kk : i*lda+k] { + sum += x[i-kk+j] * aij + } + x[i] -= sum + if diag == blas.NonUnit { + x[i] /= a[i*lda+k] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + kk := min(k, i) + var sum complex64 + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + sum += x[jx] * aij + jx += incX + } + x[ix] -= sum + if diag == blas.NonUnit { + x[ix] /= a[i*lda+k] + } + ix += incX + } + } + } + case blas.Trans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] /= a[i*lda] + } + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+1+j] -= xi * aij + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] /= a[i*lda] + } + kk := min(k, n-i-1) + xi := x[ix] + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] -= xi * aij + jx += incX + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] /= a[i*lda+k] + } + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] -= xi * aij + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] /= a[i*lda+k] + } + kk := min(k, i) + xi := x[ix] + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] -= xi * aij + jx += incX + } + ix -= incX + } + } + } + case blas.ConjTrans: + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] /= 
cmplx.Conj(a[i*lda]) + } + kk := min(k, n-i-1) + xi := x[i] + for j, aij := range a[i*lda+1 : i*lda+kk+1] { + x[i+1+j] -= xi * cmplx.Conj(aij) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] /= cmplx.Conj(a[i*lda]) + } + kk := min(k, n-i-1) + xi := x[ix] + jx := ix + incX + for _, aij := range a[i*lda+1 : i*lda+kk+1] { + x[jx] -= xi * cmplx.Conj(aij) + jx += incX + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] /= cmplx.Conj(a[i*lda+k]) + } + kk := min(k, i) + xi := x[i] + for j, aij := range a[i*lda+k-kk : i*lda+k] { + x[i-kk+j] -= xi * cmplx.Conj(aij) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] /= cmplx.Conj(a[i*lda+k]) + } + kk := min(k, i) + xi := x[ix] + jx := ix - kk*incX + for _, aij := range a[i*lda+k-kk : i*lda+k] { + x[jx] -= xi * cmplx.Conj(aij) + jx += incX + } + ix -= incX + } + } + } + } +} + +// Ctpmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is an n element vector and A is an n×n triangular matrix, supplied in +// packed form. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctpmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex64, x []complex64, incX int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = A*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i := range x { + if diag == blas.NonUnit { + x[i] *= ap[kk] + } + if n-i-1 > 0 { + x[i] += c64.DotuUnitary(ap[kk+1:kk+n-i], x[i+1:]) + } + kk += n - i + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] *= ap[kk] + } + if n-i-1 > 0 { + x[ix] += c64.DotuInc(ap[kk+1:kk+n-i], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix += incX + kk += n - i + } + } + } else { + // kk points to the beginning of current row in ap. + kk := n*(n+1)/2 - n + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] *= ap[kk+i] + } + if i > 0 { + x[i] += c64.DotuUnitary(ap[kk:kk+i], x[:i]) + } + kk -= i + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] *= ap[kk+i] + } + if i > 0 { + x[ix] += c64.DotuInc(ap[kk:kk+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + ix -= incX + kk -= i + } + } + } + return + } + + if trans == blas.Trans { + // Form x = A^T*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. 
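+	// Row i of the packed upper triangle holds its n-i elements starting at the
+	// diagonal, so the last diagonal sits at index n*(n+1)/2-1 and moving from
+	// row i to row i-1 steps kk back by n-i+1.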
+ kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= ap[kk] + } + if n-i-1 > 0 { + c64.AxpyUnitary(xi, ap[kk+1:kk+n-i], x[i+1:n]) + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= ap[kk] + } + if n-i-1 > 0 { + c64.AxpyInc(xi, ap[kk+1:kk+n-i], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + // kk points to the beginning of current row in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i := range x { + if i > 0 { + c64.AxpyUnitary(x[i], ap[kk:kk+i], x[:i]) + } + if diag == blas.NonUnit { + x[i] *= ap[kk+i] + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + c64.AxpyInc(x[ix], ap[kk:kk+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + if diag == blas.NonUnit { + x[ix] *= ap[kk+i] + } + ix += incX + kk += i + 1 + } + } + } + return + } + + // Form x = A^H*x. + if uplo == blas.Upper { + // kk points to the current diagonal element in ap. + kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(ap[kk]) + } + k := kk + 1 + for j := i + 1; j < n; j++ { + x[j] += xi * cmplx.Conj(ap[k]) + k++ + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(ap[kk]) + } + jx := ix + incX + k := kk + 1 + for j := i + 1; j < n; j++ { + x[jx] += xi * cmplx.Conj(ap[k]) + jx += incX + k++ + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + // kk points to the beginning of current row in ap. + kk := 0 + if incX == 1 { + x = x[:n] + for i, xi := range x { + for j := 0; j < i; j++ { + x[j] += xi * cmplx.Conj(ap[kk+j]) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(ap[kk+i]) + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + xi := x[ix] + jx := kx + for j := 0; j < i; j++ { + x[jx] += xi * cmplx.Conj(ap[kk+j]) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(ap[kk+i]) + } + ix += incX + kk += i + 1 + } + } + } +} + +// Ctpsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular matrix in +// packed form. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctpsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex64, x []complex64, incX int) { + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
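+	// A triangular n×n matrix in packed form stores exactly n*(n+1)/2 elements,
+	// which is the minimal valid length for ap.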
+ if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through ap. + + if trans == blas.NoTrans { + // Form x = inv(A)*x. + if uplo == blas.Upper { + kk := n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + aii := ap[kk] + if n-i-1 > 0 { + x[i] -= c64.DotuUnitary(x[i+1:n], ap[kk+1:kk+n-i]) + } + if diag == blas.NonUnit { + x[i] /= aii + } + kk -= n - i + 1 + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + aii := ap[kk] + if n-i-1 > 0 { + x[ix] -= c64.DotuInc(x, ap[kk+1:kk+n-i], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + } + if diag == blas.NonUnit { + x[ix] /= aii + } + ix -= incX + kk -= n - i + 1 + } + } + } else { + kk := 0 + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + x[i] -= c64.DotuUnitary(x[:i], ap[kk:kk+i]) + } + if diag == blas.NonUnit { + x[i] /= ap[kk+i] + } + kk += i + 1 + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + x[ix] -= c64.DotuInc(x, ap[kk:kk+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + } + if diag == blas.NonUnit { + x[ix] /= ap[kk+i] + } + ix += incX + kk += i + 1 + } + } + } + return + } + + if trans == blas.Trans { + // Form x = inv(A^T)*x. + if uplo == blas.Upper { + kk := 0 + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= ap[kk] + } + if n-j-1 > 0 { + c64.AxpyUnitary(-x[j], ap[kk+1:kk+n-j], x[j+1:n]) + } + kk += n - j + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= ap[kk] + } + if n-j-1 > 0 { + c64.AxpyInc(-x[jx], ap[kk+1:kk+n-j], x, uintptr(n-j-1), 1, uintptr(incX), 0, uintptr(jx+incX)) + } + jx += incX + kk += n - j + } + } + } else { + kk := n*(n+1)/2 - n + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= ap[kk+j] + } + if j > 0 { + c64.AxpyUnitary(-x[j], ap[kk:kk+j], x[:j]) + } + kk -= j + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= ap[kk+j] + } + if j > 0 { + c64.AxpyInc(-x[jx], ap[kk:kk+j], x, uintptr(j), 1, uintptr(incX), 0, uintptr(kx)) + } + jx -= incX + kk -= j + } + } + } + return + } + + // Form x = inv(A^H)*x. 
+ if uplo == blas.Upper { + kk := 0 + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(ap[kk]) + } + xj := x[j] + k := kk + 1 + for i := j + 1; i < n; i++ { + x[i] -= xj * cmplx.Conj(ap[k]) + k++ + } + kk += n - j + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(ap[kk]) + } + xj := x[jx] + ix := jx + incX + k := kk + 1 + for i := j + 1; i < n; i++ { + x[ix] -= xj * cmplx.Conj(ap[k]) + ix += incX + k++ + } + jx += incX + kk += n - j + } + } + } else { + kk := n*(n+1)/2 - n + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(ap[kk+j]) + } + xj := x[j] + for i := 0; i < j; i++ { + x[i] -= xj * cmplx.Conj(ap[kk+i]) + } + kk -= j + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(ap[kk+j]) + } + xj := x[jx] + ix := kx + for i := 0; i < j; i++ { + x[ix] -= xj * cmplx.Conj(ap[kk+i]) + ix += incX + } + jx -= incX + kk -= j + } + } + } +} + +// Ctrmv performs one of the matrix-vector operations +// x = A * x if trans = blas.NoTrans +// x = A^T * x if trans = blas.Trans +// x = A^H * x if trans = blas.ConjTrans +// where x is a vector, and A is an n×n triangular matrix. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctrmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex64, lda int, x []complex64, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = A*x. + if uplo == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if n-i-1 > 0 { + x[i] += c64.DotuUnitary(a[i*lda+i+1:i*lda+n], x[i+1:n]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if n-i-1 > 0 { + x[ix] += c64.DotuInc(a[i*lda+i+1:i*lda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix += incX + } + } + } else { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if i > 0 { + x[i] += c64.DotuUnitary(a[i*lda:i*lda+i], x[:i]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if i > 0 { + x[ix] += c64.DotuInc(a[i*lda:i*lda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + ix -= incX + } + } + } + return + } + + if trans == blas.Trans { + // Form x = A^T*x. 
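+	// The transposed product is formed by scattering: each x[i] is scaled by the
+	// stored row i of a and accumulated into the other entries of x with axpy,
+	// rather than gathering dot products as in the NoTrans case.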
+ if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + if n-i-1 > 0 { + c64.AxpyUnitary(xi, a[i*lda+i+1:i*lda+n], x[i+1:n]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + if n-i-1 > 0 { + c64.AxpyInc(xi, a[i*lda+i+1:i*lda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(ix+incX)) + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + c64.AxpyUnitary(x[i], a[i*lda:i*lda+i], x[:i]) + } + if diag == blas.NonUnit { + x[i] *= a[i*lda+i] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + c64.AxpyInc(x[ix], a[i*lda:i*lda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + } + if diag == blas.NonUnit { + x[ix] *= a[i*lda+i] + } + ix += incX + } + } + } + return + } + + // Form x = A^H*x. + if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + xi := x[i] + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+i]) + } + for j := i + 1; j < n; j++ { + x[j] += xi * cmplx.Conj(a[i*lda+j]) + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + xi := x[ix] + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+i]) + } + jx := ix + incX + for j := i + 1; j < n; j++ { + x[jx] += xi * cmplx.Conj(a[i*lda+j]) + jx += incX + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + x[j] += x[i] * cmplx.Conj(a[i*lda+j]) + } + if diag == blas.NonUnit { + x[i] *= cmplx.Conj(a[i*lda+i]) + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + jx := kx + for j := 0; j < i; j++ { + x[jx] += x[ix] * cmplx.Conj(a[i*lda+j]) + jx += incX + } + if diag == blas.NonUnit { + x[ix] *= cmplx.Conj(a[i*lda+i]) + } + ix += incX + } + } + } +} + +// Ctrsv solves one of the systems of equations +// A * x = b if trans == blas.NoTrans +// A^T * x = b if trans == blas.Trans +// A^H * x = b if trans == blas.ConjTrans +// where b and x are n element vectors and A is an n×n triangular matrix. +// +// On entry, x contains the values of b, and the solution is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctrsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex64, lda int, x []complex64, incX int) { + switch trans { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch uplo { + default: + panic(badUplo) + case blas.Upper, blas.Lower: + } + switch diag { + default: + panic(badDiag) + case blas.NonUnit, blas.Unit: + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + // Set up start index in X. + var kx int + if incX < 0 { + kx = (1 - n) * incX + } + + // The elements of A are accessed sequentially with one pass through A. + + if trans == blas.NoTrans { + // Form x = inv(A)*x. 
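+	// Back substitution for the upper triangle (i from n-1 down to 0),
+	// forward substitution for the lower triangle.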
+ if uplo == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + aii := a[i*lda+i] + if n-i-1 > 0 { + x[i] -= c64.DotuUnitary(x[i+1:n], a[i*lda+i+1:i*lda+n]) + } + if diag == blas.NonUnit { + x[i] /= aii + } + } + } else { + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + aii := a[i*lda+i] + if n-i-1 > 0 { + x[ix] -= c64.DotuInc(x, a[i*lda+i+1:i*lda+n], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + } + if diag == blas.NonUnit { + x[ix] /= aii + } + ix -= incX + } + } + } else { + if incX == 1 { + for i := 0; i < n; i++ { + if i > 0 { + x[i] -= c64.DotuUnitary(x[:i], a[i*lda:i*lda+i]) + } + if diag == blas.NonUnit { + x[i] /= a[i*lda+i] + } + } + } else { + ix := kx + for i := 0; i < n; i++ { + if i > 0 { + x[ix] -= c64.DotuInc(x, a[i*lda:i*lda+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + } + if diag == blas.NonUnit { + x[ix] /= a[i*lda+i] + } + ix += incX + } + } + } + return + } + + if trans == blas.Trans { + // Form x = inv(A^T)*x. + if uplo == blas.Upper { + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= a[j*lda+j] + } + if n-j-1 > 0 { + c64.AxpyUnitary(-x[j], a[j*lda+j+1:j*lda+n], x[j+1:n]) + } + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= a[j*lda+j] + } + if n-j-1 > 0 { + c64.AxpyInc(-x[jx], a[j*lda+j+1:j*lda+n], x, uintptr(n-j-1), 1, uintptr(incX), 0, uintptr(jx+incX)) + } + jx += incX + } + } + } else { + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= a[j*lda+j] + } + xj := x[j] + if j > 0 { + c64.AxpyUnitary(-xj, a[j*lda:j*lda+j], x[:j]) + } + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= a[j*lda+j] + } + if j > 0 { + c64.AxpyInc(-x[jx], a[j*lda:j*lda+j], x, uintptr(j), 1, uintptr(incX), 0, uintptr(kx)) + } + jx -= incX + } + } + } + return + } + + // Form x = inv(A^H)*x. + if uplo == blas.Upper { + if incX == 1 { + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[j] + for i := j + 1; i < n; i++ { + x[i] -= xj * cmplx.Conj(a[j*lda+i]) + } + } + } else { + jx := kx + for j := 0; j < n; j++ { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[jx] + ix := jx + incX + for i := j + 1; i < n; i++ { + x[ix] -= xj * cmplx.Conj(a[j*lda+i]) + ix += incX + } + jx += incX + } + } + } else { + if incX == 1 { + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[j] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[j] + for i := 0; i < j; i++ { + x[i] -= xj * cmplx.Conj(a[j*lda+i]) + } + } + } else { + jx := kx + (n-1)*incX + for j := n - 1; j >= 0; j-- { + if diag == blas.NonUnit { + x[jx] /= cmplx.Conj(a[j*lda+j]) + } + xj := x[jx] + ix := kx + for i := 0; i < j; i++ { + x[ix] -= xj * cmplx.Conj(a[j*lda+i]) + ix += incX + } + jx -= incX + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level2float32.go b/vendor/gonum.org/v1/gonum/blas/gonum/level2float32.go new file mode 100644 index 0000000..08e1927 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level2float32.go @@ -0,0 +1,2296 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f32" +) + +var _ blas.Float32Level2 = Implementation{} + +// Sger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sger(m, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + f32.Ger(uintptr(m), uintptr(n), + alpha, + x, uintptr(incX), + y, uintptr(incY), + a, uintptr(lda)) +} + +// Sgbmv performs one of the matrix-vector operations +// y = alpha * A * x + beta * y if tA == blas.NoTrans +// y = alpha * A^T * x + beta * y if tA == blas.Trans or blas.ConjTrans +// where A is an m×n band matrix with kL sub-diagonals and kU super-diagonals, +// x and y are vectors, and alpha and beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sgbmv(tA blas.Transpose, m, n, kL, kU int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) { + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if kL < 0 { + panic(kLLT0) + } + if kU < 0 { + panic(kULT0) + } + if lda < kL+kU+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(min(m, n+kL)-1)+kL+kU+1 { + panic(shortA) + } + lenX := m + lenY := n + if tA == blas.NoTrans { + lenX = n + lenY = m + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx, ky int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if incY < 0 { + ky = -(lenY - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + f32.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f32.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + f32.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + // i and j are indices of the compacted banded matrix. 
+ // off is the offset into the dense matrix (off + j = densej) + nCol := kU + 1 + kL + if tA == blas.NoTrans { + iy := ky + if incX == 1 { + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + xtmp := x[off : off+u-l] + var sum float32 + for j, v := range atmp { + sum += xtmp[j] * v + } + y[iy] += sum * alpha + iy += incY + } + return + } + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + jx := kx + var sum float32 + for _, v := range atmp { + sum += x[off*incX+jx] * v + jx += incX + } + y[iy] += sum * alpha + iy += incY + } + return + } + if incX == 1 { + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + tmp := alpha * x[i] + jy := ky + for _, v := range atmp { + y[jy+off*incY] += tmp * v + jy += incY + } + } + return + } + ix := kx + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + tmp := alpha * x[ix] + jy := ky + for _, v := range atmp { + y[jy+off*incY] += tmp * v + jy += incY + } + ix += incX + } +} + +// Strmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix, and x is a vector. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Strmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float32, lda int, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + nonUnit := d != blas.Unit + if n == 1 { + if nonUnit { + x[0] *= a[0] + } + return + } + var kx int + if incX <= 0 { + kx = -(n - 1) * incX + } + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + ilda := i * lda + var tmp float32 + if nonUnit { + tmp = a[ilda+i] * x[i] + } else { + tmp = x[i] + } + x[i] = tmp + f32.DotUnitary(a[ilda+i+1:ilda+n], x[i+1:n]) + } + return + } + ix := kx + for i := 0; i < n; i++ { + ilda := i * lda + var tmp float32 + if nonUnit { + tmp = a[ilda+i] * x[ix] + } else { + tmp = x[ix] + } + x[ix] = tmp + f32.DotInc(x, a[ilda+i+1:ilda+n], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + ilda := i * lda + var tmp float32 + if nonUnit { + tmp += a[ilda+i] * x[i] + } else { + tmp = x[i] + } + x[i] = tmp + f32.DotUnitary(a[ilda:ilda+i], x[:i]) + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + ilda := i * lda + var tmp float32 + if nonUnit { + tmp = a[ilda+i] * x[ix] + } else { + tmp = x[ix] + } + x[ix] = tmp + f32.DotInc(x, a[ilda:ilda+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + ix -= incX + } + return + } + // Cases where a is transposed. 
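+	// For A^T the stored rows of a act as columns, so each x[i] is scattered into
+	// the rest of x with axpy before its own diagonal scaling.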
+ if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + ilda := i * lda + xi := x[i] + f32.AxpyUnitary(xi, a[ilda+i+1:ilda+n], x[i+1:n]) + if nonUnit { + x[i] *= a[ilda+i] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + ilda := i * lda + xi := x[ix] + f32.AxpyInc(xi, a[ilda+i+1:ilda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(kx+(i+1)*incX)) + if nonUnit { + x[ix] *= a[ilda+i] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + ilda := i * lda + xi := x[i] + f32.AxpyUnitary(xi, a[ilda:ilda+i], x[:i]) + if nonUnit { + x[i] *= a[i*lda+i] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + ilda := i * lda + xi := x[ix] + f32.AxpyInc(xi, a[ilda:ilda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + if nonUnit { + x[ix] *= a[ilda+i] + } + ix += incX + } +} + +// Strsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Strsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float32, lda int, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + if n == 1 { + if d == blas.NonUnit { + x[0] /= a[0] + } + return + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + nonUnit := d == blas.NonUnit + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + var sum float32 + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jv := i + j + 1 + sum += x[jv] * v + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+i] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + var sum float32 + jx := ix + incX + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + sum += x[jx] * v + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+i] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + var sum float32 + atmp := a[i*lda : i*lda+i] + for j, v := range atmp { + sum += x[j] * v + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+i] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + var sum float32 + atmp := a[i*lda : i*lda+i] + for _, v := range atmp { + sum += x[jx] * v + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+i] + } + ix += incX + } + return + } + // Cases where a is transposed. 
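+	// Solving with A^T reverses the sweep direction: the upper triangle is
+	// processed forward, the lower triangle backward, and each solved x[i] is
+	// eliminated from the remaining equations with axpy.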
+ if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if nonUnit { + x[i] /= a[i*lda+i] + } + xi := x[i] + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jv := j + i + 1 + x[jv] -= v * xi + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + if nonUnit { + x[ix] /= a[i*lda+i] + } + xi := x[ix] + jx := kx + (i+1)*incX + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + x[jx] -= v * xi + jx += incX + } + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[i] /= a[i*lda+i] + } + xi := x[i] + atmp := a[i*lda : i*lda+i] + for j, v := range atmp { + x[j] -= v * xi + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[ix] /= a[i*lda+i] + } + xi := x[ix] + jx := kx + atmp := a[i*lda : i*lda+i] + for _, v := range atmp { + x[jx] -= v * xi + jx += incX + } + ix -= incX + } +} + +// Ssymv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric matrix, x and y are vectors, and alpha and +// beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssymv(ul blas.Uplo, n int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up start points + var kx, ky int + if incX < 0 { + kx = -(n - 1) * incX + } + if incY < 0 { + ky = -(n - 1) * incY + } + + // Form y = beta * y + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f32.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f32.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f32.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if n == 1 { + y[0] += alpha * a[0] * x[0] + return + } + + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + sum := x[i] * a[i*lda+i] + jy := ky + (i+1)*incY + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jp := j + i + 1 + sum += x[jp] * v + y[jy] += xv * v + jy += incY + } + y[iy] += alpha * sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + sum := x[ix] * a[i*lda+i] + jx := kx + (i+1)*incX + jy := ky + (i+1)*incY + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + sum += x[jx] * v + y[jy] += xv * v + jx += incX + jy += incY + } + y[iy] += alpha * sum + ix += incX + iy += incY + } + return + } + // Cases where a is lower triangular. 
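+	// Only the lower triangle is stored, so each stored a[i*lda+j] (j < i)
+	// contributes twice: to y[i] through the dot with x[:i], and to y[j] through
+	// the symmetric xv*v update.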
+ if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + jy := ky + xv := alpha * x[i] + atmp := a[i*lda : i*lda+i] + var sum float32 + for j, v := range atmp { + sum += x[j] * v + y[jy] += xv * v + jy += incY + } + sum += x[i] * a[i*lda+i] + sum *= alpha + y[iy] += sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + xv := alpha * x[ix] + atmp := a[i*lda : i*lda+i] + var sum float32 + for _, v := range atmp { + sum += x[jx] * v + y[jy] += xv * v + jx += incX + jy += incY + } + sum += x[ix] * a[i*lda+i] + sum *= alpha + y[iy] += sum + ix += incX + iy += incY + } +} + +// Stbmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular band matrix with k+1 diagonals, and x is a vector. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Stbmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float32, lda int, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonunit := d != blas.Unit + + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + u := min(1+k, n-i) + var sum float32 + atmp := a[i*lda:] + xtmp := x[i:] + for j := 1; j < u; j++ { + sum += xtmp[j] * atmp[j] + } + if nonunit { + sum += xtmp[0] * atmp[0] + } else { + sum += xtmp[0] + } + x[i] = sum + } + return + } + ix := kx + for i := 0; i < n; i++ { + u := min(1+k, n-i) + var sum float32 + atmp := a[i*lda:] + jx := incX + for j := 1; j < u; j++ { + sum += x[ix+jx] * atmp[j] + jx += incX + } + if nonunit { + sum += x[ix] * atmp[0] + } else { + sum += x[ix] + } + x[ix] = sum + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + l := max(0, k-i) + atmp := a[i*lda:] + var sum float32 + for j := l; j < k; j++ { + sum += x[i-k+j] * atmp[j] + } + if nonunit { + sum += x[i] * atmp[k] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + l := max(0, k-i) + atmp := a[i*lda:] + var sum float32 + jx := l * incX + for j := l; j < k; j++ { + sum += x[ix-k*incX+jx] * atmp[j] + jx += incX + } + if nonunit { + sum += x[ix] * atmp[k] + } else { + sum += x[ix] + } + x[ix] = sum + ix -= incX + } + return + } + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + u := k + 1 + if i < u { + u = i + 1 + } + var sum float32 + for j := 1; j < u; j++ { + sum += x[i-j] * a[(i-j)*lda+j] + } + if nonunit { + sum += x[i] * a[i*lda] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + u := k + 1 + if i < u { + u = i + 1 + } + var sum float32 + jx := incX + for j := 1; j < u; j++ { + sum += x[ix-jx] * a[(i-j)*lda+j] + jx += incX + } + if nonunit { + sum += 
x[ix] * a[i*lda] + } else { + sum += x[ix] + } + x[ix] = sum + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + u := k + if i+k >= n { + u = n - i - 1 + } + var sum float32 + for j := 0; j < u; j++ { + sum += x[i+j+1] * a[(i+j+1)*lda+k-j-1] + } + if nonunit { + sum += x[i] * a[i*lda+k] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + for i := 0; i < n; i++ { + u := k + if i+k >= n { + u = n - i - 1 + } + var ( + sum float32 + jx int + ) + for j := 0; j < u; j++ { + sum += x[ix+jx+incX] * a[(i+j+1)*lda+k-j-1] + jx += incX + } + if nonunit { + sum += x[ix] * a[i*lda+k] + } else { + sum += x[ix] + } + x[ix] = sum + ix += incX + } +} + +// Stpmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix in packed format, and x is a vector. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Stpmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float32, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonUnit := d == blas.NonUnit + var offset int // Offset is the index of (i,i) + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if nonUnit { + xi *= ap[offset] + } + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xi += v * xtmp[j] + } + x[i] = xi + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + xix := x[ix] + if nonUnit { + xix *= ap[offset] + } + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + for _, v := range atmp { + xix += v * x[jx] + jx += incX + } + x[ix] = xix + offset += n - i + ix += incX + } + return + } + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xi := x[i] + if nonUnit { + xi *= ap[offset] + } + atmp := ap[offset-i : offset] + for j, v := range atmp { + xi += v * x[j] + } + x[i] = xi + offset -= i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xix := x[ix] + if nonUnit { + xix *= ap[offset] + } + atmp := ap[offset-i : offset] + jx := kx + for _, v := range atmp { + xix += v * x[jx] + jx += incX + } + x[ix] = xix + offset -= i + 1 + ix -= incX + } + return + } + // Cases where ap is transposed. 
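+	// In the transposed packed case each x[i] is scattered into the other
+	// entries of x; offset still tracks the diagonal element of row i within ap.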
+ if ul == blas.Upper { + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xi := x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xtmp[j] += v * xi + } + if nonUnit { + x[i] *= ap[offset] + } + offset -= n - i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xix := x[ix] + jx := kx + (i+1)*incX + atmp := ap[offset+1 : offset+n-i] + for _, v := range atmp { + x[jx] += v * xix + jx += incX + } + if nonUnit { + x[ix] *= ap[offset] + } + offset -= n - i + 1 + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + atmp := ap[offset-i : offset] + for j, v := range atmp { + x[j] += v * xi + } + if nonUnit { + x[i] *= ap[offset] + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + xix := x[ix] + jx := kx + atmp := ap[offset-i : offset] + for _, v := range atmp { + x[jx] += v * xix + jx += incX + } + if nonUnit { + x[ix] *= ap[offset] + } + ix += incX + offset += i + 2 + } +} + +// Stbsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or tA == blas.ConjTrans +// where A is an n×n triangular band matrix with k+1 diagonals, +// and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Stbsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float32, lda int, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + nonUnit := d == blas.NonUnit + // Form x = A^-1 x. + // Several cases below use subslices for speed improvement. + // The incX != 1 cases usually do not because incX may be negative. 
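+	// bands clamps the number of off-diagonal entries used in a row to at most k,
+	// shrinking near the first or last rows of the band.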
+ if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + atmp := a[i*lda+1:] + xtmp := x[i+1 : i+bands+1] + var sum float32 + for j, v := range xtmp { + sum += v * atmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + max := k + 1 + if i+max > n { + max = n - i + } + atmp := a[i*lda:] + var ( + jx int + sum float32 + ) + for j := 1; j < max; j++ { + jx += incX + sum += x[ix+jx] * atmp[j] + } + x[ix] -= sum + if nonUnit { + x[ix] /= atmp[0] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + atmp := a[i*lda+k-bands:] + xtmp := x[i-bands : i] + var sum float32 + for j, v := range xtmp { + sum += v * atmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= atmp[bands] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + atmp := a[i*lda+k-bands:] + var ( + sum float32 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix-bands*incX+jx] * atmp[j] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= atmp[bands] + } + ix += incX + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + var sum float32 + for j := 0; j < bands; j++ { + sum += x[i-bands+j] * a[(i-bands+j)*lda+bands-j] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + var ( + sum float32 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix-bands*incX+jx] * a[(i-bands+j)*lda+bands-j] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda] + } + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + var sum float32 + xtmp := x[i+1 : i+1+bands] + for j, v := range xtmp { + sum += v * a[(i+j+1)*lda+k-j-1] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+k] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + var ( + sum float32 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix+jx+incX] * a[(i+j+1)*lda+k-j-1] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+k] + } + ix -= incX + } +} + +// Ssbmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric band matrix with k super-diagonals, x and y are +// vectors, and alpha and beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssbmv(ul blas.Uplo, n, k int, alpha float32, a []float32, lda int, x []float32, incX int, beta float32, y []float32, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
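+	// Each stored band row occupies k+1 entries spaced lda apart, so the last
+	// addressable element is a[lda*(n-1)+k], giving the minimum length below.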
+ if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up indexes + lenX := n + lenY := n + var kx, ky int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if incY < 0 { + ky = -(lenY - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f32.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f32.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f32.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + atmp := a[i*lda:] + tmp := alpha * x[i] + sum := tmp * atmp[0] + u := min(k, n-i-1) + jy := incY + for j := 1; j <= u; j++ { + v := atmp[j] + sum += alpha * x[i+j] * v + y[iy+jy] += tmp * v + jy += incY + } + y[iy] += sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + atmp := a[i*lda:] + tmp := alpha * x[ix] + sum := tmp * atmp[0] + u := min(k, n-i-1) + jx := incX + jy := incY + for j := 1; j <= u; j++ { + v := atmp[j] + sum += alpha * x[ix+jx] * v + y[iy+jy] += tmp * v + jx += incX + jy += incY + } + y[iy] += sum + ix += incX + iy += incY + } + return + } + + // Casses where a has bands below the diagonal. + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + l := max(0, k-i) + tmp := alpha * x[i] + jy := l * incY + atmp := a[i*lda:] + for j := l; j < k; j++ { + v := atmp[j] + y[iy] += alpha * v * x[i-k+j] + y[iy-k*incY+jy] += tmp * v + jy += incY + } + y[iy] += tmp * atmp[k] + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + l := max(0, k-i) + tmp := alpha * x[ix] + jx := l * incX + jy := l * incY + atmp := a[i*lda:] + for j := l; j < k; j++ { + v := atmp[j] + y[iy] += alpha * v * x[ix-k*incX+jx] + y[iy-k*incY+jy] += tmp * v + jx += incX + jy += incY + } + y[iy] += tmp * atmp[k] + ix += incX + iy += incY + } +} + +// Ssyr performs the symmetric rank-one update +// A += alpha * x * x^T +// where A is an n×n symmetric matrix, and x is a vector. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssyr(ul blas.Uplo, n int, alpha float32, x []float32, incX int, a []float32, lda int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. 
+ if alpha == 0 { + return + } + + lenX := n + var kx int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + tmp := x[i] * alpha + if tmp != 0 { + atmp := a[i*lda+i : i*lda+n] + xtmp := x[i:n] + for j, v := range xtmp { + atmp[j] += v * tmp + } + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + tmp := x[ix] * alpha + if tmp != 0 { + jx := ix + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += x[jx] * tmp + jx += incX + } + } + ix += incX + } + return + } + // Cases where a is lower triangular. + if incX == 1 { + for i := 0; i < n; i++ { + tmp := x[i] * alpha + if tmp != 0 { + atmp := a[i*lda:] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += tmp * v + } + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + tmp := x[ix] * alpha + if tmp != 0 { + atmp := a[i*lda:] + jx := kx + for j := 0; j < i+1; j++ { + atmp[j] += tmp * x[jx] + jx += incX + } + } + ix += incX + } +} + +// Ssyr2 performs the symmetric rank-two update +// A += alpha * x * y^T + alpha * y * x^T +// where A is an n×n symmetric matrix, x and y are vectors, and alpha is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssyr2(ul blas.Uplo, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32, lda int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var ky, kx int + if incY < 0 { + ky = -(n - 1) * incY + } + if incX < 0 { + kx = -(n - 1) * incX + } + if ul == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + xi := x[i] + yi := y[i] + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += alpha * (xi*y[j] + x[j]*yi) + } + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + i*incX + jy := ky + i*incY + xi := x[ix] + yi := y[iy] + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + } + return + } + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + xi := x[i] + yi := y[i] + atmp := a[i*lda:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (xi*y[j] + x[j]*yi) + } + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + xi := x[ix] + yi := y[iy] + atmp := a[i*lda:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + } +} + +// Stpsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix in packed format, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. 
Such tests must be performed before calling this routine. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Stpsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float32, x []float32, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonUnit := d == blas.NonUnit + var offset int // Offset is the index of (i,i) + if tA == blas.NoTrans { + if ul == blas.Upper { + offset = n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + var sum float32 + for j, v := range atmp { + sum += v * xtmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= ap[offset] + } + offset -= n - i + 1 + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + var sum float32 + for _, v := range atmp { + sum += v * x[jx] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= ap[offset] + } + ix -= incX + offset -= n - i + 1 + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i : offset] + var sum float32 + for j, v := range atmp { + sum += v * x[j] + } + x[i] -= sum + if nonUnit { + x[i] /= ap[offset] + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + atmp := ap[offset-i : offset] + var sum float32 + for _, v := range atmp { + sum += v * x[jx] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= ap[offset] + } + ix += incX + offset += i + 2 + } + return + } + // Cases where ap is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if nonUnit { + x[i] /= ap[offset] + } + xi := x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xtmp[j] -= v * xi + } + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + if nonUnit { + x[ix] /= ap[offset] + } + xix := x[ix] + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + for _, v := range atmp { + x[jx] -= v * xix + jx += incX + } + ix += incX + offset += n - i + } + return + } + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[i] /= ap[offset] + } + xi := x[i] + atmp := ap[offset-i : offset] + for j, v := range atmp { + x[j] -= v * xi + } + offset -= i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[ix] /= ap[offset] + } + xix := x[ix] + atmp := ap[offset-i : offset] + jx := kx + for _, v := range atmp { + x[jx] -= v * xix + jx += incX + } + ix -= incX + offset -= i + 1 + } +} + +// Sspmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. 
+func (Implementation) Sspmv(ul blas.Uplo, n int, alpha float32, ap []float32, x []float32, incX int, beta float32, y []float32, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up start points + var kx, ky int + if incX < 0 { + kx = -(n - 1) * incX + } + if incY < 0 { + ky = -(n - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f32.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f32.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f32.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if n == 1 { + y[0] += alpha * ap[0] * x[0] + return + } + var offset int // Offset is the index of (i,i). + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + sum := ap[offset] * x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + jy := ky + (i+1)*incY + for j, v := range atmp { + sum += v * xtmp[j] + y[jy] += v * xv + jy += incY + } + y[iy] += alpha * sum + iy += incY + offset += n - i + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + sum := ap[offset] * x[ix] + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + jy := ky + (i+1)*incY + for _, v := range atmp { + sum += v * x[jx] + y[jy] += v * xv + jx += incX + jy += incY + } + y[iy] += alpha * sum + ix += incX + iy += incY + offset += n - i + } + return + } + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + atmp := ap[offset-i : offset] + jy := ky + var sum float32 + for j, v := range atmp { + sum += v * x[j] + y[jy] += v * xv + jy += incY + } + sum += ap[offset] * x[i] + y[iy] += alpha * sum + iy += incY + offset += i + 2 + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + atmp := ap[offset-i : offset] + jx := kx + jy := ky + var sum float32 + for _, v := range atmp { + sum += v * x[jx] + y[jy] += v * xv + jx += incX + jy += incY + } + + sum += ap[offset] * x[ix] + y[iy] += alpha * sum + ix += incX + iy += incY + offset += i + 2 + } +} + +// Sspr performs the symmetric rank-one operation +// A += alpha * x * x^T +// where A is an n×n symmetric matrix in packed format, x is a vector, and +// alpha is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sspr(ul blas.Uplo, n int, alpha float32, x []float32, incX int, ap []float32) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + lenX := n + var kx int + if incX < 0 { + kx = -(lenX - 1) * incX + } + var offset int // Offset is the index of (i,i). + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset:] + xv := alpha * x[i] + xtmp := x[i:n] + for j, v := range xtmp { + atmp[j] += xv * v + } + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + i*incX + atmp := ap[offset:] + xv := alpha * x[ix] + for j := 0; j < n-i; j++ { + atmp[j] += xv * x[jx] + jx += incX + } + ix += incX + offset += n - i + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i:] + xv := alpha * x[i] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += xv * v + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + atmp := ap[offset-i:] + xv := alpha * x[ix] + for j := 0; j <= i; j++ { + atmp[j] += xv * x[jx] + jx += incX + } + ix += incX + offset += i + 2 + } +} + +// Sspr2 performs the symmetric rank-2 update +// A += alpha * x * y^T + alpha * y * x^T +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sspr2(ul blas.Uplo, n int, alpha float32, x []float32, incX int, y []float32, incY int, ap []float32) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var ky, kx int + if incY < 0 { + ky = -(n - 1) * incY + } + if incX < 0 { + kx = -(n - 1) * incX + } + var offset int // Offset is the index of (i,i). 
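+ // The stored triangle of A is updated row by row; the unit-increment
+ // cases below index x and y directly, while the general cases step
+ // through them with explicit jx and jy indices.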
+ if ul == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset:] + xi := x[i] + yi := y[i] + xtmp := x[i:n] + ytmp := y[i:n] + for j, v := range xtmp { + atmp[j] += alpha * (xi*ytmp[j] + v*yi) + } + offset += n - i + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + i*incX + jy := ky + i*incY + atmp := ap[offset:] + xi := x[ix] + yi := y[iy] + for j := 0; j < n-i; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + offset += n - i + } + return + } + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i:] + xi := x[i] + yi := y[i] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += alpha * (xi*y[j] + v*yi) + } + offset += i + 2 + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + atmp := ap[offset-i:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (x[ix]*y[jy] + x[jx]*y[iy]) + jx += incX + jy += incY + } + ix += incX + iy += incY + offset += i + 2 + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level2float64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level2float64.go new file mode 100644 index 0000000..2612578 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level2float64.go @@ -0,0 +1,2264 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f64" +) + +var _ blas.Float64Level2 = Implementation{} + +// Dger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func (Implementation) Dger(m, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) { + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (m-1)*incX) || (incX < 0 && len(x) <= (1-m)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(m-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + f64.Ger(uintptr(m), uintptr(n), + alpha, + x, uintptr(incX), + y, uintptr(incY), + a, uintptr(lda)) +} + +// Dgbmv performs one of the matrix-vector operations +// y = alpha * A * x + beta * y if tA == blas.NoTrans +// y = alpha * A^T * x + beta * y if tA == blas.Trans or blas.ConjTrans +// where A is an m×n band matrix with kL sub-diagonals and kU super-diagonals, +// x and y are vectors, and alpha and beta are scalars. +func (Implementation) Dgbmv(tA blas.Transpose, m, n, kL, kU int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) { + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if kL < 0 { + panic(kLLT0) + } + if kU < 0 { + panic(kULT0) + } + if lda < kL+kU+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. 
+ if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(min(m, n+kL)-1)+kL+kU+1 { + panic(shortA) + } + lenX := m + lenY := n + if tA == blas.NoTrans { + lenX = n + lenY = m + } + if (incX > 0 && len(x) <= (lenX-1)*incX) || (incX < 0 && len(x) <= (1-lenX)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (lenY-1)*incY) || (incY < 0 && len(y) <= (1-lenY)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + var kx, ky int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if incY < 0 { + ky = -(lenY - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:lenY] { + y[i] = 0 + } + } else { + f64.ScalUnitary(beta, y[:lenY]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < lenY; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f64.ScalInc(beta, y, uintptr(lenY), uintptr(incY)) + } else { + f64.ScalInc(beta, y, uintptr(lenY), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + // i and j are indices of the compacted banded matrix. + // off is the offset into the dense matrix (off + j = densej) + nCol := kU + 1 + kL + if tA == blas.NoTrans { + iy := ky + if incX == 1 { + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + xtmp := x[off : off+u-l] + var sum float64 + for j, v := range atmp { + sum += xtmp[j] * v + } + y[iy] += sum * alpha + iy += incY + } + return + } + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + jx := kx + var sum float64 + for _, v := range atmp { + sum += x[off*incX+jx] * v + jx += incX + } + y[iy] += sum * alpha + iy += incY + } + return + } + if incX == 1 { + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + tmp := alpha * x[i] + jy := ky + for _, v := range atmp { + y[jy+off*incY] += tmp * v + jy += incY + } + } + return + } + ix := kx + for i := 0; i < min(m, n+kL); i++ { + l := max(0, kL-i) + u := min(nCol, n+kL-i) + off := max(0, i-kL) + atmp := a[i*lda+l : i*lda+u] + tmp := alpha * x[ix] + jy := ky + for _, v := range atmp { + y[jy+off*incY] += tmp * v + jy += incY + } + ix += incX + } +} + +// Dtrmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix, and x is a vector. +func (Implementation) Dtrmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + nonUnit := d != blas.Unit + if n == 1 { + if nonUnit { + x[0] *= a[0] + } + return + } + var kx int + if incX <= 0 { + kx = -(n - 1) * incX + } + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + ilda := i * lda + var tmp float64 + if nonUnit { + tmp = a[ilda+i] * x[i] + } else { + tmp = x[i] + } + x[i] = tmp + f64.DotUnitary(a[ilda+i+1:ilda+n], x[i+1:n]) + } + return + } + ix := kx + for i := 0; i < n; i++ { + ilda := i * lda + var tmp float64 + if nonUnit { + tmp = a[ilda+i] * x[ix] + } else { + tmp = x[ix] + } + x[ix] = tmp + f64.DotInc(x, a[ilda+i+1:ilda+n], uintptr(n-i-1), uintptr(incX), 1, uintptr(ix+incX), 0) + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + ilda := i * lda + var tmp float64 + if nonUnit { + tmp += a[ilda+i] * x[i] + } else { + tmp = x[i] + } + x[i] = tmp + f64.DotUnitary(a[ilda:ilda+i], x[:i]) + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + ilda := i * lda + var tmp float64 + if nonUnit { + tmp = a[ilda+i] * x[ix] + } else { + tmp = x[ix] + } + x[ix] = tmp + f64.DotInc(x, a[ilda:ilda+i], uintptr(i), uintptr(incX), 1, uintptr(kx), 0) + ix -= incX + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + ilda := i * lda + xi := x[i] + f64.AxpyUnitary(xi, a[ilda+i+1:ilda+n], x[i+1:n]) + if nonUnit { + x[i] *= a[ilda+i] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + ilda := i * lda + xi := x[ix] + f64.AxpyInc(xi, a[ilda+i+1:ilda+n], x, uintptr(n-i-1), 1, uintptr(incX), 0, uintptr(kx+(i+1)*incX)) + if nonUnit { + x[ix] *= a[ilda+i] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + ilda := i * lda + xi := x[i] + f64.AxpyUnitary(xi, a[ilda:ilda+i], x[:i]) + if nonUnit { + x[i] *= a[i*lda+i] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + ilda := i * lda + xi := x[ix] + f64.AxpyInc(xi, a[ilda:ilda+i], x, uintptr(i), 1, uintptr(incX), 0, uintptr(kx)) + if nonUnit { + x[ix] *= a[ilda+i] + } + ix += incX + } +} + +// Dtrsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Dtrsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + if n == 1 { + if d == blas.NonUnit { + x[0] /= a[0] + } + return + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + nonUnit := d == blas.NonUnit + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + var sum float64 + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jv := i + j + 1 + sum += x[jv] * v + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+i] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + var sum float64 + jx := ix + incX + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + sum += x[jx] * v + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+i] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + var sum float64 + atmp := a[i*lda : i*lda+i] + for j, v := range atmp { + sum += x[j] * v + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+i] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + var sum float64 + atmp := a[i*lda : i*lda+i] + for _, v := range atmp { + sum += x[jx] * v + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+i] + } + ix += incX + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if nonUnit { + x[i] /= a[i*lda+i] + } + xi := x[i] + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jv := j + i + 1 + x[jv] -= v * xi + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + if nonUnit { + x[ix] /= a[i*lda+i] + } + xi := x[ix] + jx := kx + (i+1)*incX + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + x[jx] -= v * xi + jx += incX + } + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[i] /= a[i*lda+i] + } + xi := x[i] + atmp := a[i*lda : i*lda+i] + for j, v := range atmp { + x[j] -= v * xi + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[ix] /= a[i*lda+i] + } + xi := x[ix] + jx := kx + atmp := a[i*lda : i*lda+i] + for _, v := range atmp { + x[jx] -= v * xi + jx += incX + } + ix -= incX + } +} + +// Dsymv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric matrix, x and y are vectors, and alpha and +// beta are scalars. +func (Implementation) Dsymv(ul blas.Uplo, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+n { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. 
+ if alpha == 0 && beta == 1 { + return + } + + // Set up start points + var kx, ky int + if incX < 0 { + kx = -(n - 1) * incX + } + if incY < 0 { + ky = -(n - 1) * incY + } + + // Form y = beta * y + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f64.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f64.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f64.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if n == 1 { + y[0] += alpha * a[0] * x[0] + return + } + + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + sum := x[i] * a[i*lda+i] + jy := ky + (i+1)*incY + atmp := a[i*lda+i+1 : i*lda+n] + for j, v := range atmp { + jp := j + i + 1 + sum += x[jp] * v + y[jy] += xv * v + jy += incY + } + y[iy] += alpha * sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + sum := x[ix] * a[i*lda+i] + jx := kx + (i+1)*incX + jy := ky + (i+1)*incY + atmp := a[i*lda+i+1 : i*lda+n] + for _, v := range atmp { + sum += x[jx] * v + y[jy] += xv * v + jx += incX + jy += incY + } + y[iy] += alpha * sum + ix += incX + iy += incY + } + return + } + // Cases where a is lower triangular. + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + jy := ky + xv := alpha * x[i] + atmp := a[i*lda : i*lda+i] + var sum float64 + for j, v := range atmp { + sum += x[j] * v + y[jy] += xv * v + jy += incY + } + sum += x[i] * a[i*lda+i] + sum *= alpha + y[iy] += sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + xv := alpha * x[ix] + atmp := a[i*lda : i*lda+i] + var sum float64 + for _, v := range atmp { + sum += x[jx] * v + y[jy] += xv * v + jx += incX + jy += incY + } + sum += x[ix] * a[i*lda+i] + sum *= alpha + y[iy] += sum + ix += incX + iy += incY + } +} + +// Dtbmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular band matrix with k+1 diagonals, and x is a vector. +func (Implementation) Dtbmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float64, lda int, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonunit := d != blas.Unit + + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + u := min(1+k, n-i) + var sum float64 + atmp := a[i*lda:] + xtmp := x[i:] + for j := 1; j < u; j++ { + sum += xtmp[j] * atmp[j] + } + if nonunit { + sum += xtmp[0] * atmp[0] + } else { + sum += xtmp[0] + } + x[i] = sum + } + return + } + ix := kx + for i := 0; i < n; i++ { + u := min(1+k, n-i) + var sum float64 + atmp := a[i*lda:] + jx := incX + for j := 1; j < u; j++ { + sum += x[ix+jx] * atmp[j] + jx += incX + } + if nonunit { + sum += x[ix] * atmp[0] + } else { + sum += x[ix] + } + x[ix] = sum + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + l := max(0, k-i) + atmp := a[i*lda:] + var sum float64 + for j := l; j < k; j++ { + sum += x[i-k+j] * atmp[j] + } + if nonunit { + sum += x[i] * atmp[k] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + l := max(0, k-i) + atmp := a[i*lda:] + var sum float64 + jx := l * incX + for j := l; j < k; j++ { + sum += x[ix-k*incX+jx] * atmp[j] + jx += incX + } + if nonunit { + sum += x[ix] * atmp[k] + } else { + sum += x[ix] + } + x[ix] = sum + ix -= incX + } + return + } + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + u := k + 1 + if i < u { + u = i + 1 + } + var sum float64 + for j := 1; j < u; j++ { + sum += x[i-j] * a[(i-j)*lda+j] + } + if nonunit { + sum += x[i] * a[i*lda] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + u := k + 1 + if i < u { + u = i + 1 + } + var sum float64 + jx := incX + for j := 1; j < u; j++ { + sum += x[ix-jx] * a[(i-j)*lda+j] + jx += incX + } + if nonunit { + sum += x[ix] * a[i*lda] + } else { + sum += x[ix] + } + x[ix] = sum + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + u := k + if i+k >= n { + u = n - i - 1 + } + var sum float64 + for j := 0; j < u; j++ { + sum += x[i+j+1] * a[(i+j+1)*lda+k-j-1] + } + if nonunit { + sum += x[i] * a[i*lda+k] + } else { + sum += x[i] + } + x[i] = sum + } + return + } + ix := kx + for i := 0; i < n; i++ { + u := k + if i+k >= n { + u = n - i - 1 + } + var ( + sum float64 + jx int + ) + for j := 0; j < u; j++ { + sum += x[ix+jx+incX] * a[(i+j+1)*lda+k-j-1] + jx += incX + } + if nonunit { + sum += x[ix] * a[i*lda+k] + } else { + sum += x[ix] + } + x[ix] = sum + ix += incX + } +} + +// Dtpmv performs one of the matrix-vector operations +// x = A * x if tA == blas.NoTrans +// x = A^T * x if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix in packed format, and x is a vector. +func (Implementation) Dtpmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float64, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonUnit := d == blas.NonUnit + var offset int // Offset is the index of (i,i) + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + if nonUnit { + xi *= ap[offset] + } + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xi += v * xtmp[j] + } + x[i] = xi + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + xix := x[ix] + if nonUnit { + xix *= ap[offset] + } + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + for _, v := range atmp { + xix += v * x[jx] + jx += incX + } + x[ix] = xix + offset += n - i + ix += incX + } + return + } + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xi := x[i] + if nonUnit { + xi *= ap[offset] + } + atmp := ap[offset-i : offset] + for j, v := range atmp { + xi += v * x[j] + } + x[i] = xi + offset -= i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xix := x[ix] + if nonUnit { + xix *= ap[offset] + } + atmp := ap[offset-i : offset] + jx := kx + for _, v := range atmp { + xix += v * x[jx] + jx += incX + } + x[ix] = xix + offset -= i + 1 + ix -= incX + } + return + } + // Cases where ap is transposed. + if ul == blas.Upper { + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xi := x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xtmp[j] += v * xi + } + if nonUnit { + x[i] *= ap[offset] + } + offset -= n - i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + xix := x[ix] + jx := kx + (i+1)*incX + atmp := ap[offset+1 : offset+n-i] + for _, v := range atmp { + x[jx] += v * xix + jx += incX + } + if nonUnit { + x[ix] *= ap[offset] + } + offset -= n - i + 1 + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + xi := x[i] + atmp := ap[offset-i : offset] + for j, v := range atmp { + x[j] += v * xi + } + if nonUnit { + x[i] *= ap[offset] + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + xix := x[ix] + jx := kx + atmp := ap[offset-i : offset] + for _, v := range atmp { + x[jx] += v * xix + jx += incX + } + if nonUnit { + x[ix] *= ap[offset] + } + ix += incX + offset += i + 2 + } +} + +// Dtbsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or tA == blas.ConjTrans +// where A is an n×n triangular band matrix with k+1 diagonals, +// and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. +// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Dtbsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float64, lda int, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. 
+ if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + nonUnit := d == blas.NonUnit + // Form x = A^-1 x. + // Several cases below use subslices for speed improvement. + // The incX != 1 cases usually do not because incX may be negative. + if tA == blas.NoTrans { + if ul == blas.Upper { + if incX == 1 { + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + atmp := a[i*lda+1:] + xtmp := x[i+1 : i+bands+1] + var sum float64 + for j, v := range xtmp { + sum += v * atmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + max := k + 1 + if i+max > n { + max = n - i + } + atmp := a[i*lda:] + var ( + jx int + sum float64 + ) + for j := 1; j < max; j++ { + jx += incX + sum += x[ix+jx] * atmp[j] + } + x[ix] -= sum + if nonUnit { + x[ix] /= atmp[0] + } + ix -= incX + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + atmp := a[i*lda+k-bands:] + xtmp := x[i-bands : i] + var sum float64 + for j, v := range xtmp { + sum += v * atmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= atmp[bands] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + atmp := a[i*lda+k-bands:] + var ( + sum float64 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix-bands*incX+jx] * atmp[j] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= atmp[bands] + } + ix += incX + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + var sum float64 + for j := 0; j < bands; j++ { + sum += x[i-bands+j] * a[(i-bands+j)*lda+bands-j] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda] + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + bands := k + if i-k < 0 { + bands = i + } + var ( + sum float64 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix-bands*incX+jx] * a[(i-bands+j)*lda+bands-j] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda] + } + ix += incX + } + return + } + if incX == 1 { + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + var sum float64 + xtmp := x[i+1 : i+1+bands] + for j, v := range xtmp { + sum += v * a[(i+j+1)*lda+k-j-1] + } + x[i] -= sum + if nonUnit { + x[i] /= a[i*lda+k] + } + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + bands := k + if i+bands >= n { + bands = n - i - 1 + } + var ( + sum float64 + jx int + ) + for j := 0; j < bands; j++ { + sum += x[ix+jx+incX] * a[(i+j+1)*lda+k-j-1] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= a[i*lda+k] + } + ix -= incX + } +} + +// Dsbmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric band matrix with k super-diagonals, x and y are +// vectors, and alpha and beta are scalars. 
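+//
+// The band layout used by the loops below is row major: for ul == blas.Upper
+// the element A(i,j) with i <= j <= min(i+k, n-1) is stored at a[i*lda+j-i],
+// and for ul == blas.Lower the element A(i,j) with max(0, i-k) <= j <= i is
+// stored at a[i*lda+k+j-i].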
+func (Implementation) Dsbmv(ul blas.Uplo, n, k int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + if lda < k+1 { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(n-1)+k+1 { + panic(shortA) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up indexes + lenX := n + lenY := n + var kx, ky int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if incY < 0 { + ky = -(lenY - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f64.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f64.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f64.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + atmp := a[i*lda:] + tmp := alpha * x[i] + sum := tmp * atmp[0] + u := min(k, n-i-1) + jy := incY + for j := 1; j <= u; j++ { + v := atmp[j] + sum += alpha * x[i+j] * v + y[iy+jy] += tmp * v + jy += incY + } + y[iy] += sum + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + atmp := a[i*lda:] + tmp := alpha * x[ix] + sum := tmp * atmp[0] + u := min(k, n-i-1) + jx := incX + jy := incY + for j := 1; j <= u; j++ { + v := atmp[j] + sum += alpha * x[ix+jx] * v + y[iy+jy] += tmp * v + jx += incX + jy += incY + } + y[iy] += sum + ix += incX + iy += incY + } + return + } + + // Casses where a has bands below the diagonal. + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + l := max(0, k-i) + tmp := alpha * x[i] + jy := l * incY + atmp := a[i*lda:] + for j := l; j < k; j++ { + v := atmp[j] + y[iy] += alpha * v * x[i-k+j] + y[iy-k*incY+jy] += tmp * v + jy += incY + } + y[iy] += tmp * atmp[k] + iy += incY + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + l := max(0, k-i) + tmp := alpha * x[ix] + jx := l * incX + jy := l * incY + atmp := a[i*lda:] + for j := l; j < k; j++ { + v := atmp[j] + y[iy] += alpha * v * x[ix-k*incX+jx] + y[iy-k*incY+jy] += tmp * v + jx += incX + jy += incY + } + y[iy] += tmp * atmp[k] + ix += incX + iy += incY + } +} + +// Dsyr performs the symmetric rank-one update +// A += alpha * x * x^T +// where A is an n×n symmetric matrix, and x is a vector. +func (Implementation) Dsyr(ul blas.Uplo, n int, alpha float64, x []float64, incX int, a []float64, lda int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + lenX := n + var kx int + if incX < 0 { + kx = -(lenX - 1) * incX + } + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + tmp := x[i] * alpha + if tmp != 0 { + atmp := a[i*lda+i : i*lda+n] + xtmp := x[i:n] + for j, v := range xtmp { + atmp[j] += v * tmp + } + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + tmp := x[ix] * alpha + if tmp != 0 { + jx := ix + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += x[jx] * tmp + jx += incX + } + } + ix += incX + } + return + } + // Cases where a is lower triangular. + if incX == 1 { + for i := 0; i < n; i++ { + tmp := x[i] * alpha + if tmp != 0 { + atmp := a[i*lda:] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += tmp * v + } + } + } + return + } + ix := kx + for i := 0; i < n; i++ { + tmp := x[ix] * alpha + if tmp != 0 { + atmp := a[i*lda:] + jx := kx + for j := 0; j < i+1; j++ { + atmp[j] += tmp * x[jx] + jx += incX + } + } + ix += incX + } +} + +// Dsyr2 performs the symmetric rank-two update +// A += alpha * x * y^T + alpha * y * x^T +// where A is an n×n symmetric matrix, x and y are vectors, and alpha is a scalar. +func (Implementation) Dsyr2(ul blas.Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if lda < max(1, n) { + panic(badLdA) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(a) < lda*(n-1)+n { + panic(shortA) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var ky, kx int + if incY < 0 { + ky = -(n - 1) * incY + } + if incX < 0 { + kx = -(n - 1) * incX + } + if ul == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + xi := x[i] + yi := y[i] + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += alpha * (xi*y[j] + x[j]*yi) + } + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + i*incX + jy := ky + i*incY + xi := x[ix] + yi := y[iy] + atmp := a[i*lda:] + for j := i; j < n; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + } + return + } + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + xi := x[i] + yi := y[i] + atmp := a[i*lda:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (xi*y[j] + x[j]*yi) + } + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + xi := x[ix] + yi := y[iy] + atmp := a[i*lda:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + } +} + +// Dtpsv solves one of the systems of equations +// A * x = b if tA == blas.NoTrans +// A^T * x = b if tA == blas.Trans or blas.ConjTrans +// where A is an n×n triangular matrix in packed format, and x and b are vectors. +// +// At entry to the function, x contains the values of b, and the result is +// stored in-place into x. 
+// +// No test for singularity or near-singularity is included in this +// routine. Such tests must be performed before calling this routine. +func (Implementation) Dtpsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float64, x []float64, incX int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + + var kx int + if incX < 0 { + kx = -(n - 1) * incX + } + + nonUnit := d == blas.NonUnit + var offset int // Offset is the index of (i,i) + if tA == blas.NoTrans { + if ul == blas.Upper { + offset = n*(n+1)/2 - 1 + if incX == 1 { + for i := n - 1; i >= 0; i-- { + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + var sum float64 + for j, v := range atmp { + sum += v * xtmp[j] + } + x[i] -= sum + if nonUnit { + x[i] /= ap[offset] + } + offset -= n - i + 1 + } + return + } + ix := kx + (n-1)*incX + for i := n - 1; i >= 0; i-- { + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + var sum float64 + for _, v := range atmp { + sum += v * x[jx] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= ap[offset] + } + ix -= incX + offset -= n - i + 1 + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i : offset] + var sum float64 + for j, v := range atmp { + sum += v * x[j] + } + x[i] -= sum + if nonUnit { + x[i] /= ap[offset] + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + atmp := ap[offset-i : offset] + var sum float64 + for _, v := range atmp { + sum += v * x[jx] + jx += incX + } + x[ix] -= sum + if nonUnit { + x[ix] /= ap[offset] + } + ix += incX + offset += i + 2 + } + return + } + // Cases where ap is transposed. + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + if nonUnit { + x[i] /= ap[offset] + } + xi := x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + for j, v := range atmp { + xtmp[j] -= v * xi + } + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + if nonUnit { + x[ix] /= ap[offset] + } + xix := x[ix] + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + for _, v := range atmp { + x[jx] -= v * xix + jx += incX + } + ix += incX + offset += n - i + } + return + } + if incX == 1 { + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[i] /= ap[offset] + } + xi := x[i] + atmp := ap[offset-i : offset] + for j, v := range atmp { + x[j] -= v * xi + } + offset -= i + 1 + } + return + } + ix := kx + (n-1)*incX + offset = n*(n+1)/2 - 1 + for i := n - 1; i >= 0; i-- { + if nonUnit { + x[ix] /= ap[offset] + } + xix := x[ix] + atmp := ap[offset-i : offset] + jx := kx + for _, v := range atmp { + x[jx] -= v * xix + jx += incX + } + ix -= incX + offset -= i + 1 + } +} + +// Dspmv performs the matrix-vector operation +// y = alpha * A * x + beta * y +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha and beta are scalars. 
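+//
+// Only the triangle of A indicated by ul is stored in ap; each stored
+// off-diagonal element is therefore used twice in the loops below, once in
+// the dot product that accumulates into element i of y and once in the
+// update of its symmetric counterpart.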
+func (Implementation) Dspmv(ul blas.Uplo, n int, alpha float64, ap []float64, x []float64, incX int, beta float64, y []float64, incY int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + // Set up start points + var kx, ky int + if incX < 0 { + kx = -(n - 1) * incX + } + if incY < 0 { + ky = -(n - 1) * incY + } + + // Form y = beta * y. + if beta != 1 { + if incY == 1 { + if beta == 0 { + for i := range y[:n] { + y[i] = 0 + } + } else { + f64.ScalUnitary(beta, y[:n]) + } + } else { + iy := ky + if beta == 0 { + for i := 0; i < n; i++ { + y[iy] = 0 + iy += incY + } + } else { + if incY > 0 { + f64.ScalInc(beta, y, uintptr(n), uintptr(incY)) + } else { + f64.ScalInc(beta, y, uintptr(n), uintptr(-incY)) + } + } + } + } + + if alpha == 0 { + return + } + + if n == 1 { + y[0] += alpha * ap[0] * x[0] + return + } + var offset int // Offset is the index of (i,i). + if ul == blas.Upper { + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + sum := ap[offset] * x[i] + atmp := ap[offset+1 : offset+n-i] + xtmp := x[i+1:] + jy := ky + (i+1)*incY + for j, v := range atmp { + sum += v * xtmp[j] + y[jy] += v * xv + jy += incY + } + y[iy] += alpha * sum + iy += incY + offset += n - i + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + sum := ap[offset] * x[ix] + atmp := ap[offset+1 : offset+n-i] + jx := kx + (i+1)*incX + jy := ky + (i+1)*incY + for _, v := range atmp { + sum += v * x[jx] + y[jy] += v * xv + jx += incX + jy += incY + } + y[iy] += alpha * sum + ix += incX + iy += incY + offset += n - i + } + return + } + if incX == 1 { + iy := ky + for i := 0; i < n; i++ { + xv := x[i] * alpha + atmp := ap[offset-i : offset] + jy := ky + var sum float64 + for j, v := range atmp { + sum += v * x[j] + y[jy] += v * xv + jy += incY + } + sum += ap[offset] * x[i] + y[iy] += alpha * sum + iy += incY + offset += i + 2 + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + xv := x[ix] * alpha + atmp := ap[offset-i : offset] + jx := kx + jy := ky + var sum float64 + for _, v := range atmp { + sum += v * x[jx] + y[jy] += v * xv + jx += incX + jy += incY + } + + sum += ap[offset] * x[ix] + y[iy] += alpha * sum + ix += incX + iy += incY + offset += i + 2 + } +} + +// Dspr performs the symmetric rank-one operation +// A += alpha * x * x^T +// where A is an n×n symmetric matrix in packed format, x is a vector, and +// alpha is a scalar. +func (Implementation) Dspr(ul blas.Uplo, n int, alpha float64, x []float64, incX int, ap []float64) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + lenX := n + var kx int + if incX < 0 { + kx = -(lenX - 1) * incX + } + var offset int // Offset is the index of (i,i). + if ul == blas.Upper { + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset:] + xv := alpha * x[i] + xtmp := x[i:n] + for j, v := range xtmp { + atmp[j] += xv * v + } + offset += n - i + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + i*incX + atmp := ap[offset:] + xv := alpha * x[ix] + for j := 0; j < n-i; j++ { + atmp[j] += xv * x[jx] + jx += incX + } + ix += incX + offset += n - i + } + return + } + if incX == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i:] + xv := alpha * x[i] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += xv * v + } + offset += i + 2 + } + return + } + ix := kx + for i := 0; i < n; i++ { + jx := kx + atmp := ap[offset-i:] + xv := alpha * x[ix] + for j := 0; j <= i; j++ { + atmp[j] += xv * x[jx] + jx += incX + } + ix += incX + offset += i + 2 + } +} + +// Dspr2 performs the symmetric rank-2 update +// A += alpha * x * y^T + alpha * y * x^T +// where A is an n×n symmetric matrix in packed format, x and y are vectors, +// and alpha is a scalar. +func (Implementation) Dspr2(ul blas.Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, ap []float64) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if n < 0 { + panic(nLT0) + } + if incX == 0 { + panic(zeroIncX) + } + if incY == 0 { + panic(zeroIncY) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if (incX > 0 && len(x) <= (n-1)*incX) || (incX < 0 && len(x) <= (1-n)*incX) { + panic(shortX) + } + if (incY > 0 && len(y) <= (n-1)*incY) || (incY < 0 && len(y) <= (1-n)*incY) { + panic(shortY) + } + if len(ap) < n*(n+1)/2 { + panic(shortAP) + } + + // Quick return if possible. + if alpha == 0 { + return + } + + var ky, kx int + if incY < 0 { + ky = -(n - 1) * incY + } + if incX < 0 { + kx = -(n - 1) * incX + } + var offset int // Offset is the index of (i,i). 
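+ // In this packed layout the diagonal elements (i,i) and (i+1,i+1) are
+ // n-i apart when ul == blas.Upper and i+2 apart when ul == blas.Lower,
+ // which is how offset is advanced in the loops below.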
+ if ul == blas.Upper { + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset:] + xi := x[i] + yi := y[i] + xtmp := x[i:n] + ytmp := y[i:n] + for j, v := range xtmp { + atmp[j] += alpha * (xi*ytmp[j] + v*yi) + } + offset += n - i + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + i*incX + jy := ky + i*incY + atmp := ap[offset:] + xi := x[ix] + yi := y[iy] + for j := 0; j < n-i; j++ { + atmp[j] += alpha * (xi*y[jy] + x[jx]*yi) + jx += incX + jy += incY + } + ix += incX + iy += incY + offset += n - i + } + return + } + if incX == 1 && incY == 1 { + for i := 0; i < n; i++ { + atmp := ap[offset-i:] + xi := x[i] + yi := y[i] + xtmp := x[:i+1] + for j, v := range xtmp { + atmp[j] += alpha * (xi*y[j] + v*yi) + } + offset += i + 2 + } + return + } + ix := kx + iy := ky + for i := 0; i < n; i++ { + jx := kx + jy := ky + atmp := ap[offset-i:] + for j := 0; j <= i; j++ { + atmp[j] += alpha * (x[ix]*y[jy] + x[jx]*y[iy]) + jx += incX + jy += incY + } + ix += incX + iy += incY + offset += i + 2 + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx128.go b/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx128.go new file mode 100644 index 0000000..e4a2bb5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx128.go @@ -0,0 +1,1715 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math/cmplx" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c128" +) + +var _ blas.Complex128Level3 = Implementation{} + +// Zgemm performs one of the matrix-matrix operations +// C = alpha * op(A) * op(B) + beta * C +// where op(X) is one of +// op(X) = X or op(X) = X^T or op(X) = X^H, +// alpha and beta are scalars, and A, B and C are matrices, with op(A) an m×k matrix, +// op(B) a k×n matrix and C an m×n matrix. +func (Implementation) Zgemm(tA, tB blas.Transpose, m, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) { + switch tA { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch tB { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + } + rowA, colA := m, k + if tA != blas.NoTrans { + rowA, colA = k, m + } + if lda < max(1, colA) { + panic(badLdA) + } + rowB, colB := k, n + if tB != blas.NoTrans { + rowB, colB = n, k + } + if ldb < max(1, colB) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(b) < (rowB-1)*ldb+colB { + panic(shortB) + } + if len(c) < (m-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + } + return + } + + switch tA { + case blas.NoTrans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A * B + beta * C. 
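+ // Each row of C is first scaled by beta (or zeroed) and then
+ // accumulated as a linear combination of the rows of B, with the
+ // coefficients taken from the corresponding row of A.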
+ for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * b[l*ldb+j] + } + } + } + case blas.Trans: + // Form C = alpha * A * B^T + beta * C. + for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * b[j*ldb+l] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A * B^H + beta * C. + for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * cmplx.Conj(b[j*ldb+l]) + } + } + } + } + case blas.Trans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A^T * B + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * b[l*ldb+j] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.Trans: + // Form C = alpha * A^T * B^T + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * b[j*ldb+l] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A^T * B^H + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * cmplx.Conj(b[j*ldb+l]) + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + case blas.ConjTrans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A^H * B + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * b[l*ldb+j] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.Trans: + // Form C = alpha * A^H * B^T + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * b[j*ldb+l] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A^H * B^H + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex128 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * cmplx.Conj(b[j*ldb+l]) + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Zhemm performs one of the matrix-matrix operations +// C = alpha*A*B + beta*C if side == blas.Left +// C = alpha*B*A + beta*C if side == blas.Right +// where alpha and beta are scalars, A is an m×m or n×n hermitian matrix and B +// and C are m×n matrices. The imaginary parts of the diagonal elements of A are +// assumed to be zero. 
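+//
+// Only the triangle of A indicated by uplo is referenced.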
+func (Implementation) Zhemm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(na-1)+na { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + c128.ScalUnitary(beta, ci) + } + } + return + } + + if side == blas.Left { + // Form C = alpha*A*B + beta*C. + for i := 0; i < m; i++ { + atmp := alpha * complex(real(a[i*lda+i]), 0) + bi := b[i*ldb : i*ldb+n] + ci := c[i*ldc : i*ldc+n] + if beta == 0 { + for j, bij := range bi { + ci[j] = atmp * bij + } + } else { + for j, bij := range bi { + ci[j] = atmp*bij + beta*ci[j] + } + } + if uplo == blas.Upper { + for k := 0; k < i; k++ { + atmp = alpha * cmplx.Conj(a[k*lda+i]) + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[i*lda+k] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } else { + for k := 0; k < i; k++ { + atmp = alpha * a[i*lda+k] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * cmplx.Conj(a[k*lda+i]) + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } + } + } else { + // Form C = alpha*B*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + abij := alpha * b[i*ldb+j] + aj := a[j*lda+j+1 : j*lda+n] + bi := b[i*ldb+j+1 : i*ldb+n] + ci := c[i*ldc+j+1 : i*ldc+n] + var tmp complex128 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * cmplx.Conj(ajk) + } + ajj := complex(real(a[j*lda+j]), 0) + if beta == 0 { + c[i*ldc+j] = abij*ajj + alpha*tmp + } else { + c[i*ldc+j] = abij*ajj + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + abij := alpha * b[i*ldb+j] + aj := a[j*lda : j*lda+j] + bi := b[i*ldb : i*ldb+j] + ci := c[i*ldc : i*ldc+j] + var tmp complex128 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * cmplx.Conj(ajk) + } + ajj := complex(real(a[j*lda+j]), 0) + if beta == 0 { + c[i*ldc+j] = abij*ajj + alpha*tmp + } else { + c[i*ldc+j] = abij*ajj + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Zherk performs one of the hermitian rank-k operations +// C = alpha*A*A^H + beta*C if trans == blas.NoTrans +// C = alpha*A^H*A + beta*C if trans == blas.ConjTrans +// where alpha and beta are real scalars, C is an n×n hermitian matrix and A is +// an n×k matrix in the first case and a k×n matrix in the second case. +// +// The imaginary parts of the diagonal elements of C are assumed to be zero, and +// on return they will be set to zero. 
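+//
+// Only the triangle of C indicated by uplo is referenced and updated.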
+func (Implementation) Zherk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha float64, a []complex128, lda int, beta float64, c []complex128, ldc int) { + var rowA, colA int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + rowA, colA = n, k + case blas.ConjTrans: + rowA, colA = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, colA): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ci[0] = complex(beta*real(ci[0]), 0) + if i != n-1 { + c128.DscalUnitary(beta, ci[1:]) + } + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + if i != 0 { + c128.DscalUnitary(beta, ci[:i]) + } + ci[i] = complex(beta*real(ci[i]), 0) + } + } + } + return + } + + calpha := complex(alpha, 0) + if trans == blas.NoTrans { + // Form C = alpha*A*A^H + beta*C. + cbeta := complex(beta, 0) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + switch { + case beta == 0: + // Handle the i-th diagonal element of C. + ci[0] = complex(alpha*real(c128.DotcUnitary(ai, ai)), 0) + // Handle the remaining elements on the i-th row of C. + for jc := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha * c128.DotcUnitary(a[j*lda:j*lda+k], ai) + } + case beta != 1: + cii := calpha*c128.DotcUnitary(ai, ai) + cbeta*ci[0] + ci[0] = complex(real(cii), 0) + for jc, cij := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha*c128.DotcUnitary(a[j*lda:j*lda+k], ai) + cbeta*cij + } + default: + cii := calpha*c128.DotcUnitary(ai, ai) + ci[0] + ci[0] = complex(real(cii), 0) + for jc, cij := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha*c128.DotcUnitary(a[j*lda:j*lda+k], ai) + cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + switch { + case beta == 0: + // Handle the first i-1 elements on the i-th row of C. + for j := range ci[:i] { + ci[j] = calpha * c128.DotcUnitary(a[j*lda:j*lda+k], ai) + } + // Handle the i-th diagonal element of C. + ci[i] = complex(alpha*real(c128.DotcUnitary(ai, ai)), 0) + case beta != 1: + for j, cij := range ci[:i] { + ci[j] = calpha*c128.DotcUnitary(a[j*lda:j*lda+k], ai) + cbeta*cij + } + cii := calpha*c128.DotcUnitary(ai, ai) + cbeta*ci[i] + ci[i] = complex(real(cii), 0) + default: + for j, cij := range ci[:i] { + ci[j] = calpha*c128.DotcUnitary(a[j*lda:j*lda+k], ai) + cij + } + cii := calpha*c128.DotcUnitary(ai, ai) + ci[i] + ci[i] = complex(real(cii), 0) + } + } + } + } else { + // Form C = alpha*A^H*A + beta*C. 
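+ // The stored triangle of C is scaled by beta and then the
+ // contribution of each row of A is accumulated with axpy updates.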
+ if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + c128.DscalUnitary(beta, ci) + ci[0] = complex(real(ci[0]), 0) + default: + ci[0] = complex(real(ci[0]), 0) + } + for j := 0; j < k; j++ { + aji := cmplx.Conj(a[j*lda+i]) + if aji != 0 { + c128.AxpyUnitary(calpha*aji, a[j*lda+i:j*lda+n], ci) + } + } + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + c128.DscalUnitary(beta, ci) + ci[i] = complex(real(ci[i]), 0) + default: + ci[i] = complex(real(ci[i]), 0) + } + for j := 0; j < k; j++ { + aji := cmplx.Conj(a[j*lda+i]) + if aji != 0 { + c128.AxpyUnitary(calpha*aji, a[j*lda:j*lda+i+1], ci) + } + } + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } + } +} + +// Zher2k performs one of the hermitian rank-2k operations +// C = alpha*A*B^H + conj(alpha)*B*A^H + beta*C if trans == blas.NoTrans +// C = alpha*A^H*B + conj(alpha)*B^H*A + beta*C if trans == blas.ConjTrans +// where alpha and beta are scalars with beta real, C is an n×n hermitian matrix +// and A and B are n×k matrices in the first case and k×n matrices in the second case. +// +// The imaginary parts of the diagonal elements of C are assumed to be zero, and +// on return they will be set to zero. +func (Implementation) Zher2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta float64, c []complex128, ldc int) { + var row, col int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + row, col = n, k + case blas.ConjTrans: + row, col = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, col): + panic(badLdA) + case ldb < max(1, col): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (row-1)*lda+col { + panic(shortA) + } + if len(b) < (row-1)*ldb+col { + panic(shortB) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ci[0] = complex(beta*real(ci[0]), 0) + if i != n-1 { + c128.DscalUnitary(beta, ci[1:]) + } + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + if i != 0 { + c128.DscalUnitary(beta, ci[:i]) + } + ci[i] = complex(beta*real(ci[i]), 0) + } + } + } + return + } + + conjalpha := cmplx.Conj(alpha) + cbeta := complex(beta, 0) + if trans == blas.NoTrans { + // Form C = alpha*A*B^H + conj(alpha)*B*A^H + beta*C. 
+ if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i+1 : i*ldc+n] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + cii := alpha*c128.DotcUnitary(bi, ai) + conjalpha*c128.DotcUnitary(ai, bi) + c[i*ldc+i] = complex(real(cii), 0) + for jc := range ci { + j := i + 1 + jc + ci[jc] = alpha*c128.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c128.DotcUnitary(a[j*lda:j*lda+k], bi) + } + } else { + cii := alpha*c128.DotcUnitary(bi, ai) + conjalpha*c128.DotcUnitary(ai, bi) + cbeta*c[i*ldc+i] + c[i*ldc+i] = complex(real(cii), 0) + for jc, cij := range ci { + j := i + 1 + jc + ci[jc] = alpha*c128.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c128.DotcUnitary(a[j*lda:j*lda+k], bi) + cbeta*cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for j := range ci { + ci[j] = alpha*c128.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c128.DotcUnitary(a[j*lda:j*lda+k], bi) + } + cii := alpha*c128.DotcUnitary(bi, ai) + conjalpha*c128.DotcUnitary(ai, bi) + c[i*ldc+i] = complex(real(cii), 0) + } else { + for j, cij := range ci { + ci[j] = alpha*c128.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c128.DotcUnitary(a[j*lda:j*lda+k], bi) + cbeta*cij + } + cii := alpha*c128.DotcUnitary(bi, ai) + conjalpha*c128.DotcUnitary(ai, bi) + cbeta*c[i*ldc+i] + c[i*ldc+i] = complex(real(cii), 0) + } + } + } + } else { + // Form C = alpha*A^H*B + conj(alpha)*B^H*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + c128.DscalUnitary(beta, ci) + ci[0] = complex(real(ci[0]), 0) + default: + ci[0] = complex(real(ci[0]), 0) + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c128.AxpyUnitary(alpha*cmplx.Conj(aji), b[j*ldb+i:j*ldb+n], ci) + } + if bji != 0 { + c128.AxpyUnitary(conjalpha*cmplx.Conj(bji), a[j*lda+i:j*lda+n], ci) + } + } + ci[0] = complex(real(ci[0]), 0) + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + c128.DscalUnitary(beta, ci) + ci[i] = complex(real(ci[i]), 0) + default: + ci[i] = complex(real(ci[i]), 0) + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c128.AxpyUnitary(alpha*cmplx.Conj(aji), b[j*ldb:j*ldb+i+1], ci) + } + if bji != 0 { + c128.AxpyUnitary(conjalpha*cmplx.Conj(bji), a[j*lda:j*lda+i+1], ci) + } + } + ci[i] = complex(real(ci[i]), 0) + } + } + } +} + +// Zsymm performs one of the matrix-matrix operations +// C = alpha*A*B + beta*C if side == blas.Left +// C = alpha*B*A + beta*C if side == blas.Right +// where alpha and beta are scalars, A is an m×m or n×n symmetric matrix and B +// and C are m×n matrices. +func (Implementation) Zsymm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. 
+ if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(na-1)+na { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + c128.ScalUnitary(beta, ci) + } + } + return + } + + if side == blas.Left { + // Form C = alpha*A*B + beta*C. + for i := 0; i < m; i++ { + atmp := alpha * a[i*lda+i] + bi := b[i*ldb : i*ldb+n] + ci := c[i*ldc : i*ldc+n] + if beta == 0 { + for j, bij := range bi { + ci[j] = atmp * bij + } + } else { + for j, bij := range bi { + ci[j] = atmp*bij + beta*ci[j] + } + } + if uplo == blas.Upper { + for k := 0; k < i; k++ { + atmp = alpha * a[k*lda+i] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[i*lda+k] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } else { + for k := 0; k < i; k++ { + atmp = alpha * a[i*lda+k] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[k*lda+i] + c128.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } + } + } else { + // Form C = alpha*B*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + abij := alpha * b[i*ldb+j] + aj := a[j*lda+j+1 : j*lda+n] + bi := b[i*ldb+j+1 : i*ldb+n] + ci := c[i*ldc+j+1 : i*ldc+n] + var tmp complex128 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * ajk + } + if beta == 0 { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + } else { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + abij := alpha * b[i*ldb+j] + aj := a[j*lda : j*lda+j] + bi := b[i*ldb : i*ldb+j] + ci := c[i*ldc : i*ldc+j] + var tmp complex128 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * ajk + } + if beta == 0 { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + } else { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Zsyrk performs one of the symmetric rank-k operations +// C = alpha*A*A^T + beta*C if trans == blas.NoTrans +// C = alpha*A^T*A + beta*C if trans == blas.Trans +// where alpha and beta are scalars, C is an n×n symmetric matrix and A is +// an n×k matrix in the first case and a k×n matrix in the second case. +func (Implementation) Zsyrk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, beta complex128, c []complex128, ldc int) { + var rowA, colA int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + rowA, colA = n, k + case blas.Trans: + rowA, colA = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, colA): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. 
+ if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + c128.ScalUnitary(beta, ci) + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + c128.ScalUnitary(beta, ci) + } + } + } + return + } + + if trans == blas.NoTrans { + // Form C = alpha*A*A^T + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + for jc, cij := range ci { + j := i + jc + ci[jc] = beta*cij + alpha*c128.DotuUnitary(ai, a[j*lda:j*lda+k]) + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + for j, cij := range ci { + ci[j] = beta*cij + alpha*c128.DotuUnitary(ai, a[j*lda:j*lda+k]) + } + } + } + } else { + // Form C = alpha*A^T*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + for jc := range ci { + ci[jc] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + if aji != 0 { + c128.AxpyUnitary(alpha*aji, a[j*lda+i:j*lda+n], ci) + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + for j := range ci { + ci[j] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + if aji != 0 { + c128.AxpyUnitary(alpha*aji, a[j*lda:j*lda+i+1], ci) + } + } + } + } + } +} + +// Zsyr2k performs one of the symmetric rank-2k operations +// C = alpha*A*B^T + alpha*B*A^T + beta*C if trans == blas.NoTrans +// C = alpha*A^T*B + alpha*B^T*A + beta*C if trans == blas.Trans +// where alpha and beta are scalars, C is an n×n symmetric matrix and A and B +// are n×k matrices in the first case and k×n matrices in the second case. +func (Implementation) Zsyr2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) { + var row, col int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + row, col = n, k + case blas.Trans: + row, col = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, col): + panic(badLdA) + case ldb < max(1, col): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (row-1)*lda+col { + panic(shortA) + } + if len(b) < (row-1)*ldb+col { + panic(shortB) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. 
+ if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + c128.ScalUnitary(beta, ci) + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + c128.ScalUnitary(beta, ci) + } + } + } + return + } + + if trans == blas.NoTrans { + // Form C = alpha*A*B^T + alpha*B*A^T + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for jc := range ci { + j := i + jc + ci[jc] = alpha*c128.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c128.DotuUnitary(bi, a[j*lda:j*lda+k]) + } + } else { + for jc, cij := range ci { + j := i + jc + ci[jc] = alpha*c128.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c128.DotuUnitary(bi, a[j*lda:j*lda+k]) + beta*cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for j := range ci { + ci[j] = alpha*c128.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c128.DotuUnitary(bi, a[j*lda:j*lda+k]) + } + } else { + for j, cij := range ci { + ci[j] = alpha*c128.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c128.DotuUnitary(bi, a[j*lda:j*lda+k]) + beta*cij + } + } + } + } + } else { + // Form C = alpha*A^T*B + alpha*B^T*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + for jc := range ci { + ci[jc] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c128.AxpyUnitary(alpha*aji, b[j*ldb+i:j*ldb+n], ci) + } + if bji != 0 { + c128.AxpyUnitary(alpha*bji, a[j*lda+i:j*lda+n], ci) + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + for j := range ci { + ci[j] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c128.AxpyUnitary(alpha*aji, b[j*ldb:j*ldb+i+1], ci) + } + if bji != 0 { + c128.AxpyUnitary(alpha*bji, a[j*lda:j*lda+i+1], ci) + } + } + } + } + } +} + +// Ztrmm performs one of the matrix-matrix operations +// B = alpha * op(A) * B if side == blas.Left, +// B = alpha * B * op(A) if side == blas.Right, +// where alpha is a scalar, B is an m×n matrix, A is a unit, or non-unit, +// upper or lower triangular matrix and op(A) is one of +// op(A) = A if trans == blas.NoTrans, +// op(A) = A^T if trans == blas.Trans, +// op(A) = A^H if trans == blas.ConjTrans. 
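The triangular multiply documented above reads a single triangle of A and overwrites B in place, which is why there is no separate output argument. A small sketch under the same assumptions (row-major storage, illustrative 2×2 values that are not part of this change):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/gonum"
)

func main() {
	// Upper-triangular A (2×2, row-major); the strictly lower part is never read.
	a := []complex128{
		1, 2,
		0, 3,
	}
	// B is 2×2 and is overwritten with alpha*A*B.
	b := []complex128{
		1, 1,
		1, 1,
	}

	var impl gonum.Implementation
	impl.Ztrmm(blas.Left, blas.Upper, blas.NoTrans, blas.NonUnit, 2, 2, 1, a, 2, b, 2)
	fmt.Println(b) // [(3+0i) (3+0i) (3+0i) (3+0i)]
}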
+func (Implementation) Ztrmm(side blas.Side, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTranspose) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (na-1)*lda+na { + panic(shortA) + } + if len(b) < (m-1)*ldb+n { + panic(shortB) + } + + // Quick return if possible. + if alpha == 0 { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] = 0 + } + } + return + } + + noConj := trans != blas.ConjTrans + noUnit := diag == blas.NonUnit + if side == blas.Left { + if trans == blas.NoTrans { + // Form B = alpha*A*B. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + aii := alpha + if noUnit { + aii *= a[i*lda+i] + } + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] *= aii + } + for ja, aij := range a[i*lda+i+1 : i*lda+m] { + j := ja + i + 1 + if aij != 0 { + c128.AxpyUnitary(alpha*aij, b[j*ldb:j*ldb+n], bi) + } + } + } + } else { + for i := m - 1; i >= 0; i-- { + aii := alpha + if noUnit { + aii *= a[i*lda+i] + } + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] *= aii + } + for j, aij := range a[i*lda : i*lda+i] { + if aij != 0 { + c128.AxpyUnitary(alpha*aij, b[j*ldb:j*ldb+n], bi) + } + } + } + } + } else { + // Form B = alpha*A^T*B or B = alpha*A^H*B. + if uplo == blas.Upper { + for k := m - 1; k >= 0; k-- { + bk := b[k*ldb : k*ldb+n] + for ja, ajk := range a[k*lda+k+1 : k*lda+m] { + if ajk == 0 { + continue + } + j := k + 1 + ja + if noConj { + c128.AxpyUnitary(alpha*ajk, bk, b[j*ldb:j*ldb+n]) + } else { + c128.AxpyUnitary(alpha*cmplx.Conj(ajk), bk, b[j*ldb:j*ldb+n]) + } + } + akk := alpha + if noUnit { + if noConj { + akk *= a[k*lda+k] + } else { + akk *= cmplx.Conj(a[k*lda+k]) + } + } + if akk != 1 { + c128.ScalUnitary(akk, bk) + } + } + } else { + for k := 0; k < m; k++ { + bk := b[k*ldb : k*ldb+n] + for j, ajk := range a[k*lda : k*lda+k] { + if ajk == 0 { + continue + } + if noConj { + c128.AxpyUnitary(alpha*ajk, bk, b[j*ldb:j*ldb+n]) + } else { + c128.AxpyUnitary(alpha*cmplx.Conj(ajk), bk, b[j*ldb:j*ldb+n]) + } + } + akk := alpha + if noUnit { + if noConj { + akk *= a[k*lda+k] + } else { + akk *= cmplx.Conj(a[k*lda+k]) + } + } + if akk != 1 { + c128.ScalUnitary(akk, bk) + } + } + } + } + } else { + if trans == blas.NoTrans { + // Form B = alpha*B*A. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for k := n - 1; k >= 0; k-- { + abik := alpha * bi[k] + if abik == 0 { + continue + } + bi[k] = abik + if noUnit { + bi[k] *= a[k*lda+k] + } + c128.AxpyUnitary(abik, a[k*lda+k+1:k*lda+n], bi[k+1:]) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for k := 0; k < n; k++ { + abik := alpha * bi[k] + if abik == 0 { + continue + } + bi[k] = abik + if noUnit { + bi[k] *= a[k*lda+k] + } + c128.AxpyUnitary(abik, a[k*lda:k*lda+k], bi[:k]) + } + } + } + } else { + // Form B = alpha*B*A^T or B = alpha*B*A^H. 
+ if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j, bij := range bi { + if noConj { + if noUnit { + bij *= a[j*lda+j] + } + bij += c128.DotuUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } else { + if noUnit { + bij *= cmplx.Conj(a[j*lda+j]) + } + bij += c128.DotcUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } + bi[j] = alpha * bij + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + bij := bi[j] + if noConj { + if noUnit { + bij *= a[j*lda+j] + } + bij += c128.DotuUnitary(a[j*lda:j*lda+j], bi[:j]) + } else { + if noUnit { + bij *= cmplx.Conj(a[j*lda+j]) + } + bij += c128.DotcUnitary(a[j*lda:j*lda+j], bi[:j]) + } + bi[j] = alpha * bij + } + } + } + } + } +} + +// Ztrsm solves one of the matrix equations +// op(A) * X = alpha * B if side == blas.Left, +// X * op(A) = alpha * B if side == blas.Right, +// where alpha is a scalar, X and B are m×n matrices, A is a unit or +// non-unit, upper or lower triangular matrix and op(A) is one of +// op(A) = A if transA == blas.NoTrans, +// op(A) = A^T if transA == blas.Trans, +// op(A) = A^H if transA == blas.ConjTrans. +// On return the matrix X is overwritten on B. +func (Implementation) Ztrsm(side blas.Side, uplo blas.Uplo, transA blas.Transpose, diag blas.Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case transA != blas.NoTrans && transA != blas.Trans && transA != blas.ConjTrans: + panic(badTranspose) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (na-1)*lda+na { + panic(shortA) + } + if len(b) < (m-1)*ldb+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + b[i*ldb+j] = 0 + } + } + return + } + + noConj := transA != blas.ConjTrans + noUnit := diag == blas.NonUnit + if side == blas.Left { + if transA == blas.NoTrans { + // Form B = alpha*inv(A)*B. + if uplo == blas.Upper { + for i := m - 1; i >= 0; i-- { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + for ka, aik := range a[i*lda+i+1 : i*lda+m] { + k := i + 1 + ka + if aik != 0 { + c128.AxpyUnitary(-aik, b[k*ldb:k*ldb+n], bi) + } + } + if noUnit { + c128.ScalUnitary(1/a[i*lda+i], bi) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + for j, aij := range a[i*lda : i*lda+i] { + if aij != 0 { + c128.AxpyUnitary(-aij, b[j*ldb:j*ldb+n], bi) + } + } + if noUnit { + c128.ScalUnitary(1/a[i*lda+i], bi) + } + } + } + } else { + // Form B = alpha*inv(A^T)*B or B = alpha*inv(A^H)*B. 
+ if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if noUnit { + if noConj { + c128.ScalUnitary(1/a[i*lda+i], bi) + } else { + c128.ScalUnitary(1/cmplx.Conj(a[i*lda+i]), bi) + } + } + for ja, aij := range a[i*lda+i+1 : i*lda+m] { + if aij == 0 { + continue + } + j := i + 1 + ja + if noConj { + c128.AxpyUnitary(-aij, bi, b[j*ldb:j*ldb+n]) + } else { + c128.AxpyUnitary(-cmplx.Conj(aij), bi, b[j*ldb:j*ldb+n]) + } + } + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + } + } else { + for i := m - 1; i >= 0; i-- { + bi := b[i*ldb : i*ldb+n] + if noUnit { + if noConj { + c128.ScalUnitary(1/a[i*lda+i], bi) + } else { + c128.ScalUnitary(1/cmplx.Conj(a[i*lda+i]), bi) + } + } + for j, aij := range a[i*lda : i*lda+i] { + if aij == 0 { + continue + } + if noConj { + c128.AxpyUnitary(-aij, bi, b[j*ldb:j*ldb+n]) + } else { + c128.AxpyUnitary(-cmplx.Conj(aij), bi, b[j*ldb:j*ldb+n]) + } + } + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + } + } + } + } else { + if transA == blas.NoTrans { + // Form B = alpha*B*inv(A). + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + for j, bij := range bi { + if bij == 0 { + continue + } + if noUnit { + bi[j] /= a[j*lda+j] + } + c128.AxpyUnitary(-bi[j], a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c128.ScalUnitary(alpha, bi) + } + for j := n - 1; j >= 0; j-- { + if bi[j] == 0 { + continue + } + if noUnit { + bi[j] /= a[j*lda+j] + } + c128.AxpyUnitary(-bi[j], a[j*lda:j*lda+j], bi[:j]) + } + } + } + } else { + // Form B = alpha*B*inv(A^T) or B = alpha*B*inv(A^H). + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + bij := alpha * bi[j] + if noConj { + bij -= c128.DotuUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + if noUnit { + bij /= a[j*lda+j] + } + } else { + bij -= c128.DotcUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + if noUnit { + bij /= cmplx.Conj(a[j*lda+j]) + } + } + bi[j] = bij + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j, bij := range bi { + bij *= alpha + if noConj { + bij -= c128.DotuUnitary(a[j*lda:j*lda+j], bi[:j]) + if noUnit { + bij /= a[j*lda+j] + } + } else { + bij -= c128.DotcUnitary(a[j*lda:j*lda+j], bi[:j]) + if noUnit { + bij /= cmplx.Conj(a[j*lda+j]) + } + } + bi[j] = bij + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx64.go new file mode 100644 index 0000000..436c545 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level3cmplx64.go @@ -0,0 +1,1735 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + cmplx "gonum.org/v1/gonum/internal/cmplx64" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/c64" +) + +var _ blas.Complex64Level3 = Implementation{} + +// Cgemm performs one of the matrix-matrix operations +// C = alpha * op(A) * op(B) + beta * C +// where op(X) is one of +// op(X) = X or op(X) = X^T or op(X) = X^H, +// alpha and beta are scalars, and A, B and C are matrices, with op(A) an m×k matrix, +// op(B) a k×n matrix and C an m×n matrix. +// +// Complex64 implementations are autogenerated and not directly tested. 
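The complex64 file mirrors the complex128 one, so a short sketch of a plain Cgemm call may help when reading the generated code. The matrices, leading dimensions, and expected output below are assumed for illustration and use only the exported API visible in this diff:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/gonum"
)

func main() {
	// 2×2 matrices in row-major order, as expected by this implementation.
	a := []complex64{1, 2, 3, 4}
	b := []complex64{5, 6, 7, 8}
	c := make([]complex64, 4)

	var impl gonum.Implementation
	// C = 1*A*B + 0*C
	impl.Cgemm(blas.NoTrans, blas.NoTrans, 2, 2, 2, 1, a, 2, b, 2, 0, c, 2)
	fmt.Println(c) // [(19+0i) (22+0i) (43+0i) (50+0i)]
}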
+func (Implementation) Cgemm(tA, tB blas.Transpose, m, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) { + switch tA { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch tB { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + } + rowA, colA := m, k + if tA != blas.NoTrans { + rowA, colA = k, m + } + if lda < max(1, colA) { + panic(badLdA) + } + rowB, colB := k, n + if tB != blas.NoTrans { + rowB, colB = n, k + } + if ldb < max(1, colB) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(b) < (rowB-1)*ldb+colB { + panic(shortB) + } + if len(c) < (m-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + } + return + } + + switch tA { + case blas.NoTrans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A * B + beta * C. + for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * b[l*ldb+j] + } + } + } + case blas.Trans: + // Form C = alpha * A * B^T + beta * C. + for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * b[j*ldb+l] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A * B^H + beta * C. + for i := 0; i < m; i++ { + switch { + case beta == 0: + for j := 0; j < n; j++ { + c[i*ldc+j] = 0 + } + case beta != 1: + for j := 0; j < n; j++ { + c[i*ldc+j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[i*lda+l] + for j := 0; j < n; j++ { + c[i*ldc+j] += tmp * cmplx.Conj(b[j*ldb+l]) + } + } + } + } + case blas.Trans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A^T * B + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * b[l*ldb+j] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.Trans: + // Form C = alpha * A^T * B^T + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * b[j*ldb+l] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A^T * B^H + beta * C. 
+ for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += a[l*lda+i] * cmplx.Conj(b[j*ldb+l]) + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + case blas.ConjTrans: + switch tB { + case blas.NoTrans: + // Form C = alpha * A^H * B + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * b[l*ldb+j] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.Trans: + // Form C = alpha * A^H * B^T + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * b[j*ldb+l] + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + case blas.ConjTrans: + // Form C = alpha * A^H * B^H + beta * C. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + var tmp complex64 + for l := 0; l < k; l++ { + tmp += cmplx.Conj(a[l*lda+i]) * cmplx.Conj(b[j*ldb+l]) + } + if beta == 0 { + c[i*ldc+j] = alpha * tmp + } else { + c[i*ldc+j] = alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Chemm performs one of the matrix-matrix operations +// C = alpha*A*B + beta*C if side == blas.Left +// C = alpha*B*A + beta*C if side == blas.Right +// where alpha and beta are scalars, A is an m×m or n×n hermitian matrix and B +// and C are m×n matrices. The imaginary parts of the diagonal elements of A are +// assumed to be zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Chemm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(na-1)+na { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + c64.ScalUnitary(beta, ci) + } + } + return + } + + if side == blas.Left { + // Form C = alpha*A*B + beta*C. 
+ for i := 0; i < m; i++ { + atmp := alpha * complex(real(a[i*lda+i]), 0) + bi := b[i*ldb : i*ldb+n] + ci := c[i*ldc : i*ldc+n] + if beta == 0 { + for j, bij := range bi { + ci[j] = atmp * bij + } + } else { + for j, bij := range bi { + ci[j] = atmp*bij + beta*ci[j] + } + } + if uplo == blas.Upper { + for k := 0; k < i; k++ { + atmp = alpha * cmplx.Conj(a[k*lda+i]) + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[i*lda+k] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } else { + for k := 0; k < i; k++ { + atmp = alpha * a[i*lda+k] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * cmplx.Conj(a[k*lda+i]) + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } + } + } else { + // Form C = alpha*B*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + abij := alpha * b[i*ldb+j] + aj := a[j*lda+j+1 : j*lda+n] + bi := b[i*ldb+j+1 : i*ldb+n] + ci := c[i*ldc+j+1 : i*ldc+n] + var tmp complex64 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * cmplx.Conj(ajk) + } + ajj := complex(real(a[j*lda+j]), 0) + if beta == 0 { + c[i*ldc+j] = abij*ajj + alpha*tmp + } else { + c[i*ldc+j] = abij*ajj + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + abij := alpha * b[i*ldb+j] + aj := a[j*lda : j*lda+j] + bi := b[i*ldb : i*ldb+j] + ci := c[i*ldc : i*ldc+j] + var tmp complex64 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * cmplx.Conj(ajk) + } + ajj := complex(real(a[j*lda+j]), 0) + if beta == 0 { + c[i*ldc+j] = abij*ajj + alpha*tmp + } else { + c[i*ldc+j] = abij*ajj + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Cherk performs one of the hermitian rank-k operations +// C = alpha*A*A^H + beta*C if trans == blas.NoTrans +// C = alpha*A^H*A + beta*C if trans == blas.ConjTrans +// where alpha and beta are real scalars, C is an n×n hermitian matrix and A is +// an n×k matrix in the first case and a k×n matrix in the second case. +// +// The imaginary parts of the diagonal elements of C are assumed to be zero, and +// on return they will be set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cherk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha float32, a []complex64, lda int, beta float32, c []complex64, ldc int) { + var rowA, colA int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + rowA, colA = n, k + case blas.ConjTrans: + rowA, colA = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, colA): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. 
+ if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ci[0] = complex(beta*real(ci[0]), 0) + if i != n-1 { + c64.SscalUnitary(beta, ci[1:]) + } + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + if i != 0 { + c64.SscalUnitary(beta, ci[:i]) + } + ci[i] = complex(beta*real(ci[i]), 0) + } + } + } + return + } + + calpha := complex(alpha, 0) + if trans == blas.NoTrans { + // Form C = alpha*A*A^H + beta*C. + cbeta := complex(beta, 0) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + switch { + case beta == 0: + // Handle the i-th diagonal element of C. + ci[0] = complex(alpha*real(c64.DotcUnitary(ai, ai)), 0) + // Handle the remaining elements on the i-th row of C. + for jc := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha * c64.DotcUnitary(a[j*lda:j*lda+k], ai) + } + case beta != 1: + cii := calpha*c64.DotcUnitary(ai, ai) + cbeta*ci[0] + ci[0] = complex(real(cii), 0) + for jc, cij := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha*c64.DotcUnitary(a[j*lda:j*lda+k], ai) + cbeta*cij + } + default: + cii := calpha*c64.DotcUnitary(ai, ai) + ci[0] + ci[0] = complex(real(cii), 0) + for jc, cij := range ci[1:] { + j := i + 1 + jc + ci[jc+1] = calpha*c64.DotcUnitary(a[j*lda:j*lda+k], ai) + cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + switch { + case beta == 0: + // Handle the first i-1 elements on the i-th row of C. + for j := range ci[:i] { + ci[j] = calpha * c64.DotcUnitary(a[j*lda:j*lda+k], ai) + } + // Handle the i-th diagonal element of C. + ci[i] = complex(alpha*real(c64.DotcUnitary(ai, ai)), 0) + case beta != 1: + for j, cij := range ci[:i] { + ci[j] = calpha*c64.DotcUnitary(a[j*lda:j*lda+k], ai) + cbeta*cij + } + cii := calpha*c64.DotcUnitary(ai, ai) + cbeta*ci[i] + ci[i] = complex(real(cii), 0) + default: + for j, cij := range ci[:i] { + ci[j] = calpha*c64.DotcUnitary(a[j*lda:j*lda+k], ai) + cij + } + cii := calpha*c64.DotcUnitary(ai, ai) + ci[i] + ci[i] = complex(real(cii), 0) + } + } + } + } else { + // Form C = alpha*A^H*A + beta*C. 
+ if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + c64.SscalUnitary(beta, ci) + ci[0] = complex(real(ci[0]), 0) + default: + ci[0] = complex(real(ci[0]), 0) + } + for j := 0; j < k; j++ { + aji := cmplx.Conj(a[j*lda+i]) + if aji != 0 { + c64.AxpyUnitary(calpha*aji, a[j*lda+i:j*lda+n], ci) + } + } + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + c64.SscalUnitary(beta, ci) + ci[i] = complex(real(ci[i]), 0) + default: + ci[i] = complex(real(ci[i]), 0) + } + for j := 0; j < k; j++ { + aji := cmplx.Conj(a[j*lda+i]) + if aji != 0 { + c64.AxpyUnitary(calpha*aji, a[j*lda:j*lda+i+1], ci) + } + } + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } + } +} + +// Cher2k performs one of the hermitian rank-2k operations +// C = alpha*A*B^H + conj(alpha)*B*A^H + beta*C if trans == blas.NoTrans +// C = alpha*A^H*B + conj(alpha)*B^H*A + beta*C if trans == blas.ConjTrans +// where alpha and beta are scalars with beta real, C is an n×n hermitian matrix +// and A and B are n×k matrices in the first case and k×n matrices in the second case. +// +// The imaginary parts of the diagonal elements of C are assumed to be zero, and +// on return they will be set to zero. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Cher2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta float32, c []complex64, ldc int) { + var row, col int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + row, col = n, k + case blas.ConjTrans: + row, col = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, col): + panic(badLdA) + case ldb < max(1, col): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (row-1)*lda+col { + panic(shortA) + } + if len(b) < (row-1)*ldb+col { + panic(shortB) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ci[0] = complex(beta*real(ci[0]), 0) + if i != n-1 { + c64.SscalUnitary(beta, ci[1:]) + } + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + if i != 0 { + c64.SscalUnitary(beta, ci[:i]) + } + ci[i] = complex(beta*real(ci[i]), 0) + } + } + } + return + } + + conjalpha := cmplx.Conj(alpha) + cbeta := complex(beta, 0) + if trans == blas.NoTrans { + // Form C = alpha*A*B^H + conj(alpha)*B*A^H + beta*C. 
+ if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i+1 : i*ldc+n] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + cii := alpha*c64.DotcUnitary(bi, ai) + conjalpha*c64.DotcUnitary(ai, bi) + c[i*ldc+i] = complex(real(cii), 0) + for jc := range ci { + j := i + 1 + jc + ci[jc] = alpha*c64.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c64.DotcUnitary(a[j*lda:j*lda+k], bi) + } + } else { + cii := alpha*c64.DotcUnitary(bi, ai) + conjalpha*c64.DotcUnitary(ai, bi) + cbeta*c[i*ldc+i] + c[i*ldc+i] = complex(real(cii), 0) + for jc, cij := range ci { + j := i + 1 + jc + ci[jc] = alpha*c64.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c64.DotcUnitary(a[j*lda:j*lda+k], bi) + cbeta*cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for j := range ci { + ci[j] = alpha*c64.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c64.DotcUnitary(a[j*lda:j*lda+k], bi) + } + cii := alpha*c64.DotcUnitary(bi, ai) + conjalpha*c64.DotcUnitary(ai, bi) + c[i*ldc+i] = complex(real(cii), 0) + } else { + for j, cij := range ci { + ci[j] = alpha*c64.DotcUnitary(b[j*ldb:j*ldb+k], ai) + conjalpha*c64.DotcUnitary(a[j*lda:j*lda+k], bi) + cbeta*cij + } + cii := alpha*c64.DotcUnitary(bi, ai) + conjalpha*c64.DotcUnitary(ai, bi) + cbeta*c[i*ldc+i] + c[i*ldc+i] = complex(real(cii), 0) + } + } + } + } else { + // Form C = alpha*A^H*B + conj(alpha)*B^H*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + c64.SscalUnitary(beta, ci) + ci[0] = complex(real(ci[0]), 0) + default: + ci[0] = complex(real(ci[0]), 0) + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c64.AxpyUnitary(alpha*cmplx.Conj(aji), b[j*ldb+i:j*ldb+n], ci) + } + if bji != 0 { + c64.AxpyUnitary(conjalpha*cmplx.Conj(bji), a[j*lda+i:j*lda+n], ci) + } + } + ci[0] = complex(real(ci[0]), 0) + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + c64.SscalUnitary(beta, ci) + ci[i] = complex(real(ci[i]), 0) + default: + ci[i] = complex(real(ci[i]), 0) + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c64.AxpyUnitary(alpha*cmplx.Conj(aji), b[j*ldb:j*ldb+i+1], ci) + } + if bji != 0 { + c64.AxpyUnitary(conjalpha*cmplx.Conj(bji), a[j*lda:j*lda+i+1], ci) + } + } + ci[i] = complex(real(ci[i]), 0) + } + } + } +} + +// Csymm performs one of the matrix-matrix operations +// C = alpha*A*B + beta*C if side == blas.Left +// C = alpha*B*A + beta*C if side == blas.Right +// where alpha and beta are scalars, A is an m×m or n×n symmetric matrix and B +// and C are m×n matrices. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Csymm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. 
+ if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(na-1)+na { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ci := c[i*ldc : i*ldc+n] + c64.ScalUnitary(beta, ci) + } + } + return + } + + if side == blas.Left { + // Form C = alpha*A*B + beta*C. + for i := 0; i < m; i++ { + atmp := alpha * a[i*lda+i] + bi := b[i*ldb : i*ldb+n] + ci := c[i*ldc : i*ldc+n] + if beta == 0 { + for j, bij := range bi { + ci[j] = atmp * bij + } + } else { + for j, bij := range bi { + ci[j] = atmp*bij + beta*ci[j] + } + } + if uplo == blas.Upper { + for k := 0; k < i; k++ { + atmp = alpha * a[k*lda+i] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[i*lda+k] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } else { + for k := 0; k < i; k++ { + atmp = alpha * a[i*lda+k] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + for k := i + 1; k < m; k++ { + atmp = alpha * a[k*lda+i] + c64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ci) + } + } + } + } else { + // Form C = alpha*B*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + abij := alpha * b[i*ldb+j] + aj := a[j*lda+j+1 : j*lda+n] + bi := b[i*ldb+j+1 : i*ldb+n] + ci := c[i*ldc+j+1 : i*ldc+n] + var tmp complex64 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * ajk + } + if beta == 0 { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + } else { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + abij := alpha * b[i*ldb+j] + aj := a[j*lda : j*lda+j] + bi := b[i*ldb : i*ldb+j] + ci := c[i*ldc : i*ldc+j] + var tmp complex64 + for k, ajk := range aj { + ci[k] += abij * ajk + tmp += bi[k] * ajk + } + if beta == 0 { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + } else { + c[i*ldc+j] = abij*a[j*lda+j] + alpha*tmp + beta*c[i*ldc+j] + } + } + } + } + } +} + +// Csyrk performs one of the symmetric rank-k operations +// C = alpha*A*A^T + beta*C if trans == blas.NoTrans +// C = alpha*A^T*A + beta*C if trans == blas.Trans +// where alpha and beta are scalars, C is an n×n symmetric matrix and A is +// an n×k matrix in the first case and a k×n matrix in the second case. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Csyrk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex64, a []complex64, lda int, beta complex64, c []complex64, ldc int) { + var rowA, colA int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + rowA, colA = n, k + case blas.Trans: + rowA, colA = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, colA): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (rowA-1)*lda+colA { + panic(shortA) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. 
+ if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + c64.ScalUnitary(beta, ci) + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + c64.ScalUnitary(beta, ci) + } + } + } + return + } + + if trans == blas.NoTrans { + // Form C = alpha*A*A^T + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + for jc, cij := range ci { + j := i + jc + ci[jc] = beta*cij + alpha*c64.DotuUnitary(ai, a[j*lda:j*lda+k]) + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + for j, cij := range ci { + ci[j] = beta*cij + alpha*c64.DotuUnitary(ai, a[j*lda:j*lda+k]) + } + } + } + } else { + // Form C = alpha*A^T*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + for jc := range ci { + ci[jc] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + if aji != 0 { + c64.AxpyUnitary(alpha*aji, a[j*lda+i:j*lda+n], ci) + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + for j := range ci { + ci[j] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + if aji != 0 { + c64.AxpyUnitary(alpha*aji, a[j*lda:j*lda+i+1], ci) + } + } + } + } + } +} + +// Csyr2k performs one of the symmetric rank-2k operations +// C = alpha*A*B^T + alpha*B*A^T + beta*C if trans == blas.NoTrans +// C = alpha*A^T*B + alpha*B^T*A + beta*C if trans == blas.Trans +// where alpha and beta are scalars, C is an n×n symmetric matrix and A and B +// are n×k matrices in the first case and k×n matrices in the second case. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Csyr2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex64, a []complex64, lda int, b []complex64, ldb int, beta complex64, c []complex64, ldc int) { + var row, col int + switch trans { + default: + panic(badTranspose) + case blas.NoTrans: + row, col = n, k + case blas.Trans: + row, col = k, n + } + switch { + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case lda < max(1, col): + panic(badLdA) + case ldb < max(1, col): + panic(badLdB) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (row-1)*lda+col { + panic(shortA) + } + if len(b) < (row-1)*ldb+col { + panic(shortB) + } + if len(c) < (n-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. 
+ if (alpha == 0 || k == 0) && beta == 1 { + return + } + + if alpha == 0 { + if uplo == blas.Upper { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + c64.ScalUnitary(beta, ci) + } + } + } else { + if beta == 0 { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + for j := range ci { + ci[j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + c64.ScalUnitary(beta, ci) + } + } + } + return + } + + if trans == blas.NoTrans { + // Form C = alpha*A*B^T + alpha*B*A^T + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for jc := range ci { + j := i + jc + ci[jc] = alpha*c64.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c64.DotuUnitary(bi, a[j*lda:j*lda+k]) + } + } else { + for jc, cij := range ci { + j := i + jc + ci[jc] = alpha*c64.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c64.DotuUnitary(bi, a[j*lda:j*lda+k]) + beta*cij + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + ai := a[i*lda : i*lda+k] + bi := b[i*ldb : i*ldb+k] + if beta == 0 { + for j := range ci { + ci[j] = alpha*c64.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c64.DotuUnitary(bi, a[j*lda:j*lda+k]) + } + } else { + for j, cij := range ci { + ci[j] = alpha*c64.DotuUnitary(ai, b[j*ldb:j*ldb+k]) + alpha*c64.DotuUnitary(bi, a[j*lda:j*lda+k]) + beta*cij + } + } + } + } + } else { + // Form C = alpha*A^T*B + alpha*B^T*A + beta*C. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + ci := c[i*ldc+i : i*ldc+n] + switch { + case beta == 0: + for jc := range ci { + ci[jc] = 0 + } + case beta != 1: + for jc := range ci { + ci[jc] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c64.AxpyUnitary(alpha*aji, b[j*ldb+i:j*ldb+n], ci) + } + if bji != 0 { + c64.AxpyUnitary(alpha*bji, a[j*lda+i:j*lda+n], ci) + } + } + } + } else { + for i := 0; i < n; i++ { + ci := c[i*ldc : i*ldc+i+1] + switch { + case beta == 0: + for j := range ci { + ci[j] = 0 + } + case beta != 1: + for j := range ci { + ci[j] *= beta + } + } + for j := 0; j < k; j++ { + aji := a[j*lda+i] + bji := b[j*ldb+i] + if aji != 0 { + c64.AxpyUnitary(alpha*aji, b[j*ldb:j*ldb+i+1], ci) + } + if bji != 0 { + c64.AxpyUnitary(alpha*bji, a[j*lda:j*lda+i+1], ci) + } + } + } + } + } +} + +// Ctrmm performs one of the matrix-matrix operations +// B = alpha * op(A) * B if side == blas.Left, +// B = alpha * B * op(A) if side == blas.Right, +// where alpha is a scalar, B is an m×n matrix, A is a unit, or non-unit, +// upper or lower triangular matrix and op(A) is one of +// op(A) = A if trans == blas.NoTrans, +// op(A) = A^T if trans == blas.Trans, +// op(A) = A^H if trans == blas.ConjTrans. +// +// Complex64 implementations are autogenerated and not directly tested. 
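For the right-sided case described above, B = alpha*B*A, each row of B is updated in place against the stored triangle of A. A brief sketch with assumed illustrative values (a 1×2 B and 2×2 upper-triangular A, chosen only to show the calling convention):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/gonum"
)

func main() {
	// Upper-triangular A (2×2, row-major); the strictly lower part is never read.
	a := []complex64{
		2, 1,
		0, 3,
	}
	// B is 1×2 and is overwritten in place with alpha*B*A.
	b := []complex64{1, 1}

	var impl gonum.Implementation
	impl.Ctrmm(blas.Right, blas.Upper, blas.NoTrans, blas.NonUnit, 1, 2, 1, a, 2, b, 2)
	fmt.Println(b) // [(2+0i) (4+0i)]
}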
+func (Implementation) Ctrmm(side blas.Side, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTranspose) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (na-1)*lda+na { + panic(shortA) + } + if len(b) < (m-1)*ldb+n { + panic(shortB) + } + + // Quick return if possible. + if alpha == 0 { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] = 0 + } + } + return + } + + noConj := trans != blas.ConjTrans + noUnit := diag == blas.NonUnit + if side == blas.Left { + if trans == blas.NoTrans { + // Form B = alpha*A*B. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + aii := alpha + if noUnit { + aii *= a[i*lda+i] + } + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] *= aii + } + for ja, aij := range a[i*lda+i+1 : i*lda+m] { + j := ja + i + 1 + if aij != 0 { + c64.AxpyUnitary(alpha*aij, b[j*ldb:j*ldb+n], bi) + } + } + } + } else { + for i := m - 1; i >= 0; i-- { + aii := alpha + if noUnit { + aii *= a[i*lda+i] + } + bi := b[i*ldb : i*ldb+n] + for j := range bi { + bi[j] *= aii + } + for j, aij := range a[i*lda : i*lda+i] { + if aij != 0 { + c64.AxpyUnitary(alpha*aij, b[j*ldb:j*ldb+n], bi) + } + } + } + } + } else { + // Form B = alpha*A^T*B or B = alpha*A^H*B. + if uplo == blas.Upper { + for k := m - 1; k >= 0; k-- { + bk := b[k*ldb : k*ldb+n] + for ja, ajk := range a[k*lda+k+1 : k*lda+m] { + if ajk == 0 { + continue + } + j := k + 1 + ja + if noConj { + c64.AxpyUnitary(alpha*ajk, bk, b[j*ldb:j*ldb+n]) + } else { + c64.AxpyUnitary(alpha*cmplx.Conj(ajk), bk, b[j*ldb:j*ldb+n]) + } + } + akk := alpha + if noUnit { + if noConj { + akk *= a[k*lda+k] + } else { + akk *= cmplx.Conj(a[k*lda+k]) + } + } + if akk != 1 { + c64.ScalUnitary(akk, bk) + } + } + } else { + for k := 0; k < m; k++ { + bk := b[k*ldb : k*ldb+n] + for j, ajk := range a[k*lda : k*lda+k] { + if ajk == 0 { + continue + } + if noConj { + c64.AxpyUnitary(alpha*ajk, bk, b[j*ldb:j*ldb+n]) + } else { + c64.AxpyUnitary(alpha*cmplx.Conj(ajk), bk, b[j*ldb:j*ldb+n]) + } + } + akk := alpha + if noUnit { + if noConj { + akk *= a[k*lda+k] + } else { + akk *= cmplx.Conj(a[k*lda+k]) + } + } + if akk != 1 { + c64.ScalUnitary(akk, bk) + } + } + } + } + } else { + if trans == blas.NoTrans { + // Form B = alpha*B*A. + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for k := n - 1; k >= 0; k-- { + abik := alpha * bi[k] + if abik == 0 { + continue + } + bi[k] = abik + if noUnit { + bi[k] *= a[k*lda+k] + } + c64.AxpyUnitary(abik, a[k*lda+k+1:k*lda+n], bi[k+1:]) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for k := 0; k < n; k++ { + abik := alpha * bi[k] + if abik == 0 { + continue + } + bi[k] = abik + if noUnit { + bi[k] *= a[k*lda+k] + } + c64.AxpyUnitary(abik, a[k*lda:k*lda+k], bi[:k]) + } + } + } + } else { + // Form B = alpha*B*A^T or B = alpha*B*A^H. 
+ if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j, bij := range bi { + if noConj { + if noUnit { + bij *= a[j*lda+j] + } + bij += c64.DotuUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } else { + if noUnit { + bij *= cmplx.Conj(a[j*lda+j]) + } + bij += c64.DotcUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } + bi[j] = alpha * bij + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + bij := bi[j] + if noConj { + if noUnit { + bij *= a[j*lda+j] + } + bij += c64.DotuUnitary(a[j*lda:j*lda+j], bi[:j]) + } else { + if noUnit { + bij *= cmplx.Conj(a[j*lda+j]) + } + bij += c64.DotcUnitary(a[j*lda:j*lda+j], bi[:j]) + } + bi[j] = alpha * bij + } + } + } + } + } +} + +// Ctrsm solves one of the matrix equations +// op(A) * X = alpha * B if side == blas.Left, +// X * op(A) = alpha * B if side == blas.Right, +// where alpha is a scalar, X and B are m×n matrices, A is a unit or +// non-unit, upper or lower triangular matrix and op(A) is one of +// op(A) = A if transA == blas.NoTrans, +// op(A) = A^T if transA == blas.Trans, +// op(A) = A^H if transA == blas.ConjTrans. +// On return the matrix X is overwritten on B. +// +// Complex64 implementations are autogenerated and not directly tested. +func (Implementation) Ctrsm(side blas.Side, uplo blas.Uplo, transA blas.Transpose, diag blas.Diag, m, n int, alpha complex64, a []complex64, lda int, b []complex64, ldb int) { + na := m + if side == blas.Right { + na = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case uplo != blas.Lower && uplo != blas.Upper: + panic(badUplo) + case transA != blas.NoTrans && transA != blas.Trans && transA != blas.ConjTrans: + panic(badTranspose) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, na): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < (na-1)*lda+na { + panic(shortA) + } + if len(b) < (m-1)*ldb+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + b[i*ldb+j] = 0 + } + } + return + } + + noConj := transA != blas.ConjTrans + noUnit := diag == blas.NonUnit + if side == blas.Left { + if transA == blas.NoTrans { + // Form B = alpha*inv(A)*B. + if uplo == blas.Upper { + for i := m - 1; i >= 0; i-- { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + for ka, aik := range a[i*lda+i+1 : i*lda+m] { + k := i + 1 + ka + if aik != 0 { + c64.AxpyUnitary(-aik, b[k*ldb:k*ldb+n], bi) + } + } + if noUnit { + c64.ScalUnitary(1/a[i*lda+i], bi) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + for j, aij := range a[i*lda : i*lda+i] { + if aij != 0 { + c64.AxpyUnitary(-aij, b[j*ldb:j*ldb+n], bi) + } + } + if noUnit { + c64.ScalUnitary(1/a[i*lda+i], bi) + } + } + } + } else { + // Form B = alpha*inv(A^T)*B or B = alpha*inv(A^H)*B. 
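+ // When A is upper triangular, A^T (and A^H) is lower triangular, so rows of
+ // B are eliminated top to bottom: bi is scaled by the inverse of the diagonal
+ // entry and then subtracted from the rows below it before the final scaling
+ // by alpha.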
+ if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if noUnit { + if noConj { + c64.ScalUnitary(1/a[i*lda+i], bi) + } else { + c64.ScalUnitary(1/cmplx.Conj(a[i*lda+i]), bi) + } + } + for ja, aij := range a[i*lda+i+1 : i*lda+m] { + if aij == 0 { + continue + } + j := i + 1 + ja + if noConj { + c64.AxpyUnitary(-aij, bi, b[j*ldb:j*ldb+n]) + } else { + c64.AxpyUnitary(-cmplx.Conj(aij), bi, b[j*ldb:j*ldb+n]) + } + } + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + } + } else { + for i := m - 1; i >= 0; i-- { + bi := b[i*ldb : i*ldb+n] + if noUnit { + if noConj { + c64.ScalUnitary(1/a[i*lda+i], bi) + } else { + c64.ScalUnitary(1/cmplx.Conj(a[i*lda+i]), bi) + } + } + for j, aij := range a[i*lda : i*lda+i] { + if aij == 0 { + continue + } + if noConj { + c64.AxpyUnitary(-aij, bi, b[j*ldb:j*ldb+n]) + } else { + c64.AxpyUnitary(-cmplx.Conj(aij), bi, b[j*ldb:j*ldb+n]) + } + } + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + } + } + } + } else { + if transA == blas.NoTrans { + // Form B = alpha*B*inv(A). + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + for j, bij := range bi { + if bij == 0 { + continue + } + if noUnit { + bi[j] /= a[j*lda+j] + } + c64.AxpyUnitary(-bi[j], a[j*lda+j+1:j*lda+n], bi[j+1:n]) + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + if alpha != 1 { + c64.ScalUnitary(alpha, bi) + } + for j := n - 1; j >= 0; j-- { + if bi[j] == 0 { + continue + } + if noUnit { + bi[j] /= a[j*lda+j] + } + c64.AxpyUnitary(-bi[j], a[j*lda:j*lda+j], bi[:j]) + } + } + } + } else { + // Form B = alpha*B*inv(A^T) or B = alpha*B*inv(A^H). + if uplo == blas.Upper { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + bij := alpha * bi[j] + if noConj { + bij -= c64.DotuUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + if noUnit { + bij /= a[j*lda+j] + } + } else { + bij -= c64.DotcUnitary(a[j*lda+j+1:j*lda+n], bi[j+1:n]) + if noUnit { + bij /= cmplx.Conj(a[j*lda+j]) + } + } + bi[j] = bij + } + } + } else { + for i := 0; i < m; i++ { + bi := b[i*ldb : i*ldb+n] + for j, bij := range bi { + bij *= alpha + if noConj { + bij -= c64.DotuUnitary(a[j*lda:j*lda+j], bi[:j]) + if noUnit { + bij /= a[j*lda+j] + } + } else { + bij -= c64.DotcUnitary(a[j*lda:j*lda+j], bi[:j]) + if noUnit { + bij /= cmplx.Conj(a[j*lda+j]) + } + } + bi[j] = bij + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level3float32.go b/vendor/gonum.org/v1/gonum/blas/gonum/level3float32.go new file mode 100644 index 0000000..13c4a79 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level3float32.go @@ -0,0 +1,876 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f32" +) + +var _ blas.Float32Level3 = Implementation{} + +// Strsm solves one of the matrix equations +// A * X = alpha * B if tA == blas.NoTrans and side == blas.Left +// A^T * X = alpha * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Left +// X * A = alpha * B if tA == blas.NoTrans and side == blas.Right +// X * A^T = alpha * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Right +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and alpha is a +// scalar. 
+// +// At entry to the function, X contains the values of B, and the result is +// stored in-place into X. +// +// No check is made that A is invertible. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Strsm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int) { + if s != blas.Left && s != blas.Right { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := range btmp { + btmp[j] = 0 + } + } + return + } + nonUnit := d == blas.NonUnit + if s == blas.Left { + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := m - 1; i >= 0; i-- { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f32.ScalUnitary(alpha, btmp) + } + for ka, va := range a[i*lda+i+1 : i*lda+m] { + if va != 0 { + k := ka + i + 1 + f32.AxpyUnitary(-va, b[k*ldb:k*ldb+n], btmp) + } + } + if nonUnit { + tmp := 1 / a[i*lda+i] + f32.ScalUnitary(tmp, btmp) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f32.ScalUnitary(alpha, btmp) + } + for k, va := range a[i*lda : i*lda+i] { + if va != 0 { + f32.AxpyUnitary(-va, b[k*ldb:k*ldb+n], btmp) + } + } + if nonUnit { + tmp := 1 / a[i*lda+i] + f32.ScalUnitary(tmp, btmp) + } + } + return + } + // Cases where a is transposed + if ul == blas.Upper { + for k := 0; k < m; k++ { + btmpk := b[k*ldb : k*ldb+n] + if nonUnit { + tmp := 1 / a[k*lda+k] + f32.ScalUnitary(tmp, btmpk) + } + for ia, va := range a[k*lda+k+1 : k*lda+m] { + if va != 0 { + i := ia + k + 1 + f32.AxpyUnitary(-va, btmpk, b[i*ldb:i*ldb+n]) + } + } + if alpha != 1 { + f32.ScalUnitary(alpha, btmpk) + } + } + return + } + for k := m - 1; k >= 0; k-- { + btmpk := b[k*ldb : k*ldb+n] + if nonUnit { + tmp := 1 / a[k*lda+k] + f32.ScalUnitary(tmp, btmpk) + } + for i, va := range a[k*lda : k*lda+k] { + if va != 0 { + f32.AxpyUnitary(-va, btmpk, b[i*ldb:i*ldb+n]) + } + } + if alpha != 1 { + f32.ScalUnitary(alpha, btmpk) + } + } + return + } + // Cases where a is to the right of X. + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f32.ScalUnitary(alpha, btmp) + } + for k, vb := range btmp { + if vb == 0 { + continue + } + if nonUnit { + btmp[k] /= a[k*lda+k] + } + f32.AxpyUnitary(-btmp[k], a[k*lda+k+1:k*lda+n], btmp[k+1:n]) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f32.ScalUnitary(alpha, btmp) + } + for k := n - 1; k >= 0; k-- { + if btmp[k] == 0 { + continue + } + if nonUnit { + btmp[k] /= a[k*lda+k] + } + f32.AxpyUnitary(-btmp[k], a[k*lda:k*lda+k], btmp[:k]) + } + } + return + } + // Cases where a is transposed. 
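+ // Each row of B is solved against A^T independently. For upper-triangular A
+ // this is back substitution: btmp[j] only needs the entries btmp[j+1:] that
+ // have already been resolved.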
+ if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + tmp := alpha*btmp[j] - f32.DotUnitary(a[j*lda+j+1:j*lda+n], btmp[j+1:]) + if nonUnit { + tmp /= a[j*lda+j] + } + btmp[j] = tmp + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := 0; j < n; j++ { + tmp := alpha*btmp[j] - f32.DotUnitary(a[j*lda:j*lda+j], btmp[:j]) + if nonUnit { + tmp /= a[j*lda+j] + } + btmp[j] = tmp + } + } +} + +// Ssymm performs one of the matrix-matrix operations +// C = alpha * A * B + beta * C if side == blas.Left +// C = alpha * B * A + beta * C if side == blas.Right +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and alpha +// is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssymm(s blas.Side, ul blas.Uplo, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) { + if s != blas.Right && s != blas.Left { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := 0; j < n; j++ { + ctmp[j] *= beta + } + } + return + } + + isUpper := ul == blas.Upper + if s == blas.Left { + for i := 0; i < m; i++ { + atmp := alpha * a[i*lda+i] + btmp := b[i*ldb : i*ldb+n] + ctmp := c[i*ldc : i*ldc+n] + for j, v := range btmp { + ctmp[j] *= beta + ctmp[j] += atmp * v + } + + for k := 0; k < i; k++ { + var atmp float32 + if isUpper { + atmp = a[k*lda+i] + } else { + atmp = a[i*lda+k] + } + atmp *= alpha + f32.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ctmp) + } + for k := i + 1; k < m; k++ { + var atmp float32 + if isUpper { + atmp = a[i*lda+k] + } else { + atmp = a[k*lda+i] + } + atmp *= alpha + f32.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ctmp) + } + } + return + } + if isUpper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + tmp := alpha * b[i*ldb+j] + var tmp2 float32 + atmp := a[j*lda+j+1 : j*lda+n] + btmp := b[i*ldb+j+1 : i*ldb+n] + ctmp := c[i*ldc+j+1 : i*ldc+n] + for k, v := range atmp { + ctmp[k] += tmp * v + tmp2 += btmp[k] * v + } + c[i*ldc+j] *= beta + c[i*ldc+j] += tmp*a[j*lda+j] + alpha*tmp2 + } + } + return + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + tmp := alpha * b[i*ldb+j] + var tmp2 float32 + atmp := a[j*lda : j*lda+j] + btmp := b[i*ldb : i*ldb+j] + ctmp := c[i*ldc : i*ldc+j] + for k, v := range atmp { + ctmp[k] += tmp * v + tmp2 += btmp[k] * v + } + c[i*ldc+j] *= beta + c[i*ldc+j] += tmp*a[j*lda+j] + alpha*tmp2 + } + } +} + +// Ssyrk performs one of the symmetric rank-k operations +// C = alpha * A * A^T + beta * C if tA == blas.NoTrans +// C = alpha * A^T * A + beta * C if tA == blas.Trans or tA == blas.ConjTrans +// where A is 
an n×k or k×n matrix, C is an n×n symmetric matrix, and alpha and +// beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Ssyrk(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float32, a []float32, lda int, beta float32, c []float32, ldc int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.Trans && tA != blas.NoTrans && tA != blas.ConjTrans { + panic(badTranspose) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + row, col := k, n + if tA == blas.NoTrans { + row, col = n, k + } + if lda < max(1, col) { + panic(badLdA) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(row-1)+col { + panic(shortA) + } + if len(c) < ldc*(n-1)+n { + panic(shortC) + } + + if alpha == 0 { + if beta == 0 { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + atmp := a[i*lda : i*lda+k] + if beta == 0 { + for jc := range ctmp { + j := jc + i + ctmp[jc] = alpha * f32.DotUnitary(atmp, a[j*lda:j*lda+k]) + } + } else { + for jc, vc := range ctmp { + j := jc + i + ctmp[jc] = vc*beta + alpha*f32.DotUnitary(atmp, a[j*lda:j*lda+k]) + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + atmp := a[i*lda : i*lda+k] + if beta == 0 { + for j := range ctmp { + ctmp[j] = alpha * f32.DotUnitary(a[j*lda:j*lda+k], atmp) + } + } else { + for j, vc := range ctmp { + ctmp[j] = vc*beta + alpha*f32.DotUnitary(a[j*lda:j*lda+k], atmp) + } + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + if beta == 0 { + for j := range ctmp { + ctmp[j] = 0 + } + } else if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[l*lda+i] + if tmp != 0 { + f32.AxpyUnitary(tmp, a[l*lda+i:l*lda+n], ctmp) + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[l*lda+i] + if tmp != 0 { + f32.AxpyUnitary(tmp, a[l*lda:l*lda+i+1], ctmp) + } + } + } +} + +// Ssyr2k performs one of the symmetric rank 2k operations +// C = alpha * A * B^T + alpha * B * A^T + beta * C if tA == blas.NoTrans +// C = alpha * A^T * B + alpha * B^T * A + beta * C if tA == blas.Trans or tA == blas.ConjTrans +// where A and B are n×k or k×n matrices, C is an n×n symmetric matrix, and +// alpha and beta are scalars. +// +// Float32 implementations are autogenerated and not directly tested. 
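+//
+// A minimal usage sketch (illustrative, not part of the generated source):
+//  var impl Implementation
+//  a := []float32{1, 2}    // 2×1 A
+//  b := []float32{3, 4}    // 2×1 B
+//  c := make([]float32, 4) // 2×2 C
+//  impl.Ssyr2k(blas.Upper, blas.NoTrans, 2, 1, 1, a, 1, b, 1, 0, c, 2)
+//  // The upper triangle of c now holds A*B^T + B*A^T: c is {6, 10, 0, 16}.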
+func (Implementation) Ssyr2k(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.Trans && tA != blas.NoTrans && tA != blas.ConjTrans { + panic(badTranspose) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + row, col := k, n + if tA == blas.NoTrans { + row, col = n, k + } + if lda < max(1, col) { + panic(badLdA) + } + if ldb < max(1, col) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(row-1)+col { + panic(shortA) + } + if len(b) < ldb*(row-1)+col { + panic(shortB) + } + if len(c) < ldc*(n-1)+n { + panic(shortC) + } + + if alpha == 0 { + if beta == 0 { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < n; i++ { + atmp := a[i*lda : i*lda+k] + btmp := b[i*ldb : i*ldb+k] + ctmp := c[i*ldc+i : i*ldc+n] + for jc := range ctmp { + j := i + jc + var tmp1, tmp2 float32 + binner := b[j*ldb : j*ldb+k] + for l, v := range a[j*lda : j*lda+k] { + tmp1 += v * btmp[l] + tmp2 += atmp[l] * binner[l] + } + ctmp[jc] *= beta + ctmp[jc] += alpha * (tmp1 + tmp2) + } + } + return + } + for i := 0; i < n; i++ { + atmp := a[i*lda : i*lda+k] + btmp := b[i*ldb : i*ldb+k] + ctmp := c[i*ldc : i*ldc+i+1] + for j := 0; j <= i; j++ { + var tmp1, tmp2 float32 + binner := b[j*ldb : j*ldb+k] + for l, v := range a[j*lda : j*lda+k] { + tmp1 += v * btmp[l] + tmp2 += atmp[l] * binner[l] + } + ctmp[j] *= beta + ctmp[j] += alpha * (tmp1 + tmp2) + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp1 := alpha * b[l*ldb+i] + tmp2 := alpha * a[l*lda+i] + btmp := b[l*ldb+i : l*ldb+n] + if tmp1 != 0 || tmp2 != 0 { + for j, v := range a[l*lda+i : l*lda+n] { + ctmp[j] += v*tmp1 + btmp[j]*tmp2 + } + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp1 := alpha * b[l*ldb+i] + tmp2 := alpha * a[l*lda+i] + btmp := b[l*ldb : l*ldb+i+1] + if tmp1 != 0 || tmp2 != 0 { + for j, v := range a[l*lda : l*lda+i+1] { + ctmp[j] += v*tmp1 + btmp[j]*tmp2 + } + } + } + } +} + +// Strmm performs one of the matrix-matrix operations +// B = alpha * A * B if tA == blas.NoTrans and side == blas.Left +// B = alpha * A^T * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Left +// B = alpha * B * A if tA == blas.NoTrans and side == blas.Right +// B = alpha * B * A^T if tA == blas.Trans or blas.ConjTrans, and side == blas.Right +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is a scalar. +// +// Float32 implementations are autogenerated and not directly tested. 
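+//
+// A minimal usage sketch of the side == blas.Right case (illustrative, not
+// part of the generated source):
+//  var impl Implementation
+//  a := []float32{1, 2, 0, 3} // 2×2 upper-triangular A
+//  b := []float32{1, 1}       // 1×2 B, overwritten in place
+//  impl.Strmm(blas.Right, blas.Upper, blas.NoTrans, blas.NonUnit, 1, 2, 1, a, 2, b, 2)
+//  // b now holds B*A = {1, 5}.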
+func (Implementation) Strmm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, alpha float32, a []float32, lda int, b []float32, ldb int) { + if s != blas.Left && s != blas.Right { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := range btmp { + btmp[j] = 0 + } + } + return + } + + nonUnit := d == blas.NonUnit + if s == blas.Left { + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + tmp := alpha + if nonUnit { + tmp *= a[i*lda+i] + } + btmp := b[i*ldb : i*ldb+n] + f32.ScalUnitary(tmp, btmp) + for ka, va := range a[i*lda+i+1 : i*lda+m] { + k := ka + i + 1 + if va != 0 { + f32.AxpyUnitary(alpha*va, b[k*ldb:k*ldb+n], btmp) + } + } + } + return + } + for i := m - 1; i >= 0; i-- { + tmp := alpha + if nonUnit { + tmp *= a[i*lda+i] + } + btmp := b[i*ldb : i*ldb+n] + f32.ScalUnitary(tmp, btmp) + for k, va := range a[i*lda : i*lda+i] { + if va != 0 { + f32.AxpyUnitary(alpha*va, b[k*ldb:k*ldb+n], btmp) + } + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for k := m - 1; k >= 0; k-- { + btmpk := b[k*ldb : k*ldb+n] + for ia, va := range a[k*lda+k+1 : k*lda+m] { + i := ia + k + 1 + btmp := b[i*ldb : i*ldb+n] + if va != 0 { + f32.AxpyUnitary(alpha*va, btmpk, btmp) + } + } + tmp := alpha + if nonUnit { + tmp *= a[k*lda+k] + } + if tmp != 1 { + f32.ScalUnitary(tmp, btmpk) + } + } + return + } + for k := 0; k < m; k++ { + btmpk := b[k*ldb : k*ldb+n] + for i, va := range a[k*lda : k*lda+k] { + btmp := b[i*ldb : i*ldb+n] + if va != 0 { + f32.AxpyUnitary(alpha*va, btmpk, btmp) + } + } + tmp := alpha + if nonUnit { + tmp *= a[k*lda+k] + } + if tmp != 1 { + f32.ScalUnitary(tmp, btmpk) + } + } + return + } + // Cases where a is on the right + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for k := n - 1; k >= 0; k-- { + tmp := alpha * btmp[k] + if tmp == 0 { + continue + } + btmp[k] = tmp + if nonUnit { + btmp[k] *= a[k*lda+k] + } + f32.AxpyUnitary(tmp, a[k*lda+k+1:k*lda+n], btmp[k+1:n]) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for k := 0; k < n; k++ { + tmp := alpha * btmp[k] + if tmp == 0 { + continue + } + btmp[k] = tmp + if nonUnit { + btmp[k] *= a[k*lda+k] + } + f32.AxpyUnitary(tmp, a[k*lda:k*lda+k], btmp[:k]) + } + } + return + } + // Cases where a is transposed. 
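+ // For B = alpha*B*A^T with A upper triangular, column j of the product
+ // depends only on btmp[j:], so each row may be overwritten left to right
+ // without clobbering values that are still needed.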
+ if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j, vb := range btmp { + tmp := vb + if nonUnit { + tmp *= a[j*lda+j] + } + tmp += f32.DotUnitary(a[j*lda+j+1:j*lda+n], btmp[j+1:n]) + btmp[j] = alpha * tmp + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + tmp := btmp[j] + if nonUnit { + tmp *= a[j*lda+j] + } + tmp += f32.DotUnitary(a[j*lda:j*lda+j], btmp[:j]) + btmp[j] = alpha * tmp + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/level3float64.go b/vendor/gonum.org/v1/gonum/blas/gonum/level3float64.go new file mode 100644 index 0000000..9eebd90 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/level3float64.go @@ -0,0 +1,864 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f64" +) + +var _ blas.Float64Level3 = Implementation{} + +// Dtrsm solves one of the matrix equations +// A * X = alpha * B if tA == blas.NoTrans and side == blas.Left +// A^T * X = alpha * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Left +// X * A = alpha * B if tA == blas.NoTrans and side == blas.Right +// X * A^T = alpha * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Right +// where A is an n×n or m×m triangular matrix, X and B are m×n matrices, and alpha is a +// scalar. +// +// At entry to the function, X contains the values of B, and the result is +// stored in-place into X. +// +// No check is made that A is invertible. +func (Implementation) Dtrsm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int) { + if s != blas.Left && s != blas.Right { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := range btmp { + btmp[j] = 0 + } + } + return + } + nonUnit := d == blas.NonUnit + if s == blas.Left { + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := m - 1; i >= 0; i-- { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f64.ScalUnitary(alpha, btmp) + } + for ka, va := range a[i*lda+i+1 : i*lda+m] { + if va != 0 { + k := ka + i + 1 + f64.AxpyUnitary(-va, b[k*ldb:k*ldb+n], btmp) + } + } + if nonUnit { + tmp := 1 / a[i*lda+i] + f64.ScalUnitary(tmp, btmp) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f64.ScalUnitary(alpha, btmp) + } + for k, va := range a[i*lda : i*lda+i] { + if va != 0 { + f64.AxpyUnitary(-va, b[k*ldb:k*ldb+n], btmp) + } + } + if nonUnit { + tmp := 1 / a[i*lda+i] + f64.ScalUnitary(tmp, btmp) + } + } + return + } + // Cases where a is transposed + if ul == blas.Upper { + for k := 0; k < m; k++ { + btmpk := b[k*ldb : k*ldb+n] + if nonUnit { + tmp := 1 / a[k*lda+k] + f64.ScalUnitary(tmp, btmpk) + } + for ia, va := range a[k*lda+k+1 : k*lda+m] { + if va != 0 { + i := ia + k + 1 + f64.AxpyUnitary(-va, btmpk, b[i*ldb:i*ldb+n]) + } + } + if alpha != 1 { + f64.ScalUnitary(alpha, btmpk) + } + } + return + } + for k := m - 1; k >= 0; k-- { + btmpk := b[k*ldb : k*ldb+n] + if nonUnit { + tmp := 1 / a[k*lda+k] + f64.ScalUnitary(tmp, btmpk) + } + for i, va := range a[k*lda : k*lda+k] { + if va != 0 { + f64.AxpyUnitary(-va, btmpk, b[i*ldb:i*ldb+n]) + } + } + if alpha != 1 { + f64.ScalUnitary(alpha, btmpk) + } + } + return + } + // Cases where a is to the right of X. + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f64.ScalUnitary(alpha, btmp) + } + for k, vb := range btmp { + if vb == 0 { + continue + } + if nonUnit { + btmp[k] /= a[k*lda+k] + } + f64.AxpyUnitary(-btmp[k], a[k*lda+k+1:k*lda+n], btmp[k+1:n]) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + if alpha != 1 { + f64.ScalUnitary(alpha, btmp) + } + for k := n - 1; k >= 0; k-- { + if btmp[k] == 0 { + continue + } + if nonUnit { + btmp[k] /= a[k*lda+k] + } + f64.AxpyUnitary(-btmp[k], a[k*lda:k*lda+k], btmp[:k]) + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + tmp := alpha*btmp[j] - f64.DotUnitary(a[j*lda+j+1:j*lda+n], btmp[j+1:]) + if nonUnit { + tmp /= a[j*lda+j] + } + btmp[j] = tmp + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := 0; j < n; j++ { + tmp := alpha*btmp[j] - f64.DotUnitary(a[j*lda:j*lda+j], btmp[:j]) + if nonUnit { + tmp /= a[j*lda+j] + } + btmp[j] = tmp + } + } +} + +// Dsymm performs one of the matrix-matrix operations +// C = alpha * A * B + beta * C if side == blas.Left +// C = alpha * B * A + beta * C if side == blas.Right +// where A is an n×n or m×m symmetric matrix, B and C are m×n matrices, and alpha +// is a scalar. 
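+//
+// A minimal usage sketch (illustrative, not part of the source):
+//  var impl Implementation
+//  a := []float64{1, 2, 0, 3} // upper triangle of the symmetric A = [[1, 2], [2, 3]]
+//  b := []float64{1, 0, 0, 1} // 2×2 identity
+//  c := make([]float64, 4)
+//  impl.Dsymm(blas.Left, blas.Upper, 2, 2, 1, a, 2, b, 2, 0, c, 2)
+//  // c now holds the full symmetric A: {1, 2, 2, 3}.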
+func (Implementation) Dsymm(s blas.Side, ul blas.Uplo, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) { + if s != blas.Right && s != blas.Left { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + if len(c) < ldc*(m-1)+n { + panic(shortC) + } + + // Quick return if possible. + if alpha == 0 && beta == 1 { + return + } + + if alpha == 0 { + if beta == 0 { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := 0; j < n; j++ { + ctmp[j] *= beta + } + } + return + } + + isUpper := ul == blas.Upper + if s == blas.Left { + for i := 0; i < m; i++ { + atmp := alpha * a[i*lda+i] + btmp := b[i*ldb : i*ldb+n] + ctmp := c[i*ldc : i*ldc+n] + for j, v := range btmp { + ctmp[j] *= beta + ctmp[j] += atmp * v + } + + for k := 0; k < i; k++ { + var atmp float64 + if isUpper { + atmp = a[k*lda+i] + } else { + atmp = a[i*lda+k] + } + atmp *= alpha + f64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ctmp) + } + for k := i + 1; k < m; k++ { + var atmp float64 + if isUpper { + atmp = a[i*lda+k] + } else { + atmp = a[k*lda+i] + } + atmp *= alpha + f64.AxpyUnitary(atmp, b[k*ldb:k*ldb+n], ctmp) + } + } + return + } + if isUpper { + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + tmp := alpha * b[i*ldb+j] + var tmp2 float64 + atmp := a[j*lda+j+1 : j*lda+n] + btmp := b[i*ldb+j+1 : i*ldb+n] + ctmp := c[i*ldc+j+1 : i*ldc+n] + for k, v := range atmp { + ctmp[k] += tmp * v + tmp2 += btmp[k] * v + } + c[i*ldc+j] *= beta + c[i*ldc+j] += tmp*a[j*lda+j] + alpha*tmp2 + } + } + return + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + tmp := alpha * b[i*ldb+j] + var tmp2 float64 + atmp := a[j*lda : j*lda+j] + btmp := b[i*ldb : i*ldb+j] + ctmp := c[i*ldc : i*ldc+j] + for k, v := range atmp { + ctmp[k] += tmp * v + tmp2 += btmp[k] * v + } + c[i*ldc+j] *= beta + c[i*ldc+j] += tmp*a[j*lda+j] + alpha*tmp2 + } + } +} + +// Dsyrk performs one of the symmetric rank-k operations +// C = alpha * A * A^T + beta * C if tA == blas.NoTrans +// C = alpha * A^T * A + beta * C if tA == blas.Trans or tA == blas.ConjTrans +// where A is an n×k or k×n matrix, C is an n×n symmetric matrix, and alpha and +// beta are scalars. +func (Implementation) Dsyrk(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float64, a []float64, lda int, beta float64, c []float64, ldc int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.Trans && tA != blas.NoTrans && tA != blas.ConjTrans { + panic(badTranspose) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + row, col := k, n + if tA == blas.NoTrans { + row, col = n, k + } + if lda < max(1, col) { + panic(badLdA) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(row-1)+col { + panic(shortA) + } + if len(c) < ldc*(n-1)+n { + panic(shortC) + } + + if alpha == 0 { + if beta == 0 { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + atmp := a[i*lda : i*lda+k] + if beta == 0 { + for jc := range ctmp { + j := jc + i + ctmp[jc] = alpha * f64.DotUnitary(atmp, a[j*lda:j*lda+k]) + } + } else { + for jc, vc := range ctmp { + j := jc + i + ctmp[jc] = vc*beta + alpha*f64.DotUnitary(atmp, a[j*lda:j*lda+k]) + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + atmp := a[i*lda : i*lda+k] + if beta == 0 { + for j := range ctmp { + ctmp[j] = alpha * f64.DotUnitary(a[j*lda:j*lda+k], atmp) + } + } else { + for j, vc := range ctmp { + ctmp[j] = vc*beta + alpha*f64.DotUnitary(a[j*lda:j*lda+k], atmp) + } + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + if beta == 0 { + for j := range ctmp { + ctmp[j] = 0 + } + } else if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[l*lda+i] + if tmp != 0 { + f64.AxpyUnitary(tmp, a[l*lda+i:l*lda+n], ctmp) + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp := alpha * a[l*lda+i] + if tmp != 0 { + f64.AxpyUnitary(tmp, a[l*lda:l*lda+i+1], ctmp) + } + } + } +} + +// Dsyr2k performs one of the symmetric rank 2k operations +// C = alpha * A * B^T + alpha * B * A^T + beta * C if tA == blas.NoTrans +// C = alpha * A^T * B + alpha * B^T * A + beta * C if tA == blas.Trans or tA == blas.ConjTrans +// where A and B are n×k or k×n matrices, C is an n×n symmetric matrix, and +// alpha and beta are scalars. +func (Implementation) Dsyr2k(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) { + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.Trans && tA != blas.NoTrans && tA != blas.ConjTrans { + panic(badTranspose) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + row, col := k, n + if tA == blas.NoTrans { + row, col = n, k + } + if lda < max(1, col) { + panic(badLdA) + } + if ldb < max(1, col) { + panic(badLdB) + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(row-1)+col { + panic(shortA) + } + if len(b) < ldb*(row-1)+col { + panic(shortB) + } + if len(c) < ldc*(n-1)+n { + panic(shortC) + } + + if alpha == 0 { + if beta == 0 { + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] = 0 + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + for j := range ctmp { + ctmp[j] *= beta + } + } + return + } + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < n; i++ { + atmp := a[i*lda : i*lda+k] + btmp := b[i*ldb : i*ldb+k] + ctmp := c[i*ldc+i : i*ldc+n] + for jc := range ctmp { + j := i + jc + var tmp1, tmp2 float64 + binner := b[j*ldb : j*ldb+k] + for l, v := range a[j*lda : j*lda+k] { + tmp1 += v * btmp[l] + tmp2 += atmp[l] * binner[l] + } + ctmp[jc] *= beta + ctmp[jc] += alpha * (tmp1 + tmp2) + } + } + return + } + for i := 0; i < n; i++ { + atmp := a[i*lda : i*lda+k] + btmp := b[i*ldb : i*ldb+k] + ctmp := c[i*ldc : i*ldc+i+1] + for j := 0; j <= i; j++ { + var tmp1, tmp2 float64 + binner := b[j*ldb : j*ldb+k] + for l, v := range a[j*lda : j*lda+k] { + tmp1 += v * btmp[l] + tmp2 += atmp[l] * binner[l] + } + ctmp[j] *= beta + ctmp[j] += alpha * (tmp1 + tmp2) + } + } + return + } + if ul == blas.Upper { + for i := 0; i < n; i++ { + ctmp := c[i*ldc+i : i*ldc+n] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp1 := alpha * b[l*ldb+i] + tmp2 := alpha * a[l*lda+i] + btmp := b[l*ldb+i : l*ldb+n] + if tmp1 != 0 || tmp2 != 0 { + for j, v := range a[l*lda+i : l*lda+n] { + ctmp[j] += v*tmp1 + btmp[j]*tmp2 + } + } + } + } + return + } + for i := 0; i < n; i++ { + ctmp := c[i*ldc : i*ldc+i+1] + if beta != 1 { + for j := range ctmp { + ctmp[j] *= beta + } + } + for l := 0; l < k; l++ { + tmp1 := alpha * b[l*ldb+i] + tmp2 := alpha * a[l*lda+i] + btmp := b[l*ldb : l*ldb+i+1] + if tmp1 != 0 || tmp2 != 0 { + for j, v := range a[l*lda : l*lda+i+1] { + ctmp[j] += v*tmp1 + btmp[j]*tmp2 + } + } + } + } +} + +// Dtrmm performs one of the matrix-matrix operations +// B = alpha * A * B if tA == blas.NoTrans and side == blas.Left +// B = alpha * A^T * B if tA == blas.Trans or blas.ConjTrans, and side == blas.Left +// B = alpha * B * A if tA == blas.NoTrans and side == blas.Right +// B = alpha * B * A^T if tA == blas.Trans or blas.ConjTrans, and side == blas.Right +// where A is an n×n or m×m triangular matrix, B is an m×n matrix, and alpha is a scalar. +func (Implementation) Dtrmm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int) { + if s != blas.Left && s != blas.Right { + panic(badSide) + } + if ul != blas.Lower && ul != blas.Upper { + panic(badUplo) + } + if tA != blas.NoTrans && tA != blas.Trans && tA != blas.ConjTrans { + panic(badTranspose) + } + if d != blas.NonUnit && d != blas.Unit { + panic(badDiag) + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + k := n + if s == blas.Left { + k = m + } + if lda < max(1, k) { + panic(badLdA) + } + if ldb < max(1, n) { + panic(badLdB) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. 
+ if len(a) < lda*(k-1)+k { + panic(shortA) + } + if len(b) < ldb*(m-1)+n { + panic(shortB) + } + + if alpha == 0 { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := range btmp { + btmp[j] = 0 + } + } + return + } + + nonUnit := d == blas.NonUnit + if s == blas.Left { + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + tmp := alpha + if nonUnit { + tmp *= a[i*lda+i] + } + btmp := b[i*ldb : i*ldb+n] + f64.ScalUnitary(tmp, btmp) + for ka, va := range a[i*lda+i+1 : i*lda+m] { + k := ka + i + 1 + if va != 0 { + f64.AxpyUnitary(alpha*va, b[k*ldb:k*ldb+n], btmp) + } + } + } + return + } + for i := m - 1; i >= 0; i-- { + tmp := alpha + if nonUnit { + tmp *= a[i*lda+i] + } + btmp := b[i*ldb : i*ldb+n] + f64.ScalUnitary(tmp, btmp) + for k, va := range a[i*lda : i*lda+i] { + if va != 0 { + f64.AxpyUnitary(alpha*va, b[k*ldb:k*ldb+n], btmp) + } + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for k := m - 1; k >= 0; k-- { + btmpk := b[k*ldb : k*ldb+n] + for ia, va := range a[k*lda+k+1 : k*lda+m] { + i := ia + k + 1 + btmp := b[i*ldb : i*ldb+n] + if va != 0 { + f64.AxpyUnitary(alpha*va, btmpk, btmp) + } + } + tmp := alpha + if nonUnit { + tmp *= a[k*lda+k] + } + if tmp != 1 { + f64.ScalUnitary(tmp, btmpk) + } + } + return + } + for k := 0; k < m; k++ { + btmpk := b[k*ldb : k*ldb+n] + for i, va := range a[k*lda : k*lda+k] { + btmp := b[i*ldb : i*ldb+n] + if va != 0 { + f64.AxpyUnitary(alpha*va, btmpk, btmp) + } + } + tmp := alpha + if nonUnit { + tmp *= a[k*lda+k] + } + if tmp != 1 { + f64.ScalUnitary(tmp, btmpk) + } + } + return + } + // Cases where a is on the right + if tA == blas.NoTrans { + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for k := n - 1; k >= 0; k-- { + tmp := alpha * btmp[k] + if tmp == 0 { + continue + } + btmp[k] = tmp + if nonUnit { + btmp[k] *= a[k*lda+k] + } + f64.AxpyUnitary(tmp, a[k*lda+k+1:k*lda+n], btmp[k+1:n]) + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for k := 0; k < n; k++ { + tmp := alpha * btmp[k] + if tmp == 0 { + continue + } + btmp[k] = tmp + if nonUnit { + btmp[k] *= a[k*lda+k] + } + f64.AxpyUnitary(tmp, a[k*lda:k*lda+k], btmp[:k]) + } + } + return + } + // Cases where a is transposed. + if ul == blas.Upper { + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j, vb := range btmp { + tmp := vb + if nonUnit { + tmp *= a[j*lda+j] + } + tmp += f64.DotUnitary(a[j*lda+j+1:j*lda+n], btmp[j+1:n]) + btmp[j] = alpha * tmp + } + } + return + } + for i := 0; i < m; i++ { + btmp := b[i*ldb : i*ldb+n] + for j := n - 1; j >= 0; j-- { + tmp := btmp[j] + if nonUnit { + tmp *= a[j*lda+j] + } + tmp += f64.DotUnitary(a[j*lda:j*lda+j], btmp[:j]) + btmp[j] = alpha * tmp + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/gonum/sgemm.go b/vendor/gonum.org/v1/gonum/blas/gonum/sgemm.go new file mode 100644 index 0000000..e868a10 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/gonum/sgemm.go @@ -0,0 +1,318 @@ +// Code generated by "go generate gonum.org/v1/gonum/blas/gonum”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gonum + +import ( + "runtime" + "sync" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/internal/asm/f32" +) + +// Sgemm performs one of the matrix-matrix operations +// C = alpha * A * B + beta * C +// C = alpha * A^T * B + beta * C +// C = alpha * A * B^T + beta * C +// C = alpha * A^T * B^T + beta * C +// where A is an m×k or k×m dense matrix, B is an n×k or k×n dense matrix, C is +// an m×n matrix, and alpha and beta are scalars. tA and tB specify whether A or +// B are transposed. +// +// Float32 implementations are autogenerated and not directly tested. +func (Implementation) Sgemm(tA, tB blas.Transpose, m, n, k int, alpha float32, a []float32, lda int, b []float32, ldb int, beta float32, c []float32, ldc int) { + switch tA { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + switch tB { + default: + panic(badTranspose) + case blas.NoTrans, blas.Trans, blas.ConjTrans: + } + if m < 0 { + panic(mLT0) + } + if n < 0 { + panic(nLT0) + } + if k < 0 { + panic(kLT0) + } + aTrans := tA == blas.Trans || tA == blas.ConjTrans + if aTrans { + if lda < max(1, m) { + panic(badLdA) + } + } else { + if lda < max(1, k) { + panic(badLdA) + } + } + bTrans := tB == blas.Trans || tB == blas.ConjTrans + if bTrans { + if ldb < max(1, k) { + panic(badLdB) + } + } else { + if ldb < max(1, n) { + panic(badLdB) + } + } + if ldc < max(1, n) { + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + // For zero matrix size the following slice length checks are trivially satisfied. + if aTrans { + if len(a) < (k-1)*lda+m { + panic(shortA) + } + } else { + if len(a) < (m-1)*lda+k { + panic(shortA) + } + } + if bTrans { + if len(b) < (n-1)*ldb+k { + panic(shortB) + } + } else { + if len(b) < (k-1)*ldb+n { + panic(shortB) + } + } + if len(c) < (m-1)*ldc+n { + panic(shortC) + } + + // Quick return if possible. + if (alpha == 0 || k == 0) && beta == 1 { + return + } + + // scale c + if beta != 1 { + if beta == 0 { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] = 0 + } + } + } else { + for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for j := range ctmp { + ctmp[j] *= beta + } + } + } + } + + sgemmParallel(aTrans, bTrans, m, n, k, a, lda, b, ldb, c, ldc, alpha) +} + +func sgemmParallel(aTrans, bTrans bool, m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + // dgemmParallel computes a parallel matrix multiplication by partitioning + // a and b into sub-blocks, and updating c with the multiplication of the sub-block + // In all cases, + // A = [ A_11 A_12 ... A_1j + // A_21 A_22 ... A_2j + // ... + // A_i1 A_i2 ... A_ij] + // + // and same for B. All of the submatrix sizes are blockSize×blockSize except + // at the edges. + // + // In all cases, there is one dimension for each matrix along which + // C must be updated sequentially. + // Cij = \sum_k Aik Bki, (A * B) + // Cij = \sum_k Aki Bkj, (A^T * B) + // Cij = \sum_k Aik Bjk, (A * B^T) + // Cij = \sum_k Aki Bjk, (A^T * B^T) + // + // This code computes one {i, j} block sequentially along the k dimension, + // and computes all of the {i, j} blocks concurrently. This + // partitioning allows Cij to be updated in-place without race-conditions. + // Instead of launching a goroutine for each possible concurrent computation, + // a number of worker goroutines are created and channels are used to pass + // available and completed cases. 
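+ //
+ // For illustration (block size assumed here, not taken from this file): if
+ // blockSize were 512 and m = n = 1000, then blocks(1000, 512) = 2 and the
+ // {i, j} grid has 2×2 = 4 sub-blocks, each owned by exactly one worker at a
+ // time.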
+ // + // http://alexkr.com/docs/matrixmult.pdf is a good reference on matrix-matrix + // multiplies, though this code does not copy matrices to attempt to eliminate + // cache misses. + + maxKLen := k + parBlocks := blocks(m, blockSize) * blocks(n, blockSize) + if parBlocks < minParBlock { + // The matrix multiplication is small in the dimensions where it can be + // computed concurrently. Just do it in serial. + sgemmSerial(aTrans, bTrans, m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + } + + nWorkers := runtime.GOMAXPROCS(0) + if parBlocks < nWorkers { + nWorkers = parBlocks + } + // There is a tradeoff between the workers having to wait for work + // and a large buffer making operations slow. + buf := buffMul * nWorkers + if buf > parBlocks { + buf = parBlocks + } + + sendChan := make(chan subMul, buf) + + // Launch workers. A worker receives an {i, j} submatrix of c, and computes + // A_ik B_ki (or the transposed version) storing the result in c_ij. When the + // channel is finally closed, it signals to the waitgroup that it has finished + // computing. + var wg sync.WaitGroup + for i := 0; i < nWorkers; i++ { + wg.Add(1) + go func() { + defer wg.Done() + for sub := range sendChan { + i := sub.i + j := sub.j + leni := blockSize + if i+leni > m { + leni = m - i + } + lenj := blockSize + if j+lenj > n { + lenj = n - j + } + + cSub := sliceView32(c, ldc, i, j, leni, lenj) + + // Compute A_ik B_kj for all k + for k := 0; k < maxKLen; k += blockSize { + lenk := blockSize + if k+lenk > maxKLen { + lenk = maxKLen - k + } + var aSub, bSub []float32 + if aTrans { + aSub = sliceView32(a, lda, k, i, lenk, leni) + } else { + aSub = sliceView32(a, lda, i, k, leni, lenk) + } + if bTrans { + bSub = sliceView32(b, ldb, j, k, lenj, lenk) + } else { + bSub = sliceView32(b, ldb, k, j, lenk, lenj) + } + sgemmSerial(aTrans, bTrans, leni, lenj, lenk, aSub, lda, bSub, ldb, cSub, ldc, alpha) + } + } + }() + } + + // Send out all of the {i, j} subblocks for computation. + for i := 0; i < m; i += blockSize { + for j := 0; j < n; j += blockSize { + sendChan <- subMul{ + i: i, + j: j, + } + } + } + close(sendChan) + wg.Wait() +} + +// sgemmSerial is serial matrix multiply +func sgemmSerial(aTrans, bTrans bool, m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + switch { + case !aTrans && !bTrans: + sgemmSerialNotNot(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case aTrans && !bTrans: + sgemmSerialTransNot(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case !aTrans && bTrans: + sgemmSerialNotTrans(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + case aTrans && bTrans: + sgemmSerialTransTrans(m, n, k, a, lda, b, ldb, c, ldc, alpha) + return + default: + panic("unreachable") + } +} + +// sgemmSerial where neither a nor b are transposed +func sgemmSerialNotNot(m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. 
+ for i := 0; i < m; i++ { + ctmp := c[i*ldc : i*ldc+n] + for l, v := range a[i*lda : i*lda+k] { + tmp := alpha * v + if tmp != 0 { + f32.AxpyUnitary(tmp, b[l*ldb:l*ldb+n], ctmp) + } + } + } +} + +// sgemmSerial where neither a is transposed and b is not +func sgemmSerialTransNot(m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for l := 0; l < k; l++ { + btmp := b[l*ldb : l*ldb+n] + for i, v := range a[l*lda : l*lda+m] { + tmp := alpha * v + if tmp != 0 { + ctmp := c[i*ldc : i*ldc+n] + f32.AxpyUnitary(tmp, btmp, ctmp) + } + } + } +} + +// sgemmSerial where neither a is not transposed and b is +func sgemmSerialNotTrans(m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for i := 0; i < m; i++ { + atmp := a[i*lda : i*lda+k] + ctmp := c[i*ldc : i*ldc+n] + for j := 0; j < n; j++ { + ctmp[j] += alpha * f32.DotUnitary(atmp, b[j*ldb:j*ldb+k]) + } + } +} + +// sgemmSerial where both are transposed +func sgemmSerialTransTrans(m, n, k int, a []float32, lda int, b []float32, ldb int, c []float32, ldc int, alpha float32) { + // This style is used instead of the literal [i*stride +j]) is used because + // approximately 5 times faster as of go 1.3. + for l := 0; l < k; l++ { + for i, v := range a[l*lda : l*lda+m] { + tmp := alpha * v + if tmp != 0 { + ctmp := c[i*ldc : i*ldc+n] + f32.AxpyInc(tmp, b[l:], ctmp, uintptr(n), uintptr(ldb), 1, 0, 0) + } + } + } +} + +func sliceView32(a []float32, lda, i, j, r, c int) []float32 { + return a[i*lda+j : (i+r-1)*lda+j+c] +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/benchautogen/autogen_bench_level1double.go b/vendor/gonum.org/v1/gonum/blas/testblas/benchautogen/autogen_bench_level1double.go new file mode 100644 index 0000000..a9dd402 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/benchautogen/autogen_bench_level1double.go @@ -0,0 +1,288 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this code is governed by a BSD-style +// license that can be found in the LICENSE file + +// Script for automatic code generation of the benchmark routines. +package main + +import ( + "fmt" + "os" + "os/exec" + "path/filepath" + "strconv" +) + +var gopath string + +var copyrightnotice = []byte(`// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this code is governed by a BSD-style +// license that can be found in the LICENSE file`) + +var autogen = []byte("// Code generated by \"go run gonum.org/v1/gonum/blas/testblas/benchautogen/autogen_bench_level1double.go\"; DO NOT EDIT.\n") + +var imports = []byte(`import( + "golang.org/x/exp/rand" + "testing" + + "gonum.org/v1/gonum/blas" +)`) + +var randomSliceFunction = []byte(`func randomSlice(l, idx int) ([]float64) { + if idx < 0{ + idx = -idx + } + s := make([]float64, l * idx) + for i := range s { + s[i] = rand.Float64() + } + return s +}`) + +const ( + posInc1 = 5 + posInc2 = 3 + negInc1 = -3 + negInc2 = -4 +) + +var level1Sizes = []struct { + lower string + upper string + camel string + size int +}{ + { + lower: "small", + upper: "SMALL_SLICE", + camel: "Small", + size: 10, + }, + { + lower: "medium", + upper: "MEDIUM_SLICE", + camel: "Medium", + size: 1000, + }, + { + lower: "large", + upper: "LARGE_SLICE", + camel: "Large", + size: 100000, + }, + { + lower: "huge", + upper: "HUGE_SLICE", + camel: "Huge", + size: 10000000, + }, +} + +type level1functionStruct struct { + camel string + sig string + call string + extraSetup string + oneInput bool + extraName string // if have a couple different cases for the same function +} + +var level1Functions = []level1functionStruct{ + { + camel: "Ddot", + sig: "n int, x []float64, incX int, y []float64, incY int", + call: "n, x, incX, y, incY", + oneInput: false, + }, + { + camel: "Dnrm2", + sig: "n int, x []float64, incX int", + call: "n, x, incX", + oneInput: true, + }, + { + camel: "Dasum", + sig: "n int, x []float64, incX int", + call: "n, x, incX", + oneInput: true, + }, + { + camel: "Idamax", + sig: "n int, x []float64, incX int", + call: "n, x, incX", + oneInput: true, + }, + { + camel: "Dswap", + sig: "n int, x []float64, incX int, y []float64, incY int", + call: "n, x, incX, y, incY", + oneInput: false, + }, + { + camel: "Dcopy", + sig: "n int, x []float64, incX int, y []float64, incY int", + call: "n, x, incX, y, incY", + oneInput: false, + }, + { + camel: "Daxpy", + sig: "n int, alpha float64, x []float64, incX int, y []float64, incY int", + call: "n, alpha, x, incX, y, incY", + extraSetup: "alpha := 2.4", + oneInput: false, + }, + { + camel: "Drot", + sig: "n int, x []float64, incX int, y []float64, incY int, c, s float64", + call: "n, x, incX, y, incY, c, s", + extraSetup: "c := 0.89725836967\ns:= 0.44150585279", + oneInput: false, + }, + { + camel: "Drotm", + sig: "n int, x []float64, incX int, y []float64, incY int, p blas.DrotmParams", + call: "n, x, incX, y, incY, p", + extraSetup: "p := blas.DrotmParams{Flag: blas.OffDiagonal, H: [4]float64{0, -0.625, 0.9375,0}}", + oneInput: false, + extraName: "OffDia", + }, + { + camel: "Drotm", + sig: "n int, x []float64, incX int, y []float64, incY int, p blas.DrotmParams", + call: "n, x, incX, y, incY, p", + extraSetup: "p := blas.DrotmParams{Flag: blas.OffDiagonal, H: [4]float64{5.0 / 12, 0, 0, 0.625}}", + oneInput: false, + extraName: "Dia", + }, + { + camel: "Drotm", + sig: "n int, x []float64, incX int, y []float64, incY int, p blas.DrotmParams", + call: "n, x, incX, y, incY, p", + extraSetup: "p := blas.DrotmParams{Flag: blas.OffDiagonal, H: [4]float64{4096, -3584, 1792, 4096}}", + oneInput: false, + extraName: "Resc", + }, + { + camel: "Dscal", + sig: "n int, alpha float64, x []float64, incX int", + call: "n, alpha, x, incX", + extraSetup: "alpha := 2.4", + oneInput: true, + }, +} + +func init() { + gopath = os.Getenv("GOPATH") + if gopath == "" { + panic("gopath not 
set") + } +} + +func main() { + pkgs := []string{"gonum", "netlib"} + for _, pkg := range pkgs { + blasPath := filepath.Join(gopath, "src", "gonum.org", "v1", pkg, "blas", pkg) + err := level1(blasPath, pkg) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + err = exec.Command("goimports", "-w", blasPath).Run() + if err != nil { + fmt.Println(err) + os.Exit(1) + } + } +} + +func printHeader(f *os.File, name string) error { + if _, err := f.Write(autogen); err != nil { + return err + } + f.WriteString("\n\n") + f.Write(copyrightnotice) + f.WriteString("\n\n") + f.WriteString("package " + name) + f.WriteString("\n\n") + f.Write(imports) + f.WriteString("\n\n") + return nil +} + +// Generate the benchmark scripts for level1 +func level1(benchPath string, pkgname string) error { + // Generate level 1 benchmarks + level1Filepath := filepath.Join(benchPath, "level1float64_bench_test.go") + f, err := os.Create(level1Filepath) + if err != nil { + return err + } + defer f.Close() + printHeader(f, pkgname) + + // Print all of the constants + f.WriteString("const (\n") + f.WriteString("\tposInc1 = " + strconv.Itoa(posInc1) + "\n") + f.WriteString("\tposInc2 = " + strconv.Itoa(posInc2) + "\n") + f.WriteString("\tnegInc1 = " + strconv.Itoa(negInc1) + "\n") + f.WriteString("\tnegInc2 = " + strconv.Itoa(negInc2) + "\n") + for _, con := range level1Sizes { + f.WriteString("\t" + con.upper + " = " + strconv.Itoa(con.size) + "\n") + } + f.WriteString(")\n") + f.WriteString("\n") + + // Write the randomSlice function + f.Write(randomSliceFunction) + f.WriteString("\n\n") + + // Start writing the benchmarks + for _, fun := range level1Functions { + writeLevel1Benchmark(fun, f) + f.WriteString("\n/* ------------------ */ \n") + } + + return nil +} + +func writeLevel1Benchmark(fun level1functionStruct, f *os.File) { + // First, write the base benchmark file + f.WriteString("func benchmark" + fun.camel + fun.extraName + "(b *testing.B, ") + f.WriteString(fun.sig) + f.WriteString(") {\n") + + f.WriteString("b.ResetTimer()\n") + f.WriteString("for i := 0; i < b.N; i++{\n") + f.WriteString("\timpl." + fun.camel + "(") + + f.WriteString(fun.call) + f.WriteString(")\n}\n}\n") + f.WriteString("\n") + + // Write all of the benchmarks to call it + for _, sz := range level1Sizes { + lambda := func(incX, incY, name string, twoInput bool) { + f.WriteString("func Benchmark" + fun.camel + fun.extraName + sz.camel + name + "(b *testing.B){\n") + f.WriteString("n := " + sz.upper + "\n") + f.WriteString("incX := " + incX + "\n") + f.WriteString("x := randomSlice(n, incX)\n") + if twoInput { + f.WriteString("incY := " + incY + "\n") + f.WriteString("y := randomSlice(n, incY)\n") + } + f.WriteString(fun.extraSetup + "\n") + f.WriteString("benchmark" + fun.camel + fun.extraName + "(b, " + fun.call + ")\n") + f.WriteString("}\n\n") + } + if fun.oneInput { + lambda("1", "", "UnitaryInc", false) + lambda("posInc1", "", "PosInc", false) + } else { + lambda("1", "1", "BothUnitary", true) + lambda("posInc1", "1", "IncUni", true) + lambda("1", "negInc1", "UniInc", true) + lambda("posInc1", "negInc1", "BothInc", true) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/benchsize.go b/vendor/gonum.org/v1/gonum/blas/testblas/benchsize.go new file mode 100644 index 0000000..dcb5cd9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/benchsize.go @@ -0,0 +1,12 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +const ( + SmallMat = 10 + MediumMat = 100 + LargeMat = 1000 + HugeMat = 10000 +) diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/common.go b/vendor/gonum.org/v1/gonum/blas/testblas/common.go new file mode 100644 index 0000000..508893a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/common.go @@ -0,0 +1,728 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "math" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +// throwPanic will throw unexpected panics if true, or will just report them as errors if false +const throwPanic = true + +var znan = cmplx.NaN() + +func dTolEqual(a, b float64) bool { + if math.IsNaN(a) && math.IsNaN(b) { + return true + } + if a == b { + return true + } + m := math.Max(math.Abs(a), math.Abs(b)) + if m > 1 { + a /= m + b /= m + } + if math.Abs(a-b) < 1e-14 { + return true + } + return false +} + +func dSliceTolEqual(a, b []float64) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if !dTolEqual(a[i], b[i]) { + return false + } + } + return true +} + +func dStridedSliceTolEqual(n int, a []float64, inca int, b []float64, incb int) bool { + ia := 0 + ib := 0 + if inca <= 0 { + ia = -(n - 1) * inca + } + if incb <= 0 { + ib = -(n - 1) * incb + } + for i := 0; i < n; i++ { + if !dTolEqual(a[ia], b[ib]) { + return false + } + ia += inca + ib += incb + } + return true +} + +func dSliceEqual(a, b []float64) bool { + if len(a) != len(b) { + return false + } + for i := range a { + if !dTolEqual(a[i], b[i]) { + return false + } + } + return true +} + +func dCopyTwoTmp(x, xTmp, y, yTmp []float64) { + if len(x) != len(xTmp) { + panic("x size mismatch") + } + if len(y) != len(yTmp) { + panic("y size mismatch") + } + copy(xTmp, x) + copy(yTmp, y) +} + +// returns true if the function panics +func panics(f func()) (b bool) { + defer func() { + err := recover() + if err != nil { + b = true + } + }() + f() + return +} + +func testpanics(f func(), name string, t *testing.T) { + b := panics(f) + if !b { + t.Errorf("%v should panic and does not", name) + } +} + +func sliceOfSliceCopy(a [][]float64) [][]float64 { + n := make([][]float64, len(a)) + for i := range a { + n[i] = make([]float64, len(a[i])) + copy(n[i], a[i]) + } + return n +} + +func sliceCopy(a []float64) []float64 { + n := make([]float64, len(a)) + copy(n, a) + return n +} + +func flatten(a [][]float64) []float64 { + if len(a) == 0 { + return nil + } + m := len(a) + n := len(a[0]) + s := make([]float64, m*n) + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + s[i*n+j] = a[i][j] + } + } + return s +} + +func unflatten(a []float64, m, n int) [][]float64 { + s := make([][]float64, m) + for i := 0; i < m; i++ { + s[i] = make([]float64, n) + for j := 0; j < n; j++ { + s[i][j] = a[i*n+j] + } + } + return s +} + +// flattenTriangular turns the upper or lower triangle of a dense slice of slice +// into a single slice with packed storage. a must be a square matrix. 
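+// For example (illustrative), with ul == blas.Upper the 3×3 matrix
+//  [a00 a01 a02]
+//  [ .  a11 a12]
+//  [ .   .  a22]
+// is packed row by row into [a00, a01, a02, a11, a12, a22]; with ul == blas.Lower
+// the rows [a00], [a10, a11] and [a20, a21, a22] are concatenated instead.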
+func flattenTriangular(a [][]float64, ul blas.Uplo) []float64 { + m := len(a) + aFlat := make([]float64, m*(m+1)/2) + var k int + if ul == blas.Upper { + for i := 0; i < m; i++ { + k += copy(aFlat[k:], a[i][i:]) + } + return aFlat + } + for i := 0; i < m; i++ { + k += copy(aFlat[k:], a[i][:i+1]) + } + return aFlat +} + +// flattenBanded turns a dense banded slice of slice into the compact banded matrix format +func flattenBanded(a [][]float64, ku, kl int) []float64 { + m := len(a) + n := len(a[0]) + if ku < 0 || kl < 0 { + panic("testblas: negative band length") + } + nRows := m + nCols := (ku + kl + 1) + aflat := make([]float64, nRows*nCols) + for i := range aflat { + aflat[i] = math.NaN() + } + // loop over the rows, and then the bands + // elements in the ith row stay in the ith row + // order in bands is kept + for i := 0; i < nRows; i++ { + min := -kl + if i-kl < 0 { + min = -i + } + max := ku + if i+ku >= n { + max = n - i - 1 + } + for j := min; j <= max; j++ { + col := kl + j + aflat[i*nCols+col] = a[i][i+j] + } + } + return aflat +} + +// makeIncremented takes a float64 slice with inc == 1 and makes an incremented version +// and adds extra values on the end +func makeIncremented(x []float64, inc int, extra int) []float64 { + if inc == 0 { + panic("zero inc") + } + absinc := inc + if absinc < 0 { + absinc = -inc + } + xcopy := make([]float64, len(x)) + if inc > 0 { + copy(xcopy, x) + } else { + for i := 0; i < len(x); i++ { + xcopy[i] = x[len(x)-i-1] + } + } + + // don't use NaN because it makes comparison hard + // Do use a weird unique value for easier debugging + counter := 100.0 + var xnew []float64 + for i, v := range xcopy { + xnew = append(xnew, v) + if i != len(x)-1 { + for j := 0; j < absinc-1; j++ { + xnew = append(xnew, counter) + counter++ + } + } + } + for i := 0; i < extra; i++ { + xnew = append(xnew, counter) + counter++ + } + return xnew +} + +// makeIncremented32 takes a float32 slice with inc == 1 and makes an incremented version +// and adds extra values on the end +func makeIncremented32(x []float32, inc int, extra int) []float32 { + if inc == 0 { + panic("zero inc") + } + absinc := inc + if absinc < 0 { + absinc = -inc + } + xcopy := make([]float32, len(x)) + if inc > 0 { + copy(xcopy, x) + } else { + for i := 0; i < len(x); i++ { + xcopy[i] = x[len(x)-i-1] + } + } + + // don't use NaN because it makes comparison hard + // Do use a weird unique value for easier debugging + var counter float32 = 100.0 + var xnew []float32 + for i, v := range xcopy { + xnew = append(xnew, v) + if i != len(x)-1 { + for j := 0; j < absinc-1; j++ { + xnew = append(xnew, counter) + counter++ + } + } + } + for i := 0; i < extra; i++ { + xnew = append(xnew, counter) + counter++ + } + return xnew +} + +func abs(x int) int { + if x < 0 { + return -x + } + return x +} + +func allPairs(x, y []int) [][2]int { + var p [][2]int + for _, v0 := range x { + for _, v1 := range y { + p = append(p, [2]int{v0, v1}) + } + } + return p +} + +func sameFloat64(a, b float64) bool { + return a == b || math.IsNaN(a) && math.IsNaN(b) +} + +func sameComplex128(x, y complex128) bool { + return sameFloat64(real(x), real(y)) && sameFloat64(imag(x), imag(y)) +} + +func zsame(x, y []complex128) bool { + if len(x) != len(y) { + return false + } + for i, v := range x { + w := y[i] + if !sameComplex128(v, w) { + return false + } + } + return true +} + +// zSameAtNonstrided returns whether elements at non-stride positions of vectors +// x and y are same. 
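+// For example (illustrative), with inc = 3 the elements at indices 0, 3, 6, ...
+// are skipped and all remaining indices are compared with sameComplex128.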
+func zSameAtNonstrided(x, y []complex128, inc int) bool { + if len(x) != len(y) { + return false + } + if inc < 0 { + inc = -inc + } + for i, v := range x { + if i%inc == 0 { + continue + } + w := y[i] + if !sameComplex128(v, w) { + return false + } + } + return true +} + +// zEqualApproxAtStrided returns whether elements at stride positions of vectors +// x and y are approximately equal within tol. +func zEqualApproxAtStrided(x, y []complex128, inc int, tol float64) bool { + if len(x) != len(y) { + return false + } + if inc < 0 { + inc = -inc + } + for i := 0; i < len(x); i += inc { + v := x[i] + w := y[i] + if !(cmplx.Abs(v-w) <= tol) { + return false + } + } + return true +} + +func makeZVector(data []complex128, inc int) []complex128 { + if inc == 0 { + panic("bad test") + } + if len(data) == 0 { + return nil + } + inc = abs(inc) + x := make([]complex128, (len(data)-1)*inc+1) + for i := range x { + x[i] = znan + } + for i, v := range data { + x[i*inc] = v + } + return x +} + +func makeZGeneral(data []complex128, m, n int, ld int) []complex128 { + if m < 0 || n < 0 { + panic("bad test") + } + if data != nil && len(data) != m*n { + panic("bad test") + } + if ld < max(1, n) { + panic("bad test") + } + if m == 0 || n == 0 { + return nil + } + a := make([]complex128, (m-1)*ld+n) + for i := range a { + a[i] = znan + } + if data != nil { + for i := 0; i < m; i++ { + copy(a[i*ld:i*ld+n], data[i*n:i*n+n]) + } + } + return a +} + +func max(a, b int) int { + if a < b { + return b + } + return a +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// zPack returns the uplo triangle of an n×n matrix A in packed format. +func zPack(uplo blas.Uplo, n int, a []complex128, lda int) []complex128 { + if n == 0 { + return nil + } + ap := make([]complex128, n*(n+1)/2) + var ii int + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + ap[ii] = a[i*lda+j] + ii++ + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + ap[ii] = a[i*lda+j] + ii++ + } + } + } + return ap +} + +// zUnpackAsHermitian returns an n×n general Hermitian matrix (with stride n) +// whose packed uplo triangle is stored on entry in ap. +func zUnpackAsHermitian(uplo blas.Uplo, n int, ap []complex128) []complex128 { + if n == 0 { + return nil + } + a := make([]complex128, n*n) + lda := n + var ii int + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + a[i*lda+j] = ap[ii] + if i != j { + a[j*lda+i] = cmplx.Conj(ap[ii]) + } + ii++ + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + a[i*lda+j] = ap[ii] + if i != j { + a[j*lda+i] = cmplx.Conj(ap[ii]) + } + ii++ + } + } + } + return a +} + +// zPackBand returns the (kL+1+kU) band of an m×n general matrix A in band +// matrix format with ldab stride. Out-of-range elements are filled with NaN. +func zPackBand(kL, kU, ldab int, m, n int, a []complex128, lda int) []complex128 { + if m == 0 || n == 0 { + return nil + } + nRow := min(m, n+kL) + ab := make([]complex128, (nRow-1)*ldab+kL+1+kU) + for i := range ab { + ab[i] = znan + } + for i := 0; i < m; i++ { + off := max(0, kL-i) + var k int + for j := max(0, i-kL); j < min(n, i+kU+1); j++ { + ab[i*ldab+off+k] = a[i*lda+j] + k++ + } + } + return ab +} + +// zPackTriBand returns in band matrix format the (k+1) band in the uplo +// triangle of an n×n matrix A. Out-of-range elements are filled with NaN. 
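+// As an illustrative example, for uplo == blas.Lower, n = 3 and k = 1, row i of the
+// returned slice holds a[i][max(0,i-k):i+1], left-padded with NaN so that the
+// diagonal element always lands at column k of the band storage.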
+func zPackTriBand(k, ldab int, uplo blas.Uplo, n int, a []complex128, lda int) []complex128 { + if n == 0 { + return nil + } + ab := make([]complex128, (n-1)*ldab+k+1) + for i := range ab { + ab[i] = znan + } + if uplo == blas.Upper { + for i := 0; i < n; i++ { + var k int + for j := i; j < min(n, i+k+1); j++ { + ab[i*ldab+k] = a[i*lda+j] + k++ + } + } + } else { + for i := 0; i < n; i++ { + off := max(0, k-i) + var kk int + for j := max(0, i-k); j <= i; j++ { + ab[i*ldab+off+kk] = a[i*lda+j] + kk++ + } + } + } + return ab +} + +// zEqualApprox returns whether the slices a and b are approximately equal. +func zEqualApprox(a, b []complex128, tol float64) bool { + if len(a) != len(b) { + panic("mismatched slice length") + } + for i, ai := range a { + if !floats.EqualWithinAbs(cmplx.Abs(ai), cmplx.Abs(b[i]), tol) { + return false + } + } + return true +} + +// rndComplex128 returns a complex128 with random components. +func rndComplex128(rnd *rand.Rand) complex128 { + return complex(rnd.NormFloat64(), rnd.NormFloat64()) +} + +// zmm returns the result of one of the matrix-matrix operations +// alpha * op(A) * op(B) + beta * C +// where op(X) is one of +// op(X) = X or op(X) = X^T or op(X) = X^H, +// alpha and beta are scalars, and A, B and C are matrices, with op(A) an m×k matrix, +// op(B) a k×n matrix and C an m×n matrix. +// +// The returned slice is newly allocated, has the same length as c and the +// matrix it represents has the stride ldc. Out-of-range elements are equal to +// those of C to ease comparison of results from BLAS Level 3 functions. +func zmm(tA, tB blas.Transpose, m, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) []complex128 { + r := make([]complex128, len(c)) + copy(r, c) + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + r[i*ldc+j] = 0 + } + } + switch tA { + case blas.NoTrans: + switch tB { + case blas.NoTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[i*lda+l] * b[l*ldb+j] + } + } + } + case blas.Trans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[i*lda+l] * b[j*ldb+l] + } + } + } + case blas.ConjTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[i*lda+l] * cmplx.Conj(b[j*ldb+l]) + } + } + } + } + case blas.Trans: + switch tB { + case blas.NoTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[l*lda+i] * b[l*ldb+j] + } + } + } + case blas.Trans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[l*lda+i] * b[j*ldb+l] + } + } + } + case blas.ConjTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += a[l*lda+i] * cmplx.Conj(b[j*ldb+l]) + } + } + } + } + case blas.ConjTrans: + switch tB { + case blas.NoTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += cmplx.Conj(a[l*lda+i]) * b[l*ldb+j] + } + } + } + case blas.Trans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += cmplx.Conj(a[l*lda+i]) * b[j*ldb+l] + } + } + } + case blas.ConjTrans: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + for l := 0; l < k; l++ { + r[i*ldc+j] += cmplx.Conj(a[l*lda+i]) * cmplx.Conj(b[j*ldb+l]) + } + } + } + } + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + r[i*ldc+j] = alpha*r[i*ldc+j] 
+ beta*c[i*ldc+j] + } + } + return r +} + +// transString returns a string representation of blas.Transpose. +func transString(t blas.Transpose) string { + switch t { + case blas.NoTrans: + return "NoTrans" + case blas.Trans: + return "Trans" + case blas.ConjTrans: + return "ConjTrans" + } + return "unknown trans" +} + +// uploString returns a string representation of blas.Uplo. +func uploString(uplo blas.Uplo) string { + switch uplo { + case blas.Lower: + return "Lower" + case blas.Upper: + return "Upper" + } + return "unknown uplo" +} + +// sideString returns a string representation of blas.Side. +func sideString(side blas.Side) string { + switch side { + case blas.Left: + return "Left" + case blas.Right: + return "Right" + } + return "unknown side" +} + +// diagString returns a string representation of blas.Diag. +func diagString(diag blas.Diag) string { + switch diag { + case blas.Unit: + return "Unit" + case blas.NonUnit: + return "NonUnit" + } + return "unknown diag" +} + +// zSameLowerTri returns whether n×n matrices A and B are same under the diagonal. +func zSameLowerTri(n int, a []complex128, lda int, b []complex128, ldb int) bool { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + aij := a[i*lda+j] + bij := b[i*ldb+j] + if !sameComplex128(aij, bij) { + return false + } + } + } + return true +} + +// zSameUpperTri returns whether n×n matrices A and B are same above the diagonal. +func zSameUpperTri(n int, a []complex128, lda int, b []complex128, ldb int) bool { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + aij := a[i*lda+j] + bij := b[i*ldb+j] + if !sameComplex128(aij, bij) { + return false + } + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dgbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dgbmv.go new file mode 100644 index 0000000..6d768ce --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dgbmv.go @@ -0,0 +1,153 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dgbmver interface { + Dgbmv(tA blas.Transpose, m, n, kL, kU int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) +} + +func DgbmvTest(t *testing.T, blasser Dgbmver) { + for i, test := range []struct { + tA blas.Transpose + m, n int + kL, kU int + alpha float64 + a [][]float64 + lda int + x []float64 + beta float64 + y []float64 + ans []float64 + }{ + { + tA: blas.NoTrans, + m: 9, + n: 6, + lda: 4, + kL: 2, + kU: 1, + alpha: 3.0, + beta: 2.0, + a: [][]float64{ + {5, 3, 0, 0, 0, 0}, + {-1, 2, 9, 0, 0, 0}, + {4, 8, 3, 6, 0, 0}, + {0, -1, 8, 2, 1, 0}, + {0, 0, 9, 9, 9, 5}, + {0, 0, 0, 2, -3, 2}, + {0, 0, 0, 0, 1, 5}, + {0, 0, 0, 0, 0, 6}, + }, + x: []float64{1, 2, 3, 4, 5, 6}, + y: []float64{-1, -2, -3, -4, -5, -6, -7, -8, -9}, + ans: []float64{31, 86, 153, 97, 404, 3, 91, 92, -18}, + }, + { + tA: blas.Trans, + m: 9, + n: 6, + lda: 4, + kL: 2, + kU: 1, + alpha: 3.0, + beta: 2.0, + a: [][]float64{ + {5, 3, 0, 0, 0, 0}, + {-1, 2, 9, 0, 0, 0}, + {4, 8, 3, 6, 0, 0}, + {0, -1, 8, 2, 1, 0}, + {0, 0, 9, 9, 9, 5}, + {0, 0, 0, 2, -3, 2}, + {0, 0, 0, 0, 1, 5}, + {0, 0, 0, 0, 0, 6}, + }, + x: []float64{1, 2, 3, 4, 5, 6, 7, 8, 9}, + y: []float64{-1, -2, -3, -4, -5, -6}, + ans: []float64{43, 77, 306, 241, 104, 348}, + }, + { + tA: blas.NoTrans, + m: 6, + n: 3, + lda: 1, + kL: 0, + kU: 0, + alpha: 2.0, + beta: 1.0, + a: [][]float64{ + {1, 0, 0}, + {0, 1, 0}, + {0, 0, 1}, + }, + x: []float64{1, 2, 3}, + y: []float64{-1, -2, -3, -4, -5, -6}, + ans: []float64{1, 2, 3, -4, -5, -6}, + }, + { + tA: blas.Trans, + m: 6, + n: 3, + lda: 1, + kL: 0, + kU: 0, + alpha: 2.0, + beta: 1.0, + a: [][]float64{ + {1, 0, 0}, + {0, 1, 0}, + {0, 0, 1}, + {0, 0, 0}, + {0, 0, 0}, + {0, 0, 0}, + }, + x: []float64{1, 2, 3, 4, 5, 6}, + y: []float64{-1, -2, -3}, + ans: []float64{1, 2, 3}, + }, + { + tA: blas.NoTrans, + m: 3, + n: 5, + lda: 4, + kL: 1, + kU: 2, + alpha: 2.0, + beta: 1.0, + a: [][]float64{ + {1, 2, 3, 0, 0}, + {1, 3, 6, 9, 0}, + {0, 1, 1, 1, 1}, + }, + x: []float64{1, 2, 3, 4, 5}, + y: []float64{-1, -2, -3}, + ans: []float64{-1 + 2*(1+4+9), -2 + 2*(1+6+18+36), -3 + 2*(2+3+4+5)}, + }, + } { + extra := 3 + aFlat := flattenBanded(test.a, test.kU, test.kL) + incTest := func(incX, incY, extra int) { + xnew := makeIncremented(test.x, incX, extra) + ynew := makeIncremented(test.y, incY, extra) + ans := makeIncremented(test.ans, incY, extra) + blasser.Dgbmv(test.tA, test.m, test.n, test.kL, test.kU, test.alpha, aFlat, test.lda, xnew, incX, test.beta, ynew, incY) + if !dSliceTolEqual(ans, ynew) { + t.Errorf("Case %v: Want %v, got %v", i, ans, ynew) + } + } + incTest(1, 1, extra) + incTest(1, 3, extra) + incTest(1, -3, extra) + incTest(2, 3, extra) + incTest(2, -3, extra) + incTest(3, 2, extra) + incTest(-3, 2, extra) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dgemm.go b/vendor/gonum.org/v1/gonum/blas/testblas/dgemm.go new file mode 100644 index 0000000..bfec8e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dgemm.go @@ -0,0 +1,248 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dgemmer interface { + Dgemm(tA, tB blas.Transpose, m, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) +} + +type DgemmCase struct { + m, n, k int + alpha, beta float64 + a [][]float64 + b [][]float64 + c [][]float64 + ans [][]float64 +} + +var DgemmCases = []DgemmCase{ + + { + m: 4, + n: 3, + k: 2, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2}, + {4, 5}, + {7, 8}, + {10, 11}, + }, + b: [][]float64{ + {1, 5, 6}, + {5, -8, 8}, + }, + c: [][]float64{ + {4, 8, -9}, + {12, 16, -8}, + {1, 5, 15}, + {-3, -4, 7}, + }, + ans: [][]float64{ + {24, -18, 39.5}, + {64, -32, 124}, + {94.5, -55.5, 219.5}, + {128.5, -78, 299.5}, + }, + }, + { + m: 4, + n: 2, + k: 3, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2, 3}, + {4, 5, 6}, + {7, 8, 9}, + {10, 11, 12}, + }, + b: [][]float64{ + {1, 5}, + {5, -8}, + {6, 2}, + }, + c: [][]float64{ + {4, 8}, + {12, 16}, + {1, 5}, + {-3, -4}, + }, + ans: [][]float64{ + {60, -6}, + {136, -8}, + {202.5, -19.5}, + {272.5, -30}, + }, + }, + { + m: 3, + n: 2, + k: 4, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2, 3, 4}, + {4, 5, 6, 7}, + {8, 9, 10, 11}, + }, + b: [][]float64{ + {1, 5}, + {5, -8}, + {6, 2}, + {8, 10}, + }, + c: [][]float64{ + {4, 8}, + {12, 16}, + {9, -10}, + }, + ans: [][]float64{ + {124, 74}, + {248, 132}, + {406.5, 191}, + }, + }, + { + m: 3, + n: 4, + k: 2, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2}, + {4, 5}, + {8, 9}, + }, + b: [][]float64{ + {1, 5, 2, 1}, + {5, -8, 2, 1}, + }, + c: [][]float64{ + {4, 8, 2, 2}, + {12, 16, 8, 9}, + {9, -10, 10, 10}, + }, + ans: [][]float64{ + {24, -18, 13, 7}, + {64, -32, 40, 22.5}, + {110.5, -69, 73, 39}, + }, + }, + { + m: 2, + n: 4, + k: 3, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2, 3}, + {4, 5, 6}, + }, + b: [][]float64{ + {1, 5, 8, 8}, + {5, -8, 9, 10}, + {6, 2, -3, 2}, + }, + c: [][]float64{ + {4, 8, 7, 8}, + {12, 16, -2, 6}, + }, + ans: [][]float64{ + {60, -6, 37.5, 72}, + {136, -8, 117, 191}, + }, + }, + { + m: 2, + n: 3, + k: 4, + alpha: 2, + beta: 0.5, + a: [][]float64{ + {1, 2, 3, 4}, + {4, 5, 6, 7}, + }, + b: [][]float64{ + {1, 5, 8}, + {5, -8, 9}, + {6, 2, -3}, + {8, 10, 2}, + }, + c: [][]float64{ + {4, 8, 1}, + {12, 16, 6}, + }, + ans: [][]float64{ + {124, 74, 50.5}, + {248, 132, 149}, + }, + }, +} + +// assumes [][]float64 is actually a matrix +func transpose(a [][]float64) [][]float64 { + b := make([][]float64, len(a[0])) + for i := range b { + b[i] = make([]float64, len(a)) + for j := range b[i] { + b[i][j] = a[j][i] + } + } + return b +} + +func TestDgemm(t *testing.T, blasser Dgemmer) { + for i, test := range DgemmCases { + // Test that it passes row major + dgemmcomp(i, "RowMajorNoTrans", t, blasser, blas.NoTrans, blas.NoTrans, + test.m, test.n, test.k, test.alpha, test.beta, test.a, test.b, test.c, test.ans) + // Try with A transposed + dgemmcomp(i, "RowMajorTransA", t, blasser, blas.Trans, blas.NoTrans, + test.m, test.n, test.k, test.alpha, test.beta, transpose(test.a), test.b, test.c, test.ans) + // Try with B transposed + dgemmcomp(i, "RowMajorTransB", t, blasser, blas.NoTrans, blas.Trans, + test.m, test.n, test.k, test.alpha, test.beta, test.a, transpose(test.b), test.c, test.ans) + // Try with both transposed + dgemmcomp(i, "RowMajorTransBoth", t, blasser, blas.Trans, blas.Trans, + test.m, test.n, test.k, test.alpha, test.beta, transpose(test.a), transpose(test.b), test.c, test.ans) + } +} + +func dgemmcomp(i int, name 
string, t *testing.T, blasser Dgemmer, tA, tB blas.Transpose, m, n, k int, + alpha, beta float64, a [][]float64, b [][]float64, c [][]float64, ans [][]float64) { + + aFlat := flatten(a) + aCopy := flatten(a) + bFlat := flatten(b) + bCopy := flatten(b) + cFlat := flatten(c) + ansFlat := flatten(ans) + lda := len(a[0]) + ldb := len(b[0]) + ldc := len(c[0]) + + // Compute the matrix multiplication + blasser.Dgemm(tA, tB, m, n, k, alpha, aFlat, lda, bFlat, ldb, beta, cFlat, ldc) + + if !dSliceEqual(aFlat, aCopy) { + t.Errorf("Test %v case %v: a changed during call to Dgemm", i, name) + } + if !dSliceEqual(bFlat, bCopy) { + t.Errorf("Test %v case %v: b changed during call to Dgemm", i, name) + } + + if !dSliceTolEqual(ansFlat, cFlat) { + t.Errorf("Test %v case %v: answer mismatch. Expected %v, Found %v", i, name, ansFlat, cFlat) + } + // TODO: Need to add a sub-slice test where don't use up full matrix +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dgemmbench.go b/vendor/gonum.org/v1/gonum/blas/testblas/dgemmbench.go new file mode 100644 index 0000000..7d51145 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dgemmbench.go @@ -0,0 +1,44 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +func DgemmBenchmark(b *testing.B, dgemm Dgemmer, m, n, k int, tA, tB blas.Transpose) { + a := make([]float64, m*k) + for i := range a { + a[i] = rand.Float64() + } + bv := make([]float64, k*n) + for i := range bv { + bv[i] = rand.Float64() + } + c := make([]float64, m*n) + for i := range c { + c[i] = rand.Float64() + } + var lda, ldb int + if tA == blas.Trans { + lda = m + } else { + lda = k + } + if tB == blas.Trans { + ldb = k + } else { + ldb = n + } + ldc := n + b.ResetTimer() + for i := 0; i < b.N; i++ { + dgemm.Dgemm(tA, tB, m, n, k, 3.0, a, lda, bv, ldb, 1.0, c, ldc) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dgemv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dgemv.go new file mode 100644 index 0000000..a971dee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dgemv.go @@ -0,0 +1,728 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type DgemvCase struct { + Name string + m int + n int + A [][]float64 + tA blas.Transpose + x []float64 + incX int + y []float64 + incY int + + Subcases []DgemvSubcase +} + +type DgemvSubcase struct { + mulXNeg1 bool + mulYNeg1 bool + alpha float64 + beta float64 + ans []float64 +} + +var DgemvCases = []DgemvCase{ + { + Name: "M_gt_N_Inc1_NoTrans", + tA: blas.NoTrans, + m: 5, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3}, + y: []float64{7, 8, 9, 10, 11}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 0, 0, 0, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 8, 9, 10, 11}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{40.8, 43.9, 33, 9, 28}, + }, + { + alpha: 8, + beta: -6, + ans: []float64{284.4, 303.2, 210, 12, 158}, + }, + }, + }, + { + Name: "M_gt_N_Inc1_Trans", + tA: blas.Trans, + m: 5, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3, -4, 5}, + y: []float64{7, 8, 9}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 0, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 8, 9}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{94.3, 40.2, 52.3}, + }, + { + alpha: 8, + beta: -6, + ans: []float64{712.4, 273.6, 364.4}, + }, + }, + }, + { + Name: "M_eq_N_Inc1_NoTrans", + tA: blas.NoTrans, + m: 3, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3}, + y: []float64{7, 2, 2}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 0, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 2, 2}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{40.8, 43.9, 33}, + }, + { + alpha: 8, + beta: -6, + ans: []float64{40.8*8 - 6*7, 43.9*8 - 6*2, 33*8 - 6*2}, + }, + }, + }, + { + Name: "M_eq_N_Inc1_Trans", + tA: blas.Trans, + m: 3, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3}, + y: []float64{7, 2, 2}, + + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{384.4, 261.6, 270.4}, + }, + }, + }, + { + Name: "M_lt_N_Inc1_NoTrans", + tA: blas.NoTrans, + m: 3, + n: 5, + A: [][]float64{ + {4.1, 6.2, 8.1, 10, 7}, + {9.6, 3.5, 9.1, -2, 9}, + {10, 7, 3, 1, -5}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3, -7.6, 8.1}, + y: []float64{7, 2, 2}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 0, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 2, 2}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{21.5, 132, -15.1}, + }, + + { + alpha: 8, + beta: -6, + ans: []float64{21.5*8 - 6*7, 132*8 - 6*2, -15.1*8 - 6*2}, + }, + }, + }, + { + Name: "M_lt_N_Inc1_Trans", + tA: blas.Trans, + m: 3, + n: 5, + A: [][]float64{ + {4.1, 6.2, 8.1, 10, 7}, + {9.6, 3.5, 9.1, -2, 9}, + {10, 7, 3, 1, -5}, + }, + incX: 1, + incY: 1, + x: []float64{1, 2, 3}, + y: []float64{7, 2, 2, -3, 5}, + + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{384.4, 261.6, 270.4, 90, 50}, + }, + }, + }, + { + Name: "M_gt_N_Part1_NoTrans", + tA: blas.NoTrans, + m: 5, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 1, + incY: 2, + x: []float64{1, 2, 3}, + y: []float64{7, 100, 8, 
101, 9, 102, 10, 103, 11}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 100, 0, 101, 0, 102, 0, 103, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 100, 8, 101, 9, 102, 10, 103, 11}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{40.8, 100, 43.9, 101, 33, 102, 9, 103, 28}, + }, + { + alpha: 8, + beta: -6, + ans: []float64{284.4, 100, 303.2, 101, 210, 102, 12, 103, 158}, + }, + }, + }, + { + Name: "M_gt_N_Part1_Trans", + tA: blas.Trans, + m: 5, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 1, + incY: 2, + x: []float64{1, 2, 3, -4, 5}, + y: []float64{7, 100, 8, 101, 9}, + + Subcases: []DgemvSubcase{ + { + alpha: 0, + beta: 0, + ans: []float64{0, 100, 0, 101, 0}, + }, + { + alpha: 0, + beta: 1, + ans: []float64{7, 100, 8, 101, 9}, + }, + { + alpha: 1, + beta: 0, + ans: []float64{94.3, 100, 40.2, 101, 52.3}, + }, + { + alpha: 8, + beta: -6, + ans: []float64{712.4, 100, 273.6, 101, 364.4}, + }, + }, + }, + { + Name: "M_gt_N_IncNot1_NoTrans", + tA: blas.NoTrans, + m: 5, + n: 3, + + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3}, + y: []float64{7, 2, 6, 8, -4, -5, 9, 1, 1, 10, 19, 22, 11}, + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{284.4, 2, 6, 303.2, -4, -5, 210, 1, 1, 12, 19, 22, 158}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{220.4, 2, 6, 311.2, -4, -5, 322, 1, 1, -4, 19, 22, 222}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{182, 2, 6, 24, -4, -5, 210, 1, 1, 291.2, 19, 22, 260.4}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{246, 2, 6, 8, -4, -5, 322, 1, 1, 299.2, 19, 22, 196.4}, + }, + }, + }, + { + Name: "M_gt_N_IncNot1_Trans", + tA: blas.Trans, + m: 5, + n: 3, + + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + {1, 1, 2}, + {9, 2, 5}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3, 8, -3, 6, 5}, + y: []float64{7, 2, 6, 8, -4, -5, 9}, + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{720.4, 2, 6, 281.6, -4, -5, 380.4}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{219.6, 2, 6, 316, -4, -5, 195.6}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{392.4, 2, 6, 281.6, -4, -5, 708.4}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{207.6, 2, 6, 316, -4, -5, 207.6}, + }, + }, + }, + { + Name: "M_eq_N_IncNot1_NoTrans", + tA: blas.NoTrans, + m: 3, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3}, + y: []float64{7, 2, 6, 8, -4, -5, 9}, + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{284.4, 2, 6, 303.2, -4, -5, 210}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{220.4, 2, 6, 311.2, -4, -5, 322}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{222, 2, 6, 303.2, -4, -5, 272.4}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{334, 2, 6, 311.2, -4, -5, 208.4}, + }, + }, + }, + { + Name: "M_eq_N_IncNot1_Trans", + tA: blas.Trans, + m: 3, + n: 3, + A: [][]float64{ + {4.1, 6.2, 8.1}, + {9.6, 3.5, 9.1}, + {10, 7, 3}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3}, + y: []float64{7, 2, 6, 8, -4, -5, 9}, + + Subcases: []DgemvSubcase{ + { + 
alpha: 8, + beta: -6, + ans: []float64{384.4, 2, 6, 225.6, -4, -5, 228.4}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{290, 2, 6, 212.8, -4, -5, 310}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{240.4, 2, 6, 225.6, -4, -5, 372.4}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{322, 2, 6, 212.8, -4, -5, 278}, + }, + }, + }, + { + Name: "M_lt_N_IncNot1_NoTrans", + tA: blas.NoTrans, + m: 3, + n: 5, + A: [][]float64{ + {4.1, 6.2, 8.1, 10, 11}, + {9.6, 3.5, 9.1, -3, -2}, + {10, 7, 3, -7, -4}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3, -2, -4, 8, -9}, + y: []float64{7, 2, 6, 8, -4, -5, 9}, + + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{-827.6, 2, 6, 543.2, -4, -5, 722}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{-93.2, 2, 6, -696.8, -4, -5, -1070}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{734, 2, 6, 543.2, -4, -5, -839.6}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{-1058, 2, 6, -696.8, -4, -5, -105.2}, + }, + }, + }, + { + Name: "M_lt_N_IncNot1_Trans", + tA: blas.Trans, + m: 3, + n: 5, + A: [][]float64{ + {4.1, 6.2, 8.1, 10, 11}, + {9.6, 3.5, 9.1, -3, -2}, + {10, 7, 3, -7, -4}, + }, + incX: 2, + incY: 3, + x: []float64{1, 15, 2, 150, 3}, + y: []float64{7, 2, 6, 8, -4, -5, 9, -4, -1, -9, 1, 1, 2}, + + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{384.4, 2, 6, 225.6, -4, -5, 228.4, -4, -1, -82, 1, 1, -52}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{290, 2, 6, 212.8, -4, -5, 310, -4, -1, 190, 1, 1, 188}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{-82, 2, 6, -184, -4, -5, 228.4, -4, -1, 327.6, 1, 1, 414.4}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{158, 2, 6, 88, -4, -5, 310, -4, -1, 314.8, 1, 1, 320}, + }, + }, + }, + { + Name: "M_eq_N_Lg_IncNot1_Trans", + tA: blas.Trans, + m: 7, + n: 7, + A: [][]float64{ + {4.1, 6.2, 8.1, 2.5, 3.3, 7.4, 9.3}, + {9.6, 3.5, 9.1, 1.2, 5.4, 4.8, 8.7}, + {10, 7, 3, 2, 4, 1, 12}, + {9.6, 3.5, 9.1, 1.2, 5.4, 4.8, 8.7}, + {4.1, 6.2, 8.1, 2.5, 3.3, 7.4, 9.3}, + {10, 7, 3, 2, 4, 1, 12}, + {9.6, 3.5, 9.1, 1.2, 5.4, 4.8, 8.7}, + }, + incX: 2, + incY: 3, + x: []float64{1, 105, 2, 150, 3, 200, 4, 300, 5, 600, 6, 700, 7}, + y: []float64{7, 200, 600, 8, -400, -500, 9, 600, 700, 10, 500, 400, 11, 200, 300, 12, 100, 200, 13, 300, 400, 14}, + + Subcases: []DgemvSubcase{ + { + alpha: 8, + beta: -6, + ans: []float64{1873.1999999999998, 200, 600, 1117.6, -400, -500, 1497.1999999999998, 600, 700, 328.8, 500, 400, 942, 200, 300, 854.4000000000001, 100, 200, 2137.2, 300, 400, 14}, + }, + { + mulXNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{1690.8, 200, 600, 1148, -400, -500, 1562.8, 600, 700, 357.6, 500, 400, 897.2, 200, 300, 998.4, 100, 200, 2103.6000000000004, 300, 400, 14}, + }, + { + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{2173.2, 200, 600, 878.4000000000001, -400, -500, 954, 600, 700, 328.8, 500, 400, 1485.1999999999998, 200, 300, 1093.6, 100, 200, 1837.1999999999998, 300, 400, 14}, + }, + { + mulXNeg1: true, + mulYNeg1: true, + alpha: 8, + beta: -6, + ans: []float64{2139.6, 200, 600, 1022.4, -400, -500, 909.2, 600, 700, 357.6, 500, 400, 1550.8, 200, 300, 1124, 100, 200, 1654.8, 300, 400, 14}, + }, + }, + }, + + // TODO: A can be longer than mxn. Add cases where it is longer + // TODO: x and y can also be longer. 
Add tests for these + // TODO: Add tests for dimension mismatch + // TODO: Add places with a "submatrix view", where lda != m +} + +type Dgemver interface { + Dgemv(tA blas.Transpose, m, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) +} + +func DgemvTest(t *testing.T, blasser Dgemver) { + for _, test := range DgemvCases { + for i, cas := range test.Subcases { + // Test that it passes with row-major + dgemvcomp(t, test, cas, i, blasser) + + // Test the bad inputs + dgemvbad(t, test, cas, i, blasser) + } + } +} + +func dgemvcomp(t *testing.T, test DgemvCase, cas DgemvSubcase, i int, blasser Dgemver) { + x := sliceCopy(test.x) + y := sliceCopy(test.y) + a := sliceOfSliceCopy(test.A) + aFlat := flatten(a) + + lda := test.n + + incX := test.incX + if cas.mulXNeg1 { + incX *= -1 + } + incY := test.incY + if cas.mulYNeg1 { + incY *= -1 + } + + f := func() { + blasser.Dgemv(test.tA, test.m, test.n, cas.alpha, aFlat, lda, x, incX, cas.beta, y, incY) + } + if panics(f) { + t.Errorf("Test %v case %v: unexpected panic", test.Name, i) + if throwPanic { + blasser.Dgemv(test.tA, test.m, test.n, cas.alpha, aFlat, lda, x, incX, cas.beta, y, incY) + } + return + } + // Check that x and a are unchanged + if !dSliceEqual(x, test.x) { + t.Errorf("Test %v, case %v: x modified during call", test.Name, i) + } + aFlat2 := flatten(sliceOfSliceCopy(test.A)) + if !dSliceEqual(aFlat2, aFlat) { + t.Errorf("Test %v, case %v: a modified during call", test.Name, i) + } + + // Check that the answer matches + if !dSliceTolEqual(cas.ans, y) { + t.Errorf("Test %v, case %v: answer mismatch: Expected %v, Found %v", test.Name, i, cas.ans, y) + } +} + +func dgemvbad(t *testing.T, test DgemvCase, cas DgemvSubcase, i int, blasser Dgemver) { + x := sliceCopy(test.x) + y := sliceCopy(test.y) + a := sliceOfSliceCopy(test.A) + aFlatRow := flatten(a) + ldaRow := test.n + + f := func() { + blasser.Dgemv('X', test.m, test.n, cas.alpha, aFlatRow, ldaRow, x, test.incX, cas.beta, y, test.incY) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for bad transpose", test.Name, i) + } + f = func() { + blasser.Dgemv(test.tA, -2, test.n, cas.alpha, aFlatRow, ldaRow, x, test.incX, cas.beta, y, test.incY) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for m negative", test.Name, i) + } + f = func() { + blasser.Dgemv(test.tA, test.m, -4, cas.alpha, aFlatRow, ldaRow, x, test.incX, cas.beta, y, test.incY) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for n negative", test.Name, i) + } + f = func() { + blasser.Dgemv(test.tA, test.m, test.n, cas.alpha, aFlatRow, ldaRow, x, 0, cas.beta, y, test.incY) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for incX zero", test.Name, i) + } + f = func() { + blasser.Dgemv(test.tA, test.m, test.n, cas.alpha, aFlatRow, ldaRow, x, test.incX, cas.beta, y, 0) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for incY zero", test.Name, i) + } + f = func() { + blasser.Dgemv(test.tA, test.m, test.n, cas.alpha, aFlatRow, ldaRow-1, x, test.incX, cas.beta, y, test.incY) + } + if !panics(f) { + t.Errorf("Test %v case %v: no panic for lda too small row major", test.Name, i) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dger.go b/vendor/gonum.org/v1/gonum/blas/testblas/dger.go new file mode 100644 index 0000000..99bf235 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dger.go @@ -0,0 +1,235 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "math" + "testing" +) + +type Dgerer interface { + Dger(m, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) +} + +func DgerTest(t *testing.T, blasser Dgerer) { + for _, test := range []struct { + name string + a [][]float64 + m int + n int + x []float64 + y []float64 + incX int + incY int + + want [][]float64 + }{ + { + name: "M gt N inc 1", + m: 5, + n: 3, + a: [][]float64{ + {1.3, 2.4, 3.5}, + {2.6, 2.8, 3.3}, + {-1.3, -4.3, -9.7}, + {8, 9, -10}, + {-12, -14, -6}, + }, + x: []float64{-2, -3, 0, 1, 2}, + y: []float64{-1.1, 5, 0}, + incX: 1, + incY: 1, + want: [][]float64{{3.5, -7.6, 3.5}, {5.9, -12.2, 3.3}, {-1.3, -4.3, -9.7}, {6.9, 14, -10}, {-14.2, -4, -6}}, + }, + { + name: "M eq N inc 1", + m: 3, + n: 3, + a: [][]float64{ + {1.3, 2.4, 3.5}, + {2.6, 2.8, 3.3}, + {-1.3, -4.3, -9.7}, + }, + x: []float64{-2, -3, 0}, + y: []float64{-1.1, 5, 0}, + incX: 1, + incY: 1, + want: [][]float64{{3.5, -7.6, 3.5}, {5.9, -12.2, 3.3}, {-1.3, -4.3, -9.7}}, + }, + + { + name: "M lt N inc 1", + m: 3, + n: 6, + a: [][]float64{ + {1.3, 2.4, 3.5, 4.8, 1.11, -9}, + {2.6, 2.8, 3.3, -3.4, 6.2, -8.7}, + {-1.3, -4.3, -9.7, -3.1, 8.9, 8.9}, + }, + x: []float64{-2, -3, 0}, + y: []float64{-1.1, 5, 0, 9, 19, 22}, + incX: 1, + incY: 1, + want: [][]float64{{3.5, -7.6, 3.5, -13.2, -36.89, -53}, {5.9, -12.2, 3.3, -30.4, -50.8, -74.7}, {-1.3, -4.3, -9.7, -3.1, 8.9, 8.9}}, + }, + { + name: "M gt N inc not 1", + m: 5, + n: 3, + a: [][]float64{ + {1.3, 2.4, 3.5}, + {2.6, 2.8, 3.3}, + {-1.3, -4.3, -9.7}, + {8, 9, -10}, + {-12, -14, -6}, + }, + x: []float64{-2, -3, 0, 1, 2, 6, 0, 9, 7}, + y: []float64{-1.1, 5, 0, 8, 7, -5, 7}, + incX: 2, + incY: 3, + want: [][]float64{{3.5, -13.6, -10.5}, {2.6, 2.8, 3.3}, {-3.5, 11.7, 4.3}, {8, 9, -10}, {-19.700000000000003, 42, 43}}, + }, + { + name: "M eq N inc not 1", + m: 3, + n: 3, + a: [][]float64{ + {1.3, 2.4, 3.5}, + {2.6, 2.8, 3.3}, + {-1.3, -4.3, -9.7}, + }, + x: []float64{-2, -3, 0, 8, 7, -9, 7, -6, 12, 6, 6, 6, -11}, + y: []float64{-1.1, 5, 0, 0, 9, 8, 6}, + incX: 4, + incY: 3, + want: [][]float64{{3.5, 2.4, -8.5}, {-5.1, 2.8, 45.3}, {-14.5, -4.3, 62.3}}, + }, + { + name: "M lt N inc not 1", + m: 3, + n: 6, + a: [][]float64{ + {1.3, 2.4, 3.5, 4.8, 1.11, -9}, + {2.6, 2.8, 3.3, -3.4, 6.2, -8.7}, + {-1.3, -4.3, -9.7, -3.1, 8.9, 8.9}, + }, + x: []float64{-2, -3, 0, 0, 8, 0, 9, -3}, + y: []float64{-1.1, 5, 0, 9, 19, 22, 11, -8.11, -9.22, 9.87, 7}, + incX: 3, + incY: 2, + want: [][]float64{{3.5, 2.4, -34.5, -17.2, 19.55, -23}, {2.6, 2.8, 3.3, -3.4, 6.2, -8.7}, {-11.2, -4.3, 161.3, 95.9, -74.08, 71.9}}, + }, + { + name: "Y NaN element", + m: 1, + n: 1, + a: [][]float64{{1.3}}, + x: []float64{1.3}, + y: []float64{math.NaN()}, + incX: 1, + incY: 1, + want: [][]float64{{math.NaN()}}, + }, + { + name: "M eq N large inc 1", + m: 7, + n: 7, + x: []float64{6.2, -5, 88.68, 43.4, -30.5, -40.2, 19.9}, + y: []float64{1.5, 21.7, -28.7, -11.9, 18.1, 3.1, 21}, + a: [][]float64{ + {-20.5, 17.1, -8.4, -23.8, 3.9, 7.7, 6.25}, + {2.9, -0.29, 25.6, -9.4, 36.5, 9.7, 2.3}, + {4.1, -34.1, 10.3, 4.5, -42.05, 9.4, 4}, + {19.2, 9.8, -32.7, 4.1, 4.4, -22.5, -7.8}, + {3.6, -24.5, 21.7, 8.6, -13.82, 38.05, -2.29}, + {39.4, -40.5, 7.9, -2.5, -7.7, 18.1, -25.5}, + {-18.5, 43.2, 2.1, 30.1, 3.02, -31.1, -7.6}, + }, + incX: 1, + incY: 1, + want: [][]float64{ + {-11.2, 151.64, -186.34, -97.58, 116.12, 26.92, 136.45}, + {-4.6, -108.79, 169.1, 
50.1, -54, -5.8, -102.7}, + {137.12, 1890.256, -2534.816, -1050.792, 1563.058, 284.308, 1866.28}, + {84.3, 951.58, -1278.28, -512.36, 789.94, 112.04, 903.6}, + {-42.15, -686.35, 897.05, 371.55, -565.87, -56.5, -642.79}, + {-20.9, -912.84, 1161.64, 475.88, -735.32, -106.52, -869.7}, + {11.35, 475.03, -569.03, -206.71, 363.21, 30.59, 410.3}, + }, + }, + { + name: "M eq N large inc not 1", + m: 7, + n: 7, + x: []float64{6.2, 100, 200, -5, 300, 400, 88.68, 100, 200, 43.4, 300, 400, -30.5, 100, 200, -40.2, 300, 400, 19.9}, + y: []float64{1.5, 100, 200, 300, 21.7, 100, 200, 300, -28.7, 100, 200, 300, -11.9, 100, 200, 300, 18.1, 100, 200, 300, 3.1, 100, 200, 300, 21}, + a: [][]float64{ + {-20.5, 17.1, -8.4, -23.8, 3.9, 7.7, 6.25}, + {2.9, -0.29, 25.6, -9.4, 36.5, 9.7, 2.3}, + {4.1, -34.1, 10.3, 4.5, -42.05, 9.4, 4}, + {19.2, 9.8, -32.7, 4.1, 4.4, -22.5, -7.8}, + {3.6, -24.5, 21.7, 8.6, -13.82, 38.05, -2.29}, + {39.4, -40.5, 7.9, -2.5, -7.7, 18.1, -25.5}, + {-18.5, 43.2, 2.1, 30.1, 3.02, -31.1, -7.6}, + }, + incX: 3, + incY: 4, + want: [][]float64{ + {-11.2, 151.64, -186.34, -97.58, 116.12, 26.92, 136.45}, + {-4.6, -108.79, 169.1, 50.1, -54, -5.8, -102.7}, + {137.12, 1890.256, -2534.816, -1050.792, 1563.058, 284.308, 1866.28}, + {84.3, 951.58, -1278.28, -512.36, 789.94, 112.04, 903.6}, + {-42.15, -686.35, 897.05, 371.55, -565.87, -56.5, -642.79}, + {-20.9, -912.84, 1161.64, 475.88, -735.32, -106.52, -869.7}, + {11.35, 475.03, -569.03, -206.71, 363.21, 30.59, 410.3}, + }, + }, + } { + // TODO: Add tests where a is longer + // TODO: Add panic tests + // TODO: Add negative increment tests + + x := sliceCopy(test.x) + y := sliceCopy(test.y) + + a := sliceOfSliceCopy(test.a) + + // Test with row major + alpha := 1.0 + aFlat := flatten(a) + blasser.Dger(test.m, test.n, alpha, x, test.incX, y, test.incY, aFlat, test.n) + ans := unflatten(aFlat, test.m, test.n) + dgercomp(t, x, test.x, y, test.y, ans, test.want, test.name+" row maj") + + // Test with different alpha + alpha = 4.0 + aFlat = flatten(a) + blasser.Dger(test.m, test.n, alpha, x, test.incX, y, test.incY, aFlat, test.n) + ans = unflatten(aFlat, test.m, test.n) + trueCopy := sliceOfSliceCopy(test.want) + for i := range trueCopy { + for j := range trueCopy[i] { + trueCopy[i][j] = alpha*(trueCopy[i][j]-a[i][j]) + a[i][j] + } + } + dgercomp(t, x, test.x, y, test.y, ans, trueCopy, test.name+" row maj alpha") + } +} + +func dgercomp(t *testing.T, x, xCopy, y, yCopy []float64, ans [][]float64, trueAns [][]float64, name string) { + if !dSliceEqual(x, xCopy) { + t.Errorf("case %v: x modified during call to dger\n%v\n%v", name, x, xCopy) + } + if !dSliceEqual(y, yCopy) { + t.Errorf("case %v: y modified during call to dger\n%v\n%v", name, y, yCopy) + } + + for i := range ans { + if !dSliceTolEqual(ans[i], trueAns[i]) { + t.Errorf("case %v: answer mismatch at %v.\nExpected %v,\nFound %v", name, i, trueAns, ans) + break + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/doc.go b/vendor/gonum.org/v1/gonum/blas/testblas/doc.go new file mode 100644 index 0000000..aae52ba --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testblas provides tests for blas implementations. 
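+//
+// The exported *Test and *Benchmark functions take a value implementing the
+// corresponding interface; an implementation package would typically call them
+// from its own tests, for example (illustrative sketch; impl stands for that
+// package's BLAS implementation):
+//
+//  func TestDgemv(t *testing.T) { testblas.DgemvTest(t, impl) }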
+package testblas // import "gonum.org/v1/gonum/blas/testblas" diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsbmv.go new file mode 100644 index 0000000..392f979 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsbmv.go @@ -0,0 +1,87 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dsbmver interface { + Dsbmv(ul blas.Uplo, n, k int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) +} + +func DsbmvTest(t *testing.T, blasser Dsbmver) { + for i, test := range []struct { + ul blas.Uplo + n int + k int + alpha float64 + beta float64 + a [][]float64 + x []float64 + y []float64 + + ans []float64 + }{ + { + ul: blas.Upper, + n: 4, + k: 2, + alpha: 2, + beta: 3, + a: [][]float64{ + {7, 8, 2, 0}, + {0, 8, 2, -3}, + {0, 0, 3, 6}, + {0, 0, 0, 9}, + }, + x: []float64{1, 2, 3, 4}, + y: []float64{-1, -2, -3, -4}, + ans: []float64{55, 30, 69, 84}, + }, + { + ul: blas.Lower, + n: 4, + k: 2, + alpha: 2, + beta: 3, + a: [][]float64{ + {7, 0, 0, 0}, + {8, 8, 0, 0}, + {2, 2, 3, 0}, + {0, -3, 6, 9}, + }, + x: []float64{1, 2, 3, 4}, + y: []float64{-1, -2, -3, -4}, + ans: []float64{55, 30, 69, 84}, + }, + } { + extra := 0 + var aFlat []float64 + if test.ul == blas.Upper { + aFlat = flattenBanded(test.a, test.k, 0) + } else { + aFlat = flattenBanded(test.a, 0, test.k) + } + incTest := func(incX, incY, extra int) { + xnew := makeIncremented(test.x, incX, extra) + ynew := makeIncremented(test.y, incY, extra) + ans := makeIncremented(test.ans, incY, extra) + blasser.Dsbmv(test.ul, test.n, test.k, test.alpha, aFlat, test.k+1, xnew, incX, test.beta, ynew, incY) + if !dSliceTolEqual(ans, ynew) { + t.Errorf("Case %v: Want %v, got %v", i, ans, ynew) + } + } + incTest(1, 1, extra) + incTest(1, 3, extra) + incTest(1, -3, extra) + incTest(2, 3, extra) + incTest(2, -3, extra) + incTest(3, 2, extra) + incTest(-3, 2, extra) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dspmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dspmv.go new file mode 100644 index 0000000..79fd07f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dspmv.go @@ -0,0 +1,77 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dspmver interface { + Dspmv(ul blas.Uplo, n int, alpha float64, ap []float64, x []float64, incX int, beta float64, y []float64, incY int) +} + +func DspmvTest(t *testing.T, blasser Dspmver) { + for i, test := range []struct { + ul blas.Uplo + n int + a [][]float64 + x []float64 + y []float64 + alpha float64 + beta float64 + ans []float64 + }{ + { + ul: blas.Upper, + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 8, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + y: []float64{6, 7, 8}, + alpha: 2.1, + beta: -3, + ans: []float64{137.4, 189, 240.6}, + }, + { + ul: blas.Lower, + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 8, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + y: []float64{6, 7, 8}, + alpha: 2.1, + beta: -3, + ans: []float64{137.4, 189, 240.6}, + }, + } { + incTest := func(incX, incY, extra int) { + x := makeIncremented(test.x, incX, extra) + y := makeIncremented(test.y, incY, extra) + aFlat := flattenTriangular(test.a, test.ul) + ans := makeIncremented(test.ans, incY, extra) + + blasser.Dspmv(test.ul, test.n, test.alpha, aFlat, x, incX, test.beta, y, incY) + if !floats.EqualApprox(ans, y, 1e-14) { + t.Errorf("Case %v, incX=%v, incY=%v: Want %v, got %v.", i, incX, incY, ans, y) + } + } + incTest(1, 1, 0) + incTest(2, 3, 0) + incTest(3, 2, 0) + incTest(-3, 2, 0) + incTest(-2, 4, 0) + incTest(2, -1, 0) + incTest(-3, -4, 3) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dspr.go b/vendor/gonum.org/v1/gonum/blas/testblas/dspr.go new file mode 100644 index 0000000..507246f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dspr.go @@ -0,0 +1,75 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dsprer interface { + Dspr(ul blas.Uplo, n int, alpha float64, x []float64, incX int, a []float64) +} + +func DsprTest(t *testing.T, blasser Dsprer) { + for i, test := range []struct { + ul blas.Uplo + n int + a [][]float64 + x []float64 + alpha float64 + ans [][]float64 + }{ + { + ul: blas.Upper, + n: 4, + a: [][]float64{ + {10, 2, 0, 1}, + {0, 1, 2, 3}, + {0, 0, 9, 15}, + {0, 0, 0, -6}, + }, + x: []float64{1, 2, 0, 5}, + alpha: 8, + ans: [][]float64{ + {18, 18, 0, 41}, + {0, 33, 2, 83}, + {0, 0, 9, 15}, + {0, 0, 0, 194}, + }, + }, + { + ul: blas.Lower, + n: 3, + a: [][]float64{ + {10, 2, 0}, + {4, 1, 2}, + {2, 7, 9}, + }, + x: []float64{3, 0, 5}, + alpha: 8, + ans: [][]float64{ + {82, 2, 0}, + {4, 1, 2}, + {122, 7, 209}, + }, + }, + } { + incTest := func(incX, extra int) { + xnew := makeIncremented(test.x, incX, extra) + aFlat := flattenTriangular(test.a, test.ul) + ans := flattenTriangular(test.ans, test.ul) + blasser.Dspr(test.ul, test.n, test.alpha, xnew, incX, aFlat) + if !dSliceTolEqual(aFlat, ans) { + t.Errorf("Case %v, idx %v: Want %v, got %v.", i, incX, ans, aFlat) + } + } + incTest(1, 3) + incTest(1, 0) + incTest(3, 2) + incTest(-2, 2) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dspr2.go b/vendor/gonum.org/v1/gonum/blas/testblas/dspr2.go new file mode 100644 index 0000000..f2fcc5b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dspr2.go @@ -0,0 +1,80 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dspr2er interface { + Dspr2(ul blas.Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64) +} + +func Dspr2Test(t *testing.T, blasser Dspr2er) { + for i, test := range []struct { + n int + a [][]float64 + ul blas.Uplo + x []float64 + y []float64 + alpha float64 + ans [][]float64 + }{ + { + n: 3, + a: [][]float64{ + {7, 2, 4}, + {0, 3, 5}, + {0, 0, 6}, + }, + x: []float64{2, 3, 4}, + y: []float64{5, 6, 7}, + alpha: 2, + ul: blas.Upper, + ans: [][]float64{ + {47, 56, 72}, + {0, 75, 95}, + {0, 0, 118}, + }, + }, + { + n: 3, + a: [][]float64{ + {7, 0, 0}, + {2, 3, 0}, + {4, 5, 6}, + }, + x: []float64{2, 3, 4}, + y: []float64{5, 6, 7}, + alpha: 2, + ul: blas.Lower, + ans: [][]float64{ + {47, 0, 0}, + {56, 75, 0}, + {72, 95, 118}, + }, + }, + } { + incTest := func(incX, incY, extra int) { + aFlat := flattenTriangular(test.a, test.ul) + x := makeIncremented(test.x, incX, extra) + y := makeIncremented(test.y, incY, extra) + blasser.Dspr2(test.ul, test.n, test.alpha, x, incX, y, incY, aFlat) + ansFlat := flattenTriangular(test.ans, test.ul) + if !floats.EqualApprox(aFlat, ansFlat, 1e-14) { + t.Errorf("Case %v, incX = %v, incY = %v. Want %v, got %v.", i, incX, incY, ansFlat, aFlat) + } + } + incTest(1, 1, 0) + incTest(-2, 1, 0) + incTest(-2, 3, 0) + incTest(2, -3, 0) + incTest(3, -2, 0) + incTest(-3, -4, 0) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsymm.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsymm.go new file mode 100644 index 0000000..bfe4bd0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsymm.go @@ -0,0 +1,281 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dsymmer interface { + Dsymm(s blas.Side, ul blas.Uplo, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) +} + +func DsymmTest(t *testing.T, blasser Dsymmer) { + for i, test := range []struct { + m int + n int + side blas.Side + ul blas.Uplo + a [][]float64 + b [][]float64 + c [][]float64 + alpha float64 + beta float64 + ans [][]float64 + }{ + { + side: blas.Left, + ul: blas.Upper, + m: 3, + n: 4, + a: [][]float64{ + {2, 3, 4}, + {0, 6, 7}, + {0, 0, 10}, + }, + b: [][]float64{ + {2, 3, 4, 8}, + {5, 6, 7, 15}, + {8, 9, 10, 20}, + }, + c: [][]float64{ + {8, 12, 2, 1}, + {9, 12, 9, 9}, + {12, 1, -1, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {126, 156, 144, 285}, + {211, 252, 275, 535}, + {282, 291, 327, 689}, + }, + }, + { + side: blas.Left, + ul: blas.Upper, + m: 4, + n: 3, + a: [][]float64{ + {2, 3, 4, 8}, + {0, 6, 7, 9}, + {0, 0, 10, 10}, + {0, 0, 0, 11}, + }, + b: [][]float64{ + {2, 3, 4}, + {5, 6, 7}, + {8, 9, 10}, + {2, 1, 1}, + }, + c: [][]float64{ + {8, 12, 2}, + {9, 12, 9}, + {12, 1, -1}, + {1, 9, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {158, 172, 160}, + {247, 270, 293}, + {322, 311, 347}, + {329, 385, 427}, + }, + }, + { + side: blas.Left, + ul: blas.Lower, + m: 3, + n: 4, + a: [][]float64{ + {2, 0, 0}, + {3, 6, 0}, + {4, 7, 10}, + }, + b: [][]float64{ + {2, 3, 4, 8}, + {5, 6, 7, 15}, + {8, 9, 10, 20}, + }, + c: [][]float64{ + {8, 12, 2, 1}, + {9, 12, 9, 9}, + {12, 1, -1, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {126, 156, 144, 285}, + {211, 252, 275, 535}, + {282, 291, 327, 689}, + }, + }, + { + side: blas.Left, + ul: blas.Lower, + m: 4, + n: 3, + a: [][]float64{ + {2, 0, 0, 0}, + {3, 6, 0, 0}, + {4, 7, 10, 0}, + {8, 9, 10, 11}, + }, + b: [][]float64{ + {2, 3, 4}, + {5, 6, 7}, + {8, 9, 10}, + {2, 1, 1}, + }, + c: [][]float64{ + {8, 12, 2}, + {9, 12, 9}, + {12, 1, -1}, + {1, 9, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {158, 172, 160}, + {247, 270, 293}, + {322, 311, 347}, + {329, 385, 427}, + }, + }, + { + side: blas.Right, + ul: blas.Upper, + m: 3, + n: 4, + a: [][]float64{ + {2, 0, 0, 0}, + {3, 6, 0, 0}, + {4, 7, 10, 0}, + {3, 4, 5, 6}, + }, + b: [][]float64{ + {2, 3, 4, 9}, + {5, 6, 7, -3}, + {8, 9, 10, -2}, + }, + c: [][]float64{ + {8, 12, 2, 10}, + {9, 12, 9, 10}, + {12, 1, -1, 10}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {32, 72, 86, 138}, + {47, 108, 167, -6}, + {68, 111, 197, 6}, + }, + }, + { + side: blas.Right, + ul: blas.Upper, + m: 4, + n: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 6, 0}, + {4, 7, 10}, + }, + b: [][]float64{ + {2, 3, 4}, + {5, 6, 7}, + {8, 9, 10}, + {2, 1, 1}, + }, + c: [][]float64{ + {8, 12, 2}, + {9, 12, 9}, + {12, 1, -1}, + {1, 9, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {32, 72, 86}, + {47, 108, 167}, + {68, 111, 197}, + {11, 39, 35}, + }, + }, + { + side: blas.Right, + ul: blas.Lower, + m: 3, + n: 4, + a: [][]float64{ + {2, 0, 0, 0}, + {3, 6, 0, 0}, + {4, 7, 10, 0}, + {3, 4, 5, 6}, + }, + b: [][]float64{ + {2, 3, 4, 2}, + {5, 6, 7, 1}, + {8, 9, 10, 1}, + }, + c: [][]float64{ + {8, 12, 2, 1}, + {9, 12, 9, 9}, + {12, 1, -1, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {94, 156, 164, 103}, + {145, 244, 301, 187}, + {208, 307, 397, 247}, + }, + }, + { + side: blas.Right, + ul: blas.Lower, + m: 4, + n: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 6, 0}, + {4, 7, 10}, + }, + b: [][]float64{ + {2, 3, 4}, + {5, 6, 
7}, + {8, 9, 10}, + {2, 1, 1}, + }, + c: [][]float64{ + {8, 12, 2}, + {9, 12, 9}, + {12, 1, -1}, + {1, 9, 5}, + }, + alpha: 2, + beta: 3, + ans: [][]float64{ + {82, 140, 144}, + {139, 236, 291}, + {202, 299, 387}, + {25, 65, 65}, + }, + }, + } { + aFlat := flatten(test.a) + bFlat := flatten(test.b) + cFlat := flatten(test.c) + ansFlat := flatten(test.ans) + blasser.Dsymm(test.side, test.ul, test.m, test.n, test.alpha, aFlat, len(test.a[0]), bFlat, test.n, test.beta, cFlat, test.n) + if !floats.EqualApprox(cFlat, ansFlat, 1e-14) { + t.Errorf("Case %v: Want %v, got %v.", i, ansFlat, cFlat) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsymv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsymv.go new file mode 100644 index 0000000..4cfc456 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsymv.go @@ -0,0 +1,77 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dsymver interface { + Dsymv(ul blas.Uplo, n int, alpha float64, a []float64, lda int, x []float64, incX int, beta float64, y []float64, incY int) +} + +func DsymvTest(t *testing.T, blasser Dsymver) { + for i, test := range []struct { + ul blas.Uplo + n int + a [][]float64 + x []float64 + y []float64 + alpha float64 + beta float64 + ans []float64 + }{ + { + ul: blas.Upper, + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 8, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + y: []float64{6, 7, 8}, + alpha: 2.1, + beta: -3, + ans: []float64{137.4, 189, 240.6}, + }, + { + ul: blas.Lower, + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 8, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + y: []float64{6, 7, 8}, + alpha: 2.1, + beta: -3, + ans: []float64{137.4, 189, 240.6}, + }, + } { + incTest := func(incX, incY, extra int) { + x := makeIncremented(test.x, incX, extra) + y := makeIncremented(test.y, incY, extra) + aFlat := flatten(test.a) + ans := makeIncremented(test.ans, incY, extra) + + blasser.Dsymv(test.ul, test.n, test.alpha, aFlat, test.n, x, incX, test.beta, y, incY) + if !floats.EqualApprox(ans, y, 1e-14) { + t.Errorf("Case %v, incX=%v, incY=%v: Want %v, got %v.", i, incX, incY, ans, y) + } + } + incTest(1, 1, 0) + incTest(2, 3, 0) + incTest(3, 2, 0) + incTest(-3, 2, 0) + incTest(-2, 4, 0) + incTest(2, -1, 0) + incTest(-3, -4, 3) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsyr.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr.go new file mode 100644 index 0000000..a0bb075 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr.go @@ -0,0 +1,76 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dsyrer interface { + Dsyr(ul blas.Uplo, n int, alpha float64, x []float64, incX int, a []float64, lda int) +} + +func DsyrTest(t *testing.T, blasser Dsyrer) { + for i, test := range []struct { + ul blas.Uplo + n int + a [][]float64 + x []float64 + alpha float64 + ans [][]float64 + }{ + { + ul: blas.Upper, + n: 4, + a: [][]float64{ + {10, 2, 0, 1}, + {0, 1, 2, 3}, + {0, 0, 9, 15}, + {0, 0, 0, -6}, + }, + x: []float64{1, 2, 0, 5}, + alpha: 8, + ans: [][]float64{ + {18, 18, 0, 41}, + {0, 33, 2, 83}, + {0, 0, 9, 15}, + {0, 0, 0, 194}, + }, + }, + { + ul: blas.Lower, + n: 3, + a: [][]float64{ + {10, 2, 0}, + {4, 1, 2}, + {2, 7, 9}, + }, + x: []float64{3, 0, 5}, + alpha: 8, + ans: [][]float64{ + {82, 2, 0}, + {4, 1, 2}, + {122, 7, 209}, + }, + }, + } { + incTest := func(incX, extra int) { + xnew := makeIncremented(test.x, incX, extra) + aFlat := flatten(test.a) + ans := flatten(test.ans) + lda := test.n + blasser.Dsyr(test.ul, test.n, test.alpha, xnew, incX, aFlat, lda) + if !dSliceTolEqual(aFlat, ans) { + t.Errorf("Case %v, idx %v: Want %v, got %v.", i, incX, ans, aFlat) + } + } + incTest(1, 3) + incTest(1, 0) + incTest(3, 2) + incTest(-2, 2) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2.go new file mode 100644 index 0000000..e68591f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2.go @@ -0,0 +1,80 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dsyr2er interface { + Dsyr2(ul blas.Uplo, n int, alpha float64, x []float64, incX int, y []float64, incY int, a []float64, lda int) +} + +func Dsyr2Test(t *testing.T, blasser Dsyr2er) { + for i, test := range []struct { + n int + a [][]float64 + ul blas.Uplo + x []float64 + y []float64 + alpha float64 + ans [][]float64 + }{ + { + n: 3, + a: [][]float64{ + {7, 2, 4}, + {0, 3, 5}, + {0, 0, 6}, + }, + x: []float64{2, 3, 4}, + y: []float64{5, 6, 7}, + alpha: 2, + ul: blas.Upper, + ans: [][]float64{ + {47, 56, 72}, + {0, 75, 95}, + {0, 0, 118}, + }, + }, + { + n: 3, + a: [][]float64{ + {7, 0, 0}, + {2, 3, 0}, + {4, 5, 6}, + }, + x: []float64{2, 3, 4}, + y: []float64{5, 6, 7}, + alpha: 2, + ul: blas.Lower, + ans: [][]float64{ + {47, 0, 0}, + {56, 75, 0}, + {72, 95, 118}, + }, + }, + } { + incTest := func(incX, incY, extra int) { + aFlat := flatten(test.a) + x := makeIncremented(test.x, incX, extra) + y := makeIncremented(test.y, incY, extra) + blasser.Dsyr2(test.ul, test.n, test.alpha, x, incX, y, incY, aFlat, test.n) + ansFlat := flatten(test.ans) + if !floats.EqualApprox(aFlat, ansFlat, 1e-14) { + t.Errorf("Case %v, incX = %v, incY = %v. Want %v, got %v.", i, incX, incY, ansFlat, aFlat) + } + } + incTest(1, 1, 0) + incTest(-2, 1, 0) + incTest(-2, 3, 0) + incTest(2, -3, 0) + incTest(3, -2, 0) + incTest(-3, -4, 0) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2k.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2k.go new file mode 100644 index 0000000..991936f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsyr2k.go @@ -0,0 +1,205 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dsyr2ker interface { + Dsyr2k(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float64, a []float64, lda int, b []float64, ldb int, beta float64, c []float64, ldc int) +} + +func Dsyr2kTest(t *testing.T, blasser Dsyr2ker) { + for i, test := range []struct { + ul blas.Uplo + tA blas.Transpose + n int + k int + alpha float64 + a [][]float64 + b [][]float64 + c [][]float64 + beta float64 + ans [][]float64 + }{ + { + ul: blas.Upper, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 0, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + b: [][]float64{ + {7, 8}, + {9, 10}, + {11, 12}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 5, 6}, + {0, 0, 9}, + }, + beta: 2, + ans: [][]float64{ + {2, 4, 6}, + {0, 10, 12}, + {0, 0, 18}, + }, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 0, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + b: [][]float64{ + {7, 8}, + {9, 10}, + {11, 12}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 3, 0}, + {4, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {2, 0, 0}, + {4, 6, 0}, + {8, 10, 12}, + }, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + b: [][]float64{ + {7, 8}, + {9, 10}, + {11, 12}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + beta: 2, + ans: [][]float64{ + {140, 250, 360}, + {0, 410, 568}, + {0, 0, 774}, + }, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + b: [][]float64{ + {7, 8}, + {9, 10}, + {11, 12}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {140, 0, 0}, + {250, 410, 0}, + {360, 568, 774}, + }, + }, + { + ul: blas.Upper, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + b: [][]float64{ + {7, 9, 11}, + {8, 10, 12}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + beta: 2, + ans: [][]float64{ + {140, 250, 360}, + {0, 410, 568}, + {0, 0, 774}, + }, + }, + { + ul: blas.Lower, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + b: [][]float64{ + {7, 9, 11}, + {8, 10, 12}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {140, 0, 0}, + {250, 410, 0}, + {360, 568, 774}, + }, + }, + } { + aFlat := flatten(test.a) + bFlat := flatten(test.b) + cFlat := flatten(test.c) + ansFlat := flatten(test.ans) + blasser.Dsyr2k(test.ul, test.tA, test.n, test.k, test.alpha, aFlat, len(test.a[0]), bFlat, len(test.b[0]), test.beta, cFlat, len(test.c[0])) + if !floats.EqualApprox(ansFlat, cFlat, 1e-14) { + t.Errorf("Case %v. Want %v, got %v.", i, ansFlat, cFlat) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dsyrk.go b/vendor/gonum.org/v1/gonum/blas/testblas/dsyrk.go new file mode 100644 index 0000000..9027339 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dsyrk.go @@ -0,0 +1,219 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dsyker interface { + Dsyrk(ul blas.Uplo, tA blas.Transpose, n, k int, alpha float64, a []float64, lda int, beta float64, c []float64, ldc int) +} + +func DsyrkTest(t *testing.T, blasser Dsyker) { + for i, test := range []struct { + ul blas.Uplo + tA blas.Transpose + n int + k int + alpha float64 + a [][]float64 + c [][]float64 + beta float64 + ans [][]float64 + }{ + { + ul: blas.Upper, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 0, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 5, 6}, + {0, 0, 9}, + }, + beta: 2, + ans: [][]float64{ + {2, 4, 6}, + {0, 10, 12}, + {0, 0, 18}, + }, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 0, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 3, 0}, + {4, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {2, 0, 0}, + {4, 6, 0}, + {8, 10, 12}, + }, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + beta: 2, + ans: [][]float64{ + {17, 37, 57}, + {0, 83, 127}, + {0, 0, 195}, + }, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 2}, + {3, 4}, + {5, 6}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {17, 0, 0}, + {37, 83, 0}, + {57, 127, 195}, + }, + }, + { + ul: blas.Upper, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + beta: 2, + ans: [][]float64{ + {17, 37, 57}, + {0, 83, 127}, + {0, 0, 195}, + }, + }, + { + ul: blas.Lower, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + beta: 2, + ans: [][]float64{ + {17, 0, 0}, + {37, 83, 0}, + {57, 127, 195}, + }, + }, + { + ul: blas.Upper, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + c: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + beta: 0, + ans: [][]float64{ + {15, 33, 51}, + {0, 75, 117}, + {0, 0, 183}, + }, + }, + { + ul: blas.Lower, + tA: blas.Trans, + n: 3, + k: 2, + alpha: 3, + a: [][]float64{ + {1, 3, 5}, + {2, 4, 6}, + }, + c: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + beta: 0, + ans: [][]float64{ + {15, 0, 0}, + {33, 75, 0}, + {51, 117, 183}, + }, + }, + } { + aFlat := flatten(test.a) + cFlat := flatten(test.c) + ansFlat := flatten(test.ans) + blasser.Dsyrk(test.ul, test.tA, test.n, test.k, test.alpha, aFlat, len(test.a[0]), test.beta, cFlat, len(test.c[0])) + if !floats.EqualApprox(ansFlat, cFlat, 1e-14) { + t.Errorf("Case %v. Want %v, got %v.", i, ansFlat, cFlat) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtbmv.go new file mode 100644 index 0000000..33ede7b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtbmv.go @@ -0,0 +1,127 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dtbmver interface { + Dtbmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float64, lda int, x []float64, incX int) +} + +func DtbmvTest(t *testing.T, blasser Dtbmver) { + for i, test := range []struct { + ul blas.Uplo + tA blas.Transpose + d blas.Diag + n int + k int + a [][]float64 + x []float64 + ans []float64 + }{ + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + n: 3, + k: 1, + a: [][]float64{ + {1, 2, 0}, + {0, 1, 4}, + {0, 0, 1}, + }, + x: []float64{2, 3, 4}, + ans: []float64{8, 19, 4}, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 1, + a: [][]float64{ + {1, 3, 0, 0, 0}, + {0, 6, 7, 0, 0}, + {0, 0, 2, 1, 0}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + ans: []float64{7, 33, 10, 63, -5}, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 1, + a: [][]float64{ + {7, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {0, 7, 2, 0, 0}, + {0, 0, 1, 12, 0}, + {0, 0, 0, 3, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + ans: []float64{7, 15, 20, 51, 7}, + }, + { + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {7, 3, 9, 0, 0}, + {0, 6, 7, 10, 0}, + {0, 0, 2, 1, 11}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + ans: []float64{7, 15, 29, 71, 40}, + }, + { + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {7, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {9, 7, 2, 0, 0}, + {0, 10, 1, 12, 0}, + {0, 0, 11, 3, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + ans: []float64{40, 73, 65, 63, -5}, + }, + } { + extra := 0 + var aFlat []float64 + if test.ul == blas.Upper { + aFlat = flattenBanded(test.a, test.k, 0) + } else { + aFlat = flattenBanded(test.a, 0, test.k) + } + incTest := func(incX, extra int) { + xnew := makeIncremented(test.x, incX, extra) + ans := makeIncremented(test.ans, incX, extra) + lda := test.k + 1 + blasser.Dtbmv(test.ul, test.tA, test.d, test.n, test.k, aFlat, lda, xnew, incX) + if !dSliceTolEqual(ans, xnew) { + t.Errorf("Case %v, Inc %v: Want %v, got %v", i, incX, ans, xnew) + } + } + incTest(1, extra) + incTest(3, extra) + incTest(-2, extra) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtbsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtbsv.go new file mode 100644 index 0000000..59d2327 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtbsv.go @@ -0,0 +1,259 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dtbsver interface { + Dtbsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float64, lda int, x []float64, incX int) + Dtrsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) +} + +func DtbsvTest(t *testing.T, blasser Dtbsver) { + for i, test := range []struct { + ul blas.Uplo + tA blas.Transpose + d blas.Diag + n, k int + a [][]float64 + x []float64 + incX int + ans []float64 + }{ + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 1, + a: [][]float64{ + {1, 3, 0, 0, 0}, + {0, 6, 7, 0, 0}, + {0, 0, 2, 1, 0}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + incX: 1, + ans: []float64{2.479166666666667, -0.493055555555556, 0.708333333333333, 1.583333333333333, -5.000000000000000}, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 3, 5, 0, 0}, + {0, 6, 7, 5, 0}, + {0, 0, 2, 1, 5}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + incX: 1, + ans: []float64{-15.854166666666664, -16.395833333333336, 13.208333333333334, 1.583333333333333, -5.000000000000000}, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 1, + a: [][]float64{ + {1, 3, 0, 0, 0}, + {0, 6, 7, 0, 0}, + {0, 0, 2, 1, 0}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, -101, 2, -201, 3, -301, 4, -401, 5, -501, -601, -701}, + incX: 2, + ans: []float64{2.479166666666667, -101, -0.493055555555556, -201, 0.708333333333333, -301, 1.583333333333333, -401, -5.000000000000000, -501, -601, -701}, + }, + { + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 3, 5, 0, 0}, + {0, 6, 7, 5, 0}, + {0, 0, 2, 1, 5}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, -101, 2, -201, 3, -301, 4, -401, 5, -501, -601, -701}, + incX: 2, + ans: []float64{-15.854166666666664, -101, -16.395833333333336, -201, 13.208333333333334, -301, 1.583333333333333, -401, -5.000000000000000, -501, -601, -701}, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {5, 7, 2, 0, 0}, + {0, 5, 1, 12, 0}, + {0, 0, 5, 3, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + incX: 1, + ans: []float64{1, -0.166666666666667, -0.416666666666667, 0.437500000000000, -5.770833333333334}, + }, + { + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {5, 7, 2, 0, 0}, + {0, 5, 1, 12, 0}, + {0, 0, 5, 3, -1}, + }, + x: []float64{1, -101, 2, -201, 3, -301, 4, -401, 5, -501, -601, -701}, + incX: 2, + ans: []float64{1, -101, -0.166666666666667, -201, -0.416666666666667, -301, 0.437500000000000, -401, -5.770833333333334, -501, -601, -701}, + }, + { + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 3, 5, 0, 0}, + {0, 6, 7, 5, 0}, + {0, 0, 2, 1, 5}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + incX: 1, + ans: []float64{1, -0.166666666666667, -0.416666666666667, 0.437500000000000, -5.770833333333334}, + }, + { + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 3, 5, 0, 0}, + {0, 6, 7, 5, 0}, + {0, 0, 2, 1, 5}, + {0, 0, 0, 12, 3}, + {0, 0, 0, 0, -1}, + }, + x: []float64{1, -101, 2, -201, 3, -301, 4, -401, 5, -501, -601, -701}, + incX: 2, + ans: []float64{1, 
-101, -0.166666666666667, -201, -0.416666666666667, -301, 0.437500000000000, -401, -5.770833333333334, -501, -601, -701}, + }, + { + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {5, 7, 2, 0, 0}, + {0, 5, 1, 12, 0}, + {0, 0, 5, 3, -1}, + }, + x: []float64{1, 2, 3, 4, 5}, + incX: 1, + ans: []float64{-15.854166666666664, -16.395833333333336, 13.208333333333334, 1.583333333333333, -5.000000000000000}, + }, + { + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + n: 5, + k: 2, + a: [][]float64{ + {1, 0, 0, 0, 0}, + {3, 6, 0, 0, 0}, + {5, 7, 2, 0, 0}, + {0, 5, 1, 12, 0}, + {0, 0, 5, 3, -1}, + }, + x: []float64{1, -101, 2, -201, 3, -301, 4, -401, 5, -501, -601, -701}, + incX: 2, + ans: []float64{-15.854166666666664, -101, -16.395833333333336, -201, 13.208333333333334, -301, 1.583333333333333, -401, -5.000000000000000, -501, -601, -701}, + }, + } { + var aFlat []float64 + if test.ul == blas.Upper { + aFlat = flattenBanded(test.a, test.k, 0) + } else { + aFlat = flattenBanded(test.a, 0, test.k) + } + xCopy := sliceCopy(test.x) + // TODO: Have tests where the banded matrix is constructed explicitly + // to allow testing for lda =! k+1 + blasser.Dtbsv(test.ul, test.tA, test.d, test.n, test.k, aFlat, test.k+1, xCopy, test.incX) + if !dSliceTolEqual(test.ans, xCopy) { + t.Errorf("Case %v: Want %v, got %v", i, test.ans, xCopy) + } + } + + /* + // TODO: Uncomment when Dtrsv is fixed + // Compare with dense for larger matrices + for _, ul := range [...]blas.Uplo{blas.Upper, blas.Lower} { + for _, tA := range [...]blas.Transpose{blas.NoTrans, blas.Trans} { + for _, n := range [...]int{7, 8, 11} { + for _, d := range [...]blas.Diag{blas.NonUnit, blas.Unit} { + for _, k := range [...]int{0, 1, 3} { + for _, incX := range [...]int{1, 3} { + a := make([][]float64, n) + for i := range a { + a[i] = make([]float64, n) + for j := range a[i] { + a[i][j] = rand.Float64() + } + } + x := make([]float64, n) + for i := range x { + x[i] = rand.Float64() + } + extra := 3 + xinc := makeIncremented(x, incX, extra) + bandX := sliceCopy(xinc) + var aFlatBand []float64 + if ul == blas.Upper { + aFlatBand = flattenBanded(a, k, 0) + } else { + aFlatBand = flattenBanded(a, 0, k) + } + blasser.Dtbsv(ul, tA, d, n, k, aFlatBand, k+1, bandX, incX) + + aFlatDense := flatten(a) + denseX := sliceCopy(xinc) + blasser.Dtrsv(ul, tA, d, n, aFlatDense, n, denseX, incX) + if !dSliceTolEqual(denseX, bandX) { + t.Errorf("Case %v: dense banded mismatch") + } + } + } + } + } + } + } + */ +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtpmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtpmv.go new file mode 100644 index 0000000..0303c68 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtpmv.go @@ -0,0 +1,133 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtpmver interface { + Dtpmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float64, x []float64, incX int) +} + +func DtpmvTest(t *testing.T, blasser Dtpmver) { + for i, test := range []struct { + n int + a [][]float64 + x []float64 + d blas.Diag + ul blas.Uplo + tA blas.Transpose + ans []float64 + }{ + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{74, 86, 65}, + }, + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{62, 54, 5}, + }, + { + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 9, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Lower, + tA: blas.NoTrans, + ans: []float64{15, 54, 126}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {6, 1, 0}, + {7, 10, 1}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Lower, + tA: blas.NoTrans, + ans: []float64{3, 22, 66}, + }, + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Upper, + tA: blas.Trans, + ans: []float64{15, 54, 126}, + }, + { + n: 3, + a: [][]float64{ + {1, 6, 7}, + {0, 1, 10}, + {0, 0, 1}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Upper, + tA: blas.Trans, + ans: []float64{3, 22, 66}, + }, + { + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 9, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Lower, + tA: blas.Trans, + ans: []float64{74, 86, 65}, + }, + } { + incTest := func(incX, extra int) { + aFlat := flattenTriangular(test.a, test.ul) + x := makeIncremented(test.x, incX, extra) + blasser.Dtpmv(test.ul, test.tA, test.d, test.n, aFlat, x, incX) + ans := makeIncremented(test.ans, incX, extra) + if !floats.EqualApprox(x, ans, 1e-14) { + t.Errorf("Case %v, idx %v: Want %v, got %v.", i, incX, ans, x) + } + } + incTest(1, 0) + incTest(-3, 3) + incTest(4, 3) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtpsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtpsv.go new file mode 100644 index 0000000..eb23450 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtpsv.go @@ -0,0 +1,148 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtpsver interface { + Dtpsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, ap []float64, x []float64, incX int) +} + +func DtpsvTest(t *testing.T, blasser Dtpsver) { + for i, test := range []struct { + n int + a [][]float64 + ul blas.Uplo + tA blas.Transpose + d blas.Diag + x []float64 + ans []float64 + }{ + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{4.15625, -0.890625, 0.875}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 1, 15}, + {0, 0, 1}, + }, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{182, -99, 7}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{5, -0.5, -0.0625}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{5, -4, 52}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{5, -0.5, -0.0625}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{5, -4, 52}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{4.15625, -0.890625, 0.875}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 1, 0}, + {3, 15, 1}, + }, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{182, -99, 7}, + }, + } { + incTest := func(incX, extra int) { + aFlat := flattenTriangular(test.a, test.ul) + x := makeIncremented(test.x, incX, extra) + blasser.Dtpsv(test.ul, test.tA, test.d, test.n, aFlat, x, incX) + ans := makeIncremented(test.ans, incX, extra) + if !floats.EqualApprox(x, ans, 1e-14) { + t.Errorf("Case %v, incX = %v: Want %v, got %v.", i, incX, ans, x) + } + } + incTest(1, 0) + incTest(-2, 0) + incTest(3, 0) + incTest(-3, 8) + incTest(4, 2) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtrmm.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmm.go new file mode 100644 index 0000000..e52a7c5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmm.go @@ -0,0 +1,810 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtrmmer interface { + Dtrmm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, alpha float64, a []float64, lda int, b []float64, ldb int) +} + +func DtrmmTest(t *testing.T, blasser Dtrmmer) { + for i, test := range []struct { + s blas.Side + ul blas.Uplo + tA blas.Transpose + d blas.Diag + m int + n int + alpha float64 + a [][]float64 + b [][]float64 + ans [][]float64 + }{ + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3, 4}, + {0, 5, 6, 7}, + {0, 0, 8, 9}, + {0, 0, 0, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {320, 340, 360}, + {588, 624, 660}, + {598, 632, 666}, + {380, 400, 420}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2}, + {0, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {72, 78, 84}, + {130, 140, 150}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3, 4}, + {0, 5, 6, 7}, + {0, 0, 8, 9}, + {0, 0, 0, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {320, 340, 360}, + {484, 512, 540}, + {374, 394, 414}, + {38, 40, 42}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2}, + {0, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {72, 78, 84}, + {26, 28, 30}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0, 0}, + {2, 5, 0, 0}, + {3, 6, 8, 0}, + {4, 7, 9, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 22, 24}, + {170, 184, 198}, + {472, 506, 540}, + {930, 990, 1050}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0}, + {2, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 22, 24}, + {170, 184, 198}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0, 0}, + {2, 5, 0, 0}, + {3, 6, 8, 0}, + {4, 7, 9, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 22, 24}, + {66, 72, 78}, + {248, 268, 288}, + {588, 630, 672}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0}, + {2, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 22, 24}, + {66, 72, 78}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3, 4}, + {0, 5, 6, 7}, + {0, 0, 8, 9}, + {0, 0, 0, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 22, 24}, + {170, 184, 198}, + {472, 506, 540}, + {930, 990, 1050}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + 
m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2}, + {0, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 22, 24}, + {170, 184, 198}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3, 4}, + {0, 5, 6, 7}, + {0, 0, 8, 9}, + {0, 0, 0, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 22, 24}, + {66, 72, 78}, + {248, 268, 288}, + {588, 630, 672}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2}, + {0, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 22, 24}, + {66, 72, 78}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0, 0}, + {2, 5, 0, 0}, + {3, 6, 8, 0}, + {4, 7, 9, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {320, 340, 360}, + {588, 624, 660}, + {598, 632, 666}, + {380, 400, 420}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0}, + {2, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {72, 78, 84}, + {130, 140, 150}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0, 0}, + {2, 5, 0, 0}, + {3, 6, 8, 0}, + {4, 7, 9, 10}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {320, 340, 360}, + {484, 512, 540}, + {374, 394, 414}, + {38, 40, 42}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0}, + {2, 5}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {72, 78, 84}, + {26, 28, 30}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 128, 314}, + {26, 164, 398}, + {32, 200, 482}, + {38, 236, 566}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 128, 314}, + {26, 164, 398}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 62, 194}, + {26, 80, 248}, + {32, 98, 302}, + {38, 116, 356}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 62, 194}, + {26, 80, 248}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 
6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {136, 208, 144}, + {172, 262, 180}, + {208, 316, 216}, + {244, 370, 252}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {136, 208, 144}, + {172, 262, 180}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {136, 142, 24}, + {172, 178, 30}, + {208, 214, 36}, + {244, 250, 42}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {136, 142, 24}, + {172, 178, 30}, + }, + }, + + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {136, 208, 144}, + {172, 262, 180}, + {208, 316, 216}, + {244, 370, 252}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {136, 208, 144}, + {172, 262, 180}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {136, 142, 24}, + {172, 178, 30}, + {208, 214, 36}, + {244, 250, 42}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {136, 142, 24}, + {172, 178, 30}, + }, + }, + + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 128, 314}, + {26, 164, 398}, + {32, 200, 482}, + {38, 236, 566}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 128, 314}, + {26, 164, 398}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + ans: [][]float64{ + {20, 62, 194}, + {26, 80, 248}, + {32, 98, 302}, + {38, 116, 356}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 2, + a: [][]float64{ + {1, 0, 0}, + {2, 4, 
0}, + {3, 5, 6}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + ans: [][]float64{ + {20, 62, 194}, + {26, 80, 248}, + }, + }, + } { + aFlat := flatten(test.a) + bFlat := flatten(test.b) + ansFlat := flatten(test.ans) + blasser.Dtrmm(test.s, test.ul, test.tA, test.d, test.m, test.n, test.alpha, aFlat, len(test.a[0]), bFlat, len(test.b[0])) + if !floats.EqualApprox(ansFlat, bFlat, 1e-14) { + t.Errorf("Case %v. Want %v, got %v.", i, ansFlat, bFlat) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtrmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmv.go new file mode 100644 index 0000000..a9e5aa9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmv.go @@ -0,0 +1,151 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtrmver interface { + Dtrmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) +} + +func DtrmvTest(t *testing.T, blasser Dtrmver) { + for i, test := range []struct { + n int + a [][]float64 + x []float64 + d blas.Diag + ul blas.Uplo + tA blas.Transpose + ans []float64 + }{ + { + n: 1, + a: [][]float64{{5}}, + x: []float64{2}, + d: blas.NonUnit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{10}, + }, + { + n: 1, + a: [][]float64{{5}}, + x: []float64{2}, + d: blas.Unit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{2}, + }, + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{74, 86, 65}, + }, + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Upper, + tA: blas.NoTrans, + ans: []float64{62, 54, 5}, + }, + { + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 9, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Lower, + tA: blas.NoTrans, + ans: []float64{15, 54, 126}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {6, 1, 0}, + {7, 10, 1}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Lower, + tA: blas.NoTrans, + ans: []float64{3, 22, 66}, + }, + { + n: 3, + a: [][]float64{ + {5, 6, 7}, + {0, 9, 10}, + {0, 0, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Upper, + tA: blas.Trans, + ans: []float64{15, 54, 126}, + }, + { + n: 3, + a: [][]float64{ + {1, 6, 7}, + {0, 1, 10}, + {0, 0, 1}, + }, + x: []float64{3, 4, 5}, + d: blas.Unit, + ul: blas.Upper, + tA: blas.Trans, + ans: []float64{3, 22, 66}, + }, + { + n: 3, + a: [][]float64{ + {5, 0, 0}, + {6, 9, 0}, + {7, 10, 13}, + }, + x: []float64{3, 4, 5}, + d: blas.NonUnit, + ul: blas.Lower, + tA: blas.Trans, + ans: []float64{74, 86, 65}, + }, + } { + incTest := func(incX, extra int) { + aFlat := flatten(test.a) + x := makeIncremented(test.x, incX, extra) + blasser.Dtrmv(test.ul, test.tA, test.d, test.n, aFlat, test.n, x, incX) + ans := makeIncremented(test.ans, incX, extra) + if !floats.EqualApprox(x, ans, 1e-14) { + t.Errorf("Case %v, idx %v: Want %v, got %v.", i, incX, ans, x) + } + } + incTest(1, 3) + incTest(-3, 3) + incTest(4, 3) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtrmvbench.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmvbench.go new file mode 100644 index 0000000..f78e306 --- /dev/null +++ 
b/vendor/gonum.org/v1/gonum/blas/testblas/dtrmvbench.go @@ -0,0 +1,31 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +func DtrmvBenchmark(b *testing.B, dtrmv Dtrmver, n, lda, incX int, ul blas.Uplo, tA blas.Transpose, d blas.Diag) { + rnd := rand.New(rand.NewSource(0)) + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.Float64() + } + + x := make([]float64, n*incX) + for i := range x { + x[i] = rnd.Float64() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + dtrmv.Dtrmv(ul, tA, d, n, a, lda, x, incX) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtrsm.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtrsm.go new file mode 100644 index 0000000..b404d31 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtrsm.go @@ -0,0 +1,870 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtrsmer interface { + Dtrsm(s blas.Side, ul blas.Uplo, tA blas.Transpose, d blas.Diag, m, n int, + alpha float64, a []float64, lda int, b []float64, ldb int) +} + +func DtrsmTest(t *testing.T, impl Dtrsmer) { + rnd := rand.New(rand.NewSource(1)) + for i, test := range []struct { + s blas.Side + ul blas.Uplo + tA blas.Transpose + d blas.Diag + m int + n int + alpha float64 + a [][]float64 + b [][]float64 + + want [][]float64 + }{ + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 3, + n: 2, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 5}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {1, 3.4}, + {-0.5, -0.5}, + {2, 3.2}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 3, + n: 2, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 5}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {60, 96}, + {-42, -66}, + {10, 16}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 3, + n: 4, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 5}, + }, + b: [][]float64{ + {3, 6, 2, 9}, + {4, 7, 1, 3}, + {5, 8, 9, 10}, + }, + want: [][]float64{ + {1, 3.4, 1.2, 13}, + {-0.5, -0.5, -4, -3.5}, + {2, 3.2, 3.6, 4}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 3, + n: 4, + alpha: 2, + a: [][]float64{ + {1, 2, 3}, + {0, 4, 5}, + {0, 0, 5}, + }, + b: [][]float64{ + {3, 6, 2, 9}, + {4, 7, 1, 3}, + {5, 8, 9, 10}, + }, + want: [][]float64{ + {60, 96, 126, 146}, + {-42, -66, -88, -94}, + {10, 16, 18, 20}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 7}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {4.5, 9}, + {-0.375, -1.5}, + {-0.75, -12.0 / 7}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 7}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {9, 18}, + {-15, 
-33}, + {60, 132}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 7}, + }, + b: [][]float64{ + {3, 6, 2, 9}, + {4, 7, 1, 3}, + {5, 8, 9, 10}, + }, + want: [][]float64{ + {4.5, 9, 3, 13.5}, + {-0.375, -1.5, -1.5, -63.0 / 8}, + {-0.75, -12.0 / 7, 3, 39.0 / 28}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 7}, + }, + b: [][]float64{ + {3, 6, 2, 9}, + {4, 7, 1, 3}, + {5, 8, 9, 10}, + }, + want: [][]float64{ + {9, 18, 6, 27}, + {-15, -33, -15, -72}, + {60, 132, 87, 327}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {4.5, 9}, + {-0.30, -1.2}, + {-6.0 / 35, -24.0 / 35}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {9, 18}, + {-15, -33}, + {69, 150}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {3, 6, 6, 7}, + {4, 7, 8, 9}, + {5, 8, 10, 11}, + }, + want: [][]float64{ + {4.5, 9, 9, 10.5}, + {-0.3, -1.2, -0.6, -0.9}, + {-6.0 / 35, -24.0 / 35, -12.0 / 35, -18.0 / 35}, + }, + }, + { + s: blas.Left, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {3, 6, 6, 7}, + {4, 7, 8, 9}, + {5, 8, 10, 11}, + }, + want: [][]float64{ + {9, 18, 18, 21}, + {-15, -33, -30, -36}, + {69, 150, 138, 165}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 8}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {-0.46875, 0.375}, + {0.1875, 0.75}, + {1.875, 3}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 3, + n: 2, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 8}, + }, + b: [][]float64{ + {3, 6}, + {4, 7}, + {5, 8}, + }, + want: [][]float64{ + {168, 267}, + {-78, -123}, + {15, 24}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 8}, + }, + b: [][]float64{ + {3, 6, 2, 3}, + {4, 7, 4, 5}, + {5, 8, 6, 7}, + }, + want: [][]float64{ + {-0.46875, 0.375, -2.0625, -1.78125}, + {0.1875, 0.75, -0.375, -0.1875}, + {1.875, 3, 2.25, 2.625}, + }, + }, + { + s: blas.Left, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 3, + n: 4, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 4, 0}, + {5, 6, 8}, + }, + b: [][]float64{ + {3, 6, 2, 3}, + {4, 7, 4, 5}, + {5, 8, 6, 7}, + }, + want: [][]float64{ + {168, 267, 204, 237}, + {-78, -123, -96, -111}, + {15, 24, 18, 21}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, 
+ }, + want: [][]float64{ + {15, -2.4, -48.0 / 35}, + {19.5, -3.3, -66.0 / 35}, + {24, -4.2, -2.4}, + {28.5, -5.1, -102.0 / 35}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {30, -57, 258}, + {39, -75, 339}, + {48, -93, 420}, + {57, -111, 501}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 7}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {15, -2.4, -48.0 / 35}, + {19.5, -3.3, -66.0 / 35}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {30, -57, 258}, + {39, -75, 339}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {4.2, 1.2, 4.5}, + {5.775, 1.65, 5.625}, + {7.35, 2.1, 6.75}, + {8.925, 2.55, 7.875}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {435, -183, 36}, + {543, -228, 45}, + {651, -273, 54}, + {759, -318, 63}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {4.2, 1.2, 4.5}, + {5.775, 1.65, 5.625}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {435, -183, 36}, + {543, -228, 45}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {4.2, 1.2, 4.5}, + {5.775, 1.65, 5.625}, + {7.35, 2.1, 6.75}, + {8.925, 2.55, 7.875}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {435, -183, 36}, + {543, -228, 45}, + {651, -273, 54}, + {759, -318, 63}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {4.2, 1.2, 4.5}, + {5.775, 1.65, 5.625}, + }, + }, + { + s: blas.Right, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + 
{2, 3, 4}, + {0, 5, 6}, + {0, 0, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {435, -183, 36}, + {543, -228, 45}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {15, -2.4, -1.2}, + {19.5, -3.3, -1.65}, + {24, -4.2, -2.1}, + {28.5, -5.1, -2.55}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 4, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + {16, 17, 18}, + {19, 20, 21}, + }, + want: [][]float64{ + {30, -57, 258}, + {39, -75, 339}, + {48, -93, 420}, + {57, -111, 501}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {15, -2.4, -1.2}, + {19.5, -3.3, -1.65}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 3, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {30, -57, 258}, + {39, -75, 339}, + }, + }, + { + s: blas.Right, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + m: 2, + n: 3, + alpha: 0, + a: [][]float64{ + {2, 0, 0}, + {3, 5, 0}, + {4, 6, 8}, + }, + b: [][]float64{ + {10, 11, 12}, + {13, 14, 15}, + }, + want: [][]float64{ + {0, 0, 0}, + {0, 0, 0}, + }, + }, + } { + m := test.m + n := test.n + na := m + if test.s == blas.Right { + na = n + } + for _, lda := range []int{na, na + 3} { + for _, ldb := range []int{n, n + 5} { + a := make([]float64, na*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + for i := 0; i < na; i++ { + for j := 0; j < na; j++ { + a[i*lda+j] = test.a[i][j] + } + } + + b := make([]float64, m*ldb) + for i := range b { + b[i] = rnd.NormFloat64() + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + b[i*ldb+j] = test.b[i][j] + } + } + + impl.Dtrsm(test.s, test.ul, test.tA, test.d, test.m, test.n, test.alpha, a, lda, b, ldb) + + want := make([]float64, len(b)) + copy(want, b) + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + want[i*ldb+j] = test.want[i][j] + } + } + if !floats.EqualApprox(want, b, 1e-13) { + t.Errorf("Case %v: Want %v, got %v.", i, want, b) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtrsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtrsv.go new file mode 100644 index 0000000..a0e7c34 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtrsv.go @@ -0,0 +1,148 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dtrsver interface { + Dtrsv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) +} + +func DtrsvTest(t *testing.T, blasser Dtrsver) { + for i, test := range []struct { + n int + a [][]float64 + ul blas.Uplo + tA blas.Transpose + d blas.Diag + x []float64 + ans []float64 + }{ + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{4.15625, -0.890625, 0.875}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 1, 15}, + {0, 0, 1}, + }, + ul: blas.Upper, + tA: blas.NoTrans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{182, -99, 7}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{5, -0.5, -0.0625}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.NoTrans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{5, -4, 52}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.Trans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{5, -0.5, -0.0625}, + }, + { + n: 3, + a: [][]float64{ + {1, 2, 3}, + {0, 8, 15}, + {0, 0, 8}, + }, + ul: blas.Upper, + tA: blas.Trans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{5, -4, 52}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 8, 0}, + {3, 15, 8}, + }, + ul: blas.Lower, + tA: blas.Trans, + d: blas.NonUnit, + x: []float64{5, 6, 7}, + ans: []float64{4.15625, -0.890625, 0.875}, + }, + { + n: 3, + a: [][]float64{ + {1, 0, 0}, + {2, 1, 0}, + {3, 15, 1}, + }, + ul: blas.Lower, + tA: blas.Trans, + d: blas.Unit, + x: []float64{5, 6, 7}, + ans: []float64{182, -99, 7}, + }, + } { + incTest := func(incX, extra int) { + aFlat := flatten(test.a) + x := makeIncremented(test.x, incX, extra) + blasser.Dtrsv(test.ul, test.tA, test.d, test.n, aFlat, test.n, x, incX) + ans := makeIncremented(test.ans, incX, extra) + if !floats.EqualApprox(x, ans, 1e-14) { + t.Errorf("Case %v, incX = %v: Want %v, got %v.", i, incX, ans, x) + } + } + incTest(1, 0) + incTest(-2, 0) + incTest(3, 0) + incTest(-3, 8) + incTest(4, 2) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dtxmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/dtxmv.go new file mode 100644 index 0000000..2cf68ac --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dtxmv.go @@ -0,0 +1,149 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dtxmver interface { + Dtrmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, lda int, x []float64, incX int) + Dtbmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n, k int, a []float64, lda int, x []float64, incX int) + Dtpmv(ul blas.Uplo, tA blas.Transpose, d blas.Diag, n int, a []float64, x []float64, incX int) +} + +type vec struct { + data []float64 + inc int +} + +var cases = []struct { + n, k int + ul blas.Uplo + d blas.Diag + ldab int + tr, tb, tp []float64 + ins []vec + solNoTrans []float64 + solTrans []float64 +}{ + { + n: 3, + k: 1, + ul: blas.Upper, + d: blas.NonUnit, + tr: []float64{1, 2, 0, 0, 3, 4, 0, 0, 5}, + tb: []float64{1, 2, 3, 4, 5, 0}, + ldab: 2, + tp: []float64{1, 2, 0, 3, 4, 5}, + ins: []vec{ + {[]float64{2, 3, 4}, 1}, + {[]float64{2, 1, 3, 1, 4}, 2}, + {[]float64{4, 1, 3, 1, 2}, -2}, + }, + solNoTrans: []float64{8, 25, 20}, + solTrans: []float64{2, 13, 32}, + }, + { + n: 3, + k: 1, + ul: blas.Upper, + d: blas.Unit, + tr: []float64{1, 2, 0, 0, 3, 4, 0, 0, 5}, + tb: []float64{1, 2, 3, 4, 5, 0}, + ldab: 2, + tp: []float64{1, 2, 0, 3, 4, 5}, + ins: []vec{ + {[]float64{2, 3, 4}, 1}, + {[]float64{2, 1, 3, 1, 4}, 2}, + {[]float64{4, 1, 3, 1, 2}, -2}, + }, + solNoTrans: []float64{8, 19, 4}, + solTrans: []float64{2, 7, 16}, + }, + { + n: 3, + k: 1, + ul: blas.Lower, + d: blas.NonUnit, + tr: []float64{1, 0, 0, 2, 3, 0, 0, 4, 5}, + tb: []float64{0, 1, 2, 3, 4, 5}, + ldab: 2, + tp: []float64{1, 2, 3, 0, 4, 5}, + ins: []vec{ + {[]float64{2, 3, 4}, 1}, + {[]float64{2, 1, 3, 1, 4}, 2}, + {[]float64{4, 1, 3, 1, 2}, -2}, + }, + solNoTrans: []float64{2, 13, 32}, + solTrans: []float64{8, 25, 20}, + }, + { + n: 3, + k: 1, + ul: blas.Lower, + d: blas.Unit, + tr: []float64{1, 0, 0, 2, 3, 0, 0, 4, 5}, + tb: []float64{0, 1, 2, 3, 4, 5}, + ldab: 2, + tp: []float64{1, 2, 3, 0, 4, 5}, + ins: []vec{ + {[]float64{2, 3, 4}, 1}, + {[]float64{2, 1, 3, 1, 4}, 2}, + {[]float64{4, 1, 3, 1, 2}, -2}, + }, + solNoTrans: []float64{2, 7, 16}, + solTrans: []float64{8, 19, 4}, + }, +} + +func DtxmvTest(t *testing.T, blasser Dtxmver) { + + for nc, c := range cases { + for nx, x := range c.ins { + in := make([]float64, len(x.data)) + copy(in, x.data) + blasser.Dtrmv(c.ul, blas.NoTrans, c.d, c.n, c.tr, c.n, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solNoTrans, 1) { + t.Error("Wrong Dtrmv result for: NoTrans in Case:", nc, "input:", nx) + } + + in = make([]float64, len(x.data)) + copy(in, x.data) + blasser.Dtrmv(c.ul, blas.Trans, c.d, c.n, c.tr, c.n, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solTrans, 1) { + t.Error("Wrong Dtrmv result for: Trans in Case:", nc, "input:", nx) + } + in = make([]float64, len(x.data)) + copy(in, x.data) + blasser.Dtbmv(c.ul, blas.NoTrans, c.d, c.n, c.k, c.tb, c.ldab, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solNoTrans, 1) { + t.Error("Wrong Dtbmv result for: NoTrans in Case:", nc, "input:", nx) + } + + in = make([]float64, len(x.data)) + copy(in, x.data) + blasser.Dtbmv(c.ul, blas.Trans, c.d, c.n, c.k, c.tb, c.ldab, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solTrans, 1) { + t.Error("Wrong Dtbmv result for: Trans in Case:", nc, "input:", nx) + } + in = make([]float64, len(x.data)) + copy(in, x.data) + blasser.Dtpmv(c.ul, blas.NoTrans, c.d, c.n, c.tp, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solNoTrans, 1) { + t.Error("Wrong Dtpmv result for: NoTrans in Case:", nc, "input:", nx) + } + + in = make([]float64, 
len(x.data)) + copy(in, x.data) + blasser.Dtpmv(c.ul, blas.Trans, c.d, c.n, c.tp, in, x.inc) + if !dStridedSliceTolEqual(c.n, in, x.inc, c.solTrans, 1) { + t.Error("Wrong Dtpmv result for: Trans in Case:", nc, "input:", nx) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dzasum.go b/vendor/gonum.org/v1/gonum/blas/testblas/dzasum.go new file mode 100644 index 0000000..fd2adb1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dzasum.go @@ -0,0 +1,58 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dzasumer interface { + Dzasum(n int, x []complex128, incX int) float64 +} + +func DzasumTest(t *testing.T, impl Dzasumer) { + const tol = 1e-14 + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 50, 100} { + for _, incX := range []int{-5, 1, 2, 10} { + aincX := abs(incX) + var x []complex128 + if n > 0 { + x = make([]complex128, (n-1)*aincX+1) + } + for i := range x { + x[i] = znan + } + for i := 0; i < n; i++ { + re := float64(2*i + 1) + if rnd.Intn(2) == 0 { + re *= -1 + } + im := float64(2 * (i + 1)) + if rnd.Intn(2) == 0 { + im *= -1 + } + x[i*aincX] = complex(re, im) + } + + want := float64(n * (2*n + 1)) + got := impl.Dzasum(n, x, incX) + + if incX < 0 { + if got != 0 { + t.Errorf("Case n=%v,incX=%v: non-zero result when incX < 0. got %v", n, incX, got) + } + continue + } + if !floats.EqualWithinAbsOrRel(got, want, tol, tol) { + t.Errorf("Case n=%v,incX=%v: unexpected result. want %v, got %v", n, incX, want, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/dznrm2.go b/vendor/gonum.org/v1/gonum/blas/testblas/dznrm2.go new file mode 100644 index 0000000..8b0dcab --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/dznrm2.go @@ -0,0 +1,137 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dznrm2er interface { + Dznrm2(n int, x []complex128, incX int) float64 + Dnrm2er +} + +func Dznrm2Test(t *testing.T, impl Dznrm2er) { + tol := 1e-12 + for tc, test := range []struct { + x []complex128 + want float64 + }{ + { + x: nil, + want: 0, + }, + { + x: []complex128{1 + 2i}, + want: 2.2360679774998, + }, + { + x: []complex128{1 + 2i, 3 + 4i}, + want: 5.4772255750517, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + want: 9.5393920141695, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + want: 1.4282856857086e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + want: 1.9621416870349e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + want: 2.5495097567964e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + want: 3.1859064644148e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + want: 3.8678159211627e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i}, + want: 4.5923850012820e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + want: 5.3572380943915e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + want: 6.1603571325046e+01, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + want: 70, + }, + } { + n := len(test.x) + for _, incX := range []int{-10, -1, 1, 2, 9, 17} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + got := impl.Dznrm2(n, x, incX) + + prefix := fmt.Sprintf("Case %v (n=%v,incX=%v):", tc, n, incX) + + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + + if incX < 0 { + if got != 0 { + t.Errorf("%v: non-zero result when incX < 0. got %v", prefix, got) + } + continue + } + + if !floats.EqualWithinAbsOrRel(test.want, got, tol, tol) { + t.Errorf("%v: unexpected result. want %v, got %v", prefix, test.want, got) + } + } + } + + tol = 1e-14 + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{10, 50, 100} { + for _, incX := range []int{1, 2, 10} { + re := make([]float64, n) + for i := range re { + re[i] = rnd.NormFloat64() + } + im := make([]float64, n) + for i := range im { + im[i] = rnd.NormFloat64() + } + want := math.Hypot(impl.Dnrm2(n, re, 1), impl.Dnrm2(n, im, 1)) + + x := make([]complex128, (n-1)*incX+1) + for i := range x { + x[i] = znan + } + for i := range re { + x[i*incX] = complex(re[i], im[i]) + } + + got := impl.Dznrm2(n, x, incX) + + if !floats.EqualWithinAbsOrRel(want, got, tol, tol) { + t.Errorf("Case n=%v,incX=%v: unexpected result using Dnrm2. want %v, got %v", n, incX, want, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/izamax.go b/vendor/gonum.org/v1/gonum/blas/testblas/izamax.go new file mode 100644 index 0000000..85d8d94 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/izamax.go @@ -0,0 +1,47 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" +) + +type Izamaxer interface { + Izamax(n int, x []complex128, incX int) int +} + +func IzamaxTest(t *testing.T, impl Izamaxer) { + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 50, 100} { + for _, incX := range []int{-5, 1, 2, 10} { + aincX := abs(incX) + var x []complex128 + if n > 0 { + x = make([]complex128, (n-1)*aincX+1) + } + for i := range x { + x[i] = znan + } + for i := 0; i < n; i++ { + re := 2*rnd.Float64() - 1 + im := 2*rnd.Float64() - 1 + x[i*aincX] = complex(re, im) + } + + want := -1 + if incX > 0 && n > 0 { + want = rnd.Intn(n) + x[want*incX] = 10 + 10i + } + got := impl.Izamax(n, x, incX) + + if got != want { + t.Errorf("Case n=%v,incX=%v: unexpected result. want %v, got %v", n, incX, want, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/level1double.go b/vendor/gonum.org/v1/gonum/blas/testblas/level1double.go new file mode 100644 index 0000000..d36a60c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/level1double.go @@ -0,0 +1,2194 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "math" + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type DoubleOneVectorCase struct { + Name string + X []float64 + Incx int + N int + Panic bool + Dasum float64 + Dnrm2 float64 + Idamax int + DscalCases []DScalCase +} + +type DScalCase struct { + Alpha float64 + Ans []float64 + Name string +} + +var DoubleOneVectorCases = []DoubleOneVectorCase{ + { + Name: "AllPositive", + X: []float64{6, 5, 4, 2, 6}, + Incx: 1, + N: 5, + Panic: false, + Dasum: 23, + Dnrm2: 10.81665382639196787935766380241148783875388972153573863813135, + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: 0, + Ans: []float64{0, 0, 0, 0, 0}, + }, + { + Alpha: 1, + Ans: []float64{6, 5, 4, 2, 6}, + }, + { + Alpha: -2, + Ans: []float64{-12, -10, -8, -4, -12}, + }, + }, + }, + { + Name: "LeadingZero", + X: []float64{0, 1}, + Incx: 1, + N: 2, + Panic: false, + Dasum: 1, + Dnrm2: 1, + Idamax: 1, + DscalCases: []DScalCase{ + { + Alpha: 0, + Ans: []float64{0, 0}, + }, + { + Alpha: 1, + Ans: []float64{0, 1}, + }, + { + Alpha: -2, + Ans: []float64{0, -2}, + }, + }, + }, + { + Name: "MaxInMiddle", + X: []float64{6, 5, 9, 0, 6}, + Incx: 1, + N: 5, + Panic: false, + Dasum: 26, + Dnrm2: 13.34166406412633371248943627250846646911846482744007727141318, + Idamax: 2, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-12, -10, -18, 0, -12}, + }, + }, + }, + { + Name: "MaxAtEnd", + X: []float64{6, 5, -9, 0, 10}, + Incx: 1, + N: 5, + Panic: false, + Dasum: 30, + Dnrm2: 15.55634918610404553681857596630667886426639062914642880494347, + Idamax: 4, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-12, -10, 18, 0, -20}, + }, + }, + }, + { + Name: "AllNegative", + X: []float64{-6, -5, -4, -2, -6}, + Incx: 1, + N: 5, + Panic: false, + Dasum: 23, + Dnrm2: 10.81665382639196787935766380241148783875388972153573863813135, + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, 10, 8, 4, 12}, + }, + }, + }, + { + Name: "AllMixed", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 1, + N: 5, + Panic: false, + Dasum: 23, + Dnrm2: 10.81665382639196787935766380241148783875388972153573863813135, + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, -10, 
-8, 4, 12}, + }, + }, + }, + { + Name: "ZeroN", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 1, + N: 0, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "OneN", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 1, + N: 1, + Panic: false, + Dasum: 6, + Dnrm2: 6, + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "PositiveExactInc", + X: []float64{-6, 5, 10, -2, -5}, + Incx: 2, + N: 3, + Panic: false, + Dasum: 21, + Dnrm2: 12.68857754044952038019377274608948979173952662752515253090272, + Idamax: 1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, 5, -20, -2, 10}, + }, + }, + }, + { + Name: "PositiveOffInc", + X: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + Incx: 3, + N: 3, + Panic: false, + Dasum: 18, + Dnrm2: 11.83215956619923208513465658312323409683100246158868064575943, + Idamax: 2, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, 5, 4, 4, -6, 8, -20, 11}, + }, + }, + }, + { + Name: "PositiveShortInc", + X: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + Incx: 3, + N: 2, + Panic: false, + Dasum: 8, + Dnrm2: 6.324555320336758663997787088865437067439110278650433653715009, + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{12, 5, 4, 4, -6, 8, 10, 11}, + }, + }, + }, + { + Name: "NegativeInc", + X: []float64{-6, 5, 4, -2, -6}, + Incx: -1, + N: 5, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "NegativeExactInc", + X: []float64{-6, 5, 4, -2, -6}, + Incx: -2, + N: 3, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "NegativeOffInc", + X: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + Incx: -3, + N: 2, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + }, + }, + }, + { + Name: "NegativeShortInc", + X: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + Incx: -3, + N: 2, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6, 8, 10, 11}, + }, + }, + }, + { + Name: "NegativeN", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 2, + N: -5, + Panic: true, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "ZeroInc", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 0, + N: 5, + Panic: true, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "OutOfBounds", + X: []float64{-6, 5, 4, -2, -6}, + Incx: 2, + N: 6, + Panic: true, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "NegativeOutOfBounds", + X: []float64{-6, 5, 4, -2, -6}, + Incx: -2, + N: 6, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-6, 5, 4, -2, -6}, + }, + }, + }, + { + Name: "NaN", + X: []float64{math.NaN(), 2.0}, + Incx: 1, + N: 2, + Panic: false, + Dasum: math.NaN(), + Dnrm2: math.NaN(), + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{math.NaN(), -4.0}, + }, + { + Alpha: 0, + Ans: []float64{0, 0}, + }, + }, + }, + { + Name: "NaNInc", + X: []float64{math.NaN(), math.NaN(), 2.0}, + Incx: 2, + N: 2, + 
Panic: false, + Dasum: math.NaN(), + Dnrm2: math.NaN(), + Idamax: 0, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{math.NaN(), math.NaN(), -4.0}, + }, + { + Alpha: 0, + Ans: []float64{0, math.NaN(), 0}, + }, + }, + }, + { + Name: "Empty", + X: []float64{}, + Incx: 1, + N: 0, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{}, + }, + { + Alpha: 0, + Ans: []float64{}, + }, + }, + }, + { + Name: "EmptyZeroInc", + X: []float64{}, + Incx: 0, + N: 0, + Panic: true, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{}, + }, + { + Alpha: 0, + Ans: []float64{}, + }, + }, + }, + { + Name: "EmptyReverse", + X: []float64{}, + Incx: -1, + N: 0, + Panic: false, + Dasum: 0, + Dnrm2: 0, + Idamax: -1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{}, + }, + { + Alpha: 0, + Ans: []float64{}, + }, + }, + }, + { + Name: "MultiInf", + X: []float64{5, math.Inf(1), math.Inf(-1), 8, 9}, + Incx: 1, + N: 5, + Panic: false, + Dasum: math.Inf(1), + Dnrm2: math.Inf(1), + Idamax: 1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-10, math.Inf(-1), math.Inf(1), -16, -18}, + }, + { + Alpha: 0, + Ans: []float64{0, 0, 0, 0, 0}, + }, + }, + }, + { + Name: "NaNInf", + X: []float64{5, math.NaN(), math.Inf(-1), 8, 9}, + Incx: 1, + N: 5, + Panic: false, + Dasum: math.NaN(), + Dnrm2: math.NaN(), + Idamax: 2, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-10, math.NaN(), math.Inf(1), -16, -18}, + }, + { + Alpha: 0, + Ans: []float64{0, 0, 0, 0, 0}, + }, + }, + }, + { + Name: "InfNaN", + X: []float64{5, math.Inf(1), math.NaN(), 8, 9}, + Incx: 1, + N: 5, + Panic: false, + Dasum: math.NaN(), + Dnrm2: math.NaN(), + Idamax: 1, + DscalCases: []DScalCase{ + { + Alpha: -2, + Ans: []float64{-10, math.Inf(-1), math.NaN(), -16, -18}, + }, + { + Alpha: 0, + Ans: []float64{0, 0, 0, 0, 0}, + }, + }, + }, +} + +type DoubleTwoVectorCase struct { + Name string + X []float64 + Y []float64 + XTmp []float64 + YTmp []float64 + Incx int + Incy int + N int + Panic bool + // For Daxpy + DaxpyCases []DaxpyCase + DdotAns float64 + DswapAns DTwoVecAnswer + DcopyAns DTwoVecAnswer + DrotCases []DrotCase + DrotmCases []DrotmCase +} + +type DaxpyCase struct { + Alpha float64 + Ans []float64 +} + +type DrotCase struct { + C float64 + S float64 + XAns []float64 + YAns []float64 +} + +type DrotmCase struct { + P blas.DrotmParams + XAns []float64 + YAns []float64 + Name string +} + +type DTwoVecAnswer struct { + X []float64 + Y []float64 +} + +var DoubleTwoVectorCases = []DoubleTwoVectorCase{ + { + Name: "UnitaryInc", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0}, + Incx: 1, + Incy: 1, + N: 6, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 1, + Ans: []float64{18, 13, -2, 10, 20, 4}, + }, + { + Alpha: 2, + Ans: []float64{28, 28, -8, 13, 34, 11}, + }, + { + Alpha: -3, + Ans: []float64{-22, -47, 22, -2, -36, -24}, + }, + { + Alpha: 0, + Ans: []float64{8, -2, 4, 7, 6, -3}, + }, + }, + DdotAns: 110, + DswapAns: DTwoVecAnswer{ + X: []float64{8, -2, 4, 7, 6, -3}, + Y: []float64{10, 15, -6, 3, 14, 7}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{10, 15, -6, 3, 14, 7}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(0), + S: math.Sin(0), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3}, + }, + { + C: math.Cos(25 * math.Pi / 
180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{12.444023964292095, 12.749380282068351, -3.7473736752571014, 5.677251193294846, 15.224018588957296, 5.076299724034451}, + YAns: []float64{3.024279678886205, -8.151889500183792, 6.160940718590796, 5.076299724034451, -0.4788089421498931, -5.677251193294846}, + }, + { + C: math.Cos(0.5 * math.Pi), + S: math.Sin(0.5 * math.Pi), + XAns: []float64{8, -2, 4, 7, 6, -3}, + YAns: []float64{-10, -15, 6, -3, -14, -7}, + }, + { + C: math.Cos(math.Pi), + S: math.Sin(math.Pi), + XAns: []float64{-10, -15, 6, -3, -14, -7}, + YAns: []float64{-8, 2, -4, -7, -6, 3}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Identity, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3}, + Name: "Neg2Flag", + }, + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8}, + Name: "Neg1Flag", + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{9.2, 15.2, -6.4, 2.3, 13.4, 7.3}, + YAns: []float64{9, -0.5, 3.4, 7.3, 7.4, -2.3}, + Name: "ZeroFlag", + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{13, 5.5, 1, 8.5, 13, 0.5}, + YAns: []float64{-4.4, -16.4, 8.8, 1.9, -9.8, -9.1}, + Name: "OneFlag", + }, + }, + }, + { + Name: "UnitaryIncLong", + X: []float64{10, 15, -6, 3, 14, 7, 8, -9, 10}, + Y: []float64{8, -2, 4, 7, 6, -3, 7, -6}, + XTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 1, + Incy: 1, + N: 6, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 1, + Ans: []float64{18, 13, -2, 10, 20, 4, 7, -6}, + }, + { + Alpha: 2, + Ans: []float64{28, 28, -8, 13, 34, 11, 7, -6}, + }, + { + Alpha: -3, + Ans: []float64{-22, -47, 22, -2, -36, -24, 7, -6}, + }, + { + Alpha: 0, + Ans: []float64{8, -2, 4, 7, 6, -3, 7, -6}, + }, + }, + DdotAns: 110, + DswapAns: DTwoVecAnswer{ + X: []float64{8, -2, 4, 7, 6, -3, 8, -9, 10}, + Y: []float64{10, 15, -6, 3, 14, 7, 7, -6}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7, 8, -9, 10}, + Y: []float64{10, 15, -6, 3, 14, 7, 7, -6}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(0), + S: math.Sin(0), + XAns: []float64{10, 15, -6, 3, 14, 7, 8, -9, 10}, + YAns: []float64{8, -2, 4, 7, 6, -3, 7, -6}, + }, + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{12.444023964292095, 12.749380282068351, -3.7473736752571014, 5.677251193294846, 15.224018588957296, 5.076299724034451, 8, -9, 10}, + YAns: []float64{3.024279678886205, -8.151889500183792, 6.160940718590796, 5.076299724034451, -0.4788089421498931, -5.677251193294846, 7, -6}, + }, + { + C: math.Cos(0.5 * math.Pi), + S: math.Sin(0.5 * math.Pi), + XAns: []float64{8, -2, 4, 7, 6, -3, 8, -9, 10}, + YAns: []float64{-10, -15, 6, -3, -14, -7, 7, -6}, + }, + { + C: math.Cos(math.Pi), + S: math.Sin(math.Pi), + XAns: []float64{-10, -15, 6, -3, -14, -7, 8, -9, 10}, + YAns: []float64{-8, 2, -4, -7, -6, 3, 7, -6}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Identity, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{10, 15, -6, 3, 14, 7, 8, -9, 10}, + YAns: []float64{8, -2, 4, 7, 6, -3, 7, -6}, + Name: "Neg2Flag", + }, + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + 
XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6, 8, -9, 10}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8, 7, -6}, + Name: "Neg1Flag", + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{9.2, 15.2, -6.4, 2.3, 13.4, 7.3, 8, -9, 10}, + YAns: []float64{9, -0.5, 3.4, 7.3, 7.4, -2.3, 7, -6}, + Name: "ZeroFlag", + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{13, 5.5, 1, 8.5, 13, 0.5, 8, -9, 10}, + YAns: []float64{-4.4, -16.4, 8.8, 1.9, -9.8, -9.1, 7, -6}, + Name: "OneFlag", + }, + }, + }, + { + Name: "PositiveInc", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 2, + Incy: 3, + N: 3, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{28, -2, 4, -5, 6, -3, 24, 10}, + }, + }, + DdotAns: -18, + DswapAns: DTwoVecAnswer{ + X: []float64{8, 15, 7, 3, -4, 7}, + Y: []float64{10, -2, 4, -6, 6, -3, 14, 10}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{10, -2, 4, -6, 6, -3, 14, 10}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{12.444023964292095, 15, -2.479518890035003, 3, 10.997835971550302, 7}, + YAns: []float64{3.024279678886205, -2, 4, 8.879864079700745, 6, -3, -9.541886812516392, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 15, -6.1, 3, 13, 7}, + YAns: []float64{5, -2, 4, 2.9, 6, -3, -0.6, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{9.2, 15, -6.7, 3, 14.4, 7}, + YAns: []float64{9, -2, 4, 6.4, 6, -3, -2.6, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{13, 15, 4, 3, 3, 7}, + YAns: []float64{-4.4, -2, 4, 10.9, 6, -3, -16.8, 10}, + }, + }, + }, + { + Name: "NegativeInc", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: -2, + Incy: -3, + N: 3, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{28, -2, 4, -5, 6, -3, 24, 10}, + }, + }, + DdotAns: -18, + DswapAns: DTwoVecAnswer{ + X: []float64{8, 15, 7, 3, -4, 7}, + Y: []float64{10, -2, 4, -6, 6, -3, 14, 10}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{10, -2, 4, -6, 6, -3, 14, 10}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{12.444023964292095, 15, -2.479518890035003, 3, 10.997835971550302, 7}, + YAns: []float64{3.024279678886205, -2, 4, 8.879864079700745, 6, -3, -9.541886812516392, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 15, -6.1, 3, 13, 7}, + YAns: []float64{5, -2, 4, 2.9, 6, -3, -0.6, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{9.2, 15, -6.7, 3, 14.4, 7}, + YAns: []float64{9, -2, 4, 6.4, 6, -3, -2.6, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{13, 15, 4, 3, 3, 7}, + YAns: []float64{-4.4, -2, 4, 
10.9, 6, -3, -16.8, 10}, + }, + }, + }, + { + Name: "MixedInc1", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 2, + Incy: -3, + N: 3, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DdotAns: 30, + DswapAns: DTwoVecAnswer{ + X: []float64{-4, 15, 7, 3, 8, 7}, + Y: []float64{14, -2, 4, -6, 6, -3, 10, 10}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{14, -2, 4, -6, 6, -3, 10, 10}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{7.372604823403701, 15, -2.479518890035003, 3, 16.069255112438693, 7}, + YAns: []float64{1.333806631923407, -2, 4, 8.879864079700745, 6, -3, -7.851413765553595, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{9.4, 15, -6.1, 3, 11.8, 7}, + YAns: []float64{5.4, -2, 4, 2.9, 6, -3, -1, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{10.4, 15, -6.7, 3, 13.2, 7}, + YAns: []float64{9.4, -2, 4, 6.4, 6, -3, -3, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{1, 15, 4, 3, 15, 7}, + YAns: []float64{-8.4, -2, 4, 10.9, 6, -3, -12.8, 10}, + }, + }, + }, + { + Name: "MixedInc2", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: -2, + Incy: 3, + N: 3, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DdotAns: 30, + DswapAns: DTwoVecAnswer{ + X: []float64{-4, 15, 7, 3, 8, 7}, + Y: []float64{14, -2, 4, -6, 6, -3, 10, 10}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{14, -2, 4, -6, 6, -3, 10, 10}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{7.372604823403701, 15, -2.479518890035003, 3, 16.069255112438693, 7}, + YAns: []float64{1.333806631923407, -2, 4, 8.879864079700745, 6, -3, -7.851413765553595, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{9.4, 15, -6.1, 3, 11.8, 7}, + YAns: []float64{5.4, -2, 4, 2.9, 6, -3, -1, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{1, 0.1, -0.1, 1}, + }, + XAns: []float64{10.4, 15, -6.7, 3, 13.2, 7}, + YAns: []float64{9.4, -2, 4, 6.4, 6, -3, -3, 10}, + }, + { + P: blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, -1, 1, 0.7}, + }, + XAns: []float64{1, 15, 4, 3, 15, 7}, + YAns: []float64{-8.4, -2, 4, 10.9, 6, -3, -12.8, 10}, + }, + }, + }, + { + Name: "ZeroN", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: -2, + Incy: 3, + N: 0, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DswapAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + DcopyAns: DTwoVecAnswer{ + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 
6, -3, -4, 10}, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + }, + { + Name: "NegativeN", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: -2, + Incy: 3, + N: -3, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8}, + }, + }, + }, + { + Name: "ZeroIncX", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 0, + Incy: 3, + N: 2, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8}, + }, + }, + }, + { + Name: "ZeroIncY", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 1, + Incy: 0, + N: 2, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8}, + }, + }, + }, + { + Name: "OutOfBoundsX", + X: []float64{10, 15, -6, 3, 14, 7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 8, + Incy: 2, + N: 2, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{8.2, 13.7, -5.8, 2, 12, 6.6}, + YAns: []float64{5, 0.5, 1.4, 3.8, 4.4, -0.8}, + }, + }, + }, + { + Name: "OutOfBoundsY", + X: []float64{10, 15, -6, 3, 14, 
7}, + Y: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + XTmp: []float64{0, 0, 0, 0, 0, 0}, + YTmp: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + Incx: 2, + Incy: 8, + N: 2, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{36, -2, 4, -5, 6, -3, 16, 10}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{10, 15, -6, 3, 14, 7}, + YAns: []float64{8, -2, 4, 7, 6, -3, -4, 10}, + }, + }, + }, + { + Name: "Empty", + X: []float64{}, + Y: []float64{}, + Incx: 1, + Incy: 1, + N: 0, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{}, + YAns: []float64{}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{}, + YAns: []float64{}, + }, + }, + }, + { + Name: "EmptyZeroIncX", + X: []float64{}, + Y: []float64{}, + Incx: 0, + Incy: 1, + N: 0, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{}, + YAns: []float64{}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{}, + YAns: []float64{}, + }, + }, + }, + { + Name: "EmptyZeroIncY", + X: []float64{}, + Y: []float64{}, + Incx: 1, + Incy: 0, + N: 0, + Panic: true, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{}, + YAns: []float64{}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{}, + YAns: []float64{}, + }, + }, + }, + { + Name: "EmptyReverse", + X: []float64{}, + Y: []float64{}, + Incx: -1, + Incy: -1, + N: 0, + Panic: false, + DaxpyCases: []DaxpyCase{ + { + Alpha: 2, + Ans: []float64{}, + }, + }, + DrotCases: []DrotCase{ + { + C: math.Cos(25 * math.Pi / 180), + S: math.Sin(25 * math.Pi / 180), + XAns: []float64{}, + YAns: []float64{}, + }, + }, + DrotmCases: []DrotmCase{ + { + P: blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.9, 0.1, -0.1, 0.5}, + }, + XAns: []float64{}, + YAns: []float64{}, + }, + }, + }, +} + +type Ddotter interface { + Ddot(n int, x []float64, incX int, y []float64, incY int) float64 +} + +func DdotTest(t *testing.T, d Ddotter) { + ddot := d.Ddot + for _, c := range DoubleTwoVectorCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { ddot(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) } + testpanics(f, c.Name, t) + continue + } + dot := ddot(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) + if !dTolEqual(dot, c.DdotAns) { + t.Errorf("ddot: mismatch %v: expected %v, found %v", c.Name, c.DdotAns, dot) + } + } + + // check it works for 16-byte unaligned slices + x := []float64{1, 1, 1, 1, 1} + if n := ddot(4, x[:4], 1, x[1:], 1); n != 4 { + t.Errorf("ddot: mismatch Unaligned: expected %v, found %v", 4, n) + } + if n := ddot(2, x[:4], 2, x[1:], 2); n != 2 { + t.Errorf("ddot: mismatch 
Unaligned: expected %v, found %v", 2, n) + } + if n := ddot(2, x[:4], 3, x[1:], 3); n != 2 { + t.Errorf("ddot: mismatch Unaligned: expected %v, found %v", 2, n) + } +} + +type Dnrm2er interface { + Dnrm2(n int, x []float64, incX int) float64 +} + +func Dnrm2Test(t *testing.T, blasser Dnrm2er) { + dnrm2 := blasser.Dnrm2 + for _, c := range DoubleOneVectorCases { + if c.Panic { + f := func() { dnrm2(c.N, c.X, c.Incx) } + testpanics(f, c.Name, t) + continue + } + v := dnrm2(c.N, c.X, c.Incx) + if !dTolEqual(v, c.Dnrm2) { + t.Errorf("dnrm2: mismatch %v: expected %v, found %v", c.Name, c.Dnrm2, v) + } + } +} + +type Dasumer interface { + Dasum(n int, x []float64, incX int) float64 +} + +func DasumTest(t *testing.T, blasser Dasumer) { + dasum := blasser.Dasum + for _, c := range DoubleOneVectorCases { + if c.Panic { + f := func() { dasum(c.N, c.X, c.Incx) } + testpanics(f, c.Name, t) + continue + } + v := dasum(c.N, c.X, c.Incx) + if !dTolEqual(v, c.Dasum) { + t.Errorf("dasum: mismatch %v: expected %v, found %v", c.Name, c.Dasum, v) + } + } +} + +type Idamaxer interface { + Idamax(n int, x []float64, incX int) int +} + +func IdamaxTest(t *testing.T, blasser Idamaxer) { + idamax := blasser.Idamax + for _, c := range DoubleOneVectorCases { + if c.Panic { + f := func() { idamax(c.N, c.X, c.Incx) } + testpanics(f, c.Name, t) + continue + } + v := idamax(c.N, c.X, c.Incx) + if v != c.Idamax { + s := fmt.Sprintf("idamax: mismatch %v: expected %v, found %v", c.Name, c.Idamax, v) + if floats.HasNaN(c.X) { + t.Log(s) + } else { + t.Errorf(s) + } + } + } +} + +type Dswapper interface { + Dswap(n int, x []float64, incX int, y []float64, incY int) +} + +func DswapTest(t *testing.T, d Dswapper) { + dswap := d.Dswap + for _, c := range DoubleTwoVectorCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { dswap(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) } + testpanics(f, c.Name, t) + continue + } + dswap(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) + if !dSliceTolEqual(c.XTmp, c.DswapAns.X) { + t.Errorf("dswap: x mismatch %v: expected %v, found %v", c.Name, c.DswapAns.X, c.XTmp) + } + if !dSliceTolEqual(c.YTmp, c.DswapAns.Y) { + t.Errorf("dswap: y mismatch %v: expected %v, found %v", c.Name, c.DswapAns.Y, c.YTmp) + } + } +} + +type Dcopier interface { + Dcopy(n int, x []float64, incX int, y []float64, incY int) +} + +func DcopyTest(t *testing.T, d Dcopier) { + dcopy := d.Dcopy + for _, c := range DoubleTwoVectorCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { dcopy(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) } + testpanics(f, c.Name, t) + continue + } + dcopy(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy) + if !dSliceTolEqual(c.XTmp, c.DcopyAns.X) { + t.Errorf("dswap: x mismatch %v: expected %v, found %v", c.Name, c.DcopyAns.X, c.XTmp) + } + if !dSliceTolEqual(c.YTmp, c.DcopyAns.Y) { + t.Errorf("dswap: y mismatch %v: expected %v, found %v", c.Name, c.DcopyAns.Y, c.YTmp) + } + } +} + +type Daxpyer interface { + Daxpy(n int, alpha float64, x []float64, incX int, y []float64, incY int) +} + +func DaxpyTest(t *testing.T, d Daxpyer) { + daxpy := d.Daxpy + for _, c := range DoubleTwoVectorCases { + for _, kind := range c.DaxpyCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { daxpy(c.N, kind.Alpha, c.XTmp, c.Incx, c.YTmp, c.Incy) } + testpanics(f, c.Name, t) + continue + } + daxpy(c.N, kind.Alpha, c.XTmp, c.Incx, c.YTmp, c.Incy) + if !dSliceTolEqual(c.YTmp, kind.Ans) { + t.Errorf("daxpy: mismatch %v: expected %v, found %v", c.Name, kind.Ans, c.YTmp) + } + } + } +} + 
+type DrotgTestStruct struct { + Name string + A, B float64 + C, S, R, Z float64 +} + +var DrotgTests = []DrotgTestStruct{ + { + Name: "ZeroAB", + C: 1, + }, + { + Name: "PosA_ZeroB", + A: 0.5, + C: 1, + R: 0.5, + }, + { + Name: "NegA_ZeroB", + A: -4.6, + C: 1, + R: -4.6, + }, + { + Name: "ZeroA_PosB", + B: 3, + S: 1, + R: 3, + Z: 1, + }, + { + Name: "ZeroA_NegB", + B: -0.3, + S: 1, + R: -0.3, + Z: 1, + }, + { + Name: "PosA_PosB_AGTB", + A: 5, + B: 0.3, + C: 0.99820484546577868593549038000, + S: 0.05989229072794672115612942280, + R: 5.00899191454727744602429072688, + Z: 0.05989229072794672115612942280, + }, + { + Name: "PosA_PosB_ALTB", + A: 3, + B: 4, + C: 3.0 / 5, + S: 4.0 / 5, + R: 5, + Z: 5.0 / 3.0, + }, + + { + Name: "PosA_NegB_AGTB", + A: 2.6, + B: -0.9, + C: 0.94498607344025815971847507095, + S: -0.32711056388316628605639521686, + R: 2.751363298439520872718790879655, + Z: -0.3271105638831662860563952168, + }, + { + Name: "PosA_NegB_ALTB", + A: 2.6, + B: -2.9, + C: -0.6675450157520258540548049558, + S: 0.7445694406464903756765132200, + R: -3.8948684188300893100043812234, + Z: 1 / -0.6675450157520258540548049558, + }, + { + Name: "NegA_PosB_AGTB", + A: -11.4, + B: 10.3, + C: 0.7419981952497362418487847947, + S: -0.6704018781642353764072353847, + R: -15.363918770938617534070671122, + Z: -0.6704018781642353764072353847, + }, + { + Name: "NegA_PosB_ALTB", + A: -1.4, + B: 10.3, + C: -0.1346838895922121112404717523, + S: 0.9908886162855605326977564640, + R: 10.394710193170370442523552032, + Z: 1 / -0.1346838895922121112404717523, + }, + { + Name: "NegA_NegB_AGTB", + A: -11.4, + B: 10.3, + C: 0.7419981952497362418487847947, + S: -0.6704018781642353764072353847, + R: -15.363918770938617534070671122, + Z: -0.6704018781642353764072353847, + }, + { + Name: "NegA_NegB_ALTB", + A: -1.4, + B: -10.3, + C: 0.1346838895922121112404717523, + S: 0.9908886162855605326977564640, + R: -10.394710193170370442523552032, + Z: 1 / 0.1346838895922121112404717523, + }, +} + +type Drotger interface { + Drotg(a, b float64) (c, s, r, z float64) +} + +func DrotgTest(t *testing.T, d Drotger) { + drotg := d.Drotg + for _, test := range DrotgTests { + c, s, r, z := drotg(test.A, test.B) + if !dTolEqual(c, test.C) { + t.Errorf("drotg: c mismatch %v: expected %v, found %v", test.Name, test.C, c) + } + if !dTolEqual(s, test.S) { + t.Errorf("drotg: s mismatch %v: expected %v, found %v", test.Name, test.S, s) + } + if !dTolEqual(r, test.R) { + t.Errorf("drotg: r mismatch %v: expected %v, found %v", test.Name, test.R, r) + } + if !dTolEqual(z, test.Z) { + t.Errorf("drotg: z mismatch %v: expected %v, found %v", test.Name, test.Z, z) + } + } +} + +type DrotmgTestStruct struct { + Name string + D1, D2, X1, Y1 float64 + P *blas.DrotmParams + Rd1, Rd2, Rx1 float64 +} + +var DrotmgTests = []DrotmgTestStruct{ + { + Name: "NegD1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + }, + D1: -4, + D2: 6, + X1: 8, + Y1: -4, + }, + { + Name: "ZeroD2", + P: &blas.DrotmParams{ + Flag: blas.Identity, + }, + D1: 4, + X1: 8, + Y1: -5, + Rd1: 4, + Rx1: 8, + }, + { + Name: "ZeroY1", + P: &blas.DrotmParams{ + Flag: blas.Identity, + }, + D1: 4, + D2: -6, + X1: 8, + Rd1: 4, + Rd2: -6, + Rx1: 8, + }, + { + Name: "NegQ2_and_AQ1_LT_AQ2", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + }, + D1: 8, + D2: -6, + X1: 4, + Y1: 8, + Rd1: 0, + Rd2: 0, + Rx1: 0, + }, + { + Name: "ZeroD1", + P: &blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0, 0, 0, 0}, + }, + D1: 0, + D2: 2, + X1: 8, + Y1: 4, + Rd1: 2, + Rd2: 0, + Rx1: 4, + }, + { + Name: 
"AbsQ1_GT_AbsQU__D2_Pos", + P: &blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{0, -0.625, 0.9375, 0}, + }, + D1: 2, + D2: 3, + X1: 8, + Y1: 5, + Rd1: 1.2610837438423645, + Rd2: 1.8916256157635467, + Rx1: 12.6875, + }, + { + Name: "AbsQ1_GT_AbsQU__D2_Neg", + P: &blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{0, -0.625, -0.9375, 0}, + }, + D1: 2, + D2: -3, + X1: 8, + Y1: 5, + Rd1: 4.830188679245283, + Rd2: -7.245283018867925, + Rx1: 3.3125, + }, + { + Name: "AbsQ1_LT_AbsQU__D2_Pos", + P: &blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{5.0 / 12, 0, 0, 0.625}, + }, + D1: 2, + D2: 3, + X1: 5, + Y1: 8, + Rd1: 2.3801652892561984, + Rd2: 1.586776859504132, + Rx1: 121.0 / 12, + }, + { + Name: "D1=D2_X1=X2", + P: &blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{1, 0, 0, 1}, + }, + D1: 2, + D2: 2, + X1: 8, + Y1: 8, + Rd1: 1, + Rd2: 1, + Rx1: 16, + }, + { + Name: "RD1_Big_RD2_Big_Flag_0", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{4096, -3584, 1792, 4096}, + }, + D1: 1600000000, + D2: 800000000, + X1: 8, + Y1: 7, + Rd1: 68.96627824858757, + Rd2: 34.483139124293785, + Rx1: 45312, + }, + { + Name: "RD1_Big_RD2_Big_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{2340.5714285714284, -4096, 4096, 4681.142857142857}, + }, + D1: 800000000, + D2: 1600000000, + X1: 8, + Y1: 7, + Rd1: 57.6914092640818, + Rd2: 28.8457046320409, + Rx1: 47396.57142857142, + }, + { + Name: "RD1_Big_RD2_Med_Flag_0", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{4096, -1, 0.0004096, 1}, + }, + D1: 20000000, + D2: 2, + X1: 8, + Y1: 8, + Rd1: 1.1920927762985347, + Rd2: 1.9999998000000199, + Rx1: 32768.0032768, + }, + { + Name: "RD1_Big_RD2_Med_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{4.096e-17, -1, 4096, 1e-10}, + }, + D1: 2, + D2: 20000000000, + X1: 8, + Y1: 80000000000, + Rd1: 1192.0928955078125, + Rd2: 2, + Rx1: 3.2768e+14, + }, + + // TODO: Add D1 big, D2 small, Flag = 0 + { + Name: "D1_Big_D2_Small_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{2.8671999999999997e-26, -0.000244140625, 4096, 2.44140625e-16}, + }, + D1: 0.000000014, + D2: 2000000000, + X1: 0.000008, + Y1: 8000000, + Rd1: 119.20928955078125, + Rd2: 0.234881024, + Rx1: 3.2768e+10, + }, + + { + Name: "RD1_Med_RD2_Big_Flag_0", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{1, -0.0004096, 1000, 4096}, + }, + D1: 2, + D2: 20000000000, + X1: 80000000, + Y1: 8, + Rd1: 1.9998000199980002, + Rd2: 1191.9736981379988, + Rx1: 8.0008e+07, + }, + { + Name: "D1_Med_D2_Big_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{50, -4096, 1, 4.096e-06}, + }, + D1: 20000000000, + D2: 0.4, + X1: 80000000, + Y1: 80000000000000000, + Rd1: 0.39999998000000103, + Rd2: 1192.092835903171, + Rx1: 8.0000004e+16, + }, + { + Name: "RD1_Med_RD2_Small_Flag_0", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{1, -0.0007233796296296296, 1.1111111111111111e-10, 0.000244140625}, + }, + D1: 1.2, + D2: 0.000000000045, + X1: 2.7, + Y1: 8, + Rd1: 1.1999999996049382, + Rd2: 0.0007549747197514486, + Rx1: 2.700000000888889, + }, + { + Name: "RD1_Med_RD2_Small_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.0002197265625, -1, 0.000244140625, 3.375e-11}, + }, + D1: 1.2, + D2: 0.000000000045, + X1: 2.7, + Y1: 80000000000, + Rd1: 0.0007549747199770676, + Rd2: 1.19999999996355, + Rx1: 1.9531250000593264e+07, + }, + // TODO: Add Small, Big, 0 case + { + Name: 
"D1_Small_D2_Big_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{2.3731773997569866e+10, -1.6777216e+07, 0.000244140625, 1.6777216e-07}, + }, + D1: 120000000000000000, + D2: 0.000000000012345, + X1: 0.08, + Y1: 8000000000000, + Rd1: 0.00010502490698765249, + Rd2: 216.1836123957717, + Rx1: 3.8516669198055897e+09, + }, + { + Name: "RD1_Small_RD2_Med_Flag_0", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.000244140625, -1e-08, 0.24414062499999997, 1}, + }, + D1: 0.0000000002, + D2: 20, + X1: 0.8, + Y1: 0.000000008, + Rd1: 0.003355409645903541, + Rd2: 19.99980000199998, + Rx1: 0.000195314453125, + }, + { + Name: "RD1_Small_RD2_Med_Flag_1", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.0012207031250000002, -1, 0.000244140625, 1e-09}, + }, + D1: 0.02, + D2: 0.000000000004, + X1: 0.008, + Y1: 8000000, + Rd1: 6.710886366445568e-05, + Rd2: 0.019999999900000003, + Rx1: 1953.125009765625, + }, + { + // Values consistent with the low precision output posted at the OpenBLAS issue. + // See https://github.com/xianyi/OpenBLAS/issues/1452. + Name: "OpenBLAS#1452", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{1.6110934624105326e-06, -0.000244140625, 0.000244140625, 1.6276041666666668e-06}, + }, + D1: 5.9e-8, + D2: 5.960464e-8, + X1: 1, + Y1: 150, + Rd1: 0.9999559282289687, + Rd2: 0.9898121986058326, + Rx1: 0.03662270484346241, + }, + { + Name: "netlib/BLAS/TESTING#1", + P: &blas.DrotmParams{ + Flag: blas.OffDiagonal, + H: [4]float64{0, -0.16666666666666669, 0.5, 0}, + }, + D1: 0.10000000000000001, + D2: 0.29999999999999999, + X1: 1.2000000000000000, + Y1: 0.20000000000000001, + Rd1: 9.2307692307692313e-2, + Rd2: 0.27692307692307694, + Rx1: 1.2999999999999998, + }, + { + Name: "netlib/BLAS/TESTING#2", + P: &blas.DrotmParams{ + Flag: blas.Diagonal, + H: [4]float64{0.5, 0, 0, 0.14285714285714285}, + }, + D1: 0.69999999999999996, + D2: 0.20000000000000001, + X1: 0.59999999999999998, + Y1: 4.2000000000000002, + Rd1: 0.18666666666666668, + Rd2: 0.65333333333333332, + Rx1: 4.5000000000000000, + }, + { + Name: "netlib/BLAS/TESTING#3", + P: &blas.DrotmParams{ + Flag: blas.Identity, + H: [4]float64{0, 0, 0, 0}, + }, + D1: 0, + D2: 0, + X1: 0, + Y1: 0, + Rd1: 0, + Rd2: 0, + Rx1: 0, + }, + { + Name: "netlib/BLAS/TESTING#4", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0, 0, 0, 0}, + }, + D1: 4, + D2: -1, + X1: 2, + Y1: 4, + Rd1: 0, + Rd2: 0, + Rx1: 0, + }, + { + Name: "netlib/BLAS/TESTING#5", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.244140625e-03, -0.1e-3, 0.8138020833333334, 1}, + }, + D1: 6e-10, + D2: 2e-2, + X1: 100000, + Y1: 10, + Rd1: 7.5497471999999991e-3, + Rd2: 1.4999999999999999e-2, + Rx1: 32.552083333333336, + }, + { + Name: "netlib/BLAS/TESTING#6", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{4096, -999999.99999999988, 2.0479999999999999e-3, 1}, + }, + D1: 40000000000, + D2: 2e-2, + X1: 1.0000000000000001e-5, + Y1: 10, + Rd1: 1589.4571940104167, + Rd2: 1.3333333333333334e-2, + Rx1: 6.1440000000000008e-2, + }, + { + Name: "netlib/BLAS/TESTING#7", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{0.5e-4, -0.2441406250e-3, 1, 2.441406250}, + }, + D1: 2.0000000000000001e-10, + D2: 4.0000000000000001e-2, + X1: 100000, + Y1: 10, + Rd1: 2.6666666666666668e-2, + Rd2: 2.2369621333333334e-3, + Rx1: 15, + }, + { + Name: "netlib/BLAS/TESTING#8", + P: &blas.DrotmParams{ + Flag: blas.Rescaling, + H: [4]float64{500000, -4096, 1, 4.096e-3}, + }, + D1: 20000000000, + 
D2: 4.0000000000000001e-2, + X1: 1.0000000000000001e-5, + Y1: 10, + Rd1: 2.6666666666666668e-2, + Rd2: 794.72859700520837, + Rx1: 15, + }, + // TODO: Add Small, Small, 0 case + // TODO: Add Small, Small, 1 case +} + +type Drotmger interface { + Drotmg(d1, d2, x1, y1 float64) (p blas.DrotmParams, rd1, rd2, rx1 float64) + Drotmer +} + +func DrotmgTest(t *testing.T, d Drotmger) { + for _, test := range DrotmgTests { + + p, rd1, rd2, rx1 := d.Drotmg(test.D1, test.D2, test.X1, test.Y1) + + if p.Flag != test.P.Flag { + t.Errorf("drotmg flag mismatch %v: expected %v, found %v", test.Name, test.P.Flag, p.Flag) + } + for i, val := range p.H { + if !dTolEqual(test.P.H[i], val) { + t.Errorf("drotmg H mismatch %v: expected %v, found %v", test.Name, test.P.H, p.H) + break + } + } + if !dTolEqual(rd1, test.Rd1) { + t.Errorf("drotmg rd1 mismatch %v: expected %v, found %v", test.Name, test.Rd1, rd1) + } + if !dTolEqual(rd2, test.Rd2) { + t.Errorf("drotmg rd2 mismatch %v: expected %v, found %v", test.Name, test.Rd2, rd2) + } + if !dTolEqual(rx1, test.Rx1) { + t.Errorf("drotmg rx1 mismatch %v: expected %v, found %v", test.Name, test.Rx1, rx1) + } + + // Drotmg routines compute the components of a modified Givens transformation + // matrix H that zeros the y-component of the resulting vector, + // + // [x1; 0] := H[x1 sqrt(d1); y1 sqrt(d2)]. + // + // Drotm performs a modified Givens rotation of points in the plane, + // + // [x1; y1] := H[x1; y1]. + y := []float64{test.Y1} + d.Drotm(1, []float64{test.X1}, 1, y, 1, p) + for i, v := range y { + if rd2 >= 0 { + v *= math.Sqrt(rd2) + } + if !dTolEqual(v, 0) { + t.Errorf("drotm y_%d mismatch %v: expected 0, found %v", i, test.Name, v) + } + } + } +} + +type Droter interface { + Drot(n int, x []float64, incX int, y []float64, incY int, c, s float64) +} + +func DrotTest(t *testing.T, d Droter) { + drot := d.Drot + for _, c := range DoubleTwoVectorCases { + for _, kind := range c.DrotCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { drot(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy, kind.C, kind.S) } + testpanics(f, c.Name, t) + continue + } + drot(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy, kind.C, kind.S) + if !dSliceTolEqual(c.XTmp, kind.XAns) { + t.Errorf("drot: x mismatch %v: expected %v, found %v", c.Name, kind.XAns, c.XTmp) + } + if !dSliceTolEqual(c.YTmp, kind.YAns) { + t.Errorf("drot: y mismatch %v: expected %v, found %v", c.Name, kind.YAns, c.YTmp) + } + } + } +} + +type Drotmer interface { + Drotm(n int, x []float64, incX int, y []float64, incY int, p blas.DrotmParams) +} + +func DrotmTest(t *testing.T, d Drotmer) { + drotm := d.Drotm + for _, c := range DoubleTwoVectorCases { + for _, kind := range c.DrotmCases { + dCopyTwoTmp(c.X, c.XTmp, c.Y, c.YTmp) + if c.Panic { + f := func() { drotm(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy, kind.P) } + testpanics(f, c.Name+", "+kind.Name, t) + continue + } + drotm(c.N, c.XTmp, c.Incx, c.YTmp, c.Incy, kind.P) + if !dSliceTolEqual(c.XTmp, kind.XAns) { + t.Errorf("drotm: mismatch %v: expected %v, found %v", c.Name, kind.XAns, c.XTmp) + } + if !dSliceTolEqual(c.YTmp, kind.YAns) { + t.Errorf("drotm: mismatch %v: expected %v, found %v", c.Name, kind.YAns, c.YTmp) + } + } + } +} + +type Dscaler interface { + Dscal(n int, alpha float64, x []float64, incX int) +} + +func DscalTest(t *testing.T, blasser Dscaler) { + dscal := blasser.Dscal + for _, c := range DoubleOneVectorCases { + for _, kind := range c.DscalCases { + xTmp := make([]float64, len(c.X)) + copy(xTmp, c.X) + if c.Panic { + f := func() { dscal(c.N, 
kind.Alpha, xTmp, c.Incx) } + testpanics(f, c.Name, t) + continue + } + dscal(c.N, kind.Alpha, xTmp, c.Incx) + if !dSliceTolEqual(xTmp, kind.Ans) { + t.Errorf("dscal: mismatch %v, %v: expected %v, found %v", c.Name, kind.Name, kind.Ans, xTmp) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/level2bench.go b/vendor/gonum.org/v1/gonum/blas/testblas/level2bench.go new file mode 100644 index 0000000..6d58c12 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/level2bench.go @@ -0,0 +1,91 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +func DgemvBenchmark(b *testing.B, impl Dgemver, tA blas.Transpose, m, n, incX, incY int) { + var lenX, lenY int + if tA == blas.NoTrans { + lenX = n + lenY = m + } else { + lenX = m + lenY = n + } + xr := make([]float64, lenX) + for i := range xr { + xr[i] = rand.Float64() + } + x := makeIncremented(xr, incX, 0) + yr := make([]float64, lenY) + for i := range yr { + yr[i] = rand.Float64() + } + y := makeIncremented(yr, incY, 0) + a := make([]float64, m*n) + for i := range a { + a[i] = rand.Float64() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + impl.Dgemv(tA, m, n, 2, a, n, x, incX, 3, y, incY) + } +} + +func DgerBenchmark(b *testing.B, impl Dgerer, m, n, incX, incY int) { + xr := make([]float64, m) + for i := range xr { + xr[i] = rand.Float64() + } + x := makeIncremented(xr, incX, 0) + yr := make([]float64, n) + for i := range yr { + yr[i] = rand.Float64() + } + y := makeIncremented(yr, incY, 0) + a := make([]float64, m*n) + for i := range a { + a[i] = rand.Float64() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + impl.Dger(m, n, 2, x, incX, y, incY, a, n) + } +} + +type Sgerer interface { + Sger(m, n int, alpha float32, x []float32, incX int, y []float32, incY int, a []float32, lda int) +} + +func SgerBenchmark(b *testing.B, blasser Sgerer, m, n, incX, incY int) { + xr := make([]float32, m) + for i := range xr { + xr[i] = rand.Float32() + } + x := makeIncremented32(xr, incX, 0) + yr := make([]float32, n) + for i := range yr { + yr[i] = rand.Float32() + } + y := makeIncremented32(yr, incY, 0) + a := make([]float32, m*n) + for i := range a { + a[i] = rand.Float32() + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + blasser.Sger(m, n, 2, x, incX, y, incY, a, n) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zaxpy.go b/vendor/gonum.org/v1/gonum/blas/testblas/zaxpy.go new file mode 100644 index 0000000..20580e7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zaxpy.go @@ -0,0 +1,157 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" +) + +type Zaxpyer interface { + Zaxpy(n int, alpha complex128, x []complex128, incX int, y []complex128, incY int) +} + +func ZaxpyTest(t *testing.T, impl Zaxpyer) { + for tc, test := range []struct { + alpha complex128 + x, y []complex128 + + want []complex128 // Result when both increments have the same sign. + wantRev []complex128 // Result when the increments have opposite sign. 
+ }{ + { + alpha: 0, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + want: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + wantRev: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + }, + { + alpha: 1, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + want: []complex128{31 + 33i, 36 + 38i, 41 + 43i, 46 + 48i, 51 + 53i, 56 + 58i, 61 + 63i, 66 + 68i, 71 + 73i, 76 + 78i, 81 + 83i, 86 + 88i}, + wantRev: []complex128{53 + 55i, 54 + 56i, 55 + 57i, 56 + 58i, 57 + 59i, 58 + 60i, 59 + 61i, 60 + 62i, 61 + 63i, 62 + 64i, 63 + 65i, 64 + 66i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i}, + y: []complex128{30 + 31i}, + want: []complex128{19 + 44i}, + wantRev: []complex128{19 + 44i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i}, + y: []complex128{30 + 31i, 33 + 34i}, + want: []complex128{19 + 44i, 14 + 67i}, + wantRev: []complex128{11 + 64i, 22 + 47i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i}, + wantRev: []complex128{3 + 84i, 14 + 67i, 25 + 50i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i}, + wantRev: []complex128{-5 + 104i, 6 + 87i, 17 + 70i, 28 + 53i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i}, + wantRev: []complex128{-13 + 124i, -2 + 107i, 9 + 90i, 20 + 73i, 31 + 56i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i}, + wantRev: []complex128{-21 + 144i, -10 + 127i, 1 + 110i, 12 + 93i, 23 + 76i, 34 + 59i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i}, + wantRev: []complex128{-29 + 164i, -18 + 147i, -7 + 130i, 4 + 113i, 15 + 96i, 26 + 79i, 37 + 62i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i, -16 + 205i}, + wantRev: []complex128{-37 + 184i, -26 + 167i, -15 + 150i, -4 + 133i, 7 + 116i, 18 + 99i, 29 + 82i, 40 + 65i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 
15 + 16i, 17 + 18i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i, -16 + 205i, -21 + 228i}, + wantRev: []complex128{-45 + 204i, -34 + 187i, -23 + 170i, -12 + 153i, -1 + 136i, 10 + 119i, 21 + 102i, 32 + 85i, 43 + 68i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i, -16 + 205i, -21 + 228i, -26 + 251i}, + wantRev: []complex128{-53 + 224i, -42 + 207i, -31 + 190i, -20 + 173i, -9 + 156i, 2 + 139i, 13 + 122i, 24 + 105i, 35 + 88i, 46 + 71i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i, -16 + 205i, -21 + 228i, -26 + 251i, -31 + 274i}, + wantRev: []complex128{-61 + 244i, -50 + 227i, -39 + 210i, -28 + 193i, -17 + 176i, -6 + 159i, 5 + 142i, 16 + 125i, 27 + 108i, 38 + 91i, 49 + 74i}, + }, + { + alpha: 3 + 7i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + want: []complex128{19 + 44i, 14 + 67i, 9 + 90i, 4 + 113i, -1 + 136i, -6 + 159i, -11 + 182i, -16 + 205i, -21 + 228i, -26 + 251i, -31 + 274i, -36 + 297i}, + wantRev: []complex128{-69 + 264i, -58 + 247i, -47 + 230i, -36 + 213i, -25 + 196i, -14 + 179i, -3 + 162i, 8 + 145i, 19 + 128i, 30 + 111i, 41 + 94i, 52 + 77i}, + }, + } { + n := len(test.x) + if len(test.y) != n || len(test.want) != n || len(test.wantRev) != n { + panic("bad test") + } + for _, inc := range allPairs([]int{-7, -3, 1, 13}, []int{-11, -5, 1, 17}) { + incX := inc[0] + incY := inc[1] + + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + + var want []complex128 + if incX*incY > 0 { + want = makeZVector(test.want, incY) + } else { + want = makeZVector(test.wantRev, incY) + } + + impl.Zaxpy(n, test.alpha, x, incX, y, incY) + + prefix := fmt.Sprintf("Case %v (incX=%v,incY=%v):", tc, incX, incY) + + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + + if !zsame(y, want) { + t.Errorf("%v: unexpected y:\nwant %v\ngot %v", prefix, want, y) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zcopy.go b/vendor/gonum.org/v1/gonum/blas/testblas/zcopy.go new file mode 100644 index 0000000..0a58a84 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zcopy.go @@ -0,0 +1,74 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" +) + +type Zcopyer interface { + Zcopy(n int, x []complex128, incX int, y []complex128, incY int) +} + +func ZcopyTest(t *testing.T, impl Zcopyer) { + rnd := rand.New(rand.NewSource(1)) + for n := 0; n <= 20; n++ { + for _, inc := range allPairs([]int{-7, -3, 1, 13}, []int{-11, -5, 1, 17}) { + incX := inc[0] + incY := inc[1] + aincX := abs(incX) + aincY := abs(incY) + + var x []complex128 + if n > 0 { + x = make([]complex128, (n-1)*aincX+1) + } + for i := range x { + x[i] = znan + } + for i := 0; i < n; i++ { + x[i*aincX] = complex(rnd.NormFloat64(), rnd.NormFloat64()) + } + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + var y []complex128 + if n > 0 { + y = make([]complex128, (n-1)*aincY+1) + } + for i := range y { + y[i] = znan + } + + want := make([]complex128, len(y)) + for i := range want { + want[i] = znan + } + if incX*incY > 0 { + for i := 0; i < n; i++ { + want[i*aincY] = x[i*aincX] + } + } else { + for i := 0; i < n; i++ { + want[i*aincY] = x[(n-1-i)*aincX] + } + } + + impl.Zcopy(n, x, incX, y, incY) + + prefix := fmt.Sprintf("Case n=%v,incX=%v,incY=%v:", n, incX, incY) + + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + if !zsame(y, want) { + t.Errorf("%v: unexpected y:\nwant %v\ngot %v", prefix, want, y) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zdotc.go b/vendor/gonum.org/v1/gonum/blas/testblas/zdotc.go new file mode 100644 index 0000000..fe26f5d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zdotc.go @@ -0,0 +1,139 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" +) + +type Zdotcer interface { + Zdotc(n int, x []complex128, incX int, y []complex128, incY int) complex128 +} + +func ZdotcTest(t *testing.T, impl Zdotcer) { + for tc, test := range []struct { + x, y []complex128 + + want complex128 // Result when both increments have the same sign. + wantRev complex128 // Result when the increments have opposite sign. 
+ }{ + { + x: nil, + y: nil, + want: 0, + wantRev: 0, + }, + { + x: []complex128{1 + 2i}, + y: []complex128{30 + 31i}, + want: 92 - 29i, + wantRev: 92 - 29i, + }, + { + x: []complex128{1 + 2i, 3 + 4i}, + y: []complex128{30 + 31i, 33 + 34i}, + want: 327 - 59i, + wantRev: 315 - 59i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i}, + want: 729 - 90i, + wantRev: 681 - 90i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i}, + want: 1322 - 122i, + wantRev: 1202 - 122i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i}, + want: 2130 - 155i, + wantRev: 1890 - 155i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i}, + want: 3177 - 189i, + wantRev: 2757 - 189i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i}, + want: 4487 - 224i, + wantRev: 3815 - 224i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i}, + want: 6084 - 260i, + wantRev: 5076 - 260i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i}, + want: 7992 - 297i, + wantRev: 6552 - 297i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i}, + want: 10235 - 335i, + wantRev: 8255 - 335i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i}, + want: 12837 - 374i, + wantRev: 10197 - 374i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + want: 15822 - 414i, + wantRev: 12390 - 414i, + }, + } { + n := len(test.x) + if len(test.y) != n { + panic("bad test") + } + for _, inc := range allPairs([]int{-7, -3, 1, 13}, []int{-11, -5, 1, 17}) { + incX := inc[0] + incY := inc[1] + + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + want := test.want + if incX*incY < 0 { + want = test.wantRev + } + + got := impl.Zdotc(n, x, incX, y, incY) + + prefix := fmt.Sprintf("Case %v (incX=%v,incY=%v):", tc, incX, incY) + + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + if !zsame(y, yCopy) { + t.Errorf("%v: unexpected modification of y", prefix) + } + + if got != want { + t.Errorf("%v: unexpected result. 
want %v, got %v", prefix, want, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zdotu.go b/vendor/gonum.org/v1/gonum/blas/testblas/zdotu.go new file mode 100644 index 0000000..c10b7f7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zdotu.go @@ -0,0 +1,139 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" +) + +type Zdotuer interface { + Zdotu(n int, x []complex128, incX int, y []complex128, incY int) complex128 +} + +func ZdotuTest(t *testing.T, impl Zdotuer) { + for tc, test := range []struct { + x, y []complex128 + + want complex128 // Result when both increments have the same sign. + wantRev complex128 // Result when the increments have opposite sign. + }{ + { + x: nil, + y: nil, + want: 0, + wantRev: 0, + }, + { + x: []complex128{1 + 2i}, + y: []complex128{30 + 31i}, + want: -32 + 91i, + wantRev: -32 + 91i, + }, + { + x: []complex128{1 + 2i, 3 + 4i}, + y: []complex128{30 + 31i, 33 + 34i}, + want: -69 + 325i, + wantRev: -69 + 313i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i}, + want: -111 + 726i, + wantRev: -111 + 678i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i}, + want: -158 + 1318i, + wantRev: -158 + 1198i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i}, + want: -210 + 2125i, + wantRev: -210 + 1885i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i}, + want: -267 + 3171i, + wantRev: -267 + 2751i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i}, + want: -329 + 4480i, + wantRev: -329 + 3808i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i}, + want: -396 + 6076i, + wantRev: -396 + 5068i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i}, + want: -468 + 7983i, + wantRev: -468 + 6543i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i}, + want: -545 + 10225i, + wantRev: -545 + 8245i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i}, + want: -627 + 12826i, + wantRev: -627 + 10186i, + }, + { + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + y: []complex128{30 + 31i, 33 + 34i, 36 + 37i, 39 + 40i, 42 + 43i, 45 + 46i, 48 + 49i, 51 + 52i, 54 + 55i, 57 + 58i, 60 + 61i, 63 + 64i}, + want: -714 + 15810i, + wantRev: -714 + 12378i, + }, + } { + n := len(test.x) + if 
len(test.y) != n { + panic("bad test") + } + for _, inc := range allPairs([]int{-7, -3, 1, 13}, []int{-11, -5, 1, 17}) { + incX := inc[0] + incY := inc[1] + + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + want := test.want + if incX*incY < 0 { + want = test.wantRev + } + + got := impl.Zdotu(n, x, incX, y, incY) + + prefix := fmt.Sprintf("Case %v (incX=%v,incY=%v):", tc, incX, incY) + + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + if !zsame(y, yCopy) { + t.Errorf("%v: unexpected modification of y", prefix) + } + + if got != want { + t.Errorf("%v: unexpected result. want %v, got %v", prefix, want, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zdscal.go b/vendor/gonum.org/v1/gonum/blas/testblas/zdscal.go new file mode 100644 index 0000000..c433d10 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zdscal.go @@ -0,0 +1,119 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" +) + +type Zdscaler interface { + Zdscal(n int, alpha float64, x []complex128, incX int) +} + +func ZdscalTest(t *testing.T, impl Zdscaler) { + for tc, test := range []struct { + alpha float64 + x []complex128 + want []complex128 + }{ + { + alpha: 3, + x: nil, + want: nil, + }, + { + alpha: 3, + x: []complex128{1 + 2i}, + want: []complex128{3 + 6i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i}, + want: []complex128{3 + 6i, 9 + 12i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i, 45 + 48i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i, 45 + 48i, 51 + 54i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i, 45 + 48i, 51 + 54i, 57 + 60i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i, 45 + 48i, 51 + 54i, 57 + 60i, 63 + 66i}, + }, + { + alpha: 3, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 
+ 22i, 23 + 24i}, + want: []complex128{3 + 6i, 9 + 12i, 15 + 18i, 21 + 24i, 27 + 30i, 33 + 36i, 39 + 42i, 45 + 48i, 51 + 54i, 57 + 60i, 63 + 66i, 69 + 72i}, + }, + { + alpha: 0, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + want: []complex128{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + }, + } { + n := len(test.x) + if len(test.want) != n { + panic("bad test") + } + for _, incX := range []int{-3, -1, 1, 2, 4, 7, 10} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + want := makeZVector(test.want, incX) + + impl.Zdscal(n, test.alpha, x, incX) + + prefix := fmt.Sprintf("Case %v (n=%v,incX=%v):", tc, n, incX) + + if incX < 0 { + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x\nwant %v\ngot %v", prefix, want, x) + } + continue + } + if !zsame(x, want) { + t.Errorf("%v: unexpected result:\nwant: %v\ngot: %v", prefix, want, x) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zgbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/zgbmv.go new file mode 100644 index 0000000..93f6c6c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zgbmv.go @@ -0,0 +1,147 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zgbmver interface { + Zgbmv(trans blas.Transpose, m, n, kL, kU int, alpha complex128, ab []complex128, ldab int, x []complex128, incX int, beta complex128, y []complex128, incY int) + + Zgemver +} + +func ZgbmvTest(t *testing.T, impl Zgbmver) { + rnd := rand.New(rand.NewSource(1)) + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + // Generate all possible size combinations. + for _, mn := range allPairs([]int{1, 2, 3, 5}, []int{1, 2, 3, 5}) { + m := mn[0] + n := mn[1] + // Generate all possible numbers of lower and upper + // diagonals. Use slices to reduce indentation. + kLs := make([]int, max(1, m)) + for i := range kLs { + kLs[i] = i + } + kUs := make([]int, max(1, n)) + for i := range kUs { + kUs[i] = i + } + for _, ks := range allPairs(kLs, kUs) { + kL := ks[0] + kU := ks[1] + for _, ab := range []struct { + alpha complex128 + beta complex128 + }{ + // All potentially relevant values of + // alpha and beta. + {0, 0}, + {0, 1}, + {0, complex(rnd.NormFloat64(), rnd.NormFloat64())}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), 0}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), 1}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), complex(rnd.NormFloat64(), rnd.NormFloat64())}, + } { + for _, ldab := range []int{kL + kU + 1, kL + kU + 20} { + for _, inc := range allPairs([]int{-3, -2, -1, 1, 2, 3}, []int{-3, -2, -1, 1, 2, 3}) { + incX := inc[0] + incY := inc[1] + testZgbmv(t, impl, rnd, trans, m, n, kL, kU, ab.alpha, ab.beta, ldab, incX, incY) + } + } + } + } + } + } +} + +// testZgbmv tests Zgbmv by comparing its output to that of Zgemv. +func testZgbmv(t *testing.T, impl Zgbmver, rnd *rand.Rand, trans blas.Transpose, m, n, kL, kU int, alpha, beta complex128, ldab, incX, incY int) { + const tol = 1e-13 + + // Allocate a dense-storage band matrix filled with NaNs that will be + // used as the reference matrix for Zgemv. + lda := max(1, n) + a := makeZGeneral(nil, m, n, lda) + // Fill the matrix with zeros. 
+ for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + } + // Fill the band with random data. + for i := 0; i < m; i++ { + for j := max(0, i-kL); j < min(n, i+kU+1); j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + } + // Create the actual band matrix. + ab := zPackBand(kL, kU, ldab, m, n, a, lda) + abCopy := make([]complex128, len(ab)) + copy(abCopy, ab) + + // Compute correct lengths of vectors x and y. + var lenX, lenY int + switch trans { + case blas.NoTrans: + lenX = n + lenY = m + case blas.Trans, blas.ConjTrans: + lenX = m + lenY = n + } + + // Generate a random complex vector x. + xtest := make([]complex128, lenX) + for i := range xtest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + xtest[i] = complex(re, im) + } + x := makeZVector(xtest, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + // Generate a random complex vector y. + ytest := make([]complex128, lenY) + for i := range ytest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + ytest[i] = complex(re, im) + } + y := makeZVector(ytest, incY) + + want := make([]complex128, len(y)) + copy(want, y) + + // Compute the reference result of alpha*op(A)*x + beta*y, storing it + // into want. + impl.Zgemv(trans, m, n, alpha, a, lda, x, incX, beta, want, incY) + // Compute alpha*op(A)*x + beta*y, storing the result in-place into y. + impl.Zgbmv(trans, m, n, kL, kU, alpha, ab, ldab, x, incX, beta, y, incY) + + name := fmt.Sprintf("trans=%v,m=%v,n=%v,kL=%v,kU=%v,lda=%v,incX=%v,incY=%v", trans, m, n, kL, kU, lda, incX, incY) + if !zsame(ab, abCopy) { + t.Errorf("%v: unexpected modification of ab", name) + } + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", name) + } + if !zSameAtNonstrided(y, want, incY) { + t.Errorf("%v: unexpected modification of y", name) + } + if !zEqualApproxAtStrided(y, want, incY, tol) { + t.Errorf("%v: unexpected result\ngot %v\nwant %v\n", name, y, want) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zgemm.go b/vendor/gonum.org/v1/gonum/blas/testblas/zgemm.go new file mode 100644 index 0000000..f119b20 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zgemm.go @@ -0,0 +1,104 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zgemmer interface { + Zgemm(tA, tB blas.Transpose, m, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) +} + +func ZgemmTest(t *testing.T, impl Zgemmer) { + for _, tA := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, tB := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + name := transString(tA) + "-" + transString(tB) + t.Run(name, func(t *testing.T) { + for _, m := range []int{0, 1, 2, 5, 10} { + for _, n := range []int{0, 1, 2, 5, 10} { + for _, k := range []int{0, 1, 2, 7, 11} { + zgemmTest(t, impl, tA, tB, m, n, k) + } + } + } + }) + } + } +} + +func zgemmTest(t *testing.T, impl Zgemmer, tA, tB blas.Transpose, m, n, k int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + rowA, colA := m, k + if tA != blas.NoTrans { + rowA, colA = k, m + } + rowB, colB := k, n + if tB != blas.NoTrans { + rowB, colB = n, k + } + + for _, lda := range []int{max(1, colA), colA + 2} { + for _, ldb := range []int{max(1, colB), colB + 3} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []complex128{0, 1, complex(1.3, -1.1)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, rowA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, rowB*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Create a copy of B. + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, m*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + + // Compute the expected result using an internal Zgemm implementation. + want := zmm(tA, tB, m, n, k, alpha, a, lda, b, ldb, beta, c, ldc) + + // Compute a result using Zgemm. + impl.Zgemm(tA, tB, m, n, k, alpha, a, lda, b, ldb, beta, c, ldc) + + prefix := fmt.Sprintf("m=%v,n=%v,k=%v,lda=%v,ldb=%v,ldc=%v,alpha=%v,beta=%v", m, n, k, lda, ldb, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if !zsame(b, bCopy) { + t.Errorf("%v: unexpected modification of B", prefix) + continue + } + + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result,\nwant=%v\ngot =%v\n", prefix, want, c) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zgemv.go b/vendor/gonum.org/v1/gonum/blas/testblas/zgemv.go new file mode 100644 index 0000000..d472bbc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zgemv.go @@ -0,0 +1,344 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Zgemver interface { + Zgemv(trans blas.Transpose, m, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) +} + +func ZgemvTest(t *testing.T, impl Zgemver) { + for tc, test := range []struct { + trans blas.Transpose + alpha complex128 + a []complex128 + x []complex128 + beta complex128 + y []complex128 + + want []complex128 + wantXNeg []complex128 + wantYNeg []complex128 + wantXYNeg []complex128 + }{ + { + trans: blas.NoTrans, + alpha: 1 + 2i, + beta: 3 + 4i, + }, + { + trans: blas.NoTrans, + alpha: 1 + 2i, + a: []complex128{ + 9 + 5i, -2 + 6i, 5 + 1i, 9 + 2i, 10 + 4i, + 0 - 7i, 9 - 9i, 5 + 3i, -8 - 1i, 7 - 7i, + 10 - 7i, -1 + 3i, 2 + 2i, 7 + 6i, 9 + 1i, + 10 + 0i, 8 - 6i, 4 - 6i, -2 - 10i, -5 + 0i, + }, + x: []complex128{ + 4 - 9i, + 8 + 5i, + -2 - 10i, + 2 - 4i, + -6 + 6i, + }, + beta: 3 + 4i, + y: []complex128{ + -2 + 3i, + 10 + 5i, + -8 - 5i, + -8 + 7i, + }, + want: []complex128{ + 101 - 116i, + 58 + 166i, + 126 - 242i, + 336 - 75i, + }, + wantXNeg: []complex128{ + 98 + 128i, + 374 - 252i, + -113 + 205i, + -60 - 312i, + }, + wantYNeg: []complex128{ + 370 - 63i, + 140 - 140i, + 44 + 64i, + 67 - 128i, + }, + wantXYNeg: []complex128{ + -26 - 300i, + -99 + 307i, + 360 - 354i, + 64 + 116i, + }, + }, + { + trans: blas.Trans, + alpha: 1 + 2i, + a: []complex128{ + 9 + 5i, -2 + 6i, 5 + 1i, 9 + 2i, 10 + 4i, + 0 - 7i, 9 - 9i, 5 + 3i, -8 - 1i, 7 - 7i, + 10 - 7i, -1 + 3i, 2 + 2i, 7 + 6i, 9 + 1i, + 10 + 0i, 8 - 6i, 4 - 6i, -2 - 10i, -5 + 0i, + }, + x: []complex128{ + 4 - 9i, + 8 + 5i, + -2 - 10i, + 2 - 4i, + }, + beta: 3 + 4i, + y: []complex128{ + 8 - 6i, + -8 - 2i, + 9 + 5i, + 4 - 1i, + 6 - 4i, + }, + want: []complex128{ + 580 - 137i, + 221 + 311i, + 149 + 115i, + 443 - 208i, + 517 + 143i, + }, + wantXNeg: []complex128{ + 387 + 152i, + 109 - 433i, + 225 - 53i, + -246 + 44i, + 13 + 20i, + }, + wantYNeg: []complex128{ + 531 + 145i, + 411 - 259i, + 149 + 115i, + 253 + 362i, + 566 - 139i, + }, + wantXYNeg: []complex128{ + 27 + 22i, + -278 - 7i, + 225 - 53i, + 141 - 382i, + 373 + 150i, + }, + }, + { + trans: blas.ConjTrans, + alpha: 1 + 2i, + a: []complex128{ + 9 + 5i, -2 + 6i, 5 + 1i, 9 + 2i, 10 + 4i, + 0 - 7i, 9 - 9i, 5 + 3i, -8 - 1i, 7 - 7i, + 10 - 7i, -1 + 3i, 2 + 2i, 7 + 6i, 9 + 1i, + 10 + 0i, 8 - 6i, 4 - 6i, -2 - 10i, -5 + 0i, + }, + x: []complex128{ + 4 - 9i, + 8 + 5i, + -2 - 10i, + 2 - 4i, + }, + beta: 3 + 4i, + y: []complex128{ + 8 - 6i, + -8 - 2i, + 9 + 5i, + 4 - 1i, + 6 - 4i, + }, + want: []complex128{ + 472 - 133i, + -253 + 23i, + 217 + 131i, + 229 - 316i, + 187 - 97i, + }, + wantXNeg: []complex128{ + 289 + 276i, + 499 + 47i, + 237 + 91i, + 54 + 504i, + 251 + 196i, + }, + wantYNeg: []complex128{ + 201 - 95i, + 197 - 367i, + 217 + 131i, + -221 + 74i, + 458 - 135i, + }, + wantXYNeg: []complex128{ + 265 + 198i, + 22 + 453i, + 237 + 91i, + 531 + 98i, + 275 + 274i, + }, + }, + { + trans: blas.ConjTrans, + alpha: 1 + 2i, + a: []complex128{ + 9 + 5i, -2 + 6i, 5 + 1i, 9 + 2i, 10 + 4i, + 0 - 7i, 9 - 9i, 5 + 3i, -8 - 1i, 7 - 7i, + 10 - 7i, -1 + 3i, 2 + 2i, 7 + 6i, 9 + 1i, + 10 + 0i, 8 - 6i, 4 - 6i, -2 - 10i, -5 + 0i, + }, + x: []complex128{ + 4 - 9i, + 8 + 5i, + -2 - 10i, + 2 - 4i, + }, + beta: 0, + y: []complex128{ + 8 - 6i, + -8 - 2i, + 9 + 5i, + 4 - 1i, + 6 - 4i, + }, + want: []complex128{ + 424 - 147i, + -237 + 61i, + 210 + 80i, + 213 - 329i, + 153 - 109i, + }, + wantXNeg: []complex128{ + 241 + 262i, + 515 + 85i, + 230 + 40i, + 38 + 491i, + 217 + 
184i, + }, + wantYNeg: []complex128{ + 153 - 109i, + 213 - 329i, + 210 + 80i, + -237 + 61i, + 424 - 147i, + }, + wantXYNeg: []complex128{ + 217 + 184i, + 38 + 491i, + 230 + 40i, + 515 + 85i, + 241 + 262i, + }, + }, + { + trans: blas.ConjTrans, + alpha: 0, + a: []complex128{ + 9 + 5i, -2 + 6i, 5 + 1i, 9 + 2i, 10 + 4i, + 0 - 7i, 9 - 9i, 5 + 3i, -8 - 1i, 7 - 7i, + 10 - 7i, -1 + 3i, 2 + 2i, 7 + 6i, 9 + 1i, + 10 + 0i, 8 - 6i, 4 - 6i, -2 - 10i, -5 + 0i, + }, + x: []complex128{ + 4 - 9i, + 8 + 5i, + -2 - 10i, + 2 - 4i, + }, + beta: 3 + 4i, + y: []complex128{ + 8 - 6i, + -8 - 2i, + 9 + 5i, + 4 - 1i, + 6 - 4i, + }, + want: []complex128{ + 48 + 14i, + -16 - 38i, + 7 + 51i, + 16 + 13i, + 34 + 12i, + }, + wantXNeg: []complex128{ + 48 + 14i, + -16 - 38i, + 7 + 51i, + 16 + 13i, + 34 + 12i, + }, + wantYNeg: []complex128{ + 48 + 14i, + -16 - 38i, + 7 + 51i, + 16 + 13i, + 34 + 12i, + }, + wantXYNeg: []complex128{ + 48 + 14i, + -16 - 38i, + 7 + 51i, + 16 + 13i, + 34 + 12i, + }, + }, + } { + var m, n int + switch test.trans { + case blas.NoTrans: + m = len(test.y) + n = len(test.x) + case blas.Trans, blas.ConjTrans: + m = len(test.x) + n = len(test.y) + } + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + for _, incY := range []int{-11, -2, -1, 1, 2, 7} { + for _, lda := range []int{max(1, n), n + 11} { + alpha := test.alpha + + a := makeZGeneral(test.a, m, n, lda) + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + + impl.Zgemv(test.trans, m, n, alpha, a, lda, x, incX, test.beta, y, incY) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected modification of x", tc, incX, incY, lda) + } + if !zsame(a, aCopy) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected modification of A", tc, incX, incY, lda) + } + + var want []complex128 + switch { + case incX > 0 && incY > 0: + want = makeZVector(test.want, incY) + case incX < 0 && incY > 0: + want = makeZVector(test.wantXNeg, incY) + case incX > 0 && incY < 0: + want = makeZVector(test.wantYNeg, incY) + default: + want = makeZVector(test.wantXYNeg, incY) + } + if !zsame(y, want) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected result\nwant %v\ngot %v", tc, incX, incY, lda, want, y) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zgerc.go b/vendor/gonum.org/v1/gonum/blas/testblas/zgerc.go new file mode 100644 index 0000000..1f61c3a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zgerc.go @@ -0,0 +1,183 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" +) + +type Zgercer interface { + Zgerc(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) +} + +func ZgercTest(t *testing.T, impl Zgercer) { + for tc, test := range []struct { + alpha complex128 + x []complex128 + incX int + y []complex128 + incY int + a []complex128 + + want []complex128 + }{ + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + }, + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 10 + 9i, 6 + 17i, 3 + 10i, 6 + 7i, + 3 + 4i, 11 + 16i, 5 + 14i, 11 + 18i, + 18 + 6i, 4 + 1i, 13 + 2i, 14 + 3i}, + want: []complex128{ + -185 + 534i, 26 + 277i, 118 + 485i, -289 + 592i, + 435 + 913i, 371 + 316i, 761 + 461i, 395 + 1131i, + 84 + 888i, 204 + 361i, 491 + 608i, -24 + 1037i}, + }, + { + incX: 7, + incY: 13, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 10 + 9i, 6 + 17i, 3 + 10i, 6 + 7i, + 3 + 4i, 11 + 16i, 5 + 14i, 11 + 18i, + 18 + 6i, 4 + 1i, 13 + 2i, 14 + 3i}, + want: []complex128{ + -185 + 534i, 26 + 277i, 118 + 485i, -289 + 592i, + 435 + 913i, 371 + 316i, 761 + 461i, 395 + 1131i, + 84 + 888i, 204 + 361i, 491 + 608i, -24 + 1037i}, + }, + { + incX: -7, + incY: -13, + alpha: 1 + 2i, + x: []complex128{10 + 18i, 18 + 15i, 1 + 13i}, + y: []complex128{19 + 12i, 5 + 16i, 4 + 8i, 15 + 12i}, + a: []complex128{ + 10 + 9i, 6 + 17i, 3 + 10i, 6 + 7i, + 3 + 4i, 11 + 16i, 5 + 14i, 11 + 18i, + 18 + 6i, 4 + 1i, 13 + 2i, 14 + 3i}, + want: []complex128{ + -185 + 534i, 26 + 277i, 118 + 485i, -289 + 592i, + 435 + 913i, 371 + 316i, 761 + 461i, 395 + 1131i, + 84 + 888i, 204 + 361i, 491 + 608i, -24 + 1037i}, + }, + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + x: []complex128{5 + 16i, 12 + 19i, 9 + 7i, 2 + 4i}, + y: []complex128{18 + 7i, 20 + 15i, 12 + 14i}, + a: []complex128{ + 11 + 4i, 17 + 18i, 7 + 13i, + 14 + 20i, 14 + 10i, 7 + 5i, + 7 + 17i, 10 + 6i, 11 + 13i, + 7 + 6i, 19 + 16i, 8 + 8i, + }, + want: []complex128{ + -293 + 661i, -133 + 943i, 47 + 703i, + -153 + 976i, 139 + 1260i, 297 + 885i, + 92 + 502i, 285 + 581i, 301 + 383i, + -45 + 192i, 19 + 266i, 48 + 188i, + }, + }, + { + incX: 7, + incY: 13, + alpha: 1 + 2i, + x: []complex128{5 + 16i, 12 + 19i, 9 + 7i, 2 + 4i}, + y: []complex128{18 + 7i, 20 + 15i, 12 + 14i}, + a: []complex128{ + 11 + 4i, 17 + 18i, 7 + 13i, + 14 + 20i, 14 + 10i, 7 + 5i, + 7 + 17i, 10 + 6i, 11 + 13i, + 7 + 6i, 19 + 16i, 8 + 8i, + }, + want: []complex128{ + -293 + 661i, -133 + 943i, 47 + 703i, + -153 + 976i, 139 + 1260i, 297 + 885i, + 92 + 502i, 285 + 581i, 301 + 383i, + -45 + 192i, 19 + 266i, 48 + 188i, + }, + }, + { + incX: -7, + incY: -13, + alpha: 1 + 2i, + x: []complex128{2 + 4i, 9 + 7i, 12 + 19i, 5 + 16i}, + y: []complex128{12 + 14i, 20 + 15i, 18 + 7i}, + a: []complex128{ + 11 + 4i, 17 + 18i, 7 + 13i, + 14 + 20i, 14 + 10i, 7 + 5i, + 7 + 17i, 10 + 6i, 11 + 13i, + 7 + 6i, 19 + 16i, 8 + 8i, + }, + want: []complex128{ + -293 + 661i, -133 + 943i, 47 + 703i, + -153 + 976i, 139 + 1260i, 297 + 885i, + 92 + 502i, 285 + 581i, 301 + 383i, + -45 + 192i, 19 + 266i, 48 + 188i, + }, + }, + { + incX: -7, + incY: -13, + alpha: 0, + x: []complex128{2 + 4i, 9 + 7i, 12 + 19i, 5 + 16i}, + y: []complex128{12 + 14i, 20 + 15i, 18 + 7i}, + a: []complex128{ + 11 + 4i, 17 + 18i, 7 + 13i, + 14 + 20i, 14 + 10i, 7 + 5i, + 7 + 17i, 10 + 6i, 11 + 13i, + 7 + 6i, 19 + 16i, 8 + 8i, + }, + want: []complex128{ + 11 
+ 4i, 17 + 18i, 7 + 13i, + 14 + 20i, 14 + 10i, 7 + 5i, + 7 + 17i, 10 + 6i, 11 + 13i, + 7 + 6i, 19 + 16i, 8 + 8i, + }, + }, + } { + m := len(test.x) + n := len(test.y) + incX := test.incX + incY := test.incY + + for _, lda := range []int{max(1, n), n + 20} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + a := makeZGeneral(test.a, m, n, lda) + want := makeZGeneral(test.want, m, n, lda) + + impl.Zgerc(m, n, test.alpha, x, incX, y, incY, a, lda) + + if !zsame(x, xCopy) { + t.Errorf("Case %v: unexpected modification of x", tc) + } + if !zsame(y, yCopy) { + t.Errorf("Case %v: unexpected modification of y", tc) + } + if !zsame(want, a) { + t.Errorf("Case %v: unexpected result\nwant %v\ngot %v", tc, want, a) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zgeru.go b/vendor/gonum.org/v1/gonum/blas/testblas/zgeru.go new file mode 100644 index 0000000..82dabd5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zgeru.go @@ -0,0 +1,257 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" +) + +type Zgeruer interface { + Zgeru(m, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) +} + +func ZgeruTest(t *testing.T, impl Zgeruer) { + for tc, test := range []struct { + alpha complex128 + x []complex128 + incX int + y []complex128 + incY int + a []complex128 + + want []complex128 + }{ + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + }, + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: 7, + incY: 13, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: 1, + incY: 13, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: 1, + incY: -13, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{19 + 12i, 5 + 16i, 4 + 8i, 15 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 
40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: 7, + incY: 1, + alpha: 1 + 2i, + x: []complex128{1 + 13i, 18 + 15i, 10 + 18i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: -7, + incY: 1, + alpha: 1 + 2i, + x: []complex128{10 + 18i, 18 + 15i, 1 + 13i}, + y: []complex128{15 + 12i, 4 + 8i, 5 + 16i, 19 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: -7, + incY: -13, + alpha: 1 + 2i, + x: []complex128{10 + 18i, 18 + 15i, 1 + 13i}, + y: []complex128{19 + 12i, 5 + 16i, 4 + 8i, 15 + 12i}, + a: []complex128{ + 4 + 7i, 4 + 7i, 12 + 3i, 9 + 10i, + 3 + 3i, 1 + 2i, 17 + 17i, 9 + 18i, + 14 + 12i, 9 + 16i, 1 + 1i, 9 + 1i, + }, + want: []complex128{ + -551 - 68i, -216 - 133i, -353 - 322i, -646 - 5i, + -789 + 624i, -455 + 110i, -859 + 80i, -831 + 843i, + -832 + 270i, -399 - 40i, -737 - 225i, -941 + 411i, + }, + }, + { + incX: 1, + incY: 1, + alpha: 1 + 2i, + x: []complex128{5 + 16i, 12 + 19i, 9 + 7i, 2 + 4i}, + y: []complex128{18 + 7i, 20 + 15i, 12 + 14i}, + a: []complex128{ + 8 + 17i, 2 + 2i, 8 + 17i, + 1 + 10i, 10 + 15i, 4 + 18i, + 11 + 3i, 15 + 7i, 12 + 15i, + 20 + 10i, 8 + 13i, 19 + 10i, + }, + want: []complex128{ + -660 + 296i, -928 + 117i, -680 - 49i, + -768 + 602i, -1155 + 485i, -910 + 170i, + -254 + 418i, -460 + 432i, -398 + 245i, + -144 + 112i, -232 + 83i, -165 + 22i, + }, + }, + { + incX: 7, + incY: 13, + alpha: 1 + 2i, + x: []complex128{5 + 16i, 12 + 19i, 9 + 7i, 2 + 4i}, + y: []complex128{18 + 7i, 20 + 15i, 12 + 14i}, + a: []complex128{ + 8 + 17i, 2 + 2i, 8 + 17i, + 1 + 10i, 10 + 15i, 4 + 18i, + 11 + 3i, 15 + 7i, 12 + 15i, + 20 + 10i, 8 + 13i, 19 + 10i, + }, + want: []complex128{ + -660 + 296i, -928 + 117i, -680 - 49i, + -768 + 602i, -1155 + 485i, -910 + 170i, + -254 + 418i, -460 + 432i, -398 + 245i, + -144 + 112i, -232 + 83i, -165 + 22i, + }, + }, + { + incX: -7, + incY: -13, + alpha: 1 + 2i, + x: []complex128{2 + 4i, 9 + 7i, 12 + 19i, 5 + 16i}, + y: []complex128{12 + 14i, 20 + 15i, 18 + 7i}, + a: []complex128{ + 8 + 17i, 2 + 2i, 8 + 17i, + 1 + 10i, 10 + 15i, 4 + 18i, + 11 + 3i, 15 + 7i, 12 + 15i, + 20 + 10i, 8 + 13i, 19 + 10i, + }, + want: []complex128{ + -660 + 296i, -928 + 117i, -680 - 49i, + -768 + 602i, -1155 + 485i, -910 + 170i, + -254 + 418i, -460 + 432i, -398 + 245i, + -144 + 112i, -232 + 83i, -165 + 22i, + }, + }, + { + incX: -7, + incY: -13, + alpha: 0, + x: []complex128{5 + 16i, 12 + 19i, 9 + 7i, 2 + 4i}, + y: []complex128{18 + 7i, 20 + 15i, 12 + 14i}, + a: []complex128{ + 8 + 17i, 2 + 2i, 8 + 17i, + 1 + 10i, 10 + 15i, 4 + 18i, + 11 + 3i, 15 + 7i, 12 + 15i, + 20 + 10i, 8 + 13i, 19 + 10i, + }, + want: []complex128{ + 8 + 17i, 2 + 2i, 8 + 17i, + 1 + 10i, 10 + 15i, 4 + 18i, + 11 + 3i, 15 + 7i, 12 + 15i, + 20 + 10i, 8 + 13i, 19 + 10i, + }, + }, + } { + m := len(test.x) + n := len(test.y) + incX := test.incX + incY := test.incY + + for _, lda := range []int{max(1, n), n + 20} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + 
copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + a := makeZGeneral(test.a, m, n, lda) + want := makeZGeneral(test.want, m, n, lda) + + impl.Zgeru(m, n, test.alpha, x, incX, y, incY, a, lda) + + if !zsame(x, xCopy) { + t.Errorf("Case %v: unexpected modification of x", tc) + } + if !zsame(y, yCopy) { + t.Errorf("Case %v: unexpected modification of y", tc) + } + if !zsame(want, a) { + t.Errorf("Case %v: unexpected result\nwant %v\ngot %v", tc, want, a) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhbmv.go new file mode 100644 index 0000000..a437217 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhbmv.go @@ -0,0 +1,138 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zhbmver interface { + Zhbmv(uplo blas.Uplo, n, k int, alpha complex128, ab []complex128, ldab int, x []complex128, incX int, beta complex128, y []complex128, incY int) + + Zhemver +} + +func ZhbmvTest(t *testing.T, impl Zhbmver) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, n := range []int{0, 1, 2, 3, 5} { + for k := 0; k < n; k++ { + for _, ldab := range []int{k + 1, k + 1 + 10} { + // Generate all possible combinations of given increments. + // Use slices to reduce indentation. + for _, inc := range allPairs([]int{-11, 1, 7}, []int{-3, 1, 5}) { + incX := inc[0] + incY := inc[1] + for _, ab := range []struct { + alpha complex128 + beta complex128 + }{ + // All potentially relevant values of + // alpha and beta. + {0, 0}, + {0, 1}, + {0, complex(rnd.NormFloat64(), rnd.NormFloat64())}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), 0}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), 1}, + {complex(rnd.NormFloat64(), rnd.NormFloat64()), complex(rnd.NormFloat64(), rnd.NormFloat64())}, + } { + testZhbmv(t, impl, rnd, uplo, n, k, ab.alpha, ab.beta, ldab, incX, incY) + } + } + } + } + } + } +} + +// testZhbmv tests Zhbmv by comparing its output to that of Zhemv. +func testZhbmv(t *testing.T, impl Zhbmver, rnd *rand.Rand, uplo blas.Uplo, n, k int, alpha, beta complex128, ldab, incX, incY int) { + const tol = 1e-13 + + // Allocate a dense-storage Hermitian band matrix filled with NaNs that will be + // used as the reference matrix for Zhemv. + lda := max(1, n) + a := makeZGeneral(nil, n, n, lda) + // Fill the matrix with zeros. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + } + // Fill the triangle band with random data, invalidating the imaginary + // part of diagonal elements because it should not be referenced by + // Zhbmv and Zhemv. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + a[i*lda+i] = complex(rnd.NormFloat64(), math.NaN()) + for j := i + 1; j < min(n, i+k+1); j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + } + } else { + for i := 0; i < n; i++ { + for j := max(0, i-k); j < i; j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + a[i*lda+i] = complex(rnd.NormFloat64(), math.NaN()) + } + } + // Create the actual Hermitian band matrix. 
+ ab := zPackTriBand(k, ldab, uplo, n, a, lda) + abCopy := make([]complex128, len(ab)) + copy(abCopy, ab) + + // Generate a random complex vector x. + xtest := make([]complex128, n) + for i := range xtest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + xtest[i] = complex(re, im) + } + x := makeZVector(xtest, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + // Generate a random complex vector y. + ytest := make([]complex128, n) + for i := range ytest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + ytest[i] = complex(re, im) + } + y := makeZVector(ytest, incY) + + want := make([]complex128, len(y)) + copy(want, y) + + // Compute the reference result of alpha*op(A)*x + beta*y, storing it + // into want. + impl.Zhemv(uplo, n, alpha, a, lda, x, incX, beta, want, incY) + // Compute alpha*op(A)*x + beta*y, storing the result in-place into y. + impl.Zhbmv(uplo, n, k, alpha, ab, ldab, x, incX, beta, y, incY) + + prefix := fmt.Sprintf("uplo=%v,n=%v,k=%v,incX=%v,incY=%v,ldab=%v", uplo, n, k, incX, incY, ldab) + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x", prefix) + } + if !zsame(ab, abCopy) { + t.Errorf("%v: unexpected modification of ab", prefix) + } + if !zSameAtNonstrided(y, want, incY) { + t.Errorf("%v: unexpected modification of y", prefix) + } + if !zEqualApproxAtStrided(y, want, incY, tol) { + t.Errorf("%v: unexpected result\nwant %v\ngot %v", prefix, want, y) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhemm.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhemm.go new file mode 100644 index 0000000..5b3607a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhemm.go @@ -0,0 +1,130 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zhemmer interface { + Zhemm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) +} + +func ZhemmTest(t *testing.T, impl Zhemmer) { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + name := sideString(side) + "-" + uploString(uplo) + t.Run(name, func(t *testing.T) { + for _, m := range []int{0, 1, 2, 3, 4, 5} { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + zhemmTest(t, impl, side, uplo, m, n) + } + } + }) + } + } +} + +func zhemmTest(t *testing.T, impl Zhemmer, side blas.Side, uplo blas.Uplo, m, n int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + nA := m + if side == blas.Right { + nA = n + } + for _, lda := range []int{max(1, nA), nA + 2} { + for _, ldb := range []int{max(1, n), n + 3} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []complex128{0, 1, complex(1.3, -1.1)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, nA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zhemm does not modify its triangle + // opposite to uplo. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + // Create a copy of A expanded into a + // full hermitian matrix for computing + // the expected result using zmm. 
+ aHem := make([]complex128, len(a)) + copy(aHem, a) + if uplo == blas.Upper { + for i := 0; i < nA; i++ { + aHem[i*lda+i] = complex(real(aHem[i*lda+i]), 0) + for j := i + 1; j < nA; j++ { + aHem[j*lda+i] = cmplx.Conj(aHem[i*lda+j]) + } + } + } else { + for i := 0; i < nA; i++ { + for j := 0; j < i; j++ { + aHem[j*lda+i] = cmplx.Conj(aHem[i*lda+j]) + } + aHem[i*lda+i] = complex(real(aHem[i*lda+i]), 0) + } + } + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, m*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Create a copy of B for checking that + // Zhemm does not modify B. + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, m*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + // Create a copy of C for checking that + // Zhemm does not modify C. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if side == blas.Left { + want = zmm(blas.NoTrans, blas.NoTrans, m, n, m, alpha, aHem, lda, b, ldb, beta, c, ldc) + } else { + want = zmm(blas.NoTrans, blas.NoTrans, m, n, n, alpha, b, ldb, aHem, lda, beta, c, ldc) + } + + // Compute the result using Zhemm. + impl.Zhemm(side, uplo, m, n, alpha, a, lda, b, ldb, beta, c, ldc) + + prefix := fmt.Sprintf("m=%v,n=%v,lda=%v,ldb=%v,ldc=%v,alpha=%v,beta=%v", m, n, lda, ldb, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if !zsame(b, bCopy) { + t.Errorf("%v: unexpected modification of B", prefix) + continue + } + + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhemv.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhemv.go new file mode 100644 index 0000000..f970f41 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhemv.go @@ -0,0 +1,276 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +var zhemvTestCases = []struct { + uplo blas.Uplo + alpha complex128 + a []complex128 + x []complex128 + beta complex128 + y []complex128 + + want []complex128 + wantXNeg []complex128 + wantYNeg []complex128 + wantXYNeg []complex128 +}{ + { + uplo: blas.Upper, + alpha: 6 + 2i, + beta: -6 - 7i, + }, + { + uplo: blas.Lower, + alpha: 6 + 2i, + beta: -6 - 7i, + }, + { + uplo: blas.Upper, + alpha: 6 + 2i, + a: []complex128{ + 7, 8 + 4i, -9 - 6i, -9 + 3i, + znan, -3, -10 - 6i, 0 + 3i, + znan, znan, 6, 2 + 8i, + znan, znan, znan, -4, + }, + x: []complex128{ + -4 + 0i, + -2 - 5i, + 8 + 0i, + 6 - 1i, + }, + beta: -6 - 7i, + y: []complex128{ + 1 - 5i, + -2 - 5i, + 0 - 4i, + 7 + 7i, + }, + want: []complex128{ + -785 - 685i, + -643 - 156i, + 776 + 692i, + 169 - 317i, + }, + wantXNeg: []complex128{ + 599 + 703i, + 1 + 172i, + -978 - 86i, + -449 - 423i, + }, + wantYNeg: []complex128{ + 121 - 203i, + 781 + 712i, + -648 - 176i, + -737 - 799i, + }, + wantXYNeg: []complex128{ + -497 - 309i, + -973 - 66i, + -4 + 152i, + 647 + 589i, + }, + }, + { + uplo: blas.Lower, + alpha: 6 + 2i, + a: []complex128{ + 7, znan, znan, znan, + 8 - 4i, -3, znan, znan, + -9 + 6i, -10 + 6i, 6, znan, + -9 - 3i, 0 - 3i, 2 - 8i, -4, + }, + x: []complex128{ + -4 + 0i, + -2 - 5i, + 8 + 0i, + 6 - 1i, + }, + beta: -6 - 7i, + y: []complex128{ + 1 - 5i, + -2 - 5i, + 0 - 4i, + 7 + 7i, + }, + want: []complex128{ + -785 - 685i, + -643 - 156i, + 776 + 692i, + 169 - 317i, + }, + wantXNeg: []complex128{ + 599 + 703i, + 1 + 172i, + -978 - 86i, + -449 - 423i, + }, + wantYNeg: []complex128{ + 121 - 203i, + 781 + 712i, + -648 - 176i, + -737 - 799i, + }, + wantXYNeg: []complex128{ + -497 - 309i, + -973 - 66i, + -4 + 152i, + 647 + 589i, + }, + }, + { + uplo: blas.Upper, + alpha: 0, + a: []complex128{ + 7, 8 + 4i, -9 - 6i, -9 + 3i, + znan, -3, -10 - 6i, 0 + 3i, + znan, znan, 6, 2 + 8i, + znan, znan, znan, -4, + }, + x: []complex128{ + -4 + 0i, + -2 - 5i, + 8 + 0i, + 6 - 1i, + }, + beta: -6 - 7i, + y: []complex128{ + 1 - 5i, + -2 - 5i, + 0 - 4i, + 7 + 7i, + }, + want: []complex128{ + -41 + 23i, + -23 + 44i, + -28 + 24i, + 7 - 91i, + }, + wantXNeg: []complex128{ + -41 + 23i, + -23 + 44i, + -28 + 24i, + 7 - 91i, + }, + wantYNeg: []complex128{ + -41 + 23i, + -23 + 44i, + -28 + 24i, + 7 - 91i, + }, + wantXYNeg: []complex128{ + -41 + 23i, + -23 + 44i, + -28 + 24i, + 7 - 91i, + }, + }, + { + uplo: blas.Upper, + alpha: 6 + 2i, + a: []complex128{ + 7, 8 + 4i, -9 - 6i, -9 + 3i, + znan, -3, -10 - 6i, 0 + 3i, + znan, znan, 6, 2 + 8i, + znan, znan, znan, -4, + }, + x: []complex128{ + -4 + 0i, + -2 - 5i, + 8 + 0i, + 6 - 1i, + }, + beta: 0, + y: []complex128{ + 1 - 5i, + -2 - 5i, + 0 - 4i, + 7 + 7i, + }, + want: []complex128{ + -744 - 708i, + -620 - 200i, + 804 + 668i, + 162 - 226i, + }, + wantXNeg: []complex128{ + 640 + 680i, + 24 + 128i, + -950 - 110i, + -456 - 332i, + }, + wantYNeg: []complex128{ + 162 - 226i, + 804 + 668i, + -620 - 200i, + -744 - 708i, + }, + wantXYNeg: []complex128{ + -456 - 332i, + -950 - 110i, + 24 + 128i, + 640 + 680i, + }, + }, +} + +type Zhemver interface { + Zhemv(uplo blas.Uplo, n int, alpha complex128, a []complex128, lda int, x []complex128, incX int, beta complex128, y []complex128, incY int) +} + +func ZhemvTest(t *testing.T, impl Zhemver) { + for tc, test := range zhemvTestCases { + n := len(test.x) + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + for _, incY := range []int{-11, -2, -1, 1, 2, 7} { + for _, lda := range []int{max(1, n), n + 11} { + 
alpha := test.alpha + beta := test.beta + + a := makeZGeneral(test.a, n, n, lda) + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + + impl.Zhemv(test.uplo, n, alpha, a, lda, x, incX, beta, y, incY) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected modification of x", tc, incX, incY, lda) + } + if !zsame(a, aCopy) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected modification of A", tc, incX, incY, lda) + } + + var want []complex128 + switch { + case incX > 0 && incY > 0: + want = makeZVector(test.want, incY) + case incX < 0 && incY > 0: + want = makeZVector(test.wantXNeg, incY) + case incX > 0 && incY < 0: + want = makeZVector(test.wantYNeg, incY) + default: + want = makeZVector(test.wantXYNeg, incY) + } + if !zsame(y, want) { + t.Errorf("Case %v (incX=%v,incY=%v,lda=%v): unexpected result\nwant %v\ngot %v", tc, incX, incY, lda, want, y) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zher.go b/vendor/gonum.org/v1/gonum/blas/testblas/zher.go new file mode 100644 index 0000000..0268ec5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zher.go @@ -0,0 +1,153 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +var zherTestCases = []struct { + alpha float64 + x []complex128 + a []complex128 + + want []complex128 + wantRev []complex128 // Result when incX is negative. +}{ + { + alpha: 1, + }, + { + alpha: 3, + x: []complex128{ + 0 - 3i, + 6 + 10i, + -2 - 7i, + }, + a: []complex128{ + -2 + 3i, -3 - 11i, 0 + 4i, + -3 + 11i, -6 + 3i, 7 + 2i, + 0 - 4i, 7 - 2i, 18 + 3i, + }, + want: []complex128{ + 25 + 0i, -93 - 65i, 63 + 22i, + -93 + 65i, 402 + 0i, -239 + 68i, + 63 - 22i, -239 - 68i, 177 + 0i}, + wantRev: []complex128{ + 157 + 0i, -249 - 77i, 63 - 14i, + -249 + 77i, 402 + 0i, -83 + 56i, + 63 + 14i, -83 - 56i, 45 + 0i, + }, + }, + { + alpha: 3, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + a: []complex128{ + 2 + 3i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 3i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 3i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 3i, + }, + want: []complex128{ + 122 + 0i, 3 - 77i, 3 + 11i, 52 + 125i, + 3 + 77i, 76 + 0i, -5 + 2i, -91 + 37i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 52 - 125i, -91 - 37i, 2 + 1i, 165 + 0i, + }, + wantRev: []complex128{ + 149 + 0i, -9 + 7i, -81 - 31i, 52 - 127i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + -81 + 31i, -5 - 2i, 74 + 0i, 14 + 83i, + 52 + 127i, -7 + 5i, 14 - 83i, 138 + 0i, + }, + }, + { + alpha: 0, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + wantRev: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + }, +} + +type Zherer interface { + Zher(uplo blas.Uplo, n int, alpha float64, x []complex128, incX int, a []complex128, lda int) +} + +func 
ZherTest(t *testing.T, impl Zherer) { + for tc, test := range zherTestCases { + n := len(test.x) + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + for _, lda := range []int{max(1, n), n + 11} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + a := makeZGeneral(test.a, n, n, lda) + + var want []complex128 + if incX > 0 { + want = makeZGeneral(test.want, n, n, lda) + } else { + want = makeZGeneral(test.wantRev, n, n, lda) + } + + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = znan + want[i*lda+j] = znan + } + } + } else { + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + a[i*lda+j] = znan + want[i*lda+j] = znan + } + } + } + + impl.Zher(uplo, n, test.alpha, x, incX, a, lda) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,lda=%v,alpha=%v): unexpected modification of x", tc, uplo, incX, test.alpha, lda) + } + if !zsame(want, a) { + t.Errorf("Case %v (uplo=%v,incX=%v,lda=%v,alpha=%v): unexpected result\nwant: %v\ngot: %v", tc, uplo, incX, lda, test.alpha, want, a) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zher2.go b/vendor/gonum.org/v1/gonum/blas/testblas/zher2.go new file mode 100644 index 0000000..b16686a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zher2.go @@ -0,0 +1,313 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +var zher2TestCases = []struct { + alpha complex128 + incX, incY int + x []complex128 + y []complex128 + a []complex128 + + want []complex128 +}{ + { + alpha: 1 + 2i, + incX: 1, + incY: 1, + }, + { + alpha: 1 + 2i, + incX: 1, + incY: 1, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 2 - 5i, + 0 + 0i, + -8 - 9i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 43 - 7i, 173 + 1i, -173 + 55i, + 43 + 7i, 16 + 0i, 19 + 120i, -19 - 89i, + 173 - 1i, 19 - 120i, 14 + 0i, 51 + 181i, + -173 - 55i, -19 + 89i, 51 - 181i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: 1, + incY: 1, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 2 - 5i, + -8 - 9i, + 0 + 0i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 213 - 17i, 3 + 11i, -173 + 55i, + 213 + 17i, 64 + 0i, -5 + 2i, 30 + 93i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + -173 - 55i, 30 - 93i, 2 + 1i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: 2, + incY: 4, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 2 - 5i, + -8 - 9i, + 0 + 0i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 213 - 17i, 3 + 11i, -173 + 55i, + 213 + 17i, 64 + 0i, -5 + 2i, 30 + 93i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + -173 - 55i, 30 - 93i, 2 + 1i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: 3, + incY: 7, + x: 
[]complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 2 - 5i, + 0 + 0i, + -8 - 9i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 43 - 7i, 173 + 1i, -173 + 55i, + 43 + 7i, 16 + 0i, 19 + 120i, -19 - 89i, + 173 - 1i, 19 - 120i, 14 + 0i, 51 + 181i, + -173 - 55i, -19 + 89i, 51 - 181i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: -3, + incY: 7, + x: []complex128{ + 0 + 7i, + 0 + 0i, + -2 - 4i, + -6 + 2i, + }, + y: []complex128{ + 2 - 5i, + 0 + 0i, + -8 - 9i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 43 - 7i, 173 + 1i, -173 + 55i, + 43 + 7i, 16 + 0i, 19 + 120i, -19 - 89i, + 173 - 1i, 19 - 120i, 14 + 0i, 51 + 181i, + -173 - 55i, -19 + 89i, 51 - 181i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: 3, + incY: -7, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 6 + 6i, + -8 - 9i, + 0 + 0i, + 2 - 5i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 43 - 7i, 173 + 1i, -173 + 55i, + 43 + 7i, 16 + 0i, 19 + 120i, -19 - 89i, + 173 - 1i, 19 - 120i, 14 + 0i, 51 + 181i, + -173 - 55i, -19 + 89i, 51 - 181i, -66 + 0i, + }, + }, + { + alpha: 1 + 2i, + incX: -3, + incY: -7, + x: []complex128{ + 0 + 7i, + 0 + 0i, + -2 - 4i, + -6 + 2i, + }, + y: []complex128{ + 6 + 6i, + -8 - 9i, + 0 + 0i, + 2 - 5i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 62 + 0i, 43 - 7i, 173 + 1i, -173 + 55i, + 43 + 7i, 16 + 0i, 19 + 120i, -19 - 89i, + 173 - 1i, 19 - 120i, 14 + 0i, 51 + 181i, + -173 - 55i, -19 + 89i, 51 - 181i, -66 + 0i, + }, + }, + { + alpha: 0, + incX: 1, + incY: 1, + x: []complex128{ + -6 + 2i, + -2 - 4i, + 0 + 0i, + 0 + 7i, + }, + y: []complex128{ + 2 - 5i, + 0 + 0i, + -8 - 9i, + 6 + 6i, + }, + a: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + want: []complex128{ + 2 + 0i, -9 + 7i, 3 + 11i, 10 - 1i, + -9 - 7i, 16 + 0i, -5 + 2i, -7 - 5i, + 3 - 11i, -5 - 2i, 14 + 0i, 2 - 1i, + 10 + 1i, -7 + 5i, 2 + 1i, 18 + 0i, + }, + }, +} + +type Zher2er interface { + Zher2(uplo blas.Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, a []complex128, lda int) +} + +func Zher2Test(t *testing.T, impl Zher2er) { + for tc, test := range zher2TestCases { + n := len(test.x) + incX := test.incX + incY := test.incY + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, lda := range []int{max(1, n), n + 11} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + a := makeZGeneral(test.a, n, n, lda) + want := makeZGeneral(test.want, n, n, lda) + + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = znan + want[i*lda+j] = znan + } + } + } else { + for i := 0; i < n; i++ { + for j := i + 
1; j < n; j++ { + a[i*lda+j] = znan + want[i*lda+j] = znan + } + } + } + + impl.Zher2(uplo, n, test.alpha, x, incX, y, incY, a, lda) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v,lda=%v: unexpected modification of x", tc, uplo, incX, incY, lda) + } + if !zsame(y, yCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v,lda=%v: unexpected modification of y", tc, uplo, incX, incY, lda) + } + if !zsame(want, a) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v,lda=%v: unexpected result\nwant: %v\ngot: %v", tc, uplo, incX, incY, lda, want, a) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zher2k.go b/vendor/gonum.org/v1/gonum/blas/testblas/zher2k.go new file mode 100644 index 0000000..1af9814 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zher2k.go @@ -0,0 +1,177 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zher2ker interface { + Zher2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta float64, c []complex128, ldc int) +} + +func Zher2kTest(t *testing.T, impl Zher2ker) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.ConjTrans} { + name := uploString(uplo) + "-" + transString(trans) + t.Run(name, func(t *testing.T) { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + for _, k := range []int{0, 1, 2, 3, 4, 5, 7} { + zher2kTest(t, impl, uplo, trans, n, k) + } + } + }) + } + } +} + +func zher2kTest(t *testing.T, impl Zher2ker, uplo blas.Uplo, trans blas.Transpose, n, k int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + row, col := n, k + if trans == blas.ConjTrans { + row, col = k, n + } + for _, lda := range []int{max(1, col), col + 2} { + for _, ldb := range []int{max(1, col), col + 3} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []float64{0, 1, 1.3} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, row*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zher2k does not modify A. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, row*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Create a copy of B for checking that + // Zher2k does not modify B. + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, n*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + if (alpha == 0 || k == 0) && beta == 1 { + // In case of a quick return + // zero out the diagonal. + for i := 0; i < n; i++ { + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } + // Create a copy of C for checking that + // Zher2k does not modify its triangle + // opposite to uplo. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + // Create a copy of C expanded into a + // full hermitian matrix for computing + // the expected result using zmm. 
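+						// zmm computes a general (Zgemm-style) product, so the
+						// Hermitian structure of C has to be made explicit before
+						// it can serve as a reference operand.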
+ cHer := make([]complex128, len(c)) + copy(cHer, c) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + cHer[i*ldc+i] = complex(real(cHer[i*ldc+i]), 0) + for j := i + 1; j < n; j++ { + cHer[j*ldc+i] = cmplx.Conj(cHer[i*ldc+j]) + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + cHer[j*ldc+i] = cmplx.Conj(cHer[i*ldc+j]) + } + cHer[i*ldc+i] = complex(real(cHer[i*ldc+i]), 0) + } + } + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if trans == blas.NoTrans { + // C = alpha*A*B^H + conj(alpha)*B*A^H + beta*C + tmp := zmm(blas.NoTrans, blas.ConjTrans, n, n, k, alpha, a, lda, b, ldb, complex(beta, 0), cHer, ldc) + want = zmm(blas.NoTrans, blas.ConjTrans, n, n, k, cmplx.Conj(alpha), b, ldb, a, lda, 1, tmp, ldc) + } else { + // C = alpha*A^H*B + conj(alpha)*B^H*A + beta*C + tmp := zmm(blas.ConjTrans, blas.NoTrans, n, n, k, alpha, a, lda, b, ldb, complex(beta, 0), cHer, ldc) + want = zmm(blas.ConjTrans, blas.NoTrans, n, n, k, cmplx.Conj(alpha), b, ldb, a, lda, 1, tmp, ldc) + } + + // Compute the result using Zher2k. + impl.Zher2k(uplo, trans, n, k, alpha, a, lda, b, ldb, beta, c, ldc) + + prefix := fmt.Sprintf("n=%v,k=%v,lda=%v,ldb=%v,ldc=%v,alpha=%v,beta=%v", n, k, lda, ldb, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if !zsame(b, bCopy) { + t.Errorf("%v: unexpected modification of B", prefix) + continue + } + if uplo == blas.Upper && !zSameLowerTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in lower triangle of C", prefix) + continue + } + if uplo == blas.Lower && !zSameUpperTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in upper triangle of C", prefix) + continue + } + + // Check that the diagonal of C has only real elements. + hasRealDiag := true + for i := 0; i < n; i++ { + if imag(c[i*ldc+i]) != 0 { + hasRealDiag = false + break + } + } + if !hasRealDiag { + t.Errorf("%v: diagonal of C has imaginary elements\ngot=%v", prefix, c) + continue + } + + // Expand C into a full hermitian matrix + // for comparison with the result from zmm. + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + c[j*ldc+i] = cmplx.Conj(c[i*ldc+j]) + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + c[j*ldc+i] = cmplx.Conj(c[i*ldc+j]) + } + } + } + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result\nwant=%v\ngot= %v", prefix, want, c) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zherk.go b/vendor/gonum.org/v1/gonum/blas/testblas/zherk.go new file mode 100644 index 0000000..7d986de --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zherk.go @@ -0,0 +1,158 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Zherker interface { + Zherk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha float64, a []complex128, lda int, beta float64, c []complex128, ldc int) +} + +func ZherkTest(t *testing.T, impl Zherker) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.ConjTrans} { + name := uploString(uplo) + "-" + transString(trans) + t.Run(name, func(t *testing.T) { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + for _, k := range []int{0, 1, 2, 3, 4, 5, 7} { + zherkTest(t, impl, uplo, trans, n, k) + } + } + }) + } + } +} + +func zherkTest(t *testing.T, impl Zherker, uplo blas.Uplo, trans blas.Transpose, n, k int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + rowA, colA := n, k + if trans == blas.ConjTrans { + rowA, colA = k, n + } + for _, lda := range []int{max(1, colA), colA + 2} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []float64{0, 1, 0.7} { + for _, beta := range []float64{0, 1, 1.3} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, rowA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zherk does not modify A. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, n*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + if (alpha == 0 || k == 0) && beta == 1 { + // In case of a quick return + // zero out the diagonal. + for i := 0; i < n; i++ { + c[i*ldc+i] = complex(real(c[i*ldc+i]), 0) + } + } + // Create a copy of C for checking that + // Zherk does not modify its triangle + // opposite to uplo. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + // Create a copy of C expanded into a + // full hermitian matrix for computing + // the expected result using zmm. + cHer := make([]complex128, len(c)) + copy(cHer, c) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + cHer[i*ldc+i] = complex(real(cHer[i*ldc+i]), 0) + for j := i + 1; j < n; j++ { + cHer[j*ldc+i] = cmplx.Conj(cHer[i*ldc+j]) + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + cHer[j*ldc+i] = cmplx.Conj(cHer[i*ldc+j]) + } + cHer[i*ldc+i] = complex(real(cHer[i*ldc+i]), 0) + } + } + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if trans == blas.NoTrans { + want = zmm(blas.NoTrans, blas.ConjTrans, n, n, k, complex(alpha, 0), a, lda, a, lda, complex(beta, 0), cHer, ldc) + } else { + want = zmm(blas.ConjTrans, blas.NoTrans, n, n, k, complex(alpha, 0), a, lda, a, lda, complex(beta, 0), cHer, ldc) + } + + // Compute the result using Zherk. + impl.Zherk(uplo, trans, n, k, alpha, a, lda, beta, c, ldc) + + prefix := fmt.Sprintf("n=%v,k=%v,lda=%v,ldc=%v,alpha=%v,beta=%v", n, k, lda, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if uplo == blas.Upper && !zSameLowerTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in lower triangle of C", prefix) + continue + } + if uplo == blas.Lower && !zSameUpperTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in upper triangle of C", prefix) + continue + } + + // Check that the diagonal of C has only real elements. 
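+					// The diagonal of a Hermitian matrix is real by definition,
+					// so Zherk must not leave an imaginary part on it.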
+ hasRealDiag := true + for i := 0; i < n; i++ { + if imag(c[i*ldc+i]) != 0 { + hasRealDiag = false + break + } + } + if !hasRealDiag { + t.Errorf("%v: diagonal of C has imaginary elements\ngot=%v", prefix, c) + continue + } + + // Expand C into a full hermitian matrix + // for comparison with the result from zmm. + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + c[j*ldc+i] = cmplx.Conj(c[i*ldc+j]) + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + c[j*ldc+i] = cmplx.Conj(c[i*ldc+j]) + } + } + } + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result\nwant=%v\ngot= %v", prefix, want, c) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhpmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhpmv.go new file mode 100644 index 0000000..fe60fda --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhpmv.go @@ -0,0 +1,61 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Zhpmver interface { + Zhpmv(uplo blas.Uplo, n int, alpha complex128, ap []complex128, x []complex128, incX int, beta complex128, y []complex128, incY int) +} + +func ZhpmvTest(t *testing.T, impl Zhpmver) { + for tc, test := range zhemvTestCases { + uplo := test.uplo + n := len(test.x) + alpha := test.alpha + beta := test.beta + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + for _, incY := range []int{-11, -2, -1, 1, 2, 7} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + + ap := zPack(uplo, n, test.a, n) + apCopy := make([]complex128, len(ap)) + copy(apCopy, ap) + + impl.Zhpmv(test.uplo, n, alpha, ap, x, incX, beta, y, incY) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (incX=%v,incY=%v): unexpected modification of x", tc, incX, incY) + } + if !zsame(ap, apCopy) { + t.Errorf("Case %v (incX=%v,incY=%v): unexpected modification of A", tc, incX, incY) + } + + var want []complex128 + switch { + case incX > 0 && incY > 0: + want = makeZVector(test.want, incY) + case incX < 0 && incY > 0: + want = makeZVector(test.wantXNeg, incY) + case incX > 0 && incY < 0: + want = makeZVector(test.wantYNeg, incY) + default: + want = makeZVector(test.wantXYNeg, incY) + } + if !zsame(y, want) { + t.Errorf("Case %v (incX=%v,incY=%v): unexpected result\nwant %v\ngot %v", tc, incX, incY, want, y) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhpr.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhpr.go new file mode 100644 index 0000000..57b6627 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhpr.go @@ -0,0 +1,46 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Zhprer interface { + Zhpr(uplo blas.Uplo, n int, alpha float64, x []complex128, incX int, ap []complex128) +} + +func ZhprTest(t *testing.T, impl Zhprer) { + for tc, test := range zherTestCases { + n := len(test.x) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + ap := zPack(uplo, n, test.a, n) + impl.Zhpr(uplo, n, test.alpha, x, incX, ap) + a := zUnpackAsHermitian(uplo, n, ap) + + var want []complex128 + if incX > 0 { + want = makeZGeneral(test.want, n, n, max(1, n)) + } else { + want = makeZGeneral(test.wantRev, n, n, max(1, n)) + } + + if !zsame(x, xCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,alpha=%v): unexpected modification of x", tc, uplo, incX, test.alpha) + } + if !zsame(want, a) { + t.Errorf("Case %v (uplo=%v,incX=%v,alpha=%v): unexpected result\nwant: %v\ngot: %v", tc, uplo, incX, test.alpha, want, a) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zhpr2.go b/vendor/gonum.org/v1/gonum/blas/testblas/zhpr2.go new file mode 100644 index 0000000..a1a984f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zhpr2.go @@ -0,0 +1,46 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Zhpr2er interface { + Zhpr2(uplo blas.Uplo, n int, alpha complex128, x []complex128, incX int, y []complex128, incY int, ap []complex128) +} + +func Zhpr2Test(t *testing.T, impl Zhpr2er) { + for tc, test := range zher2TestCases { + n := len(test.x) + incX := test.incX + incY := test.incY + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + y := makeZVector(test.y, incY) + yCopy := make([]complex128, len(y)) + copy(yCopy, y) + + ap := zPack(uplo, n, test.a, n) + impl.Zhpr2(uplo, n, test.alpha, x, incX, y, incY, ap) + a := zUnpackAsHermitian(uplo, n, ap) + + if !zsame(x, xCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v): unexpected modification of x", tc, uplo, incX, incY) + } + if !zsame(y, yCopy) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v): unexpected modification of y", tc, uplo, incX, incY) + } + if !zsame(test.want, a) { + t.Errorf("Case %v (uplo=%v,incX=%v,incY=%v): unexpected result\nwant: %v\ngot: %v", tc, uplo, incX, incY, test.want, a) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zscal.go b/vendor/gonum.org/v1/gonum/blas/testblas/zscal.go new file mode 100644 index 0000000..600a9de --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zscal.go @@ -0,0 +1,119 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "testing" +) + +type Zscaler interface { + Zscal(n int, alpha complex128, x []complex128, incX int) +} + +func ZscalTest(t *testing.T, impl Zscaler) { + for tc, test := range []struct { + alpha complex128 + x []complex128 + want []complex128 + }{ + { + alpha: 2 + 5i, + x: nil, + want: nil, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i}, + want: []complex128{-8 + 9i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i}, + want: []complex128{-8 + 9i, -14 + 23i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i, -50 + 107i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i, -50 + 107i, -56 + 121i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i, -50 + 107i, -56 + 121i, -62 + 135i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i, -50 + 107i, -56 + 121i, -62 + 135i, -68 + 149i}, + }, + { + alpha: 2 + 5i, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + want: []complex128{-8 + 9i, -14 + 23i, -20 + 37i, -26 + 51i, -32 + 65i, -38 + 79i, -44 + 93i, -50 + 107i, -56 + 121i, -62 + 135i, -68 + 149i, -74 + 163i}, + }, + { + alpha: 0, + x: []complex128{1 + 2i, 3 + 4i, 5 + 6i, 7 + 8i, 9 + 10i, 11 + 12i, 13 + 14i, 15 + 16i, 17 + 18i, 19 + 20i, 21 + 22i, 23 + 24i}, + want: []complex128{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + }, + } { + n := len(test.x) + if len(test.want) != n { + panic("bad test") + } + for _, incX := range []int{-3, -1, 1, 2, 4, 7, 10} { + x := makeZVector(test.x, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + want := makeZVector(test.want, incX) + + impl.Zscal(n, test.alpha, x, incX) + + prefix := fmt.Sprintf("Case %v (n=%v,incX=%v):", tc, n, incX) + + if incX < 0 { + if !zsame(x, xCopy) { + t.Errorf("%v: unexpected modification of x\nwant %v\ngot %v", prefix, want, x) + } + continue + } + if !zsame(x, want) { + t.Errorf("%v: unexpected result:\nwant: %v\ngot: %v", prefix, want, x) + } + } + } +} diff --git 
a/vendor/gonum.org/v1/gonum/blas/testblas/zswap.go b/vendor/gonum.org/v1/gonum/blas/testblas/zswap.go new file mode 100644 index 0000000..ce1ab7a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zswap.go @@ -0,0 +1,74 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" +) + +type Zswaper interface { + Zswap(n int, x []complex128, incX int, y []complex128, incY int) +} + +func ZswapTest(t *testing.T, impl Zswaper) { + rnd := rand.New(rand.NewSource(1)) + for n := 0; n < 20; n++ { + for _, inc := range allPairs([]int{-5, -1, 1, 2, 5, 10}, []int{-3, -1, 1, 3, 7, 12}) { + incX := inc[0] + incY := inc[1] + aincX := abs(incX) + aincY := abs(incY) + + var x, y []complex128 + if n > 0 { + x = make([]complex128, (n-1)*aincX+1) + y = make([]complex128, (n-1)*aincY+1) + } + for i := range x { + x[i] = znan + } + for i := range y { + y[i] = znan + } + for i := 0; i < n; i++ { + x[i*aincX] = complex(rnd.NormFloat64(), rnd.NormFloat64()) + y[i*aincY] = complex(rnd.NormFloat64(), rnd.NormFloat64()) + } + + xWant := make([]complex128, len(x)) + for i := range xWant { + xWant[i] = znan + } + yWant := make([]complex128, len(y)) + for i := range yWant { + yWant[i] = znan + } + if incX*incY > 0 { + for i := 0; i < n; i++ { + xWant[i*aincX] = y[i*aincY] + yWant[i*aincY] = x[i*aincX] + } + } else { + for i := 0; i < n; i++ { + xWant[(n-i-1)*aincX] = y[i*aincY] + yWant[(n-i-1)*aincY] = x[i*aincX] + } + } + + impl.Zswap(n, x, incX, y, incY) + + prefix := fmt.Sprintf("Case n=%v,incX=%v,incY=%v:", n, incX, incY) + if !zsame(x, xWant) { + t.Errorf("%v: unexpected x:\nwant %v\ngot %v", prefix, xWant, x) + } + if !zsame(y, yWant) { + t.Errorf("%v: unexpected y:\nwant %v\ngot %v", prefix, yWant, y) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zsymm.go b/vendor/gonum.org/v1/gonum/blas/testblas/zsymm.go new file mode 100644 index 0000000..5151e6e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zsymm.go @@ -0,0 +1,127 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Zsymmer interface { + Zsymm(side blas.Side, uplo blas.Uplo, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) +} + +func ZsymmTest(t *testing.T, impl Zsymmer) { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + name := sideString(side) + "-" + uploString(uplo) + t.Run(name, func(t *testing.T) { + for _, m := range []int{0, 1, 2, 3, 4, 5} { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + zsymmTest(t, impl, side, uplo, m, n) + } + } + }) + } + } +} + +func zsymmTest(t *testing.T, impl Zsymmer, side blas.Side, uplo blas.Uplo, m, n int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + nA := m + if side == blas.Right { + nA = n + } + for _, lda := range []int{max(1, nA), nA + 2} { + for _, ldb := range []int{max(1, n), n + 3} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []complex128{0, 1, complex(1.3, -1.1)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, nA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zsymm does not modify its triangle + // opposite to uplo. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + // Create a copy of A expanded into a + // full symmetric matrix for computing + // the expected result using zmm. + aSym := make([]complex128, len(a)) + copy(aSym, a) + if uplo == blas.Upper { + for i := 0; i < nA-1; i++ { + for j := i + 1; j < nA; j++ { + aSym[j*lda+i] = aSym[i*lda+j] + } + } + } else { + for i := 1; i < nA; i++ { + for j := 0; j < i; j++ { + aSym[j*lda+i] = aSym[i*lda+j] + } + } + } + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, m*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Create a copy of B for checking that + // Zsymm does not modify B. + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, m*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + // Create a copy of C for checking that + // Zsymm does not modify C. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if side == blas.Left { + want = zmm(blas.NoTrans, blas.NoTrans, m, n, m, alpha, aSym, lda, b, ldb, beta, c, ldc) + } else { + want = zmm(blas.NoTrans, blas.NoTrans, m, n, n, alpha, b, ldb, aSym, lda, beta, c, ldc) + } + + // Compute the result using Zsymm. 
+ impl.Zsymm(side, uplo, m, n, alpha, a, lda, b, ldb, beta, c, ldc) + + prefix := fmt.Sprintf("m=%v,n=%v,lda=%v,ldb=%v,ldc=%v,alpha=%v,beta=%v", m, n, lda, ldb, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if !zsame(b, bCopy) { + t.Errorf("%v: unexpected modification of B", prefix) + continue + } + + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zsyr2k.go b/vendor/gonum.org/v1/gonum/blas/testblas/zsyr2k.go new file mode 100644 index 0000000..ea40ec2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zsyr2k.go @@ -0,0 +1,155 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Zsyr2ker interface { + Zsyr2k(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, b []complex128, ldb int, beta complex128, c []complex128, ldc int) +} + +func Zsyr2kTest(t *testing.T, impl Zsyr2ker) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + name := uploString(uplo) + "-" + transString(trans) + t.Run(name, func(t *testing.T) { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + for _, k := range []int{0, 1, 2, 3, 4, 5, 7} { + zsyr2kTest(t, impl, uplo, trans, n, k) + } + } + }) + } + } +} + +func zsyr2kTest(t *testing.T, impl Zsyr2ker, uplo blas.Uplo, trans blas.Transpose, n, k int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + row, col := n, k + if trans == blas.Trans { + row, col = k, n + } + for _, lda := range []int{max(1, col), col + 2} { + for _, ldb := range []int{max(1, col), col + 3} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []complex128{0, 1, complex(1.3, -1.1)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, row*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zsyr2k does not modify A. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, row*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Create a copy of B for checking that + // Zsyr2k does not modify B. + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, n*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + // Create a copy of C for checking that + // Zsyr2k does not modify its triangle + // opposite to uplo. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + // Create a copy of C expanded into a + // full symmetric matrix for computing + // the expected result using zmm. + cSym := make([]complex128, len(c)) + copy(cSym, c) + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + cSym[j*ldc+i] = cSym[i*ldc+j] + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + cSym[j*ldc+i] = cSym[i*ldc+j] + } + } + } + + // Compute the expected result using an internal Zgemm implementation. 
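+						// The rank-2k update is assembled from two calls to zmm:
+						// the first forms one alpha term together with beta*C, and
+						// the second accumulates the remaining alpha term onto
+						// that intermediate result.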
+ var want []complex128 + if trans == blas.NoTrans { + // C = alpha*A*B^T + alpha*B*A^T + beta*C + tmp := zmm(blas.NoTrans, blas.Trans, n, n, k, alpha, a, lda, b, ldb, beta, cSym, ldc) + want = zmm(blas.NoTrans, blas.Trans, n, n, k, alpha, b, ldb, a, lda, 1, tmp, ldc) + } else { + // C = alpha*A^T*B + alpha*B^T*A + beta*C + tmp := zmm(blas.Trans, blas.NoTrans, n, n, k, alpha, a, lda, b, ldb, beta, cSym, ldc) + want = zmm(blas.Trans, blas.NoTrans, n, n, k, alpha, b, ldb, a, lda, 1, tmp, ldc) + } + + // Compute the result using Zsyr2k. + impl.Zsyr2k(uplo, trans, n, k, alpha, a, lda, b, ldb, beta, c, ldc) + + prefix := fmt.Sprintf("n=%v,k=%v,lda=%v,ldb=%v,ldc=%v,alpha=%v,beta=%v", n, k, lda, ldb, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if !zsame(b, bCopy) { + t.Errorf("%v: unexpected modification of B", prefix) + continue + } + if uplo == blas.Upper && !zSameLowerTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in lower triangle of C", prefix) + continue + } + if uplo == blas.Lower && !zSameUpperTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in upper triangle of C", prefix) + continue + } + + // Expand C into a full symmetric matrix + // for comparison with the result from zmm. + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + c[j*ldc+i] = c[i*ldc+j] + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + c[j*ldc+i] = c[i*ldc+j] + } + } + } + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/zsyrk.go b/vendor/gonum.org/v1/gonum/blas/testblas/zsyrk.go new file mode 100644 index 0000000..bc124b2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/zsyrk.go @@ -0,0 +1,135 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Zsyrker interface { + Zsyrk(uplo blas.Uplo, trans blas.Transpose, n, k int, alpha complex128, a []complex128, lda int, beta complex128, c []complex128, ldc int) +} + +func ZsyrkTest(t *testing.T, impl Zsyrker) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + name := uploString(uplo) + "-" + transString(trans) + t.Run(name, func(t *testing.T) { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + for _, k := range []int{0, 1, 2, 3, 4, 5, 7} { + zsyrkTest(t, impl, uplo, trans, n, k) + } + } + }) + } + } +} + +func zsyrkTest(t *testing.T, impl Zsyrker, uplo blas.Uplo, trans blas.Transpose, n, k int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + rowA, colA := n, k + if trans == blas.Trans { + rowA, colA = k, n + } + for _, lda := range []int{max(1, colA), colA + 2} { + for _, ldc := range []int{max(1, n), n + 4} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + for _, beta := range []complex128{0, 1, complex(1.3, -1.1)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, rowA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Create a copy of A for checking that + // Zsyrk does not modify A. 
+ aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Allocate the matrix C and fill it with random numbers. + c := make([]complex128, n*ldc) + for i := range c { + c[i] = rndComplex128(rnd) + } + // Create a copy of C for checking that + // Zsyrk does not modify its triangle + // opposite to uplo. + cCopy := make([]complex128, len(c)) + copy(cCopy, c) + // Create a copy of C expanded into a + // full symmetric matrix for computing + // the expected result using zmm. + cSym := make([]complex128, len(c)) + copy(cSym, c) + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + cSym[j*ldc+i] = cSym[i*ldc+j] + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + cSym[j*ldc+i] = cSym[i*ldc+j] + } + } + } + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if trans == blas.NoTrans { + want = zmm(blas.NoTrans, blas.Trans, n, n, k, alpha, a, lda, a, lda, beta, cSym, ldc) + } else { + want = zmm(blas.Trans, blas.NoTrans, n, n, k, alpha, a, lda, a, lda, beta, cSym, ldc) + } + + // Compute the result using Zsyrk. + impl.Zsyrk(uplo, trans, n, k, alpha, a, lda, beta, c, ldc) + + prefix := fmt.Sprintf("n=%v,k=%v,lda=%v,ldc=%v,alpha=%v,beta=%v", n, k, lda, ldc, alpha, beta) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + if uplo == blas.Upper && !zSameLowerTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in lower triangle of C", prefix) + continue + } + if uplo == blas.Lower && !zSameUpperTri(n, c, ldc, cCopy, ldc) { + t.Errorf("%v: unexpected modification in upper triangle of C", prefix) + continue + } + + // Expand C into a full symmetric matrix + // for comparison with the result from zmm. + if uplo == blas.Upper { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + c[j*ldc+i] = c[i*ldc+j] + } + } + } else { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + c[j*ldc+i] = c[i*ldc+j] + } + } + } + if !zEqualApprox(c, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztbmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztbmv.go new file mode 100644 index 0000000..d55d90e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztbmv.go @@ -0,0 +1,114 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Ztbmver interface { + Ztbmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, ab []complex128, ldab int, x []complex128, incX int) + + Ztrmver +} + +func ZtbmvTest(t *testing.T, impl Ztbmver) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, n := range []int{1, 2, 3, 5} { + for k := 0; k < n; k++ { + for _, ldab := range []int{k + 1, k + 1 + 10} { + for _, incX := range []int{-4, 1, 5} { + testZtbmv(t, impl, rnd, uplo, trans, diag, n, k, ldab, incX) + } + } + } + } + } + } + } +} + +// testZtbmv tests Ztbmv by comparing its output to that of Ztrmv. 
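+// A dense triangular matrix with zeros outside the band is generated first;
+// Ztrmv applied to that dense matrix provides the reference result, while
+// Ztbmv operates on the equivalent band-packed copy.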
+func testZtbmv(t *testing.T, impl Ztbmver, rnd *rand.Rand, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k, ldab, incX int) { + const tol = 1e-13 + + // Allocate a dense-storage triangular band matrix filled with NaNs that + // will be used as the reference matrix for Ztrmv. + lda := max(1, n) + a := makeZGeneral(nil, n, n, lda) + // Fill the referenced triangle with random data within the band and + // with zeros outside. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < min(n, i+k+1); j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + for j := i + k + 1; j < n; j++ { + a[i*lda+j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j < i-k; j++ { + a[i*lda+j] = 0 + } + for j := max(0, i-k); j <= i; j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + } + } + if diag == blas.Unit { + // The diagonal should not be referenced by Ztbmv and Ztrmv, so + // invalidate it with NaNs. + for i := 0; i < n; i++ { + a[i*lda+i] = znan + } + } + // Create the triangular band matrix. + ab := zPackTriBand(k, ldab, uplo, n, a, lda) + abCopy := make([]complex128, len(ab)) + copy(abCopy, ab) + + // Generate a random complex vector x. + xtest := make([]complex128, n) + for i := range xtest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + xtest[i] = complex(re, im) + } + x := makeZVector(xtest, incX) + xCopy := make([]complex128, len(x)) + copy(xCopy, x) + + want := make([]complex128, len(x)) + copy(want, x) + + // Compute the reference result of op(A)*x, storing it into want. + impl.Ztrmv(uplo, trans, diag, n, a, lda, want, incX) + // Compute op(A)*x, storing the result in-place into x. + impl.Ztbmv(uplo, trans, diag, n, k, ab, ldab, x, incX) + + name := fmt.Sprintf("uplo=%v,trans=%v,diag=%v,n=%v,k=%v,ldab=%v,incX=%v", uplo, trans, diag, n, k, ldab, incX) + if !zsame(ab, abCopy) { + t.Errorf("%v: unexpected modification of ab", name) + } + if !zSameAtNonstrided(x, want, incX) { + t.Errorf("%v: unexpected modification of x", name) + } + if !zEqualApproxAtStrided(x, want, incX, tol) { + t.Errorf("%v: unexpected result\nwant %v\ngot %v", name, want, x) + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztbsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztbsv.go new file mode 100644 index 0000000..4c3f834 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztbsv.go @@ -0,0 +1,115 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Ztbsver interface { + Ztbsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k int, ab []complex128, ldab int, x []complex128, incX int) + + Ztbmver +} + +func ZtbsvTest(t *testing.T, impl Ztbsver) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, n := range []int{1, 2, 3, 4, 10} { + for k := 0; k < n; k++ { + for _, ldab := range []int{k + 1, k + 1 + 10} { + for _, incX := range []int{-4, 1, 5} { + ztbsvTest(t, impl, rnd, uplo, trans, diag, n, k, ldab, incX) + } + } + } + } + } + } + } +} + +// ztbsvTest tests Ztbsv by checking whether Ztbmv followed by Ztbsv +// round-trip. 
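+// A random vector x is multiplied by the band matrix with Ztbmv and the
+// product is then solved with Ztbsv; up to a small tolerance the solution
+// must reproduce the original x.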
+func ztbsvTest(t *testing.T, impl Ztbsver, rnd *rand.Rand, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, k, ldab, incX int) {
+	const tol = 1e-10
+
+	// Allocate a dense-storage triangular band matrix filled with NaNs that
+	// will be used for creating the actual triangular band matrix.
+	lda := max(1, n)
+	a := makeZGeneral(nil, n, n, lda)
+	// Fill the referenced triangle of A with random data within the band
+	// and with zeros outside.
+	if uplo == blas.Upper {
+		for i := 0; i < n; i++ {
+			for j := i; j < min(n, i+k+1); j++ {
+				re := rnd.NormFloat64()
+				im := rnd.NormFloat64()
+				a[i*lda+j] = complex(re, im)
+			}
+			for j := i + k + 1; j < n; j++ {
+				a[i*lda+j] = 0
+			}
+		}
+	} else {
+		for i := 0; i < n; i++ {
+			for j := 0; j < i-k; j++ {
+				a[i*lda+j] = 0
+			}
+			for j := max(0, i-k); j <= i; j++ {
+				re := rnd.NormFloat64()
+				im := rnd.NormFloat64()
+				a[i*lda+j] = complex(re, im)
+			}
+		}
+	}
+	if diag == blas.Unit {
+		// The diagonal should not be referenced by Ztbmv and Ztbsv, so
+		// invalidate it with NaNs.
+		for i := 0; i < n; i++ {
+			a[i*lda+i] = znan
+		}
+	}
+	// Create the triangular band matrix.
+	ab := zPackTriBand(k, ldab, uplo, n, a, lda)
+	abCopy := make([]complex128, len(ab))
+	copy(abCopy, ab)
+
+	// Generate a random complex vector x.
+	xtest := make([]complex128, n)
+	for i := range xtest {
+		re := rnd.NormFloat64()
+		im := rnd.NormFloat64()
+		xtest[i] = complex(re, im)
+	}
+	x := makeZVector(xtest, incX)
+
+	// Store a copy of x as the correct result that we want.
+	want := make([]complex128, len(x))
+	copy(want, x)
+
+	// Compute A*x, denoting the result by b and storing it in x.
+	impl.Ztbmv(uplo, trans, diag, n, k, ab, ldab, x, incX)
+	// Solve A*x = b, that is, x = A^{-1}*b = A^{-1}*A*x.
+	impl.Ztbsv(uplo, trans, diag, n, k, ab, ldab, x, incX)
+	// If Ztbsv is correct, A^{-1}*A = I and x contains again its original value.
+
+	name := fmt.Sprintf("uplo=%v,trans=%v,diag=%v,n=%v,k=%v,ldab=%v,incX=%v", uplo, trans, diag, n, k, ldab, incX)
+	if !zsame(ab, abCopy) {
+		t.Errorf("%v: unexpected modification of A", name)
+	}
+	if !zSameAtNonstrided(x, want, incX) {
+		t.Errorf("%v: unexpected modification of x\nwant %v\ngot %v", name, want, x)
+	}
+	if !zEqualApproxAtStrided(x, want, incX, tol) {
+		t.Errorf("%v: unexpected result\nwant %v\ngot %v", name, want, x)
+	}
+}
diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztpmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztpmv.go
new file mode 100644
index 0000000..e018494
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztpmv.go
@@ -0,0 +1,75 @@
+// Copyright ©2018 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Ztpmver interface { + Ztpmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex128, x []complex128, incX int) +} + +func ZtpmvTest(t *testing.T, impl Ztpmver) { + for tc, test := range ztrmvTestCases { + n := len(test.x) + uplo := test.uplo + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + ap := zPack(uplo, n, test.a, n) + apCopy := make([]complex128, len(ap)) + copy(apCopy, ap) + + x := makeZVector(test.x, incX) + + impl.Ztpmv(uplo, trans, diag, n, ap, x, incX) + + if !zsame(ap, apCopy) { + t.Errorf("Case %v (uplo=%v,trans=%v,diag=%v,incX=%v): unexpected modification of A", tc, uplo, trans, diag, incX) + } + + var want []complex128 + if diag == blas.NonUnit { + switch { + case trans == blas.NoTrans && incX > 0: + want = makeZVector(test.want, incX) + case trans == blas.NoTrans && incX < 0: + want = makeZVector(test.wantNeg, incX) + case trans == blas.Trans && incX > 0: + want = makeZVector(test.wantTrans, incX) + case trans == blas.Trans && incX < 0: + want = makeZVector(test.wantTransNeg, incX) + case trans == blas.ConjTrans && incX > 0: + want = makeZVector(test.wantConjTrans, incX) + case trans == blas.ConjTrans && incX < 0: + want = makeZVector(test.wantConjTransNeg, incX) + } + } else { + switch { + case trans == blas.NoTrans && incX > 0: + want = makeZVector(test.wantUnit, incX) + case trans == blas.NoTrans && incX < 0: + want = makeZVector(test.wantUnitNeg, incX) + case trans == blas.Trans && incX > 0: + want = makeZVector(test.wantUnitTrans, incX) + case trans == blas.Trans && incX < 0: + want = makeZVector(test.wantUnitTransNeg, incX) + case trans == blas.ConjTrans && incX > 0: + want = makeZVector(test.wantUnitConjTrans, incX) + case trans == blas.ConjTrans && incX < 0: + want = makeZVector(test.wantUnitConjTransNeg, incX) + } + } + if !zsame(x, want) { + t.Errorf("Case %v (uplo=%v,trans=%v,diag=%v,incX=%v): unexpected result\nwant %v\ngot %v", tc, uplo, trans, diag, incX, want, x) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztpsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztpsv.go new file mode 100644 index 0000000..b405227 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztpsv.go @@ -0,0 +1,105 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Ztpsver interface { + Ztpsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, ap []complex128, x []complex128, incX int) + + Ztpmver +} + +func ZtpsvTest(t *testing.T, impl Ztpsver) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, n := range []int{0, 1, 2, 3, 4, 10} { + for _, incX := range []int{-11, -3, -2, -1, 1, 2, 3, 7} { + ztpsvTest(t, impl, uplo, trans, diag, n, incX, rnd) + } + } + } + } + } +} + +// ztpsvTest tests Ztpsv by checking whether Ztpmv followed by Ztpsv +// round-trip. 
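+// A random vector x is multiplied by the packed triangular matrix with Ztpmv
+// and the product is then solved with Ztpsv; up to a small tolerance the
+// solution must reproduce the original x.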
+func ztpsvTest(t *testing.T, impl Ztpsver, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, incX int, rnd *rand.Rand) {
+	const tol = 1e-10
+
+	// Allocate a dense-storage triangular matrix filled with NaNs that
+	// will be used for creating the actual triangular matrix in packed
+	// storage.
+	lda := n
+	a := makeZGeneral(nil, n, n, max(1, lda))
+	// Fill the referenced triangle of A with random data.
+	if uplo == blas.Upper {
+		for i := 0; i < n; i++ {
+			for j := i; j < n; j++ {
+				re := rnd.NormFloat64()
+				im := rnd.NormFloat64()
+				a[i*lda+j] = complex(re, im)
+			}
+		}
+	} else {
+		for i := 0; i < n; i++ {
+			for j := 0; j <= i; j++ {
+				re := rnd.NormFloat64()
+				im := rnd.NormFloat64()
+				a[i*lda+j] = complex(re, im)
+			}
+		}
+	}
+	if diag == blas.Unit {
+		// The diagonal should not be referenced by Ztpmv and Ztpsv, so
+		// invalidate it with NaNs.
+		for i := 0; i < n; i++ {
+			a[i*lda+i] = znan
+		}
+	}
+	// Create the triangular matrix in packed storage.
+	ap := zPack(uplo, n, a, n)
+	apCopy := make([]complex128, len(ap))
+	copy(apCopy, ap)
+
+	// Generate a random complex vector x.
+	xtest := make([]complex128, n)
+	for i := range xtest {
+		re := rnd.NormFloat64()
+		im := rnd.NormFloat64()
+		xtest[i] = complex(re, im)
+	}
+	x := makeZVector(xtest, incX)
+
+	// Store a copy of x as the correct result that we want.
+	want := make([]complex128, len(x))
+	copy(want, x)
+
+	// Compute A*x, denoting the result by b and storing it in x.
+	impl.Ztpmv(uplo, trans, diag, n, ap, x, incX)
+	// Solve A*x = b, that is, x = A^{-1}*b = A^{-1}*A*x.
+	impl.Ztpsv(uplo, trans, diag, n, ap, x, incX)
+	// If Ztpsv is correct, A^{-1}*A = I and x contains again its original value.
+
+	name := fmt.Sprintf("uplo=%v,trans=%v,diag=%v,n=%v,incX=%v", uplo, trans, diag, n, incX)
+	if !zsame(ap, apCopy) {
+		t.Errorf("%v: unexpected modification of ap", name)
+	}
+	if !zSameAtNonstrided(x, want, incX) {
+		t.Errorf("%v: unexpected modification of x\nwant %v\ngot %v", name, want, x)
+	}
+	if !zEqualApproxAtStrided(x, want, incX, tol) {
+		t.Errorf("%v: unexpected result\nwant %v\ngot %v", name, want, x)
+	}
+}
diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztrmm.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztrmm.go
new file mode 100644
index 0000000..487c99b
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztrmm.go
@@ -0,0 +1,128 @@
+// Copyright ©2019 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Ztrmmer interface { + Ztrmm(side blas.Side, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) +} + +func ZtrmmTest(t *testing.T, impl Ztrmmer) { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.Unit, blas.NonUnit} { + name := sideString(side) + "-" + uploString(uplo) + "-" + transString(trans) + "-" + diagString(diag) + t.Run(name, func(t *testing.T) { + for _, m := range []int{0, 1, 2, 3, 4, 5} { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + ztrmmTest(t, impl, side, uplo, trans, diag, m, n) + } + } + }) + } + } + } + } +} + +func ztrmmTest(t *testing.T, impl Ztrmmer, side blas.Side, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, m, n int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + nA := m + if side == blas.Right { + nA = n + } + for _, lda := range []int{max(1, nA), nA + 2} { + for _, ldb := range []int{max(1, n), n + 3} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, nA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Put a zero into A to cover special cases in Ztrmm. + if nA > 1 { + if uplo == blas.Upper { + a[nA-1] = 0 + } else { + a[(nA-1)*lda] = 0 + } + } + // Create a copy of A for checking that Ztrmm + // does not modify its triangle opposite to + // uplo. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + // Create a dense representation of A for + // computing the expected result using zmm. + aTri := make([]complex128, len(a)) + copy(aTri, a) + if uplo == blas.Upper { + for i := 0; i < nA; i++ { + // Zero out the lower triangle. + for j := 0; j < i; j++ { + aTri[i*lda+j] = 0 + } + if diag == blas.Unit { + aTri[i*lda+i] = 1 + } + } + } else { + for i := 0; i < nA; i++ { + if diag == blas.Unit { + aTri[i*lda+i] = 1 + } + // Zero out the upper triangle. + for j := i + 1; j < nA; j++ { + aTri[i*lda+j] = 0 + } + } + } + + // Allocate the matrix B and fill it with random numbers. + b := make([]complex128, m*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Put a zero into B to cover special cases in Ztrmm. + if m > 0 && n > 0 { + b[0] = 0 + } + + // Compute the expected result using an internal Zgemm implementation. + var want []complex128 + if side == blas.Left { + want = zmm(trans, blas.NoTrans, m, n, m, alpha, aTri, lda, b, ldb, 0, b, ldb) + } else { + want = zmm(blas.NoTrans, trans, m, n, n, alpha, b, ldb, aTri, lda, 0, b, ldb) + } + + // Compute the result using Ztrmm. + impl.Ztrmm(side, uplo, trans, diag, m, n, alpha, a, lda, b, ldb) + + prefix := fmt.Sprintf("m=%v,n=%v,lda=%v,ldb=%v,alpha=%v", m, n, lda, ldb, alpha) + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + + if !zEqualApprox(b, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztrmv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztrmv.go new file mode 100644 index 0000000..3478410 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztrmv.go @@ -0,0 +1,285 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +var ztrmvTestCases = []struct { + uplo blas.Uplo + a []complex128 + x []complex128 + + // Results with non-unit diagonal. + want []complex128 + wantNeg []complex128 + wantTrans []complex128 + wantTransNeg []complex128 + wantConjTrans []complex128 + wantConjTransNeg []complex128 + + // Results with unit diagonal. + wantUnit []complex128 + wantUnitNeg []complex128 + wantUnitTrans []complex128 + wantUnitTransNeg []complex128 + wantUnitConjTrans []complex128 + wantUnitConjTransNeg []complex128 +}{ + {uplo: blas.Upper}, + {uplo: blas.Lower}, + { + uplo: blas.Upper, + a: []complex128{ + 6 - 8i, -10 + 10i, -6 - 3i, -1 - 8i, + znan, 7 + 8i, -7 + 9i, 3 + 6i, + znan, znan, 6 - 4i, -2 - 5i, + znan, znan, znan, 4 - 8i, + }, + x: []complex128{ + 10 - 5i, + -2 + 2i, + 8 - 1i, + -7 + 9i, + }, + + want: []complex128{ + 48 - 121i, + -152 + 62i, + 103 - 21i, + 44 + 92i, + }, + wantNeg: []complex128{ + 0 - 100i, + -49 - 20i, + 120 + 70i, + -72 + 119i, + }, + wantTrans: []complex128{ + 20 - 110i, + -80 + 148i, + -35 - 70i, + -45 - 27i, + }, + wantTransNeg: []complex128{ + 123 - 2i, + 18 + 66i, + 44 - 103i, + 30 + 110i, + }, + wantConjTrans: []complex128{ + 100 + 50i, + -148 - 20i, + 39 + 90i, + -75 + 125i, + }, + wantConjTransNeg: []complex128{ + 27 - 70i, + -70 - 136i, + 208 - 91i, + -114 - 2i, + }, + + wantUnit: []complex128{ + 38 - 16i, + -124 + 66i, + 67 + 16i, + -7 + 9i, + }, + wantUnitNeg: []complex128{ + 10 - 5i, + -47 - 38i, + 64 + 12i, + -109 + 18i, + }, + wantUnitTrans: []complex128{ + 10 - 5i, + -52 + 152i, + -71 - 33i, + -96 - 110i, + }, + wantUnitTransNeg: []complex128{ + 133 + 93i, + 20 + 48i, + -12 - 161i, + -7 + 9i, + }, + wantUnitConjTrans: []complex128{ + 10 - 5i, + -152 - 48i, + -5 + 63i, + 18 + 154i, + }, + wantUnitConjTransNeg: []complex128{ + -43 - 135i, + -52 - 138i, + 168 - 21i, + -7 + 9i, + }, + }, + { + uplo: blas.Lower, + a: []complex128{ + 10 - 8i, znan, znan, znan, + 1 - 6i, -4 + 8i, znan, znan, + 2 - 6i, 4 - 8i, 5 + 3i, znan, + -7 - 4i, 1 + 3i, -2 - 4i, 9 + 8i, + }, + x: []complex128{ + 10 + 5i, + -7 + 1i, + 3 - 1i, + 9 + 10i, + }, + + want: []complex128{ + 140 - 30i, + 60 - 115i, + 48 + 14i, + -69 + 57i, + }, + wantNeg: []complex128{ + 51 + 53i, + 44 - 78i, + 65 - 16i, + 170 + 28i, + }, + wantTrans: []complex128{ + 116 - 113i, + 3 - 51i, + 40 - 52i, + 1 + 162i, + }, + wantTransNeg: []complex128{ + 50 + 125i, + -38 - 66i, + -29 + 123i, + 109 - 22i, + }, + wantConjTrans: []complex128{ + -44 + 71i, + 95 + 55i, + -46 + 2i, + 161 + 18i, + }, + wantConjTransNeg: []complex128{ + 130 - 35i, + -72 + 56i, + -31 - 97i, + -91 + 154i, + }, + + wantUnit: []complex128{ + 10 + 5i, + 33 - 54i, + 33 + 9i, + -61 - 95i, + }, + wantUnitNeg: []complex128{ + 11 - 67i, + 75 - 61i, + 72 - 45i, + 9 + 10i, + }, + wantUnitTrans: []complex128{ + -14 - 78i, + -24 + 10i, + 25 - 57i, + 9 + 10i, + }, + wantUnitTransNeg: []complex128{ + 10 + 5i, + -7 - 49i, + -22 + 94i, + -52 - 40i, + }, + wantUnitConjTrans: []complex128{ + -94 - 54i, + 52 + 4i, + -55 + 15i, + 9 + 10i, + }, + wantUnitConjTransNeg: []complex128{ + 10 + 5i, + -47 + 31i, + -8 - 78i, + -92 - 8i, + }, + }, +} + +type Ztrmver interface { + Ztrmv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex128, lda int, x []complex128, incX int) +} + +func ZtrmvTest(t *testing.T, impl Ztrmver) { + for tc, test := range ztrmvTestCases { + n := len(test.x) + 
uplo := test.uplo + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, incX := range []int{-11, -2, -1, 1, 2, 7} { + for _, lda := range []int{max(1, n), n + 11} { + a := makeZGeneral(test.a, n, n, lda) + if diag == blas.Unit { + for i := 0; i < n; i++ { + a[i*lda+i] = znan + } + } + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + x := makeZVector(test.x, incX) + + impl.Ztrmv(uplo, trans, diag, n, a, lda, x, incX) + + if !zsame(a, aCopy) { + t.Errorf("Case %v (uplo=%v,trans=%v,diag=%v,lda=%v,incX=%v): unexpected modification of A", tc, uplo, trans, diag, lda, incX) + } + + var want []complex128 + if diag == blas.NonUnit { + switch { + case trans == blas.NoTrans && incX > 0: + want = makeZVector(test.want, incX) + case trans == blas.NoTrans && incX < 0: + want = makeZVector(test.wantNeg, incX) + case trans == blas.Trans && incX > 0: + want = makeZVector(test.wantTrans, incX) + case trans == blas.Trans && incX < 0: + want = makeZVector(test.wantTransNeg, incX) + case trans == blas.ConjTrans && incX > 0: + want = makeZVector(test.wantConjTrans, incX) + case trans == blas.ConjTrans && incX < 0: + want = makeZVector(test.wantConjTransNeg, incX) + } + } else { + switch { + case trans == blas.NoTrans && incX > 0: + want = makeZVector(test.wantUnit, incX) + case trans == blas.NoTrans && incX < 0: + want = makeZVector(test.wantUnitNeg, incX) + case trans == blas.Trans && incX > 0: + want = makeZVector(test.wantUnitTrans, incX) + case trans == blas.Trans && incX < 0: + want = makeZVector(test.wantUnitTransNeg, incX) + case trans == blas.ConjTrans && incX > 0: + want = makeZVector(test.wantUnitConjTrans, incX) + case trans == blas.ConjTrans && incX < 0: + want = makeZVector(test.wantUnitConjTransNeg, incX) + } + } + if !zsame(x, want) { + t.Errorf("Case %v (uplo=%v,trans=%v,diag=%v,lda=%v,incX=%v): unexpected result\nwant %v\ngot %v", tc, uplo, trans, diag, lda, incX, want, x) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztrsm.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztrsm.go new file mode 100644 index 0000000..25e253b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztrsm.go @@ -0,0 +1,144 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Ztrsmer interface { + Ztrsm(side blas.Side, uplo blas.Uplo, transA blas.Transpose, diag blas.Diag, m, n int, alpha complex128, a []complex128, lda int, b []complex128, ldb int) +} + +func ZtrsmTest(t *testing.T, impl Ztrsmer) { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.Unit, blas.NonUnit} { + name := sideString(side) + "-" + uploString(uplo) + "-" + transString(trans) + "-" + diagString(diag) + t.Run(name, func(t *testing.T) { + for _, m := range []int{0, 1, 2, 3, 4, 5} { + for _, n := range []int{0, 1, 2, 3, 4, 5} { + ztrsmTest(t, impl, side, uplo, trans, diag, m, n) + } + } + }) + } + } + } + } +} + +func ztrsmTest(t *testing.T, impl Ztrsmer, side blas.Side, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, m, n int) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + + nA := m + if side == blas.Right { + nA = n + } + for _, lda := range []int{max(1, nA), nA + 2} { + for _, ldb := range []int{max(1, n), n + 3} { + for _, alpha := range []complex128{0, 1, complex(0.7, -0.9)} { + // Allocate the matrix A and fill it with random numbers. + a := make([]complex128, nA*lda) + for i := range a { + a[i] = rndComplex128(rnd) + } + // Set some elements of A to 0 and 1 to cover special cases in Ztrsm. + if nA > 2 { + if uplo == blas.Upper { + a[nA-2] = 1 + a[nA-1] = 0 + } else { + a[(nA-2)*lda] = 1 + a[(nA-1)*lda] = 0 + } + } + // Create a copy of A for checking that Ztrsm + // does not modify its triangle opposite to uplo. + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + // Create a dense representation of A for + // computing the right-hand side matrix using zmm. + aTri := make([]complex128, len(a)) + copy(aTri, a) + if uplo == blas.Upper { + for i := 0; i < nA; i++ { + // Zero out the lower triangle. + for j := 0; j < i; j++ { + aTri[i*lda+j] = 0 + } + if diag == blas.Unit { + aTri[i*lda+i] = 1 + } + } + } else { + for i := 0; i < nA; i++ { + if diag == blas.Unit { + aTri[i*lda+i] = 1 + } + // Zero out the upper triangle. + for j := i + 1; j < nA; j++ { + aTri[i*lda+j] = 0 + } + } + } + + // Allocate the right-hand side matrix B and fill it with random numbers. + b := make([]complex128, m*ldb) + for i := range b { + b[i] = rndComplex128(rnd) + } + // Set some elements of B to 0 to cover special cases in Ztrsm. + if m > 1 && n > 1 { + b[0] = 0 + b[(m-1)*ldb+n-1] = 0 + } + bCopy := make([]complex128, len(b)) + copy(bCopy, b) + + // Compute the solution matrix X using Ztrsm. + // X is overwritten on B. + impl.Ztrsm(side, uplo, trans, diag, m, n, alpha, a, lda, b, ldb) + x := b + + prefix := fmt.Sprintf("m=%v,n=%v,lda=%v,ldb=%v,alpha=%v", m, n, lda, ldb, alpha) + + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", prefix) + continue + } + + // Compute the left-hand side matrix of op(A)*X=alpha*B or X*op(A)=alpha*B + // using an internal Zgemm implementation. + var lhs []complex128 + if side == blas.Left { + lhs = zmm(trans, blas.NoTrans, m, n, m, 1, aTri, lda, x, ldb, 0, b, ldb) + } else { + lhs = zmm(blas.NoTrans, trans, m, n, n, 1, x, ldb, aTri, lda, 0, b, ldb) + } + + // Compute the right-hand side matrix alpha*B. 
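+				// Comparing this against the left-hand side computed above
+				// checks the residual identity
+				//	op(A)*X ≈ alpha*B   (side == blas.Left)
+				//	X*op(A) ≈ alpha*B   (side == blas.Right)
+				// where X is the solution returned by Ztrsm and B is the
+				// original right-hand side saved in bCopy.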
+ rhs := bCopy + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + rhs[i*ldb+j] *= alpha + } + } + + if !zEqualApprox(lhs, rhs, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/blas/testblas/ztrsv.go b/vendor/gonum.org/v1/gonum/blas/testblas/ztrsv.go new file mode 100644 index 0000000..c981cdd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/blas/testblas/ztrsv.go @@ -0,0 +1,102 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testblas + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/blas" +) + +type Ztrsver interface { + Ztrsv(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []complex128, lda int, x []complex128, incX int) + + Ztrmver +} + +func ZtrsvTest(t *testing.T, impl Ztrsver) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans, blas.ConjTrans} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, n := range []int{0, 1, 2, 3, 4, 10} { + for _, lda := range []int{max(1, n), n + 11} { + for _, incX := range []int{-11, -3, -2, -1, 1, 2, 3, 7} { + ztrsvTest(t, impl, uplo, trans, diag, n, lda, incX, rnd) + } + } + } + } + } + } +} + +// ztrsvTest tests Ztrsv by checking whether Ztrmv followed by Ztrsv +// round-trip. +func ztrsvTest(t *testing.T, impl Ztrsver, uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, lda, incX int, rnd *rand.Rand) { + const tol = 1e-10 + + // Allocate a dense-storage triangular matrix A filled with NaNs. + a := makeZGeneral(nil, n, n, lda) + // Fill the referenced triangle of A with random data. + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + a[i*lda+j] = complex(re, im) + } + } + } + if diag == blas.Unit { + // The diagonal should not be referenced by Ztrmv and Ztrsv, so + // invalidate it with NaNs. + for i := 0; i < n; i++ { + a[i*lda+i] = znan + } + } + aCopy := make([]complex128, len(a)) + copy(aCopy, a) + + // Generate a random complex vector x. + xtest := make([]complex128, n) + for i := range xtest { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + xtest[i] = complex(re, im) + } + x := makeZVector(xtest, incX) + + // Store a copy of x as the correct result that we want. + want := make([]complex128, len(x)) + copy(want, x) + + // Compute A*x, denoting the result by b and storing it in x. + impl.Ztrmv(uplo, trans, diag, n, a, lda, x, incX) + // Solve A*x = b, that is, x = A^{-1}*b = A^{-1}*A*x. + impl.Ztrsv(uplo, trans, diag, n, a, lda, x, incX) + // If Ztrsv is correct, A^{-1}*A = I and x contains again its original value. 
+ + name := fmt.Sprintf("uplo=%v,trans=%v,diag=%v,n=%v,lda=%v,incX=%v", uplo, trans, diag, n, lda, incX) + if !zsame(a, aCopy) { + t.Errorf("%v: unexpected modification of A", name) + } + if !zSameAtNonstrided(x, want, incX) { + t.Errorf("%v: unexpected modification of x\nwant %v\ngot %v", name, want, x) + } + if !zEqualApproxAtStrided(x, want, incX, tol) { + t.Errorf("%v: unexpected result\nwant %v\ngot %v", name, want, x) + } +} diff --git a/vendor/gonum.org/v1/gonum/bound/bound.go b/vendor/gonum.org/v1/gonum/bound/bound.go new file mode 100644 index 0000000..6d55654 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/bound/bound.go @@ -0,0 +1,75 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package bound + +import ( + "math" + "sort" +) + +// Bound represents [Min, Max] bounds. +type Bound struct { + Min, Max float64 +} + +// IsValid returns whether the bound is valid. A valid bound will have +// the minimum less than or equal to the maximum. +func (b Bound) IsValid() bool { + return b.Min <= b.Max +} + +// Intersection returns the intersection of the input bounds. If the +// intersection is empty an invalid Bound is returned. +func Intersection(bounds ...Bound) Bound { + if len(bounds) == 0 { + return Bound{Min: math.NaN(), Max: math.NaN()} + } + + intersection := Bound{Min: bounds[0].Min, Max: bounds[0].Max} + for _, b := range bounds[1:] { + intersection.Min = math.Max(intersection.Min, b.Min) + intersection.Max = math.Min(intersection.Max, b.Max) + } + + if !intersection.IsValid() { + return Bound{Min: math.NaN(), Max: math.NaN()} + } + + return intersection +} + +// Union returns the contiguous union of the input bounds if possible. +// Otherwise an invalid Bound is returned. If bounds is a slice of Bound +// and is not sorted, the order of elements will be changed so that +// they are ordered ascending by Min. +func Union(bounds ...Bound) Bound { + if len(bounds) == 0 { + return Bound{Min: math.NaN(), Max: math.NaN()} + } + if len(bounds) > 1 && !sort.IsSorted(byMin(bounds)) { + sort.Sort(byMin(bounds)) + } + + union := Bound{Min: bounds[0].Min, Max: bounds[0].Max} + for _, b := range bounds[1:] { + if b.Max < union.Min || union.Max < b.Min { + return Bound{Min: math.NaN(), Max: math.NaN()} + } + union.Min = math.Min(union.Min, b.Min) + union.Max = math.Max(union.Max, b.Max) + } + + if !union.IsValid() { + return Bound{Min: math.NaN(), Max: math.NaN()} + } + + return union +} + +type byMin []Bound + +func (b byMin) Len() int { return len(b) } +func (b byMin) Less(i, j int) bool { return b[i].Min < b[j].Max } +func (b byMin) Swap(i, j int) { b[i], b[j] = b[j], b[i] } diff --git a/vendor/gonum.org/v1/gonum/bound/doc.go b/vendor/gonum.org/v1/gonum/bound/doc.go new file mode 100644 index 0000000..7fe1cf2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/bound/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package bound provides types for bounded data. +package bound // import "gonum.org/v1/gonum/bound" diff --git a/vendor/gonum.org/v1/gonum/diff/fd/crosslaplacian.go b/vendor/gonum.org/v1/gonum/diff/fd/crosslaplacian.go new file mode 100644 index 0000000..22f2e72 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/crosslaplacian.go @@ -0,0 +1,186 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. 
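+
+// A minimal usage sketch of the bound package defined above (import path
+// gonum.org/v1/gonum/bound, as declared in its doc.go):
+//
+//	a := bound.Bound{Min: 0, Max: 2}
+//	b := bound.Bound{Min: 1, Max: 3}
+//	bound.Intersection(a, b) // Bound{Min: 1, Max: 2}
+//	bound.Union(a, b)        // Bound{Min: 0, Max: 3}
+//	// Disjoint inputs yield an invalid (NaN) Bound:
+//	bound.Intersection(a, bound.Bound{Min: 5, Max: 6}).IsValid() // false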
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import ( + "math" + "sync" +) + +// CrossLaplacian computes a Laplacian-like quantity for a function of two vectors +// at the locations x and y. +// It computes +// ∇_y · ∇_x f(x,y) = \sum_i ∂^2 f(x,y)/∂x_i ∂y_i +// The two input vector lengths must be the same. +// +// Finite difference formula and other options are specified by settings. If +// settings is nil, CrossLaplacian will be estimated using the Forward formula and +// a default step size. +// +// CrossLaplacian panics if the two input vectors are not the same length, or if +// the derivative order of the formula is not 1. +func CrossLaplacian(f func(x, y []float64) float64, x, y []float64, settings *Settings) float64 { + n := len(x) + if n == 0 { + panic("crosslaplacian: x has zero length") + } + if len(x) != len(y) { + panic("crosslaplacian: input vector length mismatch") + } + + // Default settings. + formula := Forward + step := math.Sqrt(formula.Step) // Use the sqrt because taking derivatives of derivatives. + var originValue float64 + var originKnown, concurrent bool + + // Use user settings if provided. + if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = math.Sqrt(formula.Step) + checkFormula(formula) + if formula.Derivative != 1 { + panic(badDerivOrder) + } + } + if settings.Step != 0 { + if settings.Step < 0 { + panic(negativeStep) + } + step = settings.Step + } + originKnown = settings.OriginKnown + originValue = settings.OriginValue + concurrent = settings.Concurrent + } + + evals := n * len(formula.Stencil) * len(formula.Stencil) + if usesOrigin(formula.Stencil) { + evals -= n + } + + nWorkers := computeWorkers(concurrent, evals) + if nWorkers == 1 { + return crossLaplacianSerial(f, x, y, formula.Stencil, step, originKnown, originValue) + } + return crossLaplacianConcurrent(nWorkers, evals, f, x, y, formula.Stencil, step, originKnown, originValue) +} + +func crossLaplacianSerial(f func(x, y []float64) float64, x, y []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { + n := len(x) + xCopy := make([]float64, len(x)) + yCopy := make([]float64, len(y)) + fo := func() float64 { + // Copy x and y in case they are modified during the call. + copy(xCopy, x) + copy(yCopy, y) + return f(x, y) + } + origin := getOrigin(originKnown, originValue, fo, stencil) + + is2 := 1 / (step * step) + var laplacian float64 + for i := 0; i < n; i++ { + for _, pty := range stencil { + for _, ptx := range stencil { + var v float64 + if ptx.Loc == 0 && pty.Loc == 0 { + v = origin + } else { + // Copying the data anew has two benefits. First, it + // avoids floating point issues where adding and then + // subtracting the step don't return to the exact same + // location. Secondly, it protects against the function + // modifying the input data. 
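+					// For each index i, the double stencil loop accumulates
+					//	(1/h^2) * \sum_a \sum_b c_a*c_b * f(x + a*h*e_i, y + b*h*e_i)
+					// with h = step, e_i the i-th unit vector and (a, c_a),
+					// (b, c_b) ranging over the stencil points, which
+					// approximates ∂^2 f(x,y)/∂x_i ∂y_i.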
+ copy(yCopy, y) + copy(xCopy, x) + yCopy[i] += pty.Loc * step + xCopy[i] += ptx.Loc * step + v = f(xCopy, yCopy) + } + laplacian += v * ptx.Coeff * pty.Coeff * is2 + } + } + } + return laplacian +} + +func crossLaplacianConcurrent(nWorkers, evals int, f func(x, y []float64) float64, x, y []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { + n := len(x) + type run struct { + i int + xIdx, yIdx int + result float64 + } + + send := make(chan run, evals) + ans := make(chan run, evals) + + var originWG sync.WaitGroup + hasOrigin := usesOrigin(stencil) + if hasOrigin { + originWG.Add(1) + // Launch worker to compute the origin. + go func() { + defer originWG.Done() + xCopy := make([]float64, len(x)) + yCopy := make([]float64, len(y)) + copy(xCopy, x) + copy(yCopy, y) + originValue = f(xCopy, yCopy) + }() + } + + var workerWG sync.WaitGroup + // Launch workers. + for i := 0; i < nWorkers; i++ { + workerWG.Add(1) + go func(send <-chan run, ans chan<- run) { + defer workerWG.Done() + xCopy := make([]float64, len(x)) + yCopy := make([]float64, len(y)) + for r := range send { + if stencil[r.xIdx].Loc == 0 && stencil[r.yIdx].Loc == 0 { + originWG.Wait() + r.result = originValue + } else { + // See crossLaplacianSerial for comment on the copy. + copy(xCopy, x) + copy(yCopy, y) + xCopy[r.i] += stencil[r.xIdx].Loc * step + yCopy[r.i] += stencil[r.yIdx].Loc * step + r.result = f(xCopy, yCopy) + } + ans <- r + } + }(send, ans) + } + + // Launch the distributor, which sends all of runs. + go func(send chan<- run) { + for i := 0; i < n; i++ { + for xIdx := range stencil { + for yIdx := range stencil { + send <- run{ + i: i, xIdx: xIdx, yIdx: yIdx, + } + } + } + } + close(send) + // Wait for all the workers to quit, then close the ans channel. + workerWG.Wait() + close(ans) + }(send) + + // Read in the results. + is2 := 1 / (step * step) + var laplacian float64 + for r := range ans { + laplacian += r.result * stencil[r.xIdx].Coeff * stencil[r.yIdx].Coeff * is2 + } + return laplacian +} diff --git a/vendor/gonum.org/v1/gonum/diff/fd/derivative.go b/vendor/gonum.org/v1/gonum/diff/fd/derivative.go new file mode 100644 index 0000000..fad6920 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/derivative.go @@ -0,0 +1,71 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import ( + "math" + "runtime" + "sync" +) + +// Derivative estimates the derivative of the function f at the given location. +// The finite difference formula, the step size, and other options are +// specified by settings. If settings is nil, the first derivative will be +// estimated using the Forward formula and a default step size. +func Derivative(f func(float64) float64, x float64, settings *Settings) float64 { + // Default settings. + formula := Forward + step := formula.Step + var originValue float64 + var originKnown, concurrent bool + + // Use user settings if provided. 
+ if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = formula.Step + checkFormula(formula) + } + if settings.Step != 0 { + step = settings.Step + } + originKnown = settings.OriginKnown + originValue = settings.OriginValue + concurrent = settings.Concurrent + } + + var deriv float64 + if !concurrent || runtime.GOMAXPROCS(0) == 1 { + for _, pt := range formula.Stencil { + if originKnown && pt.Loc == 0 { + deriv += pt.Coeff * originValue + continue + } + deriv += pt.Coeff * f(x+step*pt.Loc) + } + return deriv / math.Pow(step, float64(formula.Derivative)) + } + + wg := &sync.WaitGroup{} + mux := &sync.Mutex{} + for _, pt := range formula.Stencil { + if originKnown && pt.Loc == 0 { + mux.Lock() + deriv += pt.Coeff * originValue + mux.Unlock() + continue + } + wg.Add(1) + go func(pt Point) { + defer wg.Done() + fofx := f(x + step*pt.Loc) + mux.Lock() + defer mux.Unlock() + deriv += pt.Coeff * fofx + }(pt) + } + wg.Wait() + return deriv / math.Pow(step, float64(formula.Derivative)) +} diff --git a/vendor/gonum.org/v1/gonum/diff/fd/diff.go b/vendor/gonum.org/v1/gonum/diff/fd/diff.go new file mode 100644 index 0000000..8dfb11b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/diff.go @@ -0,0 +1,148 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import ( + "math" + "runtime" +) + +// A Point is a stencil location in a finite difference formula. +type Point struct { + Loc float64 + Coeff float64 +} + +// Formula represents a finite difference formula on a regularly spaced grid +// that approximates the derivative of order k of a function f at x as +// d^k f(x) ≈ (1 / Step^k) * \sum_i Coeff_i * f(x + Step * Loc_i). +// Step must be positive, or the finite difference formula will panic. +type Formula struct { + // Stencil is the set of sampling Points which are used to estimate the + // derivative. The locations will be scaled by Step and are relative to x. + Stencil []Point + Derivative int // The order of the approximated derivative. + Step float64 // Default step size for the formula. +} + +func (f Formula) isZero() bool { + return f.Stencil == nil && f.Derivative == 0 && f.Step == 0 +} + +// Settings is the settings structure for computing finite differences. +type Settings struct { + // Formula is the finite difference formula used + // for approximating the derivative. + // Zero value indicates a default formula. + Formula Formula + // Step is the distance between points of the stencil. + // If equal to 0, formula's default step will be used. + Step float64 + + OriginKnown bool // Flag that the value at the origin x is known. + OriginValue float64 // Value at the origin (only used if OriginKnown is true). + + Concurrent bool // Should the function calls be executed concurrently. +} + +// Forward represents a first-order accurate forward approximation +// to the first derivative. +var Forward = Formula{ + Stencil: []Point{{Loc: 0, Coeff: -1}, {Loc: 1, Coeff: 1}}, + Derivative: 1, + Step: 2e-8, +} + +// Forward2nd represents a first-order accurate forward approximation +// to the second derivative. +var Forward2nd = Formula{ + Stencil: []Point{{Loc: 0, Coeff: 1}, {Loc: 1, Coeff: -2}, {Loc: 2, Coeff: 1}}, + Derivative: 2, + Step: 1e-4, +} + +// Backward represents a first-order accurate backward approximation +// to the first derivative. 
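+// With step size h it computes
+//	f'(x) ≈ (f(x) - f(x-h)) / h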
+var Backward = Formula{ + Stencil: []Point{{Loc: -1, Coeff: -1}, {Loc: 0, Coeff: 1}}, + Derivative: 1, + Step: 2e-8, +} + +// Backward2nd represents a first-order accurate forward approximation +// to the second derivative. +var Backward2nd = Formula{ + Stencil: []Point{{Loc: 0, Coeff: 1}, {Loc: -1, Coeff: -2}, {Loc: -2, Coeff: 1}}, + Derivative: 2, + Step: 1e-4, +} + +// Central represents a second-order accurate centered approximation +// to the first derivative. +var Central = Formula{ + Stencil: []Point{{Loc: -1, Coeff: -0.5}, {Loc: 1, Coeff: 0.5}}, + Derivative: 1, + Step: 6e-6, +} + +// Central2nd represents a secord-order accurate centered approximation +// to the second derivative. +var Central2nd = Formula{ + Stencil: []Point{{Loc: -1, Coeff: 1}, {Loc: 0, Coeff: -2}, {Loc: 1, Coeff: 1}}, + Derivative: 2, + Step: 1e-4, +} + +var negativeStep = "fd: negative step" + +// checkFormula checks if the formula is valid, and panics otherwise. +func checkFormula(formula Formula) { + if formula.Derivative == 0 || formula.Stencil == nil || formula.Step <= 0 { + panic("fd: bad formula") + } +} + +// computeWorkers returns the desired number of workers given the concurrency +// level and number of evaluations. +func computeWorkers(concurrent bool, evals int) int { + if !concurrent { + return 1 + } + nWorkers := runtime.GOMAXPROCS(0) + if nWorkers > evals { + nWorkers = evals + } + return nWorkers +} + +// usesOrigin returns whether the stencil uses the origin, which is true iff +// one of the locations in the stencil equals 0. +func usesOrigin(stencil []Point) bool { + for _, pt := range stencil { + if pt.Loc == 0 { + return true + } + } + return false +} + +// getOrigin returns the value at the origin. It returns originValue if originKnown +// is true. It returns the value returned by f if stencil contains a point with +// zero location, and NaN otherwise. +func getOrigin(originKnown bool, originValue float64, f func() float64, stencil []Point) float64 { + if originKnown { + return originValue + } + for _, pt := range stencil { + if pt.Loc == 0 { + return f() + } + } + return math.NaN() +} + +const ( + badDerivOrder = "fd: invalid derivative order" +) diff --git a/vendor/gonum.org/v1/gonum/diff/fd/doc.go b/vendor/gonum.org/v1/gonum/diff/fd/doc.go new file mode 100644 index 0000000..f304696 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fd provides functions to approximate derivatives using finite differences. +package fd // import "gonum.org/v1/gonum/diff/fd" diff --git a/vendor/gonum.org/v1/gonum/diff/fd/gradient.go b/vendor/gonum.org/v1/gonum/diff/fd/gradient.go new file mode 100644 index 0000000..1b851ba --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/gradient.go @@ -0,0 +1,145 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import "gonum.org/v1/gonum/floats" + +// Gradient estimates the gradient of the multivariate function f at the +// location x. If dst is not nil, the result will be stored in-place into dst +// and returned, otherwise a new slice will be allocated first. Finite +// difference formula and other options are specified by settings. 
If settings is +// nil, the gradient will be estimated using the Forward formula and a default +// step size. +// +// Gradient panics if the length of dst and x is not equal, or if the derivative +// order of the formula is not 1. +func Gradient(dst []float64, f func([]float64) float64, x []float64, settings *Settings) []float64 { + if dst == nil { + dst = make([]float64, len(x)) + } + if len(dst) != len(x) { + panic("fd: slice length mismatch") + } + + // Default settings. + formula := Forward + step := formula.Step + var originValue float64 + var originKnown, concurrent bool + + // Use user settings if provided. + if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = formula.Step + checkFormula(formula) + if formula.Derivative != 1 { + panic(badDerivOrder) + } + } + if settings.Step != 0 { + step = settings.Step + } + originKnown = settings.OriginKnown + originValue = settings.OriginValue + concurrent = settings.Concurrent + } + + evals := len(formula.Stencil) * len(x) + nWorkers := computeWorkers(concurrent, evals) + + hasOrigin := usesOrigin(formula.Stencil) + // Copy x in case it is modified during the call. + xcopy := make([]float64, len(x)) + if hasOrigin && !originKnown { + copy(xcopy, x) + originValue = f(xcopy) + } + + if nWorkers == 1 { + for i := range xcopy { + var deriv float64 + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + deriv += pt.Coeff * originValue + continue + } + // Copying the data anew has two benefits. First, it + // avoids floating point issues where adding and then + // subtracting the step don't return to the exact same + // location. Secondly, it protects against the function + // modifying the input data. + copy(xcopy, x) + xcopy[i] += pt.Loc * step + deriv += pt.Coeff * f(xcopy) + } + dst[i] = deriv / step + } + return dst + } + + sendChan := make(chan fdrun, evals) + ansChan := make(chan fdrun, evals) + quit := make(chan struct{}) + defer close(quit) + + // Launch workers. Workers receive an index and a step, and compute the answer. + for i := 0; i < nWorkers; i++ { + go func(sendChan <-chan fdrun, ansChan chan<- fdrun, quit <-chan struct{}) { + xcopy := make([]float64, len(x)) + for { + select { + case <-quit: + return + case run := <-sendChan: + // See above comment on the copy. + copy(xcopy, x) + xcopy[run.idx] += run.pt.Loc * step + run.result = f(xcopy) + ansChan <- run + } + } + }(sendChan, ansChan, quit) + } + + // Launch the distributor. Distributor sends the cases to be computed. + go func(sendChan chan<- fdrun, ansChan chan<- fdrun) { + for i := range x { + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + // Answer already known. Send the answer on the answer channel. + ansChan <- fdrun{ + idx: i, + pt: pt, + result: originValue, + } + continue + } + // Answer not known, send the answer to be computed. + sendChan <- fdrun{ + idx: i, + pt: pt, + } + } + } + }(sendChan, ansChan) + + for i := range dst { + dst[i] = 0 + } + // Read in all of the results. + for i := 0; i < evals; i++ { + run := <-ansChan + dst[run.idx] += run.pt.Coeff * run.result + } + floats.Scale(1/step, dst) + return dst +} + +type fdrun struct { + idx int + pt Point + result float64 +} diff --git a/vendor/gonum.org/v1/gonum/diff/fd/hessian.go b/vendor/gonum.org/v1/gonum/diff/fd/hessian.go new file mode 100644 index 0000000..0d96c2d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/hessian.go @@ -0,0 +1,186 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. 
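+
+// A minimal usage sketch of Gradient above (fd is this package,
+// gonum.org/v1/gonum/diff/fd):
+//
+//	f := func(x []float64) float64 { return x[0]*x[0] + 3*x[1] }
+//	grad := fd.Gradient(nil, f, []float64{2, 1}, nil)
+//	// grad ≈ [4 3]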
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import ( + "math" + "sync" + + "gonum.org/v1/gonum/mat" +) + +// Hessian approximates the Hessian matrix of the multivariate function f at +// the location x. That is +// H_{i,j} = ∂^2 f(x)/∂x_i ∂x_j +// The resulting H will be stored in dst. Finite difference formula and other +// options are specified by settings. If settings is nil, the Hessian will be +// estimated using the Forward formula and a default step size. +// +// If the dst matrix is zero-sized it will be resized to the correct dimensions, +// otherwise the dimensions of dst must match the length of x or Hessian will panic. +// Hessian will panic if the derivative order of the formula is not 1. +func Hessian(dst *mat.SymDense, f func(x []float64) float64, x []float64, settings *Settings) { + n := len(x) + if dst.IsZero() { + *dst = *(dst.GrowSym(n).(*mat.SymDense)) + } else if dst.Symmetric() != n { + panic("hessian: dst size mismatch") + } + dst.Zero() + + // Default settings. + formula := Forward + step := math.Sqrt(formula.Step) // Use the sqrt because taking derivatives of derivatives. + var originValue float64 + var originKnown, concurrent bool + + // Use user settings if provided. + if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = math.Sqrt(formula.Step) + checkFormula(formula) + if formula.Derivative != 1 { + panic(badDerivOrder) + } + } + if settings.Step != 0 { + if settings.Step < 0 { + panic(negativeStep) + } + step = settings.Step + } + originKnown = settings.OriginKnown + originValue = settings.OriginValue + concurrent = settings.Concurrent + } + + evals := n * (n + 1) / 2 * len(formula.Stencil) * len(formula.Stencil) + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + evals -= n * (n + 1) / 2 + break + } + } + + nWorkers := computeWorkers(concurrent, evals) + if nWorkers == 1 { + hessianSerial(dst, f, x, formula.Stencil, step, originKnown, originValue) + return + } + hessianConcurrent(dst, nWorkers, evals, f, x, formula.Stencil, step, originKnown, originValue) +} + +func hessianSerial(dst *mat.SymDense, f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) { + n := len(x) + xCopy := make([]float64, n) + fo := func() float64 { + // Copy x in case it is modified during the call. + copy(xCopy, x) + return f(x) + } + is2 := 1 / (step * step) + origin := getOrigin(originKnown, originValue, fo, stencil) + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + var hess float64 + for _, pti := range stencil { + for _, ptj := range stencil { + var v float64 + if pti.Loc == 0 && ptj.Loc == 0 { + v = origin + } else { + // Copying the data anew has two benefits. First, it + // avoids floating point issues where adding and then + // subtracting the step don't return to the exact same + // location. Secondly, it protects against the function + // modifying the input data. 
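+						// The double stencil loop accumulates
+						//	(1/h^2) * \sum_a \sum_b c_a*c_b * f(x + a*h*e_i + b*h*e_j)
+						// with h = step and e_k the k-th unit vector, which
+						// approximates the mixed partial ∂^2 f(x)/∂x_i ∂x_j
+						// stored into the (i, j) element of dst.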
+ copy(xCopy, x) + xCopy[i] += pti.Loc * step + xCopy[j] += ptj.Loc * step + v = f(xCopy) + } + hess += v * pti.Coeff * ptj.Coeff * is2 + } + } + dst.SetSym(i, j, hess) + } + } +} + +func hessianConcurrent(dst *mat.SymDense, nWorkers, evals int, f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) { + n := dst.Symmetric() + type run struct { + i, j int + iIdx, jIdx int + result float64 + } + + send := make(chan run, evals) + ans := make(chan run, evals) + + var originWG sync.WaitGroup + hasOrigin := usesOrigin(stencil) + if hasOrigin { + originWG.Add(1) + // Launch worker to compute the origin. + go func() { + defer originWG.Done() + xCopy := make([]float64, len(x)) + copy(xCopy, x) + originValue = f(xCopy) + }() + } + + var workerWG sync.WaitGroup + // Launch workers. + for i := 0; i < nWorkers; i++ { + workerWG.Add(1) + go func(send <-chan run, ans chan<- run) { + defer workerWG.Done() + xCopy := make([]float64, len(x)) + for r := range send { + if stencil[r.iIdx].Loc == 0 && stencil[r.jIdx].Loc == 0 { + originWG.Wait() + r.result = originValue + } else { + // See hessianSerial for comment on the copy. + copy(xCopy, x) + xCopy[r.i] += stencil[r.iIdx].Loc * step + xCopy[r.j] += stencil[r.jIdx].Loc * step + r.result = f(xCopy) + } + ans <- r + } + }(send, ans) + } + + // Launch the distributor, which sends all of runs. + go func(send chan<- run) { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + for iIdx := range stencil { + for jIdx := range stencil { + send <- run{ + i: i, j: j, iIdx: iIdx, jIdx: jIdx, + } + } + } + } + } + close(send) + // Wait for all the workers to quit, then close the ans channel. + workerWG.Wait() + close(ans) + }(send) + + is2 := 1 / (step * step) + // Read in the results. + for r := range ans { + v := r.result * stencil[r.iIdx].Coeff * stencil[r.jIdx].Coeff * is2 + v += dst.At(r.i, r.j) + dst.SetSym(r.i, r.j, v) + } +} diff --git a/vendor/gonum.org/v1/gonum/diff/fd/jacobian.go b/vendor/gonum.org/v1/gonum/diff/fd/jacobian.go new file mode 100644 index 0000000..8811256 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/jacobian.go @@ -0,0 +1,202 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import ( + "sync" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +type JacobianSettings struct { + Formula Formula + OriginValue []float64 + Step float64 + Concurrent bool +} + +// Jacobian approximates the Jacobian matrix of a vector-valued function f at +// the location x and stores the result in-place into dst. +// +// Finite difference formula and other options are specified by settings. If +// settings is nil, the Jacobian will be estimated using the Forward formula and +// a default step size. +// +// The Jacobian matrix J is the matrix of all first-order partial derivatives of f. +// If f maps an n-dimensional vector x to an m-dimensional vector y = f(x), J is +// an m×n matrix whose elements are given as +// J_{i,j} = ∂f_i/∂x_j, +// or expanded out +// [ ∂f_1/∂x_1 ... ∂f_1/∂x_n ] +// [ . . . ] +// J = [ . . . ] +// [ . . . ] +// [ ∂f_m/∂x_1 ... ∂f_m/∂x_n ] +// +// dst must be non-nil, the number of its columns must equal the length of x, and +// the derivative order of the formula must be 1, otherwise Jacobian will panic. 
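+//
+// A minimal usage sketch (fd is this package; mat is gonum.org/v1/gonum/mat):
+//
+//	f := func(y, x []float64) {
+//		y[0] = x[0] * x[1]
+//		y[1] = x[0] + x[1]
+//	}
+//	dst := mat.NewDense(2, 2, nil)
+//	fd.Jacobian(dst, f, []float64{3, 2}, nil)
+//	// dst is approximately [[2 3], [1 1]].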
+func Jacobian(dst *mat.Dense, f func(y, x []float64), x []float64, settings *JacobianSettings) { + n := len(x) + if n == 0 { + panic("jacobian: x has zero length") + } + m, c := dst.Dims() + if c != n { + panic("jacobian: mismatched matrix size") + } + + // Default settings. + formula := Forward + step := formula.Step + var originValue []float64 + var concurrent bool + + // Use user settings if provided. + if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = formula.Step + checkFormula(formula) + if formula.Derivative != 1 { + panic(badDerivOrder) + } + } + if settings.Step != 0 { + step = settings.Step + } + originValue = settings.OriginValue + if originValue != nil && len(originValue) != m { + panic("jacobian: mismatched OriginValue slice length") + } + concurrent = settings.Concurrent + } + + evals := n * len(formula.Stencil) + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + evals -= n - 1 + break + } + } + + nWorkers := computeWorkers(concurrent, evals) + if nWorkers == 1 { + jacobianSerial(dst, f, x, originValue, formula, step) + return + } + jacobianConcurrent(dst, f, x, originValue, formula, step, nWorkers) +} + +func jacobianSerial(dst *mat.Dense, f func([]float64, []float64), x, origin []float64, formula Formula, step float64) { + m, n := dst.Dims() + xcopy := make([]float64, n) + y := make([]float64, m) + col := make([]float64, m) + for j := 0; j < n; j++ { + for i := range col { + col[i] = 0 + } + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + if origin == nil { + origin = make([]float64, m) + copy(xcopy, x) + f(origin, xcopy) + } + floats.AddScaled(col, pt.Coeff, origin) + } else { + copy(xcopy, x) + xcopy[j] += pt.Loc * step + f(y, xcopy) + floats.AddScaled(col, pt.Coeff, y) + } + } + dst.SetCol(j, col) + } + dst.Scale(1/step, dst) +} + +func jacobianConcurrent(dst *mat.Dense, f func([]float64, []float64), x, origin []float64, formula Formula, step float64, nWorkers int) { + m, n := dst.Dims() + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + dst.Set(i, j, 0) + } + } + + var ( + wg sync.WaitGroup + mu = make([]sync.Mutex, n) // Guard access to individual columns. + ) + worker := func(jobs <-chan jacJob) { + defer wg.Done() + xcopy := make([]float64, n) + y := make([]float64, m) + yVec := mat.NewVecDense(m, y) + var col mat.VecDense + for job := range jobs { + copy(xcopy, x) + xcopy[job.j] += job.pt.Loc * step + f(y, xcopy) + col.ColViewOf(dst, job.j) + mu[job.j].Lock() + col.AddScaledVec(&col, job.pt.Coeff, yVec) + mu[job.j].Unlock() + } + } + jobs := make(chan jacJob, nWorkers) + for i := 0; i < nWorkers; i++ { + wg.Add(1) + go worker(jobs) + } + var hasOrigin bool + for _, pt := range formula.Stencil { + if pt.Loc == 0 { + hasOrigin = true + continue + } + for j := 0; j < n; j++ { + jobs <- jacJob{j, pt} + } + } + close(jobs) + if hasOrigin && origin == nil { + wg.Add(1) + go func() { + defer wg.Done() + origin = make([]float64, m) + xcopy := make([]float64, n) + copy(xcopy, x) + f(origin, xcopy) + }() + } + wg.Wait() + + if hasOrigin { + // The formula evaluated at x, we need to add scaled origin to + // all columns of dst. Iterate again over all Formula points + // because we don't forbid repeated locations. 
+ + originVec := mat.NewVecDense(m, origin) + for _, pt := range formula.Stencil { + if pt.Loc != 0 { + continue + } + var col mat.VecDense + for j := 0; j < n; j++ { + col.ColViewOf(dst, j) + col.AddScaledVec(&col, pt.Coeff, originVec) + } + } + } + + dst.Scale(1/step, dst) +} + +type jacJob struct { + j int + pt Point +} diff --git a/vendor/gonum.org/v1/gonum/diff/fd/laplacian.go b/vendor/gonum.org/v1/gonum/diff/fd/laplacian.go new file mode 100644 index 0000000..5f1a27b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/diff/fd/laplacian.go @@ -0,0 +1,158 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fd + +import "sync" + +// Laplacian computes the Laplacian of the multivariate function f at the location +// x. That is, Laplacian returns +// ∆ f(x) = ∇ · ∇ f(x) = \sum_i ∂^2 f(x)/∂x_i^2 +// The finite difference formula and other options are specified by settings. +// The order of the difference formula must be 2 or Laplacian will panic. +func Laplacian(f func(x []float64) float64, x []float64, settings *Settings) float64 { + n := len(x) + if n == 0 { + panic("laplacian: x has zero length") + } + + // Default settings. + formula := Central2nd + step := formula.Step + var originValue float64 + var originKnown, concurrent bool + + // Use user settings if provided. + if settings != nil { + if !settings.Formula.isZero() { + formula = settings.Formula + step = formula.Step + checkFormula(formula) + if formula.Derivative != 2 { + panic(badDerivOrder) + } + } + if settings.Step != 0 { + if settings.Step < 0 { + panic(negativeStep) + } + step = settings.Step + } + originKnown = settings.OriginKnown + originValue = settings.OriginValue + concurrent = settings.Concurrent + } + + evals := n * len(formula.Stencil) + if usesOrigin(formula.Stencil) { + evals -= n + } + + nWorkers := computeWorkers(concurrent, evals) + if nWorkers == 1 { + return laplacianSerial(f, x, formula.Stencil, step, originKnown, originValue) + } + return laplacianConcurrent(nWorkers, evals, f, x, formula.Stencil, step, originKnown, originValue) +} + +func laplacianSerial(f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { + n := len(x) + xCopy := make([]float64, n) + fo := func() float64 { + // Copy x in case it is modified during the call. + copy(xCopy, x) + return f(x) + } + is2 := 1 / (step * step) + origin := getOrigin(originKnown, originValue, fo, stencil) + var laplacian float64 + for i := 0; i < n; i++ { + for _, pt := range stencil { + var v float64 + if pt.Loc == 0 { + v = origin + } else { + // Copying the data anew has two benefits. First, it + // avoids floating point issues where adding and then + // subtracting the step don't return to the exact same + // location. Secondly, it protects against the function + // modifying the input data. + copy(xCopy, x) + xCopy[i] += pt.Loc * step + v = f(xCopy) + } + laplacian += v * pt.Coeff * is2 + } + } + return laplacian +} + +func laplacianConcurrent(nWorkers, evals int, f func(x []float64) float64, x []float64, stencil []Point, step float64, originKnown bool, originValue float64) float64 { + type run struct { + i int + idx int + result float64 + } + n := len(x) + send := make(chan run, evals) + ans := make(chan run, evals) + + var originWG sync.WaitGroup + hasOrigin := usesOrigin(stencil) + if hasOrigin { + originWG.Add(1) + // Launch worker to compute the origin. 
+ go func() { + defer originWG.Done() + xCopy := make([]float64, len(x)) + copy(xCopy, x) + originValue = f(xCopy) + }() + } + + var workerWG sync.WaitGroup + // Launch workers. + for i := 0; i < nWorkers; i++ { + workerWG.Add(1) + go func(send <-chan run, ans chan<- run) { + defer workerWG.Done() + xCopy := make([]float64, len(x)) + for r := range send { + if stencil[r.idx].Loc == 0 { + originWG.Wait() + r.result = originValue + } else { + // See laplacianSerial for comment on the copy. + copy(xCopy, x) + xCopy[r.i] += stencil[r.idx].Loc * step + r.result = f(xCopy) + } + ans <- r + } + }(send, ans) + } + + // Launch the distributor, which sends all of runs. + go func(send chan<- run) { + for i := 0; i < n; i++ { + for idx := range stencil { + send <- run{ + i: i, idx: idx, + } + } + } + close(send) + // Wait for all the workers to quit, then close the ans channel. + workerWG.Wait() + close(ans) + }(send) + + // Read in the results. + is2 := 1 / (step * step) + var laplacian float64 + for r := range ans { + laplacian += r.result * stencil[r.idx].Coeff * is2 + } + return laplacian +} diff --git a/vendor/gonum.org/v1/gonum/doc.go b/vendor/gonum.org/v1/gonum/doc.go new file mode 100644 index 0000000..13ed311 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/doc.go @@ -0,0 +1,12 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Gonum is a set of packages designed to make writing numerical and +// scientific algorithms productive, performant, and scalable. +// +// Gonum contains libraries for matrices and linear algebra; statistics, +// probability distributions, and sampling; tools for function +// differentiation, integration, and optimization; network creation and +// analysis; and more. +package gonum // import "gonum.org/v1/gonum" diff --git a/vendor/gonum.org/v1/gonum/floats/doc.go b/vendor/gonum.org/v1/gonum/floats/doc.go new file mode 100644 index 0000000..bfe05c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/floats/doc.go @@ -0,0 +1,11 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package floats provides a set of helper routines for dealing with slices +// of float64. The functions avoid allocations to allow for use within tight +// loops without garbage collection overhead. +// +// The convention used is that when a slice is being modified in place, it has +// the name dst. +package floats // import "gonum.org/v1/gonum/floats" diff --git a/vendor/gonum.org/v1/gonum/floats/floats.go b/vendor/gonum.org/v1/gonum/floats/floats.go new file mode 100644 index 0000000..ae004a6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/floats/floats.go @@ -0,0 +1,933 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this code is governed by a BSD-style +// license that can be found in the LICENSE file + +package floats + +import ( + "errors" + "math" + "sort" + "strconv" + + "gonum.org/v1/gonum/internal/asm/f64" +) + +// Add adds, element-wise, the elements of s and dst, and stores in dst. +// Panics if the lengths of dst and s do not match. +func Add(dst, s []float64) { + if len(dst) != len(s) { + panic("floats: length of the slices do not match") + } + f64.AxpyUnitaryTo(dst, 1, s, dst) +} + +// AddTo adds, element-wise, the elements of s and t and +// stores the result in dst. Panics if the lengths of s, t and dst do not match. 
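+// For example (sketch):
+//	dst := make([]float64, 3)
+//	AddTo(dst, []float64{1, 2, 3}, []float64{10, 20, 30})
+//	// dst == [11 22 33]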
+func AddTo(dst, s, t []float64) []float64 { + if len(s) != len(t) { + panic("floats: length of adders do not match") + } + if len(dst) != len(s) { + panic("floats: length of destination does not match length of adder") + } + f64.AxpyUnitaryTo(dst, 1, s, t) + return dst +} + +// AddConst adds the scalar c to all of the values in dst. +func AddConst(c float64, dst []float64) { + f64.AddConst(c, dst) +} + +// AddScaled performs dst = dst + alpha * s. +// It panics if the lengths of dst and s are not equal. +func AddScaled(dst []float64, alpha float64, s []float64) { + if len(dst) != len(s) { + panic("floats: length of destination and source to not match") + } + f64.AxpyUnitaryTo(dst, alpha, s, dst) +} + +// AddScaledTo performs dst = y + alpha * s, where alpha is a scalar, +// and dst, y and s are all slices. +// It panics if the lengths of dst, y, and s are not equal. +// +// At the return of the function, dst[i] = y[i] + alpha * s[i] +func AddScaledTo(dst, y []float64, alpha float64, s []float64) []float64 { + if len(dst) != len(s) || len(dst) != len(y) { + panic("floats: lengths of slices do not match") + } + f64.AxpyUnitaryTo(dst, alpha, s, y) + return dst +} + +// argsort is a helper that implements sort.Interface, as used by +// Argsort. +type argsort struct { + s []float64 + inds []int +} + +func (a argsort) Len() int { + return len(a.s) +} + +func (a argsort) Less(i, j int) bool { + return a.s[i] < a.s[j] +} + +func (a argsort) Swap(i, j int) { + a.s[i], a.s[j] = a.s[j], a.s[i] + a.inds[i], a.inds[j] = a.inds[j], a.inds[i] +} + +// Argsort sorts the elements of dst while tracking their original order. +// At the conclusion of Argsort, dst will contain the original elements of dst +// but sorted in increasing order, and inds will contain the original position +// of the elements in the slice such that dst[i] = origDst[inds[i]]. +// It panics if the lengths of dst and inds do not match. +func Argsort(dst []float64, inds []int) { + if len(dst) != len(inds) { + panic("floats: length of inds does not match length of slice") + } + for i := range dst { + inds[i] = i + } + + a := argsort{s: dst, inds: inds} + sort.Sort(a) +} + +// Count applies the function f to every element of s and returns the number +// of times the function returned true. +func Count(f func(float64) bool, s []float64) int { + var n int + for _, val := range s { + if f(val) { + n++ + } + } + return n +} + +// CumProd finds the cumulative product of the first i elements in +// s and puts them in place into the ith element of the +// destination dst. A panic will occur if the lengths of arguments +// do not match. +// +// At the return of the function, dst[i] = s[i] * s[i-1] * s[i-2] * ... +func CumProd(dst, s []float64) []float64 { + if len(dst) != len(s) { + panic("floats: length of destination does not match length of the source") + } + if len(dst) == 0 { + return dst + } + return f64.CumProd(dst, s) +} + +// CumSum finds the cumulative sum of the first i elements in +// s and puts them in place into the ith element of the +// destination dst. A panic will occur if the lengths of arguments +// do not match. +// +// At the return of the function, dst[i] = s[i] + s[i-1] + s[i-2] + ... +func CumSum(dst, s []float64) []float64 { + if len(dst) != len(s) { + panic("floats: length of destination does not match length of the source") + } + if len(dst) == 0 { + return dst + } + return f64.CumSum(dst, s) +} + +// Distance computes the L-norm of s - t. See Norm for special cases. 
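+// In general
+//	Distance(s, t, L) = (\sum_i |s_i - t_i|^L)^(1/L)
+// with L == 1, L == 2 and L == Inf handled by specialized code paths.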
+// A panic will occur if the lengths of s and t do not match. +func Distance(s, t []float64, L float64) float64 { + if len(s) != len(t) { + panic("floats: slice lengths do not match") + } + if len(s) == 0 { + return 0 + } + var norm float64 + if L == 2 { + for i, v := range s { + diff := t[i] - v + norm = math.Hypot(norm, diff) + } + return norm + } + if L == 1 { + for i, v := range s { + norm += math.Abs(t[i] - v) + } + return norm + } + if math.IsInf(L, 1) { + for i, v := range s { + absDiff := math.Abs(t[i] - v) + if absDiff > norm { + norm = absDiff + } + } + return norm + } + for i, v := range s { + norm += math.Pow(math.Abs(t[i]-v), L) + } + return math.Pow(norm, 1/L) +} + +// Div performs element-wise division dst / s +// and stores the value in dst. It panics if the +// lengths of s and t are not equal. +func Div(dst, s []float64) { + if len(dst) != len(s) { + panic("floats: slice lengths do not match") + } + f64.Div(dst, s) +} + +// DivTo performs element-wise division s / t +// and stores the value in dst. It panics if the +// lengths of s, t, and dst are not equal. +func DivTo(dst, s, t []float64) []float64 { + if len(s) != len(t) || len(dst) != len(t) { + panic("floats: slice lengths do not match") + } + return f64.DivTo(dst, s, t) +} + +// Dot computes the dot product of s1 and s2, i.e. +// sum_{i = 1}^N s1[i]*s2[i]. +// A panic will occur if lengths of arguments do not match. +func Dot(s1, s2 []float64) float64 { + if len(s1) != len(s2) { + panic("floats: lengths of the slices do not match") + } + return f64.DotUnitary(s1, s2) +} + +// Equal returns true if the slices have equal lengths and +// all elements are numerically identical. +func Equal(s1, s2 []float64) bool { + if len(s1) != len(s2) { + return false + } + for i, val := range s1 { + if s2[i] != val { + return false + } + } + return true +} + +// EqualApprox returns true if the slices have equal lengths and +// all element pairs have an absolute tolerance less than tol or a +// relative tolerance less than tol. +func EqualApprox(s1, s2 []float64, tol float64) bool { + if len(s1) != len(s2) { + return false + } + for i, a := range s1 { + if !EqualWithinAbsOrRel(a, s2[i], tol, tol) { + return false + } + } + return true +} + +// EqualFunc returns true if the slices have the same lengths +// and the function returns true for all element pairs. +func EqualFunc(s1, s2 []float64, f func(float64, float64) bool) bool { + if len(s1) != len(s2) { + return false + } + for i, val := range s1 { + if !f(val, s2[i]) { + return false + } + } + return true +} + +// EqualWithinAbs returns true if a and b have an absolute +// difference of less than tol. +func EqualWithinAbs(a, b, tol float64) bool { + return a == b || math.Abs(a-b) <= tol +} + +const minNormalFloat64 = 2.2250738585072014e-308 + +// EqualWithinRel returns true if the difference between a and b +// is not greater than tol times the greater value. +func EqualWithinRel(a, b, tol float64) bool { + if a == b { + return true + } + delta := math.Abs(a - b) + if delta <= minNormalFloat64 { + return delta <= tol*minNormalFloat64 + } + // We depend on the division in this relationship to identify + // infinities (we rely on the NaN to fail the test) otherwise + // we compare Infs of the same sign and evaluate Infs as equal + // independent of sign. + return delta/math.Max(math.Abs(a), math.Abs(b)) <= tol +} + +// EqualWithinAbsOrRel returns true if a and b are equal to within +// the absolute tolerance. 
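+// It also returns true if a and b are within the relative tolerance relTol;
+// that is, it is the disjunction of EqualWithinAbs(a, b, absTol) and
+// EqualWithinRel(a, b, relTol).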
+func EqualWithinAbsOrRel(a, b, absTol, relTol float64) bool { + if EqualWithinAbs(a, b, absTol) { + return true + } + return EqualWithinRel(a, b, relTol) +} + +// EqualWithinULP returns true if a and b are equal to within +// the specified number of floating point units in the last place. +func EqualWithinULP(a, b float64, ulp uint) bool { + if a == b { + return true + } + if math.IsNaN(a) || math.IsNaN(b) { + return false + } + if math.Signbit(a) != math.Signbit(b) { + return math.Float64bits(math.Abs(a))+math.Float64bits(math.Abs(b)) <= uint64(ulp) + } + return ulpDiff(math.Float64bits(a), math.Float64bits(b)) <= uint64(ulp) +} + +func ulpDiff(a, b uint64) uint64 { + if a > b { + return a - b + } + return b - a +} + +// EqualLengths returns true if all of the slices have equal length, +// and false otherwise. Returns true if there are no input slices. +func EqualLengths(slices ...[]float64) bool { + // This length check is needed: http://play.golang.org/p/sdty6YiLhM + if len(slices) == 0 { + return true + } + l := len(slices[0]) + for i := 1; i < len(slices); i++ { + if len(slices[i]) != l { + return false + } + } + return true +} + +// Find applies f to every element of s and returns the indices of the first +// k elements for which the f returns true, or all such elements +// if k < 0. +// Find will reslice inds to have 0 length, and will append +// found indices to inds. +// If k > 0 and there are fewer than k elements in s satisfying f, +// all of the found elements will be returned along with an error. +// At the return of the function, the input inds will be in an undetermined state. +func Find(inds []int, f func(float64) bool, s []float64, k int) ([]int, error) { + // inds is also returned to allow for calling with nil + + // Reslice inds to have zero length + inds = inds[:0] + + // If zero elements requested, can just return + if k == 0 { + return inds, nil + } + + // If k < 0, return all of the found indices + if k < 0 { + for i, val := range s { + if f(val) { + inds = append(inds, i) + } + } + return inds, nil + } + + // Otherwise, find the first k elements + nFound := 0 + for i, val := range s { + if f(val) { + inds = append(inds, i) + nFound++ + if nFound == k { + return inds, nil + } + } + } + // Finished iterating over the loop, which means k elements were not found + return inds, errors.New("floats: insufficient elements found") +} + +// HasNaN returns true if the slice s has any values that are NaN and false +// otherwise. +func HasNaN(s []float64) bool { + for _, v := range s { + if math.IsNaN(v) { + return true + } + } + return false +} + +// LogSpan returns a set of n equally spaced points in log space between, +// l and u where N is equal to len(dst). The first element of the +// resulting dst will be l and the final element of dst will be u. +// Panics if len(dst) < 2 +// Note that this call will return NaNs if either l or u are negative, and +// will return all zeros if l or u is zero. +// Also returns the mutated slice dst, so that it can be used in range, like: +// +// for i, x := range LogSpan(dst, l, u) { ... } +func LogSpan(dst []float64, l, u float64) []float64 { + Span(dst, math.Log(l), math.Log(u)) + for i := range dst { + dst[i] = math.Exp(dst[i]) + } + return dst +} + +// LogSumExp returns the log of the sum of the exponentials of the values in s. +// Panics if s is an empty slice. 
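+//
+// A small usage sketch, with the values chosen so the exponentials sum to 1:
+//
+//	s := []float64{math.Log(0.25), math.Log(0.25), math.Log(0.5)}
+//	lse := LogSumExp(s) // ≈ 0, i.e. math.Log(0.25+0.25+0.5), up to rounding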
+func LogSumExp(s []float64) float64 { + // Want to do this in a numerically stable way which avoids + // overflow and underflow + // First, find the maximum value in the slice. + maxval := Max(s) + if math.IsInf(maxval, 0) { + // If it's infinity either way, the logsumexp will be infinity as well + // returning now avoids NaNs + return maxval + } + var lse float64 + // Compute the sumexp part + for _, val := range s { + lse += math.Exp(val - maxval) + } + // Take the log and add back on the constant taken out + return math.Log(lse) + maxval +} + +// Max returns the maximum value in the input slice. If the slice is empty, Max will panic. +func Max(s []float64) float64 { + return s[MaxIdx(s)] +} + +// MaxIdx returns the index of the maximum value in the input slice. If several +// entries have the maximum value, the first such index is returned. If the slice +// is empty, MaxIdx will panic. +func MaxIdx(s []float64) int { + if len(s) == 0 { + panic("floats: zero slice length") + } + max := math.NaN() + var ind int + for i, v := range s { + if math.IsNaN(v) { + continue + } + if v > max || math.IsNaN(max) { + max = v + ind = i + } + } + return ind +} + +// Min returns the maximum value in the input slice. If the slice is empty, Min will panic. +func Min(s []float64) float64 { + return s[MinIdx(s)] +} + +// MinIdx returns the index of the minimum value in the input slice. If several +// entries have the maximum value, the first such index is returned. If the slice +// is empty, MinIdx will panic. +func MinIdx(s []float64) int { + if len(s) == 0 { + panic("floats: zero slice length") + } + min := math.NaN() + var ind int + for i, v := range s { + if math.IsNaN(v) { + continue + } + if v < min || math.IsNaN(min) { + min = v + ind = i + } + } + return ind +} + +// Mul performs element-wise multiplication between dst +// and s and stores the value in dst. Panics if the +// lengths of s and t are not equal. +func Mul(dst, s []float64) { + if len(dst) != len(s) { + panic("floats: slice lengths do not match") + } + for i, val := range s { + dst[i] *= val + } +} + +// MulTo performs element-wise multiplication between s +// and t and stores the value in dst. Panics if the +// lengths of s, t, and dst are not equal. +func MulTo(dst, s, t []float64) []float64 { + if len(s) != len(t) || len(dst) != len(t) { + panic("floats: slice lengths do not match") + } + for i, val := range t { + dst[i] = val * s[i] + } + return dst +} + +const ( + nanBits = 0x7ff8000000000000 + nanMask = 0xfff8000000000000 +) + +// NaNWith returns an IEEE 754 "quiet not-a-number" value with the +// payload specified in the low 51 bits of payload. +// The NaN returned by math.NaN has a bit pattern equal to NaNWith(1). +func NaNWith(payload uint64) float64 { + return math.Float64frombits(nanBits | (payload &^ nanMask)) +} + +// NaNPayload returns the lowest 51 bits payload of an IEEE 754 "quiet +// not-a-number". For values of f other than quiet-NaN, NaNPayload +// returns zero and false. +func NaNPayload(f float64) (payload uint64, ok bool) { + b := math.Float64bits(f) + if b&nanBits != nanBits { + return 0, false + } + return b &^ nanMask, true +} + +// NearestIdx returns the index of the element in s +// whose value is nearest to v. If several such +// elements exist, the lowest index is returned. +// NearestIdx panics if len(s) == 0. 
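+//
+// For example (a usage sketch):
+//
+//	NearestIdx([]float64{1, 3, 8}, 4) // == 1, because 3 is the value closest to 4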
+func NearestIdx(s []float64, v float64) int { + if len(s) == 0 { + panic("floats: zero length slice") + } + switch { + case math.IsNaN(v): + return 0 + case math.IsInf(v, 1): + return MaxIdx(s) + case math.IsInf(v, -1): + return MinIdx(s) + } + var ind int + dist := math.NaN() + for i, val := range s { + newDist := math.Abs(v - val) + // A NaN distance will not be closer. + if math.IsNaN(newDist) { + continue + } + if newDist < dist || math.IsNaN(dist) { + dist = newDist + ind = i + } + } + return ind +} + +// NearestIdxForSpan return the index of a hypothetical vector created +// by Span with length n and bounds l and u whose value is closest +// to v. That is, NearestIdxForSpan(n, l, u, v) is equivalent to +// Nearest(Span(make([]float64, n),l,u),v) without an allocation. +// NearestIdxForSpan panics if n is less than two. +func NearestIdxForSpan(n int, l, u float64, v float64) int { + if n <= 1 { + panic("floats: span must have length >1") + } + if math.IsNaN(v) { + return 0 + } + + // Special cases for Inf and NaN. + switch { + case math.IsNaN(l) && !math.IsNaN(u): + return n - 1 + case math.IsNaN(u): + return 0 + case math.IsInf(l, 0) && math.IsInf(u, 0): + if l == u { + return 0 + } + if n%2 == 1 { + if !math.IsInf(v, 0) { + return n / 2 + } + if math.Copysign(1, v) == math.Copysign(1, l) { + return 0 + } + return n/2 + 1 + } + if math.Copysign(1, v) == math.Copysign(1, l) { + return 0 + } + return n / 2 + case math.IsInf(l, 0): + if v == l { + return 0 + } + return n - 1 + case math.IsInf(u, 0): + if v == u { + return n - 1 + } + return 0 + case math.IsInf(v, -1): + if l <= u { + return 0 + } + return n - 1 + case math.IsInf(v, 1): + if u <= l { + return 0 + } + return n - 1 + } + + // Special cases for v outside (l, u) and (u, l). + switch { + case l < u: + if v <= l { + return 0 + } + if v >= u { + return n - 1 + } + case l > u: + if v >= l { + return 0 + } + if v <= u { + return n - 1 + } + default: + return 0 + } + + // Can't guarantee anything about exactly halfway between + // because of floating point weirdness. + return int((float64(n)-1)/(u-l)*(v-l) + 0.5) +} + +// Norm returns the L norm of the slice S, defined as +// (sum_{i=1}^N s[i]^L)^{1/L} +// Special cases: +// L = math.Inf(1) gives the maximum absolute value. +// Does not correctly compute the zero norm (use Count). +func Norm(s []float64, L float64) float64 { + // Should this complain if L is not positive? + // Should this be done in log space for better numerical stability? + // would be more cost + // maybe only if L is high? + if len(s) == 0 { + return 0 + } + if L == 2 { + twoNorm := math.Abs(s[0]) + for i := 1; i < len(s); i++ { + twoNorm = math.Hypot(twoNorm, s[i]) + } + return twoNorm + } + var norm float64 + if L == 1 { + for _, val := range s { + norm += math.Abs(val) + } + return norm + } + if math.IsInf(L, 1) { + for _, val := range s { + norm = math.Max(norm, math.Abs(val)) + } + return norm + } + for _, val := range s { + norm += math.Pow(math.Abs(val), L) + } + return math.Pow(norm, 1/L) +} + +// ParseWithNA converts the string s to a float64 in v. +// If s equals missing, w is returned as 0, otherwise 1. +func ParseWithNA(s, missing string) (v, w float64, err error) { + if s == missing { + return 0, 0, nil + } + v, err = strconv.ParseFloat(s, 64) + if err == nil { + w = 1 + } + return v, w, err +} + +// Prod returns the product of the elements of the slice. +// Returns 1 if len(s) = 0. 
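+//
+// As a quick sketch:
+//
+//	Prod([]float64{2, 3, 4}) // == 24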
+func Prod(s []float64) float64 { + prod := 1.0 + for _, val := range s { + prod *= val + } + return prod +} + +// Reverse reverses the order of elements in the slice. +func Reverse(s []float64) { + for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 { + s[i], s[j] = s[j], s[i] + } +} + +// Round returns the half away from zero rounded value of x with prec precision. +// +// Special cases are: +// Round(±0) = +0 +// Round(±Inf) = ±Inf +// Round(NaN) = NaN +func Round(x float64, prec int) float64 { + if x == 0 { + // Make sure zero is returned + // without the negative bit set. + return 0 + } + // Fast path for positive precision on integers. + if prec >= 0 && x == math.Trunc(x) { + return x + } + pow := math.Pow10(prec) + intermed := x * pow + if math.IsInf(intermed, 0) { + return x + } + if x < 0 { + x = math.Ceil(intermed - 0.5) + } else { + x = math.Floor(intermed + 0.5) + } + + if x == 0 { + return 0 + } + + return x / pow +} + +// RoundEven returns the half even rounded value of x with prec precision. +// +// Special cases are: +// RoundEven(±0) = +0 +// RoundEven(±Inf) = ±Inf +// RoundEven(NaN) = NaN +func RoundEven(x float64, prec int) float64 { + if x == 0 { + // Make sure zero is returned + // without the negative bit set. + return 0 + } + // Fast path for positive precision on integers. + if prec >= 0 && x == math.Trunc(x) { + return x + } + pow := math.Pow10(prec) + intermed := x * pow + if math.IsInf(intermed, 0) { + return x + } + if isHalfway(intermed) { + correction, _ := math.Modf(math.Mod(intermed, 2)) + intermed += correction + if intermed > 0 { + x = math.Floor(intermed) + } else { + x = math.Ceil(intermed) + } + } else { + if x < 0 { + x = math.Ceil(intermed - 0.5) + } else { + x = math.Floor(intermed + 0.5) + } + } + + if x == 0 { + return 0 + } + + return x / pow +} + +func isHalfway(x float64) bool { + _, frac := math.Modf(x) + frac = math.Abs(frac) + return frac == 0.5 || (math.Nextafter(frac, math.Inf(-1)) < 0.5 && math.Nextafter(frac, math.Inf(1)) > 0.5) +} + +// Same returns true if the input slices have the same length and the all elements +// have the same value with NaN treated as the same. +func Same(s, t []float64) bool { + if len(s) != len(t) { + return false + } + for i, v := range s { + w := t[i] + if v != w && !(math.IsNaN(v) && math.IsNaN(w)) { + return false + } + } + return true +} + +// Scale multiplies every element in dst by the scalar c. +func Scale(c float64, dst []float64) { + if len(dst) > 0 { + f64.ScalUnitary(c, dst) + } +} + +// ScaleTo multiplies the elements in s by c and stores the result in dst. +func ScaleTo(dst []float64, c float64, s []float64) []float64 { + if len(dst) != len(s) { + panic("floats: lengths of slices do not match") + } + if len(dst) > 0 { + f64.ScalUnitaryTo(dst, c, s) + } + return dst +} + +// Span returns a set of N equally spaced points between l and u, where N +// is equal to the length of the destination. The first element of the destination +// is l, the final element of the destination is u. +// +// Panics if len(dst) < 2. +// +// Span also returns the mutated slice dst, so that it can be used in range expressions, +// like: +// +// for i, x := range Span(dst, l, u) { ... } +func Span(dst []float64, l, u float64) []float64 { + n := len(dst) + if n < 2 { + panic("floats: destination must have length >1") + } + + // Special cases for Inf and NaN. 
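+	// Each special case returns directly, so the stepping code after the
+	// switch only ever sees finite bounds.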
+ switch { + case math.IsNaN(l): + for i := range dst[:len(dst)-1] { + dst[i] = math.NaN() + } + dst[len(dst)-1] = u + return dst + case math.IsNaN(u): + for i := range dst[1:] { + dst[i+1] = math.NaN() + } + dst[0] = l + return dst + case math.IsInf(l, 0) && math.IsInf(u, 0): + for i := range dst[:len(dst)/2] { + dst[i] = l + dst[len(dst)-i-1] = u + } + if len(dst)%2 == 1 { + if l != u { + dst[len(dst)/2] = 0 + } else { + dst[len(dst)/2] = l + } + } + return dst + case math.IsInf(l, 0): + for i := range dst[:len(dst)-1] { + dst[i] = l + } + dst[len(dst)-1] = u + return dst + case math.IsInf(u, 0): + for i := range dst[1:] { + dst[i+1] = u + } + dst[0] = l + return dst + } + + step := (u - l) / float64(n-1) + for i := range dst { + dst[i] = l + step*float64(i) + } + return dst +} + +// Sub subtracts, element-wise, the elements of s from dst. Panics if +// the lengths of dst and s do not match. +func Sub(dst, s []float64) { + if len(dst) != len(s) { + panic("floats: length of the slices do not match") + } + f64.AxpyUnitaryTo(dst, -1, s, dst) +} + +// SubTo subtracts, element-wise, the elements of t from s and +// stores the result in dst. Panics if the lengths of s, t and dst do not match. +func SubTo(dst, s, t []float64) []float64 { + if len(s) != len(t) { + panic("floats: length of subtractor and subtractee do not match") + } + if len(dst) != len(s) { + panic("floats: length of destination does not match length of subtractor") + } + f64.AxpyUnitaryTo(dst, -1, t, s) + return dst +} + +// Sum returns the sum of the elements of the slice. +func Sum(s []float64) float64 { + return f64.Sum(s) +} + +// Within returns the first index i where s[i] <= v < s[i+1]. Within panics if: +// - len(s) < 2 +// - s is not sorted +func Within(s []float64, v float64) int { + if len(s) < 2 { + panic("floats: slice length less than 2") + } + if !sort.Float64sAreSorted(s) { + panic("floats: input slice not sorted") + } + if v < s[0] || v >= s[len(s)-1] || math.IsNaN(v) { + return -1 + } + for i, f := range s[1:] { + if v < f { + return i + } + } + return -1 +} diff --git a/vendor/gonum.org/v1/gonum/fourier/doc.go b/vendor/gonum.org/v1/gonum/fourier/doc.go new file mode 100644 index 0000000..49ac326 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fourier provides functions to perform Discrete Fourier Transforms. +package fourier // import "gonum.org/v1/gonum/fourier" diff --git a/vendor/gonum.org/v1/gonum/fourier/fourier.go b/vendor/gonum.org/v1/gonum/fourier/fourier.go new file mode 100644 index 0000000..fb8874e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/fourier.go @@ -0,0 +1,260 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fourier + +import "gonum.org/v1/gonum/fourier/internal/fftpack" + +// FFT implements Fast Fourier Transform and its inverse for real sequences. +type FFT struct { + work []float64 + ifac [15]int + + // real temporarily store complex data as + // pairs of real values to allow passing to + // the backing code. The length of real + // must always be half the length of work. + real []float64 +} + +// NewFFT returns an FFT initialized for work on sequences of length n. 
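+//
+// A minimal usage sketch, where samples is a caller-supplied []float64
+// (round trip scaled by the documented factor of len(samples)):
+//
+//	fft := NewFFT(len(samples))
+//	coeffs := fft.Coefficients(nil, samples)
+//	back := fft.Sequence(nil, coeffs) // back[i] ≈ float64(len(samples)) * samples[i]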
+func NewFFT(n int) *FFT { + var t FFT + t.Reset(n) + return &t +} + +// Len returns the length of the acceptable input. +func (t *FFT) Len() int { return len(t.real) } + +// Reset reinitializes the FFT for work on sequences of length n. +func (t *FFT) Reset(n int) { + if 2*n <= cap(t.work) { + t.work = t.work[:2*n] + t.real = t.real[:n] + } else { + t.work = make([]float64, 2*n) + t.real = make([]float64, n) + } + fftpack.Rffti(n, t.work, t.ifac[:]) +} + +// Coefficients computes the Fourier coefficients of the input sequence, +// converting the time series in seq into the frequency spectrum, placing +// the result in dst and returning it. This transform is unnormalized; a +// call to Coefficients followed by a call of Sequence will multiply the +// input sequence by the length of the sequence. +// +// If the length of seq is not t.Len(), Coefficients will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal t.Len()/2+1, Coefficients will panic. +func (t *FFT) Coefficients(dst []complex128, seq []float64) []complex128 { + if len(seq) != t.Len() { + panic("fourier: sequence length mismatch") + } + if dst == nil { + dst = make([]complex128, t.Len()/2+1) + } else if len(dst) != t.Len()/2+1 { + panic("fourier: destination length mismatch") + } + copy(t.real, seq) + fftpack.Rfftf(len(t.real), t.real, t.work, t.ifac[:]) + dst[0] = complex(t.real[0], 0) + if len(seq) < 2 { + return dst + } + if len(seq)%2 == 1 { + dst[len(dst)-1] = complex(t.real[len(t.real)-2], t.real[len(t.real)-1]) + } else { + dst[len(dst)-1] = complex(t.real[len(t.real)-1], 0) + } + for i := 1; i < len(dst)-1; i++ { + dst[i] = complex(t.real[2*i-1], t.real[2*i]) + } + return dst +} + +// Sequence computes the real perodic sequence from the Fourier coefficients, +// converting the frequency spectrum in coeff into a time series, placing the +// result in dst and returning it. This transform is unnormalized; a call to +// Coefficients followed by a call of Sequence will multiply the input sequence +// by the length of the sequence. +// +// If the length of coeff is not t.Len()/2+1, Sequence will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal the length of coeff, Sequence will panic. +func (t *FFT) Sequence(dst []float64, coeff []complex128) []float64 { + if len(coeff) != t.Len()/2+1 { + panic("fourier: coefficients length mismatch") + } + if dst == nil { + dst = make([]float64, t.Len()) + } else if len(dst) != t.Len() { + panic("fourier: destination length mismatch") + } + dst[0] = real(coeff[0]) + if len(dst) < 2 { + return dst + } + nf := coeff[len(coeff)-1] + if len(dst)%2 == 1 { + dst[len(dst)-2] = real(nf) + dst[len(dst)-1] = imag(nf) + } else { + dst[len(dst)-1] = real(nf) + } + + for i, cv := range coeff[1 : len(coeff)-1] { + dst[2*i+1] = real(cv) + dst[2*i+2] = imag(cv) + } + fftpack.Rfftb(len(dst), dst, t.work, t.ifac[:]) + return dst +} + +// Freq returns the relative frequency center for coefficient i. +// Freq will panic if i is negative or greater than or equal to t.Len(). +func (t *FFT) Freq(i int) float64 { + if i < 0 || t.Len() <= i { + panic("fourier: index out of range") + } + step := 1 / float64(t.Len()) + return step * float64(i) +} + +// CmplxFFT implements Fast Fourier Transform and its inverse for complex sequences. 
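+// It is used like FFT above, but on []complex128 (a usage sketch, with seq a
+// caller-supplied slice):
+//
+//	t := NewCmplxFFT(len(seq))
+//	coeffs := t.Coefficients(nil, seq)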
+type CmplxFFT struct { + work []float64 + ifac [15]int + + // real temporarily store complex data as + // pairs of real values to allow passing to + // the backing code. The length of real + // must always be half the length of work. + real []float64 +} + +// NewCmplxFFT returns an CmplxFFT initialized for work on sequences of length n. +func NewCmplxFFT(n int) *CmplxFFT { + var t CmplxFFT + t.Reset(n) + return &t +} + +// Len returns the length of the acceptable input. +func (t *CmplxFFT) Len() int { return len(t.work) / 4 } + +// Reset reinitializes the FFT for work on sequences of length n. +func (t *CmplxFFT) Reset(n int) { + if 4*n <= cap(t.work) { + t.work = t.work[:4*n] + t.real = t.real[:2*n] + } else { + t.work = make([]float64, 4*n) + t.real = make([]float64, 2*n) + } + fftpack.Cffti(n, t.work, t.ifac[:]) +} + +// Coefficients computes the Fourier coefficients of a complex input sequence, +// converting the time series in seq into the frequency spectrum, placing +// the result in dst and returning it. This transform is unnormalized; a call +// to Coefficients followed by a call of Sequence will multiply the input +// sequence by the length of the sequence. +// +// If the length of seq is not t.Len(), Coefficients will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal the length of seq, Coefficients will panic. +// It is safe to use the same slice for dst and seq. +func (t *CmplxFFT) Coefficients(dst, seq []complex128) []complex128 { + if len(seq) != t.Len() { + panic("fourier: sequence length mismatch") + } + if dst == nil { + dst = make([]complex128, len(seq)) + } else if len(dst) != len(seq) { + panic("fourier: destination length mismatch") + } + for i, cv := range seq { + t.real[2*i] = real(cv) + t.real[2*i+1] = imag(cv) + } + fftpack.Cfftf(len(dst), t.real, t.work, t.ifac[:]) + for i := range dst { + dst[i] = complex(t.real[2*i], t.real[2*i+1]) + } + return dst +} + +// Sequence computes the complex perodic sequence from the Fourier coefficients, +// converting the frequency spectrum in coeff into a time series, placing the +// result in dst and returning it. This transform is unnormalized; a call to +// Coefficients followed by a call of Sequence will multiply the input sequence +// by the length of the sequence. +// +// If the length of coeff is not t.Len(), Sequence will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal the length of coeff, Sequence will panic. +// It is safe to use the same slice for dst and coeff. +func (t *CmplxFFT) Sequence(dst, coeff []complex128) []complex128 { + if len(coeff) != t.Len() { + panic("fourier: coefficients length mismatch") + } + if dst == nil { + dst = make([]complex128, len(coeff)) + } else if len(dst) != len(coeff) { + panic("fourier: destination length mismatch") + } + for i, cv := range coeff { + t.real[2*i] = real(cv) + t.real[2*i+1] = imag(cv) + } + fftpack.Cfftb(len(dst), t.real, t.work, t.ifac[:]) + for i := range dst { + dst[i] = complex(t.real[2*i], t.real[2*i+1]) + } + return dst +} + +// Freq returns the relative frequency center for coefficient i. +// Freq will panic if i is negative or greater than or equal to t.Len(). 
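+// Frequencies are in cycles per sample; for example, with t.Len() == 4 the
+// centers are 0, 0.25, -0.5 and -0.25 (a worked sketch of the code below).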
+func (t *CmplxFFT) Freq(i int) float64 { + if i < 0 || t.Len() <= i { + panic("fourier: index out of range") + } + step := 1 / float64(t.Len()) + if i < (t.Len()-1)/2+1 { + return step * float64(i) + } + return step * float64(i-t.Len()) +} + +// ShiftIdx returns a shifted index into a slice of coefficients +// returned by the CmplxFFT so that indexing into the coefficients +// places the zero frequency component at the center of the spectrum. +// ShiftIdx will panic if i is negative or greater than or equal to +// t.Len(). +func (t *CmplxFFT) ShiftIdx(i int) int { + if i < 0 || t.Len() <= i { + panic("fourier: index out of range") + } + h := t.Len() / 2 + if i < h { + return i + (t.Len()+1)/2 + } + return i - h +} + +// UnshiftIdx returns inverse of ShiftIdx. UnshiftIdx will panic if i is +// negative or greater than or equal to t.Len(). +func (t *CmplxFFT) UnshiftIdx(i int) int { + if i < 0 || t.Len() <= i { + panic("fourier: index out of range") + } + h := (t.Len() + 1) / 2 + if i < h { + return i + t.Len()/2 + } + return i - h +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_bounds_checks.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_bounds_checks.go new file mode 100644 index 0000000..b0c812b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_bounds_checks.go @@ -0,0 +1,88 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file must be kept in sync with array_no_bound_checks.go. + +// +build bounds + +package fftpack + +import "fmt" + +// The types in array.go implement Fortran-like arrays for bootstrapping +// the implementation of the FFT functions translated from FFTPACK; they +// are column-major. 
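+//
+// As a worked sketch of the layout: for a twoArray constructed with
+// newTwoArray(i, j, data), element (r, c) is stored at data[r+i*c], and for a
+// threeArray from newThreeArray(i, j, k, data), element (r, c, p) is stored
+// at data[r+i*c+i*j*p].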
+ +type twoArray struct { + i, j int + jStride int + data []float64 +} + +func newTwoArray(i, j int, data []float64) twoArray { + if len(data) < i*j { + panic(fmt.Sprintf("short data: len(data)=%d, i=%d, j=%d", len(data), i, j)) + } + return twoArray{ + i: i, + j: j, + jStride: i, + data: data[:i*j], + } +} + +func (a twoArray) at(i, j int) float64 { + if i < 0 || a.i <= i || j < 0 || a.j <= j { + panic(fmt.Sprintf("out of bounds at(%d, %d): bounds i=%d, j=%d", i, j, a.i, a.j)) + } + return a.data[i+a.jStride*j] +} + +func (a twoArray) set(i, j int, v float64) { + if i < 0 || a.i <= i || j < 0 || a.j <= j { + panic(fmt.Sprintf("out of bounds set(%d, %d): bounds i=%d, j=%d", i, j, a.i, a.j)) + } + a.data[i+a.jStride*j] = v +} + +func (a twoArray) add(i, j int, v float64) { + if i < 0 || a.i <= i || j < 0 || a.j <= j { + panic(fmt.Sprintf("out of bounds set(%d, %d): bounds i=%d, j=%d", i, j, a.i, a.j)) + } + a.data[i+a.jStride*j] += v +} + +type threeArray struct { + i, j, k int + jStride, kStride int + data []float64 +} + +func newThreeArray(i, j, k int, data []float64) threeArray { + if len(data) < i*j*k { + panic(fmt.Sprintf("short data: len(data)=%d, i=%d, j=%d, k=%d", len(data), i, j, k)) + } + return threeArray{ + i: i, + j: j, + k: k, + jStride: i, + kStride: i * j, + data: data[:i*j*k], + } +} + +func (a threeArray) at(i, j, k int) float64 { + if i < 0 || a.i <= i || j < 0 || a.j <= j || k < 0 || a.k <= k { + panic(fmt.Sprintf("out of bounds at(%d, %d, %d): bounds i=%d, j=%d, k=%d", i, j, k, a.i, a.j, a.k)) + } + return a.data[i+a.jStride*j+a.kStride*k] +} + +func (a threeArray) set(i, j, k int, v float64) { + if i < 0 || a.i <= i || j < 0 || a.j <= j || k < 0 || a.k <= k { + panic(fmt.Sprintf("out of bounds set(%d, %d, %d): bounds i=%d, j=%d, k=%d", i, j, k, a.i, a.j, a.k)) + } + a.data[i+a.jStride*j+a.kStride*k] = v +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_no_bounds_checks.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_no_bounds_checks.go new file mode 100644 index 0000000..ee25655 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/array_no_bounds_checks.go @@ -0,0 +1,64 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file must be kept in sync with array_bound_checks.go. + +// +build !bounds + +package fftpack + +// The types in array.go implement Fortran-like arrays for bootstrapping +// the implementation of the FFT functions translated from FFTPACK; they +// are column-major. 
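+// The checked accessors in array_bounds_checks.go are compiled in only when
+// building with the "bounds" tag; this file provides the default, unchecked
+// variants with the same layout.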
+ +type twoArray struct { + jStride int + data []float64 +} + +func newTwoArray(i, j int, data []float64) twoArray { + if len(data) < i*j { + panic("fourier: short data") + } + return twoArray{ + jStride: i, + data: data[:i*j], + } +} + +func (a twoArray) at(i, j int) float64 { + return a.data[i+a.jStride*j] +} + +func (a twoArray) set(i, j int, v float64) { + a.data[i+a.jStride*j] = v +} + +func (a twoArray) add(i, j int, v float64) { + a.data[i+a.jStride*j] += v +} + +type threeArray struct { + jStride, kStride int + data []float64 +} + +func newThreeArray(i, j, k int, data []float64) threeArray { + if len(data) < i*j*k { + panic("fourier: short data") + } + return threeArray{ + jStride: i, + kStride: i * j, + data: data[:i*j*k], + } +} + +func (a threeArray) at(i, j, k int) float64 { + return a.data[i+a.jStride*j+a.kStride*k] +} + +func (a threeArray) set(i, j, k int, v float64) { + a.data[i+a.jStride*j+a.kStride*k] = v +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cfft.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cfft.go new file mode 100644 index 0000000..d6cb911 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cfft.go @@ -0,0 +1,652 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK cfft functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. + +package fftpack + +import "math" + +// Cffti initializes the array work which is used in both Cfftf +// and Cfftb. the prime factorization of n together with a +// tabulation of the trigonometric functions are computed and +// stored in work. +// +// input parameter +// +// n The length of the sequence to be transformed. +// +// Output parameters: +// +// work A work array which must be dimensioned at least 4*n. +// the same work array can be used for both Cfftf and Cfftb +// as long as n remains unchanged. Different work arrays +// are required for different values of n. The contents of +// work must not be changed between calls of Cfftf or Cfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length 15. 
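+//
+// A minimal calling sketch; the same work and ifac are then passed, unchanged,
+// to Cfftf and Cfftb:
+//
+//	work := make([]float64, 4*n)
+//	ifac := make([]int, 15)
+//	Cffti(n, work, ifac)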
+func Cffti(n int, work []float64, ifac []int) { + if len(work) < 4*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + cffti1(n, work[2*n:4*n], ifac[:15]) +} + +func cffti1(n int, wa []float64, ifac []int) { + ntryh := [4]int{3, 4, 2, 5} + + nl := n + nf := 0 + +outer: + for j, ntry := 0, 0; ; j++ { + if j < 4 { + ntry = ntryh[j] + } else { + ntry += 2 + } + for { + if nl%ntry != 0 { + continue outer + } + + ifac[nf+2] = ntry + nl /= ntry + nf++ + + if ntry == 2 && nf != 1 { + for i := 1; i < nf; i++ { + ib := nf - i + 1 + ifac[ib+1] = ifac[ib] + } + ifac[2] = 2 + } + + if nl == 1 { + break outer + } + } + } + + ifac[0] = n + ifac[1] = nf + + argh := 2 * math.Pi / float64(n) + i := 1 + l1 := 1 + for k1 := 0; k1 < nf; k1++ { + ip := ifac[k1+2] + ld := 0 + l2 := l1 * ip + ido := n / l2 + idot := 2*ido + 2 + for j := 0; j < ip-1; j++ { + i1 := i + wa[i-1] = 1 + wa[i] = 0 + ld += l1 + var fi float64 + argld := float64(ld) * argh + for ii := 3; ii < idot; ii += 2 { + i += 2 + fi++ + arg := fi * argld + wa[i-1] = math.Cos(arg) + wa[i] = math.Sin(arg) + } + if ip > 5 { + wa[i1-1] = wa[i-1] + wa[i1] = wa[i] + } + } + l1 = l2 + } +} + +// Cfftf computes the forward complex Discrete Fourier transform +// (the Fourier analysis). Equivalently, Cfftf computes the +// Fourier coefficients of a complex periodic sequence. The +// transform is defined below at output parameter c. +// +// Input parameters: +// +// n The length of the array c to be transformed. The method +// is most efficient when n is a product of small primes. +// n may change so long as different work arrays are provided. +// +// c A complex array of length n which contains the sequence +// to be transformed. +// +// work A real work array which must be dimensioned at least 4*n. +// in the program that calls Cfftf. The work array must be +// initialized by calling subroutine Cffti(n,work,ifac) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// the same work array can be used by Cfftf and Cfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length of at least 15. +// +// Output parameters: +// +// c for j=0, ..., n-1 +// c[j]=the sum from k=0, ..., n-1 of +// c[k]*exp(-i*j*k*2*pi/n) +// +// where i=sqrt(-1) +// +// This transform is unnormalized since a call of Cfftf +// followed by a call of Cfftb will multiply the input +// sequence by n. +// +// The n elements of c are represented in n pairs of real +// values in r where c[j] = r[j*2]+r[j*2+1]i. +// +// work Contains results which must not be destroyed between +// calls of Cfftf or Cfftb. +// ifac Contains results which must not be destroyed between +// calls of Cfftf or Cfftb. +func Cfftf(n int, r, work []float64, ifac []int) { + if len(r) < 2*n { + panic("fourier: short sequence") + } + if len(work) < 4*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + cfft1(n, r[:2*n], work[:2*n], work[2*n:4*n], ifac[:15], -1) +} + +// Cfftb computes the backward complex Discrete Fourier Transform +// (the Fourier synthesis). Equivalently, Cfftf computes the computes +// a complex periodic sequence from its Fourier coefficients. The +// transform is defined below at output parameter c. 
+// +// Input parameters: +// +// n The length of the array c to be transformed. The method +// is most efficient when n is a product of small primes. +// n may change so long as different work arrays are provided. +// +// c A complex array of length n which contains the sequence +// to be transformed. +// +// work A real work array which must be dimensioned at least 4*n. +// in the program that calls Cfftb. The work array must be +// initialized by calling subroutine Cffti(n,work,ifac) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// The same work array can be used by Cfftf and Cfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length of at least 15. +// +// Output parameters: +// +// c for j=0, ..., n-1 +// c[j]=the sum from k=0, ..., n-1 of +// c[k]*exp(i*j*k*2*pi/n) +// +// where i=sqrt(-1) +// +// This transform is unnormalized since a call of Cfftf +// followed by a call of Cfftb will multiply the input +// sequence by n. +// +// The n elements of c are represented in n pairs of real +// values in r where c[j] = r[j*2]+r[j*2+1]i. +// +// work Contains results which must not be destroyed between +// calls of Cfftf or Cfftb. +// ifac Contains results which must not be destroyed between +// calls of Cfftf or Cfftb. +func Cfftb(n int, r, work []float64, ifac []int) { + if len(r) < 2*n { + panic("fourier: short sequence") + } + if len(work) < 4*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + cfft1(n, r[:2*n], work[:2*n], work[2*n:4*n], ifac[:15], 1) +} + +// cfft1 implements cfftf1 and cfftb1 depending on sign. +func cfft1(n int, c, ch, wa []float64, ifac []int, sign float64) { + nf := ifac[1] + na := false + l1 := 1 + iw := 0 + + for k1 := 1; k1 <= nf; k1++ { + ip := ifac[k1+1] + l2 := ip * l1 + ido := n / l2 + idot := 2 * ido + idl1 := idot * l1 + + switch ip { + case 4: + ix2 := iw + idot + ix3 := ix2 + idot + if na { + pass4(idot, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:], sign) + } else { + pass4(idot, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:], sign) + } + na = !na + case 2: + if na { + pass2(idot, l1, ch, c, wa[iw:], sign) + } else { + pass2(idot, l1, c, ch, wa[iw:], sign) + } + na = !na + case 3: + ix2 := iw + idot + if na { + pass3(idot, l1, ch, c, wa[iw:], wa[ix2:], sign) + } else { + pass3(idot, l1, c, ch, wa[iw:], wa[ix2:], sign) + } + na = !na + case 5: + ix2 := iw + idot + ix3 := ix2 + idot + ix4 := ix3 + idot + if na { + pass5(idot, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:], sign) + } else { + pass5(idot, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:], sign) + } + na = !na + default: + var nac bool + if na { + nac = pass(idot, ip, l1, idl1, ch, ch, ch, c, c, wa[iw:], sign) + } else { + nac = pass(idot, ip, l1, idl1, c, c, c, ch, ch, wa[iw:], sign) + } + if nac { + na = !na + } + } + + l1 = l2 + iw += (ip - 1) * idot + } + + if na { + for i := 0; i < 2*n; i++ { + c[i] = ch[i] + } + } +} + +// pass2 implements passf2 and passb2 depending on sign. 
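+// Throughout these passes sign is -1 for the forward transform (Cfftf) and
+// +1 for the backward transform (Cfftb); pass2 applies one radix-2 stage of
+// the mixed-radix decomposition.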
+func pass2(ido, l1 int, cc, ch, wa1 []float64, sign float64) { + cc3 := newThreeArray(ido, 2, l1, cc) + ch3 := newThreeArray(ido, l1, 2, ch) + + if ido <= 2 { + for k := 0; k < l1; k++ { + ch3.set(0, k, 0, cc3.at(0, 0, k)+cc3.at(0, 1, k)) + ch3.set(0, k, 1, cc3.at(0, 0, k)-cc3.at(0, 1, k)) + ch3.set(1, k, 0, cc3.at(1, 0, k)+cc3.at(1, 1, k)) + ch3.set(1, k, 1, cc3.at(1, 0, k)-cc3.at(1, 1, k)) + } + return + } + for k := 0; k < l1; k++ { + for i := 1; i < ido; i += 2 { + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+cc3.at(i-1, 1, k)) + tr2 := cc3.at(i-1, 0, k) - cc3.at(i-1, 1, k) + ch3.set(i, k, 0, cc3.at(i, 0, k)+cc3.at(i, 1, k)) + ti2 := cc3.at(i, 0, k) - cc3.at(i, 1, k) + ch3.set(i, k, 1, wa1[i-1]*ti2+sign*wa1[i]*tr2) + ch3.set(i-1, k, 1, wa1[i-1]*tr2-sign*wa1[i]*ti2) + } + } +} + +// pass3 implements passf3 and passb3 depending on sign. +func pass3(ido, l1 int, cc, ch, wa1, wa2 []float64, sign float64) { + const ( + taur = -0.5 + taui = 0.866025403784439 // sqrt(3)/2 + ) + + cc3 := newThreeArray(ido, 3, l1, cc) + ch3 := newThreeArray(ido, l1, 3, ch) + + if ido == 2 { + for k := 0; k < l1; k++ { + tr2 := cc3.at(0, 1, k) + cc3.at(0, 2, k) + cr2 := cc3.at(0, 0, k) + taur*tr2 + ch3.set(0, k, 0, cc3.at(0, 0, k)+tr2) + ti2 := cc3.at(1, 1, k) + cc3.at(1, 2, k) + ci2 := cc3.at(1, 0, k) + taur*ti2 + ch3.set(1, k, 0, cc3.at(1, 0, k)+ti2) + cr3 := sign * taui * (cc3.at(0, 1, k) - cc3.at(0, 2, k)) + ci3 := sign * taui * (cc3.at(1, 1, k) - cc3.at(1, 2, k)) + ch3.set(0, k, 1, cr2-ci3) + ch3.set(0, k, 2, cr2+ci3) + ch3.set(1, k, 1, ci2+cr3) + ch3.set(1, k, 2, ci2-cr3) + } + return + } + for k := 0; k < l1; k++ { + for i := 1; i < ido; i += 2 { + tr2 := cc3.at(i-1, 1, k) + cc3.at(i-1, 2, k) + cr2 := cc3.at(i-1, 0, k) + taur*tr2 + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+tr2) + ti2 := cc3.at(i, 1, k) + cc3.at(i, 2, k) + ci2 := cc3.at(i, 0, k) + taur*ti2 + ch3.set(i, k, 0, cc3.at(i, 0, k)+ti2) + cr3 := sign * taui * (cc3.at(i-1, 1, k) - cc3.at(i-1, 2, k)) + ci3 := sign * taui * (cc3.at(i, 1, k) - cc3.at(i, 2, k)) + dr2 := cr2 - ci3 + dr3 := cr2 + ci3 + di2 := ci2 + cr3 + di3 := ci2 - cr3 + ch3.set(i, k, 1, wa1[i-1]*di2+sign*wa1[i]*dr2) + ch3.set(i-1, k, 1, wa1[i-1]*dr2-sign*wa1[i]*di2) + ch3.set(i, k, 2, wa2[i-1]*di3+sign*wa2[i]*dr3) + ch3.set(i-1, k, 2, wa2[i-1]*dr3-sign*wa2[i]*di3) + } + } +} + +// pass4 implements passf4 and passb4 depending on sign. 
+func pass4(ido, l1 int, cc, ch, wa1, wa2, wa3 []float64, sign float64) { + cc3 := newThreeArray(ido, 4, l1, cc) + ch3 := newThreeArray(ido, l1, 4, ch) + + if ido == 2 { + for k := 0; k < l1; k++ { + ti1 := cc3.at(1, 0, k) - cc3.at(1, 2, k) + ti2 := cc3.at(1, 0, k) + cc3.at(1, 2, k) + tr4 := sign * (cc3.at(1, 3, k) - cc3.at(1, 1, k)) + ti3 := cc3.at(1, 1, k) + cc3.at(1, 3, k) + tr1 := cc3.at(0, 0, k) - cc3.at(0, 2, k) + tr2 := cc3.at(0, 0, k) + cc3.at(0, 2, k) + ti4 := sign * (cc3.at(0, 1, k) - cc3.at(0, 3, k)) + tr3 := cc3.at(0, 1, k) + cc3.at(0, 3, k) + ch3.set(0, k, 0, tr2+tr3) + ch3.set(0, k, 2, tr2-tr3) + ch3.set(1, k, 0, ti2+ti3) + ch3.set(1, k, 2, ti2-ti3) + ch3.set(0, k, 1, tr1+tr4) + ch3.set(0, k, 3, tr1-tr4) + ch3.set(1, k, 1, ti1+ti4) + ch3.set(1, k, 3, ti1-ti4) + } + return + } + for k := 0; k < l1; k++ { + for i := 1; i < ido; i += 2 { + ti1 := cc3.at(i, 0, k) - cc3.at(i, 2, k) + ti2 := cc3.at(i, 0, k) + cc3.at(i, 2, k) + ti3 := cc3.at(i, 1, k) + cc3.at(i, 3, k) + tr4 := sign * (cc3.at(i, 3, k) - cc3.at(i, 1, k)) + tr1 := cc3.at(i-1, 0, k) - cc3.at(i-1, 2, k) + tr2 := cc3.at(i-1, 0, k) + cc3.at(i-1, 2, k) + ti4 := sign * (cc3.at(i-1, 1, k) - cc3.at(i-1, 3, k)) + tr3 := cc3.at(i-1, 1, k) + cc3.at(i-1, 3, k) + ch3.set(i-1, k, 0, tr2+tr3) + cr3 := tr2 - tr3 + ch3.set(i, k, 0, ti2+ti3) + ci3 := ti2 - ti3 + cr2 := tr1 + tr4 + cr4 := tr1 - tr4 + ci2 := ti1 + ti4 + ci4 := ti1 - ti4 + ch3.set(i-1, k, 1, wa1[i-1]*cr2-sign*wa1[i]*ci2) + ch3.set(i, k, 1, wa1[i-1]*ci2+sign*wa1[i]*cr2) + ch3.set(i-1, k, 2, wa2[i-1]*cr3-sign*wa2[i]*ci3) + ch3.set(i, k, 2, wa2[i-1]*ci3+sign*wa2[i]*cr3) + ch3.set(i-1, k, 3, wa3[i-1]*cr4-sign*wa3[i]*ci4) + ch3.set(i, k, 3, wa3[i-1]*ci4+sign*wa3[i]*cr4) + } + } +} + +// pass5 implements passf5 and passb5 depending on sign. 
+func pass5(ido, l1 int, cc, ch, wa1, wa2, wa3, wa4 []float64, sign float64) { + const ( + tr11 = 0.309016994374947 + ti11 = 0.951056516295154 + tr12 = -0.809016994374947 + ti12 = 0.587785252292473 + ) + + cc3 := newThreeArray(ido, 5, l1, cc) + ch3 := newThreeArray(ido, l1, 5, ch) + + if ido == 2 { + for k := 0; k < l1; k++ { + ti5 := cc3.at(1, 1, k) - cc3.at(1, 4, k) + ti2 := cc3.at(1, 1, k) + cc3.at(1, 4, k) + ti4 := cc3.at(1, 2, k) - cc3.at(1, 3, k) + ti3 := cc3.at(1, 2, k) + cc3.at(1, 3, k) + tr5 := cc3.at(0, 1, k) - cc3.at(0, 4, k) + tr2 := cc3.at(0, 1, k) + cc3.at(0, 4, k) + tr4 := cc3.at(0, 2, k) - cc3.at(0, 3, k) + tr3 := cc3.at(0, 2, k) + cc3.at(0, 3, k) + ch3.set(0, k, 0, cc3.at(0, 0, k)+tr2+tr3) + ch3.set(1, k, 0, cc3.at(1, 0, k)+ti2+ti3) + cr2 := cc3.at(0, 0, k) + tr11*tr2 + tr12*tr3 + ci2 := cc3.at(1, 0, k) + tr11*ti2 + tr12*ti3 + cr3 := cc3.at(0, 0, k) + tr12*tr2 + tr11*tr3 + ci3 := cc3.at(1, 0, k) + tr12*ti2 + tr11*ti3 + cr5 := sign * (ti11*tr5 + ti12*tr4) + ci5 := sign * (ti11*ti5 + ti12*ti4) + cr4 := sign * (ti12*tr5 - ti11*tr4) + ci4 := sign * (ti12*ti5 - ti11*ti4) + ch3.set(0, k, 1, cr2-ci5) + ch3.set(0, k, 4, cr2+ci5) + ch3.set(1, k, 1, ci2+cr5) + ch3.set(1, k, 2, ci3+cr4) + ch3.set(0, k, 2, cr3-ci4) + ch3.set(0, k, 3, cr3+ci4) + ch3.set(1, k, 3, ci3-cr4) + ch3.set(1, k, 4, ci2-cr5) + } + return + } + for k := 0; k < l1; k++ { + for i := 1; i < ido; i += 2 { + ti5 := cc3.at(i, 1, k) - cc3.at(i, 4, k) + ti2 := cc3.at(i, 1, k) + cc3.at(i, 4, k) + ti4 := cc3.at(i, 2, k) - cc3.at(i, 3, k) + ti3 := cc3.at(i, 2, k) + cc3.at(i, 3, k) + tr5 := cc3.at(i-1, 1, k) - cc3.at(i-1, 4, k) + tr2 := cc3.at(i-1, 1, k) + cc3.at(i-1, 4, k) + tr4 := cc3.at(i-1, 2, k) - cc3.at(i-1, 3, k) + tr3 := cc3.at(i-1, 2, k) + cc3.at(i-1, 3, k) + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+tr2+tr3) + ch3.set(i, k, 0, cc3.at(i, 0, k)+ti2+ti3) + cr2 := cc3.at(i-1, 0, k) + tr11*tr2 + tr12*tr3 + ci2 := cc3.at(i, 0, k) + tr11*ti2 + tr12*ti3 + cr3 := cc3.at(i-1, 0, k) + tr12*tr2 + tr11*tr3 + ci3 := cc3.at(i, 0, k) + tr12*ti2 + tr11*ti3 + cr5 := sign * (ti11*tr5 + ti12*tr4) + ci5 := sign * (ti11*ti5 + ti12*ti4) + cr4 := sign * (ti12*tr5 - ti11*tr4) + ci4 := sign * (ti12*ti5 - ti11*ti4) + dr3 := cr3 - ci4 + dr4 := cr3 + ci4 + di3 := ci3 + cr4 + di4 := ci3 - cr4 + dr5 := cr2 + ci5 + dr2 := cr2 - ci5 + di5 := ci2 - cr5 + di2 := ci2 + cr5 + ch3.set(i-1, k, 1, wa1[i-1]*dr2-sign*wa1[i]*di2) + ch3.set(i, k, 1, wa1[i-1]*di2+sign*wa1[i]*dr2) + ch3.set(i-1, k, 2, wa2[i-1]*dr3-sign*wa2[i]*di3) + ch3.set(i, k, 2, wa2[i-1]*di3+sign*wa2[i]*dr3) + ch3.set(i-1, k, 3, wa3[i-1]*dr4-sign*wa3[i]*di4) + ch3.set(i, k, 3, wa3[i-1]*di4+sign*wa3[i]*dr4) + ch3.set(i-1, k, 4, wa4[i-1]*dr5-sign*wa4[i]*di5) + ch3.set(i, k, 4, wa4[i-1]*di5+sign*wa4[i]*dr5) + } + } +} + +// pass implements passf and passb depending on sign. 
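+// It handles factors other than 2, 3, 4 and 5 (the default case of the
+// switch in cfft1).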
+func pass(ido, ip, l1, idl1 int, cc, c1, c2, ch, ch2, wa []float64, sign float64) (nac bool) { + cc3 := newThreeArray(ido, ip, l1, cc) + c13 := newThreeArray(ido, l1, ip, c1) + ch3 := newThreeArray(ido, l1, ip, ch) + c2m := newTwoArray(idl1, ip, c2) + ch2m := newTwoArray(idl1, ip, ch2) + + idot := ido / 2 + ipph := (ip + 1) / 2 + idp := ip * ido + + if ido < l1 { + for j := 1; j < ipph; j++ { + jc := ip - j + for i := 0; i < ido; i++ { + for k := 0; k < l1; k++ { + ch3.set(i, k, j, cc3.at(i, j, k)+cc3.at(i, jc, k)) + ch3.set(i, k, jc, cc3.at(i, j, k)-cc3.at(i, jc, k)) + } + } + } + for i := 0; i < ido; i++ { + for k := 0; k < l1; k++ { + ch3.set(i, k, 0, cc3.at(i, 0, k)) + } + } + } else { + for j := 1; j < ipph; j++ { + jc := ip - j + for k := 0; k < l1; k++ { + for i := 0; i < ido; i++ { + ch3.set(i, k, j, cc3.at(i, j, k)+cc3.at(i, jc, k)) + ch3.set(i, k, jc, cc3.at(i, j, k)-cc3.at(i, jc, k)) + } + } + } + for k := 0; k < l1; k++ { + for i := 0; i < ido; i++ { + ch3.set(i, k, 0, cc3.at(i, 0, k)) + } + } + } + + idl := 1 - ido + inc := 0 + for l := 1; l < ipph; l++ { + lc := ip - l + idl += ido + for ik := 0; ik < idl1; ik++ { + c2m.set(ik, l, ch2m.at(ik, 0)+wa[idl-1]*ch2m.at(ik, 1)) + c2m.set(ik, lc, sign*wa[idl]*ch2m.at(ik, ip-1)) + } + idlj := idl + inc += ido + for j := 2; j < ipph; j++ { + jc := ip - j + idlj += inc + if idlj > idp { + idlj -= idp + } + war := wa[idlj-1] + wai := wa[idlj] + for ik := 0; ik < idl1; ik++ { + c2m.add(ik, l, war*ch2m.at(ik, j)) + c2m.add(ik, lc, sign*wai*ch2m.at(ik, jc)) + } + } + } + + for j := 1; j < ipph; j++ { + for ik := 0; ik < idl1; ik++ { + ch2m.add(ik, 0, ch2m.at(ik, j)) + } + } + + for j := 1; j < ipph; j++ { + jc := ip - j + for ik := 1; ik < idl1; ik += 2 { + ch2m.set(ik-1, j, c2m.at(ik-1, j)-c2m.at(ik, jc)) + ch2m.set(ik-1, jc, c2m.at(ik-1, j)+c2m.at(ik, jc)) + ch2m.set(ik, j, c2m.at(ik, j)+c2m.at(ik-1, jc)) + ch2m.set(ik, jc, c2m.at(ik, j)-c2m.at(ik-1, jc)) + } + } + + if ido == 2 { + return true + } + + for ik := 0; ik < idl1; ik++ { + c2m.set(ik, 0, ch2m.at(ik, 0)) + } + + for j := 1; j < ip; j++ { + for k := 0; k < l1; k++ { + c13.set(0, k, j, ch3.at(0, k, j)) + c13.set(1, k, j, ch3.at(1, k, j)) + } + } + + if idot > l1 { + idj := 1 - ido + for j := 1; j < ip; j++ { + idj += ido + for k := 0; k < l1; k++ { + idij := idj + for i := 3; i < ido; i += 2 { + idij += 2 + c13.set(i-1, k, j, wa[idij-1]*ch3.at(i-1, k, j)-sign*wa[idij]*ch3.at(i, k, j)) + c13.set(i, k, j, wa[idij-1]*ch3.at(i, k, j)+sign*wa[idij]*ch3.at(i-1, k, j)) + } + } + } + + return false + } + + idij := -1 + for j := 1; j < ip; j++ { + idij += 2 + for i := 3; i < ido; i += 2 { + idij += 2 + for k := 0; k < l1; k++ { + c13.set(i-1, k, j, wa[idij-1]*ch3.at(i-1, k, j)-sign*wa[idij]*ch3.at(i, k, j)) + c13.set(i, k, j, wa[idij-1]*ch3.at(i, k, j)+sign*wa[idij]*ch3.at(i-1, k, j)) + } + } + } + return false +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cosq.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cosq.go new file mode 100644 index 0000000..3c24f8d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cosq.go @@ -0,0 +1,220 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK cosq functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. 
+ +package fftpack + +import "math" + +// Cosqi initializes the array work which is used in both Cosqf +// and Cosqb. The prime factorization of n together with a +// tabulation of the trigonometric functions are computed and +// stored in work. +// +// Input parameter +// +// n The length of the sequence to be transformed. the method +// is most efficient when n+1 is a product of small primes. +// +// Output parameters +// +// work A work array which must be dimensioned at least 3*n. +// The same work array can be used for both Cosqf and Cosqb +// as long as n remains unchanged. Different work arrays +// are required for different values of n. The contents of +// work must not be changed between calls of Cosqf or Cosqb. +// +// ifac An integer work array of length at least 15. +func Cosqi(n int, work []float64, ifac []int) { + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + dt := 0.5 * math.Pi / float64(n) + for k := range work[:n] { + work[k] = math.Cos(float64(k+1) * dt) + } + Rffti(n, work[n:], ifac) +} + +// Cosqf computes the Fast Fourier Transform of quarter wave data. +// That is, Cosqf computes the coefficients in a cosine series +// representation with only odd wave numbers. The transform is +// defined below at output parameter x. +// +// Cosqb is the unnormalized inverse of Cosqf since a call of Cosqf +// followed by a call of Cosqb will multiply the input sequence x +// by 4*n. +// +// The array work which is used by subroutine Cosqf must be +// initialized by calling subroutine Cosqi(n,work). +// +// Input parameters +// +// n The length of the array x to be transformed. The method +// is most efficient when n is a product of small primes. +// +// x An array which contains the sequence to be transformed. +// +// work A work array which must be dimensioned at least 3*n +// in the program that calls Cosqf. The work array must be +// initialized by calling subroutine Cosqi(n,work) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=0, ..., n-1 +// x[i] = x[i] + the sum from k=0 to k=n-2 of +// 2*x[k]*cos((2*i+1)*k*pi/(2*n)) +// +// A call of Cosqf followed by a call of +// Cosqb will multiply the sequence x by 4*n. +// Therefore Cosqb is the unnormalized inverse +// of Cosqf. +// +// work Contains initialization calculations which must not +// be destroyed between calls of Cosqf or Cosqb. 
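+//
+// A minimal calling sketch, with x a caller-supplied slice of length n
+// transformed in place after initialization via Cosqi:
+//
+//	work := make([]float64, 3*n)
+//	ifac := make([]int, 15)
+//	Cosqi(n, work, ifac)
+//	Cosqf(n, x, work, ifac)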
+func Cosqf(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n < 2 { + return + } + if n == 2 { + tsqx := math.Sqrt2 * x[1] + x[1] = x[0] - tsqx + x[0] += tsqx + return + } + cosqf1(n, x, work, work[n:], ifac) +} + +func cosqf1(n int, x, w, xh []float64, ifac []int) { + for k := 1; k < (n+1)/2; k++ { + kc := n - k + xh[k] = x[k] + x[kc] + xh[kc] = x[k] - x[kc] + } + if n%2 == 0 { + xh[(n+1)/2] = 2 * x[(n+1)/2] + } + for k := 1; k < (n+1)/2; k++ { + kc := n - k + x[k] = w[k-1]*xh[kc] + w[kc-1]*xh[k] + x[kc] = w[k-1]*xh[k] - w[kc-1]*xh[kc] + } + if n%2 == 0 { + x[(n+1)/2] = w[(n-1)/2] * xh[(n+1)/2] + } + Rfftf(n, x, xh, ifac) + for i := 2; i < n; i += 2 { + x[i-1], x[i] = x[i-1]-x[i], x[i-1]+x[i] + } +} + +// Cosqb computes the Fast Fourier Transform of quarter wave data. +// That is, Cosqb computes a sequence from its representation in +// terms of a cosine series with odd wave numbers. The transform +// is defined below at output parameter x. +// +// Cosqf is the unnormalized inverse of Cosqb since a call of Cosqb +// followed by a call of Cosqf will multiply the input sequence x +// by 4*n. +// +// The array work which is used by subroutine Cosqb must be +// initialized by calling subroutine Cosqi(n,work). +// +// +// Input parameters +// +// n The length of the array x to be transformed. The method +// is most efficient when n is a product of small primes. +// +// x An array which contains the sequence to be transformed. +// +// work A work array which must be dimensioned at least 3*n +// in the program that calls Cosqb. The work array must be +// initialized by calling subroutine Cosqi(n,work) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=0, ..., n-1 +// x[i]= the sum from k=0 to k=n-1 of +// 4*x[k]*cos((2*k+1)*i*pi/(2*n)) +// +// A call of Cosqb followed by a call of +// Cosqf will multiply the sequence x by 4*n. +// Therefore Cosqf is the unnormalized inverse +// of Cosqb. +// +// work Contains initialization calculations which must not +// be destroyed between calls of Cosqb or Cosqf. 
+func Cosqb(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + + if n < 2 { + x[0] *= 4 + return + } + if n == 2 { + x[0], x[1] = 4*(x[0]+x[1]), 2*math.Sqrt2*(x[0]-x[1]) + return + } + cosqb1(n, x, work, work[n:], ifac) +} + +func cosqb1(n int, x, w, xh []float64, ifac []int) { + for i := 2; i < n; i += 2 { + x[i-1], x[i] = x[i-1]+x[i], x[i]-x[i-1] + } + x[0] *= 2 + if n%2 == 0 { + x[n-1] *= 2 + } + Rfftb(n, x, xh, ifac) + for k := 1; k < (n+1)/2; k++ { + kc := n - k + xh[k] = w[k-1]*x[kc] + w[kc-1]*x[k] + xh[kc] = w[k-1]*x[k] - w[kc-1]*x[kc] + } + if n%2 == 0 { + x[(n+1)/2] *= 2 * w[(n-1)/2] + } + for k := 1; k < (n+1)/2; k++ { + x[k] = xh[k] + xh[n-k] + x[n-k] = xh[k] - xh[n-k] + } + x[0] *= 2 +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cost.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cost.go new file mode 100644 index 0000000..5a335aa --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/cost.go @@ -0,0 +1,143 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK cost functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. + +package fftpack + +import "math" + +// Costi initializes the array work which is used in subroutine +// Cost. The prime factorization of n together with a tabulation +// of the trigonometric functions are computed and stored in work. +// +// Input parameter +// +// n The length of the sequence to be transformed. The method +// is most efficient when n-1 is a product of small primes. +// +// Output parameters +// +// work A work array which must be dimensioned at least 3*n. +// Different work arrays are required for different values +// of n. The contents of work must not be changed between +// calls of Cost. +// +// ifac An integer work array of length at least 15. +func Costi(n int, work []float64, ifac []int) { + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n < 4 { + return + } + dt := math.Pi / float64(n-1) + for k := 1; k < n/2; k++ { + fk := float64(k) + work[k] = 2 * math.Sin(fk*dt) + work[n-k-1] = 2 * math.Cos(fk*dt) + } + Rffti(n-1, work[n:], ifac) +} + +// Cost computes the Discrete Fourier Cosine Transform of an even +// sequence x(i). The transform is defined below at output parameter x. +// +// Cost is the unnormalized inverse of itself since a call of Cost +// followed by another call of Cost will multiply the input sequence +// x by 2*(n-1). The transform is defined below at output parameter x +// +// The array work which is used by subroutine Cost must be +// initialized by calling subroutine Costi(n,work). +// +// Input parameters +// +// n The length of the sequence x. n must be greater than 1. +// The method is most efficient when n-1 is a product of +// small primes. +// +// x An array which contains the sequence to be transformed. +// +// work A work array which must be dimensioned at least 3*n +// in the program that calls Cost. The work array must be +// initialized by calling subroutine Costi(n,work) and a +// different work array must be used for each different +// value of n. 
This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=1,...,n +// x(i) = x(1)+(-1)**(i-1)*x(n) +// + the sum from k=2 to k=n-1 +// 2*x(k)*cos((k-1)*(i-1)*pi/(n-1)) +// +// A call of Cost followed by another call of +// Cost will multiply the sequence x by 2*(n-1). +// Hence Cost is the unnormalized inverse +// of itself. +// +// work Contains initialization calculations which must not be +// destroyed between calls of Cost. +// +// ifac An integer work array of length at least 15. +func Cost(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n < 2 { + return + } + switch n { + case 2: + x[0], x[1] = x[0]+x[1], x[0]-x[1] + case 3: + x1p3 := x[0] + x[2] + tx2 := 2 * x[1] + x[1] = x[0] - x[2] + x[0] = x1p3 + tx2 + x[2] = x1p3 - tx2 + default: + c1 := x[0] - x[n-1] + x[0] += x[n-1] + for k := 1; k < n/2; k++ { + kc := n - k - 1 + t1 := x[k] + x[kc] + t2 := x[k] - x[kc] + c1 += work[kc] * t2 + t2 *= work[k] + x[k] = t1 - t2 + x[kc] = t1 + t2 + } + if n%2 != 0 { + x[n/2] *= 2 + } + Rfftf(n-1, x, work[n:], ifac) + xim2 := x[1] + x[1] = c1 + for i := 3; i < n; i += 2 { + xi := x[i] + x[i] = x[i-2] - x[i-1] + x[i-1] = xim2 + xim2 = xi + } + if n%2 != 0 { + x[n-1] = xim2 + } + } +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/doc.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/doc.go new file mode 100644 index 0000000..042317c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fftpack implements Discrete Fourier Transform functions +// ported from the Fortran implementation of FFTPACK. +package fftpack // import "gonum.org/v1/gonum/fourier/internal/fftpack" diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/rfft.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/rfft.go new file mode 100644 index 0000000..9bb3738 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/rfft.go @@ -0,0 +1,1151 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK rfft functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. + +package fftpack + +import "math" + +// Rffti initializes the array work which is used in both Rfftf +// and Rfftb. The prime factorization of n together with a +// tabulation of the trigonometric functions are computed and +// stored in work. +// +// Input parameter: +// +// n The length of the sequence to be transformed. +// +// Output parameters: +// +// work A work array which must be dimensioned at least 2*n. +// The same work array can be used for both Rfftf and Rfftb +// as long as n remains unchanged. different work arrays +// are required for different values of n. The contents of +// work must not be changed between calls of Rfftf or Rfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length of at least 15. 
+func Rffti(n int, work []float64, ifac []int) { + if len(work) < 2*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + rffti1(n, work[n:2*n], ifac[:15]) +} + +func rffti1(n int, wa []float64, ifac []int) { + ntryh := [4]int{4, 2, 3, 5} + + nl := n + nf := 0 + +outer: + for j, ntry := 0, 0; ; j++ { + if j < 4 { + ntry = ntryh[j] + } else { + ntry += 2 + } + for { + if nl%ntry != 0 { + continue outer + } + + ifac[nf+2] = ntry + nl /= ntry + nf++ + + if ntry == 2 && nf != 1 { + for i := 1; i < nf; i++ { + ib := nf - i + 1 + ifac[ib+1] = ifac[ib] + } + ifac[2] = 2 + } + + if nl == 1 { + break outer + } + } + } + + ifac[0] = n + ifac[1] = nf + if nf == 1 { + return + } + argh := 2 * math.Pi / float64(n) + + is := 0 + l1 := 1 + for k1 := 0; k1 < nf-1; k1++ { + ip := ifac[k1+2] + ld := 0 + l2 := l1 * ip + ido := n / l2 + for j := 0; j < ip-1; j++ { + ld += l1 + i := is + fi := 0. + argld := float64(ld) * argh + for ii := 2; ii < ido; ii += 2 { + fi++ + arg := fi * argld + wa[i] = math.Cos(arg) + wa[i+1] = math.Sin(arg) + i += 2 + } + is += ido + } + l1 = l2 + } +} + +// Rfftf computes the Fourier coefficients of a real perodic sequence +// (Fourier analysis). The transform is defined below at output +// parameter r. +// +// Input parameters: +// +// n The length of the array r to be transformed. The method +// is most efficient when n is a product of small primes. +// n may change so long as different work arrays are provided. +// +// r A real array of length n which contains the sequence +// to be transformed. +// +// work a work array which must be dimensioned at least 2*n. +// in the program that calls Rfftf. the work array must be +// initialized by calling subroutine rffti(n,work,ifac) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged. Thus subsequent +// transforms can be obtained faster than the first. +// The same work array can be used by Rfftf and Rfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length of at least 15. +// +// Output parameters: +// +// r r[0] = the sum from i=0 to i=n-1 of r[i] +// +// if n is even set l=n/2, if n is odd set l = (n+1)/2 +// then for k = 1, ..., l-1 +// r[2*k-1] = the sum from i = 0 to i = n-1 of +// r[i]*cos(k*i*2*pi/n) +// r[2*k] = the sum from i = 0 to i = n-1 of +// -r[i]*sin(k*i*2*pi/n) +// +// if n is even +// r[n-1] = the sum from i = 0 to i = n-1 of +// (-1)^i*r[i] +// +// This transform is unnormalized since a call of Rfftf +// followed by a call of Rfftb will multiply the input +// sequence by n. +// +// work contains results which must not be destroyed between +// calls of Rfftf or Rfftb. +// ifac contains results which must not be destroyed between +// calls of Rfftf or Rfftb. 
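+//
+// A minimal usage sketch (an illustrative addition; the sequence values are
+// arbitrary):
+//
+//	r := []float64{1, 2, 3, 4, 5, 6, 7, 8}
+//	work := make([]float64, 2*len(r))
+//	ifac := make([]int, 15)
+//	Rffti(len(r), work, ifac)
+//	Rfftf(len(r), r, work, ifac)
+//	// r now holds the unnormalized coefficients described above.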
+func Rfftf(n int, r, work []float64, ifac []int) { + if len(r) < n { + panic("fourier: short sequence") + } + if len(work) < 2*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + rfftf1(n, r[:n], work[:n], work[n:2*n], ifac[:15]) +} + +func rfftf1(n int, c, ch, wa []float64, ifac []int) { + nf := ifac[1] + na := true + l2 := n + iw := n - 1 + + for k1 := 1; k1 <= nf; k1++ { + kh := nf - k1 + ip := ifac[kh+2] + l1 := l2 / ip + ido := n / l2 + idl1 := ido * l1 + iw -= (ip - 1) * ido + na = !na + + switch ip { + case 4: + ix2 := iw + ido + ix3 := ix2 + ido + if na { + radf4(ido, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:]) + } else { + radf4(ido, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:]) + } + case 2: + if na { + radf2(ido, l1, ch, c, wa[iw:]) + } else { + radf2(ido, l1, c, ch, wa[iw:]) + } + case 3: + ix2 := iw + ido + if na { + radf3(ido, l1, ch, c, wa[iw:], wa[ix2:]) + } else { + radf3(ido, l1, c, ch, wa[iw:], wa[ix2:]) + } + case 5: + ix2 := iw + ido + ix3 := ix2 + ido + ix4 := ix3 + ido + if na { + radf5(ido, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:]) + } else { + radf5(ido, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:]) + } + default: + if ido == 1 { + na = !na + } + if na { + radfg(ido, ip, l1, idl1, ch, ch, ch, c, c, wa[iw:]) + na = false + } else { + radfg(ido, ip, l1, idl1, c, c, c, ch, ch, wa[iw:]) + na = true + } + } + + l2 = l1 + } + + if na { + return + } + for i := 0; i < n; i++ { + c[i] = ch[i] + } +} + +func radf2(ido, l1 int, cc, ch, wa1 []float64) { + cc3 := newThreeArray(ido, l1, 2, cc) + ch3 := newThreeArray(ido, 2, l1, ch) + + for k := 0; k < l1; k++ { + ch3.set(0, 0, k, cc3.at(0, k, 0)+cc3.at(0, k, 1)) + ch3.set(ido-1, 1, k, cc3.at(0, k, 0)-cc3.at(0, k, 1)) + } + if ido < 2 { + return + } + if ido > 2 { + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + tr2 := wa1[i-2]*cc3.at(i-1, k, 1) + wa1[i-1]*cc3.at(i, k, 1) + ti2 := wa1[i-2]*cc3.at(i, k, 1) - wa1[i-1]*cc3.at(i-1, k, 1) + ch3.set(i, 0, k, cc3.at(i, k, 0)+ti2) + ch3.set(ic, 1, k, ti2-cc3.at(i, k, 0)) + ch3.set(i-1, 0, k, cc3.at(i-1, k, 0)+tr2) + ch3.set(ic-1, 1, k, cc3.at(i-1, k, 0)-tr2) + } + } + if ido%2 == 1 { + return + } + } + for k := 0; k < l1; k++ { + ch3.set(0, 1, k, -cc3.at(ido-1, k, 1)) + ch3.set(ido-1, 0, k, cc3.at(ido-1, k, 0)) + } +} + +func radf3(ido, l1 int, cc, ch, wa1, wa2 []float64) { + const ( + taur = -0.5 + taui = 0.866025403784439 // sqrt(3)/2 + ) + + cc3 := newThreeArray(ido, l1, 3, cc) + ch3 := newThreeArray(ido, 3, l1, ch) + + for k := 0; k < l1; k++ { + cr2 := cc3.at(0, k, 1) + cc3.at(0, k, 2) + ch3.set(0, 0, k, cc3.at(0, k, 0)+cr2) + ch3.set(0, 2, k, taui*(cc3.at(0, k, 2)-cc3.at(0, k, 1))) + ch3.set(ido-1, 1, k, cc3.at(0, k, 0)+taur*cr2) + } + if ido < 2 { + return + } + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + dr2 := wa1[i-2]*cc3.at(i-1, k, 1) + wa1[i-1]*cc3.at(i, k, 1) + di2 := wa1[i-2]*cc3.at(i, k, 1) - wa1[i-1]*cc3.at(i-1, k, 1) + dr3 := wa2[i-2]*cc3.at(i-1, k, 2) + wa2[i-1]*cc3.at(i, k, 2) + di3 := wa2[i-2]*cc3.at(i, k, 2) - wa2[i-1]*cc3.at(i-1, k, 2) + cr2 := dr2 + dr3 + ci2 := di2 + di3 + ch3.set(i-1, 0, k, cc3.at(i-1, k, 0)+cr2) + ch3.set(i, 0, k, cc3.at(i, k, 0)+ci2) + tr2 := cc3.at(i-1, k, 0) + taur*cr2 + ti2 := cc3.at(i, k, 0) + taur*ci2 + tr3 := taui * (di2 - di3) + ti3 := taui * (dr3 - dr2) + ch3.set(i-1, 2, k, tr2+tr3) + ch3.set(ic-1, 1, k, tr2-tr3) + ch3.set(i, 2, k, ti2+ti3) + 
ch3.set(ic, 1, k, ti3-ti2) + } + } +} + +func radf4(ido, l1 int, cc, ch, wa1, wa2, wa3 []float64) { + const hsqt2 = math.Sqrt2 / 2 + + cc3 := newThreeArray(ido, l1, 4, cc) + ch3 := newThreeArray(ido, 4, l1, ch) + + for k := 0; k < l1; k++ { + tr1 := cc3.at(0, k, 1) + cc3.at(0, k, 3) + tr2 := cc3.at(0, k, 0) + cc3.at(0, k, 2) + ch3.set(0, 0, k, tr1+tr2) + ch3.set(ido-1, 3, k, tr2-tr1) + ch3.set(ido-1, 1, k, cc3.at(0, k, 0)-cc3.at(0, k, 2)) + ch3.set(0, 2, k, cc3.at(0, k, 3)-cc3.at(0, k, 1)) + } + if ido < 2 { + return + } + if ido > 2 { + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + cr2 := wa1[i-2]*cc3.at(i-1, k, 1) + wa1[i-1]*cc3.at(i, k, 1) + ci2 := wa1[i-2]*cc3.at(i, k, 1) - wa1[i-1]*cc3.at(i-1, k, 1) + cr3 := wa2[i-2]*cc3.at(i-1, k, 2) + wa2[i-1]*cc3.at(i, k, 2) + ci3 := wa2[i-2]*cc3.at(i, k, 2) - wa2[i-1]*cc3.at(i-1, k, 2) + cr4 := wa3[i-2]*cc3.at(i-1, k, 3) + wa3[i-1]*cc3.at(i, k, 3) + ci4 := wa3[i-2]*cc3.at(i, k, 3) - wa3[i-1]*cc3.at(i-1, k, 3) + tr1 := cr2 + cr4 + tr4 := cr4 - cr2 + ti1 := ci2 + ci4 + ti4 := ci2 - ci4 + ti2 := cc3.at(i, k, 0) + ci3 + ti3 := cc3.at(i, k, 0) - ci3 + tr2 := cc3.at(i-1, k, 0) + cr3 + tr3 := cc3.at(i-1, k, 0) - cr3 + ch3.set(i-1, 0, k, tr1+tr2) + ch3.set(ic-1, 3, k, tr2-tr1) + ch3.set(i, 0, k, ti1+ti2) + ch3.set(ic, 3, k, ti1-ti2) + ch3.set(i-1, 2, k, ti4+tr3) + ch3.set(ic-1, 1, k, tr3-ti4) + ch3.set(i, 2, k, tr4+ti3) + ch3.set(ic, 1, k, tr4-ti3) + } + } + + if ido%2 == 1 { + return + } + } + for k := 0; k < l1; k++ { + ti1 := -hsqt2 * (cc3.at(ido-1, k, 1) + cc3.at(ido-1, k, 3)) + tr1 := hsqt2 * (cc3.at(ido-1, k, 1) - cc3.at(ido-1, k, 3)) + ch3.set(ido-1, 0, k, tr1+cc3.at(ido-1, k, 0)) + ch3.set(ido-1, 2, k, cc3.at(ido-1, k, 0)-tr1) + ch3.set(0, 1, k, ti1-cc3.at(ido-1, k, 2)) + ch3.set(0, 3, k, ti1+cc3.at(ido-1, k, 2)) + } +} + +func radf5(ido, l1 int, cc, ch, wa1, wa2, wa3, wa4 []float64) { + const ( + tr11 = 0.309016994374947 + ti11 = 0.951056516295154 + tr12 = -0.809016994374947 + ti12 = 0.587785252292473 + ) + + cc3 := newThreeArray(ido, l1, 5, cc) + ch3 := newThreeArray(ido, 5, l1, ch) + + for k := 0; k < l1; k++ { + cr2 := cc3.at(0, k, 4) + cc3.at(0, k, 1) + ci5 := cc3.at(0, k, 4) - cc3.at(0, k, 1) + cr3 := cc3.at(0, k, 3) + cc3.at(0, k, 2) + ci4 := cc3.at(0, k, 3) - cc3.at(0, k, 2) + ch3.set(0, 0, k, cc3.at(0, k, 0)+cr2+cr3) + ch3.set(ido-1, 1, k, cc3.at(0, k, 0)+tr11*cr2+tr12*cr3) + ch3.set(0, 2, k, ti11*ci5+ti12*ci4) + ch3.set(ido-1, 3, k, cc3.at(0, k, 0)+tr12*cr2+tr11*cr3) + ch3.set(0, 4, k, ti12*ci5-ti11*ci4) + } + + if ido < 2 { + return + } + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + dr2 := wa1[i-2]*cc3.at(i-1, k, 1) + wa1[i-1]*cc3.at(i, k, 1) + di2 := wa1[i-2]*cc3.at(i, k, 1) - wa1[i-1]*cc3.at(i-1, k, 1) + dr3 := wa2[i-2]*cc3.at(i-1, k, 2) + wa2[i-1]*cc3.at(i, k, 2) + di3 := wa2[i-2]*cc3.at(i, k, 2) - wa2[i-1]*cc3.at(i-1, k, 2) + dr4 := wa3[i-2]*cc3.at(i-1, k, 3) + wa3[i-1]*cc3.at(i, k, 3) + di4 := wa3[i-2]*cc3.at(i, k, 3) - wa3[i-1]*cc3.at(i-1, k, 3) + dr5 := wa4[i-2]*cc3.at(i-1, k, 4) + wa4[i-1]*cc3.at(i, k, 4) + di5 := wa4[i-2]*cc3.at(i, k, 4) - wa4[i-1]*cc3.at(i-1, k, 4) + cr2 := dr2 + dr5 + ci5 := dr5 - dr2 + cr5 := di2 - di5 + ci2 := di2 + di5 + cr3 := dr3 + dr4 + ci4 := dr4 - dr3 + cr4 := di3 - di4 + ci3 := di3 + di4 + ch3.set(i-1, 0, k, cc3.at(i-1, k, 0)+cr2+cr3) + ch3.set(i, 0, k, cc3.at(i, k, 0)+ci2+ci3) + tr2 := cc3.at(i-1, k, 0) + tr11*cr2 + tr12*cr3 + ti2 := cc3.at(i, k, 0) + tr11*ci2 + tr12*ci3 + tr3 := 
cc3.at(i-1, k, 0) + tr12*cr2 + tr11*cr3 + ti3 := cc3.at(i, k, 0) + tr12*ci2 + tr11*ci3 + tr5 := ti11*cr5 + ti12*cr4 + ti5 := ti11*ci5 + ti12*ci4 + tr4 := ti12*cr5 - ti11*cr4 + ti4 := ti12*ci5 - ti11*ci4 + ch3.set(i-1, 2, k, tr2+tr5) + ch3.set(ic-1, 1, k, tr2-tr5) + ch3.set(i, 2, k, ti2+ti5) + ch3.set(ic, 1, k, ti5-ti2) + ch3.set(i-1, 4, k, tr3+tr4) + ch3.set(ic-1, 3, k, tr3-tr4) + ch3.set(i, 4, k, ti3+ti4) + ch3.set(ic, 3, k, ti4-ti3) + } + } +} + +func radfg(ido, ip, l1, idl1 int, cc, c1, c2, ch, ch2, wa []float64) { + cc3 := newThreeArray(ido, ip, l1, cc) + c13 := newThreeArray(ido, l1, ip, c1) + ch3 := newThreeArray(ido, l1, ip, ch) + c2m := newTwoArray(idl1, ip, c2) + ch2m := newTwoArray(idl1, ip, ch2) + + arg := 2 * math.Pi / float64(ip) + dcp := math.Cos(arg) + dsp := math.Sin(arg) + ipph := (ip + 1) / 2 + nbd := (ido - 1) / 2 + + if ido == 1 { + for ik := 0; ik < idl1; ik++ { + c2m.set(ik, 0, ch2m.at(ik, 0)) + } + } else { + for ik := 0; ik < idl1; ik++ { + ch2m.set(ik, 0, c2m.at(ik, 0)) + } + for j := 1; j < ip; j++ { + for k := 0; k < l1; k++ { + ch3.set(0, k, j, c13.at(0, k, j)) + } + } + + is := -ido - 1 + if nbd > l1 { + for j := 1; j < ip; j++ { + is += ido + for k := 0; k < l1; k++ { + idij := is + for i := 2; i < ido; i += 2 { + idij += 2 + ch3.set(i-1, k, j, wa[idij-1]*c13.at(i-1, k, j)+wa[idij]*c13.at(i, k, j)) + ch3.set(i, k, j, wa[idij-1]*c13.at(i, k, j)-wa[idij]*c13.at(i-1, k, j)) + } + } + } + } else { + for j := 1; j < ip; j++ { + is += ido + idij := is + for i := 2; i < ido; i += 2 { + idij += 2 + for k := 0; k < l1; k++ { + ch3.set(i-1, k, j, wa[idij-1]*c13.at(i-1, k, j)+wa[idij]*c13.at(i, k, j)) + ch3.set(i, k, j, wa[idij-1]*c13.at(i, k, j)-wa[idij]*c13.at(i-1, k, j)) + } + } + } + } + if nbd < l1 { + for j := 1; j < ipph; j++ { + jc := ip - j + for i := 2; i < ido; i += 2 { + for k := 0; k < l1; k++ { + c13.set(i-1, k, j, ch3.at(i-1, k, j)+ch3.at(i-1, k, jc)) + c13.set(i-1, k, jc, ch3.at(i, k, j)-ch3.at(i, k, jc)) + c13.set(i, k, j, ch3.at(i, k, j)+ch3.at(i, k, jc)) + c13.set(i, k, jc, ch3.at(i-1, k, jc)-ch3.at(i-1, k, j)) + } + } + } + } else { + for j := 1; j < ipph; j++ { + jc := ip - j + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + c13.set(i-1, k, j, ch3.at(i-1, k, j)+ch3.at(i-1, k, jc)) + c13.set(i-1, k, jc, ch3.at(i, k, j)-ch3.at(i, k, jc)) + c13.set(i, k, j, ch3.at(i, k, j)+ch3.at(i, k, jc)) + c13.set(i, k, jc, ch3.at(i-1, k, jc)-ch3.at(i-1, k, j)) + } + } + } + } + } + + for j := 1; j < ipph; j++ { + jc := ip - j + for k := 0; k < l1; k++ { + c13.set(0, k, j, ch3.at(0, k, j)+ch3.at(0, k, jc)) + c13.set(0, k, jc, ch3.at(0, k, jc)-ch3.at(0, k, j)) + } + } + ar1 := 1.0 + ai1 := 0.0 + for l := 1; l < ipph; l++ { + lc := ip - l + ar1h := dcp*ar1 - dsp*ai1 + ai1 = dcp*ai1 + dsp*ar1 + ar1 = ar1h + for ik := 0; ik < idl1; ik++ { + ch2m.set(ik, l, c2m.at(ik, 0)+ar1*c2m.at(ik, 1)) + ch2m.set(ik, lc, ai1*c2m.at(ik, ip-1)) + } + dc2 := ar1 + ds2 := ai1 + ar2 := ar1 + ai2 := ai1 + for j := 2; j < ipph; j++ { + jc := ip - j + ar2h := dc2*ar2 - ds2*ai2 + ai2 = dc2*ai2 + ds2*ar2 + ar2 = ar2h + for ik := 0; ik < idl1; ik++ { + ch2m.add(ik, l, ar2*c2m.at(ik, j)) + ch2m.add(ik, lc, ai2*c2m.at(ik, jc)) + } + } + } + for j := 1; j < ipph; j++ { + for ik := 0; ik < idl1; ik++ { + ch2m.add(ik, 0, c2m.at(ik, j)) + } + } + + if ido < l1 { + for i := 0; i < ido; i++ { + for k := 0; k < l1; k++ { + cc3.set(i, 0, k, ch3.at(i, k, 0)) + } + } + } else { + for k := 0; k < l1; k++ { + for i := 0; i < ido; i++ { + cc3.set(i, 0, k, ch3.at(i, k, 0)) + } + } + } + for j := 1; 
j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for k := 0; k < l1; k++ { + cc3.set(ido-1, j2-1, k, ch3.at(0, k, j)) + cc3.set(0, j2, k, ch3.at(0, k, jc)) + } + } + + if ido == 1 { + return + } + if nbd < l1 { + for j := 1; j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for i := 2; i < ido; i += 2 { + ic := ido - i + for k := 0; k < l1; k++ { + cc3.set(i-1, j2, k, ch3.at(i-1, k, j)+ch3.at(i-1, k, jc)) + cc3.set(ic-1, j2-1, k, ch3.at(i-1, k, j)-ch3.at(i-1, k, jc)) + cc3.set(i, j2, k, ch3.at(i, k, j)+ch3.at(i, k, jc)) + cc3.set(ic, j2-1, k, ch3.at(i, k, jc)-ch3.at(i, k, j)) + } + } + } + return + } + for j := 1; j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := ido - i + cc3.set(i-1, j2, k, ch3.at(i-1, k, j)+ch3.at(i-1, k, jc)) + cc3.set(ic-1, j2-1, k, ch3.at(i-1, k, j)-ch3.at(i-1, k, jc)) + cc3.set(i, j2, k, ch3.at(i, k, j)+ch3.at(i, k, jc)) + cc3.set(ic, j2-1, k, ch3.at(i, k, jc)-ch3.at(i, k, j)) + } + } + } +} + +// Rfftb computes the real perodic sequence from its Fourier +// coefficients (Fourier synthesis). The transform is defined +// below at output parameter r. +// +// Input parameters +// +// n The length of the array r to be transformed. The method +// is most efficient when n is a product of small primes. +// n may change so long as different work arrays are provided. +// +// r A real array of length n which contains the sequence +// to be transformed. +// +// work A work array which must be dimensioned at least 2*n. +// in the program that calls Rfftb. The work array must be +// initialized by calling subroutine rffti(n,work,ifac) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// The same work array can be used by Rfftf and Rfftb. +// +// ifac A work array containing the factors of n. ifac must have +// length of at least 15. +// +// output parameters +// +// r for n even and for i = 0, ..., n +// r[i] = r[0]+(-1)^i*r[n-1] +// plus the sum from k=1 to k=n/2-1 of +// 2*r(2*k-1)*cos(k*i*2*pi/n) +// -2*r(2*k)*sin(k*i*2*pi/n) +// +// for n odd and for i = 0, ..., n-1 +// r[i] = r[0] plus the sum from k=1 to k=(n-1)/2 of +// 2*r(2*k-1)*cos(k*i*2*pi/n) +// -2*r(2*k)*sin(k*i*2*pi/n) +// +// This transform is unnormalized since a call of Rfftf +// followed by a call of Rfftb will multiply the input +// sequence by n. +// +// work Contains results which must not be destroyed between +// calls of Rfftf or Rfftb. +// ifac Contains results which must not be destroyed between +// calls of Rfftf or Rfftb. 
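+//
+// A minimal round-trip sketch (an illustrative addition; r, work and ifac are
+// assumed to be set up as described for Rffti and Rfftf):
+//
+//	Rfftf(len(r), r, work, ifac)
+//	Rfftb(len(r), r, work, ifac)
+//	for i := range r {
+//		r[i] /= float64(len(r)) // undo the factor of n from the round trip
+//	}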
+func Rfftb(n int, r, work []float64, ifac []int) { + if len(r) < n { + panic("fourier: short sequence") + } + if len(work) < 2*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + rfftb1(n, r[:n], work[:n], work[n:2*n], ifac[:15]) +} + +func rfftb1(n int, c, ch, wa []float64, ifac []int) { + nf := ifac[1] + na := false + l1 := 1 + iw := 0 + + for k1 := 1; k1 <= nf; k1++ { + ip := ifac[k1+1] + l2 := ip * l1 + ido := n / l2 + idl1 := ido * l1 + + switch ip { + case 4: + ix2 := iw + ido + ix3 := ix2 + ido + if na { + radb4(ido, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:]) + } else { + radb4(ido, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:]) + } + na = !na + case 2: + if na { + radb2(ido, l1, ch, c, wa[iw:]) + } else { + radb2(ido, l1, c, ch, wa[iw:]) + } + na = !na + case 3: + ix2 := iw + ido + if na { + radb3(ido, l1, ch, c, wa[iw:], wa[ix2:]) + } else { + radb3(ido, l1, c, ch, wa[iw:], wa[ix2:]) + } + na = !na + case 5: + ix2 := iw + ido + ix3 := ix2 + ido + ix4 := ix3 + ido + if na { + radb5(ido, l1, ch, c, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:]) + } else { + radb5(ido, l1, c, ch, wa[iw:], wa[ix2:], wa[ix3:], wa[ix4:]) + } + na = !na + default: + if na { + radbg(ido, ip, l1, idl1, ch, ch, ch, c, c, wa[iw:]) + } else { + radbg(ido, ip, l1, idl1, c, c, c, ch, ch, wa[iw:]) + } + if ido == 1 { + na = !na + } + } + + l1 = l2 + iw += (ip - 1) * ido + } + + if na { + for i := 0; i < n; i++ { + c[i] = ch[i] + } + } +} + +func radb2(ido, l1 int, cc, ch, wa1 []float64) { + cc3 := newThreeArray(ido, 2, l1, cc) + ch3 := newThreeArray(ido, l1, 2, ch) + + for k := 0; k < l1; k++ { + ch3.set(0, k, 0, cc3.at(0, 0, k)+cc3.at(ido-1, 1, k)) + ch3.set(0, k, 1, cc3.at(0, 0, k)-cc3.at(ido-1, 1, k)) + } + + if ido < 2 { + return + } + if ido > 2 { + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+cc3.at(ic-1, 1, k)) + tr2 := cc3.at(i-1, 0, k) - cc3.at(ic-1, 1, k) + ch3.set(i, k, 0, cc3.at(i, 0, k)-cc3.at(ic, 1, k)) + ti2 := cc3.at(i, 0, k) + cc3.at(ic, 1, k) + ch3.set(i-1, k, 1, wa1[i-2]*tr2-wa1[i-1]*ti2) + ch3.set(i, k, 1, wa1[i-2]*ti2+wa1[i-1]*tr2) + } + } + + if ido%2 == 1 { + return + } + } + for k := 0; k < l1; k++ { + ch3.set(ido-1, k, 0, 2*cc3.at(ido-1, 0, k)) + ch3.set(ido-1, k, 1, -2*cc3.at(0, 1, k)) + } +} + +func radb3(ido, l1 int, cc, ch, wa1, wa2 []float64) { + const ( + taur = -0.5 + taui = 0.866025403784439 // sqrt(3)/2 + ) + + cc3 := newThreeArray(ido, 3, l1, cc) + ch3 := newThreeArray(ido, l1, 3, ch) + + for k := 0; k < l1; k++ { + tr2 := cc3.at(ido-1, 1, k) + cc3.at(ido-1, 1, k) + cr2 := cc3.at(0, 0, k) + taur*tr2 + ch3.set(0, k, 0, cc3.at(0, 0, k)+tr2) + ci3 := taui * (cc3.at(0, 2, k) + cc3.at(0, 2, k)) + ch3.set(0, k, 1, cr2-ci3) + ch3.set(0, k, 2, cr2+ci3) + } + + if ido == 1 { + return + } + + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + tr2 := cc3.at(i-1, 2, k) + cc3.at(ic-1, 1, k) + cr2 := cc3.at(i-1, 0, k) + taur*tr2 + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+tr2) + ti2 := cc3.at(i, 2, k) - cc3.at(ic, 1, k) + ci2 := cc3.at(i, 0, k) + taur*ti2 + ch3.set(i, k, 0, cc3.at(i, 0, k)+ti2) + cr3 := taui * (cc3.at(i-1, 2, k) - cc3.at(ic-1, 1, k)) + ci3 := taui * (cc3.at(i, 2, k) + cc3.at(ic, 1, k)) + dr2 := cr2 - ci3 + dr3 := cr2 + ci3 + di2 := ci2 + cr3 + di3 := ci2 - cr3 + ch3.set(i-1, k, 1, wa1[i-2]*dr2-wa1[i-1]*di2) + ch3.set(i, k, 1, wa1[i-2]*di2+wa1[i-1]*dr2) + ch3.set(i-1, k, 2, 
wa2[i-2]*dr3-wa2[i-1]*di3) + ch3.set(i, k, 2, wa2[i-2]*di3+wa2[i-1]*dr3) + } + } +} + +func radb4(ido, l1 int, cc, ch, wa1, wa2, wa3 []float64) { + cc3 := newThreeArray(ido, 4, l1, cc) + ch3 := newThreeArray(ido, l1, 4, ch) + + for k := 0; k < l1; k++ { + tr1 := cc3.at(0, 0, k) - cc3.at(ido-1, 3, k) + tr2 := cc3.at(0, 0, k) + cc3.at(ido-1, 3, k) + tr3 := cc3.at(ido-1, 1, k) + cc3.at(ido-1, 1, k) + tr4 := cc3.at(0, 2, k) + cc3.at(0, 2, k) + ch3.set(0, k, 0, tr2+tr3) + ch3.set(0, k, 1, tr1-tr4) + ch3.set(0, k, 2, tr2-tr3) + ch3.set(0, k, 3, tr1+tr4) + } + + if ido < 2 { + return + } + if ido > 2 { + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + ti1 := cc3.at(i, 0, k) + cc3.at(ic, 3, k) + ti2 := cc3.at(i, 0, k) - cc3.at(ic, 3, k) + ti3 := cc3.at(i, 2, k) - cc3.at(ic, 1, k) + tr4 := cc3.at(i, 2, k) + cc3.at(ic, 1, k) + tr1 := cc3.at(i-1, 0, k) - cc3.at(ic-1, 3, k) + tr2 := cc3.at(i-1, 0, k) + cc3.at(ic-1, 3, k) + ti4 := cc3.at(i-1, 2, k) - cc3.at(ic-1, 1, k) + tr3 := cc3.at(i-1, 2, k) + cc3.at(ic-1, 1, k) + ch3.set(i-1, k, 0, tr2+tr3) + cr3 := tr2 - tr3 + ch3.set(i, k, 0, ti2+ti3) + ci3 := ti2 - ti3 + cr2 := tr1 - tr4 + cr4 := tr1 + tr4 + ci2 := ti1 + ti4 + ci4 := ti1 - ti4 + ch3.set(i-1, k, 1, wa1[i-2]*cr2-wa1[i-1]*ci2) + ch3.set(i, k, 1, wa1[i-2]*ci2+wa1[i-1]*cr2) + ch3.set(i-1, k, 2, wa2[i-2]*cr3-wa2[i-1]*ci3) + ch3.set(i, k, 2, wa2[i-2]*ci3+wa2[i-1]*cr3) + ch3.set(i-1, k, 3, wa3[i-2]*cr4-wa3[i-1]*ci4) + ch3.set(i, k, 3, wa3[i-2]*ci4+wa3[i-1]*cr4) + } + } + + if ido%2 == 1 { + return + } + } + for k := 0; k < l1; k++ { + ti1 := cc3.at(0, 1, k) + cc3.at(0, 3, k) + ti2 := cc3.at(0, 3, k) - cc3.at(0, 1, k) + tr1 := cc3.at(ido-1, 0, k) - cc3.at(ido-1, 2, k) + tr2 := cc3.at(ido-1, 0, k) + cc3.at(ido-1, 2, k) + ch3.set(ido-1, k, 0, tr2+tr2) + ch3.set(ido-1, k, 1, math.Sqrt2*(tr1-ti1)) + ch3.set(ido-1, k, 2, ti2+ti2) + ch3.set(ido-1, k, 3, -math.Sqrt2*(tr1+ti1)) + } +} + +func radb5(ido, l1 int, cc, ch, wa1, wa2, wa3, wa4 []float64) { + const ( + tr11 = 0.309016994374947 + ti11 = 0.951056516295154 + tr12 = -0.809016994374947 + ti12 = 0.587785252292473 + ) + + cc3 := newThreeArray(ido, 5, l1, cc) + ch3 := newThreeArray(ido, l1, 5, ch) + + for k := 0; k < l1; k++ { + ti5 := cc3.at(0, 2, k) + cc3.at(0, 2, k) + ti4 := cc3.at(0, 4, k) + cc3.at(0, 4, k) + tr2 := cc3.at(ido-1, 1, k) + cc3.at(ido-1, 1, k) + tr3 := cc3.at(ido-1, 3, k) + cc3.at(ido-1, 3, k) + ch3.set(0, k, 0, cc3.at(0, 0, k)+tr2+tr3) + cr2 := cc3.at(0, 0, k) + tr11*tr2 + tr12*tr3 + cr3 := cc3.at(0, 0, k) + tr12*tr2 + tr11*tr3 + ci5 := ti11*ti5 + ti12*ti4 + ci4 := ti12*ti5 - ti11*ti4 + ch3.set(0, k, 1, cr2-ci5) + ch3.set(0, k, 2, cr3-ci4) + ch3.set(0, k, 3, cr3+ci4) + ch3.set(0, k, 4, cr2+ci5) + } + + if ido == 1 { + return + } + + idp2 := ido + 1 + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := idp2 - (i + 1) + ti5 := cc3.at(i, 2, k) + cc3.at(ic, 1, k) + ti2 := cc3.at(i, 2, k) - cc3.at(ic, 1, k) + ti4 := cc3.at(i, 4, k) + cc3.at(ic, 3, k) + ti3 := cc3.at(i, 4, k) - cc3.at(ic, 3, k) + tr5 := cc3.at(i-1, 2, k) - cc3.at(ic-1, 1, k) + tr2 := cc3.at(i-1, 2, k) + cc3.at(ic-1, 1, k) + tr4 := cc3.at(i-1, 4, k) - cc3.at(ic-1, 3, k) + tr3 := cc3.at(i-1, 4, k) + cc3.at(ic-1, 3, k) + ch3.set(i-1, k, 0, cc3.at(i-1, 0, k)+tr2+tr3) + ch3.set(i, k, 0, cc3.at(i, 0, k)+ti2+ti3) + cr2 := cc3.at(i-1, 0, k) + tr11*tr2 + tr12*tr3 + ci2 := cc3.at(i, 0, k) + tr11*ti2 + tr12*ti3 + cr3 := cc3.at(i-1, 0, k) + tr12*tr2 + tr11*tr3 + ci3 := cc3.at(i, 0, k) + tr12*ti2 + tr11*ti3 + cr5 := ti11*tr5 + 
ti12*tr4 + ci5 := ti11*ti5 + ti12*ti4 + cr4 := ti12*tr5 - ti11*tr4 + ci4 := ti12*ti5 - ti11*ti4 + dr3 := cr3 - ci4 + dr4 := cr3 + ci4 + di3 := ci3 + cr4 + di4 := ci3 - cr4 + dr5 := cr2 + ci5 + dr2 := cr2 - ci5 + di5 := ci2 - cr5 + di2 := ci2 + cr5 + ch3.set(i-1, k, 1, wa1[i-2]*dr2-wa1[i-1]*di2) + ch3.set(i, k, 1, wa1[i-2]*di2+wa1[i-1]*dr2) + ch3.set(i-1, k, 2, wa2[i-2]*dr3-wa2[i-1]*di3) + ch3.set(i, k, 2, wa2[i-2]*di3+wa2[i-1]*dr3) + ch3.set(i-1, k, 3, wa3[i-2]*dr4-wa3[i-1]*di4) + ch3.set(i, k, 3, wa3[i-2]*di4+wa3[i-1]*dr4) + ch3.set(i-1, k, 4, wa4[i-2]*dr5-wa4[i-1]*di5) + ch3.set(i, k, 4, wa4[i-2]*di5+wa4[i-1]*dr5) + } + } +} + +func radbg(ido, ip, l1, idl1 int, cc, c1, c2, ch, ch2, wa []float64) { + cc3 := newThreeArray(ido, ip, l1, cc) + c13 := newThreeArray(ido, l1, ip, c1) + ch3 := newThreeArray(ido, l1, ip, ch) + c2m := newTwoArray(idl1, ip, c2) + ch2m := newTwoArray(idl1, ip, ch2) + + arg := 2 * math.Pi / float64(ip) + dcp := math.Cos(arg) + dsp := math.Sin(arg) + ipph := (ip + 1) / 2 + nbd := (ido - 1) / 2 + + if ido < l1 { + for i := 0; i < ido; i++ { + for k := 0; k < l1; k++ { + ch3.set(i, k, 0, cc3.at(i, 0, k)) + } + } + } else { + for k := 0; k < l1; k++ { + for i := 0; i < ido; i++ { + ch3.set(i, k, 0, cc3.at(i, 0, k)) + } + } + } + + for j := 1; j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for k := 0; k < l1; k++ { + ch3.set(0, k, j, cc3.at(ido-1, j2-1, k)+cc3.at(ido-1, j2-1, k)) + ch3.set(0, k, jc, cc3.at(0, j2, k)+cc3.at(0, j2, k)) + } + } + + if ido != 1 { + if nbd < l1 { + for j := 1; j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for i := 2; i < ido; i += 2 { + ic := ido - i + for k := 0; k < l1; k++ { + ch3.set(i-1, k, j, cc3.at(i-1, j2, k)+cc3.at(ic-1, j2-1, k)) + ch3.set(i-1, k, jc, cc3.at(i-1, j2, k)-cc3.at(ic-1, j2-1, k)) + ch3.set(i, k, j, cc3.at(i, j2, k)-cc3.at(ic, j2-1, k)) + ch3.set(i, k, jc, cc3.at(i, j2, k)+cc3.at(ic, j2-1, k)) + } + } + } + } else { + for j := 1; j < ipph; j++ { + jc := ip - j + j2 := 2 * j + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ic := ido - i + ch3.set(i-1, k, j, cc3.at(i-1, j2, k)+cc3.at(ic-1, j2-1, k)) + ch3.set(i-1, k, jc, cc3.at(i-1, j2, k)-cc3.at(ic-1, j2-1, k)) + ch3.set(i, k, j, cc3.at(i, j2, k)-cc3.at(ic, j2-1, k)) + ch3.set(i, k, jc, cc3.at(i, j2, k)+cc3.at(ic, j2-1, k)) + } + } + } + } + } + + ar1 := 1.0 + ai1 := 0.0 + for l := 1; l < ipph; l++ { + lc := ip - l + ar1h := dcp*ar1 - dsp*ai1 + ai1 = dcp*ai1 + dsp*ar1 + ar1 = ar1h + for ik := 0; ik < idl1; ik++ { + c2m.set(ik, l, ch2m.at(ik, 0)+ar1*ch2m.at(ik, 1)) + c2m.set(ik, lc, ai1*ch2m.at(ik, ip-1)) + } + dc2 := ar1 + ds2 := ai1 + ar2 := ar1 + ai2 := ai1 + for j := 2; j < ipph; j++ { + jc := ip - j + ar2h := dc2*ar2 - ds2*ai2 + ai2 = dc2*ai2 + ds2*ar2 + ar2 = ar2h + for ik := 0; ik < idl1; ik++ { + c2m.add(ik, l, ar2*ch2m.at(ik, j)) + c2m.add(ik, lc, ai2*ch2m.at(ik, jc)) + } + } + } + + for j := 1; j < ipph; j++ { + for ik := 0; ik < idl1; ik++ { + ch2m.add(ik, 0, ch2m.at(ik, j)) + } + } + for j := 1; j < ipph; j++ { + jc := ip - j + for k := 0; k < l1; k++ { + ch3.set(0, k, j, c13.at(0, k, j)-c13.at(0, k, jc)) + ch3.set(0, k, jc, c13.at(0, k, j)+c13.at(0, k, jc)) + } + } + + if ido != 1 { + if nbd < l1 { + for j := 1; j < ipph; j++ { + jc := ip - j + for i := 2; i < ido; i += 2 { + for k := 0; k < l1; k++ { + ch3.set(i-1, k, j, c13.at(i-1, k, j)-c13.at(i, k, jc)) + ch3.set(i-1, k, jc, c13.at(i-1, k, j)+c13.at(i, k, jc)) + ch3.set(i, k, j, c13.at(i, k, j)+c13.at(i-1, k, jc)) + ch3.set(i, k, jc, c13.at(i, k, j)-c13.at(i-1, k, jc)) + } + } + } + } else { + 
for j := 1; j < ipph; j++ { + jc := ip - j + for k := 0; k < l1; k++ { + for i := 2; i < ido; i += 2 { + ch3.set(i-1, k, j, c13.at(i-1, k, j)-c13.at(i, k, jc)) + ch3.set(i-1, k, jc, c13.at(i-1, k, j)+c13.at(i, k, jc)) + ch3.set(i, k, j, c13.at(i, k, j)+c13.at(i-1, k, jc)) + ch3.set(i, k, jc, c13.at(i, k, j)-c13.at(i-1, k, jc)) + } + } + } + } + } + + if ido == 1 { + return + } + for ik := 0; ik < idl1; ik++ { + c2m.set(ik, 0, ch2m.at(ik, 0)) + } + for j := 1; j < ip; j++ { + for k := 0; k < l1; k++ { + c13.set(0, k, j, ch3.at(0, k, j)) + } + } + + is := -ido - 1 + if nbd > l1 { + for j := 1; j < ip; j++ { + is += ido + for k := 0; k < l1; k++ { + idij := is + for i := 2; i < ido; i += 2 { + idij += 2 + c13.set(i-1, k, j, wa[idij-1]*ch3.at(i-1, k, j)-wa[idij]*ch3.at(i, k, j)) + c13.set(i, k, j, wa[idij-1]*ch3.at(i, k, j)+wa[idij]*ch3.at(i-1, k, j)) + } + } + } + return + } + for j := 1; j < ip; j++ { + is += ido + idij := is + for i := 2; i < ido; i += 2 { + idij += 2 + for k := 0; k < l1; k++ { + c13.set(i-1, k, j, wa[idij-1]*ch3.at(i-1, k, j)-wa[idij]*ch3.at(i, k, j)) + c13.set(i, k, j, wa[idij-1]*ch3.at(i, k, j)+wa[idij]*ch3.at(i-1, k, j)) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sinq.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sinq.go new file mode 100644 index 0000000..35fac7b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sinq.go @@ -0,0 +1,179 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK sinq functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. + +package fftpack + +import "math" + +// Sinqi initializes the array work which is used in both Sinqf and +// Sinqb. The prime factorization of n together with a tabulation +// of the trigonometric functions are computed and stored in work. +// +// Input parameter +// +// n The length of the sequence to be transformed. The method +// is most efficient when n+1 is a product of small primes. +// +// Output parameter +// +// work A work array which must be dimensioned at least 3*n. +// The same work array can be used for both Sinqf and Sinqb +// as long as n remains unchanged. Different work arrays +// are required for different values of n. The contents of +// work must not be changed between calls of Sinqf or Sinqb. +// +// ifac An integer work array of length at least 15. +func Sinqi(n int, work []float64, ifac []int) { + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + dt := 0.5 * math.Pi / float64(n) + for k := range work[:n] { + work[k] = math.Cos(float64(k+1) * dt) + } + Rffti(n, work[n:], ifac) +} + +// Sinqf computes the Fast Fourier Transform of quarter wave data. +// That is, Sinqf computes the coefficients in a sine series +// representation with only odd wave numbers. The transform is +// defined below at output parameter x. +// +// Sinqb is the unnormalized inverse of Sinqf since a call of Sinqf +// followed by a call of Sinqb will multiply the input sequence x +// by 4*n. +// +// The array work which is used by subroutine Sinqf must be +// initialized by calling subroutine Sinqi(n,work). +// +// Input parameters +// +// n The length of the array x to be transformed. The method +// is most efficient when n is a product of small primes. 
+// +// x An array which contains the sequence to be transformed. +// +// work A work array which must be dimensioned at least 3*n. +// in the program that calls Sinqf. The work array must be +// initialized by calling subroutine Sinqi(n,work) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=0, ..., n-1 +// x[i] = (-1)^(i)*x[n-1] +// + the sum from k=0 to k=n-2 of +// 2*x[k]*sin((2*i+1)*k*pi/(2*n)) +// +// A call of Sinqf followed by a call of +// Sinqb will multiply the sequence x by 4*n. +// Therefore Sinqb is the unnormalized inverse +// of Sinqf. +// +// work Contains initialization calculations which must not +// be destroyed between calls of Sinqf or Sinqb. +func Sinqf(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 1 { + return + } + for k := 0; k < n/2; k++ { + kc := n - k - 1 + x[k], x[kc] = x[kc], x[k] + } + Cosqf(n, x, work, ifac) + for k := 1; k < n; k += 2 { + x[k] = -x[k] + } +} + +// Sinqb computes the Fast Fourier Transform of quarter wave data. +// That is, Sinqb computes a sequence from its representation in +// terms of a sine series with odd wave numbers. The transform is +// defined below at output parameter x. +// +// Sinqf is the unnormalized inverse of Sinqb since a call of Sinqb +// followed by a call of Sinqf will multiply the input sequence x +// by 4*n. +// +// The array work which is used by subroutine Sinqb must be +// initialized by calling subroutine Sinqi(n,work). +// +// Input parameters +// +// n The length of the array x to be transformed. The method +// is most efficient when n is a product of small primes. +// +// x An array which contains the sequence to be transformed. +// +// work A work array which must be dimensioned at least 3*n. +// in the program that calls Sinqb. The work array must be +// initialized by calling subroutine Sinqi(n,work) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=0, ..., n-1 +// x[i]= the sum from k=0 to k=n-1 of +// 4*x[k]*sin((2*k+1)*i*pi/(2*n)) +// +// A call of Sinqb followed by a call of +// Sinqf will multiply the sequence x by 4*n. +// Therefore Sinqf is the unnormalized inverse +// of Sinqb. +// +// work Contains initialization calculations which must not +// be destroyed between calls of Sinqb or Sinqf. 
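+//
+// A minimal round-trip sketch (an illustrative addition; x, work and ifac are
+// assumed to be set up as described for Sinqi and Sinqf):
+//
+//	Sinqf(n, x, work, ifac)
+//	Sinqb(n, x, work, ifac)
+//	for i := range x[:n] {
+//		x[i] /= 4 * float64(n) // undo the factor of 4*n from the round trip
+//	}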
+func Sinqb(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 3*n { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + switch n { + case 1: + x[0] *= 4 + fallthrough + case 0: + return + default: + for k := 1; k < n; k += 2 { + x[k] = -x[k] + } + Cosqb(n, x, work, ifac) + for k := 0; k < n/2; k++ { + kc := n - k - 1 + x[k], x[kc] = x[kc], x[k] + } + } +} diff --git a/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sint.go b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sint.go new file mode 100644 index 0000000..8f38d4b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/internal/fftpack/sint.go @@ -0,0 +1,146 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This is a translation of the FFTPACK sint functions by +// Paul N Swarztrauber, placed in the public domain at +// http://www.netlib.org/fftpack/. + +package fftpack + +import "math" + +// Sinti initializes the array work which is used in subroutine Sint. +// The prime factorization of n together with a tabulation of the +// trigonometric functions are computed and stored in work. +// +// Input parameter +// +// n The length of the sequence to be transformed. The method +// is most efficient when n+1 is a product of small primes. +// +// Output parameter +// +// work A work array with at least ceil(2.5*n) locations. +// Different work arrays are required for different values +// of n. The contents of work must not be changed between +// calls of Sint. +// +// ifac An integer work array of length at least 15. +func Sinti(n int, work []float64, ifac []int) { + if len(work) < 5*(n+1)/2 { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n <= 1 { + return + } + dt := math.Pi / float64(n+1) + for k := 0; k < n/2; k++ { + work[k] = 2 * math.Sin(float64(k+1)*dt) + } + Rffti(n+1, work[n/2:], ifac) +} + +// Sint computes the Discrete Fourier Sine Transform of an odd +// sequence x(i). The transform is defined below at output parameter x. +// +// Sint is the unnormalized inverse of itself since a call of Sint +// followed by another call of Sint will multiply the input sequence +// x by 2*(n+1). +// +// The array work which is used by subroutine Sint must be +// initialized by calling subroutine Sinti(n,work). +// +// Input parameters +// +// n The length of the sequence to be transformed. The method +// is most efficient when n+1 is the product of small primes. +// +// x An array which contains the sequence to be transformed. +// +// +// work A work array with dimension at least ceil(2.5*n) +// in the program that calls Sint. The work array must be +// initialized by calling subroutine Sinti(n,work) and a +// different work array must be used for each different +// value of n. This initialization does not have to be +// repeated so long as n remains unchanged thus subsequent +// transforms can be obtained faster than the first. +// +// ifac An integer work array of length at least 15. +// +// Output parameters +// +// x for i=1,...,n +// x(i)= the sum from k=1 to k=n +// 2*x(k)*sin(k*i*pi/(n+1)) +// +// A call of Sint followed by another call of +// Sint will multiply the sequence x by 2*(n+1). +// Hence Sint is the unnormalized inverse +// of itself. +// +// work Contains initialization calculations which must not be +// destroyed between calls of Sint. 
+// ifac Contains initialization calculations which must not be +// destroyed between calls of Sint. +func Sint(n int, x, work []float64, ifac []int) { + if len(x) < n { + panic("fourier: short sequence") + } + if len(work) < 5*(n+1)/2 { + panic("fourier: short work") + } + if len(ifac) < 15 { + panic("fourier: short ifac") + } + if n == 0 { + return + } + sint1(n, x, work, work[n/2:], work[n/2+n+1:], ifac) +} + +func sint1(n int, war, was, xh, x []float64, ifac []int) { + const sqrt3 = 1.73205080756888 + + for i := 0; i < n; i++ { + xh[i] = war[i] + war[i] = x[i] + } + + switch n { + case 1: + xh[0] *= 2 + case 2: + xh[0], xh[1] = sqrt3*(xh[0]+xh[1]), sqrt3*(xh[0]-xh[1]) + default: + x[0] = 0 + for k := 0; k < n/2; k++ { + kc := n - k - 1 + t1 := xh[k] - xh[kc] + t2 := was[k] * (xh[k] + xh[kc]) + x[k+1] = t1 + t2 + x[kc+1] = t2 - t1 + } + if n%2 != 0 { + x[n/2+1] = 4 * xh[n/2] + } + rfftf1(n+1, x, xh, war, ifac) + xh[0] = 0.5 * x[0] + for i := 2; i < n; i += 2 { + xh[i-1] = -x[i] + xh[i] = xh[i-2] + x[i-1] + } + if n%2 == 0 { + xh[n-1] = -x[n] + } + } + + for i := 0; i < n; i++ { + x[i] = war[i] + war[i] = xh[i] + } +} diff --git a/vendor/gonum.org/v1/gonum/fourier/quarter.go b/vendor/gonum.org/v1/gonum/fourier/quarter.go new file mode 100644 index 0000000..f332bf0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/quarter.go @@ -0,0 +1,133 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fourier + +import "gonum.org/v1/gonum/fourier/internal/fftpack" + +// QuarterWaveFFT implements Fast Fourier Transform for quarter wave data. +type QuarterWaveFFT struct { + work []float64 + ifac [15]int +} + +// NewQuarterWaveFFT returns a QuarterWaveFFT initialized for work on sequences of length n. +func NewQuarterWaveFFT(n int) *QuarterWaveFFT { + var t QuarterWaveFFT + t.Reset(n) + return &t +} + +// Len returns the length of the acceptable input. +func (t *QuarterWaveFFT) Len() int { return len(t.work) / 3 } + +// Reset reinitializes the QuarterWaveFFT for work on sequences of length n. +func (t *QuarterWaveFFT) Reset(n int) { + if 3*n <= cap(t.work) { + t.work = t.work[:3*n] + } else { + t.work = make([]float64, 3*n) + } + fftpack.Cosqi(n, t.work, t.ifac[:]) +} + +// CosCoefficients computes the Fast Fourier Transform of quarter wave data for +// the input sequence, seq, placing the cosine series coefficients in dst and +// returning it. +// This transform is unnormalized; a call to CosCoefficients followed by a call +// to CosSequence will multiply the input sequence by 4*n, where n is the length +// of the sequence. +// +// If the length of seq is not t.Len(), CosCoefficients will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal t.Len(), CosCoefficients will panic. +// It is safe to use the same slice for dst and seq. +func (t *QuarterWaveFFT) CosCoefficients(dst, seq []float64) []float64 { + if len(seq) != t.Len() { + panic("fourier: sequence length mismatch") + } + if dst == nil { + dst = make([]float64, t.Len()) + } else if len(dst) != t.Len() { + panic("fourier: destination length mismatch") + } + copy(dst, seq) + fftpack.Cosqf(len(dst), dst, t.work, t.ifac[:]) + return dst +} + +// CosSequence computes the Inverse Fast Fourier Transform of quarter wave data for +// the input cosine series coefficients, coeff, placing the sequence data in dst +// and returning it. 
+// This transform is unnormalized; a call to CosSequence followed by a call +// to CosCoefficients will multiply the input sequence by 4*n, where n is the length +// of the sequence. +// +// If the length of seq is not t.Len(), CosSequence will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal t.Len(), CosSequence will panic. +// It is safe to use the same slice for dst and seq. +func (t *QuarterWaveFFT) CosSequence(dst, coeff []float64) []float64 { + if len(coeff) != t.Len() { + panic("fourier: coefficients length mismatch") + } + if dst == nil { + dst = make([]float64, t.Len()) + } else if len(dst) != t.Len() { + panic("fourier: destination length mismatch") + } + copy(dst, coeff) + fftpack.Cosqb(len(dst), dst, t.work, t.ifac[:]) + return dst +} + +// SinCoefficients computes the Fast Fourier Transform of quarter wave data for +// the input sequence, seq, placing the sine series coefficients in dst and +// returning it. +// This transform is unnormalized; a call to SinCoefficients followed by a call +// to SinSequence will multiply the input sequence by 4*n, where n is the length +// of the sequence. +// +// If the length of seq is not t.Len(), SinCoefficients will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal t.Len(), SinCoefficients will panic. +// It is safe to use the same slice for dst and seq. +func (t *QuarterWaveFFT) SinCoefficients(dst, seq []float64) []float64 { + if len(seq) != t.Len() { + panic("fourier: sequence length mismatch") + } + if dst == nil { + dst = make([]float64, t.Len()) + } else if len(dst) != t.Len() { + panic("fourier: destination length mismatch") + } + copy(dst, seq) + fftpack.Sinqf(len(dst), dst, t.work, t.ifac[:]) + return dst +} + +// SinSequence computes the Inverse Fast Fourier Transform of quarter wave data for +// the input sine series coefficients, coeff, placing the sequence data in dst +// and returning it. +// This transform is unnormalized; a call to SinSequence followed by a call +// to SinCoefficients will multiply the input sequence by 4*n, where n is the length +// of the sequence. +// +// If the length of seq is not t.Len(), SinSequence will panic. +// If dst is nil, a new slice is allocated and returned. If dst is not nil and +// the length of dst does not equal t.Len(), SinSequence will panic. +// It is safe to use the same slice for dst and seq. +func (t *QuarterWaveFFT) SinSequence(dst, coeff []float64) []float64 { + if len(coeff) != t.Len() { + panic("fourier: coefficients length mismatch") + } + if dst == nil { + dst = make([]float64, t.Len()) + } else if len(dst) != t.Len() { + panic("fourier: destination length mismatch") + } + copy(dst, coeff) + fftpack.Sinqb(len(dst), dst, t.work, t.ifac[:]) + return dst +} diff --git a/vendor/gonum.org/v1/gonum/fourier/sincos.go b/vendor/gonum.org/v1/gonum/fourier/sincos.go new file mode 100644 index 0000000..b79230b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/fourier/sincos.go @@ -0,0 +1,112 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fourier + +import "gonum.org/v1/gonum/fourier/internal/fftpack" + +// DCT implements Discrete Cosine Transform for real sequences. +type DCT struct { + work []float64 + ifac [15]int +} + +// NewDCT returns a DCT initialized for work on sequences of length n. 
+// NewDCT will panic if n is not greater than 1.
+func NewDCT(n int) *DCT {
+	var t DCT
+	t.Reset(n)
+	return &t
+}
+
+// Len returns the length of the acceptable input.
+func (t *DCT) Len() int { return len(t.work) / 3 }
+
+// Reset reinitializes the DCT for work on sequences of length n.
+// Reset will panic if n is not greater than 1.
+func (t *DCT) Reset(n int) {
+	if n <= 1 {
+		panic("fourier: n less than 2")
+	}
+	if 3*n <= cap(t.work) {
+		t.work = t.work[:3*n]
+	} else {
+		t.work = make([]float64, 3*n)
+	}
+	fftpack.Costi(n, t.work, t.ifac[:])
+}
+
+// Transform computes the Discrete Fourier Cosine Transform of
+// the input data, src, placing the result in dst and returning it.
+// This transform is unnormalized; a call to Transform followed by
+// another call to Transform will multiply the input sequence by 2*(n-1),
+// where n is the length of the sequence.
+//
+// If the length of src is not t.Len(), Transform will panic.
+// If dst is nil, a new slice is allocated and returned. If dst is not nil and
+// the length of dst does not equal t.Len(), Transform will panic.
+// It is safe to use the same slice for dst and src.
+func (t *DCT) Transform(dst, src []float64) []float64 {
+	if len(src) != t.Len() {
+		panic("fourier: sequence length mismatch")
+	}
+	if dst == nil {
+		dst = make([]float64, t.Len())
+	} else if len(dst) != t.Len() {
+		panic("fourier: destination length mismatch")
+	}
+	copy(dst, src)
+	fftpack.Cost(len(dst), dst, t.work, t.ifac[:])
+	return dst
+}
+
+// DST implements Discrete Sine Transform for real sequences.
+type DST struct {
+	work []float64
+	ifac [15]int
+}
+
+// NewDST returns a DST initialized for work on sequences of length n.
+func NewDST(n int) *DST {
+	var t DST
+	t.Reset(n)
+	return &t
+}
+
+// Len returns the length of the acceptable input.
+func (t *DST) Len() int { return (2*len(t.work)+1)/5 - 1 }
+
+// Reset reinitializes the DST for work on sequences of length n.
+func (t *DST) Reset(n int) {
+	if 5*(n+1)/2 <= cap(t.work) {
+		t.work = t.work[:5*(n+1)/2]
+	} else {
+		t.work = make([]float64, 5*(n+1)/2)
+	}
+	fftpack.Sinti(n, t.work, t.ifac[:])
+}
+
+// Transform computes the Discrete Fourier Sine Transform of the input
+// data, src, placing the result in dst and returning it.
+// This transform is unnormalized; a call to Transform followed by
+// another call to Transform will multiply the input sequence by 2*(n+1),
+// where n is the length of the sequence.
+//
+// If the length of src is not t.Len(), Transform will panic.
+// If dst is nil, a new slice is allocated and returned. If dst is not nil and
+// the length of dst does not equal t.Len(), Transform will panic.
+// It is safe to use the same slice for dst and src.
+func (t *DST) Transform(dst, src []float64) []float64 {
+	if len(src) != t.Len() {
+		panic("fourier: sequence length mismatch")
+	}
+	if dst == nil {
+		dst = make([]float64, t.Len())
+	} else if len(dst) != t.Len() {
+		panic("fourier: destination length mismatch")
+	}
+	copy(dst, src)
+	fftpack.Sint(len(dst), dst, t.work, t.ifac[:])
+	return dst
+}
diff --git a/vendor/gonum.org/v1/gonum/graph/community/bisect.go b/vendor/gonum.org/v1/gonum/graph/community/bisect.go
new file mode 100644
index 0000000..11d38eb
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/graph/community/bisect.go
@@ -0,0 +1,249 @@
+// Copyright ©2016 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package community + +import ( + "errors" + "fmt" + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" +) + +// Interval is an interval of resolutions with a common score. +type Interval struct { + // Low and High delimit the interval + // such that the interval is [low, high). + Low, High float64 + + // Score is the score of the interval. + Score float64 + + // Reduced is the best scoring + // community membership found for the + // interval. + Reduced +} + +// Reduced is a graph reduction. +type Reduced interface { + // Communities returns the community + // structure of the reduction. + Communities() [][]graph.Node +} + +// Size is a score function that is the reciprocal of the number of communities. +func Size(g ReducedGraph) float64 { return 1 / float64(len(g.Structure())) } + +// Weight is a score function that is the sum of community weights. The concrete +// type of g must be a pointer to a ReducedUndirected or a ReducedDirected, otherwise +// Weight will panic. +func Weight(g ReducedGraph) float64 { + var w float64 + switch g := g.(type) { + case *ReducedUndirected: + for _, n := range g.nodes { + w += n.weight + } + case *ReducedDirected: + for _, n := range g.nodes { + w += n.weight + } + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } + return w +} + +// ModularScore returns a modularized scoring function for Profile based on the +// graph g and the given score function. The effort parameter determines how +// many attempts will be made to get an improved score for any given resolution. +func ModularScore(g graph.Graph, score func(ReducedGraph) float64, effort int, src rand.Source) func(float64) (float64, Reduced) { + return func(resolution float64) (float64, Reduced) { + max := math.Inf(-1) + var best Reduced + for i := 0; i < effort; i++ { + r := Modularize(g, resolution, src) + s := score(r) + if s > max { + max = s + best = r + } + } + return max, best + } +} + +// SizeMultiplex is a score function that is the reciprocal of the number of communities. +func SizeMultiplex(g ReducedMultiplex) float64 { return 1 / float64(len(g.Structure())) } + +// WeightMultiplex is a score function that is the sum of community weights. The concrete +// type of g must be pointer to a ReducedUndirectedMultiplex or a ReducedDirectedMultiplex, +// otherwise WeightMultiplex will panic. +func WeightMultiplex(g ReducedMultiplex) float64 { + var w float64 + switch g := g.(type) { + case *ReducedUndirectedMultiplex: + for _, n := range g.nodes { + for _, lw := range n.weights { + w += lw + } + } + case *ReducedDirectedMultiplex: + for _, n := range g.nodes { + for _, lw := range n.weights { + w += lw + } + } + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } + return w +} + +// ModularMultiplexScore returns a modularized scoring function for Profile based +// on the graph g and the given score function. The effort parameter determines how +// many attempts will be made to get an improved score for any given resolution. 
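+//
+// A minimal usage sketch (an illustrative addition; g is assumed to be an
+// existing UndirectedMultiplex value, and the grain and resolution bounds are
+// arbitrary):
+//
+//	fn := ModularMultiplexScore(g, nil, true, WeightMultiplex, 10, nil)
+//	p, err := Profile(fn, true, 1e-3, 0.1, 10)
+//	// p approximates the resolution profile of g; err reports detected
+//	// non-monotonicity.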
+func ModularMultiplexScore(g Multiplex, weights []float64, all bool, score func(ReducedMultiplex) float64, effort int, src rand.Source) func(float64) (float64, Reduced) { + return func(resolution float64) (float64, Reduced) { + max := math.Inf(-1) + var best Reduced + for i := 0; i < effort; i++ { + r := ModularizeMultiplex(g, weights, []float64{resolution}, all, src) + s := score(r) + if s > max { + max = s + best = r + } + } + return max, best + } +} + +// Profile returns an approximate profile of score values in the resolution domain [low,high) +// at the given granularity. The score is calculated by bisecting calls to fn. If log is true, +// log space bisection is used, otherwise bisection is linear. The function fn should be +// monotonically decreasing in at least 1/grain evaluations. Profile will attempt to detect +// non-monotonicity during the bisection. +// +// Since exact modularity optimization is known to be NP-hard and Profile calls modularization +// routines repeatedly, it is unlikely to return the exact resolution profile. +func Profile(fn func(float64) (float64, Reduced), log bool, grain, low, high float64) (profile []Interval, err error) { + if low >= high { + return nil, errors.New("community: zero or negative width domain") + } + + defer func() { + r := recover() + e, ok := r.(nonDecreasing) + if ok { + err = e + return + } + if r != nil { + panic(r) + } + }() + left, comm := fn(low) + right, _ := fn(high) + for i := 1; i < int(1/grain); i++ { + rt, _ := fn(high) + right = math.Max(right, rt) + } + profile = bisect(fn, log, grain, low, left, high, right, comm) + + // We may have missed some non-monotonicity, + // so merge low score discordant domains into + // their lower resolution neighbours. + return fixUp(profile), nil +} + +type nonDecreasing int + +func (n nonDecreasing) Error() string { + return fmt.Sprintf("community: profile does not reliably monotonically decrease: tried %d times", n) +} + +func bisect(fn func(float64) (float64, Reduced), log bool, grain, low, scoreLow, high, scoreHigh float64, comm Reduced) []Interval { + if low >= high { + panic("community: zero or negative width domain") + } + if math.IsNaN(scoreLow) || math.IsNaN(scoreHigh) { + return nil + } + + // Heuristically determine a reasonable number + // of times to try to get a higher value. + maxIter := int(1 / grain) + + lowComm := comm + for n := 0; scoreLow < scoreHigh; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + scoreLow, lowComm = fn(low) + } + + if scoreLow == scoreHigh || tooSmall(low, high, grain, log) { + return []Interval{{Low: low, High: high, Score: scoreLow, Reduced: lowComm}} + } + + var mid float64 + if log { + mid = math.Sqrt(low * high) + } else { + mid = (low + high) / 2 + } + + scoreMid := math.Inf(-1) + var midComm Reduced + for n := 0; scoreMid < scoreHigh; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + scoreMid, midComm = fn(mid) + } + + lower := bisect(fn, log, grain, low, scoreLow, mid, scoreMid, lowComm) + higher := bisect(fn, log, grain, mid, scoreMid, high, scoreHigh, midComm) + for n := 0; lower[len(lower)-1].Score < higher[0].Score; n++ { + if n > maxIter { + panic(nonDecreasing(n)) + } + lower[len(lower)-1].Score, lower[len(lower)-1].Reduced = fn(low) + } + + if lower[len(lower)-1].Score == higher[0].Score { + higher[0].Low = lower[len(lower)-1].Low + lower = lower[:len(lower)-1] + if len(lower) == 0 { + return higher + } + } + return append(lower, higher...) +} + +// fixUp non-monotonically decreasing interval scores. 
+func fixUp(profile []Interval) []Interval { + max := profile[len(profile)-1].Score + for i := len(profile) - 2; i >= 0; i-- { + if profile[i].Score > max { + max = profile[i].Score + continue + } + profile[i+1].Low = profile[i].Low + profile = append(profile[:i], profile[i+1:]...) + } + return profile +} + +func tooSmall(low, high, grain float64, log bool) bool { + if log { + return math.Log(high/low) < grain + } + return high-low < grain +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/doc.go b/vendor/gonum.org/v1/gonum/graph/community/doc.go new file mode 100644 index 0000000..3520222 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package community provides graph community detection functions. +package community // import "gonum.org/v1/gonum/graph/community" diff --git a/vendor/gonum.org/v1/gonum/graph/community/k_communities.go b/vendor/gonum.org/v1/gonum/graph/community/k_communities.go new file mode 100644 index 0000000..7d9e1bc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/k_communities.go @@ -0,0 +1,98 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/simple" + "gonum.org/v1/gonum/graph/topo" + "gonum.org/v1/gonum/graph/traverse" +) + +// KCliqueCommunities returns the k-clique communties of the undirected graph g for +// k greater than zero. The returned communities are identified by linkage via k-clique +// adjacency, where adjacency is defined as having k-1 common nodes. KCliqueCommunities +// returns a single component including the full set of nodes of g when k is 1, +// and the classical connected components of g when k is 2. Note that k-clique +// communities may contain common nodes from g. +// +// k-clique communities are described in Palla et al. doi:10.1038/nature03607. +func KCliqueCommunities(k int, g graph.Undirected) [][]graph.Node { + if k < 1 { + panic("community: invalid k for k-clique communities") + } + switch k { + case 1: + return [][]graph.Node{graph.NodesOf(g.Nodes())} + case 2: + return topo.ConnectedComponents(g) + default: + cg := simple.NewUndirectedGraph() + topo.CliqueGraph(cg, g) + cc := kConnectedComponents(k, cg) + + // Extract the nodes in g from cg, + // removing duplicates and separating + // cliques smaller than k into separate + // single nodes. + var kcc [][]graph.Node + single := set.NewNodes() + inCommunity := set.NewNodes() + for _, c := range cc { + nodes := set.NewNodesSize(len(c)) + for _, cn := range c { + for _, n := range cn.(topo.Clique).Nodes() { + nodes.Add(n) + } + } + if len(nodes) < k { + for _, n := range nodes { + single.Add(n) + } + continue + } + var kc []graph.Node + for _, n := range nodes { + inCommunity.Add(n) + kc = append(kc, n) + } + kcc = append(kcc, kc) + } + for _, n := range single { + if !inCommunity.Has(n) { + kcc = append(kcc, []graph.Node{n}) + } + } + + return kcc + } +} + +// kConnectedComponents returns the connected components of topo.Clique nodes that +// are joined by k-1 underlying shared nodes in the graph that created the clique +// graph cg. 
+func kConnectedComponents(k int, cg graph.Undirected) [][]graph.Node { + var ( + c []graph.Node + cc [][]graph.Node + ) + during := func(n graph.Node) { + c = append(c, n) + } + after := func() { + cc = append(cc, []graph.Node(nil)) + cc[len(cc)-1] = append(cc[len(cc)-1], c...) + c = c[:0] + } + w := traverse.DepthFirst{ + Traverse: func(e graph.Edge) bool { + return len(e.(topo.CliqueGraphEdge).Nodes()) >= k-1 + }, + } + w.WalkAll(cg, nil, after, during) + + return cc +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/louvain_common.go b/vendor/gonum.org/v1/gonum/graph/community/louvain_common.go new file mode 100644 index 0000000..534651e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/louvain_common.go @@ -0,0 +1,418 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/set" +) + +// Q returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// Q will panic if g has any edge with negative edge weight. +// +// If g is undirected, Q is calculated according to +// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j), +// If g is directed, it is calculated according to +// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j). +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs with the undirected modularity function. +func Q(g graph.Graph, communities [][]graph.Node, resolution float64) float64 { + switch g := g.(type) { + case graph.Undirected: + return qUndirected(g, communities, resolution) + case graph.Directed: + return qDirected(g, communities, resolution) + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } +} + +// ReducedGraph is a modularised graph. +type ReducedGraph interface { + graph.Graph + + // Communities returns the community memberships + // of the nodes in the graph used to generate + // the reduced graph. + Communities() [][]graph.Node + + // Structure returns the community structure of + // the current level of the module clustering. + // Each slice in the returned value recursively + // describes the membership of a community at + // the current level by indexing via the node + // ID into the structure of the non-nil + // ReducedGraph returned by Expanded, or when the + // ReducedGraph is nil, by containing nodes + // from the original input graph. + // + // The returned value should not be mutated. + Structure() [][]graph.Node + + // Expanded returns the next lower level of the + // module clustering or nil if at the lowest level. + // + // The returned ReducedGraph will be the same + // concrete type as the receiver. + Expanded() ReducedGraph +} + +// Modularize returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If src is nil, rand.Intn is used as the random +// generator. Modularize will panic if g has any edge with negative edge weight. 
+// +// If g is undirected it is modularised to minimise +// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j), +// If g is directed it is modularised to minimise +// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j). +// +// The concrete type of the ReducedGraph will be a pointer to either a +// ReducedUndirected or a ReducedDirected depending on the type of g. +// +// graph.Undirect may be used as a shim to allow modularization of +// directed graphs with the undirected modularity function. +func Modularize(g graph.Graph, resolution float64, src rand.Source) ReducedGraph { + switch g := g.(type) { + case graph.Undirected: + return louvainUndirected(g, resolution, src) + case graph.Directed: + return louvainDirected(g, resolution, src) + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } +} + +// Multiplex is a multiplex graph. +type Multiplex interface { + // Nodes returns the nodes + // for the multiplex graph. + // All layers must refer to the same + // set of nodes. + Nodes() graph.Nodes + + // Depth returns the number of layers + // in the multiplex graph. + Depth() int +} + +// QMultiplex returns the modularity Q score of the multiplex graph layers +// subdivided into the given communities at the given resolutions and weights. Q is +// returned as the vector of weighted Q scores for each layer of the multiplex graph. +// If communities is nil, the unclustered modularity score is returned. +// If weights is nil layers are equally weighted, otherwise the length of +// weights must equal the number of layers. If resolutions is nil, a resolution +// of 1.0 is used for all layers, otherwise either a single element slice may be used +// to specify a global resolution, or the length of resolutions must equal the number +// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt +// doi:10.1103/PhysRevE.74.016110. +// QMultiplex will panic if the graph has any layer weight-scaled edge with +// negative edge weight. +// +// If g is undirected, Q is calculated according to +// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m_{layer} ] \delta(c_i,c_j), +// If g is directed, it is calculated according to +// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j). +// +// Note that Q values for multiplex graphs are not scaled by the total layer edge weight. +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs. +func QMultiplex(g Multiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + switch g := g.(type) { + case UndirectedMultiplex: + return qUndirectedMultiplex(g, communities, weights, resolutions) + case DirectedMultiplex: + return qDirectedMultiplex(g, communities, weights, resolutions) + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } +} + +// ReducedMultiplex is a modularised multiplex graph. +type ReducedMultiplex interface { + Multiplex + + // Communities returns the community memberships + // of the nodes in the graph used to generate + // the reduced graph. 
+ Communities() [][]graph.Node + + // Structure returns the community structure of + // the current level of the module clustering. + // Each slice in the returned value recursively + // describes the membership of a community at + // the current level by indexing via the node + // ID into the structure of the non-nil + // ReducedGraph returned by Expanded, or when the + // ReducedGraph is nil, by containing nodes + // from the original input graph. + // + // The returned value should not be mutated. + Structure() [][]graph.Node + + // Expanded returns the next lower level of the + // module clustering or nil if at the lowest level. + // + // The returned ReducedGraph will be the same + // concrete type as the receiver. + Expanded() ReducedMultiplex +} + +// ModularizeMultiplex returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If all is true and g have negatively weighted layers, all +// communities will be searched during the modularization. If src is nil, rand.Intn is +// used as the random generator. ModularizeMultiplex will panic if g has any edge with +// edge weight that does not sign-match the layer weight. +// +// If g is undirected it is modularised to minimise +// Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j). +// If g is directed it is modularised to minimise +// Q = \sum w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i^in k_j^out)/m_{layer} ] \delta(c_i,c_j). +// +// The concrete type of the ReducedMultiplex will be a pointer to a +// ReducedUndirectedMultiplex. +// +// graph.Undirect may be used as a shim to allow modularization of +// directed graphs with the undirected modularity function. +func ModularizeMultiplex(g Multiplex, weights, resolutions []float64, all bool, src rand.Source) ReducedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + switch g := g.(type) { + case UndirectedMultiplex: + return louvainUndirectedMultiplex(g, weights, resolutions, all, src) + case DirectedMultiplex: + return louvainDirectedMultiplex(g, weights, resolutions, all, src) + default: + panic(fmt.Sprintf("community: invalid graph type: %T", g)) + } +} + +// undirectedEdges is the edge structure of a reduced undirected graph. +type undirectedEdges struct { + // edges and weights is the set + // of edges between nodes. + // weights is keyed such that + // the first element of the key + // is less than the second. + edges [][]int + weights map[[2]int]float64 +} + +// directedEdges is the edge structure of a reduced directed graph. +type directedEdges struct { + // edgesFrom, edgesTo and weights + // is the set of edges between nodes. + edgesFrom [][]int + edgesTo [][]int + weights map[[2]int]float64 +} + +// isValidID returns whether id is a valid ID for a community, +// multiplexCommunity or node. These are all graph.Node types +// stored in []T with a mapping between their index and their ID +// so IDs must be positive and fit within the int type. +func isValidID(id int64) bool { + return id == int64(int(id)) && id >= 0 +} + +// community is a reduced graph node describing its membership. +type community struct { + // community graphs are internal, in-memory + // with dense IDs, so id is always an int. 
+ id int + + nodes []graph.Node + + weight float64 +} + +func (n community) ID() int64 { return int64(n.id) } + +// edge is a reduced graph edge. +type edge struct { + from, to community + weight float64 +} + +func (e edge) From() graph.Node { return e.from } +func (e edge) To() graph.Node { return e.to } +func (e edge) ReversedEdge() graph.Edge { e.from, e.to = e.to, e.from; return e } +func (e edge) Weight() float64 { return e.weight } + +// multiplexCommunity is a reduced multiplex graph node describing its membership. +type multiplexCommunity struct { + // community graphs are internal, in-memory + // with dense IDs, so id is always an int. + id int + + nodes []graph.Node + + weights []float64 +} + +func (n multiplexCommunity) ID() int64 { return int64(n.id) } + +// multiplexEdge is a reduced graph edge for a multiplex graph. +type multiplexEdge struct { + from, to multiplexCommunity + weight float64 +} + +func (e multiplexEdge) From() graph.Node { return e.from } +func (e multiplexEdge) To() graph.Node { return e.to } +func (e multiplexEdge) ReversedEdge() graph.Edge { e.from, e.to = e.to, e.from; return e } +func (e multiplexEdge) Weight() float64 { return e.weight } + +// commIdx is an index of a node in a community held by a localMover. +type commIdx struct { + community int + node int +} + +// node is defined to avoid an import of .../graph/simple. node is +// used in in-memory, dense ID graphs and so is always an int. +type node int + +func (n node) ID() int64 { return int64(n) } + +// minTaker is a set iterator. +type minTaker interface { + TakeMin(p *int) bool +} + +// dense is a dense integer set iterator. +type dense struct { + pos int + n int +} + +// TakeMin mimics intsets.Sparse TakeMin for dense sets. If the dense +// iterator position is less than the iterator size, TakeMin sets *p +// to the iterator position and increments the position and returns +// true. +// Otherwise, it returns false and *p is undefined. +func (d *dense) TakeMin(p *int) bool { + if d.pos >= d.n { + return false + } + *p = d.pos + d.pos++ + return true +} + +// slice is a sparse integer set iterator. +type slice struct { + pos int + elems []int +} + +// newSlice returns a new slice of elements from s, sorted ascending. +func newSlice(s set.Ints) *slice { + elems := make([]int, 0, len(s)) + for i := range s { + elems = append(elems, i) + } + sort.Ints(elems) + return &slice{elems: elems} +} + +// TakeMin mimics intsets.Sparse TakeMin for a sorted set. If the set +// iterator position is less than the iterator size, TakeMin sets *p +// to the iterator position's element and increments the position +// and returns true. +// Otherwise, it returns false and *p is undefined. +func (s *slice) TakeMin(p *int) bool { + if s.pos >= len(s.elems) { + return false + } + *p = s.elems[s.pos] + s.pos++ + return true +} + +const ( + negativeWeight = "community: unexpected negative edge weight" + positiveWeight = "community: unexpected positive edge weight" +) + +// positiveWeightFuncFor returns a constructed weight function for the +// positively weighted g. Unweighted graphs have unit weight for existing +// edges. 
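As an aside before the weight-function constructors that follow: the minTaker iterators above are drained with a short loop. The sketch below is hypothetical and package-internal (minTaker and dense are unexported), and exampleTakeMin is not part of the vendored file; the multiplex local movers' deltaQ methods consume candidate communities with the same loop shape.

package community

import "fmt"

// exampleTakeMin drains a dense iterator over {0, 1, 2} in ascending order.
func exampleTakeMin() {
	var it minTaker = &dense{n: 3}
	var i int
	for it.TakeMin(&i) {
		fmt.Println(i) // prints 0, then 1, then 2
	}
}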
+func positiveWeightFuncFor(g graph.Graph) func(xid, yid int64) float64 { + if wg, ok := g.(graph.Weighted); ok { + return func(xid, yid int64) float64 { + w, ok := wg.Weight(xid, yid) + if !ok { + return 0 + } + if w < 0 { + panic(negativeWeight) + } + return w + } + } + return func(xid, yid int64) float64 { + e := g.Edge(xid, yid) + if e == nil { + return 0 + } + return 1 + } +} + +// negativeWeightFuncFor returns a constructed weight function for the +// negatively weighted g. Unweighted graphs have unit weight for existing +// edges. +func negativeWeightFuncFor(g graph.Graph) func(xid, yid int64) float64 { + if wg, ok := g.(graph.Weighted); ok { + return func(xid, yid int64) float64 { + w, ok := wg.Weight(xid, yid) + if !ok { + return 0 + } + if w > 0 { + panic(positiveWeight) + } + return -w + } + } + return func(xid, yid int64) float64 { + e := g.Edge(xid, yid) + if e == nil { + return 0 + } + return 1 + } +} + +// depth returns max(1, len(weights)). It is used to ensure +// that multiplex community weights are properly initialised. +func depth(weights []float64) int { + if weights == nil { + return 1 + } + return len(weights) +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/louvain_directed.go b/vendor/gonum.org/v1/gonum/graph/community/louvain_directed.go new file mode 100644 index 0000000..77e0787 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/louvain_directed.go @@ -0,0 +1,677 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/iterator" +) + +// qDirected returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// qDirected will panic if g has any edge with negative edge weight. +// +// Q = 1/m \sum_{ij} [ A_{ij} - (\gamma k_i^in k_j^out)/m ] \delta(c_i,c_j) +// +func qDirected(g graph.Directed, communities [][]graph.Node, resolution float64) float64 { + nodes := graph.NodesOf(g.Nodes()) + weight := positiveWeightFuncFor(g) + + // Calculate the total edge weight of the graph + // and the table of penetrating edge weight sums. + var m float64 + k := make(map[int64]directedWeights, len(nodes)) + for _, n := range nodes { + var wOut float64 + u := n + uid := u.ID() + to := g.From(uid) + for to.Next() { + wOut += weight(uid, to.Node().ID()) + } + var wIn float64 + v := n + vid := v.ID() + from := g.To(vid) + for from.Next() { + wIn += weight(from.Node().ID(), vid) + } + id := n.ID() + w := weight(id, id) + m += w + wOut // We only need to count edges once. 
+ k[id] = directedWeights{out: w + wOut, in: w + wIn} + } + + if communities == nil { + var q float64 + for _, u := range nodes { + uid := u.ID() + kU := k[uid] + q += weight(uid, uid) - resolution*kU.out*kU.in/m + } + return q / m + } + + var q float64 + for _, c := range communities { + for _, u := range c { + uid := u.ID() + kU := k[uid] + for _, v := range c { + vid := v.ID() + kV := k[vid] + q += weight(uid, vid) - resolution*kU.out*kV.in/m + } + } + } + return q / m +} + +// louvainDirected returns the hierarchical modularization of g at the given +// resolution using the Louvain algorithm. If src is nil, rand.Intn is used +// as the random generator. louvainDirected will panic if g has any edge with negative +// edge weight. +func louvainDirected(g graph.Directed, resolution float64, src rand.Source) ReducedGraph { + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceDirected(g, nil) + rnd := rand.Intn + if src != nil { + rnd = rand.New(src).Intn + } + for { + l := newDirectedLocalMover(c, c.communities, resolution) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceDirected(c, l.communities) + } +} + +// ReducedDirected is a directed graph of communities derived from a +// parent graph by reduction. +type ReducedDirected struct { + // nodes is the set of nodes held + // by the graph. In a ReducedDirected + // the node ID is the index into + // nodes. + nodes []community + directedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedDirected +} + +var ( + reducedDirected = (*ReducedDirected)(nil) + + _ graph.WeightedDirected = reducedDirected + _ ReducedGraph = reducedDirected +) + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedDirected) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedDirected) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedDirected) Expanded() ReducedGraph { + return g.parent +} + +// reduceDirected returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceDirected. If communities is nil and g is a +// ReducedDirected, it is returned unaltered. 
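Before the reduceDirected implementation below, a sketch of the exported entry points that the ReducedDirected machinery serves, reusing the two-triangle toy graph from the earlier profile sketch; the graph and the fixed seed are assumptions for illustration only.

package main

import (
	"fmt"

	"golang.org/x/exp/rand"

	"gonum.org/v1/gonum/graph/community"
	"gonum.org/v1/gonum/graph/simple"
)

func main() {
	// Two directed triangles joined by a single edge.
	g := simple.NewDirectedGraph()
	for _, e := range [][2]int64{{0, 1}, {1, 2}, {2, 0}, {3, 4}, {4, 5}, {5, 3}, {2, 3}} {
		g.SetEdge(g.NewEdge(simple.Node(e[0]), simple.Node(e[1])))
	}

	// Fix the random source so runs are repeatable.
	r := community.Modularize(g, 1, rand.NewSource(1))

	communities := r.Communities()
	fmt.Println("communities:", communities)
	fmt.Println("Q =", community.Q(g, communities, 1))
}

Since the Modularize documentation above states that the concrete type returned for a directed graph is a *ReducedDirected, the result could also be type-asserted to reach the weighted-graph methods defined later in this file.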
+func reduceDirected(g graph.Directed, communities [][]graph.Node) *ReducedDirected { + if communities == nil { + if r, ok := g.(*ReducedDirected); ok { + return r + } + + nodes := graph.NodesOf(g.Nodes()) + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. + sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + weight := positiveWeightFuncFor(g) + r := ReducedDirected{ + nodes: make([]community, len(nodes)), + directedEdges: directedEdges{ + edgesFrom: make([][]int, len(nodes)), + edgesTo: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + }, + communities: communities, + } + communityOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = community{id: i, nodes: []graph.Node{n}} + communityOf[n.ID()] = i + } + for _, n := range nodes { + id := communityOf[n.ID()] + + var out []int + u := n + uid := u.ID() + to := g.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + if vcid != id { + out = append(out, vcid) + } + r.weights[[2]int{id, vcid}] = weight(uid, vid) + } + r.edgesFrom[id] = out + + var in []int + v := n + vid := v.ID() + from := g.To(vid) + for from.Next() { + uid := from.Node().ID() + ucid := communityOf[uid] + if ucid != id { + in = append(in, ucid) + } + r.weights[[2]int{ucid, id}] = weight(uid, vid) + } + r.edgesTo[id] = in + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedDirected{ + nodes: make([]community, len(communities)), + directedEdges: directedEdges{ + edgesFrom: make([][]int, len(communities)), + edgesTo: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + }, + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedDirected); ok { + // Make sure we retain the truncated + // community structure. + g.communities = communities + r.parent = g + } + weight := positiveWeightFuncFor(g) + communityOf := make(map[int64]int, commNodes) + for i, comm := range communities { + r.nodes[i] = community{id: i, nodes: comm} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for id, comm := range communities { + var out, in []int + for _, n := range comm { + u := n + uid := u.ID() + for _, v := range comm { + r.nodes[id].weight += weight(uid, v.ID()) + } + + to := g.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + found := false + for _, e := range out { + if e == vcid { + found = true + break + } + } + if !found && vcid != id { + out = append(out, vcid) + } + // Add half weights because the other + // ends of edges are also counted. 
+ r.weights[[2]int{id, vcid}] += weight(uid, vid) / 2 + } + + v := n + vid := v.ID() + from := g.To(vid) + for from.Next() { + uid := from.Node().ID() + ucid := communityOf[uid] + found := false + for _, e := range in { + if e == ucid { + found = true + break + } + } + if !found && ucid != id { + in = append(in, ucid) + } + // Add half weights because the other + // ends of edges are also counted. + r.weights[[2]int{ucid, id}] += weight(uid, vid) / 2 + } + } + r.edgesFrom[id] = out + r.edgesTo[id] = in + } + return &r +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *ReducedDirected) Node(id int64) graph.Node { + if g.has(id) { + return g.nodes[id] + } + return nil +} + +// has returns whether the node exists within the graph. +func (g *ReducedDirected) has(id int64) bool { + return 0 <= id && id < int64(len(g.nodes)) +} + +// Nodes returns all the nodes in the graph. +func (g *ReducedDirected) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// From returns all nodes in g that can be reached directly from u. +func (g *ReducedDirected) From(uid int64) graph.Nodes { + out := g.edgesFrom[uid] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.nodes[vid] + } + return iterator.NewOrderedNodes(nodes) +} + +// To returns all nodes in g that can reach directly to v. +func (g *ReducedDirected) To(vid int64) graph.Nodes { + in := g.edgesTo[vid] + nodes := make([]graph.Node, len(in)) + for i, uid := range in { + nodes[i] = g.nodes[uid] + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *ReducedDirected) HasEdgeBetween(xid, yid int64) bool { + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return false + } + _, ok := g.weights[[2]int{int(xid), int(yid)}] + if ok { + return true + } + _, ok = g.weights[[2]int{int(yid), int(xid)}] + return ok +} + +// HasEdgeFromTo returns whether an edge exists from node u to v. +func (g *ReducedDirected) HasEdgeFromTo(uid, vid int64) bool { + if uid == vid || !isValidID(uid) || !isValidID(vid) { + return false + } + _, ok := g.weights[[2]int{int(uid), int(vid)}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedDirected) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedDirected) WeightedEdge(uid, vid int64) graph.WeightedEdge { + if uid == vid || !isValidID(uid) || !isValidID(vid) { + return nil + } + w, ok := g.weights[[2]int{int(uid), int(vid)}] + if !ok { + return nil + } + return edge{from: g.nodes[uid], to: g.nodes[vid], weight: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. 
+func (g *ReducedDirected) Weight(xid, yid int64) (w float64, ok bool) { + if !isValidID(xid) || !isValidID(yid) { + return 0, false + } + if xid == yid { + return g.nodes[xid].weight, true + } + w, ok = g.weights[[2]int{int(xid), int(yid)}] + return w, ok +} + +// directedLocalMover is a step in graph modularity optimization. +type directedLocalMover struct { + g *ReducedDirected + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightsOf is the weighted degree + // of each node indexed by ID. + edgeWeightsOf []directedWeights + + // m is the total sum of edge + // weights in g. + m float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight func(xid, yid int64) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolution float64 + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +type directedWeights struct { + out, in float64 +} + +// newDirectedLocalMover returns a new directedLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. +// The node IDs of g must be contiguous in [0,n) where n is the number of +// nodes. +// If g has a zero edge weight sum, nil is returned. +func newDirectedLocalMover(g *ReducedDirected, communities [][]graph.Node, resolution float64) *directedLocalMover { + nodes := graph.NodesOf(g.Nodes()) + l := directedLocalMover{ + g: g, + nodes: nodes, + edgeWeightsOf: make([]directedWeights, len(nodes)), + communities: communities, + memberships: make([]int, len(nodes)), + resolution: resolution, + weight: positiveWeightFuncFor(g), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + for _, n := range l.nodes { + u := n + var wOut float64 + uid := u.ID() + to := g.From(uid) + for to.Next() { + wOut += l.weight(uid, to.Node().ID()) + } + + v := n + var wIn float64 + vid := v.ID() + from := g.To(vid) + for from.Next() { + wIn += l.weight(from.Node().ID(), vid) + } + + id := n.ID() + w := l.weight(id, id) + l.edgeWeightsOf[id] = directedWeights{out: w + wOut, in: w + wIn} + l.m += w + wOut + } + + // Assign membership mappings. + for i, c := range communities { + for _, n := range c { + l.memberships[n.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// directedLocalMover has not made any improvement to the community structure and +// so the Louvain algorithm is done. +func (l *directedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// directedLocalMover using the random source rnd which should return an +// integer in the range [0,n). 
+func (l *directedLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *directedLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the directedLocalMover's +// communities field is returned in src if n is in communities. +func (l *directedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + a_aa := l.weight(id, id) + k_a := l.edgeWeightsOf[id] + m := l.m + gamma := l.resolution + + // Find communities connected to n. + connected := make(set.Ints) + // The following for loop is equivalent to: + // + // for _, v := range l.g.From(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // for _, v := range l.g.To(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // + // This is done to avoid two allocations. + for _, vid := range l.g.edgesFrom[id] { + connected.Add(l.memberships[vid]) + } + for _, vid := range l.g.edgesTo[id] { + connected.Add(l.memberships[vid]) + } + // Insert the node's own community. + connected.Add(l.memberships[id]) + + candidates := make([]int, 0, len(connected)) + for i := range connected { + candidates = append(candidates, i) + } + sort.Ints(candidates) + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. + var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + for _, i := range candidates { + c := l.communities[i] + var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽. + var removal bool + for j, u := range c { + uid := u.ID() + if uid == id { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + removal = true + } + + k_aC.in += l.weight(uid, id) + k_aC.out += l.weight(id, uid) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + w := l.edgeWeightsOf[uid] + sigma_totC.in += w.in + sigma_totC.out += w.out + } + + // See louvain.tex for a derivation of these equations. + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove = (k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) - + gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m + + default: + // Otherwise calculate the change due to an addition + // to c and retain if it is the current best. 
+ dQ := k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ - + gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m + + if dQ > dQadd { + dQadd = dQ + dst = i + } + } + } + + return (dQadd - dQremove) / m, dst, src +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/louvain_directed_multiplex.go b/vendor/gonum.org/v1/gonum/graph/community/louvain_directed_multiplex.go new file mode 100644 index 0000000..e4fad04 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/louvain_directed_multiplex.go @@ -0,0 +1,917 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/iterator" +) + +// DirectedMultiplex is a directed multiplex graph. +type DirectedMultiplex interface { + Multiplex + + // Layer returns the lth layer of the + // multiplex graph. + Layer(l int) graph.Directed +} + +// qDirectedMultiplex returns the modularity Q score of the multiplex graph layers +// subdivided into the given communities at the given resolutions and weights. Q is +// returned as the vector of weighted Q scores for each layer of the multiplex graph. +// If communities is nil, the unclustered modularity score is returned. +// If weights is nil layers are equally weighted, otherwise the length of +// weights must equal the number of layers. If resolutions is nil, a resolution +// of 1.0 is used for all layers, otherwise either a single element slice may be used +// to specify a global resolution, or the length of resolutions must equal the number +// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt +// doi:10.1103/PhysRevE.74.016110. +// qUndirectedMultiplex will panic if the graph has any layer weight-scaled edge with +// negative edge weight. +// +// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j) +// +// Note that Q values for multiplex graphs are not scaled by the total layer edge weight. +func qDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 { + q := make([]float64, g.Depth()) + nodes := graph.NodesOf(g.Nodes()) + layerWeight := 1.0 + layerResolution := 1.0 + if len(resolutions) == 1 { + layerResolution = resolutions[0] + } + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + + if weights != nil { + layerWeight = weights[l] + } + if layerWeight == 0 { + continue + } + + if len(resolutions) > 1 { + layerResolution = resolutions[l] + } + + var weight func(xid, yid int64) float64 + if layerWeight < 0 { + weight = negativeWeightFuncFor(layer) + } else { + weight = positiveWeightFuncFor(layer) + } + + // Calculate the total edge weight of the layer + // and the table of penetrating edge weight sums. + var m float64 + k := make(map[int64]directedWeights, len(nodes)) + for _, n := range nodes { + var wOut float64 + u := n + uid := u.ID() + to := layer.From(uid) + for to.Next() { + wOut += weight(uid, to.Node().ID()) + } + var wIn float64 + v := n + vid := v.ID() + from := layer.To(vid) + for from.Next() { + wIn += weight(from.Node().ID(), vid) + } + id := n.ID() + w := weight(id, id) + m += w + wOut // We only need to count edges once. 
+ k[n.ID()] = directedWeights{out: w + wOut, in: w + wIn} + } + + if communities == nil { + var qLayer float64 + for _, u := range nodes { + uid := u.ID() + kU := k[uid] + qLayer += weight(uid, uid) - layerResolution*kU.out*kU.in/m + } + q[l] = layerWeight * qLayer + continue + } + + var qLayer float64 + for _, c := range communities { + for _, u := range c { + uid := u.ID() + kU := k[uid] + for _, v := range c { + vid := v.ID() + kV := k[vid] + qLayer += weight(uid, vid) - layerResolution*kU.out*kV.in/m + } + } + } + q[l] = layerWeight * qLayer + } + + return q +} + +// DirectedLayers implements DirectedMultiplex. +type DirectedLayers []graph.Directed + +// NewDirectedLayers returns a DirectedLayers using the provided layers +// ensuring there is a match between IDs for each layer. +func NewDirectedLayers(layers ...graph.Directed) (DirectedLayers, error) { + if len(layers) == 0 { + return nil, nil + } + base := make(set.Int64s) + nodes := layers[0].Nodes() + for nodes.Next() { + base.Add(nodes.Node().ID()) + } + for i, l := range layers[1:] { + next := make(set.Int64s) + nodes := l.Nodes() + for nodes.Next() { + next.Add(nodes.Node().ID()) + } + if !set.Int64sEqual(base, next) { + return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1) + } + } + return layers, nil +} + +// Nodes returns the nodes of the receiver. +func (g DirectedLayers) Nodes() graph.Nodes { + if len(g) == 0 { + return nil + } + return g[0].Nodes() +} + +// Depth returns the depth of the multiplex graph. +func (g DirectedLayers) Depth() int { return len(g) } + +// Layer returns the lth layer of the multiplex graph. +func (g DirectedLayers) Layer(l int) graph.Directed { return g[l] } + +// louvainDirectedMultiplex returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If all is true and g has negatively weighted layers, all +// communities will be searched during the modularization. If src is nil, rand.Intn is +// used as the random generator. louvainDirectedMultiplex will panic if g has any edge with +// edge weight that does not sign-match the layer weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. +func louvainDirectedMultiplex(g DirectedMultiplex, weights, resolutions []float64, all bool, src rand.Source) *ReducedDirectedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceDirectedMultiplex(g, nil, weights) + rnd := rand.Intn + if src != nil { + rnd = rand.New(src).Intn + } + for { + l := newDirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceDirectedMultiplex(c, l.communities, weights) + } +} + +// ReducedDirectedMultiplex is a directed graph of communities derived from a +// parent graph by reduction. +type ReducedDirectedMultiplex struct { + // nodes is the set of nodes held + // by the graph. In a ReducedDirectedMultiplex + // the node ID is the index into + // nodes. + nodes []multiplexCommunity + layers []directedEdges + + // communities is the community + // structure of the graph. 
+ communities [][]graph.Node + + parent *ReducedDirectedMultiplex +} + +var ( + _ DirectedMultiplex = (*ReducedDirectedMultiplex)(nil) + _ graph.WeightedDirected = (*directedLayerHandle)(nil) +) + +// Nodes returns all the nodes in the graph. +func (g *ReducedDirectedMultiplex) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// Depth returns the number of layers in the multiplex graph. +func (g *ReducedDirectedMultiplex) Depth() int { return len(g.layers) } + +// Layer returns the lth layer of the multiplex graph. +func (g *ReducedDirectedMultiplex) Layer(l int) graph.Directed { + return directedLayerHandle{multiplex: g, layer: l} +} + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedDirectedMultiplex) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedDirectedMultiplex) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedDirectedMultiplex) Expanded() ReducedMultiplex { + return g.parent +} + +// reduceDirectedMultiplex returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceDirectedMultiplex. If communities is nil and g is a +// ReducedDirectedMultiplex, it is returned unaltered. +func reduceDirectedMultiplex(g DirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedDirectedMultiplex { + if communities == nil { + if r, ok := g.(*ReducedDirectedMultiplex); ok { + return r + } + + nodes := graph.NodesOf(g.Nodes()) + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. 
+ sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + r := ReducedDirectedMultiplex{ + nodes: make([]multiplexCommunity, len(nodes)), + layers: make([]directedEdges, g.Depth()), + communities: communities, + } + communityOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))} + communityOf[n.ID()] = i + } + for i := range r.layers { + r.layers[i] = directedEdges{ + edgesFrom: make([][]int, len(nodes)), + edgesTo: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + } + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(xid, yid int64) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for _, n := range nodes { + id := communityOf[n.ID()] + + var out []int + u := n + uid := u.ID() + to := layer.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + if vcid != id { + out = append(out, vcid) + } + r.layers[l].weights[[2]int{id, vcid}] = sign * weight(uid, vid) + } + r.layers[l].edgesFrom[id] = out + + var in []int + v := n + vid := v.ID() + from := layer.To(vid) + for from.Next() { + uid := from.Node().ID() + ucid := communityOf[uid] + if ucid != id { + in = append(in, ucid) + } + r.layers[l].weights[[2]int{ucid, id}] = sign * weight(uid, vid) + } + r.layers[l].edgesTo[id] = in + } + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedDirectedMultiplex{ + nodes: make([]multiplexCommunity, len(communities)), + layers: make([]directedEdges, g.Depth()), + } + communityOf := make(map[int64]int, commNodes) + for i, comm := range communities { + r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for i := range r.layers { + r.layers[i] = directedEdges{ + edgesFrom: make([][]int, len(communities)), + edgesTo: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + } + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedDirectedMultiplex); ok { + // Make sure we retain the truncated + // community structure. 
+ g.communities = communities + r.parent = g + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(xid, yid int64) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for id, comm := range communities { + var out, in []int + for _, n := range comm { + u := n + uid := u.ID() + for _, v := range comm { + r.nodes[id].weights[l] += sign * weight(uid, v.ID()) + } + + to := layer.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + found := false + for _, e := range out { + if e == vcid { + found = true + break + } + } + if !found && vcid != id { + out = append(out, vcid) + } + // Add half weights because the other + // ends of edges are also counted. + r.layers[l].weights[[2]int{id, vcid}] += sign * weight(uid, vid) / 2 + } + + v := n + vid := v.ID() + from := layer.To(vid) + for from.Next() { + uid := from.Node().ID() + ucid := communityOf[uid] + found := false + for _, e := range in { + if e == ucid { + found = true + break + } + } + if !found && ucid != id { + in = append(in, ucid) + } + // Add half weights because the other + // ends of edges are also counted. + r.layers[l].weights[[2]int{ucid, id}] += sign * weight(uid, vid) / 2 + } + + } + r.layers[l].edgesFrom[id] = out + r.layers[l].edgesTo[id] = in + } + } + return &r +} + +// directedLayerHandle is a handle to a multiplex graph layer. +type directedLayerHandle struct { + // multiplex is the complete + // multiplex graph. + multiplex *ReducedDirectedMultiplex + + // layer is an index into the + // multiplex for the current + // layer. + layer int +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g directedLayerHandle) Node(id int64) graph.Node { + if g.has(id) { + return g.multiplex.nodes[id] + } + return nil +} + +// has returns whether the node exists within the graph. +func (g directedLayerHandle) has(id int64) bool { + return 0 <= id && id < int64(len(g.multiplex.nodes)) +} + +// Nodes returns all the nodes in the graph. +func (g directedLayerHandle) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.multiplex.nodes)) + for i := range g.multiplex.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// From returns all nodes in g that can be reached directly from u. +func (g directedLayerHandle) From(uid int64) graph.Nodes { + out := g.multiplex.layers[g.layer].edgesFrom[uid] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.multiplex.nodes[vid] + } + return iterator.NewOrderedNodes(nodes) +} + +// To returns all nodes in g that can reach directly to v. +func (g directedLayerHandle) To(vid int64) graph.Nodes { + in := g.multiplex.layers[g.layer].edgesTo[vid] + nodes := make([]graph.Node, len(in)) + for i, uid := range in { + nodes[i] = g.multiplex.nodes[uid] + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. 
+func (g directedLayerHandle) HasEdgeBetween(xid, yid int64) bool { + if xid == yid { + return false + } + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return false + } + _, ok := g.multiplex.layers[g.layer].weights[[2]int{int(xid), int(yid)}] + if ok { + return true + } + _, ok = g.multiplex.layers[g.layer].weights[[2]int{int(yid), int(xid)}] + return ok +} + +// HasEdgeFromTo returns whether an edge exists from node u to v. +func (g directedLayerHandle) HasEdgeFromTo(uid, vid int64) bool { + if uid == vid || !isValidID(uid) || !isValidID(vid) { + return false + } + _, ok := g.multiplex.layers[g.layer].weights[[2]int{int(uid), int(vid)}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g directedLayerHandle) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g directedLayerHandle) WeightedEdge(uid, vid int64) graph.WeightedEdge { + if uid == vid || !isValidID(uid) || !isValidID(vid) { + return nil + } + w, ok := g.multiplex.layers[g.layer].weights[[2]int{int(uid), int(vid)}] + if !ok { + return nil + } + return multiplexEdge{from: g.multiplex.nodes[uid], to: g.multiplex.nodes[vid], weight: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g directedLayerHandle) Weight(xid, yid int64) (w float64, ok bool) { + if !isValidID(xid) || !isValidID(yid) { + return 0, false + } + if xid == yid { + return g.multiplex.nodes[xid].weights[g.layer], true + } + w, ok = g.multiplex.layers[g.layer].weights[[2]int{int(xid), int(yid)}] + return w, ok +} + +// directedMultiplexLocalMover is a step in graph modularity optimization. +type directedMultiplexLocalMover struct { + g *ReducedDirectedMultiplex + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightsOf is the weighted degree + // of each node indexed by ID. + edgeWeightsOf [][]directedWeights + + // m is the total sum of + // edge weights in g. + m []float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight []func(xid, yid int64) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolutions []float64 + + // weights is the layer weights for + // the modularisation. + weights []float64 + + // searchAll specifies whether the local + // mover should consider non-connected + // communities during the local moving + // heuristic. + searchAll bool + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. 
+ changed bool +} + +// newDirectedMultiplexLocalMover returns a new directedMultiplexLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newDirectedMultiplexLocalMover(g *ReducedDirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *directedMultiplexLocalMover { + nodes := graph.NodesOf(g.Nodes()) + l := directedMultiplexLocalMover{ + g: g, + nodes: nodes, + edgeWeightsOf: make([][]directedWeights, g.Depth()), + m: make([]float64, g.Depth()), + communities: communities, + memberships: make([]int, len(nodes)), + resolutions: resolutions, + weights: weights, + weight: make([]func(xid, yid int64) float64, g.Depth()), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + var zero int + for i := 0; i < g.Depth(); i++ { + l.edgeWeightsOf[i] = make([]directedWeights, len(nodes)) + var weight func(xid, yid int64) float64 + + if weights != nil { + if weights[i] == 0 { + zero++ + continue + } + if weights[i] < 0 { + weight = negativeWeightFuncFor(g.Layer(i)) + l.searchAll = all + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + + l.weight[i] = weight + layer := g.Layer(i) + for _, n := range l.nodes { + u := n + uid := u.ID() + var wOut float64 + to := layer.From(uid) + for to.Next() { + wOut += weight(uid, to.Node().ID()) + } + + v := n + vid := v.ID() + var wIn float64 + from := layer.To(vid) + for from.Next() { + wIn += weight(from.Node().ID(), vid) + } + + id := n.ID() + w := weight(id, id) + l.edgeWeightsOf[i][uid] = directedWeights{out: w + wOut, in: w + wIn} + l.m[i] += w + wOut + } + if l.m[i] == 0 { + zero++ + } + } + if zero == g.Depth() { + return nil + } + + // Assign membership mappings. + for i, c := range communities { + for _, n := range c { + l.memberships[n.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// directedMultiplexLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *directedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// directedMultiplexLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *directedMultiplexLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. 
+func (l *directedMultiplexLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// directedMultiplexLocalMover's communities field is returned in src if n +// is in communities. +func (l *directedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + var iterator minTaker + if l.searchAll { + iterator = &dense{n: len(l.communities)} + } else { + // Find communities connected to n. + connected := make(set.Ints) + // The following for loop is equivalent to: + // + // for i := 0; i < l.g.Depth(); i++ { + // for _, v := range l.g.Layer(i).From(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // for _, v := range l.g.Layer(i).To(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // } + // + // This is done to avoid an allocation for + // each layer. + for _, layer := range l.g.layers { + for _, vid := range layer.edgesFrom[id] { + connected.Add(l.memberships[vid]) + } + for _, vid := range layer.edgesTo[id] { + connected.Add(l.memberships[vid]) + } + } + // Insert the node's own community. + connected.Add(l.memberships[id]) + iterator = newSlice(connected) + } + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. + var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + var i int + for iterator.TakeMin(&i) { + c := l.communities[i] + var removal bool + var _dQadd float64 + for layer := 0; layer < l.g.Depth(); layer++ { + m := l.m[layer] + if m == 0 { + // Do not consider layers with zero sum edge weight. + continue + } + w := 1.0 + if l.weights != nil { + w = l.weights[layer] + } + if w == 0 { + // Do not consider layers with zero weighting. + continue + } + + var k_aC, sigma_totC directedWeights // C is a substitution for ^𝛼 or ^𝛽. + removal = false + for j, u := range c { + uid := u.ID() + if uid == id { + // Only mark and check src community on the first layer. + if layer == 0 { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + } + removal = true + } + + k_aC.in += l.weight[layer](id, uid) + k_aC.out += l.weight[layer](uid, id) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + w := l.edgeWeightsOf[layer][uid] + sigma_totC.in += w.in + sigma_totC.out += w.out + } + + a_aa := l.weight[layer](id, id) + k_a := l.edgeWeightsOf[layer][id] + gamma := 1.0 + if l.resolutions != nil { + if len(l.resolutions) == 1 { + gamma = l.resolutions[0] + } else { + gamma = l.resolutions[layer] + } + } + + // See louvain.tex for a derivation of these equations. + // The weighting term, w, is described in V Traag, + // "Algorithms and dynamical models for communities and + // reputation in social networks", chapter 5. 
+ // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove += w * ((k_aC.in /*^𝛼*/ - a_aa) + (k_aC.out /*^𝛼*/ - a_aa) - + gamma*(k_a.in*(sigma_totC.out /*^𝛼*/ -k_a.out)+k_a.out*(sigma_totC.in /*^𝛼*/ -k_a.in))/m) + + default: + // Otherwise calculate the change due to an addition + // to c. + _dQadd += w * (k_aC.in /*^𝛽*/ + k_aC.out /*^𝛽*/ - + gamma*(k_a.in*sigma_totC.out /*^𝛽*/ +k_a.out*sigma_totC.in /*^𝛽*/)/m) + } + } + if !removal && _dQadd > dQadd { + dQadd = _dQadd + dst = i + } + } + + return dQadd - dQremove, dst, src +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected.go b/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected.go new file mode 100644 index 0000000..9bfa38a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected.go @@ -0,0 +1,607 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/iterator" +) + +// qUndirected returns the modularity Q score of the graph g subdivided into the +// given communities at the given resolution. If communities is nil, the +// unclustered modularity score is returned. The resolution parameter +// is γ as defined in Reichardt and Bornholdt doi:10.1103/PhysRevE.74.016110. +// qUndirected will panic if g has any edge with negative edge weight. +// +// Q = 1/2m \sum_{ij} [ A_{ij} - (\gamma k_i k_j)/2m ] \delta(c_i,c_j) +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs. +func qUndirected(g graph.Undirected, communities [][]graph.Node, resolution float64) float64 { + nodes := graph.NodesOf(g.Nodes()) + weight := positiveWeightFuncFor(g) + + // Calculate the total edge weight of the graph + // and the table of penetrating edge weight sums. + var m2 float64 + k := make(map[int64]float64, len(nodes)) + for _, u := range nodes { + uid := u.ID() + w := weight(uid, uid) + to := g.From(uid) + for to.Next() { + w += weight(uid, to.Node().ID()) + } + m2 += w + k[uid] = w + } + + if communities == nil { + var q float64 + for _, u := range nodes { + uid := u.ID() + kU := k[uid] + q += weight(uid, uid) - resolution*kU*kU/m2 + } + return q / m2 + } + + // Iterate over the communities, calculating + // the non-self edge weights for the upper + // triangle and adjust the diagonal. + var q float64 + for _, c := range communities { + for i, u := range c { + uid := u.ID() + kU := k[uid] + q += weight(uid, uid) - resolution*kU*kU/m2 + for _, v := range c[i+1:] { + vid := v.ID() + q += 2 * (weight(uid, vid) - resolution*kU*k[vid]/m2) + } + } + } + return q / m2 +} + +// louvainUndirected returns the hierarchical modularization of g at the given +// resolution using the Louvain algorithm. If src is nil, rand.Intn is used as +// the random generator. louvainUndirected will panic if g has any edge with negative edge +// weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. 
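As a rough illustration of how the undirected Louvain entry point described above is driven, the following in-package sketch clusters a small two-triangle graph and scores the result with qUndirected. It is only a sketch: louvainUndirected and qUndirected are unexported, so it has to live inside package community (for example in a test file), and the simple.NewUndirectedGraph, simple.Node and simple.Edge helpers are assumed from gonum.org/v1/gonum/graph/simple rather than defined in this patch.

package community

import (
	"fmt"

	"gonum.org/v1/gonum/graph/simple"
)

func sketchLouvainUndirected() {
	g := simple.NewUndirectedGraph()
	// Two triangles, {0,1,2} and {3,4,5}, joined by a single bridge edge 2-3.
	for _, e := range [][2]int64{{0, 1}, {1, 2}, {2, 0}, {3, 4}, {4, 5}, {5, 3}, {2, 3}} {
		g.SetEdge(simple.Edge{F: simple.Node(e[0]), T: simple.Node(e[1])})
	}

	reduced := louvainUndirected(g, 1.0, nil) // top level of the community hierarchy
	communities := reduced.Communities()      // memberships expressed in the original graph's nodes
	fmt.Println(len(communities), qUndirected(g, communities, 1.0))
}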
+func louvainUndirected(g graph.Undirected, resolution float64, src rand.Source) *ReducedUndirected { + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceUndirected(g, nil) + rnd := rand.Intn + if src != nil { + rnd = rand.New(src).Intn + } + for { + l := newUndirectedLocalMover(c, c.communities, resolution) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceUndirected(c, l.communities) + } +} + +// ReducedUndirected is an undirected graph of communities derived from a +// parent graph by reduction. +type ReducedUndirected struct { + // nodes is the set of nodes held + // by the graph. In a ReducedUndirected + // the node ID is the index into + // nodes. + nodes []community + undirectedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedUndirected +} + +var ( + reducedUndirected = (*ReducedUndirected)(nil) + + _ graph.WeightedUndirected = reducedUndirected + _ ReducedGraph = reducedUndirected +) + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedUndirected) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedUndirected) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. +func (g *ReducedUndirected) Expanded() ReducedGraph { + return g.parent +} + +// reduceUndirected returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceUndirected. If communities is nil and g is a +// ReducedUndirected, it is returned unaltered. +func reduceUndirected(g graph.Undirected, communities [][]graph.Node) *ReducedUndirected { + if communities == nil { + if r, ok := g.(*ReducedUndirected); ok { + return r + } + + nodes := graph.NodesOf(g.Nodes()) + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. 
+ sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + weight := positiveWeightFuncFor(g) + r := ReducedUndirected{ + nodes: make([]community, len(nodes)), + undirectedEdges: undirectedEdges{ + edges: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + }, + communities: communities, + } + communityOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = community{id: i, nodes: []graph.Node{n}} + communityOf[n.ID()] = i + } + for _, u := range nodes { + uid := u.ID() + ucid := communityOf[uid] + var out []int + to := g.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + if vcid != ucid { + out = append(out, vcid) + } + if ucid < vcid { + // Only store the weight once. + r.weights[[2]int{ucid, vcid}] = weight(uid, vid) + } + } + r.edges[ucid] = out + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedUndirected{ + nodes: make([]community, len(communities)), + undirectedEdges: undirectedEdges{ + edges: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + }, + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedUndirected); ok { + // Make sure we retain the truncated + // community structure. + g.communities = communities + r.parent = g + } + weight := positiveWeightFuncFor(g) + communityOf := make(map[int64]int, commNodes) + for i, comm := range communities { + r.nodes[i] = community{id: i, nodes: comm} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for ucid, comm := range communities { + var out []int + for i, u := range comm { + uid := u.ID() + r.nodes[ucid].weight += weight(uid, uid) + for _, v := range comm[i+1:] { + r.nodes[ucid].weight += 2 * weight(uid, v.ID()) + } + to := g.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + found := false + for _, e := range out { + if e == vcid { + found = true + break + } + } + if !found && vcid != ucid { + out = append(out, vcid) + } + if ucid < vcid { + // Only store the weight once. + r.weights[[2]int{ucid, vcid}] += weight(uid, vid) + } + } + } + r.edges[ucid] = out + } + return &r +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *ReducedUndirected) Node(id int64) graph.Node { + if g.has(id) { + return g.nodes[id] + } + return nil +} + +// has returns whether the node exists within the graph. +func (g *ReducedUndirected) has(id int64) bool { + return 0 <= id && id < int64(len(g.nodes)) +} + +// Nodes returns all the nodes in the graph. +func (g *ReducedUndirected) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// From returns all nodes in g that can be reached directly from u. 
+func (g *ReducedUndirected) From(uid int64) graph.Nodes { + out := g.edges[uid] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.nodes[vid] + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *ReducedUndirected) HasEdgeBetween(xid, yid int64) bool { + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return false + } + if xid > yid { + xid, yid = yid, xid + } + _, ok := g.weights[[2]int{int(xid), int(yid)}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedUndirected) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *ReducedUndirected) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *ReducedUndirected) EdgeBetween(xid, yid int64) graph.Edge { + return g.WeightedEdgeBetween(xid, yid) +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. +func (g *ReducedUndirected) WeightedEdgeBetween(xid, yid int64) graph.WeightedEdge { + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return nil + } + if yid < xid { + xid, yid = yid, xid + } + w, ok := g.weights[[2]int{int(xid), int(yid)}] + if !ok { + return nil + } + return edge{from: g.nodes[xid], to: g.nodes[yid], weight: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *ReducedUndirected) Weight(xid, yid int64) (w float64, ok bool) { + if !isValidID(xid) || !isValidID(yid) { + return 0, false + } + if xid == yid { + return g.nodes[xid].weight, true + } + if xid > yid { + xid, yid = yid, xid + } + w, ok = g.weights[[2]int{int(xid), int(yid)}] + return w, ok +} + +// undirectedLocalMover is a step in graph modularity optimization. +type undirectedLocalMover struct { + g *ReducedUndirected + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightOf is the weighted degree + // of each node indexed by ID. + edgeWeightOf []float64 + + // m2 is the total sum of + // edge weights in g. + m2 float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight func(xid, yid int64) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolution float64 + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. 
+ changed bool +} + +// newUndirectedLocalMover returns a new undirectedLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newUndirectedLocalMover(g *ReducedUndirected, communities [][]graph.Node, resolution float64) *undirectedLocalMover { + nodes := graph.NodesOf(g.Nodes()) + l := undirectedLocalMover{ + g: g, + nodes: nodes, + edgeWeightOf: make([]float64, len(nodes)), + communities: communities, + memberships: make([]int, len(nodes)), + resolution: resolution, + weight: positiveWeightFuncFor(g), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. + for _, u := range l.nodes { + uid := u.ID() + w := l.weight(uid, uid) + to := g.From(uid) + for to.Next() { + w += l.weight(uid, to.Node().ID()) + } + l.edgeWeightOf[uid] = w + l.m2 += w + } + if l.m2 == 0 { + return nil + } + + // Assign membership mappings. + for i, c := range communities { + for _, u := range c { + l.memberships[u.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// undirectedLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *undirectedLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// undirectedLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *undirectedLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *undirectedLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// undirectedLocalMover's communities field is returned in src if n +// is in communities. +func (l *undirectedLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + a_aa := l.weight(id, id) + k_a := l.edgeWeightOf[id] + m2 := l.m2 + gamma := l.resolution + + // Find communities connected to n. + connected := make(set.Ints) + // The following for loop is equivalent to: + // + // for _, v := range l.g.From(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // + // This is done to avoid an allocation. + for _, vid := range l.g.edges[id] { + connected.Add(l.memberships[vid]) + } + // Insert the node's own community. 
+ connected.Add(l.memberships[id]) + + candidates := make([]int, 0, len(connected)) + for i := range connected { + candidates = append(candidates, i) + } + sort.Ints(candidates) + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. + var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + for _, i := range candidates { + c := l.communities[i] + var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽. + var removal bool + for j, u := range c { + uid := u.ID() + if uid == id { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + removal = true + } + + k_aC += l.weight(id, uid) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + sigma_totC += l.edgeWeightOf[uid] + } + + // See louvain.tex for a derivation of these equations. + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove = k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2 + + default: + // Otherwise calculate the change due to an addition + // to c and retain if it is the current best. + dQ := k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2 + if dQ > dQadd { + dQadd = dQ + dst = i + } + } + } + + return 2 * (dQadd - dQremove) / m2, dst, src +} diff --git a/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected_multiplex.go b/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected_multiplex.go new file mode 100644 index 0000000..e48b24a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/community/louvain_undirected_multiplex.go @@ -0,0 +1,845 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package community + +import ( + "fmt" + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/iterator" +) + +// UndirectedMultiplex is an undirected multiplex graph. +type UndirectedMultiplex interface { + Multiplex + + // Layer returns the lth layer of the + // multiplex graph. + Layer(l int) graph.Undirected +} + +// qUndirectedMultiplex returns the modularity Q score of the multiplex graph layers +// subdivided into the given communities at the given resolutions and weights. Q is +// returned as the vector of weighted Q scores for each layer of the multiplex graph. +// If communities is nil, the unclustered modularity score is returned. +// If weights is nil layers are equally weighted, otherwise the length of +// weights must equal the number of layers. If resolutions is nil, a resolution +// of 1.0 is used for all layers, otherwise either a single element slice may be used +// to specify a global resolution, or the length of resolutions must equal the number +// of layers. The resolution parameter is γ as defined in Reichardt and Bornholdt +// doi:10.1103/PhysRevE.74.016110. +// qUndirectedMultiplex will panic if the graph has any layer weight-scaled edge with +// negative edge weight. 
+// +// Q_{layer} = w_{layer} \sum_{ij} [ A_{layer}*_{ij} - (\gamma_{layer} k_i k_j)/2m ] \delta(c_i,c_j) +// +// Note that Q values for multiplex graphs are not scaled by the total layer edge weight. +// +// graph.Undirect may be used as a shim to allow calculation of Q for +// directed graphs. +func qUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64) []float64 { + q := make([]float64, g.Depth()) + nodes := graph.NodesOf(g.Nodes()) + layerWeight := 1.0 + layerResolution := 1.0 + if len(resolutions) == 1 { + layerResolution = resolutions[0] + } + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + + if weights != nil { + layerWeight = weights[l] + } + if layerWeight == 0 { + continue + } + + if len(resolutions) > 1 { + layerResolution = resolutions[l] + } + + var weight func(xid, yid int64) float64 + if layerWeight < 0 { + weight = negativeWeightFuncFor(layer) + } else { + weight = positiveWeightFuncFor(layer) + } + + // Calculate the total edge weight of the layer + // and the table of penetrating edge weight sums. + var m2 float64 + k := make(map[int64]float64, len(nodes)) + for _, u := range nodes { + uid := u.ID() + w := weight(uid, uid) + to := layer.From(uid) + for to.Next() { + w += weight(uid, to.Node().ID()) + } + m2 += w + k[uid] = w + } + + if communities == nil { + var qLayer float64 + for _, u := range nodes { + uid := u.ID() + kU := k[uid] + qLayer += weight(uid, uid) - layerResolution*kU*kU/m2 + } + q[l] = layerWeight * qLayer + continue + } + + // Iterate over the communities, calculating + // the non-self edge weights for the upper + // triangle and adjust the diagonal. + var qLayer float64 + for _, c := range communities { + for i, u := range c { + uid := u.ID() + kU := k[uid] + qLayer += weight(uid, uid) - layerResolution*kU*kU/m2 + for _, v := range c[i+1:] { + vid := v.ID() + qLayer += 2 * (weight(uid, vid) - layerResolution*kU*k[vid]/m2) + } + } + } + q[l] = layerWeight * qLayer + } + + return q +} + +// UndirectedLayers implements UndirectedMultiplex. +type UndirectedLayers []graph.Undirected + +// NewUndirectedLayers returns an UndirectedLayers using the provided layers +// ensuring there is a match between IDs for each layer. +func NewUndirectedLayers(layers ...graph.Undirected) (UndirectedLayers, error) { + if len(layers) == 0 { + return nil, nil + } + base := make(set.Int64s) + nodes := layers[0].Nodes() + for nodes.Next() { + base.Add(nodes.Node().ID()) + } + for i, l := range layers[1:] { + next := make(set.Int64s) + nodes := l.Nodes() + for nodes.Next() { + next.Add(nodes.Node().ID()) + } + if !set.Int64sEqual(next, base) { + return nil, fmt.Errorf("community: layer ID mismatch between layers: %d", i+1) + } + } + return layers, nil +} + +// Nodes returns the nodes of the receiver. +func (g UndirectedLayers) Nodes() graph.Nodes { + if len(g) == 0 { + return nil + } + return g[0].Nodes() +} + +// Depth returns the depth of the multiplex graph. +func (g UndirectedLayers) Depth() int { return len(g) } + +// Layer returns the lth layer of the multiplex graph. +func (g UndirectedLayers) Layer(l int) graph.Undirected { return g[l] } + +// louvainUndirectedMultiplex returns the hierarchical modularization of g at the given resolution +// using the Louvain algorithm. If all is true and g has negatively weighted layers, all +// communities will be searched during the modularization. If src is nil, rand.Intn is +// used as the random generator. 
louvainUndirectedMultiplex will panic if g has any edge with +// edge weight that does not sign-match the layer weight. +// +// graph.Undirect may be used as a shim to allow modularization of directed graphs. +func louvainUndirectedMultiplex(g UndirectedMultiplex, weights, resolutions []float64, all bool, src rand.Source) *ReducedUndirectedMultiplex { + if weights != nil && len(weights) != g.Depth() { + panic("community: weights vector length mismatch") + } + if resolutions != nil && len(resolutions) != 1 && len(resolutions) != g.Depth() { + panic("community: resolutions vector length mismatch") + } + + // See louvain.tex for a detailed description + // of the algorithm used here. + + c := reduceUndirectedMultiplex(g, nil, weights) + rnd := rand.Intn + if src != nil { + rnd = rand.New(src).Intn + } + for { + l := newUndirectedMultiplexLocalMover(c, c.communities, weights, resolutions, all) + if l == nil { + return c + } + if done := l.localMovingHeuristic(rnd); done { + return c + } + c = reduceUndirectedMultiplex(c, l.communities, weights) + } +} + +// ReducedUndirectedMultiplex is an undirected graph of communities derived from a +// parent graph by reduction. +type ReducedUndirectedMultiplex struct { + // nodes is the set of nodes held + // by the graph. In a ReducedUndirectedMultiplex + // the node ID is the index into + // nodes. + nodes []multiplexCommunity + layers []undirectedEdges + + // communities is the community + // structure of the graph. + communities [][]graph.Node + + parent *ReducedUndirectedMultiplex +} + +var ( + _ UndirectedMultiplex = (*ReducedUndirectedMultiplex)(nil) + _ graph.WeightedUndirected = (*undirectedLayerHandle)(nil) +) + +// Nodes returns all the nodes in the graph. +func (g *ReducedUndirectedMultiplex) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.nodes)) + for i := range g.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// Depth returns the number of layers in the multiplex graph. +func (g *ReducedUndirectedMultiplex) Depth() int { return len(g.layers) } + +// Layer returns the lth layer of the multiplex graph. +func (g *ReducedUndirectedMultiplex) Layer(l int) graph.Undirected { + return undirectedLayerHandle{multiplex: g, layer: l} +} + +// Communities returns the community memberships of the nodes in the +// graph used to generate the reduced graph. +func (g *ReducedUndirectedMultiplex) Communities() [][]graph.Node { + communities := make([][]graph.Node, len(g.communities)) + if g.parent == nil { + for i, members := range g.communities { + comm := make([]graph.Node, len(members)) + for j, n := range members { + nodes := g.nodes[n.ID()].nodes + if len(nodes) != 1 { + panic("community: unexpected number of nodes in base graph community") + } + comm[j] = nodes[0] + } + communities[i] = comm + } + return communities + } + sub := g.parent.Communities() + for i, members := range g.communities { + var comm []graph.Node + for _, n := range members { + comm = append(comm, sub[n.ID()]...) + } + communities[i] = comm + } + return communities +} + +// Structure returns the community structure of the current level of +// the module clustering. The first index of the returned value +// corresponds to the index of the nodes in the next higher level if +// it exists. The returned value should not be mutated. +func (g *ReducedUndirectedMultiplex) Structure() [][]graph.Node { + return g.communities +} + +// Expanded returns the next lower level of the module clustering or nil +// if at the lowest level. 
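The multiplex entry points above can be exercised in the same in-package way. The sketch below is an assumption-laden illustration, not part of the vendored code: it builds two undirected layers over the same node IDs (a requirement enforced by NewUndirectedLayers), runs louvainUndirectedMultiplex with equal layer weights and unit resolutions (nil slices), and reports the per-layer Q vector from qUndirectedMultiplex. The simple graph constructors are again assumed from gonum.org/v1/gonum/graph/simple.

package community

import (
	"fmt"

	"gonum.org/v1/gonum/graph/simple"
)

func sketchLouvainUndirectedMultiplex() {
	friendship := simple.NewUndirectedGraph()
	for _, e := range [][2]int64{{0, 1}, {1, 2}, {2, 0}, {3, 4}} {
		friendship.SetEdge(simple.Edge{F: simple.Node(e[0]), T: simple.Node(e[1])})
	}
	work := simple.NewUndirectedGraph()
	for _, e := range [][2]int64{{0, 3}, {3, 4}, {4, 0}, {1, 2}} {
		work.SetEdge(simple.Edge{F: simple.Node(e[0]), T: simple.Node(e[1])})
	}

	// Both layers hold exactly the node IDs 0..4, so NewUndirectedLayers accepts them.
	layers, err := NewUndirectedLayers(friendship, work)
	if err != nil {
		panic(err)
	}

	r := louvainUndirectedMultiplex(layers, nil, nil, false, nil)
	fmt.Println(r.Communities())
	fmt.Println(qUndirectedMultiplex(layers, r.Communities(), nil, nil))
}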
+func (g *ReducedUndirectedMultiplex) Expanded() ReducedMultiplex { + return g.parent +} + +// reduceUndirectedMultiplex returns a reduced graph constructed from g divided +// into the given communities. The communities value is mutated +// by the call to reduceUndirectedMultiplex. If communities is nil and g is a +// ReducedUndirectedMultiplex, it is returned unaltered. +func reduceUndirectedMultiplex(g UndirectedMultiplex, communities [][]graph.Node, weights []float64) *ReducedUndirectedMultiplex { + if communities == nil { + if r, ok := g.(*ReducedUndirectedMultiplex); ok { + return r + } + + nodes := graph.NodesOf(g.Nodes()) + // TODO(kortschak) This sort is necessary really only + // for testing. In practice we would not be using the + // community provided by the user for a Q calculation. + // Probably we should use a function to map the + // communities in the test sets to the remapped order. + sort.Sort(ordered.ByID(nodes)) + communities = make([][]graph.Node, len(nodes)) + for i := range nodes { + communities[i] = []graph.Node{node(i)} + } + + r := ReducedUndirectedMultiplex{ + nodes: make([]multiplexCommunity, len(nodes)), + layers: make([]undirectedEdges, g.Depth()), + communities: communities, + } + communityOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + r.nodes[i] = multiplexCommunity{id: i, nodes: []graph.Node{n}, weights: make([]float64, depth(weights))} + communityOf[n.ID()] = i + } + for i := range r.layers { + r.layers[i] = undirectedEdges{ + edges: make([][]int, len(nodes)), + weights: make(map[[2]int]float64), + } + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(xid, yid int64) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for _, u := range nodes { + var out []int + uid := u.ID() + ucid := communityOf[uid] + to := layer.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + if vcid != ucid { + out = append(out, vcid) + } + if ucid < vcid { + // Only store the weight once. + r.layers[l].weights[[2]int{ucid, vcid}] = sign * weight(uid, vid) + } + } + r.layers[l].edges[ucid] = out + } + } + return &r + } + + // Remove zero length communities destructively. + var commNodes int + for i := 0; i < len(communities); { + comm := communities[i] + if len(comm) == 0 { + communities[i] = communities[len(communities)-1] + communities[len(communities)-1] = nil + communities = communities[:len(communities)-1] + } else { + commNodes += len(comm) + i++ + } + } + + r := ReducedUndirectedMultiplex{ + nodes: make([]multiplexCommunity, len(communities)), + layers: make([]undirectedEdges, g.Depth()), + } + communityOf := make(map[int64]int, commNodes) + for i, comm := range communities { + r.nodes[i] = multiplexCommunity{id: i, nodes: comm, weights: make([]float64, depth(weights))} + for _, n := range comm { + communityOf[n.ID()] = i + } + } + for i := range r.layers { + r.layers[i] = undirectedEdges{ + edges: make([][]int, len(communities)), + weights: make(map[[2]int]float64), + } + } + r.communities = make([][]graph.Node, len(communities)) + for i := range r.communities { + r.communities[i] = []graph.Node{node(i)} + } + if g, ok := g.(*ReducedUndirectedMultiplex); ok { + // Make sure we retain the truncated + // community structure. 
+ g.communities = communities + r.parent = g + } + w := 1.0 + for l := 0; l < g.Depth(); l++ { + layer := g.Layer(l) + if weights != nil { + w = weights[l] + } + if w == 0 { + continue + } + var sign float64 + var weight func(xid, yid int64) float64 + if w < 0 { + sign, weight = -1, negativeWeightFuncFor(layer) + } else { + sign, weight = 1, positiveWeightFuncFor(layer) + } + for ucid, comm := range communities { + var out []int + for i, u := range comm { + uid := u.ID() + r.nodes[ucid].weights[l] += sign * weight(uid, uid) + for _, v := range comm[i+1:] { + r.nodes[ucid].weights[l] += 2 * sign * weight(uid, v.ID()) + } + to := layer.From(uid) + for to.Next() { + vid := to.Node().ID() + vcid := communityOf[vid] + found := false + for _, e := range out { + if e == vcid { + found = true + break + } + } + if !found && vcid != ucid { + out = append(out, vcid) + } + if ucid < vcid { + // Only store the weight once. + r.layers[l].weights[[2]int{ucid, vcid}] += sign * weight(uid, vid) + } + } + } + r.layers[l].edges[ucid] = out + } + } + return &r +} + +// undirectedLayerHandle is a handle to a multiplex graph layer. +type undirectedLayerHandle struct { + // multiplex is the complete + // multiplex graph. + multiplex *ReducedUndirectedMultiplex + + // layer is an index into the + // multiplex for the current + // layer. + layer int +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g undirectedLayerHandle) Node(id int64) graph.Node { + if g.has(id) { + return g.multiplex.nodes[id] + } + return nil +} + +// has returns whether the node exists within the graph. +func (g undirectedLayerHandle) has(id int64) bool { + return 0 <= id && id < int64(len(g.multiplex.nodes)) +} + +// Nodes returns all the nodes in the graph. +func (g undirectedLayerHandle) Nodes() graph.Nodes { + nodes := make([]graph.Node, len(g.multiplex.nodes)) + for i := range g.multiplex.nodes { + nodes[i] = node(i) + } + return iterator.NewOrderedNodes(nodes) +} + +// From returns all nodes in g that can be reached directly from u. +func (g undirectedLayerHandle) From(uid int64) graph.Nodes { + out := g.multiplex.layers[g.layer].edges[uid] + nodes := make([]graph.Node, len(out)) + for i, vid := range out { + nodes[i] = g.multiplex.nodes[vid] + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g undirectedLayerHandle) HasEdgeBetween(xid, yid int64) bool { + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return false + } + if xid > yid { + xid, yid = yid, xid + } + _, ok := g.multiplex.layers[g.layer].weights[[2]int{int(xid), int(yid)}] + return ok +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g undirectedLayerHandle) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g undirectedLayerHandle) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g undirectedLayerHandle) EdgeBetween(xid, yid int64) graph.Edge { + return g.WeightedEdgeBetween(xid, yid) +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. 
+func (g undirectedLayerHandle) WeightedEdgeBetween(xid, yid int64) graph.WeightedEdge { + if xid == yid || !isValidID(xid) || !isValidID(yid) { + return nil + } + if yid < xid { + xid, yid = yid, xid + } + w, ok := g.multiplex.layers[g.layer].weights[[2]int{int(xid), int(yid)}] + if !ok { + return nil + } + return multiplexEdge{from: g.multiplex.nodes[xid], to: g.multiplex.nodes[yid], weight: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g undirectedLayerHandle) Weight(xid, yid int64) (w float64, ok bool) { + if !isValidID(xid) || !isValidID(yid) { + return 0, false + } + if xid == yid { + return g.multiplex.nodes[xid].weights[g.layer], true + } + if xid > yid { + xid, yid = yid, xid + } + w, ok = g.multiplex.layers[g.layer].weights[[2]int{int(xid), int(yid)}] + return w, ok +} + +// undirectedMultiplexLocalMover is a step in graph modularity optimization. +type undirectedMultiplexLocalMover struct { + g *ReducedUndirectedMultiplex + + // nodes is the set of working nodes. + nodes []graph.Node + // edgeWeightOf is the weighted degree + // of each node indexed by ID. + edgeWeightOf [][]float64 + + // m2 is the total sum of + // edge weights in g. + m2 []float64 + + // weight is the weight function + // provided by g or a function + // that returns the Weight value + // of the non-nil edge between x + // and y. + weight []func(xid, yid int64) float64 + + // communities is the current + // division of g. + communities [][]graph.Node + // memberships is a mapping between + // node ID and community membership. + memberships []int + + // resolution is the Reichardt and + // Bornholdt γ parameter as defined + // in doi:10.1103/PhysRevE.74.016110. + resolutions []float64 + + // weights is the layer weights for + // the modularisation. + weights []float64 + + // searchAll specifies whether the local + // mover should consider non-connected + // communities during the local moving + // heuristic. + searchAll bool + + // moved indicates that a call to + // move has been made since the last + // call to shuffle. + moved bool + + // changed indicates that a move + // has been made since the creation + // of the local mover. + changed bool +} + +// newUndirectedMultiplexLocalMover returns a new undirectedMultiplexLocalMover initialized with +// the graph g, a set of communities and a modularity resolution parameter. The +// node IDs of g must be contiguous in [0,n) where n is the number of nodes. +// If g has a zero edge weight sum, nil is returned. +func newUndirectedMultiplexLocalMover(g *ReducedUndirectedMultiplex, communities [][]graph.Node, weights, resolutions []float64, all bool) *undirectedMultiplexLocalMover { + nodes := graph.NodesOf(g.Nodes()) + l := undirectedMultiplexLocalMover{ + g: g, + nodes: nodes, + edgeWeightOf: make([][]float64, g.Depth()), + m2: make([]float64, g.Depth()), + communities: communities, + memberships: make([]int, len(nodes)), + resolutions: resolutions, + weights: weights, + weight: make([]func(xid, yid int64) float64, g.Depth()), + } + + // Calculate the total edge weight of the graph + // and degree weights for each node. 
+ var zero int + for i := 0; i < g.Depth(); i++ { + l.edgeWeightOf[i] = make([]float64, len(nodes)) + var weight func(xid, yid int64) float64 + + if weights != nil { + if weights[i] == 0 { + zero++ + continue + } + if weights[i] < 0 { + weight = negativeWeightFuncFor(g.Layer(i)) + l.searchAll = all + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + } else { + weight = positiveWeightFuncFor(g.Layer(i)) + } + + l.weight[i] = weight + layer := g.Layer(i) + for _, u := range l.nodes { + uid := u.ID() + w := weight(uid, uid) + to := layer.From(uid) + for to.Next() { + w += weight(uid, to.Node().ID()) + } + l.edgeWeightOf[i][uid] = w + l.m2[i] += w + } + if l.m2[i] == 0 { + zero++ + } + } + if zero == g.Depth() { + return nil + } + + // Assign membership mappings. + for i, c := range communities { + for _, u := range c { + l.memberships[u.ID()] = i + } + } + + return &l +} + +// localMovingHeuristic performs the Louvain local moving heuristic until +// no further moves can be made. It returns a boolean indicating that the +// undirectedMultiplexLocalMover has not made any improvement to the community +// structure and so the Louvain algorithm is done. +func (l *undirectedMultiplexLocalMover) localMovingHeuristic(rnd func(int) int) (done bool) { + for { + l.shuffle(rnd) + for _, n := range l.nodes { + dQ, dst, src := l.deltaQ(n) + if dQ <= 0 { + continue + } + l.move(dst, src) + } + if !l.moved { + return !l.changed + } + } +} + +// shuffle performs a Fisher-Yates shuffle on the nodes held by the +// undirectedMultiplexLocalMover using the random source rnd which should return +// an integer in the range [0,n). +func (l *undirectedMultiplexLocalMover) shuffle(rnd func(n int) int) { + l.moved = false + for i := range l.nodes[:len(l.nodes)-1] { + j := i + rnd(len(l.nodes)-i) + l.nodes[i], l.nodes[j] = l.nodes[j], l.nodes[i] + } +} + +// move moves the node at src to the community at dst. +func (l *undirectedMultiplexLocalMover) move(dst int, src commIdx) { + l.moved = true + l.changed = true + + srcComm := l.communities[src.community] + n := srcComm[src.node] + + l.memberships[n.ID()] = dst + + l.communities[dst] = append(l.communities[dst], n) + srcComm[src.node], srcComm[len(srcComm)-1] = srcComm[len(srcComm)-1], nil + l.communities[src.community] = srcComm[:len(srcComm)-1] +} + +// deltaQ returns the highest gain in modularity attainable by moving +// n from its current community to another connected community and +// the index of the chosen destination. The index into the +// undirectedMultiplexLocalMover's communities field is returned in src if n +// is in communities. +func (l *undirectedMultiplexLocalMover) deltaQ(n graph.Node) (deltaQ float64, dst int, src commIdx) { + id := n.ID() + + var iterator minTaker + if l.searchAll { + iterator = &dense{n: len(l.communities)} + } else { + // Find communities connected to n. + connected := make(set.Ints) + // The following for loop is equivalent to: + // + // for i := 0; i < l.g.Depth(); i++ { + // for _, v := range l.g.Layer(i).From(n) { + // connected.Add(l.memberships[v.ID()]) + // } + // } + // + // This is done to avoid an allocation for + // each layer. + for _, layer := range l.g.layers { + for _, vid := range layer.edges[id] { + connected.Add(l.memberships[vid]) + } + } + // Insert the node's own community. + connected.Add(l.memberships[id]) + iterator = newSlice(connected) + } + + // Calculate the highest modularity gain + // from moving into another community and + // keep the index of that community. 
+ var dQremove float64 + dQadd, dst, src := math.Inf(-1), -1, commIdx{-1, -1} + var i int + for iterator.TakeMin(&i) { + c := l.communities[i] + var removal bool + var _dQadd float64 + for layer := 0; layer < l.g.Depth(); layer++ { + m2 := l.m2[layer] + if m2 == 0 { + // Do not consider layers with zero sum edge weight. + continue + } + w := 1.0 + if l.weights != nil { + w = l.weights[layer] + } + if w == 0 { + // Do not consider layers with zero weighting. + continue + } + + var k_aC, sigma_totC float64 // C is a substitution for ^𝛼 or ^𝛽. + removal = false + for j, u := range c { + uid := u.ID() + if uid == id { + // Only mark and check src community on the first layer. + if layer == 0 { + if src.community != -1 { + panic("community: multiple sources") + } + src = commIdx{i, j} + } + removal = true + } + + k_aC += l.weight[layer](id, uid) + // sigma_totC could be kept for each community + // and updated for moves, changing the calculation + // of sigma_totC here from O(n_c) to O(1), but + // in practice the time savings do not appear + // to be compelling and do not make up for the + // increase in code complexity and space required. + sigma_totC += l.edgeWeightOf[layer][uid] + } + + a_aa := l.weight[layer](id, id) + k_a := l.edgeWeightOf[layer][id] + gamma := 1.0 + if l.resolutions != nil { + if len(l.resolutions) == 1 { + gamma = l.resolutions[0] + } else { + gamma = l.resolutions[layer] + } + } + + // See louvain.tex for a derivation of these equations. + // The weighting term, w, is described in V Traag, + // "Algorithms and dynamical models for communities and + // reputation in social networks", chapter 5. + // http://www.traag.net/wp/wp-content/papercite-data/pdf/traag_algorithms_2013.pdf + switch { + case removal: + // The community c was the current community, + // so calculate the change due to removal. + dQremove += w * (k_aC /*^𝛼*/ - a_aa - gamma*k_a*(sigma_totC /*^𝛼*/ -k_a)/m2) + + default: + // Otherwise calculate the change due to an addition + // to c. + _dQadd += w * (k_aC /*^𝛽*/ - gamma*k_a*sigma_totC /*^𝛽*/ /m2) + } + } + if !removal && _dQadd > dQadd { + dQadd = _dQadd + dst = i + } + } + + return 2 * (dQadd - dQremove), dst, src +} diff --git a/vendor/gonum.org/v1/gonum/graph/doc.go b/vendor/gonum.org/v1/gonum/graph/doc.go new file mode 100644 index 0000000..7eedd09 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/doc.go @@ -0,0 +1,9 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package graph defines graph interfaces. +// +// Routines to test contract compliance by user implemented graph types +// are available in gonum.org/v1/gonum/graph/testgraph. +package graph // import "gonum.org/v1/gonum/graph" diff --git a/vendor/gonum.org/v1/gonum/graph/encoding/digraph6/digraph6.go b/vendor/gonum.org/v1/gonum/graph/encoding/digraph6/digraph6.go new file mode 100644 index 0000000..ef10812 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/encoding/digraph6/digraph6.go @@ -0,0 +1,338 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package digraph6 implements graphs specified by digraph6 strings. 
+package digraph6 // import "gonum.org/v1/gonum/graph/encoding/digraph6" + +import ( + "fmt" + "math/big" + "sort" + "strings" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/graph/simple" +) + +// Graph is a digraph6-represented directed graph. +// +// See https://users.cecs.anu.edu.au/~bdm/data/formats.txt for details. +// +// Note that the digraph6 format specifies that the first character of the graph +// string is a '&'. This character must be present for use in the digraph6 package. +// A Graph without this prefix is treated as the null graph. +type Graph string + +var ( + d6 Graph + + _ graph.Graph = d6 + _ graph.Directed = d6 +) + +// Encode returns a graph6 encoding of the topology of the given graph using a +// lexical ordering of the nodes by ID to map them to [0, n). +func Encode(g graph.Graph) Graph { + nodes := graph.NodesOf(g.Nodes()) + n := len(nodes) + sort.Sort(ordered.ByID(nodes)) + indexOf := make(map[int64]int, n) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + size := n * n + var b big.Int + for i, u := range nodes { + it := g.From(u.ID()) + for it.Next() { + vid := it.Node().ID() + j := indexOf[vid] + b.SetBit(&b, bitFor(int64(i), int64(j), int64(n)), 1) + } + } + + var buf strings.Builder + buf.WriteByte('&') + switch { + case n < 63: + buf.WriteByte(byte(n) + 63) + case n < 258048: + buf.Write([]byte{126, byte(n>>12) + 63, byte(n>>6) + 63, byte(n) + 63}) + case n < 68719476736: + buf.Write([]byte{126, 126, byte(n>>30) + 63, byte(n>>24) + 63, byte(n>>18) + 63, byte(n>>12) + 63, byte(n>>6) + 63, byte(n) + 63}) + default: + panic("digraph6: too large") + } + + var c byte + for i := 0; i < size; i++ { + bit := i % 6 + c |= byte(b.Bit(i)) << uint(5-bit) + if bit == 5 { + buf.WriteByte(c + 63) + c = 0 + } + } + if size%6 != 0 { + buf.WriteByte(c + 63) + } + + return Graph(buf.String()) +} + +// IsValid returns whether the graph is a valid digraph6 encoding. An invalid Graph +// behaves as the null graph. +func IsValid(g Graph) bool { + n := int(numberOf(g)) + if n < 0 { + return false + } + size := (n*n + 5) / 6 // ceil(n^2 / 6) + g = g[1:] + switch { + case g[0] != 126: + return len(g[1:]) == size + case g[1] != 126: + return len(g[4:]) == size + default: + return len(g[8:]) == size + } +} + +// Edge returns the edge from u to v, with IDs uid and vid, if such an edge +// exists and nil otherwise. The node v must be directly reachable from u as +// defined by the From method. +func (g Graph) Edge(uid, vid int64) graph.Edge { + if !IsValid(g) { + return nil + } + if !g.HasEdgeFromTo(uid, vid) { + return nil + } + return simple.Edge{simple.Node(uid), simple.Node(vid)} +} + +// From returns all nodes that can be reached directly from the node with the +// given ID. +func (g Graph) From(id int64) graph.Nodes { + if !IsValid(g) { + return graph.Empty + } + if g.Node(id) == nil { + return nil + } + return &d6ForwardIterator{g: g, from: id, to: -1} +} + +// HasEdgeBetween returns whether an edge exists between nodes with IDs xid +// and yid without considering direction. +func (g Graph) HasEdgeBetween(xid, yid int64) bool { + if !IsValid(g) { + return false + } + return g.HasEdgeFromTo(xid, yid) || g.HasEdgeFromTo(yid, xid) +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v with +// IDs uid and vid. 
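Because Encode, IsValid and the Graph query methods are exported, the digraph6 encoding can be exercised directly; the short sketch below round-trips a three-node directed path. It assumes the simple.NewDirectedGraph constructor and its NewEdge/SetEdge methods from gonum.org/v1/gonum/graph/simple, which are not part of the code shown here.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/encoding/digraph6"
	"gonum.org/v1/gonum/graph/simple"
)

func main() {
	g := simple.NewDirectedGraph()
	g.SetEdge(g.NewEdge(simple.Node(0), simple.Node(1)))
	g.SetEdge(g.NewEdge(simple.Node(1), simple.Node(2)))

	d6 := digraph6.Encode(g)
	fmt.Println(d6, digraph6.IsValid(d6))                       // the encoded string, true
	fmt.Println(d6.HasEdgeFromTo(0, 1), d6.HasEdgeFromTo(1, 0)) // true false
}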
+func (g Graph) HasEdgeFromTo(uid, vid int64) bool { + if !IsValid(g) { + return false + } + if uid == vid { + return false + } + n := numberOf(g) + if uid < 0 || n <= uid { + return false + } + if vid < 0 || n <= vid { + return false + } + return isSet(bitFor(uid, vid, n), g) +} + +// Node returns the node with the given ID if it exists in the graph, and nil +// otherwise. +func (g Graph) Node(id int64) graph.Node { + if !IsValid(g) { + return nil + } + if id < 0 || numberOf(g) <= id { + return nil + } + return simple.Node(id) +} + +// Nodes returns all the nodes in the graph. +func (g Graph) Nodes() graph.Nodes { + if !IsValid(g) { + return graph.Empty + } + return iterator.NewImplicitNodes(0, int(numberOf(g)), func(id int) graph.Node { return simple.Node(id) }) +} + +// To returns all nodes that can reach directly to the node with the given ID. +func (g Graph) To(id int64) graph.Nodes { + if !IsValid(g) || g.Node(id) == nil { + return graph.Empty + } + return &d6ReverseIterator{g: g, from: -1, to: id} +} + +// d6ForwardIterator is a graph.Nodes for digraph6 graph edges for forward hops. +type d6ForwardIterator struct { + g Graph + from int64 + to int64 +} + +var _ graph.Nodes = (*d6ForwardIterator)(nil) + +func (i *d6ForwardIterator) Next() bool { + n := numberOf(i.g) + for i.to < n-1 { + i.to++ + if i.to != i.from && isSet(bitFor(i.from, i.to, n), i.g) { + return true + } + } + return false +} + +func (i *d6ForwardIterator) Len() int { + var cnt int + n := numberOf(i.g) + for to := i.to; to < n-1; { + to++ + if to != i.from && isSet(bitFor(i.from, to, n), i.g) { + cnt++ + } + } + return cnt +} + +func (i *d6ForwardIterator) Reset() { i.to = -1 } + +func (i *d6ForwardIterator) Node() graph.Node { return simple.Node(i.to) } + +// d6ReverseIterator is a graph.Nodes for digraph6 graph edges for reverse hops. +type d6ReverseIterator struct { + g Graph + from int64 + to int64 +} + +var _ graph.Nodes = (*d6ReverseIterator)(nil) + +func (i *d6ReverseIterator) Next() bool { + n := numberOf(i.g) + for i.from < n-1 { + i.from++ + if i.to != i.from && isSet(bitFor(i.from, i.to, n), i.g) { + return true + } + } + return false +} + +func (i *d6ReverseIterator) Len() int { + var cnt int + n := numberOf(i.g) + for from := i.from; from < n-1; { + from++ + if from != i.to && isSet(bitFor(from, i.to, n), i.g) { + cnt++ + } + } + return cnt +} + +func (i *d6ReverseIterator) Reset() { i.from = -1 } + +func (i *d6ReverseIterator) Node() graph.Node { return simple.Node(i.from) } + +// numberOf returns the digraph6-encoded number corresponding to g. +func numberOf(g Graph) int64 { + if len(g) < 2 { + return -1 + } + if g[0] != '&' { + return -1 + } + g = g[1:] + if g[0] != 126 { + return int64(g[0] - 63) + } + if len(g) < 4 { + return -1 + } + if g[1] != 126 { + return int64(g[1]-63)<<12 | int64(g[2]-63)<<6 | int64(g[3]-63) + } + if len(g) < 8 { + return -1 + } + return int64(g[2]-63)<<30 | int64(g[3]-63)<<24 | int64(g[4]-63)<<18 | int64(g[5]-63)<<12 | int64(g[6]-63)<<6 | int64(g[7]-63) +} + +// bitFor returns the index into the digraph6 adjacency matrix for uid->vid in a graph +// order n. +func bitFor(uid, vid, n int64) int { + return int(uid*n + vid) +} + +// isSet returns whether the given bit of the adjacency matrix is set. 
+func isSet(bit int, g Graph) bool {
+	g = g[1:]
+	switch {
+	case g[0] != 126:
+		g = g[1:]
+	case g[1] != 126:
+		g = g[4:]
+	default:
+		g = g[8:]
+	}
+	if bit/6 >= len(g) {
+		panic("digraph6: index out of range")
+	}
+	return (g[bit/6]-63)&(1<<uint(5-bit%6)) != 0
+}
+
+// appendSubgraphNode appends the given node to the slice of nodes processed
+// within the context of a subgraph.
+func (gen *generator) appendSubgraphNode(n graph.Node) {
+	gen.subNodes = append(gen.subNodes, n)
+}
+
+type multiGraph struct{ generator }
+
+// addStmt adds the given statement to the multigraph.
+func (gen *multiGraph) addStmt(dst encoding.MultiBuilder, stmt ast.Stmt) {
+	switch stmt := stmt.(type) {
+	case *ast.NodeStmt:
+		n, ok := gen.node(dst, stmt.Node.ID).(encoding.AttributeSetter)
+		if !ok {
+			return
+		}
+		for _, attr := range stmt.Attrs {
+			a := encoding.Attribute{
+				Key:   unquoteID(attr.Key),
+				Value: unquoteID(attr.Val),
+			}
+			if err := n.SetAttribute(a); err != nil {
+				panic(fmt.Errorf("unable to unmarshal node DOT attribute (%s=%s): %v", a.Key, a.Value, err))
+			}
+		}
+	case *ast.EdgeStmt:
+		gen.addEdgeStmt(dst, stmt)
+	case *ast.AttrStmt:
+		var n encoding.AttributeSetter
+		var dst string
+		switch stmt.Kind {
+		case ast.GraphKind:
+			if gen.graphAttr == nil {
+				return
+			}
+			n = gen.graphAttr
+			dst = "graph"
+		case ast.NodeKind:
+			if gen.nodeAttr == nil {
+				return
+			}
+			n = gen.nodeAttr
+			dst = "node"
+		case ast.EdgeKind:
+			if gen.edgeAttr == nil {
+				return
+			}
+			n = gen.edgeAttr
+			dst = "edge"
+		default:
+			panic("unreachable")
+		}
+		for _, attr := range stmt.Attrs {
+			a := encoding.Attribute{
+				Key:   unquoteID(attr.Key),
+				Value: unquoteID(attr.Val),
+			}
+			if err := n.SetAttribute(a); err != nil {
+				panic(fmt.Errorf("unable to unmarshal global %s DOT attribute (%s=%s): %v", dst, a.Key, a.Value, err))
+			}
+		}
+	case *ast.Attr:
+		// ignore.
+	case *ast.Subgraph:
+		for _, stmt := range stmt.Stmts {
+			gen.addStmt(dst, stmt)
+		}
+	default:
+		panic(fmt.Sprintf("unknown statement type %T", stmt))
+	}
+}
+
+// addEdgeStmt adds the given edge statement to the multigraph.
+func (gen *multiGraph) addEdgeStmt(dst encoding.MultiBuilder, stmt *ast.EdgeStmt) {
+	fs := gen.addVertex(dst, stmt.From)
+	ts := gen.addLine(dst, stmt.To, stmt.Attrs)
+	for _, f := range fs {
+		for _, t := range ts {
+			edge := dst.NewLine(f, t)
+			dst.SetLine(edge)
+			applyPortsToEdge(stmt.From, stmt.To, edge)
+			addEdgeAttrs(edge, stmt.Attrs)
+		}
+	}
+}
+
+// addVertex adds the given vertex to the multigraph, and returns its set of nodes.
+func (gen *multiGraph) addVertex(dst encoding.MultiBuilder, v ast.Vertex) []graph.Node {
+	switch v := v.(type) {
+	case *ast.Node:
+		n := gen.node(dst, v.ID)
+		return []graph.Node{n}
+	case *ast.Subgraph:
+		gen.pushSubgraph()
+		for _, stmt := range v.Stmts {
+			gen.addStmt(dst, stmt)
+		}
+		return gen.popSubgraph()
+	default:
+		panic(fmt.Sprintf("unknown vertex type %T", v))
+	}
+}
+
+// addLine adds the given edge to the multigraph, and returns its set of nodes.
+func (gen *multiGraph) addLine(dst encoding.MultiBuilder, to *ast.Edge, attrs []*ast.Attr) []graph.Node {
+	if !gen.directed && to.Directed {
+		panic(fmt.Errorf("directed edge to %v in undirected graph", to.Vertex))
+	}
+	fs := gen.addVertex(dst, to.Vertex)
+	if to.To != nil {
+		ts := gen.addLine(dst, to.To, attrs)
+		for _, f := range fs {
+			for _, t := range ts {
+				edge := dst.NewLine(f, t)
+				dst.SetLine(edge)
+				applyPortsToEdge(to.Vertex, to.To, edge)
+				addEdgeAttrs(edge, attrs)
+			}
+		}
+	}
+	return fs
+}
+
+// addEdgeAttrs adds the attributes to the given edge.
+func addEdgeAttrs(edge basicEdge, attrs []*ast.Attr) { + e, ok := edge.(encoding.AttributeSetter) + if !ok { + return + } + for _, attr := range attrs { + a := encoding.Attribute{ + Key: unquoteID(attr.Key), + Value: unquoteID(attr.Val), + } + if err := e.SetAttribute(a); err != nil { + panic(fmt.Errorf("unable to unmarshal edge DOT attribute (%s=%s): %v", a.Key, a.Value, err)) + } + } +} + +// unquoteID unquotes the given string if needed in the context of an ID. If s +// is not already quoted the original string is returned. +func unquoteID(s string) string { + // To make round-trips idempotent, don't unquote quoted HTML-like strings + // + // /^"<.*>"$/ + if len(s) >= 4 && strings.HasPrefix(s, `"<`) && strings.HasSuffix(s, `>"`) { + return s + } + // Unquote quoted string if possible. + if t, err := strconv.Unquote(s); err == nil { + return t + } + // On error, either s is not quoted or s is quoted but contains invalid + // characters, in both cases we return the original string rather than + // panicking. + return s +} diff --git a/vendor/gonum.org/v1/gonum/graph/encoding/dot/doc.go b/vendor/gonum.org/v1/gonum/graph/encoding/dot/doc.go new file mode 100644 index 0000000..235ac0f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/encoding/dot/doc.go @@ -0,0 +1,21 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dot implements GraphViz DOT marshaling and unmarshaling of graphs. +// +// See the GraphViz DOT Guide and the DOT grammar for more information +// on using specific aspects of the DOT language: +// +// DOT Guide: https://www.graphviz.org/pdf/dotguide.pdf +// +// DOT grammar: http://www.graphviz.org/doc/info/lang.html +// +// Attribute quoting +// +// Attributes and IDs are quoted if needed during marshalling, to conform with +// valid DOT syntax. Quoted IDs and attributes are unquoted during unmarshaling, +// so the data is kept in raw form. As an exception, quoted text with a leading +// `"<` and a trailing `>"` is not unquoted to ensure preservation of the string +// during a round-trip. +package dot // import "gonum.org/v1/gonum/graph/encoding/dot" diff --git a/vendor/gonum.org/v1/gonum/graph/encoding/dot/encode.go b/vendor/gonum.org/v1/gonum/graph/encoding/dot/encode.go new file mode 100644 index 0000000..10fc7d9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/encoding/dot/encode.go @@ -0,0 +1,657 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dot + +import ( + "bytes" + "errors" + "fmt" + "regexp" + "sort" + "strconv" + "strings" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/encoding" + "gonum.org/v1/gonum/graph/internal/ordered" +) + +// Node is a DOT graph node. +type Node interface { + // DOTID returns a DOT node ID. + // + // An ID is one of the following: + // + // - a string of alphabetic ([a-zA-Z\x80-\xff]) characters, underscores ('_'). + // digits ([0-9]), not beginning with a digit. + // - a numeral [-]?(.[0-9]+ | [0-9]+(.[0-9]*)?). + // - a double-quoted string ("...") possibly containing escaped quotes (\"). + // - an HTML string (<...>). + DOTID() string +} + +// Attributers are graph.Graph values that specify top-level DOT +// attributes. 
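A small in-package sketch of the quoting policy described in doc.go above: plainly quoted IDs are unquoted, unquoted IDs pass through unchanged, and quoted HTML-like strings are kept verbatim so a marshal/unmarshal round-trip stays idempotent. unquoteID is unexported, so this only runs from inside package dot; the wrapping function is illustrative only.

package dot

import "fmt"

func sketchUnquoteID() {
	fmt.Println(unquoteID(`"hello world"`))   // hello world
	fmt.Println(unquoteID(`plain_id`))        // plain_id
	fmt.Println(unquoteID(`"<<b>bold</b>>"`)) // "<<b>bold</b>>"
}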
+type Attributers interface { + DOTAttributers() (graph, node, edge encoding.Attributer) +} + +// Porter defines the behavior of graph.Edge values that can specify +// connection ports for their end points. The returned port corresponds +// to the DOT node port to be used by the edge, compass corresponds +// to DOT compass point to which the edge will be aimed. +type Porter interface { + // FromPort returns the port and compass for + // the From node of a graph.Edge. + FromPort() (port, compass string) + + // ToPort returns the port and compass for + // the To node of a graph.Edge. + ToPort() (port, compass string) +} + +// Structurer represents a graph.Graph that can define subgraphs. +type Structurer interface { + Structure() []Graph +} + +// MultiStructurer represents a graph.Multigraph that can define subgraphs. +type MultiStructurer interface { + Structure() []Multigraph +} + +// Graph wraps named graph.Graph values. +type Graph interface { + graph.Graph + DOTID() string +} + +// Multigraph wraps named graph.Multigraph values. +type Multigraph interface { + graph.Multigraph + DOTID() string +} + +// Subgrapher wraps graph.Node values that represent subgraphs. +type Subgrapher interface { + Subgraph() graph.Graph +} + +// MultiSubgrapher wraps graph.Node values that represent subgraphs. +type MultiSubgrapher interface { + Subgraph() graph.Multigraph +} + +// Marshal returns the DOT encoding for the graph g, applying the prefix and +// indent to the encoding. Name is used to specify the graph name. If name is +// empty and g implements Graph, the returned string from DOTID will be used. +// +// Graph serialization will work for a graph.Graph without modification, +// however, advanced GraphViz DOT features provided by Marshal depend on +// implementation of the Node, Attributer, Porter, Attributers, Structurer, +// Subgrapher and Graph interfaces. +// +// Attributes and IDs are quoted if needed during marshalling. +func Marshal(g graph.Graph, name, prefix, indent string) ([]byte, error) { + var p simpleGraphPrinter + p.indent = indent + p.prefix = prefix + p.visited = make(map[edge]bool) + err := p.print(g, name, false, false) + if err != nil { + return nil, err + } + return p.buf.Bytes(), nil +} + +// MarshalMulti returns the DOT encoding for the multigraph g, applying the +// prefix and indent to the encoding. Name is used to specify the graph name. If +// name is empty and g implements Graph, the returned string from DOTID will be +// used. +// +// Graph serialization will work for a graph.Multigraph without modification, +// however, advanced GraphViz DOT features provided by Marshal depend on +// implementation of the Node, Attributer, Porter, Attributers, Structurer, +// MultiSubgrapher and Multigraph interfaces. +// +// Attributes and IDs are quoted if needed during marshalling. 
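As a usage sketch (not shipped in this patch), the Marshal entry point documented above can be driven directly from application code; the graph here is built with gonum's graph/simple package, which other files vendored by this commit also import.

package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/graph/encoding/dot"
	"gonum.org/v1/gonum/graph/simple"
)

func main() {
	// Build a small directed graph: 0 -> 1 -> 2.
	g := simple.NewDirectedGraph()
	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1)})
	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2)})

	// Marshal it to DOT with no prefix and a two-space indent.
	b, err := dot.Marshal(g, "example", "", "  ")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // strict digraph example { ... 0 -> 1; 1 -> 2; ... }
}

Richer output (node IDs, ports, attributes) comes from implementing the optional Node, Porter and Attributer interfaces described above on the graph's own node and edge types.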
+func MarshalMulti(g graph.Multigraph, name, prefix, indent string) ([]byte, error) { + var p multiGraphPrinter + p.indent = indent + p.prefix = prefix + p.visited = make(map[line]bool) + err := p.print(g, name, false, false) + if err != nil { + return nil, err + } + return p.buf.Bytes(), nil +} + +type printer struct { + buf bytes.Buffer + + prefix string + indent string + depth int + + err error +} + +type edge struct { + inGraph string + from, to int64 +} + +func (p *simpleGraphPrinter) print(g graph.Graph, name string, needsIndent, isSubgraph bool) error { + if name == "" { + if g, ok := g.(Graph); ok { + name = g.DOTID() + } + } + + _, isDirected := g.(graph.Directed) + p.printFrontMatter(name, needsIndent, isSubgraph, isDirected, true) + + if a, ok := g.(Attributers); ok { + p.writeAttributeComplex(a) + } + if s, ok := g.(Structurer); ok { + for _, g := range s.Structure() { + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.buf.WriteByte('\n') + p.print(g, g.DOTID(), true, true) + } + } + + nodes := graph.NodesOf(g.Nodes()) + sort.Sort(ordered.ByID(nodes)) + + havePrintedNodeHeader := false + for _, n := range nodes { + if s, ok := n.(Subgrapher); ok { + // If the node is not linked to any other node + // the graph needs to be written now. + if g.From(n.ID()).Len() == 0 { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.print(g, graphID(g, n), false, true) + } + continue + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.writeNode(n) + if a, ok := n.(encoding.Attributer); ok { + p.writeAttributeList(a) + } + p.buf.WriteByte(';') + } + + havePrintedEdgeHeader := false + for _, n := range nodes { + nid := n.ID() + to := graph.NodesOf(g.From(nid)) + sort.Sort(ordered.ByID(to)) + for _, t := range to { + tid := t.ID() + if isDirected { + if p.visited[edge{inGraph: name, from: nid, to: tid}] { + continue + } + p.visited[edge{inGraph: name, from: nid, to: tid}] = true + } else { + if p.visited[edge{inGraph: name, from: nid, to: tid}] { + continue + } + p.visited[edge{inGraph: name, from: nid, to: tid}] = true + p.visited[edge{inGraph: name, from: tid, to: n.ID()}] = true + } + + if !havePrintedEdgeHeader { + p.buf.WriteByte('\n') + p.buf.WriteString(strings.TrimRight(p.prefix, " \t\n")) // Trim whitespace suffix. 
+ p.newline() + p.buf.WriteString("// Edge definitions.") + havePrintedEdgeHeader = true + } + p.newline() + + if s, ok := n.(Subgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, n), false, true) + } else { + p.writeNode(n) + } + e := g.Edge(nid, tid) + porter, edgeIsPorter := e.(Porter) + if edgeIsPorter { + if e.From().ID() == nid { + p.writePorts(porter.FromPort()) + } else { + p.writePorts(porter.ToPort()) + } + } + + if isDirected { + p.buf.WriteString(" -> ") + } else { + p.buf.WriteString(" -- ") + } + + if s, ok := t.(Subgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, t), false, true) + } else { + p.writeNode(t) + } + if edgeIsPorter { + if e.From().ID() == nid { + p.writePorts(porter.ToPort()) + } else { + p.writePorts(porter.FromPort()) + } + } + + if a, ok := g.Edge(nid, tid).(encoding.Attributer); ok { + p.writeAttributeList(a) + } + + p.buf.WriteByte(';') + } + } + + p.closeBlock("}") + + return nil +} + +func (p *printer) printFrontMatter(name string, needsIndent, isSubgraph, isDirected, isStrict bool) error { + p.buf.WriteString(p.prefix) + if needsIndent { + for i := 0; i < p.depth; i++ { + p.buf.WriteString(p.indent) + } + } + + if !isSubgraph && isStrict { + p.buf.WriteString("strict ") + } + + if isSubgraph { + p.buf.WriteString("sub") + } else if isDirected { + p.buf.WriteString("di") + } + p.buf.WriteString("graph") + + if name != "" { + p.buf.WriteByte(' ') + p.buf.WriteString(quoteID(name)) + } + + p.openBlock(" {") + return nil +} + +func (p *printer) writeNode(n graph.Node) { + p.buf.WriteString(quoteID(nodeID(n))) +} + +func (p *printer) writePorts(port, cp string) { + if port != "" { + p.buf.WriteByte(':') + p.buf.WriteString(quoteID(port)) + } + if cp != "" { + p.buf.WriteByte(':') + p.buf.WriteString(cp) + } +} + +func nodeID(n graph.Node) string { + switch n := n.(type) { + case Node: + return n.DOTID() + default: + return fmt.Sprint(n.ID()) + } +} + +func graphID(g interface{}, n graph.Node) string { + switch g := g.(type) { + case Node: + return g.DOTID() + default: + return nodeID(n) + } +} + +func (p *printer) writeAttributeList(a encoding.Attributer) { + attributes := a.Attributes() + switch len(attributes) { + case 0: + case 1: + p.buf.WriteString(" [") + p.buf.WriteString(quoteID(attributes[0].Key)) + p.buf.WriteByte('=') + p.buf.WriteString(quoteID(attributes[0].Value)) + p.buf.WriteString("]") + default: + p.openBlock(" [") + for _, att := range attributes { + p.newline() + p.buf.WriteString(quoteID(att.Key)) + p.buf.WriteByte('=') + p.buf.WriteString(quoteID(att.Value)) + } + p.closeBlock("]") + } +} + +var attType = []string{"graph", "node", "edge"} + +func (p *printer) writeAttributeComplex(ca Attributers) { + g, n, e := ca.DOTAttributers() + haveWrittenBlock := false + for i, a := range []encoding.Attributer{g, n, e} { + attributes := a.Attributes() + if len(attributes) == 0 { + continue + } + if haveWrittenBlock { + p.buf.WriteByte(';') + } + p.newline() + p.buf.WriteString(attType[i]) + p.openBlock(" [") + for _, att := range attributes { + p.newline() + p.buf.WriteString(quoteID(att.Key)) + p.buf.WriteByte('=') + p.buf.WriteString(quoteID(att.Value)) + } + p.closeBlock("]") + haveWrittenBlock = true + } + if haveWrittenBlock { + p.buf.WriteString(";\n") + } +} + +func 
(p *printer) newline() { + p.buf.WriteByte('\n') + p.buf.WriteString(p.prefix) + for i := 0; i < p.depth; i++ { + p.buf.WriteString(p.indent) + } +} + +func (p *printer) openBlock(b string) { + p.buf.WriteString(b) + p.depth++ +} + +func (p *printer) closeBlock(b string) { + p.depth-- + p.newline() + p.buf.WriteString(b) +} + +type simpleGraphPrinter struct { + printer + visited map[edge]bool +} + +type multiGraphPrinter struct { + printer + visited map[line]bool +} + +type line struct { + inGraph string + id int64 +} + +func (p *multiGraphPrinter) print(g graph.Multigraph, name string, needsIndent, isSubgraph bool) error { + if name == "" { + if g, ok := g.(Multigraph); ok { + name = g.DOTID() + } + } + + _, isDirected := g.(graph.Directed) + p.printFrontMatter(name, needsIndent, isSubgraph, isDirected, false) + + if a, ok := g.(Attributers); ok { + p.writeAttributeComplex(a) + } + if s, ok := g.(MultiStructurer); ok { + for _, g := range s.Structure() { + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.buf.WriteByte('\n') + p.print(g, g.DOTID(), true, true) + } + } + + nodes := graph.NodesOf(g.Nodes()) + sort.Sort(ordered.ByID(nodes)) + + havePrintedNodeHeader := false + for _, n := range nodes { + if s, ok := n.(MultiSubgrapher); ok { + // If the node is not linked to any other node + // the graph needs to be written now. + if g.From(n.ID()).Len() == 0 { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.print(g, graphID(g, n), false, true) + } + continue + } + if !havePrintedNodeHeader { + p.newline() + p.buf.WriteString("// Node definitions.") + havePrintedNodeHeader = true + } + p.newline() + p.writeNode(n) + if a, ok := n.(encoding.Attributer); ok { + p.writeAttributeList(a) + } + p.buf.WriteByte(';') + } + + havePrintedEdgeHeader := false + for _, n := range nodes { + nid := n.ID() + to := graph.NodesOf(g.From(nid)) + sort.Sort(ordered.ByID(to)) + + for _, t := range to { + tid := t.ID() + + lines := graph.LinesOf(g.Lines(nid, tid)) + sort.Sort(ordered.LinesByIDs(lines)) + + for _, l := range lines { + lid := l.ID() + if p.visited[line{inGraph: name, id: lid}] { + continue + } + p.visited[line{inGraph: name, id: lid}] = true + + if !havePrintedEdgeHeader { + p.buf.WriteByte('\n') + p.buf.WriteString(strings.TrimRight(p.prefix, " \t\n")) // Trim whitespace suffix. 
+ p.newline() + p.buf.WriteString("// Edge definitions.") + havePrintedEdgeHeader = true + } + p.newline() + + if s, ok := n.(MultiSubgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, n), false, true) + } else { + p.writeNode(n) + } + + porter, edgeIsPorter := l.(Porter) + if edgeIsPorter { + if l.From().ID() == nid { + p.writePorts(porter.FromPort()) + } else { + p.writePorts(porter.ToPort()) + } + } + + if isDirected { + p.buf.WriteString(" -> ") + } else { + p.buf.WriteString(" -- ") + } + + if s, ok := t.(MultiSubgrapher); ok { + g := s.Subgraph() + _, subIsDirected := g.(graph.Directed) + if subIsDirected != isDirected { + return errors.New("dot: mismatched graph type") + } + p.print(g, graphID(g, t), false, true) + } else { + p.writeNode(t) + } + if edgeIsPorter { + if l.From().ID() == nid { + p.writePorts(porter.ToPort()) + } else { + p.writePorts(porter.FromPort()) + } + } + + if a, ok := l.(encoding.Attributer); ok { + p.writeAttributeList(a) + } + + p.buf.WriteByte(';') + } + } + } + + p.closeBlock("}") + + return nil +} + +// quoteID quotes the given string if needed in the context of an ID. If s is +// already quoted, or if s does not contain any spaces or special characters +// that need escaping, the original string is returned. +func quoteID(s string) string { + // To use a keyword as an ID, it must be quoted. + if isKeyword(s) { + return strconv.Quote(s) + } + // Quote if s is not an ID. This includes strings containing spaces, except + // if those spaces are used within HTML string IDs (e.g. ). + if !isID(s) { + return strconv.Quote(s) + } + return s +} + +// isKeyword reports whether the given string is a keyword in the DOT language. +func isKeyword(s string) bool { + // ref: https://www.graphviz.org/doc/info/lang.html + keywords := []string{"node", "edge", "graph", "digraph", "subgraph", "strict"} + for _, keyword := range keywords { + if strings.EqualFold(s, keyword) { + return true + } + } + return false +} + +// FIXME: see if we rewrite this in another way to remove our regexp dependency. + +// Regular expression to match identifier and numeral IDs. +var ( + reIdent = regexp.MustCompile(`^[a-zA-Z\200-\377_][0-9a-zA-Z\200-\377_]*$`) + reNumeral = regexp.MustCompile(`^[-]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)$`) +) + +// isID reports whether the given string is an ID. +// +// An ID is one of the following: +// +// 1. Any string of alphabetic ([a-zA-Z\200-\377]) characters, underscores ('_') +// or digits ([0-9]), not beginning with a digit; +// 2. a numeral [-]?(.[0-9]+ | [0-9]+(.[0-9]*)? ); +// 3. any double-quoted string ("...") possibly containing escaped quotes (\"); +// 4. an HTML string (<...>). +func isID(s string) bool { + // 1. an identifier. + if reIdent.MatchString(s) { + return true + } + // 2. a numeral. + if reNumeral.MatchString(s) { + return true + } + // 3. double-quote string ID. + if len(s) >= 2 && strings.HasPrefix(s, `"`) && strings.HasSuffix(s, `"`) { + // Check that escape sequences within the double-quotes are valid. + if _, err := strconv.Unquote(s); err == nil { + return true + } + } + // 4. HTML ID. + return isHTMLID(s) +} + +// isHTMLID reports whether the given string an HTML ID. 
+func isHTMLID(s string) bool { + // HTML IDs have the format /^<.*>$/ + return len(s) >= 2 && strings.HasPrefix(s, "<") && strings.HasSuffix(s, ">") +} diff --git a/vendor/gonum.org/v1/gonum/graph/encoding/encoding.go b/vendor/gonum.org/v1/gonum/graph/encoding/encoding.go new file mode 100644 index 0000000..53ef0d5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/encoding/encoding.go @@ -0,0 +1,36 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package encoding + +import "gonum.org/v1/gonum/graph" + +// Builder is a graph that can have user-defined nodes and edges added. +type Builder interface { + graph.Graph + graph.Builder +} + +// MultiBuilder is a graph that can have user-defined nodes and edges added. +type MultiBuilder interface { + graph.Multigraph + graph.MultigraphBuilder +} + +// AttributeSetter is implemented by types that can set an encoded graph +// attribute. +type AttributeSetter interface { + SetAttribute(Attribute) error +} + +// Attributer defines graph.Node or graph.Edge values that can +// specify graph attributes. +type Attributer interface { + Attributes() []Attribute +} + +// Attribute is an encoded key value attribute pair use in graph encoding. +type Attribute struct { + Key, Value string +} diff --git a/vendor/gonum.org/v1/gonum/graph/encoding/graph6/graph6.go b/vendor/gonum.org/v1/gonum/graph/encoding/graph6/graph6.go new file mode 100644 index 0000000..32d55ac --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/encoding/graph6/graph6.go @@ -0,0 +1,283 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package graph6 implements graphs specified by graph6 strings. +package graph6 // import "gonum.org/v1/gonum/graph/encoding/graph6" + +import ( + "fmt" + "math/big" + "sort" + "strings" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/graph/simple" +) + +// Graph is a graph6-represented undirected graph. +// +// See https://users.cecs.anu.edu.au/~bdm/data/formats.txt for details +// and https://hog.grinvin.org/ for a source of interesting graphs in graph6 +// format. +type Graph string + +var ( + g6 Graph + + _ graph.Graph = g6 + _ graph.Undirected = g6 +) + +// Encode returns a graph6 encoding of the topology of the given graph using a +// lexical ordering of the nodes by ID to map them to [0, n). 
+func Encode(g graph.Graph) Graph { + nodes := graph.NodesOf(g.Nodes()) + n := len(nodes) + sort.Sort(ordered.ByID(nodes)) + indexOf := make(map[int64]int, n) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + size := (n*n - n) / 2 + var b big.Int + for i, u := range nodes { + uid := u.ID() + it := g.From(uid) + for it.Next() { + vid := it.Node().ID() + if vid < uid { + continue + } + j := indexOf[vid] + b.SetBit(&b, bitFor(int64(i), int64(j)), 1) + } + } + + var buf strings.Builder + switch { + case n < 63: + buf.WriteByte(byte(n) + 63) + case n < 258048: + buf.Write([]byte{126, byte(n>>12) + 63, byte(n>>6) + 63, byte(n) + 63}) + case n < 68719476736: + buf.Write([]byte{126, 126, byte(n>>30) + 63, byte(n>>24) + 63, byte(n>>18) + 63, byte(n>>12) + 63, byte(n>>6) + 63, byte(n) + 63}) + default: + panic("graph6: too large") + } + + var c byte + for i := 0; i < size; i++ { + bit := i % 6 + c |= byte(b.Bit(i)) << uint(5-bit) + if bit == 5 { + buf.WriteByte(c + 63) + c = 0 + } + } + if size%6 != 0 { + buf.WriteByte(c + 63) + } + + return Graph(buf.String()) +} + +// IsValid returns whether the graph is a valid graph6 encoding. An invalid Graph +// behaves as the null graph. +func IsValid(g Graph) bool { + n := int(numberOf(g)) + if n < 0 { + return false + } + size := ((n*n-n)/2 + 5) / 6 // ceil(((n*n-n)/2) / 6) + switch { + case g[0] != 126: + return len(g[1:]) == size + case g[1] != 126: + return len(g[4:]) == size + default: + return len(g[8:]) == size + } +} + +// Edge returns the edge from u to v, with IDs uid and vid, if such an edge +// exists and nil otherwise. The node v must be directly reachable from u as +// defined by the From method. +func (g Graph) Edge(uid, vid int64) graph.Edge { + if !IsValid(g) { + return nil + } + if !g.HasEdgeBetween(uid, vid) { + return nil + } + return simple.Edge{simple.Node(uid), simple.Node(vid)} +} + +// EdgeBetween returns the edge between nodes x and y with IDs xid and yid. +func (g Graph) EdgeBetween(xid, yid int64) graph.Edge { + return g.Edge(xid, yid) +} + +// From returns all nodes that can be reached directly from the node with the +// given ID. +func (g Graph) From(id int64) graph.Nodes { + if !IsValid(g) { + return graph.Empty + } + if g.Node(id) == nil { + return nil + } + return &g6Iterator{g: g, from: id, to: -1} +} + +// HasEdgeBetween returns whether an edge exists between nodes with IDs xid +// and yid without considering direction. +func (g Graph) HasEdgeBetween(xid, yid int64) bool { + if !IsValid(g) { + return false + } + if xid == yid { + return false + } + if xid < 0 || numberOf(g) <= xid { + return false + } + if yid < 0 || numberOf(g) <= yid { + return false + } + return isSet(bitFor(xid, yid), g) +} + +// Node returns the node with the given ID if it exists in the graph, and nil +// otherwise. +func (g Graph) Node(id int64) graph.Node { + if !IsValid(g) { + return nil + } + if id < 0 || numberOf(g) <= id { + return nil + } + return simple.Node(id) +} + +// Nodes returns all the nodes in the graph. +func (g Graph) Nodes() graph.Nodes { + if !IsValid(g) { + return graph.Empty + } + return iterator.NewImplicitNodes(0, int(numberOf(g)), func(id int) graph.Node { return simple.Node(id) }) +} + +// g6Iterator is a graph.Nodes for graph6 graph edges. 
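A corresponding sketch for the graph6 encoder (again not part of the patch): Encode maps node IDs to [0, n) in sorted-ID order, and the returned string itself satisfies graph.Graph, so the encoded form can be queried without decoding.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/encoding/graph6"
	"gonum.org/v1/gonum/graph/simple"
)

func main() {
	// An undirected path 0 - 1 - 2.
	u := simple.NewUndirectedGraph()
	u.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1)})
	u.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2)})

	g := graph6.Encode(u)
	fmt.Printf("%q valid=%t\n", string(g), graph6.IsValid(g))
	fmt.Println(g.HasEdgeBetween(0, 1)) // true
	fmt.Println(g.HasEdgeBetween(0, 2)) // false
}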
+type g6Iterator struct { + g Graph + from int64 + to int64 +} + +var _ graph.Nodes = (*g6Iterator)(nil) + +func (i *g6Iterator) Next() bool { + n := numberOf(i.g) + for i.to < n-1 { + i.to++ + if i.to != i.from && isSet(bitFor(i.from, i.to), i.g) { + return true + } + } + return false +} + +func (i *g6Iterator) Len() int { + var cnt int + n := numberOf(i.g) + for to := i.to; to < n-1; { + to++ + if to != i.from && isSet(bitFor(i.from, to), i.g) { + cnt++ + } + } + return cnt +} + +func (i *g6Iterator) Reset() { i.to = -1 } + +func (i *g6Iterator) Node() graph.Node { return simple.Node(i.to) } + +// numberOf returns the graph6-encoded number corresponding to g. +func numberOf(g Graph) int64 { + if len(g) < 1 { + return -1 + } + if g[0] != 126 { + return int64(g[0] - 63) + } + if len(g) < 4 { + return -1 + } + if g[1] != 126 { + return int64(g[1]-63)<<12 | int64(g[2]-63)<<6 | int64(g[3]-63) + } + if len(g) < 8 { + return -1 + } + return int64(g[2]-63)<<30 | int64(g[3]-63)<<24 | int64(g[4]-63)<<18 | int64(g[5]-63)<<12 | int64(g[6]-63)<<6 | int64(g[7]-63) +} + +// bitFor returns the index into the graph6 adjacency matrix for xid--yid. +func bitFor(xid, yid int64) int { + if xid < yid { + xid, yid = yid, xid + } + return int((xid*xid-xid)/2 + yid) +} + +// isSet returns whether the given bit of the adjacency matrix is set. +func isSet(bit int, g Graph) bool { + switch { + case g[0] != 126: + g = g[1:] + case g[1] != 126: + g = g[4:] + default: + g = g[8:] + } + if bit/6 >= len(g) { + panic("g6: index out of range") + } + return (g[bit/6]-63)&(1< 0; i-- { + w := lt.nodes[i] + + // step 2. + for _, v := range w.pred { + u := lt.eval(v) + + if u.semi < w.semi { + w.semi = u.semi + } + } + + lt.nodes[w.semi].bucket[w] = struct{}{} + lt.link(w.parent, w) + + // step 3. + for v := range w.parent.bucket { + delete(w.parent.bucket, v) + + u := lt.eval(v) + if u.semi < v.semi { + v.dom = u + } else { + v.dom = w.parent + } + } + } + + // step 4. + for _, w := range lt.nodes[1:] { + if w.dom.node.ID() != lt.nodes[w.semi].node.ID() { + w.dom = w.dom.dom + } + } + + // Construct the public-facing dominator tree structure. + dominatorOf := make(map[int64]graph.Node) + dominatedBy := make(map[int64][]graph.Node) + for _, w := range lt.nodes[1:] { + dominatorOf[w.node.ID()] = w.dom.node + did := w.dom.node.ID() + dominatedBy[did] = append(dominatedBy[did], w.node) + } + return DominatorTree{root: root, dominatorOf: dominatorOf, dominatedBy: dominatedBy} +} + +// lengauerTarjan holds global state of the Lengauer-Tarjan algorithm. +// This is a mapping between nodes and the postordering of the nodes. +type lengauerTarjan struct { + // nodes is the nodes traversed during the + // Lengauer-Tarjan depth-first-search. + nodes []*ltNode + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + // + // This corresponds to the vertex + // number of the node in the Lengauer- + // Tarjan algorithm. + indexOf map[int64]int +} + +// ltNode is a graph node with accounting for the Lengauer-Tarjan +// algorithm. +// +// For the purposes of documentation the ltNode is given the name w. +type ltNode struct { + node graph.Node + + // parent is vertex which is the parent of w + // in the spanning tree generated by the search. + parent *ltNode + + // pred is the set of vertices v such that (v, w) + // is an edge of the graph. 
+ pred []*ltNode + + // semi is a number defined as follows: + // (i) After w is numbered but before its semidominator + // is computed, semi is the number of w. + // (ii) After the semidominator of w is computed, semi + // is the number of the semidominator of w. + semi int + + // bucket is the set of vertices whose + // semidominator is w. + bucket map[*ltNode]struct{} + + // dom is vertex defined as follows: + // (i) After step 3, if the semidominator of w is its + // immediate dominator, then dom is the immediate + // dominator of w. Otherwise dom is a vertex v + // whose number is smaller than w and whose immediate + // dominator is also w's immediate dominator. + // (ii) After step 4, dom is the immediate dominator of w. + dom *ltNode + + // In general ancestor is nil only if w is a tree root + // in the forest; otherwise ancestor is an ancestor + // of w in the forest. + ancestor *ltNode + + // Initially label is w. It is adjusted during + // the algorithm to maintain invariant (3) in the + // Lengauer and Tarjan paper. + label *ltNode +} + +// dfs is the Lengauer-Tarjan DFS procedure. +func (lt *lengauerTarjan) dfs(g graph.Directed, v graph.Node) { + i := len(lt.nodes) + lt.indexOf[v.ID()] = i + ltv := &ltNode{ + node: v, + semi: i, + bucket: make(map[*ltNode]struct{}), + } + ltv.label = ltv + lt.nodes = append(lt.nodes, ltv) + + for _, w := range graph.NodesOf(g.From(v.ID())) { + wid := w.ID() + + idx, ok := lt.indexOf[wid] + if !ok { + lt.dfs(g, w) + + // We place this below the recursive call + // in contrast to the original algorithm + // since w needs to be initialised, and + // this happens in the child call to dfs. + idx, ok = lt.indexOf[wid] + if !ok { + panic("path: unintialized node") + } + lt.nodes[idx].parent = ltv + } + ltw := lt.nodes[idx] + ltw.pred = append(ltw.pred, ltv) + } +} + +// compress is the Lengauer-Tarjan COMPRESS procedure. +func (lt *lengauerTarjan) compress(v *ltNode) { + if v.ancestor.ancestor != nil { + lt.compress(v.ancestor) + if v.ancestor.label.semi < v.label.semi { + v.label = v.ancestor.label + } + v.ancestor = v.ancestor.ancestor + } +} + +// eval is the Lengauer-Tarjan EVAL function. +func (lt *lengauerTarjan) eval(v *ltNode) *ltNode { + if v.ancestor == nil { + return v + } + lt.compress(v) + return v.label +} + +// link is the Lengauer-Tarjan LINK procedure. +func (*lengauerTarjan) link(v, w *ltNode) { + w.ancestor = v +} + +// DominatorTree is a flow graph dominator tree. +type DominatorTree struct { + root graph.Node + dominatorOf map[int64]graph.Node + dominatedBy map[int64][]graph.Node +} + +// Root returns the root of the tree. +func (d DominatorTree) Root() graph.Node { return d.root } + +// DominatorOf returns the immediate dominator of the node with the given ID. +func (d DominatorTree) DominatorOf(id int64) graph.Node { + return d.dominatorOf[id] +} + +// DominatedBy returns a slice of all nodes immediately dominated by the node +// with the given ID. Elements of the slice are retained by the DominatorTree. +func (d DominatorTree) DominatedBy(id int64) []graph.Node { + return d.dominatedBy[id] +} diff --git a/vendor/gonum.org/v1/gonum/graph/flow/control_flow_slt.go b/vendor/gonum.org/v1/gonum/graph/flow/control_flow_slt.go new file mode 100644 index 0000000..b05a58e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/flow/control_flow_slt.go @@ -0,0 +1,232 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file.
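For orientation (not part of the patch), a minimal sketch of querying the DominatorTree API above. It assumes the package's simple entry point, flow.Dominators, which shares the signature of DominatorsSLT shown in the next file.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/flow"
	"gonum.org/v1/gonum/graph/simple"
)

func main() {
	// A diamond-shaped control flow graph: 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3.
	g := simple.NewDirectedGraph()
	for _, e := range [][2]int64{{0, 1}, {0, 2}, {1, 3}, {2, 3}} {
		g.SetEdge(simple.Edge{F: simple.Node(e[0]), T: simple.Node(e[1])})
	}

	d := flow.Dominators(g.Node(0), g)
	fmt.Println(d.DominatorOf(3).ID()) // 0: the merge point is dominated by the branch head
	fmt.Println(d.DominatorOf(1).ID()) // 0
}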
+ +package flow + +import "gonum.org/v1/gonum/graph" + +// DominatorsSLT returns a dominator tree for all nodes in the flow graph +// g starting from the given root node using the sophisticated version of +// the Lengauer-Tarjan algorithm. The SLT algorithm may outperform the +// simple LT algorithm for very large dense graphs. +func DominatorsSLT(root graph.Node, g graph.Directed) DominatorTree { + // The algorithm used here is essentially the + // sophisticated Lengauer and Tarjan algorithm + // described in + // https://doi.org/10.1145%2F357062.357071 + + lt := sLengauerTarjan{ + indexOf: make(map[int64]int), + base: sltNode{semi: -1}, + } + lt.base.label = &lt.base + + // step 1. + lt.dfs(g, root) + + for i := len(lt.nodes) - 1; i > 0; i-- { + w := lt.nodes[i] + + // step 2. + for _, v := range w.pred { + u := lt.eval(v) + + if u.semi < w.semi { + w.semi = u.semi + } + } + + lt.nodes[w.semi].bucket[w] = struct{}{} + lt.link(w.parent, w) + + // step 3. + for v := range w.parent.bucket { + delete(w.parent.bucket, v) + + u := lt.eval(v) + if u.semi < v.semi { + v.dom = u + } else { + v.dom = w.parent + } + } + } + + // step 4. + for _, w := range lt.nodes[1:] { + if w.dom.node.ID() != lt.nodes[w.semi].node.ID() { + w.dom = w.dom.dom + } + } + + // Construct the public-facing dominator tree structure. + dominatorOf := make(map[int64]graph.Node) + dominatedBy := make(map[int64][]graph.Node) + for _, w := range lt.nodes[1:] { + dominatorOf[w.node.ID()] = w.dom.node + did := w.dom.node.ID() + dominatedBy[did] = append(dominatedBy[did], w.node) + } + return DominatorTree{root: root, dominatorOf: dominatorOf, dominatedBy: dominatedBy} +} + +// sLengauerTarjan holds global state of the Lengauer-Tarjan algorithm. +// This is a mapping between nodes and the postordering of the nodes. +type sLengauerTarjan struct { + // nodes is the nodes traversed during the + // Lengauer-Tarjan depth-first-search. + nodes []*sltNode + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + // + // This corresponds to the vertex + // number of the node in the Lengauer- + // Tarjan algorithm. + indexOf map[int64]int + + // base is the base label for balanced + // tree path compression used in the + // sophisticated Lengauer-Tarjan + // algorithm. + base sltNode +} + +// sltNode is a graph node with accounting for the Lengauer-Tarjan +// algorithm. +// +// For the purposes of documentation the ltNode is given the name w. +type sltNode struct { + node graph.Node + + // parent is vertex which is the parent of w + // in the spanning tree generated by the search. + parent *sltNode + + // pred is the set of vertices v such that (v, w) + // is an edge of the graph. + pred []*sltNode + + // semi is a number defined as follows: + // (i) After w is numbered but before its semidominator + // is computed, semi is the number of w. + // (ii) After the semidominator of w is computed, semi + // is the number of the semidominator of w. + semi int + + // size is the tree size of w used in the + // sophisticated algorithm. + size int + + // child is the child node of w used in the + // sophisticated algorithm. + child *sltNode + + // bucket is the set of vertices whose + // semidominator is w. + bucket map[*sltNode]struct{} + + // dom is vertex defined as follows: + // (i) After step 3, if the semidominator of w is its + // immediate dominator, then dom is the immediate + // dominator of w.
Otherwise dom is a vertex v + // whose number is smaller than w and whose immediate + // dominator is also w's immediate dominator. + // (ii) After step 4, dom is the immediate dominator of w. + dom *sltNode + + // In general ancestor is nil only if w is a tree root + // in the forest; otherwise ancestor is an ancestor + // of w in the forest. + ancestor *sltNode + + // Initially label is w. It is adjusted during + // the algorithm to maintain invariant (3) in the + // Lengauer and Tarjan paper. + label *sltNode +} + +// dfs is the Sophisticated Lengauer-Tarjan DFS procedure. +func (lt *sLengauerTarjan) dfs(g graph.Directed, v graph.Node) { + i := len(lt.nodes) + lt.indexOf[v.ID()] = i + ltv := &sltNode{ + node: v, + semi: i, + size: 1, + child: &lt.base, + bucket: make(map[*sltNode]struct{}), + } + ltv.label = ltv + lt.nodes = append(lt.nodes, ltv) + + for _, w := range graph.NodesOf(g.From(v.ID())) { + wid := w.ID() + + idx, ok := lt.indexOf[wid] + if !ok { + lt.dfs(g, w) + + // We place this below the recursive call + // in contrast to the original algorithm + // since w needs to be initialised, and + // this happens in the child call to dfs. + idx, ok = lt.indexOf[wid] + if !ok { + panic("path: unintialized node") + } + lt.nodes[idx].parent = ltv + } + ltw := lt.nodes[idx] + ltw.pred = append(ltw.pred, ltv) + } +} + +// compress is the Sophisticated Lengauer-Tarjan COMPRESS procedure. +func (lt *sLengauerTarjan) compress(v *sltNode) { + if v.ancestor.ancestor != nil { + lt.compress(v.ancestor) + if v.ancestor.label.semi < v.label.semi { + v.label = v.ancestor.label + } + v.ancestor = v.ancestor.ancestor + } +} + +// eval is the Sophisticated Lengauer-Tarjan EVAL function. +func (lt *sLengauerTarjan) eval(v *sltNode) *sltNode { + if v.ancestor == nil { + return v.label + } + lt.compress(v) + if v.ancestor.label.semi >= v.label.semi { + return v.label + } + return v.ancestor.label +} + +// link is the Sophisticated Lengauer-Tarjan LINK procedure. +func (*sLengauerTarjan) link(v, w *sltNode) { + s := w + for w.label.semi < s.child.label.semi { + if s.size+s.child.child.size >= 2*s.child.size { + s.child.ancestor = s + s.child = s.child.child + } else { + s.child.size = s.size + s.ancestor = s.child + s = s.child + } + } + s.label = w.label + v.size += w.size + if v.size < 2*w.size { + s, v.child = v.child, s + } + for s != nil { + s.ancestor = v + s = s.child + } +} diff --git a/vendor/gonum.org/v1/gonum/graph/flow/doc.go b/vendor/gonum.org/v1/gonum/graph/flow/doc.go new file mode 100644 index 0000000..2bef0df --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/flow/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package flow provides control flow analysis functions. +package flow // import "gonum.org/v1/gonum/graph/flow" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/cytoscapejs.go b/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/cytoscapejs.go new file mode 100644 index 0000000..49355ce --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/cytoscapejs.go @@ -0,0 +1,310 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cytoscapejs implements marshaling and unmarshaling of Cytoscape.js JSON documents. +// +// See http://js.cytoscape.org/ for Cytoscape.js documentation.
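A decoding sketch (not part of the patch) for the element types defined below: the custom unmarshalers split id, source and target out of each data object and keep any remaining keys in Attributes.

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"gonum.org/v1/gonum/graph/formats/cytoscapejs"
)

const doc = `{"elements":[
	{"data":{"id":"a","label":"start"}},
	{"data":{"id":"b"}},
	{"data":{"id":"e0","source":"a","target":"b"}}
]}`

func main() {
	var g cytoscapejs.GraphElem
	if err := json.Unmarshal([]byte(doc), &g); err != nil {
		log.Fatal(err)
	}
	for _, el := range g.Elements {
		t, err := el.Type() // prints 0 (NodeElement) for a and b, 1 (EdgeElement) for e0
		fmt.Println(el.Data.ID, t, err)
	}
}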
+package cytoscapejs // import "gonum.org/v1/gonum/graph/formats/cytoscapejs" + +import ( + "encoding/json" + "errors" + "fmt" +) + +// GraphElem is a Cytoscape.js graph with mixed graph elements. +type GraphElem struct { + Elements []Element `json:"elements"` + Layout interface{} `json:"layout,omitempty"` + Style []interface{} `json:"style,omitempty"` +} + +// Element is a mixed graph element. +type Element struct { + Group string `json:"group,omitempty"` + Data ElemData `json:"data"` + Position *Position `json:"position,omitempty"` + RenderedPosition *Position `json:"renderedPosition,omitempty"` + Selected bool `json:"selected,omitempty"` + Selectable bool `json:"selectable,omitempty"` + Locked bool `json:"locked,omitempty"` + Grabbable bool `json:"grabbable,omitempty"` + Classes string `json:"classes,omitempty"` + Scratch interface{} `json:"scratch,omitempty"` +} + +// ElemType describes an Element type. +type ElemType int + +const ( + InvalidElement ElemType = iota - 1 + NodeElement + EdgeElement +) + +// Type returns the element type of the receiver. It returns an error if the Element Group +// is invalid or does not match the Element Data, or if the Elelement Data is an incomplete +// edge. +func (e Element) Type() (ElemType, error) { + et := InvalidElement + switch { + case e.Data.Source == "" && e.Data.Target == "": + et = NodeElement + case e.Data.Source != "" && e.Data.Target != "": + et = EdgeElement + default: + return et, errors.New("cytoscapejs: invalid element: incomplete edge") + } + switch { + case e.Group == "": + return et, nil + case e.Group == "node" && et == NodeElement: + return NodeElement, nil + case e.Group == "edge" && et == EdgeElement: + return NodeElement, nil + default: + return InvalidElement, errors.New("cytoscapejs: invalid element: mismatched group") + } +} + +// ElemData is a graph element's data container. +type ElemData struct { + ID string + Source string + Target string + Parent string + Attributes map[string]interface{} +} + +var ( + _ json.Marshaler = (*ElemData)(nil) + _ json.Unmarshaler = (*ElemData)(nil) +) + +// MarshalJSON implements the json.Marshaler interface. +func (e *ElemData) MarshalJSON() ([]byte, error) { + if e.Attributes == nil { + type elem struct { + ID string `json:"id"` + Source string `json:"source,omitempty"` + Target string `json:"target,omitempty"` + Parent string `json:"parent,omitempty"` + } + return json.Marshal(elem{ID: e.ID, Source: e.Source, Target: e.Target, Parent: e.Parent}) + } + e.Attributes["id"] = e.ID + if e.Source != "" { + e.Attributes["source"] = e.Source + } + if e.Target != "" { + e.Attributes["target"] = e.Target + } + if e.Parent != "" { + e.Attributes["parent"] = e.Parent + } + b, err := json.Marshal(e.Attributes) + delete(e.Attributes, "id") + if e.Source != "" { + delete(e.Attributes, "source") + } + if e.Target != "" { + delete(e.Attributes, "target") + } + if e.Parent != "" { + delete(e.Attributes, "parent") + } + return b, err +} + +// UnmarshalJSON implements the json.Unmarshaler interface. 
+func (e *ElemData) UnmarshalJSON(data []byte) error { + var attrs map[string]interface{} + err := json.Unmarshal(data, &attrs) + if err != nil { + return err + } + id, ok := attrs["id"] + if !ok { + return errors.New("cytoscapejs: no ID") + } + e.ID = fmt.Sprint(id) + source, ok := attrs["source"] + if ok { + e.Source = fmt.Sprint(source) + } + target, ok := attrs["target"] + if ok { + e.Target = fmt.Sprint(target) + } + p, ok := attrs["parent"] + if ok { + e.Parent = fmt.Sprint(p) + } + delete(attrs, "id") + delete(attrs, "source") + delete(attrs, "target") + delete(attrs, "parent") + if len(attrs) != 0 { + e.Attributes = attrs + } + return nil +} + +// GraphNodeEdge is a Cytoscape.js graph with separated nodes and edges. +type GraphNodeEdge struct { + Elements Elements `json:"elements"` + Layout interface{} `json:"layout,omitempty"` + Style []interface{} `json:"style,omitempty"` +} + +// Elements contains the nodes and edges of a GraphNodeEdge. +type Elements struct { + Nodes []Node `json:"nodes"` + Edges []Edge `json:"edges"` +} + +// Node is a Cytoscape.js node. +type Node struct { + Data NodeData `json:"data"` + Position *Position `json:"position,omitempty"` + RenderedPosition *Position `json:"renderedPosition,omitempty"` + Selected bool `json:"selected,omitempty"` + Selectable bool `json:"selectable,omitempty"` + Locked bool `json:"locked,omitempty"` + Grabbable bool `json:"grabbable,omitempty"` + Classes string `json:"classes,omitempty"` + Scratch interface{} `json:"scratch,omitempty"` +} + +// NodeData is a graph node's data container. +type NodeData struct { + ID string + Parent string + Attributes map[string]interface{} +} + +var ( + _ json.Marshaler = (*NodeData)(nil) + _ json.Unmarshaler = (*NodeData)(nil) +) + +// MarshalJSON implements the json.Marshaler interface. +func (n *NodeData) MarshalJSON() ([]byte, error) { + if n.Attributes == nil { + type node struct { + ID string `json:"id"` + Parent string `json:"parent,omitempty"` + } + return json.Marshal(node{ID: n.ID, Parent: n.Parent}) + } + n.Attributes["id"] = n.ID + n.Attributes["parent"] = n.Parent + b, err := json.Marshal(n.Attributes) + delete(n.Attributes, "id") + delete(n.Attributes, "parent") + return b, err +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (n *NodeData) UnmarshalJSON(data []byte) error { + var attrs map[string]interface{} + err := json.Unmarshal(data, &attrs) + if err != nil { + return err + } + id, ok := attrs["id"] + if !ok { + return errors.New("cytoscapejs: no ID") + } + n.ID = fmt.Sprint(id) + delete(attrs, "id") + p, ok := attrs["parent"] + if ok { + n.Parent = fmt.Sprint(p) + } + delete(attrs, "parent") + if len(attrs) != 0 { + n.Attributes = attrs + } + return nil +} + +// Edge is a Cytoscape.js edge. +type Edge struct { + Data EdgeData `json:"data"` + Selected bool `json:"selected,omitempty"` + Selectable bool `json:"selectable,omitempty"` + Classes string `json:"classes,omitempty"` + Scratch interface{} `json:"scratch,omitempty"` +} + +// EdgeData is a graph edge's data container. +type EdgeData struct { + ID string + Source string + Target string + Attributes map[string]interface{} +} + +var ( + _ json.Marshaler = (*EdgeData)(nil) + _ json.Unmarshaler = (*EdgeData)(nil) +) + +// MarshalJSON implements the json.Marshaler interface. 
+func (e *EdgeData) MarshalJSON() ([]byte, error) { + if e.Attributes == nil { + type edge struct { + ID string `json:"id"` + Source string `json:"source"` + Target string `json:"target"` + } + return json.Marshal(edge{ID: e.ID, Source: e.Source, Target: e.Target}) + } + e.Attributes["id"] = e.ID + e.Attributes["source"] = e.Source + e.Attributes["target"] = e.Target + b, err := json.Marshal(e.Attributes) + delete(e.Attributes, "id") + delete(e.Attributes, "source") + delete(e.Attributes, "target") + return b, err +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (e *EdgeData) UnmarshalJSON(data []byte) error { + var attrs map[string]interface{} + err := json.Unmarshal(data, &attrs) + if err != nil { + return err + } + id, ok := attrs["id"] + if !ok { + return errors.New("cytoscapejs: no ID") + } + source, ok := attrs["source"] + if !ok { + return errors.New("cytoscapejs: no source") + } + target, ok := attrs["target"] + if !ok { + return errors.New("cytoscapejs: no target") + } + e.ID = fmt.Sprint(id) + e.Source = fmt.Sprint(source) + e.Target = fmt.Sprint(target) + delete(attrs, "id") + delete(attrs, "source") + delete(attrs, "target") + if len(attrs) != 0 { + e.Attributes = attrs + } + return nil +} + +// Position is a node position. +type Position struct { + X float64 `json:"x"` + Y float64 `json:"y"` +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/testdata/LICENSE b/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/testdata/LICENSE new file mode 100644 index 0000000..9e21a7e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/cytoscapejs/testdata/LICENSE @@ -0,0 +1,21 @@ + + +Copyright (c) 2016-2018, The Cytoscape Consortium. + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the “Software”), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/ast.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/ast.go new file mode 100644 index 0000000..4ed00d7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/ast.go @@ -0,0 +1,409 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package ast + +import ( + "bytes" + "fmt" +) + +// === [ File ] ================================================================ + +// A File represents a DOT file. +// +// Examples. +// +// digraph G { +// A -> B +// } +// graph H { +// C - D +// } +type File struct { + // Graphs. + Graphs []*Graph +} + +// String returns the string representation of the file. +func (f *File) String() string { + buf := new(bytes.Buffer) + for i, graph := range f.Graphs { + if i != 0 { + buf.WriteString("\n") + } + buf.WriteString(graph.String()) + } + return buf.String() +} + +// === [ Graphs ] ============================================================== + +// A Graph represents a directed or an undirected graph. +// +// Examples. +// +// digraph G { +// A -> {B C} +// B -> C +// } +type Graph struct { + // Strict graph; multi-edges forbidden. + Strict bool + // Directed graph. + Directed bool + // Graph ID; or empty if anonymous. + ID string + // Graph statements. + Stmts []Stmt +} + +// String returns the string representation of the graph. +func (g *Graph) String() string { + buf := new(bytes.Buffer) + if g.Strict { + buf.WriteString("strict ") + } + if g.Directed { + buf.WriteString("digraph ") + } else { + buf.WriteString("graph ") + } + if len(g.ID) > 0 { + fmt.Fprintf(buf, "%s ", g.ID) + } + buf.WriteString("{\n") + for _, stmt := range g.Stmts { + fmt.Fprintf(buf, "\t%s\n", stmt) + } + buf.WriteString("}") + return buf.String() +} + +// === [ Statements ] ========================================================== + +// A Stmt represents a statement, and has one of the following underlying types. +// +// *NodeStmt +// *EdgeStmt +// *AttrStmt +// *Attr +// *Subgraph +type Stmt interface { + fmt.Stringer + // isStmt ensures that only statements can be assigned to the Stmt interface. + isStmt() +} + +// --- [ Node statement ] ------------------------------------------------------ + +// A NodeStmt represents a node statement. +// +// Examples. +// +// A [color=blue] +type NodeStmt struct { + // Node. + Node *Node + // Node attributes. + Attrs []*Attr +} + +// String returns the string representation of the node statement. +func (e *NodeStmt) String() string { + buf := new(bytes.Buffer) + buf.WriteString(e.Node.String()) + if len(e.Attrs) > 0 { + buf.WriteString(" [") + for i, attr := range e.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +// --- [ Edge statement ] ------------------------------------------------------ + +// An EdgeStmt represents an edge statement. +// +// Examples. +// +// A -> B +// A -> {B C} +// A -> B -> C +type EdgeStmt struct { + // Source vertex. + From Vertex + // Outgoing edge. + To *Edge + // Edge attributes. + Attrs []*Attr +} + +// String returns the string representation of the edge statement. +func (e *EdgeStmt) String() string { + buf := new(bytes.Buffer) + fmt.Fprintf(buf, "%s %s", e.From, e.To) + if len(e.Attrs) > 0 { + buf.WriteString(" [") + for i, attr := range e.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +// An Edge represents an edge between two vertices. +type Edge struct { + // Directed edge. + Directed bool + // Destination vertex. + Vertex Vertex + // Outgoing edge; or nil if none. + To *Edge +} + +// String returns the string representation of the edge. 
+func (e *Edge) String() string { + op := "--" + if e.Directed { + op = "->" + } + if e.To != nil { + return fmt.Sprintf("%s %s %s", op, e.Vertex, e.To) + } + return fmt.Sprintf("%s %s", op, e.Vertex) +} + +// --- [ Attribute statement ] ------------------------------------------------- + +// An AttrStmt represents an attribute statement. +// +// Examples. +// +// graph [rankdir=LR] +// node [color=blue fillcolor=red] +// edge [minlen=1] +type AttrStmt struct { + // Graph component kind to which the attributes are assigned. + Kind Kind + // Attributes. + Attrs []*Attr +} + +// String returns the string representation of the attribute statement. +func (a *AttrStmt) String() string { + buf := new(bytes.Buffer) + fmt.Fprintf(buf, "%s [", a.Kind) + for i, attr := range a.Attrs { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(attr.String()) + } + buf.WriteString("]") + return buf.String() +} + +// Kind specifies the set of graph components to which attribute statements may +// be assigned. +type Kind uint + +// Graph component kinds. +const ( + GraphKind Kind = iota // graph + NodeKind // node + EdgeKind // edge +) + +// String returns the string representation of the graph component kind. +func (k Kind) String() string { + switch k { + case GraphKind: + return "graph" + case NodeKind: + return "node" + case EdgeKind: + return "edge" + } + panic(fmt.Sprintf("invalid graph component kind (%d)", k)) +} + +// --- [ Attribute ] ----------------------------------------------------------- + +// An Attr represents an attribute. +// +// Examples. +// +// rank=same +type Attr struct { + // Attribute key. + Key string + // Attribute value. + Val string +} + +// String returns the string representation of the attribute. +func (a *Attr) String() string { + return fmt.Sprintf("%s=%s", a.Key, a.Val) +} + +// --- [ Subgraph ] ------------------------------------------------------------ + +// A Subgraph represents a subgraph vertex. +// +// Examples. +// +// subgraph S {A B C} +type Subgraph struct { + // Subgraph ID; or empty if none. + ID string + // Subgraph statements. + Stmts []Stmt +} + +// String returns the string representation of the subgraph. +func (s *Subgraph) String() string { + buf := new(bytes.Buffer) + if len(s.ID) > 0 { + fmt.Fprintf(buf, "subgraph %s ", s.ID) + } + buf.WriteString("{") + for i, stmt := range s.Stmts { + if i != 0 { + buf.WriteString(" ") + } + buf.WriteString(stmt.String()) + } + buf.WriteString("}") + return buf.String() +} + +// isStmt ensures that only statements can be assigned to the Stmt interface. +func (*NodeStmt) isStmt() {} +func (*EdgeStmt) isStmt() {} +func (*AttrStmt) isStmt() {} +func (*Attr) isStmt() {} +func (*Subgraph) isStmt() {} + +// === [ Vertices ] ============================================================ + +// A Vertex represents a vertex, and has one of the following underlying types. +// +// *Node +// *Subgraph +type Vertex interface { + fmt.Stringer + // isVertex ensures that only vertices can be assigned to the Vertex + // interface. + isVertex() +} + +// --- [ Node identifier ] ----------------------------------------------------- + +// A Node represents a node vertex. +// +// Examples. +// +// A +// A:nw +type Node struct { + // Node ID. + ID string + // Node port; or nil if none. + Port *Port +} + +// String returns the string representation of the node. 
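A construction sketch (not part of the patch): the AST types above can be assembled directly and rendered back to DOT through their String methods.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/formats/dot/ast"
)

func main() {
	g := &ast.Graph{
		Directed: true,
		ID:       "G",
		Stmts: []ast.Stmt{
			&ast.EdgeStmt{
				From:  &ast.Node{ID: "A"},
				To:    &ast.Edge{Directed: true, Vertex: &ast.Node{ID: "B"}},
				Attrs: []*ast.Attr{{Key: "label", Val: "a-to-b"}},
			},
		},
	}
	fmt.Println(g)
	// Output:
	// digraph G {
	// 	A -> B [label=a-to-b]
	// }
}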
+func (n *Node) String() string { + if n.Port != nil { + return fmt.Sprintf("%s%s", n.ID, n.Port) + } + return n.ID +} + +// A Port specifies where on a node an edge should be aimed. +type Port struct { + // Port ID; or empty if none. + ID string + // Compass point. + CompassPoint CompassPoint +} + +// String returns the string representation of the port. +func (p *Port) String() string { + buf := new(bytes.Buffer) + if len(p.ID) > 0 { + fmt.Fprintf(buf, ":%s", p.ID) + } + if p.CompassPoint != CompassPointNone { + fmt.Fprintf(buf, ":%s", p.CompassPoint) + } + return buf.String() +} + +// CompassPoint specifies the set of compass points. +type CompassPoint uint + +// Compass points. +const ( + CompassPointNone CompassPoint = iota // + CompassPointNorth // n + CompassPointNorthEast // ne + CompassPointEast // e + CompassPointSouthEast // se + CompassPointSouth // s + CompassPointSouthWest // sw + CompassPointWest // w + CompassPointNorthWest // nw + CompassPointCenter // c + CompassPointDefault // _ +) + +// String returns the string representation of the compass point. +func (c CompassPoint) String() string { + switch c { + case CompassPointNone: + return "" + case CompassPointNorth: + return "n" + case CompassPointNorthEast: + return "ne" + case CompassPointEast: + return "e" + case CompassPointSouthEast: + return "se" + case CompassPointSouth: + return "s" + case CompassPointSouthWest: + return "sw" + case CompassPointWest: + return "w" + case CompassPointNorthWest: + return "nw" + case CompassPointCenter: + return "c" + case CompassPointDefault: + return "_" + } + panic(fmt.Sprintf("invalid compass point (%d)", uint(c))) +} + +// isVertex ensures that only vertices can be assigned to the Vertex interface. +func (*Node) isVertex() {} +func (*Subgraph) isVertex() {} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/doc.go new file mode 100644 index 0000000..72220d0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/ast/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ast declares the types used to represent abstract syntax trees of +// Graphviz DOT graphs. +package ast // import "gonum.org/v1/gonum/graph/formats/dot/ast" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/doc.go new file mode 100644 index 0000000..32689fd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dot implements a parser for Graphviz DOT files. +package dot // import "gonum.org/v1/gonum/graph/formats/dot" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/dot.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/dot.go new file mode 100644 index 0000000..c439cad --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/dot.go @@ -0,0 +1,64 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. 
+// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +//go:generate ./makeinternal.bash + +package dot + +import ( + "fmt" + "io" + "io/ioutil" + + "gonum.org/v1/gonum/graph/formats/dot/ast" + "gonum.org/v1/gonum/graph/formats/dot/internal/lexer" + "gonum.org/v1/gonum/graph/formats/dot/internal/parser" +) + +// ParseFile parses the given Graphviz DOT file into an AST. +func ParseFile(path string) (*ast.File, error) { + buf, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return ParseBytes(buf) +} + +// Parse parses the given Graphviz DOT file into an AST, reading from r. +func Parse(r io.Reader) (*ast.File, error) { + buf, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + return ParseBytes(buf) +} + +// ParseBytes parses the given Graphviz DOT file into an AST, reading from b. +func ParseBytes(b []byte) (*ast.File, error) { + l := lexer.NewLexer(b) + p := parser.NewParser() + file, err := p.Parse(l) + if err != nil { + return nil, err + } + f, ok := file.(*ast.File) + if !ok { + return nil, fmt.Errorf("invalid file type; expected *ast.File, got %T", file) + } + if err := check(f); err != nil { + return nil, err + } + return f, nil +} + +// ParseString parses the given Graphviz DOT file into an AST, reading from s. +func ParseString(s string) (*ast.File, error) { + return ParseBytes([]byte(s)) +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/fuzz/fuzz.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/fuzz/fuzz.go new file mode 100644 index 0000000..f122c6a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/fuzz/fuzz.go @@ -0,0 +1,39 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build gofuzz + +package fuzz + +import ( + "bytes" + "os/exec" + + "gonum.org/v1/gonum/graph/formats/dot" +) + +// Fuzz implements the fuzzing function required for go-fuzz. +// +// See documentation at https://github.com/dvyukov/go-fuzz. +func Fuzz(data []byte) int { + // We don't accept empty data; the dot command does. + if len(data) == 0 || bytes.Equal(data, []byte{0}) { + return -1 + } + + // Check that dot accepts the input without complaint. + cmd := exec.Command("dot") + cmd.Stdin = bytes.NewReader(data) + err := cmd.Run() + if err != nil { + return 0 + } + + // Try to parse the data. + _, err = dot.Parse(bytes.NewReader(data)) + if err != nil { + panic("could not parse good dot") + } + return 1 +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/astx.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/astx.go new file mode 100644 index 0000000..4e37067 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/astx.go @@ -0,0 +1,326 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package astx + +import ( + "fmt" + "strings" + + "gonum.org/v1/gonum/graph/formats/dot/ast" + "gonum.org/v1/gonum/graph/formats/dot/internal/token" +) + +// === [ File ] ================================================================ + +// NewFile returns a new file based on the given graph. 
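A parsing sketch (not part of the patch) using the ParseString helper above together with the ast package from earlier in this file series.

package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/graph/formats/dot"
	"gonum.org/v1/gonum/graph/formats/dot/ast"
)

func main() {
	f, err := dot.ParseString(`digraph G { A -> B [label="A to B"] }`)
	if err != nil {
		log.Fatal(err)
	}
	for _, g := range f.Graphs {
		fmt.Println(g.ID, len(g.Stmts)) // G 1
		for _, stmt := range g.Stmts {
			if e, ok := stmt.(*ast.EdgeStmt); ok {
				fmt.Println(e.From, e.To.Vertex) // A B
			}
		}
	}
}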
+func NewFile(graph interface{}) (*ast.File, error) { + g, ok := graph.(*ast.Graph) + if !ok { + return nil, fmt.Errorf("invalid graph type; expected *ast.Graph, got %T", graph) + } + return &ast.File{Graphs: []*ast.Graph{g}}, nil +} + +// AppendGraph appends graph to the given file. +func AppendGraph(file, graph interface{}) (*ast.File, error) { + f, ok := file.(*ast.File) + if !ok { + return nil, fmt.Errorf("invalid file type; expected *ast.File, got %T", file) + } + g, ok := graph.(*ast.Graph) + if !ok { + return nil, fmt.Errorf("invalid graph type; expected *ast.Graph, got %T", graph) + } + f.Graphs = append(f.Graphs, g) + return f, nil +} + +// === [ Graphs ] ============================================================== + +// NewGraph returns a new graph based on the given graph strictness, direction, +// optional ID and optional statements. +func NewGraph(strict, directed, optID, optStmts interface{}) (*ast.Graph, error) { + s, ok := strict.(bool) + if !ok { + return nil, fmt.Errorf("invalid strictness type; expected bool, got %T", strict) + } + d, ok := directed.(bool) + if !ok { + return nil, fmt.Errorf("invalid direction type; expected bool, got %T", directed) + } + id, ok := optID.(string) + if optID != nil && !ok { + return nil, fmt.Errorf("invalid ID type; expected string or nil, got %T", optID) + } + stmts, ok := optStmts.([]ast.Stmt) + if optStmts != nil && !ok { + return nil, fmt.Errorf("invalid statements type; expected []ast.Stmt or nil, got %T", optStmts) + } + return &ast.Graph{Strict: s, Directed: d, ID: id, Stmts: stmts}, nil +} + +// === [ Statements ] ========================================================== + +// NewStmtList returns a new statement list based on the given statement. +func NewStmtList(stmt interface{}) ([]ast.Stmt, error) { + s, ok := stmt.(ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement type; expected ast.Stmt, got %T", stmt) + } + return []ast.Stmt{s}, nil +} + +// AppendStmt appends stmt to the given statement list. +func AppendStmt(list, stmt interface{}) ([]ast.Stmt, error) { + l, ok := list.([]ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement list type; expected []ast.Stmt, got %T", list) + } + s, ok := stmt.(ast.Stmt) + if !ok { + return nil, fmt.Errorf("invalid statement type; expected ast.Stmt, got %T", stmt) + } + return append(l, s), nil +} + +// --- [ Node statement ] ------------------------------------------------------ + +// NewNodeStmt returns a new node statement based on the given node and optional +// attributes. +func NewNodeStmt(node, optAttrs interface{}) (*ast.NodeStmt, error) { + n, ok := node.(*ast.Node) + if !ok { + return nil, fmt.Errorf("invalid node type; expected *ast.Node, got %T", node) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.NodeStmt{Node: n, Attrs: attrs}, nil +} + +// --- [ Edge statement ] ------------------------------------------------------ + +// NewEdgeStmt returns a new edge statement based on the given source vertex, +// outgoing edge and optional attributes. 
+func NewEdgeStmt(from, to, optAttrs interface{}) (*ast.EdgeStmt, error) { + f, ok := from.(ast.Vertex) + if !ok { + return nil, fmt.Errorf("invalid source vertex type; expected ast.Vertex, got %T", from) + } + t, ok := to.(*ast.Edge) + if !ok { + return nil, fmt.Errorf("invalid outgoing edge type; expected *ast.Edge, got %T", to) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.EdgeStmt{From: f, To: t, Attrs: attrs}, nil +} + +// NewEdge returns a new edge based on the given edge direction, destination +// vertex and optional outgoing edge. +func NewEdge(directed, vertex, optTo interface{}) (*ast.Edge, error) { + d, ok := directed.(bool) + if !ok { + return nil, fmt.Errorf("invalid direction type; expected bool, got %T", directed) + } + v, ok := vertex.(ast.Vertex) + if !ok { + return nil, fmt.Errorf("invalid destination vertex type; expected ast.Vertex, got %T", vertex) + } + to, ok := optTo.(*ast.Edge) + if optTo != nil && !ok { + return nil, fmt.Errorf("invalid outgoing edge type; expected *ast.Edge or nil, got %T", optTo) + } + return &ast.Edge{Directed: d, Vertex: v, To: to}, nil +} + +// --- [ Attribute statement ] ------------------------------------------------- + +// NewAttrStmt returns a new attribute statement based on the given graph +// component kind and attributes. +func NewAttrStmt(kind, optAttrs interface{}) (*ast.AttrStmt, error) { + k, ok := kind.(ast.Kind) + if !ok { + return nil, fmt.Errorf("invalid graph component kind type; expected ast.Kind, got %T", kind) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return &ast.AttrStmt{Kind: k, Attrs: attrs}, nil +} + +// NewAttrList returns a new attribute list based on the given attribute. +func NewAttrList(attr interface{}) ([]*ast.Attr, error) { + a, ok := attr.(*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute type; expected *ast.Attr, got %T", attr) + } + return []*ast.Attr{a}, nil +} + +// AppendAttr appends attr to the given attribute list. +func AppendAttr(list, attr interface{}) ([]*ast.Attr, error) { + l, ok := list.([]*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute list type; expected []*ast.Attr, got %T", list) + } + a, ok := attr.(*ast.Attr) + if !ok { + return nil, fmt.Errorf("invalid attribute type; expected *ast.Attr, got %T", attr) + } + return append(l, a), nil +} + +// AppendAttrList appends the optional attrs to the given optional attribute +// list. +func AppendAttrList(optList, optAttrs interface{}) ([]*ast.Attr, error) { + list, ok := optList.([]*ast.Attr) + if optList != nil && !ok { + return nil, fmt.Errorf("invalid attribute list type; expected []*ast.Attr or nil, got %T", optList) + } + attrs, ok := optAttrs.([]*ast.Attr) + if optAttrs != nil && !ok { + return nil, fmt.Errorf("invalid attributes type; expected []*ast.Attr or nil, got %T", optAttrs) + } + return append(list, attrs...), nil +} + +// --- [ Attribute ] ----------------------------------------------------------- + +// NewAttr returns a new attribute based on the given key-value pair. 
+func NewAttr(key, val interface{}) (*ast.Attr, error) { + k, ok := key.(string) + if !ok { + return nil, fmt.Errorf("invalid key type; expected string, got %T", key) + } + v, ok := val.(string) + if !ok { + return nil, fmt.Errorf("invalid value type; expected string, got %T", val) + } + return &ast.Attr{Key: k, Val: v}, nil +} + +// --- [ Subgraph ] ------------------------------------------------------------ + +// NewSubgraph returns a new subgraph based on the given optional subgraph ID +// and optional statements. +func NewSubgraph(optID, optStmts interface{}) (*ast.Subgraph, error) { + id, ok := optID.(string) + if optID != nil && !ok { + return nil, fmt.Errorf("invalid ID type; expected string or nil, got %T", optID) + } + stmts, ok := optStmts.([]ast.Stmt) + if optStmts != nil && !ok { + return nil, fmt.Errorf("invalid statements type; expected []ast.Stmt or nil, got %T", optStmts) + } + return &ast.Subgraph{ID: id, Stmts: stmts}, nil +} + +// === [ Vertices ] ============================================================ + +// --- [ Node identifier ] ----------------------------------------------------- + +// NewNode returns a new node based on the given node id and optional port. +func NewNode(id, optPort interface{}) (*ast.Node, error) { + i, ok := id.(string) + if !ok { + return nil, fmt.Errorf("invalid ID type; expected string, got %T", id) + } + port, ok := optPort.(*ast.Port) + if optPort != nil && !ok { + return nil, fmt.Errorf("invalid port type; expected *ast.Port or nil, got %T", optPort) + } + return &ast.Node{ID: i, Port: port}, nil +} + +// NewPort returns a new port based on the given id and optional compass point. +func NewPort(id, optCompassPoint interface{}) (*ast.Port, error) { + // Note, if optCompassPoint is nil, id may be either an identifier or a + // compass point. + // + // The following strings are valid compass points: + // + // "n", "ne", "e", "se", "s", "sw", "w", "nw", "c" and "_" + i, ok := id.(string) + if !ok { + return nil, fmt.Errorf("invalid ID type; expected string, got %T", id) + } + + // Early return if optional compass point is absent and ID is a valid compass + // point. + if optCompassPoint == nil { + if compassPoint, ok := getCompassPoint(i); ok { + return &ast.Port{CompassPoint: compassPoint}, nil + } + } + + c, ok := optCompassPoint.(string) + if optCompassPoint != nil && !ok { + return nil, fmt.Errorf("invalid compass point type; expected string or nil, got %T", optCompassPoint) + } + compassPoint, _ := getCompassPoint(c) + return &ast.Port{ID: i, CompassPoint: compassPoint}, nil +} + +// getCompassPoint returns the corresponding compass point to the given string, +// and a boolean value indicating if such a compass point exists. +func getCompassPoint(s string) (ast.CompassPoint, bool) { + switch s { + case "_": + return ast.CompassPointDefault, true + case "n": + return ast.CompassPointNorth, true + case "ne": + return ast.CompassPointNorthEast, true + case "e": + return ast.CompassPointEast, true + case "se": + return ast.CompassPointSouthEast, true + case "s": + return ast.CompassPointSouth, true + case "sw": + return ast.CompassPointSouthWest, true + case "w": + return ast.CompassPointWest, true + case "nw": + return ast.CompassPointNorthWest, true + case "c": + return ast.CompassPointCenter, true + } + return ast.CompassPointNone, false +} + +// === [ Identifiers ] ========================================================= + +// NewID returns a new identifier based on the given ID token. 
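A minimal sketch (illustrative only, not part of the vendored files) of the exported ast.Node/ast.Port/ast.CompassPoint types and their String methods defined earlier in this patch, showing how a node with a compass-point port renders back to DOT syntax.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/formats/dot/ast"
)

func main() {
	// A node "a" addressed at its north-west corner, i.e. the DOT text a:nw.
	n := &ast.Node{
		ID:   "a",
		Port: &ast.Port{CompassPoint: ast.CompassPointNorthWest},
	}
	fmt.Println(n) // a:nw

	// With an explicit port ID the compass point follows it.
	n.Port = &ast.Port{ID: "p0", CompassPoint: ast.CompassPointNorthWest}
	fmt.Println(n) // a:p0:nw
}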
+func NewID(id interface{}) (string, error) { + i, ok := id.(*token.Token) + if !ok { + return "", fmt.Errorf("invalid identifier type; expected *token.Token, got %T", id) + } + s := string(i.Lit) + + // As another aid for readability, dot allows double-quoted strings to span + // multiple physical lines using the standard C convention of a backslash + // immediately preceding a newline character. + if strings.HasPrefix(s, `"`) && strings.HasSuffix(s, `"`) { + // Strip "\\\n" sequences. + s = strings.Replace(s, "\\\n", "", -1) + } + + // TODO: Add support for concatenated using a '+' operator. + + return s, nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/doc.go new file mode 100644 index 0000000..0e39a77 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/astx/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astx implements utility functions for generating abstract syntax +// trees of Graphviz DOT graphs. +package astx // import "gonum.org/v1/gonum/graph/formats/dot/internal/astx" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/doc.go new file mode 100644 index 0000000..9606242 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package error provides generated internal error functions for DOT parsing. +package errors // import "gonum.org/v1/gonum/graph/formats/dot/internal/errors" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/errors.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/errors.go new file mode 100644 index 0000000..ecbef4c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/errors/errors.go @@ -0,0 +1,66 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package errors + +import ( + "fmt" + "strings" + + "gonum.org/v1/gonum/graph/formats/dot/internal/token" +) + +type ErrorSymbol interface { +} + +type Error struct { + Err error + ErrorToken *token.Token + ErrorSymbols []ErrorSymbol + ExpectedTokens []string + StackTop int +} + +func (e *Error) String() string { + w := new(strings.Builder) + fmt.Fprintf(w, "Error") + if e.Err != nil { + fmt.Fprintf(w, " %s\n", e.Err) + } else { + fmt.Fprintf(w, "\n") + } + fmt.Fprintf(w, "Token: type=%d, lit=%s\n", e.ErrorToken.Type, e.ErrorToken.Lit) + fmt.Fprintf(w, "Pos: offset=%d, line=%d, column=%d\n", e.ErrorToken.Pos.Offset, e.ErrorToken.Pos.Line, e.ErrorToken.Pos.Column) + fmt.Fprintf(w, "Expected one of: ") + for _, sym := range e.ExpectedTokens { + fmt.Fprintf(w, "%s ", sym) + } + fmt.Fprintf(w, "ErrorSymbol:\n") + for _, sym := range e.ErrorSymbols { + fmt.Fprintf(w, "%v\n", sym) + } + return w.String() +} + +func (e *Error) Error() string { + w := new(strings.Builder) + fmt.Fprintf(w, "Error in S%d: %s, %s", e.StackTop, token.TokMap.TokenString(e.ErrorToken), e.ErrorToken.Pos.String()) + if e.Err != nil { + fmt.Fprintf(w, ": %+v", e.Err) + } else { + fmt.Fprintf(w, ", expected one of: ") + for _, expected := range e.ExpectedTokens { + fmt.Fprintf(w, "%s ", expected) + } + } + return w.String() +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/acttab.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/acttab.go new file mode 100644 index 0000000..b3e4ab9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/acttab.go @@ -0,0 +1,605 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package lexer + +import ( + "fmt" + + "gonum.org/v1/gonum/graph/formats/dot/internal/token" +) + +type ActionTable [NumStates]ActionRow + +type ActionRow struct { + Accept token.Type + Ignore string +} + +func (a ActionRow) String() string { + return fmt.Sprintf("Accept=%d, Ignore=%s", a.Accept, a.Ignore) +} + +var ActTab = ActionTable{ + ActionRow{ // S0 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S1 + Accept: -1, + Ignore: "!whitespace", + }, + ActionRow{ // S2 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S3 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S4 + Accept: 15, + Ignore: "", + }, + ActionRow{ // S5 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S6 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S7 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S8 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S9 + Accept: 18, + Ignore: "", + }, + ActionRow{ // S10 + Accept: 8, + Ignore: "", + }, + ActionRow{ // S11 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S12 + Accept: 16, + Ignore: "", + }, + ActionRow{ // S13 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S14 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S15 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S16 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S17 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S18 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S19 + Accept: 13, + Ignore: "", + }, + ActionRow{ // S20 + Accept: 14, + Ignore: "", + }, + ActionRow{ // S21 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S22 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S23 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S24 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S25 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S26 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S27 + Accept: 2, + Ignore: "", + }, + ActionRow{ // S28 + Accept: 3, + Ignore: "", + }, + ActionRow{ // S29 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S30 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S31 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S32 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S33 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S34 + Accept: -1, + Ignore: "!comment", + }, + ActionRow{ // S35 + Accept: 9, + Ignore: "", + }, + ActionRow{ // S36 + Accept: 10, + Ignore: "", + }, + ActionRow{ // S37 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S38 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S39 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S40 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S41 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S42 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S43 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S44 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S45 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S46 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S47 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S48 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S49 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S50 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S51 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S52 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S53 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S54 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S55 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S56 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S57 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S58 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S59 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S60 + Accept: 19, + 
Ignore: "", + }, + ActionRow{ // S61 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S62 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S63 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S64 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S65 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S66 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S67 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S68 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S69 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S70 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S71 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S72 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S73 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S74 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S75 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S76 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S77 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S78 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S79 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S80 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S81 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S82 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S83 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S84 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S85 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S86 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S87 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S88 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S89 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S90 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S91 + Accept: -1, + Ignore: "!comment", + }, + ActionRow{ // S92 + Accept: 0, + Ignore: "", + }, + ActionRow{ // S93 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S94 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S95 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S96 + Accept: 12, + Ignore: "", + }, + ActionRow{ // S97 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S98 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S99 + Accept: 11, + Ignore: "", + }, + ActionRow{ // S100 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S101 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S102 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S103 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S104 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S105 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S106 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S107 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S108 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S109 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S110 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S111 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S112 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S113 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S114 + Accept: 6, + Ignore: "", + }, + ActionRow{ // S115 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S116 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S117 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S118 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S119 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S120 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S121 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S122 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S123 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S124 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S125 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S126 + Accept: 19, + Ignore: "", + }, + ActionRow{ 
// S127 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S128 + Accept: 5, + Ignore: "", + }, + ActionRow{ // S129 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S130 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S131 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S132 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S133 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S134 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S135 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S136 + Accept: 7, + Ignore: "", + }, + ActionRow{ // S137 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S138 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S139 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S140 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S141 + Accept: 19, + Ignore: "", + }, + ActionRow{ // S142 + Accept: 17, + Ignore: "", + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/doc.go new file mode 100644 index 0000000..7ba072c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lexer provides generated internal lexer functions for DOT parsing. +package lexer // import "gonum.org/v1/gonum/graph/formats/dot/internal/lexer" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/lexer.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/lexer.go new file mode 100644 index 0000000..81f3ad7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/lexer.go @@ -0,0 +1,310 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package lexer + +import ( + "io/ioutil" + "unicode/utf8" + + "gonum.org/v1/gonum/graph/formats/dot/internal/token" +) + +const ( + NoState = -1 + NumStates = 143 + NumSymbols = 184 +) + +type Lexer struct { + src []byte + pos int + line int + column int +} + +func NewLexer(src []byte) *Lexer { + lexer := &Lexer{ + src: src, + pos: 0, + line: 1, + column: 1, + } + return lexer +} + +func NewLexerFile(fpath string) (*Lexer, error) { + src, err := ioutil.ReadFile(fpath) + if err != nil { + return nil, err + } + return NewLexer(src), nil +} + +func (l *Lexer) Scan() (tok *token.Token) { + tok = new(token.Token) + if l.pos >= len(l.src) { + tok.Type = token.EOF + tok.Pos.Offset, tok.Pos.Line, tok.Pos.Column = l.pos, l.line, l.column + return + } + start, startLine, startColumn, end := l.pos, l.line, l.column, 0 + tok.Type = token.INVALID + state, rune1, size := 0, rune(-1), 0 + for state != -1 { + if l.pos >= len(l.src) { + rune1 = -1 + } else { + rune1, size = utf8.DecodeRune(l.src[l.pos:]) + l.pos += size + } + + nextState := -1 + if rune1 != -1 { + nextState = TransTab[state](rune1) + } + state = nextState + + if state != -1 { + + switch rune1 { + case '\n': + l.line++ + l.column = 1 + case '\r': + l.column = 1 + case '\t': + l.column += 4 + default: + l.column++ + } + + switch { + case ActTab[state].Accept != -1: + tok.Type = ActTab[state].Accept + end = l.pos + case ActTab[state].Ignore != "": + start, startLine, startColumn = l.pos, l.line, l.column + state = 0 + if start >= len(l.src) { + tok.Type = token.EOF + } + + } + } else { + if tok.Type == token.INVALID { + end = l.pos + } + } + } + if end > start { + l.pos = end + tok.Lit = l.src[start:end] + } else { + tok.Lit = []byte{} + } + tok.Pos.Offset, tok.Pos.Line, tok.Pos.Column = start, startLine, startColumn + + return +} + +func (l *Lexer) Reset() { + l.pos = 0 +} + +/* +Lexer symbols: +0: 'n' +1: 'o' +2: 'd' +3: 'e' +4: 'N' +5: 'o' +6: 'd' +7: 'e' +8: 'N' +9: 'O' +10: 'D' +11: 'E' +12: 'e' +13: 'd' +14: 'g' +15: 'e' +16: 'E' +17: 'd' +18: 'g' +19: 'e' +20: 'E' +21: 'D' +22: 'G' +23: 'E' +24: 'g' +25: 'r' +26: 'a' +27: 'p' +28: 'h' +29: 'G' +30: 'r' +31: 'a' +32: 'p' +33: 'h' +34: 'G' +35: 'R' +36: 'A' +37: 'P' +38: 'H' +39: 'd' +40: 'i' +41: 'g' +42: 'r' +43: 'a' +44: 'p' +45: 'h' +46: 'D' +47: 'i' +48: 'g' +49: 'r' +50: 'a' +51: 'p' +52: 'h' +53: 'd' +54: 'i' +55: 'G' +56: 'r' +57: 'a' +58: 'p' +59: 'h' +60: 'D' +61: 'i' +62: 'G' +63: 'r' +64: 'a' +65: 'p' +66: 'h' +67: 'D' +68: 'I' +69: 'G' +70: 'R' +71: 'A' +72: 'P' +73: 'H' +74: 's' +75: 'u' +76: 'b' +77: 'g' +78: 'r' +79: 'a' +80: 'p' +81: 'h' +82: 'S' +83: 'u' +84: 'b' +85: 'g' +86: 'r' +87: 'a' +88: 'p' +89: 'h' +90: 's' +91: 'u' +92: 'b' +93: 'G' +94: 'r' +95: 'a' +96: 'p' +97: 'h' +98: 'S' +99: 'u' +100: 'b' +101: 'G' +102: 'r' +103: 'a' +104: 'p' +105: 'h' +106: 'S' +107: 'U' +108: 'B' +109: 'G' +110: 'R' +111: 'A' +112: 'P' +113: 'H' +114: 's' +115: 't' +116: 'r' +117: 'i' +118: 'c' +119: 't' +120: 'S' +121: 't' +122: 'r' +123: 'i' +124: 'c' +125: 't' +126: 'S' +127: 'T' +128: 'R' +129: 'I' +130: 'C' +131: 'T' +132: '{' +133: '}' +134: ';' +135: '-' +136: '-' +137: '-' +138: '>' +139: '[' +140: ']' +141: ',' +142: '=' +143: ':' +144: '_' +145: '-' +146: '.' +147: '-' +148: '.' +149: '\' +150: '"' +151: '\' +152: '"' +153: '"' +154: '=' +155: '<' +156: '>' +157: '<' +158: '>' +159: '/' +160: '/' +161: '\n' +162: '#' +163: '\n' +164: '/' +165: '*' +166: '*' +167: '*' +168: '/' +169: ' ' +170: '\t' +171: '\r' +172: '\n' +173: \u0001-'!' 
+174: '#'-'[' +175: ']'-\u007f +176: 'a'-'z' +177: 'A'-'Z' +178: '0'-'9' +179: \u0080-\ufffc +180: \ufffe-\U0010ffff +181: \u0001-';' +182: '?'-\u00ff +183: . +*/ diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/transitiontable.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/transitiontable.go new file mode 100644 index 0000000..c010387 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/lexer/transitiontable.go @@ -0,0 +1,2813 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package lexer + +/* +Let s be the current state +Let r be the current input rune +transitionTable[s](r) returns the next state. +*/ +type TransitionTable [NumStates]func(rune) int + +var TransTab = TransitionTable{ + // S0 + func(r rune) int { + switch { + case r == 9: // ['\t','\t'] + return 1 + case r == 10: // ['\n','\n'] + return 1 + case r == 13: // ['\r','\r'] + return 1 + case r == 32: // [' ',' '] + return 1 + case r == 34: // ['"','"'] + return 2 + case r == 35: // ['#','#'] + return 3 + case r == 44: // [',',','] + return 4 + case r == 45: // ['-','-'] + return 5 + case r == 46: // ['.','.'] + return 6 + case r == 47: // ['/','/'] + return 7 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + case r == 58: // [':',':'] + return 9 + case r == 59: // [';',';'] + return 10 + case r == 60: // ['<','<'] + return 11 + case r == 61: // ['=','='] + return 12 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 14 + case r == 69: // ['E','E'] + return 15 + case r == 70: // ['F','F'] + return 13 + case r == 71: // ['G','G'] + return 16 + case 72 <= r && r <= 77: // ['H','M'] + return 13 + case r == 78: // ['N','N'] + return 17 + case 79 <= r && r <= 82: // ['O','R'] + return 13 + case r == 83: // ['S','S'] + return 18 + case 84 <= r && r <= 90: // ['T','Z'] + return 13 + case r == 91: // ['[','['] + return 19 + case r == 93: // [']',']'] + return 20 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 22 + case r == 101: // ['e','e'] + return 23 + case r == 102: // ['f','f'] + return 13 + case r == 103: // ['g','g'] + return 24 + case 104 <= r && r <= 109: // ['h','m'] + return 13 + case r == 110: // ['n','n'] + return 25 + case 111 <= r && r <= 114: // ['o','r'] + return 13 + case r == 115: // ['s','s'] + return 26 + case 116 <= r && r <= 122: // ['t','z'] + return 13 + case r == 123: // ['{','{'] + return 27 + case r == 125: // ['}','}'] + return 28 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S1 + func(r rune) int { + switch { + } + return NoState + }, + // S2 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + 
return 33 + } + return NoState + }, + // S3 + func(r rune) int { + switch { + case r == 10: // ['\n','\n'] + return 34 + default: + return 3 + } + }, + // S4 + func(r rune) int { + switch { + } + return NoState + }, + // S5 + func(r rune) int { + switch { + case r == 45: // ['-','-'] + return 35 + case r == 46: // ['.','.'] + return 6 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + case r == 62: // ['>','>'] + return 36 + } + return NoState + }, + // S6 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 37 + } + return NoState + }, + // S7 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 38 + case r == 47: // ['/','/'] + return 39 + } + return NoState + }, + // S8 + func(r rune) int { + switch { + case r == 46: // ['.','.'] + return 40 + case 48 <= r && r <= 57: // ['0','9'] + return 8 + } + return NoState + }, + // S9 + func(r rune) int { + switch { + } + return NoState + }, + // S10 + func(r rune) int { + switch { + } + return NoState + }, + // S11 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 127: // ['?',\u007f] + return 41 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 44 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 44 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 44 + } + return NoState + }, + // S12 + func(r rune) int { + switch { + } + return NoState + }, + // S13 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S14 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 72: // ['A','H'] + return 13 + case r == 73: // ['I','I'] + return 46 + case 74 <= r && r <= 90: // ['J','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 47 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S15 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 48 + case 69 <= r && r <= 90: // ['E','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 49 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S16 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 50 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 51 + case 115 <= r && r <= 122: // ['s','z'] 
+ return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S17 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 78: // ['A','N'] + return 13 + case r == 79: // ['O','O'] + return 52 + case 80 <= r && r <= 90: // ['P','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 110: // ['a','n'] + return 13 + case r == 111: // ['o','o'] + return 53 + case 112 <= r && r <= 122: // ['p','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S18 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 83: // ['A','S'] + return 13 + case r == 84: // ['T','T'] + return 54 + case r == 85: // ['U','U'] + return 55 + case 86 <= r && r <= 90: // ['V','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 56 + case r == 117: // ['u','u'] + return 57 + case 118 <= r && r <= 122: // ['v','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S19 + func(r rune) int { + switch { + } + return NoState + }, + // S20 + func(r rune) int { + switch { + } + return NoState + }, + // S21 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S22 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 58 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S23 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 59 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S24 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 60 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S25 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: 
// ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 110: // ['a','n'] + return 13 + case r == 111: // ['o','o'] + return 61 + case 112 <= r && r <= 122: // ['p','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S26 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 62 + case r == 117: // ['u','u'] + return 63 + case 118 <= r && r <= 122: // ['v','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S27 + func(r rune) int { + switch { + } + return NoState + }, + // S28 + func(r rune) int { + switch { + } + return NoState + }, + // S29 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S30 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + } + return NoState + }, + // S31 + func(r rune) int { + switch { + } + return NoState + }, + // S32 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 64 + case r == 34: // ['"','"'] + return 65 + case 35 <= r && r <= 91: // ['#','['] + return 64 + case r == 92: // ['\','\'] + return 65 + case 93 <= r && r <= 127: // [']',\u007f] + return 64 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 66 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 66 + } + return NoState + }, + // S33 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + } + return NoState + }, + // S34 + func(r rune) int { + switch { + } + return NoState + }, + // S35 + func(r rune) int { + switch { + } + return NoState + }, + // S36 + func(r rune) int { + switch { + } + return NoState + }, + // S37 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 37 + } + return NoState + }, + // S38 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 67 + default: + return 38 + } + }, + // S39 + func(r rune) int { + switch { + case r == 10: // ['\n','\n'] + return 34 + default: + return 39 + } + }, + // S40 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 68 + } + return NoState + }, + // S41 + func(r rune) int { + switch { + case 1 
<= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 127: // ['?',\u007f] + return 41 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 44 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 44 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 44 + } + return NoState + }, + // S42 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 69 + case r == 61: // ['=','='] + return 69 + case 63 <= r && r <= 127: // ['?',\u007f] + return 69 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 70 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 70 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 70 + } + return NoState + }, + // S43 + func(r rune) int { + switch { + } + return NoState + }, + // S44 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 127: // ['?',\u007f] + return 41 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 44 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 44 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 44 + } + return NoState + }, + // S45 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S46 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 71 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S47 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 72 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 73 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S48 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 74 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S49 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + 
return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 75 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S50 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case r == 65: // ['A','A'] + return 76 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S51 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 77 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S52 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 67: // ['A','C'] + return 13 + case r == 68: // ['D','D'] + return 78 + case 69 <= r && r <= 90: // ['E','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S53 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 79 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S54 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 80 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S55 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case r == 65: // ['A','A'] + return 13 + case r == 66: // ['B','B'] + return 81 + case 67 <= r && r <= 90: // ['C','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S56 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 82 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 
<= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S57 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 13 + case r == 98: // ['b','b'] + return 83 + case 99 <= r && r <= 122: // ['c','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S58 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 84 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 85 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S59 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 86 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S60 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 87 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S61 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 99: // ['a','c'] + return 13 + case r == 100: // ['d','d'] + return 88 + case 101 <= r && r <= 122: // ['e','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S62 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 89 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S63 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 13 + case r == 98: // ['b','b'] + return 90 + case 99 <= r && r <= 122: // ['c','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S64 + 
func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + } + return NoState + }, + // S65 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + } + return NoState + }, + // S66 + func(r rune) int { + switch { + case 1 <= r && r <= 33: // [\u0001,'!'] + return 30 + case r == 34: // ['"','"'] + return 31 + case 35 <= r && r <= 91: // ['#','['] + return 30 + case r == 92: // ['\','\'] + return 32 + case 93 <= r && r <= 127: // [']',\u007f] + return 30 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 33 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 33 + } + return NoState + }, + // S67 + func(r rune) int { + switch { + case r == 42: // ['*','*'] + return 67 + case r == 47: // ['/','/'] + return 91 + default: + return 38 + } + }, + // S68 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 68 + } + return NoState + }, + // S69 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 69 + case r == 61: // ['=','='] + return 69 + case r == 62: // ['>','>'] + return 92 + case 63 <= r && r <= 127: // ['?',\u007f] + return 69 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 70 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 70 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 70 + } + return NoState + }, + // S70 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 69 + case r == 61: // ['=','='] + return 69 + case r == 62: // ['>','>'] + return 92 + case 63 <= r && r <= 127: // ['?',\u007f] + return 69 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 70 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 70 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 70 + } + return NoState + }, + // S71 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 93 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S72 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 94 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S73 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // 
['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 95 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S74 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 68: // ['A','D'] + return 13 + case r == 69: // ['E','E'] + return 96 + case 70 <= r && r <= 90: // ['F','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S75 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 96 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S76 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 79: // ['A','O'] + return 13 + case r == 80: // ['P','P'] + return 97 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S77 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 98 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S78 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 68: // ['A','D'] + return 13 + case r == 69: // ['E','E'] + return 99 + case 70 <= r && r <= 90: // ['F','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S79 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 99 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S80 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 72: // ['A','H'] + return 13 + case r == 73: // ['I','I'] + return 
100 + case 74 <= r && r <= 90: // ['J','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S81 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 101 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S82 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 102 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S83 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 103 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 104 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S84 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 105 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S85 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 106 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S86 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 96 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S87 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 
97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 107 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S88 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 100: // ['a','d'] + return 13 + case r == 101: // ['e','e'] + return 99 + case 102 <= r && r <= 122: // ['f','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S89 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 104: // ['a','h'] + return 13 + case r == 105: // ['i','i'] + return 108 + case 106 <= r && r <= 122: // ['j','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S90 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 70: // ['A','F'] + return 13 + case r == 71: // ['G','G'] + return 109 + case 72 <= r && r <= 90: // ['H','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 102: // ['a','f'] + return 13 + case r == 103: // ['g','g'] + return 110 + case 104 <= r && r <= 122: // ['h','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S91 + func(r rune) int { + switch { + } + return NoState + }, + // S92 + func(r rune) int { + switch { + case 1 <= r && r <= 59: // [\u0001,';'] + return 41 + case r == 60: // ['<','<'] + return 42 + case r == 61: // ['=','='] + return 41 + case r == 62: // ['>','>'] + return 43 + case 63 <= r && r <= 127: // ['?',\u007f] + return 41 + case 128 <= r && r <= 255: // [\u0080,\u00ff] + return 44 + case 256 <= r && r <= 65532: // [\u0100,\ufffc] + return 44 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 44 + } + return NoState + }, + // S93 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case r == 65: // ['A','A'] + return 111 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S94 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 112 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S95 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 113 + case 98 <= 
r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S96 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S97 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 114 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S98 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 114 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S99 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S100 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 66: // ['A','B'] + return 13 + case r == 67: // ['C','C'] + return 115 + case 68 <= r && r <= 90: // ['D','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S101 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 81: // ['A','Q'] + return 13 + case r == 82: // ['R','R'] + return 116 + case 83 <= r && r <= 90: // ['S','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S102 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 98: // ['a','b'] + return 13 + case r == 99: // ['c','c'] + return 117 + case 100 <= r && r <= 122: // ['d','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S103 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 
65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 118 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S104 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 119 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S105 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 120 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S106 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 121 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S107 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 114 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S108 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 98: // ['a','b'] + return 13 + case r == 99: // ['c','c'] + return 122 + case 100 <= r && r <= 122: // ['d','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S109 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 123 + case 115 <= r && r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S110 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 113: // ['a','q'] + return 13 + case r == 114: // ['r','r'] + return 124 + case 115 <= r && 
r <= 122: // ['s','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S111 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 79: // ['A','O'] + return 13 + case r == 80: // ['P','P'] + return 125 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S112 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 126 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S113 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 127 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S114 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S115 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 83: // ['A','S'] + return 13 + case r == 84: // ['T','T'] + return 128 + case 85 <= r && r <= 90: // ['U','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S116 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case r == 65: // ['A','A'] + return 129 + case 66 <= r && r <= 90: // ['B','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S117 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 128 + case 117 <= r && r <= 122: // ['u','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S118 + func(r rune) int { + switch { + case 48 <= 
r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 130 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S119 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 131 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S120 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 132 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S121 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 133 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S122 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 115: // ['a','s'] + return 13 + case r == 116: // ['t','t'] + return 128 + case 117 <= r && r <= 122: // ['u','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S123 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 134 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S124 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case r == 97: // ['a','a'] + return 135 + case 98 <= r && r <= 122: // ['b','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S125 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 136 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: 
// [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S126 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 136 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S127 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 136 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S128 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S129 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 79: // ['A','O'] + return 13 + case r == 80: // ['P','P'] + return 137 + case 81 <= r && r <= 90: // ['Q','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S130 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 138 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S131 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 139 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S132 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 136 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S133 + func(r rune) int { + switch { + case 48 <= r && r 
<= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 136 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S134 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 140 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S135 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 111: // ['a','o'] + return 13 + case r == 112: // ['p','p'] + return 141 + case 113 <= r && r <= 122: // ['q','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S136 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S137 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 71: // ['A','G'] + return 13 + case r == 72: // ['H','H'] + return 142 + case 73 <= r && r <= 90: // ['I','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S138 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 142 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S139 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 142 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S140 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 
104: // ['h','h'] + return 142 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S141 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 103: // ['a','g'] + return 13 + case r == 104: // ['h','h'] + return 142 + case 105 <= r && r <= 122: // ['i','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, + // S142 + func(r rune) int { + switch { + case 48 <= r && r <= 57: // ['0','9'] + return 45 + case 65 <= r && r <= 90: // ['A','Z'] + return 13 + case r == 95: // ['_','_'] + return 21 + case 97 <= r && r <= 122: // ['a','z'] + return 13 + case 128 <= r && r <= 65532: // [\u0080,\ufffc] + return 29 + case 65534 <= r && r <= 1114111: // [\ufffe,\U0010ffff] + return 29 + } + return NoState + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/action.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/action.go new file mode 100644 index 0000000..ee1849d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/action.go @@ -0,0 +1,61 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser + +import ( + "fmt" +) + +type action interface { + act() + String() string +} + +type ( + accept bool + shift int // value is next state index + reduce int // value is production index +) + +func (this accept) act() {} +func (this shift) act() {} +func (this reduce) act() {} + +func (this accept) Equal(that action) bool { + if _, ok := that.(accept); ok { + return true + } + return false +} + +func (this reduce) Equal(that action) bool { + that1, ok := that.(reduce) + if !ok { + return false + } + return this == that1 +} + +func (this shift) Equal(that action) bool { + that1, ok := that.(shift) + if !ok { + return false + } + return this == that1 +} + +func (this accept) String() string { return "accept(0)" } +func (this shift) String() string { return fmt.Sprintf("shift:%d", this) } +func (this reduce) String() string { + return fmt.Sprintf("reduce:%d(%s)", this, productionsTable[this].String) +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/actiontable.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/actiontable.go new file mode 100644 index 0000000..1c0479f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/actiontable.go @@ -0,0 +1,2199 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package parser + +type ( + actionTable [numStates]actionRow + actionRow struct { + canRecover bool + actions [numSymbols]action + } +) + +var actionTab = actionTable{ + actionRow{ // S0 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + shift(4), /* strict */ + reduce(4), /* graphx, reduce: OptStrict */ + reduce(4), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S1 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + accept(true), /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + shift(4), /* strict */ + reduce(4), /* graphx, reduce: OptStrict */ + reduce(4), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S2 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(1), /* $, reduce: File */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(1), /* strict, reduce: File */ + reduce(1), /* graphx, reduce: File */ + reduce(1), /* digraph, reduce: File */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S3 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + shift(7), /* graphx */ + shift(8), /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S4 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + reduce(5), /* graphx, reduce: OptStrict */ + reduce(5), /* digraph, reduce: OptStrict */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S5 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(2), /* $, reduce: File */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(2), /* strict, reduce: File */ + reduce(2), /* graphx, reduce: File */ + reduce(2), /* digraph, reduce: File */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S6 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(53), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(11), /* id */ + }, + }, + actionRow{ // S7 + canRecover: false, + actions: 
[numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(6), /* {, reduce: DirectedGraph */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(6), /* id, reduce: DirectedGraph */ + }, + }, + actionRow{ // S8 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(7), /* {, reduce: DirectedGraph */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(7), /* id, reduce: DirectedGraph */ + }, + }, + actionRow{ // S9 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(12), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S10 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(54), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S11 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S12 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + }, + }, + actionRow{ // S13 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(31), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S14 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(27), /* [, reduce: Component */ + nil, /* ] */ + nil, /* , */ + nil, /* = 
*/ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S15 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(11), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + }, + }, + actionRow{ // S16 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(17), /* {, reduce: OptSemi */ + reduce(17), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(17), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + shift(34), /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(17), /* node, reduce: OptSemi */ + reduce(17), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(17), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(17), /* id, reduce: OptSemi */ + }, + }, + actionRow{ // S17 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(12), /* {, reduce: Stmt */ + reduce(12), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(12), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(12), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(12), /* node, reduce: Stmt */ + reduce(12), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(12), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(12), /* id, reduce: Stmt */ + }, + }, + actionRow{ // S18 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(13), /* {, reduce: Stmt */ + reduce(13), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(13), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(13), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(13), /* node, reduce: Stmt */ + reduce(13), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(13), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(13), /* id, reduce: Stmt */ + }, + }, + actionRow{ // S19 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(14), /* {, reduce: Stmt */ + reduce(14), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(14), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(14), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(14), /* node, reduce: Stmt */ + reduce(14), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(14), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(14), /* id, reduce: Stmt */ + }, + }, + actionRow{ // S20 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(15), /* {, reduce: Stmt */ + reduce(15), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(15), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(15), /* ;, reduce: Stmt */ + nil, /* -- */ + nil, /* -> */ + reduce(15), /* node, reduce: Stmt */ + reduce(15), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(15), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(15), /* id, reduce: Stmt */ + }, + }, + 
actionRow{ // S21 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(16), /* {, reduce: Stmt */ + reduce(16), /* }, reduce: Stmt */ + nil, /* empty */ + nil, /* strict */ + reduce(16), /* graphx, reduce: Stmt */ + nil, /* digraph */ + reduce(16), /* ;, reduce: Stmt */ + reduce(46), /* --, reduce: Vertex */ + reduce(46), /* ->, reduce: Vertex */ + reduce(16), /* node, reduce: Stmt */ + reduce(16), /* edge, reduce: Stmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(16), /* subgraph, reduce: Stmt */ + nil, /* : */ + reduce(16), /* id, reduce: Stmt */ + }, + }, + actionRow{ // S22 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(32), /* {, reduce: OptAttrList */ + reduce(32), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(32), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(32), /* ;, reduce: OptAttrList */ + reduce(45), /* --, reduce: Vertex */ + reduce(45), /* ->, reduce: Vertex */ + reduce(32), /* node, reduce: OptAttrList */ + reduce(32), /* edge, reduce: OptAttrList */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(32), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(32), /* id, reduce: OptAttrList */ + }, + }, + actionRow{ // S23 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + shift(40), /* -- */ + shift(41), /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S24 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S25 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(28), /* [, reduce: Component */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S26 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + reduce(29), /* [, reduce: Component */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S27 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(50), /* {, reduce: OptPort */ + reduce(50), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(50), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(50), /* ;, reduce: OptPort */ + reduce(50), /* --, reduce: OptPort */ + reduce(50), /* ->, reduce: OptPort */ + reduce(50), /* node, reduce: OptPort */ + 
reduce(50), /* edge, reduce: OptPort */ + reduce(50), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + shift(43), /* = */ + reduce(50), /* subgraph, reduce: OptPort */ + shift(46), /* : */ + reduce(50), /* id, reduce: OptPort */ + }, + }, + actionRow{ // S28 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(47), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S29 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(53), /* {, reduce: OptID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(11), /* id */ + }, + }, + actionRow{ // S30 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + reduce(52), /* =, reduce: ID */ + reduce(52), /* subgraph, reduce: ID */ + reduce(52), /* :, reduce: ID */ + reduce(52), /* id, reduce: ID */ + }, + }, + actionRow{ // S31 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + reduce(3), /* $, reduce: Graph */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + reduce(3), /* strict, reduce: Graph */ + reduce(3), /* graphx, reduce: Graph */ + reduce(3), /* digraph, reduce: Graph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S32 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(17), /* {, reduce: OptSemi */ + reduce(17), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(17), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + shift(34), /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(17), /* node, reduce: OptSemi */ + reduce(17), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(17), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(17), /* id, reduce: OptSemi */ + }, + }, + actionRow{ // S33 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(8), /* {, reduce: StmtList */ + reduce(8), /* }, reduce: StmtList */ + nil, /* empty */ + nil, /* strict */ + reduce(8), /* graphx, reduce: StmtList */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(8), /* node, reduce: StmtList */ + reduce(8), /* edge, reduce: StmtList */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(8), /* subgraph, reduce: StmtList */ + nil, /* : */ + reduce(8), /* id, reduce: StmtList */ + }, + }, + actionRow{ // S34 + canRecover: false, + actions: 
[numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(18), /* {, reduce: OptSemi */ + reduce(18), /* }, reduce: OptSemi */ + nil, /* empty */ + nil, /* strict */ + reduce(18), /* graphx, reduce: OptSemi */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(18), /* node, reduce: OptSemi */ + reduce(18), /* edge, reduce: OptSemi */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(18), /* subgraph, reduce: OptSemi */ + nil, /* : */ + reduce(18), /* id, reduce: OptSemi */ + }, + }, + actionRow{ // S35 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(19), /* {, reduce: NodeStmt */ + reduce(19), /* }, reduce: NodeStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(19), /* graphx, reduce: NodeStmt */ + nil, /* digraph */ + reduce(19), /* ;, reduce: NodeStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(19), /* node, reduce: NodeStmt */ + reduce(19), /* edge, reduce: NodeStmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(19), /* subgraph, reduce: NodeStmt */ + nil, /* : */ + reduce(19), /* id, reduce: NodeStmt */ + }, + }, + actionRow{ // S36 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(33), /* {, reduce: OptAttrList */ + reduce(33), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(33), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(33), /* ;, reduce: OptAttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(33), /* node, reduce: OptAttrList */ + reduce(33), /* edge, reduce: OptAttrList */ + shift(50), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(33), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(33), /* id, reduce: OptAttrList */ + }, + }, + actionRow{ // S37 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(36), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(55), /* id */ + }, + }, + actionRow{ // S38 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(32), /* {, reduce: OptAttrList */ + reduce(32), /* }, reduce: OptAttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(32), /* graphx, reduce: OptAttrList */ + nil, /* digraph */ + reduce(32), /* ;, reduce: OptAttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(32), /* node, reduce: OptAttrList */ + reduce(32), /* edge, reduce: OptAttrList */ + shift(37), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(32), /* subgraph, reduce: OptAttrList */ + nil, /* : */ + reduce(32), /* id, reduce: OptAttrList */ + }, + }, + actionRow{ // S39 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(62), /* id */ + }, + }, + actionRow{ // S40 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(22), /* {, reduce: DirectedEdge */ + nil, /* 
} */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(22), /* subgraph, reduce: DirectedEdge */ + nil, /* : */ + reduce(22), /* id, reduce: DirectedEdge */ + }, + }, + actionRow{ // S41 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(23), /* {, reduce: DirectedEdge */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(23), /* subgraph, reduce: DirectedEdge */ + nil, /* : */ + reduce(23), /* id, reduce: DirectedEdge */ + }, + }, + actionRow{ // S42 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(26), /* {, reduce: AttrStmt */ + reduce(26), /* }, reduce: AttrStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(26), /* graphx, reduce: AttrStmt */ + nil, /* digraph */ + reduce(26), /* ;, reduce: AttrStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(26), /* node, reduce: AttrStmt */ + reduce(26), /* edge, reduce: AttrStmt */ + shift(50), /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(26), /* subgraph, reduce: AttrStmt */ + nil, /* : */ + reduce(26), /* id, reduce: AttrStmt */ + }, + }, + actionRow{ // S43 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(64), /* id */ + }, + }, + actionRow{ // S44 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(47), /* {, reduce: Node */ + reduce(47), /* }, reduce: Node */ + nil, /* empty */ + nil, /* strict */ + reduce(47), /* graphx, reduce: Node */ + nil, /* digraph */ + reduce(47), /* ;, reduce: Node */ + reduce(47), /* --, reduce: Node */ + reduce(47), /* ->, reduce: Node */ + reduce(47), /* node, reduce: Node */ + reduce(47), /* edge, reduce: Node */ + reduce(47), /* [, reduce: Node */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(47), /* subgraph, reduce: Node */ + nil, /* : */ + reduce(47), /* id, reduce: Node */ + }, + }, + actionRow{ // S45 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(51), /* {, reduce: OptPort */ + reduce(51), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(51), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(51), /* ;, reduce: OptPort */ + reduce(51), /* --, reduce: OptPort */ + reduce(51), /* ->, reduce: OptPort */ + reduce(51), /* node, reduce: OptPort */ + reduce(51), /* edge, reduce: OptPort */ + reduce(51), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(51), /* subgraph, reduce: OptPort */ + nil, /* : */ + reduce(51), /* id, reduce: OptPort */ + }, + }, + actionRow{ // S46 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, 
/* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(62), /* id */ + }, + }, + actionRow{ // S47 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + }, + }, + actionRow{ // S48 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(44), /* {, reduce: OptSubgraphID */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S49 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(9), /* {, reduce: StmtList */ + reduce(9), /* }, reduce: StmtList */ + nil, /* empty */ + nil, /* strict */ + reduce(9), /* graphx, reduce: StmtList */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + reduce(9), /* node, reduce: StmtList */ + reduce(9), /* edge, reduce: StmtList */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(9), /* subgraph, reduce: StmtList */ + nil, /* : */ + reduce(9), /* id, reduce: StmtList */ + }, + }, + actionRow{ // S50 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(36), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(55), /* id */ + }, + }, + actionRow{ // S51 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + shift(68), /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(38), /* ], reduce: OptSep */ + shift(70), /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(38), /* id, reduce: OptSep */ + }, + }, + actionRow{ // S52 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + shift(71), /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S53 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(37), /* ], reduce: OptAList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(55), /* id */ + }, + }, + actionRow{ // S54 + canRecover: 
false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + shift(73), /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S55 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + reduce(52), /* =, reduce: ID */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S56 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(20), /* {, reduce: EdgeStmt */ + reduce(20), /* }, reduce: EdgeStmt */ + nil, /* empty */ + nil, /* strict */ + reduce(20), /* graphx, reduce: EdgeStmt */ + nil, /* digraph */ + reduce(20), /* ;, reduce: EdgeStmt */ + nil, /* -- */ + nil, /* -> */ + reduce(20), /* node, reduce: EdgeStmt */ + reduce(20), /* edge, reduce: EdgeStmt */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(20), /* subgraph, reduce: EdgeStmt */ + nil, /* : */ + reduce(20), /* id, reduce: EdgeStmt */ + }, + }, + actionRow{ // S57 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(46), /* {, reduce: Vertex */ + reduce(46), /* }, reduce: Vertex */ + nil, /* empty */ + nil, /* strict */ + reduce(46), /* graphx, reduce: Vertex */ + nil, /* digraph */ + reduce(46), /* ;, reduce: Vertex */ + reduce(46), /* --, reduce: Vertex */ + reduce(46), /* ->, reduce: Vertex */ + reduce(46), /* node, reduce: Vertex */ + reduce(46), /* edge, reduce: Vertex */ + reduce(46), /* [, reduce: Vertex */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(46), /* subgraph, reduce: Vertex */ + nil, /* : */ + reduce(46), /* id, reduce: Vertex */ + }, + }, + actionRow{ // S58 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(45), /* {, reduce: Vertex */ + reduce(45), /* }, reduce: Vertex */ + nil, /* empty */ + nil, /* strict */ + reduce(45), /* graphx, reduce: Vertex */ + nil, /* digraph */ + reduce(45), /* ;, reduce: Vertex */ + reduce(45), /* --, reduce: Vertex */ + reduce(45), /* ->, reduce: Vertex */ + reduce(45), /* node, reduce: Vertex */ + reduce(45), /* edge, reduce: Vertex */ + reduce(45), /* [, reduce: Vertex */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(45), /* subgraph, reduce: Vertex */ + nil, /* : */ + reduce(45), /* id, reduce: Vertex */ + }, + }, + actionRow{ // S59 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(24), /* {, reduce: OptEdge */ + reduce(24), /* }, reduce: OptEdge */ + nil, /* empty */ + nil, /* strict */ + reduce(24), /* graphx, reduce: OptEdge */ + nil, /* digraph */ + reduce(24), /* ;, reduce: OptEdge */ + shift(40), /* -- */ + shift(41), /* -> */ + reduce(24), /* node, reduce: OptEdge */ + reduce(24), /* edge, reduce: OptEdge */ + reduce(24), /* [, reduce: OptEdge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(24), /* subgraph, reduce: OptEdge */ + nil, /* : */ + reduce(24), /* id, reduce: OptEdge */ + }, + }, + actionRow{ // S60 + canRecover: false, + actions: [numSymbols]action{ + nil, /* 
INVALID */ + nil, /* $ */ + reduce(50), /* {, reduce: OptPort */ + reduce(50), /* }, reduce: OptPort */ + nil, /* empty */ + nil, /* strict */ + reduce(50), /* graphx, reduce: OptPort */ + nil, /* digraph */ + reduce(50), /* ;, reduce: OptPort */ + reduce(50), /* --, reduce: OptPort */ + reduce(50), /* ->, reduce: OptPort */ + reduce(50), /* node, reduce: OptPort */ + reduce(50), /* edge, reduce: OptPort */ + reduce(50), /* [, reduce: OptPort */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(50), /* subgraph, reduce: OptPort */ + shift(46), /* : */ + reduce(50), /* id, reduce: OptPort */ + }, + }, + actionRow{ // S61 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + shift(76), /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S62 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + reduce(52), /* :, reduce: ID */ + reduce(52), /* id, reduce: ID */ + }, + }, + actionRow{ // S63 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(41), /* {, reduce: Attr */ + reduce(41), /* }, reduce: Attr */ + nil, /* empty */ + nil, /* strict */ + reduce(41), /* graphx, reduce: Attr */ + nil, /* digraph */ + reduce(41), /* ;, reduce: Attr */ + nil, /* -- */ + nil, /* -> */ + reduce(41), /* node, reduce: Attr */ + reduce(41), /* edge, reduce: Attr */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(41), /* subgraph, reduce: Attr */ + nil, /* : */ + reduce(41), /* id, reduce: Attr */ + }, + }, + actionRow{ // S64 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + nil, /* -- */ + nil, /* -> */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + nil, /* : */ + reduce(52), /* id, reduce: ID */ + }, + }, + actionRow{ // S65 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(48), /* {, reduce: Port */ + reduce(48), /* }, reduce: Port */ + nil, /* empty */ + nil, /* strict */ + reduce(48), /* graphx, reduce: Port */ + nil, /* digraph */ + reduce(48), /* ;, reduce: Port */ + reduce(48), /* --, reduce: Port */ + reduce(48), /* ->, reduce: Port */ + reduce(48), /* node, reduce: Port */ + reduce(48), /* edge, reduce: Port */ + reduce(48), /* [, reduce: Port */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(48), /* subgraph, reduce: Port */ + shift(77), /* : */ + reduce(48), /* id, reduce: Port */ + }, + }, + actionRow{ // S66 + 
canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(78), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S67 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + shift(79), /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S68 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(39), /* ], reduce: OptSep */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(39), /* id, reduce: OptSep */ + }, + }, + actionRow{ // S69 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(34), /* ], reduce: AList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(34), /* id, reduce: AList */ + }, + }, + actionRow{ // S70 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(40), /* ], reduce: OptSep */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(40), /* id, reduce: OptSep */ + }, + }, + actionRow{ // S71 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(30), /* {, reduce: AttrList */ + reduce(30), /* }, reduce: AttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(30), /* graphx, reduce: AttrList */ + nil, /* digraph */ + reduce(30), /* ;, reduce: AttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(30), /* node, reduce: AttrList */ + reduce(30), /* edge, reduce: AttrList */ + reduce(30), /* [, reduce: AttrList */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(30), /* subgraph, reduce: AttrList */ + nil, /* : */ + reduce(30), /* id, reduce: AttrList */ + }, + }, + actionRow{ // S72 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + shift(68), /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(38), /* ], reduce: OptSep */ + shift(70), /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(38), /* id, reduce: OptSep */ + }, + }, + actionRow{ // S73 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ 
+ nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(82), /* id */ + }, + }, + actionRow{ // S74 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(25), /* {, reduce: OptEdge */ + reduce(25), /* }, reduce: OptEdge */ + nil, /* empty */ + nil, /* strict */ + reduce(25), /* graphx, reduce: OptEdge */ + nil, /* digraph */ + reduce(25), /* ;, reduce: OptEdge */ + nil, /* -- */ + nil, /* -> */ + reduce(25), /* node, reduce: OptEdge */ + reduce(25), /* edge, reduce: OptEdge */ + reduce(25), /* [, reduce: OptEdge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(25), /* subgraph, reduce: OptEdge */ + nil, /* : */ + reduce(25), /* id, reduce: OptEdge */ + }, + }, + actionRow{ // S75 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(21), /* {, reduce: Edge */ + reduce(21), /* }, reduce: Edge */ + nil, /* empty */ + nil, /* strict */ + reduce(21), /* graphx, reduce: Edge */ + nil, /* digraph */ + reduce(21), /* ;, reduce: Edge */ + nil, /* -- */ + nil, /* -> */ + reduce(21), /* node, reduce: Edge */ + reduce(21), /* edge, reduce: Edge */ + reduce(21), /* [, reduce: Edge */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(21), /* subgraph, reduce: Edge */ + nil, /* : */ + reduce(21), /* id, reduce: Edge */ + }, + }, + actionRow{ // S76 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(43), /* {, reduce: OptSubgraphID */ + reduce(10), /* }, reduce: OptStmtList */ + nil, /* empty */ + nil, /* strict */ + shift(14), /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + shift(25), /* node */ + shift(26), /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + shift(29), /* subgraph */ + nil, /* : */ + shift(30), /* id */ + }, + }, + actionRow{ // S77 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + shift(85), /* id */ + }, + }, + actionRow{ // S78 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(42), /* {, reduce: Subgraph */ + reduce(42), /* }, reduce: Subgraph */ + nil, /* empty */ + nil, /* strict */ + reduce(42), /* graphx, reduce: Subgraph */ + nil, /* digraph */ + reduce(42), /* ;, reduce: Subgraph */ + reduce(42), /* --, reduce: Subgraph */ + reduce(42), /* ->, reduce: Subgraph */ + reduce(42), /* node, reduce: Subgraph */ + reduce(42), /* edge, reduce: Subgraph */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(42), /* subgraph, reduce: Subgraph */ + nil, /* : */ + reduce(42), /* id, reduce: Subgraph */ + }, + }, + actionRow{ // S79 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(31), /* {, reduce: AttrList */ + reduce(31), /* }, reduce: AttrList */ + nil, /* empty */ + nil, /* strict */ + reduce(31), /* graphx, reduce: AttrList */ + nil, /* digraph */ + reduce(31), /* ;, reduce: AttrList */ + nil, /* -- */ + nil, /* -> */ + reduce(31), /* node, reduce: AttrList */ + reduce(31), /* edge, 
reduce: AttrList */ + reduce(31), /* [, reduce: AttrList */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(31), /* subgraph, reduce: AttrList */ + nil, /* : */ + reduce(31), /* id, reduce: AttrList */ + }, + }, + actionRow{ // S80 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(35), /* ], reduce: AList */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(35), /* id, reduce: AList */ + }, + }, + actionRow{ // S81 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + reduce(41), /* ;, reduce: Attr */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(41), /* ], reduce: Attr */ + reduce(41), /* ,, reduce: Attr */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(41), /* id, reduce: Attr */ + }, + }, + actionRow{ // S82 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + nil, /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + reduce(52), /* ], reduce: ID */ + reduce(52), /* ,, reduce: ID */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + reduce(52), /* id, reduce: ID */ + }, + }, + actionRow{ // S83 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + nil, /* { */ + shift(86), /* } */ + nil, /* empty */ + nil, /* strict */ + nil, /* graphx */ + nil, /* digraph */ + nil, /* ; */ + nil, /* -- */ + nil, /* -> */ + nil, /* node */ + nil, /* edge */ + nil, /* [ */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + nil, /* subgraph */ + nil, /* : */ + nil, /* id */ + }, + }, + actionRow{ // S84 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(49), /* {, reduce: Port */ + reduce(49), /* }, reduce: Port */ + nil, /* empty */ + nil, /* strict */ + reduce(49), /* graphx, reduce: Port */ + nil, /* digraph */ + reduce(49), /* ;, reduce: Port */ + reduce(49), /* --, reduce: Port */ + reduce(49), /* ->, reduce: Port */ + reduce(49), /* node, reduce: Port */ + reduce(49), /* edge, reduce: Port */ + reduce(49), /* [, reduce: Port */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(49), /* subgraph, reduce: Port */ + nil, /* : */ + reduce(49), /* id, reduce: Port */ + }, + }, + actionRow{ // S85 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(52), /* {, reduce: ID */ + reduce(52), /* }, reduce: ID */ + nil, /* empty */ + nil, /* strict */ + reduce(52), /* graphx, reduce: ID */ + nil, /* digraph */ + reduce(52), /* ;, reduce: ID */ + reduce(52), /* --, reduce: ID */ + reduce(52), /* ->, reduce: ID */ + reduce(52), /* node, reduce: ID */ + reduce(52), /* edge, reduce: ID */ + reduce(52), /* [, reduce: ID */ + nil, /* ] */ + nil, /* , */ + nil, /* = */ + reduce(52), /* subgraph, reduce: ID */ + nil, /* : */ + reduce(52), /* id, reduce: ID */ + }, + }, + actionRow{ // S86 + canRecover: false, + actions: [numSymbols]action{ + nil, /* INVALID */ + nil, /* $ */ + reduce(42), /* {, reduce: 
Subgraph */
+			reduce(42), /* }, reduce: Subgraph */
+			nil, /* empty */
+			nil, /* strict */
+			reduce(42), /* graphx, reduce: Subgraph */
+			nil, /* digraph */
+			reduce(42), /* ;, reduce: Subgraph */
+			reduce(42), /* --, reduce: Subgraph */
+			reduce(42), /* ->, reduce: Subgraph */
+			reduce(42), /* node, reduce: Subgraph */
+			reduce(42), /* edge, reduce: Subgraph */
+			reduce(42), /* [, reduce: Subgraph */
+			nil, /* ] */
+			nil, /* , */
+			nil, /* = */
+			reduce(42), /* subgraph, reduce: Subgraph */
+			nil, /* : */
+			reduce(42), /* id, reduce: Subgraph */
+		},
+	},
+}
diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/doc.go
new file mode 100644
index 0000000..5954ca2
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/doc.go
@@ -0,0 +1,6 @@
+// Copyright ©2018 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package parser provides generated internal parsing functions for DOT parsing.
+package parser // import "gonum.org/v1/gonum/graph/formats/dot/internal/parser"
diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/gototable.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/gototable.go
new file mode 100644
index 0000000..eca01cd
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/gototable.go
@@ -0,0 +1,2807 @@
+// Code generated by gocc; DO NOT EDIT.
+
+// This file is dual licensed under CC0 and The gonum license.
+//
+// Copyright ©2017 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// Copyright ©2017 Robin Eklind.
+// This file is made available under a Creative Commons CC0 1.0
+// Universal Public Domain Dedication.
+ +package parser + +const numNTSymbols = 30 + +type ( + gotoTable [numStates]gotoRow + gotoRow [numNTSymbols]int +) + +var gotoTab = gotoTable{ + gotoRow{ // S0 + -1, // S' + 1, // File + 2, // Graph + 3, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S1 + -1, // S' + -1, // File + 5, // Graph + 3, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S2 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S3 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + 6, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S4 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S5 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S6 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + 
-1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 10, // ID + 9, // OptID + }, + gotoRow{ // S7 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S8 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S9 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S10 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S11 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S12 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 13, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + }, + gotoRow{ // S13 + -1, // S' + -1, // File + -1, 
// Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S14 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S15 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + 32, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + }, + gotoRow{ // S16 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + 33, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S17 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S18 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S19 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, 
// Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S20 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S21 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S22 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 36, // AttrList + 35, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S23 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + 38, // Edge + 39, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S24 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 42, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S25 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S26 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, 
// DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S27 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + 45, // Port + 44, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S28 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S29 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 10, // ID + 48, // OptID + }, + gotoRow{ // S30 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S31 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S32 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + 49, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S33 + -1, // S' + -1, // File + -1, 
// Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S34 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S35 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S36 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S37 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + 53, // AList + 52, // OptAList + -1, // OptSep + 51, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + }, + gotoRow{ // S38 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + 36, // AttrList + 56, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S39 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + 57, 
// Subgraph + 61, // OptSubgraphID + 59, // Vertex + 58, // Node + -1, // Port + -1, // OptPort + 60, // ID + -1, // OptID + }, + gotoRow{ // S40 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S41 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S42 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S43 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 63, // ID + -1, // OptID + }, + gotoRow{ // S44 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S45 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S46 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, 
// DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 65, // ID + -1, // OptID + }, + gotoRow{ // S47 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 66, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + }, + gotoRow{ // S48 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S49 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S50 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + 53, // AList + 67, // OptAList + -1, // OptSep + 51, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + }, + gotoRow{ // S51 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + 69, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S52 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S53 + -1, // S' + -1, // File + -1, 
// Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 72, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 54, // ID + -1, // OptID + }, + gotoRow{ // S54 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S55 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S56 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S57 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S58 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S59 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + 74, // Edge + 39, // DirectedEdge + 75, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, 
// Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S60 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + 45, // Port + 44, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S61 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S62 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S63 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S64 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S65 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S66 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, 
// DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S67 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S68 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S69 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S70 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S71 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S72 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + 80, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S73 + -1, // S' + -1, // File + -1, 
// Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 81, // ID + -1, // OptID + }, + gotoRow{ // S74 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S75 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S76 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + 15, // StmtList + 83, // OptStmtList + 16, // Stmt + -1, // OptSemi + 17, // NodeStmt + 18, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + 19, // AttrStmt + 24, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + 20, // Attr + 21, // Subgraph + 28, // OptSubgraphID + 23, // Vertex + 22, // Node + -1, // Port + -1, // OptPort + 27, // ID + -1, // OptID + }, + gotoRow{ // S77 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + 84, // ID + -1, // OptID + }, + gotoRow{ // S78 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S79 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, 
// Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S80 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S81 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S82 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S83 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S84 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S85 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, // DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, + gotoRow{ // S86 + -1, // S' + -1, // File + -1, // Graph + -1, // OptStrict + -1, // DirectedGraph + -1, // StmtList + -1, // OptStmtList + -1, // Stmt + -1, // OptSemi + -1, // NodeStmt + -1, // EdgeStmt + -1, // Edge + -1, 
// DirectedEdge + -1, // OptEdge + -1, // AttrStmt + -1, // Component + -1, // AttrList + -1, // OptAttrList + -1, // AList + -1, // OptAList + -1, // OptSep + -1, // Attr + -1, // Subgraph + -1, // OptSubgraphID + -1, // Vertex + -1, // Node + -1, // Port + -1, // OptPort + -1, // ID + -1, // OptID + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/parser.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/parser.go new file mode 100644 index 0000000..52e4951 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/parser.go @@ -0,0 +1,226 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package parser + +import ( + "fmt" + "strings" + + parseError "gonum.org/v1/gonum/graph/formats/dot/internal/errors" + "gonum.org/v1/gonum/graph/formats/dot/internal/token" +) + +const ( + numProductions = 55 + numStates = 87 + numSymbols = 50 +) + +// Stack + +type stack struct { + state []int + attrib []Attrib +} + +const iNITIAL_STACK_SIZE = 100 + +func newStack() *stack { + return &stack{ + state: make([]int, 0, iNITIAL_STACK_SIZE), + attrib: make([]Attrib, 0, iNITIAL_STACK_SIZE), + } +} + +func (s *stack) reset() { + s.state = s.state[:0] + s.attrib = s.attrib[:0] +} + +func (s *stack) push(state int, a Attrib) { + s.state = append(s.state, state) + s.attrib = append(s.attrib, a) +} + +func (s *stack) top() int { + return s.state[len(s.state)-1] +} + +func (s *stack) peek(pos int) int { + return s.state[pos] +} + +func (s *stack) topIndex() int { + return len(s.state) - 1 +} + +func (s *stack) popN(items int) []Attrib { + lo, hi := len(s.state)-items, len(s.state) + + attrib := s.attrib[lo:hi] + + s.state = s.state[:lo] + s.attrib = s.attrib[:lo] + + return attrib +} + +func (s *stack) String() string { + w := new(strings.Builder) + fmt.Fprintf(w, "stack:\n") + for i, st := range s.state { + fmt.Fprintf(w, "\t%d: %d , ", i, st) + if s.attrib[i] == nil { + fmt.Fprintf(w, "nil") + } else { + switch attr := s.attrib[i].(type) { + case *token.Token: + fmt.Fprintf(w, "%s", attr.Lit) + default: + fmt.Fprintf(w, "%v", attr) + } + } + fmt.Fprintf(w, "\n") + } + return w.String() +} + +// Parser + +type Parser struct { + stack *stack + nextToken *token.Token + pos int +} + +type Scanner interface { + Scan() (tok *token.Token) +} + +func NewParser() *Parser { + p := &Parser{stack: newStack()} + p.Reset() + return p +} + +func (p *Parser) Reset() { + p.stack.reset() + p.stack.push(0, nil) +} + +func (p *Parser) Error(err error, scanner Scanner) (recovered bool, errorAttrib *parseError.Error) { + errorAttrib = &parseError.Error{ + Err: err, + ErrorToken: p.nextToken, + ErrorSymbols: p.popNonRecoveryStates(), + ExpectedTokens: make([]string, 0, 8), + } + for t, action := range actionTab[p.stack.top()].actions { + if action != nil { + errorAttrib.ExpectedTokens = append(errorAttrib.ExpectedTokens, token.TokMap.Id(token.Type(t))) + } + } + + if action := actionTab[p.stack.top()].actions[token.TokMap.Type("error")]; action != nil { + p.stack.push(int(action.(shift)), errorAttrib) // action can only be shift + } else { + return + } + + if action := 
actionTab[p.stack.top()].actions[p.nextToken.Type]; action != nil { + recovered = true + } + for !recovered && p.nextToken.Type != token.EOF { + p.nextToken = scanner.Scan() + if action := actionTab[p.stack.top()].actions[p.nextToken.Type]; action != nil { + recovered = true + } + } + + return +} + +func (p *Parser) popNonRecoveryStates() (removedAttribs []parseError.ErrorSymbol) { + if rs, ok := p.firstRecoveryState(); ok { + errorSymbols := p.stack.popN(p.stack.topIndex() - rs) + removedAttribs = make([]parseError.ErrorSymbol, len(errorSymbols)) + for i, e := range errorSymbols { + removedAttribs[i] = e + } + } else { + removedAttribs = []parseError.ErrorSymbol{} + } + return +} + +// recoveryState points to the highest state on the stack, which can recover +func (p *Parser) firstRecoveryState() (recoveryState int, canRecover bool) { + recoveryState, canRecover = p.stack.topIndex(), actionTab[p.stack.top()].canRecover + for recoveryState > 0 && !canRecover { + recoveryState-- + canRecover = actionTab[p.stack.peek(recoveryState)].canRecover + } + return +} + +func (p *Parser) newError(err error) error { + e := &parseError.Error{ + Err: err, + StackTop: p.stack.top(), + ErrorToken: p.nextToken, + } + actRow := actionTab[p.stack.top()] + for i, t := range actRow.actions { + if t != nil { + e.ExpectedTokens = append(e.ExpectedTokens, token.TokMap.Id(token.Type(i))) + } + } + return e +} + +func (p *Parser) Parse(scanner Scanner) (res interface{}, err error) { + p.Reset() + p.nextToken = scanner.Scan() + for acc := false; !acc; { + action := actionTab[p.stack.top()].actions[p.nextToken.Type] + if action == nil { + if recovered, errAttrib := p.Error(nil, scanner); !recovered { + p.nextToken = errAttrib.ErrorToken + return nil, p.newError(nil) + } + if action = actionTab[p.stack.top()].actions[p.nextToken.Type]; action == nil { + panic("Error recovery led to invalid action") + } + } + + switch act := action.(type) { + case accept: + res = p.stack.popN(1)[0] + acc = true + case shift: + p.stack.push(int(act), p.nextToken) + p.nextToken = scanner.Scan() + case reduce: + prod := productionsTable[int(act)] + attrib, err := prod.ReduceFunc(p.stack.popN(prod.NumSymbols)) + if err != nil { + return nil, p.newError(err) + } else { + p.stack.push(gotoTab[p.stack.top()][prod.NTType], attrib) + } + default: + panic("unknown action: " + action.String()) + } + } + return res, nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/productionstable.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/productionstable.go new file mode 100644 index 0000000..6848066 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/parser/productionstable.go @@ -0,0 +1,586 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. 
+ +package parser + +import ( + "gonum.org/v1/gonum/graph/formats/dot/ast" + "gonum.org/v1/gonum/graph/formats/dot/internal/astx" +) + +type ( + //TODO: change type and variable names to be consistent with other tables + ProdTab [numProductions]ProdTabEntry + ProdTabEntry struct { + String string + Id string + NTType int + Index int + NumSymbols int + ReduceFunc func([]Attrib) (Attrib, error) + } + Attrib interface { + } +) + +var productionsTable = ProdTab{ + ProdTabEntry{ + String: `S' : File << >>`, + Id: "S'", + NTType: 0, + Index: 0, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `File : Graph << astx.NewFile(X[0]) >>`, + Id: "File", + NTType: 1, + Index: 1, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewFile(X[0]) + }, + }, + ProdTabEntry{ + String: `File : File Graph << astx.AppendGraph(X[0], X[1]) >>`, + Id: "File", + NTType: 1, + Index: 2, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendGraph(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Graph : OptStrict DirectedGraph OptID "{" OptStmtList "}" << astx.NewGraph(X[0], X[1], X[2], X[4]) >>`, + Id: "Graph", + NTType: 2, + Index: 3, + NumSymbols: 6, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewGraph(X[0], X[1], X[2], X[4]) + }, + }, + ProdTabEntry{ + String: `OptStrict : empty << false, nil >>`, + Id: "OptStrict", + NTType: 3, + Index: 4, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `OptStrict : strict << true, nil >>`, + Id: "OptStrict", + NTType: 3, + Index: 5, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `DirectedGraph : graphx << false, nil >>`, + Id: "DirectedGraph", + NTType: 4, + Index: 6, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `DirectedGraph : digraph << true, nil >>`, + Id: "DirectedGraph", + NTType: 4, + Index: 7, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `StmtList : Stmt OptSemi << astx.NewStmtList(X[0]) >>`, + Id: "StmtList", + NTType: 5, + Index: 8, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewStmtList(X[0]) + }, + }, + ProdTabEntry{ + String: `StmtList : StmtList Stmt OptSemi << astx.AppendStmt(X[0], X[1]) >>`, + Id: "StmtList", + NTType: 5, + Index: 9, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `OptStmtList : empty << >>`, + Id: "OptStmtList", + NTType: 6, + Index: 10, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptStmtList : StmtList << >>`, + Id: "OptStmtList", + NTType: 6, + Index: 11, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : NodeStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 12, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : EdgeStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 13, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : AttrStmt << >>`, + Id: "Stmt", + NTType: 7, + Index: 14, 
+ NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : Attr << >>`, + Id: "Stmt", + NTType: 7, + Index: 15, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Stmt : Subgraph << >>`, + Id: "Stmt", + NTType: 7, + Index: 16, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSemi : empty << >>`, + Id: "OptSemi", + NTType: 8, + Index: 17, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSemi : ";" << >>`, + Id: "OptSemi", + NTType: 8, + Index: 18, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `NodeStmt : Node OptAttrList << astx.NewNodeStmt(X[0], X[1]) >>`, + Id: "NodeStmt", + NTType: 9, + Index: 19, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewNodeStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `EdgeStmt : Vertex Edge OptAttrList << astx.NewEdgeStmt(X[0], X[1], X[2]) >>`, + Id: "EdgeStmt", + NTType: 10, + Index: 20, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewEdgeStmt(X[0], X[1], X[2]) + }, + }, + ProdTabEntry{ + String: `Edge : DirectedEdge Vertex OptEdge << astx.NewEdge(X[0], X[1], X[2]) >>`, + Id: "Edge", + NTType: 11, + Index: 21, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewEdge(X[0], X[1], X[2]) + }, + }, + ProdTabEntry{ + String: `DirectedEdge : "--" << false, nil >>`, + Id: "DirectedEdge", + NTType: 12, + Index: 22, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return false, nil + }, + }, + ProdTabEntry{ + String: `DirectedEdge : "->" << true, nil >>`, + Id: "DirectedEdge", + NTType: 12, + Index: 23, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return true, nil + }, + }, + ProdTabEntry{ + String: `OptEdge : empty << >>`, + Id: "OptEdge", + NTType: 13, + Index: 24, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptEdge : Edge << >>`, + Id: "OptEdge", + NTType: 13, + Index: 25, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `AttrStmt : Component AttrList << astx.NewAttrStmt(X[0], X[1]) >>`, + Id: "AttrStmt", + NTType: 14, + Index: 26, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttrStmt(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Component : graphx << ast.GraphKind, nil >>`, + Id: "Component", + NTType: 15, + Index: 27, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.GraphKind, nil + }, + }, + ProdTabEntry{ + String: `Component : node << ast.NodeKind, nil >>`, + Id: "Component", + NTType: 15, + Index: 28, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.NodeKind, nil + }, + }, + ProdTabEntry{ + String: `Component : edge << ast.EdgeKind, nil >>`, + Id: "Component", + NTType: 15, + Index: 29, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return ast.EdgeKind, nil + }, + }, + ProdTabEntry{ + String: `AttrList : "[" OptAList "]" << X[1], nil >>`, + Id: "AttrList", + NTType: 16, + Index: 30, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[1], nil + }, + }, + 
ProdTabEntry{ + String: `AttrList : AttrList "[" OptAList "]" << astx.AppendAttrList(X[0], X[2]) >>`, + Id: "AttrList", + NTType: 16, + Index: 31, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendAttrList(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `OptAttrList : empty << >>`, + Id: "OptAttrList", + NTType: 17, + Index: 32, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptAttrList : AttrList << >>`, + Id: "OptAttrList", + NTType: 17, + Index: 33, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `AList : Attr OptSep << astx.NewAttrList(X[0]) >>`, + Id: "AList", + NTType: 18, + Index: 34, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttrList(X[0]) + }, + }, + ProdTabEntry{ + String: `AList : AList Attr OptSep << astx.AppendAttr(X[0], X[1]) >>`, + Id: "AList", + NTType: 18, + Index: 35, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.AppendAttr(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `OptAList : empty << >>`, + Id: "OptAList", + NTType: 19, + Index: 36, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptAList : AList << >>`, + Id: "OptAList", + NTType: 19, + Index: 37, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSep : empty << >>`, + Id: "OptSep", + NTType: 20, + Index: 38, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSep : ";" << >>`, + Id: "OptSep", + NTType: 20, + Index: 39, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `OptSep : "," << >>`, + Id: "OptSep", + NTType: 20, + Index: 40, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Attr : ID "=" ID << astx.NewAttr(X[0], X[2]) >>`, + Id: "Attr", + NTType: 21, + Index: 41, + NumSymbols: 3, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewAttr(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `Subgraph : OptSubgraphID "{" OptStmtList "}" << astx.NewSubgraph(X[0], X[2]) >>`, + Id: "Subgraph", + NTType: 22, + Index: 42, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewSubgraph(X[0], X[2]) + }, + }, + ProdTabEntry{ + String: `OptSubgraphID : empty << >>`, + Id: "OptSubgraphID", + NTType: 23, + Index: 43, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptSubgraphID : subgraph OptID << X[1], nil >>`, + Id: "OptSubgraphID", + NTType: 23, + Index: 44, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[1], nil + }, + }, + ProdTabEntry{ + String: `Vertex : Node << >>`, + Id: "Vertex", + NTType: 24, + Index: 45, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Vertex : Subgraph << >>`, + Id: "Vertex", + NTType: 24, + Index: 46, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `Node : ID OptPort << astx.NewNode(X[0], X[1]) >>`, + Id: "Node", + NTType: 25, + Index: 47, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) 
{ + return astx.NewNode(X[0], X[1]) + }, + }, + ProdTabEntry{ + String: `Port : ":" ID << astx.NewPort(X[1], nil) >>`, + Id: "Port", + NTType: 26, + Index: 48, + NumSymbols: 2, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewPort(X[1], nil) + }, + }, + ProdTabEntry{ + String: `Port : ":" ID ":" ID << astx.NewPort(X[1], X[3]) >>`, + Id: "Port", + NTType: 26, + Index: 49, + NumSymbols: 4, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewPort(X[1], X[3]) + }, + }, + ProdTabEntry{ + String: `OptPort : empty << >>`, + Id: "OptPort", + NTType: 27, + Index: 50, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return nil, nil + }, + }, + ProdTabEntry{ + String: `OptPort : Port << >>`, + Id: "OptPort", + NTType: 27, + Index: 51, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, + ProdTabEntry{ + String: `ID : id << astx.NewID(X[0]) >>`, + Id: "ID", + NTType: 28, + Index: 52, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return astx.NewID(X[0]) + }, + }, + ProdTabEntry{ + String: `OptID : empty << "", nil >>`, + Id: "OptID", + NTType: 29, + Index: 53, + NumSymbols: 0, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return "", nil + }, + }, + ProdTabEntry{ + String: `OptID : ID << >>`, + Id: "OptID", + NTType: 29, + Index: 54, + NumSymbols: 1, + ReduceFunc: func(X []Attrib) (Attrib, error) { + return X[0], nil + }, + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/paste_copyright.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/paste_copyright.go new file mode 100644 index 0000000..d4dd0fb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/paste_copyright.go @@ -0,0 +1,57 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +// +build ignore + +package main + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" +) + +var location = []byte(`// Code generated by gocc; DO NOT EDIT.`) +var copyright = []byte(`// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication.`) + +func main() { + err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.IsDir() || filepath.Dir(path) == "." 
|| filepath.Ext(path) != ".go" { + return nil + } + + content, err := ioutil.ReadFile(path) + if err != nil { + return err + } + + content = bytes.Replace(content, location, copyright, 1) + return ioutil.WriteFile(path, content, info.Mode()) + }) + + if err != nil { + fmt.Printf("error walking the path: %v\n", err) + } +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/doc.go new file mode 100644 index 0000000..15a9c16 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package token provides generated internal tokenizing functions for DOT parsing. +package token // import "gonum.org/v1/gonum/graph/formats/dot/internal/token" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/token.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/token.go new file mode 100644 index 0000000..9245f3c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/token/token.go @@ -0,0 +1,116 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package token + +import ( + "fmt" +) + +type Token struct { + Type + Lit []byte + Pos +} + +type Type int + +const ( + INVALID Type = iota + EOF +) + +type Pos struct { + Offset int + Line int + Column int +} + +func (p Pos) String() string { + return fmt.Sprintf("Pos(offset=%d, line=%d, column=%d)", p.Offset, p.Line, p.Column) +} + +type TokenMap struct { + typeMap []string + idMap map[string]Type +} + +func (m TokenMap) Id(tok Type) string { + if int(tok) < len(m.typeMap) { + return m.typeMap[tok] + } + return "unknown" +} + +func (m TokenMap) Type(tok string) Type { + if typ, exist := m.idMap[tok]; exist { + return typ + } + return INVALID +} + +func (m TokenMap) TokenString(tok *Token) string { + //TODO: refactor to print pos & token string properly + return fmt.Sprintf("%s(%d,%s)", m.Id(tok.Type), tok.Type, tok.Lit) +} + +func (m TokenMap) StringType(typ Type) string { + return fmt.Sprintf("%s(%d)", m.Id(typ), typ) +} + +var TokMap = TokenMap{ + typeMap: []string{ + "INVALID", + "$", + "{", + "}", + "empty", + "strict", + "graphx", + "digraph", + ";", + "--", + "->", + "node", + "edge", + "[", + "]", + ",", + "=", + "subgraph", + ":", + "id", + }, + + idMap: map[string]Type{ + "INVALID": 0, + "$": 1, + "{": 2, + "}": 3, + "empty": 4, + "strict": 5, + "graphx": 6, + "digraph": 7, + ";": 8, + "--": 9, + "->": 10, + "node": 11, + "edge": 12, + "[": 13, + "]": 14, + ",": 15, + "=": 16, + "subgraph": 17, + ":": 18, + "id": 19, + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/doc.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/doc.go new file mode 100644 index 0000000..a11b3be --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package util provides generated internal utility functions for DOT parsing. +package util // import "gonum.org/v1/gonum/graph/formats/dot/internal/util" diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/litconv.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/litconv.go new file mode 100644 index 0000000..30c9c62 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/litconv.go @@ -0,0 +1,118 @@ +// Code generated by gocc; DO NOT EDIT. + +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package util + +import ( + "fmt" + "strconv" + "unicode" + "unicode/utf8" +) + +/* Interface */ + +/* +Convert the literal value of a scanned token to rune +*/ +func RuneValue(lit []byte) rune { + if lit[1] == '\\' { + return escapeCharVal(lit) + } + r, size := utf8.DecodeRune(lit[1:]) + if size != len(lit)-2 { + panic(fmt.Sprintf("Error decoding rune. Lit: %s, rune: %d, size%d\n", lit, r, size)) + } + return r +} + +/* +Convert the literal value of a scanned token to int64 +*/ +func IntValue(lit []byte) (int64, error) { + return strconv.ParseInt(string(lit), 10, 64) +} + +/* +Convert the literal value of a scanned token to uint64 +*/ +func UintValue(lit []byte) (uint64, error) { + return strconv.ParseUint(string(lit), 10, 64) +} + +/* Util */ + +func escapeCharVal(lit []byte) rune { + var i, base, max uint32 + offset := 2 + switch lit[offset] { + case 'a': + return '\a' + case 'b': + return '\b' + case 'f': + return '\f' + case 'n': + return '\n' + case 'r': + return '\r' + case 't': + return '\t' + case 'v': + return '\v' + case '\\': + return '\\' + case '\'': + return '\'' + case '0', '1', '2', '3', '4', '5', '6', '7': + i, base, max = 3, 8, 255 + case 'x': + i, base, max = 2, 16, 255 + offset++ + case 'u': + i, base, max = 4, 16, unicode.MaxRune + offset++ + case 'U': + i, base, max = 8, 16, unicode.MaxRune + offset++ + default: + panic(fmt.Sprintf("Error decoding character literal: %s\n", lit)) + } + + var x uint32 + for ; i > 0 && offset < len(lit)-1; i-- { + ch, size := utf8.DecodeRune(lit[offset:]) + offset += size + d := uint32(digitVal(ch)) + if d >= base { + panic(fmt.Sprintf("charVal(%s): illegal character (%c) in escape sequence. size=%d, offset=%d", lit, ch, size, offset)) + } + x = x*base + d + } + if x > max || 0xD800 <= x && x < 0xE000 { + panic(fmt.Sprintf("Error decoding escape char value. Lit:%s, offset:%d, escape sequence is invalid Unicode code point\n", lit, offset)) + } + + return rune(x) +} + +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch) - '0' + case 'a' <= ch && ch <= 'f': + return int(ch) - 'a' + 10 + case 'A' <= ch && ch <= 'F': + return int(ch) - 'A' + 10 + } + return 16 // larger than any legal digit val +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/rune.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/rune.go new file mode 100644 index 0000000..f4e4f12 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/internal/util/rune.go @@ -0,0 +1,49 @@ +// Code generated by gocc; DO NOT EDIT. 
+ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package util + +import ( + "fmt" +) + +func RuneToString(r rune) string { + if r >= 0x20 && r < 0x7f { + return fmt.Sprintf("'%c'", r) + } + switch r { + case 0x07: + return "'\\a'" + case 0x08: + return "'\\b'" + case 0x0C: + return "'\\f'" + case 0x0A: + return "'\\n'" + case 0x0D: + return "'\\r'" + case 0x09: + return "'\\t'" + case 0x0b: + return "'\\v'" + case 0x5c: + return "'\\\\\\'" + case 0x27: + return "'\\''" + case 0x22: + return "'\\\"'" + } + if r < 0x10000 { + return fmt.Sprintf("\\u%04x", r) + } + return fmt.Sprintf("\\U%08x", r) +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/dot/sem.go b/vendor/gonum.org/v1/gonum/graph/formats/dot/sem.go new file mode 100644 index 0000000..2c59006 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/dot/sem.go @@ -0,0 +1,160 @@ +// This file is dual licensed under CC0 and The gonum license. +// +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Copyright ©2017 Robin Eklind. +// This file is made available under a Creative Commons CC0 1.0 +// Universal Public Domain Dedication. + +package dot + +import ( + "fmt" + + "gonum.org/v1/gonum/graph/formats/dot/ast" +) + +// check validates the semantics of the given DOT file. +func check(file *ast.File) error { + for _, graph := range file.Graphs { + // TODO: Check graph.ID for duplicates? + if err := checkGraph(graph); err != nil { + return err + } + } + return nil +} + +// check validates the semantics of the given graph. +func checkGraph(graph *ast.Graph) error { + for _, stmt := range graph.Stmts { + if err := checkStmt(graph, stmt); err != nil { + return err + } + } + return nil +} + +// check validates the semantics of the given statement. +func checkStmt(graph *ast.Graph, stmt ast.Stmt) error { + switch stmt := stmt.(type) { + case *ast.NodeStmt: + return checkNodeStmt(graph, stmt) + case *ast.EdgeStmt: + return checkEdgeStmt(graph, stmt) + case *ast.AttrStmt: + return checkAttrStmt(graph, stmt) + case *ast.Attr: + // TODO: Verify that the attribute is indeed of graph component kind. + return checkAttr(graph, ast.GraphKind, stmt) + case *ast.Subgraph: + return checkSubgraph(graph, stmt) + default: + panic(fmt.Sprintf("support for statement of type %T not yet implemented", stmt)) + } +} + +// checkNodeStmt validates the semantics of the given node statement. +func checkNodeStmt(graph *ast.Graph, stmt *ast.NodeStmt) error { + if err := checkNode(graph, stmt.Node); err != nil { + return err + } + for _, attr := range stmt.Attrs { + // TODO: Verify that the attribute is indeed of node component kind. + if err := checkAttr(graph, ast.NodeKind, attr); err != nil { + return err + } + } + return nil +} + +// checkEdgeStmt validates the semantics of the given edge statement. +func checkEdgeStmt(graph *ast.Graph, stmt *ast.EdgeStmt) error { + // TODO: if graph.Strict, check for multi-edges. + if err := checkVertex(graph, stmt.From); err != nil { + return err + } + for _, attr := range stmt.Attrs { + // TODO: Verify that the attribute is indeed of edge component kind. 
+ if err := checkAttr(graph, ast.EdgeKind, attr); err != nil { + return err + } + } + return checkEdge(graph, stmt.From, stmt.To) +} + +// checkEdge validates the semantics of the given edge. +func checkEdge(graph *ast.Graph, from ast.Vertex, to *ast.Edge) error { + if !graph.Directed && to.Directed { + return fmt.Errorf("undirected graph %q contains directed edge from %q to %q", graph.ID, from, to.Vertex) + } + if err := checkVertex(graph, to.Vertex); err != nil { + return err + } + if to.To != nil { + return checkEdge(graph, to.Vertex, to.To) + } + return nil +} + +// checkAttrStmt validates the semantics of the given attribute statement. +func checkAttrStmt(graph *ast.Graph, stmt *ast.AttrStmt) error { + for _, attr := range stmt.Attrs { + if err := checkAttr(graph, stmt.Kind, attr); err != nil { + return err + } + } + return nil +} + +// checkAttr validates the semantics of the given attribute for the given +// component kind. +func checkAttr(graph *ast.Graph, kind ast.Kind, attr *ast.Attr) error { + switch kind { + case ast.GraphKind: + // TODO: Validate key-value pairs for graphs. + return nil + case ast.NodeKind: + // TODO: Validate key-value pairs for nodes. + return nil + case ast.EdgeKind: + // TODO: Validate key-value pairs for edges. + return nil + default: + panic(fmt.Sprintf("support for component kind %v not yet supported", kind)) + } +} + +// checkSubgraph validates the semantics of the given subgraph. +func checkSubgraph(graph *ast.Graph, subgraph *ast.Subgraph) error { + // TODO: Check subgraph.ID for duplicates? + for _, stmt := range subgraph.Stmts { + // TODO: Refine handling of subgraph statements? + // checkSubgraphStmt(graph, subgraph, stmt) + if err := checkStmt(graph, stmt); err != nil { + return err + } + } + return nil +} + +// checkVertex validates the semantics of the given vertex. +func checkVertex(graph *ast.Graph, vertex ast.Vertex) error { + switch vertex := vertex.(type) { + case *ast.Node: + return checkNode(graph, vertex) + case *ast.Subgraph: + return checkSubgraph(graph, vertex) + default: + panic(fmt.Sprintf("support for vertex of type %T not yet supported", vertex)) + } +} + +// checNode validates the semantics of the given node. +func checkNode(graph *ast.Graph, node *ast.Node) error { + // TODO: Check node.ID for duplicates? + // TODO: Validate node.Port. + return nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/gexf12/gexf.go b/vendor/gonum.org/v1/gonum/graph/formats/gexf12/gexf.go new file mode 100644 index 0000000..e35a155 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/gexf12/gexf.go @@ -0,0 +1,304 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gexf12 implements marshaling and unmarshaling of GEXF1.2 documents. +// +// For details of GEXF see https://gephi.org/gexf/format/. +package gexf12 // import "gonum.org/v1/gonum/graph/formats/gexf12" + +import ( + "bytes" + "encoding/xml" + "time" +) + +// BUG(kortschak): The namespace for GEFX1.2 is 1.2draft, though it has +// already been deprecated. There is no specification for 1.3, although +// it is being used in the wild. + +// Content holds a GEFX graph and metadata. +type Content struct { + XMLName xml.Name `xml:"http://www.gexf.net/1.2draft gexf"` + Meta *Meta `xml:"meta,omitempty"` + Graph Graph `xml:"graph"` + // Version must be "1.2". 
+ Version string `xml:"version,attr"` + Variant string `xml:"variant,attr,omitempty"` +} + +// Meta holds optional metadata associated with the graph. +type Meta struct { + Creator string `xml:"creator,omitempty"` + Keywords string `xml:"keywords,omitempty"` + Description string `xml:"description,omitempty"` + LastModified time.Time `xml:"lastmodifieddate,attr,omitempty"` +} + +// MarshalXML implements the xml.Marshaler interface. +func (t *Meta) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + type T Meta + var layout struct { + *T + LastModified *xsdDate `xml:"lastmodifieddate,attr,omitempty"` + } + layout.T = (*T)(t) + layout.LastModified = (*xsdDate)(&layout.T.LastModified) + return e.EncodeElement(layout, start) +} + +// UnmarshalXML implements the xml.Unmarshaler interface. +func (t *Meta) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { + type T Meta + var overlay struct { + *T + LastModified *xsdDate `xml:"lastmodifieddate,attr,omitempty"` + } + overlay.T = (*T)(t) + overlay.LastModified = (*xsdDate)(&overlay.T.LastModified) + return d.DecodeElement(&overlay, &start) +} + +// Graph stores the graph nodes, edges, dynamics and visualization data. +type Graph struct { + Attributes []Attributes `xml:"attributes"` + Nodes Nodes `xml:"nodes"` + Edges Edges `xml:"edges"` + // TimeFormat may be one of "integer", "double", "date" or "dateTime". + TimeFormat string `xml:"timeformat,attr,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` + // DefaultEdgeType may be one of "directed", "undirected" or "mutual". + DefaultEdgeType string `xml:"defaultedgetype,attr,omitempty"` + // IDType may be one of "integer" or "string". + IDType string `xml:"idtype,attr,omitempty"` + // Mode may be "static" or "dynamic". + Mode string `xml:"mode,attr,omitempty"` +} + +// Attributes holds a collection of potentially dynamic attributes +// associated with a graph. +type Attributes struct { + Attributes []Attribute `xml:"attribute,omitempty"` + // Class be one of "node" or "edge". + Class string `xml:"class,attr"` + // Mode may be "static" or "dynamic". + Mode string `xml:"mode,attr,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Attribute holds a single graph attribute. +type Attribute struct { + ID string `xml:"id,attr"` + Title string `xml:"title,attr"` + // Type may be one of "integer", "long", "double", "float", + // "boolean", "liststring", "string", or "anyURI". + Type string `xml:"type,attr"` + Default string `xml:"default,omitempty"` + Options string `xml:"options,omitempty"` +} + +// Nodes holds a collection of nodes constituting a graph or subgraph. +type Nodes struct { + Count int `xml:"count,attr,omitempty"` + Nodes []Node `xml:"node,omitempty"` +} + +// Node is a single node and its associated attributes. 
+type Node struct { + ID string `xml:"id,attr,omitempty"` + Label string `xml:"label,attr,omitempty"` + AttValues *AttValues `xml:"attvalues"` + Spells *Spells `xml:"spells"` + Nodes *Nodes `xml:"nodes"` + Edges *Edges `xml:"edges"` + ParentID string `xml:"pid,attr,omitempty"` + Parents *Parents `xml:"parents"` + Color *Color `xml:"http://www.gexf.net/1.2draft/viz color"` + Position *Position `xml:"http://www.gexf.net/1.2draft/viz position"` + Size *Size `xml:"http://www.gexf.net/1.2draft/viz size"` + Shape *NodeShape `xml:"http://www.gexf.net/1.2draft/viz shape"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// NodeShape holds the visual representation of a node with associated +// dynamics. +type NodeShape struct { + Spells *Spells `xml:"spells,omitempty"` + // Value be one of "disc", "square", "triangle", + // "diamond" or "image". + Shape string `xml:"value,attr"` + URI string `xml:"uri,attr,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Color represents a node or edge color and its associated dynamics. +type Color struct { + Spells *Spells `xml:"spells,omitempty"` + R byte `xml:"r,attr"` + G byte `xml:"g,attr"` + B byte `xml:"b,attr"` + A float64 `xml:"a,attr,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Edges holds a collection of edges constituting a graph or subgraph. +type Edges struct { + Count int `xml:"count,attr,omitempty"` + Edges []Edge `xml:"edge,omitempty"` +} + +// Edge is a single edge and its associated attributes. +type Edge struct { + ID string `xml:"id,attr,omitempty"` + AttValues *AttValues `xml:"attvalues"` + Spells *Spells `xml:"spells"` + Color *Color `xml:"http://www.gexf.net/1.2draft/viz color"` + Thickness *Thickness `xml:"http://www.gexf.net/1.2draft/viz thickness"` + Shape *Edgeshape `xml:"http://www.gexf.net/1.2draft/viz shape"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` + // Type may be one of directed, undirected, mutual + Type string `xml:"type,attr,omitempty"` + Label string `xml:"label,attr,omitempty"` + Source string `xml:"source,attr"` + Target string `xml:"target,attr"` + Weight float64 `xml:"weight,attr,omitempty"` +} + +// AttVlues holds a collection of attribute values. +type AttValues struct { + AttValues []AttValue `xml:"attvalue,omitempty"` +} + +// AttValues holds a single attribute value and its associated dynamics. +type AttValue struct { + For string `xml:"for,attr"` + Value string `xml:"value,attr"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// EdgeShape holds the visual representation of an edge with associated +// dynamics. 
+type Edgeshape struct { + // Shape be one of solid, dotted, dashed, double + Shape string `xml:"value,attr"` + Spells *Spells `xml:"spells,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Parents holds parent relationships between nodes in a hierarchical +// graph. +type Parents struct { + Parents []Parent `xml:"parent,omitempty"` +} + +// Parent is a single parent relationship. +type Parent struct { + For string `xml:"for,attr"` +} + +// Position hold the spatial position of a node and its dynamics. +type Position struct { + X float64 `xml:"x,attr"` + Y float64 `xml:"y,attr"` + Z float64 `xml:"z,attr"` + Spells *Spells `xml:"spells,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Size hold the visual size of a node and its dynamics. +type Size struct { + Value float64 `xml:"value,attr"` + Spells *Spells `xml:"http://www.gexf.net/1.2draft/viz spells,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Thickness hold the visual thickness of an edge and its dynamics. +type Thickness struct { + Value float64 `xml:"value,attr"` + Spells *Spells `xml:"http://www.gexf.net/1.2draft/viz spells,omitempty"` + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +// Spells holds a collection of time dynamics for a graph entity. +type Spells struct { + Spells []Spell `xml:"spell"` +} + +// Spell is a time interval. +type Spell struct { + Start string `xml:"start,attr,omitempty"` + StartOpen string `xml:"startopen,attr,omitempty"` + End string `xml:"end,attr,omitempty"` + EndOpen string `xml:"endopen,attr,omitempty"` +} + +type xsdDate time.Time + +func (t *xsdDate) UnmarshalText(text []byte) error { + return _unmarshalTime(text, (*time.Time)(t), "2006-01-02") +} + +func (t xsdDate) MarshalText() ([]byte, error) { + return []byte((time.Time)(t).Format("2006-01-02")), nil +} + +func (t xsdDate) MarshalXML(e *xml.Encoder, start xml.StartElement) error { + if (time.Time)(t).IsZero() { + return nil + } + m, err := t.MarshalText() + if err != nil { + return err + } + return e.EncodeElement(m, start) +} + +func (t xsdDate) MarshalXMLAttr(name xml.Name) (xml.Attr, error) { + if (time.Time)(t).IsZero() { + return xml.Attr{}, nil + } + m, err := t.MarshalText() + return xml.Attr{Name: name, Value: string(m)}, err +} + +func _unmarshalTime(text []byte, t *time.Time, format string) (err error) { + s := string(bytes.TrimSpace(text)) + *t, err = time.Parse(format, s) + if _, ok := err.(*time.ParseError); ok { + *t, err = time.Parse(format+"Z07:00", s) + } + return err +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/sigmajs.go b/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/sigmajs.go new file mode 100644 index 0000000..0950003 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/sigmajs.go @@ -0,0 +1,128 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package sigmajs implements marshaling and unmarshaling of Sigma.js JSON documents. +// +// See http://sigmajs.org/ for Sigma.js documentation. +package sigmajs // import "gonum.org/v1/gonum/graph/formats/sigmajs" + +import ( + "encoding/json" + "errors" + "fmt" +) + +// Graph is a Sigma.js graph. +type Graph struct { + Nodes []Node `json:"nodes"` + Edges []Edge `json:"edges"` +} + +// Node is a Sigma.js node. +type Node struct { + ID string + Attributes map[string]interface{} +} + +var ( + _ json.Marshaler = (*Node)(nil) + _ json.Unmarshaler = (*Node)(nil) +) + +// MarshalJSON implements the json.Marshaler interface. +func (n *Node) MarshalJSON() ([]byte, error) { + if n.Attributes == nil { + type node struct { + ID string `json:"id"` + } + return json.Marshal(node{ID: n.ID}) + } + n.Attributes["id"] = n.ID + b, err := json.Marshal(n.Attributes) + delete(n.Attributes, "id") + return b, err +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (n *Node) UnmarshalJSON(data []byte) error { + var attrs map[string]interface{} + err := json.Unmarshal(data, &attrs) + if err != nil { + return err + } + id, ok := attrs["id"] + if !ok { + return errors.New("sigmajs: no ID") + } + n.ID = fmt.Sprint(id) + delete(attrs, "id") + if len(attrs) != 0 { + n.Attributes = attrs + } + return nil +} + +// Edge is a Sigma.js edge. +type Edge struct { + ID string + Source string + Target string + Attributes map[string]interface{} +} + +var ( + _ json.Marshaler = (*Edge)(nil) + _ json.Unmarshaler = (*Edge)(nil) +) + +// MarshalJSON implements the json.Marshaler interface. +func (e *Edge) MarshalJSON() ([]byte, error) { + if e.Attributes == nil { + type edge struct { + ID string `json:"id"` + Source string `json:"source"` + Target string `json:"target"` + } + return json.Marshal(edge{ID: e.ID, Source: e.Source, Target: e.Target}) + } + e.Attributes["id"] = e.ID + e.Attributes["source"] = e.Source + e.Attributes["target"] = e.Target + b, err := json.Marshal(e.Attributes) + delete(e.Attributes, "id") + delete(e.Attributes, "source") + delete(e.Attributes, "target") + return b, err +} + +// UnmarshalJSON implements the json.Unmarshaler interface. 
+func (e *Edge) UnmarshalJSON(data []byte) error { + var attrs map[string]interface{} + err := json.Unmarshal(data, &attrs) + if err != nil { + return err + } + id, ok := attrs["id"] + if !ok { + return errors.New("sigmajs: no ID") + } + source, ok := attrs["source"] + if !ok { + return errors.New("sigmajs: no source") + } + target, ok := attrs["target"] + if !ok { + return errors.New("sigmajs: no target") + } + e.ID = fmt.Sprint(id) + e.Source = fmt.Sprint(source) + e.Target = fmt.Sprint(target) + delete(attrs, "id") + delete(attrs, "source") + delete(attrs, "target") + if len(attrs) != 0 { + e.Attributes = attrs + } + return nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/testdata/LICENSE.txt b/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/testdata/LICENSE.txt new file mode 100644 index 0000000..81739df --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/formats/sigmajs/testdata/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (C) 2013-2014, Alexis Jacomy, http://sigmajs.org + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/vendor/gonum.org/v1/gonum/graph/graph.go b/vendor/gonum.org/v1/gonum/graph/graph.go new file mode 100644 index 0000000..c973583 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graph.go @@ -0,0 +1,282 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +// Node is a graph node. It returns a graph-unique integer ID. +type Node interface { + ID() int64 +} + +// Edge is a graph edge. In directed graphs, the direction of the +// edge is given from -> to, otherwise the edge is semantically +// unordered. +type Edge interface { + // From returns the from node of the edge. + From() Node + + // To returns the to node of the edge. + To() Node + + // ReversedEdge returns an edge that has + // the end points of the receiver swapped. + ReversedEdge() Edge +} + +// WeightedEdge is a weighted graph edge. In directed graphs, the direction +// of the edge is given from -> to, otherwise the edge is semantically +// unordered. +type WeightedEdge interface { + Edge + Weight() float64 +} + +// Graph is a generalized graph. +type Graph interface { + // Node returns the node with the given ID if it exists + // in the graph, and nil otherwise. + Node(id int64) Node + + // Nodes returns all the nodes in the graph. + // + // Nodes must not return nil. + Nodes() Nodes + + // From returns all nodes that can be reached directly + // from the node with the given ID. 
+ // + // From must not return nil. + From(id int64) Nodes + + // HasEdgeBetween returns whether an edge exists between + // nodes with IDs xid and yid without considering direction. + HasEdgeBetween(xid, yid int64) bool + + // Edge returns the edge from u to v, with IDs uid and vid, + // if such an edge exists and nil otherwise. The node v + // must be directly reachable from u as defined by the + // From method. + Edge(uid, vid int64) Edge +} + +// Weighted is a weighted graph. +type Weighted interface { + Graph + + // WeightedEdge returns the weighted edge from u to v + // with IDs uid and vid if such an edge exists and + // nil otherwise. The node v must be directly + // reachable from u as defined by the From method. + WeightedEdge(uid, vid int64) WeightedEdge + + // Weight returns the weight for the edge between + // x and y with IDs xid and yid if Edge(xid, yid) + // returns a non-nil Edge. + // If x and y are the same node or there is no + // joining edge between the two nodes the weight + // value returned is implementation dependent. + // Weight returns true if an edge exists between + // x and y or if x and y have the same ID, false + // otherwise. + Weight(xid, yid int64) (w float64, ok bool) +} + +// Undirected is an undirected graph. +type Undirected interface { + Graph + + // EdgeBetween returns the edge between nodes x and y + // with IDs xid and yid. + EdgeBetween(xid, yid int64) Edge +} + +// WeightedUndirected is a weighted undirected graph. +type WeightedUndirected interface { + Weighted + + // WeightedEdgeBetween returns the edge between nodes + // x and y with IDs xid and yid. + WeightedEdgeBetween(xid, yid int64) WeightedEdge +} + +// Directed is a directed graph. +type Directed interface { + Graph + + // HasEdgeFromTo returns whether an edge exists + // in the graph from u to v with IDs uid and vid. + HasEdgeFromTo(uid, vid int64) bool + + // To returns all nodes that can reach directly + // to the node with the given ID. + // + // To must not return nil. + To(id int64) Nodes +} + +// WeightedDirected is a weighted directed graph. +type WeightedDirected interface { + Weighted + + // HasEdgeFromTo returns whether an edge exists + // in the graph from u to v with the IDs uid and + // vid. + HasEdgeFromTo(uid, vid int64) bool + + // To returns all nodes that can reach directly + // to the node with the given ID. + // + // To must not return nil. + To(id int64) Nodes +} + +// NodeAdder is an interface for adding arbitrary nodes to a graph. +type NodeAdder interface { + // NewNode returns a new Node with a unique + // arbitrary ID. + NewNode() Node + + // AddNode adds a node to the graph. AddNode panics if + // the added node ID matches an existing node ID. + AddNode(Node) +} + +// NodeRemover is an interface for removing nodes from a graph. +type NodeRemover interface { + // RemoveNode removes the node with the given ID + // from the graph, as well as any edges attached + // to it. If the node is not in the graph it is + // a no-op. + RemoveNode(id int64) +} + +// EdgeAdder is an interface for adding edges to a graph. +type EdgeAdder interface { + // NewEdge returns a new Edge from the source to the destination node. + NewEdge(from, to Node) Edge + + // SetEdge adds an edge from one node to another. + // If the graph supports node addition the nodes + // will be added if they do not exist, otherwise + // SetEdge will panic. + // The behavior of an EdgeAdder when the IDs + // returned by e.From() and e.To() are equal is + // implementation-dependent. 
+ // Whether e, e.From() and e.To() are stored + // within the graph is implementation dependent. + SetEdge(e Edge) +} + +// WeightedEdgeAdder is an interface for adding edges to a graph. +type WeightedEdgeAdder interface { + // NewWeightedEdge returns a new WeightedEdge from + // the source to the destination node. + NewWeightedEdge(from, to Node, weight float64) WeightedEdge + + // SetWeightedEdge adds an edge from one node to + // another. If the graph supports node addition + // the nodes will be added if they do not exist, + // otherwise SetWeightedEdge will panic. + // The behavior of a WeightedEdgeAdder when the IDs + // returned by e.From() and e.To() are equal is + // implementation-dependent. + // Whether e, e.From() and e.To() are stored + // within the graph is implementation dependent. + SetWeightedEdge(e WeightedEdge) +} + +// EdgeRemover is an interface for removing nodes from a graph. +type EdgeRemover interface { + // RemoveEdge removes the edge with the given end + // IDs, leaving the terminal nodes. If the edge + // does not exist it is a no-op. + RemoveEdge(fid, tid int64) +} + +// Builder is a graph that can have nodes and edges added. +type Builder interface { + NodeAdder + EdgeAdder +} + +// WeightedBuilder is a graph that can have nodes and weighted edges added. +type WeightedBuilder interface { + NodeAdder + WeightedEdgeAdder +} + +// UndirectedBuilder is an undirected graph builder. +type UndirectedBuilder interface { + Undirected + Builder +} + +// UndirectedWeightedBuilder is an undirected weighted graph builder. +type UndirectedWeightedBuilder interface { + Undirected + WeightedBuilder +} + +// DirectedBuilder is a directed graph builder. +type DirectedBuilder interface { + Directed + Builder +} + +// DirectedWeightedBuilder is a directed weighted graph builder. +type DirectedWeightedBuilder interface { + Directed + WeightedBuilder +} + +// Copy copies nodes and edges as undirected edges from the source to the destination +// without first clearing the destination. Copy will panic if a node ID in the source +// graph matches a node ID in the destination. +// +// If the source is undirected and the destination is directed both directions will +// be present in the destination after the copy is complete. +func Copy(dst Builder, src Graph) { + nodes := src.Nodes() + for nodes.Next() { + dst.AddNode(nodes.Node()) + } + nodes.Reset() + for nodes.Next() { + u := nodes.Node() + uid := u.ID() + to := src.From(uid) + for to.Next() { + v := to.Node() + dst.SetEdge(src.Edge(uid, v.ID())) + } + } +} + +// CopyWeighted copies nodes and edges as undirected edges from the source to the destination +// without first clearing the destination. Copy will panic if a node ID in the source +// graph matches a node ID in the destination. +// +// If the source is undirected and the destination is directed both directions will +// be present in the destination after the copy is complete. +// +// If the source is a directed graph, the destination is undirected, and a fundamental +// cycle exists with two nodes where the edge weights differ, the resulting destination +// graph's edge weight between those nodes is undefined. If there is a defined function +// to resolve such conflicts, an UndirectWeighted may be used to do this. 
+func CopyWeighted(dst WeightedBuilder, src Weighted) { + nodes := src.Nodes() + for nodes.Next() { + dst.AddNode(nodes.Node()) + } + nodes.Reset() + for nodes.Next() { + u := nodes.Node() + uid := u.ID() + to := src.From(uid) + for to.Next() { + v := to.Node() + dst.SetWeightedEdge(src.WeightedEdge(uid, v.ID())) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/batagelj_brandes.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/batagelj_brandes.go new file mode 100644 index 0000000..e637bbc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/batagelj_brandes.go @@ -0,0 +1,396 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The functions in this file are random graph generators from the paper +// by Batagelj and Brandes http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf + +package gen + +import ( + "fmt" + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" +) + +// Gnp constructs a Gilbert’s model subgraph in the destination, dst, of order n. Edges +// between nodes are formed with the probability, p. If src is not nil it is used +// as the random source, otherwise rand.Float64 is used. The graph is constructed +// in O(n+m) time where m is the number of edges added. +func Gnp(dst graph.Builder, n int, p float64, src rand.Source) error { + if p == 0 { + for i := 0; i < n; i++ { + dst.AddNode(dst.NewNode()) + } + return nil + } + if p < 0 || p > 1 { + return fmt.Errorf("gen: bad probability: p=%v", p) + } + var r func() float64 + if src == nil { + r = rand.Float64 + } else { + r = rand.New(src).Float64 + } + + nodes := make([]graph.Node, n) + for i := range nodes { + u := dst.NewNode() + dst.AddNode(u) + nodes[i] = u + } + + lp := math.Log(1 - p) + + // Add forward edges for all graphs. + for v, w := 1, -1; v < n; { + w += 1 + int(math.Log(1-r())/lp) + for w >= v && v < n { + w -= v + v++ + } + if v < n { + dst.SetEdge(dst.NewEdge(nodes[w], nodes[v])) + } + } + + // Add backward edges for directed graphs. + if _, ok := dst.(graph.Directed); !ok { + return nil + } + for v, w := 1, -1; v < n; { + w += 1 + int(math.Log(1-r())/lp) + for w >= v && v < n { + w -= v + v++ + } + if v < n { + dst.SetEdge(dst.NewEdge(nodes[v], nodes[w])) + } + } + + return nil +} + +// Gnm constructs a Erdős-Rényi model subgraph in the destination, dst, of +// order n and size m. If src is not nil it is used as the random source, +// otherwise rand.Intn is used. The graph is constructed in O(m) expected +// time for m ≤ (n choose 2)/2. +func Gnm(dst GraphBuilder, n, m int, src rand.Source) error { + if m == 0 { + for i := 0; i < n; i++ { + dst.AddNode(dst.NewNode()) + } + return nil + } + + hasEdge := dst.HasEdgeBetween + d, isDirected := dst.(graph.Directed) + if isDirected { + m /= 2 + hasEdge = d.HasEdgeFromTo + } + + nChoose2 := (n - 1) * n / 2 + if m < 0 || m > nChoose2 { + return fmt.Errorf("gen: bad size: m=%d", m) + } + + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = rand.New(src).Intn + } + + nodes := make([]graph.Node, n) + for i := range nodes { + u := dst.NewNode() + dst.AddNode(u) + nodes[i] = u + } + + // Add forward edges for all graphs. + for i := 0; i < m; i++ { + for { + v, w := edgeNodesFor(rnd(nChoose2), nodes) + if !hasEdge(w.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(w, v)) + break + } + } + } + + // Add backward edges for directed graphs. 
+ if !isDirected { + return nil + } + for i := 0; i < m; i++ { + for { + v, w := edgeNodesFor(rnd(nChoose2), nodes) + if !hasEdge(v.ID(), w.ID()) { + dst.SetEdge(dst.NewEdge(v, w)) + break + } + } + } + + return nil +} + +// SmallWorldsBB constructs a small worlds subgraph of order n in the destination, dst. +// Node degree is specified by d and edge replacement by the probability, p. +// If src is not nil it is used as the random source, otherwise rand.Float64 is used. +// The graph is constructed in O(nd) time. +// +// The algorithm used is described in http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf +func SmallWorldsBB(dst GraphBuilder, n, d int, p float64, src rand.Source) error { + if d < 1 || d > (n-1)/2 { + return fmt.Errorf("gen: bad degree: d=%d", d) + } + if p == 0 { + for i := 0; i < n; i++ { + dst.AddNode(dst.NewNode()) + } + return nil + } + if p < 0 || p >= 1 { + return fmt.Errorf("gen: bad replacement: p=%v", p) + } + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + r := rand.New(src) + rnd = r.Float64 + rndN = r.Intn + } + + hasEdge := dst.HasEdgeBetween + dg, isDirected := dst.(graph.Directed) + if isDirected { + hasEdge = dg.HasEdgeFromTo + } + + nodes := make([]graph.Node, n) + for i := range nodes { + u := dst.NewNode() + dst.AddNode(u) + nodes[i] = u + } + + nChoose2 := (n - 1) * n / 2 + + lp := math.Log(1 - p) + + // Add forward edges for all graphs. + k := int(math.Log(1-rnd()) / lp) + m := 0 + replace := make(map[int]int) + for v := 0; v < n; v++ { + for i := 1; i <= d; i++ { + if k > 0 { + j := v*(v-1)/2 + (v+i)%n + if v, u := edgeNodesFor(j, nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } + k-- + m++ + + // For small graphs, m may be an + // edge that has an end that is + // not in the subgraph. + if m >= nChoose2 { + // Since m is monotonically + // increasing, no m edges from + // here on are valid, so don't + // add them to replace. + continue + } + + if v, u := edgeNodesFor(m, nodes); !hasEdge(u.ID(), v.ID()) { + replace[j] = m + } else { + replace[j] = replace[m] + } + } else { + k = int(math.Log(1-rnd()) / lp) + } + } + } + for i := m + 1; i <= n*d && i < nChoose2; i++ { + r := rndN(nChoose2-i) + i + if v, u := edgeNodesFor(r, nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } else if v, u = edgeNodesFor(replace[r], nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } + if v, u := edgeNodesFor(i, nodes); !hasEdge(u.ID(), v.ID()) { + replace[r] = i + } else { + replace[r] = replace[i] + } + } + + // Add backward edges for directed graphs. + if !isDirected { + return nil + } + k = int(math.Log(1-rnd()) / lp) + m = 0 + replace = make(map[int]int) + for v := 0; v < n; v++ { + for i := 1; i <= d; i++ { + if k > 0 { + j := v*(v-1)/2 + (v+i)%n + if u, v := edgeNodesFor(j, nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } + k-- + m++ + + // For small graphs, m may be an + // edge that has an end that is + // not in the subgraph. + if m >= nChoose2 { + // Since m is monotonically + // increasing, no m edges from + // here on are valid, so don't + // add them to replace. 
+ continue + } + + if u, v := edgeNodesFor(m, nodes); !hasEdge(u.ID(), v.ID()) { + replace[j] = m + } else { + replace[j] = replace[m] + } + } else { + k = int(math.Log(1-rnd()) / lp) + } + } + } + for i := m + 1; i <= n*d && i < nChoose2; i++ { + r := rndN(nChoose2-i) + i + if u, v := edgeNodesFor(r, nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } else if u, v = edgeNodesFor(replace[r], nodes); !hasEdge(u.ID(), v.ID()) { + dst.SetEdge(dst.NewEdge(u, v)) + } + if u, v := edgeNodesFor(i, nodes); !hasEdge(u.ID(), v.ID()) { + replace[r] = i + } else { + replace[r] = replace[i] + } + } + + return nil +} + +// edgeNodesFor returns the pair of nodes for the ith edge in a simple +// undirected graph. The pair is returned such that the index of w in +// nodes is less than the index of v in nodes. +func edgeNodesFor(i int, nodes []graph.Node) (v, w graph.Node) { + // This is an algebraic simplification of the expressions described + // on p3 of http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf + vi := int(0.5 + math.Sqrt(float64(1+8*i))/2) + wi := i - vi*(vi-1)/2 + return nodes[vi], nodes[wi] +} + +// Multigraph generators. + +// PowerLaw constructs a power-law degree graph by preferential attachment in dst +// with n nodes and minimum degree d. PowerLaw does not consider nodes in dst prior +// to the call. If src is not nil it is used as the random source, otherwise rand.Intn +// is used. +// The graph is constructed in O(nd) — O(n+m) — time. +// +// The algorithm used is described in http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf +func PowerLaw(dst graph.MultigraphBuilder, n, d int, src rand.Source) error { + if d < 1 { + return fmt.Errorf("gen: bad minimum degree: d=%d", d) + } + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = rand.New(src).Intn + } + + m := make([]graph.Node, 2*n*d) + for v := 0; v < n; v++ { + x := dst.NewNode() + dst.AddNode(x) + + for i := 0; i < d; i++ { + m[2*(v*d+i)] = x + m[2*(v*d+i)+1] = m[rnd(2*v*d+i+1)] + } + } + for i := 0; i < n*d; i++ { + dst.SetLine(dst.NewLine(m[2*i], m[2*i+1])) + } + + return nil +} + +// BipartitePowerLaw constructs a bipartite power-law degree graph by preferential attachment +// in dst with 2×n nodes and minimum degree d. BipartitePowerLaw does not consider nodes in +// dst prior to the call. The two partitions are returned in p1 and p2. If src is not nil it is +// used as the random source, otherwise rand.Intn is used. +// The graph is constructed in O(nd) — O(n+m) — time. 
+// +// The algorithm used is described in http://algo.uni-konstanz.de/publications/bb-eglrn-05.pdf +func BipartitePowerLaw(dst graph.MultigraphBuilder, n, d int, src rand.Source) (p1, p2 []graph.Node, err error) { + if d < 1 { + return nil, nil, fmt.Errorf("gen: bad minimum degree: d=%d", d) + } + var rnd func(int) int + if src == nil { + rnd = rand.Intn + } else { + rnd = rand.New(src).Intn + } + + p := make([]graph.Node, 2*n) + for i := range p { + u := dst.NewNode() + dst.AddNode(u) + p[i] = u + } + + m1 := make([]graph.Node, 2*n*d) + m2 := make([]graph.Node, 2*n*d) + for v := 0; v < n; v++ { + for i := 0; i < d; i++ { + m1[2*(v*d+i)] = p[v] + m2[2*(v*d+i)] = p[n+v] + + if r := rnd(2*v*d + i + 1); r&0x1 == 0 { + m1[2*(v*d+i)+1] = m2[r] + } else { + m1[2*(v*d+i)+1] = m1[r] + } + + if r := rnd(2*v*d + i + 1); r&0x1 == 0 { + m2[2*(v*d+i)+1] = m1[r] + } else { + m2[2*(v*d+i)+1] = m2[r] + } + } + } + for i := 0; i < n*d; i++ { + dst.SetLine(dst.NewLine(m1[2*i], m1[2*i+1])) + dst.SetLine(dst.NewLine(m2[2*i], m2[2*i+1])) + } + return p[:n], p[n:], nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/doc.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/doc.go new file mode 100644 index 0000000..0cb899d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gen provides random graph generation functions. +package gen // import "gonum.org/v1/gonum/graph/graphs/gen" diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/duplication.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/duplication.go new file mode 100644 index 0000000..dd9fba6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/duplication.go @@ -0,0 +1,131 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "fmt" + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" +) + +// UndirectedMutator is an undirected graph builder that can remove edges. +type UndirectedMutator interface { + graph.UndirectedBuilder + graph.EdgeRemover +} + +// Duplication constructs a graph in the destination, dst, of order n. New nodes +// are created by duplicating an existing node and all its edges. Each new edge is +// deleted with probability delta. Additional edges are added between the new node +// and existing nodes with probability alpha/|V|. An exception to this addition +// rule is made for the parent node when sigma is not NaN; in this case an edge is +// created with probability sigma. With the exception of the sigma parameter, this +// corresponds to the completely correlated case in doi:10.1016/S0022-5193(03)00028-6. +// If src is not nil it is used as the random source, otherwise rand.Float64 is used. +func Duplication(dst UndirectedMutator, n int, delta, alpha, sigma float64, src rand.Source) error { + // As described in doi:10.1016/S0022-5193(03)00028-6 but + // also clarified in doi:10.1186/gb-2007-8-4-r51. 
+ + if delta < 0 || delta > 1 { + return fmt.Errorf("gen: bad delta: delta=%v", delta) + } + if alpha <= 0 || alpha > 1 { + return fmt.Errorf("gen: bad alpha: alpha=%v", alpha) + } + if sigma < 0 || sigma > 1 { + return fmt.Errorf("gen: bad sigma: sigma=%v", sigma) + } + + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + r := rand.New(src) + rnd = r.Float64 + rndN = r.Intn + } + + nodes := graph.NodesOf(dst.Nodes()) + sort.Sort(ordered.ByID(nodes)) + if len(nodes) == 0 { + n-- + u := dst.NewNode() + dst.AddNode(u) + nodes = append(nodes, u) + } + for i := 0; i < n; i++ { + u := nodes[rndN(len(nodes))] + d := dst.NewNode() + did := d.ID() + + // Add the duplicate node. + dst.AddNode(d) + + // Loop until we have connectivity + // into the rest of the graph. + for { + // Add edges to parent's neighbours. + to := graph.NodesOf(dst.From(u.ID())) + sort.Sort(ordered.ByID(to)) + for _, v := range to { + vid := v.ID() + if rnd() < delta || dst.HasEdgeBetween(vid, did) { + continue + } + if vid < did { + dst.SetEdge(dst.NewEdge(v, d)) + } else { + dst.SetEdge(dst.NewEdge(d, v)) + } + } + + // Add edges to old nodes. + scaledAlpha := alpha / float64(len(nodes)) + for _, v := range nodes { + uid := u.ID() + vid := v.ID() + switch vid { + case uid: + if !math.IsNaN(sigma) { + if i == 0 || rnd() < sigma { + if vid < did { + dst.SetEdge(dst.NewEdge(v, d)) + } else { + dst.SetEdge(dst.NewEdge(d, v)) + } + } + continue + } + fallthrough + default: + if rnd() < scaledAlpha && !dst.HasEdgeBetween(vid, did) { + if vid < did { + dst.SetEdge(dst.NewEdge(v, d)) + } else { + dst.SetEdge(dst.NewEdge(d, v)) + } + } + } + } + + if dst.From(did).Len() != 0 { + break + } + } + + nodes = append(nodes, d) + } + + return nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/gen.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/gen.go new file mode 100644 index 0000000..e632b8f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/gen.go @@ -0,0 +1,20 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import "gonum.org/v1/gonum/graph" + +// GraphBuilder is a graph that can have nodes and edges added. +type GraphBuilder interface { + HasEdgeBetween(xid, yid int64) bool + graph.Builder +} + +func abs(a int) int { + if a < 0 { + return -a + } + return a +} diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/holme_kim.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/holme_kim.go new file mode 100644 index 0000000..8f49995 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/holme_kim.go @@ -0,0 +1,170 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "errors" + "fmt" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/simple" + "gonum.org/v1/gonum/stat/sampleuv" +) + +// TunableClusteringScaleFree constructs a subgraph in the destination, dst, of order n. +// The graph is constructed successively starting from an m order graph with one node +// having degree m-1. At each iteration of graph addition, one node is added with m +// additional edges joining existing nodes with probability proportional to the nodes' +// degrees. The edges are formed as a triad with probability, p. 
+// If src is not nil it is used as the random source, otherwise rand.Float64 and +// rand.Intn are used for the random number generators. +// +// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452. +func TunableClusteringScaleFree(dst graph.UndirectedBuilder, n, m int, p float64, src rand.Source) error { + if p < 0 || p > 1 { + return fmt.Errorf("gen: bad probability: p=%v", p) + } + if n <= m { + return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) + } + + var ( + rnd func() float64 + rndN func(int) int + ) + if src == nil { + rnd = rand.Float64 + rndN = rand.Intn + } else { + r := rand.New(src) + rnd = r.Float64 + rndN = r.Intn + } + + // Initial condition. + wt := make([]float64, n) + id := make([]int64, n) + for u := 0; u < m; u++ { + un := dst.NewNode() + dst.AddNode(un) + id[u] = un.ID() + // We need to give equal probability for + // adding the first generation of edges. + wt[u] = 1 + } + ws := sampleuv.NewWeighted(wt, src) + for i := range wt { + // These weights will organically grow + // after the first growth iteration. + wt[i] = 0 + } + + // Growth. + for v := m; v < n; v++ { + vn := dst.NewNode() + dst.AddNode(vn) + id[v] = vn.ID() + var u int + pa: + for i := 0; i < m; i++ { + // Triad formation. + if i != 0 && rnd() < p { + // TODO(kortschak): Decide whether the node + // order in this input to permute should be + // sorted first to allow repeatable runs. + for _, w := range permute(graph.NodesOf(dst.From(id[u])), rndN) { + wid := w.ID() + if wid == id[v] || dst.HasEdgeBetween(wid, id[v]) { + continue + } + + dst.SetEdge(dst.NewEdge(w, vn)) + wt[wid]++ + wt[v]++ + continue pa + } + } + + // Preferential attachment. + for { + var ok bool + u, ok = ws.Take() + if !ok { + return errors.New("gen: depleted distribution") + } + if u == v || dst.HasEdgeBetween(id[u], id[v]) { + continue + } + dst.SetEdge(dst.NewEdge(dst.Node(id[u]), vn)) + wt[u]++ + wt[v]++ + break + } + } + + ws.ReweightAll(wt) + } + + return nil +} + +func permute(n []graph.Node, rnd func(int) int) []graph.Node { + for i := range n[:len(n)-1] { + j := rnd(len(n)-i) + i + n[i], n[j] = n[j], n[i] + } + return n +} + +// PreferentialAttachment constructs a graph in the destination, dst, of order n. +// The graph is constructed successively starting from an m order graph with one +// node having degree m-1. At each iteration of graph addition, one node is added +// with m additional edges joining existing nodes with probability proportional +// to the nodes' degrees. If src is not nil it is used as the random source, +// otherwise rand.Float64 is used for the random number generator. +// +// The algorithm is essentially as described in http://arxiv.org/abs/cond-mat/0110452 +// after 10.1126/science.286.5439.509. +func PreferentialAttachment(dst graph.UndirectedBuilder, n, m int, src rand.Source) error { + if n <= m { + return fmt.Errorf("gen: n <= m: n=%v m=%d", n, m) + } + + // Initial condition. + wt := make([]float64, n) + for u := 0; u < m; u++ { + if dst.Node(int64(u)) == nil { + dst.AddNode(simple.Node(u)) + } + // We need to give equal probability for + // adding the first generation of edges. + wt[u] = 1 + } + ws := sampleuv.NewWeighted(wt, src) + for i := range wt { + // These weights will organically grow + // after the first growth iteration. + wt[i] = 0 + } + + // Growth. + for v := m; v < n; v++ { + for i := 0; i < m; i++ { + // Preferential attachment. 
+ u, ok := ws.Take() + if !ok { + return errors.New("gen: depleted distribution") + } + dst.SetEdge(simple.Edge{F: simple.Node(u), T: simple.Node(v)}) + wt[u]++ + wt[v]++ + } + ws.ReweightAll(wt) + } + + return nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/graphs/gen/small_world.go b/vendor/gonum.org/v1/gonum/graph/graphs/gen/small_world.go new file mode 100644 index 0000000..1846ef1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/graphs/gen/small_world.go @@ -0,0 +1,204 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gen + +import ( + "errors" + "fmt" + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/stat/sampleuv" +) + +// NavigableSmallWorld constructs an N-dimensional grid with guaranteed local connectivity +// and random long-range connectivity as a subgraph in the destination, dst. +// The dims parameters specifies the length of each of the N dimensions, p defines the +// Manhattan distance between local nodes, and q defines the number of out-going long-range +// connections from each node. Long-range connections are made with a probability +// proportional to |d(u,v)|^-r where d is the Manhattan distance between non-local nodes. +// +// The algorithm is essentially as described on p4 of http://www.cs.cornell.edu/home/kleinber/swn.pdf. +func NavigableSmallWorld(dst GraphBuilder, dims []int, p, q int, r float64, src rand.Source) (err error) { + if p < 1 { + return fmt.Errorf("gen: bad local distance: p=%v", p) + } + if q < 0 { + return fmt.Errorf("gen: bad distant link count: q=%v", q) + } + if r < 0 { + return fmt.Errorf("gen: bad decay constant: r=%v", r) + } + + n := 1 + for _, d := range dims { + n *= d + } + nodes := make([]graph.Node, n) + for i := range nodes { + u := dst.NewNode() + dst.AddNode(u) + nodes[i] = u + } + + hasEdge := dst.HasEdgeBetween + d, isDirected := dst.(graph.Directed) + if isDirected { + hasEdge = d.HasEdgeFromTo + } + + locality := make([]int, len(dims)) + for i := range locality { + locality[i] = p*2 + 1 + } + iterateOver(dims, func(u []int) { + un := nodes[idxFrom(u, dims)] + iterateOver(locality, func(delta []int) { + d := manhattanDelta(u, delta, dims, -p) + if d == 0 || d > p { + return + } + vn := nodes[idxFromDelta(u, delta, dims, -p)] + if un.ID() > vn.ID() { + un, vn = vn, un + } + if !hasEdge(un.ID(), vn.ID()) { + dst.SetEdge(dst.NewEdge(un, vn)) + } + if !isDirected { + return + } + un, vn = vn, un + if !hasEdge(un.ID(), vn.ID()) { + dst.SetEdge(dst.NewEdge(un, vn)) + } + }) + }) + + defer func() { + r := recover() + if r != nil { + if r != "depleted distribution" { + panic(r) + } + err = errors.New("depleted distribution") + } + }() + w := make([]float64, n) + ws := sampleuv.NewWeighted(w, src) + iterateOver(dims, func(u []int) { + un := nodes[idxFrom(u, dims)] + iterateOver(dims, func(v []int) { + d := manhattanBetween(u, v) + if d <= p { + return + } + w[idxFrom(v, dims)] = math.Pow(float64(d), -r) + }) + ws.ReweightAll(w) + for i := 0; i < q; i++ { + vidx, ok := ws.Take() + if !ok { + panic("depleted distribution") + } + vn := nodes[vidx] + if !isDirected && un.ID() > vn.ID() { + un, vn = vn, un + } + if !hasEdge(un.ID(), vn.ID()) { + dst.SetEdge(dst.NewEdge(un, vn)) + } + } + for i := range w { + w[i] = 0 + } + }) + + return nil +} + +// iterateOver performs an iteration over all dimensions of dims, calling fn +// for each state. 
The elements of state must not be mutated by fn. +func iterateOver(dims []int, fn func(state []int)) { + iterator(0, dims, make([]int, len(dims)), fn) +} + +func iterator(d int, dims, state []int, fn func(state []int)) { + if d >= len(dims) { + fn(state) + return + } + for i := 0; i < dims[d]; i++ { + state[d] = i + iterator(d+1, dims, state, fn) + } +} + +// manhattanBetween returns the Manhattan distance between a and b. +func manhattanBetween(a, b []int) int { + if len(a) != len(b) { + panic("gen: unexpected dimension") + } + var d int + for i, v := range a { + d += abs(v - b[i]) + } + return d +} + +// manhattanDelta returns the Manhattan norm of delta+translate. If a +// translated by delta+translate is out of the range given by dims, +// zero is returned. +func manhattanDelta(a, delta, dims []int, translate int) int { + if len(a) != len(dims) { + panic("gen: unexpected dimension") + } + if len(delta) != len(dims) { + panic("gen: unexpected dimension") + } + var d int + for i, v := range delta { + v += translate + t := a[i] + v + if t < 0 || t >= dims[i] { + return 0 + } + d += abs(v) + } + return d +} + +// idxFrom returns a node index for the slice n over the given dimensions. +func idxFrom(n, dims []int) int { + s := 1 + var id int + for d, m := range dims { + p := n[d] + if p < 0 || p >= m { + panic("gen: element out of range") + } + id += p * s + s *= m + } + return id +} + +// idxFromDelta returns a node index for the slice base plus the delta over the given +// dimensions and applying the translation. +func idxFromDelta(base, delta, dims []int, translate int) int { + s := 1 + var id int + for d, m := range dims { + n := base[d] + delta[d] + translate + if n < 0 || n >= m { + panic("gen: element out of range") + } + id += n * s + s *= m + } + return id +} diff --git a/vendor/gonum.org/v1/gonum/graph/internal/linear/doc.go b/vendor/gonum.org/v1/gonum/graph/internal/linear/doc.go new file mode 100644 index 0000000..88d1cb8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/linear/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package linear provides common linear data structures. +package linear // import "gonum.org/v1/gonum/graph/internal/linear" diff --git a/vendor/gonum.org/v1/gonum/graph/internal/linear/linear.go b/vendor/gonum.org/v1/gonum/graph/internal/linear/linear.go new file mode 100644 index 0000000..62e19db --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/linear/linear.go @@ -0,0 +1,73 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package linear + +import ( + "gonum.org/v1/gonum/graph" +) + +// NodeStack implements a LIFO stack of graph.Node. +type NodeStack []graph.Node + +// Len returns the number of graph.Nodes on the stack. +func (s *NodeStack) Len() int { return len(*s) } + +// Pop returns the last graph.Node on the stack and removes it +// from the stack. +func (s *NodeStack) Pop() graph.Node { + v := *s + v, n := v[:len(v)-1], v[len(v)-1] + *s = v + return n +} + +// Push adds the node n to the stack at the last position. +func (s *NodeStack) Push(n graph.Node) { *s = append(*s, n) } + +// NodeQueue implements a FIFO queue. +type NodeQueue struct { + head int + data []graph.Node +} + +// Len returns the number of graph.Nodes in the queue. 
+func (q *NodeQueue) Len() int { return len(q.data) - q.head } + +// Enqueue adds the node n to the back of the queue. +func (q *NodeQueue) Enqueue(n graph.Node) { + if len(q.data) == cap(q.data) && q.head > 0 { + l := q.Len() + copy(q.data, q.data[q.head:]) + q.head = 0 + q.data = append(q.data[:l], n) + } else { + q.data = append(q.data, n) + } +} + +// Dequeue returns the graph.Node at the front of the queue and +// removes it from the queue. +func (q *NodeQueue) Dequeue() graph.Node { + if q.Len() == 0 { + panic("queue: empty queue") + } + + var n graph.Node + n, q.data[q.head] = q.data[q.head], nil + q.head++ + + if q.Len() == 0 { + q.head = 0 + q.data = q.data[:0] + } + + return n +} + +// Reset clears the queue for reuse. +func (q *NodeQueue) Reset() { + q.head = 0 + q.data = q.data[:0] +} diff --git a/vendor/gonum.org/v1/gonum/graph/internal/ordered/doc.go b/vendor/gonum.org/v1/gonum/graph/internal/ordered/doc.go new file mode 100644 index 0000000..563df6f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/ordered/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ordered provides common sort ordering types. +package ordered // import "gonum.org/v1/gonum/graph/internal/ordered" diff --git a/vendor/gonum.org/v1/gonum/graph/internal/ordered/sort.go b/vendor/gonum.org/v1/gonum/graph/internal/ordered/sort.go new file mode 100644 index 0000000..a7250d1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/ordered/sort.go @@ -0,0 +1,93 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ordered + +import "gonum.org/v1/gonum/graph" + +// ByID implements the sort.Interface sorting a slice of graph.Node +// by ID. +type ByID []graph.Node + +func (n ByID) Len() int { return len(n) } +func (n ByID) Less(i, j int) bool { return n[i].ID() < n[j].ID() } +func (n ByID) Swap(i, j int) { n[i], n[j] = n[j], n[i] } + +// BySliceValues implements the sort.Interface sorting a slice of +// []int64 lexically by the values of the []int64. +type BySliceValues [][]int64 + +func (c BySliceValues) Len() int { return len(c) } +func (c BySliceValues) Less(i, j int) bool { + a, b := c[i], c[j] + l := len(a) + if len(b) < l { + l = len(b) + } + for k, v := range a[:l] { + if v < b[k] { + return true + } + if v > b[k] { + return false + } + } + return len(a) < len(b) +} +func (c BySliceValues) Swap(i, j int) { c[i], c[j] = c[j], c[i] } + +// BySliceIDs implements the sort.Interface sorting a slice of +// []graph.Node lexically by the IDs of the []graph.Node. +type BySliceIDs [][]graph.Node + +func (c BySliceIDs) Len() int { return len(c) } +func (c BySliceIDs) Less(i, j int) bool { + a, b := c[i], c[j] + l := len(a) + if len(b) < l { + l = len(b) + } + for k, v := range a[:l] { + if v.ID() < b[k].ID() { + return true + } + if v.ID() > b[k].ID() { + return false + } + } + return len(a) < len(b) +} +func (c BySliceIDs) Swap(i, j int) { c[i], c[j] = c[j], c[i] } + +// Int64s implements the sort.Interface sorting a slice of +// int64. +type Int64s []int64 + +func (s Int64s) Len() int { return len(s) } +func (s Int64s) Less(i, j int) bool { return s[i] < s[j] } +func (s Int64s) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +// Reverse reverses the order of nodes. 
+func Reverse(nodes []graph.Node) { + for i, j := 0, len(nodes)-1; i < j; i, j = i+1, j-1 { + nodes[i], nodes[j] = nodes[j], nodes[i] + } +} + +// LinesByIDs implements the sort.Interface sorting a slice of graph.LinesByIDs +// lexically by the From IDs, then by the To IDs, finally by the Line IDs. +type LinesByIDs []graph.Line + +func (n LinesByIDs) Len() int { return len(n) } +func (n LinesByIDs) Less(i, j int) bool { + a, b := n[i], n[j] + if a.From().ID() != b.From().ID() { + return a.From().ID() < b.From().ID() + } + if a.To().ID() != b.To().ID() { + return a.To().ID() < b.To().ID() + } + return n[i].ID() < n[j].ID() +} +func (n LinesByIDs) Swap(i, j int) { n[i], n[j] = n[j], n[i] } diff --git a/vendor/gonum.org/v1/gonum/graph/internal/set/doc.go b/vendor/gonum.org/v1/gonum/graph/internal/set/doc.go new file mode 100644 index 0000000..86f2afc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/set/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package set provides integer and graph.Node sets. +package set // import "gonum.org/v1/gonum/graph/internal/set" diff --git a/vendor/gonum.org/v1/gonum/graph/internal/set/same.go b/vendor/gonum.org/v1/gonum/graph/internal/set/same.go new file mode 100644 index 0000000..f95a4e1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/set/same.go @@ -0,0 +1,36 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !appengine,!safe + +package set + +import "unsafe" + +// same determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use unsafe to get +// the maps' pointer values to compare. +func same(a, b Nodes) bool { + return *(*uintptr)(unsafe.Pointer(&a)) == *(*uintptr)(unsafe.Pointer(&b)) +} + +// intsSame determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use unsafe to get +// the maps' pointer values to compare. +func intsSame(a, b Ints) bool { + return *(*uintptr)(unsafe.Pointer(&a)) == *(*uintptr)(unsafe.Pointer(&b)) +} + +// int64sSame determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use unsafe to get +// the maps' pointer values to compare. +func int64sSame(a, b Int64s) bool { + return *(*uintptr)(unsafe.Pointer(&a)) == *(*uintptr)(unsafe.Pointer(&b)) +} diff --git a/vendor/gonum.org/v1/gonum/graph/internal/set/same_appengine.go b/vendor/gonum.org/v1/gonum/graph/internal/set/same_appengine.go new file mode 100644 index 0000000..4ff4f4e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/set/same_appengine.go @@ -0,0 +1,36 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build appengine safe + +package set + +import "reflect" + +// same determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use reflect to get +// the maps' pointer values to compare. +func same(a, b Nodes) bool { + return reflect.ValueOf(a).Pointer() == reflect.ValueOf(b).Pointer() +} + +// intsSame determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use reflect to get +// the maps' pointer values to compare. +func intsSame(a, b Ints) bool { + return reflect.ValueOf(a).Pointer() == reflect.ValueOf(b).Pointer() +} + +// int64sSame determines whether two sets are backed by the same store. In the +// current implementation using hash maps it makes use of the fact that +// hash maps are passed as a pointer to a runtime Hmap struct. A map is +// not seen by the runtime as a pointer though, so we use reflect to get +// the maps' pointer values to compare. +func int64sSame(a, b Int64s) bool { + return reflect.ValueOf(a).Pointer() == reflect.ValueOf(b).Pointer() +} diff --git a/vendor/gonum.org/v1/gonum/graph/internal/set/set.go b/vendor/gonum.org/v1/gonum/graph/internal/set/set.go new file mode 100644 index 0000000..0506b8e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/set/set.go @@ -0,0 +1,228 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package set + +import "gonum.org/v1/gonum/graph" + +// Ints is a set of int identifiers. +type Ints map[int]struct{} + +// The simple accessor methods for Ints are provided to allow ease of +// implementation change should the need arise. + +// Add inserts an element into the set. +func (s Ints) Add(e int) { + s[e] = struct{}{} +} + +// Has reports the existence of the element in the set. +func (s Ints) Has(e int) bool { + _, ok := s[e] + return ok +} + +// Remove deletes the specified element from the set. +func (s Ints) Remove(e int) { + delete(s, e) +} + +// Count reports the number of elements stored in the set. +func (s Ints) Count() int { + return len(s) +} + +// IntsEqual reports set equality between the parameters. Sets are equal if +// and only if they have the same elements. +func IntsEqual(a, b Ints) bool { + if intsSame(a, b) { + return true + } + + if len(a) != len(b) { + return false + } + + for e := range a { + if _, ok := b[e]; !ok { + return false + } + } + + return true +} + +// Int64s is a set of int64 identifiers. +type Int64s map[int64]struct{} + +// The simple accessor methods for Ints are provided to allow ease of +// implementation change should the need arise. + +// Add inserts an element into the set. +func (s Int64s) Add(e int64) { + s[e] = struct{}{} +} + +// Has reports the existence of the element in the set. +func (s Int64s) Has(e int64) bool { + _, ok := s[e] + return ok +} + +// Remove deletes the specified element from the set. +func (s Int64s) Remove(e int64) { + delete(s, e) +} + +// Count reports the number of elements stored in the set. +func (s Int64s) Count() int { + return len(s) +} + +// Int64sEqual reports set equality between the parameters. 
Sets are equal if +// and only if they have the same elements. +func Int64sEqual(a, b Int64s) bool { + if int64sSame(a, b) { + return true + } + + if len(a) != len(b) { + return false + } + + for e := range a { + if _, ok := b[e]; !ok { + return false + } + } + + return true +} + +// Nodes is a set of nodes keyed in their integer identifiers. +type Nodes map[int64]graph.Node + +// NewNodes returns a new Nodes. +func NewNodes() Nodes { + return make(Nodes) +} + +// NewNodes returns a new Nodes with the given size hint, n. +func NewNodesSize(n int) Nodes { + return make(Nodes, n) +} + +// The simple accessor methods for Nodes are provided to allow ease of +// implementation change should the need arise. + +// Add inserts an element into the set. +func (s Nodes) Add(n graph.Node) { + s[n.ID()] = n +} + +// Remove deletes the specified element from the set. +func (s Nodes) Remove(e graph.Node) { + delete(s, e.ID()) +} + +// Count returns the number of element in the set. +func (s Nodes) Count() int { + return len(s) +} + +// Has reports the existence of the elements in the set. +func (s Nodes) Has(n graph.Node) bool { + _, ok := s[n.ID()] + return ok +} + +// CloneNodes returns a clone of src. +func CloneNodes(src Nodes) Nodes { + dst := make(Nodes, len(src)) + for e, n := range src { + dst[e] = n + } + return dst +} + +// Equal reports set equality between the parameters. Sets are equal if +// and only if they have the same elements. +func Equal(a, b Nodes) bool { + if same(a, b) { + return true + } + + if len(a) != len(b) { + return false + } + + for e := range a { + if _, ok := b[e]; !ok { + return false + } + } + + return true +} + +// UnionOfNodes returns the union of a and b. +// +// The union of two sets, a and b, is the set containing all the +// elements of each, for instance: +// +// {a,b,c} UNION {d,e,f} = {a,b,c,d,e,f} +// +// Since sets may not have repetition, unions of two sets that overlap +// do not contain repeat elements, that is: +// +// {a,b,c} UNION {b,c,d} = {a,b,c,d} +// +func UnionOfNodes(a, b Nodes) Nodes { + if same(a, b) { + return CloneNodes(a) + } + + dst := make(Nodes) + for e, n := range a { + dst[e] = n + } + for e, n := range b { + dst[e] = n + } + + return dst +} + +// IntersectionOfNodes returns the intersection of a and b. +// +// The intersection of two sets, a and b, is the set containing all +// the elements shared between the two sets, for instance: +// +// {a,b,c} INTERSECT {b,c,d} = {b,c} +// +// The intersection between a set and itself is itself, and thus +// effectively a copy operation: +// +// {a,b,c} INTERSECT {a,b,c} = {a,b,c} +// +// The intersection between two sets that share no elements is the empty +// set: +// +// {a,b,c} INTERSECT {d,e,f} = {} +// +func IntersectionOfNodes(a, b Nodes) Nodes { + if same(a, b) { + return CloneNodes(a) + } + dst := make(Nodes) + if len(a) > len(b) { + a, b = b, a + } + for e, n := range a { + if _, ok := b[e]; ok { + dst[e] = n + } + } + return dst +} diff --git a/vendor/gonum.org/v1/gonum/graph/internal/uid/uid.go b/vendor/gonum.org/v1/gonum/graph/internal/uid/uid.go new file mode 100644 index 0000000..5f503c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/internal/uid/uid.go @@ -0,0 +1,54 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package uid implements unique ID provision for graphs. 
+package uid + +import "gonum.org/v1/gonum/graph/internal/set" + +// Max is the maximum value of int64. +const Max = int64(^uint64(0) >> 1) + +// Set implements available ID storage. +type Set struct { + maxID int64 + used, free set.Int64s +} + +// NewSet returns a new Set. The returned value should not be passed except by pointer. +func NewSet() Set { + return Set{maxID: -1, used: make(set.Int64s), free: make(set.Int64s)} +} + +// NewID returns a new unique ID. The ID returned is not considered used +// until passed in a call to use. +func (s *Set) NewID() int64 { + for id := range s.free { + return id + } + if s.maxID != Max { + return s.maxID + 1 + } + for id := int64(0); id <= s.maxID+1; id++ { + if !s.used.Has(id) { + return id + } + } + panic("unreachable") +} + +// Use adds the id to the used IDs in the Set. +func (s *Set) Use(id int64) { + s.used.Add(id) + s.free.Remove(id) + if id > s.maxID { + s.maxID = id + } +} + +// Release frees the id for reuse. +func (s *Set) Release(id int64) { + s.free.Add(id) + s.used.Remove(id) +} diff --git a/vendor/gonum.org/v1/gonum/graph/iterator/doc.go b/vendor/gonum.org/v1/gonum/graph/iterator/doc.go new file mode 100644 index 0000000..0983bc7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/iterator/doc.go @@ -0,0 +1,9 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package iterator provides node, edge and line iterators. +// +// The iterators provided satisfy the graph.Nodes, graph.Edges and +// graph.Lines interfaces. +package iterator diff --git a/vendor/gonum.org/v1/gonum/graph/iterator/edges.go b/vendor/gonum.org/v1/gonum/graph/iterator/edges.go new file mode 100644 index 0000000..21ef043 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/iterator/edges.go @@ -0,0 +1,131 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package iterator + +import "gonum.org/v1/gonum/graph" + +// OrderedEdges implements the graph.Edges and graph.EdgeSlicer interfaces. +// The iteration order of OrderedEdges is the order of edges passed to +// NewEdgeIterator. +type OrderedEdges struct { + idx int + edges []graph.Edge +} + +// NewOrderedEdges returns an OrderedEdges initialized with the provided edges. +func NewOrderedEdges(edges []graph.Edge) *OrderedEdges { + return &OrderedEdges{idx: -1, edges: edges} +} + +// Len returns the remaining number of edges to be iterated over. +func (e *OrderedEdges) Len() int { + if e.idx >= len(e.edges) { + return 0 + } + if e.idx <= 0 { + return len(e.edges) + } + return len(e.edges[e.idx:]) +} + +// Next returns whether the next call of Edge will return a valid edge. +func (e *OrderedEdges) Next() bool { + if uint(e.idx)+1 < uint(len(e.edges)) { + e.idx++ + return true + } + e.idx = len(e.edges) + return false +} + +// Edge returns the current edge of the iterator. Next must have been +// called prior to a call to Edge. +func (e *OrderedEdges) Edge() graph.Edge { + if e.idx >= len(e.edges) || e.idx < 0 { + return nil + } + return e.edges[e.idx] +} + +// EdgeSlice returns all the remaining edges in the iterator and advances +// the iterator. 
+func (e *OrderedEdges) EdgeSlice() []graph.Edge { + if e.idx >= len(e.edges) { + return nil + } + idx := e.idx + if idx == -1 { + idx = 0 + } + e.idx = len(e.edges) + return e.edges[idx:] +} + +// Reset returns the iterator to its initial state. +func (e *OrderedEdges) Reset() { + e.idx = -1 +} + +// OrderedWeightedEdges implements the graph.Edges and graph.EdgeSlicer interfaces. +// The iteration order of OrderedWeightedEdges is the order of edges passed to +// NewEdgeIterator. +type OrderedWeightedEdges struct { + idx int + edges []graph.WeightedEdge +} + +// NewOrderedWeightedEdges returns an OrderedWeightedEdges initialized with the provided edges. +func NewOrderedWeightedEdges(edges []graph.WeightedEdge) *OrderedWeightedEdges { + return &OrderedWeightedEdges{idx: -1, edges: edges} +} + +// Len returns the remaining number of edges to be iterated over. +func (e *OrderedWeightedEdges) Len() int { + if e.idx >= len(e.edges) { + return 0 + } + if e.idx <= 0 { + return len(e.edges) + } + return len(e.edges[e.idx:]) +} + +// Next returns whether the next call of WeightedEdge will return a valid edge. +func (e *OrderedWeightedEdges) Next() bool { + if uint(e.idx)+1 < uint(len(e.edges)) { + e.idx++ + return true + } + e.idx = len(e.edges) + return false +} + +// WeightedEdge returns the current edge of the iterator. Next must have been +// called prior to a call to WeightedEdge. +func (e *OrderedWeightedEdges) WeightedEdge() graph.WeightedEdge { + if e.idx >= len(e.edges) || e.idx < 0 { + return nil + } + return e.edges[e.idx] +} + +// WeightedEdgeSlice returns all the remaining edges in the iterator and advances +// the iterator. +func (e *OrderedWeightedEdges) WeightedEdgeSlice() []graph.WeightedEdge { + if e.idx >= len(e.edges) { + return nil + } + idx := e.idx + if idx == -1 { + idx = 0 + } + e.idx = len(e.edges) + return e.edges[idx:] +} + +// Reset returns the iterator to its initial state. +func (e *OrderedWeightedEdges) Reset() { + e.idx = -1 +} diff --git a/vendor/gonum.org/v1/gonum/graph/iterator/lines.go b/vendor/gonum.org/v1/gonum/graph/iterator/lines.go new file mode 100644 index 0000000..ed655df --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/iterator/lines.go @@ -0,0 +1,131 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package iterator + +import "gonum.org/v1/gonum/graph" + +// OrderedLines implements the graph.Lines and graph.LineSlicer interfaces. +// The iteration order of OrderedLines is the order of lines passed to +// NewLineIterator. +type OrderedLines struct { + idx int + lines []graph.Line +} + +// NewOrderedLines returns an OrderedLines initialized with the provided lines. +func NewOrderedLines(lines []graph.Line) *OrderedLines { + return &OrderedLines{idx: -1, lines: lines} +} + +// Len returns the remaining number of lines to be iterated over. +func (e *OrderedLines) Len() int { + if e.idx >= len(e.lines) { + return 0 + } + if e.idx <= 0 { + return len(e.lines) + } + return len(e.lines[e.idx:]) +} + +// Next returns whether the next call of Line will return a valid line. +func (e *OrderedLines) Next() bool { + if uint(e.idx)+1 < uint(len(e.lines)) { + e.idx++ + return true + } + e.idx = len(e.lines) + return false +} + +// Line returns the current line of the iterator. Next must have been +// called prior to a call to Line. 
+func (e *OrderedLines) Line() graph.Line { + if e.idx >= len(e.lines) || e.idx < 0 { + return nil + } + return e.lines[e.idx] +} + +// LineSlice returns all the remaining lines in the iterator and advances +// the iterator. +func (e *OrderedLines) LineSlice() []graph.Line { + if e.idx >= len(e.lines) { + return nil + } + idx := e.idx + if idx == -1 { + idx = 0 + } + e.idx = len(e.lines) + return e.lines[idx:] +} + +// Reset returns the iterator to its initial state. +func (e *OrderedLines) Reset() { + e.idx = -1 +} + +// OrderedWeightedLines implements the graph.Lines and graph.LineSlicer interfaces. +// The iteration order of OrderedWeightedLines is the order of lines passed to +// NewLineIterator. +type OrderedWeightedLines struct { + idx int + lines []graph.WeightedLine +} + +// NewWeightedLineIterator returns an OrderedWeightedLines initialized with the provided lines. +func NewOrderedWeightedLines(lines []graph.WeightedLine) *OrderedWeightedLines { + return &OrderedWeightedLines{idx: -1, lines: lines} +} + +// Len returns the remaining number of lines to be iterated over. +func (e *OrderedWeightedLines) Len() int { + if e.idx >= len(e.lines) { + return 0 + } + if e.idx <= 0 { + return len(e.lines) + } + return len(e.lines[e.idx:]) +} + +// Next returns whether the next call of WeightedLine will return a valid line. +func (e *OrderedWeightedLines) Next() bool { + if uint(e.idx)+1 < uint(len(e.lines)) { + e.idx++ + return true + } + e.idx = len(e.lines) + return false +} + +// WeightedLine returns the current line of the iterator. Next must have been +// called prior to a call to WeightedLine. +func (e *OrderedWeightedLines) WeightedLine() graph.WeightedLine { + if e.idx >= len(e.lines) || e.idx < 0 { + return nil + } + return e.lines[e.idx] +} + +// WeightedLineSlice returns all the remaining lines in the iterator and advances +// the iterator. +func (e *OrderedWeightedLines) WeightedLineSlice() []graph.WeightedLine { + if e.idx >= len(e.lines) { + return nil + } + idx := e.idx + if idx == -1 { + idx = 0 + } + e.idx = len(e.lines) + return e.lines[idx:] +} + +// Reset returns the iterator to its initial state. +func (e *OrderedWeightedLines) Reset() { + e.idx = -1 +} diff --git a/vendor/gonum.org/v1/gonum/graph/iterator/nodes.go b/vendor/gonum.org/v1/gonum/graph/iterator/nodes.go new file mode 100644 index 0000000..952dd77 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/iterator/nodes.go @@ -0,0 +1,125 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package iterator + +import "gonum.org/v1/gonum/graph" + +// OrderedNodes implements the graph.Nodes and graph.NodeSlicer interfaces. +// The iteration order of OrderedNodes is the order of nodes passed to +// NewNodeIterator. +type OrderedNodes struct { + idx int + nodes []graph.Node +} + +// NewOrderedNodes returns a OrderedNodes initialized with the provided nodes. +func NewOrderedNodes(nodes []graph.Node) *OrderedNodes { + return &OrderedNodes{idx: -1, nodes: nodes} +} + +// Len returns the remaining number of nodes to be iterated over. +func (n *OrderedNodes) Len() int { + if n.idx >= len(n.nodes) { + return 0 + } + if n.idx <= 0 { + return len(n.nodes) + } + return len(n.nodes[n.idx:]) +} + +// Next returns whether the next call of Node will return a valid node. 
+func (n *OrderedNodes) Next() bool { + if uint(n.idx)+1 < uint(len(n.nodes)) { + n.idx++ + return true + } + n.idx = len(n.nodes) + return false +} + +// Node returns the current node of the iterator. Next must have been +// called prior to a call to Node. +func (n *OrderedNodes) Node() graph.Node { + if n.idx >= len(n.nodes) || n.idx < 0 { + return nil + } + return n.nodes[n.idx] +} + +// NodeSlice returns all the remaining nodes in the iterator and advances +// the iterator. +func (n *OrderedNodes) NodeSlice() []graph.Node { + if n.idx >= len(n.nodes) { + return nil + } + idx := n.idx + if idx == -1 { + idx = 0 + } + n.idx = len(n.nodes) + return n.nodes[idx:] +} + +// Reset returns the iterator to its initial state. +func (n *OrderedNodes) Reset() { + n.idx = -1 +} + +// ImplicitNodes implements the graph.Nodes interface for a set of nodes over +// a contiguous ID range. +type ImplicitNodes struct { + beg, end int + curr int + newNode func(id int) graph.Node +} + +// NewImplicitNodes returns a new implicit node iterator spanning nodes in [beg,end). +// The provided new func maps the id to a graph.Node. NewImplicitNodes will panic +// if beg is greater than end. +func NewImplicitNodes(beg, end int, new func(id int) graph.Node) *ImplicitNodes { + if beg > end { + panic("iterator: invalid range") + } + return &ImplicitNodes{beg: beg, end: end, curr: beg - 1, newNode: new} +} + +// Len returns the remaining number of nodes to be iterated over. +func (n *ImplicitNodes) Len() int { + return n.end - n.curr - 1 +} + +// Next returns whether the next call of Node will return a valid node. +func (n *ImplicitNodes) Next() bool { + if n.curr == n.end { + return false + } + n.curr++ + return n.curr < n.end +} + +// Node returns the current node of the iterator. Next must have been +// called prior to a call to Node. +func (n *ImplicitNodes) Node() graph.Node { + if n.Len() == -1 || n.curr < n.beg { + return nil + } + return n.newNode(n.curr) +} + +// Reset returns the iterator to its initial state. +func (n *ImplicitNodes) Reset() { + n.curr = n.beg - 1 +} + +// NodeSlice returns all the remaining nodes in the iterator and advances +// the iterator. +func (n *ImplicitNodes) NodeSlice() []graph.Node { + nodes := make([]graph.Node, 0, n.Len()) + for n.curr++; n.curr < n.end; n.curr++ { + nodes = append(nodes, n.newNode(n.curr)) + } + return nodes +} diff --git a/vendor/gonum.org/v1/gonum/graph/multi/directed.go b/vendor/gonum.org/v1/gonum/graph/multi/directed.go new file mode 100644 index 0000000..c63deb2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/directed.go @@ -0,0 +1,278 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package multi + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + dg *DirectedGraph + + _ graph.Graph = dg + _ graph.Directed = dg + _ graph.Multigraph = dg + _ graph.DirectedMultigraph = dg + _ graph.NodeAdder = dg + _ graph.NodeRemover = dg + _ graph.LineAdder = dg + _ graph.LineRemover = dg +) + +// DirectedGraph implements a generalized directed graph. +type DirectedGraph struct { + nodes map[int64]graph.Node + from map[int64]map[int64]map[int64]graph.Line + to map[int64]map[int64]map[int64]graph.Line + + nodeIDs uid.Set + lineIDs uid.Set +} + +// NewDirectedGraph returns a DirectedGraph. 
+func NewDirectedGraph() *DirectedGraph { + return &DirectedGraph{ + nodes: make(map[int64]graph.Node), + from: make(map[int64]map[int64]map[int64]graph.Line), + to: make(map[int64]map[int64]map[int64]graph.Line), + + nodeIDs: uid.NewSet(), + lineIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *DirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.from[n.ID()] = make(map[int64]map[int64]graph.Line) + g.to[n.ID()] = make(map[int64]map[int64]graph.Line) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.Edge is a multi.Edge if an edge exists. +func (g *DirectedGraph) Edge(uid, vid int64) graph.Edge { + l := g.Lines(uid, vid) + if l == nil { + return nil + } + return Edge{F: g.Node(uid), T: g.Node(vid), Lines: l} +} + +// Edges returns all the edges in the graph. Each edge in the returned slice +// is a multi.Edge. +func (g *DirectedGraph) Edges() graph.Edges { + if len(g.nodes) == 0 { + return graph.Empty + } + var edges []graph.Edge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + var lines []graph.Line + for _, l := range e { + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, Edge{ + F: g.Node(u.ID()), + T: g.Node(lines[0].To().ID()), + Lines: iterator.NewOrderedLines(lines), + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *DirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + from := make([]graph.Node, len(g.from[id])) + i := 0 + for vid := range g.from[id] { + from[i] = g.nodes[vid] + i++ + } + if len(from) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(from) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *DirectedGraph) HasEdgeBetween(xid, yid int64) bool { + if _, ok := g.from[xid][yid]; ok { + return true + } + _, ok := g.from[yid][xid] + return ok +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *DirectedGraph) HasEdgeFromTo(uid, vid int64) bool { + _, ok := g.from[uid][vid] + return ok +} + +// Lines returns the lines from u to v if such any such lines exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *DirectedGraph) Lines(uid, vid int64) graph.Lines { + edge := g.from[uid][vid] + if len(edge) == 0 { + return graph.Empty + } + var lines []graph.Line + for _, l := range edge { + lines = append(lines, l) + } + return iterator.NewOrderedLines(lines) +} + +// NewLine returns a new Line from the source to the destination node. +// The returned Line will have a graph-unique ID. +// The Line's ID does not become valid in g until the Line is added to g. +func (g *DirectedGraph) NewLine(from, to graph.Node) graph.Line { + return &Line{F: from, T: to, UID: g.lineIDs.NewID()} +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. 
+func (g *DirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *DirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *DirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveLine removes the line with the given end point and line IDs from the graph, leaving +// the terminal nodes. If the line does not exist it is a no-op. +func (g *DirectedGraph) RemoveLine(fid, tid, id int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.from[fid][tid], id) + if len(g.from[fid][tid]) == 0 { + delete(g.from[fid], tid) + } + delete(g.to[tid][fid], id) + if len(g.to[tid][fid]) == 0 { + delete(g.to[tid], fid) + } + g.lineIDs.Release(id) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *DirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.from[id] { + delete(g.to[from], id) + } + delete(g.from, id) + + for to := range g.to[id] { + delete(g.from[to], id) + } + delete(g.to, id) + + g.nodeIDs.Release(id) +} + +// SetLine adds l, a line from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the line otherwise. +func (g *DirectedGraph) SetLine(l graph.Line) { + var ( + from = l.From() + fid = from.ID() + to = l.To() + tid = to.ID() + lid = l.ID() + ) + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if g.from[fid][tid] == nil { + g.from[fid][tid] = make(map[int64]graph.Line) + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + if g.to[tid][fid] == nil { + g.to[tid][fid] = make(map[int64]graph.Line) + } + + g.from[fid][tid][lid] = l + g.to[tid][fid][lid] = l + g.lineIDs.Use(lid) +} + +// To returns all nodes in g that can reach directly to n. +func (g *DirectedGraph) To(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + to := make([]graph.Node, len(g.to[id])) + i := 0 + for uid := range g.to[id] { + to[i] = g.nodes[uid] + i++ + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} diff --git a/vendor/gonum.org/v1/gonum/graph/multi/doc.go b/vendor/gonum.org/v1/gonum/graph/multi/doc.go new file mode 100644 index 0000000..7a3c267 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/doc.go @@ -0,0 +1,9 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package multi provides a suite of multigraph implementations satisfying +// the gonum/graph interfaces. +// +// All types in multi return the graph.Empty value for empty iterators. 
+package multi // import "gonum.org/v1/gonum/graph/multi" diff --git a/vendor/gonum.org/v1/gonum/graph/multi/multi.go b/vendor/gonum.org/v1/gonum/graph/multi/multi.go new file mode 100644 index 0000000..d87f2cd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/multi.go @@ -0,0 +1,130 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package multi + +import "gonum.org/v1/gonum/graph" + +// Node here is a duplication of simple.Node +// to avoid needing to import both packages. + +// Node is a simple graph node. +type Node int64 + +// ID returns the ID number of the node. +func (n Node) ID() int64 { + return int64(n) +} + +// Edge is a collection of multigraph edges sharing end points. +type Edge struct { + F, T graph.Node + + graph.Lines +} + +// From returns the from-node of the edge. +func (e Edge) From() graph.Node { return e.F } + +// To returns the to-node of the edge. +func (e Edge) To() graph.Node { return e.T } + +// ReversedEdge returns a new Edge with the F and T fields +// swapped. +func (e Edge) ReversedEdge() graph.Edge { return Edge{F: e.T, T: e.F} } + +// Line is a multigraph edge. +type Line struct { + F, T graph.Node + + UID int64 +} + +// From returns the from-node of the line. +func (l Line) From() graph.Node { return l.F } + +// To returns the to-node of the line. +func (l Line) To() graph.Node { return l.T } + +// ReversedLine returns a new Line with the F and T fields +// swapped. The UID of the new Line is the same as the +// UID of the receiver. The Lines within the Edge are +// not altered. +func (l Line) ReversedLine() graph.Line { l.F, l.T = l.T, l.F; return l } + +// ID returns the ID of the line. +func (l Line) ID() int64 { return l.UID } + +// WeightedEdge is a collection of weighted multigraph edges sharing end points. +type WeightedEdge struct { + F, T graph.Node + + graph.WeightedLines + + // WeightFunc calculates the aggregate + // weight of the lines in Lines. If + // WeightFunc is nil, the sum of weights + // is used as the edge weight. + // The graph.WeightedLines can be expected + // to be positioned at the first line of + // the iterator on entry and must be + // Reset before exit. + // WeightFunc must accept a nil input. + WeightFunc func(graph.WeightedLines) float64 +} + +// From returns the from-node of the edge. +func (e WeightedEdge) From() graph.Node { return e.F } + +// To returns the to-node of the edge. +func (e WeightedEdge) To() graph.Node { return e.T } + +// ReversedEdge returns a new Edge with the F and T fields +// swapped. The Lines within the WeightedEdge are not +// altered. +func (e WeightedEdge) ReversedEdge() graph.Edge { e.F, e.T = e.T, e.F; return e } + +// Weight returns the weight of the edge. Weight uses WeightFunc +// field to calculate the weight, so the WeightedLines field is +// expected to be positioned at the first line and is reset before +// Weight returns. +func (e WeightedEdge) Weight() float64 { + if e.WeightFunc != nil { + return e.WeightFunc(e.WeightedLines) + } + if e.WeightedLines == nil { + return 0 + } + var w float64 + for e.Next() { + w += e.WeightedLine().Weight() + } + e.WeightedLines.Reset() + return w +} + +// WeightedLine is a weighted multigraph edge. +type WeightedLine struct { + F, T graph.Node + W float64 + + UID int64 +} + +// From returns the from-node of the line. +func (l WeightedLine) From() graph.Node { return l.F } + +// To returns the to-node of the line. 
+func (l WeightedLine) To() graph.Node { return l.T } + +// ReversedLine returns a new Line with the F and T fields +// swapped. The UID and W of the new Line are the same as the +// UID and W of the receiver. +func (l WeightedLine) ReversedLine() graph.Line { l.F, l.T = l.T, l.F; return l } + +// ID returns the ID of the line. +func (l WeightedLine) ID() int64 { return l.UID } + +// Weight returns the weight of the edge. +func (l WeightedLine) Weight() float64 { return l.W } diff --git a/vendor/gonum.org/v1/gonum/graph/multi/undirected.go b/vendor/gonum.org/v1/gonum/graph/multi/undirected.go new file mode 100644 index 0000000..fac8462 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/undirected.go @@ -0,0 +1,260 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package multi + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + ug *UndirectedGraph + + _ graph.Graph = ug + _ graph.Undirected = ug + _ graph.Multigraph = ug + _ graph.UndirectedMultigraph = ug + _ graph.NodeAdder = ug + _ graph.NodeRemover = ug + _ graph.LineAdder = ug + _ graph.LineRemover = ug +) + +// UndirectedGraph implements a generalized undirected graph. +type UndirectedGraph struct { + nodes map[int64]graph.Node + lines map[int64]map[int64]map[int64]graph.Line + + nodeIDs uid.Set + lineIDs uid.Set +} + +// NewUndirectedGraph returns an UndirectedGraph. +func NewUndirectedGraph() *UndirectedGraph { + return &UndirectedGraph{ + nodes: make(map[int64]graph.Node), + lines: make(map[int64]map[int64]map[int64]graph.Line), + + nodeIDs: uid.NewSet(), + lineIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *UndirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.lines[n.ID()] = make(map[int64]map[int64]graph.Line) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.Edge is a multi.Edge if an edge exists. +func (g *UndirectedGraph) Edge(uid, vid int64) graph.Edge { + l := g.LinesBetween(uid, vid) + if l == nil { + return nil + } + return Edge{F: g.Node(uid), T: g.Node(vid), Lines: l} +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *UndirectedGraph) EdgeBetween(xid, yid int64) graph.Edge { + return g.Edge(xid, yid) +} + +// Edges returns all the edges in the graph. Each edge in the returned slice +// is a multi.Edge. +func (g *UndirectedGraph) Edges() graph.Edges { + if len(g.lines) == 0 { + return graph.Empty + } + var edges []graph.Edge + seen := make(map[int64]struct{}) + for _, u := range g.lines { + for _, e := range u { + var lines []graph.Line + for _, l := range e { + lid := l.ID() + if _, ok := seen[lid]; ok { + continue + } + seen[lid] = struct{}{} + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, Edge{ + F: g.Node(lines[0].From().ID()), + T: g.Node(lines[0].To().ID()), + Lines: iterator.NewOrderedLines(lines), + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. 
+func (g *UndirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.nodes[id]; !ok { + return graph.Empty + } + + nodes := make([]graph.Node, len(g.lines[id])) + i := 0 + for from := range g.lines[id] { + nodes[i] = g.nodes[from] + i++ + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *UndirectedGraph) HasEdgeBetween(xid, yid int64) bool { + _, ok := g.lines[xid][yid] + return ok +} + +// Lines returns the lines from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedGraph) Lines(uid, vid int64) graph.Lines { + return g.LinesBetween(uid, vid) +} + +// LinesBetween returns the lines between nodes x and y. +func (g *UndirectedGraph) LinesBetween(xid, yid int64) graph.Lines { + if !g.HasEdgeBetween(xid, yid) { + return graph.Empty + } + var lines []graph.Line + for _, l := range g.lines[xid][yid] { + if l.From().ID() != xid { + l = l.ReversedLine() + } + lines = append(lines, l) + } + return iterator.NewOrderedLines(lines) +} + +// NewLine returns a new Line from the source to the destination node. +// The returned Line will have a graph-unique ID. +// The Line's ID does not become valid in g until the Line is added to g. +func (g *UndirectedGraph) NewLine(from, to graph.Node) graph.Line { + return &Line{F: from, T: to, UID: g.lineIDs.NewID()} +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *UndirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *UndirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *UndirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveLine removes the line with the given end point and line Ids from the graph, leaving +// the terminal nodes. If the line does not exist it is a no-op. +func (g *UndirectedGraph) RemoveLine(fid, tid, id int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.lines[fid][tid], id) + if len(g.lines[fid][tid]) == 0 { + delete(g.lines[fid], tid) + } + delete(g.lines[tid][fid], id) + if len(g.lines[tid][fid]) == 0 { + delete(g.lines[tid], fid) + } + g.lineIDs.Release(id) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *UndirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.lines[id] { + delete(g.lines[from], id) + } + delete(g.lines, id) + + g.nodeIDs.Release(id) +} + +// SetLine adds l, a line from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the line otherwise. 
+func (g *UndirectedGraph) SetLine(l graph.Line) { + var ( + from = l.From() + fid = from.ID() + to = l.To() + tid = to.ID() + lid = l.ID() + ) + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if g.lines[fid][tid] == nil { + g.lines[fid][tid] = make(map[int64]graph.Line) + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + if g.lines[tid][fid] == nil { + g.lines[tid][fid] = make(map[int64]graph.Line) + } + + g.lines[fid][tid][lid] = l + g.lines[tid][fid][lid] = l + g.lineIDs.Use(lid) +} diff --git a/vendor/gonum.org/v1/gonum/graph/multi/weighted_directed.go b/vendor/gonum.org/v1/gonum/graph/multi/weighted_directed.go new file mode 100644 index 0000000..f856c01 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/weighted_directed.go @@ -0,0 +1,352 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package multi + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + wdg *WeightedDirectedGraph + + _ graph.Graph = wdg + _ graph.Weighted = wdg + _ graph.Directed = wdg + _ graph.WeightedDirected = wdg + _ graph.Multigraph = wdg + _ graph.DirectedMultigraph = wdg + _ graph.WeightedDirectedMultigraph = wdg + _ graph.NodeAdder = wdg + _ graph.NodeRemover = wdg + _ graph.WeightedLineAdder = wdg + _ graph.LineRemover = wdg +) + +// WeightedDirectedGraph implements a generalized directed graph. +type WeightedDirectedGraph struct { + // EdgeWeightFunc is used to provide + // the WeightFunc function for WeightedEdge + // values returned by the graph. + // WeightFunc must accept a nil input. + EdgeWeightFunc func(graph.WeightedLines) float64 + + nodes map[int64]graph.Node + from map[int64]map[int64]map[int64]graph.WeightedLine + to map[int64]map[int64]map[int64]graph.WeightedLine + + nodeIDs uid.Set + lineIDs uid.Set +} + +// NewWeightedDirectedGraph returns a WeightedDirectedGraph. +func NewWeightedDirectedGraph() *WeightedDirectedGraph { + return &WeightedDirectedGraph{ + nodes: make(map[int64]graph.Node), + from: make(map[int64]map[int64]map[int64]graph.WeightedLine), + to: make(map[int64]map[int64]map[int64]graph.WeightedLine), + + nodeIDs: uid.NewSet(), + lineIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *WeightedDirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.from[n.ID()] = make(map[int64]map[int64]graph.WeightedLine) + g.to[n.ID()] = make(map[int64]map[int64]graph.WeightedLine) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.Edge is a multi.WeightedEdge if an edge exists. +func (g *WeightedDirectedGraph) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// Edges returns all the edges in the graph. Each edge in the returned slice +// is a multi.WeightedEdge. 
+func (g *WeightedDirectedGraph) Edges() graph.Edges { + if len(g.nodes) == 0 { + return graph.Empty + } + var edges []graph.Edge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + var lines []graph.WeightedLine + for _, l := range e { + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, WeightedEdge{ + F: g.Node(u.ID()), + T: g.Node(lines[0].To().ID()), + WeightedLines: iterator.NewOrderedWeightedLines(lines), + WeightFunc: g.EdgeWeightFunc, + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *WeightedDirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + from := make([]graph.Node, len(g.from[id])) + i := 0 + for vid := range g.from[id] { + from[i] = g.nodes[vid] + i++ + } + if len(from) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(from) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *WeightedDirectedGraph) HasEdgeBetween(xid, yid int64) bool { + if _, ok := g.from[xid][yid]; ok { + return true + } + _, ok := g.from[yid][xid] + return ok +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *WeightedDirectedGraph) HasEdgeFromTo(uid, vid int64) bool { + if _, ok := g.from[uid][vid]; !ok { + return false + } + return true +} + +// Lines returns the lines from u to v if such any such lines exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedDirectedGraph) Lines(uid, vid int64) graph.Lines { + edge := g.from[uid][vid] + if len(edge) == 0 { + return graph.Empty + } + var lines []graph.Line + for _, l := range edge { + lines = append(lines, l) + } + return iterator.NewOrderedLines(lines) +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *WeightedDirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// NewWeightedLine returns a new WeightedLine from the source to the destination node. +// The returned WeightedLine will have a graph-unique ID. +// The Line's ID does not become valid in g until the Line is added to g. +func (g *WeightedDirectedGraph) NewWeightedLine(from, to graph.Node, weight float64) graph.WeightedLine { + return &WeightedLine{F: from, T: to, W: weight, UID: g.lineIDs.NewID()} +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *WeightedDirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *WeightedDirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + + return iterator.NewOrderedNodes(nodes) +} + +// RemoveLine removes the line with the given end point and line IDs from the graph, +// leaving the terminal nodes. If the line does not exist it is a no-op. 
+func (g *WeightedDirectedGraph) RemoveLine(fid, tid, id int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.from[fid][tid], id) + if len(g.from[fid][tid]) == 0 { + delete(g.from[fid], tid) + } + delete(g.to[tid][fid], id) + if len(g.to[tid][fid]) == 0 { + delete(g.to[tid], fid) + } + g.lineIDs.Release(id) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *WeightedDirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.from[id] { + delete(g.to[from], id) + } + delete(g.from, id) + + for to := range g.to[id] { + delete(g.from[to], id) + } + delete(g.to, id) + + g.nodeIDs.Release(id) +} + +// SetWeightedLine adds l, a line from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the line otherwise. +func (g *WeightedDirectedGraph) SetWeightedLine(l graph.WeightedLine) { + var ( + from = l.From() + fid = from.ID() + to = l.To() + tid = to.ID() + lid = l.ID() + ) + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if g.from[fid][tid] == nil { + g.from[fid][tid] = make(map[int64]graph.WeightedLine) + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + if g.to[tid][fid] == nil { + g.to[tid][fid] = make(map[int64]graph.WeightedLine) + } + + g.from[fid][tid][lid] = l + g.to[tid][fid][lid] = l + g.lineIDs.Use(l.ID()) +} + +// To returns all nodes in g that can reach directly to n. +func (g *WeightedDirectedGraph) To(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + to := make([]graph.Node, len(g.to[id])) + i := 0 + for uid := range g.to[id] { + to[i] = g.nodes[uid] + i++ + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} + +// Weight returns the weight for the lines between x and y summarised by the receiver's +// EdgeWeightFunc. Weight returns true if an edge exists between x and y, false otherwise. +func (g *WeightedDirectedGraph) Weight(uid, vid int64) (w float64, ok bool) { + lines := g.WeightedLines(uid, vid) + return WeightedEdge{WeightedLines: lines, WeightFunc: g.EdgeWeightFunc}.Weight(), lines != graph.Empty +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.WeightedEdge is a multi.WeightedEdge if an edge exists. +func (g *WeightedDirectedGraph) WeightedEdge(uid, vid int64) graph.WeightedEdge { + lines := g.WeightedLines(uid, vid) + if lines == graph.Empty { + return nil + } + return WeightedEdge{ + F: g.Node(uid), T: g.Node(vid), + WeightedLines: lines, + WeightFunc: g.EdgeWeightFunc, + } +} + +// WeightedEdges returns all the edges in the graph. Each edge in the returned slice +// is a multi.WeightedEdge. 
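// A minimal consumer-side sketch (assumed example code, not part of the vendored
// sources): with EdgeWeightFunc left nil, Weight sums the weights of all parallel
// lines between the two nodes, as the WeightedEdge documentation above describes.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/multi"
)

func main() {
	g := multi.NewWeightedDirectedGraph()
	// SetWeightedLine adds end nodes that are not yet present in the graph.
	g.SetWeightedLine(g.NewWeightedLine(multi.Node(1), multi.Node(2), 0.5))
	g.SetWeightedLine(g.NewWeightedLine(multi.Node(1), multi.Node(2), 1.5))

	w, ok := g.Weight(1, 2)
	fmt.Println(w, ok) // 2 true: the two parallel line weights are summed.
}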
+func (g *WeightedDirectedGraph) WeightedEdges() graph.WeightedEdges { + if len(g.nodes) == 0 { + return graph.Empty + } + var edges []graph.WeightedEdge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + var lines []graph.WeightedLine + for _, l := range e { + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, WeightedEdge{ + F: g.Node(u.ID()), + T: g.Node(lines[0].To().ID()), + WeightedLines: iterator.NewOrderedWeightedLines(lines), + WeightFunc: g.EdgeWeightFunc, + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} + +// WeightedLines returns the weighted lines from u to v if such any such lines exists +// and nil otherwise. The node v must be directly reachable from u as defined by the From method. +func (g *WeightedDirectedGraph) WeightedLines(uid, vid int64) graph.WeightedLines { + edge := g.from[uid][vid] + if len(edge) == 0 { + return graph.Empty + } + var lines []graph.WeightedLine + for _, l := range edge { + lines = append(lines, l) + } + return iterator.NewOrderedWeightedLines(lines) +} diff --git a/vendor/gonum.org/v1/gonum/graph/multi/weighted_undirected.go b/vendor/gonum.org/v1/gonum/graph/multi/weighted_undirected.go new file mode 100644 index 0000000..bce322f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multi/weighted_undirected.go @@ -0,0 +1,360 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package multi + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + wug *WeightedUndirectedGraph + + _ graph.Graph = wug + _ graph.Weighted = wug + _ graph.Undirected = wug + _ graph.WeightedUndirected = wug + _ graph.Multigraph = wug + _ graph.UndirectedMultigraph = wug + _ graph.WeightedUndirectedMultigraph = wug + _ graph.NodeAdder = wug + _ graph.NodeRemover = wug + _ graph.WeightedLineAdder = wug + _ graph.LineRemover = wug +) + +// WeightedUndirectedGraph implements a generalized undirected graph. +type WeightedUndirectedGraph struct { + // EdgeWEightFunc is used to provide + // the WeightFunc function for WeightedEdge + // values returned by the graph. + // WeightFunc must accept a nil input. + EdgeWeightFunc func(graph.WeightedLines) float64 + + nodes map[int64]graph.Node + lines map[int64]map[int64]map[int64]graph.WeightedLine + + nodeIDs uid.Set + lineIDs uid.Set +} + +// NewWeightedUndirectedGraph returns an WeightedUndirectedGraph. +func NewWeightedUndirectedGraph() *WeightedUndirectedGraph { + return &WeightedUndirectedGraph{ + nodes: make(map[int64]graph.Node), + lines: make(map[int64]map[int64]map[int64]graph.WeightedLine), + + nodeIDs: uid.NewSet(), + lineIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *WeightedUndirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.lines[n.ID()] = make(map[int64]map[int64]graph.WeightedLine) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.Edge is a multi.WeightedEdge if an edge exists. 
+func (g *WeightedUndirectedGraph) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *WeightedUndirectedGraph) EdgeBetween(xid, yid int64) graph.Edge { + return g.WeightedEdge(xid, yid) +} + +// Edges returns all the edges in the graph. Each edge in the returned slice +// is a multi.Edge. +func (g *WeightedUndirectedGraph) Edges() graph.Edges { + if len(g.lines) == 0 { + return graph.Empty + } + var edges []graph.Edge + seen := make(map[int64]struct{}) + for _, u := range g.lines { + for _, e := range u { + var lines []graph.WeightedLine + for _, l := range e { + lid := l.ID() + if _, ok := seen[lid]; ok { + continue + } + seen[lid] = struct{}{} + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, WeightedEdge{ + F: g.Node(lines[0].From().ID()), + T: g.Node(lines[0].To().ID()), + WeightedLines: iterator.NewOrderedWeightedLines(lines), + WeightFunc: g.EdgeWeightFunc, + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *WeightedUndirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.nodes[id]; !ok { + return graph.Empty + } + + nodes := make([]graph.Node, len(g.lines[id])) + i := 0 + for from := range g.lines[id] { + nodes[i] = g.nodes[from] + i++ + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *WeightedUndirectedGraph) HasEdgeBetween(xid, yid int64) bool { + _, ok := g.lines[xid][yid] + return ok +} + +// Lines returns the lines from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedUndirectedGraph) Lines(uid, vid int64) graph.Lines { + return g.LinesBetween(uid, vid) +} + +// LinesBetween returns the lines between nodes x and y. +func (g *WeightedUndirectedGraph) LinesBetween(xid, yid int64) graph.Lines { + edge := g.lines[xid][yid] + if len(edge) == 0 { + return graph.Empty + } + var lines []graph.Line + seen := make(map[int64]struct{}) + for _, l := range edge { + lid := l.ID() + if _, ok := seen[lid]; ok { + continue + } + seen[lid] = struct{}{} + lines = append(lines, l) + } + return iterator.NewOrderedLines(lines) +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *WeightedUndirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// NewWeightedLine returns a new WeightedLine from the source to the destination node. +// The returned WeightedLine will have a graph-unique ID. +// The Line's ID does not become valid in g until the Line is added to g. +func (g *WeightedUndirectedGraph) NewWeightedLine(from, to graph.Node, weight float64) graph.WeightedLine { + return &WeightedLine{F: from, T: to, W: weight, UID: g.lineIDs.NewID()} +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *WeightedUndirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. 
+func (g *WeightedUndirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveLine removes the line with the given end point and line IDs from the graph, +// leaving the terminal nodes. If the line does not exist it is a no-op. +func (g *WeightedUndirectedGraph) RemoveLine(fid, tid, id int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.lines[fid][tid], id) + if len(g.lines[fid][tid]) == 0 { + delete(g.lines[fid], tid) + } + delete(g.lines[tid][fid], id) + if len(g.lines[tid][fid]) == 0 { + delete(g.lines[tid], fid) + } + g.lineIDs.Release(id) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *WeightedUndirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.lines[id] { + delete(g.lines[from], id) + } + delete(g.lines, id) + + g.nodeIDs.Release(id) +} + +// SetWeightedLine adds l, a line from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the line otherwise. +func (g *WeightedUndirectedGraph) SetWeightedLine(l graph.WeightedLine) { + var ( + from = l.From() + fid = from.ID() + to = l.To() + tid = to.ID() + lid = l.ID() + ) + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if g.lines[fid][tid] == nil { + g.lines[fid][tid] = make(map[int64]graph.WeightedLine) + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + if g.lines[tid][fid] == nil { + g.lines[tid][fid] = make(map[int64]graph.WeightedLine) + } + + g.lines[fid][tid][lid] = l + g.lines[tid][fid][lid] = l + g.lineIDs.Use(lid) +} + +// Weight returns the weight for the lines between x and y summarised by the receiver's +// EdgeWeightFunc. Weight returns true if an edge exists between x and y, false otherwise. +func (g *WeightedUndirectedGraph) Weight(xid, yid int64) (w float64, ok bool) { + lines := g.WeightedLines(xid, yid) + return WeightedEdge{WeightedLines: lines, WeightFunc: g.EdgeWeightFunc}.Weight(), lines != graph.Empty +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// The returned graph.WeightedEdge is a multi.WeightedEdge if an edge exists. +func (g *WeightedUndirectedGraph) WeightedEdge(uid, vid int64) graph.WeightedEdge { + lines := g.WeightedLines(uid, vid) + if lines == graph.Empty { + return nil + } + return WeightedEdge{ + F: g.Node(uid), T: g.Node(vid), + WeightedLines: lines, + WeightFunc: g.EdgeWeightFunc, + } +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. +func (g *WeightedUndirectedGraph) WeightedEdgeBetween(xid, yid int64) graph.WeightedEdge { + return g.WeightedEdge(xid, yid) +} + +// WeightedEdges returns all the edges in the graph. Each edge in the returned slice +// is a multi.Edge. 
+func (g *WeightedUndirectedGraph) WeightedEdges() graph.WeightedEdges { + if len(g.lines) == 0 { + return graph.Empty + } + var edges []graph.WeightedEdge + seen := make(map[int64]struct{}) + for _, u := range g.lines { + for _, e := range u { + var lines []graph.WeightedLine + for _, l := range e { + lid := l.ID() + if _, ok := seen[lid]; ok { + continue + } + seen[lid] = struct{}{} + lines = append(lines, l) + } + if len(lines) != 0 { + edges = append(edges, WeightedEdge{ + F: g.Node(lines[0].From().ID()), + T: g.Node(lines[0].To().ID()), + WeightedLines: iterator.NewOrderedWeightedLines(lines), + WeightFunc: g.EdgeWeightFunc, + }) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} + +// WeightedLines returns the lines from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedUndirectedGraph) WeightedLines(uid, vid int64) graph.WeightedLines { + return g.WeightedLinesBetween(uid, vid) +} + +// WeightedLinesBetween returns the lines between nodes x and y. +func (g *WeightedUndirectedGraph) WeightedLinesBetween(xid, yid int64) graph.WeightedLines { + edge := g.lines[xid][yid] + if len(edge) == 0 { + return graph.Empty + } + var lines []graph.WeightedLine + seen := make(map[int64]struct{}) + for _, l := range edge { + lid := l.ID() + if _, ok := seen[lid]; ok { + continue + } + seen[lid] = struct{}{} + if l.From().ID() != xid { + l = l.ReversedLine().(graph.WeightedLine) + } + lines = append(lines, l) + } + return iterator.NewOrderedWeightedLines(lines) +} diff --git a/vendor/gonum.org/v1/gonum/graph/multigraph.go b/vendor/gonum.org/v1/gonum/graph/multigraph.go new file mode 100644 index 0000000..038a3d5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/multigraph.go @@ -0,0 +1,198 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +// Line is an edge in a multigraph. A Line returns an ID that must +// distinguish Lines sharing Node end points. +type Line interface { + // From returns the from node of the edge. + From() Node + + // To returns the to node of the edge. + To() Node + + // ReversedLine returns a line that has the + // end points of the receiver swapped. + ReversedLine() Line + + // ID returns the unique ID for the Line. + ID() int64 +} + +// WeightedLine is a weighted multigraph edge. +type WeightedLine interface { + Line + Weight() float64 +} + +// Multigraph is a generalized multigraph. +type Multigraph interface { + // Node returns the node with the given ID if it exists + // in the multigraph, and nil otherwise. + Node(id int64) Node + + // Nodes returns all the nodes in the multigraph. + // + // Nodes must not return nil. + Nodes() Nodes + + // From returns all nodes that can be reached directly + // from the node with the given ID. + // + // From must not return nil. + From(id int64) Nodes + + // HasEdgeBetween returns whether an edge exists between + // nodes with IDs xid and yid without considering direction. + HasEdgeBetween(xid, yid int64) bool + + // Lines returns the lines from u to v, with IDs uid and + // vid, if any such lines exist and nil otherwise. The + // node v must be directly reachable from u as defined by + // the From method. + // + // Lines must not return nil. + Lines(uid, vid int64) Lines +} + +// WeightedMultigraph is a weighted multigraph. 
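// A minimal consumer-side sketch (assumed example code; node IDs are arbitrary):
// the multi.UndirectedGraph vendored above satisfies the Multigraph interface,
// and parallel lines between the same pair of nodes stay distinct by line ID.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph"
	"gonum.org/v1/gonum/graph/multi"
)

func main() {
	g := multi.NewUndirectedGraph()
	// SetLine adds end nodes that are not yet present in the graph.
	g.SetLine(g.NewLine(multi.Node(1), multi.Node(2)))
	g.SetLine(g.NewLine(multi.Node(1), multi.Node(2)))

	var mg graph.Multigraph = g
	fmt.Println(mg.Lines(1, 2).Len()) // 2: two parallel lines between nodes 1 and 2.
}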
+type WeightedMultigraph interface { + Multigraph + + // WeightedLines returns the weighted lines from u to v + // with IDs uid and vid if any such lines exist and nil + // otherwise. The node v must be directly reachable + // from u as defined by the From method. + // + // WeightedLines must not return nil. + WeightedLines(uid, vid int64) WeightedLines +} + +// UndirectedMultigraph is an undirected multigraph. +type UndirectedMultigraph interface { + Multigraph + + // LinesBetween returns the lines between nodes x and y + // with IDs xid and yid. + // + // LinesBetween must not return nil. + LinesBetween(xid, yid int64) Lines +} + +// WeightedUndirectedMultigraph is a weighted undirected multigraph. +type WeightedUndirectedMultigraph interface { + WeightedMultigraph + + // WeightedLinesBetween returns the lines between nodes + // x and y with IDs xid and yid. + // + // WeightedLinesBetween must not return nil. + WeightedLinesBetween(xid, yid int64) WeightedLines +} + +// DirectedMultigraph is a directed multigraph. +type DirectedMultigraph interface { + Multigraph + + // HasEdgeFromTo returns whether an edge exists + // in the multigraph from u to v with IDs uid + // and vid. + HasEdgeFromTo(uid, vid int64) bool + + // To returns all nodes that can reach directly + // to the node with the given ID. + // + // To must not return nil. + To(id int64) Nodes +} + +// WeightedDirectedMultigraph is a weighted directed multigraph. +type WeightedDirectedMultigraph interface { + WeightedMultigraph + + // HasEdgeFromTo returns whether an edge exists + // in the multigraph from u to v with IDs uid + // and vid. + HasEdgeFromTo(uid, vid int64) bool + + // To returns all nodes that can reach directly + // to the node with the given ID. + // + // To must not return nil. + To(id int64) Nodes +} + +// LineAdder is an interface for adding lines to a multigraph. +type LineAdder interface { + // NewLine returns a new Line from the source to the destination node. + NewLine(from, to Node) Line + + // SetLine adds a Line from one node to another. + // If the multigraph supports node addition the nodes + // will be added if they do not exist, otherwise + // SetLine will panic. + // Whether l, l.From() and l.To() are stored + // within the graph is implementation dependent. + SetLine(l Line) +} + +// WeightedLineAdder is an interface for adding lines to a multigraph. +type WeightedLineAdder interface { + // NewWeightedLine returns a new WeightedLine from + // the source to the destination node. + NewWeightedLine(from, to Node, weight float64) WeightedLine + + // SetWeightedLine adds a weighted line from one node + // to another. If the multigraph supports node addition + // the nodes will be added if they do not exist, + // otherwise SetWeightedLine will panic. + // Whether l, l.From() and l.To() are stored + // within the graph is implementation dependent. + SetWeightedLine(l WeightedLine) +} + +// LineRemover is an interface for removing lines from a multigraph. +type LineRemover interface { + // RemoveLine removes the line with the given end + // and line IDs, leaving the terminal nodes. If + // the line does not exist it is a no-op. + RemoveLine(fid, tid, id int64) +} + +// MultigraphBuilder is a multigraph that can have nodes and lines added. +type MultigraphBuilder interface { + NodeAdder + LineAdder +} + +// WeightedMultigraphBuilder is a multigraph that can have nodes and weighted lines added. 
+type WeightedMultigraphBuilder interface { + NodeAdder + WeightedLineAdder +} + +// UndirectedMultgraphBuilder is an undirected multigraph builder. +type UndirectedMultigraphBuilder interface { + UndirectedMultigraph + MultigraphBuilder +} + +// UndirectedWeightedMultigraphBuilder is an undirected weighted multigraph builder. +type UndirectedWeightedMultigraphBuilder interface { + UndirectedMultigraph + WeightedMultigraphBuilder +} + +// DirectedMultigraphBuilder is a directed multigraph builder. +type DirectedMultigraphBuilder interface { + DirectedMultigraph + MultigraphBuilder +} + +// DirectedWeightedMultigraphBuilder is a directed weighted multigraph builder. +type DirectedWeightedMultigraphBuilder interface { + DirectedMultigraph + WeightedMultigraphBuilder +} diff --git a/vendor/gonum.org/v1/gonum/graph/network/betweenness.go b/vendor/gonum.org/v1/gonum/graph/network/betweenness.go new file mode 100644 index 0000000..12349f4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/betweenness.go @@ -0,0 +1,256 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/linear" + "gonum.org/v1/gonum/graph/path" +) + +// Betweenness returns the non-zero betweenness centrality for nodes in the unweighted graph g. +// +// C_B(v) = \sum_{s ≠ v ≠ t ∈ V} (\sigma_{st}(v) / \sigma_{st}) +// +// where \sigma_{st} and \sigma_{st}(v) are the number of shortest paths from s to t, +// and the subset of those paths containing v respectively. +func Betweenness(g graph.Graph) map[int64]float64 { + // Brandes' algorithm for finding betweenness centrality for nodes in + // and unweighted graph: + // + // http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf + + // TODO(kortschak): Consider using the parallel algorithm when + // GOMAXPROCS != 1. + // + // http://htor.inf.ethz.ch/publications/img/edmonds-hoefler-lumsdaine-bc.pdf + + // Also note special case for sparse networks: + // http://wwwold.iit.cnr.it/staff/marco.pellegrini/papiri/asonam-final.pdf + + cb := make(map[int64]float64) + brandes(g, func(s graph.Node, stack linear.NodeStack, p map[int64][]graph.Node, delta, sigma map[int64]float64) { + for stack.Len() != 0 { + w := stack.Pop() + for _, v := range p[w.ID()] { + delta[v.ID()] += sigma[v.ID()] / sigma[w.ID()] * (1 + delta[w.ID()]) + } + if w.ID() != s.ID() { + if d := delta[w.ID()]; d != 0 { + cb[w.ID()] += d + } + } + } + }) + return cb +} + +// EdgeBetweenness returns the non-zero betweenness centrality for edges in the +// unweighted graph g. For an edge e the centrality C_B is computed as +// +// C_B(e) = \sum_{s ≠ t ∈ V} (\sigma_{st}(e) / \sigma_{st}), +// +// where \sigma_{st} and \sigma_{st}(e) are the number of shortest paths from s +// to t, and the subset of those paths containing e, respectively. +// +// If g is undirected, edges are retained such that u.ID < v.ID where u and v are +// the nodes of e. 
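// A minimal sketch of Betweenness on the multi.UndirectedGraph vendored above
// (assumed example code): in the path graph 1-2-3, node 2 is the only node that
// lies strictly between two others, so it is the only entry in the result.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/graph/multi"
	"gonum.org/v1/gonum/graph/network"
)

func main() {
	g := multi.NewUndirectedGraph()
	g.SetLine(g.NewLine(multi.Node(1), multi.Node(2)))
	g.SetLine(g.NewLine(multi.Node(2), multi.Node(3)))

	// Betweenness records only non-zero centralities; node 2 is counted once
	// for each ordered pair (1,3) and (3,1).
	fmt.Println(network.Betweenness(g)) // map[2:2]
}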
+func EdgeBetweenness(g graph.Graph) map[[2]int64]float64 { + // Modified from Brandes' original algorithm as described in Algorithm 7 + // with the exception that node betweenness is not calculated: + // + // http://algo.uni-konstanz.de/publications/b-vspbc-08.pdf + + _, isUndirected := g.(graph.Undirected) + cb := make(map[[2]int64]float64) + brandes(g, func(s graph.Node, stack linear.NodeStack, p map[int64][]graph.Node, delta, sigma map[int64]float64) { + for stack.Len() != 0 { + w := stack.Pop() + for _, v := range p[w.ID()] { + c := sigma[v.ID()] / sigma[w.ID()] * (1 + delta[w.ID()]) + vid := v.ID() + wid := w.ID() + if isUndirected && wid < vid { + vid, wid = wid, vid + } + cb[[2]int64{vid, wid}] += c + delta[v.ID()] += c + } + } + }) + return cb +} + +// brandes is the common code for Betweenness and EdgeBetweenness. It corresponds +// to algorithm 1 in http://algo.uni-konstanz.de/publications/b-vspbc-08.pdf with +// the accumulation loop provided by the accumulate closure. +func brandes(g graph.Graph, accumulate func(s graph.Node, stack linear.NodeStack, p map[int64][]graph.Node, delta, sigma map[int64]float64)) { + var ( + nodes = graph.NodesOf(g.Nodes()) + stack linear.NodeStack + p = make(map[int64][]graph.Node, len(nodes)) + sigma = make(map[int64]float64, len(nodes)) + d = make(map[int64]int, len(nodes)) + delta = make(map[int64]float64, len(nodes)) + queue linear.NodeQueue + ) + for _, s := range nodes { + stack = stack[:0] + + for _, w := range nodes { + p[w.ID()] = p[w.ID()][:0] + } + + for _, t := range nodes { + sigma[t.ID()] = 0 + d[t.ID()] = -1 + } + sigma[s.ID()] = 1 + d[s.ID()] = 0 + + queue.Enqueue(s) + for queue.Len() != 0 { + v := queue.Dequeue() + vid := v.ID() + stack.Push(v) + for _, w := range graph.NodesOf(g.From(vid)) { + wid := w.ID() + // w found for the first time? + if d[wid] < 0 { + queue.Enqueue(w) + d[wid] = d[vid] + 1 + } + // shortest path to w via v? + if d[wid] == d[vid]+1 { + sigma[wid] += sigma[vid] + p[wid] = append(p[wid], v) + } + } + } + + for _, v := range nodes { + delta[v.ID()] = 0 + } + + // S returns vertices in order of non-increasing distance from s + accumulate(s, stack, p, delta, sigma) + } +} + +// BetweennessWeighted returns the non-zero betweenness centrality for nodes in the weighted +// graph g used to construct the given shortest paths. +// +// C_B(v) = \sum_{s ≠ v ≠ t ∈ V} (\sigma_{st}(v) / \sigma_{st}) +// +// where \sigma_{st} and \sigma_{st}(v) are the number of shortest paths from s to t, +// and the subset of those paths containing v respectively. +func BetweennessWeighted(g graph.Weighted, p path.AllShortest) map[int64]float64 { + cb := make(map[int64]float64) + + nodes := graph.NodesOf(g.Nodes()) + for i, s := range nodes { + sid := s.ID() + for j, t := range nodes { + if i == j { + continue + } + tid := t.ID() + d := p.Weight(sid, tid) + if math.IsInf(d, 0) { + continue + } + + // If we have a unique path, don't do the + // extra work needed to get all paths. + path, _, unique := p.Between(sid, tid) + if unique { + for _, v := range path[1 : len(path)-1] { + // For undirected graphs we double count + // passage though nodes. This is consistent + // with Brandes' algorithm's behaviour. + cb[v.ID()]++ + } + continue + } + + // Otherwise iterate over all paths. 
+ paths, _ := p.AllBetween(sid, tid) + stFrac := 1 / float64(len(paths)) + for _, path := range paths { + for _, v := range path[1 : len(path)-1] { + cb[v.ID()] += stFrac + } + } + } + } + + return cb +} + +// EdgeBetweennessWeighted returns the non-zero betweenness centrality for edges in +// the weighted graph g. For an edge e the centrality C_B is computed as +// +// C_B(e) = \sum_{s ≠ t ∈ V} (\sigma_{st}(e) / \sigma_{st}), +// +// where \sigma_{st} and \sigma_{st}(e) are the number of shortest paths from s +// to t, and the subset of those paths containing e, respectively. +// +// If g is undirected, edges are retained such that u.ID < v.ID where u and v are +// the nodes of e. +func EdgeBetweennessWeighted(g graph.Weighted, p path.AllShortest) map[[2]int64]float64 { + cb := make(map[[2]int64]float64) + + _, isUndirected := g.(graph.Undirected) + nodes := graph.NodesOf(g.Nodes()) + for i, s := range nodes { + sid := s.ID() + for j, t := range nodes { + if i == j { + continue + } + tid := t.ID() + d := p.Weight(sid, tid) + if math.IsInf(d, 0) { + continue + } + + // If we have a unique path, don't do the + // extra work needed to get all paths. + path, _, unique := p.Between(sid, tid) + if unique { + for k, v := range path[1:] { + // For undirected graphs we double count + // passage though edges. This is consistent + // with Brandes' algorithm's behaviour. + uid := path[k].ID() + vid := v.ID() + if isUndirected && vid < uid { + uid, vid = vid, uid + } + cb[[2]int64{uid, vid}]++ + } + continue + } + + // Otherwise iterate over all paths. + paths, _ := p.AllBetween(sid, tid) + stFrac := 1 / float64(len(paths)) + for _, path := range paths { + for k, v := range path[1:] { + uid := path[k].ID() + vid := v.ID() + if isUndirected && vid < uid { + uid, vid = vid, uid + } + cb[[2]int64{uid, vid}] += stFrac + } + } + } + } + + return cb +} diff --git a/vendor/gonum.org/v1/gonum/graph/network/diffusion.go b/vendor/gonum.org/v1/gonum/graph/network/diffusion.go new file mode 100644 index 0000000..6aec9c0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/diffusion.go @@ -0,0 +1,212 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/mat" +) + +// Diffuse performs a heat diffusion across nodes of the undirected +// graph described by the given Laplacian using the initial heat distribution, +// h, according to the Laplacian with a diffusion time of t. +// The resulting heat distribution is returned, written into the map dst and +// returned, +// d = exp(-Lt)×h +// where L is the graph Laplacian. Indexing into h and dst is defined by the +// Laplacian Index field. If dst is nil, a new map is created. +// +// Nodes without corresponding entries in h are given an initial heat of zero, +// and entries in h without a corresponding node in the original graph are +// not altered when written to dst. 
+func Diffuse(dst, h map[int64]float64, by Laplacian, t float64) map[int64]float64 { + heat := make([]float64, len(by.Index)) + for id, i := range by.Index { + heat[i] = h[id] + } + v := mat.NewVecDense(len(heat), heat) + + var m, tl mat.Dense + tl.Scale(-t, by) + m.Exp(&tl) + v.MulVec(&m, v) + + if dst == nil { + dst = make(map[int64]float64) + } + for i, n := range heat { + dst[by.Nodes[i].ID()] = n + } + return dst +} + +// DiffuseToEquilibrium performs a heat diffusion across nodes of the +// graph described by the given Laplacian using the initial heat +// distribution, h, according to the Laplacian until the update function +// h_{n+1} = h_n - L×h_n +// results in a 2-norm update difference within tol, or iters updates have +// been made. +// The resulting heat distribution is returned as eq, written into the map dst, +// and a boolean indicating whether the equilibrium converged to within tol. +// Indexing into h and dst is defined by the Laplacian Index field. If dst +// is nil, a new map is created. +// +// Nodes without corresponding entries in h are given an initial heat of zero, +// and entries in h without a corresponding node in the original graph are +// not altered when written to dst. +func DiffuseToEquilibrium(dst, h map[int64]float64, by Laplacian, tol float64, iters int) (eq map[int64]float64, ok bool) { + heat := make([]float64, len(by.Index)) + for id, i := range by.Index { + heat[i] = h[id] + } + v := mat.NewVecDense(len(heat), heat) + + last := make([]float64, len(by.Index)) + for id, i := range by.Index { + last[i] = h[id] + } + lastV := mat.NewVecDense(len(last), last) + + var tmp mat.VecDense + for { + iters-- + if iters < 0 { + break + } + lastV, v = v, lastV + tmp.MulVec(by.Matrix, lastV) + v.SubVec(lastV, &tmp) + if normDiff(heat, last) < tol { + ok = true + break + } + } + + if dst == nil { + dst = make(map[int64]float64) + } + for i, n := range v.RawVector().Data { + dst[by.Nodes[i].ID()] = n + } + return dst, ok +} + +// Laplacian is a graph Laplacian matrix. +type Laplacian struct { + // Matrix holds the Laplacian matrix. + mat.Matrix + + // Nodes holds the input graph nodes. + Nodes []graph.Node + + // Index is a mapping from the graph + // node IDs to row and column indices. + Index map[int64]int +} + +// NewLaplacian returns a Laplacian matrix for the simple undirected graph g. +// The Laplacian is defined as D-A where D is a diagonal matrix holding the +// degree of each node and A is the graph adjacency matrix of the input graph. +// If g contains self edges, NewLaplacian will panic. +func NewLaplacian(g graph.Undirected) Laplacian { + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + id := n.ID() + indexOf[id] = i + } + + l := mat.NewSymDense(len(nodes), nil) + for j, u := range nodes { + uid := u.ID() + to := graph.NodesOf(g.From(uid)) + l.SetSym(j, j, float64(len(to))) + for _, v := range to { + vid := v.ID() + if uid == vid { + panic("network: self edge in graph") + } + if uid < vid { + l.SetSym(indexOf[vid], j, -1) + } + } + } + + return Laplacian{Matrix: l, Nodes: nodes, Index: indexOf} +} + +// NewSymNormLaplacian returns a symmetric normalized Laplacian matrix for the +// simple undirected graph g. +// The normalized Laplacian is defined as I-D^(-1/2)AD^(-1/2) where D is a +// diagonal matrix holding the degree of each node and A is the graph adjacency +// matrix of the input graph. +// If g contains self edges, NewSymNormLaplacian will panic. 
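+// For an edge between distinct nodes u and v the off-diagonal entry is
+// -1/sqrt(deg(u)·deg(v)); diagonal entries are 1 for nodes with at least
+// one neighbor and 0 otherwise.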
+func NewSymNormLaplacian(g graph.Undirected) Laplacian { + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + id := n.ID() + indexOf[id] = i + } + + l := mat.NewSymDense(len(nodes), nil) + for j, u := range nodes { + uid := u.ID() + to := graph.NodesOf(g.From(uid)) + if len(to) == 0 { + continue + } + l.SetSym(j, j, 1) + squdeg := math.Sqrt(float64(len(to))) + for _, v := range to { + vid := v.ID() + if uid == vid { + panic("network: self edge in graph") + } + if uid < vid { + l.SetSym(indexOf[vid], j, -1/(squdeg*math.Sqrt(float64(g.From(vid).Len())))) + } + } + } + + return Laplacian{Matrix: l, Nodes: nodes, Index: indexOf} +} + +// NewRandomWalkLaplacian returns a damp-scaled random walk Laplacian matrix for +// the simple graph g. +// The random walk Laplacian is defined as I-D^(-1)A where D is a diagonal matrix +// holding the degree of each node and A is the graph adjacency matrix of the input +// graph. +// If g contains self edges, NewRandomWalkLaplacian will panic. +func NewRandomWalkLaplacian(g graph.Graph, damp float64) Laplacian { + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + id := n.ID() + indexOf[id] = i + } + + l := mat.NewDense(len(nodes), len(nodes), nil) + for j, u := range nodes { + uid := u.ID() + to := graph.NodesOf(g.From(uid)) + if len(to) == 0 { + continue + } + l.Set(j, j, 1-damp) + rudeg := (damp - 1) / float64(len(to)) + for _, v := range to { + vid := v.ID() + if uid == vid { + panic("network: self edge in graph") + } + l.Set(indexOf[vid], j, rudeg) + } + } + + return Laplacian{Matrix: l, Nodes: nodes, Index: indexOf} +} diff --git a/vendor/gonum.org/v1/gonum/graph/network/distance.go b/vendor/gonum.org/v1/gonum/graph/network/distance.go new file mode 100644 index 0000000..3092249 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/distance.go @@ -0,0 +1,132 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/path" +) + +// Closeness returns the closeness centrality for nodes in the graph g used to +// construct the given shortest paths. +// +// C(v) = 1 / \sum_u d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Closeness(g graph.Graph, p path.AllShortest) map[int64]float64 { + nodes := graph.NodesOf(g.Nodes()) + c := make(map[int64]float64, len(nodes)) + for _, u := range nodes { + uid := u.ID() + var sum float64 + for _, v := range nodes { + vid := v.ID() + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(vid, uid) + if math.IsInf(d, 0) { + continue + } + sum += d + } + c[u.ID()] = 1 / sum + } + return c +} + +// Farness returns the farness for nodes in the graph g used to construct +// the given shortest paths. +// +// F(v) = \sum_u d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. 
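+//
+// For a node whose farness is finite and non-zero, Closeness is the
+// reciprocal of Farness: C(v) = 1/F(v).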
+func Farness(g graph.Graph, p path.AllShortest) map[int64]float64 { + nodes := graph.NodesOf(g.Nodes()) + f := make(map[int64]float64, len(nodes)) + for _, u := range nodes { + uid := u.ID() + var sum float64 + for _, v := range nodes { + vid := v.ID() + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(vid, uid) + if math.IsInf(d, 0) { + continue + } + sum += d + } + f[u.ID()] = sum + } + return f +} + +// Harmonic returns the harmonic centrality for nodes in the graph g used to +// construct the given shortest paths. +// +// H(v)= \sum_{u ≠ v} 1 / d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Harmonic(g graph.Graph, p path.AllShortest) map[int64]float64 { + nodes := graph.NodesOf(g.Nodes()) + h := make(map[int64]float64, len(nodes)) + for i, u := range nodes { + uid := u.ID() + var sum float64 + for j, v := range nodes { + vid := v.ID() + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(vid, uid) + if math.IsInf(d, 0) { + continue + } + if i != j { + sum += 1 / d + } + } + h[u.ID()] = sum + } + return h +} + +// Residual returns the Dangalchev's residual closeness for nodes in the graph +// g used to construct the given shortest paths. +// +// C(v)= \sum_{u ≠ v} 1 / 2^d(u,v) +// +// For directed graphs the incoming paths are used. Infinite distances are +// not considered. +func Residual(g graph.Graph, p path.AllShortest) map[int64]float64 { + nodes := graph.NodesOf(g.Nodes()) + r := make(map[int64]float64, len(nodes)) + for i, u := range nodes { + uid := u.ID() + var sum float64 + for j, v := range nodes { + vid := v.ID() + // The ordering here is not relevant for + // undirected graphs, but we make sure we + // are counting incoming paths. + d := p.Weight(vid, uid) + if math.IsInf(d, 0) { + continue + } + if i != j { + sum += math.Exp2(-d) + } + } + r[u.ID()] = sum + } + return r +} diff --git a/vendor/gonum.org/v1/gonum/graph/network/doc.go b/vendor/gonum.org/v1/gonum/graph/network/doc.go new file mode 100644 index 0000000..a70c6ba --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package network provides network analysis functions. +package network // import "gonum.org/v1/gonum/graph/network" diff --git a/vendor/gonum.org/v1/gonum/graph/network/hits.go b/vendor/gonum.org/v1/gonum/graph/network/hits.go new file mode 100644 index 0000000..ec89113 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/hits.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/graph" +) + +// HubAuthority is a Hyperlink-Induced Topic Search hub-authority score pair. +type HubAuthority struct { + Hub float64 + Authority float64 +} + +// HITS returns the Hyperlink-Induced Topic Search hub-authority scores for +// nodes of the directed graph g. HITS terminates when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. 
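+//
+// A minimal usage sketch, assuming a populated directed graph g; the
+// tolerance 1e-8 is an illustrative value only:
+//
+//	scores := HITS(g, 1e-8)
+//	for id, s := range scores {
+//		fmt.Println(id, s.Hub, s.Authority)
+//	}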
+func HITS(g graph.Directed, tol float64) map[int64]HubAuthority { + nodes := graph.NodesOf(g.Nodes()) + + // Make a topological copy of g with dense node IDs. + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + nodesLinkingTo := make([][]int, len(nodes)) + nodesLinkedFrom := make([][]int, len(nodes)) + for i, n := range nodes { + id := n.ID() + for _, u := range graph.NodesOf(g.To(id)) { + nodesLinkingTo[i] = append(nodesLinkingTo[i], indexOf[u.ID()]) + } + for _, v := range graph.NodesOf(g.From(id)) { + nodesLinkedFrom[i] = append(nodesLinkedFrom[i], indexOf[v.ID()]) + } + } + + w := make([]float64, 4*len(nodes)) + auth := w[:len(nodes)] + hub := w[len(nodes) : 2*len(nodes)] + for i := range nodes { + auth[i] = 1 + hub[i] = 1 + } + deltaAuth := w[2*len(nodes) : 3*len(nodes)] + deltaHub := w[3*len(nodes):] + + var norm float64 + for { + norm = 0 + for v := range nodes { + var a float64 + for _, u := range nodesLinkingTo[v] { + a += hub[u] + } + deltaAuth[v] = auth[v] + auth[v] = a + norm += a * a + } + norm = math.Sqrt(norm) + + for i := range auth { + auth[i] /= norm + deltaAuth[i] -= auth[i] + } + + norm = 0 + for u := range nodes { + var h float64 + for _, v := range nodesLinkedFrom[u] { + h += auth[v] + } + deltaHub[u] = hub[u] + hub[u] = h + norm += h * h + } + norm = math.Sqrt(norm) + + for i := range hub { + hub[i] /= norm + deltaHub[i] -= hub[i] + } + + if floats.Norm(deltaAuth, 2) < tol && floats.Norm(deltaHub, 2) < tol { + break + } + } + + hubAuth := make(map[int64]HubAuthority, len(nodes)) + for i, n := range nodes { + hubAuth[n.ID()] = HubAuthority{Hub: hub[i], Authority: auth[i]} + } + + return hubAuth +} diff --git a/vendor/gonum.org/v1/gonum/graph/network/network.go b/vendor/gonum.org/v1/gonum/graph/network/network.go new file mode 100644 index 0000000..239c864 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/network.go @@ -0,0 +1,13 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// TODO(kortschak): Implement: +// * edge-weighted PageRank and HITS +// * PersonalizedPageRank: +// http://infolab.stanford.edu/~backrub/google.html 2.1.2 Intuitive Justification +// http://ilpubs.stanford.edu:8090/596/1/2003-35.pdf +// http://www.vldb.org/pvldb/vol7/p1023-maehara.pdf +// * other centrality measures + +package network diff --git a/vendor/gonum.org/v1/gonum/graph/network/page.go b/vendor/gonum.org/v1/gonum/graph/network/page.go new file mode 100644 index 0000000..8c1634e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/network/page.go @@ -0,0 +1,418 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package network + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/mat" +) + +// PageRank returns the PageRank weights for nodes of the directed graph g +// using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +// If g is a graph.WeightedDirected, an edge-weighted PageRank is calculated. 
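+//
+// A minimal usage sketch, assuming a populated directed graph g; the
+// damping factor 0.85 and tolerance 1e-6 are illustrative values only:
+//
+//	ranks := PageRank(g, 0.85, 1e-6)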
+func PageRank(g graph.Directed, damp, tol float64) map[int64]float64 { + if g, ok := g.(graph.WeightedDirected); ok { + return edgeWeightedPageRank(g, damp, tol) + } + return pageRank(g, damp, tol) +} + +// PageRankSparse returns the PageRank weights for nodes of the sparse directed +// graph g using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +// If g is a graph.WeightedDirected, an edge-weighted PageRank is calculated. +func PageRankSparse(g graph.Directed, damp, tol float64) map[int64]float64 { + if g, ok := g.(graph.WeightedDirected); ok { + return edgeWeightedPageRankSparse(g, damp, tol) + } + return pageRankSparse(g, damp, tol) +} + +// edgeWeightedPageRank returns the PageRank weights for nodes of the weighted directed graph g +// using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +func edgeWeightedPageRank(g graph.WeightedDirected, damp, tol float64) map[int64]float64 { + // edgeWeightedPageRank is implemented according to "How Google Finds Your Needle + // in the Web's Haystack" with the modification that + // the columns of hyperlink matrix H are calculated with edge weights. + // + // G.I^k = alpha.H.I^k + alpha.A.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := mat.NewDense(len(nodes), len(nodes), nil) + dangling := damp / float64(len(nodes)) + for j, u := range nodes { + to := graph.NodesOf(g.From(u.ID())) + var z float64 + for _, v := range to { + if w, ok := g.Weight(u.ID(), v.ID()); ok { + z += w + } + } + if z != 0 { + for _, v := range to { + if w, ok := g.Weight(u.ID(), v.ID()); ok { + m.Set(indexOf[v.ID()], j, (w*damp)/z) + } + } + } else { + for i := range nodes { + m.Set(i, j, dangling) + } + } + } + + matrix := m.RawMatrix().Data + dt := (1 - damp) / float64(len(nodes)) + for i := range matrix { + matrix[i] += dt + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat.NewVecDense(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat.NewVecDense(len(nodes), vec) + + for { + lastV, v = v, lastV + v.MulVec(m, lastV) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int64]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// edgeWeightedPageRankSparse returns the PageRank weights for nodes of the sparse weighted directed +// graph g using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +func edgeWeightedPageRankSparse(g graph.WeightedDirected, damp, tol float64) map[int64]float64 { + // edgeWeightedPageRankSparse is implemented according to "How Google Finds Your Needle + // in the Web's Haystack" with the modification that + // the columns of hyperlink matrix H are calculated with edge weights. 
+ // + // G.I^k = alpha.H.I^k + alpha.A.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := make(rowCompressedMatrix, len(nodes)) + var dangling compressedRow + df := damp / float64(len(nodes)) + for j, u := range nodes { + to := graph.NodesOf(g.From(u.ID())) + var z float64 + for _, v := range to { + if w, ok := g.Weight(u.ID(), v.ID()); ok { + z += w + } + } + if z != 0 { + for _, v := range to { + if w, ok := g.Weight(u.ID(), v.ID()); ok { + m.addTo(indexOf[v.ID()], j, (w*damp)/z) + } + } + } else { + dangling.addTo(j, df) + } + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat.NewVecDense(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat.NewVecDense(len(nodes), vec) + + dt := (1 - damp) / float64(len(nodes)) + for { + lastV, v = v, lastV + + m.mulVecUnitary(v, lastV) // First term of the G matrix equation; + with := dangling.dotUnitary(lastV) // Second term; + away := onesDotUnitary(dt, lastV) // Last term. + + floats.AddConst(with+away, v.RawVector().Data) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int64]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// pageRank returns the PageRank weights for nodes of the directed graph g +// using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. The returned map is +// keyed on the graph node IDs. +func pageRank(g graph.Directed, damp, tol float64) map[int64]float64 { + // pageRank is implemented according to "How Google Finds Your Needle + // in the Web's Haystack". + // + // G.I^k = alpha.S.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := mat.NewDense(len(nodes), len(nodes), nil) + dangling := damp / float64(len(nodes)) + for j, u := range nodes { + to := graph.NodesOf(g.From(u.ID())) + f := damp / float64(len(to)) + for _, v := range to { + m.Set(indexOf[v.ID()], j, f) + } + if len(to) == 0 { + for i := range nodes { + m.Set(i, j, dangling) + } + } + } + matrix := m.RawMatrix().Data + dt := (1 - damp) / float64(len(nodes)) + for i := range matrix { + matrix[i] += dt + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat.NewVecDense(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat.NewVecDense(len(nodes), vec) + + for { + lastV, v = v, lastV + v.MulVec(m, lastV) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int64]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// pageRankSparse returns the PageRank weights for nodes of the sparse directed +// graph g using the given damping factor and terminating when the 2-norm of the +// vector difference between iterations is below tol. 
The returned map is +// keyed on the graph node IDs. +func pageRankSparse(g graph.Directed, damp, tol float64) map[int64]float64 { + // pageRankSparse is implemented according to "How Google Finds Your Needle + // in the Web's Haystack". + // + // G.I^k = alpha.H.I^k + alpha.A.I^k + (1-alpha).1/n.1.I^k + // + // http://www.ams.org/samplings/feature-column/fcarc-pagerank + + nodes := graph.NodesOf(g.Nodes()) + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + + m := make(rowCompressedMatrix, len(nodes)) + var dangling compressedRow + df := damp / float64(len(nodes)) + for j, u := range nodes { + to := graph.NodesOf(g.From(u.ID())) + f := damp / float64(len(to)) + for _, v := range to { + m.addTo(indexOf[v.ID()], j, f) + } + if len(to) == 0 { + dangling.addTo(j, df) + } + } + + last := make([]float64, len(nodes)) + for i := range last { + last[i] = 1 + } + lastV := mat.NewVecDense(len(nodes), last) + + vec := make([]float64, len(nodes)) + var sum float64 + for i := range vec { + r := rand.NormFloat64() + sum += r + vec[i] = r + } + f := 1 / sum + for i := range vec { + vec[i] *= f + } + v := mat.NewVecDense(len(nodes), vec) + + dt := (1 - damp) / float64(len(nodes)) + for { + lastV, v = v, lastV + + m.mulVecUnitary(v, lastV) // First term of the G matrix equation; + with := dangling.dotUnitary(lastV) // Second term; + away := onesDotUnitary(dt, lastV) // Last term. + + floats.AddConst(with+away, v.RawVector().Data) + if normDiff(vec, last) < tol { + break + } + } + + ranks := make(map[int64]float64, len(nodes)) + for i, r := range v.RawVector().Data { + ranks[nodes[i].ID()] = r + } + + return ranks +} + +// rowCompressedMatrix implements row-compressed +// matrix/vector multiplication. +type rowCompressedMatrix []compressedRow + +// addTo adds the value v to the matrix element at (i,j). Repeated +// calls to addTo with the same column index will result in +// non-unique element representation. +func (m rowCompressedMatrix) addTo(i, j int, v float64) { m[i].addTo(j, v) } + +// mulVecUnitary multiplies the receiver by the src vector, storing +// the result in dst. It assumes src and dst are the same length as m +// and that both have unitary vector increments. +func (m rowCompressedMatrix) mulVecUnitary(dst, src *mat.VecDense) { + dMat := dst.RawVector().Data + for i, r := range m { + dMat[i] = r.dotUnitary(src) + } +} + +// compressedRow implements a simplified scatter-based Ddot. +type compressedRow []sparseElement + +// addTo adds the value v to the vector element at j. Repeated +// calls to addTo with the same vector index will result in +// non-unique element representation. +func (r *compressedRow) addTo(j int, v float64) { + *r = append(*r, sparseElement{index: j, value: v}) +} + +// dotUnitary performs a simplified scatter-based Ddot operations on +// v and the receiver. v must have a unitary vector increment. +func (r compressedRow) dotUnitary(v *mat.VecDense) float64 { + var sum float64 + vec := v.RawVector().Data + for _, e := range r { + sum += vec[e.index] * e.value + } + return sum +} + +// sparseElement is a sparse vector or matrix element. +type sparseElement struct { + index int + value float64 +} + +// onesDotUnitary performs the equivalent of a Ddot of v with +// a ones vector of equal length. v must have a unitary vector +// increment. 
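+// The result is alpha multiplied by the sum of the elements of v.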
+func onesDotUnitary(alpha float64, v *mat.VecDense) float64 { + var sum float64 + for _, f := range v.RawVector().Data { + sum += alpha * f + } + return sum +} + +// normDiff returns the 2-norm of the difference between x and y. +// This is a cut down version of gonum/floats.Distance. +func normDiff(x, y []float64) float64 { + var sum float64 + for i, v := range x { + d := v - y[i] + sum += d * d + } + return math.Sqrt(sum) +} diff --git a/vendor/gonum.org/v1/gonum/graph/nodes_edges.go b/vendor/gonum.org/v1/gonum/graph/nodes_edges.go new file mode 100644 index 0000000..3d5dae1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/nodes_edges.go @@ -0,0 +1,300 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +// Iterator is an item iterator. +type Iterator interface { + // Next advances the iterator and returns whether + // the next call to the item method will return a + // non-nil item. + // + // Next should be called prior to any call to the + // iterator's item retrieval method after the + // iterator has been obtained or reset. + // + // The order of iteration is implementation + // dependent. + Next() bool + + // Len returns the number of items remaining in the + // iterator. + // + // If the number of items in the iterator is unknown, + // too large to materialize or too costly to calculate + // then Len may return a negative value. + // In this case the consuming function must be able + // to operate on the items of the iterator directly + // without materializing the items into a slice. + // The magnitude of a negative length has + // implementation-dependent semantics. + Len() int + + // Reset returns the iterator to its start position. + Reset() +} + +// Nodes is a Node iterator. +type Nodes interface { + Iterator + + // Node returns the current Node from the iterator. + Node() Node +} + +// NodeSlicer wraps the NodeSlice method. +type NodeSlicer interface { + // NodeSlice returns the set of nodes remaining + // to be iterated by a Nodes iterator. + // The holder of the iterator may arbitrarily + // change elements in the returned slice, but + // those changes may be reflected to other + // iterators. + NodeSlice() []Node +} + +// NodesOf returns it.Len() nodes from it. If it is a NodeSlicer, the NodeSlice method +// is used to obtain the nodes. It is safe to pass a nil Nodes to NodesOf. +// +// If the Nodes has an indeterminate length, NodesOf will panic. +func NodesOf(it Nodes) []Node { + if it == nil { + return nil + } + len := it.Len() + switch { + case len == 0: + return nil + case len < 0: + panic("graph: called NodesOf on indeterminate iterator") + } + switch it := it.(type) { + case NodeSlicer: + return it.NodeSlice() + } + n := make([]Node, 0, len) + for it.Next() { + n = append(n, it.Node()) + } + return n +} + +// Edges is an Edge iterator. +type Edges interface { + Iterator + + // Edge returns the current Edge from the iterator. + Edge() Edge +} + +// EdgeSlicer wraps the EdgeSlice method. +type EdgeSlicer interface { + // EdgeSlice returns the set of edges remaining + // to be iterated by an Edges iterator. + // The holder of the iterator may arbitrarily + // change elements in the returned slice, but + // those changes may be reflected to other + // iterators. + EdgeSlice() []Edge +} + +// EdgesOf returns it.Len() nodes from it. If it is an EdgeSlicer, the EdgeSlice method is used +// to obtain the edges. 
It is safe to pass a nil Edges to EdgesOf. +// +// If the Edges has an indeterminate length, EdgesOf will panic. +func EdgesOf(it Edges) []Edge { + if it == nil { + return nil + } + len := it.Len() + switch { + case len == 0: + return nil + case len < 0: + panic("graph: called EdgesOf on indeterminate iterator") + } + switch it := it.(type) { + case EdgeSlicer: + return it.EdgeSlice() + } + e := make([]Edge, 0, len) + for it.Next() { + e = append(e, it.Edge()) + } + return e +} + +// WeightedEdges is a WeightedEdge iterator. +type WeightedEdges interface { + Iterator + + // Edge returns the current Edge from the iterator. + WeightedEdge() WeightedEdge +} + +// WeightedEdgeSlicer wraps the WeightedEdgeSlice method. +type WeightedEdgeSlicer interface { + // EdgeSlice returns the set of edges remaining + // to be iterated by an Edges iterator. + // The holder of the iterator may arbitrarily + // change elements in the returned slice, but + // those changes may be reflected to other + // iterators. + WeightedEdgeSlice() []WeightedEdge +} + +// WeightedEdgesOf returns it.Len() weighted edge from it. If it is a WeightedEdgeSlicer, the +// WeightedEdgeSlice method is used to obtain the edges. It is safe to pass a nil WeightedEdges +// to WeightedEdgesOf. +// +// If the WeightedEdges has an indeterminate length, WeightedEdgesOf will panic. +func WeightedEdgesOf(it WeightedEdges) []WeightedEdge { + if it == nil { + return nil + } + len := it.Len() + switch { + case len == 0: + return nil + case len < 0: + panic("graph: called WeightedEdgesOf on indeterminate iterator") + } + switch it := it.(type) { + case WeightedEdgeSlicer: + return it.WeightedEdgeSlice() + } + e := make([]WeightedEdge, 0, len) + for it.Next() { + e = append(e, it.WeightedEdge()) + } + return e +} + +// Lines is a Line iterator. +type Lines interface { + Iterator + + // Line returns the current Line from the iterator. + Line() Line +} + +// LineSlicer wraps the LineSlice method. +type LineSlicer interface { + // LineSlice returns the set of lines remaining + // to be iterated by an Lines iterator. + // The holder of the iterator may arbitrarily + // change elements in the returned slice, but + // those changes may be reflected to other + // iterators. + LineSlice() []Line +} + +// LinesOf returns it.Len() nodes from it. If it is a LineSlicer, the LineSlice method is used +// to obtain the lines. It is safe to pass a nil Lines to LinesOf. +// +// If the Lines has an indeterminate length, LinesOf will panic. +func LinesOf(it Lines) []Line { + if it == nil { + return nil + } + len := it.Len() + switch { + case len == 0: + return nil + case len < 0: + panic("graph: called LinesOf on indeterminate iterator") + } + switch it := it.(type) { + case LineSlicer: + return it.LineSlice() + } + l := make([]Line, 0, len) + for it.Next() { + l = append(l, it.Line()) + } + return l +} + +// WeightedLines is a WeightedLine iterator. +type WeightedLines interface { + Iterator + + // Line returns the current Line from the iterator. + WeightedLine() WeightedLine +} + +// WeightedLineSlicer wraps the WeightedLineSlice method. +type WeightedLineSlicer interface { + // LineSlice returns the set of lines remaining + // to be iterated by an Lines iterator. + // The holder of the iterator may arbitrarily + // change elements in the returned slice, but + // those changes may be reflected to other + // iterators. + WeightedLineSlice() []WeightedLine +} + +// WeightedLinesOf returns it.Len() weighted line from it. 
If it is a WeightedLineSlicer, the +// WeightedLineSlice method is used to obtain the lines. It is safe to pass a nil WeightedLines +// to WeightedLinesOf. +// +// If the WeightedLines has an indeterminate length, WeightedLinesOf will panic. +func WeightedLinesOf(it WeightedLines) []WeightedLine { + if it == nil { + return nil + } + len := it.Len() + switch { + case len == 0: + return nil + case len < 0: + panic("graph: called WeightedLinesOf on indeterminate iterator") + } + switch it := it.(type) { + case WeightedLineSlicer: + return it.WeightedLineSlice() + } + l := make([]WeightedLine, 0, len) + for it.Next() { + l = append(l, it.WeightedLine()) + } + return l +} + +// Empty is an empty set of nodes, edges or lines. It should be used when +// a graph returns a zero-length Iterator. Empty implements the slicer +// interfaces for nodes, edges and lines, returning nil for each of these. +const Empty = nothing + +var ( + _ Iterator = Empty + _ Nodes = Empty + _ NodeSlicer = Empty + _ Edges = Empty + _ EdgeSlicer = Empty + _ WeightedEdges = Empty + _ WeightedEdgeSlicer = Empty + _ Lines = Empty + _ LineSlicer = Empty + _ WeightedLines = Empty + _ WeightedLineSlicer = Empty +) + +const nothing = empty(true) + +type empty bool + +func (empty) Next() bool { return false } +func (empty) Len() int { return 0 } +func (empty) Reset() {} +func (empty) Node() Node { return nil } +func (empty) NodeSlice() []Node { return nil } +func (empty) Edge() Edge { return nil } +func (empty) EdgeSlice() []Edge { return nil } +func (empty) WeightedEdge() WeightedEdge { return nil } +func (empty) WeightedEdgeSlice() []WeightedEdge { return nil } +func (empty) Line() Line { return nil } +func (empty) LineSlice() []Line { return nil } +func (empty) WeightedLine() WeightedLine { return nil } +func (empty) WeightedLineSlice() []WeightedLine { return nil } diff --git a/vendor/gonum.org/v1/gonum/graph/path/a_star.go b/vendor/gonum.org/v1/gonum/graph/path/a_star.go new file mode 100644 index 0000000..e12d655 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/a_star.go @@ -0,0 +1,151 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/set" +) + +// AStar finds the A*-shortest path from s to t in g using the heuristic h. The path and +// its cost are returned in a Shortest along with paths and costs to all nodes explored +// during the search. The number of expanded nodes is also returned. This value may help +// with heuristic tuning. +// +// The path will be the shortest path if the heuristic is admissible. A heuristic is +// admissible if for any node, n, in the graph, the heuristic estimate of the cost of +// the path from n to t is less than or equal to the true cost of that path. +// +// If h is nil, AStar will use the g.HeuristicCost method if g implements HeuristicCoster, +// falling back to NullHeuristic otherwise. If the graph does not implement Weighted, +// UniformCost is used. AStar will panic if g has an A*-reachable negative edge weight. 
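+//
+// A minimal usage sketch, assuming nodes s and t in a populated graph g;
+// the node sequence and weight are then read back through the Shortest
+// To accessor:
+//
+//	pt, expanded := AStar(s, t, g, nil)
+//	nodes, cost := pt.To(t.ID())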
+func AStar(s, t graph.Node, g graph.Graph, h Heuristic) (path Shortest, expanded int) { + if g.Node(s.ID()) == nil || g.Node(t.ID()) == nil { + return Shortest{from: s}, 0 + } + var weight Weighting + if wg, ok := g.(Weighted); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + if h == nil { + if g, ok := g.(HeuristicCoster); ok { + h = g.HeuristicCost + } else { + h = NullHeuristic + } + } + + path = newShortestFrom(s, graph.NodesOf(g.Nodes())) + tid := t.ID() + + visited := make(set.Int64s) + open := &aStarQueue{indexOf: make(map[int64]int)} + heap.Push(open, aStarNode{node: s, gscore: 0, fscore: h(s, t)}) + + for open.Len() != 0 { + u := heap.Pop(open).(aStarNode) + uid := u.node.ID() + i := path.indexOf[uid] + expanded++ + + if uid == tid { + break + } + + visited.Add(uid) + for _, v := range graph.NodesOf(g.From(u.node.ID())) { + vid := v.ID() + if visited.Has(vid) { + continue + } + j := path.indexOf[vid] + + w, ok := weight(u.node.ID(), vid) + if !ok { + panic("A*: unexpected invalid weight") + } + if w < 0 { + panic("A*: negative edge weight") + } + g := u.gscore + w + if n, ok := open.node(vid); !ok { + path.set(j, g, i) + heap.Push(open, aStarNode{node: v, gscore: g, fscore: g + h(v, t)}) + } else if g < n.gscore { + path.set(j, g, i) + open.update(vid, g, g+h(v, t)) + } + } + } + + return path, expanded +} + +// NullHeuristic is an admissible, consistent heuristic that will not speed up computation. +func NullHeuristic(_, _ graph.Node) float64 { + return 0 +} + +// aStarNode adds A* accounting to a graph.Node. +type aStarNode struct { + node graph.Node + gscore float64 + fscore float64 +} + +// aStarQueue is an A* priority queue. +type aStarQueue struct { + indexOf map[int64]int + nodes []aStarNode +} + +func (q *aStarQueue) Less(i, j int) bool { + return q.nodes[i].fscore < q.nodes[j].fscore +} + +func (q *aStarQueue) Swap(i, j int) { + q.indexOf[q.nodes[i].node.ID()] = j + q.indexOf[q.nodes[j].node.ID()] = i + q.nodes[i], q.nodes[j] = q.nodes[j], q.nodes[i] +} + +func (q *aStarQueue) Len() int { + return len(q.nodes) +} + +func (q *aStarQueue) Push(x interface{}) { + n := x.(aStarNode) + q.indexOf[n.node.ID()] = len(q.nodes) + q.nodes = append(q.nodes, n) +} + +func (q *aStarQueue) Pop() interface{} { + n := q.nodes[len(q.nodes)-1] + q.nodes = q.nodes[:len(q.nodes)-1] + delete(q.indexOf, n.node.ID()) + return n +} + +func (q *aStarQueue) update(id int64, g, f float64) { + i, ok := q.indexOf[id] + if !ok { + return + } + q.nodes[i].gscore = g + q.nodes[i].fscore = f + heap.Fix(q, i) +} + +func (q *aStarQueue) node(id int64) (aStarNode, bool) { + loc, ok := q.indexOf[id] + if ok { + return q.nodes[loc], true + } + return aStarNode{}, false +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/bellman_ford_moore.go b/vendor/gonum.org/v1/gonum/graph/path/bellman_ford_moore.go new file mode 100644 index 0000000..1174995 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/bellman_ford_moore.go @@ -0,0 +1,72 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import "gonum.org/v1/gonum/graph" + +// BellmanFordFrom returns a shortest-path tree for a shortest path from u to all nodes in +// the graph g, or false indicating that a negative cycle exists in the graph. If the graph +// does not implement Weighted, UniformCost is used. +// +// The time complexity of BellmanFordFrom is O(|V|.|E|). 
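+//
+// A minimal usage sketch, assuming a node u in a populated graph g:
+//
+//	pt, ok := BellmanFordFrom(u, g)
+//	if !ok {
+//		// A negative cycle exists in the graph.
+//	}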
+func BellmanFordFrom(u graph.Node, g graph.Graph) (path Shortest, ok bool) { + if g.Node(u.ID()) == nil { + return Shortest{from: u}, true + } + var weight Weighting + if wg, ok := g.(Weighted); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + nodes := graph.NodesOf(g.Nodes()) + + path = newShortestFrom(u, nodes) + path.dist[path.indexOf[u.ID()]] = 0 + + // TODO(kortschak): Consider adding further optimisations + // from http://arxiv.org/abs/1111.5414. + for i := 1; i < len(nodes); i++ { + changed := false + for j, u := range nodes { + uid := u.ID() + for _, v := range graph.NodesOf(g.From(uid)) { + vid := v.ID() + k := path.indexOf[vid] + w, ok := weight(uid, vid) + if !ok { + panic("bellman-ford: unexpected invalid weight") + } + joint := path.dist[j] + w + if joint < path.dist[k] { + path.set(k, joint, j) + changed = true + } + } + } + if !changed { + break + } + } + + for j, u := range nodes { + uid := u.ID() + for _, v := range graph.NodesOf(g.From(uid)) { + vid := v.ID() + k := path.indexOf[vid] + w, ok := weight(uid, vid) + if !ok { + panic("bellman-ford: unexpected invalid weight") + } + if path.dist[j]+w < path.dist[k] { + path.hasNegativeCycle = true + return path, false + } + } + } + + return path, true +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/dijkstra.go b/vendor/gonum.org/v1/gonum/graph/path/dijkstra.go new file mode 100644 index 0000000..680025c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/dijkstra.go @@ -0,0 +1,165 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/traverse" +) + +// DijkstraFrom returns a shortest-path tree for a shortest path from u to all nodes in +// the graph g. If the graph does not implement Weighted, UniformCost is used. +// DijkstraFrom will panic if g has a u-reachable negative edge weight. +// +// If g is a graph.Graph, all nodes of the graph will be stored in the shortest-path +// tree, otherwise only nodes reachable from u will be stored. +// +// The time complexity of DijkstrFrom is O(|E|.log|V|). +func DijkstraFrom(u graph.Node, g traverse.Graph) Shortest { + var path Shortest + if h, ok := g.(graph.Graph); ok { + if h.Node(u.ID()) == nil { + return Shortest{from: u} + } + path = newShortestFrom(u, graph.NodesOf(h.Nodes())) + } else { + if g.From(u.ID()) == nil { + return Shortest{from: u} + } + path = newShortestFrom(u, []graph.Node{u}) + } + + var weight Weighting + if wg, ok := g.(Weighted); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + // Dijkstra's algorithm here is implemented essentially as + // described in Function B.2 in figure 6 of UTCS Technical + // Report TR-07-54. + // + // This implementation deviates from the report as follows: + // - the value of path.dist for the start vertex u is initialized to 0; + // - outdated elements from the priority queue (i.e. with respect to the dist value) + // are skipped. 
+ // + // http://www.cs.utexas.edu/ftp/techreports/tr07-54.pdf + Q := priorityQueue{{node: u, dist: 0}} + for Q.Len() != 0 { + mid := heap.Pop(&Q).(distanceNode) + k := path.indexOf[mid.node.ID()] + if mid.dist > path.dist[k] { + continue + } + mnid := mid.node.ID() + for _, v := range graph.NodesOf(g.From(mnid)) { + vid := v.ID() + j, ok := path.indexOf[vid] + if !ok { + j = path.add(v) + } + w, ok := weight(mnid, vid) + if !ok { + panic("dijkstra: unexpected invalid weight") + } + if w < 0 { + panic("dijkstra: negative edge weight") + } + joint := path.dist[k] + w + if joint < path.dist[j] { + heap.Push(&Q, distanceNode{node: v, dist: joint}) + path.set(j, joint, k) + } + } + } + + return path +} + +// DijkstraAllPaths returns a shortest-path tree for shortest paths in the graph g. +// If the graph does not implement graph.Weighter, UniformCost is used. +// DijkstraAllPaths will panic if g has a negative edge weight. +// +// The time complexity of DijkstrAllPaths is O(|V|.|E|+|V|^2.log|V|). +func DijkstraAllPaths(g graph.Graph) (paths AllShortest) { + paths = newAllShortest(graph.NodesOf(g.Nodes()), false) + dijkstraAllPaths(g, paths) + return paths +} + +// dijkstraAllPaths is the all-paths implementation of Dijkstra. It is shared +// between DijkstraAllPaths and JohnsonAllPaths to avoid repeated allocation +// of the nodes slice and the indexOf map. It returns nothing, but stores the +// result of the work in the paths parameter which is a reference type. +func dijkstraAllPaths(g graph.Graph, paths AllShortest) { + var weight Weighting + if wg, ok := g.(graph.Weighted); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + var Q priorityQueue + for i, u := range paths.nodes { + // Dijkstra's algorithm here is implemented essentially as + // described in Function B.2 in figure 6 of UTCS Technical + // Report TR-07-54 with the addition of handling multiple + // co-equal paths. + // + // http://www.cs.utexas.edu/ftp/techreports/tr07-54.pdf + + // Q must be empty at this point. + heap.Push(&Q, distanceNode{node: u, dist: 0}) + for Q.Len() != 0 { + mid := heap.Pop(&Q).(distanceNode) + k := paths.indexOf[mid.node.ID()] + if mid.dist < paths.dist.At(i, k) { + paths.dist.Set(i, k, mid.dist) + } + mnid := mid.node.ID() + for _, v := range graph.NodesOf(g.From(mnid)) { + vid := v.ID() + j := paths.indexOf[vid] + w, ok := weight(mnid, vid) + if !ok { + panic("dijkstra: unexpected invalid weight") + } + if w < 0 { + panic("dijkstra: negative edge weight") + } + joint := paths.dist.At(i, k) + w + if joint < paths.dist.At(i, j) { + heap.Push(&Q, distanceNode{node: v, dist: joint}) + paths.set(i, j, joint, k) + } else if joint == paths.dist.At(i, j) { + paths.add(i, j, k) + } + } + } + } +} + +type distanceNode struct { + node graph.Node + dist float64 +} + +// priorityQueue implements a no-dec priority queue. 
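+// Instead of decreasing keys in place, new entries are pushed and
+// outdated entries are skipped when popped (see DijkstraFrom above).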
+type priorityQueue []distanceNode + +func (q priorityQueue) Len() int { return len(q) } +func (q priorityQueue) Less(i, j int) bool { return q[i].dist < q[j].dist } +func (q priorityQueue) Swap(i, j int) { q[i], q[j] = q[j], q[i] } +func (q *priorityQueue) Push(n interface{}) { *q = append(*q, n.(distanceNode)) } +func (q *priorityQueue) Pop() interface{} { + t := *q + var n interface{} + n, *q = t[len(t)-1], t[:len(t)-1] + return n +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/disjoint.go b/vendor/gonum.org/v1/gonum/graph/path/disjoint.go new file mode 100644 index 0000000..235694c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/disjoint.go @@ -0,0 +1,87 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +// A disjoint set is a collection of non-overlapping sets. That is, for any two sets in the +// disjoint set, their intersection is the empty set. +// +// A disjoint set has three principle operations: Make Set, Find, and Union. +// +// Make set creates a new set for an element (presuming it does not already exist in any set in +// the disjoint set), Find finds the set containing that element (if any), and Union merges two +// sets in the disjoint set. In general, algorithms operating on disjoint sets are "union-find" +// algorithms, where two sets are found with Find, and then joined with Union. +// +// A concrete example of a union-find algorithm can be found as discrete.Kruskal -- which unions +// two sets when an edge is created between two vertices, and refuses to make an edge between two +// vertices if they're part of the same set. +type disjointSet struct { + master map[int64]*disjointSetNode +} + +type disjointSetNode struct { + parent *disjointSetNode + rank int +} + +func newDisjointSet() *disjointSet { + return &disjointSet{master: make(map[int64]*disjointSetNode)} +} + +// If the element isn't already somewhere in there, adds it to the master set and its own tiny set. +func (ds *disjointSet) makeSet(e int64) { + if _, ok := ds.master[e]; ok { + return + } + dsNode := &disjointSetNode{rank: 0} + dsNode.parent = dsNode + ds.master[e] = dsNode +} + +// Returns the set the element belongs to, or nil if none. +func (ds *disjointSet) find(e int64) *disjointSetNode { + dsNode, ok := ds.master[e] + if !ok { + return nil + } + + return find(dsNode) +} + +func find(dsNode *disjointSetNode) *disjointSetNode { + if dsNode.parent != dsNode { + dsNode.parent = find(dsNode.parent) + } + + return dsNode.parent +} + +// Unions two subsets within the disjointSet. +// +// If x or y are not in this disjoint set, the behavior is undefined. If either pointer is nil, +// this function will panic. +func (ds *disjointSet) union(x, y *disjointSetNode) { + if x == nil || y == nil { + panic("Disjoint Set union on nil sets") + } + xRoot := find(x) + yRoot := find(y) + if xRoot == nil || yRoot == nil { + return + } + + if xRoot == yRoot { + return + } + + if xRoot.rank < yRoot.rank { + xRoot.parent = yRoot + } else if yRoot.rank < xRoot.rank { + yRoot.parent = xRoot + } else { + yRoot.parent = xRoot + xRoot.rank++ + } +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/doc.go b/vendor/gonum.org/v1/gonum/graph/path/doc.go new file mode 100644 index 0000000..e02e2d5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package path provides graph path finding functions. +package path // import "gonum.org/v1/gonum/graph/path" diff --git a/vendor/gonum.org/v1/gonum/graph/path/dynamic/doc.go b/vendor/gonum.org/v1/gonum/graph/path/dynamic/doc.go new file mode 100644 index 0000000..9030765 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/dynamic/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dynamic provides incremental heuristic graph path finding functions. +package dynamic // import "gonum.org/v1/gonum/graph/path/dynamic" diff --git a/vendor/gonum.org/v1/gonum/graph/path/dynamic/dstarlite.go b/vendor/gonum.org/v1/gonum/graph/path/dynamic/dstarlite.go new file mode 100644 index 0000000..3dc0acc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/dynamic/dstarlite.go @@ -0,0 +1,502 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dynamic + +import ( + "container/heap" + "fmt" + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/path" + "gonum.org/v1/gonum/graph/simple" +) + +// DStarLite implements the D* Lite dynamic re-planning path search algorithm. +// +// doi:10.1109/tro.2004.838026 and ISBN:0-262-51129-0 pp476-483 +// +type DStarLite struct { + s, t *dStarLiteNode + last *dStarLiteNode + + model WorldModel + queue dStarLiteQueue + keyModifier float64 + + weight path.Weighting + heuristic path.Heuristic +} + +// WorldModel is a mutable weighted directed graph that returns nodes identified +// by id number. +type WorldModel interface { + graph.WeightedBuilder + graph.WeightedDirected +} + +// NewDStarLite returns a new DStarLite planner for the path from s to t in g using the +// heuristic h. The world model, m, is used to store shortest path information during path +// planning. The world model must be an empty graph when NewDStarLite is called. +// +// If h is nil, the DStarLite will use the g.HeuristicCost method if g implements +// path.HeuristicCoster, falling back to path.NullHeuristic otherwise. If the graph does not +// implement graph.Weighter, path.UniformCost is used. NewDStarLite will panic if g has +// a negative edge weight. +func NewDStarLite(s, t graph.Node, g graph.Graph, h path.Heuristic, m WorldModel) *DStarLite { + /* + procedure Initialize() + {02”} U = ∅; + {03”} k_m = 0; + {04”} for all s ∈ S rhs(s) = g(s) = ∞; + {05”} rhs(s_goal) = 0; + {06”} U.Insert(s_goal, [h(s_start, s_goal); 0]); + */ + + d := &DStarLite{ + s: newDStarLiteNode(s), + t: newDStarLiteNode(t), // badKey is overwritten below. 
+ + model: m, + + heuristic: h, + } + d.t.rhs = 0 + + /* + procedure Main() + {29”} s_last = s_start; + {30”} Initialize(); + */ + d.last = d.s + + if wg, ok := g.(graph.Weighted); ok { + d.weight = wg.Weight + } else { + d.weight = path.UniformCost(g) + } + if d.heuristic == nil { + if g, ok := g.(path.HeuristicCoster); ok { + d.heuristic = g.HeuristicCost + } else { + d.heuristic = path.NullHeuristic + } + } + + d.queue.insert(d.t, key{d.heuristic(s, t), 0}) + + for _, n := range graph.NodesOf(g.Nodes()) { + switch n.ID() { + case d.s.ID(): + d.model.AddNode(d.s) + case d.t.ID(): + d.model.AddNode(d.t) + default: + d.model.AddNode(newDStarLiteNode(n)) + } + } + for _, u := range graph.NodesOf(d.model.Nodes()) { + uid := u.ID() + for _, v := range graph.NodesOf(g.From(uid)) { + vid := v.ID() + w := edgeWeight(d.weight, uid, vid) + if w < 0 { + panic("D* Lite: negative edge weight") + } + d.model.SetWeightedEdge(simple.WeightedEdge{F: u, T: d.model.Node(vid), W: w}) + } + } + + /* + procedure Main() + {31”} ComputeShortestPath(); + */ + d.findShortestPath() + + return d +} + +// edgeWeight is a helper function that returns the weight of the edge between +// two connected nodes, u and v, using the provided weight function. It panics +// if there is no edge between u and v. +func edgeWeight(weight path.Weighting, uid, vid int64) float64 { + w, ok := weight(uid, vid) + if !ok { + panic("D* Lite: unexpected invalid weight") + } + return w +} + +// keyFor is the CalculateKey procedure in the D* Lite papers. +func (d *DStarLite) keyFor(s *dStarLiteNode) key { + /* + procedure CalculateKey(s) + {01”} return [min(g(s), rhs(s)) + h(s_start, s) + k_m; min(g(s), rhs(s))]; + */ + k := key{1: math.Min(s.g, s.rhs)} + k[0] = k[1] + d.heuristic(d.s.Node, s.Node) + d.keyModifier + return k +} + +// update is the UpdateVertex procedure in the D* Lite papers. +func (d *DStarLite) update(u *dStarLiteNode) { + /* + procedure UpdateVertex(u) + {07”} if (g(u) != rhs(u) AND u ∈ U) U.Update(u,CalculateKey(u)); + {08”} else if (g(u) != rhs(u) AND u /∈ U) U.Insert(u,CalculateKey(u)); + {09”} else if (g(u) = rhs(u) AND u ∈ U) U.Remove(u); + */ + inQueue := u.inQueue() + switch { + case inQueue && u.g != u.rhs: + d.queue.update(u, d.keyFor(u)) + case !inQueue && u.g != u.rhs: + d.queue.insert(u, d.keyFor(u)) + case inQueue && u.g == u.rhs: + d.queue.remove(u) + } +} + +// findShortestPath is the ComputeShortestPath procedure in the D* Lite papers. +func (d *DStarLite) findShortestPath() { + /* + procedure ComputeShortestPath() + {10”} while (U.TopKey() < CalculateKey(s_start) OR rhs(s_start) > g(s_start)) + {11”} u = U.Top(); + {12”} k_old = U.TopKey(); + {13”} k_new = CalculateKey(u); + {14”} if(k_old < k_new) + {15”} U.Update(u, k_new); + {16”} else if (g(u) > rhs(u)) + {17”} g(u) = rhs(u); + {18”} U.Remove(u); + {19”} for all s ∈ Pred(u) + {20”} if (s != s_goal) rhs(s) = min(rhs(s), c(s, u) + g(u)); + {21”} UpdateVertex(s); + {22”} else + {23”} g_old = g(u); + {24”} g(u) = ∞; + {25”} for all s ∈ Pred(u) ∪ {u} + {26”} if (rhs(s) = c(s, u) + g_old) + {27”} if (s != s_goal) rhs(s) = min s'∈Succ(s)(c(s, s') + g(s')); + {28”} UpdateVertex(s); + */ + for d.queue.Len() != 0 { // We use d.queue.Len since d.queue does not return an infinite key when empty. 
+ u := d.queue.top() + if !u.key.less(d.keyFor(d.s)) && d.s.rhs <= d.s.g { + break + } + uid := u.ID() + switch kNew := d.keyFor(u); { + case u.key.less(kNew): + d.queue.update(u, kNew) + case u.g > u.rhs: + u.g = u.rhs + d.queue.remove(u) + for _, _s := range graph.NodesOf(d.model.To(uid)) { + s := _s.(*dStarLiteNode) + sid := s.ID() + if sid != d.t.ID() { + s.rhs = math.Min(s.rhs, edgeWeight(d.model.Weight, sid, uid)+u.g) + } + d.update(s) + } + default: + gOld := u.g + u.g = math.Inf(1) + for _, _s := range append(graph.NodesOf(d.model.To(uid)), u) { + s := _s.(*dStarLiteNode) + sid := s.ID() + if s.rhs == edgeWeight(d.model.Weight, sid, uid)+gOld { + if s.ID() != d.t.ID() { + s.rhs = math.Inf(1) + for _, t := range graph.NodesOf(d.model.From(sid)) { + tid := t.ID() + s.rhs = math.Min(s.rhs, edgeWeight(d.model.Weight, sid, tid)+t.(*dStarLiteNode).g) + } + } + } + d.update(s) + } + } + } +} + +// Step performs one movement step along the best path towards the goal. +// It returns false if no further progression toward the goal can be +// achieved, either because the goal has been reached or because there +// is no path. +func (d *DStarLite) Step() bool { + /* + procedure Main() + {32”} while (s_start != s_goal) + {33”} // if (rhs(s_start) = ∞) then there is no known path + {34”} s_start = argmin s'∈Succ(s_start)(c(s_start, s') + g(s')); + */ + if d.s.ID() == d.t.ID() { + return false + } + if math.IsInf(d.s.rhs, 1) { + return false + } + + // We use rhs comparison to break ties + // between coequally weighted nodes. + rhs := math.Inf(1) + min := math.Inf(1) + + var next *dStarLiteNode + dsid := d.s.ID() + for _, _s := range graph.NodesOf(d.model.From(dsid)) { + s := _s.(*dStarLiteNode) + w := edgeWeight(d.model.Weight, dsid, s.ID()) + s.g + if w < min || (w == min && s.rhs < rhs) { + next = s + min = w + rhs = s.rhs + } + } + d.s = next + + /* + procedure Main() + {35”} Move to s_start; + */ + return true +} + +// MoveTo moves to n in the world graph. +func (d *DStarLite) MoveTo(n graph.Node) { + d.last = d.s + d.s = d.model.Node(n.ID()).(*dStarLiteNode) + d.keyModifier += d.heuristic(d.last, d.s) +} + +// UpdateWorld updates or adds edges in the world graph. UpdateWorld will +// panic if changes include a negative edge weight. 
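+//
+// A typical replanning loop interleaves movement with world updates;
+// observeChangedEdges below is a hypothetical sensing step supplied by
+// the caller:
+//
+//	for d.Step() {
+//		d.UpdateWorld(observeChangedEdges(d.Here()))
+//	}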
+func (d *DStarLite) UpdateWorld(changes []graph.Edge) { + /* + procedure Main() + {36”} Scan graph for changed edge costs; + {37”} if any edge costs changed + {38”} k_m = k_m + h(s_last, s_start); + {39”} s_last = s_start; + {40”} for all directed edges (u, v) with changed edge costs + {41”} c_old = c(u, v); + {42”} Update the edge cost c(u, v); + {43”} if (c_old > c(u, v)) + {44”} if (u != s_goal) rhs(u) = min(rhs(u), c(u, v) + g(v)); + {45”} else if (rhs(u) = c_old + g(v)) + {46”} if (u != s_goal) rhs(u) = min s'∈Succ(u)(c(u, s') + g(s')); + {47”} UpdateVertex(u); + {48”} ComputeShortestPath() + */ + if len(changes) == 0 { + return + } + d.keyModifier += d.heuristic(d.last, d.s) + d.last = d.s + for _, e := range changes { + from := e.From() + fid := from.ID() + to := e.To() + tid := to.ID() + c, _ := d.weight(fid, tid) + if c < 0 { + panic("D* Lite: negative edge weight") + } + cOld, _ := d.model.Weight(fid, tid) + u := d.worldNodeFor(from) + v := d.worldNodeFor(to) + d.model.SetWeightedEdge(simple.WeightedEdge{F: u, T: v, W: c}) + uid := u.ID() + if cOld > c { + if uid != d.t.ID() { + u.rhs = math.Min(u.rhs, c+v.g) + } + } else if u.rhs == cOld+v.g { + if uid != d.t.ID() { + u.rhs = math.Inf(1) + for _, t := range graph.NodesOf(d.model.From(uid)) { + u.rhs = math.Min(u.rhs, edgeWeight(d.model.Weight, uid, t.ID())+t.(*dStarLiteNode).g) + } + } + } + d.update(u) + } + d.findShortestPath() +} + +func (d *DStarLite) worldNodeFor(n graph.Node) *dStarLiteNode { + switch w := d.model.Node(n.ID()).(type) { + case *dStarLiteNode: + return w + case graph.Node: + panic(fmt.Sprintf("D* Lite: illegal world model node type: %T", w)) + default: + return newDStarLiteNode(n) + } +} + +// Here returns the current location. +func (d *DStarLite) Here() graph.Node { + return d.s.Node +} + +// Path returns the path from the current location to the goal and the +// weight of the path. +func (d *DStarLite) Path() (p []graph.Node, weight float64) { + u := d.s + p = []graph.Node{u.Node} + for u.ID() != d.t.ID() { + if math.IsInf(u.rhs, 1) { + return nil, math.Inf(1) + } + + // We use stored rhs comparison to break + // ties between calculated rhs-coequal nodes. + rhsMin := math.Inf(1) + min := math.Inf(1) + var ( + next *dStarLiteNode + cost float64 + ) + uid := u.ID() + for _, _v := range graph.NodesOf(d.model.From(uid)) { + v := _v.(*dStarLiteNode) + vid := v.ID() + w := edgeWeight(d.model.Weight, uid, vid) + if rhs := w + v.g; rhs < min || (rhs == min && v.rhs < rhsMin) { + next = v + min = rhs + rhsMin = v.rhs + cost = w + } + } + if next == nil { + return nil, math.NaN() + } + u = next + weight += cost + p = append(p, u.Node) + } + return p, weight +} + +/* +The pseudocode uses the following functions to manage the priority +queue: + + * U.Top() returns a vertex with the smallest priority of all + vertices in priority queue U. + * U.TopKey() returns the smallest priority of all vertices in + priority queue U. (If is empty, then U.TopKey() returns [∞;∞].) + * U.Pop() deletes the vertex with the smallest priority in + priority queue U and returns the vertex. + * U.Insert(s, k) inserts vertex s into priority queue with + priority k. + * U.Update(s, k) changes the priority of vertex s in priority + queue U to k. (It does nothing if the current priority of vertex + s already equals k.) + * Finally, U.Remove(s) removes vertex s from priority queue U. +*/ + +// key is a D* Lite priority queue key. +type key [2]float64 + +var badKey = key{math.NaN(), math.NaN()} + +// less returns whether k is less than other. 
From ISBN:0-262-51129-0 pp476-483: +// +// k ≤ k' iff k₁ < k'₁ OR (k₁ == k'₁ AND k₂ ≤ k'₂) +// +func (k key) less(other key) bool { + if k != k || other != other { + panic("D* Lite: poisoned key") + } + return k[0] < other[0] || (k[0] == other[0] && k[1] < other[1]) +} + +// dStarLiteNode adds D* Lite accounting to a graph.Node. +type dStarLiteNode struct { + graph.Node + key key + idx int + rhs float64 + g float64 +} + +// newDStarLiteNode returns a dStarLite node that is in a legal state +// for existence outside the DStarLite priority queue. +func newDStarLiteNode(n graph.Node) *dStarLiteNode { + return &dStarLiteNode{ + Node: n, + rhs: math.Inf(1), + g: math.Inf(1), + key: badKey, + idx: -1, + } +} + +// inQueue returns whether the node is in the queue. +func (q *dStarLiteNode) inQueue() bool { + return q.idx >= 0 +} + +// dStarLiteQueue is a D* Lite priority queue. +type dStarLiteQueue []*dStarLiteNode + +func (q dStarLiteQueue) Less(i, j int) bool { + return q[i].key.less(q[j].key) +} + +func (q dStarLiteQueue) Swap(i, j int) { + q[i], q[j] = q[j], q[i] + q[i].idx = i + q[j].idx = j +} + +func (q dStarLiteQueue) Len() int { + return len(q) +} + +func (q *dStarLiteQueue) Push(x interface{}) { + n := x.(*dStarLiteNode) + n.idx = len(*q) + *q = append(*q, n) +} + +func (q *dStarLiteQueue) Pop() interface{} { + n := (*q)[len(*q)-1] + n.idx = -1 + *q = (*q)[:len(*q)-1] + return n +} + +// top returns the top node in the queue. Note that instead of +// returning a key [∞;∞] when q is empty, the caller checks for +// an empty queue by calling q.Len. +func (q dStarLiteQueue) top() *dStarLiteNode { + return q[0] +} + +// insert puts the node u into the queue with the key k. +func (q *dStarLiteQueue) insert(u *dStarLiteNode, k key) { + u.key = k + heap.Push(q, u) +} + +// update updates the node in the queue identified by id with the key k. +func (q *dStarLiteQueue) update(n *dStarLiteNode, k key) { + n.key = k + heap.Fix(q, n.idx) +} + +// remove removes the node identified by id from the queue. +func (q *dStarLiteQueue) remove(n *dStarLiteNode) { + heap.Remove(q, n.idx) + n.key = badKey + n.idx = -1 +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/floydwarshall.go b/vendor/gonum.org/v1/gonum/graph/path/floydwarshall.go new file mode 100644 index 0000000..398e883 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/floydwarshall.go @@ -0,0 +1,91 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + + "gonum.org/v1/gonum/graph" +) + +// FloydWarshall returns a shortest-path tree for the graph g or false indicating +// that a negative cycle exists in the graph. If a negative cycle exists in the graph +// the returned paths will be valid and edge weights on the negative cycle will be +// set to -Inf. If the graph does not implement Weighted, UniformCost is used. +// +// The time complexity of FloydWarshall is O(|V|^3). 
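// A sketch of a FloydWarshall call from client code; an illustration rather
// than vendored source, assuming the usual imports of graph/simple, graph/path
// and fmt. Between, used to read the result back, is defined in shortest.go
// later in this patch.
func allPairsExample() {
	g := simple.NewWeightedDirectedGraph(0, math.Inf(1))
	g.SetWeightedEdge(simple.WeightedEdge{F: simple.Node(0), T: simple.Node(1), W: 1})
	g.SetWeightedEdge(simple.WeightedEdge{F: simple.Node(1), T: simple.Node(2), W: 2})

	paths, ok := path.FloydWarshall(g)
	if !ok {
		fmt.Println("the graph contains a negative cycle")
		return
	}
	p, w, unique := paths.Between(0, 2)
	fmt.Println(p, w, unique) // the path 0→1→2 with weight 3, and unique == true
}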
+func FloydWarshall(g graph.Graph) (paths AllShortest, ok bool) { + var weight Weighting + if wg, ok := g.(Weighted); ok { + weight = wg.Weight + } else { + weight = UniformCost(g) + } + + nodes := graph.NodesOf(g.Nodes()) + paths = newAllShortest(nodes, true) + for i, u := range nodes { + paths.dist.Set(i, i, 0) + uid := u.ID() + to := g.From(uid) + for to.Next() { + vid := to.Node().ID() + j := paths.indexOf[vid] + w, ok := weight(uid, vid) + if !ok { + panic("floyd-warshall: unexpected invalid weight") + } + paths.set(i, j, w, j) + } + } + + for k := range nodes { + for i := range nodes { + for j := range nodes { + ij := paths.dist.At(i, j) + joint := paths.dist.At(i, k) + paths.dist.At(k, j) + if ij > joint { + paths.set(i, j, joint, paths.at(i, k)...) + } else if ij-joint == 0 { + paths.add(i, j, paths.at(i, k)...) + } + } + } + } + + ok = true + for i := range nodes { + if paths.dist.At(i, i) < 0 { + ok = false + break + } + } + + if !ok { + // If we have a negative cycle, mark all + // the edges in the cycles with NaN(0xdefaced) + // weight. These weights are internal, being + // returned as -Inf in user calls. + + d := paths.dist + for i := range nodes { + for j := range nodes { + for k := range nodes { + if math.IsInf(d.At(i, k), 1) || math.IsInf(d.At(k, j), 1) { + continue + } + if d.At(k, k) < 0 { + d.Set(k, k, defaced) + d.Set(i, j, defaced) + } else if math.Float64bits(d.At(k, k)) == defacedBits { + d.Set(i, j, defaced) + } + } + } + } + } + + return paths, ok +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/doc.go b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/doc.go new file mode 100644 index 0000000..2845d13 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testsgraphs provides a number of graphs used for testing +// routines in the path and path/dynamic packages. +package testgraphs // import "gonum.org/v1/gonum/graph/path/internal/testgraphs" diff --git a/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/grid.go b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/grid.go new file mode 100644 index 0000000..5af7ed5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/grid.go @@ -0,0 +1,303 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testgraphs + +import ( + "errors" + "fmt" + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/graph/simple" +) + +const ( + Closed = '*' // Closed is the closed grid node representation. + Open = '.' // Open is the open grid node repesentation. + Unknown = '?' // Unknown is the unknown grid node repesentation. +) + +// Grid is a 2D grid planar undirected graph. +type Grid struct { + // AllowDiagonal specifies whether + // diagonally adjacent nodes can + // be connected by an edge. + AllowDiagonal bool + // UnitEdgeWeight specifies whether + // finite edge weights are returned as + // the unit length. Otherwise edge + // weights are the Euclidean distance + // between connected nodes. + UnitEdgeWeight bool + + // AllVisible specifies whether + // non-open nodes are visible + // in calls to Nodes and HasNode. 
+ AllVisible bool + + open []bool + r, c int +} + +// NewGrid returns an r by c grid with all positions +// set to the specified open state. +func NewGrid(r, c int, open bool) *Grid { + states := make([]bool, r*c) + if open { + for i := range states { + states[i] = true + } + } + return &Grid{ + open: states, + r: r, + c: c, + } +} + +// NewGridFrom returns a grid specified by the rows strings. All rows must +// be the same length and must only contain the Open or Closed characters, +// NewGridFrom will panic otherwise. +func NewGridFrom(rows ...string) *Grid { + if len(rows) == 0 { + return nil + } + for i, r := range rows[:len(rows)-1] { + if len(r) != len(rows[i+1]) { + panic("grid: unequal row lengths") + } + } + states := make([]bool, 0, len(rows)*len(rows[0])) + for _, r := range rows { + for _, b := range r { + switch b { + case Closed: + states = append(states, false) + case Open: + states = append(states, true) + default: + panic(fmt.Sprintf("grid: invalid state: %q", r)) + } + } + } + return &Grid{ + open: states, + r: len(rows), + c: len(rows[0]), + } +} + +// Nodes returns all the open nodes in the grid if AllVisible is +// false, otherwise all nodes are returned. +func (g *Grid) Nodes() graph.Nodes { + var nodes []graph.Node + for id, ok := range g.open { + if ok || g.AllVisible { + nodes = append(nodes, simple.Node(id)) + } + } + return iterator.NewOrderedNodes(nodes) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *Grid) Node(id int64) graph.Node { + if g.has(id) { + return simple.Node(id) + } + return nil +} + +// has returns whether id represents a node in the grid. The state of +// the AllVisible field determines whether a non-open node is present. +func (g *Grid) has(id int64) bool { + return 0 <= id && id < int64(len(g.open)) && (g.AllVisible || g.open[id]) +} + +// HasOpen returns whether n is an open node in the grid. +func (g *Grid) HasOpen(id int64) bool { + return 0 <= id && id < int64(len(g.open)) && g.open[id] +} + +// Set sets the node at position (r, c) to the specified open state. +func (g *Grid) Set(r, c int, open bool) { + if r < 0 || r >= g.r { + panic("grid: illegal row index") + } + if c < 0 || c >= g.c { + panic("grid: illegal column index") + } + g.open[r*g.c+c] = open +} + +// Dims returns the dimensions of the grid. +func (g *Grid) Dims() (r, c int) { + return g.r, g.c +} + +// RowCol returns the row and column of the id. RowCol will panic if the +// node id is outside the range of the grid. +func (g *Grid) RowCol(id int64) (r, c int) { + if id < 0 || int64(len(g.open)) <= id { + panic("grid: illegal node id") + } + return int(id) / g.c, int(id) % g.c +} + +// XY returns the cartesian coordinates of n. If n is not a node +// in the grid, (NaN, NaN) is returned. +func (g *Grid) XY(id int64) (x, y float64) { + if !g.has(id) { + return math.NaN(), math.NaN() + } + r, c := g.RowCol(id) + return float64(c), float64(r) +} + +// NodeAt returns the node at (r, c). The returned node may be open or closed. +func (g *Grid) NodeAt(r, c int) graph.Node { + if r < 0 || r >= g.r || c < 0 || c >= g.c { + return nil + } + return simple.Node(r*g.c + c) +} + +// From returns all the nodes reachable from u. Reachabilty requires that both +// ends of an edge must be open. 
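// A sketch of the constructors and setters above, written as it would appear
// in this package's own tests (the package is internal to gonum); not part of
// the vendored source.
func exampleGrid() *Grid {
	g := NewGridFrom(
		"*..*",
		"**.*",
		"...*",
	)
	g.AllowDiagonal = true // also connect diagonally adjacent open cells
	g.Set(0, 0, true)      // open the top-left corner
	r, c := g.Dims()       // r == 3, c == 4
	_, _ = r, c
	return g
}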
+func (g *Grid) From(uid int64) graph.Nodes { + if !g.HasOpen(uid) { + return graph.Empty + } + nr, nc := g.RowCol(uid) + var to []graph.Node + for r := nr - 1; r <= nr+1; r++ { + for c := nc - 1; c <= nc+1; c++ { + if v := g.NodeAt(r, c); v != nil && g.HasEdgeBetween(uid, v.ID()) { + to = append(to, v) + } + } + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} + +// HasEdgeBetween returns whether there is an edge between u and v. +func (g *Grid) HasEdgeBetween(uid, vid int64) bool { + if !g.HasOpen(uid) || !g.HasOpen(vid) || uid == vid { + return false + } + ur, uc := g.RowCol(uid) + vr, vc := g.RowCol(vid) + if abs(ur-vr) > 1 || abs(uc-vc) > 1 { + return false + } + return g.AllowDiagonal || ur == vr || uc == vc +} + +func abs(i int) int { + if i < 0 { + return -i + } + return i +} + +// Edge returns the edge between u and v. +func (g *Grid) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdge returns the weighted edge between u and v. +func (g *Grid) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between u and v. +func (g *Grid) EdgeBetween(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdgeBetween returns the weighted edge between u and v. +func (g *Grid) WeightedEdgeBetween(uid, vid int64) graph.WeightedEdge { + if g.HasEdgeBetween(uid, vid) { + if !g.AllowDiagonal || g.UnitEdgeWeight { + return simple.WeightedEdge{F: simple.Node(uid), T: simple.Node(vid), W: 1} + } + ux, uy := g.XY(uid) + vx, vy := g.XY(vid) + return simple.WeightedEdge{F: simple.Node(uid), T: simple.Node(vid), W: math.Hypot(ux-vx, uy-vy)} + } + return nil +} + +// Weight returns the weight of the given edge. +func (g *Grid) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return 0, true + } + if !g.HasEdgeBetween(xid, yid) { + return math.Inf(1), false + } + if e := g.EdgeBetween(xid, yid); e != nil { + if !g.AllowDiagonal || g.UnitEdgeWeight { + return 1, true + } + ux, uy := g.XY(e.From().ID()) + vx, vy := g.XY(e.To().ID()) + return math.Hypot(ux-vx, uy-vy), true + } + return math.Inf(1), true +} + +// String returns a string representation of the grid. +func (g *Grid) String() string { + b, _ := g.Render(nil) + return string(b) +} + +// Render returns a text representation of the graph +// with the given path included. If the path is not a path +// in the grid Render returns a non-nil error and the +// path up to that point. +func (g *Grid) Render(path []graph.Node) ([]byte, error) { + b := make([]byte, g.r*(g.c+1)-1) + for r := 0; r < g.r; r++ { + for c := 0; c < g.c; c++ { + if g.open[r*g.c+c] { + b[r*(g.c+1)+c] = Open + } else { + b[r*(g.c+1)+c] = Closed + } + } + if r < g.r-1 { + b[r*(g.c+1)+g.c] = '\n' + } + } + + // We don't use topo.IsPathIn at the outset because we + // want to draw as much as possible before failing. + for i, n := range path { + id := n.ID() + if !g.has(id) || (i != 0 && !g.HasEdgeBetween(path[i-1].ID(), id)) { + if 0 <= id && id < int64(len(g.open)) { + r, c := g.RowCol(id) + b[r*(g.c+1)+c] = '!' 
+ } + return b, errors.New("grid: not a path in graph") + } + r, c := g.RowCol(id) + switch i { + case len(path) - 1: + b[r*(g.c+1)+c] = 'G' + case 0: + b[r*(g.c+1)+c] = 'S' + default: + b[r*(g.c+1)+c] = 'o' + } + } + return b, nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/limited.go b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/limited.go new file mode 100644 index 0000000..bc08674 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/limited.go @@ -0,0 +1,329 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testgraphs + +import ( + "errors" + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/graph/simple" +) + +// LimitedVisionGrid is a 2D grid planar undirected graph where the capacity +// to determine the presence of edges is dependent on the current and past +// positions on the grid. In the absence of information, the grid is +// optimistic. +type LimitedVisionGrid struct { + Grid *Grid + + // Location is the current + // location on the grid. + Location graph.Node + + // VisionRadius specifies how far + // away edges can be detected. + VisionRadius float64 + + // Known holds a store of known + // nodes, if not nil. + Known map[int64]bool +} + +// MoveTo moves to the node n on the grid and returns a slice of newly seen and +// already known edges. MoveTo panics if n is nil. +func (l *LimitedVisionGrid) MoveTo(n graph.Node) (new, old []graph.Edge) { + l.Location = n + row, column := l.RowCol(n.ID()) + x := float64(column) + y := float64(row) + seen := make(map[[2]int64]bool) + bound := int(l.VisionRadius + 0.5) + for r := row - bound; r <= row+bound; r++ { + for c := column - bound; c <= column+bound; c++ { + u := l.NodeAt(r, c) + if u == nil { + continue + } + uid := u.ID() + ux, uy := l.XY(uid) + if math.Hypot(x-ux, y-uy) > l.VisionRadius { + continue + } + for _, v := range l.allPossibleFrom(uid) { + vid := v.ID() + if seen[[2]int64{uid, vid}] { + continue + } + seen[[2]int64{uid, vid}] = true + + vx, vy := l.XY(vid) + if !l.Known[vid] && math.Hypot(x-vx, y-vy) > l.VisionRadius { + continue + } + + e := simple.Edge{F: u, T: v} + if !l.Known[uid] || !l.Known[vid] { + new = append(new, e) + } else { + old = append(old, e) + } + } + } + } + + if l.Known != nil { + for r := row - bound; r <= row+bound; r++ { + for c := column - bound; c <= column+bound; c++ { + u := l.NodeAt(r, c) + if u == nil { + continue + } + uid := u.ID() + ux, uy := l.XY(uid) + if math.Hypot(x-ux, y-uy) > l.VisionRadius { + continue + } + for _, v := range l.allPossibleFrom(uid) { + vid := v.ID() + vx, vy := l.XY(vid) + if math.Hypot(x-vx, y-vy) > l.VisionRadius { + continue + } + l.Known[vid] = true + } + l.Known[uid] = true + } + } + + } + + return new, old +} + +// allPossibleFrom returns all the nodes possibly reachable from u. 
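// A sketch of driving the type above, written as in-package test code rather
// than vendored source; the grid layout and vision radius are arbitrary.
func exampleLimitedVision() (*LimitedVisionGrid, *Grid) {
	grid := NewGridFrom(
		"....",
		".*.*",
		"....",
	)
	l := &LimitedVisionGrid{
		Grid:         grid,
		VisionRadius: 1.5, // sees one step away, including diagonal neighbours
		Location:     grid.NodeAt(0, 0),
		Known:        make(map[int64]bool),
	}
	newlySeen, alreadyKnown := l.MoveTo(grid.NodeAt(1, 0))
	_, _ = newlySeen, alreadyKnown // edges revealed by the move vs. already known
	return l, grid
}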
+func (l *LimitedVisionGrid) allPossibleFrom(uid int64) []graph.Node { + if !l.has(uid) { + return nil + } + nr, nc := l.RowCol(uid) + var to []graph.Node + for r := nr - 1; r <= nr+1; r++ { + for c := nc - 1; c <= nc+1; c++ { + v := l.NodeAt(r, c) + if v == nil || uid == v.ID() { + continue + } + ur, uc := l.RowCol(uid) + vr, vc := l.RowCol(v.ID()) + if abs(ur-vr) > 1 || abs(uc-vc) > 1 { + continue + } + if !l.Grid.AllowDiagonal && ur != vr && uc != vc { + continue + } + to = append(to, v) + } + } + return to +} + +// RowCol returns the row and column of the id. RowCol will panic if the +// node id is outside the range of the grid. +func (l *LimitedVisionGrid) RowCol(id int64) (r, c int) { + return l.Grid.RowCol(id) +} + +// XY returns the cartesian coordinates of n. If n is not a node +// in the grid, (NaN, NaN) is returned. +func (l *LimitedVisionGrid) XY(id int64) (x, y float64) { + if !l.has(id) { + return math.NaN(), math.NaN() + } + r, c := l.RowCol(id) + return float64(c), float64(r) +} + +// Nodes returns all the nodes in the grid. +func (l *LimitedVisionGrid) Nodes() graph.Nodes { + nodes := make([]graph.Node, 0, len(l.Grid.open)) + for id := range l.Grid.open { + nodes = append(nodes, simple.Node(id)) + } + return iterator.NewOrderedNodes(nodes) +} + +// NodeAt returns the node at (r, c). The returned node may be open or closed. +func (l *LimitedVisionGrid) NodeAt(r, c int) graph.Node { + return l.Grid.NodeAt(r, c) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (l *LimitedVisionGrid) Node(id int64) graph.Node { + if l.has(id) { + return simple.Node(id) + } + return nil +} + +// has returns whether the node with the given ID is a node in the grid. +func (l *LimitedVisionGrid) has(id int64) bool { + return 0 <= id && id < int64(len(l.Grid.open)) +} + +// From returns nodes that are optimistically reachable from u. +func (l *LimitedVisionGrid) From(uid int64) graph.Nodes { + if !l.has(uid) { + return graph.Empty + } + + nr, nc := l.RowCol(uid) + var to []graph.Node + for r := nr - 1; r <= nr+1; r++ { + for c := nc - 1; c <= nc+1; c++ { + if v := l.NodeAt(r, c); v != nil && l.HasEdgeBetween(uid, v.ID()) { + to = append(to, v) + } + } + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} + +// HasEdgeBetween optimistically returns whether an edge is exists between u and v. +func (l *LimitedVisionGrid) HasEdgeBetween(uid, vid int64) bool { + if uid == vid { + return false + } + ur, uc := l.RowCol(uid) + vr, vc := l.RowCol(vid) + if abs(ur-vr) > 1 || abs(uc-vc) > 1 { + return false + } + if !l.Grid.AllowDiagonal && ur != vr && uc != vc { + return false + } + + x, y := l.XY(l.Location.ID()) + ux, uy := l.XY(uid) + vx, vy := l.XY(vid) + uKnown := l.Known[uid] || math.Hypot(x-ux, y-uy) <= l.VisionRadius + vKnown := l.Known[vid] || math.Hypot(x-vx, y-vy) <= l.VisionRadius + + switch { + case uKnown && vKnown: + return l.Grid.HasEdgeBetween(uid, vid) + case uKnown: + return l.Grid.HasOpen(uid) + case vKnown: + return l.Grid.HasOpen(vid) + default: + return true + } +} + +// Edge optimistically returns the edge from u to v. +func (l *LimitedVisionGrid) Edge(uid, vid int64) graph.Edge { + return l.WeightedEdgeBetween(uid, vid) +} + +// Edge optimistically returns the weighted edge from u to v. +func (l *LimitedVisionGrid) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return l.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdgeBetween optimistically returns the edge between u and v. 
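// Continuing the exampleLimitedVision sketch above (illustration only, not
// vendored source): queries about cells the agent has not yet seen are
// answered optimistically, while walls inside the vision radius are reported
// truthfully.
func exampleOptimism() {
	l, grid := exampleLimitedVision()
	wall := grid.NodeAt(1, 1) // closed, and within vision of the current location
	seen := grid.NodeAt(0, 1) // open, and within vision
	far := grid.NodeAt(1, 3)  // closed, but not yet seen
	open := grid.NodeAt(0, 3) // open, but not yet seen
	_ = l.HasEdgeBetween(wall.ID(), seen.ID()) // false: the wall is already known
	_ = l.HasEdgeBetween(far.ID(), open.ID())  // true: optimistic about the unseen
}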
+func (l *LimitedVisionGrid) EdgeBetween(uid, vid int64) graph.Edge { + return l.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdgeBetween optimistically returns the weighted edge between u and v. +func (l *LimitedVisionGrid) WeightedEdgeBetween(uid, vid int64) graph.WeightedEdge { + if l.HasEdgeBetween(uid, vid) { + if !l.Grid.AllowDiagonal || l.Grid.UnitEdgeWeight { + return simple.WeightedEdge{F: simple.Node(uid), T: simple.Node(vid), W: 1} + } + ux, uy := l.XY(uid) + vx, vy := l.XY(vid) + return simple.WeightedEdge{F: simple.Node(uid), T: simple.Node(vid), W: math.Hypot(ux-vx, uy-vy)} + } + return nil +} + +// Weight returns the weight of the given edge. +func (l *LimitedVisionGrid) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return 0, true + } + if !l.HasEdgeBetween(xid, yid) { + return math.Inf(1), false + } + if e := l.EdgeBetween(xid, yid); e != nil { + if !l.Grid.AllowDiagonal || l.Grid.UnitEdgeWeight { + return 1, true + } + ux, uy := l.XY(e.From().ID()) + vx, vy := l.XY(e.To().ID()) + return math.Hypot(ux-vx, uy-vy), true + + } + return math.Inf(1), true +} + +// String returns a string representation of the grid. +func (l *LimitedVisionGrid) String() string { + b, _ := l.Render(nil) + return string(b) +} + +// Render returns a text representation of the graph +// with the given path included. If the path is not a path +// in the grid Render returns a non-nil error and the +// path up to that point. +func (l *LimitedVisionGrid) Render(path []graph.Node) ([]byte, error) { + rows, cols := l.Grid.Dims() + b := make([]byte, rows*(cols+1)-1) + for r := 0; r < rows; r++ { + for c := 0; c < cols; c++ { + if !l.Known[int64(r*cols+c)] { + b[r*(cols+1)+c] = Unknown + } else if l.Grid.open[r*cols+c] { + b[r*(cols+1)+c] = Open + } else { + b[r*(cols+1)+c] = Closed + } + } + if r < rows-1 { + b[r*(cols+1)+cols] = '\n' + } + } + + // We don't use topo.IsPathIn at the outset because we + // want to draw as much as possible before failing. + for i, n := range path { + id := n.ID() + if !l.has(id) || (i != 0 && !l.HasEdgeBetween(path[i-1].ID(), id)) { + if 0 <= id && id < int64(len(l.Grid.open)) { + r, c := l.RowCol(id) + b[r*(cols+1)+c] = '!' + } + return b, errors.New("grid: not a path in graph") + } + r, c := l.RowCol(id) + switch i { + case len(path) - 1: + b[r*(cols+1)+c] = 'G' + case 0: + b[r*(cols+1)+c] = 'S' + default: + b[r*(cols+1)+c] = 'o' + } + } + return b, nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/shortest.go b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/shortest.go new file mode 100644 index 0000000..f18ecc2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/internal/testgraphs/shortest.go @@ -0,0 +1,654 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testgraphs + +import ( + "fmt" + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/simple" +) + +func init() { + for _, test := range ShortestPathTests { + if len(test.WantPaths) != 1 && test.HasUniquePath { + panic(fmt.Sprintf("%q: bad shortest path test: non-unique paths marked unique", test.Name)) + } + } +} + +// ShortestPathTests are graphs used to test the static shortest path routines in path: BellmanFord, +// DijkstraAllPaths, DijkstraFrom, FloydWarshall and Johnson, and the static degenerate case for the +// dynamic shortest path routine in path/dynamic: DStarLite. 
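// A sketch of the test loop these fixtures are written for, as it would appear
// in a shortest-path test of the path package; an illustration rather than
// vendored source, here exercising DijkstraFrom (any of the routines named
// above could be swapped in).
func runShortestPathTests() {
	for _, test := range testgraphs.ShortestPathTests {
		if test.HasNegativeWeight {
			continue // Dijkstra requires non-negative edge weights
		}
		g := test.Graph()
		for _, e := range test.Edges {
			g.SetWeightedEdge(e)
		}
		pt := path.DijkstraFrom(test.Query.From(), g.(graph.Graph))
		if w := pt.WeightTo(test.Query.To().ID()); w != test.Weight {
			// A real test would compare within a tolerance and also check the
			// returned routes against test.WantPaths and test.HasUniquePath.
		}
	}
}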
+var ShortestPathTests = []struct { + Name string + Graph func() graph.WeightedEdgeAdder + Edges []simple.WeightedEdge + HasNegativeWeight bool + HasNegativeCycle bool + + Query simple.Edge + Weight float64 + WantPaths [][]int64 + HasUniquePath bool + + NoPathFor simple.Edge +}{ + // Positive weighted graphs. + { + Name: "empty directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: math.Inf(1), + + NoPathFor: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "empty undirected", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: math.Inf(1), + + NoPathFor: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "one edge directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: 1, + WantPaths: [][]int64{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge self directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(0)}, + Weight: 0, + WantPaths: [][]int64{ + {0}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge undirected", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(1), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: 1, + WantPaths: [][]int64{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "two paths directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(2), W: 2}, + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(2)}, + Weight: 2, + WantPaths: [][]int64{ + {0, 1, 2}, + {0, 2}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(1)}, + }, + { + Name: "two paths undirected", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(2), W: 2}, + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(2)}, + Weight: 2, + WantPaths: [][]int64{ + {0, 1, 2}, + {0, 2}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(4)}, + }, + { + Name: "confounding paths directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 
1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add direct edge to goal of weight 4 + {F: simple.Node(0), T: simple.Node(5), W: 4}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "confounding paths undirected", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add direct edge to goal of weight 4 + {F: simple.Node(0), T: simple.Node(5), W: 4}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(5), T: simple.Node(6)}, + }, + { + Name: "confounding paths directed 2-step", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add two step path to goal of weight 4 + {F: simple.Node(0), T: simple.Node(6), W: 2}, + {F: simple.Node(6), T: simple.Node(5), W: 2}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 6, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "confounding paths undirected 2-step", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->5 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(5), W: 1}, + + // Add two step path to goal of weight 4 + {F: simple.Node(0), T: simple.Node(6), W: 2}, + {F: simple.Node(6), T: simple.Node(5), W: 2}, + + // Add edge to a node that's still optimal + {F: simple.Node(0), T: simple.Node(2), W: 2}, + + // Add edge to 3 that's overpriced + {F: 
simple.Node(0), T: simple.Node(3), W: 4}, + + // Add very cheap edge to 4 which is a dead end + {F: simple.Node(0), T: simple.Node(4), W: 0.25}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(5)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 5}, + {0, 2, 3, 5}, + {0, 6, 5}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(5), T: simple.Node(7)}, + }, + { + Name: "zero-weight cycle directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^2 directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^2 confounding directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + // But leading to the target. + {F: simple.Node(5), T: simple.Node(4), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + {0, 1, 5, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight cycle^3 directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. 
+ {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + // With its own zero-weight cycle. + {F: simple.Node(6), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(6), W: 0}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight 3·cycle^2 confounding directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(1), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(1), W: 0}, + // With 3 of its own zero-weight cycles. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(5), W: 0}, + // Each leading to the target. + {F: simple.Node(5), T: simple.Node(4), W: 3}, + {F: simple.Node(6), T: simple.Node(4), W: 3}, + {F: simple.Node(7), T: simple.Node(4), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + {0, 1, 5, 4}, + {0, 1, 5, 6, 4}, + {0, 1, 5, 7, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight reversed 3·cycle^2 confounding directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + + // Add a zero-weight cycle. + {F: simple.Node(3), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(3), W: 0}, + // With 3 of its own zero-weight cycles. + {F: simple.Node(5), T: simple.Node(6), W: 0}, + {F: simple.Node(6), T: simple.Node(5), W: 0}, + {F: simple.Node(5), T: simple.Node(7), W: 0}, + {F: simple.Node(7), T: simple.Node(5), W: 0}, + // Each leading from the source. 
+ {F: simple.Node(0), T: simple.Node(5), W: 3}, + {F: simple.Node(0), T: simple.Node(6), W: 3}, + {F: simple.Node(0), T: simple.Node(7), W: 3}, + }, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + {0, 5, 3, 4}, + {0, 6, 5, 3, 4}, + {0, 7, 5, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight |V|·cycle^(n/|V|) directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.WeightedEdge { + e := []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + next := len(e) + 1 + + // Add n zero-weight cycles. + const n = 100 + for i := 0; i < n; i++ { + e = append(e, + simple.WeightedEdge{F: simple.Node(next + i), T: simple.Node(i), W: 0}, + simple.WeightedEdge{F: simple.Node(i), T: simple.Node(next + i), W: 0}, + ) + } + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight n·cycle directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.WeightedEdge { + e := []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + next := len(e) + 1 + + // Add n zero-weight cycles. + const n = 100 + for i := 0; i < n; i++ { + e = append(e, + simple.WeightedEdge{F: simple.Node(next + i), T: simple.Node(1), W: 0}, + simple.WeightedEdge{F: simple.Node(1), T: simple.Node(next + i), W: 0}, + ) + } + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + { + Name: "zero-weight bi-directional tree with single exit directed", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: func() []simple.WeightedEdge { + e := []simple.WeightedEdge{ + // Add a path from 0->4 of weight 4 + {F: simple.Node(0), T: simple.Node(1), W: 1}, + {F: simple.Node(1), T: simple.Node(2), W: 1}, + {F: simple.Node(2), T: simple.Node(3), W: 1}, + {F: simple.Node(3), T: simple.Node(4), W: 1}, + } + + // Make a bi-directional tree rooted at node 2 with + // a single exit to node 4 and co-equal cost from + // 2 to 4. 
+ const ( + depth = 4 + branching = 4 + ) + + next := len(e) + 1 + src := 2 + var i, last int + for l := 0; l < depth; l++ { + for i = 0; i < branching; i++ { + last = next + i + e = append(e, simple.WeightedEdge{F: simple.Node(src), T: simple.Node(last), W: 0}) + e = append(e, simple.WeightedEdge{F: simple.Node(last), T: simple.Node(src), W: 0}) + } + src = next + 1 + next += branching + } + e = append(e, simple.WeightedEdge{F: simple.Node(last), T: simple.Node(4), W: 2}) + return e + }(), + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(4)}, + Weight: 4, + WantPaths: [][]int64{ + {0, 1, 2, 3, 4}, + {0, 1, 2, 6, 10, 14, 20, 4}, + }, + HasUniquePath: false, + + NoPathFor: simple.Edge{F: simple.Node(4), T: simple.Node(5)}, + }, + + // Negative weighted graphs. + { + Name: "one edge directed negative", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(1), W: -1}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + Weight: -1, + WantPaths: [][]int64{ + {0, 1}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "one edge undirected negative", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedUndirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node(0), T: simple.Node(1), W: -1}, + }, + HasNegativeWeight: true, + HasNegativeCycle: true, + + Query: simple.Edge{F: simple.Node(0), T: simple.Node(1)}, + }, + { + Name: "wp graph negative", // http://en.wikipedia.org/w/index.php?title=Johnson%27s_algorithm&oldid=564595231 + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node('w'), T: simple.Node('z'), W: 2}, + {F: simple.Node('x'), T: simple.Node('w'), W: 6}, + {F: simple.Node('x'), T: simple.Node('y'), W: 3}, + {F: simple.Node('y'), T: simple.Node('w'), W: 4}, + {F: simple.Node('y'), T: simple.Node('z'), W: 5}, + {F: simple.Node('z'), T: simple.Node('x'), W: -7}, + {F: simple.Node('z'), T: simple.Node('y'), W: -3}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node('z'), T: simple.Node('y')}, + Weight: -4, + WantPaths: [][]int64{ + {'z', 'x', 'y'}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, + { + Name: "roughgarden negative", + Graph: func() graph.WeightedEdgeAdder { return simple.NewWeightedDirectedGraph(0, math.Inf(1)) }, + Edges: []simple.WeightedEdge{ + {F: simple.Node('a'), T: simple.Node('b'), W: -2}, + {F: simple.Node('b'), T: simple.Node('c'), W: -1}, + {F: simple.Node('c'), T: simple.Node('a'), W: 4}, + {F: simple.Node('c'), T: simple.Node('x'), W: 2}, + {F: simple.Node('c'), T: simple.Node('y'), W: -3}, + {F: simple.Node('z'), T: simple.Node('x'), W: 1}, + {F: simple.Node('z'), T: simple.Node('y'), W: -4}, + }, + HasNegativeWeight: true, + + Query: simple.Edge{F: simple.Node('a'), T: simple.Node('y')}, + Weight: -6, + WantPaths: [][]int64{ + {'a', 'b', 'c', 'y'}, + }, + HasUniquePath: true, + + NoPathFor: simple.Edge{F: simple.Node(2), T: simple.Node(3)}, + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/johnson_apsp.go b/vendor/gonum.org/v1/gonum/graph/path/johnson_apsp.go new file mode 100644 index 0000000..cc74a23 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/johnson_apsp.go @@ -0,0 +1,199 @@ +// Copyright ©2015 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/simple" +) + +// JohnsonAllPaths returns a shortest-path tree for shortest paths in the graph g. +// If the graph does not implement Weighted, UniformCost is used. If a negative cycle +// exists in g, ok will be returned false and paths will not contain valid data. +// +// The time complexity of JohnsonAllPaths is O(|V|.|E|+|V|^2.log|V|). +func JohnsonAllPaths(g graph.Graph) (paths AllShortest, ok bool) { + jg := johnsonWeightAdjuster{ + g: g, + from: g.From, + edgeTo: g.Edge, + } + if wg, ok := g.(Weighted); ok { + jg.weight = wg.Weight + } else { + jg.weight = UniformCost(g) + } + + paths = newAllShortest(graph.NodesOf(g.Nodes()), false) + + sign := int64(-1) + for { + // Choose a random node ID until we find + // one that is not in g. + jg.q = sign * rand.Int63() + if _, exists := paths.indexOf[jg.q]; !exists { + break + } + sign *= -1 + } + + jg.bellmanFord = true + jg.adjustBy, ok = BellmanFordFrom(johnsonGraphNode(jg.q), jg) + if !ok { + return paths, false + } + + jg.bellmanFord = false + dijkstraAllPaths(jg, paths) + + for i, u := range paths.nodes { + hu := jg.adjustBy.WeightTo(u.ID()) + for j, v := range paths.nodes { + if i == j { + continue + } + hv := jg.adjustBy.WeightTo(v.ID()) + paths.dist.Set(i, j, paths.dist.At(i, j)-hu+hv) + } + } + + return paths, ok +} + +type johnsonWeightAdjuster struct { + q int64 + g graph.Graph + + from func(id int64) graph.Nodes + edgeTo func(uid, vid int64) graph.Edge + weight Weighting + + bellmanFord bool + adjustBy Shortest +} + +var ( + // johnsonWeightAdjuster has the behaviour + // of a directed graph, but we don't need + // to be explicit with the type since it + // is not exported. 
+ _ graph.Graph = johnsonWeightAdjuster{} + _ graph.Weighted = johnsonWeightAdjuster{} +) + +func (g johnsonWeightAdjuster) Node(id int64) graph.Node { + if g.bellmanFord && id == g.q { + return simple.Node(id) + } + panic("path: unintended use of johnsonWeightAdjuster") +} + +func (g johnsonWeightAdjuster) Nodes() graph.Nodes { + if g.bellmanFord { + return newJohnsonNodeIterator(g.q, g.g.Nodes()) + } + return g.g.Nodes() +} + +func (g johnsonWeightAdjuster) From(id int64) graph.Nodes { + if g.bellmanFord && id == g.q { + return g.g.Nodes() + } + return g.from(id) +} + +func (g johnsonWeightAdjuster) WeightedEdge(_, _ int64) graph.WeightedEdge { + panic("path: unintended use of johnsonWeightAdjuster") +} + +func (g johnsonWeightAdjuster) Edge(uid, vid int64) graph.Edge { + if g.bellmanFord && uid == g.q && g.g.Node(vid) != nil { + return simple.Edge{F: johnsonGraphNode(g.q), T: simple.Node(vid)} + } + return g.edgeTo(uid, vid) +} + +func (g johnsonWeightAdjuster) Weight(xid, yid int64) (w float64, ok bool) { + if g.bellmanFord { + switch g.q { + case xid: + return 0, true + case yid: + return math.Inf(1), false + default: + return g.weight(xid, yid) + } + } + w, ok = g.weight(xid, yid) + return w + g.adjustBy.WeightTo(xid) - g.adjustBy.WeightTo(yid), ok +} + +func (johnsonWeightAdjuster) HasEdgeBetween(_, _ int64) bool { + panic("path: unintended use of johnsonWeightAdjuster") +} + +type johnsonGraphNode int64 + +func (n johnsonGraphNode) ID() int64 { return int64(n) } + +func newJohnsonNodeIterator(q int64, nodes graph.Nodes) *johnsonNodeIterator { + return &johnsonNodeIterator{q: q, nodes: nodes} +} + +type johnsonNodeIterator struct { + q int64 + nodes graph.Nodes + qUsed, qOK bool +} + +func (it *johnsonNodeIterator) Len() int { + var len int + if it.nodes != nil { + len = it.nodes.Len() + } + if !it.qUsed { + len++ + } + return len +} + +func (it *johnsonNodeIterator) Next() bool { + if it.nodes != nil { + ok := it.nodes.Next() + if ok { + return true + } + } + if !it.qUsed { + it.qOK = true + it.qUsed = true + return true + } + it.qOK = false + return false +} + +func (it *johnsonNodeIterator) Node() graph.Node { + if it.qOK { + return johnsonGraphNode(it.q) + } + if it.nodes == nil { + return nil + } + return it.nodes.Node() +} + +func (it *johnsonNodeIterator) Reset() { + it.qOK = false + it.qUsed = false + if it.nodes == nil { + return + } + it.nodes.Reset() +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/shortest.go b/vendor/gonum.org/v1/gonum/graph/path/shortest.go new file mode 100644 index 0000000..cce35ee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/shortest.go @@ -0,0 +1,405 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/mat" +) + +// Shortest is a shortest-path tree created by the BellmanFordFrom or DijkstraFrom +// single-source shortest path functions. +type Shortest struct { + // from holds the source node given to + // the function that returned the + // Shortest value. + from graph.Node + + // nodes hold the nodes of the analysed + // graph. 
+ nodes []graph.Node + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + indexOf map[int64]int + + // dist and next represent the shortest + // paths between nodes. + // + // Indices into dist and next are + // mapped through indexOf. + // + // dist contains the distances + // from the from node for each + // node in the graph. + dist []float64 + // next contains the shortest-path + // tree of the graph. The index is a + // linear mapping of to-dense-id. + next []int + + // hasNegativeCycle indicates + // whether the Shortest includes + // a negative cycle. This should + // be set by the function that + // returned the Shortest value. + hasNegativeCycle bool +} + +func newShortestFrom(u graph.Node, nodes []graph.Node) Shortest { + indexOf := make(map[int64]int, len(nodes)) + uid := u.ID() + for i, n := range nodes { + indexOf[n.ID()] = i + if n.ID() == uid { + u = n + } + } + + p := Shortest{ + from: u, + + nodes: nodes, + indexOf: indexOf, + + dist: make([]float64, len(nodes)), + next: make([]int, len(nodes)), + } + for i := range nodes { + p.dist[i] = math.Inf(1) + p.next[i] = -1 + } + p.dist[indexOf[uid]] = 0 + + return p +} + +// add adds a node to the Shortest, initialising its stored index and returning, and +// setting the distance and position as unconnected. add will panic if the node is +// already present. +func (p *Shortest) add(u graph.Node) int { + uid := u.ID() + if _, exists := p.indexOf[uid]; exists { + panic("shortest: adding existing node") + } + idx := len(p.nodes) + p.indexOf[uid] = idx + p.nodes = append(p.nodes, u) + p.dist = append(p.dist, math.Inf(1)) + p.next = append(p.next, -1) + return idx +} + +func (p Shortest) set(to int, weight float64, mid int) { + p.dist[to] = weight + p.next[to] = mid +} + +// From returns the starting node of the paths held by the Shortest. +func (p Shortest) From() graph.Node { return p.from } + +// WeightTo returns the weight of the minimum path to v. If the path to v includes +// a negative cycle, the returned weight will not reflect the true path weight. +func (p Shortest) WeightTo(vid int64) float64 { + to, toOK := p.indexOf[vid] + if !toOK { + return math.Inf(1) + } + return p.dist[to] +} + +// To returns a shortest path to v and the weight of the path. If the path +// to v includes a negative cycle, one pass through the cycle will be included +// in path and weight will be returned as -Inf. +func (p Shortest) To(vid int64) (path []graph.Node, weight float64) { + to, toOK := p.indexOf[vid] + if !toOK || math.IsInf(p.dist[to], 1) { + return nil, math.Inf(1) + } + from := p.indexOf[p.from.ID()] + path = []graph.Node{p.nodes[to]} + weight = math.Inf(1) + if p.hasNegativeCycle { + seen := make(set.Ints) + seen.Add(from) + for to != from { + if seen.Has(to) { + weight = math.Inf(-1) + break + } + seen.Add(to) + path = append(path, p.nodes[p.next[to]]) + to = p.next[to] + } + } else { + n := len(p.nodes) + for to != from { + path = append(path, p.nodes[p.next[to]]) + to = p.next[to] + if n < 0 { + panic("path: unexpected negative cycle") + } + n-- + } + } + ordered.Reverse(path) + return path, math.Min(weight, p.dist[p.indexOf[vid]]) +} + +// AllShortest is a shortest-path tree created by the DijkstraAllPaths, FloydWarshall +// or JohnsonAllPaths all-pairs shortest paths functions. +type AllShortest struct { + // nodes hold the nodes of the analysed + // graph. 
+ nodes []graph.Node + // indexOf contains a mapping between + // the id-dense representation of the + // graph and the potentially id-sparse + // nodes held in nodes. + indexOf map[int64]int + + // dist, next and forward represent + // the shortest paths between nodes. + // + // Indices into dist and next are + // mapped through indexOf. + // + // dist contains the pairwise + // distances between nodes. + // + // Internally, edges on negative + // cycles are given a special NaN + // weight, NaN(0xdefaced). + // This is returned to the user as + // -Inf. This approach allows -Inf + // weight edges on simple paths to be + // distinguished from -Inf weight + // paths that contain negative cycles. + // The distinction is visible to the + // user through whether then path + // returned with a -Inf weight is + // nil or contains a set of nodes. + dist *mat.Dense + // next contains the shortest-path + // tree of the graph. The first index + // is a linear mapping of from-dense-id + // and to-dense-id, to-major with a + // stride equal to len(nodes); the + // slice indexed to is the list of + // intermediates leading from the 'from' + // node to the 'to' node represented + // by dense id. + // The interpretation of next is + // dependent on the state of forward. + next [][]int + // forward indicates the direction of + // path reconstruction. Forward + // reconstruction is used for Floyd- + // Warshall and reverse is used for + // Dijkstra. + forward bool +} + +var ( + // defaced is NaN(0xdefaced) used as a marker for -Inf weight edges + // within paths containing negative cycles. Routines marking these + // edges should use this value. + defaced = floats.NaNWith(0xdefaced) + // defacedBits is the bit pattern we look for in AllShortest to + // identify the edges. + defacedBits = math.Float64bits(defaced) +) + +func newAllShortest(nodes []graph.Node, forward bool) AllShortest { + if len(nodes) == 0 { + return AllShortest{} + } + indexOf := make(map[int64]int, len(nodes)) + for i, n := range nodes { + indexOf[n.ID()] = i + } + dist := make([]float64, len(nodes)*len(nodes)) + for i := range dist { + dist[i] = math.Inf(1) + } + return AllShortest{ + nodes: nodes, + indexOf: indexOf, + + dist: mat.NewDense(len(nodes), len(nodes), dist), + next: make([][]int, len(nodes)*len(nodes)), + forward: forward, + } +} + +func (p AllShortest) at(from, to int) (mid []int) { + return p.next[from+to*len(p.nodes)] +} + +func (p AllShortest) set(from, to int, weight float64, mid ...int) { + p.dist.Set(from, to, weight) + p.next[from+to*len(p.nodes)] = append(p.next[from+to*len(p.nodes)][:0], mid...) +} + +func (p AllShortest) add(from, to int, mid ...int) { +loop: // These are likely to be rare, so just loop over collisions. + for _, k := range mid { + for _, v := range p.next[from+to*len(p.nodes)] { + if k == v { + continue loop + } + } + p.next[from+to*len(p.nodes)] = append(p.next[from+to*len(p.nodes)], k) + } +} + +// Weight returns the weight of the minimum path between u and v. +func (p AllShortest) Weight(uid, vid int64) float64 { + from, fromOK := p.indexOf[uid] + to, toOK := p.indexOf[vid] + if !fromOK || !toOK { + return math.Inf(1) + } + w := p.dist.At(from, to) + if math.Float64bits(w) == defacedBits { + return math.Inf(-1) + } + return w +} + +// Between returns a shortest path from u to v and the weight of the path. If more than +// one shortest path exists between u and v, a randomly chosen path will be returned and +// unique is returned false. 
If a cycle with zero weight exists in the path, it will not +// be included, but unique will be returned false. If a negative cycle exists on the path +// from u to v, path will be returned nil, weight will be -Inf and unique will be false. +func (p AllShortest) Between(uid, vid int64) (path []graph.Node, weight float64, unique bool) { + from, fromOK := p.indexOf[uid] + to, toOK := p.indexOf[vid] + if !fromOK || !toOK || len(p.at(from, to)) == 0 { + if uid == vid { + return []graph.Node{p.nodes[from]}, 0, true + } + return nil, math.Inf(1), false + } + + weight = p.dist.At(from, to) + if math.Float64bits(weight) == defacedBits { + return nil, math.Inf(-1), false + } + + seen := make([]int, len(p.nodes)) + for i := range seen { + seen[i] = -1 + } + var n graph.Node + if p.forward { + n = p.nodes[from] + seen[from] = 0 + } else { + n = p.nodes[to] + seen[to] = 0 + } + + path = []graph.Node{n} + unique = true + + var next int + for from != to { + c := p.at(from, to) + if len(c) != 1 { + unique = false + next = c[rand.Intn(len(c))] + } else { + next = c[0] + } + if seen[next] >= 0 { + path = path[:seen[next]] + } + seen[next] = len(path) + path = append(path, p.nodes[next]) + if p.forward { + from = next + } else { + to = next + } + } + if !p.forward { + ordered.Reverse(path) + } + + return path, weight, unique +} + +// AllBetween returns all shortest paths from u to v and the weight of the paths. Paths +// containing zero-weight cycles are not returned. If a negative cycle exists between +// u and v, paths is returned nil and weight is returned as -Inf. +func (p AllShortest) AllBetween(uid, vid int64) (paths [][]graph.Node, weight float64) { + from, fromOK := p.indexOf[uid] + to, toOK := p.indexOf[vid] + if !fromOK || !toOK || len(p.at(from, to)) == 0 { + if uid == vid { + return [][]graph.Node{{p.nodes[from]}}, 0 + } + return nil, math.Inf(1) + } + + weight = p.dist.At(from, to) + if math.Float64bits(weight) == defacedBits { + return nil, math.Inf(-1) + } + + var n graph.Node + if p.forward { + n = p.nodes[from] + } else { + n = p.nodes[to] + } + seen := make([]bool, len(p.nodes)) + paths = p.allBetween(from, to, seen, []graph.Node{n}, nil) + + return paths, weight +} + +func (p AllShortest) allBetween(from, to int, seen []bool, path []graph.Node, paths [][]graph.Node) [][]graph.Node { + if p.forward { + seen[from] = true + } else { + seen[to] = true + } + if from == to { + if path == nil { + return paths + } + if !p.forward { + ordered.Reverse(path) + } + return append(paths, path) + } + first := true + for _, n := range p.at(from, to) { + if seen[n] { + continue + } + if first { + path = append([]graph.Node(nil), path...) + first = false + } + if p.forward { + from = n + } else { + to = n + } + paths = p.allBetween(from, to, append([]bool(nil), seen...), append(path, p.nodes[n]), paths) + } + return paths +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/spanning_tree.go b/vendor/gonum.org/v1/gonum/graph/path/spanning_tree.go new file mode 100644 index 0000000..55b6192 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/spanning_tree.go @@ -0,0 +1,189 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "container/heap" + "math" + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/simple" +) + +// WeightedBuilder is a type that can add nodes and weighted edges. 
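// An illustrative compile-time check (not vendored source): the ordinary gonum
// builders satisfy this interface, so *simple.WeightedUndirectedGraph can be
// used as the destination for Prim and Kruskal below.
var _ WeightedBuilder = simple.NewWeightedUndirectedGraph(0, math.Inf(1))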
+type WeightedBuilder interface { + AddNode(graph.Node) + SetWeightedEdge(graph.WeightedEdge) +} + +// Prim generates a minimum spanning tree of g by greedy tree extension, placing +// the result in the destination, dst. If the edge weights of g are distinct +// it will be the unique minimum spanning tree of g. The destination is not cleared +// first. The weight of the minimum spanning tree is returned. If g is not connected, +// a minimum spanning forest will be constructed in dst and the sum of minimum +// spanning tree weights will be returned. +// +// Nodes and Edges from g are used to construct dst, so if the Node and Edge +// types used in g are pointer or reference-like, then the values will be shared +// between the graphs. +// +// If dst has nodes that exist in g, Prim will panic. +func Prim(dst WeightedBuilder, g graph.WeightedUndirected) float64 { + nodes := graph.NodesOf(g.Nodes()) + if len(nodes) == 0 { + return 0 + } + + q := &primQueue{ + indexOf: make(map[int64]int, len(nodes)-1), + nodes: make([]simple.WeightedEdge, 0, len(nodes)-1), + } + dst.AddNode(nodes[0]) + for _, u := range nodes[1:] { + dst.AddNode(u) + heap.Push(q, simple.WeightedEdge{F: u, W: math.Inf(1)}) + } + + u := nodes[0] + uid := u.ID() + for _, v := range graph.NodesOf(g.From(uid)) { + w, ok := g.Weight(uid, v.ID()) + if !ok { + panic("prim: unexpected invalid weight") + } + q.update(v, u, w) + } + + var w float64 + for q.Len() > 0 { + e := heap.Pop(q).(simple.WeightedEdge) + if e.To() != nil && g.HasEdgeBetween(e.From().ID(), e.To().ID()) { + dst.SetWeightedEdge(g.WeightedEdge(e.From().ID(), e.To().ID())) + w += e.Weight() + } + + u = e.From() + uid := u.ID() + for _, n := range graph.NodesOf(g.From(uid)) { + if key, ok := q.key(n); ok { + w, ok := g.Weight(uid, n.ID()) + if !ok { + panic("prim: unexpected invalid weight") + } + if w < key { + q.update(n, u, w) + } + } + } + } + return w +} + +// primQueue is a Prim's priority queue. The priority queue is a +// queue of edge From nodes keyed on the minimum edge weight to +// a node in the set of nodes already connected to the minimum +// spanning forest. +type primQueue struct { + indexOf map[int64]int + nodes []simple.WeightedEdge +} + +func (q *primQueue) Less(i, j int) bool { + return q.nodes[i].Weight() < q.nodes[j].Weight() +} + +func (q *primQueue) Swap(i, j int) { + q.indexOf[q.nodes[i].From().ID()] = j + q.indexOf[q.nodes[j].From().ID()] = i + q.nodes[i], q.nodes[j] = q.nodes[j], q.nodes[i] +} + +func (q *primQueue) Len() int { + return len(q.nodes) +} + +func (q *primQueue) Push(x interface{}) { + n := x.(simple.WeightedEdge) + q.indexOf[n.From().ID()] = len(q.nodes) + q.nodes = append(q.nodes, n) +} + +func (q *primQueue) Pop() interface{} { + n := q.nodes[len(q.nodes)-1] + q.nodes = q.nodes[:len(q.nodes)-1] + delete(q.indexOf, n.From().ID()) + return n +} + +// key returns the key for the node u and whether the node is +// in the queue. If the node is not in the queue, key is returned +// as +Inf. +func (q *primQueue) key(u graph.Node) (key float64, ok bool) { + i, ok := q.indexOf[u.ID()] + if !ok { + return math.Inf(1), false + } + return q.nodes[i].Weight(), ok +} + +// update updates u's position in the queue with the new closest +// MST-connected neighbour, v, and the key weight between u and v. 
+func (q *primQueue) update(u, v graph.Node, key float64) { + id := u.ID() + i, ok := q.indexOf[id] + if !ok { + return + } + q.nodes[i].T = v + q.nodes[i].W = key + heap.Fix(q, i) +} + +// UndirectedWeightLister is an undirected graph that returns edge weights and +// the set of edges in the graph. +type UndirectedWeightLister interface { + graph.WeightedUndirected + WeightedEdges() graph.WeightedEdges +} + +// Kruskal generates a minimum spanning tree of g by greedy tree coalescence, placing +// the result in the destination, dst. If the edge weights of g are distinct +// it will be the unique minimum spanning tree of g. The destination is not cleared +// first. The weight of the minimum spanning tree is returned. If g is not connected, +// a minimum spanning forest will be constructed in dst and the sum of minimum +// spanning tree weights will be returned. +// +// Nodes and Edges from g are used to construct dst, so if the Node and Edge +// types used in g are pointer or reference-like, then the values will be shared +// between the graphs. +// +// If dst has nodes that exist in g, Kruskal will panic. +func Kruskal(dst WeightedBuilder, g UndirectedWeightLister) float64 { + edges := graph.WeightedEdgesOf(g.WeightedEdges()) + sort.Sort(byWeight(edges)) + + ds := newDisjointSet() + for _, node := range graph.NodesOf(g.Nodes()) { + dst.AddNode(node) + ds.makeSet(node.ID()) + } + + var w float64 + for _, e := range edges { + if s1, s2 := ds.find(e.From().ID()), ds.find(e.To().ID()); s1 != s2 { + ds.union(s1, s2) + dst.SetWeightedEdge(g.WeightedEdge(e.From().ID(), e.To().ID())) + w += e.Weight() + } + } + return w +} + +type byWeight []graph.WeightedEdge + +func (e byWeight) Len() int { return len(e) } +func (e byWeight) Less(i, j int) bool { return e[i].Weight() < e[j].Weight() } +func (e byWeight) Swap(i, j int) { e[i], e[j] = e[j], e[i] } diff --git a/vendor/gonum.org/v1/gonum/graph/path/weight.go b/vendor/gonum.org/v1/gonum/graph/path/weight.go new file mode 100644 index 0000000..625dda4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/weight.go @@ -0,0 +1,53 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "math" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/traverse" +) + +// Weighted is a weighted graph. It is a subset of graph.Weighted. +type Weighted interface { + // Weight returns the weight for the edge between + // x and y with IDs xid and yid if Edge(xid, yid) + // returns a non-nil Edge. + // If x and y are the same node or there is no + // joining edge between the two nodes the weight + // value returned is implementation dependent. + // Weight returns true if an edge exists between + // x and y or if x and y have the same ID, false + // otherwise. + Weight(xid, yid int64) (w float64, ok bool) +} + +// Weighting is a mapping between a pair of nodes and a weight. It follows the +// semantics of the Weighter interface. +type Weighting func(xid, yid int64) (w float64, ok bool) + +// UniformCost returns a Weighting that returns an edge cost of 1 for existing +// edges, zero for node identity and Inf for otherwise absent edges. 
+func UniformCost(g traverse.Graph) Weighting { + return func(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return 0, true + } + if e := g.Edge(xid, yid); e != nil { + return 1, true + } + return math.Inf(1), false + } +} + +// Heuristic returns an estimate of the cost of travelling between two nodes. +type Heuristic func(x, y graph.Node) float64 + +// HeuristicCoster wraps the HeuristicCost method. A graph implementing the +// interface provides a heuristic between any two given nodes. +type HeuristicCoster interface { + HeuristicCost(x, y graph.Node) float64 +} diff --git a/vendor/gonum.org/v1/gonum/graph/path/yen_ksp.go b/vendor/gonum.org/v1/gonum/graph/path/yen_ksp.go new file mode 100644 index 0000000..315c28f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/path/yen_ksp.go @@ -0,0 +1,151 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package path + +import ( + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/iterator" +) + +// YenKShortestPaths returns the k-shortest loopless paths from s to t in g. +// YenKShortestPaths will panic if g contains a negative edge weight. +func YenKShortestPaths(g graph.Graph, k int, s, t graph.Node) [][]graph.Node { + _, isDirected := g.(graph.Directed) + yk := yenKSPAdjuster{ + Graph: g, + isDirected: isDirected, + } + + if wg, ok := g.(Weighted); ok { + yk.weight = wg.Weight + } else { + yk.weight = UniformCost(g) + } + + shortest, _ := DijkstraFrom(s, yk).To(t.ID()) + switch len(shortest) { + case 0: + return nil + case 1: + return [][]graph.Node{shortest} + } + paths := [][]graph.Node{shortest} + + var pot []yenShortest + var root []graph.Node + for i := int64(1); i < int64(k); i++ { + for n := 0; n < len(paths[i-1])-1; n++ { + yk.reset() + + spur := paths[i-1][n] + root := append(root[:0], paths[i-1][:n+1]...) + + for _, path := range paths { + if len(path) <= n { + continue + } + ok := true + for x := 0; x < len(root); x++ { + if path[x].ID() != root[x].ID() { + ok = false + break + } + } + if ok { + yk.removeEdge(path[n].ID(), path[n+1].ID()) + } + } + + spath, weight := DijkstraFrom(spur, yk).To(t.ID()) + if len(root) > 1 { + var rootWeight float64 + for x := 1; x < len(root); x++ { + w, _ := yk.weight(root[x-1].ID(), root[x].ID()) + rootWeight += w + } + root = append(root[:len(root)-1], spath...) + pot = append(pot, yenShortest{root, weight + rootWeight}) + } else { + pot = append(pot, yenShortest{spath, weight}) + } + } + + if len(pot) == 0 { + break + } + + sort.Sort(byPathWeight(pot)) + best := pot[0].path + if len(best) <= 1 { + break + } + paths = append(paths, best) + pot = pot[1:] + } + + return paths +} + +// yenShortest holds a path and its weight for sorting. +type yenShortest struct { + path []graph.Node + weight float64 +} + +type byPathWeight []yenShortest + +func (s byPathWeight) Len() int { return len(s) } +func (s byPathWeight) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byPathWeight) Less(i, j int) bool { return s[i].weight < s[j].weight } + +// yenKSPAdjuster allows walked edges to be omitted from a graph +// without altering the embedded graph. +type yenKSPAdjuster struct { + graph.Graph + isDirected bool + + // weight is the edge weight function + // used for shortest path calculation. + weight Weighting + + // visitedEdges holds the edges that have + // been removed by Yen's algorithm. 
+ visitedEdges map[[2]int64]struct{} +} + +func (g yenKSPAdjuster) From(id int64) graph.Nodes { + nodes := graph.NodesOf(g.Graph.From(id)) + for i := 0; i < len(nodes); { + if g.canWalk(id, nodes[i].ID()) { + i++ + continue + } + nodes[i] = nodes[len(nodes)-1] + nodes = nodes[:len(nodes)-1] + } + return iterator.NewOrderedNodes(nodes) +} + +func (g yenKSPAdjuster) canWalk(u, v int64) bool { + _, ok := g.visitedEdges[[2]int64{u, v}] + return !ok +} + +func (g yenKSPAdjuster) removeEdge(u, v int64) { + g.visitedEdges[[2]int64{u, v}] = struct{}{} + if g.isDirected { + g.visitedEdges[[2]int64{v, u}] = struct{}{} + } +} + +func (g *yenKSPAdjuster) reset() { + g.visitedEdges = make(map[[2]int64]struct{}) +} + +func (g yenKSPAdjuster) Weight(xid, yid int64) (w float64, ok bool) { + return g.weight(xid, yid) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/dense_directed_matrix.go b/vendor/gonum.org/v1/gonum/graph/simple/dense_directed_matrix.go new file mode 100644 index 0000000..3daca9a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/dense_directed_matrix.go @@ -0,0 +1,301 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/mat" +) + +var ( + dm *DirectedMatrix + + _ graph.Graph = dm + _ graph.Directed = dm + _ edgeSetter = dm + _ weightedEdgeSetter = dm +) + +// DirectedMatrix represents a directed graph using an adjacency +// matrix such that all IDs are in a contiguous block from 0 to n-1. +// Edges are stored implicitly as an edge weight, so edges stored in +// the graph are not recoverable. +type DirectedMatrix struct { + mat *mat.Dense + nodes []graph.Node + + self float64 + absent float64 +} + +// NewDirectedMatrix creates a directed dense graph with n nodes. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewDirectedMatrix(n int, init, self, absent float64) *DirectedMatrix { + matrix := make([]float64, n*n) + if init != 0 { + for i := range matrix { + matrix[i] = init + } + } + for i := 0; i < len(matrix); i += n + 1 { + matrix[i] = self + } + return &DirectedMatrix{ + mat: mat.NewDense(n, n, matrix), + self: self, + absent: absent, + } +} + +// NewDirectedMatrixFrom creates a directed dense graph with the given nodes. +// The IDs of the nodes must be contiguous from 0 to len(nodes)-1, but may +// be in any order. If IDs are not contiguous NewDirectedMatrixFrom will panic. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewDirectedMatrixFrom(nodes []graph.Node, init, self, absent float64) *DirectedMatrix { + sort.Sort(ordered.ByID(nodes)) + for i, n := range nodes { + if int64(i) != n.ID() { + panic("simple: non-contiguous node IDs") + } + } + g := NewDirectedMatrix(len(nodes), init, self, absent) + g.nodes = nodes + return g +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. 
+func (g *DirectedMatrix) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// Edges returns all the edges in the graph. +func (g *DirectedMatrix) Edges() graph.Edges { + var edges []graph.Edge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := 0; j < r; j++ { + if i == j { + continue + } + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, WeightedEdge{F: g.Node(int64(i)), T: g.Node(int64(j)), W: w}) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *DirectedMatrix) From(id int64) graph.Nodes { + if !g.has(id) { + return graph.Empty + } + var nodes []graph.Node + _, c := g.mat.Dims() + for j := 0; j < c; j++ { + if int64(j) == id { + continue + } + // id is not greater than maximum int by this point. + if !isSame(g.mat.At(int(id), j), g.absent) { + nodes = append(nodes, g.Node(int64(j))) + } + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *DirectedMatrix) HasEdgeBetween(xid, yid int64) bool { + if !g.has(xid) { + return false + } + if !g.has(yid) { + return false + } + // xid and yid are not greater than maximum int by this point. + return xid != yid && (!isSame(g.mat.At(int(xid), int(yid)), g.absent) || !isSame(g.mat.At(int(yid), int(xid)), g.absent)) +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *DirectedMatrix) HasEdgeFromTo(uid, vid int64) bool { + if !g.has(uid) { + return false + } + if !g.has(vid) { + return false + } + // uid and vid are not greater than maximum int by this point. + return uid != vid && !isSame(g.mat.At(int(uid), int(vid)), g.absent) +} + +// Matrix returns the mat.Matrix representation of the graph. The orientation +// of the matrix is such that the matrix entry at G_{ij} is the weight of the edge +// from node i to node j. +func (g *DirectedMatrix) Matrix() mat.Matrix { + // Prevent alteration of dimensions of the returned matrix. + m := *g.mat + return &m +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *DirectedMatrix) Node(id int64) graph.Node { + if !g.has(id) { + return nil + } + if g.nodes == nil { + return Node(id) + } + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *DirectedMatrix) Nodes() graph.Nodes { + if g.nodes != nil { + nodes := make([]graph.Node, len(g.nodes)) + copy(nodes, g.nodes) + return iterator.NewOrderedNodes(nodes) + } + r, _ := g.mat.Dims() + // Matrix graphs must have at least one node. + return iterator.NewImplicitNodes(0, r, newSimpleNode) +} + +// RemoveEdge removes the edge with the given end point nodes from the graph, leaving the terminal +// nodes. If the edge does not exist it is a no-op. +func (g *DirectedMatrix) RemoveEdge(fid, tid int64) { + if !g.has(fid) { + return + } + if !g.has(tid) { + return + } + // fid and tid are not greater than maximum int by this point. + g.mat.Set(int(fid), int(tid), g.absent) +} + +// SetEdge sets e, an edge from one node to another with unit weight. If the ends of the edge +// are not in g or the edge is a self loop, SetEdge panics. SetEdge will store the nodes of +// e in the graph if it was initialized with NewDirectedMatrixFrom. 
+func (g *DirectedMatrix) SetEdge(e graph.Edge) { + g.setWeightedEdge(e, 1) +} + +// SetWeightedEdge sets e, an edge from one node to another. If the ends of the edge are not in g +// or the edge is a self loop, SetWeightedEdge panics. SetWeightedEdge will store the nodes of +// e in the graph if it was initialized with NewDirectedMatrixFrom. +func (g *DirectedMatrix) SetWeightedEdge(e graph.WeightedEdge) { + g.setWeightedEdge(e, e.Weight()) +} + +func (g *DirectedMatrix) setWeightedEdge(e graph.Edge, weight float64) { + from := e.From() + fid := from.ID() + to := e.To() + tid := to.ID() + if fid == tid { + panic("simple: set illegal edge") + } + if int64(int(fid)) != fid { + panic("simple: unavailable from node ID for dense graph") + } + if int64(int(tid)) != tid { + panic("simple: unavailable to node ID for dense graph") + } + if g.nodes != nil { + g.nodes[fid] = from + g.nodes[tid] = to + } + // fid and tid are not greater than maximum int by this point. + g.mat.Set(int(fid), int(tid), weight) +} + +// To returns all nodes in g that can reach directly to n. +func (g *DirectedMatrix) To(id int64) graph.Nodes { + if !g.has(id) { + return graph.Empty + } + var nodes []graph.Node + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + if int64(i) == id { + continue + } + // id is not greater than maximum int by this point. + if !isSame(g.mat.At(i, int(id)), g.absent) { + nodes = append(nodes, g.Node(int64(i))) + } + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *DirectedMatrix) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return g.self, true + } + if g.HasEdgeFromTo(xid, yid) { + // xid and yid are not greater than maximum int by this point. + return g.mat.At(int(xid), int(yid)), true + } + return g.absent, false +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *DirectedMatrix) WeightedEdge(uid, vid int64) graph.WeightedEdge { + if g.HasEdgeFromTo(uid, vid) { + // xid and yid are not greater than maximum int by this point. + return WeightedEdge{F: g.Node(uid), T: g.Node(vid), W: g.mat.At(int(uid), int(vid))} + } + return nil +} + +// WeightedEdges returns all the edges in the graph. 
+func (g *DirectedMatrix) WeightedEdges() graph.WeightedEdges { + var edges []graph.WeightedEdge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := 0; j < r; j++ { + if i == j { + continue + } + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, WeightedEdge{F: g.Node(int64(i)), T: g.Node(int64(j)), W: w}) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} + +func (g *DirectedMatrix) has(id int64) bool { + r, _ := g.mat.Dims() + return 0 <= id && id < int64(r) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/dense_undirected_matrix.go b/vendor/gonum.org/v1/gonum/graph/simple/dense_undirected_matrix.go new file mode 100644 index 0000000..f51debb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/dense_undirected_matrix.go @@ -0,0 +1,268 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/iterator" + "gonum.org/v1/gonum/mat" +) + +var ( + um *UndirectedMatrix + + _ graph.Graph = um + _ graph.Undirected = um + _ edgeSetter = um + _ weightedEdgeSetter = um +) + +// UndirectedMatrix represents an undirected graph using an adjacency +// matrix such that all IDs are in a contiguous block from 0 to n-1. +// Edges are stored implicitly as an edge weight, so edges stored in +// the graph are not recoverable. +type UndirectedMatrix struct { + mat *mat.SymDense + nodes []graph.Node + + self float64 + absent float64 +} + +// NewUndirectedMatrix creates an undirected dense graph with n nodes. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewUndirectedMatrix(n int, init, self, absent float64) *UndirectedMatrix { + matrix := make([]float64, n*n) + if init != 0 { + for i := range matrix { + matrix[i] = init + } + } + for i := 0; i < len(matrix); i += n + 1 { + matrix[i] = self + } + return &UndirectedMatrix{ + mat: mat.NewSymDense(n, matrix), + self: self, + absent: absent, + } +} + +// NewUndirectedMatrixFrom creates an undirected dense graph with the given nodes. +// The IDs of the nodes must be contiguous from 0 to len(nodes)-1, but may +// be in any order. If IDs are not contiguous NewUndirectedMatrixFrom will panic. +// All edges are initialized with the weight given by init. The self parameter +// specifies the cost of self connection, and absent specifies the weight +// returned for absent edges. +func NewUndirectedMatrixFrom(nodes []graph.Node, init, self, absent float64) *UndirectedMatrix { + sort.Sort(ordered.ByID(nodes)) + for i, n := range nodes { + if int64(i) != n.ID() { + panic("simple: non-contiguous node IDs") + } + } + g := NewUndirectedMatrix(len(nodes), init, self, absent) + g.nodes = nodes + return g +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedMatrix) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *UndirectedMatrix) EdgeBetween(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// Edges returns all the edges in the graph. 
+func (g *UndirectedMatrix) Edges() graph.Edges { + var edges []graph.Edge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := i + 1; j < r; j++ { + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, WeightedEdge{F: g.Node(int64(i)), T: g.Node(int64(j)), W: w}) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *UndirectedMatrix) From(id int64) graph.Nodes { + if !g.has(id) { + return graph.Empty + } + var nodes []graph.Node + r := g.mat.Symmetric() + for i := 0; i < r; i++ { + if int64(i) == id { + continue + } + // id is not greater than maximum int by this point. + if !isSame(g.mat.At(int(id), i), g.absent) { + nodes = append(nodes, g.Node(int64(i))) + } + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *UndirectedMatrix) HasEdgeBetween(uid, vid int64) bool { + if !g.has(uid) { + return false + } + if !g.has(vid) { + return false + } + // uid and vid are not greater than maximum int by this point. + return uid != vid && !isSame(g.mat.At(int(uid), int(vid)), g.absent) +} + +// Matrix returns the mat.Matrix representation of the graph. +func (g *UndirectedMatrix) Matrix() mat.Matrix { + // Prevent alteration of dimensions of the returned matrix. + m := *g.mat + return &m +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *UndirectedMatrix) Node(id int64) graph.Node { + if !g.has(id) { + return nil + } + if g.nodes == nil { + return Node(id) + } + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *UndirectedMatrix) Nodes() graph.Nodes { + if g.nodes != nil { + nodes := make([]graph.Node, len(g.nodes)) + copy(nodes, g.nodes) + return iterator.NewOrderedNodes(nodes) + } + r := g.mat.Symmetric() + // Matrix graphs must have at least one node. + return iterator.NewImplicitNodes(0, r, newSimpleNode) +} + +// RemoveEdge removes the edge with the given end point IDs from the graph, leaving the terminal +// nodes. If the edge does not exist it is a no-op. +func (g *UndirectedMatrix) RemoveEdge(fid, tid int64) { + if !g.has(fid) { + return + } + if !g.has(tid) { + return + } + // fid and tid are not greater than maximum int by this point. + g.mat.SetSym(int(fid), int(tid), g.absent) +} + +// SetEdge sets e, an edge from one node to another with unit weight. If the ends of the edge are +// not in g or the edge is a self loop, SetEdge panics. SetEdge will store the nodes of +// e in the graph if it was initialized with NewUndirectedMatrixFrom. +func (g *UndirectedMatrix) SetEdge(e graph.Edge) { + g.setWeightedEdge(e, 1) +} + +// SetWeightedEdge sets e, an edge from one node to another. If the ends of the edge are not in g +// or the edge is a self loop, SetWeightedEdge panics. SetWeightedEdge will store the nodes of +// e in the graph if it was initialized with NewUndirectedMatrixFrom. 
+func (g *UndirectedMatrix) SetWeightedEdge(e graph.WeightedEdge) { + g.setWeightedEdge(e, e.Weight()) +} + +func (g *UndirectedMatrix) setWeightedEdge(e graph.Edge, weight float64) { + from := e.From() + fid := from.ID() + to := e.To() + tid := to.ID() + if fid == tid { + panic("simple: set illegal edge") + } + if int64(int(fid)) != fid { + panic("simple: unavailable from node ID for dense graph") + } + if int64(int(tid)) != tid { + panic("simple: unavailable to node ID for dense graph") + } + if g.nodes != nil { + g.nodes[fid] = from + g.nodes[tid] = to + } + // fid and tid are not greater than maximum int by this point. + g.mat.SetSym(int(fid), int(tid), weight) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *UndirectedMatrix) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return g.self, true + } + if g.HasEdgeBetween(xid, yid) { + // xid and yid are not greater than maximum int by this point. + return g.mat.At(int(xid), int(yid)), true + } + return g.absent, false +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedMatrix) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. +func (g *UndirectedMatrix) WeightedEdgeBetween(uid, vid int64) graph.WeightedEdge { + if g.HasEdgeBetween(uid, vid) { + // uid and vid are not greater than maximum int by this point. + return WeightedEdge{F: g.Node(uid), T: g.Node(vid), W: g.mat.At(int(uid), int(vid))} + } + return nil +} + +// WeightedEdges returns all the edges in the graph. +func (g *UndirectedMatrix) WeightedEdges() graph.WeightedEdges { + var edges []graph.WeightedEdge + r, _ := g.mat.Dims() + for i := 0; i < r; i++ { + for j := i + 1; j < r; j++ { + if w := g.mat.At(i, j); !isSame(w, g.absent) { + edges = append(edges, WeightedEdge{F: g.Node(int64(i)), T: g.Node(int64(j)), W: w}) + } + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} + +func (g *UndirectedMatrix) has(id int64) bool { + r := g.mat.Symmetric() + return 0 <= id && id < int64(r) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/directed.go b/vendor/gonum.org/v1/gonum/graph/simple/directed.go new file mode 100644 index 0000000..f19efbd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/directed.go @@ -0,0 +1,235 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + dg *DirectedGraph + + _ graph.Graph = dg + _ graph.Directed = dg + _ graph.NodeAdder = dg + _ graph.NodeRemover = dg + _ graph.EdgeAdder = dg + _ graph.EdgeRemover = dg +) + +// DirectedGraph implements a generalized directed graph. 
+type DirectedGraph struct { + nodes map[int64]graph.Node + from map[int64]map[int64]graph.Edge + to map[int64]map[int64]graph.Edge + + nodeIDs uid.Set +} + +// NewDirectedGraph returns a DirectedGraph. +func NewDirectedGraph() *DirectedGraph { + return &DirectedGraph{ + nodes: make(map[int64]graph.Node), + from: make(map[int64]map[int64]graph.Edge), + to: make(map[int64]map[int64]graph.Edge), + + nodeIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *DirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.from[n.ID()] = make(map[int64]graph.Edge) + g.to[n.ID()] = make(map[int64]graph.Edge) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *DirectedGraph) Edge(uid, vid int64) graph.Edge { + edge, ok := g.from[uid][vid] + if !ok { + return nil + } + return edge +} + +// Edges returns all the edges in the graph. +func (g *DirectedGraph) Edges() graph.Edges { + var edges []graph.Edge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *DirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + from := make([]graph.Node, len(g.from[id])) + i := 0 + for vid := range g.from[id] { + from[i] = g.nodes[vid] + i++ + } + if len(from) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(from) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. +func (g *DirectedGraph) HasEdgeBetween(xid, yid int64) bool { + if _, ok := g.from[xid][yid]; ok { + return true + } + _, ok := g.from[yid][xid] + return ok +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *DirectedGraph) HasEdgeFromTo(uid, vid int64) bool { + if _, ok := g.from[uid][vid]; !ok { + return false + } + return true +} + +// NewEdge returns a new Edge from the source to the destination node. +func (g *DirectedGraph) NewEdge(from, to graph.Node) graph.Edge { + return &Edge{F: from, T: to} +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *DirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *DirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *DirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveEdge removes the edge with the given end point IDs from the graph, leaving the terminal +// nodes. If the edge does not exist it is a no-op. 
+func (g *DirectedGraph) RemoveEdge(fid, tid int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.from[fid], tid) + delete(g.to[tid], fid) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *DirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.from[id] { + delete(g.to[from], id) + } + delete(g.from, id) + + for to := range g.to[id] { + delete(g.from[to], id) + } + delete(g.to, id) + + g.nodeIDs.Release(id) +} + +// SetEdge adds e, an edge from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the edge otherwise. +// It will panic if the IDs of the e.From and e.To are equal. +func (g *DirectedGraph) SetEdge(e graph.Edge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + + g.from[fid][tid] = e + g.to[tid][fid] = e +} + +// To returns all nodes in g that can reach directly to n. +func (g *DirectedGraph) To(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + to := make([]graph.Node, len(g.to[id])) + i := 0 + for uid := range g.to[id] { + to[i] = g.nodes[uid] + i++ + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/doc.go b/vendor/gonum.org/v1/gonum/graph/simple/doc.go new file mode 100644 index 0000000..dc3f24c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/doc.go @@ -0,0 +1,9 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package simple provides a suite of simple graph implementations satisfying +// the gonum/graph interfaces. +// +// All types in simple return the graph.Empty value for empty iterators. +package simple // import "gonum.org/v1/gonum/graph/simple" diff --git a/vendor/gonum.org/v1/gonum/graph/simple/simple.go b/vendor/gonum.org/v1/gonum/graph/simple/simple.go new file mode 100644 index 0000000..3b45765 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/simple.go @@ -0,0 +1,72 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "math" + + "gonum.org/v1/gonum/graph" +) + +// Node is a simple graph node. +type Node int64 + +// ID returns the ID number of the node. +func (n Node) ID() int64 { + return int64(n) +} + +func newSimpleNode(id int) graph.Node { + return Node(id) +} + +// Edge is a simple graph edge. +type Edge struct { + F, T graph.Node +} + +// From returns the from-node of the edge. +func (e Edge) From() graph.Node { return e.F } + +// To returns the to-node of the edge. +func (e Edge) To() graph.Node { return e.T } + +// ReversedLine returns a new Edge with the F and T fields +// swapped. +func (e Edge) ReversedEdge() graph.Edge { return Edge{F: e.T, T: e.F} } + +// WeightedEdge is a simple weighted graph edge. 
+type WeightedEdge struct { + F, T graph.Node + W float64 +} + +// From returns the from-node of the edge. +func (e WeightedEdge) From() graph.Node { return e.F } + +// To returns the to-node of the edge. +func (e WeightedEdge) To() graph.Node { return e.T } + +// ReversedLine returns a new Edge with the F and T fields +// swapped. The weight of the new Edge is the same as +// the weight of the receiver. +func (e WeightedEdge) ReversedEdge() graph.Edge { return WeightedEdge{F: e.T, T: e.F, W: e.W} } + +// Weight returns the weight of the edge. +func (e WeightedEdge) Weight() float64 { return e.W } + +// isSame returns whether two float64 values are the same where NaN values +// are equalable. +func isSame(a, b float64) bool { + return a == b || (math.IsNaN(a) && math.IsNaN(b)) +} + +type edgeSetter interface { + SetEdge(e graph.Edge) +} + +type weightedEdgeSetter interface { + SetWeightedEdge(e graph.WeightedEdge) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/undirected.go b/vendor/gonum.org/v1/gonum/graph/simple/undirected.go new file mode 100644 index 0000000..841a8e3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/undirected.go @@ -0,0 +1,216 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + ug *UndirectedGraph + + _ graph.Graph = ug + _ graph.Undirected = ug + _ graph.NodeAdder = ug + _ graph.NodeRemover = ug + _ graph.EdgeAdder = ug + _ graph.EdgeRemover = ug +) + +// UndirectedGraph implements a generalized undirected graph. +type UndirectedGraph struct { + nodes map[int64]graph.Node + edges map[int64]map[int64]graph.Edge + + nodeIDs uid.Set +} + +// NewUndirectedGraph returns an UndirectedGraph. +func NewUndirectedGraph() *UndirectedGraph { + return &UndirectedGraph{ + nodes: make(map[int64]graph.Node), + edges: make(map[int64]map[int64]graph.Edge), + + nodeIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *UndirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.edges[n.ID()] = make(map[int64]graph.Edge) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *UndirectedGraph) Edge(uid, vid int64) graph.Edge { + return g.EdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. +func (g *UndirectedGraph) EdgeBetween(xid, yid int64) graph.Edge { + edge, ok := g.edges[xid][yid] + if !ok { + return nil + } + if edge.From().ID() == xid { + return edge + } + return edge.ReversedEdge() +} + +// Edges returns all the edges in the graph. 
+func (g *UndirectedGraph) Edges() graph.Edges { + if len(g.edges) == 0 { + return graph.Empty + } + var edges []graph.Edge + seen := make(map[[2]int64]struct{}) + for _, u := range g.edges { + for _, e := range u { + uid := e.From().ID() + vid := e.To().ID() + if _, ok := seen[[2]int64{uid, vid}]; ok { + continue + } + seen[[2]int64{uid, vid}] = struct{}{} + seen[[2]int64{vid, uid}] = struct{}{} + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *UndirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.nodes[id]; !ok { + return graph.Empty + } + + nodes := make([]graph.Node, len(g.edges[id])) + i := 0 + for from := range g.edges[id] { + nodes[i] = g.nodes[from] + i++ + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *UndirectedGraph) HasEdgeBetween(xid, yid int64) bool { + _, ok := g.edges[xid][yid] + return ok +} + +// NewEdge returns a new Edge from the source to the destination node. +func (g *UndirectedGraph) NewEdge(from, to graph.Node) graph.Edge { + return &Edge{F: from, T: to} +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *UndirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *UndirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *UndirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveEdge removes the edge with the given end IDs from the graph, leaving the terminal nodes. +// If the edge does not exist it is a no-op. +func (g *UndirectedGraph) RemoveEdge(fid, tid int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.edges[fid], tid) + delete(g.edges[tid], fid) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *UndirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.edges[id] { + delete(g.edges[from], id) + } + delete(g.edges, id) + + g.nodeIDs.Release(id) +} + +// SetEdge adds e, an edge from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the edge otherwise. +// It will panic if the IDs of the e.From and e.To are equal. 
+func (g *UndirectedGraph) SetEdge(e graph.Edge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + + g.edges[fid][tid] = e + g.edges[tid][fid] = e +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/weighted_directed.go b/vendor/gonum.org/v1/gonum/graph/simple/weighted_directed.go new file mode 100644 index 0000000..92bd284 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/weighted_directed.go @@ -0,0 +1,279 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + wdg *WeightedDirectedGraph + + _ graph.Graph = wdg + _ graph.Weighted = wdg + _ graph.Directed = wdg + _ graph.WeightedDirected = wdg + _ graph.NodeAdder = wdg + _ graph.NodeRemover = wdg + _ graph.WeightedEdgeAdder = wdg + _ graph.EdgeRemover = wdg +) + +// WeightedDirectedGraph implements a generalized weighted directed graph. +type WeightedDirectedGraph struct { + nodes map[int64]graph.Node + from map[int64]map[int64]graph.WeightedEdge + to map[int64]map[int64]graph.WeightedEdge + + self, absent float64 + + nodeIDs uid.Set +} + +// NewWeightedDirectedGraph returns a WeightedDirectedGraph with the specified self and absent +// edge weight values. +func NewWeightedDirectedGraph(self, absent float64) *WeightedDirectedGraph { + return &WeightedDirectedGraph{ + nodes: make(map[int64]graph.Node), + from: make(map[int64]map[int64]graph.WeightedEdge), + to: make(map[int64]map[int64]graph.WeightedEdge), + + self: self, + absent: absent, + + nodeIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *WeightedDirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.from[n.ID()] = make(map[int64]graph.WeightedEdge) + g.to[n.ID()] = make(map[int64]graph.WeightedEdge) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedDirectedGraph) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdge(uid, vid) +} + +// Edges returns all the edges in the graph. +func (g *WeightedDirectedGraph) Edges() graph.Edges { + var edges []graph.Edge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *WeightedDirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + from := make([]graph.Node, len(g.from[id])) + i := 0 + for vid := range g.from[id] { + from[i] = g.nodes[vid] + i++ + } + if len(from) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(from) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y without +// considering direction. 
+func (g *WeightedDirectedGraph) HasEdgeBetween(xid, yid int64) bool { + if _, ok := g.from[xid][yid]; ok { + return true + } + _, ok := g.from[yid][xid] + return ok +} + +// HasEdgeFromTo returns whether an edge exists in the graph from u to v. +func (g *WeightedDirectedGraph) HasEdgeFromTo(uid, vid int64) bool { + if _, ok := g.from[uid][vid]; !ok { + return false + } + return true +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *WeightedDirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// NewWeightedEdge returns a new weighted edge from the source to the destination node. +func (g *WeightedDirectedGraph) NewWeightedEdge(from, to graph.Node, weight float64) graph.WeightedEdge { + return &WeightedEdge{F: from, T: to, W: weight} +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *WeightedDirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *WeightedDirectedGraph) Nodes() graph.Nodes { + if len(g.from) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveEdge removes the edge with the given end point IDs from the graph, leaving the terminal +// nodes. If the edge does not exist it is a no-op. +func (g *WeightedDirectedGraph) RemoveEdge(fid, tid int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.from[fid], tid) + delete(g.to[tid], fid) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *WeightedDirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.from[id] { + delete(g.to[from], id) + } + delete(g.from, id) + + for to := range g.to[id] { + delete(g.from[to], id) + } + delete(g.to, id) + + g.nodeIDs.Release(id) +} + +// SetWeightedEdge adds a weighted edge from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the edge otherwise. +// It will panic if the IDs of the e.From and e.To are equal. +func (g *WeightedDirectedGraph) SetWeightedEdge(e graph.WeightedEdge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + + g.from[fid][tid] = e + g.to[tid][fid] = e +} + +// To returns all nodes in g that can reach directly to n. +func (g *WeightedDirectedGraph) To(id int64) graph.Nodes { + if _, ok := g.from[id]; !ok { + return graph.Empty + } + + to := make([]graph.Node, len(g.to[id])) + i := 0 + for uid := range g.to[id] { + to[i] = g.nodes[uid] + i++ + } + if len(to) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(to) +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. 
+// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *WeightedDirectedGraph) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return g.self, true + } + if to, ok := g.from[xid]; ok { + if e, ok := to[yid]; ok { + return e.Weight(), true + } + } + return g.absent, false +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedDirectedGraph) WeightedEdge(uid, vid int64) graph.WeightedEdge { + edge, ok := g.from[uid][vid] + if !ok { + return nil + } + return edge +} + +// WeightedEdges returns all the weighted edges in the graph. +func (g *WeightedDirectedGraph) WeightedEdges() graph.WeightedEdges { + var edges []graph.WeightedEdge + for _, u := range g.nodes { + for _, e := range g.from[u.ID()] { + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} diff --git a/vendor/gonum.org/v1/gonum/graph/simple/weighted_undirected.go b/vendor/gonum.org/v1/gonum/graph/simple/weighted_undirected.go new file mode 100644 index 0000000..5932576 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/simple/weighted_undirected.go @@ -0,0 +1,273 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package simple + +import ( + "fmt" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/uid" + "gonum.org/v1/gonum/graph/iterator" +) + +var ( + wug *WeightedUndirectedGraph + + _ graph.Graph = wug + _ graph.Weighted = wug + _ graph.Undirected = wug + _ graph.WeightedUndirected = wug + _ graph.NodeAdder = wug + _ graph.NodeRemover = wug + _ graph.WeightedEdgeAdder = wug + _ graph.EdgeRemover = wug +) + +// WeightedUndirectedGraph implements a generalized weighted undirected graph. +type WeightedUndirectedGraph struct { + nodes map[int64]graph.Node + edges map[int64]map[int64]graph.WeightedEdge + + self, absent float64 + + nodeIDs uid.Set +} + +// NewWeightedUndirectedGraph returns an WeightedUndirectedGraph with the specified self and absent +// edge weight values. +func NewWeightedUndirectedGraph(self, absent float64) *WeightedUndirectedGraph { + return &WeightedUndirectedGraph{ + nodes: make(map[int64]graph.Node), + edges: make(map[int64]map[int64]graph.WeightedEdge), + + self: self, + absent: absent, + + nodeIDs: uid.NewSet(), + } +} + +// AddNode adds n to the graph. It panics if the added node ID matches an existing node ID. +func (g *WeightedUndirectedGraph) AddNode(n graph.Node) { + if _, exists := g.nodes[n.ID()]; exists { + panic(fmt.Sprintf("simple: node ID collision: %d", n.ID())) + } + g.nodes[n.ID()] = n + g.edges[n.ID()] = make(map[int64]graph.WeightedEdge) + g.nodeIDs.Use(n.ID()) +} + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedUndirectedGraph) Edge(uid, vid int64) graph.Edge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. 
+func (g *WeightedUndirectedGraph) EdgeBetween(xid, yid int64) graph.Edge { + return g.WeightedEdgeBetween(xid, yid) +} + +// Edges returns all the edges in the graph. +func (g *WeightedUndirectedGraph) Edges() graph.Edges { + if len(g.edges) == 0 { + return graph.Empty + } + var edges []graph.Edge + seen := make(map[[2]int64]struct{}) + for _, u := range g.edges { + for _, e := range u { + uid := e.From().ID() + vid := e.To().ID() + if _, ok := seen[[2]int64{uid, vid}]; ok { + continue + } + seen[[2]int64{uid, vid}] = struct{}{} + seen[[2]int64{vid, uid}] = struct{}{} + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedEdges(edges) +} + +// From returns all nodes in g that can be reached directly from n. +func (g *WeightedUndirectedGraph) From(id int64) graph.Nodes { + if _, ok := g.nodes[id]; !ok { + return graph.Empty + } + + nodes := make([]graph.Node, len(g.edges[id])) + i := 0 + for from := range g.edges[id] { + nodes[i] = g.nodes[from] + i++ + } + if len(nodes) == 0 { + return graph.Empty + } + return iterator.NewOrderedNodes(nodes) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g *WeightedUndirectedGraph) HasEdgeBetween(xid, yid int64) bool { + _, ok := g.edges[xid][yid] + return ok +} + +// NewNode returns a new unique Node to be added to g. The Node's ID does +// not become valid in g until the Node is added to g. +func (g *WeightedUndirectedGraph) NewNode() graph.Node { + if len(g.nodes) == 0 { + return Node(0) + } + if int64(len(g.nodes)) == uid.Max { + panic("simple: cannot allocate node: no slot") + } + return Node(g.nodeIDs.NewID()) +} + +// NewWeightedEdge returns a new weighted edge from the source to the destination node. +func (g *WeightedUndirectedGraph) NewWeightedEdge(from, to graph.Node, weight float64) graph.WeightedEdge { + return &WeightedEdge{F: from, T: to, W: weight} +} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g *WeightedUndirectedGraph) Node(id int64) graph.Node { + return g.nodes[id] +} + +// Nodes returns all the nodes in the graph. +func (g *WeightedUndirectedGraph) Nodes() graph.Nodes { + if len(g.nodes) == 0 { + return graph.Empty + } + nodes := make([]graph.Node, len(g.nodes)) + i := 0 + for _, n := range g.nodes { + nodes[i] = n + i++ + } + return iterator.NewOrderedNodes(nodes) +} + +// RemoveEdge removes the edge with the given end point IDs from the graph, leaving the terminal +// nodes. If the edge does not exist it is a no-op. +func (g *WeightedUndirectedGraph) RemoveEdge(fid, tid int64) { + if _, ok := g.nodes[fid]; !ok { + return + } + if _, ok := g.nodes[tid]; !ok { + return + } + + delete(g.edges[fid], tid) + delete(g.edges[tid], fid) +} + +// RemoveNode removes the node with the given ID from the graph, as well as any edges attached +// to it. If the node is not in the graph it is a no-op. +func (g *WeightedUndirectedGraph) RemoveNode(id int64) { + if _, ok := g.nodes[id]; !ok { + return + } + delete(g.nodes, id) + + for from := range g.edges[id] { + delete(g.edges[from], id) + } + delete(g.edges, id) + + g.nodeIDs.Release(id) +} + +// SetWeightedEdge adds a weighted edge from one node to another. If the nodes do not exist, they are added +// and are set to the nodes of the edge otherwise. +// It will panic if the IDs of the e.From and e.To are equal. 
+func (g *WeightedUndirectedGraph) SetWeightedEdge(e graph.WeightedEdge) { + var ( + from = e.From() + fid = from.ID() + to = e.To() + tid = to.ID() + ) + + if fid == tid { + panic("simple: adding self edge") + } + + if _, ok := g.nodes[fid]; !ok { + g.AddNode(from) + } else { + g.nodes[fid] = from + } + if _, ok := g.nodes[tid]; !ok { + g.AddNode(to) + } else { + g.nodes[tid] = to + } + + g.edges[fid][tid] = e + g.edges[tid][fid] = e +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node or there is no joining edge between the two nodes the weight +// value returned is either the graph's absent or self value. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g *WeightedUndirectedGraph) Weight(xid, yid int64) (w float64, ok bool) { + if xid == yid { + return g.self, true + } + if n, ok := g.edges[xid]; ok { + if e, ok := n[yid]; ok { + return e.Weight(), true + } + } + return g.absent, false +} + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +func (g *WeightedUndirectedGraph) WeightedEdge(uid, vid int64) graph.WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. +func (g *WeightedUndirectedGraph) WeightedEdgeBetween(xid, yid int64) graph.WeightedEdge { + edge, ok := g.edges[xid][yid] + if !ok { + return nil + } + if edge.From().ID() == xid { + return edge + } + return edge.ReversedEdge().(graph.WeightedEdge) +} + +// WeightedEdges returns all the weighted edges in the graph. +func (g *WeightedUndirectedGraph) WeightedEdges() graph.WeightedEdges { + var edges []graph.WeightedEdge + seen := make(map[[2]int64]struct{}) + for _, u := range g.edges { + for _, e := range u { + uid := e.From().ID() + vid := e.To().ID() + if _, ok := seen[[2]int64{uid, vid}]; ok { + continue + } + seen[[2]int64{uid, vid}] = struct{}{} + seen[[2]int64{vid, uid}] = struct{}{} + edges = append(edges, e) + } + } + if len(edges) == 0 { + return graph.Empty + } + return iterator.NewOrderedWeightedEdges(edges) +} diff --git a/vendor/gonum.org/v1/gonum/graph/testgraph/testcases.go b/vendor/gonum.org/v1/gonum/graph/testgraph/testcases.go new file mode 100644 index 0000000..0c0c870 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/testgraph/testcases.go @@ -0,0 +1,347 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testgraph + +import ( + "math" + + "gonum.org/v1/gonum/graph" +) + +// node is a graph.Node implementation that is not exported +// so that other packages will not be aware of its implementation. +type node int64 + +func (n node) ID() int64 { return int64(n) } + +// line is an extended graph.Edge implementation that is not exported +// so that other packages will not be aware of its implementation. It +// covers all the edge types exported by graph. +type line struct { + F, T graph.Node + UID int64 + W float64 +} + +func (e line) From() graph.Node { return e.F } +func (e line) To() graph.Node { return e.T } +func (e line) ReversedEdge() graph.Edge { e.F, e.T = e.T, e.F; return e } +func (e line) ID() int64 { return e.UID } +func (e line) Weight() float64 { return e.W } + +var testCases = []struct { + // name is the name of the test. 
+ name string + + // nodes is the set of nodes that should be used + // to construct the graph. + nodes []graph.Node + + // edges is the set of edges that should be used + // to construct the graph. + edges []WeightedLine + + // nonexist is a set of nodes that should not be + // found within the graph. + nonexist []graph.Node + + // self is the weight value associated with + // a self edge for simple graphs that do not + // store individual self edges. + self float64 + + // absent is the weight value associated + // with absent edges. + absent float64 +}{ + { + name: "empty", + nonexist: []graph.Node{node(-1), node(0), node(1)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "one - negative", + nodes: []graph.Node{node(-1)}, + nonexist: []graph.Node{node(0), node(1)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "one - zero", + nodes: []graph.Node{node(0)}, + nonexist: []graph.Node{node(-1), node(1)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "one - positive", + nodes: []graph.Node{node(1)}, + nonexist: []graph.Node{node(-1), node(0)}, + self: 0, + absent: math.Inf(1), + }, + + { + name: "one - self loop", + nodes: []graph.Node{node(0)}, + edges: []WeightedLine{line{F: node(0), T: node(0), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-1), node(1)}, + self: 0, + absent: math.Inf(1), + }, + + { + name: "two - positive", + nodes: []graph.Node{node(1), node(2)}, + edges: []WeightedLine{line{F: node(1), T: node(2), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-1), node(0)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "two - negative", + nodes: []graph.Node{node(-1), node(-2)}, + edges: []WeightedLine{line{F: node(-1), T: node(-2), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(0), node(-3)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "two - zero spanning", + nodes: []graph.Node{node(-1), node(1)}, + edges: []WeightedLine{line{F: node(-1), T: node(1), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(0), node(2)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "two - zero contiguous", + nodes: []graph.Node{node(0), node(1)}, + edges: []WeightedLine{line{F: node(0), T: node(1), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-1), node(2)}, + self: 0, + absent: math.Inf(1), + }, + + { + name: "three - positive", + nodes: []graph.Node{node(1), node(2), node(3)}, + edges: []WeightedLine{line{F: node(1), T: node(2), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-1), node(0)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "three - negative", + nodes: []graph.Node{node(-1), node(-2), node(-3)}, + edges: []WeightedLine{line{F: node(-1), T: node(-2), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(0), node(1)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "three - zero spanning", + nodes: []graph.Node{node(-1), node(0), node(1)}, + edges: []WeightedLine{line{F: node(-1), T: node(1), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-2), node(2)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "three - zero contiguous", + nodes: []graph.Node{node(0), node(1), node(2)}, + edges: []WeightedLine{line{F: node(0), T: node(1), UID: 0, W: 0.5}}, + nonexist: []graph.Node{node(-1), node(3)}, + self: 0, + absent: math.Inf(1), + }, + + { + name: "4-clique - single(non-prepared)", + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i + 1; j < 4; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: 
[]graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique+ - single(non-prepared)", + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i; j < 4; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique - single(prepared)", + nodes: func() []graph.Node { + const n = 4 + nodes := make([]graph.Node, n) + for i := range nodes { + nodes[i] = node(i) + } + return nodes + }(), + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique+ - single(prepared)", + nodes: func() []graph.Node { + const n = 4 + nodes := make([]graph.Node, n) + for i := range nodes { + nodes[i] = node(i) + } + return nodes + }(), + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + + { + name: "4-clique - double(non-prepared)", + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + edges = append(edges, line{F: node(j), T: node(i), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique+ - double(non-prepared)", + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + edges = append(edges, line{F: node(j), T: node(i), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique - double(prepared)", + nodes: func() []graph.Node { + const n = 4 + nodes := make([]graph.Node, n) + for i := range nodes { + nodes[i] = node(i) + } + return nodes + }(), + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + edges = append(edges, line{F: node(j), T: node(i), UID: uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, + { + name: "4-clique+ - double(prepared)", + nodes: func() []graph.Node { + const n = 4 + nodes := make([]graph.Node, n) + for i := range nodes { + nodes[i] = node(i) + } + return nodes + }(), + edges: func() []WeightedLine { + const n = 4 + var uid int64 + var edges []WeightedLine + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + edges = append(edges, line{F: node(i), T: node(j), UID: uid, W: 0.5}) + uid++ + edges = append(edges, line{F: node(j), T: node(i), UID: 
uid, W: 0.5}) + uid++ + } + } + return edges + }(), + nonexist: []graph.Node{node(-1), node(4)}, + self: 0, + absent: math.Inf(1), + }, +} diff --git a/vendor/gonum.org/v1/gonum/graph/testgraph/testgraph.go b/vendor/gonum.org/v1/gonum/graph/testgraph/testgraph.go new file mode 100644 index 0000000..a9aea7b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/testgraph/testgraph.go @@ -0,0 +1,2112 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testgraph provides a set of testing helper functions +// that test gonum graph interface implementations. +package testgraph // import "gonum.org/v1/gonum/graph/testgraph" + +import ( + "fmt" + "math" + "reflect" + "sort" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/mat" +) + +// BUG(kortschak): Edge equality is tested in part with reflect.DeepEqual and +// direct equality of weight values. This means that edges returned by graphs +// must not contain NaN values. Weights returned by the Weight method are +// compared with NaN-awareness, so they may be NaN when there is no edge +// associated with the Weight call. + +func isValidIterator(it graph.Iterator) bool { + return it != nil +} + +func checkEmptyIterator(t *testing.T, it graph.Iterator, useEmpty bool) { + t.Helper() + + if it.Len() != 0 { + return + } + if it != graph.Empty { + if useEmpty { + t.Errorf("unexpected empty iterator: got:%T", it) + return + } + // Only log this since we say that a graph should + // return a graph.Empty when it is empty. + t.Logf("unexpected empty iterator: got:%T", it) + } +} + +// Edge supports basic edge operations. +type Edge interface { + // From returns the from node of the edge. + From() graph.Node + + // To returns the to node of the edge. + To() graph.Node +} + +// WeightedLine is a generalized graph edge that supports all graph +// edge operations except reversal. +type WeightedLine interface { + Edge + + // ID returns the unique ID for the Line. + ID() int64 + + // Weight returns the weight of the edge. + Weight() float64 +} + +// A Builder function returns a graph constructed from the nodes, edges and +// default weights passed in, potentially altering the nodes and edges to +// conform to the requirements of the graph. The graph is returned along with +// the nodes, edges and default weights used to construct the graph. +// The returned edges may be any of graph.Edge, graph.WeightedEdge, graph.Line +// or graph.WeightedLine depending on what the graph requires. +// The client may skip a test case by returning ok=false when the input is not +// a valid graph construction. +type Builder func(nodes []graph.Node, edges []WeightedLine, self, absent float64) (g graph.Graph, n []graph.Node, e []Edge, s, a float64, ok bool) + +// edgeLister is a graph that can return all its edges. +type edgeLister interface { + // Edges returns all the edges of a graph. + Edges() graph.Edges +} + +// weightedEdgeLister is a graph that can return all its weighted edges. +type weightedEdgeLister interface { + // WeightedEdges returns all the weighted edges of a graph. + WeightedEdges() graph.WeightedEdges +} + +// matrixer is a graph that can return an adjacency matrix. +type matrixer interface { + // Matrix returns the graph's adjacency matrix. 
+ Matrix() mat.Matrix +} + +// ReturnAllNodes tests the constructed graph for the ability to return all +// the nodes it claims it has used in its construction. This is a check of +// the Nodes method of graph.Graph and the iterator that is returned. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnAllNodes(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, want, _, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + it := g.Nodes() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + var got []graph.Node + for it.Next() { + got = append(got, it.Node()) + } + + sort.Sort(ordered.ByID(got)) + sort.Sort(ordered.ByID(want)) + + if !reflect.DeepEqual(got, want) { + t.Errorf("unexpected nodes result for test %q:\ngot: %v\nwant:%v", test.name, got, want) + } + } +} + +// ReturnNodeSlice tests the constructed graph for the ability to return all +// the nodes it claims it has used in its construction using the NodeSlicer +// interface. This is a check of the Nodes method of graph.Graph and the +// iterator that is returned. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnNodeSlice(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, want, _, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + it := g.Nodes() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + if it == nil { + continue + } + s, ok := it.(graph.NodeSlicer) + if !ok { + t.Errorf("invalid type for test %q: %T cannot return node slicer", test.name, g) + continue + } + got := s.NodeSlice() + + sort.Sort(ordered.ByID(got)) + sort.Sort(ordered.ByID(want)) + + if !reflect.DeepEqual(got, want) { + t.Errorf("unexpected nodes result for test %q:\ngot: %v\nwant:%v", test.name, got, want) + } + } +} + +// NodeExistence tests the constructed graph for the ability to correctly +// return the existence of nodes within the graph. This is a check of the +// Node method of graph.Graph. +func NodeExistence(t *testing.T, b Builder) { + for _, test := range testCases { + g, want, _, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + seen := set.NewNodes() + for _, exist := range want { + seen.Add(exist) + if g.Node(exist.ID()) == nil { + t.Errorf("missing node for test %q: %v", test.name, exist) + } + } + for _, ghost := range test.nonexist { + if g.Node(ghost.ID()) != nil { + if seen.Has(ghost) { + // Do not fail nodes that the graph builder says can exist + // even if the test case input thinks they should not. + t.Logf("builder has modified non-exist node set: %v is now allowed and present", ghost) + continue + } + t.Errorf("unexpected node for test %q: %v", test.name, ghost) + } + } + } +} + +// ReturnAllEdges tests the constructed graph for the ability to return all +// the edges it claims it has used in its construction. This is a check of +// the Edges method of graph.Graph and the iterator that is returned. 
+// ReturnAllEdges also checks that the edge end nodes exist within the graph, +// checking the Node method of graph.Graph. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnAllEdges(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case edgeLister: + it := eg.Edges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for it.Next() { + e := it.Edge() + got = append(got, e) + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test %q: %T cannot return edge iterator", test.name, g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// ReturnEdgeSlice tests the constructed graph for the ability to return all +// the edges it claims it has used in its construction using the EdgeSlicer +// interface. This is a check of the Edges method of graph.Graph and the +// iterator that is returned. ReturnEdgeSlice also checks that the edge end +// nodes exist within the graph, checking the Node method of graph.Graph. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. 
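+//
+// As an illustrative sketch only (none of the identifiers below are defined
+// by this package), a graph implementation's tests might wire a Builder to
+// these helpers roughly as follows; newMyDirectedGraph and its methods are
+// assumptions about the graph under test:
+//
+//	func builder(nodes []graph.Node, edges []testgraph.WeightedLine, self, absent float64) (graph.Graph, []graph.Node, []testgraph.Edge, float64, float64, bool) {
+//		g := newMyDirectedGraph(self, absent) // hypothetical constructor
+//		for _, n := range nodes {
+//			g.AddNode(n)
+//		}
+//		var es []testgraph.Edge
+//		for _, e := range edges {
+//			if e.From().ID() == e.To().ID() {
+//				return nil, nil, nil, 0, 0, false // skip cases this graph cannot represent
+//			}
+//			we := g.NewWeightedEdge(e.From(), e.To(), e.Weight())
+//			g.SetWeightedEdge(we)
+//			es = append(es, we)
+//		}
+//		return g, graph.NodesOf(g.Nodes()), es, self, absent, true
+//	}
+//
+//	func TestReturnAllNodes(t *testing.T)  { testgraph.ReturnAllNodes(t, builder, true) }
+//	func TestReturnEdgeSlice(t *testing.T) { testgraph.ReturnEdgeSlice(t, builder, true) }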
+func ReturnEdgeSlice(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case edgeLister: + it := eg.Edges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + if it == nil { + continue + } + s, ok := it.(graph.EdgeSlicer) + if !ok { + t.Errorf("invalid type for test %q: %T cannot return edge slicer", test.name, g) + continue + } + gotNative := s.EdgeSlice() + if len(gotNative) != 0 { + got = make([]Edge, len(gotNative)) + } + for i, e := range gotNative { + got[i] = e + + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test %T: cannot return edge iterator", g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// ReturnAllLines tests the constructed graph for the ability to return all +// the edges it claims it has used in its construction and then recover all +// the lines that contribute to those edges. This is a check of the Edges +// method of graph.Graph and the iterator that is returned and the graph.Lines +// implementation of those edges. ReturnAllLines also checks that the edge +// end nodes exist within the graph, checking the Node method of graph.Graph. +// +// The edges used within and returned by the Builder function should be +// graph.Line. The edge parameter passed to b will contain only graph.Line. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnAllLines(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case edgeLister: + it := eg.Edges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for _, e := range graph.EdgesOf(it) { + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + + // FIXME(kortschak): This would not be necessary + // if graph.WeightedLines (and by symmetry) + // graph.WeightedEdges also were graph.Lines + // and graph.Edges. 
+ switch lit := e.(type) { + case graph.Lines: + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + for lit.Next() { + got = append(got, lit.Line()) + } + case graph.WeightedLines: + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + for lit.Next() { + got = append(got, lit.WeightedLine()) + } + default: + continue + } + + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test: %T cannot return edge iterator", g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// ReturnAllWeightedEdges tests the constructed graph for the ability to return +// all the edges it claims it has used in its construction. This is a check of +// the Edges method of graph.Graph and the iterator that is returned. +// ReturnAllWeightedEdges also checks that the edge end nodes exist within the +// graph, checking the Node method of graph.Graph. +// +// The edges used within and returned by the Builder function should be +// graph.WeightedEdge. The edge parameter passed to b will contain only +// graph.WeightedEdge. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnAllWeightedEdges(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case weightedEdgeLister: + it := eg.WeightedEdges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for it.Next() { + e := it.WeightedEdge() + got = append(got, e) + switch g := g.(type) { + case graph.Weighted: + qe := g.WeightedEdge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + default: + t.Logf("weighted edge lister is not a weighted graph - are you sure?: %T", g) + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + } + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test: %T cannot return weighted edge iterator", g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// ReturnWeightedEdgeSlice tests the constructed graph for the ability to +// return all the edges it claims it has used in its construction using the +// WeightedEdgeSlicer interface. This is a check of the Edges method of +// graph.Graph and the iterator that is returned. 
ReturnWeightedEdgeSlice +// also checks that the edge end nodes exist within the graph, checking +// the Node method of graph.Graph. +// +// The edges used within and returned by the Builder function should be +// graph.WeightedEdge. The edge parameter passed to b will contain only +// graph.WeightedEdge. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnWeightedEdgeSlice(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case weightedEdgeLister: + it := eg.WeightedEdges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + s, ok := it.(graph.WeightedEdgeSlicer) + if !ok { + t.Errorf("invalid type for test %T: cannot return weighted edge slice", g) + continue + } + for _, e := range s.WeightedEdgeSlice() { + got = append(got, e) + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test: %T cannot return weighted edge iterator", g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// ReturnAllWeightedLines tests the constructed graph for the ability to return +// all the edges it claims it has used in its construction and then recover all +// the lines that contribute to those edges. This is a check of the Edges +// method of graph.Graph and the iterator that is returned and the graph.Lines +// implementation of those edges. ReturnAllWeightedLines also checks that the +// edge end nodes exist within the graph, checking the Node method of +// graph.Graph. +// +// The edges used within and returned by the Builder function should be +// graph.WeightedLine. The edge parameter passed to b will contain only +// graph.WeightedLine. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. 
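+//
+// A minimal call-site sketch (illustrative only; multigraphBuilder is an
+// assumed Builder that constructs the multigraph under test and returns its
+// graph.WeightedLine values as the expected edge set):
+//
+//	func TestWeightedLines(t *testing.T) { testgraph.ReturnAllWeightedLines(t, multigraphBuilder, true) }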
+func ReturnAllWeightedLines(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, _, want, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + var got []Edge + switch eg := g.(type) { + case weightedEdgeLister: + it := eg.WeightedEdges() + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for _, e := range graph.WeightedEdgesOf(it) { + qe := g.Edge(e.From().ID(), e.To().ID()) + if qe == nil { + t.Errorf("missing edge for test %q: %v", test.name, e) + } else if qe.From().ID() != e.From().ID() || qe.To().ID() != e.To().ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, e.From().ID(), e.To().ID(), qe) + } + + // FIXME(kortschak): This would not be necessary + // if graph.WeightedLines (and by symmetry) + // graph.WeightedEdges also were graph.Lines + // and graph.Edges. + switch lit := e.(type) { + case graph.Lines: + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + for lit.Next() { + got = append(got, lit.Line()) + } + case graph.WeightedLines: + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + for lit.Next() { + got = append(got, lit.WeightedLine()) + } + default: + continue + } + + if g.Node(e.From().ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, e.From().ID()) + } + if g.Node(e.To().ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, e.To().ID()) + } + } + + default: + t.Errorf("invalid type for test: %T cannot return edge iterator", g) + continue + } + + checkEdges(t, test.name, g, got, want) + } +} + +// checkEdges compares got and want for the given graph type. +func checkEdges(t *testing.T, name string, g graph.Graph, got, want []Edge) { + t.Helper() + switch g.(type) { + case graph.Undirected: + sort.Sort(lexicalUndirectedEdges(got)) + sort.Sort(lexicalUndirectedEdges(want)) + if !undirectedEdgeSetEqual(got, want) { + t.Errorf("unexpected edges result for test %q:\ngot: %#v\nwant:%#v", name, got, want) + } + default: + sort.Sort(lexicalEdges(got)) + sort.Sort(lexicalEdges(want)) + if !reflect.DeepEqual(got, want) { + t.Errorf("unexpected edges result for test %q:\ngot: %#v\nwant:%#v", name, got, want) + } + } +} + +// EdgeExistence tests the constructed graph for the ability to correctly +// return the existence of edges within the graph. This is a check of the +// Edge methods of graph.Graph, the EdgeBetween method of graph.Undirected +// and the EdgeFromTo method of graph.Directed. EdgeExistence also checks +// that the nodes and traversed edges exist within the graph, checking the +// Node, Edge, EdgeBetween and HasEdgeBetween methods of graph.Graph, the +// EdgeBetween method of graph.Undirected and the HasEdgeFromTo method of +// graph.Directed. 
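+//
+// For example (illustrative only), for an undirected graph built with a
+// single edge joining nodes 1 and 2, the checks below expect that
+//
+//	g.HasEdgeBetween(1, 2) && g.HasEdgeBetween(2, 1)
+//	g.Edge(1, 2) != nil && g.EdgeBetween(2, 1) != nil
+//	!g.HasEdgeBetween(1, 1) // no self edge was added
+//
+// all hold.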
+func EdgeExistence(t *testing.T, b Builder) { + for _, test := range testCases { + g, nodes, edges, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + want := make(map[edge]bool) + for _, e := range edges { + want[edge{f: e.From().ID(), t: e.To().ID()}] = true + } + for _, x := range nodes { + for _, y := range nodes { + between := want[edge{f: x.ID(), t: y.ID()}] || want[edge{f: y.ID(), t: x.ID()}] + + if has := g.HasEdgeBetween(x.ID(), y.ID()); has != between { + if has { + t.Errorf("unexpected edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + } else { + if want[edge{f: x.ID(), t: y.ID()}] { + e := g.Edge(x.ID(), y.ID()) + if e == nil { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else if e.From().ID() != x.ID() || e.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), e) + } + } + if between && !g.HasEdgeBetween(x.ID(), y.ID()) { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + if g.Node(x.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, x.ID()) + } + if g.Node(y.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, y.ID()) + } + } + + switch g := g.(type) { + case graph.Directed: + u := x + v := y + if has := g.HasEdgeFromTo(u.ID(), v.ID()); has != want[edge{f: u.ID(), t: v.ID()}] { + if has { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + continue + } + // Edge has already been tested above. + if g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + if g.Node(v.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, v.ID()) + } + + case graph.Undirected: + // HasEdgeBetween is already tested above. + if between && g.Edge(x.ID(), y.ID()) == nil { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + if between && g.EdgeBetween(x.ID(), y.ID()) == nil { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + } + + switch g := g.(type) { + case graph.WeightedDirected: + u := x + v := y + if has := g.WeightedEdge(u.ID(), v.ID()) != nil; has != want[edge{f: u.ID(), t: v.ID()}] { + if has { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + continue + } + + case graph.WeightedUndirected: + // HasEdgeBetween is already tested above. + if between && g.WeightedEdge(x.ID(), y.ID()) == nil { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + if between && g.WeightedEdgeBetween(x.ID(), y.ID()) == nil { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + } + } + } + } +} + +// LineExistence tests the constructed graph for the ability to correctly +// return the existence of lines within the graph. This is a check of the +// Line methods of graph.MultiGraph, the EdgeBetween method of graph.Undirected +// and the EdgeFromTo method of graph.Directed. 
LineExistence also checks +// that the nodes and traversed edges exist within the graph, checking the +// Node, Edge, EdgeBetween and HasEdgeBetween methods of graph.Graph, the +// EdgeBetween method of graph.Undirected and the HasEdgeFromTo method of +// graph.Directed. +func LineExistence(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, nodes, edges, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + switch mg := g.(type) { + case graph.Multigraph: + want := make(map[edge]bool) + for _, e := range edges { + want[edge{f: e.From().ID(), t: e.To().ID()}] = true + } + for _, x := range nodes { + for _, y := range nodes { + between := want[edge{f: x.ID(), t: y.ID()}] || want[edge{f: y.ID(), t: x.ID()}] + + if has := g.HasEdgeBetween(x.ID(), y.ID()); has != between { + if has { + t.Errorf("unexpected edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + } else { + if want[edge{f: x.ID(), t: y.ID()}] { + lit := mg.Lines(x.ID(), y.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if lit.Len() == 0 { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + for lit.Next() { + l := lit.Line() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + } + } + if between && !g.HasEdgeBetween(x.ID(), y.ID()) { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } + if g.Node(x.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, x.ID()) + } + if g.Node(y.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, y.ID()) + } + } + + switch g := g.(type) { + case graph.DirectedMultigraph: + u := x + v := y + if has := g.HasEdgeFromTo(u.ID(), v.ID()); has != want[edge{f: u.ID(), t: v.ID()}] { + if has { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + continue + } + // Edge has already been tested above. + if g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + if g.Node(v.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, v.ID()) + } + + case graph.UndirectedMultigraph: + // HasEdgeBetween is already tested above. 
+ lit := g.Lines(x.ID(), y.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if between && lit.Len() == 0 { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + for lit.Next() { + l := lit.Line() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + } + lit = g.LinesBetween(x.ID(), y.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if between && lit.Len() == 0 { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + for lit.Next() { + l := lit.Line() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + } + } + + switch g := g.(type) { + case graph.WeightedDirectedMultigraph: + u := x + v := y + lit := g.WeightedLines(u.ID(), v.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if has := lit != graph.Empty; has != want[edge{f: u.ID(), t: v.ID()}] { + if has { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } else { + t.Errorf("missing edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + continue + } + for lit.Next() { + l := lit.WeightedLine() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + // Edge has already been tested above. + if g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + if g.Node(v.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, v.ID()) + } + + case graph.WeightedUndirectedMultigraph: + // HasEdgeBetween is already tested above. + lit := g.WeightedLines(x.ID(), y.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if between && lit.Len() == 0 { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + for lit.Next() { + l := lit.WeightedLine() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + } + lit = g.WeightedLinesBetween(x.ID(), y.ID()) + if !isValidIterator(lit) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, lit) + continue + } + checkEmptyIterator(t, lit, useEmpty) + if between && lit.Len() == 0 { + t.Errorf("missing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID()) + } else { + for lit.Next() { + l := lit.WeightedLine() + if l.From().ID() != x.ID() || l.To().ID() != y.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, x.ID(), y.ID(), l) + } + } + } + } + } + } + default: + t.Errorf("invalid type for test: %T not a multigraph", g) + continue + } + } +} + +// ReturnAdjacentNodes tests the constructed graph for the ability to correctly +// return the nodes reachable from each node within the graph. This is a check +// of the From method of graph.Graph and the To method of graph.Directed. 
+// ReturnAdjacentNodes also checks that the nodes and traversed edges exist +// within the graph, checking the Node, Edge, EdgeBetween and HasEdgeBetween +// methods of graph.Graph, the EdgeBetween method of graph.Undirected and the +// HasEdgeFromTo method of graph.Directed. +// If useEmpty is true, graph iterators will be checked for the use of +// graph.Empty if they are empty. +func ReturnAdjacentNodes(t *testing.T, b Builder, useEmpty bool) { + for _, test := range testCases { + g, nodes, edges, _, _, ok := b(test.nodes, test.edges, test.self, test.absent) + if !ok { + t.Logf("skipping test case: %q", test.name) + continue + } + + want := make(map[edge]bool) + for _, e := range edges { + want[edge{f: e.From().ID(), t: e.To().ID()}] = true + } + for _, x := range nodes { + switch g := g.(type) { + case graph.Directed: + // Test forward. + u := x + it := g.From(u.ID()) + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for i := 0; it.Next(); i++ { + v := it.Node() + if i == 0 && g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + if g.Node(v.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, v.ID()) + } + qe := g.Edge(u.ID(), v.ID()) + if qe == nil { + t.Errorf("missing from edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } else if qe.From().ID() != u.ID() || qe.To().ID() != v.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, u.ID(), v.ID(), qe) + } + if !g.HasEdgeBetween(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + } + if !g.HasEdgeFromTo(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + if !want[edge{f: u.ID(), t: v.ID()}] { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + } + + // Test backward. 
+ v := x + it = g.To(v.ID()) + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for i := 0; it.Next(); i++ { + u := it.Node() + if i == 0 && g.Node(v.ID()) == nil { + t.Errorf("missing to node for test %q: %v", test.name, v.ID()) + } + if g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + qe := g.Edge(u.ID(), v.ID()) + if qe == nil { + t.Errorf("missing from edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + continue + } + if qe.From().ID() != u.ID() || qe.To().ID() != v.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, u.ID(), v.ID(), qe) + } + if !g.HasEdgeBetween(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + if !g.HasEdgeFromTo(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + continue + } + if !want[edge{f: u.ID(), t: v.ID()}] { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + } + + case graph.Undirected: + u := x + it := g.From(u.ID()) + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for i := 0; it.Next(); i++ { + v := it.Node() + if i == 0 && g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + qe := g.Edge(u.ID(), v.ID()) + if qe == nil { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + if qe.From().ID() != u.ID() || qe.To().ID() != v.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, u.ID(), v.ID(), qe) + } + qe = g.EdgeBetween(u.ID(), v.ID()) + if qe == nil { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + if qe.From().ID() != u.ID() || qe.To().ID() != v.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, u.ID(), v.ID(), qe) + } + if !g.HasEdgeBetween(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + between := want[edge{f: u.ID(), t: v.ID()}] || want[edge{f: v.ID(), t: u.ID()}] + if !between { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + } + + default: + u := x + it := g.From(u.ID()) + if !isValidIterator(it) { + t.Errorf("invalid iterator for test %q: got:%#v", test.name, it) + continue + } + checkEmptyIterator(t, it, useEmpty) + for i := 0; it.Next(); i++ { + v := it.Node() + if i == 0 && g.Node(u.ID()) == nil { + t.Errorf("missing from node for test %q: %v", test.name, u.ID()) + } + qe := g.Edge(u.ID(), v.ID()) + if qe == nil { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + if qe.From().ID() != u.ID() || qe.To().ID() != v.ID() { + t.Errorf("inverted edge for test %q query with F=%d T=%d: got:%#v", + test.name, u.ID(), v.ID(), qe) + } + if !g.HasEdgeBetween(u.ID(), v.ID()) { + t.Errorf("missing from edge for test %q: (%v)--(%v)", test.name, u.ID(), v.ID()) + continue + } + between := want[edge{f: u.ID(), t: v.ID()}] || want[edge{f: v.ID(), t: u.ID()}] + if !between { + t.Errorf("unexpected edge for test %q: (%v)->(%v)", test.name, u.ID(), v.ID()) + } + } + } + } + } +} + +// Weight tests the constructed graph for the ability to correctly return +// 
the weight between two nodes, checking the Weight method of graph.Weighted.
+//
+// The self and absent values returned by the Builder should match the values
+// used by the Weight method.
+func Weight(t *testing.T, b Builder) {
+	for _, test := range testCases {
+		g, nodes, _, self, absent, ok := b(test.nodes, test.edges, test.self, test.absent)
+		if !ok {
+			t.Logf("skipping test case: %q", test.name)
+			continue
+		}
+		wg, ok := g.(graph.Weighted)
+		if !ok {
+			t.Errorf("invalid graph type for test %q: %T is not graph.Weighted", test.name, g)
+		}
+		_, multi := g.(graph.Multigraph)
+
+		for _, x := range nodes {
+			for _, y := range nodes {
+				w, ok := wg.Weight(x.ID(), y.ID())
+				e := wg.WeightedEdge(x.ID(), y.ID())
+				switch {
+				case !ok:
+					if e != nil {
+						t.Errorf("missing edge weight for existing edge for test %q: (%v)--(%v)", test.name, x.ID(), y.ID())
+					}
+					if !same(w, absent) {
+						t.Errorf("unexpected absent weight for test %q: got:%v want:%v", test.name, w, absent)
+					}
+
+				case !multi && x.ID() == y.ID():
+					if !same(w, self) {
+						t.Errorf("unexpected self weight for test %q: got:%v want:%v", test.name, w, self)
+					}
+
+				case e == nil:
+					t.Errorf("missing edge for existing non-self weight for test %q: (%v)--(%v)", test.name, x.ID(), y.ID())
+
+				case e.Weight() != w:
+					t.Errorf("weight mismatch for test %q: edge=%v graph=%v", test.name, e.Weight(), w)
+				}
+			}
+		}
+	}
+}
+
+// AdjacencyMatrix tests the constructed graph for the ability to correctly
+// return an adjacency matrix that matches the weights returned by the graph's
+// Weight method.
+//
+// The self and absent values returned by the Builder should match the values
+// used by the Weight method.
+func AdjacencyMatrix(t *testing.T, b Builder) {
+	for _, test := range testCases {
+		g, nodes, _, self, absent, ok := b(test.nodes, test.edges, test.self, test.absent)
+		if !ok {
+			t.Logf("skipping test case: %q", test.name)
+			continue
+		}
+		wg, ok := g.(graph.Weighted)
+		if !ok {
+			t.Errorf("invalid graph type for test %q: %T is not graph.Weighted", test.name, g)
+		}
+		mg, ok := g.(matrixer)
+		if !ok {
+			t.Errorf("invalid graph type for test %q: %T cannot return adjacency matrix", test.name, g)
+		}
+		m := mg.Matrix()
+
+		r, c := m.Dims()
+		if r != c || r != len(nodes) {
+			t.Errorf("dimension mismatch for test %q: r=%d c=%d order=%d", test.name, r, c, len(nodes))
+		}
+
+		for _, x := range nodes {
+			i := int(x.ID())
+			for _, y := range nodes {
+				j := int(y.ID())
+				w, ok := wg.Weight(x.ID(), y.ID())
+				switch {
+				case !ok:
+					if !same(m.At(i, j), absent) {
+						t.Errorf("weight mismatch for test %q: (%v)--(%v) matrix=%v graph=%v", test.name, x.ID(), y.ID(), m.At(i, j), w)
+					}
+				case x.ID() == y.ID():
+					if !same(m.At(i, j), self) {
+						t.Errorf("weight mismatch for test %q: (%v)--(%v) matrix=%v graph=%v", test.name, x.ID(), y.ID(), m.At(i, j), w)
+					}
+				default:
+					if !same(m.At(i, j), w) {
+						t.Errorf("weight mismatch for test %q: (%v)--(%v) matrix=%v graph=%v", test.name, x.ID(), y.ID(), m.At(i, j), w)
+					}
+				}
+			}
+		}
+	}
+}
+
+// lexicalEdges sorts a collection of edges lexically on the
+// keys: from.ID > to.ID > [line.ID] > [weight].
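+//
+// For example (illustrative only), under this ordering an edge 1->2 sorts
+// before 1->3 regardless of weight, and two lines joining 1 and 2 are
+// ordered by line ID before weight is consulted.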
+type lexicalEdges []Edge
+
+func (e lexicalEdges) Len() int { return len(e) }
+func (e lexicalEdges) Less(i, j int) bool {
+	if e[i].From().ID() < e[j].From().ID() {
+		return true
+	}
+	sf := e[i].From().ID() == e[j].From().ID()
+	if sf && e[i].To().ID() < e[j].To().ID() {
+		return true
+	}
+	st := e[i].To().ID() == e[j].To().ID()
+	li, oki := e[i].(graph.Line)
+	lj, okj := e[j].(graph.Line)
+	if oki != okj {
+		panic(fmt.Sprintf("testgraph: mismatched types %T != %T", e[i], e[j]))
+	}
+	if !oki {
+		return sf && st && lessWeight(e[i], e[j])
+	}
+	if sf && st && li.ID() < lj.ID() {
+		return true
+	}
+	return sf && st && li.ID() == lj.ID() && lessWeight(e[i], e[j])
+}
+func (e lexicalEdges) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
+
+// lexicalUndirectedEdges sorts a collection of edges lexically on the
+// keys: lo.ID > hi.ID > [line.ID] > [weight].
+type lexicalUndirectedEdges []Edge
+
+func (e lexicalUndirectedEdges) Len() int { return len(e) }
+func (e lexicalUndirectedEdges) Less(i, j int) bool {
+	lidi, hidi, _ := undirectedIDs(e[i])
+	lidj, hidj, _ := undirectedIDs(e[j])
+
+	if lidi < lidj {
+		return true
+	}
+	sl := lidi == lidj
+	if sl && hidi < hidj {
+		return true
+	}
+	sh := hidi == hidj
+	li, oki := e[i].(graph.Line)
+	lj, okj := e[j].(graph.Line)
+	if oki != okj {
+		panic(fmt.Sprintf("testgraph: mismatched types %T != %T", e[i], e[j]))
+	}
+	if !oki {
+		return sl && sh && lessWeight(e[i], e[j])
+	}
+	if sl && sh && li.ID() < lj.ID() {
+		return true
+	}
+	return sl && sh && li.ID() == lj.ID() && lessWeight(e[i], e[j])
+}
+func (e lexicalUndirectedEdges) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
+
+func lessWeight(ei, ej Edge) bool {
+	wei, oki := ei.(graph.WeightedEdge)
+	wej, okj := ej.(graph.WeightedEdge)
+	if oki != okj {
+		panic(fmt.Sprintf("testgraph: mismatched types %T != %T", ei, ej))
+	}
+	if !oki {
+		return false
+	}
+	return wei.Weight() < wej.Weight()
+}
+
+// undirectedEdgeSetEqual returns whether a pair of undirected edge
+// slices sorted by lexicalUndirectedEdges are equal.
+func undirectedEdgeSetEqual(a, b []Edge) bool {
+	if len(a) == 0 && len(b) == 0 {
+		return true
+	}
+	if len(a) == 0 || len(b) == 0 {
+		return false
+	}
+	if !undirectedEdgeEqual(a[0], b[0]) {
+		return false
+	}
+	i, j := 0, 0
+	for {
+		switch {
+		case i == len(a)-1 && j == len(b)-1:
+			return true
+
+		case i < len(a)-1 && undirectedEdgeEqual(a[i+1], b[j]):
+			i++
+
+		case j < len(b)-1 && undirectedEdgeEqual(a[i], b[j+1]):
+			j++
+
+		case i < len(a)-1 && j < len(b)-1 && undirectedEdgeEqual(a[i+1], b[j+1]):
+			i++
+			j++
+
+		default:
+			return false
+		}
+	}
+}
+
+// undirectedEdgeEqual returns whether a pair of undirected edges are equal
+// after canonicalising from and to IDs by numerical sort order.
+func undirectedEdgeEqual(a, b Edge) bool {
+	loa, hia, inva := undirectedIDs(a)
+	lob, hib, invb := undirectedIDs(b)
+	// Use reflect.DeepEqual if the edges are parallel
+	// rather than anti-parallel.
+	if inva == invb {
+		return reflect.DeepEqual(a, b)
+	}
+	if loa != lob || hia != hib {
+		return false
+	}
+	la, oka := a.(graph.Line)
+	lb, okb := b.(graph.Line)
+	if !oka && !okb {
+		return true
+	}
+	if la.ID() != lb.ID() {
+		return false
+	}
+	wea, oka := a.(graph.WeightedEdge)
+	web, okb := b.(graph.WeightedEdge)
+	if !oka && !okb {
+		return true
+	}
+	return wea.Weight() == web.Weight()
+}
+
+// NodeAdder is a graph.NodeAdder graph.
+type NodeAdder interface {
+	graph.Graph
+	graph.NodeAdder
+}
+
+// AddNodes tests whether g correctly implements the graph.NodeAdder interface.
+// AddNodes gets a new node from g and adds it to the graph, repeating this pair +// of operations n times. The existence of added nodes is confirmed in the graph. +// AddNodes also checks that re-adding each of the added nodes causes a panic. +func AddNodes(t *testing.T, g NodeAdder, n int) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + var addedNodes []graph.Node + for i := 0; i < n; i++ { + node := g.NewNode() + prev := g.Nodes().Len() + if g.Node(node.ID()) != nil { + curr := g.Nodes().Len() + if curr != prev { + t.Fatalf("NewNode mutated graph: prev graph order != curr graph order, %d != %d", prev, curr) + } + t.Fatalf("NewNode returned existing: %#v", node) + } + g.AddNode(node) + addedNodes = append(addedNodes, node) + curr := g.Nodes().Len() + if curr != prev+1 { + t.Fatalf("AddNode failed to mutate graph: curr graph order != prev graph order+1, %d != %d", curr, prev+1) + } + if g.Node(node.ID()) == nil { + t.Fatalf("AddNode failed to add node to graph trying to add %#v", node) + } + } + + sort.Sort(ordered.ByID(addedNodes)) + graphNodes := graph.NodesOf(g.Nodes()) + sort.Sort(ordered.ByID(graphNodes)) + if !reflect.DeepEqual(addedNodes, graphNodes) { + if n > 20 { + t.Errorf("unexpected node set after node addition: got len:%v want len:%v", len(graphNodes), len(addedNodes)) + } else { + t.Errorf("unexpected node set after node addition: got:\n %v\nwant:\n%v", graphNodes, addedNodes) + } + } + + it := g.Nodes() + for it.Next() { + panicked := panics(func() { + g.AddNode(it.Node()) + }) + if !panicked { + t.Fatalf("expected panic adding existing node: %v", it.Node()) + } + } +} + +// AddArbitraryNodes tests whether g correctly implements the AddNode method. Not all +// graph.NodeAdder graphs are expected to implement the semantics of this test. +// AddArbitraryNodes iterates over add, adding each node to the graph. The existence +// of each added node in the graph is confirmed. +func AddArbitraryNodes(t *testing.T, g NodeAdder, add graph.Nodes) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + for add.Next() { + node := add.Node() + prev := g.Nodes().Len() + g.AddNode(node) + curr := g.Nodes().Len() + if curr != prev+1 { + t.Fatalf("AddNode failed to mutate graph: curr graph order != prev graph order+1, %d != %d", curr, prev+1) + } + if g.Node(node.ID()) == nil { + t.Fatalf("AddNode failed to add node to graph trying to add %#v", node) + } + } + + add.Reset() + addedNodes := graph.NodesOf(add) + sort.Sort(ordered.ByID(addedNodes)) + graphNodes := graph.NodesOf(g.Nodes()) + sort.Sort(ordered.ByID(graphNodes)) + if !reflect.DeepEqual(addedNodes, graphNodes) { + t.Errorf("unexpected node set after node addition: got:\n %v\nwant:\n%v", graphNodes, addedNodes) + } + + it := g.Nodes() + for it.Next() { + panicked := panics(func() { + g.AddNode(it.Node()) + }) + if !panicked { + t.Fatalf("expected panic adding existing node: %v", it.Node()) + } + } +} + +// NodeRemover is a graph.NodeRemover graph. +type NodeRemover interface { + graph.Graph + graph.NodeRemover +} + +// RemoveNodes tests whether g correctly implements the graph.NodeRemover interface. +// The input graph g must contain a set of nodes with some edges between them. 
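+//
+// A minimal call-site sketch (illustrative only; myGraph and myNode are
+// assumptions standing in for the implementation under test, which must
+// also allow edges to be added):
+//
+//	g := newMyGraph()
+//	g.SetEdge(g.NewEdge(myNode(1), myNode(2)))
+//	g.SetEdge(g.NewEdge(myNode(2), myNode(3)))
+//	testgraph.RemoveNodes(t, g)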
+func RemoveNodes(t *testing.T, g NodeRemover) {
+	defer func() {
+		r := recover()
+		if r != nil {
+			t.Errorf("unexpected panic: %v", r)
+		}
+	}()
+
+	it := g.Nodes()
+	first := true
+	for it.Next() {
+		u := it.Node()
+
+		seen := make(map[edge]struct{})
+
+		// Collect all incident edges.
+		var incident []graph.Edge
+		to := g.From(u.ID())
+		for to.Next() {
+			v := to.Node()
+			e := g.Edge(u.ID(), v.ID())
+			if e == nil {
+				t.Fatalf("bad graph: neighbors not connected: u=%#v v=%#v", u, v)
+			}
+			if _, ok := g.(graph.Undirected); ok {
+				seen[edge{f: e.To().ID(), t: e.From().ID()}] = struct{}{}
+			}
+			seen[edge{f: e.From().ID(), t: e.To().ID()}] = struct{}{}
+			incident = append(incident, e)
+		}
+
+		// Collect all other edges.
+		var others []graph.Edge
+		currit := g.Nodes()
+		for currit.Next() {
+			u := currit.Node()
+			to := g.From(u.ID())
+			for to.Next() {
+				v := to.Node()
+				e := g.Edge(u.ID(), v.ID())
+				if e == nil {
+					t.Fatalf("bad graph: neighbors not connected: u=%#v v=%#v", u, v)
+				}
+				seen[edge{f: e.From().ID(), t: e.To().ID()}] = struct{}{}
+				others = append(others, e)
+			}
+		}
+
+		if first && len(seen) == 0 {
+			t.Fatal("incomplete test: no edges in graph")
+		}
+		first = false
+
+		prev := g.Nodes().Len()
+		g.RemoveNode(u.ID())
+		curr := g.Nodes().Len()
+		if curr != prev-1 {
+			t.Fatalf("RemoveNode failed to mutate graph: curr graph order != prev graph order-1, %d != %d", curr, prev-1)
+		}
+		if g.Node(u.ID()) != nil {
+			t.Fatalf("failed to remove node: %#v", u)
+		}
+
+		for _, e := range incident {
+			if g.HasEdgeBetween(e.From().ID(), e.To().ID()) {
+				t.Fatalf("RemoveNode failed to remove connected edge: %#v", e)
+			}
+		}
+
+		for _, e := range others {
+			if e.From().ID() == u.ID() || e.To().ID() == u.ID() {
+				continue
+			}
+			if g.Edge(e.From().ID(), e.To().ID()) == nil {
+				t.Fatalf("RemoveNode %v removed unconnected edge: %#v", u, e)
+			}
+		}
+	}
+}
+
+// EdgeAdder is a graph.EdgeAdder graph.
+type EdgeAdder interface {
+	graph.Graph
+	graph.EdgeAdder
+}
+
+// AddEdges tests whether g correctly implements the graph.EdgeAdder interface.
+// AddEdges creates n pairs of nodes with random IDs in [0,n) and joins the
+// nodes in each pair with an edge using SetEdge. AddEdges confirms that the
+// end point nodes are added to the graph and that the edges are stored in the
+// graph. If canLoop is true, self edges may be created. If canSetNode is true,
+// a second call to SetEdge is made for each edge to confirm that the nodes
+// corresponding to the end points are updated.
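+//
+// A minimal call-site sketch (illustrative only; myNode is an assumed
+// graph.Node implementation):
+//
+//	testgraph.AddEdges(t, 100, g, func(id int64) graph.Node { return myNode(id) }, true, false)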
+func AddEdges(t *testing.T, n int, g EdgeAdder, newNode func(id int64) graph.Node, canLoop, canSetNode bool) {
+	defer func() {
+		r := recover()
+		if r != nil {
+			t.Errorf("unexpected panic: %v", r)
+		}
+	}()
+
+	type altNode struct {
+		graph.Node
+	}
+
+	rnd := rand.New(rand.NewSource(1))
+	for i := 0; i < n; i++ {
+		u := newNode(rnd.Int63n(int64(n)))
+		var v graph.Node
+		for {
+			v = newNode(rnd.Int63n(int64(n)))
+			if canLoop || u.ID() != v.ID() {
+				break
+			}
+		}
+		e := g.NewEdge(u, v)
+		if g.Edge(u.ID(), v.ID()) != nil {
+			t.Fatalf("NewEdge returned existing: %#v", e)
+		}
+		g.SetEdge(e)
+		if g.Edge(u.ID(), v.ID()) == nil {
+			t.Fatalf("SetEdge failed to add edge: %#v", e)
+		}
+		if g.Node(u.ID()) == nil {
+			t.Fatalf("SetEdge failed to add from node: %#v", u)
+		}
+		if g.Node(v.ID()) == nil {
+			t.Fatalf("SetEdge failed to add to node: %#v", v)
+		}
+
+		if !canSetNode {
+			continue
+		}
+
+		g.SetEdge(g.NewEdge(altNode{u}, altNode{v}))
+		if nu := g.Node(u.ID()); nu == u {
+			t.Fatalf("SetEdge failed to update from node: u=%#v nu=%#v", u, nu)
+		}
+		if nv := g.Node(v.ID()); nv == v {
+			t.Fatalf("SetEdge failed to update to node: v=%#v nv=%#v", v, nv)
+		}
+	}
+}
+
+// WeightedEdgeAdder is a graph.WeightedEdgeAdder graph.
+type WeightedEdgeAdder interface {
+	graph.Graph
+	graph.WeightedEdgeAdder
+}
+
+// AddWeightedEdges tests whether g correctly implements the graph.WeightedEdgeAdder
+// interface. AddWeightedEdges creates n pairs of nodes with random IDs in [0,n) and
+// joins the nodes in each pair with an edge of weight w using SetWeightedEdge.
+// AddWeightedEdges confirms that the end point nodes are added to the graph and that
+// the edges are stored in the graph. If canLoop is true, self edges may be created.
+// If canSetNode is true, a second call to SetWeightedEdge is made for each edge to
+// confirm that the nodes corresponding to the end points are updated.
+func AddWeightedEdges(t *testing.T, n int, g WeightedEdgeAdder, w float64, newNode func(id int64) graph.Node, canLoop, canSetNode bool) {
+	defer func() {
+		r := recover()
+		if r != nil {
+			t.Errorf("unexpected panic: %v", r)
+		}
+	}()
+
+	type altNode struct {
+		graph.Node
+	}
+
+	rnd := rand.New(rand.NewSource(1))
+	for i := 0; i < n; i++ {
+		u := newNode(rnd.Int63n(int64(n)))
+		var v graph.Node
+		for {
+			v = newNode(rnd.Int63n(int64(n)))
+			if canLoop || u.ID() != v.ID() {
+				break
+			}
+		}
+		e := g.NewWeightedEdge(u, v, w)
+		if g.Edge(u.ID(), v.ID()) != nil {
+			t.Fatalf("NewEdge returned existing: %#v", e)
+		}
+		g.SetWeightedEdge(e)
+		ne := g.Edge(u.ID(), v.ID())
+		if ne == nil {
+			t.Fatalf("SetWeightedEdge failed to add edge: %#v", e)
+		}
+		we, ok := ne.(graph.WeightedEdge)
+		if !ok {
+			t.Fatalf("SetWeightedEdge failed to add weighted edge: %#v", e)
+		}
+		if we.Weight() != w {
+			t.Fatalf("edge weight mismatch: got:%f want:%f", we.Weight(), w)
+		}
+
+		if g.Node(u.ID()) == nil {
+			t.Fatalf("SetWeightedEdge failed to add from node: %#v", u)
+		}
+		if g.Node(v.ID()) == nil {
+			t.Fatalf("SetWeightedEdge failed to add to node: %#v", v)
+		}
+
+		if !canSetNode {
+			continue
+		}
+
+		g.SetWeightedEdge(g.NewWeightedEdge(altNode{u}, altNode{v}, w))
+		if nu := g.Node(u.ID()); nu == u {
+			t.Fatalf("SetWeightedEdge failed to update from node: u=%#v nu=%#v", u, nu)
+		}
+		if nv := g.Node(v.ID()); nv == v {
+			t.Fatalf("SetWeightedEdge failed to update to node: v=%#v nv=%#v", v, nv)
+		}
+	}
+}
+
+// NoLoopAddEdges tests whether g panics for self-loop addition. NoLoopAddEdges
+// adds n nodes with IDs in [0,n) and creates an edge from the graph with NewEdge.
+// NoLoopAddEdges confirms that this does not panic and then adds the edge to the +// graph to ensure that SetEdge will panic when adding a self-loop. +func NoLoopAddEdges(t *testing.T, n int, g EdgeAdder, newNode func(id int64) graph.Node) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + for id := 0; id < n; id++ { + node := newNode(int64(id)) + e := g.NewEdge(node, node) + panicked := panics(func() { + g.SetEdge(e) + }) + if !panicked { + t.Errorf("expected panic for self-edge: %#v", e) + } + } +} + +// NoLoopAddWeightedEdges tests whether g panics for self-loop addition. NoLoopAddWeightedEdges +// adds n nodes with IDs in [0,n) and creates an edge from the graph with NewWeightedEdge. +// NoLoopAddWeightedEdges confirms that this does not panic and then adds the edge to the +// graph to ensure that SetWeightedEdge will panic when adding a self-loop. +func NoLoopAddWeightedEdges(t *testing.T, n int, g WeightedEdgeAdder, w float64, newNode func(id int64) graph.Node) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + for id := 0; id < n; id++ { + node := newNode(int64(id)) + e := g.NewWeightedEdge(node, node, w) + panicked := panics(func() { + g.SetWeightedEdge(e) + }) + if !panicked { + t.Errorf("expected panic for self-edge: %#v", e) + } + } +} + +// LineAdder is a graph.LineAdder multigraph. +type LineAdder interface { + graph.Multigraph + graph.LineAdder +} + +// AddLines tests whether g correctly implements the graph.LineAdder interface. +// AddLines creates n pairs of nodes with random IDs in [0,n) and joins edges +// each node in the pair using SetLine. AddLines confirms that the end point +// nodes are added to the graph and that the edges are stored in the graph. +// If canSet is true, a second call to SetLine is made for each edge to confirm +// that the nodes corresponding the end points are updated. +func AddLines(t *testing.T, n int, g LineAdder, newNode func(id int64) graph.Node, canSetNode bool) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + type altNode struct { + graph.Node + } + + rnd := rand.New(rand.NewSource(1)) + seen := make(set.Int64s) + for i := 0; i < n; i++ { + u := newNode(rnd.Int63n(int64(n))) + v := newNode(rnd.Int63n(int64(n))) + prev := g.Lines(u.ID(), v.ID()) + l := g.NewLine(u, v) + if seen.Has(l.ID()) { + t.Fatalf("NewLine returned an existing line: %#v", l) + } + if g.Lines(u.ID(), v.ID()).Len() != prev.Len() { + t.Fatalf("NewLine added a line: %#v", l) + } + g.SetLine(l) + seen.Add(l.ID()) + if g.Lines(u.ID(), v.ID()).Len() != prev.Len()+1 { + t.Fatalf("SetLine failed to add line: %#v", l) + } + if g.Node(u.ID()) == nil { + t.Fatalf("SetLine failed to add from node: %#v", u) + } + if g.Node(v.ID()) == nil { + t.Fatalf("SetLine failed to add to node: %#v", v) + } + + if !canSetNode { + continue + } + + g.SetLine(g.NewLine(altNode{u}, altNode{v})) + if nu := g.Node(u.ID()); nu == u { + t.Fatalf("SetLine failed to update from node: u=%#v nu=%#v", u, nu) + } + if nv := g.Node(v.ID()); nv == v { + t.Fatalf("SetLine failed to update to node: v=%#v nv=%#v", v, nv) + } + } +} + +// WeightedLineAdder is a graph.WeightedLineAdder multigraph. +type WeightedLineAdder interface { + graph.Multigraph + graph.WeightedLineAdder +} + +// AddWeightedLines tests whether g correctly implements the graph.WeightedEdgeAdder +// interface. 
AddWeightedLines creates n pairs of nodes with random IDs in [0,n) and +// joins edges each node in the pair using SetWeightedLine with weight w. +// AddWeightedLines confirms that the end point nodes are added to the graph and that +// the edges are stored in the graph. If canSet is true, a second call to SetWeightedLine +// is made for each edge to confirm that the nodes corresponding the end points are +// updated. +func AddWeightedLines(t *testing.T, n int, g WeightedLineAdder, w float64, newNode func(id int64) graph.Node, canSetNode bool) { + defer func() { + r := recover() + if r != nil { + t.Errorf("unexpected panic: %v", r) + } + }() + + type altNode struct { + graph.Node + } + + rnd := rand.New(rand.NewSource(1)) + seen := make(set.Int64s) + for i := 0; i < n; i++ { + u := newNode(rnd.Int63n(int64(n))) + v := newNode(rnd.Int63n(int64(n))) + prev := g.Lines(u.ID(), v.ID()) + l := g.NewWeightedLine(u, v, w) + if seen.Has(l.ID()) { + t.Fatalf("NewWeightedLine returned an existing line: %#v", l) + } + if g.Lines(u.ID(), v.ID()).Len() != prev.Len() { + t.Fatalf("NewWeightedLine added a line: %#v", l) + } + g.SetWeightedLine(l) + seen.Add(l.ID()) + curr := g.Lines(u.ID(), v.ID()) + if curr.Len() != prev.Len()+1 { + t.Fatalf("SetWeightedLine failed to add line: %#v", l) + } + var found bool + for curr.Next() { + if curr.Line().ID() == l.ID() { + found = true + wl, ok := curr.Line().(graph.WeightedLine) + if !ok { + t.Fatalf("SetWeightedLine failed to add weighted line: %#v", l) + } + if wl.Weight() != w { + t.Fatalf("line weight mismatch: got:%f want:%f", wl.Weight(), w) + } + break + } + } + if !found { + t.Fatalf("SetWeightedLine failed to add line: %#v", l) + } + if g.Node(u.ID()) == nil { + t.Fatalf("SetWeightedLine failed to add from node: %#v", u) + } + if g.Node(v.ID()) == nil { + t.Fatalf("SetWeightedLine failed to add to node: %#v", v) + } + + if !canSetNode { + continue + } + + g.SetWeightedLine(g.NewWeightedLine(altNode{u}, altNode{v}, w)) + if nu := g.Node(u.ID()); nu == u { + t.Fatalf("SetWeightedLine failed to update from node: u=%#v nu=%#v", u, nu) + } + if nv := g.Node(v.ID()); nv == v { + t.Fatalf("SetWeightedLine failed to update to node: v=%#v nv=%#v", v, nv) + } + } +} + +// EdgeRemover is a graph.EdgeRemover graph. +type EdgeRemover interface { + graph.Graph + graph.EdgeRemover +} + +// RemoveEdges tests whether g correctly implements the graph.EdgeRemover interface. +// The input graph g must contain a set of nodes with some edges between them. +// RemoveEdges iterates over remove, which must contain edges in g, removing each +// edge. RemoveEdges confirms that the edge is removed, leaving its end-point nodes +// and all other edges in the graph. 
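+//
+// For instance, a test could populate an implementation and hand all of its
+// edges back for removal (a sketch only; simple here is assumed to be gonum's
+// simple package):
+//
+//	g := simple.NewDirectedGraph()
+//	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2)})
+//	g.SetEdge(simple.Edge{F: simple.Node(2), T: simple.Node(3)})
+//	RemoveEdges(t, g, g.Edges())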
+func RemoveEdges(t *testing.T, g EdgeRemover, remove graph.Edges) { + edges := make(map[edge]struct{}) + nodes := g.Nodes() + for nodes.Next() { + u := nodes.Node() + uid := u.ID() + to := g.From(uid) + for to.Next() { + v := to.Node() + edges[edge{f: u.ID(), t: v.ID()}] = struct{}{} + } + } + + for remove.Next() { + e := remove.Edge() + if g.Edge(e.From().ID(), e.To().ID()) == nil { + t.Fatalf("bad tests: missing edge: %#v", e) + } + if g.Node(e.From().ID()) == nil { + t.Fatalf("bad tests: missing from node: %#v", e.From()) + } + if g.Node(e.To().ID()) == nil { + t.Fatalf("bad tests: missing to node: %#v", e.To()) + } + + g.RemoveEdge(e.From().ID(), e.To().ID()) + + if _, ok := g.(graph.Undirected); ok { + delete(edges, edge{f: e.To().ID(), t: e.From().ID()}) + } + delete(edges, edge{f: e.From().ID(), t: e.To().ID()}) + for ge := range edges { + if g.Edge(ge.f, ge.t) == nil { + t.Fatalf("unexpected missing edge after removing edge %#v: %#v", e, ge) + } + } + + if ne := g.Edge(e.From().ID(), e.To().ID()); ne != nil { + t.Fatalf("expected nil edge: got:%#v", ne) + } + if g.Node(e.From().ID()) == nil { + t.Fatalf("unexpected deletion of from node: %#v", e.From()) + } + if g.Node(e.To().ID()) == nil { + t.Fatalf("unexpected deletion to node: %#v", e.To()) + } + } +} + +// LineRemover is a graph.EdgeRemove graph. +type LineRemover interface { + graph.Multigraph + graph.LineRemover +} + +// RemoveLines tests whether g correctly implements the graph.LineRemover interface. +// The input graph g must contain a set of nodes with some lines between them. +// RemoveLines iterates over remove, which must contain lines in g, removing each +// line. RemoveLines confirms that the line is removed, leaving its end-point nodes +// and all other lines in the graph. +func RemoveLines(t *testing.T, g LineRemover, remove graph.Lines) { + // lines is the set of lines in the graph. + // The presence of a key indicates that the + // line should exist in the graph. The value + // for each key is used to indicate whether + // it has been found during testing. + lines := make(map[edge]bool) + nodes := g.Nodes() + for nodes.Next() { + u := nodes.Node() + uid := u.ID() + to := g.From(uid) + for to.Next() { + v := to.Node() + lit := g.Lines(u.ID(), v.ID()) + for lit.Next() { + lines[edge{f: u.ID(), t: v.ID(), id: lit.Line().ID()}] = true + } + } + } + + for remove.Next() { + l := remove.Line() + if g.Lines(l.From().ID(), l.To().ID()) == graph.Empty { + t.Fatalf("bad tests: missing line: %#v", l) + } + if g.Node(l.From().ID()) == nil { + t.Fatalf("bad tests: missing from node: %#v", l.From()) + } + if g.Node(l.To().ID()) == nil { + t.Fatalf("bad tests: missing to node: %#v", l.To()) + } + + prev := g.Lines(l.From().ID(), l.To().ID()) + + g.RemoveLine(l.From().ID(), l.To().ID(), l.ID()) + + if _, ok := g.(graph.Undirected); ok { + delete(lines, edge{f: l.To().ID(), t: l.From().ID(), id: l.ID()}) + } + delete(lines, edge{f: l.From().ID(), t: l.To().ID(), id: l.ID()}) + + // Mark all lines as not found. + for gl := range lines { + lines[gl] = false + } + + // Mark found lines. This could be done far more efficiently. 
+ for gl := range lines { + lit := g.Lines(gl.f, gl.t) + for lit.Next() { + lid := lit.Line().ID() + if lid == gl.id { + lines[gl] = true + break + } + } + } + for gl, found := range lines { + if !found { + t.Fatalf("unexpected missing line after removing line %#v: %#v", l, gl) + } + } + + if curr := g.Lines(l.From().ID(), l.To().ID()); curr.Len() != prev.Len()-1 { + t.Fatalf("RemoveLine failed to mutate graph: curr edge size != prev edge size-1, %d != %d", curr.Len(), prev.Len()-1) + } + if g.Node(l.From().ID()) == nil { + t.Fatalf("unexpected deletion of from node: %#v", l.From()) + } + if g.Node(l.To().ID()) == nil { + t.Fatalf("unexpected deletion to node: %#v", l.To()) + } + } +} + +// undirectedIDs returns a numerical sort ordered canonicalisation of the +// IDs of e. +func undirectedIDs(e Edge) (lo, hi int64, inverted bool) { + lid := e.From().ID() + hid := e.To().ID() + if hid < lid { + inverted = true + hid, lid = lid, hid + } + return lid, hid, inverted +} + +type edge struct { + f, t, id int64 +} + +func same(a, b float64) bool { + return (math.IsNaN(a) && math.IsNaN(b)) || a == b +} + +func panics(fn func()) (ok bool) { + defer func() { + ok = recover() != nil + }() + fn() + return +} + +// RandomNodes implements the graph.Nodes interface for a set of random nodes. +type RandomNodes struct { + n int + seed uint64 + newNode func(int64) graph.Node + + curr int64 + + state *rand.Rand + seen set.Int64s + count int +} + +var _ graph.Nodes = (*RandomNodes)(nil) + +// NewRandomNodes returns a new implicit node iterator containing a set of n nodes +// with IDs generated from a PRNG seeded by the given seed. +// The provided new func maps the id to a graph.Node. +func NewRandomNodes(n int, seed uint64, new func(id int64) graph.Node) *RandomNodes { + return &RandomNodes{ + n: n, + seed: seed, + newNode: new, + + state: rand.New(rand.NewSource(seed)), + seen: make(set.Int64s), + count: 0, + } +} + +// Len returns the remaining number of nodes to be iterated over. +func (n *RandomNodes) Len() int { + return n.n - n.count +} + +// Next returns whether the next call of Node will return a valid node. +func (n *RandomNodes) Next() bool { + if n.count >= n.n { + return false + } + n.count++ + for { + sign := int64(1) + if n.state.Float64() < 0.5 { + sign *= -1 + } + n.curr = sign * n.state.Int63() + if !n.seen.Has(n.curr) { + n.seen.Add(n.curr) + return true + } + } +} + +// Node returns the current node of the iterator. Next must have been +// called prior to a call to Node. +func (n *RandomNodes) Node() graph.Node { + if n.Len() == -1 || n.count == 0 { + return nil + } + return n.newNode(n.curr) +} + +// Reset returns the iterator to its initial state. +func (n *RandomNodes) Reset() { + n.state = rand.New(rand.NewSource(n.seed)) + n.seen = make(set.Int64s) + n.count = 0 +} diff --git a/vendor/gonum.org/v1/gonum/graph/topo/bron_kerbosch.go b/vendor/gonum.org/v1/gonum/graph/topo/bron_kerbosch.go new file mode 100644 index 0000000..83fdb5b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/bron_kerbosch.go @@ -0,0 +1,250 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" +) + +// DegeneracyOrdering returns the degeneracy ordering and the k-cores of +// the undirected graph g. 
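+//
+// As an illustrative sketch (simple is assumed to be gonum's simple package),
+// every node of a triangle has core number two:
+//
+//	g := simple.NewUndirectedGraph()
+//	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1)})
+//	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2)})
+//	g.SetEdge(simple.Edge{F: simple.Node(2), T: simple.Node(0)})
+//	order, cores := DegeneracyOrdering(g)
+//	// order holds the three nodes and cores[2] holds all of them.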
+func DegeneracyOrdering(g graph.Undirected) (order []graph.Node, cores [][]graph.Node) { + order, offsets := degeneracyOrdering(g) + + ordered.Reverse(order) + cores = make([][]graph.Node, len(offsets)) + offset := len(order) + for i, n := range offsets { + cores[i] = order[offset-n : offset] + offset -= n + } + return order, cores +} + +// KCore returns the k-core of the undirected graph g with nodes in an +// optimal ordering for the coloring number. +func KCore(k int, g graph.Undirected) []graph.Node { + order, offsets := degeneracyOrdering(g) + + var offset int + for _, n := range offsets[:k] { + offset += n + } + core := make([]graph.Node, len(order)-offset) + copy(core, order[offset:]) + return core +} + +// degeneracyOrdering is the common code for DegeneracyOrdering and KCore. It +// returns l, the nodes of g in optimal ordering for coloring number and +// s, a set of relative offsets into l for each k-core, where k is an index +// into s. +func degeneracyOrdering(g graph.Undirected) (l []graph.Node, s []int) { + nodes := graph.NodesOf(g.Nodes()) + + // The algorithm used here is essentially as described at + // http://en.wikipedia.org/w/index.php?title=Degeneracy_%28graph_theory%29&oldid=640308710 + + // Initialize an output list L in return parameters. + + // Compute a number d_v for each vertex v in G, + // the number of neighbors of v that are not already in L. + // Initially, these numbers are just the degrees of the vertices. + dv := make(map[int64]int, len(nodes)) + var ( + maxDegree int + neighbours = make(map[int64][]graph.Node) + ) + for _, n := range nodes { + id := n.ID() + adj := graph.NodesOf(g.From(id)) + neighbours[id] = adj + dv[id] = len(adj) + if len(adj) > maxDegree { + maxDegree = len(adj) + } + } + + // Initialize an array D such that D[i] contains a list of the + // vertices v that are not already in L for which d_v = i. + d := make([][]graph.Node, maxDegree+1) + for _, n := range nodes { + deg := dv[n.ID()] + d[deg] = append(d[deg], n) + } + + // Initialize k to 0. + k := 0 + // Repeat n times: + s = []int{0} + for range nodes { + // Scan the array cells D[0], D[1], ... until + // finding an i for which D[i] is nonempty. + var ( + i int + di []graph.Node + ) + for i, di = range d { + if len(di) != 0 { + break + } + } + + // Set k to max(k,i). + if i > k { + k = i + s = append(s, make([]int, k-len(s)+1)...) + } + + // Select a vertex v from D[i]. Add v to the + // beginning of L and remove it from D[i]. + var v graph.Node + v, d[i] = di[len(di)-1], di[:len(di)-1] + l = append(l, v) + s[k]++ + delete(dv, v.ID()) + + // For each neighbor w of v not already in L, + // subtract one from d_w and move w to the + // cell of D corresponding to the new value of d_w. + for _, w := range neighbours[v.ID()] { + dw, ok := dv[w.ID()] + if !ok { + continue + } + for i, n := range d[dw] { + if n.ID() == w.ID() { + d[dw][i], d[dw] = d[dw][len(d[dw])-1], d[dw][:len(d[dw])-1] + dw-- + d[dw] = append(d[dw], w) + break + } + } + dv[w.ID()] = dw + } + } + + return l, s +} + +// BronKerbosch returns the set of maximal cliques of the undirected graph g. 
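+// Each clique is a set of nodes that are pairwise connected and cannot be
+// extended by any other node of g.
+//
+// A small sketch (simple is assumed to be gonum's simple package):
+//
+//	g := simple.NewUndirectedGraph()
+//	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(1)})
+//	g.SetEdge(simple.Edge{F: simple.Node(1), T: simple.Node(2)})
+//	g.SetEdge(simple.Edge{F: simple.Node(0), T: simple.Node(2)})
+//	g.SetEdge(simple.Edge{F: simple.Node(2), T: simple.Node(3)})
+//	cliques := BronKerbosch(g) // {0 1 2} and {2 3}, in some order.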
+func BronKerbosch(g graph.Undirected) [][]graph.Node {
+ nodes := graph.NodesOf(g.Nodes())
+
+ // The algorithm used here is essentially BronKerbosch3 as described at
+ // http://en.wikipedia.org/w/index.php?title=Bron%E2%80%93Kerbosch_algorithm&oldid=656805858
+
+ p := set.NewNodesSize(len(nodes))
+ for _, n := range nodes {
+ p.Add(n)
+ }
+ x := set.NewNodes()
+ var bk bronKerbosch
+ order, _ := degeneracyOrdering(g)
+ ordered.Reverse(order)
+ for _, v := range order {
+ neighbours := graph.NodesOf(g.From(v.ID()))
+ nv := set.NewNodesSize(len(neighbours))
+ for _, n := range neighbours {
+ nv.Add(n)
+ }
+ bk.maximalCliquePivot(g, []graph.Node{v}, set.IntersectionOfNodes(p, nv), set.IntersectionOfNodes(x, nv))
+ p.Remove(v)
+ x.Add(v)
+ }
+ return bk
+}
+
+type bronKerbosch [][]graph.Node
+
+func (bk *bronKerbosch) maximalCliquePivot(g graph.Undirected, r []graph.Node, p, x set.Nodes) {
+ if len(p) == 0 && len(x) == 0 {
+ *bk = append(*bk, r)
+ return
+ }
+
+ neighbours := bk.choosePivotFrom(g, p, x)
+ nu := set.NewNodesSize(len(neighbours))
+ for _, n := range neighbours {
+ nu.Add(n)
+ }
+ for _, v := range p {
+ if nu.Has(v) {
+ continue
+ }
+ vid := v.ID()
+ neighbours := graph.NodesOf(g.From(vid))
+ nv := set.NewNodesSize(len(neighbours))
+ for _, n := range neighbours {
+ nv.Add(n)
+ }
+
+ var found bool
+ for _, n := range r {
+ if n.ID() == vid {
+ found = true
+ break
+ }
+ }
+ var sr []graph.Node
+ if !found {
+ sr = append(r[:len(r):len(r)], v)
+ }
+
+ bk.maximalCliquePivot(g, sr, set.IntersectionOfNodes(p, nv), set.IntersectionOfNodes(x, nv))
+ p.Remove(v)
+ x.Add(v)
+ }
+}
+
+func (*bronKerbosch) choosePivotFrom(g graph.Undirected, p, x set.Nodes) (neighbors []graph.Node) {
+ // TODO(kortschak): Investigate the impact of pivot choice that maximises
+ // |p ⋂ neighbours(u)| as a function of input size. Until then, leave as
+ // compile time option.
+ if !tomitaTanakaTakahashi {
+ for _, n := range p {
+ return graph.NodesOf(g.From(n.ID()))
+ }
+ for _, n := range x {
+ return graph.NodesOf(g.From(n.ID()))
+ }
+ panic("bronKerbosch: empty set")
+ }
+
+ var (
+ max = -1
+ pivot graph.Node
+ )
+ maxNeighbors := func(s set.Nodes) {
+ outer:
+ for _, u := range s {
+ nb := graph.NodesOf(g.From(u.ID()))
+ c := len(nb)
+ if c <= max {
+ continue
+ }
+ for _, n := range nb {
+ if _, ok := p[n.ID()]; ok {
+ continue
+ }
+ c--
+ if c <= max {
+ continue outer
+ }
+ }
+ max = c
+ pivot = u
+ neighbors = nb
+ }
+ }
+ maxNeighbors(p)
+ maxNeighbors(x)
+ if pivot == nil {
+ panic("bronKerbosch: empty set")
+ }
+ return neighbors
+}
diff --git a/vendor/gonum.org/v1/gonum/graph/topo/clique_graph.go b/vendor/gonum.org/v1/gonum/graph/topo/clique_graph.go
new file mode 100644
index 0000000..28f1b96
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/graph/topo/clique_graph.go
@@ -0,0 +1,111 @@
+// Copyright ©2017 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package topo
+
+import (
+ "sort"
+
+ "gonum.org/v1/gonum/graph"
+ "gonum.org/v1/gonum/graph/internal/ordered"
+ "gonum.org/v1/gonum/graph/internal/set"
+)
+
+// Builder is a pure topological graph construction type.
+type Builder interface {
+ AddNode(graph.Node)
+ SetEdge(graph.Edge)
+}
+
+// CliqueGraph builds the clique graph of g in dst using Clique and CliqueGraphEdge
+// nodes and edges.
The nodes returned by calls to Nodes on the nodes and edges of +// the constructed graph are the cliques and the common nodes between cliques +// respectively. The dst graph is not cleared. +func CliqueGraph(dst Builder, g graph.Undirected) { + cliques := BronKerbosch(g) + + // Construct a consistent view of cliques in g. Sorting costs + // us a little, but not as much as the cliques themselves. + for _, c := range cliques { + sort.Sort(ordered.ByID(c)) + } + sort.Sort(ordered.BySliceIDs(cliques)) + + cliqueNodes := make(cliqueNodeSets, len(cliques)) + for id, c := range cliques { + s := set.NewNodesSize(len(c)) + for _, n := range c { + s.Add(n) + } + ns := &nodeSet{Clique: Clique{id: int64(id), nodes: c}, nodes: s} + dst.AddNode(ns.Clique) + for _, n := range c { + nid := n.ID() + cliqueNodes[nid] = append(cliqueNodes[nid], ns) + } + } + + for _, cliques := range cliqueNodes { + for i, uc := range cliques { + for _, vc := range cliques[i+1:] { + // Retain the nodes that contribute to the + // edge between the cliques. + var edgeNodes []graph.Node + switch 1 { + case len(uc.Clique.nodes): + edgeNodes = []graph.Node{uc.Clique.nodes[0]} + case len(vc.Clique.nodes): + edgeNodes = []graph.Node{vc.Clique.nodes[0]} + default: + for _, n := range set.IntersectionOfNodes(uc.nodes, vc.nodes) { + edgeNodes = append(edgeNodes, n) + } + sort.Sort(ordered.ByID(edgeNodes)) + } + + dst.SetEdge(CliqueGraphEdge{from: uc.Clique, to: vc.Clique, nodes: edgeNodes}) + } + } + } +} + +type cliqueNodeSets map[int64][]*nodeSet + +type nodeSet struct { + Clique + nodes set.Nodes +} + +// Clique is a node in a clique graph. +type Clique struct { + id int64 + nodes []graph.Node +} + +// ID returns the node ID. +func (n Clique) ID() int64 { return n.id } + +// Nodes returns the nodes in the clique. +func (n Clique) Nodes() []graph.Node { return n.nodes } + +// CliqueGraphEdge is an edge in a clique graph. +type CliqueGraphEdge struct { + from, to Clique + nodes []graph.Node +} + +// From returns the from node of the edge. +func (e CliqueGraphEdge) From() graph.Node { return e.from } + +// To returns the to node of the edge. +func (e CliqueGraphEdge) To() graph.Node { return e.to } + +// ReversedEdge returns a new CliqueGraphEdge with +// the edge end points swapped. The nodes of the +// new edge are shared with the receiver. +func (e CliqueGraphEdge) ReversedEdge() graph.Edge { e.from, e.to = e.to, e.from; return e } + +// Nodes returns the common nodes in the cliques of the underlying graph +// corresponding to the from and to nodes in the clique graph. +func (e CliqueGraphEdge) Nodes() []graph.Node { return e.nodes } diff --git a/vendor/gonum.org/v1/gonum/graph/topo/doc.go b/vendor/gonum.org/v1/gonum/graph/topo/doc.go new file mode 100644 index 0000000..cbcdff1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package topo provides graph topology analysis functions. +package topo // import "gonum.org/v1/gonum/graph/topo" diff --git a/vendor/gonum.org/v1/gonum/graph/topo/johnson_cycles.go b/vendor/gonum.org/v1/gonum/graph/topo/johnson_cycles.go new file mode 100644 index 0000000..8a78ba2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/johnson_cycles.go @@ -0,0 +1,285 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" + "gonum.org/v1/gonum/graph/iterator" +) + +// johnson implements Johnson's "Finding all the elementary +// circuits of a directed graph" algorithm. SIAM J. Comput. 4(1):1975. +// +// Comments in the johnson methods are kept in sync with the comments +// and labels from the paper. +type johnson struct { + adjacent johnsonGraph // SCC adjacency list. + b []set.Ints // Johnson's "B-list". + blocked []bool + s int + + stack []graph.Node + + result [][]graph.Node +} + +// DirectedCyclesIn returns the set of elementary cycles in the graph g. +func DirectedCyclesIn(g graph.Directed) [][]graph.Node { + jg := johnsonGraphFrom(g) + j := johnson{ + adjacent: jg, + b: make([]set.Ints, len(jg.orig)), + blocked: make([]bool, len(jg.orig)), + } + + // len(j.nodes) is the order of g. + for j.s < len(j.adjacent.orig)-1 { + // We use the previous SCC adjacency to reduce the work needed. + sccs := TarjanSCC(j.adjacent.subgraph(j.s)) + // A_k = adjacency structure of strong component K with least + // vertex in subgraph of G induced by {s, s+1, ... ,n}. + j.adjacent = j.adjacent.sccSubGraph(sccs, 2) // Only allow SCCs with >= 2 vertices. + if j.adjacent.order() == 0 { + break + } + + // s = least vertex in V_k + if s := j.adjacent.leastVertexIndex(); s < j.s { + j.s = s + } + for i, v := range j.adjacent.orig { + if !j.adjacent.nodes.Has(v.ID()) { + continue + } + if len(j.adjacent.succ[v.ID()]) > 0 { + j.blocked[i] = false + j.b[i] = make(set.Ints) + } + } + //L3: + _ = j.circuit(j.s) + j.s++ + } + + return j.result +} + +// circuit is the CIRCUIT sub-procedure in the paper. +func (j *johnson) circuit(v int) bool { + f := false + n := j.adjacent.orig[v] + j.stack = append(j.stack, n) + j.blocked[v] = true + + //L1: + for w := range j.adjacent.succ[n.ID()] { + w := j.adjacent.indexOf(w) + if w == j.s { + // Output circuit composed of stack followed by s. + r := make([]graph.Node, len(j.stack)+1) + copy(r, j.stack) + r[len(r)-1] = j.adjacent.orig[j.s] + j.result = append(j.result, r) + f = true + } else if !j.blocked[w] { + if j.circuit(w) { + f = true + } + } + } + + //L2: + if f { + j.unblock(v) + } else { + for w := range j.adjacent.succ[n.ID()] { + j.b[j.adjacent.indexOf(w)].Add(v) + } + } + j.stack = j.stack[:len(j.stack)-1] + + return f +} + +// unblock is the UNBLOCK sub-procedure in the paper. +func (j *johnson) unblock(u int) { + j.blocked[u] = false + for w := range j.b[u] { + j.b[u].Remove(w) + if j.blocked[w] { + j.unblock(w) + } + } +} + +// johnsonGraph is an edge list representation of a graph with helpers +// necessary for Johnson's algorithm +type johnsonGraph struct { + // Keep the original graph nodes and a + // look-up to into the non-sparse + // collection of potentially sparse IDs. + orig []graph.Node + index map[int64]int + + nodes set.Int64s + succ map[int64]set.Int64s +} + +// johnsonGraphFrom returns a deep copy of the graph g. 
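+// Nodes of g that have no incident edges are retained in orig and index but
+// are never added to the nodes set.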
+func johnsonGraphFrom(g graph.Directed) johnsonGraph { + nodes := graph.NodesOf(g.Nodes()) + sort.Sort(ordered.ByID(nodes)) + c := johnsonGraph{ + orig: nodes, + index: make(map[int64]int, len(nodes)), + + nodes: make(set.Int64s, len(nodes)), + succ: make(map[int64]set.Int64s), + } + for i, u := range nodes { + uid := u.ID() + c.index[uid] = i + for _, v := range graph.NodesOf(g.From(uid)) { + if c.succ[uid] == nil { + c.succ[uid] = make(set.Int64s) + c.nodes.Add(uid) + } + c.nodes.Add(v.ID()) + c.succ[uid].Add(v.ID()) + } + } + return c +} + +// order returns the order of the graph. +func (g johnsonGraph) order() int { return g.nodes.Count() } + +// indexOf returns the index of the retained node for the given node ID. +func (g johnsonGraph) indexOf(id int64) int { + return g.index[id] +} + +// leastVertexIndex returns the index into orig of the least vertex. +func (g johnsonGraph) leastVertexIndex() int { + for _, v := range g.orig { + if g.nodes.Has(v.ID()) { + return g.indexOf(v.ID()) + } + } + panic("johnsonCycles: empty set") +} + +// subgraph returns a subgraph of g induced by {s, s+1, ... , n}. The +// subgraph is destructively generated in g. +func (g johnsonGraph) subgraph(s int) johnsonGraph { + sn := g.orig[s].ID() + for u, e := range g.succ { + if u < sn { + g.nodes.Remove(u) + delete(g.succ, u) + continue + } + for v := range e { + if v < sn { + g.succ[u].Remove(v) + } + } + } + return g +} + +// sccSubGraph returns the graph of the tarjan's strongly connected +// components with each SCC containing at least min vertices. +// sccSubGraph returns nil if there is no SCC with at least min +// members. +func (g johnsonGraph) sccSubGraph(sccs [][]graph.Node, min int) johnsonGraph { + if len(g.nodes) == 0 { + g.nodes = nil + g.succ = nil + return g + } + sub := johnsonGraph{ + orig: g.orig, + index: g.index, + nodes: make(set.Int64s), + succ: make(map[int64]set.Int64s), + } + + var n int + for _, scc := range sccs { + if len(scc) < min { + continue + } + n++ + for _, u := range scc { + for _, v := range scc { + if _, ok := g.succ[u.ID()][v.ID()]; ok { + if sub.succ[u.ID()] == nil { + sub.succ[u.ID()] = make(set.Int64s) + sub.nodes.Add(u.ID()) + } + sub.nodes.Add(v.ID()) + sub.succ[u.ID()].Add(v.ID()) + } + } + } + } + if n == 0 { + g.nodes = nil + g.succ = nil + return g + } + + return sub +} + +// Nodes is required to satisfy Tarjan. +func (g johnsonGraph) Nodes() graph.Nodes { + n := make([]graph.Node, 0, len(g.nodes)) + for id := range g.nodes { + n = append(n, johnsonGraphNode(id)) + } + return iterator.NewOrderedNodes(n) +} + +// Successors is required to satisfy Tarjan. 
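+// Only Nodes and From are called when Tarjan's algorithm is run on this type;
+// the remaining graph methods below panic to guard against unintended use.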
+func (g johnsonGraph) From(id int64) graph.Nodes { + adj := g.succ[id] + if len(adj) == 0 { + return graph.Empty + } + succ := make([]graph.Node, 0, len(adj)) + for id := range adj { + succ = append(succ, johnsonGraphNode(id)) + } + return iterator.NewOrderedNodes(succ) +} + +func (johnsonGraph) Has(int64) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) Node(int64) graph.Node { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) HasEdgeBetween(_, _ int64) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) Edge(_, _ int64) graph.Edge { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) HasEdgeFromTo(_, _ int64) bool { + panic("topo: unintended use of johnsonGraph") +} +func (johnsonGraph) To(int64) graph.Nodes { + panic("topo: unintended use of johnsonGraph") +} + +type johnsonGraphNode int64 + +func (n johnsonGraphNode) ID() int64 { return int64(n) } diff --git a/vendor/gonum.org/v1/gonum/graph/topo/non_tomita_choice.go b/vendor/gonum.org/v1/gonum/graph/topo/non_tomita_choice.go new file mode 100644 index 0000000..36171d6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/non_tomita_choice.go @@ -0,0 +1,9 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !tomita + +package topo + +const tomitaTanakaTakahashi = false diff --git a/vendor/gonum.org/v1/gonum/graph/topo/paton_cycles.go b/vendor/gonum.org/v1/gonum/graph/topo/paton_cycles.go new file mode 100644 index 0000000..44b362a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/paton_cycles.go @@ -0,0 +1,83 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/linear" + "gonum.org/v1/gonum/graph/internal/set" +) + +// UndirectedCyclesIn returns a set of cycles that forms a cycle basis in the graph g. +// Any cycle in g can be constructed as a symmetric difference of its elements. 
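+//
+// As a small sketch (simple is assumed to be gonum's simple package), a square
+// with one diagonal has a basis of two cycles:
+//
+//	g := simple.NewUndirectedGraph()
+//	for _, e := range [][2]int64{{0, 1}, {1, 2}, {2, 3}, {3, 0}, {0, 2}} {
+//		g.SetEdge(simple.Edge{F: simple.Node(e[0]), T: simple.Node(e[1])})
+//	}
+//	basis := UndirectedCyclesIn(g) // len(basis) == 2 for this graph.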
+func UndirectedCyclesIn(g graph.Undirected) [][]graph.Node { + // From "An algorithm for finding a fundamental set of cycles of a graph" + // https://doi.org/10.1145/363219.363232 + + var cycles [][]graph.Node + done := make(set.Int64s) + var tree linear.NodeStack + nodes := g.Nodes() + for nodes.Next() { + n := nodes.Node() + id := n.ID() + if done.Has(id) { + continue + } + done.Add(id) + + tree = tree[:0] + tree.Push(n) + from := sets{id: set.Int64s{}} + to := map[int64]graph.Node{id: n} + + for tree.Len() != 0 { + u := tree.Pop() + uid := u.ID() + adj := from[uid] + for _, v := range graph.NodesOf(g.From(uid)) { + vid := v.ID() + switch { + case uid == vid: + cycles = append(cycles, []graph.Node{u}) + case !from.has(vid): + done.Add(vid) + to[vid] = u + tree.Push(v) + from.add(uid, vid) + case !adj.Has(vid): + c := []graph.Node{v, u} + adj := from[vid] + p := to[uid] + for !adj.Has(p.ID()) { + c = append(c, p) + p = to[p.ID()] + } + c = append(c, p, c[0]) + cycles = append(cycles, c) + adj.Add(uid) + } + } + } + } + + return cycles +} + +type sets map[int64]set.Int64s + +func (s sets) add(uid, vid int64) { + e, ok := s[vid] + if !ok { + e = make(set.Int64s) + s[vid] = e + } + e.Add(uid) +} + +func (s sets) has(uid int64) bool { + _, ok := s[uid] + return ok +} diff --git a/vendor/gonum.org/v1/gonum/graph/topo/tarjan.go b/vendor/gonum.org/v1/gonum/graph/topo/tarjan.go new file mode 100644 index 0000000..6471292 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/tarjan.go @@ -0,0 +1,199 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "fmt" + "sort" + + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/ordered" + "gonum.org/v1/gonum/graph/internal/set" +) + +// Unorderable is an error containing sets of unorderable graph.Nodes. +type Unorderable [][]graph.Node + +// Error satisfies the error interface. +func (e Unorderable) Error() string { + const maxNodes = 10 + var n int + for _, c := range e { + n += len(c) + } + if n > maxNodes { + // Don't return errors that are too long. + return fmt.Sprintf("topo: no topological ordering: %d nodes in %d cyclic components", n, len(e)) + } + return fmt.Sprintf("topo: no topological ordering: cyclic components: %v", [][]graph.Node(e)) +} + +func lexical(nodes []graph.Node) { sort.Sort(ordered.ByID(nodes)) } + +// Sort performs a topological sort of the directed graph g returning the 'from' to 'to' +// sort order. If a topological ordering is not possible, an Unorderable error is returned +// listing cyclic components in g with each cyclic component's members sorted by ID. When +// an Unorderable error is returned, each cyclic component's topological position within +// the sorted nodes is marked with a nil graph.Node. +func Sort(g graph.Directed) (sorted []graph.Node, err error) { + sccs := TarjanSCC(g) + return sortedFrom(sccs, lexical) +} + +// SortStabilized performs a topological sort of the directed graph g returning the 'from' +// to 'to' sort order, or the order defined by the in place order sort function where there +// is no unambiguous topological ordering. If a topological ordering is not possible, an +// Unorderable error is returned listing cyclic components in g with each cyclic component's +// members sorted by the provided order function. If order is nil, nodes are ordered lexically +// by node ID. 
When an Unorderable error is returned, each cyclic component's topological +// position within the sorted nodes is marked with a nil graph.Node. +func SortStabilized(g graph.Directed, order func([]graph.Node)) (sorted []graph.Node, err error) { + if order == nil { + order = lexical + } + sccs := tarjanSCCstabilized(g, order) + return sortedFrom(sccs, order) +} + +func sortedFrom(sccs [][]graph.Node, order func([]graph.Node)) ([]graph.Node, error) { + sorted := make([]graph.Node, 0, len(sccs)) + var sc Unorderable + for _, s := range sccs { + if len(s) != 1 { + order(s) + sc = append(sc, s) + sorted = append(sorted, nil) + continue + } + sorted = append(sorted, s[0]) + } + var err error + if sc != nil { + for i, j := 0, len(sc)-1; i < j; i, j = i+1, j-1 { + sc[i], sc[j] = sc[j], sc[i] + } + err = sc + } + ordered.Reverse(sorted) + return sorted, err +} + +// TarjanSCC returns the strongly connected components of the graph g using Tarjan's algorithm. +// +// A strongly connected component of a graph is a set of vertices where it's possible to reach any +// vertex in the set from any other (meaning there's a cycle between them.) +// +// Generally speaking, a directed graph where the number of strongly connected components is equal +// to the number of nodes is acyclic, unless you count reflexive edges as a cycle (which requires +// only a little extra testing.) +// +func TarjanSCC(g graph.Directed) [][]graph.Node { + return tarjanSCCstabilized(g, nil) +} + +func tarjanSCCstabilized(g graph.Directed, order func([]graph.Node)) [][]graph.Node { + nodes := graph.NodesOf(g.Nodes()) + var succ func(id int64) []graph.Node + if order == nil { + succ = func(id int64) []graph.Node { + return graph.NodesOf(g.From(id)) + } + } else { + order(nodes) + ordered.Reverse(nodes) + + succ = func(id int64) []graph.Node { + to := graph.NodesOf(g.From(id)) + order(to) + ordered.Reverse(to) + return to + } + } + + t := tarjan{ + succ: succ, + + indexTable: make(map[int64]int, len(nodes)), + lowLink: make(map[int64]int, len(nodes)), + onStack: make(set.Int64s), + } + for _, v := range nodes { + if t.indexTable[v.ID()] == 0 { + t.strongconnect(v) + } + } + return t.sccs +} + +// tarjan implements Tarjan's strongly connected component finding +// algorithm. The implementation is from the pseudocode at +// +// http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm?oldid=642744644 +// +type tarjan struct { + succ func(id int64) []graph.Node + + index int + indexTable map[int64]int + lowLink map[int64]int + onStack set.Int64s + + stack []graph.Node + + sccs [][]graph.Node +} + +// strongconnect is the strongconnect function described in the +// wikipedia article. +func (t *tarjan) strongconnect(v graph.Node) { + vID := v.ID() + + // Set the depth index for v to the smallest unused index. + t.index++ + t.indexTable[vID] = t.index + t.lowLink[vID] = t.index + t.stack = append(t.stack, v) + t.onStack.Add(vID) + + // Consider successors of v. + for _, w := range t.succ(vID) { + wID := w.ID() + if t.indexTable[wID] == 0 { + // Successor w has not yet been visited; recur on it. + t.strongconnect(w) + t.lowLink[vID] = min(t.lowLink[vID], t.lowLink[wID]) + } else if t.onStack.Has(wID) { + // Successor w is in stack s and hence in the current SCC. + t.lowLink[vID] = min(t.lowLink[vID], t.indexTable[wID]) + } + } + + // If v is a root node, pop the stack and generate an SCC. + if t.lowLink[vID] == t.indexTable[vID] { + // Start a new strongly connected component. 
+ var ( + scc []graph.Node + w graph.Node + ) + for { + w, t.stack = t.stack[len(t.stack)-1], t.stack[:len(t.stack)-1] + t.onStack.Remove(w.ID()) + // Add w to current strongly connected component. + scc = append(scc, w) + if w.ID() == vID { + break + } + } + // Output the current strongly connected component. + t.sccs = append(t.sccs, scc) + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/graph/topo/tomita_choice.go b/vendor/gonum.org/v1/gonum/graph/topo/tomita_choice.go new file mode 100644 index 0000000..f85a0d6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/tomita_choice.go @@ -0,0 +1,9 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build tomita + +package topo + +const tomitaTanakaTakahashi = true diff --git a/vendor/gonum.org/v1/gonum/graph/topo/topo.go b/vendor/gonum.org/v1/gonum/graph/topo/topo.go new file mode 100644 index 0000000..bece61a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/topo/topo.go @@ -0,0 +1,68 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package topo + +import ( + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/traverse" +) + +// IsPathIn returns whether path is a path in g. +// +// As special cases, IsPathIn returns true for a zero length path or for +// a path of length 1 when the node in path exists in the graph. +func IsPathIn(g graph.Graph, path []graph.Node) bool { + switch len(path) { + case 0: + return true + case 1: + return g.Node(path[0].ID()) != nil + default: + var canReach func(uid, vid int64) bool + switch g := g.(type) { + case graph.Directed: + canReach = g.HasEdgeFromTo + default: + canReach = g.HasEdgeBetween + } + + for i, u := range path[:len(path)-1] { + if !canReach(u.ID(), path[i+1].ID()) { + return false + } + } + return true + } +} + +// PathExistsIn returns whether there is a path in g starting at from extending +// to to. +// +// PathExistsIn exists as a helper function. If many tests for path existence +// are being performed, other approaches will be more efficient. +func PathExistsIn(g graph.Graph, from, to graph.Node) bool { + var t traverse.BreadthFirst + return t.Walk(g, from, func(n graph.Node, _ int) bool { return n.ID() == to.ID() }) != nil +} + +// ConnectedComponents returns the connected components of the undirected graph g. +func ConnectedComponents(g graph.Undirected) [][]graph.Node { + var ( + w traverse.DepthFirst + c []graph.Node + cc [][]graph.Node + ) + during := func(n graph.Node) { + c = append(c, n) + } + after := func() { + cc = append(cc, []graph.Node(nil)) + cc[len(cc)-1] = append(cc[len(cc)-1], c...) + c = c[:0] + } + w.WalkAll(g, nil, after, during) + + return cc +} diff --git a/vendor/gonum.org/v1/gonum/graph/traverse/doc.go b/vendor/gonum.org/v1/gonum/graph/traverse/doc.go new file mode 100644 index 0000000..dc98bbf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/traverse/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package traverse provides basic graph traversal primitives. 
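+//
+// A minimal sketch of a bounded breadth-first search, assuming g is any
+// graph.Graph implementation and start is a node in g:
+//
+//	var bf BreadthFirst
+//	node := bf.Walk(g, start, func(n graph.Node, depth int) bool {
+//		return depth >= 3 // Return the first node three hops from start.
+//	})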
+package traverse // import "gonum.org/v1/gonum/graph/traverse" diff --git a/vendor/gonum.org/v1/gonum/graph/traverse/traverse.go b/vendor/gonum.org/v1/gonum/graph/traverse/traverse.go new file mode 100644 index 0000000..125b161 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/traverse/traverse.go @@ -0,0 +1,231 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package traverse + +import ( + "gonum.org/v1/gonum/graph" + "gonum.org/v1/gonum/graph/internal/linear" + "gonum.org/v1/gonum/graph/internal/set" +) + +var _ Graph = graph.Graph(nil) + +// Graph is the subset of graph.Graph necessary for graph traversal. +type Graph interface { + // From returns all nodes that can be reached directly + // from the node with the given ID. + From(id int64) graph.Nodes + + // Edge returns the edge from u to v, with IDs uid and vid, + // if such an edge exists and nil otherwise. The node v + // must be directly reachable from u as defined by + // the From method. + Edge(uid, vid int64) graph.Edge +} + +// BreadthFirst implements stateful breadth-first graph traversal. +type BreadthFirst struct { + // Visit is called on all nodes on their first visit. + Visit func(graph.Node) + + // Traverse is called on all edges that may be traversed + // during the walk. This includes edges that would hop to + // an already visited node. + // + // The value returned by Traverse determines whether + // an edge can be traversed during the walk. + Traverse func(graph.Edge) bool + + queue linear.NodeQueue + visited set.Int64s +} + +// Walk performs a breadth-first traversal of the graph g starting from the given node, +// depending on the Traverse field and the until parameter if they are non-nil. +// The traversal follows edges for which Traverse(edge) is true and returns the first node +// for which until(node, depth) is true. During the traversal, if the Visit field is +// non-nil, it is called with each node the first time it is visited. +func (b *BreadthFirst) Walk(g Graph, from graph.Node, until func(n graph.Node, d int) bool) graph.Node { + if b.visited == nil { + b.visited = make(set.Int64s) + } + b.queue.Enqueue(from) + if b.Visit != nil && !b.visited.Has(from.ID()) { + b.Visit(from) + } + b.visited.Add(from.ID()) + + var ( + depth int + children int + untilNext = 1 + ) + for b.queue.Len() > 0 { + t := b.queue.Dequeue() + if until != nil && until(t, depth) { + return t + } + tid := t.ID() + to := g.From(tid) + for to.Next() { + n := to.Node() + nid := n.ID() + if b.Traverse != nil && !b.Traverse(g.Edge(tid, nid)) { + continue + } + if b.visited.Has(nid) { + continue + } + if b.Visit != nil { + b.Visit(n) + } + b.visited.Add(nid) + children++ + b.queue.Enqueue(n) + } + if untilNext--; untilNext == 0 { + depth++ + untilNext = children + children = 0 + } + } + + return nil +} + +// WalkAll calls Walk for each unvisited node of the graph g using edges independent +// of their direction. The functions before and after are called prior to commencing +// and after completing each walk if they are non-nil respectively. The function +// during is called on each node as it is traversed. 
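+//
+// For example, the sizes of the connected components of an undirected graph g
+// can be collected with (a sketch; any graph.Undirected implementation works):
+//
+//	var (
+//		bf    BreadthFirst
+//		size  int
+//		sizes []int
+//	)
+//	bf.WalkAll(g, nil,
+//		func() { sizes = append(sizes, size); size = 0 },
+//		func(graph.Node) { size++ },
+//	)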
+func (b *BreadthFirst) WalkAll(g graph.Undirected, before, after func(), during func(graph.Node)) { + b.Reset() + nodes := g.Nodes() + for nodes.Next() { + from := nodes.Node() + if b.Visited(from) { + continue + } + if before != nil { + before() + } + b.Walk(g, from, func(n graph.Node, _ int) bool { + if during != nil { + during(n) + } + return false + }) + if after != nil { + after() + } + } +} + +// Visited returned whether the node n was visited during a traverse. +func (b *BreadthFirst) Visited(n graph.Node) bool { + return b.visited.Has(n.ID()) +} + +// Reset resets the state of the traverser for reuse. +func (b *BreadthFirst) Reset() { + b.queue.Reset() + b.visited = nil +} + +// DepthFirst implements stateful depth-first graph traversal. +type DepthFirst struct { + // Visit is called on all nodes on their first visit. + Visit func(graph.Node) + + // Traverse is called on all edges that may be traversed + // during the walk. This includes edges that would hop to + // an already visited node. + // + // The value returned by Traverse determines whether an + // edge can be traversed during the walk. + Traverse func(graph.Edge) bool + + stack linear.NodeStack + visited set.Int64s +} + +// Walk performs a depth-first traversal of the graph g starting from the given node, +// depending on the Traverse field and the until parameter if they are non-nil. +// The traversal follows edges for which Traverse(edge) is true and returns the first node +// for which until(node) is true. During the traversal, if the Visit field is non-nil, it +// is called with each node the first time it is visited. +func (d *DepthFirst) Walk(g Graph, from graph.Node, until func(graph.Node) bool) graph.Node { + if d.visited == nil { + d.visited = make(set.Int64s) + } + d.stack.Push(from) + if d.Visit != nil && !d.visited.Has(from.ID()) { + d.Visit(from) + } + d.visited.Add(from.ID()) + + for d.stack.Len() > 0 { + t := d.stack.Pop() + if until != nil && until(t) { + return t + } + tid := t.ID() + to := g.From(tid) + for to.Next() { + n := to.Node() + nid := n.ID() + if d.Traverse != nil && !d.Traverse(g.Edge(tid, nid)) { + continue + } + if d.visited.Has(nid) { + continue + } + if d.Visit != nil { + d.Visit(n) + } + d.visited.Add(nid) + d.stack.Push(n) + } + } + + return nil +} + +// WalkAll calls Walk for each unvisited node of the graph g using edges independent +// of their direction. The functions before and after are called prior to commencing +// and after completing each walk if they are non-nil respectively. The function +// during is called on each node as it is traversed. +func (d *DepthFirst) WalkAll(g graph.Undirected, before, after func(), during func(graph.Node)) { + d.Reset() + nodes := g.Nodes() + for nodes.Next() { + from := nodes.Node() + if d.Visited(from) { + continue + } + if before != nil { + before() + } + d.Walk(g, from, func(n graph.Node) bool { + if during != nil { + during(n) + } + return false + }) + if after != nil { + after() + } + } +} + +// Visited returned whether the node n was visited during a traverse. +func (d *DepthFirst) Visited(n graph.Node) bool { + return d.visited.Has(n.ID()) +} + +// Reset resets the state of the traverser for reuse. +func (d *DepthFirst) Reset() { + d.stack = d.stack[:0] + d.visited = nil +} diff --git a/vendor/gonum.org/v1/gonum/graph/undirect.go b/vendor/gonum.org/v1/gonum/graph/undirect.go new file mode 100644 index 0000000..07ce64a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/graph/undirect.go @@ -0,0 +1,270 @@ +// Copyright ©2015 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package graph + +// Undirect converts a directed graph to an undirected graph. +type Undirect struct { + G Directed +} + +var _ Undirected = Undirect{} + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g Undirect) Node(id int64) Node { return g.G.Node(id) } + +// Nodes returns all the nodes in the graph. +func (g Undirect) Nodes() Nodes { return g.G.Nodes() } + +// From returns all nodes in g that can be reached directly from u. +func (g Undirect) From(uid int64) Nodes { + return newNodeFilterIterator(g.G.From(uid), g.G.To(uid)) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g Undirect) HasEdgeBetween(xid, yid int64) bool { return g.G.HasEdgeBetween(xid, yid) } + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// If an edge exists, the Edge returned is an EdgePair. The weight of +// the edge is determined by applying the Merge func to the weights of the +// edges between u and v. +func (g Undirect) Edge(uid, vid int64) Edge { return g.EdgeBetween(uid, vid) } + +// EdgeBetween returns the edge between nodes x and y. If an edge exists, the +// Edge returned is an EdgePair. The weight of the edge is determined by +// applying the Merge func to the weights of edges between x and y. +func (g Undirect) EdgeBetween(xid, yid int64) Edge { + fe := g.G.Edge(xid, yid) + re := g.G.Edge(yid, xid) + if fe == nil && re == nil { + return nil + } + + return EdgePair{fe, re} +} + +// UndirectWeighted converts a directed weighted graph to an undirected weighted graph, +// resolving edge weight conflicts. +type UndirectWeighted struct { + G WeightedDirected + + // Absent is the value used to + // represent absent edge weights + // passed to Merge if the reverse + // edge is present. + Absent float64 + + // Merge defines how discordant edge + // weights in G are resolved. A merge + // is performed if at least one edge + // exists between the nodes being + // considered. The edges corresponding + // to the two weights are also passed, + // in the same order. + // The order of weight parameters + // passed to Merge is not defined, so + // the function should be commutative. + // If Merge is nil, the arithmetic + // mean is used to merge weights. + Merge func(x, y float64, xe, ye Edge) float64 +} + +var ( + _ Undirected = UndirectWeighted{} + _ WeightedUndirected = UndirectWeighted{} +) + +// Node returns the node with the given ID if it exists in the graph, +// and nil otherwise. +func (g UndirectWeighted) Node(id int64) Node { return g.G.Node(id) } + +// Nodes returns all the nodes in the graph. +func (g UndirectWeighted) Nodes() Nodes { return g.G.Nodes() } + +// From returns all nodes in g that can be reached directly from u. +func (g UndirectWeighted) From(uid int64) Nodes { + return newNodeFilterIterator(g.G.From(uid), g.G.To(uid)) +} + +// HasEdgeBetween returns whether an edge exists between nodes x and y. +func (g UndirectWeighted) HasEdgeBetween(xid, yid int64) bool { return g.G.HasEdgeBetween(xid, yid) } + +// Edge returns the edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// If an edge exists, the Edge returned is an EdgePair. 
The weight of +// the edge is determined by applying the Merge func to the weights of the +// edges between u and v. +func (g UndirectWeighted) Edge(uid, vid int64) Edge { return g.WeightedEdgeBetween(uid, vid) } + +// WeightedEdge returns the weighted edge from u to v if such an edge exists and nil otherwise. +// The node v must be directly reachable from u as defined by the From method. +// If an edge exists, the Edge returned is an EdgePair. The weight of +// the edge is determined by applying the Merge func to the weights of the +// edges between u and v. +func (g UndirectWeighted) WeightedEdge(uid, vid int64) WeightedEdge { + return g.WeightedEdgeBetween(uid, vid) +} + +// EdgeBetween returns the edge between nodes x and y. If an edge exists, the +// Edge returned is an EdgePair. The weight of the edge is determined by +// applying the Merge func to the weights of edges between x and y. +func (g UndirectWeighted) EdgeBetween(xid, yid int64) Edge { + return g.WeightedEdgeBetween(xid, yid) +} + +// WeightedEdgeBetween returns the weighted edge between nodes x and y. If an edge exists, the +// Edge returned is an EdgePair. The weight of the edge is determined by +// applying the Merge func to the weights of edges between x and y. +func (g UndirectWeighted) WeightedEdgeBetween(xid, yid int64) WeightedEdge { + fe := g.G.Edge(xid, yid) + re := g.G.Edge(yid, xid) + if fe == nil && re == nil { + return nil + } + + f, ok := g.G.Weight(xid, yid) + if !ok { + f = g.Absent + } + r, ok := g.G.Weight(yid, xid) + if !ok { + r = g.Absent + } + + var w float64 + if g.Merge == nil { + w = (f + r) / 2 + } else { + w = g.Merge(f, r, fe, re) + } + return WeightedEdgePair{EdgePair: [2]Edge{fe, re}, W: w} +} + +// Weight returns the weight for the edge between x and y if Edge(x, y) returns a non-nil Edge. +// If x and y are the same node the internal node weight is returned. If there is no joining +// edge between the two nodes the weight value returned is zero. Weight returns true if an edge +// exists between x and y or if x and y have the same ID, false otherwise. +func (g UndirectWeighted) Weight(xid, yid int64) (w float64, ok bool) { + fe := g.G.Edge(xid, yid) + re := g.G.Edge(yid, xid) + + f, fOk := g.G.Weight(xid, yid) + if !fOk { + f = g.Absent + } + r, rOK := g.G.Weight(yid, xid) + if !rOK { + r = g.Absent + } + ok = fOk || rOK + + if g.Merge == nil { + return (f + r) / 2, ok + } + return g.Merge(f, r, fe, re), ok +} + +// EdgePair is an opposed pair of directed edges. +type EdgePair [2]Edge + +// From returns the from node of the first non-nil edge, or nil. +func (e EdgePair) From() Node { + if e[0] != nil { + return e[0].From() + } else if e[1] != nil { + return e[1].From() + } + return nil +} + +// To returns the to node of the first non-nil edge, or nil. +func (e EdgePair) To() Node { + if e[0] != nil { + return e[0].To() + } else if e[1] != nil { + return e[1].To() + } + return nil +} + +// ReversedEdge returns a new Edge with the end point of the +// edges in the pair swapped. +func (e EdgePair) ReversedEdge() Edge { + if e[0] != nil { + e[0] = e[0].ReversedEdge() + } + if e[1] != nil { + e[1] = e[1].ReversedEdge() + } + return e +} + +// WeightedEdgePair is an opposed pair of directed edges. +type WeightedEdgePair struct { + EdgePair + W float64 +} + +// ReversedEdge returns a new Edge with the end point of the +// edges in the pair swapped. 
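+// The weight of the pair is not altered by the reversal.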
+func (e WeightedEdgePair) ReversedEdge() Edge { + e.EdgePair = e.EdgePair.ReversedEdge().(EdgePair) + return e +} + +// Weight returns the merged edge weights of the two edges. +func (e WeightedEdgePair) Weight() float64 { return e.W } + +// nodeFilterIterator combines two Nodes to produce a single stream of +// unique nodes. +type nodeFilterIterator struct { + a, b Nodes + + // unique indicates the node in b with the key ID is unique. + unique map[int64]bool +} + +func newNodeFilterIterator(a, b Nodes) *nodeFilterIterator { + n := nodeFilterIterator{a: a, b: b, unique: make(map[int64]bool)} + for n.b.Next() { + n.unique[n.b.Node().ID()] = true + } + n.b.Reset() + for n.a.Next() { + n.unique[n.a.Node().ID()] = false + } + n.a.Reset() + return &n +} + +func (n *nodeFilterIterator) Len() int { + return len(n.unique) +} + +func (n *nodeFilterIterator) Next() bool { + n.Len() + if n.a.Next() { + return true + } + for n.b.Next() { + if n.unique[n.b.Node().ID()] { + return true + } + } + return false +} + +func (n *nodeFilterIterator) Node() Node { + if n.a.Len() != 0 { + return n.a.Node() + } + return n.b.Node() +} + +func (n *nodeFilterIterator) Reset() { + n.a.Reset() + n.b.Reset() +} diff --git a/vendor/gonum.org/v1/gonum/integrate/doc.go b/vendor/gonum.org/v1/gonum/integrate/doc.go new file mode 100644 index 0000000..68fb4ef --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package integrate provides functions to compute an integral given a +// specific list of evaluations. +package integrate // import "gonum.org/v1/gonum/integrate" diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/doc.go b/vendor/gonum.org/v1/gonum/integrate/quad/doc.go new file mode 100644 index 0000000..9175706 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package quad provides numerical evaluation of definite integrals of single-variable functions. +// +package quad // import "gonum.org/v1/gonum/integrate/quad" diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/hermite.go b/vendor/gonum.org/v1/gonum/integrate/quad/hermite.go new file mode 100644 index 0000000..9fd9ead --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/hermite.go @@ -0,0 +1,314 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quad + +import ( + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mathext" +) + +// Hermite generates sample locations and weights for performing quadrature with +// a squared-exponential weight +// int_-inf^inf e^(-x^2) f(x) dx . +type Hermite struct{} + +func (h Hermite) FixedLocations(x, weight []float64, min, max float64) { + // TODO(btracey): Implement the case where x > 20, x < 200 so that we don't + // need to store all of that data. + + // Algorithm adapted from Chebfun http://www.chebfun.org/. + // + // References: + // Algorithm: + // G. H. Golub and J. A. Welsch, "Calculation of Gauss quadrature rules", + // Math. Comp. 23:221-230, 1969. + // A. Glaser, X. Liu and V. 
Rokhlin, "A fast algorithm for the + // calculation of the roots of special functions", SIAM Journal + // on Scientific Computing", 29(4):1420-1438:, 2007. + // A. Townsend, T. Trogdon, and S.Olver, Fast computation of Gauss quadrature + // nodes and weights on the whole real line, IMA J. Numer. Anal., 36: 337–358, + // 2016. http://arxiv.org/abs/1410.5286 + + if len(x) != len(weight) { + panic("hermite: slice length mismatch") + } + if min >= max { + panic("hermite: min >= max") + } + if !math.IsInf(min, -1) || !math.IsInf(max, 1) { + panic("hermite: non-infinite bound") + } + h.locations(x, weight) +} + +func (h Hermite) locations(x, weights []float64) { + n := len(x) + switch { + case 0 < n && n <= 200: + copy(x, xCacheHermite[n-1]) + copy(weights, wCacheHermite[n-1]) + case n > 200: + h.locationsAsy(x, weights) + } +} + +// Algorithm adapted from Chebfun http://www.chebfun.org/. Specific code +// https://github.com/chebfun/chebfun/blob/development/hermpts.m. + +// Original Copyright Notice: + +/* +Copyright (c) 2015, The Chancellor, Masters and Scholars of the University +of Oxford, and the Chebfun Developers. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the University of Oxford nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +// locationAsy returns the node locations and weights of a Hermite quadrature rule +// with len(x) points. +func (h Hermite) locationsAsy(x, w []float64) { + // A. Townsend, T. Trogdon, and S.Olver, Fast computation of Gauss quadrature + // nodes and weights the whole real line, IMA J. Numer. Anal., + // 36: 337–358, 2016. http://arxiv.org/abs/1410.5286 + + // Find the positive locations and weights. + n := len(x) + l := n / 2 + xa := x[l:] + wa := w[l:] + for i := range xa { + xa[i], wa[i] = h.locationsAsy0(i, n) + } + // Flip around zero -- copy the negative x locations with the corresponding + // weights. + if n%2 == 0 { + l-- + } + for i, v := range xa { + x[l-i] = -v + } + for i, v := range wa { + w[l-i] = v + } + sumW := floats.Sum(w) + c := math.SqrtPi / sumW + floats.Scale(c, w) +} + +// locationsAsy0 returns the location and weight for location i in an n-point +// quadrature rule. 
The rule is symmetric, so i should be <= n/2 + n%2. +func (h Hermite) locationsAsy0(i, n int) (x, w float64) { + const convTol = 1e-16 + const convIter = 20 + theta0 := h.hermiteInitialGuess(i, n) + t0 := theta0 / math.Sqrt(2*float64(n)+1) + theta0 = math.Acos(t0) + sqrt2np1 := math.Sqrt(2*float64(n) + 1) + var vali, dvali float64 + for k := 0; k < convIter; k++ { + vali, dvali = h.hermpolyAsyAiry(i, n, theta0) + dt := -vali / (math.Sqrt2 * sqrt2np1 * dvali * math.Sin(theta0)) + theta0 -= dt + if math.Abs(theta0) < convTol { + break + } + } + x = sqrt2np1 * math.Cos(theta0) + ders := x*vali + math.Sqrt2*dvali + w = math.Exp(-x*x) / (ders * ders) + return x, w +} + +// hermpolyAsyAiry evaluates the Hermite polynomials using the Airy asymptotic +// formula in theta-space. +func (h Hermite) hermpolyAsyAiry(i, n int, t float64) (valVec, dvalVec float64) { + musq := 2*float64(n) + 1 + cosT := math.Cos(t) + sinT := math.Sin(t) + sin2T := 2 * cosT * sinT + eta := 0.5*t - 0.25*sin2T + chi := -math.Pow(3*eta/2, 2.0/3) + phi := math.Pow(-chi/(sinT*sinT), 1.0/4) + cnst := 2 * math.SqrtPi * math.Pow(musq, 1.0/6) * phi + airy0 := real(mathext.AiryAi(complex(math.Pow(musq, 2.0/3)*chi, 0))) + airy1 := real(mathext.AiryAiDeriv(complex(math.Pow(musq, 2.0/3)*chi, 0))) + + // Terms in 12.10.43: + const ( + a1 = 15.0 / 144 + b1 = -7.0 / 5 * a1 + a2 = 5.0 * 7 * 9 * 11.0 / 2.0 / 144.0 / 144.0 + b2 = -13.0 / 11 * a2 + a3 = 7.0 * 9 * 11 * 13 * 15 * 17 / 6.0 / 144.0 / 144.0 / 144.0 + b3 = -19.0 / 17 * a3 + ) + + // Pre-compute terms. + cos2T := cosT * cosT + cos3T := cos2T * cosT + cos4T := cos3T * cosT + cos5T := cos4T * cosT + cos7T := cos5T * cos2T + cos9T := cos7T * cos2T + + chi2 := chi * chi + chi3 := chi2 * chi + chi4 := chi3 * chi + chi5 := chi4 * chi + + phi6 := math.Pow(phi, 6) + phi12 := phi6 * phi6 + phi18 := phi12 * phi6 + + // u polynomials in 12.10.9. + u1 := (cos3T - 6*cosT) / 24.0 + u2 := (-9*cos4T + 249*cos2T + 145) / 1152.0 + u3 := (-4042*cos9T + 18189*cos7T - 28287*cos5T - 151995*cos3T - 259290*cosT) / 414720.0 + + val := airy0 + B0 := -(phi6*u1 + a1) / chi2 + val += B0 * airy1 / math.Pow(musq, 4.0/3) + A1 := (phi12*u2 + b1*phi6*u1 + b2) / chi3 + val += A1 * airy0 / (musq * musq) + B1 := -(phi18*u3 + a1*phi12*u2 + a2*phi6*u1 + a3) / chi5 + val += B1 * airy1 / math.Pow(musq, 4.0/3+2) + val *= cnst + + // Derivative. + eta = 0.5*t - 0.25*sin2T + chi = -math.Pow(3*eta/2, 2.0/3) + phi = math.Pow(-chi/(sinT*sinT), 1.0/4) + cnst = math.Sqrt2 * math.SqrtPi * math.Pow(musq, 1.0/3) / phi + + // v polynomials in 12.10.10. + v1 := (cos3T + 6*cosT) / 24 + v2 := (15*cos4T - 327*cos2T - 143) / 1152 + v3 := (259290*cosT + 238425*cos3T - 36387*cos5T + 18189*cos7T - 4042*cos9T) / 414720 + + C0 := -(phi6*v1 + b1) / chi + dval := C0 * airy0 / math.Pow(musq, 2.0/3) + dval += airy1 + C1 := -(phi18*v3 + b1*phi12*v2 + b2*phi6*v1 + b3) / chi4 + dval += C1 * airy0 / math.Pow(musq, 2.0/3+2) + D1 := (phi12*v2 + a1*phi6*v1 + a2) / chi3 + dval += D1 * airy1 / (musq * musq) + dval *= cnst + return val, dval +} + +// hermiteInitialGuess returns the initial guess for node i in an n-point Hermite +// quadrature rule. The rule is symmetric, so i should be <= n/2 + n%2. +func (h Hermite) hermiteInitialGuess(i, n int) float64 { + // There are two different formulas for the initial guesses of the hermite + // quadrature locations. The first uses the Gatteschi formula and is good + // near x = sqrt(n+0.5) + // [1] L. Gatteschi, Asymptotics and bounds for the zeros of Laguerre + // polynomials: a survey, J. Comput. Appl. 
Math., 144 (2002), pp. 7-27. + // The second is the Tricomi initial guesses, good near x = 0. This is + // equation 2.1 in [1] and is originally from + // [2] F. G. Tricomi, Sugli zeri delle funzioni di cui si conosce una + // rappresentazione asintotica, Ann. Mat. Pura Appl. 26 (1947), pp. 283-300. + + // If the number of points is odd, there is a quadrature point at 1, which + // has an initial guess of 0. + if n%2 == 1 { + if i == 0 { + return 0 + } + i-- + } + + m := n / 2 + a := -0.5 + if n%2 == 1 { + a = 0.5 + } + nu := 4*float64(m) + 2*a + 2 + + // Find the split between Gatteschi guesses and Tricomi guesses. + p := 0.4985 + math.SmallestNonzeroFloat64 + pidx := int(math.Floor(p * float64(n))) + + // Use the Tricomi initial guesses in the first half where x is nearer to zero. + // Note: zeros of besselj(+/-.5,x) are integer and half-integer multiples of pi. + if i < pidx { + rhs := math.Pi * (4*float64(m) - 4*(float64(i)+1) + 3) / nu + tnk := math.Pi / 2 + for k := 0; k < 7; k++ { + val := tnk - math.Sin(tnk) - rhs + dval := 1 - math.Cos(tnk) + dTnk := val / dval + tnk -= dTnk + if math.Abs(dTnk) < 1e-14 { + break + } + } + vc := math.Cos(tnk / 2) + t := vc * vc + return math.Sqrt(nu*t - (5.0/(4.0*(1-t)*(1-t))-1.0/(1-t)-1+3*a*a)/3/nu) + } + + // Use Gatteschi guesses in the second half where x is nearer to sqrt(n+0.5) + i = i + 1 - m + var ar float64 + if i < len(airyRtsExact) { + ar = airyRtsExact[i] + } else { + t := 3.0 / 8 * math.Pi * (4*(float64(i)+1) - 1) + ar = math.Pow(t, 2.0/3) * (1 + + 5.0/48*math.Pow(t, -2) - + 5.0/36*math.Pow(t, -4) + + 77125.0/82944*math.Pow(t, -6) - + 108056875.0/6967296*math.Pow(t, -8) + + 162375596875.0/334430208*math.Pow(t, -10)) + } + r := nu + math.Pow(2, 2.0/3)*ar*math.Pow(nu, 1.0/3) + + 0.2*math.Pow(2, 4.0/3)*ar*ar*math.Pow(nu, -1.0/3) + + (11.0/35-a*a-12.0/175*ar*ar*ar)/nu + + (16.0/1575*ar+92.0/7875*math.Pow(ar, 4))*math.Pow(2, 2.0/3)*math.Pow(nu, -5.0/3) - + (15152.0/3031875*math.Pow(ar, 5)+1088.0/121275*ar*ar)*math.Pow(2, 1.0/3)*math.Pow(nu, -7.0/3) + if r < 0 { + ar = 0 + } else { + ar = math.Sqrt(r) + } + return ar +} + +// airyRtsExact are the first airy roots. +var airyRtsExact = []float64{ + -2.338107410459762, + -4.087949444130970, + -5.520559828095555, + -6.786708090071765, + -7.944133587120863, + -9.022650853340979, + -10.040174341558084, + -11.008524303733260, + -11.936015563236262, + -12.828776752865757, +} diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/hermite_data.go b/vendor/gonum.org/v1/gonum/integrate/quad/hermite_data.go new file mode 100644 index 0000000..48f7362 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/hermite_data.go @@ -0,0 +1,413 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quad + +// xCache computed from Chebfun. 
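+//
+// A minimal usage sketch (assuming a 3-point rule) of how this cache is
+// consumed: the table is indexed by n-1, so xCacheHermite[n-1] holds the n
+// node locations that locations copies for 0 < n <= 200, with the matching
+// weights taken from wCacheHermite[n-1]. Callers reach it through
+// FixedLocations in hermite.go:
+//
+//	var h Hermite
+//	x := make([]float64, 3)
+//	w := make([]float64, 3)
+//	// The bounds must be -Inf and +Inf; FixedLocations panics otherwise.
+//	h.FixedLocations(x, w, math.Inf(-1), math.Inf(1))
+//	// x now equals xCacheHermite[2]; sum_i w[i]*f(x[i]) approximates
+//	// the integral of e^(-x^2)*f(x) over the whole real line.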
+var xCacheHermite = [][]float64{ + {0.0000000000000000e+00}, + {-7.0710678118654757e-01, 7.0710678118654757e-01}, + {-1.2247448713915887e+00, 0.0000000000000000e+00, 1.2247448713915887e+00}, + {-1.6506801238857842e+00, -5.2464762327529035e-01, 5.2464762327529035e-01, 1.6506801238857842e+00}, + {-2.0201828704560856e+00, -9.5857246461381873e-01, 0.0000000000000000e+00, 9.5857246461381873e-01, 2.0201828704560856e+00}, + {-2.3506049736744914e+00, -1.3358490740136970e+00, -4.3607741192761657e-01, 4.3607741192761657e-01, 1.3358490740136970e+00, 2.3506049736744914e+00}, + {-2.6519613568352316e+00, -1.6735516287674714e+00, -8.1628788285896481e-01, 0.0000000000000000e+00, 8.1628788285896481e-01, 1.6735516287674714e+00, 2.6519613568352316e+00}, + {-2.9306374202572436e+00, -1.9816567566958443e+00, -1.1571937124467804e+00, -3.8118699020732216e-01, 3.8118699020732216e-01, 1.1571937124467804e+00, 1.9816567566958443e+00, 2.9306374202572436e+00}, + {-3.1909932017815295e+00, -2.2665805845318427e+00, -1.4685532892166682e+00, -7.2355101875283767e-01, 0.0000000000000000e+00, 7.2355101875283767e-01, 1.4685532892166682e+00, 2.2665805845318427e+00, 3.1909932017815295e+00}, + {-3.4361591188377378e+00, -2.5327316742327901e+00, -1.7566836492998816e+00, -1.0366108297895138e+00, -3.4290132722370481e-01, 3.4290132722370481e-01, 1.0366108297895138e+00, 1.7566836492998816e+00, 2.5327316742327901e+00, 3.4361591188377378e+00}, + {-3.6684708465595812e+00, -2.7832900997816514e+00, -2.0259480158257550e+00, -1.3265570844949326e+00, -6.5680956688209979e-01, 0.0000000000000000e+00, 6.5680956688209979e-01, 1.3265570844949326e+00, 2.0259480158257550e+00, 2.7832900997816514e+00, 3.6684708465595812e+00}, + {-3.8897248978697814e+00, -3.0206370251208896e+00, -2.2795070805010593e+00, -1.5976826351526043e+00, -9.4778839124016367e-01, -3.1424037625435908e-01, 3.1424037625435908e-01, 9.4778839124016367e-01, 1.5976826351526043e+00, 2.2795070805010593e+00, 3.0206370251208896e+00, 3.8897248978697814e+00}, + {-4.1013375961786389e+00, -3.2466089783724090e+00, -2.5197356856782389e+00, -1.8531076516015121e+00, -1.2200550365907483e+00, -6.0576387917106034e-01, 0.0000000000000000e+00, 6.0576387917106034e-01, 1.2200550365907483e+00, 1.8531076516015121e+00, 2.5197356856782389e+00, 3.2466089783724090e+00, 4.1013375961786389e+00}, + {-4.3044485704736335e+00, -3.4626569336022701e+00, -2.7484707249854017e+00, -2.0951832585077166e+00, -1.4766827311411406e+00, -8.7871378732939931e-01, -2.9174551067256199e-01, 2.9174551067256199e-01, 8.7871378732939931e-01, 1.4766827311411406e+00, 2.0951832585077166e+00, 2.7484707249854017e+00, 3.4626569336022701e+00, 4.3044485704736335e+00}, + {-4.4999907073093910e+00, -3.6699503734044541e+00, -2.9671669279056028e+00, -2.3257324861738580e+00, -1.7199925751864888e+00, -1.1361155852109204e+00, -5.6506958325557588e-01, 0.0000000000000000e+00, 5.6506958325557588e-01, 1.1361155852109204e+00, 1.7199925751864888e+00, 2.3257324861738580e+00, 2.9671669279056028e+00, 3.6699503734044541e+00, 4.4999907073093910e+00}, + {-4.6887389393058223e+00, -3.8694479048601238e+00, -3.1769991619799529e+00, -2.5462021578474796e+00, -1.9517879909162528e+00, -1.3802585391988815e+00, -8.2295144914465546e-01, -2.7348104613815244e-01, 2.7348104613815244e-01, 8.2295144914465546e-01, 1.3802585391988815e+00, 1.9517879909162528e+00, 2.5462021578474796e+00, 3.1769991619799529e+00, 3.8694479048601238e+00, 4.6887389393058223e+00}, + {-4.8713451936744061e+00, -4.0619466758754772e+00, -3.3789320911414946e+00, -2.7577629157038897e+00, -2.1735028266666214e+00, 
-1.6129243142212311e+00, -1.0676487257434508e+00, -5.3163300134265479e-01, 0.0000000000000000e+00, 5.3163300134265479e-01, 1.0676487257434508e+00, 1.6129243142212311e+00, 2.1735028266666214e+00, 2.7577629157038897e+00, 3.3789320911414946e+00, 4.0619466758754772e+00, 4.8713451936744061e+00}, + {-5.0483640088744668e+00, -4.2481178735681260e+00, -3.5737690684862655e+00, -2.9613775055316052e+00, -2.3862990891666858e+00, -1.8355316042616294e+00, -1.3009208583896172e+00, -7.7668291926741173e-01, -2.5826775051909689e-01, 2.5826775051909689e-01, 7.7668291926741173e-01, 1.3009208583896172e+00, 1.8355316042616294e+00, 2.3862990891666858e+00, 2.9613775055316052e+00, 3.5737690684862655e+00, 4.2481178735681260e+00, 5.0483640088744668e+00}, + {-5.2202716905374844e+00, -4.4285328066037799e+00, -3.7621873519640201e+00, -3.1578488183476034e+00, -2.5911337897945423e+00, -2.0492317098506181e+00, -1.5241706193935327e+00, -1.0103683871343114e+00, -5.0352016342388828e-01, 0.0000000000000000e+00, 5.0352016342388828e-01, 1.0103683871343114e+00, 1.5241706193935327e+00, 2.0492317098506181e+00, 2.5911337897945423e+00, 3.1578488183476034e+00, 3.7621873519640201e+00, 4.4285328066037799e+00, 5.2202716905374844e+00}, + {-5.3874808900112319e+00, -4.6036824495507451e+00, -3.9447640401156265e+00, -3.3478545673832172e+00, -2.7888060584281313e+00, -2.2549740020892761e+00, -1.7385377121165857e+00, -1.2340762153953231e+00, -7.3747372854539406e-01, -2.4534070830090132e-01, 2.4534070830090132e-01, 7.3747372854539406e-01, 1.2340762153953231e+00, 1.7385377121165857e+00, 2.2549740020892761e+00, 2.7888060584281313e+00, 3.3478545673832172e+00, 3.9447640401156265e+00, 4.6036824495507451e+00, 5.3874808900112319e+00}, + {-5.5503518732646775e+00, -4.7739923434112193e+00, -4.1219955474918395e+00, -3.5319728771376777e+00, -2.9799912077045980e+00, -2.4535521245128376e+00, -1.9449629491862539e+00, -1.4489342506507319e+00, -9.6149963441836894e-01, -4.7945070707910747e-01, -0.0000000000000000e+00, 4.7945070707910747e-01, 9.6149963441836894e-01, 1.4489342506507319e+00, 1.9449629491862539e+00, 2.4535521245128376e+00, 2.9799912077045980e+00, 3.5319728771376777e+00, 4.1219955474918395e+00, 4.7739923434112193e+00, 5.5503518732646775e+00}, + {-5.7092013532052626e+00, -4.9398341310601763e+00, -4.2943124805931614e+00, -3.7107015328778048e+00, -3.1652659092021374e+00, -2.6456374410581724e+00, -2.1442335927985341e+00, -1.6558743732864223e+00, -1.1767139584812445e+00, -7.0368609717000696e-01, -2.3417913993099065e-01, 2.3417913993099065e-01, 7.0368609717000696e-01, 1.1767139584812445e+00, 1.6558743732864223e+00, 2.1442335927985341e+00, 2.6456374410581724e+00, 3.1652659092021374e+00, 3.7107015328778048e+00, 4.2943124805931614e+00, 4.9398341310601763e+00, 5.7092013532052626e+00}, + {-5.8643094989845714e+00, -5.1015346104766763e+00, -4.4620911737400064e+00, -3.8844727081061015e+00, -3.3451271599412244e+00, -2.8318037871261570e+00, -2.3370162114744559e+00, -1.8556770376713707e+00, -1.3840395856824952e+00, -9.1915146544256365e-01, -4.5853835006810478e-01, -0.0000000000000000e+00, 4.5853835006810478e-01, 9.1915146544256365e-01, 1.3840395856824952e+00, 1.8556770376713707e+00, 2.3370162114744559e+00, 2.8318037871261570e+00, 3.3451271599412244e+00, 3.8844727081061015e+00, 4.4620911737400064e+00, 5.1015346104766763e+00, 5.8643094989845714e+00}, + {-6.0159255614257390e+00, -5.2593829276680442e+00, -4.6256627564237878e+00, -4.0536644024481490e+00, -3.5200068130345241e+00, -3.0125461375655647e+00, -2.5238810170114268e+00, -2.0490035736616985e+00, 
-1.5842500109616942e+00, -1.1267608176112451e+00, -6.7417110703721217e-01, -2.2441454747251557e-01, 2.2441454747251557e-01, 6.7417110703721217e-01, 1.1267608176112451e+00, 1.5842500109616942e+00, 2.0490035736616985e+00, 2.5238810170114268e+00, 3.0125461375655647e+00, 3.5200068130345241e+00, 4.0536644024481490e+00, 4.6256627564237878e+00, 5.2593829276680442e+00, 6.0159255614257390e+00}, + {-6.1642724340524513e+00, -5.4136363552800333e+00, -4.7853203673522238e+00, -4.2186094443865612e+00, -3.6902828769983551e+00, -3.1882949244251044e+00, -2.7053202371730256e+00, -2.2364201302672808e+00, -1.7780011243371474e+00, -1.3272807020730839e+00, -8.8198275621382138e-01, -4.4014729864530827e-01, -0.0000000000000000e+00, 4.4014729864530827e-01, 8.8198275621382138e-01, 1.3272807020730839e+00, 1.7780011243371474e+00, 2.2364201302672808e+00, 2.7053202371730256e+00, 3.1882949244251044e+00, 3.6902828769983551e+00, 4.2186094443865612e+00, 4.7853203673522238e+00, 5.4136363552800333e+00, 6.1642724340524513e+00}, + {-6.3095503856256920e+00, -5.5645249819501030e+00, -4.9413249572413793e+00, -4.3796026629833040e+00, -3.8562884199091489e+00, -3.3594271823508297e+00, -2.8817622195430865e+00, -2.4184157647737790e+00, -1.9658547856411364e+00, -1.5213615166519212e+00, -1.0827330110778832e+00, -6.4809521399344827e-01, -2.1577785624346338e-01, 2.1577785624346338e-01, 6.4809521399344827e-01, 1.0827330110778832e+00, 1.5213615166519212e+00, 1.9658547856411364e+00, 2.4184157647737790e+00, 2.8817622195430865e+00, 3.3594271823508297e+00, 3.8562884199091489e+00, 4.3796026629833040e+00, 4.9413249572413793e+00, 5.5645249819501030e+00, 6.3095503856256920e+00}, + {-6.4519401407534707e+00, -5.7122555528165355e+00, -5.0939100031131836e+00, -4.5369066633724415e+00, -4.0183186704087381e+00, -3.5262753401343527e+00, -3.0535824198222552e+00, -2.5954163389108178e+00, -2.1482966453616275e+00, -1.7095607392603374e+00, -1.2770668173398578e+00, -8.4901134206010298e-01, -4.2380790054385303e-01, -0.0000000000000000e+00, 4.2380790054385303e-01, 8.4901134206010298e-01, 1.2770668173398578e+00, 1.7095607392603374e+00, 2.1482966453616275e+00, 2.5954163389108178e+00, 3.0535824198222552e+00, 3.5262753401343527e+00, 4.0183186704087381e+00, 4.5369066633724415e+00, 5.0939100031131836e+00, 5.7122555528165355e+00, 6.4519401407534707e+00}, + {-6.5916054423677419e+00, -5.8570146413828503e+00, -5.2432853732029354e+00, -4.6907565239431177e+00, -4.1766367421292676e+00, -3.6891342384616794e+00, -3.2211120765614556e+00, -2.7677953529135935e+00, -2.3257498426564407e+00, -1.8923604968376853e+00, -1.4655372634574091e+00, -1.0435352737542081e+00, -6.2483671950520925e-01, -2.0806738269073688e-01, 2.0806738269073688e-01, 6.2483671950520925e-01, 1.0435352737542081e+00, 1.4655372634574091e+00, 1.8923604968376853e+00, 2.3257498426564407e+00, 2.7677953529135935e+00, 3.2211120765614556e+00, 3.6891342384616794e+00, 4.1766367421292676e+00, 4.6907565239431177e+00, 5.2432853732029354e+00, 5.8570146413828503e+00, 6.5916054423677419e+00}, + {-6.7286951986088495e+00, -5.9989712894638192e+00, -5.3896405219667507e+00, -4.8413636510591642e+00, -4.3314782938191501e+00, -3.8482667922136202e+00, -3.3846451410922138e+00, -2.9358825042901264e+00, -2.4985856910194042e+00, -2.0701810760534278e+00, -1.6486229138923163e+00, -1.2322157550847530e+00, -8.1949868127091163e-01, -4.0916463639492873e-01, -0.0000000000000000e+00, 4.0916463639492873e-01, 8.1949868127091163e-01, 1.2322157550847530e+00, 1.6486229138923163e+00, 2.0701810760534278e+00, 2.4985856910194042e+00, 2.9358825042901264e+00, 
3.3846451410922138e+00, 3.8482667922136202e+00, 4.3314782938191501e+00, 4.8413636510591642e+00, 5.3896405219667507e+00, 5.9989712894638192e+00, 6.7286951986088495e+00}, + {-6.8633452935298900e+00, -6.1382792201239331e+00, -5.5331471515674950e+00, -4.9889189685899442e+00, -4.4830553570925176e+00, -4.0039086038612286e+00, -3.5444438731553496e+00, -3.0999705295864417e+00, -2.6671321245356170e+00, -2.2433914677615041e+00, -1.8267411436036880e+00, -1.4155278001981884e+00, -1.0083382710467235e+00, -6.0392105862555223e-01, -2.0112857654887151e-01, 2.0112857654887151e-01, 6.0392105862555223e-01, 1.0083382710467235e+00, 1.4155278001981884e+00, 1.8267411436036880e+00, 2.2433914677615041e+00, 2.6671321245356170e+00, 3.0999705295864417e+00, 3.5444438731553496e+00, 4.0039086038612286e+00, 4.4830553570925176e+00, 4.9889189685899442e+00, 5.5331471515674950e+00, 6.1382792201239331e+00, 6.8633452935298900e+00}, + {-6.9956801237185395e+00, -6.2750787049428602e+00, -5.6739614446185875e+00, -5.1335955771123798e+00, -4.6315595063128594e+00, -4.1562717558181452e+00, -3.7007434032314697e+00, -3.2603207323135406e+00, -2.8316804533902054e+00, -2.4123177054804201e+00, -2.0002585489356388e+00, -1.5938858604721398e+00, -1.1918269983500462e+00, -7.9287697691530890e-01, -3.9594273647142297e-01, -0.0000000000000000e+00, 3.9594273647142297e-01, 7.9287697691530890e-01, 1.1918269983500462e+00, 1.5938858604721398e+00, 2.0002585489356388e+00, 2.4123177054804201e+00, 2.8316804533902054e+00, 3.2603207323135406e+00, 3.7007434032314697e+00, 4.1562717558181452e+00, 4.6315595063128594e+00, 5.1335955771123798e+00, 5.6739614446185875e+00, 6.2750787049428602e+00, 6.9956801237185395e+00}, + {-7.1258139098307272e+00, -6.4094981492696599e+00, -5.8122259495159128e+00, -5.2755509865158796e+00, -4.7771645035025960e+00, -4.3055479533511987e+00, -3.8537554854714444e+00, -3.4171674928185705e+00, -2.9924908250023741e+00, -2.5772495377323170e+00, -2.1694991836061122e+00, -1.7676541094632017e+00, -1.3703764109528718e+00, -9.7650046358968279e-01, -5.8497876543593241e-01, -1.9484074156939934e-01, 1.9484074156939934e-01, 5.8497876543593241e-01, 9.7650046358968279e-01, 1.3703764109528718e+00, 1.7676541094632017e+00, 2.1694991836061122e+00, 2.5772495377323170e+00, 2.9924908250023741e+00, 3.4171674928185705e+00, 3.8537554854714444e+00, 4.3055479533511987e+00, 4.7771645035025960e+00, 5.2755509865158796e+00, 5.8122259495159128e+00, 6.4094981492696599e+00, 7.1258139098307272e+00}, + {-7.2538518220151991e+00, -6.5416554457380762e+00, -5.9480711820871450e+00, -5.4149290026141923e+00, -4.9200285205950083e+00, -4.4519111488328269e+00, -4.0036716099569309e+00, -3.5707219802327179e+00, -3.1497966817038248e+00, -2.7384458243513548e+00, -2.3347511515295150e+00, -1.9371545818222065e+00, -1.5443482612431219e+00, -1.1552002041267895e+00, -7.6870137975886854e-01, -3.8392601450840907e-01, -0.0000000000000000e+00, 3.8392601450840907e-01, 7.6870137975886854e-01, 1.1552002041267895e+00, 1.5443482612431219e+00, 1.9371545818222065e+00, 2.3347511515295150e+00, 2.7384458243513548e+00, 3.1497966817038248e+00, 3.5707219802327179e+00, 4.0036716099569309e+00, 4.4519111488328269e+00, 4.9200285205950083e+00, 5.4149290026141923e+00, 5.9480711820871450e+00, 6.5416554457380762e+00, 7.2538518220151991e+00}, + {-7.3798909504812453e+00, -6.6716591360701694e+00, -6.0816169939363158e+00, -5.5518613309887774e+00, -5.0602960186057615e+00, -4.5955197481081704e+00, -4.1506656029707809e+00, -3.7211752324761531e+00, -3.3038084315644154e+00, -2.8961389431744311e+00, -2.4962719408165470e+00, 
-2.1026736904673324e+00, -1.7140625533873379e+00, -1.3293355518847858e+00, -9.4751645803344731e-01, -5.6771726855487448e-01, -1.8910806052714246e-01, 1.8910806052714246e-01, 5.6771726855487448e-01, 9.4751645803344731e-01, 1.3293355518847858e+00, 1.7140625533873379e+00, 2.1026736904673324e+00, 2.4962719408165470e+00, 2.8961389431744311e+00, 3.3038084315644154e+00, 3.7211752324761531e+00, 4.1506656029707809e+00, 4.5955197481081704e+00, 5.0602960186057615e+00, 5.5518613309887774e+00, 6.0816169939363158e+00, 6.6716591360701694e+00, 7.3798909504812453e+00}, + {-7.5040211464489355e+00, -6.7996094132841307e+00, -6.2129737476337166e+00, -5.6864689480904413e+00, -5.1980993461977523e+00, -4.7365184774132105e+00, -4.2948958144927625e+00, -3.8687007309691541e+00, -3.4547164957519905e+00, -3.0505384204304464e+00, -2.6542927811971722e+00, -2.2644675010425686e+00, -1.8798039887309170e+00, -1.4992244886117301e+00, -1.1217809907203027e+00, -7.4661763987986696e-01, -3.7294171704961682e-01, -0.0000000000000000e+00, 3.7294171704961682e-01, 7.4661763987986696e-01, 1.1217809907203027e+00, 1.4992244886117301e+00, 1.8798039887309170e+00, 2.2644675010425686e+00, 2.6542927811971722e+00, 3.0505384204304464e+00, 3.4547164957519905e+00, 3.8687007309691541e+00, 4.2948958144927625e+00, 4.7365184774132105e+00, 5.1980993461977523e+00, 5.6864689480904413e+00, 6.2129737476337166e+00, 6.7996094132841307e+00, 7.5040211464489355e+00}, + {-7.6263257540038927e+00, -6.9255989902599406e+00, -6.3422433309944113e+00, -5.8188632795055772e+00, -5.3335601071130636e+00, -4.8750399724670830e+00, -4.4365069701928563e+00, -4.0134565677494702e+00, -3.6026938571484757e+00, -3.2018339457881582e+00, -2.8090222351311036e+00, -2.4227660420535617e+00, -2.0418271835544202e+00, -1.6651500018434142e+00, -1.2918109588209239e+00, -9.2098180157075304e-01, -5.5190143329042274e-01, -1.8385336710581282e-01, 1.8385336710581282e-01, 5.5190143329042274e-01, 9.2098180157075304e-01, 1.2918109588209239e+00, 1.6651500018434142e+00, 2.0418271835544202e+00, 2.4227660420535617e+00, 2.8090222351311036e+00, 3.2018339457881582e+00, 3.6026938571484757e+00, 4.0134565677494702e+00, 4.4365069701928563e+00, 4.8750399724670830e+00, 5.3335601071130636e+00, 5.8188632795055772e+00, 6.3422433309944113e+00, 6.9255989902599406e+00, 7.6263257540038927e+00}, + {-7.7468822496494552e+00, -7.0497138557782293e+00, -6.4695200365240302e+00, -5.9491472174619711e+00, -5.4667903359685601e+00, -5.0112061385730726e+00, -4.5756317486673579e+00, -4.1555872811264782e+00, -3.7478982064754800e+00, -3.3501978949725357e+00, -2.9606491813032889e+00, -2.5777768581132716e+00, -2.2003609340092520e+00, -1.8273652487636047e+00, -1.4578876468742090e+00, -1.0911237649759331e+00, -7.2633961660512014e-01, -3.6284990505065795e-01, -0.0000000000000000e+00, 3.6284990505065795e-01, 7.2633961660512014e-01, 1.0911237649759331e+00, 1.4578876468742090e+00, 1.8273652487636047e+00, 2.2003609340092520e+00, 2.5777768581132716e+00, 2.9606491813032889e+00, 3.3501978949725357e+00, 3.7478982064754800e+00, 4.1555872811264782e+00, 4.5756317486673579e+00, 5.0112061385730726e+00, 5.4667903359685601e+00, 5.9491472174619711e+00, 6.4695200365240302e+00, 7.0497138557782293e+00, 7.7468822496494552e+00}, + {-7.8657628033800400e+00, -7.1720339353200302e+00, -6.5948913272654934e+00, -6.0774160035375608e+00, -5.5978935141846780e+00, -5.1451293207408231e+00, -4.7123921320848874e+00, -4.2952254197496051e+00, -3.8904737609633409e+00, -3.4957874548356260e+00, -3.1093453117179419e+00, -2.7296879628883253e+00, -2.3556117330355080e+00, 
-1.9860977780390656e+00, -1.6202627556330140e+00, -1.2573231317007134e+00, -8.9656834619313597e-01, -5.3733981087098337e-01, -1.7901372329587750e-01, 1.7901372329587750e-01, 5.3733981087098337e-01, 8.9656834619313597e-01, 1.2573231317007134e+00, 1.6202627556330140e+00, 1.9860977780390656e+00, 2.3556117330355080e+00, 2.7296879628883253e+00, 3.1093453117179419e+00, 3.4957874548356260e+00, 3.8904737609633409e+00, 4.2952254197496051e+00, 4.7123921320848874e+00, 5.1451293207408231e+00, 5.5978935141846780e+00, 6.0774160035375608e+00, 6.5948913272654934e+00, 7.1720339353200302e+00, 7.8657628033800400e+00}, + {-7.9830347727197806e+00, -7.2926336708657198e+00, -6.7184385064440910e+00, -6.2037579977281085e+00, -5.7269654517821049e+00, -5.2769133152304262e+00, -4.8469005687435258e+00, -4.4324928825930368e+00, -4.0305528146024674e+00, -3.6387464248745358e+00, -3.2552672359922297e+00, -2.8786703113749548e+00, -2.5077666938913192e+00, -2.1415530119868800e+00, -1.7791625828543136e+00, -1.4198301576857355e+00, -1.0628655672811786e+00, -7.0763327334857218e-01, -3.5353584699632934e-01, -0.0000000000000000e+00, 3.5353584699632934e-01, 7.0763327334857218e-01, 1.0628655672811786e+00, 1.4198301576857355e+00, 1.7791625828543136e+00, 2.1415530119868800e+00, 2.5077666938913192e+00, 2.8786703113749548e+00, 3.2552672359922297e+00, 3.6387464248745358e+00, 4.0305528146024674e+00, 4.4324928825930368e+00, 4.8469005687435258e+00, 5.2769133152304262e+00, 5.7269654517821049e+00, 6.2037579977281085e+00, 6.7184385064440910e+00, 7.2926336708657198e+00, 7.9830347727197806e+00}, + {-8.0987611392508505e+00, -7.4115825314854691e+00, -6.8402373052493548e+00, -6.3282553512200810e+00, -5.8540950560303990e+00, -5.4066542479701267e+00, -4.9792609785452555e+00, -4.5675020728443938e+00, -4.1682570668324992e+00, -3.7792067534352229e+00, -3.3985582658596281e+00, -3.0248798839012840e+00, -2.6569959984428957e+00, -2.2939171418750832e+00, -1.9347914722822956e+00, -1.5788698949316138e+00, -1.2254801090462888e+00, -8.7400661235708799e-01, -5.2387471383227713e-01, -1.7453721459758240e-01, 1.7453721459758240e-01, 5.2387471383227713e-01, 8.7400661235708799e-01, 1.2254801090462888e+00, 1.5788698949316138e+00, 1.9347914722822956e+00, 2.2939171418750832e+00, 2.6569959984428957e+00, 3.0248798839012840e+00, 3.3985582658596281e+00, 3.7792067534352229e+00, 4.1682570668324992e+00, 4.5675020728443938e+00, 4.9792609785452555e+00, 5.4066542479701267e+00, 5.8540950560303990e+00, 6.3282553512200810e+00, 6.8402373052493548e+00, 7.4115825314854691e+00, 8.0987611392508505e+00}, + {-8.2130008955982792e+00, -7.5289454645396203e+00, -6.9603584006367480e+00, -6.4509845971747524e+00, -5.9793650041651327e+00, -5.5344413406134461e+00, -5.1095696265331334e+00, -4.7003568963041156e+00, -4.3036987671546498e+00, -3.9172898548377817e+00, -3.5393499373637116e+00, -3.1684594539419857e+00, -2.8034549614843187e+00, -2.4433595531234111e+00, -2.0873346819187244e+00, -1.7346456088220295e+00, -1.3846357891600325e+00, -1.0367072529242058e+00, -6.9030505233020811e-01, -3.4490446301543276e-01, -0.0000000000000000e+00, 3.4490446301543276e-01, 6.9030505233020811e-01, 1.0367072529242058e+00, 1.3846357891600325e+00, 1.7346456088220295e+00, 2.0873346819187244e+00, 2.4433595531234111e+00, 2.8034549614843187e+00, 3.1684594539419857e+00, 3.5393499373637116e+00, 3.9172898548377817e+00, 4.3036987671546498e+00, 4.7003568963041156e+00, 5.1095696265331334e+00, 5.5344413406134461e+00, 5.9793650041651327e+00, 6.4509845971747524e+00, 6.9603584006367480e+00, 7.5289454645396203e+00, 
8.2130008955982792e+00}, + {-8.3258093895669294e+00, -7.6447832957047419e+00, -7.0788678730491084e+00, -6.5720171713874747e+00, -6.1028523343815264e+00, -5.6603575812830567e+00, -5.2379158850176495e+00, -4.8311536291282762e+00, -4.4369817058810304e+00, -4.0531077444247670e+00, -3.6777633163885568e+00, -3.3095400965109221e+00, -2.9472857823054786e+00, -2.5900348706171261e+00, -2.2369607870543176e+00, -1.8873416205434848e+00, -1.5405348009155460e+00, -1.1959577943778097e+00, -8.5307290916055367e-01, -5.1137491831546933e-01, -1.7038058556181696e-01, 1.7038058556181696e-01, 5.1137491831546933e-01, 8.5307290916055367e-01, 1.1959577943778097e+00, 1.5405348009155460e+00, 1.8873416205434848e+00, 2.2369607870543176e+00, 2.5900348706171261e+00, 2.9472857823054786e+00, 3.3095400965109221e+00, 3.6777633163885568e+00, 4.0531077444247670e+00, 4.4369817058810304e+00, 4.8311536291282762e+00, 5.2379158850176495e+00, 5.6603575812830567e+00, 6.1028523343815264e+00, 6.5720171713874747e+00, 7.0788678730491084e+00, 7.6447832957047419e+00, 8.3258093895669294e+00}, + {-8.4372386310833765e+00, -7.7591530847325343e+00, -7.1958276123464335e+00, -6.6914198727121068e+00, -6.2246289668942199e+00, -5.7844803140775571e+00, -5.3643829011515560e+00, -4.9599816751949550e+00, -4.5682020755441552e+00, -4.1867640213660025e+00, -3.8139101240654272e+00, -3.4482424822200026e+00, -3.0886190396004185e+00, -2.7340846945379047e+00, -2.3838237072843595e+00, -2.0371256888641933e+00, -1.6933605309399098e+00, -1.3519593686708704e+00, -1.0123996845633196e+00, -6.7419327674231389e-01, -3.3687619662553298e-01, -0.0000000000000000e+00, 3.3687619662553298e-01, 6.7419327674231389e-01, 1.0123996845633196e+00, 1.3519593686708704e+00, 1.6933605309399098e+00, 2.0371256888641933e+00, 2.3838237072843595e+00, 2.7340846945379047e+00, 3.0886190396004185e+00, 3.4482424822200026e+00, 3.8139101240654272e+00, 4.1867640213660025e+00, 4.5682020755441552e+00, 4.9599816751949550e+00, 5.3643829011515560e+00, 5.7844803140775571e+00, 6.2246289668942199e+00, 6.6914198727121068e+00, 7.1958276123464335e+00, 7.7591530847325343e+00, 8.4372386310833765e+00}, + {-8.5473375667355391e+00, -7.8721084427748513e+00, -7.3112956789163306e+00, -6.8092552715358323e+00, -6.3447621648968919e+00, -5.9068817590275451e+00, -5.4890481838807421e+00, -5.0869242297582531e+00, -4.6974492230014393e+00, -4.3183547234421118e+00, -3.9478937115246548e+00, -3.5846779931052279e+00, -3.2275749742774260e+00, -2.8756390821704421e+00, -2.5280644271476649e+00, -2.1841510195665261e+00, -1.8432799206176103e+00, -1.5048944344828714e+00, -1.1684854631902055e+00, -8.3357976100207332e-01, -4.9973020830067427e-01, -1.6650746707363237e-01, 1.6650746707363237e-01, 4.9973020830067427e-01, 8.3357976100207332e-01, 1.1684854631902055e+00, 1.5048944344828714e+00, 1.8432799206176103e+00, 2.1841510195665261e+00, 2.5280644271476649e+00, 2.8756390821704421e+00, 3.2275749742774260e+00, 3.5846779931052279e+00, 3.9478937115246548e+00, 4.3183547234421118e+00, 4.6974492230014393e+00, 5.0869242297582531e+00, 5.4890481838807421e+00, 5.9068817590275451e+00, 6.3447621648968919e+00, 6.8092552715358323e+00, 7.3112956789163306e+00, 7.8721084427748513e+00, 8.5473375667355391e+00}, + {-8.6561523259903268e+00, -7.9836998162220016e+00, -7.4253266258561936e+00, -6.9255820732775950e+00, -6.4633149436642530e+00, -6.0276294722536941e+00, -5.6119841218396642e+00, -5.2120588631620075e+00, -4.8248063087032573e+00, -4.4479690736019313e+00, -4.0798099079253927e+00, -3.7189496893415628e+00, -3.3642645958565400e+00, -3.0148178197492252e+00, 
-2.6698124652513462e+00, -2.3285579714531894e+00, -1.9904454586466382e+00, -1.6549291191547923e+00, -1.3215117856392311e+00, -9.8973342486571125e-01, -6.5916168887413507e-01, -3.2938389969666915e-01, -0.0000000000000000e+00, 3.2938389969666915e-01, 6.5916168887413507e-01, 9.8973342486571125e-01, 1.3215117856392311e+00, 1.6549291191547923e+00, 1.9904454586466382e+00, 2.3285579714531894e+00, 2.6698124652513462e+00, 3.0148178197492252e+00, 3.3642645958565400e+00, 3.7189496893415628e+00, 4.0798099079253927e+00, 4.4479690736019313e+00, 4.8248063087032573e+00, 5.2120588631620075e+00, 5.6119841218396642e+00, 6.0276294722536941e+00, 6.4633149436642530e+00, 6.9255820732775950e+00, 7.4253266258561936e+00, 7.9836998162220016e+00, 8.6561523259903268e+00}, + {-8.7637264425763934e+00, -8.0939747412671181e+00, -7.5379717872258256e+00, -7.0404554428052171e+00, -6.5803464347374083e+00, -6.1467867545054018e+00, -5.7332584418499435e+00, -5.3354580360572301e+00, -4.9503508867398853e+00, -4.5756901345704328e+00, -4.2097477613269749e+00, -3.8511531505025514e+00, -3.4987906412463645e+00, -3.1517315196384712e+00, -2.8091871354772668e+00, -2.4704755132281426e+00, -2.1349968729707256e+00, -1.8022151950574945e+00, -1.4716439717320722e+00, -1.1428349031264304e+00, -8.1536867934468149e-01, -4.8884723585117468e-01, -1.6288702795526222e-01, 1.6288702795526222e-01, 4.8884723585117468e-01, 8.1536867934468149e-01, 1.1428349031264304e+00, 1.4716439717320722e+00, 1.8022151950574945e+00, 2.1349968729707256e+00, 2.4704755132281426e+00, 2.8091871354772668e+00, 3.1517315196384712e+00, 3.4987906412463645e+00, 3.8511531505025514e+00, 4.2097477613269749e+00, 4.5756901345704328e+00, 4.9503508867398853e+00, 5.3354580360572301e+00, 5.7332584418499435e+00, 6.1467867545054018e+00, 6.5803464347374083e+00, 7.0404554428052171e+00, 7.5379717872258256e+00, 8.0939747412671181e+00, 8.7637264425763934e+00}, + {-8.8701010540232534e+00, -8.2029780727977943e+00, -7.6492795366289874e+00, -7.1539272947757162e+00, -6.6959122110632512e+00, -6.2644130149704740e+00, -5.8529346161254212e+00, -5.4571895556816514e+00, -5.0741554169850653e+00, -4.7015953849679351e+00, -4.3377901884924199e+00, -3.9813772107064693e+00, -3.6312484092926161e+00, -3.2864825713246675e+00, -2.9462986351921705e+00, -2.6100224744061520e+00, -2.2770625758595000e+00, -1.9468917579991860e+00, -1.6190330813590508e+00, -1.2930487170109526e+00, -9.6853092273781483e-01, -6.4509452302768844e-01, -3.2237044915615060e-01, -0.0000000000000000e+00, 3.2237044915615060e-01, 6.4509452302768844e-01, 9.6853092273781483e-01, 1.2930487170109526e+00, 1.6190330813590508e+00, 1.9468917579991860e+00, 2.2770625758595000e+00, 2.6100224744061520e+00, 2.9462986351921705e+00, 3.2864825713246675e+00, 3.6312484092926161e+00, 3.9813772107064693e+00, 4.3377901884924199e+00, 4.7015953849679351e+00, 5.0741554169850653e+00, 5.4571895556816514e+00, 5.8529346161254212e+00, 6.2644130149704740e+00, 6.6959122110632512e+00, 7.1539272947757162e+00, 7.6492795366289874e+00, 8.2029780727977943e+00, 8.8701010540232534e+00}, + {-8.9753150819316865e+00, -8.3107521907047843e+00, -7.7592955197657743e+00, -7.2660465541643502e+00, -6.8100645780741411e+00, -6.3805640961864096e+00, -5.9710722250135460e+00, -5.5773169812237278e+00, -5.1962877187923642e+00, -4.8257572281332095e+00, -4.4640145469344583e+00, -4.1097046035605898e+00, -3.7617264902283578e+00, -3.4191659693638847e+00, -3.0812489886451053e+00, -2.7473086248223830e+00, -2.4167609048732164e+00, -2.0890866609442762e+00, -1.7638175798953000e+00, -1.4405252201375651e+00, 
-1.1188121524021566e+00, -7.9830462777856215e-01, -4.7864633759449599e-01, -1.5949293584886245e-01, 1.5949293584886245e-01, 4.7864633759449599e-01, 7.9830462777856215e-01, 1.1188121524021566e+00, 1.4405252201375651e+00, 1.7638175798953000e+00, 2.0890866609442762e+00, 2.4167609048732164e+00, 2.7473086248223830e+00, 3.0812489886451053e+00, 3.4191659693638847e+00, 3.7617264902283578e+00, 4.1097046035605898e+00, 4.4640145469344583e+00, 4.8257572281332095e+00, 5.1962877187923642e+00, 5.5773169812237278e+00, 5.9710722250135460e+00, 6.3805640961864096e+00, 6.8100645780741411e+00, 7.2660465541643502e+00, 7.7592955197657743e+00, 8.3107521907047843e+00, 8.9753150819316865e+00}, + {-9.0794053951994336e+00, -8.4173371862679787e+00, -7.8680628640816641e+00, -7.3768593906319282e+00, -6.9228528349597616e+00, -6.4952925650076585e+00, -6.0877272810547556e+00, -5.6958999850352088e+00, -5.3168113741835157e+00, -4.9482434430048912e+00, -4.5884931402744824e+00, -4.2362125300687872e+00, -3.8903074057661615e+00, -3.5498700372120946e+00, -3.2141328684568560e+00, -2.8824356106455995e+00, -2.5542011939519624e+00, -2.2289177453859468e+00, -1.9061247609883607e+00, -1.5854022514550468e+00, -1.2663620236873581e+00, -9.4864050761025742e-01, -6.3189269951096161e-01, -3.1578690052375491e-01, -0.0000000000000000e+00, 3.1578690052375491e-01, 6.3189269951096161e-01, 9.4864050761025742e-01, 1.2663620236873581e+00, 1.5854022514550468e+00, 1.9061247609883607e+00, 2.2289177453859468e+00, 2.5542011939519624e+00, 2.8824356106455995e+00, 3.2141328684568560e+00, 3.5498700372120946e+00, 3.8903074057661615e+00, 4.2362125300687872e+00, 4.5884931402744824e+00, 4.9482434430048912e+00, 5.3168113741835157e+00, 5.6958999850352088e+00, 6.0877272810547556e+00, 6.4952925650076585e+00, 6.9228528349597616e+00, 7.3768593906319282e+00, 7.8680628640816641e+00, 8.4173371862679787e+00, 9.0794053951994336e+00}, + {-9.1824069581293166e+00, -8.5227710309178022e+00, -7.9756223682056362e+00, -7.4864094298641941e+00, -7.0343235097706103e+00, -6.6086479738553576e+00, -6.2029525192746719e+00, -5.8129946754204047e+00, -5.4357860872249475e+00, -5.0691175849172350e+00, -4.7112936661690421e+00, -4.3609731604545781e+00, -4.0170681728581341e+00, -3.6786770625152689e+00, -3.3450383139378910e+00, -3.0154977695745222e+00, -2.6894847022677451e+00, -2.3664939042986637e+00, -2.0460719686864088e+00, -1.7278065475158983e+00, -1.4113177548982998e+00, -1.0962511289576817e+00, -7.8227172955460689e-01, -4.6905905667823600e-01, -1.5630254688946871e-01, 1.5630254688946871e-01, 4.6905905667823600e-01, 7.8227172955460689e-01, 1.0962511289576817e+00, 1.4113177548982998e+00, 1.7278065475158983e+00, 2.0460719686864088e+00, 2.3664939042986637e+00, 2.6894847022677451e+00, 3.0154977695745222e+00, 3.3450383139378910e+00, 3.6786770625152689e+00, 4.0170681728581341e+00, 4.3609731604545781e+00, 4.7112936661690421e+00, 5.0691175849172350e+00, 5.4357860872249475e+00, 5.8129946754204047e+00, 6.2029525192746719e+00, 6.6086479738553576e+00, 7.0343235097706103e+00, 7.4864094298641941e+00, 7.9756223682056362e+00, 8.5227710309178022e+00, 9.1824069581293166e+00}, + {-9.2843529650947954e+00, -8.6270897293636839e+00, -8.0820126735040123e+00, -7.5947379445835947e+00, -7.1445205714796174e+00, -6.7206770958687514e+00, -6.3167976579034049e+00, -5.9286538858731372e+00, -5.5532680052649175e+00, -5.1884393429381372e+00, -4.8324796145780748e+00, -4.4840540790573504e+00, -4.1420808019620621e+00, -3.8056638567519334e+00, -3.4740473616558587e+00, -3.1465828437716956e+00, -2.8227054233054294e+00, 
-2.5019160040577368e+00, -2.1837676525361926e+00, -1.8678549556995603e+00, -1.5538055294554218e+00, -1.2412730966236114e+00, -9.2993171560017596e-01, -6.1947084974715272e-01, -3.0959104093724038e-01, -0.0000000000000000e+00, 3.0959104093724038e-01, 6.1947084974715272e-01, 9.2993171560017596e-01, 1.2412730966236114e+00, 1.5538055294554218e+00, 1.8678549556995603e+00, 2.1837676525361926e+00, 2.5019160040577368e+00, 2.8227054233054294e+00, 3.1465828437716956e+00, 3.4740473616558587e+00, 3.8056638567519334e+00, 4.1420808019620621e+00, 4.4840540790573504e+00, 4.8324796145780748e+00, 5.1884393429381372e+00, 5.5532680052649175e+00, 5.9286538858731372e+00, 6.3167976579034049e+00, 6.7206770958687514e+00, 7.1445205714796174e+00, 7.5947379445835947e+00, 8.0820126735040123e+00, 8.6270897293636839e+00, 9.2843529650947954e+00}, + {-9.3852749632231376e+00, -8.7303274588209110e+00, -8.1872704197671577e+00, -7.7018840275679166e+00, -7.2534856216969716e+00, -6.8314241370686029e+00, -6.4293096331149808e+00, -6.0429274349196920e+00, -5.6693100068563673e+00, -5.3062648593656414e+00, -4.9521106229353720e+00, -4.6055186799962122e+00, -4.2654127388779193e+00, -3.9309022499573141e+00, -3.6012366010181420e+00, -3.2757726058076915e+00, -2.9539507925538842e+00, -2.6352776875892849e+00, -2.3193122827994568e+00, -2.0056554817328660e+00, -1.6939417005414366e+00, -1.3838320462108629e+00, -1.0750086571198576e+00, -7.6716990012409136e-01, -4.6002619243746073e-01, -1.5329626692045756e-01, 1.5329626692045756e-01, 4.6002619243746073e-01, 7.6716990012409136e-01, 1.0750086571198576e+00, 1.3838320462108629e+00, 1.6939417005414366e+00, 2.0056554817328660e+00, 2.3193122827994568e+00, 2.6352776875892849e+00, 2.9539507925538842e+00, 3.2757726058076915e+00, 3.6012366010181420e+00, 3.9309022499573141e+00, 4.2654127388779193e+00, 4.6055186799962122e+00, 4.9521106229353720e+00, 5.3062648593656414e+00, 5.6693100068563673e+00, 6.0429274349196920e+00, 6.4293096331149808e+00, 6.8314241370686029e+00, 7.2534856216969716e+00, 7.7018840275679166e+00, 8.1872704197671577e+00, 8.7303274588209110e+00, 9.3852749632231376e+00}, + {-9.4852029643727302e+00, -8.8325166958454133e+00, -8.2914303867830093e+00, -7.8078847487003955e+00, -7.3612580683690458e+00, -6.9409309282147520e+00, -6.5405328108757619e+00, -6.1558623601311924e+00, -5.7839619604859447e+00, -5.4226470161614522e+00, -5.0702427937817154e+00, -4.7254265200961152e+00, -4.3871272577720717e+00, -4.0544595295034727e+00, -3.7266776654942122e+00, -3.4031434102599176e+00, -3.0833023089391358e+00, -2.7666660769675384e+00, -2.4527991478588032e+00, -2.1413081984302371e+00, -1.8318338313140423e+00, -1.5240438405087577e+00, -1.2176276482547366e+00, -9.1229161088998856e-01, -6.0775496588729694e-01, -3.0374624334991529e-01, -0.0000000000000000e+00, 3.0374624334991529e-01, 6.0775496588729694e-01, 9.1229161088998856e-01, 1.2176276482547366e+00, 1.5240438405087577e+00, 1.8318338313140423e+00, 2.1413081984302371e+00, 2.4527991478588032e+00, 2.7666660769675384e+00, 3.0833023089391358e+00, 3.4031434102599176e+00, 3.7266776654942122e+00, 4.0544595295034727e+00, 4.3871272577720717e+00, 4.7254265200961152e+00, 5.0702427937817154e+00, 5.4226470161614522e+00, 5.7839619604859447e+00, 6.1558623601311924e+00, 6.5405328108757619e+00, 6.9409309282147520e+00, 7.3612580683690458e+00, 7.8078847487003955e+00, 8.2914303867830093e+00, 8.8325166958454133e+00, 9.4852029643727302e+00}, + {-9.5841655475234688e+00, -8.9336883320961924e+00, -8.3945256233272012e+00, -7.9127752978124946e+00, -7.4678752834836200e+00, 
-7.0492370986783772e+00, -6.6505091785664918e+00, -6.2675031293697510e+00, -5.8972709576411209e+00, -5.5376356924033008e+00, -5.1869289795928895e+00, -4.8438336345819870e+00, -4.5072838118199003e+00, -4.1763988304784849e+00, -3.8504376683661063e+00, -3.5287666814910854e+00, -3.2108360825282936e+00, -2.8961623904571474e+00, -2.5843150518736273e+00, -2.2749060375416974e+00, -1.9675815973920669e+00, -1.6620156026589967e+00, -1.3579040662442226e+00, -1.0549605418738988e+00, -7.5291217747488703e-01, -4.5149624980624264e-01, -1.5045704292086298e-01, 1.5045704292086298e-01, 4.5149624980624264e-01, 7.5291217747488703e-01, 1.0549605418738988e+00, 1.3579040662442226e+00, 1.6620156026589967e+00, 1.9675815973920669e+00, 2.2749060375416974e+00, 2.5843150518736273e+00, 2.8961623904571474e+00, 3.2108360825282936e+00, 3.5287666814910854e+00, 3.8504376683661063e+00, 4.1763988304784849e+00, 4.5072838118199003e+00, 4.8438336345819870e+00, 5.1869289795928895e+00, 5.5376356924033008e+00, 5.8972709576411209e+00, 6.2675031293697510e+00, 6.6505091785664918e+00, 7.0492370986783772e+00, 7.4678752834836200e+00, 7.9127752978124946e+00, 8.3945256233272012e+00, 8.9336883320961924e+00, 9.5841655475234688e+00}, + {-9.6821899525637249e+00, -9.0338717801824053e+00, -8.4965875649075624e+00, -8.0165891148558295e+00, -7.5733727465411995e+00, -7.1563802343429099e+00, -6.7592785186840851e+00, -6.3778918319127680e+00, -6.0092815232536667e+00, -5.6512779962558639e+00, -5.3022190388457897e+00, -4.9607928202275557e+00, -4.6259383469194120e+00, -4.2967794840336984e+00, -3.9725795904787460e+00, -3.6527093464132974e+00, -3.3366233189341656e+00, -3.0238424864191802e+00, -2.7139409275560999e+00, -2.4065354826206522e+00, -2.1012775733291407e+00, -1.7978466126036980e+00, -1.4959445977962444e+00, -1.1952915904152335e+00, -8.9562186046229086e-01, -5.9668052547571682e-01, -2.9822055049276669e-01, -0.0000000000000000e+00, 2.9822055049276669e-01, 5.9668052547571682e-01, 8.9562186046229086e-01, 1.1952915904152335e+00, 1.4959445977962444e+00, 1.7978466126036980e+00, 2.1012775733291407e+00, 2.4065354826206522e+00, 2.7139409275560999e+00, 3.0238424864191802e+00, 3.3366233189341656e+00, 3.6527093464132974e+00, 3.9725795904787460e+00, 4.2967794840336984e+00, 4.6259383469194120e+00, 4.9607928202275557e+00, 5.3022190388457897e+00, 5.6512779962558639e+00, 6.0092815232536667e+00, 6.3778918319127680e+00, 6.7592785186840851e+00, 7.1563802343429099e+00, 7.5733727465411995e+00, 8.0165891148558295e+00, 8.4965875649075624e+00, 9.0338717801824053e+00, 9.6821899525637249e+00}, + {-9.7793021663403099e+00, -9.1330950706121001e+00, -8.5976461414362131e+00, -8.1193580087474686e+00, -7.6777841753281493e+00, -7.2623960212867891e+00, -6.8668785666252870e+00, -6.4870683517446244e+00, -6.1200358061419919e+00, -5.7636184744715768e+00, -5.4161600667900469e+00, -5.0763538899639924e+00, -4.7431435831181288e+00, -4.4156573291002159e+00, -4.0931626262997982e+00, -3.7750342196411242e+00, -3.4607307491477983e+00, -3.1497773444199684e+00, -2.8417523763809922e+00, -2.5362771776597737e+00, -2.2330079208452407e+00, -1.9316290883640068e+00, -1.6318481297059444e+00, -1.3333910111792098e+00, -1.0359984385570031e+00, -7.3942258523430104e-01, -4.4342419505035346e-01, -1.4776995439420115e-01, 1.4776995439420115e-01, 4.4342419505035346e-01, 7.3942258523430104e-01, 1.0359984385570031e+00, 1.3333910111792098e+00, 1.6318481297059444e+00, 1.9316290883640068e+00, 2.2330079208452407e+00, 2.5362771776597737e+00, 2.8417523763809922e+00, 3.1497773444199684e+00, 3.4607307491477983e+00, 
3.7750342196411242e+00, 4.0931626262997982e+00, 4.4156573291002159e+00, 4.7431435831181288e+00, 5.0763538899639924e+00, 5.4161600667900469e+00, 5.7636184744715768e+00, 6.1200358061419919e+00, 6.4870683517446244e+00, 6.8668785666252870e+00, 7.2623960212867891e+00, 7.6777841753281493e+00, 8.1193580087474686e+00, 8.5976461414362131e+00, 9.1330950706121001e+00, 9.7793021663403099e+00}, + {-9.8755270017367671e+00, -9.2313849407386552e+00, -8.6977298758606238e+00, -8.2211122660677169e+00, -7.7811416453357003e+00, -7.3673183767785728e+00, -6.9733451542956244e+00, -6.5950705250038864e+00, -6.2295737517233460e+00, -5.8746993020212539e+00, -5.5287966039093659e+00, -5.1905639023838361e+00, -4.8589492677211483e+00, -4.5330849920738157e+00, -4.2122424936406224e+00, -3.8958003472863840e+00, -3.5832210121368786e+00, -3.2740334921948340e+00, -2.9678201464818930e+00, -2.6642064637087906e+00, -2.3628529934485321e+00, -2.0634488697846303e+00, -1.7657065251132762e+00, -1.4693573011471874e+00, -1.1741477394031905e+00, -8.7983638589173074e-01, -5.8619098157190686e-01, -2.9298593604778866e-01, -0.0000000000000000e+00, 2.9298593604778866e-01, 5.8619098157190686e-01, 8.7983638589173074e-01, 1.1741477394031905e+00, 1.4693573011471874e+00, 1.7657065251132762e+00, 2.0634488697846303e+00, 2.3628529934485321e+00, 2.6642064637087906e+00, 2.9678201464818930e+00, 3.2740334921948340e+00, 3.5832210121368786e+00, 3.8958003472863840e+00, 4.2122424936406224e+00, 4.5330849920738157e+00, 4.8589492677211483e+00, 5.1905639023838361e+00, 5.5287966039093659e+00, 5.8746993020212539e+00, 6.2295737517233460e+00, 6.5950705250038864e+00, 6.9733451542956244e+00, 7.3673183767785728e+00, 7.7811416453357003e+00, 8.2211122660677169e+00, 8.6977298758606238e+00, 9.2313849407386552e+00, 9.8755270017367671e+00}, + {-9.9708881704574601e+00, -9.3287669164964395e+00, -8.7968659746627651e+00, -8.3218807506473613e+00, -7.8834756990041477e+00, -7.4711795689242475e+00, -7.0787123410660744e+00, -6.7019342833176401e+00, -6.3379332589686221e+00, -5.9845604541065045e+00, -5.6401708246496867e+00, -5.3034673690975112e+00, -4.9734024034844930e+00, -4.6491121384000209e+00, -4.3298717115959429e+00, -4.0150633146984989e+00, -3.7041529964080997e+00, -3.3966733857420293e+00, -3.0922105568140124e+00, -2.7903938536440540e+00, -2.4908878695658663e+00, -2.1933860192661303e+00, -1.8976053029060733e+00, -1.6032819710214761e+00, -1.3101678741366003e+00, -1.0180273335786967e+00, -7.2663440704649163e-01, -4.3577044873391346e-01, -1.4522188229810670e-01, 1.4522188229810670e-01, 4.3577044873391346e-01, 7.2663440704649163e-01, 1.0180273335786967e+00, 1.3101678741366003e+00, 1.6032819710214761e+00, 1.8976053029060733e+00, 2.1933860192661303e+00, 2.4908878695658663e+00, 2.7903938536440540e+00, 3.0922105568140124e+00, 3.3966733857420293e+00, 3.7041529964080997e+00, 4.0150633146984989e+00, 4.3298717115959429e+00, 4.6491121384000209e+00, 4.9734024034844930e+00, 5.3034673690975112e+00, 5.6401708246496867e+00, 5.9845604541065045e+00, 6.3379332589686221e+00, 6.7019342833176401e+00, 7.0787123410660744e+00, 7.4711795689242475e+00, 7.8834756990041477e+00, 8.3218807506473613e+00, 8.7968659746627651e+00, 9.3287669164964395e+00, 9.9708881704574601e+00}, + {-1.0065408350118513e+01, -9.4252653876263999e+00, -8.8950804110294346e+00, -8.4216909949586132e+00, -7.9848154458297120e+00, -7.5740103261427940e+00, -7.1830125334106762e+00, -6.8076937845356600e+00, -6.4451503233185363e+00, -6.0932398625106678e+00, -5.7503227086468636e+00, -5.4151064424967945e+00, -5.0865474548246592e+00, 
-4.7637856994329741e+00, -4.4460998505989417e+00, -4.1328755226679874e+00, -3.8235821457915500e+00, -3.5177557486926583e+00, -3.2149858747990203e+00, -2.9149054536014392e+00, -2.6171828242728932e+00, -2.3215153515122373e+00, -2.0276242346848043e+00, -1.7352502204281328e+00, -1.4441500040985724e+00, -1.1540931580511036e+00, -8.6485946196705754e-01, -5.7623653694114951e-01, -2.8801770388153508e-01, -0.0000000000000000e+00, 2.8801770388153508e-01, 5.7623653694114951e-01, 8.6485946196705754e-01, 1.1540931580511036e+00, 1.4441500040985724e+00, 1.7352502204281328e+00, 2.0276242346848043e+00, 2.3215153515122373e+00, 2.6171828242728932e+00, 2.9149054536014392e+00, 3.2149858747990203e+00, 3.5177557486926583e+00, 3.8235821457915500e+00, 4.1328755226679874e+00, 4.4460998505989417e+00, 4.7637856994329741e+00, 5.0865474548246592e+00, 5.4151064424967945e+00, 5.7503227086468636e+00, 6.0932398625106678e+00, 6.4451503233185363e+00, 6.8076937845356600e+00, 7.1830125334106762e+00, 7.5740103261427940e+00, 7.9848154458297120e+00, 8.4216909949586132e+00, 8.8950804110294346e+00, 9.4252653876263999e+00, 1.0065408350118513e+01}, + {-1.0159109246180085e+01, -9.5209036770133171e+00, -8.9923980014049434e+00, -8.5205692841176308e+00, -8.0851886542490199e+00, -7.6758399375048869e+00, -7.2862765943955985e+00, -6.9123815321893174e+00, -6.5512591670629208e+00, -6.2007735579934371e+00, -5.8592901963942339e+00, -5.5255210861386832e+00, -5.1984265345762939e+00, -4.8771500774731509e+00, -4.5609737579358356e+00, -4.2492864359560061e+00, -3.9415607339261842e+00, -3.6373358761707317e+00, -3.3362046535475867e+00, -3.0378033382307490e+00, -2.7418037480696915e+00, -2.4479069023076856e+00, -2.1558378712292110e+00, -1.8653415312330315e+00, -1.5761790119750203e+00, -1.2881246748688935e+00, -1.0009634995607180e+00, -7.1448878167257845e-01, -4.2850006422062747e-01, -1.4280123870343886e-01, 1.4280123870343886e-01, 4.2850006422062747e-01, 7.1448878167257845e-01, 1.0009634995607180e+00, 1.2881246748688935e+00, 1.5761790119750203e+00, 1.8653415312330315e+00, 2.1558378712292110e+00, 2.4479069023076856e+00, 2.7418037480696915e+00, 3.0378033382307490e+00, 3.3362046535475867e+00, 3.6373358761707317e+00, 3.9415607339261842e+00, 4.2492864359560061e+00, 4.5609737579358356e+00, 4.8771500774731509e+00, 5.1984265345762939e+00, 5.5255210861386832e+00, 5.8592901963942339e+00, 6.2007735579934371e+00, 6.5512591670629208e+00, 6.9123815321893174e+00, 7.2862765943955985e+00, 7.6758399375048869e+00, 8.0851886542490199e+00, 8.5205692841176308e+00, 8.9923980014049434e+00, 9.5209036770133171e+00, 1.0159109246180085e+01}, + {-1.0252011649196143e+01, -9.6157041046875609e+00, -9.0888424760573390e+00, -8.6185407332142496e+00, -8.1846218361099510e+00, -7.7766963448476236e+00, -7.3885339440499829e+00, -7.0160284848388690e+00, -6.6562923584988729e+00, -6.3071958002194259e+00, -5.9671093310413950e+00, -5.6347492296736759e+00, -5.3090795734924505e+00, -4.9892473314939929e+00, -4.6745377615999182e+00, -4.3643428074654231e+00, -4.0581381112767332e+00, -3.7554659075853780e+00, -3.4559220338316390e+00, -3.1591458858192856e+00, -2.8648125198053043e+00, -2.5726263447097191e+00, -2.2823160084101848e+00, -1.9936301908281084e+00, -1.7063340915790624e+00, -1.4202064526026656e+00, -1.1350369935815596e+00, -8.5062416477081448e-01, -5.6677314117022048e-01, -2.8329399584293824e-01, -0.0000000000000000e+00, 2.8329399584293824e-01, 5.6677314117022048e-01, 8.5062416477081448e-01, 1.1350369935815596e+00, 1.4202064526026656e+00, 1.7063340915790624e+00, 1.9936301908281084e+00, 
2.2823160084101848e+00, 2.5726263447097191e+00, 2.8648125198053043e+00, 3.1591458858192856e+00, 3.4559220338316390e+00, 3.7554659075853780e+00, 4.0581381112767332e+00, 4.3643428074654231e+00, 4.6745377615999182e+00, 4.9892473314939929e+00, 5.3090795734924505e+00, 5.6347492296736759e+00, 5.9671093310413950e+00, 6.3071958002194259e+00, 6.6562923584988729e+00, 7.0160284848388690e+00, 7.3885339440499829e+00, 7.7766963448476236e+00, 8.1846218361099510e+00, 8.6185407332142496e+00, 9.0888424760573390e+00, 9.6157041046875609e+00, 1.0252011649196143e+01}, + {-1.0344135487806895e+01, -9.7096880469839437e+00, -9.1844365442194498e+00, -8.7156293586041365e+00, -8.2831403244454158e+00, -7.8766062274725090e+00, -7.4898126515277026e+00, -7.1186641563335851e+00, -6.7602809210235622e+00, -6.4125391965266108e+00, -6.0738143878025932e+00, -5.7428269099958227e+00, -5.4185444743952305e+00, -5.1001173457332412e+00, -4.7868338549738239e+00, -4.4780888809091079e+00, -4.1733609279699495e+00, -3.8721950721542884e+00, -3.5741900152128512e+00, -3.2789880784162539e+00, -2.9862673397528621e+00, -2.6957353597041749e+00, -2.4071241008699498e+00, -2.1201857552620891e+00, -1.8346892680544815e+00, -1.5504173991942523e+00, -1.2671642017112603e+00, -9.8473282246759541e-01, -7.0293355055041085e-01, -4.2158205259642756e-01, -1.4049774444160384e-01, 1.4049774444160384e-01, 4.2158205259642756e-01, 7.0293355055041085e-01, 9.8473282246759541e-01, 1.2671642017112603e+00, 1.5504173991942523e+00, 1.8346892680544815e+00, 2.1201857552620891e+00, 2.4071241008699498e+00, 2.6957353597041749e+00, 2.9862673397528621e+00, 3.2789880784162539e+00, 3.5741900152128512e+00, 3.8721950721542884e+00, 4.1733609279699495e+00, 4.4780888809091079e+00, 4.7868338549738239e+00, 5.1001173457332412e+00, 5.4185444743952305e+00, 5.7428269099958227e+00, 6.0738143878025932e+00, 6.4125391965266108e+00, 6.7602809210235622e+00, 7.1186641563335851e+00, 7.4898126515277026e+00, 7.8766062274725090e+00, 8.2831403244454158e+00, 8.7156293586041365e+00, 9.1844365442194498e+00, 9.7096880469839437e+00, 1.0344135487806895e+01}, + {-1.0435499877854168e+01, -9.8028759912974959e+00, -9.2792019543050390e+00, -8.8118581437284540e+00, -8.3807683451863202e+00, -7.9755950801420372e+00, -7.5901395198641071e+00, -7.2203167078889665e+00, -6.8632544331795353e+00, -6.5168348106821155e+00, -6.1794379922705955e+00, -5.8497884000810672e+00, -5.5268572526403030e+00, -5.2097979830408345e+00, -4.8979018644975740e+00, -4.5905665744435185e+00, -4.2872733352824408e+00, -3.9875699104197153e+00, -3.6910577000963465e+00, -3.3973817713303909e+00, -3.1062230279282566e+00, -2.8172919672837975e+00, -2.5303236304712007e+00, -2.2450734604812066e+00, -1.9613138583081484e+00, -1.6788312791720135e+00, -1.3974237486049623e+00, -1.1168987050996462e+00, -8.3707109558947612e-01, -5.5776166427908214e-01, -2.7879538567115225e-01, -0.0000000000000000e+00, 2.7879538567115225e-01, 5.5776166427908214e-01, 8.3707109558947612e-01, 1.1168987050996462e+00, 1.3974237486049623e+00, 1.6788312791720135e+00, 1.9613138583081484e+00, 2.2450734604812066e+00, 2.5303236304712007e+00, 2.8172919672837975e+00, 3.1062230279282566e+00, 3.3973817713303909e+00, 3.6910577000963465e+00, 3.9875699104197153e+00, 4.2872733352824408e+00, 4.5905665744435185e+00, 4.8979018644975740e+00, 5.2097979830408345e+00, 5.5268572526403030e+00, 5.8497884000810672e+00, 6.1794379922705955e+00, 6.5168348106821155e+00, 6.8632544331795353e+00, 7.2203167078889665e+00, 7.5901395198641071e+00, 7.9755950801420372e+00, 8.3807683451863202e+00, 8.8118581437284540e+00, 
9.2792019543050390e+00, 9.8028759912974959e+00, 1.0435499877854168e+01}, + {-1.0526123167960545e+01, -9.8952875868295376e+00, -9.3731595496467204e+00, -8.9072490999647691e+00, -8.4775290833798636e+00, -8.0736872850102248e+00, -7.6895401640404968e+00, -7.3210130327809484e+00, -6.9652411205511076e+00, -6.6201122626360274e+00, -6.2840112287748280e+00, -5.9556663267994852e+00, -5.6340521643499724e+00, -5.3183252246332708e+00, -5.0077796021987675e+00, -4.7018156474074990e+00, -4.3999171682281375e+00, -4.1016344745666560e+00, -3.8065715139453600e+00, -3.5143759357409059e+00, -3.2247312919920357e+00, -2.9373508230046212e+00, -2.6519724354306349e+00, -2.3683545886324011e+00, -2.0862728798817618e+00, -1.8055171714655447e+00, -1.5258891402098638e+00, -1.2472001569431179e+00, -9.6926942307117792e-01, -6.9192230581004455e-01, -4.1498882412107863e-01, -1.3830224498700971e-01, 1.3830224498700971e-01, 4.1498882412107863e-01, 6.9192230581004455e-01, 9.6926942307117792e-01, 1.2472001569431179e+00, 1.5258891402098638e+00, 1.8055171714655447e+00, 2.0862728798817618e+00, 2.3683545886324011e+00, 2.6519724354306349e+00, 2.9373508230046212e+00, 3.2247312919920357e+00, 3.5143759357409059e+00, 3.8065715139453600e+00, 4.1016344745666560e+00, 4.3999171682281375e+00, 4.7018156474074990e+00, 5.0077796021987675e+00, 5.3183252246332708e+00, 5.6340521643499724e+00, 5.9556663267994852e+00, 6.2840112287748280e+00, 6.6201122626360274e+00, 6.9652411205511076e+00, 7.3210130327809484e+00, 7.6895401640404968e+00, 8.0736872850102248e+00, 8.4775290833798636e+00, 8.9072490999647691e+00, 9.3731595496467204e+00, 9.8952875868295376e+00, 1.0526123167960545e+01}, + {-1.0616022981878281e+01, -9.9869416916766838e+00, -9.4663293201553849e+00, -9.0018233229591331e+00, -8.5734447444179089e+00, -8.1709061780525847e+00, -7.7880390829895703e+00, -7.4207788343663230e+00, -7.0662679403068926e+00, -6.7223998201657338e+00, -6.3875637397870904e+00, -6.0604917788315049e+00, -5.7401618236902250e+00, -5.4257332976973487e+00, -5.1165030047214124e+00, -4.8118738520274640e+00, -4.5113321113682128e+00, -4.2144305099719546e+00, -3.9207754044447234e+00, -3.6300168776328952e+00, -3.3418409684468302e+00, -3.0559634843286707e+00, -2.7721250051570916e+00, -2.4900867953039354e+00, -2.2096274151691842e+00, -1.9305398759772254e+00, -1.6526292190403253e+00, -1.3757104277236682e+00, -1.0996066000569449e+00, -8.2414732440241278e-01, -5.4916721122159917e-01, -2.7450454175394473e-01, -0.0000000000000000e+00, 2.7450454175394473e-01, 5.4916721122159917e-01, 8.2414732440241278e-01, 1.0996066000569449e+00, 1.3757104277236682e+00, 1.6526292190403253e+00, 1.9305398759772254e+00, 2.2096274151691842e+00, 2.4900867953039354e+00, 2.7721250051570916e+00, 3.0559634843286707e+00, 3.3418409684468302e+00, 3.6300168776328952e+00, 3.9207754044447234e+00, 4.2144305099719546e+00, 4.5113321113682128e+00, 4.8118738520274640e+00, 5.1165030047214124e+00, 5.4257332976973487e+00, 5.7401618236902250e+00, 6.0604917788315049e+00, 6.3875637397870904e+00, 6.7223998201657338e+00, 7.0662679403068926e+00, 7.4207788343663230e+00, 7.7880390829895703e+00, 8.1709061780525847e+00, 8.5734447444179089e+00, 9.0018233229591331e+00, 9.4663293201553849e+00, 9.9869416916766838e+00, 1.0616022981878281e+01}, + {-1.0705216257883158e+01, -1.0077856416579301e+01, -9.5587304502592971e+00, -9.0956010448419295e+00, -8.6685366107264805e+00, -8.2672741104998497e+00, -7.8856597261055059e+00, -7.5196386980596541e+00, -7.1663606590936650e+00, -6.8237244832006141e+00, -6.4901238172609474e+00, -6.1642944056838216e+00, 
-5.8452173103130356e+00, -5.5320547921086867e+00, -5.2241062602848638e+00, -4.9207770717124939e+00, -4.6215558496899147e+00, -4.3259976200905568e+00, -4.0337110220232306e+00, -3.7443484358240933e+00, -3.4575982403178411e+00, -3.1731786504386261e+00, -2.8908327450724003e+00, -2.6103244026894505e+00, -2.3314349368154352e+00, -2.0539602757725310e+00, -1.7777085685161123e+00, -1.5024981254275005e+00, -1.2281556226723449e+00, -9.5451451326831394e-01, -6.8141359873982055e-01, -4.0869572314721075e-01, -1.3620655703952730e-01, 1.3620655703952730e-01, 4.0869572314721075e-01, 6.8141359873982055e-01, 9.5451451326831394e-01, 1.2281556226723449e+00, 1.5024981254275005e+00, 1.7777085685161123e+00, 2.0539602757725310e+00, 2.3314349368154352e+00, 2.6103244026894505e+00, 2.8908327450724003e+00, 3.1731786504386261e+00, 3.4575982403178411e+00, 3.7443484358240933e+00, 4.0337110220232306e+00, 4.3259976200905568e+00, 4.6215558496899147e+00, 4.9207770717124939e+00, 5.2241062602848638e+00, 5.5320547921086867e+00, 5.8452173103130356e+00, 6.1642944056838216e+00, 6.4901238172609474e+00, 6.8237244832006141e+00, 7.1663606590936650e+00, 7.5196386980596541e+00, 7.8856597261055059e+00, 8.2672741104998497e+00, 8.6685366107264805e+00, 9.0956010448419295e+00, 9.5587304502592971e+00, 1.0077856416579301e+01, 1.0705216257883158e+01}, + {-1.0793719285461286e+01, -1.0168049165614175e+01, -9.6503813634446356e+00, -9.1886016826884820e+00, -8.7628250943199557e+00, -8.3628125057261151e+00, -7.9824245547615575e+00, -7.6176161578278192e+00, -7.2655439249081999e+00, -6.9241120615277021e+00, -6.5917184866891132e+00, -6.2671025086846086e+00, -5.9492482679521004e+00, -5.6373207673775578e+00, -5.3306219248283462e+00, -5.0285594473595783e+00, -4.7306242061652037e+00, -4.4363734165111914e+00, -4.1454178838251625e+00, -3.8574121614750752e+00, -3.5720468340630473e+00, -3.2890423786892447e+00, -3.0081442150749433e+00, -2.7291186629245998e+00, -2.4517495992505411e+00, -2.1758356606878357e+00, -1.9011878731830645e+00, -1.6276276184711236e+00, -1.3549848665269235e+00, -1.0830966177633830e+00, -8.1180550955890352e-01, -5.4095854971511481e-01, -2.7040594535031914e-01, -0.0000000000000000e+00, 2.7040594535031914e-01, 5.4095854971511481e-01, 8.1180550955890352e-01, 1.0830966177633830e+00, 1.3549848665269235e+00, 1.6276276184711236e+00, 1.9011878731830645e+00, 2.1758356606878357e+00, 2.4517495992505411e+00, 2.7291186629245998e+00, 3.0081442150749433e+00, 3.2890423786892447e+00, 3.5720468340630473e+00, 3.8574121614750752e+00, 4.1454178838251625e+00, 4.4363734165111914e+00, 4.7306242061652037e+00, 5.0285594473595783e+00, 5.3306219248283462e+00, 5.6373207673775578e+00, 5.9492482679521004e+00, 6.2671025086846086e+00, 6.5917184866891132e+00, 6.9241120615277021e+00, 7.2655439249081999e+00, 7.6176161578278192e+00, 7.9824245547615575e+00, 8.3628125057261151e+00, 8.7628250943199557e+00, 9.1886016826884820e+00, 9.6503813634446356e+00, 1.0168049165614175e+01, 1.0793719285461286e+01}, + {-1.0881547739512506e+01, -1.0257536674087854e+01, -9.7412997636867704e+00, -9.2808438835486236e+00, -8.8563297855819929e+00, -8.4575419119948716e+00, -8.0783550992846092e+00, -7.7147337577029216e+00, -7.3638413335052801e+00, -7.0235872465011688e+00, -6.6923735845747432e+00, -6.3689431247367301e+00, -6.0522829950450374e+00, -5.7415608508027303e+00, -5.4360810284450567e+00, -5.1352534929147948e+00, -4.8385712673985131e+00, -4.5455936557800598e+00, -4.2559335229494488e+00, -3.9692474811577245e+00, -3.6852281980879265e+00, -3.4035982804227491e+00, -3.1241053447888532e+00, 
-2.8465179952316144e+00, -2.5706225005795180e+00, -2.2962200172773377e+00, -2.0231242405790431e+00, -1.7511593940081669e+00, -1.4801584867781896e+00, -1.2099617834856000e+00, -9.4041544125892573e-01, -6.7137027764848267e-01, -4.0268063855845554e-01, -1.3420333993493833e-01, 1.3420333993493833e-01, 4.0268063855845554e-01, 6.7137027764848267e-01, 9.4041544125892573e-01, 1.2099617834856000e+00, 1.4801584867781896e+00, 1.7511593940081669e+00, 2.0231242405790431e+00, 2.2962200172773377e+00, 2.5706225005795180e+00, 2.8465179952316144e+00, 3.1241053447888532e+00, 3.4035982804227491e+00, 3.6852281980879265e+00, 3.9692474811577245e+00, 4.2559335229494488e+00, 4.5455936557800598e+00, 4.8385712673985131e+00, 5.1352534929147948e+00, 5.4360810284450567e+00, 5.7415608508027303e+00, 6.0522829950450374e+00, 6.3689431247367301e+00, 6.6923735845747432e+00, 7.0235872465011688e+00, 7.3638413335052801e+00, 7.7147337577029216e+00, 8.0783550992846092e+00, 8.4575419119948716e+00, 8.8563297855819929e+00, 9.2808438835486236e+00, 9.7412997636867704e+00, 1.0257536674087854e+01, 1.0881547739512506e+01}, + {-1.0968716712272720e+01, -1.0346335043862190e+01, -9.8315026740330858e+00, -9.3723455663368824e+00, -8.9490694985983303e+00, -8.5514820514248040e+00, -8.1734720117891584e+00, -7.8110131087611858e+00, -7.4612754898399762e+00, -7.1221736773103954e+00, -6.7921138299353210e+00, -6.4698421035199560e+00, -6.1543485281550758e+00, -5.8448033276997977e+00, -5.5405131730790877e+00, -5.2408902012860343e+00, -4.9454294985999852e+00, -4.6536923643912962e+00, -4.3652936245432166e+00, -4.0798918452871904e+00, -3.7971816649246475e+00, -3.5168876985063164e+00, -3.2387596283073630e+00, -2.9625681999937008e+00, -2.6881019184401618e+00, -2.4151642892936240e+00, -2.1435714896364328e+00, -1.8731503781015579e+00, -1.6037367745786486e+00, -1.3351739542940309e+00, -1.0673113119622561e+00, -8.0000315987311321e-01, -5.3310762988307847e-01, -2.6648565380755118e-01, -0.0000000000000000e+00, 2.6648565380755118e-01, 5.3310762988307847e-01, 8.0000315987311321e-01, 1.0673113119622561e+00, 1.3351739542940309e+00, 1.6037367745786486e+00, 1.8731503781015579e+00, 2.1435714896364328e+00, 2.4151642892936240e+00, 2.6881019184401618e+00, 2.9625681999937008e+00, 3.2387596283073630e+00, 3.5168876985063164e+00, 3.7971816649246475e+00, 4.0798918452871904e+00, 4.3652936245432166e+00, 4.6536923643912962e+00, 4.9454294985999852e+00, 5.2408902012860343e+00, 5.5405131730790877e+00, 5.8448033276997977e+00, 6.1543485281550758e+00, 6.4698421035199560e+00, 6.7921138299353210e+00, 7.1221736773103954e+00, 7.4612754898399762e+00, 7.8110131087611858e+00, 8.1734720117891584e+00, 8.5514820514248040e+00, 8.9490694985983303e+00, 9.3723455663368824e+00, 9.8315026740330858e+00, 1.0346335043862190e+01, 1.0968716712272720e+01}, + {-1.1055240743138121e+01, -1.0434459776321237e+01, -9.9210064725726337e+00, -9.4631239608462057e+00, -9.0410623133341144e+00, -8.6446518655005491e+00, -8.2677951152318059e+00, -7.9064749419687779e+00, -7.5578680649896128e+00, -7.2198940023018814e+00, -6.8909628903904032e+00, -6.5698241787566660e+00, -6.2554707188783159e+00, -5.9470752244713703e+00, -5.6439466223452381e+00, -5.3454991415732502e+00, -5.0512298489716008e+00, -4.7607019531713526e+00, -4.4735321501381859e+00, -4.1893808634925733e+00, -3.9079445988680890e+00, -3.6289498686372541e+00, -3.3521483007723556e+00, -3.0773126524497552e+00, -2.8042335229302870e+00, -2.5327166122907285e+00, -2.2625804097899467e+00, -1.9936542226231451e+00, -1.7257764756009522e+00, -1.4587932269483976e+00, 
-1.1925568563654070e+00, -9.2692488970661657e-01, -6.6175893081256654e-01, -3.9692367564298847e-01, -1.3228598727031571e-01, 1.3228598727031571e-01, 3.9692367564298847e-01, 6.6175893081256654e-01, 9.2692488970661657e-01, 1.1925568563654070e+00, 1.4587932269483976e+00, 1.7257764756009522e+00, 1.9936542226231451e+00, 2.2625804097899467e+00, 2.5327166122907285e+00, 2.8042335229302870e+00, 3.0773126524497552e+00, 3.3521483007723556e+00, 3.6289498686372541e+00, 3.9079445988680890e+00, 4.1893808634925733e+00, 4.4735321501381859e+00, 4.7607019531713526e+00, 5.0512298489716008e+00, 5.3454991415732502e+00, 5.6439466223452381e+00, 5.9470752244713703e+00, 6.2554707188783159e+00, 6.5698241787566660e+00, 6.8909628903904032e+00, 7.2198940023018814e+00, 7.5578680649896128e+00, 7.9064749419687779e+00, 8.2677951152318059e+00, 8.6446518655005491e+00, 9.0410623133341144e+00, 9.4631239608462057e+00, 9.9210064725726337e+00, 1.0434459776321237e+01, 1.1055240743138121e+01}, + {-1.1141133846557336e+01, -1.0521925803168939e+01, -1.0009826926005323e+01, -9.5531956441224679e+00, -9.1323256149194787e+00, -8.7370695574210657e+00, -8.3613434490107519e+00, -8.0011391572542987e+00, -7.6536398489544064e+00, -7.3167699357978755e+00, -6.9889434433253230e+00, -6.6689130340885914e+00, -6.3556743048467315e+00, -6.0484023852002728e+00, -5.7464083842334572e+00, -5.4491085484699262e+00, -5.1560018484650616e+00, -4.8666533221367576e+00, -4.5806814514481742e+00, -4.2977484283048772e+00, -4.0175525306328757e+00, -3.7398220661958628e+00, -3.4643104988218911e+00, -3.1907924783359922e+00, -2.9190605692775651e+00, -2.6489225254318436e+00, -2.3801989943566357e+00, -2.1127215630246923e+00, -1.8463310754751214e+00, -1.5808761680325296e+00, -1.3162119786216788e+00, -1.0521989949595671e+00, -7.8870201263520334e-01, -5.2558917877833533e-01, -2.6273110051935877e-01, -0.0000000000000000e+00, 2.6273110051935877e-01, 5.2558917877833533e-01, 7.8870201263520334e-01, 1.0521989949595671e+00, 1.3162119786216788e+00, 1.5808761680325296e+00, 1.8463310754751214e+00, 2.1127215630246923e+00, 2.3801989943566357e+00, 2.6489225254318436e+00, 2.9190605692775651e+00, 3.1907924783359922e+00, 3.4643104988218911e+00, 3.7398220661958628e+00, 4.0175525306328757e+00, 4.2977484283048772e+00, 4.5806814514481742e+00, 4.8666533221367576e+00, 5.1560018484650616e+00, 5.4491085484699262e+00, 5.7464083842334572e+00, 6.0484023852002728e+00, 6.3556743048467315e+00, 6.6689130340885914e+00, 6.9889434433253230e+00, 7.3167699357978755e+00, 7.6536398489544064e+00, 8.0011391572542987e+00, 8.3613434490107519e+00, 8.7370695574210657e+00, 9.1323256149194787e+00, 9.5531956441224679e+00, 1.0009826926005323e+01, 1.0521925803168939e+01, 1.1141133846557336e+01}, + {-1.1226409538142136e+01, -1.0608747515229252e+01, -1.0097979221003259e+01, -9.6425765744143064e+00, -9.2228761302815698e+00, -8.8287526315494791e+00, -8.4541353114024211e+00, -8.0950248691313433e+00, -7.7486107996948919e+00, -7.4128223108085409e+00, -7.0860772325710890e+00, -6.7671313640535109e+00, -6.4549829753707284e+00, -6.1488095423716089e+00, -5.8479242873741697e+00, -5.5517454045920136e+00, -5.2597736967589261e+00, -4.9715759567771789e+00, -4.6867723746994256e+00, -4.4050268283718435e+00, -4.1260392804003079e+00, -3.8495397401994360e+00, -3.5752834066737424e+00, -3.3030467134928974e+00, -3.0326240725636300e+00, -2.7638251631581228e+00, -2.4964726512514903e+00, -2.2304002505260021e+00, -1.9654510562578547e+00, -1.7014760979685193e+00, -1.4383330677067736e+00, -1.1758851891099782e+00, -9.1400019866369742e-01, 
-6.5254941532698807e-01, -3.9140687826621273e-01, -1.3044853509282020e-01, 1.3044853509282020e-01, 3.9140687826621273e-01, 6.5254941532698807e-01, 9.1400019866369742e-01, 1.1758851891099782e+00, 1.4383330677067736e+00, 1.7014760979685193e+00, 1.9654510562578547e+00, 2.2304002505260021e+00, 2.4964726512514903e+00, 2.7638251631581228e+00, 3.0326240725636300e+00, 3.3030467134928974e+00, 3.5752834066737424e+00, 3.8495397401994360e+00, 4.1260392804003079e+00, 4.4050268283718435e+00, 4.6867723746994256e+00, 4.9715759567771789e+00, 5.2597736967589261e+00, 5.5517454045920136e+00, 5.8479242873741697e+00, 6.1488095423716089e+00, 6.4549829753707284e+00, 6.7671313640535109e+00, 7.0860772325710890e+00, 7.4128223108085409e+00, 7.7486107996948919e+00, 8.0950248691313433e+00, 8.4541353114024211e+00, 8.8287526315494791e+00, 9.2228761302815698e+00, 9.6425765744143064e+00, 1.0097979221003259e+01, 1.0608747515229252e+01, 1.1226409538142136e+01}, + {-1.1311080859133723e+01, -1.0694938789404631e+01, -1.0185477993538820e+01, -9.7312821228925461e+00, -9.3127299623383131e+00, -8.9197179302027116e+00, -8.5461882990986897e+00, -8.1881504491623041e+00, -7.8428000887281044e+00, -7.5080711279939001e+00, -7.1823851209949314e+00, -6.8645009305993021e+00, -6.5534194322069030e+00, -6.2483203822637563e+00, -5.9485190514660768e+00, -5.6534355164057573e+00, -5.3625723452058249e+00, -5.0754980166052253e+00, -4.7918343564389145e+00, -4.5112468522638922e+00, -4.2334370704564908e+00, -3.9581366357170062e+00, -3.6851023893880592e+00, -3.4141124491940373e+00, -3.1449629665080243e+00, -2.8774654290158654e+00, -2.6114443936789562e+00, -2.3467355617654575e+00, -2.0831841274592904e+00, -1.8206433462185645e+00, -1.5589732800480507e+00, -1.2980396851534748e+00, -1.0377130137484731e+00, -7.7786750657853077e-01, -5.1838035636125168e-01, -2.5913092507112634e-01, -0.0000000000000000e+00, 2.5913092507112634e-01, 5.1838035636125168e-01, 7.7786750657853077e-01, 1.0377130137484731e+00, 1.2980396851534748e+00, 1.5589732800480507e+00, 1.8206433462185645e+00, 2.0831841274592904e+00, 2.3467355617654575e+00, 2.6114443936789562e+00, 2.8774654290158654e+00, 3.1449629665080243e+00, 3.4141124491940373e+00, 3.6851023893880592e+00, 3.9581366357170062e+00, 4.2334370704564908e+00, 4.5112468522638922e+00, 4.7918343564389145e+00, 5.0754980166052253e+00, 5.3625723452058249e+00, 5.6534355164057573e+00, 5.9485190514660768e+00, 6.2483203822637563e+00, 6.5534194322069030e+00, 6.8645009305993021e+00, 7.1823851209949314e+00, 7.5080711279939001e+00, 7.8428000887281044e+00, 8.1881504491623041e+00, 8.5461882990986897e+00, 8.9197179302027116e+00, 9.3127299623383131e+00, 9.7312821228925461e+00, 1.0185477993538820e+01, 1.0694938789404631e+01, 1.1311080859133723e+01}, + {-1.1395160399349303e+01, -1.0780513013934538e+01, -1.0272337356338001e+01, -9.8193271033153593e+00, -9.4019026219489774e+00, -9.0099816679963673e+00, -8.6375193440825591e+00, -8.2805335655260475e+00, -7.9362261435724362e+00, -7.6025356011958438e+00, -7.2778871393556734e+00, -6.9610426155273659e+00, -6.6510054458855254e+00, -6.3469576054906103e+00, -6.0482163524027337e+00, -5.7542035843517603e+00, -5.4644235724164387e+00, -5.1784464167185336e+00, -4.8958955116562031e+00, -4.6164378838097413e+00, -4.3397766284785542e+00, -4.0656449059380266e+00, -3.7938011146597774e+00, -3.5240249647298416e+00, -3.2561142480573673e+00, -2.9898821536352491e+00, -2.7251550130837989e+00, -2.4617703885377660e+00, -2.1995754346550642e+00, -1.9384254811801831e+00, -1.6781827934921525e+00, -1.4187154768845949e+00, 
-1.1598964966533338e+00, -9.0160279089609208e-01, -6.4371445661134197e-01, -3.8611399247334727e-01, -1.2868558374092035e-01, 1.2868558374092035e-01, 3.8611399247334727e-01, 6.4371445661134197e-01, 9.0160279089609208e-01, 1.1598964966533338e+00, 1.4187154768845949e+00, 1.6781827934921525e+00, 1.9384254811801831e+00, 2.1995754346550642e+00, 2.4617703885377660e+00, 2.7251550130837989e+00, 2.9898821536352491e+00, 3.2561142480573673e+00, 3.5240249647298416e+00, 3.7938011146597774e+00, 4.0656449059380266e+00, 4.3397766284785542e+00, 4.6164378838097413e+00, 4.8958955116562031e+00, 5.1784464167185336e+00, 5.4644235724164387e+00, 5.7542035843517603e+00, 6.0482163524027337e+00, 6.3469576054906103e+00, 6.6510054458855254e+00, 6.9610426155273659e+00, 7.2778871393556734e+00, 7.6025356011958438e+00, 7.9362261435724362e+00, 8.2805335655260475e+00, 8.6375193440825591e+00, 9.0099816679963673e+00, 9.4019026219489774e+00, 9.8193271033153593e+00, 1.0272337356338001e+01, 1.0780513013934538e+01, 1.1395160399349303e+01}, + {-1.1478660318722643e+01, -1.0865483112083050e+01, -1.0358570924603207e+01, -9.9067257997991209e+00, -9.4904090577984004e+00, -9.0995594639401105e+00, -8.7281447480576215e+00, -8.3721912199266999e+00, -8.0289066873027544e+00, -7.6962341998287283e+00, -7.3726025317424613e+00, -7.0567764692174766e+00, -6.7477619079872566e+00, -6.4447429831267424e+00, -6.1470388825769424e+00, -5.8540732676978866e+00, -5.5653520540734327e+00, -5.2804469030361734e+00, -4.9989827149592099e+00, -4.7206279897915646e+00, -4.4450872824012313e+00, -4.1720952149448500e+00, -3.9014116643261456e+00, -3.6328178485882909e+00, -3.3661131093032384e+00, -3.1011122385952579e+00, -2.8376432363425028e+00, -2.5755454098889712e+00, -2.3146677482934548e+00, -2.0548675177858016e+00, -1.7960090360977110e+00, -1.5379625916624640e+00, -1.2806034800245349e+00, -1.0238111346569745e+00, -7.6746833310481610e-01, -5.1146046221189689e-01, -2.5567482833134803e-01, -0.0000000000000000e+00, 2.5567482833134803e-01, 5.1146046221189689e-01, 7.6746833310481610e-01, 1.0238111346569745e+00, 1.2806034800245349e+00, 1.5379625916624640e+00, 1.7960090360977110e+00, 2.0548675177858016e+00, 2.3146677482934548e+00, 2.5755454098889712e+00, 2.8376432363425028e+00, 3.1011122385952579e+00, 3.3661131093032384e+00, 3.6328178485882909e+00, 3.9014116643261456e+00, 4.1720952149448500e+00, 4.4450872824012313e+00, 4.7206279897915646e+00, 4.9989827149592099e+00, 5.2804469030361734e+00, 5.5653520540734327e+00, 5.8540732676978866e+00, 6.1470388825769424e+00, 6.4447429831267424e+00, 6.7477619079872566e+00, 7.0567764692174766e+00, 7.3726025317424613e+00, 7.6962341998287283e+00, 8.0289066873027544e+00, 8.3721912199266999e+00, 8.7281447480576215e+00, 9.0995594639401105e+00, 9.4904090577984004e+00, 9.9067257997991209e+00, 1.0358570924603207e+01, 1.0865483112083050e+01, 1.1478660318722643e+01}, + {-1.1561592367542413e+01, -1.0949861564373141e+01, -1.0444191840136728e+01, -9.9934919928404522e+00, -9.5782636843755515e+00, -9.1884663714605921e+00, -8.8180802146261925e+00, -8.4631397820581817e+00, -8.1208587754520742e+00, -7.7891846883888824e+00, -7.4665497978837827e+00, -7.1517217559506374e+00, -6.8437088797189292e+00, -6.5416974088024276e+00, -6.2450084067917304e+00, -5.9530672445968058e+00, -5.6653814275042969e+00, -5.3815241217984982e+00, -5.1011216754614015e+00, -4.8238440007350292e+00, -4.5493970476880525e+00, -4.2775168321151087e+00, -4.0079646366324129e+00, -3.7405231094103377e+00, -3.4749930580623096e+00, -3.2111907876889831e+00, -2.9489458689195023e+00, 
-2.6880992485383528e+00, -2.4285016349547641e+00, -2.1700121054020434e+00, -1.9124968927478689e+00, -1.6558283181292228e+00, -1.3998838419857680e+00, -1.1445452109440197e+00, -8.8969768175512143e-01, -6.3522930636766928e-01, -3.8103026441027604e-01, -1.2699223100184112e-01, 1.2699223100184112e-01, 3.8103026441027604e-01, 6.3522930636766928e-01, 8.8969768175512143e-01, 1.1445452109440197e+00, 1.3998838419857680e+00, 1.6558283181292228e+00, 1.9124968927478689e+00, 2.1700121054020434e+00, 2.4285016349547641e+00, 2.6880992485383528e+00, 2.9489458689195023e+00, 3.2111907876889831e+00, 3.4749930580623096e+00, 3.7405231094103377e+00, 4.0079646366324129e+00, 4.2775168321151087e+00, 4.5493970476880525e+00, 4.8238440007350292e+00, 5.1011216754614015e+00, 5.3815241217984982e+00, 5.6653814275042969e+00, 5.9530672445968058e+00, 6.2450084067917304e+00, 6.5416974088024276e+00, 6.8437088797189292e+00, 7.1517217559506374e+00, 7.4665497978837827e+00, 7.7891846883888824e+00, 8.1208587754520742e+00, 8.4631397820581817e+00, 8.8180802146261925e+00, 9.1884663714605921e+00, 9.5782636843755515e+00, 9.9934919928404522e+00, 1.0444191840136728e+01, 1.0949861564373141e+01, 1.1561592367542413e+01}, + {-1.1643967905483196e+01, -1.1033660429475029e+01, -1.0529212793984577e+01, -1.0079638983721908e+01, -9.6654804081925363e+00, -9.2767169065128172e+00, -8.9073408793931357e+00, -8.5533950218190302e+00, -8.2120988304736517e+00, -7.8814041633182299e+00, -7.5597467325857197e+00, -7.2458969961152926e+00, -6.9388656371030670e+00, -6.6378409471161133e+00, -6.3421458141623201e+00, -6.0512072677740329e+00, -5.7645343514856853e+00, -5.4817016838558219e+00, -5.2023370059655702e+00, -4.9261115854472983e+00, -4.6527327077376199e+00, -4.3819377189728366e+00, -4.1134892401757890e+00, -3.8471712777564733e+00, -3.5827860282892376e+00, -3.3201512269147213e+00, -3.0590979254933970e+00, -2.7994686133415088e+00, -2.5411156130214039e+00, -2.2838996982733861e+00, -2.0276888921639609e+00, -1.7723574118602183e+00, -1.5177847328096994e+00, -1.2638547500001409e+00, -1.0104550177477929e+00, -7.5747605237336058e-01, -5.0481068435860232e-01, -2.5235344827118406e-01, -0.0000000000000000e+00, 2.5235344827118406e-01, 5.0481068435860232e-01, 7.5747605237336058e-01, 1.0104550177477929e+00, 1.2638547500001409e+00, 1.5177847328096994e+00, 1.7723574118602183e+00, 2.0276888921639609e+00, 2.2838996982733861e+00, 2.5411156130214039e+00, 2.7994686133415088e+00, 3.0590979254933970e+00, 3.3201512269147213e+00, 3.5827860282892376e+00, 3.8471712777564733e+00, 4.1134892401757890e+00, 4.3819377189728366e+00, 4.6527327077376199e+00, 4.9261115854472983e+00, 5.2023370059655702e+00, 5.4817016838558219e+00, 5.7645343514856853e+00, 6.0512072677740329e+00, 6.3421458141623201e+00, 6.6378409471161133e+00, 6.9388656371030670e+00, 7.2458969961152926e+00, 7.5597467325857197e+00, 7.8814041633182299e+00, 8.2120988304736517e+00, 8.5533950218190302e+00, 8.9073408793931357e+00, 9.2767169065128172e+00, 9.6654804081925363e+00, 1.0079638983721908e+01, 1.0529212793984577e+01, 1.1033660429475029e+01, 1.1643967905483196e+01}, + {-1.1725797919515925e+01, -1.1116891363846698e+01, -1.0613646047709890e+01, -1.0165179617422121e+01, -9.7520726523771355e+00, -9.3643250739262687e+00, -8.9959413381560331e+00, -8.6429721394539492e+00, -8.3026426739573083e+00, -7.9729090874349415e+00, -7.6522104625334553e+00, -7.3393200055543293e+00, -7.0332507130647253e+00, -6.7331928786770474e+00, -6.4384711663544172e+00, -6.1485142162283823e+00, -5.8628325617022545e+00, -5.5810022242153172e+00, 
-5.3026522869660511e+00, -5.0274553198842957e+00, -4.7551198880737857e+00, -4.4853846092140870e+00, -4.2180133803413131e+00, -3.9527914995987943e+00, -3.6895224813523542e+00, -3.4280254143563189e+00, -3.1681327493707792e+00, -2.9096884292910694e+00, -2.6525462944632925e+00, -2.3965687104578790e+00, -2.1416253765529731e+00, -1.8875922815137989e+00, -1.6343507796298868e+00, -1.3817867648795250e+00, -1.1297899248837724e+00, -8.7825305924819486e-01, -6.2707144915742807e-01, -3.7614226682388563e-01, -1.2536401469850444e-01, 1.2536401469850444e-01, 3.7614226682388563e-01, 6.2707144915742807e-01, 8.7825305924819486e-01, 1.1297899248837724e+00, 1.3817867648795250e+00, 1.6343507796298868e+00, 1.8875922815137989e+00, 2.1416253765529731e+00, 2.3965687104578790e+00, 2.6525462944632925e+00, 2.9096884292910694e+00, 3.1681327493707792e+00, 3.4280254143563189e+00, 3.6895224813523542e+00, 3.9527914995987943e+00, 4.2180133803413131e+00, 4.4853846092140870e+00, 4.7551198880737857e+00, 5.0274553198842957e+00, 5.3026522869660511e+00, 5.5810022242153172e+00, 5.8628325617022545e+00, 6.1485142162283823e+00, 6.4384711663544172e+00, 6.7331928786770474e+00, 7.0332507130647253e+00, 7.3393200055543293e+00, 7.6522104625334553e+00, 7.9729090874349415e+00, 8.3026426739573083e+00, 8.6429721394539492e+00, 8.9959413381560331e+00, 9.3643250739262687e+00, 9.7520726523771355e+00, 1.0165179617422121e+01, 1.0613646047709890e+01, 1.1116891363846698e+01, 1.1725797919515925e+01}, + {-1.1807093040777294e+01, -1.1199565640216381e+01, -1.0697503453395814e+01, -1.0250126304140645e+01, -9.8380533797602858e+00, -9.4513043921191180e+00, -9.0838956733280174e+00, -8.7318857937825296e+00, -8.3925055567758680e+00, -8.0637153221240894e+00, -7.7439574806678753e+00, -7.4320079322856305e+00, -7.1268819366716176e+00, -6.8277717420599027e+00, -6.5340037424695643e+00, -6.2450081432878877e+00, -5.9602969222394462e+00, -5.6794474572664067e+00, -5.4020901259356995e+00, -5.1278987508656595e+00, -4.8565831248972122e+00, -4.5878830825516648e+00, -4.3215637389507622e+00, -4.0574116223471224e+00, -3.7952314990808915e+00, -3.5348437409765978e+00, -3.2760821218442313e+00, -3.0187919563650998e+00, -2.7628285142266278e+00, -2.5080556569501704e+00, -2.2543446558260460e+00, -2.0015731577016731e+00, -1.7496242717478425e+00, -1.4983857552446240e+00, -1.2477492802359769e+00, -9.9760976585826833e-01, -7.4786476344152575e-01, -4.9841388325234109e-01, -2.4915825309625725e-01, -0.0000000000000000e+00, 2.4915825309625725e-01, 4.9841388325234109e-01, 7.4786476344152575e-01, 9.9760976585826833e-01, 1.2477492802359769e+00, 1.4983857552446240e+00, 1.7496242717478425e+00, 2.0015731577016731e+00, 2.2543446558260460e+00, 2.5080556569501704e+00, 2.7628285142266278e+00, 3.0187919563650998e+00, 3.2760821218442313e+00, 3.5348437409765978e+00, 3.7952314990808915e+00, 4.0574116223471224e+00, 4.3215637389507622e+00, 4.5878830825516648e+00, 4.8565831248972122e+00, 5.1278987508656595e+00, 5.4020901259356995e+00, 5.6794474572664067e+00, 5.9602969222394462e+00, 6.2450081432878877e+00, 6.5340037424695643e+00, 6.8277717420599027e+00, 7.1268819366716176e+00, 7.4320079322856305e+00, 7.7439574806678753e+00, 8.0637153221240894e+00, 8.3925055567758680e+00, 8.7318857937825296e+00, 9.0838956733280174e+00, 9.4513043921191180e+00, 9.8380533797602858e+00, 1.0250126304140645e+01, 1.0697503453395814e+01, 1.1199565640216381e+01, 1.1807093040777294e+01}, + {-1.1887863560471148e+01, -1.1281694164989270e+01, -1.0780796472469154e+01, -1.0334491039538371e+01, -9.9234351145692425e+00, -9.5376679163020608e+00, 
-9.1712174787264829e+00, -8.8201501286610533e+00, -8.4817021873215737e+00, -8.1538381574634560e+00, -7.8350036783292891e+00, -7.5239772908068669e+00, -7.2197764697589237e+00, -6.9215953729258182e+00, -6.6287620808575998e+00, -6.3407083213299842e+00, -6.0569474734510278e+00, -5.7770582280615690e+00, -5.5006722123151564e+00, -5.2274644551006757e+00, -4.9571459285134285e+00, -4.6894576329528137e+00, -4.4241658477651837e+00, -4.1610582741258364e+00, -3.8999408693873892e+00, -3.6406352232272088e+00, -3.3829763625124016e+00, -3.1268108983731295e+00, -2.8719954485287333e+00, -2.6183952824717842e+00, -2.3658831480742251e+00, -2.1143382465070100e+00, -1.8636453287449284e+00, -1.6136938918510606e+00, -1.3643774570540068e+00, -1.1155929146033490e+00, -8.6723992270397532e-01, -6.1922034962756711e-01, -3.7143774948304431e-01, -1.2379686317313206e-01, 1.2379686317313206e-01, 3.7143774948304431e-01, 6.1922034962756711e-01, 8.6723992270397532e-01, 1.1155929146033490e+00, 1.3643774570540068e+00, 1.6136938918510606e+00, 1.8636453287449284e+00, 2.1143382465070100e+00, 2.3658831480742251e+00, 2.6183952824717842e+00, 2.8719954485287333e+00, 3.1268108983731295e+00, 3.3829763625124016e+00, 3.6406352232272088e+00, 3.8999408693873892e+00, 4.1610582741258364e+00, 4.4241658477651837e+00, 4.6894576329528137e+00, 4.9571459285134285e+00, 5.2274644551006757e+00, 5.5006722123151564e+00, 5.7770582280615690e+00, 6.0569474734510278e+00, 6.3407083213299842e+00, 6.6287620808575998e+00, 6.9215953729258182e+00, 7.2197764697589237e+00, 7.5239772908068669e+00, 7.8350036783292891e+00, 8.1538381574634560e+00, 8.4817021873215737e+00, 8.8201501286610533e+00, 9.1712174787264829e+00, 9.5376679163020608e+00, 9.9234351145692425e+00, 1.0334491039538371e+01, 1.0780796472469154e+01, 1.1281694164989270e+01, 1.1887863560471148e+01}, + {-1.1968119444868723e+01, -1.1363287494653905e+01, -1.0863536193428574e+01, -1.0418285423785731e+01, -1.0008229962827862e+01, -9.6234282602828909e+00, -9.2579198828492224e+00, -8.9077787978104048e+00, -8.5702467579780226e+00, -8.2432923404437446e+00, -7.9253643753426406e+00, -7.6152439941755770e+00, -7.3119508411483105e+00, -7.0146809405398773e+00, -6.7227640181091326e+00, -6.4356332834442265e+00, -6.1528034765082218e+00, -5.8738545599912273e+00, -5.5984193685789059e+00, -5.3261740939400637e+00, -5.0568308420977290e+00, -4.7901317317816865e+00, -4.5258441564113081e+00, -4.2637569369381021e+00, -4.0036771651776295e+00, -3.7454275882737633e+00, -3.4888444214602070e+00, -3.2337755028119282e+00, -2.9800787232025514e+00, -2.7276206792242430e+00, -2.4762755077735275e+00, -2.2259238693277426e+00, -1.9764520533164986e+00, -1.7277511839192137e+00, -1.4797165084468731e+00, -1.2322467534512360e+00, -9.8524353603723147e-01, -7.3861081967444187e-01, -4.9225440521533759e-01, -2.4608144890838049e-01, -0.0000000000000000e+00, 2.4608144890838049e-01, 4.9225440521533759e-01, 7.3861081967444187e-01, 9.8524353603723147e-01, 1.2322467534512360e+00, 1.4797165084468731e+00, 1.7277511839192137e+00, 1.9764520533164986e+00, 2.2259238693277426e+00, 2.4762755077735275e+00, 2.7276206792242430e+00, 2.9800787232025514e+00, 3.2337755028119282e+00, 3.4888444214602070e+00, 3.7454275882737633e+00, 4.0036771651776295e+00, 4.2637569369381021e+00, 4.5258441564113081e+00, 4.7901317317816865e+00, 5.0568308420977290e+00, 5.3261740939400637e+00, 5.5984193685789059e+00, 5.8738545599912273e+00, 6.1528034765082218e+00, 6.4356332834442265e+00, 6.7227640181091326e+00, 7.0146809405398773e+00, 7.3119508411483105e+00, 7.6152439941755770e+00, 
7.9253643753426406e+00, 8.2432923404437446e+00, 8.5702467579780226e+00, 8.9077787978104048e+00, 9.2579198828492224e+00, 9.6234282602828909e+00, 1.0008229962827862e+01, 1.0418285423785731e+01, 1.0863536193428574e+01, 1.1363287494653905e+01, 1.1968119444868723e+01}, + {-1.2047870349469452e+01, -1.1444355851257512e+01, -1.0945733348554121e+01, -1.0501520679503271e+01, -1.0092449631556644e+01, -9.7085976169732788e+00, -9.3440155707490522e+00, -8.9947849881314195e+00, -8.6581529699601738e+00, -8.3320921014281222e+00, -8.0150543482026926e+00, -7.7058233840379780e+00, -7.4034209786511349e+00, -7.1070449818933454e+00, -6.8160267254566378e+00, -6.5298008622888712e+00, -6.2478834549069706e+00, -5.9698556991575886e+00, -5.6953515977146445e+00, -5.4240484642257165e+00, -5.1556594962091573e+00, -4.8899278863041147e+00, -4.6266220952405250e+00, -4.3655320142842475e+00, -4.1064658171809416e+00, -3.8492473525444271e+00, -3.5937139640934332e+00, -3.3397146526239907e+00, -3.0871085130972600e+00, -2.8357633947424321e+00, -2.5855547430080739e+00, -2.3363645905095152e+00, -2.0880806704972801e+00, -1.8405956313004481e+00, -1.5938063340319311e+00, -1.3476132188376881e+00, -1.1019197273183625e+00, -8.5663177059026308e-01, -6.1165723388752291e-01, -3.6690550971654429e-01, -1.2228705241518351e-01, 1.2228705241518351e-01, 3.6690550971654429e-01, 6.1165723388752291e-01, 8.5663177059026308e-01, 1.1019197273183625e+00, 1.3476132188376881e+00, 1.5938063340319311e+00, 1.8405956313004481e+00, 2.0880806704972801e+00, 2.3363645905095152e+00, 2.5855547430080739e+00, 2.8357633947424321e+00, 3.0871085130972600e+00, 3.3397146526239907e+00, 3.5937139640934332e+00, 3.8492473525444271e+00, 4.1064658171809416e+00, 4.3655320142842475e+00, 4.6266220952405250e+00, 4.8899278863041147e+00, 5.1556594962091573e+00, 5.4240484642257165e+00, 5.6953515977146445e+00, 5.9698556991575886e+00, 6.2478834549069706e+00, 6.5298008622888712e+00, 6.8160267254566378e+00, 7.1070449818933454e+00, 7.4034209786511349e+00, 7.7058233840379780e+00, 8.0150543482026926e+00, 8.3320921014281222e+00, 8.6581529699601738e+00, 8.9947849881314195e+00, 9.3440155707490522e+00, 9.7085976169732788e+00, 1.0092449631556644e+01, 1.0501520679503271e+01, 1.0945733348554121e+01, 1.1444355851257512e+01, 1.2047870349469452e+01}, + {-1.2127125632378913e+01, -1.1524909137013976e+01, -1.1027398329668573e+01, -1.0584207668672128e+01, -1.0176105446857637e+01, -9.7931877776907488e+00, -9.4295168046084115e+00, -9.0811814416185097e+00, -8.7454340566434574e+00, -8.4202511789832108e+00, -8.1040878565034831e+00, -7.7957302587642490e+00, -7.4942022390283798e+00, -7.1987034336195581e+00, -6.9085667427912707e+00, -6.6232282263685835e+00, -6.3422052331830958e+00, -6.0650801557222689e+00, -5.7914881274189227e+00, -5.5211075455728480e+00, -5.2536526594233770e+00, -4.9888676939643233e+00, -4.7265221335751297e+00, -4.4664068937399284e+00, -4.2083311812642945e+00, -3.9521198942311013e+00, -3.6976114493318364e+00, -3.4446559506485350e+00, -3.1931136334230796e+00, -2.9428535308492294e+00, -2.6937523228426583e+00, -2.4456933340503899e+00, -2.1985656547350549e+00, -1.9522633630987765e+00, -1.7066848314488341e+00, -1.4617321016098621e+00, -1.2173103173449662e+00, -9.7332720340140344e-01, -7.2969258225037825e-01, -4.8631792072343311e-01, -2.4311589961848629e-01, -0.0000000000000000e+00, 2.4311589961848629e-01, 4.8631792072343311e-01, 7.2969258225037825e-01, 9.7332720340140344e-01, 1.2173103173449662e+00, 1.4617321016098621e+00, 1.7066848314488341e+00, 1.9522633630987765e+00, 2.1985656547350549e+00, 
2.4456933340503899e+00, 2.6937523228426583e+00, 2.9428535308492294e+00, 3.1931136334230796e+00, 3.4446559506485350e+00, 3.6976114493318364e+00, 3.9521198942311013e+00, 4.2083311812642945e+00, 4.4664068937399284e+00, 4.7265221335751297e+00, 4.9888676939643233e+00, 5.2536526594233770e+00, 5.5211075455728480e+00, 5.7914881274189227e+00, 6.0650801557222689e+00, 6.3422052331830958e+00, 6.6232282263685835e+00, 6.9085667427912707e+00, 7.1987034336195581e+00, 7.4942022390283798e+00, 7.7957302587642490e+00, 8.1040878565034831e+00, 8.4202511789832108e+00, 8.7454340566434574e+00, 9.0811814416185097e+00, 9.4295168046084115e+00, 9.7931877776907488e+00, 1.0176105446857637e+01, 1.0584207668672128e+01, 1.1027398329668573e+01, 1.1524909137013976e+01, 1.2127125632378913e+01}, + {-1.2205894366956139e+01, -1.1604956948103077e+01, -1.1108541203015454e+01, -1.0666356908585685e+01, -1.0259208370962089e+01, -9.8772101503410621e+00, -9.5144354431069669e+00, -9.1669804759729860e+00, -8.8321028054925428e+00, -8.5077828432020937e+00, -8.1924786677436181e+00, -7.8849788998338592e+00, -7.5843094360641796e+00, -7.2896716618750510e+00, -7.0004000104832143e+00, -6.7159319139391096e+00, -6.4357859730612450e+00, -6.1595457424759266e+00, -5.8868474512821898e+00, -5.6173705443855964e+00, -5.3508302854168104e+00, -5.0869718928011789e+00, -4.8255658337490095e+00, -4.5664040049460510e+00, -4.3092966007328037e+00, -4.0540695203033383e+00, -3.8005622017862843e+00, -3.5486257974651720e+00, -3.2981216238257258e+00, -3.0489198345947157e+00, -2.8008982758405363e+00, -2.5539414905028606e+00, -2.3079398460883205e+00, -2.0627887641978626e+00, -1.8183880343913310e+00, -1.5746411979031998e+00, -1.3314549890897636e+00, -1.0887388243534069e+00, -8.4640432975900237e-01, -6.0436489970992124e-01, -3.6253527994578344e-01, -1.2083116880501313e-01, 1.2083116880501313e-01, 3.6253527994578344e-01, 6.0436489970992124e-01, 8.4640432975900237e-01, 1.0887388243534069e+00, 1.3314549890897636e+00, 1.5746411979031998e+00, 1.8183880343913310e+00, 2.0627887641978626e+00, 2.3079398460883205e+00, 2.5539414905028606e+00, 2.8008982758405363e+00, 3.0489198345947157e+00, 3.2981216238257258e+00, 3.5486257974651720e+00, 3.8005622017862843e+00, 4.0540695203033383e+00, 4.3092966007328037e+00, 4.5664040049460510e+00, 4.8255658337490095e+00, 5.0869718928011789e+00, 5.3508302854168104e+00, 5.6173705443855964e+00, 5.8868474512821898e+00, 6.1595457424759266e+00, 6.4357859730612450e+00, 6.7159319139391096e+00, 7.0004000104832143e+00, 7.2896716618750510e+00, 7.5843094360641796e+00, 7.8849788998338592e+00, 8.1924786677436181e+00, 8.5077828432020937e+00, 8.8321028054925428e+00, 9.1669804759729860e+00, 9.5144354431069669e+00, 9.8772101503410621e+00, 1.0259208370962089e+01, 1.0666356908585685e+01, 1.1108541203015454e+01, 1.1604956948103077e+01, 1.2205894366956139e+01}, + {-1.2284185353778504e+01, -1.1684508587714936e+01, -1.1189171723313381e+01, -1.0747978586907845e+01, -1.0341769018312585e+01, -9.9606757765591638e+00, -9.5987829596674032e+00, -9.2521940040089010e+00, -8.9181715786909077e+00, -8.5946999176295051e+00, -8.2802400806274559e+00, -7.9735830966018995e+00, -7.6737568668958440e+00, -7.3799644903419361e+00, -7.0915418991761436e+00, -6.8079278647256025e+00, -6.5286422072422212e+00, -6.2532696108544226e+00, -5.9814473672099631e+00, -5.7128559348775809e+00, -5.4472115568012640e+00, -5.1842604083343016e+00, -4.9237739013081612e+00, -4.6655448734152234e+00, -4.4093844640660000e+00, -4.1551195285347458e+00, -3.9025904784769208e+00, -3.6516494632523195e+00, 
-3.4021588258868611e+00, -3.1539897819612150e+00, -2.9070212806051177e+00, -2.6611390150638674e+00, -2.4162345566678476e+00, -2.1722045909628105e+00, -1.9289502385999013e+00, -1.6863764465968691e+00, -1.4443914379551996e+00, -1.2029062094928866e+00, -9.6183406923470449e-01, -7.2109020587034223e-01, -4.8059128370529336e-01, -2.4025505723472676e-01, -0.0000000000000000e+00, 2.4025505723472676e-01, 4.8059128370529336e-01, 7.2109020587034223e-01, 9.6183406923470449e-01, 1.2029062094928866e+00, 1.4443914379551996e+00, 1.6863764465968691e+00, 1.9289502385999013e+00, 2.1722045909628105e+00, 2.4162345566678476e+00, 2.6611390150638674e+00, 2.9070212806051177e+00, 3.1539897819612150e+00, 3.4021588258868611e+00, 3.6516494632523195e+00, 3.9025904784769208e+00, 4.1551195285347458e+00, 4.4093844640660000e+00, 4.6655448734152234e+00, 4.9237739013081612e+00, 5.1842604083343016e+00, 5.4472115568012640e+00, 5.7128559348775809e+00, 5.9814473672099631e+00, 6.2532696108544226e+00, 6.5286422072422212e+00, 6.8079278647256025e+00, 7.0915418991761436e+00, 7.3799644903419361e+00, 7.6737568668958440e+00, 7.9735830966018995e+00, 8.2802400806274559e+00, 8.5946999176295051e+00, 8.9181715786909077e+00, 9.2521940040089010e+00, 9.5987829596674032e+00, 9.9606757765591638e+00, 1.0341769018312585e+01, 1.0747978586907845e+01, 1.1189171723313381e+01, 1.1684508587714936e+01, 1.2284185353778504e+01}, + {-1.2362007131968511e+01, -1.1763573078389516e+01, -1.1269299347041693e+01, -1.0829082575898250e+01, -1.0423797670745445e+01, -1.0043595347880535e+01, -9.6825704596576454e+00, -9.3368335519365662e+00, -9.0036523325638047e+00, -8.6810147998900291e+00, -8.3673849469719066e+00, -8.0615561695659022e+00, -7.7625583367306144e+00, -7.4695962264936551e+00, -7.1820072377109705e+00, -6.8992314496242599e+00, -6.6207898710143054e+00, -6.3462682846048741e+00, -6.0753050133032716e+00, -5.8075814973426141e+00, -5.5428149259789015e+00, -5.2807523972174923e+00, -5.0211662316993868e+00, -4.7638501705179523e+00, -4.5086162584922675e+00, -4.2552922649886131e+00, -4.0037195305892794e+00, -3.7537511542142386e+00, -3.5052504546692158e+00, -3.2580896550268070e+00, -3.0121487491227401e+00, -2.7673145177270055e+00, -2.5234796683078762e+00, -2.2805420772311842e+00, -2.0384041170786049e+00, -1.7969720547840342e+00, -1.5561555086656726e+00, -1.3158669543141910e+00, -1.0760212707898533e+00, -8.3653531976321294e-01, -5.9732755116498426e-01, -3.5831762963623587e-01, -1.1942607662410318e-01, 1.1942607662410318e-01, 3.5831762963623587e-01, 5.9732755116498426e-01, 8.3653531976321294e-01, 1.0760212707898533e+00, 1.3158669543141910e+00, 1.5561555086656726e+00, 1.7969720547840342e+00, 2.0384041170786049e+00, 2.2805420772311842e+00, 2.5234796683078762e+00, 2.7673145177270055e+00, 3.0121487491227401e+00, 3.2580896550268070e+00, 3.5052504546692158e+00, 3.7537511542142386e+00, 4.0037195305892794e+00, 4.2552922649886131e+00, 4.5086162584922675e+00, 4.7638501705179523e+00, 5.0211662316993868e+00, 5.2807523972174923e+00, 5.5428149259789015e+00, 5.8075814973426141e+00, 6.0753050133032716e+00, 6.3462682846048741e+00, 6.6207898710143054e+00, 6.8992314496242599e+00, 7.1820072377109705e+00, 7.4695962264936551e+00, 7.7625583367306144e+00, 8.0615561695659022e+00, 8.3673849469719066e+00, 8.6810147998900291e+00, 9.0036523325638047e+00, 9.3368335519365662e+00, 9.6825704596576454e+00, 1.0043595347880535e+01, 1.0423797670745445e+01, 1.0829082575898250e+01, 1.1269299347041693e+01, 1.1763573078389516e+01, 1.2362007131968511e+01}, + {-1.2439367989923680e+01, -1.1842159173697059e+01, 
-1.1348933245008068e+01, -1.0909678445859965e+01, -1.0505304291834085e+01, -1.0125979221008995e+01, -9.7658086966213720e+00, -9.4209102766018642e+00, -9.0885566358796162e+00, -8.7667394811116139e+00, -8.4539256923192720e+00, -8.1489109922424241e+00, -7.8507271820680931e+00, -7.5585806862536904e+00, -7.2718103393202060e+00, -6.9898574985414994e+00, -6.7122443318572111e+00, -6.4385576912861495e+00, -6.1684369014005451e+00, -5.9015643538974087e+00, -5.6376581532613121e+00, -5.3764662879277241e+00, -5.1177619537428889e+00, -4.8613397599750519e+00, -4.6070126197625800e+00, -4.3546091773629314e+00, -4.1039716607125998e+00, -3.8549540740709145e+00, -3.6074206648573135e+00, -3.3612446132027243e+00, -3.1163069035955786e+00, -2.8724953462701670e+00, -2.6297037223368069e+00, -2.3878310315742213e+00, -2.1467808256453083e+00, -1.9064606125144659e+00, -1.6667813202271460e+00, -1.4276568101012006e+00, -1.1890034308805355e+00, -9.5073960659419110e-01, -7.1278545180861597e-01, -4.7506240870319627e-01, -2.3749290097360490e-01, -0.0000000000000000e+00, 2.3749290097360490e-01, 4.7506240870319627e-01, 7.1278545180861597e-01, 9.5073960659419110e-01, 1.1890034308805355e+00, 1.4276568101012006e+00, 1.6667813202271460e+00, 1.9064606125144659e+00, 2.1467808256453083e+00, 2.3878310315742213e+00, 2.6297037223368069e+00, 2.8724953462701670e+00, 3.1163069035955786e+00, 3.3612446132027243e+00, 3.6074206648573135e+00, 3.8549540740709145e+00, 4.1039716607125998e+00, 4.3546091773629314e+00, 4.6070126197625800e+00, 4.8613397599750519e+00, 5.1177619537428889e+00, 5.3764662879277241e+00, 5.6376581532613121e+00, 5.9015643538974087e+00, 6.1684369014005451e+00, 6.4385576912861495e+00, 6.7122443318572111e+00, 6.9898574985414994e+00, 7.2718103393202060e+00, 7.5585806862536904e+00, 7.8507271820680931e+00, 8.1489109922424241e+00, 8.4539256923192720e+00, 8.7667394811116139e+00, 9.0885566358796162e+00, 9.4209102766018642e+00, 9.7658086966213720e+00, 1.0125979221008995e+01, 1.0505304291834085e+01, 1.0909678445859965e+01, 1.1348933245008068e+01, 1.1842159173697059e+01, 1.2439367989923680e+01}, + {-1.2516275975487387e+01, -1.1920275369301974e+01, -1.1428082314244874e+01, -1.0989775477860821e+01, -1.0586298540449153e+01, -1.0207837432241758e+01, -9.8485080876029141e+00, -9.5044349817531568e+00, -9.1728956871075304e+00, -8.8518855642291054e+00, -8.5398743353481130e+00, -8.2356600117533336e+00, -7.9382762925370942e+00, -7.6469312171655428e+00, -7.3609650262218116e+00, -7.0798203265112649e+00, -6.8030204171920579e+00, -6.5301531917714213e+00, -6.2608589484644162e+00, -5.9948210018972574e+00, -5.7317583424731948e+00, -5.4714198187320084e+00, -5.2135794701554339e+00, -4.9580327411502472e+00, -4.7045933784481448e+00, -4.4530908645538636e+00, -4.2033682759582804e+00, -3.9552804810525908e+00, -3.7086926119853119e+00, -3.4634787590926437e+00, -3.2195208473769141e+00, -2.9767076627641722e+00, -2.7349340022167890e+00, -2.4940999266938628e+00, -2.2541100997915855e+00, -2.0148731979134999e+00, -1.7763013802059482e+00, -1.5383098083876734e+00, -1.3008162081103343e+00, -1.0637404646882309e+00, -8.2700424699148045e-01, -5.9053065405312266e-01, -3.5424387953214642e-01, -1.1806888962751633e-01, 1.1806888962751633e-01, 3.5424387953214642e-01, 5.9053065405312266e-01, 8.2700424699148045e-01, 1.0637404646882309e+00, 1.3008162081103343e+00, 1.5383098083876734e+00, 1.7763013802059482e+00, 2.0148731979134999e+00, 2.2541100997915855e+00, 2.4940999266938628e+00, 2.7349340022167890e+00, 2.9767076627641722e+00, 3.2195208473769141e+00, 3.4634787590926437e+00, 
3.7086926119853119e+00, 3.9552804810525908e+00, 4.2033682759582804e+00, 4.4530908645538636e+00, 4.7045933784481448e+00, 4.9580327411502472e+00, 5.2135794701554339e+00, 5.4714198187320084e+00, 5.7317583424731948e+00, 5.9948210018972574e+00, 6.2608589484644162e+00, 6.5301531917714213e+00, 6.8030204171920579e+00, 7.0798203265112649e+00, 7.3609650262218116e+00, 7.6469312171655428e+00, 7.9382762925370942e+00, 8.2356600117533336e+00, 8.5398743353481130e+00, 8.8518855642291054e+00, 9.1728956871075304e+00, 9.5044349817531568e+00, 9.8485080876029141e+00, 1.0207837432241758e+01, 1.0586298540449153e+01, 1.0989775477860821e+01, 1.1428082314244874e+01, 1.1920275369301974e+01, 1.2516275975487387e+01}, + {-1.2592738905595901e+01, -1.1997929913449418e+01, -1.1506755189277548e+01, -1.1069382675775751e+01, -1.0666789783587026e+01, -1.0289179711107796e+01, -9.9306787276273880e+00, -9.5874181334018580e+00, -9.2566803307032135e+00, -8.9364642812455379e+00, -8.6252425061665043e+00, -8.3218152682134434e+00, -8.0252181314463780e+00, -7.7346607201821760e+00, -7.4494846527302592e+00, -7.1691337582185994e+00, -6.8931324404171299e+00, -6.6210696079053228e+00, -6.3525865059800601e+00, -6.0873673452070678e+00, -5.8251319742404801e+00, -5.5656300731513921e+00, -5.3086364953650067e+00, -5.0539474894081309e+00, -4.8013776030542550e+00, -4.5507571227604720e+00, -4.3019299373155544e+00, -4.0547517407947415e+00, -3.8090885091914664e+00, -3.5648151994628399e+00, -3.3218146305539729e+00, -3.0799765142115443e+00, -2.8391966097343762e+00, -2.5993759817212956e+00, -2.3604203437138307e+00, -2.1222394736491395e+00, -1.8847466894257894e+00, -1.6478583747824140e+00, -1.4114935472024899e+00, -1.1755734607673940e+00, -9.4002123784514680e-01, -7.0476152427011995e-01, -4.6972016327466942e-01, -2.3482388390393241e-01, -0.0000000000000000e+00, 2.3482388390393241e-01, 4.6972016327466942e-01, 7.0476152427011995e-01, 9.4002123784514680e-01, 1.1755734607673940e+00, 1.4114935472024899e+00, 1.6478583747824140e+00, 1.8847466894257894e+00, 2.1222394736491395e+00, 2.3604203437138307e+00, 2.5993759817212956e+00, 2.8391966097343762e+00, 3.0799765142115443e+00, 3.3218146305539729e+00, 3.5648151994628399e+00, 3.8090885091914664e+00, 4.0547517407947415e+00, 4.3019299373155544e+00, 4.5507571227604720e+00, 4.8013776030542550e+00, 5.0539474894081309e+00, 5.3086364953650067e+00, 5.5656300731513921e+00, 5.8251319742404801e+00, 6.0873673452070678e+00, 6.3525865059800601e+00, 6.6210696079053228e+00, 6.8931324404171299e+00, 7.1691337582185994e+00, 7.4494846527302592e+00, 7.7346607201821760e+00, 8.0252181314463780e+00, 8.3218152682134434e+00, 8.6252425061665043e+00, 8.9364642812455379e+00, 9.2566803307032135e+00, 9.5874181334018580e+00, 9.9306787276273880e+00, 1.0289179711107796e+01, 1.0666789783587026e+01, 1.1069382675775751e+01, 1.1506755189277548e+01, 1.1997929913449418e+01, 1.2592738905595901e+01}, + {-1.2668764375434174e+01, -1.2075130816910921e+01, -1.1584960252804917e+01, -1.1148508777693781e+01, -1.0746787108514194e+01, -1.0370015493271170e+01, -1.0012330403392184e+01, -9.6698698743374543e+00, -9.3399210724883908e+00, -9.0204865095226339e+00, -8.7100414635651475e+00, -8.4073884130034546e+00, -8.1115647551404777e+00, -7.8217816701739293e+00, -7.5373821269924042e+00, -7.2578111510465870e+00, -6.9825942253568725e+00, -6.7113212484546603e+00, -6.4436343875167896e+00, -6.1792187234934586e+00, -5.9177949371442047e+00, -5.6591135131207473e+00, -5.4029500908349490e+00, -5.1491016937770269e+00, -4.8973836402140369e+00, -4.6476269884213890e+00, 
-4.3996764055654278e+00, -4.1533883754886318e+00, -3.9086296798928606e+00, -3.6652761017603779e+00, -3.4232113106647768e+00, -3.1823258978578806e+00, -2.9425165353484215e+00, -2.7036852380958849e+00, -2.4657387122776617e+00, -2.2285877756048471e+00, -1.9921468380503871e+00, -1.7563334332534348e+00, -1.5210677923813427e+00, -1.2862724534455954e+00, -1.0518719000409926e+00, -8.1779222425489551e-01, -5.8396080911088377e-01, -3.5030602639582364e-01, -1.1675694609164179e-01, 1.1675694609164179e-01, 3.5030602639582364e-01, 5.8396080911088377e-01, 8.1779222425489551e-01, 1.0518719000409926e+00, 1.2862724534455954e+00, 1.5210677923813427e+00, 1.7563334332534348e+00, 1.9921468380503871e+00, 2.2285877756048471e+00, 2.4657387122776617e+00, 2.7036852380958849e+00, 2.9425165353484215e+00, 3.1823258978578806e+00, 3.4232113106647768e+00, 3.6652761017603779e+00, 3.9086296798928606e+00, 4.1533883754886318e+00, 4.3996764055654278e+00, 4.6476269884213890e+00, 4.8973836402140369e+00, 5.1491016937770269e+00, 5.4029500908349490e+00, 5.6591135131207473e+00, 5.9177949371442047e+00, 6.1792187234934586e+00, 6.4436343875167896e+00, 6.7113212484546603e+00, 6.9825942253568725e+00, 7.2578111510465870e+00, 7.5373821269924042e+00, 7.8217816701739293e+00, 8.1115647551404777e+00, 8.4073884130034546e+00, 8.7100414635651475e+00, 9.0204865095226339e+00, 9.3399210724883908e+00, 9.6698698743374543e+00, 1.0012330403392184e+01, 1.0370015493271170e+01, 1.0746787108514194e+01, 1.1148508777693781e+01, 1.1584960252804917e+01, 1.2075130816910921e+01, 1.2668764375434174e+01}, + {-1.2744359767130547e+01, -1.2151885862422731e+01, -1.1662705645828531e+01, -1.1227162266730099e+01, -1.0826299334271541e+01, -1.0450353932747101e+01, -1.0093472606221523e+01, -9.7518000378530658e+00, -9.4226280941850682e+00, -9.1039627871662461e+00, -8.7942821113015697e+00, -8.4923907260054339e+00, -8.1973278312442819e+00, -7.9083061352456419e+00, -7.6246699314466770e+00, -7.3458654167536865e+00, -7.0714191292405717e+00, -6.8009219334794446e+00, -6.5340168945910211e+00, -6.2703899396887248e+00, -6.0097625569049269e+00, -5.7518860100246414e+00, -5.4965366980951300e+00, -5.2435123921335229e+00, -4.9926291522002995e+00, -4.7437187782456505e+00, -4.4966266840419751e+00, -4.2512101096067996e+00, -4.0073366067337117e+00, -3.7648827465709873e+00, -3.5237330089839838e+00, -3.2837788216598618e+00, -3.0449177232349895e+00, -2.8070526296277643e+00, -2.5700911865913141e+00, -2.3339451945166196e+00, -2.0985300939053997e+00, -1.8637645018342535e+00, -1.6295697912525116e+00, -1.3958697061759588e+00, -1.1625900068179993e+00, -9.2965813948535503e-01, -6.9700292669283159e-01, -4.6455427344836053e-01, -2.3224288604634152e-01, -0.0000000000000000e+00, 2.3224288604634152e-01, 4.6455427344836053e-01, 6.9700292669283159e-01, 9.2965813948535503e-01, 1.1625900068179993e+00, 1.3958697061759588e+00, 1.6295697912525116e+00, 1.8637645018342535e+00, 2.0985300939053997e+00, 2.3339451945166196e+00, 2.5700911865913141e+00, 2.8070526296277643e+00, 3.0449177232349895e+00, 3.2837788216598618e+00, 3.5237330089839838e+00, 3.7648827465709873e+00, 4.0073366067337117e+00, 4.2512101096067996e+00, 4.4966266840419751e+00, 4.7437187782456505e+00, 4.9926291522002995e+00, 5.2435123921335229e+00, 5.4965366980951300e+00, 5.7518860100246414e+00, 6.0097625569049269e+00, 6.2703899396887248e+00, 6.5340168945910211e+00, 6.8009219334794446e+00, 7.0714191292405717e+00, 7.3458654167536865e+00, 7.6246699314466770e+00, 7.9083061352456419e+00, 8.1973278312442819e+00, 8.4923907260054339e+00, 8.7942821113015697e+00, 
9.1039627871662461e+00, 9.4226280941850682e+00, 9.7518000378530658e+00, 1.0093472606221523e+01, 1.0450353932747101e+01, 1.0826299334271541e+01, 1.1227162266730099e+01, 1.1662705645828531e+01, 1.2151885862422731e+01, 1.2744359767130547e+01}, + {-1.2819532258018480e+01, -1.2228202613648115e+01, -1.1739999277265277e+01, -1.1305351381280618e+01, -1.0905335022579179e+01, -1.0530203913474873e+01, -1.0174114544331893e+01, -9.8332181607332707e+00, -9.5048112671603402e+00, -9.1869033275673413e+00, -8.8779750134809063e+00, -8.5768331318717053e+00, -8.2825186558731350e+00, -7.9942457949462566e+00, -7.7113601420959172e+00, -7.4333090418794825e+00, -7.1596200643171857e+00, -6.8898850172400419e+00, -6.6237478409567272e+00, -6.3608952857640544e+00, -6.1010496237476017e+00, -5.8439628737731484e+00, -5.5894121696594157e+00, -5.3371960041051256e+00, -5.0871311519710600e+00, -4.8390501265746098e+00, -4.5927990585018197e+00, -4.3482359124910070e+00, -4.1052289771248800e+00, -3.8636555763680760e+00, -3.6234009627673571e+00, -3.3843573603426855e+00, -3.1464231315109763e+00, -2.9095020472812747e+00, -2.6735026437884462e+00, -2.4383376512469250e+00, -2.2039234837948438e+00, -1.9701797806022634e+00, -1.7370289901408089e+00, -1.5043959907356530e+00, -1.2722077415053921e+00, -1.0403929585875988e+00, -8.0888181218354049e-01, -5.7760564046232032e-01, -3.4649667676368101e-01, -1.1548778684603313e-01, 1.1548778684603313e-01, 3.4649667676368101e-01, 5.7760564046232032e-01, 8.0888181218354049e-01, 1.0403929585875988e+00, 1.2722077415053921e+00, 1.5043959907356530e+00, 1.7370289901408089e+00, 1.9701797806022634e+00, 2.2039234837948438e+00, 2.4383376512469250e+00, 2.6735026437884462e+00, 2.9095020472812747e+00, 3.1464231315109763e+00, 3.3843573603426855e+00, 3.6234009627673571e+00, 3.8636555763680760e+00, 4.1052289771248800e+00, 4.3482359124910070e+00, 4.5927990585018197e+00, 4.8390501265746098e+00, 5.0871311519710600e+00, 5.3371960041051256e+00, 5.5894121696594157e+00, 5.8439628737731484e+00, 6.1010496237476017e+00, 6.3608952857640544e+00, 6.6237478409567272e+00, 6.8898850172400419e+00, 7.1596200643171857e+00, 7.4333090418794825e+00, 7.7113601420959172e+00, 7.9942457949462566e+00, 8.2825186558731350e+00, 8.5768331318717053e+00, 8.8779750134809063e+00, 9.1869033275673413e+00, 9.5048112671603402e+00, 9.8332181607332707e+00, 1.0174114544331893e+01, 1.0530203913474873e+01, 1.0905335022579179e+01, 1.1305351381280618e+01, 1.1739999277265277e+01, 1.2228202613648115e+01, 1.2819532258018480e+01}, + {-1.2894288828491323e+01, -1.2304088423692580e+01, -1.1816848833075053e+01, -1.1383084124753704e+01, -1.0983902488179488e+01, -1.0609574060288383e+01, -1.0254265154452550e+01, -9.9141334955519440e+00, -9.5864801654334606e+00, -9.2693180331543115e+00, -8.9611304090936645e+00, -8.6607262153924776e+00, -8.3671481698791865e+00, -8.0796119574473497e+00, -7.7974644466766474e+00, -7.5201541069686444e+00, -7.2472095182038041e+00, -6.9782234097463292e+00, -6.7128405754384817e+00, -6.4507485669373850e+00, -6.1916704180836000e+00, -5.9353588800666905e+00, -5.6815917979923949e+00, -5.4301683618693133e+00, -5.1809060359441155e+00, -4.9336380202893313e+00, -4.6882111343374202e+00, -4.4444840380621056e+00, -4.2023257256606739e+00, -3.9616142408689083e+00, -3.7222355738045620e+00, -3.4840827074346534e+00, -3.2470547880671274e+00, -3.0110563991586603e+00, -2.7759969215550591e+00, -2.5417899662931847e+00, -2.3083528684815695e+00, -2.0756062326810416e+00, -1.8434735217321803e+00, -1.6118806822031735e+00, -1.3807558006205385e+00, -1.1500287854429532e+00, 
-9.1963107038107428e-01, -6.8949533518145145e-01, -4.5955524040186729e-01, -2.2974517302503578e-01, -0.0000000000000000e+00, 2.2974517302503578e-01, 4.5955524040186729e-01, 6.8949533518145145e-01, 9.1963107038107428e-01, 1.1500287854429532e+00, 1.3807558006205385e+00, 1.6118806822031735e+00, 1.8434735217321803e+00, 2.0756062326810416e+00, 2.3083528684815695e+00, 2.5417899662931847e+00, 2.7759969215550591e+00, 3.0110563991586603e+00, 3.2470547880671274e+00, 3.4840827074346534e+00, 3.7222355738045620e+00, 3.9616142408689083e+00, 4.2023257256606739e+00, 4.4444840380621056e+00, 4.6882111343374202e+00, 4.9336380202893313e+00, 5.1809060359441155e+00, 5.4301683618693133e+00, 5.6815917979923949e+00, 5.9353588800666905e+00, 6.1916704180836000e+00, 6.4507485669373850e+00, 6.7128405754384817e+00, 6.9782234097463292e+00, 7.2472095182038041e+00, 7.5201541069686444e+00, 7.7974644466766474e+00, 8.0796119574473497e+00, 8.3671481698791865e+00, 8.6607262153924776e+00, 8.9611304090936645e+00, 9.2693180331543115e+00, 9.5864801654334606e+00, 9.9141334955519440e+00, 1.0254265154452550e+01, 1.0609574060288383e+01, 1.0983902488179488e+01, 1.1383084124753704e+01, 1.1816848833075053e+01, 1.2304088423692580e+01, 1.2894288828491323e+01}, + {-1.2968636269474320e+01, -1.2379550443198964e+01, -1.1893261784933093e+01, -1.1460368274811241e+01, -1.1062009808653285e+01, -1.0688472749322155e+01, -1.0333933112841928e+01, -9.9945550223243256e+00, -9.6676440779928683e+00, -9.3512165084081822e+00, -9.0437582257661120e+00, -8.7440802360223131e+00, -8.4512269741988870e+00, -8.1644155757609358e+00, -7.8829941618009407e+00, -7.6064123046955778e+00, -7.3341995730571172e+00, -7.0659495970458304e+00, -6.8013080034124762e+00, -6.5399631244304128e+00, -6.2816387346344911e+00, -6.0260882959858302e+00, -5.7730903426736297e+00, -5.5224447390110498e+00, -5.2739696146779718e+00, -5.0274988314577831e+00, -4.7828798713474461e+00, -4.5399720618862149e+00, -4.2986450736710040e+00, -4.0587776392825550e+00, -3.8202564535947250e+00, -3.5829752236268817e+00, -3.3468338423962911e+00, -3.1117376661126466e+00, -2.8775968778776946e+00, -2.6443259240630739e+00, -2.4118430119268002e+00, -2.1800696589332400e+00, -1.9489302857683151e+00, -1.7183518462708216e+00, -1.4882634884931585e+00, -1.2585962419071162e+00, -1.0292827264188049e+00, -8.0025687937913981e-01, -5.7145369719413508e-01, -3.4280898846921665e-01, -1.1425913587653955e-01, 1.1425913587653955e-01, 3.4280898846921665e-01, 5.7145369719413508e-01, 8.0025687937913981e-01, 1.0292827264188049e+00, 1.2585962419071162e+00, 1.4882634884931585e+00, 1.7183518462708216e+00, 1.9489302857683151e+00, 2.1800696589332400e+00, 2.4118430119268002e+00, 2.6443259240630739e+00, 2.8775968778776946e+00, 3.1117376661126466e+00, 3.3468338423962911e+00, 3.5829752236268817e+00, 3.8202564535947250e+00, 4.0587776392825550e+00, 4.2986450736710040e+00, 4.5399720618862149e+00, 4.7828798713474461e+00, 5.0274988314577831e+00, 5.2739696146779718e+00, 5.5224447390110498e+00, 5.7730903426736297e+00, 6.0260882959858302e+00, 6.2816387346344911e+00, 6.5399631244304128e+00, 6.8013080034124762e+00, 7.0659495970458304e+00, 7.3341995730571172e+00, 7.6064123046955778e+00, 7.8829941618009407e+00, 8.1644155757609358e+00, 8.4512269741988870e+00, 8.7440802360223131e+00, 9.0437582257661120e+00, 9.3512165084081822e+00, 9.6676440779928683e+00, 9.9945550223243256e+00, 1.0333933112841928e+01, 1.0688472749322155e+01, 1.1062009808653285e+01, 1.1460368274811241e+01, 1.1893261784933093e+01, 1.2379550443198964e+01, 1.2968636269474320e+01}, + 
{-1.3042581189536424e+01, -1.2454595628047418e+01, -1.1969245398474266e+01, -1.1537211392148828e+01, -1.1139664833741461e+01, -1.0766908117887827e+01, -1.0413126845737839e+01, -1.0074491459554146e+01, -9.7483120204672815e+00, -9.4326080721882803e+00, -9.1258680927746667e+00, -8.8269051416209461e+00, -8.5347653443616345e+00, -8.2486672630612325e+00, -7.9679602491408312e+00, -7.6920949569654464e+00, -7.4206019236498832e+00, -7.1530756603397334e+00, -6.8891626070319019e+00, -6.6285518568795139e+00, -6.3709679051115460e+00, -6.1161649040299730e+00, -5.8639220558946130e+00, -5.6140398775862348e+00, -5.3663371416203249e+00, -5.1206483478982010e+00, -4.8768216162575175e+00, -4.6347169158093715e+00, -4.3942045661421210e+00, -4.1551639597063925e+00, -3.9174824654279452e+00, -3.6810544817710307e+00, -3.4457806137632216e+00, -3.2115669533719382e+00, -2.9783244464395575e+00, -2.7459683323916124e+00, -2.5144176453186349e+00, -2.2835947669364720e+00, -2.0534250234577174e+00, -1.8238363196378788e+00, -1.5947588042551295e+00, -1.3661245620886429e+00, -1.1378673281137517e+00, -9.0992222016050861e-01, -6.8222548670729899e-01, -4.5471426681985894e-01, -2.2732635951125563e-01, -0.0000000000000000e+00, 2.2732635951125563e-01, 4.5471426681985894e-01, 6.8222548670729899e-01, 9.0992222016050861e-01, 1.1378673281137517e+00, 1.3661245620886429e+00, 1.5947588042551295e+00, 1.8238363196378788e+00, 2.0534250234577174e+00, 2.2835947669364720e+00, 2.5144176453186349e+00, 2.7459683323916124e+00, 2.9783244464395575e+00, 3.2115669533719382e+00, 3.4457806137632216e+00, 3.6810544817710307e+00, 3.9174824654279452e+00, 4.1551639597063925e+00, 4.3942045661421210e+00, 4.6347169158093715e+00, 4.8768216162575175e+00, 5.1206483478982010e+00, 5.3663371416203249e+00, 5.6140398775862348e+00, 5.8639220558946130e+00, 6.1161649040299730e+00, 6.3709679051115460e+00, 6.6285518568795139e+00, 6.8891626070319019e+00, 7.1530756603397334e+00, 7.4206019236498832e+00, 7.6920949569654464e+00, 7.9679602491408312e+00, 8.2486672630612325e+00, 8.5347653443616345e+00, 8.8269051416209461e+00, 9.1258680927746667e+00, 9.4326080721882803e+00, 9.7483120204672815e+00, 1.0074491459554146e+01, 1.0413126845737839e+01, 1.0766908117887827e+01, 1.1139664833741461e+01, 1.1537211392148828e+01, 1.1969245398474266e+01, 1.2454595628047418e+01, 1.3042581189536424e+01}, + {-1.3116130021662876e+01, -1.2529230746683540e+01, -1.2044806741134924e+01, -1.1613620828842913e+01, -1.1216875194202160e+01, -1.0844888073853586e+01, -1.0491854539276538e+01, -1.0153951274713688e+01, -9.8284927461917118e+00, -9.5135017694124251e+00, -9.2074693533717600e+00, -8.9092105814595968e+00, -8.6177732442147459e+00, -8.3323773071700593e+00, -8.0523733307197105e+00, -7.7772130310612386e+00, -7.5064278944277563e+00, -7.2396132940083531e+00, -6.9764164642851814e+00, -6.7165272404965179e+00, -6.4596708195551207e+00, -6.2056020247183090e+00, -5.9541007064117260e+00, -5.7049680135255354e+00, -5.4580233400707856e+00, -5.2131018018189597e+00, -4.9700521331667717e+00, -4.7287349203529301e+00, -4.4890211062169945e+00, -4.2507907159031859e+00, -4.0139317636282685e+00, -3.7783393087968626e+00, -3.5439146360273117e+00, -3.3105645385243054e+00, -3.0782006880468189e+00, -2.8467390777247825e+00, -2.6160995263623885e+00, -2.3862052347697795e+00, -2.1569823861931283e+00, -1.9283597841455022e+00, -1.7002685219383502e+00, -1.4726416790227095e+00, -1.2454140399065079e+00, -1.0185218319476799e+00, -7.9190247875401665e-01, -5.6549436626674887e-01, -3.3923661887888523e-01, -1.1306888315145193e-01, 
1.1306888315145193e-01, 3.3923661887888523e-01, 5.6549436626674887e-01, 7.9190247875401665e-01, 1.0185218319476799e+00, 1.2454140399065079e+00, 1.4726416790227095e+00, 1.7002685219383502e+00, 1.9283597841455022e+00, 2.1569823861931283e+00, 2.3862052347697795e+00, 2.6160995263623885e+00, 2.8467390777247825e+00, 3.0782006880468189e+00, 3.3105645385243054e+00, 3.5439146360273117e+00, 3.7783393087968626e+00, 4.0139317636282685e+00, 4.2507907159031859e+00, 4.4890211062169945e+00, 4.7287349203529301e+00, 4.9700521331667717e+00, 5.2131018018189597e+00, 5.4580233400707856e+00, 5.7049680135255354e+00, 5.9541007064117260e+00, 6.2056020247183090e+00, 6.4596708195551207e+00, 6.7165272404965179e+00, 6.9764164642851814e+00, 7.2396132940083531e+00, 7.5064278944277563e+00, 7.7772130310612386e+00, 8.0523733307197105e+00, 8.3323773071700593e+00, 8.6177732442147459e+00, 8.9092105814595968e+00, 9.2074693533717600e+00, 9.5135017694124251e+00, 9.8284927461917118e+00, 1.0153951274713688e+01, 1.0491854539276538e+01, 1.0844888073853586e+01, 1.1216875194202160e+01, 1.1613620828842913e+01, 1.2044806741134924e+01, 1.2529230746683540e+01, 1.3116130021662876e+01}, + {-1.3189289029708117e+01, -1.2603462387096409e+01, -1.2119952689615966e+01, -1.1689603736290559e+01, -1.1293648310231410e+01, -1.0922420304556775e+01, -1.0570124148913189e+01, -1.0232942694191875e+01, -9.9081947567061555e+00, -9.5939063821323529e+00, -9.2885710764670684e+00, -8.9910059185401376e+00, -8.7002603389157791e+00, -8.4155556842608696e+00, -8.1362437033700719e+00, -7.8617771549009925e+00, -7.5916884556157704e+00, -7.3255738226210259e+00, -7.0630812669681537e+00, -6.8039013480672903e+00, -6.5477599464293741e+00, -6.2944125378566032e+00, -6.0436396020679171e+00, -5.7952429005012469e+00, -5.5490424284915560e+00, -5.3048738966808706e+00, -5.0625866320797561e+00, -4.8220418150442850e+00, -4.5831109874666334e+00, -4.3456747816687633e+00, -4.1096218301890062e+00, -3.8748478248038931e+00, -3.6412546993987251e+00, -3.4087499161671784e+00, -3.1772458384281856e+00, -2.9466591763501873e+00, -2.7169104942556110e+00, -2.4879237700815029e+00, -2.2596259991006340e+00, -2.0319468352407690e+00, -1.8048182643389761e+00, -1.5781743044796297e+00, -1.3519507292254074e+00, -1.1260848100881036e+00, -9.0051507502248329e-01, -6.7518108007869271e-01, -4.5002319163025301e-01, -2.2498237681566594e-01, -0.0000000000000000e+00, 2.2498237681566594e-01, 4.5002319163025301e-01, 6.7518108007869271e-01, 9.0051507502248329e-01, 1.1260848100881036e+00, 1.3519507292254074e+00, 1.5781743044796297e+00, 1.8048182643389761e+00, 2.0319468352407690e+00, 2.2596259991006340e+00, 2.4879237700815029e+00, 2.7169104942556110e+00, 2.9466591763501873e+00, 3.1772458384281856e+00, 3.4087499161671784e+00, 3.6412546993987251e+00, 3.8748478248038931e+00, 4.1096218301890062e+00, 4.3456747816687633e+00, 4.5831109874666334e+00, 4.8220418150442850e+00, 5.0625866320797561e+00, 5.3048738966808706e+00, 5.5490424284915560e+00, 5.7952429005012469e+00, 6.0436396020679171e+00, 6.2944125378566032e+00, 6.5477599464293741e+00, 6.8039013480672903e+00, 7.0630812669681537e+00, 7.3255738226210259e+00, 7.5916884556157704e+00, 7.8617771549009925e+00, 8.1362437033700719e+00, 8.4155556842608696e+00, 8.7002603389157791e+00, 8.9910059185401376e+00, 9.2885710764670684e+00, 9.5939063821323529e+00, 9.9081947567061555e+00, 1.0232942694191875e+01, 1.0570124148913189e+01, 1.0922420304556775e+01, 1.1293648310231410e+01, 1.1689603736290559e+01, 1.2119952689615966e+01, 1.2603462387096409e+01, 1.3189289029708117e+01}, + 
{-1.3262064314547260e+01, -1.2677296963466635e+01, -1.2194689936989255e+01, -1.1765167072765909e+01, -1.1369991399473138e+01, -1.0999512285277628e+01, -1.0647943408373985e+01, -1.0311473712742933e+01, -9.9874263117220838e+00, -9.6738304400421331e+00, -9.3691820677047506e+00, -9.0723002412708205e+00, -8.7822360072393444e+00, -8.4982120718321887e+00, -8.2195813524123764e+00, -7.9457976314642735e+00, -7.6763942384382915e+00, -7.4109682169993913e+00, -7.1491683376448565e+00, -6.8906858668688686e+00, -6.6352473515597925e+00, -6.3826089025647779e+00, -6.1325516109861775e+00, -5.8848778323693072e+00, -5.6394081442881152e+00, -5.3959788324149320e+00, -5.1544397956688064e+00, -4.9146527868415708e+00, -4.6764899241060425e+00, -4.4398324229806452e+00, -4.2045695090093798e+00, -3.9705974795569130e+00, -3.7378188893809927e+00, -3.5061418395049322e+00, -3.2754793527162449e+00, -3.0457488220163000e+00, -2.8168715207266257e+00, -2.5887721648595683e+00, -2.3613785198892869e+00, -2.1346210452930161e+00, -1.9084325712330619e+00, -1.6827480025638228e+00, -1.4575040460115227e+00, -1.2326389569154106e+00, -1.0080923023599291e+00, -7.8380473788493388e-01, -5.5971779524889242e-01, -3.3577367894853466e-01, -1.1191506937570589e-01, 1.1191506937570589e-01, 3.3577367894853466e-01, 5.5971779524889242e-01, 7.8380473788493388e-01, 1.0080923023599291e+00, 1.2326389569154106e+00, 1.4575040460115227e+00, 1.6827480025638228e+00, 1.9084325712330619e+00, 2.1346210452930161e+00, 2.3613785198892869e+00, 2.5887721648595683e+00, 2.8168715207266257e+00, 3.0457488220163000e+00, 3.2754793527162449e+00, 3.5061418395049322e+00, 3.7378188893809927e+00, 3.9705974795569130e+00, 4.2045695090093798e+00, 4.4398324229806452e+00, 4.6764899241060425e+00, 4.9146527868415708e+00, 5.1544397956688064e+00, 5.3959788324149320e+00, 5.6394081442881152e+00, 5.8848778323693072e+00, 6.1325516109861775e+00, 6.3826089025647779e+00, 6.6352473515597925e+00, 6.8906858668688686e+00, 7.1491683376448565e+00, 7.4109682169993913e+00, 7.6763942384382915e+00, 7.9457976314642735e+00, 8.2195813524123764e+00, 8.4982120718321887e+00, 8.7822360072393444e+00, 9.0723002412708205e+00, 9.3691820677047506e+00, 9.6738304400421331e+00, 9.9874263117220838e+00, 1.0311473712742933e+01, 1.0647943408373985e+01, 1.0999512285277628e+01, 1.1369991399473138e+01, 1.1765167072765909e+01, 1.2194689936989255e+01, 1.2677296963466635e+01, 1.3262064314547260e+01}, + {-1.3334461819943153e+01, -1.2750740722503373e+01, -1.2269024999467902e+01, -1.1840317610615624e+01, -1.1445911484642805e+01, -1.1076171287300262e+01, -1.0725319838167971e+01, -1.0389552102465858e+01, -1.0066195438589224e+01, -9.7532822304545537e+00, -9.4493108799743251e+00, -9.1531023745391185e+00, -8.8637093532419815e+00, -8.5803558609974342e+00, -8.3023959646056102e+00, -8.0292844524423987e+00, -7.7605555495112331e+00, -7.4958071093975249e+00, -7.2346886456654547e+00, -6.9768921155793651e+00, -6.7221447159938794e+00, -6.4702031761523404e+00, -6.2208491815292408e+00, -5.9738856642890008e+00, -5.7291337661713966e+00, -5.4864303291169465e+00, -5.2456258043992712e+00, -5.0065824967968044e+00, -4.7691730793126910e+00, -4.5332793281004458e+00, -4.2987910379219327e+00, -4.0656050865933535e+00, -3.8336246231287601e+00, -3.6027583591447181e+00, -3.3729199468884627e+00, -3.1440274302474247e+00, -2.9160027574766345e+00, -2.6887713462813925e+00, -2.4622616934204693e+00, -2.2364050222293588e+00, -2.0111349624646375e+00, -1.7863872576859270e+00, -1.5620994960576213e+00, -1.3382108609965631e+00, -1.1146618985362737e+00, 
-8.9139429864040043e-01, -6.6835068799126329e-01, -4.4547443201433412e-01, -2.2270944408436802e-01, -0.0000000000000000e+00, 2.2270944408436802e-01, 4.4547443201433412e-01, 6.6835068799126329e-01, 8.9139429864040043e-01, 1.1146618985362737e+00, 1.3382108609965631e+00, 1.5620994960576213e+00, 1.7863872576859270e+00, 2.0111349624646375e+00, 2.2364050222293588e+00, 2.4622616934204693e+00, 2.6887713462813925e+00, 2.9160027574766345e+00, 3.1440274302474247e+00, 3.3729199468884627e+00, 3.6027583591447181e+00, 3.8336246231287601e+00, 4.0656050865933535e+00, 4.2987910379219327e+00, 4.5332793281004458e+00, 4.7691730793126910e+00, 5.0065824967968044e+00, 5.2456258043992712e+00, 5.4864303291169465e+00, 5.7291337661713966e+00, 5.9738856642890008e+00, 6.2208491815292408e+00, 6.4702031761523404e+00, 6.7221447159938794e+00, 6.9768921155793651e+00, 7.2346886456654547e+00, 7.4958071093975249e+00, 7.7605555495112331e+00, 8.0292844524423987e+00, 8.3023959646056102e+00, 8.5803558609974342e+00, 8.8637093532419815e+00, 9.1531023745391185e+00, 9.4493108799743251e+00, 9.7532822304545537e+00, 1.0066195438589224e+01, 1.0389552102465858e+01, 1.0725319838167971e+01, 1.1076171287300262e+01, 1.1445911484642805e+01, 1.1840317610615624e+01, 1.2269024999467902e+01, 1.2750740722503373e+01, 1.3334461819943153e+01}, + {-1.3406487338144908e+01, -1.2823799749487808e+01, -1.2342964222859672e+01, -1.1915061943114164e+01, -1.1521415400787030e+01, -1.1152404385585125e+01, -1.0802260753684713e+01, -1.0467185421342810e+01, -1.0144509941292846e+01, -9.8322698077779673e+00, -9.5289658233901147e+00, -9.2334208902191612e+00, -8.9446892173254735e+00, -8.6619961681345181e+00, -8.3846969404162657e+00, -8.1122473111627915e+00, -7.8441823844608196e+00, -7.5801008078574874e+00, -7.3196528223045343e+00, -7.0625310602488653e+00, -6.8084633528587952e+00, -6.5572070319215392e+00, -6.3085443611121343e+00, -6.0622788326143020e+00, -5.8182321352035169e+00, -5.5762416493299236e+00, -5.3361583601383602e+00, -5.0978451050891360e+00, -4.8611750917912095e+00, -4.6260306357871555e+00, -4.3923020786826834e+00, -4.1598868551310302e+00, -3.9286886834276706e+00, -3.6986168593184914e+00, -3.4695856364185889e+00, -3.2415136796310131e+00, -3.0143235803311552e+00, -2.7879414239819891e+00, -2.5622964023726080e+00, -2.3373204639068783e+00, -2.1129479963711879e+00, -1.8891155374270083e+00, -1.6657615087415094e+00, -1.4428259702159327e+00, -1.2202503912189531e+00, -9.9797743609810530e-01, -7.7595076154014564e-01, -5.5411482359161690e-01, -3.3241469234223181e-01, -1.1079587242243949e-01, 1.1079587242243949e-01, 3.3241469234223181e-01, 5.5411482359161690e-01, 7.7595076154014564e-01, 9.9797743609810530e-01, 1.2202503912189531e+00, 1.4428259702159327e+00, 1.6657615087415094e+00, 1.8891155374270083e+00, 2.1129479963711879e+00, 2.3373204639068783e+00, 2.5622964023726080e+00, 2.7879414239819891e+00, 3.0143235803311552e+00, 3.2415136796310131e+00, 3.4695856364185889e+00, 3.6986168593184914e+00, 3.9286886834276706e+00, 4.1598868551310302e+00, 4.3923020786826834e+00, 4.6260306357871555e+00, 4.8611750917912095e+00, 5.0978451050891360e+00, 5.3361583601383602e+00, 5.5762416493299236e+00, 5.8182321352035169e+00, 6.0622788326143020e+00, 6.3085443611121343e+00, 6.5572070319215392e+00, 6.8084633528587952e+00, 7.0625310602488653e+00, 7.3196528223045343e+00, 7.5801008078574874e+00, 7.8441823844608196e+00, 8.1122473111627915e+00, 8.3846969404162657e+00, 8.6619961681345181e+00, 8.9446892173254735e+00, 9.2334208902191612e+00, 9.5289658233901147e+00, 9.8322698077779673e+00, 
1.0144509941292846e+01, 1.0467185421342810e+01, 1.0802260753684713e+01, 1.1152404385585125e+01, 1.1521415400787030e+01, 1.1915061943114164e+01, 1.2342964222859672e+01, 1.2823799749487808e+01, 1.3406487338144908e+01}, + {-1.3478146515232797e+01, -1.2896479974039561e+01, -1.2416513788721415e+01, -1.1989406490998284e+01, -1.1596509802200323e+01, -1.1228218466075560e+01, -1.0878773272902121e+01, -1.0544381021362536e+01, -1.0222377409009630e+01, -9.9108010025237636e+00, -9.6081549747716188e+00, -9.3132641171485986e+00, -9.0251841867360021e+00, -8.7431418459354120e+00, -8.4664934056489916e+00, -8.1946956148339627e+00, -7.9272844408189824e+00, -7.6638593097942502e+00, -7.4040711750779735e+00, -7.1476133293942068e+00, -6.8942142232839023e+00, -6.6436317759717189e+00, -6.3956488139470400e+00, -6.1500693736430252e+00, -5.9067156746204947e+00, -5.6654256190158065e+00, -5.4260507083580034e+00, -5.1884542945483902e+00, -4.9525101007153634e+00, -4.7181009617643239e+00, -4.4851177450803661e+00, -4.2534584199474237e+00, -4.0230272504840583e+00, -3.7937340917372753e+00, -3.5654937723650511e+00, -3.3382255503269591e+00, -3.1118526303762444e+00, -2.8863017340447339e+00, -2.6615027143383925e+00, -2.4373882085955381e+00, -2.2138933239623526e+00, -1.9909553507579980e+00, -1.7685134996706828e+00, -1.5465086592745161e+00, -1.3248831708071216e+00, -1.1035806175170297e+00, -8.8254562619156474e-01, -6.6172367872096116e-01, -4.4106093175132588e-01, -2.2050404263430215e-01, -0.0000000000000000e+00, 2.2050404263430215e-01, 4.4106093175132588e-01, 6.6172367872096116e-01, 8.8254562619156474e-01, 1.1035806175170297e+00, 1.3248831708071216e+00, 1.5465086592745161e+00, 1.7685134996706828e+00, 1.9909553507579980e+00, 2.2138933239623526e+00, 2.4373882085955381e+00, 2.6615027143383925e+00, 2.8863017340447339e+00, 3.1118526303762444e+00, 3.3382255503269591e+00, 3.5654937723650511e+00, 3.7937340917372753e+00, 4.0230272504840583e+00, 4.2534584199474237e+00, 4.4851177450803661e+00, 4.7181009617643239e+00, 4.9525101007153634e+00, 5.1884542945483902e+00, 5.4260507083580034e+00, 5.6654256190158065e+00, 5.9067156746204947e+00, 6.1500693736430252e+00, 6.3956488139470400e+00, 6.6436317759717189e+00, 6.8942142232839023e+00, 7.1476133293942068e+00, 7.4040711750779735e+00, 7.6638593097942502e+00, 7.9272844408189824e+00, 8.1946956148339627e+00, 8.4664934056489916e+00, 8.7431418459354120e+00, 9.0251841867360021e+00, 9.3132641171485986e+00, 9.6081549747716188e+00, 9.9108010025237636e+00, 1.0222377409009630e+01, 1.0544381021362536e+01, 1.0878773272902121e+01, 1.1228218466075560e+01, 1.1596509802200323e+01, 1.1989406490998284e+01, 1.2416513788721415e+01, 1.2896479974039561e+01, 1.3478146515232797e+01}, + {-1.3549444856223355e+01, -1.2968787175621387e+01, -1.2489679720231214e+01, -1.2063357508698903e+01, -1.1671201169018351e+01, -1.1303620232659483e+01, -1.0954864323727046e+01, -1.0621146056253156e+01, -1.0299805224248269e+01, -9.9888834298724642e+00, -9.6868861866549612e+00, -9.3926401506071997e+00, -9.1052026055338082e+00, -8.8238014938930043e+00, -8.5477942224789647e+00, -8.2766384961541863e+00, -8.0098711302417200e+00, -7.7470923148599020e+00, -7.4879537012921364e+00, -7.2321492282755919e+00, -6.9794079514507139e+00, -6.7294883630724422e+00, -6.4821738377935736e+00, -6.2372689413028590e+00, -5.9945964085177108e+00, -5.7539946473095069e+00, -5.5153156590327264e+00, -5.2784232927787453e+00, -5.0431917691668700e+00, -4.8095044235715418e+00, -4.5772526293071110e+00, -4.3463348693861139e+00, -4.1166559316947993e+00, -3.8881262072644476e+00, 
-3.6606610751014688e+00, -3.4341803600244734e+00, -3.2086078523280142e+00, -2.9838708799890519e+00, -2.7598999256576846e+00, -2.5366282819075883e+00, -2.3139917392243752e+00, -2.0919283020285095e+00, -1.8703779286996216e+00, -1.6492822921192409e+00, -1.4285845577012724e+00, -1.2082291762513240e+00, -9.8816168930097070e-01, -7.6832854481204382e-01, -5.4867692134760149e-01, -3.2915455896704260e-01, -1.0970959522807897e-01, 1.0970959522807897e-01, 3.2915455896704260e-01, 5.4867692134760149e-01, 7.6832854481204382e-01, 9.8816168930097070e-01, 1.2082291762513240e+00, 1.4285845577012724e+00, 1.6492822921192409e+00, 1.8703779286996216e+00, 2.0919283020285095e+00, 2.3139917392243752e+00, 2.5366282819075883e+00, 2.7598999256576846e+00, 2.9838708799890519e+00, 3.2086078523280142e+00, 3.4341803600244734e+00, 3.6606610751014688e+00, 3.8881262072644476e+00, 4.1166559316947993e+00, 4.3463348693861139e+00, 4.5772526293071110e+00, 4.8095044235715418e+00, 5.0431917691668700e+00, 5.2784232927787453e+00, 5.5153156590327264e+00, 5.7539946473095069e+00, 5.9945964085177108e+00, 6.2372689413028590e+00, 6.4821738377935736e+00, 6.7294883630724422e+00, 6.9794079514507139e+00, 7.2321492282755919e+00, 7.4879537012921364e+00, 7.7470923148599020e+00, 8.0098711302417200e+00, 8.2766384961541863e+00, 8.5477942224789647e+00, 8.8238014938930043e+00, 9.1052026055338082e+00, 9.3926401506071997e+00, 9.6868861866549612e+00, 9.9888834298724642e+00, 1.0299805224248269e+01, 1.0621146056253156e+01, 1.0954864323727046e+01, 1.1303620232659483e+01, 1.1671201169018351e+01, 1.2063357508698903e+01, 1.2489679720231214e+01, 1.2968787175621387e+01, 1.3549444856223355e+01}, + {-1.3620387729947742e+01, -1.3040726988796493e+01, -1.2562467887793943e+01, -1.2136921090287251e+01, -1.1745495813506075e+01, -1.1378616213805854e+01, -1.1030540650989794e+01, -1.0697487488847003e+01, -1.0376800570599057e+01, -1.0066524497824579e+01, -9.7651670958633421e+00, -9.4715568613271639e+00, -9.1847525840655759e+00, -8.9039834682597192e+00, -8.6286079999230569e+00, -8.3580848243237309e+00, -8.0919515900932844e+00, -7.8298092371332197e+00, -7.5713101008750368e+00, -7.3161487524084921e+00, -7.0640548388279587e+00, -6.8147874116676039e+00, -6.5681303797817083e+00, -6.3238888238470219e+00, -6.0818859794767741e+00, -5.8419607451405371e+00, -5.6039656063250805e+00, -5.3677648929830539e+00, -5.1332333061803963e+00, -4.9002546639192408e+00, -4.6687208267203344e+00, -4.4385307716320357e+00, -4.2095897895522105e+00, -3.9818087855776474e+00, -3.7551036658752275e+00, -3.5293947975507334e+00, -3.3046065303600929e+00, -3.0806667710024973e+00, -2.8575066022592246e+00, -2.6350599404753994e+00, -2.4132632258847226e+00, -2.1920551410961067e+00, -1.9713763537322730e+00, -1.7511692797619354e+00, -1.5313778645213059e+00, -1.3119473787947484e+00, -1.0928242276321787e+00, -8.7395576983320977e-01, -6.5529014623361903e-01, -4.3677611509527065e-01, -2.1836289303147782e-01, -0.0000000000000000e+00, 2.1836289303147782e-01, 4.3677611509527065e-01, 6.5529014623361903e-01, 8.7395576983320977e-01, 1.0928242276321787e+00, 1.3119473787947484e+00, 1.5313778645213059e+00, 1.7511692797619354e+00, 1.9713763537322730e+00, 2.1920551410961067e+00, 2.4132632258847226e+00, 2.6350599404753994e+00, 2.8575066022592246e+00, 3.0806667710024973e+00, 3.3046065303600929e+00, 3.5293947975507334e+00, 3.7551036658752275e+00, 3.9818087855776474e+00, 4.2095897895522105e+00, 4.4385307716320357e+00, 4.6687208267203344e+00, 4.9002546639192408e+00, 5.1332333061803963e+00, 5.3677648929830539e+00, 5.6039656063250805e+00, 
5.8419607451405371e+00, 6.0818859794767741e+00, 6.3238888238470219e+00, 6.5681303797817083e+00, 6.8147874116676039e+00, 7.0640548388279587e+00, 7.3161487524084921e+00, 7.5713101008750368e+00, 7.8298092371332197e+00, 8.0919515900932844e+00, 8.3580848243237309e+00, 8.6286079999230569e+00, 8.9039834682597192e+00, 9.1847525840655759e+00, 9.4715568613271639e+00, 9.7651670958633421e+00, 1.0066524497824579e+01, 1.0376800570599057e+01, 1.0697487488847003e+01, 1.1030540650989794e+01, 1.1378616213805854e+01, 1.1745495813506075e+01, 1.2136921090287251e+01, 1.2562467887793943e+01, 1.3040726988796493e+01, 1.3620387729947742e+01}, + {-1.3690980373715535e+01, -1.3112304908251922e+01, -1.2634884014394832e+01, -1.2210103175151124e+01, -1.1819399886057777e+01, -1.1453212768894272e+01, -1.1105808823112232e+01, -1.0773412098098593e+01, -1.0453370440115185e+01, -1.0143731414960465e+01, -9.8430051316625278e+00, -9.5500219040630885e+00, -9.2638420079694495e+00, -8.9836958915100507e+00, -8.7089431037842626e+00, -8.4390432154976232e+00, -8.1735346944358760e+00, -7.9120192166772831e+00, -7.6541497885353431e+00, -7.3996216003602537e+00, -7.1481648776449447e+00, -6.8995392180679298e+00, -6.6535290513692580e+00, -6.4099399596264091e+00, -6.1685956652061904e+00, -5.9293355428005707e+00, -5.6920125471434968e+00, -5.4564914735798906e+00, -5.2226474874947222e+00, -4.9903648726555803e+00, -4.7595359591142765e+00, -4.5300601993847316e+00, -4.3018433678250396e+00, -4.0747968629734785e+00, -3.8488370963629586e+00, -3.6238849543164635e+00, -3.3998653215923187e+00, -3.1767066576408891e+00, -2.9543406177575795e+00, -2.7327017126497148e+00, -2.5117270009375883e+00, -2.2913558099291085e+00, -2.0715294806793496e+00, -1.8521911338989445e+00, -1.6332854537309136e+00, -1.4147584867915231e+00, -1.1965574541809449e+00, -9.7863057442477308e-01, -7.6092689551645798e-01, -5.4339613440036172e-01, -3.2598852237076159e-01, -1.0865465496797615e-01, 1.0865465496797615e-01, 3.2598852237076159e-01, 5.4339613440036172e-01, 7.6092689551645798e-01, 9.7863057442477308e-01, 1.1965574541809449e+00, 1.4147584867915231e+00, 1.6332854537309136e+00, 1.8521911338989445e+00, 2.0715294806793496e+00, 2.2913558099291085e+00, 2.5117270009375883e+00, 2.7327017126497148e+00, 2.9543406177575795e+00, 3.1767066576408891e+00, 3.3998653215923187e+00, 3.6238849543164635e+00, 3.8488370963629586e+00, 4.0747968629734785e+00, 4.3018433678250396e+00, 4.5300601993847316e+00, 4.7595359591142765e+00, 4.9903648726555803e+00, 5.2226474874947222e+00, 5.4564914735798906e+00, 5.6920125471434968e+00, 5.9293355428005707e+00, 6.1685956652061904e+00, 6.4099399596264091e+00, 6.6535290513692580e+00, 6.8995392180679298e+00, 7.1481648776449447e+00, 7.3996216003602537e+00, 7.6541497885353431e+00, 7.9120192166772831e+00, 8.1735346944358760e+00, 8.4390432154976232e+00, 8.7089431037842626e+00, 8.9836958915100507e+00, 9.2638420079694495e+00, 9.5500219040630885e+00, 9.8430051316625278e+00, 1.0143731414960465e+01, 1.0453370440115185e+01, 1.0773412098098593e+01, 1.1105808823112232e+01, 1.1453212768894272e+01, 1.1819399886057777e+01, 1.2210103175151124e+01, 1.2634884014394832e+01, 1.3112304908251922e+01, 1.3690980373715535e+01}, + {-1.3761227897775342e+01, -1.3183526293600599e+01, -1.2706933680714704e+01, -1.2282909553416038e+01, -1.1892919380924928e+01, -1.1527416094254811e+01, -1.1180675238467860e+01, -1.0848926485775479e+01, -1.0529521640346799e+01, -1.0220511197831829e+01, -9.9204075235256379e+00, -9.6280427257475036e+00, -9.3424785467404909e+00, -9.0629466613367704e+00, -8.7888076661013130e+00, 
-8.5195220427131719e+00, -8.2546290644616640e+00, -7.9937311305046359e+00, -7.7364819052916909e+00, -7.4825771858770791e+00, -7.2317477636524030e+00, -6.9837537698848138e+00, -6.7383801424910441e+00, -6.4954329519998799e+00, -6.2547363942623555e+00, -6.0161303065289111e+00, -5.7794680986505123e+00, -5.5446150166977928e+00, -5.3114466751017559e+00, -5.0798478074449882e+00, -4.8497111966094790e+00, -4.6209367530474879e+00, -4.3934307161440911e+00, -4.1671049584554103e+00, -3.9418763763764075e+00, -3.7176663537665213e+00, -3.4944002874250444e+00, -3.2720071651989722e+00, -3.0504191890280565e+00, -2.8295714364637563e+00, -2.6094015552011851e+00, -2.3898494859824693e+00, -2.1708572099022447e+00, -1.9523685166996116e+00, -1.7343287910777165e+00, -1.5166848144697649e+00, -1.2993845799825059e+00, -1.0823771185059250e+00, -8.6561233418988326e-01, -6.4904084766152925e-01, -4.3261384549707677e-01, -2.1628293457217421e-01, -0.0000000000000000e+00, 2.1628293457217421e-01, 4.3261384549707677e-01, 6.4904084766152925e-01, 8.6561233418988326e-01, 1.0823771185059250e+00, 1.2993845799825059e+00, 1.5166848144697649e+00, 1.7343287910777165e+00, 1.9523685166996116e+00, 2.1708572099022447e+00, 2.3898494859824693e+00, 2.6094015552011851e+00, 2.8295714364637563e+00, 3.0504191890280565e+00, 3.2720071651989722e+00, 3.4944002874250444e+00, 3.7176663537665213e+00, 3.9418763763764075e+00, 4.1671049584554103e+00, 4.3934307161440911e+00, 4.6209367530474879e+00, 4.8497111966094790e+00, 5.0798478074449882e+00, 5.3114466751017559e+00, 5.5446150166977928e+00, 5.7794680986505123e+00, 6.0161303065289111e+00, 6.2547363942623555e+00, 6.4954329519998799e+00, 6.7383801424910441e+00, 6.9837537698848138e+00, 7.2317477636524030e+00, 7.4825771858770791e+00, 7.7364819052916909e+00, 7.9937311305046359e+00, 8.2546290644616640e+00, 8.5195220427131719e+00, 8.7888076661013130e+00, 9.0629466613367704e+00, 9.3424785467404909e+00, 9.6280427257475036e+00, 9.9204075235256379e+00, 1.0220511197831829e+01, 1.0529521640346799e+01, 1.0848926485775479e+01, 1.1180675238467860e+01, 1.1527416094254811e+01, 1.1892919380924928e+01, 1.2282909553416038e+01, 1.2706933680714704e+01, 1.3183526293600599e+01, 1.3761227897775342e+01}, + {-1.3831135289582965e+01, -1.3254396373973801e+01, -1.2778622330019754e+01, -1.2355345871125120e+01, -1.1966060141686793e+01, -1.1601232228934139e+01, -1.1255146131451047e+01, -1.0924037082840369e+01, -1.0605260801047601e+01, -1.0296870678005966e+01, -9.9973813085297891e+00, -9.7056265732561915e+00, -9.4206696618824566e+00, -9.1417434592085591e+00, -8.8682095941331820e+00, -8.5995294453241531e+00, -8.3352430784012270e+00, -8.0749536029866285e+00, -7.8183153294116643e+00, -7.5650246493836404e+00, -7.3148129082162541e+00, -7.0674407587544215e+00, -6.8226936349524259e+00, -6.5803780834395011e+00, -6.3403187609122504e+00, -6.1023559541821202e+00, -5.8663435147929848e+00, -5.6321471256242530e+00, -5.3996428356774224e+00, -5.1687158132495581e+00, -4.9392592782592839e+00, -4.7111735825401517e+00, -4.4843654131098756e+00, -4.2587470982328570e+00, -4.0342359998584794e+00, -3.8107539789883282e+00, -3.5882269228863879e+00, -3.3665843249351557e+00, -3.1457589094612213e+00, -2.9256862950849976e+00, -2.7063046911512725e+00, -2.4875546226165683e+00, -2.2693786794419482e+00, -2.0517212870941761e+00, -1.8345284952160181e+00, -1.6177477819054702e+00, -1.4013278713576325e+00, -1.1852185628827800e+00, -9.6937056952866862e-01, -7.5373536471122926e-01, -5.3826503540107018e-01, -3.2291214052800010e-01, -1.0762957335551564e-01, 1.0762957335551564e-01, 
3.2291214052800010e-01, 5.3826503540107018e-01, 7.5373536471122926e-01, 9.6937056952866862e-01, 1.1852185628827800e+00, 1.4013278713576325e+00, 1.6177477819054702e+00, 1.8345284952160181e+00, 2.0517212870941761e+00, 2.2693786794419482e+00, 2.4875546226165683e+00, 2.7063046911512725e+00, 2.9256862950849976e+00, 3.1457589094612213e+00, 3.3665843249351557e+00, 3.5882269228863879e+00, 3.8107539789883282e+00, 4.0342359998584794e+00, 4.2587470982328570e+00, 4.4843654131098756e+00, 4.7111735825401517e+00, 4.9392592782592839e+00, 5.1687158132495581e+00, 5.3996428356774224e+00, 5.6321471256242530e+00, 5.8663435147929848e+00, 6.1023559541821202e+00, 6.3403187609122504e+00, 6.5803780834395011e+00, 6.8226936349524259e+00, 7.0674407587544215e+00, 7.3148129082162541e+00, 7.5650246493836404e+00, 7.8183153294116643e+00, 8.0749536029866285e+00, 8.3352430784012270e+00, 8.5995294453241531e+00, 8.8682095941331820e+00, 9.1417434592085591e+00, 9.4206696618824566e+00, 9.7056265732561915e+00, 9.9973813085297891e+00, 1.0296870678005966e+01, 1.0605260801047601e+01, 1.0924037082840369e+01, 1.1255146131451047e+01, 1.1601232228934139e+01, 1.1966060141686793e+01, 1.2355345871125120e+01, 1.2778622330019754e+01, 1.3254396373973801e+01, 1.3831135289582965e+01}, + {-1.3900707417887173e+01, -1.3324920252415142e+01, -1.2849955272837832e+01, -1.2427417635190753e+01, -1.2038827866477725e+01, -1.1674667060202870e+01, -1.1329227578271460e+01, -1.0998750155541702e+01, -1.0680594380572309e+01, -1.0372816508782414e+01, -1.0073933338405658e+01, -9.7827805008057851e+00, -9.4984226146699928e+00, -9.2200937585148388e+00, -8.9471565789061032e+00, -8.6790733379711611e+00, -8.4153848809401453e+00, -8.1556950157409371e+00, -7.8996586867968652e+00, -7.6469728688944967e+00, -7.3973694497863685e+00, -7.1506095923972852e+00, -6.9064792151159047e+00, -6.6647853288831929e+00, -6.4253530391943388e+00, -6.1880230700488541e+00, -5.9526497019202820e+00, -5.7190990412807672e+00, -5.4872475579716511e+00, -5.2569808406969596e+00, -5.0281925314641187e+00, -4.8007834078342349e+00, -4.5746605880295625e+00, -4.3497368387482860e+00, -4.1259299692968714e+00, -3.9031622986173096e+00, -3.6813601841446331e+00, -3.4604536033170050e+00, -3.2403757800798094e+00, -3.0210628499554302e+00, -2.8024535582518539e+00, -2.5844889868023952e+00, -2.3671123053017529e+00, -2.1502685438583309e+00, -1.9339043838414955e+00, -1.7179679644825636e+00, -1.5024087030036999e+00, -1.2871771263105225e+00, -1.0722247125008730e+00, -8.5750374062088575e-01, -6.4296714724579429e-01, -4.2856838858126434e-01, -2.1426130687493608e-01, -0.0000000000000000e+00, 2.1426130687493608e-01, 4.2856838858126434e-01, 6.4296714724579429e-01, 8.5750374062088575e-01, 1.0722247125008730e+00, 1.2871771263105225e+00, 1.5024087030036999e+00, 1.7179679644825636e+00, 1.9339043838414955e+00, 2.1502685438583309e+00, 2.3671123053017529e+00, 2.5844889868023952e+00, 2.8024535582518539e+00, 3.0210628499554302e+00, 3.2403757800798094e+00, 3.4604536033170050e+00, 3.6813601841446331e+00, 3.9031622986173096e+00, 4.1259299692968714e+00, 4.3497368387482860e+00, 4.5746605880295625e+00, 4.8007834078342349e+00, 5.0281925314641187e+00, 5.2569808406969596e+00, 5.4872475579716511e+00, 5.7190990412807672e+00, 5.9526497019202820e+00, 6.1880230700488541e+00, 6.4253530391943388e+00, 6.6647853288831929e+00, 6.9064792151159047e+00, 7.1506095923972852e+00, 7.3973694497863685e+00, 7.6469728688944967e+00, 7.8996586867968652e+00, 8.1556950157409371e+00, 8.4153848809401453e+00, 8.6790733379711611e+00, 8.9471565789061032e+00, 
9.2200937585148388e+00, 9.4984226146699928e+00, 9.7827805008057851e+00, 1.0073933338405658e+01, 1.0372816508782414e+01, 1.0680594380572309e+01, 1.0998750155541702e+01, 1.1329227578271460e+01, 1.1674667060202870e+01, 1.2038827866477725e+01, 1.2427417635190753e+01, 1.2849955272837832e+01, 1.3324920252415142e+01, 1.3900707417887173e+01}, + {-1.3969949036642459e+01, -1.3395102910086411e+01, -1.2920937691432538e+01, -1.2499130218130038e+01, -1.2111228112984248e+01, -1.1747726328818176e+01, -1.1402925502488745e+01, -1.1073071811228770e+01, -1.0755528671982248e+01, -1.0448355171600436e+01, -1.0150070286259611e+01, -9.8595113770046634e+00, -9.5757444735437325e+00, -9.2980048323218245e+00, -9.0256561033487941e+00, -8.7581614191156962e+00, -8.4950623921731907e+00, -8.2359635170287859e+00, -7.9805203608479554e+00, -7.7284304703735840e+00, -7.4794262647792982e+00, -7.2332694060552951e+00, -6.9897462859257979e+00, -6.7486643683832410e+00, -6.5098491962299123e+00, -6.2731419188661652e+00, -6.0383972335511160e+00, -5.8054816577895725e+00, -5.5742720692284102e+00, -5.3446544634117208e+00, -5.1165228902768884e+00, -4.8897785383006882e+00, -4.6643289413814166e+00, -4.4400872883391642e+00, -4.2169718186716345e+00, -3.9949052911662122e+00, -3.7738145143243407e+00, -3.5536299294390266e+00, -3.3342852386840143e+00, -3.1157170718023619e+00, -2.8978646859830590e+00, -2.6806696943332091e+00, -2.4640758190263061e+00, -2.2480286657622290e+00, -2.0324755166339639e+00, -1.8173651388771286e+00, -1.6026476072949805e+00, -1.3882741384147259e+00, -1.1741969346494687e+00, -9.6036903692102105e-01, -7.4674418434770939e-01, -5.3327667972273107e-01, -3.1992125960608508e-01, -1.0663296792943616e-01, 1.0663296792943616e-01, 3.1992125960608508e-01, 5.3327667972273107e-01, 7.4674418434770939e-01, 9.6036903692102105e-01, 1.1741969346494687e+00, 1.3882741384147259e+00, 1.6026476072949805e+00, 1.8173651388771286e+00, 2.0324755166339639e+00, 2.2480286657622290e+00, 2.4640758190263061e+00, 2.6806696943332091e+00, 2.8978646859830590e+00, 3.1157170718023619e+00, 3.3342852386840143e+00, 3.5536299294390266e+00, 3.7738145143243407e+00, 3.9949052911662122e+00, 4.2169718186716345e+00, 4.4400872883391642e+00, 4.6643289413814166e+00, 4.8897785383006882e+00, 5.1165228902768884e+00, 5.3446544634117208e+00, 5.5742720692284102e+00, 5.8054816577895725e+00, 6.0383972335511160e+00, 6.2731419188661652e+00, 6.5098491962299123e+00, 6.7486643683832410e+00, 6.9897462859257979e+00, 7.2332694060552951e+00, 7.4794262647792982e+00, 7.7284304703735840e+00, 7.9805203608479554e+00, 8.2359635170287859e+00, 8.4950623921731907e+00, 8.7581614191156962e+00, 9.0256561033487941e+00, 9.2980048323218245e+00, 9.5757444735437325e+00, 9.8595113770046634e+00, 1.0150070286259611e+01, 1.0448355171600436e+01, 1.0755528671982248e+01, 1.1073071811228770e+01, 1.1402925502488745e+01, 1.1747726328818176e+01, 1.2111228112984248e+01, 1.2499130218130038e+01, 1.2920937691432538e+01, 1.3395102910086411e+01, 1.3969949036642459e+01}, + {-1.4038864788757728e+01, -1.3464949210295044e+01, -1.2991574644085651e+01, -1.2570488862595592e+01, -1.2183266303224141e+01, -1.1820415634054809e+01, -1.1476245680301457e+01, -1.1147008003906418e+01, -1.0830069808875091e+01, -1.0523492982154455e+01, -1.0225798652986768e+01, -9.9358258915767603e+00, -9.6526421211592019e+00, -9.3754837607621955e+00, -9.1037154500398181e+00, -8.8368011791635279e+00, -8.5742833161234948e+00, -8.3157670306912532e+00, -8.0609085018410500e+00, -7.8094058375754942e+00, -7.5609919779111072e+00, -7.3154290733448306e+00, 
-7.0725039783127066e+00, -6.8320245990954787e+00, -6.5938169048330693e+00, -6.3577224590892367e+00, -6.1235963643451115e+00, -5.8913055371925065e+00, -5.6607272507011190e+00, -5.4317478943805018e+00, -5.2042619126759186e+00, -4.9781708909532618e+00, -4.7533827640966910e+00, -4.5298111276324908e+00, -4.3073746350430024e+00, -4.0859964678941409e+00, -3.8656038677524993e+00, -3.6461277207507310e+00, -3.4275021871760831e+00, -3.2096643696850733e+00, -2.9925540147475451e+00, -2.7761132427418982e+00, -2.5602863027962388e+00, -2.3450193490255717e+00, -2.1302602352750175e+00, -1.9159583258608970e+00, -1.7020643201191179e+00, -1.4885300888347839e+00, -1.2753085208470012e+00, -1.0623533783056693e+00, -8.4961915920820152e-01, -6.3706096596817285e-01, -4.2463437886846966e-01, -2.1229533333143397e-01, -0.0000000000000000e+00, 2.1229533333143397e-01, 4.2463437886846966e-01, 6.3706096596817285e-01, 8.4961915920820152e-01, 1.0623533783056693e+00, 1.2753085208470012e+00, 1.4885300888347839e+00, 1.7020643201191179e+00, 1.9159583258608970e+00, 2.1302602352750175e+00, 2.3450193490255717e+00, 2.5602863027962388e+00, 2.7761132427418982e+00, 2.9925540147475451e+00, 3.2096643696850733e+00, 3.4275021871760831e+00, 3.6461277207507310e+00, 3.8656038677524993e+00, 4.0859964678941409e+00, 4.3073746350430024e+00, 4.5298111276324908e+00, 4.7533827640966910e+00, 4.9781708909532618e+00, 5.2042619126759186e+00, 5.4317478943805018e+00, 5.6607272507011190e+00, 5.8913055371925065e+00, 6.1235963643451115e+00, 6.3577224590892367e+00, 6.5938169048330693e+00, 6.8320245990954787e+00, 7.0725039783127066e+00, 7.3154290733448306e+00, 7.5609919779111072e+00, 7.8094058375754942e+00, 8.0609085018410500e+00, 8.3157670306912532e+00, 8.5742833161234948e+00, 8.8368011791635279e+00, 9.1037154500398181e+00, 9.3754837607621955e+00, 9.6526421211592019e+00, 9.9358258915767603e+00, 1.0225798652986768e+01, 1.0523492982154455e+01, 1.0830069808875091e+01, 1.1147008003906418e+01, 1.1476245680301457e+01, 1.1820415634054809e+01, 1.2183266303224141e+01, 1.2570488862595592e+01, 1.2991574644085651e+01, 1.3464949210295044e+01, 1.4038864788757728e+01}, + {-1.4107459209689143e+01, -1.3534463902352346e+01, -1.3061871069197858e+01, -1.2641498685712268e+01, -1.2254947728119026e+01, -1.1892740438516830e+01, -1.1549193745603484e+01, -1.1220564539543394e+01, -1.0904223770953831e+01, -1.0598236096233554e+01, -1.0301124773392132e+01, -1.0011730561776583e+01, -9.7291222611091452e+00, -9.4525374380792577e+00, -9.1813417085894020e+00, -8.9149999082012474e+00, -8.6530551488522143e+00, -8.3951132646519024e+00, -8.1408310358446965e+00, -7.8899071213998058e+00, -7.6420749720138357e+00, -7.3970972165620390e+00, -7.1547611620163289e+00, -6.9148751466506351e+00, -6.6772655554639746e+00, -6.4417743554625506e+00, -6.2082570433307618e+00, -5.9765809233777674e+00, -5.7466236523236125e+00, -5.5182720014163928e+00, -5.2914207968757996e+00, -5.0659720076635653e+00, -4.8418339557415173e+00, -4.6189206287614857e+00, -4.3971510788768189e+00, -4.1764488943211937e+00, -3.9567417327500811e+00, -3.7379609072207938e+00, -3.5200410172016268e+00, -3.3029196182276346e+00, -3.0865369248200669e+00, -2.8708355421046665e+00, -2.6557602222368328e+00, -2.4412576422971846e+00, -2.2272762007812905e+00, -2.0137658301898180e+00, -1.8006778235437944e+00, -1.5879646729152568e+00, -1.3755799182849759e+00, -1.1634780052233955e+00, -9.5161415004400396e-01, -7.3994421120471532e-01, -5.2842456583606134e-01, -3.1701199035803573e-01, -1.0566354421255454e-01, 1.0566354421255454e-01, 3.1701199035803573e-01, 
5.2842456583606134e-01, 7.3994421120471532e-01, 9.5161415004400396e-01, 1.1634780052233955e+00, 1.3755799182849759e+00, 1.5879646729152568e+00, 1.8006778235437944e+00, 2.0137658301898180e+00, 2.2272762007812905e+00, 2.4412576422971846e+00, 2.6557602222368328e+00, 2.8708355421046665e+00, 3.0865369248200669e+00, 3.3029196182276346e+00, 3.5200410172016268e+00, 3.7379609072207938e+00, 3.9567417327500811e+00, 4.1764488943211937e+00, 4.3971510788768189e+00, 4.6189206287614857e+00, 4.8418339557415173e+00, 5.0659720076635653e+00, 5.2914207968757996e+00, 5.5182720014163928e+00, 5.7466236523236125e+00, 5.9765809233777674e+00, 6.2082570433307618e+00, 6.4417743554625506e+00, 6.6772655554639746e+00, 6.9148751466506351e+00, 7.1547611620163289e+00, 7.3970972165620390e+00, 7.6420749720138357e+00, 7.8899071213998058e+00, 8.1408310358446965e+00, 8.3951132646519024e+00, 8.6530551488522143e+00, 8.9149999082012474e+00, 9.1813417085894020e+00, 9.4525374380792577e+00, 9.7291222611091452e+00, 1.0011730561776583e+01, 1.0301124773392132e+01, 1.0598236096233554e+01, 1.0904223770953831e+01, 1.1220564539543394e+01, 1.1549193745603484e+01, 1.1892740438516830e+01, 1.2254947728119026e+01, 1.2641498685712268e+01, 1.3061871069197858e+01, 1.3534463902352346e+01, 1.4107459209689143e+01}, + {-1.4175736730885069e+01, -1.3603651625271072e+01, -1.3131831789217085e+01, -1.2712164683229908e+01, -1.2326277551871243e+01, -1.1964706072741592e+01, -1.1621775194820279e+01, -1.1293747081147552e+01, -1.0977996389349029e+01, -1.0672590515300062e+01, -1.0376054822035842e+01, -1.0087231738512564e+01, -9.8051914243374672e+00, -9.5291725793451221e+00, -9.2585417826765291e+00, -8.9927647033681737e+00, -8.7313851861824325e+00, -8.4740097190111463e+00, -8.2202956732045873e+00, -7.9699422487875475e+00, -7.7226833973667368e+00, -7.4782822164736231e+00, -7.2365264558624682e+00, -6.9972248759462685e+00, -6.7602042675666434e+00, -6.5253069909368815e+00, -6.2923889264375843e+00, -6.0613177552660416e+00, -5.8319715065921489e+00, -5.6042373217821275e+00, -5.3780103967408355e+00, -5.1531930714178591e+00, -4.9296940416745816e+00, -4.7074276734866194e+00, -4.4863134031967471e+00, -4.2662752104856114e+00, -4.0472411530744239e+00, -3.8291429540521795e+00, -3.6119156342329615e+00, -3.3954971831747680e+00, -3.1798282634899460e+00, -2.9648519438950705e+00, -2.7505134571206762e+00, -2.5367599793568107e+00, -2.3235404283709329e+00, -2.1108052778175836e+00, -1.8985063855784825e+00, -1.6865968342381339e+00, -1.4750307820226600e+00, -1.2637633227153910e+00, -1.0527503532175146e+00, -8.4194844755040910e-01, -6.3131473620181966e-01, -4.2080678980376768e-01, -2.1038250619832910e-01, -0.0000000000000000e+00, 2.1038250619832910e-01, 4.2080678980376768e-01, 6.3131473620181966e-01, 8.4194844755040910e-01, 1.0527503532175146e+00, 1.2637633227153910e+00, 1.4750307820226600e+00, 1.6865968342381339e+00, 1.8985063855784825e+00, 2.1108052778175836e+00, 2.3235404283709329e+00, 2.5367599793568107e+00, 2.7505134571206762e+00, 2.9648519438950705e+00, 3.1798282634899460e+00, 3.3954971831747680e+00, 3.6119156342329615e+00, 3.8291429540521795e+00, 4.0472411530744239e+00, 4.2662752104856114e+00, 4.4863134031967471e+00, 4.7074276734866194e+00, 4.9296940416745816e+00, 5.1531930714178591e+00, 5.3780103967408355e+00, 5.6042373217821275e+00, 5.8319715065921489e+00, 6.0613177552660416e+00, 6.2923889264375843e+00, 6.5253069909368815e+00, 6.7602042675666434e+00, 6.9972248759462685e+00, 7.2365264558624682e+00, 7.4782822164736231e+00, 7.7226833973667368e+00, 7.9699422487875475e+00, 
8.2202956732045873e+00, 8.4740097190111463e+00, 8.7313851861824325e+00, 8.9927647033681737e+00, 9.2585417826765291e+00, 9.5291725793451221e+00, 9.8051914243374672e+00, 1.0087231738512564e+01, 1.0376054822035842e+01, 1.0672590515300062e+01, 1.0977996389349029e+01, 1.1293747081147552e+01, 1.1621775194820279e+01, 1.1964706072741592e+01, 1.2326277551871243e+01, 1.2712164683229908e+01, 1.3131831789217085e+01, 1.3603651625271072e+01, 1.4175736730885069e+01}, + {-1.4243701683090396e+01, -1.3672516911310485e+01, -1.3201461514403192e+01, -1.2782491733501539e+01, -1.2397260816155137e+01, -1.2036317739606817e+01, -1.1693995391536458e+01, -1.1366561153620273e+01, -1.1051393351707578e+01, -1.0746562091821255e+01, -1.0450594818817532e+01, -1.0162335612194752e+01, -9.8808559752619285e+00, -9.6053957268710537e+00, -9.3353223967607271e+00, -9.0701024758844149e+00, -8.8092805310594660e+00, -8.5524636937560086e+00, -8.2993099166213113e+00, -8.0495189311868405e+00, -7.8028251805712348e+00, -7.5589922216241447e+00, -7.3178082375274442e+00, -7.0790824013949303e+00, -6.8426419003295491e+00, -6.6083294779732258e+00, -6.3760013883766673e+00, -6.1455256793035486e+00, -5.9167807417097054e+00, -5.6896540760274723e+00, -5.4640412363607487e+00, -5.2398449216799987e+00, -5.0169741892502993e+00, -4.7953437702964461e+00, -4.5748734716450743e+00, -4.3554876500322477e+00, -4.1371147481089601e+00, -3.9196868830532363e+00, -3.7031394802088755e+00, -3.4874109453955420e+00, -3.2724423705327612e+00, -3.0581772680375412e+00, -2.8445613301276866e+00, -2.6315422097183796e+00, -2.4190693200603040e+00, -2.2070936506509247e+00, -1.9955675972702900e+00, -1.7844448042599628e+00, -1.5736800173872914e+00, -1.3632289458242499e+00, -1.1530481319262951e+00, -9.4309482762671593e-01, -7.3332687636947402e-01, -5.2370259959160714e-01, -3.1418068683720984e-01, -1.0472008864073033e-01, 1.0472008864073033e-01, 3.1418068683720984e-01, 5.2370259959160714e-01, 7.3332687636947402e-01, 9.4309482762671593e-01, 1.1530481319262951e+00, 1.3632289458242499e+00, 1.5736800173872914e+00, 1.7844448042599628e+00, 1.9955675972702900e+00, 2.2070936506509247e+00, 2.4190693200603040e+00, 2.6315422097183796e+00, 2.8445613301276866e+00, 3.0581772680375412e+00, 3.2724423705327612e+00, 3.4874109453955420e+00, 3.7031394802088755e+00, 3.9196868830532363e+00, 4.1371147481089601e+00, 4.3554876500322477e+00, 4.5748734716450743e+00, 4.7953437702964461e+00, 5.0169741892502993e+00, 5.2398449216799987e+00, 5.4640412363607487e+00, 5.6896540760274723e+00, 5.9167807417097054e+00, 6.1455256793035486e+00, 6.3760013883766673e+00, 6.6083294779732258e+00, 6.8426419003295491e+00, 7.0790824013949303e+00, 7.3178082375274442e+00, 7.5589922216241447e+00, 7.8028251805712348e+00, 8.0495189311868405e+00, 8.2993099166213113e+00, 8.5524636937560086e+00, 8.8092805310594660e+00, 9.0701024758844149e+00, 9.3353223967607271e+00, 9.6053957268710537e+00, 9.8808559752619285e+00, 1.0162335612194752e+01, 1.0450594818817532e+01, 1.0746562091821255e+01, 1.1051393351707578e+01, 1.1366561153620273e+01, 1.1693995391536458e+01, 1.2036317739606817e+01, 1.2397260816155137e+01, 1.2782491733501539e+01, 1.3201461514403192e+01, 1.3672516911310485e+01, 1.4243701683090396e+01}, + {-1.4311358299517268e+01, -1.3741064189376445e+01, -1.3270764846437274e+01, -1.2852484601295865e+01, -1.2467902444132251e+01, -1.2107580518550922e+01, -1.1765859570925668e+01, -1.1439012148401694e+01, -1.1124420207060311e+01, -1.0820156534367447e+01, -1.0524750634313888e+01, -1.0237048218321810e+01, -9.9561221176215060e+00, 
-9.6812132563270588e+00, -9.4116901024866948e+00, -9.1470199577543809e+00, -8.8867481005681856e+00, -8.6304822961073278e+00, -8.3778810688446761e+00, -8.1286446726129125e+00, -7.8825080329965438e+00, -7.6392351571886659e+00, -7.3986146528207790e+00, -7.1604560966616368e+00, -6.9245870629045054e+00, -6.6908506692717395e+00, -6.4591035339104934e+00, -6.2292140613060107e+00, -6.0010609940399187e+00, -5.7745321810917920e+00, -5.5495235238436296e+00, -5.3259380689199052e+00, -5.1036852231316345e+00, -4.8826800705576368e+00, -4.6628427755272810e+00, -4.4440980582138394e+00, -4.2263747318887201e+00, -4.0096052927608685e+00, -3.7937255548353970e+00, -3.5786743234488534e+00, -3.3643931021355877e+00, -3.1508258282961985e+00, -2.9379186338110879e+00, -2.7256196272975335e+00, -2.5138786951694905e+00, -2.3026473190429599e+00, -2.0918784073499572e+00, -1.8815261392920377e+00, -1.6715458194887483e+00, -1.4618937418644296e+00, -1.2525270614742765e+00, -1.0434036731020315e+00, -8.3448209557097697e-01, -6.2572136079995633e-01, -4.1708090670945613e-01, -2.0852047314122538e-01, -0.0000000000000000e+00, 2.0852047314122538e-01, 4.1708090670945613e-01, 6.2572136079995633e-01, 8.3448209557097697e-01, 1.0434036731020315e+00, 1.2525270614742765e+00, 1.4618937418644296e+00, 1.6715458194887483e+00, 1.8815261392920377e+00, 2.0918784073499572e+00, 2.3026473190429599e+00, 2.5138786951694905e+00, 2.7256196272975335e+00, 2.9379186338110879e+00, 3.1508258282961985e+00, 3.3643931021355877e+00, 3.5786743234488534e+00, 3.7937255548353970e+00, 4.0096052927608685e+00, 4.2263747318887201e+00, 4.4440980582138394e+00, 4.6628427755272810e+00, 4.8826800705576368e+00, 5.1036852231316345e+00, 5.3259380689199052e+00, 5.5495235238436296e+00, 5.7745321810917920e+00, 6.0010609940399187e+00, 6.2292140613060107e+00, 6.4591035339104934e+00, 6.6908506692717395e+00, 6.9245870629045054e+00, 7.1604560966616368e+00, 7.3986146528207790e+00, 7.6392351571886659e+00, 7.8825080329965438e+00, 8.1286446726129125e+00, 8.3778810688446761e+00, 8.6304822961073278e+00, 8.8867481005681856e+00, 9.1470199577543809e+00, 9.4116901024866948e+00, 9.6812132563270588e+00, 9.9561221176215060e+00, 1.0237048218321810e+01, 1.0524750634313888e+01, 1.0820156534367447e+01, 1.1124420207060311e+01, 1.1439012148401694e+01, 1.1765859570925668e+01, 1.2107580518550922e+01, 1.2467902444132251e+01, 1.2852484601295865e+01, 1.3270764846437274e+01, 1.3741064189376445e+01, 1.4311358299517268e+01}, + {-1.4378710718888687e+01, -1.3809297788283777e+01, -1.3339746281883333e+01, -1.2922147941452449e+01, -1.2538207244299381e+01, -1.2178499369616199e+01, -1.1837372843992933e+01, -1.1511105327917651e+01, -1.1197082370480098e+01, -1.0893379412488740e+01, -1.0598527994882549e+01, -1.0311375442821380e+01, -1.0030995900063404e+01, -9.7566313825866331e+00, -9.4876512847987584e+00, -9.2235237081638886e+00, -8.9637946326267190e+00, -8.7080724475249998e+00, -8.4560162400066048e+00, -8.2073267773259264e+00, -7.9617394588210120e+00, -7.7190187333975366e+00, -7.4789536245148378e+00, -7.2413541039213873e+00, -7.0060481241167620e+00, -6.7728791679610003e+00, -6.5417042085499943e+00, -6.3123919976942711e+00, -6.0848216199144423e+00, -5.8588812627189206e+00, -5.6344671643769209e+00, -5.4114827083623283e+00, -5.1898376397716799e+00, -4.9694473837781068e+00, -4.7502324499094550e+00, -4.5321179088799042e+00, -4.3150329310427544e+00, -4.0989103774038682e+00, -3.8836864356433058e+00, -3.6693002948148599e+00, -3.4556938533892665e+00, -3.2428114561228023e+00, -3.0305996559046307e+00, -2.8190069972914444e+00, 
-2.6079838188988345e+00, -2.3974820722025694e+00, -2.1874551546235876e+00, -1.9778577550389427e+00, -1.7686457100860522e+00, -1.5597758698165716e+00, -1.3512059714147289e+00, -1.1428945198277283e+00, -9.3480067426661317e-01, -7.2688413962779108e-01, -5.1910506196077233e-01, -3.1142392716846806e-01, -1.0380146217422165e-01, 1.0380146217422165e-01, 3.1142392716846806e-01, 5.1910506196077233e-01, 7.2688413962779108e-01, 9.3480067426661317e-01, 1.1428945198277283e+00, 1.3512059714147289e+00, 1.5597758698165716e+00, 1.7686457100860522e+00, 1.9778577550389427e+00, 2.1874551546235876e+00, 2.3974820722025694e+00, 2.6079838188988345e+00, 2.8190069972914444e+00, 3.0305996559046307e+00, 3.2428114561228023e+00, 3.4556938533892665e+00, 3.6693002948148599e+00, 3.8836864356433058e+00, 4.0989103774038682e+00, 4.3150329310427544e+00, 4.5321179088799042e+00, 4.7502324499094550e+00, 4.9694473837781068e+00, 5.1898376397716799e+00, 5.4114827083623283e+00, 5.6344671643769209e+00, 5.8588812627189206e+00, 6.0848216199144423e+00, 6.3123919976942711e+00, 6.5417042085499943e+00, 6.7728791679610003e+00, 7.0060481241167620e+00, 7.2413541039213873e+00, 7.4789536245148378e+00, 7.7190187333975366e+00, 7.9617394588210120e+00, 8.2073267773259264e+00, 8.4560162400066048e+00, 8.7080724475249998e+00, 8.9637946326267190e+00, 9.2235237081638886e+00, 9.4876512847987584e+00, 9.7566313825866331e+00, 1.0030995900063404e+01, 1.0311375442821380e+01, 1.0598527994882549e+01, 1.0893379412488740e+01, 1.1197082370480098e+01, 1.1511105327917651e+01, 1.1837372843992933e+01, 1.2178499369616199e+01, 1.2538207244299381e+01, 1.2922147941452449e+01, 1.3339746281883333e+01, 1.3809297788283777e+01, 1.4378710718888687e+01}, + {-1.4445762988361196e+01, -1.3877221939887596e+01, -1.3408410215509624e+01, -1.2991486302387372e+01, -1.2608179914177892e+01, -1.2249079137323781e+01, -1.1908540201639026e+01, -1.1582845829838529e+01, -1.1269385127541279e+01, -1.0966236161382101e+01, -1.0671932487544689e+01, -1.0385323027156920e+01, -1.0105483221483672e+01, -9.8316561653115180e+00, -9.5632121677809998e+00, -9.2996201195877468e+00, -9.0404266923744707e+00, -8.7852408903904529e+00, -8.5337223546130900e+00, -8.2855723571485260e+00, -8.0405267626925543e+00, -7.7983504535582933e+00, -7.5588328607481419e+00, -7.3217843426652625e+00, -7.0870332216969052e+00, -6.8544233372804051e+00, -6.6238120087139674e+00, -6.3950683261592980e+00, -6.1680717068342963e+00, -5.9427106672279626e+00, -5.7188817726033569e+00, -5.4964887330064514e+00, -5.2754416211185129e+00, -5.0556561920420737e+00, -4.8370532888321796e+00, -4.6195583205220858e+00, -4.4031008017280051e+00, -4.1876139447872083e+00, -3.9730342968900869e+00, -3.7593014158877045e+00, -3.5463575794514748e+00, -3.3341475230768891e+00, -3.1226182030944165e+00, -2.9117185814056987e+00, -2.7013994291239474e+00, -2.4916131466813880e+00, -2.2823135982874576e+00, -2.0734559588903583e+00, -1.8649965720202712e+00, -1.6568928170822563e+00, -1.4491029848262951e+00, -1.2415861598558324e+00, -1.0343021091482230e+00, -8.2721117565385283e-01, -6.2027417611781133e-01, -4.1345230233083557e-01, -2.0670702506645103e-01, -0.0000000000000000e+00, 2.0670702506645103e-01, 4.1345230233083557e-01, 6.2027417611781133e-01, 8.2721117565385283e-01, 1.0343021091482230e+00, 1.2415861598558324e+00, 1.4491029848262951e+00, 1.6568928170822563e+00, 1.8649965720202712e+00, 2.0734559588903583e+00, 2.2823135982874576e+00, 2.4916131466813880e+00, 2.7013994291239474e+00, 2.9117185814056987e+00, 3.1226182030944165e+00, 3.3341475230768891e+00, 3.5463575794514748e+00, 
3.7593014158877045e+00, 3.9730342968900869e+00, 4.1876139447872083e+00, 4.4031008017280051e+00, 4.6195583205220858e+00, 4.8370532888321796e+00, 5.0556561920420737e+00, 5.2754416211185129e+00, 5.4964887330064514e+00, 5.7188817726033569e+00, 5.9427106672279626e+00, 6.1680717068342963e+00, 6.3950683261592980e+00, 6.6238120087139674e+00, 6.8544233372804051e+00, 7.0870332216969052e+00, 7.3217843426652625e+00, 7.5588328607481419e+00, 7.7983504535582933e+00, 8.0405267626925543e+00, 8.2855723571485260e+00, 8.5337223546130900e+00, 8.7852408903904529e+00, 9.0404266923744707e+00, 9.2996201195877468e+00, 9.5632121677809998e+00, 9.8316561653115180e+00, 1.0105483221483672e+01, 1.0385323027156920e+01, 1.0671932487544689e+01, 1.0966236161382101e+01, 1.1269385127541279e+01, 1.1582845829838529e+01, 1.1908540201639026e+01, 1.2249079137323781e+01, 1.2608179914177892e+01, 1.2991486302387372e+01, 1.3408410215509624e+01, 1.3877221939887596e+01, 1.4445762988361196e+01}, + {-1.4512519066332473e+01, -1.3944840782089997e+01, -1.3476760943476538e+01, -1.3060504129456817e+01, -1.2677825043852227e+01, -1.2319324554388889e+01, -1.1979366518555956e+01, -1.1654238671159664e+01, -1.1341333638590701e+01, -1.1038732086359635e+01, -1.0744969564657922e+01, -1.0458896573213373e+01, -1.0179589836134598e+01, -9.9062935142905033e+00, -9.6383788202379357e+00, -9.3753154236236274e+00, -9.1166506782713626e+00, -8.8619941943843266e+00, -8.6110061582143160e+00, -8.3633883384434800e+00, -8.1188770570299589e+00, -7.8772376216979518e+00, -7.6382598630271916e+00, -7.4017545180817317e+00, -7.1675502710629999e+00, -6.9354913097861042e+00, -6.7054352913834911e+00, -6.4772516357915970e+00, -6.2508200841027559e+00, -6.0260294726803698e+00, -5.8027766843552886e+00, -5.5809657459630806e+00, -5.3605070475936927e+00, -5.1413166636713479e+00, -4.9233157596994808e+00, -4.7064300714392884e+00, -4.4905894456231428e+00, -4.2757274331715260e+00, -4.0617809273868284e+00, -3.8486898408169177e+00, -3.6363968154755777e+00, -3.4248469619214639e+00, -3.2139876233679749e+00, -3.0037681615511667e+00, -2.7941397615435219e+00, -2.5850552530853812e+00, -2.3764689463269284e+00, -2.1683364801428588e+00, -1.9606146814080672e+00, -1.7532614338130410e+00, -1.5462355549578937e+00, -1.3394966805987920e+00, -1.1330051550337377e+00, -9.2672192670934395e-01, -7.2060844820888659e-01, -5.1462657984655003e-01, -3.0873849614504811e-01, -1.0290659451494534e-01, 1.0290659451494534e-01, 3.0873849614504811e-01, 5.1462657984655003e-01, 7.2060844820888659e-01, 9.2672192670934395e-01, 1.1330051550337377e+00, 1.3394966805987920e+00, 1.5462355549578937e+00, 1.7532614338130410e+00, 1.9606146814080672e+00, 2.1683364801428588e+00, 2.3764689463269284e+00, 2.5850552530853812e+00, 2.7941397615435219e+00, 3.0037681615511667e+00, 3.2139876233679749e+00, 3.4248469619214639e+00, 3.6363968154755777e+00, 3.8486898408169177e+00, 4.0617809273868284e+00, 4.2757274331715260e+00, 4.4905894456231428e+00, 4.7064300714392884e+00, 4.9233157596994808e+00, 5.1413166636713479e+00, 5.3605070475936927e+00, 5.5809657459630806e+00, 5.8027766843552886e+00, 6.0260294726803698e+00, 6.2508200841027559e+00, 6.4772516357915970e+00, 6.7054352913834911e+00, 6.9354913097861042e+00, 7.1675502710629999e+00, 7.4017545180817317e+00, 7.6382598630271916e+00, 7.8772376216979518e+00, 8.1188770570299589e+00, 8.3633883384434800e+00, 8.6110061582143160e+00, 8.8619941943843266e+00, 9.1166506782713626e+00, 9.3753154236236274e+00, 9.6383788202379357e+00, 9.9062935142905033e+00, 1.0179589836134598e+01, 1.0458896573213373e+01, 
1.0744969564657922e+01, 1.1038732086359635e+01, 1.1341333638590701e+01, 1.1654238671159664e+01, 1.1979366518555956e+01, 1.2319324554388889e+01, 1.2677825043852227e+01, 1.3060504129456817e+01, 1.3476760943476538e+01, 1.3944840782089997e+01, 1.4512519066332473e+01}, + {-1.4578982825139288e+01, -1.4012158361728110e+01, -1.3544802666397535e+01, -1.3129205768185479e+01, -1.2747147119365039e+01, -1.2389240245284292e+01, -1.2049856556961975e+01, -1.1725288752112307e+01, -1.1412932942839976e+01, -1.1110872367128264e+01, -1.0817644548390337e+01, -1.0532101547973223e+01, -1.0253321358511124e+01, -9.9805491945452847e+00, -9.7131571610296650e+00, -9.4506156965670485e+00, -9.1924728279240568e+00, -8.9383387625761230e+00, -8.6878742237708870e+00, -8.4407814687705134e+00, -8.1967972689854154e+00, -7.9556873498473575e+00, -7.7172419338499774e+00, -7.4812721290378645e+00, -7.2476069736794351e+00, -7.0160909961088489e+00, -6.7865821832816833e+00, -6.5589502767074928e+00, -6.3330753329246656e+00, -6.1088464994807214e+00, -5.8861609677875624e+00, -5.6649230721526251e+00, -5.4450435103908958e+00, -5.2264386661630065e+00, -5.0090300168969302e+00, -4.7927436140809663e+00, -4.5775096250454670e+00, -4.3632619272160529e+00, -4.1499377473239889e+00, -3.9374773392776570e+00, -3.7258236953922830e+00, -3.5149222864888690e+00, -3.3047208270435604e+00, -3.0951690621230776e+00, -2.8862185733023908e+00, -2.6778226011448112e+00, -2.4699358821459070e+00, -2.2625144983121754e+00, -2.0555157377720055e+00, -1.8488979650073754e+00, -1.6426204994556326e+00, -1.4366435013663816e+00, -1.2309278639126584e+00, -1.0254351106514223e+00, -8.2012729750837221e-01, -6.1496691852828633e-01, -4.0991681468614050e-01, -2.0494008509748737e-01, -0.0000000000000000e+00, 2.0494008509748737e-01, 4.0991681468614050e-01, 6.1496691852828633e-01, 8.2012729750837221e-01, 1.0254351106514223e+00, 1.2309278639126584e+00, 1.4366435013663816e+00, 1.6426204994556326e+00, 1.8488979650073754e+00, 2.0555157377720055e+00, 2.2625144983121754e+00, 2.4699358821459070e+00, 2.6778226011448112e+00, 2.8862185733023908e+00, 3.0951690621230776e+00, 3.3047208270435604e+00, 3.5149222864888690e+00, 3.7258236953922830e+00, 3.9374773392776570e+00, 4.1499377473239889e+00, 4.3632619272160529e+00, 4.5775096250454670e+00, 4.7927436140809663e+00, 5.0090300168969302e+00, 5.2264386661630065e+00, 5.4450435103908958e+00, 5.6649230721526251e+00, 5.8861609677875624e+00, 6.1088464994807214e+00, 6.3330753329246656e+00, 6.5589502767074928e+00, 6.7865821832816833e+00, 7.0160909961088489e+00, 7.2476069736794351e+00, 7.4812721290378645e+00, 7.7172419338499774e+00, 7.9556873498473575e+00, 8.1967972689854154e+00, 8.4407814687705134e+00, 8.6878742237708870e+00, 8.9383387625761230e+00, 9.1924728279240568e+00, 9.4506156965670485e+00, 9.7131571610296650e+00, 9.9805491945452847e+00, 1.0253321358511124e+01, 1.0532101547973223e+01, 1.0817644548390337e+01, 1.1110872367128264e+01, 1.1412932942839976e+01, 1.1725288752112307e+01, 1.2049856556961975e+01, 1.2389240245284292e+01, 1.2747147119365039e+01, 1.3129205768185479e+01, 1.3544802666397535e+01, 1.4012158361728110e+01, 1.4578982825139288e+01}, + {-1.4645158053651027e+01, -1.4079178637349177e+01, -1.3612539492279215e+01, -1.3197595467366467e+01, -1.2816150525975972e+01, -1.2458830729659516e+01, -1.2120014970184142e+01, -1.1796000859913661e+01, -1.1484187962287965e+01, -1.1182662061890452e+01, -1.0889962635005938e+01, -1.0604943287993811e+01, -1.0326683268027864e+01, -1.0054428831215711e+01, -9.7875529641744929e+00, -9.5255268647413445e+00, 
-9.2678992236538775e+00, -9.0142808372415573e+00, -8.7643329577328277e+00, -8.5177583232401322e+00, -8.2742941470872093e+00, -8.0337065649878543e+00, -7.7957861839727647e+00, -7.5603444756835589e+00, -7.3272108250171302e+00, -7.0962300932900497e+00, -6.8672605896069676e+00, -6.6401723692025154e+00, -6.4148457960045002e+00, -6.1911703204459041e+00, -5.9690434339462293e+00, -5.7483697694039071e+00, -5.5290603231377613e+00, -5.3110317784497383e+00, -5.0942059146888043e+00, -4.8785090886224198e+00, -4.6638717772489988e+00, -4.4502281730478108e+00, -4.2375158241640083e+00, -4.0256753132434273e+00, -3.8146499696239737e+00, -3.6043856104035212e+00, -3.3948303065739314e+00, -3.1859341709648956e+00, -2.9776491652016608e+00, -2.7699289232646307e+00, -2.5627285895601974e+00, -2.3560046696818464e+00, -2.1497148922675668e+00, -1.9438180805509269e+00, -1.7382740323646890e+00, -1.5330434074921915e+00, -1.3280876213767709e+00, -1.1233687442963156e+00, -9.1884940519120828e-01, -7.1449269940145821e-01, -5.1026209962444491e-01, -3.0612136944972701e-01, -1.0203447886287802e-01, 1.0203447886287802e-01, 3.0612136944972701e-01, 5.1026209962444491e-01, 7.1449269940145821e-01, 9.1884940519120828e-01, 1.1233687442963156e+00, 1.3280876213767709e+00, 1.5330434074921915e+00, 1.7382740323646890e+00, 1.9438180805509269e+00, 2.1497148922675668e+00, 2.3560046696818464e+00, 2.5627285895601974e+00, 2.7699289232646307e+00, 2.9776491652016608e+00, 3.1859341709648956e+00, 3.3948303065739314e+00, 3.6043856104035212e+00, 3.8146499696239737e+00, 4.0256753132434273e+00, 4.2375158241640083e+00, 4.4502281730478108e+00, 4.6638717772489988e+00, 4.8785090886224198e+00, 5.0942059146888043e+00, 5.3110317784497383e+00, 5.5290603231377613e+00, 5.7483697694039071e+00, 5.9690434339462293e+00, 6.1911703204459041e+00, 6.4148457960045002e+00, 6.6401723692025154e+00, 6.8672605896069676e+00, 7.0962300932900497e+00, 7.3272108250171302e+00, 7.5603444756835589e+00, 7.7957861839727647e+00, 8.0337065649878543e+00, 8.2742941470872093e+00, 8.5177583232401322e+00, 8.7643329577328277e+00, 9.0142808372415573e+00, 9.2678992236538775e+00, 9.5255268647413445e+00, 9.7875529641744929e+00, 1.0054428831215711e+01, 1.0326683268027864e+01, 1.0604943287993811e+01, 1.0889962635005938e+01, 1.1182662061890452e+01, 1.1484187962287965e+01, 1.1796000859913661e+01, 1.2120014970184142e+01, 1.2458830729659516e+01, 1.2816150525975972e+01, 1.3197595467366467e+01, 1.3612539492279215e+01, 1.4079178637349177e+01, 1.4645158053651027e+01}, + {-1.4711048459763671e+01, -1.4145905481877998e+01, -1.3679975439346320e+01, -1.3265677382038762e+01, -1.2884839551290751e+01, -1.2528100425622796e+01, -1.2189846306095971e+01, -1.1866379672363987e+01, -1.1555103505482034e+01, -1.1254106111274996e+01, -1.0961928898971054e+01, -1.0677427003696076e+01, -1.0399680913497548e+01, -1.0127937914235881e+01, -9.8615718637313439e+00, -9.6000547095956161e+00, -9.3429357978203189e+00, -9.0898265054222858e+00, -8.8403886058470427e+00, -8.5943253105810840e+00, -8.3513742675803542e+00, -8.1113020156791773e+00, -7.8738995393403330e+00, -7.6389786667002593e+00, -7.4063691221380390e+00, -7.1759160927206223e+00, -6.9474782023460993e+00, -6.7209258124599129e+00, -6.4961395866732436e+00, -6.2730092703749003e+00, -6.0514326468076227e+00, -5.8313146389908859e+00, -5.6125665329607717e+00, -5.3951053025257689e+00, -5.1788530194404441e+00, -4.9637363358218023e+00, -4.7496860279573854e+00, -4.5366365925149861e+00, -4.3245258876633645e+00, -4.1132948128290048e+00, -3.9028870218050722e+00, -3.6932486647410911e+00, 
-3.4843281552109961e+00, -3.2760759591109072e+00, -3.0684444025981263e+00, -2.8613874966667088e+00, -2.6548607762763461e+00, -2.4488211522211656e+00, -2.2432267741522884e+00, -2.0380369033597177e+00, -1.8332117940811192e+00, -1.6287125822420414e+00, -1.4245011806478818e+00, -1.2205401797456257e+00, -1.0167927531555376e+00, -8.1322256724173581e-01, -6.0979369404756556e-01, -4.0647052696828645e-01, -2.0321769857092067e-01, -0.0000000000000000e+00, 2.0321769857092067e-01, 4.0647052696828645e-01, 6.0979369404756556e-01, 8.1322256724173581e-01, 1.0167927531555376e+00, 1.2205401797456257e+00, 1.4245011806478818e+00, 1.6287125822420414e+00, 1.8332117940811192e+00, 2.0380369033597177e+00, 2.2432267741522884e+00, 2.4488211522211656e+00, 2.6548607762763461e+00, 2.8613874966667088e+00, 3.0684444025981263e+00, 3.2760759591109072e+00, 3.4843281552109961e+00, 3.6932486647410911e+00, 3.9028870218050722e+00, 4.1132948128290048e+00, 4.3245258876633645e+00, 4.5366365925149861e+00, 4.7496860279573854e+00, 4.9637363358218023e+00, 5.1788530194404441e+00, 5.3951053025257689e+00, 5.6125665329607717e+00, 5.8313146389908859e+00, 6.0514326468076227e+00, 6.2730092703749003e+00, 6.4961395866732436e+00, 6.7209258124599129e+00, 6.9474782023460993e+00, 7.1759160927206223e+00, 7.4063691221380390e+00, 7.6389786667002593e+00, 7.8738995393403330e+00, 8.1113020156791773e+00, 8.3513742675803542e+00, 8.5943253105810840e+00, 8.8403886058470427e+00, 9.0898265054222858e+00, 9.3429357978203189e+00, 9.6000547095956161e+00, 9.8615718637313439e+00, 1.0127937914235881e+01, 1.0399680913497548e+01, 1.0677427003696076e+01, 1.0961928898971054e+01, 1.1254106111274996e+01, 1.1555103505482034e+01, 1.1866379672363987e+01, 1.2189846306095971e+01, 1.2528100425622796e+01, 1.2884839551290751e+01, 1.3265677382038762e+01, 1.3679975439346320e+01, 1.4145905481877998e+01, 1.4711048459763671e+01}, + {-1.4776657672798869e+01, -1.4212342685181815e+01, -1.3747114438757134e+01, -1.3333455576348189e+01, -1.2953218388266750e+01, -1.2597053652892484e+01, -1.2259355010417195e+01, -1.1936429761298314e+01, -1.1625684271126064e+01, -1.1325209342106415e+01, -1.1033548296890791e+01, -1.0749557783474378e+01, -1.0472319517421079e+01, -1.0201081802811933e+01, -9.9352193584733488e+00, -9.6742048725828571e+00, -9.4175883379130294e+00, -9.1649817042418142e+00, -8.9160472587078665e+00, -8.6704886789370494e+00, -8.4280440404817565e+00, -8.1884802783862387e+00, -7.9515887476984757e+00, -7.7171816262142663e+00, -7.4850889709254078e+00, -7.2551562877054536e+00, -7.0272425081913878e+00, -6.8012182928404501e+00, -6.5769645975720454e+00, -6.3543714551491233e+00, -6.1333369328198826e+00, -5.9137662356417078e+00, -5.6955709309901366e+00, -5.4786682744780215e+00, -5.2629806212085075e+00, -5.0484349092048060e+00, -4.8349622041811422e+00, -4.6224972966778903e+00, -4.4109783440817232e+00, -4.2003465512658975e+00, -3.9905458845758908e+00, -3.7815228146971922e+00, -3.5732260846105883e+00, -3.3656064993935964e+00, -3.1586167350865688e+00, -2.9522111642257620e+00, -2.7463456959669994e+00, -2.5409776289935784e+00, -2.3360655156294796e+00, -2.1315690357710211e+00, -1.9274488794124773e+00, -1.7236666366787170e+00, -1.5201846943942978e+00, -1.3169661383169375e+00, -1.1139746602463279e+00, -9.1117446928904511e-01, -7.0853020661847821e-01, -5.0600686311669596e-01, -3.0356969932406813e-01, -1.0118416715318335e-01, 1.0118416715318335e-01, 3.0356969932406813e-01, 5.0600686311669596e-01, 7.0853020661847821e-01, 9.1117446928904511e-01, 1.1139746602463279e+00, 1.3169661383169375e+00, 
1.5201846943942978e+00, 1.7236666366787170e+00, 1.9274488794124773e+00, 2.1315690357710211e+00, 2.3360655156294796e+00, 2.5409776289935784e+00, 2.7463456959669994e+00, 2.9522111642257620e+00, 3.1586167350865688e+00, 3.3656064993935964e+00, 3.5732260846105883e+00, 3.7815228146971922e+00, 3.9905458845758908e+00, 4.2003465512658975e+00, 4.4109783440817232e+00, 4.6224972966778903e+00, 4.8349622041811422e+00, 5.0484349092048060e+00, 5.2629806212085075e+00, 5.4786682744780215e+00, 5.6955709309901366e+00, 5.9137662356417078e+00, 6.1333369328198826e+00, 6.3543714551491233e+00, 6.5769645975720454e+00, 6.8012182928404501e+00, 7.0272425081913878e+00, 7.2551562877054536e+00, 7.4850889709254078e+00, 7.7171816262142663e+00, 7.9515887476984757e+00, 8.1884802783862387e+00, 8.4280440404817565e+00, 8.6704886789370494e+00, 8.9160472587078665e+00, 9.1649817042418142e+00, 9.4175883379130294e+00, 9.6742048725828571e+00, 9.9352193584733488e+00, 1.0201081802811933e+01, 1.0472319517421079e+01, 1.0749557783474378e+01, 1.1033548296890791e+01, 1.1325209342106415e+01, 1.1625684271126064e+01, 1.1936429761298314e+01, 1.2259355010417195e+01, 1.2597053652892484e+01, 1.2953218388266750e+01, 1.3333455576348189e+01, 1.3747114438757134e+01, 1.4212342685181815e+01, 1.4776657672798869e+01}, + {-1.4841989245812462e+01, -1.4278493956537387e+01, -1.3813960337214354e+01, -1.3400934026297357e+01, -1.3021291138101008e+01, -1.2665694635824183e+01, -1.2328545429882384e+01, -1.2006155595899877e+01, -1.1695934851542777e+01, -1.1395976471020912e+01, -1.1104825671283981e+01, -1.0821340597636343e+01, -1.0544604180098764e+01, -1.0273865729711682e+01, -1.0008500816363464e+01, -9.7479828598297100e+00, -9.4918624914245004e+00, -9.2397522259904878e+00, -8.9913148570644346e+00, -8.7462545214071792e+00, -8.5043097153654035e+00, -8.2652477635213177e+00, -8.0288603849063733e+00, -7.7949601003933706e+00, -7.5633772929797445e+00, -7.3339577806748517e+00, -7.1065607960848611e+00, -6.8810572917778128e+00, -6.6573285089186554e+00, -6.4352647603911377e+00, -6.2147643899767431e+00, -5.9957328770520153e+00, -5.7780820623389184e+00, -5.5617294749593142e+00, -5.3465977447386317e+00, -5.1326140866195260e+00, -4.9197098463649933e+00, -4.7078200985868248e+00, -4.4968832896314073e+00, -4.2868409190677941e+00, -4.0776372545120747e+00, -3.8692190753327944e+00, -3.6615354414501788e+00, -3.4545374839947667e+00, -3.2481782150506633e+00, -3.0424123540922232e+00, -2.8371961690442937e+00, -2.6324873301662031e+00, -2.4282447751872818e+00, -2.2244285843140195e+00, -2.0209998638917077e+00, -1.8179206376413908e+00, -1.6151537445099000e+00, -1.4126627422698670e+00, -1.2104118160904516e+00, -1.0083656913702024e+00, -8.0648955018269064e-01, -6.0474895073475177e-01, -4.0310974927746679e-01, -2.0153802394231832e-01, -0.0000000000000000e+00, 2.0153802394231832e-01, 4.0310974927746679e-01, 6.0474895073475177e-01, 8.0648955018269064e-01, 1.0083656913702024e+00, 1.2104118160904516e+00, 1.4126627422698670e+00, 1.6151537445099000e+00, 1.8179206376413908e+00, 2.0209998638917077e+00, 2.2244285843140195e+00, 2.4282447751872818e+00, 2.6324873301662031e+00, 2.8371961690442937e+00, 3.0424123540922232e+00, 3.2481782150506633e+00, 3.4545374839947667e+00, 3.6615354414501788e+00, 3.8692190753327944e+00, 4.0776372545120747e+00, 4.2868409190677941e+00, 4.4968832896314073e+00, 4.7078200985868248e+00, 4.9197098463649933e+00, 5.1326140866195260e+00, 5.3465977447386317e+00, 5.5617294749593142e+00, 5.7780820623389184e+00, 5.9957328770520153e+00, 6.2147643899767431e+00, 6.4352647603911377e+00, 
6.6573285089186554e+00, 6.8810572917778128e+00, 7.1065607960848611e+00, 7.3339577806748517e+00, 7.5633772929797445e+00, 7.7949601003933706e+00, 8.0288603849063733e+00, 8.2652477635213177e+00, 8.5043097153654035e+00, 8.7462545214071792e+00, 8.9913148570644346e+00, 9.2397522259904878e+00, 9.4918624914245004e+00, 9.7479828598297100e+00, 1.0008500816363464e+01, 1.0273865729711682e+01, 1.0544604180098764e+01, 1.0821340597636343e+01, 1.1104825671283981e+01, 1.1395976471020912e+01, 1.1695934851542777e+01, 1.2006155595899877e+01, 1.2328545429882384e+01, 1.2665694635824183e+01, 1.3021291138101008e+01, 1.3400934026297357e+01, 1.3813960337214354e+01, 1.4278493956537387e+01, 1.4841989245812462e+01}, + {-1.4907046657816622e+01, -1.4344362927004781e+01, -1.3880516899476401e+01, -1.3468116622389784e+01, -1.3089061813006204e+01, -1.2734027506319505e+01, -1.2397421815284703e+01, -1.2075561545881943e+01, -1.1765859735997486e+01, -1.1466412107936478e+01, -1.1175765754204676e+01, -1.0892780302181299e+01, -1.0616539883571752e+01, -1.0346294805375646e+01, -1.0081421478842227e+01, -9.8213940466086171e+00, -9.5657637705148062e+00, -9.3141437229916662e+00, -9.0661971968977273e+00, -8.8216287813440992e+00, -8.5801773868921547e+00, -8.3416107212171422e+00, -8.1057208609652847e+00, -7.8723206637443432e+00, -7.6412408321991903e+00, -7.4123274900628546e+00, -7.1854401644106494e+00, -6.9604500933022759e+00, -6.7372387963808658e+00, -6.5156968597076492e+00, -6.2957228964512764e+00, -6.0772226529322166e+00, -5.8601082355882221e+00, -5.6442974391377119e+00, -5.4297131599071493e+00, -5.2162828812008781e+00, -5.0039382199076590e+00, -4.7926145253924428e+00, -4.5822505232162971e+00, -4.3727879974390058e+00, -4.1641715062468725e+00, -3.9563481264582303e+00, -3.7492672231264832e+00, -3.5428802410129703e+00, -3.3371405151612126e+00, -3.1320030981875178e+00, -2.9274246022241841e+00, -2.7233630537216094e+00, -2.5197777595433375e+00, -2.3166291829805914e+00, -2.1138788284758951e+00, -1.9114891339837408e+00, -1.7094233700136701e+00, -1.5076455445008548e+00, -1.3061203127337706e+00, -1.1048128916400251e+00, -9.0368897779154544e-01, -7.0271466854059361e-01, -5.0185638573956481e-01, -3.0108080153124556e-01, -1.0035476572283220e-01, 1.0035476572283220e-01, 3.0108080153124556e-01, 5.0185638573956481e-01, 7.0271466854059361e-01, 9.0368897779154544e-01, 1.1048128916400251e+00, 1.3061203127337706e+00, 1.5076455445008548e+00, 1.7094233700136701e+00, 1.9114891339837408e+00, 2.1138788284758951e+00, 2.3166291829805914e+00, 2.5197777595433375e+00, 2.7233630537216094e+00, 2.9274246022241841e+00, 3.1320030981875178e+00, 3.3371405151612126e+00, 3.5428802410129703e+00, 3.7492672231264832e+00, 3.9563481264582303e+00, 4.1641715062468725e+00, 4.3727879974390058e+00, 4.5822505232162971e+00, 4.7926145253924428e+00, 5.0039382199076590e+00, 5.2162828812008781e+00, 5.4297131599071493e+00, 5.6442974391377119e+00, 5.8601082355882221e+00, 6.0772226529322166e+00, 6.2957228964512764e+00, 6.5156968597076492e+00, 6.7372387963808658e+00, 6.9604500933022759e+00, 7.1854401644106494e+00, 7.4123274900628546e+00, 7.6412408321991903e+00, 7.8723206637443432e+00, 8.1057208609652847e+00, 8.3416107212171422e+00, 8.5801773868921547e+00, 8.8216287813440992e+00, 9.0661971968977273e+00, 9.3141437229916662e+00, 9.5657637705148062e+00, 9.8213940466086171e+00, 1.0081421478842227e+01, 1.0346294805375646e+01, 1.0616539883571752e+01, 1.0892780302181299e+01, 1.1175765754204676e+01, 1.1466412107936478e+01, 1.1765859735997486e+01, 1.2075561545881943e+01, 1.2397421815284703e+01, 
1.2734027506319505e+01, 1.3089061813006204e+01, 1.3468116622389784e+01, 1.3880516899476401e+01, 1.4344362927004781e+01, 1.4907046657816622e+01}, + {-1.4971833315919479e+01, -1.4409953151712163e+01, -1.3946787810773682e+01, -1.3535007172173161e+01, -1.3156534338879860e+01, -1.2802056306622088e+01, -1.2465988324400744e+01, -1.2144651884544359e+01, -1.1835463313889939e+01, -1.1536520759384242e+01, -1.1246373170717687e+01, -1.0963881642425228e+01, -1.0688131495402112e+01, -1.0418374021858449e+01, -1.0153986464937166e+01, -9.8944436816224322e+00, -9.6392975564791730e+00, -9.3881617122605014e+00, -9.1406999342794091e+00, -8.8966172574222746e+00, -8.6556530000977201e+00, -8.4175752468453187e+00, -8.1821764257789713e+00, -7.9492697251276718e+00, -7.7186861610617425e+00, -7.4902721568710229e+00, -7.2638875278553732e+00, -7.0394037912138101e+00, -6.8167027385795480e+00, -6.5956752225409012e+00, -6.3762201188154259e+00, -6.1582434336162883e+00, -5.9416575318080138e+00, -5.7263804661537199e+00, -5.5123353916408391e+00, -5.2994500517810534e+00, -5.0876563260932981e+00, -4.8768898298307777e+00, -4.6670895585055892e+00, -4.4581975709748294e+00, -4.2501587058390200e+00, -4.0429203267128022e+00, -3.8364320925946052e+00, -3.6306457501139446e+00, -3.4255149448939752e+00, -3.2209950496501372e+00, -3.0170430069668734e+00, -2.8136171849644747e+00, -2.6106772442959039e+00, -2.4081840151061349e+00, -2.2060993827498390e+00, -2.0043861812019168e+00, -1.8030080932132322e+00, -1.6019295563640745e+00, -1.4011156742530042e+00, -1.2005321321308888e+00, -1.0001451163508825e+00, -7.9992123705629015e-01, -5.9982745357081801e-01, -3.9983100199064880e-01, -1.9989932450580150e-01, -0.0000000000000000e+00, 1.9989932450580150e-01, 3.9983100199064880e-01, 5.9982745357081801e-01, 7.9992123705629015e-01, 1.0001451163508825e+00, 1.2005321321308888e+00, 1.4011156742530042e+00, 1.6019295563640745e+00, 1.8030080932132322e+00, 2.0043861812019168e+00, 2.2060993827498390e+00, 2.4081840151061349e+00, 2.6106772442959039e+00, 2.8136171849644747e+00, 3.0170430069668734e+00, 3.2209950496501372e+00, 3.4255149448939752e+00, 3.6306457501139446e+00, 3.8364320925946052e+00, 4.0429203267128022e+00, 4.2501587058390200e+00, 4.4581975709748294e+00, 4.6670895585055892e+00, 4.8768898298307777e+00, 5.0876563260932981e+00, 5.2994500517810534e+00, 5.5123353916408391e+00, 5.7263804661537199e+00, 5.9416575318080138e+00, 6.1582434336162883e+00, 6.3762201188154259e+00, 6.5956752225409012e+00, 6.8167027385795480e+00, 7.0394037912138101e+00, 7.2638875278553732e+00, 7.4902721568710229e+00, 7.7186861610617425e+00, 7.9492697251276718e+00, 8.1821764257789713e+00, 8.4175752468453187e+00, 8.6556530000977201e+00, 8.8966172574222746e+00, 9.1406999342794091e+00, 9.3881617122605014e+00, 9.6392975564791730e+00, 9.8944436816224322e+00, 1.0153986464937166e+01, 1.0418374021858449e+01, 1.0688131495402112e+01, 1.0963881642425228e+01, 1.1246373170717687e+01, 1.1536520759384242e+01, 1.1835463313889939e+01, 1.2144651884544359e+01, 1.2465988324400744e+01, 1.2802056306622088e+01, 1.3156534338879860e+01, 1.3535007172173161e+01, 1.3946787810773682e+01, 1.4409953151712163e+01, 1.4971833315919479e+01}, + {-1.5036352557386012e+01, -1.4475268112055591e+01, -1.4012776679134205e+01, -1.3601609402686369e+01, -1.3223712557871778e+01, -1.2869784992006130e+01, -1.2534249024802044e+01, -1.2213430791710801e+01, -1.1904749877820654e+01, -1.1606306831707778e+01, -1.1316652442235313e+01, -1.1034649256479808e+01, -1.0759383772299557e+01, -1.0490108256609062e+01, -1.0226200775202839e+01, 
-9.9671368911110161e+00, -9.7124691040283810e+00, -9.4618115799659321e+00, -9.2148285900237301e+00, -8.9712256084886892e+00, -8.7307423554516284e+00, -8.4931472862936879e+00, -8.2582331746602566e+00, -8.0258135335048770e+00, -7.7957196866256542e+00, -7.5677983509350675e+00, -7.3419096239550905e+00, -7.1179252959243602e+00, -6.8957274242423345e+00, -6.6752071216509830e+00, -6.4562635198693066e+00, -6.2388028782578386e+00, -6.0227378131410365e+00, -5.8079866281147483e+00, -5.5944727293462684e+00, -5.3821241127798674e+00, -5.1708729124709514e+00, -4.9606550011220074e+00, -4.7514096353844053e+00, -4.5430791396990857e+00, -4.3356086234350091e+00, -4.1289457268926073e+00, -3.9230403924055310e+00, -3.7178446573254824e+00, -3.5133124661335695e+00, -3.3093994993045657e+00, -3.1060630168714907e+00, -2.9032617149079720e+00, -2.7009555933736791e+00, -2.4991058339609178e+00, -2.2976746867440045e+00, -2.0966253645719419e+00, -1.8959219442631448e+00, -1.6955292737615766e+00, -1.4954128844992702e+00, -1.2955389082829973e+00, -1.0958739980844798e+00, -8.9638525216554721e-01, -6.9704014101314027e-01, -4.9780643659506446e-01, -2.9865214347665664e-01, -9.9545431361682174e-02, 9.9545431361682174e-02, 2.9865214347665664e-01, 4.9780643659506446e-01, 6.9704014101314027e-01, 8.9638525216554721e-01, 1.0958739980844798e+00, 1.2955389082829973e+00, 1.4954128844992702e+00, 1.6955292737615766e+00, 1.8959219442631448e+00, 2.0966253645719419e+00, 2.2976746867440045e+00, 2.4991058339609178e+00, 2.7009555933736791e+00, 2.9032617149079720e+00, 3.1060630168714907e+00, 3.3093994993045657e+00, 3.5133124661335695e+00, 3.7178446573254824e+00, 3.9230403924055310e+00, 4.1289457268926073e+00, 4.3356086234350091e+00, 4.5430791396990857e+00, 4.7514096353844053e+00, 4.9606550011220074e+00, 5.1708729124709514e+00, 5.3821241127798674e+00, 5.5944727293462684e+00, 5.8079866281147483e+00, 6.0227378131410365e+00, 6.2388028782578386e+00, 6.4562635198693066e+00, 6.6752071216509830e+00, 6.8957274242423345e+00, 7.1179252959243602e+00, 7.3419096239550905e+00, 7.5677983509350675e+00, 7.7957196866256542e+00, 8.0258135335048770e+00, 8.2582331746602566e+00, 8.4931472862936879e+00, 8.7307423554516284e+00, 8.9712256084886892e+00, 9.2148285900237301e+00, 9.4618115799659321e+00, 9.7124691040283810e+00, 9.9671368911110161e+00, 1.0226200775202839e+01, 1.0490108256609062e+01, 1.0759383772299557e+01, 1.1034649256479808e+01, 1.1316652442235313e+01, 1.1606306831707778e+01, 1.1904749877820654e+01, 1.2213430791710801e+01, 1.2534249024802044e+01, 1.2869784992006130e+01, 1.3223712557871778e+01, 1.3601609402686369e+01, 1.4012776679134205e+01, 1.4475268112055591e+01, 1.5036352557386012e+01}, + {-1.5100607651623633e+01, -1.4540311217817694e+01, -1.4078487037622645e+01, -1.3667926962814708e+01, -1.3290600230854309e+01, -1.2937217433362431e+01, -1.2602207896558657e+01, -1.2281902356552338e+01, -1.1973723626537621e+01, -1.1675774634136665e+01, -1.1386607989721913e+01, -1.1105087678592627e+01, -1.0830301363602338e+01, -1.0561502276097833e+01, -1.0298069295499928e+01, -1.0039478682788769e+01, -9.7852835453915379e+00, -9.5350985857059971e+00, -9.2885885541431801e+00, -9.0454593582072000e+00, -8.8054511136991565e+00, -8.5683326410153491e+00, -8.3338970535972763e+00, -8.1019581834327870e+00, -7.8723476562632699e+00, -7.6449124769105978e+00, -7.4195130193460823e+00, -7.1960213409877323e+00, -6.9743197590276207e+00, -6.7542996402499806e+00, -6.5358603661027201e+00, -6.3189084426373174e+00, -6.1033567309754471e+00, -5.8891237786542385e+00, -5.6761332358780425e+00, 
-5.4643133436064266e+00, -5.2535964827157065e+00, -5.0439187753191463e+00, -4.8352197308201870e+00, -4.6274419304806678e+00, -4.4205307452707974e+00, -4.2144340825751483e+00, -4.0091021579943744e+00, -3.8044872890333408e+00, -3.6005437079246576e+00, -3.3972273912192827e+00, -3.1944959040967831e+00, -2.9923082576177755e+00, -2.7906247773689459e+00, -2.5894069821439305e+00, -2.3886174714670143e+00, -2.1882198209057537e+00, -1.9881784842371524e+00, -1.7884587016329878e+00, -1.5890264131159799e+00, -1.3898481766117783e+00, -1.1908910899840175e+00, -9.9212271649240336e-01, -7.9351101315811157e-01, -5.9502426155773702e-01, -3.9663100059735329e-01, -1.9829996084264417e-01, -0.0000000000000000e+00, 1.9829996084264417e-01, 3.9663100059735329e-01, 5.9502426155773702e-01, 7.9351101315811157e-01, 9.9212271649240336e-01, 1.1908910899840175e+00, 1.3898481766117783e+00, 1.5890264131159799e+00, 1.7884587016329878e+00, 1.9881784842371524e+00, 2.1882198209057537e+00, 2.3886174714670143e+00, 2.5894069821439305e+00, 2.7906247773689459e+00, 2.9923082576177755e+00, 3.1944959040967831e+00, 3.3972273912192827e+00, 3.6005437079246576e+00, 3.8044872890333408e+00, 4.0091021579943744e+00, 4.2144340825751483e+00, 4.4205307452707974e+00, 4.6274419304806678e+00, 4.8352197308201870e+00, 5.0439187753191463e+00, 5.2535964827157065e+00, 5.4643133436064266e+00, 5.6761332358780425e+00, 5.8891237786542385e+00, 6.1033567309754471e+00, 6.3189084426373174e+00, 6.5358603661027201e+00, 6.7542996402499806e+00, 6.9743197590276207e+00, 7.1960213409877323e+00, 7.4195130193460823e+00, 7.6449124769105978e+00, 7.8723476562632699e+00, 8.1019581834327870e+00, 8.3338970535972763e+00, 8.5683326410153491e+00, 8.8054511136991565e+00, 9.0454593582072000e+00, 9.2885885541431801e+00, 9.5350985857059971e+00, 9.7852835453915379e+00, 1.0039478682788769e+01, 1.0298069295499928e+01, 1.0561502276097833e+01, 1.0830301363602338e+01, 1.1105087678592627e+01, 1.1386607989721913e+01, 1.1675774634136665e+01, 1.1973723626537621e+01, 1.2281902356552338e+01, 1.2602207896558657e+01, 1.2937217433362431e+01, 1.3290600230854309e+01, 1.3667926962814708e+01, 1.4078487037622645e+01, 1.4540311217817694e+01, 1.5100607651623633e+01}, + {-1.5164601802095889e+01, -1.4605085809208793e+01, -1.4143922346496764e+01, -1.3733963425557441e+01, -1.3357201039800040e+01, -1.3004357419686706e+01, -1.2669868834839699e+01, -1.2350070580302615e+01, -1.2042388667769094e+01, -1.1744928381740326e+01, -1.1456244136772701e+01, -1.1175201342355262e+01, -1.0900888814619361e+01, -1.0632560739297817e+01, -1.0369596800621338e+01, -1.0111473949621464e+01, -9.8577458942500833e+00, -9.6080278666058625e+00, -9.3619850901178872e+00, -9.1193238995071209e+00, -8.8797848004974700e+00, -8.6431369728612566e+00, -8.4091738642920379e+00, -8.1777096203182182e+00, -7.9485761631426497e+00, -7.7216207799931862e+00, -7.4967041157356897e+00, -7.2736984893343362e+00, -7.0524864720372555e+00, -6.8329596788076081e+00, -6.6150177348097277e+00, -6.3985673866027142e+00, -6.1835217337299566e+00, -5.9697995610809267e+00, -5.7573247560732410e+00, -5.5460257976013150e+00, -5.3358353060028794e+00, -5.1266896451402619e+00, -4.9185285691809160e+00, -4.7112949078678819e+00, -4.5049342850546665e+00, -4.2993948660856276e+00, -4.0946271302677770e+00, -3.8905836652304053e+00, -3.6872189804268034e+00, -3.4844893374148405e+00, -3.2823525948738599e+00, -3.0807680665852137e+00, -2.8796963908316315e+00, -2.6790994098635554e+00, -2.4789400582443553e+00, -2.2791822590257231e+00, -2.0797908268232597e+00, -1.8807313769636194e+00, 
-1.6819702399610155e+00, -1.4834743806546613e+00, -1.2852113214015310e+00, -1.0871490687721614e+00, -8.8925604324227414e-01, -6.9150101141075326e-01, -4.9385302030582290e-01, -2.9628133336672019e-01, -9.8755367708345074e-02, 9.8755367708345074e-02, 2.9628133336672019e-01, 4.9385302030582290e-01, 6.9150101141075326e-01, 8.8925604324227414e-01, 1.0871490687721614e+00, 1.2852113214015310e+00, 1.4834743806546613e+00, 1.6819702399610155e+00, 1.8807313769636194e+00, 2.0797908268232597e+00, 2.2791822590257231e+00, 2.4789400582443553e+00, 2.6790994098635554e+00, 2.8796963908316315e+00, 3.0807680665852137e+00, 3.2823525948738599e+00, 3.4844893374148405e+00, 3.6872189804268034e+00, 3.8905836652304053e+00, 4.0946271302677770e+00, 4.2993948660856276e+00, 4.5049342850546665e+00, 4.7112949078678819e+00, 4.9185285691809160e+00, 5.1266896451402619e+00, 5.3358353060028794e+00, 5.5460257976013150e+00, 5.7573247560732410e+00, 5.9697995610809267e+00, 6.1835217337299566e+00, 6.3985673866027142e+00, 6.6150177348097277e+00, 6.8329596788076081e+00, 7.0524864720372555e+00, 7.2736984893343362e+00, 7.4967041157356897e+00, 7.7216207799931862e+00, 7.9485761631426497e+00, 8.1777096203182182e+00, 8.4091738642920379e+00, 8.6431369728612566e+00, 8.8797848004974700e+00, 9.1193238995071209e+00, 9.3619850901178872e+00, 9.6080278666058625e+00, 9.8577458942500833e+00, 1.0111473949621464e+01, 1.0369596800621338e+01, 1.0632560739297817e+01, 1.0900888814619361e+01, 1.1175201342355262e+01, 1.1456244136772701e+01, 1.1744928381740326e+01, 1.2042388667769094e+01, 1.2350070580302615e+01, 1.2669868834839699e+01, 1.3004357419686706e+01, 1.3357201039800040e+01, 1.3733963425557441e+01, 1.4143922346496764e+01, 1.4605085809208793e+01, 1.5164601802095889e+01}, + {-1.5228338148167351e+01, -1.4669595158833971e+01, -1.4209085995284871e+01, -1.3799722290211674e+01, -1.3423518590070948e+01, -1.3071208660474600e+01, -1.2737235652415684e+01, -1.2417939378869717e+01, -1.2110749020947747e+01, -1.1813772198267726e+01, -1.1525565112572695e+01, -1.1244994583785543e+01, -1.0971150569840248e+01, -1.0703288201027481e+01, -1.0440787957772772e+01, -1.0183127473450343e+01, -9.9298610495114250e+00, -9.6806044412474730e+00, -9.4350233389881648e+00, -9.1928244988460293e+00, -8.9537488108565402e+00, -8.7175658087076293e+00, -8.4840692689832480e+00, -8.2530736454457152e+00, -8.0244111514703373e+00, -7.7979293513870109e+00, -7.5734891556083452e+00, -7.3509631392269048e+00, -7.1302341220350698e+00, -6.9111939615465703e+00, -6.6937425208758290e+00, -6.4777867811645358e+00, -6.2632400742737344e+00, -6.0500214161419841e+00, -5.8380549248774187e+00, -5.6272693105464811e+00, -5.4175974259243240e+00, -5.2089758693153980e+00, -5.0013446320386352e+00, -4.7946467843764919e+00, -4.5888281947698371e+00, -4.3838372778464736e+00, -4.1796247675352030e+00, -3.9761435120673356e+00, -3.7733482881250526e+00, -3.5711956317782176e+00, -3.3696436841717392e+00, -3.1686520501953628e+00, -2.9681816685955908e+00, -2.7681946921824054e+00, -2.5686543769473498e+00, -2.3695249790490402e+00, -2.1707716587411507e+00, -1.9723603904195017e+00, -1.7742578780516791e+00, -1.5764314753267801e+00, -1.3788491099261777e+00, -1.1814792113700685e+00, -9.8429064194027271e-01, -7.8725263021825032e-01, -5.9033470680942102e-01, -3.9350664185130130e-01, -1.9673838392423251e-01, -0.0000000000000000e+00, 1.9673838392423251e-01, 3.9350664185130130e-01, 5.9033470680942102e-01, 7.8725263021825032e-01, 9.8429064194027271e-01, 1.1814792113700685e+00, 1.3788491099261777e+00, 1.5764314753267801e+00, 
1.7742578780516791e+00, 1.9723603904195017e+00, 2.1707716587411507e+00, 2.3695249790490402e+00, 2.5686543769473498e+00, 2.7681946921824054e+00, 2.9681816685955908e+00, 3.1686520501953628e+00, 3.3696436841717392e+00, 3.5711956317782176e+00, 3.7733482881250526e+00, 3.9761435120673356e+00, 4.1796247675352030e+00, 4.3838372778464736e+00, 4.5888281947698371e+00, 4.7946467843764919e+00, 5.0013446320386352e+00, 5.2089758693153980e+00, 5.4175974259243240e+00, 5.6272693105464811e+00, 5.8380549248774187e+00, 6.0500214161419841e+00, 6.2632400742737344e+00, 6.4777867811645358e+00, 6.6937425208758290e+00, 6.9111939615465703e+00, 7.1302341220350698e+00, 7.3509631392269048e+00, 7.5734891556083452e+00, 7.7979293513870109e+00, 8.0244111514703373e+00, 8.2530736454457152e+00, 8.4840692689832480e+00, 8.7175658087076293e+00, 8.9537488108565402e+00, 9.1928244988460293e+00, 9.4350233389881648e+00, 9.6806044412474730e+00, 9.9298610495114250e+00, 1.0183127473450343e+01, 1.0440787957772772e+01, 1.0703288201027481e+01, 1.0971150569840248e+01, 1.1244994583785543e+01, 1.1525565112572695e+01, 1.1813772198267726e+01, 1.2110749020947747e+01, 1.2417939378869717e+01, 1.2737235652415684e+01, 1.3071208660474600e+01, 1.3423518590070948e+01, 1.3799722290211674e+01, 1.4209085995284871e+01, 1.4669595158833971e+01, 1.5228338148167351e+01}, + {-1.5291819766882741e+01, -1.4733842473589300e+01, -1.4273981304787835e+01, -1.3865206984476242e+01, -1.3489556412623140e+01, -1.3137774788027649e+01, -1.2804312082067129e+01, -1.2485512585349447e+01, -1.2178808619831244e+01, -1.1882310118878310e+01, -1.1594575054741449e+01, -1.1314471644289977e+01, -1.1041090976019634e+01, -1.0773689115161440e+01, -1.0511647329914869e+01, -1.0254443928470931e+01, -1.0001633798930122e+01, -9.7528332134391675e+00, -9.5077083232790560e+00, -9.2659663002961761e+00, -9.0273484133947868e+00, -8.7916245448886858e+00, -8.5585887950645070e+00, -8.3280559207901454e+00, -8.0998584215078964e+00, -7.8738441335354343e+00, -7.6498742276810061e+00, -7.4278215299523005e+00, -7.2075691033873337e+00, -6.9890090426447742e+00, -6.7720414432559277e+00, -6.5565735152644828e+00, -6.3425188170017792e+00, -6.1297965894221624e+00, -5.9183311750858119e+00, -5.7080515087680856e+00, -5.4988906689739094e+00, -5.2907854814771786e+00, -5.0836761674893394e+00, -4.8775060302648141e+00, -4.6722211749326386e+00, -4.4677702571485822e+00, -4.2641042568255187e+00, -4.0611762737492727e+00, -3.8589413423442820e+00, -3.6573562632353078e+00, -3.4563794495717373e+00, -3.2559707863506588e+00, -3.0560915012026797e+00, -2.8567040452974051e+00, -2.6577719831894839e+00, -2.4592598905657392e+00, -2.2611332589730622e+00, -2.0633584067085660e+00, -1.8659023951405984e+00, -1.6687329498037236e+00, -1.4718183856744860e+00, -1.2751275360891583e+00, -1.0786296848109087e+00, -8.8229450079298133e-01, -6.8609197521733478e-01, -4.8999236041545891e-01, -2.9396611030029574e-01, -9.7983821955818992e-02, 9.7983821955818992e-02, 2.9396611030029574e-01, 4.8999236041545891e-01, 6.8609197521733478e-01, 8.8229450079298133e-01, 1.0786296848109087e+00, 1.2751275360891583e+00, 1.4718183856744860e+00, 1.6687329498037236e+00, 1.8659023951405984e+00, 2.0633584067085660e+00, 2.2611332589730622e+00, 2.4592598905657392e+00, 2.6577719831894839e+00, 2.8567040452974051e+00, 3.0560915012026797e+00, 3.2559707863506588e+00, 3.4563794495717373e+00, 3.6573562632353078e+00, 3.8589413423442820e+00, 4.0611762737492727e+00, 4.2641042568255187e+00, 4.4677702571485822e+00, 4.6722211749326386e+00, 4.8775060302648141e+00, 5.0836761674893394e+00, 
5.2907854814771786e+00, 5.4988906689739094e+00, 5.7080515087680856e+00, 5.9183311750858119e+00, 6.1297965894221624e+00, 6.3425188170017792e+00, 6.5565735152644828e+00, 6.7720414432559277e+00, 6.9890090426447742e+00, 7.2075691033873337e+00, 7.4278215299523005e+00, 7.6498742276810061e+00, 7.8738441335354343e+00, 8.0998584215078964e+00, 8.3280559207901454e+00, 8.5585887950645070e+00, 8.7916245448886858e+00, 9.0273484133947868e+00, 9.2659663002961761e+00, 9.5077083232790560e+00, 9.7528332134391675e+00, 1.0001633798930122e+01, 1.0254443928470931e+01, 1.0511647329914869e+01, 1.0773689115161440e+01, 1.1041090976019634e+01, 1.1314471644289977e+01, 1.1594575054741449e+01, 1.1882310118878310e+01, 1.2178808619831244e+01, 1.2485512585349447e+01, 1.2804312082067129e+01, 1.3137774788027649e+01, 1.3489556412623140e+01, 1.3865206984476242e+01, 1.4273981304787835e+01, 1.4733842473589300e+01, 1.5291819766882741e+01}, + {-1.5355049674683126e+01, -1.4797830896490307e+01, -1.4338611529008967e+01, -1.3930420866479178e+01, -1.3555317966130856e+01, -1.3204059359674192e+01, -1.2871101778903627e+01, -1.2552793952444540e+01, -1.2246571315023999e+01, -1.1950546092769182e+01, -1.1663278012068952e+01, -1.1383636673511937e+01, -1.1110714285141645e+01, -1.0843767837715543e+01, -1.0582179378973514e+01, -1.0325427884573593e+01, -1.0073068822584037e+01, -9.8247189758331519e+00, -9.5800449507652310e+00, -9.3387543294632973e+00, -9.1005887544187765e+00, -8.8653184514446028e+00, -8.6327378395084349e+00, -8.4026619736253494e+00, -8.1749236343739895e+00, -7.9493709251260496e+00, -7.7258652721212853e+00, -7.5042797472635216e+00, -7.2844976517401534e+00, -7.0664113121603904e+00, -6.8499210511601492e+00, -6.6349343022359886e+00, -6.4213648445851028e+00, -6.2091321383995863e+00, -5.9981607447217984e+00, -5.7883798168558824e+00, -5.5797226526273649e+00, -5.3721262986220681e+00, -5.1655311990180879e+00, -4.9598808828268046e+00, -4.7551216843394561e+00, -4.5512024923799270e+00, -4.3480745246271999e+00, -4.1456911238198586e+00, -3.9440075731117403e+00, -3.7429809282294269e+00, -3.5425698644023571e+00, -3.3427345363058465e+00, -3.1434364494849998e+00, -2.9446383419204585e+00, -2.7463040745609382e+00, -2.5483985297872240e+00, -2.3508875168916132e+00, -2.1537376837587945e+00, -1.9569164340215148e+00, -1.7603918490392145e+00, -1.5641326141118630e+00, -1.3681079483960505e+00, -1.1722875380371232e+00, -9.7664147207086760e-01, -7.8114018068176028e-01, -5.8575437543280562e-01, -3.9045499110504589e-01, -1.9521312880340755e-01, -0.0000000000000000e+00, 1.9521312880340755e-01, 3.9045499110504589e-01, 5.8575437543280562e-01, 7.8114018068176028e-01, 9.7664147207086760e-01, 1.1722875380371232e+00, 1.3681079483960505e+00, 1.5641326141118630e+00, 1.7603918490392145e+00, 1.9569164340215148e+00, 2.1537376837587945e+00, 2.3508875168916132e+00, 2.5483985297872240e+00, 2.7463040745609382e+00, 2.9446383419204585e+00, 3.1434364494849998e+00, 3.3427345363058465e+00, 3.5425698644023571e+00, 3.7429809282294269e+00, 3.9440075731117403e+00, 4.1456911238198586e+00, 4.3480745246271999e+00, 4.5512024923799270e+00, 4.7551216843394561e+00, 4.9598808828268046e+00, 5.1655311990180879e+00, 5.3721262986220681e+00, 5.5797226526273649e+00, 5.7883798168558824e+00, 5.9981607447217984e+00, 6.2091321383995863e+00, 6.4213648445851028e+00, 6.6349343022359886e+00, 6.8499210511601492e+00, 7.0664113121603904e+00, 7.2844976517401534e+00, 7.5042797472635216e+00, 7.7258652721212853e+00, 7.9493709251260496e+00, 8.1749236343739895e+00, 8.4026619736253494e+00, 8.6327378395084349e+00, 
8.8653184514446028e+00, 9.1005887544187765e+00, 9.3387543294632973e+00, 9.5800449507652310e+00, 9.8247189758331519e+00, 1.0073068822584037e+01, 1.0325427884573593e+01, 1.0582179378973514e+01, 1.0843767837715543e+01, 1.1110714285141645e+01, 1.1383636673511937e+01, 1.1663278012068952e+01, 1.1950546092769182e+01, 1.2246571315023999e+01, 1.2552793952444540e+01, 1.2871101778903627e+01, 1.3204059359674192e+01, 1.3555317966130856e+01, 1.3930420866479178e+01, 1.4338611529008967e+01, 1.4797830896490307e+01, 1.5355049674683126e+01}, + {-1.5418030829061875e+01, -1.4861563508435680e+01, -1.4402979857014893e+01, -1.3995367226732167e+01, -1.3620806639033354e+01, -1.3270065859909028e+01, -1.2937608322597471e+01, -1.2619787154794016e+01, -1.2314040876404606e+01, -1.2018483985703304e+01, -1.1731677947147649e+01, -1.1452493732070961e+01, -1.1180024657270208e+01, -1.0913528629812234e+01, -1.0652388468924656e+01, -1.0396083810552501e+01, -1.0144170696212489e+01, -9.8962664133982265e+00, -9.6520380180840490e+00, -9.4111934972461668e+00, -9.1734748618357518e+00, -8.9386526761940264e+00, -8.7065216731065060e+00, -8.4768972009392183e+00, -8.2496123166430966e+00, -8.0245153858819371e+00, -7.8014680855407574e+00, -7.5803437285851887e+00, -7.3610258494479153e+00, -7.1434070016947420e+00, -6.9273877299634155e+00, -6.7128756859735788e+00, -6.4997848644138259e+00, -6.2880349391774812e+00, -6.0775506840731746e+00, -5.8682614650210780e+00, -5.6601007930399492e+00, -5.4530059291673494e+00, -5.2469175339362204e+00, -5.0417793552319132e+00, -4.8375379493332744e+00, -4.6341424307446326e+00, -4.4315442470876985e+00, -4.2296969758707617e+00, -4.0285561404088908e+00, -3.8280790425501139e+00, -3.6282246101824178e+00, -3.4289532577657682e+00, -3.2302267583609394e+00, -3.0320081258198956e+00, -2.8342615059664689e+00, -2.6369520757359015e+00, -2.4400459493612159e+00, -2.2435100907965948e+00, -2.0473122316554950e+00, -1.8514207940163270e+00, -1.6558048175129170e+00, -1.4604338901821434e+00, -1.2652780825883267e+00, -1.0703078847841583e+00, -8.7549414570205153e-01, -6.8080801459844631e-01, -4.8622088419741660e-01, -2.9170433519935240e-01, -9.7230081835906187e-02, 9.7230081835906187e-02, 2.9170433519935240e-01, 4.8622088419741660e-01, 6.8080801459844631e-01, 8.7549414570205153e-01, 1.0703078847841583e+00, 1.2652780825883267e+00, 1.4604338901821434e+00, 1.6558048175129170e+00, 1.8514207940163270e+00, 2.0473122316554950e+00, 2.2435100907965948e+00, 2.4400459493612159e+00, 2.6369520757359015e+00, 2.8342615059664689e+00, 3.0320081258198956e+00, 3.2302267583609394e+00, 3.4289532577657682e+00, 3.6282246101824178e+00, 3.8280790425501139e+00, 4.0285561404088908e+00, 4.2296969758707617e+00, 4.4315442470876985e+00, 4.6341424307446326e+00, 4.8375379493332744e+00, 5.0417793552319132e+00, 5.2469175339362204e+00, 5.4530059291673494e+00, 5.6601007930399492e+00, 5.8682614650210780e+00, 6.0775506840731746e+00, 6.2880349391774812e+00, 6.4997848644138259e+00, 6.7128756859735788e+00, 6.9273877299634155e+00, 7.1434070016947420e+00, 7.3610258494479153e+00, 7.5803437285851887e+00, 7.8014680855407574e+00, 8.0245153858819371e+00, 8.2496123166430966e+00, 8.4768972009392183e+00, 8.7065216731065060e+00, 8.9386526761940264e+00, 9.1734748618357518e+00, 9.4111934972461668e+00, 9.6520380180840490e+00, 9.8962664133982265e+00, 1.0144170696212489e+01, 1.0396083810552501e+01, 1.0652388468924656e+01, 1.0913528629812234e+01, 1.1180024657270208e+01, 1.1452493732070961e+01, 1.1731677947147649e+01, 1.2018483985703304e+01, 1.2314040876404606e+01, 1.2619787154794016e+01, 
1.2937608322597471e+01, 1.3270065859909028e+01, 1.3620806639033354e+01, 1.3995367226732167e+01, 1.4402979857014893e+01, 1.4861563508435680e+01, 1.5418030829061875e+01}, + {-1.5480766130162912e+01, -1.4925043329908892e+01, -1.4467089414730456e+01, -1.4060049290015051e+01, -1.3686025751508065e+01, -1.3335797702455430e+01, -1.3003835219535608e+01, -1.2686495791216792e+01, -1.2381220995463224e+01, -1.2086127582443233e+01, -1.1799778738905447e+01, -1.1521046794198202e+01, -1.1249026163290472e+01, -1.0982975660531801e+01, -1.0722278868759513e+01, -1.0466416077189237e+01, -1.0214943894423017e+01, -9.9674801067548007e+00, -9.7236922142042346e+00, -9.4832886034446808e+00, -9.2460116489073236e+00, -9.0116322486398488e+00, -8.7799454445339524e+00, -8.5507668736650881e+00, -8.3239298647512676e+00, -8.0992830411501870e+00, -7.8766883257751701e+00, -7.6560192679949264e+00, -7.4371596307659544e+00, -7.2200021898070377e+00, -7.0044477068537327e+00, -6.7904040468269269e+00, -6.5777854147500525e+00, -6.3665116929098069e+00, -6.1565078624054426e+00, -5.9477034961131627e+00, -5.7400323123838195e+00, -5.5334317806272333e+00, -5.3278427714156580e+00, -5.1232092449384545e+00, -4.9194779726184912e+00, -4.7165982875032402e+00, -4.5145218597049430e+00, -4.3132024937121241e+00, -4.1125959448506144e+00, -3.9126597525531843e+00, -3.7133530884165373e+00, -3.5146366172935721e+00, -3.3164723698963634e+00, -3.1188236255781381e+00, -2.9216548041266144e+00, -2.7249313655408702e+00, -2.5286197168834539e+00, -2.3326871254017765e+00, -2.1371016372005704e+00, -1.9418320008225314e+00, -1.7468475951589244e+00, -1.5521183611674445e+00, -1.3576147369221905e+00, -1.1633075955612597e+00, -9.6916818573207730e-01, -7.7516807416380029e-01, -5.8127909002062172e-01, -3.8747327070995596e-01, -1.9372280883577261e-01, -0.0000000000000000e+00, 1.9372280883577261e-01, 3.8747327070995596e-01, 5.8127909002062172e-01, 7.7516807416380029e-01, 9.6916818573207730e-01, 1.1633075955612597e+00, 1.3576147369221905e+00, 1.5521183611674445e+00, 1.7468475951589244e+00, 1.9418320008225314e+00, 2.1371016372005704e+00, 2.3326871254017765e+00, 2.5286197168834539e+00, 2.7249313655408702e+00, 2.9216548041266144e+00, 3.1188236255781381e+00, 3.3164723698963634e+00, 3.5146366172935721e+00, 3.7133530884165373e+00, 3.9126597525531843e+00, 4.1125959448506144e+00, 4.3132024937121241e+00, 4.5145218597049430e+00, 4.7165982875032402e+00, 4.9194779726184912e+00, 5.1232092449384545e+00, 5.3278427714156580e+00, 5.5334317806272333e+00, 5.7400323123838195e+00, 5.9477034961131627e+00, 6.1565078624054426e+00, 6.3665116929098069e+00, 6.5777854147500525e+00, 6.7904040468269269e+00, 7.0044477068537327e+00, 7.2200021898070377e+00, 7.4371596307659544e+00, 7.6560192679949264e+00, 7.8766883257751701e+00, 8.0992830411501870e+00, 8.3239298647512676e+00, 8.5507668736650881e+00, 8.7799454445339524e+00, 9.0116322486398488e+00, 9.2460116489073236e+00, 9.4832886034446808e+00, 9.7236922142042346e+00, 9.9674801067548007e+00, 1.0214943894423017e+01, 1.0466416077189237e+01, 1.0722278868759513e+01, 1.0982975660531801e+01, 1.1249026163290472e+01, 1.1521046794198202e+01, 1.1799778738905447e+01, 1.2086127582443233e+01, 1.2381220995463224e+01, 1.2686495791216792e+01, 1.3003835219535608e+01, 1.3335797702455430e+01, 1.3686025751508065e+01, 1.4060049290015051e+01, 1.4467089414730456e+01, 1.4925043329908892e+01, 1.5480766130162912e+01}, + {-1.5543258422323774e+01, -1.4988273322620483e+01, -1.4530943266670407e+01, -1.4124470217193569e+01, -1.3750978557373212e+01, -1.3401258232252896e+01, 
-1.3069785904893536e+01, -1.2752923386873304e+01, -1.2448115287552957e+01, -1.2153480589094658e+01, -1.1867584185044096e+01, -1.1589299750272769e+01, -1.1317722787546410e+01, -1.1052113009654860e+01, -1.0791854755335825e+01, -1.0536428960217005e+01, -1.0285392793773033e+01, -1.0038364535378761e+01, -9.7950121237570809e+00, -9.5550443402238834e+00, -9.3182039178520490e+00, -9.0842620837165757e+00, -8.8530141842483925e+00, -8.6242761407386386e+00, -8.3978815492187184e+00, -8.1736792862980483e+00, -7.9515315164625289e+00, -7.7313120209923101e+00, -7.5129047868198517e+00, -7.2962028071941081e+00, -7.0811070562331242e+00, -6.8675256072344579e+00, -6.6553728706063735e+00, -6.4445689319376820e+00, -6.2350389743700507e+00, -6.0267127723148342e+00, -5.8195242458455034e+00, -5.6134110669298432e+00, -5.4083143101435791e+00, -5.2041781417053947e+00, -5.0009495416506535e+00, -4.7985780547627241e+00, -4.5970155665415948e+00, -4.3962161010367922e+00, -4.1961356378270693e+00, -3.9967319458099735e+00, -3.7979644317837824e+00, -3.5997940020732910e+00, -3.4021829356783408e+00, -3.2050947676167678e+00, -3.0084941812975128e+00, -2.8123469088994537e+00, -2.6166196388511782e+00, -2.4212799296093106e+00, -2.2262961290209549e+00, -2.0316372986313107e+00, -1.8372731423624578e+00, -1.6431739390450815e+00, -1.4493104783328268e+00, -1.2556539995700138e+00, -1.0621761332184900e+00, -8.6884884447915023e-01, -6.7564437876870465e-01, -4.8253520873311562e-01, -2.8949398249614278e-01, -9.6493472854955881e-02, 9.6493472854955881e-02, 2.8949398249614278e-01, 4.8253520873311562e-01, 6.7564437876870465e-01, 8.6884884447915023e-01, 1.0621761332184900e+00, 1.2556539995700138e+00, 1.4493104783328268e+00, 1.6431739390450815e+00, 1.8372731423624578e+00, 2.0316372986313107e+00, 2.2262961290209549e+00, 2.4212799296093106e+00, 2.6166196388511782e+00, 2.8123469088994537e+00, 3.0084941812975128e+00, 3.2050947676167678e+00, 3.4021829356783408e+00, 3.5997940020732910e+00, 3.7979644317837824e+00, 3.9967319458099735e+00, 4.1961356378270693e+00, 4.3962161010367922e+00, 4.5970155665415948e+00, 4.7985780547627241e+00, 5.0009495416506535e+00, 5.2041781417053947e+00, 5.4083143101435791e+00, 5.6134110669298432e+00, 5.8195242458455034e+00, 6.0267127723148342e+00, 6.2350389743700507e+00, 6.4445689319376820e+00, 6.6553728706063735e+00, 6.8675256072344579e+00, 7.0811070562331242e+00, 7.2962028071941081e+00, 7.5129047868198517e+00, 7.7313120209923101e+00, 7.9515315164625289e+00, 8.1736792862980483e+00, 8.3978815492187184e+00, 8.6242761407386386e+00, 8.8530141842483925e+00, 9.0842620837165757e+00, 9.3182039178520490e+00, 9.5550443402238834e+00, 9.7950121237570809e+00, 1.0038364535378761e+01, 1.0285392793773033e+01, 1.0536428960217005e+01, 1.0791854755335825e+01, 1.1052113009654860e+01, 1.1317722787546410e+01, 1.1589299750272769e+01, 1.1867584185044096e+01, 1.2153480589094658e+01, 1.2448115287552957e+01, 1.2752923386873304e+01, 1.3069785904893536e+01, 1.3401258232252896e+01, 1.3750978557373212e+01, 1.4124470217193569e+01, 1.4530943266670407e+01, 1.4988273322620483e+01, 1.5543258422323774e+01}, + {-1.5605510495565703e+01, -1.5051256391093448e+01, -1.4594544417610667e+01, -1.4188633106973045e+01, -1.3815668245922964e+01, -1.3466450727373902e+01, -1.3135463744634585e+01, -1.2819073395348772e+01, -1.2514727294059055e+01, -1.2220546635363748e+01, -1.1935098004387349e+01, -1.1657256409263454e+01, -1.1386118430379335e+01, -1.1120944670301025e+01, -1.0861120216120376e+01, -1.0606126643170979e+01, -1.0355521675732227e+01, -1.0108924080680463e+01, 
-9.8660022302368411e+00, -9.6264652954408270e+00, -9.3900563633043106e+00, -9.1565469853869601e+00, -8.9257328082304568e+00, -8.6974300329890397e+00, -8.4714725186984587e+00, -8.2477093909264614e+00, -8.0260030514294165e+00, -7.8062275090663409e+00, -7.5882669703629624e+00, -7.3720146416474837e+00, -7.1573717048843459e+00, -6.9442464371104355e+00, -6.7325534493647661e+00, -6.5222130256523068e+00, -6.3131505461243611e+00, -6.1052959815328816e+00, -5.8985834483025466e+00, -5.6929508153954593e+00, -5.4883393556191784e+00, -5.2846934352257779e+00, -5.0819602366259380e+00, -4.8800895098428096e+00, -4.6790333489905045e+00, -4.4787459906088296e+00, -4.2791836311409464e+00, -4.0803042612209337e+00, -3.8820675147573152e+00, -3.6844345310674940e+00, -3.4873678285452550e+00, -3.2908311885362425e+00, -3.0947895482603323e+00, -2.8992089017596818e+00, -2.7040562079709125e+00, -2.5092993051224148e+00, -2.3149068307458065e+00, -2.1208481466662792e+00, -1.9270932684016822e+00, -1.7336127984562590e+00, -1.5403778630431328e+00, -1.3473600518110582e+00, -1.1545313601863669e+00, -9.6186413397127435e-01, -7.6933101586525243e-01, -5.7690489359762542e-01, -3.8455884937721463e-01, -1.9226611037913860e-01, -0.0000000000000000e+00, 1.9226611037913860e-01, 3.8455884937721463e-01, 5.7690489359762542e-01, 7.6933101586525243e-01, 9.6186413397127435e-01, 1.1545313601863669e+00, 1.3473600518110582e+00, 1.5403778630431328e+00, 1.7336127984562590e+00, 1.9270932684016822e+00, 2.1208481466662792e+00, 2.3149068307458065e+00, 2.5092993051224148e+00, 2.7040562079709125e+00, 2.8992089017596818e+00, 3.0947895482603323e+00, 3.2908311885362425e+00, 3.4873678285452550e+00, 3.6844345310674940e+00, 3.8820675147573152e+00, 4.0803042612209337e+00, 4.2791836311409464e+00, 4.4787459906088296e+00, 4.6790333489905045e+00, 4.8800895098428096e+00, 5.0819602366259380e+00, 5.2846934352257779e+00, 5.4883393556191784e+00, 5.6929508153954593e+00, 5.8985834483025466e+00, 6.1052959815328816e+00, 6.3131505461243611e+00, 6.5222130256523068e+00, 6.7325534493647661e+00, 6.9442464371104355e+00, 7.1573717048843459e+00, 7.3720146416474837e+00, 7.5882669703629624e+00, 7.8062275090663409e+00, 8.0260030514294165e+00, 8.2477093909264614e+00, 8.4714725186984587e+00, 8.6974300329890397e+00, 8.9257328082304568e+00, 9.1565469853869601e+00, 9.3900563633043106e+00, 9.6264652954408270e+00, 9.8660022302368411e+00, 1.0108924080680463e+01, 1.0355521675732227e+01, 1.0606126643170979e+01, 1.0861120216120376e+01, 1.1120944670301025e+01, 1.1386118430379335e+01, 1.1657256409263454e+01, 1.1935098004387349e+01, 1.2220546635363748e+01, 1.2514727294059055e+01, 1.2819073395348772e+01, 1.3135463744634585e+01, 1.3466450727373902e+01, 1.3815668245922964e+01, 1.4188633106973045e+01, 1.4594544417610667e+01, 1.5051256391093448e+01, 1.5605510495565703e+01}, + {-1.5667525087033042e+01, -1.5113995384194148e+01, -1.4657895814201638e+01, -1.4252540997590891e+01, -1.3880097943698006e+01, -1.3531378400872688e+01, -1.3200872037437804e+01, -1.2884949200661589e+01, -1.2581060484489544e+01, -1.2287329276732219e+01, -1.2002323839142806e+01, -1.1724920501080165e+01, -1.1454216910571811e+01, -1.1189474551468571e+01, -1.0930079251827893e+01, -1.0675513220130126e+01, -1.0425334729531320e+01, -1.0179163028964693e+01, -9.9366669190764370e+00, -9.6975559558409401e+00, -9.4615735756363968e+00, -9.2284916500953180e+00, -8.9981061215741249e+00, -8.7702334668725097e+00, -8.5447078038596302e+00, -8.3213785029101519e+00, -8.1001081988952368e+00, -7.8807711240717229e+00, -7.6632517003331104e+00, 
-7.4474433427993905e+00, -7.2332474369156490e+00, -7.0205724589985739e+00, -6.8093332161497848e+00, -6.5994501860991619e+00, -6.3908489411790317e+00, -6.1834596435017497e+00, -5.9772166006971021e+00, -5.7720578733949548e+00, -5.5679249271127809e+00, -5.3647623224033971e+00, -5.1625174380934791e+00, -4.9611402232433406e+00, -4.7605829741178525e+00, -4.5608001330046735e+00, -4.3617481061705252e+00, -4.1633850986262280e+00, -3.9656709636900676e+00, -3.7685670656076877e+00, -3.5720361537138379e+00, -3.3760422468139430e+00, -3.1805505266274205e+00, -2.9855272392745684e+00, -2.7909396039085173e+00, -2.5967557276963822e+00, -2.4029445264418832e+00, -2.2094756502175628e+00, -2.0163194134400269e+00, -1.8234467288779213e+00, -1.6308290451307998e+00, -1.4384382871587706e+00, -1.2462467994785644e+00, -1.0542272916723083e+00, -8.6235278588129705e-01, -6.7059656597901129e-01, -4.7893212813590247e-01, -2.8733313250340886e-01, -9.5773355756546857e-02, 9.5773355756546857e-02, 2.8733313250340886e-01, 4.7893212813590247e-01, 6.7059656597901129e-01, 8.6235278588129705e-01, 1.0542272916723083e+00, 1.2462467994785644e+00, 1.4384382871587706e+00, 1.6308290451307998e+00, 1.8234467288779213e+00, 2.0163194134400269e+00, 2.2094756502175628e+00, 2.4029445264418832e+00, 2.5967557276963822e+00, 2.7909396039085173e+00, 2.9855272392745684e+00, 3.1805505266274205e+00, 3.3760422468139430e+00, 3.5720361537138379e+00, 3.7685670656076877e+00, 3.9656709636900676e+00, 4.1633850986262280e+00, 4.3617481061705252e+00, 4.5608001330046735e+00, 4.7605829741178525e+00, 4.9611402232433406e+00, 5.1625174380934791e+00, 5.3647623224033971e+00, 5.5679249271127809e+00, 5.7720578733949548e+00, 5.9772166006971021e+00, 6.1834596435017497e+00, 6.3908489411790317e+00, 6.5994501860991619e+00, 6.8093332161497848e+00, 7.0205724589985739e+00, 7.2332474369156490e+00, 7.4474433427993905e+00, 7.6632517003331104e+00, 7.8807711240717229e+00, 8.1001081988952368e+00, 8.3213785029101519e+00, 8.5447078038596302e+00, 8.7702334668725097e+00, 8.9981061215741249e+00, 9.2284916500953180e+00, 9.4615735756363968e+00, 9.6975559558409401e+00, 9.9366669190764370e+00, 1.0179163028964693e+01, 1.0425334729531320e+01, 1.0675513220130126e+01, 1.0930079251827893e+01, 1.1189474551468571e+01, 1.1454216910571811e+01, 1.1724920501080165e+01, 1.2002323839142806e+01, 1.2287329276732219e+01, 1.2581060484489544e+01, 1.2884949200661589e+01, 1.3200872037437804e+01, 1.3531378400872688e+01, 1.3880097943698006e+01, 1.4252540997590891e+01, 1.4657895814201638e+01, 1.5113995384194148e+01, 1.5667525087033042e+01}, + {-1.5729304882383959e+01, -1.5176493096610994e+01, -1.4721000346526051e+01, -1.4316196868450396e+01, -1.3944270716194302e+01, -1.3596044402569063e+01, -1.3266014016557564e+01, -1.2950554119200040e+01, -1.2647118258490758e+01, -1.2353831996553668e+01, -1.2069265257081389e+01, -1.1792295678839055e+01, -1.1522021967701262e+01, -1.1257706480479557e+01, -1.0998735778960949e+01, -1.0744592698355470e+01, -1.0494836054902457e+01, -1.0249085574277188e+01, -1.0007010480604432e+01, -9.7683207101299594e+00, -9.5327600441503151e+00, -9.3001006700843707e+00, -9.0701388219340853e+00, -8.8426912480559938e+00, -8.6175923211137029e+00, -8.3946916522729023e+00, -8.1738521055035420e+00, -7.9549481324236444e+00, -7.7378643662186803e+00, -7.5224944266685263e+00, -7.3087398984951246e+00, -7.0965094530044173e+00, -6.8857180889690319e+00, -6.6762864733372753e+00, -6.4681403659874999e+00, -6.2612101156154036e+00, -6.0554302161231597e+00, -5.8507389147062092e+00, -5.6470778643062038e+00, 
-5.4443918142929100e+00, -5.2426283342121209e+00, -5.0417375662357022e+00, -4.8416720026085907e+00, -4.6423862849333188e+00, -4.4438370225867869e+00, -4.2459826279436079e+00, -4.0487831663989748e+00, -3.8522002194524045e+00, -3.6561967593406961e+00, -3.4607370339009882e+00, -3.2657864605087177e+00, -3.0713115280751384e+00, -2.8772797062087854e+00, -2.6836593607479262e+00, -2.4904196749593304e+00, -2.2975305757745934e+00, -2.1049626645007851e+00, -1.9126871514986146e+00, -1.7206757943700177e+00, -1.5289008392390442e+00, -1.3373349647460135e+00, -1.1459512284058875e+00, -9.5472301500823675e-01, -7.6362398675858623e-01, -5.7262803487972669e-01, -3.8170923240711668e-01, -1.9084178792504236e-01, -0.0000000000000000e+00, 1.9084178792504236e-01, 3.8170923240711668e-01, 5.7262803487972669e-01, 7.6362398675858623e-01, 9.5472301500823675e-01, 1.1459512284058875e+00, 1.3373349647460135e+00, 1.5289008392390442e+00, 1.7206757943700177e+00, 1.9126871514986146e+00, 2.1049626645007851e+00, 2.2975305757745934e+00, 2.4904196749593304e+00, 2.6836593607479262e+00, 2.8772797062087854e+00, 3.0713115280751384e+00, 3.2657864605087177e+00, 3.4607370339009882e+00, 3.6561967593406961e+00, 3.8522002194524045e+00, 4.0487831663989748e+00, 4.2459826279436079e+00, 4.4438370225867869e+00, 4.6423862849333188e+00, 4.8416720026085907e+00, 5.0417375662357022e+00, 5.2426283342121209e+00, 5.4443918142929100e+00, 5.6470778643062038e+00, 5.8507389147062092e+00, 6.0554302161231597e+00, 6.2612101156154036e+00, 6.4681403659874999e+00, 6.6762864733372753e+00, 6.8857180889690319e+00, 7.0965094530044173e+00, 7.3087398984951246e+00, 7.5224944266685263e+00, 7.7378643662186803e+00, 7.9549481324236444e+00, 8.1738521055035420e+00, 8.3946916522729023e+00, 8.6175923211137029e+00, 8.8426912480559938e+00, 9.0701388219340853e+00, 9.3001006700843707e+00, 9.5327600441503151e+00, 9.7683207101299594e+00, 1.0007010480604432e+01, 1.0249085574277188e+01, 1.0494836054902457e+01, 1.0744592698355470e+01, 1.0998735778960949e+01, 1.1257706480479557e+01, 1.1522021967701262e+01, 1.1792295678839055e+01, 1.2069265257081389e+01, 1.2353831996553668e+01, 1.2647118258490758e+01, 1.2950554119200040e+01, 1.3266014016557564e+01, 1.3596044402569063e+01, 1.3944270716194302e+01, 1.4316196868450396e+01, 1.4721000346526051e+01, 1.5176493096610994e+01, 1.5729304882383959e+01}, + {-1.5790852517134587e+01, -1.5238752270283085e+01, -1.4783860849603613e+01, -1.4379603641698345e+01, -1.4008189569512616e+01, -1.3660451820769911e+01, -1.3330892851617801e+01, -1.3015891401590464e+01, -1.2712903947791023e+01, -1.2420058208074661e+01, -1.2135925753638015e+01, -1.1859385521045176e+01, -1.1589537264407285e+01, -1.1325644205334678e+01, -1.1067093632255466e+01, -1.0813369000829548e+01, -1.0564029664716180e+01, -1.0318695821142974e+01, -1.0077037112888743e+01, -9.8387638519273803e+00, -9.6036201601455531e+00, -9.3713785365821725e+00, -9.1418355028369902e+00, -8.9148080748582679e+00, -8.6901308761912386e+00, -8.4676537549061486e+00, -8.2472398001890390e+00, -8.0287636791201553e+00, -7.8121102322437741e+00, -7.5971732800158449e+00, -7.3838546023854859e+00, -7.1720630615179752e+00, -6.9617138436331016e+00, -6.7527278005665341e+00, -6.5450308752913493e+00, -6.3385535985021413e+00, -6.1332306456429340e+00, -5.9290004455850367e+00, -5.7258048336320568e+00, -5.5235887427222501e+00, -5.3222999276716845e+00, -5.1218887180998118e+00, -4.9223077963371686e+00, -4.7235119971600676e+00, -4.5254581266508760e+00, -4.3281047978617426e+00, -4.1314122812779592e+00, -3.9353423683453870e+00, 
-3.7398582465531405e+00, -3.5449243847552383e+00, -3.3505064275787246e+00, -3.1565710979056170e+00, -2.9630861065357892e+00, -2.7700200682405702e+00, -2.5773424235052076e+00, -2.3850233653343853e+00, -2.1930337705606084e+00, -2.0013451351519014e+00, -1.8099295130641593e+00, -1.6187594582257014e+00, -1.4278079692779639e+00, -1.2370484367275345e+00, -1.0464545921915449e+00, -8.5600045944125114e-01, -6.6566030696788747e-01, -4.7540860181095812e-01, -2.8521996440226244e-01, -9.5069124189415724e-02, 9.5069124189415724e-02, 2.8521996440226244e-01, 4.7540860181095812e-01, 6.6566030696788747e-01, 8.5600045944125114e-01, 1.0464545921915449e+00, 1.2370484367275345e+00, 1.4278079692779639e+00, 1.6187594582257014e+00, 1.8099295130641593e+00, 2.0013451351519014e+00, 2.1930337705606084e+00, 2.3850233653343853e+00, 2.5773424235052076e+00, 2.7700200682405702e+00, 2.9630861065357892e+00, 3.1565710979056170e+00, 3.3505064275787246e+00, 3.5449243847552383e+00, 3.7398582465531405e+00, 3.9353423683453870e+00, 4.1314122812779592e+00, 4.3281047978617426e+00, 4.5254581266508760e+00, 4.7235119971600676e+00, 4.9223077963371686e+00, 5.1218887180998118e+00, 5.3222999276716845e+00, 5.5235887427222501e+00, 5.7258048336320568e+00, 5.9290004455850367e+00, 6.1332306456429340e+00, 6.3385535985021413e+00, 6.5450308752913493e+00, 6.7527278005665341e+00, 6.9617138436331016e+00, 7.1720630615179752e+00, 7.3838546023854859e+00, 7.5971732800158449e+00, 7.8121102322437741e+00, 8.0287636791201553e+00, 8.2472398001890390e+00, 8.4676537549061486e+00, 8.6901308761912386e+00, 8.9148080748582679e+00, 9.1418355028369902e+00, 9.3713785365821725e+00, 9.6036201601455531e+00, 9.8387638519273803e+00, 1.0077037112888743e+01, 1.0318695821142974e+01, 1.0564029664716180e+01, 1.0813369000829548e+01, 1.1067093632255466e+01, 1.1325644205334678e+01, 1.1589537264407285e+01, 1.1859385521045176e+01, 1.2135925753638015e+01, 1.2420058208074661e+01, 1.2712903947791023e+01, 1.3015891401590464e+01, 1.3330892851617801e+01, 1.3660451820769911e+01, 1.4008189569512616e+01, 1.4379603641698345e+01, 1.4783860849603613e+01, 1.5238752270283085e+01, 1.5790852517134587e+01}, + {-1.5852170577958367e+01, -1.5300775595780827e+01, -1.4846480104844703e+01, -1.4442764183748782e+01, -1.4071857451951351e+01, -1.3724603683931077e+01, -1.3395511650343654e+01, -1.3080964234499790e+01, -1.2778420818075578e+01, -1.2486011256383852e+01, -1.2202308753936913e+01, -1.1926193533696322e+01, -1.1656766388576511e+01, -1.1393291396981837e+01, -1.1135156567035956e+01, -1.0881845968701494e+01, -1.0632919487519736e+01, -1.0387997787201458e+01, -1.0146750924472403e+01, -9.9088895826069283e+00, -9.6741582198685645e+00, -9.4423296428652392e+00, -9.2132006568631315e+00, -8.9865885415552977e+00, -8.7623281675764328e+00, -8.5402696161385645e+00, -8.3202761978883633e+00, -8.1022227916007061e+00, -7.8859944413814738e+00, -7.6714851645199493e+00, -7.4585969322894226e+00, -7.2472387937374041e+00, -7.0373261184664502e+00, -6.8287799390351633e+00, -6.6215263772344395e+00, -6.4154961413560549e+00, -6.2106240838470441e+00, -6.0068488105662023e+00, -5.8041123343284946e+00, -5.6023597666149243e+00, -5.4015390422975438e+00, -5.2016006730266913e+00, -5.0024975255849276e+00, -4.8041846220567344e+00, -4.6066189591163766e+00, -4.4097593441152032e+00, -4.2135662459677485e+00, -4.0180016591039696e+00, -3.8230289789816339e+00, -3.6286128878452084e+00, -3.4347192495813528e+00, -3.2413150126609418e+00, -3.0483681202772592e+00, -2.8558474268927547e+00, -2.6637226204951387e+00, -2.4719641499397405e+00, 
-2.2805431568207979e+00, -2.0894314113711125e+00, -1.8986012519385917e+00, -1.7080255276306118e+00, -1.5176775437537562e+00, -1.3275310097080575e+00, -1.1375599890219745e+00, -9.4773885123753698e-01, -7.5804222537475319e-01, -5.6844495472091694e-01, -3.7892205270411800e-01, -1.8944865962132065e-01, -0.0000000000000000e+00, 1.8944865962132065e-01, 3.7892205270411800e-01, 5.6844495472091694e-01, 7.5804222537475319e-01, 9.4773885123753698e-01, 1.1375599890219745e+00, 1.3275310097080575e+00, 1.5176775437537562e+00, 1.7080255276306118e+00, 1.8986012519385917e+00, 2.0894314113711125e+00, 2.2805431568207979e+00, 2.4719641499397405e+00, 2.6637226204951387e+00, 2.8558474268927547e+00, 3.0483681202772592e+00, 3.2413150126609418e+00, 3.4347192495813528e+00, 3.6286128878452084e+00, 3.8230289789816339e+00, 4.0180016591039696e+00, 4.2135662459677485e+00, 4.4097593441152032e+00, 4.6066189591163766e+00, 4.8041846220567344e+00, 5.0024975255849276e+00, 5.2016006730266913e+00, 5.4015390422975438e+00, 5.6023597666149243e+00, 5.8041123343284946e+00, 6.0068488105662023e+00, 6.2106240838470441e+00, 6.4154961413560549e+00, 6.6215263772344395e+00, 6.8287799390351633e+00, 7.0373261184664502e+00, 7.2472387937374041e+00, 7.4585969322894226e+00, 7.6714851645199493e+00, 7.8859944413814738e+00, 8.1022227916007061e+00, 8.3202761978883633e+00, 8.5402696161385645e+00, 8.7623281675764328e+00, 8.9865885415552977e+00, 9.2132006568631315e+00, 9.4423296428652392e+00, 9.6741582198685645e+00, 9.9088895826069283e+00, 1.0146750924472403e+01, 1.0387997787201458e+01, 1.0632919487519736e+01, 1.0881845968701494e+01, 1.1135156567035956e+01, 1.1393291396981837e+01, 1.1656766388576511e+01, 1.1926193533696322e+01, 1.2202308753936913e+01, 1.2486011256383852e+01, 1.2778420818075578e+01, 1.3080964234499790e+01, 1.3395511650343654e+01, 1.3724603683931077e+01, 1.4071857451951351e+01, 1.4442764183748782e+01, 1.4846480104844703e+01, 1.5300775595780827e+01, 1.5852170577958367e+01}, + {-1.5913261603942480e+01, -1.5362565713640512e+01, -1.4908860841455180e+01, -1.4505681306755122e+01, -1.4135277255544954e+01, -1.3788502962262141e+01, -1.3459873460233181e+01, -1.3145775742375259e+01, -1.2843672070795714e+01, -1.2551694420292197e+01, -1.2268417614744900e+01, -1.1992723152311497e+01, -1.1723712855448614e+01, -1.1460651651502332e+01, -1.1202928261484638e+01, -1.0950027363642045e+01, -1.0701509369981165e+01, -1.0456995405743042e+01, -1.0216155937006269e+01, -9.9787020140292348e+00, -9.7443784273495506e+00, -9.5129582872037410e+00, -9.2842386787046287e+00, -9.0580371415563476e+00, -8.8341887898063280e+00, -8.6125439341638330e+00, -8.3929661031022729e+00, -8.1753303834489426e+00, -7.9595220192037859e+00, -7.7454352207811352e+00, -7.5329721470151476e+00, -7.3220420300038924e+00, -7.1125604188199452e+00, -6.9044485227388090e+00, -6.6976326382579296e+00, -6.4920436470380141e+00, -6.2876165741721222e+00, -6.0842901980089970e+00, -5.8820067042248958e+00, -5.6807113780286782e+00, -5.4803523293560428e+00, -5.2808802467053466e+00, -5.0822481759241382e+00, -4.8844113207996323e+00, -4.6873268627591953e+00, -4.4909537973654823e+00, -4.2952527856086178e+00, -4.1001860182656378e+00, -3.9057170918238593e+00, -3.7118108946570874e+00, -3.5184335023072513e+00, -3.3255520808638139e+00, -3.1331347975530193e+00, -2.9411507377518000e+00, -2.7495698277296028e+00, -2.5583627624976102e+00, -2.3675009382106698e+00, -2.1769563886241587e+00, -1.9867017251572272e+00, -1.7967100801564784e+00, -1.6069550529909824e+00, -1.4174106586413215e+00, -1.2280512784727862e+00, 
-1.0388516129063341e+00, -8.4978663572097946e-01, -6.6083154973822611e-01, -4.7196174365329491e-01, -2.8315274978949312e-01, -9.4380202561292328e-02, 9.4380202561292328e-02, 2.8315274978949312e-01, 4.7196174365329491e-01, 6.6083154973822611e-01, 8.4978663572097946e-01, 1.0388516129063341e+00, 1.2280512784727862e+00, 1.4174106586413215e+00, 1.6069550529909824e+00, 1.7967100801564784e+00, 1.9867017251572272e+00, 2.1769563886241587e+00, 2.3675009382106698e+00, 2.5583627624976102e+00, 2.7495698277296028e+00, 2.9411507377518000e+00, 3.1331347975530193e+00, 3.3255520808638139e+00, 3.5184335023072513e+00, 3.7118108946570874e+00, 3.9057170918238593e+00, 4.1001860182656378e+00, 4.2952527856086178e+00, 4.4909537973654823e+00, 4.6873268627591953e+00, 4.8844113207996323e+00, 5.0822481759241382e+00, 5.2808802467053466e+00, 5.4803523293560428e+00, 5.6807113780286782e+00, 5.8820067042248958e+00, 6.0842901980089970e+00, 6.2876165741721222e+00, 6.4920436470380141e+00, 6.6976326382579296e+00, 6.9044485227388090e+00, 7.1125604188199452e+00, 7.3220420300038924e+00, 7.5329721470151476e+00, 7.7454352207811352e+00, 7.9595220192037859e+00, 8.1753303834489426e+00, 8.3929661031022729e+00, 8.6125439341638330e+00, 8.8341887898063280e+00, 9.0580371415563476e+00, 9.2842386787046287e+00, 9.5129582872037410e+00, 9.7443784273495506e+00, 9.9787020140292348e+00, 1.0216155937006269e+01, 1.0456995405743042e+01, 1.0701509369981165e+01, 1.0950027363642045e+01, 1.1202928261484638e+01, 1.1460651651502332e+01, 1.1723712855448614e+01, 1.1992723152311497e+01, 1.2268417614744900e+01, 1.2551694420292197e+01, 1.2843672070795714e+01, 1.3145775742375259e+01, 1.3459873460233181e+01, 1.3788502962262141e+01, 1.4135277255544954e+01, 1.4505681306755122e+01, 1.4908860841455180e+01, 1.5362565713640512e+01, 1.5913261603942480e+01}, + {-1.5974128087803059e+01, -1.5424125215654696e+01, -1.4971005737794270e+01, -1.4568357770032772e+01, -1.4198451817550286e+01, -1.3852152569276431e+01, -1.3523981270171619e+01, -1.3210328989123949e+01, -1.2908660844914866e+01, -1.2617110914147242e+01, -1.2334255626355663e+01, -1.2058977743887230e+01, -1.1790380109646946e+01, -1.1527728492218195e+01, -1.1270412318828130e+01, -1.1017916870112439e+01, -1.0769803079243415e+01, -1.0525692528151630e+01, -1.0285256087783404e+01, -1.0048205171171734e+01, -9.8142848971316887e+00, -9.5832686756941996e+00, -9.3549538681059694e+00, -9.1291582704569425e+00, -8.9057172366410864e+00, -8.6844813033334454e+00, -8.4653142133164536e+00, -8.2480912579473227e+00, -8.0326978775762718e+00, -7.8190284721625867e+00, -7.6069853844711011e+00, -7.3964780259572480e+00, -7.1874221213951852e+00, -6.9797390529218948e+00, -6.7733552877875338e+00, -6.5682018769582182e+00, -6.3642140139886543e+00, -6.1613306454010424e+00, -5.9594941252729363e+00, -5.7586499079259337e+00, -5.5587462735771860e+00, -5.3597340826114390e+00, -5.1615665547873322e+00, -4.9641990702352388e+00, -4.7675889895563337e+00, -4.5716954907107725e+00, -4.3764794207003801e+00, -4.1819031603187895e+00, -3.9879305004682921e+00, -3.7945265287347798e+00, -3.6016575250757095e+00, -3.4092908656157688e+00, -3.2173949336645937e+00, -3.0259390371736554e+00, -2.8348933319378968e+00, -2.6442287499240060e+00, -2.4539169321730845e+00, -2.2639301657825421e+00, -2.0742413245213673e+00, -1.8848238126757204e+00, -1.6956515117588089e+00, -1.5066987297510446e+00, -1.3179401525641421e+00, -1.1293507974466097e+00, -9.4090596806846227e-01, -7.5258121104029896e-01, -5.6435227363645946e-01, -3.7619506250401802e-01, -1.8808560314914807e-01, 
-0.0000000000000000e+00, 1.8808560314914807e-01, 3.7619506250401802e-01, 5.6435227363645946e-01, 7.5258121104029896e-01, 9.4090596806846227e-01, 1.1293507974466097e+00, 1.3179401525641421e+00, 1.5066987297510446e+00, 1.6956515117588089e+00, 1.8848238126757204e+00, 2.0742413245213673e+00, 2.2639301657825421e+00, 2.4539169321730845e+00, 2.6442287499240060e+00, 2.8348933319378968e+00, 3.0259390371736554e+00, 3.2173949336645937e+00, 3.4092908656157688e+00, 3.6016575250757095e+00, 3.7945265287347798e+00, 3.9879305004682921e+00, 4.1819031603187895e+00, 4.3764794207003801e+00, 4.5716954907107725e+00, 4.7675889895563337e+00, 4.9641990702352388e+00, 5.1615665547873322e+00, 5.3597340826114390e+00, 5.5587462735771860e+00, 5.7586499079259337e+00, 5.9594941252729363e+00, 6.1613306454010424e+00, 6.3642140139886543e+00, 6.5682018769582182e+00, 6.7733552877875338e+00, 6.9797390529218948e+00, 7.1874221213951852e+00, 7.3964780259572480e+00, 7.6069853844711011e+00, 7.8190284721625867e+00, 8.0326978775762718e+00, 8.2480912579473227e+00, 8.4653142133164536e+00, 8.6844813033334454e+00, 8.9057172366410864e+00, 9.1291582704569425e+00, 9.3549538681059694e+00, 9.5832686756941996e+00, 9.8142848971316887e+00, 1.0048205171171734e+01, 1.0285256087783404e+01, 1.0525692528151630e+01, 1.0769803079243415e+01, 1.1017916870112439e+01, 1.1270412318828130e+01, 1.1527728492218195e+01, 1.1790380109646946e+01, 1.2058977743887230e+01, 1.2334255626355663e+01, 1.2617110914147242e+01, 1.2908660844914866e+01, 1.3210328989123949e+01, 1.3523981270171619e+01, 1.3852152569276431e+01, 1.4198451817550286e+01, 1.4568357770032772e+01, 1.4971005737794270e+01, 1.5424125215654696e+01, 1.5974128087803059e+01}, + {-1.6034772477060816e+01, -1.5485456646120181e+01, -1.5032917422687504e+01, -1.4630796281434252e+01, -1.4261383921882931e+01, -1.3915555363288536e+01, -1.3587838011990620e+01, -1.3274626979734647e+01, -1.2973390218594389e+01, -1.2682263889584272e+01, -1.2399826014408026e+01, -1.2124960608784908e+01, -1.1856771527137033e+01, -1.1594525371724179e+01, -1.1337612269445431e+01, -1.1085518097551031e+01, -1.0837804305192460e+01, -1.0594092926257336e+01, -1.0354055232179574e+01, -1.0117402994660386e+01, -9.8838816568977812e+00, -9.6532649249849332e+00, -9.4253504326923299e+00, -9.1999562289744645e+00, -8.9769179041114668e+00, -8.7560862173209468e+00, -8.5373251222877080e+00, -8.3205101114907762e+00, -8.1055268182049698e+00, -7.8922698284767430e+00, -7.6806416654983130e+00, -7.4705519165212015e+00, -7.2619164783901100e+00, -7.0546569023913719e+00, -6.8486998227236997e+00, -6.6439764557516492e+00, -6.4404221594710993e+00, -6.2379760444330401e+00, -6.0365806288366217e+00, -5.8361815316904240e+00, -5.6367271989099255e+00, -5.4381686580140975e+00, -5.2404592977393536e+00, -5.0435546694321367e+00, -4.8474123075333448e+00, -4.6519915668456937e+00, -4.4572534745922692e+00, -4.2631605955419172e+00, -4.0696769087032161e+00, -3.8767676942807565e+00, -3.6843994297509557e+00, -3.4925396940542712e+00, -3.3011570790203533e+00, -3.1102211072454056e+00, -2.9197021557295608e+00, -2.7295713846583514e+00, -2.5398006707783964e+00, -2.3503625448744949e+00, -2.1612301329048256e+00, -1.9723771003938499e+00, -1.7837775997197194e+00, -1.5954062199652144e+00, -1.4072379390291252e+00, -1.2192480777190431e+00, -1.0314122555671925e+00, -8.4370634812858625e-01, -6.5610644553571928e-01, -4.6858881209649367e-01, -2.8112984673230629e-01, -9.3706044061378571e-02, 9.3706044061378571e-02, 2.8112984673230629e-01, 4.6858881209649367e-01, 6.5610644553571928e-01, 8.4370634812858625e-01, 
1.0314122555671925e+00, 1.2192480777190431e+00, 1.4072379390291252e+00, 1.5954062199652144e+00, 1.7837775997197194e+00, 1.9723771003938499e+00, 2.1612301329048256e+00, 2.3503625448744949e+00, 2.5398006707783964e+00, 2.7295713846583514e+00, 2.9197021557295608e+00, 3.1102211072454056e+00, 3.3011570790203533e+00, 3.4925396940542712e+00, 3.6843994297509557e+00, 3.8767676942807565e+00, 4.0696769087032161e+00, 4.2631605955419172e+00, 4.4572534745922692e+00, 4.6519915668456937e+00, 4.8474123075333448e+00, 5.0435546694321367e+00, 5.2404592977393536e+00, 5.4381686580140975e+00, 5.6367271989099255e+00, 5.8361815316904240e+00, 6.0365806288366217e+00, 6.2379760444330401e+00, 6.4404221594710993e+00, 6.6439764557516492e+00, 6.8486998227236997e+00, 7.0546569023913719e+00, 7.2619164783901100e+00, 7.4705519165212015e+00, 7.6806416654983130e+00, 7.8922698284767430e+00, 8.1055268182049698e+00, 8.3205101114907762e+00, 8.5373251222877080e+00, 8.7560862173209468e+00, 8.9769179041114668e+00, 9.1999562289744645e+00, 9.4253504326923299e+00, 9.6532649249849332e+00, 9.8838816568977812e+00, 1.0117402994660386e+01, 1.0354055232179574e+01, 1.0594092926257336e+01, 1.0837804305192460e+01, 1.1085518097551031e+01, 1.1337612269445431e+01, 1.1594525371724179e+01, 1.1856771527137033e+01, 1.2124960608784908e+01, 1.2399826014408026e+01, 1.2682263889584272e+01, 1.2973390218594389e+01, 1.3274626979734647e+01, 1.3587838011990620e+01, 1.3915555363288536e+01, 1.4261383921882931e+01, 1.4630796281434252e+01, 1.5032917422687504e+01, 1.5485456646120181e+01, 1.6034772477060816e+01}, + {-1.6095197175178654e+01, -1.5546562503045239e+01, -1.5094598476696428e+01, -1.4692999498678725e+01, -1.4324076300505626e+01, -1.3978714148861499e+01, -1.3651446561974707e+01, -1.3338672661844436e+01, -1.3037863210821454e+01, -1.2747156437216972e+01, -1.2465131941640982e+01, -1.2190674982552025e+01, -1.1922890417116040e+01, -1.1661045673847648e+01, -1.1404531572900545e+01, -1.1152834582481235e+01, -1.0905516662643253e+01, -1.0662200294603309e+01, -1.0422557146004042e+01, -1.0186299343207718e+01, -9.9531726499989794e+00, -9.7229510648991155e+00, -9.4954324906908987e+00, -9.2704352257718199e+00, -9.0477950934492313e+00, -8.8273630721636813e+00, -8.6090033232019607e+00, -8.3925915368661208e+00, -8.1780135360427444e+00, -7.9651640895243876e+00, -7.7539458975483946e+00, -7.5442687197268468e+00, -7.3360486214748111e+00, -7.1292073196522763e+00, -6.9236716117447346e+00, -6.7193728757570428e+00, -6.5162466302615387e+00, -6.3142321458564812e+00, -6.1132721007540169e+00, -5.9133122744035465e+00, -5.7143012740244483e+00, -5.5161902897161745e+00, -5.3189328744684259e+00, -5.1224847459365774e+00, -4.9268036072990391e+00, -4.7318489848907346e+00, -4.5375820806238156e+00, -4.3439656374738016e+00, -4.1509638165353397e+00, -3.9585420843435810e+00, -3.7666671093205584e+00, -3.5753066663455639e+00, -3.3844295485681095e+00, -3.1940054856848250e+00, -3.0040050679901400e+00, -2.8143996755869578e+00, -2.6251614122095890e+00, -2.4362630431683869e+00, -2.2476779369751214e+00, -2.0593800102511661e+00, -1.8713436755579074e+00, -1.6835437918212028e+00, -1.4959556170497912e+00, -1.3085547630718375e+00, -1.1213171520347327e+00, -9.3421897443120272e-01, -7.4723664843005233e-01, -5.6034678030272367e-01, -3.7352612574750421e-01, -1.8675155192184525e-01, -0.0000000000000000e+00, 1.8675155192184525e-01, 3.7352612574750421e-01, 5.6034678030272367e-01, 7.4723664843005233e-01, 9.3421897443120272e-01, 1.1213171520347327e+00, 1.3085547630718375e+00, 1.4959556170497912e+00, 1.6835437918212028e+00, 
1.8713436755579074e+00, 2.0593800102511661e+00, 2.2476779369751214e+00, 2.4362630431683869e+00, 2.6251614122095890e+00, 2.8143996755869578e+00, 3.0040050679901400e+00, 3.1940054856848250e+00, 3.3844295485681095e+00, 3.5753066663455639e+00, 3.7666671093205584e+00, 3.9585420843435810e+00, 4.1509638165353397e+00, 4.3439656374738016e+00, 4.5375820806238156e+00, 4.7318489848907346e+00, 4.9268036072990391e+00, 5.1224847459365774e+00, 5.3189328744684259e+00, 5.5161902897161745e+00, 5.7143012740244483e+00, 5.9133122744035465e+00, 6.1132721007540169e+00, 6.3142321458564812e+00, 6.5162466302615387e+00, 6.7193728757570428e+00, 6.9236716117447346e+00, 7.1292073196522763e+00, 7.3360486214748111e+00, 7.5442687197268468e+00, 7.7539458975483946e+00, 7.9651640895243876e+00, 8.1780135360427444e+00, 8.3925915368661208e+00, 8.6090033232019607e+00, 8.8273630721636813e+00, 9.0477950934492313e+00, 9.2704352257718199e+00, 9.4954324906908987e+00, 9.7229510648991155e+00, 9.9531726499989794e+00, 1.0186299343207718e+01, 1.0422557146004042e+01, 1.0662200294603309e+01, 1.0905516662643253e+01, 1.1152834582481235e+01, 1.1404531572900545e+01, 1.1661045673847648e+01, 1.1922890417116040e+01, 1.2190674982552025e+01, 1.2465131941640982e+01, 1.2747156437216972e+01, 1.3037863210821454e+01, 1.3338672661844436e+01, 1.3651446561974707e+01, 1.3978714148861499e+01, 1.4324076300505626e+01, 1.4692999498678725e+01, 1.5094598476696428e+01, 1.5546562503045239e+01, 1.6095197175178654e+01}, + {-1.6155404542662776e+01, -1.5607445239317764e+01, -1.5156051433346871e+01, -1.4754970030637802e+01, -1.4386531634770627e+01, -1.4041631678205716e+01, -1.3714809742317117e+01, -1.3402468927252324e+01, -1.3102082782981558e+01, -1.2811791588270133e+01, -1.2530176509588186e+01, -1.2256124037680147e+01, -1.1988740023836153e+01, -1.1727292715539413e+01, -1.1471173619903071e+01, -1.1219869790544209e+01, -1.0972943693447091e+01, -1.0730018252630529e+01, -1.0490765527764685e+01, -1.0254897995961329e+01, -1.0022161737889968e+01, -9.7923310409601427e+00, -9.5652040735500723e+00, -9.3405993801743854e+00, -9.1183530139059350e+00, -8.8983161691878188e+00, -8.6803532117101110e+00, -8.4643400264036650e+00, -8.2501626225617972e+00, -8.0377159484935703e+00, -7.8269028782146863e+00, -7.6176333403821728e+00, -7.4098235656060059e+00, -7.2033954328740331e+00, -6.9982758994323033e+00, -6.7943965013094090e+00, -6.5916929139374751e+00, -6.3901045641355303e+00, -6.1895742861825749e+00, -5.9900480158931027e+00, -5.7914745175749145e+00, -5.5938051395422690e+00, -5.3969935945114758e+00, -5.2009957617479348e+00, -5.0057695082847111e+00, -4.8112745269098740e+00, -4.6174721889364516e+00, -4.4243254100357463e+00, -4.2317985276405068e+00, -4.0398571886161809e+00, -3.8484682460617101e+00, -3.6575996642408795e+00, -3.4672204307647947e+00, -3.2773004752487735e+00, -3.0878105937554641e+00, -2.8987223784123866e+00, -2.7100081516581653e+00, -2.5216409046290269e+00, -2.3335942392467470e+00, -2.1458423136124405e+00, -1.9583597903480086e+00, -1.7711217875596625e+00, -1.5841038321261636e+00, -1.3972818150389417e+00, -1.2106319485423875e+00, -1.0241307248408380e+00, -8.3775487615432709e-01, -6.5148133591837221e-01, -4.6528720093408932e-01, -2.7914969428399067e-01, -9.3046128836030015e-02, 9.3046128836030015e-02, 2.7914969428399067e-01, 4.6528720093408932e-01, 6.5148133591837221e-01, 8.3775487615432709e-01, 1.0241307248408380e+00, 1.2106319485423875e+00, 1.3972818150389417e+00, 1.5841038321261636e+00, 1.7711217875596625e+00, 1.9583597903480086e+00, 2.1458423136124405e+00, 2.3335942392467470e+00, 
2.5216409046290269e+00, 2.7100081516581653e+00, 2.8987223784123866e+00, 3.0878105937554641e+00, 3.2773004752487735e+00, 3.4672204307647947e+00, 3.6575996642408795e+00, 3.8484682460617101e+00, 4.0398571886161809e+00, 4.2317985276405068e+00, 4.4243254100357463e+00, 4.6174721889364516e+00, 4.8112745269098740e+00, 5.0057695082847111e+00, 5.2009957617479348e+00, 5.3969935945114758e+00, 5.5938051395422690e+00, 5.7914745175749145e+00, 5.9900480158931027e+00, 6.1895742861825749e+00, 6.3901045641355303e+00, 6.5916929139374751e+00, 6.7943965013094090e+00, 6.9982758994323033e+00, 7.2033954328740331e+00, 7.4098235656060059e+00, 7.6176333403821728e+00, 7.8269028782146863e+00, 8.0377159484935703e+00, 8.2501626225617972e+00, 8.4643400264036650e+00, 8.6803532117101110e+00, 8.8983161691878188e+00, 9.1183530139059350e+00, 9.3405993801743854e+00, 9.5652040735500723e+00, 9.7923310409601427e+00, 1.0022161737889968e+01, 1.0254897995961329e+01, 1.0490765527764685e+01, 1.0730018252630529e+01, 1.0972943693447091e+01, 1.1219869790544209e+01, 1.1471173619903071e+01, 1.1727292715539413e+01, 1.1988740023836153e+01, 1.2256124037680147e+01, 1.2530176509588186e+01, 1.2811791588270133e+01, 1.3102082782981558e+01, 1.3402468927252324e+01, 1.3714809742317117e+01, 1.4041631678205716e+01, 1.4386531634770627e+01, 1.4754970030637802e+01, 1.5156051433346871e+01, 1.5607445239317764e+01, 1.6155404542662776e+01}, + {-1.6215396898128663e+01, -1.5668107263835802e+01, -1.5217278780317361e+01, -1.4816710438579342e+01, -1.4448752556717958e+01, -1.4104310652531494e+01, -1.3777930322527094e+01, -1.3466018613382040e+01, -1.3166051840377859e+01, -1.2876172316156813e+01, -1.2594962760214381e+01, -1.2321310885302282e+01, -1.2054323528364668e+01, -1.1793269748698537e+01, -1.1537541734199792e+01, -1.1286627118459547e+01, -1.1040088868523824e+01, -1.0797550346784112e+01, -1.0558684000851162e+01, -1.0323202654766870e+01, -1.0090852702474850e+01, -9.8614087168236804e+00, -9.6346691284611641e+00, -9.4104527247850562e+00, -9.1885957854652229e+00, -8.9689497178220225e+00, -8.7513790888474610e+00, -8.5357599750069966e+00, -8.3219785688987216e+00, -8.1099299952250909e+00, -7.8995172986236630e+00, -7.6906505735952022e+00, -7.4832462126880364e+00, -7.2772262536958907e+00, -7.0725178102281943e+00, -6.8690525728554430e+00, -6.6667663702937121e+00, -6.4655987819036467e+00, -6.2654927942392939e+00, -6.0663944955663265e+00, -5.8682528032352996e+00, -5.6710192195879507e+00, -5.4746476127280044e+00, -5.2790940190292117e+00, -5.0843164647041039e+00, -4.8902748041336244e+00, -4.6969305729742112e+00, -4.5042468543255083e+00, -4.3121881564674842e+00, -4.1207203008672968e+00, -3.9298103193193850e+00, -3.7394263592217327e+00, -3.5495375961107607e+00, -3.3601141526799938e+00, -3.1711270235961577e+00, -2.9825480055027773e+00, -2.7943496316674543e+00, -2.6065051107863355e+00, -2.4189882695090272e+00, -2.2317734982904458e+00, -2.0448357002136932e+00, -1.8581502424607397e+00, -1.6716929101361060e+00, -1.4854398621734262e+00, -1.2993675890761167e+00, -1.1134528722618340e+00, -9.2767274479622841e-01, -7.4200445331498865e-01, -5.5642542094454162e-01, -3.7091321104119962e-01, -1.8544549157619233e-01, -0.0000000000000000e+00, 1.8544549157619233e-01, 3.7091321104119962e-01, 5.5642542094454162e-01, 7.4200445331498865e-01, 9.2767274479622841e-01, 1.1134528722618340e+00, 1.2993675890761167e+00, 1.4854398621734262e+00, 1.6716929101361060e+00, 1.8581502424607397e+00, 2.0448357002136932e+00, 2.2317734982904458e+00, 2.4189882695090272e+00, 2.6065051107863355e+00, 
2.7943496316674543e+00, 2.9825480055027773e+00, 3.1711270235961577e+00, 3.3601141526799938e+00, 3.5495375961107607e+00, 3.7394263592217327e+00, 3.9298103193193850e+00, 4.1207203008672968e+00, 4.3121881564674842e+00, 4.5042468543255083e+00, 4.6969305729742112e+00, 4.8902748041336244e+00, 5.0843164647041039e+00, 5.2790940190292117e+00, 5.4746476127280044e+00, 5.6710192195879507e+00, 5.8682528032352996e+00, 6.0663944955663265e+00, 6.2654927942392939e+00, 6.4655987819036467e+00, 6.6667663702937121e+00, 6.8690525728554430e+00, 7.0725178102281943e+00, 7.2772262536958907e+00, 7.4832462126880364e+00, 7.6906505735952022e+00, 7.8995172986236630e+00, 8.1099299952250909e+00, 8.3219785688987216e+00, 8.5357599750069966e+00, 8.7513790888474610e+00, 8.9689497178220225e+00, 9.1885957854652229e+00, 9.4104527247850562e+00, 9.6346691284611641e+00, 9.8614087168236804e+00, 1.0090852702474850e+01, 1.0323202654766870e+01, 1.0558684000851162e+01, 1.0797550346784112e+01, 1.1040088868523824e+01, 1.1286627118459547e+01, 1.1537541734199792e+01, 1.1793269748698537e+01, 1.2054323528364668e+01, 1.2321310885302282e+01, 1.2594962760214381e+01, 1.2876172316156813e+01, 1.3166051840377859e+01, 1.3466018613382040e+01, 1.3777930322527094e+01, 1.4104310652531494e+01, 1.4448752556717958e+01, 1.4816710438579342e+01, 1.5217278780317361e+01, 1.5668107263835802e+01, 1.6215396898128663e+01}, + {-1.6275176519333314e+01, -1.5728550942602004e+01, -1.5278282960589307e+01, -1.4878223237370916e+01, -1.4510741650331246e+01, -1.4166753723357122e+01, -1.3840811020790602e+01, -1.3529324504696065e+01, -1.3229773233699543e+01, -1.2940301538002238e+01, -1.2659493677496206e+01, -1.2386238576832154e+01, -1.2119644050283457e+01, -1.1858979961933818e+01, -1.1603639174400232e+01, -1.1353109895917042e+01, -1.1106955589822139e+01, -1.0864800052544570e+01, -1.0626316115639773e+01, -1.0391216946349248e+01, -1.0159249248367633e+01, -9.9301878766205718e+00, -9.7038315207870216e+00, -9.4799992080020576e+00, -9.2585274414535341e+00, -9.0392678383049354e+00, -8.8220851638421109e+00, -8.6068556830666605e+00, -8.3934657688780785e+00, -8.1818107193508762e+00, -7.9717937466932813e+00, -7.7633251081577797e+00, -7.5563213550879356e+00, -7.3507046808793666e+00, -7.1464023522306821e+00, -6.9433462109007360e+00, -6.7414722354477217e+00, -6.5407201542348838e+00, -6.3410331024462083e+00, -6.1423573170383490e+00, -5.9446418645201566e+00, -5.7478383972427567e+00, -5.5519009345358734e+00, -5.3567856655668686e+00, -5.1624507712492340e+00, -4.9688562629036443e+00, -4.7759638356907441e+00, -4.5837367351012581e+00, -4.3921396350144111e+00, -4.2011385260270222e+00, -4.0107006129187415e+00, -3.8207942202581950e+00, -3.6313887052743148e+00, -3.4424543772197556e+00, -3.2539624225418282e+00, -3.0658848352527657e+00, -2.8781943519573039e+00, -2.6908643910529264e+00, -2.5038689956679221e+00, -2.3171827799457509e+00, -2.1307808783218642e+00, -1.9446388974719979e+00, -1.7587328706394580e+00, -1.5730392140737939e+00, -1.3875346853347825e+00, -1.2021963432343084e+00, -1.0170015092047828e+00, -8.3192772989651476e-01, -6.4695274081809961e-01, -4.6205443084366904e-01, -2.7721080741886533e-01, -9.2399962303806829e-02, 9.2399962303806829e-02, 2.7721080741886533e-01, 4.6205443084366904e-01, 6.4695274081809961e-01, 8.3192772989651476e-01, 1.0170015092047828e+00, 1.2021963432343084e+00, 1.3875346853347825e+00, 1.5730392140737939e+00, 1.7587328706394580e+00, 1.9446388974719979e+00, 2.1307808783218642e+00, 2.3171827799457509e+00, 2.5038689956679221e+00, 2.6908643910529264e+00, 
2.8781943519573039e+00, 3.0658848352527657e+00, 3.2539624225418282e+00, 3.4424543772197556e+00, 3.6313887052743148e+00, 3.8207942202581950e+00, 4.0107006129187415e+00, 4.2011385260270222e+00, 4.3921396350144111e+00, 4.5837367351012581e+00, 4.7759638356907441e+00, 4.9688562629036443e+00, 5.1624507712492340e+00, 5.3567856655668686e+00, 5.5519009345358734e+00, 5.7478383972427567e+00, 5.9446418645201566e+00, 6.1423573170383490e+00, 6.3410331024462083e+00, 6.5407201542348838e+00, 6.7414722354477217e+00, 6.9433462109007360e+00, 7.1464023522306821e+00, 7.3507046808793666e+00, 7.5563213550879356e+00, 7.7633251081577797e+00, 7.9717937466932813e+00, 8.1818107193508762e+00, 8.3934657688780785e+00, 8.6068556830666605e+00, 8.8220851638421109e+00, 9.0392678383049354e+00, 9.2585274414535341e+00, 9.4799992080020576e+00, 9.7038315207870216e+00, 9.9301878766205718e+00, 1.0159249248367633e+01, 1.0391216946349248e+01, 1.0626316115639773e+01, 1.0864800052544570e+01, 1.1106955589822139e+01, 1.1353109895917042e+01, 1.1603639174400232e+01, 1.1858979961933818e+01, 1.2119644050283457e+01, 1.2386238576832154e+01, 1.2659493677496206e+01, 1.2940301538002238e+01, 1.3229773233699543e+01, 1.3529324504696065e+01, 1.3840811020790602e+01, 1.4166753723357122e+01, 1.4510741650331246e+01, 1.4878223237370916e+01, 1.5278282960589307e+01, 1.5728550942602004e+01, 1.6275176519333314e+01}, + {-1.6334745644175054e+01, -1.5788778599783345e+01, -1.5339066373560392e+01, -1.4939510896644496e+01, -1.4572501452752805e+01, -1.4228963493774261e+01, -1.3903454505286284e+01, -1.3592389334062910e+01, -1.3293249760441302e+01, -1.3004182116116599e+01, -1.2723772188949674e+01, -1.2450910105547806e+01, -1.2184704649330305e+01, -1.1924426482264677e+01, -1.1669469135738963e+01, -1.1419321387402469e+01, -1.1173547192211004e+01, -1.0931770776387262e+01, -1.0693665351523348e+01, -1.0458944424415622e+01, -1.0227355005071125e+01, -9.9986722272145627e+00, -9.7726950364017036e+00, -9.5492426964438799e+00, -9.3281519310537870e+00, -9.1092745642913098e+00, -8.8924755568174323e+00, -8.6776313592631116e+00, -8.4646285219201989e+00, -8.2533625133112452e+00, -8.0437367102645929e+00, -7.8356615297967140e+00, -7.6290536790115411e+00, -7.4238355038151660e+00, -7.2199344208383769e+00, -7.0172824197970574e+00, -6.8158156257772653e+00, -6.6154739127391755e+00, -6.4162005609911104e+00, -6.2179419525665987e+00, -6.0206472994014675e+00, -5.8242683999987914e+00, -5.6287594209215639e+00, -5.4340766999932706e+00, -5.2401785685362707e+00, -5.0470251903539998e+00, -4.8545784154787137e+00, -4.6628016469727172e+00, -4.4716597192961824e+00, -4.2811187869459504e+00, -4.0911462222326103e+00, -3.9017105212024688e+00, -3.7127812168303533e+00, -3.5243287987118928e+00, -3.3363246385723251e+00, -3.1487409209853521e+00, -2.9615505787617051e+00, -2.7747272325244832e+00, -2.5882451340382215e+00, -2.4020791129020052e+00, -2.2162045262547485e+00, -2.0305972111736730e+00, -1.8452334394757037e+00, -1.6600898746564745e+00, -1.4751435307233549e+00, -1.2903717326977489e+00, -1.1057520785781945e+00, -9.2126240256979075e-01, -7.3688073939740739e-01, -5.5258528953022001e-01, -3.6835438515345242e-01, -1.8416645673001250e-01, -0.0000000000000000e+00, 1.8416645673001250e-01, 3.6835438515345242e-01, 5.5258528953022001e-01, 7.3688073939740739e-01, 9.2126240256979075e-01, 1.1057520785781945e+00, 1.2903717326977489e+00, 1.4751435307233549e+00, 1.6600898746564745e+00, 1.8452334394757037e+00, 2.0305972111736730e+00, 2.2162045262547485e+00, 2.4020791129020052e+00, 2.5882451340382215e+00, 
2.7747272325244832e+00, 2.9615505787617051e+00, 3.1487409209853521e+00, 3.3363246385723251e+00, 3.5243287987118928e+00, 3.7127812168303533e+00, 3.9017105212024688e+00, 4.0911462222326103e+00, 4.2811187869459504e+00, 4.4716597192961824e+00, 4.6628016469727172e+00, 4.8545784154787137e+00, 5.0470251903539998e+00, 5.2401785685362707e+00, 5.4340766999932706e+00, 5.6287594209215639e+00, 5.8242683999987914e+00, 6.0206472994014675e+00, 6.2179419525665987e+00, 6.4162005609911104e+00, 6.6154739127391755e+00, 6.8158156257772653e+00, 7.0172824197970574e+00, 7.2199344208383769e+00, 7.4238355038151660e+00, 7.6290536790115411e+00, 7.8356615297967140e+00, 8.0437367102645929e+00, 8.2533625133112452e+00, 8.4646285219201989e+00, 8.6776313592631116e+00, 8.8924755568174323e+00, 9.1092745642913098e+00, 9.3281519310537870e+00, 9.5492426964438799e+00, 9.7726950364017036e+00, 9.9986722272145627e+00, 1.0227355005071125e+01, 1.0458944424415622e+01, 1.0693665351523348e+01, 1.0931770776387262e+01, 1.1173547192211004e+01, 1.1419321387402469e+01, 1.1669469135738963e+01, 1.1924426482264677e+01, 1.2184704649330305e+01, 1.2450910105547806e+01, 1.2723772188949674e+01, 1.3004182116116599e+01, 1.3293249760441302e+01, 1.3592389334062910e+01, 1.3903454505286284e+01, 1.4228963493774261e+01, 1.4572501452752805e+01, 1.4939510896644496e+01, 1.5339066373560392e+01, 1.5788778599783345e+01, 1.6334745644175054e+01}, + {-1.6394106471662131e+01, -1.5848792518737463e+01, -1.5399631376122628e+01, -1.5000575841923927e+01, -1.4634034455459645e+01, -1.4290942519672281e+01, -1.3965863395458523e+01, -1.3655215784079445e+01, -1.3356484166275854e+01, -1.3067816859418864e+01, -1.2787801167106462e+01, -1.2515328408121787e+01, -1.2249508326984577e+01, -1.1989612376763933e+01, -1.1735034751777302e+01, -1.1485264793960161e+01, -1.1239866945305195e+01, -1.0998465857673063e+01, -1.0760735118869476e+01, -1.0526388571683613e+01, -1.0295173529077774e+01, -1.0066865400378582e+01, -9.8412633839451633e+00, -9.6181869772854895e+00, -9.3974731217264083e+00, -9.1789738453613765e+00, -8.9625543013934692e+00, -8.7480911232640413e+00, -8.5354710358385653e+00, -8.3245896752567052e+00, -8.1153505801125796e+00, -7.9076643242985316e+00, -7.7014477677473518e+00, -7.4966234058915848e+00, -7.2931188022489746e+00, -7.0908660913775821e+00, -6.8898015416986063e+00, -6.6898651694904157e+00, -6.4910003968128018e+00, -6.2931537473009849e+00, -6.0962745747319902e+00, -5.9003148200559909e+00, -5.7052287932366612e+00, -5.5109729767842897e+00, -5.3175058483147666e+00, -5.1247877198432272e+00, -4.9327805918366323e+00, -4.7414480203154818e+00, -4.5507549955198634e+00, -4.3606678308461584e+00, -4.1711540609235342e+00, -3.9821823478385219e+00, -3.7937223946353149e+00, -3.6057448653220203e+00, -3.4182213107015205e+00, -3.2311240994220300e+00, -3.0444263537086158e+00, -2.8581018892943821e+00, -2.6721251591199171e+00, -2.4864712004130762e+00, -2.3011155847990197e+00, -2.1160343711234311e+00, -1.9312040607006651e+00, -1.7466015547236411e+00, -1.5622041135941906e+00, -1.3779893179515634e+00, -1.1939350311932622e+00, -1.0100193632965979e+00, -8.2622063576151961e-01, -6.4251734750563116e-01, -4.5888814155095509e-01, -2.7531177234913912e-01, -9.1767073597484664e-02, 9.1767073597484664e-02, 2.7531177234913912e-01, 4.5888814155095509e-01, 6.4251734750563116e-01, 8.2622063576151961e-01, 1.0100193632965979e+00, 1.1939350311932622e+00, 1.3779893179515634e+00, 1.5622041135941906e+00, 1.7466015547236411e+00, 1.9312040607006651e+00, 2.1160343711234311e+00, 2.3011155847990197e+00, 
2.4864712004130762e+00, 2.6721251591199171e+00, 2.8581018892943821e+00, 3.0444263537086158e+00, 3.2311240994220300e+00, 3.4182213107015205e+00, 3.6057448653220203e+00, 3.7937223946353149e+00, 3.9821823478385219e+00, 4.1711540609235342e+00, 4.3606678308461584e+00, 4.5507549955198634e+00, 4.7414480203154818e+00, 4.9327805918366323e+00, 5.1247877198432272e+00, 5.3175058483147666e+00, 5.5109729767842897e+00, 5.7052287932366612e+00, 5.9003148200559909e+00, 6.0962745747319902e+00, 6.2931537473009849e+00, 6.4910003968128018e+00, 6.6898651694904157e+00, 6.8898015416986063e+00, 7.0908660913775821e+00, 7.2931188022489746e+00, 7.4966234058915848e+00, 7.7014477677473518e+00, 7.9076643242985316e+00, 8.1153505801125796e+00, 8.3245896752567052e+00, 8.5354710358385653e+00, 8.7480911232640413e+00, 8.9625543013934692e+00, 9.1789738453613765e+00, 9.3974731217264083e+00, 9.6181869772854895e+00, 9.8412633839451633e+00, 1.0066865400378582e+01, 1.0295173529077774e+01, 1.0526388571683613e+01, 1.0760735118869476e+01, 1.0998465857673063e+01, 1.1239866945305195e+01, 1.1485264793960161e+01, 1.1735034751777302e+01, 1.1989612376763933e+01, 1.2249508326984577e+01, 1.2515328408121787e+01, 1.2787801167106462e+01, 1.3067816859418864e+01, 1.3356484166275854e+01, 1.3655215784079445e+01, 1.3965863395458523e+01, 1.4290942519672281e+01, 1.4634034455459645e+01, 1.5000575841923927e+01, 1.5399631376122628e+01, 1.5848792518737463e+01, 1.6394106471662131e+01}, + {-1.6453261162851238e+01, -1.5908594943006905e+01, -1.5459980283706434e+01, -1.5061420455716602e+01, -1.4695343105401838e+01, -1.4352693310923225e+01, -1.4028040263249208e+01, -1.3717806488350135e+01, -1.3419479146381439e+01, -1.3131208524813520e+01, -1.2851583430941135e+01, -1.2579496366100177e+01, -1.2314058027999442e+01, -1.2054540654144855e+01, -1.1800339096046955e+01, -1.1550943254895040e+01, -1.1305918055227691e+01, -1.1064888570473238e+01, -1.0827528760910019e+01, -1.0593552801837916e+01, -1.0362708305895856e+01, -1.0134770954893138e+01, -9.9095401969966606e+00, -9.6868357605097319e+00, -9.4664948015418702e+00, -9.2483695494377951e+00, -9.0323253471918115e+00, -8.8182390083208837e+00, -8.6059974295318415e+00, -8.3954964118396482e+00, -8.1866396528418637e+00, -7.9793378805138149e+00, -7.7735081047843622e+00, -7.5690729677310218e+00, -7.3659601768199030e+00, -7.1641020084474496e+00, -6.9634348712930993e+00, -6.7638989207955804e+00, -6.5654377175196297e+00, -6.3679979233591490e+00, -6.1715290304849564e+00, -5.9759831187344989e+00, -5.7813146377916294e+00, -5.5874802110437694e+00, -5.3944384584526848e+00, -5.2021498361504346e+00, -5.0105764907872219e+00, -4.8196821269235706e+00, -4.6294318859840997e+00, -4.4397922354810584e+00, -4.2507308673785191e+00, -4.0622166046072063e+00, -3.8742193148591682e+00, -3.6867098308940789e+00, -3.4996598766773372e+00, -3.3130419987465562e+00, -3.1268295022692216e+00, -2.9409963913117498e+00, -2.7555173128901118e+00, -2.5703675044157226e+00, -2.3855227441881959e+00, -2.2009593046196674e+00, -2.0166539079042916e+00, -1.8325836838717240e+00, -1.6487261297853975e+00, -1.4650590718655645e+00, -1.2815606283337120e+00, -1.0982091737893589e+00, -9.1498330474263811e-01, -7.3186180613665275e-01, -5.4882361870252716e-01, -3.6584780699750397e-01, -1.8291352798246857e-01, -0.0000000000000000e+00, 1.8291352798246857e-01, 3.6584780699750397e-01, 5.4882361870252716e-01, 7.3186180613665275e-01, 9.1498330474263811e-01, 1.0982091737893589e+00, 1.2815606283337120e+00, 1.4650590718655645e+00, 1.6487261297853975e+00, 1.8325836838717240e+00, 
2.0166539079042916e+00, 2.2009593046196674e+00, 2.3855227441881959e+00, 2.5703675044157226e+00, 2.7555173128901118e+00, 2.9409963913117498e+00, 3.1268295022692216e+00, 3.3130419987465562e+00, 3.4996598766773372e+00, 3.6867098308940789e+00, 3.8742193148591682e+00, 4.0622166046072063e+00, 4.2507308673785191e+00, 4.4397922354810584e+00, 4.6294318859840997e+00, 4.8196821269235706e+00, 5.0105764907872219e+00, 5.2021498361504346e+00, 5.3944384584526848e+00, 5.5874802110437694e+00, 5.7813146377916294e+00, 5.9759831187344989e+00, 6.1715290304849564e+00, 6.3679979233591490e+00, 6.5654377175196297e+00, 6.7638989207955804e+00, 6.9634348712930993e+00, 7.1641020084474496e+00, 7.3659601768199030e+00, 7.5690729677310218e+00, 7.7735081047843622e+00, 7.9793378805138149e+00, 8.1866396528418637e+00, 8.3954964118396482e+00, 8.6059974295318415e+00, 8.8182390083208837e+00, 9.0323253471918115e+00, 9.2483695494377951e+00, 9.4664948015418702e+00, 9.6868357605097319e+00, 9.9095401969966606e+00, 1.0134770954893138e+01, 1.0362708305895856e+01, 1.0593552801837916e+01, 1.0827528760910019e+01, 1.1064888570473238e+01, 1.1305918055227691e+01, 1.1550943254895040e+01, 1.1800339096046955e+01, 1.2054540654144855e+01, 1.2314058027999442e+01, 1.2579496366100177e+01, 1.2851583430941135e+01, 1.3131208524813520e+01, 1.3419479146381439e+01, 1.3717806488350135e+01, 1.4028040263249208e+01, 1.4352693310923225e+01, 1.4695343105401838e+01, 1.5061420455716602e+01, 1.5459980283706434e+01, 1.5908594943006905e+01, 1.6453261162851238e+01}, + {-1.6512211841757207e+01, -1.5968188077282482e+01, -1.5520115371292015e+01, -1.5122047078570679e+01, -1.4756429806104775e+01, -1.4414218332528858e+01, -1.4089987634289908e+01, -1.3780164032724846e+01, -1.3482237346726031e+01, -1.3194359818522189e+01, -1.2915121747251227e+01, -1.2643416807332409e+01, -1.2378356641882881e+01, -1.2119214266294843e+01, -1.1865385183637930e+01, -1.1616359849416588e+01, -1.1371703666311570e+01, -1.1131042125331231e+01, -1.0894049555564962e+01, -1.0660440461418377e+01, -1.0429962752004187e+01, -1.0202392378570206e+01, -9.9775290361704698e+00, -9.7551926810776539e+00, -9.5352206814286085e+00, -9.3174654651143189e+00, -9.1017925622483045e+00, -8.8880789637690558e+00, -8.6762117355753663e+00, -8.4660868409009815e+00, -8.2576081336726084e+00, -8.0506864932467330e+00, -7.8452390768098423e+00, -7.6411886703008571e+00, -7.4384631222974562e+00, -7.2369948481366277e+00, -7.0367203937895502e+00, -6.8375800508126918e+00, -6.6395175151495431e+00, -6.4424795837354694e+00, -6.2464158838193349e+00, -6.0512786307039113e+00, -5.8570224102572590e+00, -5.6636039830858733e+00, -5.4709821077089327e+00, -5.2791173804478735e+00, -5.0879720900604992e+00, -4.8975100854141935e+00, -4.7076966547174930e+00, -4.5184984150200371e+00, -4.3298832108534802e+00, -4.1418200210249481e+00, -3.9542788726937901e+00, -3.7672307619648824e+00, -3.5806475803201039e+00, -3.3945020462860152e+00, -3.2087676418019360e+00, -3.0234185528101056e+00, -2.8384296136395832e+00, -2.6537762547990797e+00, -2.4694344538319206e+00, -2.2853806889194859e+00, -2.1015918949484664e+00, -1.9180454217825835e+00, -1.7347189945015418e+00, -1.5515906753892701e+00, -1.3686388274702970e+00, -1.1858420794076876e+00, -1.0031792915885485e+00, -8.2062952323391314e-01, -6.3817200037899757e-01, -4.5578608457752007e-01, -2.7345124219010619e-01, -9.1147014121871414e-02, 9.1147014121871414e-02, 2.7345124219010619e-01, 4.5578608457752007e-01, 6.3817200037899757e-01, 8.2062952323391314e-01, 1.0031792915885485e+00, 1.1858420794076876e+00, 
1.3686388274702970e+00, 1.5515906753892701e+00, 1.7347189945015418e+00, 1.9180454217825835e+00, 2.1015918949484664e+00, 2.2853806889194859e+00, 2.4694344538319206e+00, 2.6537762547990797e+00, 2.8384296136395832e+00, 3.0234185528101056e+00, 3.2087676418019360e+00, 3.3945020462860152e+00, 3.5806475803201039e+00, 3.7672307619648824e+00, 3.9542788726937901e+00, 4.1418200210249481e+00, 4.3298832108534802e+00, 4.5184984150200371e+00, 4.7076966547174930e+00, 4.8975100854141935e+00, 5.0879720900604992e+00, 5.2791173804478735e+00, 5.4709821077089327e+00, 5.6636039830858733e+00, 5.8570224102572590e+00, 6.0512786307039113e+00, 6.2464158838193349e+00, 6.4424795837354694e+00, 6.6395175151495431e+00, 6.8375800508126918e+00, 7.0367203937895502e+00, 7.2369948481366277e+00, 7.4384631222974562e+00, 7.6411886703008571e+00, 7.8452390768098423e+00, 8.0506864932467330e+00, 8.2576081336726084e+00, 8.4660868409009815e+00, 8.6762117355753663e+00, 8.8880789637690558e+00, 9.1017925622483045e+00, 9.3174654651143189e+00, 9.5352206814286085e+00, 9.7551926810776539e+00, 9.9775290361704698e+00, 1.0202392378570206e+01, 1.0429962752004187e+01, 1.0660440461418377e+01, 1.0894049555564962e+01, 1.1131042125331231e+01, 1.1371703666311570e+01, 1.1616359849416588e+01, 1.1865385183637930e+01, 1.2119214266294843e+01, 1.2378356641882881e+01, 1.2643416807332409e+01, 1.2915121747251227e+01, 1.3194359818522189e+01, 1.3482237346726031e+01, 1.3780164032724846e+01, 1.4089987634289908e+01, 1.4414218332528858e+01, 1.4756429806104775e+01, 1.5122047078570679e+01, 1.5520115371292015e+01, 1.5968188077282482e+01, 1.6512211841757207e+01}, + {-1.6570960596234816e+01, -1.6027574088336884e+01, -1.5580038874389315e+01, -1.5182458010099252e+01, -1.4817296918736639e+01, -1.4475520005731354e+01, -1.4151707989055947e+01, -1.3842290956496822e+01, -1.3544761365310002e+01, -1.3257273397371849e+01, -1.2978418831992128e+01, -1.2707092507353986e+01, -1.2442407004329516e+01, -1.2183636109757821e+01, -1.1930175972733117e+01, -1.1681517598227204e+01, -1.1437226862743914e+01, -1.1196929670964046e+01, -1.0960300717203255e+01, -1.0727054831642461e+01, -1.0496940216738453e+01, -1.0269733090205794e+01, -1.0045233391137220e+01, -9.8232613010212955e+00, -9.6036543973400637e+00, -9.3862653039000534e+00, -9.1709597353376804e+00, -8.9576148574362815e+00, -8.7461179027166409e+00, -8.5363649940564841e+00, -8.3282601391216602e+00, -8.1217143660350786e+00, -7.9166449765926146e+00, -7.7129748979045933e+00, -7.5106321169206671e+00, -7.3095491851216430e+00, -7.1096627829092727e+00, -6.9109133350249667e+00, -6.7132446697794288e+00, -6.5166037160520816e+00, -6.3209402329793747e+00, -6.1262065680386195e+00, -5.9323574398835213e+00, -5.7393497428256754e+00, -5.5471423703043676e+00, -5.3556960550616148e+00, -5.1649732240539885e+00, -4.9749378663979575e+00, -4.7855554128699298e+00, -4.5967926256728040e+00, -4.4086174973432648e+00, -4.2209991578129564e+00, -4.0339077887557737e+00, -3.8473145444559340e+00, -3.6611914785198536e+00, -3.4755114758312047e+00, -3.2902481892147279e+00, -3.1053759803318370e+00, -2.9208698643810656e+00, -2.7367054582199937e+00, -2.5528589315633066e+00, -2.3693069609448871e+00, -2.1860266861608908e+00, -2.0029956689361597e+00, -1.8201918535785517e+00, -1.6375935294051731e+00, -1.4551792947414237e+00, -1.2729280223085093e+00, -1.0908188258278129e+00, -9.0883102768153856e-01, -7.2694412747841530e-01, -5.4513777138121788e-01, -3.6339172205944553e-01, -1.8168582913588810e-01, -0.0000000000000000e+00, 1.8168582913588810e-01, 3.6339172205944553e-01, 
5.4513777138121788e-01, 7.2694412747841530e-01, 9.0883102768153856e-01, 1.0908188258278129e+00, 1.2729280223085093e+00, 1.4551792947414237e+00, 1.6375935294051731e+00, 1.8201918535785517e+00, 2.0029956689361597e+00, 2.1860266861608908e+00, 2.3693069609448871e+00, 2.5528589315633066e+00, 2.7367054582199937e+00, 2.9208698643810656e+00, 3.1053759803318370e+00, 3.2902481892147279e+00, 3.4755114758312047e+00, 3.6611914785198536e+00, 3.8473145444559340e+00, 4.0339077887557737e+00, 4.2209991578129564e+00, 4.4086174973432648e+00, 4.5967926256728040e+00, 4.7855554128699298e+00, 4.9749378663979575e+00, 5.1649732240539885e+00, 5.3556960550616148e+00, 5.5471423703043676e+00, 5.7393497428256754e+00, 5.9323574398835213e+00, 6.1262065680386195e+00, 6.3209402329793747e+00, 6.5166037160520816e+00, 6.7132446697794288e+00, 6.9109133350249667e+00, 7.1096627829092727e+00, 7.3095491851216430e+00, 7.5106321169206671e+00, 7.7129748979045933e+00, 7.9166449765926146e+00, 8.1217143660350786e+00, 8.3282601391216602e+00, 8.5363649940564841e+00, 8.7461179027166409e+00, 8.9576148574362815e+00, 9.1709597353376804e+00, 9.3862653039000534e+00, 9.6036543973400637e+00, 9.8232613010212955e+00, 1.0045233391137220e+01, 1.0269733090205794e+01, 1.0496940216738453e+01, 1.0727054831642461e+01, 1.0960300717203255e+01, 1.1196929670964046e+01, 1.1437226862743914e+01, 1.1681517598227204e+01, 1.1930175972733117e+01, 1.2183636109757821e+01, 1.2442407004329516e+01, 1.2707092507353986e+01, 1.2978418831992128e+01, 1.3257273397371849e+01, 1.3544761365310002e+01, 1.3842290956496822e+01, 1.4151707989055947e+01, 1.4475520005731354e+01, 1.4817296918736639e+01, 1.5182458010099252e+01, 1.5580038874389315e+01, 1.6027574088336884e+01, 1.6570960596234816e+01}, + {-1.6629509478833860e+01, -1.6086755105929722e+01, -1.5639752989987683e+01, -1.5242655509972604e+01, -1.4877946763142496e+01, -1.4536600709088953e+01, -1.4213203763983850e+01, -1.3904189753562465e+01, -1.3607053753368865e+01, -1.3319951870041416e+01, -1.3041477351568494e+01, -1.2770526190723876e+01, -1.2506211898605262e+01, -1.2247809027167504e+01, -1.1994714366091593e+01, -1.1746419465057167e+01, -1.1502490670154140e+01, -1.1262554295905716e+01, -1.1026285398343321e+01, -1.0793399130164836e+01, -1.0563643984112028e+01, -1.0336796441464246e+01, -1.0112656682574075e+01, -9.8910451114622759e+00, -9.6717995123443519e+00, -9.4547727023830390e+00, -9.2398305782140078e+00, -9.0268504779630501e+00, -8.8157197982791242e+00, -8.6063348191874436e+00, -8.3985996995836860e+00, -8.1924256138258222e+00, -7.9877300057571530e+00, -7.7844359410588435e+00, -7.5824715424057798e+00, -7.3817694947224117e+00, -7.1822666100804051e+00, -6.9839034435780709e+00, -6.7866239529910715e+00, -6.5903751961596155e+00, -6.3951070610365948e+00, -6.2007720241079047e+00, -6.0073249335450427e+00, -5.8147228139876921e+00, -5.6229246903015975e+00, -5.4318914280313058e+00, -5.2415855885816516e+00, -5.0519712974268067e+00, -4.8630141238699967e+00, -4.6746809710674029e+00, -4.4869399751921195e+00, -4.2997604127527840e+00, -4.1131126152005661e+00, -3.9269678900605394e+00, -3.7412984479117779e+00, -3.5560773346168779e+00, -3.3712783682677476e+00, -3.1868760803720186e+00, -3.0028456608544345e+00, -2.8191629064911972e+00, -2.6358041724333074e+00, -2.4527463265082488e+00, -2.2699667060184630e+00, -2.0874430767805627e+00, -1.9051535941715365e+00, -1.7230767659677173e+00, -1.5411914167793415e+00, -1.3594766538983880e+00, -1.1779118343903046e+00, -9.9647653327140173e-01, -8.1515051262328730e-01, -6.3391369150388599e-01, 
-4.5274611652143021e-01, -2.7162793294346238e-01, -9.0539356217393022e-02, 9.0539356217393022e-02, 2.7162793294346238e-01, 4.5274611652143021e-01, 6.3391369150388599e-01, 8.1515051262328730e-01, 9.9647653327140173e-01, 1.1779118343903046e+00, 1.3594766538983880e+00, 1.5411914167793415e+00, 1.7230767659677173e+00, 1.9051535941715365e+00, 2.0874430767805627e+00, 2.2699667060184630e+00, 2.4527463265082488e+00, 2.6358041724333074e+00, 2.8191629064911972e+00, 3.0028456608544345e+00, 3.1868760803720186e+00, 3.3712783682677476e+00, 3.5560773346168779e+00, 3.7412984479117779e+00, 3.9269678900605394e+00, 4.1131126152005661e+00, 4.2997604127527840e+00, 4.4869399751921195e+00, 4.6746809710674029e+00, 4.8630141238699967e+00, 5.0519712974268067e+00, 5.2415855885816516e+00, 5.4318914280313058e+00, 5.6229246903015975e+00, 5.8147228139876921e+00, 6.0073249335450427e+00, 6.2007720241079047e+00, 6.3951070610365948e+00, 6.5903751961596155e+00, 6.7866239529910715e+00, 6.9839034435780709e+00, 7.1822666100804051e+00, 7.3817694947224117e+00, 7.5824715424057798e+00, 7.7844359410588435e+00, 7.9877300057571530e+00, 8.1924256138258222e+00, 8.3985996995836860e+00, 8.6063348191874436e+00, 8.8157197982791242e+00, 9.0268504779630501e+00, 9.2398305782140078e+00, 9.4547727023830390e+00, 9.6717995123443519e+00, 9.8910451114622759e+00, 1.0112656682574075e+01, 1.0336796441464246e+01, 1.0563643984112028e+01, 1.0793399130164836e+01, 1.1026285398343321e+01, 1.1262554295905716e+01, 1.1502490670154140e+01, 1.1746419465057167e+01, 1.1994714366091593e+01, 1.2247809027167504e+01, 1.2506211898605262e+01, 1.2770526190723876e+01, 1.3041477351568494e+01, 1.3319951870041416e+01, 1.3607053753368865e+01, 1.3904189753562465e+01, 1.4213203763983850e+01, 1.4536600709088953e+01, 1.4877946763142496e+01, 1.5242655509972604e+01, 1.5639752989987683e+01, 1.6086755105929722e+01, 1.6629509478833860e+01}, + {-1.6687860507628386e+01, -1.6145733223684985e+01, -1.5699259877476434e+01, -1.5302641798879872e+01, -1.4938381618846188e+01, -1.4597462779517947e+01, -1.4274477352553639e+01, -1.3965862873544308e+01, -1.3669117016537621e+01, -1.3382397798268309e+01, -1.3104299924083964e+01, -1.2833720532318454e+01, -1.2569774056886668e+01, -1.2311735808633442e+01, -1.2059003212482786e+01, -1.1811068358148432e+01, -1.1567498057148995e+01, -1.1327919030095220e+01, -1.1092006691295751e+01, -1.0859476512776732e+01, -1.0630077274574017e+01, -1.0403585718697158e+01, -1.0179802264046788e+01, -9.9585475345593846e+00, -9.7396595186399217e+00, -9.5229912243166375e+00, -9.3084087277706491e+00, -9.0957895370391153e+00, -8.8850212104784116e+00, -8.6760001828398110e+00, -8.4686307618168346e+00, -8.2628242655509627e+00, -8.0584982774535074e+00, -7.8555759992614407e+00, -7.6539856868168998e+00, -7.4536601558800220e+00, -7.2545363475276838e+00, -7.0565549444870097e+00, -6.8596600312005922e+00, -6.6637987915949521e+00, -6.4689212394819462e+00, -6.2749799773089512e+00, -6.0819299796217976e+00, -5.8897283981415303e+00, -5.6983343858032613e+00, -5.5077089374792596e+00, -5.3178147454224645e+00, -5.1286160677313077e+00, -4.9400786083607757e+00, -4.7521694073949581e+00, -4.5648567404584961e+00, -4.3781100262830321e+00, -4.1918997415637014e+00, -4.0061973423430199e+00, -3.8209751912478049e+00, -3.6362064899810247e+00, -3.4518652165367119e+00, -3.2679260666634748e+00, -3.0843643991522391e+00, -2.9011561845674412e+00, -2.7182779570790365e+00, -2.5357067690860005e+00, -2.3534201483511836e+00, -2.1713960573929474e+00, -1.9896128549013927e+00, -1.8080492589666075e+00, -1.6266843119235277e+00, 
-1.4454973466329897e+00, -1.2644679540316108e+00, -1.0835759517944645e+00, -9.0280135396429395e-01, -7.2212434140935688e-01, -5.4152523297908439e-01, -3.6098445723261258e-01, -1.8048252462003789e-01, -0.0000000000000000e+00, 1.8048252462003789e-01, 3.6098445723261258e-01, 5.4152523297908439e-01, 7.2212434140935688e-01, 9.0280135396429395e-01, 1.0835759517944645e+00, 1.2644679540316108e+00, 1.4454973466329897e+00, 1.6266843119235277e+00, 1.8080492589666075e+00, 1.9896128549013927e+00, 2.1713960573929474e+00, 2.3534201483511836e+00, 2.5357067690860005e+00, 2.7182779570790365e+00, 2.9011561845674412e+00, 3.0843643991522391e+00, 3.2679260666634748e+00, 3.4518652165367119e+00, 3.6362064899810247e+00, 3.8209751912478049e+00, 4.0061973423430199e+00, 4.1918997415637014e+00, 4.3781100262830321e+00, 4.5648567404584961e+00, 4.7521694073949581e+00, 4.9400786083607757e+00, 5.1286160677313077e+00, 5.3178147454224645e+00, 5.5077089374792596e+00, 5.6983343858032613e+00, 5.8897283981415303e+00, 6.0819299796217976e+00, 6.2749799773089512e+00, 6.4689212394819462e+00, 6.6637987915949521e+00, 6.8596600312005922e+00, 7.0565549444870097e+00, 7.2545363475276838e+00, 7.4536601558800220e+00, 7.6539856868168998e+00, 7.8555759992614407e+00, 8.0584982774535074e+00, 8.2628242655509627e+00, 8.4686307618168346e+00, 8.6760001828398110e+00, 8.8850212104784116e+00, 9.0957895370391153e+00, 9.3084087277706491e+00, 9.5229912243166375e+00, 9.7396595186399217e+00, 9.9585475345593846e+00, 1.0179802264046788e+01, 1.0403585718697158e+01, 1.0630077274574017e+01, 1.0859476512776732e+01, 1.1092006691295751e+01, 1.1327919030095220e+01, 1.1567498057148995e+01, 1.1811068358148432e+01, 1.2059003212482786e+01, 1.2311735808633442e+01, 1.2569774056886668e+01, 1.2833720532318454e+01, 1.3104299924083964e+01, 1.3382397798268309e+01, 1.3669117016537621e+01, 1.3965862873544308e+01, 1.4274477352553639e+01, 1.4597462779517947e+01, 1.4938381618846188e+01, 1.5302641798879872e+01, 1.5699259877476434e+01, 1.6145733223684985e+01, 1.6687860507628386e+01}, + {-1.6746015667021069e+01, -1.6204510499941978e+01, -1.5758561659537367e+01, -1.5362419059461168e+01, -1.4998603726021303e+01, -1.4658108513302281e+01, -1.4335531106337195e+01, -1.4027312722878635e+01, -1.3730953615978237e+01, -1.3444613698016527e+01, -1.3166889120550762e+01, -1.2896678158583638e+01, -1.2633096161556749e+01, -1.2375419193081754e+01, -1.2123045308073459e+01, -1.1875467131689264e+01, -1.1632251936796413e+01, -1.1393026846411182e+01, -1.1157467629750515e+01, -1.0925290075047540e+01, -1.0696243246707187e+01, -1.0470104144699656e+01, -1.0246673423826493e+01, -1.0025771925388167e+01, -9.8072378395003508e+00, -9.5909243626320890e+00, -9.3766977481232310e+00, -9.1644356715597066e+00, -8.9540258506546646e+00, -8.7453648725360029e+00, -8.5383571913371981e+00, -8.3329142666081459e+00, -8.1289538189277764e+00, -7.9263991836556649e+00, -7.7251787473280729e+00, -7.5252254540210100e+00, -7.3264763712435084e+00, -7.1288723067188302e+00, -6.9323574688579237e+00, -6.7368791649028887e+00, -6.5423875316754021e+00, -6.3488352946504518e+00, -6.1561775517231760e+00, -5.9643715785732399e+00, -5.7733766529779098e+00, -5.5831538957985396e+00, -5.3936661266788803e+00, -5.2048777327581446e+00, -5.0167545489255820e+00, -4.8292637483334380e+00, -4.6423737420472682e+00, -4.4560540868511129e+00, -4.2702754003439098e+00, -4.0850092825657862e+00, -3.9002282434810596e+00, -3.7159056357210827e+00, -3.5320155920561942e+00, -3.3485329671235227e+00, -3.1654332829874168e+00, -2.9826926781529521e+00, -2.8002878596910716e+00, 
-2.6181960581673263e+00, -2.4363949850953830e+00, -2.2548627926621019e+00, -2.0735780354934534e+00, -1.8925196342502328e+00, -1.7116668408598084e+00, -1.5309992052052144e+00, -1.3504965431061176e+00, -1.1701389054376192e+00, -9.8990654824282009e-01, -8.0977990370358899e-01, -6.2973955184126684e-01, -4.4976619282513375e-01, -2.6984061977151302e-01, -8.9943691920400901e-02, 8.9943691920400901e-02, 2.6984061977151302e-01, 4.4976619282513375e-01, 6.2973955184126684e-01, 8.0977990370358899e-01, 9.8990654824282009e-01, 1.1701389054376192e+00, 1.3504965431061176e+00, 1.5309992052052144e+00, 1.7116668408598084e+00, 1.8925196342502328e+00, 2.0735780354934534e+00, 2.2548627926621019e+00, 2.4363949850953830e+00, 2.6181960581673263e+00, 2.8002878596910716e+00, 2.9826926781529521e+00, 3.1654332829874168e+00, 3.3485329671235227e+00, 3.5320155920561942e+00, 3.7159056357210827e+00, 3.9002282434810596e+00, 4.0850092825657862e+00, 4.2702754003439098e+00, 4.4560540868511129e+00, 4.6423737420472682e+00, 4.8292637483334380e+00, 5.0167545489255820e+00, 5.2048777327581446e+00, 5.3936661266788803e+00, 5.5831538957985396e+00, 5.7733766529779098e+00, 5.9643715785732399e+00, 6.1561775517231760e+00, 6.3488352946504518e+00, 6.5423875316754021e+00, 6.7368791649028887e+00, 6.9323574688579237e+00, 7.1288723067188302e+00, 7.3264763712435084e+00, 7.5252254540210100e+00, 7.7251787473280729e+00, 7.9263991836556649e+00, 8.1289538189277764e+00, 8.3329142666081459e+00, 8.5383571913371981e+00, 8.7453648725360029e+00, 8.9540258506546646e+00, 9.1644356715597066e+00, 9.3766977481232310e+00, 9.5909243626320890e+00, 9.8072378395003508e+00, 1.0025771925388167e+01, 1.0246673423826493e+01, 1.0470104144699656e+01, 1.0696243246707187e+01, 1.0925290075047540e+01, 1.1157467629750515e+01, 1.1393026846411182e+01, 1.1632251936796413e+01, 1.1875467131689264e+01, 1.2123045308073459e+01, 1.2375419193081754e+01, 1.2633096161556749e+01, 1.2896678158583638e+01, 1.3166889120550762e+01, 1.3444613698016527e+01, 1.3730953615978237e+01, 1.4027312722878635e+01, 1.4335531106337195e+01, 1.4658108513302281e+01, 1.4998603726021303e+01, 1.5362419059461168e+01, 1.5758561659537367e+01, 1.6204510499941978e+01, 1.6746015667021069e+01}, + {-1.6803976908523694e+01, -1.6263088958580713e+01, -1.5817660423010251e+01, -1.5421989437211316e+01, -1.5058615286432362e+01, -1.4718540167071973e+01, -1.4396367336014109e+01, -1.4088541665869107e+01, -1.3792565969471646e+01, -1.3506602040607744e+01, -1.3229247466060766e+01, -1.2959401648746917e+01, -1.2696180846459008e+01, -1.2438861869552357e+01, -1.2186843397769389e+01, -1.1939618587201746e+01, -1.1696755168060276e+01, -1.1457880662155430e+01, -1.1222671190311015e+01, -1.0990842853911044e+01, -1.0762144998868230e+01, -1.0536354880406639e+01, -1.0313273386643994e+01, -1.0092721573755421e+01, -9.8745378311513203e+00, -9.6585755413803565e+00, -9.4447011326189330e+00, -9.2327924457049324e+00, -9.0227373554249493e+00, -8.8144325990031405e+00, -8.6077827747261111e+00, -8.4026994812503251e+00, -8.1991005739976206e+00, -7.9969095195952864e+00, -7.7960548328817607e+00, -7.5964695838133709e+00, -7.3980909638458217e+00, -7.2008599031569602e+00, -7.0047207315224398e+00, -6.8096208768281805e+00, -6.6155105961598455e+00, -6.4223427351940696e+00, -6.2300725122630887e+00, -6.0386573240004857e+00, -5.8480565699220763e+00, -5.6582314936691613e+00, -5.4691450389548066e+00, -5.2807617185180318e+00, -5.0930474946144608e+00, -4.9059696697619373e+00, -4.7194967866215638e+00, -4.5335985360330326e+00, -4.3482456723419469e+00, -4.1634099352589846e+00, 
-3.9790639775789276e+00, -3.7951812981638180e+00, -3.6117361796606513e+00, -3.4287036304814422e+00, -3.2460593306235781e+00, -3.0637795809520179e+00, -2.8818412556030690e+00, -2.7002217572028795e+00, -2.5188989746230557e+00, -2.3378512430214919e+00, -2.1570573059390363e+00, -1.9764962792423748e+00, -1.7961476167208907e+00, -1.6159910771604287e+00, -1.4360066927302104e+00, -1.2561747385307371e+00, -1.0764757031605943e+00, -8.9689026016876394e-01, -7.1739924026651414e-01, -5.3798360417923241e-01, -3.5862441602383283e-01, -1.7930281710163956e-01, -0.0000000000000000e+00, 1.7930281710163956e-01, 3.5862441602383283e-01, 5.3798360417923241e-01, 7.1739924026651414e-01, 8.9689026016876394e-01, 1.0764757031605943e+00, 1.2561747385307371e+00, 1.4360066927302104e+00, 1.6159910771604287e+00, 1.7961476167208907e+00, 1.9764962792423748e+00, 2.1570573059390363e+00, 2.3378512430214919e+00, 2.5188989746230557e+00, 2.7002217572028795e+00, 2.8818412556030690e+00, 3.0637795809520179e+00, 3.2460593306235781e+00, 3.4287036304814422e+00, 3.6117361796606513e+00, 3.7951812981638180e+00, 3.9790639775789276e+00, 4.1634099352589846e+00, 4.3482456723419469e+00, 4.5335985360330326e+00, 4.7194967866215638e+00, 4.9059696697619373e+00, 5.0930474946144608e+00, 5.2807617185180318e+00, 5.4691450389548066e+00, 5.6582314936691613e+00, 5.8480565699220763e+00, 6.0386573240004857e+00, 6.2300725122630887e+00, 6.4223427351940696e+00, 6.6155105961598455e+00, 6.8096208768281805e+00, 7.0047207315224398e+00, 7.2008599031569602e+00, 7.3980909638458217e+00, 7.5964695838133709e+00, 7.7960548328817607e+00, 7.9969095195952864e+00, 8.1991005739976206e+00, 8.4026994812503251e+00, 8.6077827747261111e+00, 8.8144325990031405e+00, 9.0227373554249493e+00, 9.2327924457049324e+00, 9.4447011326189330e+00, 9.6585755413803565e+00, 9.8745378311513203e+00, 1.0092721573755421e+01, 1.0313273386643994e+01, 1.0536354880406639e+01, 1.0762144998868230e+01, 1.0990842853911044e+01, 1.1222671190311015e+01, 1.1457880662155430e+01, 1.1696755168060276e+01, 1.1939618587201746e+01, 1.2186843397769389e+01, 1.2438861869552357e+01, 1.2696180846459008e+01, 1.2959401648746917e+01, 1.3229247466060766e+01, 1.3506602040607744e+01, 1.3792565969471646e+01, 1.4088541665869107e+01, 1.4396367336014109e+01, 1.4718540167071973e+01, 1.5058615286432362e+01, 1.5421989437211316e+01, 1.5817660423010251e+01, 1.6263088958580713e+01, 1.6803976908523694e+01}, + {-1.6861746151514453e+01, -1.6321470589822656e+01, -1.5876558219732326e+01, -1.5481355041356229e+01, -1.5118418464347313e+01, -1.4778759958751550e+01, -1.4456988312356128e+01, -1.4149552025707647e+01, -1.3853956452475600e+01, -1.3568365253816872e+01, -1.3291377440919536e+01, -1.3021893535990777e+01, -1.2759030698111300e+01, -1.2502066478454370e+01, -1.2250400176513523e+01, -1.2003525474883981e+01, -1.1761010557188053e+01, -1.1522483340487554e+01, -1.1287620293977072e+01, -1.1056137829198551e+01, -1.0827785570772761e+01, -1.0602341026531503e+01, -1.0379605315384135e+01, -1.0159399705951289e+01, -9.9415627845826098e+00, -9.7259481176063289e+00, -9.5124223057752690e+00, -9.3008633529456954e+00, -9.0911592887589681e+00, -8.8832069983214303e+00, -8.6769112218541409e+00, -8.4721836948885070e+00, -8.2689424054370715e+00, -8.0671109491149302e+00, -7.8666179667484384e+00, -7.6673966518192671e+00, -7.4693843173280303e+00, -7.2725220134526776e+00, -7.0767541888205505e+00, -6.8820283893841117e+00, -6.6882949898457129e+00, -6.4955069533605654e+00, -6.3036196158933109e+00, -6.1125904921391818e+00, -5.9223791003665953e+00, -5.7329468039109130e+00, 
-5.5442566673621716e+00, -5.3562733257536657e+00, -5.1689628652816504e+00, -4.9822927142762881e+00, -4.7962315433057050e+00, -4.6107491734334216e+00, -4.4258164917680798e+00, -4.2414053735465238e+00, -4.0574886100793801e+00, -3.8740398419645214e+00, -3.6910334970398750e+00, -3.5084447326044930e+00, -3.3262493814868481e+00, -3.1444239015829951e+00, -2.9629453285254086e+00, -2.7817912311767614e+00, -2.6009396696722078e+00, -2.4203691557594618e+00, -2.2400586152085502e+00, -2.0599873520829730e+00, -1.8801350146814075e+00, -1.7004815629744074e+00, -1.5210072373739318e+00, -1.3416925286852595e+00, -1.1625181491010375e+00, -9.8346500410607329e-01, -8.0451416516907071e-01, -6.2564684310401653e-01, -4.4684436198936728e-01, -2.6808813353769451e-01, -8.9359631812036308e-02, 8.9359631812036308e-02, 2.6808813353769451e-01, 4.4684436198936728e-01, 6.2564684310401653e-01, 8.0451416516907071e-01, 9.8346500410607329e-01, 1.1625181491010375e+00, 1.3416925286852595e+00, 1.5210072373739318e+00, 1.7004815629744074e+00, 1.8801350146814075e+00, 2.0599873520829730e+00, 2.2400586152085502e+00, 2.4203691557594618e+00, 2.6009396696722078e+00, 2.7817912311767614e+00, 2.9629453285254086e+00, 3.1444239015829951e+00, 3.3262493814868481e+00, 3.5084447326044930e+00, 3.6910334970398750e+00, 3.8740398419645214e+00, 4.0574886100793801e+00, 4.2414053735465238e+00, 4.4258164917680798e+00, 4.6107491734334216e+00, 4.7962315433057050e+00, 4.9822927142762881e+00, 5.1689628652816504e+00, 5.3562733257536657e+00, 5.5442566673621716e+00, 5.7329468039109130e+00, 5.9223791003665953e+00, 6.1125904921391818e+00, 6.3036196158933109e+00, 6.4955069533605654e+00, 6.6882949898457129e+00, 6.8820283893841117e+00, 7.0767541888205505e+00, 7.2725220134526776e+00, 7.4693843173280303e+00, 7.6673966518192671e+00, 7.8666179667484384e+00, 8.0671109491149302e+00, 8.2689424054370715e+00, 8.4721836948885070e+00, 8.6769112218541409e+00, 8.8832069983214303e+00, 9.0911592887589681e+00, 9.3008633529456954e+00, 9.5124223057752690e+00, 9.7259481176063289e+00, 9.9415627845826098e+00, 1.0159399705951289e+01, 1.0379605315384135e+01, 1.0602341026531503e+01, 1.0827785570772761e+01, 1.1056137829198551e+01, 1.1287620293977072e+01, 1.1522483340487554e+01, 1.1761010557188053e+01, 1.2003525474883981e+01, 1.2250400176513523e+01, 1.2502066478454370e+01, 1.2759030698111300e+01, 1.3021893535990777e+01, 1.3291377440919536e+01, 1.3568365253816872e+01, 1.3853956452475600e+01, 1.4149552025707647e+01, 1.4456988312356128e+01, 1.4778759958751550e+01, 1.5118418464347313e+01, 1.5481355041356229e+01, 1.5876558219732326e+01, 1.6321470589822656e+01, 1.6861746151514453e+01}, + {-1.6919325283973091e+01, -1.6379657351007765e+01, -1.5935257067352701e+01, -1.5540517945702927e+01, -1.5178015387422422e+01, -1.4838770068479555e+01, -1.4517396267181470e+01, -1.4210346085463854e+01, -1.3915127399149720e+01, -1.3629905722933353e+01, -1.3353281481744700e+01, -1.3084156308589026e+01, -1.2821648256881092e+01, -1.2565035612781342e+01, -1.2313718290542331e+01, -1.2067190494908804e+01, -1.1825020859053186e+01, -1.1586837691812352e+01, -1.1352317807578974e+01, -1.1121177925121051e+01, -1.0893167945027271e+01, -1.0668065625149652e+01, -1.0445672312722746e+01, -1.0225809486441577e+01, -1.0008315927297842e+01, -9.7930453831581623e+00, -9.5798646251513500e+00, -9.3686518179792717e+00, -9.1592951439814989e+00, -8.9516916339961323e+00, -8.7457461680254056e+00, -8.5413706163114078e+00, -8.3384830972746489e+00, -8.1370073333099651e+00, -7.9368720889917341e+00, -7.7380106790491130e+00, -7.5403605357059194e+00, 
-7.3438628267689321e+00, -7.1484621172907721e+00, -6.9541060688033616e+00, -6.7607451710725037e+00, -6.5683325021070607e+00, -6.3768235128018915e+00, -6.1861758331287540e+00, -5.9963490972348117e+00, -5.8073047851809010e+00, -5.6190060793645857e+00, -5.4314177339367200e+00, -5.2445059557435734e+00, -5.0582382955161487e+00, -4.8725835481900202e+00, -4.6875116613772301e+00, -4.5029936511304234e+00, -4.3190015242413970e+00, -4.1355082064043351e+00, -3.9524874756501580e+00, -3.7699139005244779e+00, -3.5877627825391012e+00, -3.4060101024770435e+00, -3.2246324701747149e+00, -3.0436070774431303e+00, -2.8629116538234336e+00, -2.6825244249014188e+00, -2.5024240729314422e+00, -2.3225896995428044e+00, -2.1430007903215516e+00, -1.9636371810781683e+00, -1.7844790256269836e+00, -1.6055067649166126e+00, -1.4267010973625776e+00, -1.2480429502435568e+00, -1.0695134520316971e+00, -8.9109390553517087e-01, -7.1276576173782080e-01, -5.3451059422635872e-01, -3.5631007410029453e-01, -1.7814594526358507e-01, -0.0000000000000000e+00, 1.7814594526358507e-01, 3.5631007410029453e-01, 5.3451059422635872e-01, 7.1276576173782080e-01, 8.9109390553517087e-01, 1.0695134520316971e+00, 1.2480429502435568e+00, 1.4267010973625776e+00, 1.6055067649166126e+00, 1.7844790256269836e+00, 1.9636371810781683e+00, 2.1430007903215516e+00, 2.3225896995428044e+00, 2.5024240729314422e+00, 2.6825244249014188e+00, 2.8629116538234336e+00, 3.0436070774431303e+00, 3.2246324701747149e+00, 3.4060101024770435e+00, 3.5877627825391012e+00, 3.7699139005244779e+00, 3.9524874756501580e+00, 4.1355082064043351e+00, 4.3190015242413970e+00, 4.5029936511304234e+00, 4.6875116613772301e+00, 4.8725835481900202e+00, 5.0582382955161487e+00, 5.2445059557435734e+00, 5.4314177339367200e+00, 5.6190060793645857e+00, 5.8073047851809010e+00, 5.9963490972348117e+00, 6.1861758331287540e+00, 6.3768235128018915e+00, 6.5683325021070607e+00, 6.7607451710725037e+00, 6.9541060688033616e+00, 7.1484621172907721e+00, 7.3438628267689321e+00, 7.5403605357059194e+00, 7.7380106790491130e+00, 7.9368720889917341e+00, 8.1370073333099651e+00, 8.3384830972746489e+00, 8.5413706163114078e+00, 8.7457461680254056e+00, 8.9516916339961323e+00, 9.1592951439814989e+00, 9.3686518179792717e+00, 9.5798646251513500e+00, 9.7930453831581623e+00, 1.0008315927297842e+01, 1.0225809486441577e+01, 1.0445672312722746e+01, 1.0668065625149652e+01, 1.0893167945027271e+01, 1.1121177925121051e+01, 1.1352317807578974e+01, 1.1586837691812352e+01, 1.1825020859053186e+01, 1.2067190494908804e+01, 1.2313718290542331e+01, 1.2565035612781342e+01, 1.2821648256881092e+01, 1.3084156308589026e+01, 1.3353281481744700e+01, 1.3629905722933353e+01, 1.3915127399149720e+01, 1.4210346085463854e+01, 1.4517396267181470e+01, 1.4838770068479555e+01, 1.5178015387422422e+01, 1.5540517945702927e+01, 1.5935257067352701e+01, 1.6379657351007765e+01, 1.6919325283973091e+01}, + {-1.6976716163194499e+01, -1.6437651167348605e+01, -1.5993758950122626e+01, -1.5599480189464179e+01, -1.5237408147560531e+01, -1.4898572639500255e+01, -1.4577593394280033e+01, -1.4270926089044083e+01, -1.3976081103348914e+01, -1.3691225791789593e+01, -1.3414961982530070e+01, -1.3146192411007465e+01, -1.2884036018123565e+01, -1.2627771819287853e+01, -1.2376800338602001e+01, -1.2130616298680719e+01, -1.1888788778454018e+01, -1.1650946475122105e+01, -1.1416766545164455e+01, -1.1185966011702471e+01, -1.0958295048610227e+01, -1.0733531661229089e+01, -1.0511477422708499e+01, -1.0291954019502739e+01, -1.0074800425004574e+01, -9.8598705664345285e+00, -9.6470313831544541e+00, 
-9.4361611985975138e+00, -9.2271483457046273e+00, -9.0198899989563817e+00, -8.8142911760456872e+00, -8.6102638798256326e+00, -8.4077263570086043e+00, -8.2066024546299392e+00, -8.0068210588433661e+00, -7.8083156034214856e+00, -7.6110236375661993e+00, -7.4148864444214286e+00, -7.2198487031214214e+00, -7.0258581883766524e+00, -6.8328655025529308e+00, -6.6408238359815064e+00, -6.4496887518830528e+00, -6.2594179928228799e+00, -6.0699713060598244e+00, -5.8813102855233703e+00, -5.6933982284661457e+00, -5.5062000051024196e+00, -5.3196819397663528e+00, -5.1338117023131167e+00, -4.9485582086476523e+00, -4.7638915294038444e+00, -4.5797828059155004e+00, -4.3962041727224284e+00, -4.2131286859429329e+00, -4.0305302569201968e+00, -3.8483835906160127e+00, -3.6666641282827874e+00, -3.4853479939947509e+00, -3.3044119446629794e+00, -3.1238333231970716e+00, -2.9435900145097715e+00, -2.7636604040902180e+00, -2.5840233388973028e+00, -2.4046580903472949e+00, -2.2255443191898578e+00, -2.0466620420841379e+00, -1.8679915997020398e+00, -1.6895136261993984e+00, -1.5112090199076367e+00, -1.3330589151089478e+00, -1.1550446547671134e+00, -9.7714776409397042e-01, -7.9934992483832146e-01, -6.2163295018989517e-01, -4.4397876019583243e-01, -2.6636935759120106e-01, -8.8786803948269877e-02, 8.8786803948269877e-02, 2.6636935759120106e-01, 4.4397876019583243e-01, 6.2163295018989517e-01, 7.9934992483832146e-01, 9.7714776409397042e-01, 1.1550446547671134e+00, 1.3330589151089478e+00, 1.5112090199076367e+00, 1.6895136261993984e+00, 1.8679915997020398e+00, 2.0466620420841379e+00, 2.2255443191898578e+00, 2.4046580903472949e+00, 2.5840233388973028e+00, 2.7636604040902180e+00, 2.9435900145097715e+00, 3.1238333231970716e+00, 3.3044119446629794e+00, 3.4853479939947509e+00, 3.6666641282827874e+00, 3.8483835906160127e+00, 4.0305302569201968e+00, 4.2131286859429329e+00, 4.3962041727224284e+00, 4.5797828059155004e+00, 4.7638915294038444e+00, 4.9485582086476523e+00, 5.1338117023131167e+00, 5.3196819397663528e+00, 5.5062000051024196e+00, 5.6933982284661457e+00, 5.8813102855233703e+00, 6.0699713060598244e+00, 6.2594179928228799e+00, 6.4496887518830528e+00, 6.6408238359815064e+00, 6.8328655025529308e+00, 7.0258581883766524e+00, 7.2198487031214214e+00, 7.4148864444214286e+00, 7.6110236375661993e+00, 7.8083156034214856e+00, 8.0068210588433661e+00, 8.2066024546299392e+00, 8.4077263570086043e+00, 8.6102638798256326e+00, 8.8142911760456872e+00, 9.0198899989563817e+00, 9.2271483457046273e+00, 9.4361611985975138e+00, 9.6470313831544541e+00, 9.8598705664345285e+00, 1.0074800425004574e+01, 1.0291954019502739e+01, 1.0511477422708499e+01, 1.0733531661229089e+01, 1.0958295048610227e+01, 1.1185966011702471e+01, 1.1416766545164455e+01, 1.1650946475122105e+01, 1.1888788778454018e+01, 1.2130616298680719e+01, 1.2376800338602001e+01, 1.2627771819287853e+01, 1.2884036018123565e+01, 1.3146192411007465e+01, 1.3414961982530070e+01, 1.3691225791789593e+01, 1.3976081103348914e+01, 1.4270926089044083e+01, 1.4577593394280033e+01, 1.4898572639500255e+01, 1.5237408147560531e+01, 1.5599480189464179e+01, 1.5993758950122626e+01, 1.6437651167348605e+01, 1.6976716163194499e+01}, + {-1.7033920616481659e+01, -1.6495453932662432e+01, -1.6052065819662449e+01, -1.5658243778058655e+01, -1.5296598801743723e+01, -1.4958169779028550e+01, -1.4637581850310665e+01, -1.4331294242121345e+01, -1.4036819819586414e+01, -1.3752327763757672e+01, -1.3476421295676769e+01, -1.3208004244970210e+01, -1.2946196433284063e+01, -1.2690277599628986e+01, -1.2439648873125966e+01, -1.2193805490052643e+01, 
-1.1952316971370964e+01, -1.1714812399295406e+01, -1.1480969269340585e+01, -1.1250504906165974e+01, -1.1023169754304320e+01, -1.0798742064110225e+01, -1.0577023632291938e+01, -1.0357836350801911e+01, -1.0141019383247183e+01, -9.9264268340723447e+00, -9.7139258087846567e+00, -9.5033947874905458e+00, -9.2947222516928303e+00, -9.0878055174840568e+00, -8.8825497382175893e+00, -8.6788670473198550e+00, -8.4766758177429260e+00, -8.2759000190893772e+00, -8.0764686569918620e+00, -7.8783152821330633e+00, -7.6813775585211594e+00, -7.4855968824216461e+00, -7.2909180447858821e+00, -7.0972889311843357e+00, -6.9046602542051492e+00, -6.7129853140601305e+00, -6.5222197837846556e+00, -6.3323215159520476e+00, -6.1432503682675650e+00, -5.9549680457789771e+00, -5.7674379577529349e+00, -5.5806250875296657e+00, -5.3944958738913487e+00, -5.2090181026688498e+00, -5.0241608074728896e+00, -4.8398941785737106e+00, -4.6561894790717648e+00, -4.4730189676038190e+00, -4.2903558269168203e+00, -4.1081740977179662e+00, -3.9264486172753981e+00, -3.7451549623013909e+00, -3.5642693956998519e+00, -3.3837688168037010e+00, -3.2036307147658607e+00, -3.0238331248011101e+00, -2.8443545870054412e+00, -2.6651741075053921e+00, -2.4862711217125546e+00, -2.3076254594784578e+00, -2.1292173119626474e+00, -1.9510272000422848e+00, -1.7730359441052344e+00, -1.5952246350806079e+00, -1.4175746065712449e+00, -1.2400674079618077e+00, -1.0626847783841744e+00, -8.8540862142874210e-01, -7.0822098049619520e-01, -5.3110401468933777e-01, -3.5403997514879265e-01, -1.7701118173979521e-01, -0.0000000000000000e+00, 1.7701118173979521e-01, 3.5403997514879265e-01, 5.3110401468933777e-01, 7.0822098049619520e-01, 8.8540862142874210e-01, 1.0626847783841744e+00, 1.2400674079618077e+00, 1.4175746065712449e+00, 1.5952246350806079e+00, 1.7730359441052344e+00, 1.9510272000422848e+00, 2.1292173119626474e+00, 2.3076254594784578e+00, 2.4862711217125546e+00, 2.6651741075053921e+00, 2.8443545870054412e+00, 3.0238331248011101e+00, 3.2036307147658607e+00, 3.3837688168037010e+00, 3.5642693956998519e+00, 3.7451549623013909e+00, 3.9264486172753981e+00, 4.1081740977179662e+00, 4.2903558269168203e+00, 4.4730189676038190e+00, 4.6561894790717648e+00, 4.8398941785737106e+00, 5.0241608074728896e+00, 5.2090181026688498e+00, 5.3944958738913487e+00, 5.5806250875296657e+00, 5.7674379577529349e+00, 5.9549680457789771e+00, 6.1432503682675650e+00, 6.3323215159520476e+00, 6.5222197837846556e+00, 6.7129853140601305e+00, 6.9046602542051492e+00, 7.0972889311843357e+00, 7.2909180447858821e+00, 7.4855968824216461e+00, 7.6813775585211594e+00, 7.8783152821330633e+00, 8.0764686569918620e+00, 8.2759000190893772e+00, 8.4766758177429260e+00, 8.6788670473198550e+00, 8.8825497382175893e+00, 9.0878055174840568e+00, 9.2947222516928303e+00, 9.5033947874905458e+00, 9.7139258087846567e+00, 9.9264268340723447e+00, 1.0141019383247183e+01, 1.0357836350801911e+01, 1.0577023632291938e+01, 1.0798742064110225e+01, 1.1023169754304320e+01, 1.1250504906165974e+01, 1.1480969269340585e+01, 1.1714812399295406e+01, 1.1952316971370964e+01, 1.2193805490052643e+01, 1.2439648873125966e+01, 1.2690277599628986e+01, 1.2946196433284063e+01, 1.3208004244970210e+01, 1.3476421295676769e+01, 1.3752327763757672e+01, 1.4036819819586414e+01, 1.4331294242121345e+01, 1.4637581850310665e+01, 1.4958169779028550e+01, 1.5296598801743723e+01, 1.5658243778058655e+01, 1.6052065819662449e+01, 1.6495453932662432e+01, 1.7033920616481659e+01}, + {-1.7090940441818589e+01, -1.6553067510081998e+01, -1.6110179595706111e+01, -1.5716810683887498e+01, 
-1.5355589372841228e+01, -1.5017563559089020e+01, -1.4697363755671436e+01, -1.4391452713037102e+01, -1.4097345763967498e+01, -1.3813213902715583e+01, -1.3537661732992635e+01, -1.3269594170493015e+01, -1.3008131910966144e+01, -1.2752555411464087e+01, -1.2502266401375357e+01, -1.2256760626504082e+01, -1.2015608046183521e+01, -1.1778438124354272e+01, -1.1544928692572215e+01, -1.1314797374275166e+01, -1.1087794882081830e+01, -1.0863699708936927e+01, -1.0642313872803815e+01, -1.0423459468924202e+01, -1.0206975848984905e+01, -9.9927172925773995e+00, -9.7805510693200404e+00, -9.5703558139886304e+00, -9.3620201546636572e+00, -9.1554415470755703e+00, -8.9505252782658893e+00, -8.7471836102562435e+00, -8.5453350402475099e+00, -8.3449036583995202e+00, -8.1458185877888383e+00, -7.9480134939425788e+00, -7.7514261535735791e+00, -7.5559980739261903e+00, -7.3616741555800598e+00, -7.1684023927258176e+00, -6.9761336058782426e+00, -6.7848212027732666e+00, -6.5944209638389415e+00, -6.4048908491640280e+00, -6.2161908243320640e+00, -6.0282827028602410e+00, -5.8411300032943645e+00, -5.6546978192742712e+00, -5.4689527011066783e+00, -5.2838625475716334e+00, -5.0993965068499545e+00, -4.9155248855969580e+00, -4.7322190653061282e+00, -4.5494514252081650e+00, -4.3671952710387769e+00, -4.1854247690845812e+00, -4.0041148849824761e+00, -3.8232413268052290e+00, -3.6427804920159894e+00, -3.4627094179181652e+00, -3.2830057352652728e+00, -3.1036476247289064e+00, -2.9246137759523689e+00, -2.7458833489433849e+00, -2.5674359375820566e+00, -2.3892515350402799e+00, -2.2113105009265008e+00, -2.0335935299852599e+00, -1.8560816221946776e+00, -1.6787560541171089e+00, -1.5015983513687976e+00, -1.3245902620836472e+00, -1.1477137312543411e+00, -9.7095087584109574e-01, -7.9428396054439432e-01, -6.1769537414325426e-01, -4.4116760630493546e-01, -2.6468322477561551e-01, -8.8224852864438347e-02, 8.8224852864438347e-02, 2.6468322477561551e-01, 4.4116760630493546e-01, 6.1769537414325426e-01, 7.9428396054439432e-01, 9.7095087584109574e-01, 1.1477137312543411e+00, 1.3245902620836472e+00, 1.5015983513687976e+00, 1.6787560541171089e+00, 1.8560816221946776e+00, 2.0335935299852599e+00, 2.2113105009265008e+00, 2.3892515350402799e+00, 2.5674359375820566e+00, 2.7458833489433849e+00, 2.9246137759523689e+00, 3.1036476247289064e+00, 3.2830057352652728e+00, 3.4627094179181652e+00, 3.6427804920159894e+00, 3.8232413268052290e+00, 4.0041148849824761e+00, 4.1854247690845812e+00, 4.3671952710387769e+00, 4.5494514252081650e+00, 4.7322190653061282e+00, 4.9155248855969580e+00, 5.0993965068499545e+00, 5.2838625475716334e+00, 5.4689527011066783e+00, 5.6546978192742712e+00, 5.8411300032943645e+00, 6.0282827028602410e+00, 6.2161908243320640e+00, 6.4048908491640280e+00, 6.5944209638389415e+00, 6.7848212027732666e+00, 6.9761336058782426e+00, 7.1684023927258176e+00, 7.3616741555800598e+00, 7.5559980739261903e+00, 7.7514261535735791e+00, 7.9480134939425788e+00, 8.1458185877888383e+00, 8.3449036583995202e+00, 8.5453350402475099e+00, 8.7471836102562435e+00, 8.9505252782658893e+00, 9.1554415470755703e+00, 9.3620201546636572e+00, 9.5703558139886304e+00, 9.7805510693200404e+00, 9.9927172925773995e+00, 1.0206975848984905e+01, 1.0423459468924202e+01, 1.0642313872803815e+01, 1.0863699708936927e+01, 1.1087794882081830e+01, 1.1314797374275166e+01, 1.1544928692572215e+01, 1.1778438124354272e+01, 1.2015608046183521e+01, 1.2256760626504082e+01, 1.2502266401375357e+01, 1.2752555411464087e+01, 1.3008131910966144e+01, 1.3269594170493015e+01, 1.3537661732992635e+01, 1.3813213902715583e+01, 
1.4097345763967498e+01, 1.4391452713037102e+01, 1.4697363755671436e+01, 1.5017563559089020e+01, 1.5355589372841228e+01, 1.5716810683887498e+01, 1.6110179595706111e+01, 1.6553067510081998e+01, 1.7090940441818589e+01}, + {-1.7147777408524032e+01, -1.6610493732745791e+01, -1.6168102166823996e+01, -1.5775182847088153e+01, -1.5414381850393539e+01, -1.5076756017330117e+01, -1.4756941195343936e+01, -1.4451403633675961e+01, -1.4157661115094985e+01, -1.3873886433984071e+01, -1.3598685566661047e+01, -1.3330964506884797e+01, -1.3069844817966612e+01, -1.2814607669526135e+01, -1.2564655386543684e+01, -1.2319484220282153e+01, -1.2078664564848737e+01, -1.1841826262681151e+01, -1.1608647478438799e+01, -1.1378846131631990e+01, -1.1152173200444981e+01, -1.0928407418040756e+01, -1.0707351021384738e+01, -1.0488826306849386e+01, -1.0272672812117237e+01, -1.0058744989900266e+01, -9.8469102719449655e+00, -9.6370474457447699e+00, -9.4290452840267438e+00, -9.2228013802394901e+00, -9.0182211531960341e+00, -8.8152169915922389e+00, -8.6137075149457782e+00, -8.4136169320244640e+00, -8.2148744813764658e+00, -8.0174139413725705e+00, -7.8211731993958935e+00, -7.6260938715966651e+00, -7.4321209660664360e+00, -7.2392025834514619e+00, -7.0472896499758120e+00, -6.8563356786247329e+00, -6.6662965548819884e+00, -6.4771303439479562e+00, -6.2887971168089667e+00, -6.1012587928995696e+00, -5.9144789974110621e+00, -5.7284229315624149e+00, -5.5430572543722416e+00, -5.3583499746593573e+00, -5.1742703521606801e+00, -4.9907888067929420e+00, -4.8078768352029666e+00, -4.6255069338529911e+00, -4.4436525279753418e+00, -4.2622879058067751e+00, -4.0813881575787159e+00, -3.9009291187969852e+00, -3.7208873173946002e+00, -3.5412399243848873e+00, -3.3619647076803778e+00, -3.1830399887764611e+00, -3.0044446020282125e+00, -2.8261578562746821e+00, -2.6481594985877317e+00, -2.4704296799425793e+00, -2.2929489226249280e+00, -2.1156980892051682e+00, -1.9386583529239170e+00, -1.7618111693452876e+00, -1.5851382491449768e+00, -1.4086215319096145e+00, -1.2322431608320461e+00, -1.0559854581943640e+00, -8.7983090153669952e-01, -7.0376210041511855e-01, -5.2776177365646371e-01, -3.5181272702177707e-01, -1.7589783119299077e-01, -0.0000000000000000e+00, 1.7589783119299077e-01, 3.5181272702177707e-01, 5.2776177365646371e-01, 7.0376210041511855e-01, 8.7983090153669952e-01, 1.0559854581943640e+00, 1.2322431608320461e+00, 1.4086215319096145e+00, 1.5851382491449768e+00, 1.7618111693452876e+00, 1.9386583529239170e+00, 2.1156980892051682e+00, 2.2929489226249280e+00, 2.4704296799425793e+00, 2.6481594985877317e+00, 2.8261578562746821e+00, 3.0044446020282125e+00, 3.1830399887764611e+00, 3.3619647076803778e+00, 3.5412399243848873e+00, 3.7208873173946002e+00, 3.9009291187969852e+00, 4.0813881575787159e+00, 4.2622879058067751e+00, 4.4436525279753418e+00, 4.6255069338529911e+00, 4.8078768352029666e+00, 4.9907888067929420e+00, 5.1742703521606801e+00, 5.3583499746593573e+00, 5.5430572543722416e+00, 5.7284229315624149e+00, 5.9144789974110621e+00, 6.1012587928995696e+00, 6.2887971168089667e+00, 6.4771303439479562e+00, 6.6662965548819884e+00, 6.8563356786247329e+00, 7.0472896499758120e+00, 7.2392025834514619e+00, 7.4321209660664360e+00, 7.6260938715966651e+00, 7.8211731993958935e+00, 8.0174139413725705e+00, 8.2148744813764658e+00, 8.4136169320244640e+00, 8.6137075149457782e+00, 8.8152169915922389e+00, 9.0182211531960341e+00, 9.2228013802394901e+00, 9.4290452840267438e+00, 9.6370474457447699e+00, 9.8469102719449655e+00, 1.0058744989900266e+01, 1.0272672812117237e+01, 
1.0488826306849386e+01, 1.0707351021384738e+01, 1.0928407418040756e+01, 1.1152173200444981e+01, 1.1378846131631990e+01, 1.1608647478438799e+01, 1.1841826262681151e+01, 1.2078664564848737e+01, 1.2319484220282153e+01, 1.2564655386543684e+01, 1.2814607669526135e+01, 1.3069844817966612e+01, 1.3330964506884797e+01, 1.3598685566661047e+01, 1.3873886433984071e+01, 1.4157661115094985e+01, 1.4451403633675961e+01, 1.4756941195343936e+01, 1.5076756017330117e+01, 1.5414381850393539e+01, 1.5775182847088153e+01, 1.6168102166823996e+01, 1.6610493732745791e+01, 1.7147777408524032e+01}, + {-1.7204433257886514e+01, -1.6667734404468501e+01, -1.6225835391124875e+01, -1.5833362176266244e+01, -1.5472978191373558e+01, -1.5135749157814335e+01, -1.4816316219712570e+01, -1.4511149100314286e+01, -1.4217768014947598e+01, -1.3934347545235219e+01, -1.3659495030180320e+01, -1.3392117533718560e+01, -1.3131337480278725e+01, -1.2876436746658081e+01, -1.2626818248827179e+01, -1.2381978739506893e+01, -1.2141489044042531e+01, -1.1904979380197393e+01, -1.1672128242851137e+01, -1.1442653844932979e+01, -1.1216307427723020e+01, -1.0992867962280265e+01, -1.0772137902368133e+01, -1.0553939743380022e+01, -1.0338113206958853e+01, -1.0124512916959700e+01, -9.9130064653237309e+00, -9.7034727903605109e+00, -9.4958008075637199e+00, -9.2898882462326107e+00, -9.0856406550885644e+00, -8.8829705476356562e+00, -8.6817966638328432e+00, -8.4820433291649113e+00, -8.2836398957394959e+00, -8.0865202528325053e+00, -7.8906223965273750e+00, -7.6958880498740347e+00, -7.5022623264288848e+00, -7.3096934312013273e+00, -7.1181323939823153e+00, -6.9275328308096045e+00, -6.7378507299670023e+00, -6.5490442594474336e+00, -6.3610735932529767e+00, -6.1739007542758424e+00, -5.9874894718156684e+00, -5.8018050520510185e+00, -5.6168142600053486e+00, -5.4324852117363971e+00, -5.2487872756390139e+00, -5.0656909818891060e+00, -4.8831679391745020e+00, -4.7011907579602283e+00, -4.5197329796234431e+00, -4.3387690108692292e+00, -4.1582740629043391e+00, -3.9782240949032923e+00, -3.7985957613512462e+00, -3.6193663628916424e+00, -3.4405138003449269e+00, -3.2620165315981051e+00, -3.0838535310943649e+00, -2.9060042516778903e+00, -2.7284485885718048e+00, -2.5511668452873062e+00, -2.3741397012797925e+00, -2.1973481811834414e+00, -2.0207736254695439e+00, -1.8443976623860776e+00, -1.6682021810467551e+00, -1.4921693055472409e+00, -1.3162813699945290e+00, -1.1405208943427161e+00, -9.6487056093471757e-01, -7.8931319165491054e-01, -6.1383172560234123e-01, -4.3840919719805421e-01, -2.6302871464332994e-01, -8.7673438648223800e-02, 8.7673438648223800e-02, 2.6302871464332994e-01, 4.3840919719805421e-01, 6.1383172560234123e-01, 7.8931319165491054e-01, 9.6487056093471757e-01, 1.1405208943427161e+00, 1.3162813699945290e+00, 1.4921693055472409e+00, 1.6682021810467551e+00, 1.8443976623860776e+00, 2.0207736254695439e+00, 2.1973481811834414e+00, 2.3741397012797925e+00, 2.5511668452873062e+00, 2.7284485885718048e+00, 2.9060042516778903e+00, 3.0838535310943649e+00, 3.2620165315981051e+00, 3.4405138003449269e+00, 3.6193663628916424e+00, 3.7985957613512462e+00, 3.9782240949032923e+00, 4.1582740629043391e+00, 4.3387690108692292e+00, 4.5197329796234431e+00, 4.7011907579602283e+00, 4.8831679391745020e+00, 5.0656909818891060e+00, 5.2487872756390139e+00, 5.4324852117363971e+00, 5.6168142600053486e+00, 5.8018050520510185e+00, 5.9874894718156684e+00, 6.1739007542758424e+00, 6.3610735932529767e+00, 6.5490442594474336e+00, 6.7378507299670023e+00, 6.9275328308096045e+00, 7.1181323939823153e+00, 
7.3096934312013273e+00, 7.5022623264288848e+00, 7.6958880498740347e+00, 7.8906223965273750e+00, 8.0865202528325053e+00, 8.2836398957394959e+00, 8.4820433291649113e+00, 8.6817966638328432e+00, 8.8829705476356562e+00, 9.0856406550885644e+00, 9.2898882462326107e+00, 9.4958008075637199e+00, 9.7034727903605109e+00, 9.9130064653237309e+00, 1.0124512916959700e+01, 1.0338113206958853e+01, 1.0553939743380022e+01, 1.0772137902368133e+01, 1.0992867962280265e+01, 1.1216307427723020e+01, 1.1442653844932979e+01, 1.1672128242851137e+01, 1.1904979380197393e+01, 1.2141489044042531e+01, 1.2381978739506893e+01, 1.2626818248827179e+01, 1.2876436746658081e+01, 1.3131337480278725e+01, 1.3392117533718560e+01, 1.3659495030180320e+01, 1.3934347545235219e+01, 1.4217768014947598e+01, 1.4511149100314286e+01, 1.4816316219712570e+01, 1.5135749157814335e+01, 1.5472978191373558e+01, 1.5833362176266244e+01, 1.6225835391124875e+01, 1.6667734404468501e+01, 1.7204433257886514e+01}, + {-1.7260909703781458e+01, -1.6724791300392337e+01, -1.6283381096937688e+01, -1.5891350549206285e+01, -1.5531380320925585e+01, -1.5194544951785256e+01, -1.4875490845359661e+01, -1.4570691174443654e+01, -1.4277668569732091e+01, -1.3994599387373768e+01, -1.3720092319274791e+01, -1.3453055491772846e+01, -1.3192612184064762e+01, -1.2938044974817329e+01, -1.2688757366462070e+01, -1.2444246609242258e+01, -1.2204083956265350e+01, -1.1967899997504619e+01, -1.1735373555229600e+01, -1.1506223133185523e+01, -1.1280200233327742e+01, -1.1057084062337088e+01, -1.0836677288618299e+01, -1.0618802604522946e+01, -1.0403299913666036e+01, -1.0190024009115621e+01, -9.9788426411218261e+00, -9.7696348969572284e+00, -9.5622898330514161e+00, -9.3567053127370503e+00, -9.1527870128321815e+00, -8.9504475698358625e+00, -8.7496058423271297e+00, -8.5501862706726577e+00, -8.3521183186851253e+00, -8.1553359846666886e+00, -7.9597773714928959e+00, -7.7653843071712023e+00, -7.5721020087422000e+00, -7.3798787835549238e+00, -7.1886657628964983e+00, -6.9984166637349920e+00, -6.8090875749762771e+00, -6.6206367651677347e+00, -6.4330245090246105e+00, -6.2462129305252470e+00, -6.0601658606325453e+00, -5.8748487079613687e+00, -5.6902283409336283e+00, -5.5062729801514685e+00, -5.3229520998798137e+00, -5.1402363376670781e+00, -4.9580974112509208e+00, -4.7765080419975057e+00, -4.5954418842104330e+00, -4.4148734597213917e+00, -4.2347780972404623e+00, -4.0551318760012771e+00, -3.8759115732862068e+00, -3.6970946154603719e+00, -3.5186590321815130e+00, -3.3405834134862555e+00, -3.1628468694827729e+00, -2.9854289924057533e+00, -2.8083098208124282e+00, -2.6314698057185479e+00, -2.4548897784909895e+00, -2.2785509203293697e+00, -2.1024347331829158e+00, -1.9265230119611116e+00, -1.7507978179074160e+00, -1.5752414530149108e+00, -1.3998364353710842e+00, -1.2245654753263127e+00, -1.0494114523869875e+00, -8.7435739273980528e-01, -6.9938644731849353e-01, -5.2448187032830795e-01, -3.4962699814701426e-01, -1.7480522852383426e-01, -0.0000000000000000e+00, 1.7480522852383426e-01, 3.4962699814701426e-01, 5.2448187032830795e-01, 6.9938644731849353e-01, 8.7435739273980528e-01, 1.0494114523869875e+00, 1.2245654753263127e+00, 1.3998364353710842e+00, 1.5752414530149108e+00, 1.7507978179074160e+00, 1.9265230119611116e+00, 2.1024347331829158e+00, 2.2785509203293697e+00, 2.4548897784909895e+00, 2.6314698057185479e+00, 2.8083098208124282e+00, 2.9854289924057533e+00, 3.1628468694827729e+00, 3.3405834134862555e+00, 3.5186590321815130e+00, 3.6970946154603719e+00, 3.8759115732862068e+00, 4.0551318760012771e+00, 
4.2347780972404623e+00, 4.4148734597213917e+00, 4.5954418842104330e+00, 4.7765080419975057e+00, 4.9580974112509208e+00, 5.1402363376670781e+00, 5.3229520998798137e+00, 5.5062729801514685e+00, 5.6902283409336283e+00, 5.8748487079613687e+00, 6.0601658606325453e+00, 6.2462129305252470e+00, 6.4330245090246105e+00, 6.6206367651677347e+00, 6.8090875749762771e+00, 6.9984166637349920e+00, 7.1886657628964983e+00, 7.3798787835549238e+00, 7.5721020087422000e+00, 7.7653843071712023e+00, 7.9597773714928959e+00, 8.1553359846666886e+00, 8.3521183186851253e+00, 8.5501862706726577e+00, 8.7496058423271297e+00, 8.9504475698358625e+00, 9.1527870128321815e+00, 9.3567053127370503e+00, 9.5622898330514161e+00, 9.7696348969572284e+00, 9.9788426411218261e+00, 1.0190024009115621e+01, 1.0403299913666036e+01, 1.0618802604522946e+01, 1.0836677288618299e+01, 1.1057084062337088e+01, 1.1280200233327742e+01, 1.1506223133185523e+01, 1.1735373555229600e+01, 1.1967899997504619e+01, 1.2204083956265350e+01, 1.2444246609242258e+01, 1.2688757366462070e+01, 1.2938044974817329e+01, 1.3192612184064762e+01, 1.3453055491772846e+01, 1.3720092319274791e+01, 1.3994599387373768e+01, 1.4277668569732091e+01, 1.4570691174443654e+01, 1.4875490845359661e+01, 1.5194544951785256e+01, 1.5531380320925585e+01, 1.5891350549206285e+01, 1.6283381096937688e+01, 1.6724791300392337e+01, 1.7260909703781458e+01}, + {-1.7317208433270949e+01, -1.6781666167619914e+01, -1.6340741083473855e+01, -1.5949149813561979e+01, -1.5589590133082927e+01, -1.5253145338412237e+01, -1.4934467055837311e+01, -1.4630031883570041e+01, -1.4337364850710150e+01, -1.4054644075392147e+01, -1.3780479592778537e+01, -1.3513780583944808e+01, -1.3253671176599115e+01, -1.2999434646049597e+01, -1.2750475076730016e+01, -1.2506290212533999e+01, -1.2266451730913499e+01, -1.2030590590990464e+01, -1.1798385939645330e+01, -1.1569556568885659e+01, -1.1343854238969067e+01, -1.1121058389970566e+01, -1.0900971902825360e+01, -1.0683417664825948e+01, -1.0468235759616565e+01, -1.0255281147593752e+01, -1.0044421735476904e+01, -9.8355367576951434e+00, -9.6285154098307064e+00, -9.4232556874808058e+00, -9.2196633937979797e+00, -9.0176512865137788e+00, -8.8171383410582820e+00, -8.6180491108986974e+00, -8.4203131697537437e+00, -8.2238646231301775e+00, -8.0286416788466841e+00, -7.8345862679873983e+00, -7.6416437091600784e+00, -7.4497624100959028e+00, -7.2588936015760819e+00, -7.0689910994481711e+00, -6.8800110911364190e+00, -6.6919119435819789e+00, -6.5046540299913760e+00, -6.3181995731417278e+00, -6.1325125033020340e+00, -5.9475583290919758e+00, -5.7633040198215193e+00, -5.5797178980431177e+00, -5.3967695412089913e+00, -5.2144296914634500e+00, -5.0326701727181335e+00, -4.8514638142596134e+00, -4.6707843802263858e+00, -4.4906065043681664e+00, -4.3109056295662098e+00, -4.1316579516506131e+00, -3.9528403671005403e+00, -3.7744304242568925e+00, -3.5964062777151744e+00, -3.4187466455998319e+00, -3.2414307694507918e+00, -3.0644383764788512e+00, -2.8877496439694483e+00, -2.7113451656344782e+00, -2.5352059197296661e+00, -2.3593132387707172e+00, -2.1836487806954117e+00, -2.0081945013310838e+00, -1.8329326280378133e+00, -1.6578456344072345e+00, -1.4829162159053173e+00, -1.3081272663548922e+00, -1.1334618551601847e+00, -9.5890320518127026e-01, -7.8443467117124477e-01, -6.1003971869307638e-01, -4.3570190343662574e-01, -2.6140485085923082e-01, -8.7132236075583736e-02, 8.7132236075583736e-02, 2.6140485085923082e-01, 4.3570190343662574e-01, 6.1003971869307638e-01, 7.8443467117124477e-01, 9.5890320518127026e-01, 
1.1334618551601847e+00, 1.3081272663548922e+00, 1.4829162159053173e+00, 1.6578456344072345e+00, 1.8329326280378133e+00, 2.0081945013310838e+00, 2.1836487806954117e+00, 2.3593132387707172e+00, 2.5352059197296661e+00, 2.7113451656344782e+00, 2.8877496439694483e+00, 3.0644383764788512e+00, 3.2414307694507918e+00, 3.4187466455998319e+00, 3.5964062777151744e+00, 3.7744304242568925e+00, 3.9528403671005403e+00, 4.1316579516506131e+00, 4.3109056295662098e+00, 4.4906065043681664e+00, 4.6707843802263858e+00, 4.8514638142596134e+00, 5.0326701727181335e+00, 5.2144296914634500e+00, 5.3967695412089913e+00, 5.5797178980431177e+00, 5.7633040198215193e+00, 5.9475583290919758e+00, 6.1325125033020340e+00, 6.3181995731417278e+00, 6.5046540299913760e+00, 6.6919119435819789e+00, 6.8800110911364190e+00, 7.0689910994481711e+00, 7.2588936015760819e+00, 7.4497624100959028e+00, 7.6416437091600784e+00, 7.8345862679873983e+00, 8.0286416788466841e+00, 8.2238646231301775e+00, 8.4203131697537437e+00, 8.6180491108986974e+00, 8.8171383410582820e+00, 9.0176512865137788e+00, 9.2196633937979797e+00, 9.4232556874808058e+00, 9.6285154098307064e+00, 9.8355367576951434e+00, 1.0044421735476904e+01, 1.0255281147593752e+01, 1.0468235759616565e+01, 1.0683417664825948e+01, 1.0900971902825360e+01, 1.1121058389970566e+01, 1.1343854238969067e+01, 1.1569556568885659e+01, 1.1798385939645330e+01, 1.2030590590990464e+01, 1.2266451730913499e+01, 1.2506290212533999e+01, 1.2750475076730016e+01, 1.2999434646049597e+01, 1.3253671176599115e+01, 1.3513780583944808e+01, 1.3780479592778537e+01, 1.4054644075392147e+01, 1.4337364850710150e+01, 1.4630031883570041e+01, 1.4934467055837311e+01, 1.5253145338412237e+01, 1.5589590133082927e+01, 1.5949149813561979e+01, 1.6340741083473855e+01, 1.6781666167619914e+01, 1.7317208433270949e+01}, + {-1.7373331107186754e+01, -1.6838360725829279e+01, -1.6397917121470808e+01, -1.6006761787526802e+01, -1.5647609491464893e+01, -1.5311552225513632e+01, -1.4993246802416799e+01, -1.4689173221989655e+01, -1.4396858895000966e+01, -1.4114483689200235e+01, -1.3840658973492838e+01, -1.3574294976135944e+01, -1.3314516667182943e+01, -1.3060608013433232e+01, -1.2811973676932878e+01, -1.2568111891415747e+01, -1.2328594755317379e+01, -1.2093053593899976e+01, -1.1861167875925814e+01, -1.1632656679158853e+01, -1.1407272019832192e+01, -1.1184793569232429e+01, -1.0965024418758787e+01, -1.0747787648671435e+01, -1.0532923520744902e+01, -1.0320287160863774e+01, -1.0109746630421466e+01, -9.9011813092421690e+00, -9.6944805303231938e+00, -9.4895424198040619e+00, -9.2862729054572402e+00, -9.0845848645331397e+00, -8.8843973875939959e+00, -8.6856351394777924e+00, -8.4882278020634931e+00, -8.2921095862957674e+00, -8.0972188031442496e+00, -7.9034974849477484e+00, -7.7108910500251024e+00, -7.5193480045954200e+00, -7.3288196769976075e+00, -7.1392599799761927e+00, -6.9506251974411839e+00, -6.7628737926408222e+00, -6.5759662351281483e+00, -6.3898648442721324e+00, -6.2045336473746264e+00, -6.0199382507162937e+00, -5.8360457220763333e+00, -5.6528244834591428e+00, -5.4702442129216378e+00, -5.2882757545322603e+00, -5.1068910356106052e+00, -4.9260629904980000e+00, -4.7457654901969706e+00, -4.5659732772932937e+00, -4.3866619056401390e+00, -4.2078076843410157e+00, -4.0293876256181580e+00, -3.8513793961965916e+00, -3.6737612718723360e+00, -3.4965120949666812e+00, -3.3196112343979944e+00, -3.1430385481283984e+00, -2.9667743477655746e+00, -2.7907993651201082e+00, -2.6150947205366424e+00, -2.4396418928328796e+00, -2.2644226906944231e+00, -2.0894192253858019e+00, 
-1.9146138846489213e+00, -1.7399893076698572e+00, -1.5655283610033905e+00, -1.3912141153521926e+00, -1.2170298231041317e+00, -1.0429588965369114e+00, -8.6898488660423323e-01, -6.9509146222196017e-01, -5.2126238997639096e-01, -3.4748151417981160e-01, -1.7373273720096005e-01, -0.0000000000000000e+00, 1.7373273720096005e-01, 3.4748151417981160e-01, 5.2126238997639096e-01, 6.9509146222196017e-01, 8.6898488660423323e-01, 1.0429588965369114e+00, 1.2170298231041317e+00, 1.3912141153521926e+00, 1.5655283610033905e+00, 1.7399893076698572e+00, 1.9146138846489213e+00, 2.0894192253858019e+00, 2.2644226906944231e+00, 2.4396418928328796e+00, 2.6150947205366424e+00, 2.7907993651201082e+00, 2.9667743477655746e+00, 3.1430385481283984e+00, 3.3196112343979944e+00, 3.4965120949666812e+00, 3.6737612718723360e+00, 3.8513793961965916e+00, 4.0293876256181580e+00, 4.2078076843410157e+00, 4.3866619056401390e+00, 4.5659732772932937e+00, 4.7457654901969706e+00, 4.9260629904980000e+00, 5.1068910356106052e+00, 5.2882757545322603e+00, 5.4702442129216378e+00, 5.6528244834591428e+00, 5.8360457220763333e+00, 6.0199382507162937e+00, 6.2045336473746264e+00, 6.3898648442721324e+00, 6.5759662351281483e+00, 6.7628737926408222e+00, 6.9506251974411839e+00, 7.1392599799761927e+00, 7.3288196769976075e+00, 7.5193480045954200e+00, 7.7108910500251024e+00, 7.9034974849477484e+00, 8.0972188031442496e+00, 8.2921095862957674e+00, 8.4882278020634931e+00, 8.6856351394777924e+00, 8.8843973875939959e+00, 9.0845848645331397e+00, 9.2862729054572402e+00, 9.4895424198040619e+00, 9.6944805303231938e+00, 9.9011813092421690e+00, 1.0109746630421466e+01, 1.0320287160863774e+01, 1.0532923520744902e+01, 1.0747787648671435e+01, 1.0965024418758787e+01, 1.1184793569232429e+01, 1.1407272019832192e+01, 1.1632656679158853e+01, 1.1861167875925814e+01, 1.2093053593899976e+01, 1.2328594755317379e+01, 1.2568111891415747e+01, 1.2811973676932878e+01, 1.3060608013433232e+01, 1.3314516667182943e+01, 1.3574294976135944e+01, 1.3840658973492838e+01, 1.4114483689200235e+01, 1.4396858895000966e+01, 1.4689173221989655e+01, 1.4993246802416799e+01, 1.5311552225513632e+01, 1.5647609491464893e+01, 1.6006761787526802e+01, 1.6397917121470808e+01, 1.6838360725829279e+01, 1.7373331107186754e+01}, + {-1.7429279360697116e+01, -1.6894876667871738e+01, -1.6454910953817389e+01, -1.6064188260485547e+01, -1.5705440229953897e+01, -1.5369767490259161e+01, -1.5051832004816319e+01, -1.4748117151542182e+01, -1.4456152706360307e+01, -1.4174120274430678e+01, -1.3900632549018214e+01, -1.3634600798111430e+01, -1.3375150828031479e+01, -1.3121567291995134e+01, -1.2873255425338009e+01, -1.2629713947884454e+01, -1.2390515375747968e+01, -1.2155291397374004e+01, -1.1923721800726158e+01, -1.1695525946865237e+01, -1.1470456105717808e+01, -1.1248292177643179e+01, -1.1028837462481100e+01, -1.0811915231528733e+01, -1.0597365922834403e+01, -1.0385044825972880e+01, -1.0174820155259216e+01, -9.9665714341945719e+00, -9.7601881314978272e+00, -9.5555685021734451e+00, -9.3526185969451436e+00, -9.1512514109155187e+00, -8.9513861481082948e+00, -8.7529475830520980e+00, -8.5558655040914431e+00, -8.3600742258949641e+00, -8.1655121608455978e+00, -7.9721214407712111e+00, -7.7798475819041020e+00, -7.5886391871177059e+00, -7.3984476804351802e+00, -7.2092270695808969e+00, -7.0209337329860251e+00, -6.8335262281900011e+00, -6.6469651190213561e+00, -6.4612128193108518e+00, -6.2762334512001194e+00, -6.0919927163706555e+00, -5.9084577787394661e+00, -5.7255971573558835e+00, -5.5433806283944191e+00, -5.3617791352758264e+00, 
-5.1807647060662454e+00, -5.0003103774057065e+00, -4.8203901243047591e+00, -4.6409787952237167e+00, -4.4620520519147870e+00, -4.2835863135645020e+00, -4.1055587048237943e+00, -3.9279470073566172e+00, -3.7507296145762479e+00, -3.5738854892718694e+00, -3.3973941238575383e+00, -3.2212355030015671e+00, -3.0453900684172543e+00, -2.8698386856160685e+00, -2.6945626124422972e+00, -2.5195434692239367e+00, -2.3447632103886216e+00, -2.1702040974057377e+00, -1.9958486729268685e+00, -1.8216797360063830e+00, -1.6476803182925728e+00, -1.4738336610872771e+00, -1.3001231931786021e+00, -1.1265325093571319e+00, -9.5304534953113307e-01, -7.7964557836059989e-01, -6.0631716533377134e-01, -4.3304416521289407e-01, -2.5981069877863450e-01, -8.6600933804640304e-02, 8.6600933804640304e-02, 2.5981069877863450e-01, 4.3304416521289407e-01, 6.0631716533377134e-01, 7.7964557836059989e-01, 9.5304534953113307e-01, 1.1265325093571319e+00, 1.3001231931786021e+00, 1.4738336610872771e+00, 1.6476803182925728e+00, 1.8216797360063830e+00, 1.9958486729268685e+00, 2.1702040974057377e+00, 2.3447632103886216e+00, 2.5195434692239367e+00, 2.6945626124422972e+00, 2.8698386856160685e+00, 3.0453900684172543e+00, 3.2212355030015671e+00, 3.3973941238575383e+00, 3.5738854892718694e+00, 3.7507296145762479e+00, 3.9279470073566172e+00, 4.1055587048237943e+00, 4.2835863135645020e+00, 4.4620520519147870e+00, 4.6409787952237167e+00, 4.8203901243047591e+00, 5.0003103774057065e+00, 5.1807647060662454e+00, 5.3617791352758264e+00, 5.5433806283944191e+00, 5.7255971573558835e+00, 5.9084577787394661e+00, 6.0919927163706555e+00, 6.2762334512001194e+00, 6.4612128193108518e+00, 6.6469651190213561e+00, 6.8335262281900011e+00, 7.0209337329860251e+00, 7.2092270695808969e+00, 7.3984476804351802e+00, 7.5886391871177059e+00, 7.7798475819041020e+00, 7.9721214407712111e+00, 8.1655121608455978e+00, 8.3600742258949641e+00, 8.5558655040914431e+00, 8.7529475830520980e+00, 8.9513861481082948e+00, 9.1512514109155187e+00, 9.3526185969451436e+00, 9.5555685021734451e+00, 9.7601881314978272e+00, 9.9665714341945719e+00, 1.0174820155259216e+01, 1.0385044825972880e+01, 1.0597365922834403e+01, 1.0811915231528733e+01, 1.1028837462481100e+01, 1.1248292177643179e+01, 1.1470456105717808e+01, 1.1695525946865237e+01, 1.1923721800726158e+01, 1.2155291397374004e+01, 1.2390515375747968e+01, 1.2629713947884454e+01, 1.2873255425338009e+01, 1.3121567291995134e+01, 1.3375150828031479e+01, 1.3634600798111430e+01, 1.3900632549018214e+01, 1.4174120274430678e+01, 1.4456152706360307e+01, 1.4748117151542182e+01, 1.5051832004816319e+01, 1.5369767490259161e+01, 1.5705440229953897e+01, 1.6064188260485547e+01, 1.6454910953817389e+01, 1.6894876667871738e+01, 1.7429279360697116e+01}, + {-1.7485054803857970e+01, -1.6951215660353082e+01, -1.6511724296161706e+01, -1.6121430993647511e+01, -1.5763084153353320e+01, -1.5427792979852288e+01, -1.5110224551907818e+01, -1.4806865602342286e+01, -1.4515248255936989e+01, -1.4233555843220696e+01, -1.3960402372562056e+01, -1.3694700144334103e+01, -1.3435575795134914e+01, -1.3182314659599264e+01, -1.2934322542095073e+01, -1.2691098644846306e+01, -1.2452215898392630e+01, -1.2217306351455802e+01, -1.1986050108567399e+01, -1.1758166811670554e+01, -1.1533408982146776e+01, -1.1311556747331544e+01, -1.1092413613523263e+01, -1.0875803041166620e+01, -1.0661565642768281e+01, -1.0449556869836458e+01, -1.0239645087896863e+01, -1.0031709962451384e+01, -9.8256410962894503e+00, -9.6213368716463528e+00, -9.4187034605725390e+00, -9.2176539744013386e+00, -9.0181077289936873e+00, 
-8.8199896069364012e+00, -8.6232295013939773e+00, -8.4277618290957275e+00, -8.2335251021525675e+00, -8.0404615501699581e+00, -7.8485167855520830e+00, -7.6576395060512166e+00, -7.4677812295616199e+00, -7.2788960569330223e+00, -7.0909404592182668e+00, -6.9038730862998348e+00, -6.7176545942812487e+00, -6.5322474893984905e+00, -6.3476159865165220e+00, -6.1637258805374548e+00, -5.9805444292681393e+00, -5.7980402464829925e+00, -5.6161832040781530e+00, -5.4349443423501533e+00, -5.2542957875499878e+00, -5.0742106759647410e+00, -4.8946630838663463e+00, -4.7156279627427748e+00, -4.5370810792926148e+00, -4.3589989597211884e+00, -4.1813588379261919e+00, -4.0041386072044292e+00, -3.8273167751493955e+00, -3.6508724214429682e+00, -3.4747851582739164e+00, -3.2990350931419172e+00, -3.1236027938286419e+00, -2.9484692553376957e+00, -2.7736158686231027e+00, -2.5990243909418167e+00, -2.4246769176797933e+00, -2.2505558555135576e+00, -2.0766438967802112e+00, -1.9029239949385686e+00, -1.7293793410127316e+00, -1.5559933409170248e+00, -1.3827495935679281e+00, -1.2096318696945301e+00, -1.0366240912641824e+00, -8.6371031144450516e-01, -6.9087469502674481e-01, -5.1810149923876170e-01, -3.4537505487866837e-01, -1.7267974770236652e-01, -0.0000000000000000e+00, 1.7267974770236652e-01, 3.4537505487866837e-01, 5.1810149923876170e-01, 6.9087469502674481e-01, 8.6371031144450516e-01, 1.0366240912641824e+00, 1.2096318696945301e+00, 1.3827495935679281e+00, 1.5559933409170248e+00, 1.7293793410127316e+00, 1.9029239949385686e+00, 2.0766438967802112e+00, 2.2505558555135576e+00, 2.4246769176797933e+00, 2.5990243909418167e+00, 2.7736158686231027e+00, 2.9484692553376957e+00, 3.1236027938286419e+00, 3.2990350931419172e+00, 3.4747851582739164e+00, 3.6508724214429682e+00, 3.8273167751493955e+00, 4.0041386072044292e+00, 4.1813588379261919e+00, 4.3589989597211884e+00, 4.5370810792926148e+00, 4.7156279627427748e+00, 4.8946630838663463e+00, 5.0742106759647410e+00, 5.2542957875499878e+00, 5.4349443423501533e+00, 5.6161832040781530e+00, 5.7980402464829925e+00, 5.9805444292681393e+00, 6.1637258805374548e+00, 6.3476159865165220e+00, 6.5322474893984905e+00, 6.7176545942812487e+00, 6.9038730862998348e+00, 7.0909404592182668e+00, 7.2788960569330223e+00, 7.4677812295616199e+00, 7.6576395060512166e+00, 7.8485167855520830e+00, 8.0404615501699581e+00, 8.2335251021525675e+00, 8.4277618290957275e+00, 8.6232295013939773e+00, 8.8199896069364012e+00, 9.0181077289936873e+00, 9.2176539744013386e+00, 9.4187034605725390e+00, 9.6213368716463528e+00, 9.8256410962894503e+00, 1.0031709962451384e+01, 1.0239645087896863e+01, 1.0449556869836458e+01, 1.0661565642768281e+01, 1.0875803041166620e+01, 1.1092413613523263e+01, 1.1311556747331544e+01, 1.1533408982146776e+01, 1.1758166811670554e+01, 1.1986050108567399e+01, 1.2217306351455802e+01, 1.2452215898392630e+01, 1.2691098644846306e+01, 1.2934322542095073e+01, 1.3182314659599264e+01, 1.3435575795134914e+01, 1.3694700144334103e+01, 1.3960402372562056e+01, 1.4233555843220696e+01, 1.4515248255936989e+01, 1.4806865602342286e+01, 1.5110224551907818e+01, 1.5427792979852288e+01, 1.5763084153353320e+01, 1.6121430993647511e+01, 1.6511724296161706e+01, 1.6951215660353082e+01, 1.7485054803857970e+01}, + {-1.7540659022148994e+01, -1.7007379344198675e+01, -1.6568358837502078e+01, -1.6178491720661924e+01, -1.5820543038026845e+01, -1.5485630512193200e+01, -1.5168426302403656e+01, -1.4865420473490115e+01, -1.4574147483007465e+01, -1.4292792374971157e+01, -1.4019970463722631e+01, -1.3754595074773901e+01, -1.3495793669093878e+01, 
-1.3242852257808744e+01, -1.2995177210125478e+01, -1.2752268207034200e+01, -1.2513698590301434e+01, -1.2279100766067012e+01, -1.2048155152843032e+01, -1.1820581671084192e+01, -1.1596133091430600e+01, -1.1374589766138451e+01, -1.1155755406023257e+01, -1.0939453658827642e+01, -1.0725525309740862e+01, -1.0513825970487574e+01, -1.0304224156133158e+01, -1.0096599672544393e+01, -9.8908422549712061e+00, -9.6868504112873062e+00, -9.4845304332878886e+00, -9.2837955469590963e+00, -9.0845651784194441e+00, -8.8867643167273460e+00, -8.6903229582689274e+00, -8.4951756202195732e+00, -8.3012609127831123e+00, -8.1085211616830808e+00, -7.9169020738077709e+00, -7.7263524400685295e+00, -7.5368238704753843e+00, -7.3482705572089442e+00, -7.1606490621065957e+00, -6.9739181255105622e+00, -6.7880384938663552e+00, -6.6029727638288866e+00, -6.4186852409432520e+00, -6.2351418112283552e+00, -6.0523098242126343e+00, -5.8701579861589899e+00, -5.6886562623761669e+00, -5.5077757876508500e+00, -5.3274887839522851e+00, -5.1477684846624596e+00, -4.9685890646722539e+00, -4.7899255757595700e+00, -4.6117538867311527e+00, -4.4340506278668892e+00, -4.2567931392552376e+00, -4.0799594226519913e+00, -3.9035280965327215e+00, -3.7274783540427840e+00, -3.5517899235782049e+00, -3.3764430317567267e+00, -3.2014183685612045e+00, -3.0266970544577654e+00, -2.8522606093090528e+00, -2.6780909229187282e+00, -2.5041702270574335e+00, -2.3304810688329010e+00, -2.1570062852778933e+00, -1.9837289790394881e+00, -1.8106324950618549e+00, -1.6377003981623639e+00, -1.4649164514076216e+00, -1.2922645952019900e+00, -1.1197289270063837e+00, -9.4729368160967065e-01, -7.7494321187896786e-01, -6.0266196991848653e-01, -4.3043448856942984e-01, -2.5824536318581770e-01, -8.6079233622990678e-02, 8.6079233622990678e-02, 2.5824536318581770e-01, 4.3043448856942984e-01, 6.0266196991848653e-01, 7.7494321187896786e-01, 9.4729368160967065e-01, 1.1197289270063837e+00, 1.2922645952019900e+00, 1.4649164514076216e+00, 1.6377003981623639e+00, 1.8106324950618549e+00, 1.9837289790394881e+00, 2.1570062852778933e+00, 2.3304810688329010e+00, 2.5041702270574335e+00, 2.6780909229187282e+00, 2.8522606093090528e+00, 3.0266970544577654e+00, 3.2014183685612045e+00, 3.3764430317567267e+00, 3.5517899235782049e+00, 3.7274783540427840e+00, 3.9035280965327215e+00, 4.0799594226519913e+00, 4.2567931392552376e+00, 4.4340506278668892e+00, 4.6117538867311527e+00, 4.7899255757595700e+00, 4.9685890646722539e+00, 5.1477684846624596e+00, 5.3274887839522851e+00, 5.5077757876508500e+00, 5.6886562623761669e+00, 5.8701579861589899e+00, 6.0523098242126343e+00, 6.2351418112283552e+00, 6.4186852409432520e+00, 6.6029727638288866e+00, 6.7880384938663552e+00, 6.9739181255105622e+00, 7.1606490621065957e+00, 7.3482705572089442e+00, 7.5368238704753843e+00, 7.7263524400685295e+00, 7.9169020738077709e+00, 8.1085211616830808e+00, 8.3012609127831123e+00, 8.4951756202195732e+00, 8.6903229582689274e+00, 8.8867643167273460e+00, 9.0845651784194441e+00, 9.2837955469590963e+00, 9.4845304332878886e+00, 9.6868504112873062e+00, 9.8908422549712061e+00, 1.0096599672544393e+01, 1.0304224156133158e+01, 1.0513825970487574e+01, 1.0725525309740862e+01, 1.0939453658827642e+01, 1.1155755406023257e+01, 1.1374589766138451e+01, 1.1596133091430600e+01, 1.1820581671084192e+01, 1.2048155152843032e+01, 1.2279100766067012e+01, 1.2513698590301434e+01, 1.2752268207034200e+01, 1.2995177210125478e+01, 1.3242852257808744e+01, 1.3495793669093878e+01, 1.3754595074773901e+01, 1.4019970463722631e+01, 1.4292792374971157e+01, 1.4574147483007465e+01, 
1.4865420473490115e+01, 1.5168426302403656e+01, 1.5485630512193200e+01, 1.5820543038026845e+01, 1.6178491720661924e+01, 1.6568358837502078e+01, 1.7007379344198675e+01, 1.7540659022148994e+01}, + {-1.7596093576995067e+01, -1.7063369335203092e+01, -1.6624816240761632e+01, -1.6235372148216229e+01, -1.5877818632519856e+01, -1.5543281876523103e+01, -1.5226439085523806e+01, -1.4923783633761518e+01, -1.4632852295689380e+01, -1.4351831817083784e+01, -1.4079338809250348e+01, -1.3814287615693701e+01, -1.3555806515930390e+01, -1.3303182192722531e+01, -1.3055821575985398e+01, -1.2813224821897842e+01, -1.2574965680305057e+01, -1.2340676911954159e+01, -1.2110039246794949e+01, -1.1882772881465453e+01, -1.1658630833708994e+01, -1.1437393678686862e+01, -1.1218865329829219e+01, -1.1002869620365653e+01, -1.0789247506430682e+01, -1.0577854758286833e+01, -1.0368560038906804e+01, -1.0161243292925425e+01, -9.9557943864826086e+00, -9.7521119515382608e+00, -9.5501023980913669e+00, -9.3496790652448780e+00, -9.1507614878382224e+00, -8.9532747598589513e+00, -8.7571489793618653e+00, -8.5623187624005972e+00, -8.3687228156851035e+00, -8.1763035594473781e+00, -7.9850067934236462e+00, -7.7947814000180404e+00, -7.6055790796564340e+00, -7.4173541141133637e+00, -7.2300631542333695e+00, -7.0436650289972214e+00, -6.8581205733240118e+00, -6.6733924723685520e+00, -6.4894451203829293e+00, -6.3062444924720467e+00, -6.1237580277938539e+00, -5.9419545229426243e+00, -5.7608040344137255e+00, -5.5802777891851223e+00, -5.4003481025684028e+00, -5.2209883025831942e+00, -5.0421726601961838e+00, -4.8638763248414749e+00, -4.6860752647046970e+00, -4.5087462113102941e+00, -4.3318666080012935e+00, -4.1554145619443341e+00, -3.9793687993309228e+00, -3.8037086234793556e+00, -3.6284138755712148e+00, -3.4534648977822986e+00, -3.2788424985907505e+00, -3.1045279200654083e+00, -2.9305028069552952e+00, -2.7567491774170563e+00, -2.5832493952312059e+00, -2.4099861433705372e+00, -2.2369423987951111e+00, -2.0641014083580762e+00, -1.8914466657152740e+00, -1.7189618891393004e+00, -1.5466310001455315e+00, -1.3744381028436181e+00, -1.2023674639332740e+00, -1.0304034932677821e+00, -8.5853072491269411e-01, -6.8673379863064854e-01, -5.1499744172618944e-01, -3.4330645118694048e-01, -1.7164567605949776e-01, -0.0000000000000000e+00, 1.7164567605949776e-01, 3.4330645118694048e-01, 5.1499744172618944e-01, 6.8673379863064854e-01, 8.5853072491269411e-01, 1.0304034932677821e+00, 1.2023674639332740e+00, 1.3744381028436181e+00, 1.5466310001455315e+00, 1.7189618891393004e+00, 1.8914466657152740e+00, 2.0641014083580762e+00, 2.2369423987951111e+00, 2.4099861433705372e+00, 2.5832493952312059e+00, 2.7567491774170563e+00, 2.9305028069552952e+00, 3.1045279200654083e+00, 3.2788424985907505e+00, 3.4534648977822986e+00, 3.6284138755712148e+00, 3.8037086234793556e+00, 3.9793687993309228e+00, 4.1554145619443341e+00, 4.3318666080012935e+00, 4.5087462113102941e+00, 4.6860752647046970e+00, 4.8638763248414749e+00, 5.0421726601961838e+00, 5.2209883025831942e+00, 5.4003481025684028e+00, 5.5802777891851223e+00, 5.7608040344137255e+00, 5.9419545229426243e+00, 6.1237580277938539e+00, 6.3062444924720467e+00, 6.4894451203829293e+00, 6.6733924723685520e+00, 6.8581205733240118e+00, 7.0436650289972214e+00, 7.2300631542333695e+00, 7.4173541141133637e+00, 7.6055790796564340e+00, 7.7947814000180404e+00, 7.9850067934236462e+00, 8.1763035594473781e+00, 8.3687228156851035e+00, 8.5623187624005972e+00, 8.7571489793618653e+00, 8.9532747598589513e+00, 9.1507614878382224e+00, 9.3496790652448780e+00, 
9.5501023980913669e+00, 9.7521119515382608e+00, 9.9557943864826086e+00, 1.0161243292925425e+01, 1.0368560038906804e+01, 1.0577854758286833e+01, 1.0789247506430682e+01, 1.1002869620365653e+01, 1.1218865329829219e+01, 1.1437393678686862e+01, 1.1658630833708994e+01, 1.1882772881465453e+01, 1.2110039246794949e+01, 1.2340676911954159e+01, 1.2574965680305057e+01, 1.2813224821897842e+01, 1.3055821575985398e+01, 1.3303182192722531e+01, 1.3555806515930390e+01, 1.3814287615693701e+01, 1.4079338809250348e+01, 1.4351831817083784e+01, 1.4632852295689380e+01, 1.4923783633761518e+01, 1.5226439085523806e+01, 1.5543281876523103e+01, 1.5877818632519856e+01, 1.6235372148216229e+01, 1.6624816240761632e+01, 1.7063369335203092e+01, 1.7596093576995067e+01}, + {-1.7651360006273592e+01, -1.7119187224564673e+01, -1.6681098143347068e+01, -1.6292073956617820e+01, -1.5934912658163535e+01, -1.5600748834050473e+01, -1.5284264701644213e+01, -1.4981956922278711e+01, -1.4691364571634727e+01, -1.4410676085677181e+01, -1.4138509363787067e+01, -1.3873779760412367e+01, -1.3615616367875141e+01, -1.3363306535787522e+01, -1.3116257750703349e+01, -1.2873970640467419e+01, -1.2636019359905287e+01, -1.2402037021606754e+01, -1.2171704664459938e+01, -1.1944742758999288e+01, -1.1720904567955738e+01, -1.1499970887418726e+01, -1.1281745831568482e+01, -1.1066053417347991e+01, -1.0852734770136896e+01, -1.0641645817094179e+01, -1.0432655367504838e+01, -1.0225643503212597e+01, -1.0020500219714993e+01, -9.8171242715446922e+00, -9.6154221854030180e+00, -9.4153074120141333e+00, -9.2166995934430851e+00, -9.0195239271065724e+00, -8.8237106112187824e+00, -8.6291943591887517e+00, -8.4359139726921839e+00, -8.2438119649078239e+00, -8.0528342268332285e+00, -7.8629297307503139e+00, -7.6740502658540599e+00, -7.4861502018311290e+00, -7.2991862768130868e+00, -7.1131174066575413e+00, -6.9279045129506267e+00, -6.7435103674923891e+00, -6.5598994513357987e+00, -6.3770378267108159e+00, -6.1948930203856341e+00, -6.0134339172047717e+00, -5.8326306627035311e+00, -5.6524545738351115e+00, -5.4728780569640936e+00, -5.2938745323809906e+00, -5.1154183646798366e+00, -4.9374847984163193e+00, -4.7600498985294708e+00, -4.5830904950670517e+00, -4.4065841318044781e+00, -4.2305090183906966e+00, -4.0548439856925214e+00, -3.8795684440424409e+00, -3.7046623441243547e+00, -3.5301061402576419e+00, -3.3558807558629016e+00, -3.1819675509129310e+00, -3.0083482911904582e+00, -2.8350051191900167e+00, -2.6619205265154431e+00, -2.4890773276369966e+00, -2.3164586348831824e+00, -2.1440478345522327e+00, -1.9718285640369488e+00, -1.7997846898643619e+00, -1.6279002865585475e+00, -1.4561596162410049e+00, -1.2845471088883613e+00, -1.1130473431718593e+00, -9.4164502780718806e-01, -7.7032498334676469e-01, -5.9907212434961710e-01, -4.2787144186658727e-01, -2.5670798618070745e-01, -8.5566849744303619e-02, 8.5566849744303619e-02, 2.5670798618070745e-01, 4.2787144186658727e-01, 5.9907212434961710e-01, 7.7032498334676469e-01, 9.4164502780718806e-01, 1.1130473431718593e+00, 1.2845471088883613e+00, 1.4561596162410049e+00, 1.6279002865585475e+00, 1.7997846898643619e+00, 1.9718285640369488e+00, 2.1440478345522327e+00, 2.3164586348831824e+00, 2.4890773276369966e+00, 2.6619205265154431e+00, 2.8350051191900167e+00, 3.0083482911904582e+00, 3.1819675509129310e+00, 3.3558807558629016e+00, 3.5301061402576419e+00, 3.7046623441243547e+00, 3.8795684440424409e+00, 4.0548439856925214e+00, 4.2305090183906966e+00, 4.4065841318044781e+00, 4.5830904950670517e+00, 4.7600498985294708e+00, 4.9374847984163193e+00, 
5.1154183646798366e+00, 5.2938745323809906e+00, 5.4728780569640936e+00, 5.6524545738351115e+00, 5.8326306627035311e+00, 6.0134339172047717e+00, 6.1948930203856341e+00, 6.3770378267108159e+00, 6.5598994513357987e+00, 6.7435103674923891e+00, 6.9279045129506267e+00, 7.1131174066575413e+00, 7.2991862768130868e+00, 7.4861502018311290e+00, 7.6740502658540599e+00, 7.8629297307503139e+00, 8.0528342268332285e+00, 8.2438119649078239e+00, 8.4359139726921839e+00, 8.6291943591887517e+00, 8.8237106112187824e+00, 9.0195239271065724e+00, 9.2166995934430851e+00, 9.4153074120141333e+00, 9.6154221854030180e+00, 9.8171242715446922e+00, 1.0020500219714993e+01, 1.0225643503212597e+01, 1.0432655367504838e+01, 1.0641645817094179e+01, 1.0852734770136896e+01, 1.1066053417347991e+01, 1.1281745831568482e+01, 1.1499970887418726e+01, 1.1720904567955738e+01, 1.1944742758999288e+01, 1.2171704664459938e+01, 1.2402037021606754e+01, 1.2636019359905287e+01, 1.2873970640467419e+01, 1.3116257750703349e+01, 1.3363306535787522e+01, 1.3615616367875141e+01, 1.3873779760412367e+01, 1.4138509363787067e+01, 1.4410676085677181e+01, 1.4691364571634727e+01, 1.4981956922278711e+01, 1.5284264701644213e+01, 1.5600748834050473e+01, 1.5934912658163535e+01, 1.6292073956617820e+01, 1.6681098143347068e+01, 1.7119187224564673e+01, 1.7651360006273592e+01}, + {-1.7706459824808160e+01, -1.7174834579405605e+01, -1.6737206157692185e+01, -1.6348598800359760e+01, -1.5991826809662236e+01, -1.5658033118559841e+01, -1.5341904922927041e+01, -1.5039942149162044e+01, -1.4749686158703426e+01, -1.4469327066282485e+01, -1.4197484050584235e+01, -1.3933073470045795e+01, -1.3675225224131985e+01, -1.3423227324587016e+01, -1.3176487810593191e+01, -1.2934507778191829e+01, -1.2696861784139129e+01, -1.2463183290148008e+01, -1.2233153641587771e+01, -1.2006493580642585e+01, -1.1782956612954054e+01, -1.1562323753600294e+01, -1.1344399315683788e+01, -1.1129007498123606e+01, -1.0915989593880358e+01, -1.0705201685404040e+01, -1.0496512726733012e+01, -1.0289802935397134e+01, -1.0084962434756044e+01, -9.8818901004391417e+00, -9.6804925743867667e+00, -9.4806834174876062e+00, -9.2823823775768961e+00, -9.0855147540413785e+00, -8.8900108437873531e+00, -8.6958054560996842e+00, -8.5028374861240277e+00, -8.3110495384702450e+00, -8.1203875938581902e+00, -7.9308007128818385e+00, -7.7422407719095485e+00, -7.5546622269112049e+00, -7.3680219016401658e+00, -7.1822787971261874e+00, -6.9973939198751642e+00, -6.8133301265393582e+00, -6.6300519831306728e+00, -6.4475256371099929e+00, -6.2657187009061461e+00, -6.0846001456053820e+00, -5.9041402037120223e+00, -5.7243102800175727e+00, -5.5450828697328980e+00, -5.3664314831390003e+00, -5.1883305760991361e+00, -5.0107554858504457e+00, -4.8336823715588126e+00, -4.6570881591776718e+00, -4.4809504902012192e+00, -4.3052476739459955e+00, -4.1299586430328983e+00, -3.9550629117751774e+00, -3.7805405372073957e+00, -3.6063720825162893e+00, -3.4325385826573980e+00, -3.2590215119615689e+00, -3.0858027535533901e+00, -2.9128645704195151e+00, -2.7401895779789363e+00, -2.5677607180197990e+00, -2.3955612338784800e+00, -2.2235746467465418e+00, -2.0517847329999523e+00, -1.8801755024527722e+00, -1.7087311774444227e+00, -1.5374361726757684e+00, -1.3662750757146653e+00, -1.1952326280963763e+00, -1.0242937069484293e+00, -8.5344330707311145e-01, -6.8266652342392853e-01, -5.1194853391500583e-01, -3.4127458250465148e-01, -1.7062996249611376e-01, -0.0000000000000000e+00, 1.7062996249611376e-01, 3.4127458250465148e-01, 5.1194853391500583e-01, 6.8266652342392853e-01, 
8.5344330707311145e-01, 1.0242937069484293e+00, 1.1952326280963763e+00, 1.3662750757146653e+00, 1.5374361726757684e+00, 1.7087311774444227e+00, 1.8801755024527722e+00, 2.0517847329999523e+00, 2.2235746467465418e+00, 2.3955612338784800e+00, 2.5677607180197990e+00, 2.7401895779789363e+00, 2.9128645704195151e+00, 3.0858027535533901e+00, 3.2590215119615689e+00, 3.4325385826573980e+00, 3.6063720825162893e+00, 3.7805405372073957e+00, 3.9550629117751774e+00, 4.1299586430328983e+00, 4.3052476739459955e+00, 4.4809504902012192e+00, 4.6570881591776718e+00, 4.8336823715588126e+00, 5.0107554858504457e+00, 5.1883305760991361e+00, 5.3664314831390003e+00, 5.5450828697328980e+00, 5.7243102800175727e+00, 5.9041402037120223e+00, 6.0846001456053820e+00, 6.2657187009061461e+00, 6.4475256371099929e+00, 6.6300519831306728e+00, 6.8133301265393582e+00, 6.9973939198751642e+00, 7.1822787971261874e+00, 7.3680219016401658e+00, 7.5546622269112049e+00, 7.7422407719095485e+00, 7.9308007128818385e+00, 8.1203875938581902e+00, 8.3110495384702450e+00, 8.5028374861240277e+00, 8.6958054560996842e+00, 8.8900108437873531e+00, 9.0855147540413785e+00, 9.2823823775768961e+00, 9.4806834174876062e+00, 9.6804925743867667e+00, 9.8818901004391417e+00, 1.0084962434756044e+01, 1.0289802935397134e+01, 1.0496512726733012e+01, 1.0705201685404040e+01, 1.0915989593880358e+01, 1.1129007498123606e+01, 1.1344399315683788e+01, 1.1562323753600294e+01, 1.1782956612954054e+01, 1.2006493580642585e+01, 1.2233153641587771e+01, 1.2463183290148008e+01, 1.2696861784139129e+01, 1.2934507778191829e+01, 1.3176487810593191e+01, 1.3423227324587016e+01, 1.3675225224131985e+01, 1.3933073470045795e+01, 1.4197484050584235e+01, 1.4469327066282485e+01, 1.4749686158703426e+01, 1.5039942149162044e+01, 1.5341904922927041e+01, 1.5658033118559841e+01, 1.5991826809662236e+01, 1.6348598800359760e+01, 1.6737206157692185e+01, 1.7174834579405605e+01, 1.7706459824808160e+01}, + {-1.7761394524848996e+01, -1.7230312943277969e+01, -1.6793141871786613e+01, -1.6404948308671031e+01, -1.6048562755664683e+01, -1.5715136437003761e+01, -1.5399361493933315e+01, -1.5097741096163510e+01, -1.4807818875617881e+01, -1.4527786614519313e+01, -1.4256264762200541e+01, -1.3992170674226745e+01, -1.3734635051620387e+01, -1.3482946563606328e+01, -1.3236513798043475e+01, -1.2994838315752334e+01, -1.2757495072417459e+01, -1.2524117876199165e+01, -1.2294388376531968e+01, -1.2068027585042133e+01, -1.1844789248242524e+01, -1.1624454598296943e+01, -1.1406828145437910e+01, -1.1191734268858408e+01, -1.0979014427470727e+01, -1.0768524857445218e+01, -1.0560134656049588e+01, -1.0353724175012228e+01, -1.0149183664094940e+01, -9.9464121185839414e+00, -9.7453162942320208e+00, -9.5458098606732449e+00, -9.3478126700959958e+00, -9.1512501224373288e+00, -8.9560526118688415e+00, -8.7621550421132515e+00, -8.5694964003332892e+00, -8.3780193810985182e+00, -8.1876700533578219e+00, -7.9983975644987861e+00, -7.8101538765164022e+00, -7.6228935300856273e+00, -7.4365734329690358e+00, -7.2511526697184987e+00, -7.0665923300691791e+00, -6.8828553537915784e+00, -6.6999063900760145e+00, -6.5177116697841271e+00, -6.3362388891223835e+00, -6.1554571034797059e+00, -5.9753366303309603e+00, -5.7958489602446139e+00, -5.6169666751500360e+00, -5.4386633731207876e+00, -5.2609135990173996e+00, -5.0836927804084810e+00, -4.9069771682545298e+00, -4.7307437818957530e+00, -4.5549703579349439e+00, -4.3796353026499011e+00, -4.2047176476080219e+00, -4.0301970081890968e+00, -3.8560535447518163e+00, -3.6822679262054381e+00, -3.5088212957709741e+00, 
-3.3356952387365157e+00, -3.1628717520292895e+00, -2.9903332154429094e+00, -2.8180623643724516e+00, -2.6460422639225052e+00, -2.4742562842645146e+00, -2.3026880771296399e+00, -2.1313215533321839e+00, -1.9601408612264668e+00, -1.7891303660069930e+00, -1.6182746297679145e+00, -1.4475583922432660e+00, -1.2769665521542517e+00, -1.1064841490940971e+00, -9.3609634588467550e-01, -7.6578841134230902e-01, -5.9554570339290602e-01, -4.2535365247890039e-01, -2.5519774520239308e-01, -8.5063508150433453e-02, 8.5063508150433453e-02, 2.5519774520239308e-01, 4.2535365247890039e-01, 5.9554570339290602e-01, 7.6578841134230902e-01, 9.3609634588467550e-01, 1.1064841490940971e+00, 1.2769665521542517e+00, 1.4475583922432660e+00, 1.6182746297679145e+00, 1.7891303660069930e+00, 1.9601408612264668e+00, 2.1313215533321839e+00, 2.3026880771296399e+00, 2.4742562842645146e+00, 2.6460422639225052e+00, 2.8180623643724516e+00, 2.9903332154429094e+00, 3.1628717520292895e+00, 3.3356952387365157e+00, 3.5088212957709741e+00, 3.6822679262054381e+00, 3.8560535447518163e+00, 4.0301970081890968e+00, 4.2047176476080219e+00, 4.3796353026499011e+00, 4.5549703579349439e+00, 4.7307437818957530e+00, 4.9069771682545298e+00, 5.0836927804084810e+00, 5.2609135990173996e+00, 5.4386633731207876e+00, 5.6169666751500360e+00, 5.7958489602446139e+00, 5.9753366303309603e+00, 6.1554571034797059e+00, 6.3362388891223835e+00, 6.5177116697841271e+00, 6.6999063900760145e+00, 6.8828553537915784e+00, 7.0665923300691791e+00, 7.2511526697184987e+00, 7.4365734329690358e+00, 7.6228935300856273e+00, 7.8101538765164022e+00, 7.9983975644987861e+00, 8.1876700533578219e+00, 8.3780193810985182e+00, 8.5694964003332892e+00, 8.7621550421132515e+00, 8.9560526118688415e+00, 9.1512501224373288e+00, 9.3478126700959958e+00, 9.5458098606732449e+00, 9.7453162942320208e+00, 9.9464121185839414e+00, 1.0149183664094940e+01, 1.0353724175012228e+01, 1.0560134656049588e+01, 1.0768524857445218e+01, 1.0979014427470727e+01, 1.1191734268858408e+01, 1.1406828145437910e+01, 1.1624454598296943e+01, 1.1844789248242524e+01, 1.2068027585042133e+01, 1.2294388376531968e+01, 1.2524117876199165e+01, 1.2757495072417459e+01, 1.2994838315752334e+01, 1.3236513798043475e+01, 1.3482946563606328e+01, 1.3734635051620387e+01, 1.3992170674226745e+01, 1.4256264762200541e+01, 1.4527786614519313e+01, 1.4807818875617881e+01, 1.5097741096163510e+01, 1.5399361493933315e+01, 1.5715136437003761e+01, 1.6048562755664683e+01, 1.6404948308671031e+01, 1.6793141871786613e+01, 1.7230312943277969e+01, 1.7761394524848996e+01}, + {-1.7816165576540595e+01, -1.7285623836656182e+01, -1.6848906849690259e+01, -1.6461124086051807e+01, -1.6105122139319558e+01, -1.5772060470078458e+01, -1.5456636132218662e+01, -1.5155355517282629e+01, -1.4865764512599243e+01, -1.4586056556752672e+01, -1.4314853361179868e+01, -1.4051073271804183e+01, -1.3793847785696649e+01, -1.3542466224976369e+01, -1.3296337722283933e+01, -1.3054964299852571e+01, -1.2817921309339152e+01, -1.2584842902718409e+01, -1.2355411031114167e+01, -1.2129346973425209e+01, -1.1906404715032753e+01, -1.1686365703318645e+01, -1.1469034643887829e+01, -1.1254236094539081e+01, -1.1041811678540856e+01, -1.0831617784246889e+01, -1.0623523650663971e+01, -1.0417409762265434e+01, -1.0213166493789654e+01, -1.0010692958774635e+01, -9.8098960253945080e+00, -9.6106894706457702e+00, -9.4129932496899222e+00, -9.2167328616325275e+00, -9.0218387965226103e+00, -8.8282460511228340e+00, -8.6358937032013543e+00, -8.4447245358586134e+00, -8.2546847048231751e+00, -8.0657234428032414e+00, 
-7.8777927959207110e+00, -7.6908473880261887e+00, -7.5048442093293950e+00, -7.3197424263067070e+00, -7.1355032102865055e+00, -6.9520895824801361e+00, -6.7694662735346460e+00, -6.5875995959434990e+00, -6.4064573278715784e+00, -6.2260086071378540e+00, -6.0462238342585293e+00, -5.8670745835899512e+00, -5.6885335217276216e+00, -5.5105743324184848e+00, -5.3331716473306852e+00, -5.1563009821003343e+00, -4.9799386771402681e+00, -4.8040618427527138e+00, -4.6286483081374303e+00, -4.4536765739303981e+00, -4.2791257679461268e+00, -4.1049756038301792e+00, -3.9312063423578660e+00, -3.7577987551410561e+00, -3.5847340905279546e+00, -3.4119940415009409e+00, -3.2395607153955579e+00, -3.0674166052796257e+00, -2.8955445628456173e+00, -2.7239277726819950e+00, -2.5525497278003657e+00, -2.3813942063052753e+00, -2.2104452491022966e+00, -2.0396871385479329e+00, -1.8691043779518630e+00, -1.6986816718482496e+00, -1.5284039069583553e+00, -1.3582561337715613e+00, -1.1882235486761765e+00, -1.0182914765751778e+00, -8.4844535392529652e-01, -6.7867071214067709e-01, -5.0895316130475177e-01, -3.3927837413597878e-01, -1.6963207015474091e-01, -0.0000000000000000e+00, 1.6963207015474091e-01, 3.3927837413597878e-01, 5.0895316130475177e-01, 6.7867071214067709e-01, 8.4844535392529652e-01, 1.0182914765751778e+00, 1.1882235486761765e+00, 1.3582561337715613e+00, 1.5284039069583553e+00, 1.6986816718482496e+00, 1.8691043779518630e+00, 2.0396871385479329e+00, 2.2104452491022966e+00, 2.3813942063052753e+00, 2.5525497278003657e+00, 2.7239277726819950e+00, 2.8955445628456173e+00, 3.0674166052796257e+00, 3.2395607153955579e+00, 3.4119940415009409e+00, 3.5847340905279546e+00, 3.7577987551410561e+00, 3.9312063423578660e+00, 4.1049756038301792e+00, 4.2791257679461268e+00, 4.4536765739303981e+00, 4.6286483081374303e+00, 4.8040618427527138e+00, 4.9799386771402681e+00, 5.1563009821003343e+00, 5.3331716473306852e+00, 5.5105743324184848e+00, 5.6885335217276216e+00, 5.8670745835899512e+00, 6.0462238342585293e+00, 6.2260086071378540e+00, 6.4064573278715784e+00, 6.5875995959434990e+00, 6.7694662735346460e+00, 6.9520895824801361e+00, 7.1355032102865055e+00, 7.3197424263067070e+00, 7.5048442093293950e+00, 7.6908473880261887e+00, 7.8777927959207110e+00, 8.0657234428032414e+00, 8.2546847048231751e+00, 8.4447245358586134e+00, 8.6358937032013543e+00, 8.8282460511228340e+00, 9.0218387965226103e+00, 9.2167328616325275e+00, 9.4129932496899222e+00, 9.6106894706457702e+00, 9.8098960253945080e+00, 1.0010692958774635e+01, 1.0213166493789654e+01, 1.0417409762265434e+01, 1.0623523650663971e+01, 1.0831617784246889e+01, 1.1041811678540856e+01, 1.1254236094539081e+01, 1.1469034643887829e+01, 1.1686365703318645e+01, 1.1906404715032753e+01, 1.2129346973425209e+01, 1.2355411031114167e+01, 1.2584842902718409e+01, 1.2817921309339152e+01, 1.3054964299852571e+01, 1.3296337722283933e+01, 1.3542466224976369e+01, 1.3793847785696649e+01, 1.4051073271804183e+01, 1.4314853361179868e+01, 1.4586056556752672e+01, 1.4865764512599243e+01, 1.5155355517282629e+01, 1.5456636132218662e+01, 1.5772060470078458e+01, 1.6105122139319558e+01, 1.6461124086051807e+01, 1.6848906849690259e+01, 1.7285623836656182e+01, 1.7816165576540595e+01}, + {-1.7870774428377043e+01, -1.7340768757416335e+01, -1.6904502632033939e+01, -1.6517127712794291e+01, -1.6161506578815988e+01, -1.5828806872783744e+01, -1.5513730528912630e+01, -1.5212787139365300e+01, -1.4923524831985965e+01, -1.4644138690731507e+01, -1.4373251680710139e+01, -1.4109783131522859e+01, -1.3852865330854605e+01, -1.3601788249195971e+01, 
-1.3355961560129954e+01, -1.3114887743985697e+01, -1.2878142545481497e+01, -1.2645360457815213e+01, -1.2416223731463095e+01, -1.2190453910463864e+01, -1.1967805217099682e+01, -1.1748059312137153e+01, -1.1531021094829603e+01, -1.1316515299946504e+01, -1.1104383713549687e+01, -1.0894482874671912e+01, -1.0686682162601475e+01, -1.0480862193135913e+01, -1.0276913464597831e+01, -1.0074735207405618e+01, -9.8742344007979135e+00, -9.6753249277855886e+00, -9.4779268451589243e+00, -9.2819657498467407e+00, -9.0873722264251189e+00, -8.8940813633373228e+00, -8.7020323275849982e+00, -8.5111679894115486e+00, -8.3214345899181446e+00, -8.1327814457042589e+00, -7.9451606855641499e+00, -7.7585270150414196e+00, -7.5728375052793471e+00, -7.3880514031314979e+00, -7.2041299599356252e+00, -7.0210362767207544e+00, -6.8387351639253717e+00, -6.6571930139644744e+00, -6.4763776852031878e+00, -6.2962583960814955e+00, -6.1168056282939949e+00, -5.9379910380649177e+00, -5.7597873746756241e+00, -5.5821684055025109e+00, -5.4051088469102604e+00, -5.2285843004206338e+00, -5.0525711936423852e+00, -4.8770467255047967e+00, -4.7019888153869536e+00, -4.5273760557783174e+00, -4.3531876681442050e+00, -4.1794034617032088e+00, -4.0060037948530276e+00, -3.8329695390070744e+00, -3.6602820446272100e+00, -3.4879231092581433e+00, -3.3158749473870577e+00, -3.1441201619679244e+00, -2.9726417174641275e+00, -2.8014229142755962e+00, -2.6304473644278392e+00, -2.4596989684102435e+00, -2.2891618930598643e+00, -2.1188205503948270e+00, -1.9486595773085056e+00, -1.7786638160418760e+00, -1.6088182953569903e+00, -1.4391082123394401e+00, -1.2695189147619996e+00, -1.1000358839454638e+00, -9.3064471805601867e-01, -7.6133111578138291e-01, -5.9208086033042751e-01, -4.2287980370308703e-01, -2.5371385117911249e-01, -8.4568945975610571e-02, 8.4568945975610571e-02, 2.5371385117911249e-01, 4.2287980370308703e-01, 5.9208086033042751e-01, 7.6133111578138291e-01, 9.3064471805601867e-01, 1.1000358839454638e+00, 1.2695189147619996e+00, 1.4391082123394401e+00, 1.6088182953569903e+00, 1.7786638160418760e+00, 1.9486595773085056e+00, 2.1188205503948270e+00, 2.2891618930598643e+00, 2.4596989684102435e+00, 2.6304473644278392e+00, 2.8014229142755962e+00, 2.9726417174641275e+00, 3.1441201619679244e+00, 3.3158749473870577e+00, 3.4879231092581433e+00, 3.6602820446272100e+00, 3.8329695390070744e+00, 4.0060037948530276e+00, 4.1794034617032088e+00, 4.3531876681442050e+00, 4.5273760557783174e+00, 4.7019888153869536e+00, 4.8770467255047967e+00, 5.0525711936423852e+00, 5.2285843004206338e+00, 5.4051088469102604e+00, 5.5821684055025109e+00, 5.7597873746756241e+00, 5.9379910380649177e+00, 6.1168056282939949e+00, 6.2962583960814955e+00, 6.4763776852031878e+00, 6.6571930139644744e+00, 6.8387351639253717e+00, 7.0210362767207544e+00, 7.2041299599356252e+00, 7.3880514031314979e+00, 7.5728375052793471e+00, 7.7585270150414196e+00, 7.9451606855641499e+00, 8.1327814457042589e+00, 8.3214345899181446e+00, 8.5111679894115486e+00, 8.7020323275849982e+00, 8.8940813633373228e+00, 9.0873722264251189e+00, 9.2819657498467407e+00, 9.4779268451589243e+00, 9.6753249277855886e+00, 9.8742344007979135e+00, 1.0074735207405618e+01, 1.0276913464597831e+01, 1.0480862193135913e+01, 1.0686682162601475e+01, 1.0894482874671912e+01, 1.1104383713549687e+01, 1.1316515299946504e+01, 1.1531021094829603e+01, 1.1748059312137153e+01, 1.1967805217099682e+01, 1.2190453910463864e+01, 1.2416223731463095e+01, 1.2645360457815213e+01, 1.2878142545481497e+01, 1.3114887743985697e+01, 1.3355961560129954e+01, 1.3601788249195971e+01, 
1.3852865330854605e+01, 1.4109783131522859e+01, 1.4373251680710139e+01, 1.4644138690731507e+01, 1.4923524831985965e+01, 1.5212787139365300e+01, 1.5513730528912630e+01, 1.5828806872783744e+01, 1.6161506578815988e+01, 1.6517127712794291e+01, 1.6904502632033939e+01, 1.7340768757416335e+01, 1.7870774428377043e+01}, + {-1.7925222507645273e+01, -1.7395749181302794e+01, -1.6959930736506667e+01, -1.6572960745489535e+01, -1.6217717667909398e+01, -1.5885377274967727e+01, -1.5570646349282180e+01, -1.5270037662686175e+01, -1.4981101568835271e+01, -1.4702034786209495e+01, -1.4431461525263758e+01, -1.4168302092683751e+01, -1.3911689561406538e+01, -1.3660914545833675e+01, -1.3415387256705733e+01, -1.3174610629179487e+01, -1.2938160798167818e+01, -1.2705672595541055e+01, -1.2476828568829013e+01, -1.2251350525113802e+01, -1.2028992921645616e+01, -1.1809537630775932e+01, -1.1592789743714977e+01, -1.1378574170599888e+01, -1.1166732858754798e+01, -1.0957122496418719e+01, -1.0749612601735487e+01, -1.0544083920437931e+01, -1.0340427073072608e+01, -1.0138541405599460e+01, -9.9383340069978168e+00, -9.7397188649785829e+00, -9.5426161366508815e+00, -9.3469515154567748e+00, -9.1526556791851785e+00, -8.9596638066377352e+00, -8.7679151527158581e+00, -8.5773526734573888e+00, -8.3879226939696405e+00, -8.1995746133560008e+00, -8.0122606416719417e+00, -7.8259355647163380e+00, -7.6405565330989909e+00, -7.4560828725515957e+00, -7.2724759128875176e+00, -7.0896988333823048e+00, -6.9077165226546269e+00, -6.7264954513869304e+00, -6.5460035564448802e+00, -6.3662101351413103e+00, -6.1870857485496860e+00, -6.0086021329082362e+00, -5.8307321182728238e+00, -5.6534495536772900e+00, -5.4767292381468682e+00, -5.3005468569855445e+00, -5.1248789228235241e+00, -4.9497027209678768e+00, -4.7749962586489980e+00, -4.6007382177989511e+00, -4.4269079110357756e+00, -4.2534852405612638e+00, -4.0804506597091201e+00, -3.9077851369063397e+00, -3.7354701218335649e+00, -3.5634875135904314e+00, -3.3918196306899016e+00, -3.2204491827214987e+00, -3.0493592435375283e+00, -2.8785332258289715e+00, -2.7079548569689353e+00, -2.5376081560115265e+00, -2.3674774117429300e+00, -2.1975471616893691e+00, -2.0278021719937014e+00, -1.8582274180786869e+00, -1.6888080660205369e+00, -1.5195294545613143e+00, -1.3503770776931412e+00, -1.1813365677510279e+00, -1.0123936789545276e+00, -8.4353427134138192e-01, -6.7474429503886002e-01, -5.0600977482067722e-01, -3.3731679489919819e-01, -1.6865148390411927e-01, -0.0000000000000000e+00, 1.6865148390411927e-01, 3.3731679489919819e-01, 5.0600977482067722e-01, 6.7474429503886002e-01, 8.4353427134138192e-01, 1.0123936789545276e+00, 1.1813365677510279e+00, 1.3503770776931412e+00, 1.5195294545613143e+00, 1.6888080660205369e+00, 1.8582274180786869e+00, 2.0278021719937014e+00, 2.1975471616893691e+00, 2.3674774117429300e+00, 2.5376081560115265e+00, 2.7079548569689353e+00, 2.8785332258289715e+00, 3.0493592435375283e+00, 3.2204491827214987e+00, 3.3918196306899016e+00, 3.5634875135904314e+00, 3.7354701218335649e+00, 3.9077851369063397e+00, 4.0804506597091201e+00, 4.2534852405612638e+00, 4.4269079110357756e+00, 4.6007382177989511e+00, 4.7749962586489980e+00, 4.9497027209678768e+00, 5.1248789228235241e+00, 5.3005468569855445e+00, 5.4767292381468682e+00, 5.6534495536772900e+00, 5.8307321182728238e+00, 6.0086021329082362e+00, 6.1870857485496860e+00, 6.3662101351413103e+00, 6.5460035564448802e+00, 6.7264954513869304e+00, 6.9077165226546269e+00, 7.0896988333823048e+00, 7.2724759128875176e+00, 7.4560828725515957e+00, 7.6405565330989909e+00, 
7.8259355647163380e+00, 8.0122606416719417e+00, 8.1995746133560008e+00, 8.3879226939696405e+00, 8.5773526734573888e+00, 8.7679151527158581e+00, 8.9596638066377352e+00, 9.1526556791851785e+00, 9.3469515154567748e+00, 9.5426161366508815e+00, 9.7397188649785829e+00, 9.9383340069978168e+00, 1.0138541405599460e+01, 1.0340427073072608e+01, 1.0544083920437931e+01, 1.0749612601735487e+01, 1.0957122496418719e+01, 1.1166732858754798e+01, 1.1378574170599888e+01, 1.1592789743714977e+01, 1.1809537630775932e+01, 1.2028992921645616e+01, 1.2251350525113802e+01, 1.2476828568829013e+01, 1.2705672595541055e+01, 1.2938160798167818e+01, 1.3174610629179487e+01, 1.3415387256705733e+01, 1.3660914545833675e+01, 1.3911689561406538e+01, 1.4168302092683751e+01, 1.4431461525263758e+01, 1.4702034786209495e+01, 1.4981101568835271e+01, 1.5270037662686175e+01, 1.5570646349282180e+01, 1.5885377274967727e+01, 1.6217717667909398e+01, 1.6572960745489535e+01, 1.6959930736506667e+01, 1.7395749181302794e+01, 1.7925222507645273e+01}, + {-1.7979511220856821e+01, -1.7450566562382527e+01, -1.7015192658329994e+01, -1.6628624717520726e+01, -1.6273756976433262e+01, -1.5941773281856777e+01, -1.5627385233279865e+01, -1.5327108761515133e+01, -1.5038496431508072e+01, -1.4759746585548676e+01, -1.4489484671220278e+01, -1.4226631965786098e+01, -1.3970322322144929e+01, -1.3719846994209712e+01, -1.3474616726146943e+01, -1.3234134904720168e+01, -1.2997978052213019e+01, -1.2765781336657277e+01, -1.2537227600374516e+01, -1.2312038911428751e+01, -1.2089969960138866e+01, -1.1870802828673778e+01, -1.1654342798540783e+01, -1.1440414953672729e+01, -1.1228861401155735e+01, -1.1019538976992907e+01, -1.0812317336788272e+01, -1.0607077354851786e+01, -1.0403709772624749e+01, -1.0202114050301228e+01, -1.0002197385309380e+01, -9.8038738687782931e+00, -9.6070637568592137e+00, -9.4116928382313976e+00, -9.2176918826171956e+00, -9.0249961578901878e+00, -8.8335450055546225e+00, -8.6432814661321498e+00, -8.4541519474088638e+00, -8.2661059296450645e+00, -8.0790957027880133e+00, -7.8930761314972990e+00, -7.7080044444269156e+00, -7.5238400447339577e+00, -7.3405443392217018e+00, -7.1580805838909543e+00, -6.9764137439811300e+00, -6.7955103668419072e+00, -6.6153384661958059e+00, -6.4358674165386551e+00, -6.2570678565839728e+00, -6.0789116007933410e+00, -5.9013715581517205e+00, -5.7244216574471602e+00, -5.5480367784012250e+00, -5.3721926880716282e+00, -5.1968659820138337e+00, -5.0220340297452291e+00, -4.8476749241050427e+00, -4.6737674341465381e+00, -4.5002909612360460e+00, -4.3272254980667917e+00, -4.1545515903248518e+00, -3.9822503007705263e+00, -3.8103031755212977e+00, -3.6386922123428187e+00, -3.4673998307723486e+00, -3.2964088439149859e+00, -3.1257024317672348e+00, -2.9552641159350221e+00, -2.7850777356245309e+00, -2.6151274247942147e+00, -2.4453975903652418e+00, -2.2758728913956188e+00, -2.1065382191302762e+00, -1.9373786778457494e+00, -1.7683795664136901e+00, -1.5995263605124337e+00, -1.4308046954202789e+00, -1.2622003493280531e+00, -1.0936992271119663e+00, -9.2528734451076988e-01, -7.5695081265386188e-01, -5.8867582288920217e-01, -4.2044863186181980e-01, -2.5225554679525719e-01, -8.4082910929564950e-02, 8.4082910929564950e-02, 2.5225554679525719e-01, 4.2044863186181980e-01, 5.8867582288920217e-01, 7.5695081265386188e-01, 9.2528734451076988e-01, 1.0936992271119663e+00, 1.2622003493280531e+00, 1.4308046954202789e+00, 1.5995263605124337e+00, 1.7683795664136901e+00, 1.9373786778457494e+00, 2.1065382191302762e+00, 2.2758728913956188e+00, 2.4453975903652418e+00, 
2.6151274247942147e+00, 2.7850777356245309e+00, 2.9552641159350221e+00, 3.1257024317672348e+00, 3.2964088439149859e+00, 3.4673998307723486e+00, 3.6386922123428187e+00, 3.8103031755212977e+00, 3.9822503007705263e+00, 4.1545515903248518e+00, 4.3272254980667917e+00, 4.5002909612360460e+00, 4.6737674341465381e+00, 4.8476749241050427e+00, 5.0220340297452291e+00, 5.1968659820138337e+00, 5.3721926880716282e+00, 5.5480367784012250e+00, 5.7244216574471602e+00, 5.9013715581517205e+00, 6.0789116007933410e+00, 6.2570678565839728e+00, 6.4358674165386551e+00, 6.6153384661958059e+00, 6.7955103668419072e+00, 6.9764137439811300e+00, 7.1580805838909543e+00, 7.3405443392217018e+00, 7.5238400447339577e+00, 7.7080044444269156e+00, 7.8930761314972990e+00, 8.0790957027880133e+00, 8.2661059296450645e+00, 8.4541519474088638e+00, 8.6432814661321498e+00, 8.8335450055546225e+00, 9.0249961578901878e+00, 9.2176918826171956e+00, 9.4116928382313976e+00, 9.6070637568592137e+00, 9.8038738687782931e+00, 1.0002197385309380e+01, 1.0202114050301228e+01, 1.0403709772624749e+01, 1.0607077354851786e+01, 1.0812317336788272e+01, 1.1019538976992907e+01, 1.1228861401155735e+01, 1.1440414953672729e+01, 1.1654342798540783e+01, 1.1870802828673778e+01, 1.2089969960138866e+01, 1.2312038911428751e+01, 1.2537227600374516e+01, 1.2765781336657277e+01, 1.2997978052213019e+01, 1.3234134904720168e+01, 1.3474616726146943e+01, 1.3719846994209712e+01, 1.3970322322144929e+01, 1.4226631965786098e+01, 1.4489484671220278e+01, 1.4759746585548676e+01, 1.5038496431508072e+01, 1.5327108761515133e+01, 1.5627385233279865e+01, 1.5941773281856777e+01, 1.6273756976433262e+01, 1.6628624717520726e+01, 1.7015192658329994e+01, 1.7450566562382527e+01, 1.7979511220856821e+01}, + {-1.8033641954168267e+01, -1.7505222333487513e+01, -1.7070289870719815e+01, -1.6684121139543407e+01, -1.6329626050797177e+01, -1.5997996474571265e+01, -1.5683948796077209e+01, -1.5384002084668344e+01, -1.5095711102237930e+01, -1.4817275804306519e+01, -1.4547322867471838e+01, -1.4284774533151579e+01, -1.4028765428985777e+01, -1.3778587444058825e+01, -1.3533651852283532e+01, -1.3293462488855706e+01, -1.3057596260647884e+01, -1.2825688669380938e+01, -1.2597422849942523e+01, -1.2372521129351217e+01, -1.2150738429127935e+01, -1.1931857039523125e+01, -1.1715682430712077e+01, -1.1502039858881551e+01, -1.1290771589409180e+01, -1.1081734604649661e+01, -1.0874798696301507e+01, -1.0669844865923427e+01, -1.0466763974552268e+01, -1.0265455595339178e+01, -1.0065827032900119e+01, -9.8677924805319144e+00, -9.6712722921832484e+00, -9.4761923505272865e+00, -9.2824835159740040e+00, -9.0900811442169349e+00, -8.8989246621016740e+00, -8.7089571933595398e+00, -8.5201252271656553e+00, -8.3323783236290581e+00, -8.1456688512594901e+00, -7.9599517522241854e+00, -7.7751843318419356e+00, -7.5913260692870139e+00, -7.4083384469130396e+00, -7.2261847959726726e+00, -7.0448301568163600e+00, -6.8642411519124229e+00, -6.6843858702502548e+00, -6.5052337618746865e+00, -6.3267555414586063e+00, -6.1489230999568738e+00, -5.9717094235012285e+00, -5.7950885187964847e+00, -5.6190353443649403e+00, -5.4435257470611598e+00, -5.2685364033444513e+00, -5.0940447648531757e+00, -4.9200290078745663e+00, -4.7464679863470698e+00, -4.5733411880702048e+00, -4.4006286938303605e+00, -4.2283111391802617e+00, -4.0563696786358214e+00, -3.8847859520769483e+00, -3.7135420531591667e+00, -3.5426204995608663e+00, -3.3720042049069567e+00, -3.2016764522238601e+00, -3.0316208687934161e+00, -2.8618214022844848e+00, -2.6922622980510740e+00, 
-2.5229280774947309e+00, -2.3538035173969201e+00, -2.1848736301342204e+00, -2.0161236446955049e+00, -1.8475389884259239e+00, -1.6791052694275435e+00, -1.5108082595509420e+00, -1.3426338779160372e+00, -1.1745681749038943e+00, -1.0065973165643360e+00, -8.3870756938683533e-01, -6.7088528538449332e-01, -5.0311688744285843e-01, -3.3538885488700437e-01, -1.6768770922162865e-01, -0.0000000000000000e+00, 1.6768770922162865e-01, 3.3538885488700437e-01, 5.0311688744285843e-01, 6.7088528538449332e-01, 8.3870756938683533e-01, 1.0065973165643360e+00, 1.1745681749038943e+00, 1.3426338779160372e+00, 1.5108082595509420e+00, 1.6791052694275435e+00, 1.8475389884259239e+00, 2.0161236446955049e+00, 2.1848736301342204e+00, 2.3538035173969201e+00, 2.5229280774947309e+00, 2.6922622980510740e+00, 2.8618214022844848e+00, 3.0316208687934161e+00, 3.2016764522238601e+00, 3.3720042049069567e+00, 3.5426204995608663e+00, 3.7135420531591667e+00, 3.8847859520769483e+00, 4.0563696786358214e+00, 4.2283111391802617e+00, 4.4006286938303605e+00, 4.5733411880702048e+00, 4.7464679863470698e+00, 4.9200290078745663e+00, 5.0940447648531757e+00, 5.2685364033444513e+00, 5.4435257470611598e+00, 5.6190353443649403e+00, 5.7950885187964847e+00, 5.9717094235012285e+00, 6.1489230999568738e+00, 6.3267555414586063e+00, 6.5052337618746865e+00, 6.6843858702502548e+00, 6.8642411519124229e+00, 7.0448301568163600e+00, 7.2261847959726726e+00, 7.4083384469130396e+00, 7.5913260692870139e+00, 7.7751843318419356e+00, 7.9599517522241854e+00, 8.1456688512594901e+00, 8.3323783236290581e+00, 8.5201252271656553e+00, 8.7089571933595398e+00, 8.8989246621016740e+00, 9.0900811442169349e+00, 9.2824835159740040e+00, 9.4761923505272865e+00, 9.6712722921832484e+00, 9.8677924805319144e+00, 1.0065827032900119e+01, 1.0265455595339178e+01, 1.0466763974552268e+01, 1.0669844865923427e+01, 1.0874798696301507e+01, 1.1081734604649661e+01, 1.1290771589409180e+01, 1.1502039858881551e+01, 1.1715682430712077e+01, 1.1931857039523125e+01, 1.2150738429127935e+01, 1.2372521129351217e+01, 1.2597422849942523e+01, 1.2825688669380938e+01, 1.3057596260647884e+01, 1.3293462488855706e+01, 1.3533651852283532e+01, 1.3778587444058825e+01, 1.4028765428985777e+01, 1.4284774533151579e+01, 1.4547322867471838e+01, 1.4817275804306519e+01, 1.5095711102237930e+01, 1.5384002084668344e+01, 1.5683948796077209e+01, 1.5997996474571265e+01, 1.6329626050797177e+01, 1.6684121139543407e+01, 1.7070289870719815e+01, 1.7505222333487513e+01, 1.8033641954168267e+01}, + {-1.8087616073790890e+01, -1.7559717906645613e+01, -1.7125223825336107e+01, -1.6739451499952981e+01, -1.6385326414471709e+01, -1.6054048410627541e+01, -1.5740338628583727e+01, -1.5440719256044483e+01, -1.5152747237684542e+01, -1.4874624131806939e+01, -1.4604977836012001e+01, -1.4342731549531273e+01, -1.4087020669594040e+01, -1.3837137716174603e+01, -1.3592494489303405e+01, -1.3352595269479263e+01, -1.3117017345422802e+01, -1.2885396550109379e+01, -1.2657416308802228e+01, -1.2432799205480446e+01, -1.2211300391032086e+01, -1.1992702362083911e+01, -1.1776810775879971e+01, -1.1563451059348457e+01, -1.1352465634716971e+01, -1.1143711629308052e+01, -1.0937058969577729e+01, -1.0732388783033896e+01, -1.0529592049038808e+01, -1.0328568452453034e+01, -1.0129225403849107e+01, -9.9314771974714695e+00, -9.7352442838524844e+00, -9.5404526384475599e+00, -9.3470332111408592e+00, -9.1549214442271172e+00, -8.9640568486659262e+00, -8.7743826301592698e+00, -8.5858453580177638e+00, -8.3983946709283614e+00, -8.2119830146725530e+00, -8.0265654076120381e+00, 
-7.8420992303922956e+00, -7.6585440368392881e+00, -7.4758613834617105e+00, -7.2940146753366397e+00, -7.1129690264635128e+00, -6.9326911329302607e+00, -6.7531491574546436e+00, -6.5743126240500311e+00, -6.3961523217237177e+00, -6.2186402162517354e+00, -6.0417493691907094e+00, -5.8654538633877182e+00, -5.6897287343358398e+00, -5.5145499067980914e+00, -5.3398941361876862e+00, -5.1657389542492513e+00, -4.9920626186351811e+00, -4.8188440660145870e+00, -4.6460628683903158e+00, -4.4736991923328295e+00, -4.3017337608691273e+00, -4.1301478177908013e+00, -3.9589230941681977e+00, -3.7880417768779360e+00, -3.6174864789689782e+00, -3.4472402117084235e+00, -3.2772863581623723e+00, -3.1076086481798137e+00, -2.9381911346587741e+00, -2.7690181709839532e+00, -2.6000743895340599e+00, -2.4313446811650303e+00, -2.2628141755824469e+00, -2.0944682225228455e+00, -1.9262923736692732e+00, -1.7582723652315242e+00, -1.5903941011259675e+00, -1.4226436366938928e+00, -1.2550071629008195e+00, -1.0874709909623226e+00, -9.2002153734466763e-01, -7.5264530909089167e-01, -5.8532888942501560e-01, -4.1805892358878166e-01, -2.5082210486674750e-01, -8.3605160756706529e-02, 8.3605160756706529e-02, 2.5082210486674750e-01, 4.1805892358878166e-01, 5.8532888942501560e-01, 7.5264530909089167e-01, 9.2002153734466763e-01, 1.0874709909623226e+00, 1.2550071629008195e+00, 1.4226436366938928e+00, 1.5903941011259675e+00, 1.7582723652315242e+00, 1.9262923736692732e+00, 2.0944682225228455e+00, 2.2628141755824469e+00, 2.4313446811650303e+00, 2.6000743895340599e+00, 2.7690181709839532e+00, 2.9381911346587741e+00, 3.1076086481798137e+00, 3.2772863581623723e+00, 3.4472402117084235e+00, 3.6174864789689782e+00, 3.7880417768779360e+00, 3.9589230941681977e+00, 4.1301478177908013e+00, 4.3017337608691273e+00, 4.4736991923328295e+00, 4.6460628683903158e+00, 4.8188440660145870e+00, 4.9920626186351811e+00, 5.1657389542492513e+00, 5.3398941361876862e+00, 5.5145499067980914e+00, 5.6897287343358398e+00, 5.8654538633877182e+00, 6.0417493691907094e+00, 6.2186402162517354e+00, 6.3961523217237177e+00, 6.5743126240500311e+00, 6.7531491574546436e+00, 6.9326911329302607e+00, 7.1129690264635128e+00, 7.2940146753366397e+00, 7.4758613834617105e+00, 7.6585440368392881e+00, 7.8420992303922956e+00, 8.0265654076120381e+00, 8.2119830146725530e+00, 8.3983946709283614e+00, 8.5858453580177638e+00, 8.7743826301592698e+00, 8.9640568486659262e+00, 9.1549214442271172e+00, 9.3470332111408592e+00, 9.5404526384475599e+00, 9.7352442838524844e+00, 9.9314771974714695e+00, 1.0129225403849107e+01, 1.0328568452453034e+01, 1.0529592049038808e+01, 1.0732388783033896e+01, 1.0937058969577729e+01, 1.1143711629308052e+01, 1.1352465634716971e+01, 1.1563451059348457e+01, 1.1776810775879971e+01, 1.1992702362083911e+01, 1.2211300391032086e+01, 1.2432799205480446e+01, 1.2657416308802228e+01, 1.2885396550109379e+01, 1.3117017345422802e+01, 1.3352595269479263e+01, 1.3592494489303405e+01, 1.3837137716174603e+01, 1.4087020669594040e+01, 1.4342731549531273e+01, 1.4604977836012001e+01, 1.4874624131806939e+01, 1.5152747237684542e+01, 1.5440719256044483e+01, 1.5740338628583727e+01, 1.6054048410627541e+01, 1.6385326414471709e+01, 1.6739451499952981e+01, 1.7125223825336107e+01, 1.7559717906645613e+01, 1.8087616073790890e+01}, + {-1.8141434926389717e+01, -1.7614054673500309e+01, -1.7179995952720883e+01, -1.6794617265339959e+01, -1.6440859568460443e+01, -1.6109930624426557e+01, -1.5796556297952138e+01, -1.5497261875146510e+01, -1.5209606469472327e+01, -1.4931793231695860e+01, -1.4662451272508532e+01, 
-1.4400504742695974e+01, -1.4145089803991842e+01, -1.3895499603035967e+01, -1.3651146462397561e+01, -1.3411535104793492e+01, -1.3176243198091708e+01, -1.2944906904124267e+01, -1.2717209936373832e+01, -1.2492875133818373e+01, -1.2271657874909140e+01, -1.2053340860973840e+01, -1.1837729934755059e+01, -1.1624650692438404e+01, -1.1413945711687965e+01, -1.1205472263438262e+01, -1.0999100407593694e+01, -1.0794711396339702e+01, -1.0592196326121911e+01, -1.0391454992291058e+01, -1.0192394910173833e+01, -9.9949304737714737e+00, -9.7989822290161133e+00, -9.6044762429642674e+00, -9.4113435537920687e+00, -9.2195196892087417e+00, -9.0289442430934326e+00, -8.8395605019136383e+00, -8.6513151138967856e+00, -8.4641577950728877e+00, -8.2780410672415687e+00, -8.0929200236841812e+00, -7.9087521190744416e+00, -7.7254969805656009e+00, -7.5431162374687224e+00, -7.3615733673019719e+00, -7.1808335562975261e+00, -7.0008635727114159e+00, -6.8216316515006419e+00, -6.6431073891179739e+00, -6.4652616473335298e+00, -6.2880664651279838e+00, -6.1114949778187784e+00, -5.9355213426810298e+00, -5.7601206704114505e+00, -5.5852689618586204e+00, -5.4109430495080577e+00, -5.2371205432672880e+00, -5.0637797801455120e+00, -4.8908997774658411e+00, -4.7184601892859508e+00, -4.5464412657363820e+00, -4.3748238150150680e+00, -4.2035891678025479e+00, -4.0327191438852346e+00, -3.8621960207943373e+00, -3.6920025042860427e+00, -3.5221217005044698e+00, -3.3525370896831359e+00, -3.1832325012532876e+00, -3.0141920902387014e+00, -2.8454003148266174e+00, -2.6768419150134308e+00, -2.5085018922317608e+00, -2.3403654898726809e+00, -2.1724181746232882e+00, -2.0046456185454748e+00, -1.8370336818268698e+00, -1.6695683961394456e+00, -1.5022359485453094e+00, -1.3350226658927800e+00, -1.1679149996489939e+00, -1.0008995111180643e+00, -8.3396285699622974e-01, -6.6709177521751994e-01, -5.0027307104524965e-01, -3.3349360336615247e-01, -1.6674027114518436e-01, -0.0000000000000000e+00, 1.6674027114518436e-01, 3.3349360336615247e-01, 5.0027307104524965e-01, 6.6709177521751994e-01, 8.3396285699622974e-01, 1.0008995111180643e+00, 1.1679149996489939e+00, 1.3350226658927800e+00, 1.5022359485453094e+00, 1.6695683961394456e+00, 1.8370336818268698e+00, 2.0046456185454748e+00, 2.1724181746232882e+00, 2.3403654898726809e+00, 2.5085018922317608e+00, 2.6768419150134308e+00, 2.8454003148266174e+00, 3.0141920902387014e+00, 3.1832325012532876e+00, 3.3525370896831359e+00, 3.5221217005044698e+00, 3.6920025042860427e+00, 3.8621960207943373e+00, 4.0327191438852346e+00, 4.2035891678025479e+00, 4.3748238150150680e+00, 4.5464412657363820e+00, 4.7184601892859508e+00, 4.8908997774658411e+00, 5.0637797801455120e+00, 5.2371205432672880e+00, 5.4109430495080577e+00, 5.5852689618586204e+00, 5.7601206704114505e+00, 5.9355213426810298e+00, 6.1114949778187784e+00, 6.2880664651279838e+00, 6.4652616473335298e+00, 6.6431073891179739e+00, 6.8216316515006419e+00, 7.0008635727114159e+00, 7.1808335562975261e+00, 7.3615733673019719e+00, 7.5431162374687224e+00, 7.7254969805656009e+00, 7.9087521190744416e+00, 8.0929200236841812e+00, 8.2780410672415687e+00, 8.4641577950728877e+00, 8.6513151138967856e+00, 8.8395605019136383e+00, 9.0289442430934326e+00, 9.2195196892087417e+00, 9.4113435537920687e+00, 9.6044762429642674e+00, 9.7989822290161133e+00, 9.9949304737714737e+00, 1.0192394910173833e+01, 1.0391454992291058e+01, 1.0592196326121911e+01, 1.0794711396339702e+01, 1.0999100407593694e+01, 1.1205472263438262e+01, 1.1413945711687965e+01, 1.1624650692438404e+01, 1.1837729934755059e+01, 
1.2053340860973840e+01, 1.2271657874909140e+01, 1.2492875133818373e+01, 1.2717209936373832e+01, 1.2944906904124267e+01, 1.3176243198091708e+01, 1.3411535104793492e+01, 1.3651146462397561e+01, 1.3895499603035967e+01, 1.4145089803991842e+01, 1.4400504742695974e+01, 1.4662451272508532e+01, 1.4931793231695860e+01, 1.5209606469472327e+01, 1.5497261875146510e+01, 1.5796556297952138e+01, 1.6109930624426557e+01, 1.6440859568460443e+01, 1.6794617265339959e+01, 1.7179995952720883e+01, 1.7614054673500309e+01, 1.8141434926389717e+01}, + {-1.8195099839472430e+01, -1.7668234005719587e+01, -1.7234607662724841e+01, -1.6849619880933339e+01, -1.6496226991759631e+01, -1.6165644627729627e+01, -1.5852603348070128e+01, -1.5553631517589562e+01, -1.5266290404714507e+01, -1.4988784742481727e+01, -1.4719744846860621e+01, -1.4458095814010402e+01, -1.4202974565149994e+01, -1.3953674869416368e+01, -1.3709609568387386e+01, -1.3470283823956324e+01, -1.3235275680476827e+01, -1.3004221626275813e+01, -1.2776805660932723e+01, -1.2552750876494338e+01, -1.2331812877201305e+01, -1.2113774567435913e+01, -1.1898441973897304e+01, -1.1685640860572095e+01, -1.1475213959174649e+01, -1.1267018682922650e+01, -1.1060925223886697e+01, -1.0856814957685176e+01, -1.0654579096632252e+01, -1.0454117545377073e+01, -1.0255337922825921e+01, -1.0058154721574315e+01, -9.8624885817991093e+00, -9.6682656610062114e+00, -9.4754170845116796e+00, -9.2838784642833954e+00, -9.0935894759573372e+00, -8.9044934855939797e+00, -8.7165372191524781e+00, -8.5296704688056462e+00, -8.3438458311533719e+00, -8.1590184731587261e+00, -7.9751459222633976e+00, -7.7921878776629914e+00, -7.6101060401591205e+00, -7.4288639583701048e+00, -7.2484268893885622e+00, -7.0687616722327293e+00, -6.8898366126570973e+00, -6.7116213780739269e+00, -6.5340869014957565e+00, -6.3572052935446060e+00, -6.1809497616900950e+00, -6.0052945359788579e+00, -5.8302148006042280e+00, -5.6556866307401252e+00, -5.4816869341281516e+00, -5.3081933969635617e+00, -5.1351844336752253e+00, -4.9626391402379442e+00, -4.7905372506934265e+00, -4.6188590965895351e+00, -4.4475855690767556e+00, -4.2766980834267416e+00, -4.1061785457606526e+00, -3.9360093217952534e+00, -3.7661732074327352e+00, -3.5966534010361229e+00, -3.4274334772463870e+00, -3.2584973622099573e+00, -3.0898293100966461e+00, -2.9214138807980121e+00, -2.7532359187051947e+00, -2.5852805324732593e+00, -2.4175330756862730e+00, -2.2499791283437252e+00, -2.0826044790946652e+00, -1.9153951081510019e+00, -1.7483371708160103e+00, -1.5814169815681258e+00, -1.4146209986437335e+00, -1.2479358090658317e+00, -1.0813481140682928e+00, -9.1484471486787966e-01, -7.4841249873834059e-01, -5.8203842534274008e-01, -4.1570951328174155e-01, -2.4941282681685847e-01, -8.3135462728728945e-02, 8.3135462728728945e-02, 2.4941282681685847e-01, 4.1570951328174155e-01, 5.8203842534274008e-01, 7.4841249873834059e-01, 9.1484471486787966e-01, 1.0813481140682928e+00, 1.2479358090658317e+00, 1.4146209986437335e+00, 1.5814169815681258e+00, 1.7483371708160103e+00, 1.9153951081510019e+00, 2.0826044790946652e+00, 2.2499791283437252e+00, 2.4175330756862730e+00, 2.5852805324732593e+00, 2.7532359187051947e+00, 2.9214138807980121e+00, 3.0898293100966461e+00, 3.2584973622099573e+00, 3.4274334772463870e+00, 3.5966534010361229e+00, 3.7661732074327352e+00, 3.9360093217952534e+00, 4.1061785457606526e+00, 4.2766980834267416e+00, 4.4475855690767556e+00, 4.6188590965895351e+00, 4.7905372506934265e+00, 4.9626391402379442e+00, 5.1351844336752253e+00, 5.3081933969635617e+00, 5.4816869341281516e+00, 
5.6556866307401252e+00, 5.8302148006042280e+00, 6.0052945359788579e+00, 6.1809497616900950e+00, 6.3572052935446060e+00, 6.5340869014957565e+00, 6.7116213780739269e+00, 6.8898366126570973e+00, 7.0687616722327293e+00, 7.2484268893885622e+00, 7.4288639583701048e+00, 7.6101060401591205e+00, 7.7921878776629914e+00, 7.9751459222633976e+00, 8.1590184731587261e+00, 8.3438458311533719e+00, 8.5296704688056462e+00, 8.7165372191524781e+00, 8.9044934855939797e+00, 9.0935894759573372e+00, 9.2838784642833954e+00, 9.4754170845116796e+00, 9.6682656610062114e+00, 9.8624885817991093e+00, 1.0058154721574315e+01, 1.0255337922825921e+01, 1.0454117545377073e+01, 1.0654579096632252e+01, 1.0856814957685176e+01, 1.1060925223886697e+01, 1.1267018682922650e+01, 1.1475213959174649e+01, 1.1685640860572095e+01, 1.1898441973897304e+01, 1.2113774567435913e+01, 1.2331812877201305e+01, 1.2552750876494338e+01, 1.2776805660932723e+01, 1.3004221626275813e+01, 1.3235275680476827e+01, 1.3470283823956324e+01, 1.3709609568387386e+01, 1.3953674869416368e+01, 1.4202974565149994e+01, 1.4458095814010402e+01, 1.4719744846860621e+01, 1.4988784742481727e+01, 1.5266290404714507e+01, 1.5553631517589562e+01, 1.5852603348070128e+01, 1.6165644627729627e+01, 1.6496226991759631e+01, 1.6849619880933339e+01, 1.7234607662724841e+01, 1.7668234005719587e+01, 1.8195099839472430e+01}, + {-1.8248612121768343e+01, -1.7722257255394386e+01, -1.7289060344922966e+01, -1.6904460771032490e+01, -1.6551430141805881e+01, -1.6221191910121661e+01, -1.5908481300039147e+01, -1.5609829735595335e+01, -1.5322800626523263e+01, -1.5045600278061576e+01, -1.4776860203741105e+01, -1.4515506438991860e+01, -1.4260676659563400e+01, -1.4011665252976300e+01, -1.3767885576334674e+01, -1.3528843227708883e+01, -1.3294116625314924e+01, -1.3063342581647838e+01, -1.2836205380293851e+01, -1.2612428364469304e+01, -1.2391767362459763e+01, -1.2174005480084015e+01, -1.1958948926483190e+01, -1.1746423632015365e+01, -1.1536272481085435e+01, -1.1328353027891652e+01, -1.1122535595414808e+01, -1.0918701681487823e+01, -1.0716742613104932e+01, -1.0516558403048526e+01, -1.0318056772656865e+01, -1.0121152311984588e+01, -9.9257657543260880e+00, -9.7318233465134139e+00, -9.5392562998795718e+00, -9.3480003095250854e+00, -9.1579951317106332e+00, -8.9691842109485744e+00, -8.7815143497770496e+00, -8.5949354153447821e+00, -8.4094000778684173e+00, -8.2248635767903480e+00, -8.0412835110966494e+00, -7.8586196507783281e+00, -7.6768337668550366e+00, -7.4958894777450231e+00, -7.3157521100712737e+00, -7.1363885722521356e+00, -6.9577672394433048e+00, -6.7798578485838581e+00, -6.6026314024574280e+00, -6.4260600818151339e+00, -6.2501171647232399e+00, -6.0747769523986586e+00, -5.9000147008819077e+00, -5.7258065579720290e+00, -5.5521295049130304e+00, -5.3789613023779728e+00, -5.2062804403463288e+00, -5.0340660915133606e+00, -4.8622980679082559e+00, -4.6909567804310086e+00, -4.5200232010473753e+00, -4.3494788274071139e+00, -4.1793056496735739e+00, -4.0094861193729656e+00, -3.8400031200895830e+00, -3.6708399398492308e+00, -3.5019802450472732e+00, -3.3334080557903905e+00, -3.1651077225323845e+00, -2.9970639038944427e+00, -2.8292615455692673e+00, -2.6616858602164983e+00, -2.4943223082640649e+00, -2.3271565795365086e+00, -2.1601745756370865e+00, -1.9933623930155944e+00, -1.8267063066584319e+00, -1.6601927543415429e+00, -1.4938083213904756e+00, -1.3275397258950710e+00, -1.1613738043291120e+00, -9.9529749752779750e-01, -8.2929783697808435e-01, -6.6336193137884258e-01, -4.9747695342938458e-01, -3.3163012680628950e-01, 
-1.6580871328955671e-01, -0.0000000000000000e+00, 1.6580871328955671e-01, 3.3163012680628950e-01, 4.9747695342938458e-01, 6.6336193137884258e-01, 8.2929783697808435e-01, 9.9529749752779750e-01, 1.1613738043291120e+00, 1.3275397258950710e+00, 1.4938083213904756e+00, 1.6601927543415429e+00, 1.8267063066584319e+00, 1.9933623930155944e+00, 2.1601745756370865e+00, 2.3271565795365086e+00, 2.4943223082640649e+00, 2.6616858602164983e+00, 2.8292615455692673e+00, 2.9970639038944427e+00, 3.1651077225323845e+00, 3.3334080557903905e+00, 3.5019802450472732e+00, 3.6708399398492308e+00, 3.8400031200895830e+00, 4.0094861193729656e+00, 4.1793056496735739e+00, 4.3494788274071139e+00, 4.5200232010473753e+00, 4.6909567804310086e+00, 4.8622980679082559e+00, 5.0340660915133606e+00, 5.2062804403463288e+00, 5.3789613023779728e+00, 5.5521295049130304e+00, 5.7258065579720290e+00, 5.9000147008819077e+00, 6.0747769523986586e+00, 6.2501171647232399e+00, 6.4260600818151339e+00, 6.6026314024574280e+00, 6.7798578485838581e+00, 6.9577672394433048e+00, 7.1363885722521356e+00, 7.3157521100712737e+00, 7.4958894777450231e+00, 7.6768337668550366e+00, 7.8586196507783281e+00, 8.0412835110966494e+00, 8.2248635767903480e+00, 8.4094000778684173e+00, 8.5949354153447821e+00, 8.7815143497770496e+00, 8.9691842109485744e+00, 9.1579951317106332e+00, 9.3480003095250854e+00, 9.5392562998795718e+00, 9.7318233465134139e+00, 9.9257657543260880e+00, 1.0121152311984588e+01, 1.0318056772656865e+01, 1.0516558403048526e+01, 1.0716742613104932e+01, 1.0918701681487823e+01, 1.1122535595414808e+01, 1.1328353027891652e+01, 1.1536272481085435e+01, 1.1746423632015365e+01, 1.1958948926483190e+01, 1.2174005480084015e+01, 1.2391767362459763e+01, 1.2612428364469304e+01, 1.2836205380293851e+01, 1.3063342581647838e+01, 1.3294116625314924e+01, 1.3528843227708883e+01, 1.3767885576334674e+01, 1.4011665252976300e+01, 1.4260676659563400e+01, 1.4515506438991860e+01, 1.4776860203741105e+01, 1.5045600278061576e+01, 1.5322800626523263e+01, 1.5609829735595335e+01, 1.5908481300039147e+01, 1.6221191910121661e+01, 1.6551430141805881e+01, 1.6904460771032490e+01, 1.7289060344922966e+01, 1.7722257255394386e+01, 1.8248612121768343e+01}, + {-1.8301973063597831e+01, -1.7776125755426882e+01, -1.7343355369019513e+01, -1.6959141339427859e+01, -1.6606470454912181e+01, -1.6276573939462359e+01, -1.5964191652640661e+01, -1.5665858058473457e+01, -1.5379138694506308e+01, -1.5102241428233055e+01, -1.4833798963124122e+01, -1.4572738267853815e+01, -1.4318197767810874e+01, -1.4069472464839684e+01, -1.3825976228134913e+01, -1.3587215088986124e+01, -1.3352767836885693e+01, -1.3122271606204340e+01, -1.2895410962476978e+01, -1.2671909498220252e+01, -1.2451523264048797e+01, -1.2234035565627281e+01, -1.2019252793050947e+01, -1.1807001041645883e+01, -1.1597123347173454e+01, -1.1389477403535365e+01, -1.1183933663391947e+01, -1.0980373745597644e+01, -1.0778689090663802e+01, -1.0578779818366641e+01, -1.0380553751354837e+01, -1.0183925576033463e+01, -9.9888161177141903e+00, -9.7951517114594342e+00, -9.6028636535242580e+00, -9.4118877210445753e+00, -9.2221637498031388e+00, -9.0336352616534104e+00, -8.8462491345909715e+00, -8.6599553096056603e+00, -8.4747065293805139e+00, -8.2904581046690247e+00, -8.1071677048133868e+00, -7.9247951693895704e+00, -7.7433023384005537e+00, -7.5626528988034316e+00, -7.3828122454620404e+00, -7.2037473548748121e+00, -7.0254266702460146e+00, -6.8478199966541871e+00, -6.6708984052298304e+00, -6.4946341453898597e+00, -6.3190005642925637e+00, -6.1439720327768708e+00, 
-5.9695238771362042e+00, -5.7956323161519689e+00, -5.6222744028767648e+00, -5.4494279707139679e+00, -5.2770715833897031e+00, -5.1051844884564179e+00, -4.9337465740051734e+00, -4.7627383282970204e+00, -4.5921408020531498e+00, -4.4219355731693852e+00, -4.2521047136434067e+00, -4.0826307585233783e+00, -3.9134966767045909e+00, -3.7446858434166801e+00, -3.5761820142581890e+00, -3.4079693006478804e+00, -3.2400321465734825e+00, -3.0723553065286380e+00, -2.9049238245378035e+00, -2.7377230141769160e+00, -2.5707384395048281e+00, -2.4039558968269845e+00, -2.2373613972185420e+00, -2.0709411497393346e+00, -1.9046815452776742e+00, -1.7385691409641026e+00, -1.5725906450998823e+00, -1.4067329025482804e+00, -1.2409828805395797e+00, -1.0753276548433179e+00, -9.0975439626347543e-01, -7.4425035741428780e-01, -5.7880285973602430e-01, -4.1339928071149723e-01, -2.4802704124523722e-01, -8.2673593168222662e-02, 8.2673593168222662e-02, 2.4802704124523722e-01, 4.1339928071149723e-01, 5.7880285973602430e-01, 7.4425035741428780e-01, 9.0975439626347543e-01, 1.0753276548433179e+00, 1.2409828805395797e+00, 1.4067329025482804e+00, 1.5725906450998823e+00, 1.7385691409641026e+00, 1.9046815452776742e+00, 2.0709411497393346e+00, 2.2373613972185420e+00, 2.4039558968269845e+00, 2.5707384395048281e+00, 2.7377230141769160e+00, 2.9049238245378035e+00, 3.0723553065286380e+00, 3.2400321465734825e+00, 3.4079693006478804e+00, 3.5761820142581890e+00, 3.7446858434166801e+00, 3.9134966767045909e+00, 4.0826307585233783e+00, 4.2521047136434067e+00, 4.4219355731693852e+00, 4.5921408020531498e+00, 4.7627383282970204e+00, 4.9337465740051734e+00, 5.1051844884564179e+00, 5.2770715833897031e+00, 5.4494279707139679e+00, 5.6222744028767648e+00, 5.7956323161519689e+00, 5.9695238771362042e+00, 6.1439720327768708e+00, 6.3190005642925637e+00, 6.4946341453898597e+00, 6.6708984052298304e+00, 6.8478199966541871e+00, 7.0254266702460146e+00, 7.2037473548748121e+00, 7.3828122454620404e+00, 7.5626528988034316e+00, 7.7433023384005537e+00, 7.9247951693895704e+00, 8.1071677048133868e+00, 8.2904581046690247e+00, 8.4747065293805139e+00, 8.6599553096056603e+00, 8.8462491345909715e+00, 9.0336352616534104e+00, 9.2221637498031388e+00, 9.4118877210445753e+00, 9.6028636535242580e+00, 9.7951517114594342e+00, 9.9888161177141903e+00, 1.0183925576033463e+01, 1.0380553751354837e+01, 1.0578779818366641e+01, 1.0778689090663802e+01, 1.0980373745597644e+01, 1.1183933663391947e+01, 1.1389477403535365e+01, 1.1597123347173454e+01, 1.1807001041645883e+01, 1.2019252793050947e+01, 1.2234035565627281e+01, 1.2451523264048797e+01, 1.2671909498220252e+01, 1.2895410962476978e+01, 1.3122271606204340e+01, 1.3352767836885693e+01, 1.3587215088986124e+01, 1.3825976228134913e+01, 1.4069472464839684e+01, 1.4318197767810874e+01, 1.4572738267853815e+01, 1.4833798963124122e+01, 1.5102241428233055e+01, 1.5379138694506308e+01, 1.5665858058473457e+01, 1.5964191652640661e+01, 1.6276573939462359e+01, 1.6606470454912181e+01, 1.6959141339427859e+01, 1.7343355369019513e+01, 1.7776125755426882e+01, 1.8301973063597831e+01}, + {-1.8355183937232482e+01, -1.7829840819908938e+01, -1.7397494085242659e+01, -1.7013662969810934e+01, -1.6661349346692710e+01, -1.6331792162325648e+01, -1.6019735882790211e+01, -1.5721717993090206e+01, -1.5435306145250426e+01, -1.5158709759192869e+01, -1.4890562720798751e+01, -1.4629792926034948e+01, -1.4375539545099908e+01, -1.4127098190154650e+01, -1.3883883239094454e+01, -1.3645401153510734e+01, -1.3411231091622829e+01, -1.3181010507418183e+01, -1.2954424246353401e+01, 
-1.2731196148405482e+01, -1.2511082484830109e+01, -1.2293866759574003e+01, -1.2079355542224606e+01, -1.1867375091697919e+01, -1.1657768593802700e+01, -1.1450393880891689e+01, -1.1245121534098722e+01, -1.1041833292131351e+01, -1.0840420707879813e+01, -1.0640784006999668e+01, -1.0442831112353673e+01, -1.0246476805614231e+01, -1.0051642003036079e+01, -9.8582531268427509e+00, -9.6662415571436178e+00, -9.4755431520404851e+00, -9.2860978257640348e+00, -9.0978491764272782e+00, -8.9107441563917540e+00, -8.7247327793845830e+00, -8.5397678594367203e+00, -8.3558047774774487e+00, -8.1728012720508225e+00, -7.9907172511424713e+00, -7.8095146225403225e+00, -7.6291571405168730e+00, -7.4496102669262783e+00, -7.2708410450674368e+00, -7.0928179848824850e+00, -6.9155109582455818e+00, -6.7388911032550611e+00, -6.5629307365772949e+00, -6.3876032730067926e+00, -6.2128831515070333e+00, -6.0387457670829185e+00, -5.8651674079104827e+00, -5.6921251972144509e+00, -5.5195970394407690e+00, -5.3475615703205710e+00, -5.1759981104652155e+00, -5.0048866221698640e+00, -4.8342076691363776e+00, -4.6639423788555394e+00, -4.4940724074145217e+00, -4.3245799065183155e+00, -4.1554474925341225e+00, -3.9866582173856151e+00, -3.8181955411399695e+00, -3.6500433061447404e+00, -3.4821857125843008e+00, -3.3146072953368710e+00, -3.1472929020232101e+00, -2.9802276721470844e+00, -2.8133970172356633e+00, -2.6467866018952186e+00, -2.4803823257039688e+00, -2.3141703058696805e+00, -2.1481368605848363e+00, -1.9822684930168015e+00, -1.8165518758745727e+00, -1.6509738364973834e+00, -1.4855213424137426e+00, -1.3201814873224036e+00, -1.1549414774493456e+00, -9.8978861823713149e-01, -8.2471030132498657e-01, -6.5969399177967603e-01, -4.9472721553871407e-01, -3.2979754702868869e-01, -1.6489259692248556e-01, -0.0000000000000000e+00, 1.6489259692248556e-01, 3.2979754702868869e-01, 4.9472721553871407e-01, 6.5969399177967603e-01, 8.2471030132498657e-01, 9.8978861823713149e-01, 1.1549414774493456e+00, 1.3201814873224036e+00, 1.4855213424137426e+00, 1.6509738364973834e+00, 1.8165518758745727e+00, 1.9822684930168015e+00, 2.1481368605848363e+00, 2.3141703058696805e+00, 2.4803823257039688e+00, 2.6467866018952186e+00, 2.8133970172356633e+00, 2.9802276721470844e+00, 3.1472929020232101e+00, 3.3146072953368710e+00, 3.4821857125843008e+00, 3.6500433061447404e+00, 3.8181955411399695e+00, 3.9866582173856151e+00, 4.1554474925341225e+00, 4.3245799065183155e+00, 4.4940724074145217e+00, 4.6639423788555394e+00, 4.8342076691363776e+00, 5.0048866221698640e+00, 5.1759981104652155e+00, 5.3475615703205710e+00, 5.5195970394407690e+00, 5.6921251972144509e+00, 5.8651674079104827e+00, 6.0387457670829185e+00, 6.2128831515070333e+00, 6.3876032730067926e+00, 6.5629307365772949e+00, 6.7388911032550611e+00, 6.9155109582455818e+00, 7.0928179848824850e+00, 7.2708410450674368e+00, 7.4496102669262783e+00, 7.6291571405168730e+00, 7.8095146225403225e+00, 7.9907172511424713e+00, 8.1728012720508225e+00, 8.3558047774774487e+00, 8.5397678594367203e+00, 8.7247327793845830e+00, 8.9107441563917540e+00, 9.0978491764272782e+00, 9.2860978257640348e+00, 9.4755431520404851e+00, 9.6662415571436178e+00, 9.8582531268427509e+00, 1.0051642003036079e+01, 1.0246476805614231e+01, 1.0442831112353673e+01, 1.0640784006999668e+01, 1.0840420707879813e+01, 1.1041833292131351e+01, 1.1245121534098722e+01, 1.1450393880891689e+01, 1.1657768593802700e+01, 1.1867375091697919e+01, 1.2079355542224606e+01, 1.2293866759574003e+01, 1.2511082484830109e+01, 1.2731196148405482e+01, 1.2954424246353401e+01, 1.3181010507418183e+01, 
1.3411231091622829e+01, 1.3645401153510734e+01, 1.3883883239094454e+01, 1.4127098190154650e+01, 1.4375539545099908e+01, 1.4629792926034948e+01, 1.4890562720798751e+01, 1.5158709759192869e+01, 1.5435306145250426e+01, 1.5721717993090206e+01, 1.6019735882790211e+01, 1.6331792162325648e+01, 1.6661349346692710e+01, 1.7013662969810934e+01, 1.7397494085242659e+01, 1.7829840819908938e+01, 1.8355183937232482e+01}, + {-1.8408245997246180e+01, -1.7883403744491030e+01, -1.7451477824729114e+01, -1.7068027026173699e+01, -1.6716068212476749e+01, -1.6386848004427836e+01, -1.6075115445979726e+01, -1.5777411024325023e+01, -1.5491304492792262e+01, -1.5215006814022102e+01, -1.4947153048869007e+01, -1.4686672014714095e+01, -1.4432703621796808e+01, -1.4184544088639219e+01, -1.3941608298492062e+01, -1.3703403140370927e+01, -1.3469508138708429e+01, -1.3239561064882505e+01, -1.3013247042274847e+01, -1.2790290156511402e+01, -1.2570446897828043e+01, -1.2353500966915787e+01, -1.2139259111417608e+01, -1.1927547752486015e+01, -1.1718210224692305e+01, -1.1511104497611852e+01, -1.1306101279669830e+01, -1.1103082428282363e+01, -1.0901939607604296e+01, -1.0702573148080202e+01, -1.0504891071714988e+01, -1.0308808254390055e+01, -1.0114245702254145e+01, -9.9211299236483459e+00, -9.7293923814947600e+00, -9.5389690138184235e+00, -9.3497998122512573e+00, -9.1618284501124911e+00, -8.9750019530671192e+00, -8.7892704065055494e+00, -8.6045866947189253e+00, -8.4209062677087161e+00, -8.2381869320992998e+00, -8.0563886631444781e+00, -7.8754734352537348e+00, -7.6954050688277018e+00, -7.5161490914978035e+00, -7.3376726121226348e+00, -7.1599442061117617e+00, -6.9829338108329129e+00, -6.8066126300166054e+00, -6.6309530462073942e+00, -6.4559285404270428e+00, -6.2815136183148121e+00, -6.1076837420963281e+00, -5.9344152678072613e+00, -5.7616853872629088e+00, -5.5894720743212600e+00, -5.4177540350364595e+00, -5.2465106613426817e+00, -5.0757219879463076e+00, -4.9053686521374775e+00, -4.7354318562614344e+00, -4.5658933326158548e+00, -4.3967353105632334e+00, -4.2279404856676086e+00, -4.0594919906828633e+00, -3.8913733682357932e+00, -3.7235685450613194e+00, -3.5560618076598769e+00, -3.3888377792582984e+00, -3.2218813979656056e+00, -3.0551778960241212e+00, -2.8887127800643957e+00, -2.7224718122796761e+00, -2.5564409924420968e+00, -2.3906065406885992e+00, -2.2249548810097695e+00, -2.0594726253794451e+00, -1.8941465584671033e+00, -1.7289636228787715e+00, -1.5639109048755229e+00, -1.3989756205215691e+00, -1.2341451022165917e+00, -1.0694067855692595e+00, -9.0474819657091143e-01, -7.4015693903015356e-01, -5.7562068223063112e-01, -4.1112714877553319e-01, -2.4666410258345153e-01, -8.2219337001082138e-02, 8.2219337001082138e-02, 2.4666410258345153e-01, 4.1112714877553319e-01, 5.7562068223063112e-01, 7.4015693903015356e-01, 9.0474819657091143e-01, 1.0694067855692595e+00, 1.2341451022165917e+00, 1.3989756205215691e+00, 1.5639109048755229e+00, 1.7289636228787715e+00, 1.8941465584671033e+00, 2.0594726253794451e+00, 2.2249548810097695e+00, 2.3906065406885992e+00, 2.5564409924420968e+00, 2.7224718122796761e+00, 2.8887127800643957e+00, 3.0551778960241212e+00, 3.2218813979656056e+00, 3.3888377792582984e+00, 3.5560618076598769e+00, 3.7235685450613194e+00, 3.8913733682357932e+00, 4.0594919906828633e+00, 4.2279404856676086e+00, 4.3967353105632334e+00, 4.5658933326158548e+00, 4.7354318562614344e+00, 4.9053686521374775e+00, 5.0757219879463076e+00, 5.2465106613426817e+00, 5.4177540350364595e+00, 5.5894720743212600e+00, 5.7616853872629088e+00, 5.9344152678072613e+00, 
6.1076837420963281e+00, 6.2815136183148121e+00, 6.4559285404270428e+00, 6.6309530462073942e+00, 6.8066126300166054e+00, 6.9829338108329129e+00, 7.1599442061117617e+00, 7.3376726121226348e+00, 7.5161490914978035e+00, 7.6954050688277018e+00, 7.8754734352537348e+00, 8.0563886631444781e+00, 8.2381869320992998e+00, 8.4209062677087161e+00, 8.6045866947189253e+00, 8.7892704065055494e+00, 8.9750019530671192e+00, 9.1618284501124911e+00, 9.3497998122512573e+00, 9.5389690138184235e+00, 9.7293923814947600e+00, 9.9211299236483459e+00, 1.0114245702254145e+01, 1.0308808254390055e+01, 1.0504891071714988e+01, 1.0702573148080202e+01, 1.0901939607604296e+01, 1.1103082428282363e+01, 1.1306101279669830e+01, 1.1511104497611852e+01, 1.1718210224692305e+01, 1.1927547752486015e+01, 1.2139259111417608e+01, 1.2353500966915787e+01, 1.2570446897828043e+01, 1.2790290156511402e+01, 1.3013247042274847e+01, 1.3239561064882505e+01, 1.3469508138708429e+01, 1.3703403140370927e+01, 1.3941608298492062e+01, 1.4184544088639219e+01, 1.4432703621796808e+01, 1.4686672014714095e+01, 1.4947153048869007e+01, 1.5215006814022102e+01, 1.5491304492792262e+01, 1.5777411024325023e+01, 1.6075115445979726e+01, 1.6386848004427836e+01, 1.6716068212476749e+01, 1.7068027026173699e+01, 1.7451477824729114e+01, 1.7883403744491030e+01, 1.8408245997246180e+01}, + {-1.8461160480857558e+01, -1.7936815806741905e+01, -1.7505307899899091e+01, -1.7122234853198016e+01, -1.6770628427712005e+01, -1.6441742871044735e+01, -1.6130331776708392e+01, -1.5832938615515195e+01, -1.5547135229076916e+01, -1.5271134113158810e+01, -1.5003571496240674e+01, -1.4743377111311560e+01, -1.4489691603942756e+01, -1.4241811795112390e+01, -1.3999153070125370e+01, -1.3761222742582584e+01, -1.3527600700651213e+01, -1.3297925030905425e+01, -1.3071881132685062e+01, -1.2849193335481473e+01, -1.2629618346876333e+01, -1.2412940062792593e+01, -1.2198965407516690e+01, -1.1987520963108198e+01, -1.1778450211639703e+01, -1.1571611258704099e+01, -1.1366874938858906e+01, -1.1164123227107424e+01, -1.0963247897778071e+01, -1.0764149385037474e+01, -1.0566735808984420e+01, -1.0370922138674908e+01, -1.0176629469126997e+01, -9.9837843937805424e+00, -9.7923184573541491e+00, -9.6021676767792830e+00, -9.4132721200689708e+00, -9.2255755347225286e+00, -9.0390250186738808e+00, -8.8535707279314924e+00, -8.6691656159885877e+00, -8.4857652008457727e+00, -8.3033273561177268e+00, -8.1218121232175022e+00, -7.9411815420463903e+00, -7.7613994979807179e+00, -7.5824315832521663e+00, -7.4042449710755918e+00, -7.2268083010962609e+00, -7.0500915749135666e+00, -6.8740660605962152e+00, -6.6987042052389292e+00, -6.5239795547267194e+00, -6.3498666799725676e+00, -6.1763411089806590e+00, -6.0033792641618602e+00, -5.8309584043931153e+00, -5.6590565713687386e+00, -5.4876525398409859e+00, -5.3167257713902920e+00, -5.1462563714034442e+00, -4.9762250489711075e+00, -4.8066130794454613e+00, -4.6374022694244390e+00, -4.4685749239519819e+00, -4.3001138157438552e+00, -4.1320021562665863e+00, -3.9642235685129914e+00, -3.7967620613319606e+00, -3.6296020051828295e+00, -3.4627281091959579e+00, -3.2961253994312023e+00, -3.1297791982350316e+00, -2.9636751046050795e+00, -2.7977989754781833e+00, -2.6321369078644477e+00, -2.4666752217556631e+00, -2.3014004437416671e+00, -2.1362992912728620e+00, -1.9713586575113293e+00, -1.8065655967166996e+00, -1.6419073101163049e+00, -1.4773711322121057e+00, -1.3129445174795540e+00, -1.1486150274158851e+00, -9.8437031789740859e-01, -8.2019812680717263e-01, -6.5608626189593511e-01, -4.9202258884072003e-01, 
-3.2799501946636006e-01, -1.6399150009634114e-01, -0.0000000000000000e+00, 1.6399150009634114e-01, 3.2799501946636006e-01, 4.9202258884072003e-01, 6.5608626189593511e-01, 8.2019812680717263e-01, 9.8437031789740859e-01, 1.1486150274158851e+00, 1.3129445174795540e+00, 1.4773711322121057e+00, 1.6419073101163049e+00, 1.8065655967166996e+00, 1.9713586575113293e+00, 2.1362992912728620e+00, 2.3014004437416671e+00, 2.4666752217556631e+00, 2.6321369078644477e+00, 2.7977989754781833e+00, 2.9636751046050795e+00, 3.1297791982350316e+00, 3.2961253994312023e+00, 3.4627281091959579e+00, 3.6296020051828295e+00, 3.7967620613319606e+00, 3.9642235685129914e+00, 4.1320021562665863e+00, 4.3001138157438552e+00, 4.4685749239519819e+00, 4.6374022694244390e+00, 4.8066130794454613e+00, 4.9762250489711075e+00, 5.1462563714034442e+00, 5.3167257713902920e+00, 5.4876525398409859e+00, 5.6590565713687386e+00, 5.8309584043931153e+00, 6.0033792641618602e+00, 6.1763411089806590e+00, 6.3498666799725676e+00, 6.5239795547267194e+00, 6.6987042052389292e+00, 6.8740660605962152e+00, 7.0500915749135666e+00, 7.2268083010962609e+00, 7.4042449710755918e+00, 7.5824315832521663e+00, 7.7613994979807179e+00, 7.9411815420463903e+00, 8.1218121232175022e+00, 8.3033273561177268e+00, 8.4857652008457727e+00, 8.6691656159885877e+00, 8.8535707279314924e+00, 9.0390250186738808e+00, 9.2255755347225286e+00, 9.4132721200689708e+00, 9.6021676767792830e+00, 9.7923184573541491e+00, 9.9837843937805424e+00, 1.0176629469126997e+01, 1.0370922138674908e+01, 1.0566735808984420e+01, 1.0764149385037474e+01, 1.0963247897778071e+01, 1.1164123227107424e+01, 1.1366874938858906e+01, 1.1571611258704099e+01, 1.1778450211639703e+01, 1.1987520963108198e+01, 1.2198965407516690e+01, 1.2412940062792593e+01, 1.2629618346876333e+01, 1.2849193335481473e+01, 1.3071881132685062e+01, 1.3297925030905425e+01, 1.3527600700651213e+01, 1.3761222742582584e+01, 1.3999153070125370e+01, 1.4241811795112390e+01, 1.4489691603942756e+01, 1.4743377111311560e+01, 1.5003571496240674e+01, 1.5271134113158810e+01, 1.5547135229076916e+01, 1.5832938615515195e+01, 1.6130331776708392e+01, 1.6441742871044735e+01, 1.6770628427712005e+01, 1.7122234853198016e+01, 1.7505307899899091e+01, 1.7936815806741905e+01, 1.8461160480857558e+01}, + {-1.8513928608263910e+01, -1.7990078266499342e+01, -1.7558985604821832e+01, -1.7176287776635139e+01, -1.6825031348357740e+01, -1.6496478147418177e+01, -1.6185386288902521e+01, -1.5888302208889046e+01, -1.5602799824404595e+01, -1.5327093154858311e+01, -1.5059819589095412e+01, -1.4799909769977225e+01, -1.4546505073756210e+01, -1.4298902920011114e+01, -1.4056519192842705e+01, -1.3818861627636300e+01, -1.3585510473849133e+01, -1.3356104131088610e+01, -1.3130328272714728e+01, -1.2907907470327904e+01, -1.2688598647247023e+01, -1.2472185893139361e+01, -1.2258476307546671e+01, -1.2047296632129509e+01, -1.1838490495223393e+01, -1.1631916137256281e+01, -1.1427444517781534e+01, -1.1224957728290695e+01, -1.1024347652217228e+01, -1.0825514826405534e+01, -1.0628367468022887e+01, -1.0432820638288694e+01, -1.0238795520089184e+01, -1.0046218790968188e+01, -9.8550220764490426e+00, -9.6651414713778827e+00, -9.4765171191542965e+00, -9.2890928404578474e+00, -9.1028158044838392e+00, -8.9176362368413074e+00, -8.7335071591960247e+00, -8.5503841565041103e+00, -8.3682251683108984e+00, -8.1869903011111198e+00, -8.0066416592005272e+00, -7.8271431918122021e+00, -7.6484605546357463e+00, -7.4705609840747460e+00, -7.2934131828156445e+00, -7.1169872154661693e+00, -6.9412544131792480e+00, 
-6.7661872863133015e+00, -6.5917594442957039e+00, -6.4179455219559527e+00, -6.2447211116812200e+00, -6.0720627008216166e+00, -5.8999476138372433e+00, -5.7283539587355410e+00, -5.5572605773966774e+00, -5.3866469994277919e+00, -5.2164933992246887e+00, -5.0467805559527736e+00, -4.8774898161882945e+00, -4.7086030589866992e+00, -4.5401026631678034e+00, -4.3719714766276327e+00, -4.2041927875047698e+00, -4.0367502970449332e+00, -3.8696280940217886e+00, -3.7028106305845436e+00, -3.5362826994142633e+00, -3.3700294120808700e+00, -3.2040361785018661e+00, -3.0382886874118746e+00, -2.8727728877593544e+00, -2.7074749709533479e+00, -2.5423813538889330e+00, -2.3774786626852964e+00, -2.2127537170750284e+00, -2.0481935153874455e+00, -1.8837852200725123e+00, -1.7195161437153161e+00, -1.5553737354940405e+00, -1.3913455680370777e+00, -1.2274193246372884e+00, -1.0635827867835286e+00, -8.9982382197139399e-01, -7.3613037175675000e-01, -5.7249044001699179e-01, -4.0889208138615224e-01, -2.4532338983095259e-01, -8.1772487335672980e-02, 8.1772487335672980e-02, 2.4532338983095259e-01, 4.0889208138615224e-01, 5.7249044001699179e-01, 7.3613037175675000e-01, 8.9982382197139399e-01, 1.0635827867835286e+00, 1.2274193246372884e+00, 1.3913455680370777e+00, 1.5553737354940405e+00, 1.7195161437153161e+00, 1.8837852200725123e+00, 2.0481935153874455e+00, 2.2127537170750284e+00, 2.3774786626852964e+00, 2.5423813538889330e+00, 2.7074749709533479e+00, 2.8727728877593544e+00, 3.0382886874118746e+00, 3.2040361785018661e+00, 3.3700294120808700e+00, 3.5362826994142633e+00, 3.7028106305845436e+00, 3.8696280940217886e+00, 4.0367502970449332e+00, 4.2041927875047698e+00, 4.3719714766276327e+00, 4.5401026631678034e+00, 4.7086030589866992e+00, 4.8774898161882945e+00, 5.0467805559527736e+00, 5.2164933992246887e+00, 5.3866469994277919e+00, 5.5572605773966774e+00, 5.7283539587355410e+00, 5.8999476138372433e+00, 6.0720627008216166e+00, 6.2447211116812200e+00, 6.4179455219559527e+00, 6.5917594442957039e+00, 6.7661872863133015e+00, 6.9412544131792480e+00, 7.1169872154661693e+00, 7.2934131828156445e+00, 7.4705609840747460e+00, 7.6484605546357463e+00, 7.8271431918122021e+00, 8.0066416592005272e+00, 8.1869903011111198e+00, 8.3682251683108984e+00, 8.5503841565041103e+00, 8.7335071591960247e+00, 8.9176362368413074e+00, 9.1028158044838392e+00, 9.2890928404578474e+00, 9.4765171191542965e+00, 9.6651414713778827e+00, 9.8550220764490426e+00, 1.0046218790968188e+01, 1.0238795520089184e+01, 1.0432820638288694e+01, 1.0628367468022887e+01, 1.0825514826405534e+01, 1.1024347652217228e+01, 1.1224957728290695e+01, 1.1427444517781534e+01, 1.1631916137256281e+01, 1.1838490495223393e+01, 1.2047296632129509e+01, 1.2258476307546671e+01, 1.2472185893139361e+01, 1.2688598647247023e+01, 1.2907907470327904e+01, 1.3130328272714728e+01, 1.3356104131088610e+01, 1.3585510473849133e+01, 1.3818861627636300e+01, 1.4056519192842705e+01, 1.4298902920011114e+01, 1.4546505073756210e+01, 1.4799909769977225e+01, 1.5059819589095412e+01, 1.5327093154858311e+01, 1.5602799824404595e+01, 1.5888302208889046e+01, 1.6185386288902521e+01, 1.6496478147418177e+01, 1.6825031348357740e+01, 1.7176287776635139e+01, 1.7558985604821832e+01, 1.7990078266499342e+01, 1.8513928608263910e+01}, + {-1.8566551582966934e+01, -1.8043192366212146e+01, -1.7612512215572043e+01, -1.7230187103675732e+01, -1.6879278311267964e+01, -1.6551055199152213e+01, -1.6240280376324659e+01, -1.5943503225988053e+01, -1.5658299727865778e+01, -1.5382885415641553e+01, -1.5115898831352473e+01, -1.4856271522065896e+01, 
-1.4603145590122086e+01, -1.4355819049893570e+01, -1.4113708281060786e+01, -1.3876321438029819e+01, -1.3643239129136898e+01, -1.3414100064890203e+01, -1.3188590190760220e+01, -1.2966434318726625e+01, -1.2747389586262109e+01, -1.2531240275314762e+01, -1.2317793659316827e+01, -1.2106876638246456e+01, -1.1898332985486029e+01, -1.1692021075138110e+01, -1.1487811990637235e+01, -1.1285587938886058e+01, -1.1085240911376399e+01, -1.0886671546608126e+01, -1.0689788157813780e+01, -1.0494505897387436e+01, -1.0300746035105149e+01, -1.0108435331643134e+01, -9.9175054923613413e+00, -9.7278926890529842e+00, -9.5395371395344313e+00, -9.3523827366910055e+00, -9.1663767199978832e+00, -8.9814693836739572e+00, -8.7976138165555611e+00, -8.6147656695390857e+00, -8.4328829470701354e+00, -8.2519258196777496e+00, -8.0718564549859924e+00, -7.8926388649980117e+00, -7.7142387677523603e+00, -7.5366234617083592e+00, -7.3597617114348806e+00, -7.1836236433617389e+00, -7.0081806505105702e+00, -6.8334053052569255e+00, -6.6592712792911071e+00, -6.4857532700449330e+00, -6.3128269329377309e+00, -6.1404688188693868e+00, -5.9686563164530524e+00, -5.7973675985364332e+00, -5.6265815726098278e+00, -5.4562778347421155e+00, -5.2864366267236216e+00, -5.1170387961280301e+00, -4.9480657590346588e+00, -4.7794994651782767e+00, -4.6113223653163988e+00, -4.4435173806242290e+00, -4.2760678739453368e+00, -4.1089576227421016e+00, -3.9421707936041384e+00, -3.7756919181855801e+00, -3.6095058704533836e+00, -3.4435978451389198e+00, -3.2779533372941430e+00, -3.1125581228617367e+00, -2.9473982401758851e+00, -2.7824599723168095e+00, -2.6177298302480856e+00, -2.4531945366709622e+00, -2.2888410105346408e+00, -2.1246563521456703e+00, -1.9606278288234207e+00, -1.7967428610519807e+00, -1.6329890090818511e+00, -1.4693539599375276e+00, -1.3058255147894620e+00, -1.1423915766510224e+00, -9.7904013836295944e-01, -8.1575927082952060e-01, -6.5253711147178228e-01, -4.8936185286498823e-01, -3.2622173152770212e-01, -1.6310501683142664e-01, -0.0000000000000000e+00, 1.6310501683142664e-01, 3.2622173152770212e-01, 4.8936185286498823e-01, 6.5253711147178228e-01, 8.1575927082952060e-01, 9.7904013836295944e-01, 1.1423915766510224e+00, 1.3058255147894620e+00, 1.4693539599375276e+00, 1.6329890090818511e+00, 1.7967428610519807e+00, 1.9606278288234207e+00, 2.1246563521456703e+00, 2.2888410105346408e+00, 2.4531945366709622e+00, 2.6177298302480856e+00, 2.7824599723168095e+00, 2.9473982401758851e+00, 3.1125581228617367e+00, 3.2779533372941430e+00, 3.4435978451389198e+00, 3.6095058704533836e+00, 3.7756919181855801e+00, 3.9421707936041384e+00, 4.1089576227421016e+00, 4.2760678739453368e+00, 4.4435173806242290e+00, 4.6113223653163988e+00, 4.7794994651782767e+00, 4.9480657590346588e+00, 5.1170387961280301e+00, 5.2864366267236216e+00, 5.4562778347421155e+00, 5.6265815726098278e+00, 5.7973675985364332e+00, 5.9686563164530524e+00, 6.1404688188693868e+00, 6.3128269329377309e+00, 6.4857532700449330e+00, 6.6592712792911071e+00, 6.8334053052569255e+00, 7.0081806505105702e+00, 7.1836236433617389e+00, 7.3597617114348806e+00, 7.5366234617083592e+00, 7.7142387677523603e+00, 7.8926388649980117e+00, 8.0718564549859924e+00, 8.2519258196777496e+00, 8.4328829470701354e+00, 8.6147656695390857e+00, 8.7976138165555611e+00, 8.9814693836739572e+00, 9.1663767199978832e+00, 9.3523827366910055e+00, 9.5395371395344313e+00, 9.7278926890529842e+00, 9.9175054923613413e+00, 1.0108435331643134e+01, 1.0300746035105149e+01, 1.0494505897387436e+01, 1.0689788157813780e+01, 1.0886671546608126e+01, 
1.1085240911376399e+01, 1.1285587938886058e+01, 1.1487811990637235e+01, 1.1692021075138110e+01, 1.1898332985486029e+01, 1.2106876638246456e+01, 1.2317793659316827e+01, 1.2531240275314762e+01, 1.2747389586262109e+01, 1.2966434318726625e+01, 1.3188590190760220e+01, 1.3414100064890203e+01, 1.3643239129136898e+01, 1.3876321438029819e+01, 1.4113708281060786e+01, 1.4355819049893570e+01, 1.4603145590122086e+01, 1.4856271522065896e+01, 1.5115898831352473e+01, 1.5382885415641553e+01, 1.5658299727865778e+01, 1.5943503225988053e+01, 1.6240280376324659e+01, 1.6551055199152213e+01, 1.6879278311267964e+01, 1.7230187103675732e+01, 1.7612512215572043e+01, 1.8043192366212146e+01, 1.8566551582966934e+01}, + {-1.8619030592090468e+01, -1.8096159331273782e+01, -1.7665888990577489e+01, -1.7283934123310647e+01, -1.6933370634565016e+01, -1.6605475372599290e+01, -1.6295015412972358e+01, -1.5998543068078176e+01, -1.5713636367765231e+01, -1.5438512350731910e+01, -1.5171810705118517e+01, -1.4912463876600265e+01, -1.4659614689068135e+01, -1.4412561747929246e+01, -1.4170721925268760e+01, -1.3933603791786325e+01, -1.3700788312318828e+01, -1.3471914506172659e+01, -1.3246668589046763e+01, -1.3024775611596180e+01, -1.2805992923888546e+01, -1.2590104998712771e+01, -1.2376919282049462e+01, -1.2166262830933078e+01, -1.1957979562598471e+01, -1.1751927983683725e+01, -1.1547979300411118e+01, -1.1346015834038415e+01, -1.1145929683090303e+01, -1.0947621586721102e+01, -1.0750999953246895e+01, -1.0555980025269434e+01, -1.0362483158498314e+01, -1.0170436195792986e+01, -9.9797709214047838e+00, -9.7904235831297708e+00, -9.6023344722552224e+00, -9.4154475529222363e+00, -9.2297101339294976e+00, -9.0450725771408784e+00, -8.8614880375878489e+00, -8.6789122311192006e+00, -8.4973032260786319e+00, -8.3166212560112189e+00, -8.1368285508332487e+00, -7.9578891842622976e+00, -7.7797689356090070e+00, -7.6024351642886749e+00, -7.4258566956282692e+00, -7.2500037167290969e+00, -7.0748476813029546e+00, -6.9003612225343067e+00, -6.7265180731367256e+00, -6.5532929918714933e+00, -6.3806616958821749e+00, -6.2086007982735998e+00, -6.0370877504282516e+00, -5.8661007886095131e+00, -5.6956188844502709e+00, -5.5256216989684903e+00, -5.3560895397890276e+00, -5.1870033212841671e+00, -5.0183445273745217e+00, -4.8500951767577565e+00, -4.6822377903553614e+00, -4.5147553607879187e+00, -4.3476313237072048e+00, -4.1808495308294304e+00, -4.0143942245280941e+00, -3.8482500138575717e+00, -3.6824018518898707e+00, -3.5168350142570763e+00, -3.3515350788010423e+00, -3.1864879062399973e+00, -3.0216796217689894e+00, -2.8570965975176055e+00, -2.6927254357942609e+00, -2.5285529530516158e+00, -2.3645661645123681e+00, -2.2007522693989476e+00, -2.0370986367144122e+00, -1.8735927915252717e+00, -1.7102224017000158e+00, -1.5469752650598505e+00, -1.3838392969005895e+00, -1.2208025178467894e+00, -1.0578530420011503e+00, -8.9497906535386207e-01, -7.3216885441804769e-01, -5.6941073505871143e-01, -4.0669308148366978e-01, -2.4400430536582823e-01, -8.1332845066888051e-02, 8.1332845066888051e-02, 2.4400430536582823e-01, 4.0669308148366978e-01, 5.6941073505871143e-01, 7.3216885441804769e-01, 8.9497906535386207e-01, 1.0578530420011503e+00, 1.2208025178467894e+00, 1.3838392969005895e+00, 1.5469752650598505e+00, 1.7102224017000158e+00, 1.8735927915252717e+00, 2.0370986367144122e+00, 2.2007522693989476e+00, 2.3645661645123681e+00, 2.5285529530516158e+00, 2.6927254357942609e+00, 2.8570965975176055e+00, 3.0216796217689894e+00, 3.1864879062399973e+00, 3.3515350788010423e+00, 3.5168350142570763e+00, 
3.6824018518898707e+00, 3.8482500138575717e+00, 4.0143942245280941e+00, 4.1808495308294304e+00, 4.3476313237072048e+00, 4.5147553607879187e+00, 4.6822377903553614e+00, 4.8500951767577565e+00, 5.0183445273745217e+00, 5.1870033212841671e+00, 5.3560895397890276e+00, 5.5256216989684903e+00, 5.6956188844502709e+00, 5.8661007886095131e+00, 6.0370877504282516e+00, 6.2086007982735998e+00, 6.3806616958821749e+00, 6.5532929918714933e+00, 6.7265180731367256e+00, 6.9003612225343067e+00, 7.0748476813029546e+00, 7.2500037167290969e+00, 7.4258566956282692e+00, 7.6024351642886749e+00, 7.7797689356090070e+00, 7.9578891842622976e+00, 8.1368285508332487e+00, 8.3166212560112189e+00, 8.4973032260786319e+00, 8.6789122311192006e+00, 8.8614880375878489e+00, 9.0450725771408784e+00, 9.2297101339294976e+00, 9.4154475529222363e+00, 9.6023344722552224e+00, 9.7904235831297708e+00, 9.9797709214047838e+00, 1.0170436195792986e+01, 1.0362483158498314e+01, 1.0555980025269434e+01, 1.0750999953246895e+01, 1.0947621586721102e+01, 1.1145929683090303e+01, 1.1346015834038415e+01, 1.1547979300411118e+01, 1.1751927983683725e+01, 1.1957979562598471e+01, 1.2166262830933078e+01, 1.2376919282049462e+01, 1.2590104998712771e+01, 1.2805992923888546e+01, 1.3024775611596180e+01, 1.3246668589046763e+01, 1.3471914506172659e+01, 1.3700788312318828e+01, 1.3933603791786325e+01, 1.4170721925268760e+01, 1.4412561747929246e+01, 1.4659614689068135e+01, 1.4912463876600265e+01, 1.5171810705118517e+01, 1.5438512350731910e+01, 1.5713636367765231e+01, 1.5998543068078176e+01, 1.6295015412972358e+01, 1.6605475372599290e+01, 1.6933370634565016e+01, 1.7283934123310647e+01, 1.7665888990577489e+01, 1.8096159331273782e+01, 1.8619030592090468e+01}, + {-1.8671366806690571e+01, -1.8148980370347783e+01, -1.7719117170958008e+01, -1.7337530106682756e+01, -1.6987309618003824e+01, -1.6659739995236801e+01, -1.6349592753466883e+01, -1.6053423116550778e+01, -1.5768811152035209e+01, -1.5493975394480826e+01, -1.5227556671125813e+01, -1.4968488320721917e+01, -1.4715913884228952e+01, -1.4469132554376165e+01, -1.4227561692518973e+01, -1.3990710282959036e+01, -1.3758159644687614e+01, -1.3529549103735965e+01, -1.3304565144176481e+01, -1.3082933053660961e+01, -1.2864410393317099e+01, -1.2648781825357542e+01, -1.2435854966991251e+01, -1.2225457031069224e+01, -1.2017432077505465e+01, -1.1811638744355296e+01, -1.1607948359555895e+01, -1.1406243357684700e+01, -1.1206415943294315e+01, -1.1008366955213159e+01, -1.0812004895879960e+01, -1.0617245097158223e+01, -1.0424008999756163e+01, -1.0232223527789033e+01, -1.0041820543476462e+01, -9.8527363696957284e+00, -9.6649113702822387e+00, -9.4782895797065567e+00, -9.2928183751587881e+00, -9.1084481852078358e+00, -8.9251322301305365e+00, -8.7428262897666009e+00, -8.5614884953828341e+00, -8.3810791425501474e+00, -8.2015605224698938e+00, -8.0228967695483639e+00, -7.8450537233223443e+00, -7.6679988030953385e+00, -7.4917008938611840e+00, -7.3161302422763823e+00, -7.1412583615998901e+00, -6.9670579446537078e+00, -6.7935027839732776e+00, -6.6205676984161617e+00, -6.4482284655834805e+00, -6.2764617594829843e+00, -6.1052450929273041e+00, -5.9345567642172057e+00, -5.7643758077087970e+00, -5.5946819479066203e+00, -5.4254555567622882e+00, -5.2566776138914113e+00, -5.0883296694508156e+00, -4.9203938094437563e+00, -4.7528526232436477e+00, -4.5856891731470037e+00, -4.4188869657842096e+00, -4.2524299252326587e+00, -4.0863023676909851e+00, -3.9204889775857690e+00, -3.7549747849933839e+00, -3.5897451442697546e+00, -3.4247857137898574e+00, 
-3.2600824367068570e+00, -3.0956215226480941e+00, -2.9313894302716164e+00, -2.7673728506128410e+00, -2.6035586911561817e+00, -2.4399340605712174e+00, -2.2764862540572279e+00, -2.1132027392437367e+00, -1.9500711425981514e+00, -1.7870792362946351e+00, -1.6242149255011178e+00, -1.4614662360438109e+00, -1.2988213024107587e+00, -1.1362683560579210e+00, -9.7379571398297626e-01, -8.1139176753353270e-01, -6.4904497141773709e-01, -4.8674383288637452e-01, -3.2447690105649102e-01, -1.6223275634733261e-01, -0.0000000000000000e+00, 1.6223275634733261e-01, 3.2447690105649102e-01, 4.8674383288637452e-01, 6.4904497141773709e-01, 8.1139176753353270e-01, 9.7379571398297626e-01, 1.1362683560579210e+00, 1.2988213024107587e+00, 1.4614662360438109e+00, 1.6242149255011178e+00, 1.7870792362946351e+00, 1.9500711425981514e+00, 2.1132027392437367e+00, 2.2764862540572279e+00, 2.4399340605712174e+00, 2.6035586911561817e+00, 2.7673728506128410e+00, 2.9313894302716164e+00, 3.0956215226480941e+00, 3.2600824367068570e+00, 3.4247857137898574e+00, 3.5897451442697546e+00, 3.7549747849933839e+00, 3.9204889775857690e+00, 4.0863023676909851e+00, 4.2524299252326587e+00, 4.4188869657842096e+00, 4.5856891731470037e+00, 4.7528526232436477e+00, 4.9203938094437563e+00, 5.0883296694508156e+00, 5.2566776138914113e+00, 5.4254555567622882e+00, 5.5946819479066203e+00, 5.7643758077087970e+00, 5.9345567642172057e+00, 6.1052450929273041e+00, 6.2764617594829843e+00, 6.4482284655834805e+00, 6.6205676984161617e+00, 6.7935027839732776e+00, 6.9670579446537078e+00, 7.1412583615998901e+00, 7.3161302422763823e+00, 7.4917008938611840e+00, 7.6679988030953385e+00, 7.8450537233223443e+00, 8.0228967695483639e+00, 8.2015605224698938e+00, 8.3810791425501474e+00, 8.5614884953828341e+00, 8.7428262897666009e+00, 8.9251322301305365e+00, 9.1084481852078358e+00, 9.2928183751587881e+00, 9.4782895797065567e+00, 9.6649113702822387e+00, 9.8527363696957284e+00, 1.0041820543476462e+01, 1.0232223527789033e+01, 1.0424008999756163e+01, 1.0617245097158223e+01, 1.0812004895879960e+01, 1.1008366955213159e+01, 1.1206415943294315e+01, 1.1406243357684700e+01, 1.1607948359555895e+01, 1.1811638744355296e+01, 1.2017432077505465e+01, 1.2225457031069224e+01, 1.2435854966991251e+01, 1.2648781825357542e+01, 1.2864410393317099e+01, 1.3082933053660961e+01, 1.3304565144176481e+01, 1.3529549103735965e+01, 1.3758159644687614e+01, 1.3990710282959036e+01, 1.4227561692518973e+01, 1.4469132554376165e+01, 1.4715913884228952e+01, 1.4968488320721917e+01, 1.5227556671125813e+01, 1.5493975394480826e+01, 1.5768811152035209e+01, 1.6053423116550778e+01, 1.6349592753466883e+01, 1.6659739995236801e+01, 1.6987309618003824e+01, 1.7337530106682756e+01, 1.7719117170958008e+01, 1.8148980370347783e+01, 1.8671366806690571e+01}, + {-1.8723561382058008e+01, -1.8201656675685278e+01, -1.7772197980856198e+01, -1.7390976307430105e+01, -1.7041096543327139e+01, -1.6713850376034177e+01, -1.6404013733432148e+01, -1.6108144733313441e+01, -1.5823825468638194e+01, -1.5549275960782586e+01, -1.5283138169159214e+01, -1.5024346320130698e+01, -1.4772044667297957e+01, -1.4525532987045759e+01, -1.4284229126904924e+01, -1.4047642482122557e+01, -1.3815354723529373e+01, -1.3587005481836711e+01, -1.3362281507661821e+01, -1.3140908323999378e+01, -1.2922643701525621e+01, -1.2707272490482195e+01, -1.2494602478007950e+01, -1.2284461031551617e+01, -1.2076692352553557e+01, -1.1871155209388238e+01, -1.1667721050654935e+01, -1.1466272423235269e+01, -1.1266701636724759e+01, -1.1068909628665612e+01, -1.0872804994678496e+01, 
-1.0678303154963148e+01, -1.0485325634312177e+01, -1.0293799437190248e+01, -1.0103656502883585e+01, -9.9148332284509078e+00, -9.7272700493753241e+00, -9.5409110695534167e+00, -9.3557037336580802e+00, -9.1715985360473766e+00, -8.9885487613184782e+00, -8.8065102523660030e+00, -8.6254412024311282e+00, -8.4453019681474064e+00, -8.2660549010220361e+00, -8.0876641951531028e+00, -7.9100957492873887e+00, -7.7333170415796193e+00, -7.5572970156311445e+00, -7.3820059765703627e+00, -7.2074154960945611e+00, -7.0334983255273515e+00, -6.8602283160613764e+00, -6.6875803454554488e+00, -6.5155302505411381e+00, -6.3440547649682282e+00, -6.1731314616830320e+00, -6.0027386996898384e+00, -5.8328555746948121e+00, -5.6634618732746507e+00, -5.4945380302499895e+00, -5.3260650889766197e+00, -5.1580246642968088e+00, -4.9903989079186992e+00, -4.8231704760145977e+00, -4.6563224988490806e+00, -4.4898385522658026e+00, -4.3237026308777766e+00, -4.1578991228200994e+00, -3.9924127859367369e+00, -3.8272287252842805e+00, -3.6623323718456740e+00, -3.4977094623559837e+00, -3.3333460201503651e+00, -3.1692283369516736e+00, -3.0053429555216815e+00, -2.8416766531057553e+00, -2.6782164256060947e+00, -2.5149494724234098e+00, -2.3518631819111540e+00, -2.1889451173902774e+00, -2.0261830036759103e+00, -1.8635647140704730e+00, -1.7010782577804868e+00, -1.5387117677168360e+00, -1.3764534886404416e+00, -1.2142917656172734e+00, -1.0522150327483684e+00, -8.9021180214204398e-01, -7.2827065309683536e-01, -5.6638022146482336e-01, -4.0452918916600705e-01, -2.4270627382517243e-01, -8.0900218503372048e-02, 8.0900218503372048e-02, 2.4270627382517243e-01, 4.0452918916600705e-01, 5.6638022146482336e-01, 7.2827065309683536e-01, 8.9021180214204398e-01, 1.0522150327483684e+00, 1.2142917656172734e+00, 1.3764534886404416e+00, 1.5387117677168360e+00, 1.7010782577804868e+00, 1.8635647140704730e+00, 2.0261830036759103e+00, 2.1889451173902774e+00, 2.3518631819111540e+00, 2.5149494724234098e+00, 2.6782164256060947e+00, 2.8416766531057553e+00, 3.0053429555216815e+00, 3.1692283369516736e+00, 3.3333460201503651e+00, 3.4977094623559837e+00, 3.6623323718456740e+00, 3.8272287252842805e+00, 3.9924127859367369e+00, 4.1578991228200994e+00, 4.3237026308777766e+00, 4.4898385522658026e+00, 4.6563224988490806e+00, 4.8231704760145977e+00, 4.9903989079186992e+00, 5.1580246642968088e+00, 5.3260650889766197e+00, 5.4945380302499895e+00, 5.6634618732746507e+00, 5.8328555746948121e+00, 6.0027386996898384e+00, 6.1731314616830320e+00, 6.3440547649682282e+00, 6.5155302505411381e+00, 6.6875803454554488e+00, 6.8602283160613764e+00, 7.0334983255273515e+00, 7.2074154960945611e+00, 7.3820059765703627e+00, 7.5572970156311445e+00, 7.7333170415796193e+00, 7.9100957492873887e+00, 8.0876641951531028e+00, 8.2660549010220361e+00, 8.4453019681474064e+00, 8.6254412024311282e+00, 8.8065102523660030e+00, 8.9885487613184782e+00, 9.1715985360473766e+00, 9.3557037336580802e+00, 9.5409110695534167e+00, 9.7272700493753241e+00, 9.9148332284509078e+00, 1.0103656502883585e+01, 1.0293799437190248e+01, 1.0485325634312177e+01, 1.0678303154963148e+01, 1.0872804994678496e+01, 1.1068909628665612e+01, 1.1266701636724759e+01, 1.1466272423235269e+01, 1.1667721050654935e+01, 1.1871155209388238e+01, 1.2076692352553557e+01, 1.2284461031551617e+01, 1.2494602478007950e+01, 1.2707272490482195e+01, 1.2922643701525621e+01, 1.3140908323999378e+01, 1.3362281507661821e+01, 1.3587005481836711e+01, 1.3815354723529373e+01, 1.4047642482122557e+01, 1.4284229126904924e+01, 1.4525532987045759e+01, 1.4772044667297957e+01, 
1.5024346320130698e+01, 1.5283138169159214e+01, 1.5549275960782586e+01, 1.5823825468638194e+01, 1.6108144733313441e+01, 1.6404013733432148e+01, 1.6713850376034177e+01, 1.7041096543327139e+01, 1.7390976307430105e+01, 1.7772197980856198e+01, 1.8201656675685278e+01, 1.8723561382058008e+01}, + {-1.8775615458013544e+01, -1.8254189423434781e+01, -1.7825132627760091e+01, -1.7444273962020645e+01, -1.7094732674611997e+01, -1.6767807805810907e+01, -1.6458279669864240e+01, -1.6162709261170946e+01, -1.5878680685959477e+01, -1.5604415443478599e+01, -1.5338556618472303e+01, -1.5080039319512867e+01, -1.4828008508467775e+01, -1.4581764541755678e+01, -1.4340725750026818e+01, -1.4104401936851414e+01, -1.3872375122615423e+01, -1.3644285240693469e+01, -1.3419819306444866e+01, -1.3198703076577427e+01, -1.2980694529827234e+01, -1.2765578703092004e+01, -1.2553163552163017e+01, -1.2343276597888334e+01, -1.2135762182101830e+01, -1.1930479202418702e+01, -1.1727299227066995e+01, -1.1526104914236369e+01, -1.1326788677599657e+01, -1.1129251552471963e+01, -1.0933402226734774e+01, -1.0739156208018326e+01, -1.0546435104305445e+01, -1.0355165999524157e+01, -1.0165280909146327e+01, -9.9767163035334772e+00, -9.7894126889375794e+00, -9.6033142377998999e+00, -9.4183684613901413e+00, -9.2345259189629125e+00, -9.0517399585345863e+00, -8.8699664851432551e+00, -8.6891637530810133e+00, -8.5092921791069820e+00, -8.3303141740818543e+00, -8.1521939908263850e+00, -7.9748975863098721e+00, -7.7983924965309503e+00, -7.6226477226697984e+00, -7.4476336272751640e+00, -7.2733218394067363e+00, -7.0996851677878805e+00, -6.9266975211391140e+00, -6.7543338349621100e+00, -6.5825700041298303e+00, -6.4113828207126948e+00, -6.2407499165352709e+00, -6.0706497100141474e+00, -5.9010613568767107e+00, -5.7319647044035085e+00, -5.5633402488744599e+00, -5.3951690959323528e+00, -5.2274329236061510e+00, -5.0601139477624058e+00, -4.8931948897757946e+00, -4.7266589462300219e+00, -4.5604897604781494e+00, -4.3946713959074097e+00, -4.2291883107676789e+00, -4.0640253344354704e+00, -3.8991676449965649e+00, -3.7346007480405388e+00, -3.5703104565694619e+00, -3.4062828719311762e+00, -3.2425043656948236e+00, -3.0789615623928461e+00, -2.9156413230595541e+00, -2.7525307295016344e+00, -2.5896170692407368e+00, -2.4268878210725524e+00, -2.2643306411906372e+00, -2.1019333498267070e+00, -1.9396839183622288e+00, -1.7775704568689277e+00, -1.6155812020383324e+00, -1.4537045054627018e+00, -1.2919288222316836e+00, -1.1302426998108011e+00, -9.6863476716944097e-01, -8.0709372412735814e-01, -6.4560833088991310e-01, -4.8416739774326256e-01, -3.2275977488157320e-01, -1.6137434233903589e-01, -0.0000000000000000e+00, 1.6137434233903589e-01, 3.2275977488157320e-01, 4.8416739774326256e-01, 6.4560833088991310e-01, 8.0709372412735814e-01, 9.6863476716944097e-01, 1.1302426998108011e+00, 1.2919288222316836e+00, 1.4537045054627018e+00, 1.6155812020383324e+00, 1.7775704568689277e+00, 1.9396839183622288e+00, 2.1019333498267070e+00, 2.2643306411906372e+00, 2.4268878210725524e+00, 2.5896170692407368e+00, 2.7525307295016344e+00, 2.9156413230595541e+00, 3.0789615623928461e+00, 3.2425043656948236e+00, 3.4062828719311762e+00, 3.5703104565694619e+00, 3.7346007480405388e+00, 3.8991676449965649e+00, 4.0640253344354704e+00, 4.2291883107676789e+00, 4.3946713959074097e+00, 4.5604897604781494e+00, 4.7266589462300219e+00, 4.8931948897757946e+00, 5.0601139477624058e+00, 5.2274329236061510e+00, 5.3951690959323528e+00, 5.5633402488744599e+00, 5.7319647044035085e+00, 5.9010613568767107e+00, 6.0706497100141474e+00, 
6.2407499165352709e+00, 6.4113828207126948e+00, 6.5825700041298303e+00, 6.7543338349621100e+00, 6.9266975211391140e+00, 7.0996851677878805e+00, 7.2733218394067363e+00, 7.4476336272751640e+00, 7.6226477226697984e+00, 7.7983924965309503e+00, 7.9748975863098721e+00, 8.1521939908263850e+00, 8.3303141740818543e+00, 8.5092921791069820e+00, 8.6891637530810133e+00, 8.8699664851432551e+00, 9.0517399585345863e+00, 9.2345259189629125e+00, 9.4183684613901413e+00, 9.6033142377998999e+00, 9.7894126889375794e+00, 9.9767163035334772e+00, 1.0165280909146327e+01, 1.0355165999524157e+01, 1.0546435104305445e+01, 1.0739156208018326e+01, 1.0933402226734774e+01, 1.1129251552471963e+01, 1.1326788677599657e+01, 1.1526104914236369e+01, 1.1727299227066995e+01, 1.1930479202418702e+01, 1.2135762182101830e+01, 1.2343276597888334e+01, 1.2553163552163017e+01, 1.2765578703092004e+01, 1.2980694529827234e+01, 1.3198703076577427e+01, 1.3419819306444866e+01, 1.3644285240693469e+01, 1.3872375122615423e+01, 1.4104401936851414e+01, 1.4340725750026818e+01, 1.4581764541755678e+01, 1.4828008508467775e+01, 1.5080039319512867e+01, 1.5338556618472303e+01, 1.5604415443478599e+01, 1.5878680685959477e+01, 1.6162709261170946e+01, 1.6458279669864240e+01, 1.6767807805810907e+01, 1.7094732674611997e+01, 1.7444273962020645e+01, 1.7825132627760091e+01, 1.8254189423434781e+01, 1.8775615458013544e+01}, + {-1.8827530159196098e+01, -1.8306579773944520e+01, -1.7877922302817918e+01, -1.7497424290078804e+01, -1.7148219258607661e+01, -1.6821613557585714e+01, -1.6512391861491736e+01, -1.6217118024196811e+01, -1.5933378153189908e+01, -1.5659395216751500e+01, -1.5393813418192966e+01, -1.5135568742958350e+01, -1.4883806856859362e+01, -1.4637828692771006e+01, -1.4397053061445078e+01, -1.4160990172186152e+01, -1.3929222392681286e+01, -1.3701389956978932e+01, -1.3477180143402940e+01, -1.3256318940768097e+01, -1.3038564534403923e+01, -1.2823702146512529e+01, -1.2611539900280661e+01, -1.2401905468777167e+01, -1.2194643333116042e+01, -1.1989612519093924e+01, -1.1786684713553210e+01, -1.1585742685014308e+01, -1.1386678950280542e+01, -1.1189394641517911e+01, -1.0993798537966583e+01, -1.0799806233800744e+01, -1.0607339419318674e+01, -1.0416325257045436e+01, -1.0226695837777637e+01, -1.0038387704321417e+01, -9.8513414328396713e+00, -9.6655012634583866e+00, -9.4808147731799934e+00, -9.2972325852854905e+00, -9.1147081103323924e+00, -8.9331973146145547e+00, -8.7526585125759730e+00, -8.5730521801893929e+00, -8.3943407867426210e+00, -8.2164886428367048e+00, -8.0394617627036364e+00, -7.8632277392072103e+00, -7.6877556301072998e+00, -7.5130158543520213e+00, -7.3389800973191823e+00, -7.1656212240628339e+00, -6.9929131997360230e+00, -6.8208310164601347e+00, -6.6493506259970188e+00, -6.4784488776542828e+00, -6.3081034609187254e+00, -6.1382928523689460e+00, -5.9689962664672880e+00, -5.8001936098740767e+00, -5.6318654389648133e+00, -5.4639929202639941e+00, -5.2965577935384172e+00, -5.1295423373184992e+00, -4.9629293366389069e+00, -4.7967020528099553e+00, -4.6308441950490966e+00, -4.4653398938177720e+00, -4.3001736757230296e+00, -4.1353304398559789e+00, -3.9707954354504480e+00, -3.8065542407552790e+00, -3.6425927430227989e+00, -3.4788971195240692e+00, -3.3154538195088263e+00, -3.1522495470345691e+00, -2.9892712445951148e+00, -2.8265060774842699e+00, -2.6639414188349808e+00, -2.5015648352786735e+00, -2.3393640731732996e+00, -2.1773270453521110e+00, -2.0154418183483078e+00, -1.8536966000534938e+00, -1.6920797277704067e+00, -1.5305796566226446e+00, -1.3691849482861076e+00, 
-1.2078842600086817e+00, -1.0466663338862598e+00, -8.8551998636459661e-01, -7.2443409793770541e-01, -5.6339760301455488e-01, -4.0239947992671032e-01, -2.4142874105030510e-01, -8.0474423016328678e-02, 8.0474423016328678e-02, 2.4142874105030510e-01, 4.0239947992671032e-01, 5.6339760301455488e-01, 7.2443409793770541e-01, 8.8551998636459661e-01, 1.0466663338862598e+00, 1.2078842600086817e+00, 1.3691849482861076e+00, 1.5305796566226446e+00, 1.6920797277704067e+00, 1.8536966000534938e+00, 2.0154418183483078e+00, 2.1773270453521110e+00, 2.3393640731732996e+00, 2.5015648352786735e+00, 2.6639414188349808e+00, 2.8265060774842699e+00, 2.9892712445951148e+00, 3.1522495470345691e+00, 3.3154538195088263e+00, 3.4788971195240692e+00, 3.6425927430227989e+00, 3.8065542407552790e+00, 3.9707954354504480e+00, 4.1353304398559789e+00, 4.3001736757230296e+00, 4.4653398938177720e+00, 4.6308441950490966e+00, 4.7967020528099553e+00, 4.9629293366389069e+00, 5.1295423373184992e+00, 5.2965577935384172e+00, 5.4639929202639941e+00, 5.6318654389648133e+00, 5.8001936098740767e+00, 5.9689962664672880e+00, 6.1382928523689460e+00, 6.3081034609187254e+00, 6.4784488776542828e+00, 6.6493506259970188e+00, 6.8208310164601347e+00, 6.9929131997360230e+00, 7.1656212240628339e+00, 7.3389800973191823e+00, 7.5130158543520213e+00, 7.6877556301072998e+00, 7.8632277392072103e+00, 8.0394617627036364e+00, 8.2164886428367048e+00, 8.3943407867426210e+00, 8.5730521801893929e+00, 8.7526585125759730e+00, 8.9331973146145547e+00, 9.1147081103323924e+00, 9.2972325852854905e+00, 9.4808147731799934e+00, 9.6655012634583866e+00, 9.8513414328396713e+00, 1.0038387704321417e+01, 1.0226695837777637e+01, 1.0416325257045436e+01, 1.0607339419318674e+01, 1.0799806233800744e+01, 1.0993798537966583e+01, 1.1189394641517911e+01, 1.1386678950280542e+01, 1.1585742685014308e+01, 1.1786684713553210e+01, 1.1989612519093924e+01, 1.2194643333116042e+01, 1.2401905468777167e+01, 1.2611539900280661e+01, 1.2823702146512529e+01, 1.3038564534403923e+01, 1.3256318940768097e+01, 1.3477180143402940e+01, 1.3701389956978932e+01, 1.3929222392681286e+01, 1.4160990172186152e+01, 1.4397053061445078e+01, 1.4637828692771006e+01, 1.4883806856859362e+01, 1.5135568742958350e+01, 1.5393813418192966e+01, 1.5659395216751500e+01, 1.5933378153189908e+01, 1.6217118024196811e+01, 1.6512391861491736e+01, 1.6821613557585714e+01, 1.7148219258607661e+01, 1.7497424290078804e+01, 1.7877922302817918e+01, 1.8306579773944520e+01, 1.8827530159196098e+01}, + {-1.8879306595344101e+01, -1.8358828872057543e+01, -1.7930568181145354e+01, -1.7550428494704086e+01, -1.7201557525065336e+01, -1.6875268886917173e+01, -1.6566351589127169e+01, -1.6271372328095570e+01, -1.5987919200699100e+01, -1.5714216635509363e+01, -1.5448909947718773e+01, -1.5190935994367434e+01, -1.4939441140940190e+01, -1.4693726893234164e+01, -1.4453212539122219e+01, -1.4217408691087410e+01, -1.3985898061893202e+01, -1.3758321184299211e+01, -1.3534365597840969e+01, -1.3313757521857125e+01, -1.3096255346825995e+01, -1.2881644478923162e+01, -1.2669733207493820e+01, -1.2460349356668459e+01, -1.2253337545746717e+01, -1.2048556927665979e+01, -1.1845879306886999e+01, -1.1645187561301928e+01, -1.1446374309916036e+01, -1.1249340780842516e+01, -1.1053995843796514e+01, -1.0860255178628208e+01, -1.0668040557095399e+01, -1.0477279219472964e+01, -1.0287903331040779e+01, -1.0099849506211216e+01, -9.9130583902203586e+00, -9.7274742900396252e+00, -9.5430448475612568e+00, -9.3597207492442038e+00, -9.1774554673313418e+00, -8.9962050285074913e+00, 
-8.8159278064930948e+00, -8.6365843355868694e+00, -8.4581371426023590e+00, -8.2805505950043532e+00, -8.1037907633543647e+00, -7.9278252964301190e+00, -7.7526233076005129e+00, -7.5781552712214886e+00, -7.4043929279751541e+00, -7.2313091982087343e+00, -7.0588781024451404e+00, -6.8870746883362148e+00, -6.7158749634153345e+00, -6.5452558330803203e+00, -6.3751950433020319e+00, -6.2056711276101213e+00, -6.0366633579564608e+00, -5.8681516990995659e+00, -5.7001167661909662e+00, -5.5325397852775344e+00, -5.3654025564628540e+00, -5.1986874194964612e+00, -5.0323772215824860e+00, -4.8664552872193818e+00, -4.7009053899003126e+00, -4.5357117255196737e+00, -4.3708588873453733e+00, -4.2063318424291563e+00, -4.0421159093385528e+00, -3.8781967371040955e+00, -3.7145602852845432e+00, -3.5511928050609516e+00, -3.3880808212776983e+00, -3.2252111153551550e+00, -3.0625707090045471e+00, -2.9001468486808863e+00, -2.7379269907145933e+00, -2.5758987870667562e+00, -2.4140500716568205e+00, -2.2523688472150090e+00, -2.0908432726148978e+00, -1.9294616506444018e+00, -1.7682124161759520e+00, -1.6070841246989318e+00, -1.4460654411794531e+00, -1.2851451292143858e+00, -1.1243120404481430e+00, -9.6355510422216051e-01, -8.0286331742823902e-01, -6.4222573453801035e-01, -4.8163145778170013e-01, -3.2106962745015089e-01, -1.6052941229469772e-01, -0.0000000000000000e+00, 1.6052941229469772e-01, 3.2106962745015089e-01, 4.8163145778170013e-01, 6.4222573453801035e-01, 8.0286331742823902e-01, 9.6355510422216051e-01, 1.1243120404481430e+00, 1.2851451292143858e+00, 1.4460654411794531e+00, 1.6070841246989318e+00, 1.7682124161759520e+00, 1.9294616506444018e+00, 2.0908432726148978e+00, 2.2523688472150090e+00, 2.4140500716568205e+00, 2.5758987870667562e+00, 2.7379269907145933e+00, 2.9001468486808863e+00, 3.0625707090045471e+00, 3.2252111153551550e+00, 3.3880808212776983e+00, 3.5511928050609516e+00, 3.7145602852845432e+00, 3.8781967371040955e+00, 4.0421159093385528e+00, 4.2063318424291563e+00, 4.3708588873453733e+00, 4.5357117255196737e+00, 4.7009053899003126e+00, 4.8664552872193818e+00, 5.0323772215824860e+00, 5.1986874194964612e+00, 5.3654025564628540e+00, 5.5325397852775344e+00, 5.7001167661909662e+00, 5.8681516990995659e+00, 6.0366633579564608e+00, 6.2056711276101213e+00, 6.3751950433020319e+00, 6.5452558330803203e+00, 6.7158749634153345e+00, 6.8870746883362148e+00, 7.0588781024451404e+00, 7.2313091982087343e+00, 7.4043929279751541e+00, 7.5781552712214886e+00, 7.7526233076005129e+00, 7.9278252964301190e+00, 8.1037907633543647e+00, 8.2805505950043532e+00, 8.4581371426023590e+00, 8.6365843355868694e+00, 8.8159278064930948e+00, 8.9962050285074913e+00, 9.1774554673313418e+00, 9.3597207492442038e+00, 9.5430448475612568e+00, 9.7274742900396252e+00, 9.9130583902203586e+00, 1.0099849506211216e+01, 1.0287903331040779e+01, 1.0477279219472964e+01, 1.0668040557095399e+01, 1.0860255178628208e+01, 1.1053995843796514e+01, 1.1249340780842516e+01, 1.1446374309916036e+01, 1.1645187561301928e+01, 1.1845879306886999e+01, 1.2048556927665979e+01, 1.2253337545746717e+01, 1.2460349356668459e+01, 1.2669733207493820e+01, 1.2881644478923162e+01, 1.3096255346825995e+01, 1.3313757521857125e+01, 1.3534365597840969e+01, 1.3758321184299211e+01, 1.3985898061893202e+01, 1.4217408691087410e+01, 1.4453212539122219e+01, 1.4693726893234164e+01, 1.4939441140940190e+01, 1.5190935994367434e+01, 1.5448909947718773e+01, 1.5714216635509363e+01, 1.5987919200699100e+01, 1.6271372328095570e+01, 1.6566351589127169e+01, 1.6875268886917173e+01, 1.7201557525065336e+01, 
1.7550428494704086e+01, 1.7930568181145354e+01, 1.8358828872057543e+01, 1.8879306595344101e+01}, + {-1.8930945861570141e+01, -1.8410937847399730e+01, -1.7983071422125320e+01, -1.7603287762782053e+01, -1.7254748687059873e+01, -1.6928775032235986e+01, -1.6620160116009828e+01, -1.6325473460556140e+01, -1.6042305140399389e+01, -1.5768881035760351e+01, -1.5503847567102451e+01, -1.5246142457847222e+01, -1.4994912768931876e+01, -1.4749460575583919e+01, -1.4509205639853443e+01, -1.4273658974878348e+01, -1.4042403636302666e+01, -1.3815080453660714e+01, -1.3591377225971019e+01, -1.3371020401535507e+01, -1.3153768574557885e+01, -1.2939407333876543e+01, -1.2727745133777605e+01, -1.2518609948312903e+01, -1.2311846533891678e+01, -1.2107314169569506e+01, -1.1904884776447382e+01, -1.1704441340847984e+01, -1.1505876583067728e+01, -1.1309091826281094e+01, -1.1113996029812375e+01, -1.0920504958337819e+01, -1.0728540464237067e+01, -1.0538029864706095e+01, -1.0348905398685391e+01, -1.0161103751374336e+01, -9.9745656362640407e+00, -9.7892354263520396e+00, -9.6050608275979510e+00, -9.4219925888112019e+00, -9.2399842430857984e+00, -9.0589918766548454e+00, -8.8789739216625421e+00, -8.6998909698693474e+00, -8.5217056047373152e+00, -8.3443822497033171e+00, -8.1678870307509044e+00, -7.9921876516470487e+00, -7.8172532804264092e+00, -7.6430544458895735e+00, -7.4695629430384693e+00, -7.2967517465063425e+00, -7.1245949311547765e+00, -6.9530675991094046e+00, -6.7821458125915717e+00, -6.6118065319773942e+00, -6.4420275585800111e+00, -6.2727874817069420e+00, -6.1040656295934008e+00, -5.9358420238552689e+00, -5.7680973371429536e+00, -5.6008128537104387e+00, -5.4339704326429148e+00, -5.2675524735120289e+00, -5.1015418842505529e+00, -4.9359220510583919e+00, -4.7706768101697055e+00, -4.6057904213268381e+00, -4.4412475428209168e+00, -4.2770332079715754e+00, -4.1131328029295950e+00, -3.9495320456963339e+00, -3.7862169662628560e+00, -3.6231738877798154e+00, -3.4603894086764226e+00, -3.2978503856533741e+00, -3.1355439174805326e+00, -2.9734573295354325e+00, -2.8115781590234925e+00, -2.6498941408250944e+00, -2.4883931939185926e+00, -2.3270634083318020e+00, -2.1658930325776748e+00, -2.0048704615326960e+00, -1.8439842247190983e+00, -1.6832229749542722e+00, -1.5225754773327997e+00, -1.3620305985083838e+00, -1.2015772962445594e+00, -1.0412046092045326e+00, -8.8090164695175288e-01, -7.2065758013394154e-01, -5.6046163082425071e-01, -4.0030306299404111e-01, -2.4017117309244904e-01, -8.0055280708449292e-02, 8.0055280708449292e-02, 2.4017117309244904e-01, 4.0030306299404111e-01, 5.6046163082425071e-01, 7.2065758013394154e-01, 8.8090164695175288e-01, 1.0412046092045326e+00, 1.2015772962445594e+00, 1.3620305985083838e+00, 1.5225754773327997e+00, 1.6832229749542722e+00, 1.8439842247190983e+00, 2.0048704615326960e+00, 2.1658930325776748e+00, 2.3270634083318020e+00, 2.4883931939185926e+00, 2.6498941408250944e+00, 2.8115781590234925e+00, 2.9734573295354325e+00, 3.1355439174805326e+00, 3.2978503856533741e+00, 3.4603894086764226e+00, 3.6231738877798154e+00, 3.7862169662628560e+00, 3.9495320456963339e+00, 4.1131328029295950e+00, 4.2770332079715754e+00, 4.4412475428209168e+00, 4.6057904213268381e+00, 4.7706768101697055e+00, 4.9359220510583919e+00, 5.1015418842505529e+00, 5.2675524735120289e+00, 5.4339704326429148e+00, 5.6008128537104387e+00, 5.7680973371429536e+00, 5.9358420238552689e+00, 6.1040656295934008e+00, 6.2727874817069420e+00, 6.4420275585800111e+00, 6.6118065319773942e+00, 6.7821458125915717e+00, 6.9530675991094046e+00, 
7.1245949311547765e+00, 7.2967517465063425e+00, 7.4695629430384693e+00, 7.6430544458895735e+00, 7.8172532804264092e+00, 7.9921876516470487e+00, 8.1678870307509044e+00, 8.3443822497033171e+00, 8.5217056047373152e+00, 8.6998909698693474e+00, 8.8789739216625421e+00, 9.0589918766548454e+00, 9.2399842430857984e+00, 9.4219925888112019e+00, 9.6050608275979510e+00, 9.7892354263520396e+00, 9.9745656362640407e+00, 1.0161103751374336e+01, 1.0348905398685391e+01, 1.0538029864706095e+01, 1.0728540464237067e+01, 1.0920504958337819e+01, 1.1113996029812375e+01, 1.1309091826281094e+01, 1.1505876583067728e+01, 1.1704441340847984e+01, 1.1904884776447382e+01, 1.2107314169569506e+01, 1.2311846533891678e+01, 1.2518609948312903e+01, 1.2727745133777605e+01, 1.2939407333876543e+01, 1.3153768574557885e+01, 1.3371020401535507e+01, 1.3591377225971019e+01, 1.3815080453660714e+01, 1.4042403636302666e+01, 1.4273658974878348e+01, 1.4509205639853443e+01, 1.4749460575583919e+01, 1.4994912768931876e+01, 1.5246142457847222e+01, 1.5503847567102451e+01, 1.5768881035760351e+01, 1.6042305140399389e+01, 1.6325473460556140e+01, 1.6620160116009828e+01, 1.6928775032235986e+01, 1.7254748687059873e+01, 1.7603287762782053e+01, 1.7983071422125320e+01, 1.8410937847399730e+01, 1.8930945861570141e+01}, + {-1.8982449038629166e+01, -1.8462907814661030e+01, -1.8035433169700678e+01, -1.7656003265287762e+01, -1.7307793941303686e+01, -1.6982133215169227e+01, -1.6673818688140226e+01, -1.6379422691596499e+01, -1.6096537266100814e+01, -1.5823389734978102e+01, -1.5558627617427760e+01, -1.5301189498098173e+01, -1.5050223129207568e+01, -1.4805031151963790e+01, -1.4565033799686276e+01, -1.4329742483675762e+01, -1.4098740600289259e+01, -1.3871669273924994e+01, -1.3648216561379414e+01, -1.3428109138379162e+01, -1.3211105801450707e+01, -1.2996992320804354e+01, -1.2785577314468604e+01, -1.2576688905294764e+01, -1.2370171985743601e+01, -1.2165885959983921e+01, -1.1963702864796298e+01, -1.1763505794009992e+01, -1.1565187568319024e+01, -1.1368649605090505e+01, -1.1173800952409410e+01, -1.0980557458945643e+01, -1.0788841056880692e+01, -1.0598579139520853e+01, -1.0409704018662802e+01, -1.0222152449492267e+01, -1.0035865212956141e+01, -9.8507867472781321e+00, -9.6668648216825712e+00, -9.4840502465221892e+00, -9.3022966149286930e+00, -9.1215600718621754e+00, -8.9417991070598717e+00, -8.7629743689023787e+00, -8.5850484966463263e+00, -8.4079859688330369e+00, -8.2317529659853896e+00, -8.0563172459604981e+00, -7.8816480305419629e+00, -7.7077159020391406e+00, -7.5344927088175595e+00, -7.3619514788185860e+00, -7.1900663402415486e+00, -7.0188124486605776e+00, -6.8481659199339457e+00, -6.6781037683378583e+00, -6.5086038494209282e+00, -6.3396448071316529e+00, -6.1712060248201084e+00, -6.0032675797578730e+00, -5.8358102008577646e+00, -5.6688152293079357e+00, -5.5022645818639999e+00, -5.3361407165684911e+00, -5.1704266006896686e+00, -5.0051056806918259e+00, -4.8401618540670963e+00, -4.6755794428746595e+00, -4.5113431688473895e+00, -4.3474381299386442e+00, -4.1838497781931396e+00, -4.0205638988360004e+00, -3.8575665904830925e+00, -3.6948442463838789e+00, -3.5323835366153280e+00, -3.3701713911519686e+00, -3.2081949837430650e+00, -3.0464417165332232e+00, -2.8848992053675047e+00, -2.7235552657264601e+00, -2.5623978992403580e+00, -2.4014152807354292e+00, -2.2405957457680694e+00, -2.0799277786058252e+00, -1.9194000006165373e+00, -1.7590011590293138e+00, -1.5987201160331019e+00, -1.4385458381804410e+00, -1.2784673860656646e+00, -1.1184739042482656e+00, -9.5855461139344544e-01, 
-7.9869879060299465e-01, -6.3889577991065161e-01, -4.7913496291691687e-01, -3.1940575953902500e-01, -1.5969761685235179e-01, -0.0000000000000000e+00, 1.5969761685235179e-01, 3.1940575953902500e-01, 4.7913496291691687e-01, 6.3889577991065161e-01, 7.9869879060299465e-01, 9.5855461139344544e-01, 1.1184739042482656e+00, 1.2784673860656646e+00, 1.4385458381804410e+00, 1.5987201160331019e+00, 1.7590011590293138e+00, 1.9194000006165373e+00, 2.0799277786058252e+00, 2.2405957457680694e+00, 2.4014152807354292e+00, 2.5623978992403580e+00, 2.7235552657264601e+00, 2.8848992053675047e+00, 3.0464417165332232e+00, 3.2081949837430650e+00, 3.3701713911519686e+00, 3.5323835366153280e+00, 3.6948442463838789e+00, 3.8575665904830925e+00, 4.0205638988360004e+00, 4.1838497781931396e+00, 4.3474381299386442e+00, 4.5113431688473895e+00, 4.6755794428746595e+00, 4.8401618540670963e+00, 5.0051056806918259e+00, 5.1704266006896686e+00, 5.3361407165684911e+00, 5.5022645818639999e+00, 5.6688152293079357e+00, 5.8358102008577646e+00, 6.0032675797578730e+00, 6.1712060248201084e+00, 6.3396448071316529e+00, 6.5086038494209282e+00, 6.6781037683378583e+00, 6.8481659199339457e+00, 7.0188124486605776e+00, 7.1900663402415486e+00, 7.3619514788185860e+00, 7.5344927088175595e+00, 7.7077159020391406e+00, 7.8816480305419629e+00, 8.0563172459604981e+00, 8.2317529659853896e+00, 8.4079859688330369e+00, 8.5850484966463263e+00, 8.7629743689023787e+00, 8.9417991070598717e+00, 9.1215600718621754e+00, 9.3022966149286930e+00, 9.4840502465221892e+00, 9.6668648216825712e+00, 9.8507867472781321e+00, 1.0035865212956141e+01, 1.0222152449492267e+01, 1.0409704018662802e+01, 1.0598579139520853e+01, 1.0788841056880692e+01, 1.0980557458945643e+01, 1.1173800952409410e+01, 1.1368649605090505e+01, 1.1565187568319024e+01, 1.1763505794009992e+01, 1.1963702864796298e+01, 1.2165885959983921e+01, 1.2370171985743601e+01, 1.2576688905294764e+01, 1.2785577314468604e+01, 1.2996992320804354e+01, 1.3211105801450707e+01, 1.3428109138379162e+01, 1.3648216561379414e+01, 1.3871669273924994e+01, 1.4098740600289259e+01, 1.4329742483675762e+01, 1.4565033799686276e+01, 1.4805031151963790e+01, 1.5050223129207568e+01, 1.5301189498098173e+01, 1.5558627617427760e+01, 1.5823389734978102e+01, 1.6096537266100814e+01, 1.6379422691596499e+01, 1.6673818688140226e+01, 1.6982133215169227e+01, 1.7307793941303686e+01, 1.7656003265287762e+01, 1.8035433169700678e+01, 1.8462907814661030e+01, 1.8982449038629166e+01}, + {-1.9033817193180372e+01, -1.8514739873870020e+01, -1.8087654552659949e+01, -1.7708576157582009e+01, -1.7360694468453165e+01, -1.7035344640856714e+01, -1.6727328534606457e+01, -1.6433221273899989e+01, -1.6150616853857390e+01, -1.5877744032458063e+01, -1.5613251421176075e+01, -1.5356078460791041e+01, -1.5105373590679383e+01, -1.4860440014620181e+01, -1.4620698434329638e+01, -1.4385660656810286e+01, -1.4154910416992225e+01, -1.3928089132251925e+01, -1.3704885115481808e+01, -1.3485025268316207e+01, -1.3268268588222005e+01, -1.3054401025509831e+01, -1.2843231360770561e+01, -1.2634587864551046e+01, -1.2428315564322974e+01, -1.2224273988380631e+01, -1.2022335288240422e+01, -1.1822382664331087e+01, -1.1624309036867450e+01, -1.1428015916557383e+01, -1.1233412439414929e+01, -1.1040414537288214e+01, -1.0848944221357737e+01, -1.0658928960246737e+01, -1.0470301137821307e+01, -1.0282997578470843e+01, -1.0096959129817110e+01, -9.9121302945287511e+00, -9.7284589043113154e+00, -9.5458958302732722e+00, -9.3643947247907189e+00, -9.1839117907500878e+00, -9.0044055746721732e+00, 
-8.8258367807379408e+00, -8.6481681031671602e+00, -8.4713640747611425e+00, -8.2953909297232951e+00, -8.1202164791264426e+00, -7.9458099976117715e+00, -7.7721421200878789e+00, -7.5991847473548857e+00, -7.4269109597125382e+00, -7.2552949377261493e+00, -7.0843118894232422e+00, -6.9139379832792587e+00, -6.7441502864247154e+00, -6.5749267075705307e+00, -6.4062459442041968e+00, -6.2380874336583796e+00, -6.0704313076963157e+00, -5.9032583502958440e+00, -5.7365499583469575e+00, -5.5702881050067488e+00, -5.4044553054813402e+00, -5.2390345850270315e+00, -5.0740094489830243e+00, -4.9093638546659628e+00, -4.7450821849723575e+00, -4.5811492235491675e+00, -4.4175501314054006e+00, -4.2542704248488947e+00, -4.0912959546425300e+00, -3.9286128862831946e+00, -3.7662076813149068e+00, -3.6040670795948389e+00, -3.4421780824374997e+00, -3.2805279365682800e+00, -3.1191041188228450e+00, -2.9578943215336770e+00, -2.7968864385493881e+00, -2.6360685518363063e+00, -2.4754289186153811e+00, -2.3149559589906059e+00, -2.1546382440280341e+00, -1.9944644842470249e+00, -1.8344235184876900e+00, -1.6745043031205911e+00, -1.5146959015666042e+00, -1.3549874740965486e+00, -1.1953682678816466e+00, -1.0358276072672234e+00, -8.7635488424319652e-01, -7.1693954908593016e-01, -5.5757110114689967e-01, -3.9823907976435802e-01, -2.3893305527481104e-01, -7.9642620101614783e-02, 7.9642620101614783e-02, 2.3893305527481104e-01, 3.9823907976435802e-01, 5.5757110114689967e-01, 7.1693954908593016e-01, 8.7635488424319652e-01, 1.0358276072672234e+00, 1.1953682678816466e+00, 1.3549874740965486e+00, 1.5146959015666042e+00, 1.6745043031205911e+00, 1.8344235184876900e+00, 1.9944644842470249e+00, 2.1546382440280341e+00, 2.3149559589906059e+00, 2.4754289186153811e+00, 2.6360685518363063e+00, 2.7968864385493881e+00, 2.9578943215336770e+00, 3.1191041188228450e+00, 3.2805279365682800e+00, 3.4421780824374997e+00, 3.6040670795948389e+00, 3.7662076813149068e+00, 3.9286128862831946e+00, 4.0912959546425300e+00, 4.2542704248488947e+00, 4.4175501314054006e+00, 4.5811492235491675e+00, 4.7450821849723575e+00, 4.9093638546659628e+00, 5.0740094489830243e+00, 5.2390345850270315e+00, 5.4044553054813402e+00, 5.5702881050067488e+00, 5.7365499583469575e+00, 5.9032583502958440e+00, 6.0704313076963157e+00, 6.2380874336583796e+00, 6.4062459442041968e+00, 6.5749267075705307e+00, 6.7441502864247154e+00, 6.9139379832792587e+00, 7.0843118894232422e+00, 7.2552949377261493e+00, 7.4269109597125382e+00, 7.5991847473548857e+00, 7.7721421200878789e+00, 7.9458099976117715e+00, 8.1202164791264426e+00, 8.2953909297232951e+00, 8.4713640747611425e+00, 8.6481681031671602e+00, 8.8258367807379408e+00, 9.0044055746721732e+00, 9.1839117907500878e+00, 9.3643947247907189e+00, 9.5458958302732722e+00, 9.7284589043113154e+00, 9.9121302945287511e+00, 1.0096959129817110e+01, 1.0282997578470843e+01, 1.0470301137821307e+01, 1.0658928960246737e+01, 1.0848944221357737e+01, 1.1040414537288214e+01, 1.1233412439414929e+01, 1.1428015916557383e+01, 1.1624309036867450e+01, 1.1822382664331087e+01, 1.2022335288240422e+01, 1.2224273988380631e+01, 1.2428315564322974e+01, 1.2634587864551046e+01, 1.2843231360770561e+01, 1.3054401025509831e+01, 1.3268268588222005e+01, 1.3485025268316207e+01, 1.3704885115481808e+01, 1.3928089132251925e+01, 1.4154910416992225e+01, 1.4385660656810286e+01, 1.4620698434329638e+01, 1.4860440014620181e+01, 1.5105373590679383e+01, 1.5356078460791041e+01, 1.5613251421176075e+01, 1.5877744032458063e+01, 1.6150616853857390e+01, 1.6433221273899989e+01, 1.6727328534606457e+01, 
1.7035344640856714e+01, 1.7360694468453165e+01, 1.7708576157582009e+01, 1.8087654552659949e+01, 1.8514739873870020e+01, 1.9033817193180372e+01}, + {-1.9085051378043033e+01, -1.8566435110662020e+01, -1.8139736684916269e+01, -1.7761007579700482e+01, -1.7413451433407719e+01, -1.7088410498259837e+01, -1.6780690867902603e+01, -1.6486870443143417e+01, -1.6204545162304957e+01, -1.5931945209665145e+01, -1.5667720282583909e+01, -1.5410810672934431e+01, -1.5160365503176191e+01, -1.4915688536290588e+01, -1.4676200939552615e+01, -1.4441414913235960e+01, -1.4210914528731067e+01, -1.3984341494531598e+01, -1.3761384377966619e+01, -1.3541770305082187e+01, -1.3325258472923085e+01, -1.3111635010647520e+01, -1.2900708860246835e+01, -1.2692308438876983e+01, -1.2486278907997042e+01, -1.2282479919055724e+01, -1.2080783737378024e+01, -1.1881073669101410e+01, -1.1683242733101009e+01, -1.1487192532589896e+01, -1.1292832290695909e+01, -1.1100078021646450e+01, -1.0908851814834854e+01, -1.0719081213424776e+01, -1.0530698672582092e+01, -1.0343641085134585e+01, -1.0157849364615741e+01, -9.9732680773760247e+00, -9.7898451168372347e+00, -9.6075314140949892e+00, -9.4262806799959176e+00, -9.2460491745721143e+00, -9.0667955003389675e+00, -8.8884804164791387e+00, -8.7110666713658151e+00, -8.5345188512382073e+00, -8.3588032431445285e+00, -8.1838877105226704e+00, -8.0097415800046274e+00, -7.8363355382140769e+00, -7.6636415374829738e+00, -7.4916327095468409e+00, -7.3202832863932787e+00, -7.1495685275371788e+00, -6.9794646530814948e+00, -6.8099487819964919e+00, -6.6409988751145752e+00, -6.4725936823937991e+00, -6.3047126940519806e+00, -6.1373360952161162e+00, -5.9704447237692317e+00, -5.8040200311098342e+00, -5.6380440455680922e+00, -5.4724993382485847e+00, -5.3073689910920496e+00, -5.1426365669687621e+00, -4.9782860816339154e+00, -4.8143019773913309e+00, -4.6506690983259311e+00, -4.4873726669780289e+00, -4.3243982623437809e+00, -4.1617317990962412e+00, -3.9993595079305000e+00, -3.8372679169445236e+00, -3.6754438339745814e+00, -3.5138743298107440e+00, -3.3525467222238068e+00, -3.1914485607403469e+00, -3.0305676121073946e+00, -2.8698918463925485e+00, -2.7094094236692574e+00, -2.5491086812405244e+00, -2.3889781213574715e+00, -2.2290063993920741e+00, -2.0691823124259607e+00, -1.9094947882195137e+00, -1.7499328745276050e+00, -1.5904857287301759e+00, -1.4311426077475764e+00, -1.2718928582120677e+00, -1.1127259068682478e+00, -9.5363125117633152e-01, -7.9459845009326358e-01, -6.3561711500753826e-01, -4.7667690080438158e-01, -3.1776749703859319e-01, -1.5887861919288843e-01, -0.0000000000000000e+00, 1.5887861919288843e-01, 3.1776749703859319e-01, 4.7667690080438158e-01, 6.3561711500753826e-01, 7.9459845009326358e-01, 9.5363125117633152e-01, 1.1127259068682478e+00, 1.2718928582120677e+00, 1.4311426077475764e+00, 1.5904857287301759e+00, 1.7499328745276050e+00, 1.9094947882195137e+00, 2.0691823124259607e+00, 2.2290063993920741e+00, 2.3889781213574715e+00, 2.5491086812405244e+00, 2.7094094236692574e+00, 2.8698918463925485e+00, 3.0305676121073946e+00, 3.1914485607403469e+00, 3.3525467222238068e+00, 3.5138743298107440e+00, 3.6754438339745814e+00, 3.8372679169445236e+00, 3.9993595079305000e+00, 4.1617317990962412e+00, 4.3243982623437809e+00, 4.4873726669780289e+00, 4.6506690983259311e+00, 4.8143019773913309e+00, 4.9782860816339154e+00, 5.1426365669687621e+00, 5.3073689910920496e+00, 5.4724993382485847e+00, 5.6380440455680922e+00, 5.8040200311098342e+00, 5.9704447237692317e+00, 6.1373360952161162e+00, 6.3047126940519806e+00, 
6.4725936823937991e+00, 6.6409988751145752e+00, 6.8099487819964919e+00, 6.9794646530814948e+00, 7.1495685275371788e+00, 7.3202832863932787e+00, 7.4916327095468409e+00, 7.6636415374829738e+00, 7.8363355382140769e+00, 8.0097415800046274e+00, 8.1838877105226704e+00, 8.3588032431445285e+00, 8.5345188512382073e+00, 8.7110666713658151e+00, 8.8884804164791387e+00, 9.0667955003389675e+00, 9.2460491745721143e+00, 9.4262806799959176e+00, 9.6075314140949892e+00, 9.7898451168372347e+00, 9.9732680773760247e+00, 1.0157849364615741e+01, 1.0343641085134585e+01, 1.0530698672582092e+01, 1.0719081213424776e+01, 1.0908851814834854e+01, 1.1100078021646450e+01, 1.1292832290695909e+01, 1.1487192532589896e+01, 1.1683242733101009e+01, 1.1881073669101410e+01, 1.2080783737378024e+01, 1.2282479919055724e+01, 1.2486278907997042e+01, 1.2692308438876983e+01, 1.2900708860246835e+01, 1.3111635010647520e+01, 1.3325258472923085e+01, 1.3541770305082187e+01, 1.3761384377966619e+01, 1.3984341494531598e+01, 1.4210914528731067e+01, 1.4441414913235960e+01, 1.4676200939552615e+01, 1.4915688536290588e+01, 1.5160365503176191e+01, 1.5410810672934431e+01, 1.5667720282583909e+01, 1.5931945209665145e+01, 1.6204545162304957e+01, 1.6486870443143417e+01, 1.6780690867902603e+01, 1.7088410498259837e+01, 1.7413451433407719e+01, 1.7761007579700482e+01, 1.8139736684916269e+01, 1.8566435110662020e+01, 1.9085051378043033e+01}, + {-1.9136152632446343e+01, -1.8617994596540964e+01, -1.8191680665779813e+01, -1.7813298656636132e+01, -1.7466065985601755e+01, -1.7141331960462935e+01, -1.6833906884239585e+01, -1.6540371418317267e+01, -1.6258323432990789e+01, -1.5985994530572864e+01, -1.5722035487991715e+01, -1.5465387443233377e+01, -1.5215200197812091e+01, -1.4970778070582133e+01, -1.4731542691573273e+01, -1.4497006651929539e+01, -1.4266754357415572e+01, -1.4040427805805265e+01, -1.3817715817227148e+01, -1.3598345740663659e+01, -1.3382076971394328e+01, -1.3168695816190608e+01, -1.2958011377300066e+01, -1.2749852217418379e+01, -1.2544063630985093e+01, -1.2340505391648597e+01, -1.2139049877631299e+01, -1.1939580499904540e+01, -1.1741990375159112e+01, -1.1546181198293549e+01, -1.1352062278750141e+01, -1.1159549712352600e+01, -1.0968565665937081e+01, -1.0779037756447492e+01, -1.0590898509596441e+01, -1.0404084885901707e+01, -1.0218537864062538e+01, -1.0034202073365652e+01, -9.8510254682020655e+00, -9.6689590389043136e+00, -9.4879565540345112e+00, -9.3079743300090456e+00, -9.1289710245687488e+00, -8.9509074511196953e+00, -8.7737464113997543e+00, -8.5974525442855416e+00, -8.4219921888566009e+00, -8.2473332600882276e+00, -8.0734451357601422e+00, -7.9002985533514174e+00, -7.7278655158483680e+00, -7.5561192055258362e+00, -7.3850339048770719e+00, -7.2145849239662976e+00, -7.0447485335633226e+00, -6.8755019034936238e+00, -6.7068230457014044e+00, -6.5386907615791259e+00, -6.3710845931658042e+00, -6.2039847778590458e+00, -6.0373722063233055e+00, -5.8712283833097523e+00, -5.7055353911321696e+00, -5.5402758555689235e+00, -5.3754329139836932e+00, -5.2109901854777609e+00, -5.0469317429044782e+00, -4.8832420865923929e+00, -4.7199061196376721e+00, -4.5569091246390609e+00, -4.3942367417598831e+00, -4.2318749480117184e+00, -4.0698100376633946e+00, -3.9080286036870948e+00, -3.7465175201606447e+00, -3.5852639255516232e+00, -3.4242552068148551e+00, -3.2634789842401504e+00, -3.1029230969919848e+00, -2.9425755892871246e+00, -2.7824246971601365e+00, -2.6224588357702330e+00, -2.4626665872061215e+00, -2.3030366887483824e+00, -2.1435580215515251e+00, -1.9842195997101939e+00, 
-1.8250105596761317e+00, -1.6659201499943919e+00, -1.5069377213289974e+00, -1.3480527167497827e+00, -1.1892546622534923e+00, -1.0305331574934382e+00, -8.7187786669305845e-01, -7.1327850971966389e-01, -5.5472485329544763e-01, -3.9620670232352440e-01, -2.3771389130732343e-01, -7.9236275842124657e-02, 7.9236275842124657e-02, 2.3771389130732343e-01, 3.9620670232352440e-01, 5.5472485329544763e-01, 7.1327850971966389e-01, 8.7187786669305845e-01, 1.0305331574934382e+00, 1.1892546622534923e+00, 1.3480527167497827e+00, 1.5069377213289974e+00, 1.6659201499943919e+00, 1.8250105596761317e+00, 1.9842195997101939e+00, 2.1435580215515251e+00, 2.3030366887483824e+00, 2.4626665872061215e+00, 2.6224588357702330e+00, 2.7824246971601365e+00, 2.9425755892871246e+00, 3.1029230969919848e+00, 3.2634789842401504e+00, 3.4242552068148551e+00, 3.5852639255516232e+00, 3.7465175201606447e+00, 3.9080286036870948e+00, 4.0698100376633946e+00, 4.2318749480117184e+00, 4.3942367417598831e+00, 4.5569091246390609e+00, 4.7199061196376721e+00, 4.8832420865923929e+00, 5.0469317429044782e+00, 5.2109901854777609e+00, 5.3754329139836932e+00, 5.5402758555689235e+00, 5.7055353911321696e+00, 5.8712283833097523e+00, 6.0373722063233055e+00, 6.2039847778590458e+00, 6.3710845931658042e+00, 6.5386907615791259e+00, 6.7068230457014044e+00, 6.8755019034936238e+00, 7.0447485335633226e+00, 7.2145849239662976e+00, 7.3850339048770719e+00, 7.5561192055258362e+00, 7.7278655158483680e+00, 7.9002985533514174e+00, 8.0734451357601422e+00, 8.2473332600882276e+00, 8.4219921888566009e+00, 8.5974525442855416e+00, 8.7737464113997543e+00, 8.9509074511196953e+00, 9.1289710245687488e+00, 9.3079743300090456e+00, 9.4879565540345112e+00, 9.6689590389043136e+00, 9.8510254682020655e+00, 1.0034202073365652e+01, 1.0218537864062538e+01, 1.0404084885901707e+01, 1.0590898509596441e+01, 1.0779037756447492e+01, 1.0968565665937081e+01, 1.1159549712352600e+01, 1.1352062278750141e+01, 1.1546181198293549e+01, 1.1741990375159112e+01, 1.1939580499904540e+01, 1.2139049877631299e+01, 1.2340505391648597e+01, 1.2544063630985093e+01, 1.2749852217418379e+01, 1.2958011377300066e+01, 1.3168695816190608e+01, 1.3382076971394328e+01, 1.3598345740663659e+01, 1.3817715817227148e+01, 1.4040427805805265e+01, 1.4266754357415572e+01, 1.4497006651929539e+01, 1.4731542691573273e+01, 1.4970778070582133e+01, 1.5215200197812091e+01, 1.5465387443233377e+01, 1.5722035487991715e+01, 1.5985994530572864e+01, 1.6258323432990789e+01, 1.6540371418317267e+01, 1.6833906884239585e+01, 1.7141331960462935e+01, 1.7466065985601755e+01, 1.7813298656636132e+01, 1.8191680665779813e+01, 1.8617994596540964e+01, 1.9136152632446343e+01}, + {-1.9187121982273521e+01, -1.8669419389135154e+01, -1.8243487580223828e+01, -1.7865450498614848e+01, -1.7518539259289700e+01, -1.7194110184967560e+01, -1.6886977763848531e+01, -1.6593725402038196e+01, -1.6311952890695306e+01, -1.6039893241994275e+01, -1.5776198306184138e+01, -1.5519810062439074e+01, -1.5269878987345788e+01, -1.5025709952340740e+01, -1.4786725047437830e+01, -1.4552437252279828e+01, -1.4322431304945502e+01, -1.4096349490675653e+01, -1.3873880880782780e+01, -1.3654753045730484e+01, -1.3438725577708846e+01, -1.3225584959886243e+01, -1.3015140453639436e+01, -1.2807220766151111e+01, -1.2601671323850574e+01, -1.2398352021647000e+01, -1.2197135349764665e+01, -1.1997904823149442e+01, -1.1800553655478529e+01, -1.1604983632531599e+01, -1.1411104149281471e+01, -1.1218831382380758e+01, -1.1028087575354093e+01, -1.0838800418181339e+01, -1.0650902506384885e+01, 
-1.0464330867440474e+01, -1.0279026544483804e+01, -1.0094934229009290e+01, -9.9120019356474849e+00, -9.7301807132353808e+00, -9.5494243873137332e+00, -9.3696893299405168e+00, -9.1909342533320295e+00, -9.0131200243591802e+00, -8.8362094973560179e+00, -8.6601673630569582e+00, -8.4849600117808954e+00, -8.3105554092350147e+00, -8.1369229835265475e+00, -7.9640335221538869e+00, -7.7918590779046193e+00, -7.6203728827216537e+00, -7.4495492687133176e+00, -7.2793635955820948e+00, -7.1097921838319138e+00, -6.9408122531878398e+00, -6.7724018657261427e+00, -6.6045398732686005e+00, -6.4372058686436739e+00, -6.2703801404598503e+00, -6.1040436310739281e+00, -5.9381778974699042e+00, -5.7727650747931136e+00, -5.6077878423099152e+00, -5.4432293915858283e+00, -5.2790733966951038e+00, -5.1153039862925711e+00, -4.9519057173944132e+00, -4.7888635507286939e+00, -4.6261628275290461e+00, -4.4637892476562113e+00, -4.3017288489422221e+00, -4.1399679876610396e+00, -3.9784933200376109e+00, -3.8172917847145760e+00, -3.6563505861024530e+00, -3.4956571785450063e+00, -3.3351992512368578e+00, -3.1749647138352097e+00, -3.0149416827118620e+00, -2.8551184677956569e+00, -2.6954835599590137e+00, -2.5360256189054007e+00, -2.3767334615175160e+00, -2.2175960506285199e+00, -2.0586024841810624e+00, -1.8997419847409271e+00, -1.7410038893340725e+00, -1.5823776395775344e+00, -1.4238527720762202e+00, -1.2654189090589871e+00, -1.1070657492286282e+00, -9.4878305880147162e-01, -7.9056066271324643e-01, -6.3238843596870120e-01, -4.7425629511315831e-01, -3.1615418980479731e-01, -1.5807209446693934e-01, -0.0000000000000000e+00, 1.5807209446693934e-01, 3.1615418980479731e-01, 4.7425629511315831e-01, 6.3238843596870120e-01, 7.9056066271324643e-01, 9.4878305880147162e-01, 1.1070657492286282e+00, 1.2654189090589871e+00, 1.4238527720762202e+00, 1.5823776395775344e+00, 1.7410038893340725e+00, 1.8997419847409271e+00, 2.0586024841810624e+00, 2.2175960506285199e+00, 2.3767334615175160e+00, 2.5360256189054007e+00, 2.6954835599590137e+00, 2.8551184677956569e+00, 3.0149416827118620e+00, 3.1749647138352097e+00, 3.3351992512368578e+00, 3.4956571785450063e+00, 3.6563505861024530e+00, 3.8172917847145760e+00, 3.9784933200376109e+00, 4.1399679876610396e+00, 4.3017288489422221e+00, 4.4637892476562113e+00, 4.6261628275290461e+00, 4.7888635507286939e+00, 4.9519057173944132e+00, 5.1153039862925711e+00, 5.2790733966951038e+00, 5.4432293915858283e+00, 5.6077878423099152e+00, 5.7727650747931136e+00, 5.9381778974699042e+00, 6.1040436310739281e+00, 6.2703801404598503e+00, 6.4372058686436739e+00, 6.6045398732686005e+00, 6.7724018657261427e+00, 6.9408122531878398e+00, 7.1097921838319138e+00, 7.2793635955820948e+00, 7.4495492687133176e+00, 7.6203728827216537e+00, 7.7918590779046193e+00, 7.9640335221538869e+00, 8.1369229835265475e+00, 8.3105554092350147e+00, 8.4849600117808954e+00, 8.6601673630569582e+00, 8.8362094973560179e+00, 9.0131200243591802e+00, 9.1909342533320295e+00, 9.3696893299405168e+00, 9.5494243873137332e+00, 9.7301807132353808e+00, 9.9120019356474849e+00, 1.0094934229009290e+01, 1.0279026544483804e+01, 1.0464330867440474e+01, 1.0650902506384885e+01, 1.0838800418181339e+01, 1.1028087575354093e+01, 1.1218831382380758e+01, 1.1411104149281471e+01, 1.1604983632531599e+01, 1.1800553655478529e+01, 1.1997904823149442e+01, 1.2197135349764665e+01, 1.2398352021647000e+01, 1.2601671323850574e+01, 1.2807220766151111e+01, 1.3015140453639436e+01, 1.3225584959886243e+01, 1.3438725577708846e+01, 1.3654753045730484e+01, 1.3873880880782780e+01, 1.4096349490675653e+01, 
1.4322431304945502e+01, 1.4552437252279828e+01, 1.4786725047437830e+01, 1.5025709952340740e+01, 1.5269878987345788e+01, 1.5519810062439074e+01, 1.5776198306184138e+01, 1.6039893241994275e+01, 1.6311952890695306e+01, 1.6593725402038196e+01, 1.6886977763848531e+01, 1.7194110184967560e+01, 1.7518539259289700e+01, 1.7865450498614848e+01, 1.8243487580223828e+01, 1.8669419389135154e+01, 1.9187121982273521e+01}, + {-1.9237960440300323e+01, -1.8720710532447114e+01, -1.8295158499144485e+01, -1.7917464201364730e+01, -1.7570872373824393e+01, -1.7246746313979781e+01, -1.6939904671276931e+01, -1.6646933580854089e+01, -1.6365434743746000e+01, -1.6093642573904912e+01, -1.5830209988722080e+01, -1.5574079803690140e+01, -1.5324403166531194e+01, -1.5080485498011255e+01, -1.4841749345390438e+01, -1.4607708074467340e+01, -1.4377946753600369e+01, -1.4152107953707038e+01, -1.3929880995689542e+01, -1.3710993670057162e+01, -1.3495205764604879e+01, -1.3282303937699215e+01, -1.3072097608735946e+01, -1.2864415628348308e+01, -1.2659103553980440e+01, -1.2456021400878894e+01, -1.2255041770389502e+01, -1.2056048280588383e+01, -1.1858934241324912e+01, -1.1663601528470521e+01, -1.1469959621759649e+01, -1.1277924777921493e+01, -1.1087419316430047e+01, -1.0898370999572231e+01, -1.0710712491958864e+01, -1.0524380887307489e+01, -1.0339317292477098e+01, -1.0155466460457681e+01, -9.9727764654064881e+00, -9.7911984139496884e+00, -9.6106861878874543e+00, -9.4311962141946886e+00, -9.2526872588317186e+00, -9.0751202413947372e+00, -8.8984580680650804e+00, -8.7226654806754844e+00, -8.5477089200129779e+00, -8.3735564017325164e+00, -8.2001774034706933e+00, -8.0275427619319260e+00, -7.8556245788755001e+00, -7.6843961350654411e+00, -7.5138318113597080e+00, -7.3439070162139659e+00, -7.1745981189604136e+00, -7.0058823882959311e+00, -6.8377379354779668e+00, -6.6701436617824124e+00, -6.5030792098264039e+00, -6.3365249184017198e+00, -6.1704617805017721e+00, -6.0048714042581519e+00, -5.8397359765316308e+00, -5.6750382289281065e+00, -5.5107614060326373e+00, -5.3468892356747437e+00, -5.1834059010560054e+00, -5.0202960145867985e+00, -4.8575445932931558e+00, -4.6951370356673570e+00, -4.5330590998470743e+00, -4.3712968830180499e+00, -4.2098368019442765e+00, -4.0486655745377966e+00, -3.8877702023875300e+00, -3.7271379541730876e+00, -3.5667563498954848e+00, -3.4066131458619480e+00, -3.2466963203668557e+00, -3.0869940600152046e+00, -2.9274947466388688e+00, -2.7681869447595400e+00, -2.6090593895553744e+00, -2.4501009752913179e+00, -2.2913007441756847e+00, -2.1326478756079257e+00, -1.9741316757846794e+00, -1.8157415676330977e+00, -1.6574670810421992e+00, -1.4992978433645401e+00, -1.3412235701618946e+00, -1.1832340561698793e+00, -1.0253191664575543e+00, -8.6746882775900824e-01, -7.0967301995478616e-01, -5.5192176768174561e-01, -3.9420513205055796e-01, -2.3651320245058799e-01, -7.8836088422303746e-02, 7.8836088422303746e-02, 2.3651320245058799e-01, 3.9420513205055796e-01, 5.5192176768174561e-01, 7.0967301995478616e-01, 8.6746882775900824e-01, 1.0253191664575543e+00, 1.1832340561698793e+00, 1.3412235701618946e+00, 1.4992978433645401e+00, 1.6574670810421992e+00, 1.8157415676330977e+00, 1.9741316757846794e+00, 2.1326478756079257e+00, 2.2913007441756847e+00, 2.4501009752913179e+00, 2.6090593895553744e+00, 2.7681869447595400e+00, 2.9274947466388688e+00, 3.0869940600152046e+00, 3.2466963203668557e+00, 3.4066131458619480e+00, 3.5667563498954848e+00, 3.7271379541730876e+00, 3.8877702023875300e+00, 4.0486655745377966e+00, 4.2098368019442765e+00, 
4.3712968830180499e+00, 4.5330590998470743e+00, 4.6951370356673570e+00, 4.8575445932931558e+00, 5.0202960145867985e+00, 5.1834059010560054e+00, 5.3468892356747437e+00, 5.5107614060326373e+00, 5.6750382289281065e+00, 5.8397359765316308e+00, 6.0048714042581519e+00, 6.1704617805017721e+00, 6.3365249184017198e+00, 6.5030792098264039e+00, 6.6701436617824124e+00, 6.8377379354779668e+00, 7.0058823882959311e+00, 7.1745981189604136e+00, 7.3439070162139659e+00, 7.5138318113597080e+00, 7.6843961350654411e+00, 7.8556245788755001e+00, 8.0275427619319260e+00, 8.2001774034706933e+00, 8.3735564017325164e+00, 8.5477089200129779e+00, 8.7226654806754844e+00, 8.8984580680650804e+00, 9.0751202413947372e+00, 9.2526872588317186e+00, 9.4311962141946886e+00, 9.6106861878874543e+00, 9.7911984139496884e+00, 9.9727764654064881e+00, 1.0155466460457681e+01, 1.0339317292477098e+01, 1.0524380887307489e+01, 1.0710712491958864e+01, 1.0898370999572231e+01, 1.1087419316430047e+01, 1.1277924777921493e+01, 1.1469959621759649e+01, 1.1663601528470521e+01, 1.1858934241324912e+01, 1.2056048280588383e+01, 1.2255041770389502e+01, 1.2456021400878894e+01, 1.2659103553980440e+01, 1.2864415628348308e+01, 1.3072097608735946e+01, 1.3282303937699215e+01, 1.3495205764604879e+01, 1.3710993670057162e+01, 1.3929880995689542e+01, 1.4152107953707038e+01, 1.4377946753600369e+01, 1.4607708074467340e+01, 1.4841749345390438e+01, 1.5080485498011255e+01, 1.5324403166531194e+01, 1.5574079803690140e+01, 1.5830209988722080e+01, 1.6093642573904912e+01, 1.6365434743746000e+01, 1.6646933580854089e+01, 1.6939904671276931e+01, 1.7246746313979781e+01, 1.7570872373824393e+01, 1.7917464201364730e+01, 1.8295158499144485e+01, 1.8720710532447114e+01, 1.9237960440300323e+01}, + {-1.9288669006428073e+01, -1.8771869057097732e+01, -1.8346694479614712e+01, -1.7969340846379055e+01, -1.7623066433928877e+01, -1.7299241474690692e+01, -1.6992688755677822e+01, -1.6699997125541856e+01, -1.6418770184323929e+01, -1.6147243739757997e+01, -1.5884071770266710e+01, -1.5628197922845580e+01, -1.5378774012459481e+01, -1.5135106005988732e+01, -1.4896616905233879e+01, -1.4662820459834590e+01, -1.4433302066419493e+01, -1.4207704579815282e+01, -1.3985717568940393e+01, -1.3767069042933601e+01, -1.3551518983907220e+01, -1.3338854224244356e+01, -1.3128884340266051e+01, -1.2921438325035480e+01, -1.2716361866052114e+01, -1.2513515097991551e+01, -1.2312770732455700e+01, -1.2114012489821327e+01, -1.1917133775310306e+01, -1.1722036554111044e+01, -1.1528630389965276e+01, -1.1336831618941112e+01, -1.1146562635737574e+01, -1.0957751274234692e+01, -1.0770330267425512e+01, -1.0584236774568581e+01, -1.0399411965548676e+01, -1.0215800654155213e+01, -1.0033350973375560e+01, -9.8520140869264541e+00, -9.6717439321652883e+00, -9.4924969902767238e+00, -9.3142320802515925e+00, -9.1369101736902376e+00, -8.9604942278913082e+00, -8.7849490350458534e+00, -8.6102410856578757e+00, -8.4363384445666423e+00, -8.2632106381612598e+00, -8.0908285515608398e+00, -7.9191643346894995e+00, -7.7481913163088629e+00, -7.5778839251852919e+00, -7.4082176176676109e+00, -7.2391688110363521e+00, -7.0707148220592551e+00, -6.9028338102518392e+00, -6.7355047253976705e+00, -6.5687072589316280e+00, -6.4024217988321039e+00, -6.2366293877054586e+00, -6.0713116837789416e+00, -5.9064509245471877e+00, -5.7420298928430569e+00, -5.5780318851261201e+00, -5.4144406818022057e+00, -5.2512405194051937e+00, -5.0884160644880927e+00, -4.9259523890845678e+00, -4.7638349476146598e+00, -4.6020495551197111e+00, -4.4405823667216309e+00, 
-4.2794198582106002e+00, -4.1185488076735286e+00, -3.9579562780827859e+00, -3.7976296007713630e+00, -3.6375563597264895e+00, -3.4777243766391046e+00, -3.3181216966513536e+00, -3.1587365747486800e+00, -2.9995574627469606e+00, -2.8405729968287345e+00, -2.6817719855857511e+00, -2.5231433985279939e+00, -2.3646763550219534e+00, -2.2063601136233109e+00, -2.0481840617713236e+00, -1.8901377058141628e+00, -1.7322106613361827e+00, -1.5743926437596774e+00, -1.4166734591950965e+00, -1.2590429955149371e+00, -1.1014912136276596e+00, -9.4400813892896185e-01, -7.8658385290862076e-01, -6.2920848489186076e-01, -4.7187220389486334e-01, -3.1456521057457892e-01, -1.5727772925344838e-01, -0.0000000000000000e+00, 1.5727772925344838e-01, 3.1456521057457892e-01, 4.7187220389486334e-01, 6.2920848489186076e-01, 7.8658385290862076e-01, 9.4400813892896185e-01, 1.1014912136276596e+00, 1.2590429955149371e+00, 1.4166734591950965e+00, 1.5743926437596774e+00, 1.7322106613361827e+00, 1.8901377058141628e+00, 2.0481840617713236e+00, 2.2063601136233109e+00, 2.3646763550219534e+00, 2.5231433985279939e+00, 2.6817719855857511e+00, 2.8405729968287345e+00, 2.9995574627469606e+00, 3.1587365747486800e+00, 3.3181216966513536e+00, 3.4777243766391046e+00, 3.6375563597264895e+00, 3.7976296007713630e+00, 3.9579562780827859e+00, 4.1185488076735286e+00, 4.2794198582106002e+00, 4.4405823667216309e+00, 4.6020495551197111e+00, 4.7638349476146598e+00, 4.9259523890845678e+00, 5.0884160644880927e+00, 5.2512405194051937e+00, 5.4144406818022057e+00, 5.5780318851261201e+00, 5.7420298928430569e+00, 5.9064509245471877e+00, 6.0713116837789416e+00, 6.2366293877054586e+00, 6.4024217988321039e+00, 6.5687072589316280e+00, 6.7355047253976705e+00, 6.9028338102518392e+00, 7.0707148220592551e+00, 7.2391688110363521e+00, 7.4082176176676109e+00, 7.5778839251852919e+00, 7.7481913163088629e+00, 7.9191643346894995e+00, 8.0908285515608398e+00, 8.2632106381612598e+00, 8.4363384445666423e+00, 8.6102410856578757e+00, 8.7849490350458534e+00, 8.9604942278913082e+00, 9.1369101736902376e+00, 9.3142320802515925e+00, 9.4924969902767238e+00, 9.6717439321652883e+00, 9.8520140869264541e+00, 1.0033350973375560e+01, 1.0215800654155213e+01, 1.0399411965548676e+01, 1.0584236774568581e+01, 1.0770330267425512e+01, 1.0957751274234692e+01, 1.1146562635737574e+01, 1.1336831618941112e+01, 1.1528630389965276e+01, 1.1722036554111044e+01, 1.1917133775310306e+01, 1.2114012489821327e+01, 1.2312770732455700e+01, 1.2513515097991551e+01, 1.2716361866052114e+01, 1.2921438325035480e+01, 1.3128884340266051e+01, 1.3338854224244356e+01, 1.3551518983907220e+01, 1.3767069042933601e+01, 1.3985717568940393e+01, 1.4207704579815282e+01, 1.4433302066419493e+01, 1.4662820459834590e+01, 1.4896616905233879e+01, 1.5135106005988732e+01, 1.5378774012459481e+01, 1.5628197922845580e+01, 1.5884071770266710e+01, 1.6147243739757997e+01, 1.6418770184323929e+01, 1.6699997125541856e+01, 1.6992688755677822e+01, 1.7299241474690692e+01, 1.7623066433928877e+01, 1.7969340846379055e+01, 1.8346694479614712e+01, 1.8771869057097732e+01, 1.9288669006428073e+01}, + {-1.9339248667911409e+01, -1.8822895980564734e+01, -1.8398096565132178e+01, -1.8021081501173168e+01, -1.7675122529961925e+01, -1.7351596779550405e+01, -1.7045331151092153e+01, -1.6752917191398179e+01, -1.6471960388762881e+01, -1.6200697936792103e+01, -1.5937784868895722e+01, -1.5682165658809755e+01, -1.5432992784892868e+01, -1.5189572756961194e+01, -1.4951329028681464e+01, -1.4717775731247299e+01, -1.4488498587572698e+01, -1.4263140734648266e+01, -1.4041391987855551e+01, 
-1.3822980573565543e+01, -1.3607666666938064e+01, -1.3395237273208020e+01, -1.3185502124544024e+01, -1.2978290355434069e+01, -1.2773447782488505e+01, -1.2570834658918320e+01, -1.2370323805730495e+01, -1.2171799044787127e+01, -1.1975153875897130e+01, -1.1780290352805242e+01, -1.1587118122520339e+01, -1.1395553599726039e+01, -1.1205519253636435e+01, -1.1016942989025184e+01, -1.0829757606576079e+01, -1.0643900330402785e+01, -1.0459312392733535e+01, -1.0275938667476471e+01, -1.0093727345768286e+01, -9.9126296477337945e+00, -9.7325995656019266e+00, -9.5535936340768206e+00, -9.3755707244836426e+00, -9.1984918597235552e+00, -9.0223200475008358e+00, -8.8470201296436706e+00, -8.6725586456412369e+00, -8.4989037087736055e+00, -8.3260248934261227e+00, -8.1538931323624819e+00, -7.9824806228866425e+00, -7.8117607409569434e+00, -7.6417079624302628e+00, -7.4722977907127222e+00, -7.3035066901782200e+00, -7.1353120247902524e+00, -6.9676920014260446e+00, -6.8006256174581203e+00, -6.6340926121968646e+00, -6.4680734218403675e+00, -6.3025491376151397e+00, -6.1375014668241503e+00, -5.9729126965475468e+00, -5.8087656597670589e+00, -5.6450437037075716e+00, -5.4817306602094709e+00, -5.3188108179631426e+00, -5.1562688964529020e+00, -4.9940900214715160e+00, -4.8322597020794342e+00, -4.6707638088937005e+00, -4.5095885536020024e+00, -4.3487204696060182e+00, -4.1881463937064671e+00, -4.0278534487497097e+00, -3.8678290271620543e+00, -3.7080607753039518e+00, -3.5485365785817580e+00, -3.3892445472592136e+00, -3.2301730029155085e+00, -3.0713104655004981e+00, -2.9126456409412849e+00, -2.7541674092575419e+00, -2.5958648131459636e+00, -2.4377270469968155e+00, -2.2797434463078550e+00, -2.1219034774632650e+00, -1.9641967278469044e+00, -1.8066128962612460e+00, -1.6491417836246915e+00, -1.4917732839215128e+00, -1.3344973753799896e+00, -1.1773041118552243e+00, -1.0201836143944896e+00, -8.6312606296338723e-01, -7.0612168831240074e-01, -5.4916076396359503e-01, -3.9223359829815591e-01, -2.3533052672581237e-01, -7.8441903917412792e-02, 7.8441903917412792e-02, 2.3533052672581237e-01, 3.9223359829815591e-01, 5.4916076396359503e-01, 7.0612168831240074e-01, 8.6312606296338723e-01, 1.0201836143944896e+00, 1.1773041118552243e+00, 1.3344973753799896e+00, 1.4917732839215128e+00, 1.6491417836246915e+00, 1.8066128962612460e+00, 1.9641967278469044e+00, 2.1219034774632650e+00, 2.2797434463078550e+00, 2.4377270469968155e+00, 2.5958648131459636e+00, 2.7541674092575419e+00, 2.9126456409412849e+00, 3.0713104655004981e+00, 3.2301730029155085e+00, 3.3892445472592136e+00, 3.5485365785817580e+00, 3.7080607753039518e+00, 3.8678290271620543e+00, 4.0278534487497097e+00, 4.1881463937064671e+00, 4.3487204696060182e+00, 4.5095885536020024e+00, 4.6707638088937005e+00, 4.8322597020794342e+00, 4.9940900214715160e+00, 5.1562688964529020e+00, 5.3188108179631426e+00, 5.4817306602094709e+00, 5.6450437037075716e+00, 5.8087656597670589e+00, 5.9729126965475468e+00, 6.1375014668241503e+00, 6.3025491376151397e+00, 6.4680734218403675e+00, 6.6340926121968646e+00, 6.8006256174581203e+00, 6.9676920014260446e+00, 7.1353120247902524e+00, 7.3035066901782200e+00, 7.4722977907127222e+00, 7.6417079624302628e+00, 7.8117607409569434e+00, 7.9824806228866425e+00, 8.1538931323624819e+00, 8.3260248934261227e+00, 8.4989037087736055e+00, 8.6725586456412369e+00, 8.8470201296436706e+00, 9.0223200475008358e+00, 9.1984918597235552e+00, 9.3755707244836426e+00, 9.5535936340768206e+00, 9.7325995656019266e+00, 9.9126296477337945e+00, 1.0093727345768286e+01, 1.0275938667476471e+01, 
1.0459312392733535e+01, 1.0643900330402785e+01, 1.0829757606576079e+01, 1.1016942989025184e+01, 1.1205519253636435e+01, 1.1395553599726039e+01, 1.1587118122520339e+01, 1.1780290352805242e+01, 1.1975153875897130e+01, 1.2171799044787127e+01, 1.2370323805730495e+01, 1.2570834658918320e+01, 1.2773447782488505e+01, 1.2978290355434069e+01, 1.3185502124544024e+01, 1.3395237273208020e+01, 1.3607666666938064e+01, 1.3822980573565543e+01, 1.4041391987855551e+01, 1.4263140734648266e+01, 1.4488498587572698e+01, 1.4717775731247299e+01, 1.4951329028681464e+01, 1.5189572756961194e+01, 1.5432992784892868e+01, 1.5682165658809755e+01, 1.5937784868895722e+01, 1.6200697936792103e+01, 1.6471960388762881e+01, 1.6752917191398179e+01, 1.7045331151092153e+01, 1.7351596779550405e+01, 1.7675122529961925e+01, 1.8021081501173168e+01, 1.8398096565132178e+01, 1.8822895980564734e+01, 1.9339248667911409e+01}, +} + +// wCache computed from Chebfun. +var wCacheHermite = [][]float64{ + {1.7724538509055159e+00}, + {8.8622692545275794e-01, 8.8622692545275794e-01}, + {2.9540897515091952e-01, 1.1816359006036770e+00, 2.9540897515091952e-01}, + {8.1312835447245130e-02, 8.0491409000551273e-01, 8.0491409000551273e-01, 8.1312835447245130e-02}, + {1.9953242059045879e-02, 3.9361932315224107e-01, 9.4530872048294201e-01, 3.9361932315224107e-01, 1.9953242059045879e-02}, + {4.5300099055088682e-03, 1.5706732032285645e-01, 7.2462959522439263e-01, 7.2462959522439263e-01, 1.5706732032285645e-01, 4.5300099055088682e-03}, + {9.7178124509951806e-04, 5.4515582819127072e-02, 4.2560725261012772e-01, 8.1026461755680723e-01, 4.2560725261012772e-01, 5.4515582819127072e-02, 9.7178124509951806e-04}, + {1.9960407221136889e-04, 1.7077983007413408e-02, 2.0780232581489197e-01, 6.6114701255824115e-01, 6.6114701255824115e-01, 2.0780232581489197e-01, 1.7077983007413408e-02, 1.9960407221136889e-04}, + {3.9606977263264351e-05, 4.9436242755369481e-03, 8.8474527394376584e-02, 4.3265155900255570e-01, 7.2023521560605097e-01, 4.3265155900255570e-01, 8.8474527394376584e-02, 4.9436242755369481e-03, 3.9606977263264351e-05}, + {7.6404328552325936e-06, 1.3436457467812385e-03, 3.3874394455481120e-02, 2.4013861108231432e-01, 6.1086263373532612e-01, 6.1086263373532612e-01, 2.4013861108231432e-01, 3.3874394455481120e-02, 1.3436457467812385e-03, 7.6404328552325936e-06}, + {1.4395603937142598e-06, 3.4681946632334550e-04, 1.1911395444911524e-02, 1.1722787516770818e-01, 4.2935975235612489e-01, 6.5475928691459262e-01, 4.2935975235612489e-01, 1.1722787516770818e-01, 1.1911395444911524e-02, 3.4681946632334550e-04, 1.4395603937142598e-06}, + {2.6585516843563055e-07, 8.5736870435878019e-05, 3.9053905846290599e-03, 5.1607985615883964e-02, 2.6049231026416109e-01, 5.7013523626247942e-01, 5.7013523626247942e-01, 2.6049231026416109e-01, 5.1607985615883964e-02, 3.9053905846290599e-03, 8.5736870435878019e-05, 2.6585516843563055e-07}, + {4.8257318500731284e-08, 2.0430360402707074e-05, 1.2074599927193832e-03, 2.0862775296169932e-02, 1.4032332068702327e-01, 4.2161629689854296e-01, 6.0439318792116203e-01, 4.2161629689854296e-01, 1.4032332068702327e-01, 2.0862775296169932e-02, 1.2074599927193832e-03, 2.0430360402707074e-05, 4.8257318500731284e-08}, + {8.6285911681251554e-09, 4.7164843550189453e-06, 3.5509261355192302e-04, 7.8500547264579047e-03, 6.8505534223465281e-02, 2.7310560906424641e-01, 5.3640590971209023e-01, 5.3640590971209023e-01, 2.7310560906424641e-01, 6.8505534223465281e-02, 7.8500547264579047e-03, 3.5509261355192302e-04, 4.7164843550189453e-06, 8.6285911681251554e-09}, + 
{1.5224758042535219e-09, 1.0591155477110612e-06, 1.0000444123249973e-04, 2.7780688429127598e-03, 3.0780033872546117e-02, 1.5848891579593580e-01, 4.1202868749889832e-01, 5.6410030872641814e-01, 4.1202868749889832e-01, 1.5848891579593580e-01, 3.0780033872546117e-02, 2.7780688429127598e-03, 1.0000444123249973e-04, 1.0591155477110612e-06, 1.5224758042535219e-09}, + {2.6548074740111957e-10, 2.3209808448652080e-07, 2.7118600925378922e-05, 9.3228400862418201e-04, 1.2880311535509982e-02, 8.3810041398985943e-02, 2.8064745852853384e-01, 5.0792947901661334e-01, 5.0792947901661334e-01, 2.8064745852853384e-01, 8.3810041398985943e-02, 1.2880311535509982e-02, 9.3228400862418201e-04, 2.7118600925378922e-05, 2.3209808448652080e-07, 2.6548074740111957e-10}, + {4.5805789307986523e-11, 4.9770789816307401e-08, 7.1122891400212805e-06, 2.9864328669775215e-04, 5.0673499576275429e-03, 4.0920034149756306e-02, 1.7264829767009740e-01, 4.0182646947041184e-01, 5.3091793762486328e-01, 4.0182646947041184e-01, 1.7264829767009740e-01, 4.0920034149756306e-02, 5.0673499576275429e-03, 2.9864328669775215e-04, 7.1122891400212805e-06, 4.9770789816307401e-08, 4.5805789307986523e-11}, + {7.8281997721158175e-12, 1.0467205795791975e-08, 1.8106544810934246e-06, 9.1811268679294544e-05, 1.8885226302684155e-03, 1.8640042387544645e-02, 9.7301747641315411e-02, 2.8480728566997937e-01, 4.8349569472545578e-01, 4.8349569472545578e-01, 2.8480728566997937e-01, 9.7301747641315411e-02, 1.8640042387544645e-02, 1.8885226302684155e-03, 9.1811268679294544e-05, 1.8106544810934246e-06, 1.0467205795791975e-08, 7.8281997721158175e-12}, + {1.3262970944985103e-12, 2.1630510098635541e-09, 4.4882431472231438e-07, 2.7209197763161644e-05, 6.7087752140718431e-04, 7.9888667777229892e-03, 5.0810386909052110e-02, 1.8363270130699696e-01, 3.9160898861302956e-01, 5.0297488827618764e-01, 3.9160898861302956e-01, 1.8363270130699696e-01, 5.0810386909052110e-02, 7.9888667777229892e-03, 6.7087752140718431e-04, 2.7209197763161644e-05, 4.4882431472231438e-07, 2.1630510098635541e-09, 1.3262970944985103e-12}, + {2.2293936455341389e-13, 4.3993409922731442e-10, 1.0860693707692744e-07, 7.8025564785320785e-06, 2.2833863601635308e-04, 3.2437733422378532e-03, 2.4810520887463654e-02, 1.0901720602002316e-01, 2.8667550536283426e-01, 4.6224366960061009e-01, 4.6224366960061009e-01, 2.8667550536283426e-01, 1.0901720602002316e-01, 2.4810520887463654e-02, 3.2437733422378532e-03, 2.2833863601635308e-04, 7.8025564785320785e-06, 1.0860693707692744e-07, 4.3993409922731442e-10, 2.2293936455341389e-13}, + {3.7203650688184023e-14, 8.8186112420499177e-11, 2.5712301800593246e-08, 2.1718848980566754e-06, 7.4783988673100723e-05, 1.2549820417264148e-03, 1.1414065837434373e-02, 6.0179646658912296e-02, 1.9212032406699767e-01, 3.8166907361350205e-01, 4.7902370312017756e-01, 3.8166907361350205e-01, 1.9212032406699767e-01, 6.0179646658912296e-02, 1.1414065837434373e-02, 1.2549820417264148e-03, 7.4783988673100723e-05, 2.1718848980566754e-06, 2.5712301800593246e-08, 8.8186112420499177e-11, 3.7203650688184023e-14}, + {6.1671834244041516e-15, 1.7443390075479748e-11, 5.9669909860596536e-09, 5.8842875633009930e-07, 2.3655128552510423e-05, 4.6488505088425271e-04, 4.9783993350516510e-03, 3.1140370884423854e-02, 1.1910236095878249e-01, 2.8697143324690705e-01, 4.4354522643495936e-01, 4.4354522643495936e-01, 2.8697143324690705e-01, 1.1910236095878249e-01, 3.1140370884423854e-02, 4.9783993350516510e-03, 4.6488505088425271e-04, 2.3655128552510423e-05, 5.8842875633009930e-07, 5.9669909860596536e-09, 
1.7443390075479748e-11, 6.1671834244041516e-15}, + {1.0160384616453285e-15, 3.4083140980305419e-12, 1.3596296504028875e-09, 1.5553393291457709e-07, 7.2492959180022684e-06, 1.6556169914187465e-04, 2.0695678749606356e-03, 1.5207084004484164e-02, 6.8890289429087329e-02, 1.9864489857802253e-01, 3.7214382487756476e-01, 4.5819658559321297e-01, 3.7214382487756476e-01, 1.9864489857802253e-01, 6.8890289429087329e-02, 1.5207084004484164e-02, 2.0695678749606356e-03, 1.6556169914187465e-04, 7.2492959180022684e-06, 1.5553393291457709e-07, 1.3596296504028875e-09, 3.4083140980305419e-12, 1.0160384616453285e-15}, + {1.6643684964891445e-16, 6.5846202430781003e-13, 3.0462542699875441e-10, 4.0189711749414533e-08, 2.1582457049023422e-06, 5.6886916364043827e-05, 8.2369248268841886e-04, 7.0483558100726791e-03, 3.7445470503230750e-02, 1.2773962178455914e-01, 2.8617953534644303e-01, 4.2693116386869923e-01, 4.2693116386869923e-01, 2.8617953534644303e-01, 1.2773962178455914e-01, 3.7445470503230750e-02, 7.0483558100726791e-03, 8.2369248268841886e-04, 5.6886916364043827e-05, 2.1582457049023422e-06, 4.0189711749414533e-08, 3.0462542699875441e-10, 6.5846202430781003e-13, 1.6643684964891445e-16}, + {2.7119235127587269e-17, 1.2588149877465385e-13, 6.7196384177062396e-11, 1.0170382503018480e-08, 6.2570324996911171e-07, 1.8915972957340510e-05, 3.1508363874548464e-04, 3.1157087201256362e-03, 1.9243098965408916e-02, 7.6888995175808827e-02, 2.0362113667812395e-01, 3.6308898927589056e-01, 4.3986872216948475e-01, 3.6308898927589056e-01, 2.0362113667812395e-01, 7.6888995175808827e-02, 1.9243098965408916e-02, 3.1157087201256362e-03, 3.1508363874548464e-04, 1.8915972957340510e-05, 6.2570324996911171e-07, 1.0170382503018480e-08, 6.7196384177062396e-11, 1.2588149877465385e-13, 2.7119235127587269e-17}, + {4.3969160947540168e-18, 2.3831486593721262e-14, 1.4609999339816088e-11, 2.5244940344905425e-09, 1.7701063373973562e-07, 6.1032917173960523e-06, 1.1622970160311058e-04, 1.3190647223238600e-03, 9.3979012911595897e-03, 4.3598227217250818e-02, 1.3511332791178782e-01, 2.8463224117678448e-01, 4.1204365059036946e-01, 4.1204365059036946e-01, 2.8463224117678448e-01, 1.3511332791178782e-01, 4.3598227217250818e-02, 9.3979012911595897e-03, 1.3190647223238600e-03, 1.1622970160311058e-04, 6.1032917173960523e-06, 1.7701063373973562e-07, 2.5244940344905425e-09, 1.4609999339816088e-11, 2.3831486593721262e-14, 4.3969160947540168e-18}, + {7.0957792932592773e-19, 4.4707724573930981e-15, 3.1341176136230566e-12, 6.1550315782317765e-10, 4.8954004096995851e-08, 1.9152809005953010e-06, 4.1467580043841186e-05, 5.3676961568811289e-04, 4.3812798357925394e-03, 2.3415933625341907e-02, 8.4173081084051971e-02, 2.0737048075100944e-01, 3.5451730409975396e-01, 4.2357728801505945e-01, 3.5451730409975396e-01, 2.0737048075100944e-01, 8.4173081084051971e-02, 2.3415933625341907e-02, 4.3812798357925394e-03, 5.3676961568811289e-04, 4.1467580043841186e-05, 1.9152809005953010e-06, 4.8954004096995851e-08, 6.1550315782317765e-10, 3.1341176136230566e-12, 4.4707724573930981e-15, 7.0957792932592773e-19}, + {1.1401393479036859e-19, 8.3159379512068002e-16, 6.6394367149096827e-13, 1.4758531682776914e-10, 1.3256825015417164e-08, 5.8577197209929782e-07, 1.4345504229714394e-05, 2.1061810002403312e-04, 1.9573312944089908e-03, 1.1968423214354838e-02, 4.9514889289898188e-02, 1.4139460978695487e-01, 2.8256139125938867e-01, 3.9860471782645146e-01, 3.9860471782645146e-01, 2.8256139125938867e-01, 1.4139460978695487e-01, 4.9514889289898188e-02, 1.1968423214354838e-02, 1.9573312944089908e-03, 
2.1061810002403312e-04, 1.4345504229714394e-05, 5.8577197209929782e-07, 1.3256825015417164e-08, 1.4758531682776914e-10, 6.6394367149096827e-13, 8.3159379512068002e-16, 1.1401393479036859e-19}, + {1.8244608516739659e-20, 1.5345004446053123e-16, 1.3901072714496111e-13, 3.4841301613084286e-11, 3.5203123276007155e-09, 1.7492291299499463e-07, 4.8230734976477891e-06, 7.9909203545218356e-05, 8.4079250614026418e-04, 5.8455035452715061e-03, 2.7639655592023655e-02, 9.0768842215578197e-02, 2.1014269444921063e-01, 3.4641893907167015e-01, 4.0897117463522970e-01, 3.4641893907167015e-01, 2.1014269444921063e-01, 9.0768842215578197e-02, 2.7639655592023655e-02, 5.8455035452715061e-03, 8.4079250614026418e-04, 7.9909203545218356e-05, 4.8230734976477891e-06, 1.7492291299499463e-07, 3.5203123276007155e-09, 3.4841301613084286e-11, 1.3901072714496111e-13, 1.5345004446053123e-16, 1.8244608516739659e-20}, + {2.9082547001312504e-21, 2.8103336027509541e-17, 2.8786070805487377e-14, 8.1061862974630246e-12, 9.1785804243785856e-10, 5.1085224507759746e-08, 1.5790948873247135e-06, 2.9387252289229863e-05, 3.4831012431868588e-04, 2.7379224730676621e-03, 1.4703829704826692e-02, 5.5144176870234256e-02, 1.4673584754089000e-01, 2.8013093083921248e-01, 3.8639488954181417e-01, 3.8639488954181417e-01, 2.8013093083921248e-01, 1.4673584754089000e-01, 5.5144176870234256e-02, 1.4703829704826692e-02, 2.7379224730676621e-03, 3.4831012431868588e-04, 2.9387252289229863e-05, 1.5790948873247135e-06, 5.1085224507759746e-08, 9.1785804243785856e-10, 8.1061862974630246e-12, 2.8786070805487377e-14, 2.8103336027509541e-17, 2.9082547001312504e-21}, + {4.6189683913871598e-22, 5.1106090079270951e-18, 5.8995564987539112e-15, 1.8603735214521663e-12, 2.3524920032086692e-10, 1.4611988344910622e-08, 5.0437125589397732e-07, 1.0498602757675620e-05, 1.3952090395047032e-04, 1.2336833073068893e-03, 7.4827999140352043e-03, 3.1847230731300323e-02, 9.6717948160870412e-02, 2.1213278866876470e-01, 3.3877265789410776e-01, 3.9577855609860946e-01, 3.3877265789410776e-01, 2.1213278866876470e-01, 9.6717948160870412e-02, 3.1847230731300323e-02, 7.4827999140352043e-03, 1.2336833073068893e-03, 1.3952090395047032e-04, 1.0498602757675620e-05, 5.0437125589397732e-07, 1.4611988344910622e-08, 2.3524920032086692e-10, 1.8603735214521663e-12, 5.8995564987539112e-15, 5.1106090079270951e-18, 4.6189683913871598e-22}, + {7.3106764273841909e-23, 9.2317365365181365e-19, 1.1973440170928574e-15, 4.2150102113264756e-13, 5.9332914633966556e-11, 4.0988321647708776e-09, 1.5741677925455937e-07, 3.6505851295623875e-06, 5.4165840618199778e-05, 5.3626836552797335e-04, 3.6548903266544271e-03, 1.7553428831573435e-02, 6.0458130955912612e-02, 1.5126973407664251e-01, 2.7745814230252980e-01, 3.7523835259280258e-01, 3.7523835259280258e-01, 2.7745814230252980e-01, 1.5126973407664251e-01, 6.0458130955912612e-02, 1.7553428831573435e-02, 3.6548903266544271e-03, 5.3626836552797335e-04, 5.4165840618199778e-05, 3.6505851295623875e-06, 1.5741677925455937e-07, 4.0988321647708776e-09, 5.9332914633966556e-11, 4.2150102113264756e-13, 1.1973440170928574e-15, 9.2317365365181365e-19, 7.3106764273841909e-23}, + {1.1533162176980265e-23, 1.6570947415336692e-19, 2.4077856795579959e-16, 9.4348141590150248e-14, 1.4739809370924901e-11, 1.1289222471083366e-09, 4.8077456763232087e-08, 1.2376933672012150e-06, 2.0423684051423809e-05, 2.2544277059632773e-04, 1.7184546377609299e-03, 9.2656899706852411e-03, 3.5987982318576993e-02, 1.0206907999554145e-01, 2.1349393115029186e-01, 3.3155200075074132e-01, 3.8378526651986389e-01, 
3.3155200075074132e-01, 2.1349393115029186e-01, 1.0206907999554145e-01, 3.5987982318576993e-02, 9.2656899706852411e-03, 1.7184546377609299e-03, 2.2544277059632773e-04, 2.0423684051423809e-05, 1.2376933672012150e-06, 4.8077456763232087e-08, 1.1289222471083366e-09, 1.4739809370924901e-11, 9.4348141590150248e-14, 2.4077856795579959e-16, 1.6570947415336692e-19, 1.1533162176980265e-23}, + {1.8138001119597749e-24, 2.9567089223605576e-20, 4.7999019978948293e-17, 2.0878403731160143e-14, 3.6098817474860888e-12, 3.0562520419155053e-10, 1.4387732912539959e-08, 4.0979740352245880e-07, 7.4934487833022770e-06, 9.1861189828724009e-05, 7.7981759962319987e-04, 4.6984636292665893e-03, 2.0473151727019457e-02, 6.5445134108752731e-02, 1.5511041662330927e-01, 2.7462771563513722e-01, 3.6499244699664513e-01, 3.6499244699664513e-01, 2.7462771563513722e-01, 1.5511041662330927e-01, 6.5445134108752731e-02, 2.0473151727019457e-02, 4.6984636292665893e-03, 7.7981759962319987e-04, 9.1861189828724009e-05, 7.4934487833022770e-06, 4.0979740352245880e-07, 1.4387732912539959e-08, 3.0562520419155053e-10, 3.6098817474860888e-12, 2.0878403731160143e-14, 4.7999019978948293e-17, 2.9567089223605576e-20, 1.8138001119597749e-24}, + {2.8441134634327172e-25, 5.2456527291743166e-21, 9.4898848794730163e-18, 4.5704441904182093e-15, 8.7225260125370039e-13, 8.1406956866777238e-11, 4.2239761300094312e-09, 1.3269405173066746e-07, 2.6798156546135386e-06, 3.6352769174157649e-05, 3.4234009319960727e-04, 2.2950283295840954e-03, 1.1166806590265730e-02, 4.0024775133100314e-02, 1.0687290695546242e-01, 2.1434719059607343e-01, 3.2472872157456961e-01, 3.7281997319072468e-01, 3.2472872157456961e-01, 2.1434719059607343e-01, 1.0687290695546242e-01, 4.0024775133100314e-02, 1.1166806590265730e-02, 2.2950283295840954e-03, 3.4234009319960727e-04, 3.6352769174157649e-05, 2.6798156546135386e-06, 1.3269405173066746e-07, 4.2239761300094312e-09, 8.1406956866777238e-11, 8.7225260125370039e-13, 4.5704441904182093e-15, 9.4898848794730163e-18, 5.2456527291743166e-21, 2.8441134634327172e-25}, + {4.4471534175756294e-26, 9.2564030835581952e-22, 1.8615979878080060e-18, 9.9029016869062119e-16, 2.0809126742868747e-13, 2.1353541817845700e-11, 1.2178678209557312e-09, 4.2074913231090725e-08, 9.3555700897732196e-07, 1.3997029220328986e-05, 1.4569615962636070e-04, 1.0825347566868020e-03, 5.8564259865595538e-03, 2.3425767509731743e-02, 7.0104750105177666e-02, 1.5835545375116650e-01, 2.7170124700952736e-01, 3.5554007427370893e-01, 3.5554007427370893e-01, 2.7170124700952736e-01, 1.5835545375116650e-01, 7.0104750105177666e-02, 2.3425767509731743e-02, 5.8564259865595538e-03, 1.0825347566868020e-03, 1.4569615962636070e-04, 1.3997029220328986e-05, 9.3555700897732196e-07, 4.2074913231090725e-08, 1.2178678209557312e-09, 2.1353541817845700e-11, 2.0809126742868747e-13, 9.9029016869062119e-16, 1.8615979878080060e-18, 9.2564030835581952e-22, 4.4471534175756294e-26}, + {6.9350835444886530e-27, 1.6249853271916221e-22, 3.6247269512444368e-19, 2.1248851941314958e-16, 4.9047393641852124e-14, 5.5203972646374721e-12, 3.4519062272121333e-10, 1.3079698821802362e-08, 3.1928854810032377e-07, 5.2521435771200543e-06, 6.0226951652674167e-05, 4.9417150121469438e-04, 2.9608001846029054e-03, 1.3160179977211566e-02, 4.3931362299539353e-02, 1.1117916510719518e-01, 2.1478887596478388e-01, 3.1827467975443136e-01, 3.6274375769908357e-01, 3.1827467975443136e-01, 2.1478887596478388e-01, 1.1117916510719518e-01, 4.3931362299539353e-02, 1.3160179977211566e-02, 2.9608001846029054e-03, 4.9417150121469438e-04, 
6.0226951652674167e-05, 5.2521435771200543e-06, 3.1928854810032377e-07, 1.3079698821802362e-08, 3.4519062272121333e-10, 5.5203972646374721e-12, 4.9047393641852124e-14, 2.1248851941314958e-16, 3.6247269512444368e-19, 1.6249853271916221e-22, 6.9350835444886530e-27}, + {1.0787188820741241e-27, 2.8387387532094075e-23, 7.0078433305584917e-20, 4.5173712236048158e-17, 1.1428616750256895e-14, 1.4076333438452520e-12, 9.6270292926054501e-11, 3.9906528856889178e-09, 1.0665865704409148e-07, 1.9234656867323990e-06, 2.4223415120935207e-05, 2.1875759533939115e-04, 1.4463071215627061e-03, 7.1152368261340193e-03, 2.6380533520486894e-02, 7.4443983368238764e-02, 1.6108797149548099e-01, 2.6872376002892800e-01, 3.4678411786878038e-01, 3.4678411786878038e-01, 2.6872376002892800e-01, 1.6108797149548099e-01, 7.4443983368238764e-02, 2.6380533520486894e-02, 7.1152368261340193e-03, 1.4463071215627061e-03, 2.1875759533939115e-04, 2.4223415120935207e-05, 1.9234656867323990e-06, 1.0665865704409148e-07, 3.9906528856889178e-09, 9.6270292926054501e-11, 1.4076333438452520e-12, 1.1428616750256895e-14, 4.5173712236048158e-17, 7.0078433305584917e-20, 2.8387387532094075e-23, 1.0787188820741241e-27}, + {1.6737892441539538e-28, 4.9359073375904207e-24, 1.3457268201343837e-20, 9.5193204821162674e-18, 2.6340961107224671e-15, 3.5426508019791487e-13, 2.6440048769930072e-11, 1.1961664237667613e-09, 3.4915088439108438e-08, 6.8844204093401453e-07, 9.4942995361418356e-06, 9.4077279467672325e-05, 6.8405759521855724e-04, 3.7112302317141572e-03, 1.5221980837289402e-02, 4.7690071331635359e-02, 1.1503504579976950e-01, 2.1489607677746486e-01, 3.1216284886717011e-01, 3.5344263570679940e-01, 3.1216284886717011e-01, 2.1489607677746486e-01, 1.1503504579976950e-01, 4.7690071331635359e-02, 1.5221980837289402e-02, 3.7112302317141572e-03, 6.8405759521855724e-04, 9.4077279467672325e-05, 9.4942995361418356e-06, 6.8844204093401453e-07, 3.4915088439108438e-08, 1.1961664237667613e-09, 2.6440048769930072e-11, 3.5426508019791487e-13, 2.6340961107224671e-15, 9.5193204821162674e-18, 1.3457268201343837e-20, 4.9359073375904207e-24, 1.6737892441539538e-28}, + {2.5910437138470431e-29, 8.5440569637751720e-25, 2.5675933654117104e-21, 1.9891810121165382e-18, 6.0083587894908697e-16, 8.8057076452162173e-14, 7.1565280526903396e-12, 3.5256207913654460e-10, 1.1212360832275973e-08, 2.4111441636705294e-07, 3.6315761506930362e-06, 3.9369339810924932e-05, 3.1385359454133192e-04, 1.8714968295979542e-03, 8.4608880082581352e-03, 2.9312565536172355e-02, 7.8474605865404390e-02, 1.6337873271327136e-01, 2.6572825187737692e-01, 3.3864327742558936e-01, 3.3864327742558936e-01, 2.6572825187737692e-01, 1.6337873271327136e-01, 7.8474605865404390e-02, 2.9312565536172355e-02, 8.4608880082581352e-03, 1.8714968295979542e-03, 3.1385359454133192e-04, 3.9369339810924932e-05, 3.6315761506930362e-06, 2.4111441636705294e-07, 1.1212360832275973e-08, 3.5256207913654460e-10, 7.1565280526903396e-12, 8.8057076452162173e-14, 6.0083587894908697e-16, 1.9891810121165382e-18, 2.5675933654117104e-21, 8.5440569637751720e-25, 2.5910437138470431e-29}, + {4.0019596605455888e-30, 1.4726537286520423e-25, 4.8687379360130589e-22, 4.1234085375315004e-19, 1.3569875295861202e-16, 2.1629872471750215e-14, 1.9103833646809321e-12, 1.0226798927782354e-10, 3.5356816483807076e-09, 8.2726584187498191e-08, 1.3573781404870848e-06, 1.6055965736677200e-05, 1.3993025659674152e-04, 9.1423425632870857e-04, 4.5403929873032720e-03, 1.7330881362116784e-02, 5.1289872397094663e-02, 1.1848439124684401e-01, 2.1473086449008444e-01, 
3.0636781693785120e-01, 3.4482208361638955e-01, 3.0636781693785120e-01, 2.1473086449008444e-01, 1.1848439124684401e-01, 5.1289872397094663e-02, 1.7330881362116784e-02, 4.5403929873032720e-03, 9.1423425632870857e-04, 1.3993025659674152e-04, 1.6055965736677200e-05, 1.3573781404870848e-06, 8.2726584187498191e-08, 3.5356816483807076e-09, 1.0226798927782354e-10, 1.9103833646809321e-12, 2.1629872471750215e-14, 1.3569875295861202e-16, 4.1234085375315004e-19, 4.8687379360130589e-22, 1.4726537286520423e-25, 4.0019596605455888e-30}, + {6.1678589258109773e-31, 2.5278698640534739e-26, 9.1778906956924963e-23, 8.4821520800863616e-20, 3.0358903478107461e-17, 5.2533377155686787e-15, 5.0327055821840466e-13, 2.9217288372333376e-11, 1.0958052288078504e-09, 2.7834715265490800e-08, 4.9636593935798231e-07, 6.3902459677354328e-06, 6.0719621077883790e-05, 4.3341227172125611e-04, 2.3571613945963252e-03, 9.8795240531885219e-03, 3.2202101288907867e-02, 8.2211269303293782e-02, 1.6528800127466756e-01, 2.6273890678229461e-01, 3.3104891389085667e-01, 3.3104891389085667e-01, 2.6273890678229461e-01, 1.6528800127466756e-01, 8.2211269303293782e-02, 3.2202101288907867e-02, 9.8795240531885219e-03, 2.3571613945963252e-03, 4.3341227172125611e-04, 6.0719621077883790e-05, 6.3902459677354328e-06, 4.9636593935798231e-07, 2.7834715265490800e-08, 1.0958052288078504e-09, 2.9217288372333376e-11, 5.0327055821840466e-13, 5.2533377155686787e-15, 3.0358903478107461e-17, 8.4821520800863616e-20, 9.1778906956924963e-23, 2.5278698640534739e-26, 6.1678589258109773e-31}, + {9.4863063307486296e-32, 4.3221460599337345e-27, 1.7203368260282161e-23, 1.7320753790913467e-20, 6.7308117032428195e-18, 1.2622032209758195e-15, 1.3092104166584946e-13, 8.2271061208164963e-12, 3.3407179153175535e-10, 9.1931960675404414e-09, 1.7777830445479720e-07, 2.4851377424115403e-06, 2.5680573077056677e-05, 1.9972435671322962e-04, 1.1860700927674253e-03, 5.4414552932418097e-03, 1.9468160151101737e-02, 5.4724809081874805e-02, 1.2156737988638795e-01, 2.1434349700188368e-01, 3.0086599386765978e-01, 3.3680296539275278e-01, 3.0086599386765978e-01, 2.1434349700188368e-01, 1.2156737988638795e-01, 5.4724809081874805e-02, 1.9468160151101737e-02, 5.4414552932418097e-03, 1.1860700927674253e-03, 1.9972435671322962e-04, 2.5680573077056677e-05, 2.4851377424115403e-06, 1.7777830445479720e-07, 9.1931960675404414e-09, 3.3407179153175535e-10, 8.2271061208164963e-12, 1.3092104166584946e-13, 1.2622032209758195e-15, 6.7308117032428195e-18, 1.7320753790913467e-20, 1.7203368260282161e-23, 4.3221460599337345e-27, 9.4863063307486296e-32}, + {1.4561153081762322e-32, 7.3621261042968285e-28, 3.2072216919050275e-24, 3.5121449558365656e-21, 1.4794099758171669e-18, 3.0015214029808191e-16, 3.3650356127923192e-14, 2.2848078971220373e-12, 1.0026000778909185e-10, 2.9831210190396358e-09, 6.2427245062815072e-08, 9.4545385422855902e-07, 1.0600171772517656e-05, 8.9597973640616782e-05, 5.7942043271566558e-04, 2.9012937752966299e-03, 1.1357866939069948e-02, 3.5033757109687119e-02, 8.5670187300246295e-02, 1.6686716582564590e-01, 2.5977339611588807e-01, 3.2394262884199604e-01, 3.2394262884199604e-01, 2.5977339611588807e-01, 1.6686716582564590e-01, 8.5670187300246295e-02, 3.5033757109687119e-02, 1.1357866939069948e-02, 2.9012937752966299e-03, 5.7942043271566558e-04, 8.9597973640616782e-05, 1.0600171772517656e-05, 9.4545385422855902e-07, 6.2427245062815072e-08, 2.9831210190396358e-09, 1.0026000778909185e-10, 2.2848078971220373e-12, 3.3650356127923192e-14, 3.0015214029808191e-16, 1.4794099758171669e-18, 
3.5121449558365656e-21, 3.2072216919050275e-24, 7.3621261042968285e-28, 1.4561153081762322e-32}, + {2.2308126593779868e-33, 1.2494829120559801e-28, 5.9481747159233458e-25, 7.0737246502696336e-22, 3.2248298707326610e-19, 7.0674619157542681e-17, 8.5500874663703821e-15, 6.2620383158353876e-13, 2.9642049893157670e-11, 9.5182669412336419e-10, 2.1512839074194501e-08, 3.5225192415065300e-07, 4.2754558863972689e-06, 3.9183295448819551e-05, 2.7524242709953090e-04, 1.5001278829297612e-03, 6.4070706562040147e-03, 2.1617644433765643e-02, 5.7992746631628803e-02, 1.2432050192893590e-01, 2.1377487850201790e-01, 2.9563565196662861e-01, 3.2931845505069163e-01, 2.9563565196662861e-01, 2.1377487850201790e-01, 1.2432050192893590e-01, 5.7992746631628803e-02, 2.1617644433765643e-02, 6.4070706562040147e-03, 1.5001278829297612e-03, 2.7524242709953090e-04, 3.9183295448819551e-05, 4.2754558863972689e-06, 3.5225192415065300e-07, 2.1512839074194501e-08, 9.5182669412336419e-10, 2.9642049893157670e-11, 6.2620383158353876e-13, 8.5500874663703821e-15, 7.0674619157542681e-17, 3.2248298707326610e-19, 7.0737246502696336e-22, 5.9481747159233458e-25, 1.2494829120559801e-28, 2.2308126593779868e-33}, + {3.4113792299096508e-34, 2.1132014962696289e-29, 1.0976558019968051e-25, 1.4155042016755176e-22, 6.9737995966949200e-20, 1.6484447389507407e-17, 2.1486472823678717e-15, 1.6947076167749986e-13, 8.6391402914483532e-12, 2.9885455879923381e-10, 7.2816347565639946e-09, 1.2865324130563406e-07, 1.6869468397355139e-06, 1.6725935920653945e-05, 1.2731983755106949e-04, 7.5339673454313967e-04, 3.5010578505227864e-03, 1.2883487559777229e-02, 3.7795830992247079e-02, 8.8868227566005320e-02, 1.6816012736212269e-01, 2.5684454350651975e-01, 3.1727438491816684e-01, 3.1727438491816684e-01, 2.5684454350651975e-01, 1.6816012736212269e-01, 8.8868227566005320e-02, 3.7795830992247079e-02, 1.2883487559777229e-02, 3.5010578505227864e-03, 7.5339673454313967e-04, 1.2731983755106949e-04, 1.6725935920653945e-05, 1.6869468397355139e-06, 1.2865324130563406e-07, 7.2816347565639946e-09, 2.9885455879923381e-10, 8.6391402914483532e-12, 1.6947076167749986e-13, 2.1486472823678717e-15, 1.6484447389507407e-17, 6.9737995966949200e-20, 1.4155042016755176e-22, 1.0976558019968051e-25, 2.1132014962696289e-29, 3.4113792299096508e-34}, + {5.2074550840354642e-35, 3.5619817807815819e-30, 2.0158558166547157e-26, 2.8149547423920401e-23, 1.4966299166256593e-20, 3.8101463479230325e-18, 5.3428423961486667e-16, 4.5312676467479057e-14, 2.4836135431247532e-12, 9.2402978883643831e-11, 2.4228134937243909e-09, 4.6104364856635215e-08, 6.5181055230401887e-07, 6.9771079617906096e-06, 5.7425938238321535e-05, 3.6806169587486260e-04, 1.8562527788732503e-03, 7.4296915378901547e-03, 2.3765555762406108e-02, 6.1094386221801737e-02, 1.2677670015500209e-01, 2.1305846026514422e-01, 2.9065687385124467e-01, 3.2231167941131522e-01, 2.9065687385124467e-01, 2.1305846026514422e-01, 1.2677670015500209e-01, 6.1094386221801737e-02, 2.3765555762406108e-02, 7.4296915378901547e-03, 1.8562527788732503e-03, 3.6806169587486260e-04, 5.7425938238321535e-05, 6.9771079617906096e-06, 6.5181055230401887e-07, 4.6104364856635215e-08, 2.4228134937243909e-09, 9.2402978883643831e-11, 2.4836135431247532e-12, 4.5312676467479057e-14, 5.3428423961486667e-16, 3.8101463479230325e-18, 1.4966299166256593e-20, 2.8149547423920401e-23, 2.0158558166547157e-26, 3.5619817807815819e-30, 5.2074550840354642e-35}, + {7.9355514607740056e-36, 5.9846126933135906e-31, 3.6850360801506979e-27, 5.5645774689023294e-24, 3.1883873235051506e-21, 
8.7301596011867662e-19, 1.3151596226583889e-16, 1.1975898654791956e-14, 7.0469325815459271e-13, 2.8152965378381835e-11, 7.9304674951654092e-10, 1.6225141358957786e-08, 2.4686589936697532e-07, 2.8472586917348417e-06, 2.5285990277484952e-05, 1.7515043180117349e-04, 9.5639231981941610e-04, 4.1530049119775534e-03, 1.4444961574981133e-02, 4.0479676984603939e-02, 9.1822297079285201e-02, 1.6920447194564109e-01, 2.5396154266475901e-01, 3.1100103037796278e-01, 3.1100103037796278e-01, 2.5396154266475901e-01, 1.6920447194564109e-01, 9.1822297079285201e-02, 4.0479676984603939e-02, 1.4444961574981133e-02, 4.1530049119775534e-03, 9.5639231981941610e-04, 1.7515043180117349e-04, 2.5285990277484952e-05, 2.8472586917348417e-06, 2.4686589936697532e-07, 1.6225141358957786e-08, 7.9304674951654092e-10, 2.8152965378381835e-11, 7.0469325815459271e-13, 1.1975898654791956e-14, 1.3151596226583889e-16, 8.7301596011867662e-19, 3.1883873235051506e-21, 5.5645774689023294e-24, 3.6850360801506979e-27, 5.9846126933135906e-31, 7.9355514607740056e-36}, + {1.2072872864975299e-36, 1.0023613427435658e-31, 6.7063563353463921e-28, 1.0936809436127891e-24, 6.7446743456588804e-22, 1.9836416602794039e-19, 3.2059589524003601e-17, 3.1301391441242865e-15, 1.9744849360632020e-13, 8.4575241714312574e-12, 2.5554702439987741e-10, 5.6118428733874883e-09, 9.1729613211703643e-08, 1.1378411996774051e-06, 1.0881805485674181e-05, 8.1291046453153734e-05, 4.7952088600477827e-04, 2.2536737753387727e-03, 8.5018075629547680e-03, 2.5900304438342722e-02, 6.4032496212210105e-02, 1.2896559830848855e-01, 2.1222171925816707e-01, 2.8591145169466448e-01, 3.1573389003557428e-01, 2.8591145169466448e-01, 2.1222171925816707e-01, 1.2896559830848855e-01, 6.4032496212210105e-02, 2.5900304438342722e-02, 8.5018075629547680e-03, 2.2536737753387727e-03, 4.7952088600477827e-04, 8.1291046453153734e-05, 1.0881805485674181e-05, 1.1378411996774051e-06, 9.1729613211703643e-08, 5.6118428733874883e-09, 2.5554702439987741e-10, 8.4575241714312574e-12, 1.9744849360632020e-13, 3.1301391441242865e-15, 3.2059589524003601e-17, 1.9836416602794039e-19, 6.7446743456588804e-22, 1.0936809436127891e-24, 6.7063563353463921e-28, 1.0023613427435658e-31, 1.2072872864975299e-36}, + {1.8337940485734247e-37, 1.6738016679078003e-32, 1.2152441234045103e-28, 2.1376583083600910e-25, 1.4170935995734126e-22, 4.4709843654079433e-20, 7.7423829570434533e-18, 8.0942618934651918e-16, 5.4659440318156410e-14, 2.5066555238996801e-12, 8.1118773649302876e-11, 1.9090405438119032e-09, 3.3467934040214541e-08, 4.4570299668178385e-07, 4.5816827079555368e-06, 3.6840190537807232e-05, 2.3426989210925618e-04, 1.1890117817496457e-03, 4.8532638261719538e-03, 1.6031941068412211e-02, 4.3079159156765599e-02, 9.4548935477086246e-02, 1.7003245567716402e-01, 2.5113085633200216e-01, 3.0508512920439906e-01, 3.0508512920439906e-01, 2.5113085633200216e-01, 1.7003245567716402e-01, 9.4548935477086246e-02, 4.3079159156765599e-02, 1.6031941068412211e-02, 4.8532638261719538e-03, 1.1890117817496457e-03, 2.3426989210925618e-04, 3.6840190537807232e-05, 4.5816827079555368e-06, 4.4570299668178385e-07, 3.3467934040214541e-08, 1.9090405438119032e-09, 8.1118773649302876e-11, 2.5066555238996801e-12, 5.4659440318156410e-14, 8.0942618934651918e-16, 7.7423829570434533e-18, 4.4709843654079433e-20, 1.4170935995734126e-22, 2.1376583083600910e-25, 1.2152441234045103e-28, 1.6738016679078003e-32, 1.8337940485734247e-37}, + {2.7811222326645344e-38, 2.7868913296983680e-33, 2.1929968310380126e-29, 4.1558650081686601e-26, 2.9579544865762534e-23, 
9.9993599745434670e-21, 1.8530303241143367e-18, 2.0717064275729557e-16, 1.4956863300607063e-14, 7.3335422940638437e-13, 2.5381498317560184e-11, 6.3917274853532610e-10, 1.1999326533873204e-08, 1.7127604128737511e-07, 1.8891833940946794e-06, 1.6319954290330701e-05, 1.1165708931911324e-04, 6.1070259874041984e-04, 2.6911086058640649e-03, 9.6161203333211508e-03, 2.8012262602063957e-02, 6.6811317318844293e-02, 1.3091376937688559e-01, 2.1128731588244376e-01, 2.8138276384359967e-01, 3.0954302944664125e-01, 2.8138276384359967e-01, 2.1128731588244376e-01, 1.3091376937688559e-01, 6.6811317318844293e-02, 2.8012262602063957e-02, 9.6161203333211508e-03, 2.6911086058640649e-03, 6.1070259874041984e-04, 1.1165708931911324e-04, 1.6319954290330701e-05, 1.8891833940946794e-06, 1.7127604128737511e-07, 1.1999326533873204e-08, 6.3917274853532610e-10, 2.5381498317560184e-11, 7.3335422940638437e-13, 1.4956863300607063e-14, 2.0717064275729557e-16, 1.8530303241143367e-18, 9.9993599745434670e-21, 2.9579544865762534e-23, 4.1558650081686601e-26, 2.1929968310380126e-29, 2.7868913296983680e-33, 2.7811222326645344e-38}, + {4.2115420614105446e-39, 4.6271645012802355e-34, 3.9415951369162550e-30, 8.0378813038410338e-27, 6.1353803600426939e-24, 2.2196939309444735e-21, 4.3966970036273617e-19, 5.2503117409060339e-17, 4.0473768240447191e-15, 2.1189539626185899e-13, 7.8326447571213533e-12, 2.1076390184671402e-10, 4.2306685774284424e-09, 6.4623052172886120e-08, 7.6355680403038401e-07, 7.0741004987582161e-06, 5.1975998082926043e-05, 3.0574624681119092e-04, 1.4514479978625793e-03, 5.5977022679341544e-03, 1.7635166965733341e-02, 4.5590184103438805e-02, 9.7064055572858784e-02, 1.7067182934440731e-01, 2.4835688537420786e-01, 2.9949402485158516e-01, 2.9949402485158516e-01, 2.4835688537420786e-01, 1.7067182934440731e-01, 9.7064055572858784e-02, 4.5590184103438805e-02, 1.7635166965733341e-02, 5.5977022679341544e-03, 1.4514479978625793e-03, 3.0574624681119092e-04, 5.1975998082926043e-05, 7.0741004987582161e-06, 7.6355680403038401e-07, 6.4623052172886120e-08, 4.2306685774284424e-09, 2.1076390184671402e-10, 7.8326447571213533e-12, 2.1189539626185899e-13, 4.0473768240447191e-15, 5.2503117409060339e-17, 4.3966970036273617e-19, 2.2196939309444735e-21, 6.1353803600426939e-24, 8.0378813038410338e-27, 3.9415951369162550e-30, 4.6271645012802355e-34, 4.2115420614105446e-39}, + {6.3684623301529066e-40, 7.6617679179706699e-35, 7.0570663748570958e-31, 1.5468771288046908e-27, 1.2648661239395541e-24, 4.8919455331107564e-22, 1.0345305017520525e-19, 1.3179732316466649e-17, 1.0835445640249948e-15, 6.0495805864229647e-14, 2.3852254108327101e-12, 6.8487736158296791e-11, 1.4678463337326563e-09, 2.3957838556591937e-08, 3.0275650879319551e-07, 3.0032131335980235e-06, 2.3654729185615187e-05, 1.4937604708377004e-04, 7.6242846753152228e-04, 3.1668653729621131e-03, 1.0765666029311394e-02, 3.0093535038790555e-02, 6.9436105549292540e-02, 1.3264501499247186e-01, 2.1027400626010295e-01, 2.7705564403537380e-01, 3.0370259492878027e-01, 2.7705564403537380e-01, 2.1027400626010295e-01, 1.3264501499247186e-01, 6.9436105549292540e-02, 3.0093535038790555e-02, 1.0765666029311394e-02, 3.1668653729621131e-03, 7.6242846753152228e-04, 1.4937604708377004e-04, 2.3654729185615187e-05, 3.0032131335980235e-06, 3.0275650879319551e-07, 2.3957838556591937e-08, 1.4678463337326563e-09, 6.8487736158296791e-11, 2.3852254108327101e-12, 6.0495805864229647e-14, 1.0835445640249948e-15, 1.3179732316466649e-17, 1.0345305017520525e-19, 4.8919455331107564e-22, 1.2648661239395541e-24, 
1.5468771288046908e-27, 7.0570663748570958e-31, 7.6617679179706699e-35, 6.3684623301529066e-40}, + {9.6165697698301258e-41, 1.2653200760168758e-35, 1.2587808691128722e-31, 2.9626275871495710e-28, 2.5923371557456032e-25, 1.0706490586422193e-22, 2.4146919999727056e-20, 3.2782519697898691e-18, 2.8709892632049820e-16, 1.7073406733468467e-14, 7.1713449978551876e-13, 2.1943984676811026e-11, 5.0147398704735039e-10, 8.7334448358337394e-09, 1.1786188465819090e-07, 1.2498100284112936e-06, 1.0535444441825744e-05, 7.1294347272485073e-05, 3.9051617266315898e-04, 1.7435254219654911e-03, 6.3820594170464536e-03, 1.9246441370539501e-02, 4.8010306161295493e-02, 9.9382787160472649e-02, 1.7114652799601413e-01, 2.4564247050424473e-01, 2.9419908452729160e-01, 2.9419908452729160e-01, 2.4564247050424473e-01, 1.7114652799601413e-01, 9.9382787160472649e-02, 4.8010306161295493e-02, 1.9246441370539501e-02, 6.3820594170464536e-03, 1.7435254219654911e-03, 3.9051617266315898e-04, 7.1294347272485073e-05, 1.0535444441825744e-05, 1.2498100284112936e-06, 1.1786188465819090e-07, 8.7334448358337394e-09, 5.0147398704735039e-10, 2.1943984676811026e-11, 7.1713449978551876e-13, 1.7073406733468467e-14, 2.8709892632049820e-16, 3.2782519697898691e-18, 2.4146919999727056e-20, 1.0706490586422193e-22, 2.5923371557456032e-25, 2.9626275871495710e-28, 1.2587808691128722e-31, 1.2653200760168758e-35, 9.6165697698301258e-41}, + {1.4501627690036662e-41, 2.0843249634724133e-36, 2.2371881417054147e-32, 5.6477366517509067e-29, 5.2828491369827090e-26, 2.3275184070080019e-23, 5.5924703344128403e-21, 8.0822735455939761e-19, 7.5316452507313807e-17, 4.7652984604189149e-15, 2.1297535284773726e-13, 6.9364439628016448e-12, 1.6879993347401012e-10, 3.1324958261028878e-09, 4.5081881972300138e-08, 5.1027034733995817e-07, 4.5962183144073910e-06, 3.3274977701846682e-05, 1.9525422369452320e-04, 9.3526439804451304e-04, 3.6789367196008232e-03, 1.1943896271882769e-02, 3.2137739232619873e-02, 7.1912783366758826e-02, 1.3418063914176925e-01, 2.0919736551904625e-01, 2.7291625193942631e-01, 2.9818072956643887e-01, 2.7291625193942631e-01, 2.0919736551904625e-01, 1.3418063914176925e-01, 7.1912783366758826e-02, 3.2137739232619873e-02, 1.1943896271882769e-02, 3.6789367196008232e-03, 9.3526439804451304e-04, 1.9525422369452320e-04, 3.3274977701846682e-05, 4.5962183144073910e-06, 5.1027034733995817e-07, 4.5081881972300138e-08, 3.1324958261028878e-09, 1.6879993347401012e-10, 6.9364439628016448e-12, 2.1297535284773726e-13, 4.7652984604189149e-15, 7.5316452507313807e-17, 8.0822735455939761e-19, 5.5924703344128403e-21, 2.3275184070080019e-23, 5.2828491369827090e-26, 5.6477366517509067e-29, 2.2371881417054147e-32, 2.0843249634724133e-36, 1.4501627690036662e-41}, + {2.1839473796203715e-42, 3.4249840843882790e-37, 3.9621540898960065e-33, 1.0717993883210793e-29, 1.0706713589643687e-26, 5.0270999187295385e-24, 1.2855363897049299e-21, 1.9756668115390835e-19, 1.9569261785647621e-17, 1.3158561251396991e-15, 6.2504456297525029e-14, 2.1641925885012134e-12, 5.6013978504685036e-11, 1.1062006976882208e-09, 1.6954324255991913e-08, 2.0454464536460739e-07, 1.9657534004009810e-06, 1.5201081492284489e-05, 9.5395991047773378e-05, 4.8937293565704996e-04, 2.0647475636031381e-03, 7.2020522320410518e-03, 2.0858574140157814e-02, 5.0338397424815960e-02, 1.0151939271798682e-01, 1.7147724762127156e-01, 2.4298927099272355e-01, 2.8917508433514816e-01, 2.8917508433514816e-01, 2.4298927099272355e-01, 1.7147724762127156e-01, 1.0151939271798682e-01, 5.0338397424815960e-02, 2.0858574140157814e-02, 
7.2020522320410518e-03, 2.0647475636031381e-03, 4.8937293565704996e-04, 9.5395991047773378e-05, 1.5201081492284489e-05, 1.9657534004009810e-06, 2.0454464536460739e-07, 1.6954324255991913e-08, 1.1062006976882208e-09, 5.6013978504685036e-11, 2.1641925885012134e-12, 6.2504456297525029e-14, 1.3158561251396991e-15, 1.9569261785647621e-17, 1.9756668115390835e-19, 1.2855363897049299e-21, 5.0270999187295385e-24, 1.0706713589643687e-26, 1.0717993883210793e-29, 3.9621540898960065e-33, 3.4249840843882790e-37, 2.1839473796203715e-42}, + {3.2848343922234264e-43, 5.6145192176094857e-38, 6.9933335612180924e-34, 2.0251468211785233e-30, 2.1584037474070782e-27, 1.0789794198158127e-24, 2.9336812927661561e-22, 4.7897086011228863e-20, 5.0376705984946099e-18, 3.5961584198415702e-16, 1.8135512160129682e-14, 6.6680503871104131e-13, 1.8333727853050702e-11, 3.8483200507286178e-10, 6.2732332422802088e-09, 8.0560041690845556e-08, 8.2486418628805857e-07, 6.8030638512964077e-06, 4.5587459694050582e-05, 2.5003668451320217e-04, 1.1295333257645079e-03, 4.2250842236442390e-03, 1.3144726459581649e-02, 3.4139801128400288e-02, 7.4247675370613914e-02, 1.3553970687329148e-01, 2.0807036460654699e-01, 2.6895194980711801e-01, 2.9294948869685189e-01, 2.6895194980711801e-01, 2.0807036460654699e-01, 1.3553970687329148e-01, 7.4247675370613914e-02, 3.4139801128400288e-02, 1.3144726459581649e-02, 4.2250842236442390e-03, 1.1295333257645079e-03, 2.5003668451320217e-04, 4.5587459694050582e-05, 6.8030638512964077e-06, 8.2486418628805857e-07, 8.0560041690845556e-08, 6.2732332422802088e-09, 3.8483200507286178e-10, 1.8333727853050702e-11, 6.6680503871104131e-13, 1.8135512160129682e-14, 3.5961584198415702e-16, 5.0376705984946099e-18, 4.7897086011228863e-20, 2.9336812927661561e-22, 1.0789794198158127e-24, 2.1584037474070782e-27, 2.0251468211785233e-30, 6.9933335612180924e-34, 5.6145192176094857e-38, 3.2848343922234264e-43}, + {4.9345427975166703e-44, 9.1824526100660134e-39, 1.2302868650409080e-34, 3.8103303738430430e-31, 4.3288355988163862e-28, 2.3018029777481733e-25, 6.6480276615029978e-23, 1.1519651049881098e-20, 1.2852645022546249e-18, 9.7305218355116897e-17, 5.2042716465726346e-15, 2.0297402247949017e-13, 5.9217773854289703e-12, 1.3196011058824996e-10, 2.2850766766300011e-09, 3.1195246571716635e-08, 3.3984745324530809e-07, 2.9851306091787731e-06, 2.1327339454200749e-05, 1.2486893055641099e-04, 6.0296061313673489e-04, 2.4143454547297083e-03, 8.0534582747568747e-03, 2.2465314158278335e-02, 5.2574374089316610e-02, 1.0348723259331935e-01, 1.7168192789310016e-01, 2.4039805219536928e-01, 2.8439970531426456e-01, 2.8439970531426456e-01, 2.4039805219536928e-01, 1.7168192789310016e-01, 1.0348723259331935e-01, 5.2574374089316610e-02, 2.2465314158278335e-02, 8.0534582747568747e-03, 2.4143454547297083e-03, 6.0296061313673489e-04, 1.2486893055641099e-04, 2.1327339454200749e-05, 2.9851306091787731e-06, 3.3984745324530809e-07, 3.1195246571716635e-08, 2.2850766766300011e-09, 1.3196011058824996e-10, 5.9217773854289703e-12, 2.0297402247949017e-13, 5.2042716465726346e-15, 9.7305218355116897e-17, 1.2852645022546249e-18, 1.1519651049881098e-20, 6.6480276615029978e-23, 2.3018029777481733e-25, 4.3288355988163862e-28, 3.8103303738430430e-31, 1.2302868650409080e-34, 9.1824526100660134e-39, 4.9345427975166703e-44}, + {7.4038563068673450e-45, 1.4983962534795572e-39, 2.1574506027269642e-35, 7.1398344998607888e-32, 8.6385560494218303e-29, 4.8816444464632883e-26, 1.4963121942445614e-23, 2.7492726789010400e-21, 3.2508143970587746e-19, 2.6076354448061946e-17, 
1.4776310161414783e-15, 6.1066717751743867e-14, 1.8884472351365658e-12, 4.4624672633249920e-11, 8.1989628692240572e-10, 1.1884210228039399e-08, 1.3757431403862670e-07, 1.2852403270784334e-06, 9.7761956199732341e-06, 6.1008880084875632e-05, 3.1439538907451493e-04, 1.3453332060777454e-03, 4.8029120328152543e-03, 1.4362559357587188e-02, 3.6095769719147547e-02, 7.6447309674074940e-02, 1.3673928335964028e-01, 2.0690383287647826e-01, 2.6515118763397444e-01, 2.8798424312571891e-01, 2.6515118763397444e-01, 2.0690383287647826e-01, 1.3673928335964028e-01, 7.6447309674074940e-02, 3.6095769719147547e-02, 1.4362559357587188e-02, 4.8029120328152543e-03, 1.3453332060777454e-03, 3.1439538907451493e-04, 6.1008880084875632e-05, 9.7761956199732341e-06, 1.2852403270784334e-06, 1.3757431403862670e-07, 1.1884210228039399e-08, 8.1989628692240572e-10, 4.4624672633249920e-11, 1.8884472351365658e-12, 6.1066717751743867e-14, 1.4776310161414783e-15, 2.6076354448061946e-17, 3.2508143970587746e-19, 2.7492726789010400e-21, 1.4963121942445614e-23, 4.8816444464632883e-26, 8.6385560494218303e-29, 7.1398344998607888e-32, 2.1574506027269642e-35, 1.4983962534795572e-39, 7.4038563068673450e-45}, + {1.1095872479683389e-45, 2.4397475881451687e-40, 3.7716267271207567e-36, 1.3325596117642357e-32, 1.7155731476717639e-29, 1.0294059971650915e-26, 3.3457569557525580e-24, 6.5125672574964802e-22, 8.1536404730238284e-20, 6.9232479095776861e-18, 4.1524441096940554e-16, 1.8166245762596555e-14, 5.9484305160561044e-13, 1.4889573490628030e-11, 2.8993590128077474e-10, 4.4568227752259551e-09, 5.4755546192766962e-08, 5.4335161342049202e-07, 4.3942869362670327e-06, 2.9187419041555301e-05, 1.6027733468184521e-04, 7.3177355696550823e-04, 2.7913248289530633e-03, 8.9321783603078207e-03, 2.4061272766109355e-02, 5.4718970932182816e-02, 1.0529876369778575e-01, 1.7177615691888495e-01, 2.3786890495865878e-01, 2.7985311752282899e-01, 2.7985311752282899e-01, 2.3786890495865878e-01, 1.7177615691888495e-01, 1.0529876369778575e-01, 5.4718970932182816e-02, 2.4061272766109355e-02, 8.9321783603078207e-03, 2.7913248289530633e-03, 7.3177355696550823e-04, 1.6027733468184521e-04, 2.9187419041555301e-05, 4.3942869362670327e-06, 5.4335161342049202e-07, 5.4755546192766962e-08, 4.4568227752259551e-09, 2.8993590128077474e-10, 1.4889573490628030e-11, 5.9484305160561044e-13, 1.8166245762596555e-14, 4.1524441096940554e-16, 6.9232479095776861e-18, 8.1536404730238284e-20, 6.5125672574964802e-22, 3.3457569557525580e-24, 1.0294059971650915e-26, 1.7155731476717639e-29, 1.3325596117642357e-32, 3.7716267271207567e-36, 2.4397475881451687e-40, 1.1095872479683389e-45}, + {1.6610070486206728e-46, 3.9640620009950677e-41, 6.5736876067220763e-37, 2.4774746124224036e-33, 3.3910839755167296e-30, 2.1587655850502334e-27, 7.4335800238463694e-25, 1.5315980968945761e-22, 2.0285571447032586e-20, 1.8216160573946239e-18, 1.1553699025986796e-16, 5.3454835391987630e-15, 1.8515162945489592e-13, 4.9041814317738659e-12, 1.0110041892763622e-10, 1.6462612023799091e-09, 2.1440032177267756e-08, 2.2570860520414515e-07, 1.9382734960996544e-06, 1.3684069297535329e-05, 7.9958097609494878e-05, 3.8892085371592777e-04, 1.5825583090958971e-03, 5.4099296624677247e-03, 1.5592290312662006e-02, 3.8002651439240136e-02, 7.8518270158397185e-02, 1.3779465152760539e-01, 2.0570683105394089e-01, 2.6150339781427412e-01, 2.8326318995972372e-01, 2.6150339781427412e-01, 2.0570683105394089e-01, 1.3779465152760539e-01, 7.8518270158397185e-02, 3.8002651439240136e-02, 1.5592290312662006e-02, 5.4099296624677247e-03, 
1.5825583090958971e-03, 3.8892085371592777e-04, 7.9958097609494878e-05, 1.3684069297535329e-05, 1.9382734960996544e-06, 2.2570860520414515e-07, 2.1440032177267756e-08, 1.6462612023799091e-09, 1.0110041892763622e-10, 4.9041814317738659e-12, 1.8515162945489592e-13, 5.3454835391987630e-15, 1.1553699025986796e-16, 1.8216160573946239e-18, 2.0285571447032586e-20, 1.5315980968945761e-22, 7.4335800238463694e-25, 2.1587655850502334e-27, 3.3910839755167296e-30, 2.4774746124224036e-33, 6.5736876067220763e-37, 3.9640620009950677e-41, 1.6610070486206728e-46}, + {2.4837138180536654e-47, 6.4274569671736483e-42, 1.1424028966725505e-37, 4.5888574043976661e-34, 6.6725110959391675e-31, 4.5029239074522869e-28, 1.6414116353925588e-25, 3.5767852999576706e-23, 5.0073739324943669e-21, 4.7513038441234886e-19, 3.1838887747718650e-17, 1.5564176512276732e-15, 5.6970949884188746e-14, 1.5952148801760892e-12, 3.4779437587729222e-11, 5.9926928642653631e-10, 8.2638647618516911e-09, 9.2185737335790556e-08, 8.3956368818335782e-07, 6.2919256841286273e-06, 3.9066932688710276e-05, 2.0215217999457319e-04, 8.7616004129601344e-04, 3.1945104951283309e-03, 9.8342822671495106e-03, 2.5641844544669879e-02, 5.6773556441129344e-02, 1.0696556035928920e-01, 1.7177351127893237e-01, 2.3540141376649018e-01, 2.7551763457131306e-01, 2.7551763457131306e-01, 2.3540141376649018e-01, 1.7177351127893237e-01, 1.0696556035928920e-01, 5.6773556441129344e-02, 2.5641844544669879e-02, 9.8342822671495106e-03, 3.1945104951283309e-03, 8.7616004129601344e-04, 2.0215217999457319e-04, 3.9066932688710276e-05, 6.2919256841286273e-06, 8.3956368818335782e-07, 9.2185737335790556e-08, 8.2638647618516911e-09, 5.9926928642653631e-10, 3.4779437587729222e-11, 1.5952148801760892e-12, 5.6970949884188746e-14, 1.5564176512276732e-15, 3.1838887747718650e-17, 4.7513038441234886e-19, 5.0073739324943669e-21, 3.5767852999576706e-23, 1.6414116353925588e-25, 4.5029239074522869e-28, 6.6725110959391675e-31, 4.5888574043976661e-34, 1.1424028966725505e-37, 6.4274569671736483e-42, 2.4837138180536654e-47}, + {3.7099206361930349e-48, 1.0400778615224128e-42, 1.9796804708319897e-38, 8.4687478191905945e-35, 1.3071305930820806e-31, 9.3437837175659029e-29, 3.6027426635285024e-26, 8.2963863116210883e-24, 1.2266629909143781e-21, 1.2288435628835336e-19, 8.6925536958459408e-18, 4.4857058689315790e-16, 1.7335817955789140e-14, 5.1265062385198317e-13, 1.1808921844569749e-11, 2.1508698297874976e-10, 3.1371929535383148e-09, 3.7041625984897139e-08, 3.5734732949990906e-07, 2.8393114498469317e-06, 1.8709113003788699e-05, 1.0284880800685653e-04, 4.7411702610320811e-04, 1.8409222622442125e-03, 6.0436044551375788e-03, 1.6829299199652096e-02, 3.9858264027817066e-02, 8.0467087994200825e-02, 1.3871950817658463e-01, 2.0448695346897386e-01, 2.5799889943138321e-01, 2.7876694884925185e-01, 2.5799889943138321e-01, 2.0448695346897386e-01, 1.3871950817658463e-01, 8.0467087994200825e-02, 3.9858264027817066e-02, 1.6829299199652096e-02, 6.0436044551375788e-03, 1.8409222622442125e-03, 4.7411702610320811e-04, 1.0284880800685653e-04, 1.8709113003788699e-05, 2.8393114498469317e-06, 3.5734732949990906e-07, 3.7041625984897139e-08, 3.1371929535383148e-09, 2.1508698297874976e-10, 1.1808921844569749e-11, 5.1265062385198317e-13, 1.7335817955789140e-14, 4.4857058689315790e-16, 8.6925536958459408e-18, 1.2288435628835336e-19, 1.2266629909143781e-21, 8.2963863116210883e-24, 3.6027426635285024e-26, 9.3437837175659029e-29, 1.3071305930820806e-31, 8.4687478191905945e-35, 1.9796804708319897e-38, 1.0400778615224128e-42, 
3.7099206361930349e-48}, + {5.5357065358570184e-49, 1.6797479901081275e-43, 3.4211380112558623e-39, 1.5573906246297621e-35, 2.5496608991129390e-32, 1.9291035954649827e-29, 7.8617977889259561e-27, 1.9117068833006383e-24, 2.9828627842798555e-22, 3.1522545665038165e-20, 2.3518847106758435e-18, 1.2800933913224359e-16, 5.2186237265908379e-15, 1.6283407307097284e-13, 3.9591777669477542e-12, 7.6152172501454727e-11, 1.1736167423215493e-09, 1.4651253164761138e-08, 1.4955329367272515e-07, 1.2583402510311900e-06, 8.7884992308503793e-06, 5.1259291357862856e-05, 2.5098369851306242e-04, 1.0363290995075809e-03, 3.6225869785344623e-03, 1.0756040509879146e-02, 2.7203128953688913e-02, 5.8739981964099283e-02, 1.0849834930618682e-01, 1.7168584234908363e-01, 2.3299478606267793e-01, 2.7137742494130401e-01, 2.7137742494130401e-01, 2.3299478606267793e-01, 1.7168584234908363e-01, 1.0849834930618682e-01, 5.8739981964099283e-02, 2.7203128953688913e-02, 1.0756040509879146e-02, 3.6225869785344623e-03, 1.0363290995075809e-03, 2.5098369851306242e-04, 5.1259291357862856e-05, 8.7884992308503793e-06, 1.2583402510311900e-06, 1.4955329367272515e-07, 1.4651253164761138e-08, 1.1736167423215493e-09, 7.6152172501454727e-11, 3.9591777669477542e-12, 1.6283407307097284e-13, 5.2186237265908379e-15, 1.2800933913224359e-16, 2.3518847106758435e-18, 3.1522545665038165e-20, 2.9828627842798555e-22, 1.9117068833006383e-24, 7.8617977889259561e-27, 1.9291035954649827e-29, 2.5496608991129390e-32, 1.5573906246297621e-35, 3.4211380112558623e-39, 1.6797479901081275e-43, 5.5357065358570184e-49}, + {8.2516107962538437e-50, 2.7076758452831307e-44, 5.8962844659788279e-40, 2.8541849032786616e-36, 4.9525862550205175e-33, 3.9632869870747322e-30, 1.7059115810757949e-27, 4.3769741948718935e-25, 7.2016107891350715e-23, 8.0222187354240242e-21, 6.3078910455861518e-19, 3.6181996190429121e-17, 1.5546635722380968e-15, 5.1139174817165822e-14, 1.3112516106390246e-12, 2.6608653477929730e-11, 4.3286561534485417e-10, 5.7075829327787572e-09, 6.1577962214505752e-08, 5.4804560350180002e-07, 4.0522493910237441e-06, 2.5045342890495838e-05, 1.3008291629845118e-04, 5.7039896752377221e-04, 2.1199816320368434e-03, 6.7014045380057391e-03, 1.8069433112703592e-02, 4.1661108762478506e-02, 8.2300163369735294e-02, 1.3952613948284390e-01, 2.0325057415444170e-01, 2.5462881185279007e-01, 2.7447822655926341e-01, 2.5462881185279007e-01, 2.0325057415444170e-01, 1.3952613948284390e-01, 8.2300163369735294e-02, 4.1661108762478506e-02, 1.8069433112703592e-02, 6.7014045380057391e-03, 2.1199816320368434e-03, 5.7039896752377221e-04, 1.3008291629845118e-04, 2.5045342890495838e-05, 4.0522493910237441e-06, 5.4804560350180002e-07, 6.1577962214505752e-08, 5.7075829327787572e-09, 4.3286561534485417e-10, 2.6608653477929730e-11, 1.3112516106390246e-12, 5.1139174817165822e-14, 1.5546635722380968e-15, 3.6181996190429121e-17, 6.3078910455861518e-19, 8.0222187354240242e-21, 7.2016107891350715e-23, 4.3769741948718935e-25, 1.7059115810757949e-27, 3.9632869870747322e-30, 4.9525862550205175e-33, 2.8541849032786616e-36, 5.8962844659788279e-40, 2.7076758452831307e-44, 8.2516107962538437e-50}, + {1.2287768797135238e-50, 4.3565748909654033e-45, 1.0135609150438053e-40, 5.2133092407201474e-37, 9.5811571508986169e-34, 8.1036840367508257e-31, 3.6813492354788247e-28, 9.9592645616067056e-26, 1.7266548206330479e-23, 2.0259038600763283e-21, 1.6775151863814584e-19, 1.0132362352411122e-17, 4.5848634437116816e-16, 1.5885532676287405e-14, 4.2916420558156765e-13, 9.1795670585092616e-12, 1.5747988216738595e-10, 
2.1910095676436381e-09, 2.4958591861839229e-08, 2.3470996573719479e-07, 1.8351979221091958e-06, 1.2005278344250187e-05, 6.6061130219886947e-05, 3.0721561121964142e-04, 1.2123596600232957e-03, 4.0741349478589858e-03, 1.1693944851274627e-02, 2.8741855098732765e-02, 6.0620459149597836e-02, 1.0990705313380535e-01, 1.7152351798443127e-01, 2.3064795199119065e-01, 2.6741826939146512e-01, 2.6741826939146512e-01, 2.3064795199119065e-01, 1.7152351798443127e-01, 1.0990705313380535e-01, 6.0620459149597836e-02, 2.8741855098732765e-02, 1.1693944851274627e-02, 4.0741349478589858e-03, 1.2123596600232957e-03, 3.0721561121964142e-04, 6.6061130219886947e-05, 1.2005278344250187e-05, 1.8351979221091958e-06, 2.3470996573719479e-07, 2.4958591861839229e-08, 2.1910095676436381e-09, 1.5747988216738595e-10, 9.1795670585092616e-12, 4.2916420558156765e-13, 1.5885532676287405e-14, 4.5848634437116816e-16, 1.0132362352411122e-17, 1.6775151863814584e-19, 2.0259038600763283e-21, 1.7266548206330479e-23, 9.9592645616067056e-26, 3.6813492354788247e-28, 8.1036840367508257e-31, 9.5811571508986169e-34, 5.2133092407201474e-37, 1.0135609150438053e-40, 4.3565748909654033e-45, 1.2287768797135238e-50}, + {1.8280449098224982e-51, 6.9969776104465074e-46, 1.7378601313821812e-41, 9.4914073654790101e-38, 1.8462398438517290e-34, 1.6492772853150654e-31, 7.9020561032723045e-29, 2.2524636987884908e-26, 4.1119507972824725e-24, 5.0779932729853307e-22, 4.4245805201641825e-20, 2.8120140377689001e-18, 1.3389362360428643e-16, 4.8824398317753951e-15, 1.3886060911907966e-13, 3.1279303248480510e-12, 5.6537332414960619e-11, 8.2920364429410743e-10, 9.9634336551576826e-09, 9.8898818048931736e-08, 8.1685444112022656e-07, 5.6493811974978918e-06, 3.2895750894750603e-05, 1.6204473635815332e-04, 6.7809290318947399e-04, 2.4191591440605796e-03, 7.3808332831545517e-03, 1.9308982903048826e-02, 4.3410259562275427e-02, 8.4023710374037747e-02, 1.4022557730554358e-01, 2.0200304815121278e-01, 2.5138497704677004e-01, 2.7038153661061759e-01, 2.5138497704677004e-01, 2.0200304815121278e-01, 1.4022557730554358e-01, 8.4023710374037747e-02, 4.3410259562275427e-02, 1.9308982903048826e-02, 7.3808332831545517e-03, 2.4191591440605796e-03, 6.7809290318947399e-04, 1.6204473635815332e-04, 3.2895750894750603e-05, 5.6493811974978918e-06, 8.1685444112022656e-07, 9.8898818048931736e-08, 9.9634336551576826e-09, 8.2920364429410743e-10, 5.6537332414960619e-11, 3.1279303248480510e-12, 1.3886060911907966e-13, 4.8824398317753951e-15, 1.3389362360428643e-16, 2.8120140377689001e-18, 4.4245805201641825e-20, 5.0779932729853307e-22, 4.1119507972824725e-24, 2.2524636987884908e-26, 7.9020561032723045e-29, 1.6492772853150654e-31, 1.8462398438517290e-34, 9.4914073654790101e-38, 1.7378601313821812e-41, 6.9969776104465074e-46, 1.8280449098224982e-51}, + {2.7170067116730165e-52, 1.1217925177026293e-46, 2.9723642655668546e-42, 1.7225457466828413e-38, 3.5439568636689488e-35, 3.3415178524750795e-32, 1.6874060730443393e-29, 5.0645263679858964e-27, 9.7283658177698223e-25, 1.2635945997114491e-22, 1.1577247535043669e-20, 7.7362132775431355e-19, 3.8731516902515290e-17, 1.4852494321530914e-15, 4.4433093884802028e-14, 1.0531630647084200e-12, 2.0038587880644725e-11, 3.0952986928769617e-10, 3.9193048318854111e-09, 4.1023319776043747e-08, 3.5754993018691617e-07, 2.6114965376035973e-06, 1.6073154232928773e-05, 8.3767928571091071e-05, 3.7124104714178821e-04, 1.4042112162131032e-03, 4.5476632686124660e-03, 1.2644719871722365e-02, 3.0255311010248965e-02, 6.2417460794691487e-02, 1.1120083831262205e-01, 
1.7129562704597320e-01, 2.2835964147332644e-01, 2.6362735600964210e-01, 2.6362735600964210e-01, 2.2835964147332644e-01, 1.7129562704597320e-01, 1.1120083831262205e-01, 6.2417460794691487e-02, 3.0255311010248965e-02, 1.2644719871722365e-02, 4.5476632686124660e-03, 1.4042112162131032e-03, 3.7124104714178821e-04, 8.3767928571091071e-05, 1.6073154232928773e-05, 2.6114965376035973e-06, 3.5754993018691617e-07, 4.1023319776043747e-08, 3.9193048318854111e-09, 3.0952986928769617e-10, 2.0038587880644725e-11, 1.0531630647084200e-12, 4.4433093884802028e-14, 1.4852494321530914e-15, 3.8731516902515290e-17, 7.7362132775431355e-19, 1.1577247535043669e-20, 1.2635945997114491e-22, 9.7283658177698223e-25, 5.0645263679858964e-27, 1.6874060730443393e-29, 3.3415178524750795e-32, 3.5439568636689488e-35, 1.7225457466828413e-38, 2.9723642655668546e-42, 1.1217925177026293e-46, 2.7170067116730165e-52}, + {4.0345457983197185e-53, 1.7954384298058220e-47, 5.0715303590978945e-43, 3.1165183087668226e-39, 6.7773962594083074e-36, 6.7403932258482419e-33, 3.5851439372856567e-30, 1.1322450883416873e-27, 2.2869723112554000e-25, 3.1221586704548222e-23, 3.0058381695419005e-21, 2.1103468793130292e-19, 1.1100994988590987e-17, 4.4732478501982463e-16, 1.4065409569369246e-14, 3.5050786947688594e-13, 7.0144682351913210e-12, 1.1401392553489413e-10, 1.5199381740983112e-09, 1.6759992864221019e-08, 1.5399292502874990e-07, 1.1865872353918687e-06, 7.7110412769679424e-06, 4.2469576433922949e-05, 1.9909609055329152e-04, 7.9743820114063489e-04, 2.7377658998792736e-03, 8.0794563303749761e-03, 2.0544655922552182e-02, 4.5105267291699995e-02, 8.5643719556781017e-02, 1.4082773794401451e-01, 2.0074887688731419e-01, 2.4825988992112150e-01, 2.6646296361626087e-01, 2.4825988992112150e-01, 2.0074887688731419e-01, 1.4082773794401451e-01, 8.5643719556781017e-02, 4.5105267291699995e-02, 2.0544655922552182e-02, 8.0794563303749761e-03, 2.7377658998792736e-03, 7.9743820114063489e-04, 1.9909609055329152e-04, 4.2469576433922949e-05, 7.7110412769679424e-06, 1.1865872353918687e-06, 1.5399292502874990e-07, 1.6759992864221019e-08, 1.5199381740983112e-09, 1.1401392553489413e-10, 7.0144682351913210e-12, 3.5050786947688594e-13, 1.4065409569369246e-14, 4.4732478501982463e-16, 1.1100994988590987e-17, 2.1103468793130292e-19, 3.0058381695419005e-21, 3.1221586704548222e-23, 2.2869723112554000e-25, 1.1322450883416873e-27, 3.5851439372856567e-30, 6.7403932258482419e-33, 6.7773962594083074e-36, 3.1165183087668226e-39, 5.0715303590978945e-43, 1.7954384298058220e-47, 4.0345457983197185e-53}, + {5.9856092466933942e-54, 2.8688168912332612e-48, 8.6328113687066931e-44, 5.6216222243961033e-40, 1.2913788025337670e-36, 1.3538428107777088e-33, 7.5798318604310986e-31, 2.5172693785018537e-28, 5.3430216209626571e-26, 7.6615923972613038e-24, 7.7454736745174986e-22, 5.7095124323046545e-20, 3.1533210598724162e-18, 1.3342434681247850e-16, 4.4061175397950322e-15, 1.1534952560183679e-13, 2.4259623022354819e-12, 4.1457910332486197e-11, 5.8137344484280929e-10, 6.7473312881324816e-09, 6.5292934189484169e-08, 5.3024856335051780e-07, 3.6344904047182492e-06, 2.1131233267306212e-05, 1.0466994285006789e-04, 4.4339999663332603e-04, 1.6117353953558448e-03, 5.0416367476379337e-03, 1.3605327560618538e-02, 3.1741278190050709e-02, 6.4133640985369098e-02, 1.1238816501981971e-01, 1.7101015293603891e-01, 2.2612844383734518e-01, 2.5999310620316046e-01, 2.5999310620316046e-01, 2.2612844383734518e-01, 1.7101015293603891e-01, 1.1238816501981971e-01, 6.4133640985369098e-02, 3.1741278190050709e-02, 
1.3605327560618538e-02, 5.0416367476379337e-03, 1.6117353953558448e-03, 4.4339999663332603e-04, 1.0466994285006789e-04, 2.1131233267306212e-05, 3.6344904047182492e-06, 5.3024856335051780e-07, 6.5292934189484169e-08, 6.7473312881324816e-09, 5.8137344484280929e-10, 4.1457910332486197e-11, 2.4259623022354819e-12, 1.1534952560183679e-13, 4.4061175397950322e-15, 1.3342434681247850e-16, 3.1533210598724162e-18, 5.7095124323046545e-20, 7.7454736745174986e-22, 7.6615923972613038e-24, 5.3430216209626571e-26, 2.5172693785018537e-28, 7.5798318604310986e-31, 1.3538428107777088e-33, 1.2913788025337670e-36, 5.6216222243961033e-40, 8.6328113687066931e-44, 2.8688168912332612e-48, 5.9856092466933942e-54}, + {8.8724008630942898e-55, 4.5764336789075590e-49, 1.4661118038718265e-44, 1.0110662325251453e-40, 2.4518913381167023e-37, 2.7079520650401302e-34, 1.5949041770861358e-31, 5.5663539164360154e-29, 1.2407683405472676e-26, 1.8675901831656063e-24, 1.9812686302523555e-22, 1.5323733317270031e-20, 8.8796168315044913e-19, 3.9423704233496054e-17, 1.3663145902821194e-15, 3.7548696680658361e-14, 8.2926419028182389e-13, 1.4887527478920644e-11, 2.1942330999743630e-10, 2.6779761171072075e-09, 2.7267833016134243e-08, 2.3316541347245123e-07, 1.6840172739131284e-06, 1.0325059077129107e-05, 5.3979561403797981e-05, 2.4157232920980263e-04, 9.2859086457657155e-04, 3.0750221611855083e-03, 8.7949222136699829e-03, 2.1773546737772456e-02, 4.6746077576099838e-02, 8.7165933915876653e-02, 1.4134154505606342e-01, 1.9949184460058894e-01, 2.4524663594920437e-01, 2.6270996412870795e-01, 2.4524663594920437e-01, 1.9949184460058894e-01, 1.4134154505606342e-01, 8.7165933915876653e-02, 4.6746077576099838e-02, 2.1773546737772456e-02, 8.7949222136699829e-03, 3.0750221611855083e-03, 9.2859086457657155e-04, 2.4157232920980263e-04, 5.3979561403797981e-05, 1.0325059077129107e-05, 1.6840172739131284e-06, 2.3316541347245123e-07, 2.7267833016134243e-08, 2.6779761171072075e-09, 2.1942330999743630e-10, 1.4887527478920644e-11, 8.2926419028182389e-13, 3.7548696680658361e-14, 1.3663145902821194e-15, 3.9423704233496054e-17, 8.8796168315044913e-19, 1.5323733317270031e-20, 1.9812686302523555e-22, 1.8675901831656063e-24, 1.2407683405472676e-26, 5.5663539164360154e-29, 1.5949041770861358e-31, 2.7079520650401302e-34, 2.4518913381167023e-37, 1.0110662325251453e-40, 1.4661118038718265e-44, 4.5764336789075590e-49, 8.8724008630942898e-55}, + {1.3140200635759138e-55, 7.2888760600699374e-50, 2.4843223778452137e-45, 1.8132393982490279e-41, 4.6392142210443241e-38, 5.3944805170903356e-35, 3.3402996562304201e-32, 1.2244058147622891e-29, 2.8644542990100537e-27, 4.5229388132654362e-25, 5.0319775906458023e-23, 4.0808135693241622e-21, 2.4793963766166604e-19, 1.1542667556483506e-17, 4.1953024073558444e-16, 1.2094094650388391e-14, 2.8026693289669983e-13, 5.2816149659273362e-12, 8.1749589868013172e-11, 1.0483088420892190e-09, 1.1221758911496157e-08, 1.0094315168892042e-07, 7.6747273751899334e-07, 4.9572490208738044e-06, 2.7325285268221331e-05, 1.2904853700328517e-04, 5.2397811821303173e-04, 1.8346879155592502e-03, 5.5544997811365557e-03, 1.4572966566065007e-02, 3.3197971745097789e-02, 6.5771771087721614e-02, 1.1347683695023611e-01, 1.7067412122709255e-01, 2.2395285395671372e-01, 2.5650502626034899e-01, 2.5650502626034899e-01, 2.2395285395671372e-01, 1.7067412122709255e-01, 1.1347683695023611e-01, 6.5771771087721614e-02, 3.3197971745097789e-02, 1.4572966566065007e-02, 5.5544997811365557e-03, 1.8346879155592502e-03, 5.2397811821303173e-04, 1.2904853700328517e-04, 
2.7325285268221331e-05, 4.9572490208738044e-06, 7.6747273751899334e-07, 1.0094315168892042e-07, 1.1221758911496157e-08, 1.0483088420892190e-09, 8.1749589868013172e-11, 5.2816149659273362e-12, 2.8026693289669983e-13, 1.2094094650388391e-14, 4.1953024073558444e-16, 1.1542667556483506e-17, 2.4793963766166604e-19, 4.0808135693241622e-21, 5.0319775906458023e-23, 4.5229388132654362e-25, 2.8644542990100537e-27, 1.2244058147622891e-29, 3.3402996562304201e-32, 5.3944805170903356e-35, 4.6392142210443241e-38, 1.8132393982490279e-41, 2.4843223778452137e-45, 7.2888760600699374e-50, 1.3140200635759138e-55}, + {1.9444622230234342e-56, 1.1590959320007943e-50, 4.2004740254236363e-46, 3.2427888315124139e-42, 8.7482381269078278e-39, 1.0703814562172511e-35, 6.9640785632250250e-33, 2.6794918791965289e-30, 6.5751748013020018e-28, 1.0884570120961755e-25, 1.2691625603633038e-23, 1.0785433636173751e-21, 6.8663309606135431e-20, 3.3495959873429566e-18, 1.2758954586949696e-16, 3.8555242748433330e-15, 9.3683796134838034e-14, 1.8518064702871477e-12, 3.0076840393622017e-11, 4.0491316393465588e-10, 4.5529601729418187e-09, 4.3045713494096147e-08, 3.4420843484101099e-07, 2.3399997161498497e-06, 1.3586053874631190e-05, 6.7639272623934896e-05, 2.8977924888595573e-04, 1.0716281610696826e-03, 3.4300764387647066e-03, 9.5249775695009746e-03, 2.2993107110436964e-02, 4.8332960504809425e-02, 8.8595835021937436e-02, 1.4177503840549346e-01, 1.9823513131250983e-01, 2.4233883536068279e-01, 2.5911119749680783e-01, 2.4233883536068279e-01, 1.9823513131250983e-01, 1.4177503840549346e-01, 8.8595835021937436e-02, 4.8332960504809425e-02, 2.2993107110436964e-02, 9.5249775695009746e-03, 3.4300764387647066e-03, 1.0716281610696826e-03, 2.8977924888595573e-04, 6.7639272623934896e-05, 1.3586053874631190e-05, 2.3399997161498497e-06, 3.4420843484101099e-07, 4.3045713494096147e-08, 4.5529601729418187e-09, 4.0491316393465588e-10, 3.0076840393622017e-11, 1.8518064702871477e-12, 9.3683796134838034e-14, 3.8555242748433330e-15, 1.2758954586949696e-16, 3.3495959873429566e-18, 6.8663309606135431e-20, 1.0785433636173751e-21, 1.2691625603633038e-23, 1.0884570120961755e-25, 6.5751748013020018e-28, 2.6794918791965289e-30, 6.9640785632250250e-33, 1.0703814562172511e-35, 8.7482381269078278e-39, 3.2427888315124139e-42, 4.2004740254236363e-46, 1.1590959320007943e-50, 1.9444622230234342e-56}, + {2.8750275727924455e-57, 1.8404303463880958e-51, 7.0869721836637015e-47, 5.7836152624626662e-43, 1.6442414677949876e-39, 2.1156822123723002e-36, 1.4455024437001548e-33, 5.8345675304162565e-31, 1.5008973228057372e-28, 2.6033049503053747e-26, 3.1794998286708481e-24, 2.8296018603048517e-22, 1.8863727782270500e-20, 9.6365678168475681e-19, 3.8443430855465295e-17, 1.2168900581339222e-15, 3.0981886195643928e-14, 6.4188571380482567e-13, 1.0931590521335809e-11, 1.5438256127811863e-10, 1.8219509376836273e-09, 1.8089441547445238e-08, 1.5199860776329110e-07, 1.0865550828327965e-06, 6.6384540614267379e-06, 3.4806103755504017e-05, 1.5717296348585755e-04, 6.1320670594855193e-04, 2.0727405318696742e-03, 6.0846962089377739e-03, 1.5545067446090813e-02, 3.4623986133714632e-02, 6.7334688728284187e-02, 1.1447404988622129e-01, 1.7029372558220210e-01, 2.2183130791546488e-01, 2.5315358016164968e-01, 2.5315358016164968e-01, 2.2183130791546488e-01, 1.7029372558220210e-01, 1.1447404988622129e-01, 6.7334688728284187e-02, 3.4623986133714632e-02, 1.5545067446090813e-02, 6.0846962089377739e-03, 2.0727405318696742e-03, 6.1320670594855193e-04, 1.5717296348585755e-04, 3.4806103755504017e-05, 
6.6384540614267379e-06, 1.0865550828327965e-06, 1.5199860776329110e-07, 1.8089441547445238e-08, 1.8219509376836273e-09, 1.5438256127811863e-10, 1.0931590521335809e-11, 6.4188571380482567e-13, 3.0981886195643928e-14, 1.2168900581339222e-15, 3.8443430855465295e-17, 9.6365678168475681e-19, 1.8863727782270500e-20, 2.8296018603048517e-22, 3.1794998286708481e-24, 2.6033049503053747e-26, 1.5008973228057372e-28, 5.8345675304162565e-31, 1.4455024437001548e-33, 2.1156822123723002e-36, 1.6442414677949876e-39, 5.7836152624626662e-43, 7.0869721836637015e-47, 1.8404303463880958e-51, 2.8750275727924455e-57}, + {4.2475403944235563e-58, 2.9179360026092975e-52, 1.1932102199256711e-47, 1.0287869477179010e-43, 3.0804483195558287e-40, 4.1660662393372492e-37, 2.9874354212480538e-34, 1.2642951712027768e-31, 3.4074887052468082e-29, 6.1891475308279466e-27, 7.9129760878551264e-25, 7.3704570195170156e-23, 5.1421792395941592e-21, 2.7491455795179009e-19, 1.1478770068871720e-17, 3.8036255791351365e-16, 1.0139883410013013e-14, 2.2003659842875677e-13, 3.9263803072919160e-12, 5.8125114501326569e-11, 7.1939577649798698e-10, 7.4947098368604126e-09, 6.6118669991811844e-08, 4.9655916977568413e-07, 3.1895150634568674e-06, 1.7594600636505465e-05, 8.3660559585237938e-05, 3.4399085887424382e-04, 1.2265540580803703e-03, 3.8020227522452721e-03, 1.0267477817470316e-02, 2.4201116175054306e-02, 4.9866450712457495e-02, 8.9938636734284719e-02, 1.4213547000895149e-01, 1.9698140672796691e-01, 2.3953059321276829e-01, 2.5565638153018372e-01, 2.3953059321276829e-01, 1.9698140672796691e-01, 1.4213547000895149e-01, 8.9938636734284719e-02, 4.9866450712457495e-02, 2.4201116175054306e-02, 1.0267477817470316e-02, 3.8020227522452721e-03, 1.2265540580803703e-03, 3.4399085887424382e-04, 8.3660559585237938e-05, 1.7594600636505465e-05, 3.1895150634568674e-06, 4.9655916977568413e-07, 6.6118669991811844e-08, 7.4947098368604126e-09, 7.1939577649798698e-10, 5.8125114501326569e-11, 3.9263803072919160e-12, 2.2003659842875677e-13, 1.0139883410013013e-14, 3.8036255791351365e-16, 1.1478770068871720e-17, 2.7491455795179009e-19, 5.1421792395941592e-21, 7.3704570195170156e-23, 7.9129760878551264e-25, 6.1891475308279466e-27, 3.4074887052468082e-29, 1.2642951712027768e-31, 2.9874354212480538e-34, 4.1660662393372492e-37, 3.0804483195558287e-40, 1.0287869477179010e-43, 1.1932102199256711e-47, 2.9179360026092975e-52, 4.2475403944235563e-58}, + {6.2703784480694208e-59, 4.6195869591589206e-53, 2.0048780070281735e-48, 1.8252564480138844e-44, 5.7530583850395732e-41, 8.1734457929853433e-38, 6.1482072743115000e-35, 2.7266171296454801e-32, 7.6951427469170207e-30, 1.4628329302419815e-27, 1.9567429736662381e-25, 1.9064520843927174e-23, 1.3911463582850983e-21, 7.7788589853855015e-20, 3.3973551764091287e-18, 1.1777094633733961e-16, 3.2852244087268414e-15, 7.4617858659486148e-14, 1.3941377125806722e-12, 2.1618054623290873e-11, 2.8038686778996357e-10, 3.0626832246516170e-09, 2.8344707359971740e-08, 2.2345250981121576e-07, 1.5076264668245447e-06, 8.7420837008076225e-06, 4.3727831577546010e-05, 1.8929762178671690e-04, 7.1126362441827832e-04, 2.3254926708230895e-03, 6.6306857259903285e-03, 1.6519285009994170e-02, 3.6018246356681126e-02, 6.8825257394126579e-02, 1.1538643824874496e-01, 1.6987443543948361e-01, 2.1976221051624720e-01, 2.4993008013933904e-01, 2.4993008013933904e-01, 2.1976221051624720e-01, 1.6987443543948361e-01, 1.1538643824874496e-01, 6.8825257394126579e-02, 3.6018246356681126e-02, 1.6519285009994170e-02, 6.6306857259903285e-03, 2.3254926708230895e-03, 
7.1126362441827832e-04, 1.8929762178671690e-04, 4.3727831577546010e-05, 8.7420837008076225e-06, 1.5076264668245447e-06, 2.2345250981121576e-07, 2.8344707359971740e-08, 3.0626832246516170e-09, 2.8038686778996357e-10, 2.1618054623290873e-11, 1.3941377125806722e-12, 7.4617858659486148e-14, 3.2852244087268414e-15, 1.1777094633733961e-16, 3.3973551764091287e-18, 7.7788589853855015e-20, 1.3911463582850983e-21, 1.9064520843927174e-23, 1.9567429736662381e-25, 1.4628329302419815e-27, 7.6951427469170207e-30, 2.7266171296454801e-32, 6.1482072743115000e-35, 8.1734457929853433e-38, 5.7530583850395732e-41, 1.8252564480138844e-44, 2.0048780070281735e-48, 4.6195869591589206e-53, 6.2703784480694208e-59}, + {9.2494982219878178e-60, 7.3032412797370021e-54, 3.3619702764053515e-49, 3.2301359968277160e-45, 1.0711545066976377e-41, 1.5978109726748470e-38, 1.2601224816663446e-35, 5.8531004842511326e-33, 1.7288387765755684e-30, 3.4377966700873511e-28, 4.8085158087733593e-26, 4.8977600911363362e-24, 3.7358525784193159e-22, 2.1835856119475800e-20, 9.9692083309512582e-19, 3.6131260931106432e-17, 1.0539595203667931e-15, 2.5039803161790368e-14, 4.8951189293680036e-13, 7.9452485251106098e-12, 1.0791168663568375e-10, 1.2349280054594245e-09, 1.1980389793173491e-08, 9.9059473817560524e-08, 7.0144004582899757e-07, 4.2716344520701190e-06, 2.2456298833005965e-05, 1.0225119927510267e-04, 4.0444792546155510e-04, 1.3933051824601373e-03, 4.1899160223157692e-03, 1.1020394106207396e-02, 2.5395651463267135e-02, 5.1347296466210786e-02, 9.1199284542961764e-02, 1.4242938912607919e-01, 1.9573290855318265e-01, 2.3681645470932214e-01, 2.5233616878303844e-01, 2.3681645470932214e-01, 1.9573290855318265e-01, 1.4242938912607919e-01, 9.1199284542961764e-02, 5.1347296466210786e-02, 2.5395651463267135e-02, 1.1020394106207396e-02, 4.1899160223157692e-03, 1.3933051824601373e-03, 4.0444792546155510e-04, 1.0225119927510267e-04, 2.2456298833005965e-05, 4.2716344520701190e-06, 7.0144004582899757e-07, 9.9059473817560524e-08, 1.1980389793173491e-08, 1.2349280054594245e-09, 1.0791168663568375e-10, 7.9452485251106098e-12, 4.8951189293680036e-13, 2.5039803161790368e-14, 1.0539595203667931e-15, 3.6131260931106432e-17, 9.9692083309512582e-19, 2.1835856119475800e-20, 3.7358525784193159e-22, 4.8977600911363362e-24, 4.8085158087733593e-26, 3.4377966700873511e-28, 1.7288387765755684e-30, 5.8531004842511326e-33, 1.2601224816663446e-35, 1.5978109726748470e-38, 1.0711545066976377e-41, 3.2301359968277160e-45, 3.3619702764053515e-49, 7.3032412797370021e-54, 9.2494982219878178e-60}, + {1.3633833300020003e-60, 1.1529935596394474e-54, 5.6267053619996539e-50, 5.7021852730627675e-46, 1.9884044482437607e-42, 3.1126000220972273e-39, 2.5723712996578709e-36, 1.2507839519363874e-33, 3.8645811266244288e-31, 8.0343028910788638e-29, 1.1744629608333255e-26, 1.2499248537371909e-24, 9.9604705585733289e-23, 6.0820538195831629e-21, 2.9010288549385599e-19, 1.0985973435650033e-17, 3.3490712214349002e-16, 8.3173189675616162e-15, 1.7001957813551391e-13, 2.8865604962002583e-12, 4.1025651516570179e-11, 4.9151895982232822e-10, 4.9945929385487262e-09, 4.3280820924798808e-08, 3.2138167253993492e-07, 2.0537028582318748e-06, 1.1336998395832870e-05, 5.4246298804851998e-05, 2.2565979982055683e-04, 8.1827502694065590e-04, 2.5924825356809312e-03, 7.1909572204228826e-03, 1.7493488621590211e-02, 3.7379964305875417e-02, 7.0246334697072893e-02, 1.1622011916471321e-01, 1.6942108833811217e-01, 2.1774395641632643e-01, 2.4682659213344721e-01, 2.4682659213344721e-01, 2.1774395641632643e-01, 
1.6942108833811217e-01, 1.1622011916471321e-01, 7.0246334697072893e-02, 3.7379964305875417e-02, 1.7493488621590211e-02, 7.1909572204228826e-03, 2.5924825356809312e-03, 8.1827502694065590e-04, 2.2565979982055683e-04, 5.4246298804851998e-05, 1.1336998395832870e-05, 2.0537028582318748e-06, 3.2138167253993492e-07, 4.3280820924798808e-08, 4.9945929385487262e-09, 4.9151895982232822e-10, 4.1025651516570179e-11, 2.8865604962002583e-12, 1.7001957813551391e-13, 8.3173189675616162e-15, 3.3490712214349002e-16, 1.0985973435650033e-17, 2.9010288549385599e-19, 6.0820538195831629e-21, 9.9604705585733289e-23, 1.2499248537371909e-24, 1.1744629608333255e-26, 8.0343028910788638e-29, 3.8645811266244288e-31, 1.2507839519363874e-33, 2.5723712996578709e-36, 3.1126000220972273e-39, 1.9884044482437607e-42, 5.7021852730627675e-46, 5.6267053619996539e-50, 1.1529935596394474e-54, 1.3633833300020003e-60}, + {2.0081683556012850e-61, 1.8178162551753148e-55, 9.3991295413480502e-51, 1.0041756149620635e-46, 3.6803258437880526e-43, 6.0427427569366402e-40, 5.2306055163095921e-37, 2.6610906118130193e-34, 8.5963278572927171e-32, 1.8674862726204788e-29, 2.8515675215544967e-27, 3.1692521338966959e-25, 2.6370712657175804e-23, 1.6812873372296920e-21, 8.3735311688359131e-20, 3.3113569430928055e-18, 1.0543294854998749e-16, 2.7353878696405856e-15, 5.8430667493651178e-14, 1.0369888470878425e-12, 1.5412284525394938e-11, 1.9317783962508064e-10, 2.0546146868576734e-09, 1.8645196384240901e-08, 1.4507140693109981e-07, 9.7197681673778427e-07, 5.6294099461087388e-06, 2.8280775482696968e-05, 1.2361276486093516e-04, 4.7135721157038344e-04, 1.5717570688376730e-03, 4.5927856260634522e-03, 1.1781817217642985e-02, 2.6575061210558436e-02, 5.2776416531222697e-02, 9.2382459021801644e-02, 1.4266271740044509e-01, 1.9449150803253865e-01, 2.3419136519193123e-01, 2.4914204006426580e-01, 2.3419136519193123e-01, 1.9449150803253865e-01, 1.4266271740044509e-01, 9.2382459021801644e-02, 5.2776416531222697e-02, 2.6575061210558436e-02, 1.1781817217642985e-02, 4.5927856260634522e-03, 1.5717570688376730e-03, 4.7135721157038344e-04, 1.2361276486093516e-04, 2.8280775482696968e-05, 5.6294099461087388e-06, 9.7197681673778427e-07, 1.4507140693109981e-07, 1.8645196384240901e-08, 2.0546146868576734e-09, 1.9317783962508064e-10, 1.5412284525394938e-11, 1.0369888470878425e-12, 5.8430667493651178e-14, 2.7353878696405856e-15, 1.0543294854998749e-16, 3.3113569430928055e-18, 8.3735311688359131e-20, 1.6812873372296920e-21, 2.6370712657175804e-23, 3.1692521338966959e-25, 2.8515675215544967e-27, 1.8674862726204788e-29, 8.5963278572927171e-32, 2.6610906118130193e-34, 5.2306055163095921e-37, 6.0427427569366402e-40, 3.6803258437880526e-43, 1.0041756149620635e-46, 9.3991295413480502e-51, 1.8178162551753148e-55, 2.0081683556012850e-61}, + {2.9557746032980847e-62, 2.8621845857326094e-56, 1.5671564937749406e-51, 1.7642057043388989e-47, 6.7924330161546228e-44, 1.1692076998197370e-40, 1.0595146928277511e-37, 5.6371927794771982e-35, 1.9029906100319421e-32, 4.3178173430220290e-30, 6.8834570436337191e-28, 7.9851871981733358e-26, 6.9341198195289992e-24, 4.6134710745539610e-22, 2.3978402756179082e-20, 9.8965289896201795e-19, 3.2891620143645649e-17, 8.9094610406663587e-16, 1.9875208346984979e-14, 3.6848498201591518e-13, 5.7232797185020048e-12, 7.4997199146582833e-11, 8.3430559260867452e-10, 7.9229232891502669e-09, 6.4544629501196559e-08, 4.5305355658612973e-07, 2.7507246274012478e-06, 1.4496444191129691e-05, 6.6517412418153150e-05, 2.6647788669335111e-04, 9.3431768696117658e-04, 
2.8731975330897244e-03, 7.7640394053996482e-03, 1.8465751154427907e-02, 3.8708599914838084e-02, 7.1600747691843394e-02, 1.1698073380429937e-01, 1.6893796927590174e-01, 2.1577494627433041e-01, 2.4383585380721171e-01, 2.4383585380721171e-01, 2.1577494627433041e-01, 1.6893796927590174e-01, 1.1698073380429937e-01, 7.1600747691843394e-02, 3.8708599914838084e-02, 1.8465751154427907e-02, 7.7640394053996482e-03, 2.8731975330897244e-03, 9.3431768696117658e-04, 2.6647788669335111e-04, 6.6517412418153150e-05, 1.4496444191129691e-05, 2.7507246274012478e-06, 4.5305355658612973e-07, 6.4544629501196559e-08, 7.9229232891502669e-09, 8.3430559260867452e-10, 7.4997199146582833e-11, 5.7232797185020048e-12, 3.6848498201591518e-13, 1.9875208346984979e-14, 8.9094610406663587e-16, 3.2891620143645649e-17, 9.8965289896201795e-19, 2.3978402756179082e-20, 4.6134710745539610e-22, 6.9341198195289992e-24, 7.9851871981733358e-26, 6.8834570436337191e-28, 4.3178173430220290e-30, 1.9029906100319421e-32, 5.6371927794771982e-35, 1.0595146928277511e-37, 1.1692076998197370e-40, 6.7924330161546228e-44, 1.7642057043388989e-47, 1.5671564937749406e-51, 2.8621845857326094e-56, 2.9557746032980847e-62}, + {4.3474845662635656e-63, 4.5007211282481250e-57, 2.6082298377217237e-52, 3.0923065202135848e-48, 1.2501139575481106e-44, 2.2549072526181938e-41, 2.1381421402290456e-38, 1.1891459359657575e-35, 4.1929734266120318e-33, 9.9316879214599191e-31, 1.6522218331862429e-28, 1.9995631822582135e-26, 1.8111771920709664e-24, 1.2568606780858601e-22, 6.8135449122040696e-21, 2.9333425183931587e-19, 1.0170733571061197e-17, 2.8746837567251872e-16, 6.6931269313103119e-15, 1.2955204220510481e-13, 2.1014796586114549e-12, 2.8770619925838150e-11, 3.3453423873608581e-10, 3.3221419859886598e-09, 2.8316197051829986e-08, 2.0807020978532919e-07, 1.3232911149144323e-06, 7.3096953196703488e-06, 3.5180651840387621e-05, 1.4793874164100584e-04, 5.4489132381766836e-04, 1.7617305058156662e-03, 5.0096471937478518e-03, 1.2549959027149010e-02, 2.7737938219946516e-02, 5.4154863728585530e-02, 9.3492582227695284e-02, 1.4284081532237788e-01, 1.9325876496080116e-01, 2.3165063428423818e-01, 2.4606621240915144e-01, 2.3165063428423818e-01, 1.9325876496080116e-01, 1.4284081532237788e-01, 9.3492582227695284e-02, 5.4154863728585530e-02, 2.7737938219946516e-02, 1.2549959027149010e-02, 5.0096471937478518e-03, 1.7617305058156662e-03, 5.4489132381766836e-04, 1.4793874164100584e-04, 3.5180651840387621e-05, 7.3096953196703488e-06, 1.3232911149144323e-06, 2.0807020978532919e-07, 2.8316197051829986e-08, 3.3221419859886598e-09, 3.3453423873608581e-10, 2.8770619925838150e-11, 2.1014796586114549e-12, 1.2955204220510481e-13, 6.6931269313103119e-15, 2.8746837567251872e-16, 1.0170733571061197e-17, 2.9333425183931587e-19, 6.8135449122040696e-21, 1.2568606780858601e-22, 1.8111771920709664e-24, 1.9995631822582135e-26, 1.6522218331862429e-28, 9.9316879214599191e-31, 4.1929734266120318e-33, 1.1891459359657575e-35, 2.1381421402290456e-38, 2.2549072526181938e-41, 1.2501139575481106e-44, 3.0923065202135848e-48, 2.6082298377217237e-52, 4.5007211282481250e-57, 4.3474845662635656e-63}, + {6.3900846179356680e-64, 7.0683040654635165e-58, 4.3331628156671634e-53, 5.4079341607686490e-49, 2.2944871434121997e-45, 4.3348761233697938e-42, 4.2990971075609177e-39, 2.4981476114804934e-36, 9.1963660505689469e-34, 2.2729303616459228e-31, 3.9439087540838148e-29, 4.9770348134757658e-27, 4.7000291498915663e-25, 3.4001352768447133e-23, 1.9215431432889354e-21, 8.6245303609378974e-20, 3.1179884254074804e-18, 
9.1905096520186247e-17, 2.2320593837338383e-15, 4.5078424718756776e-14, 7.6319902510210860e-13, 1.0909613183044814e-11, 1.3250311215425566e-10, 1.3750709116496800e-09, 1.2253983047111421e-08, 9.4193245001969527e-08, 6.2702226836829128e-07, 3.6275480887435970e-06, 1.8297490531324017e-05, 8.0695629747031377e-05, 3.1195003282250721e-04, 1.0594217898549964e-03, 3.1670839271362595e-03, 8.3485090964466376e-03, 1.9434337138642246e-02, 4.0003826723007251e-02, 7.2891273924741068e-02, 1.1767348589341342e-01, 1.6842887908813681e-01, 2.1385359898330383e-01, 2.4095120319809515e-01, 2.4095120319809515e-01, 2.1385359898330383e-01, 1.6842887908813681e-01, 1.1767348589341342e-01, 7.2891273924741068e-02, 4.0003826723007251e-02, 1.9434337138642246e-02, 8.3485090964466376e-03, 3.1670839271362595e-03, 1.0594217898549964e-03, 3.1195003282250721e-04, 8.0695629747031377e-05, 1.8297490531324017e-05, 3.6275480887435970e-06, 6.2702226836829128e-07, 9.4193245001969527e-08, 1.2253983047111421e-08, 1.3750709116496800e-09, 1.3250311215425566e-10, 1.0909613183044814e-11, 7.6319902510210860e-13, 4.5078424718756776e-14, 2.2320593837338383e-15, 9.1905096520186247e-17, 3.1179884254074804e-18, 8.6245303609378974e-20, 1.9215431432889354e-21, 3.4001352768447133e-23, 4.7000291498915663e-25, 4.9770348134757658e-27, 3.9439087540838148e-29, 2.2729303616459228e-31, 9.1963660505689469e-34, 2.4981476114804934e-36, 4.2990971075609177e-39, 4.3348761233697938e-42, 2.2944871434121997e-45, 5.4079341607686490e-49, 4.3331628156671634e-53, 7.0683040654635165e-58, 6.3900846179356680e-64}, + {9.3860543034002427e-65, 1.1086857074759456e-58, 7.1863132033979924e-54, 9.4366281490374702e-50, 4.2000954414179859e-46, 8.3074192361133957e-43, 8.6131947009295395e-40, 5.2269922366851146e-37, 2.0080025620200044e-34, 5.1761145427794598e-32, 9.3634938532685839e-30, 1.2315531785339706e-27, 1.2119297214386231e-25, 9.1354157424215784e-24, 5.3793647773372928e-22, 2.5158646274622071e-20, 9.4786222458398701e-19, 2.9120616998176371e-17, 7.3730974398680154e-16, 1.5527783781178122e-14, 2.7422533133249144e-13, 4.0903372275217891e-12, 5.1859074150534959e-11, 5.6202961151004291e-10, 5.2329996549801057e-09, 4.2048817138133669e-08, 2.9276307349429699e-07, 1.7725651070157471e-06, 9.3629045679147633e-06, 4.3270500380986358e-05, 1.7541290219541701e-04, 6.2518907824834233e-04, 1.9629978285824797e-03, 5.4395127563656737e-03, 1.3323152028369395e-02, 2.8883095437764291e-02, 5.5483794234023616e-02, 9.4533826153130718e-02, 1.4296854105472981e-01, 1.9203597399663150e-01, 2.2918990372570339e-01, 2.4310155924759538e-01, 2.2918990372570339e-01, 1.9203597399663150e-01, 1.4296854105472981e-01, 9.4533826153130718e-02, 5.5483794234023616e-02, 2.8883095437764291e-02, 1.3323152028369395e-02, 5.4395127563656737e-03, 1.9629978285824797e-03, 6.2518907824834233e-04, 1.7541290219541701e-04, 4.3270500380986358e-05, 9.3629045679147633e-06, 1.7725651070157471e-06, 2.9276307349429699e-07, 4.2048817138133669e-08, 5.2329996549801057e-09, 5.6202961151004291e-10, 5.1859074150534959e-11, 4.0903372275217891e-12, 2.7422533133249144e-13, 1.5527783781178122e-14, 7.3730974398680154e-16, 2.9120616998176371e-17, 9.4786222458398701e-19, 2.5158646274622071e-20, 5.3793647773372928e-22, 9.1354157424215784e-24, 1.2119297214386231e-25, 1.2315531785339706e-27, 9.3634938532685839e-30, 5.1761145427794598e-32, 2.0080025620200044e-34, 5.2269922366851146e-37, 8.6131947009295395e-40, 8.3074192361133957e-43, 4.2000954414179859e-46, 9.4366281490374702e-50, 7.1863132033979924e-54, 1.1086857074759456e-58, 
9.3860543034002427e-65}, + {1.3777591125358009e-65, 1.7368921661372738e-59, 1.1897755678170730e-54, 1.6430818957997274e-50, 7.6682284731962107e-47, 1.5871807417910754e-43, 1.7196165366502119e-40, 1.0893693911932259e-37, 4.3652562606795252e-35, 1.1730726684546622e-32, 2.2113391943206024e-30, 3.0299940661257904e-28, 3.1056775475544893e-26, 2.4381144772606556e-24, 1.4951761449964922e-22, 7.2828478980006098e-21, 2.8579464976920660e-19, 9.1468094747747248e-18, 2.4130464210357486e-16, 5.2963672903976192e-15, 9.7511119835367196e-14, 1.5167942114683800e-12, 2.0061976506906991e-11, 2.2691700569827051e-10, 2.2060316533485200e-09, 1.8517359090727501e-08, 1.3475150826592070e-07, 8.5321092475315403e-07, 4.7158906386790063e-06, 2.2820419642920848e-05, 9.6932540256516268e-05, 3.6225322463531061e-04, 1.1935740511675038e-03, 3.4735556703184827e-03, 8.9429974621500780e-03, 2.0397690515966099e-02, 4.1265501456996284e-02, 7.4120627126827321e-02, 1.1830317740926892e-01, 1.6789719350811264e-01, 2.1197836082996782e-01, 2.3816651641238701e-01, 2.3816651641238701e-01, 2.1197836082996782e-01, 1.6789719350811264e-01, 1.1830317740926892e-01, 7.4120627126827321e-02, 4.1265501456996284e-02, 2.0397690515966099e-02, 8.9429974621500780e-03, 3.4735556703184827e-03, 1.1935740511675038e-03, 3.6225322463531061e-04, 9.6932540256516268e-05, 2.2820419642920848e-05, 4.7158906386790063e-06, 8.5321092475315403e-07, 1.3475150826592070e-07, 1.8517359090727501e-08, 2.2060316533485200e-09, 2.2691700569827051e-10, 2.0061976506906991e-11, 1.5167942114683800e-12, 9.7511119835367196e-14, 5.2963672903976192e-15, 2.4130464210357486e-16, 9.1468094747747248e-18, 2.8579464976920660e-19, 7.2828478980006098e-21, 1.4951761449964922e-22, 2.4381144772606556e-24, 3.1056775475544893e-26, 3.0299940661257904e-28, 2.2113391943206024e-30, 1.1730726684546622e-32, 4.3652562606795252e-35, 1.0893693911932259e-37, 1.7196165366502119e-40, 1.5871807417910754e-43, 7.6682284731962107e-47, 1.6430818957997274e-50, 1.1897755678170730e-54, 1.7368921661372738e-59, 1.3777591125358009e-65}, + {2.0210775781982523e-66, 2.7178104548553350e-60, 1.9665137729345701e-55, 2.8548162606098395e-51, 1.3964239783926081e-47, 3.0233321269411514e-44, 3.4214653414754691e-41, 2.2616501773134073e-38, 9.4491938966483701e-36, 2.6460383187052969e-33, 5.1955440071780227e-31, 7.4129920915228913e-29, 7.9104233673873923e-27, 6.4645977792616403e-25, 4.1267488231576369e-23, 2.0924604014548367e-21, 8.5484498659455372e-20, 2.8486537373202696e-18, 7.8262318929365957e-17, 1.7892942386135024e-15, 3.4323564970059177e-14, 5.5646126660265747e-13, 7.6736928514695270e-12, 9.0529279992180488e-11, 9.1835433159763121e-10, 8.0473976876877841e-09, 6.1165350599767583e-08, 4.0472265409701691e-07, 2.3390768958015489e-06, 1.1842716592385991e-05, 5.2665815424842040e-05, 2.0620794316657292e-04, 7.1235629871549161e-04, 2.1752890413369533e-03, 5.8813993717320096e-03, 1.4099847351552907e-02, 3.0009543309201085e-02, 5.6764441788522510e-02, 9.5510122550246740e-02, 1.4305030254196435e-01, 1.9082420376099704e-01, 2.2680511848210408e-01, 2.4024154090350588e-01, 2.2680511848210408e-01, 1.9082420376099704e-01, 1.4305030254196435e-01, 9.5510122550246740e-02, 5.6764441788522510e-02, 3.0009543309201085e-02, 1.4099847351552907e-02, 5.8813993717320096e-03, 2.1752890413369533e-03, 7.1235629871549161e-04, 2.0620794316657292e-04, 5.2665815424842040e-05, 1.1842716592385991e-05, 2.3390768958015489e-06, 4.0472265409701691e-07, 6.1165350599767583e-08, 8.0473976876877841e-09, 9.1835433159763121e-10, 9.0529279992180488e-11, 
7.6736928514695270e-12, 5.5646126660265747e-13, 3.4323564970059177e-14, 1.7892942386135024e-15, 7.8262318929365957e-17, 2.8486537373202696e-18, 8.5484498659455372e-20, 2.0924604014548367e-21, 4.1267488231576369e-23, 6.4645977792616403e-25, 7.9104233673873923e-27, 7.4129920915228913e-29, 5.1955440071780227e-31, 2.6460383187052969e-33, 9.4491938966483701e-36, 2.2616501773134073e-38, 3.4214653414754691e-41, 3.0233321269411514e-44, 1.3964239783926081e-47, 2.8548162606098395e-51, 1.9665137729345701e-55, 2.7178104548553350e-60, 2.0210775781982523e-66}, + {2.9629045151469795e-67, 4.2477382853725514e-61, 3.2450153429194443e-56, 4.9498576868697659e-52, 2.5365833564005594e-48, 5.7421164166187080e-45, 6.7848084979774069e-42, 4.6777688454197694e-39, 2.0368541188744583e-36, 5.9410439274149082e-34, 1.2145487671367711e-31, 1.8036964134319502e-29, 2.0029387907481775e-27, 1.7031624106751895e-25, 1.1312252871807839e-23, 5.9680464287706203e-22, 2.5370441167173625e-20, 8.7983250076979868e-19, 2.5159793892190254e-17, 5.9885746170489713e-16, 1.1962793877597350e-14, 2.0202353514446467e-13, 2.9029728072990475e-12, 3.5699318546714651e-11, 3.7765043235634349e-10, 3.4525199163835126e-09, 2.7390187380396967e-08, 1.8926893464082190e-07, 1.1429783024859512e-06, 6.0502321414091725e-06, 2.8148083952665619e-05, 1.1537557343902387e-04, 4.1754273548549582e-04, 1.3367210462102556e-03, 3.7920023944900709e-03, 9.5461945478795308e-03, 2.1354422319535372e-02, 4.2493637239894133e-02, 7.5291446661321912e-02, 1.1887424153508566e-01, 1.6734591429857545e-01, 2.1014771223832399e-01, 2.3547615304001107e-01, 2.3547615304001107e-01, 2.1014771223832399e-01, 1.6734591429857545e-01, 1.1887424153508566e-01, 7.5291446661321912e-02, 4.2493637239894133e-02, 2.1354422319535372e-02, 9.5461945478795308e-03, 3.7920023944900709e-03, 1.3367210462102556e-03, 4.1754273548549582e-04, 1.1537557343902387e-04, 2.8148083952665619e-05, 6.0502321414091725e-06, 1.1429783024859512e-06, 1.8926893464082190e-07, 2.7390187380396967e-08, 3.4525199163835126e-09, 3.7765043235634349e-10, 3.5699318546714651e-11, 2.9029728072990475e-12, 2.0202353514446467e-13, 1.1962793877597350e-14, 5.9885746170489713e-16, 2.5159793892190254e-17, 8.7983250076979868e-19, 2.5370441167173625e-20, 5.9680464287706203e-22, 1.1312252871807839e-23, 1.7031624106751895e-25, 2.0029387907481775e-27, 1.8036964134319502e-29, 1.2145487671367711e-31, 5.9410439274149082e-34, 2.0368541188744583e-36, 4.6777688454197694e-39, 6.7848084979774069e-42, 5.7421164166187080e-45, 2.5365833564005594e-48, 4.9498576868697659e-52, 3.2450153429194443e-56, 4.2477382853725514e-61, 2.9629045151469795e-67}, + {4.3409286059314790e-68, 6.6312997992786490e-62, 5.3461191685641914e-57, 8.5648796438497824e-53, 4.5963493020491623e-49, 1.0874550734954457e-45, 1.3410304270363433e-42, 9.6394008578235831e-40, 4.3726474538903734e-37, 1.3279095802116780e-34, 2.8252406271755742e-32, 4.3652040547608271e-30, 5.0421784821017282e-28, 4.4592405404333692e-26, 3.0802364578690691e-24, 1.6900522980387141e-22, 7.4723281435460732e-21, 2.6954831586023608e-19, 8.0190167662890588e-18, 1.9861050466676079e-16, 4.1293419627929950e-15, 7.2600749267287114e-14, 1.0864518129258071e-12, 1.3919025655825987e-11, 1.5345768250281952e-10, 1.4627430245954727e-09, 1.2104826477830397e-08, 8.7294781098733113e-08, 5.5045426913594814e-07, 3.0442062696033981e-06, 1.4805735210956500e-05, 6.3482017172793501e-05, 2.4048437850905004e-04, 8.0646696699679095e-04, 2.3982976827574042e-03, 6.3343363657207474e-03, 1.4878611633370075e-02, 3.1116468918540825e-02, 
5.7998096102851303e-02, 9.6425173608541109e-02, 1.4309010371318212e-01, 1.8962432993080802e-01, 2.2449250076685384e-01, 2.3748014388162669e-01, 2.2449250076685384e-01, 1.8962432993080802e-01, 1.4309010371318212e-01, 9.6425173608541109e-02, 5.7998096102851303e-02, 3.1116468918540825e-02, 1.4878611633370075e-02, 6.3343363657207474e-03, 2.3982976827574042e-03, 8.0646696699679095e-04, 2.4048437850905004e-04, 6.3482017172793501e-05, 1.4805735210956500e-05, 3.0442062696033981e-06, 5.5045426913594814e-07, 8.7294781098733113e-08, 1.2104826477830397e-08, 1.4627430245954727e-09, 1.5345768250281952e-10, 1.3919025655825987e-11, 1.0864518129258071e-12, 7.2600749267287114e-14, 4.1293419627929950e-15, 1.9861050466676079e-16, 8.0190167662890588e-18, 2.6954831586023608e-19, 7.4723281435460732e-21, 1.6900522980387141e-22, 3.0802364578690691e-24, 4.4592405404333692e-26, 5.0421784821017282e-28, 4.3652040547608271e-30, 2.8252406271755742e-32, 1.3279095802116780e-34, 4.3726474538903734e-37, 9.6394008578235831e-40, 1.3410304270363433e-42, 1.0874550734954457e-45, 4.5963493020491623e-49, 8.5648796438497824e-53, 5.3461191685641914e-57, 6.6312997992786490e-62, 4.3409286059314790e-68}, + {6.3559888804815291e-69, 1.0340740401651512e-62, 8.7937979910383930e-58, 1.4790458246095511e-53, 8.3086543388025186e-50, 2.0536646110581859e-46, 2.6420736294829820e-43, 1.9792151722732658e-40, 9.3494545988123451e-38, 2.9549868117171349e-35, 6.5403217012580441e-33, 1.0509161650154305e-30, 1.2621393343519558e-28, 1.1604245678013870e-26, 8.3325690961981494e-25, 4.7526132981917159e-23, 2.1844800072999790e-21, 8.1927992139953961e-20, 2.5344499212676499e-18, 6.5284978547093747e-17, 1.4120140414140493e-15, 2.5832194339375668e-14, 4.0236781196418743e-13, 5.3673492268073417e-12, 6.1636566122784830e-11, 6.1219783545440359e-10, 5.2813629715600939e-09, 3.9723156884506070e-08, 2.6137484697367874e-07, 1.5091682442228327e-06, 7.6676762452478550e-06, 3.4365246906918278e-05, 1.3616684443587397e-04, 4.7795191142819410e-04, 1.4887726540954260e-03, 4.1217965699966519e-03, 1.0156852342094170e-02, 2.2303298514796874e-02, 4.3688380057289714e-02, 7.6406289996198792e-02, 1.1939077299329898e-01, 1.6677771361921795e-01, 2.0836017261496417e-01, 2.3287490818454673e-01, 2.3287490818454673e-01, 2.0836017261496417e-01, 1.6677771361921795e-01, 1.1939077299329898e-01, 7.6406289996198792e-02, 4.3688380057289714e-02, 2.2303298514796874e-02, 1.0156852342094170e-02, 4.1217965699966519e-03, 1.4887726540954260e-03, 4.7795191142819410e-04, 1.3616684443587397e-04, 3.4365246906918278e-05, 7.6676762452478550e-06, 1.5091682442228327e-06, 2.6137484697367874e-07, 3.9723156884506070e-08, 5.2813629715600939e-09, 6.1219783545440359e-10, 6.1636566122784830e-11, 5.3673492268073417e-12, 4.0236781196418743e-13, 2.5832194339375668e-14, 1.4120140414140493e-15, 6.5284978547093747e-17, 2.5344499212676499e-18, 8.1927992139953961e-20, 2.1844800072999790e-21, 4.7526132981917159e-23, 8.3325690961981494e-25, 1.1604245678013870e-26, 1.2621393343519558e-28, 1.0509161650154305e-30, 6.5403217012580441e-33, 2.9549868117171349e-35, 9.3494545988123451e-38, 1.9792151722732658e-40, 2.6420736294829820e-43, 2.0536646110581859e-46, 8.3086543388025186e-50, 1.4790458246095511e-53, 8.7937979910383930e-58, 1.0340740401651512e-62, 6.3559888804815291e-69}, + {9.3008818390890492e-70, 1.6107424492415425e-63, 1.4442544724939497e-58, 2.5491245644508241e-54, 1.4983841626026885e-50, 3.8676878782479128e-47, 5.1890175350658309e-44, 4.0494895783343356e-41, 1.9912326515858821e-38, 6.5473365822875601e-36, 
1.5069227943967880e-33, 2.5171173318424500e-31, 3.1418754018247846e-29, 3.0018077177700208e-27, 2.2397389074987337e-25, 1.3273883998725675e-23, 6.3398444713628853e-22, 2.4709662681137447e-20, 7.9447537335212530e-19, 2.1273958174343124e-17, 4.7841544346425597e-16, 9.1026391570176184e-15, 1.4750035153523922e-13, 2.0475389185298693e-12, 2.4477459246779186e-11, 2.5318768471063941e-10, 2.2756270244146732e-09, 1.7840153336747013e-08, 1.2241314031279016e-07, 7.3745467763804806e-07, 3.9114112665063118e-06, 1.8311113911126260e-05, 7.5833505293245319e-05, 2.7838967822704579e-04, 9.0756465037858196e-04, 2.6316863728237144e-03, 6.7973713273565750e-03, 1.5658123034423620e-02, 3.2203216875107243e-02, 5.9186084836389256e-02, 9.7282463095519897e-02, 1.4309158549116330e-01, 1.8843706331894564e-01, 2.2224852664936542e-01, 2.3481182765823758e-01, 2.2224852664936542e-01, 1.8843706331894564e-01, 1.4309158549116330e-01, 9.7282463095519897e-02, 5.9186084836389256e-02, 3.2203216875107243e-02, 1.5658123034423620e-02, 6.7973713273565750e-03, 2.6316863728237144e-03, 9.0756465037858196e-04, 2.7838967822704579e-04, 7.5833505293245319e-05, 1.8311113911126260e-05, 3.9114112665063118e-06, 7.3745467763804806e-07, 1.2241314031279016e-07, 1.7840153336747013e-08, 2.2756270244146732e-09, 2.5318768471063941e-10, 2.4477459246779186e-11, 2.0475389185298693e-12, 1.4750035153523922e-13, 9.1026391570176184e-15, 4.7841544346425597e-16, 2.1273958174343124e-17, 7.9447537335212530e-19, 2.4709662681137447e-20, 6.3398444713628853e-22, 1.3273883998725675e-23, 2.2397389074987337e-25, 3.0018077177700208e-27, 3.1418754018247846e-29, 2.5171173318424500e-31, 1.5069227943967880e-33, 6.5473365822875601e-36, 1.9912326515858821e-38, 4.0494895783343356e-41, 5.1890175350658309e-44, 3.8676878782479128e-47, 1.4983841626026885e-50, 2.5491245644508241e-54, 1.4442544724939497e-58, 1.6107424492415425e-63, 9.3008818390890492e-70}, + {1.3602242492354518e-70, 2.5062893896000025e-64, 2.3683907001837810e-59, 4.3849662998245842e-55, 2.6959438737993594e-51, 7.2644220875022382e-48, 1.0159844536444225e-44, 8.2566412936940675e-42, 4.2246144598021515e-39, 1.4445590039286314e-36, 3.4560072588078828e-34, 5.9987185516144465e-32, 7.7788491872908368e-30, 7.7199678213751262e-28, 5.9827510138490597e-26, 3.6826729053156295e-24, 1.8269179382831153e-22, 7.3963423583597857e-21, 2.4705505922880106e-19, 6.8737797990960140e-18, 1.6064742942702627e-16, 3.1773129599637848e-15, 5.3533724861984214e-14, 7.7293031086018118e-13, 9.6137828691596686e-12, 1.0350233726240316e-10, 9.6863849248805751e-10, 7.9104166149104796e-09, 5.6567852900930044e-08, 3.5533045829496680e-07, 1.9661408494659142e-06, 9.6077760741794566e-06, 4.1557921102543606e-05, 1.5944214368480690e-04, 5.4359224952962480e-04, 1.6496055361812409e-03, 4.4622998595035017e-03, 1.0773786624642434e-02, 2.3243228173755130e-02, 4.4849988132390606e-02, 7.7467627605512762e-02, 1.1985655590239072e-01, 1.6619497260995916e-01, 2.0661430370392464e-01, 2.3035797018194470e-01, 2.3035797018194470e-01, 2.0661430370392464e-01, 1.6619497260995916e-01, 1.1985655590239072e-01, 7.7467627605512762e-02, 4.4849988132390606e-02, 2.3243228173755130e-02, 1.0773786624642434e-02, 4.4622998595035017e-03, 1.6496055361812409e-03, 5.4359224952962480e-04, 1.5944214368480690e-04, 4.1557921102543606e-05, 9.6077760741794566e-06, 1.9661408494659142e-06, 3.5533045829496680e-07, 5.6567852900930044e-08, 7.9104166149104796e-09, 9.6863849248805751e-10, 1.0350233726240316e-10, 9.6137828691596686e-12, 7.7293031086018118e-13, 5.3533724861984214e-14, 
3.1773129599637848e-15, 1.6064742942702627e-16, 6.8737797990960140e-18, 2.4705505922880106e-19, 7.3963423583597857e-21, 1.8269179382831153e-22, 3.6826729053156295e-24, 5.9827510138490597e-26, 7.7199678213751262e-28, 7.7788491872908368e-30, 5.9987185516144465e-32, 3.4560072588078828e-34, 1.4445590039286314e-36, 4.2246144598021515e-39, 8.2566412936940675e-42, 1.0159844536444225e-44, 7.2644220875022382e-48, 2.6959438737993594e-51, 4.3849662998245842e-55, 2.3683907001837810e-59, 2.5062893896000025e-64, 1.3602242492354518e-70}, + {1.9881401943823065e-71, 3.8956133082883524e-65, 3.8780907613890165e-60, 7.5287596024697438e-56, 4.8396443307633869e-52, 1.3608233728823463e-48, 1.9832504849666674e-45, 1.6777720088426207e-42, 8.9292665279968075e-40, 3.1739897948436054e-37, 7.8902747437194502e-35, 1.4225885948951900e-32, 1.9157413956045486e-30, 1.9741018729702676e-28, 1.5883597138828314e-26, 1.0150605078465530e-24, 5.2280336522983695e-23, 2.1976422794092572e-21, 7.6226000038011678e-20, 2.2026235762112506e-18, 5.3473142908729109e-17, 1.0988447789132501e-15, 1.9241100890740006e-14, 2.8879825636852583e-13, 3.7354214836948911e-12, 4.1834945759406555e-11, 4.0743696574618951e-10, 3.4640710439123456e-09, 2.5801094931709852e-08, 1.6888360171753998e-07, 9.7426112561417961e-07, 4.9661767693609960e-06, 2.2420154669404182e-05, 8.9832774694917811e-05, 3.2005763736769638e-04, 1.0156641399211783e-03, 2.8750919999992967e-03, 7.2695749936617917e-03, 1.6437166647437267e-02, 3.3269271876221002e-02, 6.0329758617287225e-02, 9.8085267668094472e-02, 1.4305806223178333e-01, 1.8726297375534309e-01, 2.2006990496442211e-01, 2.3223147790375134e-01, 2.2006990496442211e-01, 1.8726297375534309e-01, 1.4305806223178333e-01, 9.8085267668094472e-02, 6.0329758617287225e-02, 3.3269271876221002e-02, 1.6437166647437267e-02, 7.2695749936617917e-03, 2.8750919999992967e-03, 1.0156641399211783e-03, 3.2005763736769638e-04, 8.9832774694917811e-05, 2.2420154669404182e-05, 4.9661767693609960e-06, 9.7426112561417961e-07, 1.6888360171753998e-07, 2.5801094931709852e-08, 3.4640710439123456e-09, 4.0743696574618951e-10, 4.1834945759406555e-11, 3.7354214836948911e-12, 2.8879825636852583e-13, 1.9241100890740006e-14, 1.0988447789132501e-15, 5.3473142908729109e-17, 2.2026235762112506e-18, 7.6226000038011678e-20, 2.1976422794092572e-21, 5.2280336522983695e-23, 1.0150605078465530e-24, 1.5883597138828314e-26, 1.9741018729702676e-28, 1.9157413956045486e-30, 1.4225885948951900e-32, 7.8902747437194502e-35, 3.1739897948436054e-37, 8.9292665279968075e-40, 1.6777720088426207e-42, 1.9832504849666674e-45, 1.3608233728823463e-48, 4.8396443307633869e-52, 7.5287596024697438e-56, 3.8780907613890165e-60, 3.8956133082883524e-65, 1.9881401943823065e-71}, + {2.9042780793285192e-72, 6.0487911085991875e-66, 6.3408840552698421e-61, 1.2902630736739495e-56, 8.6686227081665284e-53, 2.5425883339495936e-49, 3.8599589978240019e-46, 3.3979613434850632e-43, 1.8803612804682350e-40, 6.9456401083003465e-38, 1.7934288277345599e-35, 3.3574581848271432e-33, 4.6935588729781351e-31, 5.0199340944280284e-29, 4.1917802591871942e-27, 2.7800091791127284e-25, 1.4859465721519723e-23, 6.4827357579899811e-22, 2.3339160328683892e-20, 7.0010683610304173e-19, 1.7647313333962599e-17, 3.7660828360044482e-16, 6.8501492079314778e-15, 1.0683188551398555e-13, 1.4362003723050719e-12, 1.6723593784919510e-11, 1.6940433392992703e-10, 1.4986355561051177e-09, 1.1619214278627516e-08, 7.9204993538763526e-08, 4.7607676418369897e-07, 2.5297850142327841e-06, 1.1912329247318932e-05, 4.9812715968551620e-05, 
1.8533007239769129e-04, 6.1455372697219728e-04, 1.8190665842030779e-03, 4.8128687057527163e-03, 1.1395877807434502e-02, 2.4173252104118973e-02, 4.5978813889354229e-02, 7.8477839811903538e-02, 1.2027508931592504e-01, 1.6559981501591434e-01, 2.0490871177338488e-01, 2.2792088322788109e-01, 2.2792088322788109e-01, 2.0490871177338488e-01, 1.6559981501591434e-01, 1.2027508931592504e-01, 7.8477839811903538e-02, 4.5978813889354229e-02, 2.4173252104118973e-02, 1.1395877807434502e-02, 4.8128687057527163e-03, 1.8190665842030779e-03, 6.1455372697219728e-04, 1.8533007239769129e-04, 4.9812715968551620e-05, 1.1912329247318932e-05, 2.5297850142327841e-06, 4.7607676418369897e-07, 7.9204993538763526e-08, 1.1619214278627516e-08, 1.4986355561051177e-09, 1.6940433392992703e-10, 1.6723593784919510e-11, 1.4362003723050719e-12, 1.0683188551398555e-13, 6.8501492079314778e-15, 3.7660828360044482e-16, 1.7647313333962599e-17, 7.0010683610304173e-19, 2.3339160328683892e-20, 6.4827357579899811e-22, 1.4859465721519723e-23, 2.7800091791127284e-25, 4.1917802591871942e-27, 5.0199340944280284e-29, 4.6935588729781351e-31, 3.3574581848271432e-33, 1.7934288277345599e-35, 6.9456401083003465e-38, 1.8803612804682350e-40, 3.3979613434850632e-43, 3.8599589978240019e-46, 2.5425883339495936e-49, 8.6686227081665284e-53, 1.2902630736739495e-56, 6.3408840552698421e-61, 6.0487911085991875e-66, 2.9042780793285192e-72}, + {4.2402214052951246e-73, 9.3824834253827312e-67, 1.0352869434490524e-61, 2.2072219195734482e-57, 1.5493136809731731e-53, 4.7385536981365373e-50, 7.4907905142626159e-47, 6.8594377885386513e-44, 3.9454427601608861e-41, 1.5138792655234332e-38, 4.0587293642249081e-36, 7.8867039514681461e-34, 1.1440858552314813e-31, 1.2695564683626133e-29, 1.0997785424357608e-27, 7.5663389636387123e-26, 4.1954427268621758e-24, 1.8988470634740822e-22, 7.0927393378883720e-21, 2.2077343775414074e-19, 5.7754643743238307e-18, 1.2794126341282052e-16, 2.4162049041525693e-15, 3.9134647294064509e-14, 5.4654903154799270e-13, 6.6135809154494093e-12, 6.9642829049519196e-11, 6.4070330827294555e-10, 5.1680095336388813e-09, 3.6666857282366239e-08, 2.2949550926387024e-07, 1.2704896429597333e-06, 6.2359369667152816e-06, 2.7195889773576724e-05, 1.0558960284417518e-04, 3.6560795714255136e-04, 1.1307532404246092e-03, 3.1281305245610394e-03, 7.7500451541012766e-03, 1.7214629493700298e-02, 3.4314242859408567e-02, 6.1430478646654310e-02, 9.8836668138780004e-02, 1.4299255414069270e-01, 1.8610251044108347e-01, 2.1795355826999802e-01, 2.2973436523811983e-01, 2.1795355826999802e-01, 1.8610251044108347e-01, 1.4299255414069270e-01, 9.8836668138780004e-02, 6.1430478646654310e-02, 3.4314242859408567e-02, 1.7214629493700298e-02, 7.7500451541012766e-03, 3.1281305245610394e-03, 1.1307532404246092e-03, 3.6560795714255136e-04, 1.0558960284417518e-04, 2.7195889773576724e-05, 6.2359369667152816e-06, 1.2704896429597333e-06, 2.2949550926387024e-07, 3.6666857282366239e-08, 5.1680095336388813e-09, 6.4070330827294555e-10, 6.9642829049519196e-11, 6.6135809154494093e-12, 5.4654903154799270e-13, 3.9134647294064509e-14, 2.4162049041525693e-15, 1.2794126341282052e-16, 5.7754643743238307e-18, 2.2077343775414074e-19, 7.0927393378883720e-21, 1.8988470634740822e-22, 4.1954427268621758e-24, 7.5663389636387123e-26, 1.0997785424357608e-27, 1.2695564683626133e-29, 1.1440858552314813e-31, 7.8867039514681461e-34, 4.0587293642249081e-36, 1.5138792655234332e-38, 3.9454427601608861e-41, 6.8594377885386513e-44, 7.4907905142626159e-47, 4.7385536981365373e-50, 1.5493136809731731e-53, 
2.2072219195734482e-57, 1.0352869434490524e-61, 9.3824834253827312e-67, 4.2402214052951246e-73}, + {6.1873167292600330e-74, 1.4538901947049863e-67, 1.6879603995600424e-62, 3.7691324369200321e-58, 2.7631164912569130e-54, 8.8091139694466722e-51, 1.4495623366344723e-47, 1.3802919462863240e-44, 8.2491698349502409e-42, 3.2868281039958480e-39, 9.1463558192668670e-37, 1.8440635697521863e-34, 2.7749265480534503e-32, 3.1936039953280896e-30, 2.8689474946571079e-28, 2.0467667970915599e-26, 1.1768586219954579e-24, 5.5235547418541776e-23, 2.1397391826730893e-21, 6.9081875027574077e-20, 1.8747446593629021e-18, 4.3090894092529427e-17, 8.4454653993109795e-16, 1.4199548095965024e-14, 2.0591425994119171e-13, 2.5880423909347226e-12, 2.8316127909785268e-11, 2.7076622359247714e-10, 2.2709635043178179e-09, 1.6760660123885240e-08, 1.0917307724251056e-07, 6.2927723529831787e-07, 3.2175123782368650e-06, 1.4625147457368242e-05, 5.9216205461426278e-05, 2.1395132199714689e-04, 6.9090534092963924e-04, 1.9969762869401319e-03, 5.1728592008298650e-03, 1.2022070950665343e-02, 2.5092532015162070e-02, 4.7075288211797156e-02, 7.9439215172105435e-02, 1.2064961060726002e-01, 1.6499413655200360e-01, 2.0324204888941669e-01, 2.2555951425452636e-01, 2.2555951425452636e-01, 2.0324204888941669e-01, 1.6499413655200360e-01, 1.2064961060726002e-01, 7.9439215172105435e-02, 4.7075288211797156e-02, 2.5092532015162070e-02, 1.2022070950665343e-02, 5.1728592008298650e-03, 1.9969762869401319e-03, 6.9090534092963924e-04, 2.1395132199714689e-04, 5.9216205461426278e-05, 1.4625147457368242e-05, 3.2175123782368650e-06, 6.2927723529831787e-07, 1.0917307724251056e-07, 1.6760660123885240e-08, 2.2709635043178179e-09, 2.7076622359247714e-10, 2.8316127909785268e-11, 2.5880423909347226e-12, 2.0591425994119171e-13, 1.4199548095965024e-14, 8.4454653993109795e-16, 4.3090894092529427e-17, 1.8747446593629021e-18, 6.9081875027574077e-20, 2.1397391826730893e-21, 5.5235547418541776e-23, 1.1768586219954579e-24, 2.0467667970915599e-26, 2.8689474946571079e-28, 3.1936039953280896e-30, 2.7749265480534503e-32, 1.8440635697521863e-34, 9.1463558192668670e-37, 3.2868281039958480e-39, 8.2491698349502409e-42, 1.3802919462863240e-44, 1.4495623366344723e-47, 8.8091139694466722e-51, 2.7631164912569130e-54, 3.7691324369200321e-58, 1.6879603995600424e-62, 1.4538901947049863e-67, 6.1873167292600330e-74}, + {9.0236831042710006e-75, 2.2507013266634397e-68, 2.7483088481763178e-63, 6.4250837772781210e-59, 4.9175305451785707e-55, 1.6336381889925197e-51, 2.7972683193869001e-48, 2.7688066162395138e-45, 1.7187588390664908e-42, 7.1089100355543671e-40, 2.0525562261562526e-37, 4.2923202100065188e-35, 6.6976892723501377e-33, 7.9915865248767542e-31, 7.4422174735850754e-29, 5.5036367600342068e-27, 3.2802180350829561e-25, 1.5959112775571386e-23, 6.4090652642750824e-22, 2.1453091893671746e-20, 6.0370235645633800e-19, 1.4391241588665120e-17, 2.9258946262067771e-16, 5.1042859875473367e-15, 7.6822356519145676e-14, 1.0023990331996466e-12, 1.1389647007504889e-11, 1.1314327929338598e-10, 9.8619913092006371e-10, 7.5672777489105014e-09, 5.1267694307391355e-08, 3.0750156860401326e-07, 1.6368618884967503e-06, 7.7499739996840847e-06, 3.2702654980222023e-05, 1.2321031502033358e-04, 4.1514601828721595e-04, 1.2527946608682095e-03, 3.3904013872216063e-03, 8.2379096963313674e-03, 1.7989495266934163e-02, 3.5337848644927512e-02, 6.2489606495141174e-02, 9.9539560539641095e-02, 1.4289781614596173e-01, 1.8495601933162914e-01, 2.1589660563060412e-01, 2.2731610876192898e-01, 2.1589660563060412e-01, 
1.8495601933162914e-01, 1.4289781614596173e-01, 9.9539560539641095e-02, 6.2489606495141174e-02, 3.5337848644927512e-02, 1.7989495266934163e-02, 8.2379096963313674e-03, 3.3904013872216063e-03, 1.2527946608682095e-03, 4.1514601828721595e-04, 1.2321031502033358e-04, 3.2702654980222023e-05, 7.7499739996840847e-06, 1.6368618884967503e-06, 3.0750156860401326e-07, 5.1267694307391355e-08, 7.5672777489105014e-09, 9.8619913092006371e-10, 1.1314327929338598e-10, 1.1389647007504889e-11, 1.0023990331996466e-12, 7.6822356519145676e-14, 5.1042859875473367e-15, 2.9258946262067771e-16, 1.4391241588665120e-17, 6.0370235645633800e-19, 2.1453091893671746e-20, 6.4090652642750824e-22, 1.5959112775571386e-23, 3.2802180350829561e-25, 5.5036367600342068e-27, 7.4422174735850754e-29, 7.9915865248767542e-31, 6.6976892723501377e-33, 4.2923202100065188e-35, 2.0525562261562526e-37, 7.1089100355543671e-40, 1.7187588390664908e-42, 2.7688066162395138e-45, 2.7972683193869001e-48, 1.6336381889925197e-51, 4.9175305451785707e-55, 6.4250837772781210e-59, 2.7483088481763178e-63, 2.2507013266634397e-68, 9.0236831042710006e-75}, + {1.3153371477011018e-75, 3.4808413877180985e-69, 4.4687024216810169e-64, 1.0933824737518103e-59, 8.7337328359193298e-56, 3.0222939315022155e-52, 5.3832225069701608e-49, 5.5370648197825761e-46, 3.5689385617810602e-43, 1.5317989647111863e-40, 4.5874016658583215e-38, 9.9468000592214207e-36, 1.6088732079384737e-33, 1.9895468942695488e-31, 1.9199707067949033e-29, 1.4712415549741861e-27, 9.0859656470487372e-26, 4.5806010985499422e-24, 1.9062601886875492e-22, 6.6129511764811165e-21, 1.9288793268371580e-19, 4.7668311711866824e-18, 1.0049051491574811e-16, 1.8181691304932292e-15, 2.8387673951693530e-14, 3.8436817248766534e-13, 4.5333033745839362e-12, 4.6760103859850812e-11, 4.2335937835110236e-10, 3.3755847311277282e-09, 2.3773662147574315e-08, 1.4829694050050112e-07, 8.2135445478328264e-07, 4.0482331787772522e-06, 1.7791805918220274e-05, 6.9854324445415533e-05, 2.4541809273052672e-04, 7.7269582168165273e-04, 2.1831319760967165e-03, 5.5416312894389780e-03, 1.2651375112800185e-02, 2.6000340271244451e-02, 4.8139906731089997e-02, 8.0353950080642028e-02, 1.2098311686251535e-01, 1.6437963059849708e-01, 2.0161301347923419e-01, 2.2327002349753469e-01, 2.2327002349753469e-01, 2.0161301347923419e-01, 1.6437963059849708e-01, 1.2098311686251535e-01, 8.0353950080642028e-02, 4.8139906731089997e-02, 2.6000340271244451e-02, 1.2651375112800185e-02, 5.5416312894389780e-03, 2.1831319760967165e-03, 7.7269582168165273e-04, 2.4541809273052672e-04, 6.9854324445415533e-05, 1.7791805918220274e-05, 4.0482331787772522e-06, 8.2135445478328264e-07, 1.4829694050050112e-07, 2.3773662147574315e-08, 3.3755847311277282e-09, 4.2335937835110236e-10, 4.6760103859850812e-11, 4.5333033745839362e-12, 3.8436817248766534e-13, 2.8387673951693530e-14, 1.8181691304932292e-15, 1.0049051491574811e-16, 4.7668311711866824e-18, 1.9288793268371580e-19, 6.6129511764811165e-21, 1.9062601886875492e-22, 4.5806010985499422e-24, 9.0859656470487372e-26, 1.4712415549741861e-27, 1.9199707067949033e-29, 1.9895468942695488e-31, 1.6088732079384737e-33, 9.9468000592214207e-36, 4.5874016658583215e-38, 1.5317989647111863e-40, 3.5689385617810602e-43, 5.5370648197825761e-46, 5.3832225069701608e-49, 3.0222939315022155e-52, 8.7337328359193298e-56, 1.0933824737518103e-59, 4.4687024216810169e-64, 3.4808413877180985e-69, 1.3153371477011018e-75}, + {1.9163113126364858e-76, 5.3782147987051925e-70, 7.2563879227687952e-65, 1.8575257861979059e-60, 1.5480113585024239e-56, 
5.5781996438680938e-53, 1.0331986095466757e-49, 1.1039698088171895e-46, 7.3860282667906572e-44, 3.2885519707581838e-41, 1.0211695752712912e-38, 2.2950262940501390e-36, 3.8466556677503065e-34, 4.9282126606183490e-32, 4.9266058774066898e-30, 3.9104187247799965e-28, 2.5014139383687256e-26, 1.3062311410367629e-24, 5.6310279110780034e-23, 2.0237140711644305e-21, 6.1159230112779824e-20, 1.5662392354042098e-18, 3.4222031640236014e-17, 6.4188994417848841e-16, 1.0392178377829332e-14, 1.4594559933116930e-13, 1.7858814626910534e-12, 1.9118156866346926e-11, 1.7970567599571068e-10, 1.4881403983681883e-09, 1.0889411432936255e-08, 7.0605103886976605e-08, 4.0665248009122635e-07, 2.0852201445349388e-06, 9.5392954689367993e-06, 3.9005661581998708e-05, 1.4279713128781597e-04, 4.6876282593032669e-04, 1.3817279615374414e-03, 3.6614915226082989e-03, 8.7323289056329013e-03, 1.8760838951499537e-02, 3.6339904964249992e-02, 6.3508495757082825e-02, 1.0019666687230201e-01, 1.4277636364566415e-01, 1.8382375801093334e-01, 2.1389634702935822e-01, 2.2497264372314615e-01, 2.1389634702935822e-01, 1.8382375801093334e-01, 1.4277636364566415e-01, 1.0019666687230201e-01, 6.3508495757082825e-02, 3.6339904964249992e-02, 1.8760838951499537e-02, 8.7323289056329013e-03, 3.6614915226082989e-03, 1.3817279615374414e-03, 4.6876282593032669e-04, 1.4279713128781597e-04, 3.9005661581998708e-05, 9.5392954689367993e-06, 2.0852201445349388e-06, 4.0665248009122635e-07, 7.0605103886976605e-08, 1.0889411432936255e-08, 1.4881403983681883e-09, 1.7970567599571068e-10, 1.9118156866346926e-11, 1.7858814626910534e-12, 1.4594559933116930e-13, 1.0392178377829332e-14, 6.4188994417848841e-16, 3.4222031640236014e-17, 1.5662392354042098e-18, 6.1159230112779824e-20, 2.0237140711644305e-21, 5.6310279110780034e-23, 1.3062311410367629e-24, 2.5014139383687256e-26, 3.9104187247799965e-28, 4.9266058774066898e-30, 4.9282126606183490e-32, 3.8466556677503065e-34, 2.2950262940501390e-36, 1.0211695752712912e-38, 3.2885519707581838e-41, 7.3860282667906572e-44, 1.1039698088171895e-46, 1.0331986095466757e-49, 5.5781996438680938e-53, 1.5480113585024239e-56, 1.8575257861979059e-60, 7.2563879227687952e-65, 5.3782147987051925e-70, 1.9163113126364858e-76}, + {2.7904525118679737e-77, 8.3020782212471425e-71, 1.1767736856636937e-65, 3.1505048454950654e-61, 2.7383297861245024e-57, 1.0271804264134620e-53, 1.9777901946758928e-50, 2.1945724897613748e-47, 1.5235549200899557e-44, 7.0346474438870195e-42, 2.2642373940609524e-39, 5.2728051887223330e-37, 9.1548141716730934e-35, 1.2147369169275739e-32, 1.2575027798926635e-30, 1.0335182999889521e-28, 6.8454256421150675e-27, 3.7013456293648089e-25, 1.6522412624655730e-23, 6.1491894101104838e-22, 1.9247067870562822e-20, 5.1057396537975428e-19, 1.1557955215269003e-17, 2.2464616952811288e-16, 3.7697131696054083e-15, 5.4886703549372233e-14, 6.9650658814205409e-13, 7.7347769030963229e-12, 7.5445969018975008e-11, 6.4855152005816811e-10, 4.9282840753506927e-09, 3.3196572232173258e-08, 1.9871572271899896e-07, 1.0595200143943266e-06, 5.0423148877264742e-06, 2.1459377904439359e-05, 8.1811800690556602e-05, 2.7983364446273394e-04, 8.5995448443042602e-04, 2.3773109222215024e-03, 5.9185523609855805e-03, 1.3282862169963864e-02, 2.6896050260216100e-02, 4.9173217905643024e-02, 8.1224149327040002e-02, 1.2127838443962285e-01, 1.6375781072963536e-01, 2.0002035034474053e-01, 2.2104883827723254e-01, 2.2104883827723254e-01, 2.0002035034474053e-01, 1.6375781072963536e-01, 1.2127838443962285e-01, 8.1224149327040002e-02, 4.9173217905643024e-02, 
2.6896050260216100e-02, 1.3282862169963864e-02, 5.9185523609855805e-03, 2.3773109222215024e-03, 8.5995448443042602e-04, 2.7983364446273394e-04, 8.1811800690556602e-05, 2.1459377904439359e-05, 5.0423148877264742e-06, 1.0595200143943266e-06, 1.9871572271899896e-07, 3.3196572232173258e-08, 4.9282840753506927e-09, 6.4855152005816811e-10, 7.5445969018975008e-11, 7.7347769030963229e-12, 6.9650658814205409e-13, 5.4886703549372233e-14, 3.7697131696054083e-15, 2.2464616952811288e-16, 1.1557955215269003e-17, 5.1057396537975428e-19, 1.9247067870562822e-20, 6.1491894101104838e-22, 1.6522412624655730e-23, 3.7013456293648089e-25, 6.8454256421150675e-27, 1.0335182999889521e-28, 1.2575027798926635e-30, 1.2147369169275739e-32, 9.1548141716730934e-35, 5.2728051887223330e-37, 2.2642373940609524e-39, 7.0346474438870195e-42, 1.5235549200899557e-44, 2.1945724897613748e-47, 1.9777901946758928e-50, 1.0271804264134620e-53, 2.7383297861245024e-57, 3.1505048454950654e-61, 1.1767736856636937e-65, 8.3020782212471425e-71, 2.7904525118679737e-77}, + {4.0613138578260408e-78, 1.2803753168202646e-71, 1.9059379634157273e-66, 5.3348322229477923e-62, 4.8344851717020582e-58, 1.8871785659984032e-54, 3.7761789448607101e-51, 4.3499235924727088e-48, 3.1326162048099509e-45, 1.4994923130410846e-42, 5.0011700959096005e-40, 1.2063719426097628e-37, 2.1689987827926264e-35, 2.9797144315839396e-33, 3.1931805290733206e-31, 2.7165497805935431e-29, 1.8623802130980743e-27, 1.0423117478273709e-25, 4.8161473163605573e-24, 1.8555204994489541e-22, 6.0128622969968872e-21, 1.6515980171005252e-19, 3.8719360930637078e-18, 7.7952821069111630e-17, 1.3552585228326797e-15, 2.0448819688025393e-14, 2.6898645575023506e-13, 3.0973073233390342e-12, 3.1335844312742886e-11, 2.7949011969950757e-10, 2.2044098210910168e-09, 1.5418168139637173e-08, 9.5872357774191023e-08, 5.3122836336339743e-07, 2.6285303500427774e-06, 1.1636493707975660e-05, 4.6170574104968263e-05, 1.6444759671629532e-04, 5.2653510513586970e-04, 1.5174715210062894e-03, 3.9409789851024037e-03, 9.2324971205438137e-03, 1.9527821414966836e-02, 3.7320312768871315e-02, 6.4488485276268173e-02, 1.0081054546716739e-01, 1.4263049549711082e-01, 1.8270590844100287e-01, 2.1195024923496616e-01, 2.2270019277644792e-01, 2.1195024923496616e-01, 1.8270590844100287e-01, 1.4263049549711082e-01, 1.0081054546716739e-01, 6.4488485276268173e-02, 3.7320312768871315e-02, 1.9527821414966836e-02, 9.2324971205438137e-03, 3.9409789851024037e-03, 1.5174715210062894e-03, 5.2653510513586970e-04, 1.6444759671629532e-04, 4.6170574104968263e-05, 1.1636493707975660e-05, 2.6285303500427774e-06, 5.3122836336339743e-07, 9.5872357774191023e-08, 1.5418168139637173e-08, 2.2044098210910168e-09, 2.7949011969950757e-10, 3.1335844312742886e-11, 3.0973073233390342e-12, 2.6898645575023506e-13, 2.0448819688025393e-14, 1.3552585228326797e-15, 7.7952821069111630e-17, 3.8719360930637078e-18, 1.6515980171005252e-19, 6.0128622969968872e-21, 1.8555204994489541e-22, 4.8161473163605573e-24, 1.0423117478273709e-25, 1.8623802130980743e-27, 2.7165497805935431e-29, 3.1931805290733206e-31, 2.9797144315839396e-33, 2.1689987827926264e-35, 1.2063719426097628e-37, 5.0011700959096005e-40, 1.4994923130410846e-42, 3.1326162048099509e-45, 4.3499235924727088e-48, 3.7761789448607101e-51, 1.8871785659984032e-54, 4.8344851717020582e-58, 5.3348322229477923e-62, 1.9059379634157273e-66, 1.2803753168202646e-71, 4.0613138578260408e-78}, + {5.9080678650312624e-79, 1.9728605748792453e-72, 3.0830289900033436e-67, 9.0192223036937046e-63, 8.5188830817618133e-59, 
3.4594779364755318e-55, 7.1915294634635365e-52, 8.5975639548257647e-49, 6.4207252053484628e-46, 3.1852178778360094e-43, 1.1004706827142279e-40, 2.7487848843571218e-38, 5.1162326043852910e-36, 7.2745725968876294e-34, 8.0674342787094055e-32, 7.1018122263849361e-30, 5.0377911662132328e-28, 2.9173500726293596e-26, 1.3948415260687786e-24, 5.5610269616591533e-23, 1.8649976751302685e-21, 5.3023161831319081e-20, 1.2868329211211647e-18, 2.6824921647604057e-17, 4.8298353217030778e-16, 7.5488968779152708e-15, 1.0288749373509957e-13, 1.2278785144101270e-12, 1.2879038257315668e-11, 1.1913006349290762e-10, 9.7479212538717177e-10, 7.0758572838895756e-09, 4.5681275084849536e-08, 2.6290974837537234e-07, 1.3517971591103712e-06, 6.2215248177778678e-06, 2.5676159384549134e-05, 9.5171627785509925e-05, 3.1729197104330006e-04, 9.5269218854862072e-04, 2.5792732600590893e-03, 6.3030002856080628e-03, 1.3915665220231790e-02, 2.7779127385933564e-02, 5.0175812677428613e-02, 8.2051827391224449e-02, 1.2153798684410418e-01, 1.6313003050278282e-01, 1.9846285025418653e-01, 2.1889262958743932e-01, 2.1889262958743932e-01, 1.9846285025418653e-01, 1.6313003050278282e-01, 1.2153798684410418e-01, 8.2051827391224449e-02, 5.0175812677428613e-02, 2.7779127385933564e-02, 1.3915665220231790e-02, 6.3030002856080628e-03, 2.5792732600590893e-03, 9.5269218854862072e-04, 3.1729197104330006e-04, 9.5171627785509925e-05, 2.5676159384549134e-05, 6.2215248177778678e-06, 1.3517971591103712e-06, 2.6290974837537234e-07, 4.5681275084849536e-08, 7.0758572838895756e-09, 9.7479212538717177e-10, 1.1913006349290762e-10, 1.2879038257315668e-11, 1.2278785144101270e-12, 1.0288749373509957e-13, 7.5488968779152708e-15, 4.8298353217030778e-16, 2.6824921647604057e-17, 1.2868329211211647e-18, 5.3023161831319081e-20, 1.8649976751302685e-21, 5.5610269616591533e-23, 1.3948415260687786e-24, 2.9173500726293596e-26, 5.0377911662132328e-28, 7.1018122263849361e-30, 8.0674342787094055e-32, 7.2745725968876294e-34, 5.1162326043852910e-36, 2.7487848843571218e-38, 1.1004706827142279e-40, 3.1852178778360094e-43, 6.4207252053484628e-46, 8.5975639548257647e-49, 7.1915294634635365e-52, 3.4594779364755318e-55, 8.5188830817618133e-59, 9.0192223036937046e-63, 3.0830289900033436e-67, 1.9728605748792453e-72, 5.9080678650312624e-79}, + {8.5904309894818364e-80, 3.0371816846686698e-73, 4.9809124279561082e-68, 1.5224294774877133e-63, 1.4982961830544962e-59, 6.3278572616256388e-56, 1.3661743924144741e-52, 1.6945475116309319e-49, 1.3119398711365523e-46, 6.7430272036894988e-44, 2.4125325785224105e-41, 6.2381218594744620e-39, 1.2015973481338649e-36, 1.7677477939830743e-34, 2.0280915379277212e-32, 1.8467949790116279e-30, 1.3550808933459723e-28, 8.1168066229507355e-27, 4.0142573977672800e-25, 1.6555633107937502e-23, 5.7440707561477782e-22, 1.6897039658922656e-20, 4.2436086794049572e-19, 9.1557867438886624e-18, 1.7065546359370669e-16, 2.7618433228960722e-15, 3.8986379094133088e-14, 4.8201156943039955e-13, 5.2391878407492571e-12, 5.0236198872007664e-11, 4.2625383198445261e-10, 3.2096074629923419e-09, 2.1502703382505708e-08, 1.2847504869574726e-07, 6.8606640833299067e-07, 3.2808733242412502e-06, 1.4075589850899586e-05, 5.4263099453546539e-05, 1.8825409449540776e-04, 5.8852552673756763e-04, 1.6599244925103198e-03, 4.2284362004214454e-03, 9.7376438372710882e-03, 2.0289684052787870e-02, 3.8279047715437671e-02, 6.5430893699439191e-02, 1.0138360090162955e-01, 1.4246231456883673e-01, 1.8160258790817957e-01, 2.1005593297000114e-01, 2.2049524037272045e-01, 2.1005593297000114e-01, 
1.8160258790817957e-01, 1.4246231456883673e-01, 1.0138360090162955e-01, 6.5430893699439191e-02, 3.8279047715437671e-02, 2.0289684052787870e-02, 9.7376438372710882e-03, 4.2284362004214454e-03, 1.6599244925103198e-03, 5.8852552673756763e-04, 1.8825409449540776e-04, 5.4263099453546539e-05, 1.4075589850899586e-05, 3.2808733242412502e-06, 6.8606640833299067e-07, 1.2847504869574726e-07, 2.1502703382505708e-08, 3.2096074629923419e-09, 4.2625383198445261e-10, 5.0236198872007664e-11, 5.2391878407492571e-12, 4.8201156943039955e-13, 3.8986379094133088e-14, 2.7618433228960722e-15, 1.7065546359370669e-16, 9.1557867438886624e-18, 4.2436086794049572e-19, 1.6897039658922656e-20, 5.7440707561477782e-22, 1.6555633107937502e-23, 4.0142573977672800e-25, 8.1168066229507355e-27, 1.3550808933459723e-28, 1.8467949790116279e-30, 2.0280915379277212e-32, 1.7677477939830743e-34, 1.2015973481338649e-36, 6.2381218594744620e-39, 2.4125325785224105e-41, 6.7430272036894988e-44, 1.3119398711365523e-46, 1.6945475116309319e-49, 1.3661743924144741e-52, 6.3278572616256388e-56, 1.4982961830544962e-59, 1.5224294774877133e-63, 4.9809124279561082e-68, 3.0371816846686698e-73, 8.5904309894818364e-80}, + {1.2484709934169793e-80, 4.6716134267455262e-74, 8.0373302337190505e-69, 2.5658825214250866e-64, 2.6303256479665557e-60, 1.1549629962198519e-56, 2.5889667003232748e-53, 3.3307225187898004e-50, 2.6725211308631837e-47, 1.4227172891228852e-44, 5.2696956176078606e-42, 1.4101103127648418e-39, 2.8100935042395224e-37, 4.2761357132188511e-35, 5.0736585413756089e-33, 4.7776103023740991e-31, 3.6248608340202214e-29, 2.2451158851842135e-27, 1.1481416337131872e-25, 4.8966201179064703e-24, 1.7569837815084929e-22, 5.3457088341630912e-21, 1.3887935290404047e-19, 3.1001141967930157e-18, 5.9794936879871788e-17, 1.0016087747850758e-15, 1.4637583603666311e-14, 1.8740601774404304e-13, 2.1099943426302045e-12, 2.0963143669741310e-11, 1.8436168972317116e-10, 1.4393514257953216e-09, 1.0001836240436016e-08, 6.2007747893151711e-08, 3.4372586316527799e-07, 1.7070358456037084e-06, 7.6089581259975007e-06, 3.0491388427439179e-05, 1.1001458276280751e-04, 3.5787758856259302e-04, 1.0509023763580526e-03, 2.7887647298672400e-03, 6.6943659482095498e-03, 1.4548976670515633e-02, 2.8649120680685196e-02, 5.1148315515202297e-02, 8.2838910302204208e-02, 1.2176431106358794e-01, 1.6249750087283824e-01, 1.9693934921359804e-01, 2.1679829114353039e-01, 2.1679829114353039e-01, 1.9693934921359804e-01, 1.6249750087283824e-01, 1.2176431106358794e-01, 8.2838910302204208e-02, 5.1148315515202297e-02, 2.8649120680685196e-02, 1.4548976670515633e-02, 6.6943659482095498e-03, 2.7887647298672400e-03, 1.0509023763580526e-03, 3.5787758856259302e-04, 1.1001458276280751e-04, 3.0491388427439179e-05, 7.6089581259975007e-06, 1.7070358456037084e-06, 3.4372586316527799e-07, 6.2007747893151711e-08, 1.0001836240436016e-08, 1.4393514257953216e-09, 1.8436168972317116e-10, 2.0963143669741310e-11, 2.1099943426302045e-12, 1.8740601774404304e-13, 1.4637583603666311e-14, 1.0016087747850758e-15, 5.9794936879871788e-17, 3.1001141967930157e-18, 1.3887935290404047e-19, 5.3457088341630912e-21, 1.7569837815084929e-22, 4.8966201179064703e-24, 1.1481416337131872e-25, 2.2451158851842135e-27, 3.6248608340202214e-29, 4.7776103023740991e-31, 5.0736585413756089e-33, 4.2761357132188511e-35, 2.8100935042395224e-37, 1.4101103127648418e-39, 5.2696956176078606e-42, 1.4227172891228852e-44, 2.6725211308631837e-47, 3.3307225187898004e-50, 2.5889667003232748e-53, 1.1549629962198519e-56, 2.6303256479665557e-60, 
2.5658825214250866e-64, 8.0373302337190505e-69, 4.6716134267455262e-74, 1.2484709934169793e-80}, + {1.8135909934565948e-81, 7.1794482518779405e-75, 1.2953741930788741e-69, 4.3179663130756179e-65, 4.6092689309993016e-61, 2.1035897073764635e-57, 4.8944169739176386e-54, 6.5290610509647759e-51, 5.4278702104818919e-48, 2.9919651066058538e-45, 1.1469498354442222e-42, 3.1751925459280292e-40, 6.5444038132328712e-38, 1.0297652702618718e-35, 1.2632131984833186e-33, 1.2296697906080111e-31, 9.6441699135755975e-30, 6.1744578340056585e-28, 3.2639881507273933e-26, 1.4390083206149803e-24, 5.3380455822882838e-23, 1.6792427517695717e-21, 4.5112507797362053e-20, 1.0414929706906582e-18, 2.0779783402868767e-17, 3.6013157967372950e-16, 5.4464951383584157e-15, 7.2181098533397167e-14, 8.4145383929577368e-13, 8.6584557144940455e-12, 7.8890516800983647e-11, 6.3831524130277722e-10, 4.5984942968861874e-09, 2.9567316716160832e-08, 1.7005133441347446e-07, 8.7658428229201604e-07, 4.0574245645905015e-06, 1.6891865749199694e-05, 6.3348592408607267e-05, 2.1430344005210969e-04, 6.5478304395432502e-04, 1.8089687249743006e-03, 4.5234328605134695e-03, 1.0247034345774014e-02, 2.1045743544451385e-02, 3.9216150726808105e-02, 6.6337015149646864e-02, 1.0191809344727074e-01, 1.4227374613665661e-01, 1.8051385843527265e-01, 2.0821116124461711e-01, 2.1835450988366478e-01, 2.0821116124461711e-01, 1.8051385843527265e-01, 1.4227374613665661e-01, 1.0191809344727074e-01, 6.6337015149646864e-02, 3.9216150726808105e-02, 2.1045743544451385e-02, 1.0247034345774014e-02, 4.5234328605134695e-03, 1.8089687249743006e-03, 6.5478304395432502e-04, 2.1430344005210969e-04, 6.3348592408607267e-05, 1.6891865749199694e-05, 4.0574245645905015e-06, 8.7658428229201604e-07, 1.7005133441347446e-07, 2.9567316716160832e-08, 4.5984942968861874e-09, 6.3831524130277722e-10, 7.8890516800983647e-11, 8.6584557144940455e-12, 8.4145383929577368e-13, 7.2181098533397167e-14, 5.4464951383584157e-15, 3.6013157967372950e-16, 2.0779783402868767e-17, 1.0414929706906582e-18, 4.5112507797362053e-20, 1.6792427517695717e-21, 5.3380455822882838e-23, 1.4390083206149803e-24, 3.2639881507273933e-26, 6.1744578340056585e-28, 9.6441699135755975e-30, 1.2296697906080111e-31, 1.2632131984833186e-33, 1.0297652702618718e-35, 6.5444038132328712e-38, 3.1751925459280292e-40, 1.1469498354442222e-42, 2.9919651066058538e-45, 5.4278702104818919e-48, 6.5290610509647759e-51, 4.8944169739176386e-54, 2.1035897073764635e-57, 4.6092689309993016e-61, 4.3179663130756179e-65, 1.2953741930788741e-69, 7.1794482518779405e-75, 1.8135909934565948e-81}, + {2.6333035610382397e-82, 1.1024259296315383e-75, 2.0852967445741233e-70, 7.2556400241843898e-66, 8.0626671773555472e-62, 3.8234147683158063e-58, 9.2309809923115915e-55, 1.2764717039702522e-51, 1.0991625015027042e-48, 6.2718288470036324e-46, 2.4875851238977396e-43, 7.1225496037923785e-41, 1.5178899706379763e-38, 2.4689751624056617e-36, 3.1303492079755755e-34, 3.1491543092696820e-32, 2.5522940793940470e-30, 1.6885482670813982e-28, 9.2239272353929576e-27, 4.2024365472164892e-25, 1.6110964547970300e-23, 5.2383797573874037e-22, 1.4547178325694397e-20, 3.4721710760327322e-19, 7.1634722023972862e-18, 1.2840032078315100e-16, 2.0088075710888178e-15, 2.7546357750311648e-14, 3.3235533916670117e-13, 3.5405078866592410e-12, 3.3406627081403544e-11, 2.8000417787301215e-10, 2.0903260822482034e-09, 1.3932720755460754e-08, 8.3098897726797486e-08, 4.4440079875949290e-07, 2.1349200492771325e-06, 9.2289528191432300e-06, 3.5954963665620299e-05, 1.2641879089097217e-04, 
4.0166542717281779e-04, 1.1545621668817155e-03, 3.0055192270927668e-03, 7.0920553324460628e-03, 1.5182046088138676e-02, 2.9505655023499801e-02, 5.2091376675144491e-02, 8.3587237918301055e-02, 1.2195957249370640e-01, 1.6186130554382056e-01, 1.9544872749879730e-01, 2.1476292059027455e-01, 2.1476292059027455e-01, 1.9544872749879730e-01, 1.6186130554382056e-01, 1.2195957249370640e-01, 8.3587237918301055e-02, 5.2091376675144491e-02, 2.9505655023499801e-02, 1.5182046088138676e-02, 7.0920553324460628e-03, 3.0055192270927668e-03, 1.1545621668817155e-03, 4.0166542717281779e-04, 1.2641879089097217e-04, 3.5954963665620299e-05, 9.2289528191432300e-06, 2.1349200492771325e-06, 4.4440079875949290e-07, 8.3098897726797486e-08, 1.3932720755460754e-08, 2.0903260822482034e-09, 2.8000417787301215e-10, 3.3406627081403544e-11, 3.5405078866592410e-12, 3.3235533916670117e-13, 2.7546357750311648e-14, 2.0088075710888178e-15, 1.2840032078315100e-16, 7.1634722023972862e-18, 3.4721710760327322e-19, 1.4547178325694397e-20, 5.2383797573874037e-22, 1.6110964547970300e-23, 4.2024365472164892e-25, 9.2239272353929576e-27, 1.6885482670813982e-28, 2.5522940793940470e-30, 3.1491543092696820e-32, 3.1303492079755755e-34, 2.4689751624056617e-36, 1.5178899706379763e-38, 7.1225496037923785e-41, 2.4875851238977396e-43, 6.2718288470036324e-46, 1.0991625015027042e-48, 1.2764717039702522e-51, 9.2309809923115915e-55, 3.8234147683158063e-58, 8.0626671773555472e-62, 7.2556400241843898e-66, 2.0852967445741233e-70, 1.1024259296315383e-75, 2.6333035610382397e-82}, + {3.8217861138085193e-83, 1.6914062097101716e-76, 3.3530353599320843e-71, 1.2174103962049957e-66, 1.4078704255855603e-62, 6.9351300068921097e-59, 1.7369401373650657e-55, 2.4890859009540812e-52, 2.2194272123488213e-49, 1.3105586296104345e-46, 5.3766702664810952e-44, 1.5917657474423608e-41, 3.5064188455904165e-39, 5.8941611353204651e-37, 7.7216030296457281e-35, 8.0254188497506414e-33, 6.7194313102734894e-31, 4.5922904136653062e-29, 2.5914755539513213e-27, 1.2197280334801351e-25, 4.8310543796668917e-24, 1.6229893391874090e-22, 4.6574324658183078e-21, 1.1488922069252337e-19, 2.4500971300072567e-18, 4.5403372361053743e-17, 7.3453513452694623e-16, 1.0418125668654998e-14, 1.3004299981305270e-13, 1.4335908364459435e-12, 1.4002082648585132e-11, 1.2152299982982779e-10, 9.3968905695997315e-10, 6.4898261604097228e-09, 4.0121667112572270e-08, 2.2249156377389857e-07, 1.1087989233089753e-06, 4.9744232013977632e-06, 2.0121686738026826e-05, 7.3491681493347192e-05, 2.4267655305986469e-04, 7.2534332143885018e-04, 1.9644706290361147e-03, 4.8255384821498035e-03, 1.0759969971184533e-02, 2.1795386765479520e-02, 4.0131719533568026e-02, 6.7208115842432761e-02, 1.0241614803131452e-01, 1.4206655437043109e-01, 1.7943973488563147e-01, 2.0641382873539532e-01, 2.1627494312286819e-01, 2.0641382873539532e-01, 1.7943973488563147e-01, 1.4206655437043109e-01, 1.0241614803131452e-01, 6.7208115842432761e-02, 4.0131719533568026e-02, 2.1795386765479520e-02, 1.0759969971184533e-02, 4.8255384821498035e-03, 1.9644706290361147e-03, 7.2534332143885018e-04, 2.4267655305986469e-04, 7.3491681493347192e-05, 2.0121686738026826e-05, 4.9744232013977632e-06, 1.1087989233089753e-06, 2.2249156377389857e-07, 4.0121667112572270e-08, 6.4898261604097228e-09, 9.3968905695997315e-10, 1.2152299982982779e-10, 1.4002082648585132e-11, 1.4335908364459435e-12, 1.3004299981305270e-13, 1.0418125668654998e-14, 7.3453513452694623e-16, 4.5403372361053743e-17, 2.4500971300072567e-18, 1.1488922069252337e-19, 4.6574324658183078e-21, 
1.6229893391874090e-22, 4.8310543796668917e-24, 1.2197280334801351e-25, 2.5914755539513213e-27, 4.5922904136653062e-29, 6.7194313102734894e-31, 8.0254188497506414e-33, 7.7216030296457281e-35, 5.8941611353204651e-37, 3.5064188455904165e-39, 1.5917657474423608e-41, 5.3766702664810952e-44, 1.3105586296104345e-46, 2.2194272123488213e-49, 2.4890859009540812e-52, 1.7369401373650657e-55, 6.9351300068921097e-59, 1.4078704255855603e-62, 1.2174103962049957e-66, 3.3530353599320843e-71, 1.6914062097101716e-76, 3.8217861138085193e-83}, + {5.5441992722068482e-84, 2.5929395481292082e-77, 5.3853541853574509e-72, 2.0397343220146265e-67, 2.4541249535874453e-63, 1.2554100822781216e-59, 3.2608376921091931e-56, 4.8412365544964550e-53, 4.4687726007935870e-50, 2.7300300393618730e-47, 1.1581833176995228e-44, 3.5442966005156755e-42, 8.0681149793680242e-40, 1.4011614007496688e-37, 1.8960834059704929e-35, 2.0353977109818878e-33, 1.7599996574525402e-31, 1.2422004027168656e-29, 7.2392001895975213e-28, 3.5188457649983255e-26, 1.4394549700596331e-24, 4.9949088636257322e-23, 1.4806823585568918e-21, 3.7736012205435806e-20, 8.3155142767475590e-19, 1.5925756694175446e-17, 2.6632797443869722e-16, 3.9055385475699820e-15, 5.0416061378516563e-14, 5.7492300646009343e-13, 5.8103188163962404e-12, 5.2193608370012943e-11, 4.1786070059665728e-10, 2.9889202437814213e-09, 1.9144674266312151e-08, 1.1003547442210821e-07, 5.6858218763614167e-07, 2.6459673129317030e-06, 1.1106993480718368e-05, 4.2117165655971054e-05, 1.4445933891050312e-04, 4.4872081587300148e-04, 1.2636334832367285e-03, 3.2292611575356744e-03, 7.4954912038533066e-03, 1.5814177885623497e-02, 3.0348423943327270e-02, 5.3005665528953826e-02, 8.4298566514348958e-02, 1.2212582857840716e-01, 1.6122241452497918e-01, 1.9398990851223818e-01, 2.1278380261127008e-01, 2.1278380261127008e-01, 1.9398990851223818e-01, 1.6122241452497918e-01, 1.2212582857840716e-01, 8.4298566514348958e-02, 5.3005665528953826e-02, 3.0348423943327270e-02, 1.5814177885623497e-02, 7.4954912038533066e-03, 3.2292611575356744e-03, 1.2636334832367285e-03, 4.4872081587300148e-04, 1.4445933891050312e-04, 4.2117165655971054e-05, 1.1106993480718368e-05, 2.6459673129317030e-06, 5.6858218763614167e-07, 1.1003547442210821e-07, 1.9144674266312151e-08, 2.9889202437814213e-09, 4.1786070059665728e-10, 5.2193608370012943e-11, 5.8103188163962404e-12, 5.7492300646009343e-13, 5.0416061378516563e-14, 3.9055385475699820e-15, 2.6632797443869722e-16, 1.5925756694175446e-17, 8.3155142767475590e-19, 3.7736012205435806e-20, 1.4806823585568918e-21, 4.9949088636257322e-23, 1.4394549700596331e-24, 3.5188457649983255e-26, 7.2392001895975213e-28, 1.2422004027168656e-29, 1.7599996574525402e-31, 2.0353977109818878e-33, 1.8960834059704929e-35, 1.4011614007496688e-37, 8.0681149793680242e-40, 3.5442966005156755e-42, 1.1581833176995228e-44, 2.7300300393618730e-47, 4.4687726007935870e-50, 4.8412365544964550e-53, 3.2608376921091931e-56, 1.2554100822781216e-59, 2.4541249535874453e-63, 2.0397343220146265e-67, 5.3853541853574509e-72, 2.5929395481292082e-77, 5.5441992722068482e-84}, + {8.0393570948066263e-85, 3.9718103441759187e-78, 8.6398120096864360e-73, 3.4126815525065270e-68, 4.2706414980124341e-64, 2.2680834264069969e-60, 6.1079802227760500e-57, 9.3924613692270336e-54, 8.9727413440350903e-51, 5.6695759907967620e-48, 2.4865399657656492e-45, 7.8635083915713631e-43, 1.8492505278934263e-40, 3.3170216420826621e-38, 4.6353055637591542e-36, 5.1377976964188761e-34, 4.5868230110636832e-32, 3.3422916829863083e-30, 2.0109122099933846e-28, 
1.0091662170872907e-26, 4.2622901606926843e-25, 1.5271754441949008e-23, 4.6750298903216843e-22, 1.2305362144434042e-20, 2.8009656241231101e-19, 5.5420769366655970e-18, 9.5769111189688996e-17, 1.4514991221804437e-15, 1.9370075854564139e-14, 2.2840535977994185e-13, 2.3875175115445612e-12, 2.2189005868460593e-11, 1.8384761407124325e-10, 1.3614036372254147e-09, 9.0305672540467943e-09, 5.3771344014689486e-08, 2.8795698296534307e-07, 1.3893404522738127e-06, 6.0491308542217258e-06, 2.3802318133010187e-05, 8.4755918374596230e-05, 2.7344820329821672e-04, 8.0022923976588462e-04, 2.1262829728506513e-03, 5.1343246513282466e-03, 1.1275787985601648e-02, 2.2538065887400437e-02, 4.1025901106205930e-02, 6.8045431494172207e-02, 1.0287976270898229e-01, 1.4184235712795296e-01, 1.7838019194959490e-01, 2.0466195210272575e-01, 2.1425368197218711e-01, 2.0466195210272575e-01, 1.7838019194959490e-01, 1.4184235712795296e-01, 1.0287976270898229e-01, 6.8045431494172207e-02, 4.1025901106205930e-02, 2.2538065887400437e-02, 1.1275787985601648e-02, 5.1343246513282466e-03, 2.1262829728506513e-03, 8.0022923976588462e-04, 2.7344820329821672e-04, 8.4755918374596230e-05, 2.3802318133010187e-05, 6.0491308542217258e-06, 1.3893404522738127e-06, 2.8795698296534307e-07, 5.3771344014689486e-08, 9.0305672540467943e-09, 1.3614036372254147e-09, 1.8384761407124325e-10, 2.2189005868460593e-11, 2.3875175115445612e-12, 2.2840535977994185e-13, 1.9370075854564139e-14, 1.4514991221804437e-15, 9.5769111189688996e-17, 5.5420769366655970e-18, 2.8009656241231101e-19, 1.2305362144434042e-20, 4.6750298903216843e-22, 1.5271754441949008e-23, 4.2622901606926843e-25, 1.0091662170872907e-26, 2.0109122099933846e-28, 3.3422916829863083e-30, 4.5868230110636832e-32, 5.1377976964188761e-34, 4.6353055637591542e-36, 3.3170216420826621e-38, 1.8492505278934263e-40, 7.8635083915713631e-43, 2.4865399657656492e-45, 5.6695759907967620e-48, 8.9727413440350903e-51, 9.3924613692270336e-54, 6.1079802227760500e-57, 2.2680834264069969e-60, 4.2706414980124341e-64, 3.4126815525065270e-68, 8.6398120096864360e-73, 3.9718103441759187e-78, 8.0393570948066263e-85}, + {1.1652438369940990e-85, 6.0791348825232945e-79, 1.3845734727539209e-73, 5.7018195541180805e-69, 7.4193203262024338e-65, 4.0896774498494807e-61, 1.1415803801508950e-57, 1.8177229307902597e-54, 1.7966861107994258e-51, 1.1738936354016808e-48, 5.3209955905676318e-46, 1.7384628972964638e-43, 4.2224503669018324e-41, 7.8205013430131660e-39, 1.1282489276123687e-36, 1.2908866148089198e-34, 1.1895168657985647e-32, 8.9460231384332668e-31, 5.5552056820850460e-29, 2.8773904696541066e-27, 1.2543771045577858e-25, 4.6393197457827112e-24, 1.4661278293822309e-22, 3.9843331940864138e-21, 9.3649180986702086e-20, 1.9137000334222806e-18, 3.4159437284687078e-17, 5.3490074750121478e-16, 7.3765684098315642e-15, 8.9908236810354557e-14, 9.7167575155334535e-13, 9.3392850013870488e-12, 8.0050126851044650e-11, 6.1341491782010079e-10, 4.2120058561600563e-09, 2.5970629339726590e-08, 1.4407085283844865e-07, 7.2034690286882965e-07, 3.2515206739898432e-06, 1.3269605500302639e-05, 4.9028384509802042e-05, 1.6420793698012457e-04, 4.9909954967131240e-04, 1.3780641953787555e-03, 3.4597075988944920e-03, 7.9041144380998778e-03, 1.6444728895483641e-02, 3.1177182981317560e-02, 5.3891864826823496e-02, 8.4974571583542949e-02, 1.2226499127815092e-01, 1.6058169612107806e-01, 1.9256185751562635e-01, 2.1085839371676188e-01, 2.1085839371676188e-01, 1.9256185751562635e-01, 1.6058169612107806e-01, 1.2226499127815092e-01, 8.4974571583542949e-02, 
5.3891864826823496e-02, 3.1177182981317560e-02, 1.6444728895483641e-02, 7.9041144380998778e-03, 3.4597075988944920e-03, 1.3780641953787555e-03, 4.9909954967131240e-04, 1.6420793698012457e-04, 4.9028384509802042e-05, 1.3269605500302639e-05, 3.2515206739898432e-06, 7.2034690286882965e-07, 1.4407085283844865e-07, 2.5970629339726590e-08, 4.2120058561600563e-09, 6.1341491782010079e-10, 8.0050126851044650e-11, 9.3392850013870488e-12, 9.7167575155334535e-13, 8.9908236810354557e-14, 7.3765684098315642e-15, 5.3490074750121478e-16, 3.4159437284687078e-17, 1.9137000334222806e-18, 9.3649180986702086e-20, 3.9843331940864138e-21, 1.4661278293822309e-22, 4.6393197457827112e-24, 1.2543771045577858e-25, 2.8773904696541066e-27, 5.5552056820850460e-29, 8.9460231384332668e-31, 1.1895168657985647e-32, 1.2908866148089198e-34, 1.1282489276123687e-36, 7.8205013430131660e-39, 4.2224503669018324e-41, 1.7384628972964638e-43, 5.3209955905676318e-46, 1.1738936354016808e-48, 1.7966861107994258e-51, 1.8177229307902597e-54, 1.1415803801508950e-57, 4.0896774498494807e-61, 7.4193203262024338e-65, 5.7018195541180805e-69, 1.3845734727539209e-73, 6.0791348825232945e-79, 1.1652438369940990e-85}, + {1.6882167305707454e-86, 9.2973163909464803e-80, 2.2164453269063499e-74, 9.5134149540243336e-70, 1.2868308966878782e-65, 7.3601988558208524e-62, 2.1289805828564999e-58, 3.5092885496818465e-55, 3.5879758763817130e-52, 2.4233946264708683e-49, 1.1349953099138248e-46, 3.8300452415564833e-44, 9.6052219657335494e-42, 1.8364466079421476e-39, 2.7344551836642605e-37, 3.2286335472886435e-35, 3.0699209573361954e-33, 2.3822649787982913e-31, 1.5263559111336781e-29, 8.1574699196788167e-28, 3.6694695554120368e-26, 1.4004790074896691e-24, 4.5675182897648129e-23, 1.2811473098169559e-21, 3.1084252235905216e-20, 6.5579821121285801e-19, 1.2087664560459120e-17, 1.9548969486564158e-16, 2.7849389834496225e-15, 3.5072844186621564e-14, 3.9175124016391924e-13, 3.8925586774167020e-12, 3.4501563168365539e-11, 2.7347438232977245e-10, 1.9430095134453053e-09, 1.2400464068693708e-08, 7.1228670517261927e-08, 3.6889724023685680e-07, 1.7254610364884881e-06, 7.2997812630457740e-06, 2.7971738168589429e-05, 9.7203453180453000e-05, 3.0668682564469456e-04, 8.7945145983116615e-04, 2.2942465962944290e-03, 5.4493669764638540e-03, 1.1793861247331682e-02, 2.3273293687959880e-02, 4.1898884894253120e-02, 6.8850165394430590e-02, 1.0331081665178690e-01, 1.4160263924602950e-01, 1.7733517017411166e-01, 2.0295366115211383e-01, 2.1228805186235042e-01, 2.0295366115211383e-01, 1.7733517017411166e-01, 1.4160263924602950e-01, 1.0331081665178690e-01, 6.8850165394430590e-02, 4.1898884894253120e-02, 2.3273293687959880e-02, 1.1793861247331682e-02, 5.4493669764638540e-03, 2.2942465962944290e-03, 8.7945145983116615e-04, 3.0668682564469456e-04, 9.7203453180453000e-05, 2.7971738168589429e-05, 7.2997812630457740e-06, 1.7254610364884881e-06, 3.6889724023685680e-07, 7.1228670517261927e-08, 1.2400464068693708e-08, 1.9430095134453053e-09, 2.7347438232977245e-10, 3.4501563168365539e-11, 3.8925586774167020e-12, 3.9175124016391924e-13, 3.5072844186621564e-14, 2.7849389834496225e-15, 1.9548969486564158e-16, 1.2087664560459120e-17, 6.5579821121285801e-19, 3.1084252235905216e-20, 1.2811473098169559e-21, 4.5675182897648129e-23, 1.4004790074896691e-24, 3.6694695554120368e-26, 8.1574699196788167e-28, 1.5263559111336781e-29, 2.3822649787982913e-31, 3.0699209573361954e-33, 3.2286335472886435e-35, 2.7344551836642605e-37, 1.8364466079421476e-39, 9.6052219657335494e-42, 3.8300452415564833e-44, 
1.1349953099138248e-46, 2.4233946264708683e-49, 3.5879758763817130e-52, 3.5092885496818465e-55, 2.1289805828564999e-58, 7.3601988558208524e-62, 1.2868308966878782e-65, 9.5134149540243336e-70, 2.2164453269063499e-74, 9.2973163909464803e-80, 1.6882167305707454e-86}, + {2.4448842535551943e-87, 1.4208271558885895e-80, 3.5443349838315051e-75, 1.5851638341206613e-70, 2.2283154783345243e-66, 1.3221294591344122e-62, 3.9619475088358356e-59, 6.7588233504303362e-56, 7.1462236088717712e-53, 4.9883667718148639e-50, 2.4133632044032138e-47, 8.4092583458821788e-45, 2.1769699583039015e-42, 4.2954616661119346e-40, 6.5994654800159303e-38, 8.0390389893761170e-36, 7.8853213936624651e-34, 6.3119668181308563e-32, 4.1716030615092882e-30, 2.2997378701939073e-28, 1.0671290531221681e-26, 4.2015271606281150e-25, 1.4137224274155160e-23, 4.0914965094167889e-22, 1.0244196283925887e-20, 2.2306211570300883e-19, 4.2441255956981095e-18, 7.0866359430444936e-17, 1.0425344386668410e-15, 1.3561256356020877e-14, 1.5649369927618164e-13, 1.6069042876497942e-12, 1.4722456305780643e-11, 1.2066201440627706e-10, 8.8669552925941743e-10, 5.8549609302492589e-09, 3.4807738736531026e-08, 1.8664547970988355e-07, 9.0421645542748592e-07, 3.9637332341743650e-06, 1.5744241598181718e-05, 5.6738856687547051e-05, 1.8573262875095809e-04, 5.5284802877925447e-04, 1.4977892627292288e-03, 3.6965702725171162e-03, 8.3173850369341346e-03, 1.7073105881727814e-02, 3.1991743583371472e-02, 5.4750665778331979e-02, 8.5616850779843184e-02, 1.2237883847025718e-01, 1.5993992755454783e-01, 1.9116358027866243e-01, 2.0898430851629271e-01, 2.0898430851629271e-01, 1.9116358027866243e-01, 1.5993992755454783e-01, 1.2237883847025718e-01, 8.5616850779843184e-02, 5.4750665778331979e-02, 3.1991743583371472e-02, 1.7073105881727814e-02, 8.3173850369341346e-03, 3.6965702725171162e-03, 1.4977892627292288e-03, 5.5284802877925447e-04, 1.8573262875095809e-04, 5.6738856687547051e-05, 1.5744241598181718e-05, 3.9637332341743650e-06, 9.0421645542748592e-07, 1.8664547970988355e-07, 3.4807738736531026e-08, 5.8549609302492589e-09, 8.8669552925941743e-10, 1.2066201440627706e-10, 1.4722456305780643e-11, 1.6069042876497942e-12, 1.5649369927618164e-13, 1.3561256356020877e-14, 1.0425344386668410e-15, 7.0866359430444936e-17, 4.2441255956981095e-18, 2.2306211570300883e-19, 1.0244196283925887e-20, 4.0914965094167889e-22, 1.4137224274155160e-23, 4.2015271606281150e-25, 1.0671290531221681e-26, 2.2997378701939073e-28, 4.1716030615092882e-30, 6.3119668181308563e-32, 7.8853213936624651e-34, 8.0390389893761170e-36, 6.5994654800159303e-38, 4.2954616661119346e-40, 2.1769699583039015e-42, 8.4092583458821788e-45, 2.4133632044032138e-47, 4.9883667718148639e-50, 7.1462236088717712e-53, 6.7588233504303362e-56, 3.9619475088358356e-59, 1.3221294591344122e-62, 2.2283154783345243e-66, 1.5851638341206613e-70, 3.5443349838315051e-75, 1.4208271558885895e-80, 2.4448842535551943e-87}, + {3.5392388249213494e-88, 2.1696906628474067e-81, 5.6618251917679648e-76, 2.6377634936769698e-71, 3.8524845909450548e-67, 2.3705848077171715e-63, 7.3575399530382127e-60, 1.2986758900322505e-56, 1.4196212763395569e-53, 1.0238881258836751e-50, 5.1156622686963731e-48, 1.8401448448645002e-45, 4.9161713392811270e-43, 1.0008264983378071e-40, 1.5861732912787765e-38, 1.9928674928468924e-36, 2.0159688321556722e-34, 1.6641520867485548e-32, 1.1341833636634616e-30, 6.4478013617261386e-29, 3.0854328584335985e-27, 1.2528407416044293e-25, 4.3478662125855451e-24, 1.2979585113660281e-22, 3.3525506308968111e-21, 7.5318707919556725e-20, 
1.4788148820660904e-18, 2.5485379935372649e-17, 3.8703599993584500e-16, 5.1983189715882119e-15, 6.1952864130014014e-14, 6.5714753453597697e-13, 6.2212201579575959e-12, 5.2699881035078001e-11, 4.0039253602146504e-10, 2.7342851107604089e-09, 1.6816952694545013e-08, 9.3323608375261539e-08, 4.6806643299789927e-07, 2.1250386338304275e-06, 8.7455216612331247e-06, 3.2668449881324752e-05, 1.1089473739894331e-04, 3.4245439902397305e-04, 9.6300903314221480e-04, 2.4681920354686017e-03, 5.7702467735466509e-03, 1.2313597617381173e-02, 2.4000639085914963e-02, 4.2750896794915914e-02, 6.9623487033404263e-02, 1.0371107766286002e-01, 1.4134876449586573e-01, 1.7630458117164949e-01, 2.0128719075538343e-01, 2.1037554689061755e-01, 2.0128719075538343e-01, 1.7630458117164949e-01, 1.4134876449586573e-01, 1.0371107766286002e-01, 6.9623487033404263e-02, 4.2750896794915914e-02, 2.4000639085914963e-02, 1.2313597617381173e-02, 5.7702467735466509e-03, 2.4681920354686017e-03, 9.6300903314221480e-04, 3.4245439902397305e-04, 1.1089473739894331e-04, 3.2668449881324752e-05, 8.7455216612331247e-06, 2.1250386338304275e-06, 4.6806643299789927e-07, 9.3323608375261539e-08, 1.6816952694545013e-08, 2.7342851107604089e-09, 4.0039253602146504e-10, 5.2699881035078001e-11, 6.2212201579575959e-12, 6.5714753453597697e-13, 6.1952864130014014e-14, 5.1983189715882119e-15, 3.8703599993584500e-16, 2.5485379935372649e-17, 1.4788148820660904e-18, 7.5318707919556725e-20, 3.3525506308968111e-21, 1.2979585113660281e-22, 4.3478662125855451e-24, 1.2528407416044293e-25, 3.0854328584335985e-27, 6.4478013617261386e-29, 1.1341833636634616e-30, 1.6641520867485548e-32, 2.0159688321556722e-34, 1.9928674928468924e-36, 1.5861732912787765e-38, 1.0008264983378071e-40, 4.9161713392811270e-43, 1.8401448448645002e-45, 5.1156622686963731e-48, 1.0238881258836751e-50, 1.4196212763395569e-53, 1.2986758900322505e-56, 7.3575399530382127e-60, 2.3705848077171715e-63, 3.8524845909450548e-67, 2.6377634936769698e-71, 5.6618251917679648e-76, 2.1696906628474067e-81, 3.5392388249213494e-88}, + {5.1213633747657793e-89, 3.3107952448155878e-82, 9.0350209598558634e-77, 4.3835962164609016e-72, 6.6500528211701889e-68, 4.2427446781122925e-64, 1.3635093884813276e-60, 2.4895722182831332e-57, 2.8129093907478575e-54, 2.0956899110692340e-51, 1.0810706995222149e-48, 4.0133902027335798e-46, 1.1062614025677837e-43, 2.3230252252301317e-41, 3.7968846438904689e-39, 4.9189778171570439e-37, 5.1304565438169636e-35, 4.3662939232282773e-33, 3.0678714585911782e-31, 1.7980362370024124e-29, 8.8704657684441925e-28, 3.7135654602235681e-26, 1.3288244259482261e-24, 4.0906197763453875e-23, 1.0896574554298035e-21, 2.5250019651414982e-20, 5.1142598161040868e-19, 9.0937679770517361e-18, 1.4251779817419312e-16, 1.9757601287853092e-15, 2.4309836206138916e-14, 2.6627832873106297e-13, 2.6038192973897978e-12, 2.2788951363311533e-11, 1.7893870863289438e-10, 1.2632724994941663e-09, 8.0347791732044002e-09, 4.6125099246668157e-08, 2.3940078640211405e-07, 1.1251692413517793e-06, 4.7955457442441056e-06, 1.8559162409889888e-05, 6.5298413383660050e-05, 2.0909754829024987e-04, 6.1000345990479894e-04, 1.6227318637929411e-03, 3.9395573314086755e-03, 8.7347828708895063e-03, 1.7698763026162554e-02, 3.2791967492410569e-02, 5.5582763848207158e-02, 8.6226926941764842e-02, 1.2246902437683259e-01, 1.5929780439003277e-01, 1.8979412167575810e-01, 2.0715930730796278e-01, 2.0715930730796278e-01, 1.8979412167575810e-01, 1.5929780439003277e-01, 1.2246902437683259e-01, 8.6226926941764842e-02, 5.5582763848207158e-02, 
3.2791967492410569e-02, 1.7698763026162554e-02, 8.7347828708895063e-03, 3.9395573314086755e-03, 1.6227318637929411e-03, 6.1000345990479894e-04, 2.0909754829024987e-04, 6.5298413383660050e-05, 1.8559162409889888e-05, 4.7955457442441056e-06, 1.1251692413517793e-06, 2.3940078640211405e-07, 4.6125099246668157e-08, 8.0347791732044002e-09, 1.2632724994941663e-09, 1.7893870863289438e-10, 2.2788951363311533e-11, 2.6038192973897978e-12, 2.6627832873106297e-13, 2.4309836206138916e-14, 1.9757601287853092e-15, 1.4251779817419312e-16, 9.0937679770517361e-18, 5.1142598161040868e-19, 2.5250019651414982e-20, 1.0896574554298035e-21, 4.0906197763453875e-23, 1.3288244259482261e-24, 3.7135654602235681e-26, 8.8704657684441925e-28, 1.7980362370024124e-29, 3.0678714585911782e-31, 4.3662939232282773e-33, 5.1304565438169636e-35, 4.9189778171570439e-37, 3.7968846438904689e-39, 2.3230252252301317e-41, 1.1062614025677837e-43, 4.0133902027335798e-46, 1.0810706995222149e-48, 2.0956899110692340e-51, 2.8129093907478575e-54, 2.4895722182831332e-57, 1.3635093884813276e-60, 4.2427446781122925e-64, 6.6500528211701889e-68, 4.3835962164609016e-72, 9.0350209598558634e-77, 3.3107952448155878e-82, 5.1213633747657793e-89}, + {7.4077801156809802e-90, 5.0483520825696067e-83, 1.4403225795247276e-77, 7.2755698062212851e-73, 1.1461458182067259e-68, 7.5798509757209669e-65, 2.5217356180713449e-61, 4.7616681817655899e-58, 5.5596107723280791e-55, 4.2776169290546349e-52, 2.2777250019423805e-49, 8.7248851122544073e-47, 2.4806792113142151e-44, 5.3718421575826214e-42, 9.0525141699026487e-40, 1.2089985963915576e-37, 1.2997822407575913e-35, 1.1401487006660065e-33, 8.2566517430946983e-32, 4.9874725361855554e-30, 2.5360177279806859e-28, 1.0943086662261828e-26, 4.0363441926925487e-25, 1.2809131432068308e-23, 3.5178503719335083e-22, 8.4054498872392565e-21, 1.7557265568072654e-19, 3.2200557928719378e-18, 5.2060944517170957e-17, 7.4470753817100703e-16, 9.4565868275202180e-15, 1.0692722214257240e-13, 1.0796165415752499e-12, 9.7589319582302090e-12, 7.9162851028857836e-11, 5.7753749442476326e-10, 3.7971425911450754e-09, 2.2540365079384215e-08, 1.2101463098116854e-07, 5.8853747125042215e-07, 2.5965756360441134e-06, 1.0406346919868978e-05, 3.7931294208686378e-05, 1.2588825538260341e-04, 3.8080638386839771e-04, 1.0508900454889139e-03, 2.6479410521627993e-03, 6.0965525061442453e-03, 1.2834439196438882e-02, 2.4719722908456396e-02, 4.3582193779905874e-02, 7.0366531191907819e-02, 1.0408220923468044e-01, 1.4108198634981012e-01, 1.7528831212387214e-01, 1.9966087345702854e-01, 2.0851381638716077e-01, 1.9966087345702854e-01, 1.7528831212387214e-01, 1.4108198634981012e-01, 1.0408220923468044e-01, 7.0366531191907819e-02, 4.3582193779905874e-02, 2.4719722908456396e-02, 1.2834439196438882e-02, 6.0965525061442453e-03, 2.6479410521627993e-03, 1.0508900454889139e-03, 3.8080638386839771e-04, 1.2588825538260341e-04, 3.7931294208686378e-05, 1.0406346919868978e-05, 2.5965756360441134e-06, 5.8853747125042215e-07, 1.2101463098116854e-07, 2.2540365079384215e-08, 3.7971425911450754e-09, 5.7753749442476326e-10, 7.9162851028857836e-11, 9.7589319582302090e-12, 1.0796165415752499e-12, 1.0692722214257240e-13, 9.4565868275202180e-15, 7.4470753817100703e-16, 5.2060944517170957e-17, 3.2200557928719378e-18, 1.7557265568072654e-19, 8.4054498872392565e-21, 3.5178503719335083e-22, 1.2809131432068308e-23, 4.0363441926925487e-25, 1.0943086662261828e-26, 2.5360177279806859e-28, 4.9874725361855554e-30, 8.2566517430946983e-32, 1.1401487006660065e-33, 1.2997822407575913e-35, 
1.2089985963915576e-37, 9.0525141699026487e-40, 5.3718421575826214e-42, 2.4806792113142151e-44, 8.7248851122544073e-47, 2.2777250019423805e-49, 4.2776169290546349e-52, 5.5596107723280791e-55, 4.7616681817655899e-58, 2.5217356180713449e-61, 7.5798509757209669e-65, 1.1461458182067259e-68, 7.2755698062212851e-73, 1.4403225795247276e-77, 5.0483520825696067e-83, 7.4077801156809802e-90}, + {1.0710750710094196e-90, 7.6922700260785516e-84, 2.2937970374962485e-78, 1.2060181138762046e-73, 1.9724028846801179e-69, 1.3517892214658933e-65, 4.6544766331713673e-62, 9.0869836287455904e-59, 1.0961147706353286e-55, 8.7075616610856478e-53, 4.7848109229537450e-50, 1.8906897473607166e-47, 5.5435860303288234e-45, 1.2376390647546546e-42, 2.1498363403641766e-40, 2.9591207501866347e-38, 3.2783946198859832e-36, 2.9632924702764426e-34, 2.2111690104503776e-32, 1.3762537782433349e-30, 7.2106896606964717e-29, 3.2061793655442875e-27, 1.2186704300488311e-25, 3.9856906328389221e-24, 1.1282136742777468e-22, 2.7788009433178836e-21, 5.9840605797879746e-20, 1.1316516319423812e-18, 1.8868897366381483e-17, 2.7841156852775031e-16, 3.6474708837523359e-15, 4.2559572408720686e-14, 4.4353966285433845e-13, 4.1393133324428903e-12, 3.4675788374516700e-11, 2.6132836673568577e-10, 1.7753911292124313e-09, 1.0893439091005499e-08, 6.0471668709771245e-08, 3.0419237917584698e-07, 1.3886494911822552e-06, 5.7606576662754425e-06, 2.1743312833175104e-05, 7.4756242475529144e-05, 2.3436272202084139e-04, 6.7059411024834642e-04, 1.7528045021959946e-03, 4.1883749721869905e-03, 9.1558081843385875e-03, 1.8321199420242032e-02, 3.3577761609077682e-02, 5.6388855176296548e-02, 8.6806251150572486e-02, 1.2253708910746171e-01, 1.5865594890874077e-01, 1.8845256425575369e-01, 2.0538128483764440e-01, 2.0538128483764440e-01, 1.8845256425575369e-01, 1.5865594890874077e-01, 1.2253708910746171e-01, 8.6806251150572486e-02, 5.6388855176296548e-02, 3.3577761609077682e-02, 1.8321199420242032e-02, 9.1558081843385875e-03, 4.1883749721869905e-03, 1.7528045021959946e-03, 6.7059411024834642e-04, 2.3436272202084139e-04, 7.4756242475529144e-05, 2.1743312833175104e-05, 5.7606576662754425e-06, 1.3886494911822552e-06, 3.0419237917584698e-07, 6.0471668709771245e-08, 1.0893439091005499e-08, 1.7753911292124313e-09, 2.6132836673568577e-10, 3.4675788374516700e-11, 4.1393133324428903e-12, 4.4353966285433845e-13, 4.2559572408720686e-14, 3.6474708837523359e-15, 2.7841156852775031e-16, 1.8868897366381483e-17, 1.1316516319423812e-18, 5.9840605797879746e-20, 2.7788009433178836e-21, 1.1282136742777468e-22, 3.9856906328389221e-24, 1.2186704300488311e-25, 3.2061793655442875e-27, 7.2106896606964717e-29, 1.3762537782433349e-30, 2.2111690104503776e-32, 2.9632924702764426e-34, 3.2783946198859832e-36, 2.9591207501866347e-38, 2.1498363403641766e-40, 1.2376390647546546e-42, 5.5435860303288234e-45, 1.8906897473607166e-47, 4.7848109229537450e-50, 8.7075616610856478e-53, 1.0961147706353286e-55, 9.0869836287455904e-59, 4.6544766331713673e-62, 1.3517892214658933e-65, 1.9724028846801179e-69, 1.2060181138762046e-73, 2.2937970374962485e-78, 7.6922700260785516e-84, 1.0710750710094196e-90}, + {1.5480449561254974e-91, 1.1712554562855694e-84, 3.6493992259071536e-79, 1.9966381452254604e-74, 3.3892412733092808e-70, 2.4065988574277261e-66, 8.5740429722944209e-63, 1.7303021840396506e-59, 2.1557967924444521e-56, 1.7677871947095523e-53, 1.0022253044363931e-50, 4.0842818848410062e-48, 1.2346472555075369e-45, 2.8411408701159342e-43, 5.0858613958959180e-41, 7.2130038186701857e-39, 8.2330481300630906e-37, 
7.6663204905842568e-35, 5.8928891499709780e-33, 3.7782612371775474e-31, 2.0392097722330042e-29, 9.3406997569354054e-28, 3.6577061009128777e-26, 1.2325099622415151e-24, 3.5948795028461328e-23, 9.1244425221919135e-22, 2.0251548550899527e-20, 3.9477817506225531e-19, 6.7863601469508643e-18, 1.0325389659471167e-16, 1.3951643164647175e-15, 1.6793350772844097e-14, 1.8058345392733700e-13, 1.7393402348430495e-12, 1.5041987406688428e-11, 1.1705945482958366e-10, 8.2144797343634809e-10, 5.2077334277669068e-09, 2.9879393170121428e-08, 1.5539942997674910e-07, 7.3371456445731177e-07, 3.1491925110163785e-06, 1.2303027527173753e-05, 4.3799266325778880e-05, 1.4224028492262314e-04, 4.2179171252798944e-04, 1.1430722829481254e-03, 2.8333080652480614e-03, 6.4278810024544141e-03, 1.3355861419689084e-02, 2.5430213894352387e-02, 4.4393059115154253e-02, 7.1080397415348953e-02, 1.0442577716745464e-01, 1.4080345768904715e-01, 1.7428622967828752e-01, 1.9807313269914872e-01, 2.0670065276640287e-01, 1.9807313269914872e-01, 1.7428622967828752e-01, 1.4080345768904715e-01, 1.0442577716745464e-01, 7.1080397415348953e-02, 4.4393059115154253e-02, 2.5430213894352387e-02, 1.3355861419689084e-02, 6.4278810024544141e-03, 2.8333080652480614e-03, 1.1430722829481254e-03, 4.2179171252798944e-04, 1.4224028492262314e-04, 4.3799266325778880e-05, 1.2303027527173753e-05, 3.1491925110163785e-06, 7.3371456445731177e-07, 1.5539942997674910e-07, 2.9879393170121428e-08, 5.2077334277669068e-09, 8.2144797343634809e-10, 1.1705945482958366e-10, 1.5041987406688428e-11, 1.7393402348430495e-12, 1.8058345392733700e-13, 1.6793350772844097e-14, 1.3951643164647175e-15, 1.0325389659471167e-16, 6.7863601469508643e-18, 3.9477817506225531e-19, 2.0251548550899527e-20, 9.1244425221919135e-22, 3.5948795028461328e-23, 1.2325099622415151e-24, 3.6577061009128777e-26, 9.3406997569354054e-28, 2.0392097722330042e-29, 3.7782612371775474e-31, 5.8928891499709780e-33, 7.6663204905842568e-35, 8.2330481300630906e-37, 7.2130038186701857e-39, 5.0858613958959180e-41, 2.8411408701159342e-43, 1.2346472555075369e-45, 4.0842818848410062e-48, 1.0022253044363931e-50, 1.7677871947095523e-53, 2.1557967924444521e-56, 1.7303021840396506e-59, 8.5740429722944209e-63, 2.4065988574277261e-66, 3.3892412733092808e-70, 1.9966381452254604e-74, 3.6493992259071536e-79, 1.1712554562855694e-84, 1.5480449561254974e-91}, + {2.2365648981246453e-92, 1.7821550800540237e-85, 5.8004986924644819e-80, 3.3015035960765624e-75, 5.8152759548633637e-71, 4.2771691981814217e-67, 1.5763666548556604e-63, 3.2876134769273718e-60, 4.2297673811987948e-57, 3.5794857851611975e-54, 2.0932606053442080e-51, 8.7956536234027020e-49, 2.7406328866050857e-46, 6.4989825715662458e-44, 1.1986003025152256e-41, 1.7511205172582050e-39, 2.0587364582129049e-37, 1.9743896190133811e-35, 1.5630006293499951e-33, 1.0320466636328443e-31, 5.7365178511704523e-30, 2.7061869712127610e-28, 1.0914439534101697e-26, 3.7881611827990218e-25, 1.1381700734483244e-23, 2.9761942889320718e-22, 6.8061278108013458e-21, 1.3672374402361970e-19, 2.4223988311307281e-18, 3.7993465086131341e-17, 5.2930247423722938e-16, 6.5702357143564891e-15, 7.2875563565483730e-14, 7.2418684802603713e-13, 6.4631097928536238e-12, 5.1919141781420298e-11, 3.7618946881237821e-10, 2.4632483182140787e-09, 1.4601515540917063e-08, 7.8484291703314789e-08, 3.8310233242483633e-07, 1.7005728326428347e-06, 6.8734922472265778e-06, 2.5326195751014174e-05, 8.5160665584721077e-05, 2.6158391341408045e-04, 7.3463960542667715e-04, 1.8879100806443562e-03, 4.4427288798279318e-03, 
9.5799818951133077e-03, 1.8939956587000626e-02, 3.4349073289602634e-02, 5.7169633542064863e-02, 8.7356205785880797e-02, 1.2258446739609345e-01, 1.5801491756025524e-01, 1.8713802680418945e-01, 2.0364826010006534e-01, 2.0364826010006534e-01, 1.8713802680418945e-01, 1.5801491756025524e-01, 1.2258446739609345e-01, 8.7356205785880797e-02, 5.7169633542064863e-02, 3.4349073289602634e-02, 1.8939956587000626e-02, 9.5799818951133077e-03, 4.4427288798279318e-03, 1.8879100806443562e-03, 7.3463960542667715e-04, 2.6158391341408045e-04, 8.5160665584721077e-05, 2.5326195751014174e-05, 6.8734922472265778e-06, 1.7005728326428347e-06, 3.8310233242483633e-07, 7.8484291703314789e-08, 1.4601515540917063e-08, 2.4632483182140787e-09, 3.7618946881237821e-10, 5.1919141781420298e-11, 6.4631097928536238e-12, 7.2418684802603713e-13, 7.2875563565483730e-14, 6.5702357143564891e-15, 5.2930247423722938e-16, 3.7993465086131341e-17, 2.4223988311307281e-18, 1.3672374402361970e-19, 6.8061278108013458e-21, 2.9761942889320718e-22, 1.1381700734483244e-23, 3.7881611827990218e-25, 1.0914439534101697e-26, 2.7061869712127610e-28, 5.7365178511704523e-30, 1.0320466636328443e-31, 1.5630006293499951e-33, 1.9743896190133811e-35, 2.0587364582129049e-37, 1.7511205172582050e-39, 1.1986003025152256e-41, 6.4989825715662458e-44, 2.7406328866050857e-46, 8.7956536234027020e-49, 2.0932606053442080e-51, 3.5794857851611975e-54, 4.2297673811987948e-57, 3.2876134769273718e-60, 1.5763666548556604e-63, 4.2771691981814217e-67, 5.8152759548633637e-71, 3.3015035960765624e-75, 5.8004986924644819e-80, 1.7821550800540237e-85, 2.2365648981246453e-92}, + {3.2301009543101099e-93, 2.7098206745663592e-86, 9.2107100141581600e-81, 5.4525421540759655e-76, 9.9634282798974572e-72, 7.5888934406190700e-68, 2.8926678180441972e-64, 6.2331956354059190e-61, 8.2793896913033925e-58, 7.2291385544928827e-55, 4.3597174343439006e-52, 1.8884218782951360e-49, 6.0637045694189279e-47, 1.4814159278355307e-44, 2.8142461042205111e-42, 4.2343947760097721e-40, 5.1264009327908215e-38, 5.0622696862483853e-36, 4.1261954854591773e-34, 2.8051622162519962e-32, 1.6053724151348174e-30, 7.7976718238941548e-29, 3.2382364130273421e-27, 1.1573493767550765e-25, 3.5810407352214098e-24, 9.6443805701337253e-23, 2.2718330011553644e-21, 4.7015694865721316e-20, 8.5828869554158906e-19, 1.3872615165495405e-17, 1.9920232649990639e-16, 2.5491665563098012e-15, 2.9155331883629833e-14, 2.9881593623355600e-13, 2.7511620846328702e-12, 2.2805223736764051e-11, 1.7055424914056455e-10, 1.1530186653890739e-09, 7.0587498965529012e-09, 3.9196764143747362e-08, 1.9772532357280954e-07, 9.0734367403526114e-07, 3.7926166158590859e-06, 1.4457032475544561e-05, 5.0311336988073622e-05, 1.6000468687262702e-04, 4.6545282707817341e-04, 1.2395239105761761e-03, 3.0241014828653783e-03, 6.7638384706753211e-03, 1.3877372041544333e-02, 2.6131824932046500e-02, 4.5183798113816530e-02, 7.1766149805041285e-02, 1.0474325576824089e-01, 1.4051423956652490e-01, 1.7329818332155345e-01, 1.9652247660560301e-01, 2.0493398052053613e-01, 1.9652247660560301e-01, 1.7329818332155345e-01, 1.4051423956652490e-01, 1.0474325576824089e-01, 7.1766149805041285e-02, 4.5183798113816530e-02, 2.6131824932046500e-02, 1.3877372041544333e-02, 6.7638384706753211e-03, 3.0241014828653783e-03, 1.2395239105761761e-03, 4.6545282707817341e-04, 1.6000468687262702e-04, 5.0311336988073622e-05, 1.4457032475544561e-05, 3.7926166158590859e-06, 9.0734367403526114e-07, 1.9772532357280954e-07, 3.9196764143747362e-08, 7.0587498965529012e-09, 1.1530186653890739e-09, 
1.7055424914056455e-10, 2.2805223736764051e-11, 2.7511620846328702e-12, 2.9881593623355600e-13, 2.9155331883629833e-14, 2.5491665563098012e-15, 1.9920232649990639e-16, 1.3872615165495405e-17, 8.5828869554158906e-19, 4.7015694865721316e-20, 2.2718330011553644e-21, 9.6443805701337253e-23, 3.5810407352214098e-24, 1.1573493767550765e-25, 3.2382364130273421e-27, 7.7976718238941548e-29, 1.6053724151348174e-30, 2.8051622162519962e-32, 4.1261954854591773e-34, 5.0622696862483853e-36, 5.1264009327908215e-38, 4.2343947760097721e-40, 2.8142461042205111e-42, 1.4814159278355307e-44, 6.0637045694189279e-47, 1.8884218782951360e-49, 4.3597174343439006e-52, 7.2291385544928827e-55, 8.2793896913033925e-58, 6.2331956354059190e-61, 2.8926678180441972e-64, 7.5888934406190700e-68, 9.9634282798974572e-72, 5.4525421540759655e-76, 9.2107100141581600e-81, 2.7098206745663592e-86, 3.2301009543101099e-93}, + {4.6632605618348153e-94, 4.1175713624143213e-87, 1.4612038470881841e-81, 8.9943320432041730e-77, 1.7046189545614933e-72, 1.3442523656293476e-68, 5.2981258920312120e-65, 1.1793052736766146e-61, 1.6168482075284075e-58, 1.4562812961539782e-55, 9.0550269871980438e-53, 4.0423080380262144e-50, 1.3372947653240557e-47, 3.3652104040755819e-45, 6.5834566491742290e-43, 1.0199300069374435e-40, 1.2712364015219454e-38, 1.2922794919134986e-36, 1.0842626381128112e-34, 7.5875981466261195e-33, 4.4697458344428785e-31, 2.2348156961076159e-29, 9.5537375722030760e-28, 3.5151519348961093e-26, 1.1197954114046576e-24, 3.1052469143218520e-23, 7.5325222776132763e-22, 1.6054837533511592e-20, 3.0189715471734914e-19, 5.0270892648593941e-18, 7.4380949511123831e-17, 9.8097342377809155e-16, 1.1565317290963977e-14, 1.2221338000525176e-13, 1.1603996128858022e-12, 9.9222265232533288e-12, 7.6565621148965857e-11, 5.3422379271962827e-10, 3.3764133497574799e-09, 1.9362011885244885e-08, 1.0089574633621321e-07, 4.7845042712908339e-07, 2.0673284334051564e-06, 8.1491561908568428e-06, 2.9337744635072858e-05, 9.6558931405380603e-05, 2.9081250785794189e-04, 8.0215126322350460e-04, 2.0279429361286859e-03, 4.7023255147965815e-03, 1.0006845717689308e-02, 1.9554616052364186e-02, 3.5105886050128668e-02, 5.7925787803644498e-02, 8.7878107549790321e-02, 1.2261249660444425e-01, 1.5737520760258733e-01, 1.8584966291329158e-01, 2.0195836706954076e-01, 2.0195836706954076e-01, 1.8584966291329158e-01, 1.5737520760258733e-01, 1.2261249660444425e-01, 8.7878107549790321e-02, 5.7925787803644498e-02, 3.5105886050128668e-02, 1.9554616052364186e-02, 1.0006845717689308e-02, 4.7023255147965815e-03, 2.0279429361286859e-03, 8.0215126322350460e-04, 2.9081250785794189e-04, 9.6558931405380603e-05, 2.9337744635072858e-05, 8.1491561908568428e-06, 2.0673284334051564e-06, 4.7845042712908339e-07, 1.0089574633621321e-07, 1.9362011885244885e-08, 3.3764133497574799e-09, 5.3422379271962827e-10, 7.6565621148965857e-11, 9.9222265232533288e-12, 1.1603996128858022e-12, 1.2221338000525176e-13, 1.1565317290963977e-14, 9.8097342377809155e-16, 7.4380949511123831e-17, 5.0270892648593941e-18, 3.0189715471734914e-19, 1.6054837533511592e-20, 7.5325222776132763e-22, 3.1052469143218520e-23, 1.1197954114046576e-24, 3.5151519348961093e-26, 9.5537375722030760e-28, 2.2348156961076159e-29, 4.4697458344428785e-31, 7.5875981466261195e-33, 1.0842626381128112e-34, 1.2922794919134986e-36, 1.2712364015219454e-38, 1.0199300069374435e-40, 6.5834566491742290e-43, 3.3652104040755819e-45, 1.3372947653240557e-47, 4.0423080380262144e-50, 9.0550269871980438e-53, 1.4562812961539782e-55, 1.6168482075284075e-58, 
1.1793052736766146e-61, 5.2981258920312120e-65, 1.3442523656293476e-68, 1.7046189545614933e-72, 8.9943320432041730e-77, 1.4612038470881841e-81, 4.1175713624143213e-87, 4.6632605618348153e-94}, + {6.7298368161176175e-95, 6.2524679295400819e-88, 2.3159244482213129e-82, 1.4819345628019795e-77, 2.9122929849292479e-73, 2.3772467547464570e-69, 9.6859162631794203e-66, 2.2265944526096699e-62, 3.1502482452041196e-59, 2.9262669926463904e-56, 1.8755830649756743e-53, 8.6273958082206037e-51, 2.9399528685004811e-48, 7.6185958106575033e-46, 1.5345315352659727e-43, 2.4472631035773749e-41, 3.1395792592468297e-39, 3.2847155364958309e-37, 2.8362637361915701e-35, 2.0425551530268673e-33, 1.2382461823632275e-31, 6.3712964630586884e-30, 2.8030942195831802e-28, 1.0614795301061955e-26, 3.4805041051243282e-25, 9.9351833121692472e-24, 2.4810981347002888e-22, 5.4448704650642035e-21, 1.0543376862850843e-19, 1.8081886383358570e-18, 2.7559344338025971e-17, 3.7447612695692442e-16, 4.5495653050067822e-15, 4.9552804559564594e-14, 4.8505588604403995e-13, 4.2769240959330674e-12, 3.4041116737305440e-11, 2.4505079134311513e-10, 1.5983528945271918e-09, 9.4619152361816427e-09, 5.0915075227638090e-08, 2.4939923966935258e-07, 1.1135208095853982e-06, 4.5371664146194656e-06, 1.6890448115948282e-05, 5.7506280394736890e-05, 1.7923272339346707e-04, 5.1182575908404512e-04, 1.3402041554580024e-03, 3.2201249358166924e-03, 7.1040413328426078e-03, 1.4398510037728523e-02, 2.6824309528990144e-02, 4.5954734368354178e-02, 7.2424817070363218e-02, 1.0503603365193327e-01, 1.4021530912606431e-01, 1.7232400830089628e-01, 1.9500749227238295e-01, 2.0321184623044758e-01, 1.9500749227238295e-01, 1.7232400830089628e-01, 1.4021530912606431e-01, 1.0503603365193327e-01, 7.2424817070363218e-02, 4.5954734368354178e-02, 2.6824309528990144e-02, 1.4398510037728523e-02, 7.1040413328426078e-03, 3.2201249358166924e-03, 1.3402041554580024e-03, 5.1182575908404512e-04, 1.7923272339346707e-04, 5.7506280394736890e-05, 1.6890448115948282e-05, 4.5371664146194656e-06, 1.1135208095853982e-06, 2.4939923966935258e-07, 5.0915075227638090e-08, 9.4619152361816427e-09, 1.5983528945271918e-09, 2.4505079134311513e-10, 3.4041116737305440e-11, 4.2769240959330674e-12, 4.8505588604403995e-13, 4.9552804559564594e-14, 4.5495653050067822e-15, 3.7447612695692442e-16, 2.7559344338025971e-17, 1.8081886383358570e-18, 1.0543376862850843e-19, 5.4448704650642035e-21, 2.4810981347002888e-22, 9.9351833121692472e-24, 3.4805041051243282e-25, 1.0614795301061955e-26, 2.8030942195831802e-28, 6.3712964630586884e-30, 1.2382461823632275e-31, 2.0425551530268673e-33, 2.8362637361915701e-35, 3.2847155364958309e-37, 3.1395792592468297e-39, 2.4472631035773749e-41, 1.5345315352659727e-43, 7.6185958106575033e-46, 2.9399528685004811e-48, 8.6273958082206037e-51, 1.8755830649756743e-53, 2.9262669926463904e-56, 3.1502482452041196e-59, 2.2265944526096699e-62, 9.6859162631794203e-66, 2.3772467547464570e-69, 2.9122929849292479e-73, 1.4819345628019795e-77, 2.3159244482213129e-82, 6.2524679295400819e-88, 6.7298368161176175e-95}, + {9.7087403173561320e-96, 9.4880223969421335e-89, 3.6672428899258345e-83, 2.4388595277598848e-78, 4.9686800178709975e-74, 4.1972934212808472e-70, 1.7675264508544507e-66, 4.1953576796012743e-63, 6.1240687137526128e-60, 5.8655575443381544e-57, 3.8745058915294428e-54, 1.8359883902918249e-51, 6.4431660432386704e-49, 1.7190498209667764e-46, 3.5641227393846403e-44, 5.8499085615955162e-42, 7.7228330960795927e-40, 8.3138040908205878e-38, 7.3861592386371398e-36, 5.4726905931533611e-34, 
3.4133817510443908e-32, 1.8070157219004075e-30, 8.1797979499699255e-29, 3.1872062136052619e-27, 1.0753877987999934e-25, 3.1590858369572161e-24, 8.1196497360506958e-23, 1.8341746417696775e-21, 3.6563781425498920e-20, 6.4565140315879466e-19, 1.0133911369582986e-17, 1.4182835695860085e-16, 1.7750978597330652e-15, 1.9921528797000957e-14, 2.0097524571107850e-13, 1.8267486127018911e-12, 1.4991784686824891e-11, 1.1130625000696823e-10, 7.4897520452156729e-10, 4.5753933756254367e-09, 2.5414386492665182e-08, 1.2854272339650551e-07, 5.9280419576037410e-07, 2.4957777427586521e-06, 9.6033945516455336e-06, 3.3808196410240332e-05, 1.0899702609675318e-04, 3.2209543490435076e-04, 8.7313245576434379e-04, 2.1727898312954579e-03, 4.9668732525919509e-03, 1.0435962135902662e-02, 2.0164796980741757e-02, 3.5848215647758121e-02, 5.8657999750043426e-02, 8.8373210437321048e-02, 1.2262242405763166e-01, 1.5673726302678062e-01, 1.8458665957131964e-01, 2.0030984626187961e-01, 2.0030984626187961e-01, 1.8458665957131964e-01, 1.5673726302678062e-01, 1.2262242405763166e-01, 8.8373210437321048e-02, 5.8657999750043426e-02, 3.5848215647758121e-02, 2.0164796980741757e-02, 1.0435962135902662e-02, 4.9668732525919509e-03, 2.1727898312954579e-03, 8.7313245576434379e-04, 3.2209543490435076e-04, 1.0899702609675318e-04, 3.3808196410240332e-05, 9.6033945516455336e-06, 2.4957777427586521e-06, 5.9280419576037410e-07, 1.2854272339650551e-07, 2.5414386492665182e-08, 4.5753933756254367e-09, 7.4897520452156729e-10, 1.1130625000696823e-10, 1.4991784686824891e-11, 1.8267486127018911e-12, 2.0097524571107850e-13, 1.9921528797000957e-14, 1.7750978597330652e-15, 1.4182835695860085e-16, 1.0133911369582986e-17, 6.4565140315879466e-19, 3.6563781425498920e-20, 1.8341746417696775e-21, 8.1196497360506958e-23, 3.1590858369572161e-24, 1.0753877987999934e-25, 3.1872062136052619e-27, 8.1797979499699255e-29, 1.8070157219004075e-30, 3.4133817510443908e-32, 5.4726905931533611e-34, 7.3861592386371398e-36, 8.3138040908205878e-38, 7.7228330960795927e-40, 5.8499085615955162e-42, 3.5641227393846403e-44, 1.7190498209667764e-46, 6.4431660432386704e-49, 1.8359883902918249e-51, 3.8745058915294428e-54, 5.8655575443381544e-57, 6.1240687137526128e-60, 4.1953576796012743e-63, 1.7675264508544507e-66, 4.1972934212808472e-70, 4.9686800178709975e-74, 2.4388595277598848e-78, 3.6672428899258345e-83, 9.4880223969421335e-89, 9.7087403173561320e-96}, + {1.4001253071178789e-96, 1.4388577548096490e-89, 5.8017947700800046e-84, 4.0091236159545663e-79, 8.4655265882771739e-75, 7.3990554223004025e-71, 3.2196574821489126e-67, 7.8890210982671253e-64, 1.1878725902550843e-60, 1.1728639625604033e-57, 7.9826611475407281e-55, 3.8960048884898755e-52, 1.4077485752140258e-49, 3.8661220025482209e-47, 8.2491446524855651e-45, 1.3931625443392332e-42, 1.8922123418053773e-40, 2.0955223902923607e-38, 1.9150597358365338e-36, 1.4595508732329443e-34, 9.3637976764322119e-33, 5.0989613303209105e-31, 2.3742569610821154e-29, 9.5165969692199733e-28, 3.3033284608828233e-26, 9.9838789745986388e-25, 2.6403982498871405e-23, 6.1378568842560367e-22, 1.2592940912286238e-20, 2.2889503956241953e-19, 3.6986721862085567e-18, 5.3301125275267336e-17, 6.8703687643915730e-16, 7.9423755813393362e-15, 8.2552749332412685e-14, 7.7326044861505372e-13, 6.5412509847214547e-12, 5.0072069799282932e-11, 3.4747667141215967e-10, 2.1897184637878725e-09, 1.2550651872012550e-08, 6.5522732997355584e-08, 3.1199715417437437e-07, 1.3566980827714974e-06, 5.3937313862223019e-06, 1.9625894008346668e-05, 6.5422509840297027e-05, 
1.9997290405207970e-04, 5.6094024606844449e-04, 1.4450639870005968e-03, 3.4211784137958604e-03, 7.4481168960531027e-03, 1.4918844448036933e-02, 2.7507458506267436e-02, 4.6706206412519734e-02, 7.3057392793912337e-02, 1.0530541916565815e-01, 1.3990756676688021e-01, 1.7136352815479530e-01, 1.9352684051681762e-01, 2.0153240948474130e-01, 1.9352684051681762e-01, 1.7136352815479530e-01, 1.3990756676688021e-01, 1.0530541916565815e-01, 7.3057392793912337e-02, 4.6706206412519734e-02, 2.7507458506267436e-02, 1.4918844448036933e-02, 7.4481168960531027e-03, 3.4211784137958604e-03, 1.4450639870005968e-03, 5.6094024606844449e-04, 1.9997290405207970e-04, 6.5422509840297027e-05, 1.9625894008346668e-05, 5.3937313862223019e-06, 1.3566980827714974e-06, 3.1199715417437437e-07, 6.5522732997355584e-08, 1.2550651872012550e-08, 2.1897184637878725e-09, 3.4747667141215967e-10, 5.0072069799282932e-11, 6.5412509847214547e-12, 7.7326044861505372e-13, 8.2552749332412685e-14, 7.9423755813393362e-15, 6.8703687643915730e-16, 5.3301125275267336e-17, 3.6986721862085567e-18, 2.2889503956241953e-19, 1.2592940912286238e-20, 6.1378568842560367e-22, 2.6403982498871405e-23, 9.9838789745986388e-25, 3.3033284608828233e-26, 9.5165969692199733e-28, 2.3742569610821154e-29, 5.0989613303209105e-31, 9.3637976764322119e-33, 1.4595508732329443e-34, 1.9150597358365338e-36, 2.0955223902923607e-38, 1.8922123418053773e-40, 1.3931625443392332e-42, 8.2491446524855651e-45, 3.8661220025482209e-47, 1.4077485752140258e-49, 3.8960048884898755e-52, 7.9826611475407281e-55, 1.1728639625604033e-57, 1.1878725902550843e-60, 7.8890210982671253e-64, 3.2196574821489126e-67, 7.3990554223004025e-71, 8.4655265882771739e-75, 4.0091236159545663e-79, 5.8017947700800046e-84, 1.4388577548096490e-89, 1.4001253071178789e-96}, + {2.0184534275095049e-97, 2.1806296158265287e-90, 9.1706055028522197e-85, 6.5830036453656527e-80, 1.4403978976524620e-75, 1.3022823162971665e-71, 5.8544142858486073e-68, 1.4805274033207088e-64, 2.2990511219174665e-61, 2.3396177274687451e-58, 1.6403928051497794e-55, 8.2441788250067191e-53, 3.0664667336421114e-50, 8.6667923231989335e-48, 1.9026918231732264e-45, 3.3057067086043009e-43, 4.6182598989607685e-41, 5.2602254003426042e-39, 4.9438860107310490e-37, 3.8749094400485918e-35, 2.5564844139368277e-33, 1.4316070974387265e-31, 6.8553849798952103e-30, 2.8259664729965746e-28, 1.0088930738432750e-26, 3.1364243262134751e-25, 8.5327296120897214e-24, 2.0406372113926846e-22, 4.3078486936244584e-21, 8.0577481866273284e-20, 1.3400866628758495e-18, 1.9879461273563199e-17, 2.6381930823541892e-16, 3.1406382865933653e-15, 3.3622360761898003e-14, 3.2444814316722348e-13, 2.8281489919728500e-12, 2.2313290142958860e-11, 1.5963585528134907e-10, 1.0373981456665397e-09, 6.1333415530709063e-09, 3.3038743448338228e-08, 1.6237364967913452e-07, 7.2898765543366947e-07, 2.9932498983591025e-06, 1.1252541495603299e-05, 3.8767965827716168e-05, 1.2251950121646584e-04, 3.5547512496610876e-04, 9.4757899341760905e-04, 2.3223308982827312e-03, 5.2360833859067743e-03, 1.0866914248342787e-02, 2.0770153886127483e-02, 3.6576106509803016e-02, 5.9366942312670930e-02, 8.8842708636295842e-02, 1.2261541377174207e-01, 1.5610147984994696e-01, 1.8334823578007414e-01, 1.9870103704081624e-01, 1.9870103704081624e-01, 1.8334823578007414e-01, 1.5610147984994696e-01, 1.2261541377174207e-01, 8.8842708636295842e-02, 5.9366942312670930e-02, 3.6576106509803016e-02, 2.0770153886127483e-02, 1.0866914248342787e-02, 5.2360833859067743e-03, 2.3223308982827312e-03, 9.4757899341760905e-04, 
3.5547512496610876e-04, 1.2251950121646584e-04, 3.8767965827716168e-05, 1.1252541495603299e-05, 2.9932498983591025e-06, 7.2898765543366947e-07, 1.6237364967913452e-07, 3.3038743448338228e-08, 6.1333415530709063e-09, 1.0373981456665397e-09, 1.5963585528134907e-10, 2.2313290142958860e-11, 2.8281489919728500e-12, 3.2444814316722348e-13, 3.3622360761898003e-14, 3.1406382865933653e-15, 2.6381930823541892e-16, 1.9879461273563199e-17, 1.3400866628758495e-18, 8.0577481866273284e-20, 4.3078486936244584e-21, 2.0406372113926846e-22, 8.5327296120897214e-24, 3.1364243262134751e-25, 1.0088930738432750e-26, 2.8259664729965746e-28, 6.8553849798952103e-30, 1.4316070974387265e-31, 2.5564844139368277e-33, 3.8749094400485918e-35, 4.9438860107310490e-37, 5.2602254003426042e-39, 4.6182598989607685e-41, 3.3057067086043009e-43, 1.9026918231732264e-45, 8.6667923231989335e-48, 3.0664667336421114e-50, 8.2441788250067191e-53, 1.6403928051497794e-55, 2.3396177274687451e-58, 2.2990511219174665e-61, 1.4805274033207088e-64, 5.8544142858486073e-68, 1.3022823162971665e-71, 1.4403978976524620e-75, 6.5830036453656527e-80, 9.1706055028522197e-85, 2.1806296158265287e-90, 2.0184534275095049e-97}, + {2.9088440757674062e-98, 3.3027195553941757e-91, 1.4482784276490109e-85, 1.0797361898132115e-80, 2.4475687753038470e-76, 2.2885780042663531e-72, 1.0626701542492299e-68, 2.7730751722659017e-65, 4.4400764524986828e-62, 4.6560392499732150e-59, 3.3622797072552750e-56, 1.7396913258105826e-53, 6.6597646076210362e-51, 1.9366833085713306e-48, 4.3737554297590874e-46, 7.8155861441480535e-44, 1.1228680356492188e-41, 1.3151165218678305e-39, 1.2708875196505519e-37, 1.0241410150378061e-35, 6.9469156975836022e-34, 3.9996687161846431e-32, 1.9692128985290527e-30, 8.3465290951830949e-29, 3.0639885163656867e-27, 9.7951708742326283e-26, 2.7405597239380626e-24, 6.7411887298015296e-23, 1.4638683297363268e-21, 2.8169790254203712e-20, 4.8205257099851608e-19, 7.3591325007878809e-18, 1.0052255425278666e-16, 1.2319403285686423e-15, 1.3580008734485746e-14, 1.3496086984992707e-13, 1.2118575324056077e-12, 9.8514672314148016e-12, 7.2637761188053839e-11, 4.8661361388782973e-10, 2.9666085963352118e-09, 1.6482913905846427e-08, 8.3579939555984911e-08, 3.8727287415554441e-07, 1.6416874675897103e-06, 6.3737479499140128e-06, 2.2686436750985634e-05, 7.4097922186640899e-05, 2.2227085873416057e-04, 6.1281987967776825e-04, 1.5540467884754450e-03, 3.6270593070595858e-03, 7.7957038787030220e-03, 1.5437973179493971e-02, 2.8181096910943754e-02, 4.7438564768728979e-02, 7.3664835869183778e-02, 1.0555264545810959e-01, 1.3959184263252383e-01, 1.7041655690534233e-01, 1.9207925104318502e-01, 1.9989393461088162e-01, 1.9207925104318502e-01, 1.7041655690534233e-01, 1.3959184263252383e-01, 1.0555264545810959e-01, 7.3664835869183778e-02, 4.7438564768728979e-02, 2.8181096910943754e-02, 1.5437973179493971e-02, 7.7957038787030220e-03, 3.6270593070595858e-03, 1.5540467884754450e-03, 6.1281987967776825e-04, 2.2227085873416057e-04, 7.4097922186640899e-05, 2.2686436750985634e-05, 6.3737479499140128e-06, 1.6416874675897103e-06, 3.8727287415554441e-07, 8.3579939555984911e-08, 1.6482913905846427e-08, 2.9666085963352118e-09, 4.8661361388782973e-10, 7.2637761188053839e-11, 9.8514672314148016e-12, 1.2118575324056077e-12, 1.3496086984992707e-13, 1.3580008734485746e-14, 1.2319403285686423e-15, 1.0052255425278666e-16, 7.3591325007878809e-18, 4.8205257099851608e-19, 2.8169790254203712e-20, 1.4638683297363268e-21, 6.7411887298015296e-23, 2.7405597239380626e-24, 9.7951708742326283e-26, 
3.0639885163656867e-27, 8.3465290951830949e-29, 1.9692128985290527e-30, 3.9996687161846431e-32, 6.9469156975836022e-34, 1.0241410150378061e-35, 1.2708875196505519e-37, 1.3151165218678305e-39, 1.1228680356492188e-41, 7.8155861441480535e-44, 4.3737554297590874e-46, 1.9366833085713306e-48, 6.6597646076210362e-51, 1.7396913258105826e-53, 3.3622797072552750e-56, 4.6560392499732150e-59, 4.4400764524986828e-62, 2.7730751722659017e-65, 1.0626701542492299e-68, 2.2885780042663531e-72, 2.4475687753038470e-76, 1.0797361898132115e-80, 1.4482784276490109e-85, 3.3027195553941757e-91, 2.9088440757674062e-98}, + {4.1905794490628812e-99, 4.9990906972957580e-92, 2.2852295477811565e-86, 1.7690369566797577e-81, 4.1535499466158161e-77, 4.0157584892449402e-73, 1.9255984704437394e-69, 5.1840723409278860e-66, 8.5567547361303599e-63, 9.2443848020406234e-60, 6.8742017262988808e-57, 3.6611032011247543e-54, 1.4421371050636836e-51, 4.3141707603430363e-49, 1.0020507711410342e-46, 1.8412712525866596e-44, 2.7198617815524323e-42, 3.2749113002444449e-40, 3.2533264664077919e-38, 2.6949141835666013e-36, 1.8790207698162015e-34, 1.1120314056802636e-32, 5.6279071396682005e-31, 2.4520940513432773e-29, 9.2537661461189014e-28, 3.0414019572106789e-26, 8.7492009712123219e-25, 2.2129732702532020e-23, 4.9419868423536690e-22, 9.7813379414413797e-21, 1.7218091698469169e-19, 2.7043296085128987e-18, 3.8011128040478799e-17, 4.7943347392243567e-16, 5.4401721259012553e-15, 5.5664982903187763e-14, 5.1473099468312146e-13, 4.3100503800704298e-12, 3.2741601230785952e-11, 2.2604055819337785e-10, 1.4204990214913607e-09, 8.1379035349171669e-09, 4.2560154044109738e-08, 2.0345626953177357e-07, 8.9008863258739004e-07, 3.5675339063854564e-06, 1.3113467580430861e-05, 4.4247522456022853e-05, 1.3716931938884588e-04, 3.9098949747131122e-04, 1.0254795244170039e-03, 2.4764405325003970e-03, 5.5096709995779728e-03, 1.1299305506959472e-02, 2.1370374426895520e-02, 3.7289628484137964e-02, 6.0053278089003327e-02, 8.9287739344200062e-02, 1.2259255262751995e-01, 1.5546821084992313e-01, 1.8213364120711689e-01, 1.9713037059257607e-01, 1.9713037059257607e-01, 1.8213364120711689e-01, 1.5546821084992313e-01, 1.2259255262751995e-01, 8.9287739344200062e-02, 6.0053278089003327e-02, 3.7289628484137964e-02, 2.1370374426895520e-02, 1.1299305506959472e-02, 5.5096709995779728e-03, 2.4764405325003970e-03, 1.0254795244170039e-03, 3.9098949747131122e-04, 1.3716931938884588e-04, 4.4247522456022853e-05, 1.3113467580430861e-05, 3.5675339063854564e-06, 8.9008863258739004e-07, 2.0345626953177357e-07, 4.2560154044109738e-08, 8.1379035349171669e-09, 1.4204990214913607e-09, 2.2604055819337785e-10, 3.2741601230785952e-11, 4.3100503800704298e-12, 5.1473099468312146e-13, 5.5664982903187763e-14, 5.4401721259012553e-15, 4.7943347392243567e-16, 3.8011128040478799e-17, 2.7043296085128987e-18, 1.7218091698469169e-19, 9.7813379414413797e-21, 4.9419868423536690e-22, 2.2129732702532020e-23, 8.7492009712123219e-25, 3.0414019572106789e-26, 9.2537661461189014e-28, 2.4520940513432773e-29, 5.6279071396682005e-31, 1.1120314056802636e-32, 1.8790207698162015e-34, 2.6949141835666013e-36, 3.2533264664077919e-38, 3.2749113002444449e-40, 2.7198617815524323e-42, 1.8412712525866596e-44, 1.0020507711410342e-46, 4.3141707603430363e-49, 1.4421371050636836e-51, 3.6611032011247543e-54, 6.8742017262988808e-57, 9.2443848020406234e-60, 8.5567547361303599e-63, 5.1840723409278860e-66, 1.9255984704437394e-69, 4.0157584892449402e-73, 4.1535499466158161e-77, 1.7690369566797577e-81, 2.2852295477811565e-86, 
4.9990906972957580e-92, 4.1905794490628812e-99}, + {6.0350604964534262e-100, 7.5621198682031623e-93, 3.6027698446759891e-87, 2.8952659291406100e-82, 7.0395367135995835e-78, 7.0359072121742493e-74, 3.4833403833073089e-70, 9.6729013253984892e-67, 1.6455761362144333e-63, 1.8312341591411876e-60, 1.4019398864694188e-57, 7.6839475145600301e-55, 3.1138694261390015e-52, 9.5806542034971832e-50, 2.2882121049633724e-47, 4.3227155768633621e-45, 6.5638401618338331e-43, 8.1233879274180601e-41, 8.2938969771250512e-39, 7.0606944403026099e-37, 5.0593403396440372e-35, 3.0770662798790086e-33, 1.6004068212453668e-31, 7.1663558200524862e-30, 2.7795790004930404e-28, 9.3899224442264679e-27, 2.7766379169834465e-25, 7.2198960174204195e-24, 1.6577060031062816e-22, 3.3737146353721905e-21, 6.1074208727145320e-20, 9.8664119741289685e-19, 1.4266141735959953e-17, 1.8513770722928222e-16, 2.1618705462400890e-15, 2.2768505441090677e-14, 2.1674951892336612e-13, 1.8688765345567561e-12, 1.4622404081860438e-11, 1.0399946851470756e-10, 6.7347582712586187e-10, 3.9769084715886220e-09, 2.1444167115213114e-08, 1.0572466663977363e-07, 4.7716606625319424e-07, 1.9736622489528508e-06, 7.4891717673847164e-06, 2.6095502713919602e-05, 8.3569751965942746e-05, 2.4616923611611111e-04, 6.6748228082773623e-04, 1.6670890148279265e-03, 3.8375633568742918e-03, 8.1464528080591927e-03, 1.5955521786480558e-02, 2.8845081137418717e-02, 4.8152169340609514e-02, 7.4248071076568556e-02, 1.0577887521500248e-01, 1.3926890249425167e-01, 1.6948290095730328e-01, 1.9066351798670744e-01, 1.9829478313399460e-01, 1.9066351798670744e-01, 1.6948290095730328e-01, 1.3926890249425167e-01, 1.0577887521500248e-01, 7.4248071076568556e-02, 4.8152169340609514e-02, 2.8845081137418717e-02, 1.5955521786480558e-02, 8.1464528080591927e-03, 3.8375633568742918e-03, 1.6670890148279265e-03, 6.6748228082773623e-04, 2.4616923611611111e-04, 8.3569751965942746e-05, 2.6095502713919602e-05, 7.4891717673847164e-06, 1.9736622489528508e-06, 4.7716606625319424e-07, 1.0572466663977363e-07, 2.1444167115213114e-08, 3.9769084715886220e-09, 6.7347582712586187e-10, 1.0399946851470756e-10, 1.4622404081860438e-11, 1.8688765345567561e-12, 2.1674951892336612e-13, 2.2768505441090677e-14, 2.1618705462400890e-15, 1.8513770722928222e-16, 1.4266141735959953e-17, 9.8664119741289685e-19, 6.1074208727145320e-20, 3.3737146353721905e-21, 1.6577060031062816e-22, 7.2198960174204195e-24, 2.7766379169834465e-25, 9.3899224442264679e-27, 2.7795790004930404e-28, 7.1663558200524862e-30, 1.6004068212453668e-31, 3.0770662798790086e-33, 5.0593403396440372e-35, 7.0606944403026099e-37, 8.2938969771250512e-39, 8.1233879274180601e-41, 6.5638401618338331e-43, 4.3227155768633621e-45, 2.2882121049633724e-47, 9.5806542034971832e-50, 3.1138694261390015e-52, 7.6839475145600301e-55, 1.4019398864694188e-57, 1.8312341591411876e-60, 1.6455761362144333e-63, 9.6729013253984892e-67, 3.4833403833073089e-70, 7.0359072121742493e-74, 7.0395367135995835e-78, 2.8952659291406100e-82, 3.6027698446759891e-87, 7.5621198682031623e-93, 6.0350604964534262e-100}, + {8.6885048841104336e-101, 1.1432282229672125e-93, 5.6751482819801390e-88, 4.7334609039691596e-83, 1.1915620124280458e-78, 1.2309273526337388e-74, 6.2907076772692688e-71, 1.8014846076367508e-67, 3.1581319803836728e-64, 3.6193344722347900e-61, 2.8521414298372380e-58, 1.6084468211903416e-55, 6.7043872367938317e-53, 2.1211541634033625e-50, 5.2082971293259102e-48, 1.0113496653683935e-45, 1.5782902465389378e-43, 2.0072591434893221e-41, 2.1058526230258254e-39, 1.8420302384806772e-37, 
1.3561579038335388e-35, 8.4745466577882083e-34, 4.5287460314884707e-32, 2.0836578377129600e-30, 8.3043823092881025e-29, 2.8828182366037129e-27, 8.7606149843747518e-26, 2.3412386562212859e-24, 5.5254399484537480e-23, 1.1560142775887639e-21, 2.1516151218938123e-20, 3.5742061956149856e-19, 5.3150514071326065e-18, 7.0949360015714532e-17, 8.5234045323955501e-16, 9.2369780981720531e-15, 9.0500846874230352e-14, 8.0327883964150772e-13, 6.4713186612400961e-12, 4.7401870432354588e-11, 3.1621598261836951e-10, 1.9240529197354846e-09, 1.0693195796935537e-08, 5.4353072977389075e-08, 2.5298490233788676e-07, 1.0794646043238905e-06, 4.2268677400739964e-06, 1.5203524203072556e-05, 5.0277271260002348e-05, 1.5298771765756425e-04, 4.2867197747952282e-04, 1.1068159448702048e-03, 2.6349882348356921e-03, 5.7873557283189625e-03, 1.1732759367031630e-02, 2.1965177289949588e-02, 3.7988873885061170e-02, 6.0717658137057637e-02, 8.9709385493087268e-02, 1.2255485603934885e-01, 1.5483776980551858e-01, 1.8094215487742904e-01, 1.9559636350108756e-01, 1.9559636350108756e-01, 1.8094215487742904e-01, 1.5483776980551858e-01, 1.2255485603934885e-01, 8.9709385493087268e-02, 6.0717658137057637e-02, 3.7988873885061170e-02, 2.1965177289949588e-02, 1.1732759367031630e-02, 5.7873557283189625e-03, 2.6349882348356921e-03, 1.1068159448702048e-03, 4.2867197747952282e-04, 1.5298771765756425e-04, 5.0277271260002348e-05, 1.5203524203072556e-05, 4.2268677400739964e-06, 1.0794646043238905e-06, 2.5298490233788676e-07, 5.4353072977389075e-08, 1.0693195796935537e-08, 1.9240529197354846e-09, 3.1621598261836951e-10, 4.7401870432354588e-11, 6.4713186612400961e-12, 8.0327883964150772e-13, 9.0500846874230352e-14, 9.2369780981720531e-15, 8.5234045323955501e-16, 7.0949360015714532e-17, 5.3150514071326065e-18, 3.5742061956149856e-19, 2.1516151218938123e-20, 1.1560142775887639e-21, 5.5254399484537480e-23, 2.3412386562212859e-24, 8.7606149843747518e-26, 2.8828182366037129e-27, 8.3043823092881025e-29, 2.0836578377129600e-30, 4.5287460314884707e-32, 8.4745466577882083e-34, 1.3561579038335388e-35, 1.8420302384806772e-37, 2.1058526230258254e-39, 2.0072591434893221e-41, 1.5782902465389378e-43, 1.0113496653683935e-45, 5.2082971293259102e-48, 2.1211541634033625e-50, 6.7043872367938317e-53, 1.6084468211903416e-55, 2.8521414298372380e-58, 3.6193344722347900e-61, 3.1581319803836728e-64, 1.8014846076367508e-67, 6.2907076772692688e-71, 1.2309273526337388e-74, 1.1915620124280458e-78, 4.7334609039691596e-83, 5.6751482819801390e-88, 1.1432282229672125e-93, 8.6885048841104336e-101}, + {1.2504497509175530e-101, 1.7272798059471284e-94, 8.9321681572258524e-89, 7.7306185241136030e-84, 2.0143957652711162e-79, 2.1503714733677984e-75, 1.1341924208630308e-71, 3.3489139011900278e-68, 6.0486548964206330e-65, 7.1375092946576251e-62, 5.7884563375067011e-59, 3.3581166223962448e-56, 1.4394641949298623e-53, 4.6821808383361703e-51, 1.1817054440721172e-48, 2.3581659156082481e-46, 3.7814427940915093e-44, 4.9411031115925398e-42, 5.3255303775590573e-40, 4.7854390680281889e-38, 3.6191883446065631e-36, 2.3232083386416384e-34, 1.2753331411048401e-32, 6.0277753850946330e-31, 2.4679773241853991e-29, 8.8019567691972675e-28, 2.7482489212126092e-26, 7.5468218903320873e-25, 1.8303134636337642e-23, 3.9355990860983814e-22, 7.5293161638815711e-21, 1.2857997786762843e-19, 1.9659326888507207e-18, 2.6986511907298130e-17, 3.3344414303302576e-16, 3.7173303125266623e-15, 3.7473954472956373e-14, 3.4230094493503872e-13, 2.8385303725081926e-12, 2.1406920290521382e-11, 1.4706331273477511e-10, 
9.2173940967721533e-10, 5.2781663937137130e-09, 2.7650497045037171e-08, 1.3267855842580783e-07, 5.8380944276295039e-07, 2.3581561724849069e-06, 8.7524468034552877e-06, 2.9876790536001972e-05, 9.3874435720364848e-05, 2.7170762628015821e-04, 7.2493929742724068e-04, 1.7841208326818967e-03, 4.0524855186172149e-03, 8.5000263041808636e-03, 1.6471142241660967e-02, 2.9499296248305443e-02, 4.8847387114452009e-02, 7.4807989768816377e-02, 1.0598520508123921e-01, 1.3893945309094766e-01, 1.6856236074260383e-01, 1.8927849580179334e-01, 1.9673340688884505e-01, 1.8927849580179334e-01, 1.6856236074260383e-01, 1.3893945309094766e-01, 1.0598520508123921e-01, 7.4807989768816377e-02, 4.8847387114452009e-02, 2.9499296248305443e-02, 1.6471142241660967e-02, 8.5000263041808636e-03, 4.0524855186172149e-03, 1.7841208326818967e-03, 7.2493929742724068e-04, 2.7170762628015821e-04, 9.3874435720364848e-05, 2.9876790536001972e-05, 8.7524468034552877e-06, 2.3581561724849069e-06, 5.8380944276295039e-07, 1.3267855842580783e-07, 2.7650497045037171e-08, 5.2781663937137130e-09, 9.2173940967721533e-10, 1.4706331273477511e-10, 2.1406920290521382e-11, 2.8385303725081926e-12, 3.4230094493503872e-13, 3.7473954472956373e-14, 3.7173303125266623e-15, 3.3344414303302576e-16, 2.6986511907298130e-17, 1.9659326888507207e-18, 1.2857997786762843e-19, 7.5293161638815711e-21, 3.9355990860983814e-22, 1.8303134636337642e-23, 7.5468218903320873e-25, 2.7482489212126092e-26, 8.8019567691972675e-28, 2.4679773241853991e-29, 6.0277753850946330e-31, 1.2753331411048401e-32, 2.3232083386416384e-34, 3.6191883446065631e-36, 4.7854390680281889e-38, 5.3255303775590573e-40, 4.9411031115925398e-42, 3.7814427940915093e-44, 2.3581659156082481e-46, 1.1817054440721172e-48, 4.6821808383361703e-51, 1.4394641949298623e-53, 3.3581166223962448e-56, 5.7884563375067011e-59, 7.1375092946576251e-62, 6.0486548964206330e-65, 3.3489139011900278e-68, 1.1341924208630308e-71, 2.1503714733677984e-75, 2.0143957652711162e-79, 7.7306185241136030e-84, 8.9321681572258524e-89, 1.7272798059471284e-94, 1.2504497509175530e-101}, + {1.7990659801093241e-102, 2.6081724024084728e-95, 1.4046897713151180e-89, 1.2612494833385409e-84, 3.4012300869368197e-80, 3.7512158688048432e-76, 2.0415857972441075e-72, 6.2142441618306433e-69, 1.1561551640964042e-65, 1.4044672577405165e-62, 1.1719785012130235e-59, 6.9930729240520693e-57, 3.0820773833392742e-54, 1.0304862520557101e-51, 2.6727437517360924e-49, 5.4802170289787664e-47, 9.0280401387865924e-45, 1.2117795341306277e-42, 1.3414974817643660e-40, 1.2380855579763670e-38, 9.6167080679676503e-37, 6.3399135263665769e-35, 3.5743788958794533e-33, 1.7351030202821157e-31, 7.2965450067684075e-30, 2.6729236200580738e-28, 8.5728304837693986e-27, 2.4184034596476935e-25, 6.0259840320064904e-24, 1.3313678590335932e-22, 2.6174575839348305e-21, 4.5940076773297253e-20, 7.2201073169282769e-19, 1.0189332304232922e-17, 1.2945481593393717e-16, 1.4842238375138792e-15, 1.5390497303535517e-14, 1.4463473211904274e-13, 1.2342144866005691e-12, 9.5803165087358731e-12, 6.7757804877745394e-11, 4.3731866598484108e-10, 2.5793972294264184e-09, 1.3921907152935201e-08, 6.8845811221543303e-08, 3.1228729861789030e-07, 1.3007470032382014e-06, 4.9799245325909927e-06, 1.7540485848093888e-05, 5.6887437600402582e-05, 1.7001408826280900e-04, 4.6855153780841227e-04, 1.1915638144571682e-03, 2.7978394016057863e-03, 6.0688624069258814e-03, 1.2166918864469335e-02, 2.2554310167824408e-02, 3.8673954810636818e-02, 6.1360721004490068e-02, 9.0108678376448870e-02, 1.2250327316413555e-01, 
1.5421043529835440e-01, 1.7977308390779931e-01, 1.9409761186408808e-01, 1.9409761186408808e-01, 1.7977308390779931e-01, 1.5421043529835440e-01, 1.2250327316413555e-01, 9.0108678376448870e-02, 6.1360721004490068e-02, 3.8673954810636818e-02, 2.2554310167824408e-02, 1.2166918864469335e-02, 6.0688624069258814e-03, 2.7978394016057863e-03, 1.1915638144571682e-03, 4.6855153780841227e-04, 1.7001408826280900e-04, 5.6887437600402582e-05, 1.7540485848093888e-05, 4.9799245325909927e-06, 1.3007470032382014e-06, 3.1228729861789030e-07, 6.8845811221543303e-08, 1.3921907152935201e-08, 2.5793972294264184e-09, 4.3731866598484108e-10, 6.7757804877745394e-11, 9.5803165087358731e-12, 1.2342144866005691e-12, 1.4463473211904274e-13, 1.5390497303535517e-14, 1.4842238375138792e-15, 1.2945481593393717e-16, 1.0189332304232922e-17, 7.2201073169282769e-19, 4.5940076773297253e-20, 2.6174575839348305e-21, 1.3313678590335932e-22, 6.0259840320064904e-24, 2.4184034596476935e-25, 8.5728304837693986e-27, 2.6729236200580738e-28, 7.2965450067684075e-30, 1.7351030202821157e-31, 3.5743788958794533e-33, 6.3399135263665769e-35, 9.6167080679676503e-37, 1.2380855579763670e-38, 1.3414974817643660e-40, 1.2117795341306277e-42, 9.0280401387865924e-45, 5.4802170289787664e-47, 2.6727437517360924e-49, 1.0304862520557101e-51, 3.0820773833392742e-54, 6.9930729240520693e-57, 1.1719785012130235e-59, 1.4044672577405165e-62, 1.1561551640964042e-65, 6.2142441618306433e-69, 2.0415857972441075e-72, 3.7512158688048432e-76, 3.4012300869368197e-80, 1.2612494833385409e-84, 1.4046897713151180e-89, 2.6081724024084728e-95, 1.7990659801093241e-102}, + {2.5875539364524082e-103, 3.9360184590802221e-96, 2.2072552957748285e-90, 2.0556308729779607e-85, 5.7358476340730861e-81, 6.5345649901409931e-77, 3.6690360645455800e-73, 1.1510510197511481e-69, 2.2055377414515242e-66, 2.7576366331120112e-63, 2.3673174707161052e-60, 1.4525786040323095e-57, 6.5811912152939833e-55, 2.2613773295130902e-52, 6.0264301132977530e-50, 1.2693840763809051e-47, 2.1479177879979361e-45, 2.9609218353788539e-43, 3.3661609053282785e-41, 3.1901478352848241e-39, 2.5443979671277979e-37, 1.7223946532210455e-35, 9.9710553873521448e-34, 4.9700994335207590e-32, 2.1462063060723877e-30, 8.0737792155578718e-29, 2.6593692402816157e-27, 7.7051505318327598e-26, 1.9720496638158951e-24, 4.4757971347544071e-23, 9.0403370335874384e-22, 1.6303640703529370e-20, 2.6332049182645191e-19, 3.8194419802784429e-18, 4.9883327330781313e-17, 5.8802277275507751e-16, 6.2702394771473180e-15, 6.0607257135908391e-14, 5.3204907075388442e-13, 4.2495506587750072e-12, 3.0933020393247358e-11, 2.0552435298786022e-10, 1.2482251818681576e-09, 6.9389671445373625e-09, 3.5351823460523453e-08, 1.6525270457753961e-07, 7.0953503060139187e-07, 2.8010603367707380e-06, 1.0176471541446871e-05, 3.4054184172402124e-05, 1.0504748699764762e-04, 2.9892250594151955e-04, 7.8519722061027154e-04, 1.9050667392793649e-03, 4.2716207417923171e-03, 8.8560992639436561e-03, 1.6984511709158091e-02, 3.0143653484891619e-02, 4.9524590136894565e-02, 7.5345450641660414e-02, 1.0617266978963312e-01, 1.3860414698078849e-01, 1.6765473214361890e-01, 1.8792309546385810e-01, 1.9520834171916421e-01, 1.8792309546385810e-01, 1.6765473214361890e-01, 1.3860414698078849e-01, 1.0617266978963312e-01, 7.5345450641660414e-02, 4.9524590136894565e-02, 3.0143653484891619e-02, 1.6984511709158091e-02, 8.8560992639436561e-03, 4.2716207417923171e-03, 1.9050667392793649e-03, 7.8519722061027154e-04, 2.9892250594151955e-04, 1.0504748699764762e-04, 3.4054184172402124e-05, 
1.0176471541446871e-05, 2.8010603367707380e-06, 7.0953503060139187e-07, 1.6525270457753961e-07, 3.5351823460523453e-08, 6.9389671445373625e-09, 1.2482251818681576e-09, 2.0552435298786022e-10, 3.0933020393247358e-11, 4.2495506587750072e-12, 5.3204907075388442e-13, 6.0607257135908391e-14, 6.2702394771473180e-15, 5.8802277275507751e-16, 4.9883327330781313e-17, 3.8194419802784429e-18, 2.6332049182645191e-19, 1.6303640703529370e-20, 9.0403370335874384e-22, 4.4757971347544071e-23, 1.9720496638158951e-24, 7.7051505318327598e-26, 2.6593692402816157e-27, 8.0737792155578718e-29, 2.1462063060723877e-30, 4.9700994335207590e-32, 9.9710553873521448e-34, 1.7223946532210455e-35, 2.5443979671277979e-37, 3.1901478352848241e-39, 3.3661609053282785e-41, 2.9609218353788539e-43, 2.1479177879979361e-45, 1.2693840763809051e-47, 6.0264301132977530e-50, 2.2613773295130902e-52, 6.5811912152939833e-55, 1.4525786040323095e-57, 2.3673174707161052e-60, 2.7576366331120112e-63, 2.2055377414515242e-66, 1.1510510197511481e-69, 3.6690360645455800e-73, 6.5345649901409931e-77, 5.7358476340730861e-81, 2.0556308729779607e-85, 2.2072552957748285e-90, 3.9360184590802221e-96, 2.5875539364524082e-103}, + {3.7204460108518457e-104, 5.9364731594787663e-97, 3.4655965116643281e-91, 3.3469757581791224e-86, 9.6613261772636215e-82, 1.1367258528662723e-77, 6.5833865033387550e-74, 2.1283034461639501e-70, 4.1991937638649886e-67, 5.4030149717578019e-64, 4.7707625290237234e-61, 3.0097211186016156e-58, 1.4015247946309535e-55, 4.9483338463613849e-53, 1.3546817753059001e-50, 2.9307665929739616e-48, 5.0927511908300209e-46, 7.2087130229885203e-44, 8.4144007328158726e-42, 8.1870801342500945e-40, 6.7037119216509246e-38, 4.6587143166611642e-36, 2.7687058280430152e-34, 1.4168038025321554e-32, 6.2811433857681812e-31, 2.4259782593432643e-29, 8.2046076950332880e-28, 2.4409654886964413e-26, 6.4156025663739209e-25, 1.4954510277528916e-23, 3.1025363444064255e-22, 5.7477855676921120e-21, 9.5376732466503112e-20, 1.4215508257408722e-18, 1.9080516525917262e-17, 2.3119223992445814e-16, 2.5344536275316043e-15, 2.5189945152766989e-14, 2.2742721378405763e-13, 1.8685821935956322e-12, 1.3994739517759176e-11, 9.5692629429270517e-11, 5.9825297159286590e-10, 3.4243028086561824e-09, 1.7967512803433057e-08, 8.6525026250843954e-08, 3.8283102346903316e-07, 1.5578439536677208e-06, 5.8357960655860304e-06, 2.0142490747188856e-05, 6.4107957353545819e-05, 1.8828587640798520e-04, 5.1065276380199355e-04, 1.2796927737216395e-03, 2.9648560622765260e-03, 6.3539216222626869e-03, 1.2601446134450314e-02, 2.3137547830647125e-02, 3.9345000709058629e-02, 6.1983091960651798e-02, 9.0486600174251544e-02, 1.2243869169055180e-01, 1.5358645412541436e-01, 1.7862576228606980e-01, 1.9263278589714239e-01, 1.9263278589714239e-01, 1.7862576228606980e-01, 1.5358645412541436e-01, 1.2243869169055180e-01, 9.0486600174251544e-02, 6.1983091960651798e-02, 3.9345000709058629e-02, 2.3137547830647125e-02, 1.2601446134450314e-02, 6.3539216222626869e-03, 2.9648560622765260e-03, 1.2796927737216395e-03, 5.1065276380199355e-04, 1.8828587640798520e-04, 6.4107957353545819e-05, 2.0142490747188856e-05, 5.8357960655860304e-06, 1.5578439536677208e-06, 3.8283102346903316e-07, 8.6525026250843954e-08, 1.7967512803433057e-08, 3.4243028086561824e-09, 5.9825297159286590e-10, 9.5692629429270517e-11, 1.3994739517759176e-11, 1.8685821935956322e-12, 2.2742721378405763e-13, 2.5189945152766989e-14, 2.5344536275316043e-15, 2.3119223992445814e-16, 1.9080516525917262e-17, 1.4215508257408722e-18, 9.5376732466503112e-20, 
5.7477855676921120e-21, 3.1025363444064255e-22, 1.4954510277528916e-23, 6.4156025663739209e-25, 2.4409654886964413e-26, 8.2046076950332880e-28, 2.4259782593432643e-29, 6.2811433857681812e-31, 1.4168038025321554e-32, 2.7687058280430152e-34, 4.6587143166611642e-36, 6.7037119216509246e-38, 8.1870801342500945e-40, 8.4144007328158726e-42, 7.2087130229885203e-44, 5.0927511908300209e-46, 2.9307665929739616e-48, 1.3546817753059001e-50, 4.9483338463613849e-53, 1.4015247946309535e-55, 3.0097211186016156e-58, 4.7707625290237234e-61, 5.4030149717578019e-64, 4.1991937638649886e-67, 2.1283034461639501e-70, 6.5833865033387550e-74, 1.1367258528662723e-77, 9.6613261772636215e-82, 3.3469757581791224e-86, 3.4655965116643281e-91, 5.9364731594787663e-97, 3.7204460108518457e-104}, + {5.3476826111026365e-105, 8.9485710679509321e-98, 5.4370243471100807e-92, 5.4441378603550616e-87, 1.6254006780310087e-82, 1.9746831356131776e-78, 1.1794217215696164e-74, 3.9284007348254153e-71, 7.9796183204186807e-68, 1.0563843092209409e-64, 9.5924087114315112e-62, 6.2207574186530967e-59, 2.9767954168841160e-56, 1.0797369730934170e-53, 3.0360436451965334e-51, 6.7450137912280108e-49, 1.2034270735300903e-46, 1.7487964745761127e-44, 2.0954606929273772e-42, 2.0928182450743565e-40, 1.7589155981687000e-38, 1.2546216106083190e-36, 7.6531182757967849e-35, 4.0196809205772165e-33, 1.8291657781800354e-31, 7.2519108515928950e-30, 2.5176731485459177e-28, 7.6897052483356061e-27, 2.0750476694679341e-25, 4.9664572942742541e-24, 1.0580876836793333e-22, 2.0131999550178635e-21, 3.4313613065500532e-20, 5.2539364144662411e-19, 7.2456324501572317e-18, 9.0217967422636200e-17, 1.0165109952415262e-15, 1.0385841011593362e-14, 9.6411044899350535e-14, 8.1461816331285264e-13, 6.2756146921948126e-12, 4.4148505508362490e-11, 2.8403341656010478e-10, 1.6734368028761769e-09, 9.0404035956869064e-09, 4.4835197499650446e-08, 2.0435322677073055e-07, 8.5687946165889204e-07, 3.3086178947400949e-06, 1.1774562755908359e-05, 3.8651667203812415e-05, 1.1712338225370822e-04, 3.2784720468186990e-04, 8.4825701574449603e-04, 2.0298461578116241e-03, 4.4947646714684642e-03, 9.2143589576985860e-03, 1.7495331329372001e-02, 3.0778087957022211e-02, 5.0184153739443245e-02, 7.5861280569172768e-02, 1.0634224601517195e-01, 1.3826358695373833e-01, 1.6675980772407939e-01, 1.8659628095711822e-01, 1.9371820170604062e-01, 1.8659628095711822e-01, 1.6675980772407939e-01, 1.3826358695373833e-01, 1.0634224601517195e-01, 7.5861280569172768e-02, 5.0184153739443245e-02, 3.0778087957022211e-02, 1.7495331329372001e-02, 9.2143589576985860e-03, 4.4947646714684642e-03, 2.0298461578116241e-03, 8.4825701574449603e-04, 3.2784720468186990e-04, 1.1712338225370822e-04, 3.8651667203812415e-05, 1.1774562755908359e-05, 3.3086178947400949e-06, 8.5687946165889204e-07, 2.0435322677073055e-07, 4.4835197499650446e-08, 9.0404035956869064e-09, 1.6734368028761769e-09, 2.8403341656010478e-10, 4.4148505508362490e-11, 6.2756146921948126e-12, 8.1461816331285264e-13, 9.6411044899350535e-14, 1.0385841011593362e-14, 1.0165109952415262e-15, 9.0217967422636200e-17, 7.2456324501572317e-18, 5.2539364144662411e-19, 3.4313613065500532e-20, 2.0131999550178635e-21, 1.0580876836793333e-22, 4.9664572942742541e-24, 2.0750476694679341e-25, 7.6897052483356061e-27, 2.5176731485459177e-28, 7.2519108515928950e-30, 1.8291657781800354e-31, 4.0196809205772165e-33, 7.6531182757967849e-35, 1.2546216106083190e-36, 1.7589155981687000e-38, 2.0928182450743565e-40, 2.0954606929273772e-42, 1.7487964745761127e-44, 1.2034270735300903e-46, 
6.7450137912280108e-49, 3.0360436451965334e-51, 1.0797369730934170e-53, 2.9767954168841160e-56, 6.2207574186530967e-59, 9.5924087114315112e-62, 1.0563843092209409e-64, 7.9796183204186807e-68, 3.9284007348254153e-71, 1.1794217215696164e-74, 1.9746831356131776e-78, 1.6254006780310087e-82, 5.4441378603550616e-87, 5.4370243471100807e-92, 8.9485710679509321e-98, 5.3476826111026365e-105}, + {7.6842766516367719e-106, 1.3481426814084330e-98, 8.5232824201455338e-93, 8.8466828631703380e-88, 2.7313386049487823e-83, 3.4257032455364114e-79, 2.1096988030040495e-75, 7.2385503439630423e-72, 1.5134726683009566e-68, 2.0611376551597914e-65, 1.9243761113547505e-62, 1.2826427266230406e-59, 6.3061655444515630e-57, 2.3494568244414183e-54, 6.7840814198640183e-52, 1.5474546466895863e-49, 2.8342684935250220e-47, 4.2276037377430170e-45, 5.1990976532787586e-43, 5.3289760352121873e-41, 4.5962221753185640e-39, 3.3643504079348503e-37, 2.1059885157863759e-35, 1.1351213650894055e-33, 5.3008778122412928e-32, 2.1567929048213211e-30, 7.6849291812936124e-29, 2.4091492702687982e-27, 6.6731359742375955e-26, 1.6395930158211436e-24, 3.5862705417926132e-23, 7.0063164858326786e-22, 1.2263229445163221e-20, 1.9284904334361852e-19, 2.7319104369658748e-18, 3.4946872798000184e-17, 4.0459872349113726e-16, 4.2484216574797647e-15, 4.0538431929767204e-14, 3.5215468617192942e-13, 2.7897431630242835e-12, 2.0185811443148509e-11, 1.3360415395259884e-10, 8.0999556201008525e-10, 4.5039223844610038e-09, 2.2996588427898263e-08, 1.0794018969942840e-07, 4.6622930297370394e-07, 1.8549414274389103e-06, 6.8039737747046242e-06, 2.3027980530130599e-05, 7.1968372723672767e-05, 2.0783849443716974e-04, 5.5499593798384811e-04, 1.3711669594184176e-03, 3.1358975655376973e-03, 6.6422701758844281e-03, 1.3036021883614270e-02, 2.3714690293508239e-02, 4.0002156173129341e-02, 6.2585382403901088e-02, 9.0844086374182967e-02, 1.2236194224535350e-01, 1.5296604436543901e-01, 1.7749954969646717e-01, 1.9120062497846860e-01, 1.9120062497846860e-01, 1.7749954969646717e-01, 1.5296604436543901e-01, 1.2236194224535350e-01, 9.0844086374182967e-02, 6.2585382403901088e-02, 4.0002156173129341e-02, 2.3714690293508239e-02, 1.3036021883614270e-02, 6.6422701758844281e-03, 3.1358975655376973e-03, 1.3711669594184176e-03, 5.5499593798384811e-04, 2.0783849443716974e-04, 7.1968372723672767e-05, 2.3027980530130599e-05, 6.8039737747046242e-06, 1.8549414274389103e-06, 4.6622930297370394e-07, 1.0794018969942840e-07, 2.2996588427898263e-08, 4.5039223844610038e-09, 8.0999556201008525e-10, 1.3360415395259884e-10, 2.0185811443148509e-11, 2.7897431630242835e-12, 3.5215468617192942e-13, 4.0538431929767204e-14, 4.2484216574797647e-15, 4.0459872349113726e-16, 3.4946872798000184e-17, 2.7319104369658748e-18, 1.9284904334361852e-19, 1.2263229445163221e-20, 7.0063164858326786e-22, 3.5862705417926132e-23, 1.6395930158211436e-24, 6.6731359742375955e-26, 2.4091492702687982e-27, 7.6849291812936124e-29, 2.1567929048213211e-30, 5.3008778122412928e-32, 1.1351213650894055e-33, 2.1059885157863759e-35, 3.3643504079348503e-37, 4.5962221753185640e-39, 5.3289760352121873e-41, 5.1990976532787586e-43, 4.2276037377430170e-45, 2.8342684935250220e-47, 1.5474546466895863e-49, 6.7840814198640183e-52, 2.3494568244414183e-54, 6.3061655444515630e-57, 1.2826427266230406e-59, 1.9243761113547505e-62, 2.0611376551597914e-65, 1.5134726683009566e-68, 7.2385503439630423e-72, 2.1096988030040495e-75, 3.4257032455364114e-79, 2.7313386049487823e-83, 8.8466828631703380e-88, 8.5232824201455338e-93, 1.3481426814084330e-98, 
7.6842766516367719e-106}, + {1.1038467599988350e-106, 2.0299162819709644e-99, 1.3351165545454540e-93, 1.4361908901012819e-88, 4.5844662610976337e-84, 5.9349976525016213e-80, 3.7680124485137400e-76, 1.3315304198313815e-72, 2.8651973069480425e-69, 4.0133192434618745e-66, 3.8520167842143179e-63, 2.6383248122689808e-60, 1.3324956798179218e-57, 5.0982905111217610e-55, 1.5114881547668727e-52, 3.5392080595751209e-50, 6.6532981134843764e-48, 1.0184623032786348e-45, 1.2852633429954566e-43, 1.3517324859318610e-41, 1.1962163965744953e-39, 8.9837835745312106e-38, 5.7697747333366196e-36, 3.1907496278196311e-34, 1.5288163843577720e-32, 6.3824723629696159e-31, 2.3335393536670584e-29, 7.5069091034254989e-28, 2.1339408990452690e-26, 5.3812272933845995e-25, 1.2081587782243251e-23, 2.4230032186653206e-22, 4.3541619756512954e-21, 7.0308758150274706e-20, 1.0228495371481795e-18, 1.3439170617083339e-17, 1.5983699268748500e-16, 1.7244201781932836e-15, 1.6909213145415565e-14, 1.5097790665682497e-13, 1.2295731019067163e-12, 9.1482393782595353e-12, 6.2274132191241200e-11, 3.8838846452764459e-10, 2.2221554698762070e-09, 1.1677641217752186e-08, 5.6428156340858430e-08, 2.5098662449728381e-07, 1.0285882365095455e-06, 3.8874166532037593e-06, 1.3560417245137210e-05, 4.3693239040909913e-05, 1.3013545776424483e-04, 3.5851189054336881e-04, 9.1411456482270634e-04, 2.1583740072268734e-03, 4.7217142757868197e-03, 9.5745050499428906e-03, 1.8003325023065602e-02, 3.1402556502212124e-02, 5.0826454983412143e-02, 7.6356275486723038e-02, 1.0649485597288683e-01, 1.3791833004859055e-01, 1.6587737779249406e-01, 1.8529706602349078e-01, 1.9226167387366441e-01, 1.8529706602349078e-01, 1.6587737779249406e-01, 1.3791833004859055e-01, 1.0649485597288683e-01, 7.6356275486723038e-02, 5.0826454983412143e-02, 3.1402556502212124e-02, 1.8003325023065602e-02, 9.5745050499428906e-03, 4.7217142757868197e-03, 2.1583740072268734e-03, 9.1411456482270634e-04, 3.5851189054336881e-04, 1.3013545776424483e-04, 4.3693239040909913e-05, 1.3560417245137210e-05, 3.8874166532037593e-06, 1.0285882365095455e-06, 2.5098662449728381e-07, 5.6428156340858430e-08, 1.1677641217752186e-08, 2.2221554698762070e-09, 3.8838846452764459e-10, 6.2274132191241200e-11, 9.1482393782595353e-12, 1.2295731019067163e-12, 1.5097790665682497e-13, 1.6909213145415565e-14, 1.7244201781932836e-15, 1.5983699268748500e-16, 1.3439170617083339e-17, 1.0228495371481795e-18, 7.0308758150274706e-20, 4.3541619756512954e-21, 2.4230032186653206e-22, 1.2081587782243251e-23, 5.3812272933845995e-25, 2.1339408990452690e-26, 7.5069091034254989e-28, 2.3335393536670584e-29, 6.3824723629696159e-31, 1.5288163843577720e-32, 3.1907496278196311e-34, 5.7697747333366196e-36, 8.9837835745312106e-38, 1.1962163965744953e-39, 1.3517324859318610e-41, 1.2852633429954566e-43, 1.0184623032786348e-45, 6.6532981134843764e-48, 3.5392080595751209e-50, 1.5114881547668727e-52, 5.0982905111217610e-55, 1.3324956798179218e-57, 2.6383248122689808e-60, 3.8520167842143179e-63, 4.0133192434618745e-66, 2.8651973069480425e-69, 1.3315304198313815e-72, 3.7680124485137400e-76, 5.9349976525016213e-80, 4.5844662610976337e-84, 1.4361908901012819e-88, 1.3351165545454540e-93, 2.0299162819709644e-99, 1.1038467599988350e-106}, + {1.5852022154122762e-107, 3.0548054158892936e-100, 2.0897891620152027e-94, 2.3293228753706694e-89, 7.6861123200181953e-85, 1.0268748980780725e-80, 6.7197562830108496e-77, 2.4452549463378667e-73, 5.4141833815112683e-70, 7.7987351892856256e-67, 7.6937052841722558e-64, 5.4140880882527221e-61, 2.8084445413684342e-58, 
1.1033315179441334e-55, 3.3578943438527196e-53, 8.0698691825802516e-51, 1.5567833435482675e-48, 2.4451923543925048e-46, 3.1658838545522830e-44, 3.4158346081733210e-42, 3.1009657171113824e-40, 2.3889887847651522e-38, 1.5738945493185839e-36, 8.9283867523289446e-35, 4.3884114191019410e-33, 1.8794344951812523e-31, 7.0495273821073143e-30, 2.3266903412110293e-28, 6.7861521642478146e-27, 1.7559922329924036e-25, 4.0458189399870260e-24, 8.3276681875539800e-23, 1.5360712580259943e-21, 2.5462929950511611e-20, 3.8033125448314512e-19, 5.1314188262140908e-18, 6.2679375130394878e-17, 6.9461573699177854e-16, 6.9976953274028973e-15, 6.4202984348233192e-14, 5.3739121421360399e-13, 4.1101386023109489e-12, 2.8767484176405797e-11, 1.8451534167067902e-10, 1.0859592046532814e-09, 5.8718148797495608e-09, 2.9201210196159125e-08, 1.3370799258052363e-07, 5.6424627677752911e-07, 2.1965028260725411e-06, 7.8943275081059988e-06, 2.6215639412143949e-05, 8.0497734201318759e-05, 2.2870525172954362e-04, 6.0159714197281571e-04, 1.4659454148941060e-03, 3.3108212147861167e-03, 6.9336514656727563e-03, 1.3470344826444706e-02, 2.4285561078700165e-02, 4.0645578943455961e-02, 6.3168189419948478e-02, 9.1182028088582376e-02, 1.2227380245007502e-01, 1.5234939813711931e-01, 1.7639383039152406e-01, 1.8979993309264909e-01, 1.8979993309264909e-01, 1.7639383039152406e-01, 1.5234939813711931e-01, 1.2227380245007502e-01, 9.1182028088582376e-02, 6.3168189419948478e-02, 4.0645578943455961e-02, 2.4285561078700165e-02, 1.3470344826444706e-02, 6.9336514656727563e-03, 3.3108212147861167e-03, 1.4659454148941060e-03, 6.0159714197281571e-04, 2.2870525172954362e-04, 8.0497734201318759e-05, 2.6215639412143949e-05, 7.8943275081059988e-06, 2.1965028260725411e-06, 5.6424627677752911e-07, 1.3370799258052363e-07, 2.9201210196159125e-08, 5.8718148797495608e-09, 1.0859592046532814e-09, 1.8451534167067902e-10, 2.8767484176405797e-11, 4.1101386023109489e-12, 5.3739121421360399e-13, 6.4202984348233192e-14, 6.9976953274028973e-15, 6.9461573699177854e-16, 6.2679375130394878e-17, 5.1314188262140908e-18, 3.8033125448314512e-19, 2.5462929950511611e-20, 1.5360712580259943e-21, 8.3276681875539800e-23, 4.0458189399870260e-24, 1.7559922329924036e-25, 6.7861521642478146e-27, 2.3266903412110293e-28, 7.0495273821073143e-30, 1.8794344951812523e-31, 4.3884114191019410e-33, 8.9283867523289446e-35, 1.5738945493185839e-36, 2.3889887847651522e-38, 3.1009657171113824e-40, 3.4158346081733210e-42, 3.1658838545522830e-44, 2.4451923543925048e-46, 1.5567833435482675e-48, 8.0698691825802516e-51, 3.3578943438527196e-53, 1.1033315179441334e-55, 2.8084445413684342e-58, 5.4140880882527221e-61, 7.6937052841722558e-64, 7.7987351892856256e-67, 5.4141833815112683e-70, 2.4452549463378667e-73, 6.7197562830108496e-77, 1.0268748980780725e-80, 7.6861123200181953e-85, 2.3293228753706694e-89, 2.0897891620152027e-94, 3.0548054158892936e-100, 1.5852022154122762e-107}, + {2.2757903447048717e-108, 4.5946784737513260e-101, 3.2685938252533535e-95, 3.7743169785683037e-90, 1.2871697383667851e-85, 1.7743864842451219e-81, 1.1966100169534770e-77, 4.4831207104684604e-74, 1.0212230702897007e-70, 1.5124472591580667e-67, 1.5333631698716670e-64, 1.1084367892237507e-61, 5.9044668946443965e-59, 2.3813770140907575e-56, 7.4386721348341212e-54, 1.8345006104189449e-51, 3.6310691969704462e-49, 5.8508598523019220e-47, 7.7706802795811407e-45, 8.5997500074472335e-43, 8.0073315846684445e-41, 6.3269186795493014e-39, 4.2749818807535224e-37, 2.4872103396508171e-35, 1.2538186619197742e-33, 5.5075144968159152e-32, 
2.1188905321706446e-30, 7.1735092247885270e-29, 2.1463025081080465e-27, 5.6976909711813352e-26, 1.3468870756541617e-24, 2.8447276912580344e-23, 5.3848024996122255e-22, 9.1613793530553680e-21, 1.4046424779365027e-19, 1.9456017248888693e-18, 2.4401660157799310e-17, 2.7770723790106855e-16, 2.8735526657240152e-15, 2.7084363045358423e-14, 2.3293514464982674e-13, 1.8309133143528682e-12, 1.3172552368556761e-11, 8.6866283016645484e-11, 5.2575217351973056e-10, 2.9240918874514531e-09, 1.4961568756142573e-08, 7.0502109098437831e-08, 3.0626482923803324e-07, 1.2276189268114097e-06, 4.5443796787236290e-06, 1.5548071919772645e-05, 4.9202833580003181e-05, 1.4411581751672914e-04, 3.9094357332687408e-04, 9.8276091717415027e-04, 2.2905612451458858e-03, 4.9522684042317359e-03, 9.9362495542868867e-03, 1.8508238320820009e-02, 3.2017035703904927e-02, 5.1451871301553709e-02, 7.6831201307210703e-02, 1.0663137077642493e-01, 1.3756889121351917e-01, 1.6500723131965989e-01, 1.8402451115016699e-01, 1.9083751332645207e-01, 1.8402451115016699e-01, 1.6500723131965989e-01, 1.3756889121351917e-01, 1.0663137077642493e-01, 7.6831201307210703e-02, 5.1451871301553709e-02, 3.2017035703904927e-02, 1.8508238320820009e-02, 9.9362495542868867e-03, 4.9522684042317359e-03, 2.2905612451458858e-03, 9.8276091717415027e-04, 3.9094357332687408e-04, 1.4411581751672914e-04, 4.9202833580003181e-05, 1.5548071919772645e-05, 4.5443796787236290e-06, 1.2276189268114097e-06, 3.0626482923803324e-07, 7.0502109098437831e-08, 1.4961568756142573e-08, 2.9240918874514531e-09, 5.2575217351973056e-10, 8.6866283016645484e-11, 1.3172552368556761e-11, 1.8309133143528682e-12, 2.3293514464982674e-13, 2.7084363045358423e-14, 2.8735526657240152e-15, 2.7770723790106855e-16, 2.4401660157799310e-17, 1.9456017248888693e-18, 1.4046424779365027e-19, 9.1613793530553680e-21, 5.3848024996122255e-22, 2.8447276912580344e-23, 1.3468870756541617e-24, 5.6976909711813352e-26, 2.1463025081080465e-27, 7.1735092247885270e-29, 2.1188905321706446e-30, 5.5075144968159152e-32, 1.2538186619197742e-33, 2.4872103396508171e-35, 4.2749818807535224e-37, 6.3269186795493014e-39, 8.0073315846684445e-41, 8.5997500074472335e-43, 7.7706802795811407e-45, 5.8508598523019220e-47, 3.6310691969704462e-49, 1.8345006104189449e-51, 7.4386721348341212e-54, 2.3813770140907575e-56, 5.9044668946443965e-59, 1.1084367892237507e-61, 1.5333631698716670e-64, 1.5124472591580667e-67, 1.0212230702897007e-70, 4.4831207104684604e-74, 1.1966100169534770e-77, 1.7743864842451219e-81, 1.2871697383667851e-85, 3.7743169785683037e-90, 3.2685938252533535e-95, 4.5946784737513260e-101, 2.2757903447048717e-108}, + {3.2662778348790219e-109, 6.9071006009219448e-102, 5.1085629363787234e-96, 6.1100313027084669e-91, 2.1531900867437274e-86, 3.0621042209000059e-82, 2.1277376772398947e-78, 8.2059605065764682e-75, 1.9227730563170089e-71, 2.9274096096614171e-68, 3.0495045268798647e-65, 2.2641169845080718e-62, 1.2382980686348502e-59, 5.1263348642622049e-57, 1.6432629457353511e-54, 4.1579427709675079e-52, 8.4425869514003785e-50, 1.3953581486228228e-47, 1.9006681419175218e-45, 2.1571532321739302e-43, 2.0597137897723631e-41, 1.6688589526897371e-39, 1.1562775861404743e-37, 6.8982672254135044e-36, 3.5658879405329104e-34, 1.6062235506206207e-32, 6.3371488059404152e-31, 2.2002659780562692e-29, 6.7518197536098842e-28, 1.8384352926742758e-26, 4.4579796693281321e-25, 9.6593405936315995e-24, 1.8759587883105718e-22, 3.2750072247643929e-21, 5.1531219168800766e-20, 7.3260730719021186e-19, 9.4321973070076218e-18, 1.1021086847913827e-16, 
1.1710396791414340e-15, 1.1336050130083279e-14, 1.0014937399406310e-13, 8.0878754293572482e-13, 5.9796723109076469e-12, 4.0531324359847441e-11, 2.5220174708373236e-10, 1.4423973477091834e-09, 7.5910433713044797e-09, 3.6801425167297173e-08, 1.6451664884983423e-07, 6.7880160807256645e-07, 2.5872670138074200e-06, 9.1170822839813128e-06, 2.9724333423383228e-05, 8.9724509010453385e-05, 2.5091729952798149e-04, 6.5046837318858926e-04, 1.5639824929501397e-03, 3.4894828544320858e-03, 7.2278157943492030e-03, 1.3904131095041473e-02, 2.4850005571417737e-02, 4.1275438102408031e-02, 6.3732095470061270e-02, 9.1501274267677846e-02, 1.2217500065832824e-01, 1.5173668408251911e-01, 1.7530801211052990e-01, 1.8842957463583779e-01, 1.8842957463583779e-01, 1.7530801211052990e-01, 1.5173668408251911e-01, 1.2217500065832824e-01, 9.1501274267677846e-02, 6.3732095470061270e-02, 4.1275438102408031e-02, 2.4850005571417737e-02, 1.3904131095041473e-02, 7.2278157943492030e-03, 3.4894828544320858e-03, 1.5639824929501397e-03, 6.5046837318858926e-04, 2.5091729952798149e-04, 8.9724509010453385e-05, 2.9724333423383228e-05, 9.1170822839813128e-06, 2.5872670138074200e-06, 6.7880160807256645e-07, 1.6451664884983423e-07, 3.6801425167297173e-08, 7.5910433713044797e-09, 1.4423973477091834e-09, 2.5220174708373236e-10, 4.0531324359847441e-11, 5.9796723109076469e-12, 8.0878754293572482e-13, 1.0014937399406310e-13, 1.1336050130083279e-14, 1.1710396791414340e-15, 1.1021086847913827e-16, 9.4321973070076218e-18, 7.3260730719021186e-19, 5.1531219168800766e-20, 3.2750072247643929e-21, 1.8759587883105718e-22, 9.6593405936315995e-24, 4.4579796693281321e-25, 1.8384352926742758e-26, 6.7518197536098842e-28, 2.2002659780562692e-29, 6.3371488059404152e-31, 1.6062235506206207e-32, 3.5658879405329104e-34, 6.8982672254135044e-36, 1.1562775861404743e-37, 1.6688589526897371e-39, 2.0597137897723631e-41, 2.1571532321739302e-43, 1.9006681419175218e-45, 1.3953581486228228e-47, 8.4425869514003785e-50, 4.1579427709675079e-52, 1.6432629457353511e-54, 5.1263348642622049e-57, 1.2382980686348502e-59, 2.2641169845080718e-62, 3.0495045268798647e-65, 2.9274096096614171e-68, 1.9227730563170089e-71, 8.2059605065764682e-75, 2.1277376772398947e-78, 3.0621042209000059e-82, 2.1531900867437274e-86, 6.1100313027084669e-91, 5.1085629363787234e-96, 6.9071006009219448e-102, 3.2662778348790219e-109}, + {4.6865023080529104e-110, 1.0377874004830612e-102, 7.9784749096307812e-97, 9.8821174227967692e-92, 3.5979287498244655e-87, 5.2776428357398472e-83, 3.7779666227111211e-79, 1.4996171154683470e-75, 3.6138115204304974e-72, 5.6551650475163530e-69, 6.0520249291763858e-66, 4.6142654439156290e-63, 2.5906808193011388e-60, 1.1006724998865391e-57, 3.6200846949796910e-55, 9.3965049756134747e-53, 1.9569075431422824e-50, 3.3168917267338703e-48, 4.6329566465533973e-46, 5.3914438385767737e-44, 5.2781068509974114e-42, 4.3845152623372787e-40, 3.1144817985274696e-38, 1.9049498787300406e-36, 1.0095682854224567e-34, 4.6623959142602483e-33, 1.8860299651335772e-31, 6.7143368715209606e-30, 2.1127537039024262e-28, 5.8994042231272372e-27, 1.4671212494779564e-25, 3.2605011152464131e-24, 6.4955255161731863e-23, 1.1633403477038459e-21, 1.8781142293296228e-20, 2.7399173517222677e-19, 3.6203893599581944e-18, 4.3421867882952047e-17, 4.7366003944880216e-16, 4.7080515903807368e-15, 4.2715858467937535e-14, 3.5433804833163948e-13, 2.6914605808295047e-12, 1.8746383733203776e-11, 1.1989016690827638e-10, 7.0489725875624514e-10, 3.8145955545353021e-09, 1.9020527069551235e-08, 8.7475726975743740e-08, 
3.7140992001185702e-07, 1.4571432947497977e-06, 5.2867540339888927e-06, 1.7751862632560425e-05, 5.5204240789547246e-05, 1.5909525194144218e-04, 4.2516612395155680e-04, 1.0541825457525329e-03, 2.4263153829887293e-03, 5.1862282813377051e-03, 1.0299316731984193e-02, 1.9009837222668049e-02, 3.2621520059018146e-02, 5.2060779315091815e-02, 7.7286794858634558e-02, 1.0675261357345991e-01, 1.3721574664493244e-01, 1.6414915672894112e-01, 1.8277772077559612e-01, 1.8944453877662401e-01, 1.8277772077559612e-01, 1.6414915672894112e-01, 1.3721574664493244e-01, 1.0675261357345991e-01, 7.7286794858634558e-02, 5.2060779315091815e-02, 3.2621520059018146e-02, 1.9009837222668049e-02, 1.0299316731984193e-02, 5.1862282813377051e-03, 2.4263153829887293e-03, 1.0541825457525329e-03, 4.2516612395155680e-04, 1.5909525194144218e-04, 5.5204240789547246e-05, 1.7751862632560425e-05, 5.2867540339888927e-06, 1.4571432947497977e-06, 3.7140992001185702e-07, 8.7475726975743740e-08, 1.9020527069551235e-08, 3.8145955545353021e-09, 7.0489725875624514e-10, 1.1989016690827638e-10, 1.8746383733203776e-11, 2.6914605808295047e-12, 3.5433804833163948e-13, 4.2715858467937535e-14, 4.7080515903807368e-15, 4.7366003944880216e-16, 4.3421867882952047e-17, 3.6203893599581944e-18, 2.7399173517222677e-19, 1.8781142293296228e-20, 1.1633403477038459e-21, 6.4955255161731863e-23, 3.2605011152464131e-24, 1.4671212494779564e-25, 5.8994042231272372e-27, 2.1127537039024262e-28, 6.7143368715209606e-30, 1.8860299651335772e-31, 4.6623959142602483e-33, 1.0095682854224567e-34, 1.9049498787300406e-36, 3.1144817985274696e-38, 4.3845152623372787e-40, 5.2781068509974114e-42, 5.3914438385767737e-44, 4.6329566465533973e-46, 3.3168917267338703e-48, 1.9569075431422824e-50, 9.3965049756134747e-53, 3.6200846949796910e-55, 1.1006724998865391e-57, 2.5906808193011388e-60, 4.6142654439156290e-63, 6.0520249291763858e-66, 5.6551650475163530e-69, 3.6138115204304974e-72, 1.4996171154683470e-75, 3.7779666227111211e-79, 5.2776428357398472e-83, 3.5979287498244655e-87, 9.8821174227967692e-92, 7.9784749096307812e-97, 1.0377874004830612e-102, 4.6865023080529104e-110}, + {6.7223469064836171e-111, 1.5584604318709814e-103, 1.2451690475064742e-97, 1.5968463027613036e-92, 6.0055451530824495e-88, 9.0848000543271587e-84, 6.6985500695669678e-80, 2.7361658102621640e-76, 6.7802095889475983e-73, 1.0903770909317098e-69, 1.1985921850382964e-66, 9.3828601952142795e-64, 5.4070585225542087e-61, 2.3572025385780174e-58, 7.9532902157128368e-56, 2.1173804777066541e-53, 4.5220720973223149e-51, 7.8591635665593126e-49, 1.1254750686853198e-46, 1.3427041430602204e-44, 1.3474889050314146e-42, 1.1474212047422846e-40, 8.3547107388260308e-39, 5.2380609435106477e-37, 2.8455586990614318e-35, 1.3470860626931207e-33, 5.5860425698193572e-32, 2.0386781124262974e-30, 6.5767258559351848e-29, 1.8828451569167714e-27, 4.8012341582972026e-26, 1.0941854437188646e-24, 2.2355486477720061e-23, 4.1066490153458769e-22, 6.8008865930090286e-21, 1.0178867789337611e-19, 1.3800519281469149e-18, 1.6985968059071478e-17, 1.9017680123139647e-16, 1.9404932122013048e-15, 1.8076531130346059e-14, 1.5398466784483034e-13, 1.2013344715990716e-12, 8.5959906732140100e-12, 5.6487797699597178e-11, 3.4133713929443775e-10, 1.8988505331522261e-09, 9.7353434088062900e-09, 4.6047775382709491e-08, 2.0113005931537178e-07, 8.1197441052941798e-07, 3.0322448415500775e-06, 1.0482793298607957e-05, 3.3573050144199230e-05, 9.9676496285740638e-05, 2.7450358985306201e-04, 7.0161767403489428e-04, 1.6652282490644866e-03, 3.6717374087279781e-03, 
7.5245206122160650e-03, 1.4337113630217399e-02, 2.5407889466847185e-02, 4.1891912442268933e-02, 6.4277668190634463e-02, 9.1802633810630571e-02, 1.2206621940114575e-01, 1.5112804960523191e-01, 1.7424152504403007e-01, 1.8708847054904945e-01, 1.8708847054904945e-01, 1.7424152504403007e-01, 1.5112804960523191e-01, 1.2206621940114575e-01, 9.1802633810630571e-02, 6.4277668190634463e-02, 4.1891912442268933e-02, 2.5407889466847185e-02, 1.4337113630217399e-02, 7.5245206122160650e-03, 3.6717374087279781e-03, 1.6652282490644866e-03, 7.0161767403489428e-04, 2.7450358985306201e-04, 9.9676496285740638e-05, 3.3573050144199230e-05, 1.0482793298607957e-05, 3.0322448415500775e-06, 8.1197441052941798e-07, 2.0113005931537178e-07, 4.6047775382709491e-08, 9.7353434088062900e-09, 1.8988505331522261e-09, 3.4133713929443775e-10, 5.6487797699597178e-11, 8.5959906732140100e-12, 1.2013344715990716e-12, 1.5398466784483034e-13, 1.8076531130346059e-14, 1.9404932122013048e-15, 1.9017680123139647e-16, 1.6985968059071478e-17, 1.3800519281469149e-18, 1.0178867789337611e-19, 6.8008865930090286e-21, 4.1066490153458769e-22, 2.2355486477720061e-23, 1.0941854437188646e-24, 4.8012341582972026e-26, 1.8828451569167714e-27, 6.5767258559351848e-29, 2.0386781124262974e-30, 5.5860425698193572e-32, 1.3470860626931207e-33, 2.8455586990614318e-35, 5.2380609435106477e-37, 8.3547107388260308e-39, 1.1474212047422846e-40, 1.3474889050314146e-42, 1.3427041430602204e-44, 1.1254750686853198e-46, 7.8591635665593126e-49, 4.5220720973223149e-51, 2.1173804777066541e-53, 7.9532902157128368e-56, 2.3572025385780174e-58, 5.4070585225542087e-61, 9.3828601952142795e-64, 1.1985921850382964e-66, 1.0903770909317098e-69, 6.7802095889475983e-73, 2.7361658102621640e-76, 6.6985500695669678e-80, 9.0848000543271587e-84, 6.0055451530824495e-88, 1.5968463027613036e-92, 1.2451690475064742e-97, 1.5584604318709814e-103, 6.7223469064836171e-111}, + {9.6398630558977581e-112, 2.3391641752283700e-104, 1.9419047430148712e-98, 2.5780282869707508e-93, 1.0013546946979084e-88, 1.5618992863642802e-84, 1.1860256149765069e-80, 4.9845319901386102e-77, 1.2699036539452659e-73, 2.0984019099670130e-70, 2.3689362034001624e-67, 1.9037493843202249e-64, 1.1258502033609599e-61, 5.0354502440866812e-59, 1.7426389319046294e-56, 4.7576590995159999e-54, 1.0418263136743596e-51, 1.8562658771588515e-49, 2.7249577678833700e-47, 3.3321800978024594e-45, 3.4274481444478336e-43, 2.9912154191216358e-41, 2.2321555171303588e-39, 1.4342596614707723e-37, 7.9853025164866527e-36, 3.8743074669761106e-34, 1.6466139693318358e-32, 6.1594817750585881e-31, 2.0367481580156025e-29, 5.9772735819833118e-28, 1.5625565508848433e-26, 3.6509499167114885e-25, 7.6484446772058021e-24, 1.4407768646001281e-22, 2.4470614433838565e-21, 3.7566695291667047e-20, 5.2249355965739373e-19, 6.5981079437776417e-18, 7.5804607661939761e-17, 7.9383130347919212e-16, 7.5907028620400909e-15, 6.6385260183760165e-14, 5.3182112719426004e-13, 3.9083095195736355e-12, 2.6383151459347353e-11, 1.6380456665860958e-10, 9.3648087516475362e-10, 4.9354207551905105e-09, 2.4002106969319454e-08, 1.0781831043581527e-07, 4.4775845772911881e-07, 1.7205483203483572e-06, 6.1220977817639379e-06, 2.0186382119025494e-05, 6.1721031633452369e-05, 1.7510316724439029e-04, 4.6120030826672870e-04, 1.1283616065624156e-03, 2.5655409728187234e-03, 5.4233979404253157e-03, 1.0663442942343464e-02, 1.9507907091601058e-02, 3.3216020285214413e-02, 5.2653553807060320e-02, 7.7723764833089265e-02, 1.0685936247311774e-01, 1.3685933683570478e-01, 1.6330294257556086e-01, 
1.8155584069555794e-01, 1.8808162842571305e-01, 1.8155584069555794e-01, 1.6330294257556086e-01, 1.3685933683570478e-01, 1.0685936247311774e-01, 7.7723764833089265e-02, 5.2653553807060320e-02, 3.3216020285214413e-02, 1.9507907091601058e-02, 1.0663442942343464e-02, 5.4233979404253157e-03, 2.5655409728187234e-03, 1.1283616065624156e-03, 4.6120030826672870e-04, 1.7510316724439029e-04, 6.1721031633452369e-05, 2.0186382119025494e-05, 6.1220977817639379e-06, 1.7205483203483572e-06, 4.4775845772911881e-07, 1.0781831043581527e-07, 2.4002106969319454e-08, 4.9354207551905105e-09, 9.3648087516475362e-10, 1.6380456665860958e-10, 2.6383151459347353e-11, 3.9083095195736355e-12, 5.3182112719426004e-13, 6.6385260183760165e-14, 7.5907028620400909e-15, 7.9383130347919212e-16, 7.5804607661939761e-17, 6.5981079437776417e-18, 5.2249355965739373e-19, 3.7566695291667047e-20, 2.4470614433838565e-21, 1.4407768646001281e-22, 7.6484446772058021e-24, 3.6509499167114885e-25, 1.5625565508848433e-26, 5.9772735819833118e-28, 2.0367481580156025e-29, 6.1594817750585881e-31, 1.6466139693318358e-32, 3.8743074669761106e-34, 7.9853025164866527e-36, 1.4342596614707723e-37, 2.2321555171303588e-39, 2.9912154191216358e-41, 3.4274481444478336e-43, 3.3321800978024594e-45, 2.7249577678833700e-47, 1.8562658771588515e-49, 1.0418263136743596e-51, 4.7576590995159999e-54, 1.7426389319046294e-56, 5.0354502440866812e-59, 1.1258502033609599e-61, 1.9037493843202249e-64, 2.3689362034001624e-67, 2.0984019099670130e-70, 1.2699036539452659e-73, 4.9845319901386102e-77, 1.1860256149765069e-80, 1.5618992863642802e-84, 1.0013546946979084e-88, 2.5780282869707508e-93, 1.9419047430148712e-98, 2.3391641752283700e-104, 9.6398630558977581e-112}, + {1.3819748652176074e-112, 3.5091818441132373e-105, 3.0263733342631031e-99, 4.1584216000446123e-94, 1.6678802978259982e-89, 2.6820059816501053e-85, 2.0970348613054561e-81, 9.0664032095201726e-78, 2.3744232682771987e-74, 4.0308051146214329e-71, 4.6725947222676983e-68, 3.8542403780632908e-65, 2.3387627697862867e-62, 1.0729932979377662e-59, 3.8081687860913558e-57, 1.0660224990923822e-54, 2.3931046396310541e-52, 4.3706076964057967e-50, 6.5758217028615195e-48, 8.2408262629145279e-46, 8.6863577038588966e-44, 7.7681876124982396e-42, 5.9400458445057631e-40, 3.9109426504580767e-38, 2.2311802993712928e-36, 1.1092621876592673e-34, 4.8310493095269011e-33, 1.8519160989732117e-31, 6.2757371777744156e-30, 1.8875929093999227e-28, 5.0576726716251256e-27, 1.2113423787195963e-25, 2.6014811007021294e-24, 5.0242965481154138e-23, 8.7498945768853231e-22, 1.3775031653677140e-20, 1.9649805509896429e-19, 2.5453305256296733e-18, 3.0000691187992888e-17, 3.2236051617172698e-16, 3.1633284490034087e-15, 2.8395986506952882e-14, 2.3353492711597277e-13, 1.7622103628998908e-12, 1.2216962997815707e-11, 7.7914682997538544e-11, 4.5765854976073951e-10, 2.4786353105487376e-09, 1.2390357485809571e-08, 5.7223858055497259e-08, 2.4439179961675454e-07, 9.6600646013167157e-07, 3.5367142158469601e-06, 1.2002319438047467e-05, 3.7780839366269368e-05, 1.1038074912944360e-04, 2.9949084760834345e-04, 7.5504927141389195e-04, 1.7696288232331070e-03, 3.8574393750280067e-03, 7.8235307009575233e-03, 1.4769041560724656e-02, 2.5959097306055404e-02, 4.2495188992312062e-02, 6.4805460287971808e-02, 9.2086877576565612e-02, 1.2194809856528342e-01, 1.5052362288934179e-01, 1.7319382084353874e-01, 1.8577559474964558e-01, 1.8577559474964558e-01, 1.7319382084353874e-01, 1.5052362288934179e-01, 1.2194809856528342e-01, 9.2086877576565612e-02, 6.4805460287971808e-02, 
4.2495188992312062e-02, 2.5959097306055404e-02, 1.4769041560724656e-02, 7.8235307009575233e-03, 3.8574393750280067e-03, 1.7696288232331070e-03, 7.5504927141389195e-04, 2.9949084760834345e-04, 1.1038074912944360e-04, 3.7780839366269368e-05, 1.2002319438047467e-05, 3.5367142158469601e-06, 9.6600646013167157e-07, 2.4439179961675454e-07, 5.7223858055497259e-08, 1.2390357485809571e-08, 2.4786353105487376e-09, 4.5765854976073951e-10, 7.7914682997538544e-11, 1.2216962997815707e-11, 1.7622103628998908e-12, 2.3353492711597277e-13, 2.8395986506952882e-14, 3.1633284490034087e-15, 3.2236051617172698e-16, 3.0000691187992888e-17, 2.5453305256296733e-18, 1.9649805509896429e-19, 1.3775031653677140e-20, 8.7498945768853231e-22, 5.0242965481154138e-23, 2.6014811007021294e-24, 1.2113423787195963e-25, 5.0576726716251256e-27, 1.8875929093999227e-28, 6.2757371777744156e-30, 1.8519160989732117e-31, 4.8310493095269011e-33, 1.1092621876592673e-34, 2.2311802993712928e-36, 3.9109426504580767e-38, 5.9400458445057631e-40, 7.7681876124982396e-42, 8.6863577038588966e-44, 8.2408262629145279e-46, 6.5758217028615195e-48, 4.3706076964057967e-50, 2.3931046396310541e-52, 1.0660224990923822e-54, 3.8081687860913558e-57, 1.0729932979377662e-59, 2.3387627697862867e-62, 3.8542403780632908e-65, 4.6725947222676983e-68, 4.0308051146214329e-71, 2.3744232682771987e-74, 9.0664032095201726e-78, 2.0970348613054561e-81, 2.6820059816501053e-85, 1.6678802978259982e-89, 4.1584216000446123e-94, 3.0263733342631031e-99, 3.5091818441132373e-105, 1.3819748652176074e-112}, + {1.9806611743021416e-113, 5.2617952692292333e-106, 4.7131990325935266e-100, 6.7017830703194019e-95, 2.7751659416516980e-90, 4.5998341384092458e-86, 3.7027394072850406e-82, 1.6465801492558192e-78, 4.4321525706156201e-75, 7.7285235953959866e-72, 9.1980779235827154e-69, 7.7863635474467061e-66, 4.8472018976182706e-63, 2.2808008160100623e-60, 8.3001996570148669e-58, 2.3819600570060440e-55, 5.4809275321322830e-53, 1.0258876994968507e-50, 1.5817079781846762e-48, 2.0310845466149993e-46, 2.1935503204230003e-44, 2.0098426331457899e-42, 1.5745331638769155e-40, 1.0620797611432563e-38, 6.2076082159887624e-37, 3.1618692048946272e-35, 1.4108563317113768e-33, 5.5412911024876112e-32, 1.9240852596678829e-30, 5.9301271167711558e-29, 1.6282950449666844e-27, 3.9967858207858083e-26, 8.7976197776883766e-25, 1.7416595619626179e-23, 3.1094324529576228e-22, 5.0189377569399244e-21, 7.3412947992638873e-20, 9.7524056424221390e-19, 1.1789993455088293e-17, 1.2995830094215584e-16, 1.3084437330245737e-15, 1.2052816831862512e-14, 1.0173726615206055e-13, 7.8806482946078899e-13, 5.6095283428337204e-12, 3.6739040139866195e-11, 2.2165913480620041e-10, 1.2333492148378478e-09, 6.3355479710981103e-09, 3.0075005874343488e-08, 1.3205310627276386e-07, 5.3676540058539977e-07, 2.0214361392302868e-06, 7.0582654033955854e-06, 2.2866437400066508e-05, 6.8776486666748005e-05, 1.9216752502494505e-04, 4.9906383375612176e-04, 1.2052761990741546e-03, 2.7081400657517538e-03, 5.6635846018365278e-03, 1.1028376452466447e-02, 2.0002251583713944e-02, 3.3800561758698749e-02, 5.3230566834836512e-02, 7.8142792739003333e-02, 1.0695235327964076e-01, 1.3650006936059866e-01, 1.6246837812900003e-01, 1.8035805565273866e-01, 1.8674771616737459e-01, 1.8035805565273866e-01, 1.6246837812900003e-01, 1.3650006936059866e-01, 1.0695235327964076e-01, 7.8142792739003333e-02, 5.3230566834836512e-02, 3.3800561758698749e-02, 2.0002251583713944e-02, 1.1028376452466447e-02, 5.6635846018365278e-03, 2.7081400657517538e-03, 1.2052761990741546e-03, 
4.9906383375612176e-04, 1.9216752502494505e-04, 6.8776486666748005e-05, 2.2866437400066508e-05, 7.0582654033955854e-06, 2.0214361392302868e-06, 5.3676540058539977e-07, 1.3205310627276386e-07, 3.0075005874343488e-08, 6.3355479710981103e-09, 1.2333492148378478e-09, 2.2165913480620041e-10, 3.6739040139866195e-11, 5.6095283428337204e-12, 7.8806482946078899e-13, 1.0173726615206055e-13, 1.2052816831862512e-14, 1.3084437330245737e-15, 1.2995830094215584e-16, 1.1789993455088293e-17, 9.7524056424221390e-19, 7.3412947992638873e-20, 5.0189377569399244e-21, 3.1094324529576228e-22, 1.7416595619626179e-23, 8.7976197776883766e-25, 3.9967858207858083e-26, 1.6282950449666844e-27, 5.9301271167711558e-29, 1.9240852596678829e-30, 5.5412911024876112e-32, 1.4108563317113768e-33, 3.1618692048946272e-35, 6.2076082159887624e-37, 1.0620797611432563e-38, 1.5745331638769155e-40, 2.0098426331457899e-42, 2.1935503204230003e-44, 2.0310845466149993e-46, 1.5817079781846762e-48, 1.0258876994968507e-50, 5.4809275321322830e-53, 2.3819600570060440e-55, 8.3001996570148669e-58, 2.2808008160100623e-60, 4.8472018976182706e-63, 7.7863635474467061e-66, 9.1980779235827154e-69, 7.7285235953959866e-72, 4.4321525706156201e-75, 1.6465801492558192e-78, 3.7027394072850406e-82, 4.5998341384092458e-86, 2.7751659416516980e-90, 6.7017830703194019e-95, 4.7131990325935266e-100, 5.2617952692292333e-106, 1.9806611743021416e-113}, + {2.8379348362554196e-114, 7.8858321167381958e-107, 7.3351907294176643e-101, 1.0791401722330717e-95, 4.6128120689561502e-91, 7.8796483529297319e-87, 6.5291103196563761e-83, 2.9859071017101074e-79, 8.2594238150373050e-76, 1.4791530184295327e-72, 1.8070968918430098e-69, 1.5696742395566467e-66, 1.0023248619238131e-63, 4.8364134393087594e-61, 1.8044278404363096e-58, 5.3077896463877236e-56, 1.2516691340073944e-53, 2.4006692740362835e-51, 3.7923450617699092e-49, 4.9890808610628276e-47, 5.5197834921993867e-45, 5.1808064286331934e-43, 4.1575196368970433e-41, 2.8726311560209485e-39, 1.7198340431536187e-37, 8.9732693547519472e-36, 4.1015179356507749e-34, 1.6502257188992863e-32, 5.8701285325766118e-31, 1.8535450041434515e-29, 5.2145562564115323e-28, 1.3115164390064590e-26, 2.9583134344508206e-25, 6.0020532911752611e-24, 1.0982988908956421e-22, 1.8172013379429154e-21, 2.7250115798124682e-20, 3.7116593876307894e-19, 4.6014038935013638e-18, 5.2019302408782819e-17, 5.3723712748625257e-16, 5.0771526984812688e-15, 4.3975029340499356e-14, 3.4959048726655372e-13, 2.5543256450392521e-12, 1.7175688761100677e-11, 1.0641335972971535e-10, 6.0815188314211019e-10, 3.2093758879421831e-09, 1.5654930566151192e-08, 7.0648903615829459e-08, 2.9522889815634413e-07, 1.1433046682221214e-06, 4.1062137791532273e-06, 1.3686795544658825e-05, 4.2366755055396916e-05, 1.2186350361528871e-04, 3.2590355495495157e-04, 8.1076372459361551e-04, 1.8771268090367514e-03, 4.0464432735401257e-03, 8.1246183048277309e-03, 1.5199679576417248e-02, 2.6503531097710236e-02, 4.3085461690763743e-02, 6.5316009514185674e-02, 9.2354740298272661e-02, 1.2182123832712786e-01, 1.4992351472225099e-01, 1.7216437167539980e-01, 1.8448997083421120e-01, 1.8448997083421120e-01, 1.7216437167539980e-01, 1.4992351472225099e-01, 1.2182123832712786e-01, 9.2354740298272661e-02, 6.5316009514185674e-02, 4.3085461690763743e-02, 2.6503531097710236e-02, 1.5199679576417248e-02, 8.1246183048277309e-03, 4.0464432735401257e-03, 1.8771268090367514e-03, 8.1076372459361551e-04, 3.2590355495495157e-04, 1.2186350361528871e-04, 4.2366755055396916e-05, 1.3686795544658825e-05, 4.1062137791532273e-06, 
1.1433046682221214e-06, 2.9522889815634413e-07, 7.0648903615829459e-08, 1.5654930566151192e-08, 3.2093758879421831e-09, 6.0815188314211019e-10, 1.0641335972971535e-10, 1.7175688761100677e-11, 2.5543256450392521e-12, 3.4959048726655372e-13, 4.3975029340499356e-14, 5.0771526984812688e-15, 5.3723712748625257e-16, 5.2019302408782819e-17, 4.6014038935013638e-18, 3.7116593876307894e-19, 2.7250115798124682e-20, 1.8172013379429154e-21, 1.0982988908956421e-22, 6.0020532911752611e-24, 2.9583134344508206e-25, 1.3115164390064590e-26, 5.2145562564115323e-28, 1.8535450041434515e-29, 5.8701285325766118e-31, 1.6502257188992863e-32, 4.1015179356507749e-34, 8.9732693547519472e-36, 1.7198340431536187e-37, 2.8726311560209485e-39, 4.1575196368970433e-41, 5.1808064286331934e-43, 5.5197834921993867e-45, 4.9890808610628276e-47, 3.7923450617699092e-49, 2.4006692740362835e-51, 1.2516691340073944e-53, 5.3077896463877236e-56, 1.8044278404363096e-58, 4.8364134393087594e-61, 1.0023248619238131e-63, 1.5696742395566467e-66, 1.8070968918430098e-69, 1.4791530184295327e-72, 8.2594238150373050e-76, 2.9859071017101074e-79, 6.5291103196563761e-83, 7.8796483529297319e-87, 4.6128120689561502e-91, 1.0791401722330717e-95, 7.3351907294176643e-101, 7.8858321167381958e-107, 2.8379348362554196e-114}, + {4.0651653045385065e-115, 1.1812699945481087e-107, 1.1408091037467098e-101, 1.7361831916251206e-96, 7.6595061701919539e-92, 1.3482180200386414e-87, 1.1497555722475321e-83, 5.4065905193630812e-80, 1.5366409186618650e-76, 2.8258657002435720e-73, 3.5434157687072630e-70, 3.1577316852342737e-67, 2.0680029879339054e-64, 1.0231020536481417e-61, 3.9127659587819162e-59, 1.1795593319147925e-56, 2.8502626581292134e-54, 5.6008912228881934e-52, 9.0638661704615042e-50, 1.2214291755276681e-47, 1.3841464531877442e-45, 1.3306007915064349e-43, 1.0936052280580312e-41, 7.7388088770231391e-40, 4.7451186753733802e-38, 2.5356047070173512e-36, 1.1870121009809329e-34, 4.8915667031805749e-33, 1.7822371990214424e-31, 5.7644513660319519e-30, 1.6612582615477246e-28, 4.2804618347791540e-27, 9.8922322140449142e-26, 2.0564738212957134e-24, 3.8562115828650105e-23, 6.5389458924467930e-22, 1.0050517825377878e-20, 1.4033245536407663e-19, 1.7836445099325982e-18, 2.0676254282785005e-17, 2.1899168813282935e-16, 2.1227770911527276e-15, 1.8861860904057362e-14, 1.5385347856793465e-13, 1.1536439246054796e-12, 7.9623160515403962e-12, 5.0644993569234348e-11, 2.9720520664121096e-10, 1.6108731738857700e-09, 8.0720587699486656e-09, 3.7430932220185756e-08, 1.6076054796089227e-07, 6.4000756781336375e-07, 2.3636229043243477e-06, 8.1033917859954005e-06, 2.5807006976135383e-05, 7.6393528572820399e-05, 2.1031479179566012e-04, 5.3877140763670364e-04, 1.2849006257472789e-03, 2.8540126420656033e-03, 5.9065989999834033e-03, 1.1393877212953164e-02, 2.0492691616955331e-02, 3.4375183073736520e-02, 5.3792186966533914e-02, 7.8544533849893697e-02, 1.0703228204562122e-01, 1.3613832142377319e-01, 1.6164525387078738e-01, 1.7918358709477061e-01, 1.8544178808228795e-01, 1.7918358709477061e-01, 1.6164525387078738e-01, 1.3613832142377319e-01, 1.0703228204562122e-01, 7.8544533849893697e-02, 5.3792186966533914e-02, 3.4375183073736520e-02, 2.0492691616955331e-02, 1.1393877212953164e-02, 5.9065989999834033e-03, 2.8540126420656033e-03, 1.2849006257472789e-03, 5.3877140763670364e-04, 2.1031479179566012e-04, 7.6393528572820399e-05, 2.5807006976135383e-05, 8.1033917859954005e-06, 2.3636229043243477e-06, 6.4000756781336375e-07, 1.6076054796089227e-07, 3.7430932220185756e-08, 8.0720587699486656e-09, 
1.6108731738857700e-09, 2.9720520664121096e-10, 5.0644993569234348e-11, 7.9623160515403962e-12, 1.1536439246054796e-12, 1.5385347856793465e-13, 1.8861860904057362e-14, 2.1227770911527276e-15, 2.1899168813282935e-16, 2.0676254282785005e-17, 1.7836445099325982e-18, 1.4033245536407663e-19, 1.0050517825377878e-20, 6.5389458924467930e-22, 3.8562115828650105e-23, 2.0564738212957134e-24, 9.8922322140449142e-26, 4.2804618347791540e-27, 1.6612582615477246e-28, 5.7644513660319519e-30, 1.7822371990214424e-31, 4.8915667031805749e-33, 1.1870121009809329e-34, 2.5356047070173512e-36, 4.7451186753733802e-38, 7.7388088770231391e-40, 1.0936052280580312e-41, 1.3306007915064349e-43, 1.3841464531877442e-45, 1.2214291755276681e-47, 9.0638661704615042e-50, 5.6008912228881934e-52, 2.8502626581292134e-54, 1.1795593319147925e-56, 3.9127659587819162e-59, 1.0231020536481417e-61, 2.0680029879339054e-64, 3.1577316852342737e-67, 3.5434157687072630e-70, 2.8258657002435720e-73, 1.5366409186618650e-76, 5.4065905193630812e-80, 1.1497555722475321e-83, 1.3482180200386414e-87, 7.6595061701919539e-92, 1.7361831916251206e-96, 1.1408091037467098e-101, 1.1812699945481087e-107, 4.0651653045385065e-115}, + {5.8215540266145185e-116, 1.7686479321265132e-108, 1.7730623068856681e-102, 2.7909232189455364e-97, 1.2705722231489315e-92, 2.3041370774073117e-88, 2.0220172247445533e-84, 9.7753535494291230e-81, 2.8542469937135876e-77, 5.3891664187060082e-74, 6.9347342664174447e-71, 6.3393303649456501e-68, 4.2572743226051364e-65, 2.1591730776951801e-62, 8.4632278622572117e-60, 2.6143765867593842e-57, 6.4722677680216748e-55, 1.3028416632725528e-52, 2.1595419825453231e-50, 2.9805109745607960e-48, 3.4589808905752308e-46, 3.4051398010375247e-44, 2.8658496493850430e-42, 2.0766525261834300e-40, 1.3038602059242759e-38, 7.1344872719593792e-37, 3.4201248592076866e-35, 1.4432903376167608e-33, 5.3852783517497647e-32, 1.7838570362707828e-30, 5.2653274547677795e-29, 1.3896193908215000e-27, 3.2896623243386845e-26, 7.0060070837042623e-25, 1.3459870964148325e-23, 2.3386559482775554e-22, 3.6836215601895347e-21, 5.2713950568645777e-20, 6.8677255495973648e-19, 8.1615638298029365e-18, 8.8631701623207850e-17, 8.8103300045560730e-16, 8.0291277303592449e-15, 6.7183131113433536e-14, 5.1685673461842002e-13, 3.6606905840155156e-12, 2.3898379902347417e-11, 1.4397371489675669e-10, 8.0125910717950545e-10, 4.1235783120777111e-09, 1.9642460747885040e-08, 8.6680352990322611e-08, 3.5465537239549829e-07, 1.3464428002356888e-06, 4.7465352780258519e-06, 1.5547603684619328e-05, 4.7349798947666790e-05, 1.3415011473384580e-04, 3.5376394702118597e-04, 8.6875807961727194e-04, 1.9876616088445901e-03, 4.2386040557315851e-03, 8.4275632150598145e-03, 1.5628807300276451e-02, 2.7041109022394049e-02, 4.3662930188759774e-02, 6.5809838711917215e-02, 9.2606922401635969e-02, 1.2168620186278144e-01, 1.4932782014181728e-01, 1.7115266931753381e-01, 1.8323066902877375e-01, 1.8323066902877375e-01, 1.7115266931753381e-01, 1.4932782014181728e-01, 1.2168620186278144e-01, 9.2606922401635969e-02, 6.5809838711917215e-02, 4.3662930188759774e-02, 2.7041109022394049e-02, 1.5628807300276451e-02, 8.4275632150598145e-03, 4.2386040557315851e-03, 1.9876616088445901e-03, 8.6875807961727194e-04, 3.5376394702118597e-04, 1.3415011473384580e-04, 4.7349798947666790e-05, 1.5547603684619328e-05, 4.7465352780258519e-06, 1.3464428002356888e-06, 3.5465537239549829e-07, 8.6680352990322611e-08, 1.9642460747885040e-08, 4.1235783120777111e-09, 8.0125910717950545e-10, 1.4397371489675669e-10, 2.3898379902347417e-11, 
3.6606905840155156e-12, 5.1685673461842002e-13, 6.7183131113433536e-14, 8.0291277303592449e-15, 8.8103300045560730e-16, 8.8631701623207850e-17, 8.1615638298029365e-18, 6.8677255495973648e-19, 5.2713950568645777e-20, 3.6836215601895347e-21, 2.3386559482775554e-22, 1.3459870964148325e-23, 7.0060070837042623e-25, 3.2896623243386845e-26, 1.3896193908215000e-27, 5.2653274547677795e-29, 1.7838570362707828e-30, 5.3852783517497647e-32, 1.4432903376167608e-33, 3.4201248592076866e-35, 7.1344872719593792e-37, 1.3038602059242759e-38, 2.0766525261834300e-40, 2.8658496493850430e-42, 3.4051398010375247e-44, 3.4589808905752308e-46, 2.9805109745607960e-48, 2.1595419825453231e-50, 1.3028416632725528e-52, 6.4722677680216748e-55, 2.6143765867593842e-57, 8.4632278622572117e-60, 2.1591730776951801e-62, 4.2572743226051364e-65, 6.3393303649456501e-68, 6.9347342664174447e-71, 5.3891664187060082e-74, 2.8542469937135876e-77, 9.7753535494291230e-81, 2.0220172247445533e-84, 2.3041370774073117e-88, 1.2705722231489315e-92, 2.7909232189455364e-97, 1.7730623068856681e-102, 1.7686479321265132e-108, 5.8215540266145185e-116}, + {8.3346219931731252e-117, 2.6468336179684675e-109, 2.7538984203799183e-103, 4.4826957080916534e-98, 2.1055559670183425e-93, 3.9333042153463284e-89, 3.5513952096001502e-85, 1.7648636462384819e-81, 5.2931716200065506e-78, 1.0259659682881671e-74, 1.3546124768341285e-71, 1.2700634063455813e-68, 8.7450484044987438e-66, 4.5461329246587614e-63, 1.8260394178147789e-60, 5.7792816013390868e-58, 1.4656166079834788e-55, 3.0217106412347761e-53, 5.1294451668154943e-51, 7.2494810877533628e-49, 8.6147071643386152e-47, 8.6832052790729524e-45, 7.4822992810294955e-43, 5.5510153038363309e-41, 3.5683132140145833e-39, 1.9990336800405551e-37, 9.8114235481332920e-36, 4.2392471038699379e-34, 1.6195898700068922e-32, 5.4933861932302487e-31, 1.6604056318842839e-29, 4.4876913417614677e-28, 1.0880533660044361e-26, 2.3734409390889848e-25, 4.6708788240159748e-24, 8.3141586271431848e-23, 1.3417423881673005e-21, 1.9674928839115667e-20, 2.6269310713002712e-19, 3.1997441855901569e-18, 3.5620366003569210e-17, 3.6302240823037884e-16, 3.3924183801506980e-15, 2.9112006503092275e-14, 2.2973505987627707e-13, 1.6693328537504025e-12, 1.1182840247910251e-11, 6.9144057864820654e-11, 3.9502105053420148e-10, 2.0873156939372195e-09, 1.0211061576622443e-08, 4.6286545842406255e-08, 1.9458139349161944e-07, 7.5918662435991949e-07, 2.7511365949804849e-06, 9.2658749369498393e-06, 2.9023198119388909e-05, 8.4594658849625380e-05, 2.2956989796967087e-04, 5.8033480492528523e-04, 1.3672056490331354e-03, 3.0030570133818706e-03, 6.1522556633449429e-03, 1.1759716605478240e-02, 2.0979064379787655e-02, 3.4939934715514299e-02, 5.4338778627529040e-02, 7.8929618144152600e-02, 1.0709980745724954e-01, 1.3577444219071524e-01, 1.6083336191837752e-01, 1.7803169108709616e-01, 1.8416287919896177e-01, 1.7803169108709616e-01, 1.6083336191837752e-01, 1.3577444219071524e-01, 1.0709980745724954e-01, 7.8929618144152600e-02, 5.4338778627529040e-02, 3.4939934715514299e-02, 2.0979064379787655e-02, 1.1759716605478240e-02, 6.1522556633449429e-03, 3.0030570133818706e-03, 1.3672056490331354e-03, 5.8033480492528523e-04, 2.2956989796967087e-04, 8.4594658849625380e-05, 2.9023198119388909e-05, 9.2658749369498393e-06, 2.7511365949804849e-06, 7.5918662435991949e-07, 1.9458139349161944e-07, 4.6286545842406255e-08, 1.0211061576622443e-08, 2.0873156939372195e-09, 3.9502105053420148e-10, 6.9144057864820654e-11, 1.1182840247910251e-11, 1.6693328537504025e-12, 2.2973505987627707e-13, 
2.9112006503092275e-14, 3.3924183801506980e-15, 3.6302240823037884e-16, 3.5620366003569210e-17, 3.1997441855901569e-18, 2.6269310713002712e-19, 1.9674928839115667e-20, 1.3417423881673005e-21, 8.3141586271431848e-23, 4.6708788240159748e-24, 2.3734409390889848e-25, 1.0880533660044361e-26, 4.4876913417614677e-28, 1.6604056318842839e-29, 5.4933861932302487e-31, 1.6195898700068922e-32, 4.2392471038699379e-34, 9.8114235481332920e-36, 1.9990336800405551e-37, 3.5683132140145833e-39, 5.5510153038363309e-41, 7.4822992810294955e-43, 8.6832052790729524e-45, 8.6147071643386152e-47, 7.2494810877533628e-49, 5.1294451668154943e-51, 3.0217106412347761e-53, 1.4656166079834788e-55, 5.7792816013390868e-58, 1.8260394178147789e-60, 4.5461329246587614e-63, 8.7450484044987438e-66, 1.2700634063455813e-68, 1.3546124768341285e-71, 1.0259659682881671e-74, 5.2931716200065506e-78, 1.7648636462384819e-81, 3.5513952096001502e-85, 3.9333042153463284e-89, 2.1055559670183425e-93, 4.4826957080916534e-98, 2.7538984203799183e-103, 2.6468336179684675e-109, 8.3346219931731252e-117}, + {1.1929450990497388e-117, 3.9591990974598804e-110, 4.2745275541790886e-104, 7.1940582732963705e-99, 3.4858463341267156e-94, 6.7067764488470954e-90, 6.2295239916515678e-86, 3.1817588164569700e-82, 9.8006355361055656e-79, 1.9498222863573950e-75, 2.6411200905371024e-72, 2.5394077671157272e-69, 1.7924826309087973e-66, 9.5498361457830398e-64, 3.9302490205746199e-61, 1.2742413782141750e-58, 3.3097280501169341e-56, 6.9880745770528625e-54, 1.2146673922184433e-51, 1.7576627452371236e-49, 2.1383500879761454e-47, 2.2064997583491492e-45, 1.9463790072121561e-43, 1.4781646220486795e-41, 9.7267315160133413e-40, 5.5780033470752983e-38, 2.8025392714968451e-36, 1.2395973767055291e-34, 4.8482501635459064e-33, 1.6835590125100971e-31, 5.2099674531110126e-30, 1.4417990594032542e-28, 3.5795224098780112e-27, 7.9961758900246410e-26, 1.6116510554151974e-24, 2.9383411496603336e-23, 4.8574792709615864e-22, 7.2973076814277955e-21, 9.9829502923346154e-20, 1.2460703643781112e-18, 1.4216815546927034e-17, 1.4851721687462022e-16, 1.4228468795430129e-15, 1.2519744100239038e-14, 1.0132054243548976e-13, 7.5515422033135666e-13, 5.1897461393175068e-12, 3.2925485308161026e-11, 1.9304840630938534e-10, 1.0471078941135214e-09, 5.2592465173501358e-09, 2.4482298403820274e-08, 1.0571641536621842e-07, 4.2377549476354214e-07, 1.5781624324532756e-06, 5.4637147044287493e-06, 1.7596343655070067e-05, 5.2748866067347142e-05, 1.4726499921039843e-04, 3.8309201801330436e-04, 9.2902602860733265e-04, 2.1011697743361312e-03, 4.4337774736073364e-03, 8.7321528128579372e-03, 1.6056218663448245e-02, 2.7571764216085648e-02, 4.4227798774476149e-02, 6.6287455917162549e-02, 9.2844091734104794e-02, 1.2154351785304160e-01, 1.4873661992593393e-01, 1.7015822429768401e-01, 1.8199680336473223e-01, 1.8199680336473223e-01, 1.7015822429768401e-01, 1.4873661992593393e-01, 1.2154351785304160e-01, 9.2844091734104794e-02, 6.6287455917162549e-02, 4.4227798774476149e-02, 2.7571764216085648e-02, 1.6056218663448245e-02, 8.7321528128579372e-03, 4.4337774736073364e-03, 2.1011697743361312e-03, 9.2902602860733265e-04, 3.8309201801330436e-04, 1.4726499921039843e-04, 5.2748866067347142e-05, 1.7596343655070067e-05, 5.4637147044287493e-06, 1.5781624324532756e-06, 4.2377549476354214e-07, 1.0571641536621842e-07, 2.4482298403820274e-08, 5.2592465173501358e-09, 1.0471078941135214e-09, 1.9304840630938534e-10, 3.2925485308161026e-11, 5.1897461393175068e-12, 7.5515422033135666e-13, 1.0132054243548976e-13, 1.2519744100239038e-14, 
1.4228468795430129e-15, 1.4851721687462022e-16, 1.4216815546927034e-17, 1.2460703643781112e-18, 9.9829502923346154e-20, 7.2973076814277955e-21, 4.8574792709615864e-22, 2.9383411496603336e-23, 1.6116510554151974e-24, 7.9961758900246410e-26, 3.5795224098780112e-27, 1.4417990594032542e-28, 5.2099674531110126e-30, 1.6835590125100971e-31, 4.8482501635459064e-33, 1.2395973767055291e-34, 2.8025392714968451e-36, 5.5780033470752983e-38, 9.7267315160133413e-40, 1.4781646220486795e-41, 1.9463790072121561e-43, 2.2064997583491492e-45, 2.1383500879761454e-47, 1.7576627452371236e-49, 1.2146673922184433e-51, 6.9880745770528625e-54, 3.3097280501169341e-56, 1.2742413782141750e-58, 3.9302490205746199e-61, 9.5498361457830398e-64, 1.7924826309087973e-66, 2.5394077671157272e-69, 2.6411200905371024e-72, 1.9498222863573950e-75, 9.8006355361055656e-79, 3.1817588164569700e-82, 6.2295239916515678e-86, 6.7067764488470954e-90, 3.4858463341267156e-94, 7.1940582732963705e-99, 4.2745275541790886e-104, 3.9591990974598804e-110, 1.1929450990497388e-117}, + {1.7070406181366590e-118, 5.9195113844414591e-111, 6.6305252022274385e-105, 1.1536024679706741e-99, 5.7653910975637400e-95, 1.1423077010030593e-90, 1.0913377348991997e-86, 5.7280704066660911e-83, 1.8118187632234171e-79, 3.6992788142779886e-76, 5.1399529790115008e-73, 5.0672863794680850e-70, 3.6662464654865143e-67, 2.0015272638354942e-64, 8.4387742209978957e-62, 2.8023099391369100e-59, 7.4539706347406648e-57, 1.6114689883841890e-54, 2.8677408135139503e-52, 4.2481015098521193e-50, 5.2903245181279725e-48, 5.5876178971908134e-46, 5.0448907217391667e-44, 3.9213661446271294e-42, 2.6409880203075402e-40, 1.5501131114814050e-38, 7.9712569158416828e-37, 3.6087469701637568e-35, 1.4446952835847923e-33, 5.1351616577255980e-32, 1.6267414726790182e-30, 4.6086441078964235e-29, 1.1714111882873408e-27, 2.6792736258069186e-26, 5.5296079664300731e-25, 1.0324183959837516e-23, 1.7479927721363103e-22, 2.6897676451867309e-21, 3.7695192606286801e-20, 4.8205800888162673e-19, 5.6356840948811206e-18, 6.0335058787737898e-17, 5.9246665636288388e-16, 5.3441830122937569e-15, 4.4343905593075405e-14, 3.3891924837505246e-13, 2.3889528428660744e-12, 1.5547996800001229e-11, 9.3534621545646173e-11, 5.2065241132424465e-10, 2.6842310976125569e-09, 1.2828665113850952e-08, 5.6885421998531753e-08, 2.3421973604647242e-07, 8.9613156437377830e-07, 3.1882138059205163e-06, 1.0554357586582146e-05, 3.2530204545596872e-05, 9.3401898795249160e-05, 2.4995620584101932e-04, 6.2376294512410423e-04, 1.4521587444586976e-03, 3.1551701974875863e-03, 6.4003731513496125e-03, 1.2125677167472506e-02, 2.1461222380505476e-02, 3.5494877838398289e-02, 5.4870701544829148e-02, 7.9298651231443446e-02, 1.0715555306319222e-01, 1.3540875492462576e-01, 1.6003249638444184e-01, 1.7690165636825916e-01, 1.8291007049692826e-01, 1.7690165636825916e-01, 1.6003249638444184e-01, 1.3540875492462576e-01, 1.0715555306319222e-01, 7.9298651231443446e-02, 5.4870701544829148e-02, 3.5494877838398289e-02, 2.1461222380505476e-02, 1.2125677167472506e-02, 6.4003731513496125e-03, 3.1551701974875863e-03, 1.4521587444586976e-03, 6.2376294512410423e-04, 2.4995620584101932e-04, 9.3401898795249160e-05, 3.2530204545596872e-05, 1.0554357586582146e-05, 3.1882138059205163e-06, 8.9613156437377830e-07, 2.3421973604647242e-07, 5.6885421998531753e-08, 1.2828665113850952e-08, 2.6842310976125569e-09, 5.2065241132424465e-10, 9.3534621545646173e-11, 1.5547996800001229e-11, 2.3889528428660744e-12, 3.3891924837505246e-13, 4.4343905593075405e-14, 5.3441830122937569e-15, 
5.9246665636288388e-16, 6.0335058787737898e-17, 5.6356840948811206e-18, 4.8205800888162673e-19, 3.7695192606286801e-20, 2.6897676451867309e-21, 1.7479927721363103e-22, 1.0324183959837516e-23, 5.5296079664300731e-25, 2.6792736258069186e-26, 1.1714111882873408e-27, 4.6086441078964235e-29, 1.6267414726790182e-30, 5.1351616577255980e-32, 1.4446952835847923e-33, 3.6087469701637568e-35, 7.9712569158416828e-37, 1.5501131114814050e-38, 2.6409880203075402e-40, 3.9213661446271294e-42, 5.0448907217391667e-44, 5.5876178971908134e-46, 5.2903245181279725e-48, 4.2481015098521193e-50, 2.8677408135139503e-52, 1.6114689883841890e-54, 7.4539706347406648e-57, 2.8023099391369100e-59, 8.4387742209978957e-62, 2.0015272638354942e-64, 3.6662464654865143e-67, 5.0672863794680850e-70, 5.1399529790115008e-73, 3.6992788142779886e-76, 1.8118187632234171e-79, 5.7280704066660911e-83, 1.0913377348991997e-86, 1.1423077010030593e-90, 5.7653910975637400e-95, 1.1536024679706741e-99, 6.6305252022274385e-105, 5.9195113844414591e-111, 1.7070406181366590e-118}, + {2.4420658728359029e-119, 8.8463582669186499e-112, 1.0278520435124409e-105, 1.8483749220636043e-100, 9.5264999562099965e-96, 1.9434401044261462e-91, 1.9094951070499896e-87, 1.0297738588062730e-83, 3.3443041190506409e-80, 7.0066129472632568e-77, 9.9847674734222346e-74, 1.0091718959432019e-70, 7.4829681064377069e-68, 4.1855384000761587e-65, 1.8075959008515128e-62, 6.1472579038779528e-60, 1.6742548573659892e-57, 3.7056307854458431e-55, 6.7504839696392484e-53, 1.0235353311199910e-50, 1.3045775870281223e-48, 1.4101600092151863e-46, 1.3029538434289154e-44, 1.0364256329737640e-42, 7.1430632340356861e-41, 4.2904029597408759e-39, 2.2577848908542067e-37, 1.0460259894518416e-35, 4.2855476518701719e-34, 1.5590004744721381e-32, 5.0546933927274757e-31, 1.4657489608244835e-29, 3.8136046756805110e-28, 8.9292832689033937e-27, 1.8867047659679220e-25, 3.6067468226756066e-24, 6.2530721763179845e-23, 9.8539255836953708e-22, 1.4143965820612745e-20, 1.8527994733871280e-19, 2.2190951508494865e-18, 2.4342129329346826e-17, 2.4494846944487187e-16, 2.2645381737872469e-15, 1.9261422339406544e-14, 1.5093097961491213e-13, 1.0909182729913857e-12, 7.2818111329385306e-12, 4.4936440478669596e-11, 2.5663720011644112e-10, 1.3577644240845783e-09, 6.6605391288638309e-09, 3.0321186483039541e-08, 1.2819858780710668e-07, 5.0378676249733306e-07, 1.8413731465659074e-06, 6.2640223021607951e-06, 1.9844802959774770e-05, 5.8582692413005888e-05, 1.6123158507144338e-04, 4.1390553680515079e-04, 9.9155807247580142e-04, 2.2175853317597280e-03, 4.6318204121044990e-03, 9.0381820758826531e-03, 1.6481721286763326e-02, 2.8095443629279886e-02, 4.4780275406602559e-02, 6.6749354510874270e-02, 9.3066885205695463e-02, 1.2139368280029710e-01, 1.4814998194065979e-01, 1.6918056507168472e-01, 1.8078752906102052e-01, 1.8078752906102052e-01, 1.6918056507168472e-01, 1.4814998194065979e-01, 1.2139368280029710e-01, 9.3066885205695463e-02, 6.6749354510874270e-02, 4.4780275406602559e-02, 2.8095443629279886e-02, 1.6481721286763326e-02, 9.0381820758826531e-03, 4.6318204121044990e-03, 2.2175853317597280e-03, 9.9155807247580142e-04, 4.1390553680515079e-04, 1.6123158507144338e-04, 5.8582692413005888e-05, 1.9844802959774770e-05, 6.2640223021607951e-06, 1.8413731465659074e-06, 5.0378676249733306e-07, 1.2819858780710668e-07, 3.0321186483039541e-08, 6.6605391288638309e-09, 1.3577644240845783e-09, 2.5663720011644112e-10, 4.4936440478669596e-11, 7.2818111329385306e-12, 1.0909182729913857e-12, 1.5093097961491213e-13, 1.9261422339406544e-14, 
2.2645381737872469e-15, 2.4494846944487187e-16, 2.4342129329346826e-17, 2.2190951508494865e-18, 1.8527994733871280e-19, 1.4143965820612745e-20, 9.8539255836953708e-22, 6.2530721763179845e-23, 3.6067468226756066e-24, 1.8867047659679220e-25, 8.9292832689033937e-27, 3.8136046756805110e-28, 1.4657489608244835e-29, 5.0546933927274757e-31, 1.5590004744721381e-32, 4.2855476518701719e-34, 1.0460259894518416e-35, 2.2577848908542067e-37, 4.2904029597408759e-39, 7.1430632340356861e-41, 1.0364256329737640e-42, 1.3029538434289154e-44, 1.4101600092151863e-46, 1.3045775870281223e-48, 1.0235353311199910e-50, 6.7504839696392484e-53, 3.7056307854458431e-55, 1.6742548573659892e-57, 6.1472579038779528e-60, 1.8075959008515128e-62, 4.1855384000761587e-65, 7.4829681064377069e-68, 1.0091718959432019e-70, 9.9847674734222346e-74, 7.0066129472632568e-77, 3.3443041190506409e-80, 1.0297738588062730e-83, 1.9094951070499896e-87, 1.9434401044261462e-91, 9.5264999562099965e-96, 1.8483749220636043e-100, 1.0278520435124409e-105, 8.8463582669186499e-112, 2.4420658728359029e-119}, + {3.4927077138614495e-120, 1.3214344265685972e-112, 1.5923529644019419e-106, 2.9592365392916602e-101, 1.5726312834383212e-96, 3.3028107528503361e-92, 3.3368715475506461e-88, 1.8487368270391726e-84, 6.1636129673212566e-81, 1.3248817176167857e-77, 1.9361289427704634e-74, 2.0059128285660820e-71, 1.5241335107478225e-68, 8.7332836494574003e-66, 3.8627717414010648e-63, 1.3451211690839924e-60, 3.7506692343030420e-58, 8.4975513070583486e-56, 1.5843787624675853e-53, 2.4585380900294205e-51, 3.2067130430374953e-49, 3.5468958347722111e-47, 3.3533561409072355e-45, 2.7292702435888796e-43, 1.9246140375840066e-41, 1.1827867549833032e-39, 6.3685955808210726e-38, 3.0190103270136603e-36, 1.2656167558393507e-34, 4.7112145637232019e-33, 1.5631232313432506e-31, 4.6386822641081590e-30, 1.2351960569462707e-28, 2.9601529366809405e-27, 6.4022761029959671e-26, 1.2529044419038621e-24, 2.2238719447561208e-23, 3.5882695810709981e-22, 5.2741804743987912e-21, 7.0757399180811555e-20, 8.6802800075429983e-19, 9.7541289549630027e-18, 1.0056284258182538e-16, 9.5266259491509481e-16, 8.3044572801891268e-15, 6.6701332750751410e-14, 4.9425874408492004e-13, 3.3828553008734636e-12, 2.1409348303068694e-11, 1.2542010220460425e-10, 6.8076912279459116e-10, 3.4268964379715692e-09, 1.6011995715635929e-08, 6.9500026302554605e-08, 2.8044586426011501e-07, 1.0528006766858483e-06, 3.6792955563634022e-06, 1.1977707839841136e-05, 3.6343264722217326e-05, 1.0283673489115176e-04, 2.7149548605395329e-04, 6.6906197625873226e-04, 1.5397243486054463e-03, 3.3102482665201089e-03, 6.6507742518744171e-03, 1.2491552298533356e-02, 2.1939032537498546e-02, 3.6040083142086628e-02, 5.5388310278285176e-02, 7.9652215262150924e-02, 1.0720010935792797e-01, 1.3504155894527325e-01, 1.5924245367972575e-01, 1.7579280253635429e-01, 1.8168248613117691e-01, 1.7579280253635429e-01, 1.5924245367972575e-01, 1.3504155894527325e-01, 1.0720010935792797e-01, 7.9652215262150924e-02, 5.5388310278285176e-02, 3.6040083142086628e-02, 2.1939032537498546e-02, 1.2491552298533356e-02, 6.6507742518744171e-03, 3.3102482665201089e-03, 1.5397243486054463e-03, 6.6906197625873226e-04, 2.7149548605395329e-04, 1.0283673489115176e-04, 3.6343264722217326e-05, 1.1977707839841136e-05, 3.6792955563634022e-06, 1.0528006766858483e-06, 2.8044586426011501e-07, 6.9500026302554605e-08, 1.6011995715635929e-08, 3.4268964379715692e-09, 6.8076912279459116e-10, 1.2542010220460425e-10, 2.1409348303068694e-11, 3.3828553008734636e-12, 4.9425874408492004e-13, 
6.6701332750751410e-14, 8.3044572801891268e-15, 9.5266259491509481e-16, 1.0056284258182538e-16, 9.7541289549630027e-18, 8.6802800075429983e-19, 7.0757399180811555e-20, 5.2741804743987912e-21, 3.5882695810709981e-22, 2.2238719447561208e-23, 1.2529044419038621e-24, 6.4022761029959671e-26, 2.9601529366809405e-27, 1.2351960569462707e-28, 4.6386822641081590e-30, 1.5631232313432506e-31, 4.7112145637232019e-33, 1.2656167558393507e-34, 3.0190103270136603e-36, 6.3685955808210726e-38, 1.1827867549833032e-39, 1.9246140375840066e-41, 2.7292702435888796e-43, 3.3533561409072355e-45, 3.5468958347722111e-47, 3.2067130430374953e-49, 2.4585380900294205e-51, 1.5843787624675853e-53, 8.4975513070583486e-56, 3.7506692343030420e-58, 1.3451211690839924e-60, 3.8627717414010648e-63, 8.7332836494574003e-66, 1.5241335107478225e-68, 2.0059128285660820e-71, 1.9361289427704634e-74, 1.3248817176167857e-77, 6.1636129673212566e-81, 1.8487368270391726e-84, 3.3368715475506461e-88, 3.3028107528503361e-92, 1.5726312834383212e-96, 2.9592365392916602e-101, 1.5923529644019419e-106, 1.3214344265685972e-112, 3.4927077138614495e-120}, + {4.9941282442606218e-121, 1.9730195235183967e-113, 2.4653426103610859e-107, 4.7340081105291807e-102, 2.5936668261705122e-97, 5.6069447675960520e-93, 5.8240937771374570e-89, 3.3144783626996768e-85, 1.1342570222437056e-81, 2.5011096149430186e-78, 3.7476379174495528e-75, 3.9794814788876447e-72, 3.0979908735545725e-69, 1.8182439901095753e-66, 8.2354102738652040e-64, 2.9360923690156291e-61, 8.3803809757287599e-59, 1.9432660775004502e-56, 3.7079038793856585e-54, 5.8875467910577051e-52, 7.8572573935602256e-50, 8.8917221680510199e-48, 8.6005009516161168e-46, 7.1611639319733097e-44, 5.1661373788013124e-42, 3.2479669251550536e-40, 1.7890987177636214e-38, 8.6765699223362474e-37, 3.7212625316226216e-35, 1.4172340427766930e-33, 4.8110787981190828e-32, 1.4608586938294995e-30, 3.9805330733973355e-29, 9.7620761562971344e-28, 2.1608244949042565e-26, 4.3281046749583524e-25, 7.8636681438025253e-24, 1.2989130984240299e-22, 1.9546850118180088e-21, 2.6851626691585123e-20, 3.3733593511587544e-19, 3.8824303202657657e-18, 4.1001349905289425e-17, 3.9793051828221563e-16, 3.5542824359476288e-15, 2.9256051436471688e-14, 2.2220151125790449e-13, 1.5590549255574669e-12, 1.0116812009960419e-11, 6.0778437594159535e-11, 3.3838207012562939e-10, 1.7475098827142176e-09, 8.3784654573163473e-09, 3.7324734098992270e-08, 1.5461411843105312e-07, 5.9598254897039207e-07, 2.1391519688131322e-06, 7.1539515355890577e-06, 2.2304926470707147e-05, 6.4869805018180065e-05, 1.7607226778935711e-04, 4.4622007110453903e-04, 1.0563416857043819e-03, 2.3368400915518292e-03, 4.8325911868397145e-03, 9.3454535527143379e-03, 1.6905135871613127e-02, 2.8612106958160764e-02, 4.5320570847252710e-02, 6.7196013411209815e-02, 9.3275910346104995e-02, 1.2123716317285059e-01, 1.4756796236123393e-01, 1.6821923724022270e-01, 1.7960204009494329e-01, 1.7960204009494329e-01, 1.6821923724022270e-01, 1.4756796236123393e-01, 1.2123716317285059e-01, 9.3275910346104995e-02, 6.7196013411209815e-02, 4.5320570847252710e-02, 2.8612106958160764e-02, 1.6905135871613127e-02, 9.3454535527143379e-03, 4.8325911868397145e-03, 2.3368400915518292e-03, 1.0563416857043819e-03, 4.4622007110453903e-04, 1.7607226778935711e-04, 6.4869805018180065e-05, 2.2304926470707147e-05, 7.1539515355890577e-06, 2.1391519688131322e-06, 5.9598254897039207e-07, 1.5461411843105312e-07, 3.7324734098992270e-08, 8.3784654573163473e-09, 1.7475098827142176e-09, 3.3838207012562939e-10, 6.0778437594159535e-11, 
1.0116812009960419e-11, 1.5590549255574669e-12, 2.2220151125790449e-13, 2.9256051436471688e-14, 3.5542824359476288e-15, 3.9793051828221563e-16, 4.1001349905289425e-17, 3.8824303202657657e-18, 3.3733593511587544e-19, 2.6851626691585123e-20, 1.9546850118180088e-21, 1.2989130984240299e-22, 7.8636681438025253e-24, 4.3281046749583524e-25, 2.1608244949042565e-26, 9.7620761562971344e-28, 3.9805330733973355e-29, 1.4608586938294995e-30, 4.8110787981190828e-32, 1.4172340427766930e-33, 3.7212625316226216e-35, 8.6765699223362474e-37, 1.7890987177636214e-38, 3.2479669251550536e-40, 5.1661373788013124e-42, 7.1611639319733097e-44, 8.6005009516161168e-46, 8.8917221680510199e-48, 7.8572573935602256e-50, 5.8875467910577051e-52, 3.7079038793856585e-54, 1.9432660775004502e-56, 8.3803809757287599e-59, 2.9360923690156291e-61, 8.2354102738652040e-64, 1.8182439901095753e-66, 3.0979908735545725e-69, 3.9794814788876447e-72, 3.7476379174495528e-75, 2.5011096149430186e-78, 1.1342570222437056e-81, 3.3144783626996768e-85, 5.8240937771374570e-89, 5.6069447675960520e-93, 2.5936668261705122e-97, 4.7340081105291807e-102, 2.4653426103610859e-107, 1.9730195235183967e-113, 4.9941282442606218e-121}, + {7.1392222017713907e-122, 2.9445845110667619e-114, 3.8145868837135596e-108, 7.5673169898909099e-103, 4.2736593644380303e-98, 9.5083335143727237e-94, 1.0152930470452851e-89, 5.9342954899998552e-86, 2.0842120266141319e-82, 4.7139344340961388e-79, 7.2413131304484502e-76, 7.8798610228883591e-73, 6.2842892805603737e-70, 3.7773465198180638e-67, 1.7517513408453002e-64, 6.3932209199477197e-62, 1.8676726441679446e-59, 4.4319273103122463e-57, 8.6528507374176976e-55, 1.4056977850593077e-52, 1.9192002948401386e-50, 2.2217716370141584e-48, 2.1982691386986115e-46, 1.8722764372760468e-44, 1.3815703248778117e-42, 8.8845643289761640e-41, 5.0058533566520875e-39, 2.4832338706643681e-37, 1.0894227474485724e-35, 4.2442335712525587e-34, 1.4739090369348960e-32, 4.5785676785685073e-31, 1.2763864929327656e-29, 3.2028196423033392e-28, 7.2542458795704827e-27, 1.4869262820581125e-25, 2.7648741131715751e-24, 4.6744604380487021e-23, 7.2007069149677438e-22, 1.0126623472614820e-20, 1.3025790505179032e-19, 1.5351372541981349e-18, 1.6603563330241997e-17, 1.6505566788948694e-16, 1.5102835946897961e-15, 1.2737138463928603e-14, 9.9133825544714478e-14, 7.1289732797212971e-13, 4.7421458438831827e-12, 2.9209512270101235e-11, 1.6676562560887225e-10, 8.8333847125166249e-10, 4.3447745740955300e-09, 1.9860254178505552e-08, 8.4433687619112637e-08, 3.3409895046160263e-07, 1.2312829806627210e-06, 4.2290221665164350e-06, 1.3544999036061644e-05, 4.0477621043373221e-05, 1.1292006863438276e-04, 2.9420790203745482e-04, 7.1623536508659178e-04, 1.6298641009901027e-03, 3.4681866693627482e-03, 6.9032861428781267e-03, 1.2857145952636101e-02, 2.2412375310366960e-02, 3.6575629838591243e-02, 5.5891953828813568e-02, 7.9990869817108165e-02, 1.0723403572962083e-01, 1.3467313142650547e-01, 1.5846303276659124e-01, 1.7470447835636663e-01, 1.8047929085878514e-01, 1.7470447835636663e-01, 1.5846303276659124e-01, 1.3467313142650547e-01, 1.0723403572962083e-01, 7.9990869817108165e-02, 5.5891953828813568e-02, 3.6575629838591243e-02, 2.2412375310366960e-02, 1.2857145952636101e-02, 6.9032861428781267e-03, 3.4681866693627482e-03, 1.6298641009901027e-03, 7.1623536508659178e-04, 2.9420790203745482e-04, 1.1292006863438276e-04, 4.0477621043373221e-05, 1.3544999036061644e-05, 4.2290221665164350e-06, 1.2312829806627210e-06, 3.3409895046160263e-07, 8.4433687619112637e-08, 1.9860254178505552e-08, 
4.3447745740955300e-09, 8.8333847125166249e-10, 1.6676562560887225e-10, 2.9209512270101235e-11, 4.7421458438831827e-12, 7.1289732797212971e-13, 9.9133825544714478e-14, 1.2737138463928603e-14, 1.5102835946897961e-15, 1.6505566788948694e-16, 1.6603563330241997e-17, 1.5351372541981349e-18, 1.3025790505179032e-19, 1.0126623472614820e-20, 7.2007069149677438e-22, 4.6744604380487021e-23, 2.7648741131715751e-24, 1.4869262820581125e-25, 7.2542458795704827e-27, 3.2028196423033392e-28, 1.2763864929327656e-29, 4.5785676785685073e-31, 1.4739090369348960e-32, 4.2442335712525587e-34, 1.0894227474485724e-35, 2.4832338706643681e-37, 5.0058533566520875e-39, 8.8845643289761640e-41, 1.3815703248778117e-42, 1.8722764372760468e-44, 2.1982691386986115e-46, 2.2217716370141584e-48, 1.9192002948401386e-50, 1.4056977850593077e-52, 8.6528507374176976e-55, 4.4319273103122463e-57, 1.8676726441679446e-59, 6.3932209199477197e-62, 1.7517513408453002e-64, 3.7773465198180638e-67, 6.2842892805603737e-70, 7.8798610228883591e-73, 7.2413131304484502e-76, 4.7139344340961388e-79, 2.0842120266141319e-82, 5.9342954899998552e-86, 1.0152930470452851e-89, 9.5083335143727237e-94, 4.2736593644380303e-98, 7.5673169898909099e-103, 3.8145868837135596e-108, 2.9445845110667619e-114, 7.1392222017713907e-122}, + {1.0203215419405115e-122, 4.3926409088220260e-115, 5.8986558173681245e-109, 1.2087105293093391e-103, 7.0353980850250737e-99, 1.6107326288260005e-94, 1.7678067289525790e-90, 1.0610697100358954e-86, 3.8241442068780522e-83, 8.8702923923721366e-80, 1.3967624162820115e-76, 1.5573928888055416e-73, 1.2722188115916827e-70, 7.8305641247237068e-68, 3.7176883954596755e-65, 1.3887513560879043e-62, 4.1517737028880682e-60, 1.0080675510766497e-57, 2.0135706193376487e-55, 3.3463102229914590e-53, 4.6733192109921242e-51, 5.5335945022909141e-49, 5.5997707655432161e-47, 4.8778201649868724e-45, 3.6811705155316592e-43, 2.4210413787496984e-41, 1.3950773628177882e-39, 7.0777866064732306e-38, 3.1757468989046869e-36, 1.2654132053176338e-34, 4.4947480753641788e-33, 1.4281952005441561e-31, 4.0727582182469552e-30, 1.0454808582283026e-28, 2.4226185736538330e-27, 5.0807355599009919e-26, 9.6670785467690252e-25, 1.6725361565602745e-23, 2.6368638947823986e-22, 3.7957111549106315e-21, 4.9980289800251531e-20, 6.0306098179642906e-19, 6.6786804983364280e-18, 6.7991402460144679e-17, 6.3720317811650348e-16, 5.5049284400726915e-15, 4.3896449685818035e-14, 3.2346882306340970e-13, 2.2052168865195276e-12, 1.3923473891503035e-11, 8.1499619365601122e-11, 4.4267370542157585e-10, 2.2331409933577818e-09, 1.0471618689062924e-08, 4.5678915072122709e-08, 1.8549839541616252e-07, 7.0175441742037147e-07, 2.4747420668926454e-06, 8.1402071214919003e-06, 2.4988785980142873e-05, 7.1628474554958981e-05, 1.9180837279585649e-04, 4.8004901932197877e-04, 1.1233614820028411e-03, 2.4588639421196687e-03, 5.0359498094177816e-03, 9.6537773093780983e-03, 1.7326295602536231e-02, 2.9121725644242560e-02, 4.5848897885251948e-02, 6.7627897299374851e-02, 9.3471746781564918e-02, 1.2107439739077461e-01, 1.4699060677873937e-01, 1.6727380280253201e-01, 1.7843956694581845e-01, 1.7843956694581845e-01, 1.6727380280253201e-01, 1.4699060677873937e-01, 1.2107439739077461e-01, 9.3471746781564918e-02, 6.7627897299374851e-02, 4.5848897885251948e-02, 2.9121725644242560e-02, 1.7326295602536231e-02, 9.6537773093780983e-03, 5.0359498094177816e-03, 2.4588639421196687e-03, 1.1233614820028411e-03, 4.8004901932197877e-04, 1.9180837279585649e-04, 7.1628474554958981e-05, 2.4988785980142873e-05, 8.1402071214919003e-06, 
2.4747420668926454e-06, 7.0175441742037147e-07, 1.8549839541616252e-07, 4.5678915072122709e-08, 1.0471618689062924e-08, 2.2331409933577818e-09, 4.4267370542157585e-10, 8.1499619365601122e-11, 1.3923473891503035e-11, 2.2052168865195276e-12, 3.2346882306340970e-13, 4.3896449685818035e-14, 5.5049284400726915e-15, 6.3720317811650348e-16, 6.7991402460144679e-17, 6.6786804983364280e-18, 6.0306098179642906e-19, 4.9980289800251531e-20, 3.7957111549106315e-21, 2.6368638947823986e-22, 1.6725361565602745e-23, 9.6670785467690252e-25, 5.0807355599009919e-26, 2.4226185736538330e-27, 1.0454808582283026e-28, 4.0727582182469552e-30, 1.4281952005441561e-31, 4.4947480753641788e-33, 1.2654132053176338e-34, 3.1757468989046869e-36, 7.0777866064732306e-38, 1.3950773628177882e-39, 2.4210413787496984e-41, 3.6811705155316592e-43, 4.8778201649868724e-45, 5.5997707655432161e-47, 5.5335945022909141e-49, 4.6733192109921242e-51, 3.3463102229914590e-53, 2.0135706193376487e-55, 1.0080675510766497e-57, 4.1517737028880682e-60, 1.3887513560879043e-62, 3.7176883954596755e-65, 7.8305641247237068e-68, 1.2722188115916827e-70, 1.5573928888055416e-73, 1.3967624162820115e-76, 8.8702923923721366e-80, 3.8241442068780522e-83, 1.0610697100358954e-86, 1.7678067289525790e-90, 1.6107326288260005e-94, 7.0353980850250737e-99, 1.2087105293093391e-103, 5.8986558173681245e-109, 4.3926409088220260e-115, 1.0203215419405115e-122}, + {1.4578716888574380e-123, 6.5499584195542182e-116, 9.1158441302395916e-110, 1.9291851130201365e-104, 1.1571376888010415e-99, 2.7257668732866951e-95, 3.0744291780743119e-91, 1.8947250431186403e-87, 7.0064183028413687e-84, 1.6664955389849393e-80, 2.6895662316444978e-77, 3.0723812680200150e-74, 2.5704405332276809e-71, 1.6198772061572697e-68, 7.8722498229365745e-66, 3.0095177392268181e-63, 9.2061099904566106e-61, 2.2868504296047653e-58, 4.6726874994563693e-56, 7.9427931843455534e-54, 1.1344972574084324e-51, 1.3738127684763008e-49, 1.4217090927665081e-47, 1.2663997582044745e-45, 9.7729455955061106e-44, 6.5725138972034875e-42, 3.8727324373725659e-40, 2.0091450762048232e-38, 9.2185663859103873e-37, 3.7563551936945986e-35, 1.3644995796552897e-33, 4.4341465741479497e-32, 1.2932715429724819e-30, 3.3956536214966241e-29, 8.0487680064663208e-28, 1.7268001245109222e-26, 3.3613905413108243e-25, 5.9504202589874584e-24, 9.5995603721747768e-23, 1.4141461815469168e-21, 1.9058394524305284e-20, 2.3538938440623925e-19, 2.6687563232163005e-18, 2.7817760484437033e-17, 2.6696608032295694e-16, 2.3621238776008329e-15, 1.9293848215014542e-14, 1.4565613270987712e-13, 1.0174797099588637e-12, 6.5837630407380247e-12, 3.9501281887181556e-11, 2.1996223199610065e-10, 1.1378158851290800e-09, 5.4720083734809282e-09, 2.4485807329829557e-08, 1.0202255589712521e-07, 3.9608954710479925e-07, 1.4337991258427923e-06, 4.8422272544211524e-06, 1.5265488972373680e-05, 4.4948480076886716e-05, 1.2367217082799177e-04, 3.1811200186972611e-04, 7.6528399238001590e-04, 1.7225370790285232e-03, 3.6288805291928589e-03, 7.1577405214717574e-03, 1.3222272319718250e-02, 2.2881143871485050e-02, 3.7101604703410701e-02, 5.6381975314828768e-02, 8.0315152775434245e-02, 1.0725786228191081e-01, 1.3430372904700771e-01, 1.5769403536946561e-01, 1.7363606017905039e-01, 1.7929968765055787e-01, 1.7363606017905039e-01, 1.5769403536946561e-01, 1.3430372904700771e-01, 1.0725786228191081e-01, 8.0315152775434245e-02, 5.6381975314828768e-02, 3.7101604703410701e-02, 2.2881143871485050e-02, 1.3222272319718250e-02, 7.1577405214717574e-03, 3.6288805291928589e-03, 1.7225370790285232e-03, 
7.6528399238001590e-04, 3.1811200186972611e-04, 1.2367217082799177e-04, 4.4948480076886716e-05, 1.5265488972373680e-05, 4.8422272544211524e-06, 1.4337991258427923e-06, 3.9608954710479925e-07, 1.0202255589712521e-07, 2.4485807329829557e-08, 5.4720083734809282e-09, 1.1378158851290800e-09, 2.1996223199610065e-10, 3.9501281887181556e-11, 6.5837630407380247e-12, 1.0174797099588637e-12, 1.4565613270987712e-13, 1.9293848215014542e-14, 2.3621238776008329e-15, 2.6696608032295694e-16, 2.7817760484437033e-17, 2.6687563232163005e-18, 2.3538938440623925e-19, 1.9058394524305284e-20, 1.4141461815469168e-21, 9.5995603721747768e-23, 5.9504202589874584e-24, 3.3613905413108243e-25, 1.7268001245109222e-26, 8.0487680064663208e-28, 3.3956536214966241e-29, 1.2932715429724819e-30, 4.4341465741479497e-32, 1.3644995796552897e-33, 3.7563551936945986e-35, 9.2185663859103873e-37, 2.0091450762048232e-38, 3.8727324373725659e-40, 6.5725138972034875e-42, 9.7729455955061106e-44, 1.2663997582044745e-45, 1.4217090927665081e-47, 1.3738127684763008e-49, 1.1344972574084324e-51, 7.9427931843455534e-54, 4.6726874994563693e-56, 2.2868504296047653e-58, 9.2061099904566106e-61, 3.0095177392268181e-63, 7.8722498229365745e-66, 1.6198772061572697e-68, 2.5704405332276809e-71, 3.0723812680200150e-74, 2.6895662316444978e-77, 1.6664955389849393e-80, 7.0064183028413687e-84, 1.8947250431186403e-87, 3.0744291780743119e-91, 2.7257668732866951e-95, 1.1571376888010415e-99, 1.9291851130201365e-104, 9.1158441302395916e-110, 6.5499584195542182e-116, 1.4578716888574380e-123}, + {2.0825660478904760e-124, 9.7625806330567035e-117, 1.4079328290981535e-110, 3.0768081483238432e-105, 1.9014873630323964e-100, 4.6079244634146070e-96, 5.3405557808694215e-92, 3.3789567793532434e-88, 1.2818424753415062e-84, 3.1260093916222602e-81, 5.1701750558611924e-78, 6.0500487002887184e-75, 5.1832685019930213e-72, 3.3439902942647502e-69, 1.6632668027272507e-66, 6.5065315377220389e-64, 2.0362992343457651e-61, 5.1742950935108869e-59, 1.0813687999683845e-56, 1.8798749780879993e-54, 2.7458133331828386e-52, 3.3999899766205468e-50, 3.5976656083923711e-48, 3.2766076271604045e-46, 2.5853058405449532e-44, 1.7776460048844266e-42, 1.0709222498944708e-40, 5.6804446369555694e-39, 2.6648525467274738e-37, 1.1102669670225965e-35, 4.1238340545301011e-34, 1.3703279298000201e-32, 4.0870885738671846e-31, 1.0974483844909332e-29, 2.6604572771951219e-28, 5.8380527272929378e-27, 1.1624651194307257e-25, 2.1051475426760218e-24, 3.4745776974200654e-23, 5.2372761374065798e-22, 7.2227982269547378e-21, 9.1298647949536611e-20, 1.0594921794825161e-18, 1.1305197522298609e-17, 1.1108048214201895e-16, 1.0064014746615927e-15, 8.4185702868695228e-15, 6.5097729971311742e-14, 4.6585402153758067e-13, 3.0885703725214258e-12, 1.8990163158421596e-11, 1.0838710379194289e-10, 5.7477002127859619e-10, 2.8342947222693205e-09, 1.3006943860141712e-08, 5.5591582741553296e-08, 2.2143724484223221e-07, 8.2259408138485364e-07, 2.8515507236878458e-06, 9.2296922270054013e-06, 2.7908549833980887e-05, 7.8876670613220481e-05, 2.0846012412302649e-04, 5.1540364930051158e-04, 1.1925993797900470e-03, 2.5835851277422653e-03, 5.2417582224628060e-03, 9.9629708516368199e-03, 1.7745045563417383e-02, 2.9624281938924940e-02, 4.6365470641524055e-02, 6.8045456872924986e-02, 9.3654947635056565e-02, 1.2090579766618584e-01, 1.4641795120378570e-01, 1.6634383944546130e-01, 1.7729937449709982e-01, 1.7729937449709982e-01, 1.6634383944546130e-01, 1.4641795120378570e-01, 1.2090579766618584e-01, 9.3654947635056565e-02, 6.8045456872924986e-02, 
4.6365470641524055e-02, 2.9624281938924940e-02, 1.7745045563417383e-02, 9.9629708516368199e-03, 5.2417582224628060e-03, 2.5835851277422653e-03, 1.1925993797900470e-03, 5.1540364930051158e-04, 2.0846012412302649e-04, 7.8876670613220481e-05, 2.7908549833980887e-05, 9.2296922270054013e-06, 2.8515507236878458e-06, 8.2259408138485364e-07, 2.2143724484223221e-07, 5.5591582741553296e-08, 1.3006943860141712e-08, 2.8342947222693205e-09, 5.7477002127859619e-10, 1.0838710379194289e-10, 1.8990163158421596e-11, 3.0885703725214258e-12, 4.6585402153758067e-13, 6.5097729971311742e-14, 8.4185702868695228e-15, 1.0064014746615927e-15, 1.1108048214201895e-16, 1.1305197522298609e-17, 1.0594921794825161e-18, 9.1298647949536611e-20, 7.2227982269547378e-21, 5.2372761374065798e-22, 3.4745776974200654e-23, 2.1051475426760218e-24, 1.1624651194307257e-25, 5.8380527272929378e-27, 2.6604572771951219e-28, 1.0974483844909332e-29, 4.0870885738671846e-31, 1.3703279298000201e-32, 4.1238340545301011e-34, 1.1102669670225965e-35, 2.6648525467274738e-37, 5.6804446369555694e-39, 1.0709222498944708e-40, 1.7776460048844266e-42, 2.5853058405449532e-44, 3.2766076271604045e-46, 3.5976656083923711e-48, 3.3999899766205468e-50, 2.7458133331828386e-52, 1.8798749780879993e-54, 1.0813687999683845e-56, 5.1742950935108869e-59, 2.0362992343457651e-61, 6.5065315377220389e-64, 1.6632668027272507e-66, 3.3439902942647502e-69, 5.1832685019930213e-72, 6.0500487002887184e-75, 5.1701750558611924e-78, 3.1260093916222602e-81, 1.2818424753415062e-84, 3.3789567793532434e-88, 5.3405557808694215e-92, 4.6079244634146070e-96, 1.9014873630323964e-100, 3.0768081483238432e-105, 1.4079328290981535e-110, 9.7625806330567035e-117, 2.0825660478904760e-124}, + {2.9742440544925765e-125, 1.4544742104415291e-117, 2.1732568643306002e-111, 4.9034921609592363e-106, 3.1218947762576099e-101, 7.7817715084363300e-97, 9.2663025096531715e-93, 6.0181049060844634e-89, 2.3418402716705127e-85, 5.8546944803309517e-82, 9.9220223776303372e-79, 1.1892107956658327e-75, 1.0431826368600407e-72, 6.8889436382474771e-70, 3.5064990884120332e-67, 1.4034428812712122e-64, 4.4930692768932806e-62, 1.1677337858216425e-59, 2.4957603118464674e-57, 4.4365856613493741e-55, 6.6258941032515879e-53, 8.3883165922512944e-51, 9.0744120512086595e-49, 8.4490059406933366e-47, 6.8149889325650219e-45, 4.7903177946790465e-43, 2.9501316492565748e-41, 1.5996806743635201e-39, 7.6718304654322104e-38, 3.2676791768964119e-36, 1.2408362449919783e-34, 4.2155847725706856e-33, 1.2855517878737815e-31, 3.5296150194332557e-30, 8.7497557637466758e-29, 1.9635223775022683e-27, 3.9986259229284162e-26, 7.4065187877720327e-25, 1.2504748686909515e-23, 1.9282493305400296e-22, 2.7207846792186873e-21, 3.5191108460642354e-20, 4.1792457685713468e-19, 4.5641951147736119e-18, 4.5905709762107554e-17, 4.2579719891425731e-16, 3.6469920289222789e-15, 2.8879673414801644e-14, 2.1167713706852141e-13, 1.4376384719405419e-12, 9.0565354971087218e-12, 5.2969771654748994e-11, 2.8789912587990587e-10, 1.4553534068219742e-09, 6.8479451315824872e-09, 3.0015309064632456e-08, 1.2263753200782247e-07, 4.6740187357094381e-07, 1.6627017535681513e-06, 5.5239309109102710e-06, 1.7148598641483128e-05, 4.9770974063404784e-05, 1.3511264030161956e-04, 3.4322471703307256e-04, 8.1620625227316382e-04, 1.8177000254213835e-03, 3.7922249171946417e-03, 7.4139737035166485e-03, 1.3586755499761058e-02, 2.3345243317359717e-02, 3.7618101204680170e-02, 5.6858711709023881e-02, 8.0625581158863605e-02, 1.0727209153833471e-01, 1.3393358950744624e-01, 1.5693526614765907e-01, 
1.7258695046281516e-01, 1.7814291547216715e-01, 1.7258695046281516e-01, 1.5693526614765907e-01, 1.3393358950744624e-01, 1.0727209153833471e-01, 8.0625581158863605e-02, 5.6858711709023881e-02, 3.7618101204680170e-02, 2.3345243317359717e-02, 1.3586755499761058e-02, 7.4139737035166485e-03, 3.7922249171946417e-03, 1.8177000254213835e-03, 8.1620625227316382e-04, 3.4322471703307256e-04, 1.3511264030161956e-04, 4.9770974063404784e-05, 1.7148598641483128e-05, 5.5239309109102710e-06, 1.6627017535681513e-06, 4.6740187357094381e-07, 1.2263753200782247e-07, 3.0015309064632456e-08, 6.8479451315824872e-09, 1.4553534068219742e-09, 2.8789912587990587e-10, 5.2969771654748994e-11, 9.0565354971087218e-12, 1.4376384719405419e-12, 2.1167713706852141e-13, 2.8879673414801644e-14, 3.6469920289222789e-15, 4.2579719891425731e-16, 4.5905709762107554e-17, 4.5641951147736119e-18, 4.1792457685713468e-19, 3.5191108460642354e-20, 2.7207846792186873e-21, 1.9282493305400296e-22, 1.2504748686909515e-23, 7.4065187877720327e-25, 3.9986259229284162e-26, 1.9635223775022683e-27, 8.7497557637466758e-29, 3.5296150194332557e-30, 1.2855517878737815e-31, 4.2155847725706856e-33, 1.2408362449919783e-34, 3.2676791768964119e-36, 7.6718304654322104e-38, 1.5996806743635201e-39, 2.9501316492565748e-41, 4.7903177946790465e-43, 6.8149889325650219e-45, 8.4490059406933366e-47, 9.0744120512086595e-49, 8.3883165922512944e-51, 6.6258941032515879e-53, 4.4365856613493741e-55, 2.4957603118464674e-57, 1.1677337858216425e-59, 4.4930692768932806e-62, 1.4034428812712122e-64, 3.5064990884120332e-67, 6.8889436382474771e-70, 1.0431826368600407e-72, 1.1892107956658327e-75, 9.9220223776303372e-79, 5.8546944803309517e-82, 2.3418402716705127e-85, 6.0181049060844634e-89, 9.2663025096531715e-93, 7.7817715084363300e-97, 3.1218947762576099e-101, 4.9034921609592363e-106, 2.1732568643306002e-111, 1.4544742104415291e-117, 2.9742440544925765e-125}, + {4.2467223153569913e-126, 2.1660313165253327e-118, 3.3526412389934468e-112, 7.8089506311734750e-107, 5.1211062371557049e-102, 1.3128438701986923e-97, 1.6059434510399780e-93, 1.0704932710164072e-89, 4.2723909537910979e-86, 1.0948479687576274e-82, 1.9009714466058216e-79, 2.3333732870895970e-76, 2.0954987289754821e-73, 1.4163011238027347e-70, 7.3764205708401192e-68, 3.0202620912677113e-65, 9.8899383346021114e-63, 2.6286294129074487e-60, 5.7447123101943819e-58, 1.0441146661857655e-55, 1.5941894796140349e-53, 2.0631744724119192e-51, 2.2815057723526744e-49, 2.1713655679015281e-47, 1.7902127496153674e-45, 1.2862015727088547e-43, 8.0963481474532886e-42, 4.4873140335977730e-40, 2.1997031006119584e-38, 9.5769420875142906e-37, 3.7173934630972095e-35, 1.2910298923887236e-33, 4.0247941719917949e-32, 1.1297479501227750e-30, 2.8633730681601511e-29, 6.5701662464695220e-28, 1.3681807173905664e-26, 2.5916454156396681e-25, 4.4751190749666469e-24, 7.0583497822483184e-23, 1.0188011695097018e-21, 1.3481278810188344e-20, 1.6381331947152628e-19, 1.8307185107770479e-18, 1.8844546640240470e-17, 1.7891286411123497e-16, 1.5687514682212088e-15, 1.2719076381502738e-14, 9.5465675547568851e-14, 6.6405134257786806e-13, 4.2851307836098596e-12, 2.5677582201989908e-11, 1.4301024597508902e-10, 7.4092720692344339e-10, 3.5737970851749710e-09, 1.6060537813990750e-08, 6.7293992111962037e-08, 2.6306912128825106e-07, 9.6009499955544809e-07, 3.2731466120984474e-06, 1.0429494937549579e-05, 3.1076452822469759e-05, 8.6632019755194801e-05, 2.2604661890533189e-04, 5.5229314310309603e-04, 1.2640347665701273e-03, 2.7109305106750477e-03, 5.4498805064709423e-03, 
1.0272859026411405e-02, 1.8161242168807858e-02, 3.0119768029475406e-02, 4.6870503949025956e-02, 6.8449129121212759e-02, 9.3826040853472042e-02, 1.2073175170965833e-01, 1.4585002297738520e-01, 1.6542893986636190e-01, 1.7618076008397315e-01, 1.7618076008397315e-01, 1.6542893986636190e-01, 1.4585002297738520e-01, 1.2073175170965833e-01, 9.3826040853472042e-02, 6.8449129121212759e-02, 4.6870503949025956e-02, 3.0119768029475406e-02, 1.8161242168807858e-02, 1.0272859026411405e-02, 5.4498805064709423e-03, 2.7109305106750477e-03, 1.2640347665701273e-03, 5.5229314310309603e-04, 2.2604661890533189e-04, 8.6632019755194801e-05, 3.1076452822469759e-05, 1.0429494937549579e-05, 3.2731466120984474e-06, 9.6009499955544809e-07, 2.6306912128825106e-07, 6.7293992111962037e-08, 1.6060537813990750e-08, 3.5737970851749710e-09, 7.4092720692344339e-10, 1.4301024597508902e-10, 2.5677582201989908e-11, 4.2851307836098596e-12, 6.6405134257786806e-13, 9.5465675547568851e-14, 1.2719076381502738e-14, 1.5687514682212088e-15, 1.7891286411123497e-16, 1.8844546640240470e-17, 1.8307185107770479e-18, 1.6381331947152628e-19, 1.3481278810188344e-20, 1.0188011695097018e-21, 7.0583497822483184e-23, 4.4751190749666469e-24, 2.5916454156396681e-25, 1.3681807173905664e-26, 6.5701662464695220e-28, 2.8633730681601511e-29, 1.1297479501227750e-30, 4.0247941719917949e-32, 1.2910298923887236e-33, 3.7173934630972095e-35, 9.5769420875142906e-37, 2.1997031006119584e-38, 4.4873140335977730e-40, 8.0963481474532886e-42, 1.2862015727088547e-43, 1.7902127496153674e-45, 2.1713655679015281e-47, 2.2815057723526744e-49, 2.0631744724119192e-51, 1.5941894796140349e-53, 1.0441146661857655e-55, 5.7447123101943819e-58, 2.6286294129074487e-60, 9.8899383346021114e-63, 3.0202620912677113e-65, 7.3764205708401192e-68, 1.4163011238027347e-70, 2.0954987289754821e-73, 2.3333732870895970e-76, 1.9009714466058216e-79, 1.0948479687576274e-82, 4.2723909537910979e-86, 1.0704932710164072e-89, 1.6059434510399780e-93, 1.3128438701986923e-97, 5.1211062371557049e-102, 7.8089506311734750e-107, 3.3526412389934468e-112, 2.1660313165253327e-118, 4.2467223153569913e-126}, + {6.0622191716662980e-127, 3.2243541187800939e-119, 5.1690742816237208e-113, 1.2426977160105404e-107, 8.3933287533777738e-103, 2.2126569238728289e-98, 2.7801189404282378e-94, 1.9017852686354782e-90, 7.7836404960844694e-87, 2.0443095742420186e-83, 3.6361329490246816e-80, 4.5702928776404487e-77, 4.2014053957085256e-74, 2.9059246516402931e-71, 1.5484204394570369e-68, 6.4850166385488236e-66, 2.1717290123179292e-63, 5.9022979463583546e-61, 1.3188160319392796e-58, 2.4504276066974761e-56, 3.8244861009984640e-54, 5.0591506359259702e-52, 5.7180427835175108e-50, 5.5619189779622956e-48, 4.6865094097082296e-46, 3.4411166261243821e-44, 2.2137173158797545e-42, 1.2539021346574782e-40, 6.2819019919798964e-39, 2.7952039819044771e-37, 1.1089145782361913e-35, 3.9362772026982286e-34, 1.2543049763002862e-32, 3.5989403271252947e-31, 9.3246229360624703e-30, 2.1873567871874991e-28, 4.6570323992852519e-27, 9.0198508148124766e-26, 1.5926655666987673e-24, 2.5689779244357800e-23, 3.7925166697437060e-22, 5.1332963733171119e-21, 6.3810270087780860e-20, 7.2960970893548457e-19, 7.6848696141461809e-18, 7.4667368566800348e-17, 6.7010236404093807e-16, 5.5616305380816712e-15, 4.2738351786402332e-14, 3.0441295544509839e-13, 2.0118060311112926e-12, 1.2348350013212526e-11, 7.0457887005315459e-11, 3.7404445985544352e-10, 1.8490262742729560e-09, 8.5176899942265963e-09, 3.6590845237002989e-08, 1.4668615688059134e-07, 5.4909587760916178e-07, 
1.9204753233550013e-06, 6.2793321142086111e-06, 1.9203890628511156e-05, 5.4960123823238632e-05, 1.4726036700166861e-04, 3.6956136752144001e-04, 8.6899815474605370e-04, 1.9153075674406669e-03, 3.9581151034971717e-03, 7.6718266966305298e-03, 1.3950429172087464e-02, 2.3804589918929770e-02, 3.8125218704480275e-02, 5.7322493628469834e-02, 8.0922651951388633e-02, 1.0727720003748800e-01, 1.3356293292585539e-01, 1.5618653283532280e-01, 1.7155657639082103e-01, 1.7700824722075201e-01, 1.7155657639082103e-01, 1.5618653283532280e-01, 1.3356293292585539e-01, 1.0727720003748800e-01, 8.0922651951388633e-02, 5.7322493628469834e-02, 3.8125218704480275e-02, 2.3804589918929770e-02, 1.3950429172087464e-02, 7.6718266966305298e-03, 3.9581151034971717e-03, 1.9153075674406669e-03, 8.6899815474605370e-04, 3.6956136752144001e-04, 1.4726036700166861e-04, 5.4960123823238632e-05, 1.9203890628511156e-05, 6.2793321142086111e-06, 1.9204753233550013e-06, 5.4909587760916178e-07, 1.4668615688059134e-07, 3.6590845237002989e-08, 8.5176899942265963e-09, 1.8490262742729560e-09, 3.7404445985544352e-10, 7.0457887005315459e-11, 1.2348350013212526e-11, 2.0118060311112926e-12, 3.0441295544509839e-13, 4.2738351786402332e-14, 5.5616305380816712e-15, 6.7010236404093807e-16, 7.4667368566800348e-17, 7.6848696141461809e-18, 7.2960970893548457e-19, 6.3810270087780860e-20, 5.1332963733171119e-21, 3.7925166697437060e-22, 2.5689779244357800e-23, 1.5926655666987673e-24, 9.0198508148124766e-26, 4.6570323992852519e-27, 2.1873567871874991e-28, 9.3246229360624703e-30, 3.5989403271252947e-31, 1.2543049763002862e-32, 3.9362772026982286e-34, 1.1089145782361913e-35, 2.7952039819044771e-37, 6.2819019919798964e-39, 1.2539021346574782e-40, 2.2137173158797545e-42, 3.4411166261243821e-44, 4.6865094097082296e-46, 5.5619189779622956e-48, 5.7180427835175108e-50, 5.0591506359259702e-52, 3.8244861009984640e-54, 2.4504276066974761e-56, 1.3188160319392796e-58, 5.9022979463583546e-61, 2.1717290123179292e-63, 6.4850166385488236e-66, 1.5484204394570369e-68, 2.9059246516402931e-71, 4.2014053957085256e-74, 4.5702928776404487e-77, 3.6361329490246816e-80, 2.0443095742420186e-83, 7.7836404960844694e-87, 1.9017852686354782e-90, 2.7801189404282378e-94, 2.2126569238728289e-98, 8.3933287533777738e-103, 1.2426977160105404e-107, 5.1690742816237208e-113, 3.2243541187800939e-119, 6.0622191716662980e-127}, + {8.6518898286288247e-128, 4.7977981754547922e-120, 7.9650930361988897e-114, 1.9761840987326553e-108, 1.3744652069324438e-103, 3.7255148645554058e-99, 4.8074121331969427e-95, 3.3744153060134342e-91, 1.4161180265335159e-87, 3.8114504571794760e-84, 6.9438585136020447e-81, 8.9360817516471687e-78, 8.4079759000226087e-75, 5.9504405763416772e-72, 3.2435033695316742e-69, 1.3893312175841125e-66, 4.7576408366455308e-64, 1.3220024862368775e-61, 3.0197052622687561e-59, 5.7351470833255443e-57, 9.1486995119191431e-55, 1.2368470663040083e-52, 1.4286078076592960e-50, 1.4200335880405776e-48, 1.2226957787809403e-46, 9.1739205502621965e-45, 6.0306042099204429e-43, 3.4904884601063066e-41, 1.7869095381531890e-39, 8.1249855483381444e-38, 3.2939528684311649e-36, 1.1948989473488384e-34, 3.8912956469188826e-33, 1.1411267330350244e-31, 3.0219294698888464e-30, 7.2459465976943397e-29, 1.5770256477583313e-27, 3.1226061139317796e-26, 5.6372579181684656e-25, 9.2975592528325581e-24, 1.4036014280320261e-22, 1.9429670019227641e-21, 2.4703591186430466e-20, 2.8894203773544814e-19, 3.1135796480328694e-18, 3.0953725694775336e-17, 2.8427562189767261e-16, 2.4147784190996735e-15, 1.8994699453982887e-14, 
1.3851048606782092e-13, 9.3729953674226721e-13, 5.8917523372705995e-12, 3.4433525991626855e-11, 1.8726953725774312e-10, 9.4854377303416358e-10, 4.4780357696096071e-09, 1.9718477984120434e-08, 8.1042320257489110e-08, 3.1108717616071734e-07, 1.1159535961120921e-06, 3.7432563998726419e-06, 1.1746874098316171e-05, 3.4504766489542566e-05, 9.4911766414585672e-05, 2.4458580745649063e-04, 5.9072464709476020e-04, 1.3376446613946990e-03, 2.8408258176521148e-03, 5.6601830605110866e-03, 1.0583273905359803e-02, 1.8574752611538813e-02, 3.0608185223036027e-02, 4.7364212800332400e-02, 6.8839337618374755e-02, 9.3985530465238165e-02, 1.2055262431349997e-01, 1.4528684159808489e-01, 1.6452871112826625e-01, 1.7508305167467292e-01, 1.7508305167467292e-01, 1.6452871112826625e-01, 1.4528684159808489e-01, 1.2055262431349997e-01, 9.3985530465238165e-02, 6.8839337618374755e-02, 4.7364212800332400e-02, 3.0608185223036027e-02, 1.8574752611538813e-02, 1.0583273905359803e-02, 5.6601830605110866e-03, 2.8408258176521148e-03, 1.3376446613946990e-03, 5.9072464709476020e-04, 2.4458580745649063e-04, 9.4911766414585672e-05, 3.4504766489542566e-05, 1.1746874098316171e-05, 3.7432563998726419e-06, 1.1159535961120921e-06, 3.1108717616071734e-07, 8.1042320257489110e-08, 1.9718477984120434e-08, 4.4780357696096071e-09, 9.4854377303416358e-10, 1.8726953725774312e-10, 3.4433525991626855e-11, 5.8917523372705995e-12, 9.3729953674226721e-13, 1.3851048606782092e-13, 1.8994699453982887e-14, 2.4147784190996735e-15, 2.8427562189767261e-16, 3.0953725694775336e-17, 3.1135796480328694e-18, 2.8894203773544814e-19, 2.4703591186430466e-20, 1.9429670019227641e-21, 1.4036014280320261e-22, 9.2975592528325581e-24, 5.6372579181684656e-25, 3.1226061139317796e-26, 1.5770256477583313e-27, 7.2459465976943397e-29, 3.0219294698888464e-30, 1.1411267330350244e-31, 3.8912956469188826e-33, 1.1948989473488384e-34, 3.2939528684311649e-36, 8.1249855483381444e-38, 1.7869095381531890e-39, 3.4904884601063066e-41, 6.0306042099204429e-43, 9.1739205502621965e-45, 1.2226957787809403e-46, 1.4200335880405776e-48, 1.4286078076592960e-50, 1.2368470663040083e-52, 9.1486995119191431e-55, 5.7351470833255443e-57, 3.0197052622687561e-59, 1.3220024862368775e-61, 4.7576408366455308e-64, 1.3893312175841125e-66, 3.2435033695316742e-69, 5.9504405763416772e-72, 8.4079759000226087e-75, 8.9360817516471687e-78, 6.9438585136020447e-81, 3.8114504571794760e-84, 1.4161180265335159e-87, 3.3744153060134342e-91, 4.8074121331969427e-95, 3.7255148645554058e-99, 1.3744652069324438e-103, 1.9761840987326553e-108, 7.9650930361988897e-114, 4.7977981754547922e-120, 8.6518898286288247e-128}, + {1.2345052237856376e-128, 7.1361565819334150e-121, 1.2266594150983389e-114, 3.1403768309606574e-109, 2.2488806415651994e-104, 6.2666361422341182e-100, 8.3038517132199445e-96, 5.9799996465821154e-92, 2.5729286682716097e-88, 7.0956475615088519e-85, 1.3239358802678529e-81, 1.7442238644011473e-78, 1.6795285148575302e-75, 1.2160739161745740e-72, 6.7800483155694390e-70, 2.9698887117979696e-67, 1.0398334915263462e-64, 2.9537631885955336e-62, 6.8964075076110458e-60, 1.3386610121664405e-57, 2.1822972256453033e-55, 3.0148592722499396e-53, 3.5582391595026354e-51, 3.6138684655576511e-49, 3.1792871272966973e-47, 2.4372213779029665e-45, 1.6369116080171861e-43, 9.6799943605644117e-42, 5.0631425091072337e-40, 2.3522144528005868e-38, 9.7436152093981038e-37, 3.6115822674301904e-35, 1.2018288490038798e-33, 3.6015212879811887e-32, 9.7468625406557925e-31, 2.3885355396506504e-29, 5.3132702519188496e-28, 1.0753756973350715e-26, 
1.9845716557598769e-25, 3.3462806594548906e-24, 5.1650319326919827e-23, 7.3109577835953338e-22, 9.5059376788875581e-21, 1.1371572178214573e-19, 1.2534183670047640e-18, 1.2747621562807485e-17, 1.1978221703435283e-16, 1.0411788127158037e-15, 8.3817954183785831e-15, 6.2561571481732302e-14, 4.3340112417705868e-13, 2.7894047381925510e-12, 1.6694572089431568e-11, 9.2995420785846948e-11, 4.8253510985881196e-10, 2.3340778029825478e-09, 1.0532686874011172e-08, 4.4371096492273068e-08, 1.7461444759106688e-07, 6.4230905783791856e-07, 2.2097354109635801e-06, 7.1138004485475131e-06, 2.1441047304603760e-05, 6.0530803203101914e-05, 1.6013349936228681e-04, 3.9713567276994117e-04, 9.2365343040052964e-04, 2.0153124277213861e-03, 4.1264467864295497e-03, 7.9311452492789869e-03, 1.4313136262234679e-02, 2.4259110409796711e-02, 3.8623061726879737e-02, 5.7773645171755317e-02, 8.1206842893413358e-02, 1.0727363982645181e-01, 1.3319196312197995e-01, 1.5544764635273797e-01, 1.7054438857615731e-01, 1.7589498780426926e-01, 1.7054438857615731e-01, 1.5544764635273797e-01, 1.3319196312197995e-01, 1.0727363982645181e-01, 8.1206842893413358e-02, 5.7773645171755317e-02, 3.8623061726879737e-02, 2.4259110409796711e-02, 1.4313136262234679e-02, 7.9311452492789869e-03, 4.1264467864295497e-03, 2.0153124277213861e-03, 9.2365343040052964e-04, 3.9713567276994117e-04, 1.6013349936228681e-04, 6.0530803203101914e-05, 2.1441047304603760e-05, 7.1138004485475131e-06, 2.2097354109635801e-06, 6.4230905783791856e-07, 1.7461444759106688e-07, 4.4371096492273068e-08, 1.0532686874011172e-08, 2.3340778029825478e-09, 4.8253510985881196e-10, 9.2995420785846948e-11, 1.6694572089431568e-11, 2.7894047381925510e-12, 4.3340112417705868e-13, 6.2561571481732302e-14, 8.3817954183785831e-15, 1.0411788127158037e-15, 1.1978221703435283e-16, 1.2747621562807485e-17, 1.2534183670047640e-18, 1.1371572178214573e-19, 9.5059376788875581e-21, 7.3109577835953338e-22, 5.1650319326919827e-23, 3.3462806594548906e-24, 1.9845716557598769e-25, 1.0753756973350715e-26, 5.3132702519188496e-28, 2.3885355396506504e-29, 9.7468625406557925e-31, 3.6015212879811887e-32, 1.2018288490038798e-33, 3.6115822674301904e-35, 9.7436152093981038e-37, 2.3522144528005868e-38, 5.0631425091072337e-40, 9.6799943605644117e-42, 1.6369116080171861e-43, 2.4372213779029665e-45, 3.1792871272966973e-47, 3.6138684655576511e-49, 3.5582391595026354e-51, 3.0148592722499396e-53, 2.1822972256453033e-55, 1.3386610121664405e-57, 6.8964075076110458e-60, 2.9537631885955336e-62, 1.0398334915263462e-64, 2.9698887117979696e-67, 6.7800483155694390e-70, 1.2160739161745740e-72, 1.6795285148575302e-75, 1.7442238644011473e-78, 1.3239358802678529e-81, 7.0956475615088519e-85, 2.5729286682716097e-88, 5.9799996465821154e-92, 8.3038517132199445e-96, 6.2666361422341182e-100, 2.2488806415651994e-104, 3.1403768309606574e-109, 1.2266594150983389e-114, 7.1361565819334150e-121, 1.2345052237856376e-128}, + {1.7610777213868225e-129, 1.0609912453053086e-121, 1.8880558326632475e-115, 4.9869144791066677e-110, 3.6765138012657772e-105, 1.0530846293412266e-100, 1.4327597465974048e-96, 1.0584620865577655e-92, 4.6684664948676272e-89, 1.3190436630179440e-85, 2.5202632570592097e-82, 3.3987392362121908e-79, 3.3488181154099359e-76, 2.4804288326920011e-73, 1.4143430079239634e-70, 6.3346915410241234e-68, 2.2674306256941818e-65, 6.5836081498721399e-63, 1.5709872468845785e-60, 3.1162632356321267e-58, 5.1910061298697851e-56, 7.3273399650957289e-54, 8.8354796635334199e-52, 9.1677552430019203e-50, 8.2395121572045278e-48, 6.4526516804970545e-46, 
4.4272600478065905e-44, 2.6745508857556167e-42, 1.4291089046778579e-40, 6.7826401946708970e-39, 2.8703150932676095e-37, 1.0869482703973487e-35, 3.6955025716069553e-34, 1.1315067191371783e-32, 3.1289601063429292e-31, 7.8353356972027161e-30, 1.7811808982463465e-28, 3.6843324819342597e-27, 6.9494876383385912e-26, 1.1977696964648504e-24, 1.8899444794564548e-23, 2.7350044749786203e-22, 3.6360647433609545e-21, 4.4479270726420680e-20, 5.0139935781581053e-19, 5.2157884028434802e-18, 5.0135017597656447e-17, 4.4585078370462008e-16, 3.6726238537048878e-15, 2.8053332193363147e-14, 1.9891563745557595e-13, 1.3105682814703333e-12, 8.0308637715632703e-12, 4.5809917511320124e-11, 2.4345153103464099e-10, 1.2063179828327093e-09, 5.5773560719102751e-09, 2.4077676519331747e-08, 9.7119175861531978e-08, 3.6624118990378204e-07, 1.2919701008614705e-06, 4.2657607173255093e-06, 1.3189244631563864e-05, 3.8205770006826046e-05, 1.0373273668170783e-04, 2.6409447862081663e-04, 6.3070332659971552e-04, 1.4134038747122414e-03, 2.9731958710714898e-03, 5.8725347587191210e-03, 1.0894054653347555e-02, 1.8985454327504647e-02, 3.1089543185356148e-02, 4.7846811856586229e-02, 6.9216492829879864e-02, 9.4133897771831843e-02, 1.2036875882166113e-01, 1.4472841947349580e-01, 1.6364277404586250e-01, 1.7400560617482017e-01, 1.7400560617482017e-01, 1.6364277404586250e-01, 1.4472841947349580e-01, 1.2036875882166113e-01, 9.4133897771831843e-02, 6.9216492829879864e-02, 4.7846811856586229e-02, 3.1089543185356148e-02, 1.8985454327504647e-02, 1.0894054653347555e-02, 5.8725347587191210e-03, 2.9731958710714898e-03, 1.4134038747122414e-03, 6.3070332659971552e-04, 2.6409447862081663e-04, 1.0373273668170783e-04, 3.8205770006826046e-05, 1.3189244631563864e-05, 4.2657607173255093e-06, 1.2919701008614705e-06, 3.6624118990378204e-07, 9.7119175861531978e-08, 2.4077676519331747e-08, 5.5773560719102751e-09, 1.2063179828327093e-09, 2.4345153103464099e-10, 4.5809917511320124e-11, 8.0308637715632703e-12, 1.3105682814703333e-12, 1.9891563745557595e-13, 2.8053332193363147e-14, 3.6726238537048878e-15, 4.4585078370462008e-16, 5.0135017597656447e-17, 5.2157884028434802e-18, 5.0139935781581053e-19, 4.4479270726420680e-20, 3.6360647433609545e-21, 2.7350044749786203e-22, 1.8899444794564548e-23, 1.1977696964648504e-24, 6.9494876383385912e-26, 3.6843324819342597e-27, 1.7811808982463465e-28, 7.8353356972027161e-30, 3.1289601063429292e-31, 1.1315067191371783e-32, 3.6955025716069553e-34, 1.0869482703973487e-35, 2.8703150932676095e-37, 6.7826401946708970e-39, 1.4291089046778579e-40, 2.6745508857556167e-42, 4.4272600478065905e-44, 6.4526516804970545e-46, 8.2395121572045278e-48, 9.1677552430019203e-50, 8.8354796635334199e-52, 7.3273399650957289e-54, 5.1910061298697851e-56, 3.1162632356321267e-58, 1.5709872468845785e-60, 6.5836081498721399e-63, 2.2674306256941818e-65, 6.3346915410241234e-68, 1.4143430079239634e-70, 2.4804288326920011e-73, 3.3488181154099359e-76, 3.3987392362121908e-79, 2.5202632570592097e-82, 1.3190436630179440e-85, 4.6684664948676272e-89, 1.0584620865577655e-92, 1.4327597465974048e-96, 1.0530846293412266e-100, 3.6765138012657772e-105, 4.9869144791066677e-110, 1.8880558326632475e-115, 1.0609912453053086e-121, 1.7610777213868225e-129}, + {2.5117056700120872e-130, 1.5768344953215994e-122, 2.9044636530400262e-116, 7.9137283674998714e-111, 6.0054708822621273e-106, 1.7679796472043246e-101, 2.4694368635244847e-97, 1.8712284923478763e-93, 8.4595142859097527e-90, 2.4484925972797376e-86, 4.7901098788291739e-83, 6.6115362451933797e-80, 6.6651975922997095e-77, 
5.0496259060528478e-74, 2.9443577961646315e-71, 1.3482570840246656e-68, 4.9330325826042063e-66, 1.4638938206976037e-63, 3.5696603658904023e-61, 7.2351735603462302e-59, 1.2313651255661928e-56, 1.7756983991411928e-54, 2.1873288432589287e-52, 2.3183952076674861e-50, 2.1283904185528713e-48, 1.7025628600075561e-46, 1.1931897574643388e-44, 7.3626408195179616e-43, 4.0184547253394729e-41, 1.9480947197049647e-39, 8.4210939788616602e-38, 3.2575341809749061e-36, 1.1313879341749972e-34, 3.5389381244383855e-33, 9.9980778309899927e-32, 2.5579997011243763e-30, 5.9416331503292959e-29, 1.2558645009560268e-27, 2.4207882624773725e-26, 4.2641635772547449e-25, 6.8770962222233684e-24, 1.0173048420715203e-22, 1.3826289575432440e-21, 1.7292550621372860e-20, 1.9932479444365377e-19, 2.1204345027937428e-18, 2.0846204533947437e-17, 1.8963226940193161e-16, 1.5980648038540025e-15, 1.2489882385266198e-14, 9.0627897868794926e-14, 6.1113514542896781e-13, 3.8334734795673616e-12, 2.2387867842316195e-11, 1.2183208011102774e-10, 6.1827585936607041e-10, 2.9281826160122555e-09, 1.2951325071150348e-08, 5.3532512893353530e-08, 2.0690866853712093e-07, 7.4825803592771974e-07, 2.5332274890078433e-06, 8.0328671931181133e-06, 2.3869848947287937e-05, 6.6497705172595715e-05, 1.7374941584438457e-04, 4.2595976788697434e-04, 9.8016363676181151e-04, 2.1176656262735217e-03, 4.2971163011999540e-03, 8.1917798784339399e-03, 1.4674728608435711e-02, 2.4708741311255288e-02, 3.9111739287652818e-02, 5.8212483797557772e-02, 8.1478613249930595e-02, 1.0726183985947185e-01, 1.3282086880025051e-01, 1.5471842089261012e-01, 1.6954985984858575e-01, 1.7480247235206883e-01, 1.6954985984858575e-01, 1.5471842089261012e-01, 1.3282086880025051e-01, 1.0726183985947185e-01, 8.1478613249930595e-02, 5.8212483797557772e-02, 3.9111739287652818e-02, 2.4708741311255288e-02, 1.4674728608435711e-02, 8.1917798784339399e-03, 4.2971163011999540e-03, 2.1176656262735217e-03, 9.8016363676181151e-04, 4.2595976788697434e-04, 1.7374941584438457e-04, 6.6497705172595715e-05, 2.3869848947287937e-05, 8.0328671931181133e-06, 2.5332274890078433e-06, 7.4825803592771974e-07, 2.0690866853712093e-07, 5.3532512893353530e-08, 1.2951325071150348e-08, 2.9281826160122555e-09, 6.1827585936607041e-10, 1.2183208011102774e-10, 2.2387867842316195e-11, 3.8334734795673616e-12, 6.1113514542896781e-13, 9.0627897868794926e-14, 1.2489882385266198e-14, 1.5980648038540025e-15, 1.8963226940193161e-16, 2.0846204533947437e-17, 2.1204345027937428e-18, 1.9932479444365377e-19, 1.7292550621372860e-20, 1.3826289575432440e-21, 1.0173048420715203e-22, 6.8770962222233684e-24, 4.2641635772547449e-25, 2.4207882624773725e-26, 1.2558645009560268e-27, 5.9416331503292959e-29, 2.5579997011243763e-30, 9.9980778309899927e-32, 3.5389381244383855e-33, 1.1313879341749972e-34, 3.2575341809749061e-36, 8.4210939788616602e-38, 1.9480947197049647e-39, 4.0184547253394729e-41, 7.3626408195179616e-43, 1.1931897574643388e-44, 1.7025628600075561e-46, 2.1283904185528713e-48, 2.3183952076674861e-50, 2.1873288432589287e-52, 1.7756983991411928e-54, 1.2313651255661928e-56, 7.2351735603462302e-59, 3.5696603658904023e-61, 1.4638938206976037e-63, 4.9330325826042063e-66, 1.3482570840246656e-68, 2.9443577961646315e-71, 5.0496259060528478e-74, 6.6651975922997095e-77, 6.6115362451933797e-80, 4.7901098788291739e-83, 2.4484925972797376e-86, 8.4595142859097527e-90, 1.8712284923478763e-93, 2.4694368635244847e-97, 1.7679796472043246e-101, 6.0054708822621273e-106, 7.9137283674998714e-111, 2.9044636530400262e-116, 1.5768344953215994e-122, 
2.5117056700120872e-130}, + {3.5814970887800193e-131, 2.3425513711144864e-123, 4.4656005878580503e-117, 1.2549679546936818e-111, 9.8017329285864726e-107, 2.9653839872557489e-102, 4.2516584957198355e-98, 3.3041628702723710e-94, 1.5309029297604490e-90, 4.5385580031078324e-87, 9.0901963341946014e-84, 1.2839971949179289e-80, 1.3242215741981033e-77, 1.0260459032147593e-74, 6.1171687686353920e-72, 2.8634705175373264e-69, 1.0708169155525760e-66, 3.2473167974844921e-64, 8.0909309792517176e-62, 1.6754390708913517e-59, 2.9129572580930014e-57, 4.2909222783948995e-55, 5.3988684348783699e-53, 5.8447003073435629e-51, 5.4802004204685326e-49, 4.4772141969183521e-47, 3.2045539320239441e-45, 2.0194963974060789e-43, 1.1256982521722169e-41, 5.5735541782797091e-40, 2.4607034140503088e-38, 9.7221219580876439e-37, 3.4489022958731964e-35, 1.1019416508568278e-33, 3.1800971964673699e-32, 8.3116537305566989e-31, 1.9723475111279380e-29, 4.2593300365187824e-28, 8.3889785533018652e-27, 1.5099943444312937e-25, 2.4887063965856610e-24, 3.7625921372478096e-23, 5.2269932699489526e-22, 6.6828358472667806e-21, 7.8752810946470447e-20, 8.5660876502561181e-19, 8.6117182742222153e-18, 8.0118871420831184e-17, 6.9061126848313650e-16, 5.5217099395406411e-15, 4.0993475210626060e-14, 2.8287311125546069e-13, 1.8159956877481757e-12, 1.0856048964794633e-11, 6.0482344698906487e-11, 3.1429016861706717e-10, 1.5244189642430853e-09, 6.9064794339712537e-09, 2.9246755983618971e-08, 1.1583508882541563e-07, 4.2933935551544649e-07, 1.4900490066410718e-06, 4.8446895241968929e-06, 1.4764162429716271e-05, 4.2191721743284952e-05, 1.1311130499099743e-04, 2.8458825008902025e-04, 6.7223242445783719e-04, 1.4912851650067261e-03, 3.1079648052231286e-03, 6.0868070844442837e-03, 1.1205047384266207e-02, 1.9393234478244935e-02, 3.1563859231069517e-02, 4.8318515012084520e-02, 6.9580992429206442e-02, 9.4271602476518737e-02, 1.2018047849522055e-01, 1.4417476260347803e-01, 1.6277076260081147e-01, 1.7294780784506847e-01, 1.7294780784506847e-01, 1.6277076260081147e-01, 1.4417476260347803e-01, 1.2018047849522055e-01, 9.4271602476518737e-02, 6.9580992429206442e-02, 4.8318515012084520e-02, 3.1563859231069517e-02, 1.9393234478244935e-02, 1.1205047384266207e-02, 6.0868070844442837e-03, 3.1079648052231286e-03, 1.4912851650067261e-03, 6.7223242445783719e-04, 2.8458825008902025e-04, 1.1311130499099743e-04, 4.2191721743284952e-05, 1.4764162429716271e-05, 4.8446895241968929e-06, 1.4900490066410718e-06, 4.2933935551544649e-07, 1.1583508882541563e-07, 2.9246755983618971e-08, 6.9064794339712537e-09, 1.5244189642430853e-09, 3.1429016861706717e-10, 6.0482344698906487e-11, 1.0856048964794633e-11, 1.8159956877481757e-12, 2.8287311125546069e-13, 4.0993475210626060e-14, 5.5217099395406411e-15, 6.9061126848313650e-16, 8.0118871420831184e-17, 8.6117182742222153e-18, 8.5660876502561181e-19, 7.8752810946470447e-20, 6.6828358472667806e-21, 5.2269932699489526e-22, 3.7625921372478096e-23, 2.4887063965856610e-24, 1.5099943444312937e-25, 8.3889785533018652e-27, 4.2593300365187824e-28, 1.9723475111279380e-29, 8.3116537305566989e-31, 3.1800971964673699e-32, 1.1019416508568278e-33, 3.4489022958731964e-35, 9.7221219580876439e-37, 2.4607034140503088e-38, 5.5735541782797091e-40, 1.1256982521722169e-41, 2.0194963974060789e-43, 3.2045539320239441e-45, 4.4772141969183521e-47, 5.4802004204685326e-49, 5.8447003073435629e-51, 5.3988684348783699e-53, 4.2909222783948995e-55, 2.9129572580930014e-57, 1.6754390708913517e-59, 8.0909309792517176e-62, 3.2473167974844921e-64, 1.0708169155525760e-66, 
2.8634705175373264e-69, 6.1171687686353920e-72, 1.0260459032147593e-74, 1.3242215741981033e-77, 1.2839971949179289e-80, 9.0901963341946014e-84, 4.5385580031078324e-87, 1.5309029297604490e-90, 3.3041628702723710e-94, 4.2516584957198355e-98, 2.9653839872557489e-102, 9.8017329285864726e-107, 1.2549679546936818e-111, 4.4656005878580503e-117, 2.3425513711144864e-123, 3.5814970887800193e-131}, + {5.1058382388659269e-132, 3.4787452134261360e-124, 6.8621328476606254e-118, 1.9887929527241800e-112, 1.5984806988768767e-107, 4.9691113060541562e-103, 7.3123967185564915e-99, 5.8275341127995904e-95, 2.7668599413210135e-91, 8.4008516795919007e-88, 1.7224108182682644e-84, 2.4894935837631581e-81, 2.6262929329466987e-78, 2.0809367058140554e-75, 1.2683653376027615e-72, 6.0687062439892660e-70, 2.3192577160038707e-67, 7.1865618890614741e-65, 1.8293638224274986e-62, 3.8697820973543049e-60, 6.8723829518191775e-58, 1.0339639913382959e-55, 1.3286530592326351e-53, 1.4689382904493016e-51, 1.4065458992870208e-49, 1.1734649400073276e-47, 8.5768492248869891e-46, 5.5194857702963127e-44, 3.1417689022491827e-42, 1.5884965434373284e-40, 7.1618254982458697e-39, 2.8896676701682861e-37, 1.0469000850542053e-35, 3.4161633966903074e-34, 1.0069252468038614e-32, 2.6880974156578509e-31, 6.5158094363462302e-30, 1.4374174204375062e-28, 2.8922727144899803e-27, 5.3189806528533866e-26, 8.9574942952228153e-25, 1.3838820621840956e-23, 1.9647377828565719e-22, 2.5674274750397857e-21, 3.0926768131345034e-20, 3.4389885933401063e-19, 3.5348317750763591e-18, 3.3627744521880394e-17, 2.9643952991648909e-16, 2.4242278762902340e-15, 1.8410778220221309e-14, 1.2997777867558265e-13, 8.5384118147611980e-13, 5.2237944008745323e-12, 2.9789583264039043e-11, 1.5847474437596250e-10, 7.8704836352169759e-10, 3.6517281221510981e-09, 1.5839569546032163e-08, 6.4270494029387501e-08, 2.4409702642671850e-07, 8.6823986929119786e-07, 2.8938252044645185e-06, 9.0422158488303162e-06, 2.6500151909252169e-05, 7.2875309659252541e-05, 1.8812470050977485e-04, 4.5604422468456632e-04, 1.0385182654144745e-03, 2.2223166735266457e-03, 4.4700208091087738e-03, 8.4535858780912364e-03, 1.5035066629455310e-02, 2.5153428292907908e-02, 3.9591364280961522e-02, 5.8639320239639699e-02, 8.1738404552485058e-02, 1.0724220730838535e-01, 1.3244982464015054e-01, 1.5399867398459949e-01, 1.6857248411687309e-01, 1.7373006454622800e-01, 1.6857248411687309e-01, 1.5399867398459949e-01, 1.3244982464015054e-01, 1.0724220730838535e-01, 8.1738404552485058e-02, 5.8639320239639699e-02, 3.9591364280961522e-02, 2.5153428292907908e-02, 1.5035066629455310e-02, 8.4535858780912364e-03, 4.4700208091087738e-03, 2.2223166735266457e-03, 1.0385182654144745e-03, 4.5604422468456632e-04, 1.8812470050977485e-04, 7.2875309659252541e-05, 2.6500151909252169e-05, 9.0422158488303162e-06, 2.8938252044645185e-06, 8.6823986929119786e-07, 2.4409702642671850e-07, 6.4270494029387501e-08, 1.5839569546032163e-08, 3.6517281221510981e-09, 7.8704836352169759e-10, 1.5847474437596250e-10, 2.9789583264039043e-11, 5.2237944008745323e-12, 8.5384118147611980e-13, 1.2997777867558265e-13, 1.8410778220221309e-14, 2.4242278762902340e-15, 2.9643952991648909e-16, 3.3627744521880394e-17, 3.5348317750763591e-18, 3.4389885933401063e-19, 3.0926768131345034e-20, 2.5674274750397857e-21, 1.9647377828565719e-22, 1.3838820621840956e-23, 8.9574942952228153e-25, 5.3189806528533866e-26, 2.8922727144899803e-27, 1.4374174204375062e-28, 6.5158094363462302e-30, 2.6880974156578509e-31, 1.0069252468038614e-32, 3.4161633966903074e-34, 1.0469000850542053e-35, 
2.8896676701682861e-37, 7.1618254982458697e-39, 1.5884965434373284e-40, 3.1417689022491827e-42, 5.5194857702963127e-44, 8.5768492248869891e-46, 1.1734649400073276e-47, 1.4065458992870208e-49, 1.4689382904493016e-51, 1.3286530592326351e-53, 1.0339639913382959e-55, 6.8723829518191775e-58, 3.8697820973543049e-60, 1.8293638224274986e-62, 7.1865618890614741e-65, 2.3192577160038707e-67, 6.0687062439892660e-70, 1.2683653376027615e-72, 2.0809367058140554e-75, 2.6262929329466987e-78, 2.4894935837631581e-81, 1.7224108182682644e-84, 8.4008516795919007e-88, 2.7668599413210135e-91, 5.8275341127995904e-95, 7.3123967185564915e-99, 4.9691113060541562e-103, 1.5984806988768767e-107, 1.9887929527241800e-112, 6.8621328476606254e-118, 3.4787452134261360e-124, 5.1058382388659269e-132}, + {7.2774136808374355e-133, 5.1640253363781065e-125, 1.0539163395229755e-118, 3.1495974864069521e-113, 2.6047400909049849e-108, 8.3190735956096083e-104, 1.2563394260844383e-99, 1.0266029334944310e-95, 4.9942483588606006e-92, 1.5528225080749898e-88, 3.2586916028699435e-85, 4.8189357949651464e-82, 5.1995900683110418e-79, 4.2125456733725475e-76, 2.6247146396219082e-73, 1.2834925357232391e-70, 5.0121772980919659e-68, 1.5867585378958465e-65, 4.1261409970889649e-63, 8.9152934736506373e-61, 1.6170383864591641e-58, 2.4845512749608618e-56, 3.2602867516914131e-54, 3.6806734534292869e-52, 3.5986560950072034e-50, 3.0655422197589668e-48, 2.2877484525825812e-46, 1.5032084002307616e-44, 8.7364621051992394e-43, 4.5101690564191873e-41, 2.0762677615749785e-39, 8.5540504547638604e-38, 3.1645195547118431e-36, 1.0544772050999357e-34, 3.1740376151073218e-33, 8.6536469704170907e-32, 2.1423361912655937e-30, 4.8272033657573108e-29, 9.9214758418835271e-28, 1.8639024742210292e-26, 3.2068221113110570e-25, 5.0619653821736361e-24, 7.3433969692709968e-23, 9.8063138469438635e-22, 1.2072647224673922e-20, 1.3721655943148051e-19, 1.4417902155501092e-18, 1.4022988180739114e-17, 1.2639871956315117e-16, 1.0570613866427036e-15, 8.2106561126223668e-15, 5.9294550957769600e-14, 3.9849788475246675e-13, 2.4946167855672290e-12, 1.4558577681248529e-11, 7.9272325104664409e-11, 4.0303485756427884e-10, 1.9146717079546461e-09, 8.5049436556100333e-09, 3.5346942618254887e-08, 1.3752997102467001e-07, 5.0124990214178011e-07, 1.7121991444326337e-06, 5.4842169146680313e-06, 1.6479308921225968e-05, 4.6474831646589210e-05, 1.2306336370474281e-04, 3.0608156336295680e-04, 7.1531332284984861e-04, 1.5712593917201550e-03, 3.2450562679797843e-03, 6.3028742438160333e-03, 1.1516105006399560e-02, 1.9797989451736150e-02, 3.2031157662458190e-02, 4.8779535009276516e-02, 6.9933221621689870e-02, 9.4399083753525778e-02, 1.1998808778160919e-01, 1.4362587120152381e-01, 1.6191232338499803e-01, 1.7190906682322146e-01, 1.7190906682322146e-01, 1.6191232338499803e-01, 1.4362587120152381e-01, 1.1998808778160919e-01, 9.4399083753525778e-02, 6.9933221621689870e-02, 4.8779535009276516e-02, 3.2031157662458190e-02, 1.9797989451736150e-02, 1.1516105006399560e-02, 6.3028742438160333e-03, 3.2450562679797843e-03, 1.5712593917201550e-03, 7.1531332284984861e-04, 3.0608156336295680e-04, 1.2306336370474281e-04, 4.6474831646589210e-05, 1.6479308921225968e-05, 5.4842169146680313e-06, 1.7121991444326337e-06, 5.0124990214178011e-07, 1.3752997102467001e-07, 3.5346942618254887e-08, 8.5049436556100333e-09, 1.9146717079546461e-09, 4.0303485756427884e-10, 7.9272325104664409e-11, 1.4558577681248529e-11, 2.4946167855672290e-12, 3.9849788475246675e-13, 5.9294550957769600e-14, 8.2106561126223668e-15, 1.0570613866427036e-15, 
1.2639871956315117e-16, 1.4022988180739114e-17, 1.4417902155501092e-18, 1.3721655943148051e-19, 1.2072647224673922e-20, 9.8063138469438635e-22, 7.3433969692709968e-23, 5.0619653821736361e-24, 3.2068221113110570e-25, 1.8639024742210292e-26, 9.9214758418835271e-28, 4.8272033657573108e-29, 2.1423361912655937e-30, 8.6536469704170907e-32, 3.1740376151073218e-33, 1.0544772050999357e-34, 3.1645195547118431e-36, 8.5540504547638604e-38, 2.0762677615749785e-39, 4.5101690564191873e-41, 8.7364621051992394e-43, 1.5032084002307616e-44, 2.2877484525825812e-46, 3.0655422197589668e-48, 3.5986560950072034e-50, 3.6806734534292869e-52, 3.2602867516914131e-54, 2.4845512749608618e-56, 1.6170383864591641e-58, 8.9152934736506373e-61, 4.1261409970889649e-63, 1.5867585378958465e-65, 5.0121772980919659e-68, 1.2834925357232391e-70, 2.6247146396219082e-73, 4.2125456733725475e-76, 5.1995900683110418e-79, 4.8189357949651464e-82, 3.2586916028699435e-85, 1.5528225080749898e-88, 4.9942483588606006e-92, 1.0266029334944310e-95, 1.2563394260844383e-99, 8.3190735956096083e-104, 2.6047400909049849e-108, 3.1495974864069521e-113, 1.0539163395229755e-118, 5.1640253363781065e-125, 7.2774136808374355e-133}, + {1.0370401330404128e-133, 7.6628104007938171e-126, 1.6177944458582779e-119, 4.9846209741944082e-114, 4.2410903981966999e-109, 1.3914702767711137e-104, 2.1562790735643016e-100, 1.8064249655941844e-96, 9.0033176079341812e-93, 2.8662892545517945e-89, 6.1560200907369249e-86, 9.3130571849846176e-83, 1.0276539331534194e-79, 8.5120283276029899e-77, 5.4209234698248101e-74, 2.7089105182625548e-71, 1.0808323828410104e-68, 3.4954669585711314e-66, 9.2841597125229309e-64, 2.0487495492161133e-61, 3.7947795686740356e-59, 5.9537760035157825e-57, 7.9771959541897771e-55, 9.1949435242106659e-53, 9.1785078949346243e-51, 7.9824553719798949e-49, 6.0817267689790556e-47, 4.0796548388663509e-45, 2.4206136388252030e-43, 1.2757690988603576e-41, 5.9959663570350600e-40, 2.5220557090360086e-38, 9.5260156082741867e-37, 3.2409917090630152e-35, 9.9611495020508274e-34, 2.7731632192036800e-32, 7.0108006530723495e-31, 1.6132689567876035e-29, 3.3864807517869285e-28, 6.4981402796424245e-27, 1.1420079983768409e-25, 1.8415308229503966e-24, 2.7293664944237734e-23, 3.7240665643118182e-22, 4.6849502295754945e-21, 5.4418440370928148e-20, 5.8442290512558873e-19, 5.8103464142089000e-18, 5.3541848627525126e-17, 4.5782053131046985e-16, 3.6364250808804760e-15, 2.6857950742667181e-14, 1.8463211087063347e-13, 1.1824213225107695e-12, 7.0605885367663953e-12, 3.9342842554083200e-11, 2.0472924906259976e-10, 9.9563021167170463e-10, 4.5281139927096718e-09, 1.9271612548094547e-08, 7.6800568196628212e-08, 2.8675127848315921e-07, 1.0036330972927129e-06, 3.2945281705899829e-06, 1.0147672170741122e-05, 2.9341866948530294e-05, 7.9677853191228106e-05, 2.0327512248096151e-04, 4.8739807702018396e-04, 1.0987048493532403e-03, 2.3292137543048106e-03, 4.6450584683976901e-03, 8.7164233107623907e-03, 1.5394018995268760e-02, 2.5593125567577809e-02, 4.0062052918626051e-02, 5.9054458453798808e-02, 8.1986641314913386e-02, 1.0721512879081883e-01, 1.3207899230191014e-01, 1.5328822654091181e-01, 1.6761177530123578e-01, 1.7267715506412973e-01, 1.6761177530123578e-01, 1.5328822654091181e-01, 1.3207899230191014e-01, 1.0721512879081883e-01, 8.1986641314913386e-02, 5.9054458453798808e-02, 4.0062052918626051e-02, 2.5593125567577809e-02, 1.5394018995268760e-02, 8.7164233107623907e-03, 4.6450584683976901e-03, 2.3292137543048106e-03, 1.0987048493532403e-03, 4.8739807702018396e-04, 2.0327512248096151e-04, 
7.9677853191228106e-05, 2.9341866948530294e-05, 1.0147672170741122e-05, 3.2945281705899829e-06, 1.0036330972927129e-06, 2.8675127848315921e-07, 7.6800568196628212e-08, 1.9271612548094547e-08, 4.5281139927096718e-09, 9.9563021167170463e-10, 2.0472924906259976e-10, 3.9342842554083200e-11, 7.0605885367663953e-12, 1.1824213225107695e-12, 1.8463211087063347e-13, 2.6857950742667181e-14, 3.6364250808804760e-15, 4.5782053131046985e-16, 5.3541848627525126e-17, 5.8103464142089000e-18, 5.8442290512558873e-19, 5.4418440370928148e-20, 4.6849502295754945e-21, 3.7240665643118182e-22, 2.7293664944237734e-23, 1.8415308229503966e-24, 1.1420079983768409e-25, 6.4981402796424245e-27, 3.3864807517869285e-28, 1.6132689567876035e-29, 7.0108006530723495e-31, 2.7731632192036800e-32, 9.9611495020508274e-34, 3.2409917090630152e-35, 9.5260156082741867e-37, 2.5220557090360086e-38, 5.9959663570350600e-40, 1.2757690988603576e-41, 2.4206136388252030e-43, 4.0796548388663509e-45, 6.0817267689790556e-47, 7.9824553719798949e-49, 9.1785078949346243e-51, 9.1949435242106659e-53, 7.9771959541897771e-55, 5.9537760035157825e-57, 3.7947795686740356e-59, 2.0487495492161133e-61, 9.2841597125229309e-64, 3.4954669585711314e-66, 1.0808323828410104e-68, 2.7089105182625548e-71, 5.4209234698248101e-74, 8.5120283276029899e-77, 1.0276539331534194e-79, 9.3130571849846176e-83, 6.1560200907369249e-86, 2.8662892545517945e-89, 9.0033176079341812e-93, 1.8064249655941844e-96, 2.1562790735643016e-100, 1.3914702767711137e-104, 4.2410903981966999e-109, 4.9846209741944082e-114, 1.6177944458582779e-119, 7.6628104007938171e-126, 1.0370401330404128e-133}, + {1.4774864044730919e-134, 1.1366413967312580e-126, 2.4820647345980668e-120, 7.8835864352328026e-115, 6.9000209771443205e-110, 2.3253044708336631e-105, 3.6970767260897540e-101, 3.1749907175216285e-97, 1.6210276225797838e-93, 5.2835329659655297e-90, 1.1612190047837748e-86, 1.7969751555347008e-83, 2.0276116391228268e-80, 1.7168527133451101e-77, 1.1174481216103580e-74, 5.7057155838516782e-72, 2.3257094876339456e-69, 7.6827297329990042e-67, 2.0840476902701378e-64, 4.6963320200403117e-62, 8.8821741614196649e-60, 1.4228297251266947e-57, 1.9462991520350709e-55, 2.2902568991273256e-53, 2.3338081888304624e-51, 2.0719270806826472e-49, 1.6113919045974023e-47, 1.1033912986165171e-45, 6.6828682986466127e-44, 3.5953723054115794e-42, 1.7249314720204933e-40, 7.4065841467302406e-39, 2.8558675123113318e-37, 9.9193674854686742e-36, 3.1125293650397015e-34, 8.8470553643068575e-33, 2.2836747845517017e-31, 5.3659089984759047e-30, 1.1502283587852324e-28, 2.2540053407015112e-27, 4.0457517920854419e-26, 6.6636084030909077e-25, 1.0088605806727544e-23, 1.4062620052335121e-22, 1.8074912939358217e-21, 2.1452843610753105e-20, 2.3544032965183590e-19, 2.3923239898564218e-18, 2.2533431601785240e-17, 1.9696954324441430e-16, 1.5995738956856818e-15, 1.2080546491164034e-14, 8.4930831160690606e-14, 5.5633632513833882e-13, 3.3984267451196722e-12, 1.9374997522123724e-11, 1.0317252591614767e-10, 5.1352562907714339e-10, 2.3907560585436029e-09, 1.0417563656335543e-08, 4.2512972951519652e-08, 1.6257453952247463e-07, 5.8290254892118051e-07, 1.9605333794325452e-06, 6.1886554017466215e-06, 1.8342475402473601e-05, 5.1067234537777706e-05, 1.3360429556693264e-04, 3.2858768305186580e-04, 7.5994560780524184e-04, 1.6532956640381497e-03, 3.3843936084184865e-03, 6.5206132604078895e-03, 1.1827087059306185e-02, 2.0199624381625723e-02, 3.2491469153777909e-02, 4.9230083099421298e-02, 7.0273553473008815e-02, 9.4516761260745735e-02, 1.1979187349505478e-01, 
1.4308174026017673e-01, 1.6106711507035643e-01, 1.7088881774276651e-01, 1.7088881774276651e-01, 1.6106711507035643e-01, 1.4308174026017673e-01, 1.1979187349505478e-01, 9.4516761260745735e-02, 7.0273553473008815e-02, 4.9230083099421298e-02, 3.2491469153777909e-02, 2.0199624381625723e-02, 1.1827087059306185e-02, 6.5206132604078895e-03, 3.3843936084184865e-03, 1.6532956640381497e-03, 7.5994560780524184e-04, 3.2858768305186580e-04, 1.3360429556693264e-04, 5.1067234537777706e-05, 1.8342475402473601e-05, 6.1886554017466215e-06, 1.9605333794325452e-06, 5.8290254892118051e-07, 1.6257453952247463e-07, 4.2512972951519652e-08, 1.0417563656335543e-08, 2.3907560585436029e-09, 5.1352562907714339e-10, 1.0317252591614767e-10, 1.9374997522123724e-11, 3.3984267451196722e-12, 5.5633632513833882e-13, 8.4930831160690606e-14, 1.2080546491164034e-14, 1.5995738956856818e-15, 1.9696954324441430e-16, 2.2533431601785240e-17, 2.3923239898564218e-18, 2.3544032965183590e-19, 2.1452843610753105e-20, 1.8074912939358217e-21, 1.4062620052335121e-22, 1.0088605806727544e-23, 6.6636084030909077e-25, 4.0457517920854419e-26, 2.2540053407015112e-27, 1.1502283587852324e-28, 5.3659089984759047e-30, 2.2836747845517017e-31, 8.8470553643068575e-33, 3.1125293650397015e-34, 9.9193674854686742e-36, 2.8558675123113318e-37, 7.4065841467302406e-39, 1.7249314720204933e-40, 3.5953723054115794e-42, 6.6828682986466127e-44, 1.1033912986165171e-45, 1.6113919045974023e-47, 2.0719270806826472e-49, 2.3338081888304624e-51, 2.2902568991273256e-53, 1.9462991520350709e-55, 1.4228297251266947e-57, 8.8821741614196649e-60, 4.6963320200403117e-62, 2.0840476902701378e-64, 7.6827297329990042e-67, 2.3257094876339456e-69, 5.7057155838516782e-72, 1.1174481216103580e-74, 1.7168527133451101e-77, 2.0276116391228268e-80, 1.7969751555347008e-83, 1.1612190047837748e-86, 5.2835329659655297e-90, 1.6210276225797838e-93, 3.1749907175216285e-97, 3.6970767260897540e-101, 2.3253044708336631e-105, 6.9000209771443205e-110, 7.8835864352328026e-115, 2.4820647345980668e-120, 1.1366413967312580e-126, 1.4774864044730919e-134}, + {2.1045621159574830e-135, 1.6853736680140806e-127, 3.8060787191717414e-121, 1.2460431717780355e-115, 1.1217254900384875e-110, 3.8823686733906794e-106, 6.3324559268876792e-102, 5.5741078533835752e-98, 2.9150063258205350e-94, 9.7261605479925101e-91, 2.1872204017818973e-87, 3.4618487459964773e-84, 3.9938420292542256e-81, 3.4566329048643892e-78, 2.2990777865226915e-75, 1.1993601521489046e-72, 4.9937669382694245e-70, 1.6848180938195073e-67, 4.6671413158696264e-65, 1.0738850147584223e-62, 2.0736313112573398e-60, 3.3911184321991622e-58, 4.7353070830108189e-56, 5.6878446259603137e-54, 5.9160971507025675e-52, 5.3608992748856614e-50, 4.2554770940703958e-48, 2.9740995846398285e-46, 1.8385123238649587e-44, 1.0095495376276113e-42, 4.9435800520554766e-41, 2.1666198058800618e-39, 8.5272688786829149e-38, 3.0232780766836873e-36, 9.6838410701764544e-35, 2.8099176317436489e-33, 7.4047930191407272e-32, 1.7763620673555590e-30, 3.8878538387006395e-29, 7.7794521016127823e-28, 1.4259174843705503e-26, 2.3985064022625709e-25, 3.7088241471290263e-24, 5.2806166677136793e-23, 6.9334558099800012e-22, 8.4073193071442628e-21, 9.4275495388536763e-20, 9.7888471796527459e-19, 9.4228636835633407e-18, 8.4188202692459649e-17, 6.9888828000995662e-16, 5.3963258195263796e-15, 3.8792170840235010e-14, 2.5986297789613529e-13, 1.6235905955395716e-12, 9.4688541385973895e-12, 5.1587531828242437e-11, 2.6274698033796913e-10, 1.2519243488472937e-09, 5.5840692373466946e-09, 2.3330544086080361e-08, 
9.1359564208430839e-08, 3.3548824403388151e-07, 1.1558985161302230e-06, 3.7384592935259604e-06, 1.1355193773739383e-05, 3.2404937824118138e-05, 8.6919300398468442e-05, 2.1921561912382546e-04, 5.2002884998328185e-04, 1.1607090699966350e-03, 2.4383039027033393e-03, 4.8221285878206101e-03, 8.9801569838840641e-03, 1.5751462301844443e-02, 2.6027795319202608e-02, 4.0523924217918790e-02, 5.9458195592787075e-02, 8.2223731723011229e-02, 1.0718097152174927e-01, 1.3170852135471239e-01, 1.5258690288545360e-01, 1.6666726633087006e-01, 1.7164316012362582e-01, 1.6666726633087006e-01, 1.5258690288545360e-01, 1.3170852135471239e-01, 1.0718097152174927e-01, 8.2223731723011229e-02, 5.9458195592787075e-02, 4.0523924217918790e-02, 2.6027795319202608e-02, 1.5751462301844443e-02, 8.9801569838840641e-03, 4.8221285878206101e-03, 2.4383039027033393e-03, 1.1607090699966350e-03, 5.2002884998328185e-04, 2.1921561912382546e-04, 8.6919300398468442e-05, 3.2404937824118138e-05, 1.1355193773739383e-05, 3.7384592935259604e-06, 1.1558985161302230e-06, 3.3548824403388151e-07, 9.1359564208430839e-08, 2.3330544086080361e-08, 5.5840692373466946e-09, 1.2519243488472937e-09, 2.6274698033796913e-10, 5.1587531828242437e-11, 9.4688541385973895e-12, 1.6235905955395716e-12, 2.5986297789613529e-13, 3.8792170840235010e-14, 5.3963258195263796e-15, 6.9888828000995662e-16, 8.4188202692459649e-17, 9.4228636835633407e-18, 9.7888471796527459e-19, 9.4275495388536763e-20, 8.4073193071442628e-21, 6.9334558099800012e-22, 5.2806166677136793e-23, 3.7088241471290263e-24, 2.3985064022625709e-25, 1.4259174843705503e-26, 7.7794521016127823e-28, 3.8878538387006395e-29, 1.7763620673555590e-30, 7.4047930191407272e-32, 2.8099176317436489e-33, 9.6838410701764544e-35, 3.0232780766836873e-36, 8.5272688786829149e-38, 2.1666198058800618e-39, 4.9435800520554766e-41, 1.0095495376276113e-42, 1.8385123238649587e-44, 2.9740995846398285e-46, 4.2554770940703958e-48, 5.3608992748856614e-50, 5.9160971507025675e-52, 5.6878446259603137e-54, 4.7353070830108189e-56, 3.3911184321991622e-58, 2.0736313112573398e-60, 1.0738850147584223e-62, 4.6671413158696264e-65, 1.6848180938195073e-67, 4.9937669382694245e-70, 1.1993601521489046e-72, 2.2990777865226915e-75, 3.4566329048643892e-78, 3.9938420292542256e-81, 3.4618487459964773e-84, 2.1872204017818973e-87, 9.7261605479925101e-91, 2.9150063258205350e-94, 5.5741078533835752e-98, 6.3324559268876792e-102, 3.8823686733906794e-106, 1.1217254900384875e-110, 1.2460431717780355e-115, 3.8060787191717414e-121, 1.6853736680140806e-127, 2.1045621159574830e-135}, + {2.9971690491012730e-136, 2.4980891398206762e-128, 5.8333713614299987e-122, 1.9681709423933214e-116, 1.8221727762675135e-111, 6.4763280099261484e-107, 1.0835544161526790e-102, 9.7751589620462579e-99, 5.2354686514639054e-95, 1.7880404661796150e-91, 4.1137899834361915e-88, 6.6588269447352457e-85, 7.8536797447719853e-82, 6.9470795412027442e-79, 4.7212966724403236e-76, 2.5160708333635340e-73, 1.0700081200070465e-70, 3.6866210764241453e-68, 1.0427591853094809e-65, 2.4496158809944983e-63, 4.8287654459243867e-61, 8.0607602578177551e-59, 1.1488941335553380e-56, 1.4084920383852964e-54, 1.4951953571335084e-52, 1.3827452604659908e-50, 1.1201727218381219e-48, 7.9894861866156147e-47, 5.0402806679659781e-45, 2.8245056542381498e-43, 1.4115238161648369e-41, 6.3134908835445427e-40, 2.5359952829354267e-38, 9.1766156441607901e-37, 3.0000961317861622e-35, 8.8855426938068462e-34, 2.3901684056219713e-32, 5.8532507898005387e-31, 1.3078348727191383e-29, 2.6717694287109624e-28, 5.0001529132188371e-27, 
8.5882149512444349e-26, 1.3561502945339099e-24, 1.9719885628376035e-23, 2.6445905466631306e-22, 3.2756544281813711e-21, 3.7524614126630812e-20, 3.9808239456321635e-19, 3.9155933049886526e-18, 3.5751188594220651e-17, 3.0333673764885698e-16, 2.3941367575799993e-15, 1.7594869510962716e-14, 1.2051394761470136e-13, 7.6998498930386894e-13, 4.5928264710545402e-12, 2.5595903423630225e-11, 1.3337516615570700e-10, 6.5027567060176707e-10, 2.9684197043117582e-09, 1.2694911467897228e-08, 5.0894009383117858e-08, 1.9137169385842292e-07, 6.7528978068278956e-07, 2.2372679339319808e-06, 6.9624497238718160e-06, 2.0361547224732169e-05, 5.5980964406792492e-05, 1.4474894898356766e-04, 3.5211870018385556e-04, 8.0612713585123284e-04, 1.7373614851938118e-03, 3.5259000508765289e-03, 6.7399040525805923e-03, 1.2137859543972172e-02, 2.0598052684982535e-02, 3.2944830178350461e-02, 4.9670368744572549e-02, 7.0602349240131604e-02, 9.4625036098936688e-02, 1.1959210591508003e-01, 1.4254236006576013e-01, 1.6023480790396424e-01, 1.6988651844048314e-01, 1.6988651844048314e-01, 1.6023480790396424e-01, 1.4254236006576013e-01, 1.1959210591508003e-01, 9.4625036098936688e-02, 7.0602349240131604e-02, 4.9670368744572549e-02, 3.2944830178350461e-02, 2.0598052684982535e-02, 1.2137859543972172e-02, 6.7399040525805923e-03, 3.5259000508765289e-03, 1.7373614851938118e-03, 8.0612713585123284e-04, 3.5211870018385556e-04, 1.4474894898356766e-04, 5.5980964406792492e-05, 2.0361547224732169e-05, 6.9624497238718160e-06, 2.2372679339319808e-06, 6.7528978068278956e-07, 1.9137169385842292e-07, 5.0894009383117858e-08, 1.2694911467897228e-08, 2.9684197043117582e-09, 6.5027567060176707e-10, 1.3337516615570700e-10, 2.5595903423630225e-11, 4.5928264710545402e-12, 7.6998498930386894e-13, 1.2051394761470136e-13, 1.7594869510962716e-14, 2.3941367575799993e-15, 3.0333673764885698e-16, 3.5751188594220651e-17, 3.9155933049886526e-18, 3.9808239456321635e-19, 3.7524614126630812e-20, 3.2756544281813711e-21, 2.6445905466631306e-22, 1.9719885628376035e-23, 1.3561502945339099e-24, 8.5882149512444349e-26, 5.0001529132188371e-27, 2.6717694287109624e-28, 1.3078348727191383e-29, 5.8532507898005387e-31, 2.3901684056219713e-32, 8.8855426938068462e-34, 3.0000961317861622e-35, 9.1766156441607901e-37, 2.5359952829354267e-38, 6.3134908835445427e-40, 1.4115238161648369e-41, 2.8245056542381498e-43, 5.0402806679659781e-45, 7.9894861866156147e-47, 1.1201727218381219e-48, 1.3827452604659908e-50, 1.4951953571335084e-52, 1.4084920383852964e-54, 1.1488941335553380e-56, 8.0607602578177551e-59, 4.8287654459243867e-61, 2.4496158809944983e-63, 1.0427591853094809e-65, 3.6866210764241453e-68, 1.0700081200070465e-70, 2.5160708333635340e-73, 4.7212966724403236e-76, 6.9470795412027442e-79, 7.8536797447719853e-82, 6.6588269447352457e-85, 4.1137899834361915e-88, 1.7880404661796150e-91, 5.2354686514639054e-95, 9.7751589620462579e-99, 1.0835544161526790e-102, 6.4763280099261484e-107, 1.8221727762675135e-111, 1.9681709423933214e-116, 5.8333713614299987e-122, 2.4980891398206762e-128, 2.9971690491012730e-136}, + {4.2674931553342203e-137, 3.7013509844210040e-129, 8.9359543576021383e-123, 3.1068184196664260e-117, 2.9577586950025811e-112, 1.0793940202510381e-107, 1.8522459789363982e-103, 1.7123515642355668e-99, 9.3917005928096861e-96, 3.2827539517233995e-92, 7.7262644929909970e-89, 1.2788460726412831e-85, 1.5418407850274678e-82, 1.3937615371156682e-79, 9.6774116872500576e-77, 5.2679228724311439e-74, 2.2879261623118261e-71, 8.0492001695776603e-69, 2.3244392181338319e-66, 5.5743129455028007e-64, 
1.1216194835766520e-61, 1.9110201175931197e-59, 2.7798349876894280e-57, 3.4779129275742834e-55, 3.7676299883874885e-53, 3.5555263388415704e-51, 2.9391917507565755e-49, 2.1391283000208420e-47, 1.3770363396814081e-45, 7.8742105891358037e-44, 4.0154223770167576e-42, 1.8327311167318893e-40, 7.5123280800309780e-39, 2.7740812006389936e-37, 9.2554744703184908e-36, 2.7976499634157274e-34, 7.6807810254033586e-33, 1.9198421026192681e-31, 4.3786397679911876e-30, 9.1312863995714357e-29, 1.7445924812851179e-27, 3.0593178878006020e-26, 4.9326008142373812e-25, 7.3241393698795336e-24, 1.0030774102469258e-22, 1.2689360428385754e-21, 1.4847983173617774e-20, 1.6090879587826797e-19, 1.6169926387796005e-18, 1.5085327113052835e-17, 1.3079623075775175e-16, 1.0550654299237249e-15, 7.9256160807813284e-15, 5.5495687412198589e-14, 3.6252638108375508e-13, 2.2112377011770522e-12, 1.2603426258460018e-11, 6.7177337738006964e-11, 3.3507655233538448e-10, 1.5650990540810345e-09, 6.8499863308168436e-09, 2.8109038530404097e-08, 1.0820676942101237e-07, 3.9097121047484011e-07, 1.3265796796150300e-06, 4.2288616562734994e-06, 1.2670859378200327e-05, 3.5699320251985919e-05, 9.4613317406319364e-05, 2.3596028278015030e-04, 5.5394259248142631e-04, 1.2245148633746915e-03, 2.5495331679026910e-03, 5.0011317639987000e-03, 9.2446564129297221e-03, 1.6107280751087583e-02, 2.6457407162359760e-02, 4.0977099534106338e-02, 5.9850822005640535e-02, 8.2450068298432835e-02, 1.0714008439362262e-01, 1.3133855013394508e-01, 1.5189453076872492e-01, 1.6573850820195182e-01, 1.7062752012289428e-01, 1.6573850820195182e-01, 1.5189453076872492e-01, 1.3133855013394508e-01, 1.0714008439362262e-01, 8.2450068298432835e-02, 5.9850822005640535e-02, 4.0977099534106338e-02, 2.6457407162359760e-02, 1.6107280751087583e-02, 9.2446564129297221e-03, 5.0011317639987000e-03, 2.5495331679026910e-03, 1.2245148633746915e-03, 5.5394259248142631e-04, 2.3596028278015030e-04, 9.4613317406319364e-05, 3.5699320251985919e-05, 1.2670859378200327e-05, 4.2288616562734994e-06, 1.3265796796150300e-06, 3.9097121047484011e-07, 1.0820676942101237e-07, 2.8109038530404097e-08, 6.8499863308168436e-09, 1.5650990540810345e-09, 3.3507655233538448e-10, 6.7177337738006964e-11, 1.2603426258460018e-11, 2.2112377011770522e-12, 3.6252638108375508e-13, 5.5495687412198589e-14, 7.9256160807813284e-15, 1.0550654299237249e-15, 1.3079623075775175e-16, 1.5085327113052835e-17, 1.6169926387796005e-18, 1.6090879587826797e-19, 1.4847983173617774e-20, 1.2689360428385754e-21, 1.0030774102469258e-22, 7.3241393698795336e-24, 4.9326008142373812e-25, 3.0593178878006020e-26, 1.7445924812851179e-27, 9.1312863995714357e-29, 4.3786397679911876e-30, 1.9198421026192681e-31, 7.6807810254033586e-33, 2.7976499634157274e-34, 9.2554744703184908e-36, 2.7740812006389936e-37, 7.5123280800309780e-39, 1.8327311167318893e-40, 4.0154223770167576e-42, 7.8742105891358037e-44, 1.3770363396814081e-45, 2.1391283000208420e-47, 2.9391917507565755e-49, 3.5555263388415704e-51, 3.7676299883874885e-53, 3.4779129275742834e-55, 2.7798349876894280e-57, 1.9110201175931197e-59, 1.1216194835766520e-61, 5.5743129455028007e-64, 2.3244392181338319e-66, 8.0492001695776603e-69, 2.2879261623118261e-71, 5.2679228724311439e-74, 9.6774116872500576e-77, 1.3937615371156682e-79, 1.5418407850274678e-82, 1.2788460726412831e-85, 7.7262644929909970e-89, 3.2827539517233995e-92, 9.3917005928096861e-96, 1.7123515642355668e-99, 1.8522459789363982e-103, 1.0793940202510381e-107, 2.9577586950025811e-112, 3.1068184196664260e-117, 8.9359543576021383e-123, 
3.7013509844210040e-129, 4.2674931553342203e-137}, + {6.0750158130606195e-138, 5.4821992024733287e-130, 1.3681822165982846e-123, 4.9011169950359405e-118, 4.7974396933811439e-113, 1.7974392277078273e-108, 3.1631548152333414e-104, 2.9963165563522804e-100, 1.6827169561417482e-96, 6.0190850814126162e-93, 1.4490438595507066e-89, 2.4523185664718153e-86, 3.0220219471047618e-83, 2.7913874973199548e-80, 1.9799592744971097e-77, 1.1008005267477927e-74, 4.8820589020655159e-72, 1.7536229837285863e-69, 5.1696909850843693e-67, 1.2654633658209892e-64, 2.5987959219621812e-62, 4.5188057419567269e-60, 6.7077790006444227e-58, 8.5635704641083388e-56, 9.4658821709007862e-54, 9.1146156997272630e-52, 7.6876441303763667e-50, 5.7085575439180572e-48, 3.7493520841061347e-46, 2.1874580274727376e-44, 1.1381244408182845e-42, 5.3001743821412355e-41, 2.2167101442428974e-39, 8.3523638615101674e-38, 2.8435471995138850e-36, 8.7709151746020293e-35, 2.4573526663074424e-33, 6.2684680341301147e-32, 1.4591317861612538e-30, 3.1058077266730593e-29, 6.0569508661768345e-28, 1.0842625724312578e-26, 1.7847202502838671e-25, 2.7056507791212747e-24, 3.7836354660151275e-23, 4.8878185643771254e-22, 5.8409861850177518e-21, 6.4652715404585129e-20, 6.6366701688022747e-19, 6.3252994285961157e-18, 5.6034658987730242e-17, 4.6187988803389262e-16, 3.5458950811944735e-15, 2.5377705512106429e-14, 1.6946972716989475e-13, 1.0568382827043380e-12, 6.1595020440963285e-12, 3.3576005591117169e-11, 1.7130382466099029e-10, 8.1856149500888656e-10, 3.6657050674739318e-09, 1.5393813958417279e-08, 6.0654560255829627e-08, 2.2435783940914669e-07, 7.7946793854749263e-07, 2.5447213452537048e-06, 7.8101702177205490e-06, 2.2544487920551540e-05, 6.1227929783044720e-05, 1.5651161607055494e-04, 3.7668553922014524e-04, 8.5385410230442147e-04, 1.8234228920144958e-03, 3.6694988559735154e-03, 6.9606294949961902e-03, 1.2448294747794275e-02, 2.0993195618512490e-02, 3.3391282475767885e-02, 5.0100599356951693e-02, 7.0919958702876942e-02, 9.4724291720265918e-02, 1.1938903980933140e-01, 1.4200771666714165e-01, 1.5941508322714990e-01, 1.6890164874640598e-01, 1.6890164874640598e-01, 1.5941508322714990e-01, 1.4200771666714165e-01, 1.1938903980933140e-01, 9.4724291720265918e-02, 7.0919958702876942e-02, 5.0100599356951693e-02, 3.3391282475767885e-02, 2.0993195618512490e-02, 1.2448294747794275e-02, 6.9606294949961902e-03, 3.6694988559735154e-03, 1.8234228920144958e-03, 8.5385410230442147e-04, 3.7668553922014524e-04, 1.5651161607055494e-04, 6.1227929783044720e-05, 2.2544487920551540e-05, 7.8101702177205490e-06, 2.5447213452537048e-06, 7.7946793854749263e-07, 2.2435783940914669e-07, 6.0654560255829627e-08, 1.5393813958417279e-08, 3.6657050674739318e-09, 8.1856149500888656e-10, 1.7130382466099029e-10, 3.3576005591117169e-11, 6.1595020440963285e-12, 1.0568382827043380e-12, 1.6946972716989475e-13, 2.5377705512106429e-14, 3.5458950811944735e-15, 4.6187988803389262e-16, 5.6034658987730242e-17, 6.3252994285961157e-18, 6.6366701688022747e-19, 6.4652715404585129e-20, 5.8409861850177518e-21, 4.8878185643771254e-22, 3.7836354660151275e-23, 2.7056507791212747e-24, 1.7847202502838671e-25, 1.0842625724312578e-26, 6.0569508661768345e-28, 3.1058077266730593e-29, 1.4591317861612538e-30, 6.2684680341301147e-32, 2.4573526663074424e-33, 8.7709151746020293e-35, 2.8435471995138850e-36, 8.3523638615101674e-38, 2.2167101442428974e-39, 5.3001743821412355e-41, 1.1381244408182845e-42, 2.1874580274727376e-44, 3.7493520841061347e-46, 5.7085575439180572e-48, 7.6876441303763667e-50, 9.1146156997272630e-52, 
9.4658821709007862e-54, 8.5635704641083388e-56, 6.7077790006444227e-58, 4.5188057419567269e-60, 2.5987959219621812e-62, 1.2654633658209892e-64, 5.1696909850843693e-67, 1.7536229837285863e-69, 4.8820589020655159e-72, 1.1008005267477927e-74, 1.9799592744971097e-77, 2.7913874973199548e-80, 3.0220219471047618e-83, 2.4523185664718153e-86, 1.4490438595507066e-89, 6.0190850814126162e-93, 1.6827169561417482e-96, 2.9963165563522804e-100, 3.1631548152333414e-104, 1.7974392277078273e-108, 4.7974396933811439e-113, 4.9011169950359405e-118, 1.3681822165982846e-123, 5.4821992024733287e-130, 6.0750158130606195e-138}, + {8.6464098495106045e-139, 8.1169547967150641e-131, 2.0937788694250258e-124, 7.7268624062801043e-119, 7.7755878030242305e-114, 2.9905793066209871e-109, 5.3966031917331080e-105, 5.2373672835160608e-101, 3.0113519152678094e-97, 1.1021983838887256e-93, 2.7138416423119788e-90, 4.6954865769208796e-87, 5.9136418179343394e-84, 5.5809142352163536e-81, 4.0435337904295643e-78, 2.2958300915077279e-75, 1.0396325878228644e-72, 3.8123200620819477e-70, 1.1471867481155006e-67, 2.8660528596806035e-65, 6.0065917977528455e-63, 1.0657711610182893e-60, 1.6142564784909037e-58, 2.1026978884308611e-56, 2.3713251069838819e-54, 2.3294898984397351e-52, 2.0044580918502007e-50, 1.5184626573152095e-48, 1.0174299157427975e-46, 6.0556195147012746e-45, 3.2142710908365357e-43, 1.5270851777984907e-41, 6.5158604619910054e-40, 2.5048070243067333e-38, 8.7004606571271586e-37, 2.7381749245865647e-35, 7.8277738587896100e-34, 2.0375540992709603e-32, 4.8399847326203970e-31, 1.0513633818680897e-29, 2.0926202706959383e-28, 3.8234939104131151e-27, 6.4242027780709820e-26, 9.9421354277925673e-25, 1.4194294864852321e-23, 1.8722162613285435e-22, 2.2845745768934510e-21, 2.5824265914439110e-20, 2.7074430614342264e-19, 2.6357601492491789e-18, 2.3853221384019439e-17, 2.0087998156976881e-16, 1.5758126400353855e-15, 1.1525451076836792e-14, 7.8665152475648879e-14, 5.0146808941799565e-13, 2.9880423319270495e-12, 1.6654840593814865e-11, 8.6899012302376471e-11, 4.2472082249711324e-10, 1.9457386067825712e-09, 8.3602716973645751e-09, 3.3710054470838133e-08, 1.2762506765080952e-07, 4.5391122547936338e-07, 1.5173031250618367e-06, 4.7690949847434419e-06, 1.4100857760918970e-05, 3.9234961307384339e-05, 1.0277324714018339e-04, 2.5352235086933272e-04, 5.8914391280362889e-04, 1.2901045250613646e-03, 2.6628467710093206e-03, 5.1819700035925865e-03, 9.5097957728564348e-03, 1.6461365836824633e-02, 2.6881937632071830e-02, 4.1421702134247403e-02, 6.0232621258249110e-02, 8.2666028537237118e-02, 1.0709279898985245e-01, 1.3096920653341185e-01, 1.5121094137038713e-01, 1.6482506909185640e-01, 1.6962969836778974e-01, 1.6482506909185640e-01, 1.5121094137038713e-01, 1.3096920653341185e-01, 1.0709279898985245e-01, 8.2666028537237118e-02, 6.0232621258249110e-02, 4.1421702134247403e-02, 2.6881937632071830e-02, 1.6461365836824633e-02, 9.5097957728564348e-03, 5.1819700035925865e-03, 2.6628467710093206e-03, 1.2901045250613646e-03, 5.8914391280362889e-04, 2.5352235086933272e-04, 1.0277324714018339e-04, 3.9234961307384339e-05, 1.4100857760918970e-05, 4.7690949847434419e-06, 1.5173031250618367e-06, 4.5391122547936338e-07, 1.2762506765080952e-07, 3.3710054470838133e-08, 8.3602716973645751e-09, 1.9457386067825712e-09, 4.2472082249711324e-10, 8.6899012302376471e-11, 1.6654840593814865e-11, 2.9880423319270495e-12, 5.0146808941799565e-13, 7.8665152475648879e-14, 1.1525451076836792e-14, 1.5758126400353855e-15, 2.0087998156976881e-16, 2.3853221384019439e-17, 2.6357601492491789e-18, 
2.7074430614342264e-19, 2.5824265914439110e-20, 2.2845745768934510e-21, 1.8722162613285435e-22, 1.4194294864852321e-23, 9.9421354277925673e-25, 6.4242027780709820e-26, 3.8234939104131151e-27, 2.0926202706959383e-28, 1.0513633818680897e-29, 4.8399847326203970e-31, 2.0375540992709603e-32, 7.8277738587896100e-34, 2.7381749245865647e-35, 8.7004606571271586e-37, 2.5048070243067333e-38, 6.5158604619910054e-40, 1.5270851777984907e-41, 3.2142710908365357e-43, 6.0556195147012746e-45, 1.0174299157427975e-46, 1.5184626573152095e-48, 2.0044580918502007e-50, 2.3294898984397351e-52, 2.3713251069838819e-54, 2.1026978884308611e-56, 1.6142564784909037e-58, 1.0657711610182893e-60, 6.0065917977528455e-63, 2.8660528596806035e-65, 1.1471867481155006e-67, 3.8123200620819477e-70, 1.0396325878228644e-72, 2.2958300915077279e-75, 4.0435337904295643e-78, 5.5809142352163536e-81, 5.9136418179343394e-84, 4.6954865769208796e-87, 2.7138416423119788e-90, 1.1021983838887256e-93, 3.0113519152678094e-97, 5.2373672835160608e-101, 5.3966031917331080e-105, 2.9905793066209871e-109, 7.7755878030242305e-114, 7.7268624062801043e-119, 2.0937788694250258e-124, 8.1169547967150641e-131, 8.6464098495106045e-139}, + {1.2303789814620092e-139, 1.2013699333289992e-131, 3.2026074548384006e-125, 1.2174269794916373e-119, 1.2593229738553804e-114, 4.9714964520806193e-110, 9.1982052962411378e-106, 9.1447911140134055e-102, 5.3827092462530275e-98, 2.0157286790487118e-94, 5.0755707227503414e-91, 8.9771058574961900e-88, 1.1553649373808784e-84, 1.1139151556417037e-81, 8.2429409894791420e-79, 4.7790526311204645e-76, 2.2094400576899919e-73, 8.2703192545553517e-71, 2.5400223834270154e-68, 6.4759951809357819e-66, 1.3849218619189611e-63, 2.5072557278333359e-61, 3.8744806525565368e-59, 5.1487100357555966e-57, 5.9234211353878837e-55, 5.9358962183919884e-53, 5.2102035909462387e-51, 4.0261165631274628e-49, 2.7517389408826910e-47, 1.6706338169898752e-45, 9.0454023907569489e-44, 4.3836632815273276e-42, 1.9080203924026029e-40, 7.4822875434667983e-39, 2.6513404940738975e-37, 8.5126426420369604e-36, 2.4827942651601884e-34, 6.5937510349118087e-33, 1.5981308016388612e-31, 3.5423572866413238e-30, 7.1949797524882649e-29, 1.3416207816913450e-27, 2.3006552131922817e-26, 3.6342070015972533e-25, 5.2963698057770957e-24, 7.1317120414745996e-23, 8.8849961119116762e-22, 1.0255026540751254e-20, 1.0979177006155230e-19, 1.0916029509464340e-18, 1.0090271979231116e-17, 8.6804078056359369e-17, 6.9567797833146074e-16, 5.1989584985238723e-15, 3.6262025536343989e-14, 2.3625593061240154e-13, 1.4389854608144821e-12, 8.1997932243736651e-12, 4.3745620487604465e-11, 2.1864887185791365e-10, 1.0245209689572339e-09, 4.5031898910426613e-09, 1.8577866622072818e-08, 7.1975399855994780e-08, 2.6200414925731448e-07, 8.9655811991941417e-07, 2.8853130691641410e-06, 8.7365058018499081e-06, 2.4899323348910079e-05, 6.6819890242436042e-05, 1.6890601339725517e-04, 4.0229796846567709e-04, 9.0312111074868030e-04, 1.9114445894976445e-03, 3.8151134691487313e-03, 7.1826754657021994e-03, 1.2758271065775782e-02, 2.1384981853074116e-02, 3.3830872556682462e-02, 5.0520980072118506e-02, 7.1226720494510898e-02, 9.4814894788900164e-02, 1.1918291538647637e-01, 1.4147779230279267e-01, 1.5860763301741698e-01, 1.6793370935002905e-01, 1.6793370935002905e-01, 1.5860763301741698e-01, 1.4147779230279267e-01, 1.1918291538647637e-01, 9.4814894788900164e-02, 7.1226720494510898e-02, 5.0520980072118506e-02, 3.3830872556682462e-02, 2.1384981853074116e-02, 1.2758271065775782e-02, 7.1826754657021994e-03, 3.8151134691487313e-03, 
1.9114445894976445e-03, 9.0312111074868030e-04, 4.0229796846567709e-04, 1.6890601339725517e-04, 6.6819890242436042e-05, 2.4899323348910079e-05, 8.7365058018499081e-06, 2.8853130691641410e-06, 8.9655811991941417e-07, 2.6200414925731448e-07, 7.1975399855994780e-08, 1.8577866622072818e-08, 4.5031898910426613e-09, 1.0245209689572339e-09, 2.1864887185791365e-10, 4.3745620487604465e-11, 8.1997932243736651e-12, 1.4389854608144821e-12, 2.3625593061240154e-13, 3.6262025536343989e-14, 5.1989584985238723e-15, 6.9567797833146074e-16, 8.6804078056359369e-17, 1.0090271979231116e-17, 1.0916029509464340e-18, 1.0979177006155230e-19, 1.0255026540751254e-20, 8.8849961119116762e-22, 7.1317120414745996e-23, 5.2963698057770957e-24, 3.6342070015972533e-25, 2.3006552131922817e-26, 1.3416207816913450e-27, 7.1949797524882649e-29, 3.5423572866413238e-30, 1.5981308016388612e-31, 6.5937510349118087e-33, 2.4827942651601884e-34, 8.5126426420369604e-36, 2.6513404940738975e-37, 7.4822875434667983e-39, 1.9080203924026029e-40, 4.3836632815273276e-42, 9.0454023907569489e-44, 1.6706338169898752e-45, 2.7517389408826910e-47, 4.0261165631274628e-49, 5.2102035909462387e-51, 5.9358962183919884e-53, 5.9234211353878837e-55, 5.1487100357555966e-57, 3.8744806525565368e-59, 2.5072557278333359e-61, 1.3849218619189611e-63, 6.4759951809357819e-66, 2.5400223834270154e-68, 8.2703192545553517e-71, 2.2094400576899919e-73, 4.7790526311204645e-76, 8.2429409894791420e-79, 1.1139151556417037e-81, 1.1553649373808784e-84, 8.9771058574961900e-88, 5.0755707227503414e-91, 2.0157286790487118e-94, 5.3827092462530275e-98, 9.1447911140134055e-102, 9.1982052962411378e-106, 4.9714964520806193e-110, 1.2593229738553804e-114, 1.2174269794916373e-119, 3.2026074548384006e-125, 1.2013699333289992e-131, 1.2303789814620092e-139}, + {1.7504813313443110e-140, 1.7774902414670824e-132, 4.8962631916290320e-126, 1.9169773412509448e-120, 2.0380950317636651e-115, 8.2575913221058373e-111, 1.5662913179616934e-106, 1.5950515939148060e-102, 9.6102475509992629e-99, 3.6817405361106414e-95, 9.4795771929584613e-92, 1.7137636253252277e-88, 2.2537069207131266e-85, 2.2195670363270061e-82, 1.6773665243938764e-79, 9.9294168393320748e-77, 4.6861855398798507e-74, 1.7903790379405142e-71, 5.6115831515377673e-69, 1.4599152900352242e-66, 3.1854832448751424e-64, 5.8835568128153365e-62, 9.2750122555037185e-60, 1.2572829274411131e-57, 1.4754337744917936e-55, 1.5080977336171035e-53, 1.3501492314506362e-51, 1.0641162728147897e-49, 7.4179019446614824e-48, 4.5933036667242678e-46, 2.5365532103719637e-44, 1.2538070457653651e-42, 5.5662423908008861e-41, 2.2264349328620495e-39, 8.0473188469528885e-38, 2.6355807192340085e-36, 7.8414692242533960e-35, 2.1244914229476255e-33, 5.2531973592621732e-32, 1.1880047374923401e-30, 2.4620531629619134e-29, 4.6845665497518896e-28, 8.1977521328279625e-27, 1.3215706261902287e-25, 1.9657684577473083e-24, 2.7018364834425692e-23, 3.4361611658856167e-22, 4.0489821139593706e-21, 4.4260400812843420e-20, 4.4935788616135659e-19, 4.2418977594565075e-18, 3.7271474634420088e-17, 3.0512318816558229e-16, 2.3295237031463167e-15, 1.6601293173811213e-14, 1.1052712006706022e-13, 6.8801511055288008e-13, 4.0073848073937339e-12, 2.1856088643011424e-11, 1.1169403547356809e-10, 5.3519948055672733e-10, 2.4060047025435626e-09, 1.0153711719361421e-08, 4.0247544813147544e-08, 1.4992205080398724e-07, 5.2506826823537642e-07, 1.7297783253854633e-06, 5.3626317222650889e-06, 1.5651476474775071e-05, 4.3021779350373374e-05, 1.1141208654585957e-04, 2.7191419917337988e-04, 6.2563601676209426e-04, 
1.3574588134771591e-03, 2.7781892530593399e-03, 5.3645468312921761e-03, 9.7754538393799058e-03, 1.6813616037549516e-02, 2.7301369702544091e-02, 4.1857856809001823e-02, 6.0603870172343872e-02, 8.2871975523587077e-02, 1.0703943053616907e-01, 1.3060060873789656e-01, 1.5053596929119434e-01, 1.6392653352569631e-01, 1.6864917988011466e-01, 1.6392653352569631e-01, 1.5053596929119434e-01, 1.3060060873789656e-01, 1.0703943053616907e-01, 8.2871975523587077e-02, 6.0603870172343872e-02, 4.1857856809001823e-02, 2.7301369702544091e-02, 1.6813616037549516e-02, 9.7754538393799058e-03, 5.3645468312921761e-03, 2.7781892530593399e-03, 1.3574588134771591e-03, 6.2563601676209426e-04, 2.7191419917337988e-04, 1.1141208654585957e-04, 4.3021779350373374e-05, 1.5651476474775071e-05, 5.3626317222650889e-06, 1.7297783253854633e-06, 5.2506826823537642e-07, 1.4992205080398724e-07, 4.0247544813147544e-08, 1.0153711719361421e-08, 2.4060047025435626e-09, 5.3519948055672733e-10, 1.1169403547356809e-10, 2.1856088643011424e-11, 4.0073848073937339e-12, 6.8801511055288008e-13, 1.1052712006706022e-13, 1.6601293173811213e-14, 2.3295237031463167e-15, 3.0512318816558229e-16, 3.7271474634420088e-17, 4.2418977594565075e-18, 4.4935788616135659e-19, 4.4260400812843420e-20, 4.0489821139593706e-21, 3.4361611658856167e-22, 2.7018364834425692e-23, 1.9657684577473083e-24, 1.3215706261902287e-25, 8.1977521328279625e-27, 4.6845665497518896e-28, 2.4620531629619134e-29, 1.1880047374923401e-30, 5.2531973592621732e-32, 2.1244914229476255e-33, 7.8414692242533960e-35, 2.6355807192340085e-36, 8.0473188469528885e-38, 2.2264349328620495e-39, 5.5662423908008861e-41, 1.2538070457653651e-42, 2.5365532103719637e-44, 4.5933036667242678e-46, 7.4179019446614824e-48, 1.0641162728147897e-49, 1.3501492314506362e-51, 1.5080977336171035e-53, 1.4754337744917936e-55, 1.2572829274411131e-57, 9.2750122555037185e-60, 5.8835568128153365e-62, 3.1854832448751424e-64, 1.4599152900352242e-66, 5.6115831515377673e-69, 1.7903790379405142e-71, 4.6861855398798507e-74, 9.9294168393320748e-77, 1.6773665243938764e-79, 2.2195670363270061e-82, 2.2537069207131266e-85, 1.7137636253252277e-88, 9.4795771929584613e-92, 3.6817405361106414e-95, 9.6102475509992629e-99, 1.5950515939148060e-102, 1.5662913179616934e-106, 8.2575913221058373e-111, 2.0380950317636651e-115, 1.9169773412509448e-120, 4.8962631916290320e-126, 1.7774902414670824e-132, 1.7504813313443110e-140}, + {2.4899600964326729e-141, 2.6289723724457029e-133, 7.4819712112270593e-127, 3.0166712841087998e-121, 3.2960842775717504e-116, 1.3704328437984587e-111, 2.6646059822111242e-107, 2.7792048294771247e-103, 1.7138296773072837e-99, 6.7162796391421331e-96, 1.7680841960979503e-92, 3.2668637275061076e-89, 4.3893173430676675e-86, 4.4153132623096831e-83, 3.4072707861884866e-80, 2.0591798858084703e-77, 9.9197548192127678e-75, 3.8678293788234498e-72, 1.2370523208691571e-69, 3.2836627133699974e-67, 7.3095353354239491e-65, 1.3772094187709065e-62, 2.2145636638702121e-60, 3.0619202170369175e-58, 3.6647661995973869e-56, 3.8203649763779160e-54, 3.4881351774083147e-52, 2.8036775536310555e-50, 1.9931613619579773e-48, 1.2586567146117010e-46, 7.0884044215753474e-45, 3.5732404426806206e-43, 1.6178126120857497e-41, 6.5996510428781978e-40, 2.4328759328467202e-38, 8.1267763573550637e-37, 2.4662127220461684e-35, 6.8155261813509929e-34, 1.7191030917484011e-32, 3.9660195668953352e-31, 8.3853154776461475e-30, 1.6278171201629848e-28, 2.9065390998814081e-27, 4.7813433896247487e-26, 7.2578010107444783e-25, 1.0180797851831930e-23, 1.3215548441960911e-22, 
1.5895976396503745e-21, 1.7738996763753002e-20, 1.8387513781865510e-19, 1.7723693479759404e-18, 1.5903120371435548e-17, 1.3296671955521453e-16, 1.0369288823537636e-15, 7.5490426637877302e-15, 5.1350286901144194e-14, 3.2662837995375004e-13, 1.9442795471244843e-12, 1.0838609818985724e-11, 5.6623712315092519e-11, 2.7740673531720516e-10, 1.2752593807305187e-09, 5.5042412134787782e-09, 2.2317961616833082e-08, 8.5054483756057923e-08, 3.0481775749836127e-07, 1.0277468837741270e-06, 3.2615617411254204e-06, 9.7462566160282362e-06, 2.7434125933222057e-05, 7.2768434100256330e-05, 1.8194526534270113e-04, 4.2896461357756435e-04, 9.5392124328337667e-04, 2.0013900802573659e-03, 3.9626676572752555e-03, 7.4059308800968815e-03, 1.3067672819155697e-02, 2.1773347066246198e-02, 3.4263651242794235e-02, 5.0931713552683139e-02, 7.1522962430052642e-02, 9.4897195996236208e-02, 1.1897395918444152e-01, 1.4095256578997853e-01, 1.5781215945203400e-01, 1.6698222073712451e-01, 1.6698222073712451e-01, 1.5781215945203400e-01, 1.4095256578997853e-01, 1.1897395918444152e-01, 9.4897195996236208e-02, 7.1522962430052642e-02, 5.0931713552683139e-02, 3.4263651242794235e-02, 2.1773347066246198e-02, 1.3067672819155697e-02, 7.4059308800968815e-03, 3.9626676572752555e-03, 2.0013900802573659e-03, 9.5392124328337667e-04, 4.2896461357756435e-04, 1.8194526534270113e-04, 7.2768434100256330e-05, 2.7434125933222057e-05, 9.7462566160282362e-06, 3.2615617411254204e-06, 1.0277468837741270e-06, 3.0481775749836127e-07, 8.5054483756057923e-08, 2.2317961616833082e-08, 5.5042412134787782e-09, 1.2752593807305187e-09, 2.7740673531720516e-10, 5.6623712315092519e-11, 1.0838609818985724e-11, 1.9442795471244843e-12, 3.2662837995375004e-13, 5.1350286901144194e-14, 7.5490426637877302e-15, 1.0369288823537636e-15, 1.3296671955521453e-16, 1.5903120371435548e-17, 1.7723693479759404e-18, 1.8387513781865510e-19, 1.7738996763753002e-20, 1.5895976396503745e-21, 1.3215548441960911e-22, 1.0180797851831930e-23, 7.2578010107444783e-25, 4.7813433896247487e-26, 2.9065390998814081e-27, 1.6278171201629848e-28, 8.3853154776461475e-30, 3.9660195668953352e-31, 1.7191030917484011e-32, 6.8155261813509929e-34, 2.4662127220461684e-35, 8.1267763573550637e-37, 2.4328759328467202e-38, 6.5996510428781978e-40, 1.6178126120857497e-41, 3.5732404426806206e-43, 7.0884044215753474e-45, 1.2586567146117010e-46, 1.9931613619579773e-48, 2.8036775536310555e-50, 3.4881351774083147e-52, 3.8203649763779160e-54, 3.6647661995973869e-56, 3.0619202170369175e-58, 2.2145636638702121e-60, 1.3772094187709065e-62, 7.3095353354239491e-65, 3.2836627133699974e-67, 1.2370523208691571e-69, 3.8678293788234498e-72, 9.9197548192127678e-75, 2.0591798858084703e-77, 3.4072707861884866e-80, 4.4153132623096831e-83, 4.3893173430676675e-86, 3.2668637275061076e-89, 1.7680841960979503e-92, 6.7162796391421331e-96, 1.7138296773072837e-99, 2.7792048294771247e-103, 2.6646059822111242e-107, 1.3704328437984587e-111, 3.2960842775717504e-116, 3.0166712841087998e-121, 7.4819712112270593e-127, 2.6289723724457029e-133, 2.4899600964326729e-141}, + {3.5411506471568517e-142, 3.8870007298929211e-134, 1.1427720218874746e-127, 4.7443699591186030e-122, 5.3267445942905176e-117, 2.2724999081578913e-112, 4.5288568274049433e-108, 4.8374430266711206e-104, 3.0528486739600850e-100, 1.2236703652880107e-96, 3.2933121419749483e-93, 6.2184654173606399e-90, 8.5354199357192903e-87, 8.7687858501113383e-84, 6.9091769993084969e-81, 4.2624770595798820e-78, 2.0957319191157142e-75, 8.3387106103658512e-73, 2.7211703761612434e-70, 7.3690229255476969e-68, 
1.6733234438392451e-65, 3.2158114928338939e-63, 5.2740829263623727e-61, 7.4369351842678165e-59, 9.0774918435864705e-57, 9.6499842697585386e-55, 8.9847110119503318e-53, 7.3640839458387673e-51, 5.3383547450946618e-49, 3.4375135972727117e-47, 1.9740530949677369e-45, 1.0147285474798106e-43, 4.6848975802870826e-42, 1.9488875555428468e-40, 7.3264228854645894e-39, 2.4958088668373114e-37, 7.7243461606513683e-36, 2.1771495752866995e-34, 5.6010600122601367e-33, 1.3180336788235122e-31, 2.8426307813064235e-30, 5.6294266388452469e-29, 1.0254685909108741e-27, 1.7211387969394399e-26, 2.6657834170950206e-25, 3.8158515821307025e-24, 5.0550170089602786e-23, 6.2057205211001172e-22, 7.0687611828654942e-21, 7.4798012691484967e-20, 7.3607060012414608e-19, 6.7436191474716113e-18, 5.7576889770247240e-17, 4.5856377569387674e-16, 3.4098950541891060e-15, 2.3694365058853221e-14, 1.5398051925046748e-13, 9.3656673457701801e-13, 5.3355866239683074e-12, 2.8490354792258560e-11, 1.4268284555926340e-10, 6.7061742381683788e-10, 2.9598086074993327e-09, 1.2273853303242205e-08, 4.7847173987299635e-08, 1.7543109823449902e-07, 6.0525229343483686e-07, 1.9657973702696829e-06, 6.0130527402628209e-06, 1.7329090398329860e-05, 4.7069644542722322e-05, 1.2054246571768105e-04, 2.9114733811626920e-04, 6.6342074803749660e-04, 1.4265570512383724e-03, 2.8955046143597550e-03, 5.5487673845870103e-03, 1.0041513921437970e-02, 1.7163936516514416e-02, 2.7715692333499325e-02, 4.2285689519455438e-02, 6.0964838880386028e-02, 8.3068258519182064e-02, 1.0698027879398084e-01, 1.3023286590097533e-01, 1.4986945253577164e-01, 1.6304250159157527e-01, 1.6768547028079969e-01, 1.6304250159157527e-01, 1.4986945253577164e-01, 1.3023286590097533e-01, 1.0698027879398084e-01, 8.3068258519182064e-02, 6.0964838880386028e-02, 4.2285689519455438e-02, 2.7715692333499325e-02, 1.7163936516514416e-02, 1.0041513921437970e-02, 5.5487673845870103e-03, 2.8955046143597550e-03, 1.4265570512383724e-03, 6.6342074803749660e-04, 2.9114733811626920e-04, 1.2054246571768105e-04, 4.7069644542722322e-05, 1.7329090398329860e-05, 6.0130527402628209e-06, 1.9657973702696829e-06, 6.0525229343483686e-07, 1.7543109823449902e-07, 4.7847173987299635e-08, 1.2273853303242205e-08, 2.9598086074993327e-09, 6.7061742381683788e-10, 1.4268284555926340e-10, 2.8490354792258560e-11, 5.3355866239683074e-12, 9.3656673457701801e-13, 1.5398051925046748e-13, 2.3694365058853221e-14, 3.4098950541891060e-15, 4.5856377569387674e-16, 5.7576889770247240e-17, 6.7436191474716113e-18, 7.3607060012414608e-19, 7.4798012691484967e-20, 7.0687611828654942e-21, 6.2057205211001172e-22, 5.0550170089602786e-23, 3.8158515821307025e-24, 2.6657834170950206e-25, 1.7211387969394399e-26, 1.0254685909108741e-27, 5.6294266388452469e-29, 2.8426307813064235e-30, 1.3180336788235122e-31, 5.6010600122601367e-33, 2.1771495752866995e-34, 7.7243461606513683e-36, 2.4958088668373114e-37, 7.3264228854645894e-39, 1.9488875555428468e-40, 4.6848975802870826e-42, 1.0147285474798106e-43, 1.9740530949677369e-45, 3.4375135972727117e-47, 5.3383547450946618e-49, 7.3640839458387673e-51, 8.9847110119503318e-53, 9.6499842697585386e-55, 9.0774918435864705e-57, 7.4369351842678165e-59, 5.2740829263623727e-61, 3.2158114928338939e-63, 1.6733234438392451e-65, 7.3690229255476969e-68, 2.7211703761612434e-70, 8.3387106103658512e-73, 2.0957319191157142e-75, 4.2624770595798820e-78, 6.9091769993084969e-81, 8.7687858501113383e-84, 8.5354199357192903e-87, 6.2184654173606399e-90, 3.2933121419749483e-93, 1.2236703652880107e-96, 3.0528486739600850e-100, 4.8374430266711206e-104, 
4.5288568274049433e-108, 2.2724999081578913e-112, 5.3267445942905176e-117, 4.7443699591186030e-122, 1.1427720218874746e-127, 3.8870007298929211e-134, 3.5411506471568517e-142}, + {5.0351722883464185e-143, 5.7450587562745078e-135, 1.7446064596498268e-128, 7.4571213334341600e-123, 8.6023715273839571e-118, 3.7652628313310755e-113, 7.6903016009308287e-109, 8.4113401468636500e-105, 5.4319085709594968e-101, 2.2267204629384821e-97, 6.1261103396048387e-94, 1.1819905626126024e-90, 1.6572504238354695e-87, 1.7386395392283553e-84, 1.3986045504550338e-81, 8.8071523423621673e-79, 4.4190897787947332e-76, 1.7941108043227645e-73, 5.9730812278268291e-71, 1.6500317832456593e-68, 3.8217018216862523e-66, 7.4907171097897077e-64, 1.2528614722567282e-61, 1.8015494058808057e-59, 2.2422904341536794e-57, 2.4305731255545130e-55, 2.3074311289682700e-53, 1.9283073358698602e-51, 1.4252524566058730e-49, 9.3573522665893741e-48, 5.4788838446141128e-46, 2.8715169045497604e-44, 1.3517494625509147e-42, 5.7335891630321996e-41, 2.1977946695381225e-39, 7.6344180491439718e-38, 2.4094190488253342e-36, 6.9253812367067252e-35, 1.8169855856152196e-33, 4.3607057687085942e-32, 9.5923503135464682e-31, 1.9376278309334056e-29, 3.6004726047782310e-28, 6.1647472746508888e-27, 9.7413623207618448e-26, 1.4227116244089339e-24, 1.9231607886931175e-23, 2.4093040723082529e-22, 2.8008499497491982e-21, 3.0250061661788475e-20, 3.0387132211276073e-19, 2.8421271873321773e-18, 2.4775756264214477e-17, 2.0149141590187015e-16, 1.5301273364381654e-15, 1.0859626897815736e-14, 7.2089869379341516e-14, 4.4796392616415365e-13, 2.6076028723419556e-12, 1.4228958097317545e-11, 7.2832614256562299e-11, 3.4992319867238825e-10, 1.5789636824184370e-09, 6.6952823181138107e-09, 2.6692828097969734e-08, 1.0010785492509061e-07, 3.5334287745169781e-07, 1.1742867585331092e-06, 3.6760831094679487e-06, 1.0844326360928105e-05, 3.0156999060878010e-05, 7.9084957328144794e-05, 1.9564188997236542e-04, 4.5669297388226392e-04, 1.0062461311647175e-03, 2.0932217887326024e-03, 4.1120856339170011e-03, 7.6302877130003874e-03, 1.3376390072548504e-02, 2.2158233552618596e-02, 3.4689673239772116e-02, 5.1332999819585050e-02, 7.1809001831171768e-02, 9.4971530833224349e-02, 1.1876238489882553e-01, 1.4043201287954626e-01, 1.5702837449220031e-01, 1.6604672219204664e-01, 1.6604672219204664e-01, 1.5702837449220031e-01, 1.4043201287954626e-01, 1.1876238489882553e-01, 9.4971530833224349e-02, 7.1809001831171768e-02, 5.1332999819585050e-02, 3.4689673239772116e-02, 2.2158233552618596e-02, 1.3376390072548504e-02, 7.6302877130003874e-03, 4.1120856339170011e-03, 2.0932217887326024e-03, 1.0062461311647175e-03, 4.5669297388226392e-04, 1.9564188997236542e-04, 7.9084957328144794e-05, 3.0156999060878010e-05, 1.0844326360928105e-05, 3.6760831094679487e-06, 1.1742867585331092e-06, 3.5334287745169781e-07, 1.0010785492509061e-07, 2.6692828097969734e-08, 6.6952823181138107e-09, 1.5789636824184370e-09, 3.4992319867238825e-10, 7.2832614256562299e-11, 1.4228958097317545e-11, 2.6076028723419556e-12, 4.4796392616415365e-13, 7.2089869379341516e-14, 1.0859626897815736e-14, 1.5301273364381654e-15, 2.0149141590187015e-16, 2.4775756264214477e-17, 2.8421271873321773e-18, 3.0387132211276073e-19, 3.0250061661788475e-20, 2.8008499497491982e-21, 2.4093040723082529e-22, 1.9231607886931175e-23, 1.4227116244089339e-24, 9.7413623207618448e-26, 6.1647472746508888e-27, 3.6004726047782310e-28, 1.9376278309334056e-29, 9.5923503135464682e-31, 4.3607057687085942e-32, 1.8169855856152196e-33, 6.9253812367067252e-35, 2.4094190488253342e-36, 
7.6344180491439718e-38, 2.1977946695381225e-39, 5.7335891630321996e-41, 1.3517494625509147e-42, 2.8715169045497604e-44, 5.4788838446141128e-46, 9.3573522665893741e-48, 1.4252524566058730e-49, 1.9283073358698602e-51, 2.3074311289682700e-53, 2.4305731255545130e-55, 2.2422904341536794e-57, 1.8015494058808057e-59, 1.2528614722567282e-61, 7.4907171097897077e-64, 3.8217018216862523e-66, 1.6500317832456593e-68, 5.9730812278268291e-71, 1.7941108043227645e-73, 4.4190897787947332e-76, 8.8071523423621673e-79, 1.3986045504550338e-81, 1.7386395392283553e-84, 1.6572504238354695e-87, 1.1819905626126024e-90, 6.1261103396048387e-94, 2.2267204629384821e-97, 5.4319085709594968e-101, 8.4113401468636500e-105, 7.6903016009308287e-109, 3.7652628313310755e-113, 8.6023715273839571e-118, 7.4571213334341600e-123, 1.7446064596498268e-128, 5.7450587562745078e-135, 5.0351722883464185e-143}, + {7.1581854941568756e-144, 8.4884228757564859e-136, 2.6621446843016668e-129, 1.1714088250630149e-123, 1.3882583385637704e-118, 6.2335512554508696e-114, 1.3046719672955675e-109, 1.4610767163662094e-105, 9.6541517663049285e-102, 4.0470443036783296e-98, 1.1380585129708444e-94, 2.2435189643567695e-91, 3.2128730776926215e-88, 3.4417484237908715e-85, 2.8263134922810747e-82, 1.8164480123517544e-79, 9.3003877258546557e-77, 3.8523675927861067e-74, 1.3083561453477787e-71, 3.6865159997894300e-69, 8.7082600252251548e-67, 1.7406436581415539e-64, 2.9687147399709626e-62, 4.3527424404558593e-60, 5.5237929617045550e-58, 6.1047069753060061e-56, 5.9085581038387938e-54, 5.0340269517910206e-52, 3.7932466358096469e-50, 2.5389205160417814e-48, 1.5155318342592620e-46, 8.0977464419800098e-45, 3.8862848936787104e-43, 1.6805797332715199e-41, 6.5678744364293848e-40, 2.3261212178440308e-38, 7.4851978878895327e-37, 2.1937508830604144e-35, 5.8690540840121578e-34, 1.4363784560339465e-32, 3.2222344142269078e-31, 6.6381799678904965e-30, 1.2580944858403407e-28, 2.1972252658847511e-27, 3.5417421260548565e-26, 5.2769903994937601e-25, 7.2776858868650826e-24, 9.3028399771791796e-23, 1.1035696761428192e-21, 1.2163644574829813e-20, 1.2470907989461904e-19, 1.1906056130964038e-18, 1.0595326417660698e-17, 8.7974278780040251e-17, 6.8216435985574468e-16, 4.9441425800883030e-15, 3.3521165632319237e-14, 2.1277112806959519e-13, 1.2653006101080342e-12, 7.0545233223348931e-12, 3.6899828977436854e-11, 1.8119180456089166e-10, 8.3573910554417999e-10, 3.6229939665804831e-09, 1.4769397910223943e-08, 5.6647039057714555e-08, 2.0451241808924649e-07, 6.9532414269249713e-07, 2.2272343809174102e-06, 6.7240427138149140e-06, 1.9140150173987354e-05, 5.1388360015799239e-05, 1.3017662891537081e-04, 3.1123241184750341e-04, 7.0249863037702334e-04, 1.4973772242764213e-03, 3.0147364453737358e-03, 5.7345384962421463e-03, 1.0307863786080675e-02, 1.7512238829626020e-02, 2.8124900042797892e-02, 4.2705327076181680e-02, 6.1315790894116422e-02, 8.3255213529058528e-02, 1.0691562889960281e-01, 1.2986607877253428e-01, 1.4921123248756085e-01, 1.6217258820124303e-01, 1.6673809474249007e-01, 1.6217258820124303e-01, 1.4921123248756085e-01, 1.2986607877253428e-01, 1.0691562889960281e-01, 8.3255213529058528e-02, 6.1315790894116422e-02, 4.2705327076181680e-02, 2.8124900042797892e-02, 1.7512238829626020e-02, 1.0307863786080675e-02, 5.7345384962421463e-03, 3.0147364453737358e-03, 1.4973772242764213e-03, 7.0249863037702334e-04, 3.1123241184750341e-04, 1.3017662891537081e-04, 5.1388360015799239e-05, 1.9140150173987354e-05, 6.7240427138149140e-06, 2.2272343809174102e-06, 6.9532414269249713e-07, 2.0451241808924649e-07, 
5.6647039057714555e-08, 1.4769397910223943e-08, 3.6229939665804831e-09, 8.3573910554417999e-10, 1.8119180456089166e-10, 3.6899828977436854e-11, 7.0545233223348931e-12, 1.2653006101080342e-12, 2.1277112806959519e-13, 3.3521165632319237e-14, 4.9441425800883030e-15, 6.8216435985574468e-16, 8.7974278780040251e-17, 1.0595326417660698e-17, 1.1906056130964038e-18, 1.2470907989461904e-19, 1.2163644574829813e-20, 1.1035696761428192e-21, 9.3028399771791796e-23, 7.2776858868650826e-24, 5.2769903994937601e-25, 3.5417421260548565e-26, 2.1972252658847511e-27, 1.2580944858403407e-28, 6.6381799678904965e-30, 3.2222344142269078e-31, 1.4363784560339465e-32, 5.8690540840121578e-34, 2.1937508830604144e-35, 7.4851978878895327e-37, 2.3261212178440308e-38, 6.5678744364293848e-40, 1.6805797332715199e-41, 3.8862848936787104e-43, 8.0977464419800098e-45, 1.5155318342592620e-46, 2.5389205160417814e-48, 3.7932466358096469e-50, 5.0340269517910206e-52, 5.9085581038387938e-54, 6.1047069753060061e-56, 5.5237929617045550e-58, 4.3527424404558593e-60, 2.9687147399709626e-62, 1.7406436581415539e-64, 8.7082600252251548e-67, 3.6865159997894300e-69, 1.3083561453477787e-71, 3.8523675927861067e-74, 9.3003877258546557e-77, 1.8164480123517544e-79, 2.8263134922810747e-82, 3.4417484237908715e-85, 3.2128730776926215e-88, 2.2435189643567695e-91, 1.1380585129708444e-94, 4.0470443036783296e-98, 9.6541517663049285e-102, 1.4610767163662094e-105, 1.3046719672955675e-109, 6.2335512554508696e-114, 1.3882583385637704e-118, 1.1714088250630149e-123, 2.6621446843016668e-129, 8.4884228757564859e-136, 7.1581854941568756e-144}, + {1.0174451907169722e-144, 1.2537577338109092e-136, 4.0603558490313254e-130, 1.8390471781108747e-124, 2.2388300710588648e-119, 1.0311647067706544e-114, 2.2113953581884588e-110, 2.5353839157043305e-106, 1.7139381704010506e-102, 7.3466025769339733e-99, 2.1114364896149106e-95, 4.2524253414775605e-92, 6.2193919080044227e-89, 6.8022938891305936e-86, 5.7017766571657716e-83, 3.7396644213938176e-80, 1.9536604993085239e-77, 8.2555014673420022e-75, 2.8598818663126844e-72, 8.2184808615623598e-70, 1.9797683901166225e-67, 4.0351646934878695e-65, 7.0170567960964978e-63, 1.0489546649192595e-60, 1.3571109461554121e-58, 1.5290034639304410e-56, 1.5086081632928153e-54, 1.3102411068553504e-52, 1.0064229300129089e-50, 6.8667092650473948e-49, 4.1782480984927090e-47, 2.2757544084791531e-45, 1.1133537204234619e-43, 4.9079810854674800e-42, 1.9553490062333371e-40, 7.0599294307578851e-39, 2.3160882385061203e-37, 6.9205465629360576e-36, 1.8877397396823247e-34, 4.7107012996283542e-33, 1.0775571184918142e-31, 2.2637292363656048e-30, 4.3753093336081546e-29, 7.7932715085212503e-28, 1.2812781788064907e-26, 1.9472758171895140e-25, 2.7395824646129918e-24, 3.5726754325441841e-23, 4.3241778048815671e-22, 4.8633316486835449e-21, 5.0883502730989637e-20, 4.9579227287808755e-19, 4.5034462009468205e-18, 3.8170938055666007e-17, 3.0217745531718828e-16, 2.2362025898948135e-15, 1.5482439389379285e-14, 1.0036629079256339e-13, 6.0964973515510935e-13, 3.4723560883860410e-12, 1.8557142556642682e-11, 9.3114559517381624e-11, 4.3894092072618305e-10, 1.9450249932898343e-09, 8.1060721426662619e-09, 3.1789582778002615e-08, 1.1737053608502818e-07, 4.0816183881635333e-07, 1.3374965511498204e-06, 4.1315876560548306e-06, 1.2035714382322826e-05, 3.3076061707490808e-05, 8.5780643722862892e-05, 2.1000778733594350e-04, 4.8548944122296688e-04, 1.0600860254999084e-03, 2.1869011800911028e-03, 4.2632921737953853e-03, 7.8556410099729287e-03, 1.3684318450535705e-02, 2.2539589851423701e-02, 
3.5108996740976929e-02, 5.1725036108245200e-02, 7.2085145846744095e-02, 9.5038220322125194e-02, 1.1854839415593066e-01, 1.3991610657942335e-01, 1.5625599948674623e-01, 1.6512677086080088e-01, 1.6512677086080088e-01, 1.5625599948674623e-01, 1.3991610657942335e-01, 1.1854839415593066e-01, 9.5038220322125194e-02, 7.2085145846744095e-02, 5.1725036108245200e-02, 3.5108996740976929e-02, 2.2539589851423701e-02, 1.3684318450535705e-02, 7.8556410099729287e-03, 4.2632921737953853e-03, 2.1869011800911028e-03, 1.0600860254999084e-03, 4.8548944122296688e-04, 2.1000778733594350e-04, 8.5780643722862892e-05, 3.3076061707490808e-05, 1.2035714382322826e-05, 4.1315876560548306e-06, 1.3374965511498204e-06, 4.0816183881635333e-07, 1.1737053608502818e-07, 3.1789582778002615e-08, 8.1060721426662619e-09, 1.9450249932898343e-09, 4.3894092072618305e-10, 9.3114559517381624e-11, 1.8557142556642682e-11, 3.4723560883860410e-12, 6.0964973515510935e-13, 1.0036629079256339e-13, 1.5482439389379285e-14, 2.2362025898948135e-15, 3.0217745531718828e-16, 3.8170938055666007e-17, 4.5034462009468205e-18, 4.9579227287808755e-19, 5.0883502730989637e-20, 4.8633316486835449e-21, 4.3241778048815671e-22, 3.5726754325441841e-23, 2.7395824646129918e-24, 1.9472758171895140e-25, 1.2812781788064907e-26, 7.7932715085212503e-28, 4.3753093336081546e-29, 2.2637292363656048e-30, 1.0775571184918142e-31, 4.7107012996283542e-33, 1.8877397396823247e-34, 6.9205465629360576e-36, 2.3160882385061203e-37, 7.0599294307578851e-39, 1.9553490062333371e-40, 4.9079810854674800e-42, 1.1133537204234619e-43, 2.2757544084791531e-45, 4.1782480984927090e-47, 6.8667092650473948e-49, 1.0064229300129089e-50, 1.3102411068553504e-52, 1.5086081632928153e-54, 1.5290034639304410e-56, 1.3571109461554121e-58, 1.0489546649192595e-60, 7.0170567960964978e-63, 4.0351646934878695e-65, 1.9797683901166225e-67, 8.2184808615623598e-70, 2.8598818663126844e-72, 8.2555014673420022e-75, 1.9536604993085239e-77, 3.7396644213938176e-80, 5.7017766571657716e-83, 6.8022938891305936e-86, 6.2193919080044227e-89, 4.2524253414775605e-92, 2.1114364896149106e-95, 7.3466025769339733e-99, 1.7139381704010506e-102, 2.5353839157043305e-106, 2.2113953581884588e-110, 1.0311647067706544e-114, 2.2388300710588648e-119, 1.8390471781108747e-124, 4.0603558490313254e-130, 1.2537577338109092e-136, 1.0174451907169722e-144}, + {1.4459034710264201e-145, 1.8512101069007797e-137, 6.1900866973604703e-131, 2.8855379169821182e-125, 3.6080591054052492e-120, 1.7044183943684018e-115, 3.7449189704395232e-111, 4.3952304904350605e-107, 3.0394872358078098e-103, 1.3320382883615053e-99, 3.9122879439382170e-96, 8.0489834136101609e-93, 1.2021477111483726e-89, 1.3422872316499288e-86, 1.1483443011896613e-83, 7.6855073087396857e-81, 4.0962408560880496e-78, 1.7656535944547459e-75, 6.2384154226153306e-73, 1.8282206685181686e-70, 4.4907236877039048e-68, 9.3322988518723512e-66, 1.6545283606525458e-63, 2.5213872637605977e-61, 3.3253581541518404e-59, 3.8190282366232640e-57, 3.8408503131563961e-55, 3.4001469878023392e-53, 2.6620432033111890e-51, 1.8512591668862315e-49, 1.1481424339931328e-47, 6.3739969672637270e-46, 3.1784051437766470e-44, 1.4281570152975713e-42, 5.7996825799748193e-41, 2.1345161706875471e-39, 7.1381873166360108e-38, 2.1743217660681791e-36, 6.0463752750772134e-35, 1.5382579845230378e-33, 3.5875473238148687e-32, 7.6845885182041777e-31, 1.5145049680016594e-29, 2.7509103704580098e-28, 4.6123845343733745e-27, 7.1493666161606643e-26, 1.0259279798609206e-24, 1.3647547675188007e-23, 1.6851209810753113e-22, 1.9336042594232875e-21, 
2.0642309881609900e-20, 2.0524462477993889e-19, 1.9026226109394554e-18, 1.6459732365754106e-17, 1.3300948716108394e-16, 1.0048759007103044e-15, 7.1035141604691271e-15, 4.7022739273699201e-14, 2.9170375040074331e-13, 1.6970108678299655e-12, 9.2646537879215591e-12, 4.7495690973542646e-11, 2.2878306073735174e-10, 1.0360690148079607e-09, 4.4135301064719171e-09, 1.7694607842914065e-08, 6.6798391472451806e-08, 2.3755404518235753e-07, 7.9619631902655087e-07, 2.5160446644029356e-06, 7.4993851915062359e-06, 2.1091170590509833e-05, 5.5987643740857493e-05, 1.4032641744066531e-04, 3.3217919994016825e-04, 7.4286891131823533e-04, 1.5698960784933606e-03, 3.1358280493826080e-03, 5.9217687653631513e-03, 1.0574395576910816e-02, 1.7858440641500769e-02, 2.8528992504062382e-02, 4.3116896847977884e-02, 6.1656983184778281e-02, 8.3433163844430863e-02, 1.0684575215294285e-01, 1.2950034028006346e-01, 1.4856115387707583e-01, 1.6131642239308974e-01, 1.6580659700649841e-01, 1.6131642239308974e-01, 1.4856115387707583e-01, 1.2950034028006346e-01, 1.0684575215294285e-01, 8.3433163844430863e-02, 6.1656983184778281e-02, 4.3116896847977884e-02, 2.8528992504062382e-02, 1.7858440641500769e-02, 1.0574395576910816e-02, 5.9217687653631513e-03, 3.1358280493826080e-03, 1.5698960784933606e-03, 7.4286891131823533e-04, 3.3217919994016825e-04, 1.4032641744066531e-04, 5.5987643740857493e-05, 2.1091170590509833e-05, 7.4993851915062359e-06, 2.5160446644029356e-06, 7.9619631902655087e-07, 2.3755404518235753e-07, 6.6798391472451806e-08, 1.7694607842914065e-08, 4.4135301064719171e-09, 1.0360690148079607e-09, 2.2878306073735174e-10, 4.7495690973542646e-11, 9.2646537879215591e-12, 1.6970108678299655e-12, 2.9170375040074331e-13, 4.7022739273699201e-14, 7.1035141604691271e-15, 1.0048759007103044e-15, 1.3300948716108394e-16, 1.6459732365754106e-17, 1.9026226109394554e-18, 2.0524462477993889e-19, 2.0642309881609900e-20, 1.9336042594232875e-21, 1.6851209810753113e-22, 1.3647547675188007e-23, 1.0259279798609206e-24, 7.1493666161606643e-26, 4.6123845343733745e-27, 2.7509103704580098e-28, 1.5145049680016594e-29, 7.6845885182041777e-31, 3.5875473238148687e-32, 1.5382579845230378e-33, 6.0463752750772134e-35, 2.1743217660681791e-36, 7.1381873166360108e-38, 2.1345161706875471e-39, 5.7996825799748193e-41, 1.4281570152975713e-42, 3.1784051437766470e-44, 6.3739969672637270e-46, 1.1481424339931328e-47, 1.8512591668862315e-49, 2.6620432033111890e-51, 3.4001469878023392e-53, 3.8408503131563961e-55, 3.8190282366232640e-57, 3.3253581541518404e-59, 2.5213872637605977e-61, 1.6545283606525458e-63, 9.3322988518723512e-66, 4.4907236877039048e-68, 1.8282206685181686e-70, 6.2384154226153306e-73, 1.7656535944547459e-75, 4.0962408560880496e-78, 7.6855073087396857e-81, 1.1483443011896613e-83, 1.3422872316499288e-86, 1.2021477111483726e-89, 8.0489834136101609e-93, 3.9122879439382170e-96, 1.3320382883615053e-99, 3.0394872358078098e-103, 4.3952304904350605e-107, 3.7449189704395232e-111, 1.7044183943684018e-115, 3.6080591054052492e-120, 2.8855379169821182e-125, 6.1900866973604703e-131, 1.8512101069007797e-137, 1.4459034710264201e-145}, + {2.0544167081187589e-146, 2.7324651628287789e-138, 9.4326009424734410e-132, 4.5249380509459828e-126, 5.8107301413051742e-121, 2.8150324044562284e-116, 6.3362635154258981e-112, 7.6118645291179184e-108, 5.3843632439536920e-104, 2.4123127079122074e-100, 7.2398333677694101e-97, 1.5214189335297059e-93, 2.3202235060376314e-90, 2.6445775673882037e-87, 2.3089447193822174e-84, 1.5767056174162126e-81, 8.5727195865402764e-79, 3.7689704357176965e-76, 
1.3580443028284028e-73, 4.0582349306406949e-71, 1.0163600799789738e-68, 2.1532917098122703e-66, 3.8916837271887350e-64, 6.0453772000204261e-62, 8.1267907500285513e-60, 9.5128584361969727e-58, 9.7509710005032432e-56, 8.7976987433650472e-54, 7.0198773756157181e-52, 4.9753094376640113e-50, 3.1447462107627518e-48, 1.7792627000157171e-46, 9.0423237558527988e-45, 4.1409133307449204e-43, 1.7138906209368488e-41, 6.4290783512879458e-40, 2.1913996777475932e-38, 6.8039020082108794e-37, 1.9286270378988431e-35, 5.0017461082503641e-34, 1.1891915619808499e-32, 2.5969337466904165e-31, 5.2182308893507914e-30, 9.6642798753295627e-29, 1.6523003694364310e-27, 2.6117599911695471e-26, 3.8222432521976105e-25, 5.1859356642906214e-24, 6.5314813585357555e-23, 7.6453095155047312e-22, 8.3267040963735343e-21, 8.4472663315088944e-20, 7.9904365646153140e-19, 7.0543936381041909e-18, 5.8181634606657441e-17, 4.4867423723460479e-16, 3.2378602233952898e-15, 2.1883227889949076e-14, 1.3861757281246388e-13, 8.2354944810071531e-13, 4.5921996541535561e-12, 2.4048711144413950e-11, 1.1835015186215515e-10, 5.4765134657351679e-10, 2.3841694011026308e-09, 9.7699965359482599e-09, 3.7704288545104245e-08, 1.3709740387891837e-07, 4.6989603841203290e-07, 1.5187614573110066e-06, 4.6308779212753262e-06, 1.3325507543076759e-05, 3.6199433343496547e-05, 9.2866446344988979e-05, 2.2505423008074610e-04, 5.1535932107027135e-04, 1.1154298676886345e-03, 2.2823888738018917e-03, 4.4162127170279562e-03, 8.0818888889421375e-03, 1.3991358954534535e-02, 2.2917370391076095e-02, 3.5521683059969993e-02, 5.2108016747135361e-02, 7.2351691768296089e-02, 9.5097571709914869e-02, 1.1833217723448959e-01, 1.3940481744965749e-01, 1.5549476479437641e-01, 1.6422194087055850e-01, 1.6422194087055850e-01, 1.5549476479437641e-01, 1.3940481744965749e-01, 1.1833217723448959e-01, 9.5097571709914869e-02, 7.2351691768296089e-02, 5.2108016747135361e-02, 3.5521683059969993e-02, 2.2917370391076095e-02, 1.3991358954534535e-02, 8.0818888889421375e-03, 4.4162127170279562e-03, 2.2823888738018917e-03, 1.1154298676886345e-03, 5.1535932107027135e-04, 2.2505423008074610e-04, 9.2866446344988979e-05, 3.6199433343496547e-05, 1.3325507543076759e-05, 4.6308779212753262e-06, 1.5187614573110066e-06, 4.6989603841203290e-07, 1.3709740387891837e-07, 3.7704288545104245e-08, 9.7699965359482599e-09, 2.3841694011026308e-09, 5.4765134657351679e-10, 1.1835015186215515e-10, 2.4048711144413950e-11, 4.5921996541535561e-12, 8.2354944810071531e-13, 1.3861757281246388e-13, 2.1883227889949076e-14, 3.2378602233952898e-15, 4.4867423723460479e-16, 5.8181634606657441e-17, 7.0543936381041909e-18, 7.9904365646153140e-19, 8.4472663315088944e-20, 8.3267040963735343e-21, 7.6453095155047312e-22, 6.5314813585357555e-23, 5.1859356642906214e-24, 3.8222432521976105e-25, 2.6117599911695471e-26, 1.6523003694364310e-27, 9.6642798753295627e-29, 5.2182308893507914e-30, 2.5969337466904165e-31, 1.1891915619808499e-32, 5.0017461082503641e-34, 1.9286270378988431e-35, 6.8039020082108794e-37, 2.1913996777475932e-38, 6.4290783512879458e-40, 1.7138906209368488e-41, 4.1409133307449204e-43, 9.0423237558527988e-45, 1.7792627000157171e-46, 3.1447462107627518e-48, 4.9753094376640113e-50, 7.0198773756157181e-52, 8.7976987433650472e-54, 9.7509710005032432e-56, 9.5128584361969727e-58, 8.1267907500285513e-60, 6.0453772000204261e-62, 3.8916837271887350e-64, 2.1532917098122703e-66, 1.0163600799789738e-68, 4.0582349306406949e-71, 1.3580443028284028e-73, 3.7689704357176965e-76, 8.5727195865402764e-79, 1.5767056174162126e-81, 2.3089447193822174e-84, 
2.6445775673882037e-87, 2.3202235060376314e-90, 1.5214189335297059e-93, 7.2398333677694101e-97, 2.4123127079122074e-100, 5.3843632439536920e-104, 7.6118645291179184e-108, 6.3362635154258981e-112, 2.8150324044562284e-116, 5.8107301413051742e-121, 4.5249380509459828e-126, 9.4326009424734410e-132, 2.7324651628287789e-138, 2.0544167081187589e-146}, + {2.9184983704075192e-147, 4.0319183713400766e-139, 1.4367134541050939e-132, 7.0917398018507910e-127, 9.3517988737392972e-122, 4.6457185244440549e-117, 1.0711309190325783e-112, 1.3169706905085093e-108, 9.5280026251390744e-105, 4.3635736981957750e-101, 1.3380638102406329e-97, 2.8718782241763001e-94, 4.4716757994553476e-91, 5.2022930902389893e-88, 4.6349150479031114e-85, 3.2290491038758581e-82, 1.7908398704126536e-79, 8.0297842302329272e-77, 2.9503683873426791e-74, 8.9893224354049310e-72, 2.2951885774902920e-69, 4.9569512961693677e-67, 9.1317952402850698e-65, 1.4458391823512865e-62, 1.9809319471106229e-60, 2.3631742826980710e-58, 2.4686029155016893e-56, 2.2697566491840546e-54, 1.8456028929462080e-52, 1.3329759770755586e-50, 8.5857720828783205e-49, 4.9502403838758848e-47, 2.5636716593985549e-45, 1.1964123882499316e-43, 5.0463692733981642e-42, 1.9291541526211061e-40, 6.7015535541500665e-39, 2.1206217889043141e-37, 6.1266403471398839e-36, 1.6195131761876436e-34, 3.9248683542722297e-33, 8.7371187009076405e-32, 1.7897414312928308e-30, 3.3792749075968173e-29, 5.8905961617051956e-28, 9.4940326558644130e-27, 1.4168234014649860e-25, 1.9603740051045881e-24, 2.5181065241794600e-23, 3.0063944491537587e-22, 3.3400431583218902e-21, 3.4567219838648987e-20, 3.3360382939472660e-19, 3.0052255828362505e-18, 2.5293356487116101e-17, 1.9906862214252033e-16, 1.4663260946610058e-15, 1.0116632951507467e-14, 6.5425658781468336e-14, 3.9689851927958321e-13, 2.2600968959480387e-12, 1.2088512678946103e-11, 6.0769364101383090e-11, 2.8728746426712136e-10, 1.2779384152123448e-09, 5.3517155996766336e-09, 2.1109723472766886e-08, 7.8466356162788103e-08, 2.7497279026009702e-07, 9.0883362098124687e-07, 2.8342636150206807e-06, 8.3429573893995674e-06, 2.3188718962635865e-05, 6.0877111145275987e-05, 1.5100325433613592e-04, 3.5399662151566394e-04, 7.8452960713547043e-04, 1.6440892137594167e-03, 3.2587225571780792e-03, 6.1103686178933300e-03, 1.0841005727093818e-02, 1.8202465449941120e-02, 2.8927974168057351e-02, 4.3520526497899684e-02, 6.1988666273243780e-02, 8.3602420563279559e-02, 1.0677090675896843e-01, 1.2913573606742534e-01, 1.4791906474448899e-01, 1.6047364667465450e-01, 1.6489053845950130e-01, 1.6047364667465450e-01, 1.4791906474448899e-01, 1.2913573606742534e-01, 1.0677090675896843e-01, 8.3602420563279559e-02, 6.1988666273243780e-02, 4.3520526497899684e-02, 2.8927974168057351e-02, 1.8202465449941120e-02, 1.0841005727093818e-02, 6.1103686178933300e-03, 3.2587225571780792e-03, 1.6440892137594167e-03, 7.8452960713547043e-04, 3.5399662151566394e-04, 1.5100325433613592e-04, 6.0877111145275987e-05, 2.3188718962635865e-05, 8.3429573893995674e-06, 2.8342636150206807e-06, 9.0883362098124687e-07, 2.7497279026009702e-07, 7.8466356162788103e-08, 2.1109723472766886e-08, 5.3517155996766336e-09, 1.2779384152123448e-09, 2.8728746426712136e-10, 6.0769364101383090e-11, 1.2088512678946103e-11, 2.2600968959480387e-12, 3.9689851927958321e-13, 6.5425658781468336e-14, 1.0116632951507467e-14, 1.4663260946610058e-15, 1.9906862214252033e-16, 2.5293356487116101e-17, 3.0052255828362505e-18, 3.3360382939472660e-19, 3.4567219838648987e-20, 3.3400431583218902e-21, 3.0063944491537587e-22, 2.5181065241794600e-23, 
1.9603740051045881e-24, 1.4168234014649860e-25, 9.4940326558644130e-27, 5.8905961617051956e-28, 3.3792749075968173e-29, 1.7897414312928308e-30, 8.7371187009076405e-32, 3.9248683542722297e-33, 1.6195131761876436e-34, 6.1266403471398839e-36, 2.1206217889043141e-37, 6.7015535541500665e-39, 1.9291541526211061e-40, 5.0463692733981642e-42, 1.1964123882499316e-43, 2.5636716593985549e-45, 4.9502403838758848e-47, 8.5857720828783205e-49, 1.3329759770755586e-50, 1.8456028929462080e-52, 2.2697566491840546e-54, 2.4686029155016893e-56, 2.3631742826980710e-58, 1.9809319471106229e-60, 1.4458391823512865e-62, 9.1317952402850698e-65, 4.9569512961693677e-67, 2.2951885774902920e-69, 8.9893224354049310e-72, 2.9503683873426791e-74, 8.0297842302329272e-77, 1.7908398704126536e-79, 3.2290491038758581e-82, 4.6349150479031114e-85, 5.2022930902389893e-88, 4.4716757994553476e-91, 2.8718782241763001e-94, 1.3380638102406329e-97, 4.3635736981957750e-101, 9.5280026251390744e-105, 1.3169706905085093e-108, 1.0711309190325783e-112, 4.6457185244440549e-117, 9.3517988737392972e-122, 7.0917398018507910e-127, 1.4367134541050939e-132, 4.0319183713400766e-139, 2.9184983704075192e-147}, + {4.1452696623483395e-148, 5.9474161464872244e-140, 2.1873318060197500e-133, 1.1108350634945189e-127, 1.5040760324474137e-122, 7.6610437292542439e-118, 1.8091479691129481e-113, 2.2763600768872421e-109, 1.6842534274181958e-105, 7.8840222434465455e-102, 2.4699093339298307e-98, 5.4137540405290719e-95, 8.6056883186768174e-92, 1.0218043358339510e-88, 9.2888951681828944e-86, 6.6016471696136248e-83, 3.7342898142071596e-80, 1.7074874775824880e-77, 6.3969105456232966e-75, 1.9870439601235429e-72, 5.1717638304628555e-70, 1.1385038568984282e-67, 2.1376722221415430e-65, 3.4493784123174178e-63, 4.8161711078239858e-61, 5.8549015274027961e-59, 6.2323324068554731e-57, 5.8390470979220446e-55, 4.8378847700931107e-53, 3.5603170842517748e-51, 2.3366431061277383e-49, 1.3727341637645679e-47, 7.2439067732577147e-46, 3.4446682433867826e-44, 1.4805046972403839e-42, 5.7672996866961233e-41, 2.0415908964489771e-39, 6.5835338360421613e-38, 1.9383756218147382e-36, 5.2220236908204409e-35, 1.2898503021552704e-33, 2.9266129491084507e-32, 6.1107571504811921e-31, 1.1761496941464036e-29, 2.0900714057742614e-28, 3.4343583117038137e-27, 5.2255927032642325e-26, 7.3725440555518383e-25, 9.6570947115181529e-24, 1.1758429681040065e-22, 1.3323707929307384e-21, 1.4065250811244644e-20, 1.3847330443129062e-19, 1.2726463913998945e-18, 1.0928940877684042e-17, 8.7773275665944823e-17, 6.5982162774306465e-16, 4.6464228722152545e-15, 3.0673938466320995e-14, 1.8997359653204635e-13, 1.1045578960219544e-12, 6.0330789028505752e-12, 3.0975214481618230e-11, 1.4957890384694261e-10, 6.7975116544323199e-10, 2.9085970464514601e-09, 1.1724370655087347e-08, 4.4542518892262074e-08, 1.5956404054013332e-07, 5.3920679976732085e-07, 1.7195329738679880e-06, 5.1768455512555845e-06, 1.4718871925099586e-05, 3.9535219175273295e-05, 1.0035307023681430e-04, 2.4079185627092926e-04, 5.4630685564139225e-04, 1.1722653593393563e-03, 2.3796447518820959e-03, 4.5707734636933712e-03, 8.3089325331250057e-03, 1.4297417780659441e-02, 2.3291535150151157e-02, 3.5927796289914674e-02, 5.2482133056536061e-02, 7.2608927339705281e-02, 9.5149879125439124e-02, 1.1811391373984721e-01, 1.3889811387154746e-01, 1.5474440942350939e-01, 1.6333182250163769e-01, 1.6333182250163769e-01, 1.5474440942350939e-01, 1.3889811387154746e-01, 1.1811391373984721e-01, 9.5149879125439124e-02, 7.2608927339705281e-02, 5.2482133056536061e-02, 3.5927796289914674e-02, 
2.3291535150151157e-02, 1.4297417780659441e-02, 8.3089325331250057e-03, 4.5707734636933712e-03, 2.3796447518820959e-03, 1.1722653593393563e-03, 5.4630685564139225e-04, 2.4079185627092926e-04, 1.0035307023681430e-04, 3.9535219175273295e-05, 1.4718871925099586e-05, 5.1768455512555845e-06, 1.7195329738679880e-06, 5.3920679976732085e-07, 1.5956404054013332e-07, 4.4542518892262074e-08, 1.1724370655087347e-08, 2.9085970464514601e-09, 6.7975116544323199e-10, 1.4957890384694261e-10, 3.0975214481618230e-11, 6.0330789028505752e-12, 1.1045578960219544e-12, 1.8997359653204635e-13, 3.0673938466320995e-14, 4.6464228722152545e-15, 6.5982162774306465e-16, 8.7773275665944823e-17, 1.0928940877684042e-17, 1.2726463913998945e-18, 1.3847330443129062e-19, 1.4065250811244644e-20, 1.3323707929307384e-21, 1.1758429681040065e-22, 9.6570947115181529e-24, 7.3725440555518383e-25, 5.2255927032642325e-26, 3.4343583117038137e-27, 2.0900714057742614e-28, 1.1761496941464036e-29, 6.1107571504811921e-31, 2.9266129491084507e-32, 1.2898503021552704e-33, 5.2220236908204409e-35, 1.9383756218147382e-36, 6.5835338360421613e-38, 2.0415908964489771e-39, 5.7672996866961233e-41, 1.4805046972403839e-42, 3.4446682433867826e-44, 7.2439067732577147e-46, 1.3727341637645679e-47, 2.3366431061277383e-49, 3.5603170842517748e-51, 4.8378847700931107e-53, 5.8390470979220446e-55, 6.2323324068554731e-57, 5.8549015274027961e-59, 4.8161711078239858e-61, 3.4493784123174178e-63, 2.1376722221415430e-65, 1.1385038568984282e-67, 5.1717638304628555e-70, 1.9870439601235429e-72, 6.3969105456232966e-75, 1.7074874775824880e-77, 3.7342898142071596e-80, 6.6016471696136248e-83, 9.2888951681828944e-86, 1.0218043358339510e-88, 8.6056883186768174e-92, 5.4137540405290719e-95, 2.4699093339298307e-98, 7.8840222434465455e-102, 1.6842534274181958e-105, 2.2763600768872421e-109, 1.8091479691129481e-113, 7.6610437292542439e-118, 1.5040760324474137e-122, 1.1108350634945189e-127, 2.1873318060197500e-133, 5.9474161464872244e-140, 4.1452696623483395e-148}, + {5.8866637956879832e-149, 8.7701238593349446e-141, 3.3286398376690490e-134, 1.7390228685740816e-128, 2.4174491971248305e-123, 1.2623847464849609e-118, 3.0530327446766110e-114, 3.9308789125198654e-110, 2.9740999490947605e-106, 1.4228368487505154e-102, 4.5535119727458115e-99, 1.0191828364399763e-95, 1.6537960518565174e-92, 2.0039262792917714e-89, 1.8586066284522124e-86, 1.3473832322197353e-83, 7.7728521820677718e-81, 3.6240329528761601e-78, 1.3842194496021173e-75, 4.3831680288373217e-73, 1.1628357927637541e-70, 2.6089921702855559e-68, 4.9923292649319471e-66, 8.2091330064948600e-64, 1.1679600204323236e-61, 1.4467547750792869e-59, 1.5691297806630218e-57, 1.4978576876165396e-55, 1.2644313857762022e-53, 9.4805533849020122e-52, 6.3392777437739552e-50, 3.7943396524379168e-48, 2.0399882317508400e-46, 9.8835394906743943e-45, 4.3280559299142535e-43, 1.7178434457904729e-41, 6.1961228512356421e-40, 2.0359401385923293e-38, 6.1082242612809361e-37, 1.6768933675263854e-35, 4.2210054325158127e-34, 9.7605477823793654e-33, 2.0771142264898199e-31, 4.0748381699253249e-30, 7.3810648870417183e-29, 1.2363547377218958e-27, 1.9178022081467005e-26, 2.7586033246049575e-25, 3.6843129796217172e-24, 4.5743951370215857e-23, 5.2859323137468154e-22, 5.6910915175835955e-21, 5.7148760619818976e-20, 5.3577677281181046e-19, 4.6939007358067331e-18, 3.8463054439056453e-17, 2.9504021972772029e-16, 2.1202927606877813e-15, 1.4286304928072976e-14, 9.0316979019406731e-14, 5.3609851542957339e-13, 2.9897229802024487e-12, 1.5674715085860065e-11, 7.7305137154009676e-11, 
3.5884068378224706e-10, 1.5685984370316076e-09, 6.4603917825466764e-09, 2.5081389998281072e-08, 9.1830644706366585e-08, 3.1721513586827257e-07, 1.0342536339020313e-06, 3.1840053712593505e-06, 9.2587247391013518e-06, 2.5439403557345701e-05, 6.6066258510845340e-05, 1.6221813086114782e-04, 3.7669274161032700e-04, 8.2747754872833888e-04, 1.7199311750936504e-03, 3.3833630340537615e-03, 6.3002503573431189e-03, 1.1107594867856934e-02, 1.8544242319012489e-02, 2.9321853906612717e-02, 4.3916343744405027e-02, 6.2311084328469719e-02, 8.3763283089409293e-02, 1.0669133852506041e-01, 1.2877234499448006e-01, 1.4728481639742919e-01, 1.5964391640203837e-01, 1.6398949726573361e-01, 1.5964391640203837e-01, 1.4728481639742919e-01, 1.2877234499448006e-01, 1.0669133852506041e-01, 8.3763283089409293e-02, 6.2311084328469719e-02, 4.3916343744405027e-02, 2.9321853906612717e-02, 1.8544242319012489e-02, 1.1107594867856934e-02, 6.3002503573431189e-03, 3.3833630340537615e-03, 1.7199311750936504e-03, 8.2747754872833888e-04, 3.7669274161032700e-04, 1.6221813086114782e-04, 6.6066258510845340e-05, 2.5439403557345701e-05, 9.2587247391013518e-06, 3.1840053712593505e-06, 1.0342536339020313e-06, 3.1721513586827257e-07, 9.1830644706366585e-08, 2.5081389998281072e-08, 6.4603917825466764e-09, 1.5685984370316076e-09, 3.5884068378224706e-10, 7.7305137154009676e-11, 1.5674715085860065e-11, 2.9897229802024487e-12, 5.3609851542957339e-13, 9.0316979019406731e-14, 1.4286304928072976e-14, 2.1202927606877813e-15, 2.9504021972772029e-16, 3.8463054439056453e-17, 4.6939007358067331e-18, 5.3577677281181046e-19, 5.7148760619818976e-20, 5.6910915175835955e-21, 5.2859323137468154e-22, 4.5743951370215857e-23, 3.6843129796217172e-24, 2.7586033246049575e-25, 1.9178022081467005e-26, 1.2363547377218958e-27, 7.3810648870417183e-29, 4.0748381699253249e-30, 2.0771142264898199e-31, 9.7605477823793654e-33, 4.2210054325158127e-34, 1.6768933675263854e-35, 6.1082242612809361e-37, 2.0359401385923293e-38, 6.1961228512356421e-40, 1.7178434457904729e-41, 4.3280559299142535e-43, 9.8835394906743943e-45, 2.0399882317508400e-46, 3.7943396524379168e-48, 6.3392777437739552e-50, 9.4805533849020122e-52, 1.2644313857762022e-53, 1.4978576876165396e-55, 1.5691297806630218e-57, 1.4467547750792869e-59, 1.1679600204323236e-61, 8.2091330064948600e-64, 4.9923292649319471e-66, 2.6089921702855559e-68, 1.1628357927637541e-70, 4.3831680288373217e-73, 1.3842194496021173e-75, 3.6240329528761601e-78, 7.7728521820677718e-81, 1.3473832322197353e-83, 1.8586066284522124e-86, 2.0039262792917714e-89, 1.6537960518565174e-92, 1.0191828364399763e-95, 4.5535119727458115e-99, 1.4228368487505154e-102, 2.9740999490947605e-106, 3.9308789125198654e-110, 3.0530327446766110e-114, 1.2623847464849609e-118, 2.4174491971248305e-123, 1.7390228685740816e-128, 3.3286398376690490e-134, 8.7701238593349446e-141, 5.8866637956879832e-149}, + {8.3581373975539108e-150, 1.2928412877755577e-141, 5.0632382588324120e-135, 2.7209602221952139e-129, 3.8829389622015062e-124, 2.0785832875875387e-119, 5.1477595408996125e-115, 6.7815072200124497e-111, 5.2462701695642363e-107, 2.5648912231375885e-103, 8.3845258592424195e-100, 1.9161630252069163e-96, 3.1736968736455641e-93, 3.9241298067673186e-90, 3.7129493377218318e-87, 2.7453553081368732e-84, 1.6150344379416257e-81, 7.6774327866656284e-79, 2.9894343828423281e-76, 9.6489070550182607e-74, 2.6089586336270140e-71, 5.9654008943367430e-69, 1.1631966527487701e-66, 1.9489490740739864e-64, 2.8252664298663074e-62, 3.5656104439409607e-60, 3.9399315442250632e-58, 3.8315828065199010e-56, 
3.2951200002417535e-54, 2.5169328462380233e-52, 1.7144956007470846e-50, 1.0454204091183458e-48, 5.7258874684495484e-47, 2.8261341310391440e-45, 1.2607977361040919e-43, 5.0982121558902017e-42, 1.8734745650648728e-40, 6.2719136180711063e-39, 1.9172254945419127e-37, 5.3629616050774624e-36, 1.3755486543958331e-34, 3.2412811142581749e-33, 7.0292474203881568e-32, 1.4053678782835759e-30, 2.5945192595304142e-29, 4.4296347519346036e-28, 7.0039976811745699e-27, 1.0270246629696484e-25, 1.3983996188477357e-24, 1.7702189074849013e-23, 2.0857890645994067e-22, 2.2900155277371847e-21, 2.3452218050389733e-20, 2.2425235675550114e-19, 2.0040402270860207e-18, 1.6752540366716003e-17, 1.3110796195605400e-16, 9.6139706301947510e-16, 6.6105224464386113e-15, 4.2652608433469510e-14, 2.5842479214605026e-13, 1.4712572098023350e-12, 7.8755658569412608e-12, 3.9661851656268533e-11, 1.8802196827204209e-10, 8.3950345085448172e-10, 3.5321294226177634e-09, 1.4010751708171831e-08, 5.2419925848831286e-08, 1.8506755890537394e-07, 6.1679613744986908e-07, 1.9413286157507076e-06, 5.7724680860172742e-06, 1.6221044402072930e-05, 4.3091495767535577e-05, 1.0825095642068624e-04, 2.5723066429963950e-04, 5.7833524878596126e-04, 1.2305790314187559e-03, 2.4786280618533298e-03, 4.7269014592664482e-03, 8.5366761761569536e-03, 1.4602406139195508e-02, 2.3662049334306538e-02, 3.6327402988086469e-02, 5.2847573265457019e-02, 7.2857131060649991e-02, 9.5195424202311488e-02, 1.1789377323403843e-01, 1.3839596229316664e-01, 1.5400468068882942e-01, 1.6245602140829696e-01, 1.6245602140829696e-01, 1.5400468068882942e-01, 1.3839596229316664e-01, 1.1789377323403843e-01, 9.5195424202311488e-02, 7.2857131060649991e-02, 5.2847573265457019e-02, 3.6327402988086469e-02, 2.3662049334306538e-02, 1.4602406139195508e-02, 8.5366761761569536e-03, 4.7269014592664482e-03, 2.4786280618533298e-03, 1.2305790314187559e-03, 5.7833524878596126e-04, 2.5723066429963950e-04, 1.0825095642068624e-04, 4.3091495767535577e-05, 1.6221044402072930e-05, 5.7724680860172742e-06, 1.9413286157507076e-06, 6.1679613744986908e-07, 1.8506755890537394e-07, 5.2419925848831286e-08, 1.4010751708171831e-08, 3.5321294226177634e-09, 8.3950345085448172e-10, 1.8802196827204209e-10, 3.9661851656268533e-11, 7.8755658569412608e-12, 1.4712572098023350e-12, 2.5842479214605026e-13, 4.2652608433469510e-14, 6.6105224464386113e-15, 9.6139706301947510e-16, 1.3110796195605400e-16, 1.6752540366716003e-17, 2.0040402270860207e-18, 2.2425235675550114e-19, 2.3452218050389733e-20, 2.2900155277371847e-21, 2.0857890645994067e-22, 1.7702189074849013e-23, 1.3983996188477357e-24, 1.0270246629696484e-25, 7.0039976811745699e-27, 4.4296347519346036e-28, 2.5945192595304142e-29, 1.4053678782835759e-30, 7.0292474203881568e-32, 3.2412811142581749e-33, 1.3755486543958331e-34, 5.3629616050774624e-36, 1.9172254945419127e-37, 6.2719136180711063e-39, 1.8734745650648728e-40, 5.0982121558902017e-42, 1.2607977361040919e-43, 2.8261341310391440e-45, 5.7258874684495484e-47, 1.0454204091183458e-48, 1.7144956007470846e-50, 2.5169328462380233e-52, 3.2951200002417535e-54, 3.8315828065199010e-56, 3.9399315442250632e-58, 3.5656104439409607e-60, 2.8252664298663074e-62, 1.9489490740739864e-64, 1.1631966527487701e-66, 5.9654008943367430e-69, 2.6089586336270140e-71, 9.6489070550182607e-74, 2.9894343828423281e-76, 7.6774327866656284e-79, 1.6150344379416257e-81, 2.7453553081368732e-84, 3.7129493377218318e-87, 3.9241298067673186e-90, 3.1736968736455641e-93, 1.9161630252069163e-96, 8.3845258592424195e-100, 2.5648912231375885e-103, 5.2462701695642363e-107, 
6.7815072200124497e-111, 5.1477595408996125e-115, 2.0785832875875387e-119, 3.8829389622015062e-124, 2.7209602221952139e-129, 5.0632382588324120e-135, 1.2928412877755577e-141, 8.3581373975539108e-150}, + {1.1865178694667318e-150, 1.9052328091840223e-142, 7.6984112043987381e-136, 4.2550288152769099e-130, 6.2327840907767578e-125, 3.4199371076928271e-120, 8.6723736530813987e-116, 1.1688387166643387e-111, 9.2447904963657574e-108, 4.6184277629201267e-104, 1.5419925530578493e-100, 3.5978676971165183e-97, 6.0819398055170106e-94, 7.6728915952440279e-91, 7.4056905596969795e-88, 5.5844689207576789e-85, 3.3498078614715179e-82, 1.6234441963286573e-79, 6.4436381052523653e-77, 2.1197593898507255e-74, 5.8410998823252364e-72, 1.3609591582325947e-69, 2.7039668570857215e-67, 4.6159532490505087e-65, 6.8172254689772145e-63, 8.7649300109604261e-61, 9.8662587260951024e-59, 9.7741261753796815e-57, 8.5624277958175666e-55, 6.6621826190749697e-53, 4.6227110569813219e-51, 2.8712161421730314e-49, 1.6018948070503715e-47, 8.0538675955345321e-46, 3.6600273200497822e-44, 1.5076237023555035e-42, 5.6437809696463000e-41, 1.9247883296069542e-39, 5.9942100762419949e-38, 1.7082702187606927e-36, 4.4641638317895058e-35, 1.0718010386176091e-33, 2.3684402708428834e-32, 4.8253003329308802e-31, 9.0781689354637632e-30, 1.5795898083977327e-28, 2.5455821620542874e-27, 3.8046791468512480e-26, 5.2807723431418099e-25, 6.8148555847519964e-24, 8.1865279023956756e-23, 9.1644244237263828e-22, 9.5703407786302617e-21, 9.3325018346830289e-20, 8.5060423271066705e-19, 7.2528021781457044e-18, 5.7903374085298744e-17, 4.3318599899726389e-16, 3.0391558780794568e-15, 2.0010535409370048e-14, 1.2373574238435096e-13, 7.1903669458823460e-13, 3.9291706318326725e-12, 2.0202524727399665e-11, 9.7794253591584735e-11, 4.4592266183083370e-10, 1.9163195825780139e-09, 7.7651658482449274e-09, 2.9683092240535501e-08, 1.0708625929490390e-07, 3.6475807440990068e-07, 1.1735270767794228e-06, 3.5674612381232805e-06, 1.0250735219808427e-05, 2.7849862113005195e-05, 7.1564447183165722e-05, 1.7398159469356308e-04, 4.0027477960329739e-04, 8.7170842819435444e-04, 1.7973955409015775e-03, 3.5096925793779445e-03, 6.4913282065116552e-03, 1.1374067733309335e-02, 1.8883705620831706e-02, 2.9710644677922813e-02, 4.4304476145580723e-02, 6.2624475272894761e-02, 8.3916039610710105e-02, 1.0660728151712012e-01, 1.2841023960066572e-01, 1.4665826336478710e-01, 1.5882689919380447e-01, 1.6310306755078369e-01, 1.5882689919380447e-01, 1.4665826336478710e-01, 1.2841023960066572e-01, 1.0660728151712012e-01, 8.3916039610710105e-02, 6.2624475272894761e-02, 4.4304476145580723e-02, 2.9710644677922813e-02, 1.8883705620831706e-02, 1.1374067733309335e-02, 6.4913282065116552e-03, 3.5096925793779445e-03, 1.7973955409015775e-03, 8.7170842819435444e-04, 4.0027477960329739e-04, 1.7398159469356308e-04, 7.1564447183165722e-05, 2.7849862113005195e-05, 1.0250735219808427e-05, 3.5674612381232805e-06, 1.1735270767794228e-06, 3.6475807440990068e-07, 1.0708625929490390e-07, 2.9683092240535501e-08, 7.7651658482449274e-09, 1.9163195825780139e-09, 4.4592266183083370e-10, 9.7794253591584735e-11, 2.0202524727399665e-11, 3.9291706318326725e-12, 7.1903669458823460e-13, 1.2373574238435096e-13, 2.0010535409370048e-14, 3.0391558780794568e-15, 4.3318599899726389e-16, 5.7903374085298744e-17, 7.2528021781457044e-18, 8.5060423271066705e-19, 9.3325018346830289e-20, 9.5703407786302617e-21, 9.1644244237263828e-22, 8.1865279023956756e-23, 6.8148555847519964e-24, 5.2807723431418099e-25, 3.8046791468512480e-26, 2.5455821620542874e-27, 
1.5795898083977327e-28, 9.0781689354637632e-30, 4.8253003329308802e-31, 2.3684402708428834e-32, 1.0718010386176091e-33, 4.4641638317895058e-35, 1.7082702187606927e-36, 5.9942100762419949e-38, 1.9247883296069542e-39, 5.6437809696463000e-41, 1.5076237023555035e-42, 3.6600273200497822e-44, 8.0538675955345321e-46, 1.6018948070503715e-47, 2.8712161421730314e-49, 4.6227110569813219e-51, 6.6621826190749697e-53, 8.5624277958175666e-55, 9.7741261753796815e-57, 9.8662587260951024e-59, 8.7649300109604261e-61, 6.8172254689772145e-63, 4.6159532490505087e-65, 2.7039668570857215e-67, 1.3609591582325947e-69, 5.8410998823252364e-72, 2.1197593898507255e-74, 6.4436381052523653e-77, 1.6234441963286573e-79, 3.3498078614715179e-82, 5.5844689207576789e-85, 7.4056905596969795e-88, 7.6728915952440279e-91, 6.0819398055170106e-94, 3.5978676971165183e-97, 1.5419925530578493e-100, 4.6184277629201267e-104, 9.2447904963657574e-108, 1.1688387166643387e-111, 8.6723736530813987e-116, 3.4199371076928271e-120, 6.2327840907767578e-125, 4.2550288152769099e-130, 7.6984112043987381e-136, 1.9052328091840223e-142, 1.1865178694667318e-150}, + {1.6840859823147192e-151, 2.8068259915465668e-143, 1.1700024361722009e-136, 6.6504100620446623e-131, 9.9982626870689096e-126, 5.6227251746796540e-121, 1.4598025546816272e-116, 2.0126980605111617e-112, 1.6274180017356631e-108, 8.3068317564996415e-105, 2.8324541894274240e-101, 6.7467686874093196e-98, 1.1639059887066096e-94, 1.4980806006797597e-91, 1.4748024918249946e-88, 1.1340919960793407e-85, 6.9358925722736469e-83, 3.4266063564656041e-80, 1.3862443412766689e-77, 4.6475320184424221e-75, 1.3049987561313226e-72, 3.0981254141048187e-70, 6.2713089100746324e-68, 1.0906635714604883e-65, 1.6409070764036964e-63, 2.1490689833897993e-61, 2.4641241627782225e-59, 2.4864657741894790e-57, 2.2186336768274302e-55, 1.7582563693437111e-53, 1.2426111111041695e-51, 7.8609642161410873e-50, 4.4670071717223134e-48, 2.2875134062797375e-46, 1.0588298166277823e-44, 4.4424843586907703e-43, 1.6939665809360599e-41, 5.8848055182020130e-40, 1.8668536705967508e-38, 5.4197686272780061e-37, 1.4428750537835282e-35, 3.5292920298964982e-34, 7.9458957717354865e-33, 1.6494369134608046e-31, 3.1620314656015991e-30, 5.6065555004969651e-29, 9.2077125425461603e-28, 1.4025749559057771e-26, 1.9841800348065493e-25, 2.6100548891439468e-24, 3.1962255598539348e-23, 3.6477448742277230e-22, 3.8838880473136030e-21, 3.8618679709476893e-20, 3.5894545533623586e-19, 3.1214143458455917e-18, 2.5417847367765562e-17, 1.9397457159226965e-16, 1.3883744241015343e-15, 9.3270625462533699e-15, 5.8852482736096351e-14, 3.4902504651461380e-13, 1.9466883562172841e-12, 1.0217552929953939e-11, 5.0496076724281894e-11, 2.3510718451740779e-10, 1.0318035977700679e-09, 4.2703647703012249e-09, 1.6675261164947539e-08, 6.1462809064451552e-08, 2.1392739679622303e-07, 7.0340742267172300e-07, 2.1857314406758851e-06, 6.4208055079598766e-06, 1.7837324122194181e-05, 4.6876297088489141e-05, 1.1657026717177768e-04, 2.7438000977925001e-04, 6.1144669240944507e-04, 1.2903563124214057e-03, 2.5792975144676013e-03, 4.8845246714536682e-03, 8.7650270802736002e-03, 1.4906240076216418e-02, 2.4028883068628805e-02, 3.6720571883816493e-02, 5.3204522444878222e-02, 7.3096572483403829e-02, 9.5234476669428525e-02, 1.1767191582495477e-01, 1.3789832745338026e-01, 1.5327533388369177e-01, 1.6159415788497841e-01, 1.6159415788497841e-01, 1.5327533388369177e-01, 1.3789832745338026e-01, 1.1767191582495477e-01, 9.5234476669428525e-02, 7.3096572483403829e-02, 5.3204522444878222e-02, 3.6720571883816493e-02, 
2.4028883068628805e-02, 1.4906240076216418e-02, 8.7650270802736002e-03, 4.8845246714536682e-03, 2.5792975144676013e-03, 1.2903563124214057e-03, 6.1144669240944507e-04, 2.7438000977925001e-04, 1.1657026717177768e-04, 4.6876297088489141e-05, 1.7837324122194181e-05, 6.4208055079598766e-06, 2.1857314406758851e-06, 7.0340742267172300e-07, 2.1392739679622303e-07, 6.1462809064451552e-08, 1.6675261164947539e-08, 4.2703647703012249e-09, 1.0318035977700679e-09, 2.3510718451740779e-10, 5.0496076724281894e-11, 1.0217552929953939e-11, 1.9466883562172841e-12, 3.4902504651461380e-13, 5.8852482736096351e-14, 9.3270625462533699e-15, 1.3883744241015343e-15, 1.9397457159226965e-16, 2.5417847367765562e-17, 3.1214143458455917e-18, 3.5894545533623586e-19, 3.8618679709476893e-20, 3.8838880473136030e-21, 3.6477448742277230e-22, 3.1962255598539348e-23, 2.6100548891439468e-24, 1.9841800348065493e-25, 1.4025749559057771e-26, 9.2077125425461603e-28, 5.6065555004969651e-29, 3.1620314656015991e-30, 1.6494369134608046e-31, 7.9458957717354865e-33, 3.5292920298964982e-34, 1.4428750537835282e-35, 5.4197686272780061e-37, 1.8668536705967508e-38, 5.8848055182020130e-40, 1.6939665809360599e-41, 4.4424843586907703e-43, 1.0588298166277823e-44, 2.2875134062797375e-46, 4.4670071717223134e-48, 7.8609642161410873e-50, 1.2426111111041695e-51, 1.7582563693437111e-53, 2.2186336768274302e-55, 2.4864657741894790e-57, 2.4641241627782225e-59, 2.1490689833897993e-61, 1.6409070764036964e-63, 1.0906635714604883e-65, 6.2713089100746324e-68, 3.0981254141048187e-70, 1.3049987561313226e-72, 4.6475320184424221e-75, 1.3862443412766689e-77, 3.4266063564656041e-80, 6.9358925722736469e-83, 1.1340919960793407e-85, 1.4748024918249946e-88, 1.4980806006797597e-91, 1.1639059887066096e-94, 6.7467686874093196e-98, 2.8324541894274240e-101, 8.3068317564996415e-105, 1.6274180017356631e-108, 2.0126980605111617e-112, 1.4598025546816272e-116, 5.6227251746796540e-121, 9.9982626870689096e-126, 6.6504100620446623e-131, 1.1700024361722009e-136, 2.8068259915465668e-143, 1.6840859823147192e-151}, + {2.3899020087783538e-152, 4.1337932844079439e-144, 1.7774077118017352e-137, 1.0388721680141190e-131, 1.6028413383110423e-126, 9.2375467812799180e-122, 2.4552181378752603e-117, 3.4625987620456931e-113, 2.8619431556076958e-109, 1.4924416386535932e-105, 5.1966730435893490e-102, 1.2635422959480214e-98, 2.2243270327788831e-95, 2.9206395863825604e-92, 2.9324498160004818e-89, 2.2993488113263748e-86, 1.4336286376453945e-83, 7.2194564657826763e-81, 2.9766194663244294e-78, 1.0169377873705381e-75, 2.9095299074235018e-73, 7.0373804171328241e-71, 1.4512217440971957e-68, 2.5709836724164109e-66, 3.9400295587538806e-64, 5.2559492672815257e-62, 6.1380579859214437e-60, 6.3081846096424608e-58, 5.7325826101059963e-56, 4.6268156176755226e-54, 3.3301662675561347e-52, 2.1455339764954737e-50, 1.2416693105547687e-48, 6.4756889941564796e-47, 3.0527217350206308e-45, 1.3044685923509235e-43, 5.0660482804550958e-42, 1.7925263383158762e-40, 5.7919770166825449e-39, 1.7127600543267554e-37, 4.6447431828997982e-36, 1.1573309534274821e-34, 2.6544306695450948e-33, 5.6136594541327730e-32, 1.0964361374105204e-30, 1.9808277135032592e-29, 3.3148509472564812e-28, 5.1455269820153546e-27, 7.4183584908683151e-26, 9.9456309833303069e-25, 1.2413968193571878e-23, 1.4441897483451718e-22, 1.5675836145616411e-21, 1.5891496108132932e-20, 1.5060515461273932e-19, 1.3355156911983980e-18, 1.1090885964042391e-17, 8.6327149787564008e-17, 6.3027647004538790e-16, 4.3195605416094461e-15, 2.7808623921464611e-14, 1.6828396856093499e-13, 
9.5787095713446750e-13, 5.1314070230067691e-12, 2.5887038946092999e-11, 1.2305056649947358e-10, 5.5140053605655075e-10, 2.3304976685688244e-09, 9.2946430660480328e-09, 3.4995595911608765e-08, 1.2444418429985038e-07, 4.1810988437722446e-07, 1.3277780039510742e-06, 3.9868978854768734e-06, 1.1323113503918158e-05, 3.0426750494245559e-05, 7.7380888614623922e-05, 1.8630373980043952e-04, 4.2474911952990744e-04, 9.1721684584915764e-04, 1.8764550081725386e-03, 3.6376544190370392e-03, 6.6835183409171238e-03, 1.1640333062329629e-02, 1.9220794786116900e-02, 3.0094363212091947e-02, 4.4685050904584946e-02, 6.2929070893539232e-02, 8.4060967557359437e-02, 1.0651895867711307e-01, 1.2804948653534245e-01, 1.4603926334720602e-01, 1.5802227437713226e-01, 1.6223085863339984e-01, 1.5802227437713226e-01, 1.4603926334720602e-01, 1.2804948653534245e-01, 1.0651895867711307e-01, 8.4060967557359437e-02, 6.2929070893539232e-02, 4.4685050904584946e-02, 3.0094363212091947e-02, 1.9220794786116900e-02, 1.1640333062329629e-02, 6.6835183409171238e-03, 3.6376544190370392e-03, 1.8764550081725386e-03, 9.1721684584915764e-04, 4.2474911952990744e-04, 1.8630373980043952e-04, 7.7380888614623922e-05, 3.0426750494245559e-05, 1.1323113503918158e-05, 3.9868978854768734e-06, 1.3277780039510742e-06, 4.1810988437722446e-07, 1.2444418429985038e-07, 3.4995595911608765e-08, 9.2946430660480328e-09, 2.3304976685688244e-09, 5.5140053605655075e-10, 1.2305056649947358e-10, 2.5887038946092999e-11, 5.1314070230067691e-12, 9.5787095713446750e-13, 1.6828396856093499e-13, 2.7808623921464611e-14, 4.3195605416094461e-15, 6.3027647004538790e-16, 8.6327149787564008e-17, 1.1090885964042391e-17, 1.3355156911983980e-18, 1.5060515461273932e-19, 1.5891496108132932e-20, 1.5675836145616411e-21, 1.4441897483451718e-22, 1.2413968193571878e-23, 9.9456309833303069e-25, 7.4183584908683151e-26, 5.1455269820153546e-27, 3.3148509472564812e-28, 1.9808277135032592e-29, 1.0964361374105204e-30, 5.6136594541327730e-32, 2.6544306695450948e-33, 1.1573309534274821e-34, 4.6447431828997982e-36, 1.7127600543267554e-37, 5.7919770166825449e-39, 1.7925263383158762e-40, 5.0660482804550958e-42, 1.3044685923509235e-43, 3.0527217350206308e-45, 6.4756889941564796e-47, 1.2416693105547687e-48, 2.1455339764954737e-50, 3.3301662675561347e-52, 4.6268156176755226e-54, 5.7325826101059963e-56, 6.3081846096424608e-58, 6.1380579859214437e-60, 5.2559492672815257e-62, 3.9400295587538806e-64, 2.5709836724164109e-66, 1.4512217440971957e-68, 7.0373804171328241e-71, 2.9095299074235018e-73, 1.0169377873705381e-75, 2.9766194663244294e-78, 7.2194564657826763e-81, 1.4336286376453945e-83, 2.2993488113263748e-86, 2.9324498160004818e-89, 2.9206395863825604e-92, 2.2243270327788831e-95, 1.2635422959480214e-98, 5.1966730435893490e-102, 1.4924416386535932e-105, 2.8619431556076958e-109, 3.4625987620456931e-113, 2.4552181378752603e-117, 9.2375467812799180e-122, 1.6028413383110423e-126, 1.0388721680141190e-131, 1.7774077118017352e-137, 4.1337932844079439e-144, 2.3899020087783538e-152}, + {3.3909583926202969e-153, 6.0862398111746403e-145, 2.6990049104808299e-138, 1.6219807504695568e-132, 2.5679266269176032e-127, 1.5165281109964923e-122, 4.1260001698114066e-118, 5.9515348152406019e-114, 5.0279031031404388e-110, 2.6784561524395333e-106, 9.5230206639834012e-103, 2.3633738761330501e-99, 4.2451210579798656e-96, 5.6858261506014681e-93, 5.8218672370402156e-90, 4.6543449390713426e-87, 2.9582182277022238e-84, 1.5183294739007438e-81, 6.3795487359338829e-79, 2.2208089866950920e-76, 6.4735410582641528e-74, 1.5951103254298808e-71, 
3.3507196700207861e-69, 6.0464069117443835e-67, 9.4376753651859668e-65, 1.2822191213791653e-62, 1.5249989655376484e-60, 1.5960849625455376e-58, 1.4770796663837213e-56, 1.2140312147914462e-54, 8.8982101270994950e-53, 5.8379315269148491e-51, 3.4404622652833074e-49, 1.8272036259234053e-47, 8.7716947482766774e-46, 3.8170926065847122e-44, 1.5096656333028563e-42, 5.4400265472252386e-41, 1.7901934646856910e-39, 5.3916646781027312e-38, 1.4892193296848201e-36, 3.7795868784349260e-35, 8.8301666493717958e-34, 1.9022911023392480e-32, 3.7850530748490676e-31, 6.9665700818714156e-30, 1.1878115612451122e-28, 1.8786842016880562e-27, 2.7599602414845152e-26, 3.7707755724079553e-25, 4.7967389479007740e-24, 5.6876402480164174e-23, 6.2928558569586138e-22, 6.5032337700285951e-21, 6.2833568961437137e-20, 5.6810615766852734e-19, 4.8108093342629945e-18, 3.8186952800271027e-17, 2.8435455634715359e-16, 1.9878218878928244e-15, 1.3054944744238950e-14, 8.0602086244990039e-14, 4.6813428453581271e-13, 2.5592533379055519e-12, 1.3177335517890257e-11, 6.3937241128218932e-11, 2.9249655637661996e-10, 1.2622501489006109e-09, 5.1408413936157202e-09, 1.9768915482746416e-08, 7.1808683672368473e-08, 2.4648607700088249e-07, 7.9982594734022375e-07, 2.4543893859357136e-06, 7.1249965705005385e-06, 1.9573063938461480e-05, 5.0897601014117974e-05, 1.2532087255275024e-04, 2.9224860429386435e-04, 6.4564239421835911e-04, 1.3515815953737001e-03, 2.6816113762841770e-03, 5.0435720589444479e-03, 8.9938955083231700e-03, 1.5208840297800851e-02, 2.4392011104876491e-02, 3.7107373608297196e-02, 5.3553162455639902e-02, 7.3327512502661077e-02, 9.5267294910895362e-02, 1.1744849271751398e-01, 1.3740517258624155e-01, 1.5255613196758108e-01, 1.6074586617490177e-01, 1.6074586617490177e-01, 1.5255613196758108e-01, 1.3740517258624155e-01, 1.1744849271751398e-01, 9.5267294910895362e-02, 7.3327512502661077e-02, 5.3553162455639902e-02, 3.7107373608297196e-02, 2.4392011104876491e-02, 1.5208840297800851e-02, 8.9938955083231700e-03, 5.0435720589444479e-03, 2.6816113762841770e-03, 1.3515815953737001e-03, 6.4564239421835911e-04, 2.9224860429386435e-04, 1.2532087255275024e-04, 5.0897601014117974e-05, 1.9573063938461480e-05, 7.1249965705005385e-06, 2.4543893859357136e-06, 7.9982594734022375e-07, 2.4648607700088249e-07, 7.1808683672368473e-08, 1.9768915482746416e-08, 5.1408413936157202e-09, 1.2622501489006109e-09, 2.9249655637661996e-10, 6.3937241128218932e-11, 1.3177335517890257e-11, 2.5592533379055519e-12, 4.6813428453581271e-13, 8.0602086244990039e-14, 1.3054944744238950e-14, 1.9878218878928244e-15, 2.8435455634715359e-16, 3.8186952800271027e-17, 4.8108093342629945e-18, 5.6810615766852734e-19, 6.2833568961437137e-20, 6.5032337700285951e-21, 6.2928558569586138e-22, 5.6876402480164174e-23, 4.7967389479007740e-24, 3.7707755724079553e-25, 2.7599602414845152e-26, 1.8786842016880562e-27, 1.1878115612451122e-28, 6.9665700818714156e-30, 3.7850530748490676e-31, 1.9022911023392480e-32, 8.8301666493717958e-34, 3.7795868784349260e-35, 1.4892193296848201e-36, 5.3916646781027312e-38, 1.7901934646856910e-39, 5.4400265472252386e-41, 1.5096656333028563e-42, 3.8170926065847122e-44, 8.7716947482766774e-46, 1.8272036259234053e-47, 3.4404622652833074e-49, 5.8379315269148491e-51, 8.8982101270994950e-53, 1.2140312147914462e-54, 1.4770796663837213e-56, 1.5960849625455376e-58, 1.5249989655376484e-60, 1.2822191213791653e-62, 9.4376753651859668e-65, 6.0464069117443835e-67, 3.3507196700207861e-69, 1.5951103254298808e-71, 6.4735410582641528e-74, 2.2208089866950920e-76, 6.3795487359338829e-79, 
1.5183294739007438e-81, 2.9582182277022238e-84, 4.6543449390713426e-87, 5.8218672370402156e-90, 5.6858261506014681e-93, 4.2451210579798656e-96, 2.3633738761330501e-99, 9.5230206639834012e-103, 2.6784561524395333e-106, 5.0279031031404388e-110, 5.9515348152406019e-114, 4.1260001698114066e-118, 1.5165281109964923e-122, 2.5679266269176032e-127, 1.6219807504695568e-132, 2.6990049104808299e-138, 6.0862398111746403e-145, 3.3909583926202969e-153}, + {4.8105196926800447e-154, 8.9581349097014215e-146, 4.0967387783316221e-139, 2.5310532151069106e-133, 4.1115283174204790e-128, 2.4878902559132056e-123, 6.9281121543161367e-119, 1.0220279314543827e-114, 8.8243129868960541e-111, 4.8017711254332563e-107, 1.7430737935309201e-103, 4.4149829449283152e-100, 8.0909195748902241e-97, 1.1053200966532163e-93, 1.1540787556182570e-90, 9.4062445961729118e-88, 6.0938287850722912e-85, 3.1875459729105427e-82, 1.3647333597873649e-79, 4.8404013281274901e-77, 1.4373955207015468e-74, 3.6078416607613653e-72, 7.7193542145188385e-70, 1.4187152411878090e-67, 2.2552327816162141e-65, 3.1202888315880434e-63, 3.7791134625824903e-61, 4.0276289664497742e-59, 3.7954127913999138e-57, 3.1764227614962876e-55, 2.3706043434701509e-53, 1.5836556658561472e-51, 9.5030686706332782e-50, 5.1390369637842554e-48, 2.5120644517965854e-46, 1.1131134062886865e-44, 4.4828688752777018e-43, 1.6449624113929946e-41, 5.5124888578291881e-40, 1.6907462404862362e-38, 4.7559672339276421e-37, 1.2293279880804420e-35, 2.9252024556839558e-34, 6.4187377483908620e-33, 1.3009300303447977e-31, 2.4391335868389452e-30, 4.2366796233867587e-29, 6.8268728000449227e-28, 1.0218585868330291e-26, 1.4225584486944289e-25, 1.8440385833763509e-24, 2.2283071036811595e-23, 2.5127294052122314e-22, 2.6467919286881548e-21, 2.6068354747780742e-20, 2.4028313064323887e-19, 2.0745586965632990e-18, 1.6791123823339180e-17, 1.2750493352152407e-16, 9.0906021055959760e-16, 6.0895673953539301e-15, 3.8353345411384518e-14, 2.2726043918949056e-13, 1.2676970629687990e-12, 6.6608945127799692e-12, 3.2985045445873824e-11, 1.5402785642655144e-10, 6.7857514014962344e-10, 2.8217661921664192e-09, 1.1080667611835489e-08, 4.1107393732952725e-08, 1.4413206230487042e-07, 4.7781084115403341e-07, 1.4981838617872995e-06, 4.4446553339045300e-06, 1.2480054945286184e-05, 3.3176731522024002e-05, 8.3524630257267974e-05, 1.9919419791658306e-04, 4.5012132211052618e-04, 9.6399635747843960e-04, 1.9570814745659979e-03, 3.7671919910449735e-03, 6.8767389146135380e-03, 1.1906303498193138e-02, 1.9555454063496128e-02, 3.0473029715843637e-02, 4.5058194694576482e-02, 6.3225096957716942e-02, 8.4198334040706369e-02, 1.0642658240453728e-01, 1.2769014695749564e-01, 1.4542767716486577e-01, 1.5722973246415300e-01, 1.6137249430200609e-01, 1.5722973246415300e-01, 1.4542767716486577e-01, 1.2769014695749564e-01, 1.0642658240453728e-01, 8.4198334040706369e-02, 6.3225096957716942e-02, 4.5058194694576482e-02, 3.0473029715843637e-02, 1.9555454063496128e-02, 1.1906303498193138e-02, 6.8767389146135380e-03, 3.7671919910449735e-03, 1.9570814745659979e-03, 9.6399635747843960e-04, 4.5012132211052618e-04, 1.9919419791658306e-04, 8.3524630257267974e-05, 3.3176731522024002e-05, 1.2480054945286184e-05, 4.4446553339045300e-06, 1.4981838617872995e-06, 4.7781084115403341e-07, 1.4413206230487042e-07, 4.1107393732952725e-08, 1.1080667611835489e-08, 2.8217661921664192e-09, 6.7857514014962344e-10, 1.5402785642655144e-10, 3.2985045445873824e-11, 6.6608945127799692e-12, 1.2676970629687990e-12, 2.2726043918949056e-13, 3.8353345411384518e-14, 6.0895673953539301e-15, 
9.0906021055959760e-16, 1.2750493352152407e-16, 1.6791123823339180e-17, 2.0745586965632990e-18, 2.4028313064323887e-19, 2.6068354747780742e-20, 2.6467919286881548e-21, 2.5127294052122314e-22, 2.2283071036811595e-23, 1.8440385833763509e-24, 1.4225584486944289e-25, 1.0218585868330291e-26, 6.8268728000449227e-28, 4.2366796233867587e-29, 2.4391335868389452e-30, 1.3009300303447977e-31, 6.4187377483908620e-33, 2.9252024556839558e-34, 1.2293279880804420e-35, 4.7559672339276421e-37, 1.6907462404862362e-38, 5.5124888578291881e-40, 1.6449624113929946e-41, 4.4828688752777018e-43, 1.1131134062886865e-44, 2.5120644517965854e-46, 5.1390369637842554e-48, 9.5030686706332782e-50, 1.5836556658561472e-51, 2.3706043434701509e-53, 3.1764227614962876e-55, 3.7954127913999138e-57, 4.0276289664497742e-59, 3.7791134625824903e-61, 3.1202888315880434e-63, 2.2552327816162141e-65, 1.4187152411878090e-67, 7.7193542145188385e-70, 3.6078416607613653e-72, 1.4373955207015468e-74, 4.8404013281274901e-77, 1.3647333597873649e-79, 3.1875459729105427e-82, 6.0938287850722912e-85, 9.4062445961729118e-88, 1.1540787556182570e-90, 1.1053200966532163e-93, 8.0909195748902241e-97, 4.4149829449283152e-100, 1.7430737935309201e-103, 4.8017711254332563e-107, 8.8243129868960541e-111, 1.0220279314543827e-114, 6.9281121543161367e-119, 2.4878902559132056e-123, 4.1115283174204790e-128, 2.5310532151069106e-133, 4.0967387783316221e-139, 8.9581349097014215e-146, 4.8105196926800447e-154}, + {6.8232197942250040e-155, 1.3181218114180107e-146, 6.2157350428501506e-140, 3.9475798303387850e-134, 6.5789265659620338e-129, 4.0785124308845132e-124, 1.1623859612267819e-119, 1.7535057075593614e-115, 1.5472021309193150e-111, 8.5990905704820439e-108, 3.1867892204083499e-104, 8.2372979129162400e-101, 1.5400245034982435e-97, 2.1456921017005163e-94, 2.2843171180826872e-91, 1.8979497846368726e-88, 1.2532102545185035e-85, 6.6801061673807505e-83, 2.9141025510545569e-80, 1.0529633740939257e-77, 3.1851858054210446e-75, 8.1431135103329952e-73, 1.7744839092477200e-70, 3.3212610596524388e-68, 5.3763688423964646e-66, 7.5746019148953009e-64, 9.3412200164700803e-62, 1.0136702052099045e-59, 9.7258603579728974e-58, 8.2874406983788114e-56, 6.2972157268746326e-54, 4.2830642136217272e-52, 2.6167460089856425e-50, 1.4407389710981317e-48, 7.1704184113766286e-47, 3.2349665837355912e-45, 1.3265135880257318e-43, 4.9561856762703143e-42, 1.6911721771605448e-40, 5.2817945954838791e-39, 1.5129382111557171e-37, 3.9824232345150253e-36, 9.6505627399970417e-35, 2.1566756583641779e-33, 4.4519543419059356e-32, 8.5019504368590908e-31, 1.5042535921639604e-29, 2.4692068347694563e-28, 3.7652687650201339e-27, 5.3404226168220292e-26, 7.0535560842160286e-25, 8.6851943617712193e-24, 9.9804891048351136e-23, 1.0714309730247980e-21, 1.0755595219737973e-20, 1.0105554063847399e-19, 8.8944609945993243e-19, 7.3396140181061249e-18, 5.6828205180213369e-17, 4.1316021764480691e-16, 2.8225916935311505e-15, 1.8132136826440378e-14, 1.0959822792780298e-13, 6.2370678258135157e-13, 3.3437645093788376e-12, 1.6897122210470246e-11, 8.0527347789629487e-11, 3.6211627179619072e-10, 1.5372205783801174e-09, 6.1632086713433375e-09, 2.3347964326766498e-08, 8.3606844554454458e-08, 2.8310992931625352e-07, 9.0687938435499206e-07, 2.7490144224436228e-06, 7.8882549269903193e-06, 2.1433661822155404e-05, 5.5163316323048505e-05, 1.3451233819198536e-04, 3.1084451589835123e-04, 6.8092260658474482e-04, 1.4142383035108275e-03, 2.7855275571971620e-03, 5.2039736325766575e-03, 9.2231946903286923e-03, 1.5510131997231313e-02, 
2.4751412543080331e-02, 3.7487880444774385e-02, 5.3893671909463224e-02, 7.3550203638163128e-02, 9.5294126497036155e-02, 1.1722364672954393e-01, 1.3691645960749019e-01, 1.5184684526785858e-01, 1.5991079381814263e-01, 1.5991079381814263e-01, 1.5184684526785858e-01, 1.3691645960749019e-01, 1.1722364672954393e-01, 9.5294126497036155e-02, 7.3550203638163128e-02, 5.3893671909463224e-02, 3.7487880444774385e-02, 2.4751412543080331e-02, 1.5510131997231313e-02, 9.2231946903286923e-03, 5.2039736325766575e-03, 2.7855275571971620e-03, 1.4142383035108275e-03, 6.8092260658474482e-04, 3.1084451589835123e-04, 1.3451233819198536e-04, 5.5163316323048505e-05, 2.1433661822155404e-05, 7.8882549269903193e-06, 2.7490144224436228e-06, 9.0687938435499206e-07, 2.8310992931625352e-07, 8.3606844554454458e-08, 2.3347964326766498e-08, 6.1632086713433375e-09, 1.5372205783801174e-09, 3.6211627179619072e-10, 8.0527347789629487e-11, 1.6897122210470246e-11, 3.3437645093788376e-12, 6.2370678258135157e-13, 1.0959822792780298e-13, 1.8132136826440378e-14, 2.8225916935311505e-15, 4.1316021764480691e-16, 5.6828205180213369e-17, 7.3396140181061249e-18, 8.8944609945993243e-19, 1.0105554063847399e-19, 1.0755595219737973e-20, 1.0714309730247980e-21, 9.9804891048351136e-23, 8.6851943617712193e-24, 7.0535560842160286e-25, 5.3404226168220292e-26, 3.7652687650201339e-27, 2.4692068347694563e-28, 1.5042535921639604e-29, 8.5019504368590908e-31, 4.4519543419059356e-32, 2.1566756583641779e-33, 9.6505627399970417e-35, 3.9824232345150253e-36, 1.5129382111557171e-37, 5.2817945954838791e-39, 1.6911721771605448e-40, 4.9561856762703143e-42, 1.3265135880257318e-43, 3.2349665837355912e-45, 7.1704184113766286e-47, 1.4407389710981317e-48, 2.6167460089856425e-50, 4.2830642136217272e-52, 6.2972157268746326e-54, 8.2874406983788114e-56, 9.7258603579728974e-58, 1.0136702052099045e-59, 9.3412200164700803e-62, 7.5746019148953009e-64, 5.3763688423964646e-66, 3.3212610596524388e-68, 1.7744839092477200e-70, 8.1431135103329952e-73, 3.1851858054210446e-75, 1.0529633740939257e-77, 2.9141025510545569e-80, 6.6801061673807505e-83, 1.2532102545185035e-85, 1.8979497846368726e-88, 2.2843171180826872e-91, 2.1456921017005163e-94, 1.5400245034982435e-97, 8.2372979129162400e-101, 3.1867892204083499e-104, 8.5990905704820439e-108, 1.5472021309193150e-111, 1.7535057075593614e-115, 1.1623859612267819e-119, 4.0785124308845132e-124, 6.5789265659620338e-129, 3.9475798303387850e-134, 6.2157350428501506e-140, 1.3181218114180107e-146, 6.8232197942250040e-155}, + {9.6764302762857911e-156, 1.9389386338649253e-147, 9.4268794388387584e-141, 6.1537049528839255e-135, 1.0520595245806230e-129, 6.6813621694344271e-125, 1.9486722540934962e-120, 3.0058398378782854e-116, 2.7101250640991562e-112, 1.5383037549430030e-108, 5.8195869131646608e-105, 1.5349872474118010e-101, 2.9274184968148517e-98, 4.1594648723850271e-95, 4.5147260427833628e-92, 3.8235833994762416e-89, 2.5729913678738507e-86, 1.3975076314494611e-83, 6.2111050589991735e-81, 2.2862048443444402e-78, 7.0441083482496365e-76, 1.8341247494924424e-73, 4.0702482203595230e-71, 7.7576584194845979e-69, 1.2786993882275677e-66, 1.8342905501305085e-64, 2.3031479777875351e-62, 2.5445408528816102e-60, 2.4855538228455434e-58, 2.1561995344944781e-56, 1.6679549705866419e-54, 1.1549259780000125e-52, 7.1833013403181687e-51, 4.0263574453603144e-49, 2.0400456628439797e-47, 9.3700002807816908e-46, 3.9116867251382951e-44, 1.4879647423347193e-42, 5.1693708373553046e-41, 1.6438042759677152e-39, 4.7942896465195374e-38, 1.2849980190682212e-36, 
3.1708729314553754e-35, 7.2161081218241727e-34, 1.5169921976469880e-32, 2.9504597948207525e-31, 5.3168727228967571e-30, 8.8896336943576362e-29, 1.3808362593425483e-27, 1.9951324606202683e-26, 2.6846378493690218e-25, 3.3679964757574780e-24, 3.9436002250852672e-23, 4.3140992488603084e-22, 4.4134945997917874e-21, 4.2263869813792336e-20, 3.7916648002202093e-19, 3.1895272336827489e-18, 2.5176935915196902e-17, 1.8663270859468551e-16, 1.3001503435609439e-15, 8.5176175587843442e-15, 5.2510379021912937e-14, 3.0482099219006810e-13, 1.6671499873186635e-12, 8.5956677592000092e-12, 4.1801607084826671e-11, 1.9183890700529380e-10, 8.3123118452689150e-10, 3.4021150696080136e-09, 1.3158571431631216e-08, 4.8115154735048105e-08, 1.6639485157072335e-07, 5.4443385920665998e-07, 1.6859754012772494e-06, 4.9431447402619969e-06, 1.3725819438547434e-05, 3.6106464014912190e-05, 9.0004542316217840e-05, 2.1266213156628891e-04, 4.7639613833099480e-04, 1.0120395216254993e-03, 2.0392461173393923e-03, 3.8982490246164416e-03, 7.0709100790306376e-03, 1.2171895486539193e-02, 1.9887632287530231e-02, 3.0846667595353164e-02, 4.5424033501544080e-02, 6.3512773332392206e-02, 8.4328396273560896e-02, 1.0633035510412364e-01, 1.2733227690715612e-01, 1.4482336870308740e-01, 1.5644897465655389e-01, 1.6052761213288572e-01, 1.5644897465655389e-01, 1.4482336870308740e-01, 1.2733227690715612e-01, 1.0633035510412364e-01, 8.4328396273560896e-02, 6.3512773332392206e-02, 4.5424033501544080e-02, 3.0846667595353164e-02, 1.9887632287530231e-02, 1.2171895486539193e-02, 7.0709100790306376e-03, 3.8982490246164416e-03, 2.0392461173393923e-03, 1.0120395216254993e-03, 4.7639613833099480e-04, 2.1266213156628891e-04, 9.0004542316217840e-05, 3.6106464014912190e-05, 1.3725819438547434e-05, 4.9431447402619969e-06, 1.6859754012772494e-06, 5.4443385920665998e-07, 1.6639485157072335e-07, 4.8115154735048105e-08, 1.3158571431631216e-08, 3.4021150696080136e-09, 8.3123118452689150e-10, 1.9183890700529380e-10, 4.1801607084826671e-11, 8.5956677592000092e-12, 1.6671499873186635e-12, 3.0482099219006810e-13, 5.2510379021912937e-14, 8.5176175587843442e-15, 1.3001503435609439e-15, 1.8663270859468551e-16, 2.5176935915196902e-17, 3.1895272336827489e-18, 3.7916648002202093e-19, 4.2263869813792336e-20, 4.4134945997917874e-21, 4.3140992488603084e-22, 3.9436002250852672e-23, 3.3679964757574780e-24, 2.6846378493690218e-25, 1.9951324606202683e-26, 1.3808362593425483e-27, 8.8896336943576362e-29, 5.3168727228967571e-30, 2.9504597948207525e-31, 1.5169921976469880e-32, 7.2161081218241727e-34, 3.1708729314553754e-35, 1.2849980190682212e-36, 4.7942896465195374e-38, 1.6438042759677152e-39, 5.1693708373553046e-41, 1.4879647423347193e-42, 3.9116867251382951e-44, 9.3700002807816908e-46, 2.0400456628439797e-47, 4.0263574453603144e-49, 7.1833013403181687e-51, 1.1549259780000125e-52, 1.6679549705866419e-54, 2.1561995344944781e-56, 2.4855538228455434e-58, 2.5445408528816102e-60, 2.3031479777875351e-62, 1.8342905501305085e-64, 1.2786993882275677e-66, 7.7576584194845979e-69, 4.0702482203595230e-71, 1.8341247494924424e-73, 7.0441083482496365e-76, 2.2862048443444402e-78, 6.2111050589991735e-81, 1.3975076314494611e-83, 2.5729913678738507e-86, 3.8235833994762416e-89, 4.5147260427833628e-92, 4.1594648723850271e-95, 2.9274184968148517e-98, 1.5349872474118010e-101, 5.8195869131646608e-105, 1.5383037549430030e-108, 2.7101250640991562e-112, 3.0058398378782854e-116, 1.9486722540934962e-120, 6.6813621694344271e-125, 1.0520595245806230e-129, 6.1537049528839255e-135, 9.4268794388387584e-141, 
1.9389386338649253e-147, 9.6764302762857911e-156}, + {1.3720504154302014e-156, 2.8513090679363871e-148, 1.4291119261031478e-141, 9.5878345367776407e-136, 1.6813631090843168e-130, 1.0937641473652881e-125, 3.2642500496954800e-121, 5.1480442370317754e-117, 4.7425471562207601e-113, 2.7489983469746781e-109, 1.0615412978825736e-105, 2.8568935742124688e-102, 5.5574407262791218e-99, 8.0520048626328837e-96, 8.9097729474744936e-93, 7.6909567014190078e-90, 5.2740027603411657e-87, 2.9186112720938588e-84, 1.3214406564729561e-81, 4.9544462772840985e-79, 1.5547429149839197e-76, 4.1226034386806711e-74, 9.3161574688297957e-72, 1.8079563460381452e-69, 3.0341692292606370e-67, 4.4312923121519367e-65, 5.6644240327335667e-63, 6.3708774357395162e-61, 6.3351435391240280e-59, 5.5944362188420675e-57, 4.4053385520999193e-55, 3.1050807666511750e-53, 1.9659178072617245e-51, 1.1217013968671671e-49, 5.7853802707668581e-48, 2.7049816006551789e-46, 1.1495533915103732e-44, 4.4515141949278675e-43, 1.5743976688633690e-41, 5.0968507385634822e-40, 1.5134439954515047e-38, 4.1300163134837491e-37, 1.0376574212671890e-35, 2.4044973436406753e-34, 5.1472183288770750e-33, 1.0194603568070278e-31, 1.8709115794220601e-30, 3.1858380070837839e-29, 5.0402581729336372e-28, 7.4179265516977060e-27, 1.0167813330369454e-25, 1.2995016885518635e-24, 1.5502266336408172e-23, 1.7279241662380531e-22, 1.8012995272732974e-21, 1.7578400079602021e-20, 1.6072599968378759e-19, 1.3780640314290736e-18, 1.1088556613183840e-17, 8.3797787500377154e-17, 5.9519054533347666e-16, 3.9759844018964598e-15, 2.4996755476537754e-14, 1.4799417664726766e-13, 8.2563190162426826e-13, 4.3426586703108686e-12, 2.1547012918303077e-11, 1.0090299262094598e-10, 4.4618936758916388e-10, 1.8639520759198730e-09, 7.3594054861502447e-09, 2.7474235203608662e-08, 9.7018924643039030e-08, 3.2418977135736030e-07, 1.0254381425692049e-06, 3.0713815320154091e-06, 8.7138650801478641e-06, 2.3424552293210665e-05, 5.9681270208738333e-05, 1.4415391428196787e-04, 3.3017517124886084e-04, 7.1728665634025713e-04, 1.4783089544886255e-03, 2.8910036930282720e-03, 5.3656605093971325e-03, 9.4528407852602792e-03, 1.5810044685497838e-02, 2.5107070566959051e-02, 3.7862166097742164e-02, 5.4226226141723462e-02, 7.3764890309958767e-02, 9.5315208688082131e-02, 1.1699751277485990e-01, 1.3643214928471140e-01, 1.5114725119507466e-01, 1.5908860103657232e-01, 1.5908860103657232e-01, 1.5114725119507466e-01, 1.3643214928471140e-01, 1.1699751277485990e-01, 9.5315208688082131e-02, 7.3764890309958767e-02, 5.4226226141723462e-02, 3.7862166097742164e-02, 2.5107070566959051e-02, 1.5810044685497838e-02, 9.4528407852602792e-03, 5.3656605093971325e-03, 2.8910036930282720e-03, 1.4783089544886255e-03, 7.1728665634025713e-04, 3.3017517124886084e-04, 1.4415391428196787e-04, 5.9681270208738333e-05, 2.3424552293210665e-05, 8.7138650801478641e-06, 3.0713815320154091e-06, 1.0254381425692049e-06, 3.2418977135736030e-07, 9.7018924643039030e-08, 2.7474235203608662e-08, 7.3594054861502447e-09, 1.8639520759198730e-09, 4.4618936758916388e-10, 1.0090299262094598e-10, 2.1547012918303077e-11, 4.3426586703108686e-12, 8.2563190162426826e-13, 1.4799417664726766e-13, 2.4996755476537754e-14, 3.9759844018964598e-15, 5.9519054533347666e-16, 8.3797787500377154e-17, 1.1088556613183840e-17, 1.3780640314290736e-18, 1.6072599968378759e-19, 1.7578400079602021e-20, 1.8012995272732974e-21, 1.7279241662380531e-22, 1.5502266336408172e-23, 1.2995016885518635e-24, 1.0167813330369454e-25, 7.4179265516977060e-27, 5.0402581729336372e-28, 3.1858380070837839e-29, 
1.8709115794220601e-30, 1.0194603568070278e-31, 5.1472183288770750e-33, 2.4044973436406753e-34, 1.0376574212671890e-35, 4.1300163134837491e-37, 1.5134439954515047e-38, 5.0968507385634822e-40, 1.5743976688633690e-41, 4.4515141949278675e-43, 1.1495533915103732e-44, 2.7049816006551789e-46, 5.7853802707668581e-48, 1.1217013968671671e-49, 1.9659178072617245e-51, 3.1050807666511750e-53, 4.4053385520999193e-55, 5.5944362188420675e-57, 6.3351435391240280e-59, 6.3708774357395162e-61, 5.6644240327335667e-63, 4.4312923121519367e-65, 3.0341692292606370e-67, 1.8079563460381452e-69, 9.3161574688297957e-72, 4.1226034386806711e-74, 1.5547429149839197e-76, 4.9544462772840985e-79, 1.3214406564729561e-81, 2.9186112720938588e-84, 5.2740027603411657e-87, 7.6909567014190078e-90, 8.9097729474744936e-93, 8.0520048626328837e-96, 5.5574407262791218e-99, 2.8568935742124688e-102, 1.0615412978825736e-105, 2.7489983469746781e-109, 4.7425471562207601e-113, 5.1480442370317754e-117, 3.2642500496954800e-121, 1.0937641473652881e-125, 1.6813631090843168e-130, 9.5878345367776407e-136, 1.4291119261031478e-141, 2.8513090679363871e-148, 1.3720504154302014e-156}, + {1.9451569748580915e-157, 4.1917692496623074e-149, 2.1656533203264049e-142, 1.4930848367233329e-136, 2.6854744034643652e-131, 1.7892892532014627e-126, 5.4637061901241444e-122, 8.8092712504606827e-118, 8.2912095929290215e-114, 4.9074274510768621e-110, 1.9341588617846345e-106, 5.3107713773093621e-103, 1.0536661811562218e-99, 1.5565848726847315e-96, 1.7557722156935769e-93, 1.5446161289516271e-90, 1.0792867414624379e-87, 6.0849467639302215e-85, 2.8063950432169120e-82, 1.0716703469936493e-79, 3.4248444263060510e-77, 9.2475643800642814e-75, 2.1277914455786410e-72, 4.2042110347158325e-70, 7.1831202945319969e-68, 1.0679652291289812e-65, 1.3896846788334397e-63, 1.5910268843395548e-61, 1.6104220743828299e-59, 1.4475542666378789e-57, 1.1602357060613541e-55, 8.3238465917891379e-54, 5.3641275790400676e-52, 3.1152601083577022e-50, 1.6354424962643853e-48, 7.7832174172965086e-47, 3.3668417110773493e-45, 1.3271170300689291e-43, 4.7778763656316077e-42, 1.5745422805731105e-40, 4.7595446906951574e-39, 1.3222509215279862e-37, 3.3821840940140512e-36, 7.9793659216047449e-35, 1.7391569791608673e-33, 3.5073691468931601e-32, 6.5544111912453122e-31, 1.1365779583643404e-29, 1.8312631014871582e-28, 2.7449324565145490e-27, 3.8322872065908508e-26, 4.9890778549827656e-25, 6.0629582449560457e-24, 6.8848488569478251e-23, 7.3125997647970303e-22, 7.2714081831677253e-21, 6.7751229891684194e-20, 5.9201437592343701e-19, 4.8552433914811921e-18, 3.7401100817855975e-17, 2.7081193971598301e-16, 1.8444288831090382e-15, 1.1823696121971421e-14, 7.1386313730701460e-14, 4.0616946929687980e-13, 2.1791052221508088e-12, 1.1029706356088577e-11, 5.2697432068365651e-11, 2.3777643019834690e-10, 1.0136912013567245e-09, 4.0850156582808148e-09, 1.5567430501187781e-08, 5.6124175036636782e-08, 1.9149546201420412e-07, 6.1858506291091545e-07, 1.8924364481789124e-06, 5.4848459956508166e-06, 1.5064725177098550e-05, 3.9222592074287836e-05, 9.6829305369002766e-05, 2.2671622856088540e-04, 5.0357752441741071e-04, 1.0613379467370166e-03, 2.1229194690909154e-03, 4.0307696130016573e-03, 7.2659539954360898e-03, 1.2437029172215985e-02, 2.0217282655364367e-02, 3.1215303196211058e-02, 4.5782692483561545e-02, 6.3792314106331613e-02, 8.4451401972617260e-02, 1.0623046970193420e-01, 1.2697592765070259e-01, 1.4422620485622731e-01, 1.5567971237666092e-01, 1.5969586284722317e-01, 1.5567971237666092e-01, 1.4422620485622731e-01, 
1.2697592765070259e-01, 1.0623046970193420e-01, 8.4451401972617260e-02, 6.3792314106331613e-02, 4.5782692483561545e-02, 3.1215303196211058e-02, 2.0217282655364367e-02, 1.2437029172215985e-02, 7.2659539954360898e-03, 4.0307696130016573e-03, 2.1229194690909154e-03, 1.0613379467370166e-03, 5.0357752441741071e-04, 2.2671622856088540e-04, 9.6829305369002766e-05, 3.9222592074287836e-05, 1.5064725177098550e-05, 5.4848459956508166e-06, 1.8924364481789124e-06, 6.1858506291091545e-07, 1.9149546201420412e-07, 5.6124175036636782e-08, 1.5567430501187781e-08, 4.0850156582808148e-09, 1.0136912013567245e-09, 2.3777643019834690e-10, 5.2697432068365651e-11, 1.1029706356088577e-11, 2.1791052221508088e-12, 4.0616946929687980e-13, 7.1386313730701460e-14, 1.1823696121971421e-14, 1.8444288831090382e-15, 2.7081193971598301e-16, 3.7401100817855975e-17, 4.8552433914811921e-18, 5.9201437592343701e-19, 6.7751229891684194e-20, 7.2714081831677253e-21, 7.3125997647970303e-22, 6.8848488569478251e-23, 6.0629582449560457e-24, 4.9890778549827656e-25, 3.8322872065908508e-26, 2.7449324565145490e-27, 1.8312631014871582e-28, 1.1365779583643404e-29, 6.5544111912453122e-31, 3.5073691468931601e-32, 1.7391569791608673e-33, 7.9793659216047449e-35, 3.3821840940140512e-36, 1.3222509215279862e-37, 4.7595446906951574e-39, 1.5745422805731105e-40, 4.7778763656316077e-42, 1.3271170300689291e-43, 3.3668417110773493e-45, 7.7832174172965086e-47, 1.6354424962643853e-48, 3.1152601083577022e-50, 5.3641275790400676e-52, 8.3238465917891379e-54, 1.1602357060613541e-55, 1.4475542666378789e-57, 1.6104220743828299e-59, 1.5910268843395548e-61, 1.3896846788334397e-63, 1.0679652291289812e-65, 7.1831202945319969e-68, 4.2042110347158325e-70, 2.1277914455786410e-72, 9.2475643800642814e-75, 3.4248444263060510e-77, 1.0716703469936493e-79, 2.8063950432169120e-82, 6.0849467639302215e-85, 1.0792867414624379e-87, 1.5446161289516271e-90, 1.7557722156935769e-93, 1.5565848726847315e-96, 1.0536661811562218e-99, 5.3107713773093621e-103, 1.9341588617846345e-106, 4.9074274510768621e-110, 8.2912095929290215e-114, 8.8092712504606827e-118, 5.4637061901241444e-122, 1.7892892532014627e-126, 2.6854744034643652e-131, 1.4930848367233329e-136, 2.1656533203264049e-142, 4.1917692496623074e-149, 1.9451569748580915e-157}, + {2.7572081394930497e-158, 6.1606195891114038e-150, 3.2804813831912987e-143, 2.3239698294632731e-137, 4.2866809471010448e-132, 2.9250839726119467e-127, 9.1380522604480418e-123, 1.5061298645822260e-118, 1.4481443283610901e-114, 8.7515402609102180e-111, 3.5201601974984495e-107, 9.8605307314410817e-104, 1.9951459370163873e-100, 3.0050341197440912e-97, 3.4549517863933081e-94, 3.0974015995404665e-91, 2.2051316618251230e-88, 1.2664932443877347e-85, 5.9494906351077990e-83, 2.3137758798623088e-80, 7.5297591329388192e-78, 2.0701649292513591e-75, 4.8496085133984147e-73, 9.7550576247941725e-71, 1.6966738887834285e-68, 2.5677846130096429e-66, 3.4010587398231950e-64, 3.9632860731065032e-62, 4.0830488118084828e-60, 3.7353956966630745e-58, 3.0471769779359536e-56, 2.2249520894879323e-54, 1.4592805785267631e-52, 8.6253646389571326e-51, 4.6085517173721152e-49, 2.2322309293391554e-47, 9.8278883256522070e-46, 3.9428752392560041e-44, 1.4448263309190163e-42, 4.8464578191552559e-41, 1.4912109536234187e-39, 4.2170331394849629e-38, 1.0980633083650227e-36, 2.6372704388350129e-35, 5.8519678162563370e-34, 1.2015551933775674e-32, 2.2862239823374585e-31, 4.0367549228098398e-30, 6.6230590346823284e-29, 1.0109795409268771e-27, 1.4374775988383664e-26, 1.9060160602736751e-25, 
2.3593174689076554e-24, 2.7291375088225280e-23, 2.9530245244292184e-22, 2.9916750627894004e-21, 2.8402168199630962e-20, 2.5289725099536307e-19, 2.1136880764547752e-18, 1.6594882054957410e-17, 1.2247874128456919e-16, 8.5035945941928308e-16, 5.5576041209463139e-15, 3.4212940713768145e-14, 1.9850553217879612e-13, 1.0861345804029464e-12, 5.6073947890454799e-12, 2.7329486591769151e-11, 1.2580856861713816e-10, 5.4727113377318889e-10, 2.2506273484860213e-09, 8.7538457447490308e-09, 3.2215483374955876e-08, 1.1221944492414074e-07, 3.7014154510322367e-07, 1.1564156154340975e-06, 3.4233275145786425e-06, 9.6051781722267405e-06, 2.5551197899130104e-05, 6.4459196331247565e-05, 1.5425452576992943e-04, 3.5024735925131938e-04, 7.5473297532327498e-04, 1.5437752230132358e-03, 2.9979972233121189e-03, 5.5285649600795095e-03, 9.6827528386239439e-03, 1.6108512025371353e-02, 2.5458972192611309e-02, 3.8230305479849361e-02, 5.4550997194724032e-02, 7.3971809106191291e-02, 9.5330768912038982e-02, 1.1677021831583778e-01, 1.3595220139257186e-01, 1.5045713397116045e-01, 1.5827896015320714e-01, 1.5827896015320714e-01, 1.5045713397116045e-01, 1.3595220139257186e-01, 1.1677021831583778e-01, 9.5330768912038982e-02, 7.3971809106191291e-02, 5.4550997194724032e-02, 3.8230305479849361e-02, 2.5458972192611309e-02, 1.6108512025371353e-02, 9.6827528386239439e-03, 5.5285649600795095e-03, 2.9979972233121189e-03, 1.5437752230132358e-03, 7.5473297532327498e-04, 3.5024735925131938e-04, 1.5425452576992943e-04, 6.4459196331247565e-05, 2.5551197899130104e-05, 9.6051781722267405e-06, 3.4233275145786425e-06, 1.1564156154340975e-06, 3.7014154510322367e-07, 1.1221944492414074e-07, 3.2215483374955876e-08, 8.7538457447490308e-09, 2.2506273484860213e-09, 5.4727113377318889e-10, 1.2580856861713816e-10, 2.7329486591769151e-11, 5.6073947890454799e-12, 1.0861345804029464e-12, 1.9850553217879612e-13, 3.4212940713768145e-14, 5.5576041209463139e-15, 8.5035945941928308e-16, 1.2247874128456919e-16, 1.6594882054957410e-17, 2.1136880764547752e-18, 2.5289725099536307e-19, 2.8402168199630962e-20, 2.9916750627894004e-21, 2.9530245244292184e-22, 2.7291375088225280e-23, 2.3593174689076554e-24, 1.9060160602736751e-25, 1.4374775988383664e-26, 1.0109795409268771e-27, 6.6230590346823284e-29, 4.0367549228098398e-30, 2.2862239823374585e-31, 1.2015551933775674e-32, 5.8519678162563370e-34, 2.6372704388350129e-35, 1.0980633083650227e-36, 4.2170331394849629e-38, 1.4912109536234187e-39, 4.8464578191552559e-41, 1.4448263309190163e-42, 3.9428752392560041e-44, 9.8278883256522070e-46, 2.2322309293391554e-47, 4.6085517173721152e-49, 8.6253646389571326e-51, 1.4592805785267631e-52, 2.2249520894879323e-54, 3.0471769779359536e-56, 3.7353956966630745e-58, 4.0830488118084828e-60, 3.9632860731065032e-62, 3.4010587398231950e-64, 2.5677846130096429e-66, 1.6966738887834285e-68, 9.7550576247941725e-71, 4.8496085133984147e-73, 2.0701649292513591e-75, 7.5297591329388192e-78, 2.3137758798623088e-80, 5.9494906351077990e-83, 1.2664932443877347e-85, 2.2051316618251230e-88, 3.0974015995404665e-91, 3.4549517863933081e-94, 3.0050341197440912e-97, 1.9951459370163873e-100, 9.8605307314410817e-104, 3.5201601974984495e-107, 8.7515402609102180e-111, 1.4481443283610901e-114, 1.5061298645822260e-118, 9.1380522604480418e-123, 2.9250839726119467e-127, 4.2866809471010448e-132, 2.3239698294632731e-137, 3.2804813831912987e-143, 6.1606195891114038e-150, 2.7572081394930497e-158}, + {3.9076471690941080e-159, 9.0516231985655450e-151, 4.9672234319145855e-144, 3.6154341661312203e-138, 6.8385535268994495e-133, 
4.7785893786651104e-128, 1.5271628772769884e-123, 2.5728411822428113e-119, 2.5269536248805626e-115, 1.5590872025715406e-111, 6.3995909265162004e-108, 1.8286346030132772e-104, 3.7730702714400253e-101, 5.7934780549600130e-98, 6.7888152108219864e-95, 6.2017960981784824e-92, 4.4982131286438984e-89, 2.6316100628013750e-86, 1.2590645102017268e-83, 4.9863539974326119e-81, 1.6522944213413452e-78, 4.6250118175679923e-76, 1.1030078970560833e-73, 2.2585685158900422e-71, 3.9985750751823772e-69, 6.1594934342804038e-67, 8.3034758916801791e-65, 9.8479077342924136e-63, 1.0325295240318946e-60, 9.6133222464273388e-59, 7.9807919503856858e-57, 5.9302840185272367e-55, 3.9582000609173701e-53, 2.3808961922219179e-51, 1.2945943475503535e-49, 6.3814434194007278e-48, 2.8592815645992017e-46, 1.1674410264648282e-44, 4.3538446893851820e-43, 1.4863772701012763e-41, 4.6548397931087811e-40, 1.3398281145584983e-38, 3.5510998957536425e-37, 8.6816489289737604e-36, 1.9610184560405483e-34, 4.0989946034555524e-33, 7.9401630700838486e-32, 1.4273966207691636e-30, 2.3845069143279796e-29, 3.7062734865446578e-28, 5.3663645149930618e-27, 7.2463592171380759e-26, 9.1353566359276092e-25, 1.0763247315298265e-23, 1.1863095918086436e-22, 1.2243177639523886e-21, 1.1841779286732196e-20, 1.0743199749303155e-19, 9.1494332489911347e-19, 7.3203566611052359e-18, 5.5063768554293231e-17, 3.8967149070515473e-16, 2.5960894208342548e-15, 1.6293142388397836e-14, 9.6386743368990212e-14, 5.3778391218729220e-13, 2.8314924455552075e-12, 1.4075632287720228e-11, 6.6097087557103094e-11, 2.9333592912411779e-10, 1.2308733220895705e-09, 4.8855519350876319e-09, 1.8350327790289198e-08, 6.5248828381395632e-08, 2.1971536691628579e-07, 7.0090428369282574e-07, 2.1189035330905069e-06, 6.0723051489799535e-06, 1.6501142336395539e-05, 4.2531734642799478e-05, 1.0400739885143485e-04, 2.4136469790269507e-04, 5.3166865805501619e-04, 1.1118823380073424e-03, 2.2080714903093502e-03, 4.1646982803793314e-03, 7.4617948415810062e-03, 1.2701628295481309e-02, 2.0544362511894173e-02, 3.1578965559564402e-02, 4.6134295845117745e-02, 6.4063927714297891e-02, 8.4567589743710897e-02, 1.0612711013185427e-01, 1.2662114600203433e-01, 1.4363605547026137e-01, 1.5492166682336009e-01, 1.5887690970441684e-01, 1.5492166682336009e-01, 1.4363605547026137e-01, 1.2662114600203433e-01, 1.0612711013185427e-01, 8.4567589743710897e-02, 6.4063927714297891e-02, 4.6134295845117745e-02, 3.1578965559564402e-02, 2.0544362511894173e-02, 1.2701628295481309e-02, 7.4617948415810062e-03, 4.1646982803793314e-03, 2.2080714903093502e-03, 1.1118823380073424e-03, 5.3166865805501619e-04, 2.4136469790269507e-04, 1.0400739885143485e-04, 4.2531734642799478e-05, 1.6501142336395539e-05, 6.0723051489799535e-06, 2.1189035330905069e-06, 7.0090428369282574e-07, 2.1971536691628579e-07, 6.5248828381395632e-08, 1.8350327790289198e-08, 4.8855519350876319e-09, 1.2308733220895705e-09, 2.9333592912411779e-10, 6.6097087557103094e-11, 1.4075632287720228e-11, 2.8314924455552075e-12, 5.3778391218729220e-13, 9.6386743368990212e-14, 1.6293142388397836e-14, 2.5960894208342548e-15, 3.8967149070515473e-16, 5.5063768554293231e-17, 7.3203566611052359e-18, 9.1494332489911347e-19, 1.0743199749303155e-19, 1.1841779286732196e-20, 1.2243177639523886e-21, 1.1863095918086436e-22, 1.0763247315298265e-23, 9.1353566359276092e-25, 7.2463592171380759e-26, 5.3663645149930618e-27, 3.7062734865446578e-28, 2.3845069143279796e-29, 1.4273966207691636e-30, 7.9401630700838486e-32, 4.0989946034555524e-33, 1.9610184560405483e-34, 8.6816489289737604e-36, 
3.5510998957536425e-37, 1.3398281145584983e-38, 4.6548397931087811e-40, 1.4863772701012763e-41, 4.3538446893851820e-43, 1.1674410264648282e-44, 2.8592815645992017e-46, 6.3814434194007278e-48, 1.2945943475503535e-49, 2.3808961922219179e-51, 3.9582000609173701e-53, 5.9302840185272367e-55, 7.9807919503856858e-57, 9.6133222464273388e-59, 1.0325295240318946e-60, 9.8479077342924136e-63, 8.3034758916801791e-65, 6.1594934342804038e-67, 3.9985750751823772e-69, 2.2585685158900422e-71, 1.1030078970560833e-73, 4.6250118175679923e-76, 1.6522944213413452e-78, 4.9863539974326119e-81, 1.2590645102017268e-83, 2.6316100628013750e-86, 4.4982131286438984e-89, 6.2017960981784824e-92, 6.7888152108219864e-95, 5.7934780549600130e-98, 3.7730702714400253e-101, 1.8286346030132772e-104, 6.3995909265162004e-108, 1.5590872025715406e-111, 2.5269536248805626e-115, 2.5728411822428113e-119, 1.5271628772769884e-123, 4.7785893786651104e-128, 6.8385535268994495e-133, 3.6154341661312203e-138, 4.9672234319145855e-144, 9.0516231985655450e-151, 3.9076471690941080e-159}, + {5.5372309477634655e-160, 1.3295499920372636e-151, 7.5182851789608105e-145, 5.6218117631932678e-139, 1.0903164404085858e-133, 7.8013060714659609e-129, 2.5502660858886434e-124, 4.3913191972243959e-120, 4.4053240904063359e-116, 2.7746976743925027e-112, 1.1621599260631149e-108, 3.3872112245822123e-105, 7.1263773535131733e-102, 1.1154449265558409e-98, 1.3320789562852151e-95, 1.2398999039839109e-92, 9.1613585633591115e-90, 5.4590825264344227e-87, 2.6598715295190543e-84, 1.0726407876436278e-81, 3.6188278671651930e-79, 1.0312393292677344e-76, 2.5035350436180196e-74, 5.2179989944251382e-72, 9.4025005576362417e-70, 1.4740979038429450e-67, 2.0223861173713801e-65, 2.4409225876304963e-63, 2.6043821060405426e-61, 2.4675003119821257e-59, 2.0845067543701599e-57, 1.5761631000101791e-55, 1.0705056723174281e-53, 6.5523461103489109e-52, 3.6254089691585309e-50, 1.8184989576075493e-48, 8.2913990414809887e-47, 3.4450097830357403e-45, 1.3074430076121838e-43, 4.5423930481987850e-42, 1.4477028628289723e-40, 4.2408968311262046e-39, 1.1439892419798912e-37, 2.8466218480726145e-36, 6.5448147609160573e-35, 1.3925226088255375e-33, 2.7459089931491756e-32, 5.0252446557042918e-31, 8.5465806757782117e-30, 1.3525065257212417e-28, 1.9939689031724527e-27, 2.7417253568038711e-26, 3.5198681528057952e-25, 4.2235075145104786e-24, 4.7412259196608422e-23, 4.9840723225381343e-22, 4.9106724438195655e-21, 4.5386744987288709e-20, 3.9382188462775671e-19, 3.2106160083645321e-18, 2.4610138689948007e-17, 1.7749316371092158e-16, 1.2052626919006545e-15, 7.7106596673007474e-15, 4.6502415038307999e-14, 2.6453661839732785e-13, 1.4202436007691954e-12, 7.2000553706787476e-12, 3.4484441377850428e-11, 1.5611080651132002e-10, 6.6828734274332115e-10, 2.7064654297770221e-09, 1.0373610614267088e-08, 3.7645745874679117e-08, 1.2939635385192320e-07, 4.2140690642224546e-07, 1.3007683229247929e-06, 3.8067496326313364e-06, 1.0565607635971951e-05, 2.7819080771995885e-05, 6.9504723426630250e-05, 1.6482276370905397e-04, 3.7106723611638040e-04, 7.9325913151483628e-04, 1.6106180017909323e-03, 3.1064654644112318e-03, 5.6926204501420372e-03, 9.9128527364234706e-03, 1.6405471669264392e-02, 2.5807108029946707e-02, 3.8592374515300572e-02, 5.4868153810335302e-02, 7.4171189043357108e-02, 9.5341025218153283e-02, 1.1654188378760408e-01, 1.3547657485442111e-01, 1.4977628436985038e-01, 1.5748155504372183e-01, 1.5748155504372183e-01, 1.4977628436985038e-01, 1.3547657485442111e-01, 1.1654188378760408e-01, 9.5341025218153283e-02, 
7.4171189043357108e-02, 5.4868153810335302e-02, 3.8592374515300572e-02, 2.5807108029946707e-02, 1.6405471669264392e-02, 9.9128527364234706e-03, 5.6926204501420372e-03, 3.1064654644112318e-03, 1.6106180017909323e-03, 7.9325913151483628e-04, 3.7106723611638040e-04, 1.6482276370905397e-04, 6.9504723426630250e-05, 2.7819080771995885e-05, 1.0565607635971951e-05, 3.8067496326313364e-06, 1.3007683229247929e-06, 4.2140690642224546e-07, 1.2939635385192320e-07, 3.7645745874679117e-08, 1.0373610614267088e-08, 2.7064654297770221e-09, 6.6828734274332115e-10, 1.5611080651132002e-10, 3.4484441377850428e-11, 7.2000553706787476e-12, 1.4202436007691954e-12, 2.6453661839732785e-13, 4.6502415038307999e-14, 7.7106596673007474e-15, 1.2052626919006545e-15, 1.7749316371092158e-16, 2.4610138689948007e-17, 3.2106160083645321e-18, 3.9382188462775671e-19, 4.5386744987288709e-20, 4.9106724438195655e-21, 4.9840723225381343e-22, 4.7412259196608422e-23, 4.2235075145104786e-24, 3.5198681528057952e-25, 2.7417253568038711e-26, 1.9939689031724527e-27, 1.3525065257212417e-28, 8.5465806757782117e-30, 5.0252446557042918e-31, 2.7459089931491756e-32, 1.3925226088255375e-33, 6.5448147609160573e-35, 2.8466218480726145e-36, 1.1439892419798912e-37, 4.2408968311262046e-39, 1.4477028628289723e-40, 4.5423930481987850e-42, 1.3074430076121838e-43, 3.4450097830357403e-45, 8.2913990414809887e-47, 1.8184989576075493e-48, 3.6254089691585309e-50, 6.5523461103489109e-52, 1.0705056723174281e-53, 1.5761631000101791e-55, 2.0845067543701599e-57, 2.4675003119821257e-59, 2.6043821060405426e-61, 2.4409225876304963e-63, 2.0223861173713801e-65, 1.4740979038429450e-67, 9.4025005576362417e-70, 5.2179989944251382e-72, 2.5035350436180196e-74, 1.0312393292677344e-76, 3.6188278671651930e-79, 1.0726407876436278e-81, 2.6598715295190543e-84, 5.4590825264344227e-87, 9.1613585633591115e-90, 1.2398999039839109e-92, 1.3320789562852151e-95, 1.1154449265558409e-98, 7.1263773535131733e-102, 3.3872112245822123e-105, 1.1621599260631149e-108, 2.7746976743925027e-112, 4.4053240904063359e-116, 4.3913191972243959e-120, 2.5502660858886434e-124, 7.8013060714659609e-129, 1.0903164404085858e-133, 5.6218117631932678e-139, 7.5182851789608105e-145, 1.3295499920372636e-151, 5.5372309477634655e-160}, + {7.8451635230016858e-161, 1.9523607750140259e-152, 1.1375077401246616e-145, 8.7373530318096058e-140, 1.7373540565570015e-134, 1.2727518873570116e-129, 4.2555627733290781e-125, 7.4887926040201802e-121, 7.6728599246194710e-117, 4.9331494690374170e-113, 2.1081805704508371e-109, 6.2668746307989698e-106, 1.3443162713188522e-102, 2.1447717307711369e-99, 2.6100910182768806e-96, 2.4752041309788718e-93, 1.8629470575327606e-90, 1.1305871177614866e-87, 5.6095106427386894e-85, 2.3032570747054949e-82, 7.9109829843002634e-80, 2.2948435687570354e-77, 5.6707499982586783e-75, 1.2029596296647568e-72, 2.2060845936398770e-70, 3.5197550034683429e-68, 4.9140189431762165e-66, 6.0352629400444085e-64, 6.5524291595185267e-62, 6.3168313216462770e-60, 5.4297718879402538e-58, 4.1774383436193032e-56, 2.8868574992210366e-54, 1.7978766039740882e-52, 1.0121575932082767e-50, 5.1657784424151435e-49, 2.3965562741555340e-47, 1.0131990990489010e-45, 3.9127390924870208e-44, 1.3832722270346481e-42, 4.4862136218811265e-41, 1.3373657357984576e-39, 3.6713213079441595e-38, 9.2972768440223579e-37, 2.1755444131733555e-35, 4.7112703089400342e-34, 9.4560355326075165e-33, 1.7615330473782583e-31, 3.0497304146140982e-30, 4.9132641478669298e-29, 7.3745987634168783e-28, 1.0324336757120609e-26, 1.3496248520015288e-25, 
1.6490733800656049e-24, 1.8852558386853772e-23, 2.0184180321625388e-22, 2.0255846228536046e-21, 1.9070317011573267e-20, 1.6857232563417842e-19, 1.4001409738252677e-18, 1.0935411714915941e-17, 8.0367861053116616e-17, 5.5616740488492008e-16, 3.6264613629986606e-15, 2.2293626576566599e-14, 1.2928616191624445e-13, 7.0768307534960783e-13, 3.6582348996540792e-12, 1.7867762742843070e-11, 8.2498114987726634e-11, 3.6024058170237729e-10, 1.4883529382284672e-09, 5.8205576663344680e-09, 2.1554622191079048e-08, 7.5613014701765766e-08, 2.5135518772343285e-07, 7.9206548159012504e-07, 2.3667653844440894e-06, 6.7081316695963449e-06, 1.8039486708167727e-05, 4.6040475362243668e-05, 1.1154709040626857e-04, 2.5661526702463013e-04, 5.6067195570805187e-04, 1.1636625437784563e-03, 2.2946716387392549e-03, 4.2999800431004790e-03, 7.6583588130546571e-03, 1.2965620087982580e-02, 2.0868833143304531e-02, 3.1937686193532246e-02, 4.6478966725271985e-02, 6.4327817062627538e-02, 8.4677189450608162e-02, 1.0602045179435517e-01, 1.2626797462143288e-01, 1.4305279328441931e-01, 1.5417456855130846e-01, 1.5807042792926745e-01, 1.5417456855130846e-01, 1.4305279328441931e-01, 1.2626797462143288e-01, 1.0602045179435517e-01, 8.4677189450608162e-02, 6.4327817062627538e-02, 4.6478966725271985e-02, 3.1937686193532246e-02, 2.0868833143304531e-02, 1.2965620087982580e-02, 7.6583588130546571e-03, 4.2999800431004790e-03, 2.2946716387392549e-03, 1.1636625437784563e-03, 5.6067195570805187e-04, 2.5661526702463013e-04, 1.1154709040626857e-04, 4.6040475362243668e-05, 1.8039486708167727e-05, 6.7081316695963449e-06, 2.3667653844440894e-06, 7.9206548159012504e-07, 2.5135518772343285e-07, 7.5613014701765766e-08, 2.1554622191079048e-08, 5.8205576663344680e-09, 1.4883529382284672e-09, 3.6024058170237729e-10, 8.2498114987726634e-11, 1.7867762742843070e-11, 3.6582348996540792e-12, 7.0768307534960783e-13, 1.2928616191624445e-13, 2.2293626576566599e-14, 3.6264613629986606e-15, 5.5616740488492008e-16, 8.0367861053116616e-17, 1.0935411714915941e-17, 1.4001409738252677e-18, 1.6857232563417842e-19, 1.9070317011573267e-20, 2.0255846228536046e-21, 2.0184180321625388e-22, 1.8852558386853772e-23, 1.6490733800656049e-24, 1.3496248520015288e-25, 1.0324336757120609e-26, 7.3745987634168783e-28, 4.9132641478669298e-29, 3.0497304146140982e-30, 1.7615330473782583e-31, 9.4560355326075165e-33, 4.7112703089400342e-34, 2.1755444131733555e-35, 9.2972768440223579e-37, 3.6713213079441595e-38, 1.3373657357984576e-39, 4.4862136218811265e-41, 1.3832722270346481e-42, 3.9127390924870208e-44, 1.0131990990489010e-45, 2.3965562741555340e-47, 5.1657784424151435e-49, 1.0121575932082767e-50, 1.7978766039740882e-52, 2.8868574992210366e-54, 4.1774383436193032e-56, 5.4297718879402538e-58, 6.3168313216462770e-60, 6.5524291595185267e-62, 6.0352629400444085e-64, 4.9140189431762165e-66, 3.5197550034683429e-68, 2.2060845936398770e-70, 1.2029596296647568e-72, 5.6707499982586783e-75, 2.2948435687570354e-77, 7.9109829843002634e-80, 2.3032570747054949e-82, 5.6095106427386894e-85, 1.1305871177614866e-87, 1.8629470575327606e-90, 2.4752041309788718e-93, 2.6100910182768806e-96, 2.1447717307711369e-99, 1.3443162713188522e-102, 6.2668746307989698e-106, 2.1081805704508371e-109, 4.9331494690374170e-113, 7.6728599246194710e-117, 7.4887926040201802e-121, 4.2555627733290781e-125, 1.2727518873570116e-129, 1.7373540565570015e-134, 8.7373530318096058e-140, 1.1375077401246616e-145, 1.9523607750140259e-152, 7.8451635230016858e-161}, + {1.1113325242665607e-161, 2.8661157405088651e-153, 1.7203699801925772e-146, 
1.3572912678569801e-140, 2.7667745801693423e-135, 2.0750643806729351e-130, 7.0958232027421688e-126, 1.2760468594731325e-121, 1.3351777371855847e-117, 8.7619375020369923e-114, 3.8201658326880149e-110, 1.1581310985408031e-106, 2.5327817240407812e-103, 4.1185419255140006e-100, 5.1071272449783039e-97, 4.9339716820592914e-94, 3.7824070114676548e-91, 2.3376600104580566e-88, 1.1809947785806983e-85, 4.9369057800338342e-83, 1.7261669173831611e-80, 5.0968500275659627e-78, 1.2818807628470368e-75, 2.7674728413631406e-73, 5.1647694116231104e-71, 8.3851919880334988e-69, 1.1912098546399618e-66, 1.4886103795423663e-64, 1.6443955400649481e-62, 1.6129133242366921e-60, 1.4105633288272223e-58, 1.1041151603377387e-56, 7.7628198468094521e-55, 4.9186136364477312e-53, 2.8172193314312900e-51, 1.4628544044995270e-49, 6.9047962388108693e-48, 2.9700444136624115e-46, 1.1669783057995060e-44, 4.1977240232717920e-43, 1.3852328747381678e-41, 4.2018792516358609e-40, 1.1737667874934990e-38, 3.0248112870289189e-37, 7.2029787582674604e-36, 1.5874640120654814e-34, 3.2427846726141820e-33, 6.1484526840191010e-32, 1.0834946554226288e-30, 1.7768523467499803e-29, 2.7149541158618372e-28, 3.8695229489123667e-27, 5.1500223036312467e-26, 6.4071917092815282e-25, 7.4586793696524265e-24, 8.1320524064342075e-23, 8.3113512372523795e-22, 7.9697999021291434e-21, 7.1759828546746257e-20, 6.0717113565204395e-19, 4.8312403419891758e-18, 3.6176893189785955e-17, 2.5510691957075621e-16, 1.6951627656128116e-15, 1.0621010070430722e-14, 6.2782746989491113e-14, 3.5033054765081019e-13, 1.8463363234967078e-12, 9.1951726021293097e-12, 4.3294831001094053e-11, 1.9281472420443613e-10, 8.1257537810009703e-10, 3.2418174605425769e-09, 1.2248647054749650e-08, 4.3845698417608257e-08, 1.4875155394404803e-07, 4.7845376510326057e-07, 1.4594959469005024e-06, 4.2236041008124797e-06, 1.1598624726140732e-05, 3.0233694291010849e-05, 7.4825364488004140e-05, 1.7586687773524874e-04, 3.9264033171199677e-04, 8.3286186061158249e-04, 1.6788174607182051e-03, 3.2163656781061659e-03, 5.8577616753894349e-03, 1.0143065156003795e-02, 1.6700865101051256e-02, 2.6151472056325592e-02, 3.8948449958622955e-02, 5.5177861430971059e-02, 7.4363251819021731e-02, 9.5346186707308309e-02, 1.1631262299580138e-01, 1.3500522787142605e-01, 1.4910449946872065e-01, 1.5669608061804552e-01, 1.5669608061804552e-01, 1.4910449946872065e-01, 1.3500522787142605e-01, 1.1631262299580138e-01, 9.5346186707308309e-02, 7.4363251819021731e-02, 5.5177861430971059e-02, 3.8948449958622955e-02, 2.6151472056325592e-02, 1.6700865101051256e-02, 1.0143065156003795e-02, 5.8577616753894349e-03, 3.2163656781061659e-03, 1.6788174607182051e-03, 8.3286186061158249e-04, 3.9264033171199677e-04, 1.7586687773524874e-04, 7.4825364488004140e-05, 3.0233694291010849e-05, 1.1598624726140732e-05, 4.2236041008124797e-06, 1.4594959469005024e-06, 4.7845376510326057e-07, 1.4875155394404803e-07, 4.3845698417608257e-08, 1.2248647054749650e-08, 3.2418174605425769e-09, 8.1257537810009703e-10, 1.9281472420443613e-10, 4.3294831001094053e-11, 9.1951726021293097e-12, 1.8463363234967078e-12, 3.5033054765081019e-13, 6.2782746989491113e-14, 1.0621010070430722e-14, 1.6951627656128116e-15, 2.5510691957075621e-16, 3.6176893189785955e-17, 4.8312403419891758e-18, 6.0717113565204395e-19, 7.1759828546746257e-20, 7.9697999021291434e-21, 8.3113512372523795e-22, 8.1320524064342075e-23, 7.4586793696524265e-24, 6.4071917092815282e-25, 5.1500223036312467e-26, 3.8695229489123667e-27, 2.7149541158618372e-28, 1.7768523467499803e-29, 1.0834946554226288e-30, 
6.1484526840191010e-32, 3.2427846726141820e-33, 1.5874640120654814e-34, 7.2029787582674604e-36, 3.0248112870289189e-37, 1.1737667874934990e-38, 4.2018792516358609e-40, 1.3852328747381678e-41, 4.1977240232717920e-43, 1.1669783057995060e-44, 2.9700444136624115e-46, 6.9047962388108693e-48, 1.4628544044995270e-49, 2.8172193314312900e-51, 4.9186136364477312e-53, 7.7628198468094521e-55, 1.1041151603377387e-56, 1.4105633288272223e-58, 1.6129133242366921e-60, 1.6443955400649481e-62, 1.4886103795423663e-64, 1.1912098546399618e-66, 8.3851919880334988e-69, 5.1647694116231104e-71, 2.7674728413631406e-73, 1.2818807628470368e-75, 5.0968500275659627e-78, 1.7261669173831611e-80, 4.9369057800338342e-83, 1.1809947785806983e-85, 2.3376600104580566e-88, 3.7824070114676548e-91, 4.9339716820592914e-94, 5.1071272449783039e-97, 4.1185419255140006e-100, 2.5327817240407812e-103, 1.1581310985408031e-106, 3.8201658326880149e-110, 8.7619375020369923e-114, 1.3351777371855847e-117, 1.2760468594731325e-121, 7.0958232027421688e-126, 2.0750643806729351e-130, 2.7667745801693423e-135, 1.3572912678569801e-140, 1.7203699801925772e-146, 2.8661157405088651e-153, 1.1113325242665607e-161}, + {1.5740526693584465e-162, 4.2063621518212007e-154, 2.6008944296538456e-147, 2.1074517218240589e-141, 4.4036325515199439e-136, 3.3809077673415763e-131, 1.1822946408488032e-126, 2.1725144459735252e-122, 2.3212763347728001e-118, 1.5547020684816359e-114, 6.9150182989560957e-111, 2.1377995126059837e-107, 4.7660941708469085e-104, 7.8984266909061953e-101, 9.9792660546750878e-98, 9.8208576850538984e-95, 7.6677750665974574e-92, 4.8256764019125120e-89, 2.4821990706514334e-86, 1.0563282231662235e-83, 3.7595217722773437e-81, 1.1298318188883590e-78, 2.8919012660409682e-76, 6.3534513888384988e-74, 1.2065320173749644e-71, 1.9931390693715852e-69, 2.8809012333842463e-67, 3.6628479225209667e-65, 4.1164923961054212e-63, 4.1077464167348542e-61, 3.6546677508675100e-59, 2.9102221244239064e-57, 2.0815358697976881e-55, 1.3417085980893448e-53, 7.8178603818978630e-52, 4.1297412217775072e-50, 1.9830414431093633e-48, 8.6778002491153641e-47, 3.4688352621558211e-45, 1.2694602644562695e-43, 4.2621053825441441e-42, 1.3153901362816908e-40, 3.7386729623017851e-39, 9.8033645273483504e-38, 2.3754629195463299e-36, 5.3274424573468086e-35, 1.1074717912785746e-33, 2.1369895498120545e-32, 3.8327370697542584e-31, 6.3974120073582433e-30, 9.9497591435745215e-29, 1.4435514631174776e-27, 1.9558597204465646e-26, 2.4773098248022596e-25, 2.9362302035583120e-24, 3.2596930342072210e-23, 3.3925841234631972e-22, 3.3130211274194918e-21, 3.0381735933104597e-20, 2.6183914995557019e-19, 2.1223329298239927e-18, 1.6190413502935239e-17, 1.1632214089861438e-16, 7.8760411702639125e-16, 5.0287848550079939e-15, 3.0295892003599562e-14, 1.7231158339195911e-13, 9.2573881503227024e-13, 4.7003287276740161e-12, 2.2565549953554105e-11, 1.0248110528690129e-10, 4.4046822285487214e-10, 1.7924282778278772e-09, 6.9087593674811365e-09, 2.5232222625609405e-08, 8.7350604983433712e-08, 2.8673524946820525e-07, 8.9277708976484127e-07, 2.6374622880097546e-06, 7.3949955626738131e-06, 1.9684213310007427e-05, 4.9755352753869857e-05, 1.1945642608697544e-04, 2.7247518029396975e-04, 5.9058909090482672e-04, 1.2166676013683477e-03, 2.3826889355841037e-03, 4.4365604655715969e-03, 7.8555741198391739e-03, 1.3228935168893997e-02, 2.1190659578816773e-02, 3.2291498859030154e-02, 4.6816827098479806e-02, 6.4584179655610197e-02, 8.4780422567999614e-02, 1.0591066198925958e-01, 1.2591645229378781e-01, 1.4247629387217531e-01, 
1.5343815707202285e-01, 1.5727610417082902e-01, 1.5343815707202285e-01, 1.4247629387217531e-01, 1.2591645229378781e-01, 1.0591066198925958e-01, 8.4780422567999614e-02, 6.4584179655610197e-02, 4.6816827098479806e-02, 3.2291498859030154e-02, 2.1190659578816773e-02, 1.3228935168893997e-02, 7.8555741198391739e-03, 4.4365604655715969e-03, 2.3826889355841037e-03, 1.2166676013683477e-03, 5.9058909090482672e-04, 2.7247518029396975e-04, 1.1945642608697544e-04, 4.9755352753869857e-05, 1.9684213310007427e-05, 7.3949955626738131e-06, 2.6374622880097546e-06, 8.9277708976484127e-07, 2.8673524946820525e-07, 8.7350604983433712e-08, 2.5232222625609405e-08, 6.9087593674811365e-09, 1.7924282778278772e-09, 4.4046822285487214e-10, 1.0248110528690129e-10, 2.2565549953554105e-11, 4.7003287276740161e-12, 9.2573881503227024e-13, 1.7231158339195911e-13, 3.0295892003599562e-14, 5.0287848550079939e-15, 7.8760411702639125e-16, 1.1632214089861438e-16, 1.6190413502935239e-17, 2.1223329298239927e-18, 2.6183914995557019e-19, 3.0381735933104597e-20, 3.3130211274194918e-21, 3.3925841234631972e-22, 3.2596930342072210e-23, 2.9362302035583120e-24, 2.4773098248022596e-25, 1.9558597204465646e-26, 1.4435514631174776e-27, 9.9497591435745215e-29, 6.3974120073582433e-30, 3.8327370697542584e-31, 2.1369895498120545e-32, 1.1074717912785746e-33, 5.3274424573468086e-35, 2.3754629195463299e-36, 9.8033645273483504e-38, 3.7386729623017851e-39, 1.3153901362816908e-40, 4.2621053825441441e-42, 1.2694602644562695e-43, 3.4688352621558211e-45, 8.6778002491153641e-47, 1.9830414431093633e-48, 4.1297412217775072e-50, 7.8178603818978630e-52, 1.3417085980893448e-53, 2.0815358697976881e-55, 2.9102221244239064e-57, 3.6546677508675100e-59, 4.1077464167348542e-61, 4.1164923961054212e-63, 3.6628479225209667e-65, 2.8809012333842463e-67, 1.9931390693715852e-69, 1.2065320173749644e-71, 6.3534513888384988e-74, 2.8919012660409682e-76, 1.1298318188883590e-78, 3.7595217722773437e-81, 1.0563282231662235e-83, 2.4821990706514334e-86, 4.8256764019125120e-89, 7.6677750665974574e-92, 9.8208576850538984e-95, 9.9792660546750878e-98, 7.8984266909061953e-101, 4.7660941708469085e-104, 2.1377995126059837e-107, 6.9150182989560957e-111, 1.5547020684816359e-114, 2.3212763347728001e-118, 2.1725144459735252e-122, 1.1822946408488032e-126, 3.3809077673415763e-131, 4.4036325515199439e-136, 2.1074517218240589e-141, 2.6008944296538456e-147, 4.2063621518212007e-154, 1.5740526693584465e-162}, + {2.2290934963031539e-163, 6.1716304223418473e-155, 3.9305958509063355e-148, 3.2706601334834755e-142, 7.0049069648137550e-137, 5.5049260050287900e-132, 1.9684692530020812e-127, 3.6957545112422320e-123, 4.0320327117616306e-119, 2.7559362749699724e-115, 1.2503894738744883e-111, 3.9416969862084461e-108, 8.9577890237321700e-105, 1.5127860735379895e-101, 1.9472732376896783e-98, 1.9519789398323314e-95, 1.5520652579926001e-92, 9.9458309202670908e-90, 5.2083206235820379e-87, 2.2562215375682347e-84, 8.1731118972592471e-82, 2.4997514837476365e-79, 6.5111285362111366e-77, 1.4555888776738779e-74, 2.8125155025369667e-72, 4.7271089337772468e-70, 6.9513137370020262e-68, 8.9912466505154563e-66, 1.0279594830695334e-63, 1.0434894316959833e-61, 9.4440639857201346e-60, 7.6499247670821092e-58, 5.5658406725957325e-56, 3.6493796373809251e-54, 2.1630323514364711e-52, 1.1622901525905010e-50, 5.6773339224732908e-49, 2.5272570465088243e-47, 1.0276815273410889e-45, 3.8259457909463708e-44, 1.3067728287128299e-42, 4.1029828415971428e-41, 1.1864415902523173e-39, 3.1652198121078916e-38, 7.8035886293155777e-37, 
1.7807430428966829e-35, 3.7667945725159055e-34, 7.3963969744653024e-33, 1.3499836949356773e-31, 2.2932424003565328e-30, 3.6300293183361797e-29, 5.3605436078106824e-28, 7.3930155412655720e-27, 9.5323773039462743e-26, 1.1502145928125925e-24, 1.3000662757028629e-23, 1.3776941782313042e-22, 1.3699807056265559e-21, 1.2794012136597428e-20, 1.1229742763811151e-19, 9.2710336687274256e-19, 7.2042972150393114e-18, 5.2729781171856177e-17, 3.6375048889083554e-16, 2.3664867578847666e-15, 1.4528301518132017e-14, 8.4213476872404286e-14, 4.6114644502136799e-13, 2.3867692880717110e-12, 1.1681801468249808e-11, 5.4092941845487156e-11, 2.3708105480750139e-10, 9.8392832670094020e-10, 3.8682673753498617e-09, 1.4411972185221586e-08, 5.0903013962041357e-08, 1.7050141340191249e-07, 5.4177677321026202e-07, 1.6336412605871983e-06, 4.6759044289293238e-06, 1.2707753951032874e-05, 3.2800534876023489e-05, 8.0428506528979537e-05, 1.8739476963256670e-04, 4.1497155710923123e-04, 8.7353709789879039e-04, 1.7483531042542699e-03, 3.3276551358222884e-03, 6.0239245919995622e-03, 1.0373317514267365e-02, 1.6994637482018023e-02, 2.6492061401939205e-02, 3.9298609227795579e-02, 5.5480282208033298e-02, 7.4548212057078664e-02, 9.5346453940664169e-02, 1.1608254348974273e-01, 1.3453811804025481e-01, 1.4844158241213595e-01, 1.5592224232989221e-01, 1.5592224232989221e-01, 1.4844158241213595e-01, 1.3453811804025481e-01, 1.1608254348974273e-01, 9.5346453940664169e-02, 7.4548212057078664e-02, 5.5480282208033298e-02, 3.9298609227795579e-02, 2.6492061401939205e-02, 1.6994637482018023e-02, 1.0373317514267365e-02, 6.0239245919995622e-03, 3.3276551358222884e-03, 1.7483531042542699e-03, 8.7353709789879039e-04, 4.1497155710923123e-04, 1.8739476963256670e-04, 8.0428506528979537e-05, 3.2800534876023489e-05, 1.2707753951032874e-05, 4.6759044289293238e-06, 1.6336412605871983e-06, 5.4177677321026202e-07, 1.7050141340191249e-07, 5.0903013962041357e-08, 1.4411972185221586e-08, 3.8682673753498617e-09, 9.8392832670094020e-10, 2.3708105480750139e-10, 5.4092941845487156e-11, 1.1681801468249808e-11, 2.3867692880717110e-12, 4.6114644502136799e-13, 8.4213476872404286e-14, 1.4528301518132017e-14, 2.3664867578847666e-15, 3.6375048889083554e-16, 5.2729781171856177e-17, 7.2042972150393114e-18, 9.2710336687274256e-19, 1.1229742763811151e-19, 1.2794012136597428e-20, 1.3699807056265559e-21, 1.3776941782313042e-22, 1.3000662757028629e-23, 1.1502145928125925e-24, 9.5323773039462743e-26, 7.3930155412655720e-27, 5.3605436078106824e-28, 3.6300293183361797e-29, 2.2932424003565328e-30, 1.3499836949356773e-31, 7.3963969744653024e-33, 3.7667945725159055e-34, 1.7807430428966829e-35, 7.8035886293155777e-37, 3.1652198121078916e-38, 1.1864415902523173e-39, 4.1029828415971428e-41, 1.3067728287128299e-42, 3.8259457909463708e-44, 1.0276815273410889e-45, 2.5272570465088243e-47, 5.6773339224732908e-49, 1.1622901525905010e-50, 2.1630323514364711e-52, 3.6493796373809251e-54, 5.5658406725957325e-56, 7.6499247670821092e-58, 9.4440639857201346e-60, 1.0434894316959833e-61, 1.0279594830695334e-63, 8.9912466505154563e-66, 6.9513137370020262e-68, 4.7271089337772468e-70, 2.8125155025369667e-72, 1.4555888776738779e-74, 6.5111285362111366e-77, 2.4997514837476365e-79, 8.1731118972592471e-82, 2.2562215375682347e-84, 5.2083206235820379e-87, 9.9458309202670908e-90, 1.5520652579926001e-92, 1.9519789398323314e-95, 1.9472732376896783e-98, 1.5127860735379895e-101, 8.9577890237321700e-105, 3.9416969862084461e-108, 1.2503894738744883e-111, 2.7559362749699724e-115, 4.0320327117616306e-119, 
3.6957545112422320e-123, 1.9684692530020812e-127, 5.5049260050287900e-132, 7.0049069648137550e-137, 3.2706601334834755e-142, 3.9305958509063355e-148, 6.1716304223418473e-155, 2.2290934963031539e-163}, +} diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/internal/PrintGoSlice.m b/vendor/gonum.org/v1/gonum/integrate/quad/internal/PrintGoSlice.m new file mode 100644 index 0000000..6b1f501 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/internal/PrintGoSlice.m @@ -0,0 +1,11 @@ +% Copyright ©2016 The Gonum Authors. All rights reserved. +% Use of this source code is governed by a BSD-style +% license that can be found in the LICENSE file. + +function PrintGoSlice(a) + fprintf('[]float64{') + for i = 1:length(a) + fprintf('%1.16e, ',a(i)) + end + fprintf('}\n') +end \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/internal/genherm.m b/vendor/gonum.org/v1/gonum/integrate/quad/internal/genherm.m new file mode 100644 index 0000000..ce1904c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/internal/genherm.m @@ -0,0 +1,32 @@ +% Copyright ©2016 The Gonum Authors. All rights reserved. +% Use of this source code is governed by a BSD-style +% license that can be found in the LICENSE file. + +clc +clear all +close all + +% Generate herm points +min = 216; +max = 216; +fprintf('xCache = [][]float64{\n') +for i = min:max + [x,w] = hermpts(i); + fprintf('{') + for j = 1:i-1 + fprintf('%1.16e, ',x(j)) + end + fprintf('%1.16e},\n',x(i)) +end +fprintf('}\n') + +fprintf('wCache = [][]float64{\n') +for i = min:max + [x,w] = hermpts(i); + fprintf('{') + for j = 1:i-1 + fprintf('%1.16e, ',w(j)) + end + fprintf('%1.16e},\n',w(i)) +end +fprintf('}\n') diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/internal/hermpts.m b/vendor/gonum.org/v1/gonum/integrate/quad/internal/hermpts.m new file mode 100644 index 0000000..4f5b360 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/internal/hermpts.m @@ -0,0 +1,609 @@ +% Copyright (c) 2015, The Chancellor, Masters and Scholars of the University +% of Oxford, and the Chebfun Developers. +% Copyright (c) 2016 The Gonum Authors +% All rights reserved. +% +% Redistribution and use in source and binary forms, with or without +% modification, are permitted provided that the following conditions are met: +% * Redistributions of source code must retain the above copyright +% notice, this list of conditions and the following disclaimer. +% * Redistributions in binary form must reproduce the above copyright +% notice, this list of conditions and the following disclaimer in the +% documentation and/or other materials provided with the distribution. +% * Neither the name of the University of Oxford nor the names of its +% contributors may be used to endorse or promote products derived from +% this software without specific prior written permission. +% +% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +% ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +% WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +% DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +% ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +% (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +% LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +% ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +% SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +function [x, w, v] = hermpts(n, varargin) +%HERMPTS Hermite points and Gauss-Hermite Quadrature Weights. +% HERMPTS(N) returns N Hermite points X in (-inf, inf). By default these are +% roots of the 'physicist'-type Hermite polynomials, which are orthogonal with +% respect to the weight exp(-x.^2). +% +% HERMPTS(N, 'PROB') normalises instead by the probablist's definition (with +% weight exp(-x.^2/2)), which gives rise to monomials. +% +% [X, W] = HERMPTS(N) returns also a row vector W of weights for Gauss-Hermite +% quadrature. [X,W,V] = HERMPTS(N) returns in addition a column vector V of +% the barycentric weights corresponding to X. +% +% [X, W] = HERMPTS(N, METHOD) where METHOD is one of 'GW', 'REC', 'GLR', or +% 'ASY' allows the user to select which method is used. 'GW' will use the +% traditional Golub-Welsch eigenvalue method [1], best when n<=20. 'REC' +% uses Newton's method with polynomial evaluation via the 3-term +% recurrence for Hermite polynomials. 'GLR' uses Glaser-Liu-Rokhlin +% fast algorithm which is much faster for large N [2]. 'ASY' uses Newton's +% method with polynomial evaluation via asymptotic formula. 'ASY' is the +% fastest for N>=200, 'GLR' is the most accurate for nodes close to 0. +% By default HERMPTS uses 'GW' when N <= 20, 'REC' for 21<=N<200, and +% 'ASY' when N>=200. +% +% References: +% [1] G. H. Golub and J. A. Welsch, "Calculation of Gauss quadrature +% rules", Math. Comp. 23:221-230, 1969, +% [2] A. Glaser, X. Liu and V. Rokhlin, "A fast algorithm for the +% calculation of the roots of special functions", SIAM Journal +% on Scientific Computing", 29(4):1420-1438:, 2007. +% [3] A. Townsend, T. Trogdon and S. Olver, Fast computation of Gauss +% nodes and weights on the whole real line, submitted, 2014. +% +% See also CHEBPTS, LEGPTS, LAGPTS, and JACPTS. + +% Copyright 2015 by The University of Oxford and The Chebfun Developers. +% See http://www.chebfun.org/ for Chebfun information. +% +% 'GW' by Nick Trefethen, March 2009 - algorithm adapted from [1]. +% 'GLR' by Nick Hale, March 2010 - algorithm adapted from [2]. + +% Defaults: +method = 'default'; +type = 'phys'; + +if ( n < 0 ) + error('CHEBFUN:hermpts:n', 'First input should be a positive integer.'); +end + +% Return empty vector if n = 0. 
+if ( n == 0 ) + [x, w, v] = deal([]); + return +end + +% Check the inputs +while ( ~isempty(varargin) ) + s = varargin{1}; + varargin(1) = []; + if ( strcmpi(s, 'GW') ) + method = 'GW'; + elseif ( strcmpi(s,'glr') ) + method = 'GLR'; + elseif ( strcmpi(s,'rec') ) + method = 'REC'; + elseif ( strcmpi(s,'asy') ) + method = 'ASY'; + elseif ( strncmpi(s, 'phys', 3) ) + type = 'phys'; + elseif ( strncmpi(s, 'prob', 3) ) + type = 'prob'; + else + error('CHEBFUN:hermpts:input', 'Unrecognised input string; %s.', s); + end +end + +% Three cases: +% +% N <= 20: Use GW +% 21<=N<200: Use REC +% N>=200: Use ASY +if ( n == 1 ) + % n = 1 case is trivial + x = 0; + w = sqrt(pi); + v = 1; + +elseif ( (n < 21 && strcmpi(method,'default')) || strcmpi(method,'GW') ) + % GW, see [1] + + beta = sqrt(.5*(1:n-1)); % 3-term recurrence coeffs + T = diag(beta, 1) + diag(beta, -1); % Jacobi matrix + [V, D] = eig(T); % Eigenvalue decomposition + [x, indx] = sort(diag(D)); % Hermite points + w = sqrt(pi)*V(1, indx).^2; % weights + v = abs(V(1, indx)).'; % Barycentric weights + v = v./max(v); % Normalize + v(2:2:n) = -v(2:2:n); + + % Enforce symmetry: + ii = 1:floor(n/2); + x = x(ii); + w = w(ii); + vmid = v(floor(n/2)+1); + v = v(ii); + if ( mod(n, 2) ) + x = [x ; 0 ; -x(end:-1:1)]; + w = [w, sqrt(pi) - sum(2*w), w(end:-1:1)]; + v = [v ; vmid ; v(end:-1:1)]; + else + x = [x ; -x(end:-1:1)]; + w = [w, w(end:-1:1)]; + v = [v ; -v(end:-1:1)]; + end + +elseif ( strcmpi(method,'GLR') ) + % Fast, see [2] + + [x, ders] = alg0_Herm(n); % Nodes and H_n'(x) + w = (2*exp(-x.^2)./ders.^2)'; % Quadrature weights + v = exp(-x.^2/2)./ders; % Barycentric weights + v = v./max(abs(v)); % Normalize + if ( ~mod(n, 2) ) + ii = (n/2+1):n; + v(ii) = -v(ii); + end + +elseif ( (n < 200 && strcmpi(method,'default')) || strcmpi(method,'REC') ) + + [x, w, v] = hermpts_rec( n ); + +else + + [x, w, v] = hermpts_asy( n ); + +end + +% Normalise so that sum(w) = sqrt(pi) +w = (sqrt(pi)/sum(w))*w; + +if ( strcmpi(type, 'prob') ) + x = x*sqrt(2); + w = w*sqrt(2); +end + +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% %%%%%%%%%%%%%%%%%%%%%%% Routines for GLR algorithm %%%%%%%%%%%%%%%%%%%%%%%%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +% Driver for 'GLR'. +function [roots, ders] = alg0_Herm(n) +% Compute coefficients of H_m(0), H_m'(0), m = 0,..,N. 
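+% The loop below evaluates the orthonormal three-term recurrence
+%   H_{k+1}(x) = sqrt(2/(k+1))*x*H_k(x) - sqrt(k/(k+1))*H_{k-1}(x)
+% at x = 0, starting from H_0(0) = pi^(-1/4), so that on exit H and Hp hold
+% H_n(0) and H_n'(0); the x-dependent term vanishes in the value update but
+% contributes sqrt(2/(k+1))*H_k(0) to the derivative update.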
+ +Hm2 = 0; +Hm1 = pi^(-1/4); +Hpm2 = 0; +Hpm1 = 0; +for k = 0:n-1 + H = -sqrt(k/(k+1))*Hm2; + Hp = sqrt(2/(k+1))*Hm1-sqrt(k/(k+1))*Hpm2; + Hm2 = Hm1; + Hm1 = H; + Hpm2 = Hpm1; + Hpm1 = Hp; +end + +% allocate storage +roots = zeros(n, 1); +ders = zeros(n, 1); +if ( mod(n,2) ) + % zero is a root: + roots((n-1)/2) = 0; + ders((n+1)/2) = Hp; +else + % find first root: + [roots(n/2+1), ders(n/2+1)] = alg2_Herm(H,n); +end + +% compute roots and derivatives: +[roots, ders] = alg1_Herm(roots, ders); + +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +% Main algorithm for 'GLR' +function [roots, ders] = alg1_Herm(roots, ders) + +n = length(roots); +s = mod(n, 2); +N = (n - s) / 2; + +% number of terms in Taylor expansion +m = 30; + +% initialise +hh1 = ones(m + 1, 1); +u = zeros(1, m + 1); +up = zeros(1, m + 1); + +for j = (N + 1):(n - 1) + + % previous root + x = roots(j); + + % initial approx + h = rk2_Herm(pi/2,-pi/2,x,n) - x; + + % scaling + M = 1/h; + + % recurrence relation for Hermite polynomials + c1 = -(2*n+1-x^2)/M^2; + c2 = 2*x./M^3; + c3 = 1./M^4; + u(1) = 0; + u(2) = ders(j)/M; + u(3) = .5*c1*u(1); + u(4) = (c1*u(2) + c2*u(1))/6; + up(1) = u(2); + up(2) = 2*u(3)*M; + up(3) = 3*u(4)*M; + up(m+1) = 0; + + for k = 2:m-2 + u(k+3) = (c1*u(k+1) + c2*u(k) + c3*u(k-1))/((k+1)*(k+2)); + up(k+2) = (k+2)*u(k+3)*M; + end + + % flip for more accuracy in inner product calculation + u = u(m+1:-1:1); + up = up(m+1:-1:1); + + % Newton iteration + hh = hh1; + hh(end) = M; + step = inf; + l = 0; + z = zeros(m, 1); + while ( (abs(step) > eps) && (l < 10) ) + l = l + 1; + step = (u*hh)/(up*hh); + h = h - step; + % powers of h (This is the fastest way!) + hh = [M ; cumprod(M*h + z)]; + % flip for more accuracy in inner product calculation + hh = hh(end:-1:1); + end + + % update + roots(j+1) = x + h; + ders(j+1) = up*hh; +end + +% nodes are symmetric +roots(1:N+s) = -roots(n:-1:N+1); +ders(1:N+s) = ders(n:-1:N+1); + +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +% find the first root (note H_n'(0) = 0) +function [x1, d1] = alg2_Herm(Hn0, n) + +% advance ODE via Runge-Kutta for initial approx +x1 = rk2_Herm(0, -pi/2, 0, n); + +% number of terms in Taylor expansion +m = 30; + +% scaling +M = 1/x1; +% c = log10(n); +% M = 1./x1.^(1-1.25/(c)); + +% initialise +u = zeros(1,m+1); +up = zeros(1,m+1); + +% recurrence relation for Legendre polynomials +u(1) = Hn0; +u(3) = -.5*(2*n+1)*u(1)/M^2; +up(1) = 0; +up(2) = 2*u(3)*M; +for k = 2:2:m-2 + u(k+3) = (-(2*n+1)*u(k+1)/M^2 + u(k-1)/M^4)/((k+1)*(k+2)); + up(k+2) = (k+2)*u(k+3)*M; +end + +% flip for more accuracy in inner product calculation +u = u(m+1:-1:1); +up = up(m+1:-1:1); + +z = zeros(m, 1); +x1k = [M ; cumprod(M*x1 + z)]; +step = inf; +l = 0; +% Newton iteration +while ( (abs(step) > eps) && (l < 10) ) + l = l + 1; + step = (u*x1k)/(up*x1k); + x1 = x1 - step; + % powers of h (This is the fastest way!) 
+ x1k = [1 ; cumprod(M*x1 + z)]; + x1k = x1k(end:-1:1); +end + +% Update derivative +d1 = up*x1k; + +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +% Runge-Kutta for Hermite Equation +function x = rk2_Herm(t, tn, x, n) +m = 10; +h = (tn-t)/m; +for j = 1:m + k1 = -h/(sqrt(2*n+1-x^2) - .5*x*sin(2*t)/(2*n+1-x^2)); + t = t + h; + k2 = -h/(sqrt(2*n+1-(x+k1)^2) - .5*x*sin(2*t)/(2*n+1-(x+k1)^2)); + x = x + .5*(k1 + k2); +end +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% %%%%%%%%%%%%%%%%%%%%%%% Routines for ASY algorithm %%%%%%%%%%%%%%%%%%%%%%%%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +function [x, w, v] = hermpts_asy(n) +% HERMPTS_ASY, fast algorithm for computing Gauss-Hermite nodes and weights +% using Newton's method with polynomial evaluation via asymptotic expansions. +% +% x = Gauss-Hermite nodes, w = quad weights, v = bary weights. +% +% See [3]. + +[x, w, v] = hermpts_asy0( n ); + +if mod(n,2) == 1 % fold out + x = [-x(end:-1:1);x(2:end)]; + w = [w(end:-1:1) w(2:end)]; w = (sqrt(pi)/sum(w))*w; + v = [v(end:-1:1);v(2:end)]; v = v./max(abs(v)); +else + x = [-x(end:-1:1);x]; + w = [w(end:-1:1) w]; w = (sqrt(pi)/sum(w))*w; + v = [v(end:-1:1);-v]; v = v./max(abs(v)); +end + +% debug +%tic, exact = hermpts(n); toc +%semilogy(abs(exact-x)) +end + +function [x, w, v] = hermpts_asy0(n) +% Compute Hermite nodes and weights using asymptotic formula + +x0 = HermiteInitialGuesses(n); % get initial guesses +t0 = x0./sqrt(2*n+1); +theta0 = acos(t0); % convert to theta-variable +for k = 1:20 + [val, dval] = hermpoly_asy_airy(n, theta0); + dt = -val./(sqrt(2)*sqrt(2*n+1)*dval.*sin(theta0)); + theta0 = theta0 - dt; % Newton update + if norm(dt,inf) < sqrt(eps)/10, break; end +end +t0 = cos(theta0); +x = sqrt(2*n+1)*t0; % back to x-variable + +ders = x.*val + sqrt(2)*dval; +%ders = dval; +w = (exp(-x.^2)./ders.^2)'; % quadrature weights + +v = exp(-x.^2/2)./ders; % Barycentric weights +end + +function [val, dval] = hermpoly_asy_airy(n, theta) +% HERMPOLY_ASY evaluation hermite poly using Airy asymptotic formula in +% theta-space. + +musq = 2*n+1; +cosT = cos(theta); sinT = sin(theta); +sin2T = 2*cosT.*sinT; +eta = .5*theta - .25*sin2T; +chi = -(3*eta/2).^(2/3); +phi = (-chi./sinT.^2).^(1/4); +const = 2*sqrt(pi)*musq^(1/6)*phi; +Airy0 = real(airy(musq.^(2/3)*chi)); +Airy1 = real(airy(1,musq.^(2/3)*chi)); + +% Terms in (12.10.43): +a0 = 1; b0 = 1; +a1 = 15/144; b1 = -7/5*a1; +a2 = 5*7*9*11/2/144^2; b2 = -13/11*a2; +a3 = 7*9*11*13*15*17/6/144^3; +b3 = -19/17*a3; + +% u polynomials in (12.10.9) +u0 = 1; u1 = (cosT.^3-6*cosT)/24; +u2 = (-9*cosT.^4 + 249*cosT.^2 + 145)/1152; +u3 = (-4042*cosT.^9+18189*cosT.^7-28287*cosT.^5-151995*cosT.^3-259290*cosT)/414720; + +%first term +A0 = 1; +val = A0*Airy0; + +%second term +B0 = -(a0*phi.^6.*u1+a1*u0)./chi.^2; +val = val + B0.*Airy1./musq.^(4/3); + +%third term +A1 = (b0*phi.^12.*u2 + b1*phi.^6.*u1 + b2*u0)./chi.^3; +val = val + A1.*Airy0/musq.^2; + +%fourth term +B1 = -(phi.^18.*u3 + a1*phi.^12.*u2 + a2*phi.^6.*u1 + a3*u0)./chi.^5; +val = val + B1.*Airy1./musq.^(4/3+2); + +val = const.*val; + +%% Derivative + +eta = .5*theta - .25*sin2T; +chi = -(3*eta/2).^(2/3); +phi = (-chi./sinT.^2).^(1/4); +const = sqrt(2*pi)*musq^(1/3)./phi; + +% v polynomials in (12.10.10) +v0 = 1; v1 = (cosT.^3+6*cosT)/24; +v2 = (15*cosT.^4-327*cosT.^2-143)/1152; +v3 = (259290*cosT + 238425*cosT.^3 - 36387*cosT.^5 + 18189*cosT.^7 -... 
+ 4042*cosT.^9)/414720; + +%first term +C0 = -(b0*phi.^6.*v1 + b1.*v0)./chi; +dval = C0.*Airy0/musq.^(2/3); + +% %second term +D0 = a0*v0; +dval = dval + D0*Airy1; + +% %third term +C1 = -(phi.^18.*v3 + b1*phi.^12.*v2 + b2*phi.^6.*v1 + b3*v0)./chi.^4; +dval = dval + C1.*Airy0/musq.^(2/3+2); + +%fourth term +D1 = (a0*phi.^12.*v2 + a1*phi.^6.*v1 + a2*v0)./chi.^3; +dval = dval + D1.*Airy1/musq.^2; + +dval = const.*dval; + +end + +function x_init = HermiteInitialGuesses(n) +%HERMITEINTITIALGUESSES(N), Initial guesses for Hermite zeros. +% +% [1] L. Gatteschi, Asymptotics and bounds for the zeros of Laguerre +% polynomials: a survey, J. Comput. Appl. Math., 144 (2002), pp. 7-27. +% +% [2] F. G. Tricomi, Sugli zeri delle funzioni di cui si conosce una +% rappresentazione asintotica, Ann. Mat. Pura Appl. 26 (1947), pp. 283-300. + +% Gatteschi formula involving airy roots [1]. +% These initial guess are good near x = sqrt(n+1/2); +if mod(n,2) == 1 + m = (n-1)/2; bess = (1:m)'*pi; a = .5; +else + m = n/2; bess = ((0:m-1)'+.5)*pi; a = -.5; +end +nu = 4*m + 2*a + 2; +T = @(t) t.^(2/3).*(1+5/48*t.^(-2)-5/36*t.^(-4)+(77125/82944)*t.^(-6) -... + 108056875/6967296*t.^(-8)+162375596875/334430208*t.^(-10)); +airyrts = -T(3/8*pi*(4*(1:m)'-1)); + +airyrts_exact = [ -2.338107410459762 % Exact Airy roots. + -4.087949444130970 + -5.520559828095555 + -6.786708090071765 + -7.944133587120863 + -9.022650853340979 + -10.040174341558084 + -11.008524303733260 + -11.936015563236262 + -12.828776752865757]; +airyrts(1:10) = airyrts_exact; % correct first 10. + +x_init = sqrt(nu + 2^(2/3)*airyrts*nu^(1/3) +... + 1/5*2^(4/3)*airyrts.^2*nu^(-1/3) +... + (11/35-a^2-12/175*airyrts.^3)/nu +... + (16/1575*airyrts+92/7875*airyrts.^4)*2^(2/3)*nu^(-5/3) -... + (15152/3031875*airyrts.^5+1088/121275*airyrts.^2)*2^(1/3)*nu^(-7/3)); +x_init_airy = real(x_init(end:-1:1)); + +% Tricomi initial guesses. Equation (2.1) in [1]. Originally in [2]. +% These initial guesses are good near x = 0 . Note: zeros of besselj(+/-.5,x) +% are integer and half-integer multiples of pi. +% x_init_bess = bess/sqrt(nu).*sqrt((1+ (bess.^2+2*(a^2-1))/3/nu^2) ); +Tnk0 = pi/2*ones(m,1); +nu = (4*m+2*a+2); +rhs = (4*m-4*(1:m)'+3)./nu*pi; + +for k = 1:7 + val = Tnk0 - sin(Tnk0) - rhs; + dval = 1 - cos(Tnk0); + dTnk0 = val./dval; + Tnk0 = Tnk0 - dTnk0; +end + +tnk = cos(Tnk0/2).^2; +x_init_sin = sqrt(nu*tnk - (5./(4*(1-tnk).^2) - 1./(1-tnk)-1+3*a^2)/3/nu); + +% Patch together +p = 0.4985+eps; +x_init = [x_init_sin(1:floor(p*n));x_init_airy(ceil(p*n):end)]; + + +if mod(n,2) == 1 + x_init = [0;x_init]; + x_init = x_init(1:m+1); +else + x_init = x_init(1:m); +end + +% debug: +%y = hermpts(n); +%semilogy(abs(y - x_init)); +%yhalf = -y(m:-1:1); +%semilogy(abs(yhalf - x_init)); +end + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% %%%%%%%%%%%%%%%%%%%%%%% Routines for REC algorithm %%%%%%%%%%%%%%%%%%%%%%%%% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +function [x, w, v] = hermpts_rec(n) +% Compute Hermite nodes and weights using recurrence relation. 
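+% The initial guesses from HermiteInitialGuesses are rescaled by sqrt(2) and
+% then polished with up to 10 Newton sweeps, with hermpoly_rec supplying the
+% polynomial value and derivative at each iterate; NaN updates are zeroed
+% before being applied.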
+ +x0 = HermiteInitialGuesses(n); +x0 = x0.*sqrt(2); + +for kk = 1:10 + [val, dval] = hermpoly_rec(n, x0); + dx = val./dval; + dx(isnan(dx)) = 0; + x0 = x0 - dx; + if norm(dx, inf)= max { + panic("legendre: min >= max") + } + if math.IsInf(min, 0) || math.IsInf(max, 0) { + panic("legendre: infinite bound") + } + for i := range x { + x[i], weight[i] = l.boundedLocation(len(x), i, min, max) + } +} + +func (l Legendre) FixedLocationSingle(n, k int, min, max float64) (x, weight float64) { + if min >= max { + panic("legendre: min >= max") + } + if math.IsInf(min, 0) || math.IsInf(max, 0) { + panic("legendre: infinite bound") + } + return l.boundedLocation(n, k, min, max) +} + +func (l Legendre) boundedLocation(n, k int, min, max float64) (x, weight float64) { + x, weight = l.location(n, k) + return (x+1)/2*(max-min) + min, weight * (max - min) / 2 +} + +// Algorithm adapted from http://sourceforge.net/projects/fastgausslegendrequadrature. + +// Original Copyright Notice: +//******************************************* +// Copyright (C) 2014 by Ignace Bogaert * +//******************************************* + +// This software package is based on the paper +// I. Bogaert, "Iteration-Free Computation of Gauss-Legendre Quadrature Nodes and Weights", +// to be published in the SIAM Journal of Scientific Computing. + +// The main features of this software are: +// - Speed: due to the simple formulas and the O(1) complexity computation of individual Gauss-Legendre +// quadrature nodes and weights. This makes it compatible with parallel computing paradigms. +// - Accuracy: the error on the nodes and weights is within a few ulps (see the paper for details). + +// Disclaimer: +// THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR +// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR +// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// location finds the location and weight for location k given n total samples. +func (l Legendre) location(n, k int) (x, weight float64) { + // Note: k is 1-indexed in the original algorithm. It has been translated to + // zero-indexed here. 
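+	// For n < 101 both theta and the weight are read from the tables below.
+	// For larger n they are computed from the asymptotic expansions; indices
+	// in the upper half of the range (2k+1 > n) are mapped to the mirrored
+	// index and the node is reflected through theta -> pi - theta.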
+ if n < 101 { + theta, weight := l.tabulated(n, k) + return math.Cos(theta), weight + } + if 2*k+1 > n { + theta, weight := l.computed(n, n-k) + return math.Cos(math.Pi - theta), weight + } + theta, weight := l.computed(n, k+1) + return math.Cos(theta), weight +} + +// Returns tabulated theta and weight values: valid for l <= 100 +func (l Legendre) tabulated(n, k int) (theta, weight float64) { + if n%2 == 1 { + n2 := (n - 1) / 2 + if k == n2 { + return math.Pi / 2, 2.0 / (cl[n] * cl[n]) + } + if k < n2 { + return oddThetaZeros[n2-1][n2-k-1], oddWeights[n2-1][n2-k-1] + } + return math.Pi - oddThetaZeros[n2-1][k-n2-1], oddWeights[n2-1][k-n2-1] + } + n2 := n / 2 + if k < n2 { + return evenThetaZeros[n2-1][n2-k-1], evenWeights[n2-1][n2-k-1] + } + return math.Pi - evenThetaZeros[n2-1][k-n2], evenWeights[n2-1][k-n2] +} + +func (l Legendre) computed(n, k int) (theta, weight float64) { + // First, get the bessel zero + w := 1.0 / (float64(n) + 0.5) + nu := besselJZero(k) + theta = w * nu + x := theta * theta + + // Get the asymptotic BesselJ(1, nu) squared + b := besselJ1Squared(k) + + // Get the Chebyshev interpolants for the nodes. + sf1t := (((((-1.29052996274280508473467968379e-12*x+2.40724685864330121825976175184e-10)*x-3.13148654635992041468855740012e-8)*x+0.275573168962061235623801563453e-5)*x-0.148809523713909147898955880165e-3)*x+0.416666666665193394525296923981e-2)*x - 0.416666666666662959639712457549e-1 + sf2t := (((((+2.20639421781871003734786884322e-9*x-7.53036771373769326811030753538e-8)*x+0.161969259453836261731700382098e-5)*x-0.253300326008232025914059965302e-4)*x+0.282116886057560434805998583817e-3)*x-0.209022248387852902722635654229e-2)*x + 0.815972221772932265640401128517e-2 + sf3t := (((((-2.97058225375526229899781956673e-8*x+5.55845330223796209655886325712e-7)*x-0.567797841356833081642185432056e-5)*x+0.418498100329504574443885193835e-4)*x-0.251395293283965914823026348764e-3)*x+0.128654198542845137196151147483e-2)*x - 0.416012165620204364833694266818e-2 + + // and the weights + wsf1t := ((((((((-2.20902861044616638398573427475e-14*x+2.30365726860377376873232578871e-12)*x-1.75257700735423807659851042318e-10)*x+1.03756066927916795821098009353e-8)*x-4.63968647553221331251529631098e-7)*x+0.149644593625028648361395938176e-4)*x-0.326278659594412170300449074873e-3)*x+0.436507936507598105249726413120e-2)*x-0.305555555555553028279487898503e-1)*x + 0.833333333333333302184063103900e-1 + wsf2t := (((((((+3.63117412152654783455929483029e-12*x+7.67643545069893130779501844323e-11)*x-7.12912857233642220650643150625e-9)*x+2.11483880685947151466370130277e-7)*x-0.381817918680045468483009307090e-5)*x+0.465969530694968391417927388162e-4)*x-0.407297185611335764191683161117e-3)*x+0.268959435694729660779984493795e-2)*x - 0.111111111111214923138249347172e-1 + wsf3t := (((((((+2.01826791256703301806643264922e-9*x-4.38647122520206649251063212545e-8)*x+5.08898347288671653137451093208e-7)*x-0.397933316519135275712977531366e-5)*x+0.200559326396458326778521795392e-4)*x-0.422888059282921161626339411388e-4)*x-0.105646050254076140548678457002e-3)*x-0.947969308958577323145923317955e-4)*x + 0.656966489926484797412985260842e-2 + + // Then refine with the paper expansions + nuSin := nu / math.Sin(theta) + bNuSin := b * nuSin + wInvSinc := w * w * nuSin + wIS2 := wInvSinc * wInvSinc + + // Finally, compute the node and the weight + theta = w * (nu + theta*wInvSinc*(sf1t+wIS2*(sf2t+wIS2*sf3t))) + deno := bNuSin + bNuSin*wIS2*(wsf1t+wIS2*(wsf2t+wIS2*wsf3t)) + weight = 2.0 * w / deno + return theta, weight +} + +func 
besselJZero(k int) float64 { + if k < 20 { + return jz[k-1] + } + z := math.Pi * (float64(k) - 0.25) + r := 1.0 / z + r2 := r * r + z += r * (0.125 + r2*(-0.807291666666666666666666666667e-1+r2*(0.246028645833333333333333333333+r2*(-1.82443876720610119047619047619+r2*(25.3364147973439050099206349206+r2*(-567.644412135183381139802038240+r2*(18690.4765282320653831636345064+r2*(-8.49353580299148769921876983660e5+5.09225462402226769498681286758e7*r2)))))))) + return z +} + +func besselJ1Squared(k int) float64 { + if k < 21 { + return j1[k-1] + } + x := 1.0 / (float64(k) - 0.25) + x2 := x * x + return x * (0.202642367284675542887758926420 + x2*x2*(-0.303380429711290253026202643516e-3+x2*(0.198924364245969295201137972743e-3+x2*(-0.228969902772111653038747229723e-3+x2*(0.433710719130746277915572905025e-3+x2*(-0.123632349727175414724737657367e-2+x2*(0.496101423268883102872271417616e-2+x2*(-0.266837393702323757700998557826e-1+.185395398206345628711318848386*x2)))))))) +} + +// Tabulated values +var jz = [20]float64{2.40482555769577276862163187933, 5.52007811028631064959660411281, 8.65372791291101221695419871266, 11.7915344390142816137430449119, 14.9309177084877859477625939974, 18.0710639679109225431478829756, 21.2116366298792589590783933505, 24.3524715307493027370579447632, 27.4934791320402547958772882346, 30.6346064684319751175495789269, 33.7758202135735686842385463467, 36.9170983536640439797694930633, 40.0584257646282392947993073740, 43.1997917131767303575240727287, 46.3411883716618140186857888791, 49.4826098973978171736027615332, 52.6240518411149960292512853804, 55.7655107550199793116834927735, 58.9069839260809421328344066346, 62.0484691902271698828525002646} +var j1 = [21]float64{0.269514123941916926139021992911, 0.115780138582203695807812836182, 0.0736863511364082151406476811985, 0.0540375731981162820417749182758, 0.0426614290172430912655106063495, 0.0352421034909961013587473033648, 0.0300210701030546726750888157688, 0.0261473914953080885904584675399, 0.0231591218246913922652676382178, 0.0207838291222678576039808057297, 0.0188504506693176678161056800214, 0.0172461575696650082995240053542, 0.0158935181059235978027065594287, 0.0147376260964721895895742982592, 0.0137384651453871179182880484134, 0.0128661817376151328791406637228, 0.0120980515486267975471075438497, 0.0114164712244916085168627222986, 0.0108075927911802040115547286830, 0.0102603729262807628110423992790, 0.00976589713979105054059846736696} + +// Tabulated nodes and weights +// The required theta values for the Legendre nodes for l <= 100 +var evenThetaZeros = [][]float64{ + {0.9553166181245092781638573e0}, + {0.1223899586470372591854100e1, 0.5332956802491269896325121e0}, + {0.1329852612388110166006182e1, 0.8483666264874876548310910e0, 0.3696066519448289481138796e0}, + {0.1386317078892131346282665e1, 0.1017455539490153431016397e1, 0.6490365804607796100719162e0, 0.2827570635937967783987981e0}, + {0.1421366498439524924081833e1, 0.1122539327631709474018620e1, 0.8238386589997556048023640e0, 0.5255196555285001171749362e0, 0.2289442988470260178701589e0}, + {0.1445233238471440081118261e1, 0.1194120375947706635968399e1, 0.9430552870605735796668951e0, 0.6921076988818410126677201e0, 0.4414870814893317611922530e0, 0.1923346793046672033050762e0}, + {0.1462529992921481833498746e1, 0.1246003586776677662375070e1, 0.1029498592525136749641068e1, 0.8130407055389454598609888e0, 0.5966877608172733931509619e0, 0.3806189306666775272453522e0, 0.1658171411523664030454318e0}, + {0.1475640280808194256470687e1, 0.1285331444322965257106517e1, 
0.1095033401803444343034890e1, 0.9047575323895165085030778e0, 0.7145252532340252146626998e0, 0.5243866409035941583262629e0, 0.3344986386876292124968005e0, 0.1457246820036738335698855e0}, + {0.1485919440392653014379727e1, 0.1316167494718022699851110e1, 0.1146421481056642228295923e1, 0.9766871104439832694094465e0, 0.8069738930788195349918620e0, 0.6373005058706191519531139e0, 0.4677113145328286263205134e0, 0.2983460782092324727528346e0, 0.1299747364196768405406564e0}, + {0.1494194914310399553510039e1, 0.1340993178589955138305166e1, 0.1187794926634098887711586e1, 0.1034603297590104231043189e1, 0.8814230742890135843662021e0, 0.7282625848696072912405713e0, 0.5751385026314284688366450e0, 0.4220907301111166004529037e0, 0.2692452880289302424376614e0, 0.1172969277059561308491253e0}, + {0.1501000399130816063282492e1, 0.1361409225664372117193308e1, 0.1221820208990359866348145e1, 0.1082235198111836788818162e1, 0.9426568273796608630446470e0, 0.8030892957063359443650460e0, 0.6635400754448062852164288e0, 0.5240242709487281141128643e0, 0.3845781703583910933413978e0, 0.2453165389983612942439953e0, 0.1068723357985259945018899e0}, + {0.1506695545558101030878728e1, 0.1378494427506219143960887e1, 0.1250294703417272987066314e1, 0.1122097523267250692925104e1, 0.9939044422989454674968570e0, 0.8657177770401081355080608e0, 0.7375413075437535618804594e0, 0.6093818382449565759195927e0, 0.4812531951313686873528891e0, 0.3531886675690780704072227e0, 0.2252936226353075734690198e0, 0.9814932949793685067733311e-1}, + {0.1511531546703289231944719e1, 0.1393002286179807923400254e1, 0.1274473959424494104852958e1, 0.1155947313793812040125722e1, 0.1037423319077439147088755e1, 0.9189033445598992550553862e0, 0.8003894803353296871788647e0, 0.6818851814129298518332401e0, 0.5633967073169293284500428e0, 0.4449368152119130526034289e0, 0.3265362611165358134766736e0, 0.2082924425598466358987549e0, 0.9074274842993199730441784e-1}, + {0.1515689149557281132993364e1, 0.1405475003062348722192382e1, 0.1295261501292316172835393e1, 0.1185049147889021579229406e1, 0.1074838574917869281769567e1, 0.9646306371285440922680794e0, 0.8544265718392254369377945e0, 0.7442282945111358297916378e0, 0.6340389954584301734412433e0, 0.5238644768825679339859620e0, 0.4137165857369637683488098e0, 0.3036239070914333637971179e0, 0.1936769929947376175341314e0, 0.8437551461511597225722252e-1}, + {0.1519301729274526620713294e1, 0.1416312682230741743401738e1, 0.1313324092045794720169874e1, 0.1210336308624476413072722e1, 0.1107349759228459143499061e1, 0.1004365001539081003659288e1, 0.9013828087667156388167226e0, 0.7984043170121235411718744e0, 0.6954313000299367256853883e0, 0.5924667257887385542924194e0, 0.4895160050896970092628705e0, 0.3865901987860504829542802e0, 0.2837160095793466884313556e0, 0.1809780449917272162574031e0, 0.7884320726554945051322849e-1}, + {0.1522469852641529230282387e1, 0.1425817011963825344615095e1, 0.1329164502391080681347666e1, 0.1232512573416362994802398e1, 0.1135861522840293704616614e1, 0.1039211728068951568003361e1, 0.9425636940046777101926515e0, 0.8459181315837993237739032e0, 0.7492760951181414487254243e0, 0.6526392394594561548023681e0, 0.5560103418005302722406995e0, 0.4593944730762095704649700e0, 0.3628020075350028174968692e0, 0.2662579994723859636910796e0, 0.1698418454282150179319973e0, 0.7399171309970959768773072e-1}, + {0.1525270780617194430047563e1, 0.1434219768045409606267345e1, 0.1343169000217435981125683e1, 0.1252118659062444379491066e1, 0.1161068957629157748792749e1, 0.1070020159291475075961444e1, 0.9789726059789103169325141e0, 
0.8879267623988119819560021e0, 0.7968832893748414870413015e0, 0.7058431727509840105946884e0, 0.6148079652926100198490992e0, 0.5237802779694730663856110e0, 0.4327648832448234459097574e0, 0.3417715500266717765568488e0, 0.2508238767288223767569849e0, 0.1599966542668327644694431e0, 0.6970264809814094464033170e-1}, + {0.1527764849261740485876940e1, 0.1441701954349064743573367e1, 0.1355639243522655042028688e1, 0.1269576852063768424508476e1, 0.1183514935851550608323947e1, 0.1097453683555812711123880e1, 0.1011393333949027021740881e1, 0.9253342019812867059380523e0, 0.8392767201322475821509486e0, 0.7532215073977623159515351e0, 0.6671694908788198522546767e0, 0.5811221342350705406265672e0, 0.4950819018993074588093747e0, 0.4090533017972007314666814e0, 0.3230455648729987995657071e0, 0.2370809940997936908335290e0, 0.1512302802537625099602687e0, 0.6588357082399222649528476e-1}, + {0.1529999863223206659623262e1, 0.1448406982124841835685420e1, 0.1366814241651488684482888e1, 0.1285221744143731581870833e1, 0.1203629605904952775544878e1, 0.1122037965173751996510051e1, 0.1040446993107623345153211e1, 0.9588569097730895525404200e0, 0.8772680085516152329147030e0, 0.7956806951062012653043722e0, 0.7140955526031660805347356e0, 0.6325134568448222221560326e0, 0.5509357927460004487348532e0, 0.4693648943475422765864580e0, 0.3878050333015201414955289e0, 0.3062649591511896679168503e0, 0.2247658146033686460963295e0, 0.1433746167818849555570557e0, 0.6246124541276674097388211e-1}, + {0.1532014188279762793560699e1, 0.1454449946977268522285131e1, 0.1376885814601482670609845e1, 0.1299321869764876494939757e1, 0.1221758200747475205847413e1, 0.1144194910846247537582396e1, 0.1066632125552939823863593e1, 0.9890700026972186303565530e0, 0.9115087474225932692070479e0, 0.8339486352158799520695092e0, 0.7563900488174808348719219e0, 0.6788335401193977027577509e0, 0.6012799395312684623216685e0, 0.5237305617022755897200291e0, 0.4461876237541810478131970e0, 0.3686551849119556335824055e0, 0.2911415613085158758589405e0, 0.2136668503694680525340165e0, 0.1362947587312224822844743e0, 0.5937690028966411906487257e-1}, + {0.1533838971193864306068338e1, 0.1459924288056445029654271e1, 0.1386009690354996919044862e1, 0.1312095239305276612560739e1, 0.1238181002944535867235042e1, 0.1164267059803796726229370e1, 0.1090353503721897748980095e1, 0.1016440450472067349837507e1, 0.9425280472651469176638349e0, 0.8686164868955467866176243e0, 0.7947060295895204342519786e0, 0.7207970381018823842440224e0, 0.6468900366403721167107352e0, 0.5729858150363658839291287e0, 0.4990856247464946058899833e0, 0.4251915773724379089467945e0, 0.3513075400485981451355368e0, 0.2774414365914335857735201e0, 0.2036124177925793565507033e0, 0.1298811916061515892914930e0, 0.5658282534660210272754152e-1}, + {0.1535499761264077326499892e1, 0.1464906652494521470377318e1, 0.1394313611500109323616335e1, 0.1323720686538524176057236e1, 0.1253127930763390908996314e1, 0.1182535404796980113294400e1, 0.1111943180033868679273393e1, 0.1041351343083674290731439e1, 0.9707600019805773720746280e0, 0.9001692951667510715040632e0, 0.8295794049297955988640329e0, 0.7589905782114329186155957e0, 0.6884031600807736268672129e0, 0.6178176499732537480601935e0, 0.5472348011493452159473826e0, 0.4766558078624760377875119e0, 0.4060826859477620301047824e0, 0.3355191279517093844978473e0, 0.2649727008485465487101933e0, 0.1944616940738156405895778e0, 0.1240440866043499301839465e0, 0.5403988657613871827831605e-1}, + {0.1537017713608809830855653e1, 0.1469460505124226636602925e1, 0.1401903350962364703169699e1, 
0.1334346289590505369693957e1, 0.1266789363044399933941254e1, 0.1199232618763735058848455e1, 0.1131676111906105521856066e1, 0.1064119908394702657537061e1, 0.9965640890815034701957497e0, 0.9290087556203499065939494e0, 0.8614540390091103102510609e0, 0.7939001124053586164046432e0, 0.7263472110048245091518914e0, 0.6587956640463586742461796e0, 0.5912459486086227271608064e0, 0.5236987847717837556177452e0, 0.4561553147193391989386660e0, 0.3886174669444433167860783e0, 0.3210887745896478259115420e0, 0.2535764786314617292100029e0, 0.1860980813776342452540915e0, 0.1187090676924131329841811e0, 0.5171568198966901682810573e-1}, + {0.1538410494858190444190279e1, 0.1473638845472165977392911e1, 0.1408867240039222913928858e1, 0.1344095709533508756473909e1, 0.1279324287566779722061664e1, 0.1214553011719528935181709e1, 0.1149781925191718586091000e1, 0.1085011078936665906275419e1, 0.1020240534516704208782618e1, 0.9554703680422404498752066e0, 0.8907006757608306209160649e0, 0.8259315822134856671969566e0, 0.7611632524946588128425351e0, 0.6963959112887657683892237e0, 0.6316298735371143844913976e0, 0.5668655960010826255149266e0, 0.5021037684870694065589284e0, 0.4373454855522296089897130e0, 0.3725925956833896735786860e0, 0.3078484858841616878136371e0, 0.2431200981264999375962973e0, 0.1784242126043536701754986e0, 0.1138140258514833068653307e0, 0.4958315373802413441075340e-1}, + {0.1539692973716708504412697e1, 0.1477486279394502338589519e1, 0.1415279620944410339318226e1, 0.1353073023537942666830874e1, 0.1290866514321280958405103e1, 0.1228660123395079609266898e1, 0.1166453885011658611362850e1, 0.1104247839096738022319035e1, 0.1042042033248543055386770e1, 0.9798365254403234947595400e0, 0.9176313877712591840677176e0, 0.8554267118081827231209625e0, 0.7932226163976800550406599e0, 0.7310192594231560707888939e0, 0.6688168560730805146438886e0, 0.6066157082814543103941755e0, 0.5444162542389049922529553e0, 0.4822191559963931133878621e0, 0.4200254643636986308379697e0, 0.3578369542536859435571624e0, 0.2956568781922605524959448e0, 0.2334919029083292837123583e0, 0.1713581437497397360313735e0, 0.1093066902335822942650053e0, 0.4761952998197036029817629e-1}, + {0.1540877753740080417345045e1, 0.1481040617373741365390254e1, 0.1421203510518656600018143e1, 0.1361366453804322852131292e1, 0.1301529469356044341206877e1, 0.1241692581525935716830402e1, 0.1181855817774264617619371e1, 0.1122019209772750368801179e1, 0.1062182794829879659341536e1, 0.1002346617783007482854908e1, 0.9425107335729934538419206e0, 0.8826752108319277463183701e0, 0.8228401370047382776784725e0, 0.7630056258499810562932058e0, 0.7031718287376427885875898e0, 0.6433389522119553277924537e0, 0.5835072863023426715977658e0, 0.5236772521416453354847559e0, 0.4638494862268433259444639e0, 0.4040249990308909882616381e0, 0.3442054975680110060507306e0, 0.2843941101955779333389742e0, 0.2245972494281051799602510e0, 0.1648304164747050021714385e0, 0.1051427544146599992432949e0, 0.4580550859172367960799915e-1}, + {0.1541975588842621898865181e1, 0.1484334121018556567335167e1, 0.1426692677652358867201800e1, 0.1369051275783071487471360e1, 0.1311409933595114953831618e1, 0.1253768670970438091691833e1, 0.1196127510146226323327062e1, 0.1138486476526912406867032e1, 0.1080845599717322003702293e1, 0.1023204914871722785830020e1, 0.9655644644970043364617272e0, 0.9079243009168822510582606e0, 0.8502844897148263889326479e0, 0.7926451146568312828354346e0, 0.7350062849078710810840430e0, 0.6773681459074923011631400e0, 0.6197308962817025162722438e0, 0.5620948151095422609589585e0, 0.5044603077892199488064657e0, 
0.4468279872027509013135997e0, 0.3891988265038338944044115e0, 0.3315744698431505326770711e0, 0.2739579305700525818998611e0, 0.2163553856859193758294342e0, 0.1587817673749480300092784e0, 0.1012844151694839452028589e0, 0.4412462056235422293371300e-1}, + {0.1542995710582548837472073e1, 0.1487394484904746766220933e1, 0.1431793279635669382208875e1, 0.1376192108950239363921811e1, 0.1320590987909222553912422e1, 0.1264989932881031519687125e1, 0.1209388962038683919740547e1, 0.1153788095965648154683658e1, 0.1098187358416032947576489e1, 0.1042586777292402877200408e1, 0.9869863859317282394719449e0, 0.9313862248321055503829503e0, 0.8757863440192765677772914e0, 0.8201868063589761051746975e0, 0.7645876922981545448147078e0, 0.7089891068198449136125464e0, 0.6533911899285832425290628e0, 0.5977941329592257586198087e0, 0.5421982048745539015834188e0, 0.4866037965045890355211229e0, 0.4310114988353693539492225e0, 0.3754222503860499120445385e0, 0.3198376369331602148544626e0, 0.2642605649958747239907310e0, 0.2086969927688100977274751e0, 0.1531613237261629042774314e0, 0.9769922156300582041279299e-1, 0.4256272861907242306694832e-1}, + {0.1543946088331101630230404e1, 0.1490245617072432741470241e1, 0.1436545162952171175361532e1, 0.1382844737841275627385236e1, 0.1329144354302189376680665e1, 0.1275444025914442882448630e1, 0.1221743767654456436125309e1, 0.1168043596353244531685999e1, 0.1114343531263457295536939e1, 0.1060643594778787047442989e1, 0.1006943813366184678568021e1, 0.9532442187977767941200107e0, 0.8995448498101763729640445e0, 0.8458457543830885615091264e0, 0.7921469929325243736682034e0, 0.7384486428849507503612470e0, 0.6847508053901545384892447e0, 0.6310536154445759741044291e0, 0.5773572576394624029563656e0, 0.5236619915567428835581025e0, 0.4699681944935857341529219e0, 0.4162764370726533962791279e0, 0.3625876255789859906927245e0, 0.3089032914359211154562848e0, 0.2552262416643531728802047e0, 0.2015622306384971766058615e0, 0.1479251692966707827334002e0, 0.9435916010280739398532997e-1, 0.4110762866287674188292735e-1}, + {0.1544833637851665335244669e1, 0.1492908264756388370493025e1, 0.1440982906138650837480037e1, 0.1389057572001580364167786e1, 0.1337132272892735072773304e1, 0.1285207020157876647295968e1, 0.1233281826234298389291217e1, 0.1181356705000596722238457e1, 0.1129431672204958843918638e1, 0.1077506746001711267258715e1, 0.1025581947637229234301640e1, 0.9736573023432582093437126e0, 0.9217328405213548692702866e0, 0.8698085993416727107979968e0, 0.8178846249414537373941032e0, 0.7659609755086193214466010e0, 0.7140377257012462393241274e0, 0.6621149731355525426273686e0, 0.6101928481720243483360470e0, 0.5582715291407654489802101e0, 0.5063512668959282414914789e0, 0.4544324261262307197237056e0, 0.4025155584642650335664553e0, 0.3506015401168133792671488e0, 0.2986918517703509333332016e0, 0.2467892075469457255751440e0, 0.1948991714956708008247732e0, 0.1430351946011564171352354e0, 0.9123992133264713232350199e-1, 0.3974873026126591246235829e-1}, + {0.1545664389841685834178882e1, 0.1495400520006868605194165e1, 0.1445136662469633349524466e1, 0.1394872825707861196682996e1, 0.1344609018631531661347402e1, 0.1294345250782284139500904e1, 0.1244081532562166402923175e1, 0.1193817875503760392032898e1, 0.1143554292597402872188167e1, 0.1093290798696377946301336e1, 0.1043027411028491785799717e1, 0.9927641498535133311947588e0, 0.9425010393224361375194941e0, 0.8922381086194002226900769e0, 0.8419753935054036625982058e0, 0.7917129384431112475049142e0, 0.7414507995789214800057706e0, 0.6911890490185720721582180e0, 
0.6409277811053987947460976e0, 0.5906671218914768219060599e0, 0.5404072438741681591850965e0, 0.4901483897634232956856935e0, 0.4398909124691513811974471e0, 0.3896353458699818240468259e0, 0.3393825380385224469051922e0, 0.2891339221891949677776928e0, 0.2388921255071779766209942e0, 0.1886625339124777570188312e0, 0.1384581678870181657476050e0, 0.8832030722827102577102185e-1, 0.3847679847963676404657822e-1}, + {0.1546443627125265521960044e1, 0.1497738231263909315513507e1, 0.1449032845902631477147772e1, 0.1400327478265391242178337e1, 0.1351622135921668846451224e1, 0.1302916826944702448727527e1, 0.1254211560091483702838765e1, 0.1205506345013417018443405e1, 0.1156801192508980685500292e1, 0.1108096114833249453312212e1, 0.1059391126084216587933501e1, 0.1010686242693213908544820e1, 0.9619814840575052973573711e0, 0.9132768733691264344256970e0, 0.8645724387181842642305406e0, 0.8158682145859558652971026e0, 0.7671642439014559105969752e0, 0.7184605809290069459742089e0, 0.6697572954095121564500879e0, 0.6210544786425143220264938e0, 0.5723522526623283741373995e0, 0.5236507845164779831804685e0, 0.4749503092950064087413842e0, 0.4262511688770346357064771e0, 0.3775538805043668894422883e0, 0.3288592658750793954850446e0, 0.2801687136893753887834348e0, 0.2314847695998852605184853e0, 0.1828126524563463299986617e0, 0.1341649789468091132459783e0, 0.8558174883654483804697753e-1, 0.3728374374031613183399036e-1}, + {0.1547175997094614757138430e1, 0.1499935340679181525271649e1, 0.1452694693272706215568985e1, 0.1405454061061768876728643e1, 0.1358213450511184239883293e1, 0.1310972868490444296079765e1, 0.1263732322416537730871712e1, 0.1216491820419724046503073e1, 0.1169251371540540180758674e1, 0.1122010985968754004469355e1, 0.1074770675338453464761893e1, 0.1027530453098431393666936e1, 0.9802903349842005856557204e0, 0.9330503396284544173873149e0, 0.8858104893623263267477775e0, 0.8385708112832335506864354e0, 0.7913313387011139500976360e0, 0.7440921131314510897906335e0, 0.6968531870945337206139839e0, 0.6496146281309018959581539e0, 0.6023765246993705639765525e0, 0.5551389950762090311242875e0, 0.5079022012032895030848024e0, 0.4606663710240282967569630e0, 0.4134318360639670775957014e0, 0.3661990979414348851212686e0, 0.3189689535781378596191439e0, 0.2717427498485401725509746e0, 0.2245229557871702595200694e0, 0.1773146332323969343091350e0, 0.1301300193754780766338959e0, 0.8300791095077070533235660e-1, 0.3616244959900389221395842e-1}, + {0.1547865604457777747119921e1, 0.1502004162357357213441384e1, 0.1456142728021903760325049e1, 0.1410281306774684706589738e1, 0.1364419904164498130803254e1, 0.1318558526067441138200403e1, 0.1272697178801115154796514e1, 0.1226835869256177571730448e1, 0.1180974605051351016009903e1, 0.1135113394719709026888693e1, 0.1089252247936466574864114e1, 0.1043391175801911243726755e1, 0.9975301911979639874925565e0, 0.9516693092438447484954432e0, 0.9058085478865097428655118e0, 0.8599479286766250282572181e0, 0.8140874778035996603018790e0, 0.7682272274981820559251592e0, 0.7223672179660643783333797e0, 0.6765075001043380283085699e0, 0.6306481393987597674748178e0, 0.5847892216487432573582268e0, 0.5389308616059791284685642e0, 0.4930732164176132508179420e0, 0.4472165073094733435432890e0, 0.4013610560689043520551232e0, 0.3555073496130768130758891e0, 0.3096561615434305328219637e0, 0.2638087993597793691714182e0, 0.2179676599607749036552390e0, 0.1721376573496165890967450e0, 0.1263306713881449555499955e0, 0.8058436603519718986295825e-1, 0.3510663068970053260227480e-1}, + {0.1548516088202564202943238e1, 
0.1503955613246577879586994e1, 0.1459395145012190281751360e1, 0.1414834688100222735099866e1, 0.1370274247295441414922756e1, 0.1325713827649021532002630e1, 0.1281153434570536124285912e1, 0.1236593073933169034954499e1, 0.1192032752196710979323473e1, 0.1147472476554108430135576e1, 0.1102912255109027578275434e1, 0.1058352097094263144928973e1, 0.1013792013144153206047048e1, 0.9692320156388929821870602e0, 0.9246721191454417746654622e0, 0.8801123409896300773149632e0, 0.8355527020087518049947413e0, 0.7909932275560464363973909e0, 0.7464339488624693592395086e0, 0.7018749049145358048463504e0, 0.6573161450929179933243905e0, 0.6127577329584494909986789e0, 0.5681997518140860838771656e0, 0.5236423130979094957496400e0, 0.4790855694444512920982626e0, 0.4345297357523596151738496e0, 0.3899751246318782591316393e0, 0.3454222091410984787772492e0, 0.3008717408917773811461237e0, 0.2563249902500918978614004e0, 0.2117842860782107775954396e0, 0.1672544029381415755198150e0, 0.1227468836419337342946123e0, 0.7829832364814667171382217e-1, 0.3411071484766340151578357e-1}, + {0.1549130685823945998342524e1, 0.1505799405819664254557106e1, 0.1462468131657470292685966e1, 0.1419136867330461353369368e1, 0.1375805616982638895139986e1, 0.1332474384976155365522566e1, 0.1289143175965912901391449e1, 0.1245811994984327181800398e1, 0.1202480847539690438616688e1, 0.1159149739732435788417226e1, 0.1115818678394807971862305e1, 0.1072487671261111519215409e1, 0.1029156727178025494814510e1, 0.9858258563677261466814511e0, 0.9424950707611702085500992e0, 0.8991643844255133860018485e0, 0.8558338141192845596532563e0, 0.8125033800232146117493243e0, 0.7691731067161328174004981e0, 0.7258430244984030733808537e0, 0.6825131712172895509836733e0, 0.6391835948321685576634513e0, 0.5958543570955633038336902e0, 0.5525255389612023677479152e0, 0.5091972487450747080139606e0, 0.4658696348260689008126722e0, 0.4225429061321313393543928e0, 0.3792173666095906812269559e0, 0.3358934762285008809293807e0, 0.2925719658301625547639832e0, 0.2492540707015179370724365e0, 0.2059420554273186332219697e0, 0.1626405628266886976038507e0, 0.1193608172622853851645011e0, 0.7613840464754681957544313e-1, 0.3316974474186058622824911e-1}, + {0.1549712287207882890839045e1, 0.1507544209724862511636878e1, 0.1465376137339015815734558e1, 0.1423208073529702865859582e1, 0.1381040021900765225468989e1, 0.1338871986235691269778498e1, 0.1296703970558498635765633e1, 0.1254535979202491212629656e1, 0.1212368016889500927716256e1, 0.1170200088822853513468851e1, 0.1128032200798161849314963e1, 0.1085864359337236600941540e1, 0.1043696571852037437540940e1, 0.1001528846847853898635169e1, 0.9593611941780778060127795e0, 0.9171936253674231737318512e0, 0.8750261540268988114426643e0, 0.8328587963932301252176965e0, 0.7906915720393251716472997e0, 0.7485245048233193695739358e0, 0.7063576241759074809548715e0, 0.6641909668761970070284373e0, 0.6220245795476036586681135e0, 0.5798585222396645710869275e0, 0.5376928736905555113005422e0, 0.4955277392687366749125653e0, 0.4533632633323484070376718e0, 0.4111996491651493998151895e0, 0.3690371925202636251212886e0, 0.3268763409876008462653069e0, 0.2847178057580674399826003e0, 0.2425627889274157106498810e0, 0.2004134942584602007834507e0, 0.1582744399049656648660257e0, 0.1161565488818554609430574e0, 0.7409445176394481360104851e-1, 0.3227929535095246410912398e-1}, + {0.1550263480064160377720298e1, 0.1509197788083808185665328e1, 0.1468132100566875710992083e1, 0.1427066420556418463513913e1, 0.1386000751198712817289420e1, 0.1344935095788765217267069e1, 0.1303869457820298477498722e1, 
0.1262803841041882838326682e1, 0.1221738249521212843639205e1, 0.1180672687719991159061894e1, 0.1139607160582508034089119e1, 0.1098541673641858946868449e1, 0.1057476233148907719560749e1, 0.1016410846230700992453501e1, 0.9753455210872527645472818e0, 0.9342802672387126698703291e0, 0.8932150958393123732306518e0, 0.8521500200807685012223049e0, 0.8110850557169691024167180e0, 0.7700202217553081279468270e0, 0.7289555413804262510029339e0, 0.6878910432074509889956044e0, 0.6468267630110350344178276e0, 0.6057627461556542068727688e0, 0.5646990510834698732732127e0, 0.5236357544389875315454201e0, 0.4825729588028297682338108e0, 0.4415108047277878179738561e0, 0.4004494901533595099830119e0, 0.3593893030723592157150581e0, 0.3183306793460978083354355e0, 0.2772743115465352362860883e0, 0.2362213703174823832436869e0, 0.1951740017836102296584907e0, 0.1541366059551230775894261e0, 0.1131198202589878992052369e0, 0.7215736988593890187079586e-1, 0.3143540438351454384152236e-1}, + {0.1550786588415152297375587e1, 0.1510767112957397367780716e1, 0.1470747641421582916022579e1, 0.1430728176478592843861361e1, 0.1390708720885325111445925e1, 0.1350689277522434511387126e1, 0.1310669849435604714836514e1, 0.1270650439881648370588402e1, 0.1230631052380981613091250e1, 0.1190611690778358944744052e1, 0.1150592359314214516523625e1, 0.1110573062709576809284752e1, 0.1070553806268363352417161e1, 0.1030534596002003296175373e1, 0.9905154387828984834423913e0, 0.9504963425353941517573974e0, 0.9104773164759498161192732e0, 0.8704583714184727086854142e0, 0.8304395201669023270865304e0, 0.7904207780260519973626051e0, 0.7504021634749074983118715e0, 0.7103836990664583264642972e0, 0.6703654126486745769832673e0, 0.6303473390491956215820085e0, 0.5903295224434431765765323e0, 0.5503120197533818815098408e0, 0.5102949056413983084126817e0, 0.4702782800468414863285692e0, 0.4302622799152491769326599e0, 0.3902470981180917254123191e0, 0.3502330152869736207185960e0, 0.3102204561556976356809728e0, 0.2702100956292792195263915e0, 0.2302030745053307298726703e0, 0.1902014842102915167005070e0, 0.1502096126336221315300686e0, 0.1102378261690820867329259e0, 0.7031899075931525095025389e-1, 0.3063451333411226493032265e-1}, + {0.1551283705347968314195100e1, 0.1512258463601911009913297e1, 0.1473233225313284690780287e1, 0.1434207992834186122366616e1, 0.1395182768588723275108301e1, 0.1356157555104474252423723e1, 0.1317132355046745793679891e1, 0.1278107171256650000336432e1, 0.1239082006794203284097135e1, 0.1200056864987904389011051e1, 0.1161031749492588664002624e1, 0.1122006664357811755961100e1, 0.1082981614109627397900573e1, 0.1043956603849447575483550e1, 0.1004931639374790125389322e1, 0.9659067273282460489273148e0, 0.9268818753831082867718635e0, 0.8878570924770502938457708e0, 0.8488323891094102606331406e0, 0.8098077777236123075833052e0, 0.7707832732049530424809748e0, 0.7317588935368492604710264e0, 0.6927346606780251833003950e0, 0.6537106017528970872810663e0, 0.6146867506941756306797580e0, 0.5756631505519364744300804e0, 0.5366398568077528417370132e0, 0.4976169422443344500752625e0, 0.4585945042946725387136724e0, 0.4195726764797194195007418e0, 0.3805516468579533335376469e0, 0.3415316890685593880011997e0, 0.3025132172735989410463832e0, 0.2634968895917008761291809e0, 0.2244838184598823563259898e0, 0.1854760433267094750424413e0, 0.1464777455344068532549101e0, 0.1074990339130794792907032e0, 0.6857195785426972961368108e-1, 0.2987341732561906608807860e-1}, + {0.1551756721003315464043007e1, 0.1513677510435354867644006e1, 0.1475598302924814895692182e1, 0.1437519100549654116408972e1, 
0.1399439905448387106945081e1, 0.1361360719846430407096351e1, 0.1323281546084682430842605e1, 0.1285202386651141609385598e1, 0.1247123244216506877361870e1, 0.1209044121674894401873626e1, 0.1170965022191058363946285e1, 0.1132885949255841486220662e1, 0.1094806906752030657845562e1, 0.1056727899033393535018723e1, 0.1018648931020478788570327e1, 0.9805700083178549567966928e0, 0.9424911373589552049711100e0, 0.9044123255867553868253384e0, 0.8663335816813894348633149e0, 0.8282549158498738099497389e0, 0.7901763401989443875774432e0, 0.7520978692204962458482329e0, 0.7140195204316730003387055e0, 0.6759413152305656820841666e0, 0.6378632800575392064866756e0, 0.5997854479978337579981629e0, 0.5617078610344953281799357e0, 0.5236305732820186728652802e0, 0.4855536557378012985520074e0, 0.4474772034530068342865487e0, 0.4094013466928584958758982e0, 0.3713262689388439070717808e0, 0.3332522371792479009733062e0, 0.2951796555193184134657530e0, 0.2571091661074227554417865e0, 0.2190418543971735546480404e0, 0.1809797103814301725822348e0, 0.1429268140230164119614409e0, 0.1048930290780323497410212e0, 0.6690962797843649866645769e-1, 0.2914922224685900914817542e-1}, + {0.1552207346590136182648920e1, 0.1515029387081184115266415e1, 0.1477851430283927973458023e1, 0.1440673478039699629370259e1, 0.1403495532240969264030648e1, 0.1366317594853508812224152e1, 0.1329139667940348087929429e1, 0.1291961753688162615428688e1, 0.1254783854436838464182091e1, 0.1217605972713102930414639e1, 0.1180428111269300876868432e1, 0.1143250273128649048802100e1, 0.1106072461638634327789036e1, 0.1068894680534663975270023e1, 0.1031716934016664760314029e1, 0.9945392268421176498894610e0, 0.9573615644400829018748874e0, 0.9201839530522288586731642e0, 0.8830063999088902711516820e0, 0.8458289134509915302518266e0, 0.8086515036126424512848147e0, 0.7714741821849085841225787e0, 0.7342969632895448309937051e0, 0.6971198640037406540069491e0, 0.6599429051953912854163132e0, 0.6227661126567800124770610e0, 0.5855895186691062254659102e0, 0.5484131642019636734351025e0, 0.5112371020703309674589504e0, 0.4740614015734592960802666e0, 0.4368861554959151187817336e0, 0.3997114910036376358365916e0, 0.3625375872199777754435892e0, 0.3253647047992267079974806e0, 0.2881932382678453273830096e0, 0.2510238145617968753500674e0, 0.2138574934303919974438356e0, 0.1766962177535783269128215e0, 0.1395439709154010255199071e0, 0.1024103832005221866954023e0, 0.6532598686141261097119747e-1, 0.2845930797694291389393445e-1}, + {0.1552637135069155811491072e1, 0.1516318752418798211357541e1, 0.1480000372180291690418989e1, 0.1443681995989991700140976e1, 0.1407363625527612735973164e1, 0.1371045262534953065860219e1, 0.1334726908836065747097909e1, 0.1298408566359386697763653e1, 0.1262090237162411913706886e1, 0.1225771923459625279363960e1, 0.1189453627654523146514386e1, 0.1153135352376772077918208e1, 0.1116817100525785826106551e1, 0.1080498875322336017099434e1, 0.1044180680370244915946738e1, 0.1007862519730785566833872e1, 0.9715443980131875264637689e0, 0.9352263204856910439167915e0, 0.8989082932130182550456316e0, 0.8625903232280967802521182e0, 0.8262724187486163930514201e0, 0.7899545894528804342126058e0, 0.7536368468349768085155075e0, 0.7173192046673890278545072e0, 0.6810016796111441673128480e0, 0.6446842920316340773745262e0, 0.6083670671059611518530899e0, 0.5720500363511797523369558e0, 0.5357332397728172506411618e0, 0.4994167289487775362163415e0, 0.4631005715608865274454686e0, 0.4267848582339839676363509e0, 0.3904697131799790288672503e0, 0.3541553113674441557740819e0, 0.3178419074113077198829473e0, 
0.2815298867038369044519273e0, 0.2452198616736214006194288e0, 0.2089128675558041239775998e0, 0.1726108022974787183994402e0, 0.1363175571713249458600521e0, 0.1000425397881322914313825e0, 0.6381557644960651200944222e-1, 0.2780129671121636039734655e-1}, + {0.1553047499032218401181962e1, 0.1517549844221432542461907e1, 0.1482052191561582448658478e1, 0.1446554542510861055782865e1, 0.1411056898564365493121105e1, 0.1375559261269981001734724e1, 0.1340061632245437638964436e1, 0.1304564013196950335363525e1, 0.1269066405939915513649123e1, 0.1233568812422221364483924e1, 0.1198071234750839346739124e1, 0.1162573675222508872274463e1, 0.1127076136359515473862368e1, 0.1091578620951808778363231e1, 0.1056081132107029235444226e1, 0.1020583673310438024843461e1, 0.9850862484973095869616622e0, 0.9495888621411026369897815e0, 0.9140915193617473526041913e0, 0.8785942260597805964360395e0, 0.8430969890839839780181254e0, 0.8075998164428632814935249e0, 0.7721027175741014967901450e0, 0.7366057036915554827257553e0, 0.7011087882372792641869964e0, 0.6656119874777629720186974e0, 0.6301153213012084608241887e0, 0.5946188142997514629085459e0, 0.5591224972630766104664894e0, 0.5236264092783024624074546e0, 0.4881306007441175888503326e0, 0.4526351377998500905914452e0, 0.4171401090099414677462070e0, 0.3816456357674021470057899e0, 0.3461518890753412856675063e0, 0.3106591177837409768492156e0, 0.2751676985649013361686770e0, 0.2396782299970584002479842e0, 0.2041917239104339765549482e0, 0.1687100353513348647833163e0, 0.1332369676454340307348264e0, 0.9778171579501174586520881e-1, 0.6237343205901608270979365e-1, 0.2717302558182235133513210e-1}, + {0.1553439726211153891540573e1, 0.1518726525682668668950427e1, 0.1484013327077361052080319e1, 0.1449300131698066374929113e1, 0.1414586940879145218883617e1, 0.1379873756000009717714844e1, 0.1345160578499605494109603e1, 0.1310447409892181029407508e1, 0.1275734251784724823396464e1, 0.1241021105896515467487132e1, 0.1206307974081314658309029e1, 0.1171594858352843571506531e1, 0.1136881760914326165420300e1, 0.1102168684193068774494217e1, 0.1067455630881287279906518e1, 0.1032742603984709761582283e1, 0.9980296068808995413713835e0, 0.9633166433897968474836258e0, 0.9286037178597176902839922e0, 0.8938908352730483454962679e0, 0.8591780013772376740585140e0, 0.8244652228485703565016715e0, 0.7897525074988288740747291e0, 0.7550398645386622329842600e0, 0.7203273049167972965433221e0, 0.6856148417619669061621766e0, 0.6509024909658764678789680e0, 0.6161902719627732109904446e0, 0.5814782087876726421060849e0, 0.5467663315368932859708410e0, 0.5120546784214694424751802e0, 0.4773432987146161851453875e0, 0.4426322570828636775769209e0, 0.4079216401227574252826633e0, 0.3732115665343573673240355e0, 0.3385022035318641142927744e0, 0.3037937944563405612019789e0, 0.2690867076466992914990193e0, 0.2343815284441088285495466e0, 0.1996792463094099012688324e0, 0.1649816752853099621072722e0, 0.1302925346385956500837770e0, 0.9562081616094948269905207e-1, 0.6099502786102040135198395e-1, 0.2657252290854776665952679e-1}, + {0.1553814992974904767594241e1, 0.1519852325907741898557817e1, 0.1485889660564341242674032e1, 0.1451926998111647785152899e1, 0.1417964339743630985906479e1, 0.1384001686692845945859686e1, 0.1350039040242776872946770e1, 0.1316076401741232369348729e1, 0.1282113772615099921371445e1, 0.1248151154386817288949698e1, 0.1214188548692984168143550e1, 0.1180225957305622474388020e1, 0.1146263382156703179022046e1, 0.1112300825366698998613230e1, 0.1078338289278105103916832e1, 0.1044375776495107627552926e1, 0.1010413289930890288650173e1, 
0.9764508328644780886953041e0, 0.9424884090095589354132202e0, 0.9085260225984488659490189e0, 0.8745636784853451215455853e0, 0.8406013822743460048537475e0, 0.8066391404795569177534715e0, 0.7726769607271702244889884e0, 0.7387148520130367387469271e0, 0.7047528250344497011443004e0, 0.6707908926224332706815892e0, 0.6368290703120276715090693e0, 0.6028673771049329733376093e0, 0.5689058365047911420524623e0, 0.5349444779460832748774921e0, 0.5009833388030907720537138e0, 0.4670224672735823328060142e0, 0.4330619266162571710985599e0, 0.3991018015460700850326972e0, 0.3651422081877256344485503e0, 0.3311833101314466311103548e0, 0.2972253454486352538763297e0, 0.2632686745061683534910424e0, 0.2293138699815081215985284e0, 0.1953618999343470689252174e0, 0.1614145391777897730914718e0, 0.1274754265555317105245073e0, 0.9355335943686297111639257e-1, 0.5967622944002585907962555e-1, 0.2599798753052849047032580e-1}, + {0.1554174376112911655131098e1, 0.1520930475263640362170511e1, 0.1487686575963027013435604e1, 0.1454442679258803180913942e1, 0.1421198786221944168258440e1, 0.1387954897956585365296993e1, 0.1354711015610581847809736e1, 0.1321467140386931222410853e1, 0.1288223273556309404505081e1, 0.1254979416471008337267759e1, 0.1221735570580615776412743e1, 0.1188491737449843097435062e1, 0.1155247918778991542491874e1, 0.1122004116427655660730083e1, 0.1088760332442401967089102e1, 0.1055516569089340593777585e1, 0.1022272828892740925095715e1, 0.9890291146811467076264609e0, 0.9557854296428465678959260e0, 0.9225417773930866874628226e0, 0.8892981620561221868061383e0, 0.8560545883661619186153440e0, 0.8228110617925680415850631e0, 0.7895675886964734656602191e0, 0.7563241765284943282959446e0, 0.7230808340807681383862155e0, 0.6898375718116413059811978e0, 0.6565944022687408111136058e0, 0.6233513406471279431598408e0, 0.5901084055357449335782332e0, 0.5568656199307345199294838e0, 0.5236230126340485109018232e0, 0.4903806202198476810807501e0, 0.4571384898571183050552302e0, 0.4238966834573972483152713e0, 0.3906552839347125500730013e0, 0.3574144049483910279156003e0, 0.3241742066189948531421192e0, 0.2909349219721993995636414e0, 0.2576969037411283384416169e0, 0.2244607124763750082606152e0, 0.1912272957431274569912962e0, 0.1579983907861406744991899e0, 0.1247775594308675650267811e0, 0.9157341285433675818728635e-1, 0.5841325237532701385812948e-1, 0.2544777076240816313972829e-1}, + {0.1554518863153354618809409e1, 0.1521963936333782670214978e1, 0.1489409010908686292228052e1, 0.1456854087820918568482631e1, 0.1424299168033388494075931e1, 0.1391744252537595165009714e1, 0.1359189342362693116905575e1, 0.1326634438585269225516707e1, 0.1294079542340034988016159e1, 0.1261524654831668904330407e1, 0.1228969777348083696352705e1, 0.1196414911275444418157033e1, 0.1163860058115329026827193e1, 0.1131305219504506571098859e1, 0.1098750397237914982841550e1, 0.1066195593295557461150055e1, 0.1033640809874212986016967e1, 0.1001086049425085324651032e1, 0.9685313146988134601280153e0, 0.9359766087996588330547245e0, 0.9034219352512048766203636e0, 0.8708672980765996496647291e0, 0.8383127018973108640833295e0, 0.8057581520556423644789438e0, 0.7732036547680256450048242e0, 0.7406492173185620802637676e0, 0.7080948483057714882525616e0, 0.6755405579604902654406567e0, 0.6429863585601198571817691e0, 0.6104322649751623629236805e0, 0.5778782954001507969801437e0, 0.5453244723459250134170285e0, 0.5127708240092147734858477e0, 0.4802173861982495342372455e0, 0.4476642050968422792532389e0, 0.4151113413261211455132671e0, 0.3825588760747025757978563e0, 0.3500069206395502661556462e0, 
0.3174556318161704671189642e0, 0.2849052377944113082878058e0, 0.2523560839907875626097181e0, 0.2198087193323827316322426e0, 0.1872640717400572601243546e0, 0.1547238424480887172335593e0, 0.1221915194567498709631299e0, 0.8967553546914315204781840e-1, 0.5720262597323678474637133e-1, 0.2492036059421555107245208e-1}, + {0.1554849361424470843090118e1, 0.1522955431101933730645303e1, 0.1491061502037751976297424e1, 0.1459167575082261894770634e1, 0.1427273651103158170602525e1, 0.1395379730992862183093282e1, 0.1363485815676330697886480e1, 0.1331591906119453530640248e1, 0.1299698003338207337770238e1, 0.1267804108408757103237650e1, 0.1235910222478728395590872e1, 0.1204016346779913703159712e1, 0.1172122482642727288439229e1, 0.1140228631512787910483320e1, 0.1108334794970091261912531e1, 0.1076440974751339138680154e1, 0.1044547172776127017814204e1, 0.1012653391177865049408482e1, 0.9807596323405319627306720e0, 0.9488658989426541583823449e0, 0.9169721940102869797082899e0, 0.8850785209812848825432963e0, 0.8531848837838285971960304e0, 0.8212912869330969580404013e0, 0.7893977356512249782795147e0, 0.7575042360174185552669765e0, 0.7256107951575083863622461e0, 0.6937174214856350398887716e0, 0.6618241250156435481462849e0, 0.6299309177668761147121611e0, 0.5980378142995696297245189e0, 0.5661448324309071185385071e0, 0.5342519942071113461815355e0, 0.5023593272451872220760104e0, 0.4704668666194035162003700e0, 0.4385746575692260390945883e0, 0.4066827594785525726660483e0, 0.3747912518813925812276922e0, 0.3429002438089823350625543e0, 0.3110098888674705209106637e0, 0.2791204106078991711912441e0, 0.2472321474279120600810915e0, 0.2153456371036966567922014e0, 0.1834617887100953140198272e0, 0.1515822689338083535939382e0, 0.1197104949484175660714864e0, 0.8785472823121690639967810e-1, 0.5604116141749524467628553e-1, 0.2441436781606819510490200e-1}, + {0.1555166706034023842787706e1, 0.1523907464890582273398300e1, 0.1492648224885016483409279e1, 0.1461388986785839210767990e1, 0.1430129751376631035251350e1, 0.1398870519462421720845393e1, 0.1367611291876438076975682e1, 0.1336352069487341263827064e1, 0.1305092853207091240256110e1, 0.1273833643999595441027728e1, 0.1242574442890323705495464e1, 0.1211315250977103200801981e1, 0.1180056069442347222927677e1, 0.1148796899567022469820701e1, 0.1117537742746723499546780e1, 0.1086278600510304367969825e1, 0.1055019474541620880304106e1, 0.1023760366705069175705639e1, 0.9925012790757765081567637e0, 0.9612422139755203374385844e0, 0.9299831740157389822411293e0, 0.8987241621493743180375722e0, 0.8674651817337867269827651e0, 0.8362062366076504452859926e0, 0.8049473311856388413561914e0, 0.7736884705759381993127359e0, 0.7424296607273230664538510e0, 0.7111709086148904840060001e0, 0.6799122224768919982385331e0, 0.6486536121198915829209739e0, 0.6173950893164463798129595e0, 0.5861366683298159921278400e0, 0.5548783666157332634604655e0, 0.5236202057751242467455922e0, 0.4923622128691229579358494e0, 0.4611044222679868504944176e0, 0.4298468783051183048132298e0, 0.3985896391770900735176252e0, 0.3673327828297899556279530e0, 0.3360764161195064368209114e0, 0.3048206895905456571224703e0, 0.2735658223403245791263072e0, 0.2423121460275046288225596e0, 0.2110601877217048587999889e0, 0.1798108384023314561549010e0, 0.1485657315840060835766576e0, 0.1173282164330337207824850e0, 0.8610639001623934211634967e-1, 0.5492592372249737419414775e-1, 0.2392851379957687254895331e-1}, +} + +var oddThetaZeros = [][]float64{ + {0.6847192030022829138880982e0}, + {0.1002176803643121641749915e1, 0.4366349492255221620374655e0}, + 
{0.1152892953722227341986065e1, 0.7354466143229520469385622e0, 0.3204050902900619825355950e0}, + {0.1240573923404363422789550e1, 0.9104740292261473250358755e0, 0.5807869795060065580284919e0, 0.2530224166119306882187233e0}, + {0.1297877729331450368298142e1, 0.1025003226369574843297844e1, 0.7522519395990821317003373e0, 0.4798534223256743217333579e0, 0.2090492874137409414071522e0}, + {0.1338247676100454369194835e1, 0.1105718066248490075175419e1, 0.8732366099401630367220948e0, 0.6408663264733867770811230e0, 0.4088002373420211722955679e0, 0.1780944581262765470585931e0}, + {0.1368219536992351783359098e1, 0.1165652065603030148723847e1, 0.9631067821301481995711685e0, 0.7606069572889918619145483e0, 0.5582062109125313357140248e0, 0.3560718303314725022788878e0, 0.1551231069747375098418591e0}, + {0.1391350647015287461874435e1, 0.1211909966211469688151240e1, 0.1032480728417239563449772e1, 0.8530732514258505686069670e0, 0.6737074594242522259878462e0, 0.4944303818194983217354808e0, 0.3153898594929282395996014e0, 0.1373998952992547671039022e0}, + {0.1409742336767428999667236e1, 0.1248691224331339221187704e1, 0.1087646521650454938943641e1, 0.9266134127998189551499083e0, 0.7656007620508340547558669e0, 0.6046261769405451549818494e0, 0.4437316659960951760051408e0, 0.2830497588453068048261493e0, 0.1233108673082312764916251e0}, + {0.1424715475176742734932665e1, 0.1278636375242898727771561e1, 0.1132561101012537613667002e1, 0.9864925055883793730483278e0, 0.8404350520135058972624775e0, 0.6943966110110701016065380e0, 0.5483930281810389839680525e0, 0.4024623099018152227701990e0, 0.2567245837448891192759858e0, 0.1118422651428890834760883e0}, + {0.1437141935303526306632113e1, 0.1303488659735581140681362e1, 0.1169837785762829821262819e1, 0.1036190996404462300207004e1, 0.9025507517347875930425807e0, 0.7689210263823624893974324e0, 0.6353089402976822861185532e0, 0.5017289283414202278167583e0, 0.3682157131008289798868520e0, 0.2348791589702580223688923e0, 0.1023252788872632487579640e0}, + {0.1447620393135667144403507e1, 0.1324445197736386798102445e1, 0.1201271573324181312770120e1, 0.1078100568411879956441542e1, 0.9549336362382321811515336e0, 0.8317729718814276781352878e0, 0.7086221837538611370849622e0, 0.5854877911108011727748238e0, 0.4623830630132757357909198e0, 0.3393399712563371486343129e0, 0.2164597408964339264361902e0, 0.9430083986305519349231898e-1}, + {0.1456575541704195839944967e1, 0.1342355260834552126304154e1, 0.1228136043468909663499174e1, 0.1113918572282611841378549e1, 0.9997037539874953933323299e0, 0.8854928869950799998575862e0, 0.7712879690777516856072467e0, 0.6570923167092416238233585e0, 0.5429119513798658239789812e0, 0.4287591577660783587509129e0, 0.3146635662674373982102762e0, 0.2007190266590380629766487e0, 0.8744338280630300217927750e-1}, + {0.1464317002991565219979113e1, 0.1357838033080061766980173e1, 0.1251359804334884770836945e1, 0.1144882777708662655968171e1, 0.1038407544520296695714932e1, 0.9319349156915986836657782e0, 0.8254660749671546663859351e0, 0.7190028636037068047812305e0, 0.6125483562383020473196681e0, 0.5061081521562999836102547e0, 0.3996936914666951732317457e0, 0.2933325857619472952507468e0, 0.1871123137498061864373407e0, 0.8151560650977882057817999e-1}, + {0.1471075823713997440657641e1, 0.1371355574944658989649887e1, 0.1271635855736122280723838e1, 0.1171916986981363820797100e1, 0.1072199368669106404814915e1, 0.9724835301003496870596165e0, 0.8727702114891848603047954e0, 0.7730605060747958359120755e0, 0.6733561257504194406005404e0, 0.5736599396529727772420934e0, 0.4739771829190733570809765e0, 
0.3743185619229329461021810e0, 0.2747099287638327553949437e0, 0.1752332025619508475799133e0, 0.7634046205384429302353073e-1}, + {0.1477027911291552393547878e1, 0.1383259682348271685979143e1, 0.1289491840051302622319481e1, 0.1195724613675799550484673e1, 0.1101958282220461402990667e1, 0.1008193204014774090964219e1, 0.9144298626454031699590564e0, 0.8206689427646120483710056e0, 0.7269114630504563073034288e0, 0.6331590254855162126233733e0, 0.5394143214244183829842424e0, 0.4456822679082866369288652e0, 0.3519729273095236644049666e0, 0.2583106041071417718760275e0, 0.1647723231643112502628240e0, 0.7178317184275122449502857e-1}, + {0.1482309554825692463999299e1, 0.1393822922226542123661077e1, 0.1305336577335833571381699e1, 0.1216850687682353365944624e1, 0.1128365453024608460982204e1, 0.1039881123511957522668140e1, 0.9513980267579228357946521e0, 0.8629166105524045911461307e0, 0.7744375139383604902604254e0, 0.6859616923374368587817328e0, 0.5974906525247623278123711e0, 0.5090269299866796725116786e0, 0.4205751610647263669405267e0, 0.3321448379994943116084719e0, 0.2437588931448048912587688e0, 0.1554900095178924564386865e0, 0.6773932498157585698088354e-1}, + {0.1487027983239550912222135e1, 0.1403259745496922270264564e1, 0.1319491725464661433609663e1, 0.1235724047968681189212364e1, 0.1151956859289811446164825e1, 0.1068190338689553494802072e1, 0.9844247150109837231349622e0, 0.9006602918737365182850484e0, 0.8168974877846821404275069e0, 0.7331369031796229223580227e0, 0.6493794386888650054486281e0, 0.5656265174356596757139537e0, 0.4818805368222631487731579e0, 0.3981458834052590173509113e0, 0.3144315409387123154212535e0, 0.2307592167302372059759857e0, 0.1471977156945989772472748e0, 0.6412678117309944052403703e-1}, + {0.1491268718102344688271411e1, 0.1411741190914640487505771e1, 0.1332213830951015404441941e1, 0.1252686732830809999680267e1, 0.1173160005794509313174730e1, 0.1093633781237958896879965e1, 0.1014108223243148393065201e1, 0.9345835440325075907377330e0, 0.8550600276575269107773349e0, 0.7755380679025248517258532e0, 0.6960182317959841585145109e0, 0.6165013717819833504477346e0, 0.5369888366794912945318079e0, 0.4574829005269902932408889e0, 0.3779877260196973978940863e0, 0.2985118404618624984946326e0, 0.2190758506462427957069113e0, 0.1397450765119767349146353e0, 0.6088003363863534825005464e-1}, + {0.1495100801651051409999732e1, 0.1419405340110198552778393e1, 0.1343710008748627892724810e1, 0.1268014880389353000310414e1, 0.1192320038028903827079750e1, 0.1116625579891689469044026e1, 0.1040931626310454794079799e1, 0.9652383295306942866661884e0, 0.8895458882533946571137358e0, 0.8138545700535261740447950e0, 0.7381647473570304814395029e0, 0.6624769578126105498149624e0, 0.5867920109947446493391737e0, 0.5111111891461744489290992e0, 0.4354366553151050147918632e0, 0.3597723703299625354660452e0, 0.2841264494060559943920389e0, 0.2085185052177154996230005e0, 0.1330107089065635461375419e0, 0.5794620170990797798650123e-1}, + {0.1498580583401444174317386e1, 0.1426364890228584522673414e1, 0.1354149299629923281192036e1, 0.1281933868420423988034246e1, 0.1209718660626713399048551e1, 0.1137503750956414845248481e1, 0.1065289229411733880607916e1, 0.9930752076949068878557126e0, 0.9208618284397049456535757e0, 0.8486492789905562098591586e0, 0.7764378127156926158031943e0, 0.7042277832708635930867344e0, 0.6320197021480767602848178e0, 0.5598143404345395912377042e0, 0.4876129202946139420188428e0, 0.4154175043169533365541148e0, 0.3432318703096418027524597e0, 0.2710637595435203246492797e0, 0.1989318822110657561806962e0, 
0.1268955503926593166308254e0, 0.5528212871240371048241379e-1}, + {0.1501754508594837337089856e1, 0.1432712730475143340404518e1, 0.1363671034069754274950592e1, 0.1294629464249430679064317e1, 0.1225588071083248538559259e1, 0.1156546912269029268686830e1, 0.1087506056298747798071893e1, 0.1018465586752840651469411e1, 0.9494256083335850798964741e0, 0.8803862556198167553278643e0, 0.8113477061841624760598814e0, 0.7423102009244498727845341e0, 0.6732740767851639064676858e0, 0.6042398217472142478598295e0, 0.5352081720899522889584566e0, 0.4661802954366277026594659e0, 0.3971581629712621730826920e0, 0.3281453857685808451825081e0, 0.2591493642052661979197670e0, 0.1901879854885491785792565e0, 0.1213179541186130699071317e0, 0.5285224511635143601147552e-1}, + {0.1504661202517196460191540e1, 0.1438526110541037227495230e1, 0.1372391084315255737540026e1, 0.1306256159670931796771616e1, 0.1240121376243315949825014e1, 0.1173986779205849344923421e1, 0.1107852421486856229076325e1, 0.1041718366715156747157745e1, 0.9755846932657442605621389e0, 0.9094514999854931965227238e0, 0.8433189145364798253029042e0, 0.7771871059265138564989363e0, 0.7110563039566125173946002e0, 0.6449268305419475123120585e0, 0.5787991523675322133651034e0, 0.5126739740395088296453592e0, 0.4465524134105889084933393e0, 0.3804363581140941600870992e0, 0.3143292666717729726674543e0, 0.2482382273986418438740754e0, 0.1821803739336923550363257e0, 0.1162100228791666307841708e0, 0.5062697144246344520692308e-1}, + {0.1507333049739684406957329e1, 0.1443869798951040686809862e1, 0.1380406601553595646811530e1, 0.1316943486448336467960940e1, 0.1253480485358734060913055e1, 0.1190017634088428795118215e1, 0.1126554974102287077081806e1, 0.1063092554588577221978254e1, 0.9996304352342330000643921e0, 0.9361686900661624628632729e0, 0.8727074129127595264965883e0, 0.8092467253835331800652228e0, 0.7457867888716805068068402e0, 0.6823278231980088937854296e0, 0.6188701366516795329577182e0, 0.5554141765061554178407906e0, 0.4919606183965743300387332e0, 0.4285105345527885639657014e0, 0.3650657359209552112046854e0, 0.3016295408979540017854803e0, 0.2382087510453128743250072e0, 0.1748198074104535338147956e0, 0.1115148317291502081079519e0, 0.4858150828905663931389750e-1}, + {0.1509797405521643600800862e1, 0.1448798505784201776188819e1, 0.1387799649767640868379247e1, 0.1326800860997572277878513e1, 0.1265802165120213614545418e1, 0.1204803590828283748583827e1, 0.1143805171007496028164312e1, 0.1082806944206958485218487e1, 0.1021808956582037259849130e1, 0.9608112645303606832220554e0, 0.8998139383584991342974664e0, 0.8388170675106567024157190e0, 0.7778207682214244793380700e0, 0.7168251950382156442798800e0, 0.6558305587295081487906238e0, 0.5948371551492265376377962e0, 0.5338454137827292925154468e0, 0.4728559836463229599006206e0, 0.4118698949811841042358258e0, 0.3508888880839026413717319e0, 0.2899161521835467942607342e0, 0.2289582244272697168835150e0, 0.1680309071251709912058722e0, 0.1071842976730454709494914e0, 0.4669490825917857848258897e-1}, + {0.1512077535592702651885542e1, 0.1453358762182399391553360e1, 0.1394640024852448295479492e1, 0.1335921342914185177270306e1, 0.1277202737290683500323248e1, 0.1218484231207691826029908e1, 0.1159765851037557179133987e1, 0.1101047627365156083369632e1, 0.1042329596373083545617043e1, 0.9836118016874520301049009e0, 0.9248942968954766185908511e0, 0.8661771490588063053774554e0, 0.8074604437333368789787031e0, 0.7487442923247565105494255e0, 0.6900288431709550365296138e0, 0.6313142987730108226833704e0, 0.5726009435739572428629866e0, 
0.5138891906843943809444838e0, 0.4551796645660731149033106e0, 0.3964733566771858874923011e0, 0.3377719420068963817561906e0, 0.2790784903284342592940125e0, 0.2203992941938221111139898e0, 0.1617495649772923108686624e0, 0.1031775271253784724197264e0, 0.4494935602951385601335598e-1}, + {0.1514193352804819997509006e1, 0.1457590393617468793209691e1, 0.1400987464419153080392546e1, 0.1344384581184662080889348e1, 0.1287781761126833878758488e1, 0.1231179023218584237510462e1, 0.1174576388822640925688125e1, 0.1117973882475943676285829e1, 0.1061371532893653466992815e1, 0.1004769374285310770780417e1, 0.9481674481184788854172919e0, 0.8915658055327279211293483e0, 0.8349645107156934027761499e0, 0.7783636457331086848148917e0, 0.7217633176118399859733190e0, 0.6651636690166557413471029e0, 0.6085648948549621671933311e0, 0.5519672690500084950513985e0, 0.4953711895788266953367288e0, 0.4387772581729219934583483e0, 0.3821864303519236078766179e0, 0.3256003205491779498477363e0, 0.2690218877324958059454348e0, 0.2124571975249336244841297e0, 0.1559209129891515317090843e0, 0.9945952063842375053227931e-1, 0.4332960406341033436157524e-1}, + {0.1516162000094549207021851e1, 0.1461527685790782385188426e1, 0.1406893396579229558427657e1, 0.1352259145769086826235918e1, 0.1297624947629923059740243e1, 0.1242990817790597917328601e1, 0.1188356773715062198539162e1, 0.1133722835287525783953663e1, 0.1079089025551156002698850e1, 0.1024455371662101389801169e1, 0.9698219061474760364582928e0, 0.9151886685974009713577537e0, 0.8605557079864861100238346e0, 0.8059230859253162466918892e0, 0.7512908813164713594661588e0, 0.6966591971861112012613682e0, 0.6420281709850565965229799e0, 0.5873979906122764301937499e0, 0.5327689202536826556885353e0, 0.4781413438508069051295597e0, 0.4235158420269503798571552e0, 0.3688933369002844229314675e0, 0.3142753865947702189467806e0, 0.2596648470121556361200229e0, 0.2050675726616484232653526e0, 0.1504977164639767777858359e0, 0.9600014792058154736462106e-1, 0.4182252607645932321862773e-1}, + {0.1517998315905975681819213e1, 0.1465200315462026532129551e1, 0.1412402336143180968579639e1, 0.1359604389111228213837104e1, 0.1306806486279734731351497e1, 0.1254008640622089183072742e1, 0.1201210866535131048800458e1, 0.1148413180281179970113571e1, 0.1095615600538999408768381e1, 0.1042818149105710558651372e1, 0.9900208518088600875617620e0, 0.9372237397138955502862203e0, 0.8844268507524555199840381e0, 0.8316302319600398731649744e0, 0.7788339426133210890795576e0, 0.7260380587255163256281298e0, 0.6732426796448045921910045e0, 0.6204479380061240544867289e0, 0.5676540152134466427854705e0, 0.5148611664077887834613451e0, 0.4620697624728053757183766e0, 0.4092803643735033357684553e0, 0.3564938631002461237979451e0, 0.3037117642790043703921396e0, 0.2509368276982060978106092e0, 0.1981747109679032915697317e0, 0.1454390911823840643137232e0, 0.9277332955453467429763451e-1, 0.4041676055113025684436480e-1}, + {0.1519715208823086817411929e1, 0.1468634099702062550682430e1, 0.1417553008469014674939490e1, 0.1366471944542347269659860e1, 0.1315390917933946912760115e1, 0.1264309939489363760018555e1, 0.1213229021168654147755139e1, 0.1162148176384137345494752e1, 0.1111067420416500738111992e1, 0.1059986770938296676746064e1, 0.1008906248685091746434581e1, 0.9578258783312407255956784e0, 0.9067456896525242756445150e0, 0.8556657190967860708153477e0, 0.8045860119448479090873824e0, 0.7535066253423996943740445e0, 0.7024276326462752642452137e0, 0.6513491298057893513225544e0, 0.6002712449887427739045163e0, 0.5491941535583390603837715e0, 
0.4981181022276018128369963e0, 0.4470434496975185070560821e0, 0.3959707385770101868486847e0, 0.3449008307748737032772825e0, 0.2938351828535981363494671e0, 0.2427764647581323719392653e0, 0.1917301500230701193408602e0, 0.1407094708800750523796875e0, 0.8975637836633630394302762e-1, 0.3910242380354419363081899e-1}, + {0.1521323961422700444944464e1, 0.1471851603590422118622546e1, 0.1422379260986849454727777e1, 0.1372906941604798453293218e1, 0.1323434653909307929892118e1, 0.1273962407026590487708892e1, 0.1224490210963055761921526e1, 0.1175018076866133593082748e1, 0.1125546017342156230227131e1, 0.1076074046851682267877939e1, 0.1026602182210094558879809e1, 0.9771304432322302018639612e0, 0.9276588535760335871906045e0, 0.8781874418647315968408864e0, 0.8287162432047307488040550e0, 0.7792453012756761070555010e0, 0.7297746712644485550469075e0, 0.6803044240724808212528033e0, 0.6308346524943159683026367e0, 0.5813654805388740483542438e0, 0.5318970779332963132260134e0, 0.4824296835154055410257004e0, 0.4329636445908698102350729e0, 0.3834994865870752458854056e0, 0.3340380441799942088370002e0, 0.2845807279748544733570760e0, 0.2351301237470960623526672e0, 0.1856915325646991222655151e0, 0.1362777698319134965765757e0, 0.8692946525012054120187353e-1, 0.3787087726949234365520114e-1}, + {0.1522834478472358672931947e1, 0.1474872636605138418026177e1, 0.1426910807768284322082436e1, 0.1378948998781055367310047e1, 0.1330987216841224680164684e1, 0.1283025469674968454386883e1, 0.1235063765709222885799986e1, 0.1187102114275073728898860e1, 0.1139140525853183114841234e1, 0.1091179012375759666645271e1, 0.1043217587604604879578741e1, 0.9952562676120370548458597e0, 0.9472950714021223337048082e0, 0.8993340217254078241758816e0, 0.8513731461641338285808219e0, 0.8034124786014693431693904e0, 0.7554520612457602368887930e0, 0.7074919474732165281510693e0, 0.6595322059052657580628641e0, 0.6115729263971504325174172e0, 0.5636142290734363894767612e0, 0.5156562783879918167717991e0, 0.4676993058012953469089537e0, 0.4197436479350834076514896e0, 0.3717898140987174444032373e0, 0.3238386134116156886828960e0, 0.2758914133405791810724762e0, 0.2279507206431424610498769e0, 0.1800216744637006612298520e0, 0.1321166988439841543825694e0, 0.8427518284958235696897899e-1, 0.3671453742186897322954009e-1}, + {0.1524255491013576804195881e1, 0.1477714660784952783237945e1, 0.1431173841758652772349485e1, 0.1384633039781787069436630e1, 0.1338092261006965672253841e1, 0.1291551512012124788593875e1, 0.1245010799937299944123195e1, 0.1198470132644670409416924e1, 0.1151929518909907204916554e1, 0.1105388968655282680015213e1, 0.1058848493238442193822372e1, 0.1012308105815651361079674e1, 0.9657678218054126734684090e0, 0.9192276594886802366068293e0, 0.8726876407972167893294764e0, 0.8261477923647281669131478e0, 0.7796081469509049827753598e0, 0.7330687454042532567721262e0, 0.6865296394193009886613469e0, 0.6399908954920466591029822e0, 0.5934526007301573325059582e0, 0.5469148716199143611697357e0, 0.5003778676688561814362271e0, 0.4538418134105091550464446e0, 0.4073070354279485829740435e0, 0.3607740278788822846227453e0, 0.3142435758510728338330843e0, 0.2677170062389944640113953e0, 0.2211967514739567668334169e0, 0.1746877983807874325844051e0, 0.1282022028383479964348629e0, 0.8177818680168764430245080e-1, 0.3562671947817428176226631e-1}, + {0.1525594725214770881206476e1, 0.1480393128432045740356817e1, 0.1435191541323085582529217e1, 0.1389989968924959812091252e1, 0.1344788416522907866060817e1, 0.1299586889746827997174554e1, 0.1254385394680661996389736e1, 
0.1209183937989395175829969e1, 0.1163982527069600127515982e1, 0.1118781170231154762473596e1, 0.1073579876920155012130433e1, 0.1028378657996412636748477e1, 0.9831775260837211038023103e0, 0.9379764960179657076015136e0, 0.8927755854282048597997986e0, 0.8475748155007347757967789e0, 0.8023742119985848209905761e0, 0.7571738066433708695662393e0, 0.7119736390205872251796930e0, 0.6667737592565460745639184e0, 0.6215742318591892056934095e0, 0.5763751413603713322640298e0, 0.5311766008298875656047892e0, 0.4859787651249621588538330e0, 0.4407818522612533891543536e0, 0.3955861793705505114602136e0, 0.3503922263398633798966312e0, 0.3052007556167344348049303e0, 0.2600130558662051177480644e0, 0.2148314894784555841956251e0, 0.1696608997322034095150907e0, 0.1245129955389270002683579e0, 0.7942489891978153749097006e-1, 0.3460150809198016850782325e-1}, + {0.1526859042890589526378487e1, 0.1482921763148403842276533e1, 0.1438984491795164536567108e1, 0.1395047233189252525231459e1, 0.1351109991891878034957302e1, 0.1307172772745304669260382e1, 0.1263235580960968906699379e1, 0.1219298422221050703835127e1, 0.1175361302797916700875697e1, 0.1131424229697065895207730e1, 0.1087487210830887883186060e1, 0.1043550255232887174273672e1, 0.9996133733253190253881393e0, 0.9556765772578535710715874e0, 0.9117398813415957196221754e0, 0.8678033026125661948850687e0, 0.8238668615732247310812836e0, 0.7799305831824293601507400e0, 0.7359944981977457886183921e0, 0.6920586450266629333858465e0, 0.6481230723279649663476697e0, 0.6041878427445000852182582e0, 0.5602530383870993537615272e0, 0.5163187691099712879003757e0, 0.4723851853891499571797438e0, 0.4284524990953311047063058e0, 0.3845210184454249891341793e0, 0.3405912098612419399584605e0, 0.2966638144233703899038032e0, 0.2527400847124078576715667e0, 0.2088223170017057788708674e0, 0.1649152190599722827308055e0, 0.1210301722471160155498167e0, 0.7720326018898817828206987e-1, 0.3363364974516995102167462e-1}, + {0.1528054559083405137047563e1, 0.1485312794997097705446883e1, 0.1442571038214470776579613e1, 0.1399829292522320013570493e1, 0.1357087561874166765548658e1, 0.1314345850454078759228779e1, 0.1271604162748143389685638e1, 0.1228862503626296926524085e1, 0.1186120878437839368895715e1, 0.1143379293124832099340074e1, 0.1100637754358770795248912e1, 0.1057896269707576280860569e1, 0.1015154847842238156769126e1, 0.9724134987956584339974640e0, 0.9296722342907946818431047e0, 0.8869310681617368097575324e0, 0.8441900169008687429295884e0, 0.8014491003793523040286325e0, 0.7587083428093935362576859e0, 0.7159677740493625646700975e0, 0.6732274314040501413860867e0, 0.6304873621547357085895928e0, 0.5877476271899241333221832e0, 0.5450083063396327078463020e0, 0.5022695064252395155059223e0, 0.4595313737871711838065652e0, 0.4167941144922007176438387e0, 0.3740580283336802289311736e0, 0.3313235690067746553700419e0, 0.2885914573933480330041531e0, 0.2458629119584249278750153e0, 0.2031401664615301668533461e0, 0.1604278005405711652039491e0, 0.1177368858339244458607172e0, 0.7510252408650086658441596e-1, 0.3271846270775478856070884e-1}, + {0.1529186740959505109653289e1, 0.1487577158293388707508111e1, 0.1445967582009979387718202e1, 0.1404358015412336440816745e1, 0.1362748461941311565399969e1, 0.1321138925227929972823825e1, 0.1279529409151733277951100e1, 0.1237919917907156982173977e1, 0.1196310456080472987418488e1, 0.1154701028740456700905269e1, 0.1113091641546798022997704e1, 0.1071482300881451340842721e1, 0.1029873014009735917989475e1, 0.9882637892802373569245916e0, 0.9466546363756944528310758e0, 0.9050455666314926033869497e0, 
0.8634365934447506520344540e0, 0.8218277328062565148449433e0, 0.7802190040012226850703573e0, 0.7386104305454957746359112e0, 0.6970020414556031913861513e0, 0.6553938730008771105113861e0, 0.6137859711661063914322283e0, 0.5721783951857430999179669e0, 0.5305712227365694155165922e0, 0.4889645577740232855661796e0, 0.4473585427277744403333139e0, 0.4057533781735039172217875e0, 0.3641493559322687127223795e0, 0.3225469176515179389138545e0, 0.2809467650889194227770571e0, 0.2393500844055270891104500e0, 0.1977590501629603151642330e0, 0.1561781206604067112364815e0, 0.1146180742271483316267615e0, 0.7311308274978660184520447e-1, 0.3185176130791400787169333e-1}, + {0.1530260491394766313570510e1, 0.1489724658775115137266557e1, 0.1449188831753177403184250e1, 0.1408653013220734918131897e1, 0.1368117206184034069757975e1, 0.1327581413807020172726043e1, 0.1287045639459248683416470e1, 0.1246509886770075360771330e1, 0.1205974159691064843702080e1, 0.1165438462569017837684371e1, 0.1124902800232641860108690e1, 0.1084367178096737546333982e1, 0.1043831602288925654461738e1, 0.1003296079805520648394589e1, 0.9627606187053432591598275e0, 0.9222252283533212928777294e0, 0.8816899197300536544215117e0, 0.8411547058297167490229198e0, 0.8006196021777239861105672e0, 0.7600846275129127849719743e0, 0.7195498046991648764002008e0, 0.6790151619622966197448464e0, 0.6384807345966275863946969e0, 0.5979465673637771458800754e0, 0.5574127179353942966481713e0, 0.5168792619515766918187819e0, 0.4763463006547495614450433e0, 0.4358139727703203523144583e0, 0.3952824736706231680817472e0, 0.3547520876199503791717895e0, 0.3142232448436673832093046e0, 0.2736966289659020439688229e0, 0.2331733955144496369946707e0, 0.1926556629116315949109922e0, 0.1521477743835989472840536e0, 0.1116602300918232453371161e0, 0.7122632005925390425640031e-1, 0.3102979192734513847869512e-1}, + {0.1531280219945530918862887e1, 0.1491764115543711582608611e1, 0.1452248016067723206269747e1, 0.1412731924058150689920340e1, 0.1373215842151127100608219e1, 0.1333699773114208203180680e1, 0.1294183719885939986844436e1, 0.1254667685620366764206205e1, 0.1215151673737978313041594e1, 0.1175635687984935008823566e1, 0.1136119732502868295328130e1, 0.1096603811912170337964094e1, 0.1057087931412518476253055e1, 0.1017572096905509113863513e1, 0.9780563151458206514020694e0, 0.9385405939294598498464432e0, 0.8990249423306286659734381e0, 0.8595093710029676968794279e0, 0.8199938925669823205931282e0, 0.7804785221142635001130339e0, 0.7409632778721439753645470e0, 0.7014481820920565808095373e0, 0.6619332622550151160558599e0, 0.6224185527349885679868616e0, 0.5829040971371158016902326e0, 0.5433899516536147244946347e0, 0.5038761899947552937603140e0, 0.4643629108305196256509391e0, 0.4248502493722176391609139e0, 0.3853383960541810555628366e0, 0.3458276279674767760058527e0, 0.3063183644932167228808922e0, 0.2668112720373341483108662e0, 0.2273074770384765519559169e0, 0.1878090446069578429818381e0, 0.1483202086882449059764783e0, 0.1088512052741322662621244e0, 0.6943448689600673838300180e-1, 0.3024917865720923179577363e-1}, + {0.1532249903371281818085917e1, 0.1493703482108998740614827e1, 0.1455157065195200346809599e1, 0.1416610654869340270223431e1, 0.1378064253450997606022340e1, 0.1339517863369794055919890e1, 0.1300971487198245305453001e1, 0.1262425127688525048786896e1, 0.1223878787814308166546501e1, 0.1185332470819113339105535e1, 0.1146786180272904774996439e1, 0.1108239920139165765487189e1, 0.1069693694855262920629379e1, 0.1031147509429735196850587e1, 0.9926013695612463310882198e0, 0.9540552817854489715123326e0, 
0.9155092536580933534978986e0, 0.8769632939856246206308699e0, 0.8384174131186299393233148e0, 0.7998716233293992826192237e0, 0.7613259393034545837300323e0, 0.7227803787876118667749166e0, 0.6842349634562860931661901e0, 0.6456897200871628101751519e0, 0.6071446821835496233813653e0, 0.5685998922550279415939221e0, 0.5300554050908430047380815e0, 0.4915112925697217767572364e0, 0.4529676509187802579380104e0, 0.4144246120108054286121629e0, 0.3758823615873930314093573e0, 0.3373411699211847213003570e0, 0.2988014460838619282843952e0, 0.2602638401106843145315994e0, 0.2217294507811754336425535e0, 0.1832002925124018168986342e0, 0.1446804953347050655563166e0, 0.1061800440374660771048480e0, 0.6773059476567831336488402e-1, 0.2950687695527422224851832e-1}, + {0.1533173137460634461235066e1, 0.1495549950040734249895393e1, 0.1457926766471340970762709e1, 0.1420303588732694442267846e1, 0.1382680418872520759663065e1, 0.1345057259031099988676433e1, 0.1307434111468678960501903e1, 0.1269810978596001635506341e1, 0.1232187863008871596257323e1, 0.1194564767527851916244615e1, 0.1156941695244461089553108e1, 0.1119318649575559636172662e1, 0.1081695634328067979412755e1, 0.1044072653776750930510111e1, 0.1006449712758602402124214e1, 0.9688268167884441336521039e0, 0.9312039722018274751677424e0, 0.8935811863333633591901354e0, 0.8559584677414483220357356e0, 0.8183358264943738874932307e0, 0.7807132745385688213392421e0, 0.7430908261781095392995681e0, 0.7054684987070400358784448e0, 0.6678463132547297820965882e0, 0.6302242959332082940279826e0, 0.5926024794204980683238426e0, 0.5549809051864955237054951e0, 0.5173596266878257139037738e0, 0.4797387140623364241772428e0, 0.4421182612140318859822955e0, 0.4044983968396638104610711e0, 0.3668793022152994411560368e0, 0.3292612411240570212440856e0, 0.2916446128242035199998930e0, 0.2540300517665934607689814e0, 0.2164186303985620085027010e0, 0.1788123148742007754778852e0, 0.1412151362884411752920148e0, 0.1036368402634645114775150e0, 0.6610832470916409695729856e-1, 0.2880013396280840229218334e-1}, + {0.1534053181584449084854269e1, 0.1497310038074501005766978e1, 0.1460566897984002644464183e1, 0.1423823763069232867789940e1, 0.1387080635143547965139117e1, 0.1350337516098480646600889e1, 0.1313594407926723776707731e1, 0.1276851312747612578778902e1, 0.1240108232835827171448969e1, 0.1203365170654181674634873e1, 0.1166622128891556900019450e1, 0.1129879110507284864268762e1, 0.1093136118783624474731954e1, 0.1056393157388405786003046e1, 0.1019650230450503114577901e1, 0.9829073426515786199715451e0, 0.9461644993385942743541994e0, 0.9094217066630320811486021e0, 0.8726789717547518480094350e0, 0.8359363029411928866147184e0, 0.7991937100265521402467844e0, 0.7624512046511992388808212e0, 0.7257088007597790982554974e0, 0.6889665152185689899094400e0, 0.6522243686409073299342467e0, 0.6154823865075504175164916e0, 0.5787406007128420496638175e0, 0.5419990517384125648087865e0, 0.5052577917731960988645056e0, 0.4685168892980173635234519e0, 0.4317764360047099160222576e0, 0.3950365575646972937604113e0, 0.3582974309994310205507555e0, 0.3215593139080007759227897e0, 0.2848225961961619069649047e0, 0.2480878974611689122432227e0, 0.2113562650517154915467591e0, 0.1746296191183898065201571e0, 0.1379118964507339113271975e0, 0.1012126146941469342401701e0, 0.6456194899726137278760257e-1, 0.2812645439079299219187419e-1}, + {0.1534892997139557227614279e1, 0.1498989668998897501276994e1, 0.1463086343903285773505644e1, 0.1427183023414814244376429e1, 0.1391279709144040438287602e1, 0.1355376402767821814937864e1, 0.1319473106048673173924451e1, 
0.1283569820856137399848247e1, 0.1247666549190742942502495e1, 0.1211763293211231530413995e1, 0.1175860055265884319525693e1, 0.1139956837928964066704190e1, 0.1104053644043538840797350e1, 0.1068150476772278342447444e1, 0.1032247339658243608912598e1, 0.9963442366982618328585493e0, 0.9604411724322426430586723e0, 0.9245381520528253319358567e0, 0.8886351815411563067305273e0, 0.8527322678365406966379400e0, 0.8168294190504262166761188e0, 0.7809266447390142664026016e0, 0.7450239562542930157586944e0, 0.7091213672012904985883029e0, 0.6732188940411854039055226e0, 0.6373165568977466866717861e0, 0.6014143806519714388063208e0, 0.5655123964528129857845238e0, 0.5296106438411039715966193e0, 0.4937091737981756577948229e0, 0.4578080532255782153525438e0, 0.4219073717059785387344039e0, 0.3860072520255396095683859e0, 0.3501078671472635990145335e0, 0.3142094687704932495909488e0, 0.2783124378775218384923333e0, 0.2424173798924625361874772e0, 0.2065253182141071551836492e0, 0.1706381290938671641708352e0, 0.1347596593282315198612592e0, 0.9889920900871122533586553e-1, 0.6308626356388784057588631e-1, 0.2748357108440508277394892e-1}, + {0.1535695280838629983064694e1, 0.1500594236235067817656313e1, 0.1465493194350303789230585e1, 0.1430392156577492526495371e1, 0.1395291124351096810858349e1, 0.1360190099162024176252063e1, 0.1325089082574000089379322e1, 0.1289988076241572027256558e1, 0.1254887081930202663795858e1, 0.1219786101538994920367859e1, 0.1184685137126702182946916e1, 0.1149584190941820846004092e1, 0.1114483265457749469332035e1, 0.1079382363414242848588494e1, 0.1044281487866708939888712e1, 0.1009180642245317812316634e1, 0.9740798304264509422659935e0, 0.9389790568197674899837757e0, 0.9038783264751749171405213e0, 0.8687776452153701566068907e0, 0.8336770198015192229270720e0, 0.7985764581422970742698971e0, 0.7634759695602610192254430e0, 0.7283755651349081311799055e0, 0.6932752581495918103871962e0, 0.6581750646810479477537782e0, 0.6230750043877162525265513e0, 0.5879751015798285374141491e0, 0.5528753866962970290878822e0, 0.5177758983811020490994086e0, 0.4826766864637186565865902e0, 0.4475778163386701445336738e0, 0.4124793755752883735361361e0, 0.3773814842049053432591527e0, 0.3422843113148581639684411e0, 0.3071881029697497767338606e0, 0.2720932316284942932084102e0, 0.2370002891767127567222407e0, 0.2019102761348421810146637e0, 0.1668250268181992892198073e0, 0.1317483020532982541977987e0, 0.9668919410176593344830717e-1, 0.6167652949817792358742135e-1, 0.2686941953400762687915995e-1}, + {0.1536462493634653558154673e1, 0.1502128661685489464262068e1, 0.1467794832169950298839286e1, 0.1433461006333747476463744e1, 0.1399127185457927306909792e1, 0.1364793370871767472755746e1, 0.1330459563966682507229700e1, 0.1296125766211456950047804e1, 0.1261791979169174443914592e1, 0.1227458204516276373551266e1, 0.1193124444064268679881601e1, 0.1158790699784705540565424e1, 0.1124456973838220917338613e1, 0.1090123268608563332983681e1, 0.1055789586742829027889885e1, 0.1021455931199402224481903e1, 0.9871223053055240306873772e0, 0.9527887128269590857605072e0, 0.9184551580529615732874848e0, 0.8841216459007313197085517e0, 0.8497881820448998068986446e0, 0.8154547730794463756650640e0, 0.7811214267220410983907210e0, 0.7467881520744805288579630e0, 0.7124549599581423848996086e0, 0.6781218633510390484774053e0, 0.6437888779643722276961833e0, 0.6094560230135452763170614e0, 0.5751233222647905281576305e0, 0.5407908054797110156395425e0, 0.5064585104462232763044121e0, 0.4721264858937837018545325e0, 0.4377947957771643018072936e0, 0.4034635257416918872885646e0, 
0.3691327931855416440777167e0, 0.3348027634909946151567752e0, 0.3004736773353657517478163e0, 0.2661458990278703974149616e0, 0.2318200075085118064771005e0, 0.1974969814205034596217949e0, 0.1631786149772797106698111e0, 0.1288685867945150272796250e0, 0.9457579039019365184018477e-1, 0.6032842220945916819748797e-1, 0.2628211572883546008386342e-1}, + {0.1537196885933572311910085e1, 0.1503597446159129663218426e1, 0.1469998008568304160871417e1, 0.1436398574277729377094190e1, 0.1402799144434368418084898e1, 0.1369199720226542342210552e1, 0.1335600302895785666466344e1, 0.1302000893749787833197270e1, 0.1268401494176718217187838e1, 0.1234802105661283063077015e1, 0.1201202729802928570677208e1, 0.1167603368336689119731474e1, 0.1134004023157288628212183e1, 0.1100404696347243386055709e1, 0.1066805390209896030700280e1, 0.1033206107308545748870827e1, 0.9996068505131472996246808e0, 0.9660076230564559666956844e0, 0.9324084286020318648375284e0, 0.8988092713272342656758064e0, 0.8652101560253048754543914e0, 0.8316110882319595313175680e0, 0.7980120743837286767240920e0, 0.7644131220178278512956878e0, 0.7308142400269308414762795e0, 0.6972154389873656296775555e0, 0.6636167315867435235683334e0, 0.6300181331881122315700717e0, 0.5964196625844131090385918e0, 0.5628213430226633060114218e0, 0.5292232036175455988770345e0, 0.4956252813388603291268380e0, 0.4620276238643496856689332e0, 0.4284302937718012609061760e0, 0.3948333748659561479104270e0, 0.3612369820255324850503899e0, 0.3276412770872600895283016e0, 0.2940464955725917238672137e0, 0.2604529939906062681054034e0, 0.2268613388903867245835696e0, 0.1932724879746856807613294e0, 0.1596881970714452359218090e0, 0.1261120661032951679792394e0, 0.9255279834764232165670211e-1, 0.5903798711627596210077655e-1, 0.2571993685288741305807485e-1}, + {0.1537900519639177351485509e1, 0.1505004713461118831562885e1, 0.1472108909246876714959093e1, 0.1439213107999753788389740e1, 0.1406317310749171844429399e1, 0.1373421518560135827011261e1, 0.1340525732543378926238078e1, 0.1307629953866399941713193e1, 0.1274734183765634621652739e1, 0.1241838423560042429086765e1, 0.1208942674666441461574345e1, 0.1176046938616989970936899e1, 0.1143151217079296988032361e1, 0.1110255511879752164142939e1, 0.1077359825030803093319347e1, 0.1044464158763086533573607e1, 0.1011568515563550961534623e1, 0.9786728982210094134846821e0, 0.9457773098809579873452675e0, 0.9128817541120207886160886e0, 0.8799862349870845994262022e0, 0.8470907571831347751008168e0, 0.8141953261050969543263697e0, 0.7812999480407721032432037e0, 0.7484046303564402266425896e0, 0.7155093817462244075628281e0, 0.6826142125533466396520346e0, 0.6497191351887403950357432e0, 0.6168241646833332909207560e0, 0.5839293194266532352434130e0, 0.5510346221695150324949297e0, 0.5181401014079633610544426e0, 0.4852457933290632607369490e0, 0.4523517446039431388003505e0, 0.4194580164920722423612656e0, 0.3865646910356375140534892e0, 0.3536718807003189971969294e0, 0.3207797439266498255416416e0, 0.2878885112969848452450724e0, 0.2549985318477515756044100e0, 0.2221103602568508117102717e0, 0.1892249341643785313168465e0, 0.1563439726212394862316010e0, 0.1234710001537068179882843e0, 0.9061453776736619019094845e-1, 0.5780160090309369034797044e-1, 0.2518130440638251656980999e-1}, + {0.1538575287485045780713568e1, 0.1506354249056545799167351e1, 0.1474133212398093554231315e1, 0.1441912178413208451704314e1, 0.1409691148027973881079186e1, 0.1377470122199186272616473e1, 0.1345249101923067139210221e1, 0.1313028088244711409410919e1, 0.1280807082268469343020428e1, 0.1248586085169490583375238e1, 
0.1216365098206699074213627e1, 0.1184144122737518830558069e1, 0.1151923160234735793613503e1, 0.1119702212305964062886069e1, 0.1087481280716290811591462e1, 0.1055260367414810028339009e1, 0.1023039474565930165787482e1, 0.9908186045865674272211987e0, 0.9585977601906320722299056e0, 0.9263769444426036830464570e0, 0.8941561608225061952846411e0, 0.8619354133052817812042663e0, 0.8297147064584916186566054e0, 0.7974940455635382827549679e0, 0.7652734367673509855003551e0, 0.7330528872739117793257283e0, 0.7008324055884451343305450e0, 0.6686120018320298047193041e0, 0.6363916881515750209117372e0, 0.6041714792607289968809847e0, 0.5719513931632926368357825e0, 0.5397314521353000325496229e0, 0.5075116840805377280486923e0, 0.4752921244363891783961832e0, 0.4430728189095547215892704e0, 0.4108538274961112658763390e0, 0.3786352305487998074788803e0, 0.3464171382200184643128623e0, 0.3141997056941599156198233e0, 0.2819831588178046655599196e0, 0.2497678394619649260592757e0, 0.2175542909210219972765731e0, 0.1853434315961135904158300e0, 0.1531369452704970394027659e0, 0.1209382841678252589048669e0, 0.8875579450016283173293810e-1, 0.5661593754525190873771522e-1, 0.2466476940450737058975552e-1}, + {0.1539222930035210331902410e1, 0.1507649534071729882214386e1, 0.1476076139707032453353232e1, 0.1444502747756546556830706e1, 0.1412929359055252480197337e1, 0.1381355974464721552102928e1, 0.1349782594880622732927647e1, 0.1318209221240839295255046e1, 0.1286635854534357387243172e1, 0.1255062495811112994872428e1, 0.1223489146193015470717893e1, 0.1191915806886406014313715e1, 0.1160342479196260434661502e1, 0.1128769164542510055952304e1, 0.1097195864478936528824546e1, 0.1065622580715200621234508e1, 0.1034049315142698534418744e1, 0.1002476069865111021377467e1, 0.9709028472347329448081481e0, 0.9393296498959608456620406e0, 0.9077564808376970380335442e0, 0.8761833434569334264096395e0, 0.8446102416364528348063321e0, 0.8130371798404960344077378e0, 0.7814641632334840064334645e0, 0.7498911978285964532098456e0, 0.7183182906753955596314298e0, 0.6867454500990591398232408e0, 0.6551726860086246453663390e0, 0.6236000102986843027011345e0, 0.5920274373793840619034224e0, 0.5604549848852622707385612e0, 0.5288826746375584896948472e0, 0.4973105339724571989307663e0, 0.4657385976085971045914307e0, 0.4341669103277770901346174e0, 0.4025955309141879357899857e0, 0.3710245380997234377015025e0, 0.3394540398171456073906403e0, 0.3078841881262277508367562e0, 0.2763152043287541015913350e0, 0.2447474234189502677044064e0, 0.2131813777658572006989977e0, 0.1816179673056091210434906e0, 0.1500588419721174291665790e0, 0.1185073845935281602210493e0, 0.8697177361567243680812898e-1, 0.5547793843128156580348541e-1, 0.2416899936118312040170588e-1}, +} + +// The weights of these nodes +var evenWeights = [][]float64{ + {1.0}, + {0.6521451548625461426269364e0, 0.3478548451374538573730642e0}, + {0.4679139345726910473898704e0, 0.3607615730481386075698336e0, 0.1713244923791703450402969e0}, + {0.3626837833783619829651504e0, 0.3137066458778872873379622e0, 0.2223810344533744705443556e0, 0.1012285362903762591525320e0}, + {0.2955242247147528701738930e0, 0.2692667193099963550912268e0, 0.2190863625159820439955350e0, 0.1494513491505805931457764e0, 0.6667134430868813759356850e-1}, + {0.2491470458134027850005624e0, 0.2334925365383548087608498e0, 0.2031674267230659217490644e0, 0.1600783285433462263346522e0, 0.1069393259953184309602552e0, 0.4717533638651182719461626e-1}, + {0.2152638534631577901958766e0, 0.2051984637212956039659240e0, 0.1855383974779378137417164e0, 0.1572031671581935345696019e0, 
0.1215185706879031846894145e0, 0.8015808715976020980563266e-1, 0.3511946033175186303183410e-1}, + {0.1894506104550684962853967e0, 0.1826034150449235888667636e0, 0.1691565193950025381893119e0, 0.1495959888165767320815019e0, 0.1246289712555338720524763e0, 0.9515851168249278480992520e-1, 0.6225352393864789286284360e-1, 0.2715245941175409485178166e-1}, + {0.1691423829631435918406565e0, 0.1642764837458327229860538e0, 0.1546846751262652449254180e0, 0.1406429146706506512047311e0, 0.1225552067114784601845192e0, 0.1009420441062871655628144e0, 0.7642573025488905652912984e-1, 0.4971454889496979645333512e-1, 0.2161601352648331031334248e-1}, + {0.1527533871307258506980843e0, 0.1491729864726037467878288e0, 0.1420961093183820513292985e0, 0.1316886384491766268984948e0, 0.1181945319615184173123774e0, 0.1019301198172404350367504e0, 0.8327674157670474872475850e-1, 0.6267204833410906356950596e-1, 0.4060142980038694133103928e-1, 0.1761400713915211831186249e-1}, + {0.1392518728556319933754102e0, 0.1365414983460151713525738e0, 0.1311735047870623707329649e0, 0.1232523768105124242855609e0, 0.1129322960805392183934005e0, 0.1004141444428809649320786e0, 0.8594160621706772741444398e-1, 0.6979646842452048809496104e-1, 0.5229333515268328594031142e-1, 0.3377490158481415479330258e-1, 0.1462799529827220068498987e-1}, + {0.1279381953467521569740562e0, 0.1258374563468282961213754e0, 0.1216704729278033912044631e0, 0.1155056680537256013533445e0, 0.1074442701159656347825772e0, 0.9761865210411388826988072e-1, 0.8619016153195327591718514e-1, 0.7334648141108030573403386e-1, 0.5929858491543678074636724e-1, 0.4427743881741980616860272e-1, 0.2853138862893366318130802e-1, 0.1234122979998719954680507e-1}, + {0.1183214152792622765163711e0, 0.1166604434852965820446624e0, 0.1133618165463196665494407e0, 0.1084718405285765906565795e0, 0.1020591610944254232384142e0, 0.9421380035591414846366474e-1, 0.8504589431348523921044770e-1, 0.7468414976565974588707538e-1, 0.6327404632957483553945402e-1, 0.5097582529714781199831990e-1, 0.3796238329436276395030342e-1, 0.2441785109263190878961718e-1, 0.1055137261734300715565387e-1}, + {0.1100470130164751962823763e0, 0.1087111922582941352535716e0, 0.1060557659228464179104165e0, 0.1021129675780607698142166e0, 0.9693065799792991585048880e-1, 0.9057174439303284094218612e-1, 0.8311341722890121839039666e-1, 0.7464621423456877902393178e-1, 0.6527292396699959579339794e-1, 0.5510734567571674543148330e-1, 0.4427293475900422783958756e-1, 0.3290142778230437997763004e-1, 0.2113211259277125975149896e-1, 0.9124282593094517738816778e-2}, + {0.1028526528935588403412856e0, 0.1017623897484055045964290e0, 0.9959342058679526706278018e-1, 0.9636873717464425963946864e-1, 0.9212252223778612871763266e-1, 0.8689978720108297980238752e-1, 0.8075589522942021535469516e-1, 0.7375597473770520626824384e-1, 0.6597422988218049512812820e-1, 0.5749315621761906648172152e-1, 0.4840267283059405290293858e-1, 0.3879919256962704959680230e-1, 0.2878470788332336934971862e-1, 0.1846646831109095914230276e-1, 0.7968192496166605615469690e-2}, + {0.9654008851472780056676488e-1, 0.9563872007927485941908208e-1, 0.9384439908080456563918026e-1, 0.9117387869576388471286854e-1, 0.8765209300440381114277140e-1, 0.8331192422694675522219922e-1, 0.7819389578707030647174106e-1, 0.7234579410884850622539954e-1, 0.6582222277636184683765034e-1, 0.5868409347853554714528360e-1, 0.5099805926237617619616316e-1, 0.4283589802222668065687810e-1, 0.3427386291302143310268716e-1, 0.2539206530926205945575196e-1, 0.1627439473090567060516896e-1, 0.7018610009470096600404748e-2}, + 
{0.9095674033025987361533764e-1, 0.9020304437064072957394216e-1, 0.8870189783569386928707642e-1, 0.8646573974703574978424688e-1, 0.8351309969984565518702044e-1, 0.7986844433977184473881888e-1, 0.7556197466003193127083398e-1, 0.7062937581425572499903896e-1, 0.6511152155407641137854442e-1, 0.5905413582752449319396124e-1, 0.5250741457267810616824590e-1, 0.4552561152335327245382266e-1, 0.3816659379638751632176606e-1, 0.3049138063844613180944194e-1, 0.2256372198549497008409476e-1, 0.1445016274859503541520101e-1, 0.6229140555908684718603220e-2}, + {0.8598327567039474749008516e-1, 0.8534668573933862749185052e-1, 0.8407821897966193493345756e-1, 0.8218726670433970951722338e-1, 0.7968782891207160190872470e-1, 0.7659841064587067452875784e-1, 0.7294188500565306135387342e-1, 0.6874532383573644261368974e-1, 0.6403979735501548955638454e-1, 0.5886014424532481730967550e-1, 0.5324471397775991909202590e-1, 0.4723508349026597841661708e-1, 0.4087575092364489547411412e-1, 0.3421381077030722992124474e-1, 0.2729862149856877909441690e-1, 0.2018151529773547153209770e-1, 0.1291594728406557440450307e-1, 0.5565719664245045361251818e-2}, + {0.8152502928038578669921876e-1, 0.8098249377059710062326952e-1, 0.7990103324352782158602774e-1, 0.7828784465821094807537540e-1, 0.7615366354844639606599344e-1, 0.7351269258474345714520658e-1, 0.7038250706689895473928292e-1, 0.6678393797914041193504612e-1, 0.6274093339213305405296984e-1, 0.5828039914699720602230556e-1, 0.5343201991033231997375704e-1, 0.4822806186075868337435238e-1, 0.4270315850467443423587832e-1, 0.3689408159402473816493978e-1, 0.3083950054517505465873166e-1, 0.2457973973823237589520214e-1, 0.1815657770961323689887502e-1, 0.1161344471646867417766868e-1, 0.5002880749639345675901886e-2}, + {0.7750594797842481126372404e-1, 0.7703981816424796558830758e-1, 0.7611036190062624237155810e-1, 0.7472316905796826420018930e-1, 0.7288658239580405906051074e-1, 0.7061164739128677969548346e-1, 0.6791204581523390382569024e-1, 0.6480401345660103807455446e-1, 0.6130624249292893916653822e-1, 0.5743976909939155136661768e-1, 0.5322784698393682435499678e-1, 0.4869580763507223206143380e-1, 0.4387090818567327199167442e-1, 0.3878216797447201763997196e-1, 0.3346019528254784739267780e-1, 0.2793700698002340109848970e-1, 0.2224584919416695726150432e-1, 0.1642105838190788871286396e-1, 0.1049828453115281361474434e-1, 0.4521277098533191258471490e-2}, + {0.7386423423217287999638556e-1, 0.7346081345346752826402828e-1, 0.7265617524380410488790570e-1, 0.7145471426517098292181042e-1, 0.6986299249259415976615480e-1, 0.6788970337652194485536350e-1, 0.6554562436490897892700504e-1, 0.6284355804500257640931846e-1, 0.5979826222758665431283142e-1, 0.5642636935801838164642686e-1, 0.5274629569917407034394234e-1, 0.4877814079280324502744954e-1, 0.4454357777196587787431674e-1, 0.4006573518069226176059618e-1, 0.3536907109759211083266214e-1, 0.3047924069960346836290502e-1, 0.2542295952611304788674188e-1, 0.2022786956905264475705664e-1, 0.1492244369735749414467869e-1, 0.9536220301748502411822340e-2, 0.4105998604649084610599928e-2}, + {0.7054915778935406881133824e-1, 0.7019768547355821258714200e-1, 0.6949649186157257803708402e-1, 0.6844907026936666098545864e-1, 0.6706063890629365239570506e-1, 0.6533811487918143498424096e-1, 0.6329007973320385495013890e-1, 0.6092673670156196803855800e-1, 0.5825985987759549533421064e-1, 0.5530273556372805254874660e-1, 0.5207009609170446188123162e-1, 0.4857804644835203752763920e-1, 0.4484398408197003144624282e-1, 0.4088651231034621890844686e-1, 0.3672534781380887364290888e-1, 
0.3238122281206982088084682e-1, 0.2787578282128101008111450e-1, 0.2323148190201921062895910e-1, 0.1847148173681474917204335e-1, 0.1361958675557998552020491e-1, 0.8700481367524844122565470e-2, 0.3745404803112777515171456e-2}, + {0.6751868584903645882021418e-1, 0.6721061360067817586237416e-1, 0.6659587476845488737576196e-1, 0.6567727426778120737875756e-1, 0.6445900346713906958827948e-1, 0.6294662106439450817895206e-1, 0.6114702772465048101535670e-1, 0.5906843459554631480755080e-1, 0.5672032584399123581687444e-1, 0.5411341538585675449163752e-1, 0.5125959800714302133536554e-1, 0.4817189510171220053046892e-1, 0.4486439527731812676709458e-1, 0.4135219010967872970421980e-1, 0.3765130535738607132766076e-1, 0.3377862799910689652060416e-1, 0.2975182955220275579905234e-1, 0.2558928639713001063470016e-1, 0.2130999875413650105447862e-1, 0.1693351400783623804623151e-1, 0.1247988377098868420673525e-1, 0.7969898229724622451610710e-2, 0.3430300868107048286016700e-2}, + {0.6473769681268392250302496e-1, 0.6446616443595008220650418e-1, 0.6392423858464818662390622e-1, 0.6311419228625402565712596e-1, 0.6203942315989266390419786e-1, 0.6070443916589388005296916e-1, 0.5911483969839563574647484e-1, 0.5727729210040321570515042e-1, 0.5519950369998416286820356e-1, 0.5289018948519366709550490e-1, 0.5035903555385447495780746e-1, 0.4761665849249047482590674e-1, 0.4467456085669428041944838e-1, 0.4154508294346474921405856e-1, 0.3824135106583070631721688e-1, 0.3477722256477043889254814e-1, 0.3116722783279808890206628e-1, 0.2742650970835694820007336e-1, 0.2357076083932437914051962e-1, 0.1961616045735552781446139e-1, 0.1557931572294384872817736e-1, 0.1147723457923453948959265e-1, 0.7327553901276262102386656e-2, 0.3153346052305838632678320e-2}, + {0.6217661665534726232103316e-1, 0.6193606742068324338408750e-1, 0.6145589959031666375640678e-1, 0.6073797084177021603175000e-1, 0.5978505870426545750957640e-1, 0.5860084981322244583512250e-1, 0.5718992564772838372302946e-1, 0.5555774480621251762356746e-1, 0.5371062188899624652345868e-1, 0.5165570306958113848990528e-1, 0.4940093844946631492124360e-1, 0.4695505130394843296563322e-1, 0.4432750433880327549202254e-1, 0.4152846309014769742241230e-1, 0.3856875661258767524477018e-1, 0.3545983561514615416073452e-1, 0.3221372822357801664816538e-1, 0.2884299358053519802990658e-1, 0.2536067357001239044019428e-1, 0.2178024317012479298159128e-1, 0.1811556071348939035125903e-1, 0.1438082276148557441937880e-1, 0.1059054838365096926356876e-1, 0.6759799195745401502778824e-2, 0.2908622553155140958394976e-2}, + {0.5981036574529186024778538e-1, 0.5959626017124815825831088e-1, 0.5916881546604297036933200e-1, 0.5852956177181386855029062e-1, 0.5768078745252682765393200e-1, 0.5662553090236859719080832e-1, 0.5536756966930265254904124e-1, 0.5391140693275726475083694e-1, 0.5226225538390699303439404e-1, 0.5042601856634237721821144e-1, 0.4840926974407489685396032e-1, 0.4621922837278479350764582e-1, 0.4386373425900040799512978e-1, 0.4135121950056027167904044e-1, 0.3869067831042397898510146e-1, 0.3589163483509723294194276e-1, 0.3296410908971879791501014e-1, 0.2991858114714394664128188e-1, 0.2676595374650401344949324e-1, 0.2351751355398446159032286e-1, 0.2018489150798079220298930e-1, 0.1678002339630073567792252e-1, 0.1331511498234096065660116e-1, 0.9802634579462752061952706e-2, 0.6255523962973276899717754e-2, 0.2691316950047111118946698e-2}, + {0.5761753670714702467237616e-1, 0.5742613705411211485929010e-1, 0.5704397355879459856782852e-1, 0.5647231573062596503104434e-1, 0.5571306256058998768336982e-1, 
0.5476873621305798630622270e-1, 0.5364247364755361127210060e-1, 0.5233801619829874466558872e-1, 0.5085969714618814431970910e-1, 0.4921242732452888606879048e-1, 0.4740167880644499105857626e-1, 0.4543346672827671397485208e-1, 0.4331432930959701544192564e-1, 0.4105130613664497422171834e-1, 0.3865191478210251683685736e-1, 0.3612412584038355258288694e-1, 0.3347633646437264571604038e-1, 0.3071734249787067605400450e-1, 0.2785630931059587028700164e-1, 0.2490274146720877305005456e-1, 0.2186645142285308594551102e-1, 0.1875752762146937791200757e-1, 0.1558630303592413170296832e-1, 0.1236332812884764416646861e-1, 0.9099369455509396948032734e-2, 0.5805611015239984878826112e-2, 0.2497481835761585775945054e-2}, + {0.5557974630651439584627342e-1, 0.5540795250324512321779340e-1, 0.5506489590176242579630464e-1, 0.5455163687088942106175058e-1, 0.5386976186571448570895448e-1, 0.5302137852401076396799152e-1, 0.5200910915174139984305222e-1, 0.5083608261779848056012412e-1, 0.4950592468304757891996610e-1, 0.4802274679360025812073550e-1, 0.4639113337300189676219012e-1, 0.4461612765269228321341510e-1, 0.4270321608466708651103858e-1, 0.4065831138474451788012514e-1, 0.3848773425924766248682568e-1, 0.3619819387231518603588452e-1, 0.3379676711561176129542654e-1, 0.3129087674731044786783572e-1, 0.2868826847382274172988602e-1, 0.2599698705839195219181960e-1, 0.2322535156256531693725830e-1, 0.2038192988240257263480560e-1, 0.1747551291140094650495930e-1, 0.1451508927802147180777130e-1, 0.1150982434038338217377419e-1, 0.8469063163307887661628584e-2, 0.5402522246015337761313780e-2, 0.2323855375773215501098716e-2}, + {0.5368111986333484886390600e-1, 0.5352634330405825210061082e-1, 0.5321723644657901410348096e-1, 0.5275469052637083342964580e-1, 0.5214003918366981897126058e-1, 0.5137505461828572547451486e-1, 0.5046194247995312529765992e-1, 0.4940333550896239286651076e-1, 0.4820228594541774840657052e-1, 0.4686225672902634691841818e-1, 0.4538711151481980250398048e-1, 0.4378110353364025103902560e-1, 0.4204886332958212599457020e-1, 0.4019538540986779688807676e-1, 0.3822601384585843322945902e-1, 0.3614642686708727054078062e-1, 0.3396262049341601079772722e-1, 0.3168089125380932732029244e-1, 0.2930781804416049071839382e-1, 0.2685024318198186847590714e-1, 0.2431525272496395254025850e-1, 0.2171015614014623576691612e-1, 0.1904246546189340865578709e-1, 0.1631987423497096505212063e-1, 0.1355023711298881214517933e-1, 0.1074155353287877411685532e-1, 0.7901973849998674754018608e-2, 0.5039981612650243085015810e-2, 0.2167723249627449943047768e-2}, + {0.5190787763122063973286496e-1, 0.5176794317491018754380368e-1, 0.5148845150098093399504444e-1, 0.5107015606985562740454910e-1, 0.5051418453250937459823872e-1, 0.4982203569055018101115930e-1, 0.4899557545575683538947578e-1, 0.4803703181997118096366674e-1, 0.4694898884891220484701330e-1, 0.4573437971611448664719662e-1, 0.4439647879578711332778398e-1, 0.4293889283593564195423128e-1, 0.4136555123558475561316394e-1, 0.3968069545238079947012286e-1, 0.3788886756924344403094056e-1, 0.3599489805108450306657888e-1, 0.3400389272494642283491466e-1, 0.3192121901929632894945890e-1, 0.2975249150078894524083642e-1, 0.2750355674992479163522324e-1, 0.2518047762152124837957096e-1, 0.2278951694399781986378308e-1, 0.2033712072945728677503268e-1, 0.1782990101420772026039605e-1, 0.1527461859678479930672510e-1, 0.1267816647681596013149540e-1, 0.1004755718228798435788578e-1, 0.7389931163345455531517530e-2, 0.4712729926953568640893942e-2, 0.2026811968873758496433874e-2}, + {0.5024800037525628168840300e-1, 
0.5012106956904328807480410e-1, 0.4986752859495239424476130e-1, 0.4948801791969929252786578e-1, 0.4898349622051783710485112e-1, 0.4835523796347767283480314e-1, 0.4760483018410123227045008e-1, 0.4673416847841552480220700e-1, 0.4574545221457018077723242e-1, 0.4464117897712441429364478e-1, 0.4342413825804741958006920e-1, 0.4209740441038509664302268e-1, 0.4066432888241744096828524e-1, 0.3912853175196308412331100e-1, 0.3749389258228002998561838e-1, 0.3576454062276814128558760e-1, 0.3394484437941054509111762e-1, 0.3203940058162467810633926e-1, 0.3005302257398987007700934e-1, 0.2799072816331463754123820e-1, 0.2585772695402469802709536e-1, 0.2365940720868279257451652e-1, 0.2140132227766996884117906e-1, 0.1908917665857319873250324e-1, 0.1672881179017731628855027e-1, 0.1432619182380651776740340e-1, 0.1188739011701050194481938e-1, 0.9418579428420387637936636e-2, 0.6926041901830960871704530e-2, 0.4416333456930904813271960e-2, 0.1899205679513690480402948e-2}, + {0.4869095700913972038336538e-1, 0.4857546744150342693479908e-1, 0.4834476223480295716976954e-1, 0.4799938859645830772812614e-1, 0.4754016571483030866228214e-1, 0.4696818281621001732532634e-1, 0.4628479658131441729595326e-1, 0.4549162792741814447977098e-1, 0.4459055816375656306013478e-1, 0.4358372452932345337682780e-1, 0.4247351512365358900733972e-1, 0.4126256324262352861015628e-1, 0.3995374113272034138665686e-1, 0.3855015317861562912896262e-1, 0.3705512854024004604041492e-1, 0.3547221325688238381069330e-1, 0.3380516183714160939156536e-1, 0.3205792835485155358546770e-1, 0.3023465707240247886797386e-1, 0.2833967261425948322751098e-1, 0.2637746971505465867169136e-1, 0.2435270256871087333817770e-1, 0.2227017380838325415929788e-1, 0.2013482315353020937234076e-1, 0.1795171577569734308504602e-1, 0.1572603047602471932196614e-1, 0.1346304789671864259806029e-1, 0.1116813946013112881859029e-1, 0.8846759826363947723030856e-2, 0.6504457968978362856118112e-2, 0.4147033260562467635287472e-2, 0.1783280721696432947292054e-2}, + {0.4722748126299855484563332e-1, 0.4712209828764473218544518e-1, 0.4691156748762082774625404e-1, 0.4659635863958410362582412e-1, 0.4617717509791597547166640e-1, 0.4565495222527305612043888e-1, 0.4503085530544150021519278e-1, 0.4430627694315316190460328e-1, 0.4348283395666747864757528e-1, 0.4256236377005571631890662e-1, 0.4154692031324188131773448e-1, 0.4043876943895497912586836e-1, 0.3924038386682833018781280e-1, 0.3795443766594162094913028e-1, 0.3658380028813909441368980e-1, 0.3513153016547255590064132e-1, 0.3360086788611223267034862e-1, 0.3199522896404688727128174e-1, 0.3031819621886851919364104e-1, 0.2857351178293187118282268e-1, 0.2676506875425000190879332e-1, 0.2489690251475737263773110e-1, 0.2297318173532665591809836e-1, 0.2099819909186462577733052e-1, 0.1897636172277132593486659e-1, 0.1691218147224521718035102e-1, 0.1481026500273396017364296e-1, 0.1267530398126168187644599e-1, 0.1051206598770575465737803e-1, 0.8325388765990901416725080e-2, 0.6120192018447936365568516e-2, 0.3901625641744248259228942e-2, 0.1677653744007238599334225e-2}, + {0.4584938738725097468656398e-1, 0.4575296541606795051900614e-1, 0.4556032425064828598070770e-1, 0.4527186901844377786941174e-1, 0.4488820634542666782635216e-1, 0.4441014308035275590934876e-1, 0.4383868459795605201060492e-1, 0.4317503268464422322584344e-1, 0.4242058301114249930061428e-1, 0.4157692219740291648457550e-1, 0.4064582447595407614088174e-1, 0.3962924796071230802540652e-1, 0.3852933052910671449325372e-1, 0.3734838532618666771607896e-1, 0.3608889590017987071497568e-1, 
0.3475351097975151316679320e-1, 0.3334503890398068790314300e-1, 0.3186644171682106493934736e-1, 0.3032082893855398034157906e-1, 0.2871145102748499071080394e-1, 0.2704169254590396155797848e-1, 0.2531506504517639832390244e-1, 0.2353519968587633336129308e-1, 0.2170583961037807980146532e-1, 0.1983083208795549829102926e-1, 0.1791412045792315248940600e-1, 0.1595973590961380007213420e-1, 0.1397178917445765581596455e-1, 0.1195446231976944210322336e-1, 0.9912001251585937209131520e-2, 0.7848711393177167415052160e-2, 0.5768969918729952021468320e-2, 0.3677366595011730633570254e-2, 0.1581140256372912939103728e-2}, + {0.4454941715975466720216750e-1, 0.4446096841724637082355728e-1, 0.4428424653905540677579966e-1, 0.4401960239018345875735580e-1, 0.4366756139720144025254848e-1, 0.4322882250506869978939520e-1, 0.4270425678944977776996576e-1, 0.4209490572728440602098398e-1, 0.4140197912904520863822652e-1, 0.4062685273678961635122600e-1, 0.3977106549277656747784952e-1, 0.3883631648407340397900292e-1, 0.3782446156922281719727230e-1, 0.3673750969367269534804046e-1, 0.3557761890129238053276980e-1, 0.3434709204990653756854510e-1, 0.3304837223937242047087430e-1, 0.3168403796130848173465310e-1, 0.3025679798015423781653688e-1, 0.2876948595580828066131070e-1, 0.2722505481866441715910742e-1, 0.2562657090846848279898494e-1, 0.2397720788910029227868640e-1, 0.2228024045225659583389064e-1, 0.2053903782432645338449270e-1, 0.1875705709313342341545081e-1, 0.1693783637630293253183738e-1, 0.1508498786544312768229492e-1, 0.1320219081467674762507440e-1, 0.1129318464993153764963015e-1, 0.9361762769699026811498692e-2, 0.7411769363190210362109460e-2, 0.5447111874217218312821680e-2, 0.3471894893078143254999524e-2, 0.1492721288844515731042666e-2}, + {0.4332111216548653707639384e-1, 0.4323978130522261748526514e-1, 0.4307727227491369974525036e-1, 0.4283389016833881366683982e-1, 0.4251009191005772007780078e-1, 0.4210648539758646414658732e-1, 0.4162382836013859820760788e-1, 0.4106302693607506110193610e-1, 0.4042513397173397004332898e-1, 0.3971134704483490178239872e-1, 0.3892300621616966379996300e-1, 0.3806159151380216383437540e-1, 0.3712872015450289946055536e-1, 0.3612614350763799298563092e-1, 0.3505574380721787043413848e-1, 0.3391953061828605949719618e-1, 0.3271963706429384670431246e-1, 0.3145831582256181397777608e-1, 0.3013793489537547929298290e-1, 0.2876097316470176109512506e-1, 0.2733001573895093443379638e-1, 0.2584774910065589028389804e-1, 0.2431695606441916432634724e-1, 0.2274051055503575445593134e-1, 0.2112137221644055350981986e-1, 0.1946258086329427804301667e-1, 0.1776725078920065359435915e-1, 0.1603856495028515521816122e-1, 0.1427976905455419326655572e-1, 0.1249416561987375776778277e-1, 0.1068510816535189715895734e-1, 0.8855996073706153383956510e-2, 0.7010272321861863296081600e-2, 0.5151436018790886908248502e-2, 0.3283169774667495801897558e-2, 0.1411516393973434135715864e-2}, + {0.4215870660994342212223066e-1, 0.4208374996915697247489576e-1, 0.4193396995777702146995522e-1, 0.4170963287924075437870998e-1, 0.4141113759675351082006810e-1, 0.4103901482412726684741876e-1, 0.4059392618219472805807676e-1, 0.4007666302247696675915112e-1, 0.3948814502019646832363280e-1, 0.3882941853913770775808220e-1, 0.3810165477126324889635168e-1, 0.3730614765439415573370658e-1, 0.3644431157165856448181076e-1, 0.3551767883680095992585374e-1, 0.3452789696982646100333388e-1, 0.3347672576782876626372244e-1, 0.3236603417621699952527994e-1, 0.3119779696591542603337254e-1, 0.2997409122246118733996502e-1, 0.2869709265326987534209508e-1, 
0.2736907171967935230243778e-1, 0.2599238960072378786677346e-1, 0.2456949399594276724564910e-1, 0.2310291477491582303093246e-1, 0.2159525948167588896969968e-1, 0.2004920870279494425273506e-1, 0.1846751130897987978285368e-1, 0.1685297958202485358484807e-1, 0.1520848424340123480887426e-1, 0.1353694941178749434105245e-1, 0.1184134754749966732316814e-1, 0.1012469453828730542112095e-1, 0.8390045433971397064089364e-2, 0.6640492909114357634760192e-2, 0.4879179758594144584288316e-2, 0.3109420149896754678673688e-2, 0.1336761650069883550325931e-2}, + {0.4105703691622942259325972e-1, 0.4098780546479395154130842e-1, 0.4084945930182849228039176e-1, 0.4064223171029473877745496e-1, 0.4036647212284402315409558e-1, 0.4002264553259682611646172e-1, 0.3961133170906205842314674e-1, 0.3913322422051844076750754e-1, 0.3858912926450673834292118e-1, 0.3797996430840528319523540e-1, 0.3730675654238160982756716e-1, 0.3657064114732961700724404e-1, 0.3577285938071394752777924e-1, 0.3491475648355076744412550e-1, 0.3399777941205638084674262e-1, 0.3302347439779174100654158e-1, 0.3199348434042160006853510e-1, 0.3090954603749159538993714e-1, 0.2977348725590504095670750e-1, 0.2858722365005400377397500e-1, 0.2735275553182752167415270e-1, 0.2607216449798598352427480e-1, 0.2474760992065967164326474e-1, 0.2338132530701118662247962e-1, 0.2197561453441624916801320e-1, 0.2053284796790802109297466e-1, 0.1905545846719058280680223e-1, 0.1754593729147423095419928e-1, 0.1600682991224857088850986e-1, 0.1444073174827667993988980e-1, 0.1285028384751014494492467e-1, 0.1123816856966768723967455e-1, 0.9607105414713754082404616e-2, 0.7959847477239734621118374e-2, 0.6299180497328445866575096e-2, 0.4627935228037421326126844e-2, 0.2949102953642474900394994e-2, 0.1267791634085359663272804e-2}, + {0.4001146511842048298877858e-1, 0.3994739036908802487930490e-1, 0.3981934348036408922503176e-1, 0.3962752950781054295639346e-1, 0.3937225562423312193722022e-1, 0.3905393062777341314731136e-1, 0.3867306428725767400389548e-1, 0.3823026652585098764962036e-1, 0.3772624644432424786429014e-1, 0.3716181118549838685067108e-1, 0.3653786464168470064819248e-1, 0.3585540600719169544500572e-1, 0.3511552817821718947488010e-1, 0.3431941600268909029029166e-1, 0.3346834438285897797298150e-1, 0.3256367623368904440805548e-1, 0.3160686030030479773888294e-1, 0.3059942883801304528943330e-1, 0.2954299515860694641162030e-1, 0.2843925104689751626239046e-1, 0.2728996405162436486456432e-1, 0.2609697465510883502983394e-1, 0.2486219332622245076144308e-1, 0.2358759746145747209645146e-1, 0.2227522821911388676305032e-1, 0.2092718725187772678537816e-1, 0.1954563334339992337791787e-1, 0.1813277895498232864440684e-1, 0.1669088668934389186621294e-1, 0.1522226568017845169331591e-1, 0.1372926792014414839372596e-1, 0.1221428454978988639768250e-1, 0.1067974215748111335351669e-1, 0.9128099227255087276943326e-2, 0.7561843189439718826977318e-2, 0.5983489944440407989648850e-2, 0.4395596039460346742737866e-2, 0.2800868811838630411609396e-2, 0.1204024566067353280336448e-2}, + {0.3901781365630665481128044e-1, 0.3895839596276953119862554e-1, 0.3883965105905196893177418e-1, 0.3866175977407646332707712e-1, 0.3842499300695942318521238e-1, 0.3812971131447763834420674e-1, 0.3777636436200139748977496e-1, 0.3736549023873049002670538e-1, 0.3689771463827600883915092e-1, 0.3637374990583597804396502e-1, 0.3579439395341605460286146e-1, 0.3516052904474759349552658e-1, 0.3447312045175392879436434e-1, 0.3373321498461152281667534e-1, 0.3294193939764540138283636e-1, 0.3210049867348777314805654e-1, 
0.3121017418811470164244288e-1, 0.3027232175955798066122008e-1, 0.2928836958326784769276746e-1, 0.2825981605727686239675312e-1, 0.2718822750048638067441898e-1, 0.2607523576756511790296854e-1, 0.2492253576411549110511808e-1, 0.2373188286593010129319242e-1, 0.2250509024633246192622164e-1, 0.2124402611578200638871032e-1, 0.1995061087814199892889169e-1, 0.1862681420829903142873492e-1, 0.1727465205626930635858456e-1, 0.1589618358372568804490352e-1, 0.1449350804050907611696272e-1, 0.1306876159240133929378674e-1, 0.1162411412079782691646643e-1, 0.1016176604110306452083288e-1, 0.8683945269260858426408640e-2, 0.7192904768117312752674654e-2, 0.5690922451403198649270494e-2, 0.4180313124694895236739096e-2, 0.2663533589512681669292770e-2, 0.1144950003186941534544369e-2}, + {0.3807230964014187120769602e-1, 0.3801710843143526990530278e-1, 0.3790678605050578477946422e-1, 0.3774150245427586967153708e-1, 0.3752149728818502087157412e-1, 0.3724708953872766418784006e-1, 0.3691867707095445699853162e-1, 0.3653673605160765284219780e-1, 0.3610182025872702307569544e-1, 0.3561456027872747268049598e-1, 0.3507566259211269038478042e-1, 0.3448590854915070550737888e-1, 0.3384615323699685874463648e-1, 0.3315732423990721132775848e-1, 0.3242042029434060507783656e-1, 0.3163650984090024553762352e-1, 0.3080672947521562981366802e-1, 0.2993228230001272463508596e-1, 0.2901443618076440396145302e-1, 0.2805452190745423047171398e-1, 0.2705393126512477151978662e-1, 0.2601411501601702375386842e-1, 0.2493658079624075515577230e-1, 0.2382289093004782634222678e-1, 0.2267466016491410310244200e-1, 0.2149355333077484404348958e-1, 0.2028128292691215890157032e-1, 0.1903960664017892507303976e-1, 0.1777032479849840714698234e-1, 0.1647527776398370889101217e-1, 0.1515634327076256178846848e-1, 0.1381543371412645938772740e-1, 0.1245449340114210467973318e-1, 0.1107549578175989632022419e-1, 0.9680440704371073736965104e-2, 0.8271351818383685604431294e-2, 0.6850274534183526184325356e-2, 0.5419276232446765090703842e-2, 0.3980457937856074619030326e-2, 0.2536054696856106109823094e-2, 0.1090118595275830866109234e-2}, + {0.3717153701903406760328362e-1, 0.3712016261260209427372758e-1, 0.3701748480379452058524442e-1, 0.3686364550259030771845208e-1, 0.3665885732875907563657692e-1, 0.3640340331800212248862624e-1, 0.3609763653077256670175260e-1, 0.3574197956431530727788894e-1, 0.3533692396860127616038866e-1, 0.3488302956696330845641672e-1, 0.3438092368237270062133504e-1, 0.3383130027042598480372494e-1, 0.3323491896024044407471552e-1, 0.3259260400458425718361322e-1, 0.3190524314069272748402282e-1, 0.3117378636334566129196750e-1, 0.3039924461190246977311372e-1, 0.2958268837311084528960516e-1, 0.2872524620162180221266452e-1, 0.2782810316025840603576668e-1, 0.2689249918219763751581640e-1, 0.2591972735733464772516052e-1, 0.2491113214520642888439108e-1, 0.2386810751695823938471552e-1, 0.2279209502894212933888898e-1, 0.2168458183064482298924430e-1, 0.2054709860975627861152400e-1, 0.1938121747731880864780669e-1, 0.1818854979605654992760044e-1, 0.1697074395521161134308213e-1, 0.1572948309558359820159970e-1, 0.1446648278916118624227443e-1, 0.1318348867918234598679997e-1, 0.1188227408980122349505120e-1, 0.1056463762300824526484878e-1, 0.9232400784190247014382770e-2, 0.7887405752648146382107148e-2, 0.6531513687713654601121566e-2, 0.5166605182746808329881136e-2, 0.3794591650452349696393000e-2, 0.2417511265443122855238466e-2, 0.1039133516451971889197062e-2}, + {0.3631239537581333828231516e-1, 0.3626450208420238743149194e-1, 0.3616877866860063758274494e-1, 
0.3602535138093525771008956e-1, 0.3583440939092405578977942e-1, 0.3559620453657549559069116e-1, 0.3531105099203420508058466e-1, 0.3497932485321009937141316e-1, 0.3460146364173769225993442e-1, 0.3417796572791990463423808e-1, 0.3370938967341755486497158e-1, 0.3319635349455159712009034e-1, 0.3263953384718992195609868e-1, 0.3203966513429401611022852e-1, 0.3139753853730286555853332e-1, 0.3071400097263205318303994e-1, 0.2998995397466493249133840e-1, 0.2922635250670994458366154e-1, 0.2842420370149349475731242e-1, 0.2758456553285124838738412e-1, 0.2670854542037220957530654e-1, 0.2579729876883953540777106e-1, 0.2485202744439983591832606e-1, 0.2387397818947900497321768e-1, 0.2286444097854800644577274e-1, 0.2182474731692762780068420e-1, 0.2075626848490914279058154e-1, 0.1966041372956217980740210e-1, 0.1853862840670985920631482e-1, 0.1739239207569054238672012e-1, 0.1622321654972902258808405e-1, 0.1503264390508137868494523e-1, 0.1382224445276667086664874e-1, 0.1259361467806969781040954e-1, 0.1134837515617770397716730e-1, 0.1008816846038610565467284e-1, 0.8814657101954815703782366e-2, 0.7529521612194562606844596e-2, 0.6234459139140123463885784e-2, 0.4931184096960103696423408e-2, 0.3621439249610901437553882e-2, 0.2307087488809902925963262e-2, 0.9916432666203635255681510e-3}, + {0.3549206430171454529606746e-1, 0.3544734460447076970614316e-1, 0.3535796155642384379366902e-1, 0.3522402777945910853287866e-1, 0.3504571202900426139658624e-1, 0.3482323898139935499312912e-1, 0.3455688895080708413486530e-1, 0.3424699753602007873736958e-1, 0.3389395519761025923989258e-1, 0.3349820676595309252806520e-1, 0.3306025088074670014528066e-1, 0.3258063936273210868623942e-1, 0.3205997651840638806926700e-1, 0.3149891837860489232004182e-1, 0.3089817187191219763370292e-1, 0.3025849393394352533513752e-1, 0.2958069055361934911335230e-1, 0.2886561575763542924647688e-1, 0.2811417053440861349157908e-1, 0.2732730169885533083562360e-1, 0.2650600069943473772140906e-1, 0.2565130236896194788477952e-1, 0.2476428362076873302532156e-1, 0.2384606209185966126357838e-1, 0.2289779473478114232724788e-1, 0.2192067635998985359563460e-1, 0.2091593813057662423225406e-1, 0.1988484601127411324360109e-1, 0.1882869917375545139470985e-1, 0.1774882836032407455649534e-1, 0.1664659420821765604511323e-1, 0.1552338553693355384016474e-1, 0.1438061760129994423593466e-1, 0.1321973031362791170818164e-1, 0.1204218643958121230973900e-1, 0.1084946977542927125940107e-1, 0.9643083322053204400769368e-2, 0.8424547492702473015098308e-2, 0.7195398459796372059759572e-2, 0.5957186996138046583131162e-2, 0.4711479279598661743021848e-2, 0.3459867667862796423976646e-2, 0.2204058563143696628535344e-2, 0.9473355981619272667700360e-3}, + {0.3470797248895005792046014e-1, 0.3466615208568824018827232e-1, 0.3458256166949689141805380e-1, 0.3445730196032425617459566e-1, 0.3429052388637504193169728e-1, 0.3408242840225399546360508e-1, 0.3383326624683168725792750e-1, 0.3354333764112427668293316e-1, 0.3321299192655131651404080e-1, 0.3284262714400750457863018e-1, 0.3243268955425561691178950e-1, 0.3198367310021857603945600e-1, 0.3149611881181863607695780e-1, 0.3097061415408092094593650e-1, 0.3040779231928695269039426e-1, 0.2980833146403127548714788e-1, 0.2917295389210074248655798e-1, 0.2850242518416141631875546e-1, 0.2779755327530227515803874e-1, 0.2705918748154795852161408e-1, 0.2628821747651458736159580e-1, 0.2548557221944322848446706e-1, 0.2465221883590485293596628e-1, 0.2378916145252872321010090e-1, 0.2289743998716318463498862e-1, 0.2197812889593413383869188e-1, 
0.2103233587872256311706242e-1, 0.2006120054463959596453232e-1, 0.1906589303913731842532399e-1, 0.1804761263446023616404962e-1, 0.1700758628522267570939747e-1, 0.1594706715100663901320649e-1, 0.1486733308804332405038481e-1, 0.1376968511233709343075118e-1, 0.1265544583716812886887583e-1, 0.1152595788914805885059348e-1, 0.1038258230989321461380844e-1, 0.9226696957741990940319884e-2, 0.8059694944620015658670990e-2, 0.6882983208463284314729370e-2, 0.5697981560747352600849438e-2, 0.4506123613674977864136850e-2, 0.3308867243336018195431340e-2, 0.2107778774526329891473788e-2, 0.9059323712148330937360098e-3}, + {0.3395777082810234796700260e-1, 0.3391860442372254949502722e-1, 0.3384031678893360189141840e-1, 0.3372299821957387169380074e-1, 0.3356678402920367631007550e-1, 0.3337185439303681030780114e-1, 0.3313843414012938182262046e-1, 0.3286679249406566032646806e-1, 0.3255724276244004524316198e-1, 0.3221014197549332953574452e-1, 0.3182589047432008582597260e-1, 0.3140493144912217791614030e-1, 0.3094775042804103166804096e-1, 0.3045487471715832098063528e-1, 0.2992687279231107330786762e-1, 0.2936435364342281261274650e-1, 0.2876796607210717582237958e-1, 0.2813839794335440451445112e-1, 0.2747637539216417339517938e-1, 0.2678266198604032330048838e-1, 0.2605805784431417922245786e-1, 0.2530339871531322569754810e-1, 0.2451955501244097425717108e-1, 0.2370743081028191239353720e-1, 0.2286796280189254240434106e-1, 0.2200211921848585739874382e-1, 0.2111089871276246180997612e-1, 0.2019532920718748374956428e-1, 0.1925646670855947471237209e-1, 0.1829539409026755729118717e-1, 0.1731321984368977636114053e-1, 0.1631107680025595800481463e-1, 0.1529012082579650150690625e-1, 0.1425152948895392526580707e-1, 0.1319650070571113802911160e-1, 0.1212625136263771052929676e-1, 0.1104201592263539422398575e-1, 0.9945045019726082041770092e-2, 0.8836604056467877374547944e-2, 0.7717971837373568504533128e-2, 0.6590439334214895223179124e-2, 0.5455308908000870987158870e-2, 0.4313895331861700472339122e-2, 0.3167535943396097874261610e-2, 0.2017671366262838591883234e-2, 0.8671851787671421353540866e-3}, + {0.3323930891781532080070524e-1, 0.3320257661860686379876634e-1, 0.3312915261254696321600516e-1, 0.3301911803949165507667076e-1, 0.3287259449712959072614770e-1, 0.3268974390660630715252838e-1, 0.3247076833358767948450850e-1, 0.3221590976496030711281812e-1, 0.3192544984141561392584074e-1, 0.3159970954621320046477392e-1, 0.3123904885046741788219108e-1, 0.3084386631534918741110674e-1, 0.3041459865164271220328128e-1, 0.2995172023714386920008800e-1, 0.2945574259243367639719146e-1, 0.2892721381560625584227516e-1, 0.2836671797657610681272962e-1, 0.2777487447163422062065088e-1, 0.2715233733896656472388262e-1, 0.2649979453589169919669406e-1, 0.2581796717861672816440260e-1, 0.2510760874535240512858038e-1, 0.2436950424366898830634656e-1, 0.2360446934301438228050796e-1, 0.2281334947335523641001192e-1, 0.2199701889094007717339700e-1, 0.2115637971222138981504522e-1, 0.2029236091701113217988866e-1, 0.1940591732198200488605189e-1, 0.1849802852566591095380957e-1, 0.1756969782614325199872555e-1, 0.1662195111266549663832874e-1, 0.1565583573251555786002188e-1, 0.1467241933449946420426407e-1, 0.1367278869060687850644038e-1, 0.1265804849763899444482439e-1, 0.1162932016112241459607371e-1, 0.1058774056495412223672440e-1, 0.9534460832865158250063918e-2, 0.8470645094534635999910406e-2, 0.7397469288142356200862272e-2, 0.6316120091036448223107804e-2, 0.5227794289507767545307002e-2, 0.4133699875407776483295790e-2, 0.3035065891038628027389626e-2, 
0.1933219888725418943121000e-2, 0.8308716126821624946495838e-3}, + {0.3255061449236316624196142e-1, 0.3251611871386883598720548e-1, 0.3244716371406426936401278e-1, 0.3234382256857592842877486e-1, 0.3220620479403025066866710e-1, 0.3203445623199266321813896e-1, 0.3182875889441100653475374e-1, 0.3158933077072716855802074e-1, 0.3131642559686135581278434e-1, 0.3101033258631383742324982e-1, 0.3067137612366914901422878e-1, 0.3029991542082759379408878e-1, 0.2989634413632838598438796e-1, 0.2946108995816790597043632e-1, 0.2899461415055523654267862e-1, 0.2849741106508538564559948e-1, 0.2797000761684833443981840e-1, 0.2741296272602924282342110e-1, 0.2682686672559176219805676e-1, 0.2621234073567241391345816e-1, 0.2557003600534936149879724e-1, 0.2490063322248361028838244e-1, 0.2420484179236469128226730e-1, 0.2348339908592621984223612e-1, 0.2273706965832937400134754e-1, 0.2196664443874434919475618e-1, 0.2117293989219129898767356e-1, 0.2035679715433332459524556e-1, 0.1951908114014502241008485e-1, 0.1866067962741146738515655e-1, 0.1778250231604526083761406e-1, 0.1688547986424517245047785e-1, 0.1597056290256229138061685e-1, 0.1503872102699493800587588e-1, 0.1409094177231486091586166e-1, 0.1312822956696157263706415e-1, 0.1215160467108831963518178e-1, 0.1116210209983849859121361e-1, 0.1016077053500841575758671e-1, 0.9148671230783386632584044e-2, 0.8126876925698759217383246e-2, 0.7096470791153865269143206e-2, 0.6058545504235961683315686e-2, 0.5014202742927517692471308e-2, 0.3964554338444686673733524e-2, 0.2910731817934946408411678e-2, 0.1853960788946921732331620e-2, 0.7967920655520124294367096e-3}, + {0.3188987535287646727794502e-1, 0.3185743815812401071309920e-1, 0.3179259676252863019831786e-1, 0.3169541712034925160907410e-1, 0.3156599807910805290145092e-1, 0.3140447127904656151748860e-1, 0.3121100101922626441684056e-1, 0.3098578409040993463104290e-1, 0.3072904957489366992001356e-1, 0.3044105861349325839490764e-1, 0.3012210413992189884853100e-1, 0.2977251058282947626617570e-1, 0.2939263353580649216776328e-1, 0.2898285939568834204744914e-1, 0.2854360496952788570349054e-1, 0.2807531705063613875324586e-1, 0.2757847196412239390009986e-1, 0.2705357508239612827767608e-1, 0.2650116031112363935248738e-1, 0.2592178954616244891846836e-1, 0.2531605210202609734314644e-1, 0.2468456411246099618197954e-1, 0.2402796790374549880324124e-1, 0.2334693134134927471268304e-1, 0.2264214715061843311126274e-1, 0.2191433221217865041901888e-1, 0.2116422683277485691127980e-1, 0.2039259399229191457948346e-1, 0.1960021856772633077323700e-1, 0.1878790653490468656148738e-1, 0.1795648414877062812244296e-1, 0.1710679710308990026235402e-1, 0.1623970967045369565272614e-1, 0.1535610382349775576849818e-1, 0.1445687833830440197756895e-1, 0.1354294788102946514364726e-1, 0.1261524207892195285778215e-1, 0.1167470457713812428742924e-1, 0.1072229208322431712024324e-1, 0.9758973402174096835348026e-2, 0.8785728467392263202699392e-2, 0.7803547379100754890979542e-2, 0.6813429479165215998771186e-2, 0.5816382546439639112764538e-2, 0.4813422398586770918478190e-2, 0.3805574085352359565512666e-2, 0.2793881135722130870629084e-2, 0.1779477041014528741695358e-2, 0.7647669822743134580383448e-3}, + {0.3125542345386335694764248e-1, 0.3122488425484935773237650e-1, 0.3116383569620990678381832e-1, 0.3107233742756651658781016e-1, 0.3095047885049098823406346e-1, 0.3079837903115259042771392e-1, 0.3061618658398044849645950e-1, 0.3040407952645482001650792e-1, 0.3016226510516914491906862e-1, 0.2989097959333283091683684e-1, 0.2959048805991264251175454e-1, 
0.2926108411063827662011896e-1, 0.2890308960112520313487610e-1, 0.2851685432239509799093676e-1, 0.2810275565910117331764820e-1, 0.2766119822079238829420408e-1, 0.2719261344657688013649158e-1, 0.2669745918357096266038448e-1, 0.2617621923954567634230892e-1, 0.2562940291020811607564182e-1, 0.2505754448157958970376402e-1, 0.2446120270795705271997480e-1, 0.2384096026596820596256040e-1, 0.2319742318525412162248878e-1, 0.2253122025633627270179672e-1, 0.2184300241624738631395360e-1, 0.2113344211252764154267220e-1, 0.2040323264620943276683910e-1, 0.1965308749443530586538157e-1, 0.1888373961337490455294131e-1, 0.1809594072212811666439111e-1, 0.1729046056832358243934388e-1, 0.1646808617614521264310506e-1, 0.1562962107754600272393719e-1, 0.1477588452744130176887969e-1, 0.1390771070371877268795387e-1, 0.1302594789297154228555807e-1, 0.1213145766297949740774437e-1, 0.1122511402318597711722209e-1, 0.1030780257486896958578198e-1, 0.9380419653694457951417628e-2, 0.8443871469668971402620252e-2, 0.7499073255464711578829804e-2, 0.6546948450845322764152444e-2, 0.5588428003865515157213478e-2, 0.4624450063422119351093868e-2, 0.3655961201326375182342828e-2, 0.2683925371553482419437272e-2, 0.1709392653518105239533969e-2, 0.7346344905056717304142370e-3}, +} + +var oddWeights = [][]float64{ + {0.5555555555555555555555555}, + {0.4786286704993664680412916e0, 0.2369268850561890875142644e0}, + {0.3818300505051189449503698e0, 0.2797053914892766679014680e0, 0.1294849661688696932706118e0}, + {0.3123470770400028400686304e0, 0.2606106964029354623187428e0, 0.1806481606948574040584721e0, 0.8127438836157441197189206e-1}, + {0.2628045445102466621806890e0, 0.2331937645919904799185238e0, 0.1862902109277342514260979e0, 0.1255803694649046246346947e0, 0.5566856711617366648275374e-1}, + {0.2262831802628972384120902e0, 0.2078160475368885023125234e0, 0.1781459807619457382800468e0, 0.1388735102197872384636019e0, 0.9212149983772844791442126e-1, 0.4048400476531587952001996e-1}, + {0.1984314853271115764561182e0, 0.1861610000155622110268006e0, 0.1662692058169939335532006e0, 0.1395706779261543144478051e0, 0.1071592204671719350118693e0, 0.7036604748810812470926662e-1, 0.3075324199611726835462762e-1}, + {0.1765627053669926463252710e0, 0.1680041021564500445099705e0, 0.1540457610768102880814317e0, 0.1351363684685254732863199e0, 0.1118838471934039710947887e0, 0.8503614831717918088353538e-1, 0.5545952937398720112944102e-1, 0.2414830286854793196010920e-1}, + {0.1589688433939543476499565e0, 0.1527660420658596667788553e0, 0.1426067021736066117757460e0, 0.1287539625393362276755159e0, 0.1115666455473339947160242e0, 0.9149002162244999946446222e-1, 0.6904454273764122658070790e-1, 0.4481422676569960033283728e-1, 0.1946178822972647703631351e-1}, + {0.1445244039899700590638271e0, 0.1398873947910731547221335e0, 0.1322689386333374617810526e0, 0.1218314160537285341953671e0, 0.1087972991671483776634747e0, 0.9344442345603386155329010e-1, 0.7610011362837930201705132e-1, 0.5713442542685720828363528e-1, 0.3695378977085249379995034e-1, 0.1601722825777433332422273e-1}, + {0.1324620394046966173716425e0, 0.1289057221880821499785954e0, 0.1230490843067295304675784e0, 0.1149966402224113649416434e0, 0.1048920914645414100740861e0, 0.9291576606003514747701876e-1, 0.7928141177671895492289248e-1, 0.6423242140852585212716980e-1, 0.4803767173108466857164124e-1, 0.3098800585697944431069484e-1, 0.1341185948714177208130864e-1}, + {0.1222424429903100416889594e0, 0.1194557635357847722281782e0, 0.1148582591457116483393255e0, 0.1085196244742636531160939e0, 0.1005359490670506442022068e0, 
0.9102826198296364981149704e-1, 0.8014070033500101801323524e-1, 0.6803833381235691720718712e-1, 0.5490469597583519192593686e-1, 0.4093915670130631265562402e-1, 0.2635498661503213726190216e-1, 0.1139379850102628794789998e-1}, + {0.1134763461089651486203700e0, 0.1112524883568451926721632e0, 0.1075782857885331872121629e0, 0.1025016378177457986712478e0, 0.9608872737002850756565252e-1, 0.8842315854375695019432262e-1, 0.7960486777305777126307488e-1, 0.6974882376624559298432254e-1, 0.5898353685983359911030058e-1, 0.4744941252061506270409646e-1, 0.3529705375741971102257772e-1, 0.2268623159618062319603554e-1, 0.9798996051294360261149438e-2}, + {0.1058761550973209414065914e0, 0.1040733100777293739133284e0, 0.1010912737599149661218204e0, 0.9696383409440860630190016e-1, 0.9173775713925876334796636e-1, 0.8547225736617252754534480e-1, 0.7823832713576378382814484e-1, 0.7011793325505127856958160e-1, 0.6120309065707913854210970e-1, 0.5159482690249792391259412e-1, 0.4140206251868283610482948e-1, 0.3074049220209362264440778e-1, 0.1973208505612270598385931e-1, 0.8516903878746409654261436e-2}, + {0.9922501122667230787487546e-1, 0.9774333538632872509347402e-1, 0.9529024291231951280720412e-1, 0.9189011389364147821536290e-1, 0.8757674060847787612619794e-1, 0.8239299176158926390382334e-1, 0.7639038659877661642635764e-1, 0.6962858323541036616775632e-1, 0.6217478656102842691034334e-1, 0.5410308242491685371166596e-1, 0.4549370752720110290231576e-1, 0.3643227391238546402439264e-1, 0.2700901918497942180060860e-1, 0.1731862079031058246315918e-1, 0.7470831579248775858700554e-2}, + {0.9335642606559611616099912e-1, 0.9212398664331684621324104e-1, 0.9008195866063857723974370e-1, 0.8724828761884433760728158e-1, 0.8364787606703870761392808e-1, 0.7931236479488673836390848e-1, 0.7427985484395414934247216e-1, 0.6859457281865671280595482e-1, 0.6230648253031748003162750e-1, 0.5547084663166356128494468e-1, 0.4814774281871169567014706e-1, 0.4040154133166959156340938e-1, 0.3230035863232895328156104e-1, 0.2391554810174948035053310e-1, 0.1532170151293467612794584e-1, 0.6606227847587378058647800e-2}, + {0.8814053043027546297073886e-1, 0.8710444699718353424332214e-1, 0.8538665339209912522594402e-1, 0.8300059372885658837992644e-1, 0.7996494224232426293266204e-1, 0.7630345715544205353865872e-1, 0.7204479477256006466546180e-1, 0.6722228526908690396430546e-1, 0.6187367196608018888701398e-1, 0.5604081621237012857832772e-1, 0.4976937040135352980519956e-1, 0.4310842232617021878230592e-1, 0.3611011586346338053271748e-1, 0.2882926010889425404871630e-1, 0.2132297991148358088343844e-1, 0.1365082834836149226640441e-1, 0.5883433420443084975750336e-2}, + {0.8347457362586278725225302e-1, 0.8259527223643725089123018e-1, 0.8113662450846503050987774e-1, 0.7910886183752938076721222e-1, 0.7652620757052923788588804e-1, 0.7340677724848817272462668e-1, 0.6977245155570034488508154e-1, 0.6564872287275124948402376e-1, 0.6106451652322598613098804e-1, 0.5605198799827491780853916e-1, 0.5064629765482460160387558e-1, 0.4488536466243716665741054e-1, 0.3880960250193454448896226e-1, 0.3246163984752148106723444e-1, 0.2588603699055893352275954e-1, 0.1912904448908396604350259e-1, 0.1223878010030755652630649e-1, 0.5273057279497939351724544e-2}, + {0.7927622256836847101015574e-1, 0.7852361328737117672506330e-1, 0.7727455254468201672851160e-1, 0.7553693732283605770478448e-1, 0.7332175341426861738115402e-1, 0.7064300597060876077011486e-1, 0.6751763096623126536302120e-1, 0.6396538813868238898670650e-1, 0.6000873608859614957494160e-1, 0.5567269034091629990739094e-1, 
0.5098466529212940521402098e-1, 0.4597430110891663188417682e-1, 0.4067327684793384393905618e-1, 0.3511511149813133076106530e-1, 0.2933495598390337859215654e-1, 0.2336938483217816459471240e-1, 0.1725622909372491904080491e-1, 0.1103478893916459424267603e-1, 0.4752944691635101370775866e-2}, + {0.7547874709271582402724706e-1, 0.7482962317622155189130518e-1, 0.7375188202722346993928094e-1, 0.7225169686102307339634646e-1, 0.7033766062081749748165896e-1, 0.6802073676087676673553342e-1, 0.6531419645352741043616384e-1, 0.6223354258096631647157330e-1, 0.5879642094987194499118590e-1, 0.5502251924257874188014710e-1, 0.5093345429461749478117008e-1, 0.4655264836901434206075674e-1, 0.4190519519590968942934048e-1, 0.3701771670350798843526154e-1, 0.3191821173169928178706676e-1, 0.2663589920711044546754900e-1, 0.2120106336877955307569710e-1, 0.1564493840781858853082666e-1, 0.9999938773905945338496546e-2, 0.4306140358164887684003630e-2}, + {0.7202750197142197434530754e-1, 0.7146373425251414129758106e-1, 0.7052738776508502812628636e-1, 0.6922334419365668428229950e-1, 0.6755840222936516919240796e-1, 0.6554124212632279749123378e-1, 0.6318238044939611232562970e-1, 0.6049411524999129451967862e-1, 0.5749046195691051942760910e-1, 0.5418708031888178686337342e-1, 0.5060119278439015652385048e-1, 0.4675149475434658001064704e-1, 0.4265805719798208376380686e-1, 0.3834222219413265757212856e-1, 0.3382649208686029234496834e-1, 0.2913441326149849491594084e-1, 0.2429045661383881590201850e-1, 0.1931990142368390039612543e-1, 0.1424875643157648610854214e-1, 0.9103996637401403318866628e-2, 0.3919490253844127282968528e-2}, + {0.6887731697766132288200278e-1, 0.6838457737866967453169206e-1, 0.6756595416360753627091012e-1, 0.6642534844984252808291474e-1, 0.6496819575072343085382664e-1, 0.6320144007381993774996374e-1, 0.6113350083106652250188634e-1, 0.5877423271884173857436156e-1, 0.5613487875978647664392382e-1, 0.5322801673126895194590376e-1, 0.5006749923795202979913194e-1, 0.4666838771837336526776814e-1, 0.4304688070916497115169120e-1, 0.3922023672930244756418756e-1, 0.3520669220160901624770010e-1, 0.3102537493451546716250854e-1, 0.2669621396757766480567536e-1, 0.2223984755057873239395080e-1, 0.1767753525793759061709347e-1, 0.1303110499158278432063191e-1, 0.8323189296218241645734836e-2, 0.3582663155283558931145652e-2}, + {0.6599053358881047453357062e-1, 0.6555737776654974025114294e-1, 0.6483755623894572670260402e-1, 0.6383421660571703063129384e-1, 0.6255174622092166264056434e-1, 0.6099575300873964533071060e-1, 0.5917304094233887597615438e-1, 0.5709158029323154022201646e-1, 0.5476047278153022595712512e-1, 0.5218991178005714487221170e-1, 0.4939113774736116960457022e-1, 0.4637638908650591120440168e-1, 0.4315884864847953826830162e-1, 0.3975258612253100378090162e-1, 0.3617249658417495161345948e-1, 0.3243423551518475676761786e-1, 0.2855415070064338650473990e-1, 0.2454921165965881853783378e-1, 0.2043693814766842764203432e-1, 0.1623533314643305967072624e-1, 0.1196284846431232096394232e-1, 0.7638616295848833614105174e-2, 0.3287453842528014883248206e-2}, + {0.6333550929649174859083696e-1, 0.6295270746519569947439960e-1, 0.6231641732005726740107682e-1, 0.6142920097919293629682652e-1, 0.6029463095315201730310616e-1, 0.5891727576002726602452756e-1, 0.5730268153018747548516450e-1, 0.5545734967480358869043158e-1, 0.5338871070825896852794302e-1, 0.5110509433014459067462262e-1, 0.4861569588782824027765094e-1, 0.4593053935559585354249958e-1, 0.4306043698125959798834538e-1, 0.4001694576637302136860494e-1, 0.3681232096300068981946734e-1, 
0.3345946679162217434248744e-1, 0.2997188462058382535069014e-1, 0.2636361892706601696094518e-1, 0.2264920158744667649877160e-1, 0.1884359585308945844445106e-1, 0.1496214493562465102958377e-1, 0.1102055103159358049750846e-1, 0.7035099590086451473452956e-2, 0.3027278988922905077484090e-2}, + {0.6088546484485634388119860e-1, 0.6054550693473779513812526e-1, 0.5998031577750325209006396e-1, 0.5919199392296154378353896e-1, 0.5818347398259214059843780e-1, 0.5695850772025866210007778e-1, 0.5552165209573869301673704e-1, 0.5387825231304556143409938e-1, 0.5203442193669708756413650e-1, 0.4999702015005740977954886e-1, 0.4777362624062310199999514e-1, 0.4537251140765006874816670e-1, 0.4280260799788008665360980e-1, 0.4007347628549645318680892e-1, 0.3719526892326029284290846e-1, 0.3417869320418833623620910e-1, 0.3103497129016000845442504e-1, 0.2777579859416247719599602e-1, 0.2441330057378143427314164e-1, 0.2095998840170321057979252e-1, 0.1742871472340105225950284e-1, 0.1383263400647782229668883e-1, 0.1018519129782172993923731e-1, 0.6500337783252600292109494e-2, 0.2796807171089895575547228e-2}, + {0.5861758623272026331807196e-1, 0.5831431136225600755627570e-1, 0.5781001499171319631968304e-1, 0.5710643553626719177338328e-1, 0.5620599838173970980865512e-1, 0.5511180752393359900234954e-1, 0.5382763486873102904208140e-1, 0.5235790722987271819970160e-1, 0.5070769106929271529648556e-1, 0.4888267503269914042044844e-1, 0.4688915034075031402187278e-1, 0.4473398910367281021276570e-1, 0.4242462063452001359228150e-1, 0.3996900584354038212709364e-1, 0.3737560980348291567417214e-1, 0.3465337258353423795838740e-1, 0.3181167845901932306323576e-1, 0.2886032361782373626279970e-1, 0.2580948251075751771396152e-1, 0.2266967305707020839878928e-1, 0.1945172110763689538804750e-1, 0.1616672525668746392806095e-1, 0.1282602614424037917915135e-1, 0.9441202284940344386662890e-2, 0.6024276226948673281242120e-2, 0.2591683720567031811603734e-2}, + {0.5651231824977200140065834e-1, 0.5624063407108436802827906e-1, 0.5578879419528408710293598e-1, 0.5515824600250868759665114e-1, 0.5435100932991110207032224e-1, 0.5336967000160547272357054e-1, 0.5221737154563208456439348e-1, 0.5089780512449397922477522e-1, 0.4941519771155173948075862e-1, 0.4777429855120069555003682e-1, 0.4598036394628383810390480e-1, 0.4403914042160658989516800e-1, 0.4195684631771876239520718e-1, 0.3974015187433717960946388e-1, 0.3739615786796554528291572e-1, 0.3493237287358988740726862e-1, 0.3235668922618583168470572e-1, 0.2967735776516104122129630e-1, 0.2690296145639627066711996e-1, 0.2404238800972562200779126e-1, 0.2110480166801645412020978e-1, 0.1809961452072906240796732e-1, 0.1503645833351178821315019e-1, 0.1192516071984861217075236e-1, 0.8775746107058528177390204e-2, 0.5598632266560767354082364e-2, 0.2408323619979788819164582e-2}, + {0.5455280360476188648013898e-1, 0.5430847145249864313874678e-1, 0.5390206148329857464280950e-1, 0.5333478658481915842657698e-1, 0.5260833972917743244023134e-1, 0.5172488892051782472062386e-1, 0.5068707072492740865664050e-1, 0.4949798240201967899383808e-1, 0.4816117266168775126885110e-1, 0.4668063107364150378384082e-1, 0.4506077616138115779721374e-1, 0.4330644221621519659643210e-1, 0.4142286487080111036319668e-1, 0.3941566547548011408995280e-1, 0.3729083432441731735473546e-1, 0.3505471278231261750575064e-1, 0.3271397436637156854248994e-1, 0.3027560484269399945849064e-1, 0.2774688140218019232125814e-1, 0.2513535099091812264727322e-1, 0.2244880789077643807968978e-1, 0.1969527069948852038242318e-1, 0.1688295902344154903500062e-1, 
0.1402027079075355617024753e-1, 0.1111576373233599014567619e-1, 0.8178160067821232626211086e-2, 0.5216533474718779390504886e-2, 0.2243753872250662909727492e-2}, + {0.5272443385912793196130422e-1, 0.5250390264782873905094128e-1, 0.5213703364837539138398724e-1, 0.5162484939089148214644000e-1, 0.5096877742539391685024800e-1, 0.5017064634299690281072034e-1, 0.4923268067936198577969374e-1, 0.4815749471460644038814684e-1, 0.4694808518696201919315986e-1, 0.4560782294050976983186828e-1, 0.4414044353029738069079808e-1, 0.4255003681106763866730838e-1, 0.4084103553868670766020196e-1, 0.3901820301616000950303072e-1, 0.3708661981887092269183778e-1, 0.3505166963640010878371850e-1, 0.3291902427104527775751116e-1, 0.3069462783611168323975056e-1, 0.2838468020053479790515332e-1, 0.2599561973129850018665014e-1, 0.2353410539371336342527500e-1, 0.2100699828843718735046168e-1, 0.1842134275361002936061624e-1, 0.1578434731308146614732024e-1, 0.1310336630634519101831859e-1, 0.1038588550099586219379846e-1, 0.7639529453487575142699186e-2, 0.4872239168265284768580414e-2, 0.2095492284541223402697724e-2}, + {0.5101448703869726354373512e-1, 0.5081476366881834320770052e-1, 0.5048247038679740464814450e-1, 0.5001847410817825342505160e-1, 0.4942398534673558993996884e-1, 0.4870055505641152608753004e-1, 0.4785007058509560716183348e-1, 0.4687475075080906597642932e-1, 0.4577714005314595937133982e-1, 0.4456010203508348827154136e-1, 0.4322681181249609790104358e-1, 0.4178074779088849206667564e-1, 0.4022568259099824736764020e-1, 0.3856567320700817274615216e-1, 0.3680505042315481738432126e-1, 0.3494840751653335109085198e-1, 0.3300058827590741063272390e-1, 0.3096667436839739482469792e-1, 0.2885197208818340150434184e-1, 0.2666199852415088966281066e-1, 0.2440246718754420291534050e-1, 0.2207927314831904400247522e-1, 0.1969847774610118133051782e-1, 0.1726629298761374359443389e-1, 0.1478906588493791454617878e-1, 0.1227326350781210462927897e-1, 0.9725461830356133736135366e-2, 0.7152354991749089585834616e-2, 0.4560924006012417184541648e-2, 0.1961453361670282671779431e-2}, + {0.4941183303991817896703964e-1, 0.4923038042374756078504314e-1, 0.4892845282051198994470936e-1, 0.4850678909788384786409014e-1, 0.4796642113799513141105276e-1, 0.4730867131226891908060508e-1, 0.4653514924538369651039536e-1, 0.4564774787629260868588592e-1, 0.4464863882594139537033256e-1, 0.4354026708302759079896428e-1, 0.4232534502081582298250554e-1, 0.4100684575966639863511004e-1, 0.3958799589154409398480778e-1, 0.3807226758434955676363856e-1, 0.3646337008545728963045232e-1, 0.3476524064535587769718026e-1, 0.3298203488377934176568344e-1, 0.3111811662221981750821608e-1, 0.2917804720828052694555162e-1, 0.2716657435909793322519012e-1, 0.2508862055334498661862972e-1, 0.2294927100488993314894282e-1, 0.2075376125803909077534152e-1, 0.1850746446016127040926083e-1, 0.1621587841033833888228333e-1, 0.1388461261611561082486681e-1, 0.1151937607688004175075116e-1, 0.9125968676326656354058462e-2, 0.6710291765960136251908410e-2, 0.4278508346863761866081200e-2, 0.1839874595577084117085868e-2}, + {0.4790669250049586203134730e-1, 0.4774134868124062155903898e-1, 0.4746619823288550315264446e-1, 0.4708187401045452224600686e-1, 0.4658925997223349830225508e-1, 0.4598948914665169696389334e-1, 0.4528394102630023065712822e-1, 0.4447423839508297442732352e-1, 0.4356224359580048653228480e-1, 0.4255005424675580271921714e-1, 0.4143999841724029302268646e-1, 0.4023462927300553381544642e-1, 0.3893671920405119761667398e-1, 0.3754925344825770980977246e-1, 0.3607542322556527393216642e-1, 
0.3451861839854905862522142e-1, 0.3288241967636857498404946e-1, 0.3117059038018914246443218e-1, 0.2938706778931066806264472e-1, 0.2753595408845034394249940e-1, 0.2562150693803775821408458e-1, 0.2364812969128723669878144e-1, 0.2162036128493406284165378e-1, 0.1954286583675006282683714e-1, 0.1742042199767024849536596e-1, 0.1525791214644831034926464e-1, 0.1306031163999484633616732e-1, 0.1083267878959796862151440e-1, 0.8580148266881459893636434e-2, 0.6307942578971754550189764e-2, 0.4021524172003736347075858e-2, 0.1729258251300250898337759e-2}, + {0.4649043816026462820831466e-1, 0.4633935168241562110844706e-1, 0.4608790448976157619721740e-1, 0.4573664116106369093689412e-1, 0.4528632245466953156805004e-1, 0.4473792366088982547214182e-1, 0.4409263248975101830783160e-1, 0.4335184649869951735915584e-1, 0.4251717006583049147154770e-1, 0.4159041091519924309854838e-1, 0.4057357620174452522725164e-1, 0.3946886816430888264288692e-1, 0.3827867935617948064763712e-1, 0.3700558746349258202313488e-1, 0.3565234972274500666133270e-1, 0.3422189694953664673983902e-1, 0.3271732719153120542712204e-1, 0.3114189901947282393742616e-1, 0.2949902447094566969584718e-1, 0.2779226166243676998720012e-1, 0.2602530708621323880370460e-1, 0.2420198760967316472069180e-1, 0.2232625219645207692279754e-1, 0.2040216337134354044925720e-1, 0.1843388845680457387216616e-1, 0.1642569062253087920472674e-1, 0.1438191982720055093097663e-1, 0.1230700384928815052195302e-1, 0.1020544003410244098666155e-1, 0.8081790299023136215346300e-2, 0.5940693177582235216514606e-2, 0.3787008301825508445960626e-2, 0.1628325035240012866460003e-2}, + {0.4515543023614546051651704e-1, 0.4501700814039980219871620e-1, 0.4478661887831255754213528e-1, 0.4446473312204713809623108e-1, 0.4405200846590928438098588e-1, 0.4354928808292674103357578e-1, 0.4295759900230521387841984e-1, 0.4227815001128051285158270e-1, 0.4151232918565450208287406e-1, 0.4066170105406160053752604e-1, 0.3972800340176164120645862e-1, 0.3871314372049251393273936e-1, 0.3761919531164090650815840e-1, 0.3644839305070051405664348e-1, 0.3520312882168348614775456e-1, 0.3388594663083228949780964e-1, 0.3249953740964611124473418e-1, 0.3104673351789053903268552e-1, 0.2953050295790671177981110e-1, 0.2795394331218770599086132e-1, 0.2632027541686948379176090e-1, 0.2463283678454245536433616e-1, 0.2289507479074078565552120e-1, 0.2111053963987189462789068e-1, 0.1928287712884940278924393e-1, 0.1741582123196982913207401e-1, 0.1551318654340616473976910e-1, 0.1357886064907567099981112e-1, 0.1161679661067196554873961e-1, 0.9631006150415575588660562e-2, 0.7625555931201510611459992e-2, 0.5604579927870594828535346e-2, 0.3572416739397372609702552e-2, 0.1535976952792084075135094e-2}, + {0.4389487921178858632125256e-1, 0.4376774491340214497230982e-1, 0.4355612710410853337113396e-1, 0.4326043426324126659885626e-1, 0.4288123715758043502060704e-1, 0.4241926773962459303533940e-1, 0.4187541773473300618954268e-1, 0.4125073691986602424910896e-1, 0.4054643109724689643492514e-1, 0.3976385976685758167433708e-1, 0.3890453350226294749240264e-1, 0.3797011103483115621441804e-1, 0.3696239605198203185608278e-1, 0.3588333371564891077796844e-1, 0.3473500690768218837536532e-1, 0.3351963220945403083440624e-1, 0.3223955562344352694190700e-1, 0.3089724804509072169860608e-1, 0.2949530049370881246493644e-1, 0.2803641911174149061798030e-1, 0.2652341994215790800810512e-1, 0.2495922349431387305527612e-1, 0.2334684910922325263171504e-1, 0.2168940913598536796183230e-1, 0.1999010293235011128748561e-1, 0.1825221070467867050232934e-1, 
0.1647908720746239655059230e-1, 0.1467415533461152920040808e-1, 0.1284089966808780607041846e-1, 0.1098286015429855170627475e-1, 0.9103626461992005851317578e-2, 0.7206835281831493387342912e-2, 0.5296182844025892632677844e-2, 0.3375555496730675865126842e-2, 0.1451267330029397268489446e-2}, + {0.4270273086485722207660098e-1, 0.4258568982601838702576300e-1, 0.4239085899223159440537396e-1, 0.4211859425425563626894556e-1, 0.4176939294869285375410172e-1, 0.4134389294952549452688336e-1, 0.4084287150293886154936056e-1, 0.4026724380756003336494178e-1, 0.3961806134270614331650800e-1, 0.3889650994769673952047552e-1, 0.3810390765573980059550798e-1, 0.3724170228634977315689404e-1, 0.3631146880069778469034650e-1, 0.3531490642472828750906318e-1, 0.3425383554530221541412972e-1, 0.3313019438504384067706900e-1, 0.3194603546197670648650132e-1, 0.3070352184043350493812614e-1, 0.2940492318011656010545704e-1, 0.2805261159057206032380240e-1, 0.2664905729872748295223048e-1, 0.2519682413753831281333190e-1, 0.2369856486421897462660896e-1, 0.2215701631704007205676952e-1, 0.2057499442036116916601972e-1, 0.1895538904867002168973610e-1, 0.1730115876248908300560664e-1, 0.1561532543359142299553300e-1, 0.1390096878831465086752053e-1, 0.1216122092928111272776412e-1, 0.1039926099500053220130511e-1, 0.8618310479532247613912182e-2, 0.6821631349174792362208078e-2, 0.5012538571606190263812266e-2, 0.3194524377289034522078870e-2, 0.1373376462759619223985654e-2}, + {0.4157356944178127878299940e-1, 0.4146558103261909213524834e-1, 0.4128580808246718908346088e-1, 0.4103456181139210667622250e-1, 0.4071227717293733029875788e-1, 0.4031951210114157755817430e-1, 0.3985694654465635257596536e-1, 0.3932538128963516252076754e-1, 0.3872573657343257584146640e-1, 0.3805905049151360313563098e-1, 0.3732647720033209016730652e-1, 0.3652928491929033900685118e-1, 0.3566885373524045308911856e-1, 0.3474667321333040653509838e-1, 0.3376433981833409264695562e-1, 0.3272355415093422052152286e-1, 0.3162611800374964805603220e-1, 0.3047393124221453920313760e-1, 0.2926898851572598680503318e-1, 0.2801337580478054082525924e-1, 0.2670926681012085177235442e-1, 0.2535891919021637909420806e-1, 0.2396467065371695917476570e-1, 0.2252893491386577645054636e-1, 0.2105419751228284223644546e-1, 0.1954301152012788937957076e-1, 0.1799799312564505063794604e-1, 0.1642181711902464004359937e-1, 0.1481721228981446852013731e-1, 0.1318695676282480211961300e-1, 0.1153387332830449596681366e-1, 0.9860824916114018392051822e-2, 0.8170710707327826403717118e-2, 0.6466464907037538401963982e-2, 0.4751069185015273965898868e-2, 0.3027671014606041291230134e-2, 0.1301591717375855993899257e-2}, + {0.4050253572678803195524960e-1, 0.4040269003221775617032620e-1, 0.4023646282485108419526524e-1, 0.4000412721559123741035150e-1, 0.3970606493128931068103760e-1, 0.3934276568757015193713232e-1, 0.3891482638423378562103292e-1, 0.3842295012455452367368120e-1, 0.3786794506008932026166678e-1, 0.3725072306289371887876038e-1, 0.3657229822732745453345840e-1, 0.3583378520391196260264276e-1, 0.3503639736797827845487748e-1, 0.3418144482611567926531782e-1, 0.3327033226369854530283962e-1, 0.3230455663703097559357210e-1, 0.3128570471390543339395640e-1, 0.3021545046662299869139892e-1, 0.2909555232176876134870268e-1, 0.2792785027127696854150716e-1, 0.2671426284955789083200264e-1, 0.2545678398169440375263742e-1, 0.2415747970795584494059388e-1, 0.2281848479012952051290956e-1, 0.2144199920545613550512462e-1, 0.2003028453431617639624646e-1, 0.1858566024834148550917969e-1, 0.1711049990653110417623953e-1, 
0.1560722726874913129508073e-1, 0.1407831234002700405016720e-1, 0.1252626736922736518735940e-1, 0.1095364285391135423859170e-1, 0.9363023692386430769260798e-2, 0.7757025950083070731841176e-2, 0.6138296159756341839268696e-2, 0.4509523600205835333238688e-2, 0.2873553083652691657275240e-2, 0.1235291177139409614163874e-2}, + {0.3948525740129116475372166e-1, 0.3939275600474300393426418e-1, 0.3923874749659464355491890e-1, 0.3902347234287979602650502e-1, 0.3874726667023996706818530e-1, 0.3841056174110417740541666e-1, 0.3801388328032604954551756e-1, 0.3755785065432977047790708e-1, 0.3704317590404678415983790e-1, 0.3647066263315342752925638e-1, 0.3584120475334575228920704e-1, 0.3515578508861113112825058e-1, 0.3441547384067660088259166e-1, 0.3362142691803093004992252e-1, 0.3277488413113081785342150e-1, 0.3187716725661117036051890e-1, 0.3092967797352483528829388e-1, 0.2993389567483836289564858e-1, 0.2889137515760726678163634e-1, 0.2780374419544705894443552e-1, 0.2667270099710555653788310e-1, 0.2550001155512877394733978e-1, 0.2428750688879949263942200e-1, 0.2303708018571902627697914e-1, 0.2175068384660807976864198e-1, 0.2043032643814085987844290e-1, 0.1907806955893748858478357e-1, 0.1769602462431041786466318e-1, 0.1628634957619168209183741e-1, 0.1485124552635006931857919e-1, 0.1339295334482567619730830e-1, 0.1191375021511699869960077e-1, 0.1041594620451338257918368e-1, 0.8901880982652486253740074e-2, 0.7373921131330176830391914e-2, 0.5834459868763465589211910e-2, 0.4285929113126531218219446e-2, 0.2730907065754855918535274e-2, 0.1173930129956613021207112e-2}, + {0.3851778959688469523783810e-1, 0.3843192958037517210025656e-1, 0.3828897129558352443032002e-1, 0.3808912713547560183102332e-1, 0.3783269400830055924757518e-1, 0.3752005289647583785923924e-1, 0.3715166829056371214474266e-1, 0.3672808749918043951690600e-1, 0.3624993983586341279832570e-1, 0.3571793568410456853072614e-1, 0.3513286544193937941597898e-1, 0.3449559834765979589474544e-1, 0.3380708118839624555119598e-1, 0.3306833689348800442087536e-1, 0.3228046301473268887240310e-1, 0.3144463009577406641803652e-1, 0.3056207993305266189565968e-1, 0.2963412373090559765847516e-1, 0.2866214015356067622579182e-1, 0.2764757327692492691108618e-1, 0.2659193044321992109092004e-1, 0.2549678002166567706947970e-1, 0.2436374907856309733249090e-1, 0.2319452096027391988145570e-1, 0.2199083279275163277050144e-1, 0.2075447290144560853952252e-1, 0.1948727815560191821592671e-1, 0.1819113124125576115176324e-1, 0.1686795786763513947433495e-1, 0.1551972391246436293824549e-1, 0.1414843251323606554825229e-1, 0.1275612111513442100025550e-1, 0.1134485849541625576200880e-1, 0.9916741809595875499750926e-2, 0.8473893785345565449616918e-2, 0.7018460484931625511609624e-2, 0.5552611370256278902273182e-2, 0.4078551113421395586018386e-2, 0.2598622299928953013499446e-2, 0.1117029847124606606122469e-2}, + {0.3759656394395517759196934e-1, 0.3751672450373727271505762e-1, 0.3738378433575740441091762e-1, 0.3719793160197673054400130e-1, 0.3695942935618497107975802e-1, 0.3666861517167809004390068e-1, 0.3632590066346228889989584e-1, 0.3593177090566064734733082e-1, 0.3548678374494710264584324e-1, 0.3499156901097965473152462e-1, 0.3444682762495051683252180e-1, 0.3385333060751519869931002e-1, 0.3321191798750501518117324e-1, 0.3252349761296806599129116e-1, 0.3178904386622215064354856e-1, 0.3100959628473919484306724e-1, 0.3018625808981441705410184e-1, 0.2932019462510452791804122e-1, 0.2841263170724764156375054e-1, 0.2746485389090326123892810e-1, 0.2647820265067376248510830e-1, 
0.2545407448248949675081806e-1, 0.2439391892715855749743432e-1, 0.2329923651890054937016126e-1, 0.2217157666180362262199056e-1, 0.2101253543726991787400918e-1, 0.1982375334565493904931242e-1, 0.1860691298547847284166721e-1, 0.1736373667382462235016547e-1, 0.1609598401193537091543832e-1, 0.1480544940071787768084914e-1, 0.1349395951237523498069998e-1, 0.1216337072779861206303406e-1, 0.1081556655803715872036043e-1, 0.9452455092479699888244178e-2, 0.8075966593123452283593892e-2, 0.6688051635243685741358420e-2, 0.5290681445859865555240374e-2, 0.3885859435353202192003776e-2, 0.2475719322545939743331242e-2, 0.1064168219666567756385077e-2}, + {0.3671834473341961622215226e-1, 0.3664397593378570248640692e-1, 0.3652013948874488485747660e-1, 0.3634700257169520376675674e-1, 0.3612479890936246037475190e-1, 0.3585382846628081255691520e-1, 0.3553445703985569908199156e-1, 0.3516711576655578824981280e-1, 0.3475230053990063752924744e-1, 0.3429057134102984670822224e-1, 0.3378255148275753033131186e-1, 0.3322892676813276976252854e-1, 0.3263044456464217818903764e-1, 0.3198791279530467445976990e-1, 0.3130219884802087044839684e-1, 0.3057422840464999572392432e-1, 0.2980498419139588737561256e-1, 0.2899550465219015208986610e-1, 0.2814688254686507584638292e-1, 0.2726026347601116478577010e-1, 0.2633684433451435982173160e-1, 0.2537787169586608847736972e-1, 0.2438464012943568314241580e-1, 0.2335849045298989189769872e-1, 0.2230080792283937418945736e-1, 0.2121302036408937967241628e-1, 0.2009659624357542174179408e-1, 0.1895304268818284044680496e-1, 0.1778390345139817090774314e-1, 0.1659075683115467007520452e-1, 0.1537521354238962687440865e-1, 0.1413891454840083293055609e-1, 0.1288352885649808429050626e-1, 0.1161075128670389800962475e-1, 0.1032230023052424589381722e-1, 0.9019915439993631278967098e-2, 0.7705355960382757079897960e-2, 0.6380398587897515098686098e-2, 0.5046838426924442725450432e-2, 0.3706500125759316706868292e-2, 0.2361331704285020896763904e-2, 0.1014971908967743695374167e-2}, + {0.3588019106018701587773518e-1, 0.3581080434383374175662560e-1, 0.3569525919440943377647946e-1, 0.3553370454416059391133478e-1, 0.3532634862941021369843054e-1, 0.3507345872215153655662536e-1, 0.3477536078554782924871120e-1, 0.3443243905378224376593820e-1, 0.3404513553679937345518354e-1, 0.3361394945057693558422230e-1, 0.3313943657366202353628890e-1, 0.3262220853080144392580048e-1, 0.3206293200458966777765818e-1, 0.3146232787615076393796228e-1, 0.3082117029596223415371898e-1, 0.3014028568601882474395096e-1, 0.2942055167462304824922484e-1, 0.2866289596517621838858744e-1, 0.2786829514042920598963448e-1, 0.2703777340373580728397710e-1, 0.2617240125893355894972542e-1, 0.2527329413055707316411874e-1, 0.2434161092616763233921348e-1, 0.2337855254266017225782364e-1, 0.2238536031848547821419758e-1, 0.2136331443380253159361604e-1, 0.2031373226065556952656956e-1, 0.1923796666535655878505047e-1, 0.1813740426535425205021816e-1, 0.1701346364300153443364516e-1, 0.1586759351882631900292224e-1, 0.1470127088723984222989451e-1, 0.1351599911824565808188095e-1, 0.1231330603004803654228712e-1, 0.1109474194056071927972064e-1, 0.9861877713701826716584494e-2, 0.8616302838488951832949878e-2, 0.7359623648818063660769462e-2, 0.6093462047634872130101964e-2, 0.4819456238501885899307624e-2, 0.3539271655388628540179688e-2, 0.2254690753752853092482060e-2, 0.9691097381770753376096654e-3}, + {0.3507942401790202531716760e-1, 0.3501458416619644336915306e-1, 0.3490660650856070989101148e-1, 0.3475562407298142092081152e-1, 0.3456182286913780813643384e-1, 
0.3432544165923908781796544e-1, 0.3404677166387108716735582e-1, 0.3372615620321457070630952e-1, 0.3336399027407732093971928e-1, 0.3296072006326111707429234e-1, 0.3251684239786320696758578e-1, 0.3203290413318958550703170e-1, 0.3150950147903428365879858e-1, 0.3094727926515484478947892e-1, 0.3034693014684912934340756e-1, 0.2970919375161245962730194e-1, 0.2903485576792681183001942e-1, 0.2832474697730520722803496e-1, 0.2757974223078458253347716e-1, 0.2680075937112917771256550e-1, 0.2598875810207383625148160e-1, 0.2514473880600256862281534e-1, 0.2426974131152233927366188e-1, 0.2336484361245544582716880e-1, 0.2243116053983636712835892e-1, 0.2146984238856114084341254e-1, 0.2048207350040027021224486e-1, 0.1946907080515187313867415e-1, 0.1843208232178411567584622e-1, 0.1737238562150240166964102e-1, 0.1629128625479238457754130e-1, 0.1519011614466612339747308e-1, 0.1407023194864448281388687e-1, 0.1293301339260267729158710e-1, 0.1177986158087489217661933e-1, 0.1061219728997218803268093e-1, 0.9431459260797890539711922e-2, 0.8239102525389078730572362e-2, 0.7036596870989114137389446e-2, 0.5825425788770107459644064e-2, 0.4607087343463241433054622e-2, 0.3383104792407455132632698e-2, 0.2155112582219113764637582e-2, 0.9262871051934728155239026e-3}, + {0.3431359817623139857242020e-1, 0.3425291647165106006719224e-1, 0.3415185977541012618567448e-1, 0.3401054720622907866548866e-1, 0.3382914533369793579365620e-1, 0.3360786798193575310982430e-1, 0.3334697597754983863697838e-1, 0.3304677684219179120016898e-1, 0.3270762443007278294842040e-1, 0.3232991851086539448409380e-1, 0.3191410429848369728859888e-1, 0.3146067192629708854519032e-1, 0.3097015586939654421561894e-1, 0.3044313431459439490344712e-1, 0.2988022847890037493277136e-1, 0.2928210187727747971826382e-1, 0.2864945954054102439649608e-1, 0.2798304718432316638118606e-1, 0.2728365033008298027898986e-1, 0.2655209337919890810307922e-1, 0.2578923864123601618879028e-1, 0.2499598531753495743256148e-1, 0.2417326844132287942221788e-1, 0.2332205777559880283599600e-1, 0.2244335667009737337332098e-1, 0.2153820087868566629622426e-1, 0.2060765733859846074045938e-1, 0.1965282291296914660474199e-1, 0.1867482309816812542178599e-1, 0.1767481069752190506037194e-1, 0.1665396446306124017225753e-1, 0.1561348770705005975095101e-1, 0.1455460688520869608484063e-1, 0.1347857015383097919431856e-1, 0.1238664590355674305453526e-1, 0.1128012127376968298340906e-1, 0.1016030065441547672889225e-1, 0.9028504189234487748913298e-2, 0.7886066314628901599629988e-2, 0.6734334432268884665261132e-2, 0.5574668047479788997832340e-2, 0.4408439747302676819065170e-2, 0.3237045507972104977098260e-2, 0.2061987122032229660677942e-2, 0.8862412406694141765769646e-3}, + {0.3358047670273290820423322e-1, 0.3352360509236689973246714e-1, 0.3342889041048296629425518e-1, 0.3329643957561578934524218e-1, 0.3312640210470322597293962e-1, 0.3291896994430459113247722e-1, 0.3267437725392241575486392e-1, 0.3239290014167229270630344e-1, 0.3207485635259921958171598e-1, 0.3172060490999230883258760e-1, 0.3133054571010280192591498e-1, 0.3090511907072293590876800e-1, 0.3044480523413530949647580e-1, 0.2995012382499392416587776e-1, 0.2942163326374897748551588e-1, 0.2885993013627770636290672e-1, 0.2826564852043306435742870e-1, 0.2763945927027071971311622e-1, 0.2698206925876273304878794e-1, 0.2629422057985327475229788e-1, 0.2557668971075783892217594e-1, 0.2483028663545258189183534e-1, 0.2405585393034465615306556e-1, 0.2325426581315775168991978e-1, 0.2242642715610957188910656e-1, 0.2157327246449981801505782e-1, 
0.2069576482186873448858912e-1, 0.1979489480292792866805571e-1, 0.1887167935550803461442971e-1, 0.1792716065281371317885285e-1, 0.1696240491732901090122756e-1, 0.1597850121778211678831695e-1, 0.1497656024067188095391932e-1, 0.1395771303800797072406999e-1, 0.1292310975318535045602668e-1, 0.1187391832744712509861298e-1, 0.1081132319054248938202577e-1, 0.9736523941887687826947068e-2, 0.8650734035428648314139846e-2, 0.7555179500769820751618632e-2, 0.6451097794311275889059324e-2, 0.5339737098169214613757504e-2, 0.4222357382406607998634106e-2, 0.3100240403099316775464478e-2, 0.1974768768686808388940061e-2, 0.8487371680679110048896640e-3}, + {0.3287800959763194823557646e-1, 0.3282463569369918669308888e-1, 0.3273574336068393226919658e-1, 0.3261142878598215425670652e-1, 0.3245182648620325926685946e-1, 0.3225710916161441434734840e-1, 0.3202748750926769529295728e-1, 0.3176320999501228029097900e-1, 0.3146456258463840201321734e-1, 0.3113186843444399825682258e-1, 0.3076548754155891475295788e-1, 0.3036581635440506677724356e-1, 0.2993328734371411225240016e-1, 0.2946836853456688237515152e-1, 0.2897156299996101153484194e-1, 0.2844340831645486261311894e-1, 0.2788447598247691424309350e-1, 0.2729537079993022266578380e-1, 0.2667673021976135431896846e-1, 0.2602922365220227153290076e-1, 0.2535355174243201293660006e-1, 0.2465044561244261997612948e-1, 0.2392066606993061007707546e-1, 0.2316500278507139174920030e-1, 0.2238427343606939184041926e-1, 0.2157932282441140120676856e-1, 0.2075102196078490181790884e-1, 0.1990026712265721124487174e-1, 0.1902797888454570639306994e-1, 0.1813510112204514410759734e-1, 0.1722259999071698441334003e-1, 0.1629146288099104326591566e-1, 0.1534269735028835663459242e-1, 0.1437733003365908208357459e-1, 0.1339640553436828544136536e-1, 0.1240098529611606104018197e-1, 0.1139214645908584403924275e-1, 0.1037098070311609684083942e-1, 0.9338593083876397086740596e-2, 0.8296100874530990238145090e-2, 0.7244632443933199672626606e-2, 0.6185326261033323769312750e-2, 0.5119330329927718280032034e-2, 0.4047803316371759906879922e-2, 0.2971924240818190718436604e-2, 0.1892968377922935762776147e-2, 0.8135642494541165010544716e-3}, + {0.3220431459661350533475748e-1, 0.3215415737958550153577998e-1, 0.3207061987527279934927952e-1, 0.3195378880670864194528382e-1, 0.3180378546007149044495368e-1, 0.3162076555877401604294910e-1, 0.3140491910180172362457798e-1, 0.3115647016646904145775102e-1, 0.3087567667579765382432642e-1, 0.3056283013075858386135104e-1, 0.3021825530765601453452082e-1, 0.2984230992096702903457814e-1, 0.2943538425198732086424294e-1, 0.2899790074366843187205222e-1, 0.2853031356206718751823808e-1, 0.2803310812486267752680532e-1, 0.2750680059743034256009616e-1, 0.2695193735699644067363378e-1, 0.2636909442542934975707846e-1, 0.2575887687125678489535242e-1, 0.2512191818153004673565192e-1, 0.2445887960418784729059960e-1, 0.2377044946160306882104198e-1, 0.2305734243602599579639616e-1, 0.2232029882766713237862322e-1, 0.2156008378619171827843500e-1, 0.2077748651642656849799008e-1, 0.1997331945910804688818908e-1, 0.1914841744752812933525703e-1, 0.1830363684096414082229124e-1, 0.1743985463580780463940516e-1, 0.1655796755534245662902801e-1, 0.1565889111915692052020687e-1, 0.1474355869323695017635984e-1, 0.1381292052185304327114855e-1, 0.1286794274249338667571135e-1, 0.1190960638533075683273654e-1, 0.1093890635919594895396767e-1, 0.9956850427084044948237490e-2, 0.8964458176697999432566250e-2, 0.7962759997865495595598110e-2, 0.6952796096469405526464256e-2, 0.5935615630788222954183688e-2, 
0.4912276262166028130833504e-2, 0.3883845329489294421733034e-2, 0.2851409243213055771419126e-2, 0.1816146398210039609502983e-2, 0.7805332219425612457264822e-3}, + {0.3155766036791122885809208e-1, 0.3151046648162834771323796e-1, 0.3143186227722154616152128e-1, 0.3132192610907518012817474e-1, 0.3118076756395815837033438e-1, 0.3100852735178559535833486e-1, 0.3080537716535627949917920e-1, 0.3057151950920577999218210e-1, 0.3030718749774580397961262e-1, 0.3001264462289103447190280e-1, 0.2968818449140509844801766e-1, 0.2933413053222750347643324e-1, 0.2895083567407331040373860e-1, 0.2853868199362694972663692e-1, 0.2809808033468091126593440e-1, 0.2762946989859901232207604e-1, 0.2713331780651255092639320e-1, 0.2661011863368585130179228e-1, 0.2606039391651548254092866e-1, 0.2548469163265475465058230e-1, 0.2488358565478194644598738e-1, 0.2425767517855707823164026e-1, 0.2360758412533789404661778e-1, 0.2293396052025105528408320e-1, 0.2223747584623937158435550e-1, 0.2151882437473022381824646e-1, 0.2077872247359421120742490e-1, 0.2001790789308656620794778e-1, 0.1923713903048718479867380e-1, 0.1843719417417849927098560e-1, 0.1761887072792438050675710e-1, 0.1678298441613870708950299e-1, 0.1593036847096084971103802e-1, 0.1506187280199023331295260e-1, 0.1417836314957944606614279e-1, 0.1328072022265728347995425e-1, 0.1236983882217516210343368e-1, 0.1144662695149825376113323e-1, 0.1051200491552474540574917e-1, 0.9566904411326136356898158e-2, 0.8612267615478888991732218e-2, 0.7649046279335257935390770e-2, 0.6678200860575098165183170e-2, 0.5700699773395926875152328e-2, 0.4717519037520830079689318e-2, 0.3729643487243034749198276e-2, 0.2738075873626878091327392e-2, 0.1743906958219244938639563e-2, 0.7494736467374053633626714e-3}, +} + +// The factor in front of the direct product representation +var cl = [...]float64{1.0, 1.0, -0.5000000000000000000000000e0, -0.1500000000000000000000000e1, 0.3750000000000000000000000e0, 0.1875000000000000000000000e1, -0.3125000000000000000000000e0, -0.2187500000000000000000000e1, 0.2734375000000000000000000e0, 0.2460937500000000000000000e1, -0.2460937500000000000000000e0, -0.2707031250000000000000000e1, 0.2255859375000000000000000e0, 0.2932617187500000000000000e1, -0.2094726562500000000000000e0, -0.3142089843750000000000000e1, 0.1963806152343750000000000e0, 0.3338470458984375000000000e1, -0.1854705810546875000000000e0, -0.3523941040039062500000000e1, 0.1761970520019531250000000e0, 0.3700138092041015625000000e1, -0.1681880950927734375000000e0, -0.3868326187133789062500000e1, 0.1611802577972412109375000e0, 0.4029506444931030273437500e1, -0.1549810171127319335937500e0, -0.4184487462043762207031250e1, 0.1494459807872772216796875e0, 0.4333933442831039428710938e1, -0.1444644480943679809570312e0, -0.4478397890925407409667969e1, 0.1399499340914189815521240e0, 0.4618347825016826391220093e1, -0.1358337595593184232711792e0, -0.4754181584576144814491272e1, 0.1320605995715595781803131e0, 0.4886242184147704392671585e1, -0.1285853206354659050703049e0, -0.5014827504783170297741890e1, 0.1253706876195792574435472e0, 0.5140198192402749555185437e1, -0.1223856712476845132187009e0, -0.5262583863650434068404138e1, 0.1196041787193280470091850e0, 0.5382188042369762115413323e1, -0.1170040878776035242481157e0, -0.5499192130247365639661439e1, 0.1145665027134867841596133e0, 0.5613758632960852423821052e1, -0.1122751726592170484764210e0, -0.5726033805620069472297473e1, 0.1101160347234628744672591e0, 0.5836149840343532346764732e1, -0.1080768488952505990141617e0, -0.5944226689238782945778894e1, 
0.1061469051649782668889088e0, 0.6050373594403761212667803e1, -0.1043167861104096760804794e0, -0.6154690380514170888748282e1, 0.1025781730085695148124714e0, 0.6257268553522740403560753e1, -0.1009236863471409742509799e0, -0.6358192239869881377811733e1, 0.9934675374796689652830833e-1, 0.6457538993617848274340042e1, -0.9784149990330073142939457e-1, -0.6555380493521149005769436e1, 0.9640265431648748537896230e-1, 0.6651783147837636491148399e1, -0.9502547354053766415926284e-1, -0.6746808621378174155307661e1, 0.9370567529691908549038419e-1, 0.6840514296675093240798046e1, -0.9243938238750126001078440e-1, -0.6932953679062594500808830e1, 0.9122307472450782237906355e-1, 0.7024176753787102323187894e1, -0.9005354812547567081010120e-1, -0.7114230301912577993997995e1, 0.8892787877390722492497493e-1, 0.7203158180686485218922970e1, -0.8784339244739616120637768e-1, -0.7291001573133881380129347e1, 0.8679763777540334976344461e-1, 0.7377799210909284729892792e1, -0.8578836291754982244061386e-1, -0.7463587573826834552333406e1, 0.8481349515712311991287961e-1, 0.7548401068983957672246285e1, -0.8387112298871064080273650e-1, -0.7632272191972668313049022e1, 0.8295948034752900340270676e-1, 0.7715231672320197316451729e1, -0.8207693268425741826012477e-1, -0.7797308605004454734711853e1, 0.8122196463546307015324847e-1, 0.7878530569639917804865102e1, -0.8039316907795834494760308e-1, -0.7958923738717876149812705e1, 0.7958923738717876149812705e-1} diff --git a/vendor/gonum.org/v1/gonum/integrate/quad/quad.go b/vendor/gonum.org/v1/gonum/integrate/quad/quad.go new file mode 100644 index 0000000..2104835 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/quad/quad.go @@ -0,0 +1,157 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quad + +import ( + "math" + "sync" +) + +// FixedLocationer computes a set of quadrature locations and weights and stores +// them in-place into x and weight respectively. The number of points generated is equal to +// the len(x). The weights and locations should be chosen such that +// int_min^max f(x) dx ≈ \sum_i w_i f(x_i) +type FixedLocationer interface { + FixedLocations(x, weight []float64, min, max float64) +} + +// FixedLocationSingle returns the location and weight for element k in a +// fixed quadrature rule with n total samples and integral bounds from min to max. +type FixedLocationSingler interface { + FixedLocationSingle(n, k int, min, max float64) (x, weight float64) +} + +// Fixed approximates the integral of the function f from min to max using a fixed +// n-point quadrature rule. During evaluation, f will be evaluated n times using +// the weights and locations specified by rule. That is, Fixed estimates +// int_min^max f(x) dx ≈ \sum_i w_i f(x_i) +// If rule is nil, an acceptable default is chosen, otherwise it is +// assumed that the properties of the integral match the assumptions of rule. +// For example, Legendre assumes that the integration bounds are finite. If +// rule is also a FixedLocationSingler, the quadrature points are computed +// individually rather than as a unit. +// +// If concurrent <= 0, f is evaluated serially, while if concurrent > 0, f +// may be evaluated with at most concurrent simultaneous evaluations. +// +// min must be less than or equal to max, and n must be positive, otherwise +// Fixed will panic. 
+func Fixed(f func(float64) float64, min, max float64, n int, rule FixedLocationer, concurrent int) float64 { + // TODO(btracey): When there are Hermite polynomial quadrature, add an additional + // example to the documentation comment that talks about weight functions. + if n <= 0 { + panic("quad: non-positive number of locations") + } + if min > max { + panic("quad: min > max") + } + if min == max { + return 0 + } + intfunc := f + // If rule is non-nil it is assumed that the function and the constraints + // of rule are aligned. If it is nil, wrap the function and do something + // reasonable. + // TODO(btracey): Replace wrapping with other quadrature rules when + // we have rules that support infinite-bound integrals. + if rule == nil { + // int_a^b f(x)dx = int_u^-1(a)^u^-1(b) f(u(t))u'(t)dt + switch { + case math.IsInf(max, 1) && math.IsInf(min, -1): + // u(t) = (t/(1-t^2)) + min = -1 + max = 1 + intfunc = func(x float64) float64 { + v := 1 - x*x + return f(x/v) * (1 + x*x) / (v * v) + } + case math.IsInf(max, 1): + // u(t) = a + t / (1-t) + a := min + min = 0 + max = 1 + intfunc = func(x float64) float64 { + v := 1 - x + return f(a+x/v) / (v * v) + } + case math.IsInf(min, -1): + // u(t) = a - (1-t)/t + a := max + min = 0 + max = 1 + intfunc = func(x float64) float64 { + return f(a-(1-x)/x) / (x * x) + } + } + rule = Legendre{} + } + singler, isSingler := rule.(FixedLocationSingler) + + var xs, weights []float64 + if !isSingler { + xs = make([]float64, n) + weights = make([]float64, n) + rule.FixedLocations(xs, weights, min, max) + } + + if concurrent > n { + concurrent = n + } + + if concurrent <= 0 { + var integral float64 + // Evaluate in serial. + if isSingler { + for k := 0; k < n; k++ { + x, weight := singler.FixedLocationSingle(n, k, min, max) + integral += weight * intfunc(x) + } + return integral + } + for i, x := range xs { + integral += weights[i] * intfunc(x) + } + return integral + } + + // Evaluate concurrently + tasks := make(chan int) + + // Launch distributor + go func() { + for i := 0; i < n; i++ { + tasks <- i + } + close(tasks) + }() + + var mux sync.Mutex + var integral float64 + var wg sync.WaitGroup + wg.Add(concurrent) + for i := 0; i < concurrent; i++ { + // Launch workers + go func() { + defer wg.Done() + var subIntegral float64 + for k := range tasks { + var x, weight float64 + if isSingler { + x, weight = singler.FixedLocationSingle(n, k, min, max) + } else { + x = xs[k] + weight = weights[k] + } + f := intfunc(x) + subIntegral += f * weight + } + mux.Lock() + integral += subIntegral + mux.Unlock() + }() + } + wg.Wait() + return integral +} diff --git a/vendor/gonum.org/v1/gonum/integrate/trapezoidal.go b/vendor/gonum.org/v1/gonum/integrate/trapezoidal.go new file mode 100644 index 0000000..9cdfd3b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/integrate/trapezoidal.go @@ -0,0 +1,40 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package integrate + +import "sort" + +// Trapezoidal estimates the integral of a function f +// \int_a^b f(x) dx +// from a set of evaluations of the function using the trapezoidal rule. +// The trapezoidal rule makes piecewise linear approximations to the function, +// and estimates +// \int_x[i]^x[i+1] f(x) dx +// as +// (x[i+1] - x[i]) * (f[i] + f[i+1])/2 +// where f[i] is the value of the function at x[i]. 
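
As a quick orientation to the vendored quad package above, here is a minimal usage sketch (not part of the patch itself) of calling quad.Fixed as its doc comment describes: a finite-interval integral with a nil rule, so Fixed falls back to Gauss-Legendre, evaluated serially. The main function below is purely illustrative.

package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/integrate/quad"
)

func main() {
	// Estimate ∫_0^π sin(x) dx = 2 with a 100-point rule.
	// rule == nil lets Fixed choose quad.Legendre{} for finite bounds;
	// concurrent == 0 evaluates f serially.
	v := quad.Fixed(math.Sin, 0, math.Pi, 100, nil, 0)
	fmt.Printf("estimate: %v (exact: 2)\n", v)
}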
+// More details on the trapezoidal rule can be found at: +// https://en.wikipedia.org/wiki/Trapezoidal_rule +// +// The (x,f) input data points must be sorted along x. +// One can use github.com/gonum/stat.SortWeighted to do that. +// The x and f slices must be of equal length and have length > 1. +func Trapezoidal(x, f []float64) float64 { + switch { + case len(x) != len(f): + panic("integrate: slice length mismatch") + case len(x) < 2: + panic("integrate: input data too small") + case !sort.Float64sAreSorted(x): + panic("integrate: input must be sorted") + } + + integral := 0.0 + for i := 0; i < len(x)-1; i++ { + integral += 0.5 * (x[i+1] - x[i]) * (f[i+1] + f[i]) + } + + return integral +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyinc_amd64.s new file mode 100644 index 0000000..68490e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyinc_amd64.s @@ -0,0 +1,134 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVDDUP X2, X3 +#define MOVDDUP_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xDA +// MOVDDUP X4, X5 +#define MOVDDUP_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xEC +// MOVDDUP X6, X7 +#define MOVDDUP_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xFE +// MOVDDUP X8, X9 +#define MOVDDUP_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC8 + +// ADDSUBPD X2, X3 +#define ADDSUBPD_X2_X3 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA +// ADDSUBPD X4, X5 +#define ADDSUBPD_X4_X5 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC +// ADDSUBPD X6, X7 +#define ADDSUBPD_X6_X7 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE +// ADDSUBPD X8, X9 +#define ADDSUBPD_X8_X9 BYTE $0x66; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyInc(alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyInc(SB), NOSPLIT, $0 + MOVQ x_base+16(FP), SI // SI = &x + MOVQ y_base+40(FP), DI // DI = &y + MOVQ n+64(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JE axpyi_end + MOVQ ix+88(FP), R8 // R8 = ix // Load the first index + SHLQ $4, R8 // R8 *= sizeof(complex128) + MOVQ iy+96(FP), R9 // R9 = iy + SHLQ $4, R9 // R9 *= sizeof(complex128) + LEAQ (SI)(R8*1), SI // SI = &(x[ix]) + LEAQ (DI)(R9*1), DI // DI = &(y[iy]) + MOVQ DI, DX // DX = DI // Separate Read/Write pointers + MOVQ incX+72(FP), R8 // R8 = incX + SHLQ $4, R8 // R8 *= sizeof(complex128) + MOVQ incY+80(FP), R9 // R9 = iy + SHLQ $4, R9 // R9 *= sizeof(complex128) + MOVUPS alpha+0(FP), X0 // X0 = { imag(a), real(a) } + MOVAPS X0, X1 + SHUFPD $0x1, X1, X1 // X1 = { real(a), imag(a) } + MOVAPS X0, X10 // Copy X0 and X1 for pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ axpyi_tail // if BX == 0 { goto axpyi_tail } + +axpyi_loop: // do { + MOVUPS (SI), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS (SI)(R8*1), X4 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + MOVUPS (SI), X6 + MOVUPS (SI)(R8*1), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + 
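
For the same reason, a small sketch (illustrative only, not part of the patch) of the Trapezoidal helper vendored above, applied to f(x) = x² on [0, 2] sampled at five sorted points; the exact value is 8/3.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/integrate"
)

func main() {
	// x must be sorted and len(x) == len(f) > 1, otherwise Trapezoidal panics.
	x := []float64{0, 0.5, 1, 1.5, 2}
	f := make([]float64, len(x))
	for i, xi := range x {
		f[i] = xi * xi
	}
	fmt.Println(integrate.Trapezoidal(x, f)) // ≈ 2.75 (exact 8/3 ≈ 2.667)
}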
MULPD X1, X2 + MULPD X0, X3 + MULPD X11, X4 + MULPD X10, X5 + MULPD X1, X6 + MULPD X0, X7 + MULPD X11, X8 + MULPD X10, X9 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DX), X3 + ADDPD (DX)(R9*1), X5 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + ADDPD (DX), X7 + ADDPD (DX)(R9*1), X9 + MOVUPS X3, (DI) // dst[i] = X_(i+1) + MOVUPS X5, (DI)(R9*1) + LEAQ (DI)(R9*2), DI + MOVUPS X7, (DI) + MOVUPS X9, (DI)(R9*1) + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R9*2), DI // DI = &(DI[incY*2]) + DECQ BX + JNZ axpyi_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE axpyi_end + +axpyi_tail: // do { + MOVUPS (SI), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD X1, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X0, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DI), X3 + MOVUPS X3, (DI) // y[i] = X_i + ADDQ R8, SI // SI = &(SI[incX]) + ADDQ R9, DI // DI = &(DI[incY]) + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyincto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyincto_amd64.s new file mode 100644 index 0000000..50d21f2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyincto_amd64.s @@ -0,0 +1,141 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
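
For readers skimming the c128 assembly above, the AxpyInc kernel implements a strided complex AXPY. A plain-Go restatement of the same semantics (a sketch under that assumption; the helper name axpyIncRef is hypothetical and not part of the vendored package) is:

package c128ref

// axpyIncRef mirrors what c128.AxpyInc computes:
// y[iy + k*incY] += alpha * x[ix + k*incX] for k = 0 .. n-1.
func axpyIncRef(alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) {
	for k := 0; k < int(n); k++ {
		y[iy] += alpha * x[ix]
		ix += incX
		iy += incY
	}
}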
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVDDUP X2, X3 +#define MOVDDUP_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xDA +// MOVDDUP X4, X5 +#define MOVDDUP_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xEC +// MOVDDUP X6, X7 +#define MOVDDUP_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xFE +// MOVDDUP X8, X9 +#define MOVDDUP_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC8 + +// ADDSUBPD X2, X3 +#define ADDSUBPD_X2_X3 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA +// ADDSUBPD X4, X5 +#define ADDSUBPD_X4_X5 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC +// ADDSUBPD X6, X7 +#define ADDSUBPD_X6_X7 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE +// ADDSUBPD X8, X9 +#define ADDSUBPD_X8_X9 BYTE $0x66; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyIncTo(dst []complex128, incDst, idst uintptr, alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyIncTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+56(FP), SI // SI = &x + MOVQ y_base+80(FP), DX // DX = &y + MOVQ n+104(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JE axpyi_end + MOVQ ix+128(FP), R8 // R8 = ix // Load the first index + SHLQ $4, R8 // R8 *= sizeof(complex128) + MOVQ iy+136(FP), R9 // R9 = iy + SHLQ $4, R9 // R9 *= sizeof(complex128) + MOVQ idst+32(FP), R10 // R10 = idst + SHLQ $4, R10 // R10 *= sizeof(complex128) + LEAQ (SI)(R8*1), SI // SI = &(x[ix]) + LEAQ (DX)(R9*1), DX // DX = &(y[iy]) + LEAQ (DI)(R10*1), DI // DI = &(dst[idst]) + MOVQ incX+112(FP), R8 // R8 = incX + SHLQ $4, R8 // R8 *= sizeof(complex128) + MOVQ incY+120(FP), R9 // R9 = incY + SHLQ $4, R9 // R9 *= sizeof(complex128) + MOVQ incDst+24(FP), R10 // R10 = incDst + SHLQ $4, R10 // R10 *= sizeof(complex128) + MOVUPS alpha+40(FP), X0 // X0 = { imag(a), real(a) } + MOVAPS X0, X1 + SHUFPD $0x1, X1, X1 // X1 = { real(a), imag(a) } + MOVAPS X0, X10 // Copy X0 and X1 for pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ axpyi_tail // if BX == 0 { goto axpyi_tail } + +axpyi_loop: // do { + MOVUPS (SI), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS (SI)(R8*1), X4 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + + MOVUPS (SI), X6 + MOVUPS (SI)(R8*1), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X1, X2 + MULPD X0, X3 + MULPD X11, X4 + MULPD X10, X5 + MULPD X1, X6 + MULPD X0, X7 + MULPD X11, X8 + MULPD X10, X9 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DX), X3 + ADDPD (DX)(R9*1), X5 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + ADDPD (DX), X7 + ADDPD (DX)(R9*1), X9 + MOVUPS X3, (DI) // dst[i] = X_(i+1) + MOVUPS X5, (DI)(R10*1) + LEAQ (DI)(R10*2), DI + MOVUPS X7, (DI) + MOVUPS X9, (DI)(R10*1) + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R10*2), DI // DI = &(DI[incDst*2]) + DECQ BX + JNZ axpyi_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE 
axpyi_end + +axpyi_tail: // do { + MOVUPS (SI), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD X1, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X0, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DX), X3 + MOVUPS X3, (DI) // y[i] X_(i+1) + ADDQ R8, SI // SI += incX + ADDQ R9, DX // DX += incY + ADDQ R10, DI // DI += incDst + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitary_amd64.s new file mode 100644 index 0000000..ccf8289 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitary_amd64.s @@ -0,0 +1,122 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVDDUP X2, X3 +#define MOVDDUP_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xDA +// MOVDDUP X4, X5 +#define MOVDDUP_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xEC +// MOVDDUP X6, X7 +#define MOVDDUP_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xFE +// MOVDDUP X8, X9 +#define MOVDDUP_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC8 + +// ADDSUBPD X2, X3 +#define ADDSUBPD_X2_X3 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA +// ADDSUBPD X4, X5 +#define ADDSUBPD_X4_X5 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC +// ADDSUBPD X6, X7 +#define ADDSUBPD_X6_X7 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE +// ADDSUBPD X8, X9 +#define ADDSUBPD_X8_X9 BYTE $0x66; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyUnitary(alpha complex128, x, y []complex128) +TEXT ·AxpyUnitary(SB), NOSPLIT, $0 + MOVQ x_base+16(FP), SI // SI = &x + MOVQ y_base+40(FP), DI // DI = &y + MOVQ x_len+24(FP), CX // CX = min( len(x), len(y) ) + CMPQ y_len+48(FP), CX + CMOVQLE y_len+48(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + PXOR X0, X0 // Clear work registers and cache-align loop + PXOR X1, X1 + MOVUPS alpha+0(FP), X0 // X0 = { imag(a), real(a) } + MOVAPS X0, X1 + SHUFPD $0x1, X1, X1 // X1 = { real(a), imag(a) } + XORQ AX, AX // i = 0 + MOVAPS X0, X10 // Copy X0 and X1 for pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ caxy_tail // if BX == 0 { goto caxy_tail } + +caxy_loop: // do { + MOVUPS (SI)(AX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS 16(SI)(AX*8), X4 + MOVUPS 32(SI)(AX*8), X6 + MOVUPS 48(SI)(AX*8), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X1, X2 + MULPD X0, X3 + MULPD X11, X4 + MULPD X10, X5 + MULPD X1, X6 + MULPD X0, X7 + MULPD X11, X8 + MULPD X10, X9 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): 
real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DI)(AX*8), X3 + ADDPD 16(DI)(AX*8), X5 + ADDPD 32(DI)(AX*8), X7 + ADDPD 48(DI)(AX*8), X9 + MOVUPS X3, (DI)(AX*8) // y[i] = X_(i+1) + MOVUPS X5, 16(DI)(AX*8) + MOVUPS X7, 32(DI)(AX*8) + MOVUPS X9, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + DECQ BX + JNZ caxy_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + +caxy_tail: // do { + MOVUPS (SI)(AX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD X1, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X0, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DI)(AX*8), X3 + MOVUPS X3, (DI)(AX*8) // y[i] = X_(i+1) + ADDQ $2, AX // i += 2 + LOOP caxy_tail // } while --CX > 0 + +caxy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitaryto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitaryto_amd64.s new file mode 100644 index 0000000..07ceabc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/axpyunitaryto_amd64.s @@ -0,0 +1,123 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVDDUP X2, X3 +#define MOVDDUP_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xDA +// MOVDDUP X4, X5 +#define MOVDDUP_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xEC +// MOVDDUP X6, X7 +#define MOVDDUP_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0x12; BYTE $0xFE +// MOVDDUP X8, X9 +#define MOVDDUP_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC8 + +// ADDSUBPD X2, X3 +#define ADDSUBPD_X2_X3 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA +// ADDSUBPD X4, X5 +#define ADDSUBPD_X4_X5 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC +// ADDSUBPD X6, X7 +#define ADDSUBPD_X6_X7 BYTE $0x66; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE +// ADDSUBPD X8, X9 +#define ADDSUBPD_X8_X9 BYTE $0x66; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyUnitaryTo(dst []complex128, alpha complex64, x, y []complex128) +TEXT ·AxpyUnitaryTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+40(FP), SI // SI = &x + MOVQ y_base+64(FP), DX // DX = &y + MOVQ x_len+48(FP), CX // CX = min( len(x), len(y), len(dst) ) + CMPQ y_len+72(FP), CX + CMOVQLE y_len+72(FP), CX + CMPQ dst_len+8(FP), CX + CMOVQLE dst_len+8(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + MOVUPS alpha+24(FP), X0 // X0 = { imag(a), real(a) } + MOVAPS X0, X1 + SHUFPD $0x1, X1, X1 // X1 = { real(a), imag(a) } + XORQ AX, AX // i = 0 + MOVAPS X0, X10 // Copy X0 and X1 for pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ caxy_tail // if BX == 0 { goto caxy_tail } + +caxy_loop: // do { + MOVUPS (SI)(AX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS 16(SI)(AX*8), X4 + MOVUPS 32(SI)(AX*8), X6 + MOVUPS 48(SI)(AX*8), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 // 
Load and duplicate imag elements (xi, xi) + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 // duplicate real elements (xr, xr) + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X1, X2 + MULPD X0, X3 + MULPD X11, X4 + MULPD X10, X5 + MULPD X1, X6 + MULPD X0, X7 + MULPD X11, X8 + MULPD X10, X9 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DX)(AX*8), X3 + ADDPD 16(DX)(AX*8), X5 + ADDPD 32(DX)(AX*8), X7 + ADDPD 48(DX)(AX*8), X9 + MOVUPS X3, (DI)(AX*8) // y[i] = X_(i+1) + MOVUPS X5, 16(DI)(AX*8) + MOVUPS X7, 32(DI)(AX*8) + MOVUPS X9, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + DECQ BX + JNZ caxy_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + +caxy_tail: // Same calculation, but read in values to avoid trampling memory + MOVUPS (SI)(AX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD X1, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X0, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + // X_(i+1) = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + ADDPD (DX)(AX*8), X3 + MOVUPS X3, (DI)(AX*8) // y[i] = X_(i+1) + ADDQ $2, AX // i += 2 + LOOP caxy_tail // } while --CX > 0 + +caxy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/doc.go b/vendor/gonum.org/v1/gonum/internal/asm/c128/doc.go new file mode 100644 index 0000000..8802ff1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package c128 provides complex128 vector primitives. +package c128 // import "gonum.org/v1/gonum/internal/asm/c128" diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcinc_amd64.s new file mode 100644 index 0000000..03c07db --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcinc_amd64.s @@ -0,0 +1,153 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_XPTR__X3 LONG $0x1E120FF2 // MOVDDUP (SI), X3 +#define MOVDDUP_XPTR_INCX__X5 LONG $0x120F42F2; WORD $0x062C // MOVDDUP (SI)(R8*1), X5 +#define MOVDDUP_XPTR_INCX_2__X7 LONG $0x120F42F2; WORD $0x463C // MOVDDUP (SI)(R8*2), X7 +#define MOVDDUP_XPTR_INCx3X__X9 LONG $0x120F46F2; WORD $0x0E0C // MOVDDUP (SI)(R9*1), X9 + +#define MOVDDUP_8_XPTR__X2 LONG $0x56120FF2; BYTE $0x08 // MOVDDUP 8(SI), X2 +#define MOVDDUP_8_XPTR_INCX__X4 LONG $0x120F42F2; WORD $0x0664; BYTE $0x08 // MOVDDUP 8(SI)(R8*1), X4 +#define MOVDDUP_8_XPTR_INCX_2__X6 LONG $0x120F42F2; WORD $0x4674; BYTE $0x08 // MOVDDUP 8(SI)(R8*2), X6 +#define MOVDDUP_8_XPTR_INCx3X__X8 LONG $0x120F46F2; WORD $0x0E44; BYTE $0x08 // MOVDDUP 8(SI)(R9*1), X8 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define INC_X R8 +#define INCx3_X R9 +#define INC_Y R10 +#define INCx3_Y R11 +#define NEG1 X15 +#define P_NEG1 X14 + +// func DotcInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) +TEXT ·DotcInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ n+48(FP), LEN // LEN = n + PXOR SUM, SUM // SUM = 0 + CMPQ LEN, $0 // if LEN == 0 { return } + JE dot_end + PXOR P_SUM, P_SUM // P_SUM = 0 + MOVQ ix+72(FP), INC_X // INC_X = ix * sizeof(complex128) + SHLQ $4, INC_X + MOVQ iy+80(FP), INC_Y // INC_Y = iy * sizeof(complex128) + SHLQ $4, INC_Y + LEAQ (X_PTR)(INC_X*1), X_PTR // X_PTR = &(X_PTR[ix]) + LEAQ (Y_PTR)(INC_Y*1), Y_PTR // Y_PTR = &(Y_PTR[iy]) + MOVQ incX+56(FP), INC_X // INC_X = incX + SHLQ $4, INC_X // INC_X *= sizeof(complex128) + MOVQ incY+64(FP), INC_Y // INC_Y = incY + SHLQ $4, INC_Y // INC_Y *= sizeof(complex128) + MOVSD $(-1.0), NEG1 + SHUFPD $0, NEG1, NEG1 // { -1, -1 } + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = n % 4 + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ dot_tail // if n <= 4 { goto dot_tail } + MOVAPS NEG1, P_NEG1 // Copy NEG1 to P_NEG1 for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = 3 * incX * sizeof(complex128) + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = 3 * incY * sizeof(complex128) + +dot_loop: // do { + MOVDDUP_XPTR__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_XPTR_INCX__X5 + MOVDDUP_XPTR_INCX_2__X7 + MOVDDUP_XPTR_INCx3X__X9 + + MOVDDUP_8_XPTR__X2 // X_i = { imag(x[i]), imag(x[i]) } + MOVDDUP_8_XPTR_INCX__X4 + MOVDDUP_8_XPTR_INCX_2__X6 + MOVDDUP_8_XPTR_INCx3X__X8 + + // X_i = { -imag(x[i]), -imag(x[i]) } + MULPD NEG1, X2 + MULPD P_NEG1, X4 + MULPD NEG1, X6 + MULPD P_NEG1, X8 + + // X_j = { imag(y[i]), real(y[i]) } + MOVUPS (Y_PTR), X10 + MOVUPS (Y_PTR)(INC_Y*1), X11 + MOVUPS (Y_PTR)(INC_Y*2), X12 + MOVUPS (Y_PTR)(INCx3_Y*1), X13 + + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X10, X3 + MULPD X11, X5 + MULPD X12, X7 + MULPD X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPD $0x1, X10, X10 + SHUFPD $0x1, X11, X11 + SHUFPD $0x1, X12, X12 + SHUFPD $0x1, X13, X13 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X10, X2 + MULPD X11, X4 + MULPD X12, X6 + MULPD X13, X8 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - 
imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // psum += result[i] + ADDPD X3, SUM + ADDPD X5, P_SUM + ADDPD X7, SUM + ADDPD X9, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[incY*4]) + + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + ADDPD P_SUM, SUM // sum += psum + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + MOVDDUP_XPTR__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_8_XPTR__X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD NEG1, X2 // X_i = { -imag(x[i]) , -imag(x[i]) } + MOVUPS (Y_PTR), X10 // X_j = { imag(y[i]) , real(y[i]) } + MULPD X10, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + SHUFPD $0x1, X10, X10 // X_j = { real(y[i]) , imag(y[i]) } + MULPD X10, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDPD X3, SUM // sum += result[i] + ADDQ INC_X, X_PTR // X_PTR += incX + ADDQ INC_Y, Y_PTR // Y_PTR += incY + DECQ TAIL + JNZ dot_tail // } while --TAIL > 0 + +dot_end: + MOVUPS SUM, sum+88(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcunitary_amd64.s new file mode 100644 index 0000000..adce85e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotcunitary_amd64.s @@ -0,0 +1,143 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_XPTR_IDX_8__X3 LONG $0x1C120FF2; BYTE $0xC6 // MOVDDUP (SI)(AX*8), X3 +#define MOVDDUP_16_XPTR_IDX_8__X5 LONG $0x6C120FF2; WORD $0x10C6 // MOVDDUP 16(SI)(AX*8), X5 +#define MOVDDUP_32_XPTR_IDX_8__X7 LONG $0x7C120FF2; WORD $0x20C6 // MOVDDUP 32(SI)(AX*8), X7 +#define MOVDDUP_48_XPTR_IDX_8__X9 LONG $0x120F44F2; WORD $0xC64C; BYTE $0x30 // MOVDDUP 48(SI)(AX*8), X9 + +#define MOVDDUP_XPTR_IIDX_8__X2 LONG $0x14120FF2; BYTE $0xD6 // MOVDDUP (SI)(DX*8), X2 +#define MOVDDUP_16_XPTR_IIDX_8__X4 LONG $0x64120FF2; WORD $0x10D6 // MOVDDUP 16(SI)(DX*8), X4 +#define MOVDDUP_32_XPTR_IIDX_8__X6 LONG $0x74120FF2; WORD $0x20D6 // MOVDDUP 32(SI)(DX*8), X6 +#define MOVDDUP_48_XPTR_IIDX_8__X8 LONG $0x120F44F2; WORD $0xD644; BYTE $0x30 // MOVDDUP 48(SI)(DX*8), X8 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define IDX AX +#define I_IDX DX +#define NEG1 X15 +#define P_NEG1 X14 + +// func DotcUnitary(x, y []complex128) (sum complex128) +TEXT ·DotcUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + PXOR SUM, SUM // sum = 0 + CMPQ LEN, $0 // if LEN == 0 { return } + JE dot_end + XORPS P_SUM, P_SUM // psum = 0 + MOVSD $(-1.0), NEG1 + SHUFPD $0, NEG1, NEG1 // { -1, -1 } + XORQ IDX, IDX // i := 0 + MOVQ $1, I_IDX // j := 1 + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = 
floor( TAIL / 4 ) + SHRQ $2, LEN // LEN = TAIL % 4 + JZ dot_tail // if LEN == 0 { goto dot_tail } + + MOVAPS NEG1, P_NEG1 // Copy NEG1 to P_NEG1 for pipelining + +dot_loop: // do { + MOVDDUP_XPTR_IDX_8__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_16_XPTR_IDX_8__X5 + MOVDDUP_32_XPTR_IDX_8__X7 + MOVDDUP_48_XPTR_IDX_8__X9 + + MOVDDUP_XPTR_IIDX_8__X2 // X_i = { imag(x[i]), imag(x[i]) } + MOVDDUP_16_XPTR_IIDX_8__X4 + MOVDDUP_32_XPTR_IIDX_8__X6 + MOVDDUP_48_XPTR_IIDX_8__X8 + + // X_i = { -imag(x[i]), -imag(x[i]) } + MULPD NEG1, X2 + MULPD P_NEG1, X4 + MULPD NEG1, X6 + MULPD P_NEG1, X8 + + // X_j = { imag(y[i]), real(y[i]) } + MOVUPS (Y_PTR)(IDX*8), X10 + MOVUPS 16(Y_PTR)(IDX*8), X11 + MOVUPS 32(Y_PTR)(IDX*8), X12 + MOVUPS 48(Y_PTR)(IDX*8), X13 + + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X10, X3 + MULPD X11, X5 + MULPD X12, X7 + MULPD X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPD $0x1, X10, X10 + SHUFPD $0x1, X11, X11 + SHUFPD $0x1, X12, X12 + SHUFPD $0x1, X13, X13 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X10, X2 + MULPD X11, X4 + MULPD X12, X6 + MULPD X13, X8 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // psum += result[i] + ADDPD X3, SUM + ADDPD X5, P_SUM + ADDPD X7, SUM + ADDPD X9, P_SUM + + ADDQ $8, IDX // IDX += 8 + ADDQ $8, I_IDX // I_IDX += 8 + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + ADDPD P_SUM, SUM // sum += psum + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + MOVDDUP_XPTR_IDX_8__X3 // X_(i+1) = { real(x[i]) , real(x[i]) } + MOVDDUP_XPTR_IIDX_8__X2 // X_i = { imag(x[i]) , imag(x[i]) } + MULPD NEG1, X2 // X_i = { -imag(x[i]) , -imag(x[i]) } + MOVUPS (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]) , real(y[i]) } + MULPD X10, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + SHUFPD $0x1, X10, X10 // X_j = { real(y[i]) , imag(y[i]) } + MULPD X10, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDPD X3, SUM // SUM += result[i] + ADDQ $2, IDX // IDX += 2 + ADDQ $2, I_IDX // I_IDX += 2 + DECQ TAIL + JNZ dot_tail // } while --TAIL > 0 + +dot_end: + MOVUPS SUM, sum+48(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuinc_amd64.s new file mode 100644 index 0000000..5b15444 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuinc_amd64.s @@ -0,0 +1,141 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_XPTR__X3 LONG $0x1E120FF2 // MOVDDUP (SI), X3 +#define MOVDDUP_XPTR_INCX__X5 LONG $0x120F42F2; WORD $0x062C // MOVDDUP (SI)(R8*1), X5 +#define MOVDDUP_XPTR_INCX_2__X7 LONG $0x120F42F2; WORD $0x463C // MOVDDUP (SI)(R8*2), X7 +#define MOVDDUP_XPTR_INCx3X__X9 LONG $0x120F46F2; WORD $0x0E0C // MOVDDUP (SI)(R9*1), X9 + +#define MOVDDUP_8_XPTR__X2 LONG $0x56120FF2; BYTE $0x08 // MOVDDUP 8(SI), X2 +#define MOVDDUP_8_XPTR_INCX__X4 LONG $0x120F42F2; WORD $0x0664; BYTE $0x08 // MOVDDUP 8(SI)(R8*1), X4 +#define MOVDDUP_8_XPTR_INCX_2__X6 LONG $0x120F42F2; WORD $0x4674; BYTE $0x08 // MOVDDUP 8(SI)(R8*2), X6 +#define MOVDDUP_8_XPTR_INCx3X__X8 LONG $0x120F46F2; WORD $0x0E44; BYTE $0x08 // MOVDDUP 8(SI)(R9*1), X8 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define INC_X R8 +#define INCx3_X R9 +#define INC_Y R10 +#define INCx3_Y R11 + +// func DotuInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) +TEXT ·DotuInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ n+48(FP), LEN // LEN = n + PXOR SUM, SUM // sum = 0 + CMPQ LEN, $0 // if LEN == 0 { return } + JE dot_end + MOVQ ix+72(FP), INC_X // INC_X = ix * sizeof(complex128) + SHLQ $4, INC_X + MOVQ iy+80(FP), INC_Y // INC_Y = iy * sizeof(complex128) + SHLQ $4, INC_Y + LEAQ (X_PTR)(INC_X*1), X_PTR // X_PTR = &(X_PTR[ix]) + LEAQ (Y_PTR)(INC_Y*1), Y_PTR // Y_PTR = &(Y_PTR[iy]) + MOVQ incX+56(FP), INC_X // INC_X = incX + SHLQ $4, INC_X // INC_X *= sizeof(complex128) + MOVQ incY+64(FP), INC_Y // INC_Y = incY + SHLQ $4, INC_Y // INC_Y *= sizeof(complex128) + MOVQ LEN, TAIL + ANDQ $3, TAIL // LEN = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ dot_tail // if LEN <= 4 { goto dot_tail } + PXOR P_SUM, P_SUM // psum = 0 + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = 3 * incX * sizeof(complex128) + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = 3 * incY * sizeof(complex128) + +dot_loop: // do { + MOVDDUP_XPTR__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_XPTR_INCX__X5 + MOVDDUP_XPTR_INCX_2__X7 + MOVDDUP_XPTR_INCx3X__X9 + + MOVDDUP_8_XPTR__X2 // X_i = { imag(x[i]), imag(x[i]) } + MOVDDUP_8_XPTR_INCX__X4 + MOVDDUP_8_XPTR_INCX_2__X6 + MOVDDUP_8_XPTR_INCx3X__X8 + + // X_j = { imag(y[i]), real(y[i]) } + MOVUPS (Y_PTR), X10 + MOVUPS (Y_PTR)(INC_Y*1), X11 + MOVUPS (Y_PTR)(INC_Y*2), X12 + MOVUPS (Y_PTR)(INCx3_Y*1), X13 + + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X10, X3 + MULPD X11, X5 + MULPD X12, X7 + MULPD X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPD $0x1, X10, X10 + SHUFPD $0x1, X11, X11 + SHUFPD $0x1, X12, X12 + SHUFPD $0x1, X13, X13 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X10, X2 + MULPD X11, X4 + MULPD X12, X6 + MULPD X13, X8 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // psum += result[i] + ADDPD X3, SUM + ADDPD X5, P_SUM + ADDPD X7, SUM + ADDPD X9, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // 
Y_PTR = &(Y_PTR[incY*4]) + + DECQ LEN + JNZ dot_loop // } while --BX > 0 + ADDPD P_SUM, SUM // sum += psum + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + MOVDDUP_XPTR__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_8_XPTR__X2 // X_i = { imag(x[i]), imag(x[i]) } + MOVUPS (Y_PTR), X10 // X_j = { imag(y[i]) , real(y[i]) } + MULPD X10, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + SHUFPD $0x1, X10, X10 // X_j = { real(y[i]) , imag(y[i]) } + MULPD X10, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDPD X3, SUM // sum += result[i] + ADDQ INC_X, X_PTR // X_PTR += incX + ADDQ INC_Y, Y_PTR // Y_PTR += incY + DECQ TAIL // --TAIL + JNZ dot_tail // } while TAIL > 0 + +dot_end: + MOVUPS SUM, sum+88(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuunitary_amd64.s new file mode 100644 index 0000000..a45f31e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dotuunitary_amd64.s @@ -0,0 +1,130 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_XPTR_IDX_8__X3 LONG $0x1C120FF2; BYTE $0xC6 // MOVDDUP (SI)(AX*8), X3 +#define MOVDDUP_16_XPTR_IDX_8__X5 LONG $0x6C120FF2; WORD $0x10C6 // MOVDDUP 16(SI)(AX*8), X5 +#define MOVDDUP_32_XPTR_IDX_8__X7 LONG $0x7C120FF2; WORD $0x20C6 // MOVDDUP 32(SI)(AX*8), X7 +#define MOVDDUP_48_XPTR_IDX_8__X9 LONG $0x120F44F2; WORD $0xC64C; BYTE $0x30 // MOVDDUP 48(SI)(AX*8), X9 + +#define MOVDDUP_XPTR_IIDX_8__X2 LONG $0x14120FF2; BYTE $0xD6 // MOVDDUP (SI)(DX*8), X2 +#define MOVDDUP_16_XPTR_IIDX_8__X4 LONG $0x64120FF2; WORD $0x10D6 // MOVDDUP 16(SI)(DX*8), X4 +#define MOVDDUP_32_XPTR_IIDX_8__X6 LONG $0x74120FF2; WORD $0x20D6 // MOVDDUP 32(SI)(DX*8), X6 +#define MOVDDUP_48_XPTR_IIDX_8__X8 LONG $0x120F44F2; WORD $0xD644; BYTE $0x30 // MOVDDUP 48(SI)(DX*8), X8 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define IDX AX +#define I_IDX DX + +// func DotuUnitary(x, y []complex128) (sum complex128) +TEXT ·DotuUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + PXOR SUM, SUM // SUM = 0 + CMPQ LEN, $0 // if LEN == 0 { return } + JE dot_end + PXOR P_SUM, P_SUM // P_SUM = 0 + XORQ IDX, IDX // IDX = 0 + MOVQ $1, DX // j = 1 + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = floor( LEN / 4 ) + SHRQ $2, LEN // LEN = LEN % 4 + JZ dot_tail // if LEN == 0 { goto dot_tail } + +dot_loop: // do { + MOVDDUP_XPTR_IDX_8__X3 // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_16_XPTR_IDX_8__X5 + MOVDDUP_32_XPTR_IDX_8__X7 + MOVDDUP_48_XPTR_IDX_8__X9 + + MOVDDUP_XPTR_IIDX_8__X2 // X_i = { imag(x[i]), imag(x[i]) } + MOVDDUP_16_XPTR_IIDX_8__X4 + MOVDDUP_32_XPTR_IIDX_8__X6 + MOVDDUP_48_XPTR_IIDX_8__X8 + + // X_j = { 
imag(y[i]), real(y[i]) } + MOVUPS (Y_PTR)(IDX*8), X10 + MOVUPS 16(Y_PTR)(IDX*8), X11 + MOVUPS 32(Y_PTR)(IDX*8), X12 + MOVUPS 48(Y_PTR)(IDX*8), X13 + + // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPD X10, X3 + MULPD X11, X5 + MULPD X12, X7 + MULPD X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPD $0x1, X10, X10 + SHUFPD $0x1, X11, X11 + SHUFPD $0x1, X12, X12 + SHUFPD $0x1, X13, X13 + + // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPD X10, X2 + MULPD X11, X4 + MULPD X12, X6 + MULPD X13, X8 + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + // psum += result[i] + ADDPD X3, SUM + ADDPD X5, P_SUM + ADDPD X7, SUM + ADDPD X9, P_SUM + + ADDQ $8, IDX // IDX += 8 + ADDQ $8, I_IDX // I_IDX += 8 + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + ADDPD P_SUM, SUM // SUM += P_SUM + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + MOVDDUP_XPTR_IDX_8__X3 // X_(i+1) = { real(x[i] , real(x[i]) } + MOVDDUP_XPTR_IIDX_8__X2 // X_i = { imag(x[i]) , imag(x[i]) } + MOVUPS (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]) , real(y[i]) } + MULPD X10, X3 // X_(i+1) = { imag(a) * real(x[i]), real(a) * real(x[i]) } + SHUFPD $0x1, X10, X10 // X_j = { real(y[i]) , imag(y[i]) } + MULPD X10, X2 // X_i = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDPD X3, SUM // psum += result[i] + ADDQ $2, IDX // IDX += 2 + ADDQ $2, I_IDX // I_IDX += 2 + DECQ TAIL // --TAIL + JNZ dot_tail // } while TAIL > 0 + +dot_end: + MOVUPS SUM, sum+48(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalinc_amd64.s new file mode 100644 index 0000000..d8fd54d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalinc_amd64.s @@ -0,0 +1,69 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SRC SI +#define DST SI +#define LEN CX +#define TAIL BX +#define INC R9 +#define INC3 R10 +#define ALPHA X0 +#define ALPHA_2 X1 + +#define MOVDDUP_ALPHA LONG $0x44120FF2; WORD $0x0824 // MOVDDUP 8(SP), X0 + +// func DscalInc(alpha float64, x []complex128, n, inc uintptr) +TEXT ·DscalInc(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SRC // SRC = &x + MOVQ n+32(FP), LEN // LEN = n + CMPQ LEN, $0 // if LEN == 0 { return } + JE dscal_end + + MOVDDUP_ALPHA // ALPHA = alpha + MOVQ inc+40(FP), INC // INC = inc + SHLQ $4, INC // INC = INC * sizeof(complex128) + LEAQ (INC)(INC*2), INC3 // INC3 = 3 * INC + MOVUPS ALPHA, ALPHA_2 // Copy ALPHA and ALPHA_2 for pipelining + MOVQ LEN, TAIL // TAIL = LEN + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ dscal_tail // if LEN == 0 { goto dscal_tail } + +dscal_loop: // do { + MOVUPS (SRC), X2 // X_i = x[i] + MOVUPS (SRC)(INC*1), X3 + MOVUPS (SRC)(INC*2), X4 + MOVUPS (SRC)(INC3*1), X5 + + MULPD ALPHA, X2 // X_i *= ALPHA + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + MOVUPS X2, (DST) // x[i] = X_i + MOVUPS X3, (DST)(INC*1) + MOVUPS X4, (DST)(INC*2) + MOVUPS X5, (DST)(INC3*1) + + LEAQ (SRC)(INC*4), SRC // SRC += INC*4 + DECQ LEN + JNZ dscal_loop // } while --LEN > 0 + +dscal_tail: + ANDQ $3, TAIL // TAIL = TAIL % 4 + JE dscal_end // if TAIL == 0 { return } + +dscal_tail_loop: // do { + MOVUPS (SRC), X2 // X_i = x[i] + MULPD ALPHA, X2 // X_i *= ALPHA + MOVUPS X2, (DST) // x[i] = X_i + ADDQ INC, SRC // SRC += INC + DECQ TAIL + JNZ dscal_tail_loop // } while --TAIL > 0 + +dscal_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalunitary_amd64.s new file mode 100644 index 0000000..6ed900a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/dscalunitary_amd64.s @@ -0,0 +1,66 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SRC SI +#define DST SI +#define LEN CX +#define IDX AX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_2 X1 + +#define MOVDDUP_ALPHA LONG $0x44120FF2; WORD $0x0824 // MOVDDUP 8(SP), X0 + +// func DscalUnitary(alpha float64, x []complex128) +TEXT ·DscalUnitary(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SRC // SRC = &x + MOVQ x_len+16(FP), LEN // LEN = len(x) + CMPQ LEN, $0 // if LEN == 0 { return } + JE dscal_end + + MOVDDUP_ALPHA // ALPHA = alpha + XORQ IDX, IDX // IDX = 0 + MOVUPS ALPHA, ALPHA_2 // Copy ALPHA to ALPHA_2 for pipelining + MOVQ LEN, TAIL // TAIL = LEN + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ dscal_tail // if LEN == 0 { goto dscal_tail } + +dscal_loop: // do { + MOVUPS (SRC)(IDX*8), X2 // X_i = x[i] + MOVUPS 16(SRC)(IDX*8), X3 + MOVUPS 32(SRC)(IDX*8), X4 + MOVUPS 48(SRC)(IDX*8), X5 + + MULPD ALPHA, X2 // X_i *= ALPHA + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + MOVUPS X2, (DST)(IDX*8) // x[i] = X_i + MOVUPS X3, 16(DST)(IDX*8) + MOVUPS X4, 32(DST)(IDX*8) + MOVUPS X5, 48(DST)(IDX*8) + + ADDQ $8, IDX // IDX += 8 + DECQ LEN + JNZ dscal_loop // } while --LEN > 0 + +dscal_tail: + ANDQ $3, TAIL // TAIL = TAIL % 4 + JZ dscal_end // if TAIL == 0 { return } + +dscal_tail_loop: // do { + MOVUPS (SRC)(IDX*8), X2 // X_i = x[i] + MULPD ALPHA, X2 // X_i *= ALPHA + MOVUPS X2, (DST)(IDX*8) // x[i] = X_i + ADDQ $2, IDX // IDX += 2 + DECQ TAIL + JNZ dscal_tail_loop // } while --TAIL > 0 + +dscal_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/scal.go b/vendor/gonum.org/v1/gonum/internal/asm/c128/scal.go new file mode 100644 index 0000000..47a80e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/scal.go @@ -0,0 +1,31 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package c128 + +// ScalUnitaryTo is +// for i, v := range x { +// dst[i] = alpha * v +// } +func ScalUnitaryTo(dst []complex128, alpha complex128, x []complex128) { + for i, v := range x { + dst[i] = alpha * v + } +} + +// ScalIncTo is +// var idst, ix uintptr +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha * x[ix] +// ix += incX +// idst += incDst +// } +func ScalIncTo(dst []complex128, incDst uintptr, alpha complex128, x []complex128, n, incX uintptr) { + var idst, ix uintptr + for i := 0; i < int(n); i++ { + dst[idst] = alpha * x[ix] + ix += incX + idst += incDst + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/scalUnitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/scalUnitary_amd64.s new file mode 100644 index 0000000..f08590e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/scalUnitary_amd64.s @@ -0,0 +1,116 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SRC SI +#define DST SI +#define LEN CX +#define IDX AX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_C X1 +#define ALPHA2 X10 +#define ALPHA_C2 X11 + +#define MOVDDUP_X2_X3 LONG $0xDA120FF2 // MOVDDUP X2, X3 +#define MOVDDUP_X4_X5 LONG $0xEC120FF2 // MOVDDUP X4, X5 +#define MOVDDUP_X6_X7 LONG $0xFE120FF2 // MOVDDUP X6, X7 +#define MOVDDUP_X8_X9 LONG $0x120F45F2; BYTE $0xC8 // MOVDDUP X8, X9 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +// func ScalUnitary(alpha complex128, x []complex128) +TEXT ·ScalUnitary(SB), NOSPLIT, $0 + MOVQ x_base+16(FP), SRC // SRC = &x + MOVQ x_len+24(FP), LEN // LEN = len(x) + CMPQ LEN, $0 // if LEN == 0 { return } + JE scal_end + + MOVUPS alpha+0(FP), ALPHA // ALPHA = { imag(alpha), real(alpha) } + MOVAPS ALPHA, ALPHA_C + SHUFPD $0x1, ALPHA_C, ALPHA_C // ALPHA_C = { real(alpha), imag(alpha) } + + XORQ IDX, IDX // IDX = 0 + MOVAPS ALPHA, ALPHA2 // Copy ALPHA and ALPHA_C for pipelining + MOVAPS ALPHA_C, ALPHA_C2 + MOVQ LEN, TAIL + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ scal_tail // if BX == 0 { goto scal_tail } + +scal_loop: // do { + MOVUPS (SRC)(IDX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS 16(SRC)(IDX*8), X4 + MOVUPS 32(SRC)(IDX*8), X6 + MOVUPS 48(SRC)(IDX*8), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(ALPHA) * imag(x[i]), imag(ALPHA) * imag(x[i]) } + // X_(i+1) = { imag(ALPHA) * real(x[i]), real(ALPHA) * real(x[i]) } + MULPD ALPHA_C, X2 + MULPD ALPHA, X3 + MULPD ALPHA_C2, X4 + MULPD ALPHA2, X5 + MULPD ALPHA_C, X6 + MULPD ALPHA, X7 + MULPD ALPHA_C2, X8 + MULPD ALPHA2, X9 + + // X_(i+1) = { + // imag(result[i]): imag(ALPHA)*real(x[i]) + real(ALPHA)*imag(x[i]), + // real(result[i]): real(ALPHA)*real(x[i]) - imag(ALPHA)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + MOVUPS X3, (DST)(IDX*8) // x[i] = X_(i+1) + MOVUPS X5, 16(DST)(IDX*8) + MOVUPS X7, 32(DST)(IDX*8) + MOVUPS X9, 48(DST)(IDX*8) + ADDQ $8, IDX // IDX += 8 + DECQ LEN + JNZ scal_loop // } while --LEN > 0 + +scal_tail: + ANDQ $3, TAIL // TAIL = TAIL % 4 + JZ scal_end // if TAIL == 0 { return } + +scal_tail_loop: // do { + MOVUPS (SRC)(IDX*8), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD ALPHA_C, X2 // X_i = { real(ALPHA) * imag(x[i]), imag(ALPHA) * imag(x[i]) } + MULPD ALPHA, X3 // X_(i+1) = { imag(ALPHA) * real(x[i]), real(ALPHA) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(ALPHA)*real(x[i]) + real(ALPHA)*imag(x[i]), + // real(result[i]): real(ALPHA)*real(x[i]) - imag(ALPHA)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + MOVUPS X3, (DST)(IDX*8) // x[i] = X_(i+1) + ADDQ $2, IDX // IDX += 2 + DECQ TAIL + JNZ scal_tail_loop // } while --LEN > 0 + +scal_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/scalinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c128/scalinc_amd64.s new file mode 100644 index 0000000..5829ee5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/scalinc_amd64.s @@ -0,0 +1,121 @@ +// 
Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SRC SI +#define DST SI +#define LEN CX +#define TAIL BX +#define INC R9 +#define INC3 R10 +#define ALPHA X0 +#define ALPHA_C X1 +#define ALPHA2 X10 +#define ALPHA_C2 X11 + +#define MOVDDUP_X2_X3 LONG $0xDA120FF2 // MOVDDUP X2, X3 +#define MOVDDUP_X4_X5 LONG $0xEC120FF2 // MOVDDUP X4, X5 +#define MOVDDUP_X6_X7 LONG $0xFE120FF2 // MOVDDUP X6, X7 +#define MOVDDUP_X8_X9 LONG $0x120F45F2; BYTE $0xC8 // MOVDDUP X8, X9 + +#define ADDSUBPD_X2_X3 LONG $0xDAD00F66 // ADDSUBPD X2, X3 +#define ADDSUBPD_X4_X5 LONG $0xECD00F66 // ADDSUBPD X4, X5 +#define ADDSUBPD_X6_X7 LONG $0xFED00F66 // ADDSUBPD X6, X7 +#define ADDSUBPD_X8_X9 LONG $0xD00F4566; BYTE $0xC8 // ADDSUBPD X8, X9 + +// func ScalInc(alpha complex128, x []complex128, n, inc uintptr) +TEXT ·ScalInc(SB), NOSPLIT, $0 + MOVQ x_base+16(FP), SRC // SRC = &x + MOVQ n+40(FP), LEN // LEN = len(x) + CMPQ LEN, $0 + JE scal_end // if LEN == 0 { return } + + MOVQ inc+48(FP), INC // INC = inc + SHLQ $4, INC // INC = INC * sizeof(complex128) + LEAQ (INC)(INC*2), INC3 // INC3 = 3 * INC + + MOVUPS alpha+0(FP), ALPHA // ALPHA = { imag(alpha), real(alpha) } + MOVAPS ALPHA, ALPHA_C + SHUFPD $0x1, ALPHA_C, ALPHA_C // ALPHA_C = { real(alpha), imag(alpha) } + + MOVAPS ALPHA, ALPHA2 // Copy ALPHA and ALPHA_C for pipelining + MOVAPS ALPHA_C, ALPHA_C2 + MOVQ LEN, TAIL + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ scal_tail // if BX == 0 { goto scal_tail } + +scal_loop: // do { + MOVUPS (SRC), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVUPS (SRC)(INC*1), X4 + MOVUPS (SRC)(INC*2), X6 + MOVUPS (SRC)(INC3*1), X8 + + // X_(i+1) = { real(x[i], real(x[i]) } + MOVDDUP_X2_X3 + MOVDDUP_X4_X5 + MOVDDUP_X6_X7 + MOVDDUP_X8_X9 + + // X_i = { imag(x[i]), imag(x[i]) } + SHUFPD $0x3, X2, X2 + SHUFPD $0x3, X4, X4 + SHUFPD $0x3, X6, X6 + SHUFPD $0x3, X8, X8 + + // X_i = { real(ALPHA) * imag(x[i]), imag(ALPHA) * imag(x[i]) } + // X_(i+1) = { imag(ALPHA) * real(x[i]), real(ALPHA) * real(x[i]) } + MULPD ALPHA_C, X2 + MULPD ALPHA, X3 + MULPD ALPHA_C2, X4 + MULPD ALPHA2, X5 + MULPD ALPHA_C, X6 + MULPD ALPHA, X7 + MULPD ALPHA_C2, X8 + MULPD ALPHA2, X9 + + // X_(i+1) = { + // imag(result[i]): imag(ALPHA)*real(x[i]) + real(ALPHA)*imag(x[i]), + // real(result[i]): real(ALPHA)*real(x[i]) - imag(ALPHA)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + ADDSUBPD_X4_X5 + ADDSUBPD_X6_X7 + ADDSUBPD_X8_X9 + + MOVUPS X3, (DST) // x[i] = X_(i+1) + MOVUPS X5, (DST)(INC*1) + MOVUPS X7, (DST)(INC*2) + MOVUPS X9, (DST)(INC3*1) + + LEAQ (SRC)(INC*4), SRC // SRC = &(SRC[inc*4]) + DECQ LEN + JNZ scal_loop // } while --BX > 0 + +scal_tail: + ANDQ $3, TAIL // TAIL = TAIL % 4 + JE scal_end // if TAIL == 0 { return } + +scal_tail_loop: // do { + MOVUPS (SRC), X2 // X_i = { imag(x[i]), real(x[i]) } + MOVDDUP_X2_X3 // X_(i+1) = { real(x[i], real(x[i]) } + SHUFPD $0x3, X2, X2 // X_i = { imag(x[i]), imag(x[i]) } + MULPD ALPHA_C, X2 // X_i = { real(ALPHA) * imag(x[i]), imag(ALPHA) * imag(x[i]) } + MULPD ALPHA, X3 // X_(i+1) = { imag(ALPHA) * real(x[i]), real(ALPHA) * real(x[i]) } + + // X_(i+1) = { + // imag(result[i]): imag(ALPHA)*real(x[i]) + real(ALPHA)*imag(x[i]), + // real(result[i]): real(ALPHA)*real(x[i]) - imag(ALPHA)*imag(x[i]) + // } + ADDSUBPD_X2_X3 + + MOVUPS X3, (DST) // x[i] = X_i + ADDQ INC, SRC // SRC = &(SRC[incX]) + DECQ TAIL + JNZ scal_tail_loop // } while --TAIL > 0 + +scal_end: + RET 
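
Note (illustration only, not part of the patch): every c128 kernel added above uses the same SSE3 idiom — MOVDDUP duplicates the real half of x[i], SHUFPD $0x3 duplicates the imaginary half, two MULPDs form the four partial products against alpha, and ADDSUBPD folds them into real(a)*real(x)-imag(a)*imag(x) in the low lane and imag(a)*real(x)+real(a)*imag(x) in the high lane. The scalar Go sketch below mirrors that decomposition for a single element so the register comments can be checked against plain complex arithmetic; the helper name mulDecomposed is ours and appears nowhere in the vendored sources.

// Illustration only: scalar equivalent of the MOVDDUP/SHUFPD/ADDSUBPD
// complex multiply used by the c128 kernels above. mulDecomposed is a
// hypothetical helper, not part of the vendored gonum code.
package main

import "fmt"

func mulDecomposed(a, x complex128) complex128 {
	ar, ai := real(a), imag(a)
	xr, xi := real(x), imag(x)
	rr := ar * xr // from MULPD X0, X3: real(a) * real(x[i])
	ir := ai * xr // from MULPD X0, X3: imag(a) * real(x[i])
	ii := ai * xi // from MULPD X1, X2: imag(a) * imag(x[i])
	ri := ar * xi // from MULPD X1, X2: real(a) * imag(x[i])
	// ADDSUBPD: subtract in the low (real) lane, add in the high (imag) lane.
	return complex(rr-ii, ir+ri)
}

func main() {
	a, x := complex(2.0, 3.0), complex(5.0, -1.0)
	fmt.Println(mulDecomposed(a, x), a*x) // both print (13+13i)
}
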
diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_amd64.go new file mode 100644 index 0000000..ad6b23c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_amd64.go @@ -0,0 +1,96 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +package c128 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha complex128, x, y []complex128) + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []complex128, alpha complex128, x, y []complex128) + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []complex128, incDst, idst uintptr, alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) + +// DscalUnitary is +// for i, v := range x { +// x[i] = complex(real(v)*alpha, imag(v)*alpha) +// } +func DscalUnitary(alpha float64, x []complex128) + +// DscalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] = complex(real(x[ix])*alpha, imag(x[ix])*alpha) +// ix += inc +// } +func DscalInc(alpha float64, x []complex128, n, inc uintptr) + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha complex128, x []complex128, n, inc uintptr) + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha complex128, x []complex128) + +// DotcUnitary is +// for i, v := range x { +// sum += y[i] * cmplx.Conj(v) +// } +// return sum +func DotcUnitary(x, y []complex128) (sum complex128) + +// DotcInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * cmplx.Conj(x[ix]) +// ix += incX +// iy += incY +// } +// return sum +func DotcInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) + +// DotuUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotuUnitary(x, y []complex128) (sum complex128) + +// DotuInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotuInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_noasm.go new file mode 100644 index 0000000..6313e57 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c128/stubs_noasm.go @@ -0,0 +1,163 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !amd64 noasm appengine safe + +package c128 + +import "math/cmplx" + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha complex128, x, y []complex128) { + for i, v := range x { + y[i] += alpha * v + } +} + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []complex128, alpha complex128, x, y []complex128) { + for i, v := range x { + dst[i] = alpha*v + y[i] + } +} + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + y[iy] += alpha * x[ix] + ix += incX + iy += incY + } +} + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []complex128, incDst, idst uintptr, alpha complex128, x, y []complex128, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + dst[idst] = alpha*x[ix] + y[iy] + ix += incX + iy += incY + idst += incDst + } +} + +// DscalUnitary is +// for i, v := range x { +// x[i] = complex(real(v)*alpha, imag(v)*alpha) +// } +func DscalUnitary(alpha float64, x []complex128) { + for i, v := range x { + x[i] = complex(real(v)*alpha, imag(v)*alpha) + } +} + +// DscalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] = complex(real(x[ix])*alpha, imag(x[ix])*alpha) +// ix += inc +// } +func DscalInc(alpha float64, x []complex128, n, inc uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] = complex(real(x[ix])*alpha, imag(x[ix])*alpha) + ix += inc + } +} + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha complex128, x []complex128, n, inc uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] *= alpha + ix += inc + } +} + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha complex128, x []complex128) { + for i := range x { + x[i] *= alpha + } +} + +// DotcUnitary is +// for i, v := range x { +// sum += y[i] * cmplx.Conj(v) +// } +// return sum +func DotcUnitary(x, y []complex128) (sum complex128) { + for i, v := range x { + sum += y[i] * cmplx.Conj(v) + } + return sum +} + +// DotcInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * cmplx.Conj(x[ix]) +// ix += incX +// iy += incY +// } +// return sum +func DotcInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) { + for i := 0; i < int(n); i++ { + sum += y[iy] * cmplx.Conj(x[ix]) + ix += incX + iy += incY + } + return sum +} + +// DotuUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotuUnitary(x, y []complex128) (sum complex128) { + for i, v := range x { + sum += y[i] * v + } + return sum +} + +// DotuInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotuInc(x, y []complex128, n, incX, incY, ix, iy uintptr) (sum complex128) { + for i := 0; i < int(n); i++ { + sum += y[iy] * x[ix] + ix += incX + iy += incY + } + return sum +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyinc_amd64.s new file mode 100644 index 0000000..5f10511 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyinc_amd64.s @@ -0,0 +1,151 @@ +// Copyright ©2016 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVSHDUP X3, X2 +#define MOVSHDUP_X3_X2 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xD3 +// MOVSLDUP X3, X3 +#define MOVSLDUP_X3_X3 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xDB +// ADDSUBPS X2, X3 +#define ADDSUBPS_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA + +// MOVSHDUP X5, X4 +#define MOVSHDUP_X5_X4 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xE5 +// MOVSLDUP X5, X5 +#define MOVSLDUP_X5_X5 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xED +// ADDSUBPS X4, X5 +#define ADDSUBPS_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC + +// MOVSHDUP X7, X6 +#define MOVSHDUP_X7_X6 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xF7 +// MOVSLDUP X7, X7 +#define MOVSLDUP_X7_X7 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xFF +// ADDSUBPS X6, X7 +#define ADDSUBPS_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE + +// MOVSHDUP X9, X8 +#define MOVSHDUP_X9_X8 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x16; BYTE $0xC1 +// MOVSLDUP X9, X9 +#define MOVSLDUP_X9_X9 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC9 +// ADDSUBPS X8, X9 +#define ADDSUBPS_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyInc(alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyInc(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SI // SI = &x + MOVQ y_base+32(FP), DI // DI = &y + MOVQ n+56(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JE axpyi_end + MOVQ ix+80(FP), R8 // R8 = ix + MOVQ iy+88(FP), R9 // R9 = iy + LEAQ (SI)(R8*8), SI // SI = &(x[ix]) + LEAQ (DI)(R9*8), DI // DI = &(y[iy]) + MOVQ DI, DX // DX = DI // Read/Write pointers + MOVQ incX+64(FP), R8 // R8 = incX + SHLQ $3, R8 // R8 *= sizeof(complex64) + MOVQ incY+72(FP), R9 // R9 = incY + SHLQ $3, R9 // R9 *= sizeof(complex64) + MOVSD alpha+0(FP), X0 // X0 = { 0, 0, imag(a), real(a) } + MOVAPS X0, X1 + SHUFPS $0x11, X1, X1 // X1 = { 0, 0, real(a), imag(a) } + MOVAPS X0, X10 // Copy X0 and X1 for pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ axpyi_tail // if BX == 0 { goto axpyi_tail } + +axpyi_loop: // do { + MOVSD (SI), X3 // X_i = { imag(x[i+1]), real(x[i+1]) } + MOVSD (SI)(R8*1), X5 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + MOVSD (SI), X7 + MOVSD (SI)(R8*1), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPS X1, X2 + MULPS X0, X3 + MULPS X11, X4 + MULPS X10, X5 + MULPS X1, X6 + MULPS X0, X7 + MULPS X11, X8 + MULPS X10, X9 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + MOVSD (DX), X2 + MOVSD (DX)(R9*1), X4 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + MOVSD (DX), X6 + MOVSD (DX)(R9*1), X8 + ADDPS X2, X3 + ADDPS X4, X5 + ADDPS X6, X7 + ADDPS X8, X9 + + MOVSD X3, (DI) // y[i] = X_i + MOVSD X5, (DI)(R9*1) + LEAQ (DI)(R9*2), DI // DI = &(DI[incDst]) + MOVSD X7, (DI) + MOVSD X9, (DI)(R9*1) + LEAQ (SI)(R8*2), 
SI // SI = &(SI[incX*2]) + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R9*2), DI // DI = &(DI[incDst]) + DECQ BX + JNZ axpyi_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE axpyi_end + +axpyi_tail: // do { + MOVSD (SI), X3 // X_i = { imag(x[i+1]), real(x[i+1]) } + MOVSHDUP_X3_X2 // X_(i-1) = { real(x[i]), real(x[i]) } + MOVSLDUP_X3_X3 // X_i = { imag(x[i]), imag(x[i]) } + + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]) } + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X1, X2 + MULPS X0, X3 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // } + ADDSUBPS_X2_X3 // (ai*x1r+ar*x1i, ar*x1r-ai*x1i) + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + MOVSD (DI), X4 + ADDPS X4, X3 + MOVSD X3, (DI) // y[i] = X_i + ADDQ R8, SI // SI += incX + ADDQ R9, DI // DI += incY + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyincto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyincto_amd64.s new file mode 100644 index 0000000..5b0e284 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyincto_amd64.s @@ -0,0 +1,156 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVSHDUP X3, X2 +#define MOVSHDUP_X3_X2 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xD3 +// MOVSLDUP X3, X3 +#define MOVSLDUP_X3_X3 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xDB +// ADDSUBPS X2, X3 +#define ADDSUBPS_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA + +// MOVSHDUP X5, X4 +#define MOVSHDUP_X5_X4 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xE5 +// MOVSLDUP X5, X5 +#define MOVSLDUP_X5_X5 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xED +// ADDSUBPS X4, X5 +#define ADDSUBPS_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC + +// MOVSHDUP X7, X6 +#define MOVSHDUP_X7_X6 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xF7 +// MOVSLDUP X7, X7 +#define MOVSLDUP_X7_X7 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xFF +// ADDSUBPS X6, X7 +#define ADDSUBPS_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE + +// MOVSHDUP X9, X8 +#define MOVSHDUP_X9_X8 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x16; BYTE $0xC1 +// MOVSLDUP X9, X9 +#define MOVSLDUP_X9_X9 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC9 +// ADDSUBPS X8, X9 +#define ADDSUBPS_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyIncTo(dst []complex64, incDst, idst uintptr, alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyIncTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+48(FP), SI // SI = &x + MOVQ y_base+72(FP), DX // DX = &y + MOVQ n+96(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JE axpyi_end + MOVQ ix+120(FP), R8 // Load the first index + MOVQ iy+128(FP), R9 + MOVQ idst+32(FP), R10 + LEAQ (SI)(R8*8), SI // SI = &(x[ix]) + LEAQ (DX)(R9*8), DX // DX = &(y[iy]) + LEAQ (DI)(R10*8), DI // DI = &(dst[idst]) + MOVQ incX+104(FP), R8 // Incrementors*8 for easy iteration (ADDQ) + SHLQ $3, R8 + MOVQ incY+112(FP), R9 + SHLQ $3, R9 + MOVQ incDst+24(FP), R10 + SHLQ $3, R10 + MOVSD alpha+40(FP), X0 // X0 = { 0, 0, imag(a), real(a) } + MOVAPS X0, X1 + SHUFPS $0x11, X1, X1 // X1 = { 0, 0, real(a), imag(a) } + MOVAPS X0, X10 // Copy X0 and X1 for 
pipelining + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $3, CX // CX = n % 4 + SHRQ $2, BX // BX = floor( n / 4 ) + JZ axpyi_tail // if BX == 0 { goto axpyi_tail } + +axpyi_loop: // do { + MOVSD (SI), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSD (SI)(R8*1), X5 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + MOVSD (SI), X7 + MOVSD (SI)(R8*1), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]) } + MULPS X1, X2 + MULPS X0, X3 + MULPS X11, X4 + MULPS X10, X5 + MULPS X1, X6 + MULPS X0, X7 + MULPS X11, X8 + MULPS X10, X9 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + MOVSD (DX), X2 + MOVSD (DX)(R9*1), X4 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + MOVSD (DX), X6 + MOVSD (DX)(R9*1), X8 + ADDPS X2, X3 + ADDPS X4, X5 + ADDPS X6, X7 + ADDPS X8, X9 + + MOVSD X3, (DI) // y[i] = X_i + MOVSD X5, (DI)(R10*1) + LEAQ (DI)(R10*2), DI // DI = &(DI[incDst]) + MOVSD X7, (DI) + MOVSD X9, (DI)(R10*1) + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R10*2), DI // DI = &(DI[incDst]) + DECQ BX + JNZ axpyi_loop // } while --BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE axpyi_end + +axpyi_tail: + MOVSD (SI), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]) } + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X1, X2 + MULPS X0, X3 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // } + ADDSUBPS_X2_X3 + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]) } + MOVSD (DX), X4 + ADDPS X4, X3 + MOVSD X3, (DI) // y[i] = X_i + ADDQ R8, SI // SI += incX + ADDQ R9, DX // DX += incY + ADDQ R10, DI // DI += incDst + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitary_amd64.s new file mode 100644 index 0000000..c38cb3c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitary_amd64.s @@ -0,0 +1,160 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVSHDUP X3, X2 +#define MOVSHDUP_X3_X2 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xD3 +// MOVSLDUP X3, X3 +#define MOVSLDUP_X3_X3 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xDB +// ADDSUBPS X2, X3 +#define ADDSUBPS_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA + +// MOVSHDUP X5, X4 +#define MOVSHDUP_X5_X4 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xE5 +// MOVSLDUP X5, X5 +#define MOVSLDUP_X5_X5 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xED +// ADDSUBPS X4, X5 +#define ADDSUBPS_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC + +// MOVSHDUP X7, X6 +#define MOVSHDUP_X7_X6 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xF7 +// MOVSLDUP X7, X7 +#define MOVSLDUP_X7_X7 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xFF +// ADDSUBPS X6, X7 +#define ADDSUBPS_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE + +// MOVSHDUP X9, X8 +#define MOVSHDUP_X9_X8 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x16; BYTE $0xC1 +// MOVSLDUP X9, X9 +#define MOVSLDUP_X9_X9 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC9 +// ADDSUBPS X8, X9 +#define ADDSUBPS_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyUnitary(alpha complex64, x, y []complex64) +TEXT ·AxpyUnitary(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SI // SI = &x + MOVQ y_base+32(FP), DI // DI = &y + MOVQ x_len+16(FP), CX // CX = min( len(x), len(y) ) + CMPQ y_len+40(FP), CX + CMOVQLE y_len+40(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + PXOR X0, X0 // Clear work registers and cache-align loop + PXOR X1, X1 + MOVSD alpha+0(FP), X0 // X0 = { 0, 0, imag(a), real(a) } + SHUFPD $0, X0, X0 // X0 = { imag(a), real(a), imag(a), real(a) } + MOVAPS X0, X1 + SHUFPS $0x11, X1, X1 // X1 = { real(a), imag(a), real(a), imag(a) } + XORQ AX, AX // i = 0 + MOVQ DI, BX // Align on 16-byte boundary for ADDPS + ANDQ $15, BX // BX = &y & 15 + JZ caxy_no_trim // if BX == 0 { goto caxy_no_trim } + + // Trim first value in unaligned buffer + XORPS X2, X2 // Clear work registers and cache-align loop + XORPS X3, X3 + XORPS X4, X4 + MOVSD (SI)(AX*8), X3 // X3 = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X2 = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X3 = { real(x[i]), real(x[i]) } + MULPS X1, X2 // X2 = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X0, X3 // X3 = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X3 = { imag(a)*real(x[i]) + real(a)*imag(x[i]), real(a)*real(x[i]) - imag(a)*imag(x[i]) } + ADDSUBPS_X2_X3 + MOVSD (DI)(AX*8), X4 // X3 += y[i] + ADDPS X4, X3 + MOVSD X3, (DI)(AX*8) // y[i] = X3 + INCQ AX // i++ + DECQ CX // --CX + JZ caxy_end // if CX == 0 { return } + +caxy_no_trim: + MOVAPS X0, X10 // Copy X0 and X1 for pipelineing + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $7, CX // CX = n % 8 + SHRQ $3, BX // BX = floor( n / 8 ) + JZ caxy_tail // if BX == 0 { goto caxy_tail } + +caxy_loop: // do { + // X_i = { imag(x[i]), real(x[i]), imag(x[i+1]), real(x[i+1]) } + MOVUPS (SI)(AX*8), X3 + MOVUPS 16(SI)(AX*8), X5 + MOVUPS 32(SI)(AX*8), X7 + MOVUPS 48(SI)(AX*8), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i]+1), imag(x[i]+1) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]), + // imag(a) * real(x[i+1]), real(a) * real(x[i+1]) } + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]), + // real(a) * 
imag(x[i+1]), imag(a) * imag(x[i+1]) } + MULPS X1, X2 + MULPS X0, X3 + MULPS X11, X4 + MULPS X10, X5 + MULPS X1, X6 + MULPS X0, X7 + MULPS X11, X8 + MULPS X10, X9 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // imag(result[i+1]): imag(a)*real(x[i+1]) + real(a)*imag(x[i+1]), + // real(result[i+1]): real(a)*real(x[i+1]) - imag(a)*imag(x[i+1]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]), + // imag(result[i+1]) + imag(y[i+1]), real(result[i+1]) + real(y[i+1]) } + ADDPS (DI)(AX*8), X3 + ADDPS 16(DI)(AX*8), X5 + ADDPS 32(DI)(AX*8), X7 + ADDPS 48(DI)(AX*8), X9 + MOVUPS X3, (DI)(AX*8) // y[i:i+1] = X_i + MOVUPS X5, 16(DI)(AX*8) + MOVUPS X7, 32(DI)(AX*8) + MOVUPS X9, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + DECQ BX // --BX + JNZ caxy_loop // } while BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + +caxy_tail: // do { + MOVSD (SI)(AX*8), X3 // X3 = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X2 = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X3 = { real(x[i]), real(x[i]) } + MULPS X1, X2 // X2 = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X0, X3 // X3 = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X3 = { imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(a)*real(x[i]) - imag(a)*imag(x[i]) } + ADDSUBPS_X2_X3 + MOVSD (DI)(AX*8), X4 // X3 += y[i] + ADDPS X4, X3 + MOVSD X3, (DI)(AX*8) // y[i] = X3 + INCQ AX // ++i + LOOP caxy_tail // } while --CX > 0 + +caxy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitaryto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitaryto_amd64.s new file mode 100644 index 0000000..fee4bb9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/axpyunitaryto_amd64.s @@ -0,0 +1,157 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// MOVSHDUP X3, X2 +#define MOVSHDUP_X3_X2 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xD3 +// MOVSLDUP X3, X3 +#define MOVSLDUP_X3_X3 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xDB +// ADDSUBPS X2, X3 +#define ADDSUBPS_X2_X3 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xDA + +// MOVSHDUP X5, X4 +#define MOVSHDUP_X5_X4 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xE5 +// MOVSLDUP X5, X5 +#define MOVSLDUP_X5_X5 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xED +// ADDSUBPS X4, X5 +#define ADDSUBPS_X4_X5 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xEC + +// MOVSHDUP X7, X6 +#define MOVSHDUP_X7_X6 BYTE $0xF3; BYTE $0x0F; BYTE $0x16; BYTE $0xF7 +// MOVSLDUP X7, X7 +#define MOVSLDUP_X7_X7 BYTE $0xF3; BYTE $0x0F; BYTE $0x12; BYTE $0xFF +// ADDSUBPS X6, X7 +#define ADDSUBPS_X6_X7 BYTE $0xF2; BYTE $0x0F; BYTE $0xD0; BYTE $0xFE + +// MOVSHDUP X9, X8 +#define MOVSHDUP_X9_X8 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x16; BYTE $0xC1 +// MOVSLDUP X9, X9 +#define MOVSLDUP_X9_X9 BYTE $0xF3; BYTE $0x45; BYTE $0x0F; BYTE $0x12; BYTE $0xC9 +// ADDSUBPS X8, X9 +#define ADDSUBPS_X8_X9 BYTE $0xF2; BYTE $0x45; BYTE $0x0F; BYTE $0xD0; BYTE $0xC8 + +// func AxpyUnitaryTo(dst []complex64, alpha complex64, x, y []complex64) +TEXT ·AxpyUnitaryTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+32(FP), SI // SI = &x + MOVQ y_base+56(FP), DX // DX = &y + MOVQ x_len+40(FP), CX + CMPQ y_len+64(FP), CX // CX = min( len(x), len(y), len(dst) ) + CMOVQLE y_len+64(FP), CX + CMPQ dst_len+8(FP), CX + CMOVQLE dst_len+8(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + MOVSD alpha+24(FP), X0 // X0 = { 0, 0, imag(a), real(a) } + SHUFPD $0, X0, X0 // X0 = { imag(a), real(a), imag(a), real(a) } + MOVAPS X0, X1 + SHUFPS $0x11, X1, X1 // X1 = { real(a), imag(a), real(a), imag(a) } + XORQ AX, AX // i = 0 + MOVQ DX, BX // Align on 16-byte boundary for ADDPS + ANDQ $15, BX // BX = &y & 15 + JZ caxy_no_trim // if BX == 0 { goto caxy_no_trim } + + MOVSD (SI)(AX*8), X3 // X3 = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X2 = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X3 = { real(x[i]), real(x[i]) } + MULPS X1, X2 // X2 = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X0, X3 // X3 = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X3 = { imag(a)*real(x[i]) + real(a)*imag(x[i]), real(a)*real(x[i]) - imag(a)*imag(x[i]) } + ADDSUBPS_X2_X3 + MOVSD (DX)(AX*8), X4 // X3 += y[i] + ADDPS X4, X3 + MOVSD X3, (DI)(AX*8) // dst[i] = X3 + INCQ AX // i++ + DECQ CX // --CX + JZ caxy_tail // if BX == 0 { goto caxy_tail } + +caxy_no_trim: + MOVAPS X0, X10 // Copy X0 and X1 for pipelineing + MOVAPS X1, X11 + MOVQ CX, BX + ANDQ $7, CX // CX = n % 8 + SHRQ $3, BX // BX = floor( n / 8 ) + JZ caxy_tail // if BX == 0 { goto caxy_tail } + +caxy_loop: + // X_i = { imag(x[i]), real(x[i]), imag(x[i+1]), real(x[i+1]) } + MOVUPS (SI)(AX*8), X3 + MOVUPS 16(SI)(AX*8), X5 + MOVUPS 32(SI)(AX*8), X7 + MOVUPS 48(SI)(AX*8), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i]+1), imag(x[i]+1) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_i = { imag(a) * real(x[i]), real(a) * real(x[i]), + // imag(a) * real(x[i+1]), real(a) * real(x[i+1]) } + // X_(i-1) = { real(a) * imag(x[i]), imag(a) * imag(x[i]), + // real(a) * imag(x[i+1]), imag(a) * imag(x[i+1]) } + MULPS X1, X2 + MULPS X0, X3 + MULPS X11, X4 
+ MULPS X10, X5 + MULPS X1, X6 + MULPS X0, X7 + MULPS X11, X8 + MULPS X10, X9 + + // X_i = { + // imag(result[i]): imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(result[i]): real(a)*real(x[i]) - imag(a)*imag(x[i]), + // imag(result[i+1]): imag(a)*real(x[i+1]) + real(a)*imag(x[i+1]), + // real(result[i+1]): real(a)*real(x[i+1]) - imag(a)*imag(x[i+1]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // X_i = { imag(result[i]) + imag(y[i]), real(result[i]) + real(y[i]), + // imag(result[i+1]) + imag(y[i+1]), real(result[i+1]) + real(y[i+1]) } + ADDPS (DX)(AX*8), X3 + ADDPS 16(DX)(AX*8), X5 + ADDPS 32(DX)(AX*8), X7 + ADDPS 48(DX)(AX*8), X9 + MOVUPS X3, (DI)(AX*8) // y[i:i+1] = X_i + MOVUPS X5, 16(DI)(AX*8) + MOVUPS X7, 32(DI)(AX*8) + MOVUPS X9, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + DECQ BX // --BX + JNZ caxy_loop // } while BX > 0 + CMPQ CX, $0 // if CX == 0 { return } + JE caxy_end + +caxy_tail: // do { + MOVSD (SI)(AX*8), X3 // X3 = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X2 = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X3 = { real(x[i]), real(x[i]) } + MULPS X1, X2 // X2 = { real(a) * imag(x[i]), imag(a) * imag(x[i]) } + MULPS X0, X3 // X3 = { imag(a) * real(x[i]), real(a) * real(x[i]) } + + // X3 = { imag(a)*real(x[i]) + real(a)*imag(x[i]), + // real(a)*real(x[i]) - imag(a)*imag(x[i]) } + ADDSUBPS_X2_X3 + MOVSD (DX)(AX*8), X4 // X3 += y[i] + ADDPS X4, X3 + MOVSD X3, (DI)(AX*8) // y[i] = X3 + INCQ AX // ++i + LOOP caxy_tail // } while --CX > 0 + +caxy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/conj.go b/vendor/gonum.org/v1/gonum/internal/asm/c64/conj.go new file mode 100644 index 0000000..910e1e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/conj.go @@ -0,0 +1,7 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package c64 + +func conj(c complex64) complex64 { return complex(real(c), -imag(c)) } diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/doc.go b/vendor/gonum.org/v1/gonum/internal/asm/c64/doc.go new file mode 100644 index 0000000..35f1b2a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package c64 provides complex64 vector primitives. +package c64 // import "gonum.org/v1/gonum/internal/asm/c64" diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcinc_amd64.s new file mode 100644 index 0000000..2161643 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcinc_amd64.s @@ -0,0 +1,160 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
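The c64 axpy kernels above implement complex multiply-add with the usual real/imaginary split: MOVSLDUP and MOVSHDUP duplicate the real and imaginary parts of each complex64 lane, and ADDSUBPS applies the subtract-on-even, add-on-odd sign pattern. A minimal scalar Go sketch of the same arithmetic, for orientation only (axpyElem is a made-up name, not part of the vendored package):

package main

import "fmt"

// axpyElem computes alpha*x + y for one complex64 element using the split
// the SSE3 kernels apply two complex values per 128-bit register:
//   real(alpha*x) = real(alpha)*real(x) - imag(alpha)*imag(x)
//   imag(alpha*x) = imag(alpha)*real(x) + real(alpha)*imag(x)
func axpyElem(alpha, x, y complex64) complex64 {
	ar, ai := real(alpha), imag(alpha)
	xr, xi := real(x), imag(x)
	return complex(ar*xr-ai*xi, ai*xr+ar*xi) + y
}

func main() {
	fmt.Println(axpyElem(2+3i, 1+1i, 0.5)) // (-0.5+5i)
}

ADDSUBPS provides exactly that subtract/add sign pattern, which is why the axpy kernels need no explicit negation.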
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVSHDUP_X3_X2 LONG $0xD3160FF3 // MOVSHDUP X3, X2 +#define MOVSHDUP_X5_X4 LONG $0xE5160FF3 // MOVSHDUP X5, X4 +#define MOVSHDUP_X7_X6 LONG $0xF7160FF3 // MOVSHDUP X7, X6 +#define MOVSHDUP_X9_X8 LONG $0x160F45F3; BYTE $0xC1 // MOVSHDUP X9, X8 + +#define MOVSLDUP_X3_X3 LONG $0xDB120FF3 // MOVSLDUP X3, X3 +#define MOVSLDUP_X5_X5 LONG $0xED120FF3 // MOVSLDUP X5, X5 +#define MOVSLDUP_X7_X7 LONG $0xFF120FF3 // MOVSLDUP X7, X7 +#define MOVSLDUP_X9_X9 LONG $0x120F45F3; BYTE $0xC9 // MOVSLDUP X9, X9 + +#define ADDSUBPS_X2_X3 LONG $0xDAD00FF2 // ADDSUBPS X2, X3 +#define ADDSUBPS_X4_X5 LONG $0xECD00FF2 // ADDSUBPS X4, X5 +#define ADDSUBPS_X6_X7 LONG $0xFED00FF2 // ADDSUBPS X6, X7 +#define ADDSUBPS_X8_X9 LONG $0xD00F45F2; BYTE $0xC8 // ADDSUBPS X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define INC_X R8 +#define INCx3_X R9 +#define INC_Y R10 +#define INCx3_Y R11 +#define NEG1 X15 +#define P_NEG1 X14 + +// func DotcInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) +TEXT ·DotcInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + PXOR P_SUM, P_SUM // P_SUM = 0 + MOVQ n+48(FP), LEN // LEN = n + CMPQ LEN, $0 // if LEN == 0 { return } + JE dotc_end + MOVQ ix+72(FP), INC_X + MOVQ iy+80(FP), INC_Y + LEAQ (X_PTR)(INC_X*8), X_PTR // X_PTR = &(X_PTR[ix]) + LEAQ (Y_PTR)(INC_Y*8), Y_PTR // Y_PTR = &(Y_PTR[iy]) + MOVQ incX+56(FP), INC_X // INC_X = incX * sizeof(complex64) + SHLQ $3, INC_X + MOVQ incY+64(FP), INC_Y // INC_Y = incY * sizeof(complex64) + SHLQ $3, INC_Y + MOVSS $(-1.0), NEG1 + SHUFPS $0, NEG1, NEG1 // { -1, -1, -1, -1 } + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ dotc_tail // if LEN == 0 { goto dotc_tail } + + MOVUPS NEG1, P_NEG1 // Copy NEG1 for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + +dotc_loop: // do { + MOVSD (X_PTR), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSD (X_PTR)(INC_X*1), X5 + MOVSD (X_PTR)(INC_X*2), X7 + MOVSD (X_PTR)(INCx3_X*1), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_(i-1) = { -imag(x[i]), -imag(x[i]) } + MULPS NEG1, X2 + MULPS P_NEG1, X4 + MULPS NEG1, X6 + MULPS P_NEG1, X8 + + // X_j = { imag(y[i]), real(y[i]) } + MOVSD (Y_PTR), X10 + MOVSD (Y_PTR)(INC_Y*1), X11 + MOVSD (Y_PTR)(INC_Y*2), X12 + MOVSD (Y_PTR)(INCx3_Y*1), X13 + + // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + MULPS X10, X3 + MULPS X11, X5 + MULPS X12, X7 + MULPS X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPS $0xB1, X10, X10 + SHUFPS $0xB1, X11, X11 + SHUFPS $0xB1, X12, X12 + SHUFPS $0xB1, X13, X13 + + // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + MULPS X10, X2 + MULPS X11, X4 + MULPS X12, X6 + MULPS X13, X8 + + // X_i = { + // imag(result[i]): imag(y[i]) * real(x[i]) + real(y[i]) * imag(x[i]), + // real(result[i]): real(y[i]) * real(x[i]) - imag(y[i]) * imag(x[i]) } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // SUM += X_i + ADDPS X3, SUM + ADDPS X5, P_SUM + ADDPS X7, SUM + ADDPS X9, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[INC_X*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR 
// Y_PTR = &(Y_PTR[INC_Y*4]) + + DECQ LEN + JNZ dotc_loop // } while --LEN > 0 + + ADDPS P_SUM, SUM // SUM = { P_SUM + SUM } + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dotc_end + +dotc_tail: // do { + MOVSD (X_PTR), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MULPS NEG1, X2 // X_(i-1) = { -imag(x[i]), imag(x[i]) } + MOVUPS (Y_PTR), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + ADDPS X3, SUM // SUM += X_i + ADDQ INC_X, X_PTR // X_PTR += INC_X + ADDQ INC_Y, Y_PTR // Y_PTR += INC_Y + DECQ TAIL + JNZ dotc_tail // } while --TAIL > 0 + +dotc_end: + MOVSD SUM, sum+88(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcunitary_amd64.s new file mode 100644 index 0000000..4efc52b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotcunitary_amd64.s @@ -0,0 +1,208 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVSLDUP_XPTR_IDX_8__X3 LONG $0x1C120FF3; BYTE $0xC6 // MOVSLDUP (SI)(AX*8), X3 +#define MOVSLDUP_16_XPTR_IDX_8__X5 LONG $0x6C120FF3; WORD $0x10C6 // MOVSLDUP 16(SI)(AX*8), X5 +#define MOVSLDUP_32_XPTR_IDX_8__X7 LONG $0x7C120FF3; WORD $0x20C6 // MOVSLDUP 32(SI)(AX*8), X7 +#define MOVSLDUP_48_XPTR_IDX_8__X9 LONG $0x120F44F3; WORD $0xC64C; BYTE $0x30 // MOVSLDUP 48(SI)(AX*8), X9 + +#define MOVSHDUP_XPTR_IDX_8__X2 LONG $0x14160FF3; BYTE $0xC6 // MOVSHDUP (SI)(AX*8), X2 +#define MOVSHDUP_16_XPTR_IDX_8__X4 LONG $0x64160FF3; WORD $0x10C6 // MOVSHDUP 16(SI)(AX*8), X4 +#define MOVSHDUP_32_XPTR_IDX_8__X6 LONG $0x74160FF3; WORD $0x20C6 // MOVSHDUP 32(SI)(AX*8), X6 +#define MOVSHDUP_48_XPTR_IDX_8__X8 LONG $0x160F44F3; WORD $0xC644; BYTE $0x30 // MOVSHDUP 48(SI)(AX*8), X8 + +#define MOVSHDUP_X3_X2 LONG $0xD3160FF3 // MOVSHDUP X3, X2 +#define MOVSLDUP_X3_X3 LONG $0xDB120FF3 // MOVSLDUP X3, X3 + +#define ADDSUBPS_X2_X3 LONG $0xDAD00FF2 // ADDSUBPS X2, X3 +#define ADDSUBPS_X4_X5 LONG $0xECD00FF2 // ADDSUBPS X4, X5 +#define ADDSUBPS_X6_X7 LONG $0xFED00FF2 // ADDSUBPS X6, X7 +#define ADDSUBPS_X8_X9 LONG $0xD00F45F2; BYTE $0xC8 // ADDSUBPS X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define IDX AX +#define I_IDX DX +#define NEG1 X15 +#define P_NEG1 X14 + +// func DotcUnitary(x, y []complex64) (sum complex64) +TEXT ·DotcUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + PXOR P_SUM, P_SUM // P_SUM = 0 + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + CMPQ LEN, $0 // if LEN == 0 { return } + JE dotc_end + XORQ IDX, IDX // i = 0 + MOVSS $(-1.0), NEG1 + SHUFPS $0, NEG1, NEG1 // { -1, -1, -1, -1 } + + MOVQ X_PTR, DX + ANDQ $15, DX // DX = &x & 15 + JZ dotc_aligned // if DX == 0 { goto dotc_aligned } + + MOVSD 
(X_PTR)(IDX*8), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MOVSD (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS NEG1, X2 // X_(i-1) = { -imag(x[i]), imag(x[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + MOVAPS X3, SUM // SUM = X_i + INCQ IDX // IDX++ + DECQ LEN // LEN-- + JZ dotc_ret // if LEN == 0 { goto dotc_ret } + +dotc_aligned: + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL = LEN % 8 + SHRQ $3, LEN // LEN = floor( LEN / 8 ) + JZ dotc_tail // if LEN == 0 { return } + MOVUPS NEG1, P_NEG1 // Copy NEG1 for pipelining + +dotc_loop: // do { + MOVSLDUP_XPTR_IDX_8__X3 // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSLDUP_16_XPTR_IDX_8__X5 + MOVSLDUP_32_XPTR_IDX_8__X7 + MOVSLDUP_48_XPTR_IDX_8__X9 + + MOVSHDUP_XPTR_IDX_8__X2 // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i+1]), imag(x[i+1]) } + MOVSHDUP_16_XPTR_IDX_8__X4 + MOVSHDUP_32_XPTR_IDX_8__X6 + MOVSHDUP_48_XPTR_IDX_8__X8 + + // X_j = { imag(y[i]), real(y[i]), imag(y[i+1]), real(y[i+1]) } + MOVUPS (Y_PTR)(IDX*8), X10 + MOVUPS 16(Y_PTR)(IDX*8), X11 + MOVUPS 32(Y_PTR)(IDX*8), X12 + MOVUPS 48(Y_PTR)(IDX*8), X13 + + // X_(i-1) = { -imag(x[i]), -imag(x[i]), -imag(x[i]+1), -imag(x[i]+1) } + MULPS NEG1, X2 + MULPS P_NEG1, X4 + MULPS NEG1, X6 + MULPS P_NEG1, X8 + + // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]), + // imag(y[i+1]) * real(x[i+1]), real(y[i+1]) * real(x[i+1]) } + MULPS X10, X3 + MULPS X11, X5 + MULPS X12, X7 + MULPS X13, X9 + + // X_j = { real(y[i]), imag(y[i]), real(y[i+1]), imag(y[i+1]) } + SHUFPS $0xB1, X10, X10 + SHUFPS $0xB1, X11, X11 + SHUFPS $0xB1, X12, X12 + SHUFPS $0xB1, X13, X13 + + // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]), + // real(y[i+1]) * imag(x[i+1]), imag(y[i+1]) * imag(x[i+1]) } + MULPS X10, X2 + MULPS X11, X4 + MULPS X12, X6 + MULPS X13, X8 + + // X_i = { + // imag(result[i]): imag(y[i]) * real(x[i]) + real(y[i]) * imag(x[i]), + // real(result[i]): real(y[i]) * real(x[i]) - imag(y[i]) * imag(x[i]), + // imag(result[i+1]): imag(y[i+1]) * real(x[i+1]) + real(y[i+1]) * imag(x[i+1]), + // real(result[i+1]): real(y[i+1]) * real(x[i+1]) - imag(y[i+1]) * imag(x[i+1]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // SUM += X_i + ADDPS X3, SUM + ADDPS X5, P_SUM + ADDPS X7, SUM + ADDPS X9, P_SUM + + ADDQ $8, IDX // IDX += 8 + DECQ LEN + JNZ dotc_loop // } while --LEN > 0 + + ADDPS SUM, P_SUM // P_SUM = { P_SUM[1] + SUM[1], P_SUM[0] + SUM[0] } + XORPS SUM, SUM // SUM = 0 + + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dotc_end + +dotc_tail: + MOVQ TAIL, LEN + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ dotc_tail_one // if LEN == 0 { goto dotc_tail_one } + +dotc_tail_two: // do { + MOVSLDUP_XPTR_IDX_8__X3 // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSHDUP_XPTR_IDX_8__X2 // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i]+1), imag(x[i]+1) } + MOVUPS (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS NEG1, X2 // X_(i-1) = { -imag(x[i]), imag(x[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS 
$0xB1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + ADDPS X3, SUM // SUM += X_i + + ADDQ $2, IDX // IDX += 2 + DECQ LEN + JNZ dotc_tail_two // } while --LEN > 0 + + ADDPS SUM, P_SUM // P_SUM = { P_SUM[1] + SUM[1], P_SUM[0] + SUM[0] } + XORPS SUM, SUM // SUM = 0 + + ANDQ $1, TAIL + JZ dotc_end + +dotc_tail_one: + MOVSD (X_PTR)(IDX*8), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MOVSD (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS NEG1, X2 // X_(i-1) = { -imag(x[i]), imag(x[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + ADDPS X3, SUM // SUM += X_i + +dotc_end: + ADDPS P_SUM, SUM // SUM = { P_SUM[0] + SUM[0] } + MOVHLPS P_SUM, P_SUM // P_SUM = { P_SUM[1], P_SUM[1] } + ADDPS P_SUM, SUM // SUM = { P_SUM[1] + SUM[0] } + +dotc_ret: + MOVSD SUM, sum+48(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuinc_amd64.s new file mode 100644 index 0000000..6b26c5a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuinc_amd64.s @@ -0,0 +1,148 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
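DotcUnitary above differs from the plain dot product only in conjugating x, which the kernel does by multiplying the duplicated imaginary parts by a register of -1 (NEG1) before the usual multiply split. A scalar Go sketch of one accumulation step, for orientation only (dotcElem is an illustrative name):

package main

import "fmt"

// dotcElem adds y*conj(x) to sum; conjugation is just a sign flip on
// imag(x), which is what the MULPS NEG1 step performs lane-wise.
func dotcElem(sum, x, y complex64) complex64 {
	xr, xi := real(x), -imag(x) // conj(x)
	yr, yi := real(y), imag(y)
	return sum + complex(yr*xr-yi*xi, yi*xr+yr*xi)
}

func main() {
	var sum complex64
	x := []complex64{1 + 2i, 3 - 1i}
	y := []complex64{2, 1 + 1i}
	for i := range x {
		sum = dotcElem(sum, x[i], y[i])
	}
	fmt.Println(sum) // (4+0i), i.e. the sum of y[i]*conj(x[i])
}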
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVSHDUP_X3_X2 LONG $0xD3160FF3 // MOVSHDUP X3, X2 +#define MOVSHDUP_X5_X4 LONG $0xE5160FF3 // MOVSHDUP X5, X4 +#define MOVSHDUP_X7_X6 LONG $0xF7160FF3 // MOVSHDUP X7, X6 +#define MOVSHDUP_X9_X8 LONG $0x160F45F3; BYTE $0xC1 // MOVSHDUP X9, X8 + +#define MOVSLDUP_X3_X3 LONG $0xDB120FF3 // MOVSLDUP X3, X3 +#define MOVSLDUP_X5_X5 LONG $0xED120FF3 // MOVSLDUP X5, X5 +#define MOVSLDUP_X7_X7 LONG $0xFF120FF3 // MOVSLDUP X7, X7 +#define MOVSLDUP_X9_X9 LONG $0x120F45F3; BYTE $0xC9 // MOVSLDUP X9, X9 + +#define ADDSUBPS_X2_X3 LONG $0xDAD00FF2 // ADDSUBPS X2, X3 +#define ADDSUBPS_X4_X5 LONG $0xECD00FF2 // ADDSUBPS X4, X5 +#define ADDSUBPS_X6_X7 LONG $0xFED00FF2 // ADDSUBPS X6, X7 +#define ADDSUBPS_X8_X9 LONG $0xD00F45F2; BYTE $0xC8 // ADDSUBPS X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define INC_X R8 +#define INCx3_X R9 +#define INC_Y R10 +#define INCx3_Y R11 + +// func DotuInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) +TEXT ·DotuInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + PXOR P_SUM, P_SUM // P_SUM = 0 + MOVQ n+48(FP), LEN // LEN = n + CMPQ LEN, $0 // if LEN == 0 { return } + JE dotu_end + MOVQ ix+72(FP), INC_X + MOVQ iy+80(FP), INC_Y + LEAQ (X_PTR)(INC_X*8), X_PTR // X_PTR = &(X_PTR[ix]) + LEAQ (Y_PTR)(INC_Y*8), Y_PTR // Y_PTR = &(Y_PTR[iy]) + MOVQ incX+56(FP), INC_X // INC_X = incX * sizeof(complex64) + SHLQ $3, INC_X + MOVQ incY+64(FP), INC_Y // INC_Y = incY * sizeof(complex64) + SHLQ $3, INC_Y + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ dotu_tail // if TAIL == 0 { goto dotu_tail } + + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + +dotu_loop: // do { + MOVSD (X_PTR), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSD (X_PTR)(INC_X*1), X5 + MOVSD (X_PTR)(INC_X*2), X7 + MOVSD (X_PTR)(INCx3_X*1), X9 + + // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSHDUP_X3_X2 + MOVSHDUP_X5_X4 + MOVSHDUP_X7_X6 + MOVSHDUP_X9_X8 + + // X_i = { real(x[i]), real(x[i]) } + MOVSLDUP_X3_X3 + MOVSLDUP_X5_X5 + MOVSLDUP_X7_X7 + MOVSLDUP_X9_X9 + + // X_j = { imag(y[i]), real(y[i]) } + MOVSD (Y_PTR), X10 + MOVSD (Y_PTR)(INC_Y*1), X11 + MOVSD (Y_PTR)(INC_Y*2), X12 + MOVSD (Y_PTR)(INCx3_Y*1), X13 + + // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + MULPS X10, X3 + MULPS X11, X5 + MULPS X12, X7 + MULPS X13, X9 + + // X_j = { real(y[i]), imag(y[i]) } + SHUFPS $0xB1, X10, X10 + SHUFPS $0xB1, X11, X11 + SHUFPS $0xB1, X12, X12 + SHUFPS $0xB1, X13, X13 + + // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + MULPS X10, X2 + MULPS X11, X4 + MULPS X12, X6 + MULPS X13, X8 + + // X_i = { + // imag(result[i]): imag(y[i]) * real(x[i]) + real(y[i]) * imag(x[i]), + // real(result[i]): real(y[i]) * real(x[i]) - imag(y[i]) * imag(x[i]) } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // SUM += X_i + ADDPS X3, SUM + ADDPS X5, P_SUM + ADDPS X7, SUM + ADDPS X9, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[INC_X*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[INC_Y*4]) + + DECQ LEN + JNZ dotu_loop // } while --LEN > 0 + + ADDPS P_SUM, SUM // SUM = { P_SUM + SUM } + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dotu_end + +dotu_tail: // do { + MOVSD (X_PTR), X3 // X_i = { imag(x[i]), real(x[i]) } + 
MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MOVUPS (Y_PTR), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + ADDPS X3, SUM // SUM += X_i + ADDQ INC_X, X_PTR // X_PTR += INC_X + ADDQ INC_Y, Y_PTR // Y_PTR += INC_Y + DECQ TAIL + JNZ dotu_tail // } while --TAIL > 0 + +dotu_end: + MOVSD SUM, sum+88(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuunitary_amd64.s new file mode 100644 index 0000000..07a115b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/dotuunitary_amd64.s @@ -0,0 +1,197 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVSLDUP_XPTR_IDX_8__X3 LONG $0x1C120FF3; BYTE $0xC6 // MOVSLDUP (SI)(AX*8), X3 +#define MOVSLDUP_16_XPTR_IDX_8__X5 LONG $0x6C120FF3; WORD $0x10C6 // MOVSLDUP 16(SI)(AX*8), X5 +#define MOVSLDUP_32_XPTR_IDX_8__X7 LONG $0x7C120FF3; WORD $0x20C6 // MOVSLDUP 32(SI)(AX*8), X7 +#define MOVSLDUP_48_XPTR_IDX_8__X9 LONG $0x120F44F3; WORD $0xC64C; BYTE $0x30 // MOVSLDUP 48(SI)(AX*8), X9 + +#define MOVSHDUP_XPTR_IDX_8__X2 LONG $0x14160FF3; BYTE $0xC6 // MOVSHDUP (SI)(AX*8), X2 +#define MOVSHDUP_16_XPTR_IDX_8__X4 LONG $0x64160FF3; WORD $0x10C6 // MOVSHDUP 16(SI)(AX*8), X4 +#define MOVSHDUP_32_XPTR_IDX_8__X6 LONG $0x74160FF3; WORD $0x20C6 // MOVSHDUP 32(SI)(AX*8), X6 +#define MOVSHDUP_48_XPTR_IDX_8__X8 LONG $0x160F44F3; WORD $0xC644; BYTE $0x30 // MOVSHDUP 48(SI)(AX*8), X8 + +#define MOVSHDUP_X3_X2 LONG $0xD3160FF3 // MOVSHDUP X3, X2 +#define MOVSLDUP_X3_X3 LONG $0xDB120FF3 // MOVSLDUP X3, X3 + +#define ADDSUBPS_X2_X3 LONG $0xDAD00FF2 // ADDSUBPS X2, X3 +#define ADDSUBPS_X4_X5 LONG $0xECD00FF2 // ADDSUBPS X4, X5 +#define ADDSUBPS_X6_X7 LONG $0xFED00FF2 // ADDSUBPS X6, X7 +#define ADDSUBPS_X8_X9 LONG $0xD00F45F2; BYTE $0xC8 // ADDSUBPS X8, X9 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define SUM X0 +#define P_SUM X1 +#define IDX AX +#define I_IDX DX +#define NEG1 X15 +#define P_NEG1 X14 + +// func DotuUnitary(x, y []complex64) (sum complex64) +TEXT ·DotuUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + PXOR P_SUM, P_SUM // P_SUM = 0 + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + CMPQ LEN, $0 // if LEN == 0 { return } + JE dotu_end + XORQ IDX, IDX // IDX = 0 + + MOVQ X_PTR, DX + ANDQ $15, DX // DX = &x & 15 + JZ dotu_aligned // if DX == 0 { goto dotu_aligned } + + MOVSD (X_PTR)(IDX*8), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) } + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MOVSD (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = 
{ real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + MOVAPS X3, SUM // SUM = X_i + INCQ IDX // IDX++ + DECQ LEN // LEN-- + JZ dotu_end // if LEN == 0 { goto dotu_end } + +dotu_aligned: + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL = LEN % 8 + SHRQ $3, LEN // LEN = floor( LEN / 8 ) + JZ dotu_tail // if LEN == 0 { goto dotu_tail } + PXOR P_SUM, P_SUM + +dotu_loop: // do { + MOVSLDUP_XPTR_IDX_8__X3 // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSLDUP_16_XPTR_IDX_8__X5 + MOVSLDUP_32_XPTR_IDX_8__X7 + MOVSLDUP_48_XPTR_IDX_8__X9 + + MOVSHDUP_XPTR_IDX_8__X2 // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i]+1), imag(x[i]+1) } + MOVSHDUP_16_XPTR_IDX_8__X4 + MOVSHDUP_32_XPTR_IDX_8__X6 + MOVSHDUP_48_XPTR_IDX_8__X8 + + // X_j = { imag(y[i]), real(y[i]), imag(y[i+1]), real(y[i+1]) } + MOVUPS (Y_PTR)(IDX*8), X10 + MOVUPS 16(Y_PTR)(IDX*8), X11 + MOVUPS 32(Y_PTR)(IDX*8), X12 + MOVUPS 48(Y_PTR)(IDX*8), X13 + + // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]), + // imag(y[i+1]) * real(x[i+1]), real(y[i+1]) * real(x[i+1]) } + MULPS X10, X3 + MULPS X11, X5 + MULPS X12, X7 + MULPS X13, X9 + + // X_j = { real(y[i]), imag(y[i]), real(y[i+1]), imag(y[i+1]) } + SHUFPS $0xB1, X10, X10 + SHUFPS $0xB1, X11, X11 + SHUFPS $0xB1, X12, X12 + SHUFPS $0xB1, X13, X13 + + // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]), + // real(y[i+1]) * imag(x[i+1]), imag(y[i+1]) * imag(x[i+1]) } + MULPS X10, X2 + MULPS X11, X4 + MULPS X12, X6 + MULPS X13, X8 + + // X_i = { + // imag(result[i]): imag(y[i]) * real(x[i]) + real(y[i]) * imag(x[i]), + // real(result[i]): real(y[i]) * real(x[i]) - imag(y[i]) * imag(x[i]), + // imag(result[i+1]): imag(y[i+1]) * real(x[i+1]) + real(y[i+1]) * imag(x[i+1]), + // real(result[i+1]): real(y[i+1]) * real(x[i+1]) - imag(y[i+1]) * imag(x[i+1]), + // } + ADDSUBPS_X2_X3 + ADDSUBPS_X4_X5 + ADDSUBPS_X6_X7 + ADDSUBPS_X8_X9 + + // SUM += X_i + ADDPS X3, SUM + ADDPS X5, P_SUM + ADDPS X7, SUM + ADDPS X9, P_SUM + + ADDQ $8, IDX // IDX += 8 + DECQ LEN + JNZ dotu_loop // } while --LEN > 0 + + ADDPS SUM, P_SUM // P_SUM = { P_SUM[1] + SUM[1], P_SUM[0] + SUM[0] } + XORPS SUM, SUM // SUM = 0 + + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dotu_end + +dotu_tail: + MOVQ TAIL, LEN + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ dotu_tail_one // if LEN == 0 { goto dotc_tail_one } + +dotu_tail_two: // do { + MOVSLDUP_XPTR_IDX_8__X3 // X_i = { real(x[i]), real(x[i]), real(x[i+1]), real(x[i+1]) } + MOVSHDUP_XPTR_IDX_8__X2 // X_(i-1) = { imag(x[i]), imag(x[i]), imag(x[i]+1), imag(x[i]+1) } + MOVUPS (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0xB1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + ADDPS X3, SUM // SUM += X_i + + ADDQ $2, IDX // IDX += 2 + DECQ LEN + JNZ dotu_tail_two // } while --LEN > 0 + + ADDPS SUM, P_SUM // P_SUM = { P_SUM[1] + SUM[1], P_SUM[0] + SUM[0] } + XORPS SUM, SUM // SUM = 0 + + ANDQ $1, TAIL + JZ dotu_end + +dotu_tail_one: + MOVSD (X_PTR)(IDX*8), X3 // X_i = { imag(x[i]), real(x[i]) } + MOVSHDUP_X3_X2 // X_(i-1) = { imag(x[i]), imag(x[i]) 
} + MOVSLDUP_X3_X3 // X_i = { real(x[i]), real(x[i]) } + MOVSD (Y_PTR)(IDX*8), X10 // X_j = { imag(y[i]), real(y[i]) } + MULPS X10, X3 // X_i = { imag(y[i]) * real(x[i]), real(y[i]) * real(x[i]) } + SHUFPS $0x1, X10, X10 // X_j = { real(y[i]), imag(y[i]) } + MULPS X10, X2 // X_(i-1) = { real(y[i]) * imag(x[i]), imag(y[i]) * imag(x[i]) } + + // X_i = { + // imag(result[i]): imag(y[i])*real(x[i]) + real(y[i])*imag(x[i]), + // real(result[i]): real(y[i])*real(x[i]) - imag(y[i])*imag(x[i]) } + ADDSUBPS_X2_X3 + + ADDPS X3, SUM // SUM += X_i + +dotu_end: + ADDPS P_SUM, SUM // SUM = { P_SUM[0] + SUM[0] } + MOVHLPS P_SUM, P_SUM // P_SUM = { P_SUM[1], P_SUM[1] } + ADDPS P_SUM, SUM // SUM = { P_SUM[1] + SUM[0] } + +dotu_ret: + MOVSD SUM, sum+48(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/scal.go b/vendor/gonum.org/v1/gonum/internal/asm/c64/scal.go new file mode 100644 index 0000000..a84def8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/scal.go @@ -0,0 +1,79 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package c64 + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha complex64, x []complex64) { + for i := range x { + x[i] *= alpha + } +} + +// ScalUnitaryTo is +// for i, v := range x { +// dst[i] = alpha * v +// } +func ScalUnitaryTo(dst []complex64, alpha complex64, x []complex64) { + for i, v := range x { + dst[i] = alpha * v + } +} + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha complex64, x []complex64, n, incX uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] *= alpha + ix += incX + } +} + +// ScalIncTo is +// var idst, ix uintptr +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha * x[ix] +// ix += incX +// idst += incDst +// } +func ScalIncTo(dst []complex64, incDst uintptr, alpha complex64, x []complex64, n, incX uintptr) { + var idst, ix uintptr + for i := 0; i < int(n); i++ { + dst[idst] = alpha * x[ix] + ix += incX + idst += incDst + } +} + +// SscalUnitary is +// for i, v := range x { +// x[i] = complex(real(v)*alpha, imag(v)*alpha) +// } +func SscalUnitary(alpha float32, x []complex64) { + for i, v := range x { + x[i] = complex(real(v)*alpha, imag(v)*alpha) + } +} + +// SscalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] = complex(real(x[ix])*alpha, imag(x[ix])*alpha) +// ix += inc +// } +func SscalInc(alpha float32, x []complex64, n, inc uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] = complex(real(x[ix])*alpha, imag(x[ix])*alpha) + ix += inc + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_amd64.go new file mode 100644 index 0000000..3e12d6b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_amd64.go @@ -0,0 +1,68 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +package c64 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha complex64, x, y []complex64) + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []complex64, alpha complex64, x, y []complex64) + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []complex64, incDst, idst uintptr, alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) + +// DotcUnitary is +// for i, v := range x { +// sum += y[i] * conj(v) +// } +// return sum +func DotcUnitary(x, y []complex64) (sum complex64) + +// DotcInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * conj(x[ix]) +// ix += incX +// iy += incY +// } +// return sum +func DotcInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) + +// DotuUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotuUnitary(x, y []complex64) (sum complex64) + +// DotuInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotuInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) diff --git a/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_noasm.go new file mode 100644 index 0000000..411afcb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/c64/stubs_noasm.go @@ -0,0 +1,113 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !amd64 noasm appengine safe + +package c64 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha complex64, x, y []complex64) { + for i, v := range x { + y[i] += alpha * v + } +} + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []complex64, alpha complex64, x, y []complex64) { + for i, v := range x { + dst[i] = alpha*v + y[i] + } +} + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + y[iy] += alpha * x[ix] + ix += incX + iy += incY + } +} + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []complex64, incDst, idst uintptr, alpha complex64, x, y []complex64, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + dst[idst] = alpha*x[ix] + y[iy] + ix += incX + iy += incY + idst += incDst + } +} + +// DotcUnitary is +// for i, v := range x { +// sum += y[i] * conj(v) +// } +// return sum +func DotcUnitary(x, y []complex64) (sum complex64) { + for i, v := range x { + sum += y[i] * conj(v) + } + return sum +} + +// DotcInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * conj(x[ix]) +// ix += incX +// iy += incY +// } +// return sum +func DotcInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) { + for i := 0; i < int(n); i++ { + sum += y[iy] * conj(x[ix]) + ix += incX + iy += incY + } + return sum +} + +// DotuUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotuUnitary(x, y []complex64) (sum complex64) { + for i, v := range x { + sum += y[i] * v + } + return sum +} + +// DotuInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotuInc(x, y []complex64, n, incX, incY, ix, iy uintptr) (sum complex64) { + for i := 0; i < int(n); i++ { + sum += y[iy] * x[ix] + ix += incX + iy += incY + } + return sum +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyinc_amd64.s new file mode 100644 index 0000000..ebf360f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyinc_amd64.s @@ -0,0 +1,73 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func AxpyInc(alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyInc(SB), NOSPLIT, $0 + MOVQ n+56(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JLE axpyi_end + MOVQ x_base+8(FP), SI // SI = &x + MOVQ y_base+32(FP), DI // DI = &y + MOVQ ix+80(FP), R8 // R8 = ix + MOVQ iy+88(FP), R9 // R9 = iy + LEAQ (SI)(R8*4), SI // SI = &(x[ix]) + LEAQ (DI)(R9*4), DI // DI = &(y[iy]) + MOVQ DI, DX // DX = DI Read Pointer for y + MOVQ incX+64(FP), R8 // R8 = incX + SHLQ $2, R8 // R8 *= sizeof(float32) + MOVQ incY+72(FP), R9 // R9 = incY + SHLQ $2, R9 // R9 *= sizeof(float32) + MOVSS alpha+0(FP), X0 // X0 = alpha + MOVSS X0, X1 // X1 = X0 // for pipelining + MOVQ CX, BX + ANDQ $3, BX // BX = n % 4 + SHRQ $2, CX // CX = floor( n / 4 ) + JZ axpyi_tail_start // if CX == 0 { goto axpyi_tail_start } + +axpyi_loop: // Loop unrolled 4x do { + MOVSS (SI), X2 // X_i = x[i] + MOVSS (SI)(R8*1), X3 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + MOVSS (SI), X4 + MOVSS (SI)(R8*1), X5 + MULSS X1, X2 // X_i *= a + MULSS X0, X3 + MULSS X1, X4 + MULSS X0, X5 + ADDSS (DX), X2 // X_i += y[i] + ADDSS (DX)(R9*1), X3 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + ADDSS (DX), X4 + ADDSS (DX)(R9*1), X5 + MOVSS X2, (DI) // y[i] = X_i + MOVSS X3, (DI)(R9*1) + LEAQ (DI)(R9*2), DI // DI = &(DI[incY*2]) + MOVSS X4, (DI) + MOVSS X5, (DI)(R9*1) + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) // Increment addresses + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R9*2), DI // DI = &(DI[incY*2]) + LOOP axpyi_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE axpyi_end + +axpyi_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +axpyi_tail: // do { + MOVSS (SI), X2 // X2 = x[i] + MULSS X1, X2 // X2 *= a + ADDSS (DI), X2 // X2 += y[i] + MOVSS X2, (DI) // y[i] = X2 + ADDQ R8, SI // SI = &(SI[incX]) + ADDQ R9, DI // DI = &(DI[incY]) + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET + diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyincto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyincto_amd64.s new file mode 100644 index 0000000..4e9020e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyincto_amd64.s @@ -0,0 +1,78 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func AxpyIncTo(dst []float32, incDst, idst uintptr, alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyIncTo(SB), NOSPLIT, $0 + MOVQ n+96(FP), CX // CX = n + CMPQ CX, $0 // if n==0 { return } + JLE axpyi_end + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+48(FP), SI // SI = &x + MOVQ y_base+72(FP), DX // DX = &y + MOVQ ix+120(FP), R8 // R8 = ix // Load the first index + MOVQ iy+128(FP), R9 // R9 = iy + MOVQ idst+32(FP), R10 // R10 = idst + LEAQ (SI)(R8*4), SI // SI = &(x[ix]) + LEAQ (DX)(R9*4), DX // DX = &(y[iy]) + LEAQ (DI)(R10*4), DI // DI = &(dst[idst]) + MOVQ incX+104(FP), R8 // R8 = incX + SHLQ $2, R8 // R8 *= sizeof(float32) + MOVQ incY+112(FP), R9 // R9 = incY + SHLQ $2, R9 // R9 *= sizeof(float32) + MOVQ incDst+24(FP), R10 // R10 = incDst + SHLQ $2, R10 // R10 *= sizeof(float32) + MOVSS alpha+40(FP), X0 // X0 = alpha + MOVSS X0, X1 // X1 = X0 // for pipelining + MOVQ CX, BX + ANDQ $3, BX // BX = n % 4 + SHRQ $2, CX // CX = floor( n / 4 ) + JZ axpyi_tail_start // if CX == 0 { goto axpyi_tail_start } + +axpyi_loop: // Loop unrolled 4x do { + MOVSS (SI), X2 // X_i = x[i] + MOVSS (SI)(R8*1), X3 + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) + MOVSS (SI), X4 + MOVSS (SI)(R8*1), X5 + MULSS X1, X2 // X_i *= a + MULSS X0, X3 + MULSS X1, X4 + MULSS X0, X5 + ADDSS (DX), X2 // X_i += y[i] + ADDSS (DX)(R9*1), X3 + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + ADDSS (DX), X4 + ADDSS (DX)(R9*1), X5 + MOVSS X2, (DI) // dst[i] = X_i + MOVSS X3, (DI)(R10*1) + LEAQ (DI)(R10*2), DI // DI = &(DI[incDst*2]) + MOVSS X4, (DI) + MOVSS X5, (DI)(R10*1) + LEAQ (SI)(R8*2), SI // SI = &(SI[incX*2]) // Increment addresses + LEAQ (DX)(R9*2), DX // DX = &(DX[incY*2]) + LEAQ (DI)(R10*2), DI // DI = &(DI[incDst*2]) + LOOP axpyi_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE axpyi_end + +axpyi_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +axpyi_tail: // do { + MOVSS (SI), X2 // X2 = x[i] + MULSS X1, X2 // X2 *= a + ADDSS (DX), X2 // X2 += y[i] + MOVSS X2, (DI) // dst[i] = X2 + ADDQ R8, SI // SI = &(SI[incX]) + ADDQ R9, DX // DX = &(DX[incY]) + ADDQ R10, DI // DI = &(DI[incY]) + LOOP axpyi_tail // } while --CX > 0 + +axpyi_end: + RET + diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitary_amd64.s new file mode 100644 index 0000000..224b842 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitary_amd64.s @@ -0,0 +1,97 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func AxpyUnitary(alpha float32, x, y []float32) +TEXT ·AxpyUnitary(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SI // SI = &x + MOVQ y_base+32(FP), DI // DI = &y + MOVQ x_len+16(FP), BX // BX = min( len(x), len(y) ) + CMPQ y_len+40(FP), BX + CMOVQLE y_len+40(FP), BX + CMPQ BX, $0 // if BX == 0 { return } + JE axpy_end + MOVSS alpha+0(FP), X0 + SHUFPS $0, X0, X0 // X0 = { a, a, a, a } + XORQ AX, AX // i = 0 + PXOR X2, X2 // 2 NOP instructions (PXOR) to align + PXOR X3, X3 // loop to cache line + MOVQ DI, CX + ANDQ $0xF, CX // Align on 16-byte boundary for ADDPS + JZ axpy_no_trim // if CX == 0 { goto axpy_no_trim } + + XORQ $0xF, CX // CX = 4 - floor( BX % 16 / 4 ) + INCQ CX + SHRQ $2, CX + +axpy_align: // Trim first value(s) in unaligned buffer do { + MOVSS (SI)(AX*4), X2 // X2 = x[i] + MULSS X0, X2 // X2 *= a + ADDSS (DI)(AX*4), X2 // X2 += y[i] + MOVSS X2, (DI)(AX*4) // y[i] = X2 + INCQ AX // i++ + DECQ BX + JZ axpy_end // if --BX == 0 { return } + LOOP axpy_align // } while --CX > 0 + +axpy_no_trim: + MOVUPS X0, X1 // Copy X0 to X1 for pipelining + MOVQ BX, CX + ANDQ $0xF, BX // BX = len % 16 + SHRQ $4, CX // CX = int( len / 16 ) + JZ axpy_tail4_start // if CX == 0 { return } + +axpy_loop: // Loop unrolled 16x do { + MOVUPS (SI)(AX*4), X2 // X2 = x[i:i+4] + MOVUPS 16(SI)(AX*4), X3 + MOVUPS 32(SI)(AX*4), X4 + MOVUPS 48(SI)(AX*4), X5 + MULPS X0, X2 // X2 *= a + MULPS X1, X3 + MULPS X0, X4 + MULPS X1, X5 + ADDPS (DI)(AX*4), X2 // X2 += y[i:i+4] + ADDPS 16(DI)(AX*4), X3 + ADDPS 32(DI)(AX*4), X4 + ADDPS 48(DI)(AX*4), X5 + MOVUPS X2, (DI)(AX*4) // dst[i:i+4] = X2 + MOVUPS X3, 16(DI)(AX*4) + MOVUPS X4, 32(DI)(AX*4) + MOVUPS X5, 48(DI)(AX*4) + ADDQ $16, AX // i += 16 + LOOP axpy_loop // while (--CX) > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE axpy_end + +axpy_tail4_start: // Reset loop counter for 4-wide tail loop + MOVQ BX, CX // CX = floor( BX / 4 ) + SHRQ $2, CX + JZ axpy_tail_start // if CX == 0 { goto axpy_tail_start } + +axpy_tail4: // Loop unrolled 4x do { + MOVUPS (SI)(AX*4), X2 // X2 = x[i] + MULPS X0, X2 // X2 *= a + ADDPS (DI)(AX*4), X2 // X2 += y[i] + MOVUPS X2, (DI)(AX*4) // y[i] = X2 + ADDQ $4, AX // i += 4 + LOOP axpy_tail4 // } while --CX > 0 + +axpy_tail_start: // Reset loop counter for 1-wide tail loop + MOVQ BX, CX // CX = BX % 4 + ANDQ $3, CX + JZ axpy_end // if CX == 0 { return } + +axpy_tail: + MOVSS (SI)(AX*4), X1 // X1 = x[i] + MULSS X0, X1 // X1 *= a + ADDSS (DI)(AX*4), X1 // X1 += y[i] + MOVSS X1, (DI)(AX*4) // y[i] = X1 + INCQ AX // i++ + LOOP axpy_tail // } while --CX > 0 + +axpy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitaryto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitaryto_amd64.s new file mode 100644 index 0000000..e26ccff --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/axpyunitaryto_amd64.s @@ -0,0 +1,98 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
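The single-precision axpy kernels trim scalar iterations until &y is 16-byte aligned so the unrolled loop can feed ADDPS from aligned addresses; the XORQ $0xF / INCQ / SHRQ $2 sequence computes how many float32 elements that takes. A small Go sketch of that count, assuming a 4-byte-aligned slice base (leadingElems is an illustrative name):

package main

import "fmt"

// leadingElems mirrors the trim-count computation: given the low four bits
// of a float32 pointer (a multiple of 4), it returns the number of elements
// to process before the address reaches the next 16-byte boundary.
func leadingElems(addrLow4 uint) uint {
	return ((addrLow4 ^ 0xF) + 1) >> 2 // == (16 - addrLow4) / 4
}

func main() {
	for _, r := range []uint{4, 8, 12} {
		fmt.Println(r, "->", leadingElems(r)) // 4 -> 3, 8 -> 2, 12 -> 1
	}
}

The r == 0 case never reaches this computation because the kernels branch straight to the aligned loop when the pointer is already aligned.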
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func AxpyUnitaryTo(dst []float32, alpha float32, x, y []float32) +TEXT ·AxpyUnitaryTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ x_base+32(FP), SI // SI = &x + MOVQ y_base+56(FP), DX // DX = &y + MOVQ x_len+40(FP), BX // BX = min( len(x), len(y), len(dst) ) + CMPQ y_len+64(FP), BX + CMOVQLE y_len+64(FP), BX + CMPQ dst_len+8(FP), BX + CMOVQLE dst_len+8(FP), BX + CMPQ BX, $0 // if BX == 0 { return } + JE axpy_end + MOVSS alpha+24(FP), X0 + SHUFPS $0, X0, X0 // X0 = { a, a, a, a, } + XORQ AX, AX // i = 0 + MOVQ DX, CX + ANDQ $0xF, CX // Align on 16-byte boundary for ADDPS + JZ axpy_no_trim // if CX == 0 { goto axpy_no_trim } + + XORQ $0xF, CX // CX = 4 - floor ( B % 16 / 4 ) + INCQ CX + SHRQ $2, CX + +axpy_align: // Trim first value(s) in unaligned buffer do { + MOVSS (SI)(AX*4), X2 // X2 = x[i] + MULSS X0, X2 // X2 *= a + ADDSS (DX)(AX*4), X2 // X2 += y[i] + MOVSS X2, (DI)(AX*4) // y[i] = X2 + INCQ AX // i++ + DECQ BX + JZ axpy_end // if --BX == 0 { return } + LOOP axpy_align // } while --CX > 0 + +axpy_no_trim: + MOVUPS X0, X1 // Copy X0 to X1 for pipelining + MOVQ BX, CX + ANDQ $0xF, BX // BX = len % 16 + SHRQ $4, CX // CX = floor( len / 16 ) + JZ axpy_tail4_start // if CX == 0 { return } + +axpy_loop: // Loop unrolled 16x do { + MOVUPS (SI)(AX*4), X2 // X2 = x[i:i+4] + MOVUPS 16(SI)(AX*4), X3 + MOVUPS 32(SI)(AX*4), X4 + MOVUPS 48(SI)(AX*4), X5 + MULPS X0, X2 // X2 *= a + MULPS X1, X3 + MULPS X0, X4 + MULPS X1, X5 + ADDPS (DX)(AX*4), X2 // X2 += y[i:i+4] + ADDPS 16(DX)(AX*4), X3 + ADDPS 32(DX)(AX*4), X4 + ADDPS 48(DX)(AX*4), X5 + MOVUPS X2, (DI)(AX*4) // dst[i:i+4] = X2 + MOVUPS X3, 16(DI)(AX*4) + MOVUPS X4, 32(DI)(AX*4) + MOVUPS X5, 48(DI)(AX*4) + ADDQ $16, AX // i += 16 + LOOP axpy_loop // while (--CX) > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE axpy_end + +axpy_tail4_start: // Reset loop counter for 4-wide tail loop + MOVQ BX, CX // CX = floor( BX / 4 ) + SHRQ $2, CX + JZ axpy_tail_start // if CX == 0 { goto axpy_tail_start } + +axpy_tail4: // Loop unrolled 4x do { + MOVUPS (SI)(AX*4), X2 // X2 = x[i] + MULPS X0, X2 // X2 *= a + ADDPS (DX)(AX*4), X2 // X2 += y[i] + MOVUPS X2, (DI)(AX*4) // y[i] = X2 + ADDQ $4, AX // i += 4 + LOOP axpy_tail4 // } while --CX > 0 + +axpy_tail_start: // Reset loop counter for 1-wide tail loop + MOVQ BX, CX // CX = BX % 4 + ANDQ $3, CX + JZ axpy_end // if CX == 0 { return } + +axpy_tail: + MOVSS (SI)(AX*4), X1 // X1 = x[i] + MULSS X0, X1 // X1 *= a + ADDSS (DX)(AX*4), X1 // X1 += y[i] + MOVSS X1, (DI)(AX*4) // y[i] = X1 + INCQ AX // i++ + LOOP axpy_tail // } while --CX > 0 + +axpy_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotinc_amd64.s new file mode 100644 index 0000000..de9e312 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotinc_amd64.s @@ -0,0 +1,91 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R10 +#define INC_Y R9 +#define INCx3_Y R11 +#define SUM X0 +#define P_SUM X1 + +// func DdotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float64) +TEXT ·DdotInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ n+48(FP), LEN // LEN = n + PXOR SUM, SUM // SUM = 0 + CMPQ LEN, $0 + JE dot_end + + MOVQ ix+72(FP), INC_X // INC_X = ix + MOVQ iy+80(FP), INC_Y // INC_Y = iy + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(x[ix]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(y[iy]) + + MOVQ incX+56(FP), INC_X // INC_X = incX * sizeof(float32) + SHLQ $2, INC_X + MOVQ incY+64(FP), INC_Y // INC_Y = incY * sizeof(float32) + SHLQ $2, INC_Y + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ dot_tail // if LEN == 0 { goto dot_tail } + + PXOR P_SUM, P_SUM // P_SUM = 0 for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + +dot_loop: // Loop unrolled 4x do { + CVTSS2SD (X_PTR), X2 // X_i = x[i:i+1] + CVTSS2SD (X_PTR)(INC_X*1), X3 + CVTSS2SD (X_PTR)(INC_X*2), X4 + CVTSS2SD (X_PTR)(INCx3_X*1), X5 + + CVTSS2SD (Y_PTR), X6 // X_j = y[i:i+1] + CVTSS2SD (Y_PTR)(INC_Y*1), X7 + CVTSS2SD (Y_PTR)(INC_Y*2), X8 + CVTSS2SD (Y_PTR)(INCx3_Y*1), X9 + + MULSD X6, X2 // X_i *= X_j + MULSD X7, X3 + MULSD X8, X4 + MULSD X9, X5 + + ADDSD X2, SUM // SUM += X_i + ADDSD X3, P_SUM + ADDSD X4, SUM + ADDSD X5, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[INC_X * 4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[INC_Y * 4]) + + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + + ADDSD P_SUM, SUM // SUM += P_SUM + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + CVTSS2SD (X_PTR), X2 // X2 = x[i] + CVTSS2SD (Y_PTR), X3 // X2 *= y[i] + MULSD X3, X2 + ADDSD X2, SUM // SUM += X2 + ADDQ INC_X, X_PTR // X_PTR += INC_X + ADDQ INC_Y, Y_PTR // Y_PTR += INC_Y + DECQ TAIL + JNZ dot_tail // } while --TAIL > 0 + +dot_end: + MOVSD SUM, sum+88(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotunitary_amd64.s new file mode 100644 index 0000000..d39ab78 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/ddotunitary_amd64.s @@ -0,0 +1,110 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define HADDPD_SUM_SUM LONG $0xC07C0F66 // @ HADDPD X0, X0 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define IDX AX +#define SUM X0 +#define P_SUM X1 + +// func DdotUnitary(x, y []float32) (sum float32) +TEXT ·DdotUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + PXOR SUM, SUM // psum = 0 + CMPQ LEN, $0 + JE dot_end + + XORQ IDX, IDX + MOVQ Y_PTR, DX + ANDQ $0xF, DX // Align on 16-byte boundary for ADDPS + JZ dot_no_trim // if DX == 0 { goto dot_no_trim } + + SUBQ $16, DX + +dot_align: // Trim first value(s) in unaligned buffer do { + CVTSS2SD (X_PTR)(IDX*4), X2 // X2 = float64(x[i]) + CVTSS2SD (Y_PTR)(IDX*4), X3 // X3 = float64(y[i]) + MULSD X3, X2 + ADDSD X2, SUM // SUM += X2 + INCQ IDX // IDX++ + DECQ LEN + JZ dot_end // if --TAIL == 0 { return } + ADDQ $4, DX + JNZ dot_align // } while --LEN > 0 + +dot_no_trim: + PXOR P_SUM, P_SUM // P_SUM = 0 for pipelining + MOVQ LEN, TAIL + ANDQ $0x7, TAIL // TAIL = LEN % 8 + SHRQ $3, LEN // LEN = floor( LEN / 8 ) + JZ dot_tail_start // if LEN == 0 { goto dot_tail_start } + +dot_loop: // Loop unrolled 8x do { + CVTPS2PD (X_PTR)(IDX*4), X2 // X_i = x[i:i+1] + CVTPS2PD 8(X_PTR)(IDX*4), X3 + CVTPS2PD 16(X_PTR)(IDX*4), X4 + CVTPS2PD 24(X_PTR)(IDX*4), X5 + + CVTPS2PD (Y_PTR)(IDX*4), X6 // X_j = y[i:i+1] + CVTPS2PD 8(Y_PTR)(IDX*4), X7 + CVTPS2PD 16(Y_PTR)(IDX*4), X8 + CVTPS2PD 24(Y_PTR)(IDX*4), X9 + + MULPD X6, X2 // X_i *= X_j + MULPD X7, X3 + MULPD X8, X4 + MULPD X9, X5 + + ADDPD X2, SUM // SUM += X_i + ADDPD X3, P_SUM + ADDPD X4, SUM + ADDPD X5, P_SUM + + ADDQ $8, IDX // IDX += 8 + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + + ADDPD P_SUM, SUM // SUM += P_SUM + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail_start: + MOVQ TAIL, LEN + SHRQ $1, LEN + JZ dot_tail_one + +dot_tail_two: + CVTPS2PD (X_PTR)(IDX*4), X2 // X_i = x[i:i+1] + CVTPS2PD (Y_PTR)(IDX*4), X6 // X_j = y[i:i+1] + MULPD X6, X2 // X_i *= X_j + ADDPD X2, SUM // SUM += X_i + ADDQ $2, IDX // IDX += 2 + DECQ LEN + JNZ dot_tail_two // } while --LEN > 0 + + ANDQ $1, TAIL + JZ dot_end + +dot_tail_one: + CVTSS2SD (X_PTR)(IDX*4), X2 // X2 = float64(x[i]) + CVTSS2SD (Y_PTR)(IDX*4), X3 // X3 = float64(y[i]) + MULSD X3, X2 // X2 *= X3 + ADDSD X2, SUM // SUM += X2 + +dot_end: + HADDPD_SUM_SUM // SUM = \sum{ SUM[i] } + MOVSD SUM, sum+48(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/doc.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/doc.go new file mode 100644 index 0000000..408847a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package f32 provides float32 vector primitives. +package f32 // import "gonum.org/v1/gonum/internal/asm/f32" diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/dotinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/dotinc_amd64.s new file mode 100644 index 0000000..b6f4021 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/dotinc_amd64.s @@ -0,0 +1,85 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
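The Ddot kernels above widen every float32 operand to float64 (CVTSS2SD for scalars, CVTPS2PD for pairs) and accumulate the products in double precision. A scalar Go sketch of that accumulation order, assuming equal-length slices for brevity (ddot64 is an illustrative name; the unitary kernel clamps to the shorter slice):

package main

import "fmt"

// ddot64 forms each product and the running sum in float64, matching the
// widening the assembly performs before MULSD/MULPD and ADDSD/ADDPD.
func ddot64(x, y []float32) float64 {
	var sum float64
	for i := range x {
		sum += float64(x[i]) * float64(y[i])
	}
	return sum
}

func main() {
	x := []float32{1e-4, 2e-4, 3e-4}
	y := []float32{4, 5, 6}
	fmt.Println(ddot64(x, y)) // products and sum are carried in float64
}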
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R10 +#define INC_Y R9 +#define INCx3_Y R11 +#define SUM X0 +#define P_SUM X1 + +// func DotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float32) +TEXT ·DotInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + MOVQ n+48(FP), LEN // LEN = n + CMPQ LEN, $0 + JE dot_end + + MOVQ ix+72(FP), INC_X // INC_X = ix + MOVQ iy+80(FP), INC_Y // INC_Y = iy + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(x[ix]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(y[iy]) + + MOVQ incX+56(FP), INC_X // INC_X := incX * sizeof(float32) + SHLQ $2, INC_X + MOVQ incY+64(FP), INC_Y // INC_Y := incY * sizeof(float32) + SHLQ $2, INC_Y + + MOVQ LEN, TAIL + ANDQ $0x3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ dot_tail // if LEN == 0 { goto dot_tail } + + PXOR P_SUM, P_SUM // P_SUM = 0 for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + +dot_loop: // Loop unrolled 4x do { + MOVSS (X_PTR), X2 // X_i = x[i:i+1] + MOVSS (X_PTR)(INC_X*1), X3 + MOVSS (X_PTR)(INC_X*2), X4 + MOVSS (X_PTR)(INCx3_X*1), X5 + + MULSS (Y_PTR), X2 // X_i *= y[i:i+1] + MULSS (Y_PTR)(INC_Y*1), X3 + MULSS (Y_PTR)(INC_Y*2), X4 + MULSS (Y_PTR)(INCx3_Y*1), X5 + + ADDSS X2, SUM // SUM += X_i + ADDSS X3, P_SUM + ADDSS X4, SUM + ADDSS X5, P_SUM + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[INC_X * 4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[INC_Y * 4]) + + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + + ADDSS P_SUM, SUM // P_SUM += SUM + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail: // do { + MOVSS (X_PTR), X2 // X2 = x[i] + MULSS (Y_PTR), X2 // X2 *= y[i] + ADDSS X2, SUM // SUM += X2 + ADDQ INC_X, X_PTR // X_PTR += INC_X + ADDQ INC_Y, Y_PTR // Y_PTR += INC_Y + DECQ TAIL + JNZ dot_tail // } while --TAIL > 0 + +dot_end: + MOVSS SUM, sum+88(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/dotunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/dotunitary_amd64.s new file mode 100644 index 0000000..fd4f7b4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/dotunitary_amd64.s @@ -0,0 +1,106 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define HADDPS_SUM_SUM LONG $0xC07C0FF2 // @ HADDPS X0, X0 + +#define X_PTR SI +#define Y_PTR DI +#define LEN CX +#define TAIL BX +#define IDX AX +#define SUM X0 +#define P_SUM X1 + +// func DotUnitary(x, y []float32) (sum float32) +TEXT ·DotUnitary(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ y_base+24(FP), Y_PTR // Y_PTR = &y + PXOR SUM, SUM // SUM = 0 + MOVQ x_len+8(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+32(FP), LEN + CMOVQLE y_len+32(FP), LEN + CMPQ LEN, $0 + JE dot_end + + XORQ IDX, IDX + MOVQ Y_PTR, DX + ANDQ $0xF, DX // Align on 16-byte boundary for MULPS + JZ dot_no_trim // if DX == 0 { goto dot_no_trim } + SUBQ $16, DX + +dot_align: // Trim first value(s) in unaligned buffer do { + MOVSS (X_PTR)(IDX*4), X2 // X2 = x[i] + MULSS (Y_PTR)(IDX*4), X2 // X2 *= y[i] + ADDSS X2, SUM // SUM += X2 + INCQ IDX // IDX++ + DECQ LEN + JZ dot_end // if --TAIL == 0 { return } + ADDQ $4, DX + JNZ dot_align // } while --DX > 0 + +dot_no_trim: + PXOR P_SUM, P_SUM // P_SUM = 0 for pipelining + MOVQ LEN, TAIL + ANDQ $0xF, TAIL // TAIL = LEN % 16 + SHRQ $4, LEN // LEN = floor( LEN / 16 ) + JZ dot_tail4_start // if LEN == 0 { goto dot_tail4_start } + +dot_loop: // Loop unrolled 16x do { + MOVUPS (X_PTR)(IDX*4), X2 // X_i = x[i:i+1] + MOVUPS 16(X_PTR)(IDX*4), X3 + MOVUPS 32(X_PTR)(IDX*4), X4 + MOVUPS 48(X_PTR)(IDX*4), X5 + + MULPS (Y_PTR)(IDX*4), X2 // X_i *= y[i:i+1] + MULPS 16(Y_PTR)(IDX*4), X3 + MULPS 32(Y_PTR)(IDX*4), X4 + MULPS 48(Y_PTR)(IDX*4), X5 + + ADDPS X2, SUM // SUM += X_i + ADDPS X3, P_SUM + ADDPS X4, SUM + ADDPS X5, P_SUM + + ADDQ $16, IDX // IDX += 16 + DECQ LEN + JNZ dot_loop // } while --LEN > 0 + + ADDPS P_SUM, SUM // SUM += P_SUM + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE dot_end + +dot_tail4_start: // Reset loop counter for 4-wide tail loop + MOVQ TAIL, LEN // LEN = floor( TAIL / 4 ) + SHRQ $2, LEN + JZ dot_tail_start // if LEN == 0 { goto dot_tail_start } + +dot_tail4_loop: // Loop unrolled 4x do { + MOVUPS (X_PTR)(IDX*4), X2 // X_i = x[i:i+1] + MULPS (Y_PTR)(IDX*4), X2 // X_i *= y[i:i+1] + ADDPS X2, SUM // SUM += X_i + ADDQ $4, IDX // i += 4 + DECQ LEN + JNZ dot_tail4_loop // } while --LEN > 0 + +dot_tail_start: // Reset loop counter for 1-wide tail loop + ANDQ $3, TAIL // TAIL = TAIL % 4 + JZ dot_end // if TAIL == 0 { return } + +dot_tail: // do { + MOVSS (X_PTR)(IDX*4), X2 // X2 = x[i] + MULSS (Y_PTR)(IDX*4), X2 // X2 *= y[i] + ADDSS X2, SUM // psum += X2 + INCQ IDX // IDX++ + DECQ TAIL + JNZ dot_tail // } while --TAIL > 0 + +dot_end: + HADDPS_SUM_SUM // SUM = \sum{ SUM[i] } + HADDPS_SUM_SUM + MOVSS SUM, sum+48(FP) // return SUM + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.go new file mode 100644 index 0000000..2b336a2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.go @@ -0,0 +1,15 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +package f32 + +// Ger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. 
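A scalar Go sketch of the rank-one update just described, under the simplifying assumptions of unit strides and row-major A with leading dimension lda (gerRef is an illustrative name; the assembly kernel additionally handles arbitrary incX and incY and blocks the work into SSE tiles):

package main

import "fmt"

// gerRef applies A += alpha * x * y^T one row at a time.
func gerRef(m, n int, alpha float32, x, y, a []float32, lda int) {
	for i := 0; i < m; i++ {
		tmp := alpha * x[i]
		row := a[i*lda : i*lda+n]
		for j, v := range y[:n] {
			row[j] += tmp * v
		}
	}
}

func main() {
	a := make([]float32, 2*3)
	gerRef(2, 3, 2, []float32{1, 2}, []float32{1, 10, 100}, a, 3)
	fmt.Println(a) // [2 20 200 4 40 400]
}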
+func Ger(m, n uintptr, alpha float32, + x []float32, incX uintptr, + y []float32, incY uintptr, + a []float32, lda uintptr) diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.s new file mode 100644 index 0000000..ecb2641 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_amd64.s @@ -0,0 +1,757 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SIZE 4 +#define BITSIZE 2 +#define KERNELSIZE 3 + +#define M_DIM m+0(FP) +#define M CX +#define N_DIM n+8(FP) +#define N BX + +#define TMP1 R14 +#define TMP2 R15 + +#define X_PTR SI +#define Y y_base+56(FP) +#define Y_PTR DX +#define A_ROW AX +#define A_PTR DI + +#define INC_X R8 +#define INC3_X R9 + +#define INC_Y R10 +#define INC3_Y R11 + +#define LDA R12 +#define LDA3 R13 + +#define ALPHA X0 +#define ALPHA_SPILL al-16(SP) + +#define LOAD_ALPHA \ + MOVSS alpha+16(FP), ALPHA \ + SHUFPS $0, ALPHA, ALPHA + +#define LOAD_SCALED4 \ + PREFETCHNTA 16*SIZE(X_PTR) \ + MOVDDUP (X_PTR), X1 \ + MOVDDUP 2*SIZE(X_PTR), X3 \ + MOVSHDUP X1, X2 \ + MOVSHDUP X3, X4 \ + MOVSLDUP X1, X1 \ + MOVSLDUP X3, X3 \ + MULPS ALPHA, X1 \ + MULPS ALPHA, X2 \ + MULPS ALPHA, X3 \ + MULPS ALPHA, X4 + +#define LOAD_SCALED2 \ + MOVDDUP (X_PTR), X1 \ + MOVSHDUP X1, X2 \ + MOVSLDUP X1, X1 \ + MULPS ALPHA, X1 \ + MULPS ALPHA, X2 + +#define LOAD_SCALED1 \ + MOVSS (X_PTR), X1 \ + SHUFPS $0, X1, X1 \ + MULPS ALPHA, X1 + +#define LOAD_SCALED4_INC \ + PREFETCHNTA (X_PTR)(INC_X*8) \ + MOVSS (X_PTR), X1 \ + MOVSS (X_PTR)(INC_X*1), X2 \ + MOVSS (X_PTR)(INC_X*2), X3 \ + MOVSS (X_PTR)(INC3_X*1), X4 \ + SHUFPS $0, X1, X1 \ + SHUFPS $0, X2, X2 \ + SHUFPS $0, X3, X3 \ + SHUFPS $0, X4, X4 \ + MULPS ALPHA, X1 \ + MULPS ALPHA, X2 \ + MULPS ALPHA, X3 \ + MULPS ALPHA, X4 + +#define LOAD_SCALED2_INC \ + MOVSS (X_PTR), X1 \ + MOVSS (X_PTR)(INC_X*1), X2 \ + SHUFPS $0, X1, X1 \ + SHUFPS $0, X2, X2 \ + MULPS ALPHA, X1 \ + MULPS ALPHA, X2 + +#define KERNEL_LOAD8 \ + MOVUPS (Y_PTR), X5 \ + MOVUPS 4*SIZE(Y_PTR), X6 + +#define KERNEL_LOAD8_INC \ + MOVSS (Y_PTR), X5 \ + MOVSS (Y_PTR)(INC_Y*1), X6 \ + MOVSS (Y_PTR)(INC_Y*2), X7 \ + MOVSS (Y_PTR)(INC3_Y*1), X8 \ + UNPCKLPS X6, X5 \ + UNPCKLPS X8, X7 \ + MOVLHPS X7, X5 \ + LEAQ (Y_PTR)(INC_Y*4), Y_PTR \ + MOVSS (Y_PTR), X6 \ + MOVSS (Y_PTR)(INC_Y*1), X7 \ + MOVSS (Y_PTR)(INC_Y*2), X8 \ + MOVSS (Y_PTR)(INC3_Y*1), X9 \ + UNPCKLPS X7, X6 \ + UNPCKLPS X9, X8 \ + MOVLHPS X8, X6 + +#define KERNEL_LOAD4 \ + MOVUPS (Y_PTR), X5 + +#define KERNEL_LOAD4_INC \ + MOVSS (Y_PTR), X5 \ + MOVSS (Y_PTR)(INC_Y*1), X6 \ + MOVSS (Y_PTR)(INC_Y*2), X7 \ + MOVSS (Y_PTR)(INC3_Y*1), X8 \ + UNPCKLPS X6, X5 \ + UNPCKLPS X8, X7 \ + MOVLHPS X7, X5 + +#define KERNEL_LOAD2 \ + MOVSD (Y_PTR), X5 + +#define KERNEL_LOAD2_INC \ + MOVSS (Y_PTR), X5 \ + MOVSS (Y_PTR)(INC_Y*1), X6 \ + UNPCKLPS X6, X5 + +#define KERNEL_4x8 \ + MOVUPS X5, X7 \ + MOVUPS X6, X8 \ + MOVUPS X5, X9 \ + MOVUPS X6, X10 \ + MOVUPS X5, X11 \ + MOVUPS X6, X12 \ + MULPS X1, X5 \ + MULPS X1, X6 \ + MULPS X2, X7 \ + MULPS X2, X8 \ + MULPS X3, X9 \ + MULPS X3, X10 \ + MULPS X4, X11 \ + MULPS X4, X12 + +#define STORE_4x8 \ + MOVUPS ALPHA, ALPHA_SPILL \ + MOVUPS (A_PTR), X13 \ + ADDPS X13, X5 \ + MOVUPS 4*SIZE(A_PTR), X14 \ + ADDPS X14, X6 \ + MOVUPS (A_PTR)(LDA*1), X15 \ + ADDPS X15, X7 \ + MOVUPS 4*SIZE(A_PTR)(LDA*1), X0 \ + ADDPS X0, X8 \ + MOVUPS (A_PTR)(LDA*2), 
X13 \ + ADDPS X13, X9 \ + MOVUPS 4*SIZE(A_PTR)(LDA*2), X14 \ + ADDPS X14, X10 \ + MOVUPS (A_PTR)(LDA3*1), X15 \ + ADDPS X15, X11 \ + MOVUPS 4*SIZE(A_PTR)(LDA3*1), X0 \ + ADDPS X0, X12 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, 4*SIZE(A_PTR) \ + MOVUPS X7, (A_PTR)(LDA*1) \ + MOVUPS X8, 4*SIZE(A_PTR)(LDA*1) \ + MOVUPS X9, (A_PTR)(LDA*2) \ + MOVUPS X10, 4*SIZE(A_PTR)(LDA*2) \ + MOVUPS X11, (A_PTR)(LDA3*1) \ + MOVUPS X12, 4*SIZE(A_PTR)(LDA3*1) \ + MOVUPS ALPHA_SPILL, ALPHA \ + ADDQ $8*SIZE, A_PTR + +#define KERNEL_4x4 \ + MOVUPS X5, X6 \ + MOVUPS X5, X7 \ + MOVUPS X5, X8 \ + MULPS X1, X5 \ + MULPS X2, X6 \ + MULPS X3, X7 \ + MULPS X4, X8 + +#define STORE_4x4 \ + MOVUPS (A_PTR), X13 \ + ADDPS X13, X5 \ + MOVUPS (A_PTR)(LDA*1), X14 \ + ADDPS X14, X6 \ + MOVUPS (A_PTR)(LDA*2), X15 \ + ADDPS X15, X7 \ + MOVUPS (A_PTR)(LDA3*1), X13 \ + ADDPS X13, X8 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, (A_PTR)(LDA*1) \ + MOVUPS X7, (A_PTR)(LDA*2) \ + MOVUPS X8, (A_PTR)(LDA3*1) \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_4x2 \ + MOVUPS X5, X6 \ + MOVUPS X5, X7 \ + MOVUPS X5, X8 \ + MULPS X1, X5 \ + MULPS X2, X6 \ + MULPS X3, X7 \ + MULPS X4, X8 + +#define STORE_4x2 \ + MOVSD (A_PTR), X9 \ + ADDPS X9, X5 \ + MOVSD (A_PTR)(LDA*1), X10 \ + ADDPS X10, X6 \ + MOVSD (A_PTR)(LDA*2), X11 \ + ADDPS X11, X7 \ + MOVSD (A_PTR)(LDA3*1), X12 \ + ADDPS X12, X8 \ + MOVSD X5, (A_PTR) \ + MOVSD X6, (A_PTR)(LDA*1) \ + MOVSD X7, (A_PTR)(LDA*2) \ + MOVSD X8, (A_PTR)(LDA3*1) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_4x1 \ + MOVSS (Y_PTR), X5 \ + MOVSS X5, X6 \ + MOVSS X5, X7 \ + MOVSS X5, X8 \ + MULSS X1, X5 \ + MULSS X2, X6 \ + MULSS X3, X7 \ + MULSS X4, X8 + +#define STORE_4x1 \ + ADDSS (A_PTR), X5 \ + ADDSS (A_PTR)(LDA*1), X6 \ + ADDSS (A_PTR)(LDA*2), X7 \ + ADDSS (A_PTR)(LDA3*1), X8 \ + MOVSS X5, (A_PTR) \ + MOVSS X6, (A_PTR)(LDA*1) \ + MOVSS X7, (A_PTR)(LDA*2) \ + MOVSS X8, (A_PTR)(LDA3*1) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_2x8 \ + MOVUPS X5, X7 \ + MOVUPS X6, X8 \ + MULPS X1, X5 \ + MULPS X1, X6 \ + MULPS X2, X7 \ + MULPS X2, X8 + +#define STORE_2x8 \ + MOVUPS (A_PTR), X9 \ + ADDPS X9, X5 \ + MOVUPS 4*SIZE(A_PTR), X10 \ + ADDPS X10, X6 \ + MOVUPS (A_PTR)(LDA*1), X11 \ + ADDPS X11, X7 \ + MOVUPS 4*SIZE(A_PTR)(LDA*1), X12 \ + ADDPS X12, X8 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, 4*SIZE(A_PTR) \ + MOVUPS X7, (A_PTR)(LDA*1) \ + MOVUPS X8, 4*SIZE(A_PTR)(LDA*1) \ + ADDQ $8*SIZE, A_PTR + +#define KERNEL_2x4 \ + MOVUPS X5, X6 \ + MULPS X1, X5 \ + MULPS X2, X6 + +#define STORE_2x4 \ + MOVUPS (A_PTR), X9 \ + ADDPS X9, X5 \ + MOVUPS (A_PTR)(LDA*1), X11 \ + ADDPS X11, X6 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, (A_PTR)(LDA*1) \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_2x2 \ + MOVSD X5, X6 \ + MULPS X1, X5 \ + MULPS X2, X6 + +#define STORE_2x2 \ + MOVSD (A_PTR), X7 \ + ADDPS X7, X5 \ + MOVSD (A_PTR)(LDA*1), X8 \ + ADDPS X8, X6 \ + MOVSD X5, (A_PTR) \ + MOVSD X6, (A_PTR)(LDA*1) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_2x1 \ + MOVSS (Y_PTR), X5 \ + MOVSS X5, X6 \ + MULSS X1, X5 \ + MULSS X2, X6 + +#define STORE_2x1 \ + ADDSS (A_PTR), X5 \ + ADDSS (A_PTR)(LDA*1), X6 \ + MOVSS X5, (A_PTR) \ + MOVSS X6, (A_PTR)(LDA*1) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_1x8 \ + MULPS X1, X5 \ + MULPS X1, X6 + +#define STORE_1x8 \ + MOVUPS (A_PTR), X7 \ + ADDPS X7, X5 \ + MOVUPS 4*SIZE(A_PTR), X8 \ + ADDPS X8, X6 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, 4*SIZE(A_PTR) \ + ADDQ $8*SIZE, A_PTR + +#define KERNEL_1x4 \ + MULPS X1, X5 \ + MULPS X1, X6 + +#define STORE_1x4 \ + MOVUPS (A_PTR), X7 \ + ADDPS X7, X5 \ + MOVUPS X5, (A_PTR) \ + ADDQ $4*SIZE, A_PTR + +#define 
KERNEL_1x2 \ + MULPS X1, X5 + +#define STORE_1x2 \ + MOVSD (A_PTR), X6 \ + ADDPS X6, X5 \ + MOVSD X5, (A_PTR) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_1x1 \ + MOVSS (Y_PTR), X5 \ + MULSS X1, X5 + +#define STORE_1x1 \ + ADDSS (A_PTR), X5 \ + MOVSS X5, (A_PTR) \ + ADDQ $SIZE, A_PTR + +// func Ger(m, n uintptr, alpha float32, +// x []float32, incX uintptr, +// y []float32, incY uintptr, +// a []float32, lda uintptr) +TEXT ·Ger(SB), 0, $16-120 + MOVQ M_DIM, M + MOVQ N_DIM, N + CMPQ M, $0 + JE end + CMPQ N, $0 + JE end + + LOAD_ALPHA + + MOVQ x_base+24(FP), X_PTR + MOVQ y_base+56(FP), Y_PTR + MOVQ a_base+88(FP), A_ROW + MOVQ A_ROW, A_PTR + MOVQ lda+112(FP), LDA // LDA = LDA * sizeof(float32) + SHLQ $BITSIZE, LDA + LEAQ (LDA)(LDA*2), LDA3 // LDA3 = LDA * 3 + + CMPQ incY+80(FP), $1 // Check for dense vector Y (fast-path) + JNE inc + CMPQ incX+48(FP), $1 // Check for dense vector X (fast-path) + JNE inc + + SHRQ $2, M + JZ r2 + +r4: + + // LOAD 4 + LOAD_SCALED4 + + MOVQ N_DIM, N + SHRQ $KERNELSIZE, N + JZ r4c4 + +r4c8: + // 4x8 KERNEL + KERNEL_LOAD8 + KERNEL_4x8 + STORE_4x8 + + ADDQ $8*SIZE, Y_PTR + + DECQ N + JNZ r4c8 + +r4c4: + TESTQ $4, N_DIM + JZ r4c2 + + // 4x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x4 + STORE_4x4 + + ADDQ $4*SIZE, Y_PTR + +r4c2: + TESTQ $2, N_DIM + JZ r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2 + KERNEL_4x2 + STORE_4x2 + + ADDQ $2*SIZE, Y_PTR + +r4c1: + TESTQ $1, N_DIM + JZ r4end + + // 4x1 KERNEL + KERNEL_4x1 + STORE_4x1 + + ADDQ $SIZE, Y_PTR + +r4end: + ADDQ $4*SIZE, X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ r4 + +r2: + TESTQ $2, M_DIM + JZ r1 + + // LOAD 2 + LOAD_SCALED2 + + MOVQ N_DIM, N + SHRQ $KERNELSIZE, N + JZ r2c4 + +r2c8: + // 2x8 KERNEL + KERNEL_LOAD8 + KERNEL_2x8 + STORE_2x8 + + ADDQ $8*SIZE, Y_PTR + + DECQ N + JNZ r2c8 + +r2c4: + TESTQ $4, N_DIM + JZ r2c2 + + // 2x4 KERNEL + KERNEL_LOAD4 + KERNEL_2x4 + STORE_2x4 + + ADDQ $4*SIZE, Y_PTR + +r2c2: + TESTQ $2, N_DIM + JZ r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x2 + STORE_2x2 + + ADDQ $2*SIZE, Y_PTR + +r2c1: + TESTQ $1, N_DIM + JZ r2end + + // 2x1 KERNEL + KERNEL_2x1 + STORE_2x1 + + ADDQ $SIZE, Y_PTR + +r2end: + ADDQ $2*SIZE, X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +r1: + TESTQ $1, M_DIM + JZ end + + // LOAD 1 + LOAD_SCALED1 + + MOVQ N_DIM, N + SHRQ $KERNELSIZE, N + JZ r1c4 + +r1c8: + // 1x8 KERNEL + KERNEL_LOAD8 + KERNEL_1x8 + STORE_1x8 + + ADDQ $8*SIZE, Y_PTR + + DECQ N + JNZ r1c8 + +r1c4: + TESTQ $4, N_DIM + JZ r1c2 + + // 1x4 KERNEL + KERNEL_LOAD4 + KERNEL_1x4 + STORE_1x4 + + ADDQ $4*SIZE, Y_PTR + +r1c2: + TESTQ $2, N_DIM + JZ r1c1 + + // 1x2 KERNEL + KERNEL_LOAD2 + KERNEL_1x2 + STORE_1x2 + + ADDQ $2*SIZE, Y_PTR + +r1c1: + TESTQ $1, N_DIM + JZ end + + // 1x1 KERNEL + KERNEL_1x1 + STORE_1x1 + +end: + RET + +inc: // Algorithm for incY != 0 ( split loads in kernel ) + + MOVQ incX+48(FP), INC_X // INC_X = incX * sizeof(float32) + SHLQ $BITSIZE, INC_X + MOVQ incY+80(FP), INC_Y // INC_Y = incY * sizeof(float32) + SHLQ $BITSIZE, INC_Y + LEAQ (INC_X)(INC_X*2), INC3_X // INC3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INC3_Y // INC3_Y = INC_Y * 3 + + XORQ TMP2, TMP2 + MOVQ M, TMP1 + SUBQ $1, TMP1 + IMULQ INC_X, TMP1 + NEGQ TMP1 + CMPQ INC_X, $0 + CMOVQLT TMP1, TMP2 + LEAQ (X_PTR)(TMP2*SIZE), X_PTR + + XORQ TMP2, TMP2 + MOVQ N, TMP1 + SUBQ $1, TMP1 + IMULQ INC_Y, TMP1 + NEGQ TMP1 + CMPQ INC_Y, $0 + CMOVQLT TMP1, TMP2 + LEAQ (Y_PTR)(TMP2*SIZE), Y_PTR + + SHRQ $2, M + JZ inc_r2 + +inc_r4: + // LOAD 4 + LOAD_SCALED4_INC + + MOVQ N_DIM, N + SHRQ 
$KERNELSIZE, N + JZ inc_r4c4 + +inc_r4c8: + // 4x4 KERNEL + KERNEL_LOAD8_INC + KERNEL_4x8 + STORE_4x8 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r4c8 + +inc_r4c4: + TESTQ $4, N_DIM + JZ inc_r4c2 + + // 4x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x4 + STORE_4x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + +inc_r4c2: + TESTQ $2, N_DIM + JZ inc_r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_4x2 + STORE_4x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r4c1: + TESTQ $1, N_DIM + JZ inc_r4end + + // 4x1 KERNEL + KERNEL_4x1 + STORE_4x1 + + ADDQ INC_Y, Y_PTR + +inc_r4end: + LEAQ (X_PTR)(INC_X*4), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ inc_r4 + +inc_r2: + TESTQ $2, M_DIM + JZ inc_r1 + + // LOAD 2 + LOAD_SCALED2_INC + + MOVQ N_DIM, N + SHRQ $KERNELSIZE, N + JZ inc_r2c4 + +inc_r2c8: + // 2x8 KERNEL + KERNEL_LOAD8_INC + KERNEL_2x8 + STORE_2x8 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r2c8 + +inc_r2c4: + TESTQ $4, N_DIM + JZ inc_r2c2 + + // 2x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_2x4 + STORE_2x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + +inc_r2c2: + TESTQ $2, N_DIM + JZ inc_r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x2 + STORE_2x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r2c1: + TESTQ $1, N_DIM + JZ inc_r2end + + // 2x1 KERNEL + KERNEL_2x1 + STORE_2x1 + + ADDQ INC_Y, Y_PTR + +inc_r2end: + LEAQ (X_PTR)(INC_X*2), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +inc_r1: + TESTQ $1, M_DIM + JZ end + + // LOAD 1 + LOAD_SCALED1 + + MOVQ N_DIM, N + SHRQ $KERNELSIZE, N + JZ inc_r1c4 + +inc_r1c8: + // 1x8 KERNEL + KERNEL_LOAD8_INC + KERNEL_1x8 + STORE_1x8 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r1c8 + +inc_r1c4: + TESTQ $4, N_DIM + JZ inc_r1c2 + + // 1x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_1x4 + STORE_1x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + +inc_r1c2: + TESTQ $2, N_DIM + JZ inc_r1c1 + + // 1x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_1x2 + STORE_1x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r1c1: + TESTQ $1, N_DIM + JZ inc_end + + // 1x1 KERNEL + KERNEL_1x1 + STORE_1x1 + +inc_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_noasm.go new file mode 100644 index 0000000..d92f996 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/ge_noasm.go @@ -0,0 +1,36 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 noasm appengine safe + +package f32 + +// Ger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. 
+func Ger(m, n uintptr, alpha float32, x []float32, incX uintptr, y []float32, incY uintptr, a []float32, lda uintptr) { + + if incX == 1 && incY == 1 { + x = x[:m] + y = y[:n] + for i, xv := range x { + AxpyUnitary(alpha*xv, y, a[uintptr(i)*lda:uintptr(i)*lda+n]) + } + return + } + + var ky, kx uintptr + if int(incY) < 0 { + ky = uintptr(-int(n-1) * int(incY)) + } + if int(incX) < 0 { + kx = uintptr(-int(m-1) * int(incX)) + } + + ix := kx + for i := 0; i < int(m); i++ { + AxpyInc(alpha*x[ix], y, a[uintptr(i)*lda:uintptr(i)*lda+n], uintptr(n), uintptr(incY), 1, uintptr(ky), 0) + ix += incX + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/scal.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/scal.go new file mode 100644 index 0000000..d0867a4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/scal.go @@ -0,0 +1,55 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package f32 + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha float32, x []float32) { + for i := range x { + x[i] *= alpha + } +} + +// ScalUnitaryTo is +// for i, v := range x { +// dst[i] = alpha * v +// } +func ScalUnitaryTo(dst []float32, alpha float32, x []float32) { + for i, v := range x { + dst[i] = alpha * v + } +} + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha float32, x []float32, n, incX uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] *= alpha + ix += incX + } +} + +// ScalIncTo is +// var idst, ix uintptr +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha * x[ix] +// ix += incX +// idst += incDst +// } +func ScalIncTo(dst []float32, incDst uintptr, alpha float32, x []float32, n, incX uintptr) { + var idst, ix uintptr + for i := 0; i < int(n); i++ { + dst[idst] = alpha * x[ix] + ix += incX + idst += incDst + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_amd64.go new file mode 100644 index 0000000..fcbce09 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_amd64.go @@ -0,0 +1,68 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
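The Ger fallback in ge_noasm.go above adds alpha*x[i] times y into row i of the row-major matrix a, using AxpyUnitary for unit strides and AxpyInc (with the negative-stride start offsets kx/ky) otherwise. Since internal/asm/f32 cannot be imported from outside gonum, the snippet below is a standalone illustration of the calling convention only; gerRef and the numbers are made up for the example, and lda is the row stride of a in elements:

package main

import "fmt"

// gerRef mirrors the unit-increment fast path of Ger: A += alpha * x * y^T
// for an m×n row-major matrix stored in a flat slice with stride lda.
func gerRef(m, n uintptr, alpha float32, x, y, a []float32, lda uintptr) {
	for i := uintptr(0); i < m; i++ {
		row := a[i*lda : i*lda+n]
		for j, yv := range y[:n] {
			row[j] += alpha * x[i] * yv
		}
	}
}

func main() {
	x := []float32{1, 2}
	y := []float32{1, 10, 100}
	a := make([]float32, 6) // 2x3 matrix, lda = 3
	gerRef(2, 3, 2, x, y, a, 3)
	fmt.Println(a) // [2 20 200 4 40 400]
}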
+ +// +build !noasm,!appengine,!safe + +package f32 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha float32, x, y []float32) + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []float32, alpha float32, x, y []float32) + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []float32, incDst, idst uintptr, alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) + +// DdotUnitary is +// for i, v := range x { +// sum += float64(y[i]) * float64(v) +// } +// return +func DdotUnitary(x, y []float32) (sum float64) + +// DdotInc is +// for i := 0; i < int(n); i++ { +// sum += float64(y[iy]) * float64(x[ix]) +// ix += incX +// iy += incY +// } +// return +func DdotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float64) + +// DotUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotUnitary(x, y []float32) (sum float32) + +// DotInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float32) diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_noasm.go new file mode 100644 index 0000000..3b5b097 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f32/stubs_noasm.go @@ -0,0 +1,113 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
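stubs_amd64.go above declares the f32 routines without bodies; the toolchain accepts that because each symbol is defined by a TEXT directive in the matching *_amd64.s file, and the build tags are complementary: !noasm,!appengine,!safe here versus !amd64 noasm appengine safe on the pure-Go file that follows, so exactly one implementation is compiled (building with -tags noasm or -tags safe therefore selects the Go versions). The assembly reads its arguments at fixed frame-pointer offsets derived from the Go signature; for DotUnitary, whose TEXT body appears earlier in this patch, the layout is as follows (each float32 slice header being 24 bytes on amd64):

// func DotUnitary(x, y []float32) (sum float32)
//
//   x_base+0(FP)   x_len+8(FP)    x_cap+16(FP)
//   y_base+24(FP)  y_len+32(FP)   y_cap+40(FP)
//   sum+48(FP)     // return value, written by MOVSS SUM, sum+48(FP)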
+ +// +build !amd64 noasm appengine safe + +package f32 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha float32, x, y []float32) { + for i, v := range x { + y[i] += alpha * v + } +} + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []float32, alpha float32, x, y []float32) { + for i, v := range x { + dst[i] = alpha*v + y[i] + } +} + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + y[iy] += alpha * x[ix] + ix += incX + iy += incY + } +} + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []float32, incDst, idst uintptr, alpha float32, x, y []float32, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + dst[idst] = alpha*x[ix] + y[iy] + ix += incX + iy += incY + idst += incDst + } +} + +// DotUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotUnitary(x, y []float32) (sum float32) { + for i, v := range x { + sum += y[i] * v + } + return sum +} + +// DotInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float32) { + for i := 0; i < int(n); i++ { + sum += y[iy] * x[ix] + ix += incX + iy += incY + } + return sum +} + +// DdotUnitary is +// for i, v := range x { +// sum += float64(y[i]) * float64(v) +// } +// return +func DdotUnitary(x, y []float32) (sum float64) { + for i, v := range x { + sum += float64(y[i]) * float64(v) + } + return +} + +// DdotInc is +// for i := 0; i < int(n); i++ { +// sum += float64(y[iy]) * float64(x[ix]) +// ix += incX +// iy += incY +// } +// return +func DdotInc(x, y []float32, n, incX, incY, ix, iy uintptr) (sum float64) { + for i := 0; i < int(n); i++ { + sum += float64(y[iy]) * float64(x[ix]) + ix += incX + iy += incY + } + return +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/abssum_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/abssum_amd64.s new file mode 100644 index 0000000..d9d61bb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/abssum_amd64.s @@ -0,0 +1,82 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func L1Norm(x []float64) float64 +TEXT ·L1Norm(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), SI // SI = &x + MOVQ x_len+8(FP), CX // CX = len(x) + XORQ AX, AX // i = 0 + PXOR X0, X0 // p_sum_i = 0 + PXOR X1, X1 + PXOR X2, X2 + PXOR X3, X3 + PXOR X4, X4 + PXOR X5, X5 + PXOR X6, X6 + PXOR X7, X7 + CMPQ CX, $0 // if CX == 0 { return 0 } + JE absum_end + MOVQ CX, BX + ANDQ $7, BX // BX = len(x) % 8 + SHRQ $3, CX // CX = floor( len(x) / 8 ) + JZ absum_tail_start // if CX == 0 { goto absum_tail_start } + +absum_loop: // do { + // p_sum += max( p_sum + x[i], p_sum - x[i] ) + MOVUPS (SI)(AX*8), X8 // X_i = x[i:i+1] + MOVUPS 16(SI)(AX*8), X9 + MOVUPS 32(SI)(AX*8), X10 + MOVUPS 48(SI)(AX*8), X11 + ADDPD X8, X0 // p_sum_i += X_i ( positive values ) + ADDPD X9, X2 + ADDPD X10, X4 + ADDPD X11, X6 + SUBPD X8, X1 // p_sum_(i+1) -= X_i ( negative values ) + SUBPD X9, X3 + SUBPD X10, X5 + SUBPD X11, X7 + MAXPD X1, X0 // p_sum_i = max( p_sum_i, p_sum_(i+1) ) + MAXPD X3, X2 + MAXPD X5, X4 + MAXPD X7, X6 + MOVAPS X0, X1 // p_sum_(i+1) = p_sum_i + MOVAPS X2, X3 + MOVAPS X4, X5 + MOVAPS X6, X7 + ADDQ $8, AX // i += 8 + LOOP absum_loop // } while --CX > 0 + + // p_sum_0 = \sum_{i=1}^{3}( p_sum_(i*2) ) + ADDPD X3, X0 + ADDPD X5, X7 + ADDPD X7, X0 + + // p_sum_0[0] = p_sum_0[0] + p_sum_0[1] + MOVAPS X0, X1 + SHUFPD $0x3, X0, X0 // lower( p_sum_0 ) = upper( p_sum_0 ) + ADDSD X1, X0 + CMPQ BX, $0 + JE absum_end // if BX == 0 { goto absum_end } + +absum_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + XORPS X8, X8 // X_8 = 0 + +absum_tail: // do { + // p_sum += max( p_sum + x[i], p_sum - x[i] ) + MOVSD (SI)(AX*8), X8 // X_8 = x[i] + MOVSD X0, X1 // p_sum_1 = p_sum_0 + ADDSD X8, X0 // p_sum_0 += X_8 + SUBSD X8, X1 // p_sum_1 -= X_8 + MAXSD X1, X0 // p_sum_0 = max( p_sum_0, p_sum_1 ) + INCQ AX // i++ + LOOP absum_tail // } while --CX > 0 + +absum_end: // return p_sum_0 + MOVSD X0, sum+24(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/abssuminc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/abssuminc_amd64.s new file mode 100644 index 0000000..cac19aa --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/abssuminc_amd64.s @@ -0,0 +1,90 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
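The L1Norm kernel above computes the sum of absolute values without a sign mask: it keeps pairs of accumulators that both hold the running total, adds +x[i] into one and -x[i] into the other, and takes MAXPD, so each step effectively adds |x[i]|; eight elements are processed per iteration across four such accumulator pairs, with a scalar tail. The same idea in scalar Go (l1NormRef is an illustrative name; NaN handling only loosely follows the MAXSD convention):

func l1NormRef(x []float64) float64 {
	var sum float64
	for _, v := range x {
		plus, minus := sum+v, sum-v // one of these equals sum+|v|
		if minus > plus {
			plus = minus
		}
		sum = plus // sum = max(sum+v, sum-v) = sum + |v|
	}
	return sum
}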
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func L1NormInc(x []float64, n, incX int) (sum float64) +TEXT ·L1NormInc(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), SI // SI = &x + MOVQ n+24(FP), CX // CX = n + MOVQ incX+32(FP), AX // AX = increment * sizeof( float64 ) + SHLQ $3, AX + MOVQ AX, DX // DX = AX * 3 + IMULQ $3, DX + PXOR X0, X0 // p_sum_i = 0 + PXOR X1, X1 + PXOR X2, X2 + PXOR X3, X3 + PXOR X4, X4 + PXOR X5, X5 + PXOR X6, X6 + PXOR X7, X7 + CMPQ CX, $0 // if CX == 0 { return 0 } + JE absum_end + MOVQ CX, BX + ANDQ $7, BX // BX = n % 8 + SHRQ $3, CX // CX = floor( n / 8 ) + JZ absum_tail_start // if CX == 0 { goto absum_tail_start } + +absum_loop: // do { + // p_sum = max( p_sum + x[i], p_sum - x[i] ) + MOVSD (SI), X8 // X_i[0] = x[i] + MOVSD (SI)(AX*1), X9 + MOVSD (SI)(AX*2), X10 + MOVSD (SI)(DX*1), X11 + LEAQ (SI)(AX*4), SI // SI = SI + 4 + MOVHPD (SI), X8 // X_i[1] = x[i+4] + MOVHPD (SI)(AX*1), X9 + MOVHPD (SI)(AX*2), X10 + MOVHPD (SI)(DX*1), X11 + ADDPD X8, X0 // p_sum_i += X_i ( positive values ) + ADDPD X9, X2 + ADDPD X10, X4 + ADDPD X11, X6 + SUBPD X8, X1 // p_sum_(i+1) -= X_i ( negative values ) + SUBPD X9, X3 + SUBPD X10, X5 + SUBPD X11, X7 + MAXPD X1, X0 // p_sum_i = max( p_sum_i, p_sum_(i+1) ) + MAXPD X3, X2 + MAXPD X5, X4 + MAXPD X7, X6 + MOVAPS X0, X1 // p_sum_(i+1) = p_sum_i + MOVAPS X2, X3 + MOVAPS X4, X5 + MOVAPS X6, X7 + LEAQ (SI)(AX*4), SI // SI = SI + 4 + LOOP absum_loop // } while --CX > 0 + + // p_sum_0 = \sum_{i=1}^{3}( p_sum_(i*2) ) + ADDPD X3, X0 + ADDPD X5, X7 + ADDPD X7, X0 + + // p_sum_0[0] = p_sum_0[0] + p_sum_0[1] + MOVAPS X0, X1 + SHUFPD $0x3, X0, X0 // lower( p_sum_0 ) = upper( p_sum_0 ) + ADDSD X1, X0 + CMPQ BX, $0 + JE absum_end // if BX == 0 { goto absum_end } + +absum_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + XORPS X8, X8 // X_8 = 0 + +absum_tail: // do { + // p_sum += max( p_sum + x[i], p_sum - x[i] ) + MOVSD (SI), X8 // X_8 = x[i] + MOVSD X0, X1 // p_sum_1 = p_sum_0 + ADDSD X8, X0 // p_sum_0 += X_8 + SUBSD X8, X1 // p_sum_1 -= X_8 + MAXSD X1, X0 // p_sum_0 = max( p_sum_0, p_sum_1 ) + ADDQ AX, SI // i++ + LOOP absum_tail // } while --CX > 0 + +absum_end: // return p_sum_0 + MOVSD X0, sum+40(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/add_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/add_amd64.s new file mode 100644 index 0000000..bc0ea6a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/add_amd64.s @@ -0,0 +1,66 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func Add(dst, s []float64) +TEXT ·Add(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ dst_len+8(FP), CX // CX = len(dst) + MOVQ s_base+24(FP), SI // SI = &s + CMPQ s_len+32(FP), CX // CX = max( CX, len(s) ) + CMOVQLE s_len+32(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE add_end + XORQ AX, AX + MOVQ DI, BX + ANDQ $0x0F, BX // BX = &dst & 15 + JZ add_no_trim // if BX == 0 { goto add_no_trim } + + // Align on 16-bit boundary + MOVSD (SI)(AX*8), X0 // X0 = s[i] + ADDSD (DI)(AX*8), X0 // X0 += dst[i] + MOVSD X0, (DI)(AX*8) // dst[i] = X0 + INCQ AX // i++ + DECQ CX // --CX + JE add_end // if CX == 0 { return } + +add_no_trim: + MOVQ CX, BX + ANDQ $7, BX // BX = len(dst) % 8 + SHRQ $3, CX // CX = floor( len(dst) / 8 ) + JZ add_tail_start // if CX == 0 { goto add_tail_start } + +add_loop: // Loop unrolled 8x do { + MOVUPS (SI)(AX*8), X0 // X_i = s[i:i+1] + MOVUPS 16(SI)(AX*8), X1 + MOVUPS 32(SI)(AX*8), X2 + MOVUPS 48(SI)(AX*8), X3 + ADDPD (DI)(AX*8), X0 // X_i += dst[i:i+1] + ADDPD 16(DI)(AX*8), X1 + ADDPD 32(DI)(AX*8), X2 + ADDPD 48(DI)(AX*8), X3 + MOVUPS X0, (DI)(AX*8) // dst[i:i+1] = X_i + MOVUPS X1, 16(DI)(AX*8) + MOVUPS X2, 32(DI)(AX*8) + MOVUPS X3, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + LOOP add_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE add_end + +add_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +add_tail: // do { + MOVSD (SI)(AX*8), X0 // X0 = s[i] + ADDSD (DI)(AX*8), X0 // X0 += dst[i] + MOVSD X0, (DI)(AX*8) // dst[i] = X0 + INCQ AX // ++i + LOOP add_tail // } while --CX > 0 + +add_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/addconst_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/addconst_amd64.s new file mode 100644 index 0000000..7cc68c7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/addconst_amd64.s @@ -0,0 +1,53 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
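Add above performs dst[i] += s[i] over the shorter of the two slices: one element is peeled when dst is not already 16-byte aligned, the main loop handles eight float64 values per iteration with unaligned loads and ADDPD, and a scalar loop absorbs the remainder; the AddConst kernel that follows applies the same pattern in place with alpha broadcast into both lanes by SHUFPD. A scalar sketch of Add's behavior (addRef is an illustrative name):

func addRef(dst, s []float64) {
	n := len(dst)
	if len(s) < n { // the assembly clamps the count the same way via CMOVQLE
		n = len(s)
	}
	for i := 0; i < n; i++ {
		dst[i] += s[i]
	}
}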
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func Addconst(alpha float64, x []float64) +TEXT ·AddConst(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), SI // SI = &x + MOVQ x_len+16(FP), CX // CX = len(x) + CMPQ CX, $0 // if len(x) == 0 { return } + JE ac_end + MOVSD alpha+0(FP), X4 // X4 = { a, a } + SHUFPD $0, X4, X4 + MOVUPS X4, X5 // X5 = X4 + XORQ AX, AX // i = 0 + MOVQ CX, BX + ANDQ $7, BX // BX = len(x) % 8 + SHRQ $3, CX // CX = floor( len(x) / 8 ) + JZ ac_tail_start // if CX == 0 { goto ac_tail_start } + +ac_loop: // Loop unrolled 8x do { + MOVUPS (SI)(AX*8), X0 // X_i = s[i:i+1] + MOVUPS 16(SI)(AX*8), X1 + MOVUPS 32(SI)(AX*8), X2 + MOVUPS 48(SI)(AX*8), X3 + ADDPD X4, X0 // X_i += a + ADDPD X5, X1 + ADDPD X4, X2 + ADDPD X5, X3 + MOVUPS X0, (SI)(AX*8) // s[i:i+1] = X_i + MOVUPS X1, 16(SI)(AX*8) + MOVUPS X2, 32(SI)(AX*8) + MOVUPS X3, 48(SI)(AX*8) + ADDQ $8, AX // i += 8 + LOOP ac_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE ac_end + +ac_tail_start: // Reset loop counters + MOVQ BX, CX // Loop counter: CX = BX + +ac_tail: // do { + MOVSD (SI)(AX*8), X0 // X0 = s[i] + ADDSD X4, X0 // X0 += a + MOVSD X0, (SI)(AX*8) // s[i] = X0 + INCQ AX // ++i + LOOP ac_tail // } while --CX > 0 + +ac_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/axpy.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpy.go new file mode 100644 index 0000000..b832213 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpy.go @@ -0,0 +1,57 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 noasm appengine safe + +package f64 + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha float64, x, y []float64) { + for i, v := range x { + y[i] += alpha * v + } +} + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []float64, alpha float64, x, y []float64) { + for i, v := range x { + dst[i] = alpha*v + y[i] + } +} + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + y[iy] += alpha * x[ix] + ix += incX + iy += incY + } +} + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []float64, incDst, idst uintptr, alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) { + for i := 0; i < int(n); i++ { + dst[idst] = alpha*x[ix] + y[iy] + ix += incX + iy += incY + idst += incDst + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyinc_amd64.s new file mode 100644 index 0000000..aab22e3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyinc_amd64.s @@ -0,0 +1,142 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DI +#define DST_PTR DI +#define IDX AX +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R11 +#define INC_Y R9 +#define INCx3_Y R12 +#define INC_DST R9 +#define INCx3_DST R12 +#define ALPHA X0 +#define ALPHA_2 X1 + +// func AxpyInc(alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyInc(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), X_PTR // X_PTR = &x + MOVQ y_base+32(FP), Y_PTR // Y_PTR = &y + MOVQ n+56(FP), LEN // LEN = n + CMPQ LEN, $0 // if LEN == 0 { return } + JE end + + MOVQ ix+80(FP), INC_X + MOVQ iy+88(FP), INC_Y + LEAQ (X_PTR)(INC_X*8), X_PTR // X_PTR = &(x[ix]) + LEAQ (Y_PTR)(INC_Y*8), Y_PTR // Y_PTR = &(y[iy]) + MOVQ Y_PTR, DST_PTR // DST_PTR = Y_PTR // Write pointer + + MOVQ incX+64(FP), INC_X // INC_X = incX * sizeof(float64) + SHLQ $3, INC_X + MOVQ incY+72(FP), INC_Y // INC_Y = incY * sizeof(float64) + SHLQ $3, INC_Y + + MOVSD alpha+0(FP), ALPHA // ALPHA = alpha + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = n % 4 + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVAPS ALPHA, ALPHA_2 // ALPHA_2 = ALPHA for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + +loop: // do { // y[i] += alpha * x[i] unrolled 4x. 
+ MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MOVSD (X_PTR)(INC_X*2), X4 + MOVSD (X_PTR)(INCx3_X*1), X5 + + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA_2, X3 + MULSD ALPHA, X4 + MULSD ALPHA_2, X5 + + ADDSD (Y_PTR), X2 // X_i += y[i] + ADDSD (Y_PTR)(INC_Y*1), X3 + ADDSD (Y_PTR)(INC_Y*2), X4 + ADDSD (Y_PTR)(INCx3_Y*1), X5 + + MOVSD X2, (DST_PTR) // y[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + MOVSD X4, (DST_PTR)(INC_DST*2) + MOVSD X5, (DST_PTR)(INCx3_DST*1) + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[incY*4]) + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE end + +tail_start: // Reset Loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ tail_one + +tail_two: + MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA, X3 + ADDSD (Y_PTR), X2 // X_i += y[i] + ADDSD (Y_PTR)(INC_Y*1), X3 + MOVSD X2, (DST_PTR) // y[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + + LEAQ (X_PTR)(INC_X*2), X_PTR // X_PTR = &(X_PTR[incX*2]) + LEAQ (Y_PTR)(INC_Y*2), Y_PTR // Y_PTR = &(Y_PTR[incY*2]) + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { goto end } + +tail_one: + // y[i] += alpha * x[i] for the last n % 4 iterations. + MOVSD (X_PTR), X2 // X2 = x[i] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR), X2 // X2 += y[i] + MOVSD X2, (DST_PTR) // y[i] = X2 + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyincto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyincto_amd64.s new file mode 100644 index 0000000..f2fb977 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyincto_amd64.s @@ -0,0 +1,148 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DI +#define DST_PTR DX +#define IDX AX +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R11 +#define INC_Y R9 +#define INCx3_Y R12 +#define INC_DST R10 +#define INCx3_DST R13 +#define ALPHA X0 +#define ALPHA_2 X1 + +// func AxpyIncTo(dst []float64, incDst, idst uintptr, alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) +TEXT ·AxpyIncTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DST_PTR // DST_PTR := &dst + MOVQ x_base+48(FP), X_PTR // X_PTR := &x + MOVQ y_base+72(FP), Y_PTR // Y_PTR := &y + MOVQ n+96(FP), LEN // LEN := n + CMPQ LEN, $0 // if LEN == 0 { return } + JE end + + MOVQ ix+120(FP), INC_X + LEAQ (X_PTR)(INC_X*8), X_PTR // X_PTR = &(x[ix]) + MOVQ iy+128(FP), INC_Y + LEAQ (Y_PTR)(INC_Y*8), Y_PTR // Y_PTR = &(dst[idst]) + MOVQ idst+32(FP), INC_DST + LEAQ (DST_PTR)(INC_DST*8), DST_PTR // DST_PTR = &(y[iy]) + + MOVQ incX+104(FP), INC_X // INC_X = incX * sizeof(float64) + SHLQ $3, INC_X + MOVQ incY+112(FP), INC_Y // INC_Y = incY * sizeof(float64) + SHLQ $3, INC_Y + MOVQ incDst+24(FP), INC_DST // INC_DST = incDst * sizeof(float64) + SHLQ $3, INC_DST + MOVSD alpha+40(FP), ALPHA + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = n % 4 + SHRQ $2, LEN // LEN = floor( n / 4 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVSD ALPHA, ALPHA_2 // ALPHA_2 = ALPHA for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_Y)(INC_Y*2), INCx3_Y // INCx3_Y = INC_Y * 3 + LEAQ (INC_DST)(INC_DST*2), INCx3_DST // INCx3_DST = INC_DST * 3 + +loop: // do { // y[i] += alpha * x[i] unrolled 2x. 
+ MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MOVSD (X_PTR)(INC_X*2), X4 + MOVSD (X_PTR)(INCx3_X*1), X5 + + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA_2, X3 + MULSD ALPHA, X4 + MULSD ALPHA_2, X5 + + ADDSD (Y_PTR), X2 // X_i += y[i] + ADDSD (Y_PTR)(INC_Y*1), X3 + ADDSD (Y_PTR)(INC_Y*2), X4 + ADDSD (Y_PTR)(INCx3_Y*1), X5 + + MOVSD X2, (DST_PTR) // y[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + MOVSD X4, (DST_PTR)(INC_DST*2) + MOVSD X5, (DST_PTR)(INCx3_DST*1) + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR // Y_PTR = &(Y_PTR[incY*4]) + LEAQ (DST_PTR)(INC_DST*4), DST_PTR // DST_PTR = &(DST_PTR[incDst*4] + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE end + +tail_start: // Reset Loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ tail_one + +tail_two: + MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA, X3 + ADDSD (Y_PTR), X2 // X_i += y[i] + ADDSD (Y_PTR)(INC_Y*1), X3 + MOVSD X2, (DST_PTR) // y[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + + LEAQ (X_PTR)(INC_X*2), X_PTR // X_PTR = &(X_PTR[incX*2]) + LEAQ (Y_PTR)(INC_Y*2), Y_PTR // Y_PTR = &(Y_PTR[incY*2]) + LEAQ (DST_PTR)(INC_DST*2), DST_PTR // DST_PTR = &(DST_PTR[incY*2] + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { goto end } + +tail_one: + MOVSD (X_PTR), X2 // X2 = x[i] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR), X2 // X2 += y[i] + MOVSD X2, (DST_PTR) // y[i] = X2 + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitary_amd64.s new file mode 100644 index 0000000..cc519cf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitary_amd64.s @@ -0,0 +1,134 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DI +#define DST_PTR DI +#define IDX AX +#define LEN CX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_2 X1 + +// func AxpyUnitary(alpha float64, x, y []float64) +TEXT ·AxpyUnitary(SB), NOSPLIT, $0 + MOVQ x_base+8(FP), X_PTR // X_PTR := &x + MOVQ y_base+32(FP), Y_PTR // Y_PTR := &y + MOVQ x_len+16(FP), LEN // LEN = min( len(x), len(y) ) + CMPQ y_len+40(FP), LEN + CMOVQLE y_len+40(FP), LEN + CMPQ LEN, $0 // if LEN == 0 { return } + JE end + XORQ IDX, IDX + MOVSD alpha+0(FP), ALPHA // ALPHA := { alpha, alpha } + SHUFPD $0, ALPHA, ALPHA + MOVUPS ALPHA, ALPHA_2 // ALPHA_2 := ALPHA for pipelining + MOVQ Y_PTR, TAIL // Check memory alignment + ANDQ $15, TAIL // TAIL = &y % 16 + JZ no_trim // if TAIL == 0 { goto no_trim } + + // Align on 16-byte boundary + MOVSD (X_PTR), X2 // X2 := x[0] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR), X2 // X2 += y[0] + MOVSD X2, (DST_PTR) // y[0] = X2 + INCQ IDX // i++ + DECQ LEN // LEN-- + JZ end // if LEN == 0 { return } + +no_trim: + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL := n % 8 + SHRQ $3, LEN // LEN = floor( n / 8 ) + JZ tail_start // if LEN == 0 { goto tail2_start } + +loop: // do { + // y[i] += alpha * x[i] unrolled 8x. + MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MOVUPS 16(X_PTR)(IDX*8), X3 + MOVUPS 32(X_PTR)(IDX*8), X4 + MOVUPS 48(X_PTR)(IDX*8), X5 + + MULPD ALPHA, X2 // X_i *= a + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + ADDPD (Y_PTR)(IDX*8), X2 // X_i += y[i] + ADDPD 16(Y_PTR)(IDX*8), X3 + ADDPD 32(Y_PTR)(IDX*8), X4 + ADDPD 48(Y_PTR)(IDX*8), X5 + + MOVUPS X2, (DST_PTR)(IDX*8) // y[i] = X_i + MOVUPS X3, 16(DST_PTR)(IDX*8) + MOVUPS X4, 32(DST_PTR)(IDX*8) + MOVUPS X5, 48(DST_PTR)(IDX*8) + + ADDQ $8, IDX // i += 8 + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE end + +tail_start: // Reset loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( TAIL / 2 ) + JZ tail_one // if TAIL == 0 { goto tail } + +tail_two: // do { + MOVUPS (X_PTR)(IDX*8), X2 // X2 = x[i] + MULPD ALPHA, X2 // X2 *= a + ADDPD (Y_PTR)(IDX*8), X2 // X2 += y[i] + MOVUPS X2, (DST_PTR)(IDX*8) // y[i] = X2 + ADDQ $2, IDX // i += 2 + DECQ LEN + JNZ tail_two // } while --LEN > 0 + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { goto end } + +tail_one: + MOVSD (X_PTR)(IDX*8), X2 // X2 = x[i] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR)(IDX*8), X2 // X2 += y[i] + MOVSD X2, (DST_PTR)(IDX*8) // y[i] = X2 + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitaryto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitaryto_amd64.s new file mode 100644 index 0000000..3918092 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/axpyunitaryto_amd64.s @@ -0,0 +1,140 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define Y_PTR DX +#define DST_PTR DI +#define IDX AX +#define LEN CX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_2 X1 + +// func AxpyUnitaryTo(dst []float64, alpha float64, x, y []float64) +TEXT ·AxpyUnitaryTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DST_PTR // DST_PTR := &dst + MOVQ x_base+32(FP), X_PTR // X_PTR := &x + MOVQ y_base+56(FP), Y_PTR // Y_PTR := &y + MOVQ x_len+40(FP), LEN // LEN = min( len(x), len(y), len(dst) ) + CMPQ y_len+64(FP), LEN + CMOVQLE y_len+64(FP), LEN + CMPQ dst_len+8(FP), LEN + CMOVQLE dst_len+8(FP), LEN + + CMPQ LEN, $0 + JE end // if LEN == 0 { return } + + XORQ IDX, IDX // IDX = 0 + MOVSD alpha+24(FP), ALPHA + SHUFPD $0, ALPHA, ALPHA // ALPHA := { alpha, alpha } + MOVQ Y_PTR, TAIL // Check memory alignment + ANDQ $15, TAIL // TAIL = &y % 16 + JZ no_trim // if TAIL == 0 { goto no_trim } + + // Align on 16-byte boundary + MOVSD (X_PTR), X2 // X2 := x[0] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR), X2 // X2 += y[0] + MOVSD X2, (DST_PTR) // y[0] = X2 + INCQ IDX // i++ + DECQ LEN // LEN-- + JZ end // if LEN == 0 { return } + +no_trim: + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL := n % 8 + SHRQ $3, LEN // LEN = floor( n / 8 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVUPS ALPHA, ALPHA_2 // ALPHA_2 := ALPHA for pipelining + +loop: // do { + // y[i] += alpha * x[i] unrolled 8x. 
+ MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MOVUPS 16(X_PTR)(IDX*8), X3 + MOVUPS 32(X_PTR)(IDX*8), X4 + MOVUPS 48(X_PTR)(IDX*8), X5 + + MULPD ALPHA, X2 // X_i *= alpha + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + ADDPD (Y_PTR)(IDX*8), X2 // X_i += y[i] + ADDPD 16(Y_PTR)(IDX*8), X3 + ADDPD 32(Y_PTR)(IDX*8), X4 + ADDPD 48(Y_PTR)(IDX*8), X5 + + MOVUPS X2, (DST_PTR)(IDX*8) // y[i] = X_i + MOVUPS X3, 16(DST_PTR)(IDX*8) + MOVUPS X4, 32(DST_PTR)(IDX*8) + MOVUPS X5, 48(DST_PTR)(IDX*8) + + ADDQ $8, IDX // i += 8 + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 // if TAIL == 0 { return } + JE end + +tail_start: // Reset loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( TAIL / 2 ) + JZ tail_one // if LEN == 0 { goto tail } + +tail_two: // do { + MOVUPS (X_PTR)(IDX*8), X2 // X2 = x[i] + MULPD ALPHA, X2 // X2 *= alpha + ADDPD (Y_PTR)(IDX*8), X2 // X2 += y[i] + MOVUPS X2, (DST_PTR)(IDX*8) // y[i] = X2 + ADDQ $2, IDX // i += 2 + DECQ LEN + JNZ tail_two // } while --LEN > 0 + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { goto end } + +tail_one: + MOVSD (X_PTR)(IDX*8), X2 // X2 = x[i] + MULSD ALPHA, X2 // X2 *= a + ADDSD (Y_PTR)(IDX*8), X2 // X2 += y[i] + MOVSD X2, (DST_PTR)(IDX*8) // y[i] = X2 + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/cumprod_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/cumprod_amd64.s new file mode 100644 index 0000000..32bd157 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/cumprod_amd64.s @@ -0,0 +1,71 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +TEXT ·CumProd(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ dst_len+8(FP), CX // CX = len(dst) + MOVQ s_base+24(FP), SI // SI = &s + CMPQ s_len+32(FP), CX // CX = max( CX, len(s) ) + CMOVQLE s_len+32(FP), CX + MOVQ CX, ret_len+56(FP) // len(ret) = CX + CMPQ CX, $0 // if CX == 0 { return } + JE cp_end + XORQ AX, AX // i = 0 + + MOVSD (SI), X5 // p_prod = { s[0], s[0] } + SHUFPD $0, X5, X5 + MOVSD X5, (DI) // dst[0] = s[0] + INCQ AX // ++i + DECQ CX // -- CX + JZ cp_end // if CX == 0 { return } + + MOVQ CX, BX + ANDQ $3, BX // BX = CX % 4 + SHRQ $2, CX // CX = floor( CX / 4 ) + JZ cp_tail_start // if CX == 0 { goto cp_tail_start } + +cp_loop: // Loop unrolled 4x do { + MOVUPS (SI)(AX*8), X0 // X0 = s[i:i+1] + MOVUPS 16(SI)(AX*8), X2 + MOVAPS X0, X1 // X1 = X0 + MOVAPS X2, X3 + SHUFPD $1, X1, X1 // { X1[0], X1[1] } = { X1[1], X1[0] } + SHUFPD $1, X3, X3 + MULPD X0, X1 // X1 *= X0 + MULPD X2, X3 + SHUFPD $2, X1, X0 // { X0[0], X0[1] } = { X0[0], X1[1] } + SHUFPD $3, X1, X1 // { X1[0], X1[1] } = { X1[1], X1[1] } + SHUFPD $2, X3, X2 + SHUFPD $3, X3, X3 + MULPD X5, X0 // X0 *= p_prod + MULPD X1, X5 // p_prod *= X1 + MULPD X5, X2 + MOVUPS X0, (DI)(AX*8) // dst[i] = X0 + MOVUPS X2, 16(DI)(AX*8) + MULPD X3, X5 + ADDQ $4, AX // i += 4 + LOOP cp_loop // } while --CX > 0 + + // if BX == 0 { return } + CMPQ BX, $0 + JE cp_end + +cp_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +cp_tail: // do { + MULSD (SI)(AX*8), X5 // p_prod *= s[i] + MOVSD X5, (DI)(AX*8) // dst[i] = p_prod + INCQ AX // ++i + LOOP cp_tail // } while --CX > 0 + +cp_end: + MOVQ DI, ret_base+48(FP) // &ret = &dst + MOVQ dst_cap+16(FP), SI // cap(ret) = cap(dst) + MOVQ SI, ret_cap+64(FP) + RET diff --git 
a/vendor/gonum.org/v1/gonum/internal/asm/f64/cumsum_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/cumsum_amd64.s new file mode 100644 index 0000000..10d7fda --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/cumsum_amd64.s @@ -0,0 +1,64 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +TEXT ·CumSum(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ dst_len+8(FP), CX // CX = len(dst) + MOVQ s_base+24(FP), SI // SI = &s + CMPQ s_len+32(FP), CX // CX = max( CX, len(s) ) + CMOVQLE s_len+32(FP), CX + MOVQ CX, ret_len+56(FP) // len(ret) = CX + CMPQ CX, $0 // if CX == 0 { return } + JE cs_end + XORQ AX, AX // i = 0 + PXOR X5, X5 // p_sum = 0 + MOVQ CX, BX + ANDQ $3, BX // BX = CX % 4 + SHRQ $2, CX // CX = floor( CX / 4 ) + JZ cs_tail_start // if CX == 0 { goto cs_tail_start } + +cs_loop: // Loop unrolled 4x do { + MOVUPS (SI)(AX*8), X0 // X0 = s[i:i+1] + MOVUPS 16(SI)(AX*8), X2 + MOVAPS X0, X1 // X1 = X0 + MOVAPS X2, X3 + SHUFPD $1, X1, X1 // { X1[0], X1[1] } = { X1[1], X1[0] } + SHUFPD $1, X3, X3 + ADDPD X0, X1 // X1 += X0 + ADDPD X2, X3 + SHUFPD $2, X1, X0 // { X0[0], X0[1] } = { X0[0], X1[1] } + SHUFPD $3, X1, X1 // { X1[0], X1[1] } = { X1[1], X1[1] } + SHUFPD $2, X3, X2 + SHUFPD $3, X3, X3 + ADDPD X5, X0 // X0 += p_sum + ADDPD X1, X5 // p_sum += X1 + ADDPD X5, X2 + MOVUPS X0, (DI)(AX*8) // dst[i] = X0 + MOVUPS X2, 16(DI)(AX*8) + ADDPD X3, X5 + ADDQ $4, AX // i += 4 + LOOP cs_loop // } while --CX > 0 + + // if BX == 0 { return } + CMPQ BX, $0 + JE cs_end + +cs_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +cs_tail: // do { + ADDSD (SI)(AX*8), X5 // p_sum *= s[i] + MOVSD X5, (DI)(AX*8) // dst[i] = p_sum + INCQ AX // ++i + LOOP cs_tail // } while --CX > 0 + +cs_end: + MOVQ DI, ret_base+48(FP) // &ret = &dst + MOVQ dst_cap+16(FP), SI // cap(ret) = cap(dst) + MOVQ SI, ret_cap+64(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/div_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/div_amd64.s new file mode 100644 index 0000000..1a4e9ee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/div_amd64.s @@ -0,0 +1,67 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
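CumSum above computes the running prefix sum four elements at a time: each 2-lane register is turned into an in-register prefix with a SHUFPD/ADDPD pair, the carried total in X5 is added before the store, and X5 is then advanced by the block totals; the remainder falls back to a scalar p_sum += s[i]; dst[i] = p_sum loop. Both this kernel and CumProd in the preceding file (the same scheme with multiplication in place of addition) also fill in ret_base/ret_len/ret_cap, i.e. they return the destination slice with its length clamped to the shorter input. A scalar sketch of the contract (cumSumRef is an illustrative name):

func cumSumRef(dst, s []float64) []float64 {
	n := len(dst)
	if len(s) < n {
		n = len(s)
	}
	var p float64
	for i := 0; i < n; i++ {
		p += s[i]
		dst[i] = p
	}
	return dst[:n] // mirrors the ret_base/ret_len/ret_cap stores in the assembly
}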
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func Div(dst, s []float64) +TEXT ·Div(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ dst_len+8(FP), CX // CX = len(dst) + MOVQ s_base+24(FP), SI // SI = &s + CMPQ s_len+32(FP), CX // CX = max( CX, len(s) ) + CMOVQLE s_len+32(FP), CX + CMPQ CX, $0 // if CX == 0 { return } + JE div_end + XORQ AX, AX // i = 0 + MOVQ SI, BX + ANDQ $15, BX // BX = &s & 15 + JZ div_no_trim // if BX == 0 { goto div_no_trim } + + // Align on 16-bit boundary + MOVSD (DI)(AX*8), X0 // X0 = dst[i] + DIVSD (SI)(AX*8), X0 // X0 /= s[i] + MOVSD X0, (DI)(AX*8) // dst[i] = X0 + INCQ AX // ++i + DECQ CX // --CX + JZ div_end // if CX == 0 { return } + +div_no_trim: + MOVQ CX, BX + ANDQ $7, BX // BX = len(dst) % 8 + SHRQ $3, CX // CX = floor( len(dst) / 8 ) + JZ div_tail_start // if CX == 0 { goto div_tail_start } + +div_loop: // Loop unrolled 8x do { + MOVUPS (DI)(AX*8), X0 // X0 = dst[i:i+1] + MOVUPS 16(DI)(AX*8), X1 + MOVUPS 32(DI)(AX*8), X2 + MOVUPS 48(DI)(AX*8), X3 + DIVPD (SI)(AX*8), X0 // X0 /= s[i:i+1] + DIVPD 16(SI)(AX*8), X1 + DIVPD 32(SI)(AX*8), X2 + DIVPD 48(SI)(AX*8), X3 + MOVUPS X0, (DI)(AX*8) // dst[i] = X0 + MOVUPS X1, 16(DI)(AX*8) + MOVUPS X2, 32(DI)(AX*8) + MOVUPS X3, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + LOOP div_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE div_end + +div_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +div_tail: // do { + MOVSD (DI)(AX*8), X0 // X0 = dst[i] + DIVSD (SI)(AX*8), X0 // X0 /= s[i] + MOVSD X0, (DI)(AX*8) // dst[i] = X0 + INCQ AX // ++i + LOOP div_tail // } while --CX > 0 + +div_end: + RET + diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/divto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/divto_amd64.s new file mode 100644 index 0000000..16ab9b7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/divto_amd64.s @@ -0,0 +1,73 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func DivTo(dst, x, y []float64) +TEXT ·DivTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DI // DI = &dst + MOVQ dst_len+8(FP), CX // CX = len(dst) + MOVQ x_base+24(FP), SI // SI = &x + MOVQ y_base+48(FP), DX // DX = &y + CMPQ x_len+32(FP), CX // CX = max( len(dst), len(x), len(y) ) + CMOVQLE x_len+32(FP), CX + CMPQ y_len+56(FP), CX + CMOVQLE y_len+56(FP), CX + MOVQ CX, ret_len+80(FP) // len(ret) = CX + CMPQ CX, $0 // if CX == 0 { return } + JE div_end + XORQ AX, AX // i = 0 + MOVQ DX, BX + ANDQ $15, BX // BX = &y & OxF + JZ div_no_trim // if BX == 0 { goto div_no_trim } + + // Align on 16-bit boundary + MOVSD (SI)(AX*8), X0 // X0 = s[i] + DIVSD (DX)(AX*8), X0 // X0 /= t[i] + MOVSD X0, (DI)(AX*8) // dst[i] = X0 + INCQ AX // ++i + DECQ CX // --CX + JZ div_end // if CX == 0 { return } + +div_no_trim: + MOVQ CX, BX + ANDQ $7, BX // BX = len(dst) % 8 + SHRQ $3, CX // CX = floor( len(dst) / 8 ) + JZ div_tail_start // if CX == 0 { goto div_tail_start } + +div_loop: // Loop unrolled 8x do { + MOVUPS (SI)(AX*8), X0 // X0 = x[i:i+1] + MOVUPS 16(SI)(AX*8), X1 + MOVUPS 32(SI)(AX*8), X2 + MOVUPS 48(SI)(AX*8), X3 + DIVPD (DX)(AX*8), X0 // X0 /= y[i:i+1] + DIVPD 16(DX)(AX*8), X1 + DIVPD 32(DX)(AX*8), X2 + DIVPD 48(DX)(AX*8), X3 + MOVUPS X0, (DI)(AX*8) // dst[i:i+1] = X0 + MOVUPS X1, 16(DI)(AX*8) + MOVUPS X2, 32(DI)(AX*8) + MOVUPS X3, 48(DI)(AX*8) + ADDQ $8, AX // i += 8 + LOOP div_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE div_end + +div_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + +div_tail: // do { + MOVSD (SI)(AX*8), X0 // X0 = x[i] + DIVSD (DX)(AX*8), X0 // X0 /= y[i] + MOVSD X0, (DI)(AX*8) + INCQ AX // ++i + LOOP div_tail // } while --CX > 0 + +div_end: + MOVQ DI, ret_base+72(FP) // &ret = &dst + MOVQ dst_cap+16(FP), DI // cap(ret) = cap(dst) + MOVQ DI, ret_cap+88(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/doc.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/doc.go new file mode 100644 index 0000000..33c76c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package f64 provides float64 vector primitives. +package f64 // import "gonum.org/v1/gonum/internal/asm/f64" diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/dot.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/dot.go new file mode 100644 index 0000000..b77138d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/dot.go @@ -0,0 +1,35 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !amd64 noasm appengine safe + +package f64 + +// DotUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotUnitary(x, y []float64) (sum float64) { + for i, v := range x { + sum += y[i] * v + } + return sum +} + +// DotInc is +// for i := 0; i < int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotInc(x, y []float64, n, incX, incY, ix, iy uintptr) (sum float64) { + for i := 0; i < int(n); i++ { + sum += y[iy] * x[ix] + ix += incX + iy += incY + } + return sum +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/dot_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/dot_amd64.s new file mode 100644 index 0000000..6daba1b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/dot_amd64.s @@ -0,0 +1,145 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func DdotUnitary(x, y []float64) (sum float64) +// This function assumes len(y) >= len(x). +TEXT ·DotUnitary(SB), NOSPLIT, $0 + MOVQ x+0(FP), R8 + MOVQ x_len+8(FP), DI // n = len(x) + MOVQ y+24(FP), R9 + + MOVSD $(0.0), X7 // sum = 0 + MOVSD $(0.0), X8 // sum = 0 + + MOVQ $0, SI // i = 0 + SUBQ $4, DI // n -= 4 + JL tail_uni // if n < 0 goto tail_uni + +loop_uni: + // sum += x[i] * y[i] unrolled 4x. 
+ MOVUPD 0(R8)(SI*8), X0 + MOVUPD 0(R9)(SI*8), X1 + MOVUPD 16(R8)(SI*8), X2 + MOVUPD 16(R9)(SI*8), X3 + MULPD X1, X0 + MULPD X3, X2 + ADDPD X0, X7 + ADDPD X2, X8 + + ADDQ $4, SI // i += 4 + SUBQ $4, DI // n -= 4 + JGE loop_uni // if n >= 0 goto loop_uni + +tail_uni: + ADDQ $4, DI // n += 4 + JLE end_uni // if n <= 0 goto end_uni + +onemore_uni: + // sum += x[i] * y[i] for the remaining 1-3 elements. + MOVSD 0(R8)(SI*8), X0 + MOVSD 0(R9)(SI*8), X1 + MULSD X1, X0 + ADDSD X0, X7 + + ADDQ $1, SI // i++ + SUBQ $1, DI // n-- + JNZ onemore_uni // if n != 0 goto onemore_uni + +end_uni: + // Add the four sums together. + ADDPD X8, X7 + MOVSD X7, X0 + UNPCKHPD X7, X7 + ADDSD X0, X7 + MOVSD X7, sum+48(FP) // Return final sum. + RET + +// func DdotInc(x, y []float64, n, incX, incY, ix, iy uintptr) (sum float64) +TEXT ·DotInc(SB), NOSPLIT, $0 + MOVQ x+0(FP), R8 + MOVQ y+24(FP), R9 + MOVQ n+48(FP), CX + MOVQ incX+56(FP), R11 + MOVQ incY+64(FP), R12 + MOVQ ix+72(FP), R13 + MOVQ iy+80(FP), R14 + + MOVSD $(0.0), X7 // sum = 0 + LEAQ (R8)(R13*8), SI // p = &x[ix] + LEAQ (R9)(R14*8), DI // q = &y[ix] + SHLQ $3, R11 // incX *= sizeof(float64) + SHLQ $3, R12 // indY *= sizeof(float64) + + SUBQ $2, CX // n -= 2 + JL tail_inc // if n < 0 goto tail_inc + +loop_inc: + // sum += *p * *q unrolled 2x. + MOVHPD (SI), X0 + MOVHPD (DI), X1 + ADDQ R11, SI // p += incX + ADDQ R12, DI // q += incY + MOVLPD (SI), X0 + MOVLPD (DI), X1 + ADDQ R11, SI // p += incX + ADDQ R12, DI // q += incY + + MULPD X1, X0 + ADDPD X0, X7 + + SUBQ $2, CX // n -= 2 + JGE loop_inc // if n >= 0 goto loop_inc + +tail_inc: + ADDQ $2, CX // n += 2 + JLE end_inc // if n <= 0 goto end_inc + + // sum += *p * *q for the last iteration if n is odd. + MOVSD (SI), X0 + MULSD (DI), X0 + ADDSD X0, X7 + +end_inc: + // Add the two sums together. + MOVSD X7, X0 + UNPCKHPD X7, X7 + ADDSD X0, X7 + MOVSD X7, sum+88(FP) // Return final sum. + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_amd64.go new file mode 100644 index 0000000..00c99e9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_amd64.go @@ -0,0 +1,22 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +package f64 + +// Ger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Ger(m, n uintptr, alpha float64, x []float64, incX uintptr, y []float64, incY uintptr, a []float64, lda uintptr) + +// GemvN computes +// y = alpha * A * x + beta * y +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func GemvN(m, n uintptr, alpha float64, a []float64, lda uintptr, x []float64, incX uintptr, beta float64, y []float64, incY uintptr) + +// GemvT computes +// y = alpha * A^T * x + beta * y +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func GemvT(m, n uintptr, alpha float64, a []float64, lda uintptr, x []float64, incX uintptr, beta float64, y []float64, incY uintptr) diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_noasm.go new file mode 100644 index 0000000..2a1cfd5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/ge_noasm.go @@ -0,0 +1,118 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 noasm appengine safe + +package f64 + +// Ger performs the rank-one operation +// A += alpha * x * y^T +// where A is an m×n dense matrix, x and y are vectors, and alpha is a scalar. +func Ger(m, n uintptr, alpha float64, x []float64, incX uintptr, y []float64, incY uintptr, a []float64, lda uintptr) { + if incX == 1 && incY == 1 { + x = x[:m] + y = y[:n] + for i, xv := range x { + AxpyUnitary(alpha*xv, y, a[uintptr(i)*lda:uintptr(i)*lda+n]) + } + return + } + + var ky, kx uintptr + if int(incY) < 0 { + ky = uintptr(-int(n-1) * int(incY)) + } + if int(incX) < 0 { + kx = uintptr(-int(m-1) * int(incX)) + } + + ix := kx + for i := 0; i < int(m); i++ { + AxpyInc(alpha*x[ix], y, a[uintptr(i)*lda:uintptr(i)*lda+n], n, incY, 1, ky, 0) + ix += incX + } +} + +// GemvN computes +// y = alpha * A * x + beta * y +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func GemvN(m, n uintptr, alpha float64, a []float64, lda uintptr, x []float64, incX uintptr, beta float64, y []float64, incY uintptr) { + var kx, ky, i uintptr + if int(incX) < 0 { + kx = uintptr(-int(n-1) * int(incX)) + } + if int(incY) < 0 { + ky = uintptr(-int(m-1) * int(incY)) + } + + if incX == 1 && incY == 1 { + if beta == 0 { + for i = 0; i < m; i++ { + y[i] = alpha * DotUnitary(a[lda*i:lda*i+n], x) + } + return + } + for i = 0; i < m; i++ { + y[i] = y[i]*beta + alpha*DotUnitary(a[lda*i:lda*i+n], x) + } + return + } + iy := ky + if beta == 0 { + for i = 0; i < m; i++ { + y[iy] = alpha * DotInc(x, a[lda*i:lda*i+n], n, incX, 1, kx, 0) + iy += incY + } + return + } + for i = 0; i < m; i++ { + y[iy] = y[iy]*beta + alpha*DotInc(x, a[lda*i:lda*i+n], n, incX, 1, kx, 0) + iy += incY + } +} + +// GemvT computes +// y = alpha * A^T * x + beta * y +// where A is an m×n dense matrix, x and y are vectors, and alpha and beta are scalars. +func GemvT(m, n uintptr, alpha float64, a []float64, lda uintptr, x []float64, incX uintptr, beta float64, y []float64, incY uintptr) { + var kx, ky, i uintptr + if int(incX) < 0 { + kx = uintptr(-int(m-1) * int(incX)) + } + if int(incY) < 0 { + ky = uintptr(-int(n-1) * int(incY)) + } + switch { + case beta == 0: // beta == 0 is special-cased to memclear + if incY == 1 { + for i := range y { + y[i] = 0 + } + } else { + iy := ky + for i := 0; i < int(n); i++ { + y[iy] = 0 + iy += incY + } + } + case int(incY) < 0: + ScalInc(beta, y, n, uintptr(int(-incY))) + case incY == 1: + ScalUnitary(beta, y[:n]) + default: + ScalInc(beta, y, n, incY) + } + + if incX == 1 && incY == 1 { + for i = 0; i < m; i++ { + AxpyUnitaryTo(y, alpha*x[i], a[lda*i:lda*i+n], y) + } + return + } + ix := kx + for i = 0; i < m; i++ { + AxpyInc(alpha*x[ix], a[lda*i:lda*i+n], y, n, 1, incY, 0, ky) + ix += incX + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvN_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvN_amd64.s new file mode 100644 index 0000000..f0a98f0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvN_amd64.s @@ -0,0 +1,685 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SIZE 8 + +#define M_DIM m+0(FP) +#define M CX +#define N_DIM n+8(FP) +#define N BX + +#define TMP1 R14 +#define TMP2 R15 + +#define X_PTR SI +#define X x_base+56(FP) +#define INC_X R8 +#define INC3_X R9 + +#define Y_PTR DX +#define Y y_base+96(FP) +#define INC_Y R10 +#define INC3_Y R11 + +#define A_ROW AX +#define A_PTR DI +#define LDA R12 +#define LDA3 R13 + +#define ALPHA X15 +#define BETA X14 + +#define INIT4 \ + XORPS X0, X0 \ + XORPS X1, X1 \ + XORPS X2, X2 \ + XORPS X3, X3 + +#define INIT2 \ + XORPS X0, X0 \ + XORPS X1, X1 + +#define INIT1 \ + XORPS X0, X0 + +#define KERNEL_LOAD4 \ + MOVUPS (X_PTR), X12 \ + MOVUPS 2*SIZE(X_PTR), X13 + +#define KERNEL_LOAD2 \ + MOVUPS (X_PTR), X12 + +#define KERNEL_LOAD4_INC \ + MOVSD (X_PTR), X12 \ + MOVHPD (X_PTR)(INC_X*1), X12 \ + MOVSD (X_PTR)(INC_X*2), X13 \ + MOVHPD (X_PTR)(INC3_X*1), X13 + +#define KERNEL_LOAD2_INC \ + MOVSD (X_PTR), X12 \ + MOVHPD (X_PTR)(INC_X*1), X12 + +#define KERNEL_4x4 \ + MOVUPS (A_PTR), X4 \ + MOVUPS 2*SIZE(A_PTR), X5 \ + MOVUPS (A_PTR)(LDA*1), X6 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X7 \ + MOVUPS (A_PTR)(LDA*2), X8 \ + MOVUPS 2*SIZE(A_PTR)(LDA*2), X9 \ + MOVUPS (A_PTR)(LDA3*1), X10 \ + MOVUPS 2*SIZE(A_PTR)(LDA3*1), X11 \ + MULPD X12, X4 \ + MULPD X13, X5 \ + MULPD X12, X6 \ + MULPD X13, X7 \ + MULPD X12, X8 \ + MULPD X13, X9 \ + MULPD X12, X10 \ + MULPD X13, X11 \ + ADDPD X4, X0 \ + ADDPD X5, X0 \ + ADDPD X6, X1 \ + ADDPD X7, X1 \ + ADDPD X8, X2 \ + ADDPD X9, X2 \ + ADDPD X10, X3 \ + ADDPD X11, X3 \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_4x2 \ + MOVUPS (A_PTR), X4 \ + MOVUPS (A_PTR)(LDA*1), X5 \ + MOVUPS (A_PTR)(LDA*2), X6 \ + MOVUPS (A_PTR)(LDA3*1), X7 \ + MULPD X12, X4 \ + MULPD X12, X5 \ + MULPD X12, X6 \ + MULPD X12, X7 \ + ADDPD X4, X0 \ + ADDPD X5, X1 \ + ADDPD X6, X2 \ + ADDPD X7, X3 \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_4x1 \ + MOVDDUP (X_PTR), X12 \ + MOVSD (A_PTR), X4 \ + MOVHPD (A_PTR)(LDA*1), X4 \ + MOVSD (A_PTR)(LDA*2), X5 \ + MOVHPD (A_PTR)(LDA3*1), X5 \ + MULPD X12, X4 \ + MULPD X12, X5 \ + ADDPD X4, X0 \ + ADDPD X5, X2 \ + ADDQ $SIZE, A_PTR + +#define STORE4 \ + MOVUPS (Y_PTR), X4 \ + MOVUPS 2*SIZE(Y_PTR), X5 \ + MULPD ALPHA, X0 \ + MULPD ALPHA, X2 \ + MULPD BETA, X4 \ + MULPD BETA, X5 \ + ADDPD X0, X4 \ + ADDPD X2, X5 \ + MOVUPS X4, (Y_PTR) \ + MOVUPS X5, 2*SIZE(Y_PTR) + +#define STORE4_INC \ + MOVSD (Y_PTR), X4 \ + MOVHPD (Y_PTR)(INC_Y*1), X4 \ + MOVSD (Y_PTR)(INC_Y*2), X5 \ + MOVHPD (Y_PTR)(INC3_Y*1), X5 \ + MULPD ALPHA, X0 \ + MULPD ALPHA, X2 \ + MULPD BETA, X4 \ + MULPD BETA, X5 \ + ADDPD X0, X4 \ + ADDPD X2, X5 \ + MOVLPD X4, (Y_PTR) \ + MOVHPD X4, (Y_PTR)(INC_Y*1) \ + MOVLPD X5, (Y_PTR)(INC_Y*2) \ + MOVHPD X5, (Y_PTR)(INC3_Y*1) + +#define KERNEL_2x4 \ + MOVUPS (A_PTR), X8 \ + MOVUPS 2*SIZE(A_PTR), X9 \ + MOVUPS (A_PTR)(LDA*1), X10 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X11 \ + MULPD X12, X8 \ + MULPD X13, X9 \ + MULPD X12, X10 \ + MULPD X13, X11 \ + ADDPD X8, X0 \ + ADDPD X10, X1 \ + ADDPD X9, X0 \ + ADDPD X11, X1 \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_2x2 \ + MOVUPS (A_PTR), X8 \ + MOVUPS (A_PTR)(LDA*1), X9 \ + MULPD X12, X8 \ + MULPD X12, X9 \ + ADDPD X8, X0 \ + ADDPD X9, X1 \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_2x1 \ + MOVDDUP (X_PTR), X12 \ + MOVSD (A_PTR), X8 \ + MOVHPD (A_PTR)(LDA*1), X8 \ + MULPD X12, X8 \ + ADDPD X8, X0 \ + ADDQ $SIZE, A_PTR + +#define STORE2 \ + MOVUPS (Y_PTR), X4 \ + MULPD ALPHA, X0 \ + MULPD BETA, X4 \ + ADDPD X0, X4 \ + MOVUPS X4, (Y_PTR) + +#define STORE2_INC \ + MOVSD (Y_PTR), X4 \ + MOVHPD 
(Y_PTR)(INC_Y*1), X4 \ + MULPD ALPHA, X0 \ + MULPD BETA, X4 \ + ADDPD X0, X4 \ + MOVSD X4, (Y_PTR) \ + MOVHPD X4, (Y_PTR)(INC_Y*1) + +#define KERNEL_1x4 \ + MOVUPS (A_PTR), X8 \ + MOVUPS 2*SIZE(A_PTR), X9 \ + MULPD X12, X8 \ + MULPD X13, X9 \ + ADDPD X8, X0 \ + ADDPD X9, X0 \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_1x2 \ + MOVUPS (A_PTR), X8 \ + MULPD X12, X8 \ + ADDPD X8, X0 \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_1x1 \ + MOVSD (X_PTR), X12 \ + MOVSD (A_PTR), X8 \ + MULSD X12, X8 \ + ADDSD X8, X0 \ + ADDQ $SIZE, A_PTR + +#define STORE1 \ + HADDPD X0, X0 \ + MOVSD (Y_PTR), X4 \ + MULSD ALPHA, X0 \ + MULSD BETA, X4 \ + ADDSD X0, X4 \ + MOVSD X4, (Y_PTR) + +// func GemvN(m, n int, +// alpha float64, +// a []float64, lda int, +// x []float64, incX int, +// beta float64, +// y []float64, incY int) +TEXT ·GemvN(SB), NOSPLIT, $32-128 + MOVQ M_DIM, M + MOVQ N_DIM, N + CMPQ M, $0 + JE end + CMPQ N, $0 + JE end + + MOVDDUP alpha+16(FP), ALPHA + MOVDDUP beta+88(FP), BETA + + MOVQ x_base+56(FP), X_PTR + MOVQ y_base+96(FP), Y_PTR + MOVQ a_base+24(FP), A_ROW + MOVQ incY+120(FP), INC_Y + MOVQ lda+48(FP), LDA // LDA = LDA * sizeof(float64) + SHLQ $3, LDA + LEAQ (LDA)(LDA*2), LDA3 // LDA3 = LDA * 3 + MOVQ A_ROW, A_PTR + + XORQ TMP2, TMP2 + MOVQ M, TMP1 + SUBQ $1, TMP1 + IMULQ INC_Y, TMP1 + NEGQ TMP1 + CMPQ INC_Y, $0 + CMOVQLT TMP1, TMP2 + LEAQ (Y_PTR)(TMP2*SIZE), Y_PTR + MOVQ Y_PTR, Y + + SHLQ $3, INC_Y // INC_Y = incY * sizeof(float64) + LEAQ (INC_Y)(INC_Y*2), INC3_Y // INC3_Y = INC_Y * 3 + + MOVSD $0.0, X0 + COMISD BETA, X0 + JNE gemv_start // if beta != 0 { goto gemv_start } + +gemv_clear: // beta == 0 is special cased to clear memory (no nan handling) + XORPS X0, X0 + XORPS X1, X1 + XORPS X2, X2 + XORPS X3, X3 + + CMPQ incY+120(FP), $1 // Check for dense vector X (fast-path) + JNE inc_clear + + SHRQ $3, M + JZ clear4 + +clear8: + MOVUPS X0, (Y_PTR) + MOVUPS X1, 16(Y_PTR) + MOVUPS X2, 32(Y_PTR) + MOVUPS X3, 48(Y_PTR) + ADDQ $8*SIZE, Y_PTR + DECQ M + JNZ clear8 + +clear4: + TESTQ $4, M_DIM + JZ clear2 + MOVUPS X0, (Y_PTR) + MOVUPS X1, 16(Y_PTR) + ADDQ $4*SIZE, Y_PTR + +clear2: + TESTQ $2, M_DIM + JZ clear1 + MOVUPS X0, (Y_PTR) + ADDQ $2*SIZE, Y_PTR + +clear1: + TESTQ $1, M_DIM + JZ prep_end + MOVSD X0, (Y_PTR) + + JMP prep_end + +inc_clear: + SHRQ $2, M + JZ inc_clear2 + +inc_clear4: + MOVSD X0, (Y_PTR) + MOVSD X1, (Y_PTR)(INC_Y*1) + MOVSD X2, (Y_PTR)(INC_Y*2) + MOVSD X3, (Y_PTR)(INC3_Y*1) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ M + JNZ inc_clear4 + +inc_clear2: + TESTQ $2, M_DIM + JZ inc_clear1 + MOVSD X0, (Y_PTR) + MOVSD X1, (Y_PTR)(INC_Y*1) + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_clear1: + TESTQ $1, M_DIM + JZ prep_end + MOVSD X0, (Y_PTR) + +prep_end: + MOVQ Y, Y_PTR + MOVQ M_DIM, M + +gemv_start: + CMPQ incX+80(FP), $1 // Check for dense vector X (fast-path) + JNE inc + + SHRQ $2, M + JZ r2 + +r4: + // LOAD 4 + INIT4 + + MOVQ N_DIM, N + SHRQ $2, N + JZ r4c2 + +r4c4: + // 4x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x4 + + ADDQ $4*SIZE, X_PTR + + DECQ N + JNZ r4c4 + +r4c2: + TESTQ $2, N_DIM + JZ r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2 + KERNEL_4x2 + + ADDQ $2*SIZE, X_PTR + +r4c1: + HADDPD X1, X0 + HADDPD X3, X2 + TESTQ $1, N_DIM + JZ r4end + + // 4x1 KERNEL + KERNEL_4x1 + + ADDQ $SIZE, X_PTR + +r4end: + CMPQ INC_Y, $SIZE + JNZ r4st_inc + + STORE4 + ADDQ $4*SIZE, Y_PTR + JMP r4inc + +r4st_inc: + STORE4_INC + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + +r4inc: + MOVQ X, X_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ r4 + +r2: + TESTQ $2, M_DIM + JZ r1 + + // LOAD 2 + INIT2 + + MOVQ N_DIM, N + SHRQ $2, N 
+ JZ r2c2 + +r2c4: + // 2x4 KERNEL + KERNEL_LOAD4 + KERNEL_2x4 + + ADDQ $4*SIZE, X_PTR + + DECQ N + JNZ r2c4 + +r2c2: + TESTQ $2, N_DIM + JZ r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x2 + + ADDQ $2*SIZE, X_PTR + +r2c1: + HADDPD X1, X0 + TESTQ $1, N_DIM + JZ r2end + + // 2x1 KERNEL + KERNEL_2x1 + + ADDQ $SIZE, X_PTR + +r2end: + CMPQ INC_Y, $SIZE + JNE r2st_inc + + STORE2 + ADDQ $2*SIZE, Y_PTR + JMP r2inc + +r2st_inc: + STORE2_INC + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +r2inc: + MOVQ X, X_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +r1: + TESTQ $1, M_DIM + JZ end + + // LOAD 1 + INIT1 + + MOVQ N_DIM, N + SHRQ $2, N + JZ r1c2 + +r1c4: + // 1x4 KERNEL + KERNEL_LOAD4 + KERNEL_1x4 + + ADDQ $4*SIZE, X_PTR + + DECQ N + JNZ r1c4 + +r1c2: + TESTQ $2, N_DIM + JZ r1c1 + + // 1x2 KERNEL + KERNEL_LOAD2 + KERNEL_1x2 + + ADDQ $2*SIZE, X_PTR + +r1c1: + + TESTQ $1, N_DIM + JZ r1end + + // 1x1 KERNEL + KERNEL_1x1 + +r1end: + STORE1 + +end: + RET + +inc: // Algorithm for incX != 1 ( split loads in kernel ) + MOVQ incX+80(FP), INC_X // INC_X = incX + + XORQ TMP2, TMP2 // TMP2 = 0 + MOVQ N, TMP1 // TMP1 = N + SUBQ $1, TMP1 // TMP1 -= 1 + NEGQ TMP1 // TMP1 = -TMP1 + IMULQ INC_X, TMP1 // TMP1 *= INC_X + CMPQ INC_X, $0 // if INC_X < 0 { TMP2 = TMP1 } + CMOVQLT TMP1, TMP2 + LEAQ (X_PTR)(TMP2*SIZE), X_PTR // X_PTR = X_PTR[TMP2] + MOVQ X_PTR, X // X = X_PTR + + SHLQ $3, INC_X + LEAQ (INC_X)(INC_X*2), INC3_X // INC3_X = INC_X * 3 + + SHRQ $2, M + JZ inc_r2 + +inc_r4: + // LOAD 4 + INIT4 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r4c2 + +inc_r4c4: + // 4x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x4 + + LEAQ (X_PTR)(INC_X*4), X_PTR + + DECQ N + JNZ inc_r4c4 + +inc_r4c2: + TESTQ $2, N_DIM + JZ inc_r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_4x2 + + LEAQ (X_PTR)(INC_X*2), X_PTR + +inc_r4c1: + HADDPD X1, X0 + HADDPD X3, X2 + TESTQ $1, N_DIM + JZ inc_r4end + + // 4x1 KERNEL + KERNEL_4x1 + + ADDQ INC_X, X_PTR + +inc_r4end: + CMPQ INC_Y, $SIZE + JNE inc_r4st_inc + + STORE4 + ADDQ $4*SIZE, Y_PTR + JMP inc_r4inc + +inc_r4st_inc: + STORE4_INC + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + +inc_r4inc: + MOVQ X, X_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ inc_r4 + +inc_r2: + TESTQ $2, M_DIM + JZ inc_r1 + + // LOAD 2 + INIT2 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r2c2 + +inc_r2c4: + // 2x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_2x4 + + LEAQ (X_PTR)(INC_X*4), X_PTR + DECQ N + JNZ inc_r2c4 + +inc_r2c2: + TESTQ $2, N_DIM + JZ inc_r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x2 + + LEAQ (X_PTR)(INC_X*2), X_PTR + +inc_r2c1: + HADDPD X1, X0 + TESTQ $1, N_DIM + JZ inc_r2end + + // 2x1 KERNEL + KERNEL_2x1 + + ADDQ INC_X, X_PTR + +inc_r2end: + CMPQ INC_Y, $SIZE + JNE inc_r2st_inc + + STORE2 + ADDQ $2*SIZE, Y_PTR + JMP inc_r2inc + +inc_r2st_inc: + STORE2_INC + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r2inc: + MOVQ X, X_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +inc_r1: + TESTQ $1, M_DIM + JZ inc_end + + // LOAD 1 + INIT1 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r1c2 + +inc_r1c4: + // 1x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_1x4 + + LEAQ (X_PTR)(INC_X*4), X_PTR + DECQ N + JNZ inc_r1c4 + +inc_r1c2: + TESTQ $2, N_DIM + JZ inc_r1c1 + + // 1x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_1x2 + + LEAQ (X_PTR)(INC_X*2), X_PTR + +inc_r1c1: + TESTQ $1, N_DIM + JZ inc_r1end + + // 1x1 KERNEL + KERNEL_1x1 + +inc_r1end: + STORE1 + +inc_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvT_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvT_amd64.s new file mode 100644 index 0000000..87a9445 --- /dev/null 
+++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/gemvT_amd64.s @@ -0,0 +1,745 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SIZE 8 + +#define M_DIM n+8(FP) +#define M CX +#define N_DIM m+0(FP) +#define N BX + +#define TMP1 R14 +#define TMP2 R15 + +#define X_PTR SI +#define X x_base+56(FP) +#define Y_PTR DX +#define Y y_base+96(FP) +#define A_ROW AX +#define A_PTR DI + +#define INC_X R8 +#define INC3_X R9 + +#define INC_Y R10 +#define INC3_Y R11 + +#define LDA R12 +#define LDA3 R13 + +#define ALPHA X15 +#define BETA X14 + +#define INIT4 \ + MOVDDUP (X_PTR), X8 \ + MOVDDUP (X_PTR)(INC_X*1), X9 \ + MOVDDUP (X_PTR)(INC_X*2), X10 \ + MOVDDUP (X_PTR)(INC3_X*1), X11 \ + MULPD ALPHA, X8 \ + MULPD ALPHA, X9 \ + MULPD ALPHA, X10 \ + MULPD ALPHA, X11 + +#define INIT2 \ + MOVDDUP (X_PTR), X8 \ + MOVDDUP (X_PTR)(INC_X*1), X9 \ + MULPD ALPHA, X8 \ + MULPD ALPHA, X9 + +#define INIT1 \ + MOVDDUP (X_PTR), X8 \ + MULPD ALPHA, X8 + +#define KERNEL_LOAD4 \ + MOVUPS (Y_PTR), X0 \ + MOVUPS 2*SIZE(Y_PTR), X1 + +#define KERNEL_LOAD2 \ + MOVUPS (Y_PTR), X0 + +#define KERNEL_LOAD4_INC \ + MOVSD (Y_PTR), X0 \ + MOVHPD (Y_PTR)(INC_Y*1), X0 \ + MOVSD (Y_PTR)(INC_Y*2), X1 \ + MOVHPD (Y_PTR)(INC3_Y*1), X1 + +#define KERNEL_LOAD2_INC \ + MOVSD (Y_PTR), X0 \ + MOVHPD (Y_PTR)(INC_Y*1), X0 + +#define KERNEL_4x4 \ + MOVUPS (A_PTR), X4 \ + MOVUPS 2*SIZE(A_PTR), X5 \ + MOVUPS (A_PTR)(LDA*1), X6 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X7 \ + MULPD X8, X4 \ + MULPD X8, X5 \ + MULPD X9, X6 \ + MULPD X9, X7 \ + ADDPD X4, X0 \ + ADDPD X5, X1 \ + ADDPD X6, X0 \ + ADDPD X7, X1 \ + MOVUPS (A_PTR)(LDA*2), X4 \ + MOVUPS 2*SIZE(A_PTR)(LDA*2), X5 \ + MOVUPS (A_PTR)(LDA3*1), X6 \ + MOVUPS 2*SIZE(A_PTR)(LDA3*1), X7 \ + MULPD X10, X4 \ + MULPD X10, X5 \ + MULPD X11, X6 \ + MULPD X11, X7 \ + ADDPD X4, X0 \ + ADDPD X5, X1 \ + ADDPD X6, X0 \ + ADDPD X7, X1 \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_4x2 \ + MOVUPS (A_PTR), X4 \ + MOVUPS 2*SIZE(A_PTR), X5 \ + MOVUPS (A_PTR)(LDA*1), X6 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X7 \ + MULPD X8, X4 \ + MULPD X8, X5 \ + MULPD X9, X6 \ + MULPD X9, X7 \ + ADDPD X4, X0 \ + ADDPD X5, X1 \ + ADDPD X6, X0 \ + ADDPD X7, X1 \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_4x1 \ + MOVUPS (A_PTR), X4 \ + MOVUPS 2*SIZE(A_PTR), X5 \ + MULPD X8, X4 \ + MULPD X8, X5 \ + ADDPD X4, X0 \ + ADDPD X5, X1 \ + ADDQ $4*SIZE, A_PTR + +#define STORE4 \ + MOVUPS X0, (Y_PTR) \ + MOVUPS X1, 2*SIZE(Y_PTR) + +#define STORE4_INC \ + MOVLPD X0, (Y_PTR) \ + MOVHPD X0, (Y_PTR)(INC_Y*1) \ + MOVLPD X1, (Y_PTR)(INC_Y*2) \ + MOVHPD X1, (Y_PTR)(INC3_Y*1) + +#define KERNEL_2x4 \ + MOVUPS (A_PTR), X4 \ + MOVUPS (A_PTR)(LDA*1), X5 \ + MOVUPS (A_PTR)(LDA*2), X6 \ + MOVUPS (A_PTR)(LDA3*1), X7 \ + MULPD X8, X4 \ + MULPD X9, X5 \ + MULPD X10, X6 \ + MULPD X11, X7 \ + ADDPD X4, X0 \ + ADDPD X5, X0 \ + ADDPD X6, X0 \ + ADDPD X7, X0 \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_2x2 \ + MOVUPS (A_PTR), X4 \ + MOVUPS (A_PTR)(LDA*1), X5 \ + MULPD X8, X4 \ + MULPD X9, X5 \ + ADDPD X4, X0 \ + ADDPD X5, X0 \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_2x1 \ + MOVUPS (A_PTR), X4 \ + MULPD X8, X4 \ + ADDPD X4, X0 \ + ADDQ $2*SIZE, A_PTR + +#define STORE2 \ + MOVUPS X0, (Y_PTR) + +#define STORE2_INC \ + MOVLPD X0, (Y_PTR) \ + MOVHPD X0, (Y_PTR)(INC_Y*1) + +#define KERNEL_1x4 \ + MOVSD (Y_PTR), X0 \ + MOVSD (A_PTR), X4 \ + MOVSD (A_PTR)(LDA*1), X5 \ + MOVSD (A_PTR)(LDA*2), X6 \ + MOVSD 
(A_PTR)(LDA3*1), X7 \ + MULSD X8, X4 \ + MULSD X9, X5 \ + MULSD X10, X6 \ + MULSD X11, X7 \ + ADDSD X4, X0 \ + ADDSD X5, X0 \ + ADDSD X6, X0 \ + ADDSD X7, X0 \ + MOVSD X0, (Y_PTR) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_1x2 \ + MOVSD (Y_PTR), X0 \ + MOVSD (A_PTR), X4 \ + MOVSD (A_PTR)(LDA*1), X5 \ + MULSD X8, X4 \ + MULSD X9, X5 \ + ADDSD X4, X0 \ + ADDSD X5, X0 \ + MOVSD X0, (Y_PTR) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_1x1 \ + MOVSD (Y_PTR), X0 \ + MOVSD (A_PTR), X4 \ + MULSD X8, X4 \ + ADDSD X4, X0 \ + MOVSD X0, (Y_PTR) \ + ADDQ $SIZE, A_PTR + +#define SCALE_8(PTR, SCAL) \ + MOVUPS (PTR), X0 \ + MOVUPS 16(PTR), X1 \ + MOVUPS 32(PTR), X2 \ + MOVUPS 48(PTR), X3 \ + MULPD SCAL, X0 \ + MULPD SCAL, X1 \ + MULPD SCAL, X2 \ + MULPD SCAL, X3 \ + MOVUPS X0, (PTR) \ + MOVUPS X1, 16(PTR) \ + MOVUPS X2, 32(PTR) \ + MOVUPS X3, 48(PTR) + +#define SCALE_4(PTR, SCAL) \ + MOVUPS (PTR), X0 \ + MOVUPS 16(PTR), X1 \ + MULPD SCAL, X0 \ + MULPD SCAL, X1 \ + MOVUPS X0, (PTR) \ + MOVUPS X1, 16(PTR) \ + +#define SCALE_2(PTR, SCAL) \ + MOVUPS (PTR), X0 \ + MULPD SCAL, X0 \ + MOVUPS X0, (PTR) \ + +#define SCALE_1(PTR, SCAL) \ + MOVSD (PTR), X0 \ + MULSD SCAL, X0 \ + MOVSD X0, (PTR) \ + +#define SCALEINC_4(PTR, INC, INC3, SCAL) \ + MOVSD (PTR), X0 \ + MOVSD (PTR)(INC*1), X1 \ + MOVSD (PTR)(INC*2), X2 \ + MOVSD (PTR)(INC3*1), X3 \ + MULSD SCAL, X0 \ + MULSD SCAL, X1 \ + MULSD SCAL, X2 \ + MULSD SCAL, X3 \ + MOVSD X0, (PTR) \ + MOVSD X1, (PTR)(INC*1) \ + MOVSD X2, (PTR)(INC*2) \ + MOVSD X3, (PTR)(INC3*1) + +#define SCALEINC_2(PTR, INC, SCAL) \ + MOVSD (PTR), X0 \ + MOVSD (PTR)(INC*1), X1 \ + MULSD SCAL, X0 \ + MULSD SCAL, X1 \ + MOVSD X0, (PTR) \ + MOVSD X1, (PTR)(INC*1) + +// func GemvT(m, n int, +// alpha float64, +// a []float64, lda int, +// x []float64, incX int, +// beta float64, +// y []float64, incY int) +TEXT ·GemvT(SB), NOSPLIT, $32-128 + MOVQ M_DIM, M + MOVQ N_DIM, N + CMPQ M, $0 + JE end + CMPQ N, $0 + JE end + + MOVDDUP alpha+16(FP), ALPHA + + MOVQ x_base+56(FP), X_PTR + MOVQ y_base+96(FP), Y_PTR + MOVQ a_base+24(FP), A_ROW + MOVQ incY+120(FP), INC_Y // INC_Y = incY * sizeof(float64) + MOVQ lda+48(FP), LDA // LDA = LDA * sizeof(float64) + SHLQ $3, LDA + LEAQ (LDA)(LDA*2), LDA3 // LDA3 = LDA * 3 + MOVQ A_ROW, A_PTR + + MOVQ incX+80(FP), INC_X // INC_X = incX * sizeof(float64) + + XORQ TMP2, TMP2 + MOVQ N, TMP1 + SUBQ $1, TMP1 + NEGQ TMP1 + IMULQ INC_X, TMP1 + CMPQ INC_X, $0 + CMOVQLT TMP1, TMP2 + LEAQ (X_PTR)(TMP2*SIZE), X_PTR + MOVQ X_PTR, X + + SHLQ $3, INC_X + LEAQ (INC_X)(INC_X*2), INC3_X // INC3_X = INC_X * 3 + + CMPQ incY+120(FP), $1 // Check for dense vector Y (fast-path) + JNE inc + + MOVSD $1.0, X0 + COMISD beta+88(FP), X0 + JE gemv_start + + MOVSD $0.0, X0 + COMISD beta+88(FP), X0 + JE gemv_clear + + MOVDDUP beta+88(FP), BETA + SHRQ $3, M + JZ scal4 + +scal8: + SCALE_8(Y_PTR, BETA) + ADDQ $8*SIZE, Y_PTR + DECQ M + JNZ scal8 + +scal4: + TESTQ $4, M_DIM + JZ scal2 + SCALE_4(Y_PTR, BETA) + ADDQ $4*SIZE, Y_PTR + +scal2: + TESTQ $2, M_DIM + JZ scal1 + SCALE_2(Y_PTR, BETA) + ADDQ $2*SIZE, Y_PTR + +scal1: + TESTQ $1, M_DIM + JZ prep_end + SCALE_1(Y_PTR, BETA) + + JMP prep_end + +gemv_clear: // beta == 0 is special cased to clear memory (no nan handling) + XORPS X0, X0 + XORPS X1, X1 + XORPS X2, X2 + XORPS X3, X3 + + SHRQ $3, M + JZ clear4 + +clear8: + MOVUPS X0, (Y_PTR) + MOVUPS X1, 16(Y_PTR) + MOVUPS X2, 32(Y_PTR) + MOVUPS X3, 48(Y_PTR) + ADDQ $8*SIZE, Y_PTR + DECQ M + JNZ clear8 + +clear4: + TESTQ $4, M_DIM + JZ clear2 + MOVUPS X0, (Y_PTR) + MOVUPS X1, 16(Y_PTR) + ADDQ $4*SIZE, Y_PTR + +clear2: + 
TESTQ $2, M_DIM + JZ clear1 + MOVUPS X0, (Y_PTR) + ADDQ $2*SIZE, Y_PTR + +clear1: + TESTQ $1, M_DIM + JZ prep_end + MOVSD X0, (Y_PTR) + +prep_end: + MOVQ Y, Y_PTR + MOVQ M_DIM, M + +gemv_start: + SHRQ $2, N + JZ c2 + +c4: + // LOAD 4 + INIT4 + + MOVQ M_DIM, M + SHRQ $2, M + JZ c4r2 + +c4r4: + // 4x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x4 + STORE4 + + ADDQ $4*SIZE, Y_PTR + + DECQ M + JNZ c4r4 + +c4r2: + TESTQ $2, M_DIM + JZ c4r1 + + // 4x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x4 + STORE2 + + ADDQ $2*SIZE, Y_PTR + +c4r1: + TESTQ $1, M_DIM + JZ c4end + + // 4x1 KERNEL + KERNEL_1x4 + + ADDQ $SIZE, Y_PTR + +c4end: + LEAQ (X_PTR)(INC_X*4), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ N + JNZ c4 + +c2: + TESTQ $2, N_DIM + JZ c1 + + // LOAD 2 + INIT2 + + MOVQ M_DIM, M + SHRQ $2, M + JZ c2r2 + +c2r4: + // 2x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x2 + STORE4 + + ADDQ $4*SIZE, Y_PTR + + DECQ M + JNZ c2r4 + +c2r2: + TESTQ $2, M_DIM + JZ c2r1 + + // 2x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x2 + STORE2 + + ADDQ $2*SIZE, Y_PTR + +c2r1: + TESTQ $1, M_DIM + JZ c2end + + // 2x1 KERNEL + KERNEL_1x2 + + ADDQ $SIZE, Y_PTR + +c2end: + LEAQ (X_PTR)(INC_X*2), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +c1: + TESTQ $1, N_DIM + JZ end + + // LOAD 1 + INIT1 + + MOVQ M_DIM, M + SHRQ $2, M + JZ c1r2 + +c1r4: + // 1x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x1 + STORE4 + + ADDQ $4*SIZE, Y_PTR + + DECQ M + JNZ c1r4 + +c1r2: + TESTQ $2, M_DIM + JZ c1r1 + + // 1x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x1 + STORE2 + + ADDQ $2*SIZE, Y_PTR + +c1r1: + TESTQ $1, M_DIM + JZ end + + // 1x1 KERNEL + KERNEL_1x1 + +end: + RET + +inc: // Algorithm for incX != 0 ( split loads in kernel ) + XORQ TMP2, TMP2 + MOVQ M, TMP1 + SUBQ $1, TMP1 + IMULQ INC_Y, TMP1 + NEGQ TMP1 + CMPQ INC_Y, $0 + CMOVQLT TMP1, TMP2 + LEAQ (Y_PTR)(TMP2*SIZE), Y_PTR + MOVQ Y_PTR, Y + + SHLQ $3, INC_Y + LEAQ (INC_Y)(INC_Y*2), INC3_Y // INC3_Y = INC_Y * 3 + + MOVSD $1.0, X0 + COMISD beta+88(FP), X0 + JE inc_gemv_start + + MOVSD $0.0, X0 + COMISD beta+88(FP), X0 + JE inc_gemv_clear + + MOVDDUP beta+88(FP), BETA + SHRQ $2, M + JZ inc_scal2 + +inc_scal4: + SCALEINC_4(Y_PTR, INC_Y, INC3_Y, BETA) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ M + JNZ inc_scal4 + +inc_scal2: + TESTQ $2, M_DIM + JZ inc_scal1 + + SCALEINC_2(Y_PTR, INC_Y, BETA) + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_scal1: + TESTQ $1, M_DIM + JZ inc_prep_end + SCALE_1(Y_PTR, BETA) + + JMP inc_prep_end + +inc_gemv_clear: // beta == 0 is special-cased to clear memory (no nan handling) + XORPS X0, X0 + XORPS X1, X1 + XORPS X2, X2 + XORPS X3, X3 + + SHRQ $2, M + JZ inc_clear2 + +inc_clear4: + MOVSD X0, (Y_PTR) + MOVSD X1, (Y_PTR)(INC_Y*1) + MOVSD X2, (Y_PTR)(INC_Y*2) + MOVSD X3, (Y_PTR)(INC3_Y*1) + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ M + JNZ inc_clear4 + +inc_clear2: + TESTQ $2, M_DIM + JZ inc_clear1 + MOVSD X0, (Y_PTR) + MOVSD X1, (Y_PTR)(INC_Y*1) + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_clear1: + TESTQ $1, M_DIM + JZ inc_prep_end + MOVSD X0, (Y_PTR) + +inc_prep_end: + MOVQ Y, Y_PTR + MOVQ M_DIM, M + +inc_gemv_start: + SHRQ $2, N + JZ inc_c2 + +inc_c4: + // LOAD 4 + INIT4 + + MOVQ M_DIM, M + SHRQ $2, M + JZ inc_c4r2 + +inc_c4r4: + // 4x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x4 + STORE4_INC + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + + DECQ M + JNZ inc_c4r4 + +inc_c4r2: + TESTQ $2, M_DIM + JZ inc_c4r1 + + // 4x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x4 + STORE2_INC + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_c4r1: + TESTQ $1, M_DIM + JZ inc_c4end + + // 4x1 KERNEL + KERNEL_1x4 + + ADDQ INC_Y, Y_PTR + 
+inc_c4end: + LEAQ (X_PTR)(INC_X*4), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ N + JNZ inc_c4 + +inc_c2: + TESTQ $2, N_DIM + JZ inc_c1 + + // LOAD 2 + INIT2 + + MOVQ M_DIM, M + SHRQ $2, M + JZ inc_c2r2 + +inc_c2r4: + // 2x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x2 + STORE4_INC + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ M + JNZ inc_c2r4 + +inc_c2r2: + TESTQ $2, M_DIM + JZ inc_c2r1 + + // 2x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x2 + STORE2_INC + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_c2r1: + TESTQ $1, M_DIM + JZ inc_c2end + + // 2x1 KERNEL + KERNEL_1x2 + + ADDQ INC_Y, Y_PTR + +inc_c2end: + LEAQ (X_PTR)(INC_X*2), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +inc_c1: + TESTQ $1, N_DIM + JZ inc_end + + // LOAD 1 + INIT1 + + MOVQ M_DIM, M + SHRQ $2, M + JZ inc_c1r2 + +inc_c1r4: + // 1x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x1 + STORE4_INC + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ M + JNZ inc_c1r4 + +inc_c1r2: + TESTQ $2, M_DIM + JZ inc_c1r1 + + // 1x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x1 + STORE2_INC + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_c1r1: + TESTQ $1, M_DIM + JZ inc_end + + // 1x1 KERNEL + KERNEL_1x1 + +inc_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/ger_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/ger_amd64.s new file mode 100644 index 0000000..7ae5cf7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/ger_amd64.s @@ -0,0 +1,591 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define SIZE 8 + +#define M_DIM m+0(FP) +#define M CX +#define N_DIM n+8(FP) +#define N BX + +#define TMP1 R14 +#define TMP2 R15 + +#define X_PTR SI +#define Y y_base+56(FP) +#define Y_PTR DX +#define A_ROW AX +#define A_PTR DI + +#define INC_X R8 +#define INC3_X R9 + +#define INC_Y R10 +#define INC3_Y R11 + +#define LDA R12 +#define LDA3 R13 + +#define ALPHA X0 + +#define LOAD4 \ + PREFETCHNTA (X_PTR )(INC_X*8) \ + MOVDDUP (X_PTR), X1 \ + MOVDDUP (X_PTR)(INC_X*1), X2 \ + MOVDDUP (X_PTR)(INC_X*2), X3 \ + MOVDDUP (X_PTR)(INC3_X*1), X4 \ + MULPD ALPHA, X1 \ + MULPD ALPHA, X2 \ + MULPD ALPHA, X3 \ + MULPD ALPHA, X4 + +#define LOAD2 \ + MOVDDUP (X_PTR), X1 \ + MOVDDUP (X_PTR)(INC_X*1), X2 \ + MULPD ALPHA, X1 \ + MULPD ALPHA, X2 + +#define LOAD1 \ + MOVDDUP (X_PTR), X1 \ + MULPD ALPHA, X1 + +#define KERNEL_LOAD4 \ + MOVUPS (Y_PTR), X5 \ + MOVUPS 2*SIZE(Y_PTR), X6 + +#define KERNEL_LOAD4_INC \ + MOVLPD (Y_PTR), X5 \ + MOVHPD (Y_PTR)(INC_Y*1), X5 \ + MOVLPD (Y_PTR)(INC_Y*2), X6 \ + MOVHPD (Y_PTR)(INC3_Y*1), X6 + +#define KERNEL_LOAD2 \ + MOVUPS (Y_PTR), X5 + +#define KERNEL_LOAD2_INC \ + MOVLPD (Y_PTR), X5 \ + MOVHPD (Y_PTR)(INC_Y*1), X5 + +#define KERNEL_4x4 \ + MOVUPS X5, X7 \ + MOVUPS X6, X8 \ + MOVUPS X5, X9 \ + MOVUPS X6, X10 \ + MOVUPS X5, X11 \ + MOVUPS X6, X12 \ + MULPD X1, X5 \ + MULPD X1, X6 \ + MULPD X2, X7 \ + MULPD X2, X8 \ + MULPD X3, X9 \ + MULPD X3, X10 \ + MULPD X4, X11 \ + MULPD X4, X12 + +#define STORE_4x4 \ + MOVUPS (A_PTR), X13 \ + ADDPD X13, X5 \ + MOVUPS 2*SIZE(A_PTR), X14 \ + ADDPD X14, X6 \ + MOVUPS (A_PTR)(LDA*1), X15 \ + ADDPD X15, X7 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X0 \ + ADDPD X0, X8 \ + MOVUPS (A_PTR)(LDA*2), X13 \ + ADDPD X13, X9 \ + MOVUPS 2*SIZE(A_PTR)(LDA*2), X14 \ + ADDPD X14, X10 \ + MOVUPS (A_PTR)(LDA3*1), X15 \ + ADDPD X15, X11 \ + MOVUPS 2*SIZE(A_PTR)(LDA3*1), X0 \ + ADDPD X0, X12 \ + MOVUPS X5, 
(A_PTR) \ + MOVUPS X6, 2*SIZE(A_PTR) \ + MOVUPS X7, (A_PTR)(LDA*1) \ + MOVUPS X8, 2*SIZE(A_PTR)(LDA*1) \ + MOVUPS X9, (A_PTR)(LDA*2) \ + MOVUPS X10, 2*SIZE(A_PTR)(LDA*2) \ + MOVUPS X11, (A_PTR)(LDA3*1) \ + MOVUPS X12, 2*SIZE(A_PTR)(LDA3*1) \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_4x2 \ + MOVUPS X5, X6 \ + MOVUPS X5, X7 \ + MOVUPS X5, X8 \ + MULPD X1, X5 \ + MULPD X2, X6 \ + MULPD X3, X7 \ + MULPD X4, X8 + +#define STORE_4x2 \ + MOVUPS (A_PTR), X9 \ + ADDPD X9, X5 \ + MOVUPS (A_PTR)(LDA*1), X10 \ + ADDPD X10, X6 \ + MOVUPS (A_PTR)(LDA*2), X11 \ + ADDPD X11, X7 \ + MOVUPS (A_PTR)(LDA3*1), X12 \ + ADDPD X12, X8 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, (A_PTR)(LDA*1) \ + MOVUPS X7, (A_PTR)(LDA*2) \ + MOVUPS X8, (A_PTR)(LDA3*1) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_4x1 \ + MOVSD (Y_PTR), X5 \ + MOVSD X5, X6 \ + MOVSD X5, X7 \ + MOVSD X5, X8 \ + MULSD X1, X5 \ + MULSD X2, X6 \ + MULSD X3, X7 \ + MULSD X4, X8 + +#define STORE_4x1 \ + ADDSD (A_PTR), X5 \ + ADDSD (A_PTR)(LDA*1), X6 \ + ADDSD (A_PTR)(LDA*2), X7 \ + ADDSD (A_PTR)(LDA3*1), X8 \ + MOVSD X5, (A_PTR) \ + MOVSD X6, (A_PTR)(LDA*1) \ + MOVSD X7, (A_PTR)(LDA*2) \ + MOVSD X8, (A_PTR)(LDA3*1) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_2x4 \ + MOVUPS X5, X7 \ + MOVUPS X6, X8 \ + MULPD X1, X5 \ + MULPD X1, X6 \ + MULPD X2, X7 \ + MULPD X2, X8 + +#define STORE_2x4 \ + MOVUPS (A_PTR), X9 \ + ADDPD X9, X5 \ + MOVUPS 2*SIZE(A_PTR), X10 \ + ADDPD X10, X6 \ + MOVUPS (A_PTR)(LDA*1), X11 \ + ADDPD X11, X7 \ + MOVUPS 2*SIZE(A_PTR)(LDA*1), X12 \ + ADDPD X12, X8 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, 2*SIZE(A_PTR) \ + MOVUPS X7, (A_PTR)(LDA*1) \ + MOVUPS X8, 2*SIZE(A_PTR)(LDA*1) \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_2x2 \ + MOVUPS X5, X6 \ + MULPD X1, X5 \ + MULPD X2, X6 + +#define STORE_2x2 \ + MOVUPS (A_PTR), X7 \ + ADDPD X7, X5 \ + MOVUPS (A_PTR)(LDA*1), X8 \ + ADDPD X8, X6 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, (A_PTR)(LDA*1) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_2x1 \ + MOVSD (Y_PTR), X5 \ + MOVSD X5, X6 \ + MULSD X1, X5 \ + MULSD X2, X6 + +#define STORE_2x1 \ + ADDSD (A_PTR), X5 \ + ADDSD (A_PTR)(LDA*1), X6 \ + MOVSD X5, (A_PTR) \ + MOVSD X6, (A_PTR)(LDA*1) \ + ADDQ $SIZE, A_PTR + +#define KERNEL_1x4 \ + MULPD X1, X5 \ + MULPD X1, X6 + +#define STORE_1x4 \ + MOVUPS (A_PTR), X7 \ + ADDPD X7, X5 \ + MOVUPS 2*SIZE(A_PTR), X8 \ + ADDPD X8, X6 \ + MOVUPS X5, (A_PTR) \ + MOVUPS X6, 2*SIZE(A_PTR) \ + ADDQ $4*SIZE, A_PTR + +#define KERNEL_1x2 \ + MULPD X1, X5 + +#define STORE_1x2 \ + MOVUPS (A_PTR), X6 \ + ADDPD X6, X5 \ + MOVUPS X5, (A_PTR) \ + ADDQ $2*SIZE, A_PTR + +#define KERNEL_1x1 \ + MOVSD (Y_PTR), X5 \ + MULSD X1, X5 + +#define STORE_1x1 \ + ADDSD (A_PTR), X5 \ + MOVSD X5, (A_PTR) \ + ADDQ $SIZE, A_PTR + +// func Ger(m, n uintptr, alpha float64, +// x []float64, incX uintptr, +// y []float64, incY uintptr, +// a []float64, lda uintptr) +TEXT ·Ger(SB), NOSPLIT, $0 + MOVQ M_DIM, M + MOVQ N_DIM, N + CMPQ M, $0 + JE end + CMPQ N, $0 + JE end + + MOVDDUP alpha+16(FP), ALPHA + + MOVQ x_base+24(FP), X_PTR + MOVQ y_base+56(FP), Y_PTR + MOVQ a_base+88(FP), A_ROW + MOVQ incX+48(FP), INC_X // INC_X = incX * sizeof(float64) + SHLQ $3, INC_X + MOVQ lda+112(FP), LDA // LDA = LDA * sizeof(float64) + SHLQ $3, LDA + LEAQ (LDA)(LDA*2), LDA3 // LDA3 = LDA * 3 + LEAQ (INC_X)(INC_X*2), INC3_X // INC3_X = INC_X * 3 + MOVQ A_ROW, A_PTR + + XORQ TMP2, TMP2 + MOVQ M, TMP1 + SUBQ $1, TMP1 + IMULQ INC_X, TMP1 + NEGQ TMP1 + CMPQ INC_X, $0 + CMOVQLT TMP1, TMP2 + LEAQ (X_PTR)(TMP2*SIZE), X_PTR + + CMPQ incY+80(FP), $1 // Check for dense vector Y (fast-path) + JG inc + JL end 
+ + SHRQ $2, M + JZ r2 + +r4: + // LOAD 4 + LOAD4 + + MOVQ N_DIM, N + SHRQ $2, N + JZ r4c2 + +r4c4: + // 4x4 KERNEL + KERNEL_LOAD4 + KERNEL_4x4 + STORE_4x4 + + ADDQ $4*SIZE, Y_PTR + + DECQ N + JNZ r4c4 + + // Reload ALPHA after it's clobbered by STORE_4x4 + MOVDDUP alpha+16(FP), ALPHA + +r4c2: + TESTQ $2, N_DIM + JZ r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2 + KERNEL_4x2 + STORE_4x2 + + ADDQ $2*SIZE, Y_PTR + +r4c1: + TESTQ $1, N_DIM + JZ r4end + + // 4x1 KERNEL + KERNEL_4x1 + STORE_4x1 + + ADDQ $SIZE, Y_PTR + +r4end: + LEAQ (X_PTR)(INC_X*4), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ r4 + +r2: + TESTQ $2, M_DIM + JZ r1 + + // LOAD 2 + LOAD2 + + MOVQ N_DIM, N + SHRQ $2, N + JZ r2c2 + +r2c4: + // 2x4 KERNEL + KERNEL_LOAD4 + KERNEL_2x4 + STORE_2x4 + + ADDQ $4*SIZE, Y_PTR + + DECQ N + JNZ r2c4 + +r2c2: + TESTQ $2, N_DIM + JZ r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2 + KERNEL_2x2 + STORE_2x2 + + ADDQ $2*SIZE, Y_PTR + +r2c1: + TESTQ $1, N_DIM + JZ r2end + + // 2x1 KERNEL + KERNEL_2x1 + STORE_2x1 + + ADDQ $SIZE, Y_PTR + +r2end: + LEAQ (X_PTR)(INC_X*2), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +r1: + TESTQ $1, M_DIM + JZ end + + // LOAD 1 + LOAD1 + + MOVQ N_DIM, N + SHRQ $2, N + JZ r1c2 + +r1c4: + // 1x4 KERNEL + KERNEL_LOAD4 + KERNEL_1x4 + STORE_1x4 + + ADDQ $4*SIZE, Y_PTR + + DECQ N + JNZ r1c4 + +r1c2: + TESTQ $2, N_DIM + JZ r1c1 + + // 1x2 KERNEL + KERNEL_LOAD2 + KERNEL_1x2 + STORE_1x2 + + ADDQ $2*SIZE, Y_PTR + +r1c1: + TESTQ $1, N_DIM + JZ end + + // 1x1 KERNEL + KERNEL_1x1 + STORE_1x1 + + ADDQ $SIZE, Y_PTR + +end: + RET + +inc: // Algorithm for incY != 1 ( split loads in kernel ) + + MOVQ incY+80(FP), INC_Y // INC_Y = incY * sizeof(float64) + SHLQ $3, INC_Y + LEAQ (INC_Y)(INC_Y*2), INC3_Y // INC3_Y = INC_Y * 3 + + XORQ TMP2, TMP2 + MOVQ N, TMP1 + SUBQ $1, TMP1 + IMULQ INC_Y, TMP1 + NEGQ TMP1 + CMPQ INC_Y, $0 + CMOVQLT TMP1, TMP2 + LEAQ (Y_PTR)(TMP2*SIZE), Y_PTR + + SHRQ $2, M + JZ inc_r2 + +inc_r4: + // LOAD 4 + LOAD4 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r4c2 + +inc_r4c4: + // 4x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_4x4 + STORE_4x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r4c4 + + // Reload ALPHA after it's clobbered by STORE_4x4 + MOVDDUP alpha+16(FP), ALPHA + +inc_r4c2: + TESTQ $2, N_DIM + JZ inc_r4c1 + + // 4x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_4x2 + STORE_4x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r4c1: + TESTQ $1, N_DIM + JZ inc_r4end + + // 4x1 KERNEL + KERNEL_4x1 + STORE_4x1 + + ADDQ INC_Y, Y_PTR + +inc_r4end: + LEAQ (X_PTR)(INC_X*4), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*4), A_ROW + MOVQ A_ROW, A_PTR + + DECQ M + JNZ inc_r4 + +inc_r2: + TESTQ $2, M_DIM + JZ inc_r1 + + // LOAD 2 + LOAD2 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r2c2 + +inc_r2c4: + // 2x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_2x4 + STORE_2x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r2c4 + +inc_r2c2: + TESTQ $2, N_DIM + JZ inc_r2c1 + + // 2x2 KERNEL + KERNEL_LOAD2_INC + KERNEL_2x2 + STORE_2x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r2c1: + TESTQ $1, N_DIM + JZ inc_r2end + + // 2x1 KERNEL + KERNEL_2x1 + STORE_2x1 + + ADDQ INC_Y, Y_PTR + +inc_r2end: + LEAQ (X_PTR)(INC_X*2), X_PTR + MOVQ Y, Y_PTR + LEAQ (A_ROW)(LDA*2), A_ROW + MOVQ A_ROW, A_PTR + +inc_r1: + TESTQ $1, M_DIM + JZ end + + // LOAD 1 + LOAD1 + + MOVQ N_DIM, N + SHRQ $2, N + JZ inc_r1c2 + +inc_r1c4: + // 1x4 KERNEL + KERNEL_LOAD4_INC + KERNEL_1x4 + STORE_1x4 + + LEAQ (Y_PTR)(INC_Y*4), Y_PTR + DECQ N + JNZ inc_r1c4 + +inc_r1c2: + TESTQ $2, N_DIM + JZ inc_r1c1 + + // 1x2 
KERNEL + KERNEL_LOAD2_INC + KERNEL_1x2 + STORE_1x2 + + LEAQ (Y_PTR)(INC_Y*2), Y_PTR + +inc_r1c1: + TESTQ $1, N_DIM + JZ end + + // 1x1 KERNEL + KERNEL_1x1 + STORE_1x1 + + ADDQ INC_Y, Y_PTR + +inc_end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/l1norm_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/l1norm_amd64.s new file mode 100644 index 0000000..f87f856 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/l1norm_amd64.s @@ -0,0 +1,58 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func L1Dist(s, t []float64) float64 +TEXT ·L1Dist(SB), NOSPLIT, $0 + MOVQ s_base+0(FP), DI // DI = &s + MOVQ t_base+24(FP), SI // SI = &t + MOVQ s_len+8(FP), CX // CX = len(s) + CMPQ t_len+32(FP), CX // CX = max( CX, len(t) ) + CMOVQLE t_len+32(FP), CX + PXOR X3, X3 // norm = 0 + CMPQ CX, $0 // if CX == 0 { return 0 } + JE l1_end + XORQ AX, AX // i = 0 + MOVQ CX, BX + ANDQ $1, BX // BX = CX % 2 + SHRQ $1, CX // CX = floor( CX / 2 ) + JZ l1_tail_start // if CX == 0 { return 0 } + +l1_loop: // Loop unrolled 2x do { + MOVUPS (SI)(AX*8), X0 // X0 = t[i:i+1] + MOVUPS (DI)(AX*8), X1 // X1 = s[i:i+1] + MOVAPS X0, X2 + SUBPD X1, X0 + SUBPD X2, X1 + MAXPD X1, X0 // X0 = max( X0 - X1, X1 - X0 ) + ADDPD X0, X3 // norm += X0 + ADDQ $2, AX // i += 2 + LOOP l1_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE l1_end + +l1_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + PXOR X0, X0 // reset X0, X1 to break dependencies + PXOR X1, X1 + +l1_tail: + MOVSD (SI)(AX*8), X0 // X0 = t[i] + MOVSD (DI)(AX*8), X1 // x1 = s[i] + MOVAPD X0, X2 + SUBSD X1, X0 + SUBSD X2, X1 + MAXSD X1, X0 // X0 = max( X0 - X1, X1 - X0 ) + ADDSD X0, X3 // norm += X0 + +l1_end: + MOVAPS X3, X2 + SHUFPD $1, X2, X2 + ADDSD X3, X2 // X2 = X3[1] + X3[0] + MOVSD X2, ret+48(FP) // return X2 + RET + diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/linfnorm_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/linfnorm_amd64.s new file mode 100644 index 0000000..b062592 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/linfnorm_amd64.s @@ -0,0 +1,57 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +// func LinfDist(s, t []float64) float64 +TEXT ·LinfDist(SB), NOSPLIT, $0 + MOVQ s_base+0(FP), DI // DI = &s + MOVQ t_base+24(FP), SI // SI = &t + MOVQ s_len+8(FP), CX // CX = len(s) + CMPQ t_len+32(FP), CX // CX = max( CX, len(t) ) + CMOVQLE t_len+32(FP), CX + PXOR X3, X3 // norm = 0 + CMPQ CX, $0 // if CX == 0 { return 0 } + JE l1_end + XORQ AX, AX // i = 0 + MOVQ CX, BX + ANDQ $1, BX // BX = CX % 2 + SHRQ $1, CX // CX = floor( CX / 2 ) + JZ l1_tail_start // if CX == 0 { return 0 } + +l1_loop: // Loop unrolled 2x do { + MOVUPS (SI)(AX*8), X0 // X0 = t[i:i+1] + MOVUPS (DI)(AX*8), X1 // X1 = s[i:i+1] + MOVAPS X0, X2 + SUBPD X1, X0 + SUBPD X2, X1 + MAXPD X1, X0 // X0 = max( X0 - X1, X1 - X0 ) + MAXPD X0, X3 // norm = max( norm, X0 ) + ADDQ $2, AX // i += 2 + LOOP l1_loop // } while --CX > 0 + CMPQ BX, $0 // if BX == 0 { return } + JE l1_end + +l1_tail_start: // Reset loop registers + MOVQ BX, CX // Loop counter: CX = BX + PXOR X0, X0 // reset X0, X1 to break dependencies + PXOR X1, X1 + +l1_tail: + MOVSD (SI)(AX*8), X0 // X0 = t[i] + MOVSD (DI)(AX*8), X1 // X1 = s[i] + MOVAPD X0, X2 + SUBSD X1, X0 + SUBSD X2, X1 + MAXSD X1, X0 // X0 = max( X0 - X1, X1 - X0 ) + MAXSD X0, X3 // norm = max( norm, X0 ) + +l1_end: + MOVAPS X3, X2 + SHUFPD $1, X2, X2 + MAXSD X3, X2 // X2 = max( X3[1], X3[0] ) + MOVSD X2, ret+48(FP) // return X2 + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/scal.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/scal.go new file mode 100644 index 0000000..3cc7aca --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/scal.go @@ -0,0 +1,57 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 noasm appengine safe + +package f64 + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha float64, x []float64) { + for i := range x { + x[i] *= alpha + } +} + +// ScalUnitaryTo is +// for i, v := range x { +// dst[i] = alpha * v +// } +func ScalUnitaryTo(dst []float64, alpha float64, x []float64) { + for i, v := range x { + dst[i] = alpha * v + } +} + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha float64, x []float64, n, incX uintptr) { + var ix uintptr + for i := 0; i < int(n); i++ { + x[ix] *= alpha + ix += incX + } +} + +// ScalIncTo is +// var idst, ix uintptr +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha * x[ix] +// ix += incX +// idst += incDst +// } +func ScalIncTo(dst []float64, incDst uintptr, alpha float64, x []float64, n, incX uintptr) { + var idst, ix uintptr + for i := 0; i < int(n); i++ { + dst[idst] = alpha * x[ix] + ix += incX + idst += incDst + } +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/scalinc_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalinc_amd64.s new file mode 100644 index 0000000..cf185fc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalinc_amd64.s @@ -0,0 +1,113 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R9 +#define ALPHA X0 +#define ALPHA_2 X1 + +// func ScalInc(alpha float64, x []float64, n, incX uintptr) +TEXT ·ScalInc(SB), NOSPLIT, $0 + MOVSD alpha+0(FP), ALPHA // ALPHA = alpha + MOVQ x_base+8(FP), X_PTR // X_PTR = &x + MOVQ incX+40(FP), INC_X // INC_X = incX + SHLQ $3, INC_X // INC_X *= sizeof(float64) + MOVQ n+32(FP), LEN // LEN = n + CMPQ LEN, $0 + JE end // if LEN == 0 { return } + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVUPS ALPHA, ALPHA_2 // ALPHA_2 = ALPHA for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + +loop: // do { // x[i] *= alpha unrolled 4x. 
+ MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MOVSD (X_PTR)(INC_X*2), X4 + MOVSD (X_PTR)(INCx3_X*1), X5 + + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA_2, X3 + MULSD ALPHA, X4 + MULSD ALPHA_2, X5 + + MOVSD X2, (X_PTR) // x[i] = X_i + MOVSD X3, (X_PTR)(INC_X*1) + MOVSD X4, (X_PTR)(INC_X*2) + MOVSD X5, (X_PTR)(INCx3_X*1) + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 + JE end // if TAIL == 0 { return } + +tail_start: // Reset loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ tail_one + +tail_two: // do { + MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA, X3 + MOVSD X2, (X_PTR) // x[i] = X_i + MOVSD X3, (X_PTR)(INC_X*1) + + LEAQ (X_PTR)(INC_X*2), X_PTR // X_PTR = &(X_PTR[incX*2]) + + ANDQ $1, TAIL + JZ end + +tail_one: + MOVSD (X_PTR), X2 // X_i = x[i] + MULSD ALPHA, X2 // X_i *= ALPHA + MOVSD X2, (X_PTR) // x[i] = X_i + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/scalincto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalincto_amd64.s new file mode 100644 index 0000000..cf359ac --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalincto_amd64.s @@ -0,0 +1,122 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define DST_PTR DI +#define LEN CX +#define TAIL BX +#define INC_X R8 +#define INCx3_X R9 +#define INC_DST R10 +#define INCx3_DST R11 +#define ALPHA X0 +#define ALPHA_2 X1 + +// func ScalIncTo(dst []float64, incDst uintptr, alpha float64, x []float64, n, incX uintptr) +TEXT ·ScalIncTo(SB), NOSPLIT, $0 + MOVQ dst_base+0(FP), DST_PTR // DST_PTR = &dst + MOVQ incDst+24(FP), INC_DST // INC_DST = incDst + SHLQ $3, INC_DST // INC_DST *= sizeof(float64) + MOVSD alpha+32(FP), ALPHA // ALPHA = alpha + MOVQ x_base+40(FP), X_PTR // X_PTR = &x + MOVQ n+64(FP), LEN // LEN = n + MOVQ incX+72(FP), INC_X // INC_X = incX + SHLQ $3, INC_X // INC_X *= sizeof(float64) + CMPQ LEN, $0 + JE end // if LEN == 0 { return } + + MOVQ LEN, TAIL + ANDQ $3, TAIL // TAIL = LEN % 4 + SHRQ $2, LEN // LEN = floor( LEN / 4 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVUPS ALPHA, ALPHA_2 // ALPHA_2 = ALPHA for pipelining + LEAQ (INC_X)(INC_X*2), INCx3_X // INCx3_X = INC_X * 3 + LEAQ (INC_DST)(INC_DST*2), INCx3_DST // INCx3_DST = INC_DST * 3 + +loop: // do { // x[i] *= alpha unrolled 4x. + MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MOVSD (X_PTR)(INC_X*2), X4 + MOVSD (X_PTR)(INCx3_X*1), X5 + + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA_2, X3 + MULSD ALPHA, X4 + MULSD ALPHA_2, X5 + + MOVSD X2, (DST_PTR) // dst[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + MOVSD X4, (DST_PTR)(INC_DST*2) + MOVSD X5, (DST_PTR)(INCx3_DST*1) + + LEAQ (X_PTR)(INC_X*4), X_PTR // X_PTR = &(X_PTR[incX*4]) + LEAQ (DST_PTR)(INC_DST*4), DST_PTR // DST_PTR = &(DST_PTR[incDst*4]) + DECQ LEN + JNZ loop // } while --LEN > 0 + CMPQ TAIL, $0 + JE end // if TAIL == 0 { return } + +tail_start: // Reset loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( LEN / 2 ) + JZ tail_one + +tail_two: + MOVSD (X_PTR), X2 // X_i = x[i] + MOVSD (X_PTR)(INC_X*1), X3 + MULSD ALPHA, X2 // X_i *= a + MULSD ALPHA, X3 + MOVSD X2, (DST_PTR) // dst[i] = X_i + MOVSD X3, (DST_PTR)(INC_DST*1) + + LEAQ (X_PTR)(INC_X*2), X_PTR // X_PTR = &(X_PTR[incX*2]) + LEAQ (DST_PTR)(INC_DST*2), DST_PTR // DST_PTR = &(DST_PTR[incDst*2]) + + ANDQ $1, TAIL + JZ end + +tail_one: + MOVSD (X_PTR), X2 // X_i = x[i] + MULSD ALPHA, X2 // X_i *= ALPHA + MOVSD X2, (DST_PTR) // x[i] = X_i + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitary_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitary_amd64.s new file mode 100644 index 0000000..560aef2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitary_amd64.s @@ -0,0 +1,112 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_ALPHA LONG $0x44120FF2; WORD $0x0824 // @ MOVDDUP XMM0, 8[RSP] + +#define X_PTR SI +#define DST_PTR DI +#define IDX AX +#define LEN CX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_2 X1 + +// func ScalUnitary(alpha float64, x []float64) +TEXT ·ScalUnitary(SB), NOSPLIT, $0 + MOVDDUP_ALPHA // ALPHA = { alpha, alpha } + MOVQ x_base+8(FP), X_PTR // X_PTR = &x + MOVQ x_len+16(FP), LEN // LEN = len(x) + CMPQ LEN, $0 + JE end // if LEN == 0 { return } + XORQ IDX, IDX // IDX = 0 + + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL = LEN % 8 + SHRQ $3, LEN // LEN = floor( LEN / 8 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVUPS ALPHA, ALPHA_2 + +loop: // do { // x[i] *= alpha unrolled 8x. + MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MOVUPS 16(X_PTR)(IDX*8), X3 + MOVUPS 32(X_PTR)(IDX*8), X4 + MOVUPS 48(X_PTR)(IDX*8), X5 + + MULPD ALPHA, X2 // X_i *= ALPHA + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + MOVUPS X2, (X_PTR)(IDX*8) // x[i] = X_i + MOVUPS X3, 16(X_PTR)(IDX*8) + MOVUPS X4, 32(X_PTR)(IDX*8) + MOVUPS X5, 48(X_PTR)(IDX*8) + + ADDQ $8, IDX // i += 8 + DECQ LEN + JNZ loop // while --LEN > 0 + CMPQ TAIL, $0 + JE end // if TAIL == 0 { return } + +tail_start: // Reset loop registers + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( TAIL / 2 ) + JZ tail_one // if n == 0 goto end + +tail_two: // do { + MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MULPD ALPHA, X2 // X_i *= ALPHA + MOVUPS X2, (X_PTR)(IDX*8) // x[i] = X_i + ADDQ $2, IDX // i += 2 + DECQ LEN + JNZ tail_two // while --LEN > 0 + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { return } + +tail_one: + // x[i] *= alpha for the remaining element. + MOVSD (X_PTR)(IDX*8), X2 + MULSD ALPHA, X2 + MOVSD X2, (X_PTR)(IDX*8) + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitaryto_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitaryto_amd64.s new file mode 100644 index 0000000..a5b2b01 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/scalunitaryto_amd64.s @@ -0,0 +1,113 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Some of the loop unrolling code is copied from: +// http://golang.org/src/math/big/arith_amd64.s +// which is distributed under these terms: +// +// Copyright (c) 2012 The Go Authors. All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define MOVDDUP_ALPHA LONG $0x44120FF2; WORD $0x2024 // @ MOVDDUP 32(SP), X0 /*XMM0, 32[RSP]*/ + +#define X_PTR SI +#define DST_PTR DI +#define IDX AX +#define LEN CX +#define TAIL BX +#define ALPHA X0 +#define ALPHA_2 X1 + +// func ScalUnitaryTo(dst []float64, alpha float64, x []float64) +// This function assumes len(dst) >= len(x). +TEXT ·ScalUnitaryTo(SB), NOSPLIT, $0 + MOVQ x_base+32(FP), X_PTR // X_PTR = &x + MOVQ dst_base+0(FP), DST_PTR // DST_PTR = &dst + MOVDDUP_ALPHA // ALPHA = { alpha, alpha } + MOVQ x_len+40(FP), LEN // LEN = len(x) + CMPQ LEN, $0 + JE end // if LEN == 0 { return } + + XORQ IDX, IDX // IDX = 0 + MOVQ LEN, TAIL + ANDQ $7, TAIL // TAIL = LEN % 8 + SHRQ $3, LEN // LEN = floor( LEN / 8 ) + JZ tail_start // if LEN == 0 { goto tail_start } + + MOVUPS ALPHA, ALPHA_2 // ALPHA_2 = ALPHA for pipelining + +loop: // do { // dst[i] = alpha * x[i] unrolled 8x. 
+ MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MOVUPS 16(X_PTR)(IDX*8), X3 + MOVUPS 32(X_PTR)(IDX*8), X4 + MOVUPS 48(X_PTR)(IDX*8), X5 + + MULPD ALPHA, X2 // X_i *= ALPHA + MULPD ALPHA_2, X3 + MULPD ALPHA, X4 + MULPD ALPHA_2, X5 + + MOVUPS X2, (DST_PTR)(IDX*8) // dst[i] = X_i + MOVUPS X3, 16(DST_PTR)(IDX*8) + MOVUPS X4, 32(DST_PTR)(IDX*8) + MOVUPS X5, 48(DST_PTR)(IDX*8) + + ADDQ $8, IDX // i += 8 + DECQ LEN + JNZ loop // while --LEN > 0 + CMPQ TAIL, $0 + JE end // if TAIL == 0 { return } + +tail_start: // Reset loop counters + MOVQ TAIL, LEN // Loop counter: LEN = TAIL + SHRQ $1, LEN // LEN = floor( TAIL / 2 ) + JZ tail_one // if LEN == 0 { goto tail_one } + +tail_two: // do { + MOVUPS (X_PTR)(IDX*8), X2 // X_i = x[i] + MULPD ALPHA, X2 // X_i *= ALPHA + MOVUPS X2, (DST_PTR)(IDX*8) // dst[i] = X_i + ADDQ $2, IDX // i += 2 + DECQ LEN + JNZ tail_two // while --LEN > 0 + + ANDQ $1, TAIL + JZ end // if TAIL == 0 { return } + +tail_one: + MOVSD (X_PTR)(IDX*8), X2 // X_i = x[i] + MULSD ALPHA, X2 // X_i *= ALPHA + MOVSD X2, (DST_PTR)(IDX*8) // dst[i] = X_i + +end: + RET diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_amd64.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_amd64.go new file mode 100644 index 0000000..a51b945 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_amd64.go @@ -0,0 +1,172 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +package f64 + +// L1Norm is +// for _, v := range x { +// sum += math.Abs(v) +// } +// return sum +func L1Norm(x []float64) (sum float64) + +// L1NormInc is +// for i := 0; i < n*incX; i += incX { +// sum += math.Abs(x[i]) +// } +// return sum +func L1NormInc(x []float64, n, incX int) (sum float64) + +// AddConst is +// for i := range x { +// x[i] += alpha +// } +func AddConst(alpha float64, x []float64) + +// Add is +// for i, v := range s { +// dst[i] += v +// } +func Add(dst, s []float64) + +// AxpyUnitary is +// for i, v := range x { +// y[i] += alpha * v +// } +func AxpyUnitary(alpha float64, x, y []float64) + +// AxpyUnitaryTo is +// for i, v := range x { +// dst[i] = alpha*v + y[i] +// } +func AxpyUnitaryTo(dst []float64, alpha float64, x, y []float64) + +// AxpyInc is +// for i := 0; i < int(n); i++ { +// y[iy] += alpha * x[ix] +// ix += incX +// iy += incY +// } +func AxpyInc(alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) + +// AxpyIncTo is +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha*x[ix] + y[iy] +// ix += incX +// iy += incY +// idst += incDst +// } +func AxpyIncTo(dst []float64, incDst, idst uintptr, alpha float64, x, y []float64, n, incX, incY, ix, iy uintptr) + +// CumSum is +// if len(s) == 0 { +// return dst +// } +// dst[0] = s[0] +// for i, v := range s[1:] { +// dst[i+1] = dst[i] + v +// } +// return dst +func CumSum(dst, s []float64) []float64 + +// CumProd is +// if len(s) == 0 { +// return dst +// } +// dst[0] = s[0] +// for i, v := range s[1:] { +// dst[i+1] = dst[i] * v +// } +// return dst +func CumProd(dst, s []float64) []float64 + +// Div is +// for i, v := range s { +// dst[i] /= v +// } +func Div(dst, s []float64) + +// DivTo is +// for i, v := range s { +// dst[i] = v / t[i] +// } +// return dst +func DivTo(dst, x, y []float64) []float64 + +// DotUnitary is +// for i, v := range x { +// sum += y[i] * v +// } +// return sum +func DotUnitary(x, y []float64) (sum float64) + +// DotInc is +// for i := 0; i < 
int(n); i++ { +// sum += y[iy] * x[ix] +// ix += incX +// iy += incY +// } +// return sum +func DotInc(x, y []float64, n, incX, incY, ix, iy uintptr) (sum float64) + +// L1Dist is +// var norm float64 +// for i, v := range s { +// norm += math.Abs(t[i] - v) +// } +// return norm +func L1Dist(s, t []float64) float64 + +// LinfDist is +// var norm float64 +// if len(s) == 0 { +// return 0 +// } +// norm = math.Abs(t[0] - s[0]) +// for i, v := range s[1:] { +// absDiff := math.Abs(t[i+1] - v) +// if absDiff > norm || math.IsNaN(norm) { +// norm = absDiff +// } +// } +// return norm +func LinfDist(s, t []float64) float64 + +// ScalUnitary is +// for i := range x { +// x[i] *= alpha +// } +func ScalUnitary(alpha float64, x []float64) + +// ScalUnitaryTo is +// for i, v := range x { +// dst[i] = alpha * v +// } +func ScalUnitaryTo(dst []float64, alpha float64, x []float64) + +// ScalInc is +// var ix uintptr +// for i := 0; i < int(n); i++ { +// x[ix] *= alpha +// ix += incX +// } +func ScalInc(alpha float64, x []float64, n, incX uintptr) + +// ScalIncTo is +// var idst, ix uintptr +// for i := 0; i < int(n); i++ { +// dst[idst] = alpha * x[ix] +// ix += incX +// idst += incDst +// } +func ScalIncTo(dst []float64, incDst uintptr, alpha float64, x []float64, n, incX uintptr) + +// Sum is +// var sum float64 +// for i := range x { +// sum += x[i] +// } +func Sum(x []float64) float64 diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_noasm.go b/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_noasm.go new file mode 100644 index 0000000..670978a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/stubs_noasm.go @@ -0,0 +1,170 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
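For orientation, the ScalInc and ScalIncTo stubs documented above only declare the assembly kernels; the short plain-Go sketch below transcribes their documented loops verbatim. It is illustrative only — the lower-case names and the demo values are not part of the vendored package.

package main

import "fmt"

// scalInc mirrors the documented ScalInc loop: scale n strided
// elements of x in place by alpha.
func scalInc(alpha float64, x []float64, n, incX uintptr) {
	var ix uintptr
	for i := 0; i < int(n); i++ {
		x[ix] *= alpha
		ix += incX
	}
}

// scalIncTo mirrors the documented ScalIncTo loop:
// dst[idst] = alpha * x[ix] over n strided elements.
func scalIncTo(dst []float64, incDst uintptr, alpha float64, x []float64, n, incX uintptr) {
	var idst, ix uintptr
	for i := 0; i < int(n); i++ {
		dst[idst] = alpha * x[ix]
		ix += incX
		idst += incDst
	}
}

func main() {
	x := []float64{1, 2, 3, 4, 5, 6}
	dst := make([]float64, 3)
	scalIncTo(dst, 1, 10, x, 3, 2) // every other element of x, scaled by 10
	scalInc(2, x, 3, 2)            // scale the same elements of x in place
	fmt.Println(dst)               // [10 30 50]
	fmt.Println(x)                 // [2 2 6 4 10 6]
}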
+ +// +build !amd64 noasm appengine safe + +package f64 + +import "math" + +// L1Norm is +// for _, v := range x { +// sum += math.Abs(v) +// } +// return sum +func L1Norm(x []float64) (sum float64) { + for _, v := range x { + sum += math.Abs(v) + } + return sum +} + +// L1NormInc is +// for i := 0; i < n*incX; i += incX { +// sum += math.Abs(x[i]) +// } +// return sum +func L1NormInc(x []float64, n, incX int) (sum float64) { + for i := 0; i < n*incX; i += incX { + sum += math.Abs(x[i]) + } + return sum +} + +// Add is +// for i, v := range s { +// dst[i] += v +// } +func Add(dst, s []float64) { + for i, v := range s { + dst[i] += v + } +} + +// AddConst is +// for i := range x { +// x[i] += alpha +// } +func AddConst(alpha float64, x []float64) { + for i := range x { + x[i] += alpha + } +} + +// CumSum is +// if len(s) == 0 { +// return dst +// } +// dst[0] = s[0] +// for i, v := range s[1:] { +// dst[i+1] = dst[i] + v +// } +// return dst +func CumSum(dst, s []float64) []float64 { + if len(s) == 0 { + return dst + } + dst[0] = s[0] + for i, v := range s[1:] { + dst[i+1] = dst[i] + v + } + return dst +} + +// CumProd is +// if len(s) == 0 { +// return dst +// } +// dst[0] = s[0] +// for i, v := range s[1:] { +// dst[i+1] = dst[i] * v +// } +// return dst +func CumProd(dst, s []float64) []float64 { + if len(s) == 0 { + return dst + } + dst[0] = s[0] + for i, v := range s[1:] { + dst[i+1] = dst[i] * v + } + return dst +} + +// Div is +// for i, v := range s { +// dst[i] /= v +// } +func Div(dst, s []float64) { + for i, v := range s { + dst[i] /= v + } +} + +// DivTo is +// for i, v := range s { +// dst[i] = v / t[i] +// } +// return dst +func DivTo(dst, s, t []float64) []float64 { + for i, v := range s { + dst[i] = v / t[i] + } + return dst +} + +// L1Dist is +// var norm float64 +// for i, v := range s { +// norm += math.Abs(t[i] - v) +// } +// return norm +func L1Dist(s, t []float64) float64 { + var norm float64 + for i, v := range s { + norm += math.Abs(t[i] - v) + } + return norm +} + +// LinfDist is +// var norm float64 +// if len(s) == 0 { +// return 0 +// } +// norm = math.Abs(t[0] - s[0]) +// for i, v := range s[1:] { +// absDiff := math.Abs(t[i+1] - v) +// if absDiff > norm || math.IsNaN(norm) { +// norm = absDiff +// } +// } +// return norm +func LinfDist(s, t []float64) float64 { + var norm float64 + if len(s) == 0 { + return 0 + } + norm = math.Abs(t[0] - s[0]) + for i, v := range s[1:] { + absDiff := math.Abs(t[i+1] - v) + if absDiff > norm || math.IsNaN(norm) { + norm = absDiff + } + } + return norm +} + +// Sum is +// var sum float64 +// for i := range x { +// sum += x[i] +// } +func Sum(x []float64) float64 { + var sum float64 + for _, v := range x { + sum += v + } + return sum +} diff --git a/vendor/gonum.org/v1/gonum/internal/asm/f64/sum_amd64.s b/vendor/gonum.org/v1/gonum/internal/asm/f64/sum_amd64.s new file mode 100644 index 0000000..22eede6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/asm/f64/sum_amd64.s @@ -0,0 +1,100 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
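The Sum assembly that follows accumulates into four independent SSE registers, consuming sixteen float64 per pass of its main loop before folding the partial sums, which shortens the add-latency dependency chain. A simplified pure-Go sketch of the same multi-accumulator idea (hypothetical names; the alignment trim and the two-wide SIMD packing are omitted):

package main

import "fmt"

// sum4 accumulates into four independent partial sums so consecutive
// additions do not all depend on one another, then folds them at the
// end. The final loop handles the len(x)%4 tail elements.
func sum4(x []float64) float64 {
	var s0, s1, s2, s3 float64
	i := 0
	for ; i <= len(x)-4; i += 4 {
		s0 += x[i]
		s1 += x[i+1]
		s2 += x[i+2]
		s3 += x[i+3]
	}
	sum := (s0 + s2) + (s1 + s3)
	for ; i < len(x); i++ {
		sum += x[i]
	}
	return sum
}

func main() {
	x := []float64{1, 2, 3, 4, 5, 6, 7}
	fmt.Println(sum4(x)) // 28
}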
+ +// +build !noasm,!appengine,!safe + +#include "textflag.h" + +#define X_PTR SI +#define IDX AX +#define LEN CX +#define TAIL BX +#define SUM X0 +#define SUM_1 X1 +#define SUM_2 X2 +#define SUM_3 X3 + +// func Sum(x []float64) float64 +TEXT ·Sum(SB), NOSPLIT, $0 + MOVQ x_base+0(FP), X_PTR // X_PTR = &x + MOVQ x_len+8(FP), LEN // LEN = len(x) + XORQ IDX, IDX // i = 0 + PXOR SUM, SUM // p_sum_i = 0 + CMPQ LEN, $0 // if LEN == 0 { return 0 } + JE sum_end + + PXOR SUM_1, SUM_1 + PXOR SUM_2, SUM_2 + PXOR SUM_3, SUM_3 + + MOVQ X_PTR, TAIL // Check memory alignment + ANDQ $15, TAIL // TAIL = &y % 16 + JZ no_trim // if TAIL == 0 { goto no_trim } + + // Align on 16-byte boundary + ADDSD (X_PTR), X0 // X0 += x[0] + INCQ IDX // i++ + DECQ LEN // LEN-- + DECQ TAIL // TAIL-- + JZ sum_end // if TAIL == 0 { return } + +no_trim: + MOVQ LEN, TAIL + SHRQ $4, LEN // LEN = floor( n / 16 ) + JZ sum_tail8 // if LEN == 0 { goto sum_tail8 } + +sum_loop: // sum 16x wide do { + ADDPD (SI)(AX*8), SUM // sum_i += x[i:i+2] + ADDPD 16(SI)(AX*8), SUM_1 + ADDPD 32(SI)(AX*8), SUM_2 + ADDPD 48(SI)(AX*8), SUM_3 + ADDPD 64(SI)(AX*8), SUM + ADDPD 80(SI)(AX*8), SUM_1 + ADDPD 96(SI)(AX*8), SUM_2 + ADDPD 112(SI)(AX*8), SUM_3 + ADDQ $16, IDX // i += 16 + DECQ LEN + JNZ sum_loop // } while --CX > 0 + +sum_tail8: + TESTQ $8, TAIL + JZ sum_tail4 + + ADDPD (SI)(AX*8), SUM // sum_i += x[i:i+2] + ADDPD 16(SI)(AX*8), SUM_1 + ADDPD 32(SI)(AX*8), SUM_2 + ADDPD 48(SI)(AX*8), SUM_3 + ADDQ $8, IDX + +sum_tail4: + ADDPD SUM_3, SUM + ADDPD SUM_2, SUM_1 + + TESTQ $4, TAIL + JZ sum_tail2 + + ADDPD (SI)(AX*8), SUM // sum_i += x[i:i+2] + ADDPD 16(SI)(AX*8), SUM_1 + ADDQ $4, IDX + +sum_tail2: + ADDPD SUM_1, SUM + + TESTQ $2, TAIL + JZ sum_tail1 + + ADDPD (SI)(AX*8), SUM // sum_i += x[i:i+2] + ADDQ $2, IDX + +sum_tail1: + HADDPD SUM, SUM // sum_i[0] += sum_i[1] + + TESTQ $1, TAIL + JZ sum_end + + ADDSD (SI)(IDX*8), SUM + +sum_end: // return sum + MOVSD SUM, sum+24(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/abs.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/abs.go new file mode 100644 index 0000000..ac6eb81 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/abs.go @@ -0,0 +1,14 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmplx64 + +import math "gonum.org/v1/gonum/internal/math32" + +// Abs returns the absolute value (also called the modulus) of x. +func Abs(x complex64) float32 { return math.Hypot(real(x), imag(x)) } diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/conj.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/conj.go new file mode 100644 index 0000000..705262f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/conj.go @@ -0,0 +1,12 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmplx64 + +// Conj returns the complex conjugate of x. 
+func Conj(x complex64) complex64 { return complex(real(x), -imag(x)) } diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/doc.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/doc.go new file mode 100644 index 0000000..5424ea0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cmplx64 provides complex64 versions of standard library math/cmplx +// package routines used by gonum/blas. +package cmplx64 // import "gonum.org/v1/gonum/internal/cmplx64" diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/isinf.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/isinf.go new file mode 100644 index 0000000..21d3d18 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/isinf.go @@ -0,0 +1,25 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmplx64 + +import math "gonum.org/v1/gonum/internal/math32" + +// IsInf returns true if either real(x) or imag(x) is an infinity. +func IsInf(x complex64) bool { + if math.IsInf(real(x), 0) || math.IsInf(imag(x), 0) { + return true + } + return false +} + +// Inf returns a complex infinity, complex(+Inf, +Inf). +func Inf() complex64 { + inf := math.Inf(1) + return complex(inf, inf) +} diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/isnan.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/isnan.go new file mode 100644 index 0000000..7e0bf78 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/isnan.go @@ -0,0 +1,29 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmplx64 + +import math "gonum.org/v1/gonum/internal/math32" + +// IsNaN returns true if either real(x) or imag(x) is NaN +// and neither is an infinity. +func IsNaN(x complex64) bool { + switch { + case math.IsInf(real(x), 0) || math.IsInf(imag(x), 0): + return false + case math.IsNaN(real(x)) || math.IsNaN(imag(x)): + return true + } + return false +} + +// NaN returns a complex ``not-a-number'' value. +func NaN() complex64 { + nan := math.NaN() + return complex(nan, nan) +} diff --git a/vendor/gonum.org/v1/gonum/internal/cmplx64/sqrt.go b/vendor/gonum.org/v1/gonum/internal/cmplx64/sqrt.go new file mode 100644 index 0000000..439987b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/cmplx64/sqrt.go @@ -0,0 +1,108 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmplx64 + +import math "gonum.org/v1/gonum/internal/math32" + +// The original C code, the long comment, and the constants +// below are from http://netlib.sandia.gov/cephes/c9x-complex/clog.c. 
+// The go code is a simplified version of the original C. +// +// Cephes Math Library Release 2.8: June, 2000 +// Copyright 1984, 1987, 1989, 1992, 2000 by Stephen L. Moshier +// +// The readme file at http://netlib.sandia.gov/cephes/ says: +// Some software in this archive may be from the book _Methods and +// Programs for Mathematical Functions_ (Prentice-Hall or Simon & Schuster +// International, 1989) or from the Cephes Mathematical Library, a +// commercial product. In either event, it is copyrighted by the author. +// What you see here may be used freely but it comes with no support or +// guarantee. +// +// The two known misprints in the book are repaired here in the +// source listings for the gamma function and the incomplete beta +// integral. +// +// Stephen L. Moshier +// moshier@na-net.ornl.gov + +// Complex square root +// +// DESCRIPTION: +// +// If z = x + iy, r = |z|, then +// +// 1/2 +// Re w = [ (r + x)/2 ] , +// +// 1/2 +// Im w = [ (r - x)/2 ] . +// +// Cancelation error in r-x or r+x is avoided by using the +// identity 2 Re w Im w = y. +// +// Note that -w is also a square root of z. The root chosen +// is always in the right half plane and Im w has the same sign as y. +// +// ACCURACY: +// +// Relative error: +// arithmetic domain # trials peak rms +// DEC -10,+10 25000 3.2e-17 9.6e-18 +// IEEE -10,+10 1,000,000 2.9e-16 6.1e-17 + +// Sqrt returns the square root of x. +// The result r is chosen so that real(r) ≥ 0 and imag(r) has the same sign as imag(x). +func Sqrt(x complex64) complex64 { + if imag(x) == 0 { + if real(x) == 0 { + return complex(0, 0) + } + if real(x) < 0 { + return complex(0, math.Sqrt(-real(x))) + } + return complex(math.Sqrt(real(x)), 0) + } + if real(x) == 0 { + if imag(x) < 0 { + r := math.Sqrt(-0.5 * imag(x)) + return complex(r, -r) + } + r := math.Sqrt(0.5 * imag(x)) + return complex(r, r) + } + a := real(x) + b := imag(x) + var scale float32 + // Rescale to avoid internal overflow or underflow. + if math.Abs(a) > 4 || math.Abs(b) > 4 { + a *= 0.25 + b *= 0.25 + scale = 2 + } else { + a *= 1.8014398509481984e16 // 2**54 + b *= 1.8014398509481984e16 + scale = 7.450580596923828125e-9 // 2**-27 + } + r := math.Hypot(a, b) + var t float32 + if a > 0 { + t = math.Sqrt(0.5*r + 0.5*a) + r = scale * math.Abs((0.5*b)/t) + t *= scale + } else { + r = math.Sqrt(0.5*r - 0.5*a) + t = scale * math.Abs((0.5*b)/r) + r *= scale + } + if b < 0 { + return complex(t, -r) + } + return complex(t, r) +} diff --git a/vendor/gonum.org/v1/gonum/internal/math32/doc.go b/vendor/gonum.org/v1/gonum/internal/math32/doc.go new file mode 100644 index 0000000..68917c6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package math32 provides float32 versions of standard library math package +// routines used by gonum/blas/native. +package math32 // import "gonum.org/v1/gonum/internal/math32" diff --git a/vendor/gonum.org/v1/gonum/internal/math32/math.go b/vendor/gonum.org/v1/gonum/internal/math32/math.go new file mode 100644 index 0000000..56c90be --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/math.go @@ -0,0 +1,111 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2015 The Gonum Authors. All rights reserved. 
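As a quick check of the half-angle formulas quoted above for the complex square root: for z = 3 + 4i, r = |z| = 5, so Re w = sqrt((5+3)/2) = 2 and Im w = sqrt((5-3)/2) = 1, giving w = 2 + i. A standalone sketch using the standard library's complex128 (not the internal cmplx64 package) to cross-check:

package main

import (
	"fmt"
	"math"
	"math/cmplx"
)

func main() {
	x, y := 3.0, 4.0
	r := math.Hypot(x, y)                      // r = |z| = 5
	re := math.Sqrt((r + x) / 2)               // 2
	im := math.Copysign(math.Sqrt((r-x)/2), y) // 1, taking the sign of y
	fmt.Println(complex(re, im))               // (2+1i)
	fmt.Println(cmplx.Sqrt(complex(x, y)))     // ≈ (2+1i), standard-library cross-check
}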
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package math32 + +import ( + "math" +) + +const ( + unan = 0x7fc00000 + uinf = 0x7f800000 + uneginf = 0xff800000 + mask = 0x7f8 >> 3 + shift = 32 - 8 - 1 + bias = 127 +) + +// Abs returns the absolute value of x. +// +// Special cases are: +// Abs(±Inf) = +Inf +// Abs(NaN) = NaN +func Abs(x float32) float32 { + switch { + case x < 0: + return -x + case x == 0: + return 0 // return correctly abs(-0) + } + return x +} + +// Copysign returns a value with the magnitude +// of x and the sign of y. +func Copysign(x, y float32) float32 { + const sign = 1 << 31 + return math.Float32frombits(math.Float32bits(x)&^sign | math.Float32bits(y)&sign) +} + +// Hypot returns Sqrt(p*p + q*q), taking care to avoid +// unnecessary overflow and underflow. +// +// Special cases are: +// Hypot(±Inf, q) = +Inf +// Hypot(p, ±Inf) = +Inf +// Hypot(NaN, q) = NaN +// Hypot(p, NaN) = NaN +func Hypot(p, q float32) float32 { + // special cases + switch { + case IsInf(p, 0) || IsInf(q, 0): + return Inf(1) + case IsNaN(p) || IsNaN(q): + return NaN() + } + if p < 0 { + p = -p + } + if q < 0 { + q = -q + } + if p < q { + p, q = q, p + } + if p == 0 { + return 0 + } + q = q / p + return p * Sqrt(1+q*q) +} + +// Inf returns positive infinity if sign >= 0, negative infinity if sign < 0. +func Inf(sign int) float32 { + var v uint32 + if sign >= 0 { + v = uinf + } else { + v = uneginf + } + return math.Float32frombits(v) +} + +// IsInf reports whether f is an infinity, according to sign. +// If sign > 0, IsInf reports whether f is positive infinity. +// If sign < 0, IsInf reports whether f is negative infinity. +// If sign == 0, IsInf reports whether f is either infinity. +func IsInf(f float32, sign int) bool { + // Test for infinity by comparing against maximum float. + // To avoid the floating-point hardware, could use: + // x := math.Float32bits(f); + // return sign >= 0 && x == uinf || sign <= 0 && x == uneginf; + return sign >= 0 && f > math.MaxFloat32 || sign <= 0 && f < -math.MaxFloat32 +} + +// IsNaN reports whether f is an IEEE 754 ``not-a-number'' value. +func IsNaN(f float32) (is bool) { + // IEEE 754 says that only NaNs satisfy f != f. + // To avoid the floating-point hardware, could use: + // x := math.Float32bits(f); + // return uint32(x>>shift)&mask == mask && x != uinf && x != uneginf + return f != f +} + +// NaN returns an IEEE 754 ``not-a-number'' value. +func NaN() float32 { return math.Float32frombits(unan) } diff --git a/vendor/gonum.org/v1/gonum/internal/math32/signbit.go b/vendor/gonum.org/v1/gonum/internal/math32/signbit.go new file mode 100644 index 0000000..3e9f0bb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/signbit.go @@ -0,0 +1,16 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package math32 + +import "math" + +// Signbit returns true if x is negative or negative zero. 
+func Signbit(x float32) bool { + return math.Float32bits(x)&(1<<31) != 0 +} diff --git a/vendor/gonum.org/v1/gonum/internal/math32/sqrt.go b/vendor/gonum.org/v1/gonum/internal/math32/sqrt.go new file mode 100644 index 0000000..bf630de --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/sqrt.go @@ -0,0 +1,25 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !amd64 noasm appengine safe + +package math32 + +import ( + "math" +) + +// Sqrt returns the square root of x. +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = ±0 +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(x float32) float32 { + // FIXME(kortschak): Direct translation of the math package + // asm code for 386 fails to build. No test hardware is available + // for arm, so using conversion instead. + return float32(math.Sqrt(float64(x))) +} diff --git a/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.go b/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.go new file mode 100644 index 0000000..905ae5c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.go @@ -0,0 +1,20 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !noasm,!appengine,!safe + +package math32 + +// Sqrt returns the square root of x. +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = ±0 +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(x float32) float32 diff --git a/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.s b/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.s new file mode 100644 index 0000000..fa2b869 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/internal/math32/sqrt_amd64.s @@ -0,0 +1,20 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !noasm,!appengine,!safe + +// TODO(kortschak): use textflag.h after we drop Go 1.3 support +//#include "textflag.h" +// Don't insert stack check preamble. +#define NOSPLIT 4 + +// func Sqrt(x float32) float32 +TEXT ·Sqrt(SB),NOSPLIT,$0 + SQRTSS x+0(FP), X0 + MOVSS X0, ret+8(FP) + RET diff --git a/vendor/gonum.org/v1/gonum/lapack/doc.go b/vendor/gonum.org/v1/gonum/lapack/doc.go new file mode 100644 index 0000000..2475cb4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lapack provides interfaces for the LAPACK linear algebra standard. +package lapack // import "gonum.org/v1/gonum/lapack" diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dbdsqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dbdsqr.go new file mode 100644 index 0000000..5f3833f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dbdsqr.go @@ -0,0 +1,505 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. 
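Returning to the float32 Hypot implementation earlier in this hunk: the point of dividing by the larger magnitude is that p*p + q*q can overflow float32 even when the true hypotenuse is representable. A hypothetical standalone sketch of that rescaling (the Inf and NaN special cases handled by the real routine are omitted here):

package main

import (
	"fmt"
	"math"
)

// hypot32 mirrors the rescaling used by math32.Hypot above: factor out
// the larger magnitude so the squares never overflow float32.
func hypot32(p, q float32) float32 {
	if p < 0 {
		p = -p
	}
	if q < 0 {
		q = -q
	}
	if p < q {
		p, q = q, p
	}
	if p == 0 {
		return 0
	}
	q = q / p
	return p * float32(math.Sqrt(float64(1+q*q)))
}

func main() {
	p, q := float32(3e19), float32(4e19)
	fmt.Println(hypot32(p, q)) // 5e+19
	fmt.Println(p*p + q*q)     // +Inf: the naive form overflows float32
}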
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dbdsqr performs a singular value decomposition of a real n×n bidiagonal matrix. +// +// The SVD of the bidiagonal matrix B is +// B = Q * S * P^T +// where S is a diagonal matrix of singular values, Q is an orthogonal matrix of +// left singular vectors, and P is an orthogonal matrix of right singular vectors. +// +// Q and P are only computed if requested. If left singular vectors are requested, +// this routine returns U * Q instead of Q, and if right singular vectors are +// requested P^T * VT is returned instead of P^T. +// +// Frequently Dbdsqr is used in conjunction with Dgebrd which reduces a general +// matrix A into bidiagonal form. In this case, the SVD of A is +// A = (U * Q) * S * (P^T * VT) +// +// This routine may also compute Q^T * C. +// +// d and e contain the elements of the bidiagonal matrix b. d must have length at +// least n, and e must have length at least n-1. Dbdsqr will panic if there is +// insufficient length. On exit, D contains the singular values of B in decreasing +// order. +// +// VT is a matrix of size n×ncvt whose elements are stored in vt. The elements +// of vt are modified to contain P^T * VT on exit. VT is not used if ncvt == 0. +// +// U is a matrix of size nru×n whose elements are stored in u. The elements +// of u are modified to contain U * Q on exit. U is not used if nru == 0. +// +// C is a matrix of size n×ncc whose elements are stored in c. The elements +// of c are modified to contain Q^T * C on exit. C is not used if ncc == 0. +// +// work contains temporary storage and must have length at least 4*(n-1). Dbdsqr +// will panic if there is insufficient working memory. +// +// Dbdsqr returns whether the decomposition was successful. +// +// Dbdsqr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dbdsqr(uplo blas.Uplo, n, ncvt, nru, ncc int, d, e, vt []float64, ldvt int, u []float64, ldu int, c []float64, ldc int, work []float64) (ok bool) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case ncvt < 0: + panic(ncvtLT0) + case nru < 0: + panic(nruLT0) + case ncc < 0: + panic(nccLT0) + case ldvt < max(1, ncvt): + panic(badLdVT) + case (ldu < max(1, n) && nru > 0) || (ldu < 1 && nru == 0): + panic(badLdU) + case ldc < max(1, ncc): + panic(badLdC) + } + + // Quick return if possible. + if n == 0 { + return true + } + + if len(vt) < (n-1)*ldvt+ncvt && ncvt != 0 { + panic(shortVT) + } + if len(u) < (nru-1)*ldu+n && nru != 0 { + panic(shortU) + } + if len(c) < (n-1)*ldc+ncc && ncc != 0 { + panic(shortC) + } + if len(d) < n { + panic(shortD) + } + if len(e) < n-1 { + panic(shortE) + } + if len(work) < 4*(n-1) { + panic(shortWork) + } + + var info int + bi := blas64.Implementation() + const maxIter = 6 + + if n != 1 { + // If the singular vectors do not need to be computed, use qd algorithm. + if !(ncvt > 0 || nru > 0 || ncc > 0) { + info = impl.Dlasq1(n, d, e, work) + // If info is 2 dqds didn't finish, and so try to. 
+ if info != 2 { + return info == 0 + } + } + nm1 := n - 1 + nm12 := nm1 + nm1 + nm13 := nm12 + nm1 + idir := 0 + + eps := dlamchE + unfl := dlamchS + lower := uplo == blas.Lower + var cs, sn, r float64 + if lower { + for i := 0; i < n-1; i++ { + cs, sn, r = impl.Dlartg(d[i], e[i]) + d[i] = r + e[i] = sn * d[i+1] + d[i+1] *= cs + work[i] = cs + work[nm1+i] = sn + } + if nru > 0 { + impl.Dlasr(blas.Right, lapack.Variable, lapack.Forward, nru, n, work, work[n-1:], u, ldu) + } + if ncc > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Forward, n, ncc, work, work[n-1:], c, ldc) + } + } + // Compute singular values to a relative accuracy of tol. If tol is negative + // the values will be computed to an absolute accuracy of math.Abs(tol) * norm(b) + tolmul := math.Max(10, math.Min(100, math.Pow(eps, -1.0/8))) + tol := tolmul * eps + var smax float64 + for i := 0; i < n; i++ { + smax = math.Max(smax, math.Abs(d[i])) + } + for i := 0; i < n-1; i++ { + smax = math.Max(smax, math.Abs(e[i])) + } + + var sminl float64 + var thresh float64 + if tol >= 0 { + sminoa := math.Abs(d[0]) + if sminoa != 0 { + mu := sminoa + for i := 1; i < n; i++ { + mu = math.Abs(d[i]) * (mu / (mu + math.Abs(e[i-1]))) + sminoa = math.Min(sminoa, mu) + if sminoa == 0 { + break + } + } + } + sminoa = sminoa / math.Sqrt(float64(n)) + thresh = math.Max(tol*sminoa, float64(maxIter*n*n)*unfl) + } else { + thresh = math.Max(math.Abs(tol)*smax, float64(maxIter*n*n)*unfl) + } + // Prepare for the main iteration loop for the singular values. + maxIt := maxIter * n * n + iter := 0 + oldl2 := -1 + oldm := -1 + // m points to the last element of unconverged part of matrix. + m := n + + Outer: + for m > 1 { + if iter > maxIt { + info = 0 + for i := 0; i < n-1; i++ { + if e[i] != 0 { + info++ + } + } + return info == 0 + } + // Find diagonal block of matrix to work on. + if tol < 0 && math.Abs(d[m-1]) <= thresh { + d[m-1] = 0 + } + smax = math.Abs(d[m-1]) + smin := smax + var l2 int + var broke bool + for l3 := 0; l3 < m-1; l3++ { + l2 = m - l3 - 2 + abss := math.Abs(d[l2]) + abse := math.Abs(e[l2]) + if tol < 0 && abss <= thresh { + d[l2] = 0 + } + if abse <= thresh { + broke = true + break + } + smin = math.Min(smin, abss) + smax = math.Max(math.Max(smax, abss), abse) + } + if broke { + e[l2] = 0 + if l2 == m-2 { + // Convergence of bottom singular value, return to top. + m-- + continue + } + l2++ + } else { + l2 = 0 + } + // e[ll] through e[m-2] are nonzero, e[ll-1] is zero + if l2 == m-2 { + // Handle 2×2 block separately. + var sinr, cosr, sinl, cosl float64 + d[m-1], d[m-2], sinr, cosr, sinl, cosl = impl.Dlasv2(d[m-2], e[m-2], d[m-1]) + e[m-2] = 0 + if ncvt > 0 { + bi.Drot(ncvt, vt[(m-2)*ldvt:], 1, vt[(m-1)*ldvt:], 1, cosr, sinr) + } + if nru > 0 { + bi.Drot(nru, u[m-2:], ldu, u[m-1:], ldu, cosl, sinl) + } + if ncc > 0 { + bi.Drot(ncc, c[(m-2)*ldc:], 1, c[(m-1)*ldc:], 1, cosl, sinl) + } + m -= 2 + continue + } + // If working on a new submatrix, choose shift direction from larger end + // diagonal element toward smaller. + if l2 > oldm-1 || m-1 < oldl2 { + if math.Abs(d[l2]) >= math.Abs(d[m-1]) { + idir = 1 + } else { + idir = 2 + } + } + // Apply convergence tests. + // TODO(btracey): There is a lot of similar looking code here. See + // if there is a better way to de-duplicate. + if idir == 1 { + // Run convergence test in forward direction. + // First apply standard test to bottom of matrix. 
+ if math.Abs(e[m-2]) <= math.Abs(tol)*math.Abs(d[m-1]) || (tol < 0 && math.Abs(e[m-2]) <= thresh) { + e[m-2] = 0 + continue + } + if tol >= 0 { + // If relative accuracy desired, apply convergence criterion forward. + mu := math.Abs(d[l2]) + sminl = mu + for l3 := l2; l3 < m-1; l3++ { + if math.Abs(e[l3]) <= tol*mu { + e[l3] = 0 + continue Outer + } + mu = math.Abs(d[l3+1]) * (mu / (mu + math.Abs(e[l3]))) + sminl = math.Min(sminl, mu) + } + } + } else { + // Run convergence test in backward direction. + // First apply standard test to top of matrix. + if math.Abs(e[l2]) <= math.Abs(tol)*math.Abs(d[l2]) || (tol < 0 && math.Abs(e[l2]) <= thresh) { + e[l2] = 0 + continue + } + if tol >= 0 { + // If relative accuracy desired, apply convergence criterion backward. + mu := math.Abs(d[m-1]) + sminl = mu + for l3 := m - 2; l3 >= l2; l3-- { + if math.Abs(e[l3]) <= tol*mu { + e[l3] = 0 + continue Outer + } + mu = math.Abs(d[l3]) * (mu / (mu + math.Abs(e[l3]))) + sminl = math.Min(sminl, mu) + } + } + } + oldl2 = l2 + oldm = m + // Compute shift. First, test if shifting would ruin relative accuracy, + // and if so set the shift to zero. + var shift float64 + if tol >= 0 && float64(n)*tol*(sminl/smax) <= math.Max(eps, (1.0/100)*tol) { + shift = 0 + } else { + var sl2 float64 + if idir == 1 { + sl2 = math.Abs(d[l2]) + shift, _ = impl.Dlas2(d[m-2], e[m-2], d[m-1]) + } else { + sl2 = math.Abs(d[m-1]) + shift, _ = impl.Dlas2(d[l2], e[l2], d[l2+1]) + } + // Test if shift is negligible + if sl2 > 0 { + if (shift/sl2)*(shift/sl2) < eps { + shift = 0 + } + } + } + iter += m - l2 + 1 + // If no shift, do simplified QR iteration. + if shift == 0 { + if idir == 1 { + cs := 1.0 + oldcs := 1.0 + var sn, r, oldsn float64 + for i := l2; i < m-1; i++ { + cs, sn, r = impl.Dlartg(d[i]*cs, e[i]) + if i > l2 { + e[i-1] = oldsn * r + } + oldcs, oldsn, d[i] = impl.Dlartg(oldcs*r, d[i+1]*sn) + work[i-l2] = cs + work[i-l2+nm1] = sn + work[i-l2+nm12] = oldcs + work[i-l2+nm13] = oldsn + } + h := d[m-1] * cs + d[m-1] = h * oldcs + e[m-2] = h * oldsn + if ncvt > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Forward, m-l2, ncvt, work, work[n-1:], vt[l2*ldvt:], ldvt) + } + if nru > 0 { + impl.Dlasr(blas.Right, lapack.Variable, lapack.Forward, nru, m-l2, work[nm12:], work[nm13:], u[l2:], ldu) + } + if ncc > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Forward, m-l2, ncc, work[nm12:], work[nm13:], c[l2*ldc:], ldc) + } + if math.Abs(e[m-2]) < thresh { + e[m-2] = 0 + } + } else { + cs := 1.0 + oldcs := 1.0 + var sn, r, oldsn float64 + for i := m - 1; i >= l2+1; i-- { + cs, sn, r = impl.Dlartg(d[i]*cs, e[i-1]) + if i < m-1 { + e[i] = oldsn * r + } + oldcs, oldsn, d[i] = impl.Dlartg(oldcs*r, d[i-1]*sn) + work[i-l2-1] = cs + work[i-l2+nm1-1] = -sn + work[i-l2+nm12-1] = oldcs + work[i-l2+nm13-1] = -oldsn + } + h := d[l2] * cs + d[l2] = h * oldcs + e[l2] = h * oldsn + if ncvt > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Backward, m-l2, ncvt, work[nm12:], work[nm13:], vt[l2*ldvt:], ldvt) + } + if nru > 0 { + impl.Dlasr(blas.Right, lapack.Variable, lapack.Backward, nru, m-l2, work, work[n-1:], u[l2:], ldu) + } + if ncc > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Backward, m-l2, ncc, work, work[n-1:], c[l2*ldc:], ldc) + } + if math.Abs(e[l2]) <= thresh { + e[l2] = 0 + } + } + } else { + // Use nonzero shift. + if idir == 1 { + // Chase bulge from top to bottom. Save cosines and sines for + // later singular vector updates. 
+ f := (math.Abs(d[l2]) - shift) * (math.Copysign(1, d[l2]) + shift/d[l2]) + g := e[l2] + var cosl, sinl float64 + for i := l2; i < m-1; i++ { + cosr, sinr, r := impl.Dlartg(f, g) + if i > l2 { + e[i-1] = r + } + f = cosr*d[i] + sinr*e[i] + e[i] = cosr*e[i] - sinr*d[i] + g = sinr * d[i+1] + d[i+1] *= cosr + cosl, sinl, r = impl.Dlartg(f, g) + d[i] = r + f = cosl*e[i] + sinl*d[i+1] + d[i+1] = cosl*d[i+1] - sinl*e[i] + if i < m-2 { + g = sinl * e[i+1] + e[i+1] = cosl * e[i+1] + } + work[i-l2] = cosr + work[i-l2+nm1] = sinr + work[i-l2+nm12] = cosl + work[i-l2+nm13] = sinl + } + e[m-2] = f + if ncvt > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Forward, m-l2, ncvt, work, work[n-1:], vt[l2*ldvt:], ldvt) + } + if nru > 0 { + impl.Dlasr(blas.Right, lapack.Variable, lapack.Forward, nru, m-l2, work[nm12:], work[nm13:], u[l2:], ldu) + } + if ncc > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Forward, m-l2, ncc, work[nm12:], work[nm13:], c[l2*ldc:], ldc) + } + if math.Abs(e[m-2]) <= thresh { + e[m-2] = 0 + } + } else { + // Chase bulge from top to bottom. Save cosines and sines for + // later singular vector updates. + f := (math.Abs(d[m-1]) - shift) * (math.Copysign(1, d[m-1]) + shift/d[m-1]) + g := e[m-2] + for i := m - 1; i > l2; i-- { + cosr, sinr, r := impl.Dlartg(f, g) + if i < m-1 { + e[i] = r + } + f = cosr*d[i] + sinr*e[i-1] + e[i-1] = cosr*e[i-1] - sinr*d[i] + g = sinr * d[i-1] + d[i-1] *= cosr + cosl, sinl, r := impl.Dlartg(f, g) + d[i] = r + f = cosl*e[i-1] + sinl*d[i-1] + d[i-1] = cosl*d[i-1] - sinl*e[i-1] + if i > l2+1 { + g = sinl * e[i-2] + e[i-2] *= cosl + } + work[i-l2-1] = cosr + work[i-l2+nm1-1] = -sinr + work[i-l2+nm12-1] = cosl + work[i-l2+nm13-1] = -sinl + } + e[l2] = f + if math.Abs(e[l2]) <= thresh { + e[l2] = 0 + } + if ncvt > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Backward, m-l2, ncvt, work[nm12:], work[nm13:], vt[l2*ldvt:], ldvt) + } + if nru > 0 { + impl.Dlasr(blas.Right, lapack.Variable, lapack.Backward, nru, m-l2, work, work[n-1:], u[l2:], ldu) + } + if ncc > 0 { + impl.Dlasr(blas.Left, lapack.Variable, lapack.Backward, m-l2, ncc, work, work[n-1:], c[l2*ldc:], ldc) + } + } + } + } + } + + // All singular values converged, make them positive. + for i := 0; i < n; i++ { + if d[i] < 0 { + d[i] *= -1 + if ncvt > 0 { + bi.Dscal(ncvt, -1, vt[i*ldvt:], 1) + } + } + } + + // Sort the singular values in decreasing order. + for i := 0; i < n-1; i++ { + isub := 0 + smin := d[0] + for j := 1; j < n-i; j++ { + if d[j] <= smin { + isub = j + smin = d[j] + } + } + if isub != n-i { + // Swap singular values and vectors. + d[isub] = d[n-i-1] + d[n-i-1] = smin + if ncvt > 0 { + bi.Dswap(ncvt, vt[isub*ldvt:], 1, vt[(n-i-1)*ldvt:], 1) + } + if nru > 0 { + bi.Dswap(nru, u[isub:], ldu, u[n-i-1:], ldu) + } + if ncc > 0 { + bi.Dswap(ncc, c[isub*ldc:], 1, c[(n-i-1)*ldc:], 1) + } + } + } + info = 0 + for i := 0; i < n-1; i++ { + if e[i] != 0 { + info++ + } + } + return info == 0 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgebak.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebak.go new file mode 100644 index 0000000..7caa0b1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebak.go @@ -0,0 +1,89 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
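As the Dbdsqr documentation above notes, the routine is usually driven on the output of Dgebrd, whose reduction to bidiagonal form appears further down in this patch. A hypothetical, values-only driver is sketched below — the matrix, leading dimensions and workspace sizes are illustrative, no singular vectors are requested, and none of this is part of the vendored sources:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	m, n := 3, 3 // m >= n, so the bidiagonal factor is upper bidiagonal
	a := []float64{
		4, 1, 0,
		2, 3, 1,
		0, 1, 2,
	}
	d := make([]float64, n)   // diagonal of B
	e := make([]float64, n-1) // off-diagonal of B
	tauQ := make([]float64, n)
	tauP := make([]float64, n)

	// Workspace query, then the bidiagonal reduction A = Q*B*P^T.
	work := make([]float64, 1)
	impl.Dgebrd(m, n, a, n, d, e, tauQ, tauP, work, -1)
	work = make([]float64, int(work[0]))
	impl.Dgebrd(m, n, a, n, d, e, tauQ, tauP, work, len(work))

	// Singular values only: ncvt = nru = ncc = 0, so vt, u and c are unused.
	ok := impl.Dbdsqr(blas.Upper, n, 0, 0, 0, d, e, nil, 1, nil, 1, nil, 1, make([]float64, 4*n))
	fmt.Println(ok, d) // d now holds the singular values in decreasing order
}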
+ +package gonum + +import ( + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dgebak updates an n×m matrix V as +// V = P D V, if side == lapack.EVRight, +// V = P D^{-1} V, if side == lapack.EVLeft, +// where P and D are n×n permutation and scaling matrices, respectively, +// implicitly represented by job, scale, ilo and ihi as returned by Dgebal. +// +// Typically, columns of the matrix V contain the right or left (determined by +// side) eigenvectors of the balanced matrix output by Dgebal, and Dgebak forms +// the eigenvectors of the original matrix. +// +// Dgebak is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgebak(job lapack.BalanceJob, side lapack.EVSide, n, ilo, ihi int, scale []float64, m int, v []float64, ldv int) { + switch { + case job != lapack.BalanceNone && job != lapack.Permute && job != lapack.Scale && job != lapack.PermuteScale: + panic(badBalanceJob) + case side != lapack.EVLeft && side != lapack.EVRight: + panic(badEVSide) + case n < 0: + panic(nLT0) + case ilo < 0 || max(0, n-1) < ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case m < 0: + panic(mLT0) + case ldv < max(1, m): + panic(badLdV) + } + + // Quick return if possible. + if n == 0 || m == 0 { + return + } + + if len(scale) < n { + panic(shortScale) + } + if len(v) < (n-1)*ldv+m { + panic(shortV) + } + + // Quick return if possible. + if job == lapack.BalanceNone { + return + } + + bi := blas64.Implementation() + if ilo != ihi && job != lapack.Permute { + // Backward balance. + if side == lapack.EVRight { + for i := ilo; i <= ihi; i++ { + bi.Dscal(m, scale[i], v[i*ldv:], 1) + } + } else { + for i := ilo; i <= ihi; i++ { + bi.Dscal(m, 1/scale[i], v[i*ldv:], 1) + } + } + } + if job == lapack.Scale { + return + } + // Backward permutation. + for i := ilo - 1; i >= 0; i-- { + k := int(scale[i]) + if k == i { + continue + } + bi.Dswap(m, v[i*ldv:], 1, v[k*ldv:], 1) + } + for i := ihi + 1; i < n; i++ { + k := int(scale[i]) + if k == i { + continue + } + bi.Dswap(m, v[i*ldv:], 1, v[k*ldv:], 1) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgebal.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebal.go new file mode 100644 index 0000000..6fb5170 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebal.go @@ -0,0 +1,239 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dgebal balances an n×n matrix A. Balancing consists of two stages, permuting +// and scaling. Both steps are optional and depend on the value of job. +// +// Permuting consists of applying a permutation matrix P such that the matrix +// that results from P^T*A*P takes the upper block triangular form +// [ T1 X Y ] +// P^T A P = [ 0 B Z ], +// [ 0 0 T2 ] +// where T1 and T2 are upper triangular matrices and B contains at least one +// nonzero off-diagonal element in each row and column. The indices ilo and ihi +// mark the starting and ending columns of the submatrix B. The eigenvalues of A +// isolated in the first 0 to ilo-1 and last ihi+1 to n-1 elements on the +// diagonal can be read off without any roundoff error. +// +// Scaling consists of applying a diagonal similarity transformation D such that +// D^{-1}*B*D has the 1-norm of each row and its corresponding column nearly +// equal. 
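A hypothetical call sketch for the balancing routine documented here (its signature follows just below); the matrix values are made up purely for illustration:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	n := 3
	a := []float64{ // row-major, with badly mismatched row/column norms
		1, 0, 1e6,
		0, 2, 0,
		1e-6, 0, 3,
	}
	scale := make([]float64, n)
	ilo, ihi := impl.Dgebal(lapack.PermuteScale, n, a, n, scale)
	// Outside [ilo, ihi] scale[j] records the permutation index;
	// inside it, scale[j] is the diagonal scaling factor D[j,j].
	fmt.Println(ilo, ihi, scale)
}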
The output matrix is +// [ T1 X*D Y ] +// [ 0 inv(D)*B*D inv(D)*Z ]. +// [ 0 0 T2 ] +// Scaling may reduce the 1-norm of the matrix, and improve the accuracy of +// the computed eigenvalues and/or eigenvectors. +// +// job specifies the operations that will be performed on A. +// If job is lapack.BalanceNone, Dgebal sets scale[i] = 1 for all i and returns ilo=0, ihi=n-1. +// If job is lapack.Permute, only permuting will be done. +// If job is lapack.Scale, only scaling will be done. +// If job is lapack.PermuteScale, both permuting and scaling will be done. +// +// On return, if job is lapack.Permute or lapack.PermuteScale, it will hold that +// A[i,j] == 0, for i > j and j ∈ {0, ..., ilo-1, ihi+1, ..., n-1}. +// If job is lapack.BalanceNone or lapack.Scale, or if n == 0, it will hold that +// ilo == 0 and ihi == n-1. +// +// On return, scale will contain information about the permutations and scaling +// factors applied to A. If π(j) denotes the index of the column interchanged +// with column j, and D[j,j] denotes the scaling factor applied to column j, +// then +// scale[j] == π(j), for j ∈ {0, ..., ilo-1, ihi+1, ..., n-1}, +// == D[j,j], for j ∈ {ilo, ..., ihi}. +// scale must have length equal to n, otherwise Dgebal will panic. +// +// Dgebal is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgebal(job lapack.BalanceJob, n int, a []float64, lda int, scale []float64) (ilo, ihi int) { + switch { + case job != lapack.BalanceNone && job != lapack.Permute && job != lapack.Scale && job != lapack.PermuteScale: + panic(badBalanceJob) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + ilo = 0 + ihi = n - 1 + + if n == 0 { + return ilo, ihi + } + + if len(scale) != n { + panic(shortScale) + } + + if job == lapack.BalanceNone { + for i := range scale { + scale[i] = 1 + } + return ilo, ihi + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + bi := blas64.Implementation() + swapped := true + + if job == lapack.Scale { + goto scaling + } + + // Permutation to isolate eigenvalues if possible. + // + // Search for rows isolating an eigenvalue and push them down. + for swapped { + swapped = false + rows: + for i := ihi; i >= 0; i-- { + for j := 0; j <= ihi; j++ { + if i == j { + continue + } + if a[i*lda+j] != 0 { + continue rows + } + } + // Row i has only zero off-diagonal elements in the + // block A[ilo:ihi+1,ilo:ihi+1]. + scale[ihi] = float64(i) + if i != ihi { + bi.Dswap(ihi+1, a[i:], lda, a[ihi:], lda) + bi.Dswap(n, a[i*lda:], 1, a[ihi*lda:], 1) + } + if ihi == 0 { + scale[0] = 1 + return ilo, ihi + } + ihi-- + swapped = true + break + } + } + // Search for columns isolating an eigenvalue and push them left. + swapped = true + for swapped { + swapped = false + columns: + for j := ilo; j <= ihi; j++ { + for i := ilo; i <= ihi; i++ { + if i == j { + continue + } + if a[i*lda+j] != 0 { + continue columns + } + } + // Column j has only zero off-diagonal elements in the + // block A[ilo:ihi+1,ilo:ihi+1]. + scale[ilo] = float64(j) + if j != ilo { + bi.Dswap(ihi+1, a[j:], lda, a[ilo:], lda) + bi.Dswap(n-ilo, a[j*lda+ilo:], 1, a[ilo*lda+ilo:], 1) + } + swapped = true + ilo++ + break + } + } + +scaling: + for i := ilo; i <= ihi; i++ { + scale[i] = 1 + } + + if job == lapack.Permute { + return ilo, ihi + } + + // Balance the submatrix in rows ilo to ihi. + + const ( + // sclfac should be a power of 2 to avoid roundoff errors. 
+ // Elements of scale are restricted to powers of sclfac, + // therefore the matrix will be only nearly balanced. + sclfac = 2 + // factor determines the minimum reduction of the row and column + // norms that is considered non-negligible. It must be less than 1. + factor = 0.95 + ) + sfmin1 := dlamchS / dlamchP + sfmax1 := 1 / sfmin1 + sfmin2 := sfmin1 * sclfac + sfmax2 := 1 / sfmin2 + + // Iterative loop for norm reduction. + var conv bool + for !conv { + conv = true + for i := ilo; i <= ihi; i++ { + c := bi.Dnrm2(ihi-ilo+1, a[ilo*lda+i:], lda) + r := bi.Dnrm2(ihi-ilo+1, a[i*lda+ilo:], 1) + ica := bi.Idamax(ihi+1, a[i:], lda) + ca := math.Abs(a[ica*lda+i]) + ira := bi.Idamax(n-ilo, a[i*lda+ilo:], 1) + ra := math.Abs(a[i*lda+ilo+ira]) + + // Guard against zero c or r due to underflow. + if c == 0 || r == 0 { + continue + } + g := r / sclfac + f := 1.0 + s := c + r + for c < g && math.Max(f, math.Max(c, ca)) < sfmax2 && math.Min(r, math.Min(g, ra)) > sfmin2 { + if math.IsNaN(c + f + ca + r + g + ra) { + // Panic if NaN to avoid infinite loop. + panic("lapack: NaN") + } + f *= sclfac + c *= sclfac + ca *= sclfac + g /= sclfac + r /= sclfac + ra /= sclfac + } + g = c / sclfac + for r <= g && math.Max(r, ra) < sfmax2 && math.Min(math.Min(f, c), math.Min(g, ca)) > sfmin2 { + f /= sclfac + c /= sclfac + ca /= sclfac + g /= sclfac + r *= sclfac + ra *= sclfac + } + + if c+r >= factor*s { + // Reduction would be negligible. + continue + } + if f < 1 && scale[i] < 1 && f*scale[i] <= sfmin1 { + continue + } + if f > 1 && scale[i] > 1 && scale[i] >= sfmax1/f { + continue + } + + // Now balance. + scale[i] *= f + bi.Dscal(n-ilo, 1/f, a[i*lda+ilo:], 1) + bi.Dscal(ihi+1, f, a[i:], lda) + conv = false + } + } + return ilo, ihi +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgebd2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebd2.go new file mode 100644 index 0000000..cf951a1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebd2.go @@ -0,0 +1,86 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgebd2 reduces an m×n matrix A to upper or lower bidiagonal form by an orthogonal +// transformation. +// Q^T * A * P = B +// if m >= n, B is upper diagonal, otherwise B is lower bidiagonal. +// d is the diagonal, len = min(m,n) +// e is the off-diagonal len = min(m,n)-1 +// +// Dgebd2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgebd2(m, n int, a []float64, lda int, d, e, tauQ, tauP, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + minmn := min(m, n) + if minmn == 0 { + return + } + + switch { + case len(d) < minmn: + panic(shortD) + case len(e) < minmn-1: + panic(shortE) + case len(tauQ) < minmn: + panic(shortTauQ) + case len(tauP) < minmn: + panic(shortTauP) + case len(work) < max(m, n): + panic(shortWork) + } + + if m >= n { + for i := 0; i < n; i++ { + a[i*lda+i], tauQ[i] = impl.Dlarfg(m-i, a[i*lda+i], a[min(i+1, m-1)*lda+i:], lda) + d[i] = a[i*lda+i] + a[i*lda+i] = 1 + // Apply H_i to A[i:m, i+1:n] from the left. 
+ if i < n-1 { + impl.Dlarf(blas.Left, m-i, n-i-1, a[i*lda+i:], lda, tauQ[i], a[i*lda+i+1:], lda, work) + } + a[i*lda+i] = d[i] + if i < n-1 { + a[i*lda+i+1], tauP[i] = impl.Dlarfg(n-i-1, a[i*lda+i+1], a[i*lda+min(i+2, n-1):], 1) + e[i] = a[i*lda+i+1] + a[i*lda+i+1] = 1 + impl.Dlarf(blas.Right, m-i-1, n-i-1, a[i*lda+i+1:], 1, tauP[i], a[(i+1)*lda+i+1:], lda, work) + a[i*lda+i+1] = e[i] + } else { + tauP[i] = 0 + } + } + return + } + for i := 0; i < m; i++ { + a[i*lda+i], tauP[i] = impl.Dlarfg(n-i, a[i*lda+i], a[i*lda+min(i+1, n-1):], 1) + d[i] = a[i*lda+i] + a[i*lda+i] = 1 + if i < m-1 { + impl.Dlarf(blas.Right, m-i-1, n-i, a[i*lda+i:], 1, tauP[i], a[(i+1)*lda+i:], lda, work) + } + a[i*lda+i] = d[i] + if i < m-1 { + a[(i+1)*lda+i], tauQ[i] = impl.Dlarfg(m-i-1, a[(i+1)*lda+i], a[min(i+2, m-1)*lda+i:], lda) + e[i] = a[(i+1)*lda+i] + a[(i+1)*lda+i] = 1 + impl.Dlarf(blas.Left, m-i-1, n-i-1, a[(i+1)*lda+i:], lda, tauQ[i], a[(i+1)*lda+i+1:], lda, work) + a[(i+1)*lda+i] = e[i] + } else { + tauQ[i] = 0 + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgebrd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebrd.go new file mode 100644 index 0000000..f03bf8d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgebrd.go @@ -0,0 +1,161 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgebrd reduces a general m×n matrix A to upper or lower bidiagonal form B by +// an orthogonal transformation: +// Q^T * A * P = B. +// The diagonal elements of B are stored in d and the off-diagonal elements are stored +// in e. These are additionally stored along the diagonal of A and the off-diagonal +// of A. If m >= n B is an upper-bidiagonal matrix, and if m < n B is a +// lower-bidiagonal matrix. +// +// The remaining elements of A store the data needed to construct Q and P. +// The matrices Q and P are products of elementary reflectors +// if m >= n, Q = H_0 * H_1 * ... * H_{n-1}, +// P = G_0 * G_1 * ... * G_{n-2}, +// if m < n, Q = H_0 * H_1 * ... * H_{m-2}, +// P = G_0 * G_1 * ... * G_{m-1}, +// where +// H_i = I - tauQ[i] * v_i * v_i^T, +// G_i = I - tauP[i] * u_i * u_i^T. +// +// As an example, on exit the entries of A when m = 6, and n = 5 +// [ d e u1 u1 u1] +// [v1 d e u2 u2] +// [v1 v2 d e u3] +// [v1 v2 v3 d e] +// [v1 v2 v3 v4 d] +// [v1 v2 v3 v4 v5] +// and when m = 5, n = 6 +// [ d u1 u1 u1 u1 u1] +// [ e d u2 u2 u2 u2] +// [v1 e d u3 u3 u3] +// [v1 v2 e d u4 u4] +// [v1 v2 v3 e d u5] +// +// d, tauQ, and tauP must all have length at least min(m,n), and e must have +// length min(m,n) - 1, unless lwork is -1 when there is no check except for +// work which must have a length of at least one. +// +// work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= max(1,m,n) or be -1 and this function will panic otherwise. +// Dgebrd is blocked decomposition, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Dgebrd, +// the optimal work length will be stored into work[0]. +// +// Dgebrd is an internal routine. It is exported for testing purposes. 
+func (impl Implementation) Dgebrd(m, n int, a []float64, lda int, d, e, tauQ, tauP, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, max(m, n)) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + minmn := min(m, n) + if minmn == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DGEBRD", " ", m, n, -1, -1) + lwkopt := (m + n) * nb + if lwork == -1 { + work[0] = float64(lwkopt) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(d) < minmn: + panic(shortD) + case len(e) < minmn-1: + panic(shortE) + case len(tauQ) < minmn: + panic(shortTauQ) + case len(tauP) < minmn: + panic(shortTauP) + } + + nx := minmn + ws := max(m, n) + if 1 < nb && nb < minmn { + // At least one blocked operation can be done. + // Get the crossover point nx. + nx = max(nb, impl.Ilaenv(3, "DGEBRD", " ", m, n, -1, -1)) + // Determine when to switch from blocked to unblocked code. + if nx < minmn { + // At least one blocked operation will be done. + ws = (m + n) * nb + if lwork < ws { + // Not enough work space for the optimal nb, + // consider using a smaller block size. + nbmin := impl.Ilaenv(2, "DGEBRD", " ", m, n, -1, -1) + if lwork >= (m+n)*nbmin { + // Enough work space for minimum block size. + nb = lwork / (m + n) + } else { + nb = minmn + nx = minmn + } + } + } + } + bi := blas64.Implementation() + ldworkx := nb + ldworky := nb + var i int + for i = 0; i < minmn-nx; i += nb { + // Reduce rows and columns i:i+nb to bidiagonal form and return + // the matrices X and Y which are needed to update the unreduced + // part of the matrix. + // X is stored in the first m rows of work, y in the next rows. + x := work[:m*ldworkx] + y := work[m*ldworkx:] + impl.Dlabrd(m-i, n-i, nb, a[i*lda+i:], lda, + d[i:], e[i:], tauQ[i:], tauP[i:], + x, ldworkx, y, ldworky) + + // Update the trailing submatrix A[i+nb:m,i+nb:n], using an update + // of the form A := A - V*Y**T - X*U**T + bi.Dgemm(blas.NoTrans, blas.Trans, m-i-nb, n-i-nb, nb, + -1, a[(i+nb)*lda+i:], lda, y[nb*ldworky:], ldworky, + 1, a[(i+nb)*lda+i+nb:], lda) + + bi.Dgemm(blas.NoTrans, blas.NoTrans, m-i-nb, n-i-nb, nb, + -1, x[nb*ldworkx:], ldworkx, a[i*lda+i+nb:], lda, + 1, a[(i+nb)*lda+i+nb:], lda) + + // Copy diagonal and off-diagonal elements of B back into A. + if m >= n { + for j := i; j < i+nb; j++ { + a[j*lda+j] = d[j] + a[j*lda+j+1] = e[j] + } + } else { + for j := i; j < i+nb; j++ { + a[j*lda+j] = d[j] + a[(j+1)*lda+j] = e[j] + } + } + } + // Use unblocked code to reduce the remainder of the matrix. + impl.Dgebd2(m-i, n-i, a[i*lda+i:], lda, d[i:], e[i:], tauQ[i:], tauP[i:], work) + work[0] = float64(ws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgecon.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgecon.go new file mode 100644 index 0000000..1d1ca58 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgecon.go @@ -0,0 +1,92 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dgecon estimates the reciprocal of the condition number of the n×n matrix A +// given the LU decomposition of the matrix. The condition number computed may +// be based on the 1-norm or the ∞-norm. 
+// +// The slice a contains the result of the LU decomposition of A as computed by Dgetrf. +// +// anorm is the corresponding 1-norm or ∞-norm of the original matrix A. +// +// work is a temporary data slice of length at least 4*n and Dgecon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Dgecon will panic otherwise. +func (impl Implementation) Dgecon(norm lapack.MatrixNorm, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 { + switch { + case norm != lapack.MaxColumnSum && norm != lapack.MaxRowSum: + panic(badNorm) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return 1 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(work) < 4*n: + panic(shortWork) + case len(iwork) < n: + panic(shortIWork) + } + + // Quick return if possible. + if anorm == 0 { + return 0 + } + + bi := blas64.Implementation() + var rcond, ainvnm float64 + var kase int + var normin bool + isave := new([3]int) + onenrm := norm == lapack.MaxColumnSum + smlnum := dlamchS + kase1 := 2 + if onenrm { + kase1 = 1 + } + for { + ainvnm, kase = impl.Dlacn2(n, work[n:], work, iwork, ainvnm, kase, isave) + if kase == 0 { + if ainvnm != 0 { + rcond = (1 / ainvnm) / anorm + } + return rcond + } + var sl, su float64 + if kase == kase1 { + sl = impl.Dlatrs(blas.Lower, blas.NoTrans, blas.Unit, normin, n, a, lda, work, work[2*n:]) + su = impl.Dlatrs(blas.Upper, blas.NoTrans, blas.NonUnit, normin, n, a, lda, work, work[3*n:]) + } else { + su = impl.Dlatrs(blas.Upper, blas.Trans, blas.NonUnit, normin, n, a, lda, work, work[3*n:]) + sl = impl.Dlatrs(blas.Lower, blas.Trans, blas.Unit, normin, n, a, lda, work, work[2*n:]) + } + scale := sl * su + normin = true + if scale != 1 { + ix := bi.Idamax(n, work, 1) + if scale == 0 || scale < math.Abs(work[ix])*smlnum { + return rcond + } + impl.Drscl(n, scale, work, 1) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgeev.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeev.go new file mode 100644 index 0000000..0da4e60 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeev.go @@ -0,0 +1,279 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dgeev computes the eigenvalues and, optionally, the left and/or right +// eigenvectors for an n×n real nonsymmetric matrix A. +// +// The right eigenvector v_j of A corresponding to an eigenvalue λ_j +// is defined by +// A v_j = λ_j v_j, +// and the left eigenvector u_j corresponding to an eigenvalue λ_j is defined by +// u_j^H A = λ_j u_j^H, +// where u_j^H is the conjugate transpose of u_j. +// +// On return, A will be overwritten and the left and right eigenvectors will be +// stored, respectively, in the columns of the n×n matrices VL and VR in the +// same order as their eigenvalues. If the j-th eigenvalue is real, then +// u_j = VL[:,j], +// v_j = VR[:,j], +// and if it is not real, then j and j+1 form a complex conjugate pair and the +// eigenvectors can be recovered as +// u_j = VL[:,j] + i*VL[:,j+1], +// u_{j+1} = VL[:,j] - i*VL[:,j+1], +// v_j = VR[:,j] + i*VR[:,j+1], +// v_{j+1} = VR[:,j] - i*VR[:,j+1], +// where i is the imaginary unit. 
The computed eigenvectors are normalized to +// have Euclidean norm equal to 1 and largest component real. +// +// Left eigenvectors will be computed only if jobvl == lapack.LeftEVCompute, +// otherwise jobvl must be lapack.LeftEVNone. +// Right eigenvectors will be computed only if jobvr == lapack.RightEVCompute, +// otherwise jobvr must be lapack.RightEVNone. +// For other values of jobvl and jobvr Dgeev will panic. +// +// wr and wi contain the real and imaginary parts, respectively, of the computed +// eigenvalues. Complex conjugate pairs of eigenvalues appear consecutively with +// the eigenvalue having the positive imaginary part first. +// wr and wi must have length n, and Dgeev will panic otherwise. +// +// work must have length at least lwork and lwork must be at least max(1,4*n) if +// the left or right eigenvectors are computed, and at least max(1,3*n) if no +// eigenvectors are computed. For good performance, lwork must generally be +// larger. On return, optimal value of lwork will be stored in work[0]. +// +// If lwork == -1, instead of performing Dgeev, the function only calculates the +// optimal vaule of lwork and stores it into work[0]. +// +// On return, first is the index of the first valid eigenvalue. If first == 0, +// all eigenvalues and eigenvectors have been computed. If first is positive, +// Dgeev failed to compute all the eigenvalues, no eigenvectors have been +// computed and wr[first:] and wi[first:] contain those eigenvalues which have +// converged. +func (impl Implementation) Dgeev(jobvl lapack.LeftEVJob, jobvr lapack.RightEVJob, n int, a []float64, lda int, wr, wi []float64, vl []float64, ldvl int, vr []float64, ldvr int, work []float64, lwork int) (first int) { + wantvl := jobvl == lapack.LeftEVCompute + wantvr := jobvr == lapack.RightEVCompute + var minwrk int + if wantvl || wantvr { + minwrk = max(1, 4*n) + } else { + minwrk = max(1, 3*n) + } + switch { + case jobvl != lapack.LeftEVCompute && jobvl != lapack.LeftEVNone: + panic(badLeftEVJob) + case jobvr != lapack.RightEVCompute && jobvr != lapack.RightEVNone: + panic(badRightEVJob) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case ldvl < 1 || (ldvl < n && wantvl): + panic(badLdVL) + case ldvr < 1 || (ldvr < n && wantvr): + panic(badLdVR) + case lwork < minwrk && lwork != -1: + panic(badLWork) + case len(work) < lwork: + panic(shortWork) + } + + // Quick return if possible. 
+ if n == 0 { + work[0] = 1 + return 0 + } + + maxwrk := 2*n + n*impl.Ilaenv(1, "DGEHRD", " ", n, 1, n, 0) + if wantvl || wantvr { + maxwrk = max(maxwrk, 2*n+(n-1)*impl.Ilaenv(1, "DORGHR", " ", n, 1, n, -1)) + impl.Dhseqr(lapack.EigenvaluesAndSchur, lapack.SchurOrig, n, 0, n-1, + a, lda, wr, wi, nil, n, work, -1) + maxwrk = max(maxwrk, max(n+1, n+int(work[0]))) + side := lapack.EVLeft + if wantvr { + side = lapack.EVRight + } + impl.Dtrevc3(side, lapack.EVAllMulQ, nil, n, a, lda, vl, ldvl, vr, ldvr, + n, work, -1) + maxwrk = max(maxwrk, n+int(work[0])) + maxwrk = max(maxwrk, 4*n) + } else { + impl.Dhseqr(lapack.EigenvaluesOnly, lapack.SchurNone, n, 0, n-1, + a, lda, wr, wi, vr, ldvr, work, -1) + maxwrk = max(maxwrk, max(n+1, n+int(work[0]))) + } + maxwrk = max(maxwrk, minwrk) + + if lwork == -1 { + work[0] = float64(maxwrk) + return 0 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(wr) != n: + panic(badLenWr) + case len(wi) != n: + panic(badLenWi) + case len(vl) < (n-1)*ldvl+n && wantvl: + panic(shortVL) + case len(vr) < (n-1)*ldvr+n && wantvr: + panic(shortVR) + } + + // Get machine constants. + smlnum := math.Sqrt(dlamchS) / dlamchP + bignum := 1 / smlnum + + // Scale A if max element outside range [smlnum,bignum]. + anrm := impl.Dlange(lapack.MaxAbs, n, n, a, lda, nil) + var scalea bool + var cscale float64 + if 0 < anrm && anrm < smlnum { + scalea = true + cscale = smlnum + } else if anrm > bignum { + scalea = true + cscale = bignum + } + if scalea { + impl.Dlascl(lapack.General, 0, 0, anrm, cscale, n, n, a, lda) + } + + // Balance the matrix. + workbal := work[:n] + ilo, ihi := impl.Dgebal(lapack.PermuteScale, n, a, lda, workbal) + + // Reduce to upper Hessenberg form. + iwrk := 2 * n + tau := work[n : iwrk-1] + impl.Dgehrd(n, ilo, ihi, a, lda, tau, work[iwrk:], lwork-iwrk) + + var side lapack.EVSide + if wantvl { + side = lapack.EVLeft + // Copy Householder vectors to VL. + impl.Dlacpy(blas.Lower, n, n, a, lda, vl, ldvl) + // Generate orthogonal matrix in VL. + impl.Dorghr(n, ilo, ihi, vl, ldvl, tau, work[iwrk:], lwork-iwrk) + // Perform QR iteration, accumulating Schur vectors in VL. + iwrk = n + first = impl.Dhseqr(lapack.EigenvaluesAndSchur, lapack.SchurOrig, n, ilo, ihi, + a, lda, wr, wi, vl, ldvl, work[iwrk:], lwork-iwrk) + if wantvr { + // Want left and right eigenvectors. + // Copy Schur vectors to VR. + side = lapack.EVBoth + impl.Dlacpy(blas.All, n, n, vl, ldvl, vr, ldvr) + } + } else if wantvr { + side = lapack.EVRight + // Copy Householder vectors to VR. + impl.Dlacpy(blas.Lower, n, n, a, lda, vr, ldvr) + // Generate orthogonal matrix in VR. + impl.Dorghr(n, ilo, ihi, vr, ldvr, tau, work[iwrk:], lwork-iwrk) + // Perform QR iteration, accumulating Schur vectors in VR. + iwrk = n + first = impl.Dhseqr(lapack.EigenvaluesAndSchur, lapack.SchurOrig, n, ilo, ihi, + a, lda, wr, wi, vr, ldvr, work[iwrk:], lwork-iwrk) + } else { + // Compute eigenvalues only. + iwrk = n + first = impl.Dhseqr(lapack.EigenvaluesOnly, lapack.SchurNone, n, ilo, ihi, + a, lda, wr, wi, nil, 1, work[iwrk:], lwork-iwrk) + } + + if first > 0 { + if scalea { + // Undo scaling. + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, n-first, 1, wr[first:], 1) + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, n-first, 1, wi[first:], 1) + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, ilo, 1, wr, 1) + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, ilo, 1, wi, 1) + } + work[0] = float64(maxwrk) + return first + } + + if wantvl || wantvr { + // Compute left and/or right eigenvectors. 
+ impl.Dtrevc3(side, lapack.EVAllMulQ, nil, n, + a, lda, vl, ldvl, vr, ldvr, n, work[iwrk:], lwork-iwrk) + } + bi := blas64.Implementation() + if wantvl { + // Undo balancing of left eigenvectors. + impl.Dgebak(lapack.PermuteScale, lapack.EVLeft, n, ilo, ihi, workbal, n, vl, ldvl) + // Normalize left eigenvectors and make largest component real. + for i, wii := range wi { + if wii < 0 { + continue + } + if wii == 0 { + scl := 1 / bi.Dnrm2(n, vl[i:], ldvl) + bi.Dscal(n, scl, vl[i:], ldvl) + continue + } + scl := 1 / impl.Dlapy2(bi.Dnrm2(n, vl[i:], ldvl), bi.Dnrm2(n, vl[i+1:], ldvl)) + bi.Dscal(n, scl, vl[i:], ldvl) + bi.Dscal(n, scl, vl[i+1:], ldvl) + for k := 0; k < n; k++ { + vi := vl[k*ldvl+i] + vi1 := vl[k*ldvl+i+1] + work[iwrk+k] = vi*vi + vi1*vi1 + } + k := bi.Idamax(n, work[iwrk:iwrk+n], 1) + cs, sn, _ := impl.Dlartg(vl[k*ldvl+i], vl[k*ldvl+i+1]) + bi.Drot(n, vl[i:], ldvl, vl[i+1:], ldvl, cs, sn) + vl[k*ldvl+i+1] = 0 + } + } + if wantvr { + // Undo balancing of right eigenvectors. + impl.Dgebak(lapack.PermuteScale, lapack.EVRight, n, ilo, ihi, workbal, n, vr, ldvr) + // Normalize right eigenvectors and make largest component real. + for i, wii := range wi { + if wii < 0 { + continue + } + if wii == 0 { + scl := 1 / bi.Dnrm2(n, vr[i:], ldvr) + bi.Dscal(n, scl, vr[i:], ldvr) + continue + } + scl := 1 / impl.Dlapy2(bi.Dnrm2(n, vr[i:], ldvr), bi.Dnrm2(n, vr[i+1:], ldvr)) + bi.Dscal(n, scl, vr[i:], ldvr) + bi.Dscal(n, scl, vr[i+1:], ldvr) + for k := 0; k < n; k++ { + vi := vr[k*ldvr+i] + vi1 := vr[k*ldvr+i+1] + work[iwrk+k] = vi*vi + vi1*vi1 + } + k := bi.Idamax(n, work[iwrk:iwrk+n], 1) + cs, sn, _ := impl.Dlartg(vr[k*ldvr+i], vr[k*ldvr+i+1]) + bi.Drot(n, vr[i:], ldvr, vr[i+1:], ldvr, cs, sn) + vr[k*ldvr+i+1] = 0 + } + } + + if scalea { + // Undo scaling. + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, n-first, 1, wr[first:], 1) + impl.Dlascl(lapack.General, 0, 0, cscale, anrm, n-first, 1, wi[first:], 1) + } + + work[0] = float64(maxwrk) + return first +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgehd2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgehd2.go new file mode 100644 index 0000000..261f21b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgehd2.go @@ -0,0 +1,97 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgehd2 reduces a block of a general n×n matrix A to upper Hessenberg form H +// by an orthogonal similarity transformation Q^T * A * Q = H. +// +// The matrix Q is represented as a product of (ihi-ilo) elementary +// reflectors +// Q = H_{ilo} H_{ilo+1} ... H_{ihi-1}. +// Each H_i has the form +// H_i = I - tau[i] * v * v^T +// where v is a real vector with v[0:i+1] = 0, v[i+1] = 1 and v[ihi+1:n] = 0. +// v[i+2:ihi+1] is stored on exit in A[i+2:ihi+1,i]. +// +// On entry, a contains the n×n general matrix to be reduced. On return, the +// upper triangle and the first subdiagonal of A are overwritten with the upper +// Hessenberg matrix H, and the elements below the first subdiagonal, with the +// slice tau, represent the orthogonal matrix Q as a product of elementary +// reflectors. +// +// The contents of A are illustrated by the following example, with n = 7, ilo = +// 1 and ihi = 5. 
+// On entry, +// [ a a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a ] +// on return, +// [ a a h h h h a ] +// [ a h h h h a ] +// [ h h h h h h ] +// [ v1 h h h h h ] +// [ v1 v2 h h h h ] +// [ v1 v2 v3 h h h ] +// [ a ] +// where a denotes an element of the original matrix A, h denotes a +// modified element of the upper Hessenberg matrix H, and vi denotes an +// element of the vector defining H_i. +// +// ilo and ihi determine the block of A that will be reduced to upper Hessenberg +// form. It must hold that 0 <= ilo <= ihi <= max(0, n-1), otherwise Dgehd2 will +// panic. +// +// On return, tau will contain the scalar factors of the elementary reflectors. +// It must have length equal to n-1, otherwise Dgehd2 will panic. +// +// work must have length at least n, otherwise Dgehd2 will panic. +// +// Dgehd2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgehd2(n, ilo, ihi int, a []float64, lda int, tau, work []float64) { + switch { + case n < 0: + panic(nLT0) + case ilo < 0 || max(0, n-1) < ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(tau) != n-1: + panic(badLenTau) + case len(work) < n: + panic(shortWork) + } + + for i := ilo; i < ihi; i++ { + // Compute elementary reflector H_i to annihilate A[i+2:ihi+1,i]. + var aii float64 + aii, tau[i] = impl.Dlarfg(ihi-i, a[(i+1)*lda+i], a[min(i+2, n-1)*lda+i:], lda) + a[(i+1)*lda+i] = 1 + + // Apply H_i to A[0:ihi+1,i+1:ihi+1] from the right. + impl.Dlarf(blas.Right, ihi+1, ihi-i, a[(i+1)*lda+i:], lda, tau[i], a[i+1:], lda, work) + + // Apply H_i to A[i+1:ihi+1,i+1:n] from the left. + impl.Dlarf(blas.Left, ihi-i, n-i-1, a[(i+1)*lda+i:], lda, tau[i], a[(i+1)*lda+i+1:], lda, work) + a[(i+1)*lda+i] = aii + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgehrd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgehrd.go new file mode 100644 index 0000000..89b73ce --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgehrd.go @@ -0,0 +1,194 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dgehrd reduces a block of a real n×n general matrix A to upper Hessenberg +// form H by an orthogonal similarity transformation Q^T * A * Q = H. +// +// The matrix Q is represented as a product of (ihi-ilo) elementary +// reflectors +// Q = H_{ilo} H_{ilo+1} ... H_{ihi-1}. +// Each H_i has the form +// H_i = I - tau[i] * v * v^T +// where v is a real vector with v[0:i+1] = 0, v[i+1] = 1 and v[ihi+1:n] = 0. +// v[i+2:ihi+1] is stored on exit in A[i+2:ihi+1,i]. +// +// On entry, a contains the n×n general matrix to be reduced. On return, the +// upper triangle and the first subdiagonal of A will be overwritten with the +// upper Hessenberg matrix H, and the elements below the first subdiagonal, with +// the slice tau, represent the orthogonal matrix Q as a product of elementary +// reflectors. +// +// The contents of a are illustrated by the following example, with n = 7, ilo = +// 1 and ihi = 5. 
+// On entry, +// [ a a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a a a a a a ] +// [ a ] +// on return, +// [ a a h h h h a ] +// [ a h h h h a ] +// [ h h h h h h ] +// [ v1 h h h h h ] +// [ v1 v2 h h h h ] +// [ v1 v2 v3 h h h ] +// [ a ] +// where a denotes an element of the original matrix A, h denotes a +// modified element of the upper Hessenberg matrix H, and vi denotes an +// element of the vector defining H_i. +// +// ilo and ihi determine the block of A that will be reduced to upper Hessenberg +// form. It must hold that 0 <= ilo <= ihi < n if n > 0, and ilo == 0 and ihi == +// -1 if n == 0, otherwise Dgehrd will panic. +// +// On return, tau will contain the scalar factors of the elementary reflectors. +// Elements tau[:ilo] and tau[ihi:] will be set to zero. tau must have length +// equal to n-1 if n > 0, otherwise Dgehrd will panic. +// +// work must have length at least lwork and lwork must be at least max(1,n), +// otherwise Dgehrd will panic. On return, work[0] contains the optimal value of +// lwork. +// +// If lwork == -1, instead of performing Dgehrd, only the optimal value of lwork +// will be stored in work[0]. +// +// Dgehrd is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgehrd(n, ilo, ihi int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case n < 0: + panic(nLT0) + case ilo < 0 || max(0, n-1) < ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, n) && lwork != -1: + panic(badLWork) + case len(work) < lwork: + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return + } + + const ( + nbmax = 64 + ldt = nbmax + 1 + tsize = ldt * nbmax + ) + // Compute the workspace requirements. + nb := min(nbmax, impl.Ilaenv(1, "DGEHRD", " ", n, ilo, ihi, -1)) + lwkopt := n*nb + tsize + if lwork == -1 { + work[0] = float64(lwkopt) + return + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + if len(tau) != n-1 { + panic(badLenTau) + } + + // Set tau[:ilo] and tau[ihi:] to zero. + for i := 0; i < ilo; i++ { + tau[i] = 0 + } + for i := ihi; i < n-1; i++ { + tau[i] = 0 + } + + // Quick return if possible. + nh := ihi - ilo + 1 + if nh <= 1 { + work[0] = 1 + return + } + + // Determine the block size. + nbmin := 2 + var nx int + if 1 < nb && nb < nh { + // Determine when to cross over from blocked to unblocked code + // (last block is always handled by unblocked code). + nx = max(nb, impl.Ilaenv(3, "DGEHRD", " ", n, ilo, ihi, -1)) + if nx < nh { + // Determine if workspace is large enough for blocked code. + if lwork < n*nb+tsize { + // Not enough workspace to use optimal nb: + // determine the minimum value of nb, and reduce + // nb or force use of unblocked code. + nbmin = max(2, impl.Ilaenv(2, "DGEHRD", " ", n, ilo, ihi, -1)) + if lwork >= n*nbmin+tsize { + nb = (lwork - tsize) / n + } else { + nb = 1 + } + } + } + } + ldwork := nb // work is used as an n×nb matrix. + + var i int + if nb < nbmin || nh <= nb { + // Use unblocked code below. + i = ilo + } else { + // Use blocked code. + bi := blas64.Implementation() + iwt := n * nb // Size of the matrix Y and index where the matrix T starts in work. 
+ for i = ilo; i < ihi-nx; i += nb { + ib := min(nb, ihi-i) + + // Reduce columns [i:i+ib] to Hessenberg form, returning the + // matrices V and T of the block reflector H = I - V*T*V^T + // which performs the reduction, and also the matrix Y = A*V*T. + impl.Dlahr2(ihi+1, i+1, ib, a[i:], lda, tau[i:], work[iwt:], ldt, work, ldwork) + + // Apply the block reflector H to A[:ihi+1,i+ib:ihi+1] from the + // right, computing A := A - Y * V^T. V[i+ib,i+ib-1] must be set + // to 1. + ei := a[(i+ib)*lda+i+ib-1] + a[(i+ib)*lda+i+ib-1] = 1 + bi.Dgemm(blas.NoTrans, blas.Trans, ihi+1, ihi-i-ib+1, ib, + -1, work, ldwork, + a[(i+ib)*lda+i:], lda, + 1, a[i+ib:], lda) + a[(i+ib)*lda+i+ib-1] = ei + + // Apply the block reflector H to A[0:i+1,i+1:i+ib-1] from the + // right. + bi.Dtrmm(blas.Right, blas.Lower, blas.Trans, blas.Unit, i+1, ib-1, + 1, a[(i+1)*lda+i:], lda, work, ldwork) + for j := 0; j <= ib-2; j++ { + bi.Daxpy(i+1, -1, work[j:], ldwork, a[i+j+1:], lda) + } + + // Apply the block reflector H to A[i+1:ihi+1,i+ib:n] from the + // left. + impl.Dlarfb(blas.Left, blas.Trans, lapack.Forward, lapack.ColumnWise, + ihi-i, n-i-ib, ib, + a[(i+1)*lda+i:], lda, work[iwt:], ldt, a[(i+1)*lda+i+ib:], lda, work, ldwork) + } + } + // Use unblocked code to reduce the rest of the matrix. + impl.Dgehd2(n, i, ihi, a, lda, tau, work) + work[0] = float64(lwkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgelq2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgelq2.go new file mode 100644 index 0000000..abc96f7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgelq2.go @@ -0,0 +1,65 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgelq2 computes the LQ factorization of the m×n matrix A. +// +// In an LQ factorization, L is a lower triangular m×n matrix, and Q is an n×n +// orthonormal matrix. +// +// a is modified to contain the information to construct L and Q. +// The lower triangle of a contains the matrix L. The upper triangular elements +// (not including the diagonal) contain the elementary reflectors. tau is modified +// to contain the reflector scales. tau must have length of at least k = min(m,n) +// and this function will panic otherwise. +// +// See Dgeqr2 for a description of the elementary reflectors and orthonormal +// matrix Q. Q is constructed as a product of these elementary reflectors, +// Q = H_{k-1} * ... * H_1 * H_0. +// +// work is temporary storage of length at least m and this function will panic otherwise. +// +// Dgelq2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgelq2(m, n int, a []float64, lda int, tau, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
+ k := min(m, n) + if k == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(work) < m: + panic(shortWork) + } + + for i := 0; i < k; i++ { + a[i*lda+i], tau[i] = impl.Dlarfg(n-i, a[i*lda+i], a[i*lda+min(i+1, n-1):], 1) + if i < m-1 { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(blas.Right, m-i-1, n-i, + a[i*lda+i:], 1, + tau[i], + a[(i+1)*lda+i:], lda, + work) + a[i*lda+i] = aii + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgelqf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgelqf.go new file mode 100644 index 0000000..f1fd13a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgelqf.go @@ -0,0 +1,97 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dgelqf computes the LQ factorization of the m×n matrix A using a blocked +// algorithm. See the documentation for Dgelq2 for a description of the +// parameters at entry and exit. +// +// work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= m, and this function will panic otherwise. +// Dgelqf is a blocked LQ factorization, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Dgelqf, +// the optimal work length will be stored into work[0]. +// +// tau must have length at least min(m,n), and this function will panic otherwise. +func (impl Implementation) Dgelqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, m) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + k := min(m, n) + if k == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DGELQF", " ", m, n, -1, -1) + if lwork == -1 { + work[0] = float64(m * nb) + return + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + if len(tau) < k { + panic(shortTau) + } + + // Find the optimal blocking size based on the size of available memory + // and optimal machine parameters. + nbmin := 2 + var nx int + iws := m + if 1 < nb && nb < k { + nx = max(0, impl.Ilaenv(3, "DGELQF", " ", m, n, -1, -1)) + if nx < k { + iws = m * nb + if lwork < iws { + nb = lwork / m + nbmin = max(2, impl.Ilaenv(2, "DGELQF", " ", m, n, -1, -1)) + } + } + } + ldwork := nb + // Computed blocked LQ factorization. + var i int + if nbmin <= nb && nb < k && nx < k { + for i = 0; i < k-nx; i += nb { + ib := min(k-i, nb) + impl.Dgelq2(ib, n-i, a[i*lda+i:], lda, tau[i:], work) + if i+ib < m { + impl.Dlarft(lapack.Forward, lapack.RowWise, n-i, ib, + a[i*lda+i:], lda, + tau[i:], + work, ldwork) + impl.Dlarfb(blas.Right, blas.NoTrans, lapack.Forward, lapack.RowWise, + m-i-ib, n-i, ib, + a[i*lda+i:], lda, + work, ldwork, + a[(i+ib)*lda+i:], lda, + work[ib*ldwork:], ldwork) + } + } + } + // Perform unblocked LQ factorization on the remainder. + if i < k { + impl.Dgelq2(m-i, n-i, a[i*lda+i:], lda, tau[i:], work) + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgels.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgels.go new file mode 100644 index 0000000..a3894b6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgels.go @@ -0,0 +1,219 @@ +// Copyright ©2015 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dgels finds a minimum-norm solution based on the matrices A and B using the +// QR or LQ factorization. Dgels returns false if the matrix +// A is singular, and true if this solution was successfully found. +// +// The minimization problem solved depends on the input parameters. +// +// 1. If m >= n and trans == blas.NoTrans, Dgels finds X such that || A*X - B||_2 +// is minimized. +// 2. If m < n and trans == blas.NoTrans, Dgels finds the minimum norm solution of +// A * X = B. +// 3. If m >= n and trans == blas.Trans, Dgels finds the minimum norm solution of +// A^T * X = B. +// 4. If m < n and trans == blas.Trans, Dgels finds X such that || A*X - B||_2 +// is minimized. +// Note that the least-squares solutions (cases 1 and 3) perform the minimization +// per column of B. This is not the same as finding the minimum-norm matrix. +// +// The matrix A is a general matrix of size m×n and is modified during this call. +// The input matrix B is of size max(m,n)×nrhs, and serves two purposes. On entry, +// the elements of b specify the input matrix B. B has size m×nrhs if +// trans == blas.NoTrans, and n×nrhs if trans == blas.Trans. On exit, the +// leading submatrix of b contains the solution vectors X. If trans == blas.NoTrans, +// this submatrix is of size n×nrhs, and of size m×nrhs otherwise. +// +// work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= max(m,n) + max(m,n,nrhs), and this function will panic +// otherwise. A longer work will enable blocked algorithms to be called. +// In the special case that lwork == -1, work[0] will be set to the optimal working +// length. +func (impl Implementation) Dgels(trans blas.Transpose, m, n, nrhs int, a []float64, lda int, b []float64, ldb int, work []float64, lwork int) bool { + mn := min(m, n) + minwrk := mn + max(mn, nrhs) + switch { + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case nrhs < 0: + panic(nrhsLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, nrhs): + panic(badLdB) + case lwork < max(1, minwrk) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if mn == 0 || nrhs == 0 { + impl.Dlaset(blas.All, max(m, n), nrhs, 0, 0, b, ldb) + work[0] = 1 + return true + } + + // Find optimal block size. + var nb int + if m >= n { + nb = impl.Ilaenv(1, "DGEQRF", " ", m, n, -1, -1) + if trans != blas.NoTrans { + nb = max(nb, impl.Ilaenv(1, "DORMQR", "LN", m, nrhs, n, -1)) + } else { + nb = max(nb, impl.Ilaenv(1, "DORMQR", "LT", m, nrhs, n, -1)) + } + } else { + nb = impl.Ilaenv(1, "DGELQF", " ", m, n, -1, -1) + if trans != blas.NoTrans { + nb = max(nb, impl.Ilaenv(1, "DORMLQ", "LT", n, nrhs, m, -1)) + } else { + nb = max(nb, impl.Ilaenv(1, "DORMLQ", "LN", n, nrhs, m, -1)) + } + } + wsize := max(1, mn+max(mn, nrhs)*nb) + work[0] = float64(wsize) + + if lwork == -1 { + return true + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(b) < (max(m, n)-1)*ldb+nrhs: + panic(shortB) + } + + // Scale the input matrices if they contain extreme values. 
+ smlnum := dlamchS / dlamchP + bignum := 1 / smlnum + anrm := impl.Dlange(lapack.MaxAbs, m, n, a, lda, nil) + var iascl int + if anrm > 0 && anrm < smlnum { + impl.Dlascl(lapack.General, 0, 0, anrm, smlnum, m, n, a, lda) + iascl = 1 + } else if anrm > bignum { + impl.Dlascl(lapack.General, 0, 0, anrm, bignum, m, n, a, lda) + } else if anrm == 0 { + // Matrix is all zeros. + impl.Dlaset(blas.All, max(m, n), nrhs, 0, 0, b, ldb) + return true + } + brow := m + if trans != blas.NoTrans { + brow = n + } + bnrm := impl.Dlange(lapack.MaxAbs, brow, nrhs, b, ldb, nil) + ibscl := 0 + if bnrm > 0 && bnrm < smlnum { + impl.Dlascl(lapack.General, 0, 0, bnrm, smlnum, brow, nrhs, b, ldb) + ibscl = 1 + } else if bnrm > bignum { + impl.Dlascl(lapack.General, 0, 0, bnrm, bignum, brow, nrhs, b, ldb) + ibscl = 2 + } + + // Solve the minimization problem using a QR or an LQ decomposition. + var scllen int + if m >= n { + impl.Dgeqrf(m, n, a, lda, work, work[mn:], lwork-mn) + if trans == blas.NoTrans { + impl.Dormqr(blas.Left, blas.Trans, m, nrhs, n, + a, lda, + work[:n], + b, ldb, + work[mn:], lwork-mn) + ok := impl.Dtrtrs(blas.Upper, blas.NoTrans, blas.NonUnit, n, nrhs, + a, lda, + b, ldb) + if !ok { + return false + } + scllen = n + } else { + ok := impl.Dtrtrs(blas.Upper, blas.Trans, blas.NonUnit, n, nrhs, + a, lda, + b, ldb) + if !ok { + return false + } + for i := n; i < m; i++ { + for j := 0; j < nrhs; j++ { + b[i*ldb+j] = 0 + } + } + impl.Dormqr(blas.Left, blas.NoTrans, m, nrhs, n, + a, lda, + work[:n], + b, ldb, + work[mn:], lwork-mn) + scllen = m + } + } else { + impl.Dgelqf(m, n, a, lda, work, work[mn:], lwork-mn) + if trans == blas.NoTrans { + ok := impl.Dtrtrs(blas.Lower, blas.NoTrans, blas.NonUnit, + m, nrhs, + a, lda, + b, ldb) + if !ok { + return false + } + for i := m; i < n; i++ { + for j := 0; j < nrhs; j++ { + b[i*ldb+j] = 0 + } + } + impl.Dormlq(blas.Left, blas.Trans, n, nrhs, m, + a, lda, + work, + b, ldb, + work[mn:], lwork-mn) + scllen = n + } else { + impl.Dormlq(blas.Left, blas.NoTrans, n, nrhs, m, + a, lda, + work, + b, ldb, + work[mn:], lwork-mn) + ok := impl.Dtrtrs(blas.Lower, blas.Trans, blas.NonUnit, + m, nrhs, + a, lda, + b, ldb) + if !ok { + return false + } + } + } + + // Adjust answer vector based on scaling. + if iascl == 1 { + impl.Dlascl(lapack.General, 0, 0, anrm, smlnum, scllen, nrhs, b, ldb) + } + if iascl == 2 { + impl.Dlascl(lapack.General, 0, 0, anrm, bignum, scllen, nrhs, b, ldb) + } + if ibscl == 1 { + impl.Dlascl(lapack.General, 0, 0, smlnum, bnrm, scllen, nrhs, b, ldb) + } + if ibscl == 2 { + impl.Dlascl(lapack.General, 0, 0, bignum, bnrm, scllen, nrhs, b, ldb) + } + + work[0] = float64(wsize) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgeql2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeql2.go new file mode 100644 index 0000000..3f3ddb1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeql2.go @@ -0,0 +1,61 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgeql2 computes the QL factorization of the m×n matrix A. That is, Dgeql2 +// computes Q and L such that +// A = Q * L +// where Q is an m×m orthonormal matrix and L is a lower trapezoidal matrix. +// +// Q is represented as a product of elementary reflectors, +// Q = H_{k-1} * ... 
* H_1 * H_0 +// where k = min(m,n) and each H_i has the form +// H_i = I - tau[i] * v_i * v_i^T +// Vector v_i has v[m-k+i+1:m] = 0, v[m-k+i] = 1, and v[:m-k+i+1] is stored on +// exit in A[0:m-k+i-1, n-k+i]. +// +// tau must have length at least min(m,n), and Dgeql2 will panic otherwise. +// +// work is temporary memory storage and must have length at least n. +// +// Dgeql2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgeql2(m, n int, a []float64, lda int, tau, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + k := min(m, n) + if k == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(work) < n: + panic(shortWork) + } + + var aii float64 + for i := k - 1; i >= 0; i-- { + // Generate elementary reflector H_i to annihilate A[0:m-k+i-1, n-k+i]. + aii, tau[i] = impl.Dlarfg(m-k+i+1, a[(m-k+i)*lda+n-k+i], a[n-k+i:], lda) + + // Apply H_i to A[0:m-k+i, 0:n-k+i-1] from the left. + a[(m-k+i)*lda+n-k+i] = 1 + impl.Dlarf(blas.Left, m-k+i+1, n-k+i, a[n-k+i:], lda, tau[i], a, lda, work) + a[(m-k+i)*lda+n-k+i] = aii + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqp3.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqp3.go new file mode 100644 index 0000000..6949da9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqp3.go @@ -0,0 +1,186 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgeqp3 computes a QR factorization with column pivoting of the +// m×n matrix A: A*P = Q*R using Level 3 BLAS. +// +// The matrix Q is represented as a product of elementary reflectors +// Q = H_0 H_1 . . . H_{k-1}, where k = min(m,n). +// Each H_i has the form +// H_i = I - tau * v * v^T +// where tau and v are real vectors with v[0:i-1] = 0 and v[i] = 1; +// v[i:m] is stored on exit in A[i:m, i], and tau in tau[i]. +// +// jpvt specifies a column pivot to be applied to A. If +// jpvt[j] is at least zero, the jth column of A is permuted +// to the front of A*P (a leading column), if jpvt[j] is -1 +// the jth column of A is a free column. If jpvt[j] < -1, Dgeqp3 +// will panic. On return, jpvt holds the permutation that was +// applied; the jth column of A*P was the jpvt[j] column of A. +// jpvt must have length n or Dgeqp3 will panic. +// +// tau holds the scalar factors of the elementary reflectors. +// It must have length min(m, n), otherwise Dgeqp3 will panic. +// +// work must have length at least max(1,lwork), and lwork must be at least +// 3*n+1, otherwise Dgeqp3 will panic. For optimal performance lwork must +// be at least 2*n+(n+1)*nb, where nb is the optimal blocksize. On return, +// work[0] will contain the optimal value of lwork. +// +// If lwork == -1, instead of performing Dgeqp3, only the optimal value of lwork +// will be stored in work[0]. +// +// Dgeqp3 is an internal routine. It is exported for testing purposes. 
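As a sketch of the jpvt convention documented above, the following hypothetical snippet marks every column free (-1), runs the workspace query, and then reads back the pivot order. The 3×3 matrix and the zero-value gonum.Implementation receiver are assumptions chosen for illustration.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// Illustrative 3×3 matrix in row-major order with lda = n.
	m, n, lda := 3, 3, 3
	a := []float64{
		1, 2, 0,
		0, 1, 4,
		3, 0, 1,
	}

	// jpvt[j] == -1 marks column j as free, so Dgeqp3 picks the pivot order itself.
	jpvt := make([]int, n)
	for j := range jpvt {
		jpvt[j] = -1
	}
	tau := make([]float64, 3) // min(m, n)

	// Workspace query, then the pivoted factorization A*P = Q*R.
	query := make([]float64, 1)
	impl.Dgeqp3(m, n, a, lda, jpvt, tau, query, -1)
	work := make([]float64, int(query[0]))
	impl.Dgeqp3(m, n, a, lda, jpvt, tau, work, len(work))

	// On return, jpvt[j] names the column of the original A that became column j of A*P.
	fmt.Println("pivot order:", jpvt)
}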
+func (impl Implementation) Dgeqp3(m, n int, a []float64, lda int, jpvt []int, tau, work []float64, lwork int) { + const ( + inb = 1 + inbmin = 2 + ixover = 3 + ) + + minmn := min(m, n) + iws := 3*n + 1 + if minmn == 0 { + iws = 1 + } + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < iws && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if minmn == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(inb, "DGEQRF", " ", m, n, -1, -1) + if lwork == -1 { + work[0] = float64(2*n + (n+1)*nb) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(jpvt) != n: + panic(badLenJpvt) + case len(tau) < minmn: + panic(shortTau) + } + + for _, v := range jpvt { + if v < -1 || n <= v { + panic(badJpvt) + } + } + + bi := blas64.Implementation() + + // Move initial columns up front. + var nfxd int + for j := 0; j < n; j++ { + if jpvt[j] == -1 { + jpvt[j] = j + continue + } + if j != nfxd { + bi.Dswap(m, a[j:], lda, a[nfxd:], lda) + jpvt[j], jpvt[nfxd] = jpvt[nfxd], j + } else { + jpvt[j] = j + } + nfxd++ + } + + // Factorize nfxd columns. + // + // Compute the QR factorization of nfxd columns and update remaining columns. + if nfxd > 0 { + na := min(m, nfxd) + impl.Dgeqrf(m, na, a, lda, tau, work, lwork) + iws = max(iws, int(work[0])) + if na < n { + impl.Dormqr(blas.Left, blas.Trans, m, n-na, na, a, lda, tau[:na], a[na:], lda, + work, lwork) + iws = max(iws, int(work[0])) + } + } + + if nfxd >= minmn { + work[0] = float64(iws) + return + } + + // Factorize free columns. + sm := m - nfxd + sn := n - nfxd + sminmn := minmn - nfxd + + // Determine the block size. + nb = impl.Ilaenv(inb, "DGEQRF", " ", sm, sn, -1, -1) + nbmin := 2 + nx := 0 + + if 1 < nb && nb < sminmn { + // Determine when to cross over from blocked to unblocked code. + nx = max(0, impl.Ilaenv(ixover, "DGEQRF", " ", sm, sn, -1, -1)) + + if nx < sminmn { + // Determine if workspace is large enough for blocked code. + minws := 2*sn + (sn+1)*nb + iws = max(iws, minws) + if lwork < minws { + // Not enough workspace to use optimal nb. Reduce + // nb and determine the minimum value of nb. + nb = (lwork - 2*sn) / (sn + 1) + nbmin = max(2, impl.Ilaenv(inbmin, "DGEQRF", " ", sm, sn, -1, -1)) + } + } + } + + // Initialize partial column norms. + // The first n elements of work store the exact column norms. + for j := nfxd; j < n; j++ { + work[j] = bi.Dnrm2(sm, a[nfxd*lda+j:], lda) + work[n+j] = work[j] + } + j := nfxd + if nbmin <= nb && nb < sminmn && nx < sminmn { + // Use blocked code initially. + + // Compute factorization. + var fjb int + for topbmn := minmn - nx; j < topbmn; j += fjb { + jb := min(nb, topbmn-j) + + // Factorize jb columns among columns j:n. + fjb = impl.Dlaqps(m, n-j, j, jb, a[j:], lda, jpvt[j:], tau[j:], + work[j:n], work[j+n:2*n], work[2*n:2*n+jb], work[2*n+jb:], jb) + } + } + + // Use unblocked code to factor the last or only block. + if j < minmn { + impl.Dlaqp2(m, n-j, j, a[j:], lda, jpvt[j:], tau[j:], + work[j:n], work[j+n:2*n], work[2*n:]) + } + + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqr2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqr2.go new file mode 100644 index 0000000..3e35d7e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqr2.go @@ -0,0 +1,76 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgeqr2 computes a QR factorization of the m×n matrix A. +// +// In a QR factorization, Q is an m×m orthonormal matrix, and R is an +// upper triangular m×n matrix. +// +// A is modified to contain the information to construct Q and R. +// The upper triangle of a contains the matrix R. The lower triangular elements +// (not including the diagonal) contain the elementary reflectors. tau is modified +// to contain the reflector scales. tau must have length at least min(m,n), and +// this function will panic otherwise. +// +// The ith elementary reflector can be explicitly constructed by first extracting +// the +// v[j] = 0 j < i +// v[j] = 1 j == i +// v[j] = a[j*lda+i] j > i +// and computing H_i = I - tau[i] * v * v^T. +// +// The orthonormal matrix Q can be constructed from a product of these elementary +// reflectors, Q = H_0 * H_1 * ... * H_{k-1}, where k = min(m,n). +// +// work is temporary storage of length at least n and this function will panic otherwise. +// +// Dgeqr2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgeqr2(m, n int, a []float64, lda int, tau, work []float64) { + // TODO(btracey): This is oriented such that columns of a are eliminated. + // This likely could be re-arranged to take better advantage of row-major + // storage. + + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case len(work) < n: + panic(shortWork) + } + + // Quick return if possible. + k := min(m, n) + if k == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + } + + for i := 0; i < k; i++ { + // Generate elementary reflector H_i. + a[i*lda+i], tau[i] = impl.Dlarfg(m-i, a[i*lda+i], a[min((i+1), m-1)*lda+i:], lda) + if i < n-1 { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(blas.Left, m-i, n-i-1, + a[i*lda+i:], lda, + tau[i], + a[i*lda+i+1:], lda, + work) + a[i*lda+i] = aii + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqrf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqrf.go new file mode 100644 index 0000000..300f8ee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgeqrf.go @@ -0,0 +1,108 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dgeqrf computes the QR factorization of the m×n matrix A using a blocked +// algorithm. See the documentation for Dgeqr2 for a description of the +// parameters at entry and exit. +// +// work is temporary storage, and lwork specifies the usable memory length. +// The length of work must be at least max(1, lwork) and lwork must be -1 +// or at least n, otherwise this function will panic. +// Dgeqrf is a blocked QR factorization, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Dgeqrf, +// the optimal work length will be stored into work[0]. +// +// tau must have length at least min(m,n), and this function will panic otherwise. 
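A hedged sketch of the query-then-factor pattern for this routine, followed by explicitly forming Q. The 4×3 matrix is illustrative only, and the Dorgqr argument order (m, n, k, a, lda, tau, work, lwork) is an assumption inferred from the Dorgqr workspace queries made elsewhere in this vendored package.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// Illustrative 4×3 matrix in row-major order with lda = n.
	m, n, lda := 4, 3, 3
	a := []float64{
		2, -1, 0,
		1, 3, 1,
		0, 1, 4,
		1, 0, 2,
	}
	k := 3 // min(m, n)
	tau := make([]float64, k)

	// Workspace query, then the factorization A = Q*R.
	query := make([]float64, 1)
	impl.Dgeqrf(m, n, a, lda, tau, query, -1)
	work := make([]float64, int(query[0]))
	impl.Dgeqrf(m, n, a, lda, tau, work, len(work))

	// R occupies the upper triangle of a; the reflectors sit below the diagonal.
	for i := 0; i < k; i++ {
		fmt.Println("R row", i, a[i*lda+i:(i+1)*lda])
	}

	// Form the first n columns of Q in place (argument order assumed as noted above).
	impl.Dorgqr(m, n, k, a, lda, tau, query, -1)
	if lq := int(query[0]); lq > len(work) {
		work = make([]float64, lq)
	}
	impl.Dorgqr(m, n, k, a, lda, tau, work, len(work))
	fmt.Println("Q (first n columns):", a)
}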
+func (impl Implementation) Dgeqrf(m, n int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, n) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + k := min(m, n) + if k == 0 { + work[0] = 1 + return + } + + // nb is the optimal blocksize, i.e. the number of columns transformed at a time. + nb := impl.Ilaenv(1, "DGEQRF", " ", m, n, -1, -1) + if lwork == -1 { + work[0] = float64(n * nb) + return + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + if len(tau) < k { + panic(shortTau) + } + + nbmin := 2 // Minimal block size. + var nx int // Use unblocked (unless changed in the next for loop) + iws := n + // Only consider blocked if the suggested block size is > 1 and the + // number of rows or columns is sufficiently large. + if 1 < nb && nb < k { + // nx is the block size at which the code switches from blocked + // to unblocked. + nx = max(0, impl.Ilaenv(3, "DGEQRF", " ", m, n, -1, -1)) + if k > nx { + iws = n * nb + if lwork < iws { + // Not enough workspace to use the optimal block + // size. Get the minimum block size instead. + nb = lwork / n + nbmin = max(2, impl.Ilaenv(2, "DGEQRF", " ", m, n, -1, -1)) + } + } + } + + // Compute QR using a blocked algorithm. + var i int + if nbmin <= nb && nb < k && nx < k { + ldwork := nb + for i = 0; i < k-nx; i += nb { + ib := min(k-i, nb) + // Compute the QR factorization of the current block. + impl.Dgeqr2(m-i, ib, a[i*lda+i:], lda, tau[i:], work) + if i+ib < n { + // Form the triangular factor of the block reflector and apply H^T + // In Dlarft, work becomes the T matrix. + impl.Dlarft(lapack.Forward, lapack.ColumnWise, m-i, ib, + a[i*lda+i:], lda, + tau[i:], + work, ldwork) + impl.Dlarfb(blas.Left, blas.Trans, lapack.Forward, lapack.ColumnWise, + m-i, n-i-ib, ib, + a[i*lda+i:], lda, + work, ldwork, + a[i*lda+i+ib:], lda, + work[ib*ldwork:], ldwork) + } + } + } + // Call unblocked code on the remaining columns. + if i < k { + impl.Dgeqr2(m-i, n-i, a[i*lda+i:], lda, tau[i:], work) + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgerq2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgerq2.go new file mode 100644 index 0000000..60dac97 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgerq2.go @@ -0,0 +1,68 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dgerq2 computes an RQ factorization of the m×n matrix A, +// A = R * Q. +// On exit, if m <= n, the upper triangle of the subarray +// A[0:m, n-m:n] contains the m×m upper triangular matrix R. +// If m >= n, the elements on and above the (m-n)-th subdiagonal +// contain the m×n upper trapezoidal matrix R. +// The remaining elements, with tau, represent the +// orthogonal matrix Q as a product of min(m,n) elementary +// reflectors. +// +// The matrix Q is represented as a product of elementary reflectors +// Q = H_0 H_1 . . . H_{min(m,n)-1}. +// Each H(i) has the form +// H_i = I - tau_i * v * v^T +// where v is a vector with v[0:n-k+i-1] stored in A[m-k+i, 0:n-k+i-1], +// v[n-k+i:n] = 0 and v[n-k+i] = 1. +// +// tau must have length min(m,n) and work must have length m, otherwise +// Dgerq2 will panic. +// +// Dgerq2 is an internal routine. 
It is exported for testing purposes. +func (impl Implementation) Dgerq2(m, n int, a []float64, lda int, tau, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case len(work) < m: + panic(shortWork) + } + + // Quick return if possible. + k := min(m, n) + if k == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + } + + for i := k - 1; i >= 0; i-- { + // Generate elementary reflector H[i] to annihilate + // A[m-k+i, 0:n-k+i-1]. + mki := m - k + i + nki := n - k + i + var aii float64 + aii, tau[i] = impl.Dlarfg(nki+1, a[mki*lda+nki], a[mki*lda:], 1) + + // Apply H[i] to A[0:m-k+i-1, 0:n-k+i] from the right. + a[mki*lda+nki] = 1 + impl.Dlarf(blas.Right, mki, nki+1, a[mki*lda:], 1, tau[i], a, lda, work) + a[mki*lda+nki] = aii + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgerqf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgerqf.go new file mode 100644 index 0000000..9b4aa05 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgerqf.go @@ -0,0 +1,129 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dgerqf computes an RQ factorization of the m×n matrix A, +// A = R * Q. +// On exit, if m <= n, the upper triangle of the subarray +// A[0:m, n-m:n] contains the m×m upper triangular matrix R. +// If m >= n, the elements on and above the (m-n)-th subdiagonal +// contain the m×n upper trapezoidal matrix R. +// The remaining elements, with tau, represent the +// orthogonal matrix Q as a product of min(m,n) elementary +// reflectors. +// +// The matrix Q is represented as a product of elementary reflectors +// Q = H_0 H_1 . . . H_{min(m,n)-1}. +// Each H(i) has the form +// H_i = I - tau_i * v * v^T +// where v is a vector with v[0:n-k+i-1] stored in A[m-k+i, 0:n-k+i-1], +// v[n-k+i:n] = 0 and v[n-k+i] = 1. +// +// tau must have length min(m,n), work must have length max(1, lwork), +// and lwork must be -1 or at least max(1, m), otherwise Dgerqf will panic. +// On exit, work[0] will contain the optimal length for work. +// +// Dgerqf is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dgerqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, m) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + k := min(m, n) + if k == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DGERQF", " ", m, n, -1, -1) + if lwork == -1 { + work[0] = float64(m * nb) + return + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + if len(tau) != k { + panic(badLenTau) + } + + nbmin := 2 + nx := 1 + iws := m + var ldwork int + if 1 < nb && nb < k { + // Determine when to cross over from blocked to unblocked code. + nx = max(0, impl.Ilaenv(3, "DGERQF", " ", m, n, -1, -1)) + if nx < k { + // Determine whether workspace is large enough for blocked code. + iws = m * nb + if lwork < iws { + // Not enough workspace to use optimal nb. Reduce + // nb and determine the minimum value of nb. 
+ nb = lwork / m + nbmin = max(2, impl.Ilaenv(2, "DGERQF", " ", m, n, -1, -1)) + } + ldwork = nb + } + } + + var mu, nu int + if nbmin <= nb && nb < k && nx < k { + // Use blocked code initially. + // The last kk rows are handled by the block method. + ki := ((k - nx - 1) / nb) * nb + kk := min(k, ki+nb) + + var i int + for i = k - kk + ki; i >= k-kk; i -= nb { + ib := min(k-i, nb) + + // Compute the RQ factorization of the current block + // A[m-k+i:m-k+i+ib-1, 0:n-k+i+ib-1]. + impl.Dgerq2(ib, n-k+i+ib, a[(m-k+i)*lda:], lda, tau[i:], work) + if m-k+i > 0 { + // Form the triangular factor of the block reflector + // H = H_{i+ib-1} . . . H_{i+1} H_i. + impl.Dlarft(lapack.Backward, lapack.RowWise, + n-k+i+ib, ib, a[(m-k+i)*lda:], lda, tau[i:], + work, ldwork) + + // Apply H to A[0:m-k+i-1, 0:n-k+i+ib-1] from the right. + impl.Dlarfb(blas.Right, blas.NoTrans, lapack.Backward, lapack.RowWise, + m-k+i, n-k+i+ib, ib, a[(m-k+i)*lda:], lda, + work, ldwork, + a, lda, + work[ib*ldwork:], ldwork) + } + } + mu = m - k + i + nb + nu = n - k + i + nb + } else { + mu = m + nu = n + } + + // Use unblocked code to factor the last or only block. + if mu > 0 && nu > 0 { + impl.Dgerq2(mu, nu, a, lda, tau, work) + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgesvd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgesvd.go new file mode 100644 index 0000000..136f683 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgesvd.go @@ -0,0 +1,1374 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +const noSVDO = "dgesvd: not coded for overwrite" + +// Dgesvd computes the singular value decomposition of the input matrix A. +// +// The singular value decomposition is +// A = U * Sigma * V^T +// where Sigma is an m×n diagonal matrix containing the singular values of A, +// U is an m×m orthogonal matrix and V is an n×n orthogonal matrix. The first +// min(m,n) columns of U and V are the left and right singular vectors of A +// respectively. +// +// jobU and jobVT are options for computing the singular vectors. The behavior +// is as follows +// jobU == lapack.SVDAll All m columns of U are returned in u +// jobU == lapack.SVDStore The first min(m,n) columns are returned in u +// jobU == lapack.SVDOverwrite The first min(m,n) columns of U are written into a +// jobU == lapack.SVDNone The columns of U are not computed. +// The behavior is the same for jobVT and the rows of V^T. At most one of jobU +// and jobVT can equal lapack.SVDOverwrite, and Dgesvd will panic otherwise. +// +// On entry, a contains the data for the m×n matrix A. During the call to Dgesvd +// the data is overwritten. On exit, A contains the appropriate singular vectors +// if either job is lapack.SVDOverwrite. +// +// s is a slice of length at least min(m,n) and on exit contains the singular +// values in decreasing order. +// +// u contains the left singular vectors on exit, stored column-wise. If +// jobU == lapack.SVDAll, u is of size m×m. If jobU == lapack.SVDStore u is +// of size m×min(m,n). If jobU == lapack.SVDOverwrite or lapack.SVDNone, u is +// not used. +// +// vt contains the left singular vectors on exit, stored row-wise. If +// jobV == lapack.SVDAll, vt is of size n×n. If jobVT == lapack.SVDStore vt is +// of size min(m,n)×n. 
If jobVT == lapack.SVDOverwrite or lapack.SVDNone, vt is +// not used. +// +// work is a slice for storing temporary memory, and lwork is the usable size of +// the slice. lwork must be at least max(5*min(m,n), 3*min(m,n)+max(m,n)). +// If lwork == -1, instead of performing Dgesvd, the optimal work length will be +// stored into work[0]. Dgesvd will panic if the working memory has insufficient +// storage. +// +// Dgesvd returns whether the decomposition successfully completed. +func (impl Implementation) Dgesvd(jobU, jobVT lapack.SVDJob, m, n int, a []float64, lda int, s, u []float64, ldu int, vt []float64, ldvt int, work []float64, lwork int) (ok bool) { + if jobU == lapack.SVDOverwrite || jobVT == lapack.SVDOverwrite { + panic(noSVDO) + } + + wantua := jobU == lapack.SVDAll + wantus := jobU == lapack.SVDStore + wantuas := wantua || wantus + wantuo := jobU == lapack.SVDOverwrite + wantun := jobU == lapack.SVDNone + if !(wantua || wantus || wantuo || wantun) { + panic(badSVDJob) + } + + wantva := jobVT == lapack.SVDAll + wantvs := jobVT == lapack.SVDStore + wantvas := wantva || wantvs + wantvo := jobVT == lapack.SVDOverwrite + wantvn := jobVT == lapack.SVDNone + if !(wantva || wantvs || wantvo || wantvn) { + panic(badSVDJob) + } + + if wantuo && wantvo { + panic(bothSVDOver) + } + + minmn := min(m, n) + minwork := 1 + if minmn > 0 { + minwork = max(3*minmn+max(m, n), 5*minmn) + } + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case ldu < 1, wantua && ldu < m, wantus && ldu < minmn: + panic(badLdU) + case ldvt < 1 || (wantvas && ldvt < n): + panic(badLdVT) + case lwork < minwork && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if minmn == 0 { + work[0] = 1 + return true + } + + // Compute optimal workspace size for subroutines. 
+ opts := string(jobU) + string(jobVT) + mnthr := impl.Ilaenv(6, "DGESVD", opts, m, n, 0, 0) + maxwrk := 1 + var wrkbl, bdspac int + if m >= n { + bdspac = 5 * n + impl.Dgeqrf(m, n, a, lda, nil, work, -1) + lwork_dgeqrf := int(work[0]) + + impl.Dorgqr(m, n, n, a, lda, nil, work, -1) + lwork_dorgqr_n := int(work[0]) + impl.Dorgqr(m, m, n, a, lda, nil, work, -1) + lwork_dorgqr_m := int(work[0]) + + impl.Dgebrd(n, n, a, lda, s, nil, nil, nil, work, -1) + lwork_dgebrd := int(work[0]) + + impl.Dorgbr(lapack.GeneratePT, n, n, n, a, lda, nil, work, -1) + lwork_dorgbr_p := int(work[0]) + + impl.Dorgbr(lapack.GenerateQ, n, n, n, a, lda, nil, work, -1) + lwork_dorgbr_q := int(work[0]) + + if m >= mnthr { + if wantun { + // Path 1 (m much larger than n, jobU == None) + maxwrk = n + lwork_dgeqrf + maxwrk = max(maxwrk, 3*n+lwork_dgebrd) + if wantvo || wantvas { + maxwrk = max(maxwrk, 3*n+lwork_dorgbr_p) + } + maxwrk = max(maxwrk, bdspac) + } else if wantuo && wantvn { + // Path 2 (m much larger than n, jobU == Overwrite, jobVT == None) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_n) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = max(n*n+wrkbl, n*n+m*n+n) + } else if wantuo && wantvas { + // Path 3 (m much larger than n, jobU == Overwrite, jobVT == Store or All) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_n) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = max(n*n+wrkbl, n*n+m*n+n) + } else if wantus && wantvn { + // Path 4 (m much larger than n, jobU == Store, jobVT == None) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_n) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = n*n + wrkbl + } else if wantus && wantvo { + // Path 5 (m much larger than n, jobU == Store, jobVT == Overwrite) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_n) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = 2*n*n + wrkbl + } else if wantus && wantvas { + // Path 6 (m much larger than n, jobU == Store, jobVT == Store or All) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_n) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = n*n + wrkbl + } else if wantua && wantvn { + // Path 7 (m much larger than n, jobU == All, jobVT == None) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_m) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = n*n + wrkbl + } else if wantua && wantvo { + // Path 8 (m much larger than n, jobU == All, jobVT == Overwrite) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_m) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = 2*n*n + wrkbl + } else if wantua && wantvas { + // Path 9 (m much larger than n, jobU == All, jobVT == Store or All) + wrkbl = n + lwork_dgeqrf + wrkbl = max(wrkbl, n+lwork_dorgqr_m) + wrkbl = max(wrkbl, 3*n+lwork_dgebrd) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_q) + wrkbl = max(wrkbl, 3*n+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = 
n*n + wrkbl + } + } else { + // Path 10 (m at least n, but not much larger) + impl.Dgebrd(m, n, a, lda, s, nil, nil, nil, work, -1) + lwork_dgebrd := int(work[0]) + maxwrk = 3*n + lwork_dgebrd + if wantus || wantuo { + impl.Dorgbr(lapack.GenerateQ, m, n, n, a, lda, nil, work, -1) + lwork_dorgbr_q = int(work[0]) + maxwrk = max(maxwrk, 3*n+lwork_dorgbr_q) + } + if wantua { + impl.Dorgbr(lapack.GenerateQ, m, m, n, a, lda, nil, work, -1) + lwork_dorgbr_q := int(work[0]) + maxwrk = max(maxwrk, 3*n+lwork_dorgbr_q) + } + if !wantvn { + maxwrk = max(maxwrk, 3*n+lwork_dorgbr_p) + } + maxwrk = max(maxwrk, bdspac) + } + } else { + bdspac = 5 * m + + impl.Dgelqf(m, n, a, lda, nil, work, -1) + lwork_dgelqf := int(work[0]) + + impl.Dorglq(n, n, m, nil, n, nil, work, -1) + lwork_dorglq_n := int(work[0]) + impl.Dorglq(m, n, m, a, lda, nil, work, -1) + lwork_dorglq_m := int(work[0]) + + impl.Dgebrd(m, m, a, lda, s, nil, nil, nil, work, -1) + lwork_dgebrd := int(work[0]) + + impl.Dorgbr(lapack.GeneratePT, m, m, m, a, n, nil, work, -1) + lwork_dorgbr_p := int(work[0]) + + impl.Dorgbr(lapack.GenerateQ, m, m, m, a, n, nil, work, -1) + lwork_dorgbr_q := int(work[0]) + + if n >= mnthr { + if wantvn { + // Path 1t (n much larger than m, jobVT == None) + maxwrk = m + lwork_dgelqf + maxwrk = max(maxwrk, 3*m+lwork_dgebrd) + if wantuo || wantuas { + maxwrk = max(maxwrk, 3*m+lwork_dorgbr_q) + } + maxwrk = max(maxwrk, bdspac) + } else if wantvo && wantun { + // Path 2t (n much larger than m, jobU == None, jobVT == Overwrite) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_m) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = max(m*m+wrkbl, m*m+m*n+m) + } else if wantvo && wantuas { + // Path 3t (n much larger than m, jobU == Store or All, jobVT == Overwrite) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_m) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = max(m*m+wrkbl, m*m+m*n+m) + } else if wantvs && wantun { + // Path 4t (n much larger than m, jobU == None, jobVT == Store) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_m) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = m*m + wrkbl + } else if wantvs && wantuo { + // Path 5t (n much larger than m, jobU == Overwrite, jobVT == Store) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_m) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = 2*m*m + wrkbl + } else if wantvs && wantuas { + // Path 6t (n much larger than m, jobU == Store or All, jobVT == Store) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_m) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = m*m + wrkbl + } else if wantva && wantun { + // Path 7t (n much larger than m, jobU== None, jobVT == All) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_n) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, bdspac) + maxwrk = m*m + wrkbl + } else if wantva && wantuo { + // Path 8t (n much larger than m, jobU == Overwrite, jobVT == All) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_n) + wrkbl = 
max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = 2*m*m + wrkbl + } else if wantva && wantuas { + // Path 9t (n much larger than m, jobU == Store or All, jobVT == All) + wrkbl = m + lwork_dgelqf + wrkbl = max(wrkbl, m+lwork_dorglq_n) + wrkbl = max(wrkbl, 3*m+lwork_dgebrd) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_p) + wrkbl = max(wrkbl, 3*m+lwork_dorgbr_q) + wrkbl = max(wrkbl, bdspac) + maxwrk = m*m + wrkbl + } + } else { + // Path 10t (n greater than m, but not much larger) + impl.Dgebrd(m, n, a, lda, s, nil, nil, nil, work, -1) + lwork_dgebrd = int(work[0]) + maxwrk = 3*m + lwork_dgebrd + if wantvs || wantvo { + impl.Dorgbr(lapack.GeneratePT, m, n, m, a, n, nil, work, -1) + lwork_dorgbr_p = int(work[0]) + maxwrk = max(maxwrk, 3*m+lwork_dorgbr_p) + } + if wantva { + impl.Dorgbr(lapack.GeneratePT, n, n, m, a, n, nil, work, -1) + lwork_dorgbr_p = int(work[0]) + maxwrk = max(maxwrk, 3*m+lwork_dorgbr_p) + } + if !wantun { + maxwrk = max(maxwrk, 3*m+lwork_dorgbr_q) + } + maxwrk = max(maxwrk, bdspac) + } + } + + maxwrk = max(maxwrk, minwork) + if lwork == -1 { + work[0] = float64(maxwrk) + return true + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + if len(s) < minmn { + panic(shortS) + } + if (len(u) < (m-1)*ldu+m && wantua) || (len(u) < (m-1)*ldu+minmn && wantus) { + panic(shortU) + } + if (len(vt) < (n-1)*ldvt+n && wantva) || (len(vt) < (minmn-1)*ldvt+n && wantvs) { + panic(shortVT) + } + + // Perform decomposition. + eps := dlamchE + smlnum := math.Sqrt(dlamchS) / eps + bignum := 1 / smlnum + + // Scale A if max element outside range [smlnum, bignum]. + anrm := impl.Dlange(lapack.MaxAbs, m, n, a, lda, nil) + var iscl bool + if anrm > 0 && anrm < smlnum { + iscl = true + impl.Dlascl(lapack.General, 0, 0, anrm, smlnum, m, n, a, lda) + } else if anrm > bignum { + iscl = true + impl.Dlascl(lapack.General, 0, 0, anrm, bignum, m, n, a, lda) + } + + bi := blas64.Implementation() + var ie int + if m >= n { + // If A has sufficiently more rows than columns, use the QR decomposition. + if m >= mnthr { + // m >> n + if wantun { + // Path 1. + itau := 0 + iwork := itau + n + + // Compute A = Q * R. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Zero out below R. + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, a[lda:], lda) + ie = 0 + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + // Bidiagonalize R in A. + impl.Dgebrd(n, n, a, lda, s, work[ie:], work[itauq:], + work[itaup:], work[iwork:], lwork-iwork) + ncvt := 0 + if wantvo || wantvas { + impl.Dorgbr(lapack.GeneratePT, n, n, n, a, lda, work[itaup:], + work[iwork:], lwork-iwork) + ncvt = n + } + iwork = ie + n + + // Perform bidiagonal QR iteration computing right singular vectors + // of A in A if desired. + ok = impl.Dbdsqr(blas.Upper, n, ncvt, 0, 0, s, work[ie:], + a, lda, work, 1, work, 1, work[iwork:]) + + // If right singular vectors desired in VT, copy them there. + if wantvas { + impl.Dlacpy(blas.All, n, n, a, lda, vt, ldvt) + } + } else if wantuo && wantvn { + // Path 2 + panic(noSVDO) + } else if wantuo && wantvas { + // Path 3 + panic(noSVDO) + } else if wantus { + if wantvn { + // Path 4 + if lwork >= n*n+max(4*n, bdspac) { + // Sufficient workspace for a fast algorithm. + ir := 0 + var ldworkr int + if lwork >= wrkbl+lda*n { + ldworkr = lda + } else { + ldworkr = n + } + itau := ir + ldworkr*n + iwork := itau + n + // Compute A = Q * R. 
+ impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Copy R to work[ir:], zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, work[ir:], ldworkr) + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, work[ir+ldworkr:], ldworkr) + + // Generate Q in A. + impl.Dorgqr(m, n, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in work[ir:]. + impl.Dgebrd(n, n, work[ir:], ldworkr, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Generate left vectors bidiagonalizing R in work[ir:]. + impl.Dorgbr(lapack.GenerateQ, n, n, n, work[ir:], ldworkr, + work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, compuing left singular + // vectors of R in work[ir:]. + ok = impl.Dbdsqr(blas.Upper, n, 0, n, 0, s, work[ie:], work, 1, + work[ir:], ldworkr, work, 1, work[iwork:]) + + // Multiply Q in A by left singular vectors of R in + // work[ir:], storing result in U. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, n, 1, a, lda, + work[ir:], ldworkr, 0, u, ldu) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + n + + // Compute A = Q*R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Generate Q in U. + impl.Dorgqr(m, n, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Zero out below R in A. + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, a[lda:], lda) + + // Bidiagonalize R in A. + impl.Dgebrd(n, n, a, lda, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply Q in U by left vectors bidiagonalizing R. + impl.Dormbr(lapack.ApplyQ, blas.Right, blas.NoTrans, m, n, n, + a, lda, work[itauq:], u, ldu, work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left + // singular vectors of A in U. + ok = impl.Dbdsqr(blas.Upper, n, 0, m, 0, s, work[ie:], work, 1, + u, ldu, work, 1, work[iwork:]) + } + } else if wantvo { + // Path 5 + panic(noSVDO) + } else if wantvas { + // Path 6 + if lwork >= n*n+max(4*n, bdspac) { + // Sufficient workspace for a fast algorithm. + iu := 0 + var ldworku int + if lwork >= wrkbl+lda*n { + ldworku = lda + } else { + ldworku = n + } + itau := iu + ldworku*n + iwork := itau + n + + // Compute A = Q * R. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + // Copy R to work[iu:], zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, work[iu:], ldworku) + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, work[iu+ldworku:], ldworku) + + // Generate Q in A. + impl.Dorgqr(m, n, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in work[iu:], copying result to VT. + impl.Dgebrd(n, n, work[iu:], ldworku, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, n, n, work[iu:], ldworku, vt, ldvt) + + // Generate left bidiagonalizing vectors in work[iu:]. + impl.Dorgbr(lapack.GenerateQ, n, n, n, work[iu:], ldworku, + work[itauq:], work[iwork:], lwork-iwork) + + // Generate right bidiagonalizing vectors in VT. 
+ impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of R in work[iu:], and computing right singular + // vectors of R in VT. + ok = impl.Dbdsqr(blas.Upper, n, n, n, 0, s, work[ie:], + vt, ldvt, work[iu:], ldworku, work, 1, work[iwork:]) + + // Multiply Q in A by left singular vectors of R in + // work[iu:], storing result in U. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, n, 1, a, lda, + work[iu:], ldworku, 0, u, ldu) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + n + + // Compute A = Q * R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Generate Q in U. + impl.Dorgqr(m, n, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + + // Copy R to VT, zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, vt, ldvt) + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, vt[ldvt:], ldvt) + + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in VT. + impl.Dgebrd(n, n, vt, ldvt, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply Q in U by left bidiagonalizing vectors in VT. + impl.Dormbr(lapack.ApplyQ, blas.Right, blas.NoTrans, m, n, n, + vt, ldvt, work[itauq:], u, ldu, work[iwork:], lwork-iwork) + + // Generate right bidiagonalizing vectors in VT. + impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of A in U and computing right singular vectors + // of A in VT. + ok = impl.Dbdsqr(blas.Upper, n, n, m, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + } + } + } else if wantua { + if wantvn { + // Path 7 + if lwork >= n*n+max(max(n+m, 4*n), bdspac) { + // Sufficient workspace for a fast algorithm. + ir := 0 + var ldworkr int + if lwork >= wrkbl+lda*n { + ldworkr = lda + } else { + ldworkr = n + } + itau := ir + ldworkr*n + iwork := itau + n + + // Compute A = Q*R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Copy R to work[ir:], zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, work[ir:], ldworkr) + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, work[ir+ldworkr:], ldworkr) + + // Generate Q in U. + impl.Dorgqr(m, m, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in work[ir:]. + impl.Dgebrd(n, n, work[ir:], ldworkr, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Generate left bidiagonalizing vectors in work[ir:]. + impl.Dorgbr(lapack.GenerateQ, n, n, n, work[ir:], ldworkr, + work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of R in work[ir:]. + ok = impl.Dbdsqr(blas.Upper, n, 0, n, 0, s, work[ie:], work, 1, + work[ir:], ldworkr, work, 1, work[iwork:]) + + // Multiply Q in U by left singular vectors of R in + // work[ir:], storing result in A. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, n, 1, u, ldu, + work[ir:], ldworkr, 0, a, lda) + + // Copy left singular vectors of A from A to U. + impl.Dlacpy(blas.All, m, n, a, lda, u, ldu) + } else { + // Insufficient workspace for a fast algorithm. 
+ itau := 0 + iwork := itau + n + + // Compute A = Q*R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Generate Q in U. + impl.Dorgqr(m, m, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Zero out below R in A. + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, a[lda:], lda) + + // Bidiagonalize R in A. + impl.Dgebrd(n, n, a, lda, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply Q in U by left bidiagonalizing vectors in A. + impl.Dormbr(lapack.ApplyQ, blas.Right, blas.NoTrans, m, n, n, + a, lda, work[itauq:], u, ldu, work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left + // singular vectors of A in U. + ok = impl.Dbdsqr(blas.Upper, n, 0, m, 0, s, work[ie:], + work, 1, u, ldu, work, 1, work[iwork:]) + } + } else if wantvo { + // Path 8. + panic(noSVDO) + } else if wantvas { + // Path 9. + if lwork >= n*n+max(max(n+m, 4*n), bdspac) { + // Sufficient workspace for a fast algorithm. + iu := 0 + var ldworku int + if lwork >= wrkbl+lda*n { + ldworku = lda + } else { + ldworku = n + } + itau := iu + ldworku*n + iwork := itau + n + + // Compute A = Q * R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Generate Q in U. + impl.Dorgqr(m, m, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + + // Copy R to work[iu:], zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, work[iu:], ldworku) + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, work[iu+ldworku:], ldworku) + + ie = itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in work[iu:], copying result to VT. + impl.Dgebrd(n, n, work[iu:], ldworku, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, n, n, work[iu:], ldworku, vt, ldvt) + + // Generate left bidiagonalizing vectors in work[iu:]. + impl.Dorgbr(lapack.GenerateQ, n, n, n, work[iu:], ldworku, + work[itauq:], work[iwork:], lwork-iwork) + + // Generate right bidiagonalizing vectors in VT. + impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of R in work[iu:] and computing right + // singular vectors of R in VT. + ok = impl.Dbdsqr(blas.Upper, n, n, n, 0, s, work[ie:], + vt, ldvt, work[iu:], ldworku, work, 1, work[iwork:]) + + // Multiply Q in U by left singular vectors of R in + // work[iu:], storing result in A. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, n, 1, + u, ldu, work[iu:], ldworku, 0, a, lda) + + // Copy left singular vectors of A from A to U. + impl.Dlacpy(blas.All, m, n, a, lda, u, ldu) + + /* + // Bidiagonalize R in VT. + impl.Dgebrd(n, n, vt, ldvt, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply Q in U by left bidiagonalizing vectors in VT. + impl.Dormbr(lapack.ApplyQ, blas.Right, blas.NoTrans, + m, n, n, vt, ldvt, work[itauq:], u, ldu, work[iwork:], lwork-iwork) + + // Generate right bidiagonalizing vectors in VT. + impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of A in U and computing right singular vectors + // of A in VT. 
+ ok = impl.Dbdsqr(blas.Upper, n, n, m, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + */ + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + n + + // Compute A = Q*R, copying result to U. + impl.Dgeqrf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + + // Generate Q in U. + impl.Dorgqr(m, m, n, u, ldu, work[itau:], work[iwork:], lwork-iwork) + + // Copy R from A to VT, zeroing out below it. + impl.Dlacpy(blas.Upper, n, n, a, lda, vt, ldvt) + if n > 1 { + impl.Dlaset(blas.Lower, n-1, n-1, 0, 0, vt[ldvt:], ldvt) + } + + ie := itau + itauq := ie + n + itaup := itauq + n + iwork = itaup + n + + // Bidiagonalize R in VT. + impl.Dgebrd(n, n, vt, ldvt, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply Q in U by left bidiagonalizing vectors in VT. + impl.Dormbr(lapack.ApplyQ, blas.Right, blas.NoTrans, + m, n, n, vt, ldvt, work[itauq:], u, ldu, work[iwork:], lwork-iwork) + + // Generate right bidiagonizing vectors in VT. + impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + n + + // Perform bidiagonal QR iteration, computing left singular + // vectors of A in U and computing right singular vectors + // of A in VT. + ok = impl.Dbdsqr(blas.Upper, n, n, m, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + } + } + } + } else { + // Path 10. + // M at least N, but not much larger. + ie = 0 + itauq := ie + n + itaup := itauq + n + iwork := itaup + n + + // Bidiagonalize A. + impl.Dgebrd(m, n, a, lda, s, work[ie:], work[itauq:], + work[itaup:], work[iwork:], lwork-iwork) + if wantuas { + // Left singular vectors are desired in U. Copy result to U and + // generate left biadiagonalizing vectors in U. + impl.Dlacpy(blas.Lower, m, n, a, lda, u, ldu) + var ncu int + if wantus { + ncu = n + } + if wantua { + ncu = m + } + impl.Dorgbr(lapack.GenerateQ, m, ncu, n, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + } + if wantvas { + // Right singular vectors are desired in VT. Copy result to VT and + // generate left biadiagonalizing vectors in VT. + impl.Dlacpy(blas.Upper, n, n, a, lda, vt, ldvt) + impl.Dorgbr(lapack.GeneratePT, n, n, n, vt, ldvt, work[itaup:], work[iwork:], lwork-iwork) + } + if wantuo { + panic(noSVDO) + } + if wantvo { + panic(noSVDO) + } + iwork = ie + n + var nru, ncvt int + if wantuas || wantuo { + nru = m + } + if wantun { + nru = 0 + } + if wantvas || wantvo { + ncvt = n + } + if wantvn { + ncvt = 0 + } + if !wantuo && !wantvo { + // Perform bidiagonal QR iteration, if desired, computing left + // singular vectors in U and right singular vectors in VT. + ok = impl.Dbdsqr(blas.Upper, n, ncvt, nru, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + } else { + // There will be two branches when the implementation is complete. + panic(noSVDO) + } + } + } else { + // A has more columns than rows. If A has sufficiently more columns than + // rows, first reduce using the LQ decomposition. + if n >= mnthr { + // n >> m. + if wantvn { + // Path 1t. + itau := 0 + iwork := itau + m + + // Compute A = L*Q. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Zero out above L. + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, a[1:], lda) + ie := 0 + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in A. 
+ impl.Dgebrd(m, m, a, lda, s, work[ie:itauq], + work[itauq:itaup], work[itaup:iwork], work[iwork:], lwork-iwork) + if wantuo || wantuas { + impl.Dorgbr(lapack.GenerateQ, m, m, m, a, lda, + work[itauq:], work[iwork:], lwork-iwork) + } + iwork = ie + m + nru := 0 + if wantuo || wantuas { + nru = m + } + + // Perform bidiagonal QR iteration, computing left singular vectors + // of A in A if desired. + ok = impl.Dbdsqr(blas.Upper, m, 0, nru, 0, s, work[ie:], + work, 1, a, lda, work, 1, work[iwork:]) + + // If left singular vectors desired in U, copy them there. + if wantuas { + impl.Dlacpy(blas.All, m, m, a, lda, u, ldu) + } + } else if wantvo && wantun { + // Path 2t. + panic(noSVDO) + } else if wantvo && wantuas { + // Path 3t. + panic(noSVDO) + } else if wantvs { + if wantun { + // Path 4t. + if lwork >= m*m+max(4*m, bdspac) { + // Sufficient workspace for a fast algorithm. + ir := 0 + var ldworkr int + if lwork >= wrkbl+lda*m { + ldworkr = lda + } else { + ldworkr = m + } + itau := ir + ldworkr*m + iwork := itau + m + + // Compute A = L*Q. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Copy L to work[ir:], zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, work[ir:], ldworkr) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, work[ir+1:], ldworkr) + + // Generate Q in A. + impl.Dorglq(m, n, m, a, lda, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in work[ir:]. + impl.Dgebrd(m, m, work[ir:], ldworkr, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Generate right vectors bidiagonalizing L in work[ir:]. + impl.Dorgbr(lapack.GeneratePT, m, m, m, work[ir:], ldworkr, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing right singular + // vectors of L in work[ir:]. + ok = impl.Dbdsqr(blas.Upper, m, m, 0, 0, s, work[ie:], + work[ir:], ldworkr, work, 1, work, 1, work[iwork:]) + + // Multiply right singular vectors of L in work[ir:] by + // Q in A, storing result in VT. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, m, 1, + work[ir:], ldworkr, a, lda, 0, vt, ldvt) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + m + + // Compute A = L*Q. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Copy result to VT. + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Generate Q in VT. + impl.Dorglq(m, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Zero out above L in A. + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, a[1:], lda) + + // Bidiagonalize L in A. + impl.Dgebrd(m, m, a, lda, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply right vectors bidiagonalizing L by Q in VT. + impl.Dormbr(lapack.ApplyP, blas.Left, blas.Trans, m, n, m, + a, lda, work[itaup:], vt, ldvt, work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing right + // singular vectors of A in VT. + ok = impl.Dbdsqr(blas.Upper, m, n, 0, 0, s, work[ie:], + vt, ldvt, work, 1, work, 1, work[iwork:]) + } + } else if wantuo { + // Path 5t. + panic(noSVDO) + } else if wantuas { + // Path 6t. + if lwork >= m*m+max(4*m, bdspac) { + // Sufficient workspace for a fast algorithm. 
+ iu := 0 + var ldworku int + if lwork >= wrkbl+lda*m { + ldworku = lda + } else { + ldworku = m + } + itau := iu + ldworku*m + iwork := itau + m + + // Compute A = L*Q. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + + // Copy L to work[iu:], zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, work[iu:], ldworku) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, work[iu+1:], ldworku) + + // Generate Q in A. + impl.Dorglq(m, n, m, a, lda, work[itau:], work[iwork:], lwork-iwork) + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in work[iu:], copying result to U. + impl.Dgebrd(m, m, work[iu:], ldworku, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, m, work[iu:], ldworku, u, ldu) + + // Generate right bidiagionalizing vectors in work[iu:]. + impl.Dorgbr(lapack.GeneratePT, m, m, m, work[iu:], ldworku, + work[itaup:], work[iwork:], lwork-iwork) + + // Generate left bidiagonalizing vectors in U. + impl.Dorgbr(lapack.GenerateQ, m, m, m, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing left singular + // vectors of L in U and computing right singular vectors of + // L in work[iu:]. + ok = impl.Dbdsqr(blas.Upper, m, m, m, 0, s, work[ie:], + work[iu:], ldworku, u, ldu, work, 1, work[iwork:]) + + // Multiply right singular vectors of L in work[iu:] by + // Q in A, storing result in VT. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, m, 1, + work[iu:], ldworku, a, lda, 0, vt, ldvt) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + m + + // Compute A = L*Q, copying result to VT. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Generate Q in VT. + impl.Dorglq(m, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + + // Copy L to U, zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, u, ldu) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, u[1:], ldu) + + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in U. + impl.Dgebrd(m, m, u, ldu, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Multiply right bidiagonalizing vectors in U by Q in VT. + impl.Dormbr(lapack.ApplyP, blas.Left, blas.Trans, m, n, m, + u, ldu, work[itaup:], vt, ldvt, work[iwork:], lwork-iwork) + + // Generate left bidiagonalizing vectors in U. + impl.Dorgbr(lapack.GenerateQ, m, m, m, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing left singular + // vectors of A in U and computing right singular vectors + // of A in VT. + ok = impl.Dbdsqr(blas.Upper, m, n, m, 0, s, work[ie:], vt, ldvt, + u, ldu, work, 1, work[iwork:]) + } + } + } else if wantva { + if wantun { + // Path 7t. + if lwork >= m*m+max(max(n+m, 4*m), bdspac) { + // Sufficient workspace for a fast algorithm. + ir := 0 + var ldworkr int + if lwork >= wrkbl+lda*m { + ldworkr = lda + } else { + ldworkr = m + } + itau := ir + ldworkr*m + iwork := itau + m + + // Compute A = L*Q, copying result to VT. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Copy L to work[ir:], zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, work[ir:], ldworkr) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, work[ir+1:], ldworkr) + + // Generate Q in VT. 
+ impl.Dorglq(n, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in work[ir:]. + impl.Dgebrd(m, m, work[ir:], ldworkr, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + + // Generate right bidiagonalizing vectors in work[ir:]. + impl.Dorgbr(lapack.GeneratePT, m, m, m, work[ir:], ldworkr, + work[itaup:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing right + // singular vectors of L in work[ir:]. + ok = impl.Dbdsqr(blas.Upper, m, m, 0, 0, s, work[ie:], + work[ir:], ldworkr, work, 1, work, 1, work[iwork:]) + + // Multiply right singular vectors of L in work[ir:] by + // Q in VT, storing result in A. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, m, 1, + work[ir:], ldworkr, vt, ldvt, 0, a, lda) + + // Copy right singular vectors of A from A to VT. + impl.Dlacpy(blas.All, m, n, a, lda, vt, ldvt) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + m + // Compute A = L * Q, copying result to VT. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Generate Q in VT. + impl.Dorglq(n, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + + ie := itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Zero out above L in A. + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, a[1:], lda) + + // Bidiagonalize L in A. + impl.Dgebrd(m, m, a, lda, s, work[ie:], work[itauq:], + work[itaup:], work[iwork:], lwork-iwork) + + // Multiply right bidiagonalizing vectors in A by Q in VT. + impl.Dormbr(lapack.ApplyP, blas.Left, blas.Trans, m, n, m, + a, lda, work[itaup:], vt, ldvt, work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing right singular + // vectors of A in VT. + ok = impl.Dbdsqr(blas.Upper, m, n, 0, 0, s, work[ie:], + vt, ldvt, work, 1, work, 1, work[iwork:]) + } + } else if wantuo { + panic(noSVDO) + } else if wantuas { + // Path 9t. + if lwork >= m*m+max(max(m+n, 4*m), bdspac) { + // Sufficient workspace for a fast algorithm. + iu := 0 + + var ldworku int + if lwork >= wrkbl+lda*m { + ldworku = lda + } else { + ldworku = m + } + itau := iu + ldworku*m + iwork := itau + m + + // Generate A = L * Q copying result to VT. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Generate Q in VT. + impl.Dorglq(n, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + + // Copy L to work[iu:], zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, work[iu:], ldworku) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, work[iu+1:], ldworku) + ie = itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in work[iu:], copying result to U. + impl.Dgebrd(m, m, work[iu:], ldworku, s, work[ie:], + work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Lower, m, m, work[iu:], ldworku, u, ldu) + + // Generate right bidiagonalizing vectors in work[iu:]. + impl.Dorgbr(lapack.GeneratePT, m, m, m, work[iu:], ldworku, + work[itaup:], work[iwork:], lwork-iwork) + + // Generate left bidiagonalizing vectors in U. + impl.Dorgbr(lapack.GenerateQ, m, m, m, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing left singular + // vectors of L in U and computing right singular vectors + // of L in work[iu:]. 
+ ok = impl.Dbdsqr(blas.Upper, m, m, m, 0, s, work[ie:], + work[iu:], ldworku, u, ldu, work, 1, work[iwork:]) + + // Multiply right singular vectors of L in work[iu:] + // Q in VT, storing result in A. + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n, m, 1, + work[iu:], ldworku, vt, ldvt, 0, a, lda) + + // Copy right singular vectors of A from A to VT. + impl.Dlacpy(blas.All, m, n, a, lda, vt, ldvt) + } else { + // Insufficient workspace for a fast algorithm. + itau := 0 + iwork := itau + m + + // Compute A = L * Q, copying result to VT. + impl.Dgelqf(m, n, a, lda, work[itau:], work[iwork:], lwork-iwork) + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + + // Generate Q in VT. + impl.Dorglq(n, n, m, vt, ldvt, work[itau:], work[iwork:], lwork-iwork) + + // Copy L to U, zeroing out above it. + impl.Dlacpy(blas.Lower, m, m, a, lda, u, ldu) + impl.Dlaset(blas.Upper, m-1, m-1, 0, 0, u[1:], ldu) + + ie = itau + itauq := ie + m + itaup := itauq + m + iwork = itaup + m + + // Bidiagonalize L in U. + impl.Dgebrd(m, m, u, ldu, s, work[ie:], work[itauq:], + work[itaup:], work[iwork:], lwork-iwork) + + // Multiply right bidiagonalizing vectors in U by Q in VT. + impl.Dormbr(lapack.ApplyP, blas.Left, blas.Trans, m, n, m, + u, ldu, work[itaup:], vt, ldvt, work[iwork:], lwork-iwork) + + // Generate left bidiagonalizing vectors in U. + impl.Dorgbr(lapack.GenerateQ, m, m, m, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + iwork = ie + m + + // Perform bidiagonal QR iteration, computing left singular + // vectors of A in U and computing right singular vectors + // of A in VT. + ok = impl.Dbdsqr(blas.Upper, m, n, m, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + } + } + } + } else { + // Path 10t. + // N at least M, but not much larger. + ie = 0 + itauq := ie + m + itaup := itauq + m + iwork := itaup + m + + // Bidiagonalize A. + impl.Dgebrd(m, n, a, lda, s, work[ie:], work[itauq:], work[itaup:], work[iwork:], lwork-iwork) + if wantuas { + // If left singular vectors desired in U, copy result to U and + // generate left bidiagonalizing vectors in U. + impl.Dlacpy(blas.Lower, m, m, a, lda, u, ldu) + impl.Dorgbr(lapack.GenerateQ, m, m, n, u, ldu, work[itauq:], work[iwork:], lwork-iwork) + } + if wantvas { + // If right singular vectors desired in VT, copy result to VT + // and generate right bidiagonalizing vectors in VT. + impl.Dlacpy(blas.Upper, m, n, a, lda, vt, ldvt) + var nrvt int + if wantva { + nrvt = n + } else { + nrvt = m + } + impl.Dorgbr(lapack.GeneratePT, nrvt, n, m, vt, ldvt, work[itaup:], work[iwork:], lwork-iwork) + } + if wantuo { + panic(noSVDO) + } + if wantvo { + panic(noSVDO) + } + iwork = ie + m + var nru, ncvt int + if wantuas || wantuo { + nru = m + } + if wantvas || wantvo { + ncvt = n + } + if !wantuo && !wantvo { + // Perform bidiagonal QR iteration, if desired, computing left + // singular vectors in U and computing right singular vectors in + // VT. + ok = impl.Dbdsqr(blas.Lower, m, ncvt, nru, 0, s, work[ie:], + vt, ldvt, u, ldu, work, 1, work[iwork:]) + } else { + // There will be two branches when the implementation is complete. + panic(noSVDO) + } + } + } + if !ok { + if ie > 1 { + for i := 0; i < minmn-1; i++ { + work[i+1] = work[i+ie] + } + } + if ie < 1 { + for i := minmn - 2; i >= 0; i-- { + work[i+1] = work[i+ie] + } + } + } + // Undo scaling if necessary. 
+ if iscl { + if anrm > bignum { + impl.Dlascl(lapack.General, 0, 0, bignum, anrm, 1, minmn, s, minmn) + } + if !ok && anrm > bignum { + impl.Dlascl(lapack.General, 0, 0, bignum, anrm, 1, minmn-1, work[1:], minmn) + } + if anrm < smlnum { + impl.Dlascl(lapack.General, 0, 0, smlnum, anrm, 1, minmn, s, minmn) + } + if !ok && anrm < smlnum { + impl.Dlascl(lapack.General, 0, 0, smlnum, anrm, 1, minmn-1, work[1:], minmn) + } + } + work[0] = float64(maxwrk) + return ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgetf2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetf2.go new file mode 100644 index 0000000..63ad72e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetf2.go @@ -0,0 +1,84 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgetf2 computes the LU decomposition of the m×n matrix A. +// The LU decomposition is a factorization of a into +// A = P * L * U +// where P is a permutation matrix, L is a unit lower triangular matrix, and +// U is a (usually) non-unit upper triangular matrix. On exit, L and U are stored +// in place into a. +// +// ipiv is a permutation vector. It indicates that row i of the matrix was +// changed with ipiv[i]. ipiv must have length at least min(m,n), and will panic +// otherwise. ipiv is zero-indexed. +// +// Dgetf2 returns whether the matrix A is singular. The LU decomposition will +// be computed regardless of the singularity of A, but division by zero +// will occur if the false is returned and the result is used to solve a +// system of equations. +// +// Dgetf2 is an internal routine. It is exported for testing purposes. +func (Implementation) Dgetf2(m, n int, a []float64, lda int, ipiv []int) (ok bool) { + mn := min(m, n) + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if mn == 0 { + return true + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(ipiv) != mn: + panic(badLenIpiv) + } + + bi := blas64.Implementation() + + sfmin := dlamchS + ok = true + for j := 0; j < mn; j++ { + // Find a pivot and test for singularity. + jp := j + bi.Idamax(m-j, a[j*lda+j:], lda) + ipiv[j] = jp + if a[jp*lda+j] == 0 { + ok = false + } else { + // Swap the rows if necessary. + if jp != j { + bi.Dswap(n, a[j*lda:], 1, a[jp*lda:], 1) + } + if j < m-1 { + aj := a[j*lda+j] + if math.Abs(aj) >= sfmin { + bi.Dscal(m-j-1, 1/aj, a[(j+1)*lda+j:], lda) + } else { + for i := 0; i < m-j-1; i++ { + a[(j+1)*lda+j] = a[(j+1)*lda+j] / a[lda*j+j] + } + } + } + } + if j < mn-1 { + bi.Dger(m-j-1, n-j-1, -1, a[(j+1)*lda+j:], lda, a[j*lda+j+1:], 1, a[(j+1)*lda+j+1:], lda) + } + } + return ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrf.go new file mode 100644 index 0000000..ad01e71 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrf.go @@ -0,0 +1,85 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgetrf computes the LU decomposition of the m×n matrix A. 
+// The LU decomposition is a factorization of A into +// A = P * L * U +// where P is a permutation matrix, L is a unit lower triangular matrix, and +// U is a (usually) non-unit upper triangular matrix. On exit, L and U are stored +// in place into a. +// +// ipiv is a permutation vector. It indicates that row i of the matrix was +// changed with ipiv[i]. ipiv must have length at least min(m,n), and will panic +// otherwise. ipiv is zero-indexed. +// +// Dgetrf is the blocked version of the algorithm. +// +// Dgetrf returns whether the matrix A is singular. The LU decomposition will +// be computed regardless of the singularity of A, but division by zero +// will occur if the false is returned and the result is used to solve a +// system of equations. +func (impl Implementation) Dgetrf(m, n int, a []float64, lda int, ipiv []int) (ok bool) { + mn := min(m, n) + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if mn == 0 { + return true + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(ipiv) != mn: + panic(badLenIpiv) + } + + bi := blas64.Implementation() + + nb := impl.Ilaenv(1, "DGETRF", " ", m, n, -1, -1) + if nb <= 1 || mn <= nb { + // Use the unblocked algorithm. + return impl.Dgetf2(m, n, a, lda, ipiv) + } + ok = true + for j := 0; j < mn; j += nb { + jb := min(mn-j, nb) + blockOk := impl.Dgetf2(m-j, jb, a[j*lda+j:], lda, ipiv[j:j+jb]) + if !blockOk { + ok = false + } + for i := j; i <= min(m-1, j+jb-1); i++ { + ipiv[i] = j + ipiv[i] + } + impl.Dlaswp(j, a, lda, j, j+jb-1, ipiv[:j+jb], 1) + if j+jb < n { + impl.Dlaswp(n-j-jb, a[j+jb:], lda, j, j+jb-1, ipiv[:j+jb], 1) + bi.Dtrsm(blas.Left, blas.Lower, blas.NoTrans, blas.Unit, + jb, n-j-jb, 1, + a[j*lda+j:], lda, + a[j*lda+j+jb:], lda) + if j+jb < m { + bi.Dgemm(blas.NoTrans, blas.NoTrans, m-j-jb, n-j-jb, jb, -1, + a[(j+jb)*lda+j:], lda, + a[j*lda+j+jb:], lda, + 1, a[(j+jb)*lda+j+jb:], lda) + } + } + } + return ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgetri.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetri.go new file mode 100644 index 0000000..b2f2ae4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetri.go @@ -0,0 +1,116 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgetri computes the inverse of the matrix A using the LU factorization computed +// by Dgetrf. On entry, a contains the PLU decomposition of A as computed by +// Dgetrf and on exit contains the reciprocal of the original matrix. +// +// Dgetri will not perform the inversion if the matrix is singular, and returns +// a boolean indicating whether the inversion was successful. +// +// work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= n and this function will panic otherwise. +// Dgetri is a blocked inversion, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Dgetri, +// the optimal work length will be stored into work[0]. 
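Illustrative sketch, not part of the vendored file: the Dgetrf/Dgetri pair is typically driven exactly as the comment above describes — factor, query the workspace with lwork == -1, then invert in place. The 3×3 matrix, the import alias, and the main wrapper below are arbitrary example choices.

package main

import (
    "fmt"

    lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := lapackgonum.Implementation{}

    // Arbitrary nonsingular 3×3 matrix, stored row-major with lda == n.
    const n = 3
    a := []float64{
        4, 2, 0,
        2, 3, 1,
        0, 1, 2,
    }
    ipiv := make([]int, n)

    // PLU factorization; ok == false would signal a singular matrix.
    if !impl.Dgetrf(n, n, a, n, ipiv) {
        fmt.Println("matrix is singular")
        return
    }

    // Workspace query: with lwork == -1 the optimal length is written to work[0].
    var query [1]float64
    impl.Dgetri(n, a, n, ipiv, query[:], -1)
    work := make([]float64, int(query[0]))

    // Invert in place using the stored factorization.
    ok := impl.Dgetri(n, a, n, ipiv, work, len(work))
    fmt.Println(ok, a) // a now holds the inverse of A, row-major.
}

The same lwork == -1 query convention applies to the other routines in this patch that take a work slice, such as Dgerqf, Dgesvd, and Dggsvd3.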
+func (impl Implementation) Dgetri(n int, a []float64, lda int, ipiv []int, work []float64, lwork int) (ok bool) { + iws := max(1, n) + switch { + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < iws && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + if n == 0 { + work[0] = 1 + return true + } + + nb := impl.Ilaenv(1, "DGETRI", " ", n, -1, -1, -1) + if lwork == -1 { + work[0] = float64(n * nb) + return true + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(ipiv) != n: + panic(badLenIpiv) + } + + // Form inv(U). + ok = impl.Dtrtri(blas.Upper, blas.NonUnit, n, a, lda) + if !ok { + return false + } + + nbmin := 2 + if 1 < nb && nb < n { + iws = max(n*nb, 1) + if lwork < iws { + nb = lwork / n + nbmin = max(2, impl.Ilaenv(2, "DGETRI", " ", n, -1, -1, -1)) + } + } + ldwork := nb + + bi := blas64.Implementation() + // Solve the equation inv(A)*L = inv(U) for inv(A). + // TODO(btracey): Replace this with a more row-major oriented algorithm. + if nb < nbmin || n <= nb { + // Unblocked code. + for j := n - 1; j >= 0; j-- { + for i := j + 1; i < n; i++ { + // Copy current column of L to work and replace with zeros. + work[i] = a[i*lda+j] + a[i*lda+j] = 0 + } + // Compute current column of inv(A). + if j < n-1 { + bi.Dgemv(blas.NoTrans, n, n-j-1, -1, a[(j+1):], lda, work[(j+1):], 1, 1, a[j:], lda) + } + } + } else { + // Blocked code. + nn := ((n - 1) / nb) * nb + for j := nn; j >= 0; j -= nb { + jb := min(nb, n-j) + // Copy current block column of L to work and replace + // with zeros. + for jj := j; jj < j+jb; jj++ { + for i := jj + 1; i < n; i++ { + work[i*ldwork+(jj-j)] = a[i*lda+jj] + a[i*lda+jj] = 0 + } + } + // Compute current block column of inv(A). + if j+jb < n { + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, jb, n-j-jb, -1, a[(j+jb):], lda, work[(j+jb)*ldwork:], ldwork, 1, a[j:], lda) + } + bi.Dtrsm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, n, jb, 1, work[j*ldwork:], ldwork, a[j:], lda) + } + } + // Apply column interchanges. + for j := n - 2; j >= 0; j-- { + jp := ipiv[j] + if jp != j { + bi.Dswap(n, a[j:], lda, a[jp:], lda) + } + } + work[0] = float64(iws) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrs.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrs.go new file mode 100644 index 0000000..ecc20d7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dgetrs.go @@ -0,0 +1,72 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dgetrs solves a system of equations using an LU factorization. +// The system of equations solved is +// A * X = B if trans == blas.Trans +// A^T * X = B if trans == blas.NoTrans +// A is a general n×n matrix with stride lda. B is a general matrix of size n×nrhs. +// +// On entry b contains the elements of the matrix B. On exit, b contains the +// elements of X, the solution to the system of equations. +// +// a and ipiv contain the LU factorization of A and the permutation indices as +// computed by Dgetrf. ipiv is zero-indexed. 
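Illustrative sketch, not part of the vendored file: a factored system is solved by passing the outputs of Dgetrf straight to Dgetrs. As in the blas.NoTrans branch of the implementation below, this requests the plain A * X = B solve; the 3×3 system, the import alias, and the main wrapper are arbitrary example choices, with solution (1, 1, 1).

package main

import (
    "fmt"

    "gonum.org/v1/gonum/blas"
    lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := lapackgonum.Implementation{}

    // Arbitrary 3×3 system A * x = b with solution x = (1, 1, 1).
    const n, nrhs = 3, 1
    a := []float64{
        2, 1, 0,
        1, 3, 1,
        0, 1, 2,
    }
    b := []float64{3, 5, 3} // n×nrhs right-hand side, ldb == nrhs

    ipiv := make([]int, n)
    if !impl.Dgetrf(n, n, a, n, ipiv) {
        fmt.Println("matrix is singular")
        return
    }

    // Solve using the stored factorization; b is overwritten with x.
    impl.Dgetrs(blas.NoTrans, n, nrhs, a, n, ipiv, b, nrhs)
    fmt.Println(b) // approximately [1 1 1]
}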
+func (impl Implementation) Dgetrs(trans blas.Transpose, n, nrhs int, a []float64, lda int, ipiv []int, b []float64, ldb int) { + switch { + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTrans) + case n < 0: + panic(nLT0) + case nrhs < 0: + panic(nrhsLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, nrhs): + panic(badLdB) + } + + // Quick return if possible. + if n == 0 || nrhs == 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(b) < (n-1)*ldb+nrhs: + panic(shortB) + case len(ipiv) != n: + panic(badLenIpiv) + } + + bi := blas64.Implementation() + + if trans == blas.NoTrans { + // Solve A * X = B. + impl.Dlaswp(nrhs, b, ldb, 0, n-1, ipiv, 1) + // Solve L * X = B, updating b. + bi.Dtrsm(blas.Left, blas.Lower, blas.NoTrans, blas.Unit, + n, nrhs, 1, a, lda, b, ldb) + // Solve U * X = B, updating b. + bi.Dtrsm(blas.Left, blas.Upper, blas.NoTrans, blas.NonUnit, + n, nrhs, 1, a, lda, b, ldb) + return + } + // Solve A^T * X = B. + // Solve U^T * X = B, updating b. + bi.Dtrsm(blas.Left, blas.Upper, blas.Trans, blas.NonUnit, + n, nrhs, 1, a, lda, b, ldb) + // Solve L^T * X = B, updating b. + bi.Dtrsm(blas.Left, blas.Lower, blas.Trans, blas.Unit, + n, nrhs, 1, a, lda, b, ldb) + impl.Dlaswp(nrhs, b, ldb, 0, n-1, ipiv, -1) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvd3.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvd3.go new file mode 100644 index 0000000..ac234dc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvd3.go @@ -0,0 +1,242 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dggsvd3 computes the generalized singular value decomposition (GSVD) +// of an m×n matrix A and p×n matrix B: +// U^T*A*Q = D1*[ 0 R ] +// +// V^T*B*Q = D2*[ 0 R ] +// where U, V and Q are orthogonal matrices. +// +// Dggsvd3 returns k and l, the dimensions of the sub-blocks. k+l +// is the effective numerical rank of the (m+p)×n matrix [ A^T B^T ]^T. +// R is a (k+l)×(k+l) nonsingular upper triangular matrix, D1 and +// D2 are m×(k+l) and p×(k+l) diagonal matrices and of the following +// structures, respectively: +// +// If m-k-l >= 0, +// +// k l +// D1 = k [ I 0 ] +// l [ 0 C ] +// m-k-l [ 0 0 ] +// +// k l +// D2 = l [ 0 S ] +// p-l [ 0 0 ] +// +// n-k-l k l +// [ 0 R ] = k [ 0 R11 R12 ] k +// l [ 0 0 R22 ] l +// +// where +// +// C = diag( alpha_k, ... , alpha_{k+l} ), +// S = diag( beta_k, ... , beta_{k+l} ), +// C^2 + S^2 = I. +// +// R is stored in +// A[0:k+l, n-k-l:n] +// on exit. +// +// If m-k-l < 0, +// +// k m-k k+l-m +// D1 = k [ I 0 0 ] +// m-k [ 0 C 0 ] +// +// k m-k k+l-m +// D2 = m-k [ 0 S 0 ] +// k+l-m [ 0 0 I ] +// p-l [ 0 0 0 ] +// +// n-k-l k m-k k+l-m +// [ 0 R ] = k [ 0 R11 R12 R13 ] +// m-k [ 0 0 R22 R23 ] +// k+l-m [ 0 0 0 R33 ] +// +// where +// C = diag( alpha_k, ... , alpha_m ), +// S = diag( beta_k, ... , beta_m ), +// C^2 + S^2 = I. +// +// R = [ R11 R12 R13 ] is stored in A[1:m, n-k-l+1:n] +// [ 0 R22 R23 ] +// and R33 is stored in +// B[m-k:l, n+m-k-l:n] on exit. +// +// Dggsvd3 computes C, S, R, and optionally the orthogonal transformation +// matrices U, V and Q. +// +// jobU, jobV and jobQ are options for computing the orthogonal matrices. 
The behavior +// is as follows +// jobU == lapack.GSVDU Compute orthogonal matrix U +// jobU == lapack.GSVDNone Do not compute orthogonal matrix. +// The behavior is the same for jobV and jobQ with the exception that instead of +// lapack.GSVDU these accept lapack.GSVDV and lapack.GSVDQ respectively. +// The matrices U, V and Q must be m×m, p×p and n×n respectively unless the +// relevant job parameter is lapack.GSVDNone. +// +// alpha and beta must have length n or Dggsvd3 will panic. On exit, alpha and +// beta contain the generalized singular value pairs of A and B +// alpha[0:k] = 1, +// beta[0:k] = 0, +// if m-k-l >= 0, +// alpha[k:k+l] = diag(C), +// beta[k:k+l] = diag(S), +// if m-k-l < 0, +// alpha[k:m]= C, alpha[m:k+l]= 0 +// beta[k:m] = S, beta[m:k+l] = 1. +// if k+l < n, +// alpha[k+l:n] = 0 and +// beta[k+l:n] = 0. +// +// On exit, iwork contains the permutation required to sort alpha descending. +// +// iwork must have length n, work must have length at least max(1, lwork), and +// lwork must be -1 or greater than n, otherwise Dggsvd3 will panic. If +// lwork is -1, work[0] holds the optimal lwork on return, but Dggsvd3 does +// not perform the GSVD. +func (impl Implementation) Dggsvd3(jobU, jobV, jobQ lapack.GSVDJob, m, n, p int, a []float64, lda int, b []float64, ldb int, alpha, beta, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, work []float64, lwork int, iwork []int) (k, l int, ok bool) { + wantu := jobU == lapack.GSVDU + wantv := jobV == lapack.GSVDV + wantq := jobQ == lapack.GSVDQ + switch { + case !wantu && jobU != lapack.GSVDNone: + panic(badGSVDJob + "U") + case !wantv && jobV != lapack.GSVDNone: + panic(badGSVDJob + "V") + case !wantq && jobQ != lapack.GSVDNone: + panic(badGSVDJob + "Q") + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case p < 0: + panic(pLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldu < 1, wantu && ldu < m: + panic(badLdU) + case ldv < 1, wantv && ldv < p: + panic(badLdV) + case ldq < 1, wantq && ldq < n: + panic(badLdQ) + case len(iwork) < n: + panic(shortWork) + case lwork < 1 && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Determine optimal work length. + impl.Dggsvp3(jobU, jobV, jobQ, + m, p, n, + a, lda, + b, ldb, + 0, 0, + u, ldu, + v, ldv, + q, ldq, + iwork, + work, work, -1) + lwkopt := n + int(work[0]) + lwkopt = max(lwkopt, 2*n) + lwkopt = max(lwkopt, 1) + work[0] = float64(lwkopt) + if lwork == -1 { + return 0, 0, true + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(b) < (p-1)*ldb+n: + panic(shortB) + case wantu && len(u) < (m-1)*ldu+m: + panic(shortU) + case wantv && len(v) < (p-1)*ldv+p: + panic(shortV) + case wantq && len(q) < (n-1)*ldq+n: + panic(shortQ) + case len(alpha) != n: + panic(badLenAlpha) + case len(beta) != n: + panic(badLenBeta) + } + + // Compute the Frobenius norm of matrices A and B. + anorm := impl.Dlange(lapack.Frobenius, m, n, a, lda, nil) + bnorm := impl.Dlange(lapack.Frobenius, p, n, b, ldb, nil) + + // Get machine precision and set up threshold for determining + // the effective numerical rank of the matrices A and B. + tola := float64(max(m, n)) * math.Max(anorm, dlamchS) * dlamchP + tolb := float64(max(p, n)) * math.Max(bnorm, dlamchS) * dlamchP + + // Preprocessing. 
+ k, l = impl.Dggsvp3(jobU, jobV, jobQ, + m, p, n, + a, lda, + b, ldb, + tola, tolb, + u, ldu, + v, ldv, + q, ldq, + iwork, + work[:n], work[n:], lwork-n) + + // Compute the GSVD of two upper "triangular" matrices. + _, ok = impl.Dtgsja(jobU, jobV, jobQ, + m, p, n, + k, l, + a, lda, + b, ldb, + tola, tolb, + alpha, beta, + u, ldu, + v, ldv, + q, ldq, + work) + + // Sort the singular values and store the pivot indices in iwork + // Copy alpha to work, then sort alpha in work. + bi := blas64.Implementation() + bi.Dcopy(n, alpha, 1, work[:n], 1) + ibnd := min(l, m-k) + for i := 0; i < ibnd; i++ { + // Scan for largest alpha_{k+i}. + isub := i + smax := work[k+i] + for j := i + 1; j < ibnd; j++ { + if v := work[k+j]; v > smax { + isub = j + smax = v + } + } + if isub != i { + work[k+isub] = work[k+i] + work[k+i] = smax + iwork[k+i] = k + isub + } else { + iwork[k+i] = k + i + } + } + + work[0] = float64(lwkopt) + + return k, l, ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvp3.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvp3.go new file mode 100644 index 0000000..7a9ad9f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dggsvp3.go @@ -0,0 +1,281 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dggsvp3 computes orthogonal matrices U, V and Q such that +// +// n-k-l k l +// U^T*A*Q = k [ 0 A12 A13 ] if m-k-l >= 0; +// l [ 0 0 A23 ] +// m-k-l [ 0 0 0 ] +// +// n-k-l k l +// U^T*A*Q = k [ 0 A12 A13 ] if m-k-l < 0; +// m-k [ 0 0 A23 ] +// +// n-k-l k l +// V^T*B*Q = l [ 0 0 B13 ] +// p-l [ 0 0 0 ] +// +// where the k×k matrix A12 and l×l matrix B13 are non-singular +// upper triangular. A23 is l×l upper triangular if m-k-l >= 0, +// otherwise A23 is (m-k)×l upper trapezoidal. +// +// Dggsvp3 returns k and l, the dimensions of the sub-blocks. k+l +// is the effective numerical rank of the (m+p)×n matrix [ A^T B^T ]^T. +// +// jobU, jobV and jobQ are options for computing the orthogonal matrices. The behavior +// is as follows +// jobU == lapack.GSVDU Compute orthogonal matrix U +// jobU == lapack.GSVDNone Do not compute orthogonal matrix. +// The behavior is the same for jobV and jobQ with the exception that instead of +// lapack.GSVDU these accept lapack.GSVDV and lapack.GSVDQ respectively. +// The matrices U, V and Q must be m×m, p×p and n×n respectively unless the +// relevant job parameter is lapack.GSVDNone. +// +// tola and tolb are the convergence criteria for the Jacobi-Kogbetliantz +// iteration procedure. Generally, they are the same as used in the preprocessing +// step, for example, +// tola = max(m, n)*norm(A)*eps, +// tolb = max(p, n)*norm(B)*eps. +// Where eps is the machine epsilon. +// +// iwork must have length n, work must have length at least max(1, lwork), and +// lwork must be -1 or greater than zero, otherwise Dggsvp3 will panic. +// +// Dggsvp3 is an internal routine. It is exported for testing purposes. 
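A minimal sketch (not part of the vendored sources) of the workspace-query convention that Dggsvd3 and Dggsvp3 share: a first call with lwork == -1 only reports the recommended workspace size in work[0], and a second call with that much workspace performs the actual computation. The 3×2 input matrices below are arbitrary example data.

package main

import (
    "fmt"

    "gonum.org/v1/gonum/lapack"
    "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := gonum.Implementation{}

    // Arbitrary example data: A is m×n, B is p×n, both row-major.
    m, n, p := 3, 2, 3
    a := []float64{
        1, 2,
        3, 4,
        5, 6,
    }
    b := []float64{
        1, 0,
        0, 1,
        1, 1,
    }
    alpha := make([]float64, n)
    beta := make([]float64, n)
    u := make([]float64, m*m)
    v := make([]float64, p*p)
    q := make([]float64, n*n)
    iwork := make([]int, n)

    // Workspace query: lwork == -1 only writes the recommended size into work[0].
    work := make([]float64, 1)
    impl.Dggsvd3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, m, n, p,
        a, n, b, n, alpha, beta, u, m, v, p, q, n, work, -1, iwork)

    // Second call: perform the GSVD with the recommended workspace.
    work = make([]float64, int(work[0]))
    k, l, ok := impl.Dggsvd3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, m, n, p,
        a, n, b, n, alpha, beta, u, m, v, p, q, n, work, len(work), iwork)
    fmt.Println("k =", k, "l =", l, "ok =", ok, "alpha =", alpha, "beta =", beta)
}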
+func (impl Implementation) Dggsvp3(jobU, jobV, jobQ lapack.GSVDJob, m, p, n int, a []float64, lda int, b []float64, ldb int, tola, tolb float64, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, iwork []int, tau, work []float64, lwork int) (k, l int) { + wantu := jobU == lapack.GSVDU + wantv := jobV == lapack.GSVDV + wantq := jobQ == lapack.GSVDQ + switch { + case !wantu && jobU != lapack.GSVDNone: + panic(badGSVDJob + "U") + case !wantv && jobV != lapack.GSVDNone: + panic(badGSVDJob + "V") + case !wantq && jobQ != lapack.GSVDNone: + panic(badGSVDJob + "Q") + case m < 0: + panic(mLT0) + case p < 0: + panic(pLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + case ldu < 1, wantu && ldu < m: + panic(badLdU) + case ldv < 1, wantv && ldv < p: + panic(badLdV) + case ldq < 1, wantq && ldq < n: + panic(badLdQ) + case len(iwork) != n: + panic(shortWork) + case lwork < 1 && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + var lwkopt int + impl.Dgeqp3(p, n, b, ldb, iwork, tau, work, -1) + lwkopt = int(work[0]) + if wantv { + lwkopt = max(lwkopt, p) + } + lwkopt = max(lwkopt, min(n, p)) + lwkopt = max(lwkopt, m) + if wantq { + lwkopt = max(lwkopt, n) + } + impl.Dgeqp3(m, n, a, lda, iwork, tau, work, -1) + lwkopt = max(lwkopt, int(work[0])) + lwkopt = max(1, lwkopt) + if lwork == -1 { + work[0] = float64(lwkopt) + return 0, 0 + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(b) < (p-1)*ldb+n: + panic(shortB) + case wantu && len(u) < (m-1)*ldu+m: + panic(shortU) + case wantv && len(v) < (p-1)*ldv+p: + panic(shortV) + case wantq && len(q) < (n-1)*ldq+n: + panic(shortQ) + case len(tau) < n: + // tau check must come after lwkopt query since + // the Dggsvd3 call for lwkopt query may have + // lwork == -1, and tau is provided by work. + panic(shortTau) + } + + const forward = true + + // QR with column pivoting of B: B*P = V*[ S11 S12 ]. + // [ 0 0 ] + for i := range iwork[:n] { + iwork[i] = 0 + } + impl.Dgeqp3(p, n, b, ldb, iwork, tau, work, lwork) + + // Update A := A*P. + impl.Dlapmt(forward, m, n, a, lda, iwork) + + // Determine the effective rank of matrix B. + for i := 0; i < min(p, n); i++ { + if math.Abs(b[i*ldb+i]) > tolb { + l++ + } + } + + if wantv { + // Copy the details of V, and form V. + impl.Dlaset(blas.All, p, p, 0, 0, v, ldv) + if p > 1 { + impl.Dlacpy(blas.Lower, p-1, min(p, n), b[ldb:], ldb, v[ldv:], ldv) + } + impl.Dorg2r(p, p, min(p, n), v, ldv, tau, work) + } + + // Clean up B. + for i := 1; i < l; i++ { + r := b[i*ldb : i*ldb+i] + for j := range r { + r[j] = 0 + } + } + if p > l { + impl.Dlaset(blas.All, p-l, n, 0, 0, b[l*ldb:], ldb) + } + + if wantq { + // Set Q = I and update Q := Q*P. + impl.Dlaset(blas.All, n, n, 0, 1, q, ldq) + impl.Dlapmt(forward, n, n, q, ldq, iwork) + } + + if p >= l && n != l { + // RQ factorization of [ S11 S12 ]: [ S11 S12 ] = [ 0 S12 ]*Z. + impl.Dgerq2(l, n, b, ldb, tau, work) + + // Update A := A*Z^T. + impl.Dormr2(blas.Right, blas.Trans, m, n, l, b, ldb, tau, a, lda, work) + + if wantq { + // Update Q := Q*Z^T. + impl.Dormr2(blas.Right, blas.Trans, n, n, l, b, ldb, tau, q, ldq, work) + } + + // Clean up B. + impl.Dlaset(blas.All, l, n-l, 0, 0, b, ldb) + for i := 1; i < l; i++ { + r := b[i*ldb+n-l : i*ldb+i+n-l] + for j := range r { + r[j] = 0 + } + } + } + + // Let N-L L + // A = [ A11 A12 ] M, + // + // then the following does the complete QR decomposition of A11: + // + // A11 = U*[ 0 T12 ]*P1^T. 
+ // [ 0 0 ] + for i := range iwork[:n-l] { + iwork[i] = 0 + } + impl.Dgeqp3(m, n-l, a, lda, iwork[:n-l], tau, work, lwork) + + // Determine the effective rank of A11. + for i := 0; i < min(m, n-l); i++ { + if math.Abs(a[i*lda+i]) > tola { + k++ + } + } + + // Update A12 := U^T*A12, where A12 = A[0:m, n-l:n]. + impl.Dorm2r(blas.Left, blas.Trans, m, l, min(m, n-l), a, lda, tau, a[n-l:], lda, work) + + if wantu { + // Copy the details of U, and form U. + impl.Dlaset(blas.All, m, m, 0, 0, u, ldu) + if m > 1 { + impl.Dlacpy(blas.Lower, m-1, min(m, n-l), a[lda:], lda, u[ldu:], ldu) + } + impl.Dorg2r(m, m, min(m, n-l), u, ldu, tau, work) + } + + if wantq { + // Update Q[0:n, 0:n-l] := Q[0:n, 0:n-l]*P1. + impl.Dlapmt(forward, n, n-l, q, ldq, iwork[:n-l]) + } + + // Clean up A: set the strictly lower triangular part of + // A[0:k, 0:k] = 0, and A[k:m, 0:n-l] = 0. + for i := 1; i < k; i++ { + r := a[i*lda : i*lda+i] + for j := range r { + r[j] = 0 + } + } + if m > k { + impl.Dlaset(blas.All, m-k, n-l, 0, 0, a[k*lda:], lda) + } + + if n-l > k { + // RQ factorization of [ T11 T12 ] = [ 0 T12 ]*Z1. + impl.Dgerq2(k, n-l, a, lda, tau, work) + + if wantq { + // Update Q[0:n, 0:n-l] := Q[0:n, 0:n-l]*Z1^T. + impl.Dorm2r(blas.Right, blas.Trans, n, n-l, k, a, lda, tau, q, ldq, work) + } + + // Clean up A. + impl.Dlaset(blas.All, k, n-l-k, 0, 0, a, lda) + for i := 1; i < k; i++ { + r := a[i*lda+n-k-l : i*lda+i+n-k-l] + for j := range r { + a[j] = 0 + } + } + } + + if m > k { + // QR factorization of A[k:m, n-l:n]. + impl.Dgeqr2(m-k, l, a[k*lda+n-l:], lda, tau, work) + if wantu { + // Update U[:, k:m) := U[:, k:m]*U1. + impl.Dorm2r(blas.Right, blas.NoTrans, m, m-k, min(m-k, l), a[k*lda+n-l:], lda, tau, u[k:], ldu, work) + } + + // Clean up A. + for i := k + 1; i < m; i++ { + r := a[i*lda+n-l : i*lda+min(n-l+i-k, n)] + for j := range r { + r[j] = 0 + } + } + } + + work[0] = float64(lwkopt) + return k, l +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dhseqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dhseqr.go new file mode 100644 index 0000000..ed3fbca --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dhseqr.go @@ -0,0 +1,252 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dhseqr computes the eigenvalues of an n×n Hessenberg matrix H and, +// optionally, the matrices T and Z from the Schur decomposition +// H = Z T Z^T, +// where T is an n×n upper quasi-triangular matrix (the Schur form), and Z is +// the n×n orthogonal matrix of Schur vectors. +// +// Optionally Z may be postmultiplied into an input orthogonal matrix Q so that +// this routine can give the Schur factorization of a matrix A which has been +// reduced to the Hessenberg form H by the orthogonal matrix Q: +// A = Q H Q^T = (QZ) T (QZ)^T. +// +// If job == lapack.EigenvaluesOnly, only the eigenvalues will be computed. +// If job == lapack.EigenvaluesAndSchur, the eigenvalues and the Schur form T will +// be computed. +// For other values of job Dhseqr will panic. +// +// If compz == lapack.SchurNone, no Schur vectors will be computed and Z will not be +// referenced. +// If compz == lapack.SchurHess, on return Z will contain the matrix of Schur +// vectors of H. 
+// If compz == lapack.SchurOrig, on entry z is assumed to contain the orthogonal +// matrix Q that is the identity except for the submatrix +// Q[ilo:ihi+1,ilo:ihi+1]. On return z will be updated to the product Q*Z. +// +// ilo and ihi determine the block of H on which Dhseqr operates. It is assumed +// that H is already upper triangular in rows and columns [0:ilo] and [ihi+1:n], +// although it will be only checked that the block is isolated, that is, +// ilo == 0 or H[ilo,ilo-1] == 0, +// ihi == n-1 or H[ihi+1,ihi] == 0, +// and Dhseqr will panic otherwise. ilo and ihi are typically set by a previous +// call to Dgebal, otherwise they should be set to 0 and n-1, respectively. It +// must hold that +// 0 <= ilo <= ihi < n, if n > 0, +// ilo == 0 and ihi == -1, if n == 0. +// +// wr and wi must have length n. +// +// work must have length at least lwork and lwork must be at least max(1,n) +// otherwise Dhseqr will panic. The minimum lwork delivers very good and +// sometimes optimal performance, although lwork as large as 11*n may be +// required. On return, work[0] will contain the optimal value of lwork. +// +// If lwork is -1, instead of performing Dhseqr, the function only estimates the +// optimal workspace size and stores it into work[0]. Neither h nor z are +// accessed. +// +// unconverged indicates whether Dhseqr computed all the eigenvalues. +// +// If unconverged == 0, all the eigenvalues have been computed and their real +// and imaginary parts will be stored on return in wr and wi, respectively. If +// two eigenvalues are computed as a complex conjugate pair, they are stored in +// consecutive elements of wr and wi, say the i-th and (i+1)th, with wi[i] > 0 +// and wi[i+1] < 0. +// +// If unconverged == 0 and job == lapack.EigenvaluesAndSchur, on return H will +// contain the upper quasi-triangular matrix T from the Schur decomposition (the +// Schur form). 2×2 diagonal blocks (corresponding to complex conjugate pairs of +// eigenvalues) will be returned in standard form, with +// H[i,i] == H[i+1,i+1], +// and +// H[i+1,i]*H[i,i+1] < 0. +// The eigenvalues will be stored in wr and wi in the same order as on the +// diagonal of the Schur form returned in H, with +// wr[i] = H[i,i], +// and, if H[i:i+2,i:i+2] is a 2×2 diagonal block, +// wi[i] = sqrt(-H[i+1,i]*H[i,i+1]), +// wi[i+1] = -wi[i]. +// +// If unconverged == 0 and job == lapack.EigenvaluesOnly, the contents of h +// on return is unspecified. +// +// If unconverged > 0, some eigenvalues have not converged, and the blocks +// [0:ilo] and [unconverged:n] of wr and wi will contain those eigenvalues which +// have been successfully computed. Failures are rare. +// +// If unconverged > 0 and job == lapack.EigenvaluesOnly, on return the +// remaining unconverged eigenvalues are the eigenvalues of the upper Hessenberg +// matrix H[ilo:unconverged,ilo:unconverged]. +// +// If unconverged > 0 and job == lapack.EigenvaluesAndSchur, then on +// return +// (initial H) U = U (final H), (*) +// where U is an orthogonal matrix. The final H is upper Hessenberg and +// H[unconverged:ihi+1,unconverged:ihi+1] is upper quasi-triangular. +// +// If unconverged > 0 and compz == lapack.SchurOrig, then on return +// (final Z) = (initial Z) U, +// where U is the orthogonal matrix in (*) regardless of the value of job. +// +// If unconverged > 0 and compz == lapack.SchurHess, then on return +// (final Z) = U, +// where U is the orthogonal matrix in (*) regardless of the value of job. +// +// References: +// [1] R. Byers. 
LAPACK 3.1 xHSEQR: Tuning and Implementation Notes on the +// Small Bulge Multi-Shift QR Algorithm with Aggressive Early Deflation. +// LAPACK Working Note 187 (2007) +// URL: http://www.netlib.org/lapack/lawnspdf/lawn187.pdf +// [2] K. Braman, R. Byers, R. Mathias. The Multishift QR Algorithm. Part I: +// Maintaining Well-Focused Shifts and Level 3 Performance. SIAM J. Matrix +// Anal. Appl. 23(4) (2002), pp. 929—947 +// URL: http://dx.doi.org/10.1137/S0895479801384573 +// [3] K. Braman, R. Byers, R. Mathias. The Multishift QR Algorithm. Part II: +// Aggressive Early Deflation. SIAM J. Matrix Anal. Appl. 23(4) (2002), pp. 948—973 +// URL: http://dx.doi.org/10.1137/S0895479801384585 +// +// Dhseqr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dhseqr(job lapack.SchurJob, compz lapack.SchurComp, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, z []float64, ldz int, work []float64, lwork int) (unconverged int) { + wantt := job == lapack.EigenvaluesAndSchur + wantz := compz == lapack.SchurHess || compz == lapack.SchurOrig + + switch { + case job != lapack.EigenvaluesOnly && job != lapack.EigenvaluesAndSchur: + panic(badSchurJob) + case compz != lapack.SchurNone && compz != lapack.SchurHess && compz != lapack.SchurOrig: + panic(badSchurComp) + case n < 0: + panic(nLT0) + case ilo < 0 || max(0, n-1) < ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case ldh < max(1, n): + panic(badLdH) + case ldz < 1, wantz && ldz < n: + panic(badLdZ) + case lwork < max(1, n) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return 0 + } + + // Quick return in case of a workspace query. + if lwork == -1 { + impl.Dlaqr04(wantt, wantz, n, ilo, ihi, h, ldh, wr, wi, ilo, ihi, z, ldz, work, -1, 1) + work[0] = math.Max(float64(n), work[0]) + return 0 + } + + switch { + case len(h) < (n-1)*ldh+n: + panic(shortH) + case wantz && len(z) < (n-1)*ldz+n: + panic(shortZ) + case len(wr) < n: + panic(shortWr) + case len(wi) < n: + panic(shortWi) + } + + const ( + // Matrices of order ntiny or smaller must be processed by + // Dlahqr because of insufficient subdiagonal scratch space. + // This is a hard limit. + ntiny = 11 + + // nl is the size of a local workspace to help small matrices + // through a rare Dlahqr failure. nl > ntiny is required and + // nl <= nmin = Ilaenv(ispec=12,...) is recommended (the default + // value of nmin is 75). Using nl = 49 allows up to six + // simultaneous shifts and a 16×16 deflation window. + nl = 49 + ) + + // Copy eigenvalues isolated by Dgebal. + for i := 0; i < ilo; i++ { + wr[i] = h[i*ldh+i] + wi[i] = 0 + } + for i := ihi + 1; i < n; i++ { + wr[i] = h[i*ldh+i] + wi[i] = 0 + } + + // Initialize Z to identity matrix if requested. + if compz == lapack.SchurHess { + impl.Dlaset(blas.All, n, n, 0, 1, z, ldz) + } + + // Quick return if possible. + if ilo == ihi { + wr[ilo] = h[ilo*ldh+ilo] + wi[ilo] = 0 + return 0 + } + + // Dlahqr/Dlaqr04 crossover point. + nmin := impl.Ilaenv(12, "DHSEQR", string(job)+string(compz), n, ilo, ihi, lwork) + nmin = max(ntiny, nmin) + + if n > nmin { + // Dlaqr0 for big matrices. + unconverged = impl.Dlaqr04(wantt, wantz, n, ilo, ihi, h, ldh, wr[:ihi+1], wi[:ihi+1], + ilo, ihi, z, ldz, work, lwork, 1) + } else { + // Dlahqr for small matrices. 
+ unconverged = impl.Dlahqr(wantt, wantz, n, ilo, ihi, h, ldh, wr[:ihi+1], wi[:ihi+1], + ilo, ihi, z, ldz) + if unconverged > 0 { + // A rare Dlahqr failure! Dlaqr04 sometimes succeeds + // when Dlahqr fails. + kbot := unconverged + if n >= nl { + // Larger matrices have enough subdiagonal + // scratch space to call Dlaqr04 directly. + unconverged = impl.Dlaqr04(wantt, wantz, n, ilo, kbot, h, ldh, + wr[:ihi+1], wi[:ihi+1], ilo, ihi, z, ldz, work, lwork, 1) + } else { + // Tiny matrices don't have enough subdiagonal + // scratch space to benefit from Dlaqr04. Hence, + // tiny matrices must be copied into a larger + // array before calling Dlaqr04. + var hl [nl * nl]float64 + impl.Dlacpy(blas.All, n, n, h, ldh, hl[:], nl) + impl.Dlaset(blas.All, nl, nl-n, 0, 0, hl[n:], nl) + var workl [nl]float64 + unconverged = impl.Dlaqr04(wantt, wantz, nl, ilo, kbot, hl[:], nl, + wr[:ihi+1], wi[:ihi+1], ilo, ihi, z, ldz, workl[:], nl, 1) + work[0] = workl[0] + if wantt || unconverged > 0 { + impl.Dlacpy(blas.All, n, n, hl[:], nl, h, ldh) + } + } + } + } + // Zero out under the first subdiagonal, if necessary. + if (wantt || unconverged > 0) && n > 2 { + impl.Dlaset(blas.Lower, n-2, n-2, 0, 0, h[2*ldh:], ldh) + } + + work[0] = math.Max(float64(n), work[0]) + return unconverged +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlabrd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlabrd.go new file mode 100644 index 0000000..babc0b7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlabrd.go @@ -0,0 +1,173 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlabrd reduces the first NB rows and columns of a real general m×n matrix +// A to upper or lower bidiagonal form by an orthogonal transformation +// Q**T * A * P +// If m >= n, A is reduced to upper bidiagonal form and upon exit the elements +// on and below the diagonal in the first nb columns represent the elementary +// reflectors, and the elements above the diagonal in the first nb rows represent +// the matrix P. If m < n, A is reduced to lower bidiagonal form and the elements +// P is instead stored above the diagonal. +// +// The reduction to bidiagonal form is stored in d and e, where d are the diagonal +// elements, and e are the off-diagonal elements. +// +// The matrices Q and P are products of elementary reflectors +// Q = H_0 * H_1 * ... * H_{nb-1} +// P = G_0 * G_1 * ... * G_{nb-1} +// where +// H_i = I - tauQ[i] * v_i * v_i^T +// G_i = I - tauP[i] * u_i * u_i^T +// +// As an example, on exit the entries of A when m = 6, n = 5, and nb = 2 +// [ 1 1 u1 u1 u1] +// [v1 1 1 u2 u2] +// [v1 v2 a a a] +// [v1 v2 a a a] +// [v1 v2 a a a] +// [v1 v2 a a a] +// and when m = 5, n = 6, and nb = 2 +// [ 1 u1 u1 u1 u1 u1] +// [ 1 1 u2 u2 u2 u2] +// [v1 1 a a a a] +// [v1 v2 a a a a] +// [v1 v2 a a a a] +// +// Dlabrd also returns the matrices X and Y which are used with U and V to +// apply the transformation to the unreduced part of the matrix +// A := A - V*Y^T - X*U^T +// and returns the matrices X and Y which are needed to apply the +// transformation to the unreduced part of A. +// +// X is an m×nb matrix, Y is an n×nb matrix. d, e, taup, and tauq must all have +// length at least nb. Dlabrd will panic if these size constraints are violated. +// +// Dlabrd is an internal routine. It is exported for testing purposes. 
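A minimal sketch (not part of the vendored sources) of how Dlabrd's outputs are consumed: after the first nb rows and columns are reduced, the returned X and Y update the unreduced trailing block via A := A - V*Y^T - X*U^T, which is how Dgebrd drives this routine. The sizes match the m = 6, n = 5, nb = 2 illustration above; the matrix entries are arbitrary.

package main

import (
    "fmt"

    "gonum.org/v1/gonum/blas"
    "gonum.org/v1/gonum/blas/blas64"
    "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := gonum.Implementation{}
    bi := blas64.Implementation()

    m, n, nb := 6, 5, 2
    lda := n
    a := make([]float64, m*n) // m×n, row-major, arbitrary entries.
    for i := range a {
        a[i] = float64(i%7) + 1
    }

    d := make([]float64, nb)
    e := make([]float64, nb)
    tauQ := make([]float64, nb)
    tauP := make([]float64, nb)
    ldx, ldy := nb, nb
    x := make([]float64, m*nb) // m×nb
    y := make([]float64, n*nb) // n×nb

    // Reduce the first nb rows and columns of A.
    impl.Dlabrd(m, n, nb, a, lda, d, e, tauQ, tauP, x, ldx, y, ldy)

    // Apply A := A - V*Y^T - X*U^T to the unreduced trailing block
    // A[nb:m, nb:n], mirroring the blocked update used by Dgebrd.
    bi.Dgemm(blas.NoTrans, blas.Trans, m-nb, n-nb, nb,
        -1, a[nb*lda:], lda, y[nb*ldy:], ldy,
        1, a[nb*lda+nb:], lda)
    bi.Dgemm(blas.NoTrans, blas.NoTrans, m-nb, n-nb, nb,
        -1, x[nb*ldx:], ldx, a[nb:], lda,
        1, a[nb*lda+nb:], lda)

    fmt.Println("diagonal:", d, "off-diagonal:", e)
}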
+func (impl Implementation) Dlabrd(m, n, nb int, a []float64, lda int, d, e, tauQ, tauP, x []float64, ldx int, y []float64, ldy int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case nb < 0: + panic(nbLT0) + case nb > n: + panic(nbGTN) + case nb > m: + panic(nbGTM) + case lda < max(1, n): + panic(badLdA) + case ldx < max(1, nb): + panic(badLdX) + case ldy < max(1, nb): + panic(badLdY) + } + + if m == 0 || n == 0 || nb == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(d) < nb: + panic(shortD) + case len(e) < nb: + panic(shortE) + case len(tauQ) < nb: + panic(shortTauQ) + case len(tauP) < nb: + panic(shortTauP) + case len(x) < (m-1)*ldx+nb: + panic(shortX) + case len(y) < (n-1)*ldy+nb: + panic(shortY) + } + + bi := blas64.Implementation() + + if m >= n { + // Reduce to upper bidiagonal form. + for i := 0; i < nb; i++ { + bi.Dgemv(blas.NoTrans, m-i, i, -1, a[i*lda:], lda, y[i*ldy:], 1, 1, a[i*lda+i:], lda) + bi.Dgemv(blas.NoTrans, m-i, i, -1, x[i*ldx:], ldx, a[i:], lda, 1, a[i*lda+i:], lda) + + a[i*lda+i], tauQ[i] = impl.Dlarfg(m-i, a[i*lda+i], a[min(i+1, m-1)*lda+i:], lda) + d[i] = a[i*lda+i] + if i < n-1 { + // Compute Y[i+1:n, i]. + a[i*lda+i] = 1 + bi.Dgemv(blas.Trans, m-i, n-i-1, 1, a[i*lda+i+1:], lda, a[i*lda+i:], lda, 0, y[(i+1)*ldy+i:], ldy) + bi.Dgemv(blas.Trans, m-i, i, 1, a[i*lda:], lda, a[i*lda+i:], lda, 0, y[i:], ldy) + bi.Dgemv(blas.NoTrans, n-i-1, i, -1, y[(i+1)*ldy:], ldy, y[i:], ldy, 1, y[(i+1)*ldy+i:], ldy) + bi.Dgemv(blas.Trans, m-i, i, 1, x[i*ldx:], ldx, a[i*lda+i:], lda, 0, y[i:], ldy) + bi.Dgemv(blas.Trans, i, n-i-1, -1, a[i+1:], lda, y[i:], ldy, 1, y[(i+1)*ldy+i:], ldy) + bi.Dscal(n-i-1, tauQ[i], y[(i+1)*ldy+i:], ldy) + + // Update A[i, i+1:n]. + bi.Dgemv(blas.NoTrans, n-i-1, i+1, -1, y[(i+1)*ldy:], ldy, a[i*lda:], 1, 1, a[i*lda+i+1:], 1) + bi.Dgemv(blas.Trans, i, n-i-1, -1, a[i+1:], lda, x[i*ldx:], 1, 1, a[i*lda+i+1:], 1) + + // Generate reflection P[i] to annihilate A[i, i+2:n]. + a[i*lda+i+1], tauP[i] = impl.Dlarfg(n-i-1, a[i*lda+i+1], a[i*lda+min(i+2, n-1):], 1) + e[i] = a[i*lda+i+1] + a[i*lda+i+1] = 1 + + // Compute X[i+1:m, i]. + bi.Dgemv(blas.NoTrans, m-i-1, n-i-1, 1, a[(i+1)*lda+i+1:], lda, a[i*lda+i+1:], 1, 0, x[(i+1)*ldx+i:], ldx) + bi.Dgemv(blas.Trans, n-i-1, i+1, 1, y[(i+1)*ldy:], ldy, a[i*lda+i+1:], 1, 0, x[i:], ldx) + bi.Dgemv(blas.NoTrans, m-i-1, i+1, -1, a[(i+1)*lda:], lda, x[i:], ldx, 1, x[(i+1)*ldx+i:], ldx) + bi.Dgemv(blas.NoTrans, i, n-i-1, 1, a[i+1:], lda, a[i*lda+i+1:], 1, 0, x[i:], ldx) + bi.Dgemv(blas.NoTrans, m-i-1, i, -1, x[(i+1)*ldx:], ldx, x[i:], ldx, 1, x[(i+1)*ldx+i:], ldx) + bi.Dscal(m-i-1, tauP[i], x[(i+1)*ldx+i:], ldx) + } + } + return + } + // Reduce to lower bidiagonal form. + for i := 0; i < nb; i++ { + // Update A[i,i:n] + bi.Dgemv(blas.NoTrans, n-i, i, -1, y[i*ldy:], ldy, a[i*lda:], 1, 1, a[i*lda+i:], 1) + bi.Dgemv(blas.Trans, i, n-i, -1, a[i:], lda, x[i*ldx:], 1, 1, a[i*lda+i:], 1) + + // Generate reflection P[i] to annihilate A[i, i+1:n] + a[i*lda+i], tauP[i] = impl.Dlarfg(n-i, a[i*lda+i], a[i*lda+min(i+1, n-1):], 1) + d[i] = a[i*lda+i] + if i < m-1 { + a[i*lda+i] = 1 + // Compute X[i+1:m, i]. 
+ bi.Dgemv(blas.NoTrans, m-i-1, n-i, 1, a[(i+1)*lda+i:], lda, a[i*lda+i:], 1, 0, x[(i+1)*ldx+i:], ldx) + bi.Dgemv(blas.Trans, n-i, i, 1, y[i*ldy:], ldy, a[i*lda+i:], 1, 0, x[i:], ldx) + bi.Dgemv(blas.NoTrans, m-i-1, i, -1, a[(i+1)*lda:], lda, x[i:], ldx, 1, x[(i+1)*ldx+i:], ldx) + bi.Dgemv(blas.NoTrans, i, n-i, 1, a[i:], lda, a[i*lda+i:], 1, 0, x[i:], ldx) + bi.Dgemv(blas.NoTrans, m-i-1, i, -1, x[(i+1)*ldx:], ldx, x[i:], ldx, 1, x[(i+1)*ldx+i:], ldx) + bi.Dscal(m-i-1, tauP[i], x[(i+1)*ldx+i:], ldx) + + // Update A[i+1:m, i]. + bi.Dgemv(blas.NoTrans, m-i-1, i, -1, a[(i+1)*lda:], lda, y[i*ldy:], 1, 1, a[(i+1)*lda+i:], lda) + bi.Dgemv(blas.NoTrans, m-i-1, i+1, -1, x[(i+1)*ldx:], ldx, a[i:], lda, 1, a[(i+1)*lda+i:], lda) + + // Generate reflection Q[i] to annihilate A[i+2:m, i]. + a[(i+1)*lda+i], tauQ[i] = impl.Dlarfg(m-i-1, a[(i+1)*lda+i], a[min(i+2, m-1)*lda+i:], lda) + e[i] = a[(i+1)*lda+i] + a[(i+1)*lda+i] = 1 + + // Compute Y[i+1:n, i]. + bi.Dgemv(blas.Trans, m-i-1, n-i-1, 1, a[(i+1)*lda+i+1:], lda, a[(i+1)*lda+i:], lda, 0, y[(i+1)*ldy+i:], ldy) + bi.Dgemv(blas.Trans, m-i-1, i, 1, a[(i+1)*lda:], lda, a[(i+1)*lda+i:], lda, 0, y[i:], ldy) + bi.Dgemv(blas.NoTrans, n-i-1, i, -1, y[(i+1)*ldy:], ldy, y[i:], ldy, 1, y[(i+1)*ldy+i:], ldy) + bi.Dgemv(blas.Trans, m-i-1, i+1, 1, x[(i+1)*ldx:], ldx, a[(i+1)*lda+i:], lda, 0, y[i:], ldy) + bi.Dgemv(blas.Trans, i+1, n-i-1, -1, a[i+1:], lda, y[i:], ldy, 1, y[(i+1)*ldy+i:], ldy) + bi.Dscal(n-i-1, tauQ[i], y[(i+1)*ldy+i:], ldy) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlacn2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlacn2.go new file mode 100644 index 0000000..e8ac1e4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlacn2.go @@ -0,0 +1,134 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlacn2 estimates the 1-norm of an n×n matrix A using sequential updates with +// matrix-vector products provided externally. +// +// Dlacn2 is called sequentially and it returns the value of est and kase to be +// used on the next call. +// On the initial call, kase must be 0. +// In between calls, x must be overwritten by +// A * X if kase was returned as 1, +// A^T * X if kase was returned as 2, +// and all other parameters must not be changed. +// On the final return, kase is returned as 0, v contains A*W where W is a +// vector, and est = norm(V)/norm(W) is a lower bound for 1-norm of A. +// +// v, x, and isgn must all have length n and n must be at least 1, otherwise +// Dlacn2 will panic. isave is used for temporary storage. +// +// Dlacn2 is an internal routine. It is exported for testing purposes. 
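A small sketch (not part of the vendored sources) of the reverse-communication loop described above: Dlacn2 is called repeatedly, and between calls x is overwritten with A*x or A^T*x depending on kase, until kase comes back as 0. The 3×3 matrix is arbitrary example data; its exact 1-norm is 7.

package main

import (
    "fmt"

    "gonum.org/v1/gonum/blas"
    "gonum.org/v1/gonum/blas/blas64"
    "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := gonum.Implementation{}
    bi := blas64.Implementation()

    // 3×3 matrix in row-major order; its true 1-norm is 7
    // (largest absolute column sum, attained by the middle column).
    n := 3
    a := []float64{
        4, -1, 0,
        -1, 4, -1,
        0, -2, 5,
    }

    v := make([]float64, n)
    x := make([]float64, n)
    isgn := make([]int, n)
    tmp := make([]float64, n)
    var isave [3]int
    var est float64
    kase := 0

    // Reverse communication: supply A*x (kase == 1) or A^T*x (kase == 2)
    // until Dlacn2 returns kase == 0.
    for {
        est, kase = impl.Dlacn2(n, v, x, isgn, est, kase, &isave)
        switch kase {
        case 1:
            bi.Dgemv(blas.NoTrans, n, n, 1, a, n, x, 1, 0, tmp, 1)
        case 2:
            bi.Dgemv(blas.Trans, n, n, 1, a, n, x, 1, 0, tmp, 1)
        default:
            fmt.Println("estimated 1-norm (a lower bound for 7):", est)
            return
        }
        copy(x, tmp)
    }
}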
+func (impl Implementation) Dlacn2(n int, v, x []float64, isgn []int, est float64, kase int, isave *[3]int) (float64, int) { + switch { + case n < 1: + panic(nLT1) + case len(v) < n: + panic(shortV) + case len(x) < n: + panic(shortX) + case len(isgn) < n: + panic(shortIsgn) + case isave[0] < 0 || 5 < isave[0]: + panic(badIsave) + case isave[0] == 0 && kase != 0: + panic(badIsave) + } + + const itmax = 5 + bi := blas64.Implementation() + + if kase == 0 { + for i := 0; i < n; i++ { + x[i] = 1 / float64(n) + } + kase = 1 + isave[0] = 1 + return est, kase + } + switch isave[0] { + case 1: + if n == 1 { + v[0] = x[0] + est = math.Abs(v[0]) + kase = 0 + return est, kase + } + est = bi.Dasum(n, x, 1) + for i := 0; i < n; i++ { + x[i] = math.Copysign(1, x[i]) + isgn[i] = int(x[i]) + } + kase = 2 + isave[0] = 2 + return est, kase + case 2: + isave[1] = bi.Idamax(n, x, 1) + isave[2] = 2 + for i := 0; i < n; i++ { + x[i] = 0 + } + x[isave[1]] = 1 + kase = 1 + isave[0] = 3 + return est, kase + case 3: + bi.Dcopy(n, x, 1, v, 1) + estold := est + est = bi.Dasum(n, v, 1) + sameSigns := true + for i := 0; i < n; i++ { + if int(math.Copysign(1, x[i])) != isgn[i] { + sameSigns = false + break + } + } + if !sameSigns && est > estold { + for i := 0; i < n; i++ { + x[i] = math.Copysign(1, x[i]) + isgn[i] = int(x[i]) + } + kase = 2 + isave[0] = 4 + return est, kase + } + case 4: + jlast := isave[1] + isave[1] = bi.Idamax(n, x, 1) + if x[jlast] != math.Abs(x[isave[1]]) && isave[2] < itmax { + isave[2] += 1 + for i := 0; i < n; i++ { + x[i] = 0 + } + x[isave[1]] = 1 + kase = 1 + isave[0] = 3 + return est, kase + } + case 5: + tmp := 2 * (bi.Dasum(n, x, 1)) / float64(3*n) + if tmp > est { + bi.Dcopy(n, x, 1, v, 1) + est = tmp + } + kase = 0 + return est, kase + } + // Iteration complete. Final stage + altsgn := 1.0 + for i := 0; i < n; i++ { + x[i] = altsgn * (1 + float64(i)/float64(n-1)) + altsgn *= -1 + } + kase = 1 + isave[0] = 5 + return est, kase +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlacpy.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlacpy.go new file mode 100644 index 0000000..a37f3b0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlacpy.go @@ -0,0 +1,59 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dlacpy copies the elements of A specified by uplo into B. Uplo can specify +// a triangular portion with blas.Upper or blas.Lower, or can specify all of the +// elemest with blas.All. +// +// Dlacpy is an internal routine. It is exported for testing purposes. 
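A short sketch (not part of the vendored sources) of the uplo semantics described above: with blas.Upper only the upper triangle of A is copied, and entries of B outside that triangle are left untouched. The 3×3 data is arbitrary.

package main

import (
    "fmt"

    "gonum.org/v1/gonum/blas"
    "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := gonum.Implementation{}

    a := []float64{
        1, 2, 3,
        4, 5, 6,
        7, 8, 9,
    }
    // Pre-fill B with a sentinel value so the untouched entries are visible.
    b := []float64{
        -1, -1, -1,
        -1, -1, -1,
        -1, -1, -1,
    }
    impl.Dlacpy(blas.Upper, 3, 3, a, 3, b, 3)
    fmt.Println(b) // [1 2 3 -1 5 6 -1 -1 9]
}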
+func (impl Implementation) Dlacpy(uplo blas.Uplo, m, n int, a []float64, lda int, b []float64, ldb int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower && uplo != blas.All: + panic(badUplo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, n): + panic(badLdB) + } + + if m == 0 || n == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(b) < (m-1)*ldb+n: + panic(shortB) + } + + switch uplo { + case blas.Upper: + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + b[i*ldb+j] = a[i*lda+j] + } + } + case blas.Lower: + for i := 0; i < m; i++ { + for j := 0; j < min(i+1, n); j++ { + b[i*ldb+j] = a[i*lda+j] + } + } + case blas.All: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + b[i*ldb+j] = a[i*lda+j] + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlae2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlae2.go new file mode 100644 index 0000000..c071fec --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlae2.go @@ -0,0 +1,49 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlae2 computes the eigenvalues of a 2×2 symmetric matrix +// [a b] +// [b c] +// and returns the eigenvalue with the larger absolute value as rt1 and the +// smaller as rt2. +// +// Dlae2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlae2(a, b, c float64) (rt1, rt2 float64) { + sm := a + c + df := a - c + adf := math.Abs(df) + tb := b + b + ab := math.Abs(tb) + acmx := c + acmn := a + if math.Abs(a) > math.Abs(c) { + acmx = a + acmn = c + } + var rt float64 + if adf > ab { + rt = adf * math.Sqrt(1+(ab/adf)*(ab/adf)) + } else if adf < ab { + rt = ab * math.Sqrt(1+(adf/ab)*(adf/ab)) + } else { + rt = ab * math.Sqrt2 + } + if sm < 0 { + rt1 = 0.5 * (sm - rt) + rt2 = (acmx/rt1)*acmn - (b/rt1)*b + return rt1, rt2 + } + if sm > 0 { + rt1 = 0.5 * (sm + rt) + rt2 = (acmx/rt1)*acmn - (b/rt1)*b + return rt1, rt2 + } + rt1 = 0.5 * rt + rt2 = -0.5 * rt + return rt1, rt2 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaev2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaev2.go new file mode 100644 index 0000000..74d75b9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaev2.go @@ -0,0 +1,82 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlaev2 computes the Eigen decomposition of a symmetric 2×2 matrix. +// The matrix is given by +// [a b] +// [b c] +// Dlaev2 returns rt1 and rt2, the eigenvalues of the matrix where |RT1| > |RT2|, +// and [cs1, sn1] which is the unit right eigenvalue for RT1. +// [ cs1 sn1] [a b] [cs1 -sn1] = [rt1 0] +// [-sn1 cs1] [b c] [sn1 cs1] [ 0 rt2] +// +// Dlaev2 is an internal routine. It is exported for testing purposes. 
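A short sketch (not part of the vendored sources) exercising Dlaev2 on a fixed symmetric 2×2 matrix: rt1 and rt2 are its eigenvalues (3 and 1 here), and [cs1, sn1] is a unit right eigenvector for rt1, so the residual of (A - rt1*I) applied to it should be at round-off level.

package main

import (
    "fmt"

    "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
    impl := gonum.Implementation{}

    // Symmetric 2×2 matrix [a b; b c] with eigenvalues 3 and 1.
    a, b, c := 2.0, 1.0, 2.0
    rt1, rt2, cs1, sn1 := impl.Dlaev2(a, b, c)

    // Residual of (A - rt1*I) * [cs1, sn1]^T; both components should be ~0.
    r0 := a*cs1 + b*sn1 - rt1*cs1
    r1 := b*cs1 + c*sn1 - rt1*sn1
    fmt.Println("rt1 =", rt1, "rt2 =", rt2, "residual =", r0, r1)
}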
+func (impl Implementation) Dlaev2(a, b, c float64) (rt1, rt2, cs1, sn1 float64) { + sm := a + c + df := a - c + adf := math.Abs(df) + tb := b + b + ab := math.Abs(tb) + acmx := c + acmn := a + if math.Abs(a) > math.Abs(c) { + acmx = a + acmn = c + } + var rt float64 + if adf > ab { + rt = adf * math.Sqrt(1+(ab/adf)*(ab/adf)) + } else if adf < ab { + rt = ab * math.Sqrt(1+(adf/ab)*(adf/ab)) + } else { + rt = ab * math.Sqrt(2) + } + var sgn1 float64 + if sm < 0 { + rt1 = 0.5 * (sm - rt) + sgn1 = -1 + rt2 = (acmx/rt1)*acmn - (b/rt1)*b + } else if sm > 0 { + rt1 = 0.5 * (sm + rt) + sgn1 = 1 + rt2 = (acmx/rt1)*acmn - (b/rt1)*b + } else { + rt1 = 0.5 * rt + rt2 = -0.5 * rt + sgn1 = 1 + } + var cs, sgn2 float64 + if df >= 0 { + cs = df + rt + sgn2 = 1 + } else { + cs = df - rt + sgn2 = -1 + } + acs := math.Abs(cs) + if acs > ab { + ct := -tb / cs + sn1 = 1 / math.Sqrt(1+ct*ct) + cs1 = ct * sn1 + } else { + if ab == 0 { + cs1 = 1 + sn1 = 0 + } else { + tn := -cs / tb + cs1 = 1 / math.Sqrt(1+tn*tn) + sn1 = tn * cs1 + } + } + if sgn1 == sgn2 { + tn := cs1 + cs1 = -sn1 + sn1 = tn + } + return rt1, rt2, cs1, sn1 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaexc.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaexc.go new file mode 100644 index 0000000..2b79bd8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaexc.go @@ -0,0 +1,269 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dlaexc swaps two adjacent diagonal blocks of order 1 or 2 in an n×n upper +// quasi-triangular matrix T by an orthogonal similarity transformation. +// +// T must be in Schur canonical form, that is, block upper triangular with 1×1 +// and 2×2 diagonal blocks; each 2×2 diagonal block has its diagonal elements +// equal and its off-diagonal elements of opposite sign. On return, T will +// contain the updated matrix again in Schur canonical form. +// +// If wantq is true, the transformation is accumulated in the n×n matrix Q, +// otherwise Q is not referenced. +// +// j1 is the index of the first row of the first block. n1 and n2 are the order +// of the first and second block, respectively. +// +// work must have length at least n, otherwise Dlaexc will panic. +// +// If ok is false, the transformed matrix T would be too far from Schur form. +// The blocks are not swapped, and T and Q are not modified. +// +// If n1 and n2 are both equal to 1, Dlaexc will always return true. +// +// Dlaexc is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaexc(wantq bool, n int, t []float64, ldt int, q []float64, ldq int, j1, n1, n2 int, work []float64) (ok bool) { + switch { + case n < 0: + panic(nLT0) + case ldt < max(1, n): + panic(badLdT) + case wantq && ldt < max(1, n): + panic(badLdQ) + case j1 < 0 || n <= j1: + panic(badJ1) + case len(work) < n: + panic(shortWork) + case n1 < 0 || 2 < n1: + panic(badN1) + case n2 < 0 || 2 < n2: + panic(badN2) + } + + if n == 0 || n1 == 0 || n2 == 0 { + return true + } + + switch { + case len(t) < (n-1)*ldt+n: + panic(shortT) + case wantq && len(q) < (n-1)*ldq+n: + panic(shortQ) + } + + if j1+n1 >= n { + // TODO(vladimir-ch): Reference LAPACK does this check whether + // the start of the second block is in the matrix T. 
It returns + // true if it is not and moreover it does not check whether the + // whole second block fits into T. This does not feel + // satisfactory. The only caller of Dlaexc is Dtrexc, so if the + // caller makes sure that this does not happen, we could be + // stricter here. + return true + } + + j2 := j1 + 1 + j3 := j1 + 2 + + bi := blas64.Implementation() + + if n1 == 1 && n2 == 1 { + // Swap two 1×1 blocks. + t11 := t[j1*ldt+j1] + t22 := t[j2*ldt+j2] + + // Determine the transformation to perform the interchange. + cs, sn, _ := impl.Dlartg(t[j1*ldt+j2], t22-t11) + + // Apply transformation to the matrix T. + if n-j3 > 0 { + bi.Drot(n-j3, t[j1*ldt+j3:], 1, t[j2*ldt+j3:], 1, cs, sn) + } + if j1 > 0 { + bi.Drot(j1, t[j1:], ldt, t[j2:], ldt, cs, sn) + } + + t[j1*ldt+j1] = t22 + t[j2*ldt+j2] = t11 + + if wantq { + // Accumulate transformation in the matrix Q. + bi.Drot(n, q[j1:], ldq, q[j2:], ldq, cs, sn) + } + + return true + } + + // Swapping involves at least one 2×2 block. + // + // Copy the diagonal block of order n1+n2 to the local array d and + // compute its norm. + nd := n1 + n2 + var d [16]float64 + const ldd = 4 + impl.Dlacpy(blas.All, nd, nd, t[j1*ldt+j1:], ldt, d[:], ldd) + dnorm := impl.Dlange(lapack.MaxAbs, nd, nd, d[:], ldd, work) + + // Compute machine-dependent threshold for test for accepting swap. + eps := dlamchP + thresh := math.Max(10*eps*dnorm, dlamchS/eps) + + // Solve T11*X - X*T22 = scale*T12 for X. + var x [4]float64 + const ldx = 2 + scale, _, _ := impl.Dlasy2(false, false, -1, n1, n2, d[:], ldd, d[n1*ldd+n1:], ldd, d[n1:], ldd, x[:], ldx) + + // Swap the adjacent diagonal blocks. + switch { + case n1 == 1 && n2 == 2: + // Generate elementary reflector H so that + // ( scale, X11, X12 ) H = ( 0, 0, * ) + u := [3]float64{scale, x[0], 1} + _, tau := impl.Dlarfg(3, x[1], u[:2], 1) + t11 := t[j1*ldt+j1] + + // Perform swap provisionally on diagonal block in d. + impl.Dlarfx(blas.Left, 3, 3, u[:], tau, d[:], ldd, work) + impl.Dlarfx(blas.Right, 3, 3, u[:], tau, d[:], ldd, work) + + // Test whether to reject swap. + if math.Max(math.Abs(d[2*ldd]), math.Max(math.Abs(d[2*ldd+1]), math.Abs(d[2*ldd+2]-t11))) > thresh { + return false + } + + // Accept swap: apply transformation to the entire matrix T. + impl.Dlarfx(blas.Left, 3, n-j1, u[:], tau, t[j1*ldt+j1:], ldt, work) + impl.Dlarfx(blas.Right, j2+1, 3, u[:], tau, t[j1:], ldt, work) + + t[j3*ldt+j1] = 0 + t[j3*ldt+j2] = 0 + t[j3*ldt+j3] = t11 + + if wantq { + // Accumulate transformation in the matrix Q. + impl.Dlarfx(blas.Right, n, 3, u[:], tau, q[j1:], ldq, work) + } + + case n1 == 2 && n2 == 1: + // Generate elementary reflector H so that: + // H ( -X11 ) = ( * ) + // ( -X21 ) = ( 0 ) + // ( scale ) = ( 0 ) + u := [3]float64{1, -x[ldx], scale} + _, tau := impl.Dlarfg(3, -x[0], u[1:], 1) + t33 := t[j3*ldt+j3] + + // Perform swap provisionally on diagonal block in D. + impl.Dlarfx(blas.Left, 3, 3, u[:], tau, d[:], ldd, work) + impl.Dlarfx(blas.Right, 3, 3, u[:], tau, d[:], ldd, work) + + // Test whether to reject swap. + if math.Max(math.Abs(d[ldd]), math.Max(math.Abs(d[2*ldd]), math.Abs(d[0]-t33))) > thresh { + return false + } + + // Accept swap: apply transformation to the entire matrix T. + impl.Dlarfx(blas.Right, j3+1, 3, u[:], tau, t[j1:], ldt, work) + impl.Dlarfx(blas.Left, 3, n-j1-1, u[:], tau, t[j1*ldt+j2:], ldt, work) + + t[j1*ldt+j1] = t33 + t[j2*ldt+j1] = 0 + t[j3*ldt+j1] = 0 + + if wantq { + // Accumulate transformation in the matrix Q. 
+ impl.Dlarfx(blas.Right, n, 3, u[:], tau, q[j1:], ldq, work) + } + + default: // n1 == 2 && n2 == 2 + // Generate elementary reflectors H_1 and H_2 so that: + // H_2 H_1 ( -X11 -X12 ) = ( * * ) + // ( -X21 -X22 ) ( 0 * ) + // ( scale 0 ) ( 0 0 ) + // ( 0 scale ) ( 0 0 ) + u1 := [3]float64{1, -x[ldx], scale} + _, tau1 := impl.Dlarfg(3, -x[0], u1[1:], 1) + + temp := -tau1 * (x[1] + u1[1]*x[ldx+1]) + u2 := [3]float64{1, -temp * u1[2], scale} + _, tau2 := impl.Dlarfg(3, -temp*u1[1]-x[ldx+1], u2[1:], 1) + + // Perform swap provisionally on diagonal block in D. + impl.Dlarfx(blas.Left, 3, 4, u1[:], tau1, d[:], ldd, work) + impl.Dlarfx(blas.Right, 4, 3, u1[:], tau1, d[:], ldd, work) + impl.Dlarfx(blas.Left, 3, 4, u2[:], tau2, d[ldd:], ldd, work) + impl.Dlarfx(blas.Right, 4, 3, u2[:], tau2, d[1:], ldd, work) + + // Test whether to reject swap. + m1 := math.Max(math.Abs(d[2*ldd]), math.Abs(d[2*ldd+1])) + m2 := math.Max(math.Abs(d[3*ldd]), math.Abs(d[3*ldd+1])) + if math.Max(m1, m2) > thresh { + return false + } + + // Accept swap: apply transformation to the entire matrix T. + j4 := j1 + 3 + impl.Dlarfx(blas.Left, 3, n-j1, u1[:], tau1, t[j1*ldt+j1:], ldt, work) + impl.Dlarfx(blas.Right, j4+1, 3, u1[:], tau1, t[j1:], ldt, work) + impl.Dlarfx(blas.Left, 3, n-j1, u2[:], tau2, t[j2*ldt+j1:], ldt, work) + impl.Dlarfx(blas.Right, j4+1, 3, u2[:], tau2, t[j2:], ldt, work) + + t[j3*ldt+j1] = 0 + t[j3*ldt+j2] = 0 + t[j4*ldt+j1] = 0 + t[j4*ldt+j2] = 0 + + if wantq { + // Accumulate transformation in the matrix Q. + impl.Dlarfx(blas.Right, n, 3, u1[:], tau1, q[j1:], ldq, work) + impl.Dlarfx(blas.Right, n, 3, u2[:], tau2, q[j2:], ldq, work) + } + } + + if n2 == 2 { + // Standardize new 2×2 block T11. + a, b := t[j1*ldt+j1], t[j1*ldt+j2] + c, d := t[j2*ldt+j1], t[j2*ldt+j2] + var cs, sn float64 + t[j1*ldt+j1], t[j1*ldt+j2], t[j2*ldt+j1], t[j2*ldt+j2], _, _, _, _, cs, sn = impl.Dlanv2(a, b, c, d) + if n-j1-2 > 0 { + bi.Drot(n-j1-2, t[j1*ldt+j1+2:], 1, t[j2*ldt+j1+2:], 1, cs, sn) + } + if j1 > 0 { + bi.Drot(j1, t[j1:], ldt, t[j2:], ldt, cs, sn) + } + if wantq { + bi.Drot(n, q[j1:], ldq, q[j2:], ldq, cs, sn) + } + } + if n1 == 2 { + // Standardize new 2×2 block T22. + j3 := j1 + n2 + j4 := j3 + 1 + a, b := t[j3*ldt+j3], t[j3*ldt+j4] + c, d := t[j4*ldt+j3], t[j4*ldt+j4] + var cs, sn float64 + t[j3*ldt+j3], t[j3*ldt+j4], t[j4*ldt+j3], t[j4*ldt+j4], _, _, _, _, cs, sn = impl.Dlanv2(a, b, c, d) + if n-j3-2 > 0 { + bi.Drot(n-j3-2, t[j3*ldt+j3+2:], 1, t[j4*ldt+j3+2:], 1, cs, sn) + } + bi.Drot(j3, t[j3:], ldt, t[j4:], ldt, cs, sn) + if wantq { + bi.Drot(n, q[j3:], ldq, q[j4:], ldq, cs, sn) + } + } + + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlags2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlags2.go new file mode 100644 index 0000000..6954deb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlags2.go @@ -0,0 +1,182 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlags2 computes 2-by-2 orthogonal matrices U, V and Q with the +// triangles of A and B specified by upper. +// +// If upper is true +// +// U^T*A*Q = U^T*[ a1 a2 ]*Q = [ x 0 ] +// [ 0 a3 ] [ x x ] +// and +// V^T*B*Q = V^T*[ b1 b2 ]*Q = [ x 0 ] +// [ 0 b3 ] [ x x ] +// +// otherwise +// +// U^T*A*Q = U^T*[ a1 0 ]*Q = [ x x ] +// [ a2 a3 ] [ 0 x ] +// and +// V^T*B*Q = V^T*[ b1 0 ]*Q = [ x x ] +// [ b2 b3 ] [ 0 x ]. 
+// +// The rows of the transformed A and B are parallel, where +// +// U = [ csu snu ], V = [ csv snv ], Q = [ csq snq ] +// [ -snu csu ] [ -snv csv ] [ -snq csq ] +// +// Dlags2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlags2(upper bool, a1, a2, a3, b1, b2, b3 float64) (csu, snu, csv, snv, csq, snq float64) { + if upper { + // Input matrices A and B are upper triangular matrices. + // + // Form matrix C = A*adj(B) = [ a b ] + // [ 0 d ] + a := a1 * b3 + d := a3 * b1 + b := a2*b1 - a1*b2 + + // The SVD of real 2-by-2 triangular C. + // + // [ csl -snl ]*[ a b ]*[ csr snr ] = [ r 0 ] + // [ snl csl ] [ 0 d ] [ -snr csr ] [ 0 t ] + _, _, snr, csr, snl, csl := impl.Dlasv2(a, b, d) + + if math.Abs(csl) >= math.Abs(snl) || math.Abs(csr) >= math.Abs(snr) { + // Compute the [0, 0] and [0, 1] elements of U^T*A and V^T*B, + // and [0, 1] element of |U|^T*|A| and |V|^T*|B|. + + ua11r := csl * a1 + ua12 := csl*a2 + snl*a3 + + vb11r := csr * b1 + vb12 := csr*b2 + snr*b3 + + aua12 := math.Abs(csl)*math.Abs(a2) + math.Abs(snl)*math.Abs(a3) + avb12 := math.Abs(csr)*math.Abs(b2) + math.Abs(snr)*math.Abs(b3) + + // Zero [0, 1] elements of U^T*A and V^T*B. + if math.Abs(ua11r)+math.Abs(ua12) != 0 { + if aua12/(math.Abs(ua11r)+math.Abs(ua12)) <= avb12/(math.Abs(vb11r)+math.Abs(vb12)) { + csq, snq, _ = impl.Dlartg(-ua11r, ua12) + } else { + csq, snq, _ = impl.Dlartg(-vb11r, vb12) + } + } else { + csq, snq, _ = impl.Dlartg(-vb11r, vb12) + } + + csu = csl + snu = -snl + csv = csr + snv = -snr + } else { + // Compute the [1, 0] and [1, 1] elements of U^T*A and V^T*B, + // and [1, 1] element of |U|^T*|A| and |V|^T*|B|. + + ua21 := -snl * a1 + ua22 := -snl*a2 + csl*a3 + + vb21 := -snr * b1 + vb22 := -snr*b2 + csr*b3 + + aua22 := math.Abs(snl)*math.Abs(a2) + math.Abs(csl)*math.Abs(a3) + avb22 := math.Abs(snr)*math.Abs(b2) + math.Abs(csr)*math.Abs(b3) + + // Zero [1, 1] elements of U^T*A and V^T*B, and then swap. + if math.Abs(ua21)+math.Abs(ua22) != 0 { + if aua22/(math.Abs(ua21)+math.Abs(ua22)) <= avb22/(math.Abs(vb21)+math.Abs(vb22)) { + csq, snq, _ = impl.Dlartg(-ua21, ua22) + } else { + csq, snq, _ = impl.Dlartg(-vb21, vb22) + } + } else { + csq, snq, _ = impl.Dlartg(-vb21, vb22) + } + + csu = snl + snu = csl + csv = snr + snv = csr + } + } else { + // Input matrices A and B are lower triangular matrices + // + // Form matrix C = A*adj(B) = [ a 0 ] + // [ c d ] + a := a1 * b3 + d := a3 * b1 + c := a2*b3 - a3*b2 + + // The SVD of real 2-by-2 triangular C + // + // [ csl -snl ]*[ a 0 ]*[ csr snr ] = [ r 0 ] + // [ snl csl ] [ c d ] [ -snr csr ] [ 0 t ] + _, _, snr, csr, snl, csl := impl.Dlasv2(a, c, d) + + if math.Abs(csr) >= math.Abs(snr) || math.Abs(csl) >= math.Abs(snl) { + // Compute the [1, 0] and [1, 1] elements of U^T*A and V^T*B, + // and [1, 0] element of |U|^T*|A| and |V|^T*|B|. + + ua21 := -snr*a1 + csr*a2 + ua22r := csr * a3 + + vb21 := -snl*b1 + csl*b2 + vb22r := csl * b3 + + aua21 := math.Abs(snr)*math.Abs(a1) + math.Abs(csr)*math.Abs(a2) + avb21 := math.Abs(snl)*math.Abs(b1) + math.Abs(csl)*math.Abs(b2) + + // Zero [1, 0] elements of U^T*A and V^T*B. 
+ if (math.Abs(ua21) + math.Abs(ua22r)) != 0 { + if aua21/(math.Abs(ua21)+math.Abs(ua22r)) <= avb21/(math.Abs(vb21)+math.Abs(vb22r)) { + csq, snq, _ = impl.Dlartg(ua22r, ua21) + } else { + csq, snq, _ = impl.Dlartg(vb22r, vb21) + } + } else { + csq, snq, _ = impl.Dlartg(vb22r, vb21) + } + + csu = csr + snu = -snr + csv = csl + snv = -snl + } else { + // Compute the [0, 0] and [0, 1] elements of U^T *A and V^T *B, + // and [0, 0] element of |U|^T*|A| and |V|^T*|B|. + + ua11 := csr*a1 + snr*a2 + ua12 := snr * a3 + + vb11 := csl*b1 + snl*b2 + vb12 := snl * b3 + + aua11 := math.Abs(csr)*math.Abs(a1) + math.Abs(snr)*math.Abs(a2) + avb11 := math.Abs(csl)*math.Abs(b1) + math.Abs(snl)*math.Abs(b2) + + // Zero [0, 0] elements of U^T*A and V^T*B, and then swap. + if (math.Abs(ua11) + math.Abs(ua12)) != 0 { + if aua11/(math.Abs(ua11)+math.Abs(ua12)) <= avb11/(math.Abs(vb11)+math.Abs(vb12)) { + csq, snq, _ = impl.Dlartg(ua12, ua11) + } else { + csq, snq, _ = impl.Dlartg(vb12, vb11) + } + } else { + csq, snq, _ = impl.Dlartg(vb12, vb11) + } + + csu = snr + snu = csr + csv = snl + snv = csl + } + } + + return csu, snu, csv, snv, csq, snq +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlahqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlahqr.go new file mode 100644 index 0000000..00a869b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlahqr.go @@ -0,0 +1,431 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlahqr computes the eigenvalues and Schur factorization of a block of an n×n +// upper Hessenberg matrix H, using the double-shift/single-shift QR algorithm. +// +// h and ldh represent the matrix H. Dlahqr works primarily with the Hessenberg +// submatrix H[ilo:ihi+1,ilo:ihi+1], but applies transformations to all of H if +// wantt is true. It is assumed that H[ihi+1:n,ihi+1:n] is already upper +// quasi-triangular, although this is not checked. +// +// It must hold that +// 0 <= ilo <= max(0,ihi), and ihi < n, +// and that +// H[ilo,ilo-1] == 0, if ilo > 0, +// otherwise Dlahqr will panic. +// +// If unconverged is zero on return, wr[ilo:ihi+1] and wi[ilo:ihi+1] will contain +// respectively the real and imaginary parts of the computed eigenvalues ilo +// to ihi. If two eigenvalues are computed as a complex conjugate pair, they are +// stored in consecutive elements of wr and wi, say the i-th and (i+1)th, with +// wi[i] > 0 and wi[i+1] < 0. If wantt is true, the eigenvalues are stored in +// the same order as on the diagonal of the Schur form returned in H, with +// wr[i] = H[i,i], and, if H[i:i+2,i:i+2] is a 2×2 diagonal block, +// wi[i] = sqrt(abs(H[i+1,i]*H[i,i+1])) and wi[i+1] = -wi[i]. +// +// wr and wi must have length ihi+1. +// +// z and ldz represent an n×n matrix Z. If wantz is true, the transformations +// will be applied to the submatrix Z[iloz:ihiz+1,ilo:ihi+1] and it must hold that +// 0 <= iloz <= ilo, and ihi <= ihiz < n. +// If wantz is false, z is not referenced. +// +// unconverged indicates whether Dlahqr computed all the eigenvalues ilo to ihi +// in a total of 30 iterations per eigenvalue. +// +// If unconverged is zero, all the eigenvalues ilo to ihi have been computed and +// will be stored on return in wr[ilo:ihi+1] and wi[ilo:ihi+1]. 
+// +// If unconverged is zero and wantt is true, H[ilo:ihi+1,ilo:ihi+1] will be +// overwritten on return by upper quasi-triangular full Schur form with any +// 2×2 diagonal blocks in standard form. +// +// If unconverged is zero and if wantt is false, the contents of h on return is +// unspecified. +// +// If unconverged is positive, some eigenvalues have not converged, and +// wr[unconverged:ihi+1] and wi[unconverged:ihi+1] contain those eigenvalues +// which have been successfully computed. +// +// If unconverged is positive and wantt is true, then on return +// (initial H)*U = U*(final H), (*) +// where U is an orthogonal matrix. The final H is upper Hessenberg and +// H[unconverged:ihi+1,unconverged:ihi+1] is upper quasi-triangular. +// +// If unconverged is positive and wantt is false, on return the remaining +// unconverged eigenvalues are the eigenvalues of the upper Hessenberg matrix +// H[ilo:unconverged,ilo:unconverged]. +// +// If unconverged is positive and wantz is true, then on return +// (final Z) = (initial Z)*U, +// where U is the orthogonal matrix in (*) regardless of the value of wantt. +// +// Dlahqr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlahqr(wantt, wantz bool, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, iloz, ihiz int, z []float64, ldz int) (unconverged int) { + switch { + case n < 0: + panic(nLT0) + case ilo < 0, max(0, ihi) < ilo: + panic(badIlo) + case ihi >= n: + panic(badIhi) + case ldh < max(1, n): + panic(badLdH) + case wantz && (iloz < 0 || ilo < iloz): + panic(badIloz) + case wantz && (ihiz < ihi || n <= ihiz): + panic(badIhiz) + case ldz < 1, wantz && ldz < n: + panic(badLdZ) + } + + // Quick return if possible. + if n == 0 { + return 0 + } + + switch { + case len(h) < (n-1)*ldh+n: + panic(shortH) + case len(wr) != ihi+1: + panic(shortWr) + case len(wi) != ihi+1: + panic(shortWi) + case wantz && len(z) < (n-1)*ldz+n: + panic(shortZ) + case ilo > 0 && h[ilo*ldh+ilo-1] != 0: + panic(notIsolated) + } + + if ilo == ihi { + wr[ilo] = h[ilo*ldh+ilo] + wi[ilo] = 0 + return 0 + } + + // Clear out the trash. + for j := ilo; j < ihi-2; j++ { + h[(j+2)*ldh+j] = 0 + h[(j+3)*ldh+j] = 0 + } + if ilo <= ihi-2 { + h[ihi*ldh+ihi-2] = 0 + } + + nh := ihi - ilo + 1 + nz := ihiz - iloz + 1 + + // Set machine-dependent constants for the stopping criterion. + ulp := dlamchP + smlnum := float64(nh) / ulp * dlamchS + + // i1 and i2 are the indices of the first row and last column of H to + // which transformations must be applied. If eigenvalues only are being + // computed, i1 and i2 are set inside the main loop. + var i1, i2 int + if wantt { + i1 = 0 + i2 = n - 1 + } + + itmax := 30 * max(10, nh) // Total number of QR iterations allowed. + + // The main loop begins here. i is the loop index and decreases from ihi + // to ilo in steps of 1 or 2. Each iteration of the loop works with the + // active submatrix in rows and columns l to i. Eigenvalues i+1 to ihi + // have already converged. Either l = ilo or H[l,l-1] is negligible so + // that the matrix splits. + bi := blas64.Implementation() + i := ihi + for i >= ilo { + l := ilo + + // Perform QR iterations on rows and columns ilo to i until a + // submatrix of order 1 or 2 splits off at the bottom because a + // subdiagonal element has become negligible. + converged := false + for its := 0; its <= itmax; its++ { + // Look for a single small subdiagonal element. 
+ var k int + for k = i; k > l; k-- { + if math.Abs(h[k*ldh+k-1]) <= smlnum { + break + } + tst := math.Abs(h[(k-1)*ldh+k-1]) + math.Abs(h[k*ldh+k]) + if tst == 0 { + if k-2 >= ilo { + tst += math.Abs(h[(k-1)*ldh+k-2]) + } + if k+1 <= ihi { + tst += math.Abs(h[(k+1)*ldh+k]) + } + } + // The following is a conservative small + // subdiagonal deflation criterion due to Ahues + // & Tisseur (LAWN 122, 1997). It has better + // mathematical foundation and improves accuracy + // in some cases. + if math.Abs(h[k*ldh+k-1]) <= ulp*tst { + ab := math.Max(math.Abs(h[k*ldh+k-1]), math.Abs(h[(k-1)*ldh+k])) + ba := math.Min(math.Abs(h[k*ldh+k-1]), math.Abs(h[(k-1)*ldh+k])) + aa := math.Max(math.Abs(h[k*ldh+k]), math.Abs(h[(k-1)*ldh+k-1]-h[k*ldh+k])) + bb := math.Min(math.Abs(h[k*ldh+k]), math.Abs(h[(k-1)*ldh+k-1]-h[k*ldh+k])) + s := aa + ab + if ab/s*ba <= math.Max(smlnum, aa/s*bb*ulp) { + break + } + } + } + l = k + if l > ilo { + // H[l,l-1] is negligible. + h[l*ldh+l-1] = 0 + } + if l >= i-1 { + // Break the loop because a submatrix of order 1 + // or 2 has split off. + converged = true + break + } + + // Now the active submatrix is in rows and columns l to + // i. If eigenvalues only are being computed, only the + // active submatrix need be transformed. + if !wantt { + i1 = l + i2 = i + } + + const ( + dat1 = 3.0 + dat2 = -0.4375 + ) + var h11, h21, h12, h22 float64 + switch its { + case 10: // Exceptional shift. + s := math.Abs(h[(l+1)*ldh+l]) + math.Abs(h[(l+2)*ldh+l+1]) + h11 = dat1*s + h[l*ldh+l] + h12 = dat2 * s + h21 = s + h22 = h11 + case 20: // Exceptional shift. + s := math.Abs(h[i*ldh+i-1]) + math.Abs(h[(i-1)*ldh+i-2]) + h11 = dat1*s + h[i*ldh+i] + h12 = dat2 * s + h21 = s + h22 = h11 + default: // Prepare to use Francis' double shift (i.e., + // 2nd degree generalized Rayleigh quotient). + h11 = h[(i-1)*ldh+i-1] + h21 = h[i*ldh+i-1] + h12 = h[(i-1)*ldh+i] + h22 = h[i*ldh+i] + } + s := math.Abs(h11) + math.Abs(h12) + math.Abs(h21) + math.Abs(h22) + var ( + rt1r, rt1i float64 + rt2r, rt2i float64 + ) + if s != 0 { + h11 /= s + h21 /= s + h12 /= s + h22 /= s + tr := (h11 + h22) / 2 + det := (h11-tr)*(h22-tr) - h12*h21 + rtdisc := math.Sqrt(math.Abs(det)) + if det >= 0 { + // Complex conjugate shifts. + rt1r = tr * s + rt2r = rt1r + rt1i = rtdisc * s + rt2i = -rt1i + } else { + // Real shifts (use only one of them). + rt1r = tr + rtdisc + rt2r = tr - rtdisc + if math.Abs(rt1r-h22) <= math.Abs(rt2r-h22) { + rt1r *= s + rt2r = rt1r + } else { + rt2r *= s + rt1r = rt2r + } + rt1i = 0 + rt2i = 0 + } + } + + // Look for two consecutive small subdiagonal elements. + var m int + var v [3]float64 + for m = i - 2; m >= l; m-- { + // Determine the effect of starting the + // double-shift QR iteration at row m, and see + // if this would make H[m,m-1] negligible. The + // following uses scaling to avoid overflows and + // most underflows. + h21s := h[(m+1)*ldh+m] + s := math.Abs(h[m*ldh+m]-rt2r) + math.Abs(rt2i) + math.Abs(h21s) + h21s /= s + v[0] = h21s*h[m*ldh+m+1] + (h[m*ldh+m]-rt1r)*((h[m*ldh+m]-rt2r)/s) - rt2i/s*rt1i + v[1] = h21s * (h[m*ldh+m] + h[(m+1)*ldh+m+1] - rt1r - rt2r) + v[2] = h21s * h[(m+2)*ldh+m+1] + s = math.Abs(v[0]) + math.Abs(v[1]) + math.Abs(v[2]) + v[0] /= s + v[1] /= s + v[2] /= s + if m == l { + break + } + dsum := math.Abs(h[(m-1)*ldh+m-1]) + math.Abs(h[m*ldh+m]) + math.Abs(h[(m+1)*ldh+m+1]) + if math.Abs(h[m*ldh+m-1])*(math.Abs(v[1])+math.Abs(v[2])) <= ulp*math.Abs(v[0])*dsum { + break + } + } + + // Double-shift QR step. 
+ for k := m; k < i; k++ { + // The first iteration of this loop determines a + // reflection G from the vector V and applies it + // from left and right to H, thus creating a + // non-zero bulge below the subdiagonal. + // + // Each subsequent iteration determines a + // reflection G to restore the Hessenberg form + // in the (k-1)th column, and thus chases the + // bulge one step toward the bottom of the + // active submatrix. nr is the order of G. + + nr := min(3, i-k+1) + if k > m { + bi.Dcopy(nr, h[k*ldh+k-1:], ldh, v[:], 1) + } + var t0 float64 + v[0], t0 = impl.Dlarfg(nr, v[0], v[1:], 1) + if k > m { + h[k*ldh+k-1] = v[0] + h[(k+1)*ldh+k-1] = 0 + if k < i-1 { + h[(k+2)*ldh+k-1] = 0 + } + } else if m > l { + // Use the following instead of H[k,k-1] = -H[k,k-1] + // to avoid a bug when v[1] and v[2] underflow. + h[k*ldh+k-1] *= 1 - t0 + } + t1 := t0 * v[1] + if nr == 3 { + t2 := t0 * v[2] + + // Apply G from the left to transform + // the rows of the matrix in columns k + // to i2. + for j := k; j <= i2; j++ { + sum := h[k*ldh+j] + v[1]*h[(k+1)*ldh+j] + v[2]*h[(k+2)*ldh+j] + h[k*ldh+j] -= sum * t0 + h[(k+1)*ldh+j] -= sum * t1 + h[(k+2)*ldh+j] -= sum * t2 + } + + // Apply G from the right to transform + // the columns of the matrix in rows i1 + // to min(k+3,i). + for j := i1; j <= min(k+3, i); j++ { + sum := h[j*ldh+k] + v[1]*h[j*ldh+k+1] + v[2]*h[j*ldh+k+2] + h[j*ldh+k] -= sum * t0 + h[j*ldh+k+1] -= sum * t1 + h[j*ldh+k+2] -= sum * t2 + } + + if wantz { + // Accumulate transformations in the matrix Z. + for j := iloz; j <= ihiz; j++ { + sum := z[j*ldz+k] + v[1]*z[j*ldz+k+1] + v[2]*z[j*ldz+k+2] + z[j*ldz+k] -= sum * t0 + z[j*ldz+k+1] -= sum * t1 + z[j*ldz+k+2] -= sum * t2 + } + } + } else if nr == 2 { + // Apply G from the left to transform + // the rows of the matrix in columns k + // to i2. + for j := k; j <= i2; j++ { + sum := h[k*ldh+j] + v[1]*h[(k+1)*ldh+j] + h[k*ldh+j] -= sum * t0 + h[(k+1)*ldh+j] -= sum * t1 + } + + // Apply G from the right to transform + // the columns of the matrix in rows i1 + // to min(k+3,i). + for j := i1; j <= i; j++ { + sum := h[j*ldh+k] + v[1]*h[j*ldh+k+1] + h[j*ldh+k] -= sum * t0 + h[j*ldh+k+1] -= sum * t1 + } + + if wantz { + // Accumulate transformations in the matrix Z. + for j := iloz; j <= ihiz; j++ { + sum := z[j*ldz+k] + v[1]*z[j*ldz+k+1] + z[j*ldz+k] -= sum * t0 + z[j*ldz+k+1] -= sum * t1 + } + } + } + } + } + + if !converged { + // The QR iteration finished without splitting off a + // submatrix of order 1 or 2. + return i + 1 + } + + if l == i { + // H[i,i-1] is negligible: one eigenvalue has converged. + wr[i] = h[i*ldh+i] + wi[i] = 0 + } else if l == i-1 { + // H[i-1,i-2] is negligible: a pair of eigenvalues have converged. + + // Transform the 2×2 submatrix to standard Schur form, + // and compute and store the eigenvalues. + var cs, sn float64 + a, b := h[(i-1)*ldh+i-1], h[(i-1)*ldh+i] + c, d := h[i*ldh+i-1], h[i*ldh+i] + a, b, c, d, wr[i-1], wi[i-1], wr[i], wi[i], cs, sn = impl.Dlanv2(a, b, c, d) + h[(i-1)*ldh+i-1], h[(i-1)*ldh+i] = a, b + h[i*ldh+i-1], h[i*ldh+i] = c, d + + if wantt { + // Apply the transformation to the rest of H. + if i2 > i { + bi.Drot(i2-i, h[(i-1)*ldh+i+1:], 1, h[i*ldh+i+1:], 1, cs, sn) + } + bi.Drot(i-i1-1, h[i1*ldh+i-1:], ldh, h[i1*ldh+i:], ldh, cs, sn) + } + + if wantz { + // Apply the transformation to Z. + bi.Drot(nz, z[iloz*ldz+i-1:], ldz, z[iloz*ldz+i:], ldz, cs, sn) + } + } + + // Return to start of the main loop with new value of i. 
+ i = l - 1 + } + return 0 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlahr2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlahr2.go new file mode 100644 index 0000000..a47dc8f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlahr2.go @@ -0,0 +1,195 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlahr2 reduces the first nb columns of a real general n×(n-k+1) matrix A so +// that elements below the k-th subdiagonal are zero. The reduction is performed +// by an orthogonal similarity transformation Q^T * A * Q. Dlahr2 returns the +// matrices V and T which determine Q as a block reflector I - V*T*V^T, and +// also the matrix Y = A * V * T. +// +// The matrix Q is represented as a product of nb elementary reflectors +// Q = H_0 * H_1 * ... * H_{nb-1}. +// Each H_i has the form +// H_i = I - tau[i] * v * v^T, +// where v is a real vector with v[0:i+k-1] = 0 and v[i+k-1] = 1. v[i+k:n] is +// stored on exit in A[i+k+1:n,i]. +// +// The elements of the vectors v together form the (n-k+1)×nb matrix +// V which is needed, with T and Y, to apply the transformation to the +// unreduced part of the matrix, using an update of the form +// A = (I - V*T*V^T) * (A - Y*V^T). +// +// On entry, a contains the n×(n-k+1) general matrix A. On return, the elements +// on and above the k-th subdiagonal in the first nb columns are overwritten +// with the corresponding elements of the reduced matrix; the elements below the +// k-th subdiagonal, with the slice tau, represent the matrix Q as a product of +// elementary reflectors. The other columns of A are unchanged. +// +// The contents of A on exit are illustrated by the following example +// with n = 7, k = 3 and nb = 2: +// [ a a a a a ] +// [ a a a a a ] +// [ a a a a a ] +// [ h h a a a ] +// [ v0 h a a a ] +// [ v0 v1 a a a ] +// [ v0 v1 a a a ] +// where a denotes an element of the original matrix A, h denotes a +// modified element of the upper Hessenberg matrix H, and vi denotes an +// element of the vector defining H_i. +// +// k is the offset for the reduction. Elements below the k-th subdiagonal in the +// first nb columns are reduced to zero. +// +// nb is the number of columns to be reduced. +// +// On entry, a represents the n×(n-k+1) matrix A. On return, the elements on and +// above the k-th subdiagonal in the first nb columns are overwritten with the +// corresponding elements of the reduced matrix. The elements below the k-th +// subdiagonal, with the slice tau, represent the matrix Q as a product of +// elementary reflectors. The other columns of A are unchanged. +// +// tau will contain the scalar factors of the elementary reflectors. It must +// have length at least nb. +// +// t and ldt represent the nb×nb upper triangular matrix T, and y and ldy +// represent the n×nb matrix Y. +// +// Dlahr2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlahr2(n, k, nb int, a []float64, lda int, tau, t []float64, ldt int, y []float64, ldy int) { + switch { + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case nb < 0: + panic(nbLT0) + case nb > n: + panic(nbGTN) + case lda < max(1, n-k+1): + panic(badLdA) + case ldt < max(1, nb): + panic(badLdT) + case ldy < max(1, nb): + panic(badLdY) + } + + // Quick return if possible. 
+ if n < 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n-k+1: + panic(shortA) + case len(tau) < nb: + panic(shortTau) + case len(t) < (nb-1)*ldt+nb: + panic(shortT) + case len(y) < (n-1)*ldy+nb: + panic(shortY) + } + + // Quick return if possible. + if n == 1 { + return + } + + bi := blas64.Implementation() + var ei float64 + for i := 0; i < nb; i++ { + if i > 0 { + // Update A[k:n,i]. + + // Update i-th column of A - Y * V^T. + bi.Dgemv(blas.NoTrans, n-k, i, + -1, y[k*ldy:], ldy, + a[(k+i-1)*lda:], 1, + 1, a[k*lda+i:], lda) + + // Apply I - V * T^T * V^T to this column (call it b) + // from the left, using the last column of T as + // workspace. + // Let V = [ V1 ] and b = [ b1 ] (first i rows) + // [ V2 ] [ b2 ] + // where V1 is unit lower triangular. + // + // w := V1^T * b1. + bi.Dcopy(i, a[k*lda+i:], lda, t[nb-1:], ldt) + bi.Dtrmv(blas.Lower, blas.Trans, blas.Unit, i, + a[k*lda:], lda, t[nb-1:], ldt) + + // w := w + V2^T * b2. + bi.Dgemv(blas.Trans, n-k-i, i, + 1, a[(k+i)*lda:], lda, + a[(k+i)*lda+i:], lda, + 1, t[nb-1:], ldt) + + // w := T^T * w. + bi.Dtrmv(blas.Upper, blas.Trans, blas.NonUnit, i, + t, ldt, t[nb-1:], ldt) + + // b2 := b2 - V2*w. + bi.Dgemv(blas.NoTrans, n-k-i, i, + -1, a[(k+i)*lda:], lda, + t[nb-1:], ldt, + 1, a[(k+i)*lda+i:], lda) + + // b1 := b1 - V1*w. + bi.Dtrmv(blas.Lower, blas.NoTrans, blas.Unit, i, + a[k*lda:], lda, t[nb-1:], ldt) + bi.Daxpy(i, -1, t[nb-1:], ldt, a[k*lda+i:], lda) + + a[(k+i-1)*lda+i-1] = ei + } + + // Generate the elementary reflector H_i to annihilate + // A[k+i+1:n,i]. + ei, tau[i] = impl.Dlarfg(n-k-i, a[(k+i)*lda+i], a[min(k+i+1, n-1)*lda+i:], lda) + a[(k+i)*lda+i] = 1 + + // Compute Y[k:n,i]. + bi.Dgemv(blas.NoTrans, n-k, n-k-i, + 1, a[k*lda+i+1:], lda, + a[(k+i)*lda+i:], lda, + 0, y[k*ldy+i:], ldy) + bi.Dgemv(blas.Trans, n-k-i, i, + 1, a[(k+i)*lda:], lda, + a[(k+i)*lda+i:], lda, + 0, t[i:], ldt) + bi.Dgemv(blas.NoTrans, n-k, i, + -1, y[k*ldy:], ldy, + t[i:], ldt, + 1, y[k*ldy+i:], ldy) + bi.Dscal(n-k, tau[i], y[k*ldy+i:], ldy) + + // Compute T[0:i,i]. + bi.Dscal(i, -tau[i], t[i:], ldt) + bi.Dtrmv(blas.Upper, blas.NoTrans, blas.NonUnit, i, + t, ldt, t[i:], ldt) + + t[i*ldt+i] = tau[i] + } + a[(k+nb-1)*lda+nb-1] = ei + + // Compute Y[0:k,0:nb]. + impl.Dlacpy(blas.All, k, nb, a[1:], lda, y, ldy) + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, k, nb, + 1, a[k*lda:], lda, y, ldy) + if n > k+nb { + bi.Dgemm(blas.NoTrans, blas.NoTrans, k, nb, n-k-nb, + 1, a[1+nb:], lda, + a[(k+nb)*lda:], lda, + 1, y, ldy) + } + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.NonUnit, k, nb, + 1, t, ldt, y, ldy) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaln2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaln2.go new file mode 100644 index 0000000..ca0b2f7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaln2.go @@ -0,0 +1,405 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlaln2 solves a linear equation or a system of 2 linear equations of the form +// (ca A - w D) X = scale B, if trans == false, +// (ca A^T - w D) X = scale B, if trans == true, +// where A is a na×na real matrix, ca is a real scalar, D is a na×na diagonal +// real matrix, w is a scalar, real if nw == 1, complex if nw == 2, and X and B +// are na×1 matrices, real if w is real, complex if w is complex. 
+// +// If w is complex, X and B are represented as na×2 matrices, the first column +// of each being the real part and the second being the imaginary part. +// +// na and nw must be 1 or 2, otherwise Dlaln2 will panic. +// +// d1 and d2 are the diagonal elements of D. d2 is not used if na == 1. +// +// wr and wi represent the real and imaginary part, respectively, of the scalar +// w. wi is not used if nw == 1. +// +// smin is the desired lower bound on the singular values of A. This should be +// a safe distance away from underflow or overflow, say, between +// (underflow/machine precision) and (overflow*machine precision). +// +// If both singular values of (ca A - w D) are less than smin, smin*identity +// will be used instead of (ca A - w D). If only one singular value is less than +// smin, one element of (ca A - w D) will be perturbed enough to make the +// smallest singular value roughly smin. If both singular values are at least +// smin, (ca A - w D) will not be perturbed. In any case, the perturbation will +// be at most some small multiple of max(smin, ulp*norm(ca A - w D)). The +// singular values are computed by infinity-norm approximations, and thus will +// only be correct to a factor of 2 or so. +// +// All input quantities are assumed to be smaller than overflow by a reasonable +// factor. +// +// scale is a scaling factor less than or equal to 1 which is chosen so that X +// can be computed without overflow. X is further scaled if necessary to assure +// that norm(ca A - w D)*norm(X) is less than overflow. +// +// xnorm contains the infinity-norm of X when X is regarded as a na×nw real +// matrix. +// +// ok will be false if (ca A - w D) had to be perturbed to make its smallest +// singular value greater than smin, otherwise ok will be true. +// +// Dlaln2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaln2(trans bool, na, nw int, smin, ca float64, a []float64, lda int, d1, d2 float64, b []float64, ldb int, wr, wi float64, x []float64, ldx int) (scale, xnorm float64, ok bool) { + // TODO(vladimir-ch): Consider splitting this function into two, one + // handling the real case (nw == 1) and the other handling the complex + // case (nw == 2). Given that Go has complex types, their signatures + // would be simpler and more natural, and the implementation not as + // convoluted. + + switch { + case na != 1 && na != 2: + panic(badNa) + case nw != 1 && nw != 2: + panic(badNw) + case lda < na: + panic(badLdA) + case len(a) < (na-1)*lda+na: + panic(shortA) + case ldb < nw: + panic(badLdB) + case len(b) < (na-1)*ldb+nw: + panic(shortB) + case ldx < nw: + panic(badLdX) + case len(x) < (na-1)*ldx+nw: + panic(shortX) + } + + smlnum := 2 * dlamchS + bignum := 1 / smlnum + smini := math.Max(smin, smlnum) + + ok = true + scale = 1 + + if na == 1 { + // 1×1 (i.e., scalar) system C X = B. + + if nw == 1 { + // Real 1×1 system. + + // C = ca A - w D. + csr := ca*a[0] - wr*d1 + cnorm := math.Abs(csr) + + // If |C| < smini, use C = smini. + if cnorm < smini { + csr = smini + cnorm = smini + ok = false + } + + // Check scaling for X = B / C. + bnorm := math.Abs(b[0]) + if cnorm < 1 && bnorm > math.Max(1, bignum*cnorm) { + scale = 1 / bnorm + } + + // Compute X. + x[0] = b[0] * scale / csr + xnorm = math.Abs(x[0]) + + return scale, xnorm, ok + } + + // Complex 1×1 system (w is complex). + + // C = ca A - w D. + csr := ca*a[0] - wr*d1 + csi := -wi * d1 + cnorm := math.Abs(csr) + math.Abs(csi) + + // If |C| < smini, use C = smini. 
+ if cnorm < smini {
+ csr = smini
+ csi = 0
+ cnorm = smini
+ ok = false
+ }
+
+ // Check scaling for X = B / C.
+ bnorm := math.Abs(b[0]) + math.Abs(b[1])
+ if cnorm < 1 && bnorm > math.Max(1, bignum*cnorm) {
+ scale = 1 / bnorm
+ }
+
+ // Compute X.
+ cx := complex(scale*b[0], scale*b[1]) / complex(csr, csi)
+ x[0], x[1] = real(cx), imag(cx)
+ xnorm = math.Abs(x[0]) + math.Abs(x[1])
+
+ return scale, xnorm, ok
+ }
+
+ // 2×2 system.
+
+ // Compute the real part of
+ // C = ca A - w D
+ // or
+ // C = ca A^T - w D.
+ crv := [4]float64{
+ ca*a[0] - wr*d1,
+ ca * a[1],
+ ca * a[lda],
+ ca*a[lda+1] - wr*d2,
+ }
+ if trans {
+ crv[1] = ca * a[lda]
+ crv[2] = ca * a[1]
+ }
+
+ pivot := [4][4]int{
+ {0, 1, 2, 3},
+ {1, 0, 3, 2},
+ {2, 3, 0, 1},
+ {3, 2, 1, 0},
+ }
+
+ if nw == 1 {
+ // Real 2×2 system (w is real).
+
+ // Find the largest element in C.
+ var cmax float64
+ var icmax int
+ for j, v := range crv {
+ v = math.Abs(v)
+ if v > cmax {
+ cmax = v
+ icmax = j
+ }
+ }
+
+ // If norm(C) < smini, use smini*identity.
+ if cmax < smini {
+ bnorm := math.Max(math.Abs(b[0]), math.Abs(b[ldb]))
+ if smini < 1 && bnorm > math.Max(1, bignum*smini) {
+ scale = 1 / bnorm
+ }
+ temp := scale / smini
+ x[0] = temp * b[0]
+ x[ldx] = temp * b[ldb]
+ xnorm = temp * bnorm
+ ok = false
+
+ return scale, xnorm, ok
+ }
+
+ // Gaussian elimination with complete pivoting.
+ // Form upper triangular matrix
+ // [ur11 ur12]
+ // [ 0 ur22]
+ ur11 := crv[icmax]
+ ur12 := crv[pivot[icmax][1]]
+ cr21 := crv[pivot[icmax][2]]
+ cr22 := crv[pivot[icmax][3]]
+ ur11r := 1 / ur11
+ lr21 := ur11r * cr21
+ ur22 := cr22 - ur12*lr21
+
+ // If smaller pivot < smini, use smini.
+ if math.Abs(ur22) < smini {
+ ur22 = smini
+ ok = false
+ }
+
+ var br1, br2 float64
+ if icmax > 1 {
+ // If the pivot lies in the second row, swap the rows.
+ br1 = b[ldb]
+ br2 = b[0]
+ } else {
+ br1 = b[0]
+ br2 = b[ldb]
+ }
+ br2 -= lr21 * br1 // Apply the Gaussian elimination step to the right-hand side.
+
+ bbnd := math.Max(math.Abs(ur22*ur11r*br1), math.Abs(br2))
+ if bbnd > 1 && math.Abs(ur22) < 1 && bbnd >= bignum*math.Abs(ur22) {
+ scale = 1 / bbnd
+ }
+
+ // Solve the linear system ur*xr=br.
+ xr2 := br2 * scale / ur22
+ xr1 := scale*br1*ur11r - ur11r*ur12*xr2
+ if icmax&0x1 != 0 {
+ // If the pivot lies in the second column, swap the components of the solution.
+ x[0] = xr2
+ x[ldx] = xr1
+ } else {
+ x[0] = xr1
+ x[ldx] = xr2
+ }
+ xnorm = math.Max(math.Abs(xr1), math.Abs(xr2))
+
+ // Further scaling if norm(A)*norm(X) > overflow.
+ if xnorm > 1 && cmax > 1 && xnorm > bignum/cmax {
+ temp := cmax / bignum
+ x[0] *= temp
+ x[ldx] *= temp
+ xnorm *= temp
+ scale *= temp
+ }
+
+ return scale, xnorm, ok
+ }
+
+ // Complex 2×2 system (w is complex).
+
+ // Find the largest element in C.
+ civ := [4]float64{
+ -wi * d1,
+ 0,
+ 0,
+ -wi * d2,
+ }
+ var cmax float64
+ var icmax int
+ for j, v := range crv {
+ v := math.Abs(v)
+ if v+math.Abs(civ[j]) > cmax {
+ cmax = v + math.Abs(civ[j])
+ icmax = j
+ }
+ }
+
+ // If norm(C) < smini, use smini*identity.
+ if cmax < smini {
+ br1 := math.Abs(b[0]) + math.Abs(b[1])
+ br2 := math.Abs(b[ldb]) + math.Abs(b[ldb+1])
+ bnorm := math.Max(br1, br2)
+ if smini < 1 && bnorm > 1 && bnorm > bignum*smini {
+ scale = 1 / bnorm
+ }
+ temp := scale / smini
+ x[0] = temp * b[0]
+ x[1] = temp * b[1]
+ x[ldx] = temp * b[ldb]
+ x[ldx+1] = temp * b[ldb+1]
+ xnorm = temp * bnorm
+ ok = false
+
+ return scale, xnorm, ok
+ }
+
+ // Gaussian elimination with complete pivoting.
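Before the elimination code that follows, it may help to see the routine from the caller's side. Below is a usage sketch for Dlaln2 in the real 2×2 case (nw == 1); the matrix, right-hand side, and the tiny smin bound are illustrative values.

package main

import (
	"fmt"

	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	// Solve (ca*A - w*D) x = b with A = [2 1; 1 3], D = I, ca = 1, w = 0.
	a := []float64{
		2, 1,
		1, 3,
	}
	b := []float64{1, 1}    // na×1 right-hand side, ldb = 1
	x := make([]float64, 2) // na×1 solution, ldx = 1

	// smin is a lower bound on the singular values of (ca*A - w*D); real
	// callers typically derive it from ulp*norm(A). A tiny value suffices here.
	const smin = 1e-300

	scale, xnorm, ok := impl.Dlaln2(false, 2, 1, smin, 1, a, 2, 1, 1, b, 1, 0, 0, x, 1)
	fmt.Println(scale, xnorm, ok) // scale = 1 and ok = true: no perturbation was needed
	fmt.Println(x)                // ≈ [0.4 0.2], since A*[0.4 0.2]^T = [1 1]^T
}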
+ ur11 := crv[icmax] + ui11 := civ[icmax] + ur12 := crv[pivot[icmax][1]] + ui12 := civ[pivot[icmax][1]] + cr21 := crv[pivot[icmax][2]] + ci21 := civ[pivot[icmax][2]] + cr22 := crv[pivot[icmax][3]] + ci22 := civ[pivot[icmax][3]] + var ( + ur11r, ui11r float64 + lr21, li21 float64 + ur12s, ui12s float64 + ur22, ui22 float64 + ) + if icmax == 0 || icmax == 3 { + // Off-diagonals of pivoted C are real. + if math.Abs(ur11) > math.Abs(ui11) { + temp := ui11 / ur11 + ur11r = 1 / (ur11 * (1 + temp*temp)) + ui11r = -temp * ur11r + } else { + temp := ur11 / ui11 + ui11r = -1 / (ui11 * (1 + temp*temp)) + ur11r = -temp * ui11r + } + lr21 = cr21 * ur11r + li21 = cr21 * ui11r + ur12s = ur12 * ur11r + ui12s = ur12 * ui11r + ur22 = cr22 - ur12*lr21 + ui22 = ci22 - ur12*li21 + } else { + // Diagonals of pivoted C are real. + ur11r = 1 / ur11 + // ui11r is already 0. + lr21 = cr21 * ur11r + li21 = ci21 * ur11r + ur12s = ur12 * ur11r + ui12s = ui12 * ur11r + ur22 = cr22 - ur12*lr21 + ui12*li21 + ui22 = -ur12*li21 - ui12*lr21 + } + u22abs := math.Abs(ur22) + math.Abs(ui22) + + // If smaller pivot < smini, use smini. + if u22abs < smini { + ur22 = smini + ui22 = 0 + ok = false + } + + var br1, bi1 float64 + var br2, bi2 float64 + if icmax > 1 { + // If the pivot lies in the second row, swap the rows. + br1 = b[ldb] + bi1 = b[ldb+1] + br2 = b[0] + bi2 = b[1] + } else { + br1 = b[0] + bi1 = b[1] + br2 = b[ldb] + bi2 = b[ldb+1] + } + br2 += -lr21*br1 + li21*bi1 + bi2 += -li21*br1 - lr21*bi1 + + bbnd1 := u22abs * (math.Abs(ur11r) + math.Abs(ui11r)) * (math.Abs(br1) + math.Abs(bi1)) + bbnd2 := math.Abs(br2) + math.Abs(bi2) + bbnd := math.Max(bbnd1, bbnd2) + if bbnd > 1 && u22abs < 1 && bbnd >= bignum*u22abs { + scale = 1 / bbnd + br1 *= scale + bi1 *= scale + br2 *= scale + bi2 *= scale + } + + cx2 := complex(br2, bi2) / complex(ur22, ui22) + xr2, xi2 := real(cx2), imag(cx2) + xr1 := ur11r*br1 - ui11r*bi1 - ur12s*xr2 + ui12s*xi2 + xi1 := ui11r*br1 + ur11r*bi1 - ui12s*xr2 - ur12s*xi2 + if icmax&0x1 != 0 { + // If the pivot lies in the second column, swap the components of the solution. + x[0] = xr2 + x[1] = xi2 + x[ldx] = xr1 + x[ldx+1] = xi1 + } else { + x[0] = xr1 + x[1] = xi1 + x[ldx] = xr2 + x[ldx+1] = xi2 + } + xnorm = math.Max(math.Abs(xr1)+math.Abs(xi1), math.Abs(xr2)+math.Abs(xi2)) + + // Further scaling if norm(A)*norm(X) > overflow. + if xnorm > 1 && cmax > 1 && xnorm > bignum/cmax { + temp := cmax / bignum + x[0] *= temp + x[1] *= temp + x[ldx] *= temp + x[ldx+1] *= temp + xnorm *= temp + scale *= temp + } + + return scale, xnorm, ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlange.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlange.go new file mode 100644 index 0000000..9edfa83 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlange.go @@ -0,0 +1,89 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/lapack" +) + +// Dlange computes the matrix norm of the general m×n matrix a. The input norm +// specifies the norm computed. +// lapack.MaxAbs: the maximum absolute value of an element. +// lapack.MaxColumnSum: the maximum column sum of the absolute values of the entries. +// lapack.MaxRowSum: the maximum row sum of the absolute values of the entries. +// lapack.Frobenius: the square root of the sum of the squares of the entries. 
+// If norm == lapack.MaxColumnSum, work must be of length n, and this function will panic otherwise. +// There are no restrictions on work for the other matrix norms. +func (impl Implementation) Dlange(norm lapack.MatrixNorm, m, n int, a []float64, lda int, work []float64) float64 { + // TODO(btracey): These should probably be refactored to use BLAS calls. + switch { + case norm != lapack.MaxRowSum && norm != lapack.MaxColumnSum && norm != lapack.Frobenius && norm != lapack.MaxAbs: + panic(badNorm) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return 0 + } + + switch { + case len(a) < (m-1)*lda+n: + panic(badLdA) + case norm == lapack.MaxColumnSum && len(work) < n: + panic(shortWork) + } + + if norm == lapack.MaxAbs { + var value float64 + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + value = math.Max(value, math.Abs(a[i*lda+j])) + } + } + return value + } + if norm == lapack.MaxColumnSum { + if len(work) < n { + panic(shortWork) + } + for i := 0; i < n; i++ { + work[i] = 0 + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + work[j] += math.Abs(a[i*lda+j]) + } + } + var value float64 + for i := 0; i < n; i++ { + value = math.Max(value, work[i]) + } + return value + } + if norm == lapack.MaxRowSum { + var value float64 + for i := 0; i < m; i++ { + var sum float64 + for j := 0; j < n; j++ { + sum += math.Abs(a[i*lda+j]) + } + value = math.Max(value, sum) + } + return value + } + // norm == lapack.Frobenius + var value float64 + scale := 0.0 + sum := 1.0 + for i := 0; i < m; i++ { + scale, sum = impl.Dlassq(n, a[i*lda:], 1, scale, sum) + } + value = scale * math.Sqrt(sum) + return value +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlanst.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlanst.go new file mode 100644 index 0000000..9ca1897 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlanst.go @@ -0,0 +1,75 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/lapack" +) + +// Dlanst computes the specified norm of a symmetric tridiagonal matrix A. +// The diagonal elements of A are stored in d and the off-diagonal elements +// are stored in e. 
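A usage sketch for the Dlange routine defined above; the 2×2 matrix is illustrative.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack"
	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	a := []float64{
		1, -2,
		3, 4,
	}
	// MaxAbs and Frobenius need no workspace.
	fmt.Println(impl.Dlange(lapack.MaxAbs, 2, 2, a, 2, nil))    // 4
	fmt.Println(impl.Dlange(lapack.Frobenius, 2, 2, a, 2, nil)) // sqrt(30) ≈ 5.477
	// MaxColumnSum needs a work slice of length at least n.
	work := make([]float64, 2)
	fmt.Println(impl.Dlange(lapack.MaxColumnSum, 2, 2, a, 2, work)) // 6
}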
+func (impl Implementation) Dlanst(norm lapack.MatrixNorm, n int, d, e []float64) float64 { + switch { + case norm != lapack.MaxRowSum && norm != lapack.MaxColumnSum && norm != lapack.Frobenius && norm != lapack.MaxAbs: + panic(badNorm) + case n < 0: + panic(nLT0) + } + if n == 0 { + return 0 + } + switch { + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + } + + switch norm { + default: + panic(badNorm) + case lapack.MaxAbs: + anorm := math.Abs(d[n-1]) + for i := 0; i < n-1; i++ { + sum := math.Abs(d[i]) + if anorm < sum || math.IsNaN(sum) { + anorm = sum + } + sum = math.Abs(e[i]) + if anorm < sum || math.IsNaN(sum) { + anorm = sum + } + } + return anorm + case lapack.MaxColumnSum, lapack.MaxRowSum: + if n == 1 { + return math.Abs(d[0]) + } + anorm := math.Abs(d[0]) + math.Abs(e[0]) + sum := math.Abs(e[n-2]) + math.Abs(d[n-1]) + if anorm < sum || math.IsNaN(sum) { + anorm = sum + } + for i := 1; i < n-1; i++ { + sum := math.Abs(d[i]) + math.Abs(e[i]) + math.Abs(e[i-1]) + if anorm < sum || math.IsNaN(sum) { + anorm = sum + } + } + return anorm + case lapack.Frobenius: + var scale float64 + sum := 1.0 + if n > 1 { + scale, sum = impl.Dlassq(n-1, e, 1, scale, sum) + sum = 2 * sum + } + scale, sum = impl.Dlassq(n, d, 1, scale, sum) + return scale * math.Sqrt(sum) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlansy.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlansy.go new file mode 100644 index 0000000..97ba5b2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlansy.go @@ -0,0 +1,132 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dlansy computes the specified norm of an n×n symmetric matrix. If +// norm == lapack.MaxColumnSum or norm == lapackMaxRowSum work must have length +// at least n, otherwise work is unused. +func (impl Implementation) Dlansy(norm lapack.MatrixNorm, uplo blas.Uplo, n int, a []float64, lda int, work []float64) float64 { + switch { + case norm != lapack.MaxRowSum && norm != lapack.MaxColumnSum && norm != lapack.Frobenius && norm != lapack.MaxAbs: + panic(badNorm) + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return 0 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case (norm == lapack.MaxColumnSum || norm == lapack.MaxRowSum) && len(work) < n: + panic(shortWork) + } + + switch norm { + default: + panic(badNorm) + case lapack.MaxAbs: + if uplo == blas.Upper { + var max float64 + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := math.Abs(a[i*lda+j]) + if math.IsNaN(v) { + return math.NaN() + } + if v > max { + max = v + } + } + } + return max + } + var max float64 + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + v := math.Abs(a[i*lda+j]) + if math.IsNaN(v) { + return math.NaN() + } + if v > max { + max = v + } + } + } + return max + case lapack.MaxRowSum, lapack.MaxColumnSum: + // A symmetric matrix has the same 1-norm and ∞-norm. 
+ for i := 0; i < n; i++ { + work[i] = 0 + } + if uplo == blas.Upper { + for i := 0; i < n; i++ { + work[i] += math.Abs(a[i*lda+i]) + for j := i + 1; j < n; j++ { + v := math.Abs(a[i*lda+j]) + work[i] += v + work[j] += v + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + v := math.Abs(a[i*lda+j]) + work[i] += v + work[j] += v + } + work[i] += math.Abs(a[i*lda+i]) + } + } + var max float64 + for i := 0; i < n; i++ { + v := work[i] + if math.IsNaN(v) { + return math.NaN() + } + if v > max { + max = v + } + } + return max + case lapack.Frobenius: + if uplo == blas.Upper { + var sum float64 + for i := 0; i < n; i++ { + v := a[i*lda+i] + sum += v * v + for j := i + 1; j < n; j++ { + v := a[i*lda+j] + sum += 2 * v * v + } + } + return math.Sqrt(sum) + } + var sum float64 + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + v := a[i*lda+j] + sum += 2 * v * v + } + v := a[i*lda+i] + sum += v * v + } + return math.Sqrt(sum) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlantr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlantr.go new file mode 100644 index 0000000..cc96391 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlantr.go @@ -0,0 +1,260 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dlantr computes the specified norm of an m×n trapezoidal matrix A. If +// norm == lapack.MaxColumnSum work must have length at least n, otherwise work +// is unused. +func (impl Implementation) Dlantr(norm lapack.MatrixNorm, uplo blas.Uplo, diag blas.Diag, m, n int, a []float64, lda int, work []float64) float64 { + switch { + case norm != lapack.MaxRowSum && norm != lapack.MaxColumnSum && norm != lapack.Frobenius && norm != lapack.MaxAbs: + panic(badNorm) + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
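A usage sketch for Dlansy as defined above, showing the work requirement for the row/column-sum norms; the matrix (symmetric with 2 on the diagonal and -1 on the first off-diagonals) is illustrative.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack"
	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	// With uplo == blas.Upper only the upper triangle is referenced; the
	// strictly lower entries may hold anything.
	a := []float64{
		2, -1, 0,
		0, 2, -1,
		0, 0, 2,
	}
	work := make([]float64, 3) // required for MaxRowSum and MaxColumnSum
	fmt.Println(impl.Dlansy(lapack.MaxRowSum, blas.Upper, 3, a, 3, work)) // 4
	fmt.Println(impl.Dlansy(lapack.MaxAbs, blas.Upper, 3, a, 3, nil))     // 2
}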
+ minmn := min(m, n) + if minmn == 0 { + return 0 + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case norm == lapack.MaxColumnSum && len(work) < n: + panic(shortWork) + } + + switch norm { + default: + panic(badNorm) + case lapack.MaxAbs: + if diag == blas.Unit { + value := 1.0 + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + tmp := math.Abs(a[i*lda+j]) + if math.IsNaN(tmp) { + return tmp + } + if tmp > value { + value = tmp + } + } + } + return value + } + for i := 1; i < m; i++ { + for j := 0; j < min(i, n); j++ { + tmp := math.Abs(a[i*lda+j]) + if math.IsNaN(tmp) { + return tmp + } + if tmp > value { + value = tmp + } + } + } + return value + } + var value float64 + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + tmp := math.Abs(a[i*lda+j]) + if math.IsNaN(tmp) { + return tmp + } + if tmp > value { + value = tmp + } + } + } + return value + } + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + tmp := math.Abs(a[i*lda+j]) + if math.IsNaN(tmp) { + return tmp + } + if tmp > value { + value = tmp + } + } + } + return value + case lapack.MaxColumnSum: + if diag == blas.Unit { + for i := 0; i < minmn; i++ { + work[i] = 1 + } + for i := minmn; i < n; i++ { + work[i] = 0 + } + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + work[j] += math.Abs(a[i*lda+j]) + } + } + } else { + for i := 1; i < m; i++ { + for j := 0; j < min(i, n); j++ { + work[j] += math.Abs(a[i*lda+j]) + } + } + } + } else { + for i := 0; i < n; i++ { + work[i] = 0 + } + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + work[j] += math.Abs(a[i*lda+j]) + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + work[j] += math.Abs(a[i*lda+j]) + } + } + } + } + var max float64 + for _, v := range work[:n] { + if math.IsNaN(v) { + return math.NaN() + } + if v > max { + max = v + } + } + return max + case lapack.MaxRowSum: + var maxsum float64 + if diag == blas.Unit { + if uplo == blas.Upper { + for i := 0; i < m; i++ { + var sum float64 + if i < minmn { + sum = 1 + } + for j := i + 1; j < n; j++ { + sum += math.Abs(a[i*lda+j]) + } + if math.IsNaN(sum) { + return math.NaN() + } + if sum > maxsum { + maxsum = sum + } + } + return maxsum + } else { + for i := 1; i < m; i++ { + var sum float64 + if i < minmn { + sum = 1 + } + for j := 0; j < min(i, n); j++ { + sum += math.Abs(a[i*lda+j]) + } + if math.IsNaN(sum) { + return math.NaN() + } + if sum > maxsum { + maxsum = sum + } + } + return maxsum + } + } else { + if uplo == blas.Upper { + for i := 0; i < m; i++ { + var sum float64 + for j := i; j < n; j++ { + sum += math.Abs(a[i*lda+j]) + } + if math.IsNaN(sum) { + return sum + } + if sum > maxsum { + maxsum = sum + } + } + return maxsum + } else { + for i := 0; i < m; i++ { + var sum float64 + for j := 0; j <= min(i, n-1); j++ { + sum += math.Abs(a[i*lda+j]) + } + if math.IsNaN(sum) { + return sum + } + if sum > maxsum { + maxsum = sum + } + } + return maxsum + } + } + case lapack.Frobenius: + var nrm float64 + if diag == blas.Unit { + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + tmp := a[i*lda+j] + nrm += tmp * tmp + } + } + } else { + for i := 1; i < m; i++ { + for j := 0; j < min(i, n); j++ { + tmp := a[i*lda+j] + nrm += tmp * tmp + } + } + } + nrm += float64(minmn) + } else { + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + tmp := math.Abs(a[i*lda+j]) + nrm += tmp * tmp + 
}
+ }
+ } else {
+ for i := 0; i < m; i++ {
+ for j := 0; j <= min(i, n-1); j++ {
+ tmp := math.Abs(a[i*lda+j])
+ nrm += tmp * tmp
+ }
+ }
+ }
+ }
+ return math.Sqrt(nrm)
+ }
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlanv2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlanv2.go
new file mode 100644
index 0000000..e5dcfb7
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlanv2.go
@@ -0,0 +1,132 @@
+// Copyright ©2016 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gonum
+
+import "math"
+
+// Dlanv2 computes the Schur factorization of a real 2×2 matrix:
+// [ a b ] = [ cs -sn ] * [ aa bb ] * [ cs sn ]
+// [ c d ] [ sn cs ] [ cc dd ] [-sn cs ]
+// If cc is zero, aa and dd are real eigenvalues of the matrix. Otherwise it
+// holds that aa = dd and bb*cc < 0, and aa ± sqrt(bb*cc) are complex conjugate
+// eigenvalues. The real and imaginary parts of the eigenvalues are returned in
+// (rt1r,rt1i) and (rt2r,rt2i).
+func (impl Implementation) Dlanv2(a, b, c, d float64) (aa, bb, cc, dd float64, rt1r, rt1i, rt2r, rt2i float64, cs, sn float64) {
+ switch {
+ case c == 0: // Matrix is already upper triangular.
+ aa = a
+ bb = b
+ cc = 0
+ dd = d
+ cs = 1
+ sn = 0
+ case b == 0: // Matrix is lower triangular, swap rows and columns.
+ aa = d
+ bb = -c
+ cc = 0
+ dd = a
+ cs = 0
+ sn = 1
+ case a == d && math.Signbit(b) != math.Signbit(c): // Matrix is already in the standard Schur form.
+ aa = a
+ bb = b
+ cc = c
+ dd = d
+ cs = 1
+ sn = 0
+ default:
+ temp := a - d
+ p := temp / 2
+ bcmax := math.Max(math.Abs(b), math.Abs(c))
+ bcmis := math.Min(math.Abs(b), math.Abs(c))
+ if b*c < 0 {
+ bcmis *= -1
+ }
+ scale := math.Max(math.Abs(p), bcmax)
+ z := p/scale*p + bcmax/scale*bcmis
+ eps := dlamchP
+
+ if z >= 4*eps {
+ // Real eigenvalues. Compute aa and dd.
+ if p > 0 {
+ z = p + math.Sqrt(scale)*math.Sqrt(z)
+ } else {
+ z = p - math.Sqrt(scale)*math.Sqrt(z)
+ }
+ aa = d + z
+ dd = d - bcmax/z*bcmis
+ // Compute bb and the rotation matrix.
+ tau := impl.Dlapy2(c, z)
+ cs = z / tau
+ sn = c / tau
+ bb = b - c
+ cc = 0
+ } else {
+ // Complex eigenvalues, or real (almost) equal eigenvalues.
+ // Make diagonal elements equal.
+ sigma := b + c
+ tau := impl.Dlapy2(sigma, temp)
+ cs = math.Sqrt((1 + math.Abs(sigma)/tau) / 2)
+ sn = -p / (tau * cs)
+ if sigma < 0 {
+ sn *= -1
+ }
+ // Compute [ aa bb ] = [ a b ] [ cs -sn ]
+ // [ cc dd ] [ c d ] [ sn cs ]
+ aa = a*cs + b*sn
+ bb = -a*sn + b*cs
+ cc = c*cs + d*sn
+ dd = -c*sn + d*cs
+ // Compute [ a b ] = [ cs sn ] [ aa bb ]
+ // [ c d ] [-sn cs ] [ cc dd ]
+ a = aa*cs + cc*sn
+ b = bb*cs + dd*sn
+ c = -aa*sn + cc*cs
+ d = -bb*sn + dd*cs
+
+ temp = (a + d) / 2
+ aa = temp
+ bb = b
+ cc = c
+ dd = temp
+
+ if cc != 0 {
+ if bb != 0 {
+ if math.Signbit(bb) == math.Signbit(cc) {
+ // Real eigenvalues, reduce to
+ // upper triangular form.
+ sab := math.Sqrt(math.Abs(bb))
+ sac := math.Sqrt(math.Abs(cc))
+ p = sab * sac
+ if cc < 0 {
+ p *= -1
+ }
+ tau = 1 / math.Sqrt(math.Abs(bb+cc))
+ aa = temp + p
+ bb = bb - cc
+ cc = 0
+ dd = temp - p
+ cs1 := sab * tau
+ sn1 := sac * tau
+ cs, sn = cs*cs1-sn*sn1, cs*sn1+sn*cs1
+ }
+ } else {
+ bb = -cc
+ cc = 0
+ cs, sn = -sn, cs
+ }
+ }
+ }
+ }
+
+ // Store eigenvalues in (rt1r,rt1i) and (rt2r,rt2i).
+ rt1r = aa + rt2r = dd + if cc != 0 { + rt1i = math.Sqrt(math.Abs(bb)) * math.Sqrt(math.Abs(cc)) + rt2i = -rt1i + } + return +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlapll.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapll.go new file mode 100644 index 0000000..bf98c33 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapll.go @@ -0,0 +1,55 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas/blas64" + +// Dlapll returns the smallest singular value of the n×2 matrix A = [ x y ]. +// The function first computes the QR factorization of A = Q*R, and then computes +// the SVD of the 2-by-2 upper triangular matrix r. +// +// The contents of x and y are overwritten during the call. +// +// Dlapll is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlapll(n int, x []float64, incX int, y []float64, incY int) float64 { + switch { + case n < 0: + panic(nLT0) + case incX <= 0: + panic(badIncX) + case incY <= 0: + panic(badIncY) + } + + // Quick return if possible. + if n == 0 { + return 0 + } + + switch { + case len(x) < 1+(n-1)*incX: + panic(shortX) + case len(y) < 1+(n-1)*incY: + panic(shortY) + } + + // Quick return if possible. + if n == 1 { + return 0 + } + + // Compute the QR factorization of the N-by-2 matrix [ X Y ]. + a00, tau := impl.Dlarfg(n, x[0], x[incX:], incX) + x[0] = 1 + + bi := blas64.Implementation() + c := -tau * bi.Ddot(n, x, incX, y, incY) + bi.Daxpy(n, c, x, incX, y, incY) + a11, _ := impl.Dlarfg(n-1, y[incY], y[2*incY:], incY) + + // Compute the SVD of 2-by-2 upper triangular matrix. + ssmin, _ := impl.Dlas2(a00, y[0], a11) + return ssmin +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlapmt.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapmt.go new file mode 100644 index 0000000..55f1567 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapmt.go @@ -0,0 +1,89 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas/blas64" + +// Dlapmt rearranges the columns of the m×n matrix X as specified by the +// permutation k_0, k_1, ..., k_n-1 of the integers 0, ..., n-1. +// +// If forward is true a forward permutation is performed: +// +// X[0:m, k[j]] is moved to X[0:m, j] for j = 0, 1, ..., n-1. +// +// otherwise a backward permutation is performed: +// +// X[0:m, j] is moved to X[0:m, k[j]] for j = 0, 1, ..., n-1. +// +// k must have length n, otherwise Dlapmt will panic. k is zero-indexed. +func (impl Implementation) Dlapmt(forward bool, m, n int, x []float64, ldx int, k []int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case ldx < max(1, n): + panic(badLdX) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + switch { + case len(x) < (m-1)*ldx+n: + panic(shortX) + case len(k) != n: + panic(badLenK) + } + + // Quick return if possible. 
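A usage sketch for Dlanv2 as defined above, applied to an illustrative 2×2 block with the complex-conjugate eigenvalue pair ±i.

package main

import (
	"fmt"

	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	// The block [0 1; -1 0] is already in standard Schur form, so the
	// rotation comes back as the identity (cs = 1, sn = 0).
	aa, bb, cc, dd, rt1r, rt1i, rt2r, rt2i, cs, sn := impl.Dlanv2(0, 1, -1, 0)
	fmt.Println(aa, bb, cc, dd)         // the standardized block
	fmt.Println(rt1r, rt1i, rt2r, rt2i) // 0 1 0 -1: the eigenvalue pair ±i
	fmt.Println(cs, sn)                 // the rotation used (1, 0 here)
}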
+ if n == 1 { + return + } + + for i, v := range k { + v++ + k[i] = -v + } + + bi := blas64.Implementation() + + if forward { + for j, v := range k { + if v >= 0 { + continue + } + k[j] = -v + i := -v - 1 + for k[i] < 0 { + bi.Dswap(m, x[j:], ldx, x[i:], ldx) + + k[i] = -k[i] + j = i + i = k[i] - 1 + } + } + } else { + for i, v := range k { + if v >= 0 { + continue + } + k[i] = -v + j := -v - 1 + for j != i { + bi.Dswap(m, x[j:], ldx, x[i:], ldx) + + k[j] = -k[j] + j = k[j] - 1 + } + } + } + + for i := range k { + k[i]-- + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlapy2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapy2.go new file mode 100644 index 0000000..19f73ff --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlapy2.go @@ -0,0 +1,14 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlapy2 is the LAPACK version of math.Hypot. +// +// Dlapy2 is an internal routine. It is exported for testing purposes. +func (Implementation) Dlapy2(x, y float64) float64 { + return math.Hypot(x, y) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqp2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqp2.go new file mode 100644 index 0000000..d3a0def --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqp2.go @@ -0,0 +1,127 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlaqp2 computes a QR factorization with column pivoting of the block A[offset:m, 0:n] +// of the m×n matrix A. The block A[0:offset, 0:n] is accordingly pivoted, but not factorized. +// +// On exit, the upper triangle of block A[offset:m, 0:n] is the triangular factor obtained. +// The elements in block A[offset:m, 0:n] below the diagonal, together with tau, represent +// the orthogonal matrix Q as a product of elementary reflectors. +// +// offset is number of rows of the matrix A that must be pivoted but not factorized. +// offset must not be negative otherwise Dlaqp2 will panic. +// +// On exit, jpvt holds the permutation that was applied; the jth column of A*P was the +// jpvt[j] column of A. jpvt must have length n, otherwise Dlaqp2 will panic. +// +// On exit tau holds the scalar factors of the elementary reflectors. It must have length +// at least min(m-offset, n) otherwise Dlaqp2 will panic. +// +// vn1 and vn2 hold the partial and complete column norms respectively. They must have length n, +// otherwise Dlaqp2 will panic. +// +// work must have length n, otherwise Dlaqp2 will panic. +// +// Dlaqp2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaqp2(m, n, offset int, a []float64, lda int, jpvt []int, tau, vn1, vn2, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case offset < 0: + panic(offsetLT0) + case offset > m: + panic(offsetGTM) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
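A usage sketch for Dlapmt as defined above; the 2×3 matrix and the permutation are illustrative.

package main

import (
	"fmt"

	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	// 2×3 row-major matrix with columns [1 4], [2 5], [3 6].
	x := []float64{
		1, 2, 3,
		4, 5, 6,
	}
	// Forward permutation: column k[j] of the input becomes column j.
	k := []int{2, 0, 1}
	impl.Dlapmt(true, 2, 3, x, 3, k)
	fmt.Println(x) // [3 1 2 6 4 5]
}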
+ if m == 0 || n == 0 { + return + } + + mn := min(m-offset, n) + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(jpvt) != n: + panic(badLenJpvt) + case len(tau) < mn: + panic(shortTau) + case len(vn1) < n: + panic(shortVn1) + case len(vn2) < n: + panic(shortVn2) + case len(work) < n: + panic(shortWork) + } + + tol3z := math.Sqrt(dlamchE) + + bi := blas64.Implementation() + + // Compute factorization. + for i := 0; i < mn; i++ { + offpi := offset + i + + // Determine ith pivot column and swap if necessary. + p := i + bi.Idamax(n-i, vn1[i:], 1) + if p != i { + bi.Dswap(m, a[p:], lda, a[i:], lda) + jpvt[p], jpvt[i] = jpvt[i], jpvt[p] + vn1[p] = vn1[i] + vn2[p] = vn2[i] + } + + // Generate elementary reflector H_i. + if offpi < m-1 { + a[offpi*lda+i], tau[i] = impl.Dlarfg(m-offpi, a[offpi*lda+i], a[(offpi+1)*lda+i:], lda) + } else { + tau[i] = 0 + } + + if i < n-1 { + // Apply H_i^T to A[offset+i:m, i:n] from the left. + aii := a[offpi*lda+i] + a[offpi*lda+i] = 1 + impl.Dlarf(blas.Left, m-offpi, n-i-1, a[offpi*lda+i:], lda, tau[i], a[offpi*lda+i+1:], lda, work) + a[offpi*lda+i] = aii + } + + // Update partial column norms. + for j := i + 1; j < n; j++ { + if vn1[j] == 0 { + continue + } + + // The following marked lines follow from the + // analysis in Lapack Working Note 176. + r := math.Abs(a[offpi*lda+j]) / vn1[j] // * + temp := math.Max(0, 1-r*r) // * + r = vn1[j] / vn2[j] // * + temp2 := temp * r * r // * + if temp2 < tol3z { + var v float64 + if offpi < m-1 { + v = bi.Dnrm2(m-offpi-1, a[(offpi+1)*lda+j:], lda) + } + vn1[j] = v + vn2[j] = v + } else { + vn1[j] *= math.Sqrt(temp) // * + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqps.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqps.go new file mode 100644 index 0000000..dd683b6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqps.go @@ -0,0 +1,244 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlaqps computes a step of QR factorization with column pivoting +// of an m×n matrix A by using Blas-3. It tries to factorize nb +// columns from A starting from the row offset, and updates all +// of the matrix with Dgemm. +// +// In some cases, due to catastrophic cancellations, it cannot +// factorize nb columns. Hence, the actual number of factorized +// columns is returned in kb. +// +// Dlaqps computes a QR factorization with column pivoting of the +// block A[offset:m, 0:nb] of the m×n matrix A. The block +// A[0:offset, 0:n] is accordingly pivoted, but not factorized. +// +// On exit, the upper triangle of block A[offset:m, 0:kb] is the +// triangular factor obtained. The elements in block A[offset:m, 0:n] +// below the diagonal, together with tau, represent the orthogonal +// matrix Q as a product of elementary reflectors. +// +// offset is number of rows of the matrix A that must be pivoted but +// not factorized. offset must not be negative otherwise Dlaqps will panic. +// +// On exit, jpvt holds the permutation that was applied; the jth column +// of A*P was the jpvt[j] column of A. jpvt must have length n, +// otherwise Dlapqs will panic. +// +// On exit tau holds the scalar factors of the elementary reflectors. +// It must have length nb, otherwise Dlapqs will panic. +// +// vn1 and vn2 hold the partial and complete column norms respectively. 
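The partial column norms in Dlaqp2 above (and again in Dlaqps below) are maintained with the LAWN 176 downdate marked in the code: cheap in the common case, with a fallback to a full Dnrm2 recomputation when cancellation makes the downdate unreliable. A standalone sketch of that decision follows; the helper name and the epsilon stand-in are illustrative.

package main

import (
	"fmt"
	"math"
)

// downdateColumnNorm mirrors the marked lines above: vn1 is the running
// partial norm of a column, vn2 the norm at the last full recomputation, and
// aij the entry just eliminated from that column. It returns the updated
// partial norm and whether the caller should recompute the norm exactly
// (with Dnrm2) because too much cancellation has accumulated.
func downdateColumnNorm(aij, vn1, vn2 float64) (newVn1 float64, recompute bool) {
	tol3z := math.Sqrt(1.1e-16) // stand-in for sqrt(dlamchE); illustrative value
	r := math.Abs(aij) / vn1
	temp := math.Max(0, 1-r*r)
	r = vn1 / vn2
	if temp*r*r < tol3z {
		return 0, true // downdate unreliable: recompute from scratch
	}
	return vn1 * math.Sqrt(temp), false
}

func main() {
	fmt.Println(downdateColumnNorm(0.3, 1, 1)) // ≈ 0.9539, no recomputation needed
}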
+// They must have length n, otherwise Dlapqs will panic. +// +// auxv must have length nb, otherwise Dlaqps will panic. +// +// f and ldf represent an n×nb matrix F that is overwritten during the +// call. +// +// Dlaqps is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaqps(m, n, offset, nb int, a []float64, lda int, jpvt []int, tau, vn1, vn2, auxv, f []float64, ldf int) (kb int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case offset < 0: + panic(offsetLT0) + case offset > m: + panic(offsetGTM) + case nb < 0: + panic(nbLT0) + case nb > n: + panic(nbGTN) + case lda < max(1, n): + panic(badLdA) + case ldf < max(1, nb): + panic(badLdF) + } + + if m == 0 || n == 0 { + return 0 + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(jpvt) != n: + panic(badLenJpvt) + case len(vn1) < n: + panic(shortVn1) + case len(vn2) < n: + panic(shortVn2) + } + + if nb == 0 { + return 0 + } + + switch { + case len(tau) < nb: + panic(shortTau) + case len(auxv) < nb: + panic(shortAuxv) + case len(f) < (n-1)*ldf+nb: + panic(shortF) + } + + if offset == m { + return 0 + } + + lastrk := min(m, n+offset) + lsticc := -1 + tol3z := math.Sqrt(dlamchE) + + bi := blas64.Implementation() + + var k, rk int + for ; k < nb && lsticc == -1; k++ { + rk = offset + k + + // Determine kth pivot column and swap if necessary. + p := k + bi.Idamax(n-k, vn1[k:], 1) + if p != k { + bi.Dswap(m, a[p:], lda, a[k:], lda) + bi.Dswap(k, f[p*ldf:], 1, f[k*ldf:], 1) + jpvt[p], jpvt[k] = jpvt[k], jpvt[p] + vn1[p] = vn1[k] + vn2[p] = vn2[k] + } + + // Apply previous Householder reflectors to column K: + // + // A[rk:m, k] = A[rk:m, k] - A[rk:m, 0:k-1]*F[k, 0:k-1]^T. + if k > 0 { + bi.Dgemv(blas.NoTrans, m-rk, k, -1, + a[rk*lda:], lda, + f[k*ldf:], 1, + 1, + a[rk*lda+k:], lda) + } + + // Generate elementary reflector H_k. + if rk < m-1 { + a[rk*lda+k], tau[k] = impl.Dlarfg(m-rk, a[rk*lda+k], a[(rk+1)*lda+k:], lda) + } else { + tau[k] = 0 + } + + akk := a[rk*lda+k] + a[rk*lda+k] = 1 + + // Compute kth column of F: + // + // Compute F[k+1:n, k] = tau[k]*A[rk:m, k+1:n]^T*A[rk:m, k]. + if k < n-1 { + bi.Dgemv(blas.Trans, m-rk, n-k-1, tau[k], + a[rk*lda+k+1:], lda, + a[rk*lda+k:], lda, + 0, + f[(k+1)*ldf+k:], ldf) + } + + // Padding F[0:k, k] with zeros. + for j := 0; j < k; j++ { + f[j*ldf+k] = 0 + } + + // Incremental updating of F: + // + // F[0:n, k] := F[0:n, k] - tau[k]*F[0:n, 0:k-1]*A[rk:m, 0:k-1]^T*A[rk:m,k]. + if k > 0 { + bi.Dgemv(blas.Trans, m-rk, k, -tau[k], + a[rk*lda:], lda, + a[rk*lda+k:], lda, + 0, + auxv, 1) + bi.Dgemv(blas.NoTrans, n, k, 1, + f, ldf, + auxv, 1, + 1, + f[k:], ldf) + } + + // Update the current row of A: + // + // A[rk, k+1:n] = A[rk, k+1:n] - A[rk, 0:k]*F[k+1:n, 0:k]^T. + if k < n-1 { + bi.Dgemv(blas.NoTrans, n-k-1, k+1, -1, + f[(k+1)*ldf:], ldf, + a[rk*lda:], 1, + 1, + a[rk*lda+k+1:], 1) + } + + // Update partial column norms. + if rk < lastrk-1 { + for j := k + 1; j < n; j++ { + if vn1[j] == 0 { + continue + } + + // The following marked lines follow from the + // analysis in Lapack Working Note 176. + r := math.Abs(a[rk*lda+j]) / vn1[j] // * + temp := math.Max(0, 1-r*r) // * + r = vn1[j] / vn2[j] // * + temp2 := temp * r * r // * + if temp2 < tol3z { + // vn2 is used here as a collection of + // indices into vn2 and also a collection + // of column norms. 
+ vn2[j] = float64(lsticc) + lsticc = j + } else { + vn1[j] *= math.Sqrt(temp) // * + } + } + } + + a[rk*lda+k] = akk + } + kb = k + rk = offset + kb + + // Apply the block reflector to the rest of the matrix: + // + // A[offset+kb+1:m, kb+1:n] := A[offset+kb+1:m, kb+1:n] - A[offset+kb+1:m, 1:kb]*F[kb+1:n, 1:kb]^T. + if kb < min(n, m-offset) { + bi.Dgemm(blas.NoTrans, blas.Trans, + m-rk, n-kb, kb, -1, + a[rk*lda:], lda, + f[kb*ldf:], ldf, + 1, + a[rk*lda+kb:], lda) + } + + // Recomputation of difficult columns. + for lsticc >= 0 { + itemp := int(vn2[lsticc]) + + // NOTE: The computation of vn1[lsticc] relies on the fact that + // Dnrm2 does not fail on vectors with norm below the value of + // sqrt(dlamchS) + v := bi.Dnrm2(m-rk, a[rk*lda+lsticc:], lda) + vn1[lsticc] = v + vn2[lsticc] = v + + lsticc = itemp + } + + return kb +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr04.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr04.go new file mode 100644 index 0000000..e9fbb60 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr04.go @@ -0,0 +1,478 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" +) + +// Dlaqr04 computes the eigenvalues of a block of an n×n upper Hessenberg matrix +// H, and optionally the matrices T and Z from the Schur decomposition +// H = Z T Z^T +// where T is an upper quasi-triangular matrix (the Schur form), and Z is the +// orthogonal matrix of Schur vectors. +// +// wantt indicates whether the full Schur form T is required. If wantt is false, +// then only enough of H will be updated to preserve the eigenvalues. +// +// wantz indicates whether the n×n matrix of Schur vectors Z is required. If it +// is true, the orthogonal similarity transformation will be accumulated into +// Z[iloz:ihiz+1,ilo:ihi+1], otherwise Z will not be referenced. +// +// ilo and ihi determine the block of H on which Dlaqr04 operates. It must hold that +// 0 <= ilo <= ihi < n, if n > 0, +// ilo == 0 and ihi == -1, if n == 0, +// and the block must be isolated, that is, +// ilo == 0 or H[ilo,ilo-1] == 0, +// ihi == n-1 or H[ihi+1,ihi] == 0, +// otherwise Dlaqr04 will panic. +// +// wr and wi must have length ihi+1. +// +// iloz and ihiz specify the rows of Z to which transformations will be applied +// if wantz is true. It must hold that +// 0 <= iloz <= ilo, and ihi <= ihiz < n, +// otherwise Dlaqr04 will panic. +// +// work must have length at least lwork and lwork must be +// lwork >= 1, if n <= 11, +// lwork >= n, if n > 11, +// otherwise Dlaqr04 will panic. lwork as large as 6*n may be required for +// optimal performance. On return, work[0] will contain the optimal value of +// lwork. +// +// If lwork is -1, instead of performing Dlaqr04, the function only estimates the +// optimal workspace size and stores it into work[0]. Neither h nor z are +// accessed. +// +// recur is the non-negative recursion depth. For recur > 0, Dlaqr04 behaves +// as DLAQR0, for recur == 0 it behaves as DLAQR4. +// +// unconverged indicates whether Dlaqr04 computed all the eigenvalues of H[ilo:ihi+1,ilo:ihi+1]. +// +// If unconverged is zero and wantt is true, H will contain on return the upper +// quasi-triangular matrix T from the Schur decomposition. 
2×2 diagonal blocks +// (corresponding to complex conjugate pairs of eigenvalues) will be returned in +// standard form, with H[i,i] == H[i+1,i+1] and H[i+1,i]*H[i,i+1] < 0. +// +// If unconverged is zero and if wantt is false, the contents of h on return is +// unspecified. +// +// If unconverged is zero, all the eigenvalues have been computed and their real +// and imaginary parts will be stored on return in wr[ilo:ihi+1] and +// wi[ilo:ihi+1], respectively. If two eigenvalues are computed as a complex +// conjugate pair, they are stored in consecutive elements of wr and wi, say the +// i-th and (i+1)th, with wi[i] > 0 and wi[i+1] < 0. If wantt is true, then the +// eigenvalues are stored in the same order as on the diagonal of the Schur form +// returned in H, with wr[i] = H[i,i] and, if H[i:i+2,i:i+2] is a 2×2 diagonal +// block, wi[i] = sqrt(-H[i+1,i]*H[i,i+1]) and wi[i+1] = -wi[i]. +// +// If unconverged is positive, some eigenvalues have not converged, and +// wr[unconverged:ihi+1] and wi[unconverged:ihi+1] will contain those +// eigenvalues which have been successfully computed. Failures are rare. +// +// If unconverged is positive and wantt is true, then on return +// (initial H)*U = U*(final H), (*) +// where U is an orthogonal matrix. The final H is upper Hessenberg and +// H[unconverged:ihi+1,unconverged:ihi+1] is upper quasi-triangular. +// +// If unconverged is positive and wantt is false, on return the remaining +// unconverged eigenvalues are the eigenvalues of the upper Hessenberg matrix +// H[ilo:unconverged,ilo:unconverged]. +// +// If unconverged is positive and wantz is true, then on return +// (final Z) = (initial Z)*U, +// where U is the orthogonal matrix in (*) regardless of the value of wantt. +// +// References: +// [1] K. Braman, R. Byers, R. Mathias. The Multishift QR Algorithm. Part I: +// Maintaining Well-Focused Shifts and Level 3 Performance. SIAM J. Matrix +// Anal. Appl. 23(4) (2002), pp. 929—947 +// URL: http://dx.doi.org/10.1137/S0895479801384573 +// [2] K. Braman, R. Byers, R. Mathias. The Multishift QR Algorithm. Part II: +// Aggressive Early Deflation. SIAM J. Matrix Anal. Appl. 23(4) (2002), pp. 948—973 +// URL: http://dx.doi.org/10.1137/S0895479801384585 +// +// Dlaqr04 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaqr04(wantt, wantz bool, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, iloz, ihiz int, z []float64, ldz int, work []float64, lwork int, recur int) (unconverged int) { + const ( + // Matrices of order ntiny or smaller must be processed by + // Dlahqr because of insufficient subdiagonal scratch space. + // This is a hard limit. + ntiny = 11 + // Exceptional deflation windows: try to cure rare slow + // convergence by varying the size of the deflation window after + // kexnw iterations. + kexnw = 5 + // Exceptional shifts: try to cure rare slow convergence with + // ad-hoc exceptional shifts every kexsh iterations. + kexsh = 6 + + // See https://github.com/gonum/lapack/pull/151#discussion_r68162802 + // and the surrounding discussion for an explanation where these + // constants come from. + // TODO(vladimir-ch): Similar constants for exceptional shifts + // are used also in dlahqr.go. The first constant is different + // there, it is equal to 3. Why? And does it matter? 
+ wilk1 = 0.75 + wilk2 = -0.4375 + ) + + switch { + case n < 0: + panic(nLT0) + case ilo < 0 || max(0, n-1) < ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case ldh < max(1, n): + panic(badLdH) + case wantz && (iloz < 0 || ilo < iloz): + panic(badIloz) + case wantz && (ihiz < ihi || n <= ihiz): + panic(badIhiz) + case ldz < 1, wantz && ldz < n: + panic(badLdZ) + case lwork < 1 && lwork != -1: + panic(badLWork) + // TODO(vladimir-ch): Enable if and when we figure out what the minimum + // necessary lwork value is. Dlaqr04 says that the minimum is n which + // clashes with Dlaqr23's opinion about optimal work when nw <= 2 + // (independent of n). + // case lwork < n && n > ntiny && lwork != -1: + // panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + case recur < 0: + panic(recurLT0) + } + + // Quick return. + if n == 0 { + work[0] = 1 + return 0 + } + + if lwork != -1 { + switch { + case len(h) < (n-1)*ldh+n: + panic(shortH) + case len(wr) != ihi+1: + panic(badLenWr) + case len(wi) != ihi+1: + panic(badLenWi) + case wantz && len(z) < (n-1)*ldz+n: + panic(shortZ) + case ilo > 0 && h[ilo*ldh+ilo-1] != 0: + panic(notIsolated) + case ihi+1 < n && h[(ihi+1)*ldh+ihi] != 0: + panic(notIsolated) + } + } + + if n <= ntiny { + // Tiny matrices must use Dlahqr. + if lwork == -1 { + work[0] = 1 + return 0 + } + return impl.Dlahqr(wantt, wantz, n, ilo, ihi, h, ldh, wr, wi, iloz, ihiz, z, ldz) + } + + // Use small bulge multi-shift QR with aggressive early deflation on + // larger-than-tiny matrices. + var jbcmpz string + if wantt { + jbcmpz = "S" + } else { + jbcmpz = "E" + } + if wantz { + jbcmpz += "V" + } else { + jbcmpz += "N" + } + + var fname string + if recur > 0 { + fname = "DLAQR0" + } else { + fname = "DLAQR4" + } + // nwr is the recommended deflation window size. n is greater than 11, + // so there is enough subdiagonal workspace for nwr >= 2 as required. + // (In fact, there is enough subdiagonal space for nwr >= 3.) + // TODO(vladimir-ch): If there is enough space for nwr >= 3, should we + // use it? + nwr := impl.Ilaenv(13, fname, jbcmpz, n, ilo, ihi, lwork) + nwr = max(2, nwr) + nwr = min(ihi-ilo+1, min((n-1)/3, nwr)) + + // nsr is the recommended number of simultaneous shifts. n is greater + // than 11, so there is enough subdiagonal workspace for nsr to be even + // and greater than or equal to two as required. + nsr := impl.Ilaenv(15, fname, jbcmpz, n, ilo, ihi, lwork) + nsr = min(nsr, min((n+6)/9, ihi-ilo)) + nsr = max(2, nsr&^1) + + // Workspace query call to Dlaqr23. + impl.Dlaqr23(wantt, wantz, n, ilo, ihi, nwr+1, h, ldh, iloz, ihiz, z, ldz, + wr, wi, h, ldh, n, h, ldh, n, h, ldh, work, -1, recur) + // Optimal workspace is max(Dlaqr5, Dlaqr23). + lwkopt := max(3*nsr/2, int(work[0])) + // Quick return in case of workspace query. + if lwork == -1 { + work[0] = float64(lwkopt) + return 0 + } + + // Dlahqr/Dlaqr04 crossover point. + nmin := impl.Ilaenv(12, fname, jbcmpz, n, ilo, ihi, lwork) + nmin = max(ntiny, nmin) + + // Nibble determines when to skip a multi-shift QR sweep (Dlaqr5). + nibble := impl.Ilaenv(14, fname, jbcmpz, n, ilo, ihi, lwork) + nibble = max(0, nibble) + + // Computation mode of far-from-diagonal orthogonal updates in Dlaqr5. + kacc22 := impl.Ilaenv(16, fname, jbcmpz, n, ilo, ihi, lwork) + kacc22 = max(0, min(kacc22, 2)) + + // nwmax is the largest possible deflation window for which there is + // sufficient workspace. + nwmax := min((n-1)/3, lwork/2) + nw := nwmax // Start with maximum deflation window size. 
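The lwork == -1 workspace-query convention documented above can be exercised directly: the query only writes the optimal size into work[0], after which the caller allocates that much and runs the factorization. A usage sketch follows; the Hessenberg matrix is illustrative and recur is set to 1 arbitrarily (any non-negative depth is accepted).

package main

import (
	"fmt"

	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}

	// Illustrative 4×4 upper Hessenberg matrix; Z starts as the identity so
	// that the accumulated transformations are the Schur vectors.
	n := 4
	h := []float64{
		2, 1, 0, 0,
		1, 3, 1, 0,
		0, 1, 4, 1,
		0, 0, 1, 5,
	}
	z := make([]float64, n*n)
	for i := 0; i < n; i++ {
		z[i*n+i] = 1
	}
	wr := make([]float64, n)
	wi := make([]float64, n)

	// Workspace query: lwork == -1 only writes the optimal size to query[0].
	query := make([]float64, 1)
	impl.Dlaqr04(true, true, n, 0, n-1, h, n, wr, wi, 0, n-1, z, n, query, -1, 1)

	// Allocate the reported optimum and run the factorization for real.
	work := make([]float64, int(query[0]))
	unconverged := impl.Dlaqr04(true, true, n, 0, n-1, h, n, wr, wi, 0, n-1, z, n, work, len(work), 1)
	fmt.Println(unconverged, wr, wi)
}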
+ + // nsmax is the largest number of simultaneous shifts for which there is + // sufficient workspace. + nsmax := min((n+6)/9, 2*lwork/3) &^ 1 + + ndfl := 1 // Number of iterations since last deflation. + ndec := 0 // Deflation window size decrement. + + // Main loop. + var ( + itmax = max(30, 2*kexsh) * max(10, (ihi-ilo+1)) + it = 0 + ) + for kbot := ihi; kbot >= ilo; { + if it == itmax { + unconverged = kbot + 1 + break + } + it++ + + // Locate active block. + ktop := ilo + for k := kbot; k >= ilo+1; k-- { + if h[k*ldh+k-1] == 0 { + ktop = k + break + } + } + + // Select deflation window size nw. + // + // Typical Case: + // If possible and advisable, nibble the entire active block. + // If not, use size min(nwr,nwmax) or min(nwr+1,nwmax) + // depending upon which has the smaller corresponding + // subdiagonal entry (a heuristic). + // + // Exceptional Case: + // If there have been no deflations in kexnw or more + // iterations, then vary the deflation window size. At first, + // because larger windows are, in general, more powerful than + // smaller ones, rapidly increase the window to the maximum + // possible. Then, gradually reduce the window size. + nh := kbot - ktop + 1 + nwupbd := min(nh, nwmax) + if ndfl < kexnw { + nw = min(nwupbd, nwr) + } else { + nw = min(nwupbd, 2*nw) + } + if nw < nwmax { + if nw >= nh-1 { + nw = nh + } else { + kwtop := kbot - nw + 1 + if math.Abs(h[kwtop*ldh+kwtop-1]) > math.Abs(h[(kwtop-1)*ldh+kwtop-2]) { + nw++ + } + } + } + if ndfl < kexnw { + ndec = -1 + } else if ndec >= 0 || nw >= nwupbd { + ndec++ + if nw-ndec < 2 { + ndec = 0 + } + nw -= ndec + } + + // Split workspace under the subdiagonal of H into: + // - an nw×nw work array V in the lower left-hand corner, + // - an nw×nhv horizontal work array along the bottom edge (nhv + // must be at least nw but more is better), + // - an nve×nw vertical work array along the left-hand-edge + // (nhv can be any positive integer but more is better). + kv := n - nw + kt := nw + kwv := nw + 1 + nhv := n - kwv - kt + // Aggressive early deflation. + ls, ld := impl.Dlaqr23(wantt, wantz, n, ktop, kbot, nw, + h, ldh, iloz, ihiz, z, ldz, wr[:kbot+1], wi[:kbot+1], + h[kv*ldh:], ldh, nhv, h[kv*ldh+kt:], ldh, nhv, h[kwv*ldh:], ldh, work, lwork, recur) + + // Adjust kbot accounting for new deflations. + kbot -= ld + // ks points to the shifts. + ks := kbot - ls + 1 + + // Skip an expensive QR sweep if there is a (partly heuristic) + // reason to expect that many eigenvalues will deflate without + // it. Here, the QR sweep is skipped if many eigenvalues have + // just been deflated or if the remaining active block is small. + if ld > 0 && (100*ld > nw*nibble || kbot-ktop+1 <= min(nmin, nwmax)) { + // ld is positive, note progress. + ndfl = 1 + continue + } + + // ns is the nominal number of simultaneous shifts. This may be + // lowered (slightly) if Dlaqr23 did not provide that many + // shifts. + ns := min(min(nsmax, nsr), max(2, kbot-ktop)) &^ 1 + + // If there have been no deflations in a multiple of kexsh + // iterations, then try exceptional shifts. Otherwise use shifts + // provided by Dlaqr23 above or from the eigenvalues of a + // trailing principal submatrix. 
+ if ndfl%kexsh == 0 { + ks = kbot - ns + 1 + for i := kbot; i > max(ks, ktop+1); i -= 2 { + ss := math.Abs(h[i*ldh+i-1]) + math.Abs(h[(i-1)*ldh+i-2]) + aa := wilk1*ss + h[i*ldh+i] + _, _, _, _, wr[i-1], wi[i-1], wr[i], wi[i], _, _ = + impl.Dlanv2(aa, ss, wilk2*ss, aa) + } + if ks == ktop { + wr[ks+1] = h[(ks+1)*ldh+ks+1] + wi[ks+1] = 0 + wr[ks] = wr[ks+1] + wi[ks] = wi[ks+1] + } + } else { + // If we got ns/2 or fewer shifts, use Dlahqr or recur + // into Dlaqr04 on a trailing principal submatrix to get + // more. Since ns <= nsmax <=(n+6)/9, there is enough + // space below the subdiagonal to fit an ns×ns scratch + // array. + if kbot-ks+1 <= ns/2 { + ks = kbot - ns + 1 + kt = n - ns + impl.Dlacpy(blas.All, ns, ns, h[ks*ldh+ks:], ldh, h[kt*ldh:], ldh) + if ns > nmin && recur > 0 { + ks += impl.Dlaqr04(false, false, ns, 1, ns-1, h[kt*ldh:], ldh, + wr[ks:ks+ns], wi[ks:ks+ns], 0, 0, nil, 0, work, lwork, recur-1) + } else { + ks += impl.Dlahqr(false, false, ns, 0, ns-1, h[kt*ldh:], ldh, + wr[ks:ks+ns], wi[ks:ks+ns], 0, 0, nil, 1) + } + // In case of a rare QR failure use eigenvalues + // of the trailing 2×2 principal submatrix. + if ks >= kbot { + aa := h[(kbot-1)*ldh+kbot-1] + bb := h[(kbot-1)*ldh+kbot] + cc := h[kbot*ldh+kbot-1] + dd := h[kbot*ldh+kbot] + _, _, _, _, wr[kbot-1], wi[kbot-1], wr[kbot], wi[kbot], _, _ = + impl.Dlanv2(aa, bb, cc, dd) + ks = kbot - 1 + } + } + + if kbot-ks+1 > ns { + // Sorting the shifts helps a little. Bubble + // sort keeps complex conjugate pairs together. + sorted := false + for k := kbot; k > ks; k-- { + if sorted { + break + } + sorted = true + for i := ks; i < k; i++ { + if math.Abs(wr[i])+math.Abs(wi[i]) >= math.Abs(wr[i+1])+math.Abs(wi[i+1]) { + continue + } + sorted = false + wr[i], wr[i+1] = wr[i+1], wr[i] + wi[i], wi[i+1] = wi[i+1], wi[i] + } + } + } + + // Shuffle shifts into pairs of real shifts and pairs of + // complex conjugate shifts using the fact that complex + // conjugate shifts are already adjacent to one another. + // TODO(vladimir-ch): The shuffling here could probably + // be removed but I'm not sure right now and it's safer + // to leave it. + for i := kbot; i > ks+1; i -= 2 { + if wi[i] == -wi[i-1] { + continue + } + wr[i], wr[i-1], wr[i-2] = wr[i-1], wr[i-2], wr[i] + wi[i], wi[i-1], wi[i-2] = wi[i-1], wi[i-2], wi[i] + } + } + + // If there are only two shifts and both are real, then use only one. + if kbot-ks+1 == 2 && wi[kbot] == 0 { + if math.Abs(wr[kbot]-h[kbot*ldh+kbot]) < math.Abs(wr[kbot-1]-h[kbot*ldh+kbot]) { + wr[kbot-1] = wr[kbot] + } else { + wr[kbot] = wr[kbot-1] + } + } + + // Use up to ns of the smallest magnitude shifts. If there + // aren't ns shifts available, then use them all, possibly + // dropping one to make the number of shifts even. + ns = min(ns, kbot-ks+1) &^ 1 + ks = kbot - ns + 1 + + // Split workspace under the subdiagonal into: + // - a kdu×kdu work array U in the lower left-hand-corner, + // - a kdu×nhv horizontal work array WH along the bottom edge + // (nhv must be at least kdu but more is better), + // - an nhv×kdu vertical work array WV along the left-hand-edge + // (nhv must be at least kdu but more is better). + kdu := 3*ns - 3 + ku := n - kdu + kwh := kdu + kwv = kdu + 3 + nhv = n - kwv - kdu + // Small-bulge multi-shift QR sweep. + impl.Dlaqr5(wantt, wantz, kacc22, n, ktop, kbot, ns, + wr[ks:ks+ns], wi[ks:ks+ns], h, ldh, iloz, ihiz, z, ldz, + work, 3, h[ku*ldh:], ldh, nhv, h[kwv*ldh:], ldh, nhv, h[ku*ldh+kwh:], ldh) + + // Note progress (or the lack of it). 
+ if ld > 0 { + ndfl = 1 + } else { + ndfl++ + } + } + + work[0] = float64(lwkopt) + return unconverged +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr1.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr1.go new file mode 100644 index 0000000..e21373b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr1.go @@ -0,0 +1,59 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlaqr1 sets v to a scalar multiple of the first column of the product +// (H - (sr1 + i*si1)*I)*(H - (sr2 + i*si2)*I) +// where H is a 2×2 or 3×3 matrix, I is the identity matrix of the same size, +// and i is the imaginary unit. Scaling is done to avoid overflows and most +// underflows. +// +// n is the order of H and must be either 2 or 3. It must hold that either sr1 = +// sr2 and si1 = -si2, or si1 = si2 = 0. The length of v must be equal to n. If +// any of these conditions is not met, Dlaqr1 will panic. +// +// Dlaqr1 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaqr1(n int, h []float64, ldh int, sr1, si1, sr2, si2 float64, v []float64) { + switch { + case n != 2 && n != 3: + panic("lapack: n must be 2 or 3") + case ldh < n: + panic(badLdH) + case len(h) < (n-1)*ldh+n: + panic(shortH) + case !((sr1 == sr2 && si1 == -si2) || (si1 == 0 && si2 == 0)): + panic(badShifts) + case len(v) != n: + panic(shortV) + } + + if n == 2 { + s := math.Abs(h[0]-sr2) + math.Abs(si2) + math.Abs(h[ldh]) + if s == 0 { + v[0] = 0 + v[1] = 0 + } else { + h21s := h[ldh] / s + v[0] = h21s*h[1] + (h[0]-sr1)*((h[0]-sr2)/s) - si1*(si2/s) + v[1] = h21s * (h[0] + h[ldh+1] - sr1 - sr2) + } + return + } + + s := math.Abs(h[0]-sr2) + math.Abs(si2) + math.Abs(h[ldh]) + math.Abs(h[2*ldh]) + if s == 0 { + v[0] = 0 + v[1] = 0 + v[2] = 0 + } else { + h21s := h[ldh] / s + h31s := h[2*ldh] / s + v[0] = (h[0]-sr1)*((h[0]-sr2)/s) - si1*(si2/s) + h[1]*h21s + h[2]*h31s + v[1] = h21s*(h[0]+h[ldh+1]-sr1-sr2) + h[ldh+2]*h31s + v[2] = h31s*(h[0]+h[2*ldh+2]-sr1-sr2) + h21s*h[2*ldh+1] + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr23.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr23.go new file mode 100644 index 0000000..ff299a7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr23.go @@ -0,0 +1,415 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dlaqr23 performs the orthogonal similarity transformation of an n×n upper +// Hessenberg matrix to detect and deflate fully converged eigenvalues from a +// trailing principal submatrix using aggressive early deflation [1]. +// +// On return, H will be overwritten by a new Hessenberg matrix that is a +// perturbation of an orthogonal similarity transformation of H. It is hoped +// that on output H will have many zero subdiagonal entries. +// +// If wantt is true, the matrix H will be fully updated so that the +// quasi-triangular Schur factor can be computed. If wantt is false, then only +// enough of H will be updated to preserve the eigenvalues. +// +// If wantz is true, the orthogonal similarity transformation will be +// accumulated into Z[iloz:ihiz+1,ktop:kbot+1], otherwise Z is not referenced. 
+// +// ktop and kbot determine a block [ktop:kbot+1,ktop:kbot+1] along the diagonal +// of H. It must hold that +// 0 <= ilo <= ihi < n, if n > 0, +// ilo == 0 and ihi == -1, if n == 0, +// and the block must be isolated, that is, it must hold that +// ktop == 0 or H[ktop,ktop-1] == 0, +// kbot == n-1 or H[kbot+1,kbot] == 0, +// otherwise Dlaqr23 will panic. +// +// nw is the deflation window size. It must hold that +// 0 <= nw <= kbot-ktop+1, +// otherwise Dlaqr23 will panic. +// +// iloz and ihiz specify the rows of the n×n matrix Z to which transformations +// will be applied if wantz is true. It must hold that +// 0 <= iloz <= ktop, and kbot <= ihiz < n, +// otherwise Dlaqr23 will panic. +// +// sr and si must have length kbot+1, otherwise Dlaqr23 will panic. +// +// v and ldv represent an nw×nw work matrix. +// t and ldt represent an nw×nh work matrix, and nh must be at least nw. +// wv and ldwv represent an nv×nw work matrix. +// +// work must have length at least lwork and lwork must be at least max(1,2*nw), +// otherwise Dlaqr23 will panic. Larger values of lwork may result in greater +// efficiency. On return, work[0] will contain the optimal value of lwork. +// +// If lwork is -1, instead of performing Dlaqr23, the function only estimates the +// optimal workspace size and stores it into work[0]. Neither h nor z are +// accessed. +// +// recur is the non-negative recursion depth. For recur > 0, Dlaqr23 behaves +// as DLAQR3, for recur == 0 it behaves as DLAQR2. +// +// On return, ns and nd will contain respectively the number of unconverged +// (i.e., approximate) eigenvalues and converged eigenvalues that are stored in +// sr and si. +// +// On return, the real and imaginary parts of approximate eigenvalues that may +// be used for shifts will be stored respectively in sr[kbot-nd-ns+1:kbot-nd+1] +// and si[kbot-nd-ns+1:kbot-nd+1]. +// +// On return, the real and imaginary parts of converged eigenvalues will be +// stored respectively in sr[kbot-nd+1:kbot+1] and si[kbot-nd+1:kbot+1]. +// +// References: +// [1] K. Braman, R. Byers, R. Mathias. The Multishift QR Algorithm. Part II: +// Aggressive Early Deflation. SIAM J. Matrix Anal. Appl 23(4) (2002), pp. 948—973 +// URL: http://dx.doi.org/10.1137/S0895479801384585 +// +func (impl Implementation) Dlaqr23(wantt, wantz bool, n, ktop, kbot, nw int, h []float64, ldh int, iloz, ihiz int, z []float64, ldz int, sr, si []float64, v []float64, ldv int, nh int, t []float64, ldt int, nv int, wv []float64, ldwv int, work []float64, lwork int, recur int) (ns, nd int) { + switch { + case n < 0: + panic(nLT0) + case ktop < 0 || max(0, n-1) < ktop: + panic(badKtop) + case kbot < min(ktop, n-1) || n <= kbot: + panic(badKbot) + case nw < 0 || kbot-ktop+1+1 < nw: + panic(badNw) + case ldh < max(1, n): + panic(badLdH) + case wantz && (iloz < 0 || ktop < iloz): + panic(badIloz) + case wantz && (ihiz < kbot || n <= ihiz): + panic(badIhiz) + case ldz < 1, wantz && ldz < n: + panic(badLdZ) + case ldv < max(1, nw): + panic(badLdV) + case nh < nw: + panic(badNh) + case ldt < max(1, nh): + panic(badLdT) + case nv < 0: + panic(nvLT0) + case ldwv < max(1, nw): + panic(badLdWV) + case lwork < max(1, 2*nw) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + case recur < 0: + panic(recurLT0) + } + + // Quick return for zero window size. + if nw == 0 { + work[0] = 1 + return 0, 0 + } + + // LAPACK code does not enforce the documented behavior + // nw <= kbot-ktop+1 + // but we do (we panic above). 
+ jw := nw + lwkopt := max(1, 2*nw) + if jw > 2 { + // Workspace query call to Dgehrd. + impl.Dgehrd(jw, 0, jw-2, t, ldt, work, work, -1) + lwk1 := int(work[0]) + // Workspace query call to Dormhr. + impl.Dormhr(blas.Right, blas.NoTrans, jw, jw, 0, jw-2, t, ldt, work, v, ldv, work, -1) + lwk2 := int(work[0]) + if recur > 0 { + // Workspace query call to Dlaqr04. + impl.Dlaqr04(true, true, jw, 0, jw-1, t, ldt, sr, si, 0, jw-1, v, ldv, work, -1, recur-1) + lwk3 := int(work[0]) + // Optimal workspace. + lwkopt = max(jw+max(lwk1, lwk2), lwk3) + } else { + // Optimal workspace. + lwkopt = jw + max(lwk1, lwk2) + } + } + // Quick return in case of workspace query. + if lwork == -1 { + work[0] = float64(lwkopt) + return 0, 0 + } + + // Check input slices only if not doing workspace query. + switch { + case len(h) < (n-1)*ldh+n: + panic(shortH) + case len(v) < (nw-1)*ldv+nw: + panic(shortV) + case len(t) < (nw-1)*ldt+nh: + panic(shortT) + case len(wv) < (nv-1)*ldwv+nw: + panic(shortWV) + case wantz && len(z) < (n-1)*ldz+n: + panic(shortZ) + case len(sr) != kbot+1: + panic(badLenSr) + case len(si) != kbot+1: + panic(badLenSi) + case ktop > 0 && h[ktop*ldh+ktop-1] != 0: + panic(notIsolated) + case kbot+1 < n && h[(kbot+1)*ldh+kbot] != 0: + panic(notIsolated) + } + + // Machine constants. + ulp := dlamchP + smlnum := float64(n) / ulp * dlamchS + + // Setup deflation window. + var s float64 + kwtop := kbot - jw + 1 + if kwtop != ktop { + s = h[kwtop*ldh+kwtop-1] + } + if kwtop == kbot { + // 1×1 deflation window. + sr[kwtop] = h[kwtop*ldh+kwtop] + si[kwtop] = 0 + ns = 1 + nd = 0 + if math.Abs(s) <= math.Max(smlnum, ulp*math.Abs(h[kwtop*ldh+kwtop])) { + ns = 0 + nd = 1 + if kwtop > ktop { + h[kwtop*ldh+kwtop-1] = 0 + } + } + work[0] = 1 + return ns, nd + } + + // Convert to spike-triangular form. In case of a rare QR failure, this + // routine continues to do aggressive early deflation using that part of + // the deflation window that converged using infqr here and there to + // keep track. + impl.Dlacpy(blas.Upper, jw, jw, h[kwtop*ldh+kwtop:], ldh, t, ldt) + bi := blas64.Implementation() + bi.Dcopy(jw-1, h[(kwtop+1)*ldh+kwtop:], ldh+1, t[ldt:], ldt+1) + impl.Dlaset(blas.All, jw, jw, 0, 1, v, ldv) + nmin := impl.Ilaenv(12, "DLAQR3", "SV", jw, 0, jw-1, lwork) + var infqr int + if recur > 0 && jw > nmin { + infqr = impl.Dlaqr04(true, true, jw, 0, jw-1, t, ldt, sr[kwtop:], si[kwtop:], 0, jw-1, v, ldv, work, lwork, recur-1) + } else { + infqr = impl.Dlahqr(true, true, jw, 0, jw-1, t, ldt, sr[kwtop:], si[kwtop:], 0, jw-1, v, ldv) + } + // Note that ilo == 0 which conveniently coincides with the success + // value of infqr, that is, infqr as an index always points to the first + // converged eigenvalue. + + // Dtrexc needs a clean margin near the diagonal. + for j := 0; j < jw-3; j++ { + t[(j+2)*ldt+j] = 0 + t[(j+3)*ldt+j] = 0 + } + if jw >= 3 { + t[(jw-1)*ldt+jw-3] = 0 + } + + ns = jw + ilst := infqr + // Deflation detection loop. + for ilst < ns { + bulge := false + if ns >= 2 { + bulge = t[(ns-1)*ldt+ns-2] != 0 + } + if !bulge { + // Real eigenvalue. + abst := math.Abs(t[(ns-1)*ldt+ns-1]) + if abst == 0 { + abst = math.Abs(s) + } + if math.Abs(s*v[ns-1]) <= math.Max(smlnum, ulp*abst) { + // Deflatable. + ns-- + } else { + // Undeflatable, move it up out of the way. + // Dtrexc can not fail in this case. + _, ilst, _ = impl.Dtrexc(lapack.UpdateSchur, jw, t, ldt, v, ldv, ns-1, ilst, work) + ilst++ + } + continue + } + // Complex conjugate pair. 
+ abst := math.Abs(t[(ns-1)*ldt+ns-1]) + math.Sqrt(math.Abs(t[(ns-1)*ldt+ns-2]))*math.Sqrt(math.Abs(t[(ns-2)*ldt+ns-1])) + if abst == 0 { + abst = math.Abs(s) + } + if math.Max(math.Abs(s*v[ns-1]), math.Abs(s*v[ns-2])) <= math.Max(smlnum, ulp*abst) { + // Deflatable. + ns -= 2 + } else { + // Undeflatable, move them up out of the way. + // Dtrexc does the right thing with ilst in case of a + // rare exchange failure. + _, ilst, _ = impl.Dtrexc(lapack.UpdateSchur, jw, t, ldt, v, ldv, ns-1, ilst, work) + ilst += 2 + } + } + + // Return to Hessenberg form. + if ns == 0 { + s = 0 + } + if ns < jw { + // Sorting diagonal blocks of T improves accuracy for graded + // matrices. Bubble sort deals well with exchange failures. + sorted := false + i := ns + for !sorted { + sorted = true + kend := i - 1 + i = infqr + var k int + if i == ns-1 || t[(i+1)*ldt+i] == 0 { + k = i + 1 + } else { + k = i + 2 + } + for k <= kend { + var evi float64 + if k == i+1 { + evi = math.Abs(t[i*ldt+i]) + } else { + evi = math.Abs(t[i*ldt+i]) + math.Sqrt(math.Abs(t[(i+1)*ldt+i]))*math.Sqrt(math.Abs(t[i*ldt+i+1])) + } + + var evk float64 + if k == kend || t[(k+1)*ldt+k] == 0 { + evk = math.Abs(t[k*ldt+k]) + } else { + evk = math.Abs(t[k*ldt+k]) + math.Sqrt(math.Abs(t[(k+1)*ldt+k]))*math.Sqrt(math.Abs(t[k*ldt+k+1])) + } + + if evi >= evk { + i = k + } else { + sorted = false + _, ilst, ok := impl.Dtrexc(lapack.UpdateSchur, jw, t, ldt, v, ldv, i, k, work) + if ok { + i = ilst + } else { + i = k + } + } + if i == kend || t[(i+1)*ldt+i] == 0 { + k = i + 1 + } else { + k = i + 2 + } + } + } + } + + // Restore shift/eigenvalue array from T. + for i := jw - 1; i >= infqr; { + if i == infqr || t[i*ldt+i-1] == 0 { + sr[kwtop+i] = t[i*ldt+i] + si[kwtop+i] = 0 + i-- + continue + } + aa := t[(i-1)*ldt+i-1] + bb := t[(i-1)*ldt+i] + cc := t[i*ldt+i-1] + dd := t[i*ldt+i] + _, _, _, _, sr[kwtop+i-1], si[kwtop+i-1], sr[kwtop+i], si[kwtop+i], _, _ = impl.Dlanv2(aa, bb, cc, dd) + i -= 2 + } + + if ns < jw || s == 0 { + if ns > 1 && s != 0 { + // Reflect spike back into lower triangle. + bi.Dcopy(ns, v[:ns], 1, work[:ns], 1) + _, tau := impl.Dlarfg(ns, work[0], work[1:ns], 1) + work[0] = 1 + impl.Dlaset(blas.Lower, jw-2, jw-2, 0, 0, t[2*ldt:], ldt) + impl.Dlarf(blas.Left, ns, jw, work[:ns], 1, tau, t, ldt, work[jw:]) + impl.Dlarf(blas.Right, ns, ns, work[:ns], 1, tau, t, ldt, work[jw:]) + impl.Dlarf(blas.Right, jw, ns, work[:ns], 1, tau, v, ldv, work[jw:]) + impl.Dgehrd(jw, 0, ns-1, t, ldt, work[:jw-1], work[jw:], lwork-jw) + } + + // Copy updated reduced window into place. + if kwtop > 0 { + h[kwtop*ldh+kwtop-1] = s * v[0] + } + impl.Dlacpy(blas.Upper, jw, jw, t, ldt, h[kwtop*ldh+kwtop:], ldh) + bi.Dcopy(jw-1, t[ldt:], ldt+1, h[(kwtop+1)*ldh+kwtop:], ldh+1) + + // Accumulate orthogonal matrix in order to update H and Z, if + // requested. + if ns > 1 && s != 0 { + // work[:ns-1] contains the elementary reflectors stored + // by a call to Dgehrd above. + impl.Dormhr(blas.Right, blas.NoTrans, jw, ns, 0, ns-1, + t, ldt, work[:ns-1], v, ldv, work[jw:], lwork-jw) + } + + // Update vertical slab in H. + var ltop int + if !wantt { + ltop = ktop + } + for krow := ltop; krow < kwtop; krow += nv { + kln := min(nv, kwtop-krow) + bi.Dgemm(blas.NoTrans, blas.NoTrans, kln, jw, jw, + 1, h[krow*ldh+kwtop:], ldh, v, ldv, + 0, wv, ldwv) + impl.Dlacpy(blas.All, kln, jw, wv, ldwv, h[krow*ldh+kwtop:], ldh) + } + + // Update horizontal slab in H. 
+ if wantt { + for kcol := kbot + 1; kcol < n; kcol += nh { + kln := min(nh, n-kcol) + bi.Dgemm(blas.Trans, blas.NoTrans, jw, kln, jw, + 1, v, ldv, h[kwtop*ldh+kcol:], ldh, + 0, t, ldt) + impl.Dlacpy(blas.All, jw, kln, t, ldt, h[kwtop*ldh+kcol:], ldh) + } + } + + // Update vertical slab in Z. + if wantz { + for krow := iloz; krow <= ihiz; krow += nv { + kln := min(nv, ihiz-krow+1) + bi.Dgemm(blas.NoTrans, blas.NoTrans, kln, jw, jw, + 1, z[krow*ldz+kwtop:], ldz, v, ldv, + 0, wv, ldwv) + impl.Dlacpy(blas.All, kln, jw, wv, ldwv, z[krow*ldz+kwtop:], ldz) + } + } + } + + // The number of deflations. + nd = jw - ns + // Shifts are converged eigenvalues that could not be deflated. + // Subtracting infqr from the spike length takes care of the case of a + // rare QR failure while calculating eigenvalues of the deflation + // window. + ns -= infqr + work[0] = float64(lwkopt) + return ns, nd +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr5.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr5.go new file mode 100644 index 0000000..c198f22 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaqr5.go @@ -0,0 +1,644 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlaqr5 performs a single small-bulge multi-shift QR sweep on an isolated +// block of a Hessenberg matrix. +// +// wantt and wantz determine whether the quasi-triangular Schur factor and the +// orthogonal Schur factor, respectively, will be computed. +// +// kacc22 specifies the computation mode of far-from-diagonal orthogonal +// updates. Permitted values are: +// 0: Dlaqr5 will not accumulate reflections and will not use matrix-matrix +// multiply to update far-from-diagonal matrix entries. +// 1: Dlaqr5 will accumulate reflections and use matrix-matrix multiply to +// update far-from-diagonal matrix entries. +// 2: Dlaqr5 will accumulate reflections, use matrix-matrix multiply to update +// far-from-diagonal matrix entries, and take advantage of 2×2 block +// structure during matrix multiplies. +// For other values of kacc2 Dlaqr5 will panic. +// +// n is the order of the Hessenberg matrix H. +// +// ktop and kbot are indices of the first and last row and column of an isolated +// diagonal block upon which the QR sweep will be applied. It must hold that +// ktop == 0, or 0 < ktop <= n-1 and H[ktop, ktop-1] == 0, and +// kbot == n-1, or 0 <= kbot < n-1 and H[kbot+1, kbot] == 0, +// otherwise Dlaqr5 will panic. +// +// nshfts is the number of simultaneous shifts. It must be positive and even, +// otherwise Dlaqr5 will panic. +// +// sr and si contain the real and imaginary parts, respectively, of the shifts +// of origin that define the multi-shift QR sweep. On return both slices may be +// reordered by Dlaqr5. Their length must be equal to nshfts, otherwise Dlaqr5 +// will panic. +// +// h and ldh represent the Hessenberg matrix H of size n×n. On return +// multi-shift QR sweep with shifts sr+i*si has been applied to the isolated +// diagonal block in rows and columns ktop through kbot, inclusive. +// +// iloz and ihiz specify the rows of Z to which transformations will be applied +// if wantz is true. It must hold that 0 <= iloz <= ihiz < n, otherwise Dlaqr5 +// will panic. +// +// z and ldz represent the matrix Z of size n×n. 
If wantz is true, the QR sweep +// orthogonal similarity transformation is accumulated into +// z[iloz:ihiz,iloz:ihiz] from the right, otherwise z not referenced. +// +// v and ldv represent an auxiliary matrix V of size (nshfts/2)×3. Note that V +// is transposed with respect to the reference netlib implementation. +// +// u and ldu represent an auxiliary matrix of size (3*nshfts-3)×(3*nshfts-3). +// +// wh and ldwh represent an auxiliary matrix of size (3*nshfts-3)×nh. +// +// wv and ldwv represent an auxiliary matrix of size nv×(3*nshfts-3). +// +// Dlaqr5 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaqr5(wantt, wantz bool, kacc22 int, n, ktop, kbot, nshfts int, sr, si []float64, h []float64, ldh int, iloz, ihiz int, z []float64, ldz int, v []float64, ldv int, u []float64, ldu int, nv int, wv []float64, ldwv int, nh int, wh []float64, ldwh int) { + switch { + case kacc22 != 0 && kacc22 != 1 && kacc22 != 2: + panic(badKacc22) + case n < 0: + panic(nLT0) + case ktop < 0 || n <= ktop: + panic(badKtop) + case kbot < 0 || n <= kbot: + panic(badKbot) + + case nshfts < 0: + panic(nshftsLT0) + case nshfts&0x1 != 0: + panic(nshftsOdd) + case len(sr) != nshfts: + panic(badLenSr) + case len(si) != nshfts: + panic(badLenSi) + + case ldh < max(1, n): + panic(badLdH) + case len(h) < (n-1)*ldh+n: + panic(shortH) + + case wantz && ihiz >= n: + panic(badIhiz) + case wantz && iloz < 0 || ihiz < iloz: + panic(badIloz) + case ldz < 1, wantz && ldz < n: + panic(badLdZ) + case wantz && len(z) < (n-1)*ldz+n: + panic(shortZ) + + case ldv < 3: + // V is transposed w.r.t. reference lapack. + panic(badLdV) + case len(v) < (nshfts/2-1)*ldv+3: + panic(shortV) + + case ldu < max(1, 3*nshfts-3): + panic(badLdU) + case len(u) < (3*nshfts-3-1)*ldu+3*nshfts-3: + panic(shortU) + + case nv < 0: + panic(nvLT0) + case ldwv < max(1, 3*nshfts-3): + panic(badLdWV) + case len(wv) < (nv-1)*ldwv+3*nshfts-3: + panic(shortWV) + + case nh < 0: + panic(nhLT0) + case ldwh < max(1, nh): + panic(badLdWH) + case len(wh) < (3*nshfts-3-1)*ldwh+nh: + panic(shortWH) + + case ktop > 0 && h[ktop*ldh+ktop-1] != 0: + panic(notIsolated) + case kbot < n-1 && h[(kbot+1)*ldh+kbot] != 0: + panic(notIsolated) + } + + // If there are no shifts, then there is nothing to do. + if nshfts < 2 { + return + } + // If the active block is empty or 1×1, then there is nothing to do. + if ktop >= kbot { + return + } + + // Shuffle shifts into pairs of real shifts and pairs of complex + // conjugate shifts assuming complex conjugate shifts are already + // adjacent to one another. + for i := 0; i < nshfts-2; i += 2 { + if si[i] == -si[i+1] { + continue + } + sr[i], sr[i+1], sr[i+2] = sr[i+1], sr[i+2], sr[i] + si[i], si[i+1], si[i+2] = si[i+1], si[i+2], si[i] + } + + // Note: lapack says that nshfts must be even but allows it to be odd + // anyway. We panic above if nshfts is not even, so reducing it by one + // is unnecessary. The only caller Dlaqr04 uses only even nshfts. + // + // The original comment and code from lapack-3.6.0/SRC/dlaqr5.f:341: + // * ==== NSHFTS is supposed to be even, but if it is odd, + // * . then simply reduce it by one. The shuffle above + // * . ensures that the dropped shift is real and that + // * . the remaining shifts are paired. ==== + // * + // NS = NSHFTS - MOD( NSHFTS, 2 ) + ns := nshfts + + safmin := dlamchS + ulp := dlamchP + smlnum := safmin * float64(n) / ulp + + // Use accumulated reflections to update far-from-diagonal entries? 
+ accum := kacc22 == 1 || kacc22 == 2 + // If so, exploit the 2×2 block structure? + blk22 := ns > 2 && kacc22 == 2 + + // Clear trash. + if ktop+2 <= kbot { + h[(ktop+2)*ldh+ktop] = 0 + } + + // nbmps = number of 2-shift bulges in the chain. + nbmps := ns / 2 + + // kdu = width of slab. + kdu := 6*nbmps - 3 + + // Create and chase chains of nbmps bulges. + for incol := 3*(1-nbmps) + ktop - 1; incol <= kbot-2; incol += 3*nbmps - 2 { + ndcol := incol + kdu + if accum { + impl.Dlaset(blas.All, kdu, kdu, 0, 1, u, ldu) + } + + // Near-the-diagonal bulge chase. The following loop performs + // the near-the-diagonal part of a small bulge multi-shift QR + // sweep. Each 6*nbmps-2 column diagonal chunk extends from + // column incol to column ndcol (including both column incol and + // column ndcol). The following loop chases a 3*nbmps column + // long chain of nbmps bulges 3*nbmps-2 columns to the right. + // (incol may be less than ktop and ndcol may be greater than + // kbot indicating phantom columns from which to chase bulges + // before they are actually introduced or to which to chase + // bulges beyond column kbot.) + for krcol := incol; krcol <= min(incol+3*nbmps-3, kbot-2); krcol++ { + // Bulges number mtop to mbot are active double implicit + // shift bulges. There may or may not also be small 2×2 + // bulge, if there is room. The inactive bulges (if any) + // must wait until the active bulges have moved down the + // diagonal to make room. The phantom matrix paradigm + // described above helps keep track. + + mtop := max(0, ((ktop-1)-krcol+2)/3) + mbot := min(nbmps, (kbot-krcol)/3) - 1 + m22 := mbot + 1 + bmp22 := (mbot < nbmps-1) && (krcol+3*m22 == kbot-2) + + // Generate reflections to chase the chain right one + // column. (The minimum value of k is ktop-1.) + for m := mtop; m <= mbot; m++ { + k := krcol + 3*m + if k == ktop-1 { + impl.Dlaqr1(3, h[ktop*ldh+ktop:], ldh, + sr[2*m], si[2*m], sr[2*m+1], si[2*m+1], + v[m*ldv:m*ldv+3]) + alpha := v[m*ldv] + _, v[m*ldv] = impl.Dlarfg(3, alpha, v[m*ldv+1:m*ldv+3], 1) + continue + } + beta := h[(k+1)*ldh+k] + v[m*ldv+1] = h[(k+2)*ldh+k] + v[m*ldv+2] = h[(k+3)*ldh+k] + beta, v[m*ldv] = impl.Dlarfg(3, beta, v[m*ldv+1:m*ldv+3], 1) + + // A bulge may collapse because of vigilant deflation or + // destructive underflow. In the underflow case, try the + // two-small-subdiagonals trick to try to reinflate the + // bulge. + if h[(k+3)*ldh+k] != 0 || h[(k+3)*ldh+k+1] != 0 || h[(k+3)*ldh+k+2] == 0 { + // Typical case: not collapsed (yet). + h[(k+1)*ldh+k] = beta + h[(k+2)*ldh+k] = 0 + h[(k+3)*ldh+k] = 0 + continue + } + + // Atypical case: collapsed. Attempt to reintroduce + // ignoring H[k+1,k] and H[k+2,k]. If the fill + // resulting from the new reflector is too large, + // then abandon it. Otherwise, use the new one. + var vt [3]float64 + impl.Dlaqr1(3, h[(k+1)*ldh+k+1:], ldh, sr[2*m], + si[2*m], sr[2*m+1], si[2*m+1], vt[:]) + alpha := vt[0] + _, vt[0] = impl.Dlarfg(3, alpha, vt[1:3], 1) + refsum := vt[0] * (h[(k+1)*ldh+k] + vt[1]*h[(k+2)*ldh+k]) + + dsum := math.Abs(h[k*ldh+k]) + math.Abs(h[(k+1)*ldh+k+1]) + math.Abs(h[(k+2)*ldh+k+2]) + if math.Abs(h[(k+2)*ldh+k]-refsum*vt[1])+math.Abs(refsum*vt[2]) > ulp*dsum { + // Starting a new bulge here would create + // non-negligible fill. Use the old one with + // trepidation. + h[(k+1)*ldh+k] = beta + h[(k+2)*ldh+k] = 0 + h[(k+3)*ldh+k] = 0 + continue + } else { + // Starting a new bulge here would create + // only negligible fill. Replace the old + // reflector with the new one. 
+ h[(k+1)*ldh+k] -= refsum + h[(k+2)*ldh+k] = 0 + h[(k+3)*ldh+k] = 0 + v[m*ldv] = vt[0] + v[m*ldv+1] = vt[1] + v[m*ldv+2] = vt[2] + } + } + + // Generate a 2×2 reflection, if needed. + if bmp22 { + k := krcol + 3*m22 + if k == ktop-1 { + impl.Dlaqr1(2, h[(k+1)*ldh+k+1:], ldh, + sr[2*m22], si[2*m22], sr[2*m22+1], si[2*m22+1], + v[m22*ldv:m22*ldv+2]) + beta := v[m22*ldv] + _, v[m22*ldv] = impl.Dlarfg(2, beta, v[m22*ldv+1:m22*ldv+2], 1) + } else { + beta := h[(k+1)*ldh+k] + v[m22*ldv+1] = h[(k+2)*ldh+k] + beta, v[m22*ldv] = impl.Dlarfg(2, beta, v[m22*ldv+1:m22*ldv+2], 1) + h[(k+1)*ldh+k] = beta + h[(k+2)*ldh+k] = 0 + } + } + + // Multiply H by reflections from the left. + var jbot int + switch { + case accum: + jbot = min(ndcol, kbot) + case wantt: + jbot = n - 1 + default: + jbot = kbot + } + for j := max(ktop, krcol); j <= jbot; j++ { + mend := min(mbot+1, (j-krcol+2)/3) - 1 + for m := mtop; m <= mend; m++ { + k := krcol + 3*m + refsum := v[m*ldv] * (h[(k+1)*ldh+j] + + v[m*ldv+1]*h[(k+2)*ldh+j] + v[m*ldv+2]*h[(k+3)*ldh+j]) + h[(k+1)*ldh+j] -= refsum + h[(k+2)*ldh+j] -= refsum * v[m*ldv+1] + h[(k+3)*ldh+j] -= refsum * v[m*ldv+2] + } + } + if bmp22 { + k := krcol + 3*m22 + for j := max(k+1, ktop); j <= jbot; j++ { + refsum := v[m22*ldv] * (h[(k+1)*ldh+j] + v[m22*ldv+1]*h[(k+2)*ldh+j]) + h[(k+1)*ldh+j] -= refsum + h[(k+2)*ldh+j] -= refsum * v[m22*ldv+1] + } + } + + // Multiply H by reflections from the right. Delay filling in the last row + // until the vigilant deflation check is complete. + var jtop int + switch { + case accum: + jtop = max(ktop, incol) + case wantt: + jtop = 0 + default: + jtop = ktop + } + for m := mtop; m <= mbot; m++ { + if v[m*ldv] == 0 { + continue + } + k := krcol + 3*m + for j := jtop; j <= min(kbot, k+3); j++ { + refsum := v[m*ldv] * (h[j*ldh+k+1] + + v[m*ldv+1]*h[j*ldh+k+2] + v[m*ldv+2]*h[j*ldh+k+3]) + h[j*ldh+k+1] -= refsum + h[j*ldh+k+2] -= refsum * v[m*ldv+1] + h[j*ldh+k+3] -= refsum * v[m*ldv+2] + } + if accum { + // Accumulate U. (If necessary, update Z later with an + // efficient matrix-matrix multiply.) + kms := k - incol + for j := max(0, ktop-incol-1); j < kdu; j++ { + refsum := v[m*ldv] * (u[j*ldu+kms] + + v[m*ldv+1]*u[j*ldu+kms+1] + v[m*ldv+2]*u[j*ldu+kms+2]) + u[j*ldu+kms] -= refsum + u[j*ldu+kms+1] -= refsum * v[m*ldv+1] + u[j*ldu+kms+2] -= refsum * v[m*ldv+2] + } + } else if wantz { + // U is not accumulated, so update Z now by multiplying by + // reflections from the right. + for j := iloz; j <= ihiz; j++ { + refsum := v[m*ldv] * (z[j*ldz+k+1] + + v[m*ldv+1]*z[j*ldz+k+2] + v[m*ldv+2]*z[j*ldz+k+3]) + z[j*ldz+k+1] -= refsum + z[j*ldz+k+2] -= refsum * v[m*ldv+1] + z[j*ldz+k+3] -= refsum * v[m*ldv+2] + } + } + } + + // Special case: 2×2 reflection (if needed). + if bmp22 && v[m22*ldv] != 0 { + k := krcol + 3*m22 + for j := jtop; j <= min(kbot, k+3); j++ { + refsum := v[m22*ldv] * (h[j*ldh+k+1] + v[m22*ldv+1]*h[j*ldh+k+2]) + h[j*ldh+k+1] -= refsum + h[j*ldh+k+2] -= refsum * v[m22*ldv+1] + } + if accum { + kms := k - incol + for j := max(0, ktop-incol-1); j < kdu; j++ { + refsum := v[m22*ldv] * (u[j*ldu+kms] + v[m22*ldv+1]*u[j*ldu+kms+1]) + u[j*ldu+kms] -= refsum + u[j*ldu+kms+1] -= refsum * v[m22*ldv+1] + } + } else if wantz { + for j := iloz; j <= ihiz; j++ { + refsum := v[m22*ldv] * (z[j*ldz+k+1] + v[m22*ldv+1]*z[j*ldz+k+2]) + z[j*ldz+k+1] -= refsum + z[j*ldz+k+2] -= refsum * v[m22*ldv+1] + } + } + } + + // Vigilant deflation check. 
+ mstart := mtop + if krcol+3*mstart < ktop { + mstart++ + } + mend := mbot + if bmp22 { + mend++ + } + if krcol == kbot-2 { + mend++ + } + for m := mstart; m <= mend; m++ { + k := min(kbot-1, krcol+3*m) + + // The following convergence test requires that the tradition + // small-compared-to-nearby-diagonals criterion and the Ahues & + // Tisseur (LAWN 122, 1997) criteria both be satisfied. The latter + // improves accuracy in some examples. Falling back on an alternate + // convergence criterion when tst1 or tst2 is zero (as done here) is + // traditional but probably unnecessary. + + if h[(k+1)*ldh+k] == 0 { + continue + } + tst1 := math.Abs(h[k*ldh+k]) + math.Abs(h[(k+1)*ldh+k+1]) + if tst1 == 0 { + if k >= ktop+1 { + tst1 += math.Abs(h[k*ldh+k-1]) + } + if k >= ktop+2 { + tst1 += math.Abs(h[k*ldh+k-2]) + } + if k >= ktop+3 { + tst1 += math.Abs(h[k*ldh+k-3]) + } + if k <= kbot-2 { + tst1 += math.Abs(h[(k+2)*ldh+k+1]) + } + if k <= kbot-3 { + tst1 += math.Abs(h[(k+3)*ldh+k+1]) + } + if k <= kbot-4 { + tst1 += math.Abs(h[(k+4)*ldh+k+1]) + } + } + if math.Abs(h[(k+1)*ldh+k]) <= math.Max(smlnum, ulp*tst1) { + h12 := math.Max(math.Abs(h[(k+1)*ldh+k]), math.Abs(h[k*ldh+k+1])) + h21 := math.Min(math.Abs(h[(k+1)*ldh+k]), math.Abs(h[k*ldh+k+1])) + h11 := math.Max(math.Abs(h[(k+1)*ldh+k+1]), math.Abs(h[k*ldh+k]-h[(k+1)*ldh+k+1])) + h22 := math.Min(math.Abs(h[(k+1)*ldh+k+1]), math.Abs(h[k*ldh+k]-h[(k+1)*ldh+k+1])) + scl := h11 + h12 + tst2 := h22 * (h11 / scl) + if tst2 == 0 || h21*(h12/scl) <= math.Max(smlnum, ulp*tst2) { + h[(k+1)*ldh+k] = 0 + } + } + } + + // Fill in the last row of each bulge. + mend = min(nbmps, (kbot-krcol-1)/3) - 1 + for m := mtop; m <= mend; m++ { + k := krcol + 3*m + refsum := v[m*ldv] * v[m*ldv+2] * h[(k+4)*ldh+k+3] + h[(k+4)*ldh+k+1] = -refsum + h[(k+4)*ldh+k+2] = -refsum * v[m*ldv+1] + h[(k+4)*ldh+k+3] -= refsum * v[m*ldv+2] + } + } + + // Use U (if accumulated) to update far-from-diagonal entries in H. + // If required, use U to update Z as well. + if !accum { + continue + } + var jtop, jbot int + if wantt { + jtop = 0 + jbot = n - 1 + } else { + jtop = ktop + jbot = kbot + } + bi := blas64.Implementation() + if !blk22 || incol < ktop || kbot < ndcol || ns <= 2 { + // Updates not exploiting the 2×2 block structure of U. k0 and nu keep track + // of the location and size of U in the special cases of introducing bulges + // and chasing bulges off the bottom. In these special cases and in case the + // number of shifts is ns = 2, there is no 2×2 block structure to exploit. + + k0 := max(0, ktop-incol-1) + nu := kdu - max(0, ndcol-kbot) - k0 + + // Horizontal multiply. + for jcol := min(ndcol, kbot) + 1; jcol <= jbot; jcol += nh { + jlen := min(nh, jbot-jcol+1) + bi.Dgemm(blas.Trans, blas.NoTrans, nu, jlen, nu, + 1, u[k0*ldu+k0:], ldu, + h[(incol+k0+1)*ldh+jcol:], ldh, + 0, wh, ldwh) + impl.Dlacpy(blas.All, nu, jlen, wh, ldwh, h[(incol+k0+1)*ldh+jcol:], ldh) + } + + // Vertical multiply. + for jrow := jtop; jrow <= max(ktop, incol)-1; jrow += nv { + jlen := min(nv, max(ktop, incol)-jrow) + bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, nu, nu, + 1, h[jrow*ldh+incol+k0+1:], ldh, + u[k0*ldu+k0:], ldu, + 0, wv, ldwv) + impl.Dlacpy(blas.All, jlen, nu, wv, ldwv, h[jrow*ldh+incol+k0+1:], ldh) + } + + // Z multiply (also vertical). 
+ if wantz { + for jrow := iloz; jrow <= ihiz; jrow += nv { + jlen := min(nv, ihiz-jrow+1) + bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, nu, nu, + 1, z[jrow*ldz+incol+k0+1:], ldz, + u[k0*ldu+k0:], ldu, + 0, wv, ldwv) + impl.Dlacpy(blas.All, jlen, nu, wv, ldwv, z[jrow*ldz+incol+k0+1:], ldz) + } + } + + continue + } + + // Updates exploiting U's 2×2 block structure. + + // i2, i4, j2, j4 are the last rows and columns of the blocks. + i2 := (kdu + 1) / 2 + i4 := kdu + j2 := i4 - i2 + j4 := kdu + + // kzs and knz deal with the band of zeros along the diagonal of one of the + // triangular blocks. + kzs := (j4 - j2) - (ns + 1) + knz := ns + 1 + + // Horizontal multiply. + for jcol := min(ndcol, kbot) + 1; jcol <= jbot; jcol += nh { + jlen := min(nh, jbot-jcol+1) + + // Copy bottom of H to top+kzs of scratch (the first kzs + // rows get multiplied by zero). + impl.Dlacpy(blas.All, knz, jlen, h[(incol+1+j2)*ldh+jcol:], ldh, wh[kzs*ldwh:], ldwh) + + // Multiply by U21^T. + impl.Dlaset(blas.All, kzs, jlen, 0, 0, wh, ldwh) + bi.Dtrmm(blas.Left, blas.Upper, blas.Trans, blas.NonUnit, knz, jlen, + 1, u[j2*ldu+kzs:], ldu, wh[kzs*ldwh:], ldwh) + + // Multiply top of H by U11^T. + bi.Dgemm(blas.Trans, blas.NoTrans, i2, jlen, j2, + 1, u, ldu, h[(incol+1)*ldh+jcol:], ldh, + 1, wh, ldwh) + + // Copy top of H to bottom of WH. + impl.Dlacpy(blas.All, j2, jlen, h[(incol+1)*ldh+jcol:], ldh, wh[i2*ldwh:], ldwh) + + // Multiply by U21^T. + bi.Dtrmm(blas.Left, blas.Lower, blas.Trans, blas.NonUnit, j2, jlen, + 1, u[i2:], ldu, wh[i2*ldwh:], ldwh) + + // Multiply by U22. + bi.Dgemm(blas.Trans, blas.NoTrans, i4-i2, jlen, j4-j2, + 1, u[j2*ldu+i2:], ldu, h[(incol+1+j2)*ldh+jcol:], ldh, + 1, wh[i2*ldwh:], ldwh) + + // Copy it back. + impl.Dlacpy(blas.All, kdu, jlen, wh, ldwh, h[(incol+1)*ldh+jcol:], ldh) + } + + // Vertical multiply. + for jrow := jtop; jrow <= max(incol, ktop)-1; jrow += nv { + jlen := min(nv, max(incol, ktop)-jrow) + + // Copy right of H to scratch (the first kzs columns get multiplied + // by zero). + impl.Dlacpy(blas.All, jlen, knz, h[jrow*ldh+incol+1+j2:], ldh, wv[kzs:], ldwv) + + // Multiply by U21. + impl.Dlaset(blas.All, jlen, kzs, 0, 0, wv, ldwv) + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.NonUnit, jlen, knz, + 1, u[j2*ldu+kzs:], ldu, wv[kzs:], ldwv) + + // Multiply by U11. + bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, i2, j2, + 1, h[jrow*ldh+incol+1:], ldh, u, ldu, + 1, wv, ldwv) + + // Copy left of H to right of scratch. + impl.Dlacpy(blas.All, jlen, j2, h[jrow*ldh+incol+1:], ldh, wv[i2:], ldwv) + + // Multiply by U21. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.NonUnit, jlen, i4-i2, + 1, u[i2:], ldu, wv[i2:], ldwv) + + // Multiply by U22. + bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, i4-i2, j4-j2, + 1, h[jrow*ldh+incol+1+j2:], ldh, u[j2*ldu+i2:], ldu, + 1, wv[i2:], ldwv) + + // Copy it back. + impl.Dlacpy(blas.All, jlen, kdu, wv, ldwv, h[jrow*ldh+incol+1:], ldh) + } + + if !wantz { + continue + } + // Multiply Z (also vertical). + for jrow := iloz; jrow <= ihiz; jrow += nv { + jlen := min(nv, ihiz-jrow+1) + + // Copy right of Z to left of scratch (first kzs columns get + // multiplied by zero). + impl.Dlacpy(blas.All, jlen, knz, z[jrow*ldz+incol+1+j2:], ldz, wv[kzs:], ldwv) + + // Multiply by U12. + impl.Dlaset(blas.All, jlen, kzs, 0, 0, wv, ldwv) + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.NonUnit, jlen, knz, + 1, u[j2*ldu+kzs:], ldu, wv[kzs:], ldwv) + + // Multiply by U11. 
+ bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, i2, j2, + 1, z[jrow*ldz+incol+1:], ldz, u, ldu, + 1, wv, ldwv) + + // Copy left of Z to right of scratch. + impl.Dlacpy(blas.All, jlen, j2, z[jrow*ldz+incol+1:], ldz, wv[i2:], ldwv) + + // Multiply by U21. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.NonUnit, jlen, i4-i2, + 1, u[i2:], ldu, wv[i2:], ldwv) + + // Multiply by U22. + bi.Dgemm(blas.NoTrans, blas.NoTrans, jlen, i4-i2, j4-j2, + 1, z[jrow*ldz+incol+1+j2:], ldz, u[j2*ldu+i2:], ldu, + 1, wv[i2:], ldwv) + + // Copy the result back to Z. + impl.Dlacpy(blas.All, jlen, kdu, wv, ldwv, z[jrow*ldz+incol+1:], ldz) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlarf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarf.go new file mode 100644 index 0000000..9fc97a3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarf.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlarf applies an elementary reflector to a general rectangular matrix c. +// This computes +// c = h * c if side == Left +// c = c * h if side == right +// where +// h = 1 - tau * v * v^T +// and c is an m * n matrix. +// +// work is temporary storage of length at least m if side == Left and at least +// n if side == Right. This function will panic if this length requirement is not met. +// +// Dlarf is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlarf(side blas.Side, m, n int, v []float64, incv int, tau float64, c []float64, ldc int, work []float64) { + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case incv == 0: + panic(zeroIncV) + case ldc < max(1, n): + panic(badLdC) + } + + if m == 0 || n == 0 { + return + } + + applyleft := side == blas.Left + lenV := n + if applyleft { + lenV = m + } + + switch { + case len(v) < 1+(lenV-1)*abs(incv): + panic(shortV) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case (applyleft && len(work) < n) || (!applyleft && len(work) < m): + panic(shortWork) + } + + lastv := 0 // last non-zero element of v + lastc := 0 // last non-zero row/column of c + if tau != 0 { + var i int + if applyleft { + lastv = m - 1 + } else { + lastv = n - 1 + } + if incv > 0 { + i = lastv * incv + } + + // Look for the last non-zero row in v. + for lastv >= 0 && v[i] == 0 { + lastv-- + i -= incv + } + if applyleft { + // Scan for the last non-zero column in C[0:lastv, :] + lastc = impl.Iladlc(lastv+1, n, c, ldc) + } else { + // Scan for the last non-zero row in C[:, 0:lastv] + lastc = impl.Iladlr(m, lastv+1, c, ldc) + } + } + if lastv == -1 || lastc == -1 { + return + } + // Sometimes 1-indexing is nicer ... + bi := blas64.Implementation() + if applyleft { + // Form H * C + // w[0:lastc+1] = c[1:lastv+1, 1:lastc+1]^T * v[1:lastv+1,1] + bi.Dgemv(blas.Trans, lastv+1, lastc+1, 1, c, ldc, v, incv, 0, work, 1) + // c[0: lastv, 0: lastc] = c[...] - w[0:lastv, 1] * v[1:lastc, 1]^T + bi.Dger(lastv+1, lastc+1, -tau, v, incv, work, 1, c, ldc) + return + } + // Form C*H + // w[0:lastc+1,1] := c[0:lastc+1,0:lastv+1] * v[0:lastv+1,1] + bi.Dgemv(blas.NoTrans, lastc+1, lastv+1, 1, c, ldc, v, incv, 0, work, 1) + // c[0:lastc+1,0:lastv+1] = c[...] 
- w[0:lastc+1,0] * v[0:lastv+1,0]^T + bi.Dger(lastc+1, lastv+1, -tau, work, 1, v, incv, c, ldc) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfb.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfb.go new file mode 100644 index 0000000..4dd8e06 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfb.go @@ -0,0 +1,449 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dlarfb applies a block reflector to a matrix. +// +// In the call to Dlarfb, the mxn c is multiplied by the implicitly defined matrix h as follows: +// c = h * c if side == Left and trans == NoTrans +// c = c * h if side == Right and trans == NoTrans +// c = h^T * c if side == Left and trans == Trans +// c = c * h^T if side == Right and trans == Trans +// h is a product of elementary reflectors. direct sets the direction of multiplication +// h = h_1 * h_2 * ... * h_k if direct == Forward +// h = h_k * h_k-1 * ... * h_1 if direct == Backward +// The combination of direct and store defines the orientation of the elementary +// reflectors. In all cases the ones on the diagonal are implicitly represented. +// +// If direct == lapack.Forward and store == lapack.ColumnWise +// V = [ 1 ] +// [v1 1 ] +// [v1 v2 1] +// [v1 v2 v3] +// [v1 v2 v3] +// If direct == lapack.Forward and store == lapack.RowWise +// V = [ 1 v1 v1 v1 v1] +// [ 1 v2 v2 v2] +// [ 1 v3 v3] +// If direct == lapack.Backward and store == lapack.ColumnWise +// V = [v1 v2 v3] +// [v1 v2 v3] +// [ 1 v2 v3] +// [ 1 v3] +// [ 1] +// If direct == lapack.Backward and store == lapack.RowWise +// V = [v1 v1 1 ] +// [v2 v2 v2 1 ] +// [v3 v3 v3 v3 1] +// An elementary reflector can be explicitly constructed by extracting the +// corresponding elements of v, placing a 1 where the diagonal would be, and +// placing zeros in the remaining elements. +// +// t is a k×k matrix containing the block reflector, and this function will panic +// if t is not of sufficient size. See Dlarft for more information. +// +// work is a temporary storage matrix with stride ldwork. +// work must be of size at least n×k side == Left and m×k if side == Right, and +// this function will panic if this size is not met. +// +// Dlarfb is an internal routine. It is exported for testing purposes. 
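+//
+// The following is a minimal, illustrative sketch (an editorial addition, not
+// part of the upstream Gonum source) of applying a block reflector with
+// Dlarft and Dlarfb. The sizes and the zero-valued slices are placeholder
+// assumptions only, standing in for the output of a QR-style factorization:
+//
+//  const m, n, k = 4, 3, 2
+//  v := make([]float64, m*k)   // elementary reflectors, Forward/ColumnWise
+//  tau := make([]float64, k)   // scalar factors of the reflectors
+//  c := make([]float64, m*n)   // m×n matrix C, row-major with ldc = n
+//  t := make([]float64, k*k)   // k×k triangular factor T
+//  work := make([]float64, n*k)
+//  var impl Implementation
+//  impl.Dlarft(lapack.Forward, lapack.ColumnWise, m, k, v, k, tau, t, k)
+//  // C = H * C, where H = I - V * T * V^T.
+//  impl.Dlarfb(blas.Left, blas.NoTrans, lapack.Forward, lapack.ColumnWise,
+//      m, n, k, v, k, t, k, c, n, work, k)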
+func (Implementation) Dlarfb(side blas.Side, trans blas.Transpose, direct lapack.Direct, store lapack.StoreV, m, n, k int, v []float64, ldv int, t []float64, ldt int, c []float64, ldc int, work []float64, ldwork int) { + nv := m + if side == blas.Right { + nv = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case trans != blas.Trans && trans != blas.NoTrans: + panic(badTrans) + case direct != lapack.Forward && direct != lapack.Backward: + panic(badDirect) + case store != lapack.ColumnWise && store != lapack.RowWise: + panic(badStoreV) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case store == lapack.ColumnWise && ldv < max(1, k): + panic(badLdV) + case store == lapack.RowWise && ldv < max(1, nv): + panic(badLdV) + case ldt < max(1, k): + panic(badLdT) + case ldc < max(1, n): + panic(badLdC) + case ldwork < max(1, k): + panic(badLdWork) + } + + if m == 0 || n == 0 { + return + } + + nw := n + if side == blas.Right { + nw = m + } + switch { + case store == lapack.ColumnWise && len(v) < (nv-1)*ldv+k: + panic(shortV) + case store == lapack.RowWise && len(v) < (k-1)*ldv+nv: + panic(shortV) + case len(t) < (k-1)*ldt+k: + panic(shortT) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case len(work) < (nw-1)*ldwork+k: + panic(shortWork) + } + + bi := blas64.Implementation() + + transt := blas.Trans + if trans == blas.Trans { + transt = blas.NoTrans + } + // TODO(btracey): This follows the original Lapack code where the + // elements are copied into the columns of the working array. The + // loops should go in the other direction so the data is written + // into the rows of work so the copy is not strided. A bigger change + // would be to replace work with work^T, but benchmarks would be + // needed to see if the change is merited. + if store == lapack.ColumnWise { + if direct == lapack.Forward { + // V1 is the first k rows of C. V2 is the remaining rows. + if side == blas.Left { + // W = C^T V = C1^T V1 + C2^T V2 (stored in work). + + // W = C1. + for j := 0; j < k; j++ { + bi.Dcopy(n, c[j*ldc:], 1, work[j:], ldwork) + } + // W = W * V1. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, + n, k, 1, + v, ldv, + work, ldwork) + if m > k { + // W = W + C2^T V2. + bi.Dgemm(blas.Trans, blas.NoTrans, n, k, m-k, + 1, c[k*ldc:], ldc, v[k*ldv:], ldv, + 1, work, ldwork) + } + // W = W * T^T or W * T. + bi.Dtrmm(blas.Right, blas.Upper, transt, blas.NonUnit, n, k, + 1, t, ldt, + work, ldwork) + // C -= V * W^T. + if m > k { + // C2 -= V2 * W^T. + bi.Dgemm(blas.NoTrans, blas.Trans, m-k, n, k, + -1, v[k*ldv:], ldv, work, ldwork, + 1, c[k*ldc:], ldc) + } + // W *= V1^T. + bi.Dtrmm(blas.Right, blas.Lower, blas.Trans, blas.Unit, n, k, + 1, v, ldv, + work, ldwork) + // C1 -= W^T. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < n; i++ { + for j := 0; j < k; j++ { + c[j*ldc+i] -= work[i*ldwork+j] + } + } + return + } + // Form C = C * H or C * H^T, where C = (C1 C2). + + // W = C1. + for i := 0; i < k; i++ { + bi.Dcopy(m, c[i:], ldc, work[i:], ldwork) + } + // W *= V1. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, m, k, + 1, v, ldv, + work, ldwork) + if n > k { + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, k, n-k, + 1, c[k:], ldc, v[k*ldv:], ldv, + 1, work, ldwork) + } + // W *= T or T^T. 
+ bi.Dtrmm(blas.Right, blas.Upper, trans, blas.NonUnit, m, k, + 1, t, ldt, + work, ldwork) + if n > k { + bi.Dgemm(blas.NoTrans, blas.Trans, m, n-k, k, + -1, work, ldwork, v[k*ldv:], ldv, + 1, c[k:], ldc) + } + // C -= W * V^T. + bi.Dtrmm(blas.Right, blas.Lower, blas.Trans, blas.Unit, m, k, + 1, v, ldv, + work, ldwork) + // C -= W. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + c[i*ldc+j] -= work[i*ldwork+j] + } + } + return + } + // V = (V1) + // = (V2) (last k rows) + // Where V2 is unit upper triangular. + if side == blas.Left { + // Form H * C or + // W = C^T V. + + // W = C2^T. + for j := 0; j < k; j++ { + bi.Dcopy(n, c[(m-k+j)*ldc:], 1, work[j:], ldwork) + } + // W *= V2. + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.Unit, n, k, + 1, v[(m-k)*ldv:], ldv, + work, ldwork) + if m > k { + // W += C1^T * V1. + bi.Dgemm(blas.Trans, blas.NoTrans, n, k, m-k, + 1, c, ldc, v, ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Lower, transt, blas.NonUnit, n, k, + 1, t, ldt, + work, ldwork) + // C -= V * W^T. + if m > k { + bi.Dgemm(blas.NoTrans, blas.Trans, m-k, n, k, + -1, v, ldv, work, ldwork, + 1, c, ldc) + } + // W *= V2^T. + bi.Dtrmm(blas.Right, blas.Upper, blas.Trans, blas.Unit, n, k, + 1, v[(m-k)*ldv:], ldv, + work, ldwork) + // C2 -= W^T. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < n; i++ { + for j := 0; j < k; j++ { + c[(m-k+j)*ldc+i] -= work[i*ldwork+j] + } + } + return + } + // Form C * H or C * H^T where C = (C1 C2). + // W = C * V. + + // W = C2. + for j := 0; j < k; j++ { + bi.Dcopy(m, c[n-k+j:], ldc, work[j:], ldwork) + } + + // W = W * V2. + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.Unit, m, k, + 1, v[(n-k)*ldv:], ldv, + work, ldwork) + if n > k { + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, k, n-k, + 1, c, ldc, v, ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Lower, trans, blas.NonUnit, m, k, + 1, t, ldt, + work, ldwork) + // C -= W * V^T. + if n > k { + // C1 -= W * V1^T. + bi.Dgemm(blas.NoTrans, blas.Trans, m, n-k, k, + -1, work, ldwork, v, ldv, + 1, c, ldc) + } + // W *= V2^T. + bi.Dtrmm(blas.Right, blas.Upper, blas.Trans, blas.Unit, m, k, + 1, v[(n-k)*ldv:], ldv, + work, ldwork) + // C2 -= W. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + c[i*ldc+n-k+j] -= work[i*ldwork+j] + } + } + return + } + // Store = Rowwise. + if direct == lapack.Forward { + // V = (V1 V2) where v1 is unit upper triangular. + if side == blas.Left { + // Form H * C or H^T * C where C = (C1; C2). + // W = C^T * V^T. + + // W = C1^T. + for j := 0; j < k; j++ { + bi.Dcopy(n, c[j*ldc:], 1, work[j:], ldwork) + } + // W *= V1^T. + bi.Dtrmm(blas.Right, blas.Upper, blas.Trans, blas.Unit, n, k, + 1, v, ldv, + work, ldwork) + if m > k { + bi.Dgemm(blas.Trans, blas.Trans, n, k, m-k, + 1, c[k*ldc:], ldc, v[k:], ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Upper, transt, blas.NonUnit, n, k, + 1, t, ldt, + work, ldwork) + // C -= V^T * W^T. + if m > k { + bi.Dgemm(blas.Trans, blas.Trans, m-k, n, k, + -1, v[k:], ldv, work, ldwork, + 1, c[k*ldc:], ldc) + } + // W *= V1. + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.Unit, n, k, + 1, v, ldv, + work, ldwork) + // C1 -= W^T. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < n; i++ { + for j := 0; j < k; j++ { + c[j*ldc+i] -= work[i*ldwork+j] + } + } + return + } + // Form C * H or C * H^T where C = (C1 C2). 
+ // W = C * V^T. + + // W = C1. + for j := 0; j < k; j++ { + bi.Dcopy(m, c[j:], ldc, work[j:], ldwork) + } + // W *= V1^T. + bi.Dtrmm(blas.Right, blas.Upper, blas.Trans, blas.Unit, m, k, + 1, v, ldv, + work, ldwork) + if n > k { + bi.Dgemm(blas.NoTrans, blas.Trans, m, k, n-k, + 1, c[k:], ldc, v[k:], ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Upper, trans, blas.NonUnit, m, k, + 1, t, ldt, + work, ldwork) + // C -= W * V. + if n > k { + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n-k, k, + -1, work, ldwork, v[k:], ldv, + 1, c[k:], ldc) + } + // W *= V1. + bi.Dtrmm(blas.Right, blas.Upper, blas.NoTrans, blas.Unit, m, k, + 1, v, ldv, + work, ldwork) + // C1 -= W. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + c[i*ldc+j] -= work[i*ldwork+j] + } + } + return + } + // V = (V1 V2) where V2 is the last k columns and is lower unit triangular. + if side == blas.Left { + // Form H * C or H^T C where C = (C1 ; C2). + // W = C^T * V^T. + + // W = C2^T. + for j := 0; j < k; j++ { + bi.Dcopy(n, c[(m-k+j)*ldc:], 1, work[j:], ldwork) + } + // W *= V2^T. + bi.Dtrmm(blas.Right, blas.Lower, blas.Trans, blas.Unit, n, k, + 1, v[m-k:], ldv, + work, ldwork) + if m > k { + bi.Dgemm(blas.Trans, blas.Trans, n, k, m-k, + 1, c, ldc, v, ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Lower, transt, blas.NonUnit, n, k, + 1, t, ldt, + work, ldwork) + // C -= V^T * W^T. + if m > k { + bi.Dgemm(blas.Trans, blas.Trans, m-k, n, k, + -1, v, ldv, work, ldwork, + 1, c, ldc) + } + // W *= V2. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, n, k, + 1, v[m-k:], ldv, + work, ldwork) + // C2 -= W^T. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < n; i++ { + for j := 0; j < k; j++ { + c[(m-k+j)*ldc+i] -= work[i*ldwork+j] + } + } + return + } + // Form C * H or C * H^T where C = (C1 C2). + // W = C * V^T. + // W = C2. + for j := 0; j < k; j++ { + bi.Dcopy(m, c[n-k+j:], ldc, work[j:], ldwork) + } + // W *= V2^T. + bi.Dtrmm(blas.Right, blas.Lower, blas.Trans, blas.Unit, m, k, + 1, v[n-k:], ldv, + work, ldwork) + if n > k { + bi.Dgemm(blas.NoTrans, blas.Trans, m, k, n-k, + 1, c, ldc, v, ldv, + 1, work, ldwork) + } + // W *= T or T^T. + bi.Dtrmm(blas.Right, blas.Lower, trans, blas.NonUnit, m, k, + 1, t, ldt, + work, ldwork) + // C -= W * V. + if n > k { + bi.Dgemm(blas.NoTrans, blas.NoTrans, m, n-k, k, + -1, work, ldwork, v, ldv, + 1, c, ldc) + } + // W *= V2. + bi.Dtrmm(blas.Right, blas.Lower, blas.NoTrans, blas.Unit, m, k, + 1, v[n-k:], ldv, + work, ldwork) + // C1 -= W. + // TODO(btracey): This should use blas.Axpy. + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + c[i*ldc+n-k+j] -= work[i*ldwork+j] + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfg.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfg.go new file mode 100644 index 0000000..e037fdd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfg.go @@ -0,0 +1,71 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlarfg generates an elementary reflector for a Householder matrix. It creates +// a real elementary reflector of order n such that +// H * (alpha) = (beta) +// ( x) ( 0) +// H^T * H = I +// H is represented in the form +// H = 1 - tau * (1; v) * (1 v^T) +// where tau is a real scalar. 
+// +// On entry, x contains the vector x, on exit it contains v. +// +// Dlarfg is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlarfg(n int, alpha float64, x []float64, incX int) (beta, tau float64) { + switch { + case n < 0: + panic(nLT0) + case incX <= 0: + panic(badIncX) + } + + if n <= 1 { + return alpha, 0 + } + + if len(x) < 1+(n-2)*abs(incX) { + panic(shortX) + } + + bi := blas64.Implementation() + + xnorm := bi.Dnrm2(n-1, x, incX) + if xnorm == 0 { + return alpha, 0 + } + beta = -math.Copysign(impl.Dlapy2(alpha, xnorm), alpha) + safmin := dlamchS / dlamchE + knt := 0 + if math.Abs(beta) < safmin { + // xnorm and beta may be inaccurate, scale x and recompute. + rsafmn := 1 / safmin + for { + knt++ + bi.Dscal(n-1, rsafmn, x, incX) + beta *= rsafmn + alpha *= rsafmn + if math.Abs(beta) >= safmin { + break + } + } + xnorm = bi.Dnrm2(n-1, x, incX) + beta = -math.Copysign(impl.Dlapy2(alpha, xnorm), alpha) + } + tau = (beta - alpha) / beta + bi.Dscal(n-1, 1/(alpha-beta), x, incX) + for j := 0; j < knt; j++ { + beta *= safmin + } + return beta, tau +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlarft.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarft.go new file mode 100644 index 0000000..8f03eb8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarft.go @@ -0,0 +1,166 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dlarft forms the triangular factor T of a block reflector H, storing the answer +// in t. +// H = I - V * T * V^T if store == lapack.ColumnWise +// H = I - V^T * T * V if store == lapack.RowWise +// H is defined by a product of the elementary reflectors where +// H = H_0 * H_1 * ... * H_{k-1} if direct == lapack.Forward +// H = H_{k-1} * ... * H_1 * H_0 if direct == lapack.Backward +// +// t is a k×k triangular matrix. t is upper triangular if direct = lapack.Forward +// and lower triangular otherwise. This function will panic if t is not of +// sufficient size. +// +// store describes the storage of the elementary reflectors in v. See +// Dlarfb for a description of layout. +// +// tau contains the scalar factors of the elementary reflectors H_i. +// +// Dlarft is an internal routine. It is exported for testing purposes. +func (Implementation) Dlarft(direct lapack.Direct, store lapack.StoreV, n, k int, v []float64, ldv int, tau []float64, t []float64, ldt int) { + mv, nv := n, k + if store == lapack.RowWise { + mv, nv = k, n + } + switch { + case direct != lapack.Forward && direct != lapack.Backward: + panic(badDirect) + case store != lapack.RowWise && store != lapack.ColumnWise: + panic(badStoreV) + case n < 0: + panic(nLT0) + case k < 1: + panic(kLT1) + case ldv < max(1, nv): + panic(badLdV) + case len(tau) < k: + panic(shortTau) + case ldt < max(1, k): + panic(shortT) + } + + if n == 0 { + return + } + + switch { + case len(v) < (mv-1)*ldv+nv: + panic(shortV) + case len(t) < (k-1)*ldt+k: + panic(shortT) + } + + bi := blas64.Implementation() + + // TODO(btracey): There are a number of minor obvious loop optimizations here. + // TODO(btracey): It may be possible to rearrange some of the code so that + // index of 1 is more common in the Dgemv. 
+ if direct == lapack.Forward { + prevlastv := n - 1 + for i := 0; i < k; i++ { + prevlastv = max(i, prevlastv) + if tau[i] == 0 { + for j := 0; j <= i; j++ { + t[j*ldt+i] = 0 + } + continue + } + var lastv int + if store == lapack.ColumnWise { + // skip trailing zeros + for lastv = n - 1; lastv >= i+1; lastv-- { + if v[lastv*ldv+i] != 0 { + break + } + } + for j := 0; j < i; j++ { + t[j*ldt+i] = -tau[i] * v[i*ldv+j] + } + j := min(lastv, prevlastv) + bi.Dgemv(blas.Trans, j-i, i, + -tau[i], v[(i+1)*ldv:], ldv, v[(i+1)*ldv+i:], ldv, + 1, t[i:], ldt) + } else { + for lastv = n - 1; lastv >= i+1; lastv-- { + if v[i*ldv+lastv] != 0 { + break + } + } + for j := 0; j < i; j++ { + t[j*ldt+i] = -tau[i] * v[j*ldv+i] + } + j := min(lastv, prevlastv) + bi.Dgemv(blas.NoTrans, i, j-i, + -tau[i], v[i+1:], ldv, v[i*ldv+i+1:], 1, + 1, t[i:], ldt) + } + bi.Dtrmv(blas.Upper, blas.NoTrans, blas.NonUnit, i, t, ldt, t[i:], ldt) + t[i*ldt+i] = tau[i] + if i > 1 { + prevlastv = max(prevlastv, lastv) + } else { + prevlastv = lastv + } + } + return + } + prevlastv := 0 + for i := k - 1; i >= 0; i-- { + if tau[i] == 0 { + for j := i; j < k; j++ { + t[j*ldt+i] = 0 + } + continue + } + var lastv int + if i < k-1 { + if store == lapack.ColumnWise { + for lastv = 0; lastv < i; lastv++ { + if v[lastv*ldv+i] != 0 { + break + } + } + for j := i + 1; j < k; j++ { + t[j*ldt+i] = -tau[i] * v[(n-k+i)*ldv+j] + } + j := max(lastv, prevlastv) + bi.Dgemv(blas.Trans, n-k+i-j, k-i-1, + -tau[i], v[j*ldv+i+1:], ldv, v[j*ldv+i:], ldv, + 1, t[(i+1)*ldt+i:], ldt) + } else { + for lastv = 0; lastv < i; lastv++ { + if v[i*ldv+lastv] != 0 { + break + } + } + for j := i + 1; j < k; j++ { + t[j*ldt+i] = -tau[i] * v[j*ldv+n-k+i] + } + j := max(lastv, prevlastv) + bi.Dgemv(blas.NoTrans, k-i-1, n-k+i-j, + -tau[i], v[(i+1)*ldv+j:], ldv, v[i*ldv+j:], 1, + 1, t[(i+1)*ldt+i:], ldt) + } + bi.Dtrmv(blas.Lower, blas.NoTrans, blas.NonUnit, k-i-1, + t[(i+1)*ldt+i+1:], ldt, + t[(i+1)*ldt+i:], ldt) + if i > 0 { + prevlastv = min(prevlastv, lastv) + } else { + prevlastv = lastv + } + } + t[i*ldt+i] = tau[i] + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfx.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfx.go new file mode 100644 index 0000000..d7928c8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlarfx.go @@ -0,0 +1,550 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dlarfx applies an elementary reflector H to a real m×n matrix C, from either +// the left or the right, with loop unrolling when the reflector has order less +// than 11. +// +// H is represented in the form +// H = I - tau * v * v^T, +// where tau is a real scalar and v is a real vector. If tau = 0, then H is +// taken to be the identity matrix. +// +// v must have length equal to m if side == blas.Left, and equal to n if side == +// blas.Right, otherwise Dlarfx will panic. +// +// c and ldc represent the m×n matrix C. On return, C is overwritten by the +// matrix H * C if side == blas.Left, or C * H if side == blas.Right. +// +// work must have length at least n if side == blas.Left, and at least m if side +// == blas.Right, otherwise Dlarfx will panic. work is not referenced if H has +// order < 11. +// +// Dlarfx is an internal routine. It is exported for testing purposes. 
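A minimal sketch of the Dlarfx calling convention described above (hypothetical driver, not part of the vendored file; v, tau and C are chosen for illustration, the signature is the one vendored here). With v = (1, 1) and tau = 1, H = I - tau*v*v^T = [[0, -1], [-1, 0]], so applying it from the left swaps and negates the rows of C:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	var impl gonum.Implementation
	v := []float64{1, 1}
	tau := 1.0
	c := []float64{
		1, 2,
		3, 4,
	}
	// work is only referenced when the order of H is at least 11,
	// so nil is fine for this 2×2 reflector.
	impl.Dlarfx(blas.Left, 2, 2, v, tau, c, 2, nil)
	fmt.Println(c) // [-3 -4 -1 -2]
}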
+func (impl Implementation) Dlarfx(side blas.Side, m, n int, v []float64, tau float64, c []float64, ldc int, work []float64) { + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 { + return + } + + nh := m + lwork := n + if side == blas.Right { + nh = n + lwork = m + } + switch { + case len(v) < nh: + panic(shortV) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case nh > 10 && len(work) < lwork: + panic(shortWork) + } + + if tau == 0 { + return + } + + if side == blas.Left { + // Form H * C, where H has order m. + switch m { + default: // Code for general m. + impl.Dlarf(side, m, n, v, 1, tau, c, ldc, work) + return + + case 0: // No-op for zero size matrix. + return + + case 1: // Special code for 1×1 Householder matrix. + t0 := 1 - tau*v[0]*v[0] + for j := 0; j < n; j++ { + c[j] *= t0 + } + return + + case 2: // Special code for 2×2 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + } + return + + case 3: // Special code for 3×3 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + } + return + + case 4: // Special code for 4×4 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + } + return + + case 5: // Special code for 5×5 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + } + return + + case 6: // Special code for 6×6 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + + v5*c[5*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + c[5*ldc+j] -= sum * t5 + } + return + + case 7: // Special code for 7×7 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + + v5*c[5*ldc+j] + v6*c[6*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + c[5*ldc+j] -= sum * t5 + c[6*ldc+j] -= sum * t6 + } + return + + case 8: // Special code for 8×8 Householder matrix. 
+ v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + + v5*c[5*ldc+j] + v6*c[6*ldc+j] + v7*c[7*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + c[5*ldc+j] -= sum * t5 + c[6*ldc+j] -= sum * t6 + c[7*ldc+j] -= sum * t7 + } + return + + case 9: // Special code for 9×9 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + v8 := v[8] + t8 := tau * v8 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + + v5*c[5*ldc+j] + v6*c[6*ldc+j] + v7*c[7*ldc+j] + v8*c[8*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + c[5*ldc+j] -= sum * t5 + c[6*ldc+j] -= sum * t6 + c[7*ldc+j] -= sum * t7 + c[8*ldc+j] -= sum * t8 + } + return + + case 10: // Special code for 10×10 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + v8 := v[8] + t8 := tau * v8 + v9 := v[9] + t9 := tau * v9 + for j := 0; j < n; j++ { + sum := v0*c[j] + v1*c[ldc+j] + v2*c[2*ldc+j] + v3*c[3*ldc+j] + v4*c[4*ldc+j] + + v5*c[5*ldc+j] + v6*c[6*ldc+j] + v7*c[7*ldc+j] + v8*c[8*ldc+j] + v9*c[9*ldc+j] + c[j] -= sum * t0 + c[ldc+j] -= sum * t1 + c[2*ldc+j] -= sum * t2 + c[3*ldc+j] -= sum * t3 + c[4*ldc+j] -= sum * t4 + c[5*ldc+j] -= sum * t5 + c[6*ldc+j] -= sum * t6 + c[7*ldc+j] -= sum * t7 + c[8*ldc+j] -= sum * t8 + c[9*ldc+j] -= sum * t9 + } + return + } + } + + // Form C * H, where H has order n. + switch n { + default: // Code for general n. + impl.Dlarf(side, m, n, v, 1, tau, c, ldc, work) + return + + case 0: // No-op for zero size matrix. + return + + case 1: // Special code for 1×1 Householder matrix. + t0 := 1 - tau*v[0]*v[0] + for j := 0; j < m; j++ { + c[j*ldc] *= t0 + } + return + + case 2: // Special code for 2×2 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + } + return + + case 3: // Special code for 3×3 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + } + return + + case 4: // Special code for 4×4 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + } + return + + case 5: // Special code for 5×5 Householder matrix. 
+ v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + } + return + + case 6: // Special code for 6×6 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + v5*cs[5] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + cs[5] -= sum * t5 + } + return + + case 7: // Special code for 7×7 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + + v5*cs[5] + v6*cs[6] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + cs[5] -= sum * t5 + cs[6] -= sum * t6 + } + return + + case 8: // Special code for 8×8 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + + v5*cs[5] + v6*cs[6] + v7*cs[7] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + cs[5] -= sum * t5 + cs[6] -= sum * t6 + cs[7] -= sum * t7 + } + return + + case 9: // Special code for 9×9 Householder matrix. + v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + v8 := v[8] + t8 := tau * v8 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + + v5*cs[5] + v6*cs[6] + v7*cs[7] + v8*cs[8] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + cs[5] -= sum * t5 + cs[6] -= sum * t6 + cs[7] -= sum * t7 + cs[8] -= sum * t8 + } + return + + case 10: // Special code for 10×10 Householder matrix. 
+ v0 := v[0] + t0 := tau * v0 + v1 := v[1] + t1 := tau * v1 + v2 := v[2] + t2 := tau * v2 + v3 := v[3] + t3 := tau * v3 + v4 := v[4] + t4 := tau * v4 + v5 := v[5] + t5 := tau * v5 + v6 := v[6] + t6 := tau * v6 + v7 := v[7] + t7 := tau * v7 + v8 := v[8] + t8 := tau * v8 + v9 := v[9] + t9 := tau * v9 + for j := 0; j < m; j++ { + cs := c[j*ldc:] + sum := v0*cs[0] + v1*cs[1] + v2*cs[2] + v3*cs[3] + v4*cs[4] + + v5*cs[5] + v6*cs[6] + v7*cs[7] + v8*cs[8] + v9*cs[9] + cs[0] -= sum * t0 + cs[1] -= sum * t1 + cs[2] -= sum * t2 + cs[3] -= sum * t3 + cs[4] -= sum * t4 + cs[5] -= sum * t5 + cs[6] -= sum * t6 + cs[7] -= sum * t7 + cs[8] -= sum * t8 + cs[9] -= sum * t9 + } + return + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlartg.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlartg.go new file mode 100644 index 0000000..ad64546 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlartg.go @@ -0,0 +1,80 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlartg generates a plane rotation so that +// [ cs sn] * [f] = [r] +// [-sn cs] [g] = [0] +// This is a more accurate version of BLAS drotg, with the other differences that +// if g = 0, then cs = 1 and sn = 0, and if f = 0 and g != 0, then cs = 0 and sn = 1. +// If abs(f) > abs(g), cs will be positive. +// +// Dlartg is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlartg(f, g float64) (cs, sn, r float64) { + safmn2 := math.Pow(dlamchB, math.Trunc(math.Log(dlamchS/dlamchE)/math.Log(dlamchB)/2)) + safmx2 := 1 / safmn2 + if g == 0 { + cs = 1 + sn = 0 + r = f + return cs, sn, r + } + if f == 0 { + cs = 0 + sn = 1 + r = g + return cs, sn, r + } + f1 := f + g1 := g + scale := math.Max(math.Abs(f1), math.Abs(g1)) + if scale >= safmx2 { + var count int + for { + count++ + f1 *= safmn2 + g1 *= safmn2 + scale = math.Max(math.Abs(f1), math.Abs(g1)) + if scale < safmx2 { + break + } + } + r = math.Sqrt(f1*f1 + g1*g1) + cs = f1 / r + sn = g1 / r + for i := 0; i < count; i++ { + r *= safmx2 + } + } else if scale <= safmn2 { + var count int + for { + count++ + f1 *= safmx2 + g1 *= safmx2 + scale = math.Max(math.Abs(f1), math.Abs(g1)) + if scale >= safmn2 { + break + } + } + r = math.Sqrt(f1*f1 + g1*g1) + cs = f1 / r + sn = g1 / r + for i := 0; i < count; i++ { + r *= safmn2 + } + } else { + r = math.Sqrt(f1*f1 + g1*g1) + cs = f1 / r + sn = g1 / r + } + if math.Abs(f) > math.Abs(g) && cs < 0 { + cs *= -1 + sn *= -1 + r *= -1 + } + return cs, sn, r +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlas2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlas2.go new file mode 100644 index 0000000..9922b4a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlas2.go @@ -0,0 +1,43 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlas2 computes the singular values of the 2×2 matrix defined by +// [F G] +// [0 H] +// The smaller and larger singular values are returned in that order. +// +// Dlas2 is an internal routine. It is exported for testing purposes. 
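A minimal sketch of Dlartg as vendored above (hypothetical driver; the inputs are illustrative, the signature comes from the vendored file). The rotation it returns zeroes the second component of (f, g):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	var impl gonum.Implementation
	// Zero g = 4 against f = 3: r = 5 with cs = 3/5 and sn = 4/5.
	cs, sn, r := impl.Dlartg(3, 4)
	fmt.Println(cs, sn, r)
	// Applying [cs sn; -sn cs] to (f, g) reproduces (r, 0) up to roundoff.
	fmt.Println(cs*3+sn*4, -sn*3+cs*4)
}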
+func (impl Implementation) Dlas2(f, g, h float64) (ssmin, ssmax float64) { + fa := math.Abs(f) + ga := math.Abs(g) + ha := math.Abs(h) + fhmin := math.Min(fa, ha) + fhmax := math.Max(fa, ha) + if fhmin == 0 { + if fhmax == 0 { + return 0, ga + } + v := math.Min(fhmax, ga) / math.Max(fhmax, ga) + return 0, math.Max(fhmax, ga) * math.Sqrt(1+v*v) + } + if ga < fhmax { + as := 1 + fhmin/fhmax + at := (fhmax - fhmin) / fhmax + au := (ga / fhmax) * (ga / fhmax) + c := 2 / (math.Sqrt(as*as+au) + math.Sqrt(at*at+au)) + return fhmin * c, fhmax / c + } + au := fhmax / ga + if au == 0 { + return fhmin * fhmax / ga, ga + } + as := 1 + fhmin/fhmax + at := (fhmax - fhmin) / fhmax + c := 1 / (math.Sqrt(1+(as*au)*(as*au)) + math.Sqrt(1+(at*au)*(at*au))) + return 2 * (fhmin * c) * au, ga / (c + c) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlascl.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlascl.go new file mode 100644 index 0000000..61c4eb7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlascl.go @@ -0,0 +1,111 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/lapack" +) + +// Dlascl multiplies an m×n matrix by the scalar cto/cfrom. +// +// cfrom must not be zero, and cto and cfrom must not be NaN, otherwise Dlascl +// will panic. +// +// Dlascl is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlascl(kind lapack.MatrixType, kl, ku int, cfrom, cto float64, m, n int, a []float64, lda int) { + switch kind { + default: + panic(badMatrixType) + case 'H', 'B', 'Q', 'Z': // See dlascl.f. + panic("not implemented") + case lapack.General, lapack.UpperTri, lapack.LowerTri: + if lda < max(1, n) { + panic(badLdA) + } + } + switch { + case cfrom == 0: + panic(zeroCFrom) + case math.IsNaN(cfrom): + panic(nanCFrom) + case math.IsNaN(cto): + panic(nanCTo) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + } + + if n == 0 || m == 0 { + return + } + + switch kind { + case lapack.General, lapack.UpperTri, lapack.LowerTri: + if len(a) < (m-1)*lda+n { + panic(shortA) + } + } + + smlnum := dlamchS + bignum := 1 / smlnum + cfromc := cfrom + ctoc := cto + cfrom1 := cfromc * smlnum + for { + var done bool + var mul, ctol float64 + if cfrom1 == cfromc { + // cfromc is inf. + mul = ctoc / cfromc + done = true + ctol = ctoc + } else { + ctol = ctoc / bignum + if ctol == ctoc { + // ctoc is either 0 or inf. + mul = ctoc + done = true + cfromc = 1 + } else if math.Abs(cfrom1) > math.Abs(ctoc) && ctoc != 0 { + mul = smlnum + done = false + cfromc = cfrom1 + } else if math.Abs(ctol) > math.Abs(cfromc) { + mul = bignum + done = false + ctoc = ctol + } else { + mul = ctoc / cfromc + done = true + } + } + switch kind { + case lapack.General: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = a[i*lda+j] * mul + } + } + case lapack.UpperTri: + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + a[i*lda+j] = a[i*lda+j] * mul + } + } + case lapack.LowerTri: + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + a[i*lda+j] = a[i*lda+j] * mul + } + } + } + if done { + break + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaset.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaset.go new file mode 100644 index 0000000..b4e6391 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaset.go @@ -0,0 +1,57 @@ +// Copyright ©2015 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dlaset sets the off-diagonal elements of A to alpha, and the diagonal +// elements to beta. If uplo == blas.Upper, only the elements in the upper +// triangular part are set. If uplo == blas.Lower, only the elements in the +// lower triangular part are set. If uplo is otherwise, all of the elements of A +// are set. +// +// Dlaset is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlaset(uplo blas.Uplo, m, n int, alpha, beta float64, a []float64, lda int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + minmn := min(m, n) + if minmn == 0 { + return + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + + if uplo == blas.Upper { + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + a[i*lda+j] = alpha + } + } + } else if uplo == blas.Lower { + for i := 0; i < m; i++ { + for j := 0; j < min(i+1, n); j++ { + a[i*lda+j] = alpha + } + } + } else { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = alpha + } + } + } + for i := 0; i < minmn; i++ { + a[i*lda+i] = beta + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq1.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq1.go new file mode 100644 index 0000000..1f1d1dc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq1.go @@ -0,0 +1,100 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dlasq1 computes the singular values of an n×n bidiagonal matrix with diagonal +// d and off-diagonal e. On exit, d contains the singular values in decreasing +// order, and e is overwritten. d must have length at least n, e must have +// length at least n-1, and the input work must have length at least 4*n. Dlasq1 +// will panic if these conditions are not met. +// +// Dlasq1 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq1(n int, d, e, work []float64) (info int) { + if n < 0 { + panic(nLT0) + } + + if n == 0 { + return info + } + + switch { + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + case len(work) < 4*n: + panic(shortWork) + } + + if n == 1 { + d[0] = math.Abs(d[0]) + return info + } + + if n == 2 { + d[1], d[0] = impl.Dlas2(d[0], e[0], d[1]) + return info + } + + // Estimate the largest singular value. + var sigmx float64 + for i := 0; i < n-1; i++ { + d[i] = math.Abs(d[i]) + sigmx = math.Max(sigmx, math.Abs(e[i])) + } + d[n-1] = math.Abs(d[n-1]) + // Early return if sigmx is zero (matrix is already diagonal). + if sigmx == 0 { + impl.Dlasrt(lapack.SortDecreasing, n, d) + return info + } + + for i := 0; i < n; i++ { + sigmx = math.Max(sigmx, d[i]) + } + + // Copy D and E into WORK (in the Z format) and scale (squaring the + // input data makes scaling by a power of the radix pointless). + + eps := dlamchP + safmin := dlamchS + scale := math.Sqrt(eps / safmin) + bi := blas64.Implementation() + bi.Dcopy(n, d, 1, work, 2) + bi.Dcopy(n-1, e, 1, work[1:], 2) + impl.Dlascl(lapack.General, 0, 0, sigmx, scale, 2*n-1, 1, work, 1) + + // Compute the q's and e's. 
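A minimal sketch of the Dlaset and Dlascl calling conventions defined above (hypothetical driver, not part of the vendored sources; the matrix and scale factors are illustrative, the signatures and import paths are those vendored in this patch):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	var impl gonum.Implementation
	a := make([]float64, 3*3)
	// Off-diagonal elements set to 0, diagonal to 1: a becomes the identity.
	// blas.All (neither Upper nor Lower) selects the whole matrix.
	impl.Dlaset(blas.All, 3, 3, 0, 1, a, 3)
	// Multiply every element by cto/cfrom = 2.5/1.
	impl.Dlascl(lapack.General, 0, 0, 1, 2.5, 3, 3, a, 3)
	fmt.Println(a) // [2.5 0 0 0 2.5 0 0 0 2.5]
}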
+ for i := 0; i < 2*n-1; i++ { + work[i] *= work[i] + } + work[2*n-1] = 0 + + info = impl.Dlasq2(n, work) + if info == 0 { + for i := 0; i < n; i++ { + d[i] = math.Sqrt(work[i]) + } + impl.Dlascl(lapack.General, 0, 0, scale, sigmx, n, 1, d, 1) + } else if info == 2 { + // Maximum number of iterations exceeded. Move data from work + // into D and E so the calling subroutine can try to finish. + for i := 0; i < n; i++ { + d[i] = math.Sqrt(work[2*i]) + e[i] = math.Sqrt(work[2*i+1]) + } + impl.Dlascl(lapack.General, 0, 0, scale, sigmx, n, 1, d, 1) + impl.Dlascl(lapack.General, 0, 0, scale, sigmx, n, 1, e, 1) + } + return info +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq2.go new file mode 100644 index 0000000..fd24a55 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq2.go @@ -0,0 +1,369 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/lapack" +) + +// Dlasq2 computes all the eigenvalues of the symmetric positive +// definite tridiagonal matrix associated with the qd array Z. Eigevalues +// are computed to high relative accuracy avoiding denormalization, underflow +// and overflow. +// +// To see the relation of Z to the tridiagonal matrix, let L be a +// unit lower bidiagonal matrix with sub-diagonals Z(2,4,6,,..) and +// let U be an upper bidiagonal matrix with 1's above and diagonal +// Z(1,3,5,,..). The tridiagonal is L*U or, if you prefer, the +// symmetric tridiagonal to which it is similar. +// +// info returns a status error. The return codes mean as follows: +// 0: The algorithm completed successfully. +// 1: A split was marked by a positive value in e. +// 2: Current block of Z not diagonalized after 100*n iterations (in inner +// while loop). On exit Z holds a qd array with the same eigenvalues as +// the given Z. +// 3: Termination criterion of outer while loop not met (program created more +// than N unreduced blocks). +// +// z must have length at least 4*n, and must not contain any negative elements. +// Dlasq2 will panic otherwise. +// +// Dlasq2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq2(n int, z []float64) (info int) { + if n < 0 { + panic(nLT0) + } + + if n == 0 { + return info + } + + if len(z) < 4*n { + panic(shortZ) + } + + if n == 1 { + if z[0] < 0 { + panic(negZ) + } + return info + } + + const cbias = 1.5 + + eps := dlamchP + safmin := dlamchS + tol := eps * 100 + tol2 := tol * tol + if n == 2 { + if z[1] < 0 || z[2] < 0 { + panic(negZ) + } else if z[2] > z[0] { + z[0], z[2] = z[2], z[0] + } + z[4] = z[0] + z[1] + z[2] + if z[1] > z[2]*tol2 { + t := 0.5 * (z[0] - z[2] + z[1]) + s := z[2] * (z[1] / t) + if s <= t { + s = z[2] * (z[1] / (t * (1 + math.Sqrt(1+s/t)))) + } else { + s = z[2] * (z[1] / (t + math.Sqrt(t)*math.Sqrt(t+s))) + } + t = z[0] + s + z[1] + z[2] *= z[0] / t + z[0] = t + } + z[1] = z[2] + z[5] = z[1] + z[0] + return info + } + // Check for negative data and compute sums of q's and e's. + z[2*n-1] = 0 + emin := z[1] + var d, e, qmax float64 + var i1, n1 int + for k := 0; k < 2*(n-1); k += 2 { + if z[k] < 0 || z[k+1] < 0 { + panic(negZ) + } + d += z[k] + e += z[k+1] + qmax = math.Max(qmax, z[k]) + emin = math.Min(emin, z[k+1]) + } + if z[2*(n-1)] < 0 { + panic(negZ) + } + d += z[2*(n-1)] + // Check for diagonality. 
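A minimal sketch of the Dlasq1 contract defined above (hypothetical driver; the bidiagonal matrix is chosen for illustration, the signature is the vendored one). For n = 2 the routine reduces to Dlas2:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	var impl gonum.Implementation
	// Singular values of the 2×2 upper bidiagonal matrix [3 1; 0 4].
	d := []float64{3, 4}             // diagonal
	e := []float64{1}                // off-diagonal
	work := make([]float64, 4*2)     // length 4*n as required
	info := impl.Dlasq1(2, d, e, work)
	// d now holds the singular values in decreasing order:
	// 3*sqrt(2) ≈ 4.243 and 2*sqrt(2) ≈ 2.828.
	fmt.Println(info, d)
}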
+ if e == 0 { + for k := 1; k < n; k++ { + z[k] = z[2*k] + } + impl.Dlasrt(lapack.SortDecreasing, n, z) + z[2*(n-1)] = d + return info + } + trace := d + e + // Check for zero data. + if trace == 0 { + z[2*(n-1)] = 0 + return info + } + // Rearrange data for locality: Z=(q1,qq1,e1,ee1,q2,qq2,e2,ee2,...). + for k := 2 * n; k >= 2; k -= 2 { + z[2*k-1] = 0 + z[2*k-2] = z[k-1] + z[2*k-3] = 0 + z[2*k-4] = z[k-2] + } + i0 := 0 + n0 := n - 1 + + // Reverse the qd-array, if warranted. + // z[4*i0-3] --> z[4*(i0+1)-3-1] --> z[4*i0] + if cbias*z[4*i0] < z[4*n0] { + ipn4Out := 4 * (i0 + n0 + 2) + for i4loop := 4 * (i0 + 1); i4loop <= 2*(i0+n0+1); i4loop += 4 { + i4 := i4loop - 1 + ipn4 := ipn4Out - 1 + z[i4-3], z[ipn4-i4-4] = z[ipn4-i4-4], z[i4-3] + z[i4-1], z[ipn4-i4-6] = z[ipn4-i4-6], z[i4-1] + } + } + + // Initial split checking via dqd and Li's test. + pp := 0 + for k := 0; k < 2; k++ { + d = z[4*n0+pp] + for i4loop := 4*n0 + pp; i4loop >= 4*(i0+1)+pp; i4loop -= 4 { + i4 := i4loop - 1 + if z[i4-1] <= tol2*d { + z[i4-1] = math.Copysign(0, -1) + d = z[i4-3] + } else { + d = z[i4-3] * (d / (d + z[i4-1])) + } + } + // dqd maps Z to ZZ plus Li's test. + emin = z[4*(i0+1)+pp] + d = z[4*i0+pp] + for i4loop := 4*(i0+1) + pp; i4loop <= 4*n0+pp; i4loop += 4 { + i4 := i4loop - 1 + z[i4-2*pp-2] = d + z[i4-1] + if z[i4-1] <= tol2*d { + z[i4-1] = math.Copysign(0, -1) + z[i4-2*pp-2] = d + z[i4-2*pp] = 0 + d = z[i4+1] + } else if safmin*z[i4+1] < z[i4-2*pp-2] && safmin*z[i4-2*pp-2] < z[i4+1] { + tmp := z[i4+1] / z[i4-2*pp-2] + z[i4-2*pp] = z[i4-1] * tmp + d *= tmp + } else { + z[i4-2*pp] = z[i4+1] * (z[i4-1] / z[i4-2*pp-2]) + d = z[i4+1] * (d / z[i4-2*pp-2]) + } + emin = math.Min(emin, z[i4-2*pp]) + } + z[4*(n0+1)-pp-3] = d + + // Now find qmax. + qmax = z[4*(i0+1)-pp-3] + for i4loop := 4*(i0+1) - pp + 2; i4loop <= 4*(n0+1)+pp-2; i4loop += 4 { + i4 := i4loop - 1 + qmax = math.Max(qmax, z[i4]) + } + // Prepare for the next iteration on K. + pp = 1 - pp + } + + // Initialise variables to pass to DLASQ3. + var ttype int + var dmin1, dmin2, dn, dn1, dn2, g, tau float64 + var tempq float64 + iter := 2 + var nFail int + nDiv := 2 * (n0 - i0) + var i4 int +outer: + for iwhila := 1; iwhila <= n+1; iwhila++ { + // Test for completion. + if n0 < 0 { + // Move q's to the front. + for k := 1; k < n; k++ { + z[k] = z[4*k] + } + // Sort and compute sum of eigenvalues. + impl.Dlasrt(lapack.SortDecreasing, n, z) + e = 0 + for k := n - 1; k >= 0; k-- { + e += z[k] + } + // Store trace, sum(eigenvalues) and information on performance. + z[2*n] = trace + z[2*n+1] = e + z[2*n+2] = float64(iter) + z[2*n+3] = float64(nDiv) / float64(n*n) + z[2*n+4] = 100 * float64(nFail) / float64(iter) + return info + } + + // While array unfinished do + // e[n0] holds the value of sigma when submatrix in i0:n0 + // splits from the rest of the array, but is negated. + var desig float64 + var sigma float64 + if n0 != n-1 { + sigma = -z[4*(n0+1)-2] + } + if sigma < 0 { + info = 1 + return info + } + // Find last unreduced submatrix's top index i0, find qmax and + // emin. Find Gershgorin-type bound if Q's much greater than E's. 
+ var emax float64 + if n0 > i0 { + emin = math.Abs(z[4*(n0+1)-6]) + } else { + emin = 0 + } + qmin := z[4*(n0+1)-4] + qmax = qmin + zSmall := false + for i4loop := 4 * (n0 + 1); i4loop >= 8; i4loop -= 4 { + i4 = i4loop - 1 + if z[i4-5] <= 0 { + zSmall = true + break + } + if qmin >= 4*emax { + qmin = math.Min(qmin, z[i4-3]) + emax = math.Max(emax, z[i4-5]) + } + qmax = math.Max(qmax, z[i4-7]+z[i4-5]) + emin = math.Min(emin, z[i4-5]) + } + if !zSmall { + i4 = 3 + } + i0 = (i4+1)/4 - 1 + pp = 0 + if n0-i0 > 1 { + dee := z[4*i0] + deemin := dee + kmin := i0 + for i4loop := 4*(i0+1) + 1; i4loop <= 4*(n0+1)-3; i4loop += 4 { + i4 := i4loop - 1 + dee = z[i4] * (dee / (dee + z[i4-2])) + if dee <= deemin { + deemin = dee + kmin = (i4+4)/4 - 1 + } + } + if (kmin-i0)*2 < n0-kmin && deemin <= 0.5*z[4*n0] { + ipn4Out := 4 * (i0 + n0 + 2) + pp = 2 + for i4loop := 4 * (i0 + 1); i4loop <= 2*(i0+n0+1); i4loop += 4 { + i4 := i4loop - 1 + ipn4 := ipn4Out - 1 + z[i4-3], z[ipn4-i4-4] = z[ipn4-i4-4], z[i4-3] + z[i4-2], z[ipn4-i4-3] = z[ipn4-i4-3], z[i4-2] + z[i4-1], z[ipn4-i4-6] = z[ipn4-i4-6], z[i4-1] + z[i4], z[ipn4-i4-5] = z[ipn4-i4-5], z[i4] + } + } + } + // Put -(initial shift) into DMIN. + dmin := -math.Max(0, qmin-2*math.Sqrt(qmin)*math.Sqrt(emax)) + + // Now i0:n0 is unreduced. + // PP = 0 for ping, PP = 1 for pong. + // PP = 2 indicates that flipping was applied to the Z array and + // and that the tests for deflation upon entry in Dlasq3 + // should not be performed. + nbig := 100 * (n0 - i0 + 1) + for iwhilb := 0; iwhilb < nbig; iwhilb++ { + if i0 > n0 { + continue outer + } + + // While submatrix unfinished take a good dqds step. + i0, n0, pp, dmin, sigma, desig, qmax, nFail, iter, nDiv, ttype, dmin1, dmin2, dn, dn1, dn2, g, tau = + impl.Dlasq3(i0, n0, z, pp, dmin, sigma, desig, qmax, nFail, iter, nDiv, ttype, dmin1, dmin2, dn, dn1, dn2, g, tau) + + pp = 1 - pp + // When emin is very small check for splits. + if pp == 0 && n0-i0 >= 3 { + if z[4*(n0+1)-1] <= tol2*qmax || z[4*(n0+1)-2] <= tol2*sigma { + splt := i0 - 1 + qmax = z[4*i0] + emin = z[4*(i0+1)-2] + oldemn := z[4*(i0+1)-1] + for i4loop := 4 * (i0 + 1); i4loop <= 4*(n0-2); i4loop += 4 { + i4 := i4loop - 1 + if z[i4] <= tol2*z[i4-3] || z[i4-1] <= tol2*sigma { + z[i4-1] = -sigma + splt = i4 / 4 + qmax = 0 + emin = z[i4+3] + oldemn = z[i4+4] + } else { + qmax = math.Max(qmax, z[i4+1]) + emin = math.Min(emin, z[i4-1]) + oldemn = math.Min(oldemn, z[i4]) + } + } + z[4*(n0+1)-2] = emin + z[4*(n0+1)-1] = oldemn + i0 = splt + 1 + } + } + } + // Maximum number of iterations exceeded, restore the shift + // sigma and place the new d's and e's in a qd array. + // This might need to be done for several blocks. + info = 2 + i1 = i0 + for { + tempq = z[4*i0] + z[4*i0] += sigma + for k := i0 + 1; k <= n0; k++ { + tempe := z[4*(k+1)-6] + z[4*(k+1)-6] *= tempq / z[4*(k+1)-8] + tempq = z[4*k] + z[4*k] += sigma + tempe - z[4*(k+1)-6] + } + // Prepare to do this on the previous block if there is one. + if i1 <= 0 { + break + } + n1 = i1 - 1 + for i1 >= 1 && z[4*(i1+1)-6] >= 0 { + i1 -= 1 + } + sigma = -z[4*(n1+1)-2] + } + for k := 0; k < n; k++ { + z[2*k] = z[4*k] + // Only the block 1..N0 is unfinished. The rest of the e's + // must be essentially zero, although sometimes other data + // has been stored in them. 
+ if k < n0 { + z[2*(k+1)-1] = z[4*(k+1)-1] + } else { + z[2*(k+1)] = 0 + } + } + return info + } + info = 3 + return info +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq3.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq3.go new file mode 100644 index 0000000..a05e94e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq3.go @@ -0,0 +1,172 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlasq3 checks for deflation, computes a shift (tau) and calls dqds. +// In case of failure it changes shifts, and tries again until output +// is positive. +// +// Dlasq3 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq3(i0, n0 int, z []float64, pp int, dmin, sigma, desig, qmax float64, nFail, iter, nDiv int, ttype int, dmin1, dmin2, dn, dn1, dn2, g, tau float64) ( + i0Out, n0Out, ppOut int, dminOut, sigmaOut, desigOut, qmaxOut float64, nFailOut, iterOut, nDivOut, ttypeOut int, dmin1Out, dmin2Out, dnOut, dn1Out, dn2Out, gOut, tauOut float64) { + switch { + case i0 < 0: + panic(i0LT0) + case n0 < 0: + panic(n0LT0) + case len(z) < 4*n0: + panic(shortZ) + case pp != 0 && pp != 1 && pp != 2: + panic(badPp) + } + + const cbias = 1.5 + + n0in := n0 + eps := dlamchP + tol := eps * 100 + tol2 := tol * tol + var nn int + var t float64 + for { + if n0 < i0 { + return i0, n0, pp, dmin, sigma, desig, qmax, nFail, iter, nDiv, ttype, dmin1, dmin2, dn, dn1, dn2, g, tau + } + if n0 == i0 { + z[4*(n0+1)-4] = z[4*(n0+1)+pp-4] + sigma + n0-- + continue + } + nn = 4*(n0+1) + pp - 1 + if n0 != i0+1 { + // Check whether e[n0-1] is negligible, 1 eigenvalue. + if z[nn-5] > tol2*(sigma+z[nn-3]) && z[nn-2*pp-4] > tol2*z[nn-7] { + // Check whether e[n0-2] is negligible, 2 eigenvalues. + if z[nn-9] > tol2*sigma && z[nn-2*pp-8] > tol2*z[nn-11] { + break + } + } else { + z[4*(n0+1)-4] = z[4*(n0+1)+pp-4] + sigma + n0-- + continue + } + } + if z[nn-3] > z[nn-7] { + z[nn-3], z[nn-7] = z[nn-7], z[nn-3] + } + t = 0.5 * (z[nn-7] - z[nn-3] + z[nn-5]) + if z[nn-5] > z[nn-3]*tol2 && t != 0 { + s := z[nn-3] * (z[nn-5] / t) + if s <= t { + s = z[nn-3] * (z[nn-5] / (t * (1 + math.Sqrt(1+s/t)))) + } else { + s = z[nn-3] * (z[nn-5] / (t + math.Sqrt(t)*math.Sqrt(t+s))) + } + t = z[nn-7] + (s + z[nn-5]) + z[nn-3] *= z[nn-7] / t + z[nn-7] = t + } + z[4*(n0+1)-8] = z[nn-7] + sigma + z[4*(n0+1)-4] = z[nn-3] + sigma + n0 -= 2 + } + if pp == 2 { + pp = 0 + } + + // Reverse the qd-array, if warranted. + if dmin <= 0 || n0 < n0in { + if cbias*z[4*(i0+1)+pp-4] < z[4*(n0+1)+pp-4] { + ipn4Out := 4 * (i0 + n0 + 2) + for j4loop := 4 * (i0 + 1); j4loop <= 2*((i0+1)+(n0+1)-1); j4loop += 4 { + ipn4 := ipn4Out - 1 + j4 := j4loop - 1 + + z[j4-3], z[ipn4-j4-4] = z[ipn4-j4-4], z[j4-3] + z[j4-2], z[ipn4-j4-3] = z[ipn4-j4-3], z[j4-2] + z[j4-1], z[ipn4-j4-6] = z[ipn4-j4-6], z[j4-1] + z[j4], z[ipn4-j4-5] = z[ipn4-j4-5], z[j4] + } + if n0-i0 <= 4 { + z[4*(n0+1)+pp-2] = z[4*(i0+1)+pp-2] + z[4*(n0+1)-pp-1] = z[4*(i0+1)-pp-1] + } + dmin2 = math.Min(dmin2, z[4*(i0+1)-pp-2]) + z[4*(n0+1)+pp-2] = math.Min(math.Min(z[4*(n0+1)+pp-2], z[4*(i0+1)+pp-2]), z[4*(i0+1)+pp+2]) + z[4*(n0+1)-pp-1] = math.Min(math.Min(z[4*(n0+1)-pp-1], z[4*(i0+1)-pp-1]), z[4*(i0+1)-pp+3]) + qmax = math.Max(math.Max(qmax, z[4*(i0+1)+pp-4]), z[4*(i0+1)+pp]) + dmin = math.Copysign(0, -1) // Fortran code has -zero, but -0 in go is 0 + } + } + + // Choose a shift. 
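A minimal sketch of feeding Dlasq2 (defined above) a qd array directly (hypothetical driver; the qd data are chosen for illustration and encode the example below, the signature is the vendored one). In the 0-based Go layout the q's sit at even indices and the e's at odd indices, padded to length 4*n:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	var impl gonum.Implementation
	// L unit lower bidiagonal with subdiagonal 1, U upper bidiagonal with
	// diagonal (4, 1) and 1's above, so L*U = [4 1; 4 2].
	z := []float64{4, 1, 1, 0, 0, 0, 0, 0}
	info := impl.Dlasq2(2, z)
	// On success the eigenvalues land in z[:n] in decreasing order,
	// here 3 + sqrt(5) and 3 - sqrt(5).
	fmt.Println(info, z[0], z[1])
}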
+ tau, ttype, g = impl.Dlasq4(i0, n0, z, pp, n0in, dmin, dmin1, dmin2, dn, dn1, dn2, tau, ttype, g) + + // Call dqds until dmin > 0. +loop: + for { + i0, n0, pp, tau, sigma, dmin, dmin1, dmin2, dn, dn1, dn2 = impl.Dlasq5(i0, n0, z, pp, tau, sigma) + + nDiv += n0 - i0 + 2 + iter++ + switch { + case dmin >= 0 && dmin1 >= 0: + // Success. + goto done + + case dmin < 0 && dmin1 > 0 && z[4*n0-pp-1] < tol*(sigma+dn1) && math.Abs(dn) < tol*sigma: + // Convergence hidden by negative dn. + z[4*n0-pp+1] = 0 + dmin = 0 + goto done + + case dmin < 0: + // Tau too big. Select new Tau and try again. + nFail++ + if ttype < -22 { + // Failed twice. Play it safe. + tau = 0 + } else if dmin1 > 0 { + // Late failure. Gives excellent shift. + tau = (tau + dmin) * (1 - 2*eps) + ttype -= 11 + } else { + // Early failure. Divide by 4. + tau = tau / 4 + ttype -= 12 + } + + case math.IsNaN(dmin): + if tau == 0 { + break loop + } + tau = 0 + + default: + // Possible underflow. Play it safe. + break loop + } + } + + // Risk of underflow. + dmin, dmin1, dmin2, dn, dn1, dn2 = impl.Dlasq6(i0, n0, z, pp) + nDiv += n0 - i0 + 2 + iter++ + tau = 0 + +done: + if tau < sigma { + desig += tau + t = sigma + desig + desig -= t - sigma + } else { + t = sigma + tau + desig += sigma - (t - tau) + } + sigma = t + return i0, n0, pp, dmin, sigma, desig, qmax, nFail, iter, nDiv, ttype, dmin1, dmin2, dn, dn1, dn2, g, tau +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq4.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq4.go new file mode 100644 index 0000000..f6dbb31 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq4.go @@ -0,0 +1,249 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlasq4 computes an approximation to the smallest eigenvalue using values of d +// from the previous transform. +// i0, n0, and n0in are zero-indexed. +// +// Dlasq4 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq4(i0, n0 int, z []float64, pp int, n0in int, dmin, dmin1, dmin2, dn, dn1, dn2, tau float64, ttype int, g float64) (tauOut float64, ttypeOut int, gOut float64) { + switch { + case i0 < 0: + panic(i0LT0) + case n0 < 0: + panic(n0LT0) + case len(z) < 4*n0: + panic(shortZ) + case pp != 0 && pp != 1: + panic(badPp) + } + + const ( + cnst1 = 0.563 + cnst2 = 1.01 + cnst3 = 1.05 + + cnstthird = 0.333 // TODO(btracey): Fix? + ) + // A negative dmin forces the shift to take that absolute value + // ttype records the type of shift. + if dmin <= 0 { + tau = -dmin + ttype = -1 + return tau, ttype, g + } + nn := 4*(n0+1) + pp - 1 // -1 for zero indexing + s := math.NaN() // Poison s so that failure to take a path below is obvious + if n0in == n0 { + // No eigenvalues deflated. 
+ if dmin == dn || dmin == dn1 { + b1 := math.Sqrt(z[nn-3]) * math.Sqrt(z[nn-5]) + b2 := math.Sqrt(z[nn-7]) * math.Sqrt(z[nn-9]) + a2 := z[nn-7] + z[nn-5] + if dmin == dn && dmin1 == dn1 { + gap2 := dmin2 - a2 - dmin2/4 + var gap1 float64 + if gap2 > 0 && gap2 > b2 { + gap1 = a2 - dn - (b2/gap2)*b2 + } else { + gap1 = a2 - dn - (b1 + b2) + } + if gap1 > 0 && gap1 > b1 { + s = math.Max(dn-(b1/gap1)*b1, 0.5*dmin) + ttype = -2 + } else { + s = 0 + if dn > b1 { + s = dn - b1 + } + if a2 > b1+b2 { + s = math.Min(s, a2-(b1+b2)) + } + s = math.Max(s, cnstthird*dmin) + ttype = -3 + } + } else { + ttype = -4 + s = dmin / 4 + var gam float64 + var np int + if dmin == dn { + gam = dn + a2 = 0 + if z[nn-5] > z[nn-7] { + return tau, ttype, g + } + b2 = z[nn-5] / z[nn-7] + np = nn - 9 + } else { + np = nn - 2*pp + gam = dn1 + if z[np-4] > z[np-2] { + return tau, ttype, g + } + a2 = z[np-4] / z[np-2] + if z[nn-9] > z[nn-11] { + return tau, ttype, g + } + b2 = z[nn-9] / z[nn-11] + np = nn - 13 + } + // Approximate contribution to norm squared from i < nn-1. + a2 += b2 + for i4loop := np + 1; i4loop >= 4*(i0+1)-1+pp; i4loop -= 4 { + i4 := i4loop - 1 + if b2 == 0 { + break + } + b1 = b2 + if z[i4] > z[i4-2] { + return tau, ttype, g + } + b2 *= z[i4] / z[i4-2] + a2 += b2 + if 100*math.Max(b2, b1) < a2 || cnst1 < a2 { + break + } + } + a2 *= cnst3 + // Rayleigh quotient residual bound. + if a2 < cnst1 { + s = gam * (1 - math.Sqrt(a2)) / (1 + a2) + } + } + } else if dmin == dn2 { + ttype = -5 + s = dmin / 4 + // Compute contribution to norm squared from i > nn-2. + np := nn - 2*pp + b1 := z[np-2] + b2 := z[np-6] + gam := dn2 + if z[np-8] > b2 || z[np-4] > b1 { + return tau, ttype, g + } + a2 := (z[np-8] / b2) * (1 + z[np-4]/b1) + // Approximate contribution to norm squared from i < nn-2. + if n0-i0 > 2 { + b2 = z[nn-13] / z[nn-15] + a2 += b2 + for i4loop := (nn + 1) - 17; i4loop >= 4*(i0+1)-1+pp; i4loop -= 4 { + i4 := i4loop - 1 + if b2 == 0 { + break + } + b1 = b2 + if z[i4] > z[i4-2] { + return tau, ttype, g + } + b2 *= z[i4] / z[i4-2] + a2 += b2 + if 100*math.Max(b2, b1) < a2 || cnst1 < a2 { + break + } + } + a2 *= cnst3 + } + if a2 < cnst1 { + s = gam * (1 - math.Sqrt(a2)) / (1 + a2) + } + } else { + // Case 6, no information to guide us. + if ttype == -6 { + g += cnstthird * (1 - g) + } else if ttype == -18 { + g = cnstthird / 4 + } else { + g = 1.0 / 4 + } + s = g * dmin + ttype = -6 + } + } else if n0in == (n0 + 1) { + // One eigenvalue just deflated. Use DMIN1, DN1 for DMIN and DN. + if dmin1 == dn1 && dmin2 == dn2 { + ttype = -7 + s = cnstthird * dmin1 + if z[nn-5] > z[nn-7] { + return tau, ttype, g + } + b1 := z[nn-5] / z[nn-7] + b2 := b1 + if b2 != 0 { + for i4loop := 4*(n0+1) - 9 + pp; i4loop >= 4*(i0+1)-1+pp; i4loop -= 4 { + i4 := i4loop - 1 + a2 := b1 + if z[i4] > z[i4-2] { + return tau, ttype, g + } + b1 *= z[i4] / z[i4-2] + b2 += b1 + if 100*math.Max(b1, a2) < b2 { + break + } + } + } + b2 = math.Sqrt(cnst3 * b2) + a2 := dmin1 / (1 + b2*b2) + gap2 := 0.5*dmin2 - a2 + if gap2 > 0 && gap2 > b2*a2 { + s = math.Max(s, a2*(1-cnst2*a2*(b2/gap2)*b2)) + } else { + s = math.Max(s, a2*(1-cnst2*b2)) + ttype = -8 + } + } else { + s = dmin1 / 4 + if dmin1 == dn1 { + s = 0.5 * dmin1 + } + ttype = -9 + } + } else if n0in == (n0 + 2) { + // Two eigenvalues deflated. Use DMIN2, DN2 for DMIN and DN. 
+ if dmin2 == dn2 && 2*z[nn-5] < z[nn-7] { + ttype = -10 + s = cnstthird * dmin2 + if z[nn-5] > z[nn-7] { + return tau, ttype, g + } + b1 := z[nn-5] / z[nn-7] + b2 := b1 + if b2 != 0 { + for i4loop := 4*(n0+1) - 9 + pp; i4loop >= 4*(i0+1)-1+pp; i4loop -= 4 { + i4 := i4loop - 1 + if z[i4] > z[i4-2] { + return tau, ttype, g + } + b1 *= z[i4] / z[i4-2] + b2 += b1 + if 100*b1 < b2 { + break + } + } + } + b2 = math.Sqrt(cnst3 * b2) + a2 := dmin2 / (1 + b2*b2) + gap2 := z[nn-7] + z[nn-9] - math.Sqrt(z[nn-11])*math.Sqrt(z[nn-9]) - a2 + if gap2 > 0 && gap2 > b2*a2 { + s = math.Max(s, a2*(1-cnst2*a2*(b2/gap2)*b2)) + } else { + s = math.Max(s, a2*(1-cnst2*b2)) + } + } else { + s = dmin2 / 4 + ttype = -11 + } + } else if n0in > n0+2 { + // Case 12, more than two eigenvalues deflated. No information. + s = 0 + ttype = -12 + } + tau = s + return tau, ttype, g +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq5.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq5.go new file mode 100644 index 0000000..d3826d9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq5.go @@ -0,0 +1,140 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlasq5 computes one dqds transform in ping-pong form. +// i0 and n0 are zero-indexed. +// +// Dlasq5 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq5(i0, n0 int, z []float64, pp int, tau, sigma float64) (i0Out, n0Out, ppOut int, tauOut, sigmaOut, dmin, dmin1, dmin2, dn, dnm1, dnm2 float64) { + // The lapack function has inputs for ieee and eps, but Go requires ieee so + // these are unnecessary. + + switch { + case i0 < 0: + panic(i0LT0) + case n0 < 0: + panic(n0LT0) + case len(z) < 4*n0: + panic(shortZ) + case pp != 0 && pp != 1: + panic(badPp) + } + + if n0-i0-1 <= 0 { + return i0, n0, pp, tau, sigma, dmin, dmin1, dmin2, dn, dnm1, dnm2 + } + + eps := dlamchP + dthresh := eps * (sigma + tau) + if tau < dthresh*0.5 { + tau = 0 + } + var j4 int + var emin float64 + if tau != 0 { + j4 = 4*i0 + pp + emin = z[j4+4] + d := z[j4] - tau + dmin = d + // In the reference there are code paths that actually return this value. + // dmin1 = -z[j4] + if pp == 0 { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 + z[j4-2] = d + z[j4-1] + tmp := z[j4+1] / z[j4-2] + d = d*tmp - tau + dmin = math.Min(dmin, d) + z[j4] = z[j4-1] * tmp + emin = math.Min(z[j4], emin) + } + } else { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 + z[j4-3] = d + z[j4] + tmp := z[j4+2] / z[j4-3] + d = d*tmp - tau + dmin = math.Min(dmin, d) + z[j4-1] = z[j4] * tmp + emin = math.Min(z[j4-1], emin) + } + } + // Unroll the last two steps. + dnm2 = d + dmin2 = dmin + j4 = 4*((n0+1)-2) - pp - 1 + j4p2 := j4 + 2*pp - 1 + z[j4-2] = dnm2 + z[j4p2] + z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2]) + dnm1 = z[j4p2+2]*(dnm2/z[j4-2]) - tau + dmin = math.Min(dmin, dnm1) + + dmin1 = dmin + j4 += 4 + j4p2 = j4 + 2*pp - 1 + z[j4-2] = dnm1 + z[j4p2] + z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2]) + dn = z[j4p2+2]*(dnm1/z[j4-2]) - tau + dmin = math.Min(dmin, dn) + } else { + // This is the version that sets d's to zero if they are small enough. + j4 = 4*(i0+1) + pp - 4 + emin = z[j4+4] + d := z[j4] - tau + dmin = d + // In the reference there are code paths that actually return this value. 
+ // dmin1 = -z[j4] + if pp == 0 { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 + z[j4-2] = d + z[j4-1] + tmp := z[j4+1] / z[j4-2] + d = d*tmp - tau + if d < dthresh { + d = 0 + } + dmin = math.Min(dmin, d) + z[j4] = z[j4-1] * tmp + emin = math.Min(z[j4], emin) + } + } else { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 + z[j4-3] = d + z[j4] + tmp := z[j4+2] / z[j4-3] + d = d*tmp - tau + if d < dthresh { + d = 0 + } + dmin = math.Min(dmin, d) + z[j4-1] = z[j4] * tmp + emin = math.Min(z[j4-1], emin) + } + } + // Unroll the last two steps. + dnm2 = d + dmin2 = dmin + j4 = 4*((n0+1)-2) - pp - 1 + j4p2 := j4 + 2*pp - 1 + z[j4-2] = dnm2 + z[j4p2] + z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2]) + dnm1 = z[j4p2+2]*(dnm2/z[j4-2]) - tau + dmin = math.Min(dmin, dnm1) + + dmin1 = dmin + j4 += 4 + j4p2 = j4 + 2*pp - 1 + z[j4-2] = dnm1 + z[j4p2] + z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2]) + dn = z[j4p2+2]*(dnm1/z[j4-2]) - tau + dmin = math.Min(dmin, dn) + } + z[j4+2] = dn + z[4*(n0+1)-pp-1] = emin + return i0, n0, pp, tau, sigma, dmin, dmin1, dmin2, dn, dnm1, dnm2 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq6.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq6.go new file mode 100644 index 0000000..54bf587 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasq6.go @@ -0,0 +1,118 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlasq6 computes one dqd transform in ping-pong form with protection against +// overflow and underflow. z has length at least 4*(n0+1) and holds the qd array. +// i0 is the zero-based first index. +// n0 is the zero-based last index. +// +// Dlasq6 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasq6(i0, n0 int, z []float64, pp int) (dmin, dmin1, dmin2, dn, dnm1, dnm2 float64) { + switch { + case i0 < 0: + panic(i0LT0) + case n0 < 0: + panic(n0LT0) + case len(z) < 4*n0: + panic(shortZ) + case pp != 0 && pp != 1: + panic(badPp) + } + + if n0-i0-1 <= 0 { + return dmin, dmin1, dmin2, dn, dnm1, dnm2 + } + + safmin := dlamchS + j4 := 4*(i0+1) + pp - 4 // -4 rather than -3 for zero indexing + emin := z[j4+4] + d := z[j4] + dmin = d + if pp == 0 { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 // Translate back to zero-indexed. + z[j4-2] = d + z[j4-1] + if z[j4-2] == 0 { + z[j4] = 0 + d = z[j4+1] + dmin = d + emin = 0 + } else if safmin*z[j4+1] < z[j4-2] && safmin*z[j4-2] < z[j4+1] { + tmp := z[j4+1] / z[j4-2] + z[j4] = z[j4-1] * tmp + d *= tmp + } else { + z[j4] = z[j4+1] * (z[j4-1] / z[j4-2]) + d = z[j4+1] * (d / z[j4-2]) + } + dmin = math.Min(dmin, d) + emin = math.Min(emin, z[j4]) + } + } else { + for j4loop := 4 * (i0 + 1); j4loop <= 4*((n0+1)-3); j4loop += 4 { + j4 := j4loop - 1 + z[j4-3] = d + z[j4] + if z[j4-3] == 0 { + z[j4-1] = 0 + d = z[j4+2] + dmin = d + emin = 0 + } else if safmin*z[j4+2] < z[j4-3] && safmin*z[j4-3] < z[j4+2] { + tmp := z[j4+2] / z[j4-3] + z[j4-1] = z[j4] * tmp + d *= tmp + } else { + z[j4-1] = z[j4+2] * (z[j4] / z[j4-3]) + d = z[j4+2] * (d / z[j4-3]) + } + dmin = math.Min(dmin, d) + emin = math.Min(emin, z[j4-1]) + } + } + // Unroll last two steps. 
+	dnm2 = d
+	dmin2 = dmin
+	j4 = 4*(n0-1) - pp - 1
+	j4p2 := j4 + 2*pp - 1
+	z[j4-2] = dnm2 + z[j4p2]
+	if z[j4-2] == 0 {
+		z[j4] = 0
+		dnm1 = z[j4p2+2]
+		dmin = dnm1
+		emin = 0
+	} else if safmin*z[j4p2+2] < z[j4-2] && safmin*z[j4-2] < z[j4p2+2] {
+		tmp := z[j4p2+2] / z[j4-2]
+		z[j4] = z[j4p2] * tmp
+		dnm1 = dnm2 * tmp
+	} else {
+		z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2])
+		dnm1 = z[j4p2+2] * (dnm2 / z[j4-2])
+	}
+	dmin = math.Min(dmin, dnm1)
+	dmin1 = dmin
+	j4 += 4
+	j4p2 = j4 + 2*pp - 1
+	z[j4-2] = dnm1 + z[j4p2]
+	if z[j4-2] == 0 {
+		z[j4] = 0
+		dn = z[j4p2+2]
+		dmin = dn
+		emin = 0
+	} else if safmin*z[j4p2+2] < z[j4-2] && safmin*z[j4-2] < z[j4p2+2] {
+		tmp := z[j4p2+2] / z[j4-2]
+		z[j4] = z[j4p2] * tmp
+		dn = dnm1 * tmp
+	} else {
+		z[j4] = z[j4p2+2] * (z[j4p2] / z[j4-2])
+		dn = z[j4p2+2] * (dnm1 / z[j4-2])
+	}
+	dmin = math.Min(dmin, dn)
+	z[j4+2] = dn
+	z[4*(n0+1)-pp-1] = emin
+	return dmin, dmin1, dmin2, dn, dnm1, dnm2
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasr.go
new file mode 100644
index 0000000..a7dbe00
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasr.go
@@ -0,0 +1,279 @@
+// Copyright ©2015 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package gonum
+
+import (
+	"gonum.org/v1/gonum/blas"
+	"gonum.org/v1/gonum/lapack"
+)
+
+// Dlasr applies a sequence of plane rotations to the m×n matrix A. This series
+// of plane rotations is implicitly represented by a matrix P. P is multiplied
+// by a depending on the value of side -- A = P * A if side == lapack.Left,
+// A = A * P^T if side == lapack.Right.
+//
+// The exact value of P depends on the value of pivot, but in all cases P is
+// implicitly represented by a series of 2×2 rotation matrices. The entries of
+// rotation matrix k are defined by s[k] and c[k]
+//  R(k) = [ c[k] s[k]]
+//         [-s[k] c[k]]
+// If direct == lapack.Forward, the rotation matrices are applied as
+// P = P(z-1) * ... * P(2) * P(1), while if direct == lapack.Backward they are
+// applied as P = P(1) * P(2) * ... * P(z-1).
+//
+// pivot defines the mapping of the elements in R(k) to P(k).
+// If pivot == lapack.Variable, the rotation is performed for the (k, k+1) plane.
+//  P(k) = [1                              ]
+//         [     ...                       ]
+//         [          1                    ]
+//         [            c[k]  s[k]         ]
+//         [           -s[k]  c[k]         ]
+//         [                        1      ]
+//         [                          ...  ]
+//         [                              1]
+// if pivot == lapack.Top, the rotation is performed for the (1, k+1) plane,
+//  P(k) = [c[k]                s[k]       ]
+//         [      1                        ]
+//         [       ...                     ]
+//         [            1                  ]
+//         [-s[k]               c[k]       ]
+//         [                         1     ]
+//         [                          ...  ]
+//         [                              1]
+// and if pivot == lapack.Bottom, the rotation is performed for the (k, z) plane.
+//  P(k) = [1                              ]
+//         [     ...                       ]
+//         [          1                    ]
+//         [            c[k]          s[k] ]
+//         [                  1            ]
+//         [                   ...         ]
+//         [                        1      ]
+//         [           -s[k]          c[k] ]
+// s and c have length m - 1 if side == blas.Left, and n - 1 if side == blas.Right.
+//
+// Dlasr is an internal routine. It is exported for testing purposes.
+func (impl Implementation) Dlasr(side blas.Side, pivot lapack.Pivot, direct lapack.Direct, m, n int, c, s, a []float64, lda int) {
+	switch {
+	case side != blas.Left && side != blas.Right:
+		panic(badSide)
+	case pivot != lapack.Variable && pivot != lapack.Top && pivot != lapack.Bottom:
+		panic(badPivot)
+	case direct != lapack.Forward && direct != lapack.Backward:
+		panic(badDirect)
+	case m < 0:
+		panic(mLT0)
+	case n < 0:
+		panic(nLT0)
+	case lda < max(1, n):
+		panic(badLdA)
+	}
+
+	// Quick return if possible.
+	if m == 0 || n == 0 {
+		return
+	}
+
+	if side == blas.Left {
+		if len(c) < m-1 {
+			panic(shortC)
+		}
+		if len(s) < m-1 {
+			panic(shortS)
+		}
+	} else {
+		if len(c) < n-1 {
+			panic(shortC)
+		}
+		if len(s) < n-1 {
+			panic(shortS)
+		}
+	}
+	if len(a) < (m-1)*lda+n {
+		panic(shortA)
+	}
+
+	if side == blas.Left {
+		if pivot == lapack.Variable {
+			if direct == lapack.Forward {
+				for j := 0; j < m-1; j++ {
+					ctmp := c[j]
+					stmp := s[j]
+					if ctmp != 1 || stmp != 0 {
+						for i := 0; i < n; i++ {
+							tmp2 := a[j*lda+i]
+							tmp := a[(j+1)*lda+i]
+							a[(j+1)*lda+i] = ctmp*tmp - stmp*tmp2
+							a[j*lda+i] = stmp*tmp + ctmp*tmp2
+						}
+					}
+				}
+				return
+			}
+			for j := m - 2; j >= 0; j-- {
+				ctmp := c[j]
+				stmp := s[j]
+				if ctmp != 1 || stmp != 0 {
+					for i := 0; i < n; i++ {
+						tmp2 := a[j*lda+i]
+						tmp := a[(j+1)*lda+i]
+						a[(j+1)*lda+i] = ctmp*tmp - stmp*tmp2
+						a[j*lda+i] = stmp*tmp + ctmp*tmp2
+					}
+				}
+			}
+			return
+		} else if pivot == lapack.Top {
+			if direct == lapack.Forward {
+				for j := 1; j < m; j++ {
+					ctmp := c[j-1]
+					stmp := s[j-1]
+					if ctmp != 1 || stmp != 0 {
+						for i := 0; i < n; i++ {
+							tmp := a[j*lda+i]
+							tmp2 := a[i]
+							a[j*lda+i] = ctmp*tmp - stmp*tmp2
+							a[i] = stmp*tmp + ctmp*tmp2
+						}
+					}
+				}
+				return
+			}
+			for j := m - 1; j >= 1; j-- {
+				ctmp := c[j-1]
+				stmp := s[j-1]
+				if ctmp != 1 || stmp != 0 {
+					for i := 0; i < n; i++ {
+						tmp := a[j*lda+i]
+						tmp2 := a[i]
+						a[j*lda+i] = ctmp*tmp - stmp*tmp2
+						a[i] = stmp*tmp + ctmp*tmp2
+					}
+				}
+			}
+			return
+		}
+		if direct == lapack.Forward {
+			for j := 0; j < m-1; j++ {
+				ctmp := c[j]
+				stmp := s[j]
+				if ctmp != 1 || stmp != 0 {
+					for i := 0; i < n; i++ {
+						tmp := a[j*lda+i]
+						tmp2 := a[(m-1)*lda+i]
+						a[j*lda+i] = stmp*tmp2 + ctmp*tmp
+						a[(m-1)*lda+i] = ctmp*tmp2 - stmp*tmp
+					}
+				}
+			}
+			return
+		}
+		for j := m - 2; j >= 0; j-- {
+			ctmp := c[j]
+			stmp := s[j]
+			if ctmp != 1 || stmp != 0 {
+				for i := 0; i < n; i++ {
+					tmp := a[j*lda+i]
+					tmp2 := a[(m-1)*lda+i]
+					a[j*lda+i] = stmp*tmp2 + ctmp*tmp
+					a[(m-1)*lda+i] = ctmp*tmp2 - stmp*tmp
+				}
+			}
+		}
+		return
+	}
+	if pivot == lapack.Variable {
+		if direct == lapack.Forward {
+			for j := 0; j < n-1; j++ {
+				ctmp := c[j]
+				stmp := s[j]
+				if ctmp != 1 || stmp != 0 {
+					for i := 0; i < m; i++ {
+						tmp := a[i*lda+j+1]
+						tmp2 := a[i*lda+j]
+						a[i*lda+j+1] = ctmp*tmp - stmp*tmp2
+						a[i*lda+j] = stmp*tmp + ctmp*tmp2
+					}
+				}
+			}
+			return
+		}
+		for j := n - 2; j >= 0; j-- {
+			ctmp := c[j]
+			stmp := s[j]
+			if ctmp != 1 || stmp != 0 {
+				for i := 0; i < m; i++ {
+					tmp := a[i*lda+j+1]
+					tmp2 := a[i*lda+j]
+					a[i*lda+j+1] = ctmp*tmp - stmp*tmp2
+					a[i*lda+j] = stmp*tmp + ctmp*tmp2
+				}
+			}
+		}
+		return
+	} else if pivot == lapack.Top {
+		if direct == lapack.Forward {
+			for j := 1; j < n; j++ {
+				ctmp := c[j-1]
+				stmp := s[j-1]
+				if ctmp != 1 || stmp != 0 {
+					for i := 0; i < m; i++ {
+						tmp := a[i*lda+j]
+						tmp2 := a[i*lda]
+						a[i*lda+j] = ctmp*tmp - stmp*tmp2
+						a[i*lda] = stmp*tmp + ctmp*tmp2
+					}
+				}
+			}
+			return
+		}
+		for j := n - 1; j >= 1; j-- {
+			ctmp := c[j-1]
+			stmp := s[j-1]
+			if ctmp != 1 || stmp != 0 {
+				for i := 0; i < m; i++ {
+					tmp := a[i*lda+j]
+					tmp2 := a[i*lda]
+					a[i*lda+j] = ctmp*tmp - stmp*tmp2
+					a[i*lda] = stmp*tmp + ctmp*tmp2
+				}
+			}
+		}
+		return
+	}
+	if direct == lapack.Forward {
+		for j := 0; j < n-1; j++ {
+			ctmp := c[j]
+			stmp := s[j]
+			if ctmp != 1 || stmp != 0 {
+				for i := 0; i < m; i++ {
+					tmp := a[i*lda+j]
+					tmp2 := a[i*lda+n-1]
+					a[i*lda+j] = stmp*tmp2 + ctmp*tmp
+					a[i*lda+n-1] = ctmp*tmp2 - stmp*tmp
+				}
+
+			}
+		}
+
return + } + for j := n - 2; j >= 0; j-- { + ctmp := c[j] + stmp := s[j] + if ctmp != 1 || stmp != 0 { + for i := 0; i < m; i++ { + tmp := a[i*lda+j] + tmp2 := a[i*lda+n-1] + a[i*lda+j] = stmp*tmp2 + ctmp*tmp + a[i*lda+n-1] = ctmp*tmp2 - stmp*tmp + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasrt.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasrt.go new file mode 100644 index 0000000..be47280 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasrt.go @@ -0,0 +1,36 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "sort" + + "gonum.org/v1/gonum/lapack" +) + +// Dlasrt sorts the numbers in the input slice d. If s == lapack.SortIncreasing, +// the elements are sorted in increasing order. If s == lapack.SortDecreasing, +// the elements are sorted in decreasing order. For other values of s Dlasrt +// will panic. +// +// Dlasrt is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasrt(s lapack.Sort, n int, d []float64) { + switch { + case n < 0: + panic(nLT0) + case len(d) < n: + panic(shortD) + } + + d = d[:n] + switch s { + default: + panic(badSort) + case lapack.SortIncreasing: + sort.Float64s(d) + case lapack.SortDecreasing: + sort.Sort(sort.Reverse(sort.Float64Slice(d))) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlassq.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlassq.go new file mode 100644 index 0000000..9c2dc77 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlassq.go @@ -0,0 +1,41 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlassq updates a sum of squares in scaled form. The input parameters scale and +// sumsq represent the current scale and total sum of squares. These values are +// updated with the information in the first n elements of the vector specified +// by x and incX. +// +// Dlassq is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlassq(n int, x []float64, incx int, scale float64, sumsq float64) (scl, smsq float64) { + switch { + case n < 0: + panic(nLT0) + case incx <= 0: + panic(badIncX) + case len(x) < 1+(n-1)*incx: + panic(shortX) + } + + if n == 0 { + return scale, sumsq + } + + for ix := 0; ix <= (n-1)*incx; ix += incx { + absxi := math.Abs(x[ix]) + if absxi > 0 || math.IsNaN(absxi) { + if scale < absxi { + sumsq = 1 + sumsq*(scale/absxi)*(scale/absxi) + scale = absxi + } else { + sumsq += (absxi / scale) * (absxi / scale) + } + } + } + return scale, sumsq +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasv2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasv2.go new file mode 100644 index 0000000..204af19 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasv2.go @@ -0,0 +1,115 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Dlasv2 computes the singular value decomposition of a 2×2 matrix. +// [ csl snl] [f g] [csr -snr] = [ssmax 0] +// [-snl csl] [0 h] [snr csr] = [ 0 ssmin] +// ssmax is the larger absolute singular value, and ssmin is the smaller absolute +// singular value. [cls, snl] and [csr, snr] are the left and right singular vectors. 
+// +// Dlasv2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasv2(f, g, h float64) (ssmin, ssmax, snr, csr, snl, csl float64) { + ft := f + fa := math.Abs(ft) + ht := h + ha := math.Abs(h) + // pmax points to the largest element of the matrix in terms of absolute value. + // 1 if F, 2 if G, 3 if H. + pmax := 1 + swap := ha > fa + if swap { + pmax = 3 + ft, ht = ht, ft + fa, ha = ha, fa + } + gt := g + ga := math.Abs(gt) + var clt, crt, slt, srt float64 + if ga == 0 { + ssmin = ha + ssmax = fa + clt = 1 + crt = 1 + slt = 0 + srt = 0 + } else { + gasmall := true + if ga > fa { + pmax = 2 + if (fa / ga) < dlamchE { + gasmall = false + ssmax = ga + if ha > 1 { + ssmin = fa / (ga / ha) + } else { + ssmin = (fa / ga) * ha + } + clt = 1 + slt = ht / gt + srt = 1 + crt = ft / gt + } + } + if gasmall { + d := fa - ha + l := d / fa + if d == fa { // deal with inf + l = 1 + } + m := gt / ft + t := 2 - l + s := math.Hypot(t, m) + var r float64 + if l == 0 { + r = math.Abs(m) + } else { + r = math.Hypot(l, m) + } + a := 0.5 * (s + r) + ssmin = ha / a + ssmax = fa * a + if m == 0 { + if l == 0 { + t = math.Copysign(2, ft) * math.Copysign(1, gt) + } else { + t = gt/math.Copysign(d, ft) + m/t + } + } else { + t = (m/(s+t) + m/(r+l)) * (1 + a) + } + l = math.Hypot(t, 2) + crt = 2 / l + srt = t / l + clt = (crt + srt*m) / a + slt = (ht / ft) * srt / a + } + } + if swap { + csl = srt + snl = crt + csr = slt + snr = clt + } else { + csl = clt + snl = slt + csr = crt + snr = srt + } + var tsign float64 + switch pmax { + case 1: + tsign = math.Copysign(1, csr) * math.Copysign(1, csl) * math.Copysign(1, f) + case 2: + tsign = math.Copysign(1, snr) * math.Copysign(1, csl) * math.Copysign(1, g) + case 3: + tsign = math.Copysign(1, snr) * math.Copysign(1, snl) * math.Copysign(1, h) + } + ssmax = math.Copysign(ssmax, tsign) + ssmin = math.Copysign(ssmin, tsign*math.Copysign(1, f)*math.Copysign(1, h)) + return ssmin, ssmax, snr, csr, snl, csl +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlaswp.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaswp.go new file mode 100644 index 0000000..b207d12 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlaswp.go @@ -0,0 +1,52 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas/blas64" + +// Dlaswp swaps the rows k1 to k2 of a rectangular matrix A according to the +// indices in ipiv so that row k is swapped with ipiv[k]. +// +// n is the number of columns of A and incX is the increment for ipiv. If incX +// is 1, the swaps are applied from k1 to k2. If incX is -1, the swaps are +// applied in reverse order from k2 to k1. For other values of incX Dlaswp will +// panic. ipiv must have length k2+1, otherwise Dlaswp will panic. +// +// The indices k1, k2, and the elements of ipiv are zero-based. +// +// Dlaswp is an internal routine. It is exported for testing purposes. 
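// Illustrative sketch (not part of the vendored gonum sources): one way the
// Dlaswp routine documented above might be called to apply LU-style row
// interchanges to a small matrix. The data is made up for the example; only
// the vendored package path and the exported Implementation type are assumed.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// 3×3 row-major matrix with leading dimension 3.
	a := []float64{
		1, 2, 3,
		4, 5, 6,
		7, 8, 9,
	}
	// ipiv[k] names the row that row k is exchanged with: swap row 0 with
	// row 2, leave rows 1 and 2 where they are. len(ipiv) must be k2+1.
	ipiv := []int{2, 1, 2}

	// Apply the swaps for k = 0..2 in forward order (incX = 1).
	impl.Dlaswp(3, a, 3, 0, 2, ipiv, 1)

	fmt.Println(a[0:3]) // [7 8 9]
	fmt.Println(a[3:6]) // [4 5 6]
	fmt.Println(a[6:9]) // [1 2 3]
}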
+func (impl Implementation) Dlaswp(n int, a []float64, lda int, k1, k2 int, ipiv []int, incX int) { + switch { + case n < 0: + panic(nLT0) + case k2 < 0: + panic(badK2) + case k1 < 0 || k2 < k1: + panic(badK1) + case lda < max(1, n): + panic(badLdA) + case len(a) < (k2-1)*lda+n: + panic(shortA) + case len(ipiv) != k2+1: + panic(badLenIpiv) + case incX != 1 && incX != -1: + panic(absIncNotOne) + } + + if n == 0 { + return + } + + bi := blas64.Implementation() + if incX == 1 { + for k := k1; k <= k2; k++ { + bi.Dswap(n, a[k*lda:], 1, a[ipiv[k]*lda:], 1) + } + return + } + for k := k2; k >= k1; k-- { + bi.Dswap(n, a[k*lda:], 1, a[ipiv[k]*lda:], 1) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlasy2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasy2.go new file mode 100644 index 0000000..abfe60e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlasy2.go @@ -0,0 +1,290 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlasy2 solves the Sylvester matrix equation where the matrices are of order 1 +// or 2. It computes the unknown n1×n2 matrix X so that +// TL*X + sgn*X*TR = scale*B, if tranl == false and tranr == false, +// TL^T*X + sgn*X*TR = scale*B, if tranl == true and tranr == false, +// TL*X + sgn*X*TR^T = scale*B, if tranl == false and tranr == true, +// TL^T*X + sgn*X*TR^T = scale*B, if tranl == true and tranr == true, +// where TL is n1×n1, TR is n2×n2, B is n1×n2, and 1 <= n1,n2 <= 2. +// +// isgn must be 1 or -1, and n1 and n2 must be 0, 1, or 2, but these conditions +// are not checked. +// +// Dlasy2 returns three values, a scale factor that is chosen less than or equal +// to 1 to prevent the solution overflowing, the infinity norm of the solution, +// and an indicator of success. If ok is false, TL and TR have eigenvalues that +// are too close, so TL or TR is perturbed to get a non-singular equation. +// +// Dlasy2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlasy2(tranl, tranr bool, isgn, n1, n2 int, tl []float64, ldtl int, tr []float64, ldtr int, b []float64, ldb int, x []float64, ldx int) (scale, xnorm float64, ok bool) { + // TODO(vladimir-ch): Add input validation checks conditionally skipped + // using the build tag mechanism. + + ok = true + // Quick return if possible. + if n1 == 0 || n2 == 0 { + return scale, xnorm, ok + } + + // Set constants to control overflow. + eps := dlamchP + smlnum := dlamchS / eps + sgn := float64(isgn) + + if n1 == 1 && n2 == 1 { + // 1×1 case: TL11*X + sgn*X*TR11 = B11. + tau1 := tl[0] + sgn*tr[0] + bet := math.Abs(tau1) + if bet <= smlnum { + tau1 = smlnum + bet = smlnum + ok = false + } + scale = 1 + gam := math.Abs(b[0]) + if smlnum*gam > bet { + scale = 1 / gam + } + x[0] = b[0] * scale / tau1 + xnorm = math.Abs(x[0]) + return scale, xnorm, ok + } + + if n1+n2 == 3 { + // 1×2 or 2×1 case. + var ( + smin float64 + tmp [4]float64 // tmp is used as a 2×2 row-major matrix. + btmp [2]float64 + ) + if n1 == 1 && n2 == 2 { + // 1×2 case: TL11*[X11 X12] + sgn*[X11 X12]*op[TR11 TR12] = [B11 B12]. 
+ // [TR21 TR22] + smin = math.Abs(tl[0]) + smin = math.Max(smin, math.Max(math.Abs(tr[0]), math.Abs(tr[1]))) + smin = math.Max(smin, math.Max(math.Abs(tr[ldtr]), math.Abs(tr[ldtr+1]))) + smin = math.Max(eps*smin, smlnum) + tmp[0] = tl[0] + sgn*tr[0] + tmp[3] = tl[0] + sgn*tr[ldtr+1] + if tranr { + tmp[1] = sgn * tr[1] + tmp[2] = sgn * tr[ldtr] + } else { + tmp[1] = sgn * tr[ldtr] + tmp[2] = sgn * tr[1] + } + btmp[0] = b[0] + btmp[1] = b[1] + } else { + // 2×1 case: op[TL11 TL12]*[X11] + sgn*[X11]*TR11 = [B11]. + // [TL21 TL22]*[X21] [X21] [B21] + smin = math.Abs(tr[0]) + smin = math.Max(smin, math.Max(math.Abs(tl[0]), math.Abs(tl[1]))) + smin = math.Max(smin, math.Max(math.Abs(tl[ldtl]), math.Abs(tl[ldtl+1]))) + smin = math.Max(eps*smin, smlnum) + tmp[0] = tl[0] + sgn*tr[0] + tmp[3] = tl[ldtl+1] + sgn*tr[0] + if tranl { + tmp[1] = tl[ldtl] + tmp[2] = tl[1] + } else { + tmp[1] = tl[1] + tmp[2] = tl[ldtl] + } + btmp[0] = b[0] + btmp[1] = b[ldb] + } + + // Solve 2×2 system using complete pivoting. + // Set pivots less than smin to smin. + + bi := blas64.Implementation() + ipiv := bi.Idamax(len(tmp), tmp[:], 1) + // Compute the upper triangular matrix [u11 u12]. + // [ 0 u22] + u11 := tmp[ipiv] + if math.Abs(u11) <= smin { + ok = false + u11 = smin + } + locu12 := [4]int{1, 0, 3, 2} // Index in tmp of the element on the same row as the pivot. + u12 := tmp[locu12[ipiv]] + locl21 := [4]int{2, 3, 0, 1} // Index in tmp of the element on the same column as the pivot. + l21 := tmp[locl21[ipiv]] / u11 + locu22 := [4]int{3, 2, 1, 0} // Index in tmp of the remaining element. + u22 := tmp[locu22[ipiv]] - l21*u12 + if math.Abs(u22) <= smin { + ok = false + u22 = smin + } + if ipiv&0x2 != 0 { // true for ipiv equal to 2 and 3. + // The pivot was in the second row, swap the elements of + // the right-hand side. + btmp[0], btmp[1] = btmp[1], btmp[0]-l21*btmp[1] + } else { + btmp[1] -= l21 * btmp[0] + } + scale = 1 + if 2*smlnum*math.Abs(btmp[1]) > math.Abs(u22) || 2*smlnum*math.Abs(btmp[0]) > math.Abs(u11) { + scale = 0.5 / math.Max(math.Abs(btmp[0]), math.Abs(btmp[1])) + btmp[0] *= scale + btmp[1] *= scale + } + // Solve the system [u11 u12] [x21] = [ btmp[0] ]. + // [ 0 u22] [x22] [ btmp[1] ] + x22 := btmp[1] / u22 + x21 := btmp[0]/u11 - (u12/u11)*x22 + if ipiv&0x1 != 0 { // true for ipiv equal to 1 and 3. + // The pivot was in the second column, swap the elements + // of the solution. + x21, x22 = x22, x21 + } + x[0] = x21 + if n1 == 1 { + x[1] = x22 + xnorm = math.Abs(x[0]) + math.Abs(x[1]) + } else { + x[ldx] = x22 + xnorm = math.Max(math.Abs(x[0]), math.Abs(x[ldx])) + } + return scale, xnorm, ok + } + + // 2×2 case: op[TL11 TL12]*[X11 X12] + SGN*[X11 X12]*op[TR11 TR12] = [B11 B12]. + // [TL21 TL22] [X21 X22] [X21 X22] [TR21 TR22] [B21 B22] + // + // Solve equivalent 4×4 system using complete pivoting. + // Set pivots less than smin to smin. 
+ + smin := math.Max(math.Abs(tr[0]), math.Abs(tr[1])) + smin = math.Max(smin, math.Max(math.Abs(tr[ldtr]), math.Abs(tr[ldtr+1]))) + smin = math.Max(smin, math.Max(math.Abs(tl[0]), math.Abs(tl[1]))) + smin = math.Max(smin, math.Max(math.Abs(tl[ldtl]), math.Abs(tl[ldtl+1]))) + smin = math.Max(eps*smin, smlnum) + + var t [4][4]float64 + t[0][0] = tl[0] + sgn*tr[0] + t[1][1] = tl[0] + sgn*tr[ldtr+1] + t[2][2] = tl[ldtl+1] + sgn*tr[0] + t[3][3] = tl[ldtl+1] + sgn*tr[ldtr+1] + if tranl { + t[0][2] = tl[ldtl] + t[1][3] = tl[ldtl] + t[2][0] = tl[1] + t[3][1] = tl[1] + } else { + t[0][2] = tl[1] + t[1][3] = tl[1] + t[2][0] = tl[ldtl] + t[3][1] = tl[ldtl] + } + if tranr { + t[0][1] = sgn * tr[1] + t[1][0] = sgn * tr[ldtr] + t[2][3] = sgn * tr[1] + t[3][2] = sgn * tr[ldtr] + } else { + t[0][1] = sgn * tr[ldtr] + t[1][0] = sgn * tr[1] + t[2][3] = sgn * tr[ldtr] + t[3][2] = sgn * tr[1] + } + + var btmp [4]float64 + btmp[0] = b[0] + btmp[1] = b[1] + btmp[2] = b[ldb] + btmp[3] = b[ldb+1] + + // Perform elimination. + var jpiv [4]int // jpiv records any column swaps for pivoting. + for i := 0; i < 3; i++ { + var ( + xmax float64 + ipsv, jpsv int + ) + for ip := i; ip < 4; ip++ { + for jp := i; jp < 4; jp++ { + if math.Abs(t[ip][jp]) >= xmax { + xmax = math.Abs(t[ip][jp]) + ipsv = ip + jpsv = jp + } + } + } + if ipsv != i { + // The pivot is not in the top row of the unprocessed + // block, swap rows ipsv and i of t and btmp. + t[ipsv], t[i] = t[i], t[ipsv] + btmp[ipsv], btmp[i] = btmp[i], btmp[ipsv] + } + if jpsv != i { + // The pivot is not in the left column of the + // unprocessed block, swap columns jpsv and i of t. + for k := 0; k < 4; k++ { + t[k][jpsv], t[k][i] = t[k][i], t[k][jpsv] + } + } + jpiv[i] = jpsv + if math.Abs(t[i][i]) < smin { + ok = false + t[i][i] = smin + } + for k := i + 1; k < 4; k++ { + t[k][i] /= t[i][i] + btmp[k] -= t[k][i] * btmp[i] + for j := i + 1; j < 4; j++ { + t[k][j] -= t[k][i] * t[i][j] + } + } + } + if math.Abs(t[3][3]) < smin { + ok = false + t[3][3] = smin + } + scale = 1 + if 8*smlnum*math.Abs(btmp[0]) > math.Abs(t[0][0]) || + 8*smlnum*math.Abs(btmp[1]) > math.Abs(t[1][1]) || + 8*smlnum*math.Abs(btmp[2]) > math.Abs(t[2][2]) || + 8*smlnum*math.Abs(btmp[3]) > math.Abs(t[3][3]) { + + maxbtmp := math.Max(math.Abs(btmp[0]), math.Abs(btmp[1])) + maxbtmp = math.Max(maxbtmp, math.Max(math.Abs(btmp[2]), math.Abs(btmp[3]))) + scale = 1 / 8 / maxbtmp + btmp[0] *= scale + btmp[1] *= scale + btmp[2] *= scale + btmp[3] *= scale + } + // Compute the solution of the upper triangular system t * tmp = btmp. + var tmp [4]float64 + for i := 3; i >= 0; i-- { + temp := 1 / t[i][i] + tmp[i] = btmp[i] * temp + for j := i + 1; j < 4; j++ { + tmp[i] -= temp * t[i][j] * tmp[j] + } + } + for i := 2; i >= 0; i-- { + if jpiv[i] != i { + tmp[i], tmp[jpiv[i]] = tmp[jpiv[i]], tmp[i] + } + } + x[0] = tmp[0] + x[1] = tmp[1] + x[ldx] = tmp[2] + x[ldx+1] = tmp[3] + xnorm = math.Max(math.Abs(tmp[0])+math.Abs(tmp[1]), math.Abs(tmp[2])+math.Abs(tmp[3])) + return scale, xnorm, ok +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrd.go new file mode 100644 index 0000000..018efc9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrd.go @@ -0,0 +1,165 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
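// Illustrative sketch (not part of the vendored gonum sources): solving a tiny
// Sylvester equation TL*X + X*TR = scale*B with the Dlasy2 routine implemented
// above. The operands are made up; only the vendored package path and the
// exported Implementation type are assumed.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// 2×2 row-major operands, each with leading dimension 2.
	tl := []float64{
		4, 1,
		0, 3,
	}
	tr := []float64{
		2, 0,
		1, 5,
	}
	b := []float64{
		1, 2,
		3, 4,
	}
	x := make([]float64, 4)

	// Solve TL*X + X*TR = scale*B (tranl = tranr = false, isgn = +1).
	scale, xnorm, ok := impl.Dlasy2(false, false, 1, 2, 2, tl, 2, tr, 2, b, 2, x, 2)

	fmt.Println(scale, xnorm, ok) // scale is 1 and ok is true for this well-separated pair
	fmt.Println(x)                // the 2×2 solution X in row-major order
}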
+ +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlatrd reduces nb rows and columns of a real n×n symmetric matrix A to symmetric +// tridiagonal form. It computes the orthonormal similarity transformation +// Q^T * A * Q +// and returns the matrices V and W to apply to the unreduced part of A. If +// uplo == blas.Upper, the upper triangle is supplied and the last nb rows are +// reduced. If uplo == blas.Lower, the lower triangle is supplied and the first +// nb rows are reduced. +// +// a contains the symmetric matrix on entry with active triangular half specified +// by uplo. On exit, the nb columns have been reduced to tridiagonal form. The +// diagonal contains the diagonal of the reduced matrix, the off-diagonal is +// set to 1, and the remaining elements contain the data to construct Q. +// +// If uplo == blas.Upper, with n = 5 and nb = 2 on exit a is +// [ a a a v4 v5] +// [ a a v4 v5] +// [ a 1 v5] +// [ d 1] +// [ d] +// +// If uplo == blas.Lower, with n = 5 and nb = 2, on exit a is +// [ d ] +// [ 1 d ] +// [v1 1 a ] +// [v1 v2 a a ] +// [v1 v2 a a a] +// +// e contains the superdiagonal elements of the reduced matrix. If uplo == blas.Upper, +// e[n-nb:n-1] contains the last nb columns of the reduced matrix, while if +// uplo == blas.Lower, e[:nb] contains the first nb columns of the reduced matrix. +// e must have length at least n-1, and Dlatrd will panic otherwise. +// +// tau contains the scalar factors of the elementary reflectors needed to construct Q. +// The reflectors are stored in tau[n-nb:n-1] if uplo == blas.Upper, and in +// tau[:nb] if uplo == blas.Lower. tau must have length n-1, and Dlatrd will panic +// otherwise. +// +// w is an n×nb matrix. On exit it contains the data to update the unreduced part +// of A. +// +// The matrix Q is represented as a product of elementary reflectors. Each reflector +// H has the form +// I - tau * v * v^T +// If uplo == blas.Upper, +// Q = H_{n-1} * H_{n-2} * ... * H_{n-nb} +// where v[:i-1] is stored in A[:i-1,i], v[i-1] = 1, and v[i:n] = 0. +// +// If uplo == blas.Lower, +// Q = H_0 * H_1 * ... * H_{nb-1} +// where v[:i+1] = 0, v[i+1] = 1, and v[i+2:n] is stored in A[i+2:n,i]. +// +// The vectors v form the n×nb matrix V which is used with W to apply a +// symmetric rank-2 update to the unreduced part of A +// A = A - V * W^T - W * V^T +// +// Dlatrd is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dlatrd(uplo blas.Uplo, n, nb int, a []float64, lda int, e, tau, w []float64, ldw int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case nb < 0: + panic(nbLT0) + case nb > n: + panic(nbGTN) + case lda < max(1, n): + panic(badLdA) + case ldw < max(1, nb): + panic(badLdW) + } + + if n == 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(w) < (n-1)*ldw+nb: + panic(shortW) + case len(e) < n-1: + panic(shortE) + case len(tau) < n-1: + panic(shortTau) + } + + bi := blas64.Implementation() + + if uplo == blas.Upper { + for i := n - 1; i >= n-nb; i-- { + iw := i - n + nb + if i < n-1 { + // Update A(0:i, i). + bi.Dgemv(blas.NoTrans, i+1, n-i-1, -1, a[i+1:], lda, + w[i*ldw+iw+1:], 1, 1, a[i:], lda) + bi.Dgemv(blas.NoTrans, i+1, n-i-1, -1, w[iw+1:], ldw, + a[i*lda+i+1:], 1, 1, a[i:], lda) + } + if i > 0 { + // Generate elementary reflector H_i to annihilate A(0:i-2,i). 
+ e[i-1], tau[i-1] = impl.Dlarfg(i, a[(i-1)*lda+i], a[i:], lda) + a[(i-1)*lda+i] = 1 + + // Compute W(0:i-1, i). + bi.Dsymv(blas.Upper, i, 1, a, lda, a[i:], lda, 0, w[iw:], ldw) + if i < n-1 { + bi.Dgemv(blas.Trans, i, n-i-1, 1, w[iw+1:], ldw, + a[i:], lda, 0, w[(i+1)*ldw+iw:], ldw) + bi.Dgemv(blas.NoTrans, i, n-i-1, -1, a[i+1:], lda, + w[(i+1)*ldw+iw:], ldw, 1, w[iw:], ldw) + bi.Dgemv(blas.Trans, i, n-i-1, 1, a[i+1:], lda, + a[i:], lda, 0, w[(i+1)*ldw+iw:], ldw) + bi.Dgemv(blas.NoTrans, i, n-i-1, -1, w[iw+1:], ldw, + w[(i+1)*ldw+iw:], ldw, 1, w[iw:], ldw) + } + bi.Dscal(i, tau[i-1], w[iw:], ldw) + alpha := -0.5 * tau[i-1] * bi.Ddot(i, w[iw:], ldw, a[i:], lda) + bi.Daxpy(i, alpha, a[i:], lda, w[iw:], ldw) + } + } + } else { + // Reduce first nb columns of lower triangle. + for i := 0; i < nb; i++ { + // Update A(i:n, i) + bi.Dgemv(blas.NoTrans, n-i, i, -1, a[i*lda:], lda, + w[i*ldw:], 1, 1, a[i*lda+i:], lda) + bi.Dgemv(blas.NoTrans, n-i, i, -1, w[i*ldw:], ldw, + a[i*lda:], 1, 1, a[i*lda+i:], lda) + if i < n-1 { + // Generate elementary reflector H_i to annihilate A(i+2:n,i). + e[i], tau[i] = impl.Dlarfg(n-i-1, a[(i+1)*lda+i], a[min(i+2, n-1)*lda+i:], lda) + a[(i+1)*lda+i] = 1 + + // Compute W(i+1:n,i). + bi.Dsymv(blas.Lower, n-i-1, 1, a[(i+1)*lda+i+1:], lda, + a[(i+1)*lda+i:], lda, 0, w[(i+1)*ldw+i:], ldw) + bi.Dgemv(blas.Trans, n-i-1, i, 1, w[(i+1)*ldw:], ldw, + a[(i+1)*lda+i:], lda, 0, w[i:], ldw) + bi.Dgemv(blas.NoTrans, n-i-1, i, -1, a[(i+1)*lda:], lda, + w[i:], ldw, 1, w[(i+1)*ldw+i:], ldw) + bi.Dgemv(blas.Trans, n-i-1, i, 1, a[(i+1)*lda:], lda, + a[(i+1)*lda+i:], lda, 0, w[i:], ldw) + bi.Dgemv(blas.NoTrans, n-i-1, i, -1, w[(i+1)*ldw:], ldw, + w[i:], ldw, 1, w[(i+1)*ldw+i:], ldw) + bi.Dscal(n-i-1, tau[i], w[(i+1)*ldw+i:], ldw) + alpha := -0.5 * tau[i] * bi.Ddot(n-i-1, w[(i+1)*ldw+i:], ldw, + a[(i+1)*lda+i:], lda) + bi.Daxpy(n-i-1, alpha, a[(i+1)*lda+i:], lda, + w[(i+1)*ldw+i:], ldw) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrs.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrs.go new file mode 100644 index 0000000..dc445c6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlatrs.go @@ -0,0 +1,359 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlatrs solves a triangular system of equations scaled to prevent overflow. It +// solves +// A * x = scale * b if trans == blas.NoTrans +// A^T * x = scale * b if trans == blas.Trans +// where the scale s is set for numeric stability. +// +// A is an n×n triangular matrix. On entry, the slice x contains the values of +// b, and on exit it contains the solution vector x. +// +// If normin == true, cnorm is an input and cnorm[j] contains the norm of the off-diagonal +// part of the j^th column of A. If trans == blas.NoTrans, cnorm[j] must be greater +// than or equal to the infinity norm, and greater than or equal to the one-norm +// otherwise. If normin == false, then cnorm is treated as an output, and is set +// to contain the 1-norm of the off-diagonal part of the j^th column of A. +// +// Dlatrs is an internal routine. It is exported for testing purposes. 
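// Illustrative sketch (not part of the vendored gonum sources): a plain call
// of the Dlatrs routine documented above on a small, well-scaled upper
// triangular system, where it reduces to an ordinary triangular solve with
// scale == 1. The data is made up; only the vendored package path and the
// exported Implementation type are assumed.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// Upper triangular 3×3 matrix A in row-major order, lda = 3.
	a := []float64{
		2, 1, 1,
		0, 3, 1,
		0, 0, 4,
	}
	// On entry x holds b; on exit it holds the solution of A*x = scale*b.
	x := []float64{4, 6, 8}
	// cnorm is filled by Dlatrs because normin == false.
	cnorm := make([]float64, 3)

	scale := impl.Dlatrs(blas.Upper, blas.NoTrans, blas.NonUnit, false, 3, a, 3, x, cnorm)

	fmt.Println(scale) // 1 for this well-conditioned system
	fmt.Println(x)     // solution of A*x = b
	fmt.Println(cnorm) // 1-norms of the off-diagonal part of each column of A
}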
+func (impl Implementation) Dlatrs(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, normin bool, n int, a []float64, lda int, x []float64, cnorm []float64) (scale float64) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTrans) + case diag != blas.Unit && diag != blas.NonUnit: + panic(badDiag) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return 0 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(x) < n: + panic(shortX) + case len(cnorm) < n: + panic(shortCNorm) + } + + upper := uplo == blas.Upper + nonUnit := diag == blas.NonUnit + + smlnum := dlamchS / dlamchP + bignum := 1 / smlnum + scale = 1 + + bi := blas64.Implementation() + + if !normin { + if upper { + cnorm[0] = 0 + for j := 1; j < n; j++ { + cnorm[j] = bi.Dasum(j, a[j:], lda) + } + } else { + for j := 0; j < n-1; j++ { + cnorm[j] = bi.Dasum(n-j-1, a[(j+1)*lda+j:], lda) + } + cnorm[n-1] = 0 + } + } + // Scale the column norms by tscal if the maximum element in cnorm is greater than bignum. + imax := bi.Idamax(n, cnorm, 1) + tmax := cnorm[imax] + var tscal float64 + if tmax <= bignum { + tscal = 1 + } else { + tscal = 1 / (smlnum * tmax) + bi.Dscal(n, tscal, cnorm, 1) + } + + // Compute a bound on the computed solution vector to see if bi.Dtrsv can be used. + j := bi.Idamax(n, x, 1) + xmax := math.Abs(x[j]) + xbnd := xmax + var grow float64 + var jfirst, jlast, jinc int + if trans == blas.NoTrans { + if upper { + jfirst = n - 1 + jlast = -1 + jinc = -1 + } else { + jfirst = 0 + jlast = n + jinc = 1 + } + // Compute the growth in A * x = b. + if tscal != 1 { + grow = 0 + goto Solve + } + if nonUnit { + grow = 1 / math.Max(xbnd, smlnum) + xbnd = grow + for j := jfirst; j != jlast; j += jinc { + if grow <= smlnum { + goto Solve + } + tjj := math.Abs(a[j*lda+j]) + xbnd = math.Min(xbnd, math.Min(1, tjj)*grow) + if tjj+cnorm[j] >= smlnum { + grow *= tjj / (tjj + cnorm[j]) + } else { + grow = 0 + } + } + grow = xbnd + } else { + grow = math.Min(1, 1/math.Max(xbnd, smlnum)) + for j := jfirst; j != jlast; j += jinc { + if grow <= smlnum { + goto Solve + } + grow *= 1 / (1 + cnorm[j]) + } + } + } else { + if upper { + jfirst = 0 + jlast = n + jinc = 1 + } else { + jfirst = n - 1 + jlast = -1 + jinc = -1 + } + if tscal != 1 { + grow = 0 + goto Solve + } + if nonUnit { + grow = 1 / (math.Max(xbnd, smlnum)) + xbnd = grow + for j := jfirst; j != jlast; j += jinc { + if grow <= smlnum { + goto Solve + } + xj := 1 + cnorm[j] + grow = math.Min(grow, xbnd/xj) + tjj := math.Abs(a[j*lda+j]) + if xj > tjj { + xbnd *= tjj / xj + } + } + grow = math.Min(grow, xbnd) + } else { + grow = math.Min(1, 1/math.Max(xbnd, smlnum)) + for j := jfirst; j != jlast; j += jinc { + if grow <= smlnum { + goto Solve + } + xj := 1 + cnorm[j] + grow /= xj + } + } + } + +Solve: + if grow*tscal > smlnum { + // Use the Level 2 BLAS solve if the reciprocal of the bound on + // elements of X is not too small. + bi.Dtrsv(uplo, trans, diag, n, a, lda, x, 1) + if tscal != 1 { + bi.Dscal(n, 1/tscal, cnorm, 1) + } + return scale + } + + // Use a Level 1 BLAS solve, scaling intermediate results. 
+ if xmax > bignum { + scale = bignum / xmax + bi.Dscal(n, scale, x, 1) + xmax = bignum + } + if trans == blas.NoTrans { + for j := jfirst; j != jlast; j += jinc { + xj := math.Abs(x[j]) + var tjj, tjjs float64 + if nonUnit { + tjjs = a[j*lda+j] * tscal + } else { + tjjs = tscal + if tscal == 1 { + goto Skip1 + } + } + tjj = math.Abs(tjjs) + if tjj > smlnum { + if tjj < 1 { + if xj > tjj*bignum { + rec := 1 / xj + bi.Dscal(n, rec, x, 1) + scale *= rec + xmax *= rec + } + } + x[j] /= tjjs + xj = math.Abs(x[j]) + } else if tjj > 0 { + if xj > tjj*bignum { + rec := (tjj * bignum) / xj + if cnorm[j] > 1 { + rec /= cnorm[j] + } + bi.Dscal(n, rec, x, 1) + scale *= rec + xmax *= rec + } + x[j] /= tjjs + xj = math.Abs(x[j]) + } else { + for i := 0; i < n; i++ { + x[i] = 0 + } + x[j] = 1 + xj = 1 + scale = 0 + xmax = 0 + } + Skip1: + if xj > 1 { + rec := 1 / xj + if cnorm[j] > (bignum-xmax)*rec { + rec *= 0.5 + bi.Dscal(n, rec, x, 1) + scale *= rec + } + } else if xj*cnorm[j] > bignum-xmax { + bi.Dscal(n, 0.5, x, 1) + scale *= 0.5 + } + if upper { + if j > 0 { + bi.Daxpy(j, -x[j]*tscal, a[j:], lda, x, 1) + i := bi.Idamax(j, x, 1) + xmax = math.Abs(x[i]) + } + } else { + if j < n-1 { + bi.Daxpy(n-j-1, -x[j]*tscal, a[(j+1)*lda+j:], lda, x[j+1:], 1) + i := j + bi.Idamax(n-j-1, x[j+1:], 1) + xmax = math.Abs(x[i]) + } + } + } + } else { + for j := jfirst; j != jlast; j += jinc { + xj := math.Abs(x[j]) + uscal := tscal + rec := 1 / math.Max(xmax, 1) + var tjjs float64 + if cnorm[j] > (bignum-xj)*rec { + rec *= 0.5 + if nonUnit { + tjjs = a[j*lda+j] * tscal + } else { + tjjs = tscal + } + tjj := math.Abs(tjjs) + if tjj > 1 { + rec = math.Min(1, rec*tjj) + uscal /= tjjs + } + if rec < 1 { + bi.Dscal(n, rec, x, 1) + scale *= rec + xmax *= rec + } + } + var sumj float64 + if uscal == 1 { + if upper { + sumj = bi.Ddot(j, a[j:], lda, x, 1) + } else if j < n-1 { + sumj = bi.Ddot(n-j-1, a[(j+1)*lda+j:], lda, x[j+1:], 1) + } + } else { + if upper { + for i := 0; i < j; i++ { + sumj += (a[i*lda+j] * uscal) * x[i] + } + } else if j < n { + for i := j + 1; i < n; i++ { + sumj += (a[i*lda+j] * uscal) * x[i] + } + } + } + if uscal == tscal { + x[j] -= sumj + xj := math.Abs(x[j]) + var tjjs float64 + if nonUnit { + tjjs = a[j*lda+j] * tscal + } else { + tjjs = tscal + if tscal == 1 { + goto Skip2 + } + } + tjj := math.Abs(tjjs) + if tjj > smlnum { + if tjj < 1 { + if xj > tjj*bignum { + rec = 1 / xj + bi.Dscal(n, rec, x, 1) + scale *= rec + xmax *= rec + } + } + x[j] /= tjjs + } else if tjj > 0 { + if xj > tjj*bignum { + rec = (tjj * bignum) / xj + bi.Dscal(n, rec, x, 1) + scale *= rec + xmax *= rec + } + x[j] /= tjjs + } else { + for i := 0; i < n; i++ { + x[i] = 0 + } + x[j] = 1 + scale = 0 + xmax = 0 + } + } else { + x[j] = x[j]/tjjs - sumj + } + Skip2: + xmax = math.Max(xmax, math.Abs(x[j])) + } + } + scale /= tscal + if tscal != 1 { + bi.Dscal(n, 1/tscal, cnorm, 1) + } + return scale +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlauu2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlauu2.go new file mode 100644 index 0000000..ecce22c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlauu2.go @@ -0,0 +1,64 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlauu2 computes the product +// U * U^T if uplo is blas.Upper +// L^T * L if uplo is blas.Lower +// where U or L is stored in the upper or lower triangular part of A. +// Only the upper or lower triangle of the result is stored, overwriting +// the corresponding factor in A. +func (impl Implementation) Dlauu2(uplo blas.Uplo, n int, a []float64, lda int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + bi := blas64.Implementation() + + if uplo == blas.Upper { + // Compute the product U*U^T. + for i := 0; i < n; i++ { + aii := a[i*lda+i] + if i < n-1 { + a[i*lda+i] = bi.Ddot(n-i, a[i*lda+i:], 1, a[i*lda+i:], 1) + bi.Dgemv(blas.NoTrans, i, n-i-1, 1, a[i+1:], lda, a[i*lda+i+1:], 1, + aii, a[i:], lda) + } else { + bi.Dscal(i+1, aii, a[i:], lda) + } + } + } else { + // Compute the product L^T*L. + for i := 0; i < n; i++ { + aii := a[i*lda+i] + if i < n-1 { + a[i*lda+i] = bi.Ddot(n-i, a[i*lda+i:], lda, a[i*lda+i:], lda) + bi.Dgemv(blas.Trans, n-i-1, i, 1, a[(i+1)*lda:], lda, a[(i+1)*lda+i:], lda, + aii, a[i*lda:], 1) + } else { + bi.Dscal(i+1, aii, a[i*lda:], 1) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dlauum.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dlauum.go new file mode 100644 index 0000000..67ecadd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dlauum.go @@ -0,0 +1,81 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dlauum computes the product +// U * U^T if uplo is blas.Upper +// L^T * L if uplo is blas.Lower +// where U or L is stored in the upper or lower triangular part of A. +// Only the upper or lower triangle of the result is stored, overwriting +// the corresponding factor in A. +func (impl Implementation) Dlauum(uplo blas.Uplo, n int, a []float64, lda int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + // Determine the block size. + opts := "U" + if uplo == blas.Lower { + opts = "L" + } + nb := impl.Ilaenv(1, "DLAUUM", opts, n, -1, -1, -1) + + if nb <= 1 || n <= nb { + // Use unblocked code. + impl.Dlauu2(uplo, n, a, lda) + return + } + + // Use blocked code. + bi := blas64.Implementation() + if uplo == blas.Upper { + // Compute the product U*U^T. + for i := 0; i < n; i += nb { + ib := min(nb, n-i) + bi.Dtrmm(blas.Right, blas.Upper, blas.Trans, blas.NonUnit, + i, ib, 1, a[i*lda+i:], lda, a[i:], lda) + impl.Dlauu2(blas.Upper, ib, a[i*lda+i:], lda) + if n-i-ib > 0 { + bi.Dgemm(blas.NoTrans, blas.Trans, i, ib, n-i-ib, + 1, a[i+ib:], lda, a[i*lda+i+ib:], lda, 1, a[i:], lda) + bi.Dsyrk(blas.Upper, blas.NoTrans, ib, n-i-ib, + 1, a[i*lda+i+ib:], lda, 1, a[i*lda+i:], lda) + } + } + } else { + // Compute the product L^T*L. 
+ for i := 0; i < n; i += nb { + ib := min(nb, n-i) + bi.Dtrmm(blas.Left, blas.Lower, blas.Trans, blas.NonUnit, + ib, i, 1, a[i*lda+i:], lda, a[i*lda:], lda) + impl.Dlauu2(blas.Lower, ib, a[i*lda+i:], lda) + if n-i-ib > 0 { + bi.Dgemm(blas.Trans, blas.NoTrans, ib, i, n-i-ib, + 1, a[(i+ib)*lda+i:], lda, a[(i+ib)*lda:], lda, 1, a[i*lda:], lda) + bi.Dsyrk(blas.Lower, blas.Trans, ib, n-i-ib, + 1, a[(i+ib)*lda+i:], lda, 1, a[i*lda+i:], lda) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/doc.go b/vendor/gonum.org/v1/gonum/lapack/gonum/doc.go new file mode 100644 index 0000000..5794289 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/doc.go @@ -0,0 +1,28 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gonum is a pure-go implementation of the LAPACK API. The LAPACK API defines +// a set of algorithms for advanced matrix operations. +// +// The function definitions and implementations follow that of the netlib reference +// implementation. See http://www.netlib.org/lapack/explore-html/ for more +// information, and http://www.netlib.org/lapack/explore-html/d4/de1/_l_i_c_e_n_s_e_source.html +// for more license information. +// +// Slice function arguments frequently represent vectors and matrices. The data +// layout is identical to that found in https://godoc.org/gonum.org/v1/gonum/blas/gonum. +// +// Most LAPACK functions are built on top the routines defined in the BLAS API, +// and as such the computation time for many LAPACK functions is +// dominated by BLAS calls. Here, BLAS is accessed through the +// blas64 package (https://godoc.org/golang.org/v1/gonum/blas/blas64). In particular, +// this implies that an external BLAS library will be used if it is +// registered in blas64. +// +// The full LAPACK capability has not been implemented at present. The full +// API is very large, containing approximately 200 functions for double precision +// alone. Future additions will be focused on supporting the gonum matrix +// package (https://godoc.org/github.com/gonum/matrix/mat64), though pull requests +// with implementations and tests for LAPACK function are encouraged. +package gonum // import "gonum.org/v1/gonum/lapack/gonum" diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2l.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2l.go new file mode 100644 index 0000000..a20765a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2l.go @@ -0,0 +1,76 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dorg2l generates an m×n matrix Q with orthonormal columns which is defined +// as the last n columns of a product of k elementary reflectors of order m. +// Q = H_{k-1} * ... * H_1 * H_0 +// See Dgelqf for more information. It must be that m >= n >= k. +// +// tau contains the scalar reflectors computed by Dgeqlf. tau must have length +// at least k, and Dorg2l will panic otherwise. +// +// work contains temporary memory, and must have length at least n. Dorg2l will +// panic otherwise. +// +// Dorg2l is an internal routine. It is exported for testing purposes. 
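// Illustrative sketch (not part of the vendored gonum sources): forming the
// symmetric product U * U^T in place with the Dlauum routine defined a little
// earlier in this file set. The 3×3 upper triangular factor is made up; only
// the vendored package path and the exported Implementation type are assumed.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	// Upper triangular U stored row-major with lda = 3. The strictly lower
	// triangle is not referenced.
	a := []float64{
		2, 1, 1,
		0, 3, 1,
		0, 0, 4,
	}

	// Overwrite the upper triangle of a with the upper triangle of U * U^T.
	impl.Dlauum(blas.Upper, 3, a, 3)

	fmt.Println(a[0:3]) // [6 4 4]: first row of U*U^T
	fmt.Println(a[3:6]) // [0 10 4]: only the upper triangle is updated
	fmt.Println(a[6:9]) // [0 0 16]
}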
+func (impl Implementation) Dorg2l(m, n, k int, a []float64, lda int, tau, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case n > m: + panic(nGTM) + case k < 0: + panic(kLT0) + case k > n: + panic(kGTN) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(work) < n: + panic(shortWork) + } + + // Initialize columns 0:n-k to columns of the unit matrix. + for j := 0; j < n-k; j++ { + for l := 0; l < m; l++ { + a[l*lda+j] = 0 + } + a[(m-n+j)*lda+j] = 1 + } + + bi := blas64.Implementation() + for i := 0; i < k; i++ { + ii := n - k + i + + // Apply H_i to A[0:m-k+i, 0:n-k+i] from the left. + a[(m-n+ii)*lda+ii] = 1 + impl.Dlarf(blas.Left, m-n+ii+1, ii, a[ii:], lda, tau[i], a, lda, work) + bi.Dscal(m-n+ii, -tau[i], a[ii:], lda) + a[(m-n+ii)*lda+ii] = 1 - tau[i] + + // Set A[m-k+i:m, n-k+i+1] to zero. + for l := m - n + ii + 1; l < m; l++ { + a[l*lda+ii] = 0 + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2r.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2r.go new file mode 100644 index 0000000..de44775 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorg2r.go @@ -0,0 +1,75 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dorg2r generates an m×n matrix Q with orthonormal columns defined by the +// product of elementary reflectors as computed by Dgeqrf. +// Q = H_0 * H_1 * ... * H_{k-1} +// len(tau) >= k, 0 <= k <= n, 0 <= n <= m, len(work) >= n. +// Dorg2r will panic if these conditions are not met. +// +// Dorg2r is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorg2r(m, n, k int, a []float64, lda int, tau []float64, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case n > m: + panic(nGTM) + case k < 0: + panic(kLT0) + case k > n: + panic(kGTN) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(work) < n: + panic(shortWork) + } + + bi := blas64.Implementation() + + // Initialize columns k+1:n to columns of the unit matrix. + for l := 0; l < m; l++ { + for j := k; j < n; j++ { + a[l*lda+j] = 0 + } + } + for j := k; j < n; j++ { + a[j*lda+j] = 1 + } + for i := k - 1; i >= 0; i-- { + for i := range work { + work[i] = 0 + } + if i < n-1 { + a[i*lda+i] = 1 + impl.Dlarf(blas.Left, m-i, n-i-1, a[i*lda+i:], lda, tau[i], a[i*lda+i+1:], lda, work) + } + if i < m-1 { + bi.Dscal(m-i-1, -tau[i], a[(i+1)*lda+i:], lda) + } + a[i*lda+i] = 1 - tau[i] + for l := 0; l < i; l++ { + a[l*lda+i] = 0 + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorgbr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgbr.go new file mode 100644 index 0000000..626cad5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgbr.go @@ -0,0 +1,138 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/lapack" + +// Dorgbr generates one of the matrices Q or P^T computed by Dgebrd +// computed from the decomposition Dgebrd. 
See Dgebd2 for the description of +// Q and P^T. +// +// If vect == lapack.GenerateQ, then a is assumed to have been an m×k matrix and +// Q is of order m. If m >= k, then Dorgbr returns the first n columns of Q +// where m >= n >= k. If m < k, then Dorgbr returns Q as an m×m matrix. +// +// If vect == lapack.GeneratePT, then A is assumed to have been a k×n matrix, and +// P^T is of order n. If k < n, then Dorgbr returns the first m rows of P^T, +// where n >= m >= k. If k >= n, then Dorgbr returns P^T as an n×n matrix. +// +// Dorgbr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorgbr(vect lapack.GenOrtho, m, n, k int, a []float64, lda int, tau, work []float64, lwork int) { + wantq := vect == lapack.GenerateQ + mn := min(m, n) + switch { + case vect != lapack.GenerateQ && vect != lapack.GeneratePT: + panic(badGenOrtho) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case wantq && n > m: + panic(nGTM) + case wantq && n < min(m, k): + panic("lapack: n < min(m,k)") + case !wantq && m > n: + panic(mGTN) + case !wantq && m < min(n, k): + panic("lapack: m < min(n,k)") + case lda < max(1, n) && lwork != -1: + // Normally, we follow the reference and require the leading + // dimension to be always valid, even in case of workspace + // queries. However, if a caller provided a placeholder value + // for lda (and a) when doing a workspace query that didn't + // fulfill the condition here, it would cause a panic. This is + // exactly what Dgesvd does. + panic(badLdA) + case lwork < max(1, mn) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + work[0] = 1 + if m == 0 || n == 0 { + return + } + + if wantq { + if m >= k { + impl.Dorgqr(m, n, k, a, lda, tau, work, -1) + } else if m > 1 { + impl.Dorgqr(m-1, m-1, m-1, a[lda+1:], lda, tau, work, -1) + } + } else { + if k < n { + impl.Dorglq(m, n, k, a, lda, tau, work, -1) + } else if n > 1 { + impl.Dorglq(n-1, n-1, n-1, a[lda+1:], lda, tau, work, -1) + } + } + lworkopt := int(work[0]) + lworkopt = max(lworkopt, mn) + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case wantq && len(tau) < min(m, k): + panic(shortTau) + case !wantq && len(tau) < min(n, k): + panic(shortTau) + } + + if wantq { + // Form Q, determined by a call to Dgebrd to reduce an m×k matrix. + if m >= k { + impl.Dorgqr(m, n, k, a, lda, tau, work, lwork) + } else { + // Shift the vectors which define the elementary reflectors one + // column to the right, and set the first row and column of Q to + // those of the unit matrix. + for j := m - 1; j >= 1; j-- { + a[j] = 0 + for i := j + 1; i < m; i++ { + a[i*lda+j] = a[i*lda+j-1] + } + } + a[0] = 1 + for i := 1; i < m; i++ { + a[i*lda] = 0 + } + if m > 1 { + // Form Q[1:m-1, 1:m-1] + impl.Dorgqr(m-1, m-1, m-1, a[lda+1:], lda, tau, work, lwork) + } + } + } else { + // Form P^T, determined by a call to Dgebrd to reduce a k×n matrix. + if k < n { + impl.Dorglq(m, n, k, a, lda, tau, work, lwork) + } else { + // Shift the vectors which define the elementary reflectors one + // row downward, and set the first row and column of P^T to + // those of the unit matrix. 
+ a[0] = 1 + for i := 1; i < n; i++ { + a[i*lda] = 0 + } + for j := 1; j < n; j++ { + for i := j - 1; i >= 1; i-- { + a[i*lda+j] = a[(i-1)*lda+j] + } + a[j] = 0 + } + if n > 1 { + impl.Dorglq(n-1, n-1, n-1, a[lda+1:], lda, tau, work, lwork) + } + } + } + work[0] = float64(lworkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorghr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorghr.go new file mode 100644 index 0000000..6e799d1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorghr.go @@ -0,0 +1,101 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +// Dorghr generates an n×n orthogonal matrix Q which is defined as the product +// of ihi-ilo elementary reflectors: +// Q = H_{ilo} H_{ilo+1} ... H_{ihi-1}. +// +// a and lda represent an n×n matrix that contains the elementary reflectors, as +// returned by Dgehrd. On return, a is overwritten by the n×n orthogonal matrix +// Q. Q will be equal to the identity matrix except in the submatrix +// Q[ilo+1:ihi+1,ilo+1:ihi+1]. +// +// ilo and ihi must have the same values as in the previous call of Dgehrd. It +// must hold that +// 0 <= ilo <= ihi < n, if n > 0, +// ilo = 0, ihi = -1, if n == 0. +// +// tau contains the scalar factors of the elementary reflectors, as returned by +// Dgehrd. tau must have length n-1. +// +// work must have length at least max(1,lwork) and lwork must be at least +// ihi-ilo. For optimum performance lwork must be at least (ihi-ilo)*nb where nb +// is the optimal blocksize. On return, work[0] will contain the optimal value +// of lwork. +// +// If lwork == -1, instead of performing Dorghr, only the optimal value of lwork +// will be stored into work[0]. +// +// If any requirement on input sizes is not met, Dorghr will panic. +// +// Dorghr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorghr(n, ilo, ihi int, a []float64, lda int, tau, work []float64, lwork int) { + nh := ihi - ilo + switch { + case ilo < 0 || max(1, n) <= ilo: + panic(badIlo) + case ihi < min(ilo, n-1) || n <= ihi: + panic(badIhi) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, nh) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return + } + + lwkopt := max(1, nh) * impl.Ilaenv(1, "DORGQR", " ", nh, nh, nh, -1) + if lwork == -1 { + work[0] = float64(lwkopt) + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(tau) < n-1: + panic(shortTau) + } + + // Shift the vectors which define the elementary reflectors one column + // to the right. + for i := ilo + 2; i < ihi+1; i++ { + copy(a[i*lda+ilo+1:i*lda+i], a[i*lda+ilo:i*lda+i-1]) + } + // Set the first ilo+1 and the last n-ihi-1 rows and columns to those of + // the identity matrix. + for i := 0; i < ilo+1; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + a[i*lda+i] = 1 + } + for i := ilo + 1; i < ihi+1; i++ { + for j := 0; j <= ilo; j++ { + a[i*lda+j] = 0 + } + for j := i; j < n; j++ { + a[i*lda+j] = 0 + } + } + for i := ihi + 1; i < n; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + a[i*lda+i] = 1 + } + if nh > 0 { + // Generate Q[ilo+1:ihi+1,ilo+1:ihi+1]. 
+ impl.Dorgqr(nh, nh, nh, a[(ilo+1)*lda+ilo+1:], lda, tau[ilo:ihi], work, lwork) + } + work[0] = float64(lwkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorgl2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgl2.go new file mode 100644 index 0000000..b5566b9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgl2.go @@ -0,0 +1,71 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dorgl2 generates an m×n matrix Q with orthonormal rows defined by the +// first m rows product of elementary reflectors as computed by Dgelqf. +// Q = H_0 * H_1 * ... * H_{k-1} +// len(tau) >= k, 0 <= k <= m, 0 <= m <= n, len(work) >= m. +// Dorgl2 will panic if these conditions are not met. +// +// Dorgl2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorgl2(m, n, k int, a []float64, lda int, tau, work []float64) { + switch { + case m < 0: + panic(mLT0) + case n < m: + panic(nLTM) + case k < 0: + panic(kLT0) + case k > m: + panic(kGTM) + case lda < max(1, m): + panic(badLdA) + } + + if m == 0 { + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(work) < m: + panic(shortWork) + } + + bi := blas64.Implementation() + + if k < m { + for i := k; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + } + for j := k; j < m; j++ { + a[j*lda+j] = 1 + } + } + for i := k - 1; i >= 0; i-- { + if i < n-1 { + if i < m-1 { + a[i*lda+i] = 1 + impl.Dlarf(blas.Right, m-i-1, n-i, a[i*lda+i:], 1, tau[i], a[(i+1)*lda+i:], lda, work) + } + bi.Dscal(n-i-1, -tau[i], a[i*lda+i+1:], 1) + } + a[i*lda+i] = 1 - tau[i] + for l := 0; l < i; l++ { + a[i*lda+l] = 0 + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorglq.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorglq.go new file mode 100644 index 0000000..a6dd980 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorglq.go @@ -0,0 +1,123 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dorglq generates an m×n matrix Q with orthonormal columns defined by the +// product of elementary reflectors as computed by Dgelqf. +// Q = H_0 * H_1 * ... * H_{k-1} +// Dorglq is the blocked version of Dorgl2 that makes greater use of level-3 BLAS +// routines. +// +// len(tau) >= k, 0 <= k <= m, and 0 <= m <= n. +// +// work is temporary storage, and lwork specifies the usable memory length. At minimum, +// lwork >= m, and the amount of blocking is limited by the usable length. +// If lwork == -1, instead of computing Dorglq the optimal work length is stored +// into work[0]. +// +// Dorglq will panic if the conditions on input values are not met. +// +// Dorglq is an internal routine. It is exported for testing purposes. 
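// Illustrative sketch (not part of the vendored gonum sources): the lwork == -1
// workspace-query convention described above, used to size the work slice for
// an LQ factorization with Dgelqf (from the same vendored package) and the
// explicit formation of the orthogonal factor with Dorglq. The input matrix is
// made up; only the vendored package path and the exported Implementation type
// are assumed.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}

	const m, n = 3, 5
	// 3×5 row-major matrix, lda = n.
	a := []float64{
		1, 2, 3, 4, 5,
		0, 1, 0, 1, 0,
		2, 0, 1, 0, 2,
	}
	tau := make([]float64, m) // min(m, n) scalar factors of the reflectors

	// First call: query the optimal workspace size for Dgelqf.
	work := make([]float64, 1)
	impl.Dgelqf(m, n, a, n, tau, work, -1)
	lwork := int(work[0])

	// Second call: perform the factorization A = L * Q.
	work = make([]float64, lwork)
	impl.Dgelqf(m, n, a, n, tau, work, lwork)

	// Same idiom for Dorglq, which overwrites a with the m×n orthogonal
	// factor Q of the LQ factorization.
	impl.Dorglq(m, n, m, a, n, tau, work, -1)
	if wl := int(work[0]); wl > len(work) {
		work = make([]float64, wl)
	}
	impl.Dorglq(m, n, m, a, n, tau, work, len(work))

	fmt.Println(a) // the rows of Q, stored row-major
}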
+func (impl Implementation) Dorglq(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < m: + panic(nLTM) + case k < 0: + panic(kLT0) + case k > m: + panic(kGTM) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, m) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + if m == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DORGLQ", " ", m, n, k, -1) + if lwork == -1 { + work[0] = float64(m * nb) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + } + + nbmin := 2 // Minimum block size + var nx int // Crossover size from blocked to unbloked code + iws := m // Length of work needed + var ldwork int + if 1 < nb && nb < k { + nx = max(0, impl.Ilaenv(3, "DORGLQ", " ", m, n, k, -1)) + if nx < k { + ldwork = nb + iws = m * ldwork + if lwork < iws { + nb = lwork / m + ldwork = nb + nbmin = max(2, impl.Ilaenv(2, "DORGLQ", " ", m, n, k, -1)) + } + } + } + + var ki, kk int + if nbmin <= nb && nb < k && nx < k { + // The first kk rows are handled by the blocked method. + ki = ((k - nx - 1) / nb) * nb + kk = min(k, ki+nb) + for i := kk; i < m; i++ { + for j := 0; j < kk; j++ { + a[i*lda+j] = 0 + } + } + } + if kk < m { + // Perform the operation on colums kk to the end. + impl.Dorgl2(m-kk, n-kk, k-kk, a[kk*lda+kk:], lda, tau[kk:], work) + } + if kk > 0 { + // Perform the operation on column-blocks + for i := ki; i >= 0; i -= nb { + ib := min(nb, k-i) + if i+ib < m { + impl.Dlarft(lapack.Forward, lapack.RowWise, + n-i, ib, + a[i*lda+i:], lda, + tau[i:], + work, ldwork) + + impl.Dlarfb(blas.Right, blas.Trans, lapack.Forward, lapack.RowWise, + m-i-ib, n-i, ib, + a[i*lda+i:], lda, + work, ldwork, + a[(i+ib)*lda+i:], lda, + work[ib*ldwork:], ldwork) + } + impl.Dorgl2(ib, n-i, ib, a[i*lda+i:], lda, tau[i:], work) + for l := i; l < i+ib; l++ { + for j := 0; j < i; j++ { + a[l*lda+j] = 0 + } + } + } + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorgql.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgql.go new file mode 100644 index 0000000..6927ba4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgql.go @@ -0,0 +1,136 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dorgql generates the m×n matrix Q with orthonormal columns defined as the +// last n columns of a product of k elementary reflectors of order m +// Q = H_{k-1} * ... * H_1 * H_0. +// +// It must hold that +// 0 <= k <= n <= m, +// and Dorgql will panic otherwise. +// +// On entry, the (n-k+i)-th column of A must contain the vector which defines +// the elementary reflector H_i, for i=0,...,k-1, and tau[i] must contain its +// scalar factor. On return, a contains the m×n matrix Q. +// +// tau must have length at least k, and Dorgql will panic otherwise. +// +// work must have length at least max(1,lwork), and lwork must be at least +// max(1,n), otherwise Dorgql will panic. For optimum performance lwork must +// be a sufficiently large multiple of n. +// +// If lwork == -1, instead of computing Dorgql the optimal work length is stored +// into work[0]. +// +// Dorgql is an internal routine. It is exported for testing purposes. 
+func (impl Implementation) Dorgql(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case n > m: + panic(nGTM) + case k < 0: + panic(kLT0) + case k > n: + panic(kGTN) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, n) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DORGQL", " ", m, n, k, -1) + if lwork == -1 { + work[0] = float64(n * nb) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + } + + nbmin := 2 + var nx, ldwork int + iws := n + if 1 < nb && nb < k { + // Determine when to cross over from blocked to unblocked code. + nx = max(0, impl.Ilaenv(3, "DORGQL", " ", m, n, k, -1)) + if nx < k { + // Determine if workspace is large enough for blocked code. + iws = n * nb + if lwork < iws { + // Not enough workspace to use optimal nb: reduce nb and determine + // the minimum value of nb. + nb = lwork / n + nbmin = max(2, impl.Ilaenv(2, "DORGQL", " ", m, n, k, -1)) + } + ldwork = nb + } + } + + var kk int + if nbmin <= nb && nb < k && nx < k { + // Use blocked code after the first block. The last kk columns are handled + // by the block method. + kk = min(k, ((k-nx+nb-1)/nb)*nb) + + // Set A(m-kk:m, 0:n-kk) to zero. + for i := m - kk; i < m; i++ { + for j := 0; j < n-kk; j++ { + a[i*lda+j] = 0 + } + } + } + + // Use unblocked code for the first or only block. + impl.Dorg2l(m-kk, n-kk, k-kk, a, lda, tau, work) + if kk > 0 { + // Use blocked code. + for i := k - kk; i < k; i += nb { + ib := min(nb, k-i) + if n-k+i > 0 { + // Form the triangular factor of the block reflector + // H = H_{i+ib-1} * ... * H_{i+1} * H_i. + impl.Dlarft(lapack.Backward, lapack.ColumnWise, m-k+i+ib, ib, + a[n-k+i:], lda, tau[i:], work, ldwork) + + // Apply H to A[0:m-k+i+ib, 0:n-k+i] from the left. + impl.Dlarfb(blas.Left, blas.NoTrans, lapack.Backward, lapack.ColumnWise, + m-k+i+ib, n-k+i, ib, a[n-k+i:], lda, work, ldwork, + a, lda, work[ib*ldwork:], ldwork) + } + + // Apply H to rows 0:m-k+i+ib of current block. + impl.Dorg2l(m-k+i+ib, ib, ib, a[n-k+i:], lda, tau[i:], work) + + // Set rows m-k+i+ib:m of current block to zero. + for j := n - k + i; j < n-k+i+ib; j++ { + for l := m - k + i + ib; l < m; l++ { + a[l*lda+j] = 0 + } + } + } + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorgqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgqr.go new file mode 100644 index 0000000..f07fdaf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgqr.go @@ -0,0 +1,134 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dorgqr generates an m×n matrix Q with orthonormal columns defined by the +// product of elementary reflectors +// Q = H_0 * H_1 * ... * H_{k-1} +// as computed by Dgeqrf. +// Dorgqr is the blocked version of Dorg2r that makes greater use of level-3 BLAS +// routines. +// +// The length of tau must be at least k, and the length of work must be at least n. +// It also must be that 0 <= k <= n and 0 <= n <= m. +// +// work is temporary storage, and lwork specifies the usable memory length. 
At +// minimum, lwork >= n, and the amount of blocking is limited by the usable +// length. If lwork == -1, instead of computing Dorgqr the optimal work length +// is stored into work[0]. +// +// Dorgqr will panic if the conditions on input values are not met. +// +// Dorgqr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorgqr(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case n > m: + panic(nGTM) + case k < 0: + panic(kLT0) + case k > n: + panic(kGTN) + case lda < max(1, n) && lwork != -1: + // Normally, we follow the reference and require the leading + // dimension to be always valid, even in case of workspace + // queries. However, if a caller provided a placeholder value + // for lda (and a) when doing a workspace query that didn't + // fulfill the condition here, it would cause a panic. This is + // exactly what Dgesvd does. + panic(badLdA) + case lwork < max(1, n) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + if n == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DORGQR", " ", m, n, k, -1) + // work is treated as an n×nb matrix + if lwork == -1 { + work[0] = float64(n * nb) + return + } + + switch { + case len(a) < (m-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + } + + nbmin := 2 // Minimum block size + var nx int // Crossover size from blocked to unbloked code + iws := n // Length of work needed + var ldwork int + if 1 < nb && nb < k { + nx = max(0, impl.Ilaenv(3, "DORGQR", " ", m, n, k, -1)) + if nx < k { + ldwork = nb + iws = n * ldwork + if lwork < iws { + nb = lwork / n + ldwork = nb + nbmin = max(2, impl.Ilaenv(2, "DORGQR", " ", m, n, k, -1)) + } + } + } + var ki, kk int + if nbmin <= nb && nb < k && nx < k { + // The first kk columns are handled by the blocked method. + ki = ((k - nx - 1) / nb) * nb + kk = min(k, ki+nb) + for i := 0; i < kk; i++ { + for j := kk; j < n; j++ { + a[i*lda+j] = 0 + } + } + } + if kk < n { + // Perform the operation on colums kk to the end. + impl.Dorg2r(m-kk, n-kk, k-kk, a[kk*lda+kk:], lda, tau[kk:], work) + } + if kk > 0 { + // Perform the operation on column-blocks. + for i := ki; i >= 0; i -= nb { + ib := min(nb, k-i) + if i+ib < n { + impl.Dlarft(lapack.Forward, lapack.ColumnWise, + m-i, ib, + a[i*lda+i:], lda, + tau[i:], + work, ldwork) + + impl.Dlarfb(blas.Left, blas.NoTrans, lapack.Forward, lapack.ColumnWise, + m-i, n-i-ib, ib, + a[i*lda+i:], lda, + work, ldwork, + a[i*lda+i+ib:], lda, + work[ib*ldwork:], ldwork) + } + impl.Dorg2r(m-i, ib, ib, a[i*lda+i:], lda, tau[i:], work) + // Set rows 0:i-1 of current block to zero. + for j := i; j < i+ib; j++ { + for l := 0; l < i; l++ { + a[l*lda+j] = 0 + } + } + } + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorgtr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgtr.go new file mode 100644 index 0000000..483fbca --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorgtr.go @@ -0,0 +1,104 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dorgtr generates a real orthogonal matrix Q which is defined as the product +// of n-1 elementary reflectors of order n as returned by Dsytrd. +// +// The construction of Q depends on the value of uplo: +// Q = H_{n-1} * ... 
* H_1 * H_0 if uplo == blas.Upper +// Q = H_0 * H_1 * ... * H_{n-1} if uplo == blas.Lower +// where H_i is constructed from the elementary reflectors as computed by Dsytrd. +// See the documentation for Dsytrd for more information. +// +// tau must have length at least n-1, and Dorgtr will panic otherwise. +// +// work is temporary storage, and lwork specifies the usable memory length. At +// minimum, lwork >= max(1,n-1), and Dorgtr will panic otherwise. The amount of blocking +// is limited by the usable length. +// If lwork == -1, instead of computing Dorgtr the optimal work length is stored +// into work[0]. +// +// Dorgtr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorgtr(uplo blas.Uplo, n int, a []float64, lda int, tau, work []float64, lwork int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, n-1) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + if n == 0 { + work[0] = 1 + return + } + + var nb int + if uplo == blas.Upper { + nb = impl.Ilaenv(1, "DORGQL", " ", n-1, n-1, n-1, -1) + } else { + nb = impl.Ilaenv(1, "DORGQR", " ", n-1, n-1, n-1, -1) + } + lworkopt := max(1, n-1) * nb + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(tau) < n-1: + panic(shortTau) + } + + if uplo == blas.Upper { + // Q was determined by a call to Dsytrd with uplo == blas.Upper. + // Shift the vectors which define the elementary reflectors one column + // to the left, and set the last row and column of Q to those of the unit + // matrix. + for j := 0; j < n-1; j++ { + for i := 0; i < j; i++ { + a[i*lda+j] = a[i*lda+j+1] + } + a[(n-1)*lda+j] = 0 + } + for i := 0; i < n-1; i++ { + a[i*lda+n-1] = 0 + } + a[(n-1)*lda+n-1] = 1 + + // Generate Q[0:n-1, 0:n-1]. + impl.Dorgql(n-1, n-1, n-1, a, lda, tau, work, lwork) + } else { + // Q was determined by a call to Dsytrd with uplo == blas.Upper. + // Shift the vectors which define the elementary reflectors one column + // to the right, and set the first row and column of Q to those of the unit + // matrix. + for j := n - 1; j > 0; j-- { + a[j] = 0 + for i := j + 1; i < n; i++ { + a[i*lda+j] = a[i*lda+j-1] + } + } + a[0] = 1 + for i := 1; i < n; i++ { + a[i*lda] = 0 + } + if n > 1 { + // Generate Q[1:n, 1:n]. + impl.Dorgqr(n-1, n-1, n-1, a[lda+1:], lda, tau, work, lwork) + } + } + work[0] = float64(lworkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorm2r.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorm2r.go new file mode 100644 index 0000000..4b0bd83 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorm2r.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dorm2r multiplies a general matrix C by an orthogonal matrix from a QR factorization +// determined by Dgeqrf. +// C = Q * C if side == blas.Left and trans == blas.NoTrans +// C = Q^T * C if side == blas.Left and trans == blas.Trans +// C = C * Q if side == blas.Right and trans == blas.NoTrans +// C = C * Q^T if side == blas.Right and trans == blas.Trans +// If side == blas.Left, a is a matrix of size m×k, and if side == blas.Right +// a is of size n×k. 
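
// Editorial sketch, not part of the vendored gonum sources: forming Q explicitly
// after a QR factorization with the Dorgqr routine from dorgqr.go above, using
// the lwork == -1 workspace query it documents. The Dgeqrf call and its
// signature are an assumption from the same lapack/gonum package.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	m, n := 4, 3 // A is m×n with n <= m.
	a := []float64{
		1, 0, 1,
		2, 1, 0,
		0, 1, 1,
		1, 1, 1,
	}
	lda := n
	tau := make([]float64, n)

	// Workspace query for Dgeqrf, then the factorization A = Q*R.
	work := make([]float64, 1)
	impl.Dgeqrf(m, n, a, lda, tau, work, -1)
	work = make([]float64, int(work[0]))
	impl.Dgeqrf(m, n, a, lda, tau, work, len(work))

	// Workspace query for Dorgqr, then overwrite a with the m×n matrix Q.
	impl.Dorgqr(m, n, n, a, lda, tau, work, -1)
	if lw := int(work[0]); lw > len(work) {
		work = make([]float64, lw)
	}
	impl.Dorgqr(m, n, n, a, lda, tau, work, len(work))
	fmt.Println("Q (row-major):", a)
}
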
+// +// tau contains the Householder factors and is of length at least k and this function +// will panic otherwise. +// +// work is temporary storage of length at least n if side == blas.Left +// and at least m if side == blas.Right and this function will panic otherwise. +// +// Dorm2r is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dorm2r(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) { + left := side == blas.Left + switch { + case !left && side != blas.Right: + panic(badSide) + case trans != blas.Trans && trans != blas.NoTrans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case left && k > m: + panic(kGTM) + case !left && k > n: + panic(kGTN) + case lda < max(1, k): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 || k == 0 { + return + } + + switch { + case left && len(a) < (m-1)*lda+k: + panic(shortA) + case !left && len(a) < (n-1)*lda+k: + panic(shortA) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case len(tau) < k: + panic(shortTau) + case left && len(work) < n: + panic(shortWork) + case !left && len(work) < m: + panic(shortWork) + } + + if left { + if trans == blas.NoTrans { + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m-i, n, a[i*lda+i:], lda, tau[i], c[i*ldc:], ldc, work) + a[i*lda+i] = aii + } + return + } + for i := 0; i < k; i++ { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m-i, n, a[i*lda+i:], lda, tau[i], c[i*ldc:], ldc, work) + a[i*lda+i] = aii + } + return + } + if trans == blas.NoTrans { + for i := 0; i < k; i++ { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m, n-i, a[i*lda+i:], lda, tau[i], c[i:], ldc, work) + a[i*lda+i] = aii + } + return + } + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m, n-i, a[i*lda+i:], lda, tau[i], c[i:], ldc, work) + a[i*lda+i] = aii + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dormbr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dormbr.go new file mode 100644 index 0000000..026dc04 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dormbr.go @@ -0,0 +1,178 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dormbr applies a multiplicative update to the matrix C based on a +// decomposition computed by Dgebrd. +// +// Dormbr overwrites the m×n matrix C with +// Q * C if vect == lapack.ApplyQ, side == blas.Left, and trans == blas.NoTrans +// C * Q if vect == lapack.ApplyQ, side == blas.Right, and trans == blas.NoTrans +// Q^T * C if vect == lapack.ApplyQ, side == blas.Left, and trans == blas.Trans +// C * Q^T if vect == lapack.ApplyQ, side == blas.Right, and trans == blas.Trans +// +// P * C if vect == lapack.ApplyP, side == blas.Left, and trans == blas.NoTrans +// C * P if vect == lapack.ApplyP, side == blas.Right, and trans == blas.NoTrans +// P^T * C if vect == lapack.ApplyP, side == blas.Left, and trans == blas.Trans +// C * P^T if vect == lapack.ApplyP, side == blas.Right, and trans == blas.Trans +// where P and Q are the orthogonal matrices determined by Dgebrd when reducing +// a matrix A to bidiagonal form: A = Q * B * P^T. See Dgebrd for the +// definitions of Q and P. 
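
// Editorial sketch, not part of the vendored gonum sources: using the unblocked
// Dorm2r defined above to compute Q^T * c for a right-hand side c without ever
// forming Q. The Dgeqrf call and its signature are an assumption from the same
// lapack/gonum package.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	m, k := 3, 2 // Q is m×m, defined by k reflectors stored in the m×k matrix a.
	a := []float64{
		2, 1,
		1, 3,
		0, 1,
	}
	lda := k
	tau := make([]float64, k)
	// work is reused: Dgeqrf needs at least k entries here, and Dorm2r with
	// side == blas.Left needs at least as many entries as C has columns.
	work := make([]float64, m)
	impl.Dgeqrf(m, k, a, lda, tau, work, len(work))

	// c is an m×1 right-hand side; overwrite it with Q^T * c.
	c := []float64{1, 2, 3}
	impl.Dorm2r(blas.Left, blas.Trans, m, 1, k, a, lda, tau, c, 1, work)
	fmt.Println("Q^T * c =", c)
}
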
+// +// If vect == lapack.ApplyQ, A is assumed to have been an nq×k matrix, while if +// vect == lapack.ApplyP, A is assumed to have been a k×nq matrix. nq = m if +// side == blas.Left, while nq = n if side == blas.Right. +// +// tau must have length min(nq,k), and Dormbr will panic otherwise. tau contains +// the elementary reflectors to construct Q or P depending on the value of +// vect. +// +// work must have length at least max(1,lwork), and lwork must be either -1 or +// at least max(1,n) if side == blas.Left, and at least max(1,m) if side == +// blas.Right. For optimum performance lwork should be at least n*nb if side == +// blas.Left, and at least m*nb if side == blas.Right, where nb is the optimal +// block size. On return, work[0] will contain the optimal value of lwork. +// +// If lwork == -1, the function only calculates the optimal value of lwork and +// returns it in work[0]. +// +// Dormbr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dormbr(vect lapack.ApplyOrtho, side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) { + nq := n + nw := m + if side == blas.Left { + nq = m + nw = n + } + applyQ := vect == lapack.ApplyQ + switch { + case !applyQ && vect != lapack.ApplyP: + panic(badApplyOrtho) + case side != blas.Left && side != blas.Right: + panic(badSide) + case trans != blas.NoTrans && trans != blas.Trans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case applyQ && lda < max(1, min(nq, k)): + panic(badLdA) + case !applyQ && lda < max(1, nq): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + case lwork < max(1, nw) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if m == 0 || n == 0 { + work[0] = 1 + return + } + + // The current implementation does not use opts, but a future change may + // use these options so construct them. + var opts string + if side == blas.Left { + opts = "L" + } else { + opts = "R" + } + if trans == blas.Trans { + opts += "T" + } else { + opts += "N" + } + var nb int + if applyQ { + if side == blas.Left { + nb = impl.Ilaenv(1, "DORMQR", opts, m-1, n, m-1, -1) + } else { + nb = impl.Ilaenv(1, "DORMQR", opts, m, n-1, n-1, -1) + } + } else { + if side == blas.Left { + nb = impl.Ilaenv(1, "DORMLQ", opts, m-1, n, m-1, -1) + } else { + nb = impl.Ilaenv(1, "DORMLQ", opts, m, n-1, n-1, -1) + } + } + lworkopt := max(1, nw) * nb + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + minnqk := min(nq, k) + switch { + case applyQ && len(a) < (nq-1)*lda+minnqk: + panic(shortA) + case !applyQ && len(a) < (minnqk-1)*lda+nq: + panic(shortA) + case len(tau) < minnqk: + panic(shortTau) + case len(c) < (m-1)*ldc+n: + panic(shortC) + } + + if applyQ { + // Change the operation to get Q depending on the size of the initial + // matrix to Dgebrd. The size matters due to the storage location of + // the off-diagonal elements. 
+ if nq >= k { + impl.Dormqr(side, trans, m, n, k, a, lda, tau[:k], c, ldc, work, lwork) + } else if nq > 1 { + mi := m + ni := n - 1 + i1 := 0 + i2 := 1 + if side == blas.Left { + mi = m - 1 + ni = n + i1 = 1 + i2 = 0 + } + impl.Dormqr(side, trans, mi, ni, nq-1, a[1*lda:], lda, tau[:nq-1], c[i1*ldc+i2:], ldc, work, lwork) + } + work[0] = float64(lworkopt) + return + } + + transt := blas.Trans + if trans == blas.Trans { + transt = blas.NoTrans + } + + // Change the operation to get P depending on the size of the initial + // matrix to Dgebrd. The size matters due to the storage location of + // the off-diagonal elements. + if nq > k { + impl.Dormlq(side, transt, m, n, k, a, lda, tau, c, ldc, work, lwork) + } else if nq > 1 { + mi := m + ni := n - 1 + i1 := 0 + i2 := 1 + if side == blas.Left { + mi = m - 1 + ni = n + i1 = 1 + i2 = 0 + } + impl.Dormlq(side, transt, mi, ni, nq-1, a[1:], lda, tau, c[i1*ldc+i2:], ldc, work, lwork) + } + work[0] = float64(lworkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dormhr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dormhr.go new file mode 100644 index 0000000..c00f440 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dormhr.go @@ -0,0 +1,129 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dormhr multiplies an m×n general matrix C with an nq×nq orthogonal matrix Q +// Q * C, if side == blas.Left and trans == blas.NoTrans, +// Q^T * C, if side == blas.Left and trans == blas.Trans, +// C * Q, if side == blas.Right and trans == blas.NoTrans, +// C * Q^T, if side == blas.Right and trans == blas.Trans, +// where nq == m if side == blas.Left and nq == n if side == blas.Right. +// +// Q is defined implicitly as the product of ihi-ilo elementary reflectors, as +// returned by Dgehrd: +// Q = H_{ilo} H_{ilo+1} ... H_{ihi-1}. +// Q is equal to the identity matrix except in the submatrix +// Q[ilo+1:ihi+1,ilo+1:ihi+1]. +// +// ilo and ihi must have the same values as in the previous call of Dgehrd. It +// must hold that +// 0 <= ilo <= ihi < m, if m > 0 and side == blas.Left, +// ilo = 0 and ihi = -1, if m = 0 and side == blas.Left, +// 0 <= ilo <= ihi < n, if n > 0 and side == blas.Right, +// ilo = 0 and ihi = -1, if n = 0 and side == blas.Right. +// +// a and lda represent an m×m matrix if side == blas.Left and an n×n matrix if +// side == blas.Right. The matrix contains vectors which define the elementary +// reflectors, as returned by Dgehrd. +// +// tau contains the scalar factors of the elementary reflectors, as returned by +// Dgehrd. tau must have length m-1 if side == blas.Left and n-1 if side == +// blas.Right. +// +// c and ldc represent the m×n matrix C. On return, c is overwritten by the +// product with Q. +// +// work must have length at least max(1,lwork), and lwork must be at least +// max(1,n), if side == blas.Left, and max(1,m), if side == blas.Right. For +// optimum performance lwork should be at least n*nb if side == blas.Left and +// m*nb if side == blas.Right, where nb is the optimal block size. On return, +// work[0] will contain the optimal value of lwork. +// +// If lwork == -1, instead of performing Dormhr, only the optimal value of lwork +// will be stored in work[0]. +// +// If any requirement on input sizes is not met, Dormhr will panic. +// +// Dormhr is an internal routine. It is exported for testing purposes. 
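
// Editorial sketch, not part of the vendored gonum sources: a typical use of the
// Dormhr routine documented above, multiplying a vector by the orthogonal matrix
// Q of a Hessenberg reduction without forming Q. The Dgehrd call and its
// signature are an assumption from the same lapack/gonum package.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	n := 4
	a := []float64{
		2, 1, 0, 0,
		1, 3, 1, 0,
		4, 1, 2, 1,
		2, 0, 1, 5,
	}
	lda := n
	ilo, ihi := 0, n-1 // No prior balancing, so the whole matrix is reduced.
	tau := make([]float64, n-1)

	// Reduce A to upper Hessenberg form H = Q^T * A * Q (workspace query first).
	work := make([]float64, 1)
	impl.Dgehrd(n, ilo, ihi, a, lda, tau, work, -1)
	work = make([]float64, int(work[0]))
	impl.Dgehrd(n, ilo, ihi, a, lda, tau, work, len(work))

	// Overwrite the n×1 vector c with Q * c.
	c := []float64{1, 0, 0, 0}
	impl.Dormhr(blas.Left, blas.NoTrans, n, 1, ilo, ihi, a, lda, tau, c, 1, work, len(work))
	fmt.Println("Q * c =", c)
}
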
+func (impl Implementation) Dormhr(side blas.Side, trans blas.Transpose, m, n, ilo, ihi int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) { + nq := n // The order of Q. + nw := m // The minimum length of work. + if side == blas.Left { + nq = m + nw = n + } + switch { + case side != blas.Left && side != blas.Right: + panic(badSide) + case trans != blas.NoTrans && trans != blas.Trans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case ilo < 0 || max(1, nq) <= ilo: + panic(badIlo) + case ihi < min(ilo, nq-1) || nq <= ihi: + panic(badIhi) + case lda < max(1, nq): + panic(badLdA) + case lwork < max(1, nw) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if m == 0 || n == 0 { + work[0] = 1 + return + } + + nh := ihi - ilo + var nb int + if side == blas.Left { + opts := "LN" + if trans == blas.Trans { + opts = "LT" + } + nb = impl.Ilaenv(1, "DORMQR", opts, nh, n, nh, -1) + } else { + opts := "RN" + if trans == blas.Trans { + opts = "RT" + } + nb = impl.Ilaenv(1, "DORMQR", opts, m, nh, nh, -1) + } + lwkopt := max(1, nw) * nb + if lwork == -1 { + work[0] = float64(lwkopt) + return + } + + if nh == 0 { + work[0] = 1 + return + } + + switch { + case len(a) < (nq-1)*lda+nq: + panic(shortA) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case len(tau) != nq-1: + panic(badLenTau) + } + + if side == blas.Left { + impl.Dormqr(side, trans, nh, n, nh, a[(ilo+1)*lda+ilo:], lda, + tau[ilo:ihi], c[(ilo+1)*ldc:], ldc, work, lwork) + } else { + impl.Dormqr(side, trans, m, nh, nh, a[(ilo+1)*lda+ilo:], lda, + tau[ilo:ihi], c[ilo+1:], ldc, work, lwork) + } + work[0] = float64(lwkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dorml2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dorml2.go new file mode 100644 index 0000000..25aa83a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dorml2.go @@ -0,0 +1,102 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dorml2 multiplies a general matrix C by an orthogonal matrix from an LQ factorization +// determined by Dgelqf. +// C = Q * C if side == blas.Left and trans == blas.NoTrans +// C = Q^T * C if side == blas.Left and trans == blas.Trans +// C = C * Q if side == blas.Right and trans == blas.NoTrans +// C = C * Q^T if side == blas.Right and trans == blas.Trans +// If side == blas.Left, a is a matrix of side k×m, and if side == blas.Right +// a is of size k×n. +// +// tau contains the Householder factors and is of length at least k and this function will +// panic otherwise. +// +// work is temporary storage of length at least n if side == blas.Left +// and at least m if side == blas.Right and this function will panic otherwise. +// +// Dorml2 is an internal routine. It is exported for testing purposes. 
+func (impl Implementation) Dorml2(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) { + left := side == blas.Left + switch { + case !left && side != blas.Right: + panic(badSide) + case trans != blas.Trans && trans != blas.NoTrans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case left && k > m: + panic(kGTM) + case !left && k > n: + panic(kGTN) + case left && lda < max(1, m): + panic(badLdA) + case !left && lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if m == 0 || n == 0 || k == 0 { + return + } + + switch { + case left && len(a) < (k-1)*lda+m: + panic(shortA) + case !left && len(a) < (k-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case left && len(work) < n: + panic(shortWork) + case !left && len(work) < m: + panic(shortWork) + } + + notrans := trans == blas.NoTrans + switch { + case left && notrans: + for i := 0; i < k; i++ { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m-i, n, a[i*lda+i:], 1, tau[i], c[i*ldc:], ldc, work) + a[i*lda+i] = aii + } + + case left && !notrans: + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m-i, n, a[i*lda+i:], 1, tau[i], c[i*ldc:], ldc, work) + a[i*lda+i] = aii + } + + case !left && notrans: + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m, n-i, a[i*lda+i:], 1, tau[i], c[i:], ldc, work) + a[i*lda+i] = aii + } + + case !left && !notrans: + for i := 0; i < k; i++ { + aii := a[i*lda+i] + a[i*lda+i] = 1 + impl.Dlarf(side, m, n-i, a[i*lda+i:], 1, tau[i], c[i:], ldc, work) + a[i*lda+i] = aii + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dormlq.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dormlq.go new file mode 100644 index 0000000..6fcfc2f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dormlq.go @@ -0,0 +1,174 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dormlq multiplies the matrix C by the orthogonal matrix Q defined by the +// slices a and tau. A and tau are as returned from Dgelqf. +// C = Q * C if side == blas.Left and trans == blas.NoTrans +// C = Q^T * C if side == blas.Left and trans == blas.Trans +// C = C * Q if side == blas.Right and trans == blas.NoTrans +// C = C * Q^T if side == blas.Right and trans == blas.Trans +// If side == blas.Left, A is a matrix of side k×m, and if side == blas.Right +// A is of size k×n. This uses a blocked algorithm. +// +// work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= m if side == blas.Left and lwork >= n if side == blas.Right, +// and this function will panic otherwise. +// Dormlq uses a block algorithm, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Dormlq, +// the optimal work length will be stored into work[0]. +// +// tau contains the Householder scales and must have length at least k, and +// this function will panic otherwise. 
+func (impl Implementation) Dormlq(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) { + left := side == blas.Left + nw := m + if left { + nw = n + } + switch { + case !left && side != blas.Right: + panic(badSide) + case trans != blas.Trans && trans != blas.NoTrans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case left && k > m: + panic(kGTM) + case !left && k > n: + panic(kGTN) + case left && lda < max(1, m): + panic(badLdA) + case !left && lda < max(1, n): + panic(badLdA) + case lwork < max(1, nw) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if m == 0 || n == 0 || k == 0 { + work[0] = 1 + return + } + + const ( + nbmax = 64 + ldt = nbmax + tsize = nbmax * ldt + ) + opts := string(side) + string(trans) + nb := min(nbmax, impl.Ilaenv(1, "DORMLQ", opts, m, n, k, -1)) + lworkopt := max(1, nw)*nb + tsize + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case left && len(a) < (k-1)*lda+m: + panic(shortA) + case !left && len(a) < (k-1)*lda+n: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(c) < (m-1)*ldc+n: + panic(shortC) + } + + nbmin := 2 + if 1 < nb && nb < k { + iws := nw*nb + tsize + if lwork < iws { + nb = (lwork - tsize) / nw + nbmin = max(2, impl.Ilaenv(2, "DORMLQ", opts, m, n, k, -1)) + } + } + if nb < nbmin || k <= nb { + // Call unblocked code. + impl.Dorml2(side, trans, m, n, k, a, lda, tau, c, ldc, work) + work[0] = float64(lworkopt) + return + } + + t := work[:tsize] + wrk := work[tsize:] + ldwrk := nb + + notrans := trans == blas.NoTrans + transt := blas.NoTrans + if notrans { + transt = blas.Trans + } + + switch { + case left && notrans: + for i := 0; i < k; i += nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.RowWise, m-i, ib, + a[i*lda+i:], lda, + tau[i:], + t, ldt) + impl.Dlarfb(side, transt, lapack.Forward, lapack.RowWise, m-i, n, ib, + a[i*lda+i:], lda, + t, ldt, + c[i*ldc:], ldc, + wrk, ldwrk) + } + + case left && !notrans: + for i := ((k - 1) / nb) * nb; i >= 0; i -= nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.RowWise, m-i, ib, + a[i*lda+i:], lda, + tau[i:], + t, ldt) + impl.Dlarfb(side, transt, lapack.Forward, lapack.RowWise, m-i, n, ib, + a[i*lda+i:], lda, + t, ldt, + c[i*ldc:], ldc, + wrk, ldwrk) + } + + case !left && notrans: + for i := ((k - 1) / nb) * nb; i >= 0; i -= nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.RowWise, n-i, ib, + a[i*lda+i:], lda, + tau[i:], + t, ldt) + impl.Dlarfb(side, transt, lapack.Forward, lapack.RowWise, m, n-i, ib, + a[i*lda+i:], lda, + t, ldt, + c[i:], ldc, + wrk, ldwrk) + } + + case !left && !notrans: + for i := 0; i < k; i += nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.RowWise, n-i, ib, + a[i*lda+i:], lda, + tau[i:], + t, ldt) + impl.Dlarfb(side, transt, lapack.Forward, lapack.RowWise, m, n-i, ib, + a[i*lda+i:], lda, + t, ldt, + c[i:], ldc, + wrk, ldwrk) + } + } + work[0] = float64(lworkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dormqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dormqr.go new file mode 100644 index 0000000..8ae4508 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dormqr.go @@ -0,0 +1,177 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
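
// Editorial sketch, not part of the vendored gonum sources: applying Q^T from a
// Dgebrd bidiagonalization with the Dormbr routine from dormbr.go above, using a
// workspace query before each sized call. The Dgebrd call and its signature are
// an assumption from the same lapack/gonum package.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	m, n := 3, 3
	a := []float64{
		4, 1, 0,
		1, 3, 1,
		0, 1, 2,
	}
	lda := n
	d := make([]float64, n)
	e := make([]float64, n-1)
	tauQ := make([]float64, n)
	tauP := make([]float64, n)

	// Reduce A to bidiagonal form A = Q * B * P^T.
	work := make([]float64, 1)
	impl.Dgebrd(m, n, a, lda, d, e, tauQ, tauP, work, -1)
	work = make([]float64, int(work[0]))
	impl.Dgebrd(m, n, a, lda, d, e, tauQ, tauP, work, len(work))

	// Apply Q^T to an m×1 right-hand side: c <- Q^T * c.
	c := []float64{1, 1, 1}
	impl.Dormbr(lapack.ApplyQ, blas.Left, blas.Trans, m, 1, n, a, lda, tauQ, c, 1, work, -1)
	if lw := int(work[0]); lw > len(work) {
		work = make([]float64, lw)
	}
	impl.Dormbr(lapack.ApplyQ, blas.Left, blas.Trans, m, 1, n, a, lda, tauQ, c, 1, work, len(work))
	fmt.Println("Q^T * c =", c)
}
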
+ +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +// Dormqr multiplies an m×n matrix C by an orthogonal matrix Q as +// C = Q * C, if side == blas.Left and trans == blas.NoTrans, +// C = Q^T * C, if side == blas.Left and trans == blas.Trans, +// C = C * Q, if side == blas.Right and trans == blas.NoTrans, +// C = C * Q^T, if side == blas.Right and trans == blas.Trans, +// where Q is defined as the product of k elementary reflectors +// Q = H_0 * H_1 * ... * H_{k-1}. +// +// If side == blas.Left, A is an m×k matrix and 0 <= k <= m. +// If side == blas.Right, A is an n×k matrix and 0 <= k <= n. +// The ith column of A contains the vector which defines the elementary +// reflector H_i and tau[i] contains its scalar factor. tau must have length k +// and Dormqr will panic otherwise. Dgeqrf returns A and tau in the required +// form. +// +// work must have length at least max(1,lwork), and lwork must be at least n if +// side == blas.Left and at least m if side == blas.Right, otherwise Dormqr will +// panic. +// +// work is temporary storage, and lwork specifies the usable memory length. At +// minimum, lwork >= m if side == blas.Left and lwork >= n if side == +// blas.Right, and this function will panic otherwise. Larger values of lwork +// will generally give better performance. On return, work[0] will contain the +// optimal value of lwork. +// +// If lwork is -1, instead of performing Dormqr, the optimal workspace size will +// be stored into work[0]. +func (impl Implementation) Dormqr(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) { + left := side == blas.Left + nq := n + nw := m + if left { + nq = m + nw = n + } + switch { + case !left && side != blas.Right: + panic(badSide) + case trans != blas.NoTrans && trans != blas.Trans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case left && k > m: + panic(kGTM) + case !left && k > n: + panic(kGTN) + case lda < max(1, k): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + case lwork < max(1, nw) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if m == 0 || n == 0 || k == 0 { + work[0] = 1 + return + } + + const ( + nbmax = 64 + ldt = nbmax + tsize = nbmax * ldt + ) + opts := string(side) + string(trans) + nb := min(nbmax, impl.Ilaenv(1, "DORMQR", opts, m, n, k, -1)) + lworkopt := max(1, nw)*nb + tsize + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case len(a) < (nq-1)*lda+k: + panic(shortA) + case len(tau) != k: + panic(badLenTau) + case len(c) < (m-1)*ldc+n: + panic(shortC) + } + + nbmin := 2 + if 1 < nb && nb < k { + if lwork < nw*nb+tsize { + nb = (lwork - tsize) / nw + nbmin = max(2, impl.Ilaenv(2, "DORMQR", opts, m, n, k, -1)) + } + } + + if nb < nbmin || k <= nb { + // Call unblocked code. 
+ impl.Dorm2r(side, trans, m, n, k, a, lda, tau, c, ldc, work) + work[0] = float64(lworkopt) + return + } + + var ( + ldwork = nb + notrans = trans == blas.NoTrans + ) + switch { + case left && notrans: + for i := ((k - 1) / nb) * nb; i >= 0; i -= nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.ColumnWise, m-i, ib, + a[i*lda+i:], lda, + tau[i:], + work[:tsize], ldt) + impl.Dlarfb(side, trans, lapack.Forward, lapack.ColumnWise, m-i, n, ib, + a[i*lda+i:], lda, + work[:tsize], ldt, + c[i*ldc:], ldc, + work[tsize:], ldwork) + } + + case left && !notrans: + for i := 0; i < k; i += nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.ColumnWise, m-i, ib, + a[i*lda+i:], lda, + tau[i:], + work[:tsize], ldt) + impl.Dlarfb(side, trans, lapack.Forward, lapack.ColumnWise, m-i, n, ib, + a[i*lda+i:], lda, + work[:tsize], ldt, + c[i*ldc:], ldc, + work[tsize:], ldwork) + } + + case !left && notrans: + for i := 0; i < k; i += nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.ColumnWise, n-i, ib, + a[i*lda+i:], lda, + tau[i:], + work[:tsize], ldt) + impl.Dlarfb(side, trans, lapack.Forward, lapack.ColumnWise, m, n-i, ib, + a[i*lda+i:], lda, + work[:tsize], ldt, + c[i:], ldc, + work[tsize:], ldwork) + } + + case !left && !notrans: + for i := ((k - 1) / nb) * nb; i >= 0; i -= nb { + ib := min(nb, k-i) + impl.Dlarft(lapack.Forward, lapack.ColumnWise, n-i, ib, + a[i*lda+i:], lda, + tau[i:], + work[:tsize], ldt) + impl.Dlarfb(side, trans, lapack.Forward, lapack.ColumnWise, m, n-i, ib, + a[i*lda+i:], lda, + work[:tsize], ldt, + c[i:], ldc, + work[tsize:], ldwork) + } + } + work[0] = float64(lworkopt) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dormr2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dormr2.go new file mode 100644 index 0000000..bb03f32 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dormr2.go @@ -0,0 +1,103 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dormr2 multiplies a general matrix C by an orthogonal matrix from a RQ factorization +// determined by Dgerqf. +// C = Q * C if side == blas.Left and trans == blas.NoTrans +// C = Q^T * C if side == blas.Left and trans == blas.Trans +// C = C * Q if side == blas.Right and trans == blas.NoTrans +// C = C * Q^T if side == blas.Right and trans == blas.Trans +// If side == blas.Left, a is a matrix of size k×m, and if side == blas.Right +// a is of size k×n. +// +// tau contains the Householder factors and is of length at least k and this function +// will panic otherwise. +// +// work is temporary storage of length at least n if side == blas.Left +// and at least m if side == blas.Right and this function will panic otherwise. +// +// Dormr2 is an internal routine. It is exported for testing purposes. 
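
// Editorial sketch, not part of the vendored gonum sources: a linear
// least-squares solve built on the blocked Dormqr defined above. Q^T*b is
// computed with Dormqr and the triangular system R*x = (Q^T*b)[:n] is then
// solved with the level-2 BLAS Dtrsv. The Dgeqrf call and its signature are an
// assumption from the same lapack/gonum package.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/blas/blas64"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	m, n := 4, 2
	a := []float64{
		1, 1,
		1, 2,
		1, 3,
		1, 4,
	}
	lda := n
	b := []float64{6, 5, 7, 10}
	tau := make([]float64, n)

	// QR factorization A = Q*R (workspace query first).
	work := make([]float64, 1)
	impl.Dgeqrf(m, n, a, lda, tau, work, -1)
	work = make([]float64, int(work[0]))
	impl.Dgeqrf(m, n, a, lda, tau, work, len(work))

	// b <- Q^T * b, treating b as an m×1 matrix with ldc = 1.
	impl.Dormqr(blas.Left, blas.Trans, m, 1, n, a, lda, tau, b, 1, work, -1)
	if lw := int(work[0]); lw > len(work) {
		work = make([]float64, lw)
	}
	impl.Dormqr(blas.Left, blas.Trans, m, 1, n, a, lda, tau, b, 1, work, len(work))

	// Solve R * x = (Q^T b)[:n]; R is the upper triangle of the factored a.
	blas64.Implementation().Dtrsv(blas.Upper, blas.NoTrans, blas.NonUnit, n, a, lda, b, 1)
	fmt.Println("least-squares solution:", b[:n])
}
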
+func (impl Implementation) Dormr2(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) { + left := side == blas.Left + nq := n + nw := m + if left { + nq = m + nw = n + } + switch { + case !left && side != blas.Right: + panic(badSide) + case trans != blas.NoTrans && trans != blas.Trans: + panic(badTrans) + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case k < 0: + panic(kLT0) + case left && k > m: + panic(kGTM) + case !left && k > n: + panic(kGTN) + case lda < max(1, nq): + panic(badLdA) + case ldc < max(1, n): + panic(badLdC) + } + + // Quick return if possible. + if m == 0 || n == 0 || k == 0 { + return + } + + switch { + case len(a) < (k-1)*lda+nq: + panic(shortA) + case len(tau) < k: + panic(shortTau) + case len(c) < (m-1)*ldc+n: + panic(shortC) + case len(work) < nw: + panic(shortWork) + } + + if left { + if trans == blas.NoTrans { + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+(m-k+i)] + a[i*lda+(m-k+i)] = 1 + impl.Dlarf(side, m-k+i+1, n, a[i*lda:], 1, tau[i], c, ldc, work) + a[i*lda+(m-k+i)] = aii + } + return + } + for i := 0; i < k; i++ { + aii := a[i*lda+(m-k+i)] + a[i*lda+(m-k+i)] = 1 + impl.Dlarf(side, m-k+i+1, n, a[i*lda:], 1, tau[i], c, ldc, work) + a[i*lda+(m-k+i)] = aii + } + return + } + if trans == blas.NoTrans { + for i := 0; i < k; i++ { + aii := a[i*lda+(n-k+i)] + a[i*lda+(n-k+i)] = 1 + impl.Dlarf(side, m, n-k+i+1, a[i*lda:], 1, tau[i], c, ldc, work) + a[i*lda+(n-k+i)] = aii + } + return + } + for i := k - 1; i >= 0; i-- { + aii := a[i*lda+(n-k+i)] + a[i*lda+(n-k+i)] = 1 + impl.Dlarf(side, m, n-k+i+1, a[i*lda:], 1, tau[i], c, ldc, work) + a[i*lda+(n-k+i)] = aii + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpbtf2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpbtf2.go new file mode 100644 index 0000000..a5beb80 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpbtf2.go @@ -0,0 +1,110 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dpbtf2 computes the Cholesky factorization of a symmetric positive banded +// matrix ab. The matrix ab is n×n with kd diagonal bands. The Cholesky +// factorization computed is +// A = U^T * U if ul == blas.Upper +// A = L * L^T if ul == blas.Lower +// ul also specifies the storage of ab. If ul == blas.Upper, then +// ab is stored as an upper-triangular banded matrix with kd super-diagonals, +// and if ul == blas.Lower, ab is stored as a lower-triangular banded matrix +// with kd sub-diagonals. On exit, the banded matrix U or L is stored in-place +// into ab depending on the value of ul. Dpbtf2 returns whether the factorization +// was successfully completed. +// +// The band storage scheme is illustrated below when n = 6, and kd = 2. +// The resulting Cholesky decomposition is stored in the same elements as the +// input band matrix (a11 becomes u11 or l11, etc.). +// +// ul = blas.Upper +// a11 a12 a13 +// a22 a23 a24 +// a33 a34 a35 +// a44 a45 a46 +// a55 a56 * +// a66 * * +// +// ul = blas.Lower +// * * a11 +// * a21 a22 +// a31 a32 a33 +// a42 a43 a44 +// a53 a54 a55 +// a64 a65 a66 +// +// Dpbtf2 is the unblocked version of the algorithm, see Dpbtrf for the blocked +// version. +// +// Dpbtf2 is an internal routine, exported for testing purposes. 
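
// Editorial sketch, not part of the vendored gonum sources: factoring a
// symmetric positive definite tridiagonal matrix with the unblocked Dpbtf2
// documented above, using the upper band storage layout it describes
// (kd = 1 super-diagonal).
package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	// A is the 4×4 tridiagonal matrix with 2 on the diagonal and -1 on the
	// off-diagonals, stored as an upper banded matrix with kd = 1: row j holds
	// A[j,j] and A[j,j+1]; the trailing entry of the last row is unused.
	n, kd := 4, 1
	ldab := kd + 1
	ab := []float64{
		2, -1,
		2, -1,
		2, -1,
		2, 0,
	}
	ok := impl.Dpbtf2(blas.Upper, n, kd, ab, ldab)
	fmt.Println("positive definite:", ok)
	fmt.Println("U in band storage:", ab)
}
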
+func (Implementation) Dpbtf2(ul blas.Uplo, n, kd int, ab []float64, ldab int) (ok bool) { + switch { + case ul != blas.Upper && ul != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case kd < 0: + panic(kdLT0) + case ldab < kd+1: + panic(badLdA) + } + + if n == 0 { + return + } + + if len(ab) < (n-1)*ldab+kd { + panic(shortAB) + } + + bi := blas64.Implementation() + + kld := max(1, ldab-1) + if ul == blas.Upper { + for j := 0; j < n; j++ { + // Compute U(J,J) and test for non positive-definiteness. + ajj := ab[j*ldab] + if ajj <= 0 { + return false + } + ajj = math.Sqrt(ajj) + ab[j*ldab] = ajj + // Compute elements j+1:j+kn of row J and update the trailing submatrix + // within the band. + kn := min(kd, n-j-1) + if kn > 0 { + bi.Dscal(kn, 1/ajj, ab[j*ldab+1:], 1) + bi.Dsyr(blas.Upper, kn, -1, ab[j*ldab+1:], 1, ab[(j+1)*ldab:], kld) + } + } + return true + } + for j := 0; j < n; j++ { + // Compute L(J,J) and test for non positive-definiteness. + ajj := ab[j*ldab+kd] + if ajj <= 0 { + return false + } + ajj = math.Sqrt(ajj) + ab[j*ldab+kd] = ajj + + // Compute elements J+1:J+KN of column J and update the trailing submatrix + // within the band. + kn := min(kd, n-j-1) + if kn > 0 { + bi.Dscal(kn, 1/ajj, ab[(j+1)*ldab+kd-1:], kld) + bi.Dsyr(blas.Lower, kn, -1, ab[(j+1)*ldab+kd-1:], kld, ab[(j+1)*ldab+kd:], kld) + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpocon.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpocon.go new file mode 100644 index 0000000..7af4c18 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpocon.go @@ -0,0 +1,90 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dpocon estimates the reciprocal of the condition number of a positive-definite +// matrix A given the Cholesky decomposition of A. The condition number computed +// is based on the 1-norm and the ∞-norm. +// +// anorm is the 1-norm and the ∞-norm of the original matrix A. +// +// work is a temporary data slice of length at least 3*n and Dpocon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Dpocon will panic otherwise. +func (impl Implementation) Dpocon(uplo blas.Uplo, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case anorm < 0: + panic(negANorm) + } + + // Quick return if possible. 
+ if n == 0 { + return 1 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(work) < 3*n: + panic(shortWork) + case len(iwork) < n: + panic(shortIWork) + } + + if anorm == 0 { + return 0 + } + + bi := blas64.Implementation() + + var ( + smlnum = dlamchS + rcond float64 + sl, su float64 + normin bool + ainvnm float64 + kase int + isave [3]int + ) + for { + ainvnm, kase = impl.Dlacn2(n, work[n:], work, iwork, ainvnm, kase, &isave) + if kase == 0 { + if ainvnm != 0 { + rcond = (1 / ainvnm) / anorm + } + return rcond + } + if uplo == blas.Upper { + sl = impl.Dlatrs(blas.Upper, blas.Trans, blas.NonUnit, normin, n, a, lda, work, work[2*n:]) + normin = true + su = impl.Dlatrs(blas.Upper, blas.NoTrans, blas.NonUnit, normin, n, a, lda, work, work[2*n:]) + } else { + sl = impl.Dlatrs(blas.Lower, blas.NoTrans, blas.NonUnit, normin, n, a, lda, work, work[2*n:]) + normin = true + su = impl.Dlatrs(blas.Lower, blas.Trans, blas.NonUnit, normin, n, a, lda, work, work[2*n:]) + } + scale := sl * su + if scale != 1 { + ix := bi.Idamax(n, work, 1) + if scale == 0 || scale < math.Abs(work[ix])*smlnum { + return rcond + } + impl.Drscl(n, scale, work, 1) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpotf2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotf2.go new file mode 100644 index 0000000..5d3327c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotf2.go @@ -0,0 +1,82 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dpotf2 computes the Cholesky decomposition of the symmetric positive definite +// matrix a. If ul == blas.Upper, then a is stored as an upper-triangular matrix, +// and a = U^T U is stored in place into a. If ul == blas.Lower, then a = L L^T +// is computed and stored in-place into a. If a is not positive definite, false +// is returned. This is the unblocked version of the algorithm. +// +// Dpotf2 is an internal routine. It is exported for testing purposes. +func (Implementation) Dpotf2(ul blas.Uplo, n int, a []float64, lda int) (ok bool) { + switch { + case ul != blas.Upper && ul != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
+ if n == 0 { + return true + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + bi := blas64.Implementation() + + if ul == blas.Upper { + for j := 0; j < n; j++ { + ajj := a[j*lda+j] + if j != 0 { + ajj -= bi.Ddot(j, a[j:], lda, a[j:], lda) + } + if ajj <= 0 || math.IsNaN(ajj) { + a[j*lda+j] = ajj + return false + } + ajj = math.Sqrt(ajj) + a[j*lda+j] = ajj + if j < n-1 { + bi.Dgemv(blas.Trans, j, n-j-1, + -1, a[j+1:], lda, a[j:], lda, + 1, a[j*lda+j+1:], 1) + bi.Dscal(n-j-1, 1/ajj, a[j*lda+j+1:], 1) + } + } + return true + } + for j := 0; j < n; j++ { + ajj := a[j*lda+j] + if j != 0 { + ajj -= bi.Ddot(j, a[j*lda:], 1, a[j*lda:], 1) + } + if ajj <= 0 || math.IsNaN(ajj) { + a[j*lda+j] = ajj + return false + } + ajj = math.Sqrt(ajj) + a[j*lda+j] = ajj + if j < n-1 { + bi.Dgemv(blas.NoTrans, n-j-1, j, + -1, a[(j+1)*lda:], lda, a[j*lda:], 1, + 1, a[(j+1)*lda+j:], lda) + bi.Dscal(n-j-1, 1/ajj, a[(j+1)*lda+j:], lda) + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrf.go new file mode 100644 index 0000000..2124168 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrf.go @@ -0,0 +1,81 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dpotrf computes the Cholesky decomposition of the symmetric positive definite +// matrix a. If ul == blas.Upper, then a is stored as an upper-triangular matrix, +// and a = U^T U is stored in place into a. If ul == blas.Lower, then a = L L^T +// is computed and stored in-place into a. If a is not positive definite, false +// is returned. This is the blocked version of the algorithm. +func (impl Implementation) Dpotrf(ul blas.Uplo, n int, a []float64, lda int) (ok bool) { + switch { + case ul != blas.Upper && ul != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
+ if n == 0 { + return true + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + nb := impl.Ilaenv(1, "DPOTRF", string(ul), n, -1, -1, -1) + if nb <= 1 || n <= nb { + return impl.Dpotf2(ul, n, a, lda) + } + bi := blas64.Implementation() + if ul == blas.Upper { + for j := 0; j < n; j += nb { + jb := min(nb, n-j) + bi.Dsyrk(blas.Upper, blas.Trans, jb, j, + -1, a[j:], lda, + 1, a[j*lda+j:], lda) + ok = impl.Dpotf2(blas.Upper, jb, a[j*lda+j:], lda) + if !ok { + return ok + } + if j+jb < n { + bi.Dgemm(blas.Trans, blas.NoTrans, jb, n-j-jb, j, + -1, a[j:], lda, a[j+jb:], lda, + 1, a[j*lda+j+jb:], lda) + bi.Dtrsm(blas.Left, blas.Upper, blas.Trans, blas.NonUnit, jb, n-j-jb, + 1, a[j*lda+j:], lda, + a[j*lda+j+jb:], lda) + } + } + return true + } + for j := 0; j < n; j += nb { + jb := min(nb, n-j) + bi.Dsyrk(blas.Lower, blas.NoTrans, jb, j, + -1, a[j*lda:], lda, + 1, a[j*lda+j:], lda) + ok := impl.Dpotf2(blas.Lower, jb, a[j*lda+j:], lda) + if !ok { + return ok + } + if j+jb < n { + bi.Dgemm(blas.NoTrans, blas.Trans, n-j-jb, jb, j, + -1, a[(j+jb)*lda:], lda, a[j*lda:], lda, + 1, a[(j+jb)*lda+j:], lda) + bi.Dtrsm(blas.Right, blas.Lower, blas.Trans, blas.NonUnit, n-j-jb, jb, + 1, a[j*lda+j:], lda, + a[(j+jb)*lda+j:], lda) + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpotri.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotri.go new file mode 100644 index 0000000..2394775 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotri.go @@ -0,0 +1,44 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/blas" + +// Dpotri computes the inverse of a real symmetric positive definite matrix A +// using its Cholesky factorization. +// +// On entry, a contains the triangular factor U or L from the Cholesky +// factorization A = U^T*U or A = L*L^T, as computed by Dpotrf. +// On return, a contains the upper or lower triangle of the (symmetric) +// inverse of A, overwriting the input factor U or L. +func (impl Implementation) Dpotri(uplo blas.Uplo, n int, a []float64, lda int) (ok bool) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. + if n == 0 { + return true + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + // Invert the triangular Cholesky factor U or L. + ok = impl.Dtrtri(uplo, blas.NonUnit, n, a, lda) + if !ok { + return false + } + + // Form inv(U)*inv(U)^T or inv(L)^T*inv(L). + impl.Dlauum(uplo, n, a, lda) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrs.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrs.go new file mode 100644 index 0000000..689e043 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dpotrs.go @@ -0,0 +1,62 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dpotrs solves a system of n linear equations A*X = B where A is an n×n +// symmetric positive definite matrix and B is an n×nrhs matrix. The matrix A is +// represented by its Cholesky factorization +// A = U^T*U if uplo == blas.Upper +// A = L*L^T if uplo == blas.Lower +// as computed by Dpotrf. 
On entry, B contains the right-hand side matrix B, on +// return it contains the solution matrix X. +func (Implementation) Dpotrs(uplo blas.Uplo, n, nrhs int, a []float64, lda int, b []float64, ldb int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case nrhs < 0: + panic(nrhsLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, nrhs): + panic(badLdB) + } + + // Quick return if possible. + if n == 0 || nrhs == 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(b) < (n-1)*ldb+nrhs: + panic(shortB) + } + + bi := blas64.Implementation() + + if uplo == blas.Upper { + // Solve U^T * U * X = B where U is stored in the upper triangle of A. + + // Solve U^T * X = B, overwriting B with X. + bi.Dtrsm(blas.Left, blas.Upper, blas.Trans, blas.NonUnit, n, nrhs, 1, a, lda, b, ldb) + // Solve U * X = B, overwriting B with X. + bi.Dtrsm(blas.Left, blas.Upper, blas.NoTrans, blas.NonUnit, n, nrhs, 1, a, lda, b, ldb) + } else { + // Solve L * L^T * X = B where L is stored in the lower triangle of A. + + // Solve L * X = B, overwriting B with X. + bi.Dtrsm(blas.Left, blas.Lower, blas.NoTrans, blas.NonUnit, n, nrhs, 1, a, lda, b, ldb) + // Solve L^T * X = B, overwriting B with X. + bi.Dtrsm(blas.Left, blas.Lower, blas.Trans, blas.NonUnit, n, nrhs, 1, a, lda, b, ldb) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/drscl.go b/vendor/gonum.org/v1/gonum/lapack/gonum/drscl.go new file mode 100644 index 0000000..b2772db --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/drscl.go @@ -0,0 +1,63 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" +) + +// Drscl multiplies the vector x by 1/a being careful to avoid overflow or +// underflow where possible. +// +// Drscl is an internal routine. It is exported for testing purposes. +func (impl Implementation) Drscl(n int, a float64, x []float64, incX int) { + switch { + case n < 0: + panic(nLT0) + case incX <= 0: + panic(badIncX) + } + + // Quick return if possible. + if n == 0 { + return + } + + if len(x) < 1+(n-1)*incX { + panic(shortX) + } + + bi := blas64.Implementation() + + cden := a + cnum := 1.0 + smlnum := dlamchS + bignum := 1 / smlnum + for { + cden1 := cden * smlnum + cnum1 := cnum / bignum + var mul float64 + var done bool + switch { + case cnum != 0 && math.Abs(cden1) > math.Abs(cnum): + mul = smlnum + done = false + cden = cden1 + case math.Abs(cnum1) > math.Abs(cden): + mul = bignum + done = false + cnum = cnum1 + default: + mul = cnum / cden + done = true + } + bi.Dscal(n, mul, x, incX) + if done { + break + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dsteqr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dsteqr.go new file mode 100644 index 0000000..d6c7861 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dsteqr.go @@ -0,0 +1,376 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dsteqr computes the eigenvalues and optionally the eigenvectors of a symmetric +// tridiagonal matrix using the implicit QL or QR method. 
The eigenvectors of a +// full or band symmetric matrix can also be found if Dsytrd, Dsptrd, or Dsbtrd +// have been used to reduce this matrix to tridiagonal form. +// +// d, on entry, contains the diagonal elements of the tridiagonal matrix. On exit, +// d contains the eigenvalues in ascending order. d must have length n and +// Dsteqr will panic otherwise. +// +// e, on entry, contains the off-diagonal elements of the tridiagonal matrix on +// entry, and is overwritten during the call to Dsteqr. e must have length n-1 and +// Dsteqr will panic otherwise. +// +// z, on entry, contains the n×n orthogonal matrix used in the reduction to +// tridiagonal form if compz == lapack.EVOrig. On exit, if +// compz == lapack.EVOrig, z contains the orthonormal eigenvectors of the +// original symmetric matrix, and if compz == lapack.EVTridiag, z contains the +// orthonormal eigenvectors of the symmetric tridiagonal matrix. z is not used +// if compz == lapack.EVCompNone. +// +// work must have length at least max(1, 2*n-2) if the eigenvectors are computed, +// and Dsteqr will panic otherwise. +// +// Dsteqr is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dsteqr(compz lapack.EVComp, n int, d, e, z []float64, ldz int, work []float64) (ok bool) { + switch { + case compz != lapack.EVCompNone && compz != lapack.EVTridiag && compz != lapack.EVOrig: + panic(badEVComp) + case n < 0: + panic(nLT0) + case ldz < 1, compz != lapack.EVCompNone && ldz < n: + panic(badLdZ) + } + + // Quick return if possible. + if n == 0 { + return true + } + + switch { + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + case compz != lapack.EVCompNone && len(z) < (n-1)*ldz+n: + panic(shortZ) + case compz != lapack.EVCompNone && len(work) < max(1, 2*n-2): + panic(shortWork) + } + + var icompz int + if compz == lapack.EVOrig { + icompz = 1 + } else if compz == lapack.EVTridiag { + icompz = 2 + } + + if n == 1 { + if icompz == 2 { + z[0] = 1 + } + return true + } + + bi := blas64.Implementation() + + eps := dlamchE + eps2 := eps * eps + safmin := dlamchS + safmax := 1 / safmin + ssfmax := math.Sqrt(safmax) / 3 + ssfmin := math.Sqrt(safmin) / eps2 + + // Compute the eigenvalues and eigenvectors of the tridiagonal matrix. + if icompz == 2 { + impl.Dlaset(blas.All, n, n, 0, 1, z, ldz) + } + const maxit = 30 + nmaxit := n * maxit + + jtot := 0 + + // Determine where the matrix splits and choose QL or QR iteration for each + // block, according to whether top or bottom diagonal element is smaller. + l1 := 0 + nm1 := n - 1 + + type scaletype int + const ( + down scaletype = iota + 1 + up + ) + var iscale scaletype + + for { + if l1 > n-1 { + // Order eigenvalues and eigenvectors. + if icompz == 0 { + impl.Dlasrt(lapack.SortIncreasing, n, d) + } else { + // TODO(btracey): Consider replacing this sort with a call to sort.Sort. 
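+ // Selection sort: move the smallest remaining eigenvalue into position i and swap the matching columns of z.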
+ for ii := 1; ii < n; ii++ { + i := ii - 1 + k := i + p := d[i] + for j := ii; j < n; j++ { + if d[j] < p { + k = j + p = d[j] + } + } + if k != i { + d[k] = d[i] + d[i] = p + bi.Dswap(n, z[i:], ldz, z[k:], ldz) + } + } + } + return true + } + if l1 > 0 { + e[l1-1] = 0 + } + var m int + if l1 <= nm1 { + for m = l1; m < nm1; m++ { + test := math.Abs(e[m]) + if test == 0 { + break + } + if test <= (math.Sqrt(math.Abs(d[m]))*math.Sqrt(math.Abs(d[m+1])))*eps { + e[m] = 0 + break + } + } + } + l := l1 + lsv := l + lend := m + lendsv := lend + l1 = m + 1 + if lend == l { + continue + } + + // Scale submatrix in rows and columns L to Lend + anorm := impl.Dlanst(lapack.MaxAbs, lend-l+1, d[l:], e[l:]) + switch { + case anorm == 0: + continue + case anorm > ssfmax: + iscale = down + // Pretend that d and e are matrices with 1 column. + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmax, lend-l+1, 1, d[l:], 1) + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmax, lend-l, 1, e[l:], 1) + case anorm < ssfmin: + iscale = up + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmin, lend-l+1, 1, d[l:], 1) + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmin, lend-l, 1, e[l:], 1) + } + + // Choose between QL and QR. + if math.Abs(d[lend]) < math.Abs(d[l]) { + lend = lsv + l = lendsv + } + if lend > l { + // QL Iteration. Look for small subdiagonal element. + for { + if l != lend { + for m = l; m < lend; m++ { + v := math.Abs(e[m]) + if v*v <= (eps2*math.Abs(d[m]))*math.Abs(d[m+1])+safmin { + break + } + } + } else { + m = lend + } + if m < lend { + e[m] = 0 + } + p := d[l] + if m == l { + // Eigenvalue found. + l++ + if l > lend { + break + } + continue + } + + // If remaining matrix is 2×2, use Dlae2 to compute its eigensystem. + if m == l+1 { + if icompz > 0 { + d[l], d[l+1], work[l], work[n-1+l] = impl.Dlaev2(d[l], e[l], d[l+1]) + impl.Dlasr(blas.Right, lapack.Variable, lapack.Backward, + n, 2, work[l:], work[n-1+l:], z[l:], ldz) + } else { + d[l], d[l+1] = impl.Dlae2(d[l], e[l], d[l+1]) + } + e[l] = 0 + l += 2 + if l > lend { + break + } + continue + } + + if jtot == nmaxit { + break + } + jtot++ + + // Form shift + g := (d[l+1] - p) / (2 * e[l]) + r := impl.Dlapy2(g, 1) + g = d[m] - p + e[l]/(g+math.Copysign(r, g)) + s := 1.0 + c := 1.0 + p = 0.0 + + // Inner loop + for i := m - 1; i >= l; i-- { + f := s * e[i] + b := c * e[i] + c, s, r = impl.Dlartg(g, f) + if i != m-1 { + e[i+1] = r + } + g = d[i+1] - p + r = (d[i]-g)*s + 2*c*b + p = s * r + d[i+1] = g + p + g = c*r - b + + // If eigenvectors are desired, then save rotations. + if icompz > 0 { + work[i] = c + work[n-1+i] = -s + } + } + // If eigenvectors are desired, then apply saved rotations. + if icompz > 0 { + mm := m - l + 1 + impl.Dlasr(blas.Right, lapack.Variable, lapack.Backward, + n, mm, work[l:], work[n-1+l:], z[l:], ldz) + } + d[l] -= p + e[l] = g + } + } else { + // QR Iteration. + // Look for small superdiagonal element. + for { + if l != lend { + for m = l; m > lend; m-- { + v := math.Abs(e[m-1]) + if v*v <= (eps2*math.Abs(d[m])*math.Abs(d[m-1]) + safmin) { + break + } + } + } else { + m = lend + } + if m > lend { + e[m-1] = 0 + } + p := d[l] + if m == l { + // Eigenvalue found + l-- + if l < lend { + break + } + continue + } + + // If remaining matrix is 2×2, use Dlae2 to compute its eigenvalues. 
+ if m == l-1 { + if icompz > 0 { + d[l-1], d[l], work[m], work[n-1+m] = impl.Dlaev2(d[l-1], e[l-1], d[l]) + impl.Dlasr(blas.Right, lapack.Variable, lapack.Forward, + n, 2, work[m:], work[n-1+m:], z[l-1:], ldz) + } else { + d[l-1], d[l] = impl.Dlae2(d[l-1], e[l-1], d[l]) + } + e[l-1] = 0 + l -= 2 + if l < lend { + break + } + continue + } + if jtot == nmaxit { + break + } + jtot++ + + // Form shift. + g := (d[l-1] - p) / (2 * e[l-1]) + r := impl.Dlapy2(g, 1) + g = d[m] - p + (e[l-1])/(g+math.Copysign(r, g)) + s := 1.0 + c := 1.0 + p = 0.0 + + // Inner loop. + for i := m; i < l; i++ { + f := s * e[i] + b := c * e[i] + c, s, r = impl.Dlartg(g, f) + if i != m { + e[i-1] = r + } + g = d[i] - p + r = (d[i+1]-g)*s + 2*c*b + p = s * r + d[i] = g + p + g = c*r - b + + // If eigenvectors are desired, then save rotations. + if icompz > 0 { + work[i] = c + work[n-1+i] = s + } + } + + // If eigenvectors are desired, then apply saved rotations. + if icompz > 0 { + mm := l - m + 1 + impl.Dlasr(blas.Right, lapack.Variable, lapack.Forward, + n, mm, work[m:], work[n-1+m:], z[m:], ldz) + } + d[l] -= p + e[l-1] = g + } + } + + // Undo scaling if necessary. + switch iscale { + case down: + // Pretend that d and e are matrices with 1 column. + impl.Dlascl(lapack.General, 0, 0, ssfmax, anorm, lendsv-lsv+1, 1, d[lsv:], 1) + impl.Dlascl(lapack.General, 0, 0, ssfmax, anorm, lendsv-lsv, 1, e[lsv:], 1) + case up: + impl.Dlascl(lapack.General, 0, 0, ssfmin, anorm, lendsv-lsv+1, 1, d[lsv:], 1) + impl.Dlascl(lapack.General, 0, 0, ssfmin, anorm, lendsv-lsv, 1, e[lsv:], 1) + } + + // Check for no convergence to an eigenvalue after a total of n*maxit iterations. + if jtot >= nmaxit { + break + } + } + for i := 0; i < n-1; i++ { + if e[i] != 0 { + return false + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dsterf.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dsterf.go new file mode 100644 index 0000000..dc1e178 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dsterf.go @@ -0,0 +1,285 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/lapack" +) + +// Dsterf computes all eigenvalues of a symmetric tridiagonal matrix using the +// Pal-Walker-Kahan variant of the QL or QR algorithm. +// +// d contains the diagonal elements of the tridiagonal matrix on entry, and +// contains the eigenvalues in ascending order on exit. d must have length at +// least n, or Dsterf will panic. +// +// e contains the off-diagonal elements of the tridiagonal matrix on entry, and is +// overwritten during the call to Dsterf. e must have length of at least n-1 or +// Dsterf will panic. +// +// Dsterf is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dsterf(n int, d, e []float64) (ok bool) { + if n < 0 { + panic(nLT0) + } + + // Quick return if possible. + if n == 0 { + return true + } + + switch { + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + } + + if n == 1 { + return true + } + + const ( + none = 0 // The values are not scaled. + down = 1 // The values are scaled below ssfmax threshold. + up = 2 // The values are scaled below ssfmin threshold. + ) + + // Determine the unit roundoff for this environment. 
+ eps := dlamchE + eps2 := eps * eps + safmin := dlamchS + safmax := 1 / safmin + ssfmax := math.Sqrt(safmax) / 3 + ssfmin := math.Sqrt(safmin) / eps2 + + // Compute the eigenvalues of the tridiagonal matrix. + maxit := 30 + nmaxit := n * maxit + jtot := 0 + + l1 := 0 + + for { + if l1 > n-1 { + impl.Dlasrt(lapack.SortIncreasing, n, d) + return true + } + if l1 > 0 { + e[l1-1] = 0 + } + var m int + for m = l1; m < n-1; m++ { + if math.Abs(e[m]) <= math.Sqrt(math.Abs(d[m]))*math.Sqrt(math.Abs(d[m+1]))*eps { + e[m] = 0 + break + } + } + + l := l1 + lsv := l + lend := m + lendsv := lend + l1 = m + 1 + if lend == 0 { + continue + } + + // Scale submatrix in rows and columns l to lend. + anorm := impl.Dlanst(lapack.MaxAbs, lend-l+1, d[l:], e[l:]) + iscale := none + if anorm == 0 { + continue + } + if anorm > ssfmax { + iscale = down + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmax, lend-l+1, 1, d[l:], n) + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmax, lend-l, 1, e[l:], n) + } else if anorm < ssfmin { + iscale = up + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmin, lend-l+1, 1, d[l:], n) + impl.Dlascl(lapack.General, 0, 0, anorm, ssfmin, lend-l, 1, e[l:], n) + } + + el := e[l:lend] + for i, v := range el { + el[i] *= v + } + + // Choose between QL and QR iteration. + if math.Abs(d[lend]) < math.Abs(d[l]) { + lend = lsv + l = lendsv + } + if lend >= l { + // QL Iteration. + // Look for small sub-diagonal element. + for { + if l != lend { + for m = l; m < lend; m++ { + if math.Abs(e[m]) <= eps2*(math.Abs(d[m]*d[m+1])) { + break + } + } + } else { + m = lend + } + if m < lend { + e[m] = 0 + } + p := d[l] + if m == l { + // Eigenvalue found. + l++ + if l > lend { + break + } + continue + } + // If remaining matrix is 2 by 2, use Dlae2 to compute its eigenvalues. + if m == l+1 { + d[l], d[l+1] = impl.Dlae2(d[l], math.Sqrt(e[l]), d[l+1]) + e[l] = 0 + l += 2 + if l > lend { + break + } + continue + } + if jtot == nmaxit { + break + } + jtot++ + + // Form shift. + rte := math.Sqrt(e[l]) + sigma := (d[l+1] - p) / (2 * rte) + r := impl.Dlapy2(sigma, 1) + sigma = p - (rte / (sigma + math.Copysign(r, sigma))) + + c := 1.0 + s := 0.0 + gamma := d[m] - sigma + p = gamma * gamma + + // Inner loop. + for i := m - 1; i >= l; i-- { + bb := e[i] + r := p + bb + if i != m-1 { + e[i+1] = s * r + } + oldc := c + c = p / r + s = bb / r + oldgam := gamma + alpha := d[i] + gamma = c*(alpha-sigma) - s*oldgam + d[i+1] = oldgam + (alpha - gamma) + if c != 0 { + p = (gamma * gamma) / c + } else { + p = oldc * bb + } + } + e[l] = s * p + d[l] = sigma + gamma + } + } else { + for { + // QR Iteration. + // Look for small super-diagonal element. + for m = l; m > lend; m-- { + if math.Abs(e[m-1]) <= eps2*math.Abs(d[m]*d[m-1]) { + break + } + } + if m > lend { + e[m-1] = 0 + } + p := d[l] + if m == l { + // Eigenvalue found. + l-- + if l < lend { + break + } + continue + } + + // If remaining matrix is 2 by 2, use Dlae2 to compute its eigenvalues. + if m == l-1 { + d[l], d[l-1] = impl.Dlae2(d[l], math.Sqrt(e[l-1]), d[l-1]) + e[l-1] = 0 + l -= 2 + if l < lend { + break + } + continue + } + if jtot == nmaxit { + break + } + jtot++ + + // Form shift. + rte := math.Sqrt(e[l-1]) + sigma := (d[l-1] - p) / (2 * rte) + r := impl.Dlapy2(sigma, 1) + sigma = p - (rte / (sigma + math.Copysign(r, sigma))) + + c := 1.0 + s := 0.0 + gamma := d[m] - sigma + p = gamma * gamma + + // Inner loop. 
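+ // The recurrence below operates on the squared off-diagonal elements held in e (they were squared above), so the Pal-Walker-Kahan update needs no square roots inside the loop.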
+ for i := m; i < l; i++ { + bb := e[i] + r := p + bb + if i != m { + e[i-1] = s * r + } + oldc := c + c = p / r + s = bb / r + oldgam := gamma + alpha := d[i+1] + gamma = c*(alpha-sigma) - s*oldgam + d[i] = oldgam + alpha - gamma + if c != 0 { + p = (gamma * gamma) / c + } else { + p = oldc * bb + } + } + e[l-1] = s * p + d[l] = sigma + gamma + } + } + + // Undo scaling if necessary + switch iscale { + case down: + impl.Dlascl(lapack.General, 0, 0, ssfmax, anorm, lendsv-lsv+1, 1, d[lsv:], n) + case up: + impl.Dlascl(lapack.General, 0, 0, ssfmin, anorm, lendsv-lsv+1, 1, d[lsv:], n) + } + + // Check for no convergence to an eigenvalue after a total of n*maxit iterations. + if jtot >= nmaxit { + break + } + } + for _, v := range e[:n-1] { + if v != 0 { + return false + } + } + impl.Dlasrt(lapack.SortIncreasing, n, d) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dsyev.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dsyev.go new file mode 100644 index 0000000..5f57f3a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dsyev.go @@ -0,0 +1,130 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dsyev computes all eigenvalues and, optionally, the eigenvectors of a real +// symmetric matrix A. +// +// w contains the eigenvalues in ascending order upon return. w must have length +// at least n, and Dsyev will panic otherwise. +// +// On entry, a contains the elements of the symmetric matrix A in the triangular +// portion specified by uplo. If jobz == lapack.EVCompute, a contains the +// orthonormal eigenvectors of A on exit, otherwise jobz must be lapack.EVNone +// and on exit the specified triangular region is overwritten. +// +// work is temporary storage, and lwork specifies the usable memory length. At minimum, +// lwork >= 3*n-1, and Dsyev will panic otherwise. The amount of blocking is +// limited by the usable length. If lwork == -1, instead of computing Dsyev the +// optimal work length is stored into work[0]. +func (impl Implementation) Dsyev(jobz lapack.EVJob, uplo blas.Uplo, n int, a []float64, lda int, w, work []float64, lwork int) (ok bool) { + switch { + case jobz != lapack.EVNone && jobz != lapack.EVCompute: + panic(badEVJob) + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < max(1, 3*n-1) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + return true + } + + var opts string + if uplo == blas.Upper { + opts = "U" + } else { + opts = "L" + } + nb := impl.Ilaenv(1, "DSYTRD", opts, n, -1, -1, -1) + lworkopt := max(1, (nb+2)*n) + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(w) < n: + panic(shortW) + } + + if n == 1 { + w[0] = a[0] + work[0] = 2 + if jobz == lapack.EVCompute { + a[0] = 1 + } + return true + } + + safmin := dlamchS + eps := dlamchP + smlnum := safmin / eps + bignum := 1 / smlnum + rmin := math.Sqrt(smlnum) + rmax := math.Sqrt(bignum) + + // Scale matrix to allowable range, if necessary. 
+ anrm := impl.Dlansy(lapack.MaxAbs, uplo, n, a, lda, work) + scaled := false + var sigma float64 + if anrm > 0 && anrm < rmin { + scaled = true + sigma = rmin / anrm + } else if anrm > rmax { + scaled = true + sigma = rmax / anrm + } + if scaled { + kind := lapack.LowerTri + if uplo == blas.Upper { + kind = lapack.UpperTri + } + impl.Dlascl(kind, 0, 0, 1, sigma, n, n, a, lda) + } + var inde int + indtau := inde + n + indwork := indtau + n + llwork := lwork - indwork + impl.Dsytrd(uplo, n, a, lda, w, work[inde:], work[indtau:], work[indwork:], llwork) + + // For eigenvalues only, call Dsterf. For eigenvectors, first call Dorgtr + // to generate the orthogonal matrix, then call Dsteqr. + if jobz == lapack.EVNone { + ok = impl.Dsterf(n, w, work[inde:]) + } else { + impl.Dorgtr(uplo, n, a, lda, work[indtau:], work[indwork:], llwork) + ok = impl.Dsteqr(lapack.EVComp(jobz), n, w, work[inde:], a, lda, work[indtau:]) + } + if !ok { + return false + } + + // If the matrix was scaled, then rescale eigenvalues appropriately. + if scaled { + bi := blas64.Implementation() + bi.Dscal(n, 1/sigma, w, 1) + } + work[0] = float64(lworkopt) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dsytd2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dsytd2.go new file mode 100644 index 0000000..23cfd05 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dsytd2.go @@ -0,0 +1,136 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dsytd2 reduces a symmetric n×n matrix A to symmetric tridiagonal form T by +// an orthogonal similarity transformation +// Q^T * A * Q = T +// On entry, the matrix is contained in the specified triangle of a. On exit, +// if uplo == blas.Upper, the diagonal and first super-diagonal of a are +// overwritten with the elements of T. The elements above the first super-diagonal +// are overwritten with the elementary reflectors that are used with +// the elements written to tau in order to construct Q. If uplo == blas.Lower, +// the elements are written in the lower triangular region. +// +// d must have length at least n. e and tau must have length at least n-1. Dsytd2 +// will panic if these sizes are not met. +// +// Q is represented as a product of elementary reflectors. +// If uplo == blas.Upper +// Q = H_{n-2} * ... * H_1 * H_0 +// and if uplo == blas.Lower +// Q = H_0 * H_1 * ... * H_{n-2} +// where +// H_i = I - tau * v * v^T +// where tau is stored in tau[i], and v is stored in a. +// +// If uplo == blas.Upper, v[0:i-1] is stored in A[0:i-1,i+1], v[i] = 1, and +// v[i+1:] = 0. The elements of a are +// [ d e v2 v3 v4] +// [ d e v3 v4] +// [ d e v4] +// [ d e] +// [ d] +// If uplo == blas.Lower, v[0:i+1] = 0, v[i+1] = 1, and v[i+2:] is stored in +// A[i+2:n,i]. +// The elements of a are +// [ d ] +// [ e d ] +// [v1 e d ] +// [v1 v2 e d ] +// [v1 v2 v3 e d] +// +// Dsytd2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dsytd2(uplo blas.Uplo, n int, a []float64, lda int, d, e, tau []float64) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + // Quick return if possible. 
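A minimal usage sketch for the Dsyev driver above, showing the documented lwork == -1 workspace query followed by the actual computation (not part of the vendored file; the row-major layout, the concrete matrix, and the gonum.Implementation entry point are illustrative assumptions):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	n := 3
	// Symmetric matrix stored row-major; only the upper triangle is referenced.
	a := []float64{
		4, 1, 2,
		0, 3, 1,
		0, 0, 2,
	}
	w := make([]float64, n)

	// Workspace query: with lwork == -1 only work[0] is written.
	query := make([]float64, 1)
	impl.Dsyev(lapack.EVCompute, blas.Upper, n, a, n, w, query, -1)
	work := make([]float64, int(query[0]))

	ok := impl.Dsyev(lapack.EVCompute, blas.Upper, n, a, n, w, work, len(work))
	fmt.Println(ok, w) // eigenvalues in ascending order; a now holds the eigenvectors
}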
+ if n == 0 { + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + case len(tau) < n-1: + panic(shortTau) + } + + bi := blas64.Implementation() + + if uplo == blas.Upper { + // Reduce the upper triangle of A. + for i := n - 2; i >= 0; i-- { + // Generate elementary reflector H_i = I - tau * v * v^T to + // annihilate A[i:i-1, i+1]. + var taui float64 + a[i*lda+i+1], taui = impl.Dlarfg(i+1, a[i*lda+i+1], a[i+1:], lda) + e[i] = a[i*lda+i+1] + if taui != 0 { + // Apply H_i from both sides to A[0:i,0:i]. + a[i*lda+i+1] = 1 + + // Compute x := tau * A * v storing x in tau[0:i]. + bi.Dsymv(uplo, i+1, taui, a, lda, a[i+1:], lda, 0, tau, 1) + + // Compute w := x - 1/2 * tau * (x^T * v) * v. + alpha := -0.5 * taui * bi.Ddot(i+1, tau, 1, a[i+1:], lda) + bi.Daxpy(i+1, alpha, a[i+1:], lda, tau, 1) + + // Apply the transformation as a rank-2 update + // A = A - v * w^T - w * v^T. + bi.Dsyr2(uplo, i+1, -1, a[i+1:], lda, tau, 1, a, lda) + a[i*lda+i+1] = e[i] + } + d[i+1] = a[(i+1)*lda+i+1] + tau[i] = taui + } + d[0] = a[0] + return + } + // Reduce the lower triangle of A. + for i := 0; i < n-1; i++ { + // Generate elementary reflector H_i = I - tau * v * v^T to + // annihilate A[i+2:n, i]. + var taui float64 + a[(i+1)*lda+i], taui = impl.Dlarfg(n-i-1, a[(i+1)*lda+i], a[min(i+2, n-1)*lda+i:], lda) + e[i] = a[(i+1)*lda+i] + if taui != 0 { + // Apply H_i from both sides to A[i+1:n, i+1:n]. + a[(i+1)*lda+i] = 1 + + // Compute x := tau * A * v, storing y in tau[i:n-1]. + bi.Dsymv(uplo, n-i-1, taui, a[(i+1)*lda+i+1:], lda, a[(i+1)*lda+i:], lda, 0, tau[i:], 1) + + // Compute w := x - 1/2 * tau * (x^T * v) * v. + alpha := -0.5 * taui * bi.Ddot(n-i-1, tau[i:], 1, a[(i+1)*lda+i:], lda) + bi.Daxpy(n-i-1, alpha, a[(i+1)*lda+i:], lda, tau[i:], 1) + + // Apply the transformation as a rank-2 update + // A = A - v * w^T - w * v^T. + bi.Dsyr2(uplo, n-i-1, -1, a[(i+1)*lda+i:], lda, tau[i:], 1, a[(i+1)*lda+i+1:], lda) + a[(i+1)*lda+i] = e[i] + } + d[i] = a[i*lda+i] + tau[i] = taui + } + d[n-1] = a[(n-1)*lda+n-1] +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dsytrd.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dsytrd.go new file mode 100644 index 0000000..df47568 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dsytrd.go @@ -0,0 +1,172 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dsytrd reduces a symmetric n×n matrix A to symmetric tridiagonal form by an +// orthogonal similarity transformation +// Q^T * A * Q = T +// where Q is an orthonormal matrix and T is symmetric and tridiagonal. +// +// On entry, a contains the elements of the input matrix in the triangle specified +// by uplo. On exit, the diagonal and sub/super-diagonal are overwritten by the +// corresponding elements of the tridiagonal matrix T. The remaining elements in +// the triangle, along with the array tau, contain the data to construct Q as +// the product of elementary reflectors. +// +// If uplo == blas.Upper, Q is constructed with +// Q = H_{n-2} * ... * H_1 * H_0 +// where +// H_i = I - tau_i * v * v^T +// v is constructed as v[i+1:n] = 0, v[i] = 1, v[0:i-1] is stored in A[0:i-1, i+1]. 
+// The elements of A are +// [ d e v1 v2 v3] +// [ d e v2 v3] +// [ d e v3] +// [ d e] +// [ e] +// +// If uplo == blas.Lower, Q is constructed with +// Q = H_0 * H_1 * ... * H_{n-2} +// where +// H_i = I - tau_i * v * v^T +// v is constructed as v[0:i+1] = 0, v[i+1] = 1, v[i+2:n] is stored in A[i+2:n, i]. +// The elements of A are +// [ d ] +// [ e d ] +// [v0 e d ] +// [v0 v1 e d ] +// [v0 v1 v2 e d] +// +// d must have length n, and e and tau must have length n-1. Dsytrd will panic if +// these conditions are not met. +// +// work is temporary storage, and lwork specifies the usable memory length. At minimum, +// lwork >= 1, and Dsytrd will panic otherwise. The amount of blocking is +// limited by the usable length. +// If lwork == -1, instead of computing Dsytrd the optimal work length is stored +// into work[0]. +// +// Dsytrd is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dsytrd(uplo blas.Uplo, n int, a []float64, lda int, d, e, tau, work []float64, lwork int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + case lwork < 1 && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return + } + + nb := impl.Ilaenv(1, "DSYTRD", string(uplo), n, -1, -1, -1) + lworkopt := n * nb + if lwork == -1 { + work[0] = float64(lworkopt) + return + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(d) < n: + panic(shortD) + case len(e) < n-1: + panic(shortE) + case len(tau) < n-1: + panic(shortTau) + } + + bi := blas64.Implementation() + + nx := n + iws := 1 + var ldwork int + if 1 < nb && nb < n { + // Determine when to cross over from blocked to unblocked code. The last + // block is always handled by unblocked code. + nx = max(nb, impl.Ilaenv(3, "DSYTRD", string(uplo), n, -1, -1, -1)) + if nx < n { + // Determine if workspace is large enough for blocked code. + ldwork = nb + iws = n * ldwork + if lwork < iws { + // Not enough workspace to use optimal nb: determine the minimum + // value of nb and reduce nb or force use of unblocked code by + // setting nx = n. + nb = max(lwork/n, 1) + nbmin := impl.Ilaenv(2, "DSYTRD", string(uplo), n, -1, -1, -1) + if nb < nbmin { + nx = n + } + } + } else { + nx = n + } + } else { + nb = 1 + } + ldwork = nb + + if uplo == blas.Upper { + // Reduce the upper triangle of A. Columns 0:kk are handled by the + // unblocked method. + var i int + kk := n - ((n-nx+nb-1)/nb)*nb + for i = n - nb; i >= kk; i -= nb { + // Reduce columns i:i+nb to tridiagonal form and form the matrix W + // which is needed to update the unreduced part of the matrix. + impl.Dlatrd(uplo, i+nb, nb, a, lda, e, tau, work, ldwork) + + // Update the unreduced submatrix A[0:i-1,0:i-1], using an update + // of the form A = A - V*W^T - W*V^T. + bi.Dsyr2k(uplo, blas.NoTrans, i, nb, -1, a[i:], lda, work, ldwork, 1, a, lda) + + // Copy superdiagonal elements back into A, and diagonal elements into D. + for j := i; j < i+nb; j++ { + a[(j-1)*lda+j] = e[j-1] + d[j] = a[j*lda+j] + } + } + // Use unblocked code to reduce the last or only block + // check that i == kk. + impl.Dsytd2(uplo, kk, a, lda, d, e, tau) + } else { + var i int + // Reduce the lower triangle of A. + for i = 0; i < n-nx; i += nb { + // Reduce columns 0:i+nb to tridiagonal form and form the matrix W + // which is needed to update the unreduced part of the matrix. 
+ impl.Dlatrd(uplo, n-i, nb, a[i*lda+i:], lda, e[i:], tau[i:], work, ldwork) + + // Update the unreduced submatrix A[i+ib:n, i+ib:n], using an update + // of the form A = A + V*W^T - W*V^T. + bi.Dsyr2k(uplo, blas.NoTrans, n-i-nb, nb, -1, a[(i+nb)*lda+i:], lda, + work[nb*ldwork:], ldwork, 1, a[(i+nb)*lda+i+nb:], lda) + + // Copy subdiagonal elements back into A, and diagonal elements into D. + for j := i; j < i+nb; j++ { + a[(j+1)*lda+j] = e[j] + d[j] = a[j*lda+j] + } + } + // Use unblocked code to reduce the last or only block. + impl.Dsytd2(uplo, n-i, a[i*lda+i:], lda, d[i:], e[i:], tau[i:]) + } + work[0] = float64(iws) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtgsja.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtgsja.go new file mode 100644 index 0000000..d70d196 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtgsja.go @@ -0,0 +1,373 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dtgsja computes the generalized singular value decomposition (GSVD) +// of two real upper triangular or trapezoidal matrices A and B. +// +// A and B have the following forms, which may be obtained by the +// preprocessing subroutine Dggsvp from a general m×n matrix A and p×n +// matrix B: +// +// n-k-l k l +// A = k [ 0 A12 A13 ] if m-k-l >= 0; +// l [ 0 0 A23 ] +// m-k-l [ 0 0 0 ] +// +// n-k-l k l +// A = k [ 0 A12 A13 ] if m-k-l < 0; +// m-k [ 0 0 A23 ] +// +// n-k-l k l +// B = l [ 0 0 B13 ] +// p-l [ 0 0 0 ] +// +// where the k×k matrix A12 and l×l matrix B13 are non-singular +// upper triangular. A23 is l×l upper triangular if m-k-l >= 0, +// otherwise A23 is (m-k)×l upper trapezoidal. +// +// On exit, +// +// U^T*A*Q = D1*[ 0 R ], V^T*B*Q = D2*[ 0 R ], +// +// where U, V and Q are orthogonal matrices. +// R is a non-singular upper triangular matrix, and D1 and D2 are +// diagonal matrices, which are of the following structures: +// +// If m-k-l >= 0, +// +// k l +// D1 = k [ I 0 ] +// l [ 0 C ] +// m-k-l [ 0 0 ] +// +// k l +// D2 = l [ 0 S ] +// p-l [ 0 0 ] +// +// n-k-l k l +// [ 0 R ] = k [ 0 R11 R12 ] k +// l [ 0 0 R22 ] l +// +// where +// +// C = diag( alpha_k, ... , alpha_{k+l} ), +// S = diag( beta_k, ... , beta_{k+l} ), +// C^2 + S^2 = I. +// +// R is stored in +// A[0:k+l, n-k-l:n] +// on exit. +// +// If m-k-l < 0, +// +// k m-k k+l-m +// D1 = k [ I 0 0 ] +// m-k [ 0 C 0 ] +// +// k m-k k+l-m +// D2 = m-k [ 0 S 0 ] +// k+l-m [ 0 0 I ] +// p-l [ 0 0 0 ] +// +// n-k-l k m-k k+l-m +// [ 0 R ] = k [ 0 R11 R12 R13 ] +// m-k [ 0 0 R22 R23 ] +// k+l-m [ 0 0 0 R33 ] +// +// where +// C = diag( alpha_k, ... , alpha_m ), +// S = diag( beta_k, ... , beta_m ), +// C^2 + S^2 = I. +// +// R = [ R11 R12 R13 ] is stored in A[0:m, n-k-l:n] +// [ 0 R22 R23 ] +// and R33 is stored in +// B[m-k:l, n+m-k-l:n] on exit. +// +// The computation of the orthogonal transformation matrices U, V or Q +// is optional. These matrices may either be formed explicitly, or they +// may be post-multiplied into input matrices U1, V1, or Q1. +// +// Dtgsja essentially uses a variant of Kogbetliantz algorithm to reduce +// min(l,m-k)×l triangular or trapezoidal matrix A23 and l×l +// matrix B13 to the form: +// +// U1^T*A13*Q1 = C1*R1; V1^T*B13*Q1 = S1*R1, +// +// where U1, V1 and Q1 are orthogonal matrices. 
C1 and S1 are diagonal +// matrices satisfying +// +// C1^2 + S1^2 = I, +// +// and R1 is an l×l non-singular upper triangular matrix. +// +// jobU, jobV and jobQ are options for computing the orthogonal matrices. The behavior +// is as follows +// jobU == lapack.GSVDU Compute orthogonal matrix U +// jobU == lapack.GSVDUnit Use unit-initialized matrix +// jobU == lapack.GSVDNone Do not compute orthogonal matrix. +// The behavior is the same for jobV and jobQ with the exception that instead of +// lapack.GSVDU these accept lapack.GSVDV and lapack.GSVDQ respectively. +// The matrices U, V and Q must be m×m, p×p and n×n respectively unless the +// relevant job parameter is lapack.GSVDNone. +// +// k and l specify the sub-blocks in the input matrices A and B: +// A23 = A[k:min(k+l,m), n-l:n) and B13 = B[0:l, n-l:n] +// of A and B, whose GSVD is going to be computed by Dtgsja. +// +// tola and tolb are the convergence criteria for the Jacobi-Kogbetliantz +// iteration procedure. Generally, they are the same as used in the preprocessing +// step, for example, +// tola = max(m, n)*norm(A)*eps, +// tolb = max(p, n)*norm(B)*eps, +// where eps is the machine epsilon. +// +// work must have length at least 2*n, otherwise Dtgsja will panic. +// +// alpha and beta must have length n or Dtgsja will panic. On exit, alpha and +// beta contain the generalized singular value pairs of A and B +// alpha[0:k] = 1, +// beta[0:k] = 0, +// if m-k-l >= 0, +// alpha[k:k+l] = diag(C), +// beta[k:k+l] = diag(S), +// if m-k-l < 0, +// alpha[k:m]= C, alpha[m:k+l]= 0 +// beta[k:m] = S, beta[m:k+l] = 1. +// if k+l < n, +// alpha[k+l:n] = 0 and +// beta[k+l:n] = 0. +// +// On exit, A[n-k:n, 0:min(k+l,m)] contains the triangular matrix R or part of R +// and if necessary, B[m-k:l, n+m-k-l:n] contains a part of R. +// +// Dtgsja returns whether the routine converged and the number of iteration cycles +// that were run. +// +// Dtgsja is an internal routine. It is exported for testing purposes. 
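A sketch of how the tola and tolb formulas quoted above might be formed before calling Dtgsja (not part of the vendored file). The choice of the 1-norm via Dlange and the maxInt helper are assumptions for illustration; Dtgsja itself only requires that the tolerances match those used in the preprocessing step.

package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func maxInt(a, b int) int {
	if a > b {
		return a
	}
	return b
}

// gsvdTols mirrors tola = max(m,n)*norm(A)*eps and tolb = max(p,n)*norm(B)*eps.
func gsvdTols(impl gonum.Implementation, m, p, n int, a []float64, lda int, b []float64, ldb int) (tola, tolb float64) {
	eps := math.Nextafter(1, 2) - 1 // float64 machine epsilon
	work := make([]float64, maxInt(maxInt(m, p), n))
	anorm := impl.Dlange(lapack.MaxColumnSum, m, n, a, lda, work)
	bnorm := impl.Dlange(lapack.MaxColumnSum, p, n, b, ldb, work)
	tola = float64(maxInt(m, n)) * anorm * eps
	tolb = float64(maxInt(p, n)) * bnorm * eps
	return tola, tolb
}

func main() {
	impl := gonum.Implementation{}
	a := []float64{1, 2, 0, 3} // 2×2 upper triangular, row-major
	b := []float64{4, 0, 0, 5} // 2×2, row-major
	tola, tolb := gsvdTols(impl, 2, 2, 2, a, 2, b, 2)
	fmt.Println(tola, tolb)
}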
+func (impl Implementation) Dtgsja(jobU, jobV, jobQ lapack.GSVDJob, m, p, n, k, l int, a []float64, lda int, b []float64, ldb int, tola, tolb float64, alpha, beta, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, work []float64) (cycles int, ok bool) { + const maxit = 40 + + initu := jobU == lapack.GSVDUnit + wantu := initu || jobU == lapack.GSVDU + + initv := jobV == lapack.GSVDUnit + wantv := initv || jobV == lapack.GSVDV + + initq := jobQ == lapack.GSVDUnit + wantq := initq || jobQ == lapack.GSVDQ + + switch { + case !initu && !wantu && jobU != lapack.GSVDNone: + panic(badGSVDJob + "U") + case !initv && !wantv && jobV != lapack.GSVDNone: + panic(badGSVDJob + "V") + case !initq && !wantq && jobQ != lapack.GSVDNone: + panic(badGSVDJob + "Q") + case m < 0: + panic(mLT0) + case p < 0: + panic(pLT0) + case n < 0: + panic(nLT0) + + case lda < max(1, n): + panic(badLdA) + case len(a) < (m-1)*lda+n: + panic(shortA) + + case ldb < max(1, n): + panic(badLdB) + case len(b) < (p-1)*ldb+n: + panic(shortB) + + case len(alpha) != n: + panic(badLenAlpha) + case len(beta) != n: + panic(badLenBeta) + + case ldu < 1, wantu && ldu < m: + panic(badLdU) + case wantu && len(u) < (m-1)*ldu+m: + panic(shortU) + + case ldv < 1, wantv && ldv < p: + panic(badLdV) + case wantv && len(v) < (p-1)*ldv+p: + panic(shortV) + + case ldq < 1, wantq && ldq < n: + panic(badLdQ) + case wantq && len(q) < (n-1)*ldq+n: + panic(shortQ) + + case len(work) < 2*n: + panic(shortWork) + } + + // Initialize U, V and Q, if necessary + if initu { + impl.Dlaset(blas.All, m, m, 0, 1, u, ldu) + } + if initv { + impl.Dlaset(blas.All, p, p, 0, 1, v, ldv) + } + if initq { + impl.Dlaset(blas.All, n, n, 0, 1, q, ldq) + } + + bi := blas64.Implementation() + minTol := math.Min(tola, tolb) + + // Loop until convergence. + upper := false + for cycles = 1; cycles <= maxit; cycles++ { + upper = !upper + + for i := 0; i < l-1; i++ { + for j := i + 1; j < l; j++ { + var a1, a2, a3 float64 + if k+i < m { + a1 = a[(k+i)*lda+n-l+i] + } + if k+j < m { + a3 = a[(k+j)*lda+n-l+j] + } + + b1 := b[i*ldb+n-l+i] + b3 := b[j*ldb+n-l+j] + + var b2 float64 + if upper { + if k+i < m { + a2 = a[(k+i)*lda+n-l+j] + } + b2 = b[i*ldb+n-l+j] + } else { + if k+j < m { + a2 = a[(k+j)*lda+n-l+i] + } + b2 = b[j*ldb+n-l+i] + } + + csu, snu, csv, snv, csq, snq := impl.Dlags2(upper, a1, a2, a3, b1, b2, b3) + + // Update (k+i)-th and (k+j)-th rows of matrix A: U^T*A. + if k+j < m { + bi.Drot(l, a[(k+j)*lda+n-l:], 1, a[(k+i)*lda+n-l:], 1, csu, snu) + } + + // Update i-th and j-th rows of matrix B: V^T*B. + bi.Drot(l, b[j*ldb+n-l:], 1, b[i*ldb+n-l:], 1, csv, snv) + + // Update (n-l+i)-th and (n-l+j)-th columns of matrices + // A and B: A*Q and B*Q. + bi.Drot(min(k+l, m), a[n-l+j:], lda, a[n-l+i:], lda, csq, snq) + bi.Drot(l, b[n-l+j:], ldb, b[n-l+i:], ldb, csq, snq) + + if upper { + if k+i < m { + a[(k+i)*lda+n-l+j] = 0 + } + b[i*ldb+n-l+j] = 0 + } else { + if k+j < m { + a[(k+j)*lda+n-l+i] = 0 + } + b[j*ldb+n-l+i] = 0 + } + + // Update orthogonal matrices U, V, Q, if desired. + if wantu && k+j < m { + bi.Drot(m, u[k+j:], ldu, u[k+i:], ldu, csu, snu) + } + if wantv { + bi.Drot(p, v[j:], ldv, v[i:], ldv, csv, snv) + } + if wantq { + bi.Drot(n, q[n-l+j:], ldq, q[n-l+i:], ldq, csq, snq) + } + } + } + + if !upper { + // The matrices A13 and B13 were lower triangular at the start + // of the cycle, and are now upper triangular. + // + // Convergence test: test the parallelism of the corresponding + // rows of A and B. 
+ var error float64 + for i := 0; i < min(l, m-k); i++ { + bi.Dcopy(l-i, a[(k+i)*lda+n-l+i:], 1, work, 1) + bi.Dcopy(l-i, b[i*ldb+n-l+i:], 1, work[l:], 1) + ssmin := impl.Dlapll(l-i, work, 1, work[l:], 1) + error = math.Max(error, ssmin) + } + if math.Abs(error) <= minTol { + // The algorithm has converged. + // Compute the generalized singular value pairs (alpha, beta) + // and set the triangular matrix R to array A. + for i := 0; i < k; i++ { + alpha[i] = 1 + beta[i] = 0 + } + + for i := 0; i < min(l, m-k); i++ { + a1 := a[(k+i)*lda+n-l+i] + b1 := b[i*ldb+n-l+i] + + if a1 != 0 { + gamma := b1 / a1 + + // Change sign if necessary. + if gamma < 0 { + bi.Dscal(l-i, -1, b[i*ldb+n-l+i:], 1) + if wantv { + bi.Dscal(p, -1, v[i:], ldv) + } + } + beta[k+i], alpha[k+i], _ = impl.Dlartg(math.Abs(gamma), 1) + + if alpha[k+i] >= beta[k+i] { + bi.Dscal(l-i, 1/alpha[k+i], a[(k+i)*lda+n-l+i:], 1) + } else { + bi.Dscal(l-i, 1/beta[k+i], b[i*ldb+n-l+i:], 1) + bi.Dcopy(l-i, b[i*ldb+n-l+i:], 1, a[(k+i)*lda+n-l+i:], 1) + } + } else { + alpha[k+i] = 0 + beta[k+i] = 1 + bi.Dcopy(l-i, b[i*ldb+n-l+i:], 1, a[(k+i)*lda+n-l+i:], 1) + } + } + + for i := m; i < k+l; i++ { + alpha[i] = 0 + beta[i] = 1 + } + if k+l < n { + for i := k + l; i < n; i++ { + alpha[i] = 0 + beta[i] = 0 + } + } + + return cycles, true + } + } + } + + // The algorithm has not converged after maxit cycles. + return cycles, false +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrcon.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrcon.go new file mode 100644 index 0000000..899c95d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrcon.go @@ -0,0 +1,90 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dtrcon estimates the reciprocal of the condition number of a triangular matrix A. +// The condition number computed may be based on the 1-norm or the ∞-norm. +// +// work is a temporary data slice of length at least 3*n and Dtrcon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Dtrcon will panic otherwise. 
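A minimal usage sketch for Dtrcon as documented above, estimating the reciprocal 1-norm condition number of an upper-triangular matrix (not part of the vendored file; the 3×3 matrix and the gonum.Implementation entry point are illustrative assumptions):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/blas"
	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	n := 3
	// Upper-triangular matrix, row-major; the strictly lower part is ignored.
	a := []float64{
		4, 1, 2,
		0, 3, 1,
		0, 0, 2,
	}
	work := make([]float64, 3*n)
	iwork := make([]int, n)
	rcond := impl.Dtrcon(lapack.MaxColumnSum, blas.Upper, blas.NonUnit, n, a, n, work, iwork)
	fmt.Println("estimated reciprocal 1-norm condition number:", rcond)
}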
+func (impl Implementation) Dtrcon(norm lapack.MatrixNorm, uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int, work []float64, iwork []int) float64 { + switch { + case norm != lapack.MaxColumnSum && norm != lapack.MaxRowSum: + panic(badNorm) + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case diag != blas.NonUnit && diag != blas.Unit: + panic(badDiag) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 { + return 1 + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(work) < 3*n: + panic(shortWork) + case len(iwork) < n: + panic(shortIWork) + } + + bi := blas64.Implementation() + + var rcond float64 + smlnum := dlamchS * float64(n) + + anorm := impl.Dlantr(norm, uplo, diag, n, n, a, lda, work) + + if anorm <= 0 { + return rcond + } + var ainvnm float64 + var normin bool + kase1 := 2 + if norm == lapack.MaxColumnSum { + kase1 = 1 + } + var kase int + isave := new([3]int) + var scale float64 + for { + ainvnm, kase = impl.Dlacn2(n, work[n:], work, iwork, ainvnm, kase, isave) + if kase == 0 { + if ainvnm != 0 { + rcond = (1 / anorm) / ainvnm + } + return rcond + } + if kase == kase1 { + scale = impl.Dlatrs(uplo, blas.NoTrans, diag, normin, n, a, lda, work, work[2*n:]) + } else { + scale = impl.Dlatrs(uplo, blas.Trans, diag, normin, n, a, lda, work, work[2*n:]) + } + normin = true + if scale != 1 { + ix := bi.Idamax(n, work, 1) + xnorm := math.Abs(work[ix]) + if scale == 0 || scale < xnorm*smlnum { + return rcond + } + impl.Drscl(n, scale, work, 1) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrevc3.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrevc3.go new file mode 100644 index 0000000..17121b8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrevc3.go @@ -0,0 +1,885 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +// Dtrevc3 computes some or all of the right and/or left eigenvectors of an n×n +// upper quasi-triangular matrix T in Schur canonical form. Matrices of this +// type are produced by the Schur factorization of a real general matrix A +// A = Q T Q^T, +// as computed by Dhseqr. +// +// The right eigenvector x of T corresponding to an +// eigenvalue λ is defined by +// T x = λ x, +// and the left eigenvector y is defined by +// y^T T = λ y^T. +// +// The eigenvalues are read directly from the diagonal blocks of T. +// +// This routine returns the matrices X and/or Y of right and left eigenvectors +// of T, or the products Q*X and/or Q*Y, where Q is an input matrix. If Q is the +// orthogonal factor that reduces a matrix A to Schur form T, then Q*X and Q*Y +// are the matrices of right and left eigenvectors of A. +// +// If side == lapack.EVRight, only right eigenvectors will be computed. +// If side == lapack.EVLeft, only left eigenvectors will be computed. +// If side == lapack.EVBoth, both right and left eigenvectors will be computed. +// For other values of side, Dtrevc3 will panic. +// +// If howmny == lapack.EVAll, all right and/or left eigenvectors will be +// computed. +// If howmny == lapack.EVAllMulQ, all right and/or left eigenvectors will be +// computed and multiplied from left by the matrices in VR and/or VL. 
+// If howmny == lapack.EVSelected, right and/or left eigenvectors will be +// computed as indicated by selected. +// For other values of howmny, Dtrevc3 will panic. +// +// selected specifies which eigenvectors will be computed. It must have length n +// if howmny == lapack.EVSelected, and it is not referenced otherwise. +// If w_j is a real eigenvalue, the corresponding real eigenvector will be +// computed if selected[j] is true. +// If w_j and w_{j+1} are the real and imaginary parts of a complex eigenvalue, +// the corresponding complex eigenvector is computed if either selected[j] or +// selected[j+1] is true, and on return selected[j] will be set to true and +// selected[j+1] will be set to false. +// +// VL and VR are n×mm matrices. If howmny is lapack.EVAll or +// lapack.AllEVMulQ, mm must be at least n. If howmny is +// lapack.EVSelected, mm must be large enough to store the selected +// eigenvectors. Each selected real eigenvector occupies one column and each +// selected complex eigenvector occupies two columns. If mm is not sufficiently +// large, Dtrevc3 will panic. +// +// On entry, if howmny is lapack.EVAllMulQ, it is assumed that VL (if side +// is lapack.EVLeft or lapack.EVBoth) contains an n×n matrix QL, +// and that VR (if side is lapack.EVLeft or lapack.EVBoth) contains +// an n×n matrix QR. QL and QR are typically the orthogonal matrix Q of Schur +// vectors returned by Dhseqr. +// +// On return, if side is lapack.EVLeft or lapack.EVBoth, +// VL will contain: +// if howmny == lapack.EVAll, the matrix Y of left eigenvectors of T, +// if howmny == lapack.EVAllMulQ, the matrix Q*Y, +// if howmny == lapack.EVSelected, the left eigenvectors of T specified by +// selected, stored consecutively in the +// columns of VL, in the same order as their +// eigenvalues. +// VL is not referenced if side == lapack.EVRight. +// +// On return, if side is lapack.EVRight or lapack.EVBoth, +// VR will contain: +// if howmny == lapack.EVAll, the matrix X of right eigenvectors of T, +// if howmny == lapack.EVAllMulQ, the matrix Q*X, +// if howmny == lapack.EVSelected, the left eigenvectors of T specified by +// selected, stored consecutively in the +// columns of VR, in the same order as their +// eigenvalues. +// VR is not referenced if side == lapack.EVLeft. +// +// Complex eigenvectors corresponding to a complex eigenvalue are stored in VL +// and VR in two consecutive columns, the first holding the real part, and the +// second the imaginary part. +// +// Each eigenvector will be normalized so that the element of largest magnitude +// has magnitude 1. Here the magnitude of a complex number (x,y) is taken to be +// |x| + |y|. +// +// work must have length at least lwork and lwork must be at least max(1,3*n), +// otherwise Dtrevc3 will panic. For optimum performance, lwork should be at +// least n+2*n*nb, where nb is the optimal blocksize. +// +// If lwork == -1, instead of performing Dtrevc3, the function only estimates +// the optimal workspace size based on n and stores it into work[0]. +// +// Dtrevc3 returns the number of columns in VL and/or VR actually used to store +// the eigenvectors. +// +// Dtrevc3 is an internal routine. It is exported for testing purposes. 
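A minimal usage sketch for Dtrevc3 as documented above, using the lwork == -1 workspace query on a 2×2 matrix that is already upper triangular and hence already a (trivial) real Schur form (not part of the vendored file; the concrete values are illustrative assumptions):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/lapack"
	"gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := gonum.Implementation{}
	n := 2
	// T is upper triangular, so it is already in Schur canonical form. Row-major.
	t := []float64{
		2, 1,
		0, 3,
	}
	vr := make([]float64, n*n)

	// Workspace query: with lwork == -1 only the optimal size is written to work[0].
	query := make([]float64, 1)
	impl.Dtrevc3(lapack.EVRight, lapack.EVAll, nil, n, t, n, nil, 1, vr, n, n, query, -1)
	work := make([]float64, int(query[0]))

	m := impl.Dtrevc3(lapack.EVRight, lapack.EVAll, nil, n, t, n, nil, 1, vr, n, n, work, len(work))
	fmt.Println("columns used:", m)
	fmt.Println("right eigenvectors, one per column:", vr)
}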
+func (impl Implementation) Dtrevc3(side lapack.EVSide, howmny lapack.EVHowMany, selected []bool, n int, t []float64, ldt int, vl []float64, ldvl int, vr []float64, ldvr int, mm int, work []float64, lwork int) (m int) { + bothv := side == lapack.EVBoth + rightv := side == lapack.EVRight || bothv + leftv := side == lapack.EVLeft || bothv + switch { + case !rightv && !leftv: + panic(badEVSide) + case howmny != lapack.EVAll && howmny != lapack.EVAllMulQ && howmny != lapack.EVSelected: + panic(badEVHowMany) + case n < 0: + panic(nLT0) + case ldt < max(1, n): + panic(badLdT) + case mm < 0: + panic(mmLT0) + case ldvl < 1: + // ldvl and ldvr are also checked below after the computation of + // m (number of columns of VL and VR) in case of howmny == EVSelected. + panic(badLdVL) + case ldvr < 1: + panic(badLdVR) + case lwork < max(1, 3*n) && lwork != -1: + panic(badLWork) + case len(work) < max(1, lwork): + panic(shortWork) + } + + // Quick return if possible. + if n == 0 { + work[0] = 1 + return 0 + } + + // Normally we don't check slice lengths until after the workspace + // query. However, even in case of the workspace query we need to + // compute and return the value of m, and since the computation accesses t, + // we put the length check of t here. + if len(t) < (n-1)*ldt+n { + panic(shortT) + } + + if howmny == lapack.EVSelected { + if len(selected) != n { + panic(badLenSelected) + } + // Set m to the number of columns required to store the selected + // eigenvectors, and standardize the slice selected. + // Each selected real eigenvector occupies one column and each + // selected complex eigenvector occupies two columns. + for j := 0; j < n; { + if j == n-1 || t[(j+1)*ldt+j] == 0 { + // Diagonal 1×1 block corresponding to a + // real eigenvalue. + if selected[j] { + m++ + } + j++ + } else { + // Diagonal 2×2 block corresponding to a + // complex eigenvalue. + if selected[j] || selected[j+1] { + selected[j] = true + selected[j+1] = false + m += 2 + } + j += 2 + } + } + } else { + m = n + } + if mm < m { + panic(badMm) + } + + // Quick return in case of a workspace query. + nb := impl.Ilaenv(1, "DTREVC", string(side)+string(howmny), n, -1, -1, -1) + if lwork == -1 { + work[0] = float64(n + 2*n*nb) + return m + } + + // Quick return if no eigenvectors were selected. + if m == 0 { + return 0 + } + + switch { + case leftv && ldvl < mm: + panic(badLdVL) + case leftv && len(vl) < (n-1)*ldvl+mm: + panic(shortVL) + + case rightv && ldvr < mm: + panic(badLdVR) + case rightv && len(vr) < (n-1)*ldvr+mm: + panic(shortVR) + } + + // Use blocked version of back-transformation if sufficient workspace. + // Zero-out the workspace to avoid potential NaN propagation. + const ( + nbmin = 8 + nbmax = 128 + ) + if howmny == lapack.EVAllMulQ && lwork >= n+2*n*nbmin { + nb = min((lwork-n)/(2*n), nbmax) + impl.Dlaset(blas.All, n, 1+2*nb, 0, 0, work[:n+2*nb*n], 1+2*nb) + } else { + nb = 1 + } + + // Set the constants to control overflow. + ulp := dlamchP + smlnum := float64(n) / ulp * dlamchS + bignum := (1 - ulp) / smlnum + + // Split work into a vector of column norms and an n×2*nb matrix b. + norms := work[:n] + ldb := 2 * nb + b := work[n : n+n*ldb] + + // Compute 1-norm of each column of strictly upper triangular part of T + // to control overflow in triangular solver. + norms[0] = 0 + for j := 1; j < n; j++ { + var cn float64 + for i := 0; i < j; i++ { + cn += math.Abs(t[i*ldt+j]) + } + norms[j] = cn + } + + bi := blas64.Implementation() + + var ( + x [4]float64 + + iv int // Index of column in current block. 
+ is int + + // ip is used below to specify the real or complex eigenvalue: + // ip == 0, real eigenvalue, + // 1, first of conjugate complex pair (wr,wi), + // -1, second of conjugate complex pair (wr,wi). + ip int + iscomplex [nbmax]int // Stores ip for each column in current block. + ) + + if side == lapack.EVLeft { + goto leftev + } + + // Compute right eigenvectors. + + // For complex right vector, iv-1 is for real part and iv for complex + // part. Non-blocked version always uses iv=1, blocked version starts + // with iv=nb-1 and goes down to 0 or 1. + iv = max(2, nb) - 1 + ip = 0 + is = m - 1 + for ki := n - 1; ki >= 0; ki-- { + if ip == -1 { + // Previous iteration (ki+1) was second of + // conjugate pair, so this ki is first of + // conjugate pair. + ip = 1 + continue + } + + if ki == 0 || t[ki*ldt+ki-1] == 0 { + // Last column or zero on sub-diagonal, so this + // ki must be real eigenvalue. + ip = 0 + } else { + // Non-zero on sub-diagonal, so this ki is + // second of conjugate pair. + ip = -1 + } + + if howmny == lapack.EVSelected { + if ip == 0 { + if !selected[ki] { + continue + } + } else if !selected[ki-1] { + continue + } + } + + // Compute the ki-th eigenvalue (wr,wi). + wr := t[ki*ldt+ki] + var wi float64 + if ip != 0 { + wi = math.Sqrt(math.Abs(t[ki*ldt+ki-1])) * math.Sqrt(math.Abs(t[(ki-1)*ldt+ki])) + } + smin := math.Max(ulp*(math.Abs(wr)+math.Abs(wi)), smlnum) + + if ip == 0 { + // Real right eigenvector. + + b[ki*ldb+iv] = 1 + // Form right-hand side. + for k := 0; k < ki; k++ { + b[k*ldb+iv] = -t[k*ldt+ki] + } + // Solve upper quasi-triangular system: + // [ T[0:ki,0:ki] - wr ]*X = scale*b. + for j := ki - 1; j >= 0; { + if j == 0 || t[j*ldt+j-1] == 0 { + // 1×1 diagonal block. + scale, xnorm, _ := impl.Dlaln2(false, 1, 1, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv:], ldb, wr, 0, x[:1], 2) + // Scale X[0,0] to avoid overflow when updating the + // right-hand side. + if xnorm > 1 && norms[j] > bignum/xnorm { + x[0] /= xnorm + scale /= xnorm + } + // Scale if necessary. + if scale != 1 { + bi.Dscal(ki+1, scale, b[iv:], ldb) + } + b[j*ldb+iv] = x[0] + // Update right-hand side. + bi.Daxpy(j, -x[0], t[j:], ldt, b[iv:], ldb) + j-- + } else { + // 2×2 diagonal block. + scale, xnorm, _ := impl.Dlaln2(false, 2, 1, smin, 1, t[(j-1)*ldt+j-1:], ldt, + 1, 1, b[(j-1)*ldb+iv:], ldb, wr, 0, x[:3], 2) + // Scale X[0,0] and X[1,0] to avoid overflow + // when updating the right-hand side. + if xnorm > 1 { + beta := math.Max(norms[j-1], norms[j]) + if beta > bignum/xnorm { + x[0] /= xnorm + x[2] /= xnorm + scale /= xnorm + } + } + // Scale if necessary. + if scale != 1 { + bi.Dscal(ki+1, scale, b[iv:], ldb) + } + b[(j-1)*ldb+iv] = x[0] + b[j*ldb+iv] = x[2] + // Update right-hand side. + bi.Daxpy(j-1, -x[0], t[j-1:], ldt, b[iv:], ldb) + bi.Daxpy(j-1, -x[2], t[j:], ldt, b[iv:], ldb) + j -= 2 + } + } + // Copy the vector x or Q*x to VR and normalize. + switch { + case howmny != lapack.EVAllMulQ: + // No back-transform: copy x to VR and normalize. + bi.Dcopy(ki+1, b[iv:], ldb, vr[is:], ldvr) + ii := bi.Idamax(ki+1, vr[is:], ldvr) + remax := 1 / math.Abs(vr[ii*ldvr+is]) + bi.Dscal(ki+1, remax, vr[is:], ldvr) + for k := ki + 1; k < n; k++ { + vr[k*ldvr+is] = 0 + } + case nb == 1: + // Version 1: back-transform each vector with GEMV, Q*x. 
+ if ki > 0 { + bi.Dgemv(blas.NoTrans, n, ki, 1, vr, ldvr, b[iv:], ldb, + b[ki*ldb+iv], vr[ki:], ldvr) + } + ii := bi.Idamax(n, vr[ki:], ldvr) + remax := 1 / math.Abs(vr[ii*ldvr+ki]) + bi.Dscal(n, remax, vr[ki:], ldvr) + default: + // Version 2: back-transform block of vectors with GEMM. + // Zero out below vector. + for k := ki + 1; k < n; k++ { + b[k*ldb+iv] = 0 + } + iscomplex[iv] = ip + // Back-transform and normalization is done below. + } + } else { + // Complex right eigenvector. + + // Initial solve + // [ ( T[ki-1,ki-1] T[ki-1,ki] ) - (wr + i*wi) ]*X = 0. + // [ ( T[ki, ki-1] T[ki, ki] ) ] + if math.Abs(t[(ki-1)*ldt+ki]) >= math.Abs(t[ki*ldt+ki-1]) { + b[(ki-1)*ldb+iv-1] = 1 + b[ki*ldb+iv] = wi / t[(ki-1)*ldt+ki] + } else { + b[(ki-1)*ldb+iv-1] = -wi / t[ki*ldt+ki-1] + b[ki*ldb+iv] = 1 + } + b[ki*ldb+iv-1] = 0 + b[(ki-1)*ldb+iv] = 0 + // Form right-hand side. + for k := 0; k < ki-1; k++ { + b[k*ldb+iv-1] = -b[(ki-1)*ldb+iv-1] * t[k*ldt+ki-1] + b[k*ldb+iv] = -b[ki*ldb+iv] * t[k*ldt+ki] + } + // Solve upper quasi-triangular system: + // [ T[0:ki-1,0:ki-1] - (wr+i*wi) ]*X = scale*(b1+i*b2) + for j := ki - 2; j >= 0; { + if j == 0 || t[j*ldt+j-1] == 0 { + // 1×1 diagonal block. + + scale, xnorm, _ := impl.Dlaln2(false, 1, 2, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv-1:], ldb, wr, wi, x[:2], 2) + // Scale X[0,0] and X[0,1] to avoid + // overflow when updating the right-hand side. + if xnorm > 1 && norms[j] > bignum/xnorm { + x[0] /= xnorm + x[1] /= xnorm + scale /= xnorm + } + // Scale if necessary. + if scale != 1 { + bi.Dscal(ki+1, scale, b[iv-1:], ldb) + bi.Dscal(ki+1, scale, b[iv:], ldb) + } + b[j*ldb+iv-1] = x[0] + b[j*ldb+iv] = x[1] + // Update the right-hand side. + bi.Daxpy(j, -x[0], t[j:], ldt, b[iv-1:], ldb) + bi.Daxpy(j, -x[1], t[j:], ldt, b[iv:], ldb) + j-- + } else { + // 2×2 diagonal block. + + scale, xnorm, _ := impl.Dlaln2(false, 2, 2, smin, 1, t[(j-1)*ldt+j-1:], ldt, + 1, 1, b[(j-1)*ldb+iv-1:], ldb, wr, wi, x[:], 2) + // Scale X to avoid overflow when updating + // the right-hand side. + if xnorm > 1 { + beta := math.Max(norms[j-1], norms[j]) + if beta > bignum/xnorm { + rec := 1 / xnorm + x[0] *= rec + x[1] *= rec + x[2] *= rec + x[3] *= rec + scale *= rec + } + } + // Scale if necessary. + if scale != 1 { + bi.Dscal(ki+1, scale, b[iv-1:], ldb) + bi.Dscal(ki+1, scale, b[iv:], ldb) + } + b[(j-1)*ldb+iv-1] = x[0] + b[(j-1)*ldb+iv] = x[1] + b[j*ldb+iv-1] = x[2] + b[j*ldb+iv] = x[3] + // Update the right-hand side. + bi.Daxpy(j-1, -x[0], t[j-1:], ldt, b[iv-1:], ldb) + bi.Daxpy(j-1, -x[1], t[j-1:], ldt, b[iv:], ldb) + bi.Daxpy(j-1, -x[2], t[j:], ldt, b[iv-1:], ldb) + bi.Daxpy(j-1, -x[3], t[j:], ldt, b[iv:], ldb) + j -= 2 + } + } + + // Copy the vector x or Q*x to VR and normalize. + switch { + case howmny != lapack.EVAllMulQ: + // No back-transform: copy x to VR and normalize. + bi.Dcopy(ki+1, b[iv-1:], ldb, vr[is-1:], ldvr) + bi.Dcopy(ki+1, b[iv:], ldb, vr[is:], ldvr) + emax := 0.0 + for k := 0; k <= ki; k++ { + emax = math.Max(emax, math.Abs(vr[k*ldvr+is-1])+math.Abs(vr[k*ldvr+is])) + } + remax := 1 / emax + bi.Dscal(ki+1, remax, vr[is-1:], ldvr) + bi.Dscal(ki+1, remax, vr[is:], ldvr) + for k := ki + 1; k < n; k++ { + vr[k*ldvr+is-1] = 0 + vr[k*ldvr+is] = 0 + } + case nb == 1: + // Version 1: back-transform each vector with GEMV, Q*x. 
+ if ki-1 > 0 { + bi.Dgemv(blas.NoTrans, n, ki-1, 1, vr, ldvr, b[iv-1:], ldb, + b[(ki-1)*ldb+iv-1], vr[ki-1:], ldvr) + bi.Dgemv(blas.NoTrans, n, ki-1, 1, vr, ldvr, b[iv:], ldb, + b[ki*ldb+iv], vr[ki:], ldvr) + } else { + bi.Dscal(n, b[(ki-1)*ldb+iv-1], vr[ki-1:], ldvr) + bi.Dscal(n, b[ki*ldb+iv], vr[ki:], ldvr) + } + emax := 0.0 + for k := 0; k < n; k++ { + emax = math.Max(emax, math.Abs(vr[k*ldvr+ki-1])+math.Abs(vr[k*ldvr+ki])) + } + remax := 1 / emax + bi.Dscal(n, remax, vr[ki-1:], ldvr) + bi.Dscal(n, remax, vr[ki:], ldvr) + default: + // Version 2: back-transform block of vectors with GEMM. + // Zero out below vector. + for k := ki + 1; k < n; k++ { + b[k*ldb+iv-1] = 0 + b[k*ldb+iv] = 0 + } + iscomplex[iv-1] = -ip + iscomplex[iv] = ip + iv-- + // Back-transform and normalization is done below. + } + } + if nb > 1 { + // Blocked version of back-transform. + + // For complex case, ki2 includes both vectors (ki-1 and ki). + ki2 := ki + if ip != 0 { + ki2-- + } + // Columns iv:nb of b are valid vectors. + // When the number of vectors stored reaches nb-1 or nb, + // or if this was last vector, do the Gemm. + if iv < 2 || ki2 == 0 { + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, nb-iv, ki2+nb-iv, + 1, vr, ldvr, b[iv:], ldb, + 0, b[nb+iv:], ldb) + // Normalize vectors. + var remax float64 + for k := iv; k < nb; k++ { + if iscomplex[k] == 0 { + // Real eigenvector. + ii := bi.Idamax(n, b[nb+k:], ldb) + remax = 1 / math.Abs(b[ii*ldb+nb+k]) + } else if iscomplex[k] == 1 { + // First eigenvector of conjugate pair. + emax := 0.0 + for ii := 0; ii < n; ii++ { + emax = math.Max(emax, math.Abs(b[ii*ldb+nb+k])+math.Abs(b[ii*ldb+nb+k+1])) + } + remax = 1 / emax + // Second eigenvector of conjugate pair + // will reuse this value of remax. + } + bi.Dscal(n, remax, b[nb+k:], ldb) + } + impl.Dlacpy(blas.All, n, nb-iv, b[nb+iv:], ldb, vr[ki2:], ldvr) + iv = nb - 1 + } else { + iv-- + } + } + is-- + if ip != 0 { + is-- + } + } + + if side == lapack.EVRight { + return m + } + +leftev: + // Compute left eigenvectors. + + // For complex left vector, iv is for real part and iv+1 for complex + // part. Non-blocked version always uses iv=0. Blocked version starts + // with iv=0, goes up to nb-2 or nb-1. + iv = 0 + ip = 0 + is = 0 + for ki := 0; ki < n; ki++ { + if ip == 1 { + // Previous iteration ki-1 was first of conjugate pair, + // so this ki is second of conjugate pair. + ip = -1 + continue + } + + if ki == n-1 || t[(ki+1)*ldt+ki] == 0 { + // Last column or zero on sub-diagonal, so this ki must + // be real eigenvalue. + ip = 0 + } else { + // Non-zero on sub-diagonal, so this ki is first of + // conjugate pair. + ip = 1 + } + if howmny == lapack.EVSelected && !selected[ki] { + continue + } + + // Compute the ki-th eigenvalue (wr,wi). + wr := t[ki*ldt+ki] + var wi float64 + if ip != 0 { + wi = math.Sqrt(math.Abs(t[ki*ldt+ki+1])) * math.Sqrt(math.Abs(t[(ki+1)*ldt+ki])) + } + smin := math.Max(ulp*(math.Abs(wr)+math.Abs(wi)), smlnum) + + if ip == 0 { + // Real left eigenvector. + + b[ki*ldb+iv] = 1 + // Form right-hand side. + for k := ki + 1; k < n; k++ { + b[k*ldb+iv] = -t[ki*ldt+k] + } + // Solve transposed quasi-triangular system: + // [ T[ki+1:n,ki+1:n] - wr ]^T * X = scale*b + vmax := 1.0 + vcrit := bignum + for j := ki + 1; j < n; { + if j == n-1 || t[(j+1)*ldt+j] == 0 { + // 1×1 diagonal block. + + // Scale if necessary to avoid overflow + // when forming the right-hand side. 
+ if norms[j] > vcrit { + rec := 1 / vmax + bi.Dscal(n-ki, rec, b[ki*ldb+iv:], ldb) + vmax = 1 + } + b[j*ldb+iv] -= bi.Ddot(j-ki-1, t[(ki+1)*ldt+j:], ldt, b[(ki+1)*ldb+iv:], ldb) + // Solve [ T[j,j] - wr ]^T * X = b. + scale, _, _ := impl.Dlaln2(false, 1, 1, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv:], ldb, wr, 0, x[:1], 2) + // Scale if necessary. + if scale != 1 { + bi.Dscal(n-ki, scale, b[ki*ldb+iv:], ldb) + } + b[j*ldb+iv] = x[0] + vmax = math.Max(math.Abs(b[j*ldb+iv]), vmax) + vcrit = bignum / vmax + j++ + } else { + // 2×2 diagonal block. + + // Scale if necessary to avoid overflow + // when forming the right-hand side. + beta := math.Max(norms[j], norms[j+1]) + if beta > vcrit { + bi.Dscal(n-ki+1, 1/vmax, b[ki*ldb+iv:], 1) + vmax = 1 + } + b[j*ldb+iv] -= bi.Ddot(j-ki-1, t[(ki+1)*ldt+j:], ldt, b[(ki+1)*ldb+iv:], ldb) + b[(j+1)*ldb+iv] -= bi.Ddot(j-ki-1, t[(ki+1)*ldt+j+1:], ldt, b[(ki+1)*ldb+iv:], ldb) + // Solve + // [ T[j,j]-wr T[j,j+1] ]^T * X = scale*[ b1 ] + // [ T[j+1,j] T[j+1,j+1]-wr ] [ b2 ] + scale, _, _ := impl.Dlaln2(true, 2, 1, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv:], ldb, wr, 0, x[:3], 2) + // Scale if necessary. + if scale != 1 { + bi.Dscal(n-ki, scale, b[ki*ldb+iv:], ldb) + } + b[j*ldb+iv] = x[0] + b[(j+1)*ldb+iv] = x[2] + vmax = math.Max(vmax, math.Max(math.Abs(b[j*ldb+iv]), math.Abs(b[(j+1)*ldb+iv]))) + vcrit = bignum / vmax + j += 2 + } + } + // Copy the vector x or Q*x to VL and normalize. + switch { + case howmny != lapack.EVAllMulQ: + // No back-transform: copy x to VL and normalize. + bi.Dcopy(n-ki, b[ki*ldb+iv:], ldb, vl[ki*ldvl+is:], ldvl) + ii := bi.Idamax(n-ki, vl[ki*ldvl+is:], ldvl) + ki + remax := 1 / math.Abs(vl[ii*ldvl+is]) + bi.Dscal(n-ki, remax, vl[ki*ldvl+is:], ldvl) + for k := 0; k < ki; k++ { + vl[k*ldvl+is] = 0 + } + case nb == 1: + // Version 1: back-transform each vector with Gemv, Q*x. + if n-ki-1 > 0 { + bi.Dgemv(blas.NoTrans, n, n-ki-1, + 1, vl[ki+1:], ldvl, b[(ki+1)*ldb+iv:], ldb, + b[ki*ldb+iv], vl[ki:], ldvl) + } + ii := bi.Idamax(n, vl[ki:], ldvl) + remax := 1 / math.Abs(vl[ii*ldvl+ki]) + bi.Dscal(n, remax, vl[ki:], ldvl) + default: + // Version 2: back-transform block of vectors with Gemm + // zero out above vector. + for k := 0; k < ki; k++ { + b[k*ldb+iv] = 0 + } + iscomplex[iv] = ip + // Back-transform and normalization is done below. + } + } else { + // Complex left eigenvector. + + // Initial solve: + // [ [ T[ki,ki] T[ki,ki+1] ]^T - (wr - i* wi) ]*X = 0. + // [ [ T[ki+1,ki] T[ki+1,ki+1] ] ] + if math.Abs(t[ki*ldt+ki+1]) >= math.Abs(t[(ki+1)*ldt+ki]) { + b[ki*ldb+iv] = wi / t[ki*ldt+ki+1] + b[(ki+1)*ldb+iv+1] = 1 + } else { + b[ki*ldb+iv] = 1 + b[(ki+1)*ldb+iv+1] = -wi / t[(ki+1)*ldt+ki] + } + b[(ki+1)*ldb+iv] = 0 + b[ki*ldb+iv+1] = 0 + // Form right-hand side. + for k := ki + 2; k < n; k++ { + b[k*ldb+iv] = -b[ki*ldb+iv] * t[ki*ldt+k] + b[k*ldb+iv+1] = -b[(ki+1)*ldb+iv+1] * t[(ki+1)*ldt+k] + } + // Solve transposed quasi-triangular system: + // [ T[ki+2:n,ki+2:n]^T - (wr-i*wi) ]*X = b1+i*b2 + vmax := 1.0 + vcrit := bignum + for j := ki + 2; j < n; { + if j == n-1 || t[(j+1)*ldt+j] == 0 { + // 1×1 diagonal block. + + // Scale if necessary to avoid overflow + // when forming the right-hand side elements. 
+ if norms[j] > vcrit { + rec := 1 / vmax + bi.Dscal(n-ki, rec, b[ki*ldb+iv:], ldb) + bi.Dscal(n-ki, rec, b[ki*ldb+iv+1:], ldb) + vmax = 1 + } + b[j*ldb+iv] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j:], ldt, b[(ki+2)*ldb+iv:], ldb) + b[j*ldb+iv+1] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j:], ldt, b[(ki+2)*ldb+iv+1:], ldb) + // Solve [ T[j,j]-(wr-i*wi) ]*(X11+i*X12) = b1+i*b2. + scale, _, _ := impl.Dlaln2(false, 1, 2, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv:], ldb, wr, -wi, x[:2], 2) + // Scale if necessary. + if scale != 1 { + bi.Dscal(n-ki, scale, b[ki*ldb+iv:], ldb) + bi.Dscal(n-ki, scale, b[ki*ldb+iv+1:], ldb) + } + b[j*ldb+iv] = x[0] + b[j*ldb+iv+1] = x[1] + vmax = math.Max(vmax, math.Max(math.Abs(b[j*ldb+iv]), math.Abs(b[j*ldb+iv+1]))) + vcrit = bignum / vmax + j++ + } else { + // 2×2 diagonal block. + + // Scale if necessary to avoid overflow + // when forming the right-hand side elements. + if math.Max(norms[j], norms[j+1]) > vcrit { + rec := 1 / vmax + bi.Dscal(n-ki, rec, b[ki*ldb+iv:], ldb) + bi.Dscal(n-ki, rec, b[ki*ldb+iv+1:], ldb) + vmax = 1 + } + b[j*ldb+iv] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j:], ldt, b[(ki+2)*ldb+iv:], ldb) + b[j*ldb+iv+1] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j:], ldt, b[(ki+2)*ldb+iv+1:], ldb) + b[(j+1)*ldb+iv] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j+1:], ldt, b[(ki+2)*ldb+iv:], ldb) + b[(j+1)*ldb+iv+1] -= bi.Ddot(j-ki-2, t[(ki+2)*ldt+j+1:], ldt, b[(ki+2)*ldb+iv+1:], ldb) + // Solve 2×2 complex linear equation + // [ [T[j,j] T[j,j+1] ]^T - (wr-i*wi)*I ]*X = scale*b + // [ [T[j+1,j] T[j+1,j+1]] ] + scale, _, _ := impl.Dlaln2(true, 2, 2, smin, 1, t[j*ldt+j:], ldt, + 1, 1, b[j*ldb+iv:], ldb, wr, -wi, x[:], 2) + // Scale if necessary. + if scale != 1 { + bi.Dscal(n-ki, scale, b[ki*ldb+iv:], ldb) + bi.Dscal(n-ki, scale, b[ki*ldb+iv+1:], ldb) + } + b[j*ldb+iv] = x[0] + b[j*ldb+iv+1] = x[1] + b[(j+1)*ldb+iv] = x[2] + b[(j+1)*ldb+iv+1] = x[3] + vmax01 := math.Max(math.Abs(x[0]), math.Abs(x[1])) + vmax23 := math.Max(math.Abs(x[2]), math.Abs(x[3])) + vmax = math.Max(vmax, math.Max(vmax01, vmax23)) + vcrit = bignum / vmax + j += 2 + } + } + // Copy the vector x or Q*x to VL and normalize. + switch { + case howmny != lapack.EVAllMulQ: + // No back-transform: copy x to VL and normalize. + bi.Dcopy(n-ki, b[ki*ldb+iv:], ldb, vl[ki*ldvl+is:], ldvl) + bi.Dcopy(n-ki, b[ki*ldb+iv+1:], ldb, vl[ki*ldvl+is+1:], ldvl) + emax := 0.0 + for k := ki; k < n; k++ { + emax = math.Max(emax, math.Abs(vl[k*ldvl+is])+math.Abs(vl[k*ldvl+is+1])) + } + remax := 1 / emax + bi.Dscal(n-ki, remax, vl[ki*ldvl+is:], ldvl) + bi.Dscal(n-ki, remax, vl[ki*ldvl+is+1:], ldvl) + for k := 0; k < ki; k++ { + vl[k*ldvl+is] = 0 + vl[k*ldvl+is+1] = 0 + } + case nb == 1: + // Version 1: back-transform each vector with GEMV, Q*x. + if n-ki-2 > 0 { + bi.Dgemv(blas.NoTrans, n, n-ki-2, + 1, vl[ki+2:], ldvl, b[(ki+2)*ldb+iv:], ldb, + b[ki*ldb+iv], vl[ki:], ldvl) + bi.Dgemv(blas.NoTrans, n, n-ki-2, + 1, vl[ki+2:], ldvl, b[(ki+2)*ldb+iv+1:], ldb, + b[(ki+1)*ldb+iv+1], vl[ki+1:], ldvl) + } else { + bi.Dscal(n, b[ki*ldb+iv], vl[ki:], ldvl) + bi.Dscal(n, b[(ki+1)*ldb+iv+1], vl[ki+1:], ldvl) + } + emax := 0.0 + for k := 0; k < n; k++ { + emax = math.Max(emax, math.Abs(vl[k*ldvl+ki])+math.Abs(vl[k*ldvl+ki+1])) + } + remax := 1 / emax + bi.Dscal(n, remax, vl[ki:], ldvl) + bi.Dscal(n, remax, vl[ki+1:], ldvl) + default: + // Version 2: back-transform block of vectors with GEMM. + // Zero out above vector. + // Could go from ki-nv+1 to ki-1. 
+ for k := 0; k < ki; k++ { + b[k*ldb+iv] = 0 + b[k*ldb+iv+1] = 0 + } + iscomplex[iv] = ip + iscomplex[iv+1] = -ip + iv++ + // Back-transform and normalization is done below. + } + } + if nb > 1 { + // Blocked version of back-transform. + // For complex case, ki2 includes both vectors ki and ki+1. + ki2 := ki + if ip != 0 { + ki2++ + } + // Columns [0:iv] of work are valid vectors. When the + // number of vectors stored reaches nb-1 or nb, or if + // this was last vector, do the Gemm. + if iv >= nb-2 || ki2 == n-1 { + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, iv+1, n-ki2+iv, + 1, vl[ki2-iv:], ldvl, b[(ki2-iv)*ldb:], ldb, + 0, b[nb:], ldb) + // Normalize vectors. + var remax float64 + for k := 0; k <= iv; k++ { + if iscomplex[k] == 0 { + // Real eigenvector. + ii := bi.Idamax(n, b[nb+k:], ldb) + remax = 1 / math.Abs(b[ii*ldb+nb+k]) + } else if iscomplex[k] == 1 { + // First eigenvector of conjugate pair. + emax := 0.0 + for ii := 0; ii < n; ii++ { + emax = math.Max(emax, math.Abs(b[ii*ldb+nb+k])+math.Abs(b[ii*ldb+nb+k+1])) + } + remax = 1 / emax + // Second eigenvector of conjugate pair + // will reuse this value of remax. + } + bi.Dscal(n, remax, b[nb+k:], ldb) + } + impl.Dlacpy(blas.All, n, iv+1, b[nb:], ldb, vl[ki2-iv:], ldvl) + iv = 0 + } else { + iv++ + } + } + is++ + if ip != 0 { + is++ + } + } + + return m +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrexc.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrexc.go new file mode 100644 index 0000000..9f3f90b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrexc.go @@ -0,0 +1,230 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/lapack" + +// Dtrexc reorders the real Schur factorization of a n×n real matrix +// A = Q*T*Q^T +// so that the diagonal block of T with row index ifst is moved to row ilst. +// +// On entry, T must be in Schur canonical form, that is, block upper triangular +// with 1×1 and 2×2 diagonal blocks; each 2×2 diagonal block has its diagonal +// elements equal and its off-diagonal elements of opposite sign. +// +// On return, T will be reordered by an orthogonal similarity transformation Z +// as Z^T*T*Z, and will be again in Schur canonical form. +// +// If compq is lapack.UpdateSchur, on return the matrix Q of Schur vectors will be +// updated by post-multiplying it with Z. +// If compq is lapack.UpdateSchurNone, the matrix Q is not referenced and will not be +// updated. +// For other values of compq Dtrexc will panic. +// +// ifst and ilst specify the reordering of the diagonal blocks of T. The block +// with row index ifst is moved to row ilst, by a sequence of transpositions +// between adjacent blocks. +// +// If ifst points to the second row of a 2×2 block, ifstOut will point to the +// first row, otherwise it will be equal to ifst. +// +// ilstOut will point to the first row of the block in its final position. If ok +// is true, ilstOut may differ from ilst by +1 or -1. +// +// It must hold that +// 0 <= ifst < n, and 0 <= ilst < n, +// otherwise Dtrexc will panic. +// +// If ok is false, two adjacent blocks were too close to swap because the +// problem is very ill-conditioned. T may have been partially reordered, and +// ilstOut will point to the first row of the block at the position to which it +// has been moved. +// +// work must have length at least n, otherwise Dtrexc will panic. +// +// Dtrexc is an internal routine. 
It is exported for testing purposes. +func (impl Implementation) Dtrexc(compq lapack.UpdateSchurComp, n int, t []float64, ldt int, q []float64, ldq int, ifst, ilst int, work []float64) (ifstOut, ilstOut int, ok bool) { + switch { + case compq != lapack.UpdateSchur && compq != lapack.UpdateSchurNone: + panic(badUpdateSchurComp) + case n < 0: + panic(nLT0) + case ldt < max(1, n): + panic(badLdT) + case ldq < 1, compq == lapack.UpdateSchur && ldq < n: + panic(badLdQ) + case (ifst < 0 || n <= ifst) && n > 0: + panic(badIfst) + case (ilst < 0 || n <= ilst) && n > 0: + panic(badIlst) + } + + // Quick return if possible. + if n == 0 { + return ifst, ilst, true + } + + switch { + case len(t) < (n-1)*ldt+n: + panic(shortT) + case compq == lapack.UpdateSchur && len(q) < (n-1)*ldq+n: + panic(shortQ) + case len(work) < n: + panic(shortWork) + } + + // Quick return if possible. + if n == 1 { + return ifst, ilst, true + } + + // Determine the first row of specified block + // and find out it is 1×1 or 2×2. + if ifst > 0 && t[ifst*ldt+ifst-1] != 0 { + ifst-- + } + nbf := 1 // Size of the first block. + if ifst+1 < n && t[(ifst+1)*ldt+ifst] != 0 { + nbf = 2 + } + // Determine the first row of the final block + // and find out it is 1×1 or 2×2. + if ilst > 0 && t[ilst*ldt+ilst-1] != 0 { + ilst-- + } + nbl := 1 // Size of the last block. + if ilst+1 < n && t[(ilst+1)*ldt+ilst] != 0 { + nbl = 2 + } + + ok = true + wantq := compq == lapack.UpdateSchur + + switch { + case ifst == ilst: + return ifst, ilst, true + + case ifst < ilst: + // Update ilst. + switch { + case nbf == 2 && nbl == 1: + ilst-- + case nbf == 1 && nbl == 2: + ilst++ + } + here := ifst + for here < ilst { + // Swap block with next one below. + if nbf == 1 || nbf == 2 { + // Current block either 1×1 or 2×2. + nbnext := 1 // Size of the next block. + if here+nbf+1 < n && t[(here+nbf+1)*ldt+here+nbf] != 0 { + nbnext = 2 + } + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, nbf, nbnext, work) + if !ok { + return ifst, here, false + } + here += nbnext + // Test if 2×2 block breaks into two 1×1 blocks. + if nbf == 2 && t[(here+1)*ldt+here] == 0 { + nbf = 3 + } + continue + } + + // Current block consists of two 1×1 blocks each of + // which must be swapped individually. + nbnext := 1 // Size of the next block. + if here+3 < n && t[(here+3)*ldt+here+2] != 0 { + nbnext = 2 + } + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here+1, 1, nbnext, work) + if !ok { + return ifst, here, false + } + if nbnext == 1 { + // Swap two 1×1 blocks, no problems possible. + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, 1, nbnext, work) + here++ + continue + } + // Recompute nbnext in case 2×2 split. + if t[(here+2)*ldt+here+1] == 0 { + nbnext = 1 + } + if nbnext == 2 { + // 2×2 block did not split. + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, 1, nbnext, work) + if !ok { + return ifst, here, false + } + } else { + // 2×2 block did split. + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, 1, 1, work) + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here+1, 1, 1, work) + } + here += 2 + } + return ifst, here, true + + default: // ifst > ilst + here := ifst + for here > ilst { + // Swap block with next one above. + if nbf == 1 || nbf == 2 { + // Current block either 1×1 or 2×2. + nbnext := 1 + if here-2 >= 0 && t[(here-1)*ldt+here-2] != 0 { + nbnext = 2 + } + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here-nbnext, nbnext, nbf, work) + if !ok { + return ifst, here, false + } + here -= nbnext + // Test if 2×2 block breaks into two 1×1 blocks. 
+ if nbf == 2 && t[(here+1)*ldt+here] == 0 { + nbf = 3 + } + continue + } + + // Current block consists of two 1×1 blocks each of + // which must be swapped individually. + nbnext := 1 + if here-2 >= 0 && t[(here-1)*ldt+here-2] != 0 { + nbnext = 2 + } + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here-nbnext, nbnext, 1, work) + if !ok { + return ifst, here, false + } + if nbnext == 1 { + // Swap two 1×1 blocks, no problems possible. + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, nbnext, 1, work) + here-- + continue + } + // Recompute nbnext in case 2×2 split. + if t[here*ldt+here-1] == 0 { + nbnext = 1 + } + if nbnext == 2 { + // 2×2 block did not split. + ok = impl.Dlaexc(wantq, n, t, ldt, q, ldq, here-1, 2, 1, work) + if !ok { + return ifst, here, false + } + } else { + // 2×2 block did split. + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here, 1, 1, work) + impl.Dlaexc(wantq, n, t, ldt, q, ldq, here-1, 1, 1, work) + } + here -= 2 + } + return ifst, here, true + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrti2.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrti2.go new file mode 100644 index 0000000..efc24b6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrti2.go @@ -0,0 +1,69 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dtrti2 computes the inverse of a triangular matrix, storing the result in place +// into a. This is the BLAS level 2 version of the algorithm. +// +// Dtrti2 is an internal routine. It is exported for testing purposes. +func (impl Implementation) Dtrti2(uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case diag != blas.NonUnit && diag != blas.Unit: + panic(badDiag) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 { + return + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + bi := blas64.Implementation() + + nonUnit := diag == blas.NonUnit + // TODO(btracey): Replace this with a row-major ordering. + if uplo == blas.Upper { + for j := 0; j < n; j++ { + var ajj float64 + if nonUnit { + ajj = 1 / a[j*lda+j] + a[j*lda+j] = ajj + ajj *= -1 + } else { + ajj = -1 + } + bi.Dtrmv(blas.Upper, blas.NoTrans, diag, j, a, lda, a[j:], lda) + bi.Dscal(j, ajj, a[j:], lda) + } + return + } + for j := n - 1; j >= 0; j-- { + var ajj float64 + if nonUnit { + ajj = 1 / a[j*lda+j] + a[j*lda+j] = ajj + ajj *= -1 + } else { + ajj = -1 + } + if j < n-1 { + bi.Dtrmv(blas.Lower, blas.NoTrans, diag, n-j-1, a[(j+1)*lda+j+1:], lda, a[(j+1)*lda+j:], lda) + bi.Dscal(n-j-1, ajj, a[(j+1)*lda+j:], lda) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtri.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtri.go new file mode 100644 index 0000000..6ec3663 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtri.go @@ -0,0 +1,72 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dtrtri computes the inverse of a triangular matrix, storing the result in place +// into a. 
This is the BLAS level 3 version of the algorithm which builds upon +// Dtrti2 to operate on matrix blocks instead of only individual columns. +// +// Dtrtri will not perform the inversion if the matrix is singular, and returns +// a boolean indicating whether the inversion was successful. +func (impl Implementation) Dtrtri(uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int) (ok bool) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case diag != blas.NonUnit && diag != blas.Unit: + panic(badDiag) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 { + return true + } + + if len(a) < (n-1)*lda+n { + panic(shortA) + } + + if diag == blas.NonUnit { + for i := 0; i < n; i++ { + if a[i*lda+i] == 0 { + return false + } + } + } + + bi := blas64.Implementation() + + nb := impl.Ilaenv(1, "DTRTRI", "UD", n, -1, -1, -1) + if nb <= 1 || nb > n { + impl.Dtrti2(uplo, diag, n, a, lda) + return true + } + if uplo == blas.Upper { + for j := 0; j < n; j += nb { + jb := min(nb, n-j) + bi.Dtrmm(blas.Left, blas.Upper, blas.NoTrans, diag, j, jb, 1, a, lda, a[j:], lda) + bi.Dtrsm(blas.Right, blas.Upper, blas.NoTrans, diag, j, jb, -1, a[j*lda+j:], lda, a[j:], lda) + impl.Dtrti2(blas.Upper, diag, jb, a[j*lda+j:], lda) + } + return true + } + nn := ((n - 1) / nb) * nb + for j := nn; j >= 0; j -= nb { + jb := min(nb, n-j) + if j+jb <= n-1 { + bi.Dtrmm(blas.Left, blas.Lower, blas.NoTrans, diag, n-j-jb, jb, 1, a[(j+jb)*lda+j+jb:], lda, a[(j+jb)*lda+j:], lda) + bi.Dtrsm(blas.Right, blas.Lower, blas.NoTrans, diag, n-j-jb, jb, -1, a[j*lda+j:], lda, a[(j+jb)*lda+j:], lda) + } + impl.Dtrti2(blas.Lower, diag, jb, a[j*lda+j:], lda) + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtrs.go b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtrs.go new file mode 100644 index 0000000..1752dc5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/dtrtrs.go @@ -0,0 +1,55 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +// Dtrtrs solves a triangular system of the form A * X = B or A^T * X = B. Dtrtrs +// returns whether the solve completed successfully. If A is singular, no solve is performed. +func (impl Implementation) Dtrtrs(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, nrhs int, a []float64, lda int, b []float64, ldb int) (ok bool) { + switch { + case uplo != blas.Upper && uplo != blas.Lower: + panic(badUplo) + case trans != blas.NoTrans && trans != blas.Trans && trans != blas.ConjTrans: + panic(badTrans) + case diag != blas.NonUnit && diag != blas.Unit: + panic(badDiag) + case n < 0: + panic(nLT0) + case nrhs < 0: + panic(nrhsLT0) + case lda < max(1, n): + panic(badLdA) + case ldb < max(1, nrhs): + panic(badLdB) + } + + if n == 0 { + return true + } + + switch { + case len(a) < (n-1)*lda+n: + panic(shortA) + case len(b) < (n-1)*ldb+nrhs: + panic(shortB) + } + + // Check for singularity. 
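+	// A unit triangular matrix is never reported as singular: its diagonal
+	// is implicitly 1 and is not referenced, so only the non-unit case is
+	// inspected for zero diagonal entries.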
+ nounit := diag == blas.NonUnit + if nounit { + for i := 0; i < n; i++ { + if a[i*lda+i] == 0 { + return false + } + } + } + bi := blas64.Implementation() + bi.Dtrsm(blas.Left, uplo, trans, diag, n, nrhs, 1, a, lda, b, ldb) + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/errors.go b/vendor/gonum.org/v1/gonum/lapack/gonum/errors.go new file mode 100644 index 0000000..3c0cb68 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/errors.go @@ -0,0 +1,174 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +// This list is duplicated in netlib/lapack/netlib. Keep in sync. +const ( + // Panic strings for bad enumeration values. + badApplyOrtho = "lapack: bad ApplyOrtho" + badBalanceJob = "lapack: bad BalanceJob" + badDiag = "lapack: bad Diag" + badDirect = "lapack: bad Direct" + badEVComp = "lapack: bad EVComp" + badEVHowMany = "lapack: bad EVHowMany" + badEVJob = "lapack: bad EVJob" + badEVSide = "lapack: bad EVSide" + badGSVDJob = "lapack: bad GSVDJob" + badGenOrtho = "lapack: bad GenOrtho" + badLeftEVJob = "lapack: bad LeftEVJob" + badMatrixType = "lapack: bad MatrixType" + badNorm = "lapack: bad Norm" + badPivot = "lapack: bad Pivot" + badRightEVJob = "lapack: bad RightEVJob" + badSVDJob = "lapack: bad SVDJob" + badSchurComp = "lapack: bad SchurComp" + badSchurJob = "lapack: bad SchurJob" + badSide = "lapack: bad Side" + badSort = "lapack: bad Sort" + badStoreV = "lapack: bad StoreV" + badTrans = "lapack: bad Trans" + badUpdateSchurComp = "lapack: bad UpdateSchurComp" + badUplo = "lapack: bad Uplo" + bothSVDOver = "lapack: both jobU and jobVT are lapack.SVDOverwrite" + + // Panic strings for bad numerical and string values. 
+ badIfst = "lapack: ifst out of range" + badIhi = "lapack: ihi out of range" + badIhiz = "lapack: ihiz out of range" + badIlo = "lapack: ilo out of range" + badIloz = "lapack: iloz out of range" + badIlst = "lapack: ilst out of range" + badIsave = "lapack: bad isave value" + badIspec = "lapack: bad ispec value" + badJ1 = "lapack: j1 out of range" + badJpvt = "lapack: bad element of jpvt" + badK1 = "lapack: k1 out of range" + badK2 = "lapack: k2 out of range" + badKacc22 = "lapack: invalid value of kacc22" + badKbot = "lapack: kbot out of range" + badKtop = "lapack: ktop out of range" + badLWork = "lapack: insufficient declared workspace length" + badMm = "lapack: mm out of range" + badN1 = "lapack: bad value of n1" + badN2 = "lapack: bad value of n2" + badNa = "lapack: bad value of na" + badName = "lapack: bad name" + badNh = "lapack: bad value of nh" + badNw = "lapack: bad value of nw" + badPp = "lapack: bad value of pp" + badShifts = "lapack: bad shifts" + i0LT0 = "lapack: i0 < 0" + kGTM = "lapack: k > m" + kGTN = "lapack: k > n" + kLT0 = "lapack: k < 0" + kLT1 = "lapack: k < 1" + kdLT0 = "lapack: kd < 0" + mGTN = "lapack: m > n" + mLT0 = "lapack: m < 0" + mmLT0 = "lapack: mm < 0" + n0LT0 = "lapack: n0 < 0" + nGTM = "lapack: n > m" + nLT0 = "lapack: n < 0" + nLT1 = "lapack: n < 1" + nLTM = "lapack: n < m" + nanCFrom = "lapack: cfrom is NaN" + nanCTo = "lapack: cto is NaN" + nbGTM = "lapack: nb > m" + nbGTN = "lapack: nb > n" + nbLT0 = "lapack: nb < 0" + nccLT0 = "lapack: ncc < 0" + ncvtLT0 = "lapack: ncvt < 0" + negANorm = "lapack: anorm < 0" + negZ = "lapack: negative z value" + nhLT0 = "lapack: nh < 0" + notIsolated = "lapack: block is not isolated" + nrhsLT0 = "lapack: nrhs < 0" + nruLT0 = "lapack: nru < 0" + nshftsLT0 = "lapack: nshfts < 0" + nshftsOdd = "lapack: nshfts must be even" + nvLT0 = "lapack: nv < 0" + offsetGTM = "lapack: offset > m" + offsetLT0 = "lapack: offset < 0" + pLT0 = "lapack: p < 0" + recurLT0 = "lapack: recur < 0" + zeroCFrom = "lapack: zero cfrom" + + // Panic strings for bad slice lengths. + badLenAlpha = "lapack: bad length of alpha" + badLenBeta = "lapack: bad length of beta" + badLenIpiv = "lapack: bad length of ipiv" + badLenJpvt = "lapack: bad length of jpvt" + badLenK = "lapack: bad length of k" + badLenSelected = "lapack: bad length of selected" + badLenSi = "lapack: bad length of si" + badLenSr = "lapack: bad length of sr" + badLenTau = "lapack: bad length of tau" + badLenWi = "lapack: bad length of wi" + badLenWr = "lapack: bad length of wr" + + // Panic strings for insufficient slice lengths. 
+ shortA = "lapack: insufficient length of a" + shortAB = "lapack: insufficient length of ab" + shortAuxv = "lapack: insufficient length of auxv" + shortB = "lapack: insufficient length of b" + shortC = "lapack: insufficient length of c" + shortCNorm = "lapack: insufficient length of cnorm" + shortD = "lapack: insufficient length of d" + shortE = "lapack: insufficient length of e" + shortF = "lapack: insufficient length of f" + shortH = "lapack: insufficient length of h" + shortIWork = "lapack: insufficient length of iwork" + shortIsgn = "lapack: insufficient length of isgn" + shortQ = "lapack: insufficient length of q" + shortS = "lapack: insufficient length of s" + shortScale = "lapack: insufficient length of scale" + shortT = "lapack: insufficient length of t" + shortTau = "lapack: insufficient length of tau" + shortTauP = "lapack: insufficient length of tauP" + shortTauQ = "lapack: insufficient length of tauQ" + shortU = "lapack: insufficient length of u" + shortV = "lapack: insufficient length of v" + shortVL = "lapack: insufficient length of vl" + shortVR = "lapack: insufficient length of vr" + shortVT = "lapack: insufficient length of vt" + shortVn1 = "lapack: insufficient length of vn1" + shortVn2 = "lapack: insufficient length of vn2" + shortW = "lapack: insufficient length of w" + shortWH = "lapack: insufficient length of wh" + shortWV = "lapack: insufficient length of wv" + shortWi = "lapack: insufficient length of wi" + shortWork = "lapack: insufficient length of work" + shortWr = "lapack: insufficient length of wr" + shortX = "lapack: insufficient length of x" + shortY = "lapack: insufficient length of y" + shortZ = "lapack: insufficient length of z" + + // Panic strings for bad leading dimensions of matrices. + badLdA = "lapack: bad leading dimension of A" + badLdB = "lapack: bad leading dimension of B" + badLdC = "lapack: bad leading dimension of C" + badLdF = "lapack: bad leading dimension of F" + badLdH = "lapack: bad leading dimension of H" + badLdQ = "lapack: bad leading dimension of Q" + badLdT = "lapack: bad leading dimension of T" + badLdU = "lapack: bad leading dimension of U" + badLdV = "lapack: bad leading dimension of V" + badLdVL = "lapack: bad leading dimension of VL" + badLdVR = "lapack: bad leading dimension of VR" + badLdVT = "lapack: bad leading dimension of VT" + badLdW = "lapack: bad leading dimension of W" + badLdWH = "lapack: bad leading dimension of WH" + badLdWV = "lapack: bad leading dimension of WV" + badLdWork = "lapack: bad leading dimension of Work" + badLdX = "lapack: bad leading dimension of X" + badLdY = "lapack: bad leading dimension of Y" + badLdZ = "lapack: bad leading dimension of Z" + + // Panic strings for bad vector increments. + absIncNotOne = "lapack: increment not one or negative one" + badIncX = "lapack: incX <= 0" + badIncY = "lapack: incY <= 0" + zeroIncV = "lapack: incv == 0" +) diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/iladlc.go b/vendor/gonum.org/v1/gonum/lapack/gonum/iladlc.go new file mode 100644 index 0000000..b251d72 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/iladlc.go @@ -0,0 +1,45 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +// Iladlc scans a matrix for its last non-zero column. Returns -1 if the matrix +// is all zeros. +// +// Iladlc is an internal routine. It is exported for testing purposes. 
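As an illustrative aside (not part of the vendored sources), a minimal sketch of how Iladlc and its row counterpart Iladlr, defined just below, behave on a small row-major matrix; it assumes the vendored package resolves under its upstream import path:

package main

import (
	"fmt"

	lapackgonum "gonum.org/v1/gonum/lapack/gonum"
)

func main() {
	impl := lapackgonum.Implementation{}
	// 3×4 row-major matrix whose last non-zero entry sits at row 1, column 2.
	a := []float64{
		1, 0, 0, 0,
		0, 2, 3, 0,
		0, 0, 0, 0,
	}
	fmt.Println(impl.Iladlc(3, 4, a, 4)) // 2 (last non-zero column)
	fmt.Println(impl.Iladlr(3, 4, a, 4)) // 1 (last non-zero row)
}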
+func (Implementation) Iladlc(m, n int, a []float64, lda int) int { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 || m == 0 { + return -1 + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + + // Test common case where corner is non-zero. + if a[n-1] != 0 || a[(m-1)*lda+(n-1)] != 0 { + return n - 1 + } + + // Scan each row tracking the highest column seen. + highest := -1 + for i := 0; i < m; i++ { + for j := n - 1; j >= 0; j-- { + if a[i*lda+j] != 0 { + highest = max(highest, j) + break + } + } + } + return highest +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/iladlr.go b/vendor/gonum.org/v1/gonum/lapack/gonum/iladlr.go new file mode 100644 index 0000000..b73fe18 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/iladlr.go @@ -0,0 +1,41 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +// Iladlr scans a matrix for its last non-zero row. Returns -1 if the matrix +// is all zeros. +// +// Iladlr is an internal routine. It is exported for testing purposes. +func (Implementation) Iladlr(m, n int, a []float64, lda int) int { + switch { + case m < 0: + panic(mLT0) + case n < 0: + panic(nLT0) + case lda < max(1, n): + panic(badLdA) + } + + if n == 0 || m == 0 { + return -1 + } + + if len(a) < (m-1)*lda+n { + panic(shortA) + } + + // Check the common case where the corner is non-zero + if a[(m-1)*lda] != 0 || a[(m-1)*lda+n-1] != 0 { + return m - 1 + } + for i := m - 1; i >= 0; i-- { + for j := 0; j < n; j++ { + if a[i*lda+j] != 0 { + return i + } + } + } + return -1 +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/ilaenv.go b/vendor/gonum.org/v1/gonum/lapack/gonum/ilaenv.go new file mode 100644 index 0000000..c134d21 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/ilaenv.go @@ -0,0 +1,387 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +// Ilaenv returns algorithm tuning parameters for the algorithm given by the +// input string. ispec specifies the parameter to return: +// 1: The optimal block size for a blocked algorithm. +// 2: The minimum block size for a blocked algorithm. +// 3: The block size of unprocessed data at which a blocked algorithm should +// crossover to an unblocked version. +// 4: The number of shifts. +// 5: The minimum column dimension for blocking to be used. +// 6: The crossover point for SVD (to use QR factorization or not). +// 7: The number of processors. +// 8: The crossover point for multi-shift in QR and QZ methods for non-symmetric eigenvalue problems. +// 9: Maximum size of the subproblems in divide-and-conquer algorithms. +// 10: ieee NaN arithmetic can be trusted not to trap. +// 11: infinity arithmetic can be trusted not to trap. +// 12...16: parameters for Dhseqr and related functions. See Iparmq for more +// information. +// +// Ilaenv is an internal routine. It is exported for testing purposes. +func (impl Implementation) Ilaenv(ispec int, name string, opts string, n1, n2, n3, n4 int) int { + // TODO(btracey): Replace this with a constant lookup? A list of constants? 
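+	// As a concrete example of how this lookup is used, the query made by
+	// Dtrtri earlier in this patch,
+	//	impl.Ilaenv(1, "DTRTRI", "UD", n, -1, -1, -1)
+	// resolves through the "TR"/"TRI" branch of ispec == 1 below and returns
+	// 64, so Dtrtri only takes its blocked code path when n >= 64.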
+ sname := name[0] == 'S' || name[0] == 'D' + cname := name[0] == 'C' || name[0] == 'Z' + if !sname && !cname { + panic(badName) + } + c2 := name[1:3] + c3 := name[3:6] + c4 := c3[1:3] + + switch ispec { + default: + panic(badIspec) + case 1: + switch c2 { + default: + panic(badName) + case "GE": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + return 64 + } + return 64 + case "QRF", "RQF", "LQF", "QLF": + if sname { + return 32 + } + return 32 + case "HRD": + if sname { + return 32 + } + return 32 + case "BRD": + if sname { + return 32 + } + return 32 + case "TRI": + if sname { + return 64 + } + return 64 + } + case "PO": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + return 64 + } + return 64 + } + case "SY": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + return 64 + } + return 64 + case "TRD": + return 32 + case "GST": + return 64 + } + case "HE": + switch c3 { + default: + panic(badName) + case "TRF": + return 64 + case "TRD": + return 32 + case "GST": + return 64 + } + case "OR": + switch c3[0] { + default: + panic(badName) + case 'G': + switch c3[1:] { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 32 + } + case 'M': + switch c3[1:] { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 32 + } + } + case "UN": + switch c3[0] { + default: + panic(badName) + case 'G': + switch c3[1:] { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 32 + } + case 'M': + switch c3[1:] { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 32 + } + } + case "GB": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + if n4 <= 64 { + return 1 + } + return 32 + } + if n4 <= 64 { + return 1 + } + return 32 + } + case "PB": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + if n4 <= 64 { + return 1 + } + return 32 + } + if n4 <= 64 { + return 1 + } + return 32 + } + case "TR": + switch c3 { + default: + panic(badName) + case "TRI": + if sname { + return 64 + } + return 64 + case "EVC": + if sname { + return 64 + } + return 64 + } + case "LA": + switch c3 { + default: + panic(badName) + case "UUM": + if sname { + return 64 + } + return 64 + } + case "ST": + if sname && c3 == "EBZ" { + return 1 + } + panic(badName) + } + case 2: + switch c2 { + default: + panic(badName) + case "GE": + switch c3 { + default: + panic(badName) + case "QRF", "RQF", "LQF", "QLF": + if sname { + return 2 + } + return 2 + case "HRD": + if sname { + return 2 + } + return 2 + case "BRD": + if sname { + return 2 + } + return 2 + case "TRI": + if sname { + return 2 + } + return 2 + } + case "SY": + switch c3 { + default: + panic(badName) + case "TRF": + if sname { + return 8 + } + return 8 + case "TRD": + if sname { + return 2 + } + panic(badName) + } + case "HE": + if c3 == "TRD" { + return 2 + } + panic(badName) + case "OR": + if !sname { + panic(badName) + } + switch c3[0] { + default: + panic(badName) + case 'G': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 2 + } + case 'M': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 2 + } + } + case "UN": + switch c3[0] { + default: + panic(badName) + case 'G': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 2 + } + case 'M': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", 
"QL", "HR", "TR", "BR": + return 2 + } + } + } + case 3: + switch c2 { + default: + panic(badName) + case "GE": + switch c3 { + default: + panic(badName) + case "QRF", "RQF", "LQF", "QLF": + if sname { + return 128 + } + return 128 + case "HRD": + if sname { + return 128 + } + return 128 + case "BRD": + if sname { + return 128 + } + return 128 + } + case "SY": + if sname && c3 == "TRD" { + return 32 + } + panic(badName) + case "HE": + if c3 == "TRD" { + return 32 + } + panic(badName) + case "OR": + switch c3[0] { + default: + panic(badName) + case 'G': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 128 + } + } + case "UN": + switch c3[0] { + default: + panic(badName) + case 'G': + switch c4 { + default: + panic(badName) + case "QR", "RQ", "LQ", "QL", "HR", "TR", "BR": + return 128 + } + } + } + case 4: + // Used by xHSEQR + return 6 + case 5: + // Not used + return 2 + case 6: + // Used by xGELSS and xGESVD + return int(float64(min(n1, n2)) * 1.6) + case 7: + // Not used + return 1 + case 8: + // Used by xHSEQR + return 50 + case 9: + // used by xGELSD and xGESDD + return 25 + case 10: + // Go guarantees ieee + return 1 + case 11: + // Go guarantees ieee + return 1 + case 12, 13, 14, 15, 16: + // Dhseqr and related functions for eigenvalue problems. + return impl.Iparmq(ispec, name, opts, n1, n2, n3, n4) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/iparmq.go b/vendor/gonum.org/v1/gonum/lapack/gonum/iparmq.go new file mode 100644 index 0000000..3800f11 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/iparmq.go @@ -0,0 +1,115 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "math" + +// Iparmq returns problem and machine dependent parameters useful for Dhseqr and +// related subroutines for eigenvalue problems. +// +// ispec specifies the parameter to return: +// 12: Crossover point between Dlahqr and Dlaqr0. Will be at least 11. +// 13: Deflation window size. +// 14: Nibble crossover point. Determines when to skip a multi-shift QR sweep. +// 15: Number of simultaneous shifts in a multishift QR iteration. +// 16: Select structured matrix multiply. +// For other values of ispec Iparmq will panic. +// +// name is the name of the calling function. name must be in uppercase but this +// is not checked. +// +// opts is not used and exists for future use. +// +// n is the order of the Hessenberg matrix H. +// +// ilo and ihi specify the block [ilo:ihi+1,ilo:ihi+1] that is being processed. +// +// lwork is the amount of workspace available. +// +// Except for ispec input parameters are not checked. +// +// Iparmq is an internal routine. It is exported for testing purposes. +func (Implementation) Iparmq(ispec int, name, opts string, n, ilo, ihi, lwork int) int { + nh := ihi - ilo + 1 + ns := 2 + switch { + case nh >= 30: + ns = 4 + case nh >= 60: + ns = 10 + case nh >= 150: + ns = max(10, nh/int(math.Log(float64(nh))/math.Ln2)) + case nh >= 590: + ns = 64 + case nh >= 3000: + ns = 128 + case nh >= 6000: + ns = 256 + } + ns = max(2, ns-(ns%2)) + + switch ispec { + default: + panic(badIspec) + + case 12: + // Matrices of order smaller than nmin get sent to Dlahqr, the + // classic double shift algorithm. This must be at least 11. 
+ const nmin = 75 + return nmin + + case 13: + const knwswp = 500 + if nh <= knwswp { + return ns + } + return 3 * ns / 2 + + case 14: + // Skip a computationally expensive multi-shift QR sweep with + // Dlaqr5 whenever aggressive early deflation finds at least + // nibble*(window size)/100 deflations. The default, small, + // value reflects the expectation that the cost of looking + // through the deflation window with Dlaqr3 will be + // substantially smaller. + const nibble = 14 + return nibble + + case 15: + return ns + + case 16: + if len(name) != 6 { + panic(badName) + } + const ( + k22min = 14 + kacmin = 14 + ) + var acc22 int + switch { + case name[1:] == "GGHRD" || name[1:] == "GGHD3": + acc22 = 1 + if nh >= k22min { + acc22 = 2 + } + case name[3:] == "EXC": + if nh >= kacmin { + acc22 = 1 + } + if nh >= k22min { + acc22 = 2 + } + case name[1:] == "HSEQR" || name[1:5] == "LAQR": + if ns >= kacmin { + acc22 = 1 + } + if ns >= k22min { + acc22 = 2 + } + } + return acc22 + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/gonum/lapack.go b/vendor/gonum.org/v1/gonum/lapack/gonum/lapack.go new file mode 100644 index 0000000..950db32 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/gonum/lapack.go @@ -0,0 +1,55 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import "gonum.org/v1/gonum/lapack" + +// Implementation is the native Go implementation of LAPACK routines. It +// is built on top of calls to the return of blas64.Implementation(), so while +// this code is in pure Go, the underlying BLAS implementation may not be. +type Implementation struct{} + +var _ lapack.Float64 = Implementation{} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func abs(a int) int { + if a < 0 { + return -a + } + return a +} + +const ( + // dlamchE is the machine epsilon. For IEEE this is 2^{-53}. + dlamchE = 1.0 / (1 << 53) + // TODO(kortschak) Replace this with 0x1p-53 when go1.12 is no + // longer supported. + + // dlamchB is the radix of the machine (the base of the number system). + dlamchB = 2 + + // dlamchP is base * eps. + dlamchP = dlamchB * dlamchE + + // dlamchS is the "safe minimum", that is, the lowest number such that + // 1/dlamchS does not overflow, or also the smallest normal number. + // For IEEE this is 2^{-1022}. + dlamchS = 1.0 / (1 << 256) / (1 << 256) / (1 << 256) / (1 << 254) + // TODO(kortschak) Replace this with 0x1p-1022 when go1.12 is no + // longer supported. +) diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlahr2test/main.go b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlahr2test/main.go new file mode 100644 index 0000000..22210a7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlahr2test/main.go @@ -0,0 +1,102 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// dlahr2test generates test data for Dlahr2. Test cases are stored in +// gzip-compressed JSON file testlapack/testdata/dlahr2data.json.gz which is +// read during testing by testlapack/dlahr2.go. +// +// This program uses cgo to call Fortran version of DLAHR2. Therefore, matrices +// passed to the Fortran routine are in column-major format but are written into +// the output file in row-major format. 
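As a quick, illustrative cross-check (not part of the vendored sources) that the dlamchE and dlamchS constants defined in lapack.go above equal the IEEE-754 double-precision machine epsilon 2^-53 and smallest normal number 2^-1022, the same constant expressions can be compared against hexadecimal floating-point literals (Go 1.13+):

package main

import "fmt"

func main() {
	// Machine epsilon: 2^-53, built the same way as dlamchE above.
	eps := 1.0 / (1 << 53)
	// Smallest normal number: 2^-1022, built the same way as dlamchS above.
	sfmin := 1.0 / (1 << 256) / (1 << 256) / (1 << 256) / (1 << 254)
	fmt.Println(eps == 0x1p-53)     // true
	fmt.Println(sfmin == 0x1p-1022) // true
}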
+package main + +import ( + "compress/gzip" + "encoding/json" + "log" + "os" + "path/filepath" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/lapack/internal/testdata/netlib" +) + +type Dlahr2Test struct { + N, K, NB int + A []float64 + + AWant []float64 + TWant []float64 + YWant []float64 + TauWant []float64 +} + +func main() { + file, err := os.Create(filepath.FromSlash("../../../testlapack/testdata/dlahr2data.json.gz")) + if err != nil { + log.Fatal(err) + } + defer file.Close() + w := gzip.NewWriter(file) + + rnd := rand.New(rand.NewSource(1)) + + var tests []Dlahr2Test + for _, n := range []int{4, 5, 6, 7, 11} { + for k := 0; k <= n/2; k++ { + for nb := 1; nb <= k; nb++ { + ain := genrand(n, n-k+1, rnd) + a := make([]float64, len(ain)) + copy(a, ain) + + t := genrand(nb, nb, rnd) + y := genrand(n, nb, rnd) + tau := genrand(nb, 1, rnd) + + netlib.Dlahr2(n, k, nb, a, n, tau, t, nb, y, n) + + tests = append(tests, Dlahr2Test{ + N: n, + K: k, + NB: nb, + A: rowMajor(n, n-k+1, ain), + AWant: rowMajor(n, n-k+1, a), + TWant: rowMajor(nb, nb, t), + YWant: rowMajor(n, nb, y), + TauWant: tau, + }) + } + } + } + json.NewEncoder(w).Encode(tests) + + err = w.Close() + if err != nil { + log.Fatal(err) + } +} + +// genrand returns a general r×c matrix with random entries. +func genrand(r, c int, rnd *rand.Rand) []float64 { + m := make([]float64, r*c) + for i := range m { + m[i] = rnd.NormFloat64() + } + return m +} + +// rowMajor returns the given r×c column-major matrix a in row-major format. +func rowMajor(r, c int, a []float64) []float64 { + if len(a) != r*c { + panic("testdata: slice length mismatch") + } + m := make([]float64, len(a)) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + m[i*c+j] = a[i+j*r] + } + } + return m +} diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlaqr5test/main.go b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlaqr5test/main.go new file mode 100644 index 0000000..86c3323 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlaqr5test/main.go @@ -0,0 +1,186 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This program generates test data for Dlaqr5. Test cases are stored in +// gzip-compressed JSON file testlapack/testdata/dlaqr5data.json.gz which is +// read during testing by testlapack/dlaqr5.go. +// +// This program uses cgo to call Fortran version of DLAQR5. Therefore, matrices +// passed to the Fortran routine are in column-major format but are written into +// the output file in row-major format. 
+package main + +import ( + "compress/gzip" + "encoding/json" + "log" + "os" + "path/filepath" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/lapack/internal/testdata/netlib" +) + +type Dlaqr5Test struct { + WantT bool + N int + NShifts int + KTop, KBot int + ShiftR, ShiftI []float64 + H []float64 + + HWant []float64 + ZWant []float64 +} + +func main() { + file, err := os.Create(filepath.FromSlash("../../../testlapack/testdata/dlaqr5data.json.gz")) + if err != nil { + log.Fatal(err) + } + defer file.Close() + w := gzip.NewWriter(file) + + rnd := rand.New(rand.NewSource(1)) + + var tests []Dlaqr5Test + for _, wantt := range []bool{true, false} { + for _, n := range []int{2, 3, 4, 5, 6, 7, 11} { + for k := 0; k <= min(5, n); k++ { + npairs := k + if npairs == 0 { + npairs = 2 * n + } + for ktop := 0; ktop < n-1; ktop++ { + for kbot := ktop + 1; kbot < n; kbot++ { + sr, si := shiftpairs(npairs, rnd) + nshfts := len(sr) + + v := genrand(nshfts/2, 3, rnd) + u := genrand(3*nshfts-3, 3*nshfts-3, rnd) + wh := genrand(3*nshfts-3, n, rnd) + nh := n + wv := genrand(n, 3*nshfts-3, rnd) + nv := n + + h := hessrand(n, rnd) + if ktop > 0 { + h[ktop+(ktop-1)*n] = 0 + } + if kbot < n-1 { + h[kbot+1+kbot*n] = 0 + } + hin := make([]float64, len(h)) + copy(hin, h) + z := eye(n) + + netlib.Dlaqr5(wantt, true, 2, + n, ktop+1, kbot+1, + nshfts, sr, si, + h, n, + 1, n, z, n, + v, 3, + u, 3*nshfts-3, + nh, wh, nh, + nv, wv, 3*nshfts-3) + + tests = append(tests, Dlaqr5Test{ + WantT: wantt, + N: n, + NShifts: nshfts, + KTop: ktop, + KBot: kbot, + ShiftR: sr, + ShiftI: si, + H: rowMajor(n, n, hin), + HWant: rowMajor(n, n, h), + ZWant: rowMajor(n, n, z), + }) + } + } + } + } + } + json.NewEncoder(w).Encode(tests) + + err = w.Close() + if err != nil { + log.Fatal(err) + } +} + +// genrand returns a general r×c matrix with random entries. +func genrand(r, c int, rnd *rand.Rand) []float64 { + m := make([]float64, r*c) + for i := range m { + m[i] = rnd.NormFloat64() + } + return m +} + +// eye returns an identity matrix of order n. +func eye(n int) []float64 { + m := make([]float64, n*n) + for i := 0; i < n*n; i += n + 1 { + m[i] = 1 + } + return m +} + +// hessrand returns a Hessenberg matrix of order n with random non-zero entries +// in column-major format. +func hessrand(n int, rnd *rand.Rand) []float64 { + h := make([]float64, n*n) + for j := 0; j < n; j++ { + for i := 0; i <= min(j+1, n-1); i++ { + h[i+j*n] = rnd.NormFloat64() + } + } + return h +} + +// shiftpairs generates k real and complex conjugate shift pairs. That is, the +// length of sr and si is 2*k. +func shiftpairs(k int, rnd *rand.Rand) (sr, si []float64) { + sr = make([]float64, 2*k) + si = make([]float64, 2*k) + for i := 0; i < len(sr); { + if rnd.Float64() < 0.5 || i == len(sr)-1 { + sr[i] = rnd.NormFloat64() + i++ + continue + } + // Generate a complex conjugate pair. + r := rnd.NormFloat64() + c := rnd.NormFloat64() + sr[i] = r + si[i] = c + sr[i+1] = r + si[i+1] = -c + i += 2 + } + return sr, si +} + +// rowMajor returns the given r×c column-major matrix a in row-major format. 
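+// For example, the 2×3 column-major data {1, 4, 2, 5, 3, 6}, representing
+// the matrix [[1 2 3], [4 5 6]], is returned as {1, 2, 3, 4, 5, 6}.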
+func rowMajor(r, c int, a []float64) []float64 { + if len(a) != r*c { + panic("testdata: slice length mismatch") + } + m := make([]float64, len(a)) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + m[i*c+j] = a[i+j*r] + } + } + return m +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dcopy.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dcopy.f new file mode 100644 index 0000000..d9d5ac7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dcopy.f @@ -0,0 +1,115 @@ +*> \brief \b DCOPY +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DCOPY(N,DX,INCX,DY,INCY) +* +* .. Scalar Arguments .. +* INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION DX(*),DY(*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DCOPY copies a vector, x, to a vector, y. +*> uses unrolled loops for increments equal to one. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level1 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> jack dongarra, linpack, 3/11/78. +*> modified 12/3/93, array(1) declarations changed to array(*) +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DCOPY(N,DX,INCX,DY,INCY) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. + DOUBLE PRECISION DX(*),DY(*) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I,IX,IY,M,MP1 +* .. +* .. Intrinsic Functions .. + INTRINSIC MOD +* .. + IF (N.LE.0) RETURN + IF (INCX.EQ.1 .AND. INCY.EQ.1) THEN +* +* code for both increments equal to 1 +* +* +* clean-up loop +* + M = MOD(N,7) + IF (M.NE.0) THEN + DO I = 1,M + DY(I) = DX(I) + END DO + IF (N.LT.7) RETURN + END IF + MP1 = M + 1 + DO I = MP1,N,7 + DY(I) = DX(I) + DY(I+1) = DX(I+1) + DY(I+2) = DX(I+2) + DY(I+3) = DX(I+3) + DY(I+4) = DX(I+4) + DY(I+5) = DX(I+5) + DY(I+6) = DX(I+6) + END DO + ELSE +* +* code for unequal increments or equal increments +* not equal to 1 +* + IX = 1 + IY = 1 + IF (INCX.LT.0) IX = (-N+1)*INCX + 1 + IF (INCY.LT.0) IY = (-N+1)*INCY + 1 + DO I = 1,N + DY(IY) = DX(IX) + IX = IX + INCX + IY = IY + INCY + END DO + END IF + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/disnan.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/disnan.f new file mode 100644 index 0000000..355b827 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/disnan.f @@ -0,0 +1,80 @@ +*> \brief \b DISNAN tests input for NaN. 
+* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DISNAN + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* LOGICAL FUNCTION DISNAN( DIN ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DIN +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DISNAN returns .TRUE. if its argument is NaN, and .FALSE. +*> otherwise. To be replaced by the Fortran 2003 intrinsic in the +*> future. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] DIN +*> \verbatim +*> DIN is DOUBLE PRECISION +*> Input to test for NaN. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION DISNAN( DIN ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DIN +* .. +* +* ===================================================================== +* +* .. External Functions .. + LOGICAL DLAISNAN + EXTERNAL DLAISNAN +* .. +* .. Executable Statements .. + DISNAN = DLAISNAN(DIN,DIN) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlaisnan.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlaisnan.f new file mode 100644 index 0000000..58595c5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlaisnan.f @@ -0,0 +1,91 @@ +*> \brief \b DLAISNAN tests input for NaN by comparing two arguments for inequality. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAISNAN + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* LOGICAL FUNCTION DLAISNAN( DIN1, DIN2 ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DIN1, DIN2 +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> This routine is not for general use. It exists solely to avoid +*> over-optimization in DISNAN. +*> +*> DLAISNAN checks for NaNs by comparing its two arguments for +*> inequality. NaN is the only floating-point value where NaN != NaN +*> returns .TRUE. To check for NaNs, pass the same variable as both +*> arguments. +*> +*> A compiler must assume that the two arguments are +*> not the same variable, and the test will not be optimized away. +*> Interprocedural or whole-program optimization may delete this +*> test. The ISNAN functions will be replaced by the correct +*> Fortran 03 intrinsic once the intrinsic is widely available. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] DIN1 +*> \verbatim +*> DIN1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] DIN2 +*> \verbatim +*> DIN2 is DOUBLE PRECISION +*> Two numbers to compare for inequality. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
+* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION DLAISNAN( DIN1, DIN2 ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DIN1, DIN2 +* .. +* +* ===================================================================== +* +* .. Executable Statements .. + DLAISNAN = (DIN1.NE.DIN2) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlamch.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlamch.f new file mode 100644 index 0000000..25c2c8e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlamch.f @@ -0,0 +1,193 @@ +*> \brief \b DLAMCH +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAMCH determines double precision machine parameters. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CMACH +*> \verbatim +*> Specifies the value to be returned by DLAMCH: +*> = 'E' or 'e', DLAMCH := eps +*> = 'S' or 's , DLAMCH := sfmin +*> = 'B' or 'b', DLAMCH := base +*> = 'P' or 'p', DLAMCH := eps*base +*> = 'N' or 'n', DLAMCH := t +*> = 'R' or 'r', DLAMCH := rnd +*> = 'M' or 'm', DLAMCH := emin +*> = 'U' or 'u', DLAMCH := rmin +*> = 'L' or 'l', DLAMCH := emax +*> = 'O' or 'o', DLAMCH := rmax +*> where +*> eps = relative machine precision +*> sfmin = safe minimum, such that 1/sfmin does not overflow +*> base = base of the machine +*> prec = eps*base +*> t = number of (base) digits in the mantissa +*> rnd = 1.0 when rounding occurs in addition, 0.0 otherwise +*> emin = minimum exponent before (gradual) underflow +*> rmin = underflow threshold - base**(emin-1) +*> emax = largest exponent before overflow +*> rmax = overflow threshold - (base**emax)*(1-eps) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER CMACH +* .. +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE, ZERO + PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION RND, EPS, SFMIN, SMALL, RMACH +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. Intrinsic Functions .. + INTRINSIC DIGITS, EPSILON, HUGE, MAXEXPONENT, + $ MINEXPONENT, RADIX, TINY +* .. +* .. Executable Statements .. +* +* +* Assume rounding, not chopping. Always. 
+* + RND = ONE +* + IF( ONE.EQ.RND ) THEN + EPS = EPSILON(ZERO) * 0.5 + ELSE + EPS = EPSILON(ZERO) + END IF +* + IF( LSAME( CMACH, 'E' ) ) THEN + RMACH = EPS + ELSE IF( LSAME( CMACH, 'S' ) ) THEN + SFMIN = TINY(ZERO) + SMALL = ONE / HUGE(ZERO) + IF( SMALL.GE.SFMIN ) THEN +* +* Use SMALL plus a bit, to avoid the possibility of rounding +* causing overflow when computing 1/sfmin. +* + SFMIN = SMALL*( ONE+EPS ) + END IF + RMACH = SFMIN + ELSE IF( LSAME( CMACH, 'B' ) ) THEN + RMACH = RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'P' ) ) THEN + RMACH = EPS * RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'N' ) ) THEN + RMACH = DIGITS(ZERO) + ELSE IF( LSAME( CMACH, 'R' ) ) THEN + RMACH = RND + ELSE IF( LSAME( CMACH, 'M' ) ) THEN + RMACH = MINEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'U' ) ) THEN + RMACH = tiny(zero) + ELSE IF( LSAME( CMACH, 'L' ) ) THEN + RMACH = MAXEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'O' ) ) THEN + RMACH = HUGE(ZERO) + ELSE + RMACH = ZERO + END IF +* + DLAMCH = RMACH + RETURN +* +* End of DLAMCH +* + END +************************************************************************ +*> \brief \b DLAMC3 +*> \details +*> \b Purpose: +*> \verbatim +*> DLAMC3 is intended to force A and B to be stored prior to doing +*> the addition of A and B , for use in situations where optimizers +*> might hold one of these in a register. +*> \endverbatim +*> \author LAPACK is a software package provided by Univ. of Tennessee, Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd.. +*> \date November 2011 +*> \ingroup auxOTHERauxiliary +*> +*> \param[in] A +*> \verbatim +*> A is a DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] B +*> \verbatim +*> B is a DOUBLE PRECISION +*> The values A and B. +*> \endverbatim +*> + DOUBLE PRECISION FUNCTION DLAMC3( A, B ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* Univ. of Tennessee, Univ. of California Berkeley and NAG Ltd.. +* November 2010 +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B +* .. +* ===================================================================== +* +* .. Executable Statements .. +* + DLAMC3 = A + B +* + RETURN +* +* End of DLAMC3 +* + END +* +************************************************************************ diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlas2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlas2.f new file mode 100644 index 0000000..81077f9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlas2.f @@ -0,0 +1,183 @@ +*> \brief \b DLAS2 computes singular values of a 2-by-2 triangular matrix. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAS2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLAS2( F, G, H, SSMIN, SSMAX ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION F, G, H, SSMAX, SSMIN +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAS2 computes the singular values of the 2-by-2 matrix +*> [ F G ] +*> [ 0 H ]. +*> On return, SSMIN is the smaller singular value and SSMAX is the +*> larger singular value. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] F +*> \verbatim +*> F is DOUBLE PRECISION +*> The (1,1) element of the 2-by-2 matrix. +*> \endverbatim +*> +*> \param[in] G +*> \verbatim +*> G is DOUBLE PRECISION +*> The (1,2) element of the 2-by-2 matrix. 
+*> \endverbatim +*> +*> \param[in] H +*> \verbatim +*> H is DOUBLE PRECISION +*> The (2,2) element of the 2-by-2 matrix. +*> \endverbatim +*> +*> \param[out] SSMIN +*> \verbatim +*> SSMIN is DOUBLE PRECISION +*> The smaller singular value. +*> \endverbatim +*> +*> \param[out] SSMAX +*> \verbatim +*> SSMAX is DOUBLE PRECISION +*> The larger singular value. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Barring over/underflow, all output quantities are correct to within +*> a few units in the last place (ulps), even in the absence of a guard +*> digit in addition/subtraction. +*> +*> In IEEE arithmetic, the code works correctly if one matrix element is +*> infinite. +*> +*> Overflow will not occur unless the largest singular value itself +*> overflows, or is within a few ulps of overflow. (On machines with +*> partial overflow, like the Cray, overflow may occur if the largest +*> singular value is within a factor of 2 of overflow.) +*> +*> Underflow is harmless if underflow is gradual. Otherwise, results +*> may correspond to a matrix modified by perturbations of size near +*> the underflow threshold. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DLAS2( F, G, H, SSMIN, SSMAX ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION F, G, H, SSMAX, SSMIN +* .. +* +* ==================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) + DOUBLE PRECISION ONE + PARAMETER ( ONE = 1.0D0 ) + DOUBLE PRECISION TWO + PARAMETER ( TWO = 2.0D0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION AS, AT, AU, C, FA, FHMN, FHMX, GA, HA +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN, SQRT +* .. +* .. Executable Statements .. 
+* + FA = ABS( F ) + GA = ABS( G ) + HA = ABS( H ) + FHMN = MIN( FA, HA ) + FHMX = MAX( FA, HA ) + IF( FHMN.EQ.ZERO ) THEN + SSMIN = ZERO + IF( FHMX.EQ.ZERO ) THEN + SSMAX = GA + ELSE + SSMAX = MAX( FHMX, GA )*SQRT( ONE+ + $ ( MIN( FHMX, GA ) / MAX( FHMX, GA ) )**2 ) + END IF + ELSE + IF( GA.LT.FHMX ) THEN + AS = ONE + FHMN / FHMX + AT = ( FHMX-FHMN ) / FHMX + AU = ( GA / FHMX )**2 + C = TWO / ( SQRT( AS*AS+AU )+SQRT( AT*AT+AU ) ) + SSMIN = FHMN*C + SSMAX = FHMX / C + ELSE + AU = FHMX / GA + IF( AU.EQ.ZERO ) THEN +* +* Avoid possible harmful underflow if exponent range +* asymmetric (true SSMIN may not underflow even if +* AU underflows) +* + SSMIN = ( FHMN*FHMX ) / GA + SSMAX = GA + ELSE + AS = ONE + FHMN / FHMX + AT = ( FHMX-FHMN ) / FHMX + C = ONE / ( SQRT( ONE+( AS*AU )**2 )+ + $ SQRT( ONE+( AT*AU )**2 ) ) + SSMIN = ( FHMN*C )*AU + SSMIN = SSMIN + SSMIN + SSMAX = GA / ( C+C ) + END IF + END IF + END IF + RETURN +* +* End of DLAS2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlascl.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlascl.f new file mode 100644 index 0000000..9b9b33c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlascl.f @@ -0,0 +1,364 @@ +*> \brief \b DLASCL multiplies a general rectangular matrix by a real scalar defined as cto/cfrom. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASCL + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASCL( TYPE, KL, KU, CFROM, CTO, M, N, A, LDA, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER TYPE +* INTEGER INFO, KL, KU, LDA, M, N +* DOUBLE PRECISION CFROM, CTO +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A( LDA, * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASCL multiplies the M by N real matrix A by the real scalar +*> CTO/CFROM. This is done without over/underflow as long as the final +*> result CTO*A(I,J)/CFROM does not over/underflow. TYPE specifies that +*> A may be full, upper triangular, lower triangular, upper Hessenberg, +*> or banded. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] TYPE +*> \verbatim +*> TYPE is CHARACTER*1 +*> TYPE indices the storage type of the input matrix. +*> = 'G': A is a full matrix. +*> = 'L': A is a lower triangular matrix. +*> = 'U': A is an upper triangular matrix. +*> = 'H': A is an upper Hessenberg matrix. +*> = 'B': A is a symmetric band matrix with lower bandwidth KL +*> and upper bandwidth KU and with the only the lower +*> half stored. +*> = 'Q': A is a symmetric band matrix with lower bandwidth KL +*> and upper bandwidth KU and with the only the upper +*> half stored. +*> = 'Z': A is a band matrix with lower bandwidth KL and upper +*> bandwidth KU. See DGBTRF for storage details. +*> \endverbatim +*> +*> \param[in] KL +*> \verbatim +*> KL is INTEGER +*> The lower bandwidth of A. Referenced only if TYPE = 'B', +*> 'Q' or 'Z'. +*> \endverbatim +*> +*> \param[in] KU +*> \verbatim +*> KU is INTEGER +*> The upper bandwidth of A. Referenced only if TYPE = 'B', +*> 'Q' or 'Z'. +*> \endverbatim +*> +*> \param[in] CFROM +*> \verbatim +*> CFROM is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] CTO +*> \verbatim +*> CTO is DOUBLE PRECISION +*> +*> The matrix A is multiplied by CTO/CFROM. 
A(I,J) is computed +*> without over/underflow if the final result CTO*A(I,J)/CFROM +*> can be represented without over/underflow. CFROM must be +*> nonzero. +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> The number of rows of the matrix A. M >= 0. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of columns of the matrix A. N >= 0. +*> \endverbatim +*> +*> \param[in,out] A +*> \verbatim +*> A is DOUBLE PRECISION array, dimension (LDA,N) +*> The matrix to be multiplied by CTO/CFROM. See TYPE for the +*> storage type. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> The leading dimension of the array A. LDA >= max(1,M). +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> 0 - successful exit +*> <0 - if INFO = -i, the i-th argument had an illegal value. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLASCL( TYPE, KL, KU, CFROM, CTO, M, N, A, LDA, INFO ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER TYPE + INTEGER INFO, KL, KU, LDA, M, N + DOUBLE PRECISION CFROM, CTO +* .. +* .. Array Arguments .. + DOUBLE PRECISION A( LDA, * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO, ONE + PARAMETER ( ZERO = 0.0D0, ONE = 1.0D0 ) +* .. +* .. Local Scalars .. + LOGICAL DONE + INTEGER I, ITYPE, J, K1, K2, K3, K4 + DOUBLE PRECISION BIGNUM, CFROM1, CFROMC, CTO1, CTOC, MUL, SMLNUM +* .. +* .. External Functions .. + LOGICAL LSAME, DISNAN + DOUBLE PRECISION DLAMCH + EXTERNAL LSAME, DLAMCH, DISNAN +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Executable Statements .. +* +* Test the input arguments +* + INFO = 0 +* + IF( LSAME( TYPE, 'G' ) ) THEN + ITYPE = 0 + ELSE IF( LSAME( TYPE, 'L' ) ) THEN + ITYPE = 1 + ELSE IF( LSAME( TYPE, 'U' ) ) THEN + ITYPE = 2 + ELSE IF( LSAME( TYPE, 'H' ) ) THEN + ITYPE = 3 + ELSE IF( LSAME( TYPE, 'B' ) ) THEN + ITYPE = 4 + ELSE IF( LSAME( TYPE, 'Q' ) ) THEN + ITYPE = 5 + ELSE IF( LSAME( TYPE, 'Z' ) ) THEN + ITYPE = 6 + ELSE + ITYPE = -1 + END IF +* + IF( ITYPE.EQ.-1 ) THEN + INFO = -1 + ELSE IF( CFROM.EQ.ZERO .OR. DISNAN(CFROM) ) THEN + INFO = -4 + ELSE IF( DISNAN(CTO) ) THEN + INFO = -5 + ELSE IF( M.LT.0 ) THEN + INFO = -6 + ELSE IF( N.LT.0 .OR. ( ITYPE.EQ.4 .AND. N.NE.M ) .OR. + $ ( ITYPE.EQ.5 .AND. N.NE.M ) ) THEN + INFO = -7 + ELSE IF( ITYPE.LE.3 .AND. LDA.LT.MAX( 1, M ) ) THEN + INFO = -9 + ELSE IF( ITYPE.GE.4 ) THEN + IF( KL.LT.0 .OR. KL.GT.MAX( M-1, 0 ) ) THEN + INFO = -2 + ELSE IF( KU.LT.0 .OR. KU.GT.MAX( N-1, 0 ) .OR. + $ ( ( ITYPE.EQ.4 .OR. ITYPE.EQ.5 ) .AND. KL.NE.KU ) ) + $ THEN + INFO = -3 + ELSE IF( ( ITYPE.EQ.4 .AND. LDA.LT.KL+1 ) .OR. + $ ( ITYPE.EQ.5 .AND. LDA.LT.KU+1 ) .OR. + $ ( ITYPE.EQ.6 .AND. LDA.LT.2*KL+KU+1 ) ) THEN + INFO = -9 + END IF + END IF +* + IF( INFO.NE.0 ) THEN + CALL XERBLA( 'DLASCL', -INFO ) + RETURN + END IF +* +* Quick return if possible +* + IF( N.EQ.0 .OR. 
M.EQ.0 ) + $ RETURN +* +* Get machine parameters +* + SMLNUM = DLAMCH( 'S' ) + BIGNUM = ONE / SMLNUM +* + CFROMC = CFROM + CTOC = CTO +* + 10 CONTINUE + CFROM1 = CFROMC*SMLNUM + IF( CFROM1.EQ.CFROMC ) THEN +! CFROMC is an inf. Multiply by a correctly signed zero for +! finite CTOC, or a NaN if CTOC is infinite. + MUL = CTOC / CFROMC + DONE = .TRUE. + CTO1 = CTOC + ELSE + CTO1 = CTOC / BIGNUM + IF( CTO1.EQ.CTOC ) THEN +! CTOC is either 0 or an inf. In both cases, CTOC itself +! serves as the correct multiplication factor. + MUL = CTOC + DONE = .TRUE. + CFROMC = ONE + ELSE IF( ABS( CFROM1 ).GT.ABS( CTOC ) .AND. CTOC.NE.ZERO ) THEN + MUL = SMLNUM + DONE = .FALSE. + CFROMC = CFROM1 + ELSE IF( ABS( CTO1 ).GT.ABS( CFROMC ) ) THEN + MUL = BIGNUM + DONE = .FALSE. + CTOC = CTO1 + ELSE + MUL = CTOC / CFROMC + DONE = .TRUE. + END IF + END IF +* + IF( ITYPE.EQ.0 ) THEN +* +* Full matrix +* + DO 30 J = 1, N + DO 20 I = 1, M + A( I, J ) = A( I, J )*MUL + 20 CONTINUE + 30 CONTINUE +* + ELSE IF( ITYPE.EQ.1 ) THEN +* +* Lower triangular matrix +* + DO 50 J = 1, N + DO 40 I = J, M + A( I, J ) = A( I, J )*MUL + 40 CONTINUE + 50 CONTINUE +* + ELSE IF( ITYPE.EQ.2 ) THEN +* +* Upper triangular matrix +* + DO 70 J = 1, N + DO 60 I = 1, MIN( J, M ) + A( I, J ) = A( I, J )*MUL + 60 CONTINUE + 70 CONTINUE +* + ELSE IF( ITYPE.EQ.3 ) THEN +* +* Upper Hessenberg matrix +* + DO 90 J = 1, N + DO 80 I = 1, MIN( J+1, M ) + A( I, J ) = A( I, J )*MUL + 80 CONTINUE + 90 CONTINUE +* + ELSE IF( ITYPE.EQ.4 ) THEN +* +* Lower half of a symmetric band matrix +* + K3 = KL + 1 + K4 = N + 1 + DO 110 J = 1, N + DO 100 I = 1, MIN( K3, K4-J ) + A( I, J ) = A( I, J )*MUL + 100 CONTINUE + 110 CONTINUE +* + ELSE IF( ITYPE.EQ.5 ) THEN +* +* Upper half of a symmetric band matrix +* + K1 = KU + 2 + K3 = KU + 1 + DO 130 J = 1, N + DO 120 I = MAX( K1-J, 1 ), K3 + A( I, J ) = A( I, J )*MUL + 120 CONTINUE + 130 CONTINUE +* + ELSE IF( ITYPE.EQ.6 ) THEN +* +* Band matrix +* + K1 = KL + KU + 2 + K2 = KL + 1 + K3 = 2*KL + KU + 1 + K4 = KL + KU + 1 + M + DO 150 J = 1, N + DO 140 I = MAX( K1-J, K2 ), MIN( K3, K4-J ) + A( I, J ) = A( I, J )*MUL + 140 CONTINUE + 150 CONTINUE +* + END IF +* + IF( .NOT.DONE ) + $ GO TO 10 +* + RETURN +* +* End of DLASCL +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq1.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq1.f new file mode 100644 index 0000000..f084778 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq1.f @@ -0,0 +1,226 @@ +*> \brief \b DLASQ1 computes the singular values of a real square bidiagonal matrix. Used by sbdsqr. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ1 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ1( N, D, E, WORK, INFO ) +* +* .. Scalar Arguments .. +* INTEGER INFO, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION D( * ), E( * ), WORK( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ1 computes the singular values of a real N-by-N bidiagonal +*> matrix with diagonal D and off-diagonal E. The singular values +*> are computed to high relative accuracy, in the absence of +*> denormalization, underflow and overflow. The algorithm was first +*> presented in +*> +*> "Accurate singular values and differential qd algorithms" by K. V. +*> Fernando and B. N. 
Parlett, Numer. Math., Vol-67, No. 2, pp. 191-230, +*> 1994, +*> +*> and the present implementation is described in "An implementation of +*> the dqds Algorithm (Positive Case)", LAPACK Working Note. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of rows and columns in the matrix. N >= 0. +*> \endverbatim +*> +*> \param[in,out] D +*> \verbatim +*> D is DOUBLE PRECISION array, dimension (N) +*> On entry, D contains the diagonal elements of the +*> bidiagonal matrix whose SVD is desired. On normal exit, +*> D contains the singular values in decreasing order. +*> \endverbatim +*> +*> \param[in,out] E +*> \verbatim +*> E is DOUBLE PRECISION array, dimension (N) +*> On entry, elements E(1:N-1) contain the off-diagonal elements +*> of the bidiagonal matrix whose SVD is desired. +*> On exit, E is overwritten. +*> \endverbatim +*> +*> \param[out] WORK +*> \verbatim +*> WORK is DOUBLE PRECISION array, dimension (4*N) +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> = 0: successful exit +*> < 0: if INFO = -i, the i-th argument had an illegal value +*> > 0: the algorithm failed +*> = 1, a split was marked by a positive value in E +*> = 2, current block of Z not diagonalized after 100*N +*> iterations (in inner while loop) On exit D and E +*> represent a matrix with the same singular values +*> which the calling subroutine could use to finish the +*> computation, or even feed back into DLASQ1 +*> = 3, termination criterion of outer while loop not met +*> (program created more than N unreduced blocks) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASQ1( N, D, E, WORK, INFO ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER INFO, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION D( * ), E( * ), WORK( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) +* .. +* .. Local Scalars .. + INTEGER I, IINFO + DOUBLE PRECISION EPS, SCALE, SAFMIN, SIGMN, SIGMX +* .. +* .. External Subroutines .. + EXTERNAL DCOPY, DLAS2, DLASCL, DLASQ2, DLASRT, XERBLA +* .. +* .. External Functions .. + DOUBLE PRECISION DLAMCH + EXTERNAL DLAMCH +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, SQRT +* .. +* .. Executable Statements .. +* + INFO = 0 + IF( N.LT.0 ) THEN + INFO = -2 + CALL XERBLA( 'DLASQ1', -INFO ) + RETURN + ELSE IF( N.EQ.0 ) THEN + RETURN + ELSE IF( N.EQ.1 ) THEN + D( 1 ) = ABS( D( 1 ) ) + RETURN + ELSE IF( N.EQ.2 ) THEN + CALL DLAS2( D( 1 ), E( 1 ), D( 2 ), SIGMN, SIGMX ) + D( 1 ) = SIGMX + D( 2 ) = SIGMN + RETURN + END IF +* +* Estimate the largest singular value. +* + SIGMX = ZERO + DO 10 I = 1, N - 1 + D( I ) = ABS( D( I ) ) + SIGMX = MAX( SIGMX, ABS( E( I ) ) ) + 10 CONTINUE + D( N ) = ABS( D( N ) ) +* +* Early return if SIGMX is zero (matrix is already diagonal). 
+* + IF( SIGMX.EQ.ZERO ) THEN + CALL DLASRT( 'D', N, D, IINFO ) + RETURN + END IF +* + DO 20 I = 1, N + SIGMX = MAX( SIGMX, D( I ) ) + 20 CONTINUE +* +* Copy D and E into WORK (in the Z format) and scale (squaring the +* input data makes scaling by a power of the radix pointless). +* + EPS = DLAMCH( 'Precision' ) + SAFMIN = DLAMCH( 'Safe minimum' ) + SCALE = SQRT( EPS / SAFMIN ) + + CALL DCOPY( N, D, 1, WORK( 1 ), 2 ) + CALL DCOPY( N-1, E, 1, WORK( 2 ), 2 ) + CALL DLASCL( 'G', 0, 0, SIGMX, SCALE, 2*N-1, 1, WORK, 2*N-1, + $ IINFO ) +* +* Compute the q's and e's. +* + DO 30 I = 1, 2*N - 1 + WORK( I ) = WORK( I )**2 + 30 CONTINUE + WORK( 2*N ) = ZERO +* + + CALL DLASQ2( N, WORK, INFO ) +* + IF( INFO.EQ.0 ) THEN + DO 40 I = 1, N + D( I ) = SQRT( WORK( I ) ) + 40 CONTINUE + CALL DLASCL( 'G', 0, 0, SCALE, SIGMX, N, 1, D, N, IINFO ) + ELSE IF( INFO.EQ.2 ) THEN +* +* Maximum number of iterations exceeded. Move data from WORK +* into D and E so the calling subroutine can try to finish +* + DO I = 1, N + D( I ) = SQRT( WORK( 2*I-1 ) ) + E( I ) = SQRT( WORK( 2*I ) ) + END DO + CALL DLASCL( 'G', 0, 0, SCALE, SIGMX, N, 1, D, N, IINFO ) + CALL DLASCL( 'G', 0, 0, SCALE, SIGMX, N, 1, E, N, IINFO ) + END IF +* + RETURN +* +* End of DLASQ1 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq2.f new file mode 100644 index 0000000..602dffc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq2.f @@ -0,0 +1,652 @@ +*> \brief \b DLASQ2 computes all the eigenvalues of the symmetric positive definite tridiagonal matrix associated with the qd Array Z to high relative accuracy. Used by sbdsqr and sstegr. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ2( N, Z, INFO ) +* +* .. Scalar Arguments .. +* INTEGER INFO, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION Z( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ2 computes all the eigenvalues of the symmetric positive +*> definite tridiagonal matrix associated with the qd array Z to high +*> relative accuracy are computed to high relative accuracy, in the +*> absence of denormalization, underflow and overflow. +*> +*> To see the relation of Z to the tridiagonal matrix, let L be a +*> unit lower bidiagonal matrix with subdiagonals Z(2,4,6,,..) and +*> let U be an upper bidiagonal matrix with 1's above and diagonal +*> Z(1,3,5,,..). The tridiagonal is L*U or, if you prefer, the +*> symmetric tridiagonal to which it is similar. +*> +*> Note : DLASQ2 defines a logical variable, IEEE, which is true +*> on machines which follow ieee-754 floating-point standard in their +*> handling of infinities and NaNs, and false otherwise. This variable +*> is passed to DLASQ3. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of rows and columns in the matrix. N >= 0. +*> \endverbatim +*> +*> \param[in,out] Z +*> \verbatim +*> Z is DOUBLE PRECISION array, dimension ( 4*N ) +*> On entry Z holds the qd array. On exit, entries 1 to N hold +*> the eigenvalues in decreasing order, Z( 2*N+1 ) holds the +*> trace, and Z( 2*N+2 ) holds the sum of the eigenvalues. 
If +*> N > 2, then Z( 2*N+3 ) holds the iteration count, Z( 2*N+4 ) +*> holds NDIVS/NIN^2, and Z( 2*N+5 ) holds the percentage of +*> shifts that failed. +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> = 0: successful exit +*> < 0: if the i-th argument is a scalar and had an illegal +*> value, then INFO = -i, if the i-th argument is an +*> array and the j-entry had an illegal value, then +*> INFO = -(i*100+j) +*> > 0: the algorithm failed +*> = 1, a split was marked by a positive value in E +*> = 2, current block of Z not diagonalized after 100*N +*> iterations (in inner while loop). On exit Z holds +*> a qd array with the same eigenvalues as the given Z. +*> = 3, termination criterion of outer while loop not met +*> (program created more than N unreduced blocks) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Local Variables: I0:N0 defines a current unreduced segment of Z. +*> The shifts are accumulated in SIGMA. Iteration count is in ITER. +*> Ping-pong is controlled by PP (alternates between 0 and 1). +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DLASQ2( N, Z, INFO ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER INFO, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION Z( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION CBIAS + PARAMETER ( CBIAS = 1.50D0 ) + DOUBLE PRECISION ZERO, HALF, ONE, TWO, FOUR, HUNDRD + PARAMETER ( ZERO = 0.0D0, HALF = 0.5D0, ONE = 1.0D0, + $ TWO = 2.0D0, FOUR = 4.0D0, HUNDRD = 100.0D0 ) +* .. +* .. Local Scalars .. + LOGICAL IEEE + INTEGER I0, I1, I4, IINFO, IPN4, ITER, IWHILA, IWHILB, + $ K, KMIN, N0, N1, NBIG, NDIV, NFAIL, PP, SPLT, + $ TTYPE + DOUBLE PRECISION D, DEE, DEEMIN, DESIG, DMIN, DMIN1, DMIN2, DN, + $ DN1, DN2, E, EMAX, EMIN, EPS, G, OLDEMN, QMAX, + $ QMIN, S, SAFMIN, SIGMA, T, TAU, TEMP, TOL, + $ TOL2, TRACE, ZMAX, TEMPE, TEMPQ +* .. +* .. External Subroutines .. + EXTERNAL DLASQ3, DLASRT, XERBLA +* .. +* .. External Functions .. + INTEGER ILAENV + DOUBLE PRECISION DLAMCH + EXTERNAL DLAMCH, ILAENV +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, DBLE, MAX, MIN, SQRT +* .. +* .. Executable Statements .. +* +* Test the input arguments. +* (in case DLASQ2 is not called by DLASQ1) +* + INFO = 0 + EPS = DLAMCH( 'Precision' ) + SAFMIN = DLAMCH( 'Safe minimum' ) + TOL = EPS*HUNDRD + TOL2 = TOL**2 +* + IF( N.LT.0 ) THEN + INFO = -1 + CALL XERBLA( 'DLASQ2', 1 ) + RETURN + ELSE IF( N.EQ.0 ) THEN + RETURN + ELSE IF( N.EQ.1 ) THEN +* +* 1-by-1 case. +* + IF( Z( 1 ).LT.ZERO ) THEN + INFO = -201 + CALL XERBLA( 'DLASQ2', 2 ) + END IF + RETURN + ELSE IF( N.EQ.2 ) THEN +* +* 2-by-2 case. +* + IF( Z( 2 ).LT.ZERO .OR. 
Z( 3 ).LT.ZERO ) THEN + INFO = -2 + CALL XERBLA( 'DLASQ2', 2 ) + RETURN + ELSE IF( Z( 3 ).GT.Z( 1 ) ) THEN + D = Z( 3 ) + Z( 3 ) = Z( 1 ) + Z( 1 ) = D + END IF + Z( 5 ) = Z( 1 ) + Z( 2 ) + Z( 3 ) + IF( Z( 2 ).GT.Z( 3 )*TOL2 ) THEN + T = HALF*( ( Z( 1 )-Z( 3 ) )+Z( 2 ) ) + S = Z( 3 )*( Z( 2 ) / T ) + IF( S.LE.T ) THEN + S = Z( 3 )*( Z( 2 ) / ( T*( ONE+SQRT( ONE+S / T ) ) ) ) + ELSE + S = Z( 3 )*( Z( 2 ) / ( T+SQRT( T )*SQRT( T+S ) ) ) + END IF + T = Z( 1 ) + ( S+Z( 2 ) ) + Z( 3 ) = Z( 3 )*( Z( 1 ) / T ) + Z( 1 ) = T + END IF + Z( 2 ) = Z( 3 ) + Z( 6 ) = Z( 2 ) + Z( 1 ) + RETURN + END IF +* +* Check for negative data and compute sums of q's and e's. +* + Z( 2*N ) = ZERO + EMIN = Z( 2 ) + QMAX = ZERO + ZMAX = ZERO + D = ZERO + E = ZERO +* + DO 10 K = 1, 2*( N-1 ), 2 + IF( Z( K ).LT.ZERO ) THEN + INFO = -( 200+K ) + CALL XERBLA( 'DLASQ2', 2 ) + RETURN + ELSE IF( Z( K+1 ).LT.ZERO ) THEN + INFO = -( 200+K+1 ) + CALL XERBLA( 'DLASQ2', 2 ) + RETURN + END IF + D = D + Z( K ) + E = E + Z( K+1 ) + QMAX = MAX( QMAX, Z( K ) ) + EMIN = MIN( EMIN, Z( K+1 ) ) + ZMAX = MAX( QMAX, ZMAX, Z( K+1 ) ) + 10 CONTINUE + IF( Z( 2*N-1 ).LT.ZERO ) THEN + INFO = -( 200+2*N-1 ) + CALL XERBLA( 'DLASQ2', 2 ) + RETURN + END IF + D = D + Z( 2*N-1 ) + QMAX = MAX( QMAX, Z( 2*N-1 ) ) + ZMAX = MAX( QMAX, ZMAX ) +* +* Check for diagonality. +* + IF( E.EQ.ZERO ) THEN + DO 20 K = 2, N + Z( K ) = Z( 2*K-1 ) + 20 CONTINUE + CALL DLASRT( 'D', N, Z, IINFO ) + Z( 2*N-1 ) = D + RETURN + END IF +* + TRACE = D + E +* +* Check for zero data. +* + IF( TRACE.EQ.ZERO ) THEN + Z( 2*N-1 ) = ZERO + RETURN + END IF +* +* Check whether the machine is IEEE conformable. +* + IEEE = ILAENV( 10, 'DLASQ2', 'N', 1, 2, 3, 4 ).EQ.1 .AND. + $ ILAENV( 11, 'DLASQ2', 'N', 1, 2, 3, 4 ).EQ.1 +* +* Rearrange data for locality: Z=(q1,qq1,e1,ee1,q2,qq2,e2,ee2,...). +* + DO 30 K = 2*N, 2, -2 + Z( 2*K ) = ZERO + Z( 2*K-1 ) = Z( K ) + Z( 2*K-2 ) = ZERO + Z( 2*K-3 ) = Z( K-1 ) + 30 CONTINUE +* + I0 = 1 + N0 = N +* +* Reverse the qd-array, if warranted. +* + IF( CBIAS*Z( 4*I0-3 ).LT.Z( 4*N0-3 ) ) THEN + IPN4 = 4*( I0+N0 ) + DO 40 I4 = 4*I0, 2*( I0+N0-1 ), 4 + + TEMP = Z( I4-3 ) + Z( I4-3 ) = Z( IPN4-I4-3 ) + Z( IPN4-I4-3 ) = TEMP + TEMP = Z( I4-1 ) + Z( I4-1 ) = Z( IPN4-I4-5 ) + Z( IPN4-I4-5 ) = TEMP + 40 CONTINUE + END IF +* +* Initial split checking via dqd and Li's test. +* + PP = 0 +* + DO 80 K = 1, 2 +* + D = Z( 4*N0+PP-3 ) + DO 50 I4 = 4*( N0-1 ) + PP, 4*I0 + PP, -4 + IF( Z( I4-1 ).LE.TOL2*D ) THEN + Z( I4-1 ) = -ZERO + D = Z( I4-3 ) + ELSE + D = Z( I4-3 )*( D / ( D+Z( I4-1 ) ) ) + END IF + 50 CONTINUE +* +* dqd maps Z to ZZ plus Li's test. +* + EMIN = Z( 4*I0+PP+1 ) + D = Z( 4*I0+PP-3 ) + DO 60 I4 = 4*I0 + PP, 4*( N0-1 ) + PP, 4 + Z( I4-2*PP-2 ) = D + Z( I4-1 ) + IF( Z( I4-1 ).LE.TOL2*D ) THEN + Z( I4-1 ) = -ZERO + Z( I4-2*PP-2 ) = D + Z( I4-2*PP ) = ZERO + D = Z( I4+1 ) + ELSE IF( SAFMIN*Z( I4+1 ).LT.Z( I4-2*PP-2 ) .AND. + $ SAFMIN*Z( I4-2*PP-2 ).LT.Z( I4+1 ) ) THEN + TEMP = Z( I4+1 ) / Z( I4-2*PP-2 ) + Z( I4-2*PP ) = Z( I4-1 )*TEMP + D = D*TEMP + ELSE + Z( I4-2*PP ) = Z( I4+1 )*( Z( I4-1 ) / Z( I4-2*PP-2 ) ) + D = Z( I4+1 )*( D / Z( I4-2*PP-2 ) ) + END IF + EMIN = MIN( EMIN, Z( I4-2*PP ) ) + 60 CONTINUE + Z( 4*N0-PP-2 ) = D +* +* Now find qmax. +* + QMAX = Z( 4*I0-PP-2 ) + DO 70 I4 = 4*I0 - PP + 2, 4*N0 - PP - 2, 4 + QMAX = MAX( QMAX, Z( I4 ) ) + 70 CONTINUE +* +* Prepare for the next iteration on K. +* + PP = 1 - PP + 80 CONTINUE + +* +* Initialise variables to pass to DLASQ3. 
+* + TTYPE = 0 + DMIN1 = ZERO + DMIN2 = ZERO + DN = ZERO + DN1 = ZERO + DN2 = ZERO + G = ZERO + TAU = ZERO +* + ITER = 2 + NFAIL = 0 + NDIV = 2*( N0-I0 ) +* + DO 160 IWHILA = 1, N + 1 + + IF( N0.LT.1 ) THEN + GO TO 170 + END IF +* +* While array unfinished do +* +* E(N0) holds the value of SIGMA when submatrix in I0:N0 +* splits from the rest of the array, but is negated. +* + DESIG = ZERO + IF( N0.EQ.N ) THEN + SIGMA = ZERO + ELSE + SIGMA = -Z( 4*N0-1 ) + END IF + IF( SIGMA.LT.ZERO ) THEN + INFO = 1 + RETURN + END IF +* +* Find last unreduced submatrix's top index I0, find QMAX and +* EMIN. Find Gershgorin-type bound if Q's much greater than E's. +* + EMAX = ZERO + IF( N0.GT.I0 ) THEN + EMIN = ABS( Z( 4*N0-5 ) ) + ELSE + EMIN = ZERO + END IF + QMIN = Z( 4*N0-3 ) + QMAX = QMIN + DO 90 I4 = 4*N0, 8, -4 + IF( Z( I4-5 ).LE.ZERO ) + $ GO TO 100 + IF( QMIN.GE.FOUR*EMAX ) THEN + QMIN = MIN( QMIN, Z( I4-3 ) ) + EMAX = MAX( EMAX, Z( I4-5 ) ) + END IF + QMAX = MAX( QMAX, Z( I4-7 )+Z( I4-5 ) ) + EMIN = MIN( EMIN, Z( I4-5 ) ) + 90 CONTINUE + I4 = 4 +* + 100 CONTINUE + I0 = I4 / 4 + + PP = 0 +* + IF( N0-I0.GT.1 ) THEN + DEE = Z( 4*I0-3 ) + DEEMIN = DEE + KMIN = I0 + DO 110 I4 = 4*I0+1, 4*N0-3, 4 + DEE = Z( I4 )*( DEE /( DEE+Z( I4-2 ) ) ) + IF( DEE.LE.DEEMIN ) THEN + DEEMIN = DEE + KMIN = ( I4+3 )/4 + END IF + 110 CONTINUE + IF( (KMIN-I0)*2.LT.N0-KMIN .AND. + $ DEEMIN.LE.HALF*Z(4*N0-3) ) THEN + IPN4 = 4*( I0+N0 ) + PP = 2 + DO 120 I4 = 4*I0, 2*( I0+N0-1 ), 4 + TEMP = Z( I4-3 ) + Z( I4-3 ) = Z( IPN4-I4-3 ) + Z( IPN4-I4-3 ) = TEMP + TEMP = Z( I4-2 ) + Z( I4-2 ) = Z( IPN4-I4-2 ) + Z( IPN4-I4-2 ) = TEMP + TEMP = Z( I4-1 ) + Z( I4-1 ) = Z( IPN4-I4-5 ) + Z( IPN4-I4-5 ) = TEMP + TEMP = Z( I4 ) + Z( I4 ) = Z( IPN4-I4-4 ) + Z( IPN4-I4-4 ) = TEMP + 120 CONTINUE + END IF + END IF +* +* Put -(initial shift) into DMIN. +* + DMIN = -MAX( ZERO, QMIN-TWO*SQRT( QMIN )*SQRT( EMAX ) ) +* +* Now I0:N0 is unreduced. +* PP = 0 for ping, PP = 1 for pong. +* PP = 2 indicates that flipping was applied to the Z array and +* and that the tests for deflation upon entry in DLASQ3 +* should not be performed. +* + NBIG = 100*( N0-I0+1 ) + DO 140 IWHILB = 1, NBIG + + IF( I0.GT.N0 ) + $ GO TO 150 +* + + ! Print out test cases + + write(3,*) "{" + write(3,*) "i0: ", I0, "," + write(3,*) "n0: ", N0, "," + write(3,'(9999(g0))',advance="no") "z: []float64{" + do i = 1, 4*n + write (3,'(99999(e24.16,a))',advance="no") z(i), "," + end do + write (3,*) "}," + write (3,*) "pp: ", PP, "," + write (3,*) "dmin: ", DMIN, "," + write (3,*) "desig:", DESIG, "," + write (3,*) "qmax: ", QMAX, "," + write (3,*) "ttype:", TTYPE, "," + write (3,*) "dmin1:", DMIN1, "," + write (3,*) "dmin2:", DMIN2, "," + write (3,*) "dn: ", DN, "," + write (3,*) "dn1: ", DN1, "," + write (3,*) "dn2: ", DN2, "," + write (3,*) "g: ", G, "," + write (3,*) "tau: ", TAU, "," + write (3,*) "nFail:", NFAIL, "," + write (3,*) "iter: ", ITER, "," + write (3,*) "sigma:", SIGMA, "," + write (3,*) "nDiv: ", NDIV, "," + +* While submatrix unfinished take a good dqds step. +* + + + CALL DLASQ3( I0, N0, Z, PP, DMIN, SIGMA, DESIG, QMAX, NFAIL, + $ ITER, NDIV, IEEE, TTYPE, DMIN1, DMIN2, DN, DN1, + $ DN2, G, TAU ) + + + ! 
Write the outputs + write(3,'(9999(g0))',advance="no") "zOut: []float64{" + do i = 1, 4*n + write (3,'(99999(e24.16,a))',advance="no") z(i), "," + end do + write (3,*) "}," + write (3,*) "i0Out:",I0, "," + write (3,*) "n0Out:", N0, "," + write (3,*) "ppOut:", PP, "," + write (3,*) "dminOut:", DMIN, "," + write (3,*) "desigOut:", DESIG, "," + write (3,*) "sigmaOut:", SIGMA, "," + write (3,*) "qmaxOut:", QMAX, "," + write (3,*) "nFailOut:", NFAIL, "," + write (3,*) "iterOut:", ITER, "," + write (3,*) "nDivOut:", NDIV, "," + write (3,*) "ttypeOut:", TTYPE, "," + write (3,*) "dmin1Out:", DMIN1, "," + write (3,*) "dmin2Out:", DMIN2, "," + write (3,*) "dnOut:", DN, "," + write (3,*) "dn1Out:", DN1, "," + write (3,*) "dn2Out:", DN2, "," + write (3,*) "gOut:", G, "," + write (3,*) "tauOut:", TAU, "," + + write (3,*) "}," + + + PP = 1 - PP +* +* When EMIN is very small check for splits. +* + IF( PP.EQ.0 .AND. N0-I0.GE.3 ) THEN + IF( Z( 4*N0 ).LE.TOL2*QMAX .OR. + $ Z( 4*N0-1 ).LE.TOL2*SIGMA ) THEN + SPLT = I0 - 1 + QMAX = Z( 4*I0-3 ) + EMIN = Z( 4*I0-1 ) + OLDEMN = Z( 4*I0 ) + DO 130 I4 = 4*I0, 4*( N0-3 ), 4 + IF( Z( I4 ).LE.TOL2*Z( I4-3 ) .OR. + $ Z( I4-1 ).LE.TOL2*SIGMA ) THEN + Z( I4-1 ) = -SIGMA + SPLT = I4 / 4 + QMAX = ZERO + EMIN = Z( I4+3 ) + OLDEMN = Z( I4+4 ) + ELSE + QMAX = MAX( QMAX, Z( I4+1 ) ) + EMIN = MIN( EMIN, Z( I4-1 ) ) + OLDEMN = MIN( OLDEMN, Z( I4 ) ) + END IF + 130 CONTINUE + Z( 4*N0-1 ) = EMIN + Z( 4*N0 ) = OLDEMN + I0 = SPLT + 1 + END IF + END IF +* + 140 CONTINUE +* + INFO = 2 +* +* Maximum number of iterations exceeded, restore the shift +* SIGMA and place the new d's and e's in a qd array. +* This might need to be done for several blocks +* + I1 = I0 + N1 = N0 + 145 CONTINUE + + TEMPQ = Z( 4*I0-3 ) + Z( 4*I0-3 ) = Z( 4*I0-3 ) + SIGMA + DO K = I0+1, N0 + TEMPE = Z( 4*K-5 ) + Z( 4*K-5 ) = Z( 4*K-5 ) * (TEMPQ / Z( 4*K-7 )) + TEMPQ = Z( 4*K-3 ) + Z( 4*K-3 ) = Z( 4*K-3 ) + SIGMA + TEMPE - Z( 4*K-5 ) + END DO +* +* Prepare to do this on the previous block if there is one +* + IF( I1.GT.1 ) THEN + N1 = I1-1 + DO WHILE( ( I1.GE.2 ) .AND. ( Z(4*I1-5).GE.ZERO ) ) + I1 = I1 - 1 + END DO + SIGMA = -Z(4*N1-1) + GO TO 145 + END IF + + DO K = 1, N + Z( 2*K-1 ) = Z( 4*K-3 ) +* +* Only the block 1..N0 is unfinished. The rest of the e's +* must be essentially zero, although sometimes other data +* has been stored in them. +* + IF( K.LT.N0 ) THEN + Z( 2*K ) = Z( 4*K-1 ) + ELSE + Z( 2*K ) = 0 + END IF + END DO + RETURN +* +* end IWHILB +* + 150 CONTINUE +* + 160 CONTINUE +* + INFO = 3 + RETURN +* +* end IWHILA +* + 170 CONTINUE +* + +* Move q's to the front. +* + DO 180 K = 2, N + Z( K ) = Z( 4*K-3 ) + 180 CONTINUE +* +* Sort and compute sum of eigenvalues. +* + CALL DLASRT( 'D', N, Z, IINFO ) +* + + E = ZERO + DO 190 K = N, 1, -1 + E = E + Z( K ) + 190 CONTINUE +* +* Store trace, sum(eigenvalues) and information on performance. +* + + Z( 2*N+1 ) = TRACE + Z( 2*N+2 ) = E + Z( 2*N+3 ) = DBLE( ITER ) + Z( 2*N+4 ) = DBLE( NDIV ) / DBLE( N**2 ) + Z( 2*N+5 ) = HUNDRD*NFAIL / DBLE( ITER ) + + RETURN +* +* End of DLASQ2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq3.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq3.f new file mode 100644 index 0000000..1c4e8ec --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq3.f @@ -0,0 +1,498 @@ +*> \brief \b DLASQ3 checks for deflation, computes a shift and calls dqds. Used by sbdsqr. 
+* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ3 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ3( I0, N0, Z, PP, DMIN, SIGMA, DESIG, QMAX, NFAIL, +* ITER, NDIV, IEEE, TTYPE, DMIN1, DMIN2, DN, DN1, +* DN2, G, TAU ) +* +* .. Scalar Arguments .. +* LOGICAL IEEE +* INTEGER I0, ITER, N0, NDIV, NFAIL, PP +* DOUBLE PRECISION DESIG, DMIN, DMIN1, DMIN2, DN, DN1, DN2, G, +* $ QMAX, SIGMA, TAU +* .. +* .. Array Arguments .. +* DOUBLE PRECISION Z( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ3 checks for deflation, computes a shift (TAU) and calls dqds. +*> In case of failure it changes shifts, and tries again until output +*> is positive. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] I0 +*> \verbatim +*> I0 is INTEGER +*> First index. +*> \endverbatim +*> +*> \param[in,out] N0 +*> \verbatim +*> N0 is INTEGER +*> Last index. +*> \endverbatim +*> +*> \param[in] Z +*> \verbatim +*> Z is DOUBLE PRECISION array, dimension ( 4*N ) +*> Z holds the qd array. +*> \endverbatim +*> +*> \param[in,out] PP +*> \verbatim +*> PP is INTEGER +*> PP=0 for ping, PP=1 for pong. +*> PP=2 indicates that flipping was applied to the Z array +*> and that the initial tests for deflation should not be +*> performed. +*> \endverbatim +*> +*> \param[out] DMIN +*> \verbatim +*> DMIN is DOUBLE PRECISION +*> Minimum value of d. +*> \endverbatim +*> +*> \param[out] SIGMA +*> \verbatim +*> SIGMA is DOUBLE PRECISION +*> Sum of shifts used in current segment. +*> \endverbatim +*> +*> \param[in,out] DESIG +*> \verbatim +*> DESIG is DOUBLE PRECISION +*> Lower order part of SIGMA +*> \endverbatim +*> +*> \param[in] QMAX +*> \verbatim +*> QMAX is DOUBLE PRECISION +*> Maximum value of q. +*> \endverbatim +*> +*> \param[out] NFAIL +*> \verbatim +*> NFAIL is INTEGER +*> Number of times shift was too big. +*> \endverbatim +*> +*> \param[out] ITER +*> \verbatim +*> ITER is INTEGER +*> Number of iterations. +*> \endverbatim +*> +*> \param[out] NDIV +*> \verbatim +*> NDIV is INTEGER +*> Number of divisions. +*> \endverbatim +*> +*> \param[in] IEEE +*> \verbatim +*> IEEE is LOGICAL +*> Flag for IEEE or non IEEE arithmetic (passed to DLASQ5). +*> \endverbatim +*> +*> \param[in,out] TTYPE +*> \verbatim +*> TTYPE is INTEGER +*> Shift type. +*> \endverbatim +*> +*> \param[in,out] DMIN1 +*> \verbatim +*> DMIN1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] DMIN2 +*> \verbatim +*> DMIN2 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] DN +*> \verbatim +*> DN is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] DN1 +*> \verbatim +*> DN1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] DN2 +*> \verbatim +*> DN2 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] G +*> \verbatim +*> G is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in,out] TAU +*> \verbatim +*> TAU is DOUBLE PRECISION +*> +*> These are passed as arguments in order to save their values +*> between calls to DLASQ3. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
+* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASQ3( I0, N0, Z, PP, DMIN, SIGMA, DESIG, QMAX, NFAIL, + $ ITER, NDIV, IEEE, TTYPE, DMIN1, DMIN2, DN, DN1, + $ DN2, G, TAU ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + LOGICAL IEEE + INTEGER I0, ITER, N0, NDIV, NFAIL, PP + DOUBLE PRECISION DESIG, DMIN, DMIN1, DMIN2, DN, DN1, DN2, G, + $ QMAX, SIGMA, TAU +* .. +* .. Array Arguments .. + DOUBLE PRECISION Z( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION CBIAS + PARAMETER ( CBIAS = 1.50D0 ) + DOUBLE PRECISION ZERO, QURTR, HALF, ONE, TWO, HUNDRD + PARAMETER ( ZERO = 0.0D0, QURTR = 0.250D0, HALF = 0.5D0, + $ ONE = 1.0D0, TWO = 2.0D0, HUNDRD = 100.0D0 ) +* .. +* .. Local Scalars .. + INTEGER IPN4, J4, N0IN, NN, TTYPE + DOUBLE PRECISION EPS, S, T, TEMP, TOL, TOL2 +* .. +* .. External Subroutines .. + EXTERNAL DLASQ4, DLASQ5, DLASQ6 +* .. +* .. External Function .. + DOUBLE PRECISION DLAMCH + LOGICAL DISNAN + EXTERNAL DISNAN, DLAMCH +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN, SQRT +* .. +* .. Executable Statements .. +* + + N0IN = N0 + EPS = DLAMCH( 'Precision' ) + TOL = EPS*HUNDRD + TOL2 = TOL**2 +* +* Check for deflation. +* + 10 CONTINUE +* + IF( N0.LT.I0 ) + $ RETURN + IF( N0.EQ.I0 ) + $ GO TO 20 + NN = 4*N0 + PP + IF( N0.EQ.( I0+1 ) ) + $ GO TO 40 +* +* Check whether E(N0-1) is negligible, 1 eigenvalue. +* + IF( Z( NN-5 ).GT.TOL2*( SIGMA+Z( NN-3 ) ) .AND. + $ Z( NN-2*PP-4 ).GT.TOL2*Z( NN-7 ) ) + $ GO TO 30 +* + 20 CONTINUE +* + Z( 4*N0-3 ) = Z( 4*N0+PP-3 ) + SIGMA + N0 = N0 - 1 + GO TO 10 +* +* Check whether E(N0-2) is negligible, 2 eigenvalues. +* + 30 CONTINUE +* + IF( Z( NN-9 ).GT.TOL2*SIGMA .AND. + $ Z( NN-2*PP-8 ).GT.TOL2*Z( NN-11 ) ) + $ GO TO 50 +* + 40 CONTINUE +* + IF( Z( NN-3 ).GT.Z( NN-7 ) ) THEN + S = Z( NN-3 ) + Z( NN-3 ) = Z( NN-7 ) + Z( NN-7 ) = S + END IF + T = HALF*( ( Z( NN-7 )-Z( NN-3 ) )+Z( NN-5 ) ) + IF( Z( NN-5 ).GT.Z( NN-3 )*TOL2.AND.T.NE.ZERO ) THEN + S = Z( NN-3 )*( Z( NN-5 ) / T ) + IF( S.LE.T ) THEN + S = Z( NN-3 )*( Z( NN-5 ) / + $ ( T*( ONE+SQRT( ONE+S / T ) ) ) ) + ELSE + S = Z( NN-3 )*( Z( NN-5 ) / ( T+SQRT( T )*SQRT( T+S ) ) ) + END IF + T = Z( NN-7 ) + ( S+Z( NN-5 ) ) + Z( NN-3 ) = Z( NN-3 )*( Z( NN-7 ) / T ) + Z( NN-7 ) = T + END IF + Z( 4*N0-7 ) = Z( NN-7 ) + SIGMA + Z( 4*N0-3 ) = Z( NN-3 ) + SIGMA + N0 = N0 - 2 + GO TO 10 +* + 50 CONTINUE + IF( PP.EQ.2 ) + $ PP = 0 +* +* Reverse the qd-array, if warranted. +* + + IF( DMIN.LE.ZERO .OR. 
N0.LT.N0IN ) THEN + IF( CBIAS*Z( 4*I0+PP-3 ).LT.Z( 4*N0+PP-3 ) ) THEN + IPN4 = 4*( I0+N0 ) + DO 60 J4 = 4*I0, 2*( I0+N0-1 ), 4 + TEMP = Z( J4-3 ) + Z( J4-3 ) = Z( IPN4-J4-3 ) + Z( IPN4-J4-3 ) = TEMP + TEMP = Z( J4-2 ) + Z( J4-2 ) = Z( IPN4-J4-2 ) + Z( IPN4-J4-2 ) = TEMP + TEMP = Z( J4-1 ) + Z( J4-1 ) = Z( IPN4-J4-5 ) + Z( IPN4-J4-5 ) = TEMP + TEMP = Z( J4 ) + Z( J4 ) = Z( IPN4-J4-4 ) + Z( IPN4-J4-4 ) = TEMP + 60 CONTINUE + IF( N0-I0.LE.4 ) THEN + Z( 4*N0+PP-1 ) = Z( 4*I0+PP-1 ) + Z( 4*N0-PP ) = Z( 4*I0-PP ) + END IF + DMIN2 = MIN( DMIN2, Z( 4*N0+PP-1 ) ) + Z( 4*N0+PP-1 ) = MIN( Z( 4*N0+PP-1 ), Z( 4*I0+PP-1 ), + $ Z( 4*I0+PP+3 ) ) + Z( 4*N0-PP ) = MIN( Z( 4*N0-PP ), Z( 4*I0-PP ), + $ Z( 4*I0-PP+4 ) ) + QMAX = MAX( QMAX, Z( 4*I0+PP-3 ), Z( 4*I0+PP+1 ) ) + DMIN = -ZERO + END IF + END IF +* +* Choose a shift. +* + ! Print out DLASQ4 test cases + write(4,*) "{" + write(4,'(9999(g0))',advance="no") "z: []float64{" + do i = 1, NN + write (4,'(99999(e24.16,a))',advance="no") z(i), "," + end do + write (4,*) "}," + write (4,*) "i0: ", I0, "," + write (4,*) "n0: ", N0, "," + write (4,*) "pp: ", PP, "," + write (4,*) "n0in: ", N0IN, "," + write (4,*) "dmin: ", DMIN, "," + write (4,*) "dmin1:", DMIN1, "," + write (4,*) "dmin2:", DMIN2, "," + write (4,*) "dn: ", DN, "," + write (4,*) "dn1: ", DN1, "," + write (4,*) "dn2: ", DN2, "," + write (4,*) "tau: ", TAU, "," + write (4,*) "ttype: ", TTYPE, "," + write (4,*) "g: ", G, "," + CALL DLASQ4( I0, N0, Z, PP, N0IN, DMIN, DMIN1, DMIN2, DN, DN1, + $ DN2, TAU, TTYPE, G ) + + write(4,'(9999(g0))',advance="no") "zOut: []float64{" + do i = 1, NN + write (4,'(99999(e24.16,a))',advance="no") z(i), "," + end do + write (4,*) "}," + write (4,*) "tauOut: ", TAU, "," + write (4,*) "ttypeOut: ", TTYPE, "," + write (4,*) "gOut: ", G, "," + write (4,*) "}," + +* +* Call dqds until DMIN > 0. +* + 70 CONTINUE +* + + write(5,*) "{" + write(5,'(9999(g0))',advance="no") "z: []float64{" + do i = 1, NN + write (5,'(99999(e24.16,a))',advance="no") z(i), "," + end do + write (5,*) "}," + write (5,*) "i0: ", I0, "," + write (5,*) "n0: ", N0, "," + write (5,*) "pp: ", PP, "," + write (5,*) "tau: ", TAU, "," + write (5,*) "sigma: ", SIGMA, "," + write (5,*) "dmin: ", DMIN, "," + write (5,*) "dmin1:", DMIN1, "," + write (5,*) "dmin2:", DMIN2, "," + write (5,*) "dn: ", DN, "," + write (5,*) "dnm1: ", DN1, "," + write (5,*) "dnm2: ", DN2, "," + + + CALL DLASQ5( I0, N0, Z, PP, TAU, SIGMA, DMIN, DMIN1, DMIN2, DN, + $ DN1, DN2, IEEE, EPS ) + + + + write (5,*) "i0Out: ", I0, "," + write (5,*) "n0Out: ", N0, "," + write (5,*) "ppOut: ", PP, "," + write (5,*) "tauOut: ", TAU, "," + write (5,*) "sigmaOut: ", SIGMA, "," + write (5,*) "dminOut: ", DMIN, "," + write (5,*) "dmin1Out:", DMIN1, "," + write (5,*) "dmin2Out:", DMIN2, "," + write (5,*) "dnOut: ", DN, "," + write (5,*) "dnm1Out: ", DN1, "," + write (5,*) "dnm2Out: ", DN2, "," + write (5,*) "}," + +* + NDIV = NDIV + ( N0-I0+2 ) + + ITER = ITER + 1 +* +* Check status. +* + + IF( DMIN.GE.ZERO .AND. DMIN1.GE.ZERO ) THEN +* +* Success. +* + GO TO 90 +* + ELSE IF( DMIN.LT.ZERO .AND. DMIN1.GT.ZERO .AND. + $ Z( 4*( N0-1 )-PP ).LT.TOL*( SIGMA+DN1 ) .AND. + $ ABS( DN ).LT.TOL*SIGMA ) THEN + +* +* Convergence hidden by negative DN. +* + Z( 4*( N0-1 )-PP+2 ) = ZERO + DMIN = ZERO + GO TO 90 + ELSE IF( DMIN.LT.ZERO ) THEN + +* +* TAU too big. Select new TAU and try again. +* + NFAIL = NFAIL + 1 + IF( TTYPE.LT.-22 ) THEN +* +* Failed twice. Play it safe. +* + TAU = ZERO + ELSE IF( DMIN1.GT.ZERO ) THEN +* +* Late failure. Gives excellent shift. 
+* + TAU = ( TAU+DMIN )*( ONE-TWO*EPS ) + TTYPE = TTYPE - 11 + ELSE +* +* Early failure. Divide by 4. +* + TAU = QURTR*TAU + TTYPE = TTYPE - 12 + END IF + GO TO 70 + ELSE IF( DISNAN( DMIN ) ) THEN +* +* NaN. +* + IF( TAU.EQ.ZERO ) THEN + GO TO 80 + ELSE + TAU = ZERO + GO TO 70 + END IF + ELSE +* +* Possible underflow. Play it safe. +* + GO TO 80 + END IF +* +* Risk of underflow. +* + 80 CONTINUE + + CALL DLASQ6( I0, N0, Z, PP, DMIN, DMIN1, DMIN2, DN, DN1, DN2 ) + + + NDIV = NDIV + ( N0-I0+2 ) + ITER = ITER + 1 + TAU = ZERO +* + 90 CONTINUE + + IF( TAU.LT.SIGMA ) THEN + DESIG = DESIG + TAU + T = SIGMA + DESIG + DESIG = DESIG - ( T-SIGMA ) + ELSE + T = SIGMA + TAU + DESIG = SIGMA - ( T-TAU ) + DESIG + END IF + SIGMA = T +* + RETURN +* +* End of DLASQ3 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq4.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq4.f new file mode 100644 index 0000000..f60f775 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq4.f @@ -0,0 +1,427 @@ +*> \brief \b DLASQ4 computes an approximation to the smallest eigenvalue using values of d from the previous transform. Used by sbdsqr. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ4 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ4( I0, N0, Z, PP, N0IN, DMIN, DMIN1, DMIN2, DN, +* DN1, DN2, TAU, TTYPE, G ) +* +* .. Scalar Arguments .. +* INTEGER I0, N0, N0IN, PP, TTYPE +* DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DN1, DN2, G, TAU +* .. +* .. Array Arguments .. +* DOUBLE PRECISION Z( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ4 computes an approximation TAU to the smallest eigenvalue +*> using values of d from the previous transform. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] I0 +*> \verbatim +*> I0 is INTEGER +*> First index. +*> \endverbatim +*> +*> \param[in] N0 +*> \verbatim +*> N0 is INTEGER +*> Last index. +*> \endverbatim +*> +*> \param[in] Z +*> \verbatim +*> Z is DOUBLE PRECISION array, dimension ( 4*N ) +*> Z holds the qd array. +*> \endverbatim +*> +*> \param[in] PP +*> \verbatim +*> PP is INTEGER +*> PP=0 for ping, PP=1 for pong. +*> \endverbatim +*> +*> \param[in] N0IN +*> \verbatim +*> N0IN is INTEGER +*> The value of N0 at start of EIGTEST. +*> \endverbatim +*> +*> \param[in] DMIN +*> \verbatim +*> DMIN is DOUBLE PRECISION +*> Minimum value of d. +*> \endverbatim +*> +*> \param[in] DMIN1 +*> \verbatim +*> DMIN1 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ). +*> \endverbatim +*> +*> \param[in] DMIN2 +*> \verbatim +*> DMIN2 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ) and D( N0-1 ). +*> \endverbatim +*> +*> \param[in] DN +*> \verbatim +*> DN is DOUBLE PRECISION +*> d(N) +*> \endverbatim +*> +*> \param[in] DN1 +*> \verbatim +*> DN1 is DOUBLE PRECISION +*> d(N-1) +*> \endverbatim +*> +*> \param[in] DN2 +*> \verbatim +*> DN2 is DOUBLE PRECISION +*> d(N-2) +*> \endverbatim +*> +*> \param[out] TAU +*> \verbatim +*> TAU is DOUBLE PRECISION +*> This is the shift. +*> \endverbatim +*> +*> \param[out] TTYPE +*> \verbatim +*> TTYPE is INTEGER +*> Shift type. +*> \endverbatim +*> +*> \param[in,out] G +*> \verbatim +*> G is REAL +*> G is passed as an argument in order to save its value between +*> calls to DLASQ4. 
+*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> CNST1 = 9/16 +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DLASQ4( I0, N0, Z, PP, N0IN, DMIN, DMIN1, DMIN2, DN, + $ DN1, DN2, TAU, TTYPE, G ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER I0, N0, N0IN, PP, TTYPE + DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DN1, DN2, G, TAU +* .. +* .. Array Arguments .. + DOUBLE PRECISION Z( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION CNST1, CNST2, CNST3 + PARAMETER ( CNST1 = 0.5630D0, CNST2 = 1.010D0, + $ CNST3 = 1.050D0 ) + DOUBLE PRECISION QURTR, THIRD, HALF, ZERO, ONE, TWO, HUNDRD + PARAMETER ( QURTR = 0.250D0, THIRD = 0.3330D0, + $ HALF = 0.50D0, ZERO = 0.0D0, ONE = 1.0D0, + $ TWO = 2.0D0, HUNDRD = 100.0D0 ) +* .. +* .. Local Scalars .. + INTEGER I4, NN, NP + DOUBLE PRECISION A2, B1, B2, GAM, GAP1, GAP2, S +* .. +* .. Intrinsic Functions .. + INTRINSIC MAX, MIN, SQRT +* .. +* .. Executable Statements .. +* +* A negative DMIN forces the shift to take that absolute value +* TTYPE records the type of shift. +* + + IF( DMIN.LE.ZERO ) THEN + TAU = -DMIN + TTYPE = -1 + RETURN + END IF +* + NN = 4*N0 + PP + IF( N0IN.EQ.N0 ) THEN +* +* No eigenvalues deflated. +* + IF( DMIN.EQ.DN .OR. DMIN.EQ.DN1 ) THEN +* + B1 = SQRT( Z( NN-3 ) )*SQRT( Z( NN-5 ) ) + B2 = SQRT( Z( NN-7 ) )*SQRT( Z( NN-9 ) ) + A2 = Z( NN-7 ) + Z( NN-5 ) +* +* Cases 2 and 3. +* + IF( DMIN.EQ.DN .AND. DMIN1.EQ.DN1 ) THEN + + GAP2 = DMIN2 - A2 - DMIN2*QURTR + IF( GAP2.GT.ZERO .AND. GAP2.GT.B2 ) THEN + GAP1 = A2 - DN - ( B2 / GAP2 )*B2 + ELSE + GAP1 = A2 - DN - ( B1+B2 ) + END IF + IF( GAP1.GT.ZERO .AND. GAP1.GT.B1 ) THEN + S = MAX( DN-( B1 / GAP1 )*B1, HALF*DMIN ) + TTYPE = -2 + ELSE + S = ZERO + IF( DN.GT.B1 ) + $ S = DN - B1 + IF( A2.GT.( B1+B2 ) ) + $ S = MIN( S, A2-( B1+B2 ) ) + S = MAX( S, THIRD*DMIN ) + TTYPE = -3 + END IF + ELSE +* +* Case 4. +* + TTYPE = -4 + S = QURTR*DMIN + IF( DMIN.EQ.DN ) THEN + GAM = DN + A2 = ZERO + IF( Z( NN-5 ) .GT. Z( NN-7 ) ) + $ RETURN + B2 = Z( NN-5 ) / Z( NN-7 ) + NP = NN - 9 + ELSE + NP = NN - 2*PP + B2 = Z( NP-2 ) + GAM = DN1 + IF( Z( NP-4 ) .GT. Z( NP-2 ) ) + $ RETURN + A2 = Z( NP-4 ) / Z( NP-2 ) + IF( Z( NN-9 ) .GT. Z( NN-11 ) ) + $ RETURN + B2 = Z( NN-9 ) / Z( NN-11 ) + NP = NN - 13 + END IF +* +* Approximate contribution to norm squared from I < NN-1. +* + A2 = A2 + B2 + DO 10 I4 = NP, 4*I0 - 1 + PP, -4 + IF( B2.EQ.ZERO ) + $ GO TO 20 + B1 = B2 + IF( Z( I4 ) .GT. Z( I4-2 ) ) + $ RETURN + B2 = B2*( Z( I4 ) / Z( I4-2 ) ) + A2 = A2 + B2 + IF( HUNDRD*MAX( B2, B1 ).LT.A2 .OR. CNST1.LT.A2 ) + $ GO TO 20 + 10 CONTINUE + 20 CONTINUE + A2 = CNST3*A2 +* +* Rayleigh quotient residual bound. +* + IF( A2.LT.CNST1 ) + $ S = GAM*( ONE-SQRT( A2 ) ) / ( ONE+A2 ) + END IF + ELSE IF( DMIN.EQ.DN2 ) THEN +* +* Case 5. +* + TTYPE = -5 + S = QURTR*DMIN +* +* Compute contribution to norm squared from I > NN-2. +* + NP = NN - 2*PP + B1 = Z( NP-2 ) + B2 = Z( NP-6 ) + GAM = DN2 + IF( Z( NP-8 ).GT.B2 .OR. 
Z( NP-4 ).GT.B1 ) + $ RETURN + A2 = ( Z( NP-8 ) / B2 )*( ONE+Z( NP-4 ) / B1 ) +* +* Approximate contribution to norm squared from I < NN-2. +* + IF( N0-I0.GT.2 ) THEN + B2 = Z( NN-13 ) / Z( NN-15 ) + A2 = A2 + B2 + DO 30 I4 = NN - 17, 4*I0 - 1 + PP, -4 + IF( B2.EQ.ZERO ) + $ GO TO 40 + B1 = B2 + IF( Z( I4 ) .GT. Z( I4-2 ) ) + $ RETURN + B2 = B2*( Z( I4 ) / Z( I4-2 ) ) + A2 = A2 + B2 + IF( HUNDRD*MAX( B2, B1 ).LT.A2 .OR. CNST1.LT.A2 ) + $ GO TO 40 + 30 CONTINUE + 40 CONTINUE + A2 = CNST3*A2 + END IF +* + IF( A2.LT.CNST1 ) + $ S = GAM*( ONE-SQRT( A2 ) ) / ( ONE+A2 ) + ELSE +* +* Case 6, no information to guide us. +* + IF( TTYPE.EQ.-6 ) THEN + G = G + THIRD*( ONE-G ) + ELSE IF( TTYPE.EQ.-18 ) THEN + G = QURTR*THIRD + ELSE + G = QURTR + END IF + S = G*DMIN + TTYPE = -6 + END IF +* + ELSE IF( N0IN.EQ.( N0+1 ) ) THEN +* +* One eigenvalue just deflated. Use DMIN1, DN1 for DMIN and DN. +* + IF( DMIN1.EQ.DN1 .AND. DMIN2.EQ.DN2 ) THEN +* +* Cases 7 and 8. +* + TTYPE = -7 + S = THIRD*DMIN1 + IF( Z( NN-5 ).GT.Z( NN-7 ) ) + $ RETURN + B1 = Z( NN-5 ) / Z( NN-7 ) + B2 = B1 + IF( B2.EQ.ZERO ) + $ GO TO 60 + DO 50 I4 = 4*N0 - 9 + PP, 4*I0 - 1 + PP, -4 + A2 = B1 + IF( Z( I4 ).GT.Z( I4-2 ) ) + $ RETURN + B1 = B1*( Z( I4 ) / Z( I4-2 ) ) + B2 = B2 + B1 + IF( HUNDRD*MAX( B1, A2 ).LT.B2 ) + $ GO TO 60 + 50 CONTINUE + 60 CONTINUE + B2 = SQRT( CNST3*B2 ) + A2 = DMIN1 / ( ONE+B2**2 ) + GAP2 = HALF*DMIN2 - A2 + IF( GAP2.GT.ZERO .AND. GAP2.GT.B2*A2 ) THEN + S = MAX( S, A2*( ONE-CNST2*A2*( B2 / GAP2 )*B2 ) ) + ELSE + S = MAX( S, A2*( ONE-CNST2*B2 ) ) + TTYPE = -8 + END IF + ELSE +* +* Case 9. +* + S = QURTR*DMIN1 + IF( DMIN1.EQ.DN1 ) + $ S = HALF*DMIN1 + TTYPE = -9 + END IF +* + ELSE IF( N0IN.EQ.( N0+2 ) ) THEN +* +* Two eigenvalues deflated. Use DMIN2, DN2 for DMIN and DN. +* +* Cases 10 and 11. +* + IF( DMIN2.EQ.DN2 .AND. TWO*Z( NN-5 ).LT.Z( NN-7 ) ) THEN + TTYPE = -10 + S = THIRD*DMIN2 + IF( Z( NN-5 ).GT.Z( NN-7 ) ) + $ RETURN + B1 = Z( NN-5 ) / Z( NN-7 ) + B2 = B1 + IF( B2.EQ.ZERO ) + $ GO TO 80 + DO 70 I4 = 4*N0 - 9 + PP, 4*I0 - 1 + PP, -4 + IF( Z( I4 ).GT.Z( I4-2 ) ) + $ RETURN + B1 = B1*( Z( I4 ) / Z( I4-2 ) ) + B2 = B2 + B1 + IF( HUNDRD*B1.LT.B2 ) + $ GO TO 80 + 70 CONTINUE + 80 CONTINUE + B2 = SQRT( CNST3*B2 ) + A2 = DMIN2 / ( ONE+B2**2 ) + GAP2 = Z( NN-7 ) + Z( NN-9 ) - + $ SQRT( Z( NN-11 ) )*SQRT( Z( NN-9 ) ) - A2 + IF( GAP2.GT.ZERO .AND. GAP2.GT.B2*A2 ) THEN + S = MAX( S, A2*( ONE-CNST2*A2*( B2 / GAP2 )*B2 ) ) + ELSE + S = MAX( S, A2*( ONE-CNST2*B2 ) ) + END IF + ELSE + S = QURTR*DMIN2 + TTYPE = -11 + END IF + ELSE IF( N0IN.GT.( N0+2 ) ) THEN +* +* Case 12, more than two eigenvalues deflated. No information. +* + S = ZERO + TTYPE = -12 + END IF +* + TAU = S + RETURN +* +* End of DLASQ4 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq5.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq5.f new file mode 100644 index 0000000..1947044 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq5.f @@ -0,0 +1,413 @@ +*> \brief \b DLASQ5 computes one dqds transform in ping-pong form. Used by sbdsqr and sstegr. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ5 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ5( I0, N0, Z, PP, TAU, SIGMA, DMIN, DMIN1, DMIN2, DN, +* DNM1, DNM2, IEEE, EPS ) +* +* .. Scalar Arguments .. 
+* LOGICAL IEEE +* INTEGER I0, N0, PP +* DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DNM1, DNM2, TAU, SIGMA, EPS +* .. +* .. Array Arguments .. +* DOUBLE PRECISION Z( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ5 computes one dqds transform in ping-pong form, one +*> version for IEEE machines another for non IEEE machines. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] I0 +*> \verbatim +*> I0 is INTEGER +*> First index. +*> \endverbatim +*> +*> \param[in] N0 +*> \verbatim +*> N0 is INTEGER +*> Last index. +*> \endverbatim +*> +*> \param[in] Z +*> \verbatim +*> Z is DOUBLE PRECISION array, dimension ( 4*N ) +*> Z holds the qd array. EMIN is stored in Z(4*N0) to avoid +*> an extra argument. +*> \endverbatim +*> +*> \param[in] PP +*> \verbatim +*> PP is INTEGER +*> PP=0 for ping, PP=1 for pong. +*> \endverbatim +*> +*> \param[in] TAU +*> \verbatim +*> TAU is DOUBLE PRECISION +*> This is the shift. +*> \endverbatim +*> +*> \param[in] SIGMA +*> \verbatim +*> SIGMA is DOUBLE PRECISION +*> This is the accumulated shift up to this step. +*> \endverbatim +*> +*> \param[out] DMIN +*> \verbatim +*> DMIN is DOUBLE PRECISION +*> Minimum value of d. +*> \endverbatim +*> +*> \param[out] DMIN1 +*> \verbatim +*> DMIN1 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ). +*> \endverbatim +*> +*> \param[out] DMIN2 +*> \verbatim +*> DMIN2 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ) and D( N0-1 ). +*> \endverbatim +*> +*> \param[out] DN +*> \verbatim +*> DN is DOUBLE PRECISION +*> d(N0), the last value of d. +*> \endverbatim +*> +*> \param[out] DNM1 +*> \verbatim +*> DNM1 is DOUBLE PRECISION +*> d(N0-1). +*> \endverbatim +*> +*> \param[out] DNM2 +*> \verbatim +*> DNM2 is DOUBLE PRECISION +*> d(N0-2). +*> \endverbatim +*> +*> \param[in] IEEE +*> \verbatim +*> IEEE is LOGICAL +*> Flag for IEEE or non IEEE arithmetic. +*> \endverbatim +* +*> \param[in] EPS +*> \verbatim +*> EPS is DOUBLE PRECISION +*> This is the value of epsilon used. +*> \endverbatim +*> +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASQ5( I0, N0, Z, PP, TAU, SIGMA, DMIN, DMIN1, DMIN2, + $ DN, DNM1, DNM2, IEEE, EPS ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + LOGICAL IEEE + INTEGER I0, N0, PP + DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DNM1, DNM2, TAU, + $ SIGMA, EPS +* .. +* .. Array Arguments .. + DOUBLE PRECISION Z( * ) +* .. +* +* ===================================================================== +* +* .. Parameter .. + DOUBLE PRECISION ZERO, HALF + PARAMETER ( ZERO = 0.0D0, HALF = 0.5 ) +* .. +* .. Local Scalars .. + INTEGER J4, J4P2 + DOUBLE PRECISION D, EMIN, TEMP, DTHRESH +* .. +* .. Intrinsic Functions .. + INTRINSIC MIN +* .. +* .. Executable Statements .. +* + + IF( ( N0-I0-1 ).LE.0 ) + $ RETURN +* + DTHRESH = EPS*(SIGMA+TAU) + IF( TAU.LT.DTHRESH*HALF ) TAU = ZERO + IF( TAU.NE.ZERO ) THEN + J4 = 4*I0 + PP - 3 + EMIN = Z( J4+4 ) + D = Z( J4 ) - TAU + DMIN = D + DMIN1 = -Z( J4 ) +* + IF( IEEE ) THEN +* +* Code for IEEE arithmetic. 
+* + IF( PP.EQ.0 ) THEN + DO 10 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-2 ) = D + Z( J4-1 ) + TEMP = Z( J4+1 ) / Z( J4-2 ) + D = D*TEMP - TAU + DMIN = MIN( DMIN, D ) + Z( J4 ) = Z( J4-1 )*TEMP + EMIN = MIN( Z( J4 ), EMIN ) + 10 CONTINUE + ELSE + DO 20 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-3 ) = D + Z( J4 ) + TEMP = Z( J4+2 ) / Z( J4-3 ) + D = D*TEMP - TAU + DMIN = MIN( DMIN, D ) + Z( J4-1 ) = Z( J4 )*TEMP + EMIN = MIN( Z( J4-1 ), EMIN ) + 20 CONTINUE + END IF + +* +* Unroll last two steps. +* + DNM2 = D + DMIN2 = DMIN + J4 = 4*( N0-2 ) - PP + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM2 + Z( J4P2 ) + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DNM1 = Z( J4P2+2 )*( DNM2 / Z( J4-2 ) ) - TAU + DMIN = MIN( DMIN, DNM1 ) +* + DMIN1 = DMIN + J4 = J4 + 4 + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM1 + Z( J4P2 ) + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DN = Z( J4P2+2 )*( DNM1 / Z( J4-2 ) ) - TAU + DMIN = MIN( DMIN, DN ) +* + ELSE +* +* Code for non IEEE arithmetic. +* + IF( PP.EQ.0 ) THEN + DO 30 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-2 ) = D + Z( J4-1 ) + IF( D.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4+1 )*( Z( J4-1 ) / Z( J4-2 ) ) + D = Z( J4+1 )*( D / Z( J4-2 ) ) - TAU + END IF + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4 ) ) + 30 CONTINUE + ELSE + DO 40 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-3 ) = D + Z( J4 ) + IF( D.LT.ZERO ) THEN + RETURN + ELSE + Z( J4-1 ) = Z( J4+2 )*( Z( J4 ) / Z( J4-3 ) ) + D = Z( J4+2 )*( D / Z( J4-3 ) ) - TAU + END IF + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4-1 ) ) + 40 CONTINUE + END IF +* +* Unroll last two steps. +* + DNM2 = D + DMIN2 = DMIN + J4 = 4*( N0-2 ) - PP + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM2 + Z( J4P2 ) + IF( DNM2.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DNM1 = Z( J4P2+2 )*( DNM2 / Z( J4-2 ) ) - TAU + END IF + DMIN = MIN( DMIN, DNM1 ) +* + DMIN1 = DMIN + J4 = J4 + 4 + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM1 + Z( J4P2 ) + IF( DNM1.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DN = Z( J4P2+2 )*( DNM1 / Z( J4-2 ) ) - TAU + END IF + DMIN = MIN( DMIN, DN ) +* + END IF + ELSE +* This is the version that sets d's to zero if they are small enough + J4 = 4*I0 + PP - 3 + EMIN = Z( J4+4 ) + D = Z( J4 ) - TAU + DMIN = D + DMIN1 = -Z( J4 ) + IF( IEEE ) THEN +* +* Code for IEEE arithmetic. +* + + IF( PP.EQ.0 ) THEN + DO 50 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-2 ) = D + Z( J4-1 ) + TEMP = Z( J4+1 ) / Z( J4-2 ) + D = D*TEMP - TAU + IF( D.LT.DTHRESH ) D = ZERO + DMIN = MIN( DMIN, D ) + Z( J4 ) = Z( J4-1 )*TEMP + EMIN = MIN( Z( J4 ), EMIN ) + 50 CONTINUE + ELSE + DO 60 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-3 ) = D + Z( J4 ) + TEMP = Z( J4+2 ) / Z( J4-3 ) + D = D*TEMP - TAU + IF( D.LT.DTHRESH ) D = ZERO + DMIN = MIN( DMIN, D ) + Z( J4-1 ) = Z( J4 )*TEMP + EMIN = MIN( Z( J4-1 ), EMIN ) + 60 CONTINUE + END IF +* +* Unroll last two steps. +* + DNM2 = D + DMIN2 = DMIN + J4 = 4*( N0-2 ) - PP + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM2 + Z( J4P2 ) + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DNM1 = Z( J4P2+2 )*( DNM2 / Z( J4-2 ) ) - TAU + DMIN = MIN( DMIN, DNM1 ) +* + DMIN1 = DMIN + J4 = J4 + 4 + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM1 + Z( J4P2 ) + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DN = Z( J4P2+2 )*( DNM1 / Z( J4-2 ) ) - TAU + DMIN = MIN( DMIN, DN ) +* + ELSE +* +* Code for non IEEE arithmetic. 
+* + IF( PP.EQ.0 ) THEN + DO 70 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-2 ) = D + Z( J4-1 ) + IF( D.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4+1 )*( Z( J4-1 ) / Z( J4-2 ) ) + D = Z( J4+1 )*( D / Z( J4-2 ) ) - TAU + END IF + IF( D.LT.DTHRESH) D = ZERO + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4 ) ) + 70 CONTINUE + ELSE + DO 80 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-3 ) = D + Z( J4 ) + IF( D.LT.ZERO ) THEN + RETURN + ELSE + Z( J4-1 ) = Z( J4+2 )*( Z( J4 ) / Z( J4-3 ) ) + D = Z( J4+2 )*( D / Z( J4-3 ) ) - TAU + END IF + IF( D.LT.DTHRESH) D = ZERO + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4-1 ) ) + 80 CONTINUE + END IF +* +* Unroll last two steps. +* + DNM2 = D + DMIN2 = DMIN + J4 = 4*( N0-2 ) - PP + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM2 + Z( J4P2 ) + IF( DNM2.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DNM1 = Z( J4P2+2 )*( DNM2 / Z( J4-2 ) ) - TAU + END IF + DMIN = MIN( DMIN, DNM1 ) +* + DMIN1 = DMIN + J4 = J4 + 4 + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM1 + Z( J4P2 ) + IF( DNM1.LT.ZERO ) THEN + RETURN + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DN = Z( J4P2+2 )*( DNM1 / Z( J4-2 ) ) - TAU + END IF + DMIN = MIN( DMIN, DN ) +* + END IF + END IF +* + Z( J4+2 ) = DN + Z( 4*N0-PP ) = EMIN + RETURN +* +* End of DLASQ5 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq6.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq6.f new file mode 100644 index 0000000..bcd61f3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasq6.f @@ -0,0 +1,258 @@ +*> \brief \b DLASQ6 computes one dqd transform in ping-pong form. Used by sbdsqr and sstegr. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASQ6 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASQ6( I0, N0, Z, PP, DMIN, DMIN1, DMIN2, DN, +* DNM1, DNM2 ) +* +* .. Scalar Arguments .. +* INTEGER I0, N0, PP +* DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DNM1, DNM2 +* .. +* .. Array Arguments .. +* DOUBLE PRECISION Z( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASQ6 computes one dqd (shift equal to zero) transform in +*> ping-pong form, with protection against underflow and overflow. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] I0 +*> \verbatim +*> I0 is INTEGER +*> First index. +*> \endverbatim +*> +*> \param[in] N0 +*> \verbatim +*> N0 is INTEGER +*> Last index. +*> \endverbatim +*> +*> \param[in] Z +*> \verbatim +*> Z is DOUBLE PRECISION array, dimension ( 4*N ) +*> Z holds the qd array. EMIN is stored in Z(4*N0) to avoid +*> an extra argument. +*> \endverbatim +*> +*> \param[in] PP +*> \verbatim +*> PP is INTEGER +*> PP=0 for ping, PP=1 for pong. +*> \endverbatim +*> +*> \param[out] DMIN +*> \verbatim +*> DMIN is DOUBLE PRECISION +*> Minimum value of d. +*> \endverbatim +*> +*> \param[out] DMIN1 +*> \verbatim +*> DMIN1 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ). +*> \endverbatim +*> +*> \param[out] DMIN2 +*> \verbatim +*> DMIN2 is DOUBLE PRECISION +*> Minimum value of d, excluding D( N0 ) and D( N0-1 ). +*> \endverbatim +*> +*> \param[out] DN +*> \verbatim +*> DN is DOUBLE PRECISION +*> d(N0), the last value of d. +*> \endverbatim +*> +*> \param[out] DNM1 +*> \verbatim +*> DNM1 is DOUBLE PRECISION +*> d(N0-1). 
+*> \endverbatim +*> +*> \param[out] DNM2 +*> \verbatim +*> DNM2 is DOUBLE PRECISION +*> d(N0-2). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASQ6( I0, N0, Z, PP, DMIN, DMIN1, DMIN2, DN, + $ DNM1, DNM2 ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER I0, N0, PP + DOUBLE PRECISION DMIN, DMIN1, DMIN2, DN, DNM1, DNM2 +* .. +* .. Array Arguments .. + DOUBLE PRECISION Z( * ) +* .. +* +* ===================================================================== +* +* .. Parameter .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) +* .. +* .. Local Scalars .. + INTEGER J4, J4P2 + DOUBLE PRECISION D, EMIN, SAFMIN, TEMP +* .. +* .. External Function .. + DOUBLE PRECISION DLAMCH + EXTERNAL DLAMCH +* .. +* .. Intrinsic Functions .. + INTRINSIC MIN +* .. +* .. Executable Statements .. +* + IF( ( N0-I0-1 ).LE.0 ) + $ RETURN +* + + print *, "In dlasq6" + STOP + + SAFMIN = DLAMCH( 'Safe minimum' ) + J4 = 4*I0 + PP - 3 + EMIN = Z( J4+4 ) + D = Z( J4 ) + DMIN = D +* + IF( PP.EQ.0 ) THEN + DO 10 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-2 ) = D + Z( J4-1 ) + IF( Z( J4-2 ).EQ.ZERO ) THEN + Z( J4 ) = ZERO + D = Z( J4+1 ) + DMIN = D + EMIN = ZERO + ELSE IF( SAFMIN*Z( J4+1 ).LT.Z( J4-2 ) .AND. + $ SAFMIN*Z( J4-2 ).LT.Z( J4+1 ) ) THEN + TEMP = Z( J4+1 ) / Z( J4-2 ) + Z( J4 ) = Z( J4-1 )*TEMP + D = D*TEMP + ELSE + Z( J4 ) = Z( J4+1 )*( Z( J4-1 ) / Z( J4-2 ) ) + D = Z( J4+1 )*( D / Z( J4-2 ) ) + END IF + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4 ) ) + 10 CONTINUE + ELSE + DO 20 J4 = 4*I0, 4*( N0-3 ), 4 + Z( J4-3 ) = D + Z( J4 ) + IF( Z( J4-3 ).EQ.ZERO ) THEN + Z( J4-1 ) = ZERO + D = Z( J4+2 ) + DMIN = D + EMIN = ZERO + ELSE IF( SAFMIN*Z( J4+2 ).LT.Z( J4-3 ) .AND. + $ SAFMIN*Z( J4-3 ).LT.Z( J4+2 ) ) THEN + TEMP = Z( J4+2 ) / Z( J4-3 ) + Z( J4-1 ) = Z( J4 )*TEMP + D = D*TEMP + ELSE + Z( J4-1 ) = Z( J4+2 )*( Z( J4 ) / Z( J4-3 ) ) + D = Z( J4+2 )*( D / Z( J4-3 ) ) + END IF + DMIN = MIN( DMIN, D ) + EMIN = MIN( EMIN, Z( J4-1 ) ) + 20 CONTINUE + END IF +* +* Unroll last two steps. +* + DNM2 = D + DMIN2 = DMIN + J4 = 4*( N0-2 ) - PP + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM2 + Z( J4P2 ) + IF( Z( J4-2 ).EQ.ZERO ) THEN + Z( J4 ) = ZERO + DNM1 = Z( J4P2+2 ) + DMIN = DNM1 + EMIN = ZERO + ELSE IF( SAFMIN*Z( J4P2+2 ).LT.Z( J4-2 ) .AND. + $ SAFMIN*Z( J4-2 ).LT.Z( J4P2+2 ) ) THEN + TEMP = Z( J4P2+2 ) / Z( J4-2 ) + Z( J4 ) = Z( J4P2 )*TEMP + DNM1 = DNM2*TEMP + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DNM1 = Z( J4P2+2 )*( DNM2 / Z( J4-2 ) ) + END IF + DMIN = MIN( DMIN, DNM1 ) +* + DMIN1 = DMIN + J4 = J4 + 4 + J4P2 = J4 + 2*PP - 1 + Z( J4-2 ) = DNM1 + Z( J4P2 ) + IF( Z( J4-2 ).EQ.ZERO ) THEN + Z( J4 ) = ZERO + DN = Z( J4P2+2 ) + DMIN = DN + EMIN = ZERO + ELSE IF( SAFMIN*Z( J4P2+2 ).LT.Z( J4-2 ) .AND. 
+ $ SAFMIN*Z( J4-2 ).LT.Z( J4P2+2 ) ) THEN + TEMP = Z( J4P2+2 ) / Z( J4-2 ) + Z( J4 ) = Z( J4P2 )*TEMP + DN = DNM1*TEMP + ELSE + Z( J4 ) = Z( J4P2+2 )*( Z( J4P2 ) / Z( J4-2 ) ) + DN = Z( J4P2+2 )*( DNM1 / Z( J4-2 ) ) + END IF + DMIN = MIN( DMIN, DN ) +* + Z( J4+2 ) = DN + Z( 4*N0-PP ) = EMIN + RETURN +* +* End of DLASQ6 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasrt.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasrt.f new file mode 100644 index 0000000..f5d0e6c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/dlasrt.f @@ -0,0 +1,303 @@ +*> \brief \b DLASRT sorts numbers in increasing or decreasing order. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASRT + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASRT( ID, N, D, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER ID +* INTEGER INFO, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION D( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> Sort the numbers in D in increasing order (if ID = 'I') or +*> in decreasing order (if ID = 'D' ). +*> +*> Use Quick Sort, reverting to Insertion sort on arrays of +*> size <= 20. Dimension of STACK limits N to about 2**32. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] ID +*> \verbatim +*> ID is CHARACTER*1 +*> = 'I': sort D in increasing order; +*> = 'D': sort D in decreasing order. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The length of the array D. +*> \endverbatim +*> +*> \param[in,out] D +*> \verbatim +*> D is DOUBLE PRECISION array, dimension (N) +*> On entry, the array to be sorted. +*> On exit, D has been sorted into increasing order +*> (D(1) <= ... <= D(N) ) or into decreasing order +*> (D(1) >= ... >= D(N) ), depending on ID. +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> = 0: successful exit +*> < 0: if INFO = -i, the i-th argument had an illegal value +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASRT( ID, N, D, INFO ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER ID + INTEGER INFO, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION D( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + INTEGER SELECT + PARAMETER ( SELECT = 20 ) +* .. +* .. Local Scalars .. + INTEGER DIR, ENDD, I, J, START, STKPNT + DOUBLE PRECISION D1, D2, D3, DMNMX, TMP +* .. +* .. Local Arrays .. + INTEGER STACK( 2, 32 ) +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Executable Statements .. +* +* Test the input paramters. 
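+*     DIR encodes the requested sort order: 0 for decreasing ('D'),
+*     1 for increasing ('I'); it stays at -1 when ID is not
+*     recognised, which is reported as INFO = -1 below.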
+* + INFO = 0 + DIR = -1 + IF( LSAME( ID, 'D' ) ) THEN + DIR = 0 + ELSE IF( LSAME( ID, 'I' ) ) THEN + DIR = 1 + END IF + IF( DIR.EQ.-1 ) THEN + INFO = -1 + ELSE IF( N.LT.0 ) THEN + INFO = -2 + END IF + IF( INFO.NE.0 ) THEN + CALL XERBLA( 'DLASRT', -INFO ) + RETURN + END IF +* +* Quick return if possible +* + IF( N.LE.1 ) + $ RETURN +* + STKPNT = 1 + STACK( 1, 1 ) = 1 + STACK( 2, 1 ) = N + 10 CONTINUE + START = STACK( 1, STKPNT ) + ENDD = STACK( 2, STKPNT ) + STKPNT = STKPNT - 1 + IF( ENDD-START.LE.SELECT .AND. ENDD-START.GT.0 ) THEN +* +* Do Insertion sort on D( START:ENDD ) +* + IF( DIR.EQ.0 ) THEN +* +* Sort into decreasing order +* + DO 30 I = START + 1, ENDD + DO 20 J = I, START + 1, -1 + IF( D( J ).GT.D( J-1 ) ) THEN + DMNMX = D( J ) + D( J ) = D( J-1 ) + D( J-1 ) = DMNMX + ELSE + GO TO 30 + END IF + 20 CONTINUE + 30 CONTINUE +* + ELSE +* +* Sort into increasing order +* + DO 50 I = START + 1, ENDD + DO 40 J = I, START + 1, -1 + IF( D( J ).LT.D( J-1 ) ) THEN + DMNMX = D( J ) + D( J ) = D( J-1 ) + D( J-1 ) = DMNMX + ELSE + GO TO 50 + END IF + 40 CONTINUE + 50 CONTINUE +* + END IF +* + ELSE IF( ENDD-START.GT.SELECT ) THEN +* +* Partition D( START:ENDD ) and stack parts, largest one first +* +* Choose partition entry as median of 3 +* + D1 = D( START ) + D2 = D( ENDD ) + I = ( START+ENDD ) / 2 + D3 = D( I ) + IF( D1.LT.D2 ) THEN + IF( D3.LT.D1 ) THEN + DMNMX = D1 + ELSE IF( D3.LT.D2 ) THEN + DMNMX = D3 + ELSE + DMNMX = D2 + END IF + ELSE + IF( D3.LT.D2 ) THEN + DMNMX = D2 + ELSE IF( D3.LT.D1 ) THEN + DMNMX = D3 + ELSE + DMNMX = D1 + END IF + END IF +* + IF( DIR.EQ.0 ) THEN +* +* Sort into decreasing order +* + I = START - 1 + J = ENDD + 1 + 60 CONTINUE + 70 CONTINUE + J = J - 1 + IF( D( J ).LT.DMNMX ) + $ GO TO 70 + 80 CONTINUE + I = I + 1 + IF( D( I ).GT.DMNMX ) + $ GO TO 80 + IF( I.LT.J ) THEN + TMP = D( I ) + D( I ) = D( J ) + D( J ) = TMP + GO TO 60 + END IF + IF( J-START.GT.ENDD-J-1 ) THEN + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + ELSE + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + END IF + ELSE +* +* Sort into increasing order +* + I = START - 1 + J = ENDD + 1 + 90 CONTINUE + 100 CONTINUE + J = J - 1 + IF( D( J ).GT.DMNMX ) + $ GO TO 100 + 110 CONTINUE + I = I + 1 + IF( D( I ).LT.DMNMX ) + $ GO TO 110 + IF( I.LT.J ) THEN + TMP = D( I ) + D( I ) = D( J ) + D( J ) = TMP + GO TO 90 + END IF + IF( J-START.GT.ENDD-J-1 ) THEN + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + ELSE + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + END IF + END IF + END IF + IF( STKPNT.GT.0 ) + $ GO TO 10 + RETURN +* +* End of DLASRT +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ieeeck.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ieeeck.f new file mode 100644 index 0000000..132e436 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ieeeck.f @@ -0,0 +1,203 @@ +*> \brief \b IEEECK +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download IEEECK + dependencies +*> +*> [TGZ] +*> +*> [ZIP] 
+*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* INTEGER FUNCTION IEEECK( ISPEC, ZERO, ONE ) +* +* .. Scalar Arguments .. +* INTEGER ISPEC +* REAL ONE, ZERO +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> IEEECK is called from the ILAENV to verify that Infinity and +*> possibly NaN arithmetic is safe (i.e. will not trap). +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] ISPEC +*> \verbatim +*> ISPEC is INTEGER +*> Specifies whether to test just for inifinity arithmetic +*> or whether to test for infinity and NaN arithmetic. +*> = 0: Verify infinity arithmetic only. +*> = 1: Verify infinity and NaN arithmetic. +*> \endverbatim +*> +*> \param[in] ZERO +*> \verbatim +*> ZERO is REAL +*> Must contain the value 0.0 +*> This is passed to prevent the compiler from optimizing +*> away this code. +*> \endverbatim +*> +*> \param[in] ONE +*> \verbatim +*> ONE is REAL +*> Must contain the value 1.0 +*> This is passed to prevent the compiler from optimizing +*> away this code. +*> +*> RETURN VALUE: INTEGER +*> = 0: Arithmetic failed to produce the correct answers +*> = 1: Arithmetic produced the correct answers +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + INTEGER FUNCTION IEEECK( ISPEC, ZERO, ONE ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER ISPEC + REAL ONE, ZERO +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + REAL NAN1, NAN2, NAN3, NAN4, NAN5, NAN6, NEGINF, + $ NEGZRO, NEWZRO, POSINF +* .. +* .. Executable Statements .. 
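+*
+*     The checks below construct +Inf, -Inf and -0 from the ZERO and
+*     ONE arguments and verify that comparisons involving them give
+*     the IEEE-mandated results.  When ISPEC = 1, six NaNs are also
+*     formed further down, each of which must compare unequal to
+*     itself.
+*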
+ IEEECK = 1 +* + POSINF = ONE / ZERO + IF( POSINF.LE.ONE ) THEN + IEEECK = 0 + RETURN + END IF +* + NEGINF = -ONE / ZERO + IF( NEGINF.GE.ZERO ) THEN + IEEECK = 0 + RETURN + END IF +* + NEGZRO = ONE / ( NEGINF+ONE ) + IF( NEGZRO.NE.ZERO ) THEN + IEEECK = 0 + RETURN + END IF +* + NEGINF = ONE / NEGZRO + IF( NEGINF.GE.ZERO ) THEN + IEEECK = 0 + RETURN + END IF +* + NEWZRO = NEGZRO + ZERO + IF( NEWZRO.NE.ZERO ) THEN + IEEECK = 0 + RETURN + END IF +* + POSINF = ONE / NEWZRO + IF( POSINF.LE.ONE ) THEN + IEEECK = 0 + RETURN + END IF +* + NEGINF = NEGINF*POSINF + IF( NEGINF.GE.ZERO ) THEN + IEEECK = 0 + RETURN + END IF +* + POSINF = POSINF*POSINF + IF( POSINF.LE.ONE ) THEN + IEEECK = 0 + RETURN + END IF +* +* +* +* +* Return if we were only asked to check infinity arithmetic +* + IF( ISPEC.EQ.0 ) + $ RETURN +* + NAN1 = POSINF + NEGINF +* + NAN2 = POSINF / NEGINF +* + NAN3 = POSINF / POSINF +* + NAN4 = POSINF*ZERO +* + NAN5 = NEGINF*NEGZRO +* + NAN6 = NAN5*ZERO +* + IF( NAN1.EQ.NAN1 ) THEN + IEEECK = 0 + RETURN + END IF +* + IF( NAN2.EQ.NAN2 ) THEN + IEEECK = 0 + RETURN + END IF +* + IF( NAN3.EQ.NAN3 ) THEN + IEEECK = 0 + RETURN + END IF +* + IF( NAN4.EQ.NAN4 ) THEN + IEEECK = 0 + RETURN + END IF +* + IF( NAN5.EQ.NAN5 ) THEN + IEEECK = 0 + RETURN + END IF +* + IF( NAN6.EQ.NAN6 ) THEN + IEEECK = 0 + RETURN + END IF +* + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ilaenv.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ilaenv.f new file mode 100644 index 0000000..867464d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/ilaenv.f @@ -0,0 +1,624 @@ +*> \brief \b ILAENV +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download ILAENV + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* INTEGER FUNCTION ILAENV( ISPEC, NAME, OPTS, N1, N2, N3, N4 ) +* +* .. Scalar Arguments .. +* CHARACTER*( * ) NAME, OPTS +* INTEGER ISPEC, N1, N2, N3, N4 +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> ILAENV is called from the LAPACK routines to choose problem-dependent +*> parameters for the local environment. See ISPEC for a description of +*> the parameters. +*> +*> ILAENV returns an INTEGER +*> if ILAENV >= 0: ILAENV returns the value of the parameter specified by ISPEC +*> if ILAENV < 0: if ILAENV = -k, the k-th argument had an illegal value. +*> +*> This version provides a set of parameters which should give good, +*> but not optimal, performance on many of the currently available +*> computers. Users are encouraged to modify this subroutine to set +*> the tuning parameters for their particular machine using the option +*> and problem size information in the arguments. +*> +*> This routine will not function correctly if it is converted to all +*> lower case. Converting it to all upper case is allowed. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] ISPEC +*> \verbatim +*> ISPEC is INTEGER +*> Specifies the parameter to be returned as the value of +*> ILAENV. +*> = 1: the optimal blocksize; if this value is 1, an unblocked +*> algorithm will give the best performance. +*> = 2: the minimum block size for which the block routine +*> should be used; if the usable block size is less than +*> this value, an unblocked routine should be used. 
+*> = 3: the crossover point (in a block routine, for N less +*> than this value, an unblocked routine should be used) +*> = 4: the number of shifts, used in the nonsymmetric +*> eigenvalue routines (DEPRECATED) +*> = 5: the minimum column dimension for blocking to be used; +*> rectangular blocks must have dimension at least k by m, +*> where k is given by ILAENV(2,...) and m by ILAENV(5,...) +*> = 6: the crossover point for the SVD (when reducing an m by n +*> matrix to bidiagonal form, if max(m,n)/min(m,n) exceeds +*> this value, a QR factorization is used first to reduce +*> the matrix to a triangular form.) +*> = 7: the number of processors +*> = 8: the crossover point for the multishift QR method +*> for nonsymmetric eigenvalue problems (DEPRECATED) +*> = 9: maximum size of the subproblems at the bottom of the +*> computation tree in the divide-and-conquer algorithm +*> (used by xGELSD and xGESDD) +*> =10: ieee NaN arithmetic can be trusted not to trap +*> =11: infinity arithmetic can be trusted not to trap +*> 12 <= ISPEC <= 16: +*> xHSEQR or one of its subroutines, +*> see IPARMQ for detailed explanation +*> \endverbatim +*> +*> \param[in] NAME +*> \verbatim +*> NAME is CHARACTER*(*) +*> The name of the calling subroutine, in either upper case or +*> lower case. +*> \endverbatim +*> +*> \param[in] OPTS +*> \verbatim +*> OPTS is CHARACTER*(*) +*> The character options to the subroutine NAME, concatenated +*> into a single character string. For example, UPLO = 'U', +*> TRANS = 'T', and DIAG = 'N' for a triangular routine would +*> be specified as OPTS = 'UTN'. +*> \endverbatim +*> +*> \param[in] N1 +*> \verbatim +*> N1 is INTEGER +*> \endverbatim +*> +*> \param[in] N2 +*> \verbatim +*> N2 is INTEGER +*> \endverbatim +*> +*> \param[in] N3 +*> \verbatim +*> N3 is INTEGER +*> \endverbatim +*> +*> \param[in] N4 +*> \verbatim +*> N4 is INTEGER +*> Problem dimensions for the subroutine NAME; these may not all +*> be required. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> The following conventions have been used when calling ILAENV from the +*> LAPACK routines: +*> 1) OPTS is a concatenation of all of the character options to +*> subroutine NAME, in the same order that they appear in the +*> argument list for NAME, even if they are not used in determining +*> the value of the parameter specified by ISPEC. +*> 2) The problem dimensions N1, N2, N3, N4 are specified in the order +*> that they appear in the argument list for NAME. N1 is used +*> first, N2 second, and so on, and unused problem dimensions are +*> passed a value of -1. +*> 3) The parameter value returned by ILAENV is checked for validity in +*> the calling subroutine. For example, ILAENV is used to retrieve +*> the optimal blocksize for STRTRI as follows: +*> +*> NB = ILAENV( 1, 'STRTRI', UPLO // DIAG, N, -1, -1, -1 ) +*> IF( NB.LE.1 ) NB = MAX( 1, N ) +*> \endverbatim +*> +* ===================================================================== + INTEGER FUNCTION ILAENV( ISPEC, NAME, OPTS, N1, N2, N3, N4 ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. 
+ CHARACTER*( * ) NAME, OPTS + INTEGER ISPEC, N1, N2, N3, N4 +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I, IC, IZ, NB, NBMIN, NX + LOGICAL CNAME, SNAME + CHARACTER C1*1, C2*2, C4*2, C3*3, SUBNAM*6 +* .. +* .. Intrinsic Functions .. + INTRINSIC CHAR, ICHAR, INT, MIN, REAL +* .. +* .. External Functions .. + INTEGER IEEECK, IPARMQ + EXTERNAL IEEECK, IPARMQ +* .. +* .. Executable Statements .. +* + GO TO ( 10, 10, 10, 80, 90, 100, 110, 120, + $ 130, 140, 150, 160, 160, 160, 160, 160 )ISPEC +* +* Invalid value for ISPEC +* + ILAENV = -1 + RETURN +* + 10 CONTINUE +* +* Convert NAME to upper case if the first character is lower case. +* + ILAENV = 1 + SUBNAM = NAME + IC = ICHAR( SUBNAM( 1: 1 ) ) + IZ = ICHAR( 'Z' ) + IF( IZ.EQ.90 .OR. IZ.EQ.122 ) THEN +* +* ASCII character set +* + IF( IC.GE.97 .AND. IC.LE.122 ) THEN + SUBNAM( 1: 1 ) = CHAR( IC-32 ) + DO 20 I = 2, 6 + IC = ICHAR( SUBNAM( I: I ) ) + IF( IC.GE.97 .AND. IC.LE.122 ) + $ SUBNAM( I: I ) = CHAR( IC-32 ) + 20 CONTINUE + END IF +* + ELSE IF( IZ.EQ.233 .OR. IZ.EQ.169 ) THEN +* +* EBCDIC character set +* + IF( ( IC.GE.129 .AND. IC.LE.137 ) .OR. + $ ( IC.GE.145 .AND. IC.LE.153 ) .OR. + $ ( IC.GE.162 .AND. IC.LE.169 ) ) THEN + SUBNAM( 1: 1 ) = CHAR( IC+64 ) + DO 30 I = 2, 6 + IC = ICHAR( SUBNAM( I: I ) ) + IF( ( IC.GE.129 .AND. IC.LE.137 ) .OR. + $ ( IC.GE.145 .AND. IC.LE.153 ) .OR. + $ ( IC.GE.162 .AND. IC.LE.169 ) )SUBNAM( I: + $ I ) = CHAR( IC+64 ) + 30 CONTINUE + END IF +* + ELSE IF( IZ.EQ.218 .OR. IZ.EQ.250 ) THEN +* +* Prime machines: ASCII+128 +* + IF( IC.GE.225 .AND. IC.LE.250 ) THEN + SUBNAM( 1: 1 ) = CHAR( IC-32 ) + DO 40 I = 2, 6 + IC = ICHAR( SUBNAM( I: I ) ) + IF( IC.GE.225 .AND. IC.LE.250 ) + $ SUBNAM( I: I ) = CHAR( IC-32 ) + 40 CONTINUE + END IF + END IF +* + C1 = SUBNAM( 1: 1 ) + SNAME = C1.EQ.'S' .OR. C1.EQ.'D' + CNAME = C1.EQ.'C' .OR. C1.EQ.'Z' + IF( .NOT.( CNAME .OR. SNAME ) ) + $ RETURN + C2 = SUBNAM( 2: 3 ) + C3 = SUBNAM( 4: 6 ) + C4 = C3( 2: 3 ) +* + GO TO ( 50, 60, 70 )ISPEC +* + 50 CONTINUE +* +* ISPEC = 1: block size +* +* In these examples, separate code is provided for setting NB for +* real and complex. We assume that NB will take the same value in +* single or double precision. +* + NB = 1 +* + IF( C2.EQ.'GE' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + ELSE IF( C3.EQ.'QRF' .OR. C3.EQ.'RQF' .OR. C3.EQ.'LQF' .OR. + $ C3.EQ.'QLF' ) THEN + IF( SNAME ) THEN + NB = 32 + ELSE + NB = 32 + END IF + ELSE IF( C3.EQ.'HRD' ) THEN + IF( SNAME ) THEN + NB = 32 + ELSE + NB = 32 + END IF + ELSE IF( C3.EQ.'BRD' ) THEN + IF( SNAME ) THEN + NB = 32 + ELSE + NB = 32 + END IF + ELSE IF( C3.EQ.'TRI' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + END IF + ELSE IF( C2.EQ.'PO' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + END IF + ELSE IF( C2.EQ.'SY' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + ELSE IF( SNAME .AND. C3.EQ.'TRD' ) THEN + NB = 32 + ELSE IF( SNAME .AND. C3.EQ.'GST' ) THEN + NB = 64 + END IF + ELSE IF( CNAME .AND. C2.EQ.'HE' ) THEN + IF( C3.EQ.'TRF' ) THEN + NB = 64 + ELSE IF( C3.EQ.'TRD' ) THEN + NB = 32 + ELSE IF( C3.EQ.'GST' ) THEN + NB = 64 + END IF + ELSE IF( SNAME .AND. C2.EQ.'OR' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. 
C4.EQ.'BR' ) + $ THEN + NB = 32 + END IF + ELSE IF( C3( 1: 1 ).EQ.'M' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NB = 32 + END IF + END IF + ELSE IF( CNAME .AND. C2.EQ.'UN' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NB = 32 + END IF + ELSE IF( C3( 1: 1 ).EQ.'M' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NB = 32 + END IF + END IF + ELSE IF( C2.EQ.'GB' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + IF( N4.LE.64 ) THEN + NB = 1 + ELSE + NB = 32 + END IF + ELSE + IF( N4.LE.64 ) THEN + NB = 1 + ELSE + NB = 32 + END IF + END IF + END IF + ELSE IF( C2.EQ.'PB' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + IF( N2.LE.64 ) THEN + NB = 1 + ELSE + NB = 32 + END IF + ELSE + IF( N2.LE.64 ) THEN + NB = 1 + ELSE + NB = 32 + END IF + END IF + END IF + ELSE IF( C2.EQ.'TR' ) THEN + IF( C3.EQ.'TRI' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + END IF + ELSE IF( C2.EQ.'LA' ) THEN + IF( C3.EQ.'UUM' ) THEN + IF( SNAME ) THEN + NB = 64 + ELSE + NB = 64 + END IF + END IF + ELSE IF( SNAME .AND. C2.EQ.'ST' ) THEN + IF( C3.EQ.'EBZ' ) THEN + NB = 1 + END IF + END IF + ILAENV = NB + RETURN +* + 60 CONTINUE +* +* ISPEC = 2: minimum block size +* + NBMIN = 2 + IF( C2.EQ.'GE' ) THEN + IF( C3.EQ.'QRF' .OR. C3.EQ.'RQF' .OR. C3.EQ.'LQF' .OR. C3.EQ. + $ 'QLF' ) THEN + IF( SNAME ) THEN + NBMIN = 2 + ELSE + NBMIN = 2 + END IF + ELSE IF( C3.EQ.'HRD' ) THEN + IF( SNAME ) THEN + NBMIN = 2 + ELSE + NBMIN = 2 + END IF + ELSE IF( C3.EQ.'BRD' ) THEN + IF( SNAME ) THEN + NBMIN = 2 + ELSE + NBMIN = 2 + END IF + ELSE IF( C3.EQ.'TRI' ) THEN + IF( SNAME ) THEN + NBMIN = 2 + ELSE + NBMIN = 2 + END IF + END IF + ELSE IF( C2.EQ.'SY' ) THEN + IF( C3.EQ.'TRF' ) THEN + IF( SNAME ) THEN + NBMIN = 8 + ELSE + NBMIN = 8 + END IF + ELSE IF( SNAME .AND. C3.EQ.'TRD' ) THEN + NBMIN = 2 + END IF + ELSE IF( CNAME .AND. C2.EQ.'HE' ) THEN + IF( C3.EQ.'TRD' ) THEN + NBMIN = 2 + END IF + ELSE IF( SNAME .AND. C2.EQ.'OR' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NBMIN = 2 + END IF + ELSE IF( C3( 1: 1 ).EQ.'M' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NBMIN = 2 + END IF + END IF + ELSE IF( CNAME .AND. C2.EQ.'UN' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NBMIN = 2 + END IF + ELSE IF( C3( 1: 1 ).EQ.'M' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NBMIN = 2 + END IF + END IF + END IF + ILAENV = NBMIN + RETURN +* + 70 CONTINUE +* +* ISPEC = 3: crossover point +* + NX = 0 + IF( C2.EQ.'GE' ) THEN + IF( C3.EQ.'QRF' .OR. C3.EQ.'RQF' .OR. C3.EQ.'LQF' .OR. C3.EQ. + $ 'QLF' ) THEN + IF( SNAME ) THEN + NX = 128 + ELSE + NX = 128 + END IF + ELSE IF( C3.EQ.'HRD' ) THEN + IF( SNAME ) THEN + NX = 128 + ELSE + NX = 128 + END IF + ELSE IF( C3.EQ.'BRD' ) THEN + IF( SNAME ) THEN + NX = 128 + ELSE + NX = 128 + END IF + END IF + ELSE IF( C2.EQ.'SY' ) THEN + IF( SNAME .AND. 
C3.EQ.'TRD' ) THEN + NX = 32 + END IF + ELSE IF( CNAME .AND. C2.EQ.'HE' ) THEN + IF( C3.EQ.'TRD' ) THEN + NX = 32 + END IF + ELSE IF( SNAME .AND. C2.EQ.'OR' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NX = 128 + END IF + END IF + ELSE IF( CNAME .AND. C2.EQ.'UN' ) THEN + IF( C3( 1: 1 ).EQ.'G' ) THEN + IF( C4.EQ.'QR' .OR. C4.EQ.'RQ' .OR. C4.EQ.'LQ' .OR. C4.EQ. + $ 'QL' .OR. C4.EQ.'HR' .OR. C4.EQ.'TR' .OR. C4.EQ.'BR' ) + $ THEN + NX = 128 + END IF + END IF + END IF + ILAENV = NX + RETURN +* + 80 CONTINUE +* +* ISPEC = 4: number of shifts (used by xHSEQR) +* + ILAENV = 6 + RETURN +* + 90 CONTINUE +* +* ISPEC = 5: minimum column dimension (not used) +* + ILAENV = 2 + RETURN +* + 100 CONTINUE +* +* ISPEC = 6: crossover point for SVD (used by xGELSS and xGESVD) +* + ILAENV = INT( REAL( MIN( N1, N2 ) )*1.6E0 ) + RETURN +* + 110 CONTINUE +* +* ISPEC = 7: number of processors (not used) +* + ILAENV = 1 + RETURN +* + 120 CONTINUE +* +* ISPEC = 8: crossover point for multishift (used by xHSEQR) +* + ILAENV = 50 + RETURN +* + 130 CONTINUE +* +* ISPEC = 9: maximum size of the subproblems at the bottom of the +* computation tree in the divide-and-conquer algorithm +* (used by xGELSD and xGESDD) +* + ILAENV = 25 + RETURN +* + 140 CONTINUE +* +* ISPEC = 10: ieee NaN arithmetic can be trusted not to trap +* +* ILAENV = 0 + ILAENV = 1 + IF( ILAENV.EQ.1 ) THEN + ILAENV = IEEECK( 1, 0.0, 1.0 ) + END IF + RETURN +* + 150 CONTINUE +* +* ISPEC = 11: infinity arithmetic can be trusted not to trap +* +* ILAENV = 0 + ILAENV = 1 + IF( ILAENV.EQ.1 ) THEN + ILAENV = IEEECK( 0, 0.0, 1.0 ) + END IF + RETURN +* + 160 CONTINUE +* +* 12 <= ISPEC <= 16: xHSEQR or one of its subroutines. +* + ILAENV = IPARMQ( ISPEC, NAME, OPTS, N1, N2, N3, N4 ) + RETURN +* +* End of ILAENV +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/iparmq.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/iparmq.f new file mode 100644 index 0000000..bd5bd7a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/iparmq.f @@ -0,0 +1,322 @@ +*> \brief \b IPARMQ +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download IPARMQ + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* INTEGER FUNCTION IPARMQ( ISPEC, NAME, OPTS, N, ILO, IHI, LWORK ) +* +* .. Scalar Arguments .. +* INTEGER IHI, ILO, ISPEC, LWORK, N +* CHARACTER NAME*( * ), OPTS*( * ) +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> This program sets problem and machine dependent parameters +*> useful for xHSEQR and its subroutines. It is called whenever +*> ILAENV is called with 12 <= ISPEC <= 16 +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] ISPEC +*> \verbatim +*> ISPEC is integer scalar +*> ISPEC specifies which tunable parameter IPARMQ should +*> return. +*> +*> ISPEC=12: (INMIN) Matrices of order nmin or less +*> are sent directly to xLAHQR, the implicit +*> double shift QR algorithm. NMIN must be +*> at least 11. +*> +*> ISPEC=13: (INWIN) Size of the deflation window. +*> This is best set greater than or equal to +*> the number of simultaneous shifts NS. +*> Larger matrices benefit from larger deflation +*> windows. 
+*> +*> ISPEC=14: (INIBL) Determines when to stop nibbling and +*> invest in an (expensive) multi-shift QR sweep. +*> If the aggressive early deflation subroutine +*> finds LD converged eigenvalues from an order +*> NW deflation window and LD.GT.(NW*NIBBLE)/100, +*> then the next QR sweep is skipped and early +*> deflation is applied immediately to the +*> remaining active diagonal block. Setting +*> IPARMQ(ISPEC=14) = 0 causes TTQRE to skip a +*> multi-shift QR sweep whenever early deflation +*> finds a converged eigenvalue. Setting +*> IPARMQ(ISPEC=14) greater than or equal to 100 +*> prevents TTQRE from skipping a multi-shift +*> QR sweep. +*> +*> ISPEC=15: (NSHFTS) The number of simultaneous shifts in +*> a multi-shift QR iteration. +*> +*> ISPEC=16: (IACC22) IPARMQ is set to 0, 1 or 2 with the +*> following meanings. +*> 0: During the multi-shift QR sweep, +*> xLAQR5 does not accumulate reflections and +*> does not use matrix-matrix multiply to +*> update the far-from-diagonal matrix +*> entries. +*> 1: During the multi-shift QR sweep, +*> xLAQR5 and/or xLAQRaccumulates reflections and uses +*> matrix-matrix multiply to update the +*> far-from-diagonal matrix entries. +*> 2: During the multi-shift QR sweep. +*> xLAQR5 accumulates reflections and takes +*> advantage of 2-by-2 block structure during +*> matrix-matrix multiplies. +*> (If xTRMM is slower than xGEMM, then +*> IPARMQ(ISPEC=16)=1 may be more efficient than +*> IPARMQ(ISPEC=16)=2 despite the greater level of +*> arithmetic work implied by the latter choice.) +*> \endverbatim +*> +*> \param[in] NAME +*> \verbatim +*> NAME is character string +*> Name of the calling subroutine +*> \endverbatim +*> +*> \param[in] OPTS +*> \verbatim +*> OPTS is character string +*> This is a concatenation of the string arguments to +*> TTQRE. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is integer scalar +*> N is the order of the Hessenberg matrix H. +*> \endverbatim +*> +*> \param[in] ILO +*> \verbatim +*> ILO is INTEGER +*> \endverbatim +*> +*> \param[in] IHI +*> \verbatim +*> IHI is INTEGER +*> It is assumed that H is already upper triangular +*> in rows and columns 1:ILO-1 and IHI+1:N. +*> \endverbatim +*> +*> \param[in] LWORK +*> \verbatim +*> LWORK is integer scalar +*> The amount of workspace available. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Little is known about how best to choose these parameters. +*> It is possible to use different values of the parameters +*> for each of CHSEQR, DHSEQR, SHSEQR and ZHSEQR. +*> +*> It is probably best to choose different parameters for +*> different matrices and different parameters at different +*> times during the iteration, but this has not been +*> implemented --- yet. +*> +*> +*> The best choices of most of the parameters depend +*> in an ill-understood way on the relative execution +*> rate of xLAQR3 and xLAQR5 and on the nature of each +*> particular eigenvalue problem. Experiment may be the +*> only practical way to determine which choices are most +*> effective. +*> +*> Following is a list of default values supplied by IPARMQ. +*> These defaults may be adjusted in order to attain better +*> performance in any particular computational environment. +*> +*> IPARMQ(ISPEC=12) The xLAHQR vs xLAQR0 crossover point. 
+*> Default: 75. (Must be at least 11.) +*> +*> IPARMQ(ISPEC=13) Recommended deflation window size. +*> This depends on ILO, IHI and NS, the +*> number of simultaneous shifts returned +*> by IPARMQ(ISPEC=15). The default for +*> (IHI-ILO+1).LE.500 is NS. The default +*> for (IHI-ILO+1).GT.500 is 3*NS/2. +*> +*> IPARMQ(ISPEC=14) Nibble crossover point. Default: 14. +*> +*> IPARMQ(ISPEC=15) Number of simultaneous shifts, NS. +*> a multi-shift QR iteration. +*> +*> If IHI-ILO+1 is ... +*> +*> greater than ...but less ... the +*> or equal to ... than default is +*> +*> 0 30 NS = 2+ +*> 30 60 NS = 4+ +*> 60 150 NS = 10 +*> 150 590 NS = ** +*> 590 3000 NS = 64 +*> 3000 6000 NS = 128 +*> 6000 infinity NS = 256 +*> +*> (+) By default matrices of this order are +*> passed to the implicit double shift routine +*> xLAHQR. See IPARMQ(ISPEC=12) above. These +*> values of NS are used only in case of a rare +*> xLAHQR failure. +*> +*> (**) The asterisks (**) indicate an ad-hoc +*> function increasing from 10 to 64. +*> +*> IPARMQ(ISPEC=16) Select structured matrix multiply. +*> (See ISPEC=16 above for details.) +*> Default: 3. +*> \endverbatim +*> +* ===================================================================== + INTEGER FUNCTION IPARMQ( ISPEC, NAME, OPTS, N, ILO, IHI, LWORK ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER IHI, ILO, ISPEC, LWORK, N + CHARACTER NAME*( * ), OPTS*( * ) +* +* ================================================================ +* .. Parameters .. + INTEGER INMIN, INWIN, INIBL, ISHFTS, IACC22 + PARAMETER ( INMIN = 12, INWIN = 13, INIBL = 14, + $ ISHFTS = 15, IACC22 = 16 ) + INTEGER NMIN, K22MIN, KACMIN, NIBBLE, KNWSWP + PARAMETER ( NMIN = 75, K22MIN = 14, KACMIN = 14, + $ NIBBLE = 14, KNWSWP = 500 ) + REAL TWO + PARAMETER ( TWO = 2.0 ) +* .. +* .. Local Scalars .. + INTEGER NH, NS +* .. +* .. Intrinsic Functions .. + INTRINSIC LOG, MAX, MOD, NINT, REAL +* .. +* .. Executable Statements .. + IF( ( ISPEC.EQ.ISHFTS ) .OR. ( ISPEC.EQ.INWIN ) .OR. + $ ( ISPEC.EQ.IACC22 ) ) THEN +* +* ==== Set the number simultaneous shifts ==== +* + NH = IHI - ILO + 1 + NS = 2 + IF( NH.GE.30 ) + $ NS = 4 + IF( NH.GE.60 ) + $ NS = 10 + IF( NH.GE.150 ) + $ NS = MAX( 10, NH / NINT( LOG( REAL( NH ) ) / LOG( TWO ) ) ) + IF( NH.GE.590 ) + $ NS = 64 + IF( NH.GE.3000 ) + $ NS = 128 + IF( NH.GE.6000 ) + $ NS = 256 + NS = MAX( 2, NS-MOD( NS, 2 ) ) + END IF +* + IF( ISPEC.EQ.INMIN ) THEN +* +* +* ===== Matrices of order smaller than NMIN get sent +* . to xLAHQR, the classic double shift algorithm. +* . This must be at least 11. ==== +* + IPARMQ = NMIN +* + ELSE IF( ISPEC.EQ.INIBL ) THEN +* +* ==== INIBL: skip a multi-shift qr iteration and +* . whenever aggressive early deflation finds +* . at least (NIBBLE*(window size)/100) deflations. ==== +* + IPARMQ = NIBBLE +* + ELSE IF( ISPEC.EQ.ISHFTS ) THEN +* +* ==== NSHFTS: The number of simultaneous shifts ===== +* + IPARMQ = NS +* + ELSE IF( ISPEC.EQ.INWIN ) THEN +* +* ==== NW: deflation window size. ==== +* + IF( NH.LE.KNWSWP ) THEN + IPARMQ = NS + ELSE + IPARMQ = 3*NS / 2 + END IF +* + ELSE IF( ISPEC.EQ.IACC22 ) THEN +* +* ==== IACC22: Whether to accumulate reflections +* . before updating the far-from-diagonal elements +* . and whether to use 2-by-2 block structure while +* . doing it. A small amount of work could be saved +* . 
by making this choice dependent also upon the +* . NH=IHI-ILO+1. +* + IPARMQ = 0 + IF( NS.GE.KACMIN ) + $ IPARMQ = 1 + IF( NS.GE.K22MIN ) + $ IPARMQ = 2 +* + ELSE +* ===== invalid value of ispec ===== + IPARMQ = -1 +* + END IF +* +* ==== End of IPARMQ ==== +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/lsame.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/lsame.f new file mode 100644 index 0000000..315304c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/lsame.f @@ -0,0 +1,125 @@ +*> \brief \b LSAME +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* LOGICAL FUNCTION LSAME( CA, CB ) +* +* .. Scalar Arguments .. +* CHARACTER CA, CB +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> LSAME returns .TRUE. if CA is the same letter as CB regardless of +*> case. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CA +*> \verbatim +*> \endverbatim +*> +*> \param[in] CB +*> \verbatim +*> CA and CB specify the single characters to be compared. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION LSAME( CA, CB ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER CA, CB +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC ICHAR +* .. +* .. Local Scalars .. + INTEGER INTA, INTB, ZCODE +* .. +* .. Executable Statements .. +* +* Test if the characters are equal +* + LSAME = CA.EQ.CB + IF( LSAME ) + $ RETURN +* +* Now test for equivalence if both characters are alphabetic. +* + ZCODE = ICHAR( 'Z' ) +* +* Use 'Z' rather than 'A' so that ASCII can be detected on Prime +* machines, on which ICHAR returns a value with bit 8 set. +* ICHAR('A') on Prime machines returns 193 which is the same as +* ICHAR('A') on an EBCDIC machine. +* + INTA = ICHAR( CA ) + INTB = ICHAR( CB ) +* + IF( ZCODE.EQ.90 .OR. ZCODE.EQ.122 ) THEN +* +* ASCII is assumed - ZCODE is the ASCII code of either lower or +* upper case 'Z'. +* + IF( INTA.GE.97 .AND. INTA.LE.122 ) INTA = INTA - 32 + IF( INTB.GE.97 .AND. INTB.LE.122 ) INTB = INTB - 32 +* + ELSE IF( ZCODE.EQ.233 .OR. ZCODE.EQ.169 ) THEN +* +* EBCDIC is assumed - ZCODE is the EBCDIC code of either lower or +* upper case 'Z'. +* + IF( INTA.GE.129 .AND. INTA.LE.137 .OR. + $ INTA.GE.145 .AND. INTA.LE.153 .OR. + $ INTA.GE.162 .AND. INTA.LE.169 ) INTA = INTA + 64 + IF( INTB.GE.129 .AND. INTB.LE.137 .OR. + $ INTB.GE.145 .AND. INTB.LE.153 .OR. + $ INTB.GE.162 .AND. INTB.LE.169 ) INTB = INTB + 64 +* + ELSE IF( ZCODE.EQ.218 .OR. ZCODE.EQ.250 ) THEN +* +* ASCII is assumed, on Prime machines - ZCODE is the ASCII code +* plus 128 of either lower or upper case 'Z'. +* + IF( INTA.GE.225 .AND. INTA.LE.250 ) INTA = INTA - 32 + IF( INTB.GE.225 .AND. 
INTB.LE.250 ) INTB = INTB - 32 + END IF + LSAME = INTA.EQ.INTB +* +* RETURN +* +* End of LSAME +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq1.f90 b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq1.f90 new file mode 100644 index 0000000..5ee8878 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq1.f90 @@ -0,0 +1,626 @@ +program randomsys1 +implicit none +integer, parameter :: nmax=1000 +real(kind=8), dimension(nmax) :: b, x +real(kind=8), dimension(nmax,nmax) :: a +real(kind=8) :: err +integer :: i, info, lda, ldb, nrhs, n,iter +integer, dimension(nmax) :: ipiv + +real(kind=8), dimension(100) :: d +real(kind=8), dimension(99) :: e +real(kind=8), dimension(400) :: work + +d(1:100) = (/1.8334043365537367D+00, & +1.4451749896846686D+00, & +1.0018566447551758D-01, & +-7.2143260438744417D-01, & +-3.7864653015502087D-01, & +-9.0270111568850808D-01, & +1.2204305489831029D+00, & +-9.7628177811136485D-01, & +8.4199233511256721D-01, & +-2.7938817329922050D-01, & +3.6157779599908046D-01, & +-1.8563411313998144D+00, & +-5.7930081140992240D-01, & +7.4080550463379169D-01, & +1.7021409147402005D+00, & +-5.7992035328196923D-01, & +4.0877426434579855D-01, & +-7.1297236049446144D-01, & +-1.2214095798914903D+00, & +2.9037983248746674D-01, & +7.4685018821608473D-01, & +3.0213735481316539D-01, & +-1.5207207136907624D-01, & +-2.1332671668411556D+00, & +6.8744661834930676D-01, & +-2.0946670404018297D-01, & +-1.5221059713957628D+00, & +1.1117190383859539D+00, & +-6.1175948159744020D-01, & +-4.4149212620857964D-01, & +-5.5702632609947533D-01, & +1.4071858950692646D+00, & +-3.2329881667362437D-01, & +-3.1958092104323499D-01, & +9.0934520529412111D-01, & +9.7881421364746712D-01, & +-5.1202970940327841D-01, & +1.5040024724520102D+00, & +-7.1993831181468571D-01, & +-7.1819661000094503D-01, & +-1.3481185445933910D+00, & +-1.4984426192966893D+00, & +1.1356626654278745D+00, & +1.6427417967661164D+00, & +-1.4184643787388000D+00, & +2.9811560271518989D-01, & +7.8630022575860559D-01, & +-1.8262830018047089D+00, & +6.3058255632564841D-01, & +-2.0692752940382309D-02, & +-7.2726648905906033D-01, & +-1.0461446937034022D+00, & +1.2530345094987356D+00, & +-2.3583665341168443D+00, & +1.9177654334479410D-01, & +-1.3563410975095058D+00, & +-1.0669710425337906D+00, & +1.4840245472507219D+00, & +-6.9185935518981789D-01, & +1.6813910559942205D-01, & +-7.1255209442204559D-01, & +-1.0112797453604008D+00, & +2.8591746998403011D-01, & +-1.9403039239509563D+00, & +-8.1434141084858885D-02, & +1.3873918713367210D+00, & +-8.8212258376548647D-01, & +-1.2253510598547583D+00, & +-3.8677386127356073D-01, & +-1.0262656526479850D+00, & +2.9468734022014376D-01, & +2.3442965677966704D-01, & +1.2219251054024911D+00, & +2.6066505150099868D+00, & +-7.8543147636303856D-01, & +-9.8126277325503253D-01, & +1.1722358680271947D+00, & +-8.1477749181289072D-01, & +1.3437777060446568D-01, & +3.4626341297821356D-01, & +-4.5672026157532375D-01, & +3.0496975959999184D-01, & +3.4636683737604146D-01, & +1.5114807806635011D-01, & +-1.1376177393945328D+00, & +9.3419670621891793D-01, & +7.9186416310239138D-01, & +6.7230421440462595D-01, & +-2.3236847424852280D-01, & +-1.0927137499922757D+00, & +9.8562766620822340D-01, & +-1.1382935431007701D-01, & +-9.2072237463768225D-01, & +6.1142850054965170D-01, & +8.2752057022739134D-03, & +6.7197122515126417D-01, & +-1.1007816668204429D+00, & +-1.2196829073430047D+00, & +-6.1428585523321222D-01, & +6.4599803465517280D-01/) 
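+! e(1:99) holds the off-diagonal of the 100-by-100 bidiagonal test
+! matrix; together with the diagonal in d(1:100) it is the input
+! passed to dlasq1 below.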
+e(1:99) = (/-9.6393084631802151D-01, & +2.5532567563781607D+00, & +8.2492664240014357D-01, & +8.2628261757058474D-01, & +7.3021658879350859D-01, & +3.4014431785419519D-02, & +3.2121571974900542D-01, & +2.5987166374572213D-02, & +-3.1150206355945814D-01, & +1.3429094629249927D+00, & +8.6246434952180806D-01, & +-8.3756967113851388D-01, & +9.5277237959592009D-01, & +1.1717152179539618D+00, & +2.5980977671460709D-01, & +-6.4468162556415265D-01, & +-1.3751364204170078D+00, & +2.9677172586579936D-01, & +-3.7071376979215720D-01, & +8.2912690407496381D-01, & +-8.6820437618589197D-01, & +5.2500961173269689D-01, & +1.0657701704030644D+00, & +-4.6621502244820201D-03, & +-1.9013997092621748D-01, & +1.5098985741543924D-01, & +1.0102557493909003D+00, & +8.8830298507891103D-01, & +2.0464938169302065D+00, & +4.7910192662606277D-01, & +1.4155288808120892D+00, & +-5.8169388172737679D-01, & +-9.8007278321065916D-01, & +2.4369633027015425D-01, & +1.6173163491335715D-01, & +6.6887624704464499D-01, & +-1.6500999383869115D+00, & +1.4380895281962367D+00, & +4.7508565250807777D-01, & +-3.1332991280327299D-01, & +3.1402552392574451D-01, & +5.6246373170551534D-01, & +2.5944662334710866D-01, & +4.8101648688789655D-01, & +1.7823376751423265D+00, & +3.0160656491545923D-01, & +-8.8915960863742050D-01, & +-4.4783548340444157D-01, & +8.9985836172311440D-01, & +-1.5626460660617920D+00, & +8.9972644535054036D-01, & +2.4456452268563592D-01, & +-3.1377944726557985D+00, & +1.6874136691232020D+00, & +2.4791290942030142D-01, & +1.7055713617986679D+00, & +1.7027580566127303D+00, & +-5.2969836953828042D-01, & +-8.6858804294195124D-01, & +7.6588136514601834D-01, & +8.6161822555855139D-01, & +6.5387844189250555D-01, & +7.0164941351276944D-01, & +4.1171318512873312D-01, & +7.6075070364872455D-01, & +8.5708035578209718D-02, & +-4.3558500874018535D-01, & +-6.2302104134015979D-01, & +8.4912051051824700D-01, & +-1.7120108380813925D-01, & +-9.7880552224113848D-01, & +1.1904436348486702D+00, & +7.0273864977367972D-01, & +-1.0213785672492079D+00, & +4.8392839864322634D-02, & +1.2611184618297511D-01, & +5.3330169134056482D-01, & +1.8070298106837654D+00, & +-2.8022831541922144D-01, & +8.0235047640662738D-01, & +-1.2615220404695868D+00, & +1.1878769364434660D+00, & +-2.1059219864297674D-01, & +3.2897539618854971D-01, & +-5.8928028913554642D-01, & +1.9164347352074701D-02, & +2.8035162764822374D-01, & +-9.6622429734784299D-02, & +3.4216241143907045D-01, & +-2.2358052317750254D+00, & +6.6284070879481805D-01, & +7.4316074303777269D-01, & +1.0280848437626724D+00, & +-2.0939898252763922D-01, & +-1.0268515265064981D+00, & +-1.2648527910628871D-01, & +4.8663846308033204D-01, & +1.2270171407392749D+00, & +-1.6189022502021406D+00/) +work(1:400) = (/6.0466028797961957D-01, & +9.4050908804501243D-01, & +6.6456005321849043D-01, & +4.3771418718698019D-01, & +4.2463749707126569D-01, & +6.8682307286710942D-01, & +6.5637019217476222D-02, & +1.5651925473279124D-01, & +9.6969518914484562D-02, & +3.0091186058528707D-01, & +5.1521262850206540D-01, & +8.1363996099009683D-01, & +2.1426387258237492D-01, & +3.8065718929968601D-01, & +3.1805817433032985D-01, & +4.6888984490242319D-01, & +2.8303415118044517D-01, & +2.9310185733681576D-01, & +6.7908467592021626D-01, & +2.1855305259276428D-01, & +2.0318687664732285D-01, & +3.6087141685690599D-01, & +5.7067327607102258D-01, & +8.6249143744788637D-01, & +2.9311424455385804D-01, & +2.9708256355629153D-01, & +7.5257303555161192D-01, & +2.0658266191369859D-01, & +8.6533501300156102D-01, & +6.9671916574663473D-01, & 
+5.2382030605000085D-01, & +2.8303083325889995D-02, & +1.5832827774512764D-01, & +6.0725343954551536D-01, & +9.7524161886057836D-01, & +7.9453623373871976D-02, & +5.9480859768306260D-01, & +5.9120651313875290D-02, & +6.9202458735311201D-01, & +3.0152268100655999D-01, & +1.7326623818270528D-01, & +5.4109985500873525D-01, & +5.4415557300088502D-01, & +2.7850762181610883D-01, & +4.2315220157182809D-01, & +5.3058571535070520D-01, & +2.5354050051506050D-01, & +2.8208099496492467D-01, & +7.8860491501934493D-01, & +3.6180548048031691D-01, & +8.8054312274161706D-01, & +2.9711226063977081D-01, & +8.9436172933045366D-01, & +9.7454618399116566D-02, & +9.7691686858626237D-01, & +7.4290998949843021D-02, & +2.2228941700678773D-01, & +6.8107831239257088D-01, & +2.4151508854715265D-01, & +3.1152244431052484D-01, & +9.3284642851843402D-01, & +7.4184895999182299D-01, & +8.0105504265266125D-01, & +7.3023147729480831D-01, & +1.8292491645390843D-01, & +4.2835708180680782D-01, & +8.9699195756187267D-01, & +6.8265348801324377D-01, & +9.7892935557668759D-01, & +9.2221225892172687D-01, & +9.0837275353887081D-02, & +4.9314199770488037D-01, & +9.2698680357441421D-01, & +9.5494544041678175D-01, & +3.4795396362822290D-01, & +6.9083883150567893D-01, & +7.1090719529999513D-01, & +5.6377959581526438D-01, & +6.4948946059294044D-01, & +5.5176504901277490D-01, & +7.5582350749159777D-01, & +4.0380328579570035D-01, & +1.3065111702897217D-01, & +9.8596472934024670D-01, & +8.9634174539621614D-01, & +3.2208397052088172D-01, & +7.2114776519267410D-01, & +6.4453978250932942D-01, & +8.5520507541911234D-02, & +6.6957529769977453D-01, & +6.2272831736370449D-01, & +3.6969284363982191D-01, & +2.3682254680548520D-01, & +5.3528189063440612D-01, & +1.8724610140105305D-01, & +2.3884070280531861D-01, & +6.2809817121836331D-01, & +1.2675292937260130D-01, & +2.8133029380535923D-01, & +4.1032284435628247D-01, & +4.3491247389145765D-01, & +6.2509502830053043D-01, & +5.5014692050772329D-01, & +6.2360882645293014D-01, & +7.2918072673429812D-01, & +8.3053391899480622D-01, & +5.1381551612136129D-04, & +7.3606860149543141D-01, & +3.9998376285699544D-01, & +4.9786811334270198D-01, & +6.0397810228292748D-01, & +4.0961827788499267D-01, & +2.9671281274886468D-02, & +1.9038945142366389D-03, & +2.8430411748625642D-03, & +9.1582131461295702D-01, & +5.8983418500491935D-01, & +5.5939244907101404D-01, & +8.1540517093336062D-01, & +8.7801175865240000D-01, & +4.5844247857565062D-01, & +6.0016559532333080D-01, & +2.6265150609689439D-02, & +8.4583278724804167D-01, & +2.4969320116349378D-01, & +6.4178429079958299D-01, & +2.4746660783662855D-01, & +1.7365584472313275D-01, & +5.9262375321244554D-01, & +8.1439455096702107D-01, & +6.9383813651720949D-01, & +3.0322547833006870D-02, & +5.3921010589094598D-01, & +9.7567481498731645D-01, & +7.5076305647959851D-01, & +2.9400631279501488D-01, & +7.5316127773675856D-01, & +1.5096404497960700D-01, & +3.5576726540923664D-01, & +8.3193085296981628D-01, & +2.3183004193767690D-01, & +6.2783460500002275D-01, & +4.9839430127597562D-01, & +8.9836089260366833D-02, & +2.5193959794895041D-02, & +3.9221618315402479D-01, & +5.8938308640079917D-01, & +9.2961163544903025D-01, & +5.7208680144308399D-01, & +5.8857634514348212D-01, & +4.1176268834501623D-01, & +5.5258038981424384D-01, & +4.9160739613162047D-01, & +9.5795391353751358D-01, & +7.9720854091080284D-01, & +1.0738111282075208D-01, & +7.8303497339600214D-01, & +3.9325099922888668D-01, & +1.3041384617379179D-01, & +1.9003276633920804D-01, & +7.3982578101583363D-01, & 
+6.5404140923127974D-01, & +9.8383788985732593D-02, & +5.2038028571222783D-01, & +9.9729663719935122D-02, & +1.5184340208190175D-01, & +7.6190262303755044D-02, & +3.1520808532012451D-01, & +1.5965092146489504D-01, & +1.3780406161952607D-01, & +3.2261068286779754D-01, & +5.3907451703947940D-01, & +5.7085162734549566D-01, & +5.1278175811108151D-01, & +6.8417513009745512D-01, & +6.5304020513536076D-01, & +5.2449975954986505D-01, & +6.5427013442414605D-01, & +7.1636837490167116D-01, & +6.3664421403817983D-01, & +1.2825909106361078D-02, & +3.0682195787138565D-02, & +9.8030874806304999D-02, & +3.6911170916434483D-01, & +8.2645412563474197D-01, & +3.4768170859156955D-01, & +3.4431501772636058D-01, & +2.5299982364784412D-01, & +2.1647114665497036D-01, & +5.5500213563479417D-01, & +4.0207084527183062D-01, & +5.0649706367641834D-01, & +1.6867966833433606D-01, & +3.3136826030698385D-01, & +8.2792809615055885D-01, & +7.0028787314581509D-01, & +5.7926259664335768D-02, & +9.9915949022033324D-01, & +4.1154036322047599D-01, & +1.1167463676480495D-01, & +7.8075408455849260D-01, & +9.2117624440742188D-02, & +5.3494624494407637D-02, & +7.1469581589162956D-01, & +2.5076227542918023D-01, & +8.4863292090315690D-01, & +9.7388187407067284D-01, & +2.1256094905031958D-01, & +2.1533783325605065D-02, & +9.4519476038882588D-01, & +9.2970155499924934D-02, & +6.4583337452397671D-01, & +3.1188554282705405D-01, & +4.4846436394045647D-01, & +4.8723924858036949D-01, & +8.2479676511350006D-02, & +6.7182910623463954D-01, & +4.0018828942364343D-01, & +9.0027514726431157D-01, & +9.4988320610125321D-01, & +3.1933126760711733D-01, & +4.9938549375241320D-01, & +4.0043231714181288D-01, & +1.9808670325451940D-02, & +6.4503886601944815D-01, & +4.2868843006993296D-01, & +3.3959675138730994D-01, & +8.8744750085050050D-01, & +2.3632747430436052D-01, & +7.6500821493327975D-01, & +3.5754647436084384D-02, & +7.2757725604152290D-01, & +6.2583662695812525D-01, & +5.1308750608785669D-01, & +7.2448356792351315D-02, & +7.2422905845916841D-01, & +8.7984484630570914D-01, & +9.7776347735771851D-01, & +8.4750026226468134D-01, & +8.3219793814993315D-01, & +2.4784452318699535D-01, & +9.1339906293647088D-01, & +7.5037210134653420D-02, & +8.3510380115435290D-01, & +6.2933169164530067D-01, & +7.5174057889673473D-01, & +6.3200343378879975D-01, & +9.6934213238731665D-02, & +1.4827369494876504D-02, & +5.8383474186253115D-01, & +6.8756195202154743D-02, & +9.9827381100849455D-01, & +6.4918841659842363D-01, & +9.8546557863324791D-01, & +8.3480576021921249D-01, & +3.3205608571906026D-01, & +6.6139318058334262D-01, & +9.5602062659660969D-01, & +3.1051027622482125D-01, & +1.8439069400202679D-01, & +9.6709434137177297D-01, & +8.3324181552815457D-01, & +3.0954845052732810D-01, & +8.0587176753764456D-01, & +4.1732584219038238D-01, & +7.1853044935277477D-01, & +4.0673677545039083D-01, & +8.9580326774414576D-01, & +9.5817636260259365D-01, & +1.8713221139656417D-02, & +7.9167230908208319D-01, & +4.2355315388584103D-01, & +1.5181277223073395D-02, & +4.3269824007906393D-01, & +9.0477623706573340D-01, & +8.5570441457488644D-01, & +4.2921642176334200D-02, & +6.5903053300775438D-01, & +3.4785904313005395D-01, & +5.0348679004869112D-01, & +8.3994742117055976D-01, & +2.3109568410543832D-02, & +1.2436351859954159D-01, & +2.6117561918821841D-01, & +8.3494750649349414D-01, & +3.1480479595597533D-01, & +7.6812064740880894D-03, & +8.9975012571752733D-01, & +3.7026753645051064D-01, & +1.0019940926941497D-01, & +6.4320402657020315D-01, & +7.6988908998308336D-01, & 
+7.9112533566198451D-01, & +2.6238190747072776D-01, & +3.4686388037925503D-01, & +2.1465371537694145D-01, & +8.2209289717657175D-01, & +3.5113429966521320D-01, & +5.9919425250588099D-01, & +5.7835125693111211D-01, & +4.1358098797631293D-01, & +1.1985050890286310D-01, & +9.1161370679159903D-01, & +5.3785580105748208D-02, & +2.2891758676059876D-01, & +3.2417396306138829D-01, & +3.5076512764716117D-01, & +3.4928874777426255D-01, & +3.0380212985436572D-01, & +9.6874615996581170D-01, & +6.7152655046083776D-01, & +2.0794312837315651D-01, & +9.6313940120247044D-01, & +3.0220237504213365D-01, & +8.0794108095480799D-01, & +1.3408416275024179D-01, & +9.4776028919455635D-01, & +6.4086482116825383D-01, & +9.5325875425035178D-01, & +8.0987422593395209D-01, & +1.8159084675756379D-01, & +9.4275737153737327D-01, & +8.3124103554376771D-01, & +4.9468043578205978D-01, & +8.5531034647693982D-01, & +7.1074391181909824D-01, & +2.7349475629159786D-01, & +4.0763287189198161D-01, & +9.0976128251911847D-01, & +9.4439713870030451D-01, & +4.9863245185560190D-01, & +2.8863831012730923D-01, & +9.7589525649963815D-01, & +4.5258447627808124D-01, & +4.4990698677957075D-02, & +3.1536198151820755D-01, & +9.5190614812037189D-01, & +7.5156308247423609D-01, & +5.3579099898961424D-01, & +6.6971458883510748D-01, & +8.6517499748328641D-01, & +4.5888445390388938D-01, & +5.7855090249582031D-01, & +4.8152982184966137D-01, & +5.5061576198318274D-01, & +9.5062324380815433D-01, & +5.0986542047295536D-01, & +7.4251472966182985D-01, & +4.9079401441435533D-01, & +6.6151414870689360D-02, & +2.6249066264989940D-01, & +9.2546794407799982D-01, & +3.7148665165822231D-01, & +4.0941940003107308D-01, & +4.1575196973399631D-01, & +9.7261599736539445D-02, & +9.0162762447969347D-01, & +4.4446597981328932D-03, & +2.7392454335102678D-01, & +1.0930666111680035D-01, & +8.5544841289295426D-01, & +2.5705535663902546D-01, & +9.8913209203202213D-01, & +9.2641142236812712D-01, & +1.7094603208839290D-01, & +3.0388712489325242D-01, & +5.3345144978115477D-01, & +1.7648961347647024D-01, & +8.1359077477652830D-01, & +7.0513712380125892D-01, & +2.5720755742139950D-01, & +2.5036892046498466D-01, & +3.3509436689927874D-01, & +7.5124063162526056D-01, & +4.8797826077860845D-03, & +8.4099320643626019D-01, & +2.2957358869665739D-01, & +1.3285547727582237D-02, & +9.4993740716879371D-01, & +8.9937146465701423D-01, & +9.6262420114388625D-01, & +4.3000361954927006D-02, & +7.1266261216467264D-01, & +5.1094098258212241D-02, & +4.0753210485857738D-01, & +4.7569737399615403D-01, & +3.4746838606940983D-01, & +4.0719938711096422D-02, & +5.9756620514440806D-01, & +2.6012467360309705D-01, & +8.3285585557738717D-01, & +9.6049750529821787D-01, & +9.3670756890653750D-01, & +2.2932023844733959D-01, & +7.2031310018914962D-01, & +7.5648232426876405D-01, & +4.5015392507594826D-01, & +3.3897738839543617D-01, & +4.7249205225111501D-01, & +9.8599436000817042D-01/) +n = 100 +info = 0 + +open(unit = 4, STATUS='REPLACE', file = "gen4tests.txt") +open(unit = 3, STATUS='REPLACE', file = "gen3tests.txt") +open(unit = 5, STATUS='REPLACE', file = "gen5tests.txt") + +call dlasq1(n, d, e, work, info) + +close(5) +close(3) +close(4) + +end \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq2.f90 b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq2.f90 new file mode 100644 index 0000000..c908f7d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq2.f90 @@ -0,0 +1,171 @@ +! 
$CLASSHG/codes/lapack/random/randomsys1.f90 + +program randomsys1 +implicit none +integer, parameter :: nmax=1000 +real(kind=8), dimension(nmax) :: b, x +real(kind=8), dimension(nmax,nmax) :: a +real(kind=8) :: err +integer :: i, info, lda, ldb, nrhs, n,iter +integer, dimension(nmax) :: ipiv + +real(kind=8), dimension(84) :: z + +z(1:84) = (/2.3962280929192509D-01, & +6.4346010490580374D-01, & +5.4886677265274941D-01, & +1.8435046456575191D-01, & +4.2690013698472401D-01, & +6.2760930131666248D-01, & +8.7243363936287288D-02, & +5.3710399529030350D-01, & +8.4474604576675638D-01, & +4.0947669295022632D-01, & +2.0315141244999760D-01, & +8.2714283129979482D-01, & +9.6582843029739029D-01, & +3.7773539690481128D-01, & +6.4713340225873406D-01, & +6.7774351756798612D-01, & +4.4568905572928591D-01, & +1.7208265726837521D-01, & +3.5202074329011412D-01, & +3.2913965083413110D-01, & +7.4682622623737438D-01, & +2.7151044969605542D-01, & +5.6129178633962351D-01, & +1.3244294692867761D-01, & +7.9837660640147112D-01, & +2.0595995860196409D-01, & +2.6555102564257749D-02, & +1.9623902644848701D-01, & +7.8452468963715605D-03, & +8.2177375597112101D-01, & +9.4444982187044924D-01, & +4.6799576958310329D-01, & +1.1359180530395520D-01, & +1.6049482525711320D-01, & +8.4523811016138373D-01, & +8.5370708147160490D-01, & +3.0139325968162661D-01, & +2.5117154067021252D-01, & +2.0086476380364379D-01, & +2.6078975734443288D-01, & +2.5710509100784901D-01, & +6.2861740244633146D-01, & +4.7397018497236110D-01, & +8.6989475348440182D-01, & +6.4860795895959611D-01, & +8.8943219043019872D-01, & +7.9524758401855578D-01, & +8.8016617212154102D-01, & +2.1472721943810300D-01, & +9.5183566464760194D-01, & +7.1911522894148772D-01, & +1.1065249711733080D-01, & +9.5745872307341484D-01, & +6.4694742306793951D-01, & +5.8182607574435841D-01, & +1.2922028370755401D-02, & +2.7617229134380777D-01, & +8.0045359255855852D-01, & +3.2789738555900377D-01, & +5.7940374386642890D-01, & +2.3961554431041750D-02, & +6.6990484757537938D-01, & +1.8144929371280860D-01, & +2.8661849362222419D-01, & +6.5845572125062690D-01, & +5.8896512277754309D-01, & +6.3697665190607378D-02, & +1.3281935295985961D-01, & +7.4166314172628711D-01, & +5.0033182787066932D-01, & +3.1440772963953763D-02, & +9.9073931564031925D-01, & +3.4236965949145941D-03, & +6.9929220728508257D-01, & +7.5905196170645428D-01, & +2.5421825141163590D-01, & +3.3146571845550887D-01, & +5.7286596840196446D-01, & +9.9158547964144228D-01, & +4.8967497301052382D-01, & +5.6113697207961410D-01, & +6.0641422150435165D-01, & +8.5675901647669139D-02, & +2.5817319326781851D-01/) +n = 21 +info = 0 + +open(unit = 2,STATUS='REPLACE', file = "gen2tests.txt") +open(unit = 4, STATUS='REPLACE', file = "gen4tests.txt") +open(unit = 3,STATUS='REPLACE', file = "gen3tests.txt") +open(unit = 5,STATUS='REPLACE', file = "gen5tests.txt") + +write(2,*) "{" +write(2,'(9999(g0))',advance="no") "z: []float64{" +do i = 1, 4*n +write (2,'(99999(e24.16,a))',advance="no") z(i), "," +end do +write (2,*) "}," +write(2,*) "n: " ,n,"," + + +call dlasq2(n, z, info) + +write(2,'(9999(g0))', advance="no") "zOut: []float64{" +do i = 1, 4*n +write (2,'(99999(e24.16,a))', advance="no") z(i), "," +end do +write (2,*) "}," +write(2,*) "info:", info, "," +write(2,*) "}," + +print *, "Done fixed dlasq2" + +close(5) +close(3) +close(4) +close(2) + +! 
For random tests +n = 21 + +open(unit = 2, STATUS='REPLACE', file = "gen2tests.txt") +open(unit = 4, STATUS='REPLACE', file = "gen4tests.txt") +open(unit = 3, STATUS='REPLACE', file = "gen3tests.txt") +open(unit = 5, STATUS='REPLACE', file = "gen5tests.txt") + +do iter = 1, 2 + +call random_number(z(1:4*n)) + +write(2,*) "{" +write(2,'(9999(g0))', advance="no") "z: []float64{" +do i = 1, 4*n +write (2,'(99999(e24.16,a))', advance="no") z(i), "," +end do +write (2,*) "}," +write(2,*) "n: " ,n,"," + +call dlasq2(n, z, info) + +write(2,'(9999(g0))', advance="no") "zOut: []float64{" +do i = 1, 4*n + write (2,'(99999(e24.16,a))', advance="no") z(i), "," +end do +write (2,*) "}," +write(2,*) "info:", info, "," +write(2,*) "}," + +print *, "Done with dlasq2" + +end do + +close(5) +close(3) +close(4) +close(2) + +end program randomsys1 \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq3.f90 b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq3.f90 new file mode 100644 index 0000000..8cc5cf0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq3.f90 @@ -0,0 +1,132 @@ +program testdlasq3 +implicit none + +! Dlasq3 tests +integer :: i0, n0, pp,ttype, nFail,iter,nDiv,n +real(kind=8), dimension(84) :: z +real(kind=8) :: dmin,desig,qmax,dmin1,dmin2,dn,dn1,dn2,g,tau,sigma +logical :: ieee + +n = 84 + +ieee = .True. + +z(1:84) = (/1.9143652461808209D+00, & +1.5643842977038900D+00, & +2.4933891621438989D-01, & +3.4998094847693051D-01, & +1.3159965131315450D+00, & +1.3638621124906269D+00, & +9.8984666119707590D-02, & +2.0147331685530781D-01, & +6.0239739795872871D-01, & +6.4655447927417942D-01, & +2.2100334106387809D-03, & +5.4827584804256833D-02, & +9.8618572336789667D-02, & +2.4281908107454921D-02, & +4.7563214844548191D-01, & +7.6546697639973529D-02, & +2.5887481436771148D-01, & +6.1277840695087704D-01, & +1.0786113766900041D-01, & +1.2172855586231640D-01, & +6.4428964922552456D-01, & +2.2938358048981550D-01, & +6.2032304866397048D-01, & +5.2276720640470942D-01, & +3.6956606786075852D-01, & +7.6452331847458654D-01, & +5.3788380542522651D-01, & +2.2536579805014259D-01, & +3.5625331812646233D-01, & +8.8204867223354833D-01, & +2.2221324964361449D-02, & +1.2088451318140349D-02, & +1.2750943030216850D+00, & +6.5487468521633574D-01, & +1.6473243548212180D-01, & +6.4244094276971109D-01, & +1.0075305765438660D+00, & +3.2695517365467008D-01, & +3.4538816017831181D-01, & +8.4530783837131718D-01, & +2.6793917191534039D-01, & +4.1167148387782809D-01, & +7.3286777366837230D-01, & +2.0165584821582411D-01, & +8.3608281383074101D-01, & +9.7375794521953263D-01, & +4.8136607095928219D-01, & +5.9519264227958080D-01, & +6.4953705136764595D-01, & +6.7618762481481709D-01, & +2.3254758802226480D-01, & +4.5471549751211121D-01, & +1.9936248028938069D-01, & +3.3218193673422552D-01, & +3.7823189169112570D-01, & +9.9728131577419957D-02, & +9.8304494035037460D-01, & +7.5610809968448423D-01, & +4.4297338640403672D-01, & +6.0516873235701607D-01, & +1.1732795506024030D+00, & +7.1957244803166864D-01, & +5.0355240691445868D-01, & +8.9668048897477137D-01, & +3.0589803950585209D-01, & +6.5888323539286620D-01, & +3.0146344334154529D-01, & +1.5056721102744461D-01, & +1.2894222375675779D+00, & +6.1246453109936005D-01, & +7.5833643057994404D-01, & +9.7842114980976291D-01, & +4.9778147794615712D-01, & +9.9938135774918690D-01, & +2.8414688478625982D-01, & +2.5673655077691432D-01, & +9.2575397942057647D-01, & 
+5.5092683856146663D-01, & +5.2313556054509897D-05, & +6.5897402564536967D-01, & +2.1178692213810329D-05, & +7.3492248268320243D-05, & +0.0000000000000000D+00, & +0.0000000000000000D+00/) +i0 = 1 +n0 = 21 +pp = 0 +dmin = -0.0000000000000000D+00 +sigma = 0.0000000000000000D+00 +desig = 0.0000000000000000D+00 +qmax = 2.1637041623952107D+00 +nFail = 0 +iter = 2 +nDiv = 40 +ttype = 0 +dmin1 = 0.0000000000000000D+00 +dmin2 = 0.0000000000000000D+00 +dn = 0.0000000000000000D+00 +dn1 = 0.0000000000000000D+00 +dn2 = 0.0000000000000000D+00 +g = 0.0000000000000000D+00 +tau = 0.0000000000000000D+00 + +print *, "Starting Dlasq3" +open(unit = 4, file = "gen4tests.txt") +open(unit = 5, file = "gen5tests.txt") +call Dlasq3(i0, n0, z, pp, dmin, sigma, desig, qmax, nFail, iter, nDiv, ieee, ttype, dmin1, dmin2, dn, dn1, dn2, g, tau) +close(4) +close(5) + +print *, "Done calling dlasq3" +print *, Z(1:n) + +print *, I0, N0, PP +print *, DMIN, DESIG, SIGMA, QMAX +print *, NFAIL, ITER, NDIV, TTYPE +print *, DMIN1, DMIN2, DN, DN1, DN2, G, TAU + +end \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq4.f90 b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq4.f90 new file mode 100644 index 0000000..3c84983 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/testdlasq4.f90 @@ -0,0 +1,435 @@ +program testdlasq4 +implicit none + +! Dlasq4 tests +integer :: i0, n0, pp, ttype, iter, nDiv, n, n0in +real(kind=8), dimension(401) :: z +real(kind=8) :: dmin, desig, qmax, dmin1, dmin2, dn, dn1, dn2, g, tau, sigma +logical :: ieee + +z(1:401) = (/2.9266499599402414D+02, & +2.9276699640004830D+02, & +2.9210200041216615D+02, & +2.9171973344922628D+02, & +2.9254120660184350D+02, & +2.9246923723528141D+02, & +2.8739845025761718D+02, & +2.8713781898215319D+02, & +2.9216230331620721D+02, & +2.9216609319427272D+02, & +2.9037912623973870D+02, & +2.9019341222762625D+02, & +2.9184732870824223D+02, & +2.9211597271231784D+02, & +2.9133173969911888D+02, & +2.9116703928303986D+02, & +2.9158395177813247D+02, & +2.9142053084921997D+02, & +2.8936184155474774D+02, & +2.9014388604362898D+02, & +2.9216722380092682D+02, & +2.9216917065163557D+02, & +2.9033857172870131D+02, & +2.9017428768626024D+02, & +2.9187084534700494D+02, & +2.9185392666684271D+02, & +2.8851014988393416D+02, & +2.8849290783118727D+02, & +2.9182506564169046D+02, & +2.9184679532940385D+02, & +2.9022222656964715D+02, & +2.9051820223150668D+02, & +2.9219746119018231D+02, & +2.9219997046052623D+02, & +2.9076912988009974D+02, & +2.9110108536012547D+02, & +2.9226281758828725D+02, & +2.9246883105892533D+02, & +2.9221612201279271D+02, & +2.9059157967819368D+02, & +2.9112833071623868D+02, & +2.9210680427275844D+02, & +2.9199887004058536D+02, & +2.9151030826460186D+02, & +2.9154564759046622D+02, & +2.9212462005167004D+02, & +2.9212108612522559D+02, & +2.9225720737270348D+02, & +2.9226471402909704D+02, & +2.9186949422820902D+02, & +2.9179442772569342D+02, & +2.9025839700680774D+02, & +2.9028281327393216D+02, & +2.9168280284451140D+02, & +2.9168036127921908D+02, & +2.9115346030764749D+02, & +2.9115401102000044D+02, & +2.9212282665919003D+02, & +2.9212277159409672D+02, & +2.9212554020600300D+02, & +2.9212559651286460D+02, & +2.9050907549403615D+02, & +2.9050344542208029D+02, & +2.9211004769628386D+02, & +2.9211127836880860D+02, & +2.9159257985266015D+02, & +2.9158027318883302D+02, & +2.9042716308833894D+02, & +2.9043622253245718D+02, & +2.9134221002346220D+02, & 
+2.9134130414047053D+02, & +2.9193179671375327D+02, & +2.9193426987096007D+02, & +2.9145366956406895D+02, & +2.9145119646828226D+02, & +2.9144876630819033D+02, & +2.9145122608379586D+02, & +2.9058395601500280D+02, & +2.9055935887314877D+02, & +2.9123982459393329D+02, & +2.9125037059550806D+02, & +2.9111624624209884D+02, & +2.9110570030194424D+02, & +2.9250004994137356D+02, & +2.9254994096967755D+02, & +2.9166071282413282D+02, & +2.9116180260251298D+02, & +2.8862845048087325D+02, & +2.8925662460652461D+02, & +2.9012609983518610D+02, & +2.9010672249354360D+02, & +2.9142999275190090D+02, & +2.9150806548221971D+02, & +2.9078477842534176D+02, & +2.8840517363552317D+02, & +2.8946267131307206D+02, & +2.9189614533792309D+02, & +2.9232617409967031D+02, & +2.9223702224333306D+02, & +2.9149978725880743D+02, & +2.9168777367421643D+02, & +2.9213296087125275D+02, & +2.9211416223585388D+02, & +2.9151431726536367D+02, & +2.9159900781717892D+02, & +2.9215362494632291D+02, & +2.9214515589728342D+02, & +2.9229900293632022D+02, & +2.9231644811476662D+02, & +2.9119079594655176D+02, & +2.9016344223508082D+02, & +2.9040052826233938D+02, & +2.9146756071890462D+02, & +2.9216592215429023D+02, & +2.9212317137116514D+02, & +2.9220217331561315D+02, & +2.9227234439098311D+02, & +2.9210931773273273D+02, & +2.9139146663504778D+02, & +2.9047426145779627D+02, & +2.9113243832972501D+02, & +2.9163901100086372D+02, & +2.9155399887833846D+02, & +2.9145039523229758D+02, & +2.9151950918932886D+02, & +2.9116352441156198D+02, & +2.9094410515950784D+02, & +2.9056083843332891D+02, & +2.9097140423254604D+02, & +2.9185385310744653D+02, & +2.9181279658894493D+02, & +2.9031697123096876D+02, & +2.9033298229128292D+02, & +2.9212507489852788D+02, & +2.9212491479406674D+02, & +2.9123248207180859D+02, & +2.9123278004625678D+02, & +2.9210890357464513D+02, & +2.9210887378334229D+02, & +2.9240010366769212D+02, & +2.9240021314868147D+02, & +2.9212046105624000D+02, & +2.9212035158139264D+02, & +2.9215242607939217D+02, & +2.9215256473001551D+02, & +2.9086373488934720D+02, & +2.9084987044121846D+02, & +2.9162085042968522D+02, & +2.9163097873649599D+02, & +2.9113228736441005D+02, & +2.9112215911901944D+02, & +2.9210393504686402D+02, & +2.9211255232953414D+02, & +2.9191445519642213D+02, & +2.9182828243114074D+02, & +2.9127013482394887D+02, & +2.9129823908392501D+02, & +2.9220880435635905D+02, & +2.9220599393650343D+02, & +2.9144676491963560D+02, & +2.9145286023012017D+02, & +2.9148058174349006D+02, & +2.9147448649442555D+02, & +2.9186619188590822D+02, & +2.9187731897876392D+02, & +2.9151148084374620D+02, & +2.9150035381231066D+02, & +2.9224292392669071D+02, & +2.9224832614828313D+02, & +2.9140236136264468D+02, & +2.9134833920814032D+02, & +2.9241531971951883D+02, & +2.9247972954055604D+02, & +2.9164613863843044D+02, & +2.8920404894780614D+02, & +2.8913485112658549D+02, & +2.9142701775363003D+02, & +2.9213449277295280D+02, & +2.9191925854858397D+02, & +2.9217029610406047D+02, & +2.9224915292105180D+02, & +2.9220325626287905D+02, & +2.9212439945202976D+02, & +2.9057154227612733D+02, & +2.9093384291672965D+02, & +2.9183595231931184D+02, & +2.9179972231667176D+02, & +2.9133916987034183D+02, & +2.9135453536038978D+02, & +2.9047085843212886D+02, & +2.9031720414585078D+02, & +2.9113106705127024D+02, & +2.9119455617800747D+02, & +2.9230127009862616D+02, & +2.9229492119209442D+02, & +2.9098170102485551D+02, & +2.9110028345622749D+02, & +2.9215236246918653D+02, & +2.9215215113995436D+02, & +2.9124650182241129D+02, & +2.9124684419934567D+02, & 
+2.9216244260160767D+02, & +2.9216240837005626D+02, & +2.9314769094184447D+02, & +2.9314772207133655D+02, & +2.9212215646682506D+02, & +2.9212184517804621D+02, & +2.9224739389310008D+02, & +2.9224802593241526D+02, & +2.8978611958992855D+02, & +2.8915408641678846D+02, & +2.9118533307571403D+02, & +2.9194553410039072D+02, & +2.9244688594134544D+02, & +2.9237086584501975D+02, & +2.9189894464487475D+02, & +2.9210832103555418D+02, & +2.9227685116407457D+02, & +2.9225842459914986D+02, & +2.9224821267514392D+02, & +2.9226591109467802D+02, & +2.9158625025645608D+02, & +2.9140926612253548D+02, & +2.9125298964526195D+02, & +2.9136239316241648D+02, & +2.9182040805585694D+02, & +2.9171100460012258D+02, & +2.9126693393315793D+02, & +2.9130800749967943D+02, & +2.9084722625682167D+02, & +2.9043649120580767D+02, & +2.9153791610078673D+02, & +2.9210440912405335D+02, & +2.9188686486416555D+02, & +2.9138068978583891D+02, & +2.9158124399039616D+02, & +2.9213540811751852D+02, & +2.9211624446790427D+02, & +2.9138960755567365D+02, & +2.9060415959100914D+02, & +2.9118025885063923D+02, & +2.9132902926505932D+02, & +2.9120918643494110D+02, & +2.9222955158764825D+02, & +2.9236106160611405D+02, & +2.9219941546106151D+02, & +2.9167905448737707D+02, & +2.8956733560159137D+02, & +2.9016660738228080D+02, & +2.9069372888915495D+02, & +2.9058385568123452D+02, & +2.9219861470697771D+02, & +2.9223599112669444D+02, & +2.9150234673408067D+02, & +2.9112858259833354D+02, & +2.9011809831967531D+02, & +2.9046138673473865D+02, & +2.9176526297220903D+02, & +2.9173093419212285D+02, & +2.9150067445343569D+02, & +2.9152418894132262D+02, & +2.9197021328944544D+02, & +2.9194669886297868D+02, & +2.9168012546172463D+02, & +2.9169701864791432D+02, & +2.9061916960844030D+02, & +2.9045023836074438D+02, & +2.9212529304266053D+02, & +2.9217230349727680D+02, & +2.9156357220873417D+02, & +2.9093467723991887D+02, & +2.9015918793416472D+02, & +2.9095983823965622D+02, & +2.9124303667219704D+02, & +2.9116297170306802D+02, & +2.9162557939634507D+02, & +2.9193291492830070D+02, & +2.9138731414547760D+02, & +2.9079978674942083D+02, & +2.9159254111122891D+02, & +2.9228695093326087D+02, & +2.9225423965335352D+02, & +2.9126542837357596D+02, & +2.9173807466948557D+02, & +2.9270696227984581D+02, & +2.9274075480094893D+02, & +2.9210759999419366D+02, & +2.9091701924155882D+02, & +2.9163130710260498D+02, & +2.9157158980248664D+02, & +2.9031984685457667D+02, & +2.8939239820067894D+02, & +2.9070124438246870D+02, & +2.9196350995833507D+02, & +2.9189730956351508D+02, & +2.9047434516138509D+02, & +2.9050934070611282D+02, & +2.9179387236503283D+02, & +2.9179037287198020D+02, & +2.9197909575321057D+02, & +2.9198343084479546D+02, & +2.9146450345905629D+02, & +2.9146016842889151D+02, & +2.9233889755268928D+02, & +2.9234209014701219D+02, & +2.9032302536354865D+02, & +2.8837665454569191D+02, & +2.8923259893670223D+02, & +2.9119948081590582D+02, & +2.9158422265927129D+02, & +2.9138706789415261D+02, & +2.9131767302238063D+02, & +2.9147948119882238D+02, & +2.9186937598758146D+02, & +2.9170756787255982D+02, & +2.9181460198203825D+02, & +2.9210008868832023D+02, & +2.9132088639794199D+02, & +2.9113460155819826D+02, & +2.9159448593680958D+02, & +2.9214172380575934D+02, & +2.9182660907964311D+02, & +2.8938570202792107D+02, & +2.8829979954353007D+02, & +2.9064250900128923D+02, & +2.9110347860364448D+02, & +2.9039527564479215D+02, & +2.9152783923893156D+02, & +2.9213818222324863D+02, & +2.9211881276563861D+02, & +2.9133414472425187D+02, & +2.9125375097531855D+02, & 
+2.9190226937530139D+02, & +2.9164903795105636D+02, & +2.8851961249369930D+02, & +2.8835729910731652D+02, & +2.9144629542849515D+02, & +2.9171485796658368D+02, & +2.9126891989861593D+02, & +2.9051052150188838D+02, & +2.9113570969073521D+02, & +2.9141652614094619D+02, & +2.9133186866181995D+02, & +2.8912484642922516D+02, & +2.8915669391948876D+02, & +2.9117142770169869D+02, & +2.9117110928821620D+02, & +2.9183490795481288D+02, & +2.9183646161652371D+02, & +2.9254379194824975D+02, & +2.9254363658822064D+02, & +2.9192127768678108D+02, & +2.9192154096878102D+02, & +2.9151073810022524D+02, & +2.9151047487964541D+02, & +2.9013993357169915D+02, & +2.9014000572685774D+02, & +2.9145714571416084D+02, & +2.9145713856006512D+02, & +2.9196091751473017D+02, & +2.9196093255283131D+02, & +2.9195731369267685D+02, & +2.9195729871599588D+02, & +2.9149460806187619D+02, & +2.9149461579988633D+02, & +2.9034687539457741D+02, & +2.9034679862867739D+02, & +2.9010765498373922D+02, & +2.9010767881379803D+02, & +2.8967492890608241D+02, & +2.8967469674750663D+02, & +2.9215189060671338D+02, & +2.9215194287124120D+02, & +2.8956132995364590D+02, & +2.8950907156783751D+02, & +2.9210783379750950D+02, & +2.9211890340058585D+02, & +2.9139896954309916D+02, & +2.9128827357375536D+02, & +2.9190012628763964D+02, & +2.9212457714005234D+02, & +2.9221336158434212D+02, & +2.9217879707919576D+02, & +2.9127472708852724D+02, & +2.9132783649002283D+02, & +2.9227509889951233D+02, & +2.9226978796550475D+02, & +2.9142078378447616D+02, & +2.9142906715955786D+02, & +2.9017517054475991D+02, & +2.8992337408144391D+02, & +2.8477362931531121D+02, & +2.8514676291150971D+02, & +2.8393423954726200D+02, & +2.8313625395453965D+02, & +2.8449999402409759D+02, & +2.8534282615303522D+02, & +2.8523140662657408D+02, & +2.8739845025761718D+02, & +2.8713781898215319D+02, & +0.0000000000000000D+00/) +i0 = 1 +n0 = 100 +pp = 1 +n0in = 100 +dmin = 7.9798559272231942D+282 +dmin1 = 7.9798559272231942D+282 +dmin2 = 2.3215857579138407D+285 +dn = 2.3140662657409930D+284 +dn1 = 7.9798559272231942D+282 +dn2 = 8.2833136615500372D+288 +g = 2.5000000000000000D-01 +tau = 6.1420124051338430D+283 +ttype = -4 + +ieee = .True. + +print *, "Starting Dlasq4" + +call Dlasq4(i0, n0, z, pp, n0in, dmin, dmin1, dmin2, dn, dn1, dn2, tau, ttype, g) + +print *, "Done calling dlasq4" +print *, "tau = ", tau +print *, tau, ttype, g + +end \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/xerbla.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/xerbla.f new file mode 100644 index 0000000..3e93bc4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dlasqtest/xerbla.f @@ -0,0 +1,99 @@ +*> \brief \b XERBLA +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download XERBLA + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE XERBLA( SRNAME, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER*(*) SRNAME +* INTEGER INFO +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> XERBLA is an error handler for the LAPACK routines. +*> It is called by an LAPACK routine if an input parameter has an +*> invalid value. A message is printed and execution stops. +*> +*> Installers may consider modifying the STOP statement in order to +*> call system-specific exception-handling facilities. 
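The reference XERBLA above simply reports which parameter of which routine was invalid and then halts. A minimal Go analogue of that contract is sketched below; the panic-based reporting is an illustrative assumption of this sketch, not a description of how gonum's lapack package handles bad arguments.

package main

import "fmt"

// xerbla mimics the reference behaviour described above: report which
// parameter of which routine was invalid, then stop. Panicking here is an
// illustrative choice only.
func xerbla(srname string, info int) {
	panic(fmt.Sprintf("** On entry to %s parameter number %d had an illegal value", srname, info))
}

func main() {
	defer func() {
		if r := recover(); r != nil {
			fmt.Println(r)
		}
	}()
	n := -1
	if n < 0 {
		xerbla("DSTERF", 1) // the first argument (N) is invalid
	}
}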
+*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] SRNAME +*> \verbatim +*> SRNAME is CHARACTER*(*) +*> The name of the routine which called XERBLA. +*> \endverbatim +*> +*> \param[in] INFO +*> \verbatim +*> INFO is INTEGER +*> The position of the invalid parameter in the parameter list +*> of the calling routine. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE XERBLA( SRNAME, INFO ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER*(*) SRNAME + INTEGER INFO +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC LEN_TRIM +* .. +* .. Executable Statements .. +* + WRITE( *, FMT = 9999 )SRNAME( 1:LEN_TRIM( SRNAME ) ), INFO +* + STOP +* + 9999 FORMAT( ' ** On entry to ', A, ' parameter number ', I2, ' had ', + $ 'an illegal value' ) +* +* End of XERBLA +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/disnan.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/disnan.f new file mode 100644 index 0000000..355b827 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/disnan.f @@ -0,0 +1,80 @@ +*> \brief \b DISNAN tests input for NaN. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DISNAN + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* LOGICAL FUNCTION DISNAN( DIN ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DIN +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DISNAN returns .TRUE. if its argument is NaN, and .FALSE. +*> otherwise. To be replaced by the Fortran 2003 intrinsic in the +*> future. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] DIN +*> \verbatim +*> DIN is DOUBLE PRECISION +*> Input to test for NaN. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION DISNAN( DIN ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DIN +* .. +* +* ===================================================================== +* +* .. External Functions .. + LOGICAL DLAISNAN + EXTERNAL DLAISNAN +* .. +* .. Executable Statements .. 
+ DISNAN = DLAISNAN(DIN,DIN) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlae2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlae2.f new file mode 100644 index 0000000..302eeaa --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlae2.f @@ -0,0 +1,185 @@ +*> \brief \b DLAE2 computes the eigenvalues of a 2-by-2 symmetric matrix. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAE2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLAE2( A, B, C, RT1, RT2 ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION A, B, C, RT1, RT2 +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAE2 computes the eigenvalues of a 2-by-2 symmetric matrix +*> [ A B ] +*> [ B C ]. +*> On return, RT1 is the eigenvalue of larger absolute value, and RT2 +*> is the eigenvalue of smaller absolute value. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION +*> The (1,1) element of the 2-by-2 matrix. +*> \endverbatim +*> +*> \param[in] B +*> \verbatim +*> B is DOUBLE PRECISION +*> The (1,2) and (2,1) elements of the 2-by-2 matrix. +*> \endverbatim +*> +*> \param[in] C +*> \verbatim +*> C is DOUBLE PRECISION +*> The (2,2) element of the 2-by-2 matrix. +*> \endverbatim +*> +*> \param[out] RT1 +*> \verbatim +*> RT1 is DOUBLE PRECISION +*> The eigenvalue of larger absolute value. +*> \endverbatim +*> +*> \param[out] RT2 +*> \verbatim +*> RT2 is DOUBLE PRECISION +*> The eigenvalue of smaller absolute value. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> RT1 is accurate to a few ulps barring over/underflow. +*> +*> RT2 may be inaccurate if there is massive cancellation in the +*> determinant A*C-B*B; higher precision or correctly rounded or +*> correctly truncated arithmetic would be needed to compute RT2 +*> accurately in all cases. +*> +*> Overflow is possible only if RT1 is within a factor of 5 of overflow. +*> Underflow is harmless if the input data is 0 or exceeds +*> underflow_threshold / macheps. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DLAE2( A, B, C, RT1, RT2 ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B, C, RT1, RT2 +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE + PARAMETER ( ONE = 1.0D0 ) + DOUBLE PRECISION TWO + PARAMETER ( TWO = 2.0D0 ) + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) + DOUBLE PRECISION HALF + PARAMETER ( HALF = 0.5D0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION AB, ACMN, ACMX, ADF, DF, RT, SM, TB +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, SQRT +* .. +* .. Executable Statements .. 
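The executable part of DLAE2 follows below. As a cross-check of the recipe just documented (form SM, DF and the stabilized discriminant RT, then recover the smaller eigenvalue from the product ACMX*ACMN), here is a direct Go transcription — a sketch for readers of this test data, not gonum's implementation:

package main

import (
	"fmt"
	"math"
)

// eig2x2 returns the eigenvalues of the symmetric 2x2 matrix [a b; b c],
// the one of larger absolute value first, following the DLAE2 recipe above.
func eig2x2(a, b, c float64) (rt1, rt2 float64) {
	sm := a + c
	df := a - c
	adf := math.Abs(df)
	tb := b + b
	ab := math.Abs(tb)

	acmx, acmn := c, a
	if math.Abs(a) > math.Abs(c) {
		acmx, acmn = a, c
	}

	var rt float64
	switch {
	case adf > ab:
		rt = adf * math.Sqrt(1+(ab/adf)*(ab/adf))
	case adf < ab:
		rt = ab * math.Sqrt(1+(adf/ab)*(adf/ab))
	default: // includes the case ab == adf == 0
		rt = ab * math.Sqrt2
	}

	switch {
	case sm < 0:
		rt1 = 0.5 * (sm - rt)
		rt2 = (acmx/rt1)*acmn - (b/rt1)*b // needs the larger root first for accuracy
	case sm > 0:
		rt1 = 0.5 * (sm + rt)
		rt2 = (acmx/rt1)*acmn - (b/rt1)*b
	default: // sm == 0: the roots are +-rt/2
		rt1 = 0.5 * rt
		rt2 = -0.5 * rt
	}
	return rt1, rt2
}

func main() {
	fmt.Println(eig2x2(2, 1, 2)) // 3 and 1
}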
+* +* Compute the eigenvalues +* + SM = A + C + DF = A - C + ADF = ABS( DF ) + TB = B + B + AB = ABS( TB ) + IF( ABS( A ).GT.ABS( C ) ) THEN + ACMX = A + ACMN = C + ELSE + ACMX = C + ACMN = A + END IF + IF( ADF.GT.AB ) THEN + RT = ADF*SQRT( ONE+( AB / ADF )**2 ) + ELSE IF( ADF.LT.AB ) THEN + RT = AB*SQRT( ONE+( ADF / AB )**2 ) + ELSE +* +* Includes case AB=ADF=0 +* + RT = AB*SQRT( TWO ) + END IF + IF( SM.LT.ZERO ) THEN + RT1 = HALF*( SM-RT ) +* +* Order of execution important. +* To get fully accurate smaller eigenvalue, +* next line needs to be executed in higher precision. +* + RT2 = ( ACMX / RT1 )*ACMN - ( B / RT1 )*B + ELSE IF( SM.GT.ZERO ) THEN + RT1 = HALF*( SM+RT ) +* +* Order of execution important. +* To get fully accurate smaller eigenvalue, +* next line needs to be executed in higher precision. +* + RT2 = ( ACMX / RT1 )*ACMN - ( B / RT1 )*B + ELSE +* +* Includes case RT1 = RT2 = 0 +* + RT1 = HALF*RT + RT2 = -HALF*RT + END IF + RETURN +* +* End of DLAE2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlaisnan.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlaisnan.f new file mode 100644 index 0000000..58595c5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlaisnan.f @@ -0,0 +1,91 @@ +*> \brief \b DLAISNAN tests input for NaN by comparing two arguments for inequality. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAISNAN + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* LOGICAL FUNCTION DLAISNAN( DIN1, DIN2 ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DIN1, DIN2 +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> This routine is not for general use. It exists solely to avoid +*> over-optimization in DISNAN. +*> +*> DLAISNAN checks for NaNs by comparing its two arguments for +*> inequality. NaN is the only floating-point value where NaN != NaN +*> returns .TRUE. To check for NaNs, pass the same variable as both +*> arguments. +*> +*> A compiler must assume that the two arguments are +*> not the same variable, and the test will not be optimized away. +*> Interprocedural or whole-program optimization may delete this +*> test. The ISNAN functions will be replaced by the correct +*> Fortran 03 intrinsic once the intrinsic is widely available. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] DIN1 +*> \verbatim +*> DIN1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] DIN2 +*> \verbatim +*> DIN2 is DOUBLE PRECISION +*> Two numbers to compare for inequality. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION DLAISNAN( DIN1, DIN2 ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DIN1, DIN2 +* .. +* +* ===================================================================== +* +* .. Executable Statements .. 
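The single statement below is the whole trick: NaN is the only IEEE-754 value that compares unequal to itself. The same idiom carries over verbatim to Go (it is also the property the standard math.IsNaN test relies on); a minimal sketch:

package main

import (
	"fmt"
	"math"
)

// isNaN reports whether x is NaN using self-inequality, mirroring the
// DLAISNAN comparison below.
func isNaN(x float64) bool {
	return x != x
}

func main() {
	fmt.Println(isNaN(math.NaN()), isNaN(1.0)) // true false
}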
+ DLAISNAN = (DIN1.NE.DIN2) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlamch.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlamch.f new file mode 100644 index 0000000..25c2c8e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlamch.f @@ -0,0 +1,193 @@ +*> \brief \b DLAMCH +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAMCH determines double precision machine parameters. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CMACH +*> \verbatim +*> Specifies the value to be returned by DLAMCH: +*> = 'E' or 'e', DLAMCH := eps +*> = 'S' or 's , DLAMCH := sfmin +*> = 'B' or 'b', DLAMCH := base +*> = 'P' or 'p', DLAMCH := eps*base +*> = 'N' or 'n', DLAMCH := t +*> = 'R' or 'r', DLAMCH := rnd +*> = 'M' or 'm', DLAMCH := emin +*> = 'U' or 'u', DLAMCH := rmin +*> = 'L' or 'l', DLAMCH := emax +*> = 'O' or 'o', DLAMCH := rmax +*> where +*> eps = relative machine precision +*> sfmin = safe minimum, such that 1/sfmin does not overflow +*> base = base of the machine +*> prec = eps*base +*> t = number of (base) digits in the mantissa +*> rnd = 1.0 when rounding occurs in addition, 0.0 otherwise +*> emin = minimum exponent before (gradual) underflow +*> rmin = underflow threshold - base**(emin-1) +*> emax = largest exponent before overflow +*> rmax = overflow threshold - (base**emax)*(1-eps) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER CMACH +* .. +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE, ZERO + PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION RND, EPS, SFMIN, SMALL, RMACH +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. Intrinsic Functions .. + INTRINSIC DIGITS, EPSILON, HUGE, MAXEXPONENT, + $ MINEXPONENT, RADIX, TINY +* .. +* .. Executable Statements .. +* +* +* Assume rounding, not chopping. Always. +* + RND = ONE +* + IF( ONE.EQ.RND ) THEN + EPS = EPSILON(ZERO) * 0.5 + ELSE + EPS = EPSILON(ZERO) + END IF +* + IF( LSAME( CMACH, 'E' ) ) THEN + RMACH = EPS + ELSE IF( LSAME( CMACH, 'S' ) ) THEN + SFMIN = TINY(ZERO) + SMALL = ONE / HUGE(ZERO) + IF( SMALL.GE.SFMIN ) THEN +* +* Use SMALL plus a bit, to avoid the possibility of rounding +* causing overflow when computing 1/sfmin. 
+* + SFMIN = SMALL*( ONE+EPS ) + END IF + RMACH = SFMIN + ELSE IF( LSAME( CMACH, 'B' ) ) THEN + RMACH = RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'P' ) ) THEN + RMACH = EPS * RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'N' ) ) THEN + RMACH = DIGITS(ZERO) + ELSE IF( LSAME( CMACH, 'R' ) ) THEN + RMACH = RND + ELSE IF( LSAME( CMACH, 'M' ) ) THEN + RMACH = MINEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'U' ) ) THEN + RMACH = tiny(zero) + ELSE IF( LSAME( CMACH, 'L' ) ) THEN + RMACH = MAXEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'O' ) ) THEN + RMACH = HUGE(ZERO) + ELSE + RMACH = ZERO + END IF +* + DLAMCH = RMACH + RETURN +* +* End of DLAMCH +* + END +************************************************************************ +*> \brief \b DLAMC3 +*> \details +*> \b Purpose: +*> \verbatim +*> DLAMC3 is intended to force A and B to be stored prior to doing +*> the addition of A and B , for use in situations where optimizers +*> might hold one of these in a register. +*> \endverbatim +*> \author LAPACK is a software package provided by Univ. of Tennessee, Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd.. +*> \date November 2011 +*> \ingroup auxOTHERauxiliary +*> +*> \param[in] A +*> \verbatim +*> A is a DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] B +*> \verbatim +*> B is a DOUBLE PRECISION +*> The values A and B. +*> \endverbatim +*> + DOUBLE PRECISION FUNCTION DLAMC3( A, B ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* Univ. of Tennessee, Univ. of California Berkeley and NAG Ltd.. +* November 2010 +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B +* .. +* ===================================================================== +* +* .. Executable Statements .. +* + DLAMC3 = A + B +* + RETURN +* +* End of DLAMC3 +* + END +* +************************************************************************ diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlanst.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlanst.f new file mode 100644 index 0000000..213b06a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlanst.f @@ -0,0 +1,186 @@ +*> \brief \b DLANST returns the value of the 1-norm, or the Frobenius norm, or the infinity norm, or the element of largest absolute value of a real symmetric tridiagonal matrix. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLANST + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLANST( NORM, N, D, E ) +* +* .. Scalar Arguments .. +* CHARACTER NORM +* INTEGER N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION D( * ), E( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLANST returns the value of the one norm, or the Frobenius norm, or +*> the infinity norm, or the element of largest absolute value of a +*> real symmetric tridiagonal matrix A. +*> \endverbatim +*> +*> \return DLANST +*> \verbatim +*> +*> DLANST = ( max(abs(A(i,j))), NORM = 'M' or 'm' +*> ( +*> ( norm1(A), NORM = '1', 'O' or 'o' +*> ( +*> ( normI(A), NORM = 'I' or 'i' +*> ( +*> ( normF(A), NORM = 'F', 'f', 'E' or 'e' +*> +*> where norm1 denotes the one norm of a matrix (maximum column sum), +*> normI denotes the infinity norm of a matrix (maximum row sum) and +*> normF denotes the Frobenius norm of a matrix (square root of sum of +*> squares). 
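For a symmetric tridiagonal matrix the norms listed above reduce to single passes over the diagonal d and off-diagonal e. A small Go sketch of the NORM = 'M' and NORM = '1' cases (assuming len(e) == len(d)-1 and ignoring the explicit NaN propagation the reference adds via DISNAN):

package main

import (
	"fmt"
	"math"
)

// maxAbsST returns max |A(i,j)| for the symmetric tridiagonal matrix with
// diagonal d and off-diagonal e (the NORM = 'M' case).
func maxAbsST(d, e []float64) float64 {
	m := 0.0
	for _, v := range d {
		m = math.Max(m, math.Abs(v))
	}
	for _, v := range e {
		m = math.Max(m, math.Abs(v))
	}
	return m
}

// oneNormST returns the maximum column sum (NORM = '1'); by symmetry it
// equals the infinity norm for this matrix shape.
func oneNormST(d, e []float64) float64 {
	n := len(d)
	if n == 0 {
		return 0
	}
	if n == 1 {
		return math.Abs(d[0])
	}
	anorm := math.Max(math.Abs(d[0])+math.Abs(e[0]), math.Abs(d[n-1])+math.Abs(e[n-2]))
	for i := 1; i < n-1; i++ {
		anorm = math.Max(anorm, math.Abs(d[i])+math.Abs(e[i])+math.Abs(e[i-1]))
	}
	return anorm
}

func main() {
	d := []float64{2, -5, 3}
	e := []float64{1, 4}
	fmt.Println(maxAbsST(d, e), oneNormST(d, e)) // 5 10
}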
Note that max(abs(A(i,j))) is not a consistent matrix norm. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] NORM +*> \verbatim +*> NORM is CHARACTER*1 +*> Specifies the value to be returned in DLANST as described +*> above. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The order of the matrix A. N >= 0. When N = 0, DLANST is +*> set to zero. +*> \endverbatim +*> +*> \param[in] D +*> \verbatim +*> D is DOUBLE PRECISION array, dimension (N) +*> The diagonal elements of A. +*> \endverbatim +*> +*> \param[in] E +*> \verbatim +*> E is DOUBLE PRECISION array, dimension (N-1) +*> The (n-1) sub-diagonal or super-diagonal elements of A. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLANST( NORM, N, D, E ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER NORM + INTEGER N +* .. +* .. Array Arguments .. + DOUBLE PRECISION D( * ), E( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE, ZERO + PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + INTEGER I + DOUBLE PRECISION ANORM, SCALE, SUM +* .. +* .. External Functions .. + LOGICAL LSAME, DISNAN + EXTERNAL LSAME, DISNAN +* .. +* .. External Subroutines .. + EXTERNAL DLASSQ +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, SQRT +* .. +* .. Executable Statements .. +* + IF( N.LE.0 ) THEN + ANORM = ZERO + ELSE IF( LSAME( NORM, 'M' ) ) THEN +* +* Find max(abs(A(i,j))). +* + ANORM = ABS( D( N ) ) + DO 10 I = 1, N - 1 + SUM = ABS( D( I ) ) + IF( ANORM .LT. SUM .OR. DISNAN( SUM ) ) ANORM = SUM + SUM = ABS( E( I ) ) + IF( ANORM .LT. SUM .OR. DISNAN( SUM ) ) ANORM = SUM + 10 CONTINUE + ELSE IF( LSAME( NORM, 'O' ) .OR. NORM.EQ.'1' .OR. + $ LSAME( NORM, 'I' ) ) THEN +* +* Find norm1(A). +* + IF( N.EQ.1 ) THEN + ANORM = ABS( D( 1 ) ) + ELSE + ANORM = ABS( D( 1 ) )+ABS( E( 1 ) ) + SUM = ABS( E( N-1 ) )+ABS( D( N ) ) + IF( ANORM .LT. SUM .OR. DISNAN( SUM ) ) ANORM = SUM + DO 20 I = 2, N - 1 + SUM = ABS( D( I ) )+ABS( E( I ) )+ABS( E( I-1 ) ) + IF( ANORM .LT. SUM .OR. DISNAN( SUM ) ) ANORM = SUM + 20 CONTINUE + END IF + ELSE IF( ( LSAME( NORM, 'F' ) ) .OR. ( LSAME( NORM, 'E' ) ) ) THEN +* +* Find normF(A). +* + SCALE = ZERO + SUM = ONE + IF( N.GT.1 ) THEN + CALL DLASSQ( N-1, E, 1, SCALE, SUM ) + SUM = 2*SUM + END IF + CALL DLASSQ( N, D, 1, SCALE, SUM ) + ANORM = SCALE*SQRT( SUM ) + END IF +* + DLANST = ANORM + RETURN +* +* End of DLANST +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlapy2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlapy2.f new file mode 100644 index 0000000..d43b0d5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlapy2.f @@ -0,0 +1,104 @@ +*> \brief \b DLAPY2 returns sqrt(x2+y2). 
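As documented below, the point of DLAPY2 is that squaring x and y directly can overflow even when sqrt(x**2+y**2) itself is representable; factoring out the larger magnitude avoids that. A Go sketch of the same scaling — in ordinary Go code the standard library's math.Hypot serves the same purpose:

package main

import (
	"fmt"
	"math"
)

// pythag returns sqrt(x*x + y*y) without unnecessary overflow by factoring
// out the larger magnitude, as DLAPY2 does.
func pythag(x, y float64) float64 {
	xa, ya := math.Abs(x), math.Abs(y)
	w := math.Max(xa, ya)
	z := math.Min(xa, ya)
	if z == 0 {
		return w
	}
	return w * math.Sqrt(1+(z/w)*(z/w))
}

func main() {
	big := 1e300
	fmt.Println(pythag(3*big, 4*big)) // 5e+300, where x*x + y*y would overflow
	fmt.Println(math.Hypot(3, 4))     // 5
}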
+* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAPY2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLAPY2( X, Y ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION X, Y +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAPY2 returns sqrt(x**2+y**2), taking care not to cause unnecessary +*> overflow. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] X +*> \verbatim +*> X is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] Y +*> \verbatim +*> Y is DOUBLE PRECISION +*> X and Y specify the values x and y. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLAPY2( X, Y ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION X, Y +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) + DOUBLE PRECISION ONE + PARAMETER ( ONE = 1.0D0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION W, XABS, YABS, Z +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN, SQRT +* .. +* .. Executable Statements .. +* + XABS = ABS( X ) + YABS = ABS( Y ) + W = MAX( XABS, YABS ) + Z = MIN( XABS, YABS ) + IF( Z.EQ.ZERO ) THEN + DLAPY2 = W + ELSE + DLAPY2 = W*SQRT( ONE+( Z / W )**2 ) + END IF + RETURN +* +* End of DLAPY2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlascl.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlascl.f new file mode 100644 index 0000000..9b9b33c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlascl.f @@ -0,0 +1,364 @@ +*> \brief \b DLASCL multiplies a general rectangular matrix by a real scalar defined as cto/cfrom. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASCL + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASCL( TYPE, KL, KU, CFROM, CTO, M, N, A, LDA, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER TYPE +* INTEGER INFO, KL, KU, LDA, M, N +* DOUBLE PRECISION CFROM, CTO +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A( LDA, * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASCL multiplies the M by N real matrix A by the real scalar +*> CTO/CFROM. This is done without over/underflow as long as the final +*> result CTO*A(I,J)/CFROM does not over/underflow. TYPE specifies that +*> A may be full, upper triangular, lower triangular, upper Hessenberg, +*> or banded. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] TYPE +*> \verbatim +*> TYPE is CHARACTER*1 +*> TYPE indices the storage type of the input matrix. +*> = 'G': A is a full matrix. +*> = 'L': A is a lower triangular matrix. 
+*> = 'U': A is an upper triangular matrix. +*> = 'H': A is an upper Hessenberg matrix. +*> = 'B': A is a symmetric band matrix with lower bandwidth KL +*> and upper bandwidth KU and with the only the lower +*> half stored. +*> = 'Q': A is a symmetric band matrix with lower bandwidth KL +*> and upper bandwidth KU and with the only the upper +*> half stored. +*> = 'Z': A is a band matrix with lower bandwidth KL and upper +*> bandwidth KU. See DGBTRF for storage details. +*> \endverbatim +*> +*> \param[in] KL +*> \verbatim +*> KL is INTEGER +*> The lower bandwidth of A. Referenced only if TYPE = 'B', +*> 'Q' or 'Z'. +*> \endverbatim +*> +*> \param[in] KU +*> \verbatim +*> KU is INTEGER +*> The upper bandwidth of A. Referenced only if TYPE = 'B', +*> 'Q' or 'Z'. +*> \endverbatim +*> +*> \param[in] CFROM +*> \verbatim +*> CFROM is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] CTO +*> \verbatim +*> CTO is DOUBLE PRECISION +*> +*> The matrix A is multiplied by CTO/CFROM. A(I,J) is computed +*> without over/underflow if the final result CTO*A(I,J)/CFROM +*> can be represented without over/underflow. CFROM must be +*> nonzero. +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> The number of rows of the matrix A. M >= 0. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of columns of the matrix A. N >= 0. +*> \endverbatim +*> +*> \param[in,out] A +*> \verbatim +*> A is DOUBLE PRECISION array, dimension (LDA,N) +*> The matrix to be multiplied by CTO/CFROM. See TYPE for the +*> storage type. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> The leading dimension of the array A. LDA >= max(1,M). +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> 0 - successful exit +*> <0 - if INFO = -i, the i-th argument had an illegal value. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLASCL( TYPE, KL, KU, CFROM, CTO, M, N, A, LDA, INFO ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER TYPE + INTEGER INFO, KL, KU, LDA, M, N + DOUBLE PRECISION CFROM, CTO +* .. +* .. Array Arguments .. + DOUBLE PRECISION A( LDA, * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO, ONE + PARAMETER ( ZERO = 0.0D0, ONE = 1.0D0 ) +* .. +* .. Local Scalars .. + LOGICAL DONE + INTEGER I, ITYPE, J, K1, K2, K3, K4 + DOUBLE PRECISION BIGNUM, CFROM1, CFROMC, CTO1, CTOC, MUL, SMLNUM +* .. +* .. External Functions .. + LOGICAL LSAME, DISNAN + DOUBLE PRECISION DLAMCH + EXTERNAL LSAME, DLAMCH, DISNAN +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Executable Statements .. 
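The loop that follows implements the scaling described above: rather than forming CTO/CFROM in one step (which can over- or underflow), the factor is applied in increments of SMLNUM or BIGNUM until the remaining ratio is safe. Below is a simplified Go sketch for the full-matrix (TYPE = 'G') case; the hard-coded smlnum (smallest positive normal float64) standing in for DLAMCH('S') is an assumption of this sketch:

package main

import (
	"fmt"
	"math"
)

// scale multiplies every element of a by cto/cfrom without intermediate
// over/underflow, following the stepwise loop of DLASCL for a full matrix.
func scale(a []float64, cfrom, cto float64) {
	const smlnum = 2.2250738585072014e-308 // smallest normal float64, stand-in for DLAMCH('S')
	bignum := 1 / smlnum

	cfromc, ctoc := cfrom, cto
	for {
		var mul float64
		done := true
		cfrom1 := cfromc * smlnum
		if cfrom1 == cfromc {
			// cfromc is an infinity; the quotient is a signed zero (or NaN if ctoc is infinite).
			mul = ctoc / cfromc
		} else {
			cto1 := ctoc / bignum
			switch {
			case cto1 == ctoc:
				// ctoc is zero or an infinity and is itself the correct factor.
				mul = ctoc
				cfromc = 1
			case math.Abs(cfrom1) > math.Abs(ctoc) && ctoc != 0:
				mul = smlnum
				done = false
				cfromc = cfrom1
			case math.Abs(cto1) > math.Abs(cfromc):
				mul = bignum
				done = false
				ctoc = cto1
			default:
				mul = ctoc / cfromc
			}
		}
		for i := range a {
			a[i] *= mul
		}
		if done {
			return
		}
	}
}

func main() {
	a := []float64{1e-250, 2e-250}
	scale(a, 1e-200, 1e200) // the combined factor 1e400 is applied in safe steps
	fmt.Println(a)          // approximately [1e+150 2e+150]
}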
+* +* Test the input arguments +* + INFO = 0 +* + IF( LSAME( TYPE, 'G' ) ) THEN + ITYPE = 0 + ELSE IF( LSAME( TYPE, 'L' ) ) THEN + ITYPE = 1 + ELSE IF( LSAME( TYPE, 'U' ) ) THEN + ITYPE = 2 + ELSE IF( LSAME( TYPE, 'H' ) ) THEN + ITYPE = 3 + ELSE IF( LSAME( TYPE, 'B' ) ) THEN + ITYPE = 4 + ELSE IF( LSAME( TYPE, 'Q' ) ) THEN + ITYPE = 5 + ELSE IF( LSAME( TYPE, 'Z' ) ) THEN + ITYPE = 6 + ELSE + ITYPE = -1 + END IF +* + IF( ITYPE.EQ.-1 ) THEN + INFO = -1 + ELSE IF( CFROM.EQ.ZERO .OR. DISNAN(CFROM) ) THEN + INFO = -4 + ELSE IF( DISNAN(CTO) ) THEN + INFO = -5 + ELSE IF( M.LT.0 ) THEN + INFO = -6 + ELSE IF( N.LT.0 .OR. ( ITYPE.EQ.4 .AND. N.NE.M ) .OR. + $ ( ITYPE.EQ.5 .AND. N.NE.M ) ) THEN + INFO = -7 + ELSE IF( ITYPE.LE.3 .AND. LDA.LT.MAX( 1, M ) ) THEN + INFO = -9 + ELSE IF( ITYPE.GE.4 ) THEN + IF( KL.LT.0 .OR. KL.GT.MAX( M-1, 0 ) ) THEN + INFO = -2 + ELSE IF( KU.LT.0 .OR. KU.GT.MAX( N-1, 0 ) .OR. + $ ( ( ITYPE.EQ.4 .OR. ITYPE.EQ.5 ) .AND. KL.NE.KU ) ) + $ THEN + INFO = -3 + ELSE IF( ( ITYPE.EQ.4 .AND. LDA.LT.KL+1 ) .OR. + $ ( ITYPE.EQ.5 .AND. LDA.LT.KU+1 ) .OR. + $ ( ITYPE.EQ.6 .AND. LDA.LT.2*KL+KU+1 ) ) THEN + INFO = -9 + END IF + END IF +* + IF( INFO.NE.0 ) THEN + CALL XERBLA( 'DLASCL', -INFO ) + RETURN + END IF +* +* Quick return if possible +* + IF( N.EQ.0 .OR. M.EQ.0 ) + $ RETURN +* +* Get machine parameters +* + SMLNUM = DLAMCH( 'S' ) + BIGNUM = ONE / SMLNUM +* + CFROMC = CFROM + CTOC = CTO +* + 10 CONTINUE + CFROM1 = CFROMC*SMLNUM + IF( CFROM1.EQ.CFROMC ) THEN +! CFROMC is an inf. Multiply by a correctly signed zero for +! finite CTOC, or a NaN if CTOC is infinite. + MUL = CTOC / CFROMC + DONE = .TRUE. + CTO1 = CTOC + ELSE + CTO1 = CTOC / BIGNUM + IF( CTO1.EQ.CTOC ) THEN +! CTOC is either 0 or an inf. In both cases, CTOC itself +! serves as the correct multiplication factor. + MUL = CTOC + DONE = .TRUE. + CFROMC = ONE + ELSE IF( ABS( CFROM1 ).GT.ABS( CTOC ) .AND. CTOC.NE.ZERO ) THEN + MUL = SMLNUM + DONE = .FALSE. + CFROMC = CFROM1 + ELSE IF( ABS( CTO1 ).GT.ABS( CFROMC ) ) THEN + MUL = BIGNUM + DONE = .FALSE. + CTOC = CTO1 + ELSE + MUL = CTOC / CFROMC + DONE = .TRUE. 
+ END IF + END IF +* + IF( ITYPE.EQ.0 ) THEN +* +* Full matrix +* + DO 30 J = 1, N + DO 20 I = 1, M + A( I, J ) = A( I, J )*MUL + 20 CONTINUE + 30 CONTINUE +* + ELSE IF( ITYPE.EQ.1 ) THEN +* +* Lower triangular matrix +* + DO 50 J = 1, N + DO 40 I = J, M + A( I, J ) = A( I, J )*MUL + 40 CONTINUE + 50 CONTINUE +* + ELSE IF( ITYPE.EQ.2 ) THEN +* +* Upper triangular matrix +* + DO 70 J = 1, N + DO 60 I = 1, MIN( J, M ) + A( I, J ) = A( I, J )*MUL + 60 CONTINUE + 70 CONTINUE +* + ELSE IF( ITYPE.EQ.3 ) THEN +* +* Upper Hessenberg matrix +* + DO 90 J = 1, N + DO 80 I = 1, MIN( J+1, M ) + A( I, J ) = A( I, J )*MUL + 80 CONTINUE + 90 CONTINUE +* + ELSE IF( ITYPE.EQ.4 ) THEN +* +* Lower half of a symmetric band matrix +* + K3 = KL + 1 + K4 = N + 1 + DO 110 J = 1, N + DO 100 I = 1, MIN( K3, K4-J ) + A( I, J ) = A( I, J )*MUL + 100 CONTINUE + 110 CONTINUE +* + ELSE IF( ITYPE.EQ.5 ) THEN +* +* Upper half of a symmetric band matrix +* + K1 = KU + 2 + K3 = KU + 1 + DO 130 J = 1, N + DO 120 I = MAX( K1-J, 1 ), K3 + A( I, J ) = A( I, J )*MUL + 120 CONTINUE + 130 CONTINUE +* + ELSE IF( ITYPE.EQ.6 ) THEN +* +* Band matrix +* + K1 = KL + KU + 2 + K2 = KL + 1 + K3 = 2*KL + KU + 1 + K4 = KL + KU + 1 + M + DO 150 J = 1, N + DO 140 I = MAX( K1-J, K2 ), MIN( K3, K4-J ) + A( I, J ) = A( I, J )*MUL + 140 CONTINUE + 150 CONTINUE +* + END IF +* + IF( .NOT.DONE ) + $ GO TO 10 +* + RETURN +* +* End of DLASCL +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlasrt.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlasrt.f new file mode 100644 index 0000000..f5d0e6c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlasrt.f @@ -0,0 +1,303 @@ +*> \brief \b DLASRT sorts numbers in increasing or decreasing order. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASRT + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASRT( ID, N, D, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER ID +* INTEGER INFO, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION D( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> Sort the numbers in D in increasing order (if ID = 'I') or +*> in decreasing order (if ID = 'D' ). +*> +*> Use Quick Sort, reverting to Insertion sort on arrays of +*> size <= 20. Dimension of STACK limits N to about 2**32. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] ID +*> \verbatim +*> ID is CHARACTER*1 +*> = 'I': sort D in increasing order; +*> = 'D': sort D in decreasing order. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The length of the array D. +*> \endverbatim +*> +*> \param[in,out] D +*> \verbatim +*> D is DOUBLE PRECISION array, dimension (N) +*> On entry, the array to be sorted. +*> On exit, D has been sorted into increasing order +*> (D(1) <= ... <= D(N) ) or into decreasing order +*> (D(1) >= ... >= D(N) ), depending on ID. +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> = 0: successful exit +*> < 0: if INFO = -i, the i-th argument had an illegal value +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
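DLASRT above is a hand-rolled quicksort that keeps an explicit stack instead of recursing and falls back to insertion sort for subarrays of at most 20 elements. Go callers would normally not reimplement this; the standard sort package already provides both orderings:

package main

import (
	"fmt"
	"sort"
)

func main() {
	d := []float64{3.5, -1.0, 2.25, 0.5}

	// Increasing order (ID = 'I' in DLASRT terms).
	sort.Float64s(d)
	fmt.Println(d) // [-1 0.5 2.25 3.5]

	// Decreasing order (ID = 'D').
	sort.Sort(sort.Reverse(sort.Float64Slice(d)))
	fmt.Println(d) // [3.5 2.25 0.5 -1]
}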
+* +*> \date September 2012 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DLASRT( ID, N, D, INFO ) +* +* -- LAPACK computational routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER ID + INTEGER INFO, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION D( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + INTEGER SELECT + PARAMETER ( SELECT = 20 ) +* .. +* .. Local Scalars .. + INTEGER DIR, ENDD, I, J, START, STKPNT + DOUBLE PRECISION D1, D2, D3, DMNMX, TMP +* .. +* .. Local Arrays .. + INTEGER STACK( 2, 32 ) +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Executable Statements .. +* +* Test the input paramters. +* + INFO = 0 + DIR = -1 + IF( LSAME( ID, 'D' ) ) THEN + DIR = 0 + ELSE IF( LSAME( ID, 'I' ) ) THEN + DIR = 1 + END IF + IF( DIR.EQ.-1 ) THEN + INFO = -1 + ELSE IF( N.LT.0 ) THEN + INFO = -2 + END IF + IF( INFO.NE.0 ) THEN + CALL XERBLA( 'DLASRT', -INFO ) + RETURN + END IF +* +* Quick return if possible +* + IF( N.LE.1 ) + $ RETURN +* + STKPNT = 1 + STACK( 1, 1 ) = 1 + STACK( 2, 1 ) = N + 10 CONTINUE + START = STACK( 1, STKPNT ) + ENDD = STACK( 2, STKPNT ) + STKPNT = STKPNT - 1 + IF( ENDD-START.LE.SELECT .AND. ENDD-START.GT.0 ) THEN +* +* Do Insertion sort on D( START:ENDD ) +* + IF( DIR.EQ.0 ) THEN +* +* Sort into decreasing order +* + DO 30 I = START + 1, ENDD + DO 20 J = I, START + 1, -1 + IF( D( J ).GT.D( J-1 ) ) THEN + DMNMX = D( J ) + D( J ) = D( J-1 ) + D( J-1 ) = DMNMX + ELSE + GO TO 30 + END IF + 20 CONTINUE + 30 CONTINUE +* + ELSE +* +* Sort into increasing order +* + DO 50 I = START + 1, ENDD + DO 40 J = I, START + 1, -1 + IF( D( J ).LT.D( J-1 ) ) THEN + DMNMX = D( J ) + D( J ) = D( J-1 ) + D( J-1 ) = DMNMX + ELSE + GO TO 50 + END IF + 40 CONTINUE + 50 CONTINUE +* + END IF +* + ELSE IF( ENDD-START.GT.SELECT ) THEN +* +* Partition D( START:ENDD ) and stack parts, largest one first +* +* Choose partition entry as median of 3 +* + D1 = D( START ) + D2 = D( ENDD ) + I = ( START+ENDD ) / 2 + D3 = D( I ) + IF( D1.LT.D2 ) THEN + IF( D3.LT.D1 ) THEN + DMNMX = D1 + ELSE IF( D3.LT.D2 ) THEN + DMNMX = D3 + ELSE + DMNMX = D2 + END IF + ELSE + IF( D3.LT.D2 ) THEN + DMNMX = D2 + ELSE IF( D3.LT.D1 ) THEN + DMNMX = D3 + ELSE + DMNMX = D1 + END IF + END IF +* + IF( DIR.EQ.0 ) THEN +* +* Sort into decreasing order +* + I = START - 1 + J = ENDD + 1 + 60 CONTINUE + 70 CONTINUE + J = J - 1 + IF( D( J ).LT.DMNMX ) + $ GO TO 70 + 80 CONTINUE + I = I + 1 + IF( D( I ).GT.DMNMX ) + $ GO TO 80 + IF( I.LT.J ) THEN + TMP = D( I ) + D( I ) = D( J ) + D( J ) = TMP + GO TO 60 + END IF + IF( J-START.GT.ENDD-J-1 ) THEN + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + ELSE + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + END IF + ELSE +* +* Sort into increasing order +* + I = START - 1 + J = ENDD + 1 + 90 CONTINUE + 100 CONTINUE + J = J - 1 + IF( D( J ).GT.DMNMX ) + $ GO TO 100 + 110 CONTINUE + I = I + 1 + IF( D( I ).LT.DMNMX ) + $ GO TO 110 + IF( I.LT.J ) THEN + TMP = D( I ) + D( I ) = D( J ) + 
D( J ) = TMP + GO TO 90 + END IF + IF( J-START.GT.ENDD-J-1 ) THEN + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + ELSE + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = J + 1 + STACK( 2, STKPNT ) = ENDD + STKPNT = STKPNT + 1 + STACK( 1, STKPNT ) = START + STACK( 2, STKPNT ) = J + END IF + END IF + END IF + IF( STKPNT.GT.0 ) + $ GO TO 10 + RETURN +* +* End of DLASRT +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlassq.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlassq.f new file mode 100644 index 0000000..c7c4087 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dlassq.f @@ -0,0 +1,155 @@ +*> \brief \b DLASSQ updates a sum of squares represented in scaled form. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASSQ + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASSQ( N, X, INCX, SCALE, SUMSQ ) +* +* .. Scalar Arguments .. +* INTEGER INCX, N +* DOUBLE PRECISION SCALE, SUMSQ +* .. +* .. Array Arguments .. +* DOUBLE PRECISION X( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASSQ returns the values scl and smsq such that +*> +*> ( scl**2 )*smsq = x( 1 )**2 +...+ x( n )**2 + ( scale**2 )*sumsq, +*> +*> where x( i ) = X( 1 + ( i - 1 )*INCX ). The value of sumsq is +*> assumed to be non-negative and scl returns the value +*> +*> scl = max( scale, abs( x( i ) ) ). +*> +*> scale and sumsq must be supplied in SCALE and SUMSQ and +*> scl and smsq are overwritten on SCALE and SUMSQ respectively. +*> +*> The routine makes only one pass through the vector x. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of elements to be used from the vector X. +*> \endverbatim +*> +*> \param[in] X +*> \verbatim +*> X is DOUBLE PRECISION array, dimension (N) +*> The vector for which a scaled sum of squares is computed. +*> x( i ) = X( 1 + ( i - 1 )*INCX ), 1 <= i <= n. +*> \endverbatim +*> +*> \param[in] INCX +*> \verbatim +*> INCX is INTEGER +*> The increment between successive values of the vector X. +*> INCX > 0. +*> \endverbatim +*> +*> \param[in,out] SCALE +*> \verbatim +*> SCALE is DOUBLE PRECISION +*> On entry, the value scale in the equation above. +*> On exit, SCALE is overwritten with scl , the scaling factor +*> for the sum of squares. +*> \endverbatim +*> +*> \param[in,out] SUMSQ +*> \verbatim +*> SUMSQ is DOUBLE PRECISION +*> On entry, the value sumsq in the equation above. +*> On exit, SUMSQ is overwritten with smsq , the basic sum of +*> squares from which scl has been factored out. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLASSQ( N, X, INCX, SCALE, SUMSQ ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. 
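The update documented above keeps the running result as scale**2 * sumsq and raises the scale whenever a larger |x(i)| appears, so no entry is ever squared at full magnitude. A Go sketch of one pass with increment fixed at 1 (the NaN handling the reference adds via DISNAN is omitted); not gonum's implementation:

package main

import (
	"fmt"
	"math"
)

// lassq updates (scale, sumsq) so that scale*scale*sumsq accumulates the sum
// of x[i]*x[i], rescaling whenever |x[i]| exceeds the current scale, as
// described for DLASSQ above.
func lassq(x []float64, scale, sumsq float64) (float64, float64) {
	for _, v := range x {
		ax := math.Abs(v)
		if ax == 0 {
			continue
		}
		if scale < ax {
			sumsq = 1 + sumsq*(scale/ax)*(scale/ax)
			scale = ax
		} else {
			sumsq += (ax / scale) * (ax / scale)
		}
	}
	return scale, sumsq
}

func main() {
	x := []float64{3e200, 4e200} // squaring these directly would overflow
	scale, sumsq := lassq(x, 0, 1)
	fmt.Println(scale * math.Sqrt(sumsq)) // 5e+200
}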
+ INTEGER INCX, N + DOUBLE PRECISION SCALE, SUMSQ +* .. +* .. Array Arguments .. + DOUBLE PRECISION X( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + INTEGER IX + DOUBLE PRECISION ABSXI +* .. +* .. External Functions .. + LOGICAL DISNAN + EXTERNAL DISNAN +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS +* .. +* .. Executable Statements .. +* + IF( N.GT.0 ) THEN + DO 10 IX = 1, 1 + ( N-1 )*INCX, INCX + ABSXI = ABS( X( IX ) ) + IF( ABSXI.GT.ZERO.OR.DISNAN( ABSXI ) ) THEN + IF( SCALE.LT.ABSXI ) THEN + SUMSQ = 1 + SUMSQ*( SCALE / ABSXI )**2 + SCALE = ABSXI + ELSE + SUMSQ = SUMSQ + ( ABSXI / SCALE )**2 + END IF + END IF + 10 CONTINUE + END IF + RETURN +* +* End of DLASSQ +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dsterf.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dsterf.f new file mode 100644 index 0000000..43395cc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/dsterf.f @@ -0,0 +1,448 @@ +*> \brief \b DSTERF +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DSTERF + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DSTERF( N, D, E, INFO ) +* +* .. Scalar Arguments .. +* INTEGER INFO, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION D( * ), E( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DSTERF computes all eigenvalues of a symmetric tridiagonal matrix +*> using the Pal-Walker-Kahan variant of the QL or QR algorithm. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The order of the matrix. N >= 0. +*> \endverbatim +*> +*> \param[in,out] D +*> \verbatim +*> D is DOUBLE PRECISION array, dimension (N) +*> On entry, the n diagonal elements of the tridiagonal matrix. +*> On exit, if INFO = 0, the eigenvalues in ascending order. +*> \endverbatim +*> +*> \param[in,out] E +*> \verbatim +*> E is DOUBLE PRECISION array, dimension (N-1) +*> On entry, the (n-1) subdiagonal elements of the tridiagonal +*> matrix. +*> On exit, E has been destroyed. +*> \endverbatim +*> +*> \param[out] INFO +*> \verbatim +*> INFO is INTEGER +*> = 0: successful exit +*> < 0: if INFO = -i, the i-th argument had an illegal value +*> > 0: the algorithm failed to find all of the eigenvalues in +*> a total of 30*N iterations; if INFO = i, then i +*> elements of E have not converged to zero. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERcomputational +* +* ===================================================================== + SUBROUTINE DSTERF( N, D, E, INFO ) +* +* -- LAPACK computational routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER INFO, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION D( * ), E( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. 
+ DOUBLE PRECISION ZERO, ONE, TWO, THREE + PARAMETER ( ZERO = 0.0D0, ONE = 1.0D0, TWO = 2.0D0, + $ THREE = 3.0D0 ) + INTEGER MAXIT + PARAMETER ( MAXIT = 30 ) +* .. +* .. Local Scalars .. + INTEGER I, ISCALE, JTOT, L, L1, LEND, LENDSV, LSV, M, + $ NMAXIT + DOUBLE PRECISION ALPHA, ANORM, BB, C, EPS, EPS2, GAMMA, OLDC, + $ OLDGAM, P, R, RT1, RT2, RTE, S, SAFMAX, SAFMIN, + $ SIGMA, SSFMAX, SSFMIN, RMAX +* .. +* .. External Functions .. + DOUBLE PRECISION DLAMCH, DLANST, DLAPY2 + EXTERNAL DLAMCH, DLANST, DLAPY2 +* .. +* .. External Subroutines .. + EXTERNAL DLAE2, DLASCL, DLASRT, XERBLA +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, SIGN, SQRT +* .. +* .. Executable Statements .. +* +* Test the input parameters. +* + INFO = 0 +* +* Quick return if possible +* + IF( N.LT.0 ) THEN + INFO = -1 + CALL XERBLA( 'DSTERF', -INFO ) + RETURN + END IF + IF( N.LE.1 ) + $ RETURN +* +* Determine the unit roundoff for this environment. +* + EPS = DLAMCH( 'E' ) + EPS2 = EPS**2 + SAFMIN = DLAMCH( 'S' ) + SAFMAX = ONE / SAFMIN + SSFMAX = SQRT( SAFMAX ) / THREE + SSFMIN = SQRT( SAFMIN ) / EPS2 + RMAX = DLAMCH( 'O' ) +* +* Compute the eigenvalues of the tridiagonal matrix. +* + NMAXIT = N*MAXIT + SIGMA = ZERO + JTOT = 0 +* +* Determine where the matrix splits and choose QL or QR iteration +* for each block, according to whether top or bottom diagonal +* element is smaller. +* + L1 = 1 +* + 10 CONTINUE + print *, "l1 = ", l1 + IF( L1.GT.N ) THEN + print *, "going to 170" + GO TO 170 + end if + IF( L1.GT.1 ) + $ E( L1-1 ) = ZERO + DO 20 M = L1, N - 1 + IF( ABS( E( M ) ).LE.( SQRT( ABS( D( M ) ) )*SQRT( ABS( D( M+ + $ 1 ) ) ) )*EPS ) THEN + E( M ) = ZERO + GO TO 30 + END IF + 20 CONTINUE + M = N +* + 30 CONTINUE + print *, "30, d" + print *, d(1:n) + L = L1 + LSV = L + LEND = M + LENDSV = LEND + L1 = M + 1 + IF( LEND.EQ.L ) + $ GO TO 10 +* +* Scale submatrix in rows and columns L to LEND +* + ANORM = DLANST( 'M', LEND-L+1, D( L ), E( L ) ) + ISCALE = 0 + IF( ANORM.EQ.ZERO ) + $ GO TO 10 + IF( (ANORM.GT.SSFMAX) ) THEN + ISCALE = 1 + CALL DLASCL( 'G', 0, 0, ANORM, SSFMAX, LEND-L+1, 1, D( L ), N, + $ INFO ) + CALL DLASCL( 'G', 0, 0, ANORM, SSFMAX, LEND-L, 1, E( L ), N, + $ INFO ) + ELSE IF( ANORM.LT.SSFMIN ) THEN + ISCALE = 2 + CALL DLASCL( 'G', 0, 0, ANORM, SSFMIN, LEND-L+1, 1, D( L ), N, + $ INFO ) + CALL DLASCL( 'G', 0, 0, ANORM, SSFMIN, LEND-L, 1, E( L ), N, + $ INFO ) + END IF +* + DO 40 I = L, LEND - 1 + E( I ) = E( I )**2 + 40 CONTINUE +* +* Choose between QL and QR iteration +* + IF( ABS( D( LEND ) ).LT.ABS( D( L ) ) ) THEN + LEND = LSV + L = LENDSV + END IF +* + IF( LEND.GE.L ) THEN + print *, "ql, d" + print *, d(1:n) +* +* QL Iteration +* +* Look for small subdiagonal element. +* + 50 CONTINUE + IF( L.NE.LEND ) THEN + DO 60 M = L, LEND - 1 + IF( ABS( E( M ) ).LE.EPS2*ABS( D( M )*D( M+1 ) ) ) + $ GO TO 70 + 60 CONTINUE + END IF + M = LEND +* + 70 CONTINUE + IF( M.LT.LEND ) + $ E( M ) = ZERO + P = D( L ) + IF( M.EQ.L ) + $ GO TO 90 +* +* If remaining matrix is 2 by 2, use DLAE2 to compute its +* eigenvalues. +* + IF( M.EQ.L+1 ) THEN + RTE = SQRT( E( L ) ) + CALL DLAE2( D( L ), RTE, D( L+1 ), RT1, RT2 ) + D( L ) = RT1 + D( L+1 ) = RT2 + E( L ) = ZERO + L = L + 2 + IF( L.LE.LEND ) + $ GO TO 50 + GO TO 150 + END IF +* + IF( JTOT.EQ.NMAXIT ) + $ GO TO 150 + JTOT = JTOT + 1 +* +* Form shift. 
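The shift formed just below is compact enough to restate. A minimal Go sketch, assuming E(L) already holds the squared off-diagonal (it was squared in the DO 40 loop above); the function name is invented for illustration.

package main

import "math"

// pwkShift returns the shift sigma that the QL step below computes from the
// current diagonal entry p, the next diagonal d1, and the squared
// off-diagonal eSquared.
func pwkShift(p, d1, eSquared float64) float64 {
	rte := math.Sqrt(eSquared)
	s := (d1 - p) / (2 * rte)
	r := math.Hypot(s, 1)                  // DLAPY2(SIGMA, ONE)
	return p - rte/(s+math.Copysign(r, s)) // SIGN(R, SIGMA) in the Fortran
}

func main() { _ = pwkShift(1, 3, 4) }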
+* + RTE = SQRT( E( L ) ) + SIGMA = ( D( L+1 )-P ) / ( TWO*RTE ) + R = DLAPY2( SIGMA, ONE ) + SIGMA = P - ( RTE / ( SIGMA+SIGN( R, SIGMA ) ) ) +* + C = ONE + S = ZERO + GAMMA = D( M ) - SIGMA + P = GAMMA*GAMMA +* +* Inner loop +* + print *, "inner loop d before" + print *, d(1:n) + + DO 80 I = M - 1, L, -1 + print *, "inner loop" + print *, "ei", e(i) + BB = E( I ) + R = P + BB + print *, "bb,p,r" + print *, bb,p,r + IF( I.NE.M-1 ) THEN + print *, s,r + E( I+1 ) = S*R + end if + OLDC = C + C = P / R + S = BB / R + OLDGAM = GAMMA + print *, "di", d(i) + ALPHA = D( I ) + GAMMA = C*( ALPHA-SIGMA ) - S*OLDGAM + print *,"og, a, ga", OLDGAM, ALPHA, GAMMA + D( I+1 ) = OLDGAM + ( ALPHA-GAMMA ) + IF( C.NE.ZERO ) THEN + P = ( GAMMA*GAMMA ) / C + ELSE + P = OLDC*BB + END IF + print *, "p, gamma = ", p,GAMMA + 80 CONTINUE +* + E( L ) = S*P + D( L ) = SIGMA + GAMMA + + print *, "inner loop d after" + print *, d(1:n) + GO TO 50 +* +* Eigenvalue found. +* + 90 CONTINUE + D( L ) = P +* + L = L + 1 + IF( L.LE.LEND ) + $ GO TO 50 + GO TO 150 +* + ELSE +* +* QR Iteration +* +* Look for small superdiagonal element. +* + 100 CONTINUE + DO 110 M = L, LEND + 1, -1 + IF( ABS( E( M-1 ) ).LE.EPS2*ABS( D( M )*D( M-1 ) ) ) + $ GO TO 120 + 110 CONTINUE + M = LEND +* + 120 CONTINUE + IF( M.GT.LEND ) + $ E( M-1 ) = ZERO + P = D( L ) + IF( M.EQ.L ) + $ GO TO 140 +* +* If remaining matrix is 2 by 2, use DLAE2 to compute its +* eigenvalues. +* + IF( M.EQ.L-1 ) THEN + RTE = SQRT( E( L-1 ) ) + CALL DLAE2( D( L ), RTE, D( L-1 ), RT1, RT2 ) + D( L ) = RT1 + D( L-1 ) = RT2 + E( L-1 ) = ZERO + L = L - 2 + IF( L.GE.LEND ) + $ GO TO 100 + GO TO 150 + END IF +* + IF( JTOT.EQ.NMAXIT ) + $ GO TO 150 + JTOT = JTOT + 1 +* +* Form shift. +* + RTE = SQRT( E( L-1 ) ) + SIGMA = ( D( L-1 )-P ) / ( TWO*RTE ) + R = DLAPY2( SIGMA, ONE ) + SIGMA = P - ( RTE / ( SIGMA+SIGN( R, SIGMA ) ) ) +* + C = ONE + S = ZERO + GAMMA = D( M ) - SIGMA + P = GAMMA*GAMMA +* +* Inner loop +* + DO 130 I = M, L - 1 + BB = E( I ) + R = P + BB + IF( I.NE.M ) + $ E( I-1 ) = S*R + OLDC = C + C = P / R + S = BB / R + OLDGAM = GAMMA + ALPHA = D( I+1 ) + GAMMA = C*( ALPHA-SIGMA ) - S*OLDGAM + D( I ) = OLDGAM + ( ALPHA-GAMMA ) + IF( C.NE.ZERO ) THEN + P = ( GAMMA*GAMMA ) / C + ELSE + P = OLDC*BB + END IF + 130 CONTINUE +* + E( L-1 ) = S*P + D( L ) = SIGMA + GAMMA + GO TO 100 +* +* Eigenvalue found. +* + 140 CONTINUE + D( L ) = P +* + L = L - 1 + IF( L.GE.LEND ) + $ GO TO 100 + GO TO 150 +* + END IF +* +* Undo scaling if necessary +* + 150 CONTINUE + IF( ISCALE.EQ.1 ) + $ CALL DLASCL( 'G', 0, 0, SSFMAX, ANORM, LENDSV-LSV+1, 1, + $ D( LSV ), N, INFO ) + IF( ISCALE.EQ.2 ) + $ CALL DLASCL( 'G', 0, 0, SSFMIN, ANORM, LENDSV-LSV+1, 1, + $ D( LSV ), N, INFO ) +* +* Check for no convergence to an eigenvalue after a total +* of N*MAXIT iterations. +* + IF( JTOT.LT.NMAXIT ) + $ GO TO 10 + DO 160 I = 1, N - 1 + IF( E( I ).NE.ZERO ) + $ INFO = INFO + 1 + 160 CONTINUE + GO TO 180 +* +* Sort eigenvalues in increasing order. 
+* + 170 CONTINUE + CALL DLASRT( 'I', N, D, INFO ) +* + 180 CONTINUE + RETURN +* +* End of DSTERF +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/lsame.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/lsame.f new file mode 100644 index 0000000..315304c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/lsame.f @@ -0,0 +1,125 @@ +*> \brief \b LSAME +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* LOGICAL FUNCTION LSAME( CA, CB ) +* +* .. Scalar Arguments .. +* CHARACTER CA, CB +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> LSAME returns .TRUE. if CA is the same letter as CB regardless of +*> case. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CA +*> \verbatim +*> \endverbatim +*> +*> \param[in] CB +*> \verbatim +*> CA and CB specify the single characters to be compared. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION LSAME( CA, CB ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER CA, CB +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC ICHAR +* .. +* .. Local Scalars .. + INTEGER INTA, INTB, ZCODE +* .. +* .. Executable Statements .. +* +* Test if the characters are equal +* + LSAME = CA.EQ.CB + IF( LSAME ) + $ RETURN +* +* Now test for equivalence if both characters are alphabetic. +* + ZCODE = ICHAR( 'Z' ) +* +* Use 'Z' rather than 'A' so that ASCII can be detected on Prime +* machines, on which ICHAR returns a value with bit 8 set. +* ICHAR('A') on Prime machines returns 193 which is the same as +* ICHAR('A') on an EBCDIC machine. +* + INTA = ICHAR( CA ) + INTB = ICHAR( CB ) +* + IF( ZCODE.EQ.90 .OR. ZCODE.EQ.122 ) THEN +* +* ASCII is assumed - ZCODE is the ASCII code of either lower or +* upper case 'Z'. +* + IF( INTA.GE.97 .AND. INTA.LE.122 ) INTA = INTA - 32 + IF( INTB.GE.97 .AND. INTB.LE.122 ) INTB = INTB - 32 +* + ELSE IF( ZCODE.EQ.233 .OR. ZCODE.EQ.169 ) THEN +* +* EBCDIC is assumed - ZCODE is the EBCDIC code of either lower or +* upper case 'Z'. +* + IF( INTA.GE.129 .AND. INTA.LE.137 .OR. + $ INTA.GE.145 .AND. INTA.LE.153 .OR. + $ INTA.GE.162 .AND. INTA.LE.169 ) INTA = INTA + 64 + IF( INTB.GE.129 .AND. INTB.LE.137 .OR. + $ INTB.GE.145 .AND. INTB.LE.153 .OR. + $ INTB.GE.162 .AND. INTB.LE.169 ) INTB = INTB + 64 +* + ELSE IF( ZCODE.EQ.218 .OR. ZCODE.EQ.250 ) THEN +* +* ASCII is assumed, on Prime machines - ZCODE is the ASCII code +* plus 128 of either lower or upper case 'Z'. +* + IF( INTA.GE.225 .AND. INTA.LE.250 ) INTA = INTA - 32 + IF( INTB.GE.225 .AND. 
INTB.LE.250 ) INTB = INTB - 32 + END IF + LSAME = INTA.EQ.INTB +* +* RETURN +* +* End of LSAME +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/testdsterf.f90 b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/testdsterf.f90 new file mode 100644 index 0000000..8373f51 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/testdsterf.f90 @@ -0,0 +1,15 @@ +program testdsterf +implicit none +integer, parameter :: n = 4 +real(kind=8), dimension(n) :: d +real(kind=8), dimension(n-1) :: e +integer :: info,i + +d(1:4) = (/1D+00, 3D+00, 4D+00, 6D+00/) +e(1:3) = (/2D+00, 4D+00, 5D+00/) + +call dsterf(n,d,e,info) +DO i = 1, n + print *, d(i) +end do +end \ No newline at end of file diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/xerbla.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/xerbla.f new file mode 100644 index 0000000..3e93bc4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/dsterftest/xerbla.f @@ -0,0 +1,99 @@ +*> \brief \b XERBLA +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download XERBLA + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE XERBLA( SRNAME, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER*(*) SRNAME +* INTEGER INFO +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> XERBLA is an error handler for the LAPACK routines. +*> It is called by an LAPACK routine if an input parameter has an +*> invalid value. A message is printed and execution stops. +*> +*> Installers may consider modifying the STOP statement in order to +*> call system-specific exception-handling facilities. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] SRNAME +*> \verbatim +*> SRNAME is CHARACTER*(*) +*> The name of the routine which called XERBLA. +*> \endverbatim +*> +*> \param[in] INFO +*> \verbatim +*> INFO is INTEGER +*> The position of the invalid parameter in the parameter list +*> of the calling routine. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE XERBLA( SRNAME, INFO ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER*(*) SRNAME + INTEGER INFO +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC LEN_TRIM +* .. +* .. Executable Statements .. 
+* + WRITE( *, FMT = 9999 )SRNAME( 1:LEN_TRIM( SRNAME ) ), INFO +* + STOP +* + 9999 FORMAT( ' ** On entry to ', A, ' parameter number ', I2, ' had ', + $ 'an illegal value' ) +* +* End of XERBLA +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/daxpy.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/daxpy.f new file mode 100644 index 0000000..64a02d6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/daxpy.f @@ -0,0 +1,115 @@ +*> \brief \b DAXPY +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DAXPY(N,DA,DX,INCX,DY,INCY) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DA +* INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION DX(*),DY(*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DAXPY constant times a vector plus a vector. +*> uses unrolled loops for increments equal to one. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level1 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> jack dongarra, linpack, 3/11/78. +*> modified 12/3/93, array(1) declarations changed to array(*) +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DAXPY(N,DA,DX,INCX,DY,INCY) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DA + INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. + DOUBLE PRECISION DX(*),DY(*) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I,IX,IY,M,MP1 +* .. +* .. Intrinsic Functions .. + INTRINSIC MOD +* .. + IF (N.LE.0) RETURN + IF (DA.EQ.0.0d0) RETURN + IF (INCX.EQ.1 .AND. INCY.EQ.1) THEN +* +* code for both increments equal to 1 +* +* +* clean-up loop +* + M = MOD(N,4) + IF (M.NE.0) THEN + DO I = 1,M + DY(I) = DY(I) + DA*DX(I) + END DO + END IF + IF (N.LT.4) RETURN + MP1 = M + 1 + DO I = MP1,N,4 + DY(I) = DY(I) + DA*DX(I) + DY(I+1) = DY(I+1) + DA*DX(I+1) + DY(I+2) = DY(I+2) + DA*DX(I+2) + DY(I+3) = DY(I+3) + DA*DX(I+3) + END DO + ELSE +* +* code for unequal increments or equal increments +* not equal to 1 +* + IX = 1 + IY = 1 + IF (INCX.LT.0) IX = (-N+1)*INCX + 1 + IF (INCY.LT.0) IY = (-N+1)*INCY + 1 + DO I = 1,N + DY(IY) = DY(IY) + DA*DX(IX) + IX = IX + INCX + IY = IY + INCY + END DO + END IF + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dcopy.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dcopy.f new file mode 100644 index 0000000..d9d5ac7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dcopy.f @@ -0,0 +1,115 @@ +*> \brief \b DCOPY +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DCOPY(N,DX,INCX,DY,INCY) +* +* .. Scalar Arguments .. +* INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION DX(*),DY(*) +* .. 
+* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DCOPY copies a vector, x, to a vector, y. +*> uses unrolled loops for increments equal to one. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level1 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> jack dongarra, linpack, 3/11/78. +*> modified 12/3/93, array(1) declarations changed to array(*) +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DCOPY(N,DX,INCX,DY,INCY) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER INCX,INCY,N +* .. +* .. Array Arguments .. + DOUBLE PRECISION DX(*),DY(*) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I,IX,IY,M,MP1 +* .. +* .. Intrinsic Functions .. + INTRINSIC MOD +* .. + IF (N.LE.0) RETURN + IF (INCX.EQ.1 .AND. INCY.EQ.1) THEN +* +* code for both increments equal to 1 +* +* +* clean-up loop +* + M = MOD(N,7) + IF (M.NE.0) THEN + DO I = 1,M + DY(I) = DX(I) + END DO + IF (N.LT.7) RETURN + END IF + MP1 = M + 1 + DO I = MP1,N,7 + DY(I) = DX(I) + DY(I+1) = DX(I+1) + DY(I+2) = DX(I+2) + DY(I+3) = DX(I+3) + DY(I+4) = DX(I+4) + DY(I+5) = DX(I+5) + DY(I+6) = DX(I+6) + END DO + ELSE +* +* code for unequal increments or equal increments +* not equal to 1 +* + IX = 1 + IY = 1 + IF (INCX.LT.0) IX = (-N+1)*INCX + 1 + IF (INCY.LT.0) IY = (-N+1)*INCY + 1 + DO I = 1,N + DY(IY) = DX(IX) + IX = IX + INCX + IY = IY + INCY + END DO + END IF + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemm.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemm.f new file mode 100644 index 0000000..4bae243 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemm.f @@ -0,0 +1,384 @@ +*> \brief \b DGEMM +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DGEMM(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION ALPHA,BETA +* INTEGER K,LDA,LDB,LDC,M,N +* CHARACTER TRANSA,TRANSB +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A(LDA,*),B(LDB,*),C(LDC,*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DGEMM performs one of the matrix-matrix operations +*> +*> C := alpha*op( A )*op( B ) + beta*C, +*> +*> where op( X ) is one of +*> +*> op( X ) = X or op( X ) = X**T, +*> +*> alpha and beta are scalars, and A, B and C are matrices, with op( A ) +*> an m by k matrix, op( B ) a k by n matrix and C an m by n matrix. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] TRANSA +*> \verbatim +*> TRANSA is CHARACTER*1 +*> On entry, TRANSA specifies the form of op( A ) to be used in +*> the matrix multiplication as follows: +*> +*> TRANSA = 'N' or 'n', op( A ) = A. +*> +*> TRANSA = 'T' or 't', op( A ) = A**T. +*> +*> TRANSA = 'C' or 'c', op( A ) = A**T. 
+*> \endverbatim +*> +*> \param[in] TRANSB +*> \verbatim +*> TRANSB is CHARACTER*1 +*> On entry, TRANSB specifies the form of op( B ) to be used in +*> the matrix multiplication as follows: +*> +*> TRANSB = 'N' or 'n', op( B ) = B. +*> +*> TRANSB = 'T' or 't', op( B ) = B**T. +*> +*> TRANSB = 'C' or 'c', op( B ) = B**T. +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> On entry, M specifies the number of rows of the matrix +*> op( A ) and of the matrix C. M must be at least zero. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> On entry, N specifies the number of columns of the matrix +*> op( B ) and the number of columns of the matrix C. N must be +*> at least zero. +*> \endverbatim +*> +*> \param[in] K +*> \verbatim +*> K is INTEGER +*> On entry, K specifies the number of columns of the matrix +*> op( A ) and the number of rows of the matrix op( B ). K must +*> be at least zero. +*> \endverbatim +*> +*> \param[in] ALPHA +*> \verbatim +*> ALPHA is DOUBLE PRECISION. +*> On entry, ALPHA specifies the scalar alpha. +*> \endverbatim +*> +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION array of DIMENSION ( LDA, ka ), where ka is +*> k when TRANSA = 'N' or 'n', and is m otherwise. +*> Before entry with TRANSA = 'N' or 'n', the leading m by k +*> part of the array A must contain the matrix A, otherwise +*> the leading k by m part of the array A must contain the +*> matrix A. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> On entry, LDA specifies the first dimension of A as declared +*> in the calling (sub) program. When TRANSA = 'N' or 'n' then +*> LDA must be at least max( 1, m ), otherwise LDA must be at +*> least max( 1, k ). +*> \endverbatim +*> +*> \param[in] B +*> \verbatim +*> B is DOUBLE PRECISION array of DIMENSION ( LDB, kb ), where kb is +*> n when TRANSB = 'N' or 'n', and is k otherwise. +*> Before entry with TRANSB = 'N' or 'n', the leading k by n +*> part of the array B must contain the matrix B, otherwise +*> the leading n by k part of the array B must contain the +*> matrix B. +*> \endverbatim +*> +*> \param[in] LDB +*> \verbatim +*> LDB is INTEGER +*> On entry, LDB specifies the first dimension of B as declared +*> in the calling (sub) program. When TRANSB = 'N' or 'n' then +*> LDB must be at least max( 1, k ), otherwise LDB must be at +*> least max( 1, n ). +*> \endverbatim +*> +*> \param[in] BETA +*> \verbatim +*> BETA is DOUBLE PRECISION. +*> On entry, BETA specifies the scalar beta. When BETA is +*> supplied as zero then C need not be set on input. +*> \endverbatim +*> +*> \param[in,out] C +*> \verbatim +*> C is DOUBLE PRECISION array of DIMENSION ( LDC, n ). +*> Before entry, the leading m by n part of the array C must +*> contain the matrix C, except when beta is zero, in which +*> case C need not be set on entry. +*> On exit, the array C is overwritten by the m by n matrix +*> ( alpha*op( A )*op( B ) + beta*C ). +*> \endverbatim +*> +*> \param[in] LDC +*> \verbatim +*> LDC is INTEGER +*> On entry, LDC specifies the first dimension of C as declared +*> in the calling (sub) program. LDC must be at least +*> max( 1, m ). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2015 +* +*> \ingroup double_blas_level3 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Level 3 Blas routine. 
+*> +*> -- Written on 8-February-1989. +*> Jack Dongarra, Argonne National Laboratory. +*> Iain Duff, AERE Harwell. +*> Jeremy Du Croz, Numerical Algorithms Group Ltd. +*> Sven Hammarling, Numerical Algorithms Group Ltd. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DGEMM(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC) +* +* -- Reference BLAS level3 routine (version 3.6.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2015 +* +* .. Scalar Arguments .. + DOUBLE PRECISION ALPHA,BETA + INTEGER K,LDA,LDB,LDC,M,N + CHARACTER TRANSA,TRANSB +* .. +* .. Array Arguments .. + DOUBLE PRECISION A(LDA,*),B(LDB,*),C(LDC,*) +* .. +* +* ===================================================================== +* +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Intrinsic Functions .. + INTRINSIC MAX +* .. +* .. Local Scalars .. + DOUBLE PRECISION TEMP + INTEGER I,INFO,J,L,NCOLA,NROWA,NROWB + LOGICAL NOTA,NOTB +* .. +* .. Parameters .. + DOUBLE PRECISION ONE,ZERO + PARAMETER (ONE=1.0D+0,ZERO=0.0D+0) +* .. +* +* Set NOTA and NOTB as true if A and B respectively are not +* transposed and set NROWA, NCOLA and NROWB as the number of rows +* and columns of A and the number of rows of B respectively. +* + NOTA = LSAME(TRANSA,'N') + NOTB = LSAME(TRANSB,'N') + IF (NOTA) THEN + NROWA = M + NCOLA = K + ELSE + NROWA = K + NCOLA = M + END IF + IF (NOTB) THEN + NROWB = K + ELSE + NROWB = N + END IF +* +* Test the input parameters. +* + INFO = 0 + IF ((.NOT.NOTA) .AND. (.NOT.LSAME(TRANSA,'C')) .AND. + + (.NOT.LSAME(TRANSA,'T'))) THEN + INFO = 1 + ELSE IF ((.NOT.NOTB) .AND. (.NOT.LSAME(TRANSB,'C')) .AND. + + (.NOT.LSAME(TRANSB,'T'))) THEN + INFO = 2 + ELSE IF (M.LT.0) THEN + INFO = 3 + ELSE IF (N.LT.0) THEN + INFO = 4 + ELSE IF (K.LT.0) THEN + INFO = 5 + ELSE IF (LDA.LT.MAX(1,NROWA)) THEN + INFO = 8 + ELSE IF (LDB.LT.MAX(1,NROWB)) THEN + INFO = 10 + ELSE IF (LDC.LT.MAX(1,M)) THEN + INFO = 13 + END IF + IF (INFO.NE.0) THEN + CALL XERBLA('DGEMM ',INFO) + RETURN + END IF +* +* Quick return if possible. +* + IF ((M.EQ.0) .OR. (N.EQ.0) .OR. + + (((ALPHA.EQ.ZERO).OR. (K.EQ.0)).AND. (BETA.EQ.ONE))) RETURN +* +* And if alpha.eq.zero. +* + IF (ALPHA.EQ.ZERO) THEN + IF (BETA.EQ.ZERO) THEN + DO 20 J = 1,N + DO 10 I = 1,M + C(I,J) = ZERO + 10 CONTINUE + 20 CONTINUE + ELSE + DO 40 J = 1,N + DO 30 I = 1,M + C(I,J) = BETA*C(I,J) + 30 CONTINUE + 40 CONTINUE + END IF + RETURN + END IF +* +* Start the operations. +* + IF (NOTB) THEN + IF (NOTA) THEN +* +* Form C := alpha*A*B + beta*C. 
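The untransposed case handled by the loop nest that follows is the textbook triple loop. A hedged Go sketch on row-major slices (names and layout are choices made for the example, not gonum's API); note the Fortran additionally special-cases BETA = 0 so that C need not be initialized.

package main

// gemmNN forms C := alpha*A*B + beta*C for an m×k A, k×n B and m×n C,
// looping the same way as the reference code below: for each column j of C,
// scale by beta, then accumulate alpha*B(l,j) times column l of A.
func gemmNN(m, n, k int, alpha float64, a, b, c [][]float64, beta float64) {
	for j := 0; j < n; j++ {
		for i := 0; i < m; i++ {
			c[i][j] *= beta
		}
		for l := 0; l < k; l++ {
			t := alpha * b[l][j]
			for i := 0; i < m; i++ {
				c[i][j] += t * a[i][l]
			}
		}
	}
}

func main() {
	a := [][]float64{{1, 2}, {3, 4}}
	b := [][]float64{{5, 6}, {7, 8}}
	c := [][]float64{{0, 0}, {0, 0}}
	gemmNN(2, 2, 2, 1, a, b, c, 0)
}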
+* + DO 90 J = 1,N + IF (BETA.EQ.ZERO) THEN + DO 50 I = 1,M + C(I,J) = ZERO + 50 CONTINUE + ELSE IF (BETA.NE.ONE) THEN + DO 60 I = 1,M + C(I,J) = BETA*C(I,J) + 60 CONTINUE + END IF + DO 80 L = 1,K + TEMP = ALPHA*B(L,J) + DO 70 I = 1,M + C(I,J) = C(I,J) + TEMP*A(I,L) + 70 CONTINUE + 80 CONTINUE + 90 CONTINUE + ELSE +* +* Form C := alpha*A**T*B + beta*C +* + DO 120 J = 1,N + DO 110 I = 1,M + TEMP = ZERO + DO 100 L = 1,K + TEMP = TEMP + A(L,I)*B(L,J) + 100 CONTINUE + IF (BETA.EQ.ZERO) THEN + C(I,J) = ALPHA*TEMP + ELSE + C(I,J) = ALPHA*TEMP + BETA*C(I,J) + END IF + 110 CONTINUE + 120 CONTINUE + END IF + ELSE + IF (NOTA) THEN +* +* Form C := alpha*A*B**T + beta*C +* + DO 170 J = 1,N + IF (BETA.EQ.ZERO) THEN + DO 130 I = 1,M + C(I,J) = ZERO + 130 CONTINUE + ELSE IF (BETA.NE.ONE) THEN + DO 140 I = 1,M + C(I,J) = BETA*C(I,J) + 140 CONTINUE + END IF + DO 160 L = 1,K + TEMP = ALPHA*B(J,L) + DO 150 I = 1,M + C(I,J) = C(I,J) + TEMP*A(I,L) + 150 CONTINUE + 160 CONTINUE + 170 CONTINUE + ELSE +* +* Form C := alpha*A**T*B**T + beta*C +* + DO 200 J = 1,N + DO 190 I = 1,M + TEMP = ZERO + DO 180 L = 1,K + TEMP = TEMP + A(L,I)*B(J,L) + 180 CONTINUE + IF (BETA.EQ.ZERO) THEN + C(I,J) = ALPHA*TEMP + ELSE + C(I,J) = ALPHA*TEMP + BETA*C(I,J) + END IF + 190 CONTINUE + 200 CONTINUE + END IF + END IF +* + RETURN +* +* End of DGEMM . +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemv.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemv.f new file mode 100644 index 0000000..e04cc07 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dgemv.f @@ -0,0 +1,330 @@ +*> \brief \b DGEMV +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DGEMV(TRANS,M,N,ALPHA,A,LDA,X,INCX,BETA,Y,INCY) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION ALPHA,BETA +* INTEGER INCX,INCY,LDA,M,N +* CHARACTER TRANS +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A(LDA,*),X(*),Y(*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DGEMV performs one of the matrix-vector operations +*> +*> y := alpha*A*x + beta*y, or y := alpha*A**T*x + beta*y, +*> +*> where alpha and beta are scalars, x and y are vectors and A is an +*> m by n matrix. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] TRANS +*> \verbatim +*> TRANS is CHARACTER*1 +*> On entry, TRANS specifies the operation to be performed as +*> follows: +*> +*> TRANS = 'N' or 'n' y := alpha*A*x + beta*y. +*> +*> TRANS = 'T' or 't' y := alpha*A**T*x + beta*y. +*> +*> TRANS = 'C' or 'c' y := alpha*A**T*x + beta*y. +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> On entry, M specifies the number of rows of the matrix A. +*> M must be at least zero. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> On entry, N specifies the number of columns of the matrix A. +*> N must be at least zero. +*> \endverbatim +*> +*> \param[in] ALPHA +*> \verbatim +*> ALPHA is DOUBLE PRECISION. +*> On entry, ALPHA specifies the scalar alpha. +*> \endverbatim +*> +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION array of DIMENSION ( LDA, n ). +*> Before entry, the leading m by n part of the array A must +*> contain the matrix of coefficients. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> On entry, LDA specifies the first dimension of A as declared +*> in the calling (sub) program. 
LDA must be at least +*> max( 1, m ). +*> \endverbatim +*> +*> \param[in] X +*> \verbatim +*> X is DOUBLE PRECISION array of DIMENSION at least +*> ( 1 + ( n - 1 )*abs( INCX ) ) when TRANS = 'N' or 'n' +*> and at least +*> ( 1 + ( m - 1 )*abs( INCX ) ) otherwise. +*> Before entry, the incremented array X must contain the +*> vector x. +*> \endverbatim +*> +*> \param[in] INCX +*> \verbatim +*> INCX is INTEGER +*> On entry, INCX specifies the increment for the elements of +*> X. INCX must not be zero. +*> \endverbatim +*> +*> \param[in] BETA +*> \verbatim +*> BETA is DOUBLE PRECISION. +*> On entry, BETA specifies the scalar beta. When BETA is +*> supplied as zero then Y need not be set on input. +*> \endverbatim +*> +*> \param[in,out] Y +*> \verbatim +*> Y is DOUBLE PRECISION array of DIMENSION at least +*> ( 1 + ( m - 1 )*abs( INCY ) ) when TRANS = 'N' or 'n' +*> and at least +*> ( 1 + ( n - 1 )*abs( INCY ) ) otherwise. +*> Before entry with BETA non-zero, the incremented array Y +*> must contain the vector y. On exit, Y is overwritten by the +*> updated vector y. +*> \endverbatim +*> +*> \param[in] INCY +*> \verbatim +*> INCY is INTEGER +*> On entry, INCY specifies the increment for the elements of +*> Y. INCY must not be zero. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2015 +* +*> \ingroup double_blas_level2 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Level 2 Blas routine. +*> The vector and matrix arguments are not referenced when N = 0, or M = 0 +*> +*> -- Written on 22-October-1986. +*> Jack Dongarra, Argonne National Lab. +*> Jeremy Du Croz, Nag Central Office. +*> Sven Hammarling, Nag Central Office. +*> Richard Hanson, Sandia National Labs. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DGEMV(TRANS,M,N,ALPHA,A,LDA,X,INCX,BETA,Y,INCY) +* +* -- Reference BLAS level2 routine (version 3.6.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2015 +* +* .. Scalar Arguments .. + DOUBLE PRECISION ALPHA,BETA + INTEGER INCX,INCY,LDA,M,N + CHARACTER TRANS +* .. +* .. Array Arguments .. + DOUBLE PRECISION A(LDA,*),X(*),Y(*) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE,ZERO + PARAMETER (ONE=1.0D+0,ZERO=0.0D+0) +* .. +* .. Local Scalars .. + DOUBLE PRECISION TEMP + INTEGER I,INFO,IX,IY,J,JX,JY,KX,KY,LENX,LENY +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Intrinsic Functions .. + INTRINSIC MAX +* .. +* +* Test the input parameters. +* + INFO = 0 + IF (.NOT.LSAME(TRANS,'N') .AND. .NOT.LSAME(TRANS,'T') .AND. + + .NOT.LSAME(TRANS,'C')) THEN + INFO = 1 + ELSE IF (M.LT.0) THEN + INFO = 2 + ELSE IF (N.LT.0) THEN + INFO = 3 + ELSE IF (LDA.LT.MAX(1,M)) THEN + INFO = 6 + ELSE IF (INCX.EQ.0) THEN + INFO = 8 + ELSE IF (INCY.EQ.0) THEN + INFO = 11 + END IF + IF (INFO.NE.0) THEN + CALL XERBLA('DGEMV ',INFO) + RETURN + END IF +* +* Quick return if possible. +* + IF ((M.EQ.0) .OR. (N.EQ.0) .OR. + + ((ALPHA.EQ.ZERO).AND. (BETA.EQ.ONE))) RETURN +* +* Set LENX and LENY, the lengths of the vectors x and y, and set +* up the start points in X and Y. 
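The KX/KY start-point rule set up in the next few statements is easy to misread, so here is a small Go sketch of the same rule in 0-based indexing; the helper name is invented for illustration.

package main

// startIndex gives the 0-based index of the first logical element of a BLAS
// vector of length n stored with stride inc: 0 when inc > 0, and
// -(n-1)*inc when inc < 0 (the Fortran's 1 - (n-1)*inc, shifted to 0-based).
func startIndex(n, inc int) int {
	if inc > 0 {
		return 0
	}
	return -(n - 1) * inc
}

func main() {
	// A vector of length 3 with inc = -2 occupies indices 4, 2, 0.
	_ = startIndex(3, -2) // 4
}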
+* + IF (LSAME(TRANS,'N')) THEN + LENX = N + LENY = M + ELSE + LENX = M + LENY = N + END IF + IF (INCX.GT.0) THEN + KX = 1 + ELSE + KX = 1 - (LENX-1)*INCX + END IF + IF (INCY.GT.0) THEN + KY = 1 + ELSE + KY = 1 - (LENY-1)*INCY + END IF +* +* Start the operations. In this version the elements of A are +* accessed sequentially with one pass through A. +* +* First form y := beta*y. +* + IF (BETA.NE.ONE) THEN + IF (INCY.EQ.1) THEN + IF (BETA.EQ.ZERO) THEN + DO 10 I = 1,LENY + Y(I) = ZERO + 10 CONTINUE + ELSE + DO 20 I = 1,LENY + Y(I) = BETA*Y(I) + 20 CONTINUE + END IF + ELSE + IY = KY + IF (BETA.EQ.ZERO) THEN + DO 30 I = 1,LENY + Y(IY) = ZERO + IY = IY + INCY + 30 CONTINUE + ELSE + DO 40 I = 1,LENY + Y(IY) = BETA*Y(IY) + IY = IY + INCY + 40 CONTINUE + END IF + END IF + END IF + IF (ALPHA.EQ.ZERO) RETURN + IF (LSAME(TRANS,'N')) THEN +* +* Form y := alpha*A*x + y. +* + JX = KX + IF (INCY.EQ.1) THEN + DO 60 J = 1,N + TEMP = ALPHA*X(JX) + DO 50 I = 1,M + Y(I) = Y(I) + TEMP*A(I,J) + 50 CONTINUE + JX = JX + INCX + 60 CONTINUE + ELSE + DO 80 J = 1,N + TEMP = ALPHA*X(JX) + IY = KY + DO 70 I = 1,M + Y(IY) = Y(IY) + TEMP*A(I,J) + IY = IY + INCY + 70 CONTINUE + JX = JX + INCX + 80 CONTINUE + END IF + ELSE +* +* Form y := alpha*A**T*x + y. +* + JY = KY + IF (INCX.EQ.1) THEN + DO 100 J = 1,N + TEMP = ZERO + DO 90 I = 1,M + TEMP = TEMP + A(I,J)*X(I) + 90 CONTINUE + Y(JY) = Y(JY) + ALPHA*TEMP + JY = JY + INCY + 100 CONTINUE + ELSE + DO 120 J = 1,N + TEMP = ZERO + IX = KX + DO 110 I = 1,M + TEMP = TEMP + A(I,J)*X(IX) + IX = IX + INCX + 110 CONTINUE + Y(JY) = Y(JY) + ALPHA*TEMP + JY = JY + INCY + 120 CONTINUE + END IF + END IF +* + RETURN +* +* End of DGEMV . +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlabad.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlabad.f new file mode 100644 index 0000000..da41263 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlabad.f @@ -0,0 +1,105 @@ +*> \brief \b DLABAD +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLABAD + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLABAD( SMALL, LARGE ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION LARGE, SMALL +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLABAD takes as input the values computed by DLAMCH for underflow and +*> overflow, and returns the square root of each of these values if the +*> log of LARGE is sufficiently large. This subroutine is intended to +*> identify machines with a large exponent range, such as the Crays, and +*> redefine the underflow and overflow limits to be the square roots of +*> the values computed by DLAMCH. This subroutine is needed because +*> DLAMCH does not compensate for poor arithmetic in the upper half of +*> the exponent range, as is found on a Cray. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in,out] SMALL +*> \verbatim +*> SMALL is DOUBLE PRECISION +*> On entry, the underflow threshold as computed by DLAMCH. +*> On exit, if LOG10(LARGE) is sufficiently large, the square +*> root of SMALL, otherwise unchanged. +*> \endverbatim +*> +*> \param[in,out] LARGE +*> \verbatim +*> LARGE is DOUBLE PRECISION +*> On entry, the overflow threshold as computed by DLAMCH. +*> On exit, if LOG10(LARGE) is sufficiently large, the square +*> root of LARGE, otherwise unchanged. 
+*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLABAD( SMALL, LARGE ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + DOUBLE PRECISION LARGE, SMALL +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC LOG10, SQRT +* .. +* .. Executable Statements .. +* +* If it looks like we're on a Cray, take the square root of +* SMALL and LARGE to avoid overflow and underflow problems. +* +* IF( LOG10( LARGE ).GT.2000.D0 ) THEN +* SMALL = SQRT( SMALL ) +* LARGE = SQRT( LARGE ) +* END IF +* + RETURN +* +* End of DLABAD +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlacpy.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlacpy.f new file mode 100644 index 0000000..a9a23c9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlacpy.f @@ -0,0 +1,156 @@ +*> \brief \b DLACPY copies all or part of one two-dimensional array to another. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLACPY + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLACPY( UPLO, M, N, A, LDA, B, LDB ) +* +* .. Scalar Arguments .. +* CHARACTER UPLO +* INTEGER LDA, LDB, M, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A( LDA, * ), B( LDB, * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLACPY copies all or part of a two-dimensional matrix A to another +*> matrix B. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] UPLO +*> \verbatim +*> UPLO is CHARACTER*1 +*> Specifies the part of the matrix A to be copied to B. +*> = 'U': Upper triangular part +*> = 'L': Lower triangular part +*> Otherwise: All of the matrix A +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> The number of rows of the matrix A. M >= 0. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of columns of the matrix A. N >= 0. +*> \endverbatim +*> +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION array, dimension (LDA,N) +*> The m by n matrix A. If UPLO = 'U', only the upper triangle +*> or trapezoid is accessed; if UPLO = 'L', only the lower +*> triangle or trapezoid is accessed. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> The leading dimension of the array A. LDA >= max(1,M). +*> \endverbatim +*> +*> \param[out] B +*> \verbatim +*> B is DOUBLE PRECISION array, dimension (LDB,N) +*> On exit, B = A in the locations specified by UPLO. +*> \endverbatim +*> +*> \param[in] LDB +*> \verbatim +*> LDB is INTEGER +*> The leading dimension of the array B. LDB >= max(1,M). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
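The UPLO cases DLACPY implements below amount to copying one triangle or the whole matrix. A minimal Go sketch for the square, row-major case; the function is illustrative only and not part of gonum.

package main

// copyTriangle mirrors DLACPY's UPLO = 'U' and 'L' branches for square
// matrices: copy only the upper or only the lower triangle of a into b.
func copyTriangle(upper bool, a, b [][]float64) {
	n := len(a)
	for j := 0; j < n; j++ {
		if upper {
			for i := 0; i <= j; i++ {
				b[i][j] = a[i][j]
			}
		} else {
			for i := j; i < n; i++ {
				b[i][j] = a[i][j]
			}
		}
	}
}

func main() {
	a := [][]float64{{1, 2}, {3, 4}}
	b := [][]float64{{0, 0}, {0, 0}}
	copyTriangle(true, a, b)
}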
+* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLACPY( UPLO, M, N, A, LDA, B, LDB ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + CHARACTER UPLO + INTEGER LDA, LDB, M, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION A( LDA, * ), B( LDB, * ) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I, J +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. Intrinsic Functions .. + INTRINSIC MIN +* .. +* .. Executable Statements .. +* + IF( LSAME( UPLO, 'U' ) ) THEN + DO 20 J = 1, N + DO 10 I = 1, MIN( J, M ) + B( I, J ) = A( I, J ) + 10 CONTINUE + 20 CONTINUE + ELSE IF( LSAME( UPLO, 'L' ) ) THEN + DO 40 J = 1, N + DO 30 I = J, M + B( I, J ) = A( I, J ) + 30 CONTINUE + 40 CONTINUE + ELSE + DO 60 J = 1, N + DO 50 I = 1, M + B( I, J ) = A( I, J ) + 50 CONTINUE + 60 CONTINUE + END IF + RETURN +* +* End of DLACPY +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlahr2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlahr2.f new file mode 100644 index 0000000..9d15979 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlahr2.f @@ -0,0 +1,326 @@ +*> \brief \b DLAHR2 reduces the specified number of first columns of a general rectangular matrix A so that elements below the specified subdiagonal are zero, and returns auxiliary matrices which are needed to apply the transformation to the unreduced part of A. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAHR2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLAHR2( N, K, NB, A, LDA, TAU, T, LDT, Y, LDY ) +* +* .. Scalar Arguments .. +* INTEGER K, LDA, LDT, LDY, N, NB +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A( LDA, * ), T( LDT, NB ), TAU( NB ), +* $ Y( LDY, NB ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAHR2 reduces the first NB columns of A real general n-BY-(n-k+1) +*> matrix A so that elements below the k-th subdiagonal are zero. The +*> reduction is performed by an orthogonal similarity transformation +*> Q**T * A * Q. The routine returns the matrices V and T which determine +*> Q as a block reflector I - V*T*V**T, and also the matrix Y = A * V * T. +*> +*> This is an auxiliary routine called by DGEHRD. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The order of the matrix A. +*> \endverbatim +*> +*> \param[in] K +*> \verbatim +*> K is INTEGER +*> The offset for the reduction. Elements below the k-th +*> subdiagonal in the first NB columns are reduced to zero. +*> K < N. +*> \endverbatim +*> +*> \param[in] NB +*> \verbatim +*> NB is INTEGER +*> The number of columns to be reduced. +*> \endverbatim +*> +*> \param[in,out] A +*> \verbatim +*> A is DOUBLE PRECISION array, dimension (LDA,N-K+1) +*> On entry, the n-by-(n-k+1) general matrix A. 
+*> On exit, the elements on and above the k-th subdiagonal in +*> the first NB columns are overwritten with the corresponding +*> elements of the reduced matrix; the elements below the k-th +*> subdiagonal, with the array TAU, represent the matrix Q as a +*> product of elementary reflectors. The other columns of A are +*> unchanged. See Further Details. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> The leading dimension of the array A. LDA >= max(1,N). +*> \endverbatim +*> +*> \param[out] TAU +*> \verbatim +*> TAU is DOUBLE PRECISION array, dimension (NB) +*> The scalar factors of the elementary reflectors. See Further +*> Details. +*> \endverbatim +*> +*> \param[out] T +*> \verbatim +*> T is DOUBLE PRECISION array, dimension (LDT,NB) +*> The upper triangular matrix T. +*> \endverbatim +*> +*> \param[in] LDT +*> \verbatim +*> LDT is INTEGER +*> The leading dimension of the array T. LDT >= NB. +*> \endverbatim +*> +*> \param[out] Y +*> \verbatim +*> Y is DOUBLE PRECISION array, dimension (LDY,NB) +*> The n-by-nb matrix Y. +*> \endverbatim +*> +*> \param[in] LDY +*> \verbatim +*> LDY is INTEGER +*> The leading dimension of the array Y. LDY >= N. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup doubleOTHERauxiliary +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> The matrix Q is represented as a product of nb elementary reflectors +*> +*> Q = H(1) H(2) . . . H(nb). +*> +*> Each H(i) has the form +*> +*> H(i) = I - tau * v * v**T +*> +*> where tau is a real scalar, and v is a real vector with +*> v(1:i+k-1) = 0, v(i+k) = 1; v(i+k+1:n) is stored on exit in +*> A(i+k+1:n,i), and tau in TAU(i). +*> +*> The elements of the vectors v together form the (n-k+1)-by-nb matrix +*> V which is needed, with T and Y, to apply the transformation to the +*> unreduced part of the matrix, using an update of the form: +*> A := (I - V*T*V**T) * (A - Y*V**T). +*> +*> The contents of A on exit are illustrated by the following example +*> with n = 7, k = 3 and nb = 2: +*> +*> ( a a a a a ) +*> ( a a a a a ) +*> ( a a a a a ) +*> ( h h a a a ) +*> ( v1 h a a a ) +*> ( v1 v2 a a a ) +*> ( v1 v2 a a a ) +*> +*> where a denotes an element of the original matrix A, h denotes a +*> modified element of the upper Hessenberg matrix H, and vi denotes an +*> element of the vector defining H(i). +*> +*> This subroutine is a slight modification of LAPACK-3.0's DLAHRD +*> incorporating improvements proposed by Quintana-Orti and Van de +*> Gejin. Note that the entries of A(1:K,2:NB) differ from those +*> returned by the original LAPACK-3.0's DLAHRD routine. (This +*> subroutine is not backward compatible with LAPACK-3.0's DLAHRD.) +*> \endverbatim +* +*> \par References: +* ================ +*> +*> Gregorio Quintana-Orti and Robert van de Geijn, "Improving the +*> performance of reduction to Hessenberg form," ACM Transactions on +*> Mathematical Software, 32(2):180-194, June 2006. +*> +* ===================================================================== + SUBROUTINE DLAHR2( N, K, NB, A, LDA, TAU, T, LDT, Y, LDY ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER K, LDA, LDT, LDY, N, NB +* .. 
+* .. Array Arguments .. + DOUBLE PRECISION A( LDA, * ), T( LDT, NB ), TAU( NB ), + $ Y( LDY, NB ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO, ONE + PARAMETER ( ZERO = 0.0D+0, + $ ONE = 1.0D+0 ) +* .. +* .. Local Scalars .. + INTEGER I + DOUBLE PRECISION EI +* .. +* .. External Subroutines .. + EXTERNAL DAXPY, DCOPY, DGEMM, DGEMV, DLACPY, + $ DLARFG, DSCAL, DTRMM, DTRMV +* .. +* .. Intrinsic Functions .. + INTRINSIC MIN +* .. +* .. Executable Statements .. +* +* Quick return if possible +* + IF( N.LE.1 ) + $ RETURN +* + DO 10 I = 1, NB + IF( I.GT.1 ) THEN +* +* Update A(K+1:N,I) +* +* Update I-th column of A - Y * V**T +* + CALL DGEMV( 'NO TRANSPOSE', N-K, I-1, -ONE, Y(K+1,1), LDY, + $ A( K+I-1, 1 ), LDA, ONE, A( K+1, I ), 1 ) +* +* Apply I - V * T**T * V**T to this column (call it b) from the +* left, using the last column of T as workspace +* +* Let V = ( V1 ) and b = ( b1 ) (first I-1 rows) +* ( V2 ) ( b2 ) +* +* where V1 is unit lower triangular +* +* w := V1**T * b1 +* + CALL DCOPY( I-1, A( K+1, I ), 1, T( 1, NB ), 1 ) + CALL DTRMV( 'Lower', 'Transpose', 'UNIT', + $ I-1, A( K+1, 1 ), + $ LDA, T( 1, NB ), 1 ) +* +* w := w + V2**T * b2 +* + CALL DGEMV( 'Transpose', N-K-I+1, I-1, + $ ONE, A( K+I, 1 ), + $ LDA, A( K+I, I ), 1, ONE, T( 1, NB ), 1 ) +* +* w := T**T * w +* + CALL DTRMV( 'Upper', 'Transpose', 'NON-UNIT', + $ I-1, T, LDT, + $ T( 1, NB ), 1 ) +* +* b2 := b2 - V2*w +* + CALL DGEMV( 'NO TRANSPOSE', N-K-I+1, I-1, -ONE, + $ A( K+I, 1 ), + $ LDA, T( 1, NB ), 1, ONE, A( K+I, I ), 1 ) +* +* b1 := b1 - V1*w +* + CALL DTRMV( 'Lower', 'NO TRANSPOSE', + $ 'UNIT', I-1, + $ A( K+1, 1 ), LDA, T( 1, NB ), 1 ) + CALL DAXPY( I-1, -ONE, T( 1, NB ), 1, A( K+1, I ), 1 ) +* + A( K+I-1, I-1 ) = EI + END IF +* +* Generate the elementary reflector H(I) to annihilate +* A(K+I+1:N,I) +* + CALL DLARFG( N-K-I+1, A( K+I, I ), A( MIN( K+I+1, N ), I ), 1, + $ TAU( I ) ) + EI = A( K+I, I ) + A( K+I, I ) = ONE +* +* Compute Y(K+1:N,I) +* + CALL DGEMV( 'NO TRANSPOSE', N-K, N-K-I+1, + $ ONE, A( K+1, I+1 ), + $ LDA, A( K+I, I ), 1, ZERO, Y( K+1, I ), 1 ) + CALL DGEMV( 'Transpose', N-K-I+1, I-1, + $ ONE, A( K+I, 1 ), LDA, + $ A( K+I, I ), 1, ZERO, T( 1, I ), 1 ) + CALL DGEMV( 'NO TRANSPOSE', N-K, I-1, -ONE, + $ Y( K+1, 1 ), LDY, + $ T( 1, I ), 1, ONE, Y( K+1, I ), 1 ) + CALL DSCAL( N-K, TAU( I ), Y( K+1, I ), 1 ) +* +* Compute T(1:I,I) +* + CALL DSCAL( I-1, -TAU( I ), T( 1, I ), 1 ) + CALL DTRMV( 'Upper', 'No Transpose', 'NON-UNIT', + $ I-1, T, LDT, + $ T( 1, I ), 1 ) + T( I, I ) = TAU( I ) +* + 10 CONTINUE + A( K+NB, NB ) = EI +* +* Compute Y(1:K,1:NB) +* + CALL DLACPY( 'ALL', K, NB, A( 1, 2 ), LDA, Y, LDY ) + CALL DTRMM( 'RIGHT', 'Lower', 'NO TRANSPOSE', + $ 'UNIT', K, NB, + $ ONE, A( K+1, 1 ), LDA, Y, LDY ) + IF( N.GT.K+NB ) + $ CALL DGEMM( 'NO TRANSPOSE', 'NO TRANSPOSE', K, + $ NB, N-K-NB, ONE, + $ A( 1, 2+NB ), LDA, A( K+1+NB, 1 ), LDA, ONE, Y, + $ LDY ) + CALL DTRMM( 'RIGHT', 'Upper', 'NO TRANSPOSE', + $ 'NON-UNIT', K, NB, + $ ONE, T, LDT, Y, LDY ) +* + RETURN +* +* End of DLAHR2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlamch.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlamch.f new file mode 100644 index 0000000..22a1621 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlamch.f @@ -0,0 +1,189 @@ +*> \brief \b DLAMCH +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* 
http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAMCH determines double precision machine parameters. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CMACH +*> \verbatim +*> Specifies the value to be returned by DLAMCH: +*> = 'E' or 'e', DLAMCH := eps +*> = 'S' or 's , DLAMCH := sfmin +*> = 'B' or 'b', DLAMCH := base +*> = 'P' or 'p', DLAMCH := eps*base +*> = 'N' or 'n', DLAMCH := t +*> = 'R' or 'r', DLAMCH := rnd +*> = 'M' or 'm', DLAMCH := emin +*> = 'U' or 'u', DLAMCH := rmin +*> = 'L' or 'l', DLAMCH := emax +*> = 'O' or 'o', DLAMCH := rmax +*> where +*> eps = relative machine precision +*> sfmin = safe minimum, such that 1/sfmin does not overflow +*> base = base of the machine +*> prec = eps*base +*> t = number of (base) digits in the mantissa +*> rnd = 1.0 when rounding occurs in addition, 0.0 otherwise +*> emin = minimum exponent before (gradual) underflow +*> rmin = underflow threshold - base**(emin-1) +*> emax = largest exponent before overflow +*> rmax = overflow threshold - (base**emax)*(1-eps) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2015 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLAMCH( CMACH ) +* +* -- LAPACK auxiliary routine (version 3.6.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2015 +* +* .. Scalar Arguments .. + CHARACTER CMACH +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE, ZERO + PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION RND, EPS, SFMIN, SMALL, RMACH +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. Intrinsic Functions .. + INTRINSIC DIGITS, EPSILON, HUGE, MAXEXPONENT, + $ MINEXPONENT, RADIX, TINY +* .. +* .. Executable Statements .. +* +* +* Assume rounding, not chopping. Always. +* + RND = ONE +* + IF( ONE.EQ.RND ) THEN + EPS = EPSILON(ZERO) * 0.5 + ELSE + EPS = EPSILON(ZERO) + END IF +* + IF( LSAME( CMACH, 'E' ) ) THEN + RMACH = EPS + ELSE IF( LSAME( CMACH, 'S' ) ) THEN + SFMIN = TINY(ZERO) + SMALL = ONE / HUGE(ZERO) + IF( SMALL.GE.SFMIN ) THEN +* +* Use SMALL plus a bit, to avoid the possibility of rounding +* causing overflow when computing 1/sfmin. 
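For IEEE doubles the machine parameters queried here ('E', 'S', 'O' are the ones DSTERF uses) have fixed values; the sketch below writes them out in Go and reproduces the SFMIN nudge performed on the next line. For IEEE doubles 1/HUGE is already smaller than TINY, so the nudge is not taken; it guards arithmetics where 1/TINY would overflow. Constant names are choices made for the example.

package main

import "math"

const (
	eps      = 0x1p-53   // DLAMCH('E'): unit roundoff, EPSILON/2 with rounding
	tiny     = 0x1p-1022 // smallest positive normal float64, Fortran TINY(0D0)
	overflow = math.MaxFloat64
)

func safeMin() float64 {
	sfmin := float64(tiny)
	small := 1 / float64(overflow)
	if small >= sfmin {
		// Nudge upward so 1/sfmin cannot overflow, as the Fortran does.
		sfmin = small * (1 + eps)
	}
	return sfmin
}

func main() { _ = safeMin() }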
+* + SFMIN = SMALL*( ONE+EPS ) + END IF + RMACH = SFMIN + ELSE IF( LSAME( CMACH, 'B' ) ) THEN + RMACH = RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'P' ) ) THEN + RMACH = EPS * RADIX(ZERO) + ELSE IF( LSAME( CMACH, 'N' ) ) THEN + RMACH = DIGITS(ZERO) + ELSE IF( LSAME( CMACH, 'R' ) ) THEN + RMACH = RND + ELSE IF( LSAME( CMACH, 'M' ) ) THEN + RMACH = MINEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'U' ) ) THEN + RMACH = tiny(zero) + ELSE IF( LSAME( CMACH, 'L' ) ) THEN + RMACH = MAXEXPONENT(ZERO) + ELSE IF( LSAME( CMACH, 'O' ) ) THEN + RMACH = HUGE(ZERO) + ELSE + RMACH = ZERO + END IF +* + DLAMCH = RMACH + RETURN +* +* End of DLAMCH +* + END +************************************************************************ +*> \brief \b DLAMC3 +*> \details +*> \b Purpose: +*> \verbatim +*> DLAMC3 is intended to force A and B to be stored prior to doing +*> the addition of A and B , for use in situations where optimizers +*> might hold one of these in a register. +*> \endverbatim +*> \author LAPACK is a software package provided by Univ. of Tennessee, Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd.. +*> \date November 2015 +*> \ingroup auxOTHERauxiliary +*> +*> \param[in] A +*> \verbatim +*> A is a DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] B +*> \verbatim +*> B is a DOUBLE PRECISION +*> The values A and B. +*> \endverbatim +*> + DOUBLE PRECISION FUNCTION DLAMC3( A, B ) +* +* -- LAPACK auxiliary routine (version 3.6.0) -- +* Univ. of Tennessee, Univ. of California Berkeley and NAG Ltd.. +* November 2010 +* +* .. Scalar Arguments .. + DOUBLE PRECISION A, B +* .. +* ===================================================================== +* +* .. Executable Statements .. +* + DLAMC3 = A + B +* + RETURN +* +* End of DLAMC3 +* + END +* +************************************************************************ diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlapy2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlapy2.f new file mode 100644 index 0000000..d43b0d5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlapy2.f @@ -0,0 +1,104 @@ +*> \brief \b DLAPY2 returns sqrt(x2+y2). +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAPY2 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DLAPY2( X, Y ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION X, Y +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAPY2 returns sqrt(x**2+y**2), taking care not to cause unnecessary +*> overflow. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] X +*> \verbatim +*> X is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] Y +*> \verbatim +*> Y is DOUBLE PRECISION +*> X and Y specify the values x and y. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + DOUBLE PRECISION FUNCTION DLAPY2( X, Y ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. 
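The overflow-safe formula DLAPY2 describes above is short enough to show directly. A hedged Go sketch (the name lapy2 is invented); in Go, math.Hypot gives the same guarantee out of the box.

package main

import "math"

// lapy2 computes sqrt(x*x + y*y) the way DLAPY2 documents: factor out the
// larger magnitude so neither square can overflow.
func lapy2(x, y float64) float64 {
	xa, ya := math.Abs(x), math.Abs(y)
	w, z := math.Max(xa, ya), math.Min(xa, ya)
	if z == 0 {
		return w
	}
	return w * math.Sqrt(1+(z/w)*(z/w))
}

func main() { _ = lapy2(3e200, 4e200) } // 5e200, no intermediate overflow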
+ DOUBLE PRECISION X, Y +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0D0 ) + DOUBLE PRECISION ONE + PARAMETER ( ONE = 1.0D0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION W, XABS, YABS, Z +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, MAX, MIN, SQRT +* .. +* .. Executable Statements .. +* + XABS = ABS( X ) + YABS = ABS( Y ) + W = MAX( XABS, YABS ) + Z = MIN( XABS, YABS ) + IF( Z.EQ.ZERO ) THEN + DLAPY2 = W + ELSE + DLAPY2 = W*SQRT( ONE+( Z / W )**2 ) + END IF + RETURN +* +* End of DLAPY2 +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr1.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr1.f new file mode 100644 index 0000000..df4fb68 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr1.f @@ -0,0 +1,179 @@ +*> \brief \b DLAQR1 sets a scalar multiple of the first column of the product of 2-by-2 or 3-by-3 matrix H and specified shifts. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAQR1 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLAQR1( N, H, LDH, SR1, SI1, SR2, SI2, V ) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION SI1, SI2, SR1, SR2 +* INTEGER LDH, N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION H( LDH, * ), V( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> Given a 2-by-2 or 3-by-3 matrix H, DLAQR1 sets v to a +*> scalar multiple of the first column of the product +*> +*> (*) K = (H - (sr1 + i*si1)*I)*(H - (sr2 + i*si2)*I) +*> +*> scaling to avoid overflows and most underflows. It +*> is assumed that either +*> +*> 1) sr1 = sr2 and si1 = -si2 +*> or +*> 2) si1 = si2 = 0. +*> +*> This is useful for starting double implicit shift bulges +*> in the QR algorithm. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is integer +*> Order of the matrix H. N must be either 2 or 3. +*> \endverbatim +*> +*> \param[in] H +*> \verbatim +*> H is DOUBLE PRECISION array of dimension (LDH,N) +*> The 2-by-2 or 3-by-3 matrix H in (*). +*> \endverbatim +*> +*> \param[in] LDH +*> \verbatim +*> LDH is integer +*> The leading dimension of H as declared in +*> the calling procedure. LDH.GE.N +*> \endverbatim +*> +*> \param[in] SR1 +*> \verbatim +*> SR1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] SI1 +*> \verbatim +*> SI1 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] SR2 +*> \verbatim +*> SR2 is DOUBLE PRECISION +*> \endverbatim +*> +*> \param[in] SI2 +*> \verbatim +*> SI2 is DOUBLE PRECISION +*> The shifts in (*). +*> \endverbatim +*> +*> \param[out] V +*> \verbatim +*> V is DOUBLE PRECISION array of dimension N +*> A scalar multiple of the first column of the +*> matrix K in (*). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
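The scaling trick documented for DLAPY2 above carries over directly to Go. A minimal sketch with illustrative names: factoring out the larger magnitude keeps the squared term at or below one, so sqrt(x**2+y**2) is computed without needless overflow.

    // Sketch of the DLAPY2 scaling shown above; names are illustrative.
    package main

    import (
        "fmt"
        "math"
    )

    func dlapy2(x, y float64) float64 {
        xabs, yabs := math.Abs(x), math.Abs(y)
        w := math.Max(xabs, yabs)
        z := math.Min(xabs, yabs)
        if z == 0 {
            return w
        }
        // w*sqrt(1+(z/w)^2) equals sqrt(x^2+y^2), but (z/w) <= 1 cannot overflow.
        return w * math.Sqrt(1+(z/w)*(z/w))
    }

    func main() {
        // A naive math.Sqrt(x*x+y*y) overflows here; the scaled form does not.
        fmt.Println(dlapy2(3e300, 4e300)) // ≈ 5e300
    }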
+* +*> \date September 2012 +* +*> \ingroup doubleOTHERauxiliary +* +*> \par Contributors: +* ================== +*> +*> Karen Braman and Ralph Byers, Department of Mathematics, +*> University of Kansas, USA +*> +* ===================================================================== + SUBROUTINE DLAQR1( N, H, LDH, SR1, SI1, SR2, SI2, V ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + DOUBLE PRECISION SI1, SI2, SR1, SR2 + INTEGER LDH, N +* .. +* .. Array Arguments .. + DOUBLE PRECISION H( LDH, * ), V( * ) +* .. +* +* ================================================================ +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER ( ZERO = 0.0d0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION H21S, H31S, S +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS +* .. +* .. Executable Statements .. + IF( N.EQ.2 ) THEN + S = ABS( H( 1, 1 )-SR2 ) + ABS( SI2 ) + ABS( H( 2, 1 ) ) + IF( S.EQ.ZERO ) THEN + V( 1 ) = ZERO + V( 2 ) = ZERO + ELSE + H21S = H( 2, 1 ) / S + V( 1 ) = H21S*H( 1, 2 ) + ( H( 1, 1 )-SR1 )* + $ ( ( H( 1, 1 )-SR2 ) / S ) - SI1*( SI2 / S ) + V( 2 ) = H21S*( H( 1, 1 )+H( 2, 2 )-SR1-SR2 ) + END IF + ELSE + S = ABS( H( 1, 1 )-SR2 ) + ABS( SI2 ) + ABS( H( 2, 1 ) ) + + $ ABS( H( 3, 1 ) ) + IF( S.EQ.ZERO ) THEN + V( 1 ) = ZERO + V( 2 ) = ZERO + V( 3 ) = ZERO + ELSE + H21S = H( 2, 1 ) / S + H31S = H( 3, 1 ) / S + V( 1 ) = ( H( 1, 1 )-SR1 )*( ( H( 1, 1 )-SR2 ) / S ) - + $ SI1*( SI2 / S ) + H( 1, 2 )*H21S + H( 1, 3 )*H31S + V( 2 ) = H21S*( H( 1, 1 )+H( 2, 2 )-SR1-SR2 ) + + $ H( 2, 3 )*H31S + V( 3 ) = H31S*( H( 1, 1 )+H( 3, 3 )-SR1-SR2 ) + + $ H21S*H( 3, 2 ) + END IF + END IF + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr5.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr5.f new file mode 100644 index 0000000..37ce6f6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaqr5.f @@ -0,0 +1,921 @@ +*> \brief \b DLAQR5 performs a single small-bulge multi-shift QR sweep. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLAQR5 + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLAQR5( WANTT, WANTZ, KACC22, N, KTOP, KBOT, NSHFTS, +* SR, SI, H, LDH, ILOZ, IHIZ, Z, LDZ, V, LDV, U, +* LDU, NV, WV, LDWV, NH, WH, LDWH ) +* +* .. Scalar Arguments .. +* INTEGER IHIZ, ILOZ, KACC22, KBOT, KTOP, LDH, LDU, LDV, +* $ LDWH, LDWV, LDZ, N, NH, NSHFTS, NV +* LOGICAL WANTT, WANTZ +* .. +* .. Array Arguments .. +* DOUBLE PRECISION H( LDH, * ), SI( * ), SR( * ), U( LDU, * ), +* $ V( LDV, * ), WH( LDWH, * ), WV( LDWV, * ), +* $ Z( LDZ, * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLAQR5, called by DLAQR0, performs a +*> single small-bulge multi-shift QR sweep. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] WANTT +*> \verbatim +*> WANTT is logical scalar +*> WANTT = .true. if the quasi-triangular Schur factor +*> is being computed. WANTT is set to .false. otherwise. +*> \endverbatim +*> +*> \param[in] WANTZ +*> \verbatim +*> WANTZ is logical scalar +*> WANTZ = .true. if the orthogonal Schur factor is being +*> computed. WANTZ is set to .false. otherwise. 
+*> \endverbatim +*> +*> \param[in] KACC22 +*> \verbatim +*> KACC22 is integer with value 0, 1, or 2. +*> Specifies the computation mode of far-from-diagonal +*> orthogonal updates. +*> = 0: DLAQR5 does not accumulate reflections and does not +*> use matrix-matrix multiply to update far-from-diagonal +*> matrix entries. +*> = 1: DLAQR5 accumulates reflections and uses matrix-matrix +*> multiply to update the far-from-diagonal matrix entries. +*> = 2: DLAQR5 accumulates reflections, uses matrix-matrix +*> multiply to update the far-from-diagonal matrix entries, +*> and takes advantage of 2-by-2 block structure during +*> matrix multiplies. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is integer scalar +*> N is the order of the Hessenberg matrix H upon which this +*> subroutine operates. +*> \endverbatim +*> +*> \param[in] KTOP +*> \verbatim +*> KTOP is integer scalar +*> \endverbatim +*> +*> \param[in] KBOT +*> \verbatim +*> KBOT is integer scalar +*> These are the first and last rows and columns of an +*> isolated diagonal block upon which the QR sweep is to be +*> applied. It is assumed without a check that +*> either KTOP = 1 or H(KTOP,KTOP-1) = 0 +*> and +*> either KBOT = N or H(KBOT+1,KBOT) = 0. +*> \endverbatim +*> +*> \param[in] NSHFTS +*> \verbatim +*> NSHFTS is integer scalar +*> NSHFTS gives the number of simultaneous shifts. NSHFTS +*> must be positive and even. +*> \endverbatim +*> +*> \param[in,out] SR +*> \verbatim +*> SR is DOUBLE PRECISION array of size (NSHFTS) +*> \endverbatim +*> +*> \param[in,out] SI +*> \verbatim +*> SI is DOUBLE PRECISION array of size (NSHFTS) +*> SR contains the real parts and SI contains the imaginary +*> parts of the NSHFTS shifts of origin that define the +*> multi-shift QR sweep. On output SR and SI may be +*> reordered. +*> \endverbatim +*> +*> \param[in,out] H +*> \verbatim +*> H is DOUBLE PRECISION array of size (LDH,N) +*> On input H contains a Hessenberg matrix. On output a +*> multi-shift QR sweep with shifts SR(J)+i*SI(J) is applied +*> to the isolated diagonal block in rows and columns KTOP +*> through KBOT. +*> \endverbatim +*> +*> \param[in] LDH +*> \verbatim +*> LDH is integer scalar +*> LDH is the leading dimension of H just as declared in the +*> calling procedure. LDH.GE.MAX(1,N). +*> \endverbatim +*> +*> \param[in] ILOZ +*> \verbatim +*> ILOZ is INTEGER +*> \endverbatim +*> +*> \param[in] IHIZ +*> \verbatim +*> IHIZ is INTEGER +*> Specify the rows of Z to which transformations must be +*> applied if WANTZ is .TRUE.. 1 .LE. ILOZ .LE. IHIZ .LE. N +*> \endverbatim +*> +*> \param[in,out] Z +*> \verbatim +*> Z is DOUBLE PRECISION array of size (LDZ,IHI) +*> If WANTZ = .TRUE., then the QR Sweep orthogonal +*> similarity transformation is accumulated into +*> Z(ILOZ:IHIZ,ILO:IHI) from the right. +*> If WANTZ = .FALSE., then Z is unreferenced. +*> \endverbatim +*> +*> \param[in] LDZ +*> \verbatim +*> LDZ is integer scalar +*> LDA is the leading dimension of Z just as declared in +*> the calling procedure. LDZ.GE.N. +*> \endverbatim +*> +*> \param[out] V +*> \verbatim +*> V is DOUBLE PRECISION array of size (LDV,NSHFTS/2) +*> \endverbatim +*> +*> \param[in] LDV +*> \verbatim +*> LDV is integer scalar +*> LDV is the leading dimension of V as declared in the +*> calling procedure. LDV.GE.3. 
+*> \endverbatim +*> +*> \param[out] U +*> \verbatim +*> U is DOUBLE PRECISION array of size +*> (LDU,3*NSHFTS-3) +*> \endverbatim +*> +*> \param[in] LDU +*> \verbatim +*> LDU is integer scalar +*> LDU is the leading dimension of U just as declared in the +*> in the calling subroutine. LDU.GE.3*NSHFTS-3. +*> \endverbatim +*> +*> \param[in] NH +*> \verbatim +*> NH is integer scalar +*> NH is the number of columns in array WH available for +*> workspace. NH.GE.1. +*> \endverbatim +*> +*> \param[out] WH +*> \verbatim +*> WH is DOUBLE PRECISION array of size (LDWH,NH) +*> \endverbatim +*> +*> \param[in] LDWH +*> \verbatim +*> LDWH is integer scalar +*> Leading dimension of WH just as declared in the +*> calling procedure. LDWH.GE.3*NSHFTS-3. +*> \endverbatim +*> +*> \param[in] NV +*> \verbatim +*> NV is integer scalar +*> NV is the number of rows in WV agailable for workspace. +*> NV.GE.1. +*> \endverbatim +*> +*> \param[out] WV +*> \verbatim +*> WV is DOUBLE PRECISION array of size +*> (LDWV,3*NSHFTS-3) +*> \endverbatim +*> +*> \param[in] LDWV +*> \verbatim +*> LDWV is integer scalar +*> LDWV is the leading dimension of WV as declared in the +*> in the calling subroutine. LDWV.GE.NV. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date September 2012 +* +*> \ingroup doubleOTHERauxiliary +* +*> \par Contributors: +* ================== +*> +*> Karen Braman and Ralph Byers, Department of Mathematics, +*> University of Kansas, USA +* +*> \par References: +* ================ +*> +*> K. Braman, R. Byers and R. Mathias, The Multi-Shift QR +*> Algorithm Part I: Maintaining Well Focused Shifts, and Level 3 +*> Performance, SIAM Journal of Matrix Analysis, volume 23, pages +*> 929--947, 2002. +*> +* ===================================================================== + SUBROUTINE DLAQR5( WANTT, WANTZ, KACC22, N, KTOP, KBOT, NSHFTS, + $ SR, SI, H, LDH, ILOZ, IHIZ, Z, LDZ, V, LDV, U, + $ LDU, NV, WV, LDWV, NH, WH, LDWH ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER IHIZ, ILOZ, KACC22, KBOT, KTOP, LDH, LDU, LDV, + $ LDWH, LDWV, LDZ, N, NH, NSHFTS, NV + LOGICAL WANTT, WANTZ +* .. +* .. Array Arguments .. + DOUBLE PRECISION H( LDH, * ), SI( * ), SR( * ), U( LDU, * ), + $ V( LDV, * ), WH( LDWH, * ), WV( LDWV, * ), + $ Z( LDZ, * ) +* .. +* +* ================================================================ +* .. Parameters .. + DOUBLE PRECISION ZERO, ONE + PARAMETER ( ZERO = 0.0d0, ONE = 1.0d0 ) +* .. +* .. Local Scalars .. + DOUBLE PRECISION ALPHA, BETA, H11, H12, H21, H22, REFSUM, + $ SAFMAX, SAFMIN, SCL, SMLNUM, SWAP, TST1, TST2, + $ ULP + INTEGER I, I2, I4, INCOL, J, J2, J4, JBOT, JCOL, JLEN, + $ JROW, JTOP, K, K1, KDU, KMS, KNZ, KRCOL, KZS, + $ M, M22, MBOT, MEND, MSTART, MTOP, NBMPS, NDCOL, + $ NS, NU + LOGICAL ACCUM, BLK22, BMP22 +* .. +* .. External Functions .. + DOUBLE PRECISION DLAMCH + EXTERNAL DLAMCH +* .. +* .. Intrinsic Functions .. +* + INTRINSIC ABS, DBLE, MAX, MIN, MOD +* .. +* .. Local Arrays .. + DOUBLE PRECISION VT( 3 ) +* .. +* .. External Subroutines .. + EXTERNAL DGEMM, DLABAD, DLACPY, DLAQR1, DLARFG, DLASET, + $ DTRMM +* .. +* .. Executable Statements .. +* +* ==== If there are no shifts, then there is nothing to do. 
==== +* + IF( NSHFTS.LT.2 ) + $ RETURN +* +* ==== If the active block is empty or 1-by-1, then there +* . is nothing to do. ==== +* + IF( KTOP.GE.KBOT ) + $ RETURN +* +* ==== Shuffle shifts into pairs of real shifts and pairs +* . of complex conjugate shifts assuming complex +* . conjugate shifts are already adjacent to one +* . another. ==== +* + DO 10 I = 1, NSHFTS - 2, 2 + IF( SI( I ).NE.-SI( I+1 ) ) THEN +* + SWAP = SR( I ) + SR( I ) = SR( I+1 ) + SR( I+1 ) = SR( I+2 ) + SR( I+2 ) = SWAP +* + SWAP = SI( I ) + SI( I ) = SI( I+1 ) + SI( I+1 ) = SI( I+2 ) + SI( I+2 ) = SWAP + END IF + 10 CONTINUE +* +* ==== NSHFTS is supposed to be even, but if it is odd, +* . then simply reduce it by one. The shuffle above +* . ensures that the dropped shift is real and that +* . the remaining shifts are paired. ==== +* + NS = NSHFTS - MOD( NSHFTS, 2 ) +* +* ==== Machine constants for deflation ==== +* + SAFMIN = DLAMCH( 'SAFE MINIMUM' ) + SAFMAX = ONE / SAFMIN + CALL DLABAD( SAFMIN, SAFMAX ) + ULP = DLAMCH( 'PRECISION' ) + SMLNUM = SAFMIN*( DBLE( N ) / ULP ) +* +* ==== Use accumulated reflections to update far-from-diagonal +* . entries ? ==== +* + ACCUM = ( KACC22.EQ.1 ) .OR. ( KACC22.EQ.2 ) +* +* ==== If so, exploit the 2-by-2 block structure? ==== +* + BLK22 = ( NS.GT.2 ) .AND. ( KACC22.EQ.2 ) +* +* ==== clear trash ==== +* + IF( KTOP+2.LE.KBOT ) + $ H( KTOP+2, KTOP ) = ZERO +* +* ==== NBMPS = number of 2-shift bulges in the chain ==== +* + NBMPS = NS / 2 +* +* ==== KDU = width of slab ==== +* + KDU = 6*NBMPS - 3 +* +* ==== Create and chase chains of NBMPS bulges ==== +* + DO 220 INCOL = 3*( 1-NBMPS ) + KTOP - 1, KBOT - 2, 3*NBMPS - 2 + NDCOL = INCOL + KDU + IF( ACCUM ) + $ CALL DLASET( 'ALL', KDU, KDU, ZERO, ONE, U, LDU ) +* +* ==== Near-the-diagonal bulge chase. The following loop +* . performs the near-the-diagonal part of a small bulge +* . multi-shift QR sweep. Each 6*NBMPS-2 column diagonal +* . chunk extends from column INCOL to column NDCOL +* . (including both column INCOL and column NDCOL). The +* . following loop chases a 3*NBMPS column long chain of +* . NBMPS bulges 3*NBMPS-2 columns to the right. (INCOL +* . may be less than KTOP and and NDCOL may be greater than +* . KBOT indicating phantom columns from which to chase +* . bulges before they are actually introduced or to which +* . to chase bulges beyond column KBOT.) ==== +* + DO 150 KRCOL = INCOL, MIN( INCOL+3*NBMPS-3, KBOT-2 ) +* +* ==== Bulges number MTOP to MBOT are active double implicit +* . shift bulges. There may or may not also be small +* . 2-by-2 bulge, if there is room. The inactive bulges +* . (if any) must wait until the active bulges have moved +* . down the diagonal to make room. The phantom matrix +* . paradigm described above helps keep track. ==== +* + MTOP = MAX( 1, ( ( KTOP-1 )-KRCOL+2 ) / 3+1 ) + MBOT = MIN( NBMPS, ( KBOT-KRCOL ) / 3 ) + M22 = MBOT + 1 + BMP22 = ( MBOT.LT.NBMPS ) .AND. ( KRCOL+3*( M22-1 ) ).EQ. + $ ( KBOT-2 ) +* +* ==== Generate reflections to chase the chain right +* . one column. (The minimum value of K is KTOP-1.) ==== +* + DO 20 M = MTOP, MBOT + K = KRCOL + 3*( M-1 ) + IF( K.EQ.KTOP-1 ) THEN + CALL DLAQR1( 3, H( KTOP, KTOP ), LDH, SR( 2*M-1 ), + $ SI( 2*M-1 ), SR( 2*M ), SI( 2*M ), + $ V( 1, M ) ) + ALPHA = V( 1, M ) + CALL DLARFG( 3, ALPHA, V( 2, M ), 1, V( 1, M ) ) + ELSE + BETA = H( K+1, K ) + V( 2, M ) = H( K+2, K ) + V( 3, M ) = H( K+3, K ) + CALL DLARFG( 3, BETA, V( 2, M ), 1, V( 1, M ) ) +* +* ==== A Bulge may collapse because of vigilant +* . deflation or destructive underflow. 
In the +* . underflow case, try the two-small-subdiagonals +* . trick to try to reinflate the bulge. ==== +* + IF( H( K+3, K ).NE.ZERO .OR. H( K+3, K+1 ).NE. + $ ZERO .OR. H( K+3, K+2 ).EQ.ZERO ) THEN +* +* ==== Typical case: not collapsed (yet). ==== +* + H( K+1, K ) = BETA + H( K+2, K ) = ZERO + H( K+3, K ) = ZERO + ELSE +* +* ==== Atypical case: collapsed. Attempt to +* . reintroduce ignoring H(K+1,K) and H(K+2,K). +* . If the fill resulting from the new +* . reflector is too large, then abandon it. +* . Otherwise, use the new one. ==== +* + CALL DLAQR1( 3, H( K+1, K+1 ), LDH, SR( 2*M-1 ), + $ SI( 2*M-1 ), SR( 2*M ), SI( 2*M ), + $ VT ) + ALPHA = VT( 1 ) + CALL DLARFG( 3, ALPHA, VT( 2 ), 1, VT( 1 ) ) + REFSUM = VT( 1 )*( H( K+1, K )+VT( 2 )* + $ H( K+2, K ) ) +* + IF( ABS( H( K+2, K )-REFSUM*VT( 2 ) )+ + $ ABS( REFSUM*VT( 3 ) ).GT.ULP* + $ ( ABS( H( K, K ) )+ABS( H( K+1, + $ K+1 ) )+ABS( H( K+2, K+2 ) ) ) ) THEN +* +* ==== Starting a new bulge here would +* . create non-negligible fill. Use +* . the old one with trepidation. ==== +* + H( K+1, K ) = BETA + H( K+2, K ) = ZERO + H( K+3, K ) = ZERO + ELSE +* +* ==== Stating a new bulge here would +* . create only negligible fill. +* . Replace the old reflector with +* . the new one. ==== +* + H( K+1, K ) = H( K+1, K ) - REFSUM + H( K+2, K ) = ZERO + H( K+3, K ) = ZERO + V( 1, M ) = VT( 1 ) + V( 2, M ) = VT( 2 ) + V( 3, M ) = VT( 3 ) + END IF + END IF + END IF + 20 CONTINUE +* +* ==== Generate a 2-by-2 reflection, if needed. ==== +* + K = KRCOL + 3*( M22-1 ) + IF( BMP22 ) THEN + IF( K.EQ.KTOP-1 ) THEN + CALL DLAQR1( 2, H( K+1, K+1 ), LDH, SR( 2*M22-1 ), + $ SI( 2*M22-1 ), SR( 2*M22 ), SI( 2*M22 ), + $ V( 1, M22 ) ) + BETA = V( 1, M22 ) + CALL DLARFG( 2, BETA, V( 2, M22 ), 1, V( 1, M22 ) ) + ELSE + BETA = H( K+1, K ) + V( 2, M22 ) = H( K+2, K ) + CALL DLARFG( 2, BETA, V( 2, M22 ), 1, V( 1, M22 ) ) + H( K+1, K ) = BETA + H( K+2, K ) = ZERO + END IF + END IF +* +* ==== Multiply H by reflections from the left ==== +* + IF( ACCUM ) THEN + JBOT = MIN( NDCOL, KBOT ) + ELSE IF( WANTT ) THEN + JBOT = N + ELSE + JBOT = KBOT + END IF + DO 40 J = MAX( KTOP, KRCOL ), JBOT + MEND = MIN( MBOT, ( J-KRCOL+2 ) / 3 ) + DO 30 M = MTOP, MEND + K = KRCOL + 3*( M-1 ) + REFSUM = V( 1, M )*( H( K+1, J )+V( 2, M )* + $ H( K+2, J )+V( 3, M )*H( K+3, J ) ) + H( K+1, J ) = H( K+1, J ) - REFSUM + H( K+2, J ) = H( K+2, J ) - REFSUM*V( 2, M ) + H( K+3, J ) = H( K+3, J ) - REFSUM*V( 3, M ) + 30 CONTINUE + 40 CONTINUE + IF( BMP22 ) THEN + K = KRCOL + 3*( M22-1 ) + DO 50 J = MAX( K+1, KTOP ), JBOT + REFSUM = V( 1, M22 )*( H( K+1, J )+V( 2, M22 )* + $ H( K+2, J ) ) + H( K+1, J ) = H( K+1, J ) - REFSUM + H( K+2, J ) = H( K+2, J ) - REFSUM*V( 2, M22 ) + 50 CONTINUE + END IF +* +* ==== Multiply H by reflections from the right. +* . Delay filling in the last row until the +* . vigilant deflation check is complete. ==== +* + IF( ACCUM ) THEN + JTOP = MAX( KTOP, INCOL ) + ELSE IF( WANTT ) THEN + JTOP = 1 + ELSE + JTOP = KTOP + END IF + DO 90 M = MTOP, MBOT + IF( V( 1, M ).NE.ZERO ) THEN + K = KRCOL + 3*( M-1 ) + DO 60 J = JTOP, MIN( KBOT, K+3 ) + REFSUM = V( 1, M )*( H( J, K+1 )+V( 2, M )* + $ H( J, K+2 )+V( 3, M )*H( J, K+3 ) ) + H( J, K+1 ) = H( J, K+1 ) - REFSUM + H( J, K+2 ) = H( J, K+2 ) - REFSUM*V( 2, M ) + H( J, K+3 ) = H( J, K+3 ) - REFSUM*V( 3, M ) + 60 CONTINUE +* + IF( ACCUM ) THEN +* +* ==== Accumulate U. (If necessary, update Z later +* . with with an efficient matrix-matrix +* . multiply.) 
==== +* + KMS = K - INCOL + DO 70 J = MAX( 1, KTOP-INCOL ), KDU + REFSUM = V( 1, M )*( U( J, KMS+1 )+V( 2, M )* + $ U( J, KMS+2 )+V( 3, M )*U( J, KMS+3 ) ) + U( J, KMS+1 ) = U( J, KMS+1 ) - REFSUM + U( J, KMS+2 ) = U( J, KMS+2 ) - REFSUM*V( 2, M ) + U( J, KMS+3 ) = U( J, KMS+3 ) - REFSUM*V( 3, M ) + 70 CONTINUE + ELSE IF( WANTZ ) THEN +* +* ==== U is not accumulated, so update Z +* . now by multiplying by reflections +* . from the right. ==== +* + DO 80 J = ILOZ, IHIZ + REFSUM = V( 1, M )*( Z( J, K+1 )+V( 2, M )* + $ Z( J, K+2 )+V( 3, M )*Z( J, K+3 ) ) + Z( J, K+1 ) = Z( J, K+1 ) - REFSUM + Z( J, K+2 ) = Z( J, K+2 ) - REFSUM*V( 2, M ) + Z( J, K+3 ) = Z( J, K+3 ) - REFSUM*V( 3, M ) + 80 CONTINUE + END IF + END IF + 90 CONTINUE +* +* ==== Special case: 2-by-2 reflection (if needed) ==== +* + K = KRCOL + 3*( M22-1 ) + IF( BMP22 ) THEN + IF ( V( 1, M22 ).NE.ZERO ) THEN + DO 100 J = JTOP, MIN( KBOT, K+3 ) + REFSUM = V( 1, M22 )*( H( J, K+1 )+V( 2, M22 )* + $ H( J, K+2 ) ) + H( J, K+1 ) = H( J, K+1 ) - REFSUM + H( J, K+2 ) = H( J, K+2 ) - REFSUM*V( 2, M22 ) + 100 CONTINUE +* + IF( ACCUM ) THEN + KMS = K - INCOL + DO 110 J = MAX( 1, KTOP-INCOL ), KDU + REFSUM = V( 1, M22 )*( U( J, KMS+1 )+ + $ V( 2, M22 )*U( J, KMS+2 ) ) + U( J, KMS+1 ) = U( J, KMS+1 ) - REFSUM + U( J, KMS+2 ) = U( J, KMS+2 ) - + $ REFSUM*V( 2, M22 ) + 110 CONTINUE + ELSE IF( WANTZ ) THEN + DO 120 J = ILOZ, IHIZ + REFSUM = V( 1, M22 )*( Z( J, K+1 )+V( 2, M22 )* + $ Z( J, K+2 ) ) + Z( J, K+1 ) = Z( J, K+1 ) - REFSUM + Z( J, K+2 ) = Z( J, K+2 ) - REFSUM*V( 2, M22 ) + 120 CONTINUE + END IF + END IF + END IF +* +* ==== Vigilant deflation check ==== +* + MSTART = MTOP + IF( KRCOL+3*( MSTART-1 ).LT.KTOP ) + $ MSTART = MSTART + 1 + MEND = MBOT + IF( BMP22 ) + $ MEND = MEND + 1 + IF( KRCOL.EQ.KBOT-2 ) + $ MEND = MEND + 1 + DO 130 M = MSTART, MEND + K = MIN( KBOT-1, KRCOL+3*( M-1 ) ) +* +* ==== The following convergence test requires that +* . the tradition small-compared-to-nearby-diagonals +* . criterion and the Ahues & Tisseur (LAWN 122, 1997) +* . criteria both be satisfied. The latter improves +* . accuracy in some examples. Falling back on an +* . alternate convergence criterion when TST1 or TST2 +* . is zero (as done here) is traditional but probably +* . unnecessary. ==== +* + IF( H( K+1, K ).NE.ZERO ) THEN + TST1 = ABS( H( K, K ) ) + ABS( H( K+1, K+1 ) ) + IF( TST1.EQ.ZERO ) THEN + IF( K.GE.KTOP+1 ) + $ TST1 = TST1 + ABS( H( K, K-1 ) ) + IF( K.GE.KTOP+2 ) + $ TST1 = TST1 + ABS( H( K, K-2 ) ) + IF( K.GE.KTOP+3 ) + $ TST1 = TST1 + ABS( H( K, K-3 ) ) + IF( K.LE.KBOT-2 ) + $ TST1 = TST1 + ABS( H( K+2, K+1 ) ) + IF( K.LE.KBOT-3 ) + $ TST1 = TST1 + ABS( H( K+3, K+1 ) ) + IF( K.LE.KBOT-4 ) + $ TST1 = TST1 + ABS( H( K+4, K+1 ) ) + END IF + IF( ABS( H( K+1, K ) ).LE.MAX( SMLNUM, ULP*TST1 ) ) + $ THEN + H12 = MAX( ABS( H( K+1, K ) ), ABS( H( K, K+1 ) ) ) + H21 = MIN( ABS( H( K+1, K ) ), ABS( H( K, K+1 ) ) ) + H11 = MAX( ABS( H( K+1, K+1 ) ), + $ ABS( H( K, K )-H( K+1, K+1 ) ) ) + H22 = MIN( ABS( H( K+1, K+1 ) ), + $ ABS( H( K, K )-H( K+1, K+1 ) ) ) + SCL = H11 + H12 + TST2 = H22*( H11 / SCL ) +* + IF( TST2.EQ.ZERO .OR. H21*( H12 / SCL ).LE. + $ MAX( SMLNUM, ULP*TST2 ) )H( K+1, K ) = ZERO + END IF + END IF + 130 CONTINUE +* +* ==== Fill in the last row of each bulge. 
==== +* + MEND = MIN( NBMPS, ( KBOT-KRCOL-1 ) / 3 ) + DO 140 M = MTOP, MEND + K = KRCOL + 3*( M-1 ) + REFSUM = V( 1, M )*V( 3, M )*H( K+4, K+3 ) + H( K+4, K+1 ) = -REFSUM + H( K+4, K+2 ) = -REFSUM*V( 2, M ) + H( K+4, K+3 ) = H( K+4, K+3 ) - REFSUM*V( 3, M ) + 140 CONTINUE +* +* ==== End of near-the-diagonal bulge chase. ==== +* + 150 CONTINUE +* +* ==== Use U (if accumulated) to update far-from-diagonal +* . entries in H. If required, use U to update Z as +* . well. ==== +* + IF( ACCUM ) THEN + IF( WANTT ) THEN + JTOP = 1 + JBOT = N + ELSE + JTOP = KTOP + JBOT = KBOT + END IF + IF( ( .NOT.BLK22 ) .OR. ( INCOL.LT.KTOP ) .OR. + $ ( NDCOL.GT.KBOT ) .OR. ( NS.LE.2 ) ) THEN +* +* ==== Updates not exploiting the 2-by-2 block +* . structure of U. K1 and NU keep track of +* . the location and size of U in the special +* . cases of introducing bulges and chasing +* . bulges off the bottom. In these special +* . cases and in case the number of shifts +* . is NS = 2, there is no 2-by-2 block +* . structure to exploit. ==== +* + K1 = MAX( 1, KTOP-INCOL ) + NU = ( KDU-MAX( 0, NDCOL-KBOT ) ) - K1 + 1 +* +* ==== Horizontal Multiply ==== +* + DO 160 JCOL = MIN( NDCOL, KBOT ) + 1, JBOT, NH + JLEN = MIN( NH, JBOT-JCOL+1 ) + CALL DGEMM( 'C', 'N', NU, JLEN, NU, ONE, U( K1, K1 ), + $ LDU, H( INCOL+K1, JCOL ), LDH, ZERO, WH, + $ LDWH ) + CALL DLACPY( 'ALL', NU, JLEN, WH, LDWH, + $ H( INCOL+K1, JCOL ), LDH ) + 160 CONTINUE +* +* ==== Vertical multiply ==== +* + DO 170 JROW = JTOP, MAX( KTOP, INCOL ) - 1, NV + JLEN = MIN( NV, MAX( KTOP, INCOL )-JROW ) + CALL DGEMM( 'N', 'N', JLEN, NU, NU, ONE, + $ H( JROW, INCOL+K1 ), LDH, U( K1, K1 ), + $ LDU, ZERO, WV, LDWV ) + CALL DLACPY( 'ALL', JLEN, NU, WV, LDWV, + $ H( JROW, INCOL+K1 ), LDH ) + 170 CONTINUE +* +* ==== Z multiply (also vertical) ==== +* + IF( WANTZ ) THEN + DO 180 JROW = ILOZ, IHIZ, NV + JLEN = MIN( NV, IHIZ-JROW+1 ) + CALL DGEMM( 'N', 'N', JLEN, NU, NU, ONE, + $ Z( JROW, INCOL+K1 ), LDZ, U( K1, K1 ), + $ LDU, ZERO, WV, LDWV ) + CALL DLACPY( 'ALL', JLEN, NU, WV, LDWV, + $ Z( JROW, INCOL+K1 ), LDZ ) + 180 CONTINUE + END IF + ELSE +* +* ==== Updates exploiting U's 2-by-2 block structure. +* . (I2, I4, J2, J4 are the last rows and columns +* . of the blocks.) ==== +* + I2 = ( KDU+1 ) / 2 + I4 = KDU + J2 = I4 - I2 + J4 = KDU +* +* ==== KZS and KNZ deal with the band of zeros +* . along the diagonal of one of the triangular +* . blocks. ==== +* + KZS = ( J4-J2 ) - ( NS+1 ) + KNZ = NS + 1 +* +* ==== Horizontal multiply ==== +* + DO 190 JCOL = MIN( NDCOL, KBOT ) + 1, JBOT, NH + JLEN = MIN( NH, JBOT-JCOL+1 ) +* +* ==== Copy bottom of H to top+KZS of scratch ==== +* (The first KZS rows get multiplied by zero.) 
==== +* + CALL DLACPY( 'ALL', KNZ, JLEN, H( INCOL+1+J2, JCOL ), + $ LDH, WH( KZS+1, 1 ), LDWH ) +* +* ==== Multiply by U21**T ==== +* + CALL DLASET( 'ALL', KZS, JLEN, ZERO, ZERO, WH, LDWH ) + CALL DTRMM( 'L', 'U', 'C', 'N', KNZ, JLEN, ONE, + $ U( J2+1, 1+KZS ), LDU, WH( KZS+1, 1 ), + $ LDWH ) +* +* ==== Multiply top of H by U11**T ==== +* + CALL DGEMM( 'C', 'N', I2, JLEN, J2, ONE, U, LDU, + $ H( INCOL+1, JCOL ), LDH, ONE, WH, LDWH ) +* +* ==== Copy top of H to bottom of WH ==== +* + CALL DLACPY( 'ALL', J2, JLEN, H( INCOL+1, JCOL ), LDH, + $ WH( I2+1, 1 ), LDWH ) +* +* ==== Multiply by U21**T ==== +* + CALL DTRMM( 'L', 'L', 'C', 'N', J2, JLEN, ONE, + $ U( 1, I2+1 ), LDU, WH( I2+1, 1 ), LDWH ) +* +* ==== Multiply by U22 ==== +* + CALL DGEMM( 'C', 'N', I4-I2, JLEN, J4-J2, ONE, + $ U( J2+1, I2+1 ), LDU, + $ H( INCOL+1+J2, JCOL ), LDH, ONE, + $ WH( I2+1, 1 ), LDWH ) +* +* ==== Copy it back ==== +* + CALL DLACPY( 'ALL', KDU, JLEN, WH, LDWH, + $ H( INCOL+1, JCOL ), LDH ) + 190 CONTINUE +* +* ==== Vertical multiply ==== +* + DO 200 JROW = JTOP, MAX( INCOL, KTOP ) - 1, NV + JLEN = MIN( NV, MAX( INCOL, KTOP )-JROW ) +* +* ==== Copy right of H to scratch (the first KZS +* . columns get multiplied by zero) ==== +* + CALL DLACPY( 'ALL', JLEN, KNZ, H( JROW, INCOL+1+J2 ), + $ LDH, WV( 1, 1+KZS ), LDWV ) +* +* ==== Multiply by U21 ==== +* + CALL DLASET( 'ALL', JLEN, KZS, ZERO, ZERO, WV, LDWV ) + CALL DTRMM( 'R', 'U', 'N', 'N', JLEN, KNZ, ONE, + $ U( J2+1, 1+KZS ), LDU, WV( 1, 1+KZS ), + $ LDWV ) +* +* ==== Multiply by U11 ==== +* + CALL DGEMM( 'N', 'N', JLEN, I2, J2, ONE, + $ H( JROW, INCOL+1 ), LDH, U, LDU, ONE, WV, + $ LDWV ) +* +* ==== Copy left of H to right of scratch ==== +* + CALL DLACPY( 'ALL', JLEN, J2, H( JROW, INCOL+1 ), LDH, + $ WV( 1, 1+I2 ), LDWV ) +* +* ==== Multiply by U21 ==== +* + CALL DTRMM( 'R', 'L', 'N', 'N', JLEN, I4-I2, ONE, + $ U( 1, I2+1 ), LDU, WV( 1, 1+I2 ), LDWV ) +* +* ==== Multiply by U22 ==== +* + CALL DGEMM( 'N', 'N', JLEN, I4-I2, J4-J2, ONE, + $ H( JROW, INCOL+1+J2 ), LDH, + $ U( J2+1, I2+1 ), LDU, ONE, WV( 1, 1+I2 ), + $ LDWV ) +* +* ==== Copy it back ==== +* + CALL DLACPY( 'ALL', JLEN, KDU, WV, LDWV, + $ H( JROW, INCOL+1 ), LDH ) + 200 CONTINUE +* +* ==== Multiply Z (also vertical) ==== +* + IF( WANTZ ) THEN + DO 210 JROW = ILOZ, IHIZ, NV + JLEN = MIN( NV, IHIZ-JROW+1 ) +* +* ==== Copy right of Z to left of scratch (first +* . 
KZS columns get multiplied by zero) ==== +* + CALL DLACPY( 'ALL', JLEN, KNZ, + $ Z( JROW, INCOL+1+J2 ), LDZ, + $ WV( 1, 1+KZS ), LDWV ) +* +* ==== Multiply by U12 ==== +* + CALL DLASET( 'ALL', JLEN, KZS, ZERO, ZERO, WV, + $ LDWV ) + CALL DTRMM( 'R', 'U', 'N', 'N', JLEN, KNZ, ONE, + $ U( J2+1, 1+KZS ), LDU, WV( 1, 1+KZS ), + $ LDWV ) +* +* ==== Multiply by U11 ==== +* + CALL DGEMM( 'N', 'N', JLEN, I2, J2, ONE, + $ Z( JROW, INCOL+1 ), LDZ, U, LDU, ONE, + $ WV, LDWV ) +* +* ==== Copy left of Z to right of scratch ==== +* + CALL DLACPY( 'ALL', JLEN, J2, Z( JROW, INCOL+1 ), + $ LDZ, WV( 1, 1+I2 ), LDWV ) +* +* ==== Multiply by U21 ==== +* + CALL DTRMM( 'R', 'L', 'N', 'N', JLEN, I4-I2, ONE, + $ U( 1, I2+1 ), LDU, WV( 1, 1+I2 ), + $ LDWV ) +* +* ==== Multiply by U22 ==== +* + CALL DGEMM( 'N', 'N', JLEN, I4-I2, J4-J2, ONE, + $ Z( JROW, INCOL+1+J2 ), LDZ, + $ U( J2+1, I2+1 ), LDU, ONE, + $ WV( 1, 1+I2 ), LDWV ) +* +* ==== Copy the result back to Z ==== +* + CALL DLACPY( 'ALL', JLEN, KDU, WV, LDWV, + $ Z( JROW, INCOL+1 ), LDZ ) + 210 CONTINUE + END IF + END IF + END IF + 220 CONTINUE +* +* ==== End of DLAQR5 ==== +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlarfg.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlarfg.f new file mode 100644 index 0000000..ce91d33 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlarfg.f @@ -0,0 +1,196 @@ +*> \brief \b DLARFG generates an elementary reflector (Householder matrix). +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLARFG + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLARFG( N, ALPHA, X, INCX, TAU ) +* +* .. Scalar Arguments .. +* INTEGER INCX, N +* DOUBLE PRECISION ALPHA, TAU +* .. +* .. Array Arguments .. +* DOUBLE PRECISION X( * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLARFG generates a real elementary reflector H of order n, such +*> that +*> +*> H * ( alpha ) = ( beta ), H**T * H = I. +*> ( x ) ( 0 ) +*> +*> where alpha and beta are scalars, and x is an (n-1)-element real +*> vector. H is represented in the form +*> +*> H = I - tau * ( 1 ) * ( 1 v**T ) , +*> ( v ) +*> +*> where tau is a real scalar and v is a real (n-1)-element +*> vector. +*> +*> If the elements of x are all zero, then tau = 0 and H is taken to be +*> the unit matrix. +*> +*> Otherwise 1 <= tau <= 2. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The order of the elementary reflector. +*> \endverbatim +*> +*> \param[in,out] ALPHA +*> \verbatim +*> ALPHA is DOUBLE PRECISION +*> On entry, the value alpha. +*> On exit, it is overwritten with the value beta. +*> \endverbatim +*> +*> \param[in,out] X +*> \verbatim +*> X is DOUBLE PRECISION array, dimension +*> (1+(N-2)*abs(INCX)) +*> On entry, the vector x. +*> On exit, it is overwritten with the vector v. +*> \endverbatim +*> +*> \param[in] INCX +*> \verbatim +*> INCX is INTEGER +*> The increment between elements of X. INCX > 0. +*> \endverbatim +*> +*> \param[out] TAU +*> \verbatim +*> TAU is DOUBLE PRECISION +*> The value tau. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
+* +*> \date September 2012 +* +*> \ingroup doubleOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLARFG( N, ALPHA, X, INCX, TAU ) +* +* -- LAPACK auxiliary routine (version 3.4.2) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* September 2012 +* +* .. Scalar Arguments .. + INTEGER INCX, N + DOUBLE PRECISION ALPHA, TAU +* .. +* .. Array Arguments .. + DOUBLE PRECISION X( * ) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE, ZERO + PARAMETER ( ONE = 1.0D+0, ZERO = 0.0D+0 ) +* .. +* .. Local Scalars .. + INTEGER J, KNT + DOUBLE PRECISION BETA, RSAFMN, SAFMIN, XNORM +* .. +* .. External Functions .. + DOUBLE PRECISION DLAMCH, DLAPY2, DNRM2 + EXTERNAL DLAMCH, DLAPY2, DNRM2 +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS, SIGN +* .. +* .. External Subroutines .. + EXTERNAL DSCAL +* .. +* .. Executable Statements .. +* + IF( N.LE.1 ) THEN + TAU = ZERO + RETURN + END IF +* + XNORM = DNRM2( N-1, X, INCX ) +* + IF( XNORM.EQ.ZERO ) THEN +* +* H = I +* + TAU = ZERO + ELSE +* +* general case +* + BETA = -SIGN( DLAPY2( ALPHA, XNORM ), ALPHA ) + SAFMIN = DLAMCH( 'S' ) / DLAMCH( 'E' ) + KNT = 0 + IF( ABS( BETA ).LT.SAFMIN ) THEN +* +* XNORM, BETA may be inaccurate; scale X and recompute them +* + RSAFMN = ONE / SAFMIN + 10 CONTINUE + KNT = KNT + 1 + CALL DSCAL( N-1, RSAFMN, X, INCX ) + BETA = BETA*RSAFMN + ALPHA = ALPHA*RSAFMN + IF( ABS( BETA ).LT.SAFMIN ) + $ GO TO 10 +* +* New BETA is at most 1, at least SAFMIN +* + XNORM = DNRM2( N-1, X, INCX ) + BETA = -SIGN( DLAPY2( ALPHA, XNORM ), ALPHA ) + END IF + TAU = ( BETA-ALPHA ) / BETA + CALL DSCAL( N-1, ONE / ( ALPHA-BETA ), X, INCX ) +* +* If ALPHA is subnormal, it may lose relative accuracy +* + DO 20 J = 1, KNT + BETA = BETA*SAFMIN + 20 CONTINUE + ALPHA = BETA + END IF +* + RETURN +* +* End of DLARFG +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaset.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaset.f new file mode 100644 index 0000000..d3bb945 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dlaset.f @@ -0,0 +1,184 @@ +*> \brief \b DLASET initializes the off-diagonal elements and the diagonal elements of a matrix to given values. +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +*> \htmlonly +*> Download DLASET + dependencies +*> +*> [TGZ] +*> +*> [ZIP] +*> +*> [TXT] +*> \endhtmlonly +* +* Definition: +* =========== +* +* SUBROUTINE DLASET( UPLO, M, N, ALPHA, BETA, A, LDA ) +* +* .. Scalar Arguments .. +* CHARACTER UPLO +* INTEGER LDA, M, N +* DOUBLE PRECISION ALPHA, BETA +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A( LDA, * ) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DLASET initializes an m-by-n matrix A to BETA on the diagonal and +*> ALPHA on the offdiagonals. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] UPLO +*> \verbatim +*> UPLO is CHARACTER*1 +*> Specifies the part of the matrix A to be set. +*> = 'U': Upper triangular part is set; the strictly lower +*> triangular part of A is not changed. +*> = 'L': Lower triangular part is set; the strictly upper +*> triangular part of A is not changed. +*> Otherwise: All of the matrix A is set. 
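DLARFG above builds the reflector from beta = -sign(alpha)*sqrt(alpha**2 + ||x||**2), tau = (beta-alpha)/beta and v = x/(alpha-beta). A minimal Go sketch of those same formulas, omitting the SAFMIN rescaling loop that DLARFG uses to guard against underflow; names and the simplified handling are illustrative.

    // Sketch of the reflector construction in DLARFG above (no rescaling loop).
    package main

    import (
        "fmt"
        "math"
    )

    // householder returns beta and tau, and overwrites x with v, so that
    // (I - tau*(1;v)*(1 v^T)) * (alpha; x) = (beta; 0).
    func householder(alpha float64, x []float64) (beta, tau float64) {
        xnorm := 0.0
        for _, xi := range x {
            xnorm = math.Hypot(xnorm, xi)
        }
        if xnorm == 0 {
            return alpha, 0 // x is zero, so H = I and tau = 0
        }
        beta = -math.Copysign(math.Hypot(alpha, xnorm), alpha)
        tau = (beta - alpha) / beta
        scale := 1 / (alpha - beta)
        for i := range x {
            x[i] *= scale
        }
        return beta, tau
    }

    func main() {
        x := []float64{3, 4}
        beta, tau := householder(0, x)
        fmt.Println(beta, tau, x) // -5 1 [0.6 0.8]
    }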
+*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> The number of rows of the matrix A. M >= 0. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> The number of columns of the matrix A. N >= 0. +*> \endverbatim +*> +*> \param[in] ALPHA +*> \verbatim +*> ALPHA is DOUBLE PRECISION +*> The constant to which the offdiagonal elements are to be set. +*> \endverbatim +*> +*> \param[in] BETA +*> \verbatim +*> BETA is DOUBLE PRECISION +*> The constant to which the diagonal elements are to be set. +*> \endverbatim +*> +*> \param[out] A +*> \verbatim +*> A is DOUBLE PRECISION array, dimension (LDA,N) +*> On exit, the leading m-by-n submatrix of A is set as follows: +*> +*> if UPLO = 'U', A(i,j) = ALPHA, 1<=i<=j-1, 1<=j<=n, +*> if UPLO = 'L', A(i,j) = ALPHA, j+1<=i<=m, 1<=j<=n, +*> otherwise, A(i,j) = ALPHA, 1<=i<=m, 1<=j<=n, i.ne.j, +*> +*> and, for all UPLO, A(i,i) = BETA, 1<=i<=min(m,n). +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> The leading dimension of the array A. LDA >= max(1,M). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2015 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + SUBROUTINE DLASET( UPLO, M, N, ALPHA, BETA, A, LDA ) +* +* -- LAPACK auxiliary routine (version 3.6.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2015 +* +* .. Scalar Arguments .. + CHARACTER UPLO + INTEGER LDA, M, N + DOUBLE PRECISION ALPHA, BETA +* .. +* .. Array Arguments .. + DOUBLE PRECISION A( LDA, * ) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I, J +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. Intrinsic Functions .. + INTRINSIC MIN +* .. +* .. Executable Statements .. +* + IF( LSAME( UPLO, 'U' ) ) THEN +* +* Set the strictly upper triangular or trapezoidal part of the +* array to ALPHA. +* + DO 20 J = 2, N + DO 10 I = 1, MIN( J-1, M ) + A( I, J ) = ALPHA + 10 CONTINUE + 20 CONTINUE +* + ELSE IF( LSAME( UPLO, 'L' ) ) THEN +* +* Set the strictly lower triangular or trapezoidal part of the +* array to ALPHA. +* + DO 40 J = 1, MIN( M, N ) + DO 30 I = J + 1, M + A( I, J ) = ALPHA + 30 CONTINUE + 40 CONTINUE +* + ELSE +* +* Set the leading m-by-n submatrix to ALPHA. +* + DO 60 J = 1, N + DO 50 I = 1, M + A( I, J ) = ALPHA + 50 CONTINUE + 60 CONTINUE + END IF +* +* Set the first min(M,N) diagonal elements to BETA. +* + DO 70 I = 1, MIN( M, N ) + A( I, I ) = BETA + 70 CONTINUE +* + RETURN +* +* End of DLASET +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dnrm2.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dnrm2.f new file mode 100644 index 0000000..5ea257a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dnrm2.f @@ -0,0 +1,112 @@ +*> \brief \b DNRM2 +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* DOUBLE PRECISION FUNCTION DNRM2(N,X,INCX) +* +* .. Scalar Arguments .. +* INTEGER INCX,N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION X(*) +* .. 
+* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DNRM2 returns the euclidean norm of a vector via the function +*> name, so that +*> +*> DNRM2 := sqrt( x'*x ) +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level1 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> -- This version written on 25-October-1982. +*> Modified on 14-October-1993 to inline the call to DLASSQ. +*> Sven Hammarling, Nag Ltd. +*> \endverbatim +*> +* ===================================================================== + DOUBLE PRECISION FUNCTION DNRM2(N,X,INCX) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER INCX,N +* .. +* .. Array Arguments .. + DOUBLE PRECISION X(*) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ONE,ZERO + PARAMETER (ONE=1.0D+0,ZERO=0.0D+0) +* .. +* .. Local Scalars .. + DOUBLE PRECISION ABSXI,NORM,SCALE,SSQ + INTEGER IX +* .. +* .. Intrinsic Functions .. + INTRINSIC ABS,SQRT +* .. + IF (N.LT.1 .OR. INCX.LT.1) THEN + NORM = ZERO + ELSE IF (N.EQ.1) THEN + NORM = ABS(X(1)) + ELSE + SCALE = ZERO + SSQ = ONE +* The following loop is equivalent to this call to the LAPACK +* auxiliary routine: +* CALL DLASSQ( N, X, INCX, SCALE, SSQ ) +* + DO 10 IX = 1,1 + (N-1)*INCX,INCX + IF (X(IX).NE.ZERO) THEN + ABSXI = ABS(X(IX)) + IF (SCALE.LT.ABSXI) THEN + SSQ = ONE + SSQ* (SCALE/ABSXI)**2 + SCALE = ABSXI + ELSE + SSQ = SSQ + (ABSXI/SCALE)**2 + END IF + END IF + 10 CONTINUE + NORM = SCALE*SQRT(SSQ) + END IF +* + DNRM2 = NORM + RETURN +* +* End of DNRM2. +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dscal.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dscal.f new file mode 100644 index 0000000..3337de8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dscal.f @@ -0,0 +1,110 @@ +*> \brief \b DSCAL +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DSCAL(N,DA,DX,INCX) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION DA +* INTEGER INCX,N +* .. +* .. Array Arguments .. +* DOUBLE PRECISION DX(*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DSCAL scales a vector by a constant. +*> uses unrolled loops for increment equal to one. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level1 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> jack dongarra, linpack, 3/11/78. +*> modified 3/93 to return if incx .le. 0. +*> modified 12/3/93, array(1) declarations changed to array(*) +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DSCAL(N,DA,DX,INCX) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. 
of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + DOUBLE PRECISION DA + INTEGER INCX,N +* .. +* .. Array Arguments .. + DOUBLE PRECISION DX(*) +* .. +* +* ===================================================================== +* +* .. Local Scalars .. + INTEGER I,M,MP1,NINCX +* .. +* .. Intrinsic Functions .. + INTRINSIC MOD +* .. + IF (N.LE.0 .OR. INCX.LE.0) RETURN + IF (INCX.EQ.1) THEN +* +* code for increment equal to 1 +* +* +* clean-up loop +* + M = MOD(N,5) + IF (M.NE.0) THEN + DO I = 1,M + DX(I) = DA*DX(I) + END DO + IF (N.LT.5) RETURN + END IF + MP1 = M + 1 + DO I = MP1,N,5 + DX(I) = DA*DX(I) + DX(I+1) = DA*DX(I+1) + DX(I+2) = DA*DX(I+2) + DX(I+3) = DA*DX(I+3) + DX(I+4) = DA*DX(I+4) + END DO + ELSE +* +* code for increment not equal to 1 +* + NINCX = N*INCX + DO I = 1,NINCX,INCX + DX(I) = DA*DX(I) + END DO + END IF + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmm.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmm.f new file mode 100644 index 0000000..cbd5ce7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmm.f @@ -0,0 +1,415 @@ +*> \brief \b DTRMM +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DTRMM(SIDE,UPLO,TRANSA,DIAG,M,N,ALPHA,A,LDA,B,LDB) +* +* .. Scalar Arguments .. +* DOUBLE PRECISION ALPHA +* INTEGER LDA,LDB,M,N +* CHARACTER DIAG,SIDE,TRANSA,UPLO +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A(LDA,*),B(LDB,*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DTRMM performs one of the matrix-matrix operations +*> +*> B := alpha*op( A )*B, or B := alpha*B*op( A ), +*> +*> where alpha is a scalar, B is an m by n matrix, A is a unit, or +*> non-unit, upper or lower triangular matrix and op( A ) is one of +*> +*> op( A ) = A or op( A ) = A**T. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] SIDE +*> \verbatim +*> SIDE is CHARACTER*1 +*> On entry, SIDE specifies whether op( A ) multiplies B from +*> the left or right as follows: +*> +*> SIDE = 'L' or 'l' B := alpha*op( A )*B. +*> +*> SIDE = 'R' or 'r' B := alpha*B*op( A ). +*> \endverbatim +*> +*> \param[in] UPLO +*> \verbatim +*> UPLO is CHARACTER*1 +*> On entry, UPLO specifies whether the matrix A is an upper or +*> lower triangular matrix as follows: +*> +*> UPLO = 'U' or 'u' A is an upper triangular matrix. +*> +*> UPLO = 'L' or 'l' A is a lower triangular matrix. +*> \endverbatim +*> +*> \param[in] TRANSA +*> \verbatim +*> TRANSA is CHARACTER*1 +*> On entry, TRANSA specifies the form of op( A ) to be used in +*> the matrix multiplication as follows: +*> +*> TRANSA = 'N' or 'n' op( A ) = A. +*> +*> TRANSA = 'T' or 't' op( A ) = A**T. +*> +*> TRANSA = 'C' or 'c' op( A ) = A**T. +*> \endverbatim +*> +*> \param[in] DIAG +*> \verbatim +*> DIAG is CHARACTER*1 +*> On entry, DIAG specifies whether or not A is unit triangular +*> as follows: +*> +*> DIAG = 'U' or 'u' A is assumed to be unit triangular. +*> +*> DIAG = 'N' or 'n' A is not assumed to be unit +*> triangular. +*> \endverbatim +*> +*> \param[in] M +*> \verbatim +*> M is INTEGER +*> On entry, M specifies the number of rows of B. M must be at +*> least zero. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> On entry, N specifies the number of columns of B. N must be +*> at least zero. 
+*> \endverbatim +*> +*> \param[in] ALPHA +*> \verbatim +*> ALPHA is DOUBLE PRECISION. +*> On entry, ALPHA specifies the scalar alpha. When alpha is +*> zero then A is not referenced and B need not be set before +*> entry. +*> \endverbatim +*> +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION array of DIMENSION ( LDA, k ), where k is m +*> when SIDE = 'L' or 'l' and is n when SIDE = 'R' or 'r'. +*> Before entry with UPLO = 'U' or 'u', the leading k by k +*> upper triangular part of the array A must contain the upper +*> triangular matrix and the strictly lower triangular part of +*> A is not referenced. +*> Before entry with UPLO = 'L' or 'l', the leading k by k +*> lower triangular part of the array A must contain the lower +*> triangular matrix and the strictly upper triangular part of +*> A is not referenced. +*> Note that when DIAG = 'U' or 'u', the diagonal elements of +*> A are not referenced either, but are assumed to be unity. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> On entry, LDA specifies the first dimension of A as declared +*> in the calling (sub) program. When SIDE = 'L' or 'l' then +*> LDA must be at least max( 1, m ), when SIDE = 'R' or 'r' +*> then LDA must be at least max( 1, n ). +*> \endverbatim +*> +*> \param[in,out] B +*> \verbatim +*> B is DOUBLE PRECISION array of DIMENSION ( LDB, n ). +*> Before entry, the leading m by n part of the array B must +*> contain the matrix B, and on exit is overwritten by the +*> transformed matrix. +*> \endverbatim +*> +*> \param[in] LDB +*> \verbatim +*> LDB is INTEGER +*> On entry, LDB specifies the first dimension of B as declared +*> in the calling (sub) program. LDB must be at least +*> max( 1, m ). +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup double_blas_level3 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Level 3 Blas routine. +*> +*> -- Written on 8-February-1989. +*> Jack Dongarra, Argonne National Laboratory. +*> Iain Duff, AERE Harwell. +*> Jeremy Du Croz, Numerical Algorithms Group Ltd. +*> Sven Hammarling, Numerical Algorithms Group Ltd. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DTRMM(SIDE,UPLO,TRANSA,DIAG,M,N,ALPHA,A,LDA,B,LDB) +* +* -- Reference BLAS level3 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + DOUBLE PRECISION ALPHA + INTEGER LDA,LDB,M,N + CHARACTER DIAG,SIDE,TRANSA,UPLO +* .. +* .. Array Arguments .. + DOUBLE PRECISION A(LDA,*),B(LDB,*) +* .. +* +* ===================================================================== +* +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Intrinsic Functions .. + INTRINSIC MAX +* .. +* .. Local Scalars .. + DOUBLE PRECISION TEMP + INTEGER I,INFO,J,K,NROWA + LOGICAL LSIDE,NOUNIT,UPPER +* .. +* .. Parameters .. + DOUBLE PRECISION ONE,ZERO + PARAMETER (ONE=1.0D+0,ZERO=0.0D+0) +* .. +* +* Test the input parameters. +* + LSIDE = LSAME(SIDE,'L') + IF (LSIDE) THEN + NROWA = M + ELSE + NROWA = N + END IF + NOUNIT = LSAME(DIAG,'N') + UPPER = LSAME(UPLO,'U') +* + INFO = 0 + IF ((.NOT.LSIDE) .AND. 
(.NOT.LSAME(SIDE,'R'))) THEN + INFO = 1 + ELSE IF ((.NOT.UPPER) .AND. (.NOT.LSAME(UPLO,'L'))) THEN + INFO = 2 + ELSE IF ((.NOT.LSAME(TRANSA,'N')) .AND. + + (.NOT.LSAME(TRANSA,'T')) .AND. + + (.NOT.LSAME(TRANSA,'C'))) THEN + INFO = 3 + ELSE IF ((.NOT.LSAME(DIAG,'U')) .AND. (.NOT.LSAME(DIAG,'N'))) THEN + INFO = 4 + ELSE IF (M.LT.0) THEN + INFO = 5 + ELSE IF (N.LT.0) THEN + INFO = 6 + ELSE IF (LDA.LT.MAX(1,NROWA)) THEN + INFO = 9 + ELSE IF (LDB.LT.MAX(1,M)) THEN + INFO = 11 + END IF + IF (INFO.NE.0) THEN + CALL XERBLA('DTRMM ',INFO) + RETURN + END IF +* +* Quick return if possible. +* + IF (M.EQ.0 .OR. N.EQ.0) RETURN +* +* And when alpha.eq.zero. +* + IF (ALPHA.EQ.ZERO) THEN + DO 20 J = 1,N + DO 10 I = 1,M + B(I,J) = ZERO + 10 CONTINUE + 20 CONTINUE + RETURN + END IF +* +* Start the operations. +* + IF (LSIDE) THEN + IF (LSAME(TRANSA,'N')) THEN +* +* Form B := alpha*A*B. +* + IF (UPPER) THEN + DO 50 J = 1,N + DO 40 K = 1,M + IF (B(K,J).NE.ZERO) THEN + TEMP = ALPHA*B(K,J) + DO 30 I = 1,K - 1 + B(I,J) = B(I,J) + TEMP*A(I,K) + 30 CONTINUE + IF (NOUNIT) TEMP = TEMP*A(K,K) + B(K,J) = TEMP + END IF + 40 CONTINUE + 50 CONTINUE + ELSE + DO 80 J = 1,N + DO 70 K = M,1,-1 + IF (B(K,J).NE.ZERO) THEN + TEMP = ALPHA*B(K,J) + B(K,J) = TEMP + IF (NOUNIT) B(K,J) = B(K,J)*A(K,K) + DO 60 I = K + 1,M + B(I,J) = B(I,J) + TEMP*A(I,K) + 60 CONTINUE + END IF + 70 CONTINUE + 80 CONTINUE + END IF + ELSE +* +* Form B := alpha*A**T*B. +* + IF (UPPER) THEN + DO 110 J = 1,N + DO 100 I = M,1,-1 + TEMP = B(I,J) + IF (NOUNIT) TEMP = TEMP*A(I,I) + DO 90 K = 1,I - 1 + TEMP = TEMP + A(K,I)*B(K,J) + 90 CONTINUE + B(I,J) = ALPHA*TEMP + 100 CONTINUE + 110 CONTINUE + ELSE + DO 140 J = 1,N + DO 130 I = 1,M + TEMP = B(I,J) + IF (NOUNIT) TEMP = TEMP*A(I,I) + DO 120 K = I + 1,M + TEMP = TEMP + A(K,I)*B(K,J) + 120 CONTINUE + B(I,J) = ALPHA*TEMP + 130 CONTINUE + 140 CONTINUE + END IF + END IF + ELSE + IF (LSAME(TRANSA,'N')) THEN +* +* Form B := alpha*B*A. +* + IF (UPPER) THEN + DO 180 J = N,1,-1 + TEMP = ALPHA + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 150 I = 1,M + B(I,J) = TEMP*B(I,J) + 150 CONTINUE + DO 170 K = 1,J - 1 + IF (A(K,J).NE.ZERO) THEN + TEMP = ALPHA*A(K,J) + DO 160 I = 1,M + B(I,J) = B(I,J) + TEMP*B(I,K) + 160 CONTINUE + END IF + 170 CONTINUE + 180 CONTINUE + ELSE + DO 220 J = 1,N + TEMP = ALPHA + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 190 I = 1,M + B(I,J) = TEMP*B(I,J) + 190 CONTINUE + DO 210 K = J + 1,N + IF (A(K,J).NE.ZERO) THEN + TEMP = ALPHA*A(K,J) + DO 200 I = 1,M + B(I,J) = B(I,J) + TEMP*B(I,K) + 200 CONTINUE + END IF + 210 CONTINUE + 220 CONTINUE + END IF + ELSE +* +* Form B := alpha*B*A**T. +* + IF (UPPER) THEN + DO 260 K = 1,N + DO 240 J = 1,K - 1 + IF (A(J,K).NE.ZERO) THEN + TEMP = ALPHA*A(J,K) + DO 230 I = 1,M + B(I,J) = B(I,J) + TEMP*B(I,K) + 230 CONTINUE + END IF + 240 CONTINUE + TEMP = ALPHA + IF (NOUNIT) TEMP = TEMP*A(K,K) + IF (TEMP.NE.ONE) THEN + DO 250 I = 1,M + B(I,K) = TEMP*B(I,K) + 250 CONTINUE + END IF + 260 CONTINUE + ELSE + DO 300 K = N,1,-1 + DO 280 J = K + 1,N + IF (A(J,K).NE.ZERO) THEN + TEMP = ALPHA*A(J,K) + DO 270 I = 1,M + B(I,J) = B(I,J) + TEMP*B(I,K) + 270 CONTINUE + END IF + 280 CONTINUE + TEMP = ALPHA + IF (NOUNIT) TEMP = TEMP*A(K,K) + IF (TEMP.NE.ONE) THEN + DO 290 I = 1,M + B(I,K) = TEMP*B(I,K) + 290 CONTINUE + END IF + 300 CONTINUE + END IF + END IF + END IF +* + RETURN +* +* End of DTRMM . 
+* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmv.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmv.f new file mode 100644 index 0000000..71459fe --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/dtrmv.f @@ -0,0 +1,342 @@ +*> \brief \b DTRMV +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE DTRMV(UPLO,TRANS,DIAG,N,A,LDA,X,INCX) +* +* .. Scalar Arguments .. +* INTEGER INCX,LDA,N +* CHARACTER DIAG,TRANS,UPLO +* .. +* .. Array Arguments .. +* DOUBLE PRECISION A(LDA,*),X(*) +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> DTRMV performs one of the matrix-vector operations +*> +*> x := A*x, or x := A**T*x, +*> +*> where x is an n element vector and A is an n by n unit, or non-unit, +*> upper or lower triangular matrix. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] UPLO +*> \verbatim +*> UPLO is CHARACTER*1 +*> On entry, UPLO specifies whether the matrix is an upper or +*> lower triangular matrix as follows: +*> +*> UPLO = 'U' or 'u' A is an upper triangular matrix. +*> +*> UPLO = 'L' or 'l' A is a lower triangular matrix. +*> \endverbatim +*> +*> \param[in] TRANS +*> \verbatim +*> TRANS is CHARACTER*1 +*> On entry, TRANS specifies the operation to be performed as +*> follows: +*> +*> TRANS = 'N' or 'n' x := A*x. +*> +*> TRANS = 'T' or 't' x := A**T*x. +*> +*> TRANS = 'C' or 'c' x := A**T*x. +*> \endverbatim +*> +*> \param[in] DIAG +*> \verbatim +*> DIAG is CHARACTER*1 +*> On entry, DIAG specifies whether or not A is unit +*> triangular as follows: +*> +*> DIAG = 'U' or 'u' A is assumed to be unit triangular. +*> +*> DIAG = 'N' or 'n' A is not assumed to be unit +*> triangular. +*> \endverbatim +*> +*> \param[in] N +*> \verbatim +*> N is INTEGER +*> On entry, N specifies the order of the matrix A. +*> N must be at least zero. +*> \endverbatim +*> +*> \param[in] A +*> \verbatim +*> A is DOUBLE PRECISION array of DIMENSION ( LDA, n ). +*> Before entry with UPLO = 'U' or 'u', the leading n by n +*> upper triangular part of the array A must contain the upper +*> triangular matrix and the strictly lower triangular part of +*> A is not referenced. +*> Before entry with UPLO = 'L' or 'l', the leading n by n +*> lower triangular part of the array A must contain the lower +*> triangular matrix and the strictly upper triangular part of +*> A is not referenced. +*> Note that when DIAG = 'U' or 'u', the diagonal elements of +*> A are not referenced either, but are assumed to be unity. +*> \endverbatim +*> +*> \param[in] LDA +*> \verbatim +*> LDA is INTEGER +*> On entry, LDA specifies the first dimension of A as declared +*> in the calling (sub) program. LDA must be at least +*> max( 1, n ). +*> \endverbatim +*> +*> \param[in,out] X +*> \verbatim +*> X is DOUBLE PRECISION array of dimension at least +*> ( 1 + ( n - 1 )*abs( INCX ) ). +*> Before entry, the incremented array X must contain the n +*> element vector x. On exit, X is overwritten with the +*> tranformed vector x. +*> \endverbatim +*> +*> \param[in] INCX +*> \verbatim +*> INCX is INTEGER +*> On entry, INCX specifies the increment for the elements of +*> X. INCX must not be zero. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. 
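For reference, the UPLO='U', TRANS='N', DIAG='N', INCX=1 branch of the DTRMV operation described above reduces to a short column-oriented update. A hedged Go sketch of that one branch follows; it stores A row-major in a slice, which differs from the column-major Fortran layout, and the names are illustrative.

    // Sketch of x := A*x for upper triangular, non-unit A (one DTRMV branch).
    package main

    import "fmt"

    func trmvUpper(n int, a []float64, lda int, x []float64) {
        for j := 0; j < n; j++ {
            if x[j] == 0 {
                continue
            }
            temp := x[j]
            // Add temp*A(i,j) into the entries above the diagonal, then scale
            // the diagonal entry, mirroring loops 20/10 in the reference code.
            for i := 0; i < j; i++ {
                x[i] += temp * a[i*lda+j]
            }
            x[j] *= a[j*lda+j]
        }
    }

    func main() {
        // A = [2 1; 0 3], x = [1 1]  =>  A*x = [3 3]
        a := []float64{2, 1, 0, 3}
        x := []float64{1, 1}
        trmvUpper(2, a, 2, x)
        fmt.Println(x) // [3 3]
    }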
+* +*> \date November 2011 +* +*> \ingroup double_blas_level2 +* +*> \par Further Details: +* ===================== +*> +*> \verbatim +*> +*> Level 2 Blas routine. +*> The vector and matrix arguments are not referenced when N = 0, or M = 0 +*> +*> -- Written on 22-October-1986. +*> Jack Dongarra, Argonne National Lab. +*> Jeremy Du Croz, Nag Central Office. +*> Sven Hammarling, Nag Central Office. +*> Richard Hanson, Sandia National Labs. +*> \endverbatim +*> +* ===================================================================== + SUBROUTINE DTRMV(UPLO,TRANS,DIAG,N,A,LDA,X,INCX) +* +* -- Reference BLAS level2 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + INTEGER INCX,LDA,N + CHARACTER DIAG,TRANS,UPLO +* .. +* .. Array Arguments .. + DOUBLE PRECISION A(LDA,*),X(*) +* .. +* +* ===================================================================== +* +* .. Parameters .. + DOUBLE PRECISION ZERO + PARAMETER (ZERO=0.0D+0) +* .. +* .. Local Scalars .. + DOUBLE PRECISION TEMP + INTEGER I,INFO,IX,J,JX,KX + LOGICAL NOUNIT +* .. +* .. External Functions .. + LOGICAL LSAME + EXTERNAL LSAME +* .. +* .. External Subroutines .. + EXTERNAL XERBLA +* .. +* .. Intrinsic Functions .. + INTRINSIC MAX +* .. +* +* Test the input parameters. +* + INFO = 0 + IF (.NOT.LSAME(UPLO,'U') .AND. .NOT.LSAME(UPLO,'L')) THEN + INFO = 1 + ELSE IF (.NOT.LSAME(TRANS,'N') .AND. .NOT.LSAME(TRANS,'T') .AND. + + .NOT.LSAME(TRANS,'C')) THEN + INFO = 2 + ELSE IF (.NOT.LSAME(DIAG,'U') .AND. .NOT.LSAME(DIAG,'N')) THEN + INFO = 3 + ELSE IF (N.LT.0) THEN + INFO = 4 + ELSE IF (LDA.LT.MAX(1,N)) THEN + INFO = 6 + ELSE IF (INCX.EQ.0) THEN + INFO = 8 + END IF + IF (INFO.NE.0) THEN + CALL XERBLA('DTRMV ',INFO) + RETURN + END IF +* +* Quick return if possible. +* + IF (N.EQ.0) RETURN +* + NOUNIT = LSAME(DIAG,'N') +* +* Set up the start point in X if the increment is not unity. This +* will be ( N - 1 )*INCX too small for descending loops. +* + IF (INCX.LE.0) THEN + KX = 1 - (N-1)*INCX + ELSE IF (INCX.NE.1) THEN + KX = 1 + END IF +* +* Start the operations. In this version the elements of A are +* accessed sequentially with one pass through A. +* + IF (LSAME(TRANS,'N')) THEN +* +* Form x := A*x. +* + IF (LSAME(UPLO,'U')) THEN + IF (INCX.EQ.1) THEN + DO 20 J = 1,N + IF (X(J).NE.ZERO) THEN + TEMP = X(J) + DO 10 I = 1,J - 1 + X(I) = X(I) + TEMP*A(I,J) + 10 CONTINUE + IF (NOUNIT) X(J) = X(J)*A(J,J) + END IF + 20 CONTINUE + ELSE + JX = KX + DO 40 J = 1,N + IF (X(JX).NE.ZERO) THEN + TEMP = X(JX) + IX = KX + DO 30 I = 1,J - 1 + X(IX) = X(IX) + TEMP*A(I,J) + IX = IX + INCX + 30 CONTINUE + IF (NOUNIT) X(JX) = X(JX)*A(J,J) + END IF + JX = JX + INCX + 40 CONTINUE + END IF + ELSE + IF (INCX.EQ.1) THEN + DO 60 J = N,1,-1 + IF (X(J).NE.ZERO) THEN + TEMP = X(J) + DO 50 I = N,J + 1,-1 + X(I) = X(I) + TEMP*A(I,J) + 50 CONTINUE + IF (NOUNIT) X(J) = X(J)*A(J,J) + END IF + 60 CONTINUE + ELSE + KX = KX + (N-1)*INCX + JX = KX + DO 80 J = N,1,-1 + IF (X(JX).NE.ZERO) THEN + TEMP = X(JX) + IX = KX + DO 70 I = N,J + 1,-1 + X(IX) = X(IX) + TEMP*A(I,J) + IX = IX - INCX + 70 CONTINUE + IF (NOUNIT) X(JX) = X(JX)*A(J,J) + END IF + JX = JX - INCX + 80 CONTINUE + END IF + END IF + ELSE +* +* Form x := A**T*x. 
+* + IF (LSAME(UPLO,'U')) THEN + IF (INCX.EQ.1) THEN + DO 100 J = N,1,-1 + TEMP = X(J) + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 90 I = J - 1,1,-1 + TEMP = TEMP + A(I,J)*X(I) + 90 CONTINUE + X(J) = TEMP + 100 CONTINUE + ELSE + JX = KX + (N-1)*INCX + DO 120 J = N,1,-1 + TEMP = X(JX) + IX = JX + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 110 I = J - 1,1,-1 + IX = IX - INCX + TEMP = TEMP + A(I,J)*X(IX) + 110 CONTINUE + X(JX) = TEMP + JX = JX - INCX + 120 CONTINUE + END IF + ELSE + IF (INCX.EQ.1) THEN + DO 140 J = 1,N + TEMP = X(J) + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 130 I = J + 1,N + TEMP = TEMP + A(I,J)*X(I) + 130 CONTINUE + X(J) = TEMP + 140 CONTINUE + ELSE + JX = KX + DO 160 J = 1,N + TEMP = X(JX) + IX = JX + IF (NOUNIT) TEMP = TEMP*A(J,J) + DO 150 I = J + 1,N + IX = IX + INCX + TEMP = TEMP + A(I,J)*X(IX) + 150 CONTINUE + X(JX) = TEMP + JX = JX + INCX + 160 CONTINUE + END IF + END IF + END IF +* + RETURN +* +* End of DTRMV . +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/lsame.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/lsame.f new file mode 100644 index 0000000..315304c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/lsame.f @@ -0,0 +1,125 @@ +*> \brief \b LSAME +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* LOGICAL FUNCTION LSAME( CA, CB ) +* +* .. Scalar Arguments .. +* CHARACTER CA, CB +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> LSAME returns .TRUE. if CA is the same letter as CB regardless of +*> case. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] CA +*> \verbatim +*> \endverbatim +*> +*> \param[in] CB +*> \verbatim +*> CA and CB specify the single characters to be compared. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup auxOTHERauxiliary +* +* ===================================================================== + LOGICAL FUNCTION LSAME( CA, CB ) +* +* -- LAPACK auxiliary routine (version 3.4.0) -- +* -- LAPACK is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER CA, CB +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC ICHAR +* .. +* .. Local Scalars .. + INTEGER INTA, INTB, ZCODE +* .. +* .. Executable Statements .. +* +* Test if the characters are equal +* + LSAME = CA.EQ.CB + IF( LSAME ) + $ RETURN +* +* Now test for equivalence if both characters are alphabetic. +* + ZCODE = ICHAR( 'Z' ) +* +* Use 'Z' rather than 'A' so that ASCII can be detected on Prime +* machines, on which ICHAR returns a value with bit 8 set. +* ICHAR('A') on Prime machines returns 193 which is the same as +* ICHAR('A') on an EBCDIC machine. +* + INTA = ICHAR( CA ) + INTB = ICHAR( CB ) +* + IF( ZCODE.EQ.90 .OR. ZCODE.EQ.122 ) THEN +* +* ASCII is assumed - ZCODE is the ASCII code of either lower or +* upper case 'Z'. +* + IF( INTA.GE.97 .AND. INTA.LE.122 ) INTA = INTA - 32 + IF( INTB.GE.97 .AND. INTB.LE.122 ) INTB = INTB - 32 +* + ELSE IF( ZCODE.EQ.233 .OR. ZCODE.EQ.169 ) THEN +* +* EBCDIC is assumed - ZCODE is the EBCDIC code of either lower or +* upper case 'Z'. 
+* + IF( INTA.GE.129 .AND. INTA.LE.137 .OR. + $ INTA.GE.145 .AND. INTA.LE.153 .OR. + $ INTA.GE.162 .AND. INTA.LE.169 ) INTA = INTA + 64 + IF( INTB.GE.129 .AND. INTB.LE.137 .OR. + $ INTB.GE.145 .AND. INTB.LE.153 .OR. + $ INTB.GE.162 .AND. INTB.LE.169 ) INTB = INTB + 64 +* + ELSE IF( ZCODE.EQ.218 .OR. ZCODE.EQ.250 ) THEN +* +* ASCII is assumed, on Prime machines - ZCODE is the ASCII code +* plus 128 of either lower or upper case 'Z'. +* + IF( INTA.GE.225 .AND. INTA.LE.250 ) INTA = INTA - 32 + IF( INTB.GE.225 .AND. INTB.LE.250 ) INTB = INTB - 32 + END IF + LSAME = INTA.EQ.INTB +* +* RETURN +* +* End of LSAME +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/netlib.go b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/netlib.go new file mode 100644 index 0000000..4a73906 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/netlib.go @@ -0,0 +1,68 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package netlib + +// void dlahr2_(int* n, int* k, int* nb, double* a, int* lda, double* tau, double* t, int* ldt, double* y, int* ldy); +// +// void dlaqr5_(int* wantt, int* wantz, int* kacc22, int* n, int* ktop, int* kbot, int* nshfts, +// double* sr, double* si, double* h, int* ldh, int* iloz, int* ihiz, +// double* z, int* ldz, double* v, int* ldv, double* u, int* ldu, +// int* nv, double* wv, int* ldwv, int* nh, double* wh, int* ldwh); +import "C" + +func Dlahr2(n, k, nb int, a []float64, lda int, tau, t []float64, ldt int, y []float64, ldy int) { + func() { + n := C.int(n) + k := C.int(k) + nb := C.int(nb) + lda := C.int(lda) + ldt := C.int(ldt) + ldy := C.int(ldy) + C.dlahr2_((*C.int)(&n), (*C.int)(&k), (*C.int)(&nb), + (*C.double)(&a[0]), (*C.int)(&lda), + (*C.double)(&tau[0]), + (*C.double)(&t[0]), (*C.int)(&ldt), + (*C.double)(&y[0]), (*C.int)(&ldy)) + }() +} + +func Dlaqr5(wantt, wantz bool, kacc22 int, n, ktop, kbot int, nshfts int, sr, si []float64, h []float64, + ldh int, iloz, ihiz int, z []float64, ldz int, v []float64, ldv int, + u []float64, ldu int, nh int, wh []float64, ldwh int, nv int, wv []float64, ldwv int) { + func() { + wt := C.int(0) + if wantt { + wt = 1 + } + wz := C.int(0) + if wantz { + wz = 1 + } + kacc22 := C.int(kacc22) + n := C.int(n) + ktop := C.int(ktop) + kbot := C.int(kbot) + nshfts := C.int(nshfts) + ldh := C.int(ldh) + iloz := C.int(iloz) + ihiz := C.int(ihiz) + ldz := C.int(ldz) + ldv := C.int(ldv) + ldu := C.int(ldu) + nh := C.int(nh) + ldwh := C.int(ldwh) + nv := C.int(nv) + ldwv := C.int(ldwv) + C.dlaqr5_((*C.int)(&wt), (*C.int)(&wz), (*C.int)(&kacc22), + (*C.int)(&n), (*C.int)(&ktop), (*C.int)(&kbot), + (*C.int)(&nshfts), (*C.double)(&sr[0]), (*C.double)(&si[0]), + (*C.double)(&h[0]), (*C.int)(&ldh), + (*C.int)(&iloz), (*C.int)(&ihiz), (*C.double)(&z[0]), (*C.int)(&ldz), + (*C.double)(&v[0]), (*C.int)(&ldv), + (*C.double)(&u[0]), (*C.int)(&ldu), + (*C.int)(&nh), (*C.double)(&wh[0]), (*C.int)(&ldwh), + (*C.int)(&nv), (*C.double)(&wv[0]), (*C.int)(&ldwv)) + }() +} diff --git a/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/xerbla.f b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/xerbla.f new file mode 100644 index 0000000..eb1c037 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/internal/testdata/netlib/xerbla.f @@ -0,0 +1,89 @@ +*> \brief \b XERBLA +* +* =========== DOCUMENTATION =========== +* +* Online html documentation available at +* 
http://www.netlib.org/lapack/explore-html/ +* +* Definition: +* =========== +* +* SUBROUTINE XERBLA( SRNAME, INFO ) +* +* .. Scalar Arguments .. +* CHARACTER*(*) SRNAME +* INTEGER INFO +* .. +* +* +*> \par Purpose: +* ============= +*> +*> \verbatim +*> +*> XERBLA is an error handler for the LAPACK routines. +*> It is called by an LAPACK routine if an input parameter has an +*> invalid value. A message is printed and execution stops. +*> +*> Installers may consider modifying the STOP statement in order to +*> call system-specific exception-handling facilities. +*> \endverbatim +* +* Arguments: +* ========== +* +*> \param[in] SRNAME +*> \verbatim +*> SRNAME is CHARACTER*(*) +*> The name of the routine which called XERBLA. +*> \endverbatim +*> +*> \param[in] INFO +*> \verbatim +*> INFO is INTEGER +*> The position of the invalid parameter in the parameter list +*> of the calling routine. +*> \endverbatim +* +* Authors: +* ======== +* +*> \author Univ. of Tennessee +*> \author Univ. of California Berkeley +*> \author Univ. of Colorado Denver +*> \author NAG Ltd. +* +*> \date November 2011 +* +*> \ingroup aux_blas +* +* ===================================================================== + SUBROUTINE XERBLA( SRNAME, INFO ) +* +* -- Reference BLAS level1 routine (version 3.4.0) -- +* -- Reference BLAS is a software package provided by Univ. of Tennessee, -- +* -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..-- +* November 2011 +* +* .. Scalar Arguments .. + CHARACTER*(*) SRNAME + INTEGER INFO +* .. +* +* ===================================================================== +* +* .. Intrinsic Functions .. + INTRINSIC LEN_TRIM +* .. +* .. Executable Statements .. +* + WRITE( *, FMT = 9999 )SRNAME( 1:LEN_TRIM( SRNAME ) ), INFO +* + STOP +* + 9999 FORMAT( ' ** On entry to ', A, ' parameter number ', I2, ' had ', + $ 'an illegal value' ) +* +* End of XERBLA +* + END diff --git a/vendor/gonum.org/v1/gonum/lapack/lapack.go b/vendor/gonum.org/v1/gonum/lapack/lapack.go new file mode 100644 index 0000000..eef14c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/lapack.go @@ -0,0 +1,213 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lapack + +import "gonum.org/v1/gonum/blas" + +// Complex128 defines the public complex128 LAPACK API supported by gonum/lapack. +type Complex128 interface{} + +// Float64 defines the public float64 LAPACK API supported by gonum/lapack. 
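+//
+// Editor's note (a sketch added for this review, not part of the upstream
+// gonum source): any concrete type implementing this interface can back the
+// lapack64 wrappers vendored below. From the point of view of a consuming
+// package, the pure-Go implementation in gonum.org/v1/gonum/lapack/gonum
+// (also vendored by this patch) satisfies it:
+//
+//    import (
+//        "gonum.org/v1/gonum/lapack"
+//        "gonum.org/v1/gonum/lapack/gonum"
+//    )
+//
+//    // Compile-time check that the pure-Go implementation is complete.
+//    var _ lapack.Float64 = gonum.Implementation{}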
+type Float64 interface { + Dgecon(norm MatrixNorm, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 + Dgeev(jobvl LeftEVJob, jobvr RightEVJob, n int, a []float64, lda int, wr, wi []float64, vl []float64, ldvl int, vr []float64, ldvr int, work []float64, lwork int) (first int) + Dgels(trans blas.Transpose, m, n, nrhs int, a []float64, lda int, b []float64, ldb int, work []float64, lwork int) bool + Dgelqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) + Dgeqrf(m, n int, a []float64, lda int, tau, work []float64, lwork int) + Dgesvd(jobU, jobVT SVDJob, m, n int, a []float64, lda int, s, u []float64, ldu int, vt []float64, ldvt int, work []float64, lwork int) (ok bool) + Dgetrf(m, n int, a []float64, lda int, ipiv []int) (ok bool) + Dgetri(n int, a []float64, lda int, ipiv []int, work []float64, lwork int) (ok bool) + Dgetrs(trans blas.Transpose, n, nrhs int, a []float64, lda int, ipiv []int, b []float64, ldb int) + Dggsvd3(jobU, jobV, jobQ GSVDJob, m, n, p int, a []float64, lda int, b []float64, ldb int, alpha, beta, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, work []float64, lwork int, iwork []int) (k, l int, ok bool) + Dlantr(norm MatrixNorm, uplo blas.Uplo, diag blas.Diag, m, n int, a []float64, lda int, work []float64) float64 + Dlange(norm MatrixNorm, m, n int, a []float64, lda int, work []float64) float64 + Dlansy(norm MatrixNorm, uplo blas.Uplo, n int, a []float64, lda int, work []float64) float64 + Dlapmt(forward bool, m, n int, x []float64, ldx int, k []int) + Dormqr(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) + Dormlq(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) + Dpocon(uplo blas.Uplo, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 + Dpotrf(ul blas.Uplo, n int, a []float64, lda int) (ok bool) + Dpotri(ul blas.Uplo, n int, a []float64, lda int) (ok bool) + Dpotrs(ul blas.Uplo, n, nrhs int, a []float64, lda int, b []float64, ldb int) + Dsyev(jobz EVJob, uplo blas.Uplo, n int, a []float64, lda int, w, work []float64, lwork int) (ok bool) + Dtrcon(norm MatrixNorm, uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int, work []float64, iwork []int) float64 + Dtrtri(uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int) (ok bool) + Dtrtrs(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n, nrhs int, a []float64, lda int, b []float64, ldb int) (ok bool) +} + +// Direct specifies the direction of the multiplication for the Householder matrix. +type Direct byte + +const ( + Forward Direct = 'F' // Reflectors are right-multiplied, H_0 * H_1 * ... * H_{k-1}. + Backward Direct = 'B' // Reflectors are left-multiplied, H_{k-1} * ... * H_1 * H_0. +) + +// Sort is the sorting order. +type Sort byte + +const ( + SortIncreasing Sort = 'I' + SortDecreasing Sort = 'D' +) + +// StoreV indicates the storage direction of elementary reflectors. +type StoreV byte + +const ( + ColumnWise StoreV = 'C' // Reflector stored in a column of the matrix. + RowWise StoreV = 'R' // Reflector stored in a row of the matrix. +) + +// MatrixNorm represents the kind of matrix norm to compute. 
+type MatrixNorm byte + +const ( + MaxAbs MatrixNorm = 'M' // max(abs(A(i,j))) + MaxColumnSum MatrixNorm = 'O' // Maximum absolute column sum (one norm) + MaxRowSum MatrixNorm = 'I' // Maximum absolute row sum (infinity norm) + Frobenius MatrixNorm = 'F' // Frobenius norm (sqrt of sum of squares) +) + +// MatrixType represents the kind of matrix represented in the data. +type MatrixType byte + +const ( + General MatrixType = 'G' // A general dense matrix. + UpperTri MatrixType = 'U' // An upper triangular matrix. + LowerTri MatrixType = 'L' // A lower triangular matrix. +) + +// Pivot specifies the pivot type for plane rotations. +type Pivot byte + +const ( + Variable Pivot = 'V' + Top Pivot = 'T' + Bottom Pivot = 'B' +) + +// ApplyOrtho specifies which orthogonal matrix is applied in Dormbr. +type ApplyOrtho byte + +const ( + ApplyP ApplyOrtho = 'P' // Apply P or P^T. + ApplyQ ApplyOrtho = 'Q' // Apply Q or Q^T. +) + +// GenOrtho specifies which orthogonal matrix is generated in Dorgbr. +type GenOrtho byte + +const ( + GeneratePT GenOrtho = 'P' // Generate P^T. + GenerateQ GenOrtho = 'Q' // Generate Q. +) + +// SVDJob specifies the singular vector computation type for SVD. +type SVDJob byte + +const ( + SVDAll SVDJob = 'A' // Compute all columns of the orthogonal matrix U or V. + SVDStore SVDJob = 'S' // Compute the singular vectors and store them in the orthogonal matrix U or V. + SVDOverwrite SVDJob = 'O' // Compute the singular vectors and overwrite them on the input matrix A. + SVDNone SVDJob = 'N' // Do not compute singular vectors. +) + +// GSVDJob specifies the singular vector computation type for Generalized SVD. +type GSVDJob byte + +const ( + GSVDU GSVDJob = 'U' // Compute orthogonal matrix U. + GSVDV GSVDJob = 'V' // Compute orthogonal matrix V. + GSVDQ GSVDJob = 'Q' // Compute orthogonal matrix Q. + GSVDUnit GSVDJob = 'I' // Use unit-initialized matrix. + GSVDNone GSVDJob = 'N' // Do not compute orthogonal matrix. +) + +// EVComp specifies how eigenvectors are computed in Dsteqr. +type EVComp byte + +const ( + EVOrig EVComp = 'V' // Compute eigenvectors of the original symmetric matrix. + EVTridiag EVComp = 'I' // Compute eigenvectors of the tridiagonal matrix. + EVCompNone EVComp = 'N' // Do not compute eigenvectors. +) + +// EVJob specifies whether eigenvectors are computed in Dsyev. +type EVJob byte + +const ( + EVCompute EVJob = 'V' // Compute eigenvectors. + EVNone EVJob = 'N' // Do not compute eigenvectors. +) + +// LeftEVJob specifies whether left eigenvectors are computed in Dgeev. +type LeftEVJob byte + +const ( + LeftEVCompute LeftEVJob = 'V' // Compute left eigenvectors. + LeftEVNone LeftEVJob = 'N' // Do not compute left eigenvectors. +) + +// RightEVJob specifies whether right eigenvectors are computed in Dgeev. +type RightEVJob byte + +const ( + RightEVCompute RightEVJob = 'V' // Compute right eigenvectors. + RightEVNone RightEVJob = 'N' // Do not compute right eigenvectors. +) + +// BalanceJob specifies matrix balancing operation. +type BalanceJob byte + +const ( + Permute BalanceJob = 'P' + Scale BalanceJob = 'S' + PermuteScale BalanceJob = 'B' + BalanceNone BalanceJob = 'N' +) + +// SchurJob specifies whether the Schur form is computed in Dhseqr. +type SchurJob byte + +const ( + EigenvaluesOnly SchurJob = 'E' + EigenvaluesAndSchur SchurJob = 'S' +) + +// SchurComp specifies whether and how the Schur vectors are computed in Dhseqr. +type SchurComp byte + +const ( + SchurOrig SchurComp = 'V' // Compute Schur vectors of the original matrix. 
+ SchurHess SchurComp = 'I' // Compute Schur vectors of the upper Hessenberg matrix. + SchurNone SchurComp = 'N' // Do not compute Schur vectors. +) + +// UpdateSchurComp specifies whether the matrix of Schur vectors is updated in Dtrexc. +type UpdateSchurComp byte + +const ( + UpdateSchur UpdateSchurComp = 'V' // Update the matrix of Schur vectors. + UpdateSchurNone UpdateSchurComp = 'N' // Do not update the matrix of Schur vectors. +) + +// EVSide specifies what eigenvectors are computed in Dtrevc3. +type EVSide byte + +const ( + EVRight EVSide = 'R' // Compute only right eigenvectors. + EVLeft EVSide = 'L' // Compute only left eigenvectors. + EVBoth EVSide = 'B' // Compute both right and left eigenvectors. +) + +// EVHowMany specifies which eigenvectors are computed in Dtrevc3 and how. +type EVHowMany byte + +const ( + EVAll EVHowMany = 'A' // Compute all right and/or left eigenvectors. + EVAllMulQ EVHowMany = 'B' // Compute all right and/or left eigenvectors multiplied by an input matrix. + EVSelected EVHowMany = 'S' // Compute selected right and/or left eigenvectors. +) diff --git a/vendor/gonum.org/v1/gonum/lapack/lapack64/doc.go b/vendor/gonum.org/v1/gonum/lapack/lapack64/doc.go new file mode 100644 index 0000000..da19e3e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/lapack64/doc.go @@ -0,0 +1,20 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lapack64 provides a set of convenient wrapper functions for LAPACK +// calls, as specified in the netlib standard (www.netlib.org). +// +// The native Go routines are used by default, and the Use function can be used +// to set an alternative implementation. +// +// If the type of matrix (General, Symmetric, etc.) is known and fixed, it is +// used in the wrapper signature. In many cases, however, the type of the matrix +// changes during the call to the routine, for example the matrix is symmetric on +// entry and is triangular on exit. In these cases the correct types should be checked +// in the documentation. +// +// The full set of Lapack functions is very large, and it is not clear that a +// full implementation is desirable, let alone feasible. Please open up an issue +// if there is a specific function you need and/or are willing to implement. +package lapack64 // import "gonum.org/v1/gonum/lapack/lapack64" diff --git a/vendor/gonum.org/v1/gonum/lapack/lapack64/lapack64.go b/vendor/gonum.org/v1/gonum/lapack/lapack64/lapack64.go new file mode 100644 index 0000000..208ee1f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/lapack64/lapack64.go @@ -0,0 +1,581 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lapack64 + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/gonum" +) + +var lapack64 lapack.Float64 = gonum.Implementation{} + +// Use sets the LAPACK float64 implementation to be used by subsequent BLAS calls. +// The default implementation is native.Implementation. +func Use(l lapack.Float64) { + lapack64 = l +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +// Potrf computes the Cholesky factorization of a. 
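+//
+// Editor's note (a sketch, not part of the upstream gonum source): the Use
+// function above swaps the backing implementation for every wrapper in this
+// package. A minimal consumer-side example, assuming a cgo-backed
+// implementation such as gonum.org/v1/netlib/lapack/netlib is available;
+// that import path is an assumption and is not vendored by this patch:
+//
+//    import (
+//        "gonum.org/v1/gonum/lapack/lapack64"
+//        netlapack "gonum.org/v1/netlib/lapack/netlib"
+//    )
+//
+//    func init() {
+//        // Route all subsequent lapack64 calls to the cgo-backed LAPACK.
+//        lapack64.Use(netlapack.Implementation{})
+//    }
+//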
+// The factorization has the form +// A = U^T * U if a.Uplo == blas.Upper, or +// A = L * L^T if a.Uplo == blas.Lower, +// where U is an upper triangular matrix and L is lower triangular. +// The triangular matrix is returned in t, and the underlying data between +// a and t is shared. The returned bool indicates whether a is positive +// definite and the factorization could be finished. +func Potrf(a blas64.Symmetric) (t blas64.Triangular, ok bool) { + ok = lapack64.Dpotrf(a.Uplo, a.N, a.Data, max(1, a.Stride)) + t.Uplo = a.Uplo + t.N = a.N + t.Data = a.Data + t.Stride = a.Stride + t.Diag = blas.NonUnit + return +} + +// Potri computes the inverse of a real symmetric positive definite matrix A +// using its Cholesky factorization. +// +// On entry, t contains the triangular factor U or L from the Cholesky +// factorization A = U^T*U or A = L*L^T, as computed by Potrf. +// +// On return, the upper or lower triangle of the (symmetric) inverse of A is +// stored in t, overwriting the input factor U or L, and also returned in a. The +// underlying data between a and t is shared. +// +// The returned bool indicates whether the inverse was computed successfully. +func Potri(t blas64.Triangular) (a blas64.Symmetric, ok bool) { + ok = lapack64.Dpotri(t.Uplo, t.N, t.Data, max(1, t.Stride)) + a.Uplo = t.Uplo + a.N = t.N + a.Data = t.Data + a.Stride = t.Stride + return +} + +// Potrs solves a system of n linear equations A*X = B where A is an n×n +// symmetric positive definite matrix and B is an n×nrhs matrix, using the +// Cholesky factorization A = U^T*U or A = L*L^T. t contains the corresponding +// triangular factor as returned by Potrf. On entry, B contains the right-hand +// side matrix B, on return it contains the solution matrix X. +func Potrs(t blas64.Triangular, b blas64.General) { + lapack64.Dpotrs(t.Uplo, t.N, b.Cols, t.Data, max(1, t.Stride), b.Data, max(1, b.Stride)) +} + +// Gecon estimates the reciprocal of the condition number of the n×n matrix A +// given the LU decomposition of the matrix. The condition number computed may +// be based on the 1-norm or the ∞-norm. +// +// a contains the result of the LU decomposition of A as computed by Getrf. +// +// anorm is the corresponding 1-norm or ∞-norm of the original matrix A. +// +// work is a temporary data slice of length at least 4*n and Gecon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Gecon will panic otherwise. +func Gecon(norm lapack.MatrixNorm, a blas64.General, anorm float64, work []float64, iwork []int) float64 { + return lapack64.Dgecon(norm, a.Cols, a.Data, max(1, a.Stride), anorm, work, iwork) +} + +// Gels finds a minimum-norm solution based on the matrices A and B using the +// QR or LQ factorization. Gels returns false if the matrix +// A is singular, and true if this solution was successfully found. +// +// The minimization problem solved depends on the input parameters. +// +// 1. If m >= n and trans == blas.NoTrans, Gels finds X such that || A*X - B||_2 +// is minimized. +// 2. If m < n and trans == blas.NoTrans, Gels finds the minimum norm solution of +// A * X = B. +// 3. If m >= n and trans == blas.Trans, Gels finds the minimum norm solution of +// A^T * X = B. +// 4. If m < n and trans == blas.Trans, Gels finds X such that || A*X - B||_2 +// is minimized. +// Note that the least-squares solutions (cases 1 and 3) perform the minimization +// per column of B. This is not the same as finding the minimum-norm matrix. 
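+//
+// Editor's note (a sketch, not part of the upstream gonum source): the
+// Potrf/Potrs pair documented above solves a symmetric positive definite
+// system. The 3×3 values below are made up for illustration and assume the
+// blas, blas64 and lapack64 packages from this vendor tree are imported:
+//
+//    // Solve A*x = b with A symmetric positive definite (upper triangle stored).
+//    a := blas64.Symmetric{
+//        N:      3,
+//        Stride: 3,
+//        Uplo:   blas.Upper,
+//        Data: []float64{
+//            4, 2, 0,
+//            0, 3, 1,
+//            0, 0, 5,
+//        },
+//    }
+//    b := blas64.General{Rows: 3, Cols: 1, Stride: 1, Data: []float64{2, 1, 7}}
+//    t, ok := lapack64.Potrf(a) // A = U^T*U; t shares a.Data
+//    if ok {
+//        lapack64.Potrs(t, b) // b now holds the solution x
+//    }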
+// +// The matrix A is a general matrix of size m×n and is modified during this call. +// The input matrix B is of size max(m,n)×nrhs, and serves two purposes. On entry, +// the elements of b specify the input matrix B. B has size m×nrhs if +// trans == blas.NoTrans, and n×nrhs if trans == blas.Trans. On exit, the +// leading submatrix of b contains the solution vectors X. If trans == blas.NoTrans, +// this submatrix is of size n×nrhs, and of size m×nrhs otherwise. +// +// Work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= max(m,n) + max(m,n,nrhs), and this function will panic +// otherwise. A longer work will enable blocked algorithms to be called. +// In the special case that lwork == -1, work[0] will be set to the optimal working +// length. +func Gels(trans blas.Transpose, a blas64.General, b blas64.General, work []float64, lwork int) bool { + return lapack64.Dgels(trans, a.Rows, a.Cols, b.Cols, a.Data, max(1, a.Stride), b.Data, max(1, b.Stride), work, lwork) +} + +// Geqrf computes the QR factorization of the m×n matrix A using a blocked +// algorithm. A is modified to contain the information to construct Q and R. +// The upper triangle of a contains the matrix R. The lower triangular elements +// (not including the diagonal) contain the elementary reflectors. tau is modified +// to contain the reflector scales. tau must have length at least min(m,n), and +// this function will panic otherwise. +// +// The ith elementary reflector can be explicitly constructed by first extracting +// the +// v[j] = 0 j < i +// v[j] = 1 j == i +// v[j] = a[j*lda+i] j > i +// and computing H_i = I - tau[i] * v * v^T. +// +// The orthonormal matrix Q can be constucted from a product of these elementary +// reflectors, Q = H_0 * H_1 * ... * H_{k-1}, where k = min(m,n). +// +// Work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= m and this function will panic otherwise. +// Geqrf is a blocked QR factorization, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Geqrf, +// the optimal work length will be stored into work[0]. +func Geqrf(a blas64.General, tau, work []float64, lwork int) { + lapack64.Dgeqrf(a.Rows, a.Cols, a.Data, max(1, a.Stride), tau, work, lwork) +} + +// Gelqf computes the LQ factorization of the m×n matrix A using a blocked +// algorithm. A is modified to contain the information to construct L and Q. The +// lower triangle of a contains the matrix L. The elements above the diagonal +// and the slice tau represent the matrix Q. tau is modified to contain the +// reflector scales. tau must have length at least min(m,n), and this function +// will panic otherwise. +// +// See Geqrf for a description of the elementary reflectors and orthonormal +// matrix Q. Q is constructed as a product of these elementary reflectors, +// Q = H_{k-1} * ... * H_1 * H_0. +// +// Work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= m and this function will panic otherwise. +// Gelqf is a blocked LQ factorization, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Gelqf, +// the optimal work length will be stored into work[0]. +func Gelqf(a blas64.General, tau, work []float64, lwork int) { + lapack64.Dgelqf(a.Rows, a.Cols, a.Data, max(1, a.Stride), tau, work, lwork) +} + +// Gesvd computes the singular value decomposition of the input matrix A. 
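+//
+// Editor's note (a sketch, not part of the upstream gonum source): the
+// workspace-query convention described for Geqrf above (lwork == -1) is used
+// throughout this package. A minimal illustration with made-up 4×3 data,
+// assuming blas64 and lapack64 are imported:
+//
+//    a := blas64.General{Rows: 4, Cols: 3, Stride: 3, Data: []float64{
+//        1, 2, 3,
+//        4, 5, 6,
+//        7, 8, 10,
+//        1, 0, 1,
+//    }}
+//    tau := make([]float64, 3) // min(m,n) reflector scales
+//    work := make([]float64, 1)
+//    lapack64.Geqrf(a, tau, work, -1) // query: optimal length is stored in work[0]
+//    work = make([]float64, int(work[0]))
+//    lapack64.Geqrf(a, tau, work, len(work)) // R in the upper triangle, reflectors below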
+// +// The singular value decomposition is +// A = U * Sigma * V^T +// where Sigma is an m×n diagonal matrix containing the singular values of A, +// U is an m×m orthogonal matrix and V is an n×n orthogonal matrix. The first +// min(m,n) columns of U and V are the left and right singular vectors of A +// respectively. +// +// jobU and jobVT are options for computing the singular vectors. The behavior +// is as follows +// jobU == lapack.SVDAll All m columns of U are returned in u +// jobU == lapack.SVDStore The first min(m,n) columns are returned in u +// jobU == lapack.SVDOverwrite The first min(m,n) columns of U are written into a +// jobU == lapack.SVDNone The columns of U are not computed. +// The behavior is the same for jobVT and the rows of V^T. At most one of jobU +// and jobVT can equal lapack.SVDOverwrite, and Gesvd will panic otherwise. +// +// On entry, a contains the data for the m×n matrix A. During the call to Gesvd +// the data is overwritten. On exit, A contains the appropriate singular vectors +// if either job is lapack.SVDOverwrite. +// +// s is a slice of length at least min(m,n) and on exit contains the singular +// values in decreasing order. +// +// u contains the left singular vectors on exit, stored columnwise. If +// jobU == lapack.SVDAll, u is of size m×m. If jobU == lapack.SVDStore u is +// of size m×min(m,n). If jobU == lapack.SVDOverwrite or lapack.SVDNone, u is +// not used. +// +// vt contains the left singular vectors on exit, stored rowwise. If +// jobV == lapack.SVDAll, vt is of size n×m. If jobVT == lapack.SVDStore vt is +// of size min(m,n)×n. If jobVT == lapack.SVDOverwrite or lapack.SVDNone, vt is +// not used. +// +// work is a slice for storing temporary memory, and lwork is the usable size of +// the slice. lwork must be at least max(5*min(m,n), 3*min(m,n)+max(m,n)). +// If lwork == -1, instead of performing Gesvd, the optimal work length will be +// stored into work[0]. Gesvd will panic if the working memory has insufficient +// storage. +// +// Gesvd returns whether the decomposition successfully completed. +func Gesvd(jobU, jobVT lapack.SVDJob, a, u, vt blas64.General, s, work []float64, lwork int) (ok bool) { + return lapack64.Dgesvd(jobU, jobVT, a.Rows, a.Cols, a.Data, max(1, a.Stride), s, u.Data, max(1, u.Stride), vt.Data, max(1, vt.Stride), work, lwork) +} + +// Getrf computes the LU decomposition of the m×n matrix A. +// The LU decomposition is a factorization of A into +// A = P * L * U +// where P is a permutation matrix, L is a unit lower triangular matrix, and +// U is a (usually) non-unit upper triangular matrix. On exit, L and U are stored +// in place into a. +// +// ipiv is a permutation vector. It indicates that row i of the matrix was +// changed with ipiv[i]. ipiv must have length at least min(m,n), and will panic +// otherwise. ipiv is zero-indexed. +// +// Getrf is the blocked version of the algorithm. +// +// Getrf returns whether the matrix A is singular. The LU decomposition will +// be computed regardless of the singularity of A, but division by zero +// will occur if the false is returned and the result is used to solve a +// system of equations. +func Getrf(a blas64.General, ipiv []int) bool { + return lapack64.Dgetrf(a.Rows, a.Cols, a.Data, max(1, a.Stride), ipiv) +} + +// Getri computes the inverse of the matrix A using the LU factorization computed +// by Getrf. On entry, a contains the PLU decomposition of A as computed by +// Getrf and on exit contains the reciprocal of the original matrix. 
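+//
+// Editor's note (a sketch, not part of the upstream gonum source): a thin SVD
+// using Gesvd as documented above, with made-up 4×3 data and the minimum
+// workspace size, assuming blas64, lapack and lapack64 are imported:
+//
+//    m, n := 4, 3
+//    a := blas64.General{Rows: m, Cols: n, Stride: n, Data: []float64{
+//        1, 0, 0,
+//        0, 2, 0,
+//        0, 0, 3,
+//        1, 1, 1,
+//    }}
+//    s := make([]float64, n) // singular values, descending on return
+//    u := blas64.General{Rows: m, Cols: n, Stride: n, Data: make([]float64, m*n)}
+//    vt := blas64.General{Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n)}
+//    work := make([]float64, 15) // max(5*min(m,n), 3*min(m,n)+max(m,n)) for m=4, n=3
+//    ok := lapack64.Gesvd(lapack.SVDStore, lapack.SVDStore, a, u, vt, s, work, len(work))
+//    _ = ok // a has been overwritten; u holds U and vt holds V^T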
+// +// Getri will not perform the inversion if the matrix is singular, and returns +// a boolean indicating whether the inversion was successful. +// +// Work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= n and this function will panic otherwise. +// Getri is a blocked inversion, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Getri, +// the optimal work length will be stored into work[0]. +func Getri(a blas64.General, ipiv []int, work []float64, lwork int) (ok bool) { + return lapack64.Dgetri(a.Cols, a.Data, max(1, a.Stride), ipiv, work, lwork) +} + +// Getrs solves a system of equations using an LU factorization. +// The system of equations solved is +// A * X = B if trans == blas.Trans +// A^T * X = B if trans == blas.NoTrans +// A is a general n×n matrix with stride lda. B is a general matrix of size n×nrhs. +// +// On entry b contains the elements of the matrix B. On exit, b contains the +// elements of X, the solution to the system of equations. +// +// a and ipiv contain the LU factorization of A and the permutation indices as +// computed by Getrf. ipiv is zero-indexed. +func Getrs(trans blas.Transpose, a blas64.General, b blas64.General, ipiv []int) { + lapack64.Dgetrs(trans, a.Cols, b.Cols, a.Data, max(1, a.Stride), ipiv, b.Data, max(1, b.Stride)) +} + +// Ggsvd3 computes the generalized singular value decomposition (GSVD) +// of an m×n matrix A and p×n matrix B: +// U^T*A*Q = D1*[ 0 R ] +// +// V^T*B*Q = D2*[ 0 R ] +// where U, V and Q are orthogonal matrices. +// +// Ggsvd3 returns k and l, the dimensions of the sub-blocks. k+l +// is the effective numerical rank of the (m+p)×n matrix [ A^T B^T ]^T. +// R is a (k+l)×(k+l) nonsingular upper triangular matrix, D1 and +// D2 are m×(k+l) and p×(k+l) diagonal matrices and of the following +// structures, respectively: +// +// If m-k-l >= 0, +// +// k l +// D1 = k [ I 0 ] +// l [ 0 C ] +// m-k-l [ 0 0 ] +// +// k l +// D2 = l [ 0 S ] +// p-l [ 0 0 ] +// +// n-k-l k l +// [ 0 R ] = k [ 0 R11 R12 ] k +// l [ 0 0 R22 ] l +// +// where +// +// C = diag( alpha_k, ... , alpha_{k+l} ), +// S = diag( beta_k, ... , beta_{k+l} ), +// C^2 + S^2 = I. +// +// R is stored in +// A[0:k+l, n-k-l:n] +// on exit. +// +// If m-k-l < 0, +// +// k m-k k+l-m +// D1 = k [ I 0 0 ] +// m-k [ 0 C 0 ] +// +// k m-k k+l-m +// D2 = m-k [ 0 S 0 ] +// k+l-m [ 0 0 I ] +// p-l [ 0 0 0 ] +// +// n-k-l k m-k k+l-m +// [ 0 R ] = k [ 0 R11 R12 R13 ] +// m-k [ 0 0 R22 R23 ] +// k+l-m [ 0 0 0 R33 ] +// +// where +// C = diag( alpha_k, ... , alpha_m ), +// S = diag( beta_k, ... , beta_m ), +// C^2 + S^2 = I. +// +// R = [ R11 R12 R13 ] is stored in A[1:m, n-k-l+1:n] +// [ 0 R22 R23 ] +// and R33 is stored in +// B[m-k:l, n+m-k-l:n] on exit. +// +// Ggsvd3 computes C, S, R, and optionally the orthogonal transformation +// matrices U, V and Q. +// +// jobU, jobV and jobQ are options for computing the orthogonal matrices. The behavior +// is as follows +// jobU == lapack.GSVDU Compute orthogonal matrix U +// jobU == lapack.GSVDNone Do not compute orthogonal matrix. +// The behavior is the same for jobV and jobQ with the exception that instead of +// lapack.GSVDU these accept lapack.GSVDV and lapack.GSVDQ respectively. +// The matrices U, V and Q must be m×m, p×p and n×n respectively unless the +// relevant job parameter is lapack.GSVDNone. +// +// alpha and beta must have length n or Ggsvd3 will panic. 
On exit, alpha and +// beta contain the generalized singular value pairs of A and B +// alpha[0:k] = 1, +// beta[0:k] = 0, +// if m-k-l >= 0, +// alpha[k:k+l] = diag(C), +// beta[k:k+l] = diag(S), +// if m-k-l < 0, +// alpha[k:m]= C, alpha[m:k+l]= 0 +// beta[k:m] = S, beta[m:k+l] = 1. +// if k+l < n, +// alpha[k+l:n] = 0 and +// beta[k+l:n] = 0. +// +// On exit, iwork contains the permutation required to sort alpha descending. +// +// iwork must have length n, work must have length at least max(1, lwork), and +// lwork must be -1 or greater than n, otherwise Ggsvd3 will panic. If +// lwork is -1, work[0] holds the optimal lwork on return, but Ggsvd3 does +// not perform the GSVD. +func Ggsvd3(jobU, jobV, jobQ lapack.GSVDJob, a, b blas64.General, alpha, beta []float64, u, v, q blas64.General, work []float64, lwork int, iwork []int) (k, l int, ok bool) { + return lapack64.Dggsvd3(jobU, jobV, jobQ, a.Rows, a.Cols, b.Rows, a.Data, max(1, a.Stride), b.Data, max(1, b.Stride), alpha, beta, u.Data, max(1, u.Stride), v.Data, max(1, v.Stride), q.Data, max(1, q.Stride), work, lwork, iwork) +} + +// Lange computes the matrix norm of the general m×n matrix A. The input norm +// specifies the norm computed. +// lapack.MaxAbs: the maximum absolute value of an element. +// lapack.MaxColumnSum: the maximum column sum of the absolute values of the entries. +// lapack.MaxRowSum: the maximum row sum of the absolute values of the entries. +// lapack.Frobenius: the square root of the sum of the squares of the entries. +// If norm == lapack.MaxColumnSum, work must be of length n, and this function will panic otherwise. +// There are no restrictions on work for the other matrix norms. +func Lange(norm lapack.MatrixNorm, a blas64.General, work []float64) float64 { + return lapack64.Dlange(norm, a.Rows, a.Cols, a.Data, max(1, a.Stride), work) +} + +// Lansy computes the specified norm of an n×n symmetric matrix. If +// norm == lapack.MaxColumnSum or norm == lapackMaxRowSum work must have length +// at least n and this function will panic otherwise. +// There are no restrictions on work for the other matrix norms. +func Lansy(norm lapack.MatrixNorm, a blas64.Symmetric, work []float64) float64 { + return lapack64.Dlansy(norm, a.Uplo, a.N, a.Data, max(1, a.Stride), work) +} + +// Lantr computes the specified norm of an m×n trapezoidal matrix A. If +// norm == lapack.MaxColumnSum work must have length at least n and this function +// will panic otherwise. There are no restrictions on work for the other matrix norms. +func Lantr(norm lapack.MatrixNorm, a blas64.Triangular, work []float64) float64 { + return lapack64.Dlantr(norm, a.Uplo, a.Diag, a.N, a.N, a.Data, max(1, a.Stride), work) +} + +// Lapmt rearranges the columns of the m×n matrix X as specified by the +// permutation k_0, k_1, ..., k_{n-1} of the integers 0, ..., n-1. +// +// If forward is true a forward permutation is performed: +// +// X[0:m, k[j]] is moved to X[0:m, j] for j = 0, 1, ..., n-1. +// +// otherwise a backward permutation is performed: +// +// X[0:m, j] is moved to X[0:m, k[j]] for j = 0, 1, ..., n-1. +// +// k must have length n, otherwise Lapmt will panic. k is zero-indexed. +func Lapmt(forward bool, x blas64.General, k []int) { + lapack64.Dlapmt(forward, x.Rows, x.Cols, x.Data, max(1, x.Stride), k) +} + +// Ormlq multiplies the matrix C by the othogonal matrix Q defined by +// A and tau. A and tau are as returned from Gelqf. 
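+//
+// Editor's note (a sketch, not part of the upstream gonum source): the norm
+// functions above share the same conventions; Lange with made-up 2×3 data,
+// assuming blas64, lapack and lapack64 are imported:
+//
+//    a := blas64.General{Rows: 2, Cols: 3, Stride: 3, Data: []float64{
+//        1, -2, 3,
+//        -4, 5, -6,
+//    }}
+//    work := make([]float64, a.Cols)                       // needed only for MaxColumnSum
+//    norm1 := lapack64.Lange(lapack.MaxColumnSum, a, work) // 9
+//    normInf := lapack64.Lange(lapack.MaxRowSum, a, nil)   // 15
+//    _, _ = norm1, normInf
+//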
+// C = Q * C if side == blas.Left and trans == blas.NoTrans +// C = Q^T * C if side == blas.Left and trans == blas.Trans +// C = C * Q if side == blas.Right and trans == blas.NoTrans +// C = C * Q^T if side == blas.Right and trans == blas.Trans +// If side == blas.Left, A is a matrix of side k×m, and if side == blas.Right +// A is of size k×n. This uses a blocked algorithm. +// +// Work is temporary storage, and lwork specifies the usable memory length. +// At minimum, lwork >= m if side == blas.Left and lwork >= n if side == blas.Right, +// and this function will panic otherwise. +// Ormlq uses a block algorithm, but the block size is limited +// by the temporary space available. If lwork == -1, instead of performing Ormlq, +// the optimal work length will be stored into work[0]. +// +// Tau contains the Householder scales and must have length at least k, and +// this function will panic otherwise. +func Ormlq(side blas.Side, trans blas.Transpose, a blas64.General, tau []float64, c blas64.General, work []float64, lwork int) { + lapack64.Dormlq(side, trans, c.Rows, c.Cols, a.Rows, a.Data, max(1, a.Stride), tau, c.Data, max(1, c.Stride), work, lwork) +} + +// Ormqr multiplies an m×n matrix C by an orthogonal matrix Q as +// C = Q * C, if side == blas.Left and trans == blas.NoTrans, +// C = Q^T * C, if side == blas.Left and trans == blas.Trans, +// C = C * Q, if side == blas.Right and trans == blas.NoTrans, +// C = C * Q^T, if side == blas.Right and trans == blas.Trans, +// where Q is defined as the product of k elementary reflectors +// Q = H_0 * H_1 * ... * H_{k-1}. +// +// If side == blas.Left, A is an m×k matrix and 0 <= k <= m. +// If side == blas.Right, A is an n×k matrix and 0 <= k <= n. +// The ith column of A contains the vector which defines the elementary +// reflector H_i and tau[i] contains its scalar factor. tau must have length k +// and Ormqr will panic otherwise. Geqrf returns A and tau in the required +// form. +// +// work must have length at least max(1,lwork), and lwork must be at least n if +// side == blas.Left and at least m if side == blas.Right, otherwise Ormqr will +// panic. +// +// work is temporary storage, and lwork specifies the usable memory length. At +// minimum, lwork >= m if side == blas.Left and lwork >= n if side == +// blas.Right, and this function will panic otherwise. Larger values of lwork +// will generally give better performance. On return, work[0] will contain the +// optimal value of lwork. +// +// If lwork is -1, instead of performing Ormqr, the optimal workspace size will +// be stored into work[0]. +func Ormqr(side blas.Side, trans blas.Transpose, a blas64.General, tau []float64, c blas64.General, work []float64, lwork int) { + lapack64.Dormqr(side, trans, c.Rows, c.Cols, a.Cols, a.Data, max(1, a.Stride), tau, c.Data, max(1, c.Stride), work, lwork) +} + +// Pocon estimates the reciprocal of the condition number of a positive-definite +// matrix A given the Cholesky decmposition of A. The condition number computed +// is based on the 1-norm and the ∞-norm. +// +// anorm is the 1-norm and the ∞-norm of the original matrix A. +// +// work is a temporary data slice of length at least 3*n and Pocon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Pocon will panic otherwise. 
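+//
+// Editor's note (a sketch, not part of the upstream gonum source): a typical
+// reciprocal condition number estimate in the 1-norm combines Lansy, Potrf
+// and Pocon. The 3×3 values are made up, and blas, blas64, lapack and
+// lapack64 are assumed to be imported:
+//
+//    a := blas64.Symmetric{N: 3, Stride: 3, Uplo: blas.Upper, Data: []float64{
+//        4, 2, 0,
+//        0, 3, 1,
+//        0, 0, 5,
+//    }}
+//    anorm := lapack64.Lansy(lapack.MaxColumnSum, a, make([]float64, a.N))
+//    if _, ok := lapack64.Potrf(a); ok { // a.Data now holds the Cholesky factor
+//        rcond := lapack64.Pocon(a, anorm, make([]float64, 3*a.N), make([]int, a.N))
+//        _ = rcond // reciprocal condition number of A in the 1-norm
+//    }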
+func Pocon(a blas64.Symmetric, anorm float64, work []float64, iwork []int) float64 { + return lapack64.Dpocon(a.Uplo, a.N, a.Data, max(1, a.Stride), anorm, work, iwork) +} + +// Syev computes all eigenvalues and, optionally, the eigenvectors of a real +// symmetric matrix A. +// +// w contains the eigenvalues in ascending order upon return. w must have length +// at least n, and Syev will panic otherwise. +// +// On entry, a contains the elements of the symmetric matrix A in the triangular +// portion specified by uplo. If jobz == lapack.EVCompute, a contains the +// orthonormal eigenvectors of A on exit, otherwise jobz must be lapack.EVNone +// and on exit the specified triangular region is overwritten. +// +// Work is temporary storage, and lwork specifies the usable memory length. At minimum, +// lwork >= 3*n-1, and Syev will panic otherwise. The amount of blocking is +// limited by the usable length. If lwork == -1, instead of computing Syev the +// optimal work length is stored into work[0]. +func Syev(jobz lapack.EVJob, a blas64.Symmetric, w, work []float64, lwork int) (ok bool) { + return lapack64.Dsyev(jobz, a.Uplo, a.N, a.Data, max(1, a.Stride), w, work, lwork) +} + +// Trcon estimates the reciprocal of the condition number of a triangular matrix A. +// The condition number computed may be based on the 1-norm or the ∞-norm. +// +// work is a temporary data slice of length at least 3*n and Trcon will panic otherwise. +// +// iwork is a temporary data slice of length at least n and Trcon will panic otherwise. +func Trcon(norm lapack.MatrixNorm, a blas64.Triangular, work []float64, iwork []int) float64 { + return lapack64.Dtrcon(norm, a.Uplo, a.Diag, a.N, a.Data, max(1, a.Stride), work, iwork) +} + +// Trtri computes the inverse of a triangular matrix, storing the result in place +// into a. +// +// Trtri will not perform the inversion if the matrix is singular, and returns +// a boolean indicating whether the inversion was successful. +func Trtri(a blas64.Triangular) (ok bool) { + return lapack64.Dtrtri(a.Uplo, a.Diag, a.N, a.Data, max(1, a.Stride)) +} + +// Trtrs solves a triangular system of the form A * X = B or A^T * X = B. Trtrs +// returns whether the solve completed successfully. If A is singular, no solve is performed. +func Trtrs(trans blas.Transpose, a blas64.Triangular, b blas64.General) (ok bool) { + return lapack64.Dtrtrs(a.Uplo, trans, a.Diag, a.N, b.Cols, a.Data, max(1, a.Stride), b.Data, max(1, b.Stride)) +} + +// Geev computes the eigenvalues and, optionally, the left and/or right +// eigenvectors for an n×n real nonsymmetric matrix A. +// +// The right eigenvector v_j of A corresponding to an eigenvalue λ_j +// is defined by +// A v_j = λ_j v_j, +// and the left eigenvector u_j corresponding to an eigenvalue λ_j is defined by +// u_j^H A = λ_j u_j^H, +// where u_j^H is the conjugate transpose of u_j. +// +// On return, A will be overwritten and the left and right eigenvectors will be +// stored, respectively, in the columns of the n×n matrices VL and VR in the +// same order as their eigenvalues. If the j-th eigenvalue is real, then +// u_j = VL[:,j], +// v_j = VR[:,j], +// and if it is not real, then j and j+1 form a complex conjugate pair and the +// eigenvectors can be recovered as +// u_j = VL[:,j] + i*VL[:,j+1], +// u_{j+1} = VL[:,j] - i*VL[:,j+1], +// v_j = VR[:,j] + i*VR[:,j+1], +// v_{j+1} = VR[:,j] - i*VR[:,j+1], +// where i is the imaginary unit. The computed eigenvectors are normalized to +// have Euclidean norm equal to 1 and largest component real. 
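+//
+// Editor's note (a sketch, not part of the upstream gonum source): Syev above
+// follows the same workspace-query convention as the other wrappers; made-up
+// 3×3 data, assuming blas, blas64, lapack and lapack64 are imported:
+//
+//    a := blas64.Symmetric{N: 3, Stride: 3, Uplo: blas.Upper, Data: []float64{
+//        2, -1, 0,
+//        0, 2, -1,
+//        0, 0, 2,
+//    }}
+//    w := make([]float64, a.N) // eigenvalues, ascending on return
+//    work := make([]float64, 1)
+//    lapack64.Syev(lapack.EVCompute, a, w, work, -1) // workspace query
+//    work = make([]float64, int(work[0]))
+//    ok := lapack64.Syev(lapack.EVCompute, a, w, work, len(work))
+//    _ = ok // on success the eigenvectors overwrite a.Data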
+// +// Left eigenvectors will be computed only if jobvl == lapack.LeftEVCompute, +// otherwise jobvl must be lapack.LeftEVNone. +// Right eigenvectors will be computed only if jobvr == lapack.RightEVCompute, +// otherwise jobvr must be lapack.RightEVNone. +// For other values of jobvl and jobvr Geev will panic. +// +// On return, wr and wi will contain the real and imaginary parts, respectively, +// of the computed eigenvalues. Complex conjugate pairs of eigenvalues appear +// consecutively with the eigenvalue having the positive imaginary part first. +// wr and wi must have length n, and Geev will panic otherwise. +// +// work must have length at least lwork and lwork must be at least max(1,4*n) if +// the left or right eigenvectors are computed, and at least max(1,3*n) if no +// eigenvectors are computed. For good performance, lwork must generally be +// larger. On return, optimal value of lwork will be stored in work[0]. +// +// If lwork == -1, instead of performing Geev, the function only calculates the +// optimal vaule of lwork and stores it into work[0]. +// +// On return, first will be the index of the first valid eigenvalue. +// If first == 0, all eigenvalues and eigenvectors have been computed. +// If first is positive, Geev failed to compute all the eigenvalues, no +// eigenvectors have been computed and wr[first:] and wi[first:] contain those +// eigenvalues which have converged. +func Geev(jobvl lapack.LeftEVJob, jobvr lapack.RightEVJob, a blas64.General, wr, wi []float64, vl, vr blas64.General, work []float64, lwork int) (first int) { + n := a.Rows + if a.Cols != n { + panic("lapack64: matrix not square") + } + if jobvl == lapack.LeftEVCompute && (vl.Rows != n || vl.Cols != n) { + panic("lapack64: bad size of VL") + } + if jobvr == lapack.RightEVCompute && (vr.Rows != n || vr.Cols != n) { + panic("lapack64: bad size of VR") + } + return lapack64.Dgeev(jobvl, jobvr, n, a.Data, max(1, a.Stride), wr, wi, vl.Data, max(1, vl.Stride), vr.Data, max(1, vr.Stride), work, lwork) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dbdsqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dbdsqr.go new file mode 100644 index 0000000..71bbc87 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dbdsqr.go @@ -0,0 +1,198 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
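+//
+// Editor's note (a sketch, not part of the upstream gonum source, placed here
+// only because it follows the Geev wrapper above): computing only the
+// eigenvalues of a general matrix with made-up 2×2 data, assuming blas64,
+// lapack and lapack64 are imported; empty matrices are passed for the unused
+// VL and VR since neither job requests eigenvectors:
+//
+//    a := blas64.General{Rows: 2, Cols: 2, Stride: 2, Data: []float64{
+//        0, 1,
+//        -2, -3,
+//    }}
+//    wr := make([]float64, 2)
+//    wi := make([]float64, 2)
+//    work := make([]float64, 3*a.Rows) // max(1, 3*n) when no eigenvectors are requested
+//    first := lapack64.Geev(lapack.LeftEVNone, lapack.RightEVNone, a,
+//        wr, wi, blas64.General{}, blas64.General{}, work, len(work))
+//    if first == 0 {
+//        // All eigenvalues converged: wr = {-1, -2}, wi = {0, 0} (up to ordering).
+//    }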
+ +package testlapack + +import ( + "fmt" + "sort" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dbdsqrer interface { + Dbdsqr(uplo blas.Uplo, n, ncvt, nru, ncc int, d, e, vt []float64, ldvt int, u []float64, ldu int, c []float64, ldc int, work []float64) (ok bool) +} + +func DbdsqrTest(t *testing.T, impl Dbdsqrer) { + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, test := range []struct { + n, ncvt, nru, ncc, ldvt, ldu, ldc int + }{ + {5, 5, 5, 5, 0, 0, 0}, + {10, 10, 10, 10, 0, 0, 0}, + {10, 11, 12, 13, 0, 0, 0}, + {20, 13, 12, 11, 0, 0, 0}, + + {5, 5, 5, 5, 6, 7, 8}, + {10, 10, 10, 10, 30, 40, 50}, + {10, 12, 11, 13, 30, 40, 50}, + {20, 12, 13, 11, 30, 40, 50}, + + {130, 130, 130, 500, 900, 900, 500}, + } { + for cas := 0; cas < 10; cas++ { + n := test.n + ncvt := test.ncvt + nru := test.nru + ncc := test.ncc + ldvt := test.ldvt + ldu := test.ldu + ldc := test.ldc + if ldvt == 0 { + ldvt = max(1, ncvt) + } + if ldu == 0 { + ldu = max(1, n) + } + if ldc == 0 { + ldc = max(1, ncc) + } + + d := make([]float64, n) + for i := range d { + d[i] = rnd.NormFloat64() + } + e := make([]float64, n-1) + for i := range e { + e[i] = rnd.NormFloat64() + } + dCopy := make([]float64, len(d)) + copy(dCopy, d) + eCopy := make([]float64, len(e)) + copy(eCopy, e) + work := make([]float64, 4*(n-1)) + for i := range work { + work[i] = rnd.NormFloat64() + } + + // First test the decomposition of the bidiagonal matrix. Set + // pt and u equal to I with the correct size. At the result + // of Dbdsqr, p and u will contain the data of P^T and Q, which + // will be used in the next step to test the multiplication + // with Q and VT. + + q := make([]float64, n*n) + ldq := n + pt := make([]float64, n*n) + ldpt := n + for i := 0; i < n; i++ { + q[i*ldq+i] = 1 + } + for i := 0; i < n; i++ { + pt[i*ldpt+i] = 1 + } + + ok := impl.Dbdsqr(uplo, n, n, n, 0, d, e, pt, ldpt, q, ldq, nil, 1, work) + + isUpper := uplo == blas.Upper + errStr := fmt.Sprintf("isUpper = %v, n = %v, ncvt = %v, nru = %v, ncc = %v", isUpper, n, ncvt, nru, ncc) + if !ok { + t.Errorf("Unexpected Dbdsqr failure: %s", errStr) + } + + bMat := constructBidiagonal(uplo, n, dCopy, eCopy) + sMat := constructBidiagonal(uplo, n, d, e) + + tmp := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + ansMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, q, ldq, sMat.Data, sMat.Stride, 0, tmp.Data, tmp.Stride) + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, tmp.Data, tmp.Stride, pt, ldpt, 0, ansMat.Data, ansMat.Stride) + + same := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if !floats.EqualWithinAbsOrRel(ansMat.Data[i*ansMat.Stride+j], bMat.Data[i*bMat.Stride+j], 1e-8, 1e-8) { + same = false + } + } + } + if !same { + t.Errorf("Bidiagonal mismatch. %s", errStr) + } + if !sort.IsSorted(sort.Reverse(sort.Float64Slice(d))) { + t.Errorf("D is not sorted. %s", errStr) + } + + // The above computed the real P and Q. Now input data for V^T, + // U, and C to check that the multiplications happen properly. 
+ dAns := make([]float64, len(d)) + copy(dAns, d) + eAns := make([]float64, len(e)) + copy(eAns, e) + + u := make([]float64, nru*ldu) + for i := range u { + u[i] = rnd.NormFloat64() + } + uCopy := make([]float64, len(u)) + copy(uCopy, u) + vt := make([]float64, n*ldvt) + for i := range vt { + vt[i] = rnd.NormFloat64() + } + vtCopy := make([]float64, len(vt)) + copy(vtCopy, vt) + c := make([]float64, n*ldc) + for i := range c { + c[i] = rnd.NormFloat64() + } + cCopy := make([]float64, len(c)) + copy(cCopy, c) + + // Reset input data + copy(d, dCopy) + copy(e, eCopy) + impl.Dbdsqr(uplo, n, ncvt, nru, ncc, d, e, vt, ldvt, u, ldu, c, ldc, work) + + // Check result. + if !floats.EqualApprox(d, dAns, 1e-14) { + t.Errorf("D mismatch second time. %s", errStr) + } + if !floats.EqualApprox(e, eAns, 1e-14) { + t.Errorf("E mismatch second time. %s", errStr) + } + ans := make([]float64, len(vtCopy)) + copy(ans, vtCopy) + ldans := ldvt + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, ncvt, n, 1, pt, ldpt, vtCopy, ldvt, 0, ans, ldans) + if !floats.EqualApprox(ans, vt, 1e-10) { + t.Errorf("Vt result mismatch. %s", errStr) + } + ans = make([]float64, len(uCopy)) + copy(ans, uCopy) + ldans = ldu + bi.Dgemm(blas.NoTrans, blas.NoTrans, nru, n, n, 1, uCopy, ldu, q, ldq, 0, ans, ldans) + if !floats.EqualApprox(ans, u, 1e-10) { + t.Errorf("U result mismatch. %s", errStr) + } + ans = make([]float64, len(cCopy)) + copy(ans, cCopy) + ldans = ldc + bi.Dgemm(blas.Trans, blas.NoTrans, n, ncc, n, 1, q, ldq, cCopy, ldc, 0, ans, ldans) + if !floats.EqualApprox(ans, c, 1e-10) { + t.Errorf("C result mismatch. %s", errStr) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebak.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebak.go new file mode 100644 index 0000000..1e19395 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebak.go @@ -0,0 +1,109 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +type Dgebaker interface { + Dgebak(job lapack.BalanceJob, side lapack.EVSide, n, ilo, ihi int, scale []float64, m int, v []float64, ldv int) +} + +func DgebakTest(t *testing.T, impl Dgebaker) { + rnd := rand.New(rand.NewSource(1)) + + for _, job := range []lapack.BalanceJob{lapack.BalanceNone, lapack.Permute, lapack.Scale, lapack.PermuteScale} { + for _, side := range []lapack.EVSide{lapack.EVLeft, lapack.EVRight} { + for _, n := range []int{0, 1, 2, 3, 4, 5, 6, 10, 18, 31, 53} { + for _, extra := range []int{0, 11} { + for cas := 0; cas < 100; cas++ { + m := rnd.Intn(n + 1) + v := randomGeneral(n, m, m+extra, rnd) + var ilo, ihi int + if v.Rows > 0 { + ihi = rnd.Intn(n) + ilo = rnd.Intn(ihi + 1) + } else { + ihi = -1 + } + testDgebak(t, impl, job, side, ilo, ihi, v, rnd) + } + } + } + } + } +} + +func testDgebak(t *testing.T, impl Dgebaker, job lapack.BalanceJob, side lapack.EVSide, ilo, ihi int, v blas64.General, rnd *rand.Rand) { + const tol = 1e-15 + n := v.Rows + m := v.Cols + extra := v.Stride - v.Cols + + // Create D and D^{-1} by generating random scales between ilo and ihi. 
+ d := eye(n, n) + dinv := eye(n, n) + scale := nanSlice(n) + if job == lapack.Scale || job == lapack.PermuteScale { + if ilo == ihi { + scale[ilo] = 1 + } else { + for i := ilo; i <= ihi; i++ { + scale[i] = 2 * rnd.Float64() + d.Data[i*d.Stride+i] = scale[i] + dinv.Data[i*dinv.Stride+i] = 1 / scale[i] + } + } + } + + // Create P by generating random column swaps. + p := eye(n, n) + if job == lapack.Permute || job == lapack.PermuteScale { + // Make up some random permutations. + for i := n - 1; i > ihi; i-- { + scale[i] = float64(rnd.Intn(i + 1)) + blas64.Swap(blas64.Vector{N: n, Data: p.Data[i:], Inc: p.Stride}, + blas64.Vector{N: n, Data: p.Data[int(scale[i]):], Inc: p.Stride}) + } + for i := 0; i < ilo; i++ { + scale[i] = float64(i + rnd.Intn(ihi-i+1)) + blas64.Swap(blas64.Vector{N: n, Data: p.Data[i:], Inc: p.Stride}, + blas64.Vector{N: n, Data: p.Data[int(scale[i]):], Inc: p.Stride}) + } + } + + got := cloneGeneral(v) + impl.Dgebak(job, side, n, ilo, ihi, scale, m, got.Data, got.Stride) + + prefix := fmt.Sprintf("Case job=%v, side=%v, n=%v, ilo=%v, ihi=%v, m=%v, extra=%v", + job, side, n, ilo, ihi, m, extra) + + if !generalOutsideAllNaN(got) { + t.Errorf("%v: out-of-range write to V\n%v", prefix, got.Data) + } + + // Compute D*V or D^{-1}*V and store into dv. + dv := zeros(n, m, m) + if side == lapack.EVRight { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, d, v, 0, dv) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, dinv, v, 0, dv) + } + // Compute P*D*V or P*D^{-1}*V and store into want. + want := zeros(n, m, m) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, p, dv, 0, want) + + if !equalApproxGeneral(want, got, tol) { + t.Errorf("%v: unexpected value of V", prefix) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebal.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebal.go new file mode 100644 index 0000000..d41bd0c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebal.go @@ -0,0 +1,174 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +type Dgebaler interface { + Dgebal(job lapack.BalanceJob, n int, a []float64, lda int, scale []float64) (int, int) +} + +func DgebalTest(t *testing.T, impl Dgebaler) { + rnd := rand.New(rand.NewSource(1)) + + for _, job := range []lapack.BalanceJob{lapack.BalanceNone, lapack.Permute, lapack.Scale, lapack.PermuteScale} { + for _, n := range []int{0, 1, 2, 3, 4, 5, 6, 10, 18, 31, 53, 100} { + for _, extra := range []int{0, 11} { + for cas := 0; cas < 100; cas++ { + a := unbalancedSparseGeneral(n, n, n+extra, 2*n, rnd) + testDgebal(t, impl, job, a) + } + } + } + } +} + +func testDgebal(t *testing.T, impl Dgebaler, job lapack.BalanceJob, a blas64.General) { + const tol = 1e-14 + + n := a.Rows + extra := a.Stride - n + + var scale []float64 + if n > 0 { + scale = nanSlice(n) + } + + want := cloneGeneral(a) + + ilo, ihi := impl.Dgebal(job, n, a.Data, a.Stride, scale) + + prefix := fmt.Sprintf("Case job=%v, n=%v, extra=%v", job, n, extra) + + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + + if n == 0 { + if ilo != 0 { + t.Errorf("%v: unexpected ilo when n=0. Want 0, got %v", prefix, ilo) + } + if ihi != -1 { + t.Errorf("%v: unexpected ihi when n=0. 
Want -1, got %v", prefix, ihi) + } + return + } + + if job == lapack.BalanceNone { + if ilo != 0 { + t.Errorf("%v: unexpected ilo when job=BalanceNone. Want 0, got %v", prefix, ilo) + } + if ihi != n-1 { + t.Errorf("%v: unexpected ihi when job=BalanceNone. Want %v, got %v", prefix, n-1, ihi) + } + k := -1 + for i := range scale { + if scale[i] != 1 { + k = i + break + } + } + if k != -1 { + t.Errorf("%v: unexpected scale[%v] when job=BalanceNone. Want 1, got %v", prefix, k, scale[k]) + } + if !equalApproxGeneral(a, want, 0) { + t.Errorf("%v: unexpected modification of A when job=BalanceNone", prefix) + } + return + } + + if ilo < 0 || ihi < ilo || n <= ihi { + t.Errorf("%v: invalid ordering of ilo=%v and ihi=%v", prefix, ilo, ihi) + } + + if ilo >= 2 && !isUpperTriangular(blas64.General{Rows: ilo - 1, Cols: ilo - 1, Data: a.Data, Stride: a.Stride}) { + t.Errorf("%v: T1 is not upper triangular", prefix) + } + m := n - ihi - 1 // Order of T2. + k := ihi + 1 + if m >= 2 && !isUpperTriangular(blas64.General{Rows: m, Cols: m, Data: a.Data[k*a.Stride+k:], Stride: a.Stride}) { + t.Errorf("%v: T2 is not upper triangular", prefix) + } + + if job == lapack.Permute || job == lapack.PermuteScale { + // Check that all rows in [ilo:ihi+1] have at least one nonzero + // off-diagonal element. + zeroRow := -1 + for i := ilo; i <= ihi; i++ { + onlyZeros := true + for j := ilo; j <= ihi; j++ { + if i != j && a.Data[i*a.Stride+j] != 0 { + onlyZeros = false + break + } + } + if onlyZeros { + zeroRow = i + break + } + } + if zeroRow != -1 && ilo != ihi { + t.Errorf("%v: row %v has only zero off-diagonal elements, ilo=%v, ihi=%v", prefix, zeroRow, ilo, ihi) + } + // Check that all columns in [ilo:ihi+1] have at least one nonzero + // off-diagonal element. + zeroCol := -1 + for j := ilo; j <= ihi; j++ { + onlyZeros := true + for i := ilo; i <= ihi; i++ { + if i != j && a.Data[i*a.Stride+j] != 0 { + onlyZeros = false + break + } + } + if onlyZeros { + zeroCol = j + break + } + } + if zeroCol != -1 && ilo != ihi { + t.Errorf("%v: column %v has only zero off-diagonal elements, ilo=%v, ihi=%v", prefix, zeroCol, ilo, ihi) + } + + // Create the permutation matrix P. + p := eye(n, n) + for j := n - 1; j > ihi; j-- { + blas64.Swap(blas64.Vector{N: n, Data: p.Data[j:], Inc: p.Stride}, + blas64.Vector{N: n, Data: p.Data[int(scale[j]):], Inc: p.Stride}) + } + for j := 0; j < ilo; j++ { + blas64.Swap(blas64.Vector{N: n, Data: p.Data[j:], Inc: p.Stride}, + blas64.Vector{N: n, Data: p.Data[int(scale[j]):], Inc: p.Stride}) + } + // Compute P^T*A*P and store into want. + ap := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, want, p, 0, ap) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, p, ap, 0, want) + } + if job == lapack.Scale || job == lapack.PermuteScale { + // Modify want by D and D^{-1}. + d := eye(n, n) + dinv := eye(n, n) + for i := ilo; i <= ihi; i++ { + d.Data[i*d.Stride+i] = scale[i] + dinv.Data[i*dinv.Stride+i] = 1 / scale[i] + } + ad := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, want, d, 0, ad) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, dinv, ad, 0, want) + } + if !equalApproxGeneral(want, a, tol) { + t.Errorf("%v: unexpected value of A, ilo=%v, ihi=%v", prefix, ilo, ihi) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebd2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebd2.go new file mode 100644 index 0000000..6a23d14 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebd2.go @@ -0,0 +1,60 @@ +// Copyright ©2015 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" +) + +type Dgebd2er interface { + Dgebd2(m, n int, a []float64, lda int, d, e, tauq, taup, work []float64) +} + +func Dgebd2Test(t *testing.T, impl Dgebd2er) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {3, 4, 0}, + {4, 3, 0}, + {3, 4, 10}, + {4, 3, 10}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate m×n matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + // Allocate slices for the main and off diagonal. + nb := min(m, n) + d := nanSlice(nb) + e := nanSlice(nb - 1) + // Allocate slices for scalar factors of elementary reflectors + // and fill them with NaNs. + tauP := nanSlice(nb) + tauQ := nanSlice(nb) + // Allocate workspace. + work := nanSlice(max(m, n)) + + // Reduce A to upper or lower bidiagonal form by an orthogonal + // transformation. + impl.Dgebd2(m, n, a, lda, d, e, tauQ, tauP, work) + + // Check that it holds Q^T * A * P = B where B is represented by + // d and e. + checkBidiagonal(t, m, n, nb, a, lda, d, e, tauP, tauQ, aCopy) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebrd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebrd.go new file mode 100644 index 0000000..f9c1e82 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgebrd.go @@ -0,0 +1,152 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dgebrder interface { + Dgebrd(m, n int, a []float64, lda int, d, e, tauQ, tauP, work []float64, lwork int) + Dgebd2er +} + +func DgebrdTest(t *testing.T, impl Dgebrder) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {100, 100, 0}, + {100, 150, 0}, + {150, 100, 0}, + {100, 100, 200}, + {100, 150, 200}, + {150, 100, 200}, + + {300, 300, 0}, + {300, 400, 0}, + {400, 300, 0}, + {300, 300, 500}, + {300, 400, 500}, + {300, 400, 500}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + minmn := min(m, n) + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + + d := make([]float64, minmn) + e := make([]float64, minmn-1) + tauP := make([]float64, minmn) + tauQ := make([]float64, minmn) + work := make([]float64, max(m, n)) + for i := range work { + work[i] = math.NaN() + } + + // Store a. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Compute the true answer with the unblocked algorithm. + impl.Dgebd2(m, n, a, lda, d, e, tauQ, tauP, work) + aAns := make([]float64, len(a)) + copy(aAns, a) + dAns := make([]float64, len(d)) + copy(dAns, d) + eAns := make([]float64, len(e)) + copy(eAns, e) + tauQAns := make([]float64, len(tauQ)) + copy(tauQAns, tauQ) + tauPAns := make([]float64, len(tauP)) + copy(tauPAns, tauP) + + // Test with optimal work. 
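+ // (Calling Dgebrd with lwork = -1 is a workspace query: the routine only writes the optimal lwork into work[0], which is then used to size the real workspace below.)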
+ lwork := -1 + copy(a, aCopy) + impl.Dgebrd(m, n, a, lda, d, e, tauQ, tauP, work, lwork) + work = make([]float64, int(work[0])) + lwork = len(work) + for i := range work { + work[i] = math.NaN() + } + for i := range d { + d[i] = math.NaN() + } + for i := range e { + e[i] = math.NaN() + } + for i := range tauQ { + tauQ[i] = math.NaN() + } + for i := range tauP { + tauP[i] = math.NaN() + } + impl.Dgebrd(m, n, a, lda, d, e, tauQ, tauP, work, lwork) + + // Test answers + if !floats.EqualApprox(a, aAns, 1e-10) { + t.Errorf("a mismatch") + } + if !floats.EqualApprox(d, dAns, 1e-10) { + t.Errorf("d mismatch") + } + if !floats.EqualApprox(e, eAns, 1e-10) { + t.Errorf("e mismatch") + } + if !floats.EqualApprox(tauQ, tauQAns, 1e-10) { + t.Errorf("tauQ mismatch") + } + if !floats.EqualApprox(tauP, tauPAns, 1e-10) { + t.Errorf("tauP mismatch") + } + + // Test with shorter than optimal work. + lwork-- + copy(a, aCopy) + for i := range d { + d[i] = 0 + } + for i := range e { + e[i] = 0 + } + for i := range tauP { + tauP[i] = 0 + } + for i := range tauQ { + tauQ[i] = 0 + } + impl.Dgebrd(m, n, a, lda, d, e, tauQ, tauP, work, lwork) + + // Test answers + if !floats.EqualApprox(a, aAns, 1e-10) { + t.Errorf("a mismatch") + } + if !floats.EqualApprox(d, dAns, 1e-10) { + t.Errorf("d mismatch") + } + if !floats.EqualApprox(e, eAns, 1e-10) { + t.Errorf("e mismatch") + } + if !floats.EqualApprox(tauQ, tauQAns, 1e-10) { + t.Errorf("tauQ mismatch") + } + if !floats.EqualApprox(tauP, tauPAns, 1e-10) { + t.Errorf("tauP mismatch") + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgecon.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgecon.go new file mode 100644 index 0000000..fa9ff8e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgecon.go @@ -0,0 +1,96 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dgeconer interface { + Dlanger + Dgetrfer + Dgecon(norm lapack.MatrixNorm, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 +} + +func DgeconTest(t *testing.T, impl Dgeconer) { + for _, test := range []struct { + m int + n int + a []float64 + condOne float64 + condInf float64 + }{ + { + a: []float64{ + 8, 1, 6, + 3, 5, 7, + 4, 9, 2, + }, + m: 3, + n: 3, + condOne: 3.0 / 16, + condInf: 3.0 / 16, + }, + { + a: []float64{ + 2, 9, 3, 2, + 10, 9, 9, 3, + 1, 1, 5, 2, + 8, 4, 10, 2, + }, + m: 4, + n: 4, + condOne: 0.024740155174938, + condInf: 0.012034465570035, + }, + // Dgecon does not match Dpocon for this case. https://github.com/xianyi/OpenBLAS/issues/664. + { + a: []float64{ + 2.9995576045549965, -2.0898894566158663, 3.965560740124006, + -2.0898894566158663, 1.9634729526261008, -2.8681002706874104, + 3.965560740124006, -2.8681002706874104, 5.502416670471008, + }, + m: 3, + n: 3, + condOne: 0.024054837369015203, + condInf: 0.024054837369015203, + }, + } { + m := test.m + n := test.n + lda := n + a := make([]float64, len(test.a)) + copy(a, test.a) + ipiv := make([]int, min(m, n)) + + // Find the norms of the original matrix. + work := make([]float64, 4*n) + oneNorm := impl.Dlange(lapack.MaxColumnSum, m, n, a, lda, work) + infNorm := impl.Dlange(lapack.MaxRowSum, m, n, a, lda, work) + + // Compute LU factorization of a. 
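+ // Dgetrf overwrites a with its LU factors; the norms of the original matrix were computed above because Dgecon estimates the reciprocal condition number from the factored matrix together with that norm.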
+ impl.Dgetrf(m, n, a, lda, ipiv) + + // Compute the condition number + iwork := make([]int, n) + condOne := impl.Dgecon(lapack.MaxColumnSum, n, a, lda, oneNorm, work, iwork) + condInf := impl.Dgecon(lapack.MaxRowSum, n, a, lda, infNorm, work, iwork) + + // Error if not the same order, otherwise log the difference. + if !floats.EqualWithinAbsOrRel(condOne, test.condOne, 1e0, 1e0) { + t.Errorf("One norm mismatch. Want %v, got %v.", test.condOne, condOne) + } else if !floats.EqualWithinAbsOrRel(condOne, test.condOne, 1e-14, 1e-14) { + t.Logf("Dgecon one norm mismatch. Want %v, got %v.", test.condOne, condOne) + } + if !floats.EqualWithinAbsOrRel(condInf, test.condInf, 1e0, 1e0) { + t.Errorf("One norm mismatch. Want %v, got %v.", test.condInf, condInf) + } else if !floats.EqualWithinAbsOrRel(condInf, test.condInf, 1e-14, 1e-14) { + t.Logf("Dgecon one norm mismatch. Want %v, got %v.", test.condInf, condInf) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev.go new file mode 100644 index 0000000..bc67f9a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev.go @@ -0,0 +1,740 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "math/cmplx" + "strconv" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dgeever interface { + Dgeev(jobvl lapack.LeftEVJob, jobvr lapack.RightEVJob, n int, a []float64, lda int, + wr, wi []float64, vl []float64, ldvl int, vr []float64, ldvr int, work []float64, lwork int) int +} + +type dgeevTest struct { + a blas64.General + evWant []complex128 // If nil, the eigenvalues are not known. + valTol float64 // Tolerance for eigenvalue checks. + vecTol float64 // Tolerance for eigenvector checks. 
+} + +func DgeevTest(t *testing.T, impl Dgeever) { + rnd := rand.New(rand.NewSource(1)) + + for i, test := range []dgeevTest{ + { + a: A123{}.Matrix(), + evWant: A123{}.Eigenvalues(), + }, + + dgeevTestForAntisymRandom(10, rnd), + dgeevTestForAntisymRandom(11, rnd), + dgeevTestForAntisymRandom(50, rnd), + dgeevTestForAntisymRandom(51, rnd), + dgeevTestForAntisymRandom(100, rnd), + dgeevTestForAntisymRandom(101, rnd), + + { + a: Circulant(2).Matrix(), + evWant: Circulant(2).Eigenvalues(), + }, + { + a: Circulant(3).Matrix(), + evWant: Circulant(3).Eigenvalues(), + }, + { + a: Circulant(4).Matrix(), + evWant: Circulant(4).Eigenvalues(), + }, + { + a: Circulant(5).Matrix(), + evWant: Circulant(5).Eigenvalues(), + }, + { + a: Circulant(10).Matrix(), + evWant: Circulant(10).Eigenvalues(), + }, + { + a: Circulant(15).Matrix(), + evWant: Circulant(15).Eigenvalues(), + valTol: 1e-12, + }, + { + a: Circulant(30).Matrix(), + evWant: Circulant(30).Eigenvalues(), + valTol: 1e-11, + vecTol: 1e-12, + }, + { + a: Circulant(50).Matrix(), + evWant: Circulant(50).Eigenvalues(), + valTol: 1e-11, + vecTol: 1e-12, + }, + { + a: Circulant(101).Matrix(), + evWant: Circulant(101).Eigenvalues(), + valTol: 1e-10, + vecTol: 1e-11, + }, + { + a: Circulant(150).Matrix(), + evWant: Circulant(150).Eigenvalues(), + valTol: 1e-9, + vecTol: 1e-10, + }, + + { + a: Clement(2).Matrix(), + evWant: Clement(2).Eigenvalues(), + }, + { + a: Clement(3).Matrix(), + evWant: Clement(3).Eigenvalues(), + }, + { + a: Clement(4).Matrix(), + evWant: Clement(4).Eigenvalues(), + }, + { + a: Clement(5).Matrix(), + evWant: Clement(5).Eigenvalues(), + }, + { + a: Clement(10).Matrix(), + evWant: Clement(10).Eigenvalues(), + }, + { + a: Clement(15).Matrix(), + evWant: Clement(15).Eigenvalues(), + }, + { + a: Clement(30).Matrix(), + evWant: Clement(30).Eigenvalues(), + valTol: 1e-11, + }, + { + a: Clement(50).Matrix(), + evWant: Clement(50).Eigenvalues(), + valTol: 1e-7, + vecTol: 1e-11, + }, + + { + a: Creation(2).Matrix(), + evWant: Creation(2).Eigenvalues(), + }, + { + a: Creation(3).Matrix(), + evWant: Creation(3).Eigenvalues(), + }, + { + a: Creation(4).Matrix(), + evWant: Creation(4).Eigenvalues(), + }, + { + a: Creation(5).Matrix(), + evWant: Creation(5).Eigenvalues(), + }, + { + a: Creation(10).Matrix(), + evWant: Creation(10).Eigenvalues(), + }, + { + a: Creation(15).Matrix(), + evWant: Creation(15).Eigenvalues(), + }, + { + a: Creation(30).Matrix(), + evWant: Creation(30).Eigenvalues(), + }, + { + a: Creation(50).Matrix(), + evWant: Creation(50).Eigenvalues(), + }, + { + a: Creation(101).Matrix(), + evWant: Creation(101).Eigenvalues(), + }, + { + a: Creation(150).Matrix(), + evWant: Creation(150).Eigenvalues(), + }, + + { + a: Diagonal(0).Matrix(), + evWant: Diagonal(0).Eigenvalues(), + }, + { + a: Diagonal(10).Matrix(), + evWant: Diagonal(10).Eigenvalues(), + }, + { + a: Diagonal(50).Matrix(), + evWant: Diagonal(50).Eigenvalues(), + }, + { + a: Diagonal(151).Matrix(), + evWant: Diagonal(151).Eigenvalues(), + }, + + { + a: Downshift(2).Matrix(), + evWant: Downshift(2).Eigenvalues(), + }, + { + a: Downshift(3).Matrix(), + evWant: Downshift(3).Eigenvalues(), + }, + { + a: Downshift(4).Matrix(), + evWant: Downshift(4).Eigenvalues(), + }, + { + a: Downshift(5).Matrix(), + evWant: Downshift(5).Eigenvalues(), + }, + { + a: Downshift(10).Matrix(), + evWant: Downshift(10).Eigenvalues(), + }, + { + a: Downshift(15).Matrix(), + evWant: Downshift(15).Eigenvalues(), + }, + { + a: Downshift(30).Matrix(), + evWant: Downshift(30).Eigenvalues(), + }, + 
{ + a: Downshift(50).Matrix(), + evWant: Downshift(50).Eigenvalues(), + }, + { + a: Downshift(101).Matrix(), + evWant: Downshift(101).Eigenvalues(), + }, + { + a: Downshift(150).Matrix(), + evWant: Downshift(150).Eigenvalues(), + }, + + { + a: Fibonacci(2).Matrix(), + evWant: Fibonacci(2).Eigenvalues(), + }, + { + a: Fibonacci(3).Matrix(), + evWant: Fibonacci(3).Eigenvalues(), + }, + { + a: Fibonacci(4).Matrix(), + evWant: Fibonacci(4).Eigenvalues(), + }, + { + a: Fibonacci(5).Matrix(), + evWant: Fibonacci(5).Eigenvalues(), + }, + { + a: Fibonacci(10).Matrix(), + evWant: Fibonacci(10).Eigenvalues(), + }, + { + a: Fibonacci(15).Matrix(), + evWant: Fibonacci(15).Eigenvalues(), + }, + { + a: Fibonacci(30).Matrix(), + evWant: Fibonacci(30).Eigenvalues(), + }, + { + a: Fibonacci(50).Matrix(), + evWant: Fibonacci(50).Eigenvalues(), + }, + { + a: Fibonacci(101).Matrix(), + evWant: Fibonacci(101).Eigenvalues(), + }, + { + a: Fibonacci(150).Matrix(), + evWant: Fibonacci(150).Eigenvalues(), + }, + + { + a: Gear(2).Matrix(), + evWant: Gear(2).Eigenvalues(), + }, + { + a: Gear(3).Matrix(), + evWant: Gear(3).Eigenvalues(), + }, + { + a: Gear(4).Matrix(), + evWant: Gear(4).Eigenvalues(), + valTol: 1e-7, + }, + { + a: Gear(5).Matrix(), + evWant: Gear(5).Eigenvalues(), + }, + { + a: Gear(10).Matrix(), + evWant: Gear(10).Eigenvalues(), + valTol: 1e-8, + }, + { + a: Gear(15).Matrix(), + evWant: Gear(15).Eigenvalues(), + }, + { + a: Gear(30).Matrix(), + evWant: Gear(30).Eigenvalues(), + valTol: 1e-8, + }, + { + a: Gear(50).Matrix(), + evWant: Gear(50).Eigenvalues(), + valTol: 1e-8, + }, + { + a: Gear(101).Matrix(), + evWant: Gear(101).Eigenvalues(), + }, + { + a: Gear(150).Matrix(), + evWant: Gear(150).Eigenvalues(), + valTol: 1e-8, + }, + + { + a: Grcar{N: 10, K: 3}.Matrix(), + evWant: Grcar{N: 10, K: 3}.Eigenvalues(), + }, + { + a: Grcar{N: 10, K: 7}.Matrix(), + evWant: Grcar{N: 10, K: 7}.Eigenvalues(), + }, + { + a: Grcar{N: 11, K: 7}.Matrix(), + evWant: Grcar{N: 11, K: 7}.Eigenvalues(), + }, + { + a: Grcar{N: 50, K: 3}.Matrix(), + evWant: Grcar{N: 50, K: 3}.Eigenvalues(), + }, + { + a: Grcar{N: 51, K: 3}.Matrix(), + evWant: Grcar{N: 51, K: 3}.Eigenvalues(), + }, + { + a: Grcar{N: 50, K: 10}.Matrix(), + evWant: Grcar{N: 50, K: 10}.Eigenvalues(), + }, + { + a: Grcar{N: 51, K: 10}.Matrix(), + evWant: Grcar{N: 51, K: 10}.Eigenvalues(), + }, + { + a: Grcar{N: 50, K: 30}.Matrix(), + evWant: Grcar{N: 50, K: 30}.Eigenvalues(), + }, + { + a: Grcar{N: 150, K: 2}.Matrix(), + evWant: Grcar{N: 150, K: 2}.Eigenvalues(), + }, + { + a: Grcar{N: 150, K: 148}.Matrix(), + evWant: Grcar{N: 150, K: 148}.Eigenvalues(), + }, + + { + a: Hanowa{N: 6, Alpha: 17}.Matrix(), + evWant: Hanowa{N: 6, Alpha: 17}.Eigenvalues(), + }, + { + a: Hanowa{N: 50, Alpha: -1}.Matrix(), + evWant: Hanowa{N: 50, Alpha: -1}.Eigenvalues(), + }, + { + a: Hanowa{N: 100, Alpha: -1}.Matrix(), + evWant: Hanowa{N: 100, Alpha: -1}.Eigenvalues(), + }, + + { + a: Lesp(2).Matrix(), + evWant: Lesp(2).Eigenvalues(), + }, + { + a: Lesp(3).Matrix(), + evWant: Lesp(3).Eigenvalues(), + }, + { + a: Lesp(4).Matrix(), + evWant: Lesp(4).Eigenvalues(), + }, + { + a: Lesp(5).Matrix(), + evWant: Lesp(5).Eigenvalues(), + }, + { + a: Lesp(10).Matrix(), + evWant: Lesp(10).Eigenvalues(), + }, + { + a: Lesp(15).Matrix(), + evWant: Lesp(15).Eigenvalues(), + }, + { + a: Lesp(30).Matrix(), + evWant: Lesp(30).Eigenvalues(), + }, + { + a: Lesp(50).Matrix(), + evWant: Lesp(50).Eigenvalues(), + valTol: 1e-12, + vecTol: 1e-12, + }, + { + a: Lesp(101).Matrix(), + evWant: 
Lesp(101).Eigenvalues(), + valTol: 1e-12, + vecTol: 1e-12, + }, + { + a: Lesp(150).Matrix(), + evWant: Lesp(150).Eigenvalues(), + valTol: 1e-12, + vecTol: 1e-12, + }, + + { + a: Rutis{}.Matrix(), + evWant: Rutis{}.Eigenvalues(), + }, + + { + a: Tris{N: 74, X: 1, Y: -2, Z: 1}.Matrix(), + evWant: Tris{N: 74, X: 1, Y: -2, Z: 1}.Eigenvalues(), + }, + { + a: Tris{N: 74, X: 1, Y: 2, Z: -3}.Matrix(), + evWant: Tris{N: 74, X: 1, Y: 2, Z: -3}.Eigenvalues(), + }, + { + a: Tris{N: 75, X: 1, Y: 2, Z: -3}.Matrix(), + evWant: Tris{N: 75, X: 1, Y: 2, Z: -3}.Eigenvalues(), + }, + + { + a: Wilk4{}.Matrix(), + evWant: Wilk4{}.Eigenvalues(), + }, + { + a: Wilk12{}.Matrix(), + evWant: Wilk12{}.Eigenvalues(), + valTol: 1e-7, + }, + { + a: Wilk20(0).Matrix(), + evWant: Wilk20(0).Eigenvalues(), + }, + { + a: Wilk20(1e-10).Matrix(), + evWant: Wilk20(1e-10).Eigenvalues(), + valTol: 1e-12, + vecTol: 1e-12, + }, + + { + a: Zero(1).Matrix(), + evWant: Zero(1).Eigenvalues(), + }, + { + a: Zero(10).Matrix(), + evWant: Zero(10).Eigenvalues(), + }, + { + a: Zero(50).Matrix(), + evWant: Zero(50).Eigenvalues(), + }, + { + a: Zero(100).Matrix(), + evWant: Zero(100).Eigenvalues(), + }, + } { + for _, jobvl := range []lapack.LeftEVJob{lapack.LeftEVCompute, lapack.LeftEVNone} { + for _, jobvr := range []lapack.RightEVJob{lapack.RightEVCompute, lapack.RightEVNone} { + for _, extra := range []int{0, 11} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + testDgeev(t, impl, strconv.Itoa(i), test, jobvl, jobvr, extra, wl) + } + } + } + } + } + + for _, n := range []int{2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 20, 50, 51, 100, 101} { + for _, jobvl := range []lapack.LeftEVJob{lapack.LeftEVCompute, lapack.LeftEVNone} { + for _, jobvr := range []lapack.RightEVJob{lapack.RightEVCompute, lapack.RightEVNone} { + for cas := 0; cas < 10; cas++ { + // Create a block diagonal matrix with + // random eigenvalues of random multiplicity. + ev := make([]complex128, n) + tmat := zeros(n, n, n) + for i := 0; i < n; { + re := rnd.NormFloat64() + if i == n-1 || rnd.Float64() < 0.5 { + // Real eigenvalue. + nb := rnd.Intn(min(4, n-i)) + 1 + for k := 0; k < nb; k++ { + tmat.Data[i*tmat.Stride+i] = re + ev[i] = complex(re, 0) + i++ + } + continue + } + // Complex eigenvalue. + im := rnd.NormFloat64() + nb := rnd.Intn(min(4, (n-i)/2)) + 1 + for k := 0; k < nb; k++ { + // 2×2 block for the complex eigenvalue. + tmat.Data[i*tmat.Stride+i] = re + tmat.Data[(i+1)*tmat.Stride+i+1] = re + tmat.Data[(i+1)*tmat.Stride+i] = -im + tmat.Data[i*tmat.Stride+i+1] = im + ev[i] = complex(re, im) + ev[i+1] = complex(re, -im) + i += 2 + } + } + + // Compute A = Q T Q^T where Q is an + // orthogonal matrix. 
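+ // A is then orthogonally similar to T, so its eigenvalues are exactly the values in ev that were placed on the block diagonal above.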
+ q := randomOrthogonal(n, rnd) + tq := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.Trans, 1, tmat, q, 0, tq) + a := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, tq, 0, a) + + test := dgeevTest{ + a: a, + evWant: ev, + valTol: 1e-12, + vecTol: 1e-7, + } + testDgeev(t, impl, "random", test, jobvl, jobvr, 0, optimumWork) + } + } + } + } +} + +func testDgeev(t *testing.T, impl Dgeever, tc string, test dgeevTest, jobvl lapack.LeftEVJob, jobvr lapack.RightEVJob, extra int, wl worklen) { + const defaultTol = 1e-12 + valTol := test.valTol + if valTol == 0 { + valTol = defaultTol + } + vecTol := test.vecTol + if vecTol == 0 { + vecTol = defaultTol + } + + a := cloneGeneral(test.a) + n := a.Rows + + var vl blas64.General + if jobvl == lapack.LeftEVCompute { + vl = nanGeneral(n, n, n) + } else { + vl.Stride = 1 + } + + var vr blas64.General + if jobvr == lapack.RightEVCompute { + vr = nanGeneral(n, n, n) + } else { + vr.Stride = 1 + } + + wr := make([]float64, n) + wi := make([]float64, n) + + var lwork int + switch wl { + case minimumWork: + if jobvl == lapack.LeftEVCompute || jobvr == lapack.RightEVCompute { + lwork = max(1, 4*n) + } else { + lwork = max(1, 3*n) + } + case mediumWork: + work := make([]float64, 1) + impl.Dgeev(jobvl, jobvr, n, a.Data, a.Stride, wr, wi, vl.Data, vl.Stride, vr.Data, vr.Stride, work, -1) + if jobvl == lapack.LeftEVCompute || jobvr == lapack.RightEVCompute { + lwork = (int(work[0]) + 4*n) / 2 + } else { + lwork = (int(work[0]) + 3*n) / 2 + } + lwork = max(1, lwork) + case optimumWork: + work := make([]float64, 1) + impl.Dgeev(jobvl, jobvr, n, a.Data, a.Stride, wr, wi, vl.Data, vl.Stride, vr.Data, vr.Stride, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + first := impl.Dgeev(jobvl, jobvr, n, a.Data, a.Stride, wr, wi, + vl.Data, vl.Stride, vr.Data, vr.Stride, work, len(work)) + + prefix := fmt.Sprintf("Case #%v: n=%v, jobvl=%v, jobvr=%v, extra=%v, work=%v", + tc, n, jobvl, jobvr, extra, wl) + + if !generalOutsideAllNaN(vl) { + t.Errorf("%v: out-of-range write to VL", prefix) + } + if !generalOutsideAllNaN(vr) { + t.Errorf("%v: out-of-range write to VR", prefix) + } + + if first > 0 { + t.Logf("%v: all eigenvalues haven't been computed, first=%v", prefix, first) + } + + // Check that conjugate pair eigevalues are ordered correctly. + for i := first; i < n; { + if wi[i] == 0 { + i++ + continue + } + if wr[i] != wr[i+1] { + t.Errorf("%v: real parts of %vth conjugate pair not equal", prefix, i) + } + if wi[i] < 0 || wi[i+1] > 0 { + t.Errorf("%v: unexpected ordering of %vth conjugate pair", prefix, i) + } + i += 2 + } + + // Check the computed eigenvalues against provided known eigenvalues. + if test.evWant != nil { + used := make([]bool, n) + for i := first; i < n; i++ { + evGot := complex(wr[i], wi[i]) + idx := -1 + for k, evWant := range test.evWant { + if !used[k] && cmplx.Abs(evWant-evGot) < valTol { + idx = k + used[k] = true + break + } + } + if idx == -1 { + t.Errorf("%v: unexpected eigenvalue %v", prefix, evGot) + } + } + } + + if first > 0 || (jobvl == lapack.LeftEVNone && jobvr == lapack.RightEVNone) { + // No eigenvectors have been computed. + return + } + + // Check that the columns of VL and VR are eigenvectors that correspond + // to the computed eigenvalues. 
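+ // A real eigenvalue has a single real eigenvector column, while a complex conjugate pair is stored as two consecutive columns holding the real and imaginary parts of the eigenvector.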
+ for k := 0; k < n; { + if wi[k] == 0 { + if jobvl == lapack.LeftEVCompute { + ev := columnOf(vl, k) + if !isLeftEigenvectorOf(test.a, ev, nil, complex(wr[k], 0), vecTol) { + t.Errorf("%v: VL[:,%v] is not left real eigenvector", + prefix, k) + } + + norm := floats.Norm(ev, 2) + if math.Abs(norm-1) >= defaultTol { + t.Errorf("%v: norm of left real eigenvector %v not equal to 1: got %v", + prefix, k, norm) + } + } + if jobvr == lapack.RightEVCompute { + ev := columnOf(vr, k) + if !isRightEigenvectorOf(test.a, ev, nil, complex(wr[k], 0), vecTol) { + t.Errorf("%v: VR[:,%v] is not right real eigenvector", + prefix, k) + } + + norm := floats.Norm(ev, 2) + if math.Abs(norm-1) >= defaultTol { + t.Errorf("%v: norm of right real eigenvector %v not equal to 1: got %v", + prefix, k, norm) + } + } + k++ + } else { + if jobvl == lapack.LeftEVCompute { + evre := columnOf(vl, k) + evim := columnOf(vl, k+1) + if !isLeftEigenvectorOf(test.a, evre, evim, complex(wr[k], wi[k]), vecTol) { + t.Errorf("%v: VL[:,%v:%v] is not left complex eigenvector", + prefix, k, k+1) + } + floats.Scale(-1, evim) + if !isLeftEigenvectorOf(test.a, evre, evim, complex(wr[k+1], wi[k+1]), vecTol) { + t.Errorf("%v: VL[:,%v:%v] is not left complex eigenvector", + prefix, k, k+1) + } + + norm := math.Hypot(floats.Norm(evre, 2), floats.Norm(evim, 2)) + if math.Abs(norm-1) > defaultTol { + t.Errorf("%v: norm of left complex eigenvector %v not equal to 1: got %v", + prefix, k, norm) + } + } + if jobvr == lapack.RightEVCompute { + evre := columnOf(vr, k) + evim := columnOf(vr, k+1) + if !isRightEigenvectorOf(test.a, evre, evim, complex(wr[k], wi[k]), vecTol) { + t.Errorf("%v: VR[:,%v:%v] is not right complex eigenvector", + prefix, k, k+1) + } + floats.Scale(-1, evim) + if !isRightEigenvectorOf(test.a, evre, evim, complex(wr[k+1], wi[k+1]), vecTol) { + t.Errorf("%v: VR[:,%v:%v] is not right complex eigenvector", + prefix, k, k+1) + } + + norm := math.Hypot(floats.Norm(evre, 2), floats.Norm(evim, 2)) + if math.Abs(norm-1) > defaultTol { + t.Errorf("%v: norm of right complex eigenvector %v not equal to 1: got %v", + prefix, k, norm) + } + } + // We don't test whether the largest component is real + // because checking it is flaky due to rounding errors. + + k += 2 + } + } +} + +func dgeevTestForAntisymRandom(n int, rnd *rand.Rand) dgeevTest { + a := NewAntisymRandom(n, rnd) + return dgeevTest{ + a: a.Matrix(), + evWant: a.Eigenvalues(), + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev_bench.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev_bench.go new file mode 100644 index 0000000..0472ef3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeev_bench.go @@ -0,0 +1,62 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +var resultGeneral blas64.General + +func DgeevBenchmark(b *testing.B, impl Dgeever) { + rnd := rand.New(rand.NewSource(1)) + benchmarks := []struct { + name string + a blas64.General + }{ + {"AntisymRandom3", NewAntisymRandom(3, rnd).Matrix()}, + {"AntisymRandom4", NewAntisymRandom(4, rnd).Matrix()}, + {"AntisymRandom5", NewAntisymRandom(5, rnd).Matrix()}, + {"AntisymRandom10", NewAntisymRandom(10, rnd).Matrix()}, + {"AntisymRandom50", NewAntisymRandom(50, rnd).Matrix()}, + {"AntisymRandom100", NewAntisymRandom(100, rnd).Matrix()}, + {"AntisymRandom200", NewAntisymRandom(200, rnd).Matrix()}, + {"AntisymRandom500", NewAntisymRandom(500, rnd).Matrix()}, + {"Circulant3", Circulant(3).Matrix()}, + {"Circulant4", Circulant(4).Matrix()}, + {"Circulant5", Circulant(5).Matrix()}, + {"Circulant10", Circulant(10).Matrix()}, + {"Circulant50", Circulant(50).Matrix()}, + {"Circulant100", Circulant(100).Matrix()}, + {"Circulant200", Circulant(200).Matrix()}, + {"Circulant500", Circulant(500).Matrix()}, + } + for _, bm := range benchmarks { + n := bm.a.Rows + a := zeros(n, n, n) + vl := zeros(n, n, n) + vr := zeros(n, n, n) + wr := make([]float64, n) + wi := make([]float64, n) + work := make([]float64, 1) + impl.Dgeev(lapack.LeftEVCompute, lapack.RightEVCompute, n, a.Data, a.Stride, wr, wi, vl.Data, vl.Stride, vr.Data, vr.Stride, work, -1) + work = make([]float64, int(work[0])) + b.Run(bm.name, func(b *testing.B) { + for i := 0; i < b.N; i++ { + b.StopTimer() + copyGeneral(a, bm.a) + b.StartTimer() + impl.Dgeev(lapack.LeftEVCompute, lapack.RightEVCompute, n, a.Data, a.Stride, wr, wi, + vl.Data, vl.Stride, vr.Data, vr.Stride, work, len(work)) + } + resultGeneral = a + }) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehd2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehd2.go new file mode 100644 index 0000000..fe8d5f7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehd2.go @@ -0,0 +1,197 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgehd2er interface { + Dgehd2(n, ilo, ihi int, a []float64, lda int, tau, work []float64) +} + +func Dgehd2Test(t *testing.T, impl Dgehd2er) { + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{1, 2, 3, 4, 5, 7, 10, 30} { + for _, extra := range []int{0, 1, 13} { + for cas := 0; cas < 100; cas++ { + testDgehd2(t, impl, n, extra, rnd) + } + } + } +} + +func testDgehd2(t *testing.T, impl Dgehd2er, n, extra int, rnd *rand.Rand) { + ilo := rnd.Intn(n) + ihi := rnd.Intn(n) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + + tau := nanSlice(n - 1) + work := nanSlice(n) + + a := randomGeneral(n, n, n+extra, rnd) + // NaN out elements under the diagonal except + // for the [ilo:ihi,ilo:ihi] block. 
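+ // Dgehd2 must leave these elements untouched, so seeding them with NaN lets the checks below detect any write outside the active block.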
+ for i := 1; i <= ihi; i++ { + for j := 0; j < min(ilo, i); j++ { + a.Data[i*a.Stride+j] = math.NaN() + } + } + for i := ihi + 1; i < n; i++ { + for j := 0; j < i; j++ { + a.Data[i*a.Stride+j] = math.NaN() + } + } + aCopy := a + aCopy.Data = make([]float64, len(a.Data)) + copy(aCopy.Data, a.Data) + + impl.Dgehd2(n, ilo, ihi, a.Data, a.Stride, tau, work) + + prefix := fmt.Sprintf("Case n=%v, ilo=%v, ihi=%v, extra=%v", n, ilo, ihi, extra) + + // Check any invalid modifications of a. + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + for i := ilo; i <= ihi; i++ { + for j := 0; j < min(ilo, i); j++ { + if !math.IsNaN(a.Data[i*a.Stride+j]) { + t.Errorf("%v: expected NaN at A[%v,%v]", prefix, i, j) + } + } + } + for i := ihi + 1; i < n; i++ { + for j := 0; j < i; j++ { + if !math.IsNaN(a.Data[i*a.Stride+j]) { + t.Errorf("%v: expected NaN at A[%v,%v]", prefix, i, j) + } + } + } + for i := 0; i <= ilo; i++ { + for j := i; j < ilo+1; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + for j := ihi + 1; j < n; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + } + for i := ihi + 1; i < n; i++ { + for j := i; j < n; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + } + + // Check that tau has been assigned properly. + for i, v := range tau { + if i < ilo || i >= ihi { + if !math.IsNaN(v) { + t.Errorf("%v: expected NaN at tau[%v]", prefix, i) + } + } else { + if math.IsNaN(v) { + t.Errorf("%v: unexpected NaN at tau[%v]", prefix, i) + } + } + } + + // Extract Q and check that it is orthogonal. + q := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < q.Rows; i++ { + q.Data[i*q.Stride+i] = 1 + } + qCopy := q + qCopy.Data = make([]float64, len(q.Data)) + for j := ilo; j < ihi; j++ { + h := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < h.Rows; i++ { + h.Data[i*h.Stride+i] = 1 + } + v := blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + v.Data[j+1] = 1 + for i := j + 2; i < ihi+1; i++ { + v.Data[i] = a.Data[i*a.Stride+j] + } + blas64.Ger(-tau[j], v, v, h) + copy(qCopy.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, h, 0, q) + } + if !isOrthogonal(q) { + t.Errorf("%v: Q is not orthogonal\nQ=%v", prefix, q) + } + + // Overwrite NaN elements of aCopy with zeros + // (we will multiply with it below). + for i := 1; i <= ihi; i++ { + for j := 0; j < min(ilo, i); j++ { + aCopy.Data[i*aCopy.Stride+j] = 0 + } + } + for i := ihi + 1; i < n; i++ { + for j := 0; j < i; j++ { + aCopy.Data[i*aCopy.Stride+j] = 0 + } + } + + // Construct Q^T * AOrig * Q and check that it is + // equal to A from Dgehd2. 
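+ // Within the [ilo:ihi,ilo:ihi] block the product must be upper Hessenberg (entries below the first subdiagonal vanish up to roundoff) and must agree with the reduced matrix returned in a.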
+ aq := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aCopy, q, 0, aq) + qaq := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, aq, 0, qaq) + for i := ilo; i <= ihi; i++ { + for j := ilo; j <= ihi; j++ { + qaqij := qaq.Data[i*qaq.Stride+j] + if j < i-1 { + if math.Abs(qaqij) > 1e-14 { + t.Errorf("%v: Q^T*A*Q is not upper Hessenberg, [%v,%v]=%v", prefix, i, j, qaqij) + } + continue + } + diff := qaqij - a.Data[i*a.Stride+j] + if math.Abs(diff) > 1e-14 { + t.Errorf("%v: Q^T*AOrig*Q and A are not equal, diff at [%v,%v]=%v", prefix, i, j, diff) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehrd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehrd.go new file mode 100644 index 0000000..9613482 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgehrd.go @@ -0,0 +1,210 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgehrder interface { + Dgehrd(n, ilo, ihi int, a []float64, lda int, tau, work []float64, lwork int) + + Dorgqr(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DgehrdTest(t *testing.T, impl Dgehrder) { + rnd := rand.New(rand.NewSource(1)) + + // Randomized tests for small matrix sizes that will most likely + // use the unblocked algorithm. + for _, n := range []int{1, 2, 3, 4, 5, 10, 34} { + for _, extra := range []int{0, 13} { + for _, optwork := range []bool{true, false} { + for cas := 0; cas < 10; cas++ { + ilo := rnd.Intn(n) + ihi := rnd.Intn(n) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + testDgehrd(t, impl, n, ilo, ihi, extra, optwork, rnd) + } + } + } + } + + // These are selected tests for larger matrix sizes to test the blocked + // algorithm. Use sizes around several powers of two because that is + // where the blocked path will most likely start to be taken. For + // example, at present the blocked algorithm is used for sizes larger + // than 129. + for _, test := range []struct { + n, ilo, ihi int + }{ + {0, 0, -1}, + + {68, 0, 63}, + {68, 0, 64}, + {68, 0, 65}, + {68, 0, 66}, + {68, 0, 67}, + + {132, 2, 129}, + {132, 1, 129}, // Size = 129, unblocked. + {132, 0, 129}, // Size = 130, blocked. + {132, 1, 130}, + {132, 0, 130}, + {132, 1, 131}, + {132, 0, 131}, + + {260, 2, 257}, + {260, 1, 257}, + {260, 0, 257}, + {260, 0, 258}, + {260, 0, 259}, + } { + for _, extra := range []int{0, 13} { + for _, optwork := range []bool{true, false} { + testDgehrd(t, impl, test.n, test.ilo, test.ihi, extra, optwork, rnd) + } + } + } +} + +func testDgehrd(t *testing.T, impl Dgehrder, n, ilo, ihi, extra int, optwork bool, rnd *rand.Rand) { + a := randomGeneral(n, n, n+extra, rnd) + aCopy := a + aCopy.Data = make([]float64, len(a.Data)) + copy(aCopy.Data, a.Data) + + var tau []float64 + if n > 1 { + tau = nanSlice(n - 1) + } + + var work []float64 + if optwork { + work = nanSlice(1) + impl.Dgehrd(n, ilo, ihi, a.Data, a.Stride, tau, work, -1) + work = nanSlice(int(work[0])) + } else { + work = nanSlice(max(1, n)) + } + + impl.Dgehrd(n, ilo, ihi, a.Data, a.Stride, tau, work, len(work)) + + if n == 0 { + // Just make sure there is no panic. 
+ return + } + + prefix := fmt.Sprintf("Case n=%v, ilo=%v, ihi=%v, extra=%v", n, ilo, ihi, extra) + + // Check any invalid modifications of a. + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + for i := ilo; i <= ihi; i++ { + for j := 0; j < min(ilo, i); j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification of A[%v,%v]", prefix, i, j) + } + } + } + for i := ihi + 1; i < n; i++ { + for j := 0; j < i; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification of A[%v,%v]", prefix, i, j) + } + } + } + for i := 0; i <= ilo; i++ { + for j := i; j < ilo+1; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + for j := ihi + 1; j < n; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + } + for i := ihi + 1; i < n; i++ { + for j := i; j < n; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification at A[%v,%v]", prefix, i, j) + } + } + } + + // Check that tau has been assigned properly. + for i, v := range tau { + if math.IsNaN(v) { + t.Errorf("%v: unexpected NaN at tau[%v]", prefix, i) + } + } + + // Extract Q and check that it is orthogonal. + q := eye(n, n) + if ilo != ihi { + for i := ilo + 2; i <= ihi; i++ { + for j := ilo + 1; j < ihi; j++ { + q.Data[i*q.Stride+j] = a.Data[i*a.Stride+j-1] + } + } + nh := ihi - ilo + impl.Dorgqr(nh, nh, nh, q.Data[(ilo+1)*q.Stride+ilo+1:], q.Stride, tau[ilo:ihi], work, len(work)) + } + if !isOrthogonal(q) { + t.Errorf("%v: Q is not orthogonal\nQ=%v", prefix, q) + } + + // Construct Q^T * AOrig * Q and check that it is upper Hessenberg. + aq := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aCopy, q, 0, aq) + qaq := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, aq, 0, qaq) + for i := 0; i <= ilo; i++ { + for j := ilo + 1; j <= ihi; j++ { + qaqij := qaq.Data[i*qaq.Stride+j] + diff := qaqij - a.Data[i*a.Stride+j] + if math.Abs(diff) > 1e-13 { + t.Errorf("%v: Q^T*AOrig*Q and A are not equal, diff at [%v,%v]=%v", prefix, i, j, diff) + } + } + } + for i := ilo + 1; i <= ihi; i++ { + for j := ilo; j < n; j++ { + qaqij := qaq.Data[i*qaq.Stride+j] + if j < i-1 { + if math.Abs(qaqij) > 1e-13 { + t.Errorf("%v: Q^T*AOrig*Q is not upper Hessenberg, [%v,%v]=%v", prefix, i, j, qaqij) + } + continue + } + diff := qaqij - a.Data[i*a.Stride+j] + if math.Abs(diff) > 1e-13 { + t.Errorf("%v: Q^T*AOrig*Q and A are not equal, diff at [%v,%v]=%v", prefix, i, j, diff) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelq2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelq2.go new file mode 100644 index 0000000..833994b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelq2.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgelq2er interface { + Dgelq2(m, n int, a []float64, lda int, tau, work []float64) +} + +func Dgelq2Test(t *testing.T, impl Dgelq2er) { + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {1, 1, 0}, + {2, 2, 0}, + {3, 2, 0}, + {2, 3, 0}, + {1, 12, 0}, + {2, 6, 0}, + {3, 4, 0}, + {4, 3, 0}, + {6, 2, 0}, + {1, 12, 0}, + {1, 1, 20}, + {2, 2, 20}, + {3, 2, 20}, + {2, 3, 20}, + {1, 12, 20}, + {2, 6, 20}, + {3, 4, 20}, + {4, 3, 20}, + {6, 2, 20}, + {1, 12, 20}, + } { + n := test.n + m := test.m + lda := test.lda + if lda == 0 { + lda = test.n + } + k := min(m, n) + tau := make([]float64, k) + for i := range tau { + tau[i] = rnd.Float64() + } + work := make([]float64, m) + for i := range work { + work[i] = rnd.Float64() + } + a := make([]float64, m*lda) + for i := 0; i < m*lda; i++ { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + impl.Dgelq2(m, n, a, lda, tau, work) + + Q := constructQ("LQ", m, n, a, lda, tau) + + // Check that Q is orthogonal. + if !isOrthogonal(Q) { + t.Errorf("Case %v: Q not orthogonal", c) + } + + L := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + L.Data[i*L.Stride+j] = a[i*lda+j] + } + } + + ans := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, m*lda), + } + copy(ans.Data, aCopy) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, L, Q, 0, ans) + if !floats.EqualApprox(aCopy, ans.Data, 1e-14) { + t.Errorf("Case %v, LQ mismatch. Want %v, got %v.", c, aCopy, ans.Data) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelqf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelqf.go new file mode 100644 index 0000000..7300b30 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgelqf.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dgelqfer interface { + Dgelq2er + Dgelqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DgelqfTest(t *testing.T, impl Dgelqfer) { + const tol = 1e-12 + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {10, 5, 0}, + {5, 10, 0}, + {10, 10, 0}, + {300, 5, 0}, + {3, 500, 0}, + {200, 200, 0}, + {300, 200, 0}, + {204, 300, 0}, + {1, 3000, 0}, + {3000, 1, 0}, + {10, 5, 30}, + {5, 10, 30}, + {10, 10, 30}, + {300, 5, 500}, + {3, 500, 600}, + {200, 200, 300}, + {300, 200, 300}, + {204, 300, 400}, + {1, 3000, 4000}, + {3000, 1, 4000}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate m×n matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Allocate a slice for scalar factors of elementary reflectors + // and fill it with random numbers. + tau := make([]float64, n) + for i := 0; i < n; i++ { + tau[i] = rnd.NormFloat64() + } + + // Compute the expected result using unblocked LQ algorithm and + // store it want. 
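+ // The blocked Dgelqf is expected to reproduce this unblocked Dgelq2 result to within roundoff for every workspace length tried below.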
+ want := make([]float64, len(a)) + copy(want, a) + impl.Dgelq2(m, n, want, lda, tau, make([]float64, m)) + + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + copy(a, aCopy) + + var lwork int + switch wl { + case minimumWork: + lwork = m + case mediumWork: + work := make([]float64, 1) + impl.Dgelqf(m, n, a, lda, tau, work, -1) + lwork = int(work[0]) - 2*m + case optimumWork: + work := make([]float64, 1) + impl.Dgelqf(m, n, a, lda, tau, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + // Compute the LQ factorization of A. + impl.Dgelqf(m, n, a, lda, tau, work, len(work)) + // Compare the result with Dgelq2. + if !floats.EqualApprox(want, a, tol) { + t.Errorf("Case %v, workspace type %v, unexpected result", c, wl) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgels.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgels.go new file mode 100644 index 0000000..26131fb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgels.go @@ -0,0 +1,183 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgelser interface { + Dgels(trans blas.Transpose, m, n, nrhs int, a []float64, lda int, b []float64, ldb int, work []float64, lwork int) bool +} + +func DgelsTest(t *testing.T, impl Dgelser) { + rnd := rand.New(rand.NewSource(1)) + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + m, n, nrhs, lda, ldb int + }{ + {3, 4, 5, 0, 0}, + {3, 5, 4, 0, 0}, + {4, 3, 5, 0, 0}, + {4, 5, 3, 0, 0}, + {5, 3, 4, 0, 0}, + {5, 4, 3, 0, 0}, + {3, 4, 5, 10, 20}, + {3, 5, 4, 10, 20}, + {4, 3, 5, 10, 20}, + {4, 5, 3, 10, 20}, + {5, 3, 4, 10, 20}, + {5, 4, 3, 10, 20}, + {3, 4, 5, 20, 10}, + {3, 5, 4, 20, 10}, + {4, 3, 5, 20, 10}, + {4, 5, 3, 20, 10}, + {5, 3, 4, 20, 10}, + {5, 4, 3, 20, 10}, + {200, 300, 400, 0, 0}, + {200, 400, 300, 0, 0}, + {300, 200, 400, 0, 0}, + {300, 400, 200, 0, 0}, + {400, 200, 300, 0, 0}, + {400, 300, 200, 0, 0}, + {200, 300, 400, 500, 600}, + {200, 400, 300, 500, 600}, + {300, 200, 400, 500, 600}, + {300, 400, 200, 500, 600}, + {400, 200, 300, 500, 600}, + {400, 300, 200, 500, 600}, + {200, 300, 400, 600, 500}, + {200, 400, 300, 600, 500}, + {300, 200, 400, 600, 500}, + {300, 400, 200, 600, 500}, + {400, 200, 300, 600, 500}, + {400, 300, 200, 600, 500}, + } { + m := test.m + n := test.n + nrhs := test.nrhs + + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Size of b is the same trans or no trans, because the number of rows + // has to be the max of (m,n). + mb := max(m, n) + nb := nrhs + ldb := test.ldb + if ldb == 0 { + ldb = nb + } + b := make([]float64, mb*ldb) + for i := range b { + b[i] = rnd.Float64() + } + bCopy := make([]float64, len(b)) + copy(bCopy, b) + + // Find optimal work length. 
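+ // (The call with lwork = -1 below only queries the workspace size; the actual solve overwrites b with the least-squares or minimum-norm solution, assuming A has full rank.)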
+ work := make([]float64, 1) + impl.Dgels(trans, m, n, nrhs, a, lda, b, ldb, work, -1) + + // Perform linear solve + work = make([]float64, int(work[0])) + lwork := len(work) + for i := range work { + work[i] = rnd.Float64() + } + impl.Dgels(trans, m, n, nrhs, a, lda, b, ldb, work, lwork) + + // Check that the answer is correct by comparing to the normal equations. + aMat := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, len(aCopy)), + } + copy(aMat.Data, aCopy) + szAta := n + if trans == blas.Trans { + szAta = m + } + aTA := blas64.General{ + Rows: szAta, + Cols: szAta, + Stride: szAta, + Data: make([]float64, szAta*szAta), + } + + // Compute A^T * A if notrans and A * A^T otherwise. + if trans == blas.NoTrans { + blas64.Gemm(blas.Trans, blas.NoTrans, 1, aMat, aMat, 0, aTA) + } else { + blas64.Gemm(blas.NoTrans, blas.Trans, 1, aMat, aMat, 0, aTA) + } + + // Multiply by X. + X := blas64.General{ + Rows: szAta, + Cols: nrhs, + Stride: ldb, + Data: b, + } + ans := blas64.General{ + Rows: aTA.Rows, + Cols: X.Cols, + Stride: X.Cols, + Data: make([]float64, aTA.Rows*X.Cols), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aTA, X, 0, ans) + + B := blas64.General{ + Rows: szAta, + Cols: nrhs, + Stride: ldb, + Data: make([]float64, len(bCopy)), + } + + copy(B.Data, bCopy) + var ans2 blas64.General + if trans == blas.NoTrans { + ans2 = blas64.General{ + Rows: aMat.Cols, + Cols: B.Cols, + Stride: B.Cols, + Data: make([]float64, aMat.Cols*B.Cols), + } + } else { + ans2 = blas64.General{ + Rows: aMat.Rows, + Cols: B.Cols, + Stride: B.Cols, + Data: make([]float64, aMat.Rows*B.Cols), + } + } + + // Compute A^T B if Trans or A * B otherwise + if trans == blas.NoTrans { + blas64.Gemm(blas.Trans, blas.NoTrans, 1, aMat, B, 0, ans2) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aMat, B, 0, ans2) + } + if !floats.EqualApprox(ans.Data, ans2.Data, 1e-12) { + t.Errorf("Normal equations not satisfied") + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeql2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeql2.go new file mode 100644 index 0000000..36c4ae7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeql2.go @@ -0,0 +1,100 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgeql2er interface { + Dgeql2(m, n int, a []float64, lda int, tau, work []float64) +} + +func Dgeql2Test(t *testing.T, impl Dgeql2er) { + rnd := rand.New(rand.NewSource(1)) + // TODO(btracey): Add tests for m < n. + for _, test := range []struct { + m, n, lda int + }{ + {5, 5, 0}, + {5, 3, 0}, + {5, 4, 0}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + tau := nanSlice(min(m, n)) + work := nanSlice(n) + + aCopy := make([]float64, len(a)) + copy(aCopy, a) + impl.Dgeql2(m, n, a, lda, tau, work) + + k := min(m, n) + // Construct Q. 
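+ // For the QL factorization Q is the product H_{k-1} ... H_1 H_0, where H_i = I - tau[i]*v_i*v_i^T, v_i has a unit entry in row m-k+i, and its remaining nonzero entries are stored in column n-k+i of a.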
+ q := blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: make([]float64, m*m), + } + for i := 0; i < m; i++ { + q.Data[i*q.Stride+i] = 1 + } + for i := 0; i < k; i++ { + h := blas64.General{Rows: m, Cols: m, Stride: m, Data: make([]float64, m*m)} + for j := 0; j < m; j++ { + h.Data[j*h.Stride+j] = 1 + } + v := blas64.Vector{Inc: 1, Data: make([]float64, m)} + v.Data[m-k+i] = 1 + for j := 0; j < m-k+i; j++ { + v.Data[j] = a[j*lda+n-k+i] + } + blas64.Ger(-tau[i], v, v, h) + qTmp := blas64.General{Rows: q.Rows, Cols: q.Cols, Stride: q.Stride, Data: make([]float64, len(q.Data))} + copy(qTmp.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, h, qTmp, 0, q) + } + if !isOrthogonal(q) { + t.Errorf("Q is not orthogonal") + } + l := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + if m >= n { + for i := m - n; i < m; i++ { + for j := 0; j <= min(i-(m-n), n-1); j++ { + l.Data[i*l.Stride+j] = a[i*lda+j] + } + } + } else { + panic("untested") + } + ans := blas64.General{Rows: m, Cols: n, Stride: lda, Data: make([]float64, len(a))} + copy(ans.Data, a) + + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, l, 0, ans) + if !floats.EqualApprox(ans.Data, aCopy, 1e-10) { + t.Errorf("Reconstruction mismatch: m = %v, n = %v", m, n) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqp3.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqp3.go new file mode 100644 index 0000000..6c211b1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqp3.go @@ -0,0 +1,139 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgeqp3er interface { + Dlapmter + Dgeqp3(m, n int, a []float64, lda int, jpvt []int, tau, work []float64, lwork int) +} + +func Dgeqp3Test(t *testing.T, impl Dgeqp3er) { + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {1, 1, 0}, + {2, 2, 0}, + {3, 2, 0}, + {2, 3, 0}, + {1, 12, 0}, + {2, 6, 0}, + {3, 4, 0}, + {4, 3, 0}, + {6, 2, 0}, + {12, 1, 0}, + {1, 1, 20}, + {2, 2, 20}, + {3, 2, 20}, + {2, 3, 20}, + {1, 12, 20}, + {2, 6, 20}, + {3, 4, 20}, + {4, 3, 20}, + {6, 2, 20}, + {12, 1, 20}, + {129, 256, 0}, + {256, 129, 0}, + {129, 256, 266}, + {256, 129, 266}, + } { + n := test.n + m := test.m + lda := test.lda + if lda == 0 { + lda = test.n + } + const ( + all = iota + some + none + ) + for _, free := range []int{all, some, none} { + // Allocate m×n matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + // Allocate a slice of column pivots. + jpvt := make([]int, n) + for j := range jpvt { + switch free { + case all: + // All columns are free. + jpvt[j] = -1 + case some: + // Some columns are free, some are leading columns. + jpvt[j] = rnd.Intn(2) - 1 // -1 or 0 + case none: + // All columns are leading. + jpvt[j] = 0 + default: + panic("bad freedom") + } + } + // Allocate a slice for scalar factors of elementary + // reflectors and fill it with random numbers. Dgeqp3 + // will overwrite them with valid data. + k := min(m, n) + tau := make([]float64, k) + for i := range tau { + tau[i] = rnd.Float64() + } + // Get optimal workspace size for Dgeqp3. 
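+ // On exit from the factorization call below, jpvt describes the column permutation P and a holds the QR factors of A*P.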
+ work := make([]float64, 1) + impl.Dgeqp3(m, n, a, lda, jpvt, tau, work, -1) + lwork := int(work[0]) + work = make([]float64, lwork) + for i := range work { + work[i] = rnd.Float64() + } + + // Compute a QR factorization of A with column pivoting. + impl.Dgeqp3(m, n, a, lda, jpvt, tau, work, lwork) + + // Compute Q based on the elementary reflectors stored in A. + q := constructQ("QR", m, n, a, lda, tau) + // Check that Q is orthogonal. + if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", c) + } + + // Copy the upper triangle of A into R. + r := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + r.Data[i*n+j] = a[i*lda+j] + } + } + // Compute Q * R. + got := nanGeneral(m, n, lda) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, r, 0, got) + // Compute A * P: rearrange the columns of A based on the permutation in jpvt. + want := blas64.General{Rows: m, Cols: n, Stride: lda, Data: aCopy} + impl.Dlapmt(true, want.Rows, want.Cols, want.Data, want.Stride, jpvt) + // Check that A * P = Q * R. + if !equalApproxGeneral(got, want, 1e-13) { + t.Errorf("Case %v, Q*R != A*P\nQ*R=%v\nA*P=%v", c, got, want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqr2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqr2.go new file mode 100644 index 0000000..58fed42 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqr2.go @@ -0,0 +1,102 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgeqr2er interface { + Dgeqr2(m, n int, a []float64, lda int, tau []float64, work []float64) +} + +func Dgeqr2Test(t *testing.T, impl Dgeqr2er) { + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {1, 1, 0}, + {2, 2, 0}, + {3, 2, 0}, + {2, 3, 0}, + {1, 12, 0}, + {2, 6, 0}, + {3, 4, 0}, + {4, 3, 0}, + {6, 2, 0}, + {12, 1, 0}, + {1, 1, 20}, + {2, 2, 20}, + {3, 2, 20}, + {2, 3, 20}, + {1, 12, 20}, + {2, 6, 20}, + {3, 4, 20}, + {4, 3, 20}, + {6, 2, 20}, + {12, 1, 20}, + } { + n := test.n + m := test.m + lda := test.lda + if lda == 0 { + lda = test.n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + k := min(m, n) + tau := make([]float64, k) + for i := range tau { + tau[i] = rnd.Float64() + } + work := make([]float64, n) + for i := range work { + work[i] = rnd.Float64() + } + copy(aCopy, a) + impl.Dgeqr2(m, n, a, lda, tau, work) + + // Test that the QR factorization has completed successfully. Compute + // Q based on the vectors. + q := constructQ("QR", m, n, a, lda, tau) + + // Check that Q is orthogonal. 
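+ // (Orthogonality here means Q*Q^T = Q^T*Q = I up to roundoff.)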
+ if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", c) + } + // Check that A = Q * R + r := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + r.Data[i*n+j] = a[i*lda+j] + } + } + atmp := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, m*lda), + } + copy(atmp.Data, a) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, r, 0, atmp) + if !floats.EqualApprox(atmp.Data, aCopy, 1e-14) { + t.Errorf("Q*R != a") + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqrf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqrf.go new file mode 100644 index 0000000..2f32b17 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgeqrf.go @@ -0,0 +1,102 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dgeqrfer interface { + Dgeqr2er + Dgeqrf(m, n int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DgeqrfTest(t *testing.T, impl Dgeqrfer) { + const tol = 1e-12 + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {10, 5, 0}, + {5, 10, 0}, + {10, 10, 0}, + {300, 5, 0}, + {3, 500, 0}, + {200, 200, 0}, + {300, 200, 0}, + {204, 300, 0}, + {1, 3000, 0}, + {3000, 1, 0}, + {10, 5, 20}, + {5, 10, 20}, + {10, 10, 20}, + {300, 5, 400}, + {3, 500, 600}, + {200, 200, 300}, + {300, 200, 300}, + {204, 300, 400}, + {1, 3000, 4000}, + {3000, 1, 4000}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = test.n + } + + // Allocate m×n matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Allocate a slice for scalar factors of elementary reflectors + // and fill it with random numbers. + tau := make([]float64, n) + for i := 0; i < n; i++ { + tau[i] = rnd.Float64() + } + + // Compute the expected result using unblocked QR algorithm and + // store it in want. + want := make([]float64, len(a)) + copy(want, a) + impl.Dgeqr2(m, n, want, lda, tau, make([]float64, n)) + + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + copy(a, aCopy) + + var lwork int + switch wl { + case minimumWork: + lwork = n + case mediumWork: + work := make([]float64, 1) + impl.Dgeqrf(m, n, a, lda, tau, work, -1) + lwork = int(work[0]) - 2*n + case optimumWork: + work := make([]float64, 1) + impl.Dgeqrf(m, n, a, lda, tau, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + // Compute the QR factorization of A. + impl.Dgeqrf(m, n, a, lda, tau, work, len(work)) + // Compare the result with Dgeqr2. + if !floats.EqualApprox(want, a, tol) { + t.Errorf("Case %v, workspace %v, unexpected result.", c, wl) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerq2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerq2.go new file mode 100644 index 0000000..e0f65ad --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerq2.go @@ -0,0 +1,108 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgerq2er interface { + Dgerq2(m, n int, a []float64, lda int, tau []float64, work []float64) +} + +func Dgerq2Test(t *testing.T, impl Dgerq2er) { + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {1, 1, 0}, + {2, 2, 0}, + {3, 2, 0}, + {2, 3, 0}, + {1, 12, 0}, + {2, 6, 0}, + {3, 4, 0}, + {4, 3, 0}, + {6, 2, 0}, + {12, 1, 0}, + {1, 1, 20}, + {2, 2, 20}, + {3, 2, 20}, + {2, 3, 20}, + {1, 12, 20}, + {2, 6, 20}, + {3, 4, 20}, + {4, 3, 20}, + {6, 2, 20}, + {12, 1, 20}, + } { + n := test.n + m := test.m + lda := test.lda + if lda == 0 { + lda = test.n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + k := min(m, n) + tau := make([]float64, k) + for i := range tau { + tau[i] = rnd.Float64() + } + work := make([]float64, m) + for i := range work { + work[i] = rnd.Float64() + } + copy(aCopy, a) + impl.Dgerq2(m, n, a, lda, tau, work) + + // Test that the RQ factorization has completed successfully. Compute + // Q based on the vectors. + q := constructQ("RQ", m, n, a, lda, tau) + + // Check that Q is orthogonal. + if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", c) + } + // Check that A = R * Q + r := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + for i := 0; i < m; i++ { + off := m - n + for j := max(0, i-off); j < n; j++ { + r.Data[i*r.Stride+j] = a[i*lda+j] + } + } + + got := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, m*lda), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, r, q, 0, got) + want := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: aCopy, + } + if !equalApproxGeneral(got, want, 1e-14) { + t.Errorf("Case %d, R*Q != a\ngot: %+v\nwant:%+v", c, got, want) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerqf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerqf.go new file mode 100644 index 0000000..4bd9df8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgerqf.go @@ -0,0 +1,133 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgerqfer interface { + Dgerqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DgerqfTest(t *testing.T, impl Dgerqfer) { + const tol = 1e-13 + + rnd := rand.New(rand.NewSource(1)) + for c, test := range []struct { + m, n, lda int + }{ + {1, 1, 0}, + {2, 2, 0}, + {3, 2, 0}, + {2, 3, 0}, + {1, 12, 0}, + {2, 6, 0}, + {3, 4, 0}, + {4, 3, 0}, + {6, 2, 0}, + {12, 1, 0}, + {200, 180, 0}, + {180, 200, 0}, + {200, 200, 0}, + {1, 1, 20}, + {2, 2, 20}, + {3, 2, 20}, + {2, 3, 20}, + {1, 12, 20}, + {2, 6, 20}, + {3, 4, 20}, + {4, 3, 20}, + {6, 2, 20}, + {12, 1, 20}, + {200, 180, 220}, + {180, 200, 220}, + {200, 200, 220}, + } { + n := test.n + m := test.m + lda := test.lda + if lda == 0 { + lda = test.n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + k := min(m, n) + tau := make([]float64, k) + for i := range tau { + tau[i] = rnd.Float64() + } + work := []float64{0} + impl.Dgerqf(m, n, a, lda, tau, work, -1) + lwkopt := int(work[0]) + for _, wk := range []struct { + name string + length int + }{ + {name: "short", length: m}, + {name: "medium", length: lwkopt - 1}, + {name: "long", length: lwkopt}, + } { + if wk.length < max(1, m) { + continue + } + lwork := wk.length + work = make([]float64, lwork) + for i := range work { + work[i] = rnd.Float64() + } + copy(a, aCopy) + impl.Dgerqf(m, n, a, lda, tau, work, lwork) + + // Test that the RQ factorization has completed successfully. Compute + // Q based on the vectors. + q := constructQ("RQ", m, n, a, lda, tau) + + // Check that Q is orthogonal. + if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", c) + } + // Check that A = R * Q + r := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + for i := 0; i < m; i++ { + off := m - n + for j := max(0, i-off); j < n; j++ { + r.Data[i*r.Stride+j] = a[i*lda+j] + } + } + + got := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, m*lda), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, r, q, 0, got) + want := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: aCopy, + } + if !equalApproxGeneral(got, want, tol) { + t.Errorf("Case %d, R*Q != a %s\ngot: %+v\nwant:%+v", c, wk.name, got, want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgesvd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgesvd.go new file mode 100644 index 0000000..e9a52a0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgesvd.go @@ -0,0 +1,371 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "sort" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dgesvder interface { + Dgesvd(jobU, jobVT lapack.SVDJob, m, n int, a []float64, lda int, s, u []float64, ldu int, vt []float64, ldvt int, work []float64, lwork int) (ok bool) +} + +func DgesvdTest(t *testing.T, impl Dgesvder, tol float64) { + for _, m := range []int{0, 1, 2, 3, 4, 5, 10, 150, 300} { + for _, n := range []int{0, 1, 2, 3, 4, 5, 10, 150} { + for _, mtype := range []int{1, 2, 3, 4, 5} { + dgesvdTest(t, impl, m, n, mtype, tol) + } + } + } +} + +// dgesvdTest tests a Dgesvd implementation on an m×n matrix A generated +// according to mtype as: +// - the zero matrix if mtype == 1, +// - the identity matrix if mtype == 2, +// - a random matrix with a given condition number and singular values if mtype == 3, 4, or 5. +// It first computes the full SVD A = U*Sigma*V^T and checks that +// - U has orthonormal columns, and V^T has orthonormal rows, +// - U*Sigma*V^T multiply back to A, +// - the singular values are non-negative and sorted in decreasing order. +// Then all combinations of partial SVD results are computed and checked whether +// they match the full SVD result. +func dgesvdTest(t *testing.T, impl Dgesvder, m, n, mtype int, tol float64) { + rnd := rand.New(rand.NewSource(1)) + + // Use a fixed leading dimension to reduce testing time. + lda := n + 3 + ldu := m + 5 + ldvt := n + 7 + + minmn := min(m, n) + + // Allocate A and fill it with random values. The in-range elements will + // be overwritten below according to mtype. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + + var aNorm float64 + switch mtype { + default: + panic("unknown test matrix type") + case 1: + // Zero matrix. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + } + aNorm = 0 + case 2: + // Identity matrix. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + if i == j { + a[i*lda+i] = 1 + } else { + a[i*lda+j] = 0 + } + } + } + aNorm = 1 + case 3, 4, 5: + // Scaled random matrix. + // Generate singular values. + s := make([]float64, minmn) + Dlatm1(s, + 4, // s[i] = 1 - i*(1-1/cond)/(minmn-1) + float64(max(1, minmn)), // where cond = max(1,minmn) + false, // signs of s[i] are not randomly flipped + 1, rnd) // random numbers are drawn uniformly from [0,1) + // Decide scale factor for the singular values based on the matrix type. + ulp := dlamchP + unfl := dlamchS + ovfl := 1 / unfl + aNorm = 1 + if mtype == 4 { + aNorm = unfl / ulp + } + if mtype == 5 { + aNorm = ovfl * ulp + } + // Scale singular values so that the maximum singular value is + // equal to aNorm (we know that the singular values are + // generated above to be spread linearly between 1/cond and 1). + floats.Scale(aNorm, s) + // Generate A by multiplying S by random orthogonal matrices + // from left and right. + Dlagge(m, n, max(0, m-1), max(0, n-1), s, a, lda, rnd, make([]float64, m+n)) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + // Restore A because Dgesvd overwrites it. + copy(a, aCopy) + + // Allocate slices that will be used below to store the results of full + // SVD and fill them. 
+ uAll := make([]float64, m*ldu) + for i := range uAll { + uAll[i] = rnd.NormFloat64() + } + vtAll := make([]float64, n*ldvt) + for i := range vtAll { + vtAll[i] = rnd.NormFloat64() + } + sAll := make([]float64, min(m, n)) + for i := range sAll { + sAll[i] = math.NaN() + } + + prefix := fmt.Sprintf("m=%v,n=%v,work=%v,mtype=%v", m, n, wl, mtype) + + // Determine workspace size based on wl. + minwork := max(1, max(5*min(m, n), 3*min(m, n)+max(m, n))) + var lwork int + switch wl { + case minimumWork: + lwork = minwork + case mediumWork: + work := make([]float64, 1) + impl.Dgesvd(lapack.SVDAll, lapack.SVDAll, m, n, a, lda, sAll, uAll, ldu, vtAll, ldvt, work, -1) + lwork = (int(work[0]) + minwork) / 2 + case optimumWork: + work := make([]float64, 1) + impl.Dgesvd(lapack.SVDAll, lapack.SVDAll, m, n, a, lda, sAll, uAll, ldu, vtAll, ldvt, work, -1) + lwork = int(work[0]) + } + work := make([]float64, max(1, lwork)) + for i := range work { + work[i] = math.NaN() + } + + // Compute the full SVD which will be used later for checking the partial results. + ok := impl.Dgesvd(lapack.SVDAll, lapack.SVDAll, m, n, a, lda, sAll, uAll, ldu, vtAll, ldvt, work, len(work)) + if !ok { + t.Fatalf("Case %v: unexpected failure in full SVD", prefix) + } + + // Check that uAll, sAll, and vtAll multiply back to A by computing a residual + // |A - U*S*VT| / (n*aNorm) + if resid := svdFullResidual(m, n, aNorm, aCopy, lda, uAll, ldu, sAll, vtAll, ldvt); resid > tol { + t.Errorf("Case %v: original matrix not recovered for full SVD, |A - U*D*VT|=%v", prefix, resid) + } + if minmn > 0 { + // Check that uAll is orthogonal. + if !hasOrthonormalColumns(blas64.General{Rows: m, Cols: m, Data: uAll, Stride: ldu}) { + t.Errorf("Case %v: UAll is not orthogonal", prefix) + } + // Check that vtAll is orthogonal. + if !hasOrthonormalRows(blas64.General{Rows: n, Cols: n, Data: vtAll, Stride: ldvt}) { + t.Errorf("Case %v: VTAll is not orthogonal", prefix) + } + } + // Check that singular values are decreasing. + if !sort.IsSorted(sort.Reverse(sort.Float64Slice(sAll))) { + t.Errorf("Case %v: singular values from full SVD are not decreasing", prefix) + } + // Check that singular values are non-negative. + if minmn > 0 && floats.Min(sAll) < 0 { + t.Errorf("Case %v: some singular values from full SVD are negative", prefix) + } + + // Do partial SVD and compare the results to sAll, uAll, and vtAll. + for _, jobU := range []lapack.SVDJob{lapack.SVDAll, lapack.SVDStore, lapack.SVDOverwrite, lapack.SVDNone} { + for _, jobVT := range []lapack.SVDJob{lapack.SVDAll, lapack.SVDStore, lapack.SVDOverwrite, lapack.SVDNone} { + if jobU == lapack.SVDOverwrite || jobVT == lapack.SVDOverwrite { + // Not implemented. + continue + } + if jobU == lapack.SVDAll && jobVT == lapack.SVDAll { + // Already checked above. + continue + } + + prefix := prefix + ",job=" + svdJobString(jobU) + "U-" + svdJobString(jobVT) + "VT" + + // Restore A to its original values. + copy(a, aCopy) + + // Allocate slices for the results of partial SVD and fill them. 
+ u := make([]float64, m*ldu) + for i := range u { + u[i] = rnd.NormFloat64() + } + vt := make([]float64, n*ldvt) + for i := range vt { + vt[i] = rnd.NormFloat64() + } + s := make([]float64, min(m, n)) + for i := range s { + s[i] = math.NaN() + } + + for i := range work { + work[i] = math.NaN() + } + + ok := impl.Dgesvd(jobU, jobVT, m, n, a, lda, s, u, ldu, vt, ldvt, work, len(work)) + if !ok { + t.Fatalf("Case %v: unexpected failure in partial Dgesvd", prefix) + } + + if minmn == 0 { + // No panic and the result is ok, there is + // nothing else to check. + continue + } + + // Check that U has orthogonal columns and that it matches UAll. + switch jobU { + case lapack.SVDStore: + if !hasOrthonormalColumns(blas64.General{Rows: m, Cols: minmn, Data: u, Stride: ldu}) { + t.Errorf("Case %v: columns of U are not orthogonal", prefix) + } + if res := svdPartialUResidual(m, minmn, u, uAll, ldu); res > tol { + t.Errorf("Case %v: columns of U do not match UAll", prefix) + } + case lapack.SVDAll: + if !hasOrthonormalColumns(blas64.General{Rows: m, Cols: m, Data: u, Stride: ldu}) { + t.Errorf("Case %v: columns of U are not orthogonal", prefix) + } + if res := svdPartialUResidual(m, m, u, uAll, ldu); res > tol { + t.Errorf("Case %v: columns of U do not match UAll", prefix) + } + } + // Check that VT has orthogonal rows and that it matches VTAll. + switch jobVT { + case lapack.SVDStore: + if !hasOrthonormalRows(blas64.General{Rows: minmn, Cols: n, Data: vtAll, Stride: ldvt}) { + t.Errorf("Case %v: rows of VT are not orthogonal", prefix) + } + if res := svdPartialVTResidual(minmn, n, vt, vtAll, ldvt); res > tol { + t.Errorf("Case %v: rows of VT do not match VTAll", prefix) + } + case lapack.SVDAll: + if !hasOrthonormalRows(blas64.General{Rows: n, Cols: n, Data: vtAll, Stride: ldvt}) { + t.Errorf("Case %v: rows of VT are not orthogonal", prefix) + } + if res := svdPartialVTResidual(n, n, vt, vtAll, ldvt); res > tol { + t.Errorf("Case %v: rows of VT do not match VTAll", prefix) + } + } + // Check that singular values are decreasing. + if !sort.IsSorted(sort.Reverse(sort.Float64Slice(s))) { + t.Errorf("Case %v: singular values from full SVD are not decreasing", prefix) + } + // Check that singular values are non-negative. + if floats.Min(s) < 0 { + t.Errorf("Case %v: some singular values from full SVD are negative", prefix) + } + if !floats.EqualApprox(s, sAll, tol/10) { + t.Errorf("Case %v: singular values differ between full and partial SVD\n%v\n%v", prefix, s, sAll) + } + } + } + } +} + +// svdFullResidual returns +// |A - U*D*VT| / (n * aNorm) +// where U, D, and VT are as computed by Dgesvd with jobU = jobVT = lapack.SVDAll. +func svdFullResidual(m, n int, aNorm float64, a []float64, lda int, u []float64, ldu int, d []float64, vt []float64, ldvt int) float64 { + // The implementation follows TESTING/dbdt01.f from the reference. + + minmn := min(m, n) + if minmn == 0 { + return 0 + } + + // j-th column of A - U*D*VT. + aMinusUDVT := make([]float64, m) + // D times the j-th column of VT. + dvt := make([]float64, minmn) + // Compute the residual |A - U*D*VT| one column at a time. + var resid float64 + for j := 0; j < n; j++ { + // Copy j-th column of A to aj. + blas64.Copy(blas64.Vector{N: m, Data: a[j:], Inc: lda}, blas64.Vector{N: m, Data: aMinusUDVT, Inc: 1}) + // Multiply D times j-th column of VT. + for i := 0; i < minmn; i++ { + dvt[i] = d[i] * vt[i*ldvt+j] + } + // Compute the j-th column of A - U*D*VT. 
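+ // aMinusUDVT currently holds the j-th column of A; the Gemv call below
+ // subtracts U*dvt from it, leaving the j-th column of A - U*D*VT.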
+ blas64.Gemv(blas.NoTrans, + -1, blas64.General{Rows: m, Cols: minmn, Data: u, Stride: ldu}, blas64.Vector{N: minmn, Data: dvt, Inc: 1}, + 1, blas64.Vector{N: m, Data: aMinusUDVT, Inc: 1}) + resid = math.Max(resid, blas64.Asum(blas64.Vector{N: m, Data: aMinusUDVT, Inc: 1})) + } + if aNorm == 0 { + if resid != 0 { + // Original matrix A is zero but the residual is non-zero, + // return infinity. + return math.Inf(1) + } + // Original matrix A is zero, residual is zero, return 0. + return 0 + } + // Original matrix A is non-zero. + if aNorm >= resid { + resid = resid / aNorm / float64(n) + } else { + if aNorm < 1 { + resid = math.Min(resid, float64(n)*aNorm) / aNorm / float64(n) + } else { + resid = math.Min(resid/aNorm, float64(n)) / float64(n) + } + } + return resid +} + +// svdPartialUResidual compares U and URef to see if their columns span the same +// spaces. It returns the maximum over columns of +// |URef(i) - S*U(i)| +// where URef(i) and U(i) are the i-th columns of URef and U, respectively, and +// S is ±1 chosen to minimize the expression. +func svdPartialUResidual(m, n int, u, uRef []float64, ldu int) float64 { + var res float64 + for j := 0; j < n; j++ { + imax := blas64.Iamax(blas64.Vector{N: m, Data: uRef[j:], Inc: ldu}) + s := math.Copysign(1, uRef[imax*ldu+j]) * math.Copysign(1, u[imax*ldu+j]) + for i := 0; i < m; i++ { + diff := math.Abs(uRef[i*ldu+j] - s*u[i*ldu+j]) + res = math.Max(res, diff) + } + } + return res +} + +// svdPartialVTResidual compares VT and VTRef to see if their rows span the same +// spaces. It returns the maximum over rows of +// |VTRef(i) - S*VT(i)| +// where VTRef(i) and VT(i) are the i-th columns of VTRef and VT, respectively, and +// S is ±1 chosen to minimize the expression. +func svdPartialVTResidual(m, n int, vt, vtRef []float64, ldvt int) float64 { + var res float64 + for i := 0; i < m; i++ { + jmax := blas64.Iamax(blas64.Vector{N: n, Data: vtRef[i*ldvt:], Inc: 1}) + s := math.Copysign(1, vtRef[i*ldvt+jmax]) * math.Copysign(1, vt[i*ldvt+jmax]) + for j := 0; j < n; j++ { + diff := math.Abs(vtRef[i*ldvt+j] - s*vt[i*ldvt+j]) + res = math.Max(res, diff) + } + } + return res +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetf2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetf2.go new file mode 100644 index 0000000..79a25b7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetf2.go @@ -0,0 +1,197 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgetf2er interface { + Dgetf2(m, n int, a []float64, lda int, ipiv []int) bool +} + +func Dgetf2Test(t *testing.T, impl Dgetf2er) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {10, 10, 0}, + {10, 5, 0}, + {10, 5, 0}, + + {10, 10, 20}, + {5, 10, 20}, + {10, 5, 20}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + mn := min(m, n) + ipiv := make([]int, mn) + for i := range ipiv { + ipiv[i] = rnd.Int() + } + ok := impl.Dgetf2(m, n, a, lda, ipiv) + checkPLU(t, ok, m, n, lda, ipiv, a, aCopy, 1e-14, true) + } + + // Test with singular matrices (random matrices are almost surely non-singular). + for _, test := range []struct { + m, n, lda int + a []float64 + }{ + { + m: 2, + n: 2, + lda: 2, + a: []float64{ + 1, 0, + 0, 0, + }, + }, + { + m: 2, + n: 2, + lda: 2, + a: []float64{ + 1, 5, + 2, 10, + }, + }, + { + m: 3, + n: 3, + lda: 3, + // row 3 = row1 + 2 * row2 + a: []float64{ + 1, 5, 7, + 2, 10, -3, + 5, 25, 1, + }, + }, + { + m: 3, + n: 4, + lda: 4, + // row 3 = row1 + 2 * row2 + a: []float64{ + 1, 5, 7, 9, + 2, 10, -3, 11, + 5, 25, 1, 31, + }, + }, + } { + if impl.Dgetf2(test.m, test.n, test.a, test.lda, make([]int, min(test.m, test.n))) { + t.Log("Returned ok with singular matrix.") + } + } +} + +// checkPLU checks that the PLU factorization contained in factorize matches +// the original matrix contained in original. +func checkPLU(t *testing.T, ok bool, m, n, lda int, ipiv []int, factorized, original []float64, tol float64, print bool) { + var hasZeroDiagonal bool + for i := 0; i < min(m, n); i++ { + if factorized[i*lda+i] == 0 { + hasZeroDiagonal = true + break + } + } + if hasZeroDiagonal && ok { + t.Error("Has a zero diagonal but returned ok") + } + if !hasZeroDiagonal && !ok { + t.Error("Non-zero diagonal but returned !ok") + } + + // Check that the LU decomposition is correct. 
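+ // Reconstruct P*L*U from the packed output: L is unit lower triangular, U is
+ // upper triangular, and P is obtained by applying the row swaps recorded in
+ // ipiv (in reverse order) to the identity matrix. The product P*L*U is then
+ // compared against the original matrix.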
+ mn := min(m, n) + l := make([]float64, m*mn) + ldl := mn + u := make([]float64, mn*n) + ldu := n + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + v := factorized[i*lda+j] + switch { + case i == j: + l[i*ldl+i] = 1 + u[i*ldu+i] = v + case i > j: + l[i*ldl+j] = v + case i < j: + u[i*ldu+j] = v + } + } + } + + LU := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + U := blas64.General{ + Rows: mn, + Cols: n, + Stride: ldu, + Data: u, + } + L := blas64.General{ + Rows: m, + Cols: mn, + Stride: ldl, + Data: l, + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, L, U, 0, LU) + + p := make([]float64, m*m) + ldp := m + for i := 0; i < m; i++ { + p[i*ldp+i] = 1 + } + for i := len(ipiv) - 1; i >= 0; i-- { + v := ipiv[i] + blas64.Swap(blas64.Vector{N: m, Inc: 1, Data: p[i*ldp:]}, + blas64.Vector{N: m, Inc: 1, Data: p[v*ldp:]}) + } + P := blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: p, + } + aComp := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, m*lda), + } + copy(aComp.Data, factorized) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, P, LU, 0, aComp) + if !floats.EqualApprox(aComp.Data, original, tol) { + if print { + t.Errorf("PLU multiplication does not match original matrix.\nWant: %v\nGot: %v", original, aComp.Data) + return + } + t.Error("PLU multiplication does not match original matrix.") + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrf.go new file mode 100644 index 0000000..42dc713 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrf.go @@ -0,0 +1,66 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" +) + +type Dgetrfer interface { + Dgetrf(m, n int, a []float64, lda int, ipiv []int) bool +} + +func DgetrfTest(t *testing.T, impl Dgetrfer) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {10, 5, 0}, + {5, 10, 0}, + {10, 10, 0}, + {300, 5, 0}, + {3, 500, 0}, + {4, 5, 0}, + {300, 200, 0}, + {204, 300, 0}, + {1, 3000, 0}, + {3000, 1, 0}, + {10, 5, 20}, + {5, 10, 20}, + {10, 10, 20}, + {300, 5, 400}, + {3, 500, 600}, + {200, 200, 300}, + {300, 200, 300}, + {204, 300, 400}, + {1, 3000, 4000}, + {3000, 1, 4000}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + mn := min(m, n) + ipiv := make([]int, mn) + for i := range ipiv { + ipiv[i] = rnd.Int() + } + + // Cannot compare the outputs of Dgetrf and Dgetf2 because the pivoting may + // happen differently. Instead check that the LPQ factorization is correct. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + ok := impl.Dgetrf(m, n, a, lda, ipiv) + checkPLU(t, ok, m, n, lda, ipiv, a, aCopy, 1e-10, false) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetri.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetri.go new file mode 100644 index 0000000..0c9bc28 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetri.go @@ -0,0 +1,95 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dgetrier interface { + Dgetrfer + Dgetri(n int, a []float64, lda int, ipiv []int, work []float64, lwork int) bool +} + +func DgetriTest(t *testing.T, impl Dgetrier) { + const tol = 1e-13 + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, test := range []struct { + n, lda int + }{ + {5, 0}, + {5, 8}, + {45, 0}, + {45, 50}, + {63, 70}, + {64, 70}, + {65, 0}, + {65, 70}, + {66, 70}, + {150, 0}, + {150, 250}, + } { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Generate a random well conditioned matrix + perm := rnd.Perm(n) + a := make([]float64, n*lda) + for i := 0; i < n; i++ { + a[i*lda+perm[i]] = 1 + } + for i := range a { + a[i] += 0.01 * rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + ipiv := make([]int, n) + // Compute LU decomposition. + impl.Dgetrf(n, n, a, lda, ipiv) + // Test with various workspace sizes. + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + ainv := make([]float64, len(a)) + copy(ainv, a) + + var lwork int + switch wl { + case minimumWork: + lwork = max(1, n) + case mediumWork: + work := make([]float64, 1) + impl.Dgetri(n, ainv, lda, ipiv, work, -1) + lwork = max(int(work[0])-2*n, n) + case optimumWork: + work := make([]float64, 1) + impl.Dgetri(n, ainv, lda, ipiv, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + // Compute inverse. + ok := impl.Dgetri(n, ainv, lda, ipiv, work, lwork) + if !ok { + t.Errorf("Unexpected singular matrix.") + } + + // Check that A(inv) * A = I. + ans := make([]float64, len(ainv)) + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, aCopy, lda, ainv, lda, 0, ans, lda) + // The tolerance is so high because computing matrix inverses is very unstable. + dist := distFromIdentity(n, ans, lda) + if dist > tol { + t.Errorf("|Inv(A) * A - I|_inf = %v is too large. n = %v, lda = %v", dist, n, lda) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrs.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrs.go new file mode 100644 index 0000000..dab0334 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dgetrs.go @@ -0,0 +1,114 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dgetrser interface { + Dgetrfer + Dgetrs(trans blas.Transpose, n, nrhs int, a []float64, lda int, ipiv []int, b []float64, ldb int) +} + +func DgetrsTest(t *testing.T, impl Dgetrser) { + rnd := rand.New(rand.NewSource(1)) + // TODO(btracey): Put more thought into creating more regularized matrices + // and what correct tolerances should be. Consider also seeding the random + // number in this test to make it more robust to code changes in other + // parts of the suite. 
+ for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + n, nrhs, lda, ldb int + tol float64 + }{ + {3, 3, 0, 0, 1e-12}, + {3, 5, 0, 0, 1e-12}, + {5, 3, 0, 0, 1e-12}, + + {3, 3, 8, 10, 1e-12}, + {3, 5, 8, 10, 1e-12}, + {5, 3, 8, 10, 1e-12}, + + {300, 300, 0, 0, 1e-8}, + {300, 500, 0, 0, 1e-8}, + {500, 300, 0, 0, 1e-6}, + + {300, 300, 700, 600, 1e-8}, + {300, 500, 700, 600, 1e-8}, + {500, 300, 700, 600, 1e-6}, + } { + n := test.n + nrhs := test.nrhs + lda := test.lda + if lda == 0 { + lda = n + } + ldb := test.ldb + if ldb == 0 { + ldb = nrhs + } + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.Float64() + } + b := make([]float64, n*ldb) + for i := range b { + b[i] = rnd.Float64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + bCopy := make([]float64, len(b)) + copy(bCopy, b) + + ipiv := make([]int, n) + for i := range ipiv { + ipiv[i] = rnd.Int() + } + + // Compute the LU factorization. + impl.Dgetrf(n, n, a, lda, ipiv) + // Solve the system of equations given the result. + impl.Dgetrs(trans, n, nrhs, a, lda, ipiv, b, ldb) + + // Check that the system of equations holds. + A := blas64.General{ + Rows: n, + Cols: n, + Stride: lda, + Data: aCopy, + } + B := blas64.General{ + Rows: n, + Cols: nrhs, + Stride: ldb, + Data: bCopy, + } + X := blas64.General{ + Rows: n, + Cols: nrhs, + Stride: ldb, + Data: b, + } + tmp := blas64.General{ + Rows: n, + Cols: nrhs, + Stride: ldb, + Data: make([]float64, n*ldb), + } + copy(tmp.Data, bCopy) + blas64.Gemm(trans, blas.NoTrans, 1, A, X, 0, B) + if !floats.EqualApprox(tmp.Data, bCopy, test.tol) { + t.Errorf("Linear solve mismatch. trans = %v, n = %v, nrhs = %v, lda = %v, ldb = %v", trans, n, nrhs, lda, ldb) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvd3.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvd3.go new file mode 100644 index 0000000..cd8ecb0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvd3.go @@ -0,0 +1,174 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dggsvd3er interface { + Dggsvd3(jobU, jobV, jobQ lapack.GSVDJob, m, n, p int, a []float64, lda int, b []float64, ldb int, alpha, beta, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, work []float64, lwork int, iwork []int) (k, l int, ok bool) +} + +func Dggsvd3Test(t *testing.T, impl Dggsvd3er) { + rnd := rand.New(rand.NewSource(1)) + for cas, test := range []struct { + m, p, n, lda, ldb, ldu, ldv, ldq int + + ok bool + }{ + {m: 3, p: 3, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 10, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 10, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 5, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10, ok: true}, + {m: 5, p: 5, n: 5, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10, ok: true}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 10, p: 5, n: 5, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10, ok: true}, + {m: 10, p: 5, n: 5, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10, ok: true}, + {m: 10, p: 10, n: 10, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20, ok: true}, + {m: 10, p: 10, n: 10, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20, ok: true}, + } { + m := test.m + p := test.p + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + ldb := test.ldb + if ldb == 0 { + ldb = n + } + ldu := test.ldu + if ldu == 0 { + ldu = m + } + ldv := test.ldv + if ldv == 0 { + ldv = p + } + ldq := test.ldq + if ldq == 0 { + ldq = n + } + + a := randomGeneral(m, n, lda, rnd) + aCopy := cloneGeneral(a) + b := randomGeneral(p, n, ldb, rnd) + bCopy := cloneGeneral(b) + + alpha := make([]float64, n) + beta := make([]float64, n) + + u := nanGeneral(m, m, ldu) + v := nanGeneral(p, p, ldv) + q := nanGeneral(n, n, ldq) + + iwork := make([]int, n) + + work := []float64{0} + impl.Dggsvd3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, + m, n, p, + a.Data, a.Stride, + b.Data, b.Stride, + alpha, beta, + u.Data, u.Stride, + v.Data, v.Stride, + q.Data, q.Stride, + work, -1, iwork) + + lwork := int(work[0]) + work = make([]float64, lwork) + + k, l, ok := impl.Dggsvd3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, + m, n, p, + a.Data, a.Stride, + b.Data, b.Stride, + alpha, beta, + u.Data, u.Stride, + v.Data, v.Stride, + q.Data, q.Stride, + work, lwork, iwork) + + if !ok { + if test.ok { + t.Errorf("test %d unexpectedly did not converge", cas) + } + continue + } + + // Check orthogonality of U, V and Q. 
+ if !isOrthogonal(u) { + t.Errorf("test %d: U is not orthogonal\n%+v", cas, u) + } + if !isOrthogonal(v) { + t.Errorf("test %d: V is not orthogonal\n%+v", cas, v) + } + if !isOrthogonal(q) { + t.Errorf("test %d: Q is not orthogonal\n%+v", cas, q) + } + + // Check C^2 + S^2 = I. + var elements []float64 + if m-k-l >= 0 { + elements = alpha[k : k+l] + } else { + elements = alpha[k:m] + } + for i := range elements { + i += k + d := alpha[i]*alpha[i] + beta[i]*beta[i] + if !floats.EqualWithinAbsOrRel(d, 1, 1e-14, 1e-14) { + t.Errorf("test %d: alpha_%d^2 + beta_%d^2 != 1: got: %v", cas, i, i, d) + } + } + + zeroR, d1, d2 := constructGSVDresults(n, p, m, k, l, a, b, alpha, beta) + + // Check U^T*A*Q = D1*[ 0 R ]. + uTmp := nanGeneral(m, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, u, aCopy, 0, uTmp) + uAns := nanGeneral(m, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, uTmp, q, 0, uAns) + + d10r := nanGeneral(m, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, d1, zeroR, 0, d10r) + + if !equalApproxGeneral(uAns, d10r, 1e-14) { + t.Errorf("test %d: U^T*A*Q != D1*[ 0 R ]\nU^T*A*Q:\n%+v\nD1*[ 0 R ]:\n%+v", + cas, uAns, d10r) + } + + // Check V^T*B*Q = D2*[ 0 R ]. + vTmp := nanGeneral(p, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, v, bCopy, 0, vTmp) + vAns := nanGeneral(p, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, vTmp, q, 0, vAns) + + d20r := nanGeneral(p, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, d2, zeroR, 0, d20r) + + if !equalApproxGeneral(vAns, d20r, 1e-13) { + t.Errorf("test %d: V^T*B*Q != D2*[ 0 R ]\nV^T*B*Q:\n%+v\nD2*[ 0 R ]:\n%+v", + cas, vAns, d20r) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvp3.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvp3.go new file mode 100644 index 0000000..a1af60b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dggsvp3.go @@ -0,0 +1,147 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +type Dggsvp3er interface { + Dlanger + Dggsvp3(jobU, jobV, jobQ lapack.GSVDJob, m, p, n int, a []float64, lda int, b []float64, ldb int, tola, tolb float64, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, iwork []int, tau, work []float64, lwork int) (k, l int) +} + +func Dggsvp3Test(t *testing.T, impl Dggsvp3er) { + rnd := rand.New(rand.NewSource(1)) + for cas, test := range []struct { + m, p, n, lda, ldb, ldu, ldv, ldq int + }{ + {m: 3, p: 3, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 10, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 10, p: 5, n: 5, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 10, p: 10, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 10, p: 10, n: 10, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0}, + {m: 5, p: 5, n: 5, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10}, + {m: 5, p: 5, n: 5, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20}, + {m: 5, p: 5, n: 10, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20}, + {m: 10, p: 5, n: 5, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10}, + {m: 10, p: 5, n: 5, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10}, + {m: 10, p: 10, n: 10, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20}, + {m: 10, p: 10, n: 10, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20}, + } { + m := test.m + p := test.p + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + ldb := test.ldb + if ldb == 0 { + ldb = n + } + ldu := test.ldu + if ldu == 0 { + ldu = m + } + ldv := test.ldv + if ldv == 0 { + ldv = p + } + ldq := test.ldq + if ldq == 0 { + ldq = n + } + + a := randomGeneral(m, n, lda, rnd) + aCopy := cloneGeneral(a) + b := randomGeneral(p, n, ldb, rnd) + bCopy := cloneGeneral(b) + + tola := float64(max(m, n)) * impl.Dlange(lapack.Frobenius, m, n, a.Data, a.Stride, nil) * dlamchE + tolb := float64(max(p, n)) * impl.Dlange(lapack.Frobenius, p, n, b.Data, b.Stride, nil) * dlamchE + + u := nanGeneral(m, m, ldu) + v := nanGeneral(p, p, ldv) + q := nanGeneral(n, n, ldq) + + iwork := make([]int, n) + tau := make([]float64, n) + + work := []float64{0} + impl.Dggsvp3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, + m, p, n, + a.Data, a.Stride, + b.Data, b.Stride, + tola, tolb, + u.Data, u.Stride, + v.Data, v.Stride, + q.Data, q.Stride, + iwork, tau, + work, -1) + + lwork := int(work[0]) + work = make([]float64, lwork) + + k, l := impl.Dggsvp3(lapack.GSVDU, lapack.GSVDV, lapack.GSVDQ, + m, p, n, + a.Data, a.Stride, + b.Data, b.Stride, + tola, tolb, + u.Data, u.Stride, + v.Data, v.Stride, + q.Data, q.Stride, + iwork, tau, + work, lwork) + + // Check orthogonality of U, V and Q. + if !isOrthogonal(u) { + t.Errorf("test %d: U is not orthogonal\n%+v", cas, u) + } + if !isOrthogonal(v) { + t.Errorf("test %d: V is not orthogonal\n%+v", cas, v) + } + if !isOrthogonal(q) { + t.Errorf("test %d: Q is not orthogonal\n%+v", cas, q) + } + + zeroA, zeroB := constructGSVPresults(n, p, m, k, l, a, b) + + // Check U^T*A*Q = [ 0 RA ]. 
+ uTmp := nanGeneral(m, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, u, aCopy, 0, uTmp) + uAns := nanGeneral(m, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, uTmp, q, 0, uAns) + + if !equalApproxGeneral(uAns, zeroA, 1e-14) { + t.Errorf("test %d: U^T*A*Q != [ 0 RA ]\nU^T*A*Q:\n%+v\n[ 0 RA ]:\n%+v", + cas, uAns, zeroA) + } + + // Check V^T*B*Q = [ 0 RB ]. + vTmp := nanGeneral(p, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, v, bCopy, 0, vTmp) + vAns := nanGeneral(p, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, vTmp, q, 0, vAns) + + if !equalApproxGeneral(vAns, zeroB, 1e-14) { + t.Errorf("test %d: V^T*B*Q != [ 0 RB ]\nV^T*B*Q:\n%+v\n[ 0 RB ]:\n%+v", + cas, vAns, zeroB) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dhseqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dhseqr.go new file mode 100644 index 0000000..376724a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dhseqr.go @@ -0,0 +1,861 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dhseqrer interface { + Dhseqr(job lapack.SchurJob, compz lapack.SchurComp, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, + z []float64, ldz int, work []float64, lwork int) int +} + +type dhseqrTest struct { + n int + ilo int + ihi int + h []float64 + tol float64 + + evWant []complex128 +} + +func DhseqrTest(t *testing.T, impl Dhseqrer) { + for i, tc := range dhseqrTests { + for _, job := range []lapack.SchurJob{lapack.EigenvaluesOnly, lapack.EigenvaluesAndSchur} { + for _, wantz := range []bool{false, true} { + for _, extra := range []int{0, 11} { + testDhseqr(t, impl, i, tc, job, wantz, extra, true) + testDhseqr(t, impl, i, tc, job, wantz, extra, false) + } + } + } + } +} + +func testDhseqr(t *testing.T, impl Dhseqrer, i int, test dhseqrTest, job lapack.SchurJob, wantz bool, extra int, optwork bool) { + const tol = 1e-14 + evTol := test.tol + if evTol == 0 { + evTol = tol + } + + n := test.n + ihi := test.ihi + ilo := test.ilo + h := zeros(n, n, n+extra) + copyGeneral(h, blas64.General{Rows: n, Cols: n, Stride: max(1, n), Data: test.h}) + hCopy := cloneGeneral(h) + + compz := lapack.SchurNone + z := blas64.General{Stride: max(1, n)} + if wantz { + // First, let Dhseqr initialize Z to the identity matrix. + compz = lapack.SchurHess + z = nanGeneral(n, n, n+extra) + } + + wr := nanSlice(n) + wi := nanSlice(n) + + work := nanSlice(max(1, n)) + if optwork { + impl.Dhseqr(job, lapack.SchurHess, n, ilo, ihi, h.Data, h.Stride, wr, wi, z.Data, z.Stride, work, -1) + work = nanSlice(int(work[0])) + } + + unconverged := impl.Dhseqr(job, compz, n, ilo, ihi, h.Data, h.Stride, wr, wi, z.Data, z.Stride, work, len(work)) + prefix := fmt.Sprintf("Case %v: job=%v, compz=%v, n=%v, ilo=%v, ihi=%v, extra=%v, optwk=%v", + i, job, compz, n, ilo, ihi, extra, optwork) + if unconverged > 0 { + t.Logf("%v: Dhseqr did not compute all eigenvalues. unconverged=%v", prefix, unconverged) + if unconverged <= ilo { + t.Fatalf("%v: 0 < unconverged <= ilo", prefix) + } + } + + // Check that wr and wi have been assigned completely. 
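+ // Both slices were pre-filled with NaN (nanSlice), so a remaining NaN marks
+ // an eigenvalue slot that Dhseqr did not write.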
+ if floats.HasNaN(wr) { + t.Errorf("%v: wr has NaN elements", prefix) + } + if floats.HasNaN(wi) { + t.Errorf("%v: wi has NaN elements", prefix) + } + + // Check that complex eigenvalues are stored in consecutive elements as + // complex conjugate pairs. + for i := 0; i < n; { + if unconverged > 0 && i == ilo { + // Skip the unconverged eigenvalues. + i = unconverged + continue + } + if wi[i] == 0 { + // Real eigenvalue. + i++ + continue + } + // Complex conjugate pair. + if wr[i] != wr[i+1] { + t.Errorf("%v: conjugate pair has real parts unequal", prefix) + } + if wi[i] < 0 { + t.Errorf("%v: first in conjugate pair has negative imaginary part", prefix) + } + if wi[i+1] != -wi[i] { + t.Errorf("%v: complex pair is not conjugate", prefix) + } + i += 2 + } + + // Check that H contains the Schur form T. + if job == lapack.EigenvaluesAndSchur { + for i := 0; i < n; { + if unconverged > 0 && i == ilo { + // Skip the unconverged eigenvalues. + i = unconverged + continue + } + if wi[i] == 0 { + // Real eigenvalue. + if wr[i] != h.Data[i*h.Stride+i] { + t.Errorf("%v: T not in Schur form (real eigenvalue not on diagonal)", prefix) + } + i++ + continue + } + // Complex conjugate pair. + im := math.Sqrt(math.Abs(h.Data[(i+1)*h.Stride+i])) * math.Sqrt(math.Abs(h.Data[i*h.Stride+i+1])) + if wr[i] != h.Data[i*h.Stride+i] || wr[i] != h.Data[(i+1)*h.Stride+i+1] || + math.Abs(wi[i]-im) > tol { + t.Errorf("%v: conjugate pair and 2×2 diagonal block don't correspond", prefix) + } + i += 2 + } + } + + // Check that all the found eigenvalues are really eigenvalues. + foundEV := make([]bool, len(test.evWant)) + for i := 0; i < n; { + if unconverged > 0 && i == ilo { + // Skip the unconverged eigenvalues. + i = unconverged + continue + } + ev := complex(wr[i], wi[i]) + // Use problem-specific tolerance for testing eigenvalues. + found, index := containsComplex(test.evWant, ev, evTol) + if !found { + t.Errorf("%v: unexpected eigenvalue %v", prefix, ev) + } else { + foundEV[index] = true + } + i++ + } + if unconverged == 0 { + // Check that all eigenvalues have been found. + // This simple check assumes that all eigenvalues are + // sufficiently separated from each other at least by evTol. + for i := range foundEV { + if !foundEV[i] { + t.Errorf("%v: %vth eigenvalue not found", prefix, i) + } + } + } + + if !wantz { + return + } + + // Z must be orthogonal. + if !isOrthogonal(z) { + t.Errorf("%v: Z is not orthogonal", prefix) + } + + if job == lapack.EigenvaluesAndSchur { + tz := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.Trans, 1, h, z, 0, tz) + ztz := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, z, tz, 0, ztz) + if !equalApproxGeneral(ztz, hCopy, evTol) { + t.Errorf("%v: H != Z T Z^T", prefix) + } + } + + // Restore H. + copyGeneral(h, hCopy) + // Call Dhseqr again with the identity matrix given explicitly in Q. 
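+ // With compz == lapack.SchurOrig and Q preset to the identity, the
+ // accumulated Q must match the Z computed above when Dhseqr initialized Z itself.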
+ q := eye(n, n+extra) + impl.Dhseqr(job, lapack.SchurOrig, n, ilo, ihi, h.Data, h.Stride, wr, wi, q.Data, q.Stride, work, len(work)) + if !equalApproxGeneral(z, q, 0) { + t.Errorf("%v: Z and Q are not equal", prefix) + } +} + +var dhseqrTests = []dhseqrTest{ + { + n: 0, + ilo: 0, + ihi: -1, + }, + { + n: 1, + ilo: 0, + ihi: 0, + h: []float64{0}, + evWant: []complex128{0}, + }, + { + n: 1, + ilo: 0, + ihi: 0, + h: []float64{7.09965484086874e-1}, + evWant: []complex128{7.09965484086874e-1}, + }, + { + n: 2, + ilo: 0, + ihi: 1, + h: []float64{0, 0, 0, 0}, + evWant: []complex128{0}, + }, + { + n: 2, + ilo: 0, + ihi: 1, + h: []float64{ + 1, 0, + 0, 1, + }, + evWant: []complex128{1}, + }, + { + n: 2, + ilo: 0, + ihi: 1, + h: []float64{ + 0, -1, + 1, 0, + }, + evWant: []complex128{1i, -1i}, + }, + { + n: 2, + ilo: 0, + ihi: 1, + h: []float64{ + 6.25219991450918e-1, 8.17510791994361e-1, + 3.31218891622294e-1, 1.24103744878131e-1, + }, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + n: 4, + ilo: 1, + ihi: 2, + h: []float64{ + 1, 0, 0, 0, + 0, 6.25219991450918e-1, 8.17510791994361e-1, 0, + 0, 3.31218891622294e-1, 1.24103744878131e-1, 0, + 0, 0, 0, 2, + }, + evWant: []complex128{1, 2, 9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + n: 2, + ilo: 0, + ihi: 1, + h: []float64{ + -1.1219562276608, 6.85473513349362e-1, + -8.19951061145131e-1, 1.93728523178888e-1, + }, + evWant: []complex128{ + -4.64113852240958e-1 + 3.59580510817350e-1i, + -4.64113852240958e-1 - 3.59580510817350e-1i, + }, + }, + { + n: 5, + ilo: 0, + ihi: 4, + h: []float64{ + 9.57590178533658e-1, -5.10651295522708e-1, 9.24974510015869e-1, -1.30016306879522e-1, 2.92601986926954e-2, + -1.08084756637964, 1.77529701001213, -1.36480197632509, 2.23196371219601e-1, 1.12912853063308e-1, + 0, -8.44075612174676e-1, 1.067867614486, -2.55782915176399e-1, -2.00598563137468e-1, + 0, 0, -5.67097237165410e-1, 2.07205057427341e-1, 6.54998340743380e-1, + 0, 0, 0, -1.89441413886041e-1, -4.18125416021786e-1, + }, + evWant: []complex128{ + 2.94393309555622, + 4.97029793606701e-1 + 3.63041654992384e-1i, + 4.97029793606701e-1 - 3.63041654992384e-1i, + -1.74079119166145e-1 + 2.01570009462092e-1i, + -1.74079119166145e-1 - 2.01570009462092e-1i, + }, + }, + { + // BFW62A matrix from MatrixMarket, balanced and factorized into + // upper Hessenberg form in Octave. + // Eigenvalues computed by eig function in Octave. + // Dhseqr considers this matrix small (n <= 75). 
+ n: 62, + ilo: 0, + ihi: 61, + tol: 1e-12, + h: []float64{ + 0.7610708, -0.71474042262732, -1.03373461417302e-17, 8.218284875369092e-18, -4.39003777724509e-18, -7.633870714681998e-18, -9.951525116511751e-18, -6.538760279193677e-18, -1.656240811786753e-18, -4.915424973452908e-18, -7.590492820502813e-18, -4.532592864746854e-18, 1.137360639223451e-18, -2.088794138001457e-18, 4.330727699351238e-18, 4.88172964159538e-18, -6.438459345602974e-18, 7.414402965763168e-18, 8.592387304092668e-19, 2.905505475188102e-18, -5.210204793418634e-18, 2.377023457149656e-18, -1.958364175388968e-18, -8.746122759061733e-20, 1.839015672758814e-18, 3.097454207400904e-18, 1.545856657360309e-18, 1.713923049773744e-21, 1.333951071201153e-18, -1.256151066318485e-18, 5.489655201308922e-19, -2.191335276195054e-18, 3.211054779957158e-18, -4.585099368362507e-19, 4.064807180521144e-18, -3.621561583390336e-18, 4.638803832189033e-19, 1.306132013406548e-18, 7.71710094138792e-19, 2.320760977517361e-18, -6.538298612520761e-19, -3.498839827985687e-18, 2.699116500378558e-18, -1.653627855476782e-18, 7.006984532830204e-19, -2.829108657299736e-18, -5.260772120044258e-18, 1.014346572590618e-18, -1.751563831849658e-18, -1.850186018112724e-19, 2.870415308417256e-18, -8.423700664162806e-19, 3.498345394735042e-19, 1.448350507022323e-18, -3.119800500343431e-18, 4.170966784863917e-18, -4.413795207992463e-19, -2.550853151356032e-18, 2.058575286932081e-18, 1.11609155804576e-18, -2.819648393130598e-18, -2.691434041700446e-19, + -0.71474042262732, 1.938395745278447, -1.179041092366627, 2.994496337305918e-17, -1.886890458028042e-17, -1.696051150581149e-17, -1.066654178739982e-17, -6.29828959344471e-18, -3.017613522337327e-18, -6.087774183044001e-18, -1.623722797471356e-17, -1.609066043798334e-17, 1.1222991062801e-17, -1.199415856132727e-17, 3.254808477409891e-17, 3.032574771313234e-17, -2.950307698970009e-17, 3.390164991463985e-17, 3.580652213399182e-17, -1.363239170451582e-17, 5.318959619432346e-19, -2.094826370136215e-17, 2.503105756608372e-17, 1.02172885473953e-17, 2.072365201436764e-17, 5.419721687057681e-17, 2.946627912791419e-17, -1.348815353957071e-17, -6.109538294302727e-18, 8.919596400140564e-19, -4.965011212156456e-18, -8.892555328798407e-18, -8.533370652960768e-18, 2.670935841220357e-17, 6.294143735482237e-17, -6.631964574578866e-17, 1.610888675267686e-17, -1.914212496162892e-17, 7.491989417296875e-17, 2.433601037912241e-17, -3.471972221532949e-17, 1.239231453972595e-18, 8.477927675716922e-17, 1.832368873558869e-17, -3.15614722379294e-17, -3.481308523858143e-17, 8.753655535825511e-17, -2.311094913977048e-17, -1.97736043621337e-17, -4.591312368362259e-18, -4.090767028097941e-17, 9.630744841167777e-17, 6.376373378245906e-17, 3.517117740157962e-18, -4.504796150838874e-17, 7.517908631343054e-17, 5.08947280115584e-17, -1.382149857389238e-17, -2.198919507523352e-18, 7.280187019637593e-18, -4.195544216643798e-17, -1.69351463438629e-16, + 0, -1.179041092366627, 1.770797890903813, 0.9251612011433773, 0.003661446613681521, -0.005110479724841567, 0.0005014497289028411, 0.00268637752732762, -0.003137790817099964, 0.0001251340392028716, 0.005236816389348038, -0.007596537304410319, -0.003216883293048434, 0.002836879838039065, 0.006069520262676079, 0.002668518333518884, 0.009131523505676264, 0.004626632495050776, 0.00181579510454105, 0.001123900611304629, 0.001677485377614088, 0.00363754391761903, -0.0007470406844762735, 0.002833257933303097, -0.0002375815340930377, -0.002849024657372476, -0.0002629124003065383, -0.0009435052243060314, 
-0.001501677820905836, 0.0007867717317979819, -0.003255814847476796, 0.0001229596171032013, 0.001899497807037465, 0.001586135347885108, -0.002155598204409179, -0.004095921626627291, -0.004749259183184092, -0.0003901939289968589, -0.00126397970751315, 0.001155644134671306, 0.000291837164140963, -0.0008540793573406925, 0.0005498493646965147, -0.000512747881346778, 0.001308619245804509, -0.001057469234737898, 8.364932352209563e-05, -0.0004759538737373512, 0.0002832407173157385, -2.502295625364179e-05, -0.001116422235449543, 0.0008744887175767913, -0.001577030646119032, 0.0006630051771088335, 0.0008174386956312274, 0.0005885600522543514, -0.000529324123745168, 0.0006462360717703873, -0.0005292503950906053, -0.0008409757119361148, 0.001179292096398777, -7.042243897656571e-05, + 0, 0, 0.9256234889252499, 2.188661152704918, -1.116554836905615, -0.02243454666837822, 0.001818477623431723, 0.01031345522542573, -0.02255181751639313, 0.01064389372187288, 0.02644236613167033, -0.03711341885334873, -0.01368507023909662, 0.005876976997280983, 0.04065670610764593, 0.01666467627475975, 0.04720670725274503, 0.02074371568474985, 0.02492606640786733, -0.0007477579555839802, 0.008780018993769038, 0.02228545976823896, -0.001309713708470288, 0.02515283874752867, -0.002695437114195642, -0.03058898397162627, -0.004721936411831751, -0.01112709820777293, 0.005156744291122814, 0.001266520229863774, -0.0173310067274492, 0.006032177243961209, 0.01546906824241716, 0.004122912475005991, -0.01457685983271352, -0.02472315268907353, -0.0146012575743626, 0.01613627552485761, -0.03570849487210562, 0.009634392591632741, 0.01396210133689555, -0.01085301942984549, 0.02308071799914048, -0.01534806641683973, 0.007461896675297811, 0.001678793578211992, -0.004550709803423801, -0.003543952787451961, 0.01328644094056202, 0.01722933205866279, -0.01604068924890588, 0.01061524273934986, -0.01948663408394128, 0.02488657490054273, 0.002560204204893632, -0.007049942019567433, -0.005861737475764968, 0.007063905438215945, -0.01302170441839208, -0.01452116548010346, 0.008856002952171451, 0.003352799875293177, + 0, 0, 0, -1.131565669446545, 3.458875937128813, -2.92300025604371, 0.0004977656856497978, 0.006279968970473544, 0.0775979532222543, -0.07809248806636047, -0.04323656216106807, 0.05001022064587737, 0.003864067108285046, 0.04141643188583877, -0.1284617838883834, -0.04493800214431209, -0.07456343090218126, -0.01563872481200422, -0.1339020629421785, 0.04186908269102881, -0.01575262227867954, -0.05122401589211525, -0.01356095585891559, -0.1061570482803366, 0.01819813598371521, 0.1481253462698246, 0.02550194602547843, 0.04637287461870648, -0.07444378895306285, 0.01361958240422481, 0.04035022896181466, -0.04232248915252568, -0.06075940528348658, 0.027263798939883, 0.03953143432541723, 0.06518841156851282, -0.03410646432627348, -0.1414076406563496, 0.2323217246349602, -0.03415439789233707, -0.1031903658536844, 0.06725801721048928, -0.1560521418148311, 0.1014900809050588, -0.02098771321982528, -0.03982159107235207, 0.03695361299827822, 0.01067293209294729, -0.09426629859219736, -0.1345822610538289, 0.09362330513280395, -0.05670709861587474, 0.1059566486421828, -0.1760916487632087, 0.003702764511325265, 0.07422105386227762, 0.02742567045198715, -0.03734725575689604, 0.08146131944298318, 0.09081980084945049, -0.03823012624212198, -0.02882941691361127, + 0, 0, 0, 0, -2.935177932025793, 5.590160898106907, -2.264162462726681, -0.02411424982833118, -0.1133643811701465, 0.1104837624251364, 0.05106708784671347, -0.03981053811687705, 
-0.02554107703230142, -0.06918772930550876, 0.1627330379332113, 0.0515325563326872, 0.0562468014393183, 0.0339155492439978, 0.1634368483167388, -0.06785129040640099, 0.04039982620620088, 0.04044710731973533, 0.0285518510842595, 0.1485759249940305, -0.0304537251951914, -0.2009213484930713, -0.05273834253818357, -0.03107458918212595, 0.09792748883617711, -0.0337039884304953, -0.06657284881035327, 0.04914327832710783, 0.07368372187446774, -0.0404082088678178, -0.04421178865717079, -0.0709487906769288, 0.048430647567918, 0.1864479159285081, -0.3079556699470428, 0.01491993158702447, 0.1333753802314968, -0.09591074161204663, 0.1894696359177905, -0.1319027537070656, 0.03081270942006841, 0.04847952392626505, -0.04816809266890478, -0.0008101823853040729, 0.1149477702272877, 0.1970244006374306, -0.1184305631819092, 0.07656633356645355, -0.140928669738484, 0.2423845347140408, -0.01430733985161339, -0.0967298709856266, -0.03791764167457073, 0.04501910433428818, -0.09499757971636948, -0.1139200858550714, 0.04630019674988028, 0.03975991363586522, + 0, 0, 0, 0, 0, -2.266072850070115, 4.40758227065786, -2.187592801167079, 0.04541318743325212, -0.0292500337966509, -0.02398663294591961, -0.0298607436249778, 0.0765927452101913, 0.03477459705241919, -0.0257224121936686, 0.001477537977391887, 0.04674868179804328, -0.07030659618878905, 0.0114383223715982, 0.04039500147294157, -0.06165490536387656, 0.03052165142437121, -0.03151343169646251, -0.04567511071619659, 0.01389646864038026, 0.03406059858329889, 0.07777247587370216, -0.05562215591676438, -0.02972304398764038, 0.04553302018172632, 0.04905358098395964, 0.02540110963535692, 0.00741827563880251, -0.02406479350578768, 0.00798549007761889, -0.02127832597347739, 0.01170084494509563, 0.002383029926628291, 0.02969332847749644, 0.07478610531483831, 0.01311741704707942, 0.004853415796376565, 0.02254889573704177, -0.0008058983249759786, -0.01674237970384834, 0.007747220993838389, 0.001741724814996781, -0.02678508693786828, 0.03009097476646124, -0.06933693587488159, -0.006894177513041368, -0.003212920179243059, 0.006244662438662574, -0.03261491350065344, 0.03016960268159134, -0.001128724172713099, 0.01002372353957473, 0.01549185843206932, -0.01638802914727083, -0.02186759059889685, 0.02607807397124053, -0.01433672343290503, + 0, 0, 0, 0, 0, 0, -2.208506791141428, 5.240066122406224, -2.182709291422287, -0.04831719550093321, -0.03858222961310988, 0.07090414091109702, 0.07618344970454043, 0.01210977758298604, -0.08775997916346844, -0.04209238321226993, -0.08158937930535407, -0.0691978468647506, -0.0718486976078294, 0.004433709126950578, -0.06338689200675134, -0.03622650750929987, -0.007019326939737634, -0.1038169299762074, -0.003664296783585897, 0.1260404715508425, 0.01449365280740196, 0.02152866502194497, -0.04579662426484265, 0.02137306190373941, 0.02841535413798802, -0.04356497460133966, -0.04882163279365745, 0.0002663261307664017, 0.04049595350038757, 0.05101584504101733, 0.02365749339968924, -0.05799471679730656, 0.1571971147245405, -0.01838060269733261, -0.05301211904637573, 0.02796283933445018, -0.0827747400120639, 0.0826539872568238, -0.004639853234141812, -0.03415100337915269, 0.02043301459221876, -0.01420687321749558, -0.07938788384250468, -0.06984431882951091, 0.01979778686221181, -0.05267713009695951, 0.05803585434476307, -0.1172598583231236, 0.01085942096095348, 0.03045318026097962, 0.03931707061762502, -0.0233260419792624, 0.02886660013519448, 0.03861548107303825, -0.03415507841094348, 0.008098200304311437, + 0, 0, 0, 0, 0, 0, 0, -2.279082737682327, 
4.179202389209161, 2.014339592778223, -0.04255211810632337, 0.2215228709530191, 0.04554891291433198, -0.1776754857264893, 0.008167590360928265, -0.03396600462822136, -0.2424067171263278, -0.04982603310212124, 0.08199335145408625, -0.1620942794258422, 0.002338101300086993, -0.07021142224421691, 0.09194811379254013, 0.06141720296344315, -0.04343138202962209, -0.07659354927119244, -0.1361105641705367, 0.04365095033370017, 0.1736465880725596, -0.08740865081391179, -0.01477028109128357, -0.0188999323841316, -0.01077877669937425, -0.04294670860685663, 0.01729899060655344, 0.08739236799944389, 0.182034549192379, 0.1742753783161974, -0.2051811283512857, -0.09696129832199611, 0.08343537923840838, -0.04957366782909829, 0.1265093425463374, -0.07142635715461459, 0.03516617105992843, 0.0383997617140459, -0.04104973319490962, 0.02037353120337982, 0.04757894980296348, 0.2227131172970346, -0.07280127948445575, 0.01933448054625989, -0.05548809149836405, 0.2093056702150173, -0.07255565470500472, -0.123599084041237, -0.01537223729308192, 0.002577573950277644, -0.0733551734670323, -0.03190494711187865, -0.03967527247234395, 0.07966579792866824, + 0, 0, 0, 0, 0, 0, 0, 0, 1.903931035374501, 3.824975942360571, -1.918381148171332, -0.1657229385793016, -0.1612950026821678, 0.06698675826395525, 0.126725548868686, 0.05262161401229534, 0.1736974825351408, 0.1645930486922778, -0.008218244683807857, 0.0481824748986632, 0.1029912224929829, 0.04100531176584442, -0.05027752034197176, 0.03600703924093035, -0.03107821795488815, -0.09759422490480313, -0.04354787932553194, 0.08526304164417117, -0.05355786578034339, -0.0210819410892414, -0.1122497139926173, -0.02837719853579051, 0.02149997813969711, 0.06803627465540676, -0.0458177019216118, -0.09920218030202725, -0.1651400777956252, -0.0455277713939929, 0.003337830551949452, -0.06755253724103881, -0.07801076584667281, -0.04572759847378299, -0.02963338925321968, 0.07597836187621793, 0.01430341925034608, -0.02647305556934371, 0.0228555845523007, 0.01546873666210535, -0.03908905892485317, -0.01513876665871793, 0.0042446623219113, 0.03015387459510092, -0.02120400053387252, -0.03598829734362344, 0.004724005614895581, 0.07940598065515762, 0.01643813194117675, 0.005515400875796831, 0.03057541214871107, -0.01882273722478993, 0.001668026830005827, -0.02913002540516653, + 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.035489145812599, 3.958195998136282, -2.415518953202652, -0.1018252839623988, 0.09113791429521235, -0.143125166149266, -0.02308820648298807, 0.007900495974597297, 0.0891936029052371, -0.191496963455834, 0.08058392972181647, 0.05211306893716722, -0.02786699591928707, 0.007523375632267236, -0.05648289516476343, 0.06927000214275245, 0.1738730341952659, 0.04931088211870207, 0.03078035118979117, -0.09569654581650394, 0.01335103593932622, 0.06192961771791639, -0.02060940913305214, -0.05414923078827102, 0.06346107123244546, 0.02052335161999402, 0.0759441214578726, -0.1238298106880246, -0.2507681676381417, 0.3220100931816501, -0.01147160193974397, -0.1324548043218159, 0.1477869911354369, -0.2406607672124291, 0.06431201000607845, -0.01766450147458312, -0.0548904673124562, 0.05157233284634812, 0.04488059690309322, -0.06177517133954061, -0.23112183069299, 0.2080819465459902, -0.05619520043449243, 0.1795452492137158, -0.204269300276831, -0.01430899089131678, 0.08951777845217569, -0.02653873178692821, -0.04665500591425999, 0.1362175927592773, 0.1872861054389846, -0.02109220243469613, -0.07237982467321609, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.387825495222564, 5.631333594127314, 1.792900160009439, 
-0.1292562242890975, 0.1708356554410346, 0.04950951151168896, -0.009914110552264667, 0.1304655891154234, 0.1609748354747077, -0.08607480442007164, 0.1610516858008479, 0.006346254683211893, 0.02530117908848513, 0.2023262116291442, -0.04991598539162005, -0.3298986278194697, -0.1487726465103999, 0.04799870466505981, 0.1882318445518781, -0.1206769872912393, -0.09574976849564885, 0.04601707138105179, 0.0715991702971735, 0.0110319870997898, -0.07468722751312951, -0.06360236467100627, 0.03066807997062939, 0.1978804308092757, -0.4403223814664722, -0.09064370852004526, 0.08638179820445273, -0.1181221434581026, 0.2272147516466281, -0.1254616867610615, -0.0001501123827163629, 0.1032892317050803, -0.05195565185717236, 0.04689531008365307, 0.1236167395644631, 0.2849021718663459, -0.08639934992977449, 0.1211622058496298, -0.1593293433814323, 0.2959939998820938, -0.06193112020165896, -0.06245227757105343, -0.04632893647720479, 0.03583128970774434, -0.07735153068129821, -0.1215213155769518, 0.01117363777162431, 0.01224071348068845, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.970568498833333, 3.432745918572068, -2.179945131289983, 0.1549422681053906, 0.02156733618947362, -0.1743403098753009, 0.04884260166424702, 0.161193984588502, -0.2174611110448761, 0.05521624891494294, -0.05887323587614037, 0.1328019445063764, 0.150653877491827, -0.06531521571304157, -0.1543822385517836, -0.2043044123070031, 0.1255799677545108, 0.1951365223394271, -0.1233664137625445, -0.1191855712864921, -0.04903904444926842, 0.01721465629147372, -0.04824417949824886, -0.001809247060549745, 0.04683387964790045, 0.1406402447048902, 0.2582634735034707, -0.2591765142026595, -0.1617309876246061, 0.1040899633433518, -0.09204034179968526, 0.1659716858767694, -0.07258217699057123, 0.1238542047814545, -0.005315457299413418, -0.04888221850509963, 0.02889824196206881, 0.07250335907166307, 0.3039398127688065, -0.1278843615154275, 0.03794117583347663, -0.08815038995197073, 0.3363118210052076, -0.1106312150714128, -0.1943873573646721, -0.03270119577811206, 0.02061769160692044, -0.1147396461667833, -0.04432517129006736, -0.03624512007085111, 0.1372685073992675, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.971094433373163, 3.959369702740928, 1.887120354843997, -0.05026461165000308, -0.1399839889203879, -0.185627397808518, -0.04596340870656163, 0.008956420059669758, -0.1381531838530755, -0.06179826475610644, 0.03260995306247771, -0.0962297246933979, 0.05268169622571128, 0.2046211566675452, 0.1296024872478153, -0.05109478171641717, -0.06816393508471544, 0.06908783957203835, 0.1203829447316026, 0.01720249086925636, -0.03678250120900584, -0.09954728921499965, 0.08400427932827997, 0.09706474262764897, 0.1099658716687498, -0.02055867348093135, 0.1883358420037133, 0.09179573472650564, 0.0428976892444284, 0.06904499115717885, -0.07352106561747025, -0.01527177851177849, 0.007127245592600535, -0.03478704421611469, 0.003011747710224133, -0.02349766354391826, 0.01004232793292505, -0.1176867876164139, 0.02626695914041232, -0.06316783433824909, 0.07753431035296164, -0.05772959109292543, 0.01954926232340906, -0.06086028308842151, -0.003104675370067428, -0.004923780276110752, 0.008718170992460231, 0.05506074746847181, 0.02126352510068548, 0.02026026825978836, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2.051962095286209, 4.783354200058216, -2.891753406247233, 0.1494225282409022, 0.212321254452782, -0.2121415539790813, 0.02466897820188111, 0.06733336624204075, 0.013113247694252, 0.1066584296274234, 0.08752061927245192, 0.07922179944730777, 0.2365701476731576, 0.007588213043408364, 
-0.05416415411776607, -0.2020969955640969, 0.06349969928685602, -0.06132787289740503, -0.03422718627771316, -0.01952915873386353, 0.0644213739673787, 0.002115696634784188, 0.06255822113535302, -0.199371510170398, -0.3230384741719209, 0.3808208705549075, 0.04071272810763353, -0.01872027971165153, 0.175074940224908, -0.3802378821499527, -0.06108501582393667, 0.06646559313315525, -0.1623676411929772, 0.03990883781119187, 0.04487902512075174, -0.1419408834211026, -0.1568779206082137, 0.1763292664552807, -0.09481989476682466, 0.1344530334023877, -0.1823509060475661, 0.01993854821358784, 0.06058475613302417, -0.08882610769003915, -0.07025689205542202, 0.1720722409076721, 0.2549799182126544, 0.01962928250874243, -0.01708969300024939, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.90166412025067, 5.683663549106058, 1.9210862649828, 0.003584348132213356, -0.04815067261613367, 0.08879292199376522, -0.005675898744132862, 0.03588291164670374, -0.1637463265891401, -0.2197707557186419, -0.08857402261688128, -0.04780383765954515, 0.007843151524149466, 0.1770531910307867, -0.01723997302062695, -0.02612834699223629, -0.00540746785723896, -0.0705490796246758, -0.04457806266766569, 0.06473113864983282, 0.006275761834464256, -0.08950765677362392, -0.1005085571651238, -0.009712772636099888, 0.1241037651167948, -0.1243232786387127, -0.2285046949724745, -0.09330919844079147, 0.05061721851550809, 0.2874490693586184, -0.03620828558028133, 0.009836453811605826, 0.06478449995192533, -0.01171663894787422, 0.006410086373602496, -0.2000108983272982, 0.03916914746487668, 0.03329096249389659, 0.07559233782463498, -0.1503685986635421, -0.006365422116363463, 0.1204026175721976, 0.125722416995956, 0.03865674591865399, -0.0001228620998850972, -0.06816612415831065, -0.03365741691324027, -0.07613321112893839, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.851490093348759, 5.320051320277479, 2.417643700741916, -0.1995237653475895, 0.07621645201183533, -0.1771833255682829, -0.01078146896182771, -0.06438976299009525, -0.2876178310323119, -0.006303714018374637, 0.356885069902641, 0.07024283797935846, 0.01881395860270091, -0.2092513663311028, 0.109718092264327, 0.02854685307727969, -0.1312757974509329, -0.1108285734230257, 0.05182740468109521, 0.07233132504659899, 0.05030539452868459, -0.06059975102985716, -0.2297336103700467, 0.5045083042485633, -0.0004325173131309776, -0.1784693418735473, 0.08394922707081144, -0.2680843663012994, 0.2671400195308918, -0.001793550843300997, -0.1325105555633781, 0.07300804882966573, -0.06897110118534086, -0.2665451791081322, -0.2630899900703882, 0.04369816343226968, -0.156643286665005, 0.1549466071737415, -0.4145076724124122, 0.07488318920361078, 0.1419270611119949, 0.1219043619914908, -0.06378625947413261, 0.08917372812331978, 0.1038377550424948, -0.08072252702869862, 0.009394294196957323, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2.53934911433842, 3.08124689760215, -1.480395561682393, 0.2462776680494332, 0.01710234467080554, -0.01750922521427385, -0.08464899612469672, -0.08513339146210799, 0.1497690941692451, 0.2003583687026345, 0.229975304279735, -0.06773544482684146, -0.212962762165994, 0.100805918257745, 0.1387021642494545, 0.05756587177820099, -0.02342135307890196, 0.0413142771758164, 0.01012070144816604, 0.009465626383560421, -0.1768890665508353, -0.3114326451294799, 0.334908331740353, 0.1454336469709651, -0.1253099733433882, 0.187117750616515, -0.2556659183272817, 0.05828504568732875, -0.09923486906264152, -0.01295943412712868, 0.04742766192450461, 0.002467967102260226, 
-0.09546651038888934, -0.3163510329005083, 0.1861927610821425, -0.06672987133018268, 0.1437750260846377, -0.3332140898455062, 0.07302040974957998, 0.1701107028641413, -0.01658898261297759, -0.05269690883993056, 0.1563566521838276, 0.1337720630335788, 0.01368561538114742, -0.1213888256015452, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.307203302598592, 2.885610538012057, 2.250703368751336, 0.07975315762421138, 0.09370441866373114, -0.1923654024839493, -0.1853398422580131, -0.201205128559576, -0.2125042330970577, -0.1602582952706193, 0.3093660817821342, 0.01852414315637842, -0.09255035727223564, -0.1924195495982953, -0.1559361270247578, -0.02917817624329287, 0.1447862761755991, -0.04690491601291951, -0.1960912348494043, -0.2042284966615884, 0.05841637536106218, 0.02291485689303238, -0.2827174856981686, -0.2943255892311636, -0.2215807563938077, 0.1152617438267267, 0.3936419107039491, 0.02096945169954771, -0.02252698024658075, 0.07773344380322408, -0.006443114230257507, -0.03639999246001423, -0.09084547229099375, -0.0397561686169364, 0.07701104659075265, 0.005605523155556855, -0.09348135695481166, -0.03124263128081152, 0.1542717927672431, 0.175465847613986, 0.0572413755901381, -0.03334819451365621, -0.1404348146594518, -0.06481871409417514, -0.04848557273226619, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2.221221516792545, 4.982054100250426, 2.183210516233156, 0.0908360095708884, -0.3019054223643021, -0.5366128211639946, -0.3196918354703204, -0.1253970246067154, -0.02164532399684936, 0.2556280160777759, 0.08182827430850881, -0.03680716913431671, 0.03290225595516987, -0.1262131502004158, -0.09569394699109128, -0.005709824337923843, 0.0821878342740492, -0.1407748396690034, 0.09719614879475127, 0.2301897192422586, 0.04576646414789445, -0.2184893983754594, -0.2792745493536102, -0.363844809151781, 0.3257684950525028, 0.4790184968677053, -0.07652744991329491, 0.06377373985106835, 0.09437678117499698, -0.06131442314952967, 0.1635757313451941, -0.2796573282080887, -0.05643997532345108, 0.1152912068418917, 0.05000521333406865, -0.1112720969259657, 0.0037148809503484, 0.1093948420140112, 0.2980235424512261, 0.1525445958488788, -0.1264302662462978, -0.2913624335279468, -0.003113019822754165, -0.1134308899923566, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2.400474057942684, 3.406090954274398, 1.166212384143174, -0.2629422439993046, -0.2702044042321645, -0.06636757078595552, -0.08446798536159737, 0.1313648786119294, 0.1193846426124598, -0.07246210384479049, 0.01554276290017705, 0.07103237282021825, 0.00101034194025556, -0.02392135193032525, 0.06952201574673418, -0.007400528848880757, -0.1551607153718201, -0.1651416045393484, -0.0721038672300459, 0.1237766596982622, -0.03361356917856322, -0.2622129829975602, -0.09615056687603316, 0.06891513871408637, 0.2914707059926323, -0.1250888357080352, 0.06761344442133146, 0.0799843533260796, -0.01371538646021322, 0.07103228925121174, -0.3405952917653196, 0.09927170088586629, 0.07489758155119226, 0.08314980277563824, -0.2366684526630883, 0.04202882159469431, 0.2074629307377897, 0.133356144785867, 0.0637264741453499, 0.0308975379082297, -0.1084379405587302, 0.02099441886765496, -0.1808400593316885, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.086464017862694, 5.114621438496568, 1.930881617131408, 0.1620245301097141, 0.1255287200617495, 0.3368622640783234, -0.07183251945541429, -0.3012926287551759, -0.1116975660537743, 0.113190686135744, -0.04910476738177835, -0.0465386985036234, -0.01788084091497165, 
0.06748386592535341, -0.02381369375912231, 0.1493221788544871, 0.02579249903686181, -0.2497357621560777, 0.1787366301893779, 0.1598754145183457, 0.1674188524026742, 0.2477399099122946, -0.3340328954217437, -0.2111911479084411, 0.05086712720251271, -0.1292081829605008, -0.04302551258734066, -0.04971415251604733, -0.3610534332063385, 0.1892568833309398, -0.04472498978753213, -0.2065739236157566, -0.02046944574279112, -0.1756213338724003, 0.07793636245748489, 0.001237377976353086, -0.09033779335999018, -0.160410772384528, 0.08626890948440605, 0.2182749871373348, -0.0838793833068209, 0.1464287335113856, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.723745364788649, 3.894040171351998, -1.604023474297945, 0.3564682842321129, -0.068601028463067, 0.2723514789823699, 0.1226064004710703, -0.1014104100043627, -0.01893991862852569, 0.08732928681306741, 0.1555840779063216, 0.08897747453791588, -0.05848831483689518, -0.0484062742636184, -0.01943252555803195, -0.1897987807964078, -0.04208301117450661, -0.05303588371252462, 0.1133610703860842, -0.02273323404597872, 0.2540238415394855, -0.07855533407962875, -0.06507240945437587, -0.005499676266161271, 0.06537585217565781, -0.04778965574559299, 0.03306331390850831, -0.03674334203422738, 0.03096742123062764, 0.01763537013767625, -0.02727086473016628, -0.003088168719969086, 0.0435625544938414, -0.03476926734733601, -0.03196005989505435, -0.1419829052022682, -0.06959993690169985, 0.04835942714075128, 0.06791350727690673, -0.02769231996290041, 0.02869640238709043, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.887735910703242, 2.376861553923156, -2.059488107339375, -0.1636149309402013, 0.4041266030614025, 0.3896070591189961, -0.2297552131134549, -0.01184707906318158, 0.07039303277467518, 0.09925344398529366, 0.06695981045037805, -0.008491592697259171, -0.04552531630716806, -0.2274274932314481, -0.4148399842074417, -0.04458879883972757, 0.1297363092206598, 0.006733546690957063, -0.3384389296777096, 0.1168946778492827, -0.005399720520138511, 0.3552679244548678, -0.05369187330796665, 0.07272949240516048, 0.04205583963833656, -0.003676655566636833, -0.02270378608066365, -0.2966711461982461, 0.02934135424925877, 0.02865469879716708, 0.06673230222014637, -0.1838317311908014, -0.004436962897364399, 0.1509614176408774, 0.03376168246202512, 0.0005117826261220852, 0.04498835681575293, -0.07866868520578868, -0.0334252458798712, -0.08545921183065397, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.342190295905557, 4.675687708808497, -2.268183880250829, 0.2090893331762786, 0.4578770822708127, -0.01884304914244323, -0.07920602266213114, 0.05860045842795304, -0.009411964143128677, -0.009484130845488712, -0.03376087164847531, 0.01728654546382909, -0.2136063948857168, -0.1250583122943451, 0.2295451258972967, -0.06031125891810103, -0.1745972831925295, -0.3612966512867564, -0.1696417690184429, 0.2894160200659208, 0.5167644047563161, -0.08805909441951841, 0.1382094329850753, 0.05099565505493066, -0.04533499187369815, 0.05797820328595679, -0.1922533222149581, -0.1102035618550493, 0.09431486137300341, -0.001494148493882944, -0.09571326182532253, -0.0247601912770405, 0.1256667582306468, 0.2120425181347171, 0.07968400246820299, -0.09760118083265475, -0.2941239252154335, -0.07105438440442044, -0.06896304227007152, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.005975891768637, 3.352830695220589, -1.754960054860547, 0.1623709029576712, -0.01686728241632605, -0.03827200221784547, 
0.1561794350456731, 0.1402911103849665, 0.03386692126036084, -0.2383508690881479, 0.08335746314476074, 0.1222986332072196, 0.1365597783993524, 0.1833332391744543, -0.1153402862359182, 0.06132285746330645, 0.103916564083423, 0.1352942358705118, 0.03751837000428822, -0.09780641373392215, 0.01802532012439729, 0.07461290579154131, -0.07340909507755823, 0.0263628669211833, 0.05634194666559207, 0.1326983203399266, -0.07526750794049826, -0.0255602869082238, -0.03079487759048528, 0.2087632832461296, -0.06368900481673745, -0.1885028988850317, -0.09938432303399658, -0.02813603601003281, -0.03116773548046878, -0.01139744596335172, -0.007527417596076127, 0.09996642926056981, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -2.005352340672642, 4.225811150115511, 2.31447703108509, 0.2404320758625818, -0.1026845105793418, 0.09323203523714457, -0.07188013661735847, -0.04900547068951503, -0.01143900704943617, 0.03127208944683336, -0.1422425637959582, 0.1223555755457385, 0.3629752163517811, -0.1439880981371962, -0.2053548592359871, -0.2670448469719894, -0.2854950646499647, 0.4890836381626159, 0.4252198149550604, -0.04903353665384676, 0.1440097216561384, 0.05263434485402999, -0.05654435812537717, 0.2744897997285071, -0.1869384630385859, -0.0801988284405884, 0.1415754714387316, 0.0461903176790022, 0.07292418854739299, -0.07749396692418822, -0.01197157575397448, 0.2335354584093252, 0.1565196693950396, -0.2175672539551863, -0.294898244011832, -0.04250817996099358, -0.06254226634319582, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2.023207199521003, 3.695034404904957, -1.278828575213463, -0.09576460539931182, 0.01293341047109737, -0.1025508423146749, -0.09879959800395338, 0.1293868360344177, 0.02661187351571312, -0.03208813036911215, 0.102284173268301, 0.01897487699556174, -4.171607187027272e-05, -0.2343929177751728, -0.1591895912128799, -0.3479923586821921, 0.2266119442104414, 0.1369767045914879, -0.093264948471121, 0.05584513287015452, 0.1113848723095114, 0.07805629549261404, 0.385159500396428, -0.315090514425508, 0.2619076486635123, 0.2100336864909587, 0.1489157028437222, 0.01899500010404414, -0.05839538028607237, 0.1065294735286251, 0.1429581389759609, 0.1768187396519744, -0.0107077164022975, -0.07184487656799306, 0.1053566314912159, -0.2656344309369447, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.535179041441933, 2.927516278644585, 1.069440149410428, -0.1940154888240462, -0.06435827084629098, 0.03118119256124731, 0.2144814653940334, -0.1459984300204726, -0.01884883111449459, -0.0996859848375896, -0.1929505482810151, -0.142915066638186, 0.04595119246484281, 0.09181030791542119, 0.1046793740137302, -0.1631714112553374, -0.1533173188313381, -0.008178614876768846, -0.03838516939058051, -0.04628352128726964, -0.04001834958725597, -0.3727398948175817, 0.349654467351177, -0.113952499262497, -0.101339746891288, -0.1603850568927218, -0.1309942015965596, 0.07772040042527674, 0.1275463112280693, -0.04252979249843156, -0.1408252690486143, 0.05191953638235136, 0.07165159166941262, -0.1158826338243819, 0.09369294636414835, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.042176179125399, 2.455373013093171, -1.298065125007905, 0.02028671121404909, 0.003306962463612291, -0.1036220220710941, 0.0481345337164661, 0.03315299260062861, 0.04992516180471544, 0.09592872957586211, 0.03096020094331417, 0.009903127869262564, 0.01875056235671545, 0.03272160535846057, 0.01927852349784328, 
0.02325854762506147, 0.04608264369572843, -0.009983430343303928, -0.01609838886434818, -0.01003965210423024, 0.02113424938210403, 0.005924265966984703, -0.05179857793569453, -0.0244630676591391, 0.01057802494355381, 0.0691716202360271, -0.02554792020429601, -0.09556309673821282, -0.01109440159507958, 0.002795432770742045, -0.03608009327148794, -0.009904492448746673, -0.004701473084555832, 0.06319402826809284, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.341422080029597, 2.641177760807674, 1.591936295305328, 0.005262995989819829, 0.05712969006064635, -0.05864522459107201, -0.06744950477663363, 0.1271503047471045, 0.2094260695099903, -0.3057531846245879, -0.04037395011724824, -0.02967432474543921, -0.09059292764118339, 0.2862509429106201, 0.1048474566793039, -0.04995247127231636, 0.1184570436074571, -0.03693842645635304, -0.08306079853122639, -0.04945597334476677, 0.2089393586111927, -0.2028924047636768, 0.01499186002602688, -0.1284371246539898, 0.04369426140288886, 0.001107629872198076, 0.002549707872053236, 0.1030701664382734, 0.002039801798898688, -0.1568979174791054, -0.1881891997896287, -0.1100556548512126, 0.06948313266853789, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.629047221760682, 3.633220763136687, 1.407673550411567, 0.0339033673171226, -0.0440877649168133, -0.1309350154964067, -0.05292438726218229, 0.1243733326688556, -0.09611554125353661, -0.01801557361391788, -0.1394535956650391, -0.04800585756898997, 0.2092817652566071, 0.1937706427881193, -0.0316202819028252, 0.08185853657036937, 0.008847184587817514, -0.05390183527194951, 0.0500718698457836, -0.06277970709250076, -0.07596180328240805, 0.04008159730086977, -0.0124923156950488, -0.01841127788787308, -0.008904456301771634, 0.02894837811492813, 0.08379155755674296, 0.038658741027786, -0.1013410184828959, -0.1363862936525638, -0.04609547972146647, -0.01057837708981668, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.387217368144026, 2.728722184696696, 1.424749145313089, -0.01601470906433309, -0.07512459959692018, -0.101188344036713, -0.003441549977747002, 0.06449001937611275, -0.03196276224536809, -0.1213336580505604, -0.009099515848609243, 0.02592480117875633, 0.1486227237292286, -0.00133150660633606, 0.001532054214242922, 0.02645122343613796, -0.01679939752946042, -0.006472860460697598, -0.1046070432475455, 0.002013525317899786, 0.007407876301716394, 0.03426066213629017, -0.07238247076893806, -0.002515176653231992, 0.04897218815805249, 0.04708726516482133, 0.01321800810118652, -0.0106268999083793, -0.03069342204089823, -0.02184119543907794, -0.02064865378574994, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.391700331330132, 3.10873390469995, -1.005261568422076, 0.07033689862768554, -0.01130536743091839, -0.2012624886845097, 0.1894945671077086, 0.07933080126112255, 0.075305885120587, 0.07607468793519609, -0.2915128232499786, -0.05359107726226209, -0.0167141837673077, -0.09941630815355153, -0.003665007998583435, -0.0299234883671778, -0.3018441136613871, 0.06749072049202193, -0.04549007934188216, -0.1272169539451481, -0.04604367173781738, -0.2292387384782596, 0.1104667012348132, 0.1037443269592589, 0.003358877836343526, -0.09748118788274351, 0.1190372958872585, 0.07594840982846023, -0.02118200682737069, 0.04505746778768557, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
-1.040421536922372, 2.090179325079248, 1.054133848546223, 0.1016135668098681, 0.1058908215655909, -0.1395436464948118, -0.0005843093291569908, 0.06576578822426617, -0.01954438761767608, 0.1340460322034086, -0.1079153000090457, 0.008558237228615952, 0.05442003661392834, -0.01948493437734833, 0.03204864462372618, 0.1899187419163797, 0.02651889944688371, 0.04143699821901681, 0.06155302263071664, 0.01361677379827648, 0.1864345911285355, -0.06057118267116334, -0.1079059175548875, -0.04926446070686546, 0.04450551753882914, -0.05461151981988153, -0.001886612406799282, 0.0366915411934175, -0.01306027401322379, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.017217105119183, 2.057885109672617, 1.717830826909398, 0.08026860099733665, -0.229664599517274, 0.08592953162584042, 0.07113555573927616, 0.1205711964241209, 0.06034607492733832, -0.1241960272046997, -0.02576402132560143, 0.100546870611732, -0.06664178017945782, 0.01551828302018928, 0.01075203286396453, 0.1648555544266941, -0.06180621835112392, 0.00904698466380283, -0.08614048879659969, 0.1377785336580386, -0.02458732719586508, -0.06527069420169623, -0.09919539357362668, -0.0371614630248139, -0.01956332576116701, -0.02610087976547117, -0.01636500296481732, 0.04290323999519369, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.73126542682124, 3.177372305562633, 1.069892746405991, -0.3373448837554265, 0.04914183280708707, 0.2222099960511715, 0.1000273467845214, 0.02495153836835336, -0.2988822213671982, 0.09070115558214502, 0.006890792477648951, -0.09774415864582336, 0.01733122536241725, -0.06539161565511441, 0.4180846670181008, -0.1348357420052125, -0.01486736897403777, -0.1707564509631871, 0.275788236310813, -0.03928409265616119, -0.1437568066337482, -0.159713937527644, -0.06441082113320754, -0.04838713636560495, 0.03935472474969938, -0.03034455737023176, 0.1502866267461562, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.9860332560182929, 2.457690638088044, 0.7585454975233088, -0.1838919851966072, -0.04382890538941454, -0.140552844264958, 0.09130507944212898, 0.001128260213507534, 0.06804648176327489, -0.03968302808799389, 0.02820745088433483, 0.03697025018803624, 0.06358690037762478, 0.08600302483044212, 0.04484515559448902, 0.08149618479019373, -0.008400294556984093, 0.1073491700579635, -0.06306885174166355, 0.02287587454966997, 0.01464609607772943, 0.03996572206258068, -0.03555777800156044, 0.01763122405260091, -0.01550330692085902, -0.021886735564678, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.8854567164260388, 3.24250422074227, -0.9209580946507911, 0.06835402983935927, 0.05640958121017869, 0.1025591786485125, -0.08943334614410312, -0.06811365067010508, 0.1109007141056824, -0.02177056785348171, 0.03875431100769289, 0.2363367275176182, -0.09273940436583276, 0.1105048998268415, 0.01324218642906586, 0.1204923454912281, 0.1300264818238509, -0.07418265989085007, -0.160155156583538, -0.03336518282890372, 0.03274839794300601, -0.02988917934624772, 0.0313012656489897, 0.02783060486964485, -0.03014058011457086, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.9647900465381067, 1.776310496646176, -1.171438051631132, 0.05140489138825926, 0.1299377347834004, 0.2201816497369831, 0.02994176320228855, 0.04558225788991847, 
-0.02505795321566107, -0.07354754829932028, -0.07990068462027206, 0.03705853127225793, -0.1892721965231484, -0.0414104999094905, -0.04365760163266805, 0.004082206348271171, -0.01640857151771855, -0.04917783956511107, 0.05677253037720179, -0.01538028417587433, -0.1057997510635607, -0.1262079630007641, -0.09383731776836683, 0.1119867440818691, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.059667715135062, 2.869444516413392, -0.768053436705612, -0.01982926570738622, -0.05898277292649027, 0.08032989813585734, 0.02430925291918696, -0.05541157112738762, 0.01387900032498306, 0.005672434099991189, 0.05365330433412881, -0.04355934883978447, -0.02337398496984892, -0.01686799971841361, 0.1422537288231567, -0.05666452598379437, -0.1164424129736735, -0.154242271156826, -0.04516863877735287, -0.01545725267339097, 0.05268933296374621, -0.01791535900714333, 0.08893369086218253, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.006637453830663, 1.684689844069783, 0.9123803878361401, 0.3537494043954367, -0.0934425457654232, 0.125950163184819, 0.0008929568569579045, -0.05163443135049815, -0.1076491218622526, -0.0453578935139585, -0.1495191189316716, 0.0001415820705124262, -0.05006551799497391, -0.128789667082152, 0.01100316880829279, 0.1037355563583926, 0.1230844198081173, -0.01355995466456035, -0.04069963982836227, -0.2068443754239467, -0.100425014495166, 0.005750041723483063, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.7847696566083294, 2.188950883712381, -0.6166420348084849, 0.05427293688357122, -0.09172120702478551, 0.01394828380275868, -0.04214925628047594, -0.2002409114863903, 0.2374547253440619, -0.07586414645695061, -0.02243126761233804, -0.09086994858618591, -0.06440645239833823, 0.02923240705539655, 0.1008328000734136, 0.0681346127852103, -0.03404818684407405, -0.03418035480719885, 0.02177303810889437, -0.08932854185081496, 0.05516897717457826, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.5961213393185251, 2.487671728233433, 0.9779431494823534, -0.08808252008539449, 0.07346845556012158, 0.03791477768403206, 0.1133987101884693, -0.04867168124335965, 0.1007529815412284, 0.07012067318801543, 0.04686393658752402, -0.007976030601315085, 0.009647274715336393, 0.04388047564013912, 0.1448956828911585, 0.09593976527370232, 0.01772056425810449, -0.01920422214800561, 0.07683802973941063, -0.09812807079543846, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.990432056210743, 2.774709587520062, -0.9402258848421475, 0.01395715665576621, 0.05464370856162152, 0.05969855937401768, -0.09840756262028037, 0.1283282686407038, 0.01393286908802421, 0.0953055258318351, 0.007548139187554635, -0.05906283649224034, -0.0006153285921519075, -0.05497497151651637, -0.008769303020395835, 0.05051213594154082, 0.09095589353294919, -0.006367749500199228, -0.04367472066475442, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.9556547348028801, 2.802801220763579, -1.059088183997589, -0.06046741386547052, -0.109550403372664, 0.1079992883617651, -0.1303209308383404, 0.005273660747274722, -0.103872667440789, -0.06600322769700166, 0.05217402045757749, 
0.08518450366824531, 0.09805210958705181, 0.003600709399162004, -0.05665622206864503, -0.1159528275969711, -0.04404155725184447, 0.02203210385909037, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.063540216854957, 1.744535368562572, 0.7905163601870416, 0.02656379893207305, -0.007035839152219695, 0.04085917849823771, -0.009245974538993987, 0.0249953312582957, -0.003223257434620091, 0.005723442767043741, -0.01704173254774269, -0.006825269002180425, 0.001391384124621517, 0.005712500889522121, 0.03850120618331365, 0.01404973463460888, -0.01209325063900243, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.788177934248775, 1.912200014248186, -0.7537507368298558, -0.0661262783848291, 0.05943707989564052, 0.02061524812146071, 0.02585592085441167, -0.02083197113611205, 0.003987942029650998, 0.02210665757083753, -0.02360486919891839, 0.005051227537696999, -0.0006754747654496297, 0.01013804196323632, 0.01058372156918393, -0.05401144478196152, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.8186713915600072, 1.708711700739236, 0.6332443934946002, 0.0347014944537665, -0.01300019502207845, 0.01605786260146845, -0.02637192977655575, -0.02304218343188766, 0.02296628976143508, -0.01424547177831386, -0.03972717940911979, -0.1167021874016301, 0.06633764648895558, -0.1293541599290803, 0.004742672723613311, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5157633499563469, 1.976352201317751, 0.6234503597941091, 0.01391296633390046, -0.07272602653366068, 0.0140570704220571, 0.01336752650183375, 0.05902767787103762, -0.08819062077742636, -0.02551451255772886, 0.1312142929003566, 0.01369744243814129, 0.06473360298498254, 0.01231850487143263, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.6533080656351744, 1.758802439425837, 0.4318871049206415, -0.003082957500133511, -0.09814169758462682, 0.04175539354758488, 0.0235289164184994, -0.01629543995009982, -0.05231016932343414, -0.03672204848733453, 0.009389336932342413, -0.06142381433250257, -0.01010858967311151, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.4817927293509213, 1.408851322535897, -0.7676055176021872, -0.04505523423060897, 0.01813562786331449, 0.03138728238085502, 0.0007373169379419269, 0.001847667170281647, -0.01640431370966648, 0.04696818974656805, -0.004393748298759055, -0.02848496648956977, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.7883892138973269, 1.746055173045302, 0.4846783430365095, -0.0003922243062844435, 0.01186867835550373, 0.01648380387048428, -0.03174672656471393, -0.03627511739090696, -0.004853687188112606, -0.06885884936290802, -0.004917829299265958, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5208363315616994, 2.037277859382022, 0.5232085356010402, 0.07175407188317283, -0.07019064479939288, -0.02385694223436976, 
-0.00819063541887864, 0.07192359009199149, -0.06656167507443485, -0.04927759893361527, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5819314677010843, 2.130167978098713, 0.5667361680393177, -0.01053764587057425, 0.01592751856907668, -0.01111604356574088, -0.004404610058468247, 0.03850545663289814, 0.04729794670040405, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.6488904808178829, 1.974426530381294, -0.2820471741701222, 0.02456494941736511, 0.03486280653024869, -0.01809831529884736, 0.08408521068644187, -0.006474401058818371, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.3312307167387429, 1.235934836863219, -0.2452582871525679, 0.01878427096720662, 0.04975840840483642, 0.05672553943361373, 0.01742942062643791, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.2654365696129733, 2.208256891929066, 0.3360201888498769, 0.06660725081463706, -0.01286437627536736, -0.002070231898763094, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.3639062481633207, 2.248588741393716, -0.1889980205327704, 0.05688507493685012, -0.04579504315826421, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.1624755988486198, 1.590085647221482, 0.1168533202179821, 0.005293082667091636, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.173908380119132, 1.450043378319349, 0.02765965035599782, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02984095753646376, 1.759629990218614, + }, + evWant: []complex128{ + -0.01716884621227793, + -0.1844331609734137, + 0.05200651487352193, + 0.1336851109127556, + 0.2020936631953806, + 0.3566470363060668, + 0.3627207699831158, + 0.4388555152488943, + 0.4776853636435153, + 0.5598821450074998, + 0.6249350549980947, + 0.6791310689291772, + 0.985877008147705 + 0.01929363300192029i, + 0.985877008147705 - 0.01929363300192029i, + 0.9908483217835712, + 1.011990761364073, + 1.130046345264462, + 1.323698071765709, + 1.348598229483672, + 1.363190626641638 + 0.05400660173350781i, + 1.363190626641638 - 0.05400660173350781i, + 1.632832316477256, + 1.646239548684271, + 1.742738908029794, + 1.763069014878969, + 1.789601126186348, + 1.945228042429256, + 1.946373262056993, + 1.997152389795002, + 2.261322781594966, + 2.286944100979884, + 2.447712649969029, + 2.557857506923634, + 2.608379034892128, + 2.653335615965549, + 2.675270309760236, + 2.964219802766918 + 0.01767482509567778i, + 2.964219802766918 - 0.01767482509567778i, + 3.014604817775139, + 3.158289371199042, + 3.311794215710077, + 3.389894197376214, + 3.553333074879877, + 3.641274442796651, + 3.857556223000175, + 4.045817381029103, + 4.330901939363569, + 
4.337313647767939, + 4.527400487637496, + 4.917229128467305, + 4.985609414964091, + 5.687686849958608, + 5.79422309012183, + 5.997813119506478, + 6.732426637899077, + 6.957609338485607, + 7.529842664573327, + 7.609108287806763, + 7.761261355516287, + 8.311941758006698, + 9.07053741884884, + 9.217944588000314, + }, + }, + { + // TOLS90 matrix from MatrixMarket, balanced and factorized into + // upper Hessenberg form in Octave. + // Eigenvalues computed by eig function in Octave. + // Dhseqr considers this matrix big (n > 75). + n: 90, + ilo: 0, + ihi: 89, + tol: 1e-12, + h: []float64{ + 0, 15.87097792952162, -0.08018778146407576, 1.923854288576926, 0.2308884192434597, 0.3394611053158814, -0.137843056509317, 0.08273319299962868, 0.09094485832017182, -0.1932984706702652, 0.1180502933247899, -0.2497600660136686, 0.05058238526630414, 0.1559958264692184, -0.1159258671363344, 0.009240657808439554, -0.07576925270220009, -0.08136563335139357, 0.03812188405626555, 0.08890970810264923, 0.03109600803532232, -0.01649595143487545, -0.03210884292964193, -0.06264912164116895, 0.02576130685189339, 0.07030469513274047, -0.03577429596420342, -0.06755584096587573, -0.01609449751643862, -0.034609250889499, -0.006417485131372289, -0.03097242984568944, 0.009636702832094541, 0.03557221482377193, 0.03197869740177413, 0.006433398911967483, -0.01135683279223704, 0.005555822920782794, 0.005082647305173025, 0.009385366191684759, 0.008128896608623586, -0.02296712755081439, -0.003777413820214802, 0.005123235117691023, -0.001849213959133196, -0.02340110883416673, 0.03713005378535863, 0.001732375396842803, -0.002750608017025007, -0.008067437745595896, 0.007330916864526544, -0.002616316716217023, -0.006297132815168344, -0.005559964495164493, -0.009996463242082429, 0.005115046921377182, 0.001491016327339477, -0.000957347151456719, 0.006745577006930539, -0.004544035710017501, -0.004411949528299773, -0.006799740101884459, 0.0005344071027975155, 0.0004690847920096184, -0.004718575501368472, -0.002540214169642457, 0.00357159293854215, -0.003314789809413553, -0.003402372051470523, 0.0006527769815598059, -0.002716170091878954, -0.001540321517093821, 0.001756589868588563, -0.001379260966541515, 2.681259838616508e-05, -0.004307760317855304, -0.001109087032949234, -0.0001659937040461595, -0.0002235436314482387, -0.00119569899683125, 0.0005623327348935116, -0.0006320070189143994, 0.001043573892153891, -0.0008750118344396425, 0.0005348270042878935, 0.0008273143853381587, -0.0006622544067919339, -0.0008492128694001077, -0.0007986665099336194, -0.0009015882205421384, + -12.61872336344661, -0.7232497785660141, 7.609159226285555, 0.3870426982100038, 5.924856640096555, 0.6102682886559223, -0.1693535996869482, -1.919018099340693, -0.2138609560373459, 0.8042623392399718, 0.8674124147325857, 1.331963567449537, -1.782290696158441, -0.8346237530841022, -1.552156412468403, -0.6151273144224881, -0.7391936693539152, 0.9311934770410842, 0.6166024938842418, -1.54644771695033, -1.776481997993627, -0.934164018650274, 1.955449870926906, -2.082936149197492, -0.222464503236819, 0.1331142802435182, -0.7633944660137448, -0.8176194198678164, 0.2746498608375185, -0.1119720547510174, -0.3046093297897633, 0.093708523663493, -0.1254384253653835, -0.3826156782538976, 0.9720657328245648, 0.4435611080311743, -0.002705519800257373, 0.4068502033494004, 0.8228026417475635, 1.898141183528268, -2.19419418629088, -2.26250233043472, 1.169369389476936, -2.320900757809857, 1.963641620262248, -0.9292511308866737, 1.20566753941007, -3.909359844030778, -1.745106869690986, 
-0.1674293744185332, 1.824325508201922, -0.611032518780414, -0.8172751651339325, -0.379367811436249, -1.118039132172379, -2.1156899896767, 0.6317312221182597, 1.675601832555065, -0.1958681401344015, 3.202902011360347, 0.2083144054490269, 1.651454396883902, -1.879950006164014, 1.114962429097019, -3.728120039552464, 2.115800549383747, 1.993994934513307, 1.036800346894651, 2.843012388647532, 2.665528597975866, -0.08443708961414848, -1.658265513220315, 2.825691629937317, 1.163068598831327, 1.032612155987785, -0.3464359914213002, -0.7374155713124153, -0.009825846309687306, -1.878310233217088, 1.038183449333641, 0.8683442149070821, 0.2096953654957043, 1.097533010763627, -1.40566207568186, 1.539718090250373, -0.3057191046244732, 0.9596910019319377, 0.4410690813618931, -0.01734977865916698, -0.7028527205475527, + 0, -21.14096588949502, -11.77071611766029, 91.38817973790069, 24.01994594814793, 7.139034668143506, -7.77684013545977, 11.8189112903465, 4.350214240774185, -13.41161300072837, 0.8429515510200203, -0.1960485971162483, -1.892239827255886, -2.481138864200454, -4.993335484535367, -1.462743953230215, -1.649744938703439, -3.306123402194819, 0.6299802354863298, 0.006312776908129815, 0.7029217322720615, 0.1969979096896496, 0.6484394805159026, 0.4612739081461551, 1.218623710778157, 1.937283920899595, -4.274393204137636, -1.417028232311232, 1.761975679896983, -1.207846701077455, -2.947009378633724, -1.111456928119195, 2.001483835367479, -0.3273985739357373, 1.27142153234808, 2.470518283440578, -0.497592748085565, -1.01776837453108, 0.1736631347448449, 0.2170284795451557, -1.102797734602059, -0.8983239378242805, 0.8376082516437703, -1.015605628895311, 1.888662040615523, -1.813946159105028, 3.745871277192266, -1.691058864813766, 0.5544744103796291, -0.95962769863539, 1.495178118153111, 0.1369976805376806, -3.134133785033962, -1.134766199832475, -2.142472928604951, 0.4360359463007911, 1.080773790484485, 0.9136687613609559, 1.421868877932455, -0.4939788885367615, 0.01579620756278606, -0.4041188740514539, -0.3370957888349073, 0.4032844546374829, -1.281049156842126, 1.334866305935072, -0.4288161314791398, -0.4076960827986253, 0.8975402184760907, 0.008483305617251051, -0.02300021991545602, -0.4779200297205075, 0.8640559542693361, 0.6740166979504152, 1.271173988705413, -0.125268363712917, -0.02783785192682384, -0.7952831181002197, -0.1834134462936435, -0.3033906534395994, -0.1842466257842833, 0.3294577790658342, 0.4265858101126394, -1.050551059782219, 0.5557053448408287, -0.07549099980078718, 0.4485085972651929, 0.604772326452039, 0.02668915155289681, -0.35958437456048, + 0, 0, -71.67241414253803, -12.6925239065222, -58.51026881403752, -9.923972440030834, -18.08337737101079, 23.73437311293314, 10.42624729020936, -7.39628437264571, 6.93334510855383, -16.20186028661058, 19.09098951338167, 7.231158491837533, 25.86051183536314, 4.692849852643402, 13.50562601772541, -23.8959783279092, 10.46795736800158, 26.42552249365325, 25.55147405605497, 30.98455252329583, -38.26426057779729, 41.72459242758727, 14.01187370572939, -9.227025126956866, 17.69341876066779, 17.7058942448478, -13.2182156659012, 3.273277215940703, 7.82096407961303, -6.540134857551783, 5.302726399754003, 21.6168229759513, -35.45773268931614, -39.5771763658349, 16.91907564224343, 16.55810765769962, 10.84170890675409, 4.539182245882841, 5.966139065523301, 10.43624249585104, -6.306944364017387, 25.11309378553394, -23.98463112536712, -3.253554932408899, -26.20327820737005, 34.95346757431584, -22.06848426488626, 0.3787905650745691, 12.79793983153946, 
-15.15662916810503, 30.10820985389998, 17.18239253486946, 26.44238280127885, -17.53916820777098, -21.59984930800645, -20.89069843604591, -10.62614963929214, -4.16861219455282, -15.8281632392319, -15.45413424684084, 7.28126264287692, -17.30978445489622, 25.83878482866339, -18.20409699627451, 1.341279973912325, 6.060701390094514, -0.9930555858249585, -0.4770067839263625, -8.050723322366107, 15.00633993006825, -10.12301020599794, -15.02267685265745, -28.87546819977945, -4.887384695963816, 3.812851132430744, 19.10682316350844, 7.478580657962908, -7.486104775378393, 8.24517564743228, -7.51905558097683, -6.644004835040043, 16.04319982377311, -12.24015636297471, -0.4152718733052826, -15.37260980717165, -14.36579932723399, -1.30513875795791, 5.356894027365968, + 0, 0, 0, 88.73441744471468, -9.667604704172753, -22.84943872004879, 5.044285909895449, -2.76004875036668, 0.3050715430090338, 2.281066203460998, -3.081685045157843, 5.563173468318147, 1.196920824152984, 0.7508125732458136, 3.261490552895722, -0.1915889730517104, 2.178042983750966, -5.719722606964433, -0.808717831824074, 3.567750315780947, 4.405620166473985, 8.068713509838501, -3.974147763943233, 4.024558504483837, 1.343369353638846, -1.554164148226279, 1.397410693322202, 1.549071510171349, -0.6861822032616559, -0.03144220974090741, -1.503375319686877, -1.213563509509453, 1.485658175240746, 1.102628808733887, -2.836353822887371, -2.651198571403894, 1.944770277514162, 1.800720538730825, 2.925044559436877, 2.489394773406274, -3.99962439548742, -1.365153890760246, 1.729186611640552, 0.90732580595284, 0.02970965138899272, -9.802670261550649, -3.377885239589956, 3.311681996737421, -2.102890043550365, -2.752571666784161, 7.725320034635699, -0.8099349955071451, 0.5680350525364315, 2.461090824551101, 1.671002962178604, -4.651796111745832, -3.019627268376409, -1.456848301794733, 0.6765376227892419, -2.208873565004413, -2.755468168868538, -2.723938546851782, 1.844529360647301, -1.962817053181678, 2.77164872022158, -0.6312645341808628, 0.9959732581947718, -0.01141276076056162, -1.047045912982139, 0.9659655272370172, -1.069371622104567, -0.564318267120407, -0.9606489624515147, -0.3731646049074267, -0.4361324066751411, -1.228953081985217, 2.112718903093324, 3.774673462225595, 2.038526286226191, -2.037134562294253, -0.6272498215703869, -0.9130826952549337, -0.5356753186936942, 1.739354952287502, -2.607676632661601, 1.167128690015535, -3.643945638175477, -2.247788469333459, 0.4447578257714688, 0.8814188227164721, + 0, 0, 0, 0, 35.22951028609684, -22.61691132174376, 97.10536643650998, -3.714742139430975, 10.06500336745724, -16.35785983037913, 8.78437693523264, 1.985132974393485, -1.609800726068471, 10.55631691645605, 9.224290477011534, 4.722637143581605, 4.125150731568718, -5.396683914299057, 6.364591301561367, 8.24526687743292, 4.911724566872895, 6.542687463438916, -9.348997687795265, 14.18011100433374, 5.720905431367072, -1.582673805208176, 3.789638114574857, 6.440267872588953, -4.901278147004489, 0.421175984045115, 3.797008789173352, 0.2579925025997163, -1.093437933585557, -1.126991905684307, -4.4057666178694, -6.4297958780832, 4.918610437583255, 4.893900733343752, 7.627382484342153, 6.943539836193181, -10.22700285291435, -5.21717606135954, 5.635670539663245, -0.61979670671329, 1.025327494294112, -29.98298952616265, -10.87852244502099, 6.724183768213139, -5.415447368170939, -9.131548344141406, 25.08685243604318, -0.5163094225162969, 5.436141320006754, 5.254969244047728, 5.189340308692745, -14.05566775803058, -3.790558233978546, -2.867451571946585, 
-2.265074057881479, -13.27098968249704, -6.419573660857155, -2.44809433119093, 5.262803237338727, -6.396156300345669, 12.9332727436708, -2.828158861124294, 6.494893307712784, -4.900718840392307, -12.17623988734707, 2.132680032576169, -4.239391092443586, -4.723934051879516, -6.984654958110764, -2.787156581230434, 4.744573069448925, -7.311973630803457, 14.86842763617212, 11.70461213488476, 10.92665646005423, -8.812367256417245, -1.725823238463376, -0.599065820210566, -2.268141253505947, 2.588966312561057, -7.560432899806777, 4.711500915754516, -11.63409451724343, -3.784667325499409, 0.9845255581998278, 2.917504056968942, + 0, 0, 0, 0, 0, -116.0630192746665, 5.887300060633501, -55.36418027265255, -2.470006763999332, 13.03531089357281, 0.1172864444746298, 2.336301687054243, 4.34928920056458, 9.106206444726951, -0.8318917014102636, 2.753642271658936, -4.563539972366368, 5.616802475300401, -0.4836767449615566, -6.113950704235537, -7.384788928153566, -6.850835642415652, 4.436028327150289, -4.972441466665182, -5.074617872073305, 2.249115729647296, -1.27635805087233, -1.815933433231301, 2.59978300016936, -0.5676863289525813, 1.704036354039219, 1.966949252223741, -2.742379099660473, -5.759265103182953, 6.226978264278319, 6.392620681348136, -0.2885756166938758, -0.2986490248004344, 5.021918870709579, 7.495865991102269, -15.19741578062141, -10.52747112766237, 9.21669911214595, -12.00882240945578, 9.449065100345695, -22.03792558203701, -5.042603937347324, -5.034444931509023, 3.887859743790752, -4.92423649287837, 15.14459404442214, 4.709447859501895, -10.68403398232012, 2.785222602001631, -3.955139727592788, -10.90921547327889, -3.836568621330626, 5.465232651370832, 4.568243480001632, -2.511523602459496, -0.9621054122709324, -0.6044291916789493, 6.467023010391031, 2.662510020284552, -3.203951737691022, 7.315143747841559, 0.2986491524564562, 0.04345879316587933, -0.3120127984400304, 4.715818739144954, -0.3713148742857386, -7.592585985711748, 1.251675665881772, 5.563320889185214, 8.934186230540199, -1.034540314919822, 1.499762588438347, 3.648805772901489, 1.867765342642769, -3.856064394520797, -4.257325323439929, 0.9065282321682452, 0.9958862297862445, -2.214139637816267, -1.516486450508368, 2.326410461051852, -3.227358821350026, -0.4749483061012461, 2.179821376672413, 0.1114550663371978, + 0, 0, 0, 0, 0, 0, 53.90645393584658, -20.01147603384238, 68.25945706992877, -2.001388852130252, -0.6303875033830074, 7.681723436887633, -2.793396522752484, -3.849646005163192, 7.47502807954784, -2.524396627852584, -1.112160063163965, -0.4457759133018216, -5.425351770541893, -7.213840725579055, -0.6234582007920942, -0.3045445372121888, -1.838748235944519, -3.05429610717454, -5.218058811930899, -0.3089178689448414, -0.07503528649101057, 0.1630591210001797, 2.58742687436071, 0.8589336965724809, 0.3026886986297957, 0.0616540381895139, -1.743768523575526, 2.75507802571559, -2.587808530052548, -1.25512695563184, 3.800814529165174, 3.132593897658502, 9.751967320860086, 12.09676139406092, -18.96514427687867, -10.33529268364038, 9.423861340153673, -8.118494925333524, 6.455831882458702, -13.89851030432216, -5.253096745704963, 1.482474561585576, -0.7990859267629726, -0.1919859569182034, 14.99486076662231, 0.8403897566592956, -9.631758825484061, 7.546859017133015, 1.761650219269993, -14.8002789790671, -12.03184495751378, 1.278761976352118, -1.043958743156598, -6.425946785515577, -5.130193263963021, -8.943747092721882, 6.884559516868113, 0.02054820847870697, 1.622114564484586, 8.27790706580309, -0.4201537658318323, 
+ … (remaining rows of the numeric coefficient table added by this hunk are elided here: each diff line is a comma-separated row of floating-point values, and each successive row begins with one more leading zero than the previous one, giving the table a triangular layout)
-0.03130055103817821, 0.03533492988517593, 0.01195601678346641, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.097158496205909, -16.73710946124244, -9.426989245602673, -0.06261182983602319, -0.1059035148370443, 0.05076346286898199, 0.003678172466163399, -0.0454660155043527, 0.02462054291284395, -0.02110890517636086, 0.008241203910568698, 0.02390589384329424, 0.01198364473687612, -0.02957542631061049, 0.002236629456648171, -0.02809381563903465, 0.001689229093064753, -0.007600764285513168, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -9.412003466484951, -31.65984264675678, -0.1254812244514544, -0.09114636575550933, -0.0366463804259801, -0.03322040198025862, -0.01209970867041294, -0.01021849930537397, 0.02256075215944923, 0.01825223707293363, 0.003993042619294536, -0.008507286595193135, -0.02427249385479496, 0.03381794787948238, -0.02914842372731508, 0.007082873675038417, -0.006655560097220788, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.27548187849782, -16.01246939790233, 5.644600469890476, -0.04244915968443123, -0.0371436610514518, 0.0530212677895391, 0.008626194479801664, -0.02358451461610269, 0.007412437106157236, -0.04628615223361372, 0.0826909370927158, -0.01312239177378535, 0.05019812567992653, 0.05643602731883488, -0.03169977188973144, -0.02409525389114796, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.831745814038495, -20.41725582289098, -1.583013759260222, -0.02098081085371714, -0.03289988933600374, -0.006011309758863401, 0.03459650459981792, 0.01348477813223562, -0.02336540423557105, -0.003969872715414671, -0.05998169351462899, 0.05263821645435009, -0.0144354245014559, -0.005674777729486418, -0.03142277004383006, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.562800616132969, -36.13433050173559, 0.2585654510438173, -0.02913169721040965, 0.002592481347938085, 0.004329834978588065, 0.06325089767860387, 0.01951445115945309, -0.03070674283592272, -0.02183384192678349, 0.009849968434859515, -0.02891816499635688, 0.002667591177598971, -0.02361460878909238, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.2286035973327459, -15.02261004686781, 5.184122379409034, -0.0264079721355016, -0.007786216137000334, -0.008663526410932022, -0.005309693290799501, -0.006680538291880966, 0.001397532516833339, -0.01528554327177548, -0.01182046150296639, 0.005955666147884051, 0.003523919303238692, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.214570242697843, -21.4621144179193, -1.555874989179182, -0.001447535808513384, 0.03124103143611023, 0.01560612912433155, 0.01254222023040845, -0.0006565581414721336, -0.005837445535672976, 0.003328905310192099, -0.003109407401498421, -0.001359993487917328, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1.575215080762542, -36.12196181704586, -0.009581763987488548, 0.02181190823948241, 0.004249343259232833, 0.008866640947567769, -0.008827428692463307, 0.0111903702615688, -0.008259743446844802, 0.005994103918828306, 0.02884522108807095, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -0.02332478841374811, -20.31760288431249, 5.902175782312199, 0.01411291075117598, -0.0401626029282366, -0.04333034577745157, 0.006563645334078613, -0.02686027801335236, 1.82653938945055e-05, -0.009451944888619017, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.843446424852287, -17.22694391469142, 4.615021811975704, 0.03880112444568462, 0.01156878860697344, -0.02011735633875036, 0.007254465185871957, 0.006976302646061595, 0.01061407563995378, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4.636349195088934, -35.03142250661902, 0.03540637402307944, 0.006380325215592348, -0.01715112535803765, -0.005381045082520479, -0.02796704437114206, -0.01837431744975316, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03325953074641371, -17.29072432858197, 5.995094557622266, -0.004356649575832386, 0.01324648360020382, -0.002576747161993423, -0.0164552533090047, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.976772821535572, -18.99788030681546, 0.01396461545715264, -0.007601183590626498, 0.005853929901760483, 0.0201199907208218, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.002983340164191267, -20.0342372815637, -5.741448414658583, -0.01215113512395332, -0.00662723431795657, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -5.752040939724589, 
-16.25515722920224, -0.00713497387655803, -0.004958024634924642, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0004916632598946193, -12.09805746213948, 0.000198676020167099, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0001765653829615133, -12.09823117436413, + }, + evWant: []complex128{ + -0.2513649571095319 + 26.5196181977274i, + -0.2513649571095319 - 26.5196181977274i, + -0.3323427794627154 + 14.12147023570828i, + -0.3323427794627154 - 14.12147023570828i, + -0.3885760957072044 + 34.01549575140551i, + -0.3885760957072044 - 34.01549575140551i, + -0.4938394681672298 + 19.28755724237434i, + -0.4938394681672298 - 19.28755724237434i, + -0.5354723308245726 + 44.07811563049843i, + -0.5354723308245726 - 44.07811563049843i, + -0.8831210193505878 + 40.64617136580856i, + -0.8831210193505878 - 40.64617136580856i, + -1.063691744877652 + 54.90113796777709i, + -1.063691744877652 - 54.90113796777709i, + -1.270999250073418 + 55.92169849615225i, + -1.270999250073418 - 55.92169849615225i, + -1.641962795944021 + 124.0727533257146i, + -1.641962795944021 - 124.0727533257146i, + -1.866212165366329 + 67.51198073484791i, + -1.866212165366329 - 67.51198073484791i, + -2.66562659457705 + 102.4818444744388i, + -2.66562659457705 - 102.4818444744388i, + -2.677285588970379 + 109.3976583973073i, + -2.677285588970379 - 109.3976583973073i, + -3.592328373007462 + 116.2450441105874i, + -3.592328373007462 - 116.2450441105874i, + -3.658587455178502 + 105.6223525143237i, + -3.658587455178502 - 105.6223525143237i, + -3.738335632746128 + 95.41374995650723i, + -3.738335632746128 - 95.41374995650723i, + -4.05659124602542 + 85.08673687146815i, + -4.05659124602542 - 85.08673687146815i, + -11.7967401639067 + 0.03421683561959875i, + -11.7967401639067 - 0.03421683561959875i, + -12.06591356324058 + 0.03010079020722811i, + -12.06591356324058 - 0.03010079020722811i, + -12.08354870692898, + -12.09435136773809 + 0.0007639916648241464i, + -12.09435136773809 - 0.0007639916648241464i, + -12.09785209014901 + 0.02991559300476182i, + -12.09785209014901 - 0.02991559300476182i, + -12.09806212746318, + -12.09850417485291, + -12.09864274565634, + -12.10160612637007 + 0.004700099158908263i, + -12.10160612637007 - 0.004700099158908263i, + -12.16873692768957 + 0.02446545858852495i, + -12.16873692768957 - 0.02446545858852495i, + -12.23495190373921 + 0.1453184367031505i, + -12.23495190373921 - 0.1453184367031505i, + -13.54778407074696 + 141.1839768257274i, + -13.54778407074696 - 141.1839768257274i, + -15.38790969472481 + 114.0584849752244i, + -15.38790969472481 - 114.0584849752244i, + -23.60369882806601 + 0.2464158068361128i, + -23.60369882806601 - 0.2464158068361128i, + -24.04314341198735 + 0.3315626206724418i, + -24.04314341198735 - 0.3315626206724418i, + -24.12753399862331 + 0.1634093642552133i, + -24.12753399862331 - 0.1634093642552133i, + -24.18103712080912, + -24.18908889661277, + -24.19357072143492, + -24.19567681157636, + -24.20660003986661, + -24.228086198612 + 0.0220672004906673i, + -24.228086198612 - 0.0220672004906673i, + -24.30677145425284 + 0.1813999700804981i, + -24.30677145425284 - 0.1813999700804981i, + 
-25.23862373866253 + 0.8733279005402655i, + -25.23862373866253 - 0.8733279005402655i, + -29.42427841595277 + 1.666886570068163i, + -29.42427841595277 - 1.666886570068163i, + -33.94959271558832, + -35.51958918286584, + -36.20666045708104 + 0.1660921702962803i, + -36.20666045708104 - 0.1660921702962803i, + -36.25710351845593 + 0.01052251476620707i, + -36.25710351845593 - 0.01052251476620707i, + -36.28714605125749, + -36.29431583135849, + -36.29711199023137, + -36.31304183793699, + -36.32232768169418, + -36.37871120972082 + 0.1557647672076243i, + -36.37871120972082 - 0.1557647672076243i, + -36.47694233727385 + 0.3738707259908102i, + -36.47694233727385 - 0.3738707259908102i, + -36.66483183150206, + -37.75937189360096, + }, + }, + { + // TUB100 matrix from MatrixMarket, balanced and factorized into + // upper Hessenberg form in Octave, and embedded into a 104×104 + // diagonal matrix to test with ilo != 0 and ihi != n-1. + // Eigenvalues computed by eig function in Octave. + // Dhseqr considers this matrix big (n > 75). + n: 104, + ilo: 2, + ihi: 101, + tol: 1e-10, + h: []float64{ + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, -1063.761, -960.3970563202379, -0.0006079108185996773, -0.009201940691316367, 0.002442836240270196, 0.01550988534551903, -0.005482455008401117, -0.02216870959395947, 0.009747347615349792, 0.02941831540681939, -0.01530136178573213, -0.03750794345694437, 0.02225359194453896, 0.046705617938318, -0.0307619752542162, -0.05730825585675074, 0.04103856105936231, 0.06965250072575992, -0.05335624663776811, -0.08412633500439758, 0.06805680196063389, 0.1011811513254484, -0.08555957035531082, -0.1213434441393789, 0.106369291926541, 0.1452243676184516, -0.1310809449650698, 0.1735240650736495, 0.1603769717276725, 0.2070245946752049, -0.195009056622052, 0.2465616197175694, 0.2357520481176992, 0.2929587463757468, -0.2833105069304027, 0.3469005841220431, 0.3381503031304328, 0.4087132445918312, -0.4002236126054999, 0.4780213717840138, 0.4685650013761221, 0.5532750093152425, -0.5407842250274101, 0.6312148133425258, 0.6125866029330725, 0.7064838330293224, -0.6776224584955323, 0.7717672671130079, 0.728093737707456, 0.4477228711606641, -1.832801684441419, 1.950252997191289, 0.804390452586547, 0.4832557718849113, 0.3060066166102118, 0.2756451553030591, 0.1719798268352208, 0.1844690914416081, 0.1028522851629666, 0.1338501876768783, 0.05922545829023376, 0.1041210893998114, 0.0294781768570502, 0.08738772703908808, 0.008665658061761334, 0.07957418017688388, -0.005841802748350547, 0.07832978350913038, -0.01561691241238414, 0.08229737118429636, -0.02164114160477788, 0.09076446627208183, -0.02452178964005379, -0.1034535010713481, 0.02461324557496748, 0.1203839900069163, -0.02209524508102444, 0.1417838483570761, -0.01702626635186736, 0.1680394493064578, 0.009377435518891378, -0.1996795871970759, -0.0009558646643977288, 0.2373933824702814, -0.01416583764558031, 0.2820880046959302, 0.03057782300299389, -0.3349961435755344, 
0.05073634798692103, -0.3978562481735693, -0.0755607412701291, -0.4732063781751494, 0.1066126139010085, -0.5648764073871733, 0.1465759215609417, 0.67885880956596, 0.2001993093624898, -0.8249735752515708, 0.2762967421832379, -7.383366696690373, 0, 0, + 0, 0, -504.7000323579025, -960.7839130908893, 455.6939154064305, -0.0005890239049151091, 0.01222453307167895, 0.002380254112046573, -0.0183767018575125, -0.005347164384529943, 0.02487432959226112, 0.009507444863507727, -0.03195485414878263, -0.0149219770912947, 0.03986517031337426, 0.02169664773526838, -0.04887103316029169, -0.02998576954964119, 0.05926702273427436, 0.03999696952048423, -0.07138712464416012, -0.05199744338214077, 0.08561585141999975, 0.06632116378894419, -0.1023995009673016, -0.08337613594660015, 0.1222563158317329, 0.1036500341236005, -0.145783656379474, 0.1277114126111218, 0.173658096672846, 0.1562008437363875, -0.2066214365478053, 0.1898032066448404, 0.2454413266872829, 0.2291867817273585, -0.290828708487696, 0.2748879884412016, 0.343286767980235, 0.327114242278849, -0.4028620578438859, 0.3854382883926989, 0.4687761898935385, 0.4483798648521518, -0.5389586517254393, 0.5129393074648648, 0.6095951621601606, 0.5742741750224843, -0.6749584326212456, 0.6258664262549096, 0.7279030750048989, 0.361160520701244, -1.817820120247178, 1.493677993805868, 0.9093218408940651, 0.3437577565805193, 0.362630116371123, 0.182491216989941, 0.221714417427429, 0.1064242506044702, 0.1554403491948274, 0.05893517713417926, 0.1191987572531298, 0.02593630450949156, 0.09940153443435777, 0.001987949044964486, 0.0900967012871452, -0.01567758628187121, 0.08812023104053984, -0.02871634215129631, 0.09168600701701864, -0.03819855166921124, 0.09983277639781361, -0.04483241285443643, 0.112135778661831, 0.04908165568011769, -0.1285288605462619, -0.05123906116349945, 0.1491912175066926, -0.05147890107583802, 0.1744806002258908, -0.04989507359069757, -0.2049040909139878, 0.04652399206028981, -0.2411228487302952, -0.04134974220480257, -0.2839940475275046, -0.03428683940013912, 0.3346573639346669, 0.02513376678028306, 0.3946840836316033, 0.01348469011538719, -0.466320753537143, -0.00142779450121434, 0.5528931223683594, -0.0210105793668851, 0.6595061978534259, 0.04783831497380739, 0.7943401758885881, -0.08674091416897045, 7.282263793289921, 1.964402124200457, 0, 0, + 0, 0, 0, 504.7001065567226, -960.8071533894852, -455.6899665187256, -0.0006381542544069418, -0.01696123848664222, 0.002571421839873352, 0.02356372357492877, -0.005765057328779014, -0.03054576659236009, 0.01023675166561691, 0.03816637411322649, -0.01605159633648196, -0.04669530536153424, 0.02332395960802052, 0.05642332124514469, -0.03222102273610906, -0.06767293164234266, 0.04296780496196387, 0.08080966049331523, -0.05585344634069236, -0.09625362412257953, 0.07123822293842015, 0.1144906642960898, -0.08955981060276022, -0.1360815254382584, 0.1113369911164654, -0.1616662801387938, -0.1371665625346616, -0.1919585350680096, 0.1677064944291291, -0.2277205865868329, -0.2036342514104135, -0.2697050947227386, 0.2455629056115962, -0.3185418058062836, -0.2938904117765113, -0.3745411973588626, 0.3485540671441012, -0.4373871729832214, -0.4086704182323145, -0.5057124011628437, 0.4720841567744535, -0.5766169045256523, -0.5349437472430429, -0.6453152755169759, 0.5915742979267851, -0.705252589538002, -0.6350313138183948, -0.4095510871717013, 1.601644767644169, -1.801430257336091, -0.6783540858316911, -0.4521876448242478, -0.254377814677483, -0.2609137525907964, -0.1390187634586346, -0.1780644458672355, 
-0.07811098364662258, -0.1332228921784263, -0.03844450844393983, -0.1080065082071247, -0.01031758532230231, -0.09498853030853376, 0.01036100507490249, -0.09032395086444481, 0.02576859065961766, -0.09180684083159034, 0.03723131440198095, -0.09818648544931335, 0.04561209756712017, -0.1088355401035232, 0.05148345180798951, 0.1235465165704314, -0.05522144380747635, -0.1423972092039827, 0.05706613305793608, -0.1656652164809836, 0.0571654881872712, -0.1937812736972878, -0.05560491141385822, 0.2273160612316538, -0.05242306351060358, -0.2670005088314752, -0.04760687919206397, -0.3137829156332985, 0.04106438412981419, 0.3689347453356511, 0.03256097086263596, 0.4342239659065471, -0.02160937448771497, 0.5121963228132351, 0.007270440886606282, 0.6066421962514198, -0.01221319300610655, -0.7234154012137514, -0.04013586770219741, 7.505393492740379, 2.207569919375577, -2.286495948142137, 0, 0, + 0, 0, 0, 0, -504.7004946877645, -960.8313859426444, 455.6868630884425, -0.0004564668894400728, 0.01953047220881932, 0.002110185308335535, -0.02552492771185354, -0.004910008054476974, 0.03185603230051105, 0.008867203935794236, -0.03876493903116766, -0.0140372793617222, 0.04650258449956125, 0.02052143233325077, -0.05533916645715601, -0.02846939294478852, 0.06557394899483331, 0.03808355296447862, -0.07754536439486578, -0.04962400720722794, 0.09164110934556451, 0.06341371783532462, -0.1083072609262454, -0.07984241792495486, 0.1280548395158984, -0.09936692302911229, -0.1514604665622404, -0.1225031219103813, 0.1791553196119899, -0.1498024404391789, -0.2117929820588376, -0.181800940883612, 0.2499814001970585, -0.2189235487704594, -0.2941578702525302, -0.2613208334101781, 0.3443825148816139, -0.3086165449914435, -0.4000320253108334, -0.3595628891688978, 0.4594100673703709, -0.4116578643958037, -0.519368340488319, -0.4608838085460648, 0.5751583010135386, -0.5018561564823436, -0.6208288964715706, -0.2890646136834312, 1.547039886853192, -1.173755403489903, -0.7985207315322997, -0.2625825083083085, -0.321674668942854, -0.1352159819750655, -0.1999732725283456, -0.07366734029347159, -0.1440615953055945, -0.0338692953389332, -0.1147504606082463, -0.005029712242208511, -0.1000175433858401, 0.01695421717090311, -0.09456233266164106, 0.03415869939464086, -0.09556433616678824, 0.04784697925863496, -0.101446550453596, 0.05887259823103022, -0.1113844111571908, 0.06784319765312669, -0.1250437127185277, -0.07520036620749675, 0.1424161499432856, 0.08126890147440424, -0.1637146453439622, 0.08629375258215478, -0.189312804562382, 0.09046906112313131, 0.2197194565251569, -0.09395994411067805, 0.2555854707181692, 0.09691288686658349, 0.2977431307518454, 0.09945154948260918, -0.3472856237053809, -0.1016514559138057, -0.4057014604005721, -0.1034872918659082, 0.475092530300298, -0.1047384672449806, -0.5585328441816623, -0.1048045105961936, -0.6606855376279177, 0.1023541847811403, -7.645111917964669, -2.444190547643892, 3.259474730743878, -1.113400990501543, 0, 0, + 0, 0, 0, 0, 0, 504.7005483880675, -960.8563426765877, -455.6810385506663, -0.0006211890719464708, -0.02717449361052982, 0.002624545711740859, 0.03409357098285602, -0.005942159106883158, -0.04143715413018739, 0.01059135423887447, 0.04948858776950704, -0.0166413313054562, -0.05854418379382578, 0.0242148610679564, 0.06892421088657538, -0.03349146754772453, -0.0809841818387797, 0.04471193925388908, 0.09512623948475284, -0.05818359115254906, -0.1118100512572915, 0.07428496523273895, 0.1315618312735884, -0.09346828354041023, 0.1549790152282817, 0.1162558426750438, 0.1827256545244234, 
-0.1432240256832661, 0.2155105496963288, 0.1749650364632709, 0.254035038834301, -0.212010638405861, 0.2988909563929731, 0.2546958843850197, 0.3503831721602568, -0.3029377534844611, 0.4082512041088586, 0.3559113648605111, 0.471283605276577, -0.411645725224325, 0.5368790186329719, 0.4666463388955495, 0.6007202940611962, -0.5157903324539884, 0.6568675698020393, 0.5528415071271306, 0.3819646735423662, -1.398313345247025, 1.702130138980118, 0.5594893184855516, 0.4345731607149444, 0.2047273101100171, 0.2544540474780775, 0.1063741954770327, 0.1778805766776483, 0.05255231548952451, 0.137900569051342, 0.01589144268877343, 0.1168704015342846, -0.01147530402843068, 0.1075993573348887, -0.03281602394973369, 0.1063490908368691, -0.04987312975903418, 0.1109852985003045, -0.06374396058252166, 0.1203212709282059, -0.07519208505179932, 0.1337915946504525, -0.08477768059074928, -0.1512472309045833, 0.09292245189830053, 0.1728188948893017, -0.09995295179709891, 0.1988310587279109, -0.1061338258071349, 0.2297560668874317, 0.1116956612897333, -0.2662028365209533, 0.1168534044651449, 0.3089378582466848, 0.1218148638037771, 0.3589424141807773, -0.1267685950073432, -0.417514018459675, -0.1318530493084198, -0.4864325194506035, 0.1370935903164596, -0.5682256141066142, -0.1422822434012505, -0.666604844323565, -0.1467513256218947, 7.839832839255147, -2.591992660256728, 3.08252939024389, -1.472235565518307, 1.200611269528463, 0, 0, + 0, 0, 0, 0, 0, 0, -504.7015918274936, -960.8824172487933, 455.6786628322481, -0.0001386040108547678, 0.02825899575001721, 0.00148000990419925, -0.03396816329492456, -0.003913391729580158, 0.03999719012865367, 0.007442680878789934, -0.04659090322133554, -0.0121162968680667, 0.05400401596298283, 0.01802786748313866, -0.06251037884950617, -0.02531818062112227, 0.07241227284910066, 0.03417801432480141, -0.0840495440103118, -0.04485119411434383, 0.09780784101480898, 0.05763670862206309, -0.1141246897988619, 0.07288798740446953, 0.1334907230535002, 0.09100549877450563, -0.1564413552584702, 0.1124168465460749, 0.1835312438634551, 0.1375347555217266, -0.2152794341013849, 0.1666789233629258, 0.2520679325404323, 0.1999435400148238, -0.2939734797154126, 0.2369931492869313, 0.3405172515888476, 0.2767850762901711, -0.390345045347925, 0.3172634202969561, 0.4409132592457857, 0.3551552329611432, -0.4883580204443008, 0.3861049626417496, 0.5278024954606876, 0.221653617736939, -1.310709133012781, 0.8704550092422116, 0.7079030635461785, 0.1843303003648327, 0.289142189894577, 0.0890031466634482, 0.183755046796533, 0.04093279509465775, 0.1369845466667406, 0.008027106857080303, 0.1140671851302532, -0.01733485053642101, 0.104231064513174, -0.03796088669959809, 0.1026821086919593, -0.05525696194664861, 0.1068620954451136, -0.07011029660696497, 0.115354075966287, -0.08317464933480107, 0.1274397517866654, -0.09497453954477812, 0.1428557674294213, 0.1059485191998991, -0.1616373931580192, -0.116473125434708, 0.184018627644066, -0.1268840153469689, 0.2103748991036934, -0.1374978332735491, -0.2412002758106563, 0.1486321821340762, -0.2771135831612571, -0.1606234825346388, -0.318894700017657, -0.173836022567292, 0.3675540114045308, 0.1886613027997724, 0.4244484685316806, 0.2055082541208365, -0.4914687718702396, 0.224768184994539, 0.571342364466508, 0.246749172743278, 7.942734281408671, -2.790996821413193, -3.416416824440133, 1.502698673759963, -2.74025012280437, 0.4374074524376139, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 504.7014268866499, -960.9091609078547, -455.6702009059838, -0.0004715858695197065, -0.03982840046737102, 
0.00242224023844466, 0.04693804869973257, -0.005712413463266942, -0.05453060690126599, 0.01036142963375495, 0.06291484964449161, -0.0164440331698878, -0.07241520054585564, 0.02409153161862073, 0.08338313108173702, -0.03349426180113407, -0.09620848027332857, 0.04490493324477595, 0.1113301550654768, -0.0586415214773373, -0.1292450786759426, 0.07508828443695986, -0.150513154673022, -0.09469143119899404, -0.1757538991751067, 0.1179438613939318, -0.2056275754842778, -0.1453500537954308, -0.2407890904812533, 0.1773573065390127, -0.2817971004678554, -0.2142337181273538, -0.3289552292807281, 0.255870859960439, -0.3820621733814443, -0.3014962025572454, -0.4400643963200938, 0.34931588173307, -0.5006585522123436, -0.3961850233060456, -0.5599910362173445, 0.4375268333475068, -0.612726531008857, -0.4678122672345172, -0.3569120201451431, 1.18831598199679, -1.617583791352987, -0.4321697836882517, -0.4219085060089673, -0.1510420456331972, -0.25146678538168, -0.07059891849129137, -0.1807609333405698, -0.0240329511553917, -0.1456531311906457, 0.009769195529775603, -0.1290384618111746, 0.03669700570302369, -0.1238682627899927, 0.05912761689067447, -0.1264561589974369, 0.07833483431516959, -0.1347055607275304, 0.0951733797931347, -0.1474724563406544, 0.1103088335611947, -0.1642393157936116, 0.1242996284536155, 0.1849045503710836, -0.1376315559768252, -0.2096404850603622, 0.150739321415534, -0.2388041370692126, 0.1640310219820809, -0.2728910081209138, -0.1779123181778702, 0.3125232856726328, -0.1928113592711771, -0.3584705110456833, -0.2091969136508766, -0.4117030466307595, 0.2275912605111247, 0.4734858653437208, 0.2485748286117511, 0.5455291692123926, -0.272779071272836, 0.6302263498990262, 0.3008643534306304, 8.20707478604357, 2.932590880343228, 3.566301858755951, 1.591737361864661, -2.080954844450352, 1.063336818518452, -0.312693340165919, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, -504.7034311398602, -960.9369925632403, 455.6692107057009, 0.0004356100354586218, 0.03860199096161918, 0.0003618838493967148, -0.04386380370370079, -0.002169347414784144, 0.04942525667989209, 0.004982545942080307, -0.05553497828831112, -0.008840601544025953, 0.0624511939178053, 0.01382568492204521, -0.07045040500021758, -0.0200635944222586, 0.07983573655308961, 0.02772464186081651, -0.09094444742633617, -0.03702388344715066, 0.1041536259789221, -0.04821924316984402, -0.1198819820124656, -0.06160445551722481, 0.1385840425022496, -0.07749220106038934, -0.1607306169402915, -0.09617998733441638, 0.1867658703825194, -0.1178877608692904, -0.2170271396260865, -0.1426531229291118, 0.2516110942742074, -0.1701709400072701, -0.2901735669785757, -0.1995766655123539, 0.33167198231666, -0.2292104536632618, -0.3741087211620817, -0.2564669531327498, 0.4144141160994378, -0.2779196790709351, -0.4486695658607398, -0.1585148155348832, 1.108326036371431, -0.5816638331406274, -0.6376460733569355, -0.1083863705122823, -0.2651532591988918, -0.04346268036180279, -0.1731791135747381, -0.007919955521890586, -0.1343419559206488, 0.01884997316493552, -0.1172998435413015, 0.04139686964273688, -0.1122109969614702, 0.0612644512371045, -0.1146390321312077, 0.07920415164364861, -0.1222036115318655, 0.09574062739795554, -0.1335955000566879, 0.111342587990221, -0.1481684801491999, 0.1264688165440626, -0.1657058794392267, -0.1415730034357649, 0.1862665137934408, 0.1571022793772566, -0.2100850895889454, 0.1735027646062021, -0.2375159098214737, 0.1912338107828908, 0.2690093363603886, -0.2107891668533592, 0.305116325619871, 0.232719244011093, 0.3465174314368482, 
0.2576565915212616, -0.3940792105482042, -0.2863453006006712, -0.4489464059320039, -0.3196699442525448, 0.5126872859904976, -0.3587010976918096, -8.28272768639917, -3.069060360130142, 3.927691039870548, 1.671176660764354, 2.407455234884137, -0.8625377998571393, 2.482297646744782, 0.1292734700574192, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 504.7028824420881, -960.9653940857651, -455.6573959835257, -0.0001045342927213557, -0.05510952876884354, 0.001813256542621175, 0.06227481799574789, -0.004858797016936023, -0.07000242917077827, 0.009264808465575517, 0.07862878594870333, -0.01511232203008351, -0.08850890488940624, 0.02254001315406766, 0.1000277135969499, -0.03174536747972971, -0.1136106715000355, 0.04298556623718725, 0.1297326922178386, -0.05657684359155325, 0.1489234556474868, 0.07288927434017092, 0.1717652930940824, -0.09233189928394896, 0.198877283105147, 0.1153205669594968, 0.2308750852817715, -0.1422162239454809, 0.2682908269428334, 0.1732165788221063, 0.3114322670825245, -0.2081820549922315, 0.3601601689508011, 0.2463834229879688, 0.4135774742746823, -0.2861906631817055, 0.4696707079443848, 0.3247907144321164, 0.5250325498506966, -0.3581320852927125, 0.5749043514816392, 0.3813740651756898, 0.33559864255759, -0.975267052557106, 1.553286395488471, 0.2975654526534304, 0.4156488593431335, 0.09371170961137179, 0.2528219529681863, 0.0318602025876451, 0.1873286487076531, -0.007406320889202832, 0.1569749836318507, -0.03858434309919215, 0.144934740453739, -0.06545817090963257, 0.1441815831728718, -0.08945270328381889, 0.1510120365349771, -0.1113416651259841, 0.1633234601502849, -0.1317257394447766, 0.1799857228617523, -0.1511744615193794, 0.2005086220062386, -0.170257066128721, -0.2248204324462682, 0.1895400302751218, 0.2531160063974417, -0.2095887459424748, 0.2857621049048731, -0.2309786288335811, 0.3232475487327383, 0.2543197035673892, -0.3661698670568465, 0.2802839275652471, 0.4152519125737978, 0.3096388181156818, 0.4713879968706516, -0.3432857433502883, -0.5357238487236324, -0.3823039298241136, -0.6097820756813976, 0.4280142297040914, -8.593418202744859, -3.211955544343986, 4.01550610066904, -1.769542946482932, -2.385993943945727, -0.9351886806923488, 1.364050528322037, -0.9080040622263671, -0.4464199288967212, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -504.7061722826078, -960.9950980825289, 455.6584934644936, 0.001353924213319257, 0.05069804235078336, -0.001403478172758728, -0.05531669887499667, 0.0005546746219488723, 0.0602176601279568, 0.001178736969878009, -0.06565366943648464, -0.003823298510146261, 0.07188614605884883, 0.007444087396122985, -0.07919288756514294, -0.0121433251462564, 0.08787498194637582, 0.01805863608944551, -0.09826218797222715, 0.02535989650826906, 0.1107152897665785, 0.0342421634090001, -0.1256225901932514, 0.04491137284146533, 0.1433858203414038, 0.05755711615621574, -0.1643879418622154, 0.07230425516591216, 0.1889319980546444, 0.08913300093328734, -0.2171379771443038, 0.107757891711349, 0.2487872536298442, 0.1274663214539597, -0.2831201330658301, 0.1469465463422727, 0.3186294299533511, 0.1641875797212851, -0.3529538909796769, 0.1765977012156841, 0.3830228392206152, 0.09923300750079445, -0.9387659102980379, 0.305398543764229, 0.5874526002501284, 0.03421711551743981, 0.2496160794636069, -0.00172104942964322, 0.1682097394146368, -0.02558725561576955, 0.1361268344344931, -0.04691707458719761, 0.1244564737090883, -0.06726669000070251, 0.1239690575086415, -0.08693811269512493, 0.1304369530627798, -0.1060389333362548, 0.141571663489697, -0.1247414524221453, 0.1561163886904096, 
-0.1433423840955047, 0.1734586694197238, -0.1622500485292275, 0.1933984716847243, 0.1819498382764446, -0.2159900192240643, -0.2029763815613684, 0.2414387822339389, -0.2259034630808185, 0.2700417931160715, -0.2513516107580024, -0.3021617326012637, 0.280007600114253, -0.3382255019637882, -0.3126552129313027, -0.3787429608129044, -0.3502182255490416, 0.4243439334152545, 0.3938138954989248, 0.4758372286628779, 0.4448387377462819, -8.646476556307604, 3.287657641880755, -4.395262558384979, 1.792030252359884, -2.73372973571881, -0.9241141660969929, -1.712422595404809, 0.4359874306747898, -2.432174107259886, -0.6397052114818167, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 504.7050978056665, -961.0251353455429, -455.6426293119019, 0.0005789714464947707, -0.07319751886662666, 0.0006207879061930474, 0.0802799735867732, -0.003129094635399184, -0.08803596071851463, 0.006975460706357075, 0.09683290373786249, -0.01224653915288706, -0.1070596332625383, 0.01908491954449565, 0.1191369742779648, -0.02768776308106317, -0.1335267149602703, 0.03830435886062355, -0.1507372943186485, -0.05122967843956672, -0.1713229356970986, 0.06678971375544743, -0.1958705963395601, -0.08531182846472353, -0.2249654793739225, 0.107069570838405, -0.2591211079527574, -0.1321872027422623, -0.2986553751625171, 0.160487669363109, -0.3434934131000018, -0.191273923827569, -0.3928906485362413, 0.2230621626353611, -0.4451099722739455, -0.2533460049820092, -0.4971642549076689, 0.2785681040388742, -0.5448315365001141, -0.2945449187820606, -0.3188523354151554, 0.7617168830174238, -1.512960882421318, -0.1565418889317566, -0.416770638188017, -0.0330371554958069, -0.2590939561072727, 0.009692898702806391, -0.1979815715767451, 0.04169344337350171, -0.1721638220045798, 0.07052530713197516, -0.1647923498418741, 0.09775169543831891, -0.1687258056803347, 0.1237881291548195, -0.1801639057873346, 0.1488794308490554, -0.1969463086568264, 0.1733613896964222, -0.2179209500175104, 0.1977104526182695, -0.242597178003689, 0.2225166824607487, 0.2709069147142353, -0.2484447312925564, -0.303039084913382, 0.2762089619529779, -0.3393348213625563, 0.3065757332901756, -0.3802323858876161, -0.3403810643090954, 0.4262480705852916, -0.3785664996638002, -0.4779857272391611, -0.4222285054438267, -0.5361702328195965, 0.4726795168378314, 0.6017042384742251, 0.5315431534297506, 8.988404257252103, -3.434234591407497, -4.436704087040918, 1.904958600463788, -2.667252960757383, 1.019315404694702, 1.553413773220462, 0.5240360258904643, -0.8521059937283484, 0.9531203463970385, 1.133055091391562, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -504.7100346863414, -961.0567364446957, 455.646613143601, 0.002722295284934282, 0.06461681973477371, -0.004007947943993217, -0.0683635324772939, 0.004536694220093283, 0.07238640000970466, -0.00433430930478059, -0.07694262150014541, 0.003392025878594218, 0.08229618144787393, -0.001670278858418917, -0.0887242115986121, -0.0008975016974408089, 0.09652212032887546, -0.004400201670914741, -0.1060064440564509, -0.008941741499294128, 0.1175134358385135, -0.01462879396175798, -0.1313898772830384, -0.02154950721841662, 0.1479705007616418, -0.02973757948750487, -0.1675338272440166, -0.03911447566452358, 0.1902263373250469, -0.0494038851796016, -0.2159464070642364, -0.06002011198536771, 0.24419037399957, -0.06995441855483667, -0.2738896861740276, -0.0777218007125729, 0.3033115380943844, -0.08147776924470845, -0.3301302888852998, -0.04342138832490353, 0.800108427299287, -0.04000732313755277, -0.5563775145367175, 0.03855669146460623, -0.2421457122014047, 
0.04673467675853111, -0.1685947713495393, 0.05966925766215405, -0.1421356546813504, 0.07617060670611064, -0.1353431117620796, 0.09486340695112068, -0.1392947724748802, 0.1148236878882152, -0.1498272473624081, 0.1355229389653643, -0.1646584030588876, 0.1567892967260354, -0.1825265390991063, 0.1787580887629875, -0.2028097674378218, 0.2017993595211208, -0.2252868325806165, -0.2264439082886397, 0.2499694264649327, 0.2533278498840449, -0.2769901080244833, 0.2831665936584349, -0.3065367197788879, 0.31675352352134, 0.3388193022269569, -0.354979335807952, 0.3740594600432878, 0.3988728455325729, 0.412493048200239, 0.4496557166350698, -0.454379410749675, -0.5088325604398763, -9.020420590072227, -3.450565579378631, -4.828910565671182, -1.874110745373426, 3.031455262150587, -0.9554280057985601, 1.918824600147808, 0.4370410894016331, 1.245159522031377, -0.1566307455549422, 2.552719012229264, 1.137055157228564, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 504.7083084832581, -961.088403701372, -455.6259949644748, 0.001693462288155773, -0.09425609907144253, -0.00135812958186467, 0.1011210298209119, -0.0002358251806337989, -0.1088168511442754, 0.003123702470935177, 0.117746189310862, -0.007394509226016558, -0.1283352037931964, 0.01318721808082886, 0.1410426164823464, -0.02068614552863905, 0.1563657899240353, 0.03011316521908753, 0.1748410515523016, -0.04171287552624901, 0.1970333700401475, 0.05572504926140202, 0.2235071819563995, -0.07233534673786346, 0.2547659637536581, 0.09159167502337025, 0.2911438853529135, -0.1132728046470395, 0.3326320892944551, 0.1367013692972142, 0.3786326012788764, -0.1605193483772494, 0.4276675386247564, 0.1824972953091496, 0.4771381978993297, -0.199534230207109, 0.5233118023773972, 0.2080647751939719, 0.3071312523515405, -0.549473955438997, 1.498520087318462, 0.00989782058163087, 0.4257392836989572, -0.03066419348029967, 0.27053141082994, -0.05385429393158369, 0.2128586455021746, -0.07865838364579553, 0.1912804624944068, -0.1054218514397032, 0.1886052588272439, -0.1333817841205637, 0.1974301330414755, -0.1618922650232225, 0.2137720478898956, -0.1906429188221663, 0.2353555123291881, -0.2196953036046989, 0.2609640871623732, -0.2494334138871651, 0.2900702342300163, -0.2804781709204113, -0.3225735541543351, 0.3136051709871521, 0.3586152379559984, -0.3496963324210602, 0.3984605855948943, -0.3897237334354889, 0.442433754373803, 0.4347674252373997, -0.4908899809534614, 0.48605633051341, 0.5442128827277137, 0.5450303291374311, 0.6028273664098085, -0.6134359722665157, -9.383238747186924, -3.600916739919184, 4.832694315608912, 2.003470413821713, 2.931212798765991, -1.076233974356122, 1.727556840630514, -0.5584158153153711, -0.9677000168663793, -0.2850220760337975, 0.4848137226350174, -1.162124180800043, -1.793676268616248, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -504.7153141286621, -961.1217498954065, 455.6338248564796, 0.004666236513345801, 0.08033835876549655, -0.007676437270062325, -0.08295141844108442, 0.01009977011228604, 0.0858558666115623, -0.01197879976722524, -0.08931330796488612, 0.01333079539699938, 0.09358896098647876, -0.01415383176294858, -0.09895648285762505, -0.0144330727549111, 0.1057012528087135, -0.01414858625643324, -0.114120817737291, -0.01328610322696056, 0.1245201008433383, -0.01185312169985896, -0.1371974259500803, -0.009903932278386084, 0.1524155095853953, -0.007577483035196663, -0.170349858019879, -0.005151094173442019, 0.1910076219888127, -0.003106911843602696, -0.2141162385930753, -0.00219223603220216, 0.2389982098664602, -0.003428653267689329, 
-0.2644761408819428, -0.007993135284190185, 0.2888757583999775, -0.009238357725273051, -0.689494763421366, -0.2155878440195048, 0.5426652421405607, -0.1100760809768825, 0.2419872852187903, -0.0915757211308498, 0.1738068042043262, -0.09421490159299305, 0.1519131253021238, -0.1063902295366557, 0.1495062851706462, -0.1238496488930524, 0.1576921868225562, -0.144457445676418, 0.1722377145797942, -0.1670559652778887, 0.1907853469363053, -0.1911367180744256, 0.2120098595295308, -0.2166813899045869, 0.2352309624985876, -0.2440312262325445, 0.2601594028118362, 0.2737719046620724, -0.2867127897537385, -0.3066513544997664, 0.3148910096033804, -0.3435341329051073, 0.3446994182012239, -0.3853912741508331, -0.3761050372799959, 0.4333221867749439, -0.4090113357290798, -0.4885956544839816, -0.4432370036767068, -0.5527297958804152, 9.393725843325772, 3.559142669539414, -5.233438534663443, 1.922140772052916, 3.30916008332548, 0.9639922588619462, -2.108426983838054, 0.4210596150978028, -1.37488546401417, -0.126172291118237, -0.9417910946035264, -0.02489482226941209, -2.820413837240554, -1.65719959391188, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 504.712822627705, -961.1549732256356, -455.6077127096619, 0.003370363513768094, -0.1184233948149237, -0.004352589134933815, 0.1249488727742578, 0.004142598150015885, -0.1325266147065525, -0.002702309633903203, 0.1415989061506627, -5.193242257853952e-05, -0.1526322638582052, 0.004242516041704322, -0.1661238420831387, -0.01002108162905441, -0.1826028877145596, 0.01755321339895132, -0.2026229299653129, -0.02699257791731359, -0.2267375503327169, 0.03843678368101805, -0.2554486834745714, -0.05185463742576753, -0.2891125143344822, 0.06697380838544749, -0.3277869557881715, -0.08312340296797521, -0.3710132247776353, 0.09904931306822057, -0.4175532297672665, -0.1127668418736266, -0.4651618147564519, 0.121590215262521, -0.510544244533257, -0.1225288992181756, -0.3005345609258526, 0.3399230830669571, -1.510041866246306, 0.1414029629131923, -0.4424813889986434, 0.0969566625439009, -0.287030472441386, 0.1002877273344002, -0.2318097264729576, 0.1179696537132148, -0.2141130560573038, 0.1428989401173152, -0.216085496212976, 0.1718969363526398, -0.2299160646037506, 0.2032113426057577, -0.2513516264549205, 0.2359542056548815, -0.2779404049836417, 0.269903604489502, -0.3083522861668259, 0.3053525231567537, -0.3419775604673035, 0.3429586234253998, 0.3786342689363086, -0.3836208986559502, -0.4183589706424067, 0.4283986271914125, -0.4612706968478629, 0.4784824630112978, -0.5074923782141396, -0.5352066248608507, 0.5571092548388775, -0.6000923998546913, -0.6101471292691906, -0.6749305319888952, -9.770051112346868, 3.712532157622387, -5.204107151313059, 2.068330691047585, -3.18125125479366, -1.111446716858055, -1.892155190902516, 0.5769109527951105, -1.074175671579627, 0.2964457689798568, 0.5539281293589068, 0.1648327863802309, -0.2172096485697859, 1.512241614146546, 2.467635656276092, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -504.7224116592663, -961.1902571610938, 455.6205915183505, 0.007332560323131098, 0.09772650675515225, -0.01266681647409895, -0.09890946144936448, 0.01761061930967164, 0.1004319663138764, -0.02223151529100917, -0.1025589734040736, 0.02658527800478494, 0.1055543905435332, 0.0307240137177325, -0.1096846360720932, 0.03470480005138991, 0.1152206928700177, 0.03859926962869588, -0.1224372229859417, 0.04250540794695284, 0.1316063042861069, 0.04656276217394649, -0.1429819416890049, 0.05097254854312608, 0.156770043109164, 0.05602256159687267, 
-0.1730781884191307, 0.06211263471333318, 0.1918417495221531, 0.06976604123079083, -0.212731221653107, 0.07959687794489242, 0.2350594264866488, 0.09218568189114314, -0.2577207095948243, 0.05895216380708893, 0.6030344606072978, 0.4616425346359402, -0.5436208097610066, 0.1801615878388285, -0.2479542818639472, 0.1359942495801694, -0.1829951211861444, 0.1288696776689487, -0.1647070482516714, 0.1370851711017615, -0.166186009742888, 0.1535729004651733, -0.1783286433187104, 0.175004017949945, -0.1967160001753221, 0.1996016370756137, -0.2188405755084354, 0.2265274150255044, -0.2432545072656078, 0.255616995167631, -0.2691644778819984, 0.287188623645841, -0.296154558573445, -0.3218894031796202, 0.3239813218519342, 0.3605820289961011, -0.3524309991867358, 0.404281451485633, -0.3812277505186181, 0.4541373257413515, 0.4099740685595717, -0.5114462459203447, 0.4381037210394499, 0.5777085075240355, 9.757187794715053, 3.613965379149533, 5.610766740550909, -1.938835199033022, 3.571436149045066, -0.9542168243229318, -2.288013661621963, -0.3938480967566447, 1.496351683252224, -0.08808345484880223, 1.023046330373567, -0.06957848752644458, 0.7559950874575174, 0.1446377341673701, 3.223159785518482, 2.231109599751307, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 504.7190540960186, -961.2250192748934, -455.588180522695, 0.005760119451667827, -0.1458018459472323, -0.008617871967446157, 0.1518886251707371, 0.01036045836723483, -0.1593339434673855, -0.01095512117368798, 0.168623432319206, 0.01033715383235535, 0.1802652315855236, -0.008421308021689847, 0.194792393053006, 0.005117854913200912, 0.2127573865763628, -0.0003573018673901766, 0.2347123846839292, -0.005870115341541783, 0.2611655306170398, 0.0134514921104915, 0.2924997329898752, -0.02205646148738518, 0.3288391697120372, 0.03100724814840945, 0.3698554289066493, -0.0391314172858762, 0.4145293741812473, 0.04465629171074397, 0.4609333830920124, -0.04526915247447261, 0.5061591284697164, 0.03850943070266388, 0.2988211434046964, -0.1343140034874617, 1.545791848932921, -0.2960335360025335, 0.4663736910206594, -0.1651993031618597, 0.3081206807361616, -0.148466528783414, 0.2543776892222589, -0.1590822972569348, 0.2401547949000193, -0.1823264347853362, 0.2466363598796981, -0.2125374666877397, 0.2654664586167893, -0.2468230014420065, 0.2920378799634734, -0.28369945780944, 0.3236595023368128, -0.3226554917179421, 0.3588325700155615, -0.3638932634463122, 0.3968084947760234, -0.4081139430426112, -0.4372603761207967, 0.4563448325195634, 0.4800432615153861, -0.5098306130089106, 0.5250360251830816, -0.5699872792909377, 0.5720443097383854, 0.6384042472680606, -0.620739093955173, 0.7168958955657478, 10.1416026925784, 3.769624166608563, -5.550197612007544, -2.101714625470462, 3.418765229089471, -1.128530918815057, 2.05030202920546, 0.5843666730352316, 1.176141611311618, -0.2998770983489928, 0.6178281648963292, -0.1687738124565776, -0.2572515070648175, -0.1256951327451982, 0.01616907141314794, -1.992214070149926, -3.189662381847132, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -504.7318794445075, -961.2622111936237, 455.6076615218215, 0.01089221138384335, 0.1164944818989575, -0.01927027124317406, -0.1159105114196266, 0.02747804138380453, 0.1157590237274542, -0.03562101111504409, -0.1163053571198556, -0.04381054622555893, 0.11780798689624, -0.05217348627038508, -0.1205218765117039, -0.06086108892227842, 0.1247010612781868, -0.07005691264492792, -0.1305992812196758, -0.07998336769309033, 0.1384666763445831, -0.09090568207321333, -0.148539099266325, 
[elided: garbled diff hunk consisting solely of added ("+") lines of comma-separated floating-point values and zero padding from a vendored data file; the numeric payload carries no reviewable content and is not reproduced here]
0.6434745768399498, 0.3617520533213044, -0.7890836608012997, 1.453287590263238, -6.830587030361006, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -455.8082369972718, -960.3994442233231, -504.5727798070742, 0.3313951499504466, 0.03334672491870352, -0.2650323687322864, 0.1237756771616079, 0.1977229200322088, 0.2152372282453819, 0.1308144824330917, -0.3074811043383469, -0.06431874746251602, -0.4022833530695494, 0.002753039484875582, 0.5030555348702184, 0.07234641380894129, -0.6149136653510678, 0.147681447081999, -0.7452813394287314, -0.2340032399414377, -0.9053839955214958, 0.3402398000888476, -7.541821253164034, 0.5849103543979055, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -455.6329040043291, -960.3950114540172, -504.2353754001218, -0.3460929449122671, -0.3573008762499605, -0.3114301376926339, 0.3594512386076111, -0.2833177549503088, 0.3742883157355104, 0.2642891667618492, -0.4053000240462633, 0.2559854273447855, -0.4549386843075994, -0.2598956392005766, -0.5256874172585198, 0.2779895068618159, -0.6211224810880993, 0.313525507512514, 0.7472565377041231, 0.3723660978852033, -0.9147314187079836, 1.237565421662729, -8.101822161846965, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -455.6073466604758, -960.3894853751993, 504.4633572926642, -0.3103733552874035, 0.1798092000949148, 0.2419934417708158, 0.2733274466086066, 0.170608369891661, -0.3730126262676927, -0.09792141422899953, -0.4790220884834106, -0.02392951925676049, 0.5940246336099106, 0.05285511329060938, -0.72308359247846, 0.1355803277369181, -0.8742174899740623, -0.2298115577107615, -1.060011360233898, 0.3453065574734215, -8.919981193604276, 0.1624134764758667, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 455.4000921853053, -960.361309464515, -504.007882194183, 0.3290914395050692, -0.3942786634383071, 0.3016661987186222, -0.4232028367455296, -0.2802278665379578, 0.4661762517445954, -0.2682364496083171, 0.5277412575464714, 0.2683110547053154, 0.6119088573807965, -0.2831724063939194, 0.7235386787769833, -0.3166483539775581, -0.8699916087218257, -0.3752194315750562, 1.063702555577482, -0.8961354452926642, 9.650619636505867, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -455.411737801223, -960.33634002581, -504.3683031089997, -0.2810675792469359, -0.3568683235024433, -0.2101973773067176, 0.4610263218581945, 0.1342569173742763, 0.5773846640444537, 0.0549895002126496, -0.7072872888655298, -0.02810812468448988, 0.8554232500908963, -0.1177949641523385, 1.030291473272884, 0.2197751447614384, 1.245920882288997, -0.3445848993688538, 10.57920381944338, 
0.4160829872124499, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -455.1591691278096, -960.2853155130464, 503.7506267530316, 0.3049590480459953, -0.4680278418562256, -0.2856076919532088, 0.5295716933187377, -0.2723080782051216, 0.6081924100973243, 0.2695854300651085, 0.7101704945576234, -0.2814097861384926, 0.8422035444139073, -0.3125162969328145, -1.013460137059419, -0.3702351580276053, 1.238618787929362, -0.4245068253530189, 11.50934860393606, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 455.2183025514214, -960.2418420556575, 504.2904567527979, 0.2437489235262463, -0.5785902355402166, -0.1696099295696916, -0.7030543018352547, -0.08807726217552897, 0.8477643119258153, -0.0003803922022015914, -1.016538148397893, 0.09536127235557913, -1.218135087789733, -0.2047067633976007, -1.468068679276456, 0.3388478362779086, -12.55632491341529, -1.160129556695031, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 454.9011576326581, -960.1679733535419, 503.4480310997173, -0.2743159215154021, 0.59210690664036, -0.2642341403577922, 0.6948327035340854, 0.2611611633095301, 0.820468994814445, -0.2710734389097403, 0.9783065442168752, -0.3001250829922114, -1.179929020129721, -0.3568752809307986, 1.442834925435452, 0.1861860221886378, 13.72072654131889, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 455.0234951961737, -960.1065185460631, -504.2341761382918, -0.1985155387376615, -0.8636702893987938, -0.1197570863030018, 1.022005815169442, -0.03046148161363051, -1.212498738675283, 0.06965185900521845, -1.44380567993779, -0.1855411271964486, -1.732900209415532, 0.3288765751169174, -14.89970702904785, -2.085331319125681, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -454.6155022554338, -960.0090394364709, 503.080419120223, 0.2378578162407876, -0.7855779404374899, -0.2389268322384859, -0.9427265300586574, 0.2493889345428649, -1.133389617235434, 0.2776001160195384, 1.372380149780934, 0.3338733668864364, -1.680779007748177, -0.9514699644735849, -16.34248127983827, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 454.8236752088293, -959.9291937205099, 504.2068595622975, 0.1450192700206833, -1.238921588152978, 0.05908037491561525, 1.451502248422423, -0.04247506464443443, 1.715440182112174, 0.1633637326099601, 2.049030098342237, 
-0.3153926679720489, 17.67488245777389, 3.216886963304409, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 454.2888780517075, -959.8060141768642, 502.6209139189659, -0.1963783784418207, -1.07681015710693, 0.2118129088940946, -1.30957289263114, 0.2416506844030767, 1.594926385284288, 0.2987764905571196, -1.958528602633205, -1.89790052548914, -19.45550897611078, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 454.6150435510897, -959.7057073918052, -504.2207918385922, 0.08191276561459389, 1.744978959223401, -0.01623102385292412, 2.044325279439126, 0.139362395300141, 2.428380353063932, -0.2989187527977135, 20.97397716279267, 4.596081079911148, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -453.9030102952942, -959.5514771839804, -502.0302313735755, -0.1509069873047727, 1.509921737993699, -0.1866506960156188, -1.853460072170985, -0.2470701568585754, 2.284756111156089, 3.070922511010462, 23.1773864785971, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -454.393273204417, -959.4263564611852, 504.2962075579032, 0.005942404933789297, 2.446660862635643, 0.1147302269971472, 2.888007099445928, -0.2794135351323237, 24.93107187619451, 6.291448555715359, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 453.4308183834945, -959.2306762470356, 501.2467662460859, 0.1030197566665109, 2.15672544959436, 0.1707450834055513, -2.672257869508826, -4.548465509453017, -27.68489865384783, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 454.1521964870133, -959.071043583621, 504.4663679395445, 0.09038098396525029, 3.453184686592636, -0.2556696326547585, 29.74790742438, 8.418178566025752, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 452.828604749646, -958.8118098643598, -500.1677840008083, 0.05562336274628601, -3.140588015388957, -6.465358070370396, -33.25328104675387, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -453.8814454933349, -958.5973523296369, -504.7879798529249, 0.2240186812962395, -35.739305932474, -11.17424401632893, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -452.0207842240151, -958.2280932680816, 498.6099988306628, -9.061273073482408, -40.32955903922403, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 453.5608878051024, -957.9144120234951, 542.7656821084641, 14.91304502929086, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 450.8656330991661, -969.8578951037719, 452.1887612775597, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 787.8967774818433, -898.9789215779133, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, + }, + evWant: []complex128{ + 1, + 2, + -0.8238783640171078 + 1.579766228862017i, + -0.8238783640171078 - 1.579766228862017i, + -3.922907917607387 + 1.654540870297146i, + -3.922907917607387 - 1.654540870297146i, + -10.04176861796659 + 1.651471031280953i, + -10.04176861796659 - 1.651471031280953i, + -19.90533512613429 + 1.64196864045227i, + -19.90533512613429 - 1.64196864045227i, + -33.60610872827028 + 1.635560741682501i, + -33.60610872827028 - 1.635560741682501i, + -51.10943733404228 + 1.631805014774364i, + -51.10943733404228 - 1.631805014774364i, + -72.34919707090897 + 1.629522740505929i, + -72.34919707090897 - 1.629522740505929i, + -97.24021794530901 + 1.628064860261413i, + -97.24021794530901 - 1.628064860261413i, + -125.6811625494976 + 1.627074041605065i, + -125.6811625494976 - 1.627074041605065i, + -157.5556174346503 + 1.626392967840776i, + -157.5556174346503 - 1.626392967840776i, + -192.732837945876 + 1.625893674037654i, + -192.732837945876 - 1.625893674037654i, + -231.0684238575969 + 1.625523897559938i, + -231.0684238575969 - 1.625523897559938i, + -272.404932676379 + 1.625248463045784i, + -272.404932676379 - 
1.625248463045784i, + -316.5725618120467 + 1.625029322230836i, + -316.5725618120467 - 1.625029322230836i, + -363.3898435346347 + 1.624863705104029i, + -363.3898435346347 - 1.624863705104029i, + -412.6644183160402 + 1.624734783685709i, + -412.6644183160402 - 1.624734783685709i, + -464.1938275598905 + 1.624629407600858i, + -464.1938275598905 - 1.624629407600858i, + -517.7663337295413 + 1.624555040627727i, + -517.7663337295413 - 1.624555040627727i, + -573.1617958392867 + 1.62448889134649i, + -573.1617958392867 - 1.62448889134649i, + -630.1525854166166 + 1.624445355378192i, + -630.1525854166166 - 1.624445355378192i, + -688.5045249303587 + 1.624414401302088i, + -688.5045249303587 - 1.624414401302088i, + -747.9778126976437 + 1.624396390555459i, + -747.9778126976437 - 1.624396390555459i, + -808.3280706224909 + 1.62438480760184i, + -808.3280706224909 - 1.62438480760184i, + -869.3072903249724 + 1.624387678902335i, + -869.3072903249724 - 1.624387678902335i, + -930.6648831979091 + 1.624396130880259i, + -930.6648831979091 - 1.624396130880259i, + -992.1487134378474 + 1.624417808682915i, + -992.1487134378474 - 1.624417808682915i, + -1053.506114553354 + 1.624453056189826i, + -1053.506114553354 - 1.624453056189826i, + -1114.484928198698 + 1.62449544649428i, + -1114.484928198698 - 1.62449544649428i, + -1174.834554234014 + 1.624553207269019i, + -1174.834554234014 - 1.624553207269019i, + -1234.306981514973 + 1.624623553438826i, + -1234.306981514973 - 1.624623553438826i, + -1292.657768972259 + 1.624709216827242i, + -1292.657768972259 - 1.624709216827242i, + -1349.647106741638 + 1.624814444572517i, + -1349.647106741638 - 1.624814444572517i, + -1405.040739357668 + 1.624951632752096i, + -1405.040739357668 - 1.624951632752096i, + -1458.610953350783 + 1.625104547041682i, + -1458.610953350783 - 1.625104547041682i, + -1510.137508646807 + 1.625304666073007i, + -1510.137508646807 - 1.625304666073007i, + -1559.408520122221 + 1.625548293255404i, + -1559.408520122221 - 1.625548293255404i, + -1606.221305250554 + 1.625851986073836i, + -1606.221305250554 - 1.625851986073836i, + -1650.383201531125 + 1.62624202844641i, + -1650.383201531125 - 1.62624202844641i, + -1691.712315735984 + 1.6267345498979i, + -1691.712315735984 - 1.6267345498979i, + -1730.038177420971 + 1.627388968656263i, + -1730.038177420971 - 1.627388968656263i, + -1765.20230058066 + 1.628268412022146i, + -1765.20230058066 - 1.628268412022146i, + -1797.05860162894 + 1.629473972633416i, + -1797.05860162894 - 1.629473972633416i, + -1825.473493639258 + 1.631220665229006i, + -1825.473493639258 - 1.631220665229006i, + -1850.32542664842 + 1.633834593918563i, + -1850.32542664842 - 1.633834593918563i, + -1871.503056018116 + 1.637993570641514i, + -1871.503056018116 - 1.637993570641514i, + -1888.90026514681 + 1.64508855982818i, + -1888.90026514681 - 1.64508855982818i, + -1902.402515327158 + 1.658179541614067i, + -1902.402515327158 - 1.658179541614067i, + -1911.858940404498 + 1.682209391409579i, + -1911.858940404498 - 1.682209391409579i, + -1916.92602113601 + 1.761877988650816i, + -1916.92602113601 - 1.761877988650816i, + -1930.484166851586 + 1.202676762393897i, + -1930.484166851586 - 1.202676762393897i, + 3, + 4, + }, + }, +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlabrd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlabrd.go new file mode 100644 index 0000000..439ad47 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlabrd.go @@ -0,0 +1,108 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" +) + +type Dlabrder interface { + Dlabrd(m, n, nb int, a []float64, lda int, d, e, tauq, taup, x []float64, ldx int, y []float64, ldy int) +} + +func DlabrdTest(t *testing.T, impl Dlabrder) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, nb, lda, ldx, ldy int + }{ + {4, 5, 2, 0, 0, 0}, + {4, 5, 4, 0, 0, 0}, + {5, 5, 2, 0, 0, 0}, + {5, 5, 5, 0, 0, 0}, + {5, 4, 2, 0, 0, 0}, + {5, 4, 4, 0, 0, 0}, + + {4, 5, 2, 10, 11, 12}, + {4, 5, 4, 10, 11, 12}, + {5, 5, 2, 10, 11, 12}, + {5, 5, 5, 10, 11, 12}, + {5, 4, 2, 10, 11, 12}, + {5, 4, 4, 10, 11, 12}, + + {4, 5, 2, 11, 12, 10}, + {4, 5, 4, 11, 12, 10}, + {5, 5, 2, 11, 12, 10}, + {5, 5, 5, 11, 12, 10}, + {5, 4, 2, 11, 12, 10}, + {5, 4, 4, 11, 12, 10}, + + {4, 5, 2, 12, 11, 10}, + {4, 5, 4, 12, 11, 10}, + {5, 5, 2, 12, 11, 10}, + {5, 5, 5, 12, 11, 10}, + {5, 4, 2, 12, 11, 10}, + {5, 4, 4, 12, 11, 10}, + } { + m := test.m + n := test.n + nb := test.nb + lda := test.lda + if lda == 0 { + lda = n + } + ldy := test.ldy + if ldy == 0 { + ldy = nb + } + ldx := test.ldx + if ldx == 0 { + ldx = nb + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + d := make([]float64, nb) + for i := range d { + d[i] = math.NaN() + } + e := make([]float64, nb) + for i := range e { + e[i] = math.NaN() + } + tauP := make([]float64, nb) + for i := range tauP { + tauP[i] = math.NaN() + } + tauQ := make([]float64, nb) + for i := range tauQ { + tauQ[i] = math.NaN() + } + x := make([]float64, m*ldx) + for i := range x { + x[i] = rnd.NormFloat64() + } + y := make([]float64, n*ldy) + for i := range y { + y[i] = rnd.NormFloat64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Compute the reduction. + impl.Dlabrd(m, n, nb, a, lda, d, e, tauQ, tauP, x, ldx, y, ldy) + + if m >= n && nb == n { + tauP[n-1] = 0 + } + if m < n && nb == m { + tauQ[m-1] = 0 + } + checkBidiagonal(t, m, n, nb, a, lda, d, e, tauP, tauQ, aCopy) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacn2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacn2.go new file mode 100644 index 0000000..00d288c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacn2.go @@ -0,0 +1,73 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlacn2er interface { + Dlacn2(n int, v, x []float64, isgn []int, est float64, kase int, isave *[3]int) (float64, int) +} + +func Dlacn2Test(t *testing.T, impl Dlacn2er) { + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{1, 2, 3, 4, 5, 7, 10, 15, 20, 100} { + for cas := 0; cas < 10; cas++ { + a := randomGeneral(n, n, n, rnd) + + // Compute the 1-norm of A explicitly. + var norm1 float64 + for j := 0; j < n; j++ { + var sum float64 + for i := 0; i < n; i++ { + sum += math.Abs(a.Data[i*a.Stride+j]) + } + if sum > norm1 { + norm1 = sum + } + } + + // Compute the estimate of the 1-norm using Dlacn2.
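+ // Dlacn2 uses a reverse-communication interface: after each call, kase == 1 asks the
+ // caller to overwrite x with A*x, kase == 2 asks for A^T*x, and kase == 0 signals that
+ // the returned value is the final estimate. The loop below follows that protocol with Gemv.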
+ x := make([]float64, n) + work := make([]float64, n) + v := make([]float64, n) + isgn := make([]int, n) + var ( + kase int + isave [3]int + got float64 + ) + loop: + for { + got, kase = impl.Dlacn2(n, v, x, isgn, got, kase, &isave) + switch kase { + default: + panic("Dlacn2 returned invalid value of kase") + case 0: + break loop + case 1: + blas64.Gemv(blas.NoTrans, 1, a, blas64.Vector{Data: x, Inc: 1}, 0, blas64.Vector{Data: work, Inc: 1}) + copy(x, work) + case 2: + blas64.Gemv(blas.Trans, 1, a, blas64.Vector{Data: x, Inc: 1}, 0, blas64.Vector{Data: work, Inc: 1}) + copy(x, work) + } + } + + // Check that got is either accurate enough or a + // lower estimate of the 1-norm of A. + if math.Abs(got-norm1) > 1e-8 && got > norm1 { + t.Errorf("Case n=%v: not lower estimate. 1-norm %v, estimate %v", n, norm1, got) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacpy.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacpy.go new file mode 100644 index 0000000..4a2becb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlacpy.go @@ -0,0 +1,90 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Dlacpyer interface { + Dlacpy(uplo blas.Uplo, m, n int, a []float64, lda int, b []float64, ldb int) +} + +func DlacpyTest(t *testing.T, impl Dlacpyer) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower, blas.All} { + for _, test := range []struct { + m, n, lda, ldb int + }{ + {3, 5, 0, 0}, + {5, 5, 0, 0}, + {7, 5, 0, 0}, + + {3, 5, 10, 12}, + {5, 5, 10, 12}, + {7, 5, 10, 12}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + ldb := test.ldb + if ldb == 0 { + ldb = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + b := make([]float64, m*ldb) + for i := range b { + b[i] = rnd.Float64() + } + impl.Dlacpy(uplo, m, n, a, lda, b, ldb) + equal := true + switch uplo { + case blas.Upper: + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + if b[i*ldb+j] != a[i*lda+j] { + equal = false + goto DoneCheck + } + } + } + case blas.Lower: + for i := 0; i < m; i++ { + for j := 0; j < min(i, n); j++ { + if b[i*ldb+j] != a[i*lda+j] { + equal = false + goto DoneCheck + } + } + } + case blas.All: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + if b[i*ldb+j] != a[i*lda+j] { + equal = false + goto DoneCheck + } + } + } + } + DoneCheck: + if !equal { + fmt.Println(blas.Lower) + t.Errorf("Matrices not equal after copy. Uplo = %d, m = %d, n = %d", uplo, m, n) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlae2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlae2.go new file mode 100644 index 0000000..5df1caf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlae2.go @@ -0,0 +1,53 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
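// For reference alongside the Dlae2 tests below: the symmetric 2×2 matrix [[a, b], [b, c]]
// has eigenvalues (a+c)/2 ± sqrt(((a-c)/2)^2 + b^2); Dlae2 returns them ordered by absolute
// value. A minimal standalone sketch of that closed form (the helper name is illustrative and
// not part of the vendored package; it assumes the standard library "math" import):
func eig2x2Sym(a, b, c float64) (hi, lo float64) {
	mean := (a + c) / 2
	r := math.Hypot((a-c)/2, b) // sqrt(((a-c)/2)^2 + b^2), computed without overflow
	return mean + r, mean - r   // hi >= lo; reorder by magnitude to match Dlae2's rt1, rt2
}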
+ +package testlapack + +import ( + "fmt" + "math" + "testing" +) + +type Dlae2er interface { + Dlae2(a, b, c float64) (rt1, rt2 float64) +} + +func Dlae2Test(t *testing.T, impl Dlae2er) { + for _, test := range []struct { + a, b, c float64 + }{ + {-10, 5, 3}, + {3, 5, -10}, + {0, 3, 0}, + {1, 3, 1}, + {1, -3, 1}, + {5, 0, 3}, + {3, 0, -5}, + {1, 3, 1.02}, + {1.02, 3, 1}, + {1, -3, -9}, + } { + a := test.a + b := test.b + c := test.c + rt1, rt2 := impl.Dlae2(a, b, c) + + errStr := fmt.Sprintf("a = %v, b = %v, c = %v", a, b, c) + // Check if rt1 and rt2 are eigenvalues by checking if det(a - λI) = 0 + a1 := a - rt1 + c1 := c - rt1 + det := a1*c1 - b*b + if math.Abs(det) > 1e-10 { + t.Errorf("First eigenvalue mismatch. %s. Det = %v", errStr, det) + } + + a2 := a - rt2 + c2 := c - rt2 + det = a2*c2 - b*b + if math.Abs(det) > 1e-10 { + t.Errorf("Second eigenvalue mismatch. %s. Det = %v", errStr, det) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaev2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaev2.go new file mode 100644 index 0000000..73de0e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaev2.go @@ -0,0 +1,47 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" +) + +type Dlaev2er interface { + Dlaev2(a, b, c float64) (rt1, rt2, cs1, sn1 float64) +} + +func Dlaev2Test(t *testing.T, impl Dlaev2er) { + rnd := rand.New(rand.NewSource(1)) + for trial := 0; trial < 100; trial++ { + a := rnd.NormFloat64() + b := rnd.NormFloat64() + c := rnd.NormFloat64() + + rt1, rt2, cs1, sn1 := impl.Dlaev2(a, b, c) + tmp := mul2by2([2][2]float64{{cs1, sn1}, {-sn1, cs1}}, [2][2]float64{{a, b}, {b, c}}) + ans := mul2by2(tmp, [2][2]float64{{cs1, -sn1}, {sn1, cs1}}) + if math.Abs(ans[0][0]-rt1) > 1e-14 { + t.Errorf("Largest eigenvalue mismatch. Returned %v, mul %v", rt1, ans[0][0]) + } + if math.Abs(ans[1][0]) > 1e-14 || math.Abs(ans[0][1]) > 1e-14 { + t.Errorf("Non-zero off diagonal. ans[1][0] = %v, ans[0][1] = %v", ans[1][0], ans[0][1]) + } + if math.Abs(ans[1][1]-rt2) > 1e-14 { + t.Errorf("Smallest eigenvalue mismatch. Returned %v, mul %v", rt2, ans[1][1]) + } + } +} + +func mul2by2(a, b [2][2]float64) [2][2]float64 { + var c [2][2]float64 + c[0][0] = a[0][0]*b[0][0] + a[0][1]*b[1][0] + c[0][1] = a[0][0]*b[0][1] + a[0][1]*b[1][1] + c[1][0] = a[1][0]*b[0][0] + a[1][1]*b[1][0] + c[1][1] = a[1][0]*b[0][1] + a[1][1]*b[1][1] + return c +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaexc.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaexc.go new file mode 100644 index 0000000..6a84381 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaexc.go @@ -0,0 +1,228 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
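// Dlaexc, tested below, swaps adjacent 1×1 or 2×2 diagonal blocks of a real Schur form by an
// orthogonal similarity transformation. For a 2×2 block in Schur canonical form, [[p, b], [c, p]]
// with b*c < 0, the eigenvalues are the conjugate pair p ± sqrt(-b*c)i. A standalone sketch of
// that formula (the function name is illustrative; the test itself relies on the package's own
// helpers such as schurBlockEigenvalues):
func schurBlockEig(p, b, c float64) (ev1, ev2 complex128) {
	im := math.Sqrt(-b * c) // assumes b*c < 0 and the standard "math" import
	return complex(p, im), complex(p, -im)
}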
+ +package testlapack + +import ( + "fmt" + "math" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaexcer interface { + Dlaexc(wantq bool, n int, t []float64, ldt int, q []float64, ldq int, j1, n1, n2 int, work []float64) bool +} + +func DlaexcTest(t *testing.T, impl Dlaexcer) { + rnd := rand.New(rand.NewSource(1)) + + for _, wantq := range []bool{true, false} { + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 18, 31, 53} { + for _, extra := range []int{0, 1, 11} { + for cas := 0; cas < 100; cas++ { + j1 := rnd.Intn(n) + n1 := min(rnd.Intn(3), n-j1) + n2 := min(rnd.Intn(3), n-j1-n1) + testDlaexc(t, impl, wantq, n, j1, n1, n2, extra, rnd) + } + } + } + } +} + +func testDlaexc(t *testing.T, impl Dlaexcer, wantq bool, n, j1, n1, n2, extra int, rnd *rand.Rand) { + const tol = 1e-14 + + tmat := randomGeneral(n, n, n+extra, rnd) + // Zero out the lower triangle. + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + tmat.Data[i*tmat.Stride+j] = 0 + } + } + // Make any 2x2 diagonal block to be in Schur canonical form. + if n1 == 2 { + // Diagonal elements equal. + tmat.Data[(j1+1)*tmat.Stride+j1+1] = tmat.Data[j1*tmat.Stride+j1] + // Off-diagonal elements of opposite sign. + c := rnd.NormFloat64() + if math.Signbit(c) == math.Signbit(tmat.Data[j1*tmat.Stride+j1+1]) { + c *= -1 + } + tmat.Data[(j1+1)*tmat.Stride+j1] = c + } + if n2 == 2 { + // Diagonal elements equal. + tmat.Data[(j1+n1+1)*tmat.Stride+j1+n1+1] = tmat.Data[(j1+n1)*tmat.Stride+j1+n1] + // Off-diagonal elements of opposite sign. + c := rnd.NormFloat64() + if math.Signbit(c) == math.Signbit(tmat.Data[(j1+n1)*tmat.Stride+j1+n1+1]) { + c *= -1 + } + tmat.Data[(j1+n1+1)*tmat.Stride+j1+n1] = c + } + tmatCopy := cloneGeneral(tmat) + var q, qCopy blas64.General + if wantq { + q = eye(n, n+extra) + qCopy = cloneGeneral(q) + } + work := nanSlice(n) + + ok := impl.Dlaexc(wantq, n, tmat.Data, tmat.Stride, q.Data, q.Stride, j1, n1, n2, work) + + prefix := fmt.Sprintf("Case n=%v, j1=%v, n1=%v, n2=%v, wantq=%v, extra=%v", n, j1, n1, n2, wantq, extra) + + if !generalOutsideAllNaN(tmat) { + t.Errorf("%v: out-of-range write to T", prefix) + } + if wantq && !generalOutsideAllNaN(q) { + t.Errorf("%v: out-of-range write to Q", prefix) + } + + if !ok { + if n1 == 1 && n2 == 1 { + t.Errorf("%v: unexpected failure", prefix) + } else { + t.Logf("%v: Dlaexc returned false", prefix) + } + } + + if !ok || n1 == 0 || n2 == 0 || j1+n1 >= n { + // Check that T is not modified. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if tmat.Data[i*tmat.Stride+j] != tmatCopy.Data[i*tmatCopy.Stride+j] { + t.Errorf("%v: ok == false but T[%v,%v] modified", prefix, i, j) + } + } + } + if !wantq { + return + } + // Check that Q is not modified. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if q.Data[i*q.Stride+j] != qCopy.Data[i*qCopy.Stride+j] { + t.Errorf("%v: ok == false but Q[%v,%v] modified", prefix, i, j) + } + } + } + return + } + + // Check that T is not modified outside of rows and columns [j1:j1+n1+n2]. + for i := 0; i < n; i++ { + if j1 <= i && i < j1+n1+n2 { + continue + } + for j := 0; j < n; j++ { + if j1 <= j && j < j1+n1+n2 { + continue + } + diff := tmat.Data[i*tmat.Stride+j] - tmatCopy.Data[i*tmatCopy.Stride+j] + if diff != 0 { + t.Errorf("%v: unexpected modification of T[%v,%v]", prefix, i, j) + } + } + } + + if n1 == 1 { + // 1×1 blocks are swapped exactly. 
+ got := tmat.Data[(j1+n2)*tmat.Stride+j1+n2] + want := tmatCopy.Data[j1*tmatCopy.Stride+j1] + if want != got { + t.Errorf("%v: unexpected value of T[%v,%v]. Want %v, got %v", prefix, j1+n2, j1+n2, want, got) + } + } else { + // Check that the swapped 2×2 block is in Schur canonical form. + // The n1×n1 block is now located at T[j1+n2,j1+n2]. + a, b, c, d := extract2x2Block(tmat.Data[(j1+n2)*tmat.Stride+j1+n2:], tmat.Stride) + if !isSchurCanonical(a, b, c, d) { + t.Errorf("%v: 2×2 block at T[%v,%v] not in Schur canonical form", prefix, j1+n2, j1+n2) + } + ev1Got, ev2Got := schurBlockEigenvalues(a, b, c, d) + + // Check that the swapped 2×2 block has the same eigenvalues. + // The n1×n1 block was originally located at T[j1,j1]. + a, b, c, d = extract2x2Block(tmatCopy.Data[j1*tmatCopy.Stride+j1:], tmatCopy.Stride) + ev1Want, ev2Want := schurBlockEigenvalues(a, b, c, d) + if cmplx.Abs(ev1Got-ev1Want) > tol { + t.Errorf("%v: unexpected first eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, j1+n2, j1+n2, ev1Want, ev1Got) + } + if cmplx.Abs(ev2Got-ev2Want) > tol { + t.Errorf("%v: unexpected second eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, j1+n2, j1+n2, ev2Want, ev2Got) + } + } + if n2 == 1 { + // 1×1 blocks are swapped exactly. + got := tmat.Data[j1*tmat.Stride+j1] + want := tmatCopy.Data[(j1+n1)*tmatCopy.Stride+j1+n1] + if want != got { + t.Errorf("%v: unexpected value of T[%v,%v]. Want %v, got %v", prefix, j1, j1, want, got) + } + } else { + // Check that the swapped 2×2 block is in Schur canonical form. + // The n2×n2 block is now located at T[j1,j1]. + a, b, c, d := extract2x2Block(tmat.Data[j1*tmat.Stride+j1:], tmat.Stride) + if !isSchurCanonical(a, b, c, d) { + t.Errorf("%v: 2×2 block at T[%v,%v] not in Schur canonical form", prefix, j1, j1) + } + ev1Got, ev2Got := schurBlockEigenvalues(a, b, c, d) + + // Check that the swapped 2×2 block has the same eigenvalues. + // The n2×n2 block was originally located at T[j1+n1,j1+n1]. + a, b, c, d = extract2x2Block(tmatCopy.Data[(j1+n1)*tmatCopy.Stride+j1+n1:], tmatCopy.Stride) + ev1Want, ev2Want := schurBlockEigenvalues(a, b, c, d) + if cmplx.Abs(ev1Got-ev1Want) > tol { + t.Errorf("%v: unexpected first eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, j1, j1, ev1Want, ev1Got) + } + if cmplx.Abs(ev2Got-ev2Want) > tol { + t.Errorf("%v: unexpected second eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, j1, j1, ev2Want, ev2Got) + } + } + + if !wantq { + return + } + + if !isOrthogonal(q) { + t.Errorf("%v: Q is not orthogonal", prefix) + } + // Check that Q is unchanged outside of columns [j1:j1+n1+n2]. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if j1 <= j && j < j1+n1+n2 { + continue + } + diff := q.Data[i*q.Stride+j] - qCopy.Data[i*qCopy.Stride+j] + if diff != 0 { + t.Errorf("%v: unexpected modification of Q[%v,%v]", prefix, i, j) + } + } + } + // Check that Q^T TOrig Q == T. 
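+ // Dlaexc applies an orthogonal similarity transformation, so with Q accumulated the
+ // original and updated T must satisfy Q^T * T_orig * Q == T up to rounding error.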
+ tq := eye(n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmatCopy, q, 0, tq) + qtq := eye(n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, tq, 0, qtq) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + diff := qtq.Data[i*qtq.Stride+j] - tmat.Data[i*tmat.Stride+j] + if math.Abs(diff) > tol { + t.Errorf("%v: unexpected value of T[%v,%v]", prefix, i, j) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlags2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlags2.go new file mode 100644 index 0000000..a1c959e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlags2.go @@ -0,0 +1,119 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dlags2er interface { + Dlags2(upper bool, a1, a2, a3, b1, b2, b3 float64) (csu, snu, csv, snv, csq, snq float64) +} + +func Dlags2Test(t *testing.T, impl Dlags2er) { + rnd := rand.New(rand.NewSource(1)) + for _, upper := range []bool{true, false} { + for i := 0; i < 100; i++ { + // Generate randomly the elements of a 2×2 matrix A + // [ a1 a2 ] or [ a1 0 ] + // [ 0 a3 ] [ a2 a3 ] + a1 := rnd.Float64() + a2 := rnd.Float64() + a3 := rnd.Float64() + // Generate randomly the elements of a 2×2 matrix B. + // [ b1 b2 ] or [ b1 0 ] + // [ 0 b3 ] [ b2 b3 ] + b1 := rnd.Float64() + b2 := rnd.Float64() + b3 := rnd.Float64() + + // Compute orthogonal matrices U, V, Q + // U = [ csu snu ], V = [ csv snv ], Q = [ csq snq ] + // [ -snu csu ] [ -snv csv ] [ -snq csq ] + // that transform A and B. + csu, snu, csv, snv, csq, snq := impl.Dlags2(upper, a1, a2, a3, b1, b2, b3) + + // Check that U, V, Q are orthogonal matrices (their + // determinant is equal to 1). + detU := det2x2(csu, snu, -snu, csu) + if !floats.EqualWithinAbsOrRel(math.Abs(detU), 1, 1e-14, 1e-14) { + t.Errorf("U not orthogonal: det(U)=%v", detU) + } + detV := det2x2(csv, snv, -snv, csv) + if !floats.EqualWithinAbsOrRel(math.Abs(detV), 1, 1e-14, 1e-14) { + t.Errorf("V not orthogonal: det(V)=%v", detV) + } + detQ := det2x2(csq, snq, -snq, csq) + if !floats.EqualWithinAbsOrRel(math.Abs(detQ), 1, 1e-14, 1e-14) { + t.Errorf("Q not orthogonal: det(Q)=%v", detQ) + } + + // Create U, V, Q explicitly as dense matrices. + u := blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{csu, snu, -snu, csu}, + } + v := blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{csv, snv, -snv, csv}, + } + q := blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{csq, snq, -snq, csq}, + } + + // Create A and B explicitly as dense matrices. + a := blas64.General{Rows: 2, Cols: 2, Stride: 2} + b := blas64.General{Rows: 2, Cols: 2, Stride: 2} + if upper { + a.Data = []float64{a1, a2, 0, a3} + b.Data = []float64{b1, b2, 0, b3} + } else { + a.Data = []float64{a1, 0, a2, a3} + b.Data = []float64{b1, 0, b2, b3} + } + + tmp := blas64.General{Rows: 2, Cols: 2, Stride: 2, Data: make([]float64, 4)} + // Transform A as U^T*A*Q. + blas64.Gemm(blas.Trans, blas.NoTrans, 1, u, a, 0, tmp) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp, q, 0, a) + // Transform B as V^T*B*Q.
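+ // Dlags2 chooses U, V, Q so that U^T*A*Q and V^T*B*Q each have a zero in the (1,2) entry
+ // when A and B are upper triangular, and in the (2,1) entry when they are lower triangular;
+ // the checks below extract exactly that entry from each product.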
+ blas64.Gemm(blas.Trans, blas.NoTrans, 1, v, b, 0, tmp) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp, q, 0, b) + + // Extract elements of transformed A and B that should be equal to zero. + var gotA, gotB float64 + if upper { + gotA = a.Data[1] + gotB = b.Data[1] + } else { + gotA = a.Data[2] + gotB = b.Data[2] + } + // Check that they are indeed zero. + if !floats.EqualWithinAbsOrRel(gotA, 0, 1e-14, 1e-14) { + t.Errorf("unexpected non-zero value for zero triangle of U^T*A*Q: %v", gotA) + } + if !floats.EqualWithinAbsOrRel(gotB, 0, 1e-14, 1e-14) { + t.Errorf("unexpected non-zero value for zero triangle of V^T*B*Q: %v", gotB) + } + } + } +} + +func det2x2(a, b, c, d float64) float64 { return a*d - b*c } diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahqr.go new file mode 100644 index 0000000..07378d8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahqr.go @@ -0,0 +1,442 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlahqrer interface { + Dlahqr(wantt, wantz bool, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, iloz, ihiz int, z []float64, ldz int) int +} + +type dlahqrTest struct { + h blas64.General + ilo, ihi int + iloz, ihiz int + wantt, wantz bool + + evWant []complex128 // Optional slice holding known eigenvalues. +} + +func DlahqrTest(t *testing.T, impl Dlahqrer) { + rnd := rand.New(rand.NewSource(1)) + + // Tests that choose the [ilo:ihi+1,ilo:ihi+1] and + // [iloz:ihiz+1,ilo:ihi+1] blocks randomly. + for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 18, 31, 53} { + for _, extra := range []int{0, 1, 11} { + for cas := 0; cas < 100; cas++ { + ilo := rnd.Intn(n) + ihi := rnd.Intn(n) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + iloz := rnd.Intn(ilo + 1) + ihiz := ihi + rnd.Intn(n-ihi) + h := randomHessenberg(n, n+extra, rnd) + if ilo-1 >= 0 { + h.Data[ilo*h.Stride+ilo-1] = 0 + } + if ihi+1 < n { + h.Data[(ihi+1)*h.Stride+ihi] = 0 + } + test := dlahqrTest{ + h: h, + ilo: ilo, + ihi: ihi, + iloz: iloz, + ihiz: ihiz, + wantt: wantt, + wantz: wantz, + } + testDlahqr(t, impl, test) + } + } + } + } + } + // Tests that make sure that some potentially problematic corner cases, + // like zero-sized matrix, are covered. 
+ for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, extra := range []int{0, 1, 11} { + for _, test := range []dlahqrTest{ + { + h: randomHessenberg(0, extra, rnd), + ilo: 0, + ihi: -1, + iloz: 0, + ihiz: -1, + }, + { + h: randomHessenberg(1, 1+extra, rnd), + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + }, + { + h: randomHessenberg(2, 2+extra, rnd), + ilo: 1, + ihi: 1, + iloz: 1, + ihiz: 1, + }, + { + h: randomHessenberg(2, 2+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 9, + iloz: 0, + ihiz: 9, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 9, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 9, + ihi: 9, + iloz: 0, + ihiz: 9, + }, + } { + if test.ilo-1 >= 0 { + test.h.Data[test.ilo*test.h.Stride+test.ilo-1] = 0 + } + if test.ihi+1 < test.h.Rows { + test.h.Data[(test.ihi+1)*test.h.Stride+test.ihi] = 0 + } + test.wantt = wantt + test.wantz = wantz + testDlahqr(t, impl, test) + } + } + } + } + + // Tests with explicit eigenvalues computed by Octave. + for _, test := range []dlahqrTest{ + { + h: blas64.General{ + Rows: 1, + Cols: 1, + Stride: 1, + Data: []float64{7.09965484086874e-1}, + }, + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + evWant: []complex128{7.09965484086874e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 0, -1, + 1, 0, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{1i, -1i}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 6.25219991450918e-1, 8.17510791994361e-1, + 3.31218891622294e-1, 1.24103744878131e-1, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 4, + Cols: 4, + Stride: 4, + Data: []float64{ + 1, 0, 0, 0, + 0, 6.25219991450918e-1, 8.17510791994361e-1, 0, + 0, 3.31218891622294e-1, 1.24103744878131e-1, 0, + 0, 0, 0, 1, + }, + }, + ilo: 1, + ihi: 2, + iloz: 0, + ihiz: 3, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + -1.1219562276608, 6.85473513349362e-1, + -8.19951061145131e-1, 1.93728523178888e-1, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{ + -4.64113852240958e-1 + 3.59580510817350e-1i, + -4.64113852240958e-1 - 3.59580510817350e-1i, + }, + }, + { + h: blas64.General{ + Rows: 5, + Cols: 5, + Stride: 5, + Data: []float64{ + 9.57590178533658e-1, -5.10651295522708e-1, 9.24974510015869e-1, -1.30016306879522e-1, 2.92601986926954e-2, + -1.08084756637964, 1.77529701001213, -1.36480197632509, 2.23196371219601e-1, 1.12912853063308e-1, + 0, -8.44075612174676e-1, 1.067867614486, -2.55782915176399e-1, -2.00598563137468e-1, + 0, 0, -5.67097237165410e-1, 2.07205057427341e-1, 6.54998340743380e-1, + 0, 0, 0, -1.89441413886041e-1, -4.18125416021786e-1, + }, + }, + ilo: 0, + ihi: 4, + iloz: 0, + ihiz: 4, + evWant: []complex128{ + 2.94393309555622, + 4.97029793606701e-1 + 3.63041654992384e-1i, + 4.97029793606701e-1 - 3.63041654992384e-1i, + -1.74079119166145e-1 + 2.01570009462092e-1i, + -1.74079119166145e-1 - 2.01570009462092e-1i, + }, + }, + } { + test.wantt = true + test.wantz = true + 
testDlahqr(t, impl, test) + } +} + +func testDlahqr(t *testing.T, impl Dlahqrer, test dlahqrTest) { + const tol = 1e-14 + + h := cloneGeneral(test.h) + n := h.Cols + extra := h.Stride - h.Cols + wantt := test.wantt + wantz := test.wantz + ilo := test.ilo + ihi := test.ihi + iloz := test.iloz + ihiz := test.ihiz + + var z, zCopy blas64.General + if wantz { + z = eye(n, n+extra) + zCopy = cloneGeneral(z) + } + + wr := nanSlice(ihi + 1) + wi := nanSlice(ihi + 1) + + unconverged := impl.Dlahqr(wantt, wantz, n, ilo, ihi, h.Data, h.Stride, wr, wi, iloz, ihiz, z.Data, max(1, z.Stride)) + + prefix := fmt.Sprintf("Case wantt=%v, wantz=%v, n=%v, ilo=%v, ihi=%v, iloz=%v, ihiz=%v, extra=%v", + wantt, wantz, n, ilo, ihi, iloz, ihiz, extra) + + if !generalOutsideAllNaN(h) { + t.Errorf("%v: out-of-range write to H\n%v", prefix, h.Data) + } + if !generalOutsideAllNaN(z) { + t.Errorf("%v: out-of-range write to Z\n%v", prefix, z.Data) + } + + if !isUpperHessenberg(h) { + t.Logf("%v: H is not Hessenberg", prefix) + } + + start := ilo // Index of the first computed eigenvalue. + if unconverged != 0 { + start = unconverged + if start == ihi+1 { + t.Logf("%v: no eigenvalue has converged", prefix) + } + } + + // Check that wr and wi have not been modified in [:start]. + if !isAllNaN(wr[:start]) { + t.Errorf("%v: unexpected modification of wr", prefix) + } + if !isAllNaN(wi[:start]) { + t.Errorf("%v: unexpected modification of wi", prefix) + } + + var hasReal bool + for i := start; i <= ihi; { + if wi[i] == 0 { // Real eigenvalue. + hasReal = true + // Check that the eigenvalue corresponds to a 1×1 block + // on the diagonal of H. + if wantt { + if wr[i] != h.Data[i*h.Stride+i] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i, i) + } + for _, index := range []struct{ r, c int }{ + {i, i - 1}, // h h h + {i + 1, i - 1}, // 0 wr[i] h + {i + 1, i}, // 0 0 h + } { + if index.r >= n || index.c < 0 { + continue + } + if h.Data[index.r*h.Stride+index.c] != 0 { + t.Errorf("%v: H[%v,%v] != 0", prefix, index.r, index.c) + } + } + } + i++ + continue + } + + // Complex eigenvalue. + + // In the conjugate pair the real parts must be equal. + if wr[i] != wr[i+1] { + t.Errorf("%v: real part of conjugate pair not equal, i=%v", prefix, i) + } + // The first imaginary part must be positive. + if wi[i] < 0 { + t.Errorf("%v: wi[%v] not positive", prefix, i) + } + // The second imaginary part must be negative with the same + // magnitude. + if wi[i] != -wi[i+1] { + t.Errorf("%v: wi[%v] != -wi[%v]", prefix, i, i+1) + } + if wantt { + // Check that wi[i] has the correct value. + if wr[i] != h.Data[i*h.Stride+i] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i, i) + } + if wr[i] != h.Data[(i+1)*h.Stride+i+1] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i+1, i+1) + } + prod := math.Abs(h.Data[(i+1)*h.Stride+i] * h.Data[i*h.Stride+i+1]) + if math.Abs(math.Sqrt(prod)-wi[i]) > tol { + t.Errorf("%v: unexpected value of wi[%v]: want %v, got %v", prefix, i, math.Sqrt(prod), wi[i]) + } + + // Check that the corresponding diagonal block is 2×2. + for _, index := range []struct{ r, c int }{ + {i, i - 1}, // i + {i + 1, i - 1}, // h h h h + {i + 2, i - 1}, // 0 wr[i] b h i + {i + 2, i}, // 0 c wr[i+1] h + {i + 2, i + 1}, // 0 0 0 h + } { + if index.r >= n || index.c < 0 { + continue + } + if h.Data[index.r*h.Stride+index.c] != 0 { + t.Errorf("%v: H[%v,%v] != 0", prefix, index.r, index.c) + } + } + } + i += 2 + } + // If the number of found eigenvalues is odd, at least one must be real. 
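+ // Complex eigenvalues of a real matrix come in conjugate pairs (handled above by i += 2),
+ // so an odd count of converged eigenvalues forces at least one real eigenvalue.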
+ if (ihi+1-start)%2 != 0 && !hasReal { + t.Errorf("%v: expected at least one real eigenvalue", prefix) + } + + // Compare found eigenvalues to the reference, if known. + if test.evWant != nil { + for i := start; i <= ihi; i++ { + ev := complex(wr[i], wi[i]) + found, _ := containsComplex(test.evWant, ev, tol) + if !found { + t.Errorf("%v: unexpected eigenvalue %v", prefix, ev) + } + } + } + + if !wantz { + return + } + + // Z should contain the orthogonal matrix U. + if !isOrthogonal(z) { + t.Errorf("%v: Z is not orthogonal", prefix) + } + // Z should have been modified only in the + // [iloz:ihiz+1,ilo:ihi+1] block. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if iloz <= i && i <= ihiz && ilo <= j && j <= ihi { + continue + } + if z.Data[i*z.Stride+j] != zCopy.Data[i*zCopy.Stride+j] { + t.Errorf("%v: Z modified outside of [iloz:ihiz+1,ilo:ihi+1] block", prefix) + } + } + } + if wantt { + hu := eye(n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, test.h, z, 0, hu) + uhu := eye(n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, z, hu, 0, uhu) + if !equalApproxGeneral(uhu, h, 10*tol) { + t.Errorf("%v: Z^T*(initial H)*Z and (final H) are not equal", prefix) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahr2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahr2.go new file mode 100644 index 0000000..390872f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlahr2.go @@ -0,0 +1,241 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "compress/gzip" + "encoding/json" + "fmt" + "log" + "math" + "os" + "path/filepath" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dlahr2er interface { + Dlahr2(n, k, nb int, a []float64, lda int, tau, t []float64, ldt int, y []float64, ldy int) +} + +type Dlahr2test struct { + N, K, NB int + A []float64 + + AWant []float64 + TWant []float64 + YWant []float64 + TauWant []float64 +} + +func Dlahr2Test(t *testing.T, impl Dlahr2er) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + n, k, nb int + }{ + {3, 0, 3}, + {3, 1, 2}, + {3, 1, 1}, + + {5, 0, 5}, + {5, 1, 4}, + {5, 1, 3}, + {5, 1, 2}, + {5, 1, 1}, + {5, 2, 3}, + {5, 2, 2}, + {5, 2, 1}, + {5, 3, 2}, + {5, 3, 1}, + + {7, 3, 4}, + {7, 3, 3}, + {7, 3, 2}, + {7, 3, 1}, + + {10, 0, 10}, + {10, 1, 9}, + {10, 1, 5}, + {10, 1, 1}, + {10, 5, 5}, + {10, 5, 3}, + {10, 5, 1}, + } { + for cas := 0; cas < 100; cas++ { + for _, extraStride := range []int{0, 1, 10} { + n := test.n + k := test.k + nb := test.nb + + a := randomGeneral(n, n-k+1, n-k+1+extraStride, rnd) + aCopy := a + aCopy.Data = make([]float64, len(a.Data)) + copy(aCopy.Data, a.Data) + tmat := nanTriangular(blas.Upper, nb, nb+extraStride) + y := nanGeneral(n, nb, nb+extraStride) + tau := nanSlice(nb) + + impl.Dlahr2(n, k, nb, a.Data, a.Stride, tau, tmat.Data, tmat.Stride, y.Data, y.Stride) + + prefix := fmt.Sprintf("Case n=%v, k=%v, nb=%v, ldex=%v", n, k, nb, extraStride) + + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + if !triangularOutsideAllNaN(tmat) { + t.Errorf("%v: out-of-range write to T\n%v", prefix, tmat.Data) + } + if !generalOutsideAllNaN(y) { + t.Errorf("%v: out-of-range write to Y\n%v", prefix, y.Data) + } + + // Check that A[:k,:] and A[:,nb:] blocks were not modified. 
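+ // Dlahr2 updates only rows k and below of the first nb columns of A; everything else must
+ // come back unchanged, which the loop below verifies against aCopy.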
+ for i := 0; i < n; i++ { + for j := 0; j < n-k+1; j++ { + if i >= k && j < nb { + continue + } + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected write to A[%v,%v]", prefix, i, j) + } + } + } + + // Check that all elements of tau were assigned. + for i, v := range tau { + if math.IsNaN(v) { + t.Errorf("%v: tau[%v] not assigned", prefix, i) + } + } + + // Extract V from a. + v := blas64.General{ + Rows: n - k + 1, + Cols: nb, + Stride: nb, + Data: make([]float64, (n-k+1)*nb), + } + for j := 0; j < v.Cols; j++ { + v.Data[(j+1)*v.Stride+j] = 1 + for i := j + 2; i < v.Rows; i++ { + v.Data[i*v.Stride+j] = a.Data[(i+k-1)*a.Stride+j] + } + } + + // VT = V. + vt := v + vt.Data = make([]float64, len(v.Data)) + copy(vt.Data, v.Data) + // VT = V * T. + blas64.Trmm(blas.Right, blas.NoTrans, 1, tmat, vt) + // YWant = A * V * T. + ywant := blas64.General{ + Rows: n, + Cols: nb, + Stride: nb, + Data: make([]float64, n*nb), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aCopy, vt, 0, ywant) + + // Compare Y and YWant. + for i := 0; i < n; i++ { + for j := 0; j < nb; j++ { + diff := math.Abs(ywant.Data[i*ywant.Stride+j] - y.Data[i*y.Stride+j]) + if diff > 1e-14 { + t.Errorf("%v: unexpected Y[%v,%v], diff=%v", prefix, i, j, diff) + } + } + } + + // Construct Q directly from the first nb columns of a. + q := constructQ("QR", n-k, nb, a.Data[k*a.Stride:], a.Stride, tau) + if !isOrthogonal(q) { + t.Errorf("%v: Q is not orthogonal", prefix) + } + // Construct Q as the product Q = I - V*T*V^T. + qwant := blas64.General{ + Rows: n - k + 1, + Cols: n - k + 1, + Stride: n - k + 1, + Data: make([]float64, (n-k+1)*(n-k+1)), + } + for i := 0; i < qwant.Rows; i++ { + qwant.Data[i*qwant.Stride+i] = 1 + } + blas64.Gemm(blas.NoTrans, blas.Trans, -1, vt, v, 1, qwant) + if !isOrthogonal(qwant) { + t.Errorf("%v: Q = I - V*T*V^T is not orthogonal", prefix) + } + + // Compare Q and QWant. Note that since Q is + // (n-k)×(n-k) and QWant is (n-k+1)×(n-k+1), we + // ignore the first row and column of QWant. + for i := 0; i < n-k; i++ { + for j := 0; j < n-k; j++ { + diff := math.Abs(q.Data[i*q.Stride+j] - qwant.Data[(i+1)*qwant.Stride+j+1]) + if diff > 1e-14 { + t.Errorf("%v: unexpected Q[%v,%v], diff=%v", prefix, i, j, diff) + } + } + } + } + } + } + + // Go runs tests from the source directory, so unfortunately we need to + // include the "../testlapack" part. 
+ file, err := os.Open(filepath.FromSlash("../testlapack/testdata/dlahr2data.json.gz")) + if err != nil { + log.Fatal(err) + } + defer file.Close() + r, err := gzip.NewReader(file) + if err != nil { + log.Fatal(err) + } + defer r.Close() + + var tests []Dlahr2test + json.NewDecoder(r).Decode(&tests) + for _, test := range tests { + tau := make([]float64, len(test.TauWant)) + for _, ldex := range []int{0, 1, 20} { + n := test.N + k := test.K + nb := test.NB + + lda := n - k + 1 + ldex + a := make([]float64, (n-1)*lda+n-k+1) + copyMatrix(n, n-k+1, a, lda, test.A) + + ldt := nb + ldex + tmat := make([]float64, (nb-1)*ldt+nb) + + ldy := nb + ldex + y := make([]float64, (n-1)*ldy+nb) + + impl.Dlahr2(n, k, nb, a, lda, tau, tmat, ldt, y, ldy) + + prefix := fmt.Sprintf("Case n=%v, k=%v, nb=%v, ldex=%v", n, k, nb, ldex) + if !equalApprox(n, n-k+1, a, lda, test.AWant, 1e-14) { + t.Errorf("%v: unexpected matrix A\n got=%v\nwant=%v", prefix, a, test.AWant) + } + if !equalApproxTriangular(true, nb, tmat, ldt, test.TWant, 1e-14) { + t.Errorf("%v: unexpected matrix T\n got=%v\nwant=%v", prefix, tmat, test.TWant) + } + if !equalApprox(n, nb, y, ldy, test.YWant, 1e-14) { + t.Errorf("%v: unexpected matrix Y\n got=%v\nwant=%v", prefix, y, test.YWant) + } + if !floats.EqualApprox(tau, test.TauWant, 1e-14) { + t.Errorf("%v: unexpected slice tau\n got=%v\nwant=%v", prefix, tau, test.TauWant) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaln2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaln2.go new file mode 100644 index 0000000..aa35f3d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaln2.go @@ -0,0 +1,152 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" +) + +type Dlaln2er interface { + Dlaln2(trans bool, na, nw int, smin, ca float64, a []float64, lda int, d1, d2 float64, b []float64, ldb int, wr, wi float64, x []float64, ldx int) (scale, xnorm float64, ok bool) +} + +func Dlaln2Test(t *testing.T, impl Dlaln2er) { + rnd := rand.New(rand.NewSource(1)) + for _, trans := range []bool{true, false} { + for _, na := range []int{1, 2} { + for _, nw := range []int{1, 2} { + for _, extra := range []int{0, 1, 2, 13} { + for cas := 0; cas < 1000; cas++ { + testDlaln2(t, impl, trans, na, nw, extra, rnd) + } + } + } + } + } +} + +func testDlaln2(t *testing.T, impl Dlaln2er, trans bool, na, nw, extra int, rnd *rand.Rand) { + const tol = 1e-12 + + // Generate random input scalars. + ca := rnd.NormFloat64() + d1 := rnd.NormFloat64() + d2 := rnd.NormFloat64() + + var w complex128 + if nw == 1 { + w = complex(rand.NormFloat64(), 0) + } else { + w = complex(rand.NormFloat64(), rand.NormFloat64()) + } + smin := dlamchP * (math.Abs(real(w)) + math.Abs(imag(w))) + + // Generate random input matrices. 
+ a := randomGeneral(na, na, na+extra, rnd) + b := randomGeneral(na, nw, nw+extra, rnd) + x := randomGeneral(na, nw, nw+extra, rnd) + + scale, xnormGot, ok := impl.Dlaln2(trans, na, nw, smin, ca, a.Data, a.Stride, d1, d2, b.Data, b.Stride, real(w), imag(w), x.Data, x.Stride) + + prefix := fmt.Sprintf("Case trans=%v, na=%v, nw=%v, extra=%v", trans, na, nw, extra) + + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + if !generalOutsideAllNaN(b) { + t.Errorf("%v: out-of-range write to B\n%v", prefix, b.Data) + } + if !generalOutsideAllNaN(x) { + t.Errorf("%v: out-of-range write to X\n%v", prefix, x.Data) + } + + // Scale is documented to be <= 1. + if scale <= 0 || 1 < scale { + t.Errorf("%v: invalid value of scale=%v", prefix, scale) + } + + // Calculate the infinity norm of X explicitly. + var xnormWant float64 + for i := 0; i < na; i++ { + var rowsum float64 + for j := 0; j < nw; j++ { + rowsum += math.Abs(x.Data[i*x.Stride+j]) + } + if rowsum > xnormWant { + xnormWant = rowsum + } + } + if xnormWant != xnormGot { + t.Errorf("Case %v: unexpected xnorm with scale=%v. Want %v, got %v", prefix, scale, xnormWant, xnormGot) + } + + if !ok { + // If ok is false, the matrix has been perturbed but we don't + // know how. Return without comparing both sides of the + // equation. + return + } + + // Compute a complex matrix + // M := ca * A - w * D + // or + // M := ca * A^T - w * D. + m := make([]complex128, na*na) + if trans { + // M = ca * A^T + for i := 0; i < na; i++ { + for j := 0; j < na; j++ { + m[i*na+j] = complex(ca*a.Data[j*a.Stride+i], 0) + } + } + } else { + // M = ca * A^T + for i := 0; i < na; i++ { + for j := 0; j < na; j++ { + m[i*na+j] = complex(ca*a.Data[i*a.Stride+j], 0) + } + } + } + // Subtract the diagonal matrix w * D. + m[0] -= w * complex(d1, 0) + if na == 2 { + m[3] -= w * complex(d2, 0) + } + + // Convert real na×2 matrices X and scale*B into complex na-vectors. + cx := make([]complex128, na) + cb := make([]complex128, na) + switch nw { + case 1: + for i := 0; i < na; i++ { + cx[i] = complex(x.Data[i*x.Stride], 0) + cb[i] = complex(scale*b.Data[i*x.Stride], 0) + } + case 2: + for i := 0; i < na; i++ { + cx[i] = complex(x.Data[i*x.Stride], x.Data[i*x.Stride+1]) + cb[i] = complex(scale*b.Data[i*b.Stride], scale*b.Data[i*b.Stride+1]) + } + } + + // Compute M * X. + mx := make([]complex128, na) + for i := 0; i < na; i++ { + for j := 0; j < na; j++ { + mx[i] += m[i*na+j] * cx[j] + } + } + // Check whether |M * X - scale * B|_max <= tol. + for i := 0; i < na; i++ { + if cmplx.Abs(mx[i]-cb[i]) > tol { + t.Errorf("Case %v: unexpected value of left-hand side at row %v with scale=%v. Want %v, got %v", prefix, i, scale, cb[i], mx[i]) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlange.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlange.go new file mode 100644 index 0000000..b7776ee --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlange.go @@ -0,0 +1,101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +type Dlanger interface { + Dlange(norm lapack.MatrixNorm, m, n int, a []float64, lda int, work []float64) float64 +} + +func DlangeTest(t *testing.T, impl Dlanger) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {4, 3, 0}, + {3, 4, 0}, + {4, 3, 100}, + {3, 4, 100}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate m×n matrix A and fill it with random numbers from [-0.5, 0.5). + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() - 0.5 + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Allocate workspace slice. + work := make([]float64, n) + for i := range work { + work[i] = rnd.Float64() + } + + // Test various norms by comparing the result from Dlange with + // explicit calculation. + + // Test MaxAbs norm. + norm := impl.Dlange(lapack.MaxAbs, m, n, a, lda, work) + var ans float64 + for i := 0; i < m; i++ { + idx := blas64.Iamax(blas64.Vector{N: n, Inc: 1, Data: aCopy[i*lda:]}) + ans = math.Max(ans, math.Abs(a[i*lda+idx])) + } + // Should be strictly equal because there is no floating point summation error. + if ans != norm { + t.Errorf("MaxAbs mismatch. Want %v, got %v.", ans, norm) + } + + // Test MaxColumnSum norm. + norm = impl.Dlange(lapack.MaxColumnSum, m, n, a, lda, work) + ans = 0 + for i := 0; i < n; i++ { + sum := blas64.Asum(blas64.Vector{N: m, Inc: lda, Data: aCopy[i:]}) + ans = math.Max(ans, sum) + } + if math.Abs(norm-ans) > 1e-14 { + t.Errorf("MaxColumnSum mismatch. Want %v, got %v.", ans, norm) + } + + // Test MaxRowSum norm. + norm = impl.Dlange(lapack.MaxRowSum, m, n, a, lda, work) + ans = 0 + for i := 0; i < m; i++ { + sum := blas64.Asum(blas64.Vector{N: n, Inc: 1, Data: aCopy[i*lda:]}) + ans = math.Max(ans, sum) + } + if math.Abs(norm-ans) > 1e-14 { + t.Errorf("MaxRowSum mismatch. Want %v, got %v.", ans, norm) + } + + // Test Frobenius norm. + norm = impl.Dlange(lapack.Frobenius, m, n, a, lda, work) + ans = 0 + for i := 0; i < m; i++ { + sum := blas64.Nrm2(blas64.Vector{N: n, Inc: 1, Data: aCopy[i*lda:]}) + ans += sum * sum + } + ans = math.Sqrt(ans) + if math.Abs(norm-ans) > 1e-14 { + t.Errorf("Frobenius norm mismatch. Want %v, got %v.", ans, norm) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanst.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanst.go new file mode 100644 index 0000000..3aeb1f6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanst.go @@ -0,0 +1,62 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/lapack" +) + +type Dlanster interface { + Dlanst(norm lapack.MatrixNorm, n int, d, e []float64) float64 + Dlanger +} + +func DlanstTest(t *testing.T, impl Dlanster) { + rnd := rand.New(rand.NewSource(1)) + for _, norm := range []lapack.MatrixNorm{lapack.MaxAbs, lapack.MaxColumnSum, lapack.MaxRowSum, lapack.Frobenius} { + for _, n := range []int{1, 3, 10, 100} { + for cas := 0; cas < 100; cas++ { + // Generate randomly the main diagonal of the + // symmetric tridiagonal matrix A. 
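+			// d holds the n diagonal elements of A and e (allocated below) its n-1 off-diagonal elements.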
+ d := make([]float64, n) + for i := range d { + d[i] = rnd.NormFloat64() + } + // Generate randomly the off-diagonal of A. + e := make([]float64, n-1) + for i := range e { + e[i] = rnd.NormFloat64() + } + + // Create A in dense representation. + m := n + lda := n + a := make([]float64, m*lda) + for i := 0; i < n; i++ { + a[i*lda+i] = d[i] + } + for i := 0; i < n-1; i++ { + a[i*lda+i+1] = e[i] + a[(i+1)*lda+i] = e[i] + } + + work := make([]float64, n) + // Compute a norm of A using Dlanst. + syNorm := impl.Dlanst(norm, n, d, e) + // Compute a reference value for the norm using + // Dlange and the dense representation of A. + geNorm := impl.Dlange(norm, m, n, a, lda, work) + if math.Abs(syNorm-geNorm) > 1e-12 { + t.Errorf("Norm mismatch: norm = %v, cas = %v, n = %v. Want %v, got %v.", string(norm), cas, n, geNorm, syNorm) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlansy.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlansy.go new file mode 100644 index 0000000..6d6bf89 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlansy.go @@ -0,0 +1,93 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +type Dlansyer interface { + Dlanger + Dlansy(norm lapack.MatrixNorm, uplo blas.Uplo, n int, a []float64, lda int, work []float64) float64 +} + +func DlansyTest(t *testing.T, impl Dlansyer) { + rnd := rand.New(rand.NewSource(1)) + for _, norm := range []lapack.MatrixNorm{lapack.MaxAbs, lapack.MaxColumnSum, lapack.MaxRowSum, lapack.Frobenius} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, test := range []struct { + n, lda int + }{ + {1, 0}, + {3, 0}, + + {1, 10}, + {3, 10}, + } { + for trial := 0; trial < 100; trial++ { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate n×n matrix A and fill it. + // Only the uplo triangle of A will be used below + // to represent a symmetric matrix. + a := make([]float64, lda*n) + if trial == 0 { + // In the first trial fill the matrix + // with predictable integers. + for i := range a { + a[i] = float64(i) + } + } else { + // Otherwise fill it with random numbers. + for i := range a { + a[i] = rnd.NormFloat64() + } + } + + // Create a dense representation of the symmetric matrix + // stored in the uplo triangle of A. + aDense := make([]float64, n*n) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := a[i*lda+j] + aDense[i*n+j] = v + aDense[j*n+i] = v + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + v := a[i*lda+j] + aDense[i*n+j] = v + aDense[j*n+i] = v + } + } + } + + work := make([]float64, n) + // Compute the norm of the symmetric matrix A. + got := impl.Dlansy(norm, uplo, n, a, lda, work) + // Compute the reference norm value using Dlange + // and the dense representation of A. + want := impl.Dlange(norm, n, n, aDense, n, work) + if math.Abs(want-got) > 1e-14 { + t.Errorf("Norm mismatch. 
norm = %c, upper = %v, n = %v, lda = %v, want %v, got %v.", + norm, uplo == blas.Upper, n, lda, got, want) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlantr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlantr.go new file mode 100644 index 0000000..5994dc3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlantr.go @@ -0,0 +1,90 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/lapack" +) + +type Dlantrer interface { + Dlanger + Dlantr(norm lapack.MatrixNorm, uplo blas.Uplo, diag blas.Diag, m, n int, a []float64, lda int, work []float64) float64 +} + +func DlantrTest(t *testing.T, impl Dlantrer) { + rnd := rand.New(rand.NewSource(1)) + for _, norm := range []lapack.MatrixNorm{lapack.MaxAbs, lapack.MaxColumnSum, lapack.MaxRowSum, lapack.Frobenius} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, test := range []struct { + m, n, lda int + }{ + {3, 3, 0}, + {3, 5, 0}, + {10, 5, 0}, + + {5, 5, 11}, + {5, 10, 11}, + {10, 5, 11}, + } { + // Do a couple of random trials since the values change. + for trial := 0; trial < 100; trial++ { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + if trial == 0 { + for i := range a { + a[i] = float64(i) + } + } else { + for i := range a { + a[i] = rnd.NormFloat64() + } + } + aDense := make([]float64, len(a)) + if uplo == blas.Lower { + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + aDense[i*lda+j] = a[i*lda+j] + } + } + } else { + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + aDense[i*lda+j] = a[i*lda+j] + } + } + } + if diag == blas.Unit { + for i := 0; i < min(m, n); i++ { + aDense[i*lda+i] = 1 + } + } + work := make([]float64, n+6) + for i := range work { + work[i] = rnd.Float64() + } + got := impl.Dlantr(norm, uplo, diag, m, n, a, lda, work) + want := impl.Dlange(norm, m, n, aDense, lda, work) + if math.Abs(got-want) > 1e-13 { + t.Errorf("Norm mismatch. norm = %c, unitdiag = %v, upper = %v, m = %v, n = %v, lda = %v, Want %v, got %v.", + norm, diag == blas.Unit, uplo == blas.Upper, m, n, lda, got, want) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanv2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanv2.go new file mode 100644 index 0000000..98403a5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlanv2.go @@ -0,0 +1,113 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" +) + +type Dlanv2er interface { + Dlanv2(a, b, c, d float64) (aa, bb, cc, dd float64, rt1r, rt1i, rt2r, rt2i float64, cs, sn float64) +} + +func Dlanv2Test(t *testing.T, impl Dlanv2er) { + rnd := rand.New(rand.NewSource(1)) + t.Run("UpperTriangular", func(t *testing.T) { + for i := 0; i < 10; i++ { + a := rnd.NormFloat64() + b := rnd.NormFloat64() + d := rnd.NormFloat64() + dlanv2Test(t, impl, a, b, 0, d) + } + }) + t.Run("LowerTriangular", func(t *testing.T) { + for i := 0; i < 10; i++ { + a := rnd.NormFloat64() + c := rnd.NormFloat64() + d := rnd.NormFloat64() + dlanv2Test(t, impl, a, 0, c, d) + } + }) + t.Run("StandardSchur", func(t *testing.T) { + for i := 0; i < 10; i++ { + a := rnd.NormFloat64() + b := rnd.NormFloat64() + c := rnd.NormFloat64() + if math.Signbit(b) == math.Signbit(c) { + c = -c + } + dlanv2Test(t, impl, a, b, c, a) + } + }) + t.Run("General", func(t *testing.T) { + for i := 0; i < 100; i++ { + a := rnd.NormFloat64() + b := rnd.NormFloat64() + c := rnd.NormFloat64() + d := rnd.NormFloat64() + dlanv2Test(t, impl, a, b, c, d) + } + }) +} + +func dlanv2Test(t *testing.T, impl Dlanv2er, a, b, c, d float64) { + aa, bb, cc, dd, rt1r, rt1i, rt2r, rt2i, cs, sn := impl.Dlanv2(a, b, c, d) + + mat := fmt.Sprintf("[%v %v; %v %v]", a, b, c, d) + if cc == 0 { + // The eigenvalues are real, so check that the imaginary parts + // are zero. + if rt1i != 0 || rt2i != 0 { + t.Errorf("Unexpected complex eigenvalues for %v", mat) + } + } else { + // The eigenvalues are complex, so check that documented + // conditions hold. + if aa != dd { + t.Errorf("Diagonal elements not equal for %v: got [%v %v]", mat, aa, dd) + } + if bb*cc >= 0 { + t.Errorf("Non-diagonal elements have the same sign for %v: got [%v %v]", mat, bb, cc) + } else { + // Compute the absolute value of the imaginary part. + im := math.Sqrt(-bb * cc) + // Check that ±im is close to one of the returned + // imaginary parts. + if math.Abs(rt1i-im) > 1e-14 && math.Abs(rt1i+im) > 1e-14 { + t.Errorf("Unexpected imaginary part of eigenvalue for %v: got %v, want %v or %v", mat, rt1i, im, -im) + } + if math.Abs(rt2i-im) > 1e-14 && math.Abs(rt2i+im) > 1e-14 { + t.Errorf("Unexpected imaginary part of eigenvalue for %v: got %v, want %v or %v", mat, rt2i, im, -im) + } + } + } + // Check that the returned real parts are consistent. + if rt1r != aa && rt1r != dd { + t.Errorf("Unexpected real part of eigenvalue for %v: got %v, want %v or %v", mat, rt1r, aa, dd) + } + if rt2r != aa && rt2r != dd { + t.Errorf("Unexpected real part of eigenvalue for %v: got %v, want %v or %v", mat, rt2r, aa, dd) + } + // Check that the columns of the orthogonal matrix have unit norm. + if math.Abs(math.Hypot(cs, sn)-1) > 1e-14 { + t.Errorf("Unexpected unitary matrix for %v: got cs %v, sn %v", mat, cs, sn) + } + + // Re-compute the original matrix [a b; c d] from its factorization. 
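+	// With G = [cs -sn; sn cs], the code below forms G * [aa bb; cc dd] * G^T and compares it entry by entry with the input matrix.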
+ gota := cs*(aa*cs-bb*sn) - sn*(cc*cs-dd*sn) + gotb := cs*(aa*sn+bb*cs) - sn*(cc*sn+dd*cs) + gotc := sn*(aa*cs-bb*sn) + cs*(cc*cs-dd*sn) + gotd := sn*(aa*sn+bb*cs) + cs*(cc*sn+dd*cs) + if math.Abs(gota-a) > 1e-14 || + math.Abs(gotb-b) > 1e-14 || + math.Abs(gotc-c) > 1e-14 || + math.Abs(gotd-d) > 1e-14 { + t.Errorf("Unexpected factorization: got [%v %v; %v %v], want [%v %v; %v %v]", gota, gotb, gotc, gotd, a, b, c, d) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapll.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapll.go new file mode 100644 index 0000000..f0f2d4a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapll.go @@ -0,0 +1,51 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dlapller interface { + Dgesvder + Dlapll(n int, x []float64, incX int, y []float64, incY int) float64 +} + +func DlapllTest(t *testing.T, impl Dlapller) { + rnd := rand.New(rand.NewSource(1)) + for i, m := range []int{5, 6, 9, 300, 400, 600} { + n := 2 + lda := n + // Allocate m×2 matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + // Store a copy of A for later comparison. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Compute the smallest singular value of A. + got := impl.Dlapll(m, a[0:], lda, a[1:], lda) + + // Compute singular values of A independently by Dgesvd. + s := make([]float64, min(m, n)) + work := make([]float64, 1) + impl.Dgesvd(lapack.SVDNone, lapack.SVDNone, m, n, aCopy, lda, s, nil, 1, nil, 1, work, -1) + work = make([]float64, int(work[0])) + impl.Dgesvd(lapack.SVDNone, lapack.SVDNone, m, n, aCopy, lda, s, nil, 1, nil, 1, work, len(work)) + // Take the smallest singular value. + want := s[len(s)-1] + + if !floats.EqualWithinAbsOrRel(got, want, 1e-14, 1e-14) { + t.Errorf("Case %d: unexpected smallest singular value, got:%f want:%f", i, got, want) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapmt.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapmt.go new file mode 100644 index 0000000..2a8fee5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapmt.go @@ -0,0 +1,113 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlapmter interface { + Dlapmt(forward bool, m, n int, x []float64, ldx int, k []int) +} + +func DlapmtTest(t *testing.T, impl Dlapmter) { + for ti, test := range []struct { + forward bool + k []int + + want blas64.General + }{ + { + forward: true, k: []int{0, 1, 2}, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12, + }, + }, + }, + { + forward: false, k: []int{0, 1, 2}, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12, + }, + }, + }, + { + forward: true, k: []int{1, 2, 0}, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 2, 3, 1, + 5, 6, 4, + 8, 9, 7, + 11, 12, 10, + }, + }, + }, + { + forward: false, k: []int{1, 2, 0}, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 3, 1, 2, + 6, 4, 5, + 9, 7, 8, + 12, 10, 11, + }, + }, + }, + } { + m := test.want.Rows + n := test.want.Cols + if len(test.k) != n { + panic("bad length of k") + } + + for _, extra := range []int{0, 11} { + x := zeros(m, n, n+extra) + c := 1 + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + x.Data[i*x.Stride+j] = float64(c) + c++ + } + } + + k := make([]int, len(test.k)) + copy(k, test.k) + + impl.Dlapmt(test.forward, m, n, x.Data, x.Stride, k) + + prefix := fmt.Sprintf("Case %v (forward=%t,m=%v,n=%v,extra=%v)", ti, test.forward, m, n, extra) + if !generalOutsideAllNaN(x) { + t.Errorf("%v: out-of-range write to X", prefix) + } + + if !equalApproxGeneral(x, test.want, 0) { + t.Errorf("%v: unexpected X\n%v\n%v", prefix, x, test.want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapy2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapy2.go new file mode 100644 index 0000000..04e6793 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlapy2.go @@ -0,0 +1,31 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dlapy2er interface { + Dlapy2(float64, float64) float64 +} + +func Dlapy2Test(t *testing.T, impl Dlapy2er) { + rnd := rand.New(rand.NewSource(1)) + for i := 0; i < 10; i++ { + x := math.Abs(1e200 * rnd.NormFloat64()) + y := math.Abs(1e200 * rnd.NormFloat64()) + got := impl.Dlapy2(x, y) + want := math.Hypot(x, y) + if !floats.EqualWithinRel(got, want, 1e-16) { + t.Errorf("Dlapy2(%g, %g) = %g, want %g", x, y, got, want) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqp2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqp2.go new file mode 100644 index 0000000..8d7d93f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqp2.go @@ -0,0 +1,105 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaqp2er interface { + Dlapmter + Dlaqp2(m, n, offset int, a []float64, lda int, jpvt []int, tau, vn1, vn2, work []float64) +} + +func Dlaqp2Test(t *testing.T, impl Dlaqp2er) { + for ti, test := range []struct { + m, n, offset int + }{ + {m: 4, n: 3, offset: 0}, + {m: 4, n: 3, offset: 2}, + {m: 4, n: 3, offset: 4}, + {m: 3, n: 4, offset: 0}, + {m: 3, n: 4, offset: 1}, + {m: 3, n: 4, offset: 2}, + {m: 8, n: 3, offset: 0}, + {m: 8, n: 3, offset: 4}, + {m: 8, n: 3, offset: 8}, + {m: 3, n: 8, offset: 0}, + {m: 3, n: 8, offset: 1}, + {m: 3, n: 8, offset: 2}, + {m: 10, n: 10, offset: 0}, + {m: 10, n: 10, offset: 5}, + {m: 10, n: 10, offset: 10}, + } { + m := test.m + n := test.n + jpiv := make([]int, n) + + for _, extra := range []int{0, 11} { + a := zeros(m, n, n+extra) + c := 1 + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a.Data[i*a.Stride+j] = float64(c) + c++ + } + } + aCopy := cloneGeneral(a) + for j := range jpiv { + jpiv[j] = j + } + + tau := make([]float64, n) + vn1 := columnNorms(m, n, a.Data, a.Stride) + vn2 := columnNorms(m, n, a.Data, a.Stride) + work := make([]float64, n) + + impl.Dlaqp2(m, n, test.offset, a.Data, a.Stride, jpiv, tau, vn1, vn2, work) + + prefix := fmt.Sprintf("Case %v (offset=%d,m=%v,n=%v,extra=%v)", ti, test.offset, m, n, extra) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + + if test.offset == m { + continue + } + + mo := m - test.offset + q := constructQ("QR", mo, n, a.Data[test.offset*a.Stride:], a.Stride, tau) + // Check that Q is orthogonal. + if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", ti) + } + + // Check that A * P = Q * R + r := blas64.General{ + Rows: mo, + Cols: n, + Stride: n, + Data: make([]float64, mo*n), + } + for i := 0; i < mo; i++ { + for j := i; j < n; j++ { + r.Data[i*n+j] = a.Data[(test.offset+i)*a.Stride+j] + } + } + got := nanGeneral(mo, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, r, 0, got) + + want := aCopy + impl.Dlapmt(true, want.Rows, want.Cols, want.Data, want.Stride, jpiv) + want.Rows = mo + want.Data = want.Data[test.offset*want.Stride:] + if !equalApproxGeneral(got, want, 1e-12) { + t.Errorf("Case %v, Q*R != A*P\nQ*R=%v\nA*P=%v", ti, got, want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqps.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqps.go new file mode 100644 index 0000000..76e17cb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqps.go @@ -0,0 +1,103 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaqpser interface { + Dlapmter + Dlaqps(m, n, offset, nb int, a []float64, lda int, jpvt []int, tau, vn1, vn2, auxv, f []float64, ldf int) (kb int) +} + +func DlaqpsTest(t *testing.T, impl Dlaqpser) { + for ti, test := range []struct { + m, n, nb, offset int + }{ + {m: 4, n: 3, nb: 2, offset: 0}, + {m: 4, n: 3, nb: 1, offset: 2}, + {m: 3, n: 4, nb: 2, offset: 0}, + {m: 3, n: 4, nb: 1, offset: 2}, + {m: 8, n: 3, nb: 2, offset: 0}, + {m: 8, n: 3, nb: 1, offset: 4}, + {m: 3, n: 8, nb: 2, offset: 0}, + {m: 3, n: 8, nb: 1, offset: 1}, + {m: 10, n: 10, nb: 3, offset: 0}, + {m: 10, n: 10, nb: 2, offset: 5}, + } { + m := test.m + n := test.n + jpiv := make([]int, n) + + for _, extra := range []int{0, 11} { + a := zeros(m, n, n+extra) + c := 1 + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a.Data[i*a.Stride+j] = float64(c) + c++ + } + } + aCopy := cloneGeneral(a) + for j := range jpiv { + jpiv[j] = j + } + + tau := make([]float64, n) + vn1 := columnNorms(m, n, a.Data, a.Stride) + vn2 := columnNorms(m, n, a.Data, a.Stride) + auxv := make([]float64, test.nb) + f := zeros(test.n, test.nb, n) + + kb := impl.Dlaqps(m, n, test.offset, test.nb, a.Data, a.Stride, jpiv, tau, vn1, vn2, auxv, f.Data, f.Stride) + + prefix := fmt.Sprintf("Case %v (offset=%d,m=%v,n=%v,extra=%v)", ti, test.offset, m, n, extra) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + + if test.offset == m { + continue + } + + mo := m - test.offset + q := constructQ("QR", mo, kb, a.Data[test.offset*a.Stride:], a.Stride, tau) + + // Check that Q is orthogonal. + if !isOrthogonal(q) { + t.Errorf("Case %v, Q not orthogonal", ti) + } + + // Check that A * P = Q * R + r := blas64.General{ + Rows: mo, + Cols: kb, + Stride: kb, + Data: make([]float64, mo*kb), + } + for i := 0; i < mo; i++ { + for j := i; j < kb; j++ { + r.Data[i*kb+j] = a.Data[(test.offset+i)*a.Stride+j] + } + } + got := nanGeneral(mo, kb, kb) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, r, 0, got) + + want := aCopy + impl.Dlapmt(true, want.Rows, want.Cols, want.Data, want.Stride, jpiv) + want.Rows = mo + want.Cols = kb + want.Data = want.Data[test.offset*want.Stride:] + if !equalApproxGeneral(got, want, 1e-12) { + t.Errorf("Case %v, Q*R != A*P\nQ*R=%v\nA*P=%v", ti, got, want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr04.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr04.go new file mode 100644 index 0000000..c9fbdc8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr04.go @@ -0,0 +1,450 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaqr04er interface { + Dlaqr04(wantt, wantz bool, n, ilo, ihi int, h []float64, ldh int, wr, wi []float64, iloz, ihiz int, z []float64, ldz int, work []float64, lwork int, recur int) int + + Dlahqrer +} + +type dlaqr04Test struct { + h blas64.General + ilo, ihi int + iloz, ihiz int + wantt, wantz bool + + evWant []complex128 // Optional slice holding known eigenvalues. 
+} + +func Dlaqr04Test(t *testing.T, impl Dlaqr04er) { + rnd := rand.New(rand.NewSource(1)) + + // Tests for small matrices that choose the ilo,ihi and iloz,ihiz pairs + // randomly. + for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 11, 12, 18, 29} { + for _, extra := range []int{0, 11} { + for recur := 0; recur <= 2; recur++ { + for cas := 0; cas < n; cas++ { + ilo := rnd.Intn(n) + ihi := rnd.Intn(n) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + iloz := rnd.Intn(ilo + 1) + ihiz := ihi + rnd.Intn(n-ihi) + h := randomHessenberg(n, n+extra, rnd) + if ilo-1 >= 0 { + h.Data[ilo*h.Stride+ilo-1] = 0 + } + if ihi+1 < n { + h.Data[(ihi+1)*h.Stride+ihi] = 0 + } + test := dlaqr04Test{ + h: h, + ilo: ilo, + ihi: ihi, + iloz: iloz, + ihiz: ihiz, + wantt: wantt, + wantz: wantz, + } + testDlaqr04(t, impl, test, false, recur) + testDlaqr04(t, impl, test, true, recur) + } + } + } + } + } + } + + // Tests for matrices large enough to possibly use the recursion (but it + // doesn't seem to be the case). + for _, n := range []int{100, 500} { + for cas := 0; cas < 5; cas++ { + h := randomHessenberg(n, n, rnd) + test := dlaqr04Test{ + h: h, + ilo: 0, + ihi: n - 1, + iloz: 0, + ihiz: n - 1, + wantt: true, + wantz: true, + } + testDlaqr04(t, impl, test, true, 1) + } + } + + // Tests that make sure that some potentially problematic corner cases, + // like zero-sized matrix, are covered. + for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, extra := range []int{0, 1, 11} { + for _, test := range []dlaqr04Test{ + { + h: randomHessenberg(0, extra, rnd), + ilo: 0, + ihi: -1, + iloz: 0, + ihiz: -1, + }, + { + h: randomHessenberg(1, 1+extra, rnd), + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + }, + { + h: randomHessenberg(2, 2+extra, rnd), + ilo: 1, + ihi: 1, + iloz: 1, + ihiz: 1, + }, + { + h: randomHessenberg(2, 2+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 9, + iloz: 0, + ihiz: 9, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 9, + }, + { + h: randomHessenberg(10, 10+extra, rnd), + ilo: 9, + ihi: 9, + iloz: 0, + ihiz: 9, + }, + } { + if test.ilo-1 >= 0 { + test.h.Data[test.ilo*test.h.Stride+test.ilo-1] = 0 + } + if test.ihi+1 < test.h.Rows { + test.h.Data[(test.ihi+1)*test.h.Stride+test.ihi] = 0 + } + test.wantt = wantt + test.wantz = wantz + testDlaqr04(t, impl, test, false, 1) + testDlaqr04(t, impl, test, true, 1) + } + } + } + } + + // Tests with known eigenvalues computed by Octave. 
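+	// wantt and wantz are forced to true for all of these cases (see the end of the loop below), so both the computed eigenvalues and the accumulated transformations are checked.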
+ for _, test := range []dlaqr04Test{ + { + h: blas64.General{ + Rows: 1, + Cols: 1, + Stride: 1, + Data: []float64{7.09965484086874e-1}, + }, + ilo: 0, + ihi: 0, + iloz: 0, + ihiz: 0, + evWant: []complex128{7.09965484086874e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 0, -1, + 1, 0, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{1i, -1i}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 6.25219991450918e-1, 8.17510791994361e-1, + 3.31218891622294e-1, 1.24103744878131e-1, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 4, + Cols: 4, + Stride: 4, + Data: []float64{ + 1, 0, 0, 0, + 0, 6.25219991450918e-1, 8.17510791994361e-1, 0, + 0, 3.31218891622294e-1, 1.24103744878131e-1, 0, + 0, 0, 0, 1, + }, + }, + ilo: 1, + ihi: 2, + iloz: 0, + ihiz: 3, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + -1.1219562276608, 6.85473513349362e-1, + -8.19951061145131e-1, 1.93728523178888e-1, + }, + }, + ilo: 0, + ihi: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{ + -4.64113852240958e-1 + 3.59580510817350e-1i, + -4.64113852240958e-1 - 3.59580510817350e-1i, + }, + }, + { + h: blas64.General{ + Rows: 5, + Cols: 5, + Stride: 5, + Data: []float64{ + 9.57590178533658e-1, -5.10651295522708e-1, 9.24974510015869e-1, -1.30016306879522e-1, 2.92601986926954e-2, + -1.08084756637964, 1.77529701001213, -1.36480197632509, 2.23196371219601e-1, 1.12912853063308e-1, + 0, -8.44075612174676e-1, 1.067867614486, -2.55782915176399e-1, -2.00598563137468e-1, + 0, 0, -5.67097237165410e-1, 2.07205057427341e-1, 6.54998340743380e-1, + 0, 0, 0, -1.89441413886041e-1, -4.18125416021786e-1, + }, + }, + ilo: 0, + ihi: 4, + iloz: 0, + ihiz: 4, + evWant: []complex128{ + 2.94393309555622, + 4.97029793606701e-1 + 3.63041654992384e-1i, + 4.97029793606701e-1 - 3.63041654992384e-1i, + -1.74079119166145e-1 + 2.01570009462092e-1i, + -1.74079119166145e-1 - 2.01570009462092e-1i, + }, + }, + } { + test.wantt = true + test.wantz = true + testDlaqr04(t, impl, test, false, 1) + testDlaqr04(t, impl, test, true, 1) + } +} + +func testDlaqr04(t *testing.T, impl Dlaqr04er, test dlaqr04Test, optwork bool, recur int) { + const tol = 1e-14 + + h := cloneGeneral(test.h) + n := h.Cols + extra := h.Stride - h.Cols + wantt := test.wantt + wantz := test.wantz + ilo := test.ilo + ihi := test.ihi + iloz := test.iloz + ihiz := test.ihiz + + var z, zCopy blas64.General + if wantz { + z = eye(n, n+extra) + zCopy = cloneGeneral(z) + } + + wr := nanSlice(ihi + 1) + wi := nanSlice(ihi + 1) + + var work []float64 + if optwork { + work = nanSlice(1) + impl.Dlaqr04(wantt, wantz, n, ilo, ihi, h.Data, h.Stride, wr, wi, iloz, ihiz, z.Data, max(1, z.Stride), work, -1, recur) + work = nanSlice(int(work[0])) + } else { + work = nanSlice(max(1, n)) + } + + unconverged := impl.Dlaqr04(wantt, wantz, n, ilo, ihi, h.Data, h.Stride, wr, wi, iloz, ihiz, z.Data, max(1, z.Stride), work, len(work), recur) + + prefix := fmt.Sprintf("Case wantt=%v, wantz=%v, n=%v, ilo=%v, ihi=%v, iloz=%v, ihiz=%v, extra=%v, opt=%v", + wantt, wantz, n, ilo, ihi, iloz, ihiz, extra, optwork) + + if !generalOutsideAllNaN(h) { + t.Errorf("%v: out-of-range write to H\n%v", prefix, h.Data) + } + if !generalOutsideAllNaN(z) { + t.Errorf("%v: out-of-range write to Z\n%v", prefix, z.Data) + } + + start := 
ilo // Index of the first computed eigenvalue. + if unconverged != 0 { + start = unconverged + if start == ihi+1 { + t.Logf("%v: no eigenvalue has converged", prefix) + } + } + + // Check that wr and wi have not been modified within [:start]. + if !isAllNaN(wr[:start]) { + t.Errorf("%v: unexpected modification of wr", prefix) + } + if !isAllNaN(wi[:start]) { + t.Errorf("%v: unexpected modification of wi", prefix) + } + + var hasReal bool + for i := start; i <= ihi; { + if wi[i] == 0 { // Real eigenvalue. + hasReal = true + // Check that the eigenvalue corresponds to a 1×1 block + // on the diagonal of H. + if wantt && wr[i] != h.Data[i*h.Stride+i] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i, i) + } + i++ + continue + } + + // Complex eigenvalue. + + // In the conjugate pair the real parts must be equal. + if wr[i] != wr[i+1] { + t.Errorf("%v: real part of conjugate pair not equal, i=%v", prefix, i) + } + // The first imaginary part must be positive. + if wi[i] < 0 { + t.Errorf("%v: wi[%v] not positive", prefix, i) + } + // The second imaginary part must be negative with the same + // magnitude. + if wi[i] != -wi[i+1] { + t.Errorf("%v: wi[%v] != wi[%v]", prefix, i, i+1) + } + if wantt { + // Check that wi[i] has the correct value. + if wr[i] != h.Data[i*h.Stride+i] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i, i) + } + if wr[i] != h.Data[(i+1)*h.Stride+i+1] { + t.Errorf("%v: wr[%v] != H[%v,%v]", prefix, i, i+1, i+1) + } + im := math.Sqrt(math.Abs(h.Data[(i+1)*h.Stride+i])) * math.Sqrt(math.Abs(h.Data[i*h.Stride+i+1])) + if math.Abs(im-wi[i]) > tol { + t.Errorf("%v: unexpected value of wi[%v]: want %v, got %v", prefix, i, im, wi[i]) + } + } + i += 2 + } + // If the number of found eigenvalues is odd, at least one must be real. + if (ihi+1-start)%2 != 0 && !hasReal { + t.Errorf("%v: expected at least one real eigenvalue", prefix) + } + + // Compare found eigenvalues to the reference, if known. + if test.evWant != nil { + for i := start; i <= ihi; i++ { + ev := complex(wr[i], wi[i]) + found, _ := containsComplex(test.evWant, ev, tol) + if !found { + t.Errorf("%v: unexpected eigenvalue %v", prefix, ev) + } + } + } + + if !wantz { + return + } + + // Z should contain the orthogonal matrix U. + if !isOrthogonal(z) { + t.Errorf("%v: Z is not orthogonal", prefix) + } + // Z should have been modified only in the + // [iloz:ihiz+1,ilo:ihi+1] block. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if iloz <= i && i <= ihiz && ilo <= j && j <= ihi { + continue + } + if z.Data[i*z.Stride+j] != zCopy.Data[i*zCopy.Stride+j] { + t.Errorf("%v: Z modified outside of [iloz:ihiz+1,ilo:ihi+1] block", prefix) + } + } + } + if wantt { + // Zero out h under the subdiagonal because Dlaqr04 uses it as + // workspace. + for i := 2; i < n; i++ { + for j := 0; j < i-1; j++ { + h.Data[i*h.Stride+j] = 0 + } + } + hz := eye(n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, test.h, z, 0, hz) + zhz := eye(n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, z, hz, 0, zhz) + if !equalApproxGeneral(zhz, h, 10*tol) { + t.Errorf("%v: Z^T*(initial H)*Z and (final H) are not equal", prefix) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr1.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr1.go new file mode 100644 index 0000000..3b3e0eb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr1.go @@ -0,0 +1,92 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dlaqr1er interface { + Dlaqr1(n int, h []float64, ldh int, sr1, si1, sr2, si2 float64, v []float64) +} + +func Dlaqr1Test(t *testing.T, impl Dlaqr1er) { + rnd := rand.New(rand.NewSource(1)) + + for _, n := range []int{2, 3} { + for _, ldh := range []int{n, n + 1, n + 10} { + for _, cas := range []int{1, 2} { + for k := 0; k < 100; k++ { + v := make([]float64, n) + for i := range v { + v[i] = math.NaN() + } + h := make([]float64, n*(n-1)*ldh) + for i := range h { + h[i] = math.NaN() + } + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + h[i*ldh+j] = rnd.NormFloat64() + } + } + var sr1, sr2, si1, si2 float64 + if cas == 1 { + sr1 = rnd.NormFloat64() + sr2 = sr1 + si1 = rnd.NormFloat64() + si2 = -si1 + } else { + sr1 = rnd.NormFloat64() + sr2 = rnd.NormFloat64() + si1 = 0 + si2 = 0 + } + impl.Dlaqr1(n, h, ldh, sr1, si1, sr2, si2, v) + + // Matrix H - s1*I. + h1 := make([]complex128, n*n) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + h1[i*n+j] = complex(h[i*ldh+j], 0) + if i == j { + h1[i*n+j] -= complex(sr1, si1) + } + } + } + // First column of H - s2*I. + h2 := make([]complex128, n) + for i := 0; i < n; i++ { + h2[i] = complex(h[i*ldh], 0) + } + h2[0] -= complex(sr2, si2) + + wantv := make([]float64, n) + // Multiply (H-s1*I)*(H-s2*I) to get a tentative + // wantv. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + wantv[i] += real(h1[i*n+j] * h2[j]) + } + } + // Get the unknown scale. + scale := v[0] / wantv[0] + // Compute the actual wantv. + floats.Scale(scale, wantv) + + // The scale must be the same for all elements. + if floats.Distance(wantv, v, math.Inf(1)) > 1e-13 { + t.Errorf("n = %v, ldh = %v, case = %v: Unexpected value of v: got %v, want %v", n, ldh, cas, v, wantv) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr23.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr23.go new file mode 100644 index 0000000..d2f838e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr23.go @@ -0,0 +1,369 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaqr23er interface { + Dlaqr23(wantt, wantz bool, n, ktop, kbot, nw int, h []float64, ldh int, iloz, ihiz int, z []float64, ldz int, sr, si []float64, v []float64, ldv int, nh int, t []float64, ldt int, nv int, wv []float64, ldwv int, work []float64, lwork int, recur int) (ns, nd int) +} + +type dlaqr23Test struct { + wantt, wantz bool + ktop, kbot int + nw int + h blas64.General + iloz, ihiz int + + evWant []complex128 // Optional slice with known eigenvalues. +} + +func newDlaqr23TestCase(wantt, wantz bool, n, ldh int, rnd *rand.Rand) dlaqr23Test { + // Generate the deflation window size. + var nw int + if n <= 75 { + // For small matrices any window size works because they will + // always use Dlahrq inside Dlaqr23. + nw = rnd.Intn(n) + 1 + } else { + // For sufficiently large matrices generate a large enough + // window to assure that the Dlaqr4 path is taken. + nw = 76 + rnd.Intn(n-75) + } + // Generate a random Hessenberg matrix. 
+ h := randomHessenberg(n, ldh, rnd) + // Generate the block limits of H on which Dlaqr23 will operate so that + // the restriction + // 0 <= nw <= kbot-ktop+1 + // is satisfied. + ktop := rnd.Intn(n - nw + 1) + kbot := ktop + nw - 1 + kbot += rnd.Intn(n - kbot) + // Make the block isolated by zeroing out the sub-diagonal elements. + if ktop-1 >= 0 { + h.Data[ktop*h.Stride+ktop-1] = 0 + } + if kbot+1 < n { + h.Data[(kbot+1)*h.Stride+kbot] = 0 + } + // Generate the rows of Z to which transformations will be applied if + // wantz is true. + iloz := rnd.Intn(ktop + 1) + ihiz := kbot + rnd.Intn(n-kbot) + return dlaqr23Test{ + wantt: wantt, + wantz: wantz, + ktop: ktop, + kbot: kbot, + nw: nw, + h: h, + iloz: iloz, + ihiz: ihiz, + } +} + +func Dlaqr23Test(t *testing.T, impl Dlaqr23er) { + rnd := rand.New(rand.NewSource(1)) + + // Randomized tests. + for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 18, 31, 100} { + for _, extra := range []int{0, 11} { + for cas := 0; cas < 30; cas++ { + test := newDlaqr23TestCase(wantt, wantz, n, n+extra, rnd) + testDlaqr23(t, impl, test, false, 1, rnd) + testDlaqr23(t, impl, test, true, 1, rnd) + testDlaqr23(t, impl, test, false, 0, rnd) + testDlaqr23(t, impl, test, true, 0, rnd) + } + } + } + } + } + + // Tests with n=0. + for _, wantt := range []bool{true, false} { + for _, wantz := range []bool{true, false} { + for _, extra := range []int{0, 1, 11} { + test := dlaqr23Test{ + wantt: wantt, + wantz: wantz, + h: randomHessenberg(0, extra, rnd), + ktop: 0, + kbot: -1, + iloz: 0, + ihiz: -1, + nw: 0, + } + testDlaqr23(t, impl, test, true, 1, rnd) + testDlaqr23(t, impl, test, false, 1, rnd) + testDlaqr23(t, impl, test, true, 0, rnd) + testDlaqr23(t, impl, test, false, 0, rnd) + } + } + } + + // Tests with explicit eigenvalues computed by Octave. 
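+	// Each case below sets nw = kbot-ktop+1 (see the end of the loop), so the deflation window spans the whole active block.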
+ for _, test := range []dlaqr23Test{ + { + h: blas64.General{ + Rows: 1, + Cols: 1, + Stride: 1, + Data: []float64{7.09965484086874e-1}, + }, + ktop: 0, + kbot: 0, + iloz: 0, + ihiz: 0, + evWant: []complex128{7.09965484086874e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 0, -1, + 1, 0, + }, + }, + ktop: 0, + kbot: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{1i, -1i}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + 6.25219991450918e-1, 8.17510791994361e-1, + 3.31218891622294e-1, 1.24103744878131e-1, + }, + }, + ktop: 0, + kbot: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 4, + Cols: 4, + Stride: 4, + Data: []float64{ + 1, 0, 0, 0, + 0, 6.25219991450918e-1, 8.17510791994361e-1, 0, + 0, 3.31218891622294e-1, 1.24103744878131e-1, 0, + 0, 0, 0, 1, + }, + }, + ktop: 1, + kbot: 2, + iloz: 0, + ihiz: 3, + evWant: []complex128{9.52203547663447e-1, -2.02879811334398e-1}, + }, + { + h: blas64.General{ + Rows: 2, + Cols: 2, + Stride: 2, + Data: []float64{ + -1.1219562276608, 6.85473513349362e-1, + -8.19951061145131e-1, 1.93728523178888e-1, + }, + }, + ktop: 0, + kbot: 1, + iloz: 0, + ihiz: 1, + evWant: []complex128{ + -4.64113852240958e-1 + 3.59580510817350e-1i, + -4.64113852240958e-1 - 3.59580510817350e-1i, + }, + }, + { + h: blas64.General{ + Rows: 5, + Cols: 5, + Stride: 5, + Data: []float64{ + 9.57590178533658e-1, -5.10651295522708e-1, 9.24974510015869e-1, -1.30016306879522e-1, 2.92601986926954e-2, + -1.08084756637964, 1.77529701001213, -1.36480197632509, 2.23196371219601e-1, 1.12912853063308e-1, + 0, -8.44075612174676e-1, 1.067867614486, -2.55782915176399e-1, -2.00598563137468e-1, + 0, 0, -5.67097237165410e-1, 2.07205057427341e-1, 6.54998340743380e-1, + 0, 0, 0, -1.89441413886041e-1, -4.18125416021786e-1, + }, + }, + ktop: 0, + kbot: 4, + iloz: 0, + ihiz: 4, + evWant: []complex128{ + 2.94393309555622, + 4.97029793606701e-1 + 3.63041654992384e-1i, + 4.97029793606701e-1 - 3.63041654992384e-1i, + -1.74079119166145e-1 + 2.01570009462092e-1i, + -1.74079119166145e-1 - 2.01570009462092e-1i, + }, + }, + } { + test.wantt = true + test.wantz = true + test.nw = test.kbot - test.ktop + 1 + testDlaqr23(t, impl, test, true, 1, rnd) + testDlaqr23(t, impl, test, false, 1, rnd) + testDlaqr23(t, impl, test, true, 0, rnd) + testDlaqr23(t, impl, test, false, 0, rnd) + } +} + +func testDlaqr23(t *testing.T, impl Dlaqr23er, test dlaqr23Test, opt bool, recur int, rnd *rand.Rand) { + const tol = 1e-14 + + // Clone the test matrix to avoid modifying test data. + h := cloneGeneral(test.h) + // Extract test values to simplify notation. + n := h.Cols + extra := h.Stride - h.Cols + wantt := test.wantt + wantz := test.wantz + ktop := test.ktop + kbot := test.kbot + nw := test.nw + iloz := test.iloz + ihiz := test.ihiz + + var z, zCopy blas64.General + if wantz { + // Using the identity matrix for Z is the easiest way to check + // that the transformation accumulated into it by Dlaqr23 is orthogonal. + z = eye(n, n+extra) + zCopy = cloneGeneral(z) + } + + // Allocate slices for storing the converged eigenvalues, initially + // filled with NaN. + sr := nanSlice(kbot + 1) + si := nanSlice(kbot + 1) + + // Allocate work matrices. + v := randomGeneral(nw, nw, nw+extra, rnd) + var nh int + if nw > 0 { + nh = nw + rnd.Intn(nw) // nh must be at least nw. 
+ } + tmat := randomGeneral(nw, nh, nh+extra, rnd) + var nv int + if nw > 0 { + nv = rnd.Intn(nw) + 1 + } + wv := randomGeneral(nv, nw, nw+extra, rnd) + + var work []float64 + if opt { + // Allocate work slice with optimal length. + work = nanSlice(1) + impl.Dlaqr23(wantt, wantz, n, ktop, kbot, nw, h.Data, h.Stride, iloz, ihiz, z.Data, max(1, z.Stride), + sr, si, v.Data, v.Stride, tmat.Cols, tmat.Data, tmat.Stride, wv.Rows, wv.Data, wv.Stride, work, -1, recur) + work = nanSlice(int(work[0])) + } else { + // Allocate work slice with minimum length. + work = nanSlice(max(1, 2*nw)) + } + + ns, nd := impl.Dlaqr23(wantt, wantz, n, ktop, kbot, nw, h.Data, h.Stride, iloz, ihiz, z.Data, max(1, z.Stride), + sr, si, v.Data, v.Stride, tmat.Cols, tmat.Data, tmat.Stride, wv.Rows, wv.Data, wv.Stride, work, len(work), recur) + + prefix := fmt.Sprintf("Case wantt=%v, wantz=%v, n=%v, ktop=%v, kbot=%v, nw=%v, iloz=%v, ihiz=%v, extra=%v", + wantt, wantz, n, ktop, kbot, nw, iloz, ihiz, extra) + + if !generalOutsideAllNaN(h) { + t.Errorf("%v: out-of-range write to H\n%v", prefix, h.Data) + } + if !generalOutsideAllNaN(z) { + t.Errorf("%v: out-of-range write to Z\n%v", prefix, z.Data) + } + if !generalOutsideAllNaN(v) { + t.Errorf("%v: out-of-range write to V\n%v", prefix, v.Data) + } + if !generalOutsideAllNaN(tmat) { + t.Errorf("%v: out-of-range write to T\n%v", prefix, tmat.Data) + } + if !generalOutsideAllNaN(wv) { + t.Errorf("%v: out-of-range write to WV\n%v", prefix, wv.Data) + } + if !isAllNaN(sr[:kbot-nd-ns+1]) || !isAllNaN(sr[kbot+1:]) { + t.Errorf("%v: out-of-range write to sr", prefix) + } + if !isAllNaN(si[:kbot-nd-ns+1]) || !isAllNaN(si[kbot+1:]) { + t.Errorf("%v: out-of-range write to si", prefix) + } + + if !isUpperHessenberg(h) { + t.Errorf("%v: H is not upper Hessenberg", prefix) + } + + if test.evWant != nil { + // Check all converged eigenvalues against known eigenvalues. + for i := kbot - nd + 1; i <= kbot; i++ { + ev := complex(sr[i], si[i]) + found, _ := containsComplex(test.evWant, ev, tol) + if !found { + t.Errorf("%v: unexpected eigenvalue %v", prefix, ev) + } + } + } + + // Checks below need the matrix Z. + if !wantz { + return + } + + // Test whether the matrix Z was modified outside the given block. + var zmod bool + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if z.Data[i*z.Stride+j] == zCopy.Data[i*zCopy.Stride+j] { + continue + } + if i < iloz || ihiz < i || j < kbot-nw+1 || kbot < j { + zmod = true + } + } + } + if zmod { + t.Errorf("%v: unexpected modification of Z", prefix) + } + if !isOrthogonal(z) { + t.Errorf("%v: Z is not orthogonal", prefix) + } + if wantt { + hu := eye(n, n) + // Compute H_in*Z. + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, test.h, z, 0, hu) + uhu := eye(n, n) + // Compute Z^T*H_in*Z. + blas64.Gemm(blas.Trans, blas.NoTrans, 1, z, hu, 0, uhu) + // Compare Z^T*H_in*Z and H_out. + if !equalApproxGeneral(uhu, h, 10*tol) { + t.Errorf("%v: Z^T*(initial H)*Z and (final H) are not equal", prefix) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr5.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr5.go new file mode 100644 index 0000000..d4c4272 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaqr5.go @@ -0,0 +1,215 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "compress/gzip" + "encoding/json" + "fmt" + "log" + "math" + "os" + "path/filepath" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaqr5er interface { + Dlaqr5(wantt, wantz bool, kacc22 int, n, ktop, kbot, nshfts int, sr, si []float64, h []float64, ldh int, iloz, ihiz int, z []float64, ldz int, v []float64, ldv int, u []float64, ldu int, nh int, wh []float64, ldwh int, nv int, wv []float64, ldwv int) +} + +type Dlaqr5test struct { + WantT bool + N int + NShifts int + KTop, KBot int + ShiftR, ShiftI []float64 + H []float64 + + HWant []float64 + ZWant []float64 +} + +func Dlaqr5Test(t *testing.T, impl Dlaqr5er) { + // Test without using reference data. + rnd := rand.New(rand.NewSource(1)) + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 30} { + for _, extra := range []int{0, 1, 20} { + for _, kacc22 := range []int{0, 1, 2} { + for cas := 0; cas < 100; cas++ { + testDlaqr5(t, impl, n, extra, kacc22, rnd) + } + } + } + } + + // Test using reference data computed by the reference netlib + // implementation. + file, err := os.Open(filepath.FromSlash("../testlapack/testdata/dlaqr5data.json.gz")) + if err != nil { + log.Fatal(err) + } + defer file.Close() + r, err := gzip.NewReader(file) + if err != nil { + log.Fatal(err) + } + defer r.Close() + + var tests []Dlaqr5test + json.NewDecoder(r).Decode(&tests) + for _, test := range tests { + wantt := test.WantT + n := test.N + nshfts := test.NShifts + ktop := test.KTop + kbot := test.KBot + sr := test.ShiftR + si := test.ShiftI + + for _, extra := range []int{0, 1, 10} { + v := randomGeneral(nshfts/2, 3, 3+extra, rnd) + u := randomGeneral(3*nshfts-3, 3*nshfts-3, 3*nshfts-3+extra, rnd) + nh := n + wh := randomGeneral(3*nshfts-3, n, n+extra, rnd) + nv := n + wv := randomGeneral(n, 3*nshfts-3, 3*nshfts-3+extra, rnd) + + h := nanGeneral(n, n, n+extra) + + for _, kacc22 := range []int{0, 1, 2} { + copyMatrix(n, n, h.Data, h.Stride, test.H) + z := eye(n, n+extra) + + impl.Dlaqr5(wantt, true, kacc22, + n, ktop, kbot, + nshfts, sr, si, + h.Data, h.Stride, + 0, n-1, z.Data, z.Stride, + v.Data, v.Stride, + u.Data, u.Stride, + nv, wv.Data, wv.Stride, + nh, wh.Data, wh.Stride) + + prefix := fmt.Sprintf("wantt=%v, n=%v, nshfts=%v, ktop=%v, kbot=%v, extra=%v, kacc22=%v", + wantt, n, nshfts, ktop, kbot, extra, kacc22) + if !equalApprox(n, n, h.Data, h.Stride, test.HWant, 1e-13) { + t.Errorf("Case %v: unexpected matrix H\nh =%v\nhwant=%v", prefix, h.Data, test.HWant) + } + if !equalApprox(n, n, z.Data, z.Stride, test.ZWant, 1e-13) { + t.Errorf("Case %v: unexpected matrix Z\nz =%v\nzwant=%v", prefix, z.Data, test.ZWant) + } + } + } + } +} + +func testDlaqr5(t *testing.T, impl Dlaqr5er, n, extra, kacc22 int, rnd *rand.Rand) { + wantt := true + wantz := true + nshfts := 2 * n + sr := make([]float64, nshfts) + si := make([]float64, nshfts) + for i := 0; i < n; i++ { + re := rnd.NormFloat64() + im := rnd.NormFloat64() + sr[2*i], sr[2*i+1] = re, re + si[2*i], si[2*i+1] = im, -im + } + ktop := rnd.Intn(n) + kbot := rnd.Intn(n) + if kbot < ktop { + ktop, kbot = kbot, ktop + } + + v := randomGeneral(nshfts/2, 3, 3+extra, rnd) + u := randomGeneral(3*nshfts-3, 3*nshfts-3, 3*nshfts-3+extra, rnd) + nh := n + wh := randomGeneral(3*nshfts-3, n, n+extra, rnd) + nv := n + wv := randomGeneral(n, 3*nshfts-3, 3*nshfts-3+extra, rnd) + + h := randomHessenberg(n, n+extra, rnd) + if ktop > 0 { + h.Data[ktop*h.Stride+ktop-1] = 0 + } + if kbot < n-1 { + h.Data[(kbot+1)*h.Stride+kbot] = 0 + 
} + hCopy := h + hCopy.Data = make([]float64, len(h.Data)) + copy(hCopy.Data, h.Data) + + z := eye(n, n+extra) + + impl.Dlaqr5(wantt, wantz, kacc22, + n, ktop, kbot, + nshfts, sr, si, + h.Data, h.Stride, + 0, n-1, z.Data, z.Stride, + v.Data, v.Stride, + u.Data, u.Stride, + nv, wv.Data, wv.Stride, + nh, wh.Data, wh.Stride) + + prefix := fmt.Sprintf("Case n=%v, extra=%v, kacc22=%v", n, extra, kacc22) + + if !generalOutsideAllNaN(h) { + t.Errorf("%v: out-of-range write to H\n%v", prefix, h.Data) + } + if !generalOutsideAllNaN(z) { + t.Errorf("%v: out-of-range write to Z\n%v", prefix, z.Data) + } + if !generalOutsideAllNaN(u) { + t.Errorf("%v: out-of-range write to U\n%v", prefix, u.Data) + } + if !generalOutsideAllNaN(v) { + t.Errorf("%v: out-of-range write to V\n%v", prefix, v.Data) + } + if !generalOutsideAllNaN(wh) { + t.Errorf("%v: out-of-range write to WH\n%v", prefix, wh.Data) + } + if !generalOutsideAllNaN(wv) { + t.Errorf("%v: out-of-range write to WV\n%v", prefix, wv.Data) + } + + for i := 0; i < n; i++ { + for j := 0; j < i-1; j++ { + if h.Data[i*h.Stride+j] != 0 { + t.Errorf("%v: H is not Hessenberg, H[%v,%v]!=0", prefix, i, j) + } + } + } + if !isOrthogonal(z) { + t.Errorf("%v: Z is not orthogonal", prefix) + } + // Construct Z^T * HOrig * Z and check that it is equal to H from Dlaqr5. + hz := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, hCopy, z, 0, hz) + zhz := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, z, hz, 0, zhz) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + diff := zhz.Data[i*zhz.Stride+j] - h.Data[i*h.Stride+j] + if math.Abs(diff) > 1e-13 { + t.Errorf("%v: Z^T*HOrig*Z and H are not equal, diff at [%v,%v]=%v", prefix, i, j, diff) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarf.go new file mode 100644 index 0000000..61fabca --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarf.go @@ -0,0 +1,174 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dlarfer interface { + Dlarf(side blas.Side, m, n int, v []float64, incv int, tau float64, c []float64, ldc int, work []float64) +} + +func DlarfTest(t *testing.T, impl Dlarfer) { + rnd := rand.New(rand.NewSource(1)) + for i, test := range []struct { + m, n, ldc int + incv, lastv int + lastr, lastc int + tau float64 + }{ + { + m: 3, + n: 2, + ldc: 2, + + incv: 4, + lastv: 1, + + lastr: 2, + lastc: 1, + + tau: 2, + }, + { + m: 2, + n: 3, + ldc: 3, + + incv: 4, + lastv: 1, + + lastr: 1, + lastc: 2, + + tau: 2, + }, + { + m: 2, + n: 3, + ldc: 3, + + incv: 4, + lastv: 1, + + lastr: 0, + lastc: 1, + + tau: 2, + }, + { + m: 2, + n: 3, + ldc: 3, + + incv: 4, + lastv: 0, + + lastr: 0, + lastc: 1, + + tau: 2, + }, + { + m: 10, + n: 10, + ldc: 10, + + incv: 4, + lastv: 6, + + lastr: 9, + lastc: 8, + + tau: 2, + }, + } { + // Construct a random matrix. 
+ c := make([]float64, test.ldc*test.m) + for i := 0; i <= test.lastr; i++ { + for j := 0; j <= test.lastc; j++ { + c[i*test.ldc+j] = rnd.Float64() + } + } + cCopy := make([]float64, len(c)) + copy(cCopy, c) + cCopy2 := make([]float64, len(c)) + copy(cCopy2, c) + + // Test with side right. + sz := max(test.m, test.n) // so v works for both right and left side. + v := make([]float64, test.incv*sz+1) + // Fill with nonzero entries up until lastv. + for i := 0; i <= test.lastv; i++ { + v[i*test.incv] = rnd.Float64() + } + // Construct h explicitly to compare. + h := make([]float64, test.n*test.n) + for i := 0; i < test.n; i++ { + h[i*test.n+i] = 1 + } + hMat := blas64.General{ + Rows: test.n, + Cols: test.n, + Stride: test.n, + Data: h, + } + vVec := blas64.Vector{ + Inc: test.incv, + Data: v, + } + blas64.Ger(-test.tau, vVec, vVec, hMat) + + // Apply multiplication (2nd copy is to avoid aliasing). + cMat := blas64.General{ + Rows: test.m, + Cols: test.n, + Stride: test.ldc, + Data: cCopy, + } + cMat2 := blas64.General{ + Rows: test.m, + Cols: test.n, + Stride: test.ldc, + Data: cCopy2, + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, cMat2, hMat, 0, cMat) + + // cMat now stores the true answer. Compare with the function call. + work := make([]float64, sz) + impl.Dlarf(blas.Right, test.m, test.n, v, test.incv, test.tau, c, test.ldc, work) + if !floats.EqualApprox(c, cMat.Data, 1e-14) { + t.Errorf("Dlarf mismatch right, case %v. Want %v, got %v", i, cMat.Data, c) + } + + // Test on the left side. + copy(c, cCopy2) + copy(cCopy, c) + // Construct h. + h = make([]float64, test.m*test.m) + for i := 0; i < test.m; i++ { + h[i*test.m+i] = 1 + } + hMat = blas64.General{ + Rows: test.m, + Cols: test.m, + Stride: test.m, + Data: h, + } + blas64.Ger(-test.tau, vVec, vVec, hMat) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, hMat, cMat2, 0, cMat) + impl.Dlarf(blas.Left, test.m, test.n, v, test.incv, test.tau, c, test.ldc, work) + if !floats.EqualApprox(c, cMat.Data, 1e-14) { + t.Errorf("Dlarf mismatch left, case %v. Want %v, got %v", i, cMat.Data, c) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfb.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfb.go new file mode 100644 index 0000000..fef4257 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfb.go @@ -0,0 +1,162 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dlarfber interface { + Dlarfter + Dlarfb(side blas.Side, trans blas.Transpose, direct lapack.Direct, + store lapack.StoreV, m, n, k int, v []float64, ldv int, t []float64, ldt int, + c []float64, ldc int, work []float64, ldwork int) +} + +func DlarfbTest(t *testing.T, impl Dlarfber) { + rnd := rand.New(rand.NewSource(1)) + for _, store := range []lapack.StoreV{lapack.ColumnWise, lapack.RowWise} { + for _, direct := range []lapack.Direct{lapack.Forward, lapack.Backward} { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.Trans, blas.NoTrans} { + for cas, test := range []struct { + ma, na, cdim, lda, ldt, ldc int + }{ + {6, 6, 6, 0, 0, 0}, + {6, 8, 10, 0, 0, 0}, + {6, 10, 8, 0, 0, 0}, + {8, 6, 10, 0, 0, 0}, + {8, 10, 6, 0, 0, 0}, + {10, 6, 8, 0, 0, 0}, + {10, 8, 6, 0, 0, 0}, + {6, 6, 6, 12, 15, 30}, + {6, 8, 10, 12, 15, 30}, + {6, 10, 8, 12, 15, 30}, + {8, 6, 10, 12, 15, 30}, + {8, 10, 6, 12, 15, 30}, + {10, 6, 8, 12, 15, 30}, + {10, 8, 6, 12, 15, 30}, + {6, 6, 6, 15, 12, 30}, + {6, 8, 10, 15, 12, 30}, + {6, 10, 8, 15, 12, 30}, + {8, 6, 10, 15, 12, 30}, + {8, 10, 6, 15, 12, 30}, + {10, 6, 8, 15, 12, 30}, + {10, 8, 6, 15, 12, 30}, + } { + // Generate a matrix for QR + ma := test.ma + na := test.na + lda := test.lda + if lda == 0 { + lda = na + } + a := make([]float64, ma*lda) + for i := 0; i < ma; i++ { + for j := 0; j < lda; j++ { + a[i*lda+j] = rnd.Float64() + } + } + k := min(ma, na) + + // H is always ma x ma + var m, n, rowsWork int + switch { + default: + panic("not implemented") + case side == blas.Left: + m = test.ma + n = test.cdim + rowsWork = n + case side == blas.Right: + m = test.cdim + n = test.ma + rowsWork = m + } + + // Use dgeqr2 to find the v vectors + tau := make([]float64, na) + work := make([]float64, na) + impl.Dgeqr2(ma, k, a, lda, tau, work) + + // Correct the v vectors based on the direct and store + vMatTmp := extractVMat(ma, na, a, lda, lapack.Forward, lapack.ColumnWise) + vMat := constructVMat(vMatTmp, store, direct) + v := vMat.Data + ldv := vMat.Stride + + // Use dlarft to find the t vector + ldt := test.ldt + if ldt == 0 { + ldt = k + } + tm := make([]float64, k*ldt) + + impl.Dlarft(direct, store, ma, k, v, ldv, tau, tm, ldt) + + // Generate c matrix + ldc := test.ldc + if ldc == 0 { + ldc = n + } + c := make([]float64, m*ldc) + for i := 0; i < m; i++ { + for j := 0; j < ldc; j++ { + c[i*ldc+j] = rnd.Float64() + } + } + cCopy := make([]float64, len(c)) + copy(cCopy, c) + + ldwork := k + work = make([]float64, rowsWork*k) + + // Call Dlarfb with this information + impl.Dlarfb(side, trans, direct, store, m, n, k, v, ldv, tm, ldt, c, ldc, work, ldwork) + + h := constructH(tau, vMat, store, direct) + + cMat := blas64.General{ + Rows: m, + Cols: n, + Stride: ldc, + Data: make([]float64, m*ldc), + } + copy(cMat.Data, cCopy) + ans := blas64.General{ + Rows: m, + Cols: n, + Stride: ldc, + Data: make([]float64, m*ldc), + } + copy(ans.Data, cMat.Data) + switch { + default: + panic("not implemented") + case side == blas.Left && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, h, cMat, 0, ans) + case side == blas.Left && trans == blas.Trans: + blas64.Gemm(blas.Trans, blas.NoTrans, 1, h, cMat, 0, ans) + case side == blas.Right && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 
1, cMat, h, 0, ans) + case side == blas.Right && trans == blas.Trans: + blas64.Gemm(blas.NoTrans, blas.Trans, 1, cMat, h, 0, ans) + } + if !floats.EqualApprox(ans.Data, c, 1e-14) { + t.Errorf("Cas %v mismatch. Want %v, got %v.", cas, ans.Data, c) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfg.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfg.go new file mode 100644 index 0000000..57eca97 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfg.go @@ -0,0 +1,134 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dlarfger interface { + Dlarfg(n int, alpha float64, x []float64, incX int) (beta, tau float64) +} + +func DlarfgTest(t *testing.T, impl Dlarfger) { + const tol = 1e-14 + rnd := rand.New(rand.NewSource(1)) + for i, test := range []struct { + alpha float64 + n int + x []float64 + }{ + { + alpha: 4, + n: 3, + }, + { + alpha: -2, + n: 3, + }, + { + alpha: 0, + n: 3, + }, + { + alpha: 1, + n: 1, + }, + { + alpha: 1, + n: 4, + x: []float64{4, 5, 6}, + }, + { + alpha: 1, + n: 4, + x: []float64{0, 0, 0}, + }, + { + alpha: dlamchS, + n: 4, + x: []float64{dlamchS, dlamchS, dlamchS}, + }, + } { + n := test.n + incX := 1 + var x []float64 + if test.x == nil { + x = make([]float64, n-1) + for i := range x { + x[i] = rnd.Float64() + } + } else { + if len(test.x) != n-1 { + panic("bad test") + } + x = make([]float64, n-1) + copy(x, test.x) + } + xcopy := make([]float64, n-1) + copy(xcopy, x) + alpha := test.alpha + beta, tau := impl.Dlarfg(n, alpha, x, incX) + + // Verify the returns and the values in v. Construct h and perform + // the explicit multiplication. + h := make([]float64, n*n) + for i := 0; i < n; i++ { + h[i*n+i] = 1 + } + hmat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: h, + } + v := make([]float64, n) + copy(v[1:], x) + v[0] = 1 + vVec := blas64.Vector{ + Inc: 1, + Data: v, + } + blas64.Ger(-tau, vVec, vVec, hmat) + eye := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, hmat, hmat, 0, eye) + dist := distFromIdentity(n, eye.Data, n) + if dist > tol { + t.Errorf("H^T * H is not close to I, dist=%v", dist) + } + + xVec := blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + xVec.Data[0] = test.alpha + copy(xVec.Data[1:], xcopy) + + ans := make([]float64, n) + ansVec := blas64.Vector{ + Inc: 1, + Data: ans, + } + blas64.Gemv(blas.NoTrans, 1, hmat, xVec, 0, ansVec) + if math.Abs(ans[0]-beta) > tol { + t.Errorf("Case %v, beta mismatch. Want %v, got %v", i, ans[0], beta) + } + if floats.Norm(ans[1:n], math.Inf(1)) > tol { + t.Errorf("Case %v, nonzero answer %v", i, ans[1:n]) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarft.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarft.go new file mode 100644 index 0000000..d4c174f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarft.go @@ -0,0 +1,169 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dlarfter interface { + Dgeqr2er + Dlarft(direct lapack.Direct, store lapack.StoreV, n, k int, v []float64, ldv int, tau []float64, t []float64, ldt int) +} + +func DlarftTest(t *testing.T, impl Dlarfter) { + rnd := rand.New(rand.NewSource(1)) + for _, store := range []lapack.StoreV{lapack.ColumnWise, lapack.RowWise} { + for _, direct := range []lapack.Direct{lapack.Forward, lapack.Backward} { + for _, test := range []struct { + m, n, ldv, ldt int + }{ + {6, 6, 0, 0}, + {8, 6, 0, 0}, + {6, 8, 0, 0}, + {6, 6, 10, 15}, + {8, 6, 10, 15}, + {6, 8, 10, 15}, + {6, 6, 15, 10}, + {8, 6, 15, 10}, + {6, 8, 15, 10}, + } { + // Generate a matrix + m := test.m + n := test.n + lda := n + if lda == 0 { + lda = n + } + + a := make([]float64, m*lda) + for i := 0; i < m; i++ { + for j := 0; j < lda; j++ { + a[i*lda+j] = rnd.Float64() + } + } + // Use dgeqr2 to find the v vectors + tau := make([]float64, n) + work := make([]float64, n) + impl.Dgeqr2(m, n, a, lda, tau, work) + + // Construct H using these answers + vMatTmp := extractVMat(m, n, a, lda, lapack.Forward, lapack.ColumnWise) + vMat := constructVMat(vMatTmp, store, direct) + v := vMat.Data + ldv := vMat.Stride + + h := constructH(tau, vMat, store, direct) + + k := min(m, n) + ldt := test.ldt + if ldt == 0 { + ldt = k + } + // Find T from the actual function + tm := make([]float64, k*ldt) + for i := range tm { + tm[i] = 100 + rnd.Float64() + } + // The v data has been put into a. + impl.Dlarft(direct, store, m, k, v, ldv, tau, tm, ldt) + + tData := make([]float64, len(tm)) + copy(tData, tm) + if direct == lapack.Forward { + // Zero out the lower traingular portion. + for i := 0; i < k; i++ { + for j := 0; j < i; j++ { + tData[i*ldt+j] = 0 + } + } + } else { + // Zero out the upper traingular portion. 
+ for i := 0; i < k; i++ { + for j := i + 1; j < k; j++ { + tData[i*ldt+j] = 0 + } + } + } + + T := blas64.General{ + Rows: k, + Cols: k, + Stride: ldt, + Data: tData, + } + + vMatT := blas64.General{ + Rows: vMat.Cols, + Cols: vMat.Rows, + Stride: vMat.Rows, + Data: make([]float64, vMat.Cols*vMat.Rows), + } + for i := 0; i < vMat.Rows; i++ { + for j := 0; j < vMat.Cols; j++ { + vMatT.Data[j*vMatT.Stride+i] = vMat.Data[i*vMat.Stride+j] + } + } + var comp blas64.General + if store == lapack.ColumnWise { + // H = I - V * T * V^T + tmp := blas64.General{ + Rows: T.Rows, + Cols: vMatT.Cols, + Stride: vMatT.Cols, + Data: make([]float64, T.Rows*vMatT.Cols), + } + // T * V^T + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, T, vMatT, 0, tmp) + comp = blas64.General{ + Rows: vMat.Rows, + Cols: tmp.Cols, + Stride: tmp.Cols, + Data: make([]float64, vMat.Rows*tmp.Cols), + } + // V * (T * V^T) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, vMat, tmp, 0, comp) + } else { + // H = I - V^T * T * V + tmp := blas64.General{ + Rows: T.Rows, + Cols: vMat.Cols, + Stride: vMat.Cols, + Data: make([]float64, T.Rows*vMat.Cols), + } + // T * V + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, T, vMat, 0, tmp) + comp = blas64.General{ + Rows: vMatT.Rows, + Cols: tmp.Cols, + Stride: tmp.Cols, + Data: make([]float64, vMatT.Rows*tmp.Cols), + } + // V^T * (T * V) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, vMatT, tmp, 0, comp) + } + // I - V^T * T * V + for i := 0; i < comp.Rows; i++ { + for j := 0; j < comp.Cols; j++ { + comp.Data[i*m+j] *= -1 + if i == j { + comp.Data[i*m+j] += 1 + } + } + } + if !floats.EqualApprox(comp.Data, h.Data, 1e-14) { + t.Errorf("T does not construct proper H. Store = %v, Direct = %v.\nWant %v\ngot %v.", string(store), string(direct), h.Data, comp.Data) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfx.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfx.go new file mode 100644 index 0000000..af7ce0e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlarfx.go @@ -0,0 +1,90 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlarfxer interface { + Dlarfx(side blas.Side, m, n int, v []float64, tau float64, c []float64, ldc int, work []float64) +} + +func DlarfxTest(t *testing.T, impl Dlarfxer) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []blas.Side{blas.Right, blas.Left} { + // For m and n greater than 10 we are testing Dlarf, so avoid unnecessary work. + for m := 1; m < 12; m++ { + for n := 1; n < 12; n++ { + for _, extra := range []int{0, 1, 11} { + for cas := 0; cas < 10; cas++ { + testDlarfx(t, impl, side, m, n, extra, rnd) + } + } + } + } + } +} + +func testDlarfx(t *testing.T, impl Dlarfxer, side blas.Side, m, n, extra int, rnd *rand.Rand) { + const tol = 1e-13 + + // Generate random input data. + var v []float64 + if side == blas.Left { + v = randomSlice(m, rnd) + } else { + v = randomSlice(n, rnd) + } + tau := rnd.NormFloat64() + ldc := n + extra + c := randomGeneral(m, n, ldc, rnd) + + // Compute the matrix H explicitly as H := I - tau * v * v^T. 
+ var h blas64.General + if side == blas.Left { + h = eye(m, m+extra) + } else { + h = eye(n, n+extra) + } + blas64.Ger(-tau, blas64.Vector{Inc: 1, Data: v}, blas64.Vector{Inc: 1, Data: v}, h) + + // Compute the product H * C or C * H explicitly. + cWant := nanGeneral(m, n, ldc) + if side == blas.Left { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, h, c, 0, cWant) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, c, h, 0, cWant) + } + + var work []float64 + if h.Rows > 10 { + // Allocate work only if H has order > 10. + if side == blas.Left { + work = make([]float64, n) + } else { + work = make([]float64, m) + } + } + + impl.Dlarfx(side, m, n, v, tau, c.Data, c.Stride, work) + + prefix := fmt.Sprintf("Case side=%v, m=%v, n=%v, extra=%v", side, m, n, extra) + + // Check any invalid modifications of c. + if !generalOutsideAllNaN(c) { + t.Errorf("%v: out-of-range write to C\n%v", prefix, c.Data) + } + + if !equalApproxGeneral(c, cWant, tol) { + t.Errorf("%v: unexpected C\n%v", prefix, c.Data) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlartg.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlartg.go new file mode 100644 index 0000000..c2c82cf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlartg.go @@ -0,0 +1,119 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dlartger interface { + Dlartg(f, g float64) (cs, sn, r float64) +} + +func DlartgTest(t *testing.T, impl Dlartger) { + const tol = 1e-14 + // safmn2 and safmx2 are copied from native.Dlartg. + // safmn2 ~ 2*10^{-146} + safmn2 := math.Pow(dlamchB, math.Trunc(math.Log(dlamchS/dlamchE)/math.Log(dlamchB)/2)) + // safmx2 ~ 5*10^145 + safmx2 := 1 / safmn2 + rnd := rand.New(rand.NewSource(1)) + for i := 0; i < 1000; i++ { + // Generate randomly huge, tiny, and "normal" input arguments to Dlartg. + var f float64 + var fHuge bool + switch rnd.Intn(3) { + case 0: + // Huge f. + fHuge = true + // scale is in the range (10^{-10}, 10^10]. + scale := math.Pow(10, 10-20*rnd.Float64()) + // f is in the range (5*10^135, 5*10^155]. + f = scale * safmx2 + case 1: + // Tiny f. + // f is in the range (2*10^{-156}, 2*10^{-136}]. + f = math.Pow(10, 10-20*rnd.Float64()) * safmn2 + default: + f = rnd.NormFloat64() + } + if rnd.Intn(2) == 0 { + // Sometimes change the sign of f. + f *= -1 + } + + var g float64 + var gHuge bool + switch rnd.Intn(3) { + case 0: + // Huge g. + gHuge = true + g = math.Pow(10, 10-20*rnd.Float64()) * safmx2 + case 1: + // Tiny g. + g = math.Pow(10, 10-20*rnd.Float64()) * safmn2 + default: + g = rnd.NormFloat64() + } + if rnd.Intn(2) == 0 { + g *= -1 + } + + // Generate a plane rotation so that + // [ cs sn] * [f] = [r] + // [-sn cs] [g] = [0] + cs, sn, r := impl.Dlartg(f, g) + + // Check that the first equation holds. + rWant := cs*f + sn*g + if !floats.EqualWithinAbsOrRel(math.Abs(rWant), math.Abs(r), tol, tol) { + t.Errorf("Case f=%v,g=%v: unexpected r. Want %v, got %v", f, g, rWant, r) + } + // Check that cs and sn define a plane rotation. The 2×2 matrix + // has orthogonal columns by construction, so only check that + // the columns/rows have unit norm. 
+ oneTest := cs*cs + sn*sn + if math.Abs(oneTest-1) > tol { + t.Errorf("Case f=%v,g=%v: expected cs^2+sn^2==1, got %v", f, g, oneTest) + } + if !fHuge && !gHuge { + // Check that the second equation holds. + // If both numbers are huge, cancellation errors make + // this test unreliable. + zeroTest := -sn*f + cs*g + if math.Abs(zeroTest) > tol { + t.Errorf("Case f=%v,g=%v: expected zero, got %v", f, g, zeroTest) + } + } + // Check that cs is positive as documented. + if math.Abs(f) > math.Abs(g) && cs < 0 { + t.Errorf("Case f=%v,g=%v: unexpected negative cs %v", f, g, cs) + } + } + // Check other documented special cases. + for i := 0; i < 100; i++ { + cs, sn, _ := impl.Dlartg(rnd.NormFloat64(), 0) + if cs != 1 { + t.Errorf("Unexpected cs for g=0. Want 1, got %v", cs) + } + if sn != 0 { + t.Errorf("Unexpected sn for g=0. Want 0, got %v", sn) + } + } + for i := 0; i < 100; i++ { + cs, sn, _ := impl.Dlartg(0, rnd.NormFloat64()) + if cs != 0 { + t.Errorf("Unexpected cs for f=0. Want 0, got %v", cs) + } + if sn != 1 { + t.Errorf("Unexpected sn for f=0. Want 1, got %v", sn) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlas2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlas2.go new file mode 100644 index 0000000..81c6180 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlas2.go @@ -0,0 +1,34 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" +) + +type Dlas2er interface { + Dlas2(f, g, h float64) (min, max float64) +} + +func Dlas2Test(t *testing.T, impl Dlas2er) { + for i, test := range []struct { + f, g, h, ssmin, ssmax float64 + }{ + // Singular values computed from Octave. + {10, 30, 12, 3.567778859365365, 33.634371616111189}, + {10, 30, -12, 3.567778859365365, 33.634371616111189}, + {2, 30, -12, 0.741557056404952, 32.364333658088754}, + {-2, 5, 12, 1.842864429909778, 13.023204317408728}, + } { + ssmin, ssmax := impl.Dlas2(test.f, test.g, test.h) + if math.Abs(ssmin-test.ssmin) > 1e-12 { + t.Errorf("Case %d, minimal singular value mismatch. Want %v, got %v", i, test.ssmin, ssmin) + } + if math.Abs(ssmax-test.ssmax) > 1e-12 { + t.Errorf("Case %d, maximal singular value mismatch. Want %v, got %v", i, test.ssmax, ssmax) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlascl.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlascl.go new file mode 100644 index 0000000..1c92e57 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlascl.go @@ -0,0 +1,107 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/lapack" +) + +type Dlascler interface { + Dlascl(kind lapack.MatrixType, kl, ku int, cfrom, cto float64, m, n int, a []float64, lda int) +} + +func DlasclTest(t *testing.T, impl Dlascler) { + const tol = 1e-16 + + rnd := rand.New(rand.NewSource(1)) + for ti, test := range []struct { + m, n int + }{ + {0, 0}, + {1, 1}, + {1, 10}, + {10, 1}, + {2, 2}, + {2, 11}, + {11, 2}, + {3, 3}, + {3, 11}, + {11, 3}, + {11, 11}, + {11, 100}, + {100, 11}, + } { + m := test.m + n := test.n + for _, extra := range []int{0, 11} { + for _, kind := range []lapack.MatrixType{lapack.General, lapack.UpperTri, lapack.LowerTri} { + a := randomGeneral(m, n, n+extra, rnd) + aCopy := cloneGeneral(a) + cfrom := rnd.NormFloat64() + cto := rnd.NormFloat64() + scale := cto / cfrom + + impl.Dlascl(kind, -1, -1, cfrom, cto, m, n, a.Data, a.Stride) + + prefix := fmt.Sprintf("Case #%v: kind=%v,m=%v,n=%v,extra=%v", ti, kind, m, n, extra) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + switch kind { + case lapack.General: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + want := scale * aCopy.Data[i*aCopy.Stride+j] + got := a.Data[i*a.Stride+j] + if math.Abs(want-got) > tol { + t.Errorf("%v: unexpected A[%v,%v]=%v, want %v", prefix, i, j, got, want) + } + } + } + case lapack.UpperTri: + for i := 0; i < m; i++ { + for j := i; j < n; j++ { + want := scale * aCopy.Data[i*aCopy.Stride+j] + got := a.Data[i*a.Stride+j] + if math.Abs(want-got) > tol { + t.Errorf("%v: unexpected A[%v,%v]=%v, want %v", prefix, i, j, got, want) + } + } + } + for i := 0; i < m; i++ { + for j := 0; j < min(i, n); j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification in lower triangle of A", prefix) + } + } + } + case lapack.LowerTri: + for i := 0; i < m; i++ { + for j := 0; j <= min(i, n-1); j++ { + want := scale * aCopy.Data[i*aCopy.Stride+j] + got := a.Data[i*a.Stride+j] + if math.Abs(want-got) > tol { + t.Errorf("%v: unexpected A[%v,%v]=%v, want %v", prefix, i, j, got, want) + } + } + } + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + if a.Data[i*a.Stride+j] != aCopy.Data[i*aCopy.Stride+j] { + t.Errorf("%v: unexpected modification in upper triangle of A", prefix) + } + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaset.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaset.go new file mode 100644 index 0000000..cb2e615 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaset.go @@ -0,0 +1,77 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Dlaseter interface { + Dlaset(uplo blas.Uplo, m, n int, alpha, beta float64, a []float64, lda int) +} + +func DlasetTest(t *testing.T, impl Dlaseter) { + rnd := rand.New(rand.NewSource(1)) + for ti, test := range []struct { + m, n int + }{ + {0, 0}, + {1, 1}, + {1, 10}, + {10, 1}, + {2, 2}, + {2, 10}, + {10, 2}, + {11, 11}, + {11, 100}, + {100, 11}, + } { + m := test.m + n := test.n + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower, blas.All} { + for _, extra := range []int{0, 10} { + a := randomGeneral(m, n, n+extra, rnd) + alpha := 1.0 + beta := 2.0 + + impl.Dlaset(uplo, m, n, alpha, beta, a.Data, a.Stride) + + prefix := fmt.Sprintf("Case #%v: m=%v,n=%v,uplo=%v,extra=%v", + ti, m, n, uplo, extra) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + for i := 0; i < min(m, n); i++ { + if a.Data[i*a.Stride+i] != beta { + t.Errorf("%v: unexpected diagonal of A", prefix) + } + } + if uplo == blas.Upper || uplo == blas.All { + for i := 0; i < m; i++ { + for j := i + 1; j < n; j++ { + if a.Data[i*a.Stride+j] != alpha { + t.Errorf("%v: unexpected upper triangle of A", prefix) + } + } + } + } + if uplo == blas.Lower || uplo == blas.All { + for i := 1; i < m; i++ { + for j := 0; j < min(i, n); j++ { + if a.Data[i*a.Stride+j] != alpha { + t.Errorf("%v: unexpected lower triangle of A", prefix) + } + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq1.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq1.go new file mode 100644 index 0000000..ad37bb7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq1.go @@ -0,0 +1,88 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlasq1er interface { + Dlasq1(n int, d, e, work []float64) int + Dgetrfer +} + +func Dlasq1Test(t *testing.T, impl Dlasq1er) { + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + // TODO(btracey): Increase the size of this test when we have a more numerically + // stable way to test the singular values. + for _, n := range []int{1, 2, 5, 8} { + work := make([]float64, 4*n) + d := make([]float64, n) + e := make([]float64, n-1) + for cas := 0; cas < 1; cas++ { + for i := range work { + work[i] = rnd.Float64() + } + for i := range d { + d[i] = rnd.NormFloat64() + 10 + } + for i := range e { + e[i] = rnd.NormFloat64() + } + ldm := n + m := make([]float64, n*ldm) + // Set up the matrix + for i := 0; i < n; i++ { + m[i*ldm+i] = d[i] + if i != n-1 { + m[(i+1)*ldm+i] = e[i] + } + } + + ldmm := n + mm := make([]float64, n*ldmm) + bi.Dgemm(blas.Trans, blas.NoTrans, n, n, n, 1, m, ldm, m, ldm, 0, mm, ldmm) + + impl.Dlasq1(n, d, e, work) + + // Check that they are singular values. The + // singular values are the square roots of the + // eigenvalues of X^T * X + mmCopy := make([]float64, len(mm)) + copy(mmCopy, mm) + ipiv := make([]int, n) + for elem, sv := range d[0:n] { + copy(mm, mmCopy) + lambda := sv * sv + for i := 0; i < n; i++ { + mm[i*ldm+i] -= lambda + } + + // Compute LU. + ok := impl.Dgetrf(n, n, mm, ldmm, ipiv) + if !ok { + // Definitely singular. 
+ continue + } + // Compute determinant + var logdet float64 + for i := 0; i < n; i++ { + v := mm[i*ldm+i] + logdet += math.Log(math.Abs(v)) + } + if math.Exp(logdet) > 2 { + t.Errorf("Incorrect singular value. n = %d, cas = %d, elem = %d, det = %v", n, cas, elem, math.Exp(logdet)) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq2.go new file mode 100644 index 0000000..1a1c480 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq2.go @@ -0,0 +1,715 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dlasq2er interface { + Dgetrfer + Dlasq2(n int, z []float64) (info int) +} + +func Dlasq2Test(t *testing.T, impl Dlasq2er) { + dTol := 1e-6 + // Answers from calling the netlib Dlasq2 function directly. + for c, test := range []struct { + n int + z []float64 + info int + zOut []float64 + }{ + { + z: []float64{0.9975595900926172e+00, 0.5668247076112733e+00, 0.9659153754961249e+00, 0.7479276854714322e+00, 0.3673908973747557e+00, 0.4806368987547315e+00, 0.7375426363398452e-01, 0.5355229277727247e-02, 0.3470812885180155e+00, 0.3422438160728350e+00, 0.2179517263384726e+00, 0.1331604100136593e+00, 0.9005245144218924e+00, 0.3867660104574036e+00, 0.4454822893878481e+00, 0.6619321808958428e+00, 0.1610830043055933e-01, 0.6508548361039168e+00, 0.6464088254838254e+00, 0.3229872909405558e+00, 0.8556924028853313e+00, 0.4012869193638139e+00, 0.2068743292187569e+00, 0.9685394642165999e+00, 0.5983995343181346e+00, 0.6729807327762632e+00, 0.4568823106729620e+00, 0.3300151235733747e+00, 0.1003829265021773e+00, 0.7554533047597268e+00, 0.6056932669802318e+00, 0.7190479134084530e+00, 0.8973346038865299e+00, 0.6582291204811077e+00, 0.1507168371301046e+00, 0.6123149049967000e+00, 0.9786602381321465e+00, 0.9991422694268032e+00, 0.2567979861831603e+00, 0.5508654031552206e+00, 0.6590475178936379e+00, 0.5540051353968156e+00, 0.9777600986094505e+00, 0.9019233044604550e+00, 0.6579246844202109e+00, 0.7288585083995637e+00, 0.4024552650632751e+00, 0.9286276631540373e+00, 0.1478351900438915e+00, 0.6745292990637881e+00, 0.7696143092758962e+00, 0.3393225521457851e+00, 0.1158188549489931e+00, 0.6143691843615144e+00, 0.8206171394658319e+00, 0.9470946559240657e+00, 0.7311286518535550e+00, 0.4976039064630389e+00, 0.3748017407872005e+00, 0.4215058560045335e+00, 0.5529030382557849e+00, 0.9979192791781042e+00, 0.9903947480415133e+00, 0.7463096553073095e+00, 0.9537590617479789e+00, 0.9327469033343772e-01, 0.7340236871158429e+00, 0.7517616159256260e+00, 0.9468485056759987e+00, 0.7061763632511533e+00, 0.8138096664824992e+00, 0.5585945195965548e+00, 0.6170557687507783e-01, 0.4803807801853603e+00, 0.5976897721963292e+00, 0.1375319188363359e+00, 0.5873952004066820e+00, 0.5199682629163997e+00, 0.8858783448129579e+00, 0.3038101719904888e+00, 0.6696573039722583e+00, 0.6649400915297697e+00, 0.5036768993979404e+00, 0.2615751204119269e+00}, + n: 21, + zOut: []float64{0.2550031738262204e+01, 0.2480977513501848e+01, 0.2366602269912148e+01, 0.2169993432366266e+01, 0.1810817848712033e+01, 0.1681677309759878e+01, 0.1303743375476047e+01, 0.1202161769544433e+01, 0.1183377192742860e+01, 0.1094206688544886e+01, 0.9154376259418607e+00, 0.8241395430971566e+00, 0.7318275341991307e+00, 0.5186198053161721e+00, 
0.4143051093784424e+00, 0.2112065329503869e+00, 0.1093987140067686e+00, 0.9751785856405315e-01, 0.2818174710670554e-01, 0.1697846193036144e-01, 0.2117542506861687e-04, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.2171122324673871e+02, 0.2171122324673871e+02, 0.1020000000000000e+03, 0.3081632653061225e+01, 0.2941176470588236e+01, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + info: 0, + }, + { + z: []float64{0.7655950151081736e-01, 0.1012496627944287e+00, 0.5492657397218519e+00, 0.3755849474908193e+00, 0.1514950257902281e-01, 0.7929154460744389e+00, 0.6208775449015512e+00, 0.7736035774285512e+00, 0.9535807609862423e+00, 0.1142443721338974e+00, 0.3184626428755636e+00, 0.5968198462235605e+00, 0.4815290260558158e-01, 0.1142057780273510e+00, 0.2159649191761220e+00, 0.1005733924928321e+00, 0.7334180249993905e-01, 0.2468617397276878e+00, 0.4433842667962828e+00, 0.2083675732544600e+00, 0.5669983427348256e+00, 0.2431239969651688e-01, 0.4202905770712168e+00, 0.3978530241169832e+00, 0.9765854254920880e+00, 0.6926050329928320e+00, 0.4943367486777372e-02, 0.1299210324451839e+00, 0.4677725963527957e-01, 0.8397777412847708e+00, 0.6784888222113696e+00, 0.5819508167748705e+00, 0.7335259194567719e+00, 0.1160427446694747e+00, 0.8402996546275487e+00, 0.8349959735427709e+00, 0.7465363962886192e+00, 0.8432008930683078e+00, 0.5288390014350658e+00, 0.6654846664813405e+00, 0.7307365692392301e+00, 0.4106042636492306e+00, 0.3557215961646439e+00, 0.7353770423280176e+00, 0.4713176648251016e+00, 0.4626254343486430e+00, 0.7596917071958065e+00, 0.7024594192654096e+00, 0.2579658084846748e+00, 0.9377050325544740e+00, 0.4561035840049426e+00, 0.8084892970067921e+00, 0.9088480529888660e+00, 0.6948766633235142e+00, 0.2194885062923195e+00, 0.8549545559228027e+00, 0.7443966896835119e+00, 0.3011130612705175e+00, 0.6719685452983997e+00, 0.6187140363783860e+00, 0.9675736114028010e+00, 0.9902393027740470e+00, 0.3380065169449795e+00, 0.9207672475882130e+00, 0.3390733399571427e+00, 0.9309435300662920e+00, 0.5465285509796513e+00, 0.4655125893217942e+00, 0.1769140958718970e+00, 0.3779940975005719e+00, 0.1752206228227682e+00, 0.3568582675012224e+00, 0.6656764250906758e+00, 0.6185492680730227e+00, 0.4689472437795331e+00, 0.3162518610221317e+00, 
0.2688799086902824e+00, 0.1999212438248075e+00, 0.4154279763213168e+00, 0.9793029133347251e+00, 0.5432115888768534e+00, 0.1295182752745038e+00, 0.8047416637896615e+00, 0.8458210244967665e+00}, + n: 21, + zOut: []float64{0.2649710614371106e+01, 0.2321564474027070e+01, 0.2090779203479937e+01, 0.2001510674733695e+01, 0.1702794694134603e+01, 0.1077066053646038e+01, 0.1060140274732043e+01, 0.9894235909971354e+00, 0.9539342071687115e+00, 0.8046649468928653e+00, 0.7009142227469247e+00, 0.5502651835254770e+00, 0.4423863025187732e+00, 0.3697086796938907e+00, 0.1446051340026323e+00, 0.1110032523123295e+00, 0.7513603923341917e-01, 0.6860214953971246e-01, 0.2434478048112329e-01, 0.6989818532012803e-03, 0.7811996215926567e-04, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.1813933158005285e+02, 0.1813933158005285e+02, 0.1090000000000000e+03, 0.3231292517006803e+01, 0.5504587155963303e+01, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + info: 0, + }, + { + z: []float64{0.7090328374865308e+00, 0.5289732604915972e+00, 0.6638408676820399e+00, 0.9007392577685345e+00, 0.9411642674278379e+00, 0.1144202631330921e+00, 0.8666675899972089e+00, 0.6323680737151134e+00, 0.2427877245566508e+00, 0.1603957810123527e+00, 0.2123565414665629e-01, 0.1902539934369850e+00, 0.3123401294447815e+00, 0.8096475663099788e+00, 0.5737184899962203e+00, 0.8262606712006769e+00, 0.2864548736211023e+00, 0.1605147759074373e+00, 0.3309241142010387e+00, 0.2658533720189140e+00, 0.5447811280109880e+00, 0.5535203163154195e+00, 0.2161483615344606e-01, 0.1487480590324270e+00, 0.1049033762317957e+00, 0.6142137665037214e+00, 0.4062909815669834e+00, 0.9015282863354186e+00, 0.9428381000165521e+00, 0.1566198551488233e+00, 0.9369593797444957e+00, 0.1354729715149707e+00, 0.3712821832808397e+00, 0.3215112250898227e+00, 0.7149642455474942e+00, 0.7297077774959745e+00, 0.3785927874981971e+00, 0.8289344072747380e+00, 0.9536643756705742e+00, 0.3084192540817629e+00, 0.5797406195506623e+00, 0.2334488720392539e+00, 0.8444034680728870e+00, 0.2581324672603945e+00, 0.8397436353763532e+00, 0.5376562757207516e+00, 0.6215703771401371e+00, 0.4908247191212207e+00, 0.2848423854658637e+00, 0.2355591822832676e+00, 0.6513799258897619e+00, 0.6071703403704332e+00, 0.3981458512505542e+00, 
0.8487946917355450e+00, 0.6376169839751418e+00, 0.2588233361358196e+00, 0.3588309885453386e+00, 0.5618301121344036e+00, 0.1214685095104872e+00, 0.9784693864155333e+00, 0.3832063578469370e+00, 0.7206987579838013e+00, 0.3582640246022534e+00, 0.9098815720988161e+00, 0.5436872318254895e+00, 0.9596753969896468e+00, 0.9294728975416107e+00, 0.6333208483475120e+00, 0.2956089747729942e+00, 0.1747395639573214e+00, 0.8425785962238860e+00, 0.6185896542974498e+00, 0.7058928872472775e+00, 0.2424399357290958e+00, 0.7979610473510427e+00, 0.3910140128664733e+00, 0.1099592891010870e+00, 0.7536879342329168e+00, 0.2979717756246736e-01, 0.7006607890734408e+00, 0.5252033671714352e+00, 0.7525598235593517e+00, 0.6476778890126167e+00, 0.8710847630132640e+00}, + n: 21, + zOut: []float64{0.2486733497028020e+01, 0.2359071020584524e+01, 0.2276435093308226e+01, 0.2061969496713965e+01, 0.1674052496256098e+01, 0.1634406821090099e+01, 0.1324515540477209e+01, 0.1213036064691536e+01, 0.1142092570196908e+01, 0.8479728747156923e+00, 0.7723287409164926e+00, 0.6845846156476076e+00, 0.4690003902541462e+00, 0.3969394481970619e+00, 0.3209839311891622e+00, 0.1762493640751034e+00, 0.1552318325824889e+00, 0.1299582823757977e+00, 0.4568511390642456e-01, 0.2058742849236605e-01, 0.6687292092604351e-04, 0.0000000000000000e+00, 0.9075510178548721e-16, 0.1809089799756099e-15, 0.1324515540477209e+01, 0.1972152263052530e-28, 0.7207148989396829e-15, 0.1135837106477965e-14, 0.1213036064691536e+01, 0.6865185493883853e-20, 0.3862483738583121e-14, 0.5795548067006594e-14, 0.1142092570196908e+01, 0.1237761291638161e-15, 0.1761387669893985e-13, 0.1204467761940304e-13, 0.8479728747156923e+00, 0.5980780785048348e-20, 0.5961915529495600e-12, 0.8546407799162654e-12, 0.7723287409164926e+00, 0.9983823616477125e-26, 0.2019190149561986e+02, 0.2019190149561985e+02, 0.1120000000000000e+03, 0.3267573696145125e+01, 0.7142857142857143e+01, 0.2981196239393922e-10, 0.4690003902541462e+00, 0.1411215361455214e-23, 0.1291839253366726e-09, 0.2114581039065388e-09, 0.3969394481970619e+00, 0.3215018396446338e-24, 0.8992249144513522e-09, 0.1460279739604385e-08, 0.3209839311891622e+00, 0.5301710738582062e-20, 0.5956459688113839e-08, 0.9509260151006140e-08, 0.1762493640751034e+00, 0.3305918938014538e-18, 0.2145126200057182e-06, 0.1391189925457742e-06, 0.1552318325824889e+00, 0.0000000000000000e+00, 0.8222867095540534e-06, 0.8520250895881005e-06, 0.1299582823757977e+00, 0.1946507846527860e-19, 0.4299487364203470e-05, 0.2670451838837521e-05, 0.4568511390642456e-01, 0.1783600337601646e-17, 0.1688330183620908e-03, 0.2840434656464124e-03, 0.2058742849236605e-01, 0.2127174768075152e-17, 0.5266290662212281e-04, 0.3082204001196310e-04, 0.6687292092604351e-04, 0.4725269249209171e-22, 0.3479992598793783e-02, 0.7157534417412210e-03}, + info: 0, + }, + { + z: []float64{0.8307212987388154e+00, 0.9571804405037874e+00, 0.2222674360399970e+00, 0.5794927951917870e+00, 0.9907605078898842e+00, 0.8399673535531696e+00, 0.1748054629244838e+00, 0.7095417514667766e+00, 0.3015507353664195e+00, 0.2504155371033889e-01, 0.9167396887269817e+00, 0.6888065712047682e+00, 0.7567577995550097e+00, 0.9327280452266753e+00, 0.5500991579404824e-01, 0.4989499454693689e+00, 0.5949898652751251e-01, 0.1718660951302574e+00, 0.6883620036166904e+00, 0.3066205390931208e+00, 0.7351901912310475e+00, 0.7521684164853142e+00, 0.3957210266430871e+00, 0.8778502662051324e-01, 0.4896297811280748e-01, 0.7793618951247419e+00, 0.6807766665219679e+00, 0.3115699624095645e+00, 0.5227646665950632e+00, 0.5057964692468060e+00, 
0.4385260593432981e+00, 0.5876392780963172e+00, 0.2973252261444674e+00, 0.2948995727695043e+00, 0.7378444260331429e-01, 0.9749457382350615e+00, 0.7281626398300706e+00, 0.7580903878987629e+00, 0.3091762186458183e+00, 0.2464983240671905e+00, 0.2568872936019900e+00, 0.6180627613788815e+00, 0.9377882352337240e+00, 0.9921824152100049e+00, 0.2248192100106184e+00, 0.9472840045361628e+00, 0.8477853668436538e+00, 0.7001263233958666e+00, 0.5544440083201581e+00, 0.4415839019184687e+00, 0.2491818172148126e+00, 0.8976627554942691e+00, 0.1457298176556392e+00, 0.4345936198183369e+00, 0.4573762249453559e+00, 0.3093805347522776e+00, 0.4809246108862507e+00, 0.6897426752489948e+00, 0.8107988692352123e+00, 0.3921644162683642e+00, 0.2931487419364392e+00, 0.5759780604710760e+00, 0.7253418085569345e+00, 0.1427733376597652e+00, 0.3335555091947459e+00, 0.1603897948564823e+00, 0.5345943097231743e+00, 0.7233253065718669e+00, 0.2281802915301432e+00, 0.1990064435881517e+00, 0.9726196083044836e+00, 0.9935713768557323e+00, 0.8195201715007894e-01, 0.2508806553004002e+00, 0.2158489797899930e+00, 0.1957771185505375e+00, 0.8502651710165565e+00, 0.4468235108377239e-01, 0.8639211642125266e+00, 0.6713373034638015e-01, 0.4273173462824215e+00, 0.1373188375801981e+00, 0.1190874985301729e+00, 0.3059495456429208e+00}, + n: 21, + zOut: []float64{0.2399198387954965e+01, 0.2388283868878418e+01, 0.2234728187505421e+01, 0.1947641030296732e+01, 0.1845273105794422e+01, 0.1837097911259683e+01, 0.1471118007292610e+01, 0.1105651020532553e+01, 0.9883044021137968e+00, 0.9478841363642468e+00, 0.8550293065516678e+00, 0.7566674361156733e+00, 0.5310790726859680e+00, 0.3571217714437427e+00, 0.2893653184719416e+00, 0.2534414197270934e+00, 0.2173582726294167e+00, 0.4908458037260943e-01, 0.1246498683649276e-01, 0.5862142085722914e-02, 0.4704104342516643e-04, 0.1939986690620616e-19, 0.1034652479755287e-16, 0.1536368559490732e-16, 0.1471118007292610e+01, 0.5364686198585281e-13, 0.6088846084221820e-07, 0.1529604999486430e-06, 0.1105651020532553e+01, 0.2903008131213323e-27, 0.1160127138141562e-06, 0.1335479603764263e-06, 0.9883044021137968e+00, 0.2125606020077514e-18, 0.5906967539735199e-07, 0.6675815740532400e-07, 0.9478841363642468e+00, 0.1331202777560457e-29, 0.4438231138534822e-07, 0.3948328125271971e-07, 0.8550293065516678e+00, 0.5860341995390921e-20, 0.2049270140595660e+02, 0.2049270140595660e+02, 0.9800000000000000e+02, 0.2893424036281179e+01, 0.4081632653061225e+01, 0.1377712740565197e-07, 0.5310790726859680e+00, 0.1537961403283359e-19, 0.8011211497963711e-08, 0.8400248580143560e-08, 0.3571217714437427e+00, 0.5389497704469953e-25, 0.6965765014245226e-08, 0.7336584331089061e-08, 0.2893653184719416e+00, 0.6081961202185573e-17, 0.9973730165755502e-08, 0.8562226724273748e-08, 0.2534414197270934e+00, 0.3644137891043716e-17, 0.1552441635408352e-06, 0.1605842922922618e-07, 0.2173582726294167e+00, 0.1710250442519154e-26, 0.2805940935843235e-04, 0.1153299285109565e-04, 0.4908458037260943e-01, 0.1140499340513312e-15, 0.3458275194991493e-03, 0.2689727098393103e-03, 0.1246498683649276e-01, 0.4437342591868191e-30, 0.1429141842628293e-03, 0.1683368406483595e-03, 0.5862142085722914e-02, 0.2515327803596613e-22, 0.2394538487567536e-03, 0.1603201011413965e-03, 0.4704104342516643e-04, 0.0000000000000000e+00, 0.2149753858259932e-02, 0.4092589741085703e-02}, + info: 0, + }, + { + z: []float64{0.6132370003914160e+00, 0.6636632940312478e+00, 0.8959100063178993e+00, 0.7536793777540497e+00, 0.6298705159481864e+00, 0.2209880035966978e+00, 0.4838373503885766e+00, 
0.6150203746616348e+00, 0.9876587724998732e+00, 0.5591876171910803e+00, 0.3708840600033242e+00, 0.3375947236522073e+00, 0.7454250822233153e+00, 0.5697349010621601e+00, 0.4561367007431339e+00, 0.6481896738286699e+00, 0.6835960923418801e+00, 0.1703905177923075e+00, 0.2730661115049011e+00, 0.4407624886449348e+00, 0.4577220273293221e+00, 0.3952566732937318e+00, 0.6883342106097436e+00, 0.2988887099704991e+00, 0.2052274263883085e+00, 0.1922842703083140e+00, 0.4189999528960880e+00, 0.6906452535299822e+00, 0.1405554539793709e+00, 0.1437412601859918e+00, 0.6624635742176765e+00, 0.5756752017566100e+00, 0.7749505183416766e+00, 0.7752965769666669e+00, 0.7653541054136069e+00, 0.8765107194498384e+00, 0.9921378985956986e-01, 0.3345156485504346e+00, 0.6450936562634260e+00, 0.1316181443467312e+00, 0.8075436662342752e+00, 0.5418723210737144e+00, 0.2657525890099199e+00, 0.4588749915888913e+00, 0.5109250246010762e+00, 0.5048342930643721e+00, 0.9617739307304302e+00, 0.5502351700731637e+00, 0.1896288106400514e+00, 0.2011189112848212e+00, 0.2487376305081674e+00, 0.9700914054633822e+00, 0.7763920500140777e+00, 0.2727004477756484e+00, 0.4969507403482072e+00, 0.3987515903118115e+00, 0.6132963384905279e+00, 0.8526865910258985e-01, 0.7370021348269777e+00, 0.8628692345603857e+00, 0.3410351221174988e+00, 0.6944408188735030e+00, 0.6418832405583447e+00, 0.8549561267667016e-01, 0.7079842121947655e+00, 0.5924596383322266e+00, 0.2327865765542259e+00, 0.7519783901092147e-01, 0.3243359400926964e+00, 0.9130914950306788e+00, 0.2042866226175709e+00, 0.4527677275849958e+00, 0.7271733772634871e+00, 0.2835797318864963e-01, 0.5170298378180002e+00, 0.7676612860726464e+00, 0.8943646165240225e+00, 0.9444900692816881e+00, 0.2898061819682256e+00, 0.5314658042534958e+00, 0.2328255902743269e+00, 0.3127766212762262e+00, 0.1415213232426512e+00, 0.3425808715425687e+00}, + n: 21, + zOut: []float64{0.2568497247033635e+01, 0.2474290441512907e+01, 0.2254355189310800e+01, 0.1987099258783412e+01, 0.1595738223681455e+01, 0.1520583769371047e+01, 0.1349481184037943e+01, 0.1286943522977408e+01, 0.1233633359467258e+01, 0.1105388350935419e+01, 0.8741663391031235e+00, 0.7042784684853729e+00, 0.6809050564171195e+00, 0.5312535309083547e+00, 0.3765999563195902e+00, 0.2982885955048700e+00, 0.1761408438030470e+00, 0.1021950592815966e+00, 0.5149411253987173e-01, 0.2409819364774552e-01, 0.3292801347387554e-02, 0.7649046082865931e-16, 0.2073367755475779e-07, 0.1625592945762753e-07, 0.1349481184037943e+01, 0.1183291357831518e-29, 0.4660703060512034e-13, 0.1807862490656452e-12, 0.1286943522977408e+01, 0.1649665924998180e-25, 0.1294990598064480e-10, 0.5231829186377068e-10, 0.1233633359467258e+01, 0.8526291632353489e-16, 0.3101844497115867e-08, 0.1140775990076438e-07, 0.1105388350935419e+01, 0.9624103043696344e-28, 0.5663389137423196e-06, 0.4346359020206590e-06, 0.8741663391031235e+00, 0.3067428736041242e-19, 0.2119872350446936e+02, 0.2119872350446936e+02, 0.1030000000000000e+03, 0.3056689342403628e+01, 0.2912621359223301e+01, 0.2039882393707806e-06, 0.6809050564171195e+00, 0.0000000000000000e+00, 0.3469744030301619e-06, 0.2644131172050035e-06, 0.5312535309083547e+00, 0.3043881270837009e-21, 0.5541811479797519e-05, 0.3827690844980446e-05, 0.3765999563195902e+00, 0.1909043390634849e-27, 0.3412067971678679e-04, 0.3418356678006945e-04, 0.2982885955048700e+00, 0.1167175485134229e-15, 0.1390598737761422e-03, 0.1685020271483295e-03, 0.1761408438030470e+00, 0.3311298373357662e-16, 0.2687183242739271e-03, 0.2970561145148615e-03, 0.1021950592815966e+00, 
0.3861046454850253e-20, 0.3877059717439437e-03, 0.4274183271611622e-03, 0.5149411253987173e-01, 0.3171094621243632e-24, 0.5614281929126997e-03, 0.6201503766753644e-03, 0.2409819364774552e-01, 0.5854827030937197e-31, 0.8580439238194122e-03, 0.9937175751003719e-03, 0.3292801347387554e-02, 0.1687712113428427e-19, 0.6404816195044538e-02, 0.3574263865391779e-02}, + info: 0, + }, + { + z: []float64{0.1535280608392028e+00, 0.8492164242958209e+00, 0.9720508422713467e+00, 0.4409240313091006e+00, 0.7744413453832578e+00, 0.4562143200311385e+00, 0.8645004927526174e+00, 0.7279750979364787e+00, 0.6373358699411871e+00, 0.7240388495630282e+00, 0.6042124182518795e+00, 0.2629417492305242e-01, 0.2927658696806527e+00, 0.1971917075100318e+00, 0.9486620805007744e+00, 0.5318461733579811e+00, 0.1035922758398233e-01, 0.4521355876782263e+00, 0.9635828765019094e+00, 0.9861179636072996e+00, 0.3409131837267612e+00, 0.9592913933135179e+00, 0.3267081303471464e+00, 0.7042329006712417e+00, 0.7904477014671945e+00, 0.7235636505410947e+00, 0.7289557043239185e+00, 0.4453819137782510e+00, 0.3817792285714174e+00, 0.9222246883547156e+00, 0.7484233302016036e+00, 0.4267300781414476e+00, 0.2174272124496083e+00, 0.7294135193889231e+00, 0.7969150359898325e+00, 0.5584233158827989e+00, 0.8854167010119880e+00, 0.3831257236275667e+00, 0.2407966333760054e+00, 0.3150832526072347e+00, 0.4329046703236793e+00, 0.4798261053116015e-01, 0.4240797212710790e+00, 0.7373271503520912e+00, 0.8727856342792233e+00, 0.3079871491200393e+00, 0.8433176604823556e+00, 0.8160920845908722e+00, 0.4068006968345205e+00, 0.8649881745683563e+00, 0.8522504948442566e+00, 0.5120820890311298e+00, 0.7764141319706099e+00, 0.4928600452500731e+00, 0.2048536231221674e+00, 0.4451308747598662e+00, 0.4335546033385644e+00, 0.5471256397417612e-01, 0.7004960984988220e+00, 0.9192372420579460e+00, 0.3890133989146303e+00, 0.2488782917619309e+00, 0.3663470187625480e+00, 0.9986636866778884e+00, 0.6620792031393874e+00, 0.1797345205931197e+00, 0.8723761011602340e+00, 0.2439575243714007e+00, 0.1163919437436586e+00, 0.8182195270597429e+00, 0.5356403226658167e+00, 0.9880537276300997e+00, 0.4163057033602857e+00, 0.8081980088045720e+00, 0.2399383760782623e+00, 0.9702581513812658e+00, 0.3763181880939181e+00, 0.6334004896983517e+00, 0.1368995930243956e+00, 0.7684724492197631e+00, 0.1011989548860721e+00, 0.5519462180485674e+00, 0.8316516728108886e+00, 0.3772586994144635e+00}, + n: 21, + zOut: []float64{0.2579297838383781e+01, 0.2566242251743589e+01, 0.2402206059499122e+01, 0.2314572748831938e+01, 0.2210246243389976e+01, 0.2073991263591532e+01, 0.1647852770173070e+01, 0.1458062583915517e+01, 0.1424747680105817e+01, 0.1229594627898745e+01, 0.8682818248784862e+00, 0.7800126440184885e+00, 0.7769870810337002e+00, 0.4018986220147585e+00, 0.3451236345722463e+00, 0.2059421010707241e+00, 0.1665395041671736e+00, 0.1384541468623413e+00, 0.4569625338134105e-01, 0.3572364569397507e-01, 0.7785678858960618e-04, 0.1325624140040768e-19, 0.1464254707155794e-03, 0.9424803187834284e-04, 0.1647852770173070e+01, 0.1564186274351218e-15, 0.3473328524848360e-05, 0.4592867365821283e-05, 0.1458062583915517e+01, 0.3231174267785264e-26, 0.1403657959312935e-04, 0.9634257574341414e-05, 0.1424747680105817e+01, 0.1020140793606197e-15, 0.1723203566561169e-05, 0.6067379842882497e-05, 0.1229594627898745e+01, 0.2663149152121406e-17, 0.1061013981937667e-04, 0.1674993264096194e-04, 0.8682818248784862e+00, 0.8099755562101574e-24, 0.2367155138201492e+02, 0.2367155138201491e+02, 0.1110000000000000e+03, 0.3174603174603174e+01, 
0.5405405405405405e+01, 0.8398559432747079e-06, 0.7769870810337002e+00, 0.0000000000000000e+00, 0.6532651675838807e-05, 0.1393736055608104e-04, 0.4018986220147585e+00, 0.1488702756444268e-16, 0.4786924897797409e-03, 0.2484818164296786e-03, 0.3451236345722463e+00, 0.1393994753571567e-18, 0.7967956438667165e-06, 0.1566848631115136e-05, 0.2059421010707241e+00, 0.1872521839860791e-17, 0.1674828806776016e-04, 0.3829209207735075e-04, 0.1665395041671736e+00, 0.5127595883936577e-29, 0.1145979873394449e-04, 0.6763541499702777e-05, 0.1384541468623413e+00, 0.6162975822039155e-31, 0.1509781169748205e-03, 0.4934238518442497e-04, 0.4569625338134105e-01, 0.3815207438965640e-25, 0.3298388086972868e-02, 0.1367005987525658e-02, 0.3572364569397507e-01, 0.2773930797150972e-21, 0.3955985102398275e-03, 0.3433403686203228e-03, 0.7785678858960618e-04, 0.7790054198074833e-21, 0.1914938157793930e-01, 0.1330976133014081e-01}, + info: 0, + }, + { + z: []float64{0.8677677383100214e+00, 0.6246622741727026e+00, 0.5523136647126087e+00, 0.2116103484147348e+00, 0.8227197076777661e+00, 0.3636019443697256e+00, 0.2305222034118074e+00, 0.3291143648196290e+00, 0.6800926583943011e+00, 0.4290450477605455e+00, 0.4523048516285796e+00, 0.9276084588261152e+00, 0.4462311938877914e+00, 0.6672491542978151e+00, 0.7808537049355564e+00, 0.7908271145868994e+00, 0.1977402045683964e+00, 0.2563794060302639e+00, 0.6768044668199432e+00, 0.8985309013607360e+00, 0.3750140409512910e+00, 0.2003663066929584e-01, 0.4020022629723622e+00, 0.9909625073113403e+00, 0.9747495116121061e+00, 0.9702779584867087e+00, 0.9688371740876045e+00, 0.3747269639045615e+00, 0.9728403214040160e-01, 0.7507234588709335e+00, 0.8245099981459856e+00, 0.7244448753186800e+00, 0.3670523289486082e+00, 0.6220520718362722e+00, 0.3063226611493952e+00, 0.3012769332894331e+00, 0.6763821017483275e+00, 0.8300640433662996e+00, 0.5563237542291373e+00, 0.5594449054752113e+00, 0.3278038518373678e+00, 0.7307041435680680e+00, 0.1582529760475658e+00, 0.5328751876026443e+00, 0.7841944053171677e+00, 0.2157818394558657e+00, 0.3253134136288985e+00, 0.5014502641306768e+00, 0.2229178930043155e+00, 0.4664985859426845e+00, 0.1867987929192785e+00, 0.1951091673564507e+00, 0.2069185156156168e+00, 0.9058058542218750e+00, 0.7999188612304056e+00, 0.7349484002829904e+00, 0.4153480749961962e+00, 0.8109879258422723e+00, 0.9883741464101338e+00, 0.4911202582082937e+00, 0.2193166361224579e+00, 0.7618820222564749e+00, 0.9073087210515056e+00, 0.5896383620860655e+00, 0.8706198942119541e+00, 0.3860710050052940e-01, 0.9380336544251486e+00, 0.4690283060543987e-01, 0.8970820572284144e+00, 0.1974579201509297e-01, 0.9992442335759629e+00, 0.9801967887432729e+00, 0.6116617864086051e+00, 0.5875675950916066e+00, 0.9622537584002233e+00, 0.8934200351761762e+00, 0.2022182729782195e+00, 0.1273316669652236e-01, 0.2549904925850485e+00, 0.8904280763656908e+00, 0.2695748922419082e+00, 0.8890196843116763e+00, 0.5191439214393291e+00, 0.3672061831861048e+00}, + n: 21, + zOut: []float64{0.3071910136434907e+01, 0.2366684893081943e+01, 0.2247985022656176e+01, 0.2134160618983571e+01, 0.1929380862422316e+01, 0.1895668448583188e+01, 0.1735809456784568e+01, 0.1390623822795548e+01, 0.1372853012616850e+01, 0.1197489438156041e+01, 0.1106600205023489e+01, 0.8564547950868329e+00, 0.4949238197552136e+00, 0.3889452841496929e+00, 0.3405947975938995e+00, 0.2516597920662951e+00, 0.2113890461921598e+00, 0.1169778386549868e+00, 0.1072268106836703e+00, 0.7646784687889400e-02, 0.1286588928027629e-02, 0.1194767075191949e-22, 0.1162094363453686e-09, 
0.8691558890693288e-10, 0.1735809456784568e+01, 0.8134816760001801e-14, 0.1019665656815186e-08, 0.1906916241197346e-08, 0.1390623822795548e+01, 0.0000000000000000e+00, 0.1594323424098234e-05, 0.3312168640703357e-05, 0.1372853012616850e+01, 0.0000000000000000e+00, 0.3830524268846776e-04, 0.1577534719508939e-04, 0.1197489438156041e+01, 0.1592850300841939e-16, 0.1256296423282912e-03, 0.1149042579564614e-03, 0.1106600205023489e+01, 0.6530365529382261e-18, 0.2322627147533726e+02, 0.2322627147533726e+02, 0.1020000000000000e+03, 0.2963718820861678e+01, 0.3921568627450980e+01, 0.6483516393063832e-03, 0.4949238197552136e+00, 0.1972152263052530e-28, 0.5430985229870038e-04, 0.4447663749968261e-04, 0.3889452841496929e+00, 0.1329049288162406e-17, 0.2738942590248176e-04, 0.1582812372822289e-04, 0.3405947975938995e+00, 0.1187508759187252e-18, 0.7465410105999781e-04, 0.1255487400488340e-03, 0.2516597920662951e+00, 0.4141519752410312e-29, 0.2152266118694742e-04, 0.6710408995051273e-05, 0.2113890461921598e+00, 0.3333294884283764e-18, 0.6132111109262542e-03, 0.4277764486586744e-03, 0.1169778386549868e+00, 0.2711709361697228e-30, 0.3832079371139971e-03, 0.3904887652242632e-03, 0.1072268106836703e+00, 0.2176254805972020e-18, 0.4306247975784355e-03, 0.4948147025217374e-03, 0.7646784687889400e-02, 0.1110488679882733e-19, 0.1569670832436511e-02, 0.2092370053382882e-02, 0.1286588928027629e-02, 0.5442848430598922e-18, 0.4460307636637516e-02, 0.7046212350403941e-02}, + info: 0, + }, + { + z: []float64{0.9838241499564321e+00, 0.8769514477589457e+00, 0.6814467524843054e+00, 0.3624618995089947e+00, 0.1981908649829056e+00, 0.1872278407669582e+00, 0.9876554444003832e+00, 0.1826583579385833e+00, 0.1183269767614670e-01, 0.9437175277915953e+00, 0.4919320768587329e+00, 0.9805291749478650e+00, 0.6132842150855770e+00, 0.3626870797541525e+00, 0.5323780753704027e+00, 0.1457937981484730e+00, 0.1328300737138376e+00, 0.3811835111034733e+00, 0.7003413504474413e+00, 0.5497308136521228e+00, 0.8255713255718768e+00, 0.5373393799690840e+00, 0.6623327475759663e+00, 0.5316913920782962e-01, 0.1714649183335913e+00, 0.6186592957613974e+00, 0.7587327126921758e+00, 0.9100586255702885e+00, 0.7140000863381626e+00, 0.7400119974078728e+00, 0.3567025022071233e+00, 0.8218612151637966e+00, 0.5520556331010515e+00, 0.6508027169553372e+00, 0.1875664464519897e+00, 0.8183092398415237e+00, 0.2239662369231676e-01, 0.6571790154721793e+00, 0.1196415656369646e+00, 0.3853512775546503e+00, 0.2426450009067070e+00, 0.9653447575661293e+00, 0.4852906340727067e+00, 0.9923496603563310e+00, 0.2812811891225394e+00, 0.9784300144088514e+00, 0.9528689986126391e+00, 0.2431080020200448e+00, 0.4166208849723857e+00, 0.7357806826601304e+00, 0.1952831632199464e+00, 0.6393407699957093e+00, 0.6296804002658308e+00, 0.1028694374663701e+00, 0.1387452591062114e+00, 0.4549213107961220e+00, 0.9843671628976580e+00, 0.4249152858158138e+00, 0.7048043599695387e+00, 0.3653998904622074e+00, 0.2003765309007774e+00, 0.4095912484595889e+00, 0.2392233396621621e+00, 0.7050044605645847e+00, 0.7908086424110450e-01, 0.2038177493661538e+00, 0.1735168081909322e-01, 0.3343100300667304e+00, 0.2848572906345944e+00, 0.8454138442968477e+00, 0.4944340369822147e+00, 0.7101083458004501e+00, 0.2546477176633619e+00, 0.8227235336957786e+00, 0.2816146807741101e+00, 0.6247995550265835e+00, 0.7458069042729408e+00, 0.8739345864578790e+00, 0.1278937298671606e+00, 0.2791833532060612e+00, 0.9234762152279486e+00, 0.9863012584972185e+00, 0.7892913846591531e+00, 0.3479266835120939e+00}, + n: 21, + zOut: 
[]float64{0.2623495482601064e+01, 0.2337512212773574e+01, 0.2310508039367694e+01, 0.2188663096012427e+01, 0.1954141717332891e+01, 0.1371476701077329e+01, 0.1267869950064541e+01, 0.1203016320141301e+01, 0.1132845050819559e+01, 0.9979737678355206e+00, 0.8633053234635751e+00, 0.6603430282082061e+00, 0.6064350433288983e+00, 0.4855760962077864e+00, 0.4280373135604714e+00, 0.3659370670152315e+00, 0.1453682093766173e+00, 0.9617861239701422e-01, 0.7192949634365248e-01, 0.1892143303127712e-02, 0.3946528731286962e-05, 0.0000000000000000e+00, 0.9258243476772570e-07, 0.9000418438492766e-07, 0.1267869950064541e+01, 0.0000000000000000e+00, 0.2500234135491857e-07, 0.1856395032302831e-07, 0.1203016320141301e+01, 0.2658426139847422e-18, 0.4168972835699797e-07, 0.5531467635213317e-07, 0.1132845050819559e+01, 0.2518044009465470e-24, 0.1194059466661935e-08, 0.2159625808467239e-08, 0.9979737678355206e+00, 0.1047506307937635e-23, 0.4478743649191460e-07, 0.9991281120631680e-07, 0.8633053234635751e+00, 0.5895762127714383e-22, 0.2111250861775921e+02, 0.2111250861775921e+02, 0.9900000000000000e+02, 0.2927437641723356e+01, 0.4040404040404041e+01, 0.9244672049175682e-07, 0.6064350433288983e+00, 0.1817535525629211e-26, 0.9398974933478891e-07, 0.9547946669183887e-07, 0.4855760962077864e+00, 0.3185418565406740e-17, 0.9937648506460343e-07, 0.9792626502078532e-07, 0.4280373135604714e+00, 0.2958228394578794e-30, 0.9921525775570031e-07, 0.1006425128472401e-06, 0.3659370670152315e+00, 0.4461863922899316e-21, 0.1047488573169732e-06, 0.1032270990738791e-06, 0.1453682093766173e+00, 0.6504493884088473e-15, 0.1102576940374778e-06, 0.1180293023862048e-06, 0.9617861239701422e-01, 0.6467797587852522e-18, 0.1844536574504796e-06, 0.1488485108626942e-06, 0.7192949634365248e-01, 0.2114103686638959e-13, 0.8531410278849863e-06, 0.1408572284551695e-05, 0.1892143303127712e-02, 0.0000000000000000e+00, 0.6242841403373791e-04, 0.3522115697192072e-04, 0.3946528731286962e-05, 0.6203854594147708e-24, 0.1209929086462260e-02, 0.4906802871097585e-03}, + info: 0, + }, + { + z: []float64{0.7643247442799757e+00, 0.1930914554442843e+00, 0.6441117687067544e-01, 0.4522061669974708e-01, 0.1465182320932497e+00, 0.8069033698425149e+00, 0.6560188128523874e+00, 0.2446749124420051e+00, 0.3243649544497533e+00, 0.9836733438970116e+00, 0.6137527621157332e+00, 0.2925225554635034e+00, 0.4955619768673408e+00, 0.7361289066349539e+00, 0.3953262269487445e-01, 0.1565854250658760e-01, 0.4785236355428808e+00, 0.7766165270899886e+00, 0.9069259340739989e+00, 0.9151936268801151e+00, 0.3338292872636017e+00, 0.2993135005490550e+00, 0.8287022194584887e+00, 0.4593628480546942e+00, 0.1484428934315618e-01, 0.3390977660295040e+00, 0.5819746692101802e-01, 0.9277352020779983e-01, 0.6517310862845033e+00, 0.3951866136249272e+00, 0.6199300068296295e+00, 0.8252199884043853e+00, 0.3390764160478065e+00, 0.3084679704183562e+00, 0.3609211283483548e+00, 0.4116857210304998e+00, 0.4609476939442378e+00, 0.3824129844545763e+00, 0.6431169626236315e+00, 0.2416519334621993e+00, 0.7156780768158061e+00, 0.8568018406565006e+00, 0.1639408452444666e+00, 0.7313040585903831e+00, 0.5099183572592548e+00, 0.2801641590611897e+00, 0.9930562568266591e+00, 0.7612468327164370e+00, 0.3271886985411110e-01, 0.7066605591101006e+00, 0.6720867845388043e+00, 0.9004209067318458e-01, 0.4611918168927077e+00, 0.2638384667560661e+00, 0.3554909044606102e+00, 0.7231384727137510e+00, 0.2729268177075039e-01, 0.5848397641916390e+00, 0.6942450598380807e+00, 0.1106927451590289e+00, 0.5262572286481777e+00, 0.9936768911750095e+00, 
0.7262536107933111e+00, 0.7604873714570974e+00, 0.2036182130165832e+00, 0.8632527217998969e+00, 0.1924957421132846e+00, 0.8815968660524154e-01, 0.2412813036310424e+00, 0.9105232147744077e+00, 0.8298587923387578e+00, 0.7754575363227978e-01, 0.8714764594177653e+00, 0.5571901679066804e+00, 0.3971870017409277e+00, 0.9993097753535422e+00, 0.7700188328643205e+00, 0.7856615172810383e+00, 0.7533953600915489e+00, 0.5602208064288483e+00, 0.6944234372397261e+00, 0.8756090822068926e+00, 0.1433700749181099e-01, 0.2857413018245216e+00}, + n: 21, + zOut: []float64{0.2546753248931182e+01, 0.2051980365170268e+01, 0.1958366389726797e+01, 0.1631238683423845e+01, 0.1586034790683645e+01, 0.1538534425510682e+01, 0.1255412586747036e+01, 0.1121690972560896e+01, 0.9718147643766369e+00, 0.9049110982817965e+00, 0.7946671649858114e+00, 0.5884376759528481e+00, 0.4050217422474118e+00, 0.3189813042957475e+00, 0.2502079076659038e+00, 0.1378021633219403e+00, 0.1230588999356998e+00, 0.5024634530670342e-01, 0.4118371112977081e-01, 0.5279820157992808e-02, 0.1421284452028254e-03, 0.2680065184671810e-21, 0.2301930285321580e-14, 0.5132436462785431e-14, 0.1255412586747036e+01, 0.2424007516335156e-18, 0.1913029885324846e-10, 0.9021761757831104e-11, 0.1121690972560896e+01, 0.1977581519346623e-22, 0.4602280167506019e-08, 0.2460881124989248e-08, 0.9718147643766369e+00, 0.1104484640438811e-18, 0.4278245612761057e-07, 0.2387897413722468e-07, 0.9049110982817965e+00, 0.2768434971275651e-16, 0.3015076539683485e-07, 0.2961312447326530e-07, 0.7946671649858114e+00, 0.1262042385269225e-19, 0.1828176618885781e+02, 0.1828176618885782e+02, 0.1040000000000000e+03, 0.3122448979591837e+01, 0.3846153846153846e+01, 0.1851926648424158e-06, 0.4050217422474118e+00, 0.9293221980374701e-17, 0.1284245841610550e-05, 0.2164676294343609e-05, 0.3189813042957475e+00, 0.1378649105642637e-16, 0.4610281885819925e-06, 0.1003327305702036e-05, 0.2502079076659038e+00, 0.2177821011994986e-13, 0.3677246803752887e-04, 0.3095541926811936e-04, 0.1378021633219403e+00, 0.9210656120777790e-20, 0.9401458780737957e-05, 0.1039122642078444e-04, 0.1230588999356998e+00, 0.4930380657631324e-30, 0.6528493871226699e-06, 0.1524653341983116e-05, 0.5024634530670342e-01, 0.6838275466788171e-23, 0.2104276631320477e-03, 0.3160258883556432e-03, 0.4118371112977081e-01, 0.6162975822039155e-32, 0.9318133168194381e-04, 0.1261364640367337e-03, 0.5279820157992808e-02, 0.7005084838362585e-27, 0.1903158606362315e-02, 0.7288242235414376e-03, 0.1421284452028254e-03, 0.7703719777548943e-33, 0.7890744155827079e-03, 0.6129022301104231e-03}, + info: 0, + }, + { + z: []float64{0.1813251202119042e+00, 0.8077156210855166e+00, 0.5525396662071069e+00, 0.4858391136124270e+00, 0.5367389425969793e+00, 0.4534294101162717e-01, 0.1438386197304258e+00, 0.7801305810912117e+00, 0.7706244548712898e+00, 0.1595286891781422e+00, 0.4689597797472277e+00, 0.1630160981693277e+00, 0.5488525733069047e+00, 0.3050379405729475e+00, 0.1544160351990825e+00, 0.5750210099163514e+00, 0.1440876969429373e+00, 0.1932969959898051e+00, 0.6910796187912660e+00, 0.9501882645686455e+00, 0.1314109921701710e+00, 0.7099941843070685e+00, 0.3797313713209320e+00, 0.3979808845174081e+00, 0.1006018923100482e-02, 0.4131896358268045e+00, 0.8038132966802857e+00, 0.8778643630407806e+00, 0.1407889867849830e+00, 0.4305134083308382e-01, 0.5656496703997997e+00, 0.5820188952601091e-01, 0.8713537916178680e+00, 0.3443162753417257e+00, 0.4898335360866699e+00, 0.2822165738696382e+00, 0.2385629061714932e+00, 0.5496478420286478e+00, 0.7138724344268773e+00, 
0.9028268569137332e+00, 0.7573748459251011e+00, 0.5392259606733280e+00, 0.6459213128573325e+00, 0.7769497211063184e+00, 0.8790154346127051e+00, 0.6027389165195913e+00, 0.6151696559086139e-01, 0.4391208291942791e+00, 0.8705350585502258e-02, 0.2995750933531418e+00, 0.9735013653226658e+00, 0.9551710085419519e+00, 0.2132875677034638e+00, 0.6739668529657764e+00, 0.5361543284664445e+00, 0.3596977928604983e+00, 0.1373006735837845e-02, 0.8494363479416891e+00, 0.4214720423022895e+00, 0.7478449906096684e+00, 0.1164429527473354e-01, 0.6143683285709396e+00, 0.4444360064738268e+00, 0.6626608299302873e+00, 0.6887459689591686e+00, 0.9391262494647765e+00, 0.7167310461528731e+00, 0.4516657019045726e+00, 0.6345059624902227e+00, 0.2241941493026407e+00, 0.3981621310014529e+00, 0.8300976429090324e+00, 0.1390103241004258e+00, 0.7559319384422415e+00, 0.7929892786129927e+00, 0.2279061631835571e+00, 0.5280867615120838e+00, 0.1374499172030500e+00, 0.1739503966093568e+00, 0.1475298919309330e+00, 0.8100098524713260e+00, 0.2514850916075489e+00, 0.4100580488172028e+00, 0.9565966467338131e+00}, + n: 21, + zOut: []float64{0.2319179815575576e+01, 0.1965907124623829e+01, 0.1887317422176422e+01, 0.1806195743608332e+01, 0.1738584542532610e+01, 0.1518839817892385e+01, 0.1233018482483247e+01, 0.1147944957333420e+01, 0.8847632334488245e+00, 0.7964611162721277e+00, 0.7478364459856081e+00, 0.6404526251941426e+00, 0.4533452491188792e+00, 0.4218888103472739e+00, 0.2727838365998446e+00, 0.2511907405802137e+00, 0.9874721307137806e-01, 0.7639943902083833e-01, 0.4105860224136690e-01, 0.2835039177122555e-01, 0.1849635761425839e-05, 0.5687744349931984e-17, 0.4733652108368680e-16, 0.8474188088642906e-16, 0.1233018482483247e+01, 0.2772075531849652e-16, 0.2588283290162890e-14, 0.3835910828290052e-14, 0.1147944957333420e+01, 0.1246926305809672e-21, 0.2331804915191085e-13, 0.1628473983494628e-13, 0.8847632334488245e+00, 0.1029638495848151e-18, 0.6869501877456999e-13, 0.5177641249986829e-13, 0.7964611162721277e+00, 0.2366582715663035e-28, 0.1931319748910825e-12, 0.1483226506162133e-12, 0.7478364459856081e+00, 0.0000000000000000e+00, 0.1833026745951331e+02, 0.1833026745951330e+02, 0.1060000000000000e+03, 0.3281179138321995e+01, 0.4716981132075472e+01, 0.1032982741787823e-11, 0.4533452491188792e+00, 0.1306947070833229e-13, 0.7648022419924529e-11, 0.9563864191611805e-11, 0.4218888103472739e+00, 0.2958228394578794e-30, 0.1901963755802408e-10, 0.2450915092704452e-10, 0.2727838365998446e+00, 0.2459678163240430e-25, 0.5533379126041796e-09, 0.3623624255644135e-09, 0.2511907405802137e+00, 0.1890110728909544e-26, 0.3358805149198491e-08, 0.5375184613165106e-08, 0.9874721307137806e-01, 0.5174927538249837e-27, 0.5300164302540724e-07, 0.8261446474891677e-07, 0.7639943902083833e-01, 0.5686866711287438e-23, 0.3075058441398368e-06, 0.1980286028193141e-06, 0.4105860224136690e-01, 0.3391723239215845e-21, 0.3233943307567427e-08, 0.7908141105983331e-09, 0.2835039177122555e-01, 0.0000000000000000e+00, 0.6263230510933575e-04, 0.3843833760247782e-04, 0.1849635761425839e-05, 0.1263270231490984e-18, 0.3749684628997549e-02, 0.9345197933294263e-02}, + info: 0, + }, + { + z: []float64{0.7197398584131909e+00, 0.8353840049537183e+00, 0.7768334206137515e+00, 0.5378835466874095e+00, 0.6112887154160760e+00, 0.6941229566444685e+00, 0.6006831303610110e+00, 0.9634869426512738e-01, 0.5491965547787625e+00, 0.2928965321959978e+00, 0.2058564636772363e+00, 0.8430652032339261e+00, 0.6514967955084195e+00, 0.9630832188959448e+00, 0.6099558369768557e+00, 0.4038812997556026e+00, 
0.1286146012481256e+00, 0.1377932216741019e+00, 0.2151249629422850e+00, 0.5230096632076910e+00, 0.7097266420339532e+00, 0.5223889086485353e+00, 0.1844557902493219e+00, 0.8630886970819189e+00, 0.3143018526841452e+00, 0.3064747008129198e-02, 0.9995777893655772e+00, 0.8804003743833377e+00, 0.2437734590599224e+00, 0.5439224193002447e+00, 0.7609946860078259e+00, 0.6816222186177212e+00, 0.4213100187092527e+00, 0.5947299218072758e+00, 0.4464510683518146e+00, 0.9444991162538406e+00, 0.1663546170185037e+00, 0.9745722490509305e+00, 0.8691156687582791e-01, 0.1511122923287322e+00, 0.9099013034006386e+00, 0.2055280668016559e+00, 0.2215435147609097e+00, 0.1341458340087506e+00, 0.6974464577249216e+00, 0.9770585050570383e+00, 0.2836484389736937e-02, 0.4942205711890229e+00, 0.2476589071081227e+00, 0.9025674475802441e+00, 0.1043173067377164e+00, 0.7752297952628227e+00, 0.1045812556997563e+00, 0.6207135868564088e+00, 0.3343332589884191e+00, 0.9777410164371825e+00, 0.6919265640807399e+00, 0.8276791372331573e+00, 0.9681277631171376e+00, 0.2774385945770376e+00, 0.3660516291700039e+00, 0.1542243233614283e+00, 0.9809065840498956e+00, 0.9317468521553756e+00, 0.2342755587307467e+00, 0.1233924367977165e+00, 0.3662228319745224e+00, 0.8394462754756703e+00, 0.1257927855780998e+00, 0.9899742295380913e+00, 0.4800112259347978e+00, 0.9614338190585195e+00, 0.9605383026678687e+00, 0.7532278237974727e+00, 0.6455994058946259e+00, 0.6648840697262024e+00, 0.8792931963163917e+00, 0.7296338860815629e+00, 0.6962759488463338e+00, 0.5343477930729980e+00, 0.7488075021642351e+00, 0.8267598422017227e+00, 0.4128864062622362e-02, 0.2248794313377519e+00}, + n: 21, + zOut: []float64{0.2440396187223568e+01, 0.2429593845031884e+01, 0.2248977224883395e+01, 0.1983141106497864e+01, 0.1775047097539426e+01, 0.1685850120713433e+01, 0.1685238074648001e+01, 0.1228212776246503e+01, 0.1167582065653490e+01, 0.1163468828895948e+01, 0.9840288601088477e+00, 0.8446765744020336e+00, 0.5858419229944851e+00, 0.5828733061329966e+00, 0.4321746091262585e+00, 0.2506332278777725e+00, 0.1601952029039821e+00, 0.9519704710728188e-01, 0.5380488026634178e-01, 0.2070966044404248e-02, 0.4144953892415127e-03, 0.1577721810442024e-28, 0.2622007349295965e-13, 0.1959395597612919e-13, 0.1685238074648001e+01, 0.5298116057209151e-24, 0.4690429322435832e-13, 0.6275308499802102e-13, 0.1228212776246503e+01, 0.3786532345060857e-26, 0.6968636880972768e-12, 0.1483661956038748e-11, 0.1167582065653490e+01, 0.4135903062764930e-23, 0.1852086238235612e-10, 0.7750996975985970e-11, 0.1163468828895948e+01, 0.1171277614412583e-17, 0.1085363071939462e-09, 0.2643024434742409e-09, 0.9840288601088477e+00, 0.6878867093527223e-27, 0.2179941841968715e+02, 0.2179941841968716e+02, 0.1010000000000000e+03, 0.3140589569160998e+01, 0.4950495049504950e+01, 0.1077573597463830e-06, 0.5858419229944851e+00, 0.3627601229823987e-15, 0.8463992810044351e-06, 0.7647582718993818e-06, 0.5828733061329966e+00, 0.5798443197736525e-25, 0.5805124866712180e-06, 0.5336279095847515e-06, 0.4321746091262585e+00, 0.1164890830118439e-21, 0.3270430863981586e-06, 0.3120015969970153e-06, 0.2506332278777725e+00, 0.1344740577327136e-22, 0.6612009870063018e-06, 0.4716796838663449e-06, 0.1601952029039821e+00, 0.0000000000000000e+00, 0.1909143391170965e-04, 0.1029106625513428e-04, 0.9519704710728188e-01, 0.5127595883936577e-28, 0.3722911957200607e-04, 0.5334790194682025e-04, 0.5380488026634178e-01, 0.4916199468524512e-16, 0.8145434425645190e-04, 0.4879722252791062e-03, 0.2070966044404248e-02, 0.2051115390772406e-31, 
0.1886522673573371e-02, 0.1146190938097804e-02, 0.4144953892415127e-03, 0.1134788402846185e-20, 0.5837570815253673e-02, 0.1435831510150291e-01}, + info: 0, + }, + { + z: []float64{0.4141124863190200e+00, 0.6286592513493726e+00, 0.4875442526190354e-01, 0.6631999174821102e+00, 0.1778328741285358e+00, 0.5820997881750413e+00, 0.9610329112936564e+00, 0.2860180457593013e+00, 0.2800413372286658e+00, 0.8782990052007664e+00, 0.6769848820880126e+00, 0.2361981145928655e+00, 0.1864129841856804e+00, 0.6207802268756868e-01, 0.1668832170172669e+00, 0.2136536864861034e+00, 0.4266698341219798e+00, 0.9672331991204097e-01, 0.2350195655513042e+00, 0.1297162551474343e+00, 0.9428942697229301e+00, 0.3355426683568981e+00, 0.6977568272344947e-01, 0.4926170514192555e+00, 0.6966571762613959e+00, 0.7422949877167778e+00, 0.1216493993328296e+00, 0.5414775352707472e+00, 0.4507250901570525e+00, 0.6851601412475350e+00, 0.8339584749808495e+00, 0.9321374665508209e+00, 0.7664819871766777e+00, 0.5935455015911359e+00, 0.4047285985374618e+00, 0.9996987445838635e+00, 0.1347158425141065e+00, 0.3601395592420920e+00, 0.1128763847009464e+00, 0.6316809196633902e+00, 0.1559195263655234e+00, 0.4041853050912981e+00, 0.1854167847278504e+00, 0.6852913644854558e+00, 0.6126092157077416e+00, 0.2444516268314137e+00, 0.1447238269517732e+00, 0.7179956614587130e+00, 0.8198652148251639e+00, 0.1186997025611126e+00, 0.1267596520196094e+00, 0.2863431547875360e+00, 0.9867539057590858e+00, 0.2454746550565152e+00, 0.4891318294681372e+00, 0.2969454310391295e-01, 0.2349636207332649e+00, 0.4537731552146703e+00, 0.2883801124165087e+00, 0.7150664117598651e+00, 0.4359050919721921e+00, 0.6116081525473241e+00, 0.5619840590438748e+00, 0.3431726287099235e+00, 0.7715898362788249e+00, 0.4981359837313185e+00, 0.4537925078699018e+00, 0.6956466175473529e+00, 0.1814680828887681e+00, 0.4413903687143481e+00, 0.4367593306147978e+00, 0.5875066776157845e-01, 0.3688522862246468e+00, 0.4308468783440871e+00, 0.1986553761412286e+00, 0.3949957841484223e+00, 0.2468986938324821e+00, 0.6216444107315144e-01, 0.9076510210089642e+00, 0.5197394332656491e+00, 0.8460559187475114e-01, 0.3959477165848089e+00, 0.1014864219123556e+00, 0.4789226780612066e-01}, + n: 21, + zOut: []float64{0.2746085040517442e+01, 0.2006646947029628e+01, 0.1843368381126182e+01, 0.1738309804669461e+01, 0.1678478075610122e+01, 0.1408432532104903e+01, 0.1219097982435216e+01, 0.1121569511965291e+01, 0.9315294884679637e+00, 0.7914377647612258e+00, 0.7334412101899261e+00, 0.6113376828240070e+00, 0.4134386595159317e+00, 0.3303980566325673e+00, 0.2866331058299493e+00, 0.2287206720574312e+00, 0.1408981800627584e+00, 0.6649497607659796e-01, 0.5733628497886722e-01, 0.1387598234001070e-02, 0.2497701489633820e-04, 0.3487740233155725e-22, 0.3361270559917294e-05, 0.1214091727263821e-05, 0.1219097982435216e+01, 0.2843433355651033e-23, 0.3877657623657789e-06, 0.2861458561047538e-06, 0.1121569511965291e+01, 0.1501849799666580e-22, 0.7980776610337536e-07, 0.1296132789777516e-06, 0.9315294884679637e+00, 0.1397279453391396e-14, 0.3797866386428551e-06, 0.5462183171930727e-06, 0.7914377647612258e+00, 0.1231982124821006e-21, 0.6463973892160826e-06, 0.4555968092429713e-06, 0.7334412101899261e+00, 0.9423416829408119e-25, 0.1835506693210437e+02, 0.1835506693210436e+02, 0.9800000000000000e+02, 0.2884353741496599e+01, 0.2040816326530612e+01, 0.6355562713716383e-07, 0.4134386595159317e+00, 0.4284860196510039e-21, 0.4931868922851139e-05, 0.2760082849901006e-05, 0.3303980566325673e+00, 0.1448288531514775e-16, 0.4631299886341806e-05, 
0.9149811670123540e-05, 0.2866331058299493e+00, 0.1558000287811498e-28, 0.1337090019087612e-05, 0.8008074996380075e-06, 0.2287206720574312e+00, 0.2366582715663035e-29, 0.4303808308110317e-06, 0.3181248709867330e-06, 0.1408981800627584e+00, 0.2443697032165075e-19, 0.2973006759321362e-05, 0.5017304713868616e-05, 0.6649497607659796e-01, 0.1131941655598205e-16, 0.8328041136745409e-04, 0.8552508571764750e-04, 0.5733628497886722e-01, 0.9398173280441046e-25, 0.5097763200650848e-04, 0.5644224660768206e-04, 0.1387598234001070e-02, 0.8888394955180400e-22, 0.1868721561136477e-03, 0.1091351925463373e-03, 0.2497701489633820e-04, 0.0000000000000000e+00, 0.1020791029288490e-01, 0.2791888375404797e-02}, + info: 0, + }, + { + z: []float64{0.2396228092919251e+00, 0.6434601049058037e+00, 0.5488667726527494e+00, 0.1843504645657519e+00, 0.4269001369847240e+00, 0.6276093013166625e+00, 0.8724336393628729e-01, 0.5371039952903035e+00, 0.8447460457667564e+00, 0.4094766929502263e+00, 0.2031514124499976e+00, 0.8271428312997948e+00, 0.9658284302973903e+00, 0.3777353969048113e+00, 0.6471334022587341e+00, 0.6777435175679861e+00, 0.4456890557292859e+00, 0.1720826572683752e+00, 0.3520207432901141e+00, 0.3291396508341311e+00, 0.7468262262373744e+00, 0.2715104496960554e+00, 0.5612917863396235e+00, 0.1324429469286776e+00, 0.7983766064014711e+00, 0.2059599586019641e+00, 0.2655510256425775e-01, 0.1962390264484870e+00, 0.7845246896371560e-02, 0.8217737559711210e+00, 0.9444498218704492e+00, 0.4679957695831033e+00, 0.1135918053039552e+00, 0.1604948252571132e+00, 0.8452381101613837e+00, 0.8537070814716049e+00, 0.3013932596816266e+00, 0.2511715406702125e+00, 0.2008647638036438e+00, 0.2607897573444329e+00, 0.2571050910078490e+00, 0.6286174024463315e+00, 0.4739701849723611e+00, 0.8698947534844018e+00, 0.6486079589595961e+00, 0.8894321904301987e+00, 0.7952475840185558e+00, 0.8801661721215410e+00, 0.2147272194381030e+00, 0.9518356646476019e+00, 0.7191152289414877e+00, 0.1106524971173308e+00, 0.9574587230734148e+00, 0.6469474230679395e+00, 0.5818260757443584e+00, 0.1292202837075540e-01, 0.2761722913438078e+00, 0.8004535925585585e+00, 0.3278973855590038e+00, 0.5794037438664289e+00, 0.2396155443104175e-01, 0.6699048475753794e+00, 0.1814492937128086e+00, 0.2866184936222242e+00, 0.6584557212506269e+00, 0.5889651227775431e+00, 0.6369766519060738e-01, 0.1328193529598596e+00, 0.7416631417262871e+00, 0.5003318278706693e+00, 0.3144077296395376e-01, 0.9907393156403193e+00, 0.3423696594914594e-02, 0.6992922072850826e+00, 0.7590519617064543e+00, 0.2542182514116359e+00, 0.3314657184555089e+00, 0.5728659684019645e+00, 0.9915854796414423e+00, 0.4896749730105238e+00, 0.5611369720796141e+00, 0.6064142215043516e+00, 0.8567590164766914e-01, 0.2581731932678185e+00}, + n: 21, + zOut: []float64{0.2319327880258489e+01, 0.2079713994229665e+01, 0.1953873370330741e+01, 0.1755147049127663e+01, 0.1569664109844580e+01, 0.1497177879850755e+01, 0.1442939549198070e+01, 0.1142654640257259e+01, 0.1037259380018118e+01, 0.7336163063097807e+00, 0.6300572204242147e+00, 0.3966542874116493e+00, 0.3925821156107781e+00, 0.2721798695257625e+00, 0.2365431978248397e+00, 0.2044065204974935e+00, 0.1570073942488483e+00, 0.8918832674569614e-01, 0.5938710196407761e-01, 0.3241742658191688e-02, 0.4778146591607052e-04, 0.1325286320771300e-27, 0.1485620696270319e-14, 0.4653485276256090e-14, 0.1442939549198070e+01, 0.1846131417899111e-22, 0.1162592678154001e-11, 0.3263011919854044e-11, 0.1142654640257259e+01, 0.4412642697382792e-11, 0.1735169830505648e-08, 0.3333560963091213e-08, 
0.1037259380018118e+01, 0.1055565179678919e-18, 0.1940309609380032e-07, 0.3465529731850831e-07, 0.7336163063097807e+00, 0.4440642298320086e-11, 0.3810071749579748e-06, 0.5916980508399715e-06, 0.6300572204242147e+00, 0.4658681208910640e-20, 0.1797266971780259e+02, 0.1797266971780259e+02, 0.1110000000000000e+03, 0.3253968253968254e+01, 0.3603603603603604e+01, 0.2492194730770069e-13, 0.3925821156107781e+00, 0.0000000000000000e+00, 0.7667233942003248e-12, 0.2416678934089916e-12, 0.2721798695257625e+00, 0.2051915459060795e-16, 0.1487877135020801e-08, 0.6143545671036994e-08, 0.2365431978248397e+00, 0.1972152263052530e-29, 0.1960096418178738e-05, 0.2274829676605902e-05, 0.2044065204974935e+00, 0.2021903259098794e-21, 0.8745843979356665e-06, 0.9251106153042376e-06, 0.1570073942488483e+00, 0.5482898835648120e-25, 0.7118492322907369e-04, 0.3316595178521346e-03, 0.8918832674569614e-01, 0.1022725463125710e-19, 0.2167108979032774e-02, 0.1228336641518804e-02, 0.5938710196407761e-01, 0.1994240368398718e-26, 0.2711464061500906e-02, 0.2561834534417173e-02, 0.3241742658191688e-02, 0.1814380082008327e-27, 0.4237092909216320e-02, 0.4420407214578293e-02, 0.4778146591607052e-04, 0.2596519942803954e-20, 0.1820843269063370e-02, 0.8929397931439835e-02}, + info: 0, + }, + { + z: []float64{0.4704013808869884e+00, 0.8133248937317098e+00, 0.7246209022332254e+00, 0.9930301346620946e+00, 0.6565279241713462e-01, 0.7480498761899035e-01, 0.8311943018468732e+00, 0.4127487814312358e+00, 0.6620288516904083e+00, 0.2540390589975015e+00, 0.4830377447593585e+00, 0.6124023244542695e+00, 0.6830062885714996e+00, 0.4006497306883100e+00, 0.2108017907398356e+00, 0.9099126499549155e+00, 0.4756780627224959e+00, 0.8563186613679700e+00, 0.9168447467108382e+00, 0.5153963449488133e+00, 0.7188966629304918e+00, 0.2875525393671987e+00, 0.6200986280097742e-01, 0.3765186242141257e+00, 0.2589923271177115e+00, 0.5404093081826918e+00, 0.5343437371907505e+00, 0.3104227497255196e-01, 0.4221838478889377e+00, 0.4233113354732956e+00, 0.6486058235731677e+00, 0.1590032665029245e+00, 0.3877113621801469e+00, 0.6038849620024899e+00, 0.5400341407425849e+00, 0.7060013907006117e+00, 0.1173345165878200e+00, 0.8548311704210352e+00, 0.5804532700087333e+00, 0.5231399784962363e+00, 0.4754841524148748e+00, 0.3395282932287647e-01, 0.7668594432348432e+00, 0.8184682560752043e+00, 0.5633264054575460e-01, 0.2127176392631261e+00, 0.7432372685121402e+00, 0.3098453278874418e+00, 0.1150335122169536e+00, 0.8681635561145716e+00, 0.3593935949259885e+00, 0.9085822737263088e+00, 0.2122810259380176e+00, 0.3589456483065394e+00, 0.9437026007300053e-01, 0.6518875210051764e+00, 0.9046367728621729e+00, 0.7638617707854546e+00, 0.8924521343092110e+00, 0.8353112965912926e+00, 0.5076189975461921e+00, 0.4955137299922637e+00, 0.5041148053921253e+00, 0.7686114107944846e+00, 0.3833696141187980e+00, 0.9181102072361824e-01, 0.9260679284685853e+00, 0.7081687340916837e+00, 0.8312629527805896e+00, 0.8010881636433624e+00, 0.5891143654107377e+00, 0.8149927954305255e+00, 0.7192672792238343e+00, 0.9981622901025261e+00, 0.9368513325443091e+00, 0.6909941993236977e+00, 0.7208744872845105e+00, 0.6334330703247726e+00, 0.8856108044732016e+00, 0.7612967011342276e+00, 0.6840498257884192e+00, 0.8627400929877541e+00, 0.2970500386847904e+00, 0.9828991929119790e+00}, + n: 21, + zOut: []float64{0.2548749189884266e+01, 0.2318283655447516e+01, 0.1949991272828373e+01, 0.1903113092077109e+01, 0.1681173723989745e+01, 0.1621144658162279e+01, 0.1435953736019784e+01, 0.1373215273744876e+01, 0.1276544145324249e+01, 
0.8508650942792525e+00, 0.7433578075251207e+00, 0.7333566617981635e+00, 0.6327824956484905e+00, 0.5146506379994326e+00, 0.4497218333883862e+00, 0.2736876649003249e+00, 0.2262999758546705e+00, 0.6320618886076086e-01, 0.1413547856395308e-01, 0.6182735080657475e-02, 0.1223662826415605e-02, 0.9844984097158227e-26, 0.2664250906052687e-13, 0.8143635875649387e-14, 0.1435953736019784e+01, 0.7237830359838992e-24, 0.1147364015889613e-09, 0.4819137169184198e-10, 0.1373215273744876e+01, 0.5490471900338242e-27, 0.1654936529088367e-07, 0.7460880307437913e-08, 0.1276544145324249e+01, 0.2958228394578794e-30, 0.1455672577071588e-05, 0.7169454013791148e-06, 0.8508650942792525e+00, 0.4041314959719700e-19, 0.1917383253612269e-05, 0.1657209462004936e-05, 0.7433578075251207e+00, 0.2357464745776125e-22, 0.2061763898420382e+02, 0.2061763898420383e+02, 0.9600000000000000e+02, 0.2888888888888889e+01, 0.3125000000000000e+01, 0.6718966036888534e-06, 0.6327824956484905e+00, 0.1006441314168352e-20, 0.4252254757420258e-06, 0.3750418335727261e-06, 0.5146506379994326e+00, 0.1125208173090593e-20, 0.1634173128014842e-06, 0.1777130114194832e-06, 0.4497218333883862e+00, 0.1397861524051633e-26, 0.1373153840978587e-06, 0.1424351942740530e-06, 0.2736876649003249e+00, 0.3055476705078142e-15, 0.2530372171390620e-06, 0.3073674645773779e-06, 0.2262999758546705e+00, 0.3987473575125276e-15, 0.5814400581048744e-06, 0.7370753629310487e-06, 0.6320618886076086e-01, 0.9597492304117215e-21, 0.2453687283764395e-05, 0.3193610655869065e-05, 0.1413547856395308e-01, 0.8528167491936360e-20, 0.7231103044220720e-05, 0.9769896858142881e-05, 0.6182735080657475e-02, 0.0000000000000000e+00, 0.3039337033448658e-04, 0.5367078032707280e-04, 0.1223662826415605e-02, 0.6617444900424221e-23, 0.7353916438463039e-03, 0.2144171525794732e-02}, + info: 0, + }, + { + z: []float64{0.9714680453643365e-01, 0.6357804703219754e+00, 0.6664990519488578e-01, 0.1729636879767130e+00, 0.4638342338705743e+00, 0.4390808892462035e+00, 0.8041041328438946e+00, 0.4288731182982508e+00, 0.7624625505229141e+00, 0.7083386603826051e+00, 0.2464792096944004e+00, 0.9011775040224685e+00, 0.6339539510596026e-01, 0.1795036824346197e-01, 0.9473751738931280e+00, 0.4886196718733559e+00, 0.3845982510172906e+00, 0.8175566437299375e-01, 0.4796454941504319e+00, 0.1995303497008669e+00, 0.3542217290845850e+00, 0.4313075601907782e+00, 0.9062634832132850e+00, 0.9362490171108290e+00, 0.7454458344912314e+00, 0.9678028365928690e+00, 0.8785588729275070e+00, 0.2582842558253936e+00, 0.7937433110799214e+00, 0.7912406502001545e-01, 0.1813501620770241e-01, 0.3340784700607168e+00, 0.3348119644971841e+00, 0.5042866134297835e+00, 0.3808798817625770e+00, 0.8932448815066745e+00, 0.2612469692343123e+00, 0.4943911382967600e+00, 0.7396437014435053e+00, 0.6180267747680381e+00, 0.6768529193184074e+00, 0.7346572400466492e+00, 0.8658232798365260e+00, 0.6590888814314796e+00, 0.9020629495832085e+00, 0.2719526227756762e+00, 0.9337960905099231e+00, 0.1402012445044727e+00, 0.3363383773836534e+00, 0.4741712787560632e+00, 0.1270352327896518e-02, 0.2317652069389444e+00, 0.9583986396877475e+00, 0.8857657675309066e+00, 0.5564654172742324e+00, 0.3373121694331064e+00, 0.1866413892025005e+00, 0.3829716065530064e+00, 0.6660176769447556e+00, 0.9140619394089379e+00, 0.4126532961354976e+00, 0.9883322088463480e+00, 0.1678362296131076e+00, 0.1246866166129172e+00, 0.5083959276848560e+00, 0.4743484764552197e+00, 0.1561141239638767e+00, 0.4867628897662705e+00, 0.8068909470080686e+00, 0.9878597697456749e+00, 0.8535449450676436e-01, 
0.9609706407601909e+00, 0.7015171470525342e+00, 0.4808513401395343e+00, 0.4421870922261545e+00, 0.2476919441028158e-01, 0.2417065613871903e+00, 0.1174043678151601e+00, 0.4259626380136439e-02, 0.2614829171642307e+00, 0.5973781466265196e+00, 0.3195524674053074e-01, 0.4778949570977242e+00, 0.2741401812748340e+00}, + n: 21, + zOut: []float64{0.2956562879292357e+01, 0.2160920112738185e+01, 0.2057048821491083e+01, 0.1826665062315388e+01, 0.1626476426276149e+01, 0.1617659828407750e+01, 0.1458559932420392e+01, 0.1037845007643388e+01, 0.9436387491729998e+00, 0.8889042931978449e+00, 0.8147110102488896e+00, 0.7938854029224469e+00, 0.5240642869032099e+00, 0.4281570616174346e+00, 0.4032231719469492e+00, 0.2134769254146812e+00, 0.1570954054223686e+00, 0.5771888278451791e-01, 0.2771650645038780e-01, 0.1168397530048623e-02, 0.8626671344136174e-03, 0.6837796509826355e-18, 0.5416843762303492e-12, 0.3045298824248355e-12, 0.1458559932420392e+01, 0.8473754517266856e-24, 0.4783112378014045e-11, 0.8163747631331082e-11, 0.1037845007643388e+01, 0.8077935669463161e-27, 0.2442833758035109e-10, 0.3576901446935589e-10, 0.9436387491729998e+00, 0.1292469707114106e-25, 0.1060154010453575e-09, 0.7148292157772074e-10, 0.8889042931978449e+00, 0.5995342879679690e-28, 0.2265830666026058e-09, 0.3413067902462323e-09, 0.8147110102488896e+00, 0.6874131484520357e-19, 0.1999636083133089e+02, 0.1999636083133089e+02, 0.1000000000000000e+03, 0.3002267573696145e+01, 0.2000000000000000e+01, 0.2441378744031031e-06, 0.5240642869032099e+00, 0.2169367489357782e-29, 0.9564347597274963e-06, 0.8629398155870994e-06, 0.4281570616174346e+00, 0.1533177238774536e-16, 0.2198723423439569e-05, 0.1852641299824983e-05, 0.4032231719469492e+00, 0.1742814620686677e-24, 0.6898774546312495e-05, 0.1242988026413374e-04, 0.2134769254146812e+00, 0.1303198215425112e-25, 0.3054946518379762e-04, 0.2458010895625246e-04, 0.1570954054223686e+00, 0.2480178686014861e-25, 0.5573684427056079e-04, 0.2879079609909993e-04, 0.5771888278451791e-01, 0.0000000000000000e+00, 0.4895501114079361e-04, 0.6615663653270374e-04, 0.2771650645038780e-01, 0.6559433690090112e-18, 0.2090848897194263e-03, 0.3843856621792604e-03, 0.1168397530048623e-02, 0.0000000000000000e+00, 0.2396113765895983e-02, 0.4353699886144585e-02, 0.8626671344136174e-03, 0.1420984816841217e-20, 0.3572874909786609e-04, 0.2591037812233495e-05}, + info: 0, + }, + { + z: []float64{0.7761482897976135e-01, 0.7690133527529974e-01, 0.8589697242168557e+00, 0.5718901484414876e+00, 0.8450349347421171e-01, 0.5688250545090190e+00, 0.6202247495663940e+00, 0.7388100008042665e+00, 0.9890328265375476e+00, 0.2969659976080158e+00, 0.5442392738534707e-01, 0.9562602329262000e+00, 0.8530135563879812e+00, 0.5471012261519459e-01, 0.9710076793297340e+00, 0.4869447961696205e+00, 0.4659851368281548e+00, 0.7647605444900396e-01, 0.7872766307526106e+00, 0.3237029299700311e+00, 0.7194522935712468e+00, 0.7270865543961295e+00, 0.5183429612108851e+00, 0.3145688411100903e+00, 0.9096514649777033e+00, 0.7816253592835525e+00, 0.4809049584349555e+00, 0.2052154618933422e-01, 0.1481963756328628e+00, 0.8742358520807045e+00, 0.4565878430880338e+00, 0.1977913919695908e+00, 0.6526813957349178e+00, 0.8032154703264597e+00, 0.2356108621359847e+00, 0.4857415898450358e+00, 0.2259727396620309e+00, 0.1176413330041139e+00, 0.8335507369731385e+00, 0.4539477133958770e+00, 0.8910876835374724e+00, 0.1559757847548392e+00, 0.1885199219441583e+00, 0.1520953639100613e+00, 0.6035948595957944e+00, 0.9286538294716984e+00, 0.9676298024186124e+00, 0.1209629819848071e+00, 
0.1660874781174386e+00, 0.8622065054293394e+00, 0.7485077140876945e+00, 0.9420716698171637e+00, 0.2904751962009996e+00, 0.2180437045655230e+00, 0.6692025851955981e+00, 0.9258369042460884e+00, 0.6243742234586596e+00, 0.6760203577391158e+00, 0.2199572443863818e-02, 0.5981650670220221e+00, 0.8252506688545779e+00, 0.9433813762695346e+00, 0.5320093960532474e+00, 0.3846966829839934e+00, 0.6727780061271659e+00, 0.4937802596181958e+00, 0.1177802539787526e+00, 0.2963448754612064e-01, 0.2479547713122239e+00, 0.2694003173993550e+00, 0.8186090816580105e+00, 0.2886041049393229e-01, 0.7384561600560133e+00, 0.8345282295575730e+00, 0.1740871089016751e+00, 0.7946006936544379e+00, 0.5640836203667244e+00, 0.4951346622832815e+00, 0.4981407974176422e+00, 0.2513094341381288e+00, 0.4316077257561814e+00, 0.1685190107506288e+00, 0.3264570829830372e+00, 0.1676643753771256e-01}, + n: 21, + zOut: []float64{0.2309990065237698e+01, 0.2261037149894265e+01, 0.1946075797700671e+01, 0.1868880050926239e+01, 0.1838164096767709e+01, 0.1792848619787804e+01, 0.1601503570675395e+01, 0.1526383850720226e+01, 0.1395433670210245e+01, 0.1010109387319946e+01, 0.8644978203099237e+00, 0.7632668975571170e+00, 0.4552388959212355e+00, 0.4364144823342648e+00, 0.2696980874891977e+00, 0.1302983706884531e+00, 0.1278582769210706e+00, 0.7956979307866878e-01, 0.7389266586143965e-01, 0.8102308371415692e-02, 0.2690335013870282e-02, 0.5752609692135112e-13, 0.2842400234743069e-06, 0.1045980487121886e-05, 0.1601503570675395e+01, 0.1668093198756260e-18, 0.5190756964721459e-05, 0.2367180586416983e-05, 0.1526383850720226e+01, 0.1262177448353619e-27, 0.1974011051185169e-03, 0.4728862593194215e-03, 0.1395433670210245e+01, 0.2337388813815525e-19, 0.1632969150852556e-06, 0.4026698297028720e-06, 0.1010109387319946e+01, 0.3802651360818291e-21, 0.1380334146666910e-03, 0.1568641113059460e-03, 0.8644978203099237e+00, 0.2788260408793746e-19, 0.2076195419278686e+02, 0.2076195419278686e+02, 0.1070000000000000e+03, 0.3183673469387755e+01, 0.2803738317757009e+01, 0.1273578837393323e-04, 0.4552388959212355e+00, 0.1016579123433529e-20, 0.9426551662020599e-04, 0.5179870206653002e-04, 0.4364144823342648e+00, 0.7336406418555410e-28, 0.1112142557098828e-05, 0.1460057881679474e-05, 0.2696980874891977e+00, 0.8909623382378835e-18, 0.3232451044433717e-03, 0.3229988347563871e-03, 0.1302983706884531e+00, 0.0000000000000000e+00, 0.2867336104451064e-03, 0.2890982620847502e-03, 0.1278582769210706e+00, 0.6875938841518755e-23, 0.2127348925826359e-06, 0.9655734242129450e-07, 0.7956979307866878e-01, 0.3291208348307372e-17, 0.5717743824937758e-07, 0.2183606505996598e-06, 0.7389266586143965e-01, 0.7801771243763389e-24, 0.1012732311499954e-04, 0.2863554462185051e-05, 0.8102308371415692e-02, 0.5719241562852336e-29, 0.6292939995324734e-04, 0.3382281194073554e-04, 0.2690335013870282e-02, 0.4951522129891247e-25, 0.3008856418189283e-03, 0.2691027365699742e-03}, + info: 0, + }, + { + z: []float64{0.1834451937168287e+00, 0.4501020603239488e+00, 0.9673873583575473e+00, 0.2207688184907047e+00, 0.4205773270263516e+00, 0.6767264871143511e+00, 0.2493506785870784e+00, 0.7809638015955807e+00, 0.8297195492174936e+00, 0.1617611869044215e+00, 0.9206032300762146e+00, 0.1914715918877316e+00, 0.7976268296797403e+00, 0.9839522457816396e+00, 0.8841932579084628e+00, 0.6979540605797908e+00, 0.8049859576106227e+00, 0.8117504351099887e+00, 0.3842107296635322e+00, 0.6368881208030270e+00, 0.2369530469547876e+00, 0.8005533377467571e+00, 0.7105152962140180e+00, 0.5356376472112734e-02, 0.2192323464228777e+00, 
0.8992715082939398e+00, 0.5988055508193698e+00, 0.4081645962203215e+00, 0.1002944447483383e+00, 0.1052431248059748e+00, 0.5730054733733171e+00, 0.2386396328544268e+00, 0.6503058109411604e+00, 0.9019164849722401e+00, 0.4702244241412763e+00, 0.5759947448793571e+00, 0.7588959717342861e+00, 0.4126392985506339e+00, 0.9987799941641187e+00, 0.5598172962702971e+00, 0.2151524348024290e+00, 0.7511708041336773e-01, 0.1539602741757972e+00, 0.4584191954304165e+00, 0.8937659116858414e+00, 0.6186897030318780e+00, 0.6386630452844105e+00, 0.9151803046984296e+00, 0.3614526997644965e+00, 0.9143967867261915e+00, 0.1523575506386343e+00, 0.9745493424336691e+00, 0.4149640117372425e+00, 0.7418302113477876e+00, 0.3041602774831942e-01, 0.4852660298208696e+00, 0.9115870107661288e+00, 0.9024516441581734e+00, 0.9201550825657324e+00, 0.5200815862126855e+00, 0.7353784634707106e+00, 0.8264932398040087e+00, 0.7035838297463828e+00, 0.6738634069153341e+00, 0.7657166064092835e-01, 0.4678396966074577e+00, 0.9927685693816569e+00, 0.5845157970225999e+00, 0.5638346985777254e+00, 0.5850492500516753e+00, 0.3856054034589584e+00, 0.8134648529141429e+00, 0.4174766661784889e+00, 0.3336151222017159e+00, 0.5355530597844248e+00, 0.9274106499154580e+00, 0.7442750408730826e+00, 0.2948331405701974e+00, 0.5696937454611060e+00, 0.3104181547964658e+00, 0.4694844160987418e+00, 0.1538578615150018e+00, 0.5969883537795497e+00, 0.3873507919637014e+00}, + n: 21, + zOut: []float64{0.2914952592069403e+01, 0.2353566179964282e+01, 0.2062119351173516e+01, 0.1965819938313762e+01, 0.1942714243081098e+01, 0.1812271492844174e+01, 0.1712144141310849e+01, 0.1666892005264074e+01, 0.1208030687686805e+01, 0.1159871012571561e+01, 0.9581193432281622e+00, 0.7738358894690665e+00, 0.7454032361827709e+00, 0.3901865930581417e+00, 0.3240765847275939e+00, 0.2173673669709887e+00, 0.1778074780703053e+00, 0.5660863840842725e-01, 0.3038731391621440e-01, 0.1466323002394464e-01, 0.7362797481955845e-02, 0.1496427221292385e-15, 0.2798385737764086e-17, 0.6348906733176861e-18, 0.1712144141310849e+01, 0.2625127144183791e-23, 0.5842516095868784e-15, 0.1699577922475928e-15, 0.1666892005264074e+01, 0.2934562567422164e-27, 0.7233314809955462e-13, 0.2286859678494404e-12, 0.1208030687686805e+01, 0.8467561517631538e-12, 0.2143114506868008e-08, 0.4305471816838367e-08, 0.1159871012571561e+01, 0.4930380657631324e-31, 0.3270528389239616e-07, 0.6410715811146478e-07, 0.9581193432281622e+00, 0.8911174733768286e-23, 0.2249420011581710e+02, 0.2249420011581710e+02, 0.1050000000000000e+03, 0.3197278911564626e+01, 0.2857142857142857e+01, 0.6896683202589802e-07, 0.7454032361827709e+00, 0.3255610023185390e-23, 0.5315428740895598e-04, 0.2875762635390457e-03, 0.3901865930581417e+00, 0.4610239445276015e-22, 0.9146669702187219e-04, 0.4183987855699404e-04, 0.3240765847275939e+00, 0.4688433862556419e-23, 0.8204008598914952e-05, 0.1523341313649621e-04, 0.2173673669709887e+00, 0.4570683409889788e-20, 0.9316307389873694e-05, 0.2741587710667867e-05, 0.1778074780703053e+00, 0.3706159570762342e-17, 0.9605798472890332e-04, 0.3099752510959849e-03, 0.5660863840842725e-01, 0.5638682600030866e-16, 0.1627758109301002e-02, 0.5977814903797443e-03, 0.3038731391621440e-01, 0.7161878634670464e-18, 0.4935537640644100e-05, 0.4281293465082162e-05, 0.1466323002394464e-01, 0.9343665470806783e-19, 0.1384059308707374e-05, 0.7139695062761456e-05, 0.7362797481955845e-02, 0.1252221392641006e-22, 0.1398979151771859e-02, 0.1830558814226920e-02}, + info: 0, + }, + { + z: []float64{0.1901253435444810e+00, 0.1005117877448146e+00, 
0.7431497892893435e-02, 0.4180244733822649e+00, 0.5723993917742813e+00, 0.5092276894610764e+00, 0.6914570054828403e+00, 0.6343440208263138e+00, 0.9714240100252226e+00, 0.9708780190932921e+00, 0.7346948492781401e+00, 0.9261658900466149e+00, 0.6500321763704298e-01, 0.2101988980308164e+00, 0.2569765082490436e+00, 0.6872855071573251e+00, 0.7393915007490531e+00, 0.5293076476804583e+00, 0.5736721160165409e+00, 0.6249080544922647e+00, 0.1805969142240157e+00, 0.6235867578362418e+00, 0.3156395994838945e+00, 0.6768110107418109e+00, 0.5036038244498190e+00, 0.2096885591435443e+00, 0.2471056170621776e+00, 0.4420201221816504e+00, 0.5954916898507195e+00, 0.6849385183400085e+00, 0.6865376402638398e+00, 0.5015659601935997e+00, 0.9646916309223035e+00, 0.1976274617660123e+00, 0.6173831761654865e+00, 0.9161785290588952e+00, 0.8303766381148641e+00, 0.4163407208236638e+00, 0.2406183750735502e-01, 0.9033217043220854e+00, 0.3435327703530300e-01, 0.4851293431546745e+00, 0.1308952665690980e+00, 0.1639949900839996e+00, 0.6530528840204755e+00, 0.4007776795656349e+00, 0.6520732821596675e-01, 0.2482448577499247e+00, 0.6417548063561368e+00, 0.6287927677318972e+00, 0.2491821746122613e+00, 0.1309766085785375e+00, 0.1799023200797401e+00, 0.6166520678731939e+00, 0.4068347418426869e+00, 0.8022499378251485e+00, 0.2090643045880468e+00, 0.9552039287477454e-01, 0.3405285833145572e+00, 0.9010017729234689e+00, 0.8087026571264999e-01, 0.6784547103017031e+00, 0.5001579210546183e+00, 0.9044563563261202e+00, 0.6248337185292430e-01, 0.7231216956646513e+00, 0.3330924448715603e+00, 0.5906941805835393e+00, 0.6144619726434308e-01, 0.4494963607638514e-01, 0.5817524116985039e+00, 0.4323943825883398e+00, 0.9871518362913079e-01, 0.9577530955877981e+00, 0.7720928198042669e+00, 0.1473259710759589e+00, 0.7227644962870149e+00, 0.9964485210410867e+00, 0.3235582385765219e+00, 0.5984954533580267e-01, 0.1531719633524309e-01, 0.8969070444410814e+00, 0.8320043732823273e+00, 0.7541623800831910e+00}, + n: 21, + zOut: []float64{0.2943190889471759e+01, 0.2404733889291933e+01, 0.2164120218254414e+01, 0.2101040443717650e+01, 0.1828309865967689e+01, 0.1647179310188885e+01, 0.1393092850251358e+01, 0.1217421086377993e+01, 0.1157068329170861e+01, 0.9693609911235493e+00, 0.7556225164779018e+00, 0.6051010122910800e+00, 0.5244111548905432e+00, 0.4204021043595824e+00, 0.3294230503008973e+00, 0.2905705444026201e+00, 0.1318042053167392e+00, 0.6748792703931331e-01, 0.3115549452672914e-01, 0.3850968595106850e-02, 0.1766035472385010e-05, 0.2996366283941816e-12, 0.8018633237482923e-17, 0.2249911398990393e-16, 0.1393092850251358e+01, 0.8588781736260544e-19, 0.1768526861021460e-14, 0.8068659675231548e-15, 0.1217421086377993e+01, 0.2465190328815662e-30, 0.6841595655394777e-14, 0.1344694857995563e-13, 0.1157068329170861e+01, 0.2714422906896024e-19, 0.9771202940807984e-13, 0.1895423136689650e-12, 0.9693609911235493e+00, 0.1093011646009194e-15, 0.5148224565067754e-11, 0.9904113235317579e-11, 0.7556225164779018e+00, 0.6752649348691861e-27, 0.2098534861805207e+02, 0.2098534861805208e+02, 0.9600000000000000e+02, 0.2832199546485261e+01, 0.2083333333333333e+01, 0.3437245789676428e-09, 0.5244111548905432e+00, 0.2665011951551765e-23, 0.6566411319039389e-08, 0.3635282930812000e-08, 0.4204021043595824e+00, 0.1249353725478346e-22, 0.6690013285699780e-07, 0.1185995569029548e-06, 0.3294230503008973e+00, 0.3250106929510569e-26, 0.6264577969246473e-06, 0.1086745069147475e-05, 0.2905705444026201e+00, 0.1121066009627684e-24, 0.5553814969713904e-05, 0.9554866377781883e-05, 
0.1318042053167392e+00, 0.1875595688253478e-25, 0.3215548179944659e-03, 0.4036498498040293e-03, 0.6748792703931331e-01, 0.4043956767801041e-20, 0.1191830515693705e-02, 0.1851908589351717e-02, 0.3115549452672914e-01, 0.1124126789939942e-28, 0.7898349055483677e-03, 0.3617649115749287e-02, 0.3850968595106850e-02, 0.4455486392688275e-26, 0.3971779475237804e-01, 0.1814591379891354e-01, 0.1766035472385010e-05, 0.2780981219403279e-20, 0.1014029004767683e-01, 0.8718057102094371e-02}, + info: 0, + }, + { + z: []float64{0.3437994630115073e+00, 0.2969584004763312e+00, 0.5428789456218400e+00, 0.3643219910857877e+00, 0.8842011736515154e-01, 0.4426793535323088e+00, 0.9851327949196382e+00, 0.9928836193844874e+00, 0.9294888334528270e+00, 0.5131906939013953e+00, 0.8372743749579292e-01, 0.3799113781856175e+00, 0.9980979619320958e+00, 0.1940068951405882e+00, 0.1265916571930514e+00, 0.7533120158726621e+00, 0.7604310151358937e+00, 0.9521608240404056e-01, 0.6048411980078113e+00, 0.8887307551694945e-01, 0.9109406880842844e+00, 0.9457532858494480e+00, 0.7192031611135805e+00, 0.5051759890546635e+00, 0.1927673350758418e+00, 0.6784576615085791e+00, 0.4582307655806892e+00, 0.4110913566635671e+00, 0.5843861752552199e+00, 0.1849014446101248e+00, 0.5286425989283670e+00, 0.9425316584065953e+00, 0.9757360470900053e+00, 0.4697986765545260e+00, 0.5423082602059015e-01, 0.4516134263086808e+00, 0.2022962345269688e+00, 0.4899666677522230e-01, 0.9657583521001409e+00, 0.7147044884479304e+00, 0.8698989147298346e+00, 0.1620630683580001e+00, 0.4917083507892124e+00, 0.6587971895356357e+00, 0.2830046270176211e+00, 0.8907428232631098e-01, 0.3385480253038764e-01, 0.3808208973559474e+00, 0.6301790073720519e+00, 0.7283330529106518e+00, 0.8754045015579843e+00, 0.5691786300850922e+00, 0.2221459247333947e+00, 0.3807253530058278e+00, 0.5099681617674874e+00, 0.8815981352292609e+00, 0.2670679959789202e-02, 0.3061419069919761e+00, 0.9088634553510815e+00, 0.3137439308670030e+00, 0.3256630871489109e+00, 0.7397175549546020e-01, 0.8600700672393357e+00, 0.2944165267259768e-01, 0.1962740610392520e+00, 0.7223088666910011e+00, 0.1555262415199788e+00, 0.7683934132272936e+00, 0.9837306979871404e+00, 0.4307236017454147e+00, 0.2763099157748476e+00, 0.5623054633156477e-01, 0.1414438705425911e+00, 0.3910643421231946e+00, 0.3414855836828583e+00, 0.4833767934540261e+00, 0.1874948602631823e+00, 0.6456680649630069e+00, 0.5377739930676261e+00, 0.4911199998049517e+00, 0.3925147326457343e+00, 0.1205990368593943e+00, 0.6208170182205421e+00, 0.4308681300066786e+00}, + n: 21, + zOut: []float64{0.2838978926644850e+01, 0.2478331275466363e+01, 0.2456585262660691e+01, 0.2180185738821122e+01, 0.1712066680562540e+01, 0.1602701074250422e+01, 0.1503969746560551e+01, 0.1233768461187729e+01, 0.9787455830966086e+00, 0.9668289373592873e+00, 0.7729605529095109e+00, 0.7035556930615693e+00, 0.5295938697567051e+00, 0.4165425267136870e+00, 0.3781386605204551e+00, 0.3701077812928417e+00, 0.1575477292510020e+00, 0.7328369731218511e-01, 0.3974472017556452e-01, 0.4256064253578745e-02, 0.1985700463370783e-02, 0.0000000000000000e+00, 0.2182718854229073e-04, 0.3091215918753839e-04, 0.1503969746560551e+01, 0.7889046064910921e-15, 0.8525655623819580e-10, 0.5987167574554257e-09, 0.1233768461187729e+01, 0.1850899338648654e-19, 0.6965867087478826e-06, 0.6532556049979120e-06, 0.9787455830966086e+00, 0.3101927297073854e-24, 0.3608646891594891e-06, 0.2907657476508735e-06, 0.9668289373592873e+00, 0.5936688180990121e-20, 0.1421522625242826e-07, 0.7063390122609291e-08, 0.7729605529095109e+00, 
0.1231338578304599e-17, 0.2139987868232064e+02, 0.2139987868232063e+02, 0.9600000000000000e+02, 0.2854875283446712e+01, 0.2083333333333333e+01, 0.2575545978160743e-06, 0.5295938697567051e+00, 0.8537368260663878e-24, 0.6647760383095535e-05, 0.6726768703723720e-05, 0.4165425267136870e+00, 0.5898327163094257e-18, 0.6007815560144262e-05, 0.6070984870789135e-05, 0.3781386605204551e+00, 0.1932242212135588e-23, 0.5780348596252855e-05, 0.5727260717649887e-05, 0.3701077812928417e+00, 0.2291245128765917e-16, 0.5589406660782675e-05, 0.5617175435749363e-05, 0.1575477292510020e+00, 0.3855952104720306e-26, 0.8404567502440466e-05, 0.1060626986994499e-04, 0.7328369731218511e-01, 0.1176980470589750e-26, 0.3402042835087641e-05, 0.5518338211116594e-05, 0.3974472017556452e-01, 0.3697785493223493e-30, 0.6423930978115149e-05, 0.5690659965930955e-05, 0.4256064253578745e-02, 0.1813010266067178e-20, 0.3330330670135602e-05, 0.3509207052336586e-05, 0.1985700463370783e-02, 0.9911627066431288e-23, 0.2656536456367949e-04, 0.9644853918428974e-05}, + info: 0, + }, + { + z: []float64{0.6087347824331316e+00, 0.2344592930815549e+00, 0.1839180152431182e+00, 0.8946895357068063e-01, 0.3789275576065679e+00, 0.1028144130339237e-01, 0.8432962228323871e+00, 0.5720035990048252e+00, 0.9674750777949459e+00, 0.1387850858951470e+00, 0.1049983295079311e+00, 0.1869597635484084e+00, 0.2049847474830355e+00, 0.9353902309354832e+00, 0.7711597045807418e+00, 0.6677931698783810e+00, 0.5710020050743191e+00, 0.7559352115290241e+00, 0.8799700817290574e+00, 0.2989086447618220e+00, 0.7223221852829088e+00, 0.9172044263776469e-01, 0.8222766733565161e+00, 0.4357755436106698e+00, 0.3174399370719186e+00, 0.3716323007311311e+00, 0.5710261889832198e+00, 0.5875728000816982e-01, 0.1970825867540514e+00, 0.7514821290356389e+00, 0.4768968814440278e+00, 0.1414806690779697e+00, 0.5126469713172739e+00, 0.2618595999903317e-01, 0.3414455658478163e+00, 0.7845736413672471e+00, 0.3193997824316201e+00, 0.5244207174429636e+00, 0.7114985438553419e+00, 0.6380953409710879e+00, 0.5110544105167975e+00, 0.6987353947091863e+00, 0.2359552404464268e+00, 0.1389779414569779e+00, 0.4283565484470430e+00, 0.5978343235372734e+00, 0.8941068622870674e+00, 0.9993918591572127e+00, 0.9276104816334276e+00, 0.6840640077048410e+00, 0.1906252952212643e+00, 0.3896055694019390e+00, 0.8889540838698040e-01, 0.6972017776398595e+00, 0.7403854978121883e+00, 0.9624319129844994e-01, 0.7107468555423346e-01, 0.2167351328914763e+00, 0.5250363507339506e-01, 0.3012298012035964e+00, 0.7467053513110199e+00, 0.6311877906561809e+00, 0.6465457722939830e+00, 0.9462483307457270e-01, 0.1615629644808589e+00, 0.3714392567485900e+00, 0.5909086992570731e+00, 0.2147078824711308e+00, 0.8083150503324107e+00, 0.1136496291628873e+00, 0.3556622878263437e+00, 0.1687673308056986e+00, 0.2663369573375586e+00, 0.9992292880074174e+00, 0.4743706799728067e+00, 0.3784791276003248e+00, 0.9641428139214289e+00, 0.4526980047510465e-01, 0.5291384859598749e+00, 0.9114927597067145e+00, 0.2015740390806801e+00, 0.9111109510473957e+00, 0.4797378470164849e+00, 0.4344842342843944e+00}, + n: 21, + zOut: []float64{0.2535248843080872e+01, 0.2027297538476471e+01, 0.1999341423958793e+01, 0.1905819580237666e+01, 0.1558994176508815e+01, 0.1443542425236611e+01, 0.1321312714967014e+01, 0.1044341128979687e+01, 0.9175998827905956e+00, 0.8543653487373744e+00, 0.6128068085852322e+00, 0.5723484802692980e+00, 0.4865842293721045e+00, 0.4743009418296827e+00, 0.4424121716041568e+00, 0.2157744105271115e+00, 0.1401155020588904e+00, 0.9545054691199920e-01, 
0.4846378139764250e-01, 0.2597131641839770e-01, 0.9574417588705024e-02, 0.7983827718913915e-23, 0.5897957913656464e-08, 0.4905965358471980e-08, 0.1321312714967014e+01, 0.1437620113674772e-24, 0.2330664696165742e-08, 0.2036082149427908e-08, 0.1044341128979687e+01, 0.7099748146989106e-29, 0.1191493821660667e-08, 0.1294122655697948e-08, 0.9175998827905956e+00, 0.7069460558567906e-16, 0.2320495180901029e-08, 0.3242686959161780e-08, 0.8543653487373744e+00, 0.3414430154917050e-13, 0.2143028595220802e-07, 0.3181347787023606e-07, 0.6128068085852322e+00, 0.0000000000000000e+00, 0.1873166566953712e+02, 0.1873166566953712e+02, 0.1040000000000000e+03, 0.3224489795918367e+01, 0.2884615384615385e+01, 0.3533200143891392e-06, 0.4865842293721045e+00, 0.4552522969733916e-17, 0.1776353409716398e-09, 0.6419018055060889e-09, 0.4743009418296827e+00, 0.4679164786798698e-18, 0.4782181507340309e-06, 0.1264406900548246e-06, 0.4424121716041568e+00, 0.1728289684559415e-20, 0.3046857378867568e-04, 0.2182730305393890e-04, 0.2157744105271115e+00, 0.1120182485413837e-27, 0.4003877877583194e-03, 0.2588513428463001e-03, 0.1401155020588904e+00, 0.0000000000000000e+00, 0.6916990427934659e-04, 0.5501406364660823e-04, 0.9545054691199920e-01, 0.6162975822039155e-32, 0.1059944134272042e-03, 0.1421052287262650e-03, 0.4846378139764250e-01, 0.1755514705909767e-17, 0.4499204819889415e-04, 0.2567010249779503e-03, 0.2597131641839770e-01, 0.1143848312570467e-28, 0.5289175880660015e-04, 0.5437725505075631e-04, 0.9574417588705024e-02, 0.3697785493223493e-31, 0.2606391166909807e-05, 0.6294847321319143e-06}, + info: 0, + }, + { + z: []float64{0.3257261020965838e+00, 0.9499222527145429e+00, 0.3597416524070001e+00, 0.7608369220069154e+00, 0.3348964244425234e-01, 0.9918240230913730e+00, 0.2435073049094116e-01, 0.2927090414647138e-01, 0.2724578324033788e+00, 0.7486167980650003e+00, 0.2854897136111678e+00, 0.2267361867398041e+00, 0.4294380831801550e+00, 0.7722184192395405e+00, 0.5889718788171796e+00, 0.5201154937785833e+00, 0.1341644467607439e-01, 0.3251983707424297e+00, 0.3712514383296180e+00, 0.8346822920283823e+00, 0.2613611110008908e+00, 0.7377020224374462e+00, 0.4290238209924468e-01, 0.8422463567888154e+00, 0.1390406385354674e+00, 0.8899155813346852e+00, 0.5366293959824918e+00, 0.1311808378334547e+00, 0.5127829646320820e+00, 0.3806825827063995e+00, 0.9008244723505014e+00, 0.8397173164045483e+00, 0.8005434668537068e+00, 0.4188784622408616e+00, 0.4815341947551696e+00, 0.2004130034689415e+00, 0.9620560381224039e+00, 0.9936712626034268e+00, 0.7996240982628953e+00, 0.9066259300527590e+00, 0.7119745510778175e+00, 0.4009593231138309e+00, 0.7503038869147766e+00, 0.2307657656692345e+00, 0.5796381959344894e+00, 0.4564585857276309e+00, 0.3087632483431468e-01, 0.4925813444884515e+00, 0.2548189626174928e+00, 0.7548007003619551e+00, 0.2595357873879023e+00, 0.2576161595637174e+00, 0.6530834734960895e+00, 0.9659056802808288e+00, 0.4073417244577716e+00, 0.7973054858347279e+00, 0.1098542135402614e-03, 0.5270152208809449e+00, 0.9503221531609122e+00, 0.3389760481404548e+00, 0.2936463197963648e+00, 0.1532837593240226e+00, 0.7039965742744211e+00, 0.1538522429841973e+00, 0.4565500206514618e+00, 0.4890306322657354e+00, 0.7406911389536464e+00, 0.4609110724077861e+00, 0.3010957214922741e+00, 0.4706875322819619e+00, 0.9245411658881026e+00, 0.1225486958523536e-02, 0.7513538867599376e+00, 0.7480092012350148e+00, 0.1668544624863488e+00, 0.6027497133760584e+00, 0.4060848284780354e+00, 0.7777661676033926e+00, 0.5476587806685121e+00, 0.3318235867866506e+00, 
0.4865355710680780e+00, 0.9789804028805801e+00, 0.6449091593541079e-01, 0.3386899175207851e+00}, + n: 21, + zOut: []float64{0.2916446272919928e+01, 0.2518523111789073e+01, 0.1881620740030630e+01, 0.1802535552376618e+01, 0.1705712498790935e+01, 0.1630934300031556e+01, 0.1493206464268041e+01, 0.1289863129801601e+01, 0.1141879970494692e+01, 0.1064939765404915e+01, 0.9148995873510519e+00, 0.6667419164223021e+00, 0.5793746811807822e+00, 0.5763823737203533e+00, 0.5577209455074719e+00, 0.2258972104173268e+00, 0.1775772659782911e+00, 0.1224839196229167e+00, 0.7643325286481288e-01, 0.1088888144792382e-01, 0.1013218159024319e-07, 0.1529626957613473e-16, 0.4760904670371191e-26, 0.1685698503770275e-25, 0.1493206464268041e+01, 0.4740164255731978e-15, 0.6203207773025231e-22, 0.1697026618649852e-21, 0.1289863129801601e+01, 0.1555744028694447e-17, 0.2220966892754199e-18, 0.8352563633635413e-19, 0.1141879970494692e+01, 0.8283039504820624e-29, 0.9137114046044313e-17, 0.2186983724354431e-16, 0.1064939765404915e+01, 0.7560947786617519e-24, 0.1476623491156538e-14, 0.6521525514986541e-15, 0.9148995873510519e+00, 0.3956572890903056e-23, 0.2135406185055340e+02, 0.2135406185055340e+02, 0.1110000000000000e+03, 0.3462585034013606e+01, 0.7207207207207207e+01, 0.3107640874581820e-11, 0.5793746811807822e+00, 0.0000000000000000e+00, 0.5611258671513064e-10, 0.1000932867794445e-09, 0.5763823737203533e+00, 0.2935229715008717e-19, 0.5869279282596528e-09, 0.3232392517855140e-09, 0.5577209455074719e+00, 0.7161080495560552e-17, 0.6771347567635994e-08, 0.1273942735381484e-07, 0.2258972104173268e+00, 0.2531843062293473e-19, 0.3233128964696781e-05, 0.1839431244951091e-05, 0.1775772659782911e+00, 0.1025519176787315e-28, 0.1632093348006395e-04, 0.1394944946706296e-04, 0.1224839196229167e+00, 0.3147671031182890e-20, 0.4090539240175316e-04, 0.3499438731982919e-04, 0.7643325286481288e-01, 0.4418024966021139e-23, 0.7751221398283570e-04, 0.6522231732461535e-04, 0.1088888144792382e-01, 0.0000000000000000e+00, 0.1168516922729128e-02, 0.6798960389683572e-03, 0.1013218159024319e-07, 0.8311253061997563e-15, 0.2266806408535672e-02, 0.8228900118596701e-02}, + info: 0, + }, + { + z: []float64{0.9013837094783728e-01, 0.4093071371456288e+00, 0.7563443593246222e-01, 0.6047712831067179e+00, 0.3616734158068488e+00, 0.3179352368154293e+00, 0.4877726790168605e+00, 0.7283754332956183e+00, 0.6564126870433257e+00, 0.7426267273836333e+00, 0.9575970808689874e+00, 0.2328163542816707e+00, 0.9804488839127946e+00, 0.5880329556580101e+00, 0.6720101508999355e+00, 0.3029641470022006e+00, 0.9176198736416905e+00, 0.9273523415722473e-01, 0.7356501689695268e+00, 0.3516482504433937e+00, 0.5613309946663773e+00, 0.8064085547776983e+00, 0.2421909924481950e+00, 0.4321291285180304e+00, 0.7195726435630079e+00, 0.4201959590716675e-01, 0.6971840239025084e-03, 0.6931587576362976e+00, 0.4512374813335629e+00, 0.9105433528826555e+00, 0.8233348499205092e+00, 0.5270028711667427e+00, 0.5092902528068642e+00, 0.6527825111058926e+00, 0.8077059946216197e+00, 0.5037249441803524e+00, 0.7474524841642615e+00, 0.8361353459662745e+00, 0.6675800963161582e+00, 0.5004244151247563e+00, 0.7976263900002633e+00, 0.1983499023889042e+00, 0.7522366994446783e+00, 0.3654861843844912e+00, 0.8247542543249066e+00, 0.9124235995852386e+00, 0.1818459043196227e+00, 0.8196813052173683e+00, 0.5538880208895494e+00, 0.9080443570480472e+00, 0.2981632101337024e+00, 0.6044133722200155e+00, 0.1883025355842233e+00, 0.2434431972046255e+00, 0.3420561860884004e+00, 0.3312086041097358e+00, 0.8261796050670067e-01, 
0.2597483126975232e+00, 0.9037350022806145e+00, 0.5691251256015616e+00, 0.5433137600205407e-01, 0.2948423088781353e+00, 0.7125818959241866e+00, 0.1168360183447441e+00, 0.2271931321361120e-01, 0.5832011197836875e+00, 0.4321285977614958e+00, 0.9214755199664483e+00, 0.2929460789941122e+00, 0.9911839954873096e+00, 0.9352897353900299e+00, 0.6549195608155846e+00, 0.7733376794047198e-01, 0.7345459966842849e+00, 0.2997108680399072e+00, 0.6923968300089707e+00, 0.9895794156524929e+00, 0.2694202749089550e+00, 0.2419924375765692e-01, 0.2728256718882756e+00, 0.7688978378093437e+00, 0.6794051971520565e-01, 0.8211384369868754e+00, 0.7581877817293103e-01}, + n: 21, + zOut: []float64{0.2587082763537058e+01, 0.2518438946693926e+01, 0.2381638511714923e+01, 0.2064758819607097e+01, 0.1861546331198453e+01, 0.1823771413694203e+01, 0.1462413605342042e+01, 0.1295483717305927e+01, 0.1204792654692795e+01, 0.1153126691944095e+01, 0.9646786889507554e+00, 0.8876075724637406e+00, 0.7328920239265601e+00, 0.5128961040525114e+00, 0.4187721082034697e+00, 0.3198505150609477e+00, 0.1661143074789019e+00, 0.1249876230738883e+00, 0.5554047196429147e-01, 0.2079344839078052e-02, 0.4713171572265143e-04, 0.1874281408120013e-19, 0.2104034717604745e-09, 0.2619972849686678e-09, 0.1462413605342042e+01, 0.9259545558536950e-18, 0.2890370685025388e-10, 0.3242632456342165e-10, 0.1295483717305927e+01, 0.1323488980084844e-22, 0.2279224462592833e-10, 0.2555931367214743e-10, 0.1204792654692795e+01, 0.2978738778114541e-26, 0.7485181957332586e-10, 0.4566030880095840e-10, 0.1153126691944095e+01, 0.6733803272339514e-21, 0.2633277318950464e-08, 0.1440235383519861e-08, 0.9646786889507554e+00, 0.7499216124683276e-17, 0.2253851934746039e+02, 0.2253851934746038e+02, 0.1040000000000000e+03, 0.3111111111111111e+01, 0.5769230769230769e+01, 0.2633869415622400e-06, 0.7328920239265601e+00, 0.6980417424693642e-13, 0.1603044445805627e-06, 0.1255532606981617e-06, 0.5128961040525114e+00, 0.6981404369947554e-21, 0.8484909168369356e-07, 0.7367232980661418e-07, 0.4187721082034697e+00, 0.2465190328815662e-30, 0.1470967366725541e-06, 0.2274394647752198e-06, 0.3198505150609477e+00, 0.9424343740971753e-25, 0.8932721831570202e-05, 0.5259727349591482e-05, 0.1661143074789019e+00, 0.1697763391846713e-18, 0.4712327641645974e-05, 0.7305995006514358e-05, 0.1249876230738883e+00, 0.9266645908481002e-19, 0.3596318120009657e-04, 0.2001742175196116e-04, 0.5554047196429147e-01, 0.5985976337495067e-16, 0.4811066675748340e-03, 0.3416912105856511e-03, 0.2079344839078052e-02, 0.8697917232094458e-24, 0.5710266423841766e-02, 0.2981491759065521e-02, 0.4713171572265143e-04, 0.0000000000000000e+00, 0.3288287603570621e-02, 0.1038987759212308e-02}, + info: 0, + }, + { + z: []float64{0.3453921918700114e+00, 0.8256584009865165e+00, 0.5027306146870630e+00, 0.5014619412470180e+00, 0.9910264683396075e+00, 0.7385152238869899e+00, 0.9936695869989243e+00, 0.6206355260490602e+00, 0.6835539321104728e+00, 0.3987727521414011e+00, 0.2155721742355131e+00, 0.8212295891891395e+00, 0.7021626430853991e+00, 0.3753314442932942e+00, 0.2575710525141791e+00, 0.6551328468445583e+00, 0.3738398426943970e+00, 0.8805730558289802e+00, 0.6848909798416613e+00, 0.5520069040756137e-01, 0.8817067703079440e+00, 0.3238117712584516e+00, 0.1001825089826291e+00, 0.7369585910327309e+00, 0.7601318359400422e+00, 0.4312449588193277e+00, 0.4932891154015395e+00, 0.1099211239093930e+00, 0.5764729343614433e+00, 0.1286539516629047e+00, 0.5170000684627285e+00, 0.4006756226519864e+00, 0.5897018197291505e+00, 0.3774659871747915e+00, 
0.4211183018281148e+00, 0.6260997273722376e+00, 0.7810234892670644e+00, 0.9934154489086249e+00, 0.5116204079460294e+00, 0.7948452919719275e+00, 0.5998807734230309e+00, 0.4868691071952969e-01, 0.7977221292955683e+00, 0.4907992372099745e+00, 0.4337635562764541e-01, 0.4865658084478622e+00, 0.7887982210525304e+00, 0.6218441262679077e+00, 0.1807768964120138e-01, 0.7341707407508724e+00, 0.4738048273586776e+00, 0.9324751870816922e+00, 0.4643019354540665e+00, 0.2401905271601299e+00, 0.6072757133952530e+00, 0.6257046236339610e+00, 0.1662572295885112e-01, 0.3360107439465247e+00, 0.5925152832499899e+00, 0.4206234399601897e+00, 0.1828405120649758e+00, 0.8923354266778754e+00, 0.6211472795399554e+00, 0.3215240169291051e+00, 0.3967102459115966e+00, 0.4376502563326659e+00, 0.4571232280132043e-01, 0.2973405440159681e+00, 0.1737124961451384e+00, 0.1123665207380958e+00, 0.8069742345287393e+00, 0.5302931605046994e+00, 0.2541675285808478e+00, 0.9099997182043015e+00, 0.7562530283478575e-01, 0.2521110146593245e+00, 0.2525672524245413e+00, 0.8930826245328107e+00, 0.8545394186204489e+00, 0.1856025965008533e+00, 0.8541401621096930e+00, 0.7406868214039234e+00, 0.5851270108744718e+00, 0.7554853143122133e+00}, + n: 21, + zOut: []float64{0.2818806710434786e+01, 0.2596766072486540e+01, 0.2041400548479265e+01, 0.1919012388179902e+01, 0.1900863080415201e+01, 0.1778136512482050e+01, 0.1649297429111022e+01, 0.1375943901620400e+01, 0.1287933944237744e+01, 0.1246950647018126e+01, 0.9346701076388835e+00, 0.8619836761278706e+00, 0.6928534822715838e+00, 0.5816592049742084e+00, 0.4759893592204403e+00, 0.2537953742738920e+00, 0.2205935192530503e+00, 0.8350022078832350e-01, 0.3307087507037688e-01, 0.2105652896299196e-01, 0.3857874617179747e-02, 0.9177981866196648e-17, 0.6043035417783778e-14, 0.7682115486038763e-14, 0.1649297429111022e+01, 0.9047287949798740e-23, 0.2120842457393041e-14, 0.2617862539054627e-14, 0.1375943901620400e+01, 0.1499552906513639e-17, 0.3376573256769321e-15, 0.2886563968474245e-15, 0.1287933944237744e+01, 0.1966621906344823e-17, 0.2937881411718043e-15, 0.4577311861870994e-15, 0.1246950647018126e+01, 0.2742371736879744e-17, 0.2913637797754287e-14, 0.1483948533056680e-14, 0.9346701076388835e+00, 0.5800058106323822e-18, 0.2277814145766384e+02, 0.2277814145766384e+02, 0.9800000000000000e+02, 0.2997732426303855e+01, 0.1020408163265306e+01, 0.9030119244814722e-12, 0.6928534822715838e+00, 0.1471303281095442e-16, 0.2634125396797654e-10, 0.5071585384120369e-10, 0.5816592049742084e+00, 0.1365675999118616e-25, 0.3358753759221612e-09, 0.6258446588335648e-09, 0.4759893592204403e+00, 0.1479114197289397e-30, 0.3734630956704811e-08, 0.6716634735981551e-08, 0.2537953742738920e+00, 0.7467396771867359e-25, 0.3072344915986320e-06, 0.1829961043616119e-06, 0.2205935192530503e+00, 0.4437342591868191e-30, 0.2385309167517414e-05, 0.3978543929472447e-05, 0.8350022078832350e-01, 0.2436129622029919e-19, 0.4589207629691226e-04, 0.2836221395611648e-04, 0.3307087507037688e-01, 0.3081487911019577e-32, 0.1362072635971665e-04, 0.2978962617014088e-04, 0.2105652896299196e-01, 0.2711709361697228e-30, 0.2444625596237189e-05, 0.7827983423195974e-05, 0.3857874617179747e-02, 0.8925769947935872e-20, 0.9826965253502863e-02, 0.1259209175531780e-01}, + info: 0, + }, + { + z: []float64{0.9560856702670685e+00, 0.5885863857778361e+00, 0.8071258175651005e+00, 0.5175476341605639e+00, 0.1941426739297742e+00, 0.8451490614471896e+00, 0.9052305950784733e+00, 0.6791526261185319e+00, 0.6480230038264980e+00, 0.8438040949137990e+00, 0.3034844043967853e+00, 
0.3090582723248356e+00, 0.3282769832503399e+00, 0.7489751201458807e+00, 0.6672021557248126e+00, 0.1697989591956236e+00, 0.5723860504781838e+00, 0.3660621222567975e+00, 0.1505291330148173e-01, 0.7436839329892437e+00, 0.3679212455376497e+00, 0.5305714072096270e+00, 0.5792221354999488e+00, 0.3031533452885045e+00, 0.9514596930492207e+00, 0.3278419583960311e+00, 0.5742176036064726e+00, 0.7216680447710266e+00, 0.6608941225668634e+00, 0.3887216422692293e+00, 0.6952172825541724e+00, 0.8072929471269218e+00, 0.8776672848787970e-01, 0.9363773133978057e+00, 0.3470717327786855e+00, 0.9438495003134180e+00, 0.5511880105145929e+00, 0.5826637033513504e+00, 0.3416802173197317e+00, 0.5650234571673005e+00, 0.2927187699119284e+00, 0.4936451925034285e+00, 0.7496439056333554e+00, 0.8462697295115110e+00, 0.4661250827466905e+00, 0.3912590593193351e+00, 0.9140228064649625e+00, 0.4016314378444454e+00, 0.7105479239000361e+00, 0.3751330850058705e+00, 0.7309804928539319e+00, 0.1378037023247644e+00, 0.7527056268407295e+00, 0.1640478774378548e+00, 0.6030857470066908e+00, 0.7427119073248016e+00, 0.9996405952923426e-01, 0.7406467144237725e+00, 0.3762498316380406e+00, 0.8295145255291534e+00, 0.8322845800948830e-01, 0.6101670185797291e+00, 0.9451596878306588e+00, 0.1176379504958335e+00, 0.7940514845464750e+00, 0.3068100097277620e+00, 0.2371282369406802e+00, 0.2163781309135768e+00, 0.4705542393165623e+00, 0.4003463805482307e+00, 0.2731285781388088e+00, 0.8862626881898287e+00, 0.5854390777280626e+00, 0.4661955367614972e+00, 0.1993415035442425e+00, 0.9213995968359628e+00, 0.1542739762296447e+00, 0.9419076789558214e+00, 0.9611493561784724e+00, 0.7596360150269755e+00, 0.6590873675145426e+00, 0.3894853423829039e+00, 0.8407621320868611e+00, 0.4517123559923821e+00}, + n: 21, + zOut: []float64{0.2583735261373388e+01, 0.2239759728480232e+01, 0.2157858012928955e+01, 0.2146305201359927e+01, 0.1853050031878152e+01, 0.1788497392649839e+01, 0.1558387970436693e+01, 0.1494017846927041e+01, 0.1323723239351723e+01, 0.1188003233397794e+01, 0.9827022795744828e+00, 0.8335582164080391e+00, 0.7854672559750093e+00, 0.6236176061997519e+00, 0.4677974993044671e+00, 0.2968385185647548e+00, 0.2304456569541352e+00, 0.1793008998018142e+00, 0.2471364396669566e-01, 0.7537422761275628e-02, 0.3241997301555496e-04, 0.1184063810429910e-21, 0.1281976731077423e-10, 0.2849227344309828e-10, 0.1558387970436693e+01, 0.3001369155282220e-19, 0.2940080271049715e-08, 0.1685793775386140e-08, 0.1494017846927041e+01, 0.1696366490587264e-24, 0.4557030850938984e-08, 0.5166797185591864e-08, 0.1323723239351723e+01, 0.1862197338326049e-12, 0.1908610209474282e-08, 0.1816527091730617e-08, 0.1188003233397794e+01, 0.1613415991578665e-17, 0.2801618631477443e-08, 0.2177314380835442e-08, 0.9827022795744828e+00, 0.1073695634537585e-17, 0.2276534933826718e+02, 0.2276534933826718e+02, 0.1060000000000000e+03, 0.3113378684807256e+01, 0.3773584905660377e+01, 0.2107018980320373e-07, 0.7854672559750093e+00, 0.7395570986446986e-31, 0.5134101120680572e-07, 0.6949862017183880e-07, 0.6236176061997519e+00, 0.1700003115737185e-12, 0.4917011909790346e-06, 0.6444201346122313e-06, 0.4677974993044671e+00, 0.1676329423594650e-29, 0.2178752696080751e-05, 0.1697685142168825e-05, 0.2968385185647548e+00, 0.2903323170704433e-20, 0.9062193610690628e-05, 0.7062135859641892e-05, 0.2304456569541352e+00, 0.9860761315262648e-30, 0.1460945060339096e-04, 0.1904220434906198e-04, 0.1793008998018142e+00, 0.4606947686490709e-27, 0.4451684144215141e-07, 0.1728984935219646e-06, 0.2471364396669566e-01, 
0.1407327854914285e-26, 0.1942342839475662e-02, 0.3052712133602924e-02, 0.7537422761275628e-02, 0.9865964111678957e-20, 0.3214451590395407e-02, 0.5215635567340069e-02, 0.3241997301555496e-04, 0.0000000000000000e+00, 0.1184307710713820e-01, 0.9427403294935964e-02}, + info: 0, + }, + { + z: []float64{0.4101594051788160e+00, 0.1002974419660251e+00, 0.8434067081152193e+00, 0.7258916354493068e+00, 0.9256853341957688e+00, 0.8379265467529036e+00, 0.1564408324797836e+00, 0.8292039738515364e+00, 0.3771190822561328e+00, 0.8236097732630416e+00, 0.3320638300046872e+00, 0.6850750205938005e-01, 0.5616647045816037e+00, 0.8305938719521835e+00, 0.6006351465721590e+00, 0.8469146468608777e+00, 0.1179160781320751e+00, 0.6975716721259332e+00, 0.3237957597653003e+00, 0.1297318227605904e-01, 0.8416519209661123e+00, 0.2623987624579764e+00, 0.3323289069826861e+00, 0.9848995259497869e+00, 0.1295798367899781e-01, 0.4196318718239949e+00, 0.8791346827157692e+00, 0.9015205418757779e+00, 0.2557984197164971e+00, 0.7295221077613672e+00, 0.8796719157178668e+00, 0.6983240767110626e+00, 0.6461186804049626e+00, 0.5003600760145155e-01, 0.3411701822058120e+00, 0.6026846298507913e+00, 0.4649210011845201e+00, 0.4169632595817018e+00, 0.9745024564197420e+00, 0.9159749674174896e+00, 0.3798779762430529e+00, 0.6757581567027395e+00, 0.1501600637708167e+00, 0.8841295845033917e+00, 0.1700642059156726e+00, 0.5745602516925833e+00, 0.7146078194346678e+00, 0.7876321314233041e+00, 0.3623958693011297e+00, 0.1316504845867333e+00, 0.6150819295249010e+00, 0.9948396700221377e+00, 0.2186788175334711e+00, 0.3433450548510013e+00, 0.4434886569716980e+00, 0.6596978167473024e+00, 0.7067569387667516e+00, 0.2710992196058543e+00, 0.2531644393448058e+00, 0.6179743389634106e+00, 0.6395694224823617e+00, 0.6233992467844496e-01, 0.9270082862838847e-01, 0.6813409948026068e+00, 0.8489230752840353e+00, 0.4248777519526148e+00, 0.9258241367507150e+00, 0.7591306919901064e-01, 0.1337231221457533e+00, 0.6673452115706187e+00, 0.3161361894802919e+00, 0.9287930266484576e+00, 0.9211411715651807e+00, 0.7110889130885073e+00, 0.6783920645346327e+00, 0.5798290672446252e+00, 0.6003988035906362e+00, 0.8338909998640337e+00, 0.1150264810560550e+00, 0.8268177524919460e+00, 0.2868686997384918e+00, 0.7734675037869032e+00, 0.8495624932870276e+00, 0.7382167415731139e+00}, + n: 21, + zOut: []float64{0.2565771617602568e+01, 0.2464815559540473e+01, 0.2377249642737961e+01, 0.2182027799722737e+01, 0.1994511663794419e+01, 0.1777930797653110e+01, 0.1532836111328556e+01, 0.1298104160091711e+01, 0.1162548328490141e+01, 0.1031426069242260e+01, 0.9067767207978197e+00, 0.7702116437984168e+00, 0.7023507576840374e+00, 0.5846573732521254e+00, 0.3836750731694669e+00, 0.3732780616075560e+00, 0.2146097506115993e+00, 0.5462411591703480e-01, 0.3208967690429450e-01, 0.2788258306628742e-02, 0.1838228532938987e-03, 0.1703684557466520e-17, 0.8886730161731292e-14, 0.1725592004801531e-13, 0.1532836111328556e+01, 0.2310659810285515e-15, 0.8778995686431983e-12, 0.1396745951735637e-11, 0.1298104160091711e+01, 0.0000000000000000e+00, 0.8497749401208094e-11, 0.5925398902948847e-11, 0.1162548328490141e+01, 0.1292469707114106e-25, 0.1515360700076446e-10, 0.2085752134421418e-10, 0.1031426069242260e+01, 0.1195331315767287e-17, 0.8898419915788489e-10, 0.1186621585503233e-09, 0.9067767207978197e+00, 0.1888339631531495e-12, 0.2241246700510621e+02, 0.2241246700510621e+02, 0.1190000000000000e+03, 0.3539682539682540e+01, 0.7563025210084033e+01, 0.2596357213906324e-08, 0.7023507576840374e+00, 0.1416275483756143e-17, 
0.5106480833143134e-08, 0.6438584684288567e-08, 0.5846573732521254e+00, 0.1317619996636992e-15, 0.1896058302301025e-07, 0.2367797858508706e-07, 0.3836750731694669e+00, 0.9199951564453634e-11, 0.8967796171107713e-07, 0.1296821709728121e-06, 0.3732780616075560e+00, 0.1615587133892578e-26, 0.3088999981101691e-06, 0.2343043411729366e-06, 0.2146097506115993e+00, 0.2055968734232262e-28, 0.9134179982896841e-05, 0.6280501592969192e-05, 0.5462411591703480e-01, 0.1901142159808155e-23, 0.3768301572228235e-05, 0.8777513601148953e-05, 0.3208967690429450e-01, 0.1479114197289397e-29, 0.1428515450408294e-03, 0.5208612044311805e-04, 0.2788258306628742e-02, 0.2057124874537592e-17, 0.2063479623178836e-02, 0.3566698154744510e-02, 0.1838228532938987e-03, 0.3273772756667199e-28, 0.8538542526496483e-03, 0.1571849612061994e-03}, + info: 0, + }, + { + z: []float64{0.1589037341524596e+00, 0.1455226073481288e+00, 0.1938395273812957e+00, 0.1962691615573132e+00, 0.1687822326883440e+00, 0.6650981615477494e+00, 0.8270615916362424e+00, 0.1710502783654970e+00, 0.2206713720159043e+00, 0.4369721197399372e+00, 0.9301314325299896e+00, 0.5722466234411457e+00, 0.1135089451836189e+00, 0.5846068541971138e+00, 0.9216025315561343e+00, 0.9066267701445474e+00, 0.5584279552920539e+00, 0.4343925455049338e+00, 0.9858452060735702e+00, 0.6563046209578444e+00, 0.7077011175365495e+00, 0.4947310926689127e+00, 0.7385820400647596e+00, 0.5937728681009750e+00, 0.9762402194896037e+00, 0.3450302019452143e+00, 0.3805099852593911e+00, 0.7529397554746390e+00, 0.7330092480109731e+00, 0.3330086450808912e+00, 0.9038518043983117e+00, 0.6574079221280790e+00, 0.2509077957372493e+00, 0.6249580721461656e+00, 0.2757992349681960e+00, 0.5001238277403597e+00, 0.9621194298806066e+00, 0.3061711826567661e+00, 0.1119838022409911e+00, 0.2976298035182879e+00, 0.6363137835930621e-01, 0.4930031667447814e+00, 0.1865676338649569e-01, 0.4250211458984191e+00, 0.1095501188334868e+00, 0.2763357459722963e+00, 0.7075737347301593e+00, 0.5363987747039417e+00, 0.3986581101260644e+00, 0.2692140175173962e+00, 0.5588052030209328e+00, 0.5539805437492590e+00, 0.6882419411200217e+00, 0.6525011142322502e+00, 0.2351213214810641e+00, 0.4862287407564582e+00, 0.9494457572755843e-01, 0.2775605674126462e+00, 0.6088686305117397e-01, 0.1602712446759722e+00, 0.6726078044316741e-01, 0.1771280849255007e+00, 0.9403546682132294e+00, 0.1182787468569617e+00, 0.6084837514797026e+00, 0.4725411123129805e+00, 0.2891346904124859e-01, 0.3056197468639377e-01, 0.3597281640784106e+00, 0.3047108802830497e+00, 0.9462624965987684e+00, 0.6795048656132132e+00, 0.3142878274718968e+00, 0.2255775176392846e+00, 0.5741746909618360e+00, 0.6125119194512041e+00, 0.8636232492271716e+00, 0.5595270150362563e-01, 0.4894380312911951e+00, 0.2945948126181174e+00, 0.4782956027971728e+00, 0.1100404727801326e+00, 0.2692801555328298e+00, 0.4567347029351195e+00}, + n: 21, + zOut: []float64{0.2662663085210597e+01, 0.2404979690430721e+01, 0.2214109781172517e+01, 0.2010332580463078e+01, 0.1879931283999763e+01, 0.1831239586461748e+01, 0.1664682288858752e+01, 0.1512516241203824e+01, 0.1081258277181201e+01, 0.8471473481460485e+00, 0.6221178288483303e+00, 0.5493156782308360e+00, 0.4496825320738197e+00, 0.3768653101376996e+00, 0.2833465507506064e+00, 0.2013980912694376e+00, 0.1418989984144841e+00, 0.7096946004385345e-01, 0.4542843847166723e-01, 0.6747056508451696e-02, 0.1343590842619022e-02, 0.4141519752410312e-28, 0.4845145585707669e-05, 0.3343357997526164e-05, 0.1664682288858752e+01, 0.8527270841077049e-25, 0.1655046287299363e-04, 
0.2231546414257453e-04, 0.1512516241203824e+01, 0.5692210911982269e-19, 0.1383329297784762e-03, 0.1072264803352635e-03, 0.1081258277181201e+01, 0.1459392674658872e-28, 0.4158771817821724e-03, 0.3910579975830214e-03, 0.8471473481460485e+00, 0.2949199315750023e-16, 0.2216793811640345e-03, 0.2203201078239036e-03, 0.6221178288483303e+00, 0.2055113688275276e-18, 0.2085797369872005e+02, 0.2085797369872006e+02, 0.9000000000000000e+02, 0.2732426303854875e+01, 0.0000000000000000e+00, 0.1772659828830871e-05, 0.4496825320738197e+00, 0.4437342591868191e-30, 0.2138076899966292e-04, 0.1340116970023588e-04, 0.3768653101376996e+00, 0.2904017873172006e-24, 0.4668906275608195e-04, 0.7182837805013731e-04, 0.2833465507506064e+00, 0.4888968615550842e-22, 0.1290326696826671e-05, 0.6479808231987577e-06, 0.2013980912694376e+00, 0.4436553730962970e-26, 0.1673384055036320e-03, 0.7106703019344439e-04, 0.1418989984144841e+00, 0.3443985257364436e-16, 0.1415746442632383e-02, 0.1286112519803737e-02, 0.7096946004385345e-01, 0.1380506584136771e-29, 0.1898294559619081e-02, 0.3536485589649533e-02, 0.4542843847166723e-01, 0.9860761315262648e-31, 0.1085724134073318e-03, 0.2838967874935677e-03, 0.6747056508451696e-02, 0.2742900553697359e-16, 0.1307657950440773e-02, 0.4192938789348323e-02, 0.1343590842619022e-02, 0.2423380700838948e-25, 0.4340570868086748e-02, 0.9691995934364128e-02}, + info: 0, + }, + { + z: []float64{0.6603307676446488e+00, 0.5215490552441340e+00, 0.9279560414420993e+00, 0.3871591216009305e+00, 0.4489001158072897e+00, 0.2115798060938074e+00, 0.6013990647859857e-01, 0.6977506014884622e+00, 0.8981757899514635e+00, 0.6366966965554082e+00, 0.5447619366248987e+00, 0.2926906196279220e+00, 0.9538633412864880e+00, 0.4052441866103130e+00, 0.9970193775088028e+00, 0.1550480233350559e+00, 0.9697068099139655e+00, 0.3748402815554551e+00, 0.3906465669660910e+00, 0.4103562008634933e+00, 0.9514404866677418e+00, 0.5608812699221558e+00, 0.6226680723027547e+00, 0.8653188982275728e+00, 0.3999642050403411e+00, 0.8968004947108832e+00, 0.1007886665847859e+00, 0.2055140520989622e+00, 0.8479005785073866e+00, 0.4772281284198989e+00, 0.3664785333935638e+00, 0.9311163393120341e+00, 0.2051534177095988e+00, 0.5588930379235971e+00, 0.9385948972790567e+00, 0.1380523811906209e+00, 0.9076293986285472e+00, 0.9937542903014829e+00, 0.5330397746663563e+00, 0.5373590340921116e+00, 0.2667502750074380e-01, 0.9996497580467543e+00, 0.7460509377370035e+00, 0.5868152625248035e+00, 0.2352631583529476e+00, 0.2062979890255305e+00, 0.1864397664760209e-02, 0.1562262993494207e+00, 0.1880894213999632e+00, 0.5369024951393064e+00, 0.6560943776892021e+00, 0.9558063214302399e+00, 0.1463497248954505e+00, 0.6977928179134164e+00, 0.8204108917709737e+00, 0.9530026100161404e+00, 0.7191901603465490e+00, 0.4974721533134605e+00, 0.8445129869563219e+00, 0.9007488855711614e+00, 0.4164322161987429e+00, 0.5750362072910759e+00, 0.1935672755254650e+00, 0.2663393354536251e+00, 0.3913931132271733e+00, 0.7724103066167269e+00, 0.7871420607985310e+00, 0.3766724710952808e+00, 0.9224124235283799e+00, 0.1080194858843619e+00, 0.3512364719709385e+00, 0.9868075881536598e-02, 0.9893144531298202e+00, 0.5983612801716819e+00, 0.2882889058269555e-01, 0.2759662054778022e+00, 0.4865673169207868e+00, 0.4317793431152178e+00, 0.4039098350536063e-01, 0.7594353556100967e+00, 0.6702102151313558e+00, 0.5757953377080282e+00, 0.7036908158147406e+00, 0.3078985083785768e+00}, + n: 21, + zOut: []float64{0.2422149931211175e+01, 0.2394118175176597e+01, 0.2305882752563478e+01, 0.2031709011458775e+01, 
0.2015634640599460e+01, 0.1933740155888154e+01, 0.1715509223328114e+01, 0.1559424733880939e+01, 0.1425500044444135e+01, 0.1021162532677627e+01, 0.8999999270989295e+00, 0.8371987758218183e+00, 0.7469639623683311e+00, 0.6494523294995699e+00, 0.4251816116188064e+00, 0.2881024603279346e+00, 0.2247792687845145e+00, 0.8318936570759092e-01, 0.1886599847837073e-01, 0.1029977709531541e-01, 0.8015530498900421e-03, 0.8380373580927862e-22, 0.1621276583815588e-04, 0.5889624784943332e-05, 0.1715509223328114e+01, 0.2616454581976124e-17, 0.2563506401168800e-03, 0.3048038004625151e-03, 0.1559424733880939e+01, 0.5629222562364776e-20, 0.1824080863602310e-03, 0.8021857862878124e-04, 0.1425500044444135e+01, 0.1078233452249475e-15, 0.2986624164553499e-03, 0.4306384056300427e-03, 0.1021162532677627e+01, 0.2425965640253176e-21, 0.3415477187080772e-04, 0.2730971166672021e-04, 0.8999999270989295e+00, 0.1110716154551185e-26, 0.2300966623107952e+02, 0.2300966623107953e+02, 0.9800000000000000e+02, 0.2877551020408163e+01, 0.3061224489795918e+01, 0.5341669529353412e-07, 0.7469639623683311e+00, 0.1147369787651374e-23, 0.3199917396615717e-04, 0.1114773640824220e-03, 0.6494523294995699e+00, 0.1324417262847731e-16, 0.4143040953885707e-03, 0.3815591968277185e-03, 0.4251816116188064e+00, 0.4294702353329713e-20, 0.5586157832836348e-03, 0.4985099391803953e-03, 0.2881024603279346e+00, 0.3449343154346125e-19, 0.3018840928486242e-04, 0.4804099248496092e-04, 0.2247792687845145e+00, 0.8223054085459871e-16, 0.6535536077614123e-03, 0.5704051254743707e-03, 0.8318936570759092e-01, 0.1269822990867506e-16, 0.2750660948852983e-03, 0.2439371709019565e-03, 0.1886599847837073e-01, 0.2261821987449685e-25, 0.1693307421164339e-03, 0.1879460762504110e-03, 0.1029977709531541e-01, 0.2207699818464282e-23, 0.8362752373159153e-06, 0.5564443135649789e-05, 0.8015530498900421e-03, 0.1142002837193949e-22, 0.2005722186927794e-03, 0.1790331282576109e-02}, + info: 0, + }, + { + z: []float64{0.7180522604364612e+00, 0.1926580568606098e+00, 0.9998274874952506e-01, 0.5945840846677781e+00, 0.6814004826706310e-01, 0.6532586662527488e+00, 0.6162535781196632e+00, 0.6323752897874746e+00, 0.6913197528064650e+00, 0.8218961980054600e+00, 0.2391583485552943e+00, 0.6893465701234240e+00, 0.8646254741258573e+00, 0.4399082441653793e+00, 0.7657207697437258e+00, 0.6442902814401660e+00, 0.2997828758931008e+00, 0.3311754429773528e+00, 0.4602753981904284e-01, 0.2937498413335546e+00, 0.9438033090912972e+00, 0.6689413829611792e+00, 0.3690605709328096e+00, 0.1297746871929878e+00, 0.5761771518385747e+00, 0.9470476076636891e+00, 0.6065387228873672e+00, 0.3279182193602236e+00, 0.7521029317260848e+00, 0.3448791591170053e-01, 0.8260379882894462e+00, 0.3381289908082724e+00, 0.6350192899014672e+00, 0.6181098759446546e+00, 0.7961594478307139e+00, 0.9929813242449080e+00, 0.4678192142649594e+00, 0.8317252674327552e+00, 0.1301328932823079e+00, 0.9710090231649569e+00, 0.1522128889234372e+00, 0.8594558260240532e+00, 0.6013715693159983e+00, 0.8742383713574672e+00, 0.7309721379377087e+00, 0.5021385621750218e+00, 0.9190365975533138e+00, 0.8027800227596282e-01, 0.1149439815205374e+00, 0.3491372322884074e+00, 0.3638810075690350e+00, 0.7397316797734275e+00, 0.6765505976038609e+00, 0.6412509132149137e+00, 0.6636796276324973e+00, 0.2769301898862327e+00, 0.6293983439129569e+00, 0.7661949976862148e+00, 0.1787796367414251e+00, 0.2134901744318570e+00, 0.8213730837735125e+00, 0.5732135541056227e+00, 0.8956530012694059e+00, 0.5095442527120628e+00, 0.8428538487299666e+00, 0.6837140150023939e+00, 
0.1256023460764972e+00, 0.1828667371827231e-01, 0.3922785557614588e+00, 0.9906794824083128e+00, 0.6528984219237121e+00, 0.2175815548938115e+00, 0.5215238469901794e+00, 0.6874084104558049e+00, 0.4144717036123762e+00, 0.5901719497674505e+00, 0.3178975712304457e+00, 0.7061784208820305e+00, 0.6028763180221938e+00, 0.6012903376000152e+00, 0.6812913355997376e+00, 0.7728807542206401e+00, 0.4888911469874758e+00, 0.1012713084879602e+00}, + n: 21, + zOut: []float64{0.2588311953706596e+01, 0.2370344164465735e+01, 0.2152772432177807e+01, 0.2121020324458281e+01, 0.1935798000017230e+01, 0.1639966751245229e+01, 0.1447991497477954e+01, 0.1303796318075928e+01, 0.1122178474206467e+01, 0.1028684266176079e+01, 0.9663785372526962e+00, 0.8401839996503420e+00, 0.6468021461987292e+00, 0.5148128374433424e+00, 0.4081393344645305e+00, 0.3744044838513741e+00, 0.1951863401557736e+00, 0.1222936930600034e+00, 0.3610852087288927e-01, 0.2215175692702855e-02, 0.1055251342481684e-03, 0.5816113682013476e-24, 0.3490790538048844e-06, 0.5279190866861155e-06, 0.1447991497477954e+01, 0.6749793798367842e-21, 0.1187240552619153e-05, 0.8669686727541259e-06, 0.1303796318075928e+01, 0.2818270319791895e-16, 0.1425495821091504e-08, 0.4238958678865879e-08, 0.1122178474206467e+01, 0.1974523135472784e-17, 0.8140039386639262e-06, 0.3278912117172865e-06, 0.1028684266176079e+01, 0.3004196424688596e-17, 0.2459469608149132e-06, 0.1027360875624831e-06, 0.9663785372526962e+00, 0.8319175140295936e-21, 0.2181749477578394e+02, 0.2181749477578394e+02, 0.1040000000000000e+03, 0.3040816326530612e+01, 0.3846153846153846e+01, 0.3006532860022149e-05, 0.6468021461987292e+00, 0.2129924444096732e-28, 0.3097317061779137e-05, 0.3301766384461962e-05, 0.5148128374433424e+00, 0.5841522899747606e-17, 0.6589564866042633e-05, 0.8825655271690012e-05, 0.4081393344645305e+00, 0.5522026336547083e-29, 0.6929345622607317e-04, 0.1121666398022498e-03, 0.3744044838513741e+00, 0.4071868297361024e-19, 0.4964849153130707e-03, 0.6240579194345771e-03, 0.1951863401557736e+00, 0.3096999756315481e-22, 0.1538823843362176e-02, 0.5557701536471033e-03, 0.1222936930600034e+00, 0.9508642973366516e-17, 0.9344625728185554e-03, 0.1303932473154565e-02, 0.3610852087288927e-01, 0.2671050208464689e-19, 0.5349955893117279e-02, 0.9991286391872665e-02, 0.2215175692702855e-02, 0.2465190328815662e-30, 0.2737788875312504e-02, 0.2378589653250575e-02, 0.1055251342481684e-03, 0.0000000000000000e+00, 0.1487915832966980e-01, 0.9945584373260818e-02}, + info: 0, + }, + { + z: []float64{0.2443960928887767e+00, 0.5306690011327519e+00, 0.8844980893594125e+00, 0.8505050348613927e+00, 0.1179365025679910e+00, 0.6787232231004032e+00, 0.4781425309482933e+00, 0.8596658025119301e+00, 0.7267566040500681e+00, 0.7988986860541384e+00, 0.5066697395752245e+00, 0.9256193759419001e+00, 0.1614174453766865e+00, 0.7938782145253169e+00, 0.3207377639351938e+00, 0.5936236877473402e+00, 0.5294512496859689e+00, 0.3341300959064973e+00, 0.8739309914591177e+00, 0.1643518115289022e-01, 0.9695643985202173e+00, 0.9508789318813814e+00, 0.6599781146531454e+00, 0.8013437877388673e+00, 0.1929493765752847e+00, 0.7655052376601326e+00, 0.6319580998438735e+00, 0.6831519307583876e+00, 0.3834927697748418e+00, 0.7094548193343077e+00, 0.7020510393185050e+00, 0.6520146205437408e+00, 0.6562814689912274e+00, 0.5240471964184201e+00, 0.2362557689024753e+00, 0.4347621644220048e+00, 0.1660979681267404e+00, 0.2422730270980831e+00, 0.7891782502389337e+00, 0.5007812255281535e+00, 0.4444633531981611e+00, 0.3026778693869716e+00, 0.9990331636150707e-02, 
0.4097233632314936e+00, 0.9940240633376396e+00, 0.1626467187830966e+00, 0.3461071508544706e+00, 0.6476433090511620e+00, 0.7204399230581238e-01, 0.6885344672911693e+00, 0.1941295874559149e+00, 0.5883115695765473e+00, 0.7359955011333836e+00, 0.4277388472789430e+00, 0.4017294206583021e+00, 0.7838762945853397e+00, 0.9128155969033611e+00, 0.4856080402782125e+00, 0.4763260467232836e+00, 0.5586410048770161e+00, 0.8031586091432461e+00, 0.6068401118620381e-01, 0.9247455618544214e+00, 0.5857147883918616e+00, 0.7467203590494284e+00, 0.1453563221196804e+00, 0.2297646010524917e+00, 0.8950965613541395e+00, 0.3335127494359967e+00, 0.9671078623732762e+00, 0.8023166958816280e+00, 0.1059134943829219e+00, 0.7122485680883537e+00, 0.3211310961264330e+00, 0.1626983074059779e+00, 0.4042697895935750e+00, 0.4799615975845870e+00, 0.3426607739026810e-02, 0.5147088911567483e+00, 0.7533023355653040e+00, 0.9693400349159813e+00, 0.7251923493536844e+00, 0.9132309180417886e-01, 0.4001770994110867e+00}, + n: 21, + zOut: []float64{0.2574587707602998e+01, 0.2548516143888565e+01, 0.2328646361106399e+01, 0.2152335028165169e+01, 0.1900054508136636e+01, 0.1821843561986577e+01, 0.1641395708303600e+01, 0.1593513640807486e+01, 0.1254245828754644e+01, 0.1184500984874290e+01, 0.9407518766665146e+00, 0.8321813659237318e+00, 0.6961683120499664e+00, 0.5315544763312857e+00, 0.4082285577131941e+00, 0.4022023658419822e+00, 0.2892256147411429e+00, 0.1412172326671640e+00, 0.7919841586115514e-01, 0.2055312514153672e-02, 0.1458583715266580e-03, 0.5995342879679690e-28, 0.2082901809770958e-07, 0.9383395476751434e-08, 0.1641395708303600e+01, 0.1321835131711384e-15, 0.1191741688360734e-04, 0.3695487724817346e-05, 0.1593513640807486e+01, 0.5607617048340495e-15, 0.5232768419257040e-05, 0.4082864434107817e-05, 0.1254245828754644e+01, 0.1850697850686327e-14, 0.2071860298001858e-05, 0.2753629328588578e-05, 0.1184500984874290e+01, 0.3549874073494553e-29, 0.9441520198112501e-05, 0.5871348343515868e-05, 0.9407518766665146e+00, 0.3656881477565973e-23, 0.2332256886230818e+02, 0.2332256886230818e+02, 0.1000000000000000e+03, 0.2984126984126984e+01, 0.3000000000000000e+01, 0.1218265160373847e-03, 0.6961683120499664e+00, 0.3847797347882626e-19, 0.2944801840696127e-03, 0.4565874591840146e-03, 0.5315544763312857e+00, 0.2366582715663035e-29, 0.5951942600827748e-04, 0.7082415940475930e-04, 0.4082285577131941e+00, 0.1744834104604043e-24, 0.4087064324555941e-04, 0.4051735857075661e-04, 0.4022023658419822e+00, 0.5055621275172978e-20, 0.1443306547898801e-05, 0.5013891517830428e-06, 0.2892256147411429e+00, 0.1446770900175336e-26, 0.7439355829951562e-04, 0.1741194257531676e-03, 0.1412172326671640e+00, 0.3448013628188340e-14, 0.2774107535050150e-02, 0.3338675080262731e-02, 0.7919841586115514e-01, 0.2413026984508734e-18, 0.7157138061395222e-03, 0.6803125903293225e-03, 0.2055312514153672e-02, 0.3994185778863994e-23, 0.5722078415717462e-03, 0.5910838594703896e-03, 0.1458583715266580e-03, 0.1183291357831518e-27, 0.8602246198024520e-03, 0.1402897408133881e-02}, + info: 0, + }, + { + z: []float64{0.6756942057876089e+00, 0.6579919202857232e+00, 0.8168130848194743e+00, 0.2483963371195050e+00, 0.2866280219451008e+00, 0.7172808579862799e-03, 0.2411898945446896e+00, 0.5534360794587689e-02, 0.3878248310175912e+00, 0.4256983490856848e+00, 0.7118512164413080e+00, 0.8762976594477806e+00, 0.6850772131038755e+00, 0.4603991232581716e+00, 0.6546622425118448e+00, 0.3103854094723100e+00, 0.7913005955185648e+00, 0.4864788053022765e+00, 0.6214717223070496e+00, 0.7079028836241165e+00, 
0.2446362485461124e+00, 0.3062353821816216e+00, 0.8766788389923219e-01, 0.4381267847541642e+00, 0.4459912790878073e-01, 0.9432264671740954e+00, 0.3830574008366420e+00, 0.3381025391982955e+00, 0.6585260539196298e+00, 0.1004337858849739e+00, 0.9320890528134474e+00, 0.3322930037041504e+00, 0.7282334590214989e-02, 0.8367863647557111e+00, 0.9736104021592913e+00, 0.1613168640897539e+00, 0.6409545774446285e+00, 0.8441218931570360e-01, 0.9793937208019250e+00, 0.8303681522642254e+00, 0.1022316826967604e+00, 0.3021608972738009e+00, 0.1238379032543057e+00, 0.3839961366681232e+00, 0.8871553342676223e+00, 0.2909019998361990e+00, 0.7955411369197152e-01, 0.6476593477808288e+00, 0.1739219408315845e+00, 0.6498034164943688e+00, 0.3405210004266120e+00, 0.5459816771585898e+00, 0.2574285667818188e-01, 0.8691155035114945e+00, 0.4320450067500660e+00, 0.6967311316847606e+00, 0.9186339034853966e+00, 0.9491091410944081e+00, 0.2046793386144015e+00, 0.4114169869296881e+00, 0.9170199522695309e+00, 0.1514288291023066e+00, 0.2203325773257230e+00, 0.7891126546748299e+00, 0.4738113210301682e+00, 0.6729225031084131e+00, 0.2382373075875677e+00, 0.4180455054749687e+00, 0.9185761229203592e+00, 0.8030412579733187e+00, 0.4789612026295414e+00, 0.5038982716350845e+00, 0.9959829239394621e+00, 0.3087119711461705e-01, 0.6615773194242809e+00, 0.4461280013353816e+00, 0.1982838436639324e+00, 0.7583555141388076e+00, 0.7946309632008934e+00, 0.6749216068143323e+00, 0.9948312782688508e+00, 0.2854132428873886e+00, 0.8405396060835862e+00, 0.3412432527462372e-01}, + n: 21, + zOut: []float64{0.2393193272196224e+01, 0.2018136210599260e+01, 0.1995725670346335e+01, 0.1962470040389077e+01, 0.1880885332732760e+01, 0.1640866198530974e+01, 0.1381237309251015e+01, 0.1349378962361067e+01, 0.9454836406584929e+00, 0.7636867441308103e+00, 0.6820433637129892e+00, 0.6091604279437030e+00, 0.5856982990037397e+00, 0.4957297472312800e+00, 0.2553870576469542e+00, 0.2360257010865909e+00, 0.1371029529316844e+00, 0.8102847375475340e-01, 0.5521596879227348e-01, 0.9135542089709688e-02, 0.7426079491367038e-04, 0.1040308236944517e-14, 0.7301351780522185e-10, 0.7774740781328628e-10, 0.1381237309251015e+01, 0.2169367489357782e-29, 0.6020076428884570e-10, 0.5781767346304832e-10, 0.1349378962361067e+01, 0.1885108385497271e-16, 0.6878646083705261e-10, 0.6530919047819172e-10, 0.9454836406584929e+00, 0.5778430642028005e-17, 0.9207835097245334e-10, 0.7637719283517905e-10, 0.7636867441308103e+00, 0.1387778780781446e-16, 0.1259777267883337e-09, 0.1656656563258317e-09, 0.6820433637129892e+00, 0.0000000000000000e+00, 0.1947766517618461e+02, 0.1947766517618461e+02, 0.1070000000000000e+03, 0.3197278911564626e+01, 0.3738317757009346e+01, 0.5169611983503601e-09, 0.5856982990037397e+00, 0.2460562659624748e-15, 0.4628473083370263e-08, 0.9345989594028561e-08, 0.4957297472312800e+00, 0.2615714135774493e-18, 0.2264320058754590e-06, 0.2929148984854635e-06, 0.2553870576469542e+00, 0.1029104326196136e-18, 0.2640422579955958e-07, 0.2108896012270181e-07, 0.2360257010865909e+00, 0.3005450327091600e-17, 0.6822175517251369e-08, 0.8519370047734521e-08, 0.1371029529316844e+00, 0.3395257336071235e-24, 0.2687435952714477e-08, 0.2177398712709520e-08, 0.8102847375475340e-01, 0.0000000000000000e+00, 0.5343374415472690e-09, 0.5384901342094013e-09, 0.5521596879227348e-01, 0.2914620163738177e-24, 0.6457803167637477e-08, 0.2258738450739571e-07, 0.9135542089709688e-02, 0.5326388832052272e-25, 0.9576323906120589e-06, 0.3243967823107194e-05, 0.7426079491367038e-04, 0.3164925372525892e-18, 
0.6752170208452239e-05, 0.3302115586194402e-04}, + info: 0, + }, + { + z: []float64{0.1859840927709887e+00, 0.1326482709507555e+00, 0.1830793816152183e+00, 0.4955347339015181e+00, 0.7174832323677909e+00, 0.1239131234434754e-01, 0.8960455455727024e+00, 0.1883195594308053e+00, 0.3701760109540279e+00, 0.5189832590024491e+00, 0.8746518205444311e+00, 0.5127960794741733e+00, 0.2715029219143491e+00, 0.3130984190329523e+00, 0.6427011945330067e+00, 0.2728531634457195e+00, 0.4849915812764480e+00, 0.4474855195890282e-01, 0.1232624016522375e+00, 0.4054507321320544e+00, 0.8168182420396084e+00, 0.5779650563825861e+00, 0.3336575737681243e+00, 0.4793345429794458e+00, 0.3323762775436829e+00, 0.4013874798394268e-01, 0.3250840079396182e+00, 0.3339528328323188e+00, 0.6534745789256358e+00, 0.1592419685588734e+00, 0.4882641395765897e-01, 0.3378473153079410e+00, 0.1498873352536064e+00, 0.9366519936638189e+00, 0.8628314489246045e+00, 0.9035188141748132e+00, 0.3376603415628452e+00, 0.8754867721780122e+00, 0.4314313357227496e+00, 0.6689080802643724e+00, 0.7427192946427073e+00, 0.8408966332922378e+00, 0.3923135283693152e+00, 0.9856346160535667e+00, 0.5253798946684216e+00, 0.4589666030610524e+00, 0.3627813920321372e+00, 0.6650592661455799e+00, 0.5488555419923284e+00, 0.8723519563008553e+00, 0.5759829964509324e+00, 0.8314551283704829e+00, 0.8033910216639006e-01, 0.5533270198466882e+00, 0.4579854660280547e-01, 0.8805162884999327e+00, 0.7798601625088321e+00, 0.3998529604552336e+00, 0.2290634020126769e-01, 0.4087780821380217e+00, 0.8084384129630373e-01, 0.1651045317338867e+00, 0.5138876592921692e+00, 0.1502237554834691e+00, 0.3139451210283801e+00, 0.4951417832103121e+00, 0.4305502836911147e+00, 0.4221379680427187e+00, 0.6629276271381638e+00, 0.6032939209531929e+00, 0.7486429236694788e+00, 0.4603187644190643e+00, 0.9910999698454684e+00, 0.9830579802970648e+00, 0.1327788029232968e+00, 0.7906300391462903e+00, 0.4870686063503010e+00, 0.9107501765395865e-01, 0.5847999740468798e+00, 0.5237055378474927e+00, 0.1949033413503392e+00, 0.6706570830152104e+00, 0.4839742423553839e+00, 0.6933746141255293e+00}, + n: 21, + zOut: []float64{0.2594702985921864e+01, 0.1963381202215380e+01, 0.1899121104965353e+01, 0.1852857657580134e+01, 0.1352179381057759e+01, 0.1316420368068040e+01, 0.1223907530772361e+01, 0.1104722051320913e+01, 0.1042814535040686e+01, 0.9392359492223399e+00, 0.5879944547218380e+00, 0.5343064059199153e+00, 0.3616892068472140e+00, 0.3474187565935542e+00, 0.3132932870269020e+00, 0.2394766491234008e+00, 0.1924620369224269e+00, 0.5796610832720670e-01, 0.4900919199482741e-01, 0.2087594658763894e-01, 0.6804292520862544e-03, 0.2186403793390500e-19, 0.2668268315891067e-16, 0.1216320938093713e-16, 0.1223907530772361e+01, 0.3077822220048293e-18, 0.9690290545884895e-18, 0.1677534925257981e-18, 0.1104722051320913e+01, 0.3294338264035954e-18, 0.1270509199570000e-13, 0.4678488073918506e-13, 0.1042814535040686e+01, 0.2740439675865377e-23, 0.1047809983111396e-11, 0.5781643460797313e-12, 0.9392359492223399e+00, 0.3379215452805658e-16, 0.8350045830190690e-10, 0.5730922211948388e-10, 0.5879944547218380e+00, 0.6938893903907228e-17, 0.1799451523948185e+02, 0.1799451523948184e+02, 0.1150000000000000e+03, 0.3578231292517007e+01, 0.6086956521739131e+01, 0.8630088713732289e-10, 0.3616892068472140e+00, 0.1664229231319872e-20, 0.9644621511794884e-10, 0.9354199679969641e-10, 0.3474187565935542e+00, 0.0000000000000000e+00, 0.1015403419286529e-09, 0.1046840761330470e-09, 0.3132932870269020e+00, 0.6366107505133565e-27, 0.3071873418074744e-11, 
0.5342885575500614e-12, 0.2394766491234008e+00, 0.2599072586631376e-18, 0.1837414559126179e-09, 0.2303923049314304e-09, 0.1924620369224269e+00, 0.3400814535759169e-17, 0.9192916981159684e-08, 0.4803632388224207e-08, 0.5796610832720670e-01, 0.0000000000000000e+00, 0.1565526746702558e-05, 0.8408463868497472e-06, 0.4900919199482741e-01, 0.7888609052210118e-30, 0.1903794116022181e-04, 0.1014775117962413e-04, 0.2087594658763894e-01, 0.9734740470109229e-22, 0.4676468030348868e-06, 0.4248047233272650e-05, 0.6804292520862544e-03, 0.3161038140634776e-18, 0.1818811163938070e-02, 0.3653457173655013e-02}, + info: 0, + }, + { + z: []float64{0.2029039404458555e-01, 0.4529725864625287e+00, 0.6412066909421453e+00, 0.6139288440336396e+00, 0.3661028502807454e+00, 0.9178254749803582e+00, 0.4307551173824371e+00, 0.4985808870906128e+00, 0.8171615510934167e+00, 0.8279058065788196e+00, 0.3035261047197422e+00, 0.7877147223127390e+00, 0.7688130520483442e+00, 0.4834409462671818e-01, 0.3121145202501183e+00, 0.5269232357652766e+00, 0.7600633371694686e+00, 0.5251092877920901e+00, 0.8263235554592012e+00, 0.6894073332954654e+00, 0.3521429062439027e+00, 0.6447189811681343e+00, 0.1105592548668879e+00, 0.1049454508922576e+00, 0.7381247076050562e+00, 0.7320584094686099e+00, 0.7003114013559573e+00, 0.9638534557041758e+00, 0.5727311775052889e+00, 0.3930768970870516e+00, 0.7640649329763241e+00, 0.3221407324893386e+00, 0.9725443946255905e+00, 0.6922592425787558e+00, 0.3449642467056757e-01, 0.1384627086161467e+00, 0.4398391723165511e+00, 0.8406456486574913e+00, 0.4126144976348992e+00, 0.5351779876797247e-01, 0.6669497026260232e+00, 0.5746288223886410e+00, 0.4589594741804619e+00, 0.7484833637036891e+00, 0.3274476209575072e-01, 0.7067960763848782e+00, 0.8759136475280368e+00, 0.3284828537280239e+00, 0.2536109230678856e+00, 0.8799264273691800e+00, 0.8159607815416694e+00, 0.5729670098854700e-01, 0.8884200881604851e+00, 0.5249116128877267e+00, 0.6498061599007876e-01, 0.6075292761444269e+00, 0.2428201413403417e+00, 0.2668460043818345e+00, 0.1429723702275580e+00, 0.6942338078813951e+00, 0.9271225003121289e+00, 0.8931957344289477e+00, 0.6420336733834545e+00, 0.3786225606645894e+00, 0.9421368123010410e-01, 0.9476439119845459e+00, 0.6497389318978118e+00, 0.3841806572586668e+00, 0.1329188317035728e+00, 0.6545507077812880e+00, 0.4430247695845350e+00, 0.1753161904868739e+00, 0.2413409779703910e+00, 0.1476409499735053e+00, 0.1179852848482287e+00, 0.3704631210033814e+00, 0.1967445635592234e+00, 0.2100970549835486e+00, 0.5744473538919733e+00, 0.1093206314301038e+00, 0.5132572818416121e+00, 0.4964557538937117e+00, 0.7518888363275744e-02, 0.9720615373003737e+00}, + n: 21, + zOut: []float64{0.2563191374817756e+01, 0.2359364796169898e+01, 0.2296884847589588e+01, 0.2056943321577226e+01, 0.1949112826206492e+01, 0.1629835138023691e+01, 0.1562766390641567e+01, 0.1422208590143927e+01, 0.1368481726990307e+01, 0.1150114414099739e+01, 0.8482773777570239e+00, 0.7665061168435710e+00, 0.7135198036652551e+00, 0.3486547062556205e+00, 0.2456691453437445e+00, 0.2203009969871996e+00, 0.1404417091680581e+00, 0.1160909181211330e+00, 0.2112854362928769e-01, 0.5069559443178203e-02, 0.5650407111696261e-03, 0.3717112585401408e-26, 0.2316306425767847e-08, 0.1632477182259275e-08, 0.1562766390641567e+01, 0.6446192664231602e-24, 0.9634960729667980e-08, 0.6524712961691138e-08, 0.1422208590143927e+01, 0.1932709217791479e-28, 0.4932058767625490e-07, 0.7860293908453283e-07, 0.1368481726990307e+01, 0.1238511621196989e-27, 0.2778987342481399e-06, 0.5558918854266694e-06, 
0.1150114414099739e+01, 0.2156683454185074e-19, 0.1662595987219070e-04, 0.2580299679298412e-04, 0.8482773777570239e+00, 0.7914569980025776e-17, 0.2178512734418544e+02, 0.2178512734418543e+02, 0.1000000000000000e+03, 0.2934240362811791e+01, 0.1000000000000000e+01, 0.1715129124192563e-05, 0.7135198036652551e+00, 0.1613909508212798e-19, 0.2836076293001813e-04, 0.1595224464977716e-04, 0.3486547062556205e+00, 0.3352658847189300e-29, 0.8812160938383347e-04, 0.1178320764067675e-03, 0.2456691453437445e+00, 0.0000000000000000e+00, 0.5251568647637462e-04, 0.4480459044047029e-04, 0.2203009969871996e+00, 0.6471183777709004e-25, 0.1114279427921990e-09, 0.2270576453660371e-10, 0.1404417091680581e+00, 0.2855676476900063e-27, 0.2886261726979772e-05, 0.5470704056451658e-06, 0.1160909181211330e+00, 0.3526208246337923e-27, 0.3588687949078309e-02, 0.5383294574404876e-02, 0.2112854362928769e-01, 0.2896602354076833e-17, 0.1732996915625309e-02, 0.1499310228751921e-02, 0.5069559443178203e-02, 0.1003594455767978e-17, 0.5081816639203317e-03, 0.1080637708244138e-02, 0.5650407111696261e-03, 0.0000000000000000e+00, 0.5066428376081889e-02, 0.3247326449492487e-02}, + info: 0, + }, + { + z: []float64{0.7177806115181969e+00, 0.9619042524298227e+00, 0.9756290650460009e-01, 0.2695657197860104e+00, 0.5092382615580680e+00, 0.2216490915715645e+00, 0.7713962041593733e+00, 0.9019626308054164e+00, 0.1009043823109016e+00, 0.7164892891763703e+00, 0.6972336574704985e+00, 0.5604456603447616e-01, 0.4658944020177349e+00, 0.4677429402719713e+00, 0.9395972368396054e+00, 0.6567060928508761e-01, 0.6711470087762763e+00, 0.3572615865847095e-01, 0.7841795004355456e-01, 0.2100489732227784e+00, 0.1528790147771258e+00, 0.2944728557463859e+00, 0.6929796239262105e+00, 0.5593689694083593e+00, 0.5600220347177359e+00, 0.6885800745012647e+00, 0.1441376161827218e+00, 0.2468678391528564e+00, 0.2219056216366488e+00, 0.7828887069679423e+00, 0.1781536622262658e-01, 0.9712383516530864e+00, 0.1031071887927941e+00, 0.5428204596104211e+00, 0.9920375605622551e+00, 0.2231614148449633e+00, 0.1774197365731667e+00, 0.1667803739895276e+00, 0.2293956181863850e+00, 0.8453959149417756e+00, 0.2211895679989639e+00, 0.5610288802043042e+00, 0.3749535737701304e-01, 0.7418144847444434e+00, 0.2593245755300763e+00, 0.5358881543370908e-01, 0.9118629582226685e+00, 0.6483736004795430e+00, 0.7595140552166778e+00, 0.2981894989055883e+00, 0.6091779707233183e+00, 0.7798268953767704e+00, 0.1682114110436058e+00, 0.3801835867597201e+00, 0.6380508025759659e+00, 0.7892946000460455e-01, 0.7607559424299619e+00, 0.3271484239841950e+00, 0.7700240256278714e+00, 0.5894107219393652e+00, 0.6207117138108632e+00, 0.4725019923733027e+00, 0.3616574024313524e+00, 0.6744122205664662e+00, 0.5489632027065386e+00, 0.1532930834122788e+00, 0.1692473010839316e+00, 0.7515070367293223e+00, 0.1186783750638719e+00, 0.1028479850139022e+00, 0.3468642488228025e+00, 0.5278752643808988e+00, 0.3849250707234438e+00, 0.7889573536656195e+00, 0.4738712276132427e+00, 0.7058571493185843e+00, 0.8948240863202030e+00, 0.3494029097065446e+00, 0.4916843795342892e+00, 0.1993321046695898e+00, 0.3650043431665774e+00, 0.2403686388300026e+00, 0.5474155466298688e+00, 0.6157434374908201e+00}, + n: 21, + zOut: []float64{0.1955717973339200e+01, 0.1915826728710446e+01, 0.1780143898982495e+01, 0.1751081083154649e+01, 0.1679171531306522e+01, 0.1398798787154148e+01, 0.1289579288306139e+01, 0.1171599632002814e+01, 0.9950381700653156e+00, 0.9008548767119643e+00, 0.7944341357698450e+00, 0.7347990252582277e+00, 0.4053391427425183e+00, 
0.3415476013970046e+00, 0.2769292856200630e+00, 0.2086988709391624e+00, 0.7594980741000955e-01, 0.5721582845751132e-01, 0.5152297043277422e-01, 0.6186350115045150e-02, 0.5774958135565486e-05, 0.6722096271457334e-14, 0.6635533119216432e-06, 0.1048982691930868e-05, 0.1289579288306139e+01, 0.0000000000000000e+00, 0.9172850657907766e-08, 0.6527900227304331e-08, 0.1171599632002814e+01, 0.0000000000000000e+00, 0.3386861611239913e-06, 0.1115072165646295e-06, 0.9950381700653156e+00, 0.1044578056257455e-24, 0.5793689136913799e-11, 0.1625904890507595e-10, 0.9008548767119643e+00, 0.8883435102679349e-21, 0.3220671712964998e-09, 0.8453946299986560e-09, 0.7944341357698450e+00, 0.1653452457343241e-26, 0.1779044076283400e+02, 0.1779044076283399e+02, 0.1120000000000000e+03, 0.3267573696145125e+01, 0.3571428571428572e+01, 0.1211740269624600e-06, 0.4053391427425183e+00, 0.5298753945086544e-14, 0.1515410194583574e-06, 0.2069357347527298e-06, 0.3415476013970046e+00, 0.0000000000000000e+00, 0.2579333883676927e-05, 0.8435987645777294e-05, 0.2769292856200630e+00, 0.2465190328815662e-30, 0.1658852164453815e-08, 0.3207072615633299e-09, 0.2086988709391624e+00, 0.2017246551328580e-19, 0.6091457019925236e-05, 0.6297972233022177e-05, 0.7594980741000955e-01, 0.2553131970482608e-15, 0.2736366650719638e-04, 0.2348362771705736e-04, 0.5721582845751132e-01, 0.1141311335899276e-23, 0.6030351005218485e-04, 0.3385365250818500e-04, 0.5152297043277422e-01, 0.0000000000000000e+00, 0.4442856906399686e-04, 0.3980460155412817e-04, 0.6186350115045150e-02, 0.2711709361697228e-30, 0.1322896557202734e-03, 0.3209571887096705e-03, 0.5774958135565486e-05, 0.0000000000000000e+00, 0.9248295446812529e-03, 0.1317994001905306e-02}, + info: 0, + }, + { + z: []float64{0.7669589114699304e-01, 0.2959120838012549e+00, 0.8617075527885690e+00, 0.3774472194278293e+00, 0.7430204868341380e+00, 0.3337487204025202e-01, 0.2115072947611989e+00, 0.8195300149220009e+00, 0.4610153087687133e+00, 0.1049580467270205e+00, 0.6553761031646719e+00, 0.2044425657813468e+00, 0.1419536829746254e+00, 0.8338399989758465e+00, 0.9917105482281005e+00, 0.3698082853107363e+00, 0.5656210986926330e+00, 0.5849265049217818e+00, 0.9018145207330325e+00, 0.7089264704350354e+00, 0.2562038341655152e+00, 0.1524378342317656e-01, 0.9206210351037002e+00, 0.1877259832276945e+00, 0.3637451600309541e+00, 0.9979742594017312e+00, 0.8919698496914060e+00, 0.3716780472994426e+00, 0.3142548907151147e+00, 0.4170439734847903e+00, 0.3197415298896636e+00, 0.7285365914169133e+00, 0.5784968714882697e+00, 0.6831919512327895e+00, 0.6057174197537164e+00, 0.6413693987683566e-01, 0.8827799219498907e+00, 0.4192891003797022e+00, 0.6040167945472836e+00, 0.6469271762278970e+00, 0.7359461974470041e+00, 0.5539488596393002e+00, 0.4023966166720030e+00, 0.9915288950117843e+00, 0.3043860170301459e+00, 0.4917889743094436e+00, 0.1179472550216760e+00, 0.9125202427370891e+00, 0.8786441005384636e+00, 0.4634730997209351e+00, 0.1080276946115265e+00, 0.6187727001119313e+00, 0.6709766557170562e+00, 0.3818949179452977e+00, 0.8906457783485427e+00, 0.8010289089804480e+00, 0.6931910498827129e+00, 0.8914072225833563e+00, 0.3822463769530011e+00, 0.5898102885291842e+00, 0.1092049166271940e+00, 0.8496011938807149e+00, 0.2304394290731624e+00, 0.5949006075515944e+00, 0.3290638194228065e+00, 0.1971699984758039e+00, 0.7443144303888384e+00, 0.4653555333092598e+00, 0.8207632269304853e+00, 0.4152482306441556e+00, 0.1689785233215235e+00, 0.1721084345877374e+00, 0.3931596107353640e+00, 0.5403345228237123e+00, 0.6387555392172577e+00, 
0.1804790096977364e+00, 0.5783025205396422e+00, 0.7395837791384520e+00, 0.5701597209798811e+00, 0.1952734055752668e+00, 0.9661114908130567e+00, 0.4893113195434146e+00, 0.6562980654604210e+00, 0.1879394621701417e+00}, + n: 21, + zOut: []float64{0.2390558120678448e+01, 0.2326196956724044e+01, 0.2071676597874671e+01, 0.1995424877356871e+01, 0.1973795189776371e+01, 0.1728273264358535e+01, 0.1492322130169846e+01, 0.1216694895544233e+01, 0.1149617898796123e+01, 0.1007011094632661e+01, 0.9207056739913982e+00, 0.7404938152461292e+00, 0.6082352700464410e+00, 0.5965144149455339e+00, 0.2276577044879657e+00, 0.1776272681811473e+00, 0.1662286875439616e+00, 0.8310804130734020e-01, 0.4604735683437314e-01, 0.1806926893972028e-01, 0.1257133375345320e-01, 0.3600386373101646e-15, 0.6280777483216298e-03, 0.1060573000233681e-02, 0.1492322130169846e+01, 0.1292469707114105e-25, 0.5799349861429450e-06, 0.5584709508928944e-06, 0.1216694895544233e+01, 0.2047280287874868e-18, 0.1299955932331242e-09, 0.8534229075832313e-10, 0.1149617898796123e+01, 0.5088714046955349e-16, 0.6001732451841635e-09, 0.4081989685050919e-09, 0.1007011094632661e+01, 0.5578719308559292e-21, 0.1498995168909837e-07, 0.5625367692005608e-08, 0.9207056739913982e+00, 0.2728742825015995e-21, 0.2094882986118927e+02, 0.2094882986118927e+02, 0.9800000000000000e+02, 0.3004535147392290e+01, 0.4081632653061225e+01, 0.6223967900246754e-04, 0.6082352700464410e+00, 0.4038967834731580e-26, 0.1122996207538654e-05, 0.1754126012223728e-05, 0.5965144149455339e+00, 0.4386207914826929e-19, 0.1334483593164094e-04, 0.6982705884252015e-05, 0.2276577044879657e+00, 0.3148538924591216e-18, 0.9085150621800872e-04, 0.4628114404518947e-04, 0.1776272681811473e+00, 0.1149284743081177e-19, 0.3934684725502468e-05, 0.5044161242313764e-05, 0.1662286875439616e+00, 0.3944304526105059e-30, 0.1149340461723492e-04, 0.8510588625472410e-05, 0.8310804130734020e-01, 0.1540743955509789e-32, 0.1285467894756909e-02, 0.9239857033877289e-03, 0.4604735683437314e-01, 0.2169367489357782e-29, 0.2808730938050547e-03, 0.3455059327639833e-03, 0.1806926893972028e-01, 0.4614836295542919e-28, 0.7683442768713957e-06, 0.1846813906304985e-05, 0.1257133375345320e-01, 0.2538554393001216e-26, 0.4028502005942143e-04, 0.8466712427867894e-04}, + info: 0, + }, + { + z: []float64{0.5568197491282034e+00, 0.7338530212513171e+00, 0.1711270247791036e+00, 0.6696990680288049e+00, 0.1107644593582661e+00, 0.1487844153251054e+00, 0.6221478836712087e+00, 0.3739707210550620e+00, 0.6142936216832375e+00, 0.4504419047617665e+00, 0.1390832371836795e+00, 0.9602056283222130e+00, 0.4128383897877478e+00, 0.6202590221465013e+00, 0.5716294881431405e+00, 0.7009876531280159e+00, 0.8556346552408018e+00, 0.4300043005510307e+00, 0.5625488786064613e+00, 0.5236571943623558e+00, 0.2035297706440273e+00, 0.5324677179555473e+00, 0.9854023908952125e+00, 0.8088580870995794e+00, 0.7902887218423563e+00, 0.4196418643524230e+00, 0.5294914665193529e+00, 0.5947417442754066e-01, 0.2753919335549286e+00, 0.8807083336864044e+00, 0.6016279529290567e+00, 0.4144655693858035e-01, 0.4808953029705748e+00, 0.8506686680287934e-01, 0.2145404015834380e+00, 0.3020133878086849e+00, 0.8967140938263458e+00, 0.5344538485865815e+00, 0.2536120629120923e+00, 0.2552452828610304e+00, 0.9211096671641958e+00, 0.9028760680202415e+00, 0.3005599522329846e+00, 0.1197919551028395e+00, 0.3772579707039786e+00, 0.4157715274081910e+00, 0.2203607291065889e+00, 0.1084188329562713e+00, 0.5463193166123409e+00, 0.8107359194268960e+00, 0.6676190072779817e+00, 0.6711454730587799e+00, 
0.6360708014875704e-01, 0.8242026716736568e+00, 0.3687006444230088e+00, 0.2216898043026083e-01, 0.4978612409817640e+00, 0.3144452507777135e+00, 0.3412289603988730e+00, 0.3266626310182044e+00, 0.4581662376370765e+00, 0.6985218227047190e+00, 0.6594479039269319e-01, 0.3938130402504401e+00, 0.9907371819490932e+00, 0.2478499678343852e+00, 0.3565435772734814e+00, 0.8420455744018336e+00, 0.2692247454903065e+00, 0.7327570909626056e+00, 0.1752776205164243e+00, 0.3569190164542581e+00, 0.2122621782757903e+00, 0.2056257129895300e+00, 0.6641092684756426e+00, 0.5386917539530447e+00, 0.8420713652275945e+00, 0.3587226239611645e+00, 0.9561844063661247e+00, 0.9126272291387975e+00, 0.5768542567999925e+00, 0.1972168939311342e+00, 0.5763304496236371e+00, 0.9478337554439876e+00}, + n: 21, + zOut: []float64{0.2593664459139222e+01, 0.2338172956520739e+01, 0.1783072679058981e+01, 0.1768637196043293e+01, 0.1688808722758796e+01, 0.1535694950136988e+01, 0.1488691522387158e+01, 0.1290354539321546e+01, 0.1187570436840380e+01, 0.1077679082742636e+01, 0.7925423484101771e+00, 0.6549022645335129e+00, 0.6315653598883190e+00, 0.5184441284206909e+00, 0.3791491409939438e+00, 0.3105722740860607e+00, 0.1128189367670211e+00, 0.7036440937731858e-01, 0.4999153992819697e-01, 0.2730914559941030e-01, 0.7241089204639656e-03, 0.7344040350490801e-18, 0.1828200693350630e-07, 0.2226187377323797e-07, 0.1488691522387158e+01, 0.4586948101010510e-17, 0.3930142187345605e-07, 0.5206893736347286e-07, 0.1290354539321546e+01, 0.6658190007940994e-20, 0.1499933495859733e-06, 0.7910043275590841e-07, 0.1187570436840380e+01, 0.2370536763713858e-16, 0.1192422432592963e-05, 0.6065280015592916e-06, 0.1077679082742636e+01, 0.6803770690793804e-23, 0.5360141282897325e-04, 0.3254990522417721e-04, 0.7925423484101771e+00, 0.8404284270509473e-23, 0.2030073020187486e+02, 0.2030073020187486e+02, 0.9700000000000000e+02, 0.2975056689342404e+01, 0.1030927835051546e+01, 0.1762395614510388e-04, 0.6315653598883190e+00, 0.2034005362457094e-16, 0.3178492515415974e-08, 0.1141868824523976e-08, 0.5184441284206909e+00, 0.1631704231140345e-20, 0.1154759308961600e-04, 0.3555504775624820e-05, 0.3791491409939438e+00, 0.9251916923707874e-19, 0.1302644751785016e-04, 0.1874642271188771e-04, 0.3105722740860607e+00, 0.1235671721938193e-25, 0.8697586673438179e-05, 0.7595546859022038e-05, 0.1128189367670211e+00, 0.1175733067685735e-18, 0.2174688695641498e-03, 0.1411064498403114e-03, 0.7036440937731858e-01, 0.5916456789157589e-30, 0.4997004849851516e-03, 0.7625846015345374e-03, 0.4999153992819697e-01, 0.3710108611279200e-20, 0.2898431160817185e-04, 0.2329024376647231e-03, 0.2730914559941030e-01, 0.1593079290931123e-17, 0.5731904499848677e-04, 0.7071520517918782e-04, 0.7241089204639656e-03, 0.1043820749788443e-25, 0.2096656950463827e-03, 0.4915700720935459e-03}, + info: 0, + }, + { + z: []float64{0.2129092067945103e+00, 0.6174796583507793e+00, 0.2209275310088966e+00, 0.1770681976597738e+00, 0.8468210663920229e+00, 0.7900600194799612e+00, 0.8319584509547915e+00, 0.8077183726155964e+00, 0.7964773509287093e+00, 0.2479492923952727e+00, 0.5169395370002006e-01, 0.6312063121285433e+00, 0.5688486960732374e+00, 0.3049379579822397e+00, 0.3779887662042721e+00, 0.4551165943302187e+00, 0.8807128836121972e+00, 0.1333615670826408e+00, 0.6901278826487529e+00, 0.8360350720177558e+00, 0.4190492004641316e+00, 0.9067077556412378e+00, 0.9762584494767094e+00, 0.9959763915912888e+00, 0.8503724779590973e+00, 0.6671116075685212e+00, 0.8841807167789617e+00, 0.6917331852931945e+00, 0.3414398718690443e+00, 
0.2211364241075178e+00, 0.8057250135329493e+00, 0.4405475768508057e+00, 0.9321382353755537e+00, 0.6976358515081519e+00, 0.3254605598548291e+00, 0.1144237265035558e+00, 0.8547099978727912e-01, 0.3131314226614652e+00, 0.6940029038044814e+00, 0.2327469211945017e+00, 0.3523896044562020e+00, 0.6443116162172926e+00, 0.5382708884344491e+00, 0.9378966459649967e+00, 0.7274262519928346e+00, 0.4882092650189528e+00, 0.4497457043638882e+00, 0.8803511251742836e+00, 0.1077837527475992e+00, 0.4760275900323754e+00, 0.9725944612716640e+00, 0.4013370227296387e+00, 0.8188706727711300e+00, 0.7549128555022213e+00, 0.3045373312490215e+00, 0.9776368127163177e+00, 0.5516094182757485e+00, 0.8449472155130459e+00, 0.9309695118211208e+00, 0.6985346927019656e+00, 0.3270836933831586e+00, 0.2305805980953167e+00, 0.9135598850313159e+00, 0.2886818807914165e+00, 0.6607756604738726e+00, 0.7992086772120486e+00, 0.9191714969823802e+00, 0.9956222713515444e+00, 0.1646632133753003e+00, 0.8114783440114911e+00, 0.9650128806412454e+00, 0.1382218488636167e-01, 0.8464890579713791e+00, 0.2866238720970538e+00, 0.4485743907736028e-01, 0.5384705384174762e+00, 0.2996807710312754e-01, 0.7020716858392958e+00, 0.6280049430389886e+00, 0.2750308929159380e+00, 0.1648004007672321e-01, 0.8720570971091830e+00, 0.9962373992422624e+00, 0.8022759164194093e-01}, + n: 21, + zOut: []float64{0.3179557858974794e+01, 0.2694788878259719e+01, 0.2281326093492730e+01, 0.2196001939032204e+01, 0.1828355236773963e+01, 0.1586247682528960e+01, 0.1454772213395289e+01, 0.1286809233158695e+01, 0.1269332746846940e+01, 0.1099355321362049e+01, 0.9971216943768625e+00, 0.8310914612054856e+00, 0.4047131081238095e+00, 0.3869245862172090e+00, 0.3328473924133467e+00, 0.2908704078018756e+00, 0.1880200745280021e+00, 0.5784266149949692e-01, 0.4139216712761701e-01, 0.1651732519409763e-01, 0.3149646326524349e-02, 0.0000000000000000e+00, 0.5227593374295754e-11, 0.1202138413496143e-10, 0.1454772213395289e+01, 0.1393199790273629e-16, 0.1121878363020271e-09, 0.5353121918082171e-10, 0.1286809233158695e+01, 0.8659547037664508e-24, 0.1645814283637519e-08, 0.8601359367531547e-09, 0.1269332746846940e+01, 0.2823844361652584e-22, 0.5929700276768749e-08, 0.1126902625809321e-07, 0.1099355321362049e+01, 0.2358945595467410e-22, 0.2764360194400850e-05, 0.1553001254800959e-05, 0.9971216943768625e+00, 0.4450651748190101e-21, 0.2242703772863968e+02, 0.2242703772863967e+02, 0.1000000000000000e+03, 0.3015873015873016e+01, 0.3000000000000000e+01, 0.2632919892231646e-04, 0.4047131081238095e+00, 0.5820888909768840e-10, 0.2209233784503674e-04, 0.4136287504408153e-04, 0.3869245862172090e+00, 0.6462348535570008e-26, 0.3088379129783557e-03, 0.1528857353804532e-03, 0.3328473924133467e+00, 0.5549599819233453e-15, 0.8893453420684614e-08, 0.8741044673362129e-07, 0.2908704078018756e+00, 0.2289589891313465e-25, 0.5532514209995071e-04, 0.2183152999074702e-03, 0.1880200745280021e+00, 0.6113672015462841e-28, 0.9851023787951559e-03, 0.2769770083415459e-02, 0.5784266149949692e-01, 0.3571962178840741e-24, 0.3360295855593957e-02, 0.3874576703777444e-02, 0.4139216712761701e-01, 0.1555002616371658e-25, 0.3047908004675713e-02, 0.4283880536865950e-02, 0.1651732519409763e-01, 0.1498533807074276e-21, 0.9218134151523294e-06, 0.6993690185531638e-05, 0.3149646326524349e-02, 0.5476587948406352e-25, 0.9434027266146764e-03, 0.1032678112098284e-02}, + info: 0, + }, + { + z: []float64{0.5325973817915391e+00, 0.8075384328404875e+00, 0.2063920576624930e+00, 0.4895050697482526e+00, 0.3224859392283662e+00, 0.4597919629142447e+00, 
0.5480240110168966e-01, 0.5228709929272914e+00, 0.9956836568223112e+00, 0.3978480622803672e+00, 0.9124816118920143e-01, 0.3247027355683270e+00, 0.9224658967837341e+00, 0.9984579111484309e+00, 0.8533112174943273e+00, 0.8907006920892125e+00, 0.3659539504394835e+00, 0.5561036191465061e+00, 0.7570794287369270e+00, 0.7781602931894585e+00, 0.8583979385494303e+00, 0.5883490719908564e+00, 0.2958788793535505e+00, 0.8441503484168636e+00, 0.7171074610327245e+00, 0.6324186559935309e+00, 0.6889002812298057e+00, 0.9679449699589238e-01, 0.8071604187540067e+00, 0.5036464295967858e+00, 0.3075050973746345e+00, 0.4354181299061508e+00, 0.5397482510302705e+00, 0.3266316370831254e+00, 0.7127302469392831e+00, 0.7822329043112050e+00, 0.1787867076882754e+00, 0.5059399114486356e+00, 0.1635357104384569e+00, 0.7923616792404551e+00, 0.8527619409672080e+00, 0.6624868870738707e+00, 0.6395827198572863e+00, 0.5045303635733072e+00, 0.5207053335438128e-01, 0.3872927271413512e+00, 0.8590236531191382e+00, 0.8644737695336893e+00, 0.5447620155822054e+00, 0.3420775682173337e+00, 0.1787889858336624e+00, 0.2962040723342502e+00, 0.1197091589896203e+00, 0.2582729465177200e+00, 0.8564224654785235e+00, 0.1003255861450059e+00, 0.5792751794645187e+00, 0.3553877787422808e+00, 0.3515051629979192e+00, 0.7026347035497048e+00, 0.7851727410421285e+00, 0.9451470000488936e+00, 0.1482591966327134e+00, 0.8035088707163867e+00, 0.8018983228501475e-02, 0.5992223740738118e+00, 0.3369698862429809e+00, 0.9971168486495201e+00, 0.7449267587097297e+00, 0.5929357935363798e+00, 0.8575900212486389e+00, 0.8115297023854162e+00, 0.3380359989630263e+00, 0.8328174310878843e+00, 0.8255713836908419e+00, 0.7399149789721748e+00, 0.1725333812438860e+00, 0.6828663265380140e+00, 0.6234367510076171e+00, 0.5628070366267671e+00, 0.8501068267959022e-01, 0.9315797050119701e+00, 0.1842029704669090e+00, 0.9504814303233714e+00}, + n: 21, + zOut: []float64{0.2923425454171414e+01, 0.2535311958925162e+01, 0.2192080092128743e+01, 0.1925146455902099e+01, 0.1841700496719334e+01, 0.1753280727277585e+01, 0.1582442080550360e+01, 0.1581069533140902e+01, 0.1420113611638973e+01, 0.1070153147168429e+01, 0.9980373856163754e+00, 0.9531244298503998e+00, 0.7077665812605668e+00, 0.5659990522281487e+00, 0.3146390113583965e+00, 0.2516605740660456e+00, 0.1663945631562664e+00, 0.1260010860507715e+00, 0.4255237190875141e-01, 0.7100815132372204e-02, 0.1566331927046113e-03, 0.4122332130840440e-22, 0.4572750287882177e-06, 0.8150029788456509e-06, 0.1582442080550360e+01, 0.6246970091107327e-19, 0.1184058245159526e-05, 0.5957129004529113e-06, 0.1581069533140902e+01, 0.2712893915232508e-21, 0.4684400026881307e-05, 0.9374804625040887e-05, 0.1420113611638973e+01, 0.4814409269321697e-22, 0.2425457953812455e-04, 0.3059673263223507e-04, 0.1070153147168429e+01, 0.4457872866361589e-11, 0.2849702090992769e-05, 0.4079444611357521e-05, 0.9980373856163754e+00, 0.2148022614677702e-19, 0.2295815606144379e+02, 0.2295815606144380e+02, 0.9700000000000000e+02, 0.2981859410430839e+01, 0.2061855670103093e+01, 0.4336961109868880e-06, 0.7077665812605668e+00, 0.3355216099479021e-19, 0.2178215603335118e-03, 0.1596478432727446e-03, 0.5659990522281487e+00, 0.3777948407501058e-19, 0.1732350582817826e-03, 0.1687341431970761e-03, 0.3146390113583965e+00, 0.4215672677501087e-24, 0.8708765796225133e-03, 0.5753505295482449e-03, 0.2516605740660456e+00, 0.8872339250080977e-20, 0.2809442395187102e-02, 0.2330568324677941e-02, 0.1663945631562664e+00, 0.1848892746611746e-31, 0.7424955400801435e-06, 0.1353946060853610e-06, 
0.1260010860507715e+00, 0.7099748146989106e-27, 0.5320431162642944e-03, 0.1085271179151602e-03, 0.4255237190875141e-01, 0.2808116620904140e-21, 0.1709028753951538e-02, 0.8528114742759547e-03, 0.7100815132372204e-02, 0.4930380657631324e-31, 0.2904056487242609e-02, 0.3026916823887611e-02, 0.1566331927046113e-03, 0.0000000000000000e+00, 0.2549645590833870e-02, 0.2592045976400073e-02}, + info: 0, + }, + { + z: []float64{0.6141085085904113e+00, 0.3421028526671319e+00, 0.2523050636037050e-01, 0.6894939888024587e+00, 0.1106696902059259e+00, 0.3232796216228777e+00, 0.5026462674631121e+00, 0.4061906185674803e+00, 0.2489870821257593e+00, 0.5655528330838598e+00, 0.8215445521378404e+00, 0.6847122058387792e+00, 0.1058838690465073e+00, 0.3150742731806708e+00, 0.6299058795873502e-01, 0.2792122534089967e+00, 0.6156682980944264e+00, 0.6784924553414444e+00, 0.5548029179057333e+00, 0.9875334352131669e+00, 0.7114388615166174e+00, 0.4680838251435693e+00, 0.9939378516921551e+00, 0.9317334719497590e+00, 0.7166727913665860e+00, 0.9133735504519750e+00, 0.6652039093977162e+00, 0.8687642647047388e+00, 0.5616945762819415e+00, 0.3985759546461669e+00, 0.7431799484571856e+00, 0.7126935860242257e+00, 0.1006984098734091e+00, 0.3341210154632034e+00, 0.8752290131259591e+00, 0.9044047683690323e+00, 0.2748574881470656e+00, 0.9153414825361147e+00, 0.1879700367171477e-01, 0.4139829057070108e+00, 0.6163962814716032e+00, 0.4758310984958897e+00, 0.7283265374189019e-01, 0.4293121596195046e+00, 0.1627494487778676e+00, 0.6477819718948226e+00, 0.1769525517070647e-01, 0.8315284238294101e+00, 0.3951374239126071e-01, 0.8607842403091001e+00, 0.4542147501441236e+00, 0.7533183963616871e+00, 0.3860688952521878e+00, 0.7194066659377851e+00, 0.5625319388225433e+00, 0.2666855367146020e+00, 0.1952117588514313e+00, 0.4351982558470283e+00, 0.5735131371596182e+00, 0.6135067797741378e-01, 0.6627925797553079e+00, 0.5958408703801275e+00, 0.1272760770553625e+00, 0.9764806486481753e+00, 0.3738230266231765e+00, 0.8498786384730982e+00, 0.9509621583287799e+00, 0.5442964253426497e+00, 0.1274761736897801e+00, 0.8749459887021185e+00, 0.5319902581507450e+00, 0.4050330112540623e-01, 0.4679445276719574e-01, 0.6398654173496683e+00, 0.1619398995392787e+00, 0.2834692561637309e-01, 0.7369067773846130e+00, 0.2655208002136908e+00, 0.1902643003388997e+00, 0.7054378485643029e+00, 0.8869999325574263e+00, 0.1700725746174213e+00, 0.7638558771240449e+00, 0.3965145662986846e+00}, + n: 21, + zOut: []float64{0.3026266675500349e+01, 0.2485098580281992e+01, 0.2208517136068298e+01, 0.2089817225148914e+01, 0.1987605159106218e+01, 0.1671782272803828e+01, 0.1288709839574181e+01, 0.1226108028801468e+01, 0.1094715872236073e+01, 0.9893293804315401e+00, 0.9470004635866569e+00, 0.8354737908875075e+00, 0.7678170961167213e+00, 0.5139691139127377e+00, 0.3402258729602133e+00, 0.2640682732152568e+00, 0.1814642316568930e+00, 0.9210283120697343e-01, 0.5017295169510767e-01, 0.1311272184039149e-01, 0.2605821149103687e-06, 0.8821141249794507e-26, 0.1703621403540070e-06, 0.1022946688596755e-06, 0.1288709839574181e+01, 0.0000000000000000e+00, 0.7771811135093639e-06, 0.1129025861285597e-05, 0.1226108028801468e+01, 0.8850832554089860e-22, 0.6461044606060110e-07, 0.3214361230688769e-07, 0.1094715872236073e+01, 0.5589172803550431e-19, 0.3128365184192157e-08, 0.1088448901615084e-07, 0.9893293804315401e+00, 0.4249640396990678e-22, 0.2862531707476509e-05, 0.1047704834723032e-05, 0.9470004635866569e+00, 0.2336393132634786e-16, 0.2207335777761344e+02, 0.2207335777761343e+02, 0.1020000000000000e+03, 
0.3043083900226757e+01, 0.9803921568627451e+00, 0.1031792375203169e-03, 0.7678170961167213e+00, 0.8633839815190009e-18, 0.1747705279299585e-04, 0.1213546251050483e-04, 0.5139691139127377e+00, 0.2016356967855187e-17, 0.7704963297163309e-04, 0.2063636533685701e-03, 0.3402258729602133e+00, 0.5916456789157589e-30, 0.2417908304509407e-03, 0.2207145059751390e-03, 0.2640682732152568e+00, 0.1479114197289397e-30, 0.1687694376453652e-02, 0.8888286778685235e-03, 0.1814642316568930e+00, 0.1064934658613210e-15, 0.3591290150683059e-02, 0.7604711042910346e-02, 0.9210283120697343e-01, 0.2042102137240287e-23, 0.1463034917919357e-02, 0.1882622569903463e-02, 0.5017295169510767e-01, 0.2551739098626718e-22, 0.3961160948815250e-03, 0.9438210467320612e-03, 0.1311272184039149e-01, 0.1972152263052530e-29, 0.6693623055505279e-03, 0.9046122259092353e-03, 0.2605821149103687e-06, 0.3100321587303656e-14, 0.3978735902689679e-02, 0.4253573989104679e-02}, + info: 0, + }, + { + z: []float64{0.8778106868894964e+00, 0.4777882175260769e+00, 0.1820603962716897e+00, 0.7891460918977841e+00, 0.7131176407472852e+00, 0.2166796106605611e+00, 0.7093860568912320e+00, 0.6747559124413653e+00, 0.1755802732664898e+00, 0.2538529139601842e-01, 0.3839736304139417e+00, 0.4972278527690542e+00, 0.2219918935874782e+00, 0.6587468361459490e+00, 0.6959875726535614e+00, 0.8785259347357933e+00, 0.4385001119617188e+00, 0.2992124106963234e-01, 0.9853525010355352e+00, 0.1842422327604198e-01, 0.7939453301916002e+00, 0.8502867854292200e+00, 0.6683895767567380e+00, 0.8502503508041696e+00, 0.2705991244761674e-02, 0.2273590703395093e+00, 0.1996606140173212e+00, 0.5828845765712893e+00, 0.7928614954547730e+00, 0.1675870051229429e+00, 0.3078809727828886e+00, 0.6073537805183471e-01, 0.3498986727103111e+00, 0.4266607326484094e+00, 0.7790595486765419e+00, 0.6274781693151706e+00, 0.3468808495278470e+00, 0.4002308549977796e+00, 0.8927740819756569e+00, 0.5380209790008849e+00, 0.9944530064668308e+00, 0.8382845348910617e+00, 0.6578833875204938e+00, 0.6784609218227333e-01, 0.9880398702345439e-01, 0.1591591396324092e+00, 0.3625625292925116e-01, 0.7834491156107602e+00, 0.8935128283958205e-01, 0.6534249936105881e+00, 0.9608003610697393e+00, 0.9122649849010867e+00, 0.3064782192618438e+00, 0.6165812021330105e+00, 0.3942978367050161e+00, 0.2389206118855702e+00, 0.4357310309586615e+00, 0.6366539061757281e+00, 0.2487697657706114e+00, 0.3158114775243555e+00, 0.4359459725257834e+00, 0.8521387333399649e+00, 0.7376171834812397e+00, 0.7198918826028659e+00, 0.3787355957929660e+00, 0.5132345781976363e+00, 0.5880413889667653e+00, 0.8394688090887562e+00, 0.3673771632022454e+00, 0.1291928261630060e+00, 0.6552015571219838e+00, 0.7918941075374089e+00, 0.3759434916876330e+00, 0.2131514444401432e+00, 0.5997464643577372e+00, 0.8626895671971565e+00, 0.1962050188754781e+00, 0.6291330853690040e+00, 0.6873926281549517e+00, 0.8949666870308315e-01, 0.1384684788210775e+00, 0.8959871983479050e+00, 0.5967626810459625e+00, 0.6838507108780668e+00}, + n: 21, + zOut: []float64{0.2344484145674817e+01, 0.2205594465347172e+01, 0.2180337141043826e+01, 0.1936893457336740e+01, 0.1819297464222486e+01, 0.1595314798543036e+01, 0.1497965290687764e+01, 0.1192635586559562e+01, 0.1109297670462514e+01, 0.1021894664697417e+01, 0.8474592078637809e+00, 0.7996737801504824e+00, 0.5357931354846659e+00, 0.5077230661246940e+00, 0.4005808269588222e+00, 0.2350964781455809e+00, 0.1732854339186359e+00, 0.5510322151786835e-01, 0.3865442753785928e-01, 0.1308309165665133e-01, 0.1986637888070920e-03, 0.1267902442921281e-19, 
0.1632713022477316e-10, 0.9197304681744396e-11, 0.1497965290687764e+01, 0.6540946849634405e-22, 0.1644600422568816e-11, 0.2569557641484267e-11, 0.1192635586559562e+01, 0.1886570321166912e-23, 0.1155544514329289e-09, 0.3178796095468434e-10, 0.1109297670462514e+01, 0.2958228394578794e-30, 0.1814581278982536e-06, 0.5763166468812303e-07, 0.1021894664697417e+01, 0.1648133134119960e-16, 0.5817023408243420e-07, 0.2381874475610022e-07, 0.8474592078637809e+00, 0.5828670879282072e-15, 0.2051036601772319e+02, 0.2051036601772318e+02, 0.1250000000000000e+03, 0.3639455782312925e+01, 0.8800000000000001e+01, 0.1964091724818299e-07, 0.5357931354846659e+00, 0.3518203713570956e-14, 0.1200151291154190e-07, 0.2251426423705210e-07, 0.5077230661246940e+00, 0.6882401190382613e-24, 0.1803872146382089e-06, 0.4763453660743515e-06, 0.4005808269588222e+00, 0.1559915108945621e-18, 0.4978941410546894e-05, 0.5275156660739611e-05, 0.2350964781455809e+00, 0.1004043408045437e-17, 0.5647304839276210e-04, 0.2873291365503010e-04, 0.1732854339186359e+00, 0.2485883728081419e-23, 0.9907264920144563e-04, 0.1293007080503375e-03, 0.5510322151786835e-01, 0.0000000000000000e+00, 0.9623294785827948e-04, 0.7898847596644510e-04, 0.3865442753785928e-01, 0.0000000000000000e+00, 0.2279226510196514e-03, 0.3149968919986460e-03, 0.1308309165665133e-01, 0.4893402802699089e-27, 0.4145854568122489e-03, 0.3196372933697030e-03, 0.1986637888070920e-03, 0.1930680189793120e-17, 0.2183385452652873e-03, 0.3171631588309268e-03}, + info: 0, + }, + { + z: []float64{0.3053504558052776e+00, 0.2234484687205406e+00, 0.6531459952011753e+00, 0.3122060875846019e+00, 0.9991431676363117e+00, 0.2727966396486101e+00, 0.9325466197866266e+00, 0.9368849148298319e+00, 0.8561644447997885e+00, 0.1054815122266185e+00, 0.4923718332155202e-01, 0.9268095624277023e+00, 0.1888864280722891e+00, 0.7655148720572605e+00, 0.1232767826602820e+00, 0.5903563091717590e+00, 0.9248190356569348e+00, 0.5395288290327440e+00, 0.9028402566412551e+00, 0.9500520891471187e-01, 0.8953008781613688e+00, 0.2933361506740548e+00, 0.8453264440546789e+00, 0.5481580781657376e+00, 0.3587646761786403e+00, 0.5176903549368316e+00, 0.7562422108585272e+00, 0.9675296263022207e+00, 0.8986594456301542e+00, 0.5421338625853966e-01, 0.1201688601088308e+00, 0.6190380352364769e+00, 0.5038255434020116e+00, 0.6721007162690862e+00, 0.5702376374225947e+00, 0.9284095682432886e+00, 0.3695163217482177e+00, 0.6039359484498441e-01, 0.4652604684312921e+00, 0.8651698808632446e-01, 0.7546245127264203e+00, 0.7397909213681499e+00, 0.1212064081570199e+00, 0.3507242515197573e+00, 0.1903823153835104e+00, 0.7217789084869874e-01, 0.4631739813227773e+00, 0.4692891642215747e+00, 0.4583968848602870e+00, 0.1862358222844885e+00, 0.2939086301666586e+00, 0.5739509914073745e+00, 0.2602639918141684e+00, 0.7265362515535626e+00, 0.2180031380994948e+00, 0.1132710387386646e+00, 0.7997129355612416e+00, 0.7503609043894605e+00, 0.7921549516519859e+00, 0.2820213454373731e+00, 0.3757885220124352e+00, 0.7700990985029622e+00, 0.7233316318551536e+00, 0.1305854233773539e+00, 0.7245124401476205e+00, 0.5999815977622437e+00, 0.3886468370487757e+00, 0.5608149286383455e+00, 0.4298380566755162e+00, 0.6436590616913128e+00, 0.3743035898091585e-01, 0.4516061963302198e-01, 0.3504820364503235e+00, 0.4814816352089226e+00, 0.5678245050510763e+00, 0.5711961955150159e+00, 0.9549329198325482e+00, 0.8527165734568565e-01, 0.4185783350739758e-01, 0.5702414021975877e-01, 0.2302271730247050e+00, 0.7442529870754924e+00, 0.5366636699040384e+00, 0.8085683101567075e+00}, + n: 21, + 
zOut: []float64{0.2590499366480712e+01, 0.2541239603587953e+01, 0.2259909426243320e+01, 0.2181782566945434e+01, 0.1886341857097904e+01, 0.1651483296972034e+01, 0.1427705031981996e+01, 0.1248256205386413e+01, 0.1112814709657656e+01, 0.9343995210628773e+00, 0.8379224813887226e+00, 0.8181328249547786e+00, 0.6165308413237942e+00, 0.4939038343854810e+00, 0.4603621358717260e+00, 0.3639045182225083e+00, 0.3137819362841643e+00, 0.1577326270698492e+00, 0.8198100142281199e-01, 0.9578206270736807e-02, 0.4942793252781514e-03, 0.1292469707114106e-22, 0.1980335658106781e-04, 0.1681298419390782e-04, 0.1427705031981996e+01, 0.2584939414228211e-25, 0.8200031816835676e-05, 0.9102296987148495e-05, 0.1248256205386413e+01, 0.4930380657631324e-30, 0.4104488055426074e-09, 0.2576051753309951e-09, 0.1112814709657656e+01, 0.6452508963690269e-19, 0.2339325112754308e-08, 0.4010758969857423e-08, 0.9343995210628773e+00, 0.2041003537989240e-20, 0.4754834042401540e-06, 0.1343807701186157e-06, 0.8379224813887226e+00, 0.2942145729531872e-22, 0.2198875627193616e+02, 0.2198875627193615e+02, 0.1030000000000000e+03, 0.3158730158730159e+01, 0.5825242718446602e+01, 0.2325395492704734e-05, 0.6165308413237942e+00, 0.1475671084242936e-18, 0.4982404846255511e-06, 0.2497087821784679e-06, 0.4939038343854810e+00, 0.1295808538938520e-16, 0.6559194735079054e-05, 0.3408173383041987e-05, 0.4603621358717260e+00, 0.5811344985782108e-17, 0.9559725090946887e-05, 0.1189458663431952e-04, 0.3639045182225083e+00, 0.2913839059670682e-18, 0.4863259217476146e-04, 0.7411068946771063e-04, 0.3137819362841643e+00, 0.2338183723075079e-26, 0.4219303942492233e-03, 0.5983359002845232e-03, 0.1577326270698492e+00, 0.3451266460341927e-30, 0.4370512454739224e-03, 0.2953761682364422e-03, 0.8198100142281199e-01, 0.2482356093574565e-20, 0.7632038538424591e-04, 0.1389505478745866e-03, 0.9578206270736807e-02, 0.1680537859656637e-20, 0.1168521819007177e-01, 0.1200804951998557e-01, 0.4942793252781514e-03, 0.1967697241141140e-19, 0.1184783913560175e-01, 0.1590443390723593e-01}, + info: 0, + }, + { + z: []float64{0.9721612501531746e+00, 0.9226086145236946e+00, 0.6294635256706198e+00, 0.3023176111891041e+00, 0.6049371291087220e+00, 0.2862933293136216e+00, 0.8819078559307578e+00, 0.5258944749310531e+00, 0.1024274135843443e+00, 0.4745865163816484e+00, 0.3569366663070145e+00, 0.5081536020377282e+00, 0.6994675492692721e+00, 0.3184330055525583e+00, 0.4062572718658636e+00, 0.8497577069296732e+00, 0.7686323719242980e+00, 0.8873781649289114e+00, 0.5445815352331720e+00, 0.5465168091072755e+00, 0.2655612328242208e+00, 0.7149116271405493e+00, 0.2779393895548951e+00, 0.1344927254674255e-01, 0.4544708454847078e+00, 0.5332505496545747e-01, 0.3223583007255554e+00, 0.4314145703973596e+00, 0.6194759383350579e+00, 0.9673744275195654e+00, 0.5445933229599376e-01, 0.2359803760372408e+00, 0.4349989237395262e+00, 0.3983243124260842e+00, 0.1549760127081995e+00, 0.6483062457720113e+00, 0.9796384288763583e+00, 0.2088098624466561e-01, 0.7266086220595113e+00, 0.7787182393455840e+00, 0.2714545750078465e+00, 0.3281766266219532e-01, 0.4443206765749519e+00, 0.7385116967559627e+00, 0.4742748966298079e+00, 0.2719801119596308e+00, 0.5535252469642412e+00, 0.6819653774052530e+00, 0.5802197659205470e+00, 0.8706014802153047e+00, 0.7899732601913489e-01, 0.6149130585720997e+00, 0.1184080138409910e+00, 0.7335155686110397e+00, 0.3161056790243246e+00, 0.4751009230144272e+00, 0.6074228340613933e+00, 0.1834459697241099e+00, 0.5602196728537751e+00, 0.6036792416269905e+00, 0.4996379576165632e+00, 
0.5762117563802562e+00, 0.4313302593954876e+00, 0.2357090997139660e+00, 0.5266696040444221e+00, 0.4943568418910921e+00, 0.5194970579745682e+00, 0.8729901711287622e+00, 0.1253423978225398e+00, 0.3249303186427334e+00, 0.1476841747547597e+00, 0.8115004890141919e+00, 0.3358761522855614e+00, 0.6267425544884889e+00, 0.8684373664078195e+00, 0.3410509649937432e+00, 0.1223171934609151e+00, 0.6692100819077175e+00, 0.8714060240466885e+00, 0.8435328911476530e+00, 0.3613173587051273e+00, 0.5970888673100956e+00, 0.4814113673780336e+00, 0.9518076426657822e+00}, + n: 21, + zOut: []float64{0.2554276088974017e+01, 0.2353122577016648e+01, 0.1866240795386955e+01, 0.1792222013488891e+01, 0.1776522631286352e+01, 0.1671843747376224e+01, 0.1608565757744856e+01, 0.1413448668253747e+01, 0.9978397039940720e+00, 0.9776727877490632e+00, 0.8701050228770406e+00, 0.6522479060818760e+00, 0.6295532782699175e+00, 0.4076513128864766e+00, 0.3045303763029391e+00, 0.2026925943270836e+00, 0.1597713759311400e+00, 0.1149331809326307e+00, 0.5501161781795570e-01, 0.3472328989596482e-02, 0.1615352262160075e-02, 0.2261810287929078e-15, 0.1434223756681676e-21, 0.1189989751754647e-20, 0.1608565757744856e+01, 0.1068494074045434e-17, 0.1018285523195584e-14, 0.1812446834981097e-15, 0.1413448668253747e+01, 0.8361925595342725e-26, 0.5806129122189708e-11, 0.1848580748818003e-11, 0.9978397039940720e+00, 0.5222833230866609e-21, 0.9842396145073251e-07, 0.4923018478143697e-07, 0.9776727877490632e+00, 0.9540591325261466e-21, 0.1504532523487471e-05, 0.7619936176069947e-06, 0.8701050228770406e+00, 0.2405710216561998e-25, 0.2041333911794964e+02, 0.2041333911794964e+02, 0.1050000000000000e+03, 0.3108843537414966e+01, 0.6666666666666667e+01, 0.1010063659896995e-05, 0.6295532782699175e+00, 0.2958228394578794e-30, 0.7231781167780556e-06, 0.8027785243996724e-06, 0.4076513128864766e+00, 0.2327139670401985e-28, 0.4311361064418766e-06, 0.4365718303993244e-06, 0.3045303763029391e+00, 0.3511198893363720e-17, 0.5813968676396444e-06, 0.4990623034093460e-06, 0.2026925943270836e+00, 0.5787194672316798e-22, 0.1414410322798341e-05, 0.1084116171450400e-05, 0.1597713759311400e+00, 0.3368436065293720e-27, 0.4438807230072854e-05, 0.3295442179394680e-05, 0.1149331809326307e+00, 0.3973698114522315e-22, 0.1450590122472916e-04, 0.1074561246336364e-04, 0.5501161781795570e-01, 0.1153335343143272e-20, 0.9954779132989017e-07, 0.6305695341872095e-07, 0.3472328989596482e-02, 0.7183968878891587e-15, 0.2465868832870137e-03, 0.3524118878925340e-03, 0.1615352262160075e-02, 0.1517768381645227e-26, 0.5410549418889337e-03, 0.6396403869804795e-03}, + info: 0, + }, + { + z: []float64{0.9945523629868341e+00, 0.6311483525653909e+00, 0.7029190400645571e+00, 0.6452581068575963e+00, 0.9353553331334358e+00, 0.1547193049011500e+00, 0.5087511216875095e+00, 0.8821879483245458e+00, 0.5873463835171173e+00, 0.3391476900726194e+00, 0.3121839954924539e+00, 0.2777103145160921e+00, 0.2404463284577694e+00, 0.9114535183484027e+00, 0.4707329974301702e+00, 0.8965398451252625e+00, 0.6082343132413309e+00, 0.6509204493235183e+00, 0.1573499033176421e+00, 0.7416865701488552e+00, 0.1617007503826062e-01, 0.9896605969885931e+00, 0.3427350027671039e+00, 0.9677837053324486e+00, 0.1744936477417883e+00, 0.1063275349300943e+00, 0.7956875801511568e+00, 0.8998967378241718e-01, 0.9957418429488507e-01, 0.2587083053394715e+00, 0.3753593926504107e+00, 0.7537263520315082e+00, 0.1726167393116790e+00, 0.1420490749456633e+00, 0.1475360304745418e+00, 0.3281044300775052e+00, 0.4011897283558843e+00, 0.5039659382748809e+00, 
0.8444659776686902e-01, 0.6685320205580927e+00, 0.8425776598223370e+00, 0.6334616337862548e+00, 0.6754035119469365e+00, 0.3594856315188868e+00, 0.4308279252773916e+00, 0.2170629535054914e+00, 0.5040120070238915e+00, 0.3780998968579012e+00, 0.9176188652711103e+00, 0.1603892455353655e+00, 0.2475008720560291e+00, 0.4414989878913728e+00, 0.3466658552370731e+00, 0.3098329951977107e+00, 0.8940921934240968e+00, 0.6686136942966417e+00, 0.4049936818549904e-01, 0.1308695446239941e+00, 0.8770734618597430e+00, 0.7917220872288512e+00, 0.5736823795257117e+00, 0.5474219864141461e+00, 0.3320663514826834e+00, 0.5824816531032581e+00, 0.6748067573570548e+00, 0.8139348519761679e+00, 0.1984641509226878e+00, 0.5557729841117627e+00, 0.1101812804319026e+00, 0.2169710965518002e+00, 0.5846617154920911e+00, 0.9784843983810533e+00, 0.8650267562586114e+00, 0.4041298540058539e+00, 0.9548165813531374e+00, 0.3839697059508718e+00, 0.4296171922210591e+00, 0.9617664542372570e+00, 0.2102779850721345e+00, 0.6135130181658475e+00, 0.3331715196673474e+00, 0.3177082868916457e+00, 0.2836520073686416e+00, 0.4452894940247868e+00}, + n: 21, + zOut: []float64{0.2413112525759640e+01, 0.2309698811549223e+01, 0.1971065496840386e+01, 0.1839093033734385e+01, 0.1586321309986888e+01, 0.1384287501933647e+01, 0.1357349915568794e+01, 0.1342137050614370e+01, 0.1098398899504136e+01, 0.1013688381736709e+01, 0.8838922773071145e+00, 0.7023193003094995e+00, 0.6083268670387161e+00, 0.3967105732701523e+00, 0.3295901758335739e+00, 0.2610566591740404e+00, 0.2254040681461952e+00, 0.1107925081061023e+00, 0.7084089717166595e-01, 0.5790734022898784e-02, 0.1162539701808497e-05, 0.2217532248565769e-17, 0.1400266087055540e-09, 0.1806586784837133e-09, 0.1357349915568794e+01, 0.1160047396826257e-18, 0.1994113072724655e-16, 0.4228494360227003e-16, 0.1342137050614370e+01, 0.1172187884313513e-15, 0.1255075745859590e-14, 0.2503873040081153e-15, 0.1098398899504136e+01, 0.3711182370878063e-20, 0.5744299067797088e-08, 0.4873476684669810e-08, 0.1013688381736709e+01, 0.1283916659070448e-18, 0.9737119066622311e-08, 0.8382615528986061e-08, 0.8838922773071145e+00, 0.3385457556489824e-15, 0.1990987815014785e+02, 0.1990987815014784e+02, 0.1050000000000000e+03, 0.3324263038548753e+01, 0.5714285714285714e+01, 0.5531678096648157e-07, 0.6083268670387161e+00, 0.9327491343333244e-26, 0.3094358165149238e-06, 0.4992693882721107e-06, 0.3967105732701523e+00, 0.1615587133892632e-26, 0.5013465632663534e-06, 0.4474558637993164e-06, 0.3295901758335739e+00, 0.7527343574232552e-22, 0.6917755140723670e-06, 0.9191878134168280e-06, 0.2610566591740404e+00, 0.8992644540970212e-27, 0.3283328599477859e-08, 0.8980185396558612e-09, 0.2254040681461952e+00, 0.2761013168273541e-29, 0.1566859737304233e-05, 0.9651044230345755e-05, 0.1107925081061023e+00, 0.7853700447084989e-18, 0.2494389596499284e-03, 0.3103855763358142e-03, 0.7084089717166595e-01, 0.7910121402691465e-20, 0.9848486231185840e-03, 0.1285663115805527e-02, 0.5790734022898784e-02, 0.1925929944387236e-33, 0.1531134660067926e-02, 0.1103959511478054e-02, 0.1162539701808497e-05, 0.0000000000000000e+00, 0.3553926119432854e-02, 0.1161337136649311e-01}, + info: 0, + }, + { + z: []float64{0.6912525216559012e+00, 0.7535513505900934e+00, 0.9471932644608124e+00, 0.3186447229836621e+00, 0.5324891284853083e+00, 0.8568362673509099e+00, 0.7683599820389093e+00, 0.5584553004793524e+00, 0.5571013534938567e+00, 0.9300192917069305e-01, 0.8580829209571639e+00, 0.7019126526839274e+00, 0.6637712341629651e+00, 0.1211000088170944e+00, 0.6340253546080570e+00, 
0.8089912896293909e+00, 0.9056005723453709e+00, 0.8259750504337368e+00, 0.7787230128973186e+00, 0.5552567089885843e+00, 0.1575217143336131e+00, 0.6258385609742768e+00, 0.3943745586872103e+00, 0.2414131111537543e-02, 0.1491577729788018e-01, 0.3231570165180105e+00, 0.6629837458829935e+00, 0.3692715436026686e+00, 0.4217957028148089e+00, 0.9587355452830710e+00, 0.2048474370070816e+00, 0.9514275263988156e+00, 0.6824099372375696e+00, 0.4368303647429550e-01, 0.1779948598152153e+00, 0.8894462775192779e+00, 0.5101271266317630e+00, 0.9448652224292025e+00, 0.2265718177112338e+00, 0.3995811139403516e+00, 0.8380789267629041e+00, 0.6621441253102893e+00, 0.1548712624835822e+00, 0.6028850196944033e+00, 0.6938032609628844e+00, 0.6847684356722229e+00, 0.5160938472467536e+00, 0.2489894297094153e+00, 0.1603949345362032e-01, 0.8469682453172568e+00, 0.1332301650751776e+00, 0.4964641576870391e+00, 0.8955382322295532e+00, 0.8913224930710517e-01, 0.1632933980261509e+00, 0.7671629642421702e+00, 0.1918190416387825e+00, 0.4660642407804531e+00, 0.3421851388276426e+00, 0.6793284417353496e-03, 0.4856583571413033e+00, 0.6596584184483706e+00, 0.3999565195667565e-01, 0.4265147212719423e+00, 0.1721559491949431e-01, 0.4779030554385033e-01, 0.9628058707102425e+00, 0.4575395386200597e+00, 0.3526890078762277e+00, 0.7271726865364773e+00, 0.1741804541232641e+00, 0.3524640041347155e+00, 0.2143698389021181e+00, 0.9553499123803968e+00, 0.7803670081719427e+00, 0.7906509834069840e+00, 0.5939624443460740e+00, 0.5439294231719827e+00, 0.4026522884432556e+00, 0.2400403086558794e+00, 0.1695045416332096e+00, 0.8774151073165960e+00, 0.9528799076371641e-01, 0.6357307125994049e-01}, + n: 21, + zOut: []float64{0.2768602537214622e+01, 0.2449987435840808e+01, 0.2103617452638331e+01, 0.2091492233971247e+01, 0.2040261542124423e+01, 0.1984511273424848e+01, 0.1507960384149227e+01, 0.1491386484166052e+01, 0.1283295633334833e+01, 0.1065640575633715e+01, 0.9760063516861833e+00, 0.9535053549649605e+00, 0.6117733318422185e+00, 0.5156356643703764e+00, 0.4217391763517565e+00, 0.2514332616796913e+00, 0.1056943786041289e+00, 0.6556299181229375e-01, 0.2554189252987301e-01, 0.1599343544217954e-01, 0.7248128851221257e-03, 0.1757582096832414e-26, 0.8938568486456872e-12, 0.7801064326234513e-12, 0.1507960384149227e+01, 0.1792073872399063e-19, 0.2167105506392983e-08, 0.2566615495401212e-08, 0.1491386484166052e+01, 0.2939740300877914e-17, 0.1513553473151668e-08, 0.1292098164447321e-08, 0.1283295633334833e+01, 0.5002192774679673e-18, 0.3422853236655498e-09, 0.3087733884567774e-09, 0.1065640575633715e+01, 0.2649562899583917e-24, 0.1824851422869684e-09, 0.1969539993879423e-09, 0.9760063516861833e+00, 0.4849588587138365e-15, 0.2273036620466689e+02, 0.2273036620466689e+02, 0.1110000000000000e+03, 0.3401360544217687e+01, 0.6306306306306307e+01, 0.7012703734578152e-09, 0.6117733318422185e+00, 0.2636779683484747e-15, 0.8832037497597917e-08, 0.1377654767646391e-07, 0.5156356643703764e+00, 0.0000000000000000e+00, 0.4403364729024244e-07, 0.2921450058151117e-07, 0.4217391763517565e+00, 0.5116160632510206e-16, 0.4627032593211780e-06, 0.6780935482482274e-06, 0.2514332616796913e+00, 0.3477119313860159e-18, 0.8396751469149006e-05, 0.1191099168408868e-04, 0.1056943786041289e+00, 0.3125614770312806e-14, 0.3873238345753621e-04, 0.1564927337192913e-03, 0.6556299181229375e-01, 0.2145521217817673e-17, 0.9476510350371504e-03, 0.9104754488359111e-03, 0.2554189252987301e-01, 0.1508279173816323e-23, 0.1093077353977330e-02, 0.3191708039391989e-03, 0.1599343544217954e-01, 
0.1319576415787468e-18, 0.1003863779460019e-02, 0.1271197433268307e-02, 0.7248128851221257e-03, 0.5169135658374832e-21, 0.1128681157512944e-02, 0.4574374031799749e-02}, + info: 0, + }, + { + z: []float64{0.8514967554120231e+00, 0.1903564904561825e+00, 0.9315134585755248e+00, 0.6865565499502658e+00, 0.9874780893034819e+00, 0.4243732836164926e-01, 0.4493254789963134e+00, 0.1218497220886967e+00, 0.3032420422448456e-01, 0.4472006833337416e+00, 0.7764061193501844e+00, 0.2765680149425037e+00, 0.1540380964603436e+00, 0.8227084597523057e+00, 0.1281873340097673e+00, 0.2797045229662546e+00, 0.1723485697940061e+00, 0.5407468670158238e+00, 0.3703892253237170e+00, 0.3275545813562609e+00, 0.1384768695152372e+00, 0.8683661690082241e+00, 0.2417038393999236e+00, 0.1573965961648072e+00, 0.7549703050710239e+00, 0.4197135409081310e+00, 0.5565257749798536e-01, 0.7494783707245707e+00, 0.8342833407960080e+00, 0.7207413539297494e+00, 0.3936911272324267e+00, 0.8307872407436112e-01, 0.5696677800088412e-01, 0.2580569153061851e-01, 0.3976149332803511e+00, 0.4082340292723502e+00, 0.3620800929220623e+00, 0.2078006460748600e+00, 0.6810562489816266e+00, 0.8414616295760738e+00, 0.2053319202340824e+00, 0.7745582586901971e+00, 0.8243869237197288e-01, 0.7513634029365589e+00, 0.8658814490552211e+00, 0.7014202441287165e-01, 0.8786892218281559e+00, 0.2415414918645031e-01, 0.1648367770784782e+00, 0.5852629690413500e+00, 0.6558826026732849e+00, 0.3505171007322015e+00, 0.5959744796861849e+00, 0.6000199298251039e+00, 0.5156931460104713e+00, 0.1010243586499049e-01, 0.2804608587164645e+00, 0.7088599878489240e+00, 0.3159377781412633e+00, 0.1196632783263437e+00, 0.8053993759743566e+00, 0.9537751230670044e+00, 0.6890540876884894e+00, 0.7327967809533995e+00, 0.1752177926983489e-01, 0.1176294169286665e+00, 0.1417165464977135e+00, 0.1128117051181837e+00, 0.4431718138373526e+00, 0.6383761487884151e+00, 0.4394388749172047e+00, 0.4496324720961747e+00, 0.2336818223819791e+00, 0.8283237197456694e+00, 0.5968446077081969e+00, 0.2123688346867825e-03, 0.2316409292069651e+00, 0.4041203764036865e+00, 0.5989557684304492e-01, 0.1751475090786662e+00, 0.3300532749209990e+00, 0.5257653351639615e+00, 0.9423173850128755e+00, 0.5110067889308013e+00}, + n: 21, + zOut: []float64{0.2298367449128524e+01, 0.2183508003369306e+01, 0.1768578955898148e+01, 0.1497740650820804e+01, 0.1451044171656134e+01, 0.1248562072819587e+01, 0.1096241131844911e+01, 0.1047761158237631e+01, 0.1036921434023463e+01, 0.1001878203872264e+01, 0.6028997776208512e+00, 0.5966927468392602e+00, 0.3787547972649039e+00, 0.3164597916816877e+00, 0.2770907680746649e+00, 0.1378928396715414e+00, 0.1008983167754413e+00, 0.7523836829300481e-01, 0.4549396861483578e-01, 0.2905908538794771e-01, 0.1164397397714791e-04, 0.2593449025859535e-11, 0.4028878361793870e-19, 0.4523833078775027e-19, 0.1096241131844911e+01, 0.2254118867995285e-20, 0.6781252758190456e-19, 0.6132859990189332e-19, 0.1047761158237631e+01, 0.2208810534618833e-28, 0.7941399228881527e-19, 0.8755467647203911e-19, 0.1036921434023463e+01, 0.2697904295855860e-27, 0.1193508568348155e-18, 0.1375302458176128e-18, 0.1001878203872264e+01, 0.1808754486438286e-19, 0.1585460437287443e-17, 0.7110442921296878e-18, 0.6028997776208512e+00, 0.1121217470921487e-23, 0.1719109533586889e+02, 0.1719109533586889e+02, 0.1020000000000000e+03, 0.3002267573696145e+01, 0.7843137254901960e+01, 0.6126841018734423e-14, 0.3787547972649039e+00, 0.2514494135391975e-29, 0.3694268515215640e-12, 0.2142184401680204e-12, 0.3164597916816877e+00, 0.2495880173472380e-22, 
0.1052194788283320e-11, 0.1801542652052392e-11, 0.2770907680746649e+00, 0.2042405059827892e-23, 0.3416315088185822e-10, 0.1784450556067769e-10, 0.1378928396715414e+00, 0.2231720057459842e-17, 0.5936850868223322e-09, 0.2792901329469146e-09, 0.1008983167754413e+00, 0.4009937753419447e-25, 0.1438568455567306e-07, 0.6376284135652434e-08, 0.7523836829300481e-01, 0.1673899732006569e-23, 0.3811420638095229e-06, 0.1677978014843256e-06, 0.4549396861483578e-01, 0.4108428387966160e-20, 0.9969899927958168e-05, 0.2238705130236833e-04, 0.2905908538794771e-01, 0.5621738354967019e-25, 0.1622697124847995e-03, 0.1340889241809050e-03, 0.1164397397714791e-04, 0.4733165431326071e-29, 0.2205086015306072e-02, 0.1109731617446849e-02}, + info: 0, + }, + { + z: []float64{0.9566341326420642e+00, 0.1368890791339401e+00, 0.1041033686995702e+00, 0.1843288381431366e+00, 0.9983016155690853e+00, 0.1786866722264480e+00, 0.3401743522788719e+00, 0.3857333052582377e+00, 0.5001620282112933e+00, 0.9013417473823726e+00, 0.9483009872337210e+00, 0.8066933876879568e+00, 0.5299205541713562e+00, 0.1746250429119467e-01, 0.9676426134678857e+00, 0.9451210738656991e+00, 0.4964770237154315e+00, 0.7837324008636358e+00, 0.9355959680316486e+00, 0.6160646886859618e+00, 0.3400424465599898e+00, 0.5332164761252830e+00, 0.3086260018745091e+00, 0.7161810741898500e+00, 0.9586563909920308e+00, 0.5072089025298385e+00, 0.5345428289191819e+00, 0.3564033548208417e+00, 0.5073508962435702e-01, 0.5457190349920044e+00, 0.5924351087865416e+00, 0.4551284321479383e+00, 0.1212070325395470e+00, 0.4136992969230449e+00, 0.9668715153070689e+00, 0.5158905905227948e+00, 0.3815356588296225e+00, 0.9643666102790432e+00, 0.2243124357596349e-01, 0.1865949320717719e+00, 0.7081123336136068e+00, 0.8847286955309422e+00, 0.8075237017566694e+00, 0.7058935338718864e+00, 0.8930194548226011e-01, 0.5968683124630468e+00, 0.6423123320238798e+00, 0.4264046421741549e+00, 0.6535060636263267e+00, 0.8848885990305457e+00, 0.1727496532173951e+00, 0.2233116713730792e+00, 0.5179656160482806e+00, 0.8833427955005373e+00, 0.5463392573098168e+00, 0.2339495215057854e+00, 0.2531599269911875e+00, 0.8693933445310196e+00, 0.9904673923253784e+00, 0.5001765262594373e+00, 0.8475066901059425e+00, 0.7747140538451288e+00, 0.8097617518836179e+00, 0.6177833422496881e+00, 0.4294196301939426e+00, 0.8754149021622922e+00, 0.1154777972031917e+00, 0.4002319777705416e+00, 0.8948519482752643e+00, 0.2680092666078437e-01, 0.4982362166797213e+00, 0.8946688039099321e+00, 0.6004540842339657e+00, 0.7864972660235388e+00, 0.5343742612900907e+00, 0.6804570163829448e+00, 0.8534533212497195e+00, 0.6588190749186591e+00, 0.3987256626502571e+00, 0.9641448737959348e+00, 0.8049200655531427e+00, 0.8429466638354991e+00, 0.3173537092974966e+00, 0.2969758130071765e+00}, + n: 21, + zOut: []float64{0.2691092090580979e+01, 0.2651656165852700e+01, 0.2273892452779949e+01, 0.2046491657483657e+01, 0.1856589708219707e+01, 0.1474606896148613e+01, 0.1449970763396692e+01, 0.1157826920722246e+01, 0.1131422765724169e+01, 0.1103316690199991e+01, 0.1081571209422854e+01, 0.8863514547656711e+00, 0.7487688552248520e+00, 0.4946957959671176e+00, 0.3261665336572834e+00, 0.2704116388140369e+00, 0.1035778920187539e+00, 0.8274904671505576e-01, 0.4712842456001845e-01, 0.3463118683574901e-01, 0.5254769424391846e-04, 0.2373014771939845e-22, 0.2238209824420786e-09, 0.1516830937674482e-09, 0.1449970763396692e+01, 0.6496080927593321e-18, 0.4705401432274776e-09, 0.6877602609145650e-09, 0.1157826920722246e+01, 0.9485735114056482e-11, 0.6765447972194981e-08, 
0.9075269590011981e-08, 0.1131422765724169e+01, 0.0000000000000000e+00, 0.2115471642425146e-07, 0.1586124232313620e-07, 0.1103316690199991e+01, 0.1685550134725853e-21, 0.9651672077875321e-13, 0.7184159896265294e-12, 0.1081571209422854e+01, 0.5505416081326749e-23, 0.2191297069678434e+02, 0.2191297069678434e+02, 0.1080000000000000e+03, 0.3308390022675737e+01, 0.4629629629629630e+01, 0.3615019372146533e-05, 0.7487688552248520e+00, 0.2711157159063573e-25, 0.1185348717221526e-04, 0.1538970705040117e-04, 0.4946957959671176e+00, 0.7711572887860419e-23, 0.5228762878644057e-04, 0.2140066476035995e-04, 0.3261665336572834e+00, 0.1027012207743053e-16, 0.1058102716324823e-03, 0.1097923206605328e-03, 0.2704116388140369e+00, 0.1419949629397821e-27, 0.3075139383959133e-03, 0.2041016121130936e-03, 0.1035778920187539e+00, 0.1569288801032274e-18, 0.2245437021601444e-03, 0.2161294383881767e-03, 0.8274904671505576e-01, 0.1322963914266329e-22, 0.4098429023844965e-03, 0.2685224923308173e-03, 0.4712842456001845e-01, 0.1972152263052530e-27, 0.1445743106447529e-03, 0.3591079434142046e-04, 0.3463118683574901e-01, 0.1109335647967048e-30, 0.1752604588321185e-04, 0.1168961103966469e-04, 0.5254769424391846e-04, 0.0000000000000000e+00, 0.7596773881065606e-02, 0.6101958366533248e-02}, + info: 0, + }, + { + z: []float64{0.8353120998595153e+00, 0.2762691216596785e+00, 0.4005263075088337e+00, 0.2013066591993183e+00, 0.7159087664363259e+00, 0.6826958051178053e+00, 0.3940626911632166e+00, 0.4990834556070268e+00, 0.6760530863202496e-02, 0.5490926358579395e-01, 0.1673564807759778e+00, 0.6884305772394704e+00, 0.3902830106720901e+00, 0.9394502319309270e+00, 0.1453732258871473e+00, 0.6913382271536774e+00, 0.4797608951630542e+00, 0.8813081041297155e+00, 0.7179153476502249e+00, 0.8684015860527726e+00, 0.4912919359735076e+00, 0.2734537604788182e+00, 0.6687443482830383e+00, 0.1554406884207427e+00, 0.2919860915775756e+00, 0.2642733542167044e+00, 0.8429147529046822e+00, 0.2888320698097314e+00, 0.8826173246454528e+00, 0.8977386905269689e+00, 0.7688524109189127e+00, 0.9833521147086286e+00, 0.9216258197795713e-01, 0.4886212819310254e-01, 0.2266462269156035e+00, 0.3411440109054314e+00, 0.3008601997419669e-01, 0.6174499545648404e+00, 0.5004878789063633e+00, 0.6820549240543587e+00, 0.2697209119346977e-01, 0.5561910637447186e+00, 0.5496066776611622e+00, 0.5185759009159874e+00, 0.1533641991284778e+00, 0.9896294286837857e+00, 0.3752499036369343e+00, 0.6748924776803822e-01, 0.2328081038279309e+00, 0.6721552304404512e+00, 0.5472950056303504e+00, 0.9949773164461492e+00, 0.5646629720515278e+00, 0.4280389734679516e+00, 0.4488723317267336e+00, 0.7263390986042261e+00, 0.2371171913738990e-01, 0.2980895305269504e-01, 0.2561842200752050e+00, 0.2554059142478259e+00, 0.5952758446711104e-01, 0.7921877816291150e+00, 0.2045719212105177e+00, 0.9215889426626128e+00, 0.7130733301970400e+00, 0.1420514661017834e+00, 0.8292002693849506e+00, 0.5327095640926792e+00, 0.9857821333545339e+00, 0.6686900428841760e+00, 0.1963263366455671e+00, 0.2881353528210300e+00, 0.7077457532403378e+00, 0.9075905470440924e+00, 0.1600253949359417e+00, 0.1037543259304946e+00, 0.3343707004174388e+00, 0.7287034220491817e-01, 0.8402900560224008e+00, 0.1343632243665364e+00, 0.8746515522185468e+00, 0.4102434537961821e+00, 0.1922774302984470e+00, 0.7386986269659255e+00}, + n: 21, + zOut: []float64{0.2705701250380865e+01, 0.2517516394575531e+01, 0.1823532784589120e+01, 0.1787562059778101e+01, 0.1543666662169401e+01, 0.1517718677289282e+01, 0.1380407836894782e+01, 0.1248997815711926e+01, 
0.1005808190406691e+01, 0.8278526045363968e+00, 0.6708485238929687e+00, 0.6082664705845506e+00, 0.5949155529076431e+00, 0.3419285270749396e+00, 0.2930861537880754e+00, 0.2677590880174420e+00, 0.1897681878114907e+00, 0.4498262444281959e-01, 0.4027859329477797e-01, 0.1212960387301111e-02, 0.4788271754899720e-05, 0.2113479533223326e-17, 0.1072835726919963e-07, 0.2769573896400322e-08, 0.1380407836894782e+01, 0.2075077381359190e-19, 0.2509615479685493e-12, 0.8472237738633739e-13, 0.1248997815711926e+01, 0.8862433277732312e-18, 0.5565142852935547e-10, 0.1115246324111066e-09, 0.1005808190406691e+01, 0.1508363342119467e-20, 0.4766364300104371e-06, 0.1686414207934524e-06, 0.8278526045363968e+00, 0.2958228394578794e-30, 0.1339811587116281e-04, 0.9618739022097639e-05, 0.6708485238929687e+00, 0.5096041447727736e-27, 0.1941181574680586e+02, 0.1941181574680586e+02, 0.1100000000000000e+03, 0.3301587301587301e+01, 0.6363636363636363e+01, 0.2769598741382734e-05, 0.5949155529076431e+00, 0.1660823573641619e-23, 0.5008194680845295e-05, 0.3644576454263968e-05, 0.3419285270749396e+00, 0.8597611808520244e-19, 0.8197192371775147e-05, 0.1495511835801176e-04, 0.2930861537880754e+00, 0.6452008777913547e-22, 0.7613853051650506e-05, 0.9176969716117157e-05, 0.2677590880174420e+00, 0.1626694295438144e-23, 0.2172962611604878e-06, 0.9910144268854854e-06, 0.1897681878114907e+00, 0.5698278443334010e-17, 0.3051734820342141e-03, 0.3112883552318466e-03, 0.4498262444281959e-01, 0.6022708464481807e-24, 0.1205042765713422e-04, 0.1759326252954196e-04, 0.4027859329477797e-01, 0.1150776253123047e-20, 0.1426316484726719e-04, 0.9508927802446297e-05, 0.1212960387301111e-02, 0.3136662420452545e-23, 0.9384394253406367e-03, 0.4268700521479601e-03, 0.4788271754899720e-05, 0.0000000000000000e+00, 0.1227549320701017e-01, 0.5132215250850077e-02}, + info: 0, + }, + { + z: []float64{0.6118263281058917e+00, 0.9095172470423057e+00, 0.3600259528878788e+00, 0.2816648927398081e+00, 0.1399164597780194e+00, 0.8570391268113827e+00, 0.6056165732881742e+00, 0.8852338629006543e+00, 0.5421670517489531e+00, 0.6780884449288360e+00, 0.8758212925230515e+00, 0.8288674605669587e+00, 0.6440226363310471e+00, 0.5894956271378661e+00, 0.5783475018163303e+00, 0.3156005123186516e+00, 0.8703974350876685e+00, 0.2617715240366199e+00, 0.8370578409687427e+00, 0.5560169751702698e+00, 0.7455275952513329e-01, 0.7082615127868872e+00, 0.1297707357428401e-02, 0.3064709066297203e+00, 0.8391571848933236e+00, 0.9253805128965463e+00, 0.1130984054668048e+00, 0.8319072101853607e+00, 0.7799276064931965e+00, 0.8579163369971375e+00, 0.5543120979067712e+00, 0.1549966158329672e+00, 0.2643029344048516e+00, 0.8314141615883028e+00, 0.5782452249644381e+00, 0.6319732449892587e+00, 0.4892864093075822e+00, 0.1584418066385224e+00, 0.1120577745932696e+00, 0.6617954786615949e+00, 0.1159882395495345e+00, 0.2876505945591634e+00, 0.7061058070010036e+00, 0.7252986915993946e+00, 0.6775944839786487e+00, 0.7230010551819884e+00, 0.6571938557215073e+00, 0.7482251296401533e+00, 0.9684494841571512e-01, 0.1940071502680237e+00, 0.8896785746779762e+00, 0.9170179284973872e+00, 0.8053995841969270e+00, 0.9321965525342302e+00, 0.7288973738137661e-01, 0.3849028957514653e+00, 0.4410947536600551e+00, 0.4770808300783691e-01, 0.3690162722923055e+00, 0.1908498348358704e+00, 0.8094133574022262e+00, 0.4809437445509840e+00, 0.1405808283203624e+00, 0.4482801388864215e+00, 0.7023724161235537e+00, 0.5041894616412674e+00, 0.2984594028552712e+00, 0.8510121345026274e+00, 0.7612412832182369e-01, 0.8890771840141870e+00, 
0.8611357776125709e+00, 0.7702458212424681e+00, 0.8813091992976040e+00, 0.3838156614335277e+00, 0.1536911508311884e+00, 0.2763378258408019e+00, 0.7613349010649187e+00, 0.4228084592762277e+00, 0.5894002697827440e+00, 0.5744691724607953e-01, 0.3014767112059925e+00, 0.7221203248495245e-01, 0.7619553096876313e+00, 0.5154237548276358e+00}, + n: 21, + zOut: []float64{0.2723457737035386e+01, 0.2426437846006860e+01, 0.2169822787916940e+01, 0.1997417054976096e+01, 0.1928996526776896e+01, 0.1891356815602834e+01, 0.1803418741268920e+01, 0.1442378108645599e+01, 0.1026854281138765e+01, 0.9766368850685674e+00, 0.8449303421617025e+00, 0.7575761304896602e+00, 0.7013909577306906e+00, 0.6362631409338654e+00, 0.3605885444762012e+00, 0.1912852882240109e+00, 0.1837209447646653e+00, 0.1221129878112117e+00, 0.2486838916706533e-01, 0.9765050532318768e-02, 0.3171294864219318e-06, 0.1371236688474761e-19, 0.2987011438498780e-15, 0.8087574764480620e-15, 0.1803418741268920e+01, 0.5451390847569379e-21, 0.7270236634641202e-11, 0.2896404140496822e-11, 0.1442378108645599e+01, 0.6954964380428437e-14, 0.4131395561099005e-08, 0.7275670618661885e-08, 0.1026854281138765e+01, 0.1292469707114106e-25, 0.1838064307199299e-06, 0.1238204560933622e-06, 0.9766368850685674e+00, 0.3388131789017201e-20, 0.3880169268725981e-06, 0.5681289313967714e-06, 0.8449303421617025e+00, 0.6785686819474261e-17, 0.2221927887785774e+02, 0.2221927887785774e+02, 0.1080000000000000e+03, 0.3244897959183673e+01, 0.4629629629629630e+01, 0.3025393053677348e-08, 0.7013909577306906e+00, 0.1341063538875720e-28, 0.3767849368753714e-07, 0.3184959175038880e-07, 0.6362631409338654e+00, 0.8874685183736383e-29, 0.5999352088776256e-07, 0.7114794232343430e-07, 0.3605885444762012e+00, 0.1582265649256097e-24, 0.2391907791779699e-03, 0.3344095791507466e-03, 0.1912852882240109e+00, 0.1130832107634320e-25, 0.4040482702647743e-03, 0.3567248396804949e-03, 0.1837209447646653e+00, 0.4930380657631324e-31, 0.1016317982961682e-02, 0.8970313840717793e-03, 0.1221129878112117e+00, 0.2076836816009399e-16, 0.1892596629513954e-02, 0.2329446640559320e-02, 0.2486838916706533e-01, 0.1713835026472478e-23, 0.3568672048225553e-04, 0.1599561599209249e-03, 0.9765050532318768e-02, 0.0000000000000000e+00, 0.3650779499194616e-03, 0.5712243726145556e-03, 0.3171294864219318e-06, 0.8599783076848839e-19, 0.2181668718924628e-02, 0.9598847336904798e-03}, + info: 0, + }, + { + z: []float64{0.1133237741366075e+00, 0.6519297783748091e+00, 0.1419059404596466e+00, 0.6942573823108688e+00, 0.4899507169502382e-03, 0.4408585439270172e+00, 0.4089484333544855e+00, 0.8087151633238087e+00, 0.7299241994604067e-01, 0.7772039339909241e+00, 0.6160100823712519e+00, 0.1679565169487562e+00, 0.3276744683398485e+00, 0.8481565553508272e+00, 0.5614085421003249e+00, 0.5889702994117808e+00, 0.9865181317193454e+00, 0.5686958983255483e+00, 0.2173616127956169e+00, 0.6617740807852496e+00, 0.1324295788322836e+00, 0.5932751564887147e+00, 0.4417023572495267e+00, 0.8308006832392040e+00, 0.6963900525231828e+00, 0.7023649910778577e+00, 0.2540087053190563e+00, 0.5116605124111596e+00, 0.4580501208371790e+00, 0.9900881058795367e+00, 0.6233054379975522e+00, 0.5093215893644945e+00, 0.8805051397371310e+00, 0.5918749676473295e+00, 0.8024628157090720e+00, 0.1089848141144264e+00, 0.9385822032209372e+00, 0.9676954608704463e+00, 0.7567544231269694e+00, 0.7763472508927070e+00, 0.7544500156824916e+00, 0.7080578938468585e-02, 0.8622695184492650e+00, 0.1268681530179145e+00, 0.2408963669487220e+00, 0.1909503620360825e+00, 0.9251648557888137e-01, 
0.4447315055648083e+00, 0.1504005134322962e+00, 0.6245252202415555e+00, 0.8432800589533600e+00, 0.1408178180805120e+00, 0.8538656063298837e+00, 0.6959250082173740e-01, 0.7452290984944354e-01, 0.8186895700534982e+00, 0.4817344889163616e+00, 0.8941802149688474e+00, 0.8277272846279742e+00, 0.3122760515919010e+00, 0.4631046821883726e+00, 0.9939521405533804e+00, 0.5067334594578294e+00, 0.3251044449945518e+00, 0.4514296322650755e+00, 0.9520015267726308e+00, 0.3811787795610073e+00, 0.6466473208499942e+00, 0.1035647165455822e-01, 0.3768739780085785e+00, 0.7301131130949717e+00, 0.6341150979687762e+00, 0.2006486990564113e+00, 0.7995641372737874e+00, 0.3501522989293299e+00, 0.7212117404145123e+00, 0.3724384783514179e+00, 0.2704745802524587e+00, 0.5954948132025037e+00, 0.8796174543237598e+00, 0.2533217117523398e+00, 0.2406431629179949e+00, 0.4238947154458014e+00, 0.1002294321190256e+00}, + n: 21, + zOut: []float64{0.2763722715135843e+01, 0.2473753815483350e+01, 0.2367461218794171e+01, 0.2245079256722998e+01, 0.1833576927640889e+01, 0.1670289128561736e+01, 0.1457654822263786e+01, 0.1350493275779479e+01, 0.1277854548609735e+01, 0.1156481310792204e+01, 0.1106963375568883e+01, 0.8704957369830484e+00, 0.6845380809859644e+00, 0.4950109522946930e+00, 0.4198272059610658e+00, 0.2867476262636770e+00, 0.2180239597616727e+00, 0.1578141753335409e+00, 0.1153511714179556e+00, 0.2506658150747594e-01, 0.5048803894682267e-08, 0.8470329472540474e-20, 0.3080709680328647e-07, 0.2263725078605666e-07, 0.1457654822263786e+01, 0.6882142696441190e-21, 0.4511645736903135e-07, 0.5799182381327367e-07, 0.1350493275779479e+01, 0.3225542408567476e-17, 0.3673248914261325e-08, 0.2431637013515395e-08, 0.1277854548609735e+01, 0.1479114197289397e-30, 0.1457855695816495e-07, 0.1029378003116242e-07, 0.1156481310792204e+01, 0.9441087313685069e-24, 0.4178859307872738e-07, 0.3166226192774972e-07, 0.1106963375568883e+01, 0.1745440919131533e-19, 0.2297620589091097e+02, 0.2297620589091097e+02, 0.9600000000000000e+02, 0.3092970521541950e+01, 0.3125000000000000e+01, 0.2193086683474567e-06, 0.6845380809859644e+00, 0.6815668447404457e-13, 0.1173742870849872e-05, 0.2415236474615244e-05, 0.4950109522946930e+00, 0.4604423331594002e-25, 0.1089056660110428e-04, 0.1249035785040860e-04, 0.4198272059610658e+00, 0.2034864306780130e-21, 0.1807219455044531e-04, 0.1582973453084106e-04, 0.2867476262636770e+00, 0.1157544870450433e-16, 0.1788428346885188e-06, 0.1048647571833164e-05, 0.2180239597616727e+00, 0.2820966597070338e-26, 0.2675556367648033e-04, 0.1965631735302374e-04, 0.1578141753335409e+00, 0.4632909363938796e-19, 0.6657143729168815e-04, 0.2311126907684945e-04, 0.1153511714179556e+00, 0.7089566920435294e-17, 0.2519028315687161e-03, 0.1126597449160621e-03, 0.2506658150747594e-01, 0.1511791266627385e-18, 0.1819059120658101e-02, 0.1223404988252185e-02, 0.5048803894682267e-08, 0.5048803894682267e-08, 0.1176483960189416e-01, 0.7561758166988619e-02}, + info: 0, + }, + { + z: []float64{0.6007483099411913e+00, 0.8757604992442719e+00, 0.3498665716095440e+00, 0.4974125174432915e+00, 0.3942177430818359e+00, 0.4655097178512825e+00, 0.7062255099400584e+00, 0.5955461614709365e+00, 0.5705117921786074e+00, 0.2580859610386106e+00, 0.5213970441770472e+00, 0.3227485215512098e+00, 0.8200431655548257e-01, 0.9521784777464899e+00, 0.9768302381970198e+00, 0.5401259459761943e+00, 0.2876249345369413e+00, 0.9928912621382637e+00, 0.9068004769556834e+00, 0.8136746797372185e+00, 0.6557906174128045e+00, 0.7593059043472016e+00, 0.7306718357524993e+00, 0.7872142480398446e+00, 
0.2169453387448870e+00, 0.1236189910537125e+00, 0.4376414329540577e+00, 0.2475974410903709e+00, 0.5743712937757064e+00, 0.8451940172770922e+00, 0.2954362279165403e+00, 0.9333330371592371e+00, 0.7261884685452219e+00, 0.2905925760133674e+00, 0.3022535752338255e+00, 0.6907049310391071e+00, 0.2616870809800952e+00, 0.3758121789769751e+00, 0.7112612645989228e+00, 0.9602233026485287e+00, 0.7212734445945013e+00, 0.9266335266312882e+00, 0.7829804394259932e-01, 0.6683939253194693e+00, 0.3539007443404153e+00, 0.1629995906155713e+00, 0.2470024645126471e+00, 0.7250237828800384e+00, 0.9725422312806663e+00, 0.1983270881451602e+00, 0.2167236969811593e+00, 0.2406674239058443e+00, 0.3811046126380717e+00, 0.6358718370494476e-01, 0.6038903598347725e+00, 0.2622945348536884e+00, 0.4871669697876424e+00, 0.1853788955105938e+00, 0.3471794920367192e+00, 0.9520318129455714e+00, 0.9596274379126818e+00, 0.5893179749928269e+00, 0.8560240909608330e-01, 0.9435308110759013e+00, 0.2058424446004554e+00, 0.2877465111691004e+00, 0.6254635389850627e+00, 0.5518846905281638e-01, 0.1132608874973561e+00, 0.8191427866511807e+00, 0.1031178242713734e+00, 0.1909407653284053e+00, 0.5428362177906626e+00, 0.2973328401307228e+00, 0.6079142025065691e+00, 0.1448090006401505e+00, 0.5807148716343321e+00, 0.2207276122221573e+00, 0.4617526470734991e+00, 0.2133930631010816e-01, 0.8719991154365724e+00, 0.4224743304865443e+00, 0.7012315055540164e+00, 0.9970966526798359e+00}, + n: 21, + zOut: []float64{0.2875734378878489e+01, 0.2344615299508293e+01, 0.2292398069608056e+01, 0.2200897502376554e+01, 0.2016889876654445e+01, 0.1997885510503641e+01, 0.1786196162676624e+01, 0.1505330793574375e+01, 0.1193246823746953e+01, 0.1153847743209180e+01, 0.9864509519829328e+00, 0.8076407462080507e+00, 0.6427729668463894e+00, 0.4808392927305387e+00, 0.3994667257867982e+00, 0.3273321022655395e+00, 0.1654363687174887e+00, 0.1176242694536732e+00, 0.4639514928819789e-01, 0.1592499252046327e-01, 0.3521629889976334e-03, 0.1784216669875992e-16, 0.7121431862167897e-10, 0.2812798494121821e-10, 0.1786196162676624e+01, 0.4108568523994154e-17, 0.4438281217193102e-07, 0.2274689950145052e-07, 0.1505330793574375e+01, 0.7499168293791558e-11, 0.5226296435976747e-05, 0.8641809444662033e-05, 0.1193246823746953e+01, 0.0000000000000000e+00, 0.2628223783350490e-04, 0.3987870710471355e-04, 0.1153847743209180e+01, 0.5865702946789929e-16, 0.1241308452869419e-07, 0.3894882997253744e-08, 0.9864509519829328e+00, 0.7673863643253590e-18, 0.2335727788952568e+02, 0.2335727788952568e+02, 0.1040000000000000e+03, 0.3058956916099773e+01, 0.2884615384615385e+01, 0.3948721236408985e-04, 0.6427729668463894e+00, 0.1028674620408199e-26, 0.2391232675375281e-03, 0.2086678697444007e-03, 0.4808392927305387e+00, 0.3130051437900657e-19, 0.1426864276581422e-03, 0.1522473256880454e-03, 0.3994667257867982e+00, 0.2101777886998446e-24, 0.1526280399103512e-05, 0.7002070790936748e-06, 0.3273321022655395e+00, 0.1242239255406691e-15, 0.1253374424171423e-04, 0.2848254240665514e-04, 0.1654363687174887e+00, 0.2455402723507254e-17, 0.1659752591164777e-02, 0.8476830847116842e-03, 0.1176242694536732e+00, 0.1344218982496604e-26, 0.1195138829435961e-02, 0.1583065192028977e-02, 0.4639514928819789e-01, 0.2205655090997949e-26, 0.2795062437264197e-03, 0.9604240981222648e-03, 0.1592499252046327e-01, 0.6574583373762050e-21, 0.7008887495297913e-02, 0.4078689360266482e-02, 0.3521629889976334e-03, 0.2981894221735425e-26, 0.7785429009218255e-02, 0.8697865294154498e-02}, + info: 0, + }, + { + z: []float64{0.3565428176835072e+00, 
0.1947118362244993e+00, 0.7741156920152520e+00, 0.2422304602782238e+00, 0.6344560478781652e+00, 0.6454994272905118e+00, 0.1782236022643452e+00, 0.1761708296759242e+00, 0.4902923959906477e+00, 0.3501715044277058e+00, 0.8606282659620700e+00, 0.4671519599404039e-03, 0.9177061631942818e+00, 0.9869535184930249e+00, 0.9615604930014852e+00, 0.2283553018405985e+00, 0.2470322441623238e+00, 0.5789642646481346e+00, 0.9797717987449011e+00, 0.2628090453859127e+00, 0.1063995753795172e+00, 0.9446865880365994e+00, 0.8109285339456792e+00, 0.4534650679402863e-01, 0.9560542860825706e+00, 0.7338084401822241e+00, 0.4719995459594307e+00, 0.8981162685661518e+00, 0.6088366551058919e+00, 0.4061662134958367e+00, 0.6141507801286759e+00, 0.7683300427463863e+00, 0.2030033710532269e+00, 0.5109903203799075e+00, 0.1295039929316932e-01, 0.1313823831505738e+00, 0.4600541675974348e+00, 0.8052912479215125e+00, 0.7315674921483241e+00, 0.1311059905375703e+00, 0.9372521946935908e-01, 0.2804058432106148e+00, 0.1426401168120447e+00, 0.1302494454623315e-01, 0.9936953989155247e+00, 0.7109841901452127e+00, 0.8438933393801377e+00, 0.5599779486257196e+00, 0.6224094357036947e-01, 0.3722277721927331e+00, 0.2128496963914419e+00, 0.2132595627624145e+00, 0.1761474673379855e+00, 0.5003566260610350e+00, 0.5448249098276142e+00, 0.5701738373290982e+00, 0.5196161461339291e+00, 0.4684002904633344e-01, 0.3091316721875115e+00, 0.9311143262655500e+00, 0.5165589926587729e+00, 0.4230658512580241e+00, 0.1580095970610971e+00, 0.9213403630754643e+00, 0.8395102804227733e+00, 0.6279936637330286e-01, 0.2118369865058433e+00, 0.5654902136914255e+00, 0.8603345552081203e+00, 0.6355651085391135e+00, 0.9504788095745560e+00, 0.4928822316317183e+00, 0.8546738890428598e+00, 0.1506739079739667e+00, 0.7955004699434830e+00, 0.8623803468730415e+00, 0.3328976137523824e+00, 0.6282681071703561e+00, 0.5253150183233042e+00, 0.5508861108006231e+00, 0.8482246434328706e+00, 0.3438184421622726e+00, 0.4546750663905129e+00, 0.2702842147468514e+00}, + n: 21, + zOut: []float64{0.2599043716461175e+01, 0.2392272222815941e+01, 0.1945154273268249e+01, 0.1882117601080557e+01, 0.1808214125087810e+01, 0.1701027479657859e+01, 0.1674631474676720e+01, 0.1432830957842129e+01, 0.1245386641860508e+01, 0.1003047986420639e+01, 0.5925499318269597e+00, 0.5737323778075372e+00, 0.4619197085153934e+00, 0.3795861721644865e+00, 0.2896752472940557e+00, 0.2024598813435771e+00, 0.1753335138277014e+00, 0.8865983947068434e-01, 0.4088280438555415e-01, 0.2271023207656241e-01, 0.3207012114254504e-03, 0.1328658858913301e-22, 0.6931093261583948e-12, 0.1962539051213948e-11, 0.1674631474676720e+01, 0.1972152263052530e-30, 0.1003060686577864e-09, 0.7786971971568132e-10, 0.1432830957842129e+01, 0.3552283923653146e-20, 0.1335875663368593e-08, 0.1098996752778210e-08, 0.1245386641860508e+01, 0.6078417508770261e-16, 0.6005521677860925e-08, 0.4565873133117504e-08, 0.1003047986420639e+01, 0.4907378230941548e-20, 0.6042756667162823e-07, 0.1337733817342293e-06, 0.5925499318269597e+00, 0.7849166006949067e-28, 0.2051155688909552e+02, 0.2051155688909552e+02, 0.1190000000000000e+03, 0.3632653061224490e+01, 0.6722689075630252e+01, 0.9823469248695270e-04, 0.4619197085153934e+00, 0.3582825487703231e-19, 0.4408755124729528e-13, 0.6425665938157879e-14, 0.3795861721644865e+00, 0.3049769924381334e-19, 0.2649002399507504e-08, 0.1284693180187630e-07, 0.2896752472940557e+00, 0.2730342256278547e-24, 0.3857186710932351e-04, 0.7729247272182434e-05, 0.2024598813435771e+00, 0.2411244474478362e-13, 0.9974443273860175e-04, 
0.7715885583051736e-04, 0.1753335138277014e+00, 0.1003583340528496e-19, 0.1275804036051463e-05, 0.1384195629265487e-05, 0.8865983947068434e-01, 0.1976477763343393e-17, 0.9250271469637329e-05, 0.1838436200742544e-04, 0.4088280438555415e-01, 0.1570480975080192e-18, 0.1665813744256122e-03, 0.9368787794988337e-04, 0.2271023207656241e-01, 0.1120555887990158e-19, 0.1174000031728850e-02, 0.1217958466428532e-03, 0.3207012114254504e-03, 0.0000000000000000e+00, 0.1126152474028795e-01, 0.4219515300090987e-02}, + info: 0, + }, + { + z: []float64{0.1431540885741494e+00, 0.8417471667062104e+00, 0.3721480902981644e-01, 0.9023772086815021e+00, 0.7374866167630612e-01, 0.8092071841305744e+00, 0.8019394206305236e+00, 0.4350675566733520e+00, 0.8704440278245326e+00, 0.8549104680538234e+00, 0.1422025173760633e+00, 0.8968456705882693e+00, 0.1647179447265090e-01, 0.8994077105608889e-01, 0.8596481603583839e-02, 0.3900328616396558e+00, 0.4012630844305533e+00, 0.8262908556770313e+00, 0.9026062467432411e+00, 0.6485345137550630e+00, 0.5598932241080331e+00, 0.6084992568220661e+00, 0.5039450314115669e+00, 0.3490854750626526e+00, 0.9921372250281116e+00, 0.1522469068827847e-01, 0.9395473455386270e+00, 0.3928498832803473e-01, 0.1672951108814763e+00, 0.9480670302336689e+00, 0.8743929741539138e+00, 0.9201765927311700e-01, 0.8641536356563365e+00, 0.8867562289118487e+00, 0.7156446870854873e+00, 0.5025184567500440e-01, 0.4878799838415181e+00, 0.5568327059782646e+00, 0.4596548235310455e+00, 0.6857902774944131e+00, 0.4795565553491499e-01, 0.4752906527216701e+00, 0.9288351773531449e+00, 0.7419018575576386e+00, 0.9987802353476521e+00, 0.8896105755435116e+00, 0.3190450046252536e+00, 0.9685806853440787e+00, 0.3396383774694021e+00, 0.9164401886915974e+00, 0.1269879571285023e+00, 0.7912318128907188e+00, 0.9987805345221650e+00, 0.4107567957671243e+00, 0.4798441760727139e+00, 0.2357309197085595e+00, 0.3404494185276096e+00, 0.7067865773496134e+00, 0.2931542927088892e+00, 0.6654441577727066e+00, 0.9279529535770864e+00, 0.9667208022109988e+00, 0.5518920376663735e+00, 0.1657487215954807e+00, 0.3897636058410684e+00, 0.9489706352102197e+00, 0.9626390247015670e+00, 0.1020775856599213e+00, 0.3545784768064359e+00, 0.3098089336816090e+00, 0.3791089991258285e+00, 0.6519489570740321e+00, 0.4763523952410913e+00, 0.3754524631080590e+00, 0.9792813089520809e+00, 0.1998301915332230e+00, 0.7618060057813935e+00, 0.1923436350325713e+00, 0.2507012719817848e+00, 0.9704520069999245e-01, 0.2277384851702763e+00, 0.7790727508043876e+00, 0.9105177383528497e+00, 0.9714469561729607e+00}, + n: 21, + zOut: []float64{0.2499720115178021e+01, 0.2371028651009860e+01, 0.2298220416774619e+01, 0.1883765650093571e+01, 0.1735426237683963e+01, 0.1616050162695453e+01, 0.1545275083087796e+01, 0.1187762610175739e+01, 0.1136805332133151e+01, 0.9576839460725933e+00, 0.9184986446054146e+00, 0.7702442640767845e+00, 0.6433137908930019e+00, 0.6160049176831036e+00, 0.3208789970551323e+00, 0.1900871628722621e+00, 0.9670499780074775e-01, 0.7460708611062000e-01, 0.6486083888512703e-01, 0.9966295048259487e-02, 0.4462615269650136e-07, 0.2765885173224186e-23, 0.3446029508568731e-11, 0.2660256849760007e-11, 0.1545275083087796e+01, 0.1351675564459578e-15, 0.5238898174323723e-11, 0.6640890994279970e-11, 0.1187762610175739e+01, 0.1020402561847179e-21, 0.5048392892414130e-10, 0.4263729817754413e-10, 0.1136805332133151e+01, 0.1429794613494979e-24, 0.1140378416083416e-09, 0.8548580688939750e-10, 0.9576839460725933e+00, 0.1302866745903472e-12, 0.1207358062805810e-07, 0.1106598026337004e-07, 
0.9184986446054146e+00, 0.3538135823224864e-23, 0.2093690524456137e+02, 0.2093690524456137e+02, 0.1130000000000000e+03, 0.3331065759637188e+01, 0.7964601769911504e+01, 0.4249897555997367e-08, 0.6433137908930019e+00, 0.4627357327667341e-16, 0.5702383148705814e-08, 0.8656383665454304e-08, 0.6160049176831036e+00, 0.0000000000000000e+00, 0.4551987331271016e-07, 0.8434005257711676e-07, 0.3208789970551323e+00, 0.2939872200043623e-13, 0.4894684795516590e-05, 0.8200734504818474e-05, 0.1900871628722621e+00, 0.9080407486043539e-23, 0.2659953011765320e-05, 0.2713538128245964e-05, 0.9670499780074775e-01, 0.1949336478520425e-16, 0.1287973346598181e-04, 0.3266830247798530e-04, 0.7460708611062000e-01, 0.0000000000000000e+00, 0.5588793940970657e-03, 0.8233963212342685e-03, 0.6486083888512703e-01, 0.1680331387617770e-17, 0.5510428852120968e-02, 0.6166917183278490e-02, 0.9966295048259487e-02, 0.9229672591085838e-28, 0.3019065027713110e-04, 0.7674568090533565e-04, 0.4462615269650136e-07, 0.1609276246650864e-27, 0.6212935138424260e-03, 0.6196304337665942e-02}, + info: 0, + }, + { + z: []float64{0.2117689827794353e+00, 0.3469390325266096e+00, 0.8556157513093896e+00, 0.4757820823688597e-01, 0.8174618541686680e+00, 0.7502623797079592e+00, 0.3473865910309967e+00, 0.1102061280439136e+00, 0.3838366632541155e+00, 0.4335750892772081e+00, 0.8350483733926333e+00, 0.8472841724322083e+00, 0.7001411292294977e+00, 0.1602971622162627e+00, 0.8475208106005836e+00, 0.1606731136869689e+00, 0.1282020202063155e+00, 0.7021142414638628e+00, 0.6509542291315448e+00, 0.3403267711628978e+00, 0.2783715946342622e+00, 0.2391807909006515e+00, 0.9326061845277929e+00, 0.8200594229325990e+00, 0.5326312099842309e+00, 0.4390518762985745e+00, 0.9087332134146432e+00, 0.8586305843825174e+00, 0.5289458618923581e+00, 0.6098713986238019e+00, 0.4124733909450122e+00, 0.2730411694523917e+00, 0.8437528550291284e+00, 0.6776322145297138e+00, 0.6772761566308538e+00, 0.6457716484911746e+00, 0.4290441069454122e+00, 0.9486890113840626e+00, 0.1145429178800543e+00, 0.8512453512490206e+00, 0.6435458910126746e+00, 0.6571563420730420e-01, 0.6473933144723745e+00, 0.2355560081089225e+00, 0.1262112692360912e+00, 0.6892345322103945e+00, 0.9088034154559810e-01, 0.5725023743105110e+00, 0.8924086653580375e+00, 0.6645221244511262e+00, 0.7287357579298158e+00, 0.6462379994906295e+00, 0.8254375000545862e+00, 0.2402799002378904e+00, 0.2312499677892260e+00, 0.7164295349077132e-01, 0.1216505240532725e+00, 0.5279275209153104e+00, 0.2010432640871422e+00, 0.2335833224032272e+00, 0.7053869472451330e+00, 0.1271428628255256e-01, 0.5920854820119847e-01, 0.1973220586788875e+00, 0.9024202349843203e+00, 0.2696040474399301e+00, 0.2399100427530529e+00, 0.4936945945994136e+00, 0.7377918193661529e+00, 0.4454167008139350e+00, 0.6822934143143184e+00, 0.1980205964978332e+00, 0.9382259163150929e+00, 0.7952650736079295e+00, 0.2147267423149601e+00, 0.2160850567828982e+00, 0.2063316967709339e-02, 0.4613555480251579e+00, 0.2983741464914229e+00, 0.7653433710630531e+00, 0.9319255930520460e+00, 0.9624604653562387e+00, 0.7921958309630666e+00, 0.3349247737517954e+00}, + n: 21, + zOut: []float64{0.2554012710728313e+01, 0.2350385283314499e+01, 0.2336543487584985e+01, 0.2078331003959482e+01, 0.1819938506810449e+01, 0.1751888604422140e+01, 0.1642765030717598e+01, 0.1391850148176030e+01, 0.1265614265169072e+01, 0.1192296172100378e+01, 0.1118290194286764e+01, 0.7471001949454404e+00, 0.6602310515087256e+00, 0.5223373748122110e+00, 0.3101567956620342e+00, 0.2634177676934227e+00, 0.1468471914237014e+00, 
0.9669691105737251e-01, 0.6862527639473751e-01, 0.2251868425683227e-01, 0.2442899974694956e-02, 0.0000000000000000e+00, 0.3247526756203984e-07, 0.3301672636248404e-07, 0.1642765030717598e+01, 0.2849742554922193e-13, 0.2093860354423170e-09, 0.9504157619668633e-09, 0.1391850148176030e+01, 0.5471978423050590e-17, 0.2765846142955850e-06, 0.3763175370346248e-06, 0.1265614265169072e+01, 0.7099748146989106e-29, 0.8629250150360319e-06, 0.7218261937815504e-06, 0.1192296172100378e+01, 0.0000000000000000e+00, 0.1792585742344500e-05, 0.1391262462240339e-05, 0.1118290194286764e+01, 0.2158808112824856e-22, 0.2234228955499889e+02, 0.2234228955499888e+02, 0.1130000000000000e+03, 0.3253968253968254e+01, 0.7079646017699115e+01, 0.1273852622682608e-03, 0.6602310515087256e+00, 0.1370219837932689e-23, 0.6841106514156665e-04, 0.6915431692027581e-04, 0.5223373748122110e+00, 0.8086955524743426e-16, 0.3151324787101243e-03, 0.4936870820060986e-03, 0.3101567956620342e+00, 0.1690864291947714e-17, 0.5342613094053112e-03, 0.4525307423692253e-03, 0.2634177676934227e+00, 0.1569544193046020e-19, 0.8085628512848309e-03, 0.5320256570924908e-03, 0.1468471914237014e+00, 0.1725633230170963e-30, 0.2171574910172427e-04, 0.7393923467130197e-05, 0.9669691105737251e-01, 0.8185725058097063e-19, 0.6259416979424432e-04, 0.7993629240914341e-04, 0.6862527639473751e-01, 0.1144304443709360e-17, 0.8363460972147480e-05, 0.2145990923718838e-04, 0.2251868425683227e-01, 0.3423656328659191e-26, 0.4258124540743150e-02, 0.4456054742147899e-02, 0.2442899974694956e-02, 0.7553929676107889e-20, 0.1657093833399320e-01, 0.2123214045479585e-01}, + info: 0, + }, + { + z: []float64{0.5143465832338334e-01, 0.9671338481452775e+00, 0.2594126021369855e+00, 0.4887933538896223e+00, 0.4809357095031462e+00, 0.1648852435705869e+00, 0.5382296047886386e+00, 0.2404093745709619e+00, 0.5749849863516346e+00, 0.8755417117410101e+00, 0.6479432781744229e+00, 0.8462387170859478e+00, 0.1030360983310576e+00, 0.2790419505795448e+00, 0.2643216914883461e+00, 0.5072751399665939e+00, 0.9813013168637370e+00, 0.5270325239254721e+00, 0.1341185443667956e+00, 0.1666757001787883e+00, 0.8421762037577841e+00, 0.6333707418037352e+00, 0.9714076539879462e+00, 0.8133030640726957e-01, 0.5961674478430062e+00, 0.9962348063653491e+00, 0.9690228851917282e-02, 0.4933763527820312e-01, 0.1005560439675963e+00, 0.9794838233988775e+00, 0.8412211319105695e+00, 0.4530689238841823e+00, 0.9824397821984348e+00, 0.9435184376619912e+00, 0.5899369853858024e+00, 0.3347062982138894e+00, 0.6634688706474758e+00, 0.7781534216536603e+00, 0.5213269680559092e+00, 0.2415498257414400e+00, 0.4153017903771141e+00, 0.6142268391585892e-01, 0.1582256219487426e+00, 0.5180630943028907e+00, 0.2531248086553088e+00, 0.4588063905612799e+00, 0.9850645955475579e-01, 0.4386688206276981e+00, 0.6723312445641013e+00, 0.2417358982121058e+00, 0.9102202020018957e+00, 0.4790528316718639e+00, 0.8645283277006380e+00, 0.5896385561561974e+00, 0.3547186358320912e+00, 0.2570702739547999e+00, 0.8872414069371877e+00, 0.7977277358714381e+00, 0.2283476919840042e+00, 0.5481349489617042e+00, 0.9565429487676439e+00, 0.3941453871538936e-01, 0.2005355998230810e+00, 0.6815820255993522e+00, 0.8827641753074383e+00, 0.3721138471690448e+00, 0.3473678385941080e+00, 0.5958558861949184e+00, 0.5289126915593071e+00, 0.7848410946975201e+00, 0.9380898148044153e+00, 0.4238368524709774e+00, 0.9095202310640094e-01, 0.5205916750079600e+00, 0.3388678441932766e+00, 0.9218715971576251e+00, 0.2785357832665022e+00, 0.7353529037048490e+00, 0.3430576699659011e+00, 
0.1583386336396764e-03, 0.1574217518812531e+00, 0.2808549182652627e+00, 0.9165809855463820e+00, 0.2729923295285186e-01}, + n: 21, + zOut: []float64{0.2740279952468604e+01, 0.2296796691149573e+01, 0.2162929780566866e+01, 0.2022438316208162e+01, 0.1971555203048247e+01, 0.1712998698314224e+01, 0.1596820997274643e+01, 0.1536702996895977e+01, 0.1112897445143912e+01, 0.8259900943746278e+00, 0.7751147071242228e+00, 0.6231241133597348e+00, 0.5263220334234422e+00, 0.4481765711163059e+00, 0.4215577615769598e+00, 0.1440581718410880e+00, 0.1296344814538628e+00, 0.5340807481344281e-01, 0.1871489185523312e-01, 0.3434703213692513e-02, 0.2376961512853157e-03, 0.2255238469879073e-22, 0.1711146112720571e-14, 0.5183409008081618e-15, 0.1596820997274643e+01, 0.0000000000000000e+00, 0.1075504130778587e-11, 0.4005497363076952e-12, 0.1536702996895977e+01, 0.1981618593915182e-26, 0.4700742807330894e-10, 0.1164621641126979e-09, 0.1112897445143912e+01, 0.4726602108594632e-24, 0.9459556816770867e-09, 0.1692486098434743e-08, 0.8259900943746278e+00, 0.0000000000000000e+00, 0.4245107391668450e-08, 0.4612248550244559e-08, 0.7751147071242228e+00, 0.8284213834597216e-21, 0.2112319338137411e+02, 0.2112319338137411e+02, 0.1000000000000000e+03, 0.2931972789115646e+01, 0.2000000000000000e+01, 0.1818678155237499e-07, 0.5263220334234422e+00, 0.4246772729828520e-23, 0.1119198730079850e-06, 0.4602199017952358e-06, 0.4481765711163059e+00, 0.1035379938102578e-29, 0.2940687130487931e-04, 0.4282452030972280e-04, 0.4215577615769598e+00, 0.5276311108751726e-20, 0.8277389737129399e-05, 0.6004055084531256e-05, 0.1440581718410880e+00, 0.0000000000000000e+00, 0.8355340113184283e-05, 0.1023986211128489e-04, 0.1296344814538628e+00, 0.1552281967638183e-19, 0.6343521312985942e-05, 0.6697527987420460e-05, 0.5340807481344281e-01, 0.6040133837432384e-17, 0.1556990792688111e-03, 0.3593183434334682e-03, 0.1871489185523312e-01, 0.1153562559317783e-18, 0.8237486079760113e-03, 0.4973426416094711e-03, 0.3434703213692513e-02, 0.0000000000000000e+00, 0.1008741049678482e-02, 0.1888495698860201e-02, 0.2376961512853157e-03, 0.2773339119917620e-31, 0.6098206042824776e-05, 0.1474144332182451e-03}, + info: 0, + }, + { + z: []float64{0.1099031735712288e+00, 0.5724174842808358e+00, 0.3122550743498126e+00, 0.7231231021981217e+00, 0.6175771623314344e+00, 0.5814310193624382e+00, 0.6423493353686718e+00, 0.8309609356975209e+00, 0.6400193102877816e-01, 0.8728840273137624e+00, 0.8682274216885312e+00, 0.7829529646382269e+00, 0.4658588928084396e+00, 0.4997300418007194e+00, 0.5750522308672811e+00, 0.5282811502940782e+00, 0.7877382060309071e+00, 0.2242905591148447e-01, 0.9672168595892823e+00, 0.9229185948074735e+00, 0.7120977511666169e+00, 0.9513613307832994e+00, 0.6495350224590044e+00, 0.1796094611119584e+00, 0.6238202085518583e+00, 0.4056553414984787e-01, 0.2863733551163383e-01, 0.7740758620163646e-01, 0.1652368945937387e+00, 0.5017774327613052e+00, 0.6445125305703996e+00, 0.5670374114090049e-01, 0.1147723121006077e+00, 0.5126855432858525e+00, 0.9526888308153642e+00, 0.1724272641761350e+00, 0.9682495468226523e+00, 0.2579549866163475e+00, 0.5365442422680400e+00, 0.7055455249537391e+00, 0.6510869341601188e+00, 0.6910931676909811e+00, 0.8245356137796966e+00, 0.2467134555988069e+00, 0.2932965007679977e-01, 0.3174819406165880e+00, 0.1008447548759832e+00, 0.7998426243165985e+00, 0.6780870841109060e+00, 0.2896057920811994e+00, 0.9303619290394473e+00, 0.3607716194531815e+00, 0.5211419140438559e+00, 0.1046120770241560e+00, 0.7683871590101677e+00, 0.2147990043395044e+00, 
0.6702055898403114e+00, 0.2446830800671760e+00, 0.1275992810549672e+00, 0.9234458715873217e+00, 0.6974016894347440e+00, 0.2461420250226486e+00, 0.2448954779360711e+00, 0.1956976010946661e+00, 0.8608131397075515e+00, 0.7387865940798233e-01, 0.5053692911917109e+00, 0.3354069180988316e+00, 0.4024076719528004e+00, 0.5167351886997615e+00, 0.7829885112810930e-01, 0.3735275576161866e+00, 0.9546718920527260e+00, 0.5321377922787256e+00, 0.6121034951800618e+00, 0.8028222695558183e+00, 0.3295254582685344e+00, 0.5907471536091985e+00, 0.8247143229089846e+00, 0.1329486496858557e+00, 0.9484305946158008e+00, 0.4510190965747323e+00, 0.1159598040227069e+00, 0.7290524870797256e+00}, + n: 21, + zOut: []float64{0.2759844065448739e+01, 0.2469029941309663e+01, 0.2170232676307031e+01, 0.1960369893365743e+01, 0.1773994559660628e+01, 0.1654180064160601e+01, 0.1454572475268221e+01, 0.1398795476174613e+01, 0.1260788731803824e+01, 0.1011169812035994e+01, 0.9032283997666860e+00, 0.7344998632955592e+00, 0.6915613254033124e+00, 0.3097922830021908e+00, 0.2074097203460417e+00, 0.1750683611049406e+00, 0.1335859329381489e+00, 0.1070304609992355e+00, 0.6466361262660925e-01, 0.1024517112836532e-01, 0.4658519939343828e-03, 0.5916456789157589e-30, 0.2414839775472174e-07, 0.1563475838564645e-07, 0.1454572475268221e+01, 0.5710570490785520e-12, 0.1598956453701674e-05, 0.9938094799470723e-06, 0.1398795476174613e+01, 0.7078291130367095e-25, 0.1301105456376455e-06, 0.7650088290739393e-07, 0.1260788731803824e+01, 0.6660939091008025e-19, 0.3865722666202593e-07, 0.5454440035759107e-07, 0.1011169812035994e+01, 0.5414223942436753e-17, 0.3344051754361652e-04, 0.2635566037358100e-04, 0.9032283997666860e+00, 0.5272968833424410e-16, 0.2125052867814009e+02, 0.2125052867814008e+02, 0.1090000000000000e+03, 0.3179138321995465e+01, 0.2752293577981651e+01, 0.8667028517906969e-06, 0.6915613254033124e+00, 0.6016648435007899e-23, 0.2921486269254270e-05, 0.9773987035049091e-05, 0.3097922830021908e+00, 0.6500213859021137e-26, 0.6786203908623285e-05, 0.1231548092049969e-04, 0.2074097203460417e+00, 0.0000000000000000e+00, 0.7819652657144090e-06, 0.1217347772587842e-05, 0.1750683611049406e+00, 0.6582255393164123e-26, 0.1458540666485106e-06, 0.3789967922095048e-07, 0.1335859329381489e+00, 0.1232595164407831e-31, 0.4615373247666497e-05, 0.3177332815969559e-05, 0.1070304609992355e+00, 0.5947376905467984e-20, 0.9914288367235413e-05, 0.1492808588608919e-04, 0.6466361262660925e-01, 0.5865227530883812e-22, 0.3814164765172745e-05, 0.1095283410948310e-05, 0.1024517112836532e-01, 0.3761300629007363e-24, 0.3159730783704485e-04, 0.5907442037524074e-05, 0.4658519939343828e-03, 0.2094421310984266e-20, 0.1426408794386696e-02, 0.1350744215392459e-02}, + info: 0, + }, + { + z: []float64{0.1337238926382778e+00, 0.3106335653974364e+00, 0.8951406789337562e+00, 0.2282076226383426e+00, 0.3559900270036094e+00, 0.1333708564991680e+00, 0.9225357561814306e+00, 0.6383337981459615e+00, 0.7455817572577521e+00, 0.9604635037498299e-01, 0.1362822625756884e+00, 0.7894982141438661e+00, 0.5371956247017955e+00, 0.2757551089818502e+00, 0.9298698100988576e+00, 0.8755382224220718e+00, 0.2542040210765101e+00, 0.1287904754285962e+00, 0.6810846780454294e+00, 0.6526638813043742e+00, 0.5955845877046446e+00, 0.4670096896697419e+00, 0.5475207984291373e+00, 0.6684162034597571e+00, 0.5235410539200527e+00, 0.7337617639827179e+00, 0.8903194008545992e+00, 0.8072217495300122e+00, 0.3881183186356038e+00, 0.9675156751063907e+00, 0.4869149855955625e+00, 0.4861129300486463e+00, 0.5200718178308463e+00, 
0.7592798686765068e+00, 0.7809192005748844e+00, 0.9939952535261979e+00, 0.4314053553650160e+00, 0.3880099380329156e+00, 0.9349849386467751e+00, 0.5291872418075704e+00, 0.7655812944336832e+00, 0.7077741471317599e+00, 0.5275519688342463e+00, 0.1415092279196126e+00, 0.1904335663118598e+00, 0.3699272115054363e+00, 0.7445484920377089e+00, 0.8400049615316296e+00, 0.8734700661006028e+00, 0.8683217518669656e+00, 0.8515176384024258e+00, 0.6202080985416659e+00, 0.3604998919630682e+00, 0.2403115969743893e+00, 0.9554672796937618e+00, 0.1693873282831592e+00, 0.5994144523705257e+00, 0.1384422941659869e-01, 0.6596251817370012e+00, 0.5636231004812753e+00, 0.5389166856740800e+00, 0.2607457441602791e+00, 0.5920539595206202e+00, 0.2966504229935458e+00, 0.5816050348468149e+00, 0.9328164339475690e+00, 0.5805507576670368e+00, 0.6771712743677023e+00, 0.9823651336988842e+00, 0.9793282488782439e+00, 0.4667245203343464e+00, 0.7134684364121646e+00, 0.4880671978756712e-01, 0.8383078993270572e+00, 0.7936419849408084e+00, 0.5385800856643647e+00, 0.4634644976924968e+00, 0.3931501879746360e+00, 0.1248524879525928e+00, 0.3152310060096000e+00, 0.9363438862709352e+00, 0.3520470715344234e+00, 0.1429338846554544e+00, 0.1326946626315275e+00}, + n: 21, + zOut: []float64{0.2657315323020249e+01, 0.2572905823883155e+01, 0.2192663626292596e+01, 0.2071095653426940e+01, 0.2044494413445205e+01, 0.1999166174223334e+01, 0.1788414821296782e+01, 0.1468335212647637e+01, 0.1398203839606685e+01, 0.1347197234417596e+01, 0.1032902470665773e+01, 0.6608948302945201e+00, 0.5911878145202527e+00, 0.5378924504889658e+00, 0.3142273934265400e+00, 0.2842363451796606e+00, 0.2019484989227465e+00, 0.1206949829709270e+00, 0.7426269792367300e-01, 0.2567909008435554e-01, 0.2199972943425967e-02, 0.1467781105980630e-22, 0.1166962122444304e-12, 0.3923264640094201e-13, 0.1788414821296782e+01, 0.4047127357551305e-19, 0.1945479305169457e-09, 0.9334440121571419e-10, 0.1468335212647637e+01, 0.0000000000000000e+00, 0.9722951216797492e-08, 0.1689489155058893e-07, 0.1398203839606685e+01, 0.0000000000000000e+00, 0.7669133926546234e-07, 0.4506074380002258e-07, 0.1347197234417596e+01, 0.5319335283638860e-18, 0.5446647026201709e-11, 0.3163064243829271e-10, 0.1032902470665773e+01, 0.4430813007685208e-16, 0.2338591866968101e+02, 0.2338591866968102e+02, 0.1020000000000000e+03, 0.3054421768707483e+01, 0.2941176470588236e+01, 0.1546723845733498e-03, 0.5911878145202527e+00, 0.3652829585034170e-19, 0.1702044288822486e-03, 0.2177596193354961e-03, 0.5378924504889658e+00, 0.1350864780951217e-19, 0.2554902108144373e-06, 0.3466691719880105e-07, 0.3142273934265400e+00, 0.2387932220615686e-11, 0.6740109938598231e-04, 0.1019437079103270e-03, 0.2842363451796606e+00, 0.4338734978715565e-28, 0.4438729696644533e-03, 0.6983868198931385e-03, 0.2019484989227465e+00, 0.2612058427213185e-17, 0.1559412966194519e-03, 0.4467949014161081e-04, 0.1206949829709270e+00, 0.7367960854764250e-27, 0.5572085070843593e-03, 0.6756387538188178e-03, 0.7426269792367300e-01, 0.4936220984925504e-18, 0.5280924471496804e-02, 0.4881438130898638e-02, 0.2567909008435554e-01, 0.4675428386128583e-22, 0.7885638578366546e-02, 0.6344653647355715e-02, 0.2199972943425967e-02, 0.1043187069080892e-15, 0.1144606019330652e-02, 0.5299576363060843e-02}, + info: 0, + }, + { + z: []float64{0.2890643798273074e-01, 0.8912209063965303e+00, 0.7091793456137876e+00, 0.8245039023218966e+00, 0.5191833323605244e+00, 0.9894484508239253e+00, 0.5658659268235589e-01, 0.1289734762146678e+00, 0.3341027001588562e+00, 0.6587235353882195e+00, 
0.7097532970603662e+00, 0.5025058684824796e+00, 0.5130835936402325e+00, 0.6692657853529054e+00, 0.2879265169895939e+00, 0.8181258306911487e-01, 0.2380388408780171e+00, 0.1056636973917433e+00, 0.4227669640232596e+00, 0.8862610364887391e+00, 0.3174262171725216e+00, 0.4110634725577023e+00, 0.6279625314936111e+00, 0.2325114979496041e+00, 0.5638889949786475e+00, 0.7596856267416864e+00, 0.8240081240110467e+00, 0.9973320050910897e+00, 0.5587421997876323e+00, 0.4288334013103122e+00, 0.8314043340979348e+00, 0.5650188659610876e+00, 0.2964679109455457e+00, 0.8407175605087841e+00, 0.1058970658656445e+00, 0.6883026296612506e+00, 0.9166830669002658e+00, 0.7992763288082885e+00, 0.5373538962118501e+00, 0.8167960120885054e-01, 0.9998997916538879e+00, 0.1975674901149639e+00, 0.7582212604798201e+00, 0.2860808530483561e+00, 0.5862675745609662e+00, 0.4334284293442484e+00, 0.9545191575190999e+00, 0.5542276060509370e+00, 0.9135493345204525e+00, 0.9597250519108470e+00, 0.4020434518393716e+00, 0.2147209304773570e+00, 0.8396072187357826e+00, 0.5371494282057584e+00, 0.9930506172699961e+00, 0.3818967451326610e+00, 0.8312670748053316e+00, 0.7518558266385443e+00, 0.7897281313428761e+00, 0.4898691976861856e+00, 0.9259118998644267e+00, 0.2411762039549979e+00, 0.3003769542607895e+00, 0.9868194969589331e+00, 0.1841003538786936e+00, 0.1211293694641126e+00, 0.7968556025054743e+00, 0.5500013093928259e+00, 0.1705581246551358e-01, 0.5695318221187962e-02, 0.1626907733376057e+00, 0.3178865594423397e-01, 0.6489383021126400e+00, 0.3661200730578906e-01, 0.9937238435410299e+00, 0.8693948290233534e+00, 0.8108974262339508e-01, 0.4858895290851201e+00, 0.5417752479164495e+00, 0.3742432440371155e+00, 0.4593696265617863e+00, 0.7058043658513553e+00, 0.3505553117224474e+00, 0.9647100571711842e+00}, + n: 21, + zOut: []float64{0.2698643664756728e+01, 0.2430046536911446e+01, 0.2385739474636754e+01, 0.2028138720182182e+01, 0.1901894919230603e+01, 0.1736372633374239e+01, 0.1325608228839813e+01, 0.1300504895564844e+01, 0.1261345481029282e+01, 0.1136052475360616e+01, 0.1031365869480280e+01, 0.7689316276636458e+00, 0.5233321832080760e+00, 0.4817169321317593e+00, 0.3418348106633131e+00, 0.2652183763191593e+00, 0.1871042345023881e+00, 0.9160826173360032e-01, 0.4317601760647842e-01, 0.3303497213448481e-02, 0.1231458285303120e-03, 0.9226440439387837e-18, 0.2555628409159789e-14, 0.2440793084913596e-14, 0.1325608228839813e+01, 0.0000000000000000e+00, 0.5589422112493051e-13, 0.3085898188945034e-13, 0.1300504895564844e+01, 0.7444625512849138e-23, 0.1759654299048522e-12, 0.9781103208866350e-13, 0.1261345481029282e+01, 0.6802337789204302e-18, 0.3650786119252150e-16, 0.3306223266172732e-17, 0.1136052475360616e+01, 0.9063201483067583e-22, 0.4434055300668524e-09, 0.2614666143660316e-09, 0.1031365869480280e+01, 0.2213258132402469e-23, 0.2194206198623719e+02, 0.2194206198623719e+02, 0.1100000000000000e+03, 0.3090702947845805e+01, 0.2727272727272727e+01, 0.1493749053307339e-05, 0.5233321832080760e+00, 0.6795157417200880e-17, 0.4382394072074649e-04, 0.6871024518289093e-04, 0.4817169321317593e+00, 0.6227468015696590e-20, 0.2332676874935389e-03, 0.1644354889791431e-03, 0.3418348106633131e+00, 0.8455074291031222e-24, 0.5211421342985135e-03, 0.2133578004049512e-03, 0.2652183763191593e+00, 0.4572942646937810e-19, 0.1472671843690071e-04, 0.1084821769416695e-04, 0.1871042345023881e+00, 0.4930380657631324e-31, 0.2208706795255935e-03, 0.1179477310012027e-03, 0.9160826173360032e-01, 0.2210123199165121e-23, 0.1806566548732391e-03, 0.1927419907906541e-03, 
0.4317601760647842e-01, 0.5416053917983313e-23, 0.1030445327331746e-02, 0.1902956416897231e-02, 0.3303497213448481e-02, 0.8065145389241560e-19, 0.2031248978330205e-02, 0.9946626682395031e-03, 0.1231458285303120e-03, 0.1613264727387662e-23, 0.1026826157383968e-01, 0.7685291223570192e-02}, + info: 0, + }, + { + z: []float64{0.9180725347208115e+00, 0.2301506372544524e+00, 0.2276838071944142e+00, 0.3342400207211600e+00, 0.9639463843013925e+00, 0.5437724527901152e+00, 0.1580993234156807e+00, 0.7877490287014776e+00, 0.3507533849023979e+00, 0.8833112450245699e+00, 0.3185731691946991e-01, 0.4118134066759526e+00, 0.8069686780110494e+00, 0.8681061806796153e-01, 0.6859948674552803e+00, 0.3663178920382139e+00, 0.3720291416615038e+00, 0.8903287156706564e+00, 0.1324429304812152e+00, 0.1008541223140741e+00, 0.8373883396136816e-01, 0.6715628313581425e+00, 0.1594467086953334e+00, 0.5413131938455139e+00, 0.1818140526389971e+00, 0.8685248287187100e+00, 0.5132298455242696e+00, 0.6886095101515632e+00, 0.4005049001067575e+00, 0.6084237383755163e+00, 0.7877676299774720e+00, 0.7102138904901485e+00, 0.6423372229000449e+00, 0.5613660527307726e+00, 0.9158966894941798e+00, 0.4933711427035579e+00, 0.7338439214039514e+00, 0.9908978412238557e+00, 0.5798575644928616e-01, 0.8977291910051136e+00, 0.9253772304099678e+00, 0.3904911857418987e+00, 0.4422593302764081e+00, 0.8489177311215401e+00, 0.7212670509555793e+00, 0.9615617648004515e+00, 0.8226716687395509e+00, 0.9612871412036802e+00, 0.1291974168797438e-01, 0.4488531482063538e+00, 0.2469769340642032e+00, 0.3614360453900619e+00, 0.2074351009622584e+00, 0.7016685284881871e+00, 0.1548060648394445e-01, 0.7311687113133817e+00, 0.9296728393442250e+00, 0.9396166536686752e+00, 0.8501954423645642e+00, 0.4336964896981472e+00, 0.6496017727538979e+00, 0.4464862127725214e+00, 0.4878335970157573e+00, 0.3486784051750773e+00, 0.7455411052079824e+00, 0.5444043691163374e+00, 0.3832092245164885e+00, 0.2030848069834680e+00, 0.9785068556455849e+00, 0.4981273340936699e+00, 0.2968547769748885e+00, 0.8139137104995479e+00, 0.5979876314385963e+00, 0.7890959952347512e+00, 0.1396638856742113e+00, 0.6423356747667288e+00, 0.4204101650717844e+00, 0.3861511203891811e+00, 0.5255853602620404e+00, 0.8105960955427686e+00, 0.1919308380441287e-01, 0.5308532240470543e-01, 0.3836913823972644e+00, 0.5510156269652182e+00}, + n: 21, + zOut: []float64{0.2573005090179951e+01, 0.2214810120454235e+01, 0.1922733254389488e+01, 0.1915272191491466e+01, 0.1796031325760082e+01, 0.1645710570695390e+01, 0.1570469265391087e+01, 0.1354841927194508e+01, 0.1294466452193999e+01, 0.1181212352080131e+01, 0.1084541549607215e+01, 0.7996017680870424e+00, 0.7671322760534200e+00, 0.5413194973828868e+00, 0.4525283650095193e+00, 0.2478901170080125e+00, 0.2160828479985811e+00, 0.1019188530491480e+00, 0.3154199687907010e-01, 0.6039114283538317e-02, 0.2585297601905087e-05, 0.1842779074596284e-25, 0.3846373205279405e-07, 0.1879918241657939e-07, 0.1570469265391087e+01, 0.5301607341005493e-20, 0.5291292043249480e-06, 0.9680604951014117e-06, 0.1354841927194508e+01, 0.4298088920574052e-25, 0.7835178780587823e-04, 0.5534134534297650e-04, 0.1294466452193999e+01, 0.1009741958682895e-27, 0.1198198722705974e-04, 0.2465537083987539e-04, 0.1181212352080131e+01, 0.6309019138365174e-17, 0.1614759348306573e-06, 0.9017894254302745e-07, 0.1084541549607215e+01, 0.2228464273087314e-11, 0.2171715152048637e+02, 0.2171715152048638e+02, 0.1110000000000000e+03, 0.3170068027210884e+01, 0.4504504504504505e+01, 0.2430910851731611e-05, 0.7671322760534200e+00, 
0.2706512346053632e-23, 0.6377722122869933e-05, 0.4589261521941547e-05, 0.5413194973828868e+00, 0.6745076284001739e-23, 0.1892848367477632e-04, 0.2486261833151780e-04, 0.4525283650095193e+00, 0.4943696629711454e-24, 0.8902582280300076e-04, 0.6878116767917398e-04, 0.2478901170080125e+00, 0.2843433355651033e-22, 0.3448699135776589e-03, 0.2736188045956721e-03, 0.2160828479985811e+00, 0.6002787666751380e-18, 0.2577148216451160e-04, 0.6049628369117275e-04, 0.1019188530491480e+00, 0.1341063538875720e-28, 0.1448550797531803e-02, 0.1280708514177034e-02, 0.3154199687907010e-01, 0.6162975822039155e-32, 0.1183522807025889e-02, 0.1098677799367788e-02, 0.6039114283538317e-02, 0.2067951531379678e-24, 0.3575820084217755e-02, 0.2195872556012757e-02, 0.2585297601905087e-05, 0.1137373342260413e-23, 0.1063898509008824e-02, 0.9861644892506420e-03}, + info: 0, + }, + { + z: []float64{0.9985296910189717e+00, 0.8831740589213332e+00, 0.2324113848518039e+00, 0.7533238559688371e+00, 0.3064992106368385e+00, 0.3932363212439300e+00, 0.5064743635322475e+00, 0.2720810167440399e+00, 0.1759478514683438e+00, 0.1390889491193166e+00, 0.1132483424706631e+00, 0.7690628781223635e+00, 0.9618570638759271e+00, 0.5440138404508711e+00, 0.4137500693527351e+00, 0.1092888165750733e+00, 0.3873107100859553e+00, 0.6511277223354393e+00, 0.2155123791160785e-02, 0.9398729888067584e+00, 0.6720352720616287e+00, 0.1535603342192503e+00, 0.1531061200685264e+00, 0.7545904619621174e+00, 0.7663253741916968e+00, 0.9523625338296001e+00, 0.7564452546028717e+00, 0.3871338859213032e+00, 0.8414924166180344e+00, 0.9656526120147547e+00, 0.8983142486291256e+00, 0.4931225086134777e+00, 0.2402429523967143e+00, 0.2566457485714786e+00, 0.6291482026295649e+00, 0.3102713820139572e+00, 0.8907835976003569e+00, 0.7576089622962475e+00, 0.4268794706004859e+00, 0.9679759265393412e+00, 0.8761096912299335e+00, 0.8367482459498927e+00, 0.6924039391525048e+00, 0.1390797616032549e+00, 0.4788493402292462e+00, 0.4619292003991998e+00, 0.6108276748986761e+00, 0.4236594210420574e-01, 0.1642264441572165e+00, 0.3019681501044491e+00, 0.6872347566608095e-01, 0.4797791709329366e+00, 0.3191437336153862e+00, 0.4231397520665843e+00, 0.9123119461602809e+00, 0.4032629658718287e+00, 0.3761450089120290e+00, 0.1152232563042578e+00, 0.9954630682646954e+00, 0.5373824202779058e+00, 0.6106178691724040e-01, 0.8364420731639322e-01, 0.8933845816890847e+00, 0.5581653322395922e+00, 0.2899731736068278e-01, 0.1770979452363362e+00, 0.5004901441174880e-01, 0.7090935149513330e+00, 0.6196074778714252e+00, 0.7620558730426811e+00, 0.3663579087238161e+00, 0.7799033559850937e+00, 0.5196335588513137e+00, 0.2674625162438964e+00, 0.2943854187199277e+00, 0.7770221064689762e+00, 0.1654873860303906e+00, 0.1664508117665333e+00, 0.4846990191351895e+00, 0.8900931539882498e+00, 0.1193560423223905e-01, 0.7974122776570580e+00, 0.1940680748090599e+00, 0.6554519288132757e+00}, + n: 21, + zOut: []float64{0.2834962035267250e+01, 0.2457739633394971e+01, 0.2412615740112653e+01, 0.2205350258735166e+01, 0.2102070731548960e+01, 0.1691268502125131e+01, 0.1682341944223941e+01, 0.1365229052855181e+01, 0.1159748452086257e+01, 0.1104152264055237e+01, 0.9082896055849982e+00, 0.8350561719565637e+00, 0.6006322679527780e+00, 0.5944104151258589e+00, 0.3054714512857917e+00, 0.1934898777304675e+00, 0.1070106456037005e+00, 0.7977404013852835e-01, 0.4467492857453391e-01, 0.1796045849894196e-01, 0.1273912617121224e-04, 0.5392013224803094e-11, 0.7047397633000775e-04, 0.4975950204285101e-04, 0.1682341944223941e+01, 0.0000000000000000e+00, 
0.1776257168565356e-04, 0.2478687660984077e-04, 0.1365229052855181e+01, 0.2904595710528663e-17, 0.2723859519581362e-08, 0.9106491605280572e-08, 0.1159748452086257e+01, 0.1011100673149366e-15, 0.1332628661445848e-04, 0.5871026575773219e-05, 0.1104152264055237e+01, 0.8308334390451641e-18, 0.8332699889408906e-05, 0.9212653513444334e-05, 0.9082896055849982e+00, 0.3518078502828044e-18, 0.2270226121598308e+02, 0.2270226121598308e+02, 0.1240000000000000e+03, 0.3505668934240363e+01, 0.5645161290322581e+01, 0.2479678732743906e-05, 0.6006322679527780e+00, 0.0000000000000000e+00, 0.7879659138115500e-06, 0.7199236433698600e-06, 0.5944104151258589e+00, 0.3024585909035025e-20, 0.8203337497349541e-06, 0.8765068833865939e-06, 0.3054714512857917e+00, 0.4733165431326071e-26, 0.3737214231038995e-06, 0.3923468144379782e-06, 0.1934898777304675e+00, 0.4260044778133448e-20, 0.3232147542049518e-06, 0.3376352641996194e-06, 0.1070106456037005e+00, 0.3308722450212072e-23, 0.5070207875063323e-06, 0.4107488228647395e-06, 0.7977404013852835e-01, 0.6162975822039155e-32, 0.1529214528521930e-08, 0.6332986682625979e-08, 0.4467492857453391e-01, 0.1074272623509048e-20, 0.2583088473438835e-04, 0.1663016813733052e-04, 0.1796045849894196e-01, 0.7053209140113412e-22, 0.1477449551257491e-03, 0.9547349784916723e-04, 0.1273912617121224e-04, 0.0000000000000000e+00, 0.2066074227952428e-02, 0.2903362394245088e-02}, + info: 0, + }, + { + z: []float64{0.9711900205601530e+00, 0.1638861523026915e+00, 0.1833187318057834e+00, 0.9072755753374127e+00, 0.2283578849017619e+00, 0.7520216236020175e+00, 0.3999380553956507e+00, 0.2624503967963340e+00, 0.4831766333071612e+00, 0.5439216550253211e+00, 0.6414566703448968e+00, 0.4958417903972772e+00, 0.6484413600580574e+00, 0.7544273119845675e+00, 0.7061059155531373e+00, 0.9606326194792012e+00, 0.5067069535242709e+00, 0.3029510615023524e+00, 0.6987030441702768e+00, 0.9783205951302908e+00, 0.1548952654742521e+00, 0.9315036439248046e+00, 0.5516706717897903e+00, 0.5168328579759034e+00, 0.3596295330063849e+00, 0.4476574932894306e-01, 0.9741728325615778e+00, 0.2804678894509116e-01, 0.3902916772468864e+00, 0.8530466945184888e+00, 0.2969904800916723e-01, 0.1860020491995459e+00, 0.7523094177753908e-01, 0.1049676044274535e+00, 0.9260152605248726e+00, 0.4316992548719201e+00, 0.7469073285915341e+00, 0.9833774639100495e+00, 0.4974597252506658e+00, 0.4177634673241238e+00, 0.2822911131407428e+00, 0.8441017127353869e+00, 0.3634452570924520e+00, 0.3139202807660985e+00, 0.3006600055674361e+00, 0.3009046530704764e+00, 0.5749841775931973e+00, 0.8312625997674838e+00, 0.7353697939061690e+00, 0.5347208541042444e+00, 0.3745659295783104e+00, 0.2530774659476339e+00, 0.4186076940004666e+00, 0.9926397860987635e+00, 0.9744627902125954e+00, 0.5156862870399348e-01, 0.9609885826474012e+00, 0.1340044668459911e+00, 0.4269295726396694e+00, 0.1323606439861225e+00, 0.9547002490565327e+00, 0.3235500895339022e+00, 0.9408694232081565e+00, 0.5256315334453321e-01, 0.5435043012268493e+00, 0.8290694048290136e+00, 0.3690485876123201e+00, 0.1315626820984572e+00, 0.7782838224801890e+00, 0.4824080201008953e+00, 0.4904940544366522e+00, 0.7125669967024466e+00, 0.2079010288312985e+00, 0.3154779356788950e+00, 0.8039212621301359e+00, 0.1606576564727077e+00, 0.9470564071979923e+00, 0.9189825217362574e+00, 0.4332760074849629e+00, 0.2823180730446854e+00, 0.8869162317432342e+00, 0.9632623909785668e+00, 0.5010388463829042e+00, 0.4287967421178640e+00}, + n: 21, + zOut: []float64{0.2578695081979337e+01, 0.2397185171350384e+01, 0.2122428254475021e+01, 
0.1845603260474165e+01, 0.1680081614592041e+01, 0.1588339204725779e+01, 0.1309467974766432e+01, 0.1299839243529426e+01, 0.1209324813016549e+01, 0.1078225499420584e+01, 0.1021574489264402e+01, 0.8212075951361281e+00, 0.6252200086865959e+00, 0.5691142568406321e+00, 0.4167987738801949e+00, 0.2624912719101158e+00, 0.1530089161378868e+00, 0.6867147881453296e-01, 0.2505166907249862e-01, 0.2349256929716605e-02, 0.7151879759250424e-03, 0.1577721810442024e-29, 0.4511324342265551e-10, 0.2432911773654117e-10, 0.1309467974766432e+01, 0.0000000000000000e+00, 0.2950215161815511e-09, 0.4606013118502744e-09, 0.1299839243529426e+01, 0.0000000000000000e+00, 0.1717787641934460e-08, 0.1104907179077728e-08, 0.1209324813016549e+01, 0.2227112107619961e-25, 0.8047601721100021e-14, 0.3064349892439287e-13, 0.1078225499420584e+01, 0.1519533811285116e-12, 0.1082897652895214e-09, 0.2849698526408556e-09, 0.1021574489264402e+01, 0.9160719227198137e-18, 0.2107539302297835e+02, 0.2107539302297835e+02, 0.1110000000000000e+03, 0.3317460317460318e+01, 0.3603603603603604e+01, 0.1689224645784323e-04, 0.6252200086865959e+00, 0.1615587133892632e-26, 0.8276137867398754e-04, 0.1155304449854771e-03, 0.5691142568406321e+00, 0.1852884571782062e-20, 0.2098842504693311e-03, 0.1630830816462844e-03, 0.4167987738801949e+00, 0.7013543182763534e-16, 0.5750632050789642e-07, 0.3137720527277731e-06, 0.2624912719101158e+00, 0.1295300917428640e-14, 0.8049639874935534e-04, 0.1584688939577041e-03, 0.1530089161378868e+00, 0.3636550670570672e-17, 0.1206768167404478e-03, 0.2128462525136164e-03, 0.6867147881453296e-01, 0.4176548246217679e-19, 0.3626791442965051e-04, 0.6269592722343766e-04, 0.2505166907249862e-01, 0.8628166150854817e-31, 0.1144078296038157e-02, 0.1062123559861287e-02, 0.2349256929716605e-02, 0.1853343815383365e-22, 0.1348818609641394e-03, 0.1209095310981735e-04, 0.7151879759250424e-03, 0.2331746618088476e-23, 0.3639492207606013e-03, 0.1002254703741813e-03}, + info: 0, + }, + { + z: []float64{0.5145122753774686e+00, 0.7453791781450037e+00, 0.4937919697078275e-01, 0.9600912461125237e+00, 0.5558575009096904e+00, 0.9202496102671529e+00, 0.1803962777705234e+00, 0.6732582413383450e+00, 0.3646300420523177e+00, 0.4351523227625338e+00, 0.5258615952095320e+00, 0.2618724553334706e-01, 0.6600255279090639e+00, 0.4323183918305084e+00, 0.2184526174816337e+00, 0.7758998738943313e+00, 0.3700044944947983e+00, 0.9075309916724112e+00, 0.7873590137849384e-02, 0.6422780210226944e+00, 0.7012374235504933e+00, 0.3225317189575561e-01, 0.9626413579028830e+00, 0.2957325301869758e+00, 0.9393912496594896e+00, 0.3122772343061888e+00, 0.3372582329155205e-01, 0.6694758703662361e+00, 0.5160501222108982e-03, 0.9689575293997177e-01, 0.3780626069308365e+00, 0.1243877469635841e+00, 0.3753610862140089e+00, 0.7686454249086220e+00, 0.8958999262270639e+00, 0.9484254659678725e+00, 0.7229120985603210e-01, 0.7282046499931183e+00, 0.1602916509824523e+00, 0.3708722215113680e+00, 0.5328491413896952e+00, 0.7547173918812869e+00, 0.4313535833192655e+00, 0.5849601017371611e+00, 0.5030126650939655e+00, 0.3345267020968724e+00, 0.8264894134567216e+00, 0.3997968176112487e+00, 0.7977852660508306e+00, 0.3214021056175872e+00, 0.4636510192825659e+00, 0.6796405156053968e+00, 0.6279823358542748e+00, 0.1573806073104234e-01, 0.6382889890358745e-01, 0.5417145287154319e+00, 0.2343304174607242e+00, 0.7662339681589646e+00, 0.6291880658725347e+00, 0.5139706179483768e+00, 0.1583956600060378e+00, 0.9391868842090931e+00, 0.2645051239078070e+00, 0.7814123021217941e+00, 0.8945112708258808e+00, 
0.7464898217422987e+00, 0.3770460515219444e+00, 0.3622886345531428e+00, 0.2208273216958112e-01, 0.7102940678247979e+00, 0.2735151315490909e+00, 0.4039400600381490e+00, 0.9317363313474538e+00, 0.5754441557390587e+00, 0.5886999087747160e+00, 0.3030246870939485e+00, 0.8507721951782303e+00, 0.3043440840669203e+00, 0.6021496993262363e+00, 0.4505272689007913e+00, 0.3874595231430722e+00, 0.2566155965014733e+00, 0.2521494866083717e+00, 0.7091848125152684e+00}, + n: 21, + zOut: []float64{0.2432728498446405e+01, 0.2127807422546525e+01, 0.1810603620342434e+01, 0.1754551066335428e+01, 0.1442239744319712e+01, 0.1358471286041730e+01, 0.1268104094970585e+01, 0.1166598518949770e+01, 0.1142749842956014e+01, 0.8038795081997708e+00, 0.7842646316799070e+00, 0.7340488722153766e+00, 0.6406325830516326e+00, 0.6326463399427553e+00, 0.4829120798428922e+00, 0.4021501496449236e+00, 0.3628067219099092e+00, 0.1315077965186685e-01, 0.3447035773652165e-02, 0.9829838131645125e-03, 0.5542446906743292e-07, 0.3726944967513355e-18, 0.7775649827661083e-15, 0.4707679240410040e-15, 0.1268104094970585e+01, 0.4204313080465905e-25, 0.2081271826241763e-14, 0.3549560565109884e-14, 0.1166598518949770e+01, 0.0000000000000000e+00, 0.3900682806423283e-13, 0.2423637210496787e-13, 0.1142749842956014e+01, 0.1408856084028437e-16, 0.9605918786201080e-13, 0.6043593726409546e-13, 0.8038795081997708e+00, 0.8160364745046778e-12, 0.9063031760991450e-12, 0.1255592191298957e-11, 0.7842646316799070e+00, 0.2274746684496083e-23, 0.1936477583605892e+02, 0.1936477583605892e+02, 0.1150000000000000e+03, 0.3673469387755102e+01, 0.7826086956521739e+01, 0.1055105767279683e-11, 0.6406325830516326e+00, 0.7534418653829021e-12, 0.2026915084594321e-10, 0.1938443898507311e-10, 0.6326463399427553e+00, 0.7428954317778491e-22, 0.1822158222411987e-10, 0.2310105649781206e-10, 0.4829120798428922e+00, 0.6818472407494482e-21, 0.4806621018591031e-08, 0.3430760494577760e-08, 0.4021501496449236e+00, 0.9407638730741502e-13, 0.4934881466368944e-07, 0.7682525823543034e-07, 0.3628067219099092e+00, 0.4641733924717305e-17, 0.3006332568302334e-06, 0.4841414710438772e-06, 0.1315077965186685e-01, 0.0000000000000000e+00, 0.4649166787994157e-03, 0.6126205080519521e-03, 0.3447035773652165e-02, 0.0000000000000000e+00, 0.8654037114492079e-03, 0.1518309118546661e-02, 0.9829838131645125e-03, 0.2948446519354054e-25, 0.2136365904689920e-10, 0.9493111427282873e-09, 0.5542446906743292e-07, 0.0000000000000000e+00, 0.8210503952597113e-02, 0.1866280792966489e-03}, + info: 0, + }, + { + z: []float64{0.1382149566425072e+00, 0.6575474867299980e+00, 0.6348472674088155e+00, 0.2171300387013793e+00, 0.8255967246227341e+00, 0.3006539504418347e+00, 0.2997341048143420e+00, 0.8472159875058289e+00, 0.6738978429295159e+00, 0.5424646868297128e+00, 0.4120014569672862e+00, 0.8343380028244362e+00, 0.3003911043596160e+00, 0.4710045367840567e+00, 0.4862652198104067e-01, 0.7471894777046170e+00, 0.1793049886163456e+00, 0.7023511329140396e+00, 0.7338019696975340e-02, 0.6396982308318109e+00, 0.9907774162255600e-01, 0.9295761484947797e+00, 0.9648060362942152e+00, 0.9280158186031068e+00, 0.2707915909911263e-01, 0.3838735516497237e+00, 0.1682677327524866e+00, 0.2113105759433432e+00, 0.9045861188128749e+00, 0.7471063495001966e+00, 0.8342180430544732e-01, 0.1412979915701199e+00, 0.8279925714974835e+00, 0.1155895711297670e+00, 0.9231783969041454e+00, 0.3728782651185074e+00, 0.6830046202574890e+00, 0.9069008465878121e+00, 0.9754648591267837e+00, 0.8893150707687680e-01, 0.6771275111272185e+00, 0.9913978119103384e+00, 
0.7253101374756428e+00, 0.9967636737323063e+00, 0.8627812047783159e+00, 0.3179382291892185e-01, 0.3971194218951257e+00, 0.1929798662828303e+00, 0.8983406738830656e+00, 0.8696986427722838e+00, 0.1626550211323166e+00, 0.5446758738044107e+00, 0.7791897500950196e+00, 0.5495589328822522e+00, 0.2598162756138545e+00, 0.4878822167962611e+00, 0.1063554708684851e+00, 0.9205986422545666e+00, 0.2525019548845375e+00, 0.6202404757545382e+00, 0.4398012123976176e+00, 0.3367811528280380e+00, 0.2846930465290612e+00, 0.3637854092863660e+00, 0.2816012898088578e+00, 0.8253973364224151e+00, 0.1068693035499244e+00, 0.6335234891165292e+00, 0.4457012705011152e+00, 0.3822128202770037e-01, 0.8044679788500431e-02, 0.1644333417433916e+00, 0.8243501026702226e+00, 0.3724822902257573e+00, 0.3284619434713851e+00, 0.8284230195063381e+00, 0.9834160248288059e+00, 0.5390643127606534e+00, 0.4874602694948874e+00, 0.3393327340444893e+00, 0.9478697182248690e+00, 0.9005921446100461e+00, 0.6870582101064761e+00, 0.8753727823211164e+00}, + n: 21, + zOut: []float64{0.2520205603715150e+01, 0.2467591671122728e+01, 0.2156156465592955e+01, 0.1830603796963098e+01, 0.1688980285573207e+01, 0.1475729352347043e+01, 0.1457403847234258e+01, 0.1216096979409916e+01, 0.1020221539309743e+01, 0.9109792709218910e+00, 0.7786685137433046e+00, 0.6943983149907305e+00, 0.5952732619368740e+00, 0.5747064141061984e+00, 0.4193783470766816e+00, 0.2849493660130012e+00, 0.2000335007304226e+00, 0.1879518797669419e+00, 0.9250201866750386e-01, 0.6720326503698931e-01, 0.2522304426258675e-08, 0.0000000000000000e+00, 0.6146929522129176e-12, 0.1808448588630822e-11, 0.1457403847234258e+01, 0.4890937612370273e-28, 0.1829381779873047e-10, 0.6063479072736104e-11, 0.1216096979409916e+01, 0.3707772472283591e-24, 0.7129364201262979e-09, 0.6841733798178873e-09, 0.1020221539309743e+01, 0.2138261883449577e-21, 0.5257033338238628e-09, 0.5072654023074907e-09, 0.9109792709218910e+00, 0.7953858577580093e-22, 0.4258628064439383e-09, 0.4402488234184115e-09, 0.7786685137433046e+00, 0.2050608176139579e-11, 0.2063903369678094e+02, 0.2063903369678095e+02, 0.1070000000000000e+03, 0.3256235827664399e+01, 0.7476635514018692e+01, 0.2680505535092421e-09, 0.5952732619368740e+00, 0.3490920289236852e-22, 0.2767075275933556e-09, 0.2579359628684292e-09, 0.5747064141061984e+00, 0.6550779105727953e-17, 0.5565909614599560e-09, 0.4340922452181151e-09, 0.4193783470766816e+00, 0.3307688474446419e-20, 0.4344481061547019e-08, 0.2950068756973984e-08, 0.2849493660130012e+00, 0.2114500635677851e-23, 0.2105683914952377e-07, 0.1386409244069912e-07, 0.2000335007304226e+00, 0.2552627671550359e-24, 0.1231406445198996e-06, 0.7724199195397155e-07, 0.1879518797669419e+00, 0.2762617809804602e-19, 0.3259696859933768e-06, 0.5428848805276070e-06, 0.9250201866750386e-01, 0.2465190328815662e-31, 0.5308529636258477e-04, 0.3674504775836459e-04, 0.6720326503698931e-01, 0.4910144116114772e-20, 0.8312610965835068e-03, 0.3682729181838852e-03, 0.2522304426258675e-08, 0.0000000000000000e+00, 0.2850162708346282e-01, 0.1272924532804709e-01}, + info: 0, + }, + { + z: []float64{0.8107097465650837e+00, 0.6284212325752714e-02, 0.8801609468679439e-01, 0.1317925189426976e+00, 0.7816238529622888e+00, 0.5138440552812117e+00, 0.8491910920486024e+00, 0.5354109734403600e-01, 0.9922209940223927e+00, 0.5302203883752287e+00, 0.7634020308181488e-01, 0.1679415943232965e+00, 0.2329711998757648e+00, 0.1414305919938111e+00, 0.1841480420952157e-01, 0.3417588803467335e+00, 0.2555749764808386e+00, 0.5793635436480560e-01, 0.5867939998055902e+00, 
0.9699556000711096e+00, 0.2545002439558014e+00, 0.6318762794888357e+00, 0.3917398730545906e+00, 0.5624711052874073e+00, 0.4151753215255505e-01, 0.2323556867603191e+00, 0.8346423132579881e+00, 0.4193569213002444e+00, 0.5070048505400149e+00, 0.9880568155195324e-01, 0.6350898210108906e+00, 0.2370382383359541e+00, 0.3299228061150288e+00, 0.4349239764921071e+00, 0.5262500692361848e+00, 0.5263549923651836e+00, 0.8161052184432745e+00, 0.5568267671289201e+00, 0.7852669932205985e+00, 0.4481607592551184e-01, 0.6290146498792036e+00, 0.9736787804665343e+00, 0.5369941621471508e+00, 0.6750777504837204e+00, 0.9452511173220645e+00, 0.3252402467696922e+00, 0.8149000487070703e+00, 0.8087485664240622e+00, 0.8643101435258340e+00, 0.4683483752594740e+00, 0.5054760923588362e+00, 0.9646412930917844e+00, 0.1611953384510251e+00, 0.6426851941066625e-01, 0.3732266796993424e+00, 0.6384029126799168e+00, 0.3844644469584264e+00, 0.6635273633462982e+00, 0.2553934870916856e+00, 0.7037835913610216e+00, 0.7783327991001192e+00, 0.7381327592761330e+00, 0.2765915485450557e+00, 0.9846188872724937e-01, 0.1992562101416243e+00, 0.2440052895970430e+00, 0.3301477999473963e+00, 0.5338134839029927e+00, 0.5498293871712301e+00, 0.1974401363114953e+00, 0.7407533997655510e+00, 0.6821638940995054e+00, 0.9814777780110351e+00, 0.4891456709117443e+00, 0.6868139367703679e+00, 0.4181531426430654e+00, 0.6623070699739451e+00, 0.9202771282456093e+00, 0.2973080397025486e+00, 0.4403051603022431e+00, 0.1603780540263299e+00, 0.4910562348998989e+00, 0.7838834745165658e+00, 0.1931809149066596e+00}, + n: 21, + zOut: []float64{0.2163023058341640e+01, 0.1906105992609667e+01, 0.1885682431608721e+01, 0.1629672232824707e+01, 0.1530256904983391e+01, 0.1307296833197682e+01, 0.1220997281181084e+01, 0.9674288853031870e+00, 0.8179252376135864e+00, 0.6735586841320580e+00, 0.6320597056120635e+00, 0.5126868322939253e+00, 0.4828683228163860e+00, 0.4745823594217251e+00, 0.3685025909289511e+00, 0.2947763441663148e+00, 0.8912041525268403e-01, 0.6456597576955210e-01, 0.6259461554584676e-01, 0.1853533814873023e-01, 0.2023108580404890e-03, 0.8671199725506423e-13, 0.1722304485286045e-09, 0.1666294529992108e-09, 0.1220997281181084e+01, 0.1774302413926244e-21, 0.1370178691819834e-09, 0.1399340123848638e-09, 0.9674288853031870e+00, 0.4276620702127187e-22, 0.3173068333548179e-09, 0.2274343348931791e-09, 0.8179252376135864e+00, 0.7888609052210118e-30, 0.5644994943584269e-09, 0.8117631577107350e-09, 0.6735586841320580e+00, 0.2962158340816585e-16, 0.9564613436107998e-08, 0.6775718950029066e-08, 0.6320597056120635e+00, 0.4964115825120276e-16, 0.1710244235260994e+02, 0.1710244235260994e+02, 0.1120000000000000e+03, 0.3265306122448980e+01, 0.6250000000000000e+01, 0.5299633299932952e-07, 0.4828683228163860e+00, 0.2680954403830288e-19, 0.1311205982337445e-05, 0.9790063018353157e-06, 0.4745823594217251e+00, 0.5160286279848935e-24, 0.1890785247579508e-05, 0.1394729630636152e-05, 0.3685025909289511e+00, 0.4693722386065020e-28, 0.6031882082954460e-05, 0.2415384446316590e-05, 0.2947763441663148e+00, 0.1882813323774143e-21, 0.7814092950831008e-04, 0.8799876387761915e-04, 0.8912041525268403e-01, 0.1005797654156790e-28, 0.3017726164846613e-06, 0.1278833151586840e-06, 0.6456597576955210e-01, 0.4635851024992188e-19, 0.2982580055136947e-07, 0.4759731822531787e-07, 0.6259461554584676e-01, 0.4490807166402986e-22, 0.9170799331172770e-06, 0.3025792024297308e-06, 0.1853533814873023e-01, 0.4985600920996795e-27, 0.9814614537388964e-04, 0.3116488226164178e-03, 0.2023108580404890e-03, 
0.0000000000000000e+00, 0.9878507322060364e-03, 0.9125548412055403e-03}, + info: 0, + }, + { + z: []float64{0.4943635351465083e+00, 0.1295760812930432e+00, 0.7491711827268356e+00, 0.8583007697336751e+00, 0.8597988452333791e+00, 0.9020619105716389e+00, 0.2915077046929524e+00, 0.2667690273281318e+00, 0.2640508133196529e-01, 0.6592981688042527e+00, 0.5004017812459820e+00, 0.7056211227388411e+00, 0.2254860841977465e+00, 0.8570117342825790e+00, 0.6863131135194549e+00, 0.6029337708611949e+00, 0.6631267757314846e+00, 0.5332167960220966e+00, 0.3047497072553197e+00, 0.3019927731659721e+00, 0.6244714872283929e+00, 0.3204566006732108e+00, 0.9437738905138152e+00, 0.4462876020196550e+00, 0.6693016494515551e+00, 0.5687239407477801e+00, 0.5130358959101252e+00, 0.1634623710980615e+00, 0.6356065394794165e+00, 0.9187737904031696e+00, 0.8612817405760382e+00, 0.1602125514297330e+00, 0.5297426144233635e+00, 0.3699984812614213e+00, 0.3455622529703073e+00, 0.9962779456692529e+00, 0.3564481259772661e+00, 0.2073475342531472e-01, 0.5839325153640170e+00, 0.6856088547326490e+00, 0.1606020836922776e+00, 0.2475573692197572e+00, 0.8169030104559274e+00, 0.1727021335202554e+00, 0.8794153095346040e+00, 0.1052546999951709e-01, 0.4332903851899277e+00, 0.4302943891085251e+00, 0.6360909664440160e+00, 0.1924830187263967e+00, 0.6792019925733094e+00, 0.7328300669715262e+00, 0.8282900281692296e+00, 0.1586820387506954e+00, 0.5999731076117462e+00, 0.4873494502000296e+00, 0.4446262392098149e+00, 0.6430807880043099e-01, 0.4949024386471711e+00, 0.7324156973660632e+00, 0.4547667791304575e+00, 0.7542985454738853e+00, 0.3755946849636591e+00, 0.7135765429513076e+00, 0.7888131931751277e+00, 0.6384460726826821e+00, 0.4685063200928931e+00, 0.1343379262734882e+00, 0.8857603211032115e+00, 0.5798608180272281e+00, 0.8765662925729737e+00, 0.2107645345329755e+00, 0.7032612644639602e-01, 0.2672549085142431e+00, 0.5322118352980683e+00, 0.4697779217142906e-01, 0.1959612687762308e+00, 0.7346288295330818e+00, 0.4373432557584073e+00, 0.2514178299506654e+00, 0.7845430350805238e+00, 0.7823648156079857e+00, 0.6098686444303012e+00, 0.2687771415020881e+00}, + n: 21, + zOut: []float64{0.2638973127799410e+01, 0.2299941431214051e+01, 0.2280019421450360e+01, 0.2103910562764817e+01, 0.1753406216295798e+01, 0.1675269066003242e+01, 0.1419658234290438e+01, 0.1365959224584141e+01, 0.1187696680558471e+01, 0.1126073702945326e+01, 0.8926412592525027e+00, 0.7715773240739381e+00, 0.5428647577479762e+00, 0.4329885186763779e+00, 0.3589145864992100e+00, 0.2461266947628802e+00, 0.1802035742271595e+00, 0.1083562693061272e+00, 0.7909884021715781e-01, 0.2853616071040153e-01, 0.1859995500862765e-03, 0.5169878828456423e-25, 0.7099412452593561e-13, 0.1129160994681896e-12, 0.1419658234290438e+01, 0.7523863534922662e-17, 0.1645873945742296e-11, 0.1162641799741134e-11, 0.1365959224584141e+01, 0.3171195595439500e-23, 0.6168272840237550e-11, 0.8582511762369885e-11, 0.1187696680558471e+01, 0.8519789992808060e-16, 0.1934844654065141e-10, 0.2568816579510325e-10, 0.1126073702945326e+01, 0.8564308176121538e-22, 0.5690968370753065e-10, 0.7456610067231580e-10, 0.8926412592525027e+00, 0.4500097668303756e-13, 0.2149240165292988e+02, 0.2149240165292987e+02, 0.1030000000000000e+03, 0.2950113378684807e+01, 0.4854368932038835e+01, 0.1146501122497315e-08, 0.5428647577479762e+00, 0.2966117003631004e-27, 0.2887486457338918e-08, 0.3484100742696835e-08, 0.4329885186763779e+00, 0.9478952637135678e-26, 0.8311221870318503e-08, 0.6953572191921854e-08, 0.3589145864992100e+00, 0.2375820297137957e-16, 
0.1629548458200623e-07, 0.1368379043565619e-07, 0.2461266947628802e+00, 0.1012567897168591e-16, 0.3168042687452701e-07, 0.3807114536183845e-07, 0.1802035742271595e+00, 0.8688408444571280e-16, 0.7017919615597339e-07, 0.9241973720004823e-07, 0.1083562693061272e+00, 0.3747089299799806e-28, 0.3320058009883818e-06, 0.6266972429387508e-06, 0.7909884021715781e-01, 0.9269115636346889e-29, 0.6452918276099027e-05, 0.1508443954686445e-04, 0.2853616071040153e-01, 0.2499111347740165e-24, 0.3360677328899296e-03, 0.1546059694047028e-03, 0.1859995500862765e-03, 0.1972152263052530e-28, 0.5448826922428762e-03, 0.1724316467181159e-02}, + info: 0, + }, + { + z: []float64{0.5641760158025995e+00, 0.2227931624220032e+00, 0.1040729150876691e+00, 0.7228520011975436e+00, 0.2490915552877161e+00, 0.4463107957028827e+00, 0.6882435259034004e+00, 0.2173580561342540e+00, 0.8146865135594571e+00, 0.3630549548424193e+00, 0.2813912484076728e+00, 0.7560034280086618e+00, 0.7800835186160227e-01, 0.9984316063995433e+00, 0.2057310185040729e+00, 0.4428139729521006e+00, 0.1946530136132755e+00, 0.8953866467149748e+00, 0.5910093704925600e+00, 0.9736056943014803e+00, 0.7342559355497796e+00, 0.5630651274913434e+00, 0.3586901690989497e+00, 0.1859950386974873e-01, 0.4012559193852120e+00, 0.5767984160414075e-01, 0.3379735138652440e+00, 0.6337834884220164e+00, 0.8830566914548466e-01, 0.2985048049064926e+00, 0.9820684138520455e+00, 0.1144932752938616e+00, 0.5905532956519596e+00, 0.4757653978696945e+00, 0.1997612463043563e-01, 0.6356348606671800e+00, 0.8478495728965164e+00, 0.4997410778119411e+00, 0.6861774168985810e+00, 0.6912919871514965e+00, 0.1276032443467088e+00, 0.2695430969348002e+00, 0.9446688115451145e+00, 0.5104603739010690e+00, 0.8863068797328323e+00, 0.3347723354379355e+00, 0.2977917172311930e+00, 0.9464871506253476e+00, 0.4279639349386329e+00, 0.6378279454376028e+00, 0.2778621310060349e+00, 0.6971058794896068e-01, 0.6309805553452471e+00, 0.3447281367207256e-01, 0.8480855191050544e+00, 0.6700112511333863e+00, 0.1616260465745026e+00, 0.7744510887458210e+00, 0.8408544531170117e+00, 0.8444763755373497e-01, 0.3286971575766536e+00, 0.8244867044222992e+00, 0.8036107174439190e+00, 0.4654902702844697e+00, 0.8300602115903392e+00, 0.5911511948645825e+00, 0.5734805041784674e+00, 0.6046912223886362e+00, 0.4400460907177143e+00, 0.4069795298963303e+00, 0.2413714645456244e+00, 0.7838758775665940e+00, 0.1371288591293247e+00, 0.8435169936355635e-01, 0.4867567406491472e+00, 0.2914527168883616e+00, 0.7540094160758792e+00, 0.6792178828893637e+00, 0.8924641252729165e+00, 0.2188072300242774e+00, 0.1899571860965691e+00, 0.7806965639263314e+00, 0.8706240780095753e-01, 0.7170634647198669e+00}, + n: 21, + zOut: []float64{0.2553381332090116e+01, 0.2166390759033473e+01, 0.1737362408504838e+01, 0.1528419999405254e+01, 0.1507484707859341e+01, 0.1334735890703865e+01, 0.1222641790395049e+01, 0.1186722126839839e+01, 0.1073660052771764e+01, 0.9488489095119069e+00, 0.9141860725715962e+00, 0.7726247731371300e+00, 0.6075856291402457e+00, 0.5409244297850613e+00, 0.4230641503671385e+00, 0.2356839373494127e+00, 0.1583434617720056e+00, 0.3187519618749328e-01, 0.2747107248999187e-01, 0.1480095837386152e-02, 0.5569185181399456e-04, 0.3215260779752862e-17, 0.2284971140234050e-13, 0.6915955898032105e-13, 0.1222641790395049e+01, 0.9860761315262648e-31, 0.9831000121605633e-10, 0.3802561298586086e-10, 0.1186722126839839e+01, 0.2066815571679051e-27, 0.6142487424703478e-09, 0.1524532000885779e-08, 0.1073660052771764e+01, 0.3155443620884047e-29, 0.1933670198753400e-07, 
0.4311109498288097e-07, 0.9488489095119069e+00, 0.6613963504021090e-17, 0.7125410558010122e-07, 0.6137903750393884e-07, 0.9141860725715962e+00, 0.2713620929442760e-22, 0.1897294248760472e+02, 0.1897294248760472e+02, 0.1070000000000000e+03, 0.3233560090702948e+01, 0.4672897196261682e+01, 0.2313496546014482e-05, 0.6075856291402457e+00, 0.2008094738046662e-26, 0.6354651604911740e-04, 0.4633045629030685e-04, 0.5409244297850613e+00, 0.2465190328815662e-30, 0.8082164119218066e-03, 0.5151838483992701e-03, 0.4230641503671385e+00, 0.3299331849996360e-25, 0.9881103085087938e-03, 0.1036550305023510e-02, 0.2356839373494127e+00, 0.2111622871095604e-25, 0.1203513003711909e-02, 0.1300398061184096e-02, 0.1583434617720056e+00, 0.1737664936697394e-23, 0.8260928074113359e-03, 0.9484297179228856e-03, 0.3187519618749328e-01, 0.0000000000000000e+00, 0.1785275423606748e-03, 0.4485462762371783e-03, 0.2747107248999187e-01, 0.8804949879714635e-25, 0.5200632154389635e-08, 0.3234666977722410e-08, 0.1480095837386152e-02, 0.5399595124056782e-24, 0.5661901580915454e-02, 0.5860254353052324e-02, 0.5569185181399456e-04, 0.0000000000000000e+00, 0.1190560893471898e-01, 0.9131404251707014e-02}, + info: 0, + }, + { + z: []float64{0.7290518158132969e+00, 0.5196659176766039e-03, 0.6083923160589086e+00, 0.3459319602615154e+00, 0.3277021139736517e+00, 0.6020592719504614e-01, 0.4608784204502808e+00, 0.8430478676154098e+00, 0.3856212930081542e+00, 0.6602823264317716e+00, 0.4451601253283373e+00, 0.4130313322309198e-01, 0.7414750648249350e+00, 0.1630235543068703e-01, 0.6004925370465983e+00, 0.6953287054569501e+00, 0.7809046354974044e+00, 0.7892746585788727e+00, 0.8348704064067982e+00, 0.5999411739898802e+00, 0.5502575606089829e+00, 0.4392206918044746e+00, 0.7984758158185817e+00, 0.9084185319619590e+00, 0.1786837110145214e+00, 0.2414921532322530e+00, 0.5630913108462098e+00, 0.7820323704275389e+00, 0.5523326888742272e+00, 0.9015512185421630e+00, 0.6659841765947113e+00, 0.2845604319423287e+00, 0.3491764093356571e+00, 0.9628247275953050e-01, 0.6976897044251388e+00, 0.2960314575787897e+00, 0.5554565606281359e-01, 0.3558757052257084e+00, 0.9341850945519581e+00, 0.6665938574616496e+00, 0.9614102712845256e+00, 0.8607214960584614e+00, 0.4038740733880183e+00, 0.8195623831081322e+00, 0.6129183541897144e+00, 0.7215327066311872e-01, 0.5031970254369666e-01, 0.1396452298202989e+00, 0.9662061833347503e+00, 0.1270920838674061e+00, 0.4311106155362532e+00, 0.8052749852152479e+00, 0.4015794631208873e+00, 0.9090852867289334e-01, 0.6606775367976888e+00, 0.1481179966505828e+00, 0.9638580924515038e+00, 0.9709844996525795e+00, 0.1150673643193312e-01, 0.1429293205078334e+00, 0.2900042758147406e+00, 0.9072348766684573e+00, 0.5781273728635536e+00, 0.5436950045999074e+00, 0.1857622262967125e+00, 0.9164714647876027e+00, 0.1370982861482903e+00, 0.4992756875010576e+00, 0.1554652709586491e+00, 0.3126479857720679e+00, 0.4107075699045055e+00, 0.2895846225773548e+00, 0.6273125726015746e+00, 0.2838905730735005e+00, 0.5809021463377108e+00, 0.7647718231557297e+00, 0.2069816461533343e+00, 0.3140055757156704e+00, 0.1235726138058405e+00, 0.4368723232060789e-01, 0.7675622822273089e+00, 0.9898297711455730e+00, 0.5845844813092991e+00, 0.3724928805727834e+00}, + n: 21, + zOut: []float64{0.2656606156715505e+01, 0.2361088067613179e+01, 0.2270460825471249e+01, 0.2122871466419806e+01, 0.1864972075992565e+01, 0.1483766917780093e+01, 0.1313593002893207e+01, 0.1121546276208008e+01, 0.1100149868182268e+01, 0.9659264313206680e+00, 0.7624972132947719e+00, 0.7289080649182198e+00, 
0.6752163576811447e+00, 0.6611764720975886e+00, 0.5004340625462813e+00, 0.2440674815088013e+00, 0.2171809202435314e+00, 0.1235497606154792e+00, 0.3737121186344041e-01, 0.2834840406904476e-01, 0.5846755428136478e-02, 0.7443896006885402e-16, 0.5016677150024004e-07, 0.3707799983650212e-07, 0.1313593002893207e+01, 0.2958228394578794e-29, 0.1581957950672119e-06, 0.2298083114979900e-06, 0.1121546276208008e+01, 0.1915993820645148e-16, 0.4749045639556273e-06, 0.4145613346073258e-06, 0.1100149868182268e+01, 0.9026685982667287e-18, 0.2821283116535875e-06, 0.3158894087015811e-06, 0.9659264313206680e+00, 0.2037048580685469e-20, 0.2246093425341190e-06, 0.2076005360058006e-06, 0.7624972132947719e+00, 0.1234566306928925e-21, 0.2124557779286299e+02, 0.2124557779286298e+02, 0.1010000000000000e+03, 0.3002267573696145e+01, 0.2970297029702970e+01, 0.4897987464996691e-04, 0.6752163576811447e+00, 0.7461812581271996e-18, 0.7761124913531668e-04, 0.7483244468930916e-04, 0.6611764720975886e+00, 0.1038114253693458e-19, 0.1582094808245393e-03, 0.1235322388201987e-03, 0.5004340625462813e+00, 0.1010613662291919e-16, 0.2610534203903574e-03, 0.4450812691210283e-03, 0.2440674815088013e+00, 0.1110435042389887e-20, 0.5668320309921573e-04, 0.5311544381771655e-04, 0.2171809202435314e+00, 0.4733165431326071e-27, 0.6599010146996593e-04, 0.7896621041843452e-04, 0.1235497606154792e+00, 0.3155385912111625e-18, 0.1746230697391169e-01, 0.1331928313395539e-01, 0.3737121186344041e-01, 0.7395570986446986e-31, 0.1364698415102933e-02, 0.9400709901879650e-03, 0.2834840406904476e-01, 0.1581043861486090e-22, 0.9826835453665868e-03, 0.1307033404080312e-02, 0.5846755428136478e-02, 0.5593364605928029e-23, 0.6675960788692170e-02, 0.6853670175678725e-02}, + info: 0, + }, + { + z: []float64{0.4059771234161322e+00, 0.9607215623246773e+00, 0.2458768097786967e-01, 0.2349999394464353e-01, 0.5195584553698999e+00, 0.4267565401751807e+00, 0.5653505726897825e+00, 0.1993982232067181e+00, 0.7123509462851438e+00, 0.7322091078130321e+00, 0.6397865818692764e+00, 0.7795245712118530e+00, 0.9924677795119503e+00, 0.9446777193650685e+00, 0.9161440637569995e+00, 0.4356325057978820e+00, 0.6968317286853016e+00, 0.8623411680463823e+00, 0.8789901991689911e+00, 0.3964880116495537e+00, 0.3452640838632506e+00, 0.2457053330236874e+00, 0.3088650440709877e+00, 0.3819576657222301e+00, 0.3428589632713919e-01, 0.5108638376023356e-01, 0.1646491337519898e+00, 0.4402398340108123e+00, 0.6741907107293844e+00, 0.8399512836956711e+00, 0.5689849385173426e+00, 0.5208530271268840e+00, 0.1800472375386978e+00, 0.7659410457647264e+00, 0.2320702606914905e+00, 0.1105800266048680e+00, 0.9555355666039600e-02, 0.1859192596152662e+00, 0.7825627400019242e-02, 0.5286418396370182e+00, 0.2569279638014341e+00, 0.5549104171411018e+00, 0.7406835450501670e+00, 0.8319568443444482e+00, 0.1523286720686696e+00, 0.6741822248281739e+00, 0.7686049385045001e+00, 0.7046090895325479e+00, 0.5187887151753254e+00, 0.1082987708911324e+00, 0.1267838014025160e+00, 0.1115865001832446e+00, 0.9799554493413084e+00, 0.2112830058825086e+00, 0.1414836977037485e+00, 0.6416690587599562e+00, 0.7257743058080667e+00, 0.5941533679852271e+00, 0.2889670048515264e+00, 0.2352763591376699e+00, 0.4250778140801711e+00, 0.1017573282390372e-01, 0.7592168685641336e+00, 0.1236630021486789e+00, 0.3794258137484455e+00, 0.3894249799986492e+00, 0.3467719607731882e+00, 0.5651244918121399e+00, 0.7941305753999619e+00, 0.7160004674307343e+00, 0.6194397297375442e+00, 0.1453439098617406e+00, 0.3817157280391337e-03, 0.9381666563671931e+00, 
0.8543357988390619e+00, 0.4675645670082479e+00, 0.8133557631373793e+00, 0.1160066189475003e+00, 0.6551894700973839e+00, 0.6157191087148942e+00, 0.4651705119652626e+00, 0.2935400080400429e+00, 0.4317648586994048e+00, 0.8522331953027451e-01}, + n: 21, + zOut: []float64{0.3090946182879871e+01, 0.2427731098824810e+01, 0.2178358732328273e+01, 0.1944657148720347e+01, 0.1388313875420951e+01, 0.1366009016577358e+01, 0.1254539813854158e+01, 0.1096743376160052e+01, 0.8976114875705017e+00, 0.7925182471468240e+00, 0.7633415317747125e+00, 0.5940562138773567e+00, 0.3406702196684873e+00, 0.2541261981011808e+00, 0.2081990106830526e+00, 0.1967266434974441e+00, 0.8212572008660911e-01, 0.6231598248219099e-01, 0.2121611392818772e-01, 0.6627852439078377e-02, 0.2020564063973749e-05, 0.5156511541642804e-17, 0.3563959083802911e-09, 0.2193664839500192e-09, 0.1254539813854158e+01, 0.5107396913706617e-17, 0.2245013704648247e-08, 0.3514139913607919e-08, 0.1096743376160052e+01, 0.1770166510860316e-21, 0.1137128600534393e-07, 0.7589514151253922e-08, 0.8976114875705017e+00, 0.3631375006656261e-16, 0.1027683956127825e-09, 0.1703981899256795e-09, 0.7925182471468240e+00, 0.0000000000000000e+00, 0.2692111438183261e-08, 0.1491447500870467e-08, 0.7633415317747125e+00, 0.3655079503368504e-18, 0.1896683648658551e+02, 0.1896683648658551e+02, 0.9800000000000000e+02, 0.2941043083900227e+01, 0.3061224489795918e+01, 0.1473194721443778e-07, 0.3406702196684873e+00, 0.8487843002529896e-19, 0.1107295924775035e-05, 0.6897939613884539e-06, 0.2541261981011808e+00, 0.8699892772041781e-19, 0.4010969039165872e-05, 0.5564243023106877e-05, 0.2081990106830526e+00, 0.1258849778182470e-17, 0.2192657884798391e-05, 0.1921286515322920e-05, 0.1967266434974441e+00, 0.1498835719919922e-28, 0.3181611995531238e-06, 0.2328457188758453e-05, 0.8212572008660911e-01, 0.9162137014768053e-16, 0.4655593170815260e-04, 0.2906458517213153e-04, 0.6231598248219099e-01, 0.7111877894410489e-19, 0.1084455050840344e-03, 0.4940544045928809e-04, 0.2121611392818772e-01, 0.1296887328183343e-24, 0.1059468102013224e-02, 0.9250155264543548e-03, 0.6627852439078377e-02, 0.2014616961124939e-22, 0.1082171160654875e-03, 0.1415095048461561e-03, 0.2020564063973749e-05, 0.0000000000000000e+00, 0.6572087989553770e-04, 0.6490933881766271e-04}, + info: 0, + }, + { + z: []float64{0.3515867373170093e+00, 0.6900024519663606e-01, 0.2562617636787797e+00, 0.4448144612254716e+00, 0.1882829208975508e+00, 0.5473764707914393e+00, 0.2385465140832452e+00, 0.1069888484826550e+00, 0.6650479699038470e+00, 0.5577458834974632e+00, 0.3827016483759793e-01, 0.2430476485682098e-01, 0.4730689827049095e-01, 0.7028807845337007e+00, 0.2183345614483843e+00, 0.4504620631941809e+00, 0.4989666416356805e+00, 0.3524179690092576e+00, 0.2245980971932510e+00, 0.4235501615333767e+00, 0.5262756658101302e+00, 0.9543218824354944e+00, 0.7932768470961233e+00, 0.6115989702914060e-01, 0.5090745247561921e+00, 0.6231416396947956e+00, 0.5848645563028462e+00, 0.4261243979220120e-01, 0.7488050001299923e+00, 0.1371419590365067e+00, 0.2276573560104874e+00, 0.3333510148295767e+00, 0.8801005866377587e+00, 0.1783788042757909e+00, 0.7167665061134074e+00, 0.8328416386285911e+00, 0.3239179761297745e+00, 0.3396256067647054e+00, 0.3124818554244235e-01, 0.3558253686357221e+00, 0.3434895378785284e+00, 0.5623459587025392e-01, 0.2638452014850361e+00, 0.8291744484791080e+00, 0.1974847584119865e+00, 0.7319795333910079e+00, 0.2610606436005336e+00, 0.5710127034520260e+00, 0.1439281438102160e+00, 0.9086761346521347e+00, 0.8079247890865698e+00, 
0.9018578929430654e+00, 0.4900454364738460e+00, 0.8138610668520670e+00, 0.6711229676830790e+00, 0.5055629692453131e+00, 0.7743169767236130e+00, 0.4534345901632091e+00, 0.9064089158232752e+00, 0.4827899713942460e+00, 0.7358703780808750e+00, 0.9369729458478693e-02, 0.8597279323370060e+00, 0.7253136203352766e+00, 0.9571052925069841e+00, 0.2164676786228616e+00, 0.5574107740713430e-05, 0.7154716238932374e+00, 0.1141135068223202e+00, 0.6209438587853456e+00, 0.7851847140042024e+00, 0.4215540797089058e+00, 0.9132020640887749e+00, 0.1510454496157759e+00, 0.4172986352579149e+00, 0.5037450189692434e+00, 0.4485087403424131e+00, 0.5740668076879305e+00, 0.2978796610784261e+00, 0.6581099078338648e-01, 0.8930273171304047e+00, 0.8152477689300778e+00, 0.1069556485483132e+00, 0.6932157626522990e+00}, + n: 21, + zOut: []float64{0.2205969711876647e+01, 0.1949821527895322e+01, 0.1555841877124327e+01, 0.1396030697440207e+01, 0.1339074151471965e+01, 0.1304928492707023e+01, 0.1082549957806062e+01, 0.9163938874206132e+00, 0.8887122139609793e+00, 0.7285328536138327e+00, 0.6136134334950112e+00, 0.5962413214799335e+00, 0.4250735423840991e+00, 0.3604939377250871e+00, 0.2295486599338548e+00, 0.1443459206161604e+00, 0.1184188267216090e+00, 0.7075746168152415e-01, 0.2006576919306350e-01, 0.3756273174580983e-02, 0.4503973951446061e-03, 0.2214970142320351e-19, 0.1258326061053631e-10, 0.9003557400980630e-11, 0.1082549957806062e+01, 0.1308878013942703e-25, 0.3368372626791194e-10, 0.2572022209508410e-10, 0.9163938874206132e+00, 0.7614331624527915e-16, 0.1049500061330476e-08, 0.2928487158224854e-08, 0.8887122139609793e+00, 0.1203547791264655e-21, 0.7079518778653070e-07, 0.2057204985301009e-06, 0.7285328536138327e+00, 0.1084769825180869e-21, 0.6504687546036223e-07, 0.2011551828019932e-07, 0.6136134334950112e+00, 0.1678480469223895e-16, 0.1595062091511705e+02, 0.1595062091511705e+02, 0.1020000000000000e+03, 0.3156462585034014e+01, 0.2941176470588236e+01, 0.1336032003974816e-08, 0.4250735423840991e+00, 0.1033222323690773e-11, 0.1230907576002882e-05, 0.1377773980605759e-05, 0.3604939377250871e+00, 0.5435093612356237e-21, 0.5904438579725169e-05, 0.1247176535660427e-04, 0.2295486599338548e+00, 0.3794809073678436e-22, 0.8654598111714590e-03, 0.7607233838455693e-03, 0.1443459206161604e+00, 0.2335329392584174e-18, 0.9486551585203510e-04, 0.5848864829043123e-04, 0.1184188267216090e+00, 0.2421802979028506e-27, 0.4440929582264130e-04, 0.3669242548994226e-04, 0.7075746168152415e-01, 0.1891688450719986e-26, 0.5237855019006247e-04, 0.2468105637453731e-04, 0.2006576919306350e-01, 0.1180787648903814e-27, 0.8134126440113327e-03, 0.8865157422262554e-03, 0.3756273174580983e-02, 0.2581152881883151e-26, 0.5819544721666204e-02, 0.3152501351079271e-02, 0.4503973951446061e-03, 0.0000000000000000e+00, 0.5880303886253363e-04, 0.1585743718762697e-03}, + info: 0, + }, + { + z: []float64{0.5610544265871693e+00, 0.3865731523333528e+00, 0.8792353035941016e+00, 0.8523516652694250e+00, 0.1444314813898040e-01, 0.8826603847895033e+00, 0.6868176286586357e+00, 0.1885799699020001e+00, 0.8756021324147915e+00, 0.9000748303342890e+00, 0.6057362077051875e+00, 0.7116572505145777e+00, 0.4651180401287146e+00, 0.2732222796277717e-01, 0.4226433912686857e+00, 0.2137613193249559e+00, 0.8364980217841237e+00, 0.7760779616422476e+00, 0.4204777290352190e+00, 0.6224635225380051e+00, 0.2764474942580449e+00, 0.3783169947455127e+00, 0.5180995217194659e+00, 0.3065081068358929e+00, 0.7865831746887696e+00, 0.1478697978858728e+00, 0.9439830303880672e+00, 0.8253272390565236e-01, 
0.3067445628749279e+00, 0.7129198739696287e+00, 0.1655397340526111e+00, 0.1744497852178990e+00, 0.8123176080018479e-01, 0.7468152661528306e+00, 0.8702942511684653e-01, 0.2665897748347560e+00, 0.9213749341388631e+00, 0.1523897006311256e+00, 0.9645030292913545e+00, 0.9612618327522493e-01, 0.4802656873918926e+00, 0.9400504426452867e+00, 0.4625574582408437e+00, 0.7387695442245192e+00, 0.7695082904503155e+00, 0.1394488941705607e+00, 0.4086909510206329e+00, 0.8420266381115991e+00, 0.9564685490270811e+00, 0.7091850518571272e+00, 0.5799173993611073e+00, 0.1780919033120022e+00, 0.4196947685163247e+00, 0.9892865772966106e+00, 0.5502405891255741e+00, 0.7805747931381838e+00, 0.8443114386354278e-01, 0.3818004737775779e+00, 0.5006744900666209e+00, 0.8330036981446504e+00, 0.6972526215524509e+00, 0.5997317354738497e+00, 0.6745658299087246e+00, 0.2369187863883299e+00, 0.4162907871251388e+00, 0.3987550165661536e+00, 0.3127240434721901e+00, 0.9587921004875174e+00, 0.9734600215022703e+00, 0.9507333301136496e+00, 0.3445022862066339e+00, 0.6240012410156072e+00, 0.3036264632031094e-01, 0.6999839547669153e+00, 0.4672138296892350e+00, 0.4669965382645248e+00, 0.7561275857160221e-01, 0.9250454776547237e+00, 0.8620177110728862e+00, 0.5266728805873626e+00, 0.6377535363121675e+00, 0.6098026285363055e+00, 0.8075088997828720e+00, 0.8486821693163010e+00}, + n: 21, + zOut: []float64{0.2468092632282687e+01, 0.2093427673862981e+01, 0.2055392019191517e+01, 0.1647867143267642e+01, 0.1615849876515927e+01, 0.1589328968238607e+01, 0.1271214388518367e+01, 0.1121382381594411e+01, 0.1069461264755295e+01, 0.1002295596318809e+01, 0.8815764834354939e+00, 0.8051252546404786e+00, 0.6374343329196452e+00, 0.5469723867709347e+00, 0.4015178604185544e+00, 0.3707602923265790e+00, 0.1795105850679969e+00, 0.1400595531050117e+00, 0.2548349514639473e-01, 0.1561366451013735e-02, 0.1155321273825210e-02, 0.3137752941767923e-22, 0.1448569525610494e-09, 0.1682297708320945e-10, 0.1271214388518367e+01, 0.3747089299799806e-29, 0.3127891707973917e-07, 0.1432924100618686e-07, 0.1121382381594411e+01, 0.1829309924661015e-19, 0.1414348814679943e-06, 0.1338467241717145e-06, 0.1069461264755295e+01, 0.2814655709828570e-26, 0.1421706541245010e-06, 0.1494636997707923e-06, 0.1002295596318809e+01, 0.2555408239179452e-16, 0.1745096068338993e-06, 0.1639215185470380e-06, 0.8815764834354939e+00, 0.5195421261043265e-21, 0.1992546887610216e+02, 0.1992546887610217e+02, 0.1040000000000000e+03, 0.3263038548752835e+01, 0.3846153846153846e+01, 0.5490557769901970e-06, 0.6374343329196452e+00, 0.1696050946225175e-28, 0.4391959465127471e-06, 0.4441570425561870e-06, 0.5469723867709347e+00, 0.4122920742224575e-16, 0.4792107785920969e-06, 0.5244422797027597e-06, 0.4015178604185544e+00, 0.4437342591868191e-30, 0.1189624719598388e-05, 0.1545254324980458e-05, 0.3707602923265790e+00, 0.0000000000000000e+00, 0.3830671301160838e-05, 0.5436396467135237e-05, 0.1795105850679969e+00, 0.1558228073167050e-14, 0.5423210427919237e-04, 0.1228583690085413e-03, 0.1400595531050117e+00, 0.8832743148296122e-19, 0.1395228897382397e-04, 0.1537585917376726e-04, 0.2548349514639473e-01, 0.0000000000000000e+00, 0.8277350694369951e-03, 0.9528953717632958e-03, 0.1561366451013735e-02, 0.2465190328815662e-31, 0.7095009159403311e-03, 0.6952609237045969e-03, 0.1155321273825210e-02, 0.1309509102666880e-27, 0.9046555260897671e-03, 0.1226313636978587e-02}, + info: 0, + }, + { + z: []float64{0.1242522906682814e+00, 0.6390609620209767e+00, 0.4481822861180138e+00, 0.4850355616354706e+00, 0.4018997557884576e+00, 
0.7075817751574985e+00, 0.3076496509717662e+00, 0.9845619472128853e+00, 0.4421050939804582e+00, 0.3472138710835164e+00, 0.7694977672753175e+00, 0.4786944038481857e+00, 0.6974278852457209e+00, 0.5004517708240744e+00, 0.7658513494839985e+00, 0.7508522623862479e-01, 0.8645614962374704e-02, 0.2513671118506070e+00, 0.9355687048630774e+00, 0.2836435232395548e+00, 0.8653687008315966e+00, 0.3043727382738487e+00, 0.8973655399535756e+00, 0.7135517870607274e+00, 0.4956863425678929e+00, 0.8362506849216409e+00, 0.9829450079482006e+00, 0.9413718361369072e+00, 0.2542921002853715e-01, 0.5951158954104814e+00, 0.9317747763384295e+00, 0.9085271053958202e+00, 0.9993466426511500e+00, 0.2497009223772528e-01, 0.8726460080387569e+00, 0.3868463012727243e+00, 0.5820757557404177e-01, 0.7974577837432384e-01, 0.8242609714155934e+00, 0.1887913799920241e+00, 0.3268225406700475e+00, 0.2264596835393817e+00, 0.4020160264981738e+00, 0.8597685353848280e+00, 0.5429598215579996e+00, 0.4134507486351844e+00, 0.8781720494573462e+00, 0.3655827200857601e+00, 0.2876763179222336e+00, 0.9193112804533413e+00, 0.3958413207165046e-01, 0.7980401670837647e+00, 0.5101422689515223e-01, 0.2194050847732673e+00, 0.6115669547384739e-02, 0.9036470382476453e+00, 0.6696681747677364e+00, 0.1689100553906829e+00, 0.9284887836539969e+00, 0.8748192507086142e+00, 0.7181738874264668e+00, 0.8990747837549429e-01, 0.7166811698029575e+00, 0.8712804839027968e+00, 0.2571898936326318e+00, 0.2264160460654618e+00, 0.6364724288557849e+00, 0.9550337894922362e+00, 0.8995892333665090e+00, 0.3728949594240628e+00, 0.6496276036419958e+00, 0.7218580021816370e+00, 0.2620053580399828e+00, 0.8492826434932900e+00, 0.1090756692624820e+00, 0.1230806328807070e+00, 0.5326342483791896e+00, 0.3425391815117244e+00, 0.4714474424671373e+00, 0.9044440560476328e+00, 0.2298320061743346e+00, 0.7771450381369926e+00, 0.4623588499686800e+00, 0.8996441805847293e+00}, + n: 21, + zOut: []float64{0.2749036998648791e+01, 0.2657147938200912e+01, 0.2156780738808330e+01, 0.2057290308234013e+01, 0.1910481192038085e+01, 0.1550197528741744e+01, 0.1531663815430223e+01, 0.1307411568456853e+01, 0.1180055477995477e+01, 0.1145160315453326e+01, 0.8462599780670359e+00, 0.8196023258344234e+00, 0.7505058481318937e+00, 0.3802378569170251e+00, 0.3493658644293456e+00, 0.2733852715996189e+00, 0.1920071438579977e+00, 0.3602127141457542e-01, 0.1503353393017178e-01, 0.5047074669949666e-02, 0.4814167031169938e-03, 0.1615587133892632e-26, 0.3701495167228324e-14, 0.8166164251788096e-14, 0.1531663815430223e+01, 0.5400326177233737e-21, 0.8345943934662111e-13, 0.3845954584916692e-13, 0.1307411568456853e+01, 0.2270604060615577e-17, 0.1098158814550649e-11, 0.6011774632275279e-12, 0.1180055477995477e+01, 0.6617444900424221e-23, 0.1039362721731823e-10, 0.1780733867152956e-10, 0.1145160315453326e+01, 0.3193234771665464e-17, 0.8582967920523009e-10, 0.1451582492090454e-09, 0.8462599780670359e+00, 0.4268426102972081e-16, 0.2191317346756291e+02, 0.2191317346756291e+02, 0.1100000000000000e+03, 0.3303854875283447e+01, 0.6363636363636363e+01, 0.9730504757902699e-07, 0.7505058481318937e+00, 0.1718125187449148e-14, 0.1758342086676891e-05, 0.2642640058729254e-05, 0.3802378569170251e+00, 0.9047523974206004e-14, 0.3754566725544834e-04, 0.5673943129185001e-04, 0.3493658644293456e+00, 0.2726303288443817e-26, 0.2006689219611138e-03, 0.1777791597062721e-03, 0.2733852715996189e+00, 0.1852245405458936e-26, 0.4273484804153724e-06, 0.2289494629295668e-05, 0.1920071438579977e+00, 0.7673537210642012e-17, 0.3631464818839244e-02, 
0.3052322482564877e-02, 0.3602127141457542e-01, 0.1402889840288648e-17, 0.2065820498676818e-02, 0.7907462477517949e-03, 0.1503353393017178e-01, 0.3588277137446075e-19, 0.9842420036565967e-03, 0.4295497278130008e-03, 0.5047074669949666e-02, 0.1100487290795119e-18, 0.2169791314757801e-02, 0.2746294001732303e-02, 0.4814167031169938e-03, 0.9926167349879280e-23, 0.2524480967032520e-02, 0.1685323473608901e-02}, + info: 0, + }, + { + z: []float64{0.4452569544189223e+00, 0.3712416527218666e+00, 0.4540009468556715e+00, 0.9149505778015055e-01, 0.9825460881415958e+00, 0.8144387623295611e+00, 0.4257438310114644e+00, 0.4055209729019219e+00, 0.1358301754544711e-01, 0.6660365069195171e+00, 0.4791028777450690e-01, 0.3657122138300755e+00, 0.9244230955293443e+00, 0.2570776992045346e+00, 0.5718524872194095e+00, 0.6959027703199671e+00, 0.8022093165313721e+00, 0.4495167304148069e+00, 0.4533829141769831e+00, 0.4399857636092745e+00, 0.8343772053001745e+00, 0.5841091089133705e+00, 0.9225724116024000e+00, 0.3646730358635919e+00, 0.4342402996301750e+00, 0.1979669913465428e+00, 0.2710080252534286e+00, 0.4064878156937679e+00, 0.1329017000110605e+00, 0.3577863781516848e+00, 0.7737638611946227e+00, 0.3628629851888825e+00, 0.6191378950237536e+00, 0.9181610808896479e-01, 0.3164967052049129e+00, 0.1800354212961807e+00, 0.5261304159866632e+00, 0.2095779664090124e+00, 0.4734470785970568e+00, 0.8233700542774806e+00, 0.1405149000531442e+00, 0.5739228235076023e+00, 0.4484023224779539e+00, 0.7953415183519565e+00, 0.5683020712105596e+00, 0.3828815589637512e+00, 0.9478512018818865e+00, 0.9398576744287683e+00, 0.6799918835962694e+00, 0.9795978835389229e+00, 0.9415167003494995e+00, 0.9804631144197878e+00, 0.4529282233852833e+00, 0.6987441800620822e+00, 0.1071819561656476e+00, 0.6287695276127018e+00, 0.2573913359217608e+00, 0.5283299892256954e-01, 0.1312057002484920e+00, 0.2566822109082798e+00, 0.5022596747022927e+00, 0.3755634037822867e+00, 0.7687685406410046e+00, 0.9286257388850563e+00, 0.2460950353669618e+00, 0.4615682752011302e+00, 0.2668978314403934e+00, 0.9526334220011422e+00, 0.7113266960956600e+00, 0.8033610237680390e+00, 0.2049912091857929e+00, 0.9104136201043411e+00, 0.9905140155095393e+00, 0.5284892163797259e+00, 0.4617116226676921e-01, 0.7638541825257228e+00, 0.8058860223281950e-01, 0.6562405799504624e+00, 0.4217948031372573e+00, 0.8444441663937204e+00, 0.4135300708069887e+00, 0.3992676961789670e+00, 0.9443059736340937e+00, 0.4697907769676380e+00}, + n: 21, + zOut: []float64{0.2339652518244840e+01, 0.2127240999798084e+01, 0.2122097736373912e+01, 0.1640409192349226e+01, 0.1522936721900112e+01, 0.1513900850773956e+01, 0.1190428480890818e+01, 0.1098839257111754e+01, 0.9060499827420537e+00, 0.8483833633939547e+00, 0.7423625631343986e+00, 0.6441873762582893e+00, 0.5561064852023264e+00, 0.5332801819376103e+00, 0.3323799891685881e+00, 0.2356658160961767e+00, 0.1910001776595044e+00, 0.1052937995306845e+00, 0.3493206724380617e-01, 0.1582963148293342e-01, 0.1362410302334481e-03, 0.2902907641693595e-19, 0.1235088787739235e-18, 0.5938156039510254e-18, 0.1190428480890818e+01, 0.2366582715663035e-29, 0.1736669751992271e-12, 0.4479686287025860e-12, 0.1098839257111754e+01, 0.4930380657631324e-31, 0.6845104399398104e-11, 0.1631652613834293e-10, 0.9060499827420537e+00, 0.3623410171350593e-13, 0.4397239464622790e-08, 0.8783797026819435e-08, 0.8483833633939547e+00, 0.1680831004707752e-20, 0.6292544501750849e-07, 0.1207646260809090e-06, 0.7423625631343986e+00, 0.1044536639065691e-17, 0.1870111343232326e+02, 0.1870111343232326e+02, 
0.1030000000000000e+03, 0.3002267573696145e+01, 0.3883495145631068e+01, 0.8796985420666049e-06, 0.5561064852023264e+00, 0.6232001151245993e-28, 0.4765675419124029e-06, 0.5281771988202168e-06, 0.5332801819376103e+00, 0.5995342879679690e-28, 0.3211676631039122e-06, 0.3513325837054497e-06, 0.3323799891685881e+00, 0.2177256098409993e-25, 0.1826919937583927e-06, 0.1747056528730059e-06, 0.2356658160961767e+00, 0.1311326116357865e-17, 0.1784537561302489e-06, 0.2175474607275274e-06, 0.1910001776595044e+00, 0.3862262991962074e-24, 0.9127544222909640e-06, 0.4962248419758332e-06, 0.1052937995306845e+00, 0.1072850831100576e-26, 0.2872845137271569e-03, 0.1413151427897814e-03, 0.3493206724380617e-01, 0.8028654523313936e-17, 0.5779626756191757e-04, 0.4784930511330926e-04, 0.1582963148293342e-01, 0.9359045779542084e-26, 0.9339546012110267e-04, 0.6206712854083305e-04, 0.1362410302334481e-03, 0.4535950205020818e-29, 0.1799327870469576e-03, 0.1621654177500190e-02}, + info: 0, + }, + { + z: []float64{0.6327743759434090e-01, 0.8874473183212240e+00, 0.1587228549487630e+00, 0.1869853022948459e+00, 0.1852952724381735e+00, 0.2190328352455733e+00, 0.7936705141328082e+00, 0.1573124860628259e+00, 0.2446308768539528e+00, 0.2441044501798444e+00, 0.4435884001235265e+00, 0.1783624365771731e+00, 0.6874700271616803e+00, 0.4796486989431838e+00, 0.2471425348644392e+00, 0.4391077480264999e+00, 0.9973269002312380e+00, 0.4770343529783838e+00, 0.3451566405930041e+00, 0.5011834710046675e+00, 0.8072188861666797e+00, 0.5748577907200507e+00, 0.2986003422862493e+00, 0.4760318526964945e+00, 0.5838957192070238e-01, 0.1760621214885278e+00, 0.7926085978219721e+00, 0.3379849703418611e+00, 0.6012719579585296e+00, 0.1130783704430351e+00, 0.5590675745254436e-01, 0.8616930340961984e+00, 0.9917564091315376e+00, 0.4219575368674273e+00, 0.1044382974319413e+00, 0.1886528536659160e-01, 0.9337085742573710e+00, 0.6447952074628677e+00, 0.7158824182577913e+00, 0.3883216757352448e+00, 0.2116568255149501e+00, 0.9819105316462939e+00, 0.8471687522868032e-01, 0.2785996362910685e+00, 0.8775405646426044e+00, 0.5671584270354416e+00, 0.4912868754910720e+00, 0.5767395813214109e+00, 0.7327323379759062e+00, 0.8184048812627022e+00, 0.3325270745990432e+00, 0.3026500963479654e+00, 0.7228591188664935e+00, 0.1094677581261434e+00, 0.5280482398171430e+00, 0.9974727380694979e+00, 0.5087630734843742e+00, 0.6953603638889684e+00, 0.9103967979863506e+00, 0.2584730811693510e+00, 0.5498750728524477e+00, 0.9758543899455846e+00, 0.8309066632970131e+00, 0.4311646442586020e+00, 0.4732818688792167e+00, 0.4096051739313099e+00, 0.7479697576436509e+00, 0.9369473766445329e+00, 0.7380538090811954e+00, 0.1199951604231148e+00, 0.5672128274092054e+00, 0.7150763241893471e+00, 0.6134240647991112e+00, 0.7095501841622953e-01, 0.1510080432732774e-01, 0.2912426074708729e+00, 0.1611746025295486e+00, 0.3946903662654964e+00, 0.7408818971566200e-01, 0.3978128748254128e+00, 0.3580504147215868e+00, 0.2603799996304434e-01, 0.8990784999913340e+00, 0.1985602490013758e+00}, + n: 21, + zOut: []float64{0.2139906798692060e+01, 0.2104052265745164e+01, 0.2080474521860817e+01, 0.1784368581097839e+01, 0.1501145759273247e+01, 0.1395549430074518e+01, 0.1236063891814824e+01, 0.1123736402991315e+01, 0.8211474325690415e+00, 0.7411912844479571e+00, 0.7336682913123216e+00, 0.5274366057326734e+00, 0.3715621485686555e+00, 0.3171624722931449e+00, 0.2809062056956198e+00, 0.1668884413843382e+00, 0.9744092162322979e-01, 0.7613224124753509e-01, 0.1665106039654164e-01, 0.4700366860927376e-02, 0.1401918312945850e-02, 
0.1084683744678891e-29, 0.4021673570416241e-08, 0.3931910860092540e-08, 0.1236063891814824e+01, 0.1439956010332256e-19, 0.3626387646080745e-08, 0.3543555828237728e-08, 0.1123736402991315e+01, 0.3006506681978320e-24, 0.3371387381785702e-08, 0.3448854867663729e-08, 0.8211474325690415e+00, 0.6197291271416269e-26, 0.2831422041774839e-08, 0.2886341750444270e-08, 0.7411912844479571e+00, 0.0000000000000000e+00, 0.2880306038760265e-08, 0.2936321908308739e-08, 0.7336682913123216e+00, 0.2406371799476454e-20, 0.1752158704199472e+02, 0.1752158704199472e+02, 0.1050000000000000e+03, 0.3111111111111111e+01, 0.7619047619047619e+01, 0.3652487989473621e-08, 0.3715621485686555e+00, 0.1451701975030564e-21, 0.4296538801302010e-08, 0.3992227758256396e-08, 0.3171624722931449e+00, 0.6162975822039155e-31, 0.5151357395533313e-08, 0.5974032086218965e-08, 0.2809062056956198e+00, 0.2039658356197591e-16, 0.1134165265363298e-07, 0.1513934018844884e-07, 0.1668884413843382e+00, 0.2400943524542200e-24, 0.1781584505522029e-06, 0.1218300442987205e-06, 0.9744092162322979e-01, 0.6162975822039155e-31, 0.9557876398738343e-06, 0.5872690889712619e-06, 0.7613224124753509e-01, 0.5074727415413836e-17, 0.2309052370169074e-04, 0.5924498773169641e-04, 0.1665106039654164e-01, 0.1400228106767296e-28, 0.5116509326795711e-04, 0.5154480234673206e-04, 0.4700366860927376e-02, 0.1956855682120442e-20, 0.1061141342429108e-03, 0.9755516139369119e-04, 0.1401918312945850e-02, 0.0000000000000000e+00, 0.1229085807195224e-02, 0.8009097549502667e-02}, + info: 0, + }, + { + z: []float64{0.7225493913848618e+00, 0.3268825038779278e+00, 0.2580957584922012e+00, 0.4222896610254372e+00, 0.8243187922209068e+00, 0.3973391932422579e+00, 0.3700698452657641e+00, 0.2073422474585492e+00, 0.3071746797249822e+00, 0.2370168833330754e+00, 0.7779583030993944e+00, 0.4350651843345934e+00, 0.7867543754352369e+00, 0.3439673345654075e+00, 0.3736479358698906e+00, 0.8450641599616520e+00, 0.3880138308747846e+00, 0.2479782080134303e+00, 0.3220864609073060e+00, 0.8460149388339712e+00, 0.6082870752479030e+00, 0.3396261004129498e+00, 0.1231305479976765e+00, 0.6514493424347317e+00, 0.4987273421671501e-01, 0.2199653339283912e+00, 0.5626339029868159e+00, 0.6755574738917364e+00, 0.7016850168758351e+00, 0.7957347983005405e+00, 0.3394687111682781e-01, 0.1490794513934588e+00, 0.9837873705641814e+00, 0.5038299137267350e+00, 0.8394265886439850e+00, 0.1438624872728633e-01, 0.4444190467253504e+00, 0.1489248400425094e+00, 0.6410535698206483e+00, 0.2739267916732867e+00, 0.6522097474411971e+00, 0.9086492388715542e+00, 0.1949071722314141e+00, 0.7176724675018002e+00, 0.9247176136838016e+00, 0.4929580602677628e+00, 0.3824418236647242e+00, 0.2425574232540663e+00, 0.7085287909144397e+00, 0.2574302555167504e+00, 0.6093160252921369e+00, 0.2417561032032995e+00, 0.4795262420397247e+00, 0.4658200993830509e+00, 0.9033699918994937e+00, 0.1295021719769064e+00, 0.6219991535812029e+00, 0.7040800557612208e+00, 0.8644629036591867e-01, 0.5129394685202899e+00, 0.7988983132437487e+00, 0.8322467913624354e+00, 0.9905487172695071e-01, 0.7093832076693246e+00, 0.1992462440538834e-01, 0.6378263512182120e+00, 0.5823949252324323e-02, 0.7826279628890456e+00, 0.5919828344466795e+00, 0.3815528510244244e+00, 0.6537355893096064e-01, 0.6459825623191251e-01, 0.4466210788758083e+00, 0.6620124961190184e+00, 0.6318809895815939e+00, 0.8145861476435108e+00, 0.5938169865214039e+00, 0.8092284578140500e+00, 0.5378701643349270e+00, 0.1648044346935199e+00, 0.9874291179811017e+00, 0.3210667070512012e+00, 0.6809988920516613e+00, 
0.6410080963943865e+00}, + n: 21, + zOut: []float64{0.2108032684782400e+01, 0.1971253922847928e+01, 0.1952878543820764e+01, 0.1839982524288513e+01, 0.1730503177731785e+01, 0.1509287569836986e+01, 0.1316544049683026e+01, 0.1117715356834192e+01, 0.9058841126016787e+00, 0.8452427684353671e+00, 0.7582219581243467e+00, 0.6392786878439012e+00, 0.5749674919024932e+00, 0.5111167278611346e+00, 0.3859485208834002e+00, 0.2295319067023061e+00, 0.2262113890497363e+00, 0.1122658486667464e+00, 0.7234153262002044e-01, 0.4527048542558528e-01, 0.8319414807792288e-04, 0.2547288901631409e-13, 0.9052716547280089e-04, 0.7818093042872877e-04, 0.1316544049683026e+01, 0.0000000000000000e+00, 0.2537869832861900e-04, 0.2058109204222074e-04, 0.1117715356834192e+01, 0.0000000000000000e+00, 0.1154377886785102e-04, 0.1215574110748570e-04, 0.9058841126016787e+00, 0.8843729980912407e-17, 0.1391102994778336e-08, 0.5248885105740662e-08, 0.8452427684353671e+00, 0.2445468806185137e-28, 0.2192473584471834e-06, 0.3792344586441173e-06, 0.7582219581243467e+00, 0.6480317173221865e-17, 0.1885256245409039e+02, 0.1885256245409039e+02, 0.1090000000000000e+03, 0.3303854875283447e+01, 0.3669724770642202e+01, 0.3715603111278862e-08, 0.5749674919024932e+00, 0.4928548884551519e-18, 0.2515491583997865e-07, 0.9138389520203617e-08, 0.5111167278611346e+00, 0.6587911462745685e-21, 0.7857712130631377e-05, 0.2298529163657988e-05, 0.3859485208834002e+00, 0.6162975822039155e-32, 0.1022329296046741e-03, 0.8680151473672892e-04, 0.2295319067023061e+00, 0.0000000000000000e+00, 0.4616464530663241e-04, 0.3877924919889212e-04, 0.2262113890497363e+00, 0.5522026336547083e-29, 0.2365660345682357e-03, 0.1990352708186489e-03, 0.1122658486667464e+00, 0.9015102222917331e-20, 0.6825572508936817e-03, 0.6443371781720532e-03, 0.7234153262002044e-01, 0.1168271446196110e-24, 0.1518594231951364e-05, 0.3554999772664582e-05, 0.4527048542558528e-01, 0.1023373475125114e-24, 0.5037539097182833e-02, 0.4761613742874584e-02, 0.8319414807792288e-04, 0.1479114197289397e-30, 0.9105689905446296e-02, 0.4825995872157137e-02}, + info: 0, + }, + { + z: []float64{0.3256696334281521e+00, 0.7530856859911462e+00, 0.8853989127988440e+00, 0.4058112879440673e+00, 0.4296659748141172e+00, 0.5414662225246791e-01, 0.7727106089758434e+00, 0.8219533891865206e+00, 0.8804898818241804e+00, 0.2701661776792790e+00, 0.3062100113053869e+00, 0.7616550925245322e+00, 0.7441861437457686e+00, 0.1208643477044503e+00, 0.1894387751691341e+00, 0.5502297763300060e+00, 0.7033314588214317e+00, 0.9049640752657374e+00, 0.7642474001418834e+00, 0.6461873559439021e+00, 0.7323555348666727e+00, 0.6137344390498661e+00, 0.4469041036073067e+00, 0.6120974783290001e+00, 0.3695185251601272e+00, 0.9332999793731735e+00, 0.3981597952563400e+00, 0.5926926060619504e+00, 0.2218776156238759e+00, 0.5556127447847847e-01, 0.2642041684868913e+00, 0.3100431241269808e-01, 0.7617158805289858e+00, 0.5576464290832684e+00, 0.1499291988028689e+00, 0.6071627267784354e+00, 0.2903444085740193e+00, 0.8982141027722228e+00, 0.3437740722392461e+00, 0.5257340992985249e-02, 0.9772197173932363e+00, 0.2743313505008338e-01, 0.5939995532981283e+00, 0.8125099455585232e+00, 0.8394050677385213e+00, 0.2410326841076476e+00, 0.6066214991817382e+00, 0.1828025638429278e+00, 0.5406675263078469e+00, 0.1108622672142920e+00, 0.2412280709874803e+00, 0.5713495470758625e+00, 0.6315445401927943e+00, 0.2035563756883153e+00, 0.8696814083183412e+00, 0.2313134367709406e+00, 0.4474447231170641e+00, 0.6238312383525342e+00, 0.4961806049996582e+00, 0.8121574484576765e+00, 
0.8702938949430352e+00, 0.4187164629520156e+00, 0.9204090241956668e+00, 0.8587176140225750e-02, 0.5171057705188283e+00, 0.5195595031109387e+00, 0.1704600468811621e+00, 0.3205951022793651e+00, 0.5643494948239225e-01, 0.9895063272544601e+00, 0.5554681247022339e+00, 0.6098243547887802e+00, 0.4730289261335907e+00, 0.1850396134174421e+00, 0.9997475966289492e+00, 0.6812817712215841e+00, 0.8515433432907883e+00, 0.7377047940023331e+00, 0.2280516830782600e+00, 0.7204549837953970e+00, 0.6096260713045146e+00, 0.9381128478076188e+00, 0.8446173293023428e+00, 0.6255387686605924e+00}, + n: 21, + zOut: []float64{0.2655567567233002e+01, 0.2307219644143945e+01, 0.2068899090176533e+01, 0.2040332602619700e+01, 0.1742115921396612e+01, 0.1681856112031183e+01, 0.1680515965675234e+01, 0.1379580829731510e+01, 0.1074312270120212e+01, 0.9816962393879983e+00, 0.7217300335876665e+00, 0.6931029681515243e+00, 0.5680300928567689e+00, 0.5098239945058581e+00, 0.4280073603039590e+00, 0.2868581986899945e+00, 0.1650069923584772e+00, 0.9685136354131559e-01, 0.5953175886938158e-01, 0.1010639579838301e-01, 0.1940920539235378e-02, 0.0000000000000000e+00, 0.6683246174917478e-10, 0.4267275295503314e-10, 0.1680515965675234e+01, 0.3510431028233503e-28, 0.1660414363340253e-09, 0.1048896312812690e-09, 0.1379580829731510e+01, 0.1462018530271286e-15, 0.3421106985799154e-06, 0.1788748688597153e-06, 0.1074312270120212e+01, 0.6708481546523056e-14, 0.2133146148219686e-04, 0.4629406838917969e-04, 0.9816962393879983e+00, 0.1747667198202037e-19, 0.5113942524303023e-03, 0.6287577911928029e-03, 0.7217300335876665e+00, 0.3213860550383917e-17, 0.2115308632171850e+02, 0.2115308632171849e+02, 0.1090000000000000e+03, 0.3217687074829932e+01, 0.4587155963302752e+01, 0.4202140396420195e-04, 0.5680300928567689e+00, 0.4489765112701642e-21, 0.3523329095202763e-09, 0.1167512512099963e-08, 0.5098239945058581e+00, 0.1652551767516096e-21, 0.2872281528375301e-06, 0.1040291448945555e-06, 0.4280073603039590e+00, 0.3262461795428160e-16, 0.6249760558326680e-04, 0.5475920486023645e-04, 0.2868581986899945e+00, 0.1568256893218114e-20, 0.1414968677825121e-03, 0.1814031293335571e-03, 0.1650069923584772e+00, 0.8677469957431130e-28, 0.6720068122740940e-03, 0.4975589868187537e-03, 0.9685136354131559e-01, 0.5127595883936577e-29, 0.9260272777115331e-03, 0.1730290225790419e-02, 0.5953175886938158e-01, 0.7227833914448031e-23, 0.5968169418548035e-04, 0.1670361992731526e-04, 0.1010639579838301e-01, 0.1295106991551472e-19, 0.1213518994781533e-03, 0.2534883535393275e-03, 0.1940920539235378e-02, 0.0000000000000000e+00, 0.1005634135102979e-02, 0.2318000467665894e-02}, + info: 0, + }, + { + z: []float64{0.1944566011999833e+00, 0.2181910996636003e+00, 0.3150099357031281e+00, 0.2091722274105127e+00, 0.6593858021341683e+00, 0.9426815336068497e+00, 0.5288011644568881e+00, 0.7784091503839152e+00, 0.1751638954124558e+00, 0.6164772800195180e+00, 0.6225196859922683e+00, 0.2786175486675760e+00, 0.1945914328217568e+00, 0.5779085626077999e+00, 0.5568218939451671e+00, 0.5170566254269496e+00, 0.5742861029155353e+00, 0.6923056150383281e+00, 0.5876692361605969e+00, 0.4295639666640205e+00, 0.8612105519992207e+00, 0.9963836880013683e+00, 0.2692934182903581e+00, 0.6181020857251435e+00, 0.1251902537663725e+00, 0.2838625146280274e+00, 0.2999174442521634e+00, 0.7258439193048929e+00, 0.2048904835805728e+00, 0.1174987481558037e+00, 0.6887427109309550e+00, 0.7409107864964065e+00, 0.5306892441542433e+00, 0.6214581212205206e-01, 0.1234501953117273e-01, 0.2038789370511589e+00, 0.6180963044354376e+00, 
0.2820746320472540e+00, 0.4643004781082601e+00, 0.2642696366861867e+00, 0.4102198269957935e-01, 0.1495900075045802e+00, 0.2060405615637534e+00, 0.3399183002928583e+00, 0.7918555051917199e+00, 0.9699614514519834e+00, 0.2922678000248715e+00, 0.1376674512222148e+00, 0.2496993200590716e+00, 0.3432539555227123e+00, 0.5835005548357151e+00, 0.4094205671447549e+00, 0.4876670000237163e+00, 0.7973523995663028e+00, 0.8851386592921134e+00, 0.1523554028450115e+00, 0.7049520832902729e+00, 0.4689643982524618e+00, 0.4347705823917429e+00, 0.8152140283905303e+00, 0.9259201525760803e+00, 0.3175789772106010e+00, 0.3950010049586650e+00, 0.7107346574834400e+00, 0.8109922991383097e+00, 0.6889072760433030e+00, 0.7861311781839575e-02, 0.7751963519893628e+00, 0.7382950156197396e+00, 0.9729693586970557e+00, 0.7283826758396350e+00, 0.1269651196972527e+00, 0.3530947260257147e+00, 0.2061448083515627e-01, 0.5931058475369740e+00, 0.9689379021952851e+00, 0.3254887771415839e+00, 0.2808523639662175e-01, 0.6211468186653845e+00, 0.6037463613076512e+00, 0.8746435454108382e+00, 0.5300695323546331e+00, 0.5276801989236730e+00, 0.2128951663669798e-01}, + n: 21, + zOut: []float64{0.2424279264689105e+01, 0.2270324150901105e+01, 0.1935128898096250e+01, 0.1814500336114905e+01, 0.1538233405195664e+01, 0.1297421320254112e+01, 0.1267282488999314e+01, 0.1249158514677014e+01, 0.9155076211264116e+00, 0.8242950831432680e+00, 0.5949232544739386e+00, 0.5630079212779312e+00, 0.4530053256303904e+00, 0.3458193312768856e+00, 0.2146623096050983e+00, 0.1686454472303586e+00, 0.1155214217797334e+00, 0.5180948935108346e-01, 0.2789744048901975e-01, 0.8268186305777192e-02, 0.6680158028153346e-04, 0.0000000000000000e+00, 0.3986945646893946e-08, 0.7023072825762287e-08, 0.1267282488999314e+01, 0.0000000000000000e+00, 0.2355158572444734e-07, 0.1877711213087836e-07, 0.1249158514677014e+01, 0.3384506411488746e-20, 0.2648360770785381e-11, 0.1741394977127960e-10, 0.9155076211264116e+00, 0.3299177303725225e-12, 0.2487861145673591e-05, 0.4028407591944186e-05, 0.8242950831432680e+00, 0.2441420600100056e-16, 0.1869515956083083e-05, 0.1842279235203962e-05, 0.5949232544739386e+00, 0.2024938139529812e-20, 0.1807975801219765e+02, 0.1807975801219765e+02, 0.9300000000000000e+02, 0.2836734693877551e+01, 0.1075268817204301e+01, 0.9521662195121400e-05, 0.4530053256303904e+00, 0.1259501632163109e-23, 0.1468124594065504e-05, 0.4670591953230941e-05, 0.3458193312768856e+00, 0.7975064420889996e-22, 0.1308295990671250e-06, 0.6690001990389955e-07, 0.2146623096050983e+00, 0.1927976052360153e-26, 0.3812574759844485e-05, 0.3580340604691736e-05, 0.1686454472303586e+00, 0.3739200690747596e-27, 0.6844384387279355e-05, 0.5557405125771409e-05, 0.1155214217797334e+00, 0.7506098603421864e-21, 0.2139339757899294e-04, 0.3026575107963825e-04, 0.5180948935108346e-01, 0.3886068668379982e-20, 0.1983680974127293e-03, 0.1336830557694390e-03, 0.2789744048901975e-01, 0.1991084924777834e-26, 0.4443273484877918e-03, 0.6821022035819646e-03, 0.8268186305777192e-02, 0.1921747358113822e-20, 0.1778955160858100e-05, 0.3048292256883238e-06, 0.6680158028153346e-04, 0.7915528772828604e-21, 0.4499131616906370e-02, 0.1547765737453591e-02}, + info: 0, + }, + { + z: []float64{0.3849545441014558e+00, 0.9201984078647510e+00, 0.4611893507512446e+00, 0.9426704990067624e+00, 0.4513996483425642e+00, 0.3546824995764211e+00, 0.2673932938327498e+00, 0.2250322436282782e+00, 0.3003875396759296e+00, 0.8637078150569484e+00, 0.2463125523040188e-01, 0.6381498516846796e+00, 0.9054059378226459e+00, 0.9211368359293561e+00, 
0.6899055937753183e+00, 0.6469675067081565e+00, 0.2524386712411693e+00, 0.4816130873217878e+00, 0.8437265349008123e+00, 0.8843672501609817e+00, 0.3917808232071412e+00, 0.4109107468556734e+00, 0.6294664210780233e+00, 0.5231064834871043e+00, 0.6006449315760222e+00, 0.2321330919598237e+00, 0.9857216391074231e-01, 0.5246894901632879e+00, 0.2429723268334654e+00, 0.3385437000243748e+00, 0.1784805178646218e+00, 0.8008314854810928e+00, 0.3388059467143087e+00, 0.4957241256656372e+00, 0.6108069077503485e+00, 0.8350989375447051e+00, 0.1087391555565628e+00, 0.5847974581188020e+00, 0.8316850716702742e+00, 0.8921011586703487e+00, 0.1947070853010260e+00, 0.1394928972102342e+00, 0.7929088249487071e+00, 0.4078510186300481e+00, 0.6849316610029904e+00, 0.1436003498260631e+00, 0.5673845335495399e+00, 0.7998164012861997e-01, 0.6667892725157643e+00, 0.7612117828169483e+00, 0.8195812739875934e+00, 0.1507277483884870e+00, 0.6683936863561600e+00, 0.1889659251016290e+00, 0.7979363461805246e+00, 0.9481151487908223e+00, 0.3017570036067704e+00, 0.2677628114499495e+00, 0.4479716416042271e+00, 0.3773062273227097e+00, 0.1756267436681674e+00, 0.6251009447636316e+00, 0.3010990216120933e+00, 0.6044916624270988e+00, 0.3012752666004853e+00, 0.7113839134297870e+00, 0.7355680503868338e+00, 0.1830572310895536e+00, 0.2065116792960049e+00, 0.2432895898830973e+00, 0.9615089470298006e+00, 0.3137165160357294e+00, 0.5335631824040450e+00, 0.8324142824771229e+00, 0.9749825715503555e+00, 0.3108065469391890e+00, 0.2584483510692804e+00, 0.5679665509067858e+00, 0.9084113884935793e+00, 0.3061643504016712e+00, 0.1856860631474661e+00, 0.4086356189591027e+00, 0.2761517708925276e+00, 0.9203155374121844e+00}, + n: 21, + zOut: []float64{0.2651616639993538e+01, 0.2278042999629812e+01, 0.2198077975076974e+01, 0.2185608493228643e+01, 0.1866238785250178e+01, 0.1694317687161655e+01, 0.1314780820581521e+01, 0.1267230115137409e+01, 0.1165102366599241e+01, 0.1094106513205574e+01, 0.9375639320451692e+00, 0.7228525903025090e+00, 0.5063995930233953e+00, 0.4433597755655621e+00, 0.3782868675227946e+00, 0.2710178897462153e+00, 0.1447594933682902e+00, 0.1060101050946834e+00, 0.8449698031014202e-01, 0.1468593250969371e-01, 0.8406928034951682e-06, 0.3591986573571771e-20, 0.3864386735331416e-10, 0.3252595492372177e-10, 0.1314780820581521e+01, 0.1447952803137974e-21, 0.5374081097788385e-10, 0.4849713226733927e-10, 0.1267230115137409e+01, 0.2958228394578794e-29, 0.8017357434413146e-10, 0.7063808972759981e-10, 0.1165102366599241e+01, 0.1139115147139141e-26, 0.1961041727821929e-09, 0.1374026522849091e-09, 0.1094106513205574e+01, 0.3915388343685865e-19, 0.1756082891845291e-08, 0.3338672481507716e-08, 0.9375639320451692e+00, 0.4870203640784043e-16, 0.2132455639604580e+02, 0.2132455639604580e+02, 0.9900000000000000e+02, 0.2972789115646258e+01, 0.5050505050505050e+01, 0.1300256830939750e-04, 0.5063995930233953e+00, 0.4338734978715565e-29, 0.3205424704587136e-04, 0.3879495859639124e-04, 0.4433597755655621e+00, 0.9466330862652142e-25, 0.6653798840082298e-04, 0.6266988927201412e-04, 0.3782868675227946e+00, 0.2274100449667269e-22, 0.3059080514749475e-04, 0.2450157710629126e-04, 0.2710178897462153e+00, 0.2739429933906694e-24, 0.4451877567018950e-05, 0.2739439060253447e-05, 0.1447594933682902e+00, 0.1897052704875489e-25, 0.3311215248867682e-05, 0.2822048496282046e-05, 0.1060101050946834e+00, 0.7924889161564951e-19, 0.2588042287900903e-05, 0.3305698843095029e-05, 0.8449698031014202e-01, 0.2523395625690618e-18, 0.1425730854724969e-04, 0.2612802087529877e-04, 
0.1468593250969371e-01, 0.7520086356827106e-20, 0.6278930605158724e-03, 0.1193309863751681e-02, 0.8406928034951682e-06, 0.0000000000000000e+00, 0.8769018979884540e-02, 0.6748334476252631e-02}, + info: 0, + }, + { + z: []float64{0.5539290016733247e+00, 0.6936589108803458e+00, 0.1862325586311042e-01, 0.3903593359823143e+00, 0.1387446022374186e+00, 0.9230050933369500e+00, 0.1655735508788293e+00, 0.5464594833562775e+00, 0.4407574799078734e+00, 0.7597516703282015e+00, 0.3240675365298944e+00, 0.8527429657828770e+00, 0.6134024974884296e+00, 0.1359668624923763e+00, 0.8589771621484943e+00, 0.8334002673394481e+00, 0.3811010712979018e+00, 0.4518439634289880e+00, 0.4121953913957921e-01, 0.1499929777106017e+00, 0.7537932319194001e+00, 0.1137770685080763e+00, 0.9362285670837264e+00, 0.2284833451474525e+00, 0.4661006612092690e+00, 0.3461611111488332e+00, 0.1608705680575839e-01, 0.9250298701911358e+00, 0.5983544857783111e+00, 0.9400090024445320e+00, 0.6595514287179831e+00, 0.2725099566160494e+00, 0.6509556024164401e+00, 0.8851211780351773e+00, 0.5925872091724521e+00, 0.5318402341230010e+00, 0.3225952236300995e+00, 0.6233031538827258e+00, 0.1806586091116282e+00, 0.9476369741031940e+00, 0.6784219735316235e+00, 0.6934023884718178e+00, 0.5000312772557033e+00, 0.6725383579734943e+00, 0.6771923299216058e+00, 0.9125469473100194e+00, 0.9862018367238429e+00, 0.7259311136907298e+00, 0.9021849324334038e+00, 0.6032549715715884e+00, 0.9017706724408630e+00, 0.8975979926873651e+00, 0.5949035726420406e+00, 0.6903449880442312e+00, 0.7574844360343417e+00, 0.2889632382233942e-01, 0.9428474184445177e+00, 0.5555118914598791e+00, 0.8663544108664935e+00, 0.6853450780608091e+00, 0.1464483859238053e+00, 0.6491672315887742e+00, 0.2994712877436206e+00, 0.3101752077576794e+00, 0.4920466664329196e+00, 0.2135103260181662e+00, 0.3809190441316870e+00, 0.8437350743416491e+00, 0.5443983884818225e+00, 0.7426189539459086e+00, 0.1055227287563778e+00, 0.3059118205598027e+00, 0.8189910523272392e+00, 0.9773505795713493e+00, 0.7305661438576656e+00, 0.6062516615534109e+00, 0.4660033490547544e+00, 0.5413353206637471e+00, 0.2388208915142312e+00, 0.6428463909118429e+00, 0.2982699820336984e+00, 0.2856298024316706e-01, 0.5487207914459959e+00, 0.4464180688275057e+00}, + n: 21, + zOut: []float64{0.2448593467642387e+01, 0.2141330855004301e+01, 0.2126430153924173e+01, 0.1907639051889359e+01, 0.1812500110390200e+01, 0.1510069013602020e+01, 0.1441897976798092e+01, 0.1263171337642769e+01, 0.1138107892295268e+01, 0.1088450894719558e+01, 0.9398511957468885e+00, 0.8468227425873083e+00, 0.6124964740092524e+00, 0.5436278175488470e+00, 0.4066256939946141e+00, 0.3972422051503922e+00, 0.2390787026361968e+00, 0.4848296808782174e-01, 0.2905803980602126e-01, 0.1302961362478940e-02, 0.3616542154739030e-05, 0.3187755557432356e-18, 0.8836722322771784e-05, 0.6065531139006927e-05, 0.1441897976798092e+01, 0.7730421460348975e-16, 0.3574427637238573e-07, 0.1935178018527985e-07, 0.1263171337642769e+01, 0.8058524406205312e-18, 0.1418632976070110e-05, 0.4069026170914173e-05, 0.1138107892295268e+01, 0.1371633476674845e-23, 0.4721385684453755e-04, 0.3623692584267971e-04, 0.1088450894719558e+01, 0.1790018845564752e-20, 0.4055026077404345e-04, 0.2446096735206838e-04, 0.9398511957468885e+00, 0.3178330447896772e-25, 0.2094278317138010e+02, 0.2094278317138010e+02, 0.1040000000000000e+03, 0.3031746031746032e+01, 0.4807692307692307e+01, 0.8138465546053722e-05, 0.6124964740092524e+00, 0.1019617235038814e-22, 0.4736809894069740e-03, 0.1915264360566004e-03, 0.5436278175488470e+00, 
0.2004410489406572e-21, 0.1259085689345719e-03, 0.2338760337361548e-03, 0.4066256939946141e+00, 0.1231392444306089e-15, 0.6226403214695822e-04, 0.7644683470656998e-04, 0.3972422051503922e+00, 0.5914962592392188e-17, 0.1887068321547840e-03, 0.1345267246510674e-03, 0.2390787026361968e+00, 0.1936824017247308e-20, 0.7333182971373067e-03, 0.1062997506116107e-02, 0.4848296808782174e-01, 0.3799154119544393e-26, 0.1269933545612727e-03, 0.1297822869175177e-03, 0.2905803980602126e-01, 0.8465676581597317e-24, 0.5642309440908588e-03, 0.2732446225021143e-03, 0.1302961362478940e-02, 0.2467375723434259e-18, 0.9177892725485271e-03, 0.8034707353774361e-03, 0.3616542154739030e-05, 0.2761013168273541e-28, 0.6531167933367503e-03, 0.6727959129527735e-03}, + info: 0, + }, + { + z: []float64{0.7455810318731756e+00, 0.1881309445499726e+00, 0.6290241532486281e+00, 0.9426231091333456e+00, 0.1402641401457146e+00, 0.5545071521563689e+00, 0.3467970399972181e+00, 0.6378935846273492e-01, 0.8187242451993508e+00, 0.8444039716090014e+00, 0.8807815832461214e+00, 0.6871811349512845e+00, 0.6041217734774926e+00, 0.2285364565760638e+00, 0.6287288909172152e+00, 0.5441550558534458e+00, 0.6062929607453951e+00, 0.1528830611582420e+00, 0.7289323622783690e+00, 0.8693274251763169e+00, 0.1210720262902459e+00, 0.4752572018677603e+00, 0.8160358228459934e+00, 0.5003926181135285e+00, 0.2800920281530351e+00, 0.3817159580569316e+00, 0.1419563352692587e+00, 0.9738793587569783e+00, 0.2402077997739175e+00, 0.5021080238100061e+00, 0.5325521311583831e+00, 0.7002793445871702e+00, 0.2004913666518293e-01, 0.6858750037076770e+00, 0.5705320248969311e+00, 0.6000416876176061e+00, 0.9254592880635680e+00, 0.2987366812581649e+00, 0.8838368946481180e+00, 0.7495294261248863e+00, 0.2516401660161148e+00, 0.5770724542103510e+00, 0.8689432882806168e+00, 0.3108844333247283e+00, 0.7611598373381380e+00, 0.9533545884676758e+00, 0.6146629999183371e+00, 0.7337933880625785e+00, 0.1335018938357140e+00, 0.4054745880121539e+00, 0.9816031767048012e+00, 0.5190257866591561e+00, 0.9457220484783406e+00, 0.2563725588490263e+00, 0.8953616129834293e+00, 0.1343673038869742e+00, 0.1198867110907023e+00, 0.7765966504091196e+00, 0.1685346783514826e+00, 0.9322265874533907e+00, 0.4968937019786546e+00, 0.3933065437909874e+00, 0.7046190939244956e-01, 0.5772052710604483e+00, 0.1220174671595003e+00, 0.3586914192309758e+00, 0.4743117898783903e+00, 0.1205436116155321e+00, 0.2068106627971966e-01, 0.5035688415619853e+00, 0.2656792568844590e-01, 0.4951625786650252e+00, 0.1600264513881963e+00, 0.4218870487180432e+00, 0.4847863747130776e-01, 0.9478135093620923e+00, 0.1811694594266104e+00, 0.5311488460048615e+00, 0.2296061187775216e+00, 0.9932681440344262e+00, 0.2007921586496573e+00, 0.9684478357621775e+00, 0.9322927111902295e+00, 0.6234102172880590e+00}, + n: 21, + zOut: []float64{0.2617080551859897e+01, 0.2305187568066598e+01, 0.1942735806791493e+01, 0.1834812272403632e+01, 0.1760060933961288e+01, 0.1720372963502770e+01, 0.1676173250234917e+01, 0.1530300579122039e+01, 0.1214313538868506e+01, 0.1146550683576815e+01, 0.9555081169996513e+00, 0.8235353939361046e+00, 0.7281321711646775e+00, 0.5964515885643180e+00, 0.4105953672636856e+00, 0.2154672622243388e+00, 0.1469861329023305e+00, 0.1350631498164301e+00, 0.8120368457133148e-01, 0.1548517647968005e-01, 0.1861612641330624e-04, 0.6389970231972139e-21, 0.6259229043737505e-18, 0.1065975851985723e-17, 0.1676173250234917e+01, 0.1398996249446581e-17, 0.1485385179581525e-16, 0.5390878377546583e-16, 0.1530300579122039e+01, 0.1934286939601921e-26, 
0.4995235293268131e-13, 0.1480097232724468e-13, 0.1214313538868506e+01, 0.1393981231259665e-11, 0.4031414073293921e-10, 0.9065983401759033e-10, 0.1146550683576815e+01, 0.7754818242684634e-25, 0.9068533370171412e-09, 0.1932480064220526e-08, 0.9555081169996513e+00, 0.5937598997073433e-12, 0.2185603480843691e+02, 0.2185603480843692e+02, 0.1010000000000000e+03, 0.3183673469387755e+01, 0.2970297029702970e+01, 0.4106209724092858e-05, 0.7281321711646775e+00, 0.2221432309102369e-26, 0.2012416535197213e-04, 0.1179395510019159e-04, 0.5964515885643180e+00, 0.6492360922940637e-17, 0.6055695087058826e-05, 0.1676327789285107e-04, 0.4105953672636856e+00, 0.1174834768927548e-24, 0.8526024871833974e-05, 0.9608606324255413e-05, 0.2154672622243388e+00, 0.1186446801452402e-26, 0.9887524717240814e-05, 0.1072842889916555e-04, 0.1469861329023305e+00, 0.0000000000000000e+00, 0.1666923382276663e-04, 0.2139923074663986e-04, 0.1350631498164301e+00, 0.4860988391194038e-18, 0.4968094868839375e-08, 0.1325007303324406e-08, 0.8120368457133148e-01, 0.5476819983069490e-18, 0.5501364953991437e-04, 0.1068949633822309e-04, 0.1548517647968005e-01, 0.6621786174648700e-13, 0.4934492558904634e-03, 0.7815729865935395e-03, 0.1861612641330624e-04, 0.0000000000000000e+00, 0.2047621357235247e-01, 0.1218736604744046e-01}, + info: 0, + }, + { + z: []float64{0.1913768959569714e+00, 0.7347223265181069e+00, 0.9087350122086006e+00, 0.8876448886267929e-02, 0.1794058368310475e+00, 0.3375830657319635e+00, 0.4153249303964603e+00, 0.3079259326141542e+00, 0.5095638682609168e+00, 0.6300946130469818e+00, 0.5367870439046851e-02, 0.6082577193589970e+00, 0.6096872650047096e+00, 0.8098482943683755e+00, 0.4018780482667224e+00, 0.1612961166583111e+00, 0.4165836183710623e+00, 0.6711774659609234e+00, 0.1871437048914691e+00, 0.2043648411447756e+00, 0.4603921058522200e+00, 0.5138380788557162e-01, 0.3299651081607302e+00, 0.5178779891127856e+00, 0.8488474652006981e+00, 0.4000528745818374e+00, 0.7038372186752184e-01, 0.1091866126814279e+00, 0.4601541814804277e-01, 0.2814862519917873e+00, 0.5969661695911915e+00, 0.1111325580672384e+00, 0.1377964900539917e+00, 0.4488655138014651e+00, 0.5793089439934721e+00, 0.4068390675279384e+00, 0.3141858292757815e-01, 0.3803058398371814e+00, 0.6865263808463873e-01, 0.6565571596516916e+00, 0.4227763797508006e+00, 0.9281332433572439e+00, 0.2549706813172651e+00, 0.1472316879439791e+00, 0.8389980826186394e+00, 0.4949206978487660e+00, 0.8778524239605636e+00, 0.8125876339222501e+00, 0.6384442407604712e+00, 0.6297206683503800e+00, 0.1074594814776241e+00, 0.4635106216187717e+00, 0.2149027083261391e+00, 0.2926633791426133e+00, 0.8718806127632718e+00, 0.9358351753143842e+00, 0.5812389276262170e+00, 0.8361764419241092e+00, 0.1334582685582402e+00, 0.6700349085889619e+00, 0.1370175035793201e+00, 0.2605729802823288e+00, 0.7055670307426516e+00, 0.1974656950667419e-01, 0.9516894704106690e+00, 0.7509460514650641e+00, 0.9770872584819335e-01, 0.1679929405438133e+00, 0.2605432695744189e+00, 0.7255281751885829e+00, 0.2060091110826470e+00, 0.5123028703888126e+00, 0.5392241233948379e+00, 0.3215743887975069e+00, 0.4306560982435532e+00, 0.9326432909148183e+00, 0.1891146429259456e+00, 0.5585690444839775e+00, 0.8103752159402208e+00, 0.3850798219907741e+00, 0.6027394925107610e-01, 0.6960376568363590e+00, 0.6132631218829975e+00, 0.5859904896405407e+00}, + n: 21, + zOut: []float64{0.1948845699194504e+01, 0.1757754653408917e+01, 0.1739740003489702e+01, 0.1427189755042558e+01, 0.1354312190162734e+01, 0.1300861212306309e+01, 0.1144086431430055e+01, 
0.9728683413380136e+00, 0.7924660843958955e+00, 0.7190108168957414e+00, 0.5334843173898538e+00, 0.4234595469843583e+00, 0.3840099969172215e+00, 0.2310167712576353e+00, 0.1706582281146164e+00, 0.1481114719972525e+00, 0.9844570800522298e-01, 0.8643465546688130e-01, 0.2116850399857309e-01, 0.1490679637756442e-02, 0.9506251872618699e-05, 0.0000000000000000e+00, 0.3139247800552953e-11, 0.1114956279480716e-10, 0.1144086431430055e+01, 0.2611946364919696e-16, 0.1910542599593587e-05, 0.1867627468366115e-05, 0.9728683413380136e+00, 0.3548331964791209e-13, 0.9422609200466975e-06, 0.3861796048985941e-05, 0.7924660843958955e+00, 0.4543838814073028e-27, 0.1545075054473009e-06, 0.2314398440634665e-06, 0.7190108168957414e+00, 0.1033975765689929e-24, 0.9239329035775786e-07, 0.1002180390059042e-06, 0.5334843173898538e+00, 0.0000000000000000e+00, 0.1525542457368567e+02, 0.1525542457368567e+02, 0.1120000000000000e+03, 0.3308390022675737e+01, 0.5357142857142857e+01, 0.1175851227353848e-05, 0.3840099969172215e+00, 0.1009741958682895e-27, 0.6285056589702800e-05, 0.7777675415159152e-05, 0.2310167712576353e+00, 0.4642054879586336e-19, 0.2707022647265855e-04, 0.1462219920606713e-04, 0.1706582281146164e+00, 0.0000000000000000e+00, 0.1442808298792521e-03, 0.1499267314065209e-03, 0.1481114719972525e+00, 0.1784642171583157e-21, 0.9060208842449760e-06, 0.2604899516157433e-05, 0.9844570800522298e-01, 0.3285517707584525e-17, 0.4009882974094740e-08, 0.9813054658490708e-08, 0.8643465546688130e-01, 0.1441804758967627e-17, 0.1005867330167897e-05, 0.3548177460196556e-06, 0.2116850399857309e-01, 0.1161145423573808e-16, 0.4264476922944564e-04, 0.6523019800084465e-04, 0.1490679637756442e-02, 0.5282426871388410e-23, 0.4120097727637851e-03, 0.5256559484655862e-03, 0.9506251872618699e-05, 0.7858215819253763e-23, 0.4122429400478702e-03, 0.4195795458123320e-03}, + info: 0, + }, + { + z: []float64{0.8707866955342474e+00, 0.7817747598907121e-01, 0.3395074897480099e-01, 0.4299950297507704e+00, 0.8088090483755312e+00, 0.8059665333356031e+00, 0.8715468955879169e-01, 0.7726332912417854e+00, 0.8113440783694211e+00, 0.3884398998833286e+00, 0.2376236729669906e+00, 0.1163685212573567e+00, 0.9165435854442110e+00, 0.6267984817484763e+00, 0.9624876615834328e+00, 0.3034180669232360e+00, 0.2061796047621195e+00, 0.5768621491208505e+00, 0.9925917678590149e+00, 0.6443157924817072e+00, 0.5967958277754652e+00, 0.6529032825080359e+00, 0.2463148548215904e+00, 0.3434579503978075e+00, 0.9563439871443443e+00, 0.2121028540747388e+00, 0.7195547303990343e+00, 0.2508441805243417e+00, 0.1006260142041168e+00, 0.8893309100691102e+00, 0.5028754540045453e+00, 0.8271029410166070e+00, 0.1744058577319449e+00, 0.1497291367846938e+00, 0.9760674104574272e+00, 0.1217237467828105e+00, 0.5182991497218187e+00, 0.9638099804000833e-01, 0.3920754919448237e+00, 0.5405769677178972e+00, 0.2099631365577761e+00, 0.7305783624173950e+00, 0.5230361650507421e+00, 0.3249231142539155e+00, 0.6943021078034506e+00, 0.8039502137729849e+00, 0.5430939865698426e+00, 0.5802454813209417e+00, 0.4521353811622019e+00, 0.6022646695138674e+00, 0.5085578584349214e+00, 0.2084880202460930e+00, 0.7893764544854661e+00, 0.8499187425465698e+00, 0.6507732600828079e+00, 0.6030189256946239e+00, 0.2748834677816949e+00, 0.7991444375081345e+00, 0.3953548021356785e+00, 0.8063610109500122e+00, 0.9322494176165492e+00, 0.2295756744488692e+00, 0.9380170922026533e+00, 0.6349173046266506e+00, 0.8138238710526029e+00, 0.1723717901392456e+00, 0.7436778507578146e+00, 0.9748296910217159e+00, 0.9719004334667378e+00, 
0.1097372829332979e+00, 0.6782829454939702e+00, 0.5097404868982898e+00, 0.9485025358778609e+00, 0.7313319469015522e-01, 0.6156780373408383e+00, 0.8764490831370680e+00, 0.2737186188000360e+00, 0.3500606337779347e+00, 0.7029831161979777e+00, 0.1747477098480691e+00, 0.8887436470613648e+00, 0.8441051781034927e+00, 0.3534934185139980e+00, 0.9278581973721739e+00}, + n: 21, + zOut: []float64{0.2399590582568773e+01, 0.2223029747781157e+01, 0.2028465681071774e+01, 0.1928601105940012e+01, 0.1734632538043284e+01, 0.1705587556623443e+01, 0.1335586809119933e+01, 0.1201248979220471e+01, 0.1103973785860777e+01, 0.9516762968928613e+00, 0.8501027578146555e+00, 0.7334218559333251e+00, 0.5694462065979470e+00, 0.4616704914677346e+00, 0.3869349008726894e+00, 0.2370229392379994e+00, 0.1895639186923105e+00, 0.6837204156138489e-01, 0.3336561258733767e-01, 0.4796873010443177e-02, 0.8309969413683648e-03, 0.1974829088984998e-20, 0.1178494024155901e-08, 0.8095343015842228e-09, 0.1335586809119933e+01, 0.2220446049250313e-15, 0.1020576889344504e-06, 0.1802026823995536e-06, 0.1201248979220471e+01, 0.0000000000000000e+00, 0.6243063866214613e-06, 0.3862955415798747e-06, 0.1103973785860777e+01, 0.6981113564263453e-22, 0.3544097463307990e-10, 0.1357364267827356e-10, 0.9516762968928613e+00, 0.9072531498765813e-25, 0.7979152396248553e-09, 0.3946058728874109e-09, 0.8501027578146555e+00, 0.3130538746543124e-14, 0.2014792167783968e+02, 0.2014792167783968e+02, 0.1020000000000000e+03, 0.3129251700680272e+01, 0.5882352941176471e+01, 0.5818593578868780e-05, 0.5694462065979470e+00, 0.1588643710672175e-16, 0.4031382989989102e-05, 0.4054337523014161e-05, 0.4616704914677346e+00, 0.4437342591868191e-30, 0.3922192530785082e-05, 0.4209867122852495e-05, 0.3869349008726894e+00, 0.1490631566505624e-25, 0.4185837521330395e-06, 0.8450171931154743e-06, 0.2370229392379994e+00, 0.9509832565716118e-20, 0.2486232683772935e-03, 0.1020105306066691e-03, 0.1895639186923105e+00, 0.4152563805083406e-26, 0.4122104994398350e-03, 0.2603466333914367e-03, 0.6837204156138489e-01, 0.2482703040911568e-24, 0.1451022916339763e-02, 0.8718585538326752e-03, 0.3336561258733767e-01, 0.7888609052210118e-30, 0.3124612722893384e-02, 0.2800832977531187e-02, 0.4796873010443177e-02, 0.5945360652724886e-23, 0.1330631700030230e-06, 0.1700369028500688e-07, 0.8309969413683648e-03, 0.0000000000000000e+00, 0.1665788949608015e-02, 0.1927727749653244e-02}, + info: 0, + }, + { + z: []float64{0.5557143146495949e+00, 0.5989968816903124e+00, 0.2940668070493605e+00, 0.6865591553636113e+00, 0.2538362881116337e+00, 0.8562930572057048e-01, 0.8935270452331215e+00, 0.2427258280604294e-01, 0.8872146530392093e+00, 0.8383596820641517e+00, 0.7298494135137694e+00, 0.5456106579379609e+00, 0.5284508278981448e+00, 0.4551015440756836e+00, 0.8310297436868961e+00, 0.2533050402685522e+00, 0.8204820449217906e+00, 0.3961557150240700e+00, 0.9768702237400030e+00, 0.2370292739142171e+00, 0.2559318188841475e-02, 0.9750517337670606e-01, 0.4332045279801727e+00, 0.9393777930670477e+00, 0.1401411199977110e-01, 0.6412771440674316e+00, 0.3992540155849937e+00, 0.6710909099058460e+00, 0.7286319488310415e+00, 0.2796269361967505e+00, 0.7441899058930037e+00, 0.6626826587793098e+00, 0.9214594500725222e+00, 0.3161807402236700e+00, 0.5522479249937589e+00, 0.8328895958825197e+00, 0.3188368475648113e+00, 0.7495883496609020e+00, 0.7525354981370723e+00, 0.4819658709067065e-01, 0.8655147680740739e+00, 0.9438778396406793e+00, 0.2488475915004000e+00, 0.6823535213934872e+00, 0.1937525404626215e+00, 0.5005903164733780e+00, 
0.8863816362271992e-01, 0.8279647895785316e+00, 0.7576076924432378e+00, 0.7026499641621615e+00, 0.4430381204856144e+00, 0.7250369983032848e+00, 0.6848785243425647e+00, 0.4460397323983623e+00, 0.4708648248777212e+00, 0.2715064773162834e+00, 0.3027887490966231e+00, 0.7428389405348396e+00, 0.4450045451144832e+00, 0.4683793136386452e+00, 0.2207684946192057e+00, 0.2427030064074031e+00, 0.9554840783351275e+00, 0.4627007756736100e+00, 0.2498171419709001e+00, 0.6457883819068694e+00, 0.9852852516910280e+00, 0.1549706320399313e-01, 0.5863698153730145e+00, 0.8639664213849532e+00, 0.3535803218615043e+00, 0.5300900503312423e+00, 0.6390014357018353e+00, 0.7961857807391770e+00, 0.2691637990251612e+00, 0.5302467438659471e+00, 0.3050412123368091e+00, 0.6256487701731338e+00, 0.3334512397543001e+00, 0.4160884062357342e+00, 0.1255438899512573e+00, 0.1645317210625844e+00, 0.3238197294562333e+00, 0.8765325383521217e+00}, + n: 21, + zOut: []float64{0.2427342887003738e+01, 0.2350720031505609e+01, 0.2023223336640000e+01, 0.1998000565720247e+01, 0.1766392007527096e+01, 0.1571733737439641e+01, 0.1560415852981921e+01, 0.1417948678305651e+01, 0.1258973312710257e+01, 0.1056655934390461e+01, 0.8835393928887348e+00, 0.8723851974669989e+00, 0.7820423442972025e+00, 0.7419840433476598e+00, 0.4998280519834369e+00, 0.4045332331022956e+00, 0.9795545665796999e-01, 0.6816976350034422e-01, 0.5040003524610775e-01, 0.3066447317814986e-01, 0.1707038609221110e-04, 0.0000000000000000e+00, 0.7788285758834551e-10, 0.4967814553062384e-10, 0.1560415852981921e+01, 0.2584939414223288e-25, 0.2197123097785426e-09, 0.1273532664170934e-09, 0.1417948678305651e+01, 0.1666074231826777e-26, 0.1275101209751314e-13, 0.3690153580151478e-13, 0.1258973312710257e+01, 0.3205217501722064e-17, 0.9823174457391989e-10, 0.2521354047663225e-09, 0.1056655934390461e+01, 0.7258232531419596e-14, 0.3712131332588551e-06, 0.9470624651125662e-06, 0.8835393928887348e+00, 0.1013078509970455e-14, 0.2186292540627962e+02, 0.2186292540627962e+02, 0.1200000000000000e+03, 0.3639455782312925e+01, 0.8333333333333334e+01, 0.8044662327724623e-11, 0.7820423442972025e+00, 0.1609007833004439e-16, 0.1435272653873044e-05, 0.2709954877521105e-06, 0.7419840433476598e+00, 0.7040565915227063e-17, 0.3841536774665796e-05, 0.2871402006582635e-05, 0.4998280519834369e+00, 0.2100142105025380e-22, 0.2079096157763567e-04, 0.3245111531694086e-04, 0.4045332331022956e+00, 0.1391829401523057e-17, 0.6737785910120600e-04, 0.9764140649613147e-04, 0.9795545665796999e-01, 0.0000000000000000e+00, 0.6779864796214124e-03, 0.3130513090482015e-03, 0.6816976350034422e-01, 0.2326445472805242e-24, 0.3547436816484852e-02, 0.2240007954218211e-02, 0.5040003524610775e-01, 0.1487168151587342e-22, 0.3710761871988309e-06, 0.2590766399899962e-07, 0.3066447317814986e-01, 0.7395570986446986e-31, 0.2806228106658499e-03, 0.1300265719680322e-02, 0.1707038609221110e-04, 0.5107323294632100e-21, 0.5584126334680110e-02, 0.2863707539154273e-03}, + info: 0, + }, + { + z: []float64{0.8091224458140820e+00, 0.8008972278830493e+00, 0.2578176398184776e+00, 0.1256248089219657e+00, 0.6581359192692769e+00, 0.2304361454551056e+00, 0.7582551898966047e+00, 0.1757884371165862e+00, 0.3186400696162690e+00, 0.7479165768101737e+00, 0.3726824047207358e+00, 0.8797387692666324e+00, 0.5750380180879821e+00, 0.5660816893967801e+00, 0.4438713938433396e+00, 0.1758644078670230e+00, 0.5631776732547016e+00, 0.3584358301396408e+00, 0.9436101806256550e+00, 0.5626737326978751e+00, 0.6424649400467324e+00, 0.8481112336470399e+00, 0.5655978701315552e+00, 
0.8705727446437589e+00, 0.2336253154027212e-01, 0.5296192722406249e+00, 0.2416414888807097e+00, 0.8901032371078870e+00, 0.6213866196646989e+00, 0.7130035148592161e+00, 0.7957781913464947e+00, 0.8476792055481847e+00, 0.9456821938668590e+00, 0.9119251640705257e+00, 0.6793496748825844e+00, 0.1959111178023272e+00, 0.1712010119398182e+00, 0.1826738976471362e+00, 0.2526159624017289e+00, 0.1636253698204285e+00, 0.5186411299293459e+00, 0.9760895321955887e-01, 0.6893646610477002e+00, 0.9919227436537437e-01, 0.3790249398464486e+00, 0.9060767708258410e+00, 0.1329141344440885e+00, 0.8748053076614649e+00, 0.3613991427617499e+00, 0.1317424493721138e+00, 0.1676759484196766e-01, 0.7195688942861501e+00, 0.7508197149131951e+00, 0.1084062966598669e+00, 0.7799888885196686e+00, 0.8731271868042269e+00, 0.7281667421703623e+00, 0.5506096404209293e+00, 0.2344365678709384e+00, 0.8253982006984435e+00, 0.6925029659724733e+00, 0.2137159289768908e+00, 0.6304949708624844e+00, 0.2263541700174215e+00, 0.3731966957484361e+00, 0.4056710443895672e-01, 0.1518575777839423e+00, 0.7000182640684851e+00, 0.5207195688041865e+00, 0.3546513077756396e+00, 0.5223091585165126e+00, 0.1312526670646489e+00, 0.6075518716591177e+00, 0.7815133160786110e+00, 0.1466092133856621e+00, 0.6908403097208041e+00, 0.5289383481177163e+00, 0.3060973141340825e+00, 0.2719751878023001e+00, 0.4367099092855503e+00, 0.1678669773704986e+00, 0.2384522838837465e+00, 0.7578135131488263e+00, 0.2260651503779529e+00}, + n: 21, + zOut: []float64{0.3001718609114466e+01, 0.2461580649918252e+01, 0.2139800724137603e+01, 0.2110198913058292e+01, 0.1773285881581521e+01, 0.1731854037718996e+01, 0.1434868444776101e+01, 0.1292334845681036e+01, 0.1202898865215905e+01, 0.9074690574580867e+00, 0.8145281637318031e+00, 0.6833997197084192e+00, 0.6303801796404412e+00, 0.5115989792032534e+00, 0.4365028215455523e+00, 0.3347059637856380e+00, 0.2704761845997483e+00, 0.1007567293499011e+00, 0.7404356572986662e-01, 0.2225224658576705e-01, 0.1003499792335912e-03, 0.1163915500165708e-19, 0.5702083577853931e-06, 0.6235005324467382e-06, 0.1434868444776101e+01, 0.7352183636659830e-27, 0.3221030091629643e-04, 0.6932009635225803e-04, 0.1292334845681036e+01, 0.1547654926086715e-20, 0.2859096049606110e-03, 0.1689403705904475e-03, 0.1202898865215905e+01, 0.5187952724823741e-19, 0.3457740509453089e-04, 0.1548723988845099e-04, 0.9074690574580867e+00, 0.1883346903736854e-18, 0.4924250006037621e-06, 0.3064183963004930e-06, 0.8145281637318031e+00, 0.7436271633171000e-16, 0.2193475493251988e+02, 0.2193475493251988e+02, 0.9400000000000000e+02, 0.2913832199546485e+01, 0.3191489361702128e+01, 0.6928384721316203e-06, 0.6303801796404412e+00, 0.4344707644406440e-17, 0.8243004381261832e-06, 0.1242944619712133e-05, 0.5115989792032534e+00, 0.1321342016245195e-28, 0.6830327722875946e-05, 0.4610227507804771e-05, 0.4365028215455523e+00, 0.7922538128148438e-16, 0.5185315150043044e-04, 0.9574812800552337e-04, 0.3347059637856380e+00, 0.4042912139257686e-29, 0.6190562688247171e-03, 0.1111720505727282e-02, 0.2704761845997483e+00, 0.1659171176970474e-17, 0.3895087988050344e-03, 0.1929449868539905e-03, 0.1007567293499011e+00, 0.2919870821923328e-23, 0.2088900906409071e-03, 0.1786849109209585e-03, 0.7404356572986662e-01, 0.6310887241768094e-28, 0.1266951898160773e-03, 0.1217799054154213e-03, 0.2225224658576705e-01, 0.3697785493223493e-31, 0.1828672832615040e-03, 0.2196698921083079e-03, 0.1003499792335912e-03, 0.0000000000000000e+00, 0.1256202468809242e-02, 0.9158129895566399e-03}, + info: 0, + }, + { + z: 
[]float64{0.7473165994595915e+00, 0.9383144044637891e+00, 0.9705766820209645e+00, 0.3093136353727625e+00, 0.7158638842786125e+00, 0.8927477442087851e+00, 0.5846837014009232e-02, 0.2823227977987499e+00, 0.4656399961948744e+00, 0.1783480145929806e-01, 0.8517241739910267e+00, 0.2968236367983081e+00, 0.3828703962512207e+00, 0.1618551942929359e+00, 0.8422815023410849e+00, 0.9667213467147939e+00, 0.1872774458350456e+00, 0.9673785609387944e+00, 0.1668393270938346e+00, 0.1793363294699661e+00, 0.3882509817575230e+00, 0.9322497724034516e+00, 0.1678634979113953e+00, 0.4000730353927472e+00, 0.1370854040387474e+00, 0.1383728853360747e+00, 0.8728086099072359e+00, 0.7142605368034651e+00, 0.1298497843576266e+00, 0.4890695005398649e+00, 0.6561831309305255e+00, 0.2551502145039813e+00, 0.3137603328050955e+00, 0.4725573834392259e+00, 0.1864194559915778e+00, 0.2476903242862807e+00, 0.3016682498100809e-01, 0.6028908129301045e+00, 0.9699532639034771e+00, 0.6804787742392342e+00, 0.5085224418131328e+00, 0.8160312832212636e+00, 0.4537844508149779e+00, 0.7698002058576117e+00, 0.9102837491884663e-01, 0.7957963901796614e+00, 0.4129806172970929e+00, 0.1183929303892000e+00, 0.7231979382284242e+00, 0.6908050716155305e+00, 0.9129211251463741e+00, 0.8407226028232299e+00, 0.7511043357392063e+00, 0.6614037096770188e+00, 0.5816383867961591e+00, 0.9508597818692400e+00, 0.6671834981267941e+00, 0.5353731249555691e+00, 0.8270836995328532e+00, 0.4748649574057349e+00, 0.7310961648034807e+00, 0.5329698192925740e+00, 0.5537463638220994e+00, 0.8202244086458278e+00, 0.3089778794417031e-01, 0.8347421543151022e+00, 0.7341688304615497e+00, 0.6293051268804178e+00, 0.9685330131023850e-01, 0.2218306173097127e+00, 0.4005982063157664e+00, 0.8004209862846372e+00, 0.9322471342744776e-02, 0.5252230232113276e+00, 0.8078446044346348e+00, 0.8663675770089085e-02, 0.4174348230921995e+00, 0.5316704871571061e+00, 0.8080873214304936e+00, 0.3191328650837369e+00, 0.2265528787275422e+00, 0.1299687059183835e+00, 0.5606006275148095e+00, 0.2716326223111132e+00}, + n: 21, + zOut: []float64{0.2557856639061994e+01, 0.2221569823028635e+01, 0.2113175506056959e+01, 0.1791034203034858e+01, 0.1626866366713385e+01, 0.1553853827140545e+01, 0.1378038738404736e+01, 0.1313711478279983e+01, 0.1103021858691488e+01, 0.8924372074434684e+00, 0.7465727975101516e+00, 0.5658880123536838e+00, 0.5427616116084486e+00, 0.3934406603234193e+00, 0.3139076008306549e+00, 0.2922956394268174e+00, 0.1324157743308766e+00, 0.8492495339426936e-01, 0.1726019296564064e-01, 0.1478900502211735e-02, 0.3047316799617542e-04, 0.0000000000000000e+00, 0.2209423397088722e-18, 0.1230851536694111e-18, 0.1378038738404736e+01, 0.4653537180464338e-22, 0.4724643033910209e-17, 0.2943893377745906e-17, 0.1313711478279983e+01, 0.2193156221990361e-16, 0.1119006893822957e-16, 0.1741431705208298e-16, 0.1103021858691488e+01, 0.2831227495869906e-12, 0.2860829853751284e-15, 0.4129571258389818e-15, 0.8924372074434684e+00, 0.1711229892219076e-22, 0.2249832739645017e-14, 0.3254005212313171e-14, 0.7465727975101516e+00, 0.5231751935973153e-19, 0.1964254226427022e+02, 0.1964254226427022e+02, 0.1110000000000000e+03, 0.3285714285714286e+01, 0.8108108108108109e+01, 0.1373504512438542e-12, 0.5427616116084486e+00, 0.0000000000000000e+00, 0.2663664432452481e-11, 0.1451131289962852e-11, 0.3934406603234193e+00, 0.3021704178298789e-14, 0.8991877456137337e-10, 0.1576401145017008e-09, 0.3139076008306549e+00, 0.2274554202459952e-24, 0.7202855721215940e-08, 0.4214854283718170e-08, 0.2922956394268174e+00, 0.3975858962313899e-27, 
0.6024353396932969e-07, 0.1022672043122323e-06, 0.1324157743308766e+00, 0.6969933701296916e-21, 0.1098572510799548e-05, 0.1404105143401733e-05, 0.8492495339426936e-01, 0.6511321020566649e-24, 0.5604469692434887e-06, 0.6104659421650381e-06, 0.1726019296564064e-01, 0.9860761315262648e-31, 0.7747760678878360e-06, 0.1026260244983655e-05, 0.1478900502211735e-02, 0.1334496117765360e-28, 0.5102973157105362e-05, 0.1258609455819239e-04, 0.3047316799617542e-04, 0.0000000000000000e+00, 0.4753862314998056e-03, 0.1863315015192667e-02}, + info: 0, + }, + { + z: []float64{0.2548672807470598e-01, 0.1495421137012933e+00, 0.6637277283391485e-01, 0.6233200993667132e-01, 0.4958061473131723e+00, 0.9804571594196556e+00, 0.5741929121502225e+00, 0.9319350517768583e+00, 0.5154498317218548e-01, 0.4165283024496188e+00, 0.9821854184367015e+00, 0.7510033276340442e+00, 0.2648151368970406e+00, 0.9698327797382789e-01, 0.1433813304773114e+00, 0.1687421369042604e+00, 0.6230509959313209e+00, 0.8106153730403777e+00, 0.5719487513699320e+00, 0.5761793262532514e+00, 0.4983158430615118e+00, 0.8860830618783583e+00, 0.7136722399358287e+00, 0.6310350013419201e+00, 0.5509601824732582e+00, 0.5806336147624858e-01, 0.1818648959869262e+00, 0.2346005241916651e+00, 0.5346851174706903e+00, 0.4444466121668155e+00, 0.8418378722202517e+00, 0.6164532479196363e+00, 0.4298553148227666e+00, 0.2296849794057675e+00, 0.3620982682376475e+00, 0.3645096805309075e+00, 0.9980228064491022e+00, 0.7106112428543574e+00, 0.6651697421094976e+00, 0.9830151455980368e+00, 0.3483046917749841e+00, 0.3668589729383628e+00, 0.5655614401040915e+00, 0.8891526728148491e+00, 0.9164649086142118e+00, 0.4065736846475075e+00, 0.1470436468080603e+00, 0.5802557267939668e+00, 0.8272597245263099e+00, 0.7163617706554632e+00, 0.3488790096540706e+00, 0.6404227109073246e+00, 0.1472796557920839e+00, 0.8764536863696604e-02, 0.5020645462548778e-01, 0.6906675094909457e+00, 0.4668434450047272e+00, 0.7886435616506033e+00, 0.8775817704321313e+00, 0.4246952359284411e+00, 0.1646609549881360e+00, 0.6697681588293083e+00, 0.7914692739599321e+00, 0.1396674072195736e-01, 0.5167232939537366e+00, 0.9374340964740459e+00, 0.4228531365925756e+00, 0.9727064517088465e+00, 0.5765501520393458e+00, 0.5648508784605104e+00, 0.3049637657479770e+00, 0.3063190445223624e+00, 0.5204528216889723e+00, 0.8180178920731451e+00, 0.4938773599630263e+00, 0.8975563911549045e-01, 0.5983688838646251e+00, 0.3698925131813697e+00, 0.2992530973981942e+00, 0.7463539101278609e+00, 0.8005980511688328e+00, 0.7944834752945218e-01, 0.3132925544163919e+00, 0.8866951016419623e+00}, + n: 21, + zOut: []float64{0.2548744028948828e+01, 0.2439584200134194e+01, 0.2276898263864132e+01, 0.2083856178014486e+01, 0.2039342663979975e+01, 0.1949418505713503e+01, 0.1425638870874207e+01, 0.1087729835971203e+01, 0.9667696375323971e+00, 0.8022433247488755e+00, 0.5846254874613492e+00, 0.5355025899480791e+00, 0.3940805795947064e+00, 0.2663780336330241e+00, 0.2413081251593111e+00, 0.1817740817404329e+00, 0.1073144197241267e+00, 0.4327520903789019e-01, 0.3141654146246065e-01, 0.2017433957164045e-01, 0.3181705377209195e-03, 0.2778526523712400e-12, 0.1839247883012458e-10, 0.2248848580297948e-10, 0.1425638870874207e+01, 0.4660214926831417e-16, 0.5684400877302917e-10, 0.6387162891082867e-10, 0.1087729835971203e+01, 0.6729715978266918e-13, 0.1741585337702246e-09, 0.2256709509706825e-09, 0.9667696375323971e+00, 0.8583633357857125e-17, 0.5122248377584688e-09, 0.7095864943104643e-09, 0.8022433247488755e+00, 0.1479114197289397e-30, 0.3545102092401686e-08, 
0.2547082836533246e-08, 0.5846254874613492e+00, 0.1684748530176799e-17, 0.2002639308765254e+02, 0.2002639308765254e+02, 0.9900000000000000e+02, 0.2829931972789116e+01, 0.2020202020202020e+01, 0.1450844904575395e-08, 0.3940805795947064e+00, 0.8431345355002174e-26, 0.2834737824482197e-07, 0.1323731756234205e-07, 0.2663780336330241e+00, 0.1205202152489745e-19, 0.2553256673132341e-06, 0.1983391491151363e-06, 0.2413081251593111e+00, 0.5052150994009667e-21, 0.4174664103103917e-06, 0.5361294231508838e-06, 0.1817740817404329e+00, 0.4777592300362271e-19, 0.1103166278762495e-05, 0.1411552925870387e-05, 0.1073144197241267e+00, 0.1369462531463676e-25, 0.4868310689385493e-05, 0.3749559753873054e-05, 0.4327520903789019e-01, 0.6517810995642209e-18, 0.3308901711413922e-04, 0.5021439046381042e-04, 0.3141654146246065e-01, 0.3980258690751375e-19, 0.2091355707842741e-03, 0.1263714817696205e-03, 0.2017433957164045e-01, 0.9608259223012324e-19, 0.2907989762339014e-03, 0.3270695437261140e-04, 0.3181705377209195e-03, 0.2078018254813854e-20, 0.5046681645946245e-02, 0.5971971384809690e-02}, + info: 0, + }, + { + z: []float64{0.7556699778432414e+00, 0.9264145479756024e+00, 0.2732571748073197e+00, 0.6411921070422063e+00, 0.9885815056247259e+00, 0.3723918099656685e+00, 0.8347397513753253e+00, 0.1449609854320509e+00, 0.7237973270068254e+00, 0.7937879503069174e+00, 0.1580314367294657e+00, 0.6183123759968190e-01, 0.8526562423479169e+00, 0.4515796037285547e+00, 0.4273610709781387e+00, 0.9916977661043136e-01, 0.6106106148524300e+00, 0.8331371477897991e+00, 0.3226596180395027e+00, 0.9411873968460633e+00, 0.4713107215814810e+00, 0.2408559451890391e-01, 0.8159283865403915e+00, 0.2484819964854622e+00, 0.9053457264816500e-01, 0.7765873412573800e+00, 0.6903393919382517e-01, 0.4737861094943234e+00, 0.2953181034757211e+00, 0.1455852388723616e+00, 0.3595484224812872e+00, 0.7546693773577520e+00, 0.2176499262400425e+00, 0.2180463762362660e+00, 0.4619066216292487e+00, 0.9195384505328125e+00, 0.7817199663072332e+00, 0.7252058550809327e+00, 0.7231496659924924e+00, 0.4997135548263443e+00, 0.7857040551775740e-01, 0.9096915224111880e+00, 0.5048166916632318e+00, 0.9737715836828489e+00, 0.6597329703373522e+00, 0.1262659377119990e-01, 0.3525536903406803e+00, 0.6167857219758321e+00, 0.7408468432196044e+00, 0.7867196986400673e+00, 0.2313289425583596e+00, 0.8524506859175643e+00, 0.1392061690435467e+00, 0.4799927308227143e+00, 0.5988201473645773e+00, 0.8148615971957359e+00, 0.9991849415795491e+00, 0.4062458282025133e+00, 0.8021012361110058e-01, 0.7041715844729369e+00, 0.9319571339573152e-01, 0.9303801779957639e+00, 0.6825635414858495e+00, 0.2651022334979941e+00, 0.2966022780498856e-01, 0.2756350604456157e+00, 0.3996117016882065e+00, 0.4084803031952775e+00, 0.7693093419592285e+00, 0.5425247266459055e+00, 0.7317803042615013e+00, 0.9398853135227920e+00, 0.5923013015298344e+00, 0.6749323490534131e-01, 0.2677632908794654e+00, 0.1468441510303787e+00, 0.1088650133568649e+00, 0.2179701521651023e+00, 0.8673093102161434e+00, 0.5667697713082290e+00, 0.5061732946886482e+00, 0.4707781162637231e+00, 0.4572852405093119e+00, 0.2817384205908577e+00}, + n: 21, + zOut: []float64{0.2549866140639053e+01, 0.2289692363298807e+01, 0.2042824675422646e+01, 0.1807091836494319e+01, 0.1608583829794490e+01, 0.1518595775727545e+01, 0.1431682400757094e+01, 0.1278141352537376e+01, 0.1218077200015686e+01, 0.1115746594519385e+01, 0.9489661418934578e+00, 0.7950491688285616e+00, 0.6705557578092293e+00, 0.4223587911677574e+00, 0.3211641543143434e+00, 0.1395786344205778e+00, 
0.9463219952875908e-01, 0.4563841563552573e-01, 0.3718976888616738e-01, 0.2766660139323238e-01, 0.2861060880329385e-03, 0.1489369389057270e-25, 0.4009456273839574e-11, 0.5913965075566282e-11, 0.1431682400757094e+01, 0.9808641529204798e-18, 0.1658075466067222e-10, 0.2375182467377229e-10, 0.1278141352537376e+01, 0.1561716996498461e-20, 0.5563872147228742e-10, 0.4003249215027731e-10, 0.1218077200015686e+01, 0.8198555146385643e-18, 0.1047304739332037e-09, 0.1509388346471184e-09, 0.1115746594519385e+01, 0.6804594514014344e-21, 0.1132515351796206e-08, 0.7305070647547987e-09, 0.9489661418934578e+00, 0.6324297261214257e-19, 0.2036338790917205e+02, 0.2036338790917205e+02, 0.1050000000000000e+03, 0.3147392290249433e+01, 0.2857142857142857e+01, 0.1188392639382130e-06, 0.6705557578092293e+00, 0.2287929633195320e-19, 0.1283168907741916e-05, 0.1048613283787068e-05, 0.4223587911677574e+00, 0.1189072130544977e-23, 0.1798535669474704e-05, 0.2477367923932174e-05, 0.3211641543143434e+00, 0.8187795594567816e-23, 0.5813187502570164e-05, 0.4274698539216166e-05, 0.1395786344205778e+00, 0.4387796946624011e-14, 0.1512969274311187e-04, 0.4383088766938462e-04, 0.9463219952875908e-01, 0.2802074325023381e-22, 0.1713816584845726e-03, 0.1715892923902221e-03, 0.4563841563552573e-01, 0.8331483361628599e-18, 0.1806157324013782e-06, 0.1193242630586832e-05, 0.3718976888616738e-01, 0.1396316416906445e-20, 0.3480495660704905e-03, 0.4402048010276426e-03, 0.2766660139323238e-01, 0.1504073756330590e-24, 0.6353091961964474e-03, 0.1317363577877200e-03, 0.2861060880329385e-03, 0.4589883047388968e-23, 0.3883745848947868e-02, 0.5358907697572753e-02}, + info: 0, + }, + { + z: []float64{0.8456683250225028e-01, 0.3058015837199188e+00, 0.4317902183064470e-01, 0.3671979469896071e+00, 0.3595487854233631e+00, 0.8791524368561765e+00, 0.1256853689373587e+00, 0.3494000337993621e+00, 0.2131672180923028e+00, 0.4488324680055523e+00, 0.9452502344450827e+00, 0.7197446096567599e+00, 0.8918071015747069e+00, 0.5551949519753436e+00, 0.5627883747367335e+00, 0.6930858113379974e+00, 0.7026202611250518e+00, 0.3856840921343180e+00, 0.4453224782611998e+00, 0.9627551339340559e+00, 0.6255469039377685e+00, 0.6093210861157965e+00, 0.1297243619022892e-01, 0.2745425059216151e+00, 0.5902573930769862e+00, 0.3658719785914519e+00, 0.8179101736936932e+00, 0.4251816548465440e+00, 0.3319568042078717e+00, 0.8294675656951641e+00, 0.1906193594777114e+00, 0.6862699206380130e+00, 0.6832362040809048e+00, 0.8968537122850908e+00, 0.3753636108676777e+00, 0.9926770667520373e+00, 0.7473790867948205e-01, 0.3653635034012445e+00, 0.6553100464759598e-01, 0.1045856631958887e+00, 0.2265961870023414e+00, 0.3205967446655300e-01, 0.1845082884370131e+00, 0.2250092498055734e+00, 0.5876866067653603e+00, 0.2146512915654570e+00, 0.3937206506545190e+00, 0.4110924225795073e+00, 0.3228836105384403e+00, 0.3531543308069818e-01, 0.9472843614842231e+00, 0.2348052564069324e+00, 0.1315455878409307e+00, 0.2246729768637089e+00, 0.8445861542610614e+00, 0.9528962673528295e+00, 0.4907499555456967e+00, 0.9420172239501383e+00, 0.4896535244350841e-01, 0.5088622551995382e+00, 0.1789066474661110e+00, 0.3566730680524121e+00, 0.4279936896698711e-01, 0.5241586506933626e-02, 0.9191361995860731e+00, 0.7666474814508011e+00, 0.8302596584023555e+00, 0.4738482866502817e+00, 0.2085735695866969e+00, 0.3976808123484415e+00, 0.4364253757027529e+00, 0.2407360056333678e+00, 0.4002772343201099e+00, 0.7717338386755229e+00, 0.5023514841326452e+00, 0.8988902694804358e+00, 0.4259852339101408e+00, 0.2067093317010162e+00, 
0.8456128611542781e+00, 0.1290027044731792e+00, 0.2598901078235174e+00, 0.5836006873243835e+00, 0.9777637054617091e+00, 0.4896357840832322e+00}, + n: 21, + zOut: []float64{0.2625813250937807e+01, 0.2316814186652984e+01, 0.2239421861777163e+01, 0.1853308891402210e+01, 0.1827033391537137e+01, 0.1481398580531558e+01, 0.1385699025778513e+01, 0.1101045596651604e+01, 0.1077490875294086e+01, 0.6790358065311344e+00, 0.6361857378195889e+00, 0.5095707085100130e+00, 0.4739579905788711e+00, 0.4209497658895578e+00, 0.3012142858983374e+00, 0.2432742659092710e+00, 0.2088674538811938e+00, 0.1664409530858738e+00, 0.2606656040091844e-01, 0.1205810631995507e-01, 0.9325512290661112e-07, 0.1292469707114106e-24, 0.1719039818430849e-04, 0.1845080438772963e-04, 0.1385699025778513e+01, 0.2224123229986848e-19, 0.6819642419856913e-05, 0.9079053758699490e-05, 0.1101045596651604e+01, 0.6054420446023063e-14, 0.5392861994521241e-06, 0.8422816920290823e-06, 0.1077490875294086e+01, 0.7330726620037819e-25, 0.4290042344637304e-06, 0.3471234449409943e-06, 0.6790358065311344e+00, 0.1969273319091676e-18, 0.8635424232755040e-07, 0.8294124288205587e-07, 0.6361857378195889e+00, 0.4466775307674635e-22, 0.1958564738864290e+02, 0.1958564738864290e+02, 0.9700000000000000e+02, 0.2927437641723356e+01, 0.2061855670103093e+01, 0.4284285350948107e-09, 0.4739579905788711e+00, 0.4844247598072386e-17, 0.1501956526577450e-07, 0.4599310764980267e-07, 0.4209497658895578e+00, 0.5697556379758414e-13, 0.1468884761079027e-05, 0.1935721204206928e-05, 0.3012142858983374e+00, 0.3414189997796539e-26, 0.7129394687116921e-05, 0.9262068822983554e-05, 0.2432742659092710e+00, 0.2465190328815662e-31, 0.3320148255009530e-04, 0.2563439348523979e-04, 0.2088674538811938e+00, 0.8874685183736383e-29, 0.9377671959730327e-04, 0.7166613903737452e-04, 0.1664409530858738e+00, 0.1175218723307338e-19, 0.5883141447281398e-03, 0.4140832085136277e-03, 0.2606656040091844e-01, 0.2634795423438179e-27, 0.4797918408258123e-02, 0.4833036765864798e-02, 0.1205810631995507e-01, 0.6530371216255944e-19, 0.7811358433513861e-02, 0.1392611548870054e-01, 0.9325512290661112e-07, 0.1190420191421466e-12, 0.6274023555637595e-01, 0.1241063086176471e-01}, + info: 0, + }, + { + z: []float64{0.6345014789904082e+00, 0.9828013844807603e+00, 0.5292373213190880e+00, 0.3345627500234365e+00, 0.9345959669050260e+00, 0.5873733336147081e+00, 0.8888462128293518e+00, 0.1044020545901757e+00, 0.9375280272458696e+00, 0.1233411973951652e+00, 0.3330283913003940e+00, 0.2432208394685875e-01, 0.6106490172620382e+00, 0.9136489477065954e+00, 0.8250850334045809e+00, 0.7866479843500775e+00, 0.1629924039124250e+00, 0.2349971551206693e+00, 0.1024684774133804e+00, 0.7852727749024598e+00, 0.1427984573527686e+00, 0.4315181982028241e+00, 0.6564047419338838e+00, 0.5555590030741936e+00, 0.8205569452257172e+00, 0.4368031922318115e+00, 0.2897575605849890e+00, 0.6163386054890784e+00, 0.7016794584968877e+00, 0.7265422079630662e+00, 0.6393115149906525e+00, 0.7552597718778955e+00, 0.5029129969011173e+00, 0.4874732646810529e+00, 0.5094624150009296e+00, 0.4766369914172104e+00, 0.9003818070855094e+00, 0.6813014247956244e+00, 0.2988296447575162e+00, 0.6597790416796705e+00, 0.5646192150605487e+00, 0.3275024686793908e+00, 0.2725061169408716e-01, 0.8098076662320616e+00, 0.3345276190659139e+00, 0.1812780354242056e+00, 0.1997802571871318e+00, 0.5440765633504190e+00, 0.9774510594766147e+00, 0.1863483792916641e+00, 0.5910097330685633e+00, 0.2734489448477760e+00, 0.4395818444416557e+00, 0.9022786559097939e+00, 0.1029995124123335e+00, 
0.9437933734967648e+00, 0.1226746192846208e+00, 0.1122541771291640e+00, 0.1756516455268763e+00, 0.9109529416432673e+00, 0.3050345572159197e-01, 0.1746678233544180e+00, 0.3750993781268622e+00, 0.4476224977918100e+00, 0.5437446694835568e+00, 0.3729575052852190e+00, 0.3542378566336749e+00, 0.7449948633772266e+00, 0.3157555204945224e+00, 0.7753984984296890e+00, 0.3284038993889418e+00, 0.5159361949232710e-01, 0.4529339074400007e-01, 0.5693581512511349e+00, 0.2213156237057901e+00, 0.9395831428155459e+00, 0.3843047271617620e+00, 0.2968653921042081e+00, 0.6156090467550724e+00, 0.3170556612853570e-01, 0.8265655132088339e+00, 0.3165750369023552e+00, 0.7055602587055542e+00, 0.8365600455841105e+00}, + n: 21, + zOut: []float64{0.2473859321883559e+01, 0.2422834159796051e+01, 0.2342770718537345e+01, 0.2190393245355733e+01, 0.2048748874742353e+01, 0.1880327408972340e+01, 0.1569014269461521e+01, 0.1302496773444914e+01, 0.1193514796722045e+01, 0.1150651682141956e+01, 0.8969794111087930e+00, 0.8226581338194529e+00, 0.6917042463755413e+00, 0.5972139808466055e+00, 0.3480375932216391e+00, 0.2932789944580818e+00, 0.2090971942801221e+00, 0.1221616224423389e+00, 0.1000478595340333e+00, 0.3412207557320287e-01, 0.3160927987836547e-03, 0.2386777554836693e-25, 0.1787847554062023e-08, 0.1930667016131883e-08, 0.1569014269461521e+01, 0.5039432348921582e-18, 0.3047636948536222e-07, 0.1223330280849913e-07, 0.1302496773444914e+01, 0.0000000000000000e+00, 0.7408697490815375e-06, 0.1498788057488595e-05, 0.1193514796722045e+01, 0.6273021918317486e-26, 0.8867922405978688e-08, 0.3710956866722375e-08, 0.1150651682141956e+01, 0.1462883977034466e-17, 0.2681277488017278e-06, 0.6188429230273071e-06, 0.8969794111087930e+00, 0.4930380657631324e-31, 0.2269022845551642e+02, 0.2269022845551641e+02, 0.1110000000000000e+03, 0.3215419501133787e+01, 0.4504504504504505e+01, 0.4606971433208740e-07, 0.6917042463755413e+00, 0.7123549154360197e-18, 0.1197511514786163e-07, 0.1386949181837661e-07, 0.5972139808466055e+00, 0.4652335183636722e-20, 0.1082703076660205e-07, 0.1003906263016573e-07, 0.3480375932216391e+00, 0.3573127201018094e-16, 0.2388210318135170e-06, 0.5694782222336181e-06, 0.2932789944580818e+00, 0.7601337464964823e-24, 0.7791755144624492e-05, 0.3242296896441773e-05, 0.2090971942801221e+00, 0.1663630656286764e-22, 0.9916420504550920e-06, 0.3916254919710180e-05, 0.1221616224423389e+00, 0.1406614427791829e-18, 0.1356917845842480e-04, 0.1379449187478977e-04, 0.1000478595340333e+00, 0.2800456213534592e-28, 0.3213444959426999e-04, 0.2790230345312924e-04, 0.3412207557320287e-01, 0.1075846717720530e-17, 0.8531299184272352e-03, 0.1419520770966347e-02, 0.3160927987836547e-03, 0.1848892746611746e-31, 0.7376316906583622e-02, 0.6546496744187779e-02}, + info: 0, + }, + { + z: []float64{0.7781991870339823e+00, 0.3799726947828087e+00, 0.8225406656461727e+00, 0.4322410258771066e+00, 0.4965834581857734e+00, 0.1642548945285990e+00, 0.4773703559733889e+00, 0.7580746486223987e+00, 0.9220155673403277e+00, 0.1624062232083541e+00, 0.5664470130564820e+00, 0.7367013127895712e+00, 0.4720756942873998e+00, 0.8695958825780965e+00, 0.6045812450089678e+00, 0.8503421320137978e+00, 0.7291568033584502e+00, 0.9563602214514951e+00, 0.1245752972610169e+00, 0.2621011209284912e+00, 0.8642265339858576e+00, 0.9528021036615359e+00, 0.9515862784178062e+00, 0.2632504807072628e+00, 0.8585529108378003e+00, 0.3025834513231899e+00, 0.7125877353473579e+00, 0.9221275053156179e+00, 0.9787992369955746e+00, 0.8789465750552752e+00, 0.9318675804398889e+00, 0.8827440896392771e+00, 
0.3124337393280541e+00, 0.3124656972853590e-01, 0.5860766227864144e+00, 0.9812515177915087e+00, 0.4480000254042209e+00, 0.5121903109069345e+00, 0.1837298514334742e+00, 0.2680410429176278e+00, 0.5529294510898309e+00, 0.3627557909974974e+00, 0.9282219359125773e+00, 0.2173723036967439e+00, 0.7593665130145739e+00, 0.6245533364325671e+00, 0.1767680905493787e+00, 0.3823356866170564e+00, 0.5171578058722374e+00, 0.5376155024979712e+00, 0.5280269975706942e-01, 0.7947058949878518e+00, 0.1214716509814368e+00, 0.6840110600217510e+00, 0.7041243787921371e+00, 0.6945194787199669e+00, 0.8184844284849127e+00, 0.3440855810599863e+00, 0.1619958520504678e+00, 0.6990470088709023e+00, 0.3532225659848430e+00, 0.8229090368317697e+00, 0.8349696782028652e+00, 0.6932180266797183e-01, 0.6880274120498576e+00, 0.7566065163897195e+00, 0.2981422921509080e-01, 0.8636153925759227e-02, 0.7928093803058089e-01, 0.3096144259285381e+00, 0.6829886066202427e+00, 0.8246144682759358e+00, 0.2007259621364732e+00, 0.8710862365466575e+00, 0.6898755422119236e+00, 0.9105030198433044e+00, 0.8974818523094739e+00, 0.8909991694059211e+00, 0.8084341913662618e-01, 0.6072211485624930e+00, 0.9680950749726419e+00, 0.3932275197607872e+00, 0.8781208296025552e+00, 0.5527043030768199e+00}, + n: 21, + zOut: []float64{0.3158195987040060e+01, 0.2674397094280163e+01, 0.2611167345511754e+01, 0.2117792657212280e+01, 0.2026395303611904e+01, 0.1943316400161173e+01, 0.1877195462180231e+01, 0.1801757023901512e+01, 0.1273111869927408e+01, 0.1088705127135854e+01, 0.9559661931705944e+00, 0.9013607966201198e+00, 0.6445742634037150e+00, 0.5446175180211763e+00, 0.4486902226406620e+00, 0.3854842198344371e+00, 0.2741584921777233e+00, 0.1210105287753585e+00, 0.6940868812740908e-01, 0.2059029850850508e-01, 0.3673564803695461e-02, 0.8077935669463161e-25, 0.2195808487174199e-10, 0.8449955132778232e-10, 0.1877195462180231e+01, 0.1633831636278255e-19, 0.3497961877980620e-08, 0.7650955873935623e-08, 0.1801757023901512e+01, 0.4072241987713803e-24, 0.2126842707416970e-06, 0.1130018646589442e-06, 0.1273111869927408e+01, 0.1740941751301712e-20, 0.3388039768888790e-03, 0.2863573324805332e-03, 0.1088705127135854e+01, 0.2858227958137079e-16, 0.2905468688553340e-03, 0.2787380120464217e-03, 0.9559661931705944e+00, 0.1315475667900737e-21, 0.2494156905704573e+02, 0.2494156905704573e+02, 0.1010000000000000e+03, 0.3074829931972789e+01, 0.1980198019801980e+01, 0.2647338938369395e-05, 0.6445742634037150e+00, 0.2341846562030201e-22, 0.2667503836084351e-03, 0.1510510164587073e-03, 0.5446175180211763e+00, 0.2242636890234710e-24, 0.5495840118850513e-05, 0.4810476319752364e-05, 0.4486902226406620e+00, 0.3552400222016096e-16, 0.2740332708518062e-05, 0.2562621073339741e-05, 0.3854842198344371e+00, 0.1036641484462207e-22, 0.2248949759797675e-05, 0.2492010005917310e-05, 0.2741584921777233e+00, 0.6071156729318302e-20, 0.4130294607665845e-03, 0.1141603094124651e-03, 0.1210105287753585e+00, 0.2468629837810345e-16, 0.7230132634032991e-02, 0.6618694284253705e-02, 0.6940868812740908e-01, 0.5487745597049798e-23, 0.4151017130761703e-02, 0.4525758324635968e-02, 0.2059029850850508e-01, 0.6083695301064443e-26, 0.3095027772299370e-02, 0.3218440937246976e-02, 0.3673564803695461e-02, 0.6106717417722413e-22, 0.6443405665983428e-02, 0.1622720765285148e-02}, + info: 0, + }, + { + z: []float64{0.1985414918961074e+00, 0.4386372064833388e-01, 0.9272679997999695e+00, 0.9276564660006267e+00, 0.9509373652834754e+00, 0.6247274629358034e+00, 0.4838472987754385e+00, 0.6222976111124274e+00, 0.4248800474043221e+00, 
0.9755567793536163e+00, 0.8912410754920851e+00, 0.2313492132656925e-01, 0.1563015923526294e+00, 0.7905214363406186e+00, 0.4927898149840948e+00, 0.5370919594301193e+00, 0.4644799081842790e+00, 0.9960946978984735e+00, 0.2493095982292803e+00, 0.3814725574373231e+00, 0.2940816225707878e+00, 0.6535898793246008e+00, 0.2084037192546966e+00, 0.5132959253232522e+00, 0.4251209694264122e+00, 0.9378579353373483e+00, 0.9786807729708236e+00, 0.7781441628535176e+00, 0.3951333405907657e+00, 0.3920742203259495e+00, 0.1500883321285087e+00, 0.9121803806188729e+00, 0.8516361075320147e+00, 0.6824417668614835e-01, 0.1390781730568696e+00, 0.3112124735573820e+00, 0.2535919561468232e+00, 0.7644352497405866e+00, 0.9218252733114815e+00, 0.9901458352558505e+00, 0.3279511469507669e+00, 0.8365860218696105e+00, 0.3198677553816687e+00, 0.6133665370063144e+00, 0.2580491557527305e+00, 0.6586714927743139e+00, 0.3933698458458805e+00, 0.7194383869235133e-01, 0.2213903901048253e-02, 0.1496106697134404e+00, 0.6766438239122079e+00, 0.7197724372602395e+00, 0.4797148651858846e+00, 0.5998154083597278e+00, 0.5590962662612089e+00, 0.6336887670809047e+00, 0.1072453740736307e+00, 0.7505878626477551e+00, 0.1427362910235584e+00, 0.6016163404777434e+00, 0.9488238354107547e+00, 0.4206748665323531e+00, 0.4473756257202280e+00, 0.2658295729210566e+00, 0.2340075619597504e+00, 0.9617320234413022e+00, 0.5962761690405762e+00, 0.8519099982463364e+00, 0.5571747680961063e+00, 0.1982921114470393e-01, 0.2779304562486129e-01, 0.7924439389736488e+00, 0.9672136382383878e-01, 0.8417095886148459e+00, 0.7473598542298929e+00, 0.9746159458640847e+00, 0.5038819873190444e+00, 0.9169242656654556e+00, 0.5252626769192049e+00, 0.5100979601115528e+00, 0.2790149607164115e+00, 0.4554892778467722e+00, 0.9762420639370454e+00, 0.9832519154713986e+00}, + n: 21, + zOut: []float64{0.2773247209998878e+01, 0.2684570079202721e+01, 0.2535407140485478e+01, 0.2251849067745984e+01, 0.2036533658934636e+01, 0.1904369707782458e+01, 0.1446171755624304e+01, 0.1299751831669502e+01, 0.1269533063987320e+01, 0.1007664938847636e+01, 0.8251797533661941e+00, 0.5818474246453018e+00, 0.5486201168977923e+00, 0.3772420051451134e+00, 0.3380340897724159e+00, 0.2023767003460897e+00, 0.1840234711841343e+00, 0.9225167203340243e-01, 0.4232592978815707e-01, 0.2771262583534443e-01, 0.7321455618534900e-04, 0.2977163580658995e-22, 0.3752078887401622e-10, 0.4179376226086309e-10, 0.1446171755624304e+01, 0.2236367039610280e-12, 0.1299965948722022e-10, 0.1595321259646779e-10, 0.1299751831669502e+01, 0.9926167350636332e-23, 0.8604643146515003e-10, 0.1864418628665438e-09, 0.1269533063987320e+01, 0.1306353659045996e-24, 0.1991121186029356e-08, 0.8983175437487846e-09, 0.1007664938847636e+01, 0.1213184768518325e-22, 0.3444432736029153e-07, 0.6437805745712839e-07, 0.8251797533661941e+00, 0.7772022937681214e-16, 0.2242878545784906e+02, 0.2242878545784905e+02, 0.1000000000000000e+03, 0.3024943310657596e+01, 0.3000000000000000e+01, 0.1709907812443784e-06, 0.5486201168977923e+00, 0.2972680326362443e-23, 0.1350081364219064e-06, 0.1255933647347444e-06, 0.3772420051451134e+00, 0.8046381233254320e-28, 0.7022416754922569e-08, 0.2768431074740138e-07, 0.3380340897724159e+00, 0.1719716773381806e-27, 0.7079953450794871e-07, 0.6648251035830866e-07, 0.2023767003460897e+00, 0.1940724044588524e-24, 0.4764709677102475e-07, 0.4597049352360845e-07, 0.1840234711841343e+00, 0.3459682911934042e-21, 0.4438757837191216e-07, 0.4402069254647340e-07, 0.9225167203340243e-01, 0.2596900924099520e-17, 0.8125925620122143e-07, 
0.6770863964779914e-07, 0.4232592978815707e-01, 0.1280067631025782e-15, 0.3584234088053806e-06, 0.4822962810861105e-06, 0.2771262583534443e-01, 0.1012934875847350e-18, 0.1556396151524861e-05, 0.2916279508701459e-05, 0.7321455618534900e-04, 0.1540743955509789e-32, 0.4190434742116713e-02, 0.1362677535010021e-02}, + info: 0, + }, + { + z: []float64{0.6703344952482478e+00, 0.3577116069291627e+00, 0.8432263018920840e+00, 0.9584130033491455e+00, 0.2861685401430958e+00, 0.9668570623620085e+00, 0.6957796127156375e+00, 0.7603022900919236e+00, 0.7677099838587329e+00, 0.2702658617954818e+00, 0.9607212720070546e-01, 0.3973573502375266e+00, 0.8455511814548733e+00, 0.6862903202354254e-01, 0.1789860417278408e+00, 0.1112926642215833e+00, 0.8320621955974556e+00, 0.2637090148175287e+00, 0.7109403389502641e+00, 0.6494529820495407e+00, 0.8828985636942858e+00, 0.9098303542318168e+00, 0.1777489928582532e+00, 0.8977650882542143e-01, 0.7154356412046237e-01, 0.2693107133832517e-01, 0.7956794415059365e-02, 0.8852594037406200e+00, 0.2162284138438216e+00, 0.9216829080200467e+00, 0.9874282279096652e+00, 0.6087745498247736e+00, 0.7694028142872190e+00, 0.5918349824988693e+00, 0.1915561020798640e+00, 0.1135335598824661e-01, 0.6670902390154858e+00, 0.6601113994758983e+00, 0.7779237148990382e+00, 0.9023999203058302e+00, 0.1848842750705929e+00, 0.6120355291150891e+00, 0.7209828136916797e+00, 0.3124354740483853e+00, 0.9520753904353354e+00, 0.9694533109968584e+00, 0.6869212426966154e+00, 0.3510392134251344e+00, 0.7696249856763533e+00, 0.9203915575905880e+00, 0.4616165896255233e+00, 0.6796681592119477e+00, 0.9261196886552191e+00, 0.4511929507295228e+00, 0.5162093776469306e+00, 0.3093101029632945e+00, 0.1162653262260159e+00, 0.1951366360676013e+00, 0.7945099687545387e+00, 0.9707806531485516e+00, 0.4161172559602336e+00, 0.5813461713597171e+00, 0.8442974147064882e+00, 0.9135490286183924e+00, 0.3261094875410662e+00, 0.7080588637364270e+00, 0.3833080381406134e+00, 0.9095051028453779e+00, 0.6909172438403831e+00, 0.5187928284332267e+00, 0.5783778059525303e+00, 0.9615928062069277e+00, 0.2127624116065856e+00, 0.7254035177440832e+00, 0.2587362009618467e+00, 0.7570212065831576e+00, 0.2401238007004233e+00, 0.2744934873404078e+00, 0.9420426561890750e+00, 0.7098059460475599e+00, 0.2691363643891775e+00, 0.4536316870833661e+00, 0.6839445343274818e+00, 0.9336823543434762e+00}, + n: 21, + zOut: []float64{0.2634467575625416e+01, 0.2594635276829839e+01, 0.2463070407655619e+01, 0.2293127065694029e+01, 0.2072673505384417e+01, 0.1459936798288049e+01, 0.1378497856793854e+01, 0.1333239471494390e+01, 0.1102542278566289e+01, 0.8930469384549427e+00, 0.8520787727779708e+00, 0.6953298055150813e+00, 0.6847648677749263e+00, 0.2185116940651705e+00, 0.2008337976176149e+00, 0.1757551361146239e+00, 0.1271484717317025e+00, 0.5049064582277562e-01, 0.3515271339576785e-01, 0.7966224551514293e-02, 0.1685389559757755e-03, 0.1635721388548769e-22, 0.3551032227485648e-11, 0.3089325791958004e-11, 0.1378497856793854e+01, 0.6463020148652660e-17, 0.4439365591922079e-11, 0.5247211770454983e-11, 0.1333239471494390e+01, 0.8616343098037817e-25, 0.4343651596456738e-10, 0.2452653826174233e-10, 0.1102542278566289e+01, 0.9876517884224792e-16, 0.1549700306365952e-08, 0.1012434062383302e-08, 0.8930469384549427e+00, 0.4606319384654338e-19, 0.1115188986357716e-08, 0.1237013304894199e-08, 0.8520787727779708e+00, 0.1447566071967798e-23, 0.2127343784310997e+02, 0.2127343784310997e+02, 0.1270000000000000e+03, 0.3675736961451247e+01, 0.9448818897637794e+01, 0.2782364756724027e-11, 
0.6847648677749263e+00, 0.6100457017578579e-23, 0.1535488071926541e-09, 0.5733950833190260e-09, 0.2185116940651705e+00, 0.6436934318898579e-12, 0.5207333190112868e-06, 0.4302907440331721e-06, 0.2008337976176149e+00, 0.1972152263052530e-30, 0.2421587237749379e-06, 0.2920528765870267e-06, 0.1757551361146239e+00, 0.8101601496619791e-27, 0.1434500575863456e-11, 0.9045851929625007e-13, 0.1271484717317025e+00, 0.1560107593833772e-17, 0.1620731838764033e-06, 0.1206839073349329e-06, 0.5049064582277562e-01, 0.3155443620884047e-29, 0.7221494757183839e-05, 0.4279630756550309e-05, 0.3515271339576785e-01, 0.0000000000000000e+00, 0.2092539651031481e-04, 0.3641666945464839e-04, 0.7966224551514293e-02, 0.1018583906139065e-23, 0.4276808715243643e-05, 0.6380573645111411e-06, 0.1685389559757755e-03, 0.0000000000000000e+00, 0.2828744147621957e-02, 0.2649548441232189e-02}, + info: 0, + }, + { + z: []float64{0.7647749557731113e+00, 0.1439440544256733e+00, 0.7500486933916728e+00, 0.6414832886183546e+00, 0.8122572937542115e+00, 0.9529234674941257e+00, 0.3352470221802866e+00, 0.6936226545338587e+00, 0.9931729574752579e-01, 0.3187856199297912e-01, 0.1671537284832858e+00, 0.6799274061730229e+00, 0.8177446911383146e+00, 0.5321042755342652e+00, 0.1611635863708949e+00, 0.1556305029012977e+00, 0.8064987994430675e+00, 0.6086425937675999e-01, 0.9539387771768112e+00, 0.5483826791143146e+00, 0.1624656726546599e+00, 0.8604262189788422e+00, 0.3348601405085260e+00, 0.6512790728986305e+00, 0.3357978279833680e+00, 0.7104789129438286e+00, 0.4161092786312924e+00, 0.8790797243791704e+00, 0.1469866181354552e+00, 0.7277776395098579e-01, 0.4317417806367096e-01, 0.8731968970940820e+00, 0.4856440529809196e+00, 0.2315660100703048e+00, 0.2955911335168215e+00, 0.4119299294790694e+00, 0.4565975215473586e+00, 0.8915397570999283e+00, 0.1639735129501203e+00, 0.5055946074157277e+00, 0.9530743160791620e+00, 0.9836351183281582e+00, 0.2913429799811792e+00, 0.9289008959282562e+00, 0.5996674664338365e+00, 0.6609996028219567e+00, 0.7317528474660470e+00, 0.9903648665746729e+00, 0.3982305249886617e+00, 0.3563571411857538e+00, 0.8851540908639802e+00, 0.8253410045707608e+00, 0.8085297859672740e+00, 0.3918185712378985e+00, 0.7922842941861202e+00, 0.7487878715845681e+00, 0.3117744132747395e+00, 0.7135433852632408e+00, 0.8969220948763633e+00, 0.5840741001028926e+00, 0.3261515465187765e+00, 0.5368137772779212e+00, 0.6371156779602756e+00, 0.5575308178040213e+00, 0.1319054467962255e+00, 0.6274305773732142e+00, 0.9230553933157697e+00, 0.3589158151867838e+00, 0.1429059780004028e+00, 0.6433214640390554e+00, 0.9159704795536289e+00, 0.8998439182451070e+00, 0.5665265465351250e+00, 0.4290736239553739e+00, 0.4923118412965143e+00, 0.3733795042755129e+00, 0.4008465132226234e+00, 0.8630963810875470e+00, 0.2835407905147469e+00, 0.8431337072866282e+00, 0.7450545748778526e+00, 0.2948369760127234e+00, 0.6896108484670013e+00, 0.1073791710993542e+00}, + n: 21, + zOut: []float64{0.2492440020550355e+01, 0.1951232214041701e+01, 0.1924880150346451e+01, 0.1800337146913507e+01, 0.1731259644349016e+01, 0.1515025732591961e+01, 0.1392131403063820e+01, 0.1337121843604905e+01, 0.1335944341051471e+01, 0.9695979508906304e+00, 0.8602855628657217e+00, 0.7598736716193354e+00, 0.5663438341779535e+00, 0.4991982558529897e+00, 0.3900147250500268e+00, 0.2612685334229408e+00, 0.1040703325016068e+00, 0.5095987426708586e-01, 0.4270469018444285e-01, 0.6313950453962554e-02, 0.4526318588114864e-04, 0.1623810366421630e-16, 0.5786287269352850e-19, 0.2440824886543042e-19, 0.1392131403063820e+01, 
0.1972152263052530e-30, 0.1305662432930495e-17, 0.2687054746502436e-17, 0.1337121843604905e+01, 0.6807276388656606e-23, 0.2125450465832215e-16, 0.4166517172824519e-16, 0.1335944341051471e+01, 0.3971441341244662e-25, 0.3131313418539388e-15, 0.1598422579069803e-15, 0.9695979508906304e+00, 0.2612707318091991e-24, 0.3703169268599905e-13, 0.5897085302084673e-13, 0.8602855628657217e+00, 0.3944304526105059e-30, 0.1999104914098575e+02, 0.1999104914098576e+02, 0.1050000000000000e+03, 0.3185941043083900e+01, 0.4761904761904762e+01, 0.3815530651290367e-11, 0.5663438341779535e+00, 0.4442864618204738e-26, 0.2314135864505331e-10, 0.3280099800133426e-10, 0.4991982558529897e+00, 0.2524354896707238e-28, 0.8792792965154248e-10, 0.6284153656438893e-10, 0.3900147250500268e+00, 0.2716911819042188e-17, 0.6371083763388994e-09, 0.9063994743331422e-09, 0.2612685334229408e+00, 0.7682295356186976e-14, 0.2615630848198228e-07, 0.4934681155229334e-07, 0.1040703325016068e+00, 0.3134368286722418e-21, 0.1338644268367565e-05, 0.2571041054206291e-05, 0.5095987426708586e-01, 0.6829203137199389e-20, 0.1734538060272007e-04, 0.9160403953625247e-05, 0.4270469018444285e-01, 0.5620633949699709e-29, 0.1970936154220316e-07, 0.7875060113736862e-07, 0.6313950453962554e-02, 0.1074365444038600e-22, 0.1950342748319920e-02, 0.1960570817697857e-02, 0.4526318588114864e-04, 0.5045801736573469e-22, 0.2719687288170513e-02, 0.7229547441545878e-02}, + info: 0, + }, + { + z: []float64{0.6830594937182136e+00, 0.2806903427821971e+00, 0.3842580501524426e+00, 0.7113293946876786e+00, 0.6120481717427926e+00, 0.4087522624125913e+00, 0.2683959794755927e+00, 0.1982680719912362e+00, 0.5503761648124630e+00, 0.9947639519912582e+00, 0.4817851020535202e-01, 0.1841410650783141e+00, 0.7364485595361944e+00, 0.2204876163244174e+00, 0.8785531172976315e+00, 0.3900984044089164e+00, 0.9898399332392419e+00, 0.4652324815840650e+00, 0.6574114801218780e+00, 0.6641201888165797e+00, 0.5371222655177923e+00, 0.2241732155112269e+00, 0.6726821610374456e-01, 0.2361953456198683e+00, 0.5602287124209401e+00, 0.7836687903368798e+00, 0.2633616918428828e+00, 0.9669553142730151e+00, 0.3692579170869618e+00, 0.5395054150347700e-01, 0.4776047537674022e-01, 0.5691410116455676e+00, 0.1304181940038973e+00, 0.2107738642513712e+00, 0.5434523182333528e-01, 0.4019375962886824e+00, 0.7125153526157032e+00, 0.5305712841430671e+00, 0.1164846974244694e+00, 0.2037577925839799e+00, 0.2045617511271008e+00, 0.6122054957321945e+00, 0.5114531146106966e+00, 0.2882478115017332e+00, 0.5304873168821023e+00, 0.1796380649103803e+00, 0.9949092378946756e-01, 0.5448372048900864e+00, 0.1971761401002603e+00, 0.9487011110186927e+00, 0.8526405089060669e+00, 0.4533764477818548e+00, 0.8410819408489681e+00, 0.7592016862970913e+00, 0.8131142938762341e+00, 0.1445496351282255e+00, 0.6614974180377753e+00, 0.3394841721225367e+00, 0.4878107636737276e+00, 0.5499609605641745e+00, 0.1789798083030991e+00, 0.2008397619575961e+00, 0.5247567247775218e+00, 0.7394478154071386e+00, 0.6713478520024037e+00, 0.3894295843862983e+00, 0.9099978858741096e+00, 0.9872931573704680e-01, 0.5370559804237263e+00, 0.8492599321050560e+00, 0.4671993983916084e+00, 0.6397070306147701e+00, 0.2884512208824136e+00, 0.5002881141083532e+00, 0.3356213102526899e+00, 0.4102730364689955e+00, 0.8172446941984062e+00, 0.1463391840674726e+00, 0.6374760821459636e+00, 0.2986995049911381e+00, 0.6932950085590186e+00, 0.8475246239619540e+00, 0.3550038162879430e+00, 0.9747731646460122e+00}, + n: 21, + zOut: []float64{0.2302037253547019e+01, 0.1904432769194928e+01, 
0.1838629010091233e+01, 0.1679631021950300e+01, 0.1638588328999231e+01, 0.1556939208705248e+01, 0.1154775776884602e+01, 0.1123025367728587e+01, 0.1005963210310813e+01, 0.7892040895461047e+00, 0.6171474041407791e+00, 0.5107385873854940e+00, 0.4745466376069911e+00, 0.2987072329916590e+00, 0.2518976395749056e+00, 0.1562926917018191e+00, 0.1208002043434920e+00, 0.1117929648180551e+00, 0.3025264937486962e-01, 0.5461437067005637e-02, 0.3901591662109151e-04, 0.2028008633963607e-17, 0.1197298285059118e-05, 0.1163352934520667e-05, 0.1154775776884602e+01, 0.2502221352972909e-22, 0.7438201518083012e-06, 0.8784873727121712e-06, 0.1123025367728587e+01, 0.2368236188315088e-19, 0.3676741199329849e-11, 0.3039307086935086e-11, 0.1005963210310813e+01, 0.2065553394230697e-24, 0.6686270095375819e-11, 0.8550052948388210e-11, 0.7892040895461047e+00, 0.1239058396467615e-20, 0.2067843352216928e-06, 0.6687212675645054e-07, 0.6171474041407791e+00, 0.2620094590259913e-21, 0.1757090250187976e+02, 0.1757090250187975e+02, 0.9500000000000000e+02, 0.2798185941043084e+01, 0.1052631578947368e+01, 0.6997553999213272e-08, 0.4745466376069911e+00, 0.5994838008700348e-24, 0.1052996378646298e-04, 0.6312169743031761e-04, 0.2987072329916590e+00, 0.9860761315262648e-30, 0.1466875513591431e-03, 0.1024839055944592e-03, 0.2518976395749056e+00, 0.7516632826070909e-17, 0.5524256710713082e-08, 0.1151290676113078e-07, 0.1562926917018191e+00, 0.1348952147927930e-27, 0.1222984298841777e-06, 0.3042067118561864e-06, 0.1208002043434920e+00, 0.4760428464210509e-23, 0.9492582004636672e-05, 0.3402727365723720e-04, 0.1117929648180551e+00, 0.7018716354804804e-22, 0.2431231599464108e-03, 0.1315672135071063e-03, 0.3025264937486962e-01, 0.4377821080194221e-21, 0.1333575496853992e-02, 0.6988231628130009e-03, 0.5461437067005637e-02, 0.2193916840728260e-24, 0.1845211740639344e-02, 0.8881725247692956e-03, 0.3901591662109151e-04, 0.2067951531382569e-23, 0.6469870796864946e-02, 0.1027022788337614e-01}, + info: 0, + }, + { + z: []float64{0.3270384527330652e+00, 0.4068565573647237e+00, 0.5445258191923925e+00, 0.9571802305689175e+00, 0.1840747434207346e-01, 0.8759261930487382e+00, 0.3018283835341639e+00, 0.4754674889513868e+00, 0.9620709342523251e+00, 0.3596342978433105e+00, 0.1820397917245298e+00, 0.9562190617892271e+00, 0.8004073710305811e+00, 0.9503584926946729e+00, 0.1634422399578277e+00, 0.3053134184266467e+00, 0.3871438286377282e+00, 0.1225252880124038e+00, 0.5955538022158587e+00, 0.9831372787057571e+00, 0.2130378801829313e+00, 0.1502296787149880e+00, 0.5120891507658710e+00, 0.2769158347903287e+00, 0.6740978559159013e+00, 0.6671510755324899e+00, 0.5252429573239310e+00, 0.7312875683466940e+00, 0.6359518339326128e+00, 0.3635213813950466e-01, 0.7509704695539338e+00, 0.2796455326190517e+00, 0.3929933998036216e+00, 0.2441550702438192e+00, 0.5181887041338979e+00, 0.2149631476903890e-01, 0.1922157358558590e+00, 0.8352708839021310e+00, 0.2143118694803675e+00, 0.8099207585987700e+00, 0.4013415573783083e+00, 0.8990854368069680e+00, 0.2274365144039603e-01, 0.1574221848735892e+00, 0.5021896402707141e+00, 0.6811320055313477e+00, 0.4008235511220088e+00, 0.2607115591315068e+00, 0.8396807341169598e+00, 0.3393611517447541e+00, 0.3095200315094494e+00, 0.8849677836408325e+00, 0.3309280721041051e+00, 0.2177939175852258e+00, 0.7396054658394657e-02, 0.5387313739545944e+00, 0.5592732178453728e+00, 0.4422242955757759e+00, 0.6010599951901635e+00, 0.2729214395073326e+00, 0.8861412244709392e+00, 0.3303460134234409e+00, 0.3230906405176623e+00, 0.5979281304041633e+00, 
0.1747480524852378e+00, 0.5019201598522602e+00, 0.3041629115671023e+00, 0.7344252813174572e+00, 0.5145018596354390e+00, 0.7032668540687012e+00, 0.4542245378490408e+00, 0.6883469599293311e+00, 0.7365046871633816e+00, 0.6892200550994384e+00, 0.9612097768268251e+00, 0.3687306903044788e+00, 0.4902139977664450e+00, 0.4004732628576805e+00, 0.2121831569752699e+00, 0.3483027732062219e+00, 0.4140454821962284e+00, 0.5590476647584736e+00, 0.1191647727722587e+00, 0.7126611758207085e+00}, + n: 21, + zOut: []float64{0.2451808019367049e+01, 0.2128252216129400e+01, 0.1884015768998821e+01, 0.1793403527862625e+01, 0.1731698897481475e+01, 0.1605979322764491e+01, 0.1346135733575790e+01, 0.1222183737737495e+01, 0.1054517637460965e+01, 0.8541696661721662e+00, 0.8335553929836017e+00, 0.7643627371640215e+00, 0.6493538020248019e+00, 0.5117674171078171e+00, 0.4801902398267136e+00, 0.1830481952424627e+00, 0.1336751260833609e+00, 0.7548204386515513e-01, 0.4276641694583261e-01, 0.1152852330305948e-01, 0.4825291327526518e-04, 0.1046558665109411e-21, 0.5148060409033168e-10, 0.1742874021361670e-10, 0.1346135733575790e+01, 0.3944304526105059e-30, 0.1177385120842054e-08, 0.5739784145251797e-09, 0.1222183737737495e+01, 0.5228352308675503e-16, 0.2817945521555932e-11, 0.1891433639936970e-11, 0.1054517637460965e+01, 0.1775328283321620e-16, 0.9506244555381454e-11, 0.6780149876052939e-11, 0.8541696661721662e+00, 0.4289572169691169e-11, 0.3560204585171959e-10, 0.5552509181949502e-10, 0.8335553929836017e+00, 0.8925278809447169e-21, 0.1975794267501038e+02, 0.1975794267501038e+02, 0.9900000000000000e+02, 0.3029478458049887e+01, 0.2020202020202020e+01, 0.4057773458399609e-09, 0.6493538020248019e+00, 0.3196600703119960e-17, 0.4936238682205707e-06, 0.2172173957444601e-06, 0.5117674171078171e+00, 0.2475160268480207e-20, 0.1127880382626992e-04, 0.1031817493791828e-04, 0.4801902398267136e+00, 0.2327139670401985e-28, 0.8359957951383868e-05, 0.8045173068477057e-05, 0.1830481952424627e+00, 0.2763487036072314e-23, 0.1049648761427048e-04, 0.8607871065540926e-05, 0.1336751260833609e+00, 0.6392286336815055e-18, 0.5478798785309189e-04, 0.3178071334079827e-04, 0.7548204386515513e-01, 0.1886324196564483e-25, 0.4604839715747456e-05, 0.3206078546562559e-04, 0.4276641694583261e-01, 0.2130807968310579e-24, 0.2160396768988393e-02, 0.2401524776593251e-02, 0.1152852330305948e-01, 0.4597153189491485e-23, 0.1577121702032026e-02, 0.1548884890976950e-02, 0.4825291327526518e-04, 0.0000000000000000e+00, 0.1144436370534878e-01, 0.4805213857273134e-02}, + info: 0, + }, + { + z: []float64{0.1204296195656108e+00, 0.2644532009534747e+00, 0.2981945708465356e+00, 0.3379851046426219e-01, 0.5872157683391875e+00, 0.3661495849020495e+00, 0.2783310889774259e+00, 0.8471009797765506e+00, 0.6087926721027869e+00, 0.7227374835926721e+00, 0.1157491272838733e+00, 0.5565144099551486e+00, 0.8949691253636921e+00, 0.4271566773654165e+00, 0.6387841251831913e+00, 0.3125047228733280e-01, 0.2092736013478247e-01, 0.9188324418961444e+00, 0.2275180796997284e+00, 0.4519662960462961e+00, 0.1155877493866434e+00, 0.3574940735178409e+00, 0.1489772525039219e+00, 0.3847908470095970e+00, 0.4780110568180811e+00, 0.3489586723075616e+00, 0.2559758537216194e+00, 0.7015930720521131e+00, 0.6512181083044030e+00, 0.1619173394771481e+00, 0.5094142654629881e+00, 0.9037662631759247e+00, 0.8829671919577533e+00, 0.5739200804260277e+00, 0.6584413366512047e+00, 0.7174965427568090e+00, 0.2705911606916550e+00, 0.6519148889036277e+00, 0.8284421625450799e+00, 0.1417109678591228e+00, 0.8472265674806589e-01, 
0.3638971914001953e+00, 0.8802661885427909e-01, 0.5730596475071164e+00, 0.6156880168413905e+00, 0.3769532609458294e+00, 0.6302840487845919e+00, 0.8652028697564479e+00, 0.5886269364510055e-01, 0.9901772381773877e-01, 0.9679961670969370e+00, 0.9025028618130532e+00, 0.2858592550914936e+00, 0.4523960301168739e-01, 0.9961073801516410e+00, 0.5404176614913075e+00, 0.2161216977420954e+00, 0.6517643909270738e+00, 0.1408193879882935e-01, 0.2323315528058443e+00, 0.4040647670376405e+00, 0.5063393857452272e+00, 0.2149141182072994e+00, 0.8405813011731451e+00, 0.8044826512704509e+00, 0.5364711289689184e+00, 0.2922476360373625e+00, 0.9518940562302493e+00, 0.1980063292193738e+00, 0.6243757640305569e-01, 0.3641799792979717e+00, 0.4781083740929701e+00, 0.3488268687432241e+00, 0.8284107864073382e+00, 0.5855039308834178e+00, 0.2593007284605915e+00, 0.9998845891621441e+00, 0.5048349794999019e+00, 0.5828201446782487e+00, 0.8921301000231074e+00, 0.2672402237051515e+00, 0.3352853759971494e+00, 0.5977528180071631e+00, 0.1151194604047919e+00}, + n: 21, + zOut: []float64{0.2557315455353142e+01, 0.2083235685800309e+01, 0.1875944452104717e+01, 0.1852714062580819e+01, 0.1643995946812320e+01, 0.1314218355716144e+01, 0.1293789198407225e+01, 0.1108624029644386e+01, 0.1060866236520538e+01, 0.6893140608436983e+00, 0.6265017468006577e+00, 0.6169803536834606e+00, 0.4472638275156540e+00, 0.3909824987049766e+00, 0.2990543016857948e+00, 0.1644755201813542e+00, 0.1145158516686298e+00, 0.6240413476421734e-01, 0.2540980528285533e-01, 0.1114684260186565e-01, 0.3077034058364147e-04, 0.1514612938024343e-27, 0.1068838715614041e-10, 0.1816953714834036e-10, 0.1293789198407225e+01, 0.1634858280249726e-16, 0.1546152805249061e-09, 0.7139985980503436e-10, 0.1108624029644386e+01, 0.1852295205932338e-17, 0.2931760870340471e-08, 0.5717158524063146e-08, 0.1060866236520538e+01, 0.1656986554198631e-24, 0.3897819821179759e-07, 0.2050786216968739e-07, 0.6893140608436983e+00, 0.1054613376713825e-21, 0.1114796130440952e-06, 0.1754727955273104e-06, 0.6265017468006577e+00, 0.0000000000000000e+00, 0.1823878313701335e+02, 0.1823878313701335e+02, 0.1020000000000000e+03, 0.3172335600907029e+01, 0.3921568627450980e+01, 0.4944285541926277e-08, 0.4472638275156540e+00, 0.7188113412291574e-19, 0.5360570179815202e-09, 0.5055207828345367e-09, 0.3909824987049766e+00, 0.7011182861561962e-17, 0.5424596001050196e-08, 0.1149191082508281e-07, 0.2990543016857948e+00, 0.1397964075969312e-24, 0.2148922978775398e-05, 0.1039924072201963e-05, 0.1644755201813542e+00, 0.1424715207546021e-19, 0.1286356681922344e-03, 0.6709371685087021e-04, 0.1145158516686298e+00, 0.1923558431290915e-24, 0.1040681698574874e-03, 0.9690504707177204e-04, 0.6240413476421734e-01, 0.3021337266996475e-27, 0.3740811812377314e-06, 0.8725750691496404e-06, 0.2540980528285533e-01, 0.6622271595176623e-28, 0.2937391640354928e-03, 0.1148070460566611e-03, 0.1114684260186565e-01, 0.3845696912952433e-29, 0.9582588059437623e-03, 0.1284334386264186e-02, 0.3077034058364147e-04, 0.1610185014413679e-23, 0.1713355521956925e-01, 0.1560043716021792e-01}, + info: 0, + }, + { + z: []float64{0.6550531738065145e+00, 0.9006303757888976e+00, 0.9204609778380736e+00, 0.4658341923257867e+00, 0.5436644687290008e+00, 0.2908556755036122e+00, 0.1209811495723523e+00, 0.6227378829506942e+00, 0.3843019792679099e+00, 0.4431829610151485e+00, 0.4903210952175132e+00, 0.4210264133022774e+00, 0.3200750839632230e+00, 0.2252794777033682e+00, 0.5128630958928748e-01, 0.5136099492114639e+00, 0.4906286803922755e+00, 0.1747340137845675e+00, 
0.6367238354860758e+00, 0.7515407582129341e+00, 0.4275500565044866e+00, 0.2014278558825494e+00, 0.7137603260056157e+00, 0.8686875192977085e+00, 0.7169591605793454e+00, 0.9050302202813415e+00, 0.9899533400874350e-01, 0.9969046692447475e+00, 0.6121260436585757e+00, 0.6518683331715626e+00, 0.5607404970528083e+00, 0.4000687152662108e-03, 0.1221862374388689e+00, 0.4484923714483016e+00, 0.4074629631039243e+00, 0.2322333097032281e-01, 0.4732551077558298e+00, 0.7342979327909982e+00, 0.3458749207719752e+00, 0.7729568887054280e+00, 0.2047858477824707e+00, 0.7991048365911296e+00, 0.5370237056253078e+00, 0.2233168849800682e-01, 0.5135288197983614e+00, 0.9129664417816657e+00, 0.9407166870649586e+00, 0.4321958081203685e+00, 0.5563774896630214e-01, 0.9494906037764664e+00, 0.9674063566885983e+00, 0.2302624676329315e+00, 0.1506811640032519e+00, 0.4445855507514112e+00, 0.6038782945102905e-01, 0.4499742519338505e+00, 0.1940991655604004e+00, 0.6493052482066142e-01, 0.2902929850901331e+00, 0.2876163569026815e+00, 0.9140475482126542e+00, 0.7006705577674063e+00, 0.2819386709216735e+00, 0.9125816873773608e+00, 0.9433170896498744e-01, 0.1632821600392161e+00, 0.3150556796654537e+00, 0.4051206017420083e+00, 0.2687493393481850e+00, 0.5768057516945001e+00, 0.4306858988251338e+00, 0.3827211690774009e+00, 0.3593520435650671e+00, 0.5610694397953092e+00, 0.7151178950427733e+00, 0.7138830385076256e-01, 0.5281951083990266e-01, 0.4880677722651299e+00, 0.9334247489037772e+00, 0.6764242544288053e+00, 0.8551178347453967e+00, 0.1876923010834376e+00, 0.7408118952029578e+00, 0.7967219280126981e+00}, + n: 21, + zOut: []float64{0.2493394334366539e+01, 0.2460477172068809e+01, 0.2083159145455880e+01, 0.1779228959307519e+01, 0.1724095029781113e+01, 0.1560812668654360e+01, 0.1120482054552078e+01, 0.1088744749350826e+01, 0.9613598859526896e+00, 0.9456175481954929e+00, 0.9119552728538921e+00, 0.7767939725863090e+00, 0.6893167116879397e+00, 0.4408717103973492e+00, 0.2706330695081346e+00, 0.2343515836463984e+00, 0.6795401725828518e-01, 0.6600498464192311e-01, 0.2004873521291848e-01, 0.1421097732958122e-01, 0.4015560238058376e-03, 0.2226481018895784e-25, 0.2564499425740104e-12, 0.2658329977886514e-12, 0.1120482054552078e+01, 0.4315813836253463e-23, 0.1335501643346940e-12, 0.1368307862991113e-12, 0.1088744749350826e+01, 0.3926080427856238e-18, 0.1185565636052547e-12, 0.1214013431095007e-12, 0.9613598859526896e+00, 0.2719992401202049e-26, 0.9906513980413714e-13, 0.1012398200894782e-12, 0.9456175481954929e+00, 0.3937993638863291e-26, 0.9389079133840153e-13, 0.9189527811944332e-13, 0.9119552728538921e+00, 0.1580250204306566e-21, 0.1970991413883184e+02, 0.1970991413883184e+02, 0.1080000000000000e+03, 0.3401360544217687e+01, 0.8333333333333334e+01, 0.6496201881821226e-13, 0.6893167116879397e+00, 0.2040319821514109e-19, 0.6886568712781699e-13, 0.6342531805840532e-13, 0.4408717103973492e+00, 0.9116462109840799e-16, 0.6575726430673280e-12, 0.1146915438924438e-11, 0.2706330695081346e+00, 0.6100742213737356e-14, 0.5548784478521690e-10, 0.9411844035493366e-10, 0.2343515836463984e+00, 0.2464780121144947e-24, 0.1267470050081896e-08, 0.7549586177318002e-09, 0.6795401725828518e-01, 0.6058451752097371e-27, 0.2494439518946058e-07, 0.4030144639288306e-07, 0.6600498464192311e-01, 0.1931466728993349e-20, 0.1742718098912744e-06, 0.1063050845015484e-06, 0.2004873521291848e-01, 0.2078695185822955e-22, 0.1365398583912438e-05, 0.6056776980192580e-06, 0.1421097732958122e-01, 0.3944304526105059e-30, 0.8436693551699617e-06, 0.8772483974218289e-06, 
0.4015560238058376e-03, 0.5424456586666688e-20, 0.1372662528173007e-02, 0.2323379424199439e-03}, + info: 0, + }, + { + z: []float64{0.5955158028190186e+00, 0.9601282390728449e-01, 0.2944266054820921e+00, 0.3924250378682403e+00, 0.4462256781176310e-01, 0.5690757232665151e+00, 0.7855339329019884e-01, 0.8681545225840360e+00, 0.7288006955920397e+00, 0.2963229749240101e+00, 0.2728029241156386e+00, 0.4237536504971896e+00, 0.9003368942847588e+00, 0.7357957067011865e+00, 0.4901129309073038e-01, 0.4716171621744532e+00, 0.4890099406800087e+00, 0.3570165784460172e+00, 0.7160897857565474e-01, 0.6109824519907316e+00, 0.1221913259189585e+00, 0.1847302629402909e+00, 0.4009366884871202e-01, 0.9703472440233364e+00, 0.6369052870991366e+00, 0.5580400433416689e+00, 0.9024815855223057e+00, 0.1227045689636573e+00, 0.9668897094172768e+00, 0.5989409973498315e+00, 0.9709139844325040e+00, 0.9532564522700616e+00, 0.6239574483953332e+00, 0.7056703505253071e+00, 0.9506691238320553e-01, 0.7998586962365666e+00, 0.6556519136464994e+00, 0.4181856697120553e+00, 0.9478314170226607e+00, 0.3832663309972284e-01, 0.4257218688507075e+00, 0.2049443327129853e+00, 0.9168853967010917e+00, 0.5423988966788751e-02, 0.3002879152136316e+00, 0.7743151012217909e+00, 0.5154350917338609e+00, 0.9658560730800175e+00, 0.6314599250676610e+00, 0.5584937104443134e+00, 0.4741445335250092e+00, 0.3157508358915576e+00, 0.4349529093465575e+00, 0.7860003715032308e+00, 0.8695087804152180e-02, 0.3920445280700808e+00, 0.3765485221235618e+00, 0.4334604109656897e+00, 0.1636117026415890e+00, 0.3006093728066712e+00, 0.2129148932459900e+00, 0.3092975700919495e+00, 0.1986761620686717e-01, 0.9075541031649185e+00, 0.9781911433730479e+00, 0.6945955075731600e+00, 0.9959584477684137e+00, 0.7505944883792014e+00, 0.7627854018352902e+00, 0.1267035334952504e+00, 0.8056780234474171e+00, 0.4507060196586773e+00, 0.1799397964631048e+00, 0.7554691089798000e+00, 0.3012534688386570e+00, 0.9612172487804931e+00, 0.4056708256608614e+00, 0.6648221051068217e+00, 0.1966051207552482e+00, 0.6661008672469584e+00, 0.6875730168317549e+00, 0.9591133633640367e+00, 0.8995318056907078e+00, 0.1435403265941513e+00}, + n: 21, + zOut: []float64{0.2826953331143077e+01, 0.2184946373215138e+01, 0.2011432985807930e+01, 0.1983939371799505e+01, 0.1783403572375380e+01, 0.1527291743334771e+01, 0.1218359458122959e+01, 0.1031625203087357e+01, 0.9141017580644314e+00, 0.8935891545682184e+00, 0.7995009441187808e+00, 0.6760292370761557e+00, 0.5375939998175604e+00, 0.5031272745805969e+00, 0.4136950654384539e+00, 0.4077555149266558e+00, 0.1977184484214556e+00, 0.1172157517521918e+00, 0.4197144528461013e-01, 0.1336078057720851e-01, 0.3545889291755145e-06, 0.2544082546855865e-16, 0.2583036406189355e-05, 0.3206950450307273e-05, 0.1218359458122959e+01, 0.6512835633504673e-25, 0.5241867641497365e-06, 0.5644106780290774e-06, 0.1031625203087357e+01, 0.0000000000000000e+00, 0.4305359229680111e-07, 0.8271940560154021e-07, 0.9141017580644314e+00, 0.0000000000000000e+00, 0.3911233321019150e-06, 0.2241550799736728e-06, 0.8935891545682184e+00, 0.2385737520619250e-22, 0.4206034452718400e-11, 0.9899461762377114e-11, 0.7995009441187808e+00, 0.9331359083488707e-16, 0.2008361176810137e+02, 0.2008361176810137e+02, 0.1130000000000000e+03, 0.3444444444444445e+01, 0.6194690265486726e+01, 0.1651034509856568e-04, 0.5375939998175604e+00, 0.1421716677825516e-23, 0.1528222338287967e-04, 0.2433829126418849e-04, 0.5031272745805969e+00, 0.6632003279205155e-19, 0.4757657931696731e-05, 0.2751276356124619e-05, 0.4136950654384539e+00, 
0.1680371209901916e-16, 0.1318069984220972e-06, 0.1659409431372840e-06, 0.4077555149266558e+00, 0.2554525986432683e-22, 0.5944173486813264e-06, 0.1429245864393051e-05, 0.1977184484214556e+00, 0.1494891415393817e-27, 0.8465737022131437e-03, 0.4160790916597339e-03, 0.1172157517521918e+00, 0.6780166836549887e-20, 0.3655496023749206e-04, 0.5562211290196199e-04, 0.4197144528461013e-01, 0.1803076020780872e-22, 0.2337686132102848e-02, 0.5241185550833300e-02, 0.1336078057720851e-01, 0.3234329711406148e-27, 0.6369386968947829e-02, 0.2727172056719641e-02, 0.3545889291755145e-06, 0.0000000000000000e+00, 0.6135698199493001e-02, 0.4453215245051444e-02}, + info: 0, + }, + { + z: []float64{0.5256994769563229e+00, 0.4751970954150097e+00, 0.5550798296722601e+00, 0.3565746805143110e+00, 0.3971523551134015e-01, 0.7088262788771817e+00, 0.6369448567868402e+00, 0.6372974860231623e+00, 0.5963599363854956e+00, 0.7996832763398349e+00, 0.7266271077857295e+00, 0.3813393981851099e+00, 0.8884393254050771e-01, 0.8685934834267716e+00, 0.3797066359545181e+00, 0.4541910736411658e+00, 0.6285354031372432e+00, 0.5295256159852654e+00, 0.8908170817260027e+00, 0.5793264969983637e+00, 0.6056871126785802e+00, 0.6045507124525907e+00, 0.9734230310286895e-01, 0.9482358811543057e+00, 0.8804820874856442e+00, 0.4472263119108183e+00, 0.5843860139759072e+00, 0.5372922689923049e+00, 0.5799351791336661e+00, 0.5116182356749631e+00, 0.3816355069915517e+00, 0.6475808331559241e+00, 0.8746013736579017e-02, 0.8411650861050215e+00, 0.2992737481736951e-01, 0.6246901010621124e+00, 0.3123078564554325e+00, 0.7411164234121632e+00, 0.5960432842954070e+00, 0.8102594121204127e+00, 0.3552957522458323e+00, 0.6235337320803771e+00, 0.5962195242077326e+00, 0.4845790335098474e+00, 0.4045953379382708e+00, 0.2452522545656534e+00, 0.2743318280596185e+00, 0.2023077390748810e+00, 0.4206973723636350e+00, 0.4039949452544738e+00, 0.9114680949622015e+00, 0.7779482986378454e+00, 0.6812819304160683e+00, 0.5580962104642140e-01, 0.4287711156774094e+00, 0.9817581515129085e+00, 0.4431516884275333e+00, 0.6237912221093689e-01, 0.8364254576520689e+00, 0.8759899245723605e+00, 0.8787473635901457e+00, 0.4702211826699049e+00, 0.8284521047414805e+00, 0.2041855966018515e+00, 0.8060130309143703e+00, 0.5501723689038956e+00, 0.2188842792675016e-01, 0.5672241818001204e+00, 0.6450459353754012e+00, 0.2431902854401001e+00, 0.5193698672717711e+00, 0.1962120228423043e+00, 0.6361488000943472e-01, 0.5342761659162559e+00, 0.1758994081846166e-01, 0.7459187699999678e+00, 0.5953394313659540e+00, 0.9379918174958790e+00, 0.4032831592210619e+00, 0.7704238877222783e+00, 0.9410981148731864e+00, 0.8355998775704846e+00, 0.7904637124553195e+00, 0.4324126135288506e+00}, + n: 21, + zOut: []float64{0.2360605380677641e+01, 0.2349812365456326e+01, 0.2259072978559188e+01, 0.2074669549844899e+01, 0.1734529465065066e+01, 0.1635336277856887e+01, 0.1535245292061319e+01, 0.1453596007978833e+01, 0.1156130097482147e+01, 0.1084746900314168e+01, 0.1034520391904089e+01, 0.8201881896309742e+00, 0.6191782955028541e+00, 0.5267433243518804e+00, 0.4355086546946592e+00, 0.4155619327338856e+00, 0.3396295360604018e+00, 0.9317218785919168e-01, 0.5773665727788814e-01, 0.1842462732727454e-01, 0.2018232369069597e-07, 0.4517585523147273e-24, 0.2167663685989129e-10, 0.1523023581682742e-10, 0.1535245292061319e+01, 0.1340432450151543e-25, 0.8069500146896454e-10, 0.1279555645700667e-09, 0.1453596007978833e+01, 0.6673291668607916e-16, 0.1337219714947293e-07, 0.4537550993048733e-08, 0.1156130097482147e+01, 0.2388039732285047e-25, 
0.6170125127971897e-05, 0.1353762614268721e-04, 0.1084746900314168e+01, 0.3862470231561244e-18, 0.2338329578753200e-04, 0.2318969913953679e-04, 0.1034520391904089e+01, 0.6409494854920721e-30, 0.2200440813282189e+02, 0.2200440813282190e+02, 0.9400000000000000e+02, 0.2857142857142857e+01, 0.0000000000000000e+00, 0.2156912239043876e-04, 0.6191782955028541e+00, 0.4311940021813490e-19, 0.2098085305628239e-04, 0.2047701462113856e-04, 0.5267433243518804e+00, 0.7292491488692259e-18, 0.2483658471590867e-04, 0.2278231239253353e-04, 0.4355086546946592e+00, 0.6335396825622830e-16, 0.5560492899787359e-04, 0.8068766398366361e-04, 0.4155619327338856e+00, 0.7336406418555410e-28, 0.3220618566867426e-03, 0.1956958209756436e-03, 0.3396295360604018e+00, 0.1297676189088564e-27, 0.4121897802138222e-02, 0.2587205720417875e-02, 0.9317218785919168e-01, 0.7651950780643815e-28, 0.2826512581544380e-02, 0.2814359488753958e-02, 0.5773665727788814e-01, 0.4923437167052441e-21, 0.1821079370679042e-04, 0.2969056479875054e-04, 0.1842462732727454e-01, 0.4842851680813876e-21, 0.4217326450522519e-02, 0.5738852897157041e-02, 0.2018232369069597e-07, 0.2018232369069597e-07, 0.3702854219118327e-02, 0.7091388009151286e-02}, + info: 0, + }, + { + z: []float64{0.5196081465664333e+00, 0.1450873999446859e+00, 0.1902498073897446e+00, 0.9459083122514356e-01, 0.6971960507146802e-01, 0.2126447963850432e+00, 0.1693138701795316e-01, 0.6615449591751588e-01, 0.6671851537110856e+00, 0.7116922416084167e+00, 0.7735840319989629e+00, 0.8593705742984634e+00, 0.6060015557980403e+00, 0.1071620560007485e+00, 0.6056582690297291e+00, 0.2064801115284116e+00, 0.4992099275808294e+00, 0.9056413330105546e+00, 0.6051575714193578e+00, 0.1395071235858423e+00, 0.3386142237730057e+00, 0.2065895571122717e+00, 0.2774285740015214e-01, 0.4649920904615868e+00, 0.9062834189437683e+00, 0.3022801499192989e+00, 0.2125071125007829e+00, 0.2366624610094756e+00, 0.3232052419625674e+00, 0.5879745505340277e+00, 0.3212955913685801e+00, 0.9110441809347004e+00, 0.2144184579231917e+00, 0.5617161155671668e-01, 0.9725855527757206e+00, 0.9798782363582490e+00, 0.9666637418953663e+00, 0.1842420381101751e+00, 0.9810363127661145e+00, 0.4061071205983764e+00, 0.4929437214282740e+00, 0.3087398230344144e+00, 0.8692083335886002e+00, 0.7401065606674918e+00, 0.6829833274325647e+00, 0.9797795404622264e+00, 0.6101846761247042e+00, 0.7796537878703258e+00, 0.4568641605073986e-01, 0.1752946998854403e+00, 0.7558659638245062e+00, 0.4081915619381170e+00, 0.5344465439920063e+00, 0.3199118710728778e+00, 0.3613720388280138e+00, 0.1409062127217534e+00, 0.2811520826338662e+00, 0.9462348779627723e+00, 0.4911007415294493e+00, 0.2488061765237712e+00, 0.1661818317483100e+00, 0.1203910774987040e+00, 0.4544163143251944e+00, 0.8767242907488393e+00, 0.6688224142601292e+00, 0.2497550267789769e+00, 0.2658845545571695e+00, 0.4072601097670568e+00, 0.5517137496493807e+00, 0.4206663434956901e+00, 0.3655154771809294e+00, 0.6509199815744504e-01, 0.6480911975614350e+00, 0.1271267318339999e+00, 0.8874102966539428e+00, 0.9607257650026479e+00, 0.8902777005093224e+00, 0.7074702462790970e+00, 0.5045879991550398e+00, 0.3357826713243962e+00, 0.9303671751018463e+00, 0.3667268693834904e+00, 0.7709773173263057e+00, 0.1274123220614284e+00}, + n: 21, + zOut: []float64{0.2669666350125788e+01, 0.2458073928301015e+01, 0.1960648799383531e+01, 0.1674511937828339e+01, 0.1629444569956276e+01, 0.1605168511500282e+01, 0.1169506153633164e+01, 0.8378899015114316e+00, 0.8214470290652885e+00, 0.7290022816300561e+00, 0.5956393798847858e+00, 
0.5147057006185641e+00, 0.3415078453317431e+00, 0.3316800239182673e+00, 0.2245847640277715e+00, 0.1981086483360383e+00, 0.1729134064527326e+00, 0.1081292683666610e+00, 0.4856623040087921e-01, 0.2705373732739987e-02, 0.9745444154776346e-03, 0.7298616502634276e-19, 0.4601636530183765e-14, 0.3001385089664378e-13, 0.1169506153633164e+01, 0.5522026336547083e-28, 0.9439817212889541e-12, 0.1534086358433234e-11, 0.8378899015114316e+00, 0.7174347921301560e-11, 0.1095021852663145e-10, 0.1580074435702411e-10, 0.8214470290652885e+00, 0.3131705799125763e-20, 0.2247124942125492e-10, 0.3228253356903498e-10, 0.7290022816300561e+00, 0.6063934311659721e-18, 0.3794147471177530e-13, 0.3318524623173314e-12, 0.5956393798847858e+00, 0.3765687391680646e-13, 0.1809487464842083e+02, 0.1809487464842083e+02, 0.1050000000000000e+03, 0.3215419501133787e+01, 0.3809523809523809e+01, 0.2077325438490571e-07, 0.3415078453317431e+00, 0.1645262238367970e-20, 0.7912752069326401e-07, 0.6053132028546428e-07, 0.3316800239182673e+00, 0.4184118241292247e-26, 0.1343987419983520e-06, 0.1754397466154310e-06, 0.2245847640277715e+00, 0.3340387958036406e-22, 0.6150231447604006e-06, 0.4780001259056422e-06, 0.1981086483360383e+00, 0.1070733604161610e-20, 0.1003229473607176e-05, 0.1287291281183416e-05, 0.1729134064527326e+00, 0.1298955481663118e-20, 0.1611116216931278e-05, 0.3807035614938680e-06, 0.1081292683666610e+00, 0.4901279578837721e-23, 0.3565927169328702e-03, 0.2312261838015572e-03, 0.4856623040087921e-01, 0.3325573155747123e-13, 0.4389787505319806e-03, 0.9715248586459800e-03, 0.2705373732739987e-02, 0.1348432029845013e-20, 0.7371401467208961e-04, 0.2975668602269332e-04, 0.9745444154776346e-03, 0.4135903062570168e-24, 0.7273370201297700e-04, 0.3602244970122637e-04}, + info: 0, + }, + { + z: []float64{0.3441168249350421e+00, 0.7064572711980356e+00, 0.6589781374655990e+00, 0.8687841598825752e+00, 0.7811747459515918e+00, 0.7470750719206745e+00, 0.7591454338662803e+00, 0.7956934079357347e-01, 0.3416983743372617e+00, 0.3365394610260509e+00, 0.8635365040683821e+00, 0.4793906833418223e+00, 0.9394584418725971e+00, 0.7354375226759881e+00, 0.3959912256877245e+00, 0.4088558388864650e+00, 0.6899389113237457e+00, 0.6329180251180861e+00, 0.6760812245614891e+00, 0.1743991444245150e+00, 0.3881927716152306e+00, 0.2103848624556167e+00, 0.5309784956583943e+00, 0.5758384660199964e+00, 0.6542468116269122e+00, 0.2141275453071043e+00, 0.4917208240158387e+00, 0.6996271491850585e+00, 0.7074081025976766e+00, 0.1896811377589238e+00, 0.4872676647104760e+00, 0.8987347035581787e+00, 0.1651654313203957e+00, 0.7969431328829826e+00, 0.8979672856778195e+00, 0.2621352236509209e+00, 0.2994430908669790e+00, 0.8412000921937168e+00, 0.6910228781616401e-01, 0.9849719192333963e+00, 0.2835195798153757e+00, 0.6126955834278749e+00, 0.9957514180764350e+00, 0.4133859474205875e+00, 0.2126790023013077e+00, 0.9230696247878700e-01, 0.5165813646587417e+00, 0.9232126059423650e+00, 0.6160486127374017e+00, 0.5529065454633691e+00, 0.6862175458708878e+00, 0.2677675454795836e-01, 0.6729513992144193e+00, 0.8612993184178528e+00, 0.5455358966165057e+00, 0.2518716982253303e+00, 0.5312849864208884e+00, 0.7551228210749875e+00, 0.1095520838657484e+00, 0.8767983608629261e+00, 0.9578119141004069e+00, 0.6878921114137557e+00, 0.2890574974795965e+00, 0.9851780344062913e+00, 0.9392886821673129e+00, 0.4125982690623264e+00, 0.6817912182549608e+00, 0.7805388095263401e+00, 0.1567802887930034e+00, 0.1901362770322003e+00, 0.5155717108920093e+00, 0.5470641908290981e+00, 0.9496522047623522e+00, 
0.4367251554723609e+00, 0.7016249991347693e+00, 0.6652534390290816e+00, 0.3892590437090979e+00, 0.1464591367640408e+00, 0.9179795596761495e+00, 0.1527313083317114e+00, 0.8661173016992874e+00, 0.8044520995141484e+00, 0.4399581275677011e+00, 0.6590078920726725e+00}, + n: 21, + zOut: []float64{0.2654671686308588e+01, 0.2409077903364995e+01, 0.2090082174851482e+01, 0.1940605709090538e+01, 0.1909776782011245e+01, 0.1629990303184568e+01, 0.1566043989638403e+01, 0.1473218419964192e+01, 0.1321200673977518e+01, 0.1275771383585936e+01, 0.9826159405158056e+00, 0.8184731809209335e+00, 0.6715230178331039e+00, 0.5669018352800101e+00, 0.3641671875118937e+00, 0.2699135892930649e+00, 0.1446269090001342e+00, 0.8937359624216266e-01, 0.5860931848549461e-01, 0.3094108728329079e-01, 0.6182329613034853e-03, 0.9233403587623171e-22, 0.2552413399194827e-12, 0.1309204970235786e-12, 0.1566043989638403e+01, 0.2003762315848454e-18, 0.9222125989396634e-12, 0.1831545934750265e-11, 0.1473218419964192e+01, 0.1899959664962892e-11, 0.5682525570474145e-10, 0.1113952503246639e-09, 0.1321200673977518e+01, 0.5311918312726658e-16, 0.2115322762070563e-08, 0.1208354514490733e-08, 0.1275771383585936e+01, 0.1424261829852281e-16, 0.6059411460508015e-08, 0.1032477558436010e-07, 0.9826159405158056e+00, 0.5397353496908506e-22, 0.2226820292130466e+02, 0.2226820292130466e+02, 0.9500000000000000e+02, 0.2927437641723356e+01, 0.1052631578947368e+01, 0.2555943178691847e-06, 0.6715230178331039e+00, 0.1218922493912602e-18, 0.4789189736721279e-06, 0.5466231772857101e-06, 0.5669018352800101e+00, 0.1893266172530428e-27, 0.9952011217509916e-06, 0.1128113648538554e-05, 0.3641671875118937e+00, 0.1993230626440035e-24, 0.2427606592544749e-05, 0.2156829827516489e-05, 0.2699135892930649e+00, 0.2761013168273541e-29, 0.3900010091178873e-05, 0.3393761026267541e-05, 0.1446269090001342e+00, 0.2150416673893778e-15, 0.8242388755752233e-05, 0.1105863902029246e-04, 0.8937359624216266e-01, 0.2934562567422164e-27, 0.3455780897358792e-04, 0.2250328346305790e-04, 0.5860931848549461e-01, 0.8147325136863849e-21, 0.2346512312055719e-04, 0.8962661529405173e-05, 0.3094108728329079e-01, 0.2460979885389816e-17, 0.5238375287742530e-03, 0.1080192509223090e-02, 0.6182329613034853e-03, 0.8383483586160605e-23, 0.7400552677463828e-02, 0.1581674273722876e-01}, + info: 0, + }, + { + z: []float64{0.7940281584071446e+00, 0.8540600349699839e+00, 0.8158431165852809e-01, 0.5431841788581357e+00, 0.3696613346727944e+00, 0.2343742079469738e+00, 0.4891909888056500e-01, 0.6769876984160987e+00, 0.4777767465052760e+00, 0.1867381312399053e+00, 0.2018744873845245e+00, 0.5511201479607295e+00, 0.6938788283912793e+00, 0.8167542438070282e+00, 0.7904606414789531e+00, 0.9443564310071292e+00, 0.7287247677237652e-01, 0.8645122013586991e+00, 0.1884651475116826e+00, 0.3844755283611681e+00, 0.9959264361467982e+00, 0.6424370932833342e+00, 0.1972122925077952e+00, 0.2842024247377670e+00, 0.9819646913482807e+00, 0.9118347224008859e+00, 0.8184691845197246e+00, 0.7051587281589254e+00, 0.7604703230109544e+00, 0.6312964755149379e+00, 0.5240863862347888e+00, 0.3442050916384676e-01, 0.2415614308212055e+00, 0.2814868323669945e+00, 0.6529284673126197e+00, 0.3727305084153835e+00, 0.5033733868757848e+00, 0.2317122058804952e+00, 0.7555584130128312e+00, 0.5854566742645219e+00, 0.5481204696337160e+00, 0.8479425268049923e+00, 0.2310874615764000e+00, 0.1250993726775007e-01, 0.6243285982203539e-01, 0.8533587246073391e+00, 0.9203815588639257e+00, 0.9256849509751471e+00, 0.6691405057262187e+00, 0.8847091531299658e+00, 
0.6783572983386376e+00, 0.4701257141291857e+00, 0.8976078424378102e+00, 0.8575018884445876e+00, 0.4119363561363949e+00, 0.2824477027676924e+00, 0.2787507690368071e+00, 0.7994878185780909e+00, 0.6141832897278305e+00, 0.6772728066124333e+00, 0.1568652581579784e+00, 0.8025492691231176e+00, 0.2609459151100056e+00, 0.4956700691019098e+00, 0.1008839464621498e+00, 0.6129709499983976e+00, 0.4551038858718992e-02, 0.8382785474023564e+00, 0.9327452694814308e+00, 0.9710431593941808e+00, 0.3785578217695214e+00, 0.9620839159000718e+00, 0.3183561960196257e-01, 0.9167635157854341e+00, 0.8989971039988554e+00, 0.2723769512210017e-01, 0.4176537489735596e+00, 0.9619881273217982e+00, 0.8761769579995293e+00, 0.6385245520487358e+00, 0.6821739872929905e+00, 0.3927943300877799e+00, 0.3299501391296433e-01, 0.6026481165267817e+00}, + n: 21, + zOut: []float64{0.2841529467847260e+01, 0.2556740368064117e+01, 0.2029069210305357e+01, 0.1816492749229813e+01, 0.1807397947918166e+01, 0.1724050761249482e+01, 0.1394683696862690e+01, 0.1363282300837870e+01, 0.1235317529564993e+01, 0.1007821728138393e+01, 0.9728972767837659e+00, 0.8110838192609224e+00, 0.6190683239156912e+00, 0.3314348466978195e+00, 0.2969816495631171e+00, 0.2506421226611442e+00, 0.1551624233480766e+00, 0.1141049603637759e+00, 0.6806098279643324e-01, 0.4065442795375918e-01, 0.1509783792061944e-04, 0.2745446733802996e-19, 0.2334033424405603e-14, 0.7232047343832039e-15, 0.1394683696862690e+01, 0.7652022750203868e-17, 0.1502464319787860e-09, 0.5990324067027169e-10, 0.1363282300837870e+01, 0.5511008105786353e-19, 0.1215529748555577e-07, 0.3576186028975079e-08, 0.1235317529564993e+01, 0.6271335389136957e-17, 0.2589527811059762e-05, 0.3563684464760320e-05, 0.1007821728138393e+01, 0.6928731028484552e-15, 0.2286607739649456e-04, 0.2916328836696281e-04, 0.9728972767837659e+00, 0.4013724285764508e-25, 0.2143649169120057e+02, 0.2143649169120056e+02, 0.1080000000000000e+03, 0.3174603174603174e+01, 0.5555555555555555e+01, 0.5006788488237952e-04, 0.6190683239156912e+00, 0.1272779738919789e-24, 0.5392273695714327e-05, 0.6586751690782050e-05, 0.3314348466978195e+00, 0.9952016744778614e-24, 0.4588114650322476e-05, 0.4104030919118472e-05, 0.2969816495631171e+00, 0.1254277227268884e-20, 0.1579210272170037e-05, 0.2523508055026877e-05, 0.2506421226611442e+00, 0.1514835081255253e-22, 0.9117975197333799e-05, 0.1415311657319248e-04, 0.1551624233480766e+00, 0.3272195034856757e-26, 0.3128414626489953e-04, 0.4770966856082524e-04, 0.1141049603637759e+00, 0.7575261888623807e-20, 0.6277758538102519e-05, 0.4644422903178713e-05, 0.6806098279643324e-01, 0.3715389965619895e-21, 0.5956965112658162e-05, 0.4447744978930882e-05, 0.4065442795375918e-01, 0.1972152263052530e-30, 0.2965586553650948e-04, 0.1900611263569203e-04, 0.1509783792061944e-04, 0.1009741958682895e-26, 0.1608958133772104e-02, 0.9583670521235791e-03}, + info: 0, + }, + } { + z := make([]float64, len(test.z)) + copy(z, test.z) + + info := impl.Dlasq2(test.n, z) + if !floats.EqualApprox(test.zOut, z, dTol) { + diff := make([]float64, len(z)) + floats.SubTo(diff, z, test.zOut) + for i := range diff { + diff[i] = math.Abs(diff[i]) + } + t.Errorf("Case %v, Z Mismatch", c) + } + if test.info != info { + t.Errorf("Info mismatch. Want %v, got %v", test.info, info) + } + } + + rnd := rand.New(rand.NewSource(1)) + // Perform a bunch of random tests to check for access out of bounds or + // infinite loops. + // TODO(btracey): Implement direct tests. 
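+	// NOTE: one possible direct test is sketched below, under the assumption
+	// (not verified against this implementation) that the first 2n entries of z
+	// follow netlib DLASQ2's qd convention: z[2*i] = q_i and z[2*i+1] = e_i, the
+	// squared diagonal and superdiagonal entries of an upper bidiagonal matrix B.
+	// Under that assumption the values returned in z[0:n] are the eigenvalues of
+	// the symmetric positive definite tridiagonal T = B^T * B, whose entries are
+	//
+	//	T[i][i]   = q_i + e_{i-1}   (with e_{-1} taken as 0)
+	//	T[i][i+1] = sqrt(q_i * e_i)
+	//
+	// and which is diagonally similar to the L*U product formed in the
+	// commented-out code further down. A hypothetical direct test could build
+	// the diagonal d and off-diagonal e of T from zCopy inside the loop below:
+	//
+	//	d := make([]float64, n)
+	//	e := make([]float64, n-1)
+	//	for i := 0; i < n; i++ {
+	//		d[i] = zCopy[2*i]
+	//		if i > 0 {
+	//			d[i] += zCopy[2*i-1]
+	//		}
+	//		if i < n-1 {
+	//			e[i] = math.Sqrt(zCopy[2*i] * zCopy[2*i+1])
+	//		}
+	//	}
+	//
+	// and then check that det(T - lambda*I) vanishes (or that a symmetric
+	// tridiagonal eigensolver such as Dsterf reproduces the same values) for
+	// each eigenvalue lambda returned by Dlasq2.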
+	// bi := blas64.Implementation()
+	for _, n := range []int{5, 8, 20, 25} {
+		for k := 0; k < 10; k++ {
+			z := make([]float64, 4*n)
+			for i := range z {
+				z[i] = rnd.Float64()
+			}
+			zCopy := make([]float64, len(z))
+			copy(zCopy, z)
+
+			// Compute the eigenvalues
+			impl.Dlasq2(n, z)
+
+			// Below is the code to test the eigenvalues. Eventually implement
+			// real tests.
+			// The code below is missing the transformation from L and U into
+			// the symmetric tridiagonal matrix.
+			// See discussion http://icl.cs.utk.edu/lapack-forum/viewtopic.php?f=5&t=4839
+			// for format.
+
+			/*
+				ldl := n
+				ldu := n
+				u := make([]float64, n*n)
+				for i := 0; i < n; i++ {
+					u[i*ldu+i] = zCopy[2*i]
+					if i != n-1 {
+						u[i*ldu+i+1] = 1
+					}
+				}
+				l := make([]float64, n*n)
+				for i := 0; i < n; i++ {
+					l[i*ldl+i] = 1
+					if i != n-1 {
+						l[(i+1)*ldl+i] = zCopy[2*i+1]
+					}
+				}
+
+				ldTriDi := n
+				triDi := make([]float64, n*n)
+				bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, l, ldl, u, ldu, 0, triDi, ldTriDi)
+
+				tridi2 := make([]float64, n*n)
+				bi.Dgemm(blas.Trans, blas.NoTrans, n, n, n, 1, triDi, n, triDi, n, 0, tridi2, n)
+
+				// Eigenvalues have the property that det(A - lambda I ) = 0
+				triDiCopy := make([]float64, len(triDi))
+				copy(triDiCopy, triDi)
+				for _, lambda := range z[1:n] {
+					copy(triDi, triDiCopy)
+					for i := 0; i < n; i++ {
+						triDi[i*n+i] -= lambda
+					}
+
+					// Compute LU
+					//ipiv := make([]int, n)
+					//impl.Dgetrf(n, n, triDi, n, ipiv)
+					ok := impl.Dpotrf(blas.Upper, n, triDi, n)
+					fmt.Println(ok)
+
+					var det float64
+					for i := 0; i < n; i++ {
+						det += math.Log(math.Abs(triDi[i*n+i]))
+					}
+					fmt.Println("det = ", math.Exp(det))
+				}
+			*/
+		}
+	}
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq3.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq3.go
new file mode 100644
index 0000000..99a9253
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq3.go
@@ -0,0 +1,2703 @@
+// Copyright ©2015 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/floats" +) + +type Dlasq3er interface { + Dlasq3(i0, n0 int, z []float64, pp int, dmin, sigma, desig, qmax float64, nFail, iter, nDiv int, ttype int, dmin1, dmin2, dn, dn1, dn2, g, tau float64) ( + i0Out, n0Out, ppOut int, dminOut, sigmaOut, desigOut, qmaxOut float64, nFailOut, iterOut, nDivOut, ttypeOut int, dmin1Out, dmin2Out, dnOut, dn1Out, dn2Out, gOut, tauOut float64) +} + +type dlasq3teststruct struct { + z []float64 + i0, n0, pp int + dmin, desig, sigma, qmax float64 + nFail, iter, nDiv, ttype int + dmin1, dmin2, dn, dn1, dn2, g, tau float64 + + zOut []float64 + i0Out, n0Out, ppOut int + dminOut, desigOut, sigmaOut, qmaxOut float64 + nFailOut, iterOut, nDivOut, ttypeOut int + dmin1Out, dmin2Out, dnOut, dn1Out, dn2Out, gOut, tauOut float64 +} + +func Dlasq3Test(t *testing.T, impl Dlasq3er) { + dTol := 1e-14 + // Tests computed from calling the netlib Dlasq + for _, test := range []dlasq3teststruct{ + { + i0: 1, + n0: 21, + z: []float64{0.1914365246180821e+01, 0.1564384297703890e+01, 0.2493389162143899e+00, 0.3499809484769305e+00, 0.1315996513131545e+01, 0.1363862112490627e+01, 0.9898466611970759e-01, 0.2014733168553078e+00, 0.6023973979587287e+00, 0.6465544792741794e+00, 0.2210033410638781e-02, 0.5482758480425683e-01, 0.9861857233678967e-01, 0.2428190810745492e-01, 0.4756321484454819e+00, 0.7654669763997353e-01, 0.2588748143677115e+00, 0.6127784069508770e+00, 0.1078611376690004e+00, 0.1217285558623164e+00, 0.6442896492255246e+00, 0.2293835804898155e+00, 0.6203230486639705e+00, 0.5227672064047094e+00, 0.3695660678607585e+00, 0.7645233184745865e+00, 0.5378838054252265e+00, 0.2253657980501426e+00, 0.3562533181264623e+00, 0.8820486722335483e+00, 0.2222132496436145e-01, 0.1208845131814035e-01, 0.1275094303021685e+01, 0.6548746852163357e+00, 0.1647324354821218e+00, 0.6424409427697111e+00, 0.1007530576543866e+01, 0.3269551736546701e+00, 0.3453881601783118e+00, 0.8453078383713172e+00, 0.2679391719153404e+00, 0.4116714838778281e+00, 0.7328677736683723e+00, 0.2016558482158241e+00, 0.8360828138307410e+00, 0.9737579452195326e+00, 0.4813660709592822e+00, 0.5951926422795808e+00, 0.6495370513676459e+00, 0.6761876248148171e+00, 0.2325475880222648e+00, 0.4547154975121112e+00, 0.1993624802893807e+00, 0.3321819367342255e+00, 0.3782318916911257e+00, 0.9972813157741996e-01, 0.9830449403503746e+00, 0.7561080996844842e+00, 0.4429733864040367e+00, 0.6051687323570161e+00, 0.1173279550602403e+01, 0.7195724480316686e+00, 0.5035524069144587e+00, 0.8966804889747714e+00, 0.3058980395058521e+00, 0.6588832353928662e+00, 0.3014634433415453e+00, 0.1505672110274446e+00, 0.1289422237567578e+01, 0.6124645310993601e+00, 0.7583364305799440e+00, 0.9784211498097629e+00, 0.4977814779461571e+00, 0.9993813577491869e+00, 0.2841468847862598e+00, 0.2567365507769143e+00, 0.9257539794205765e+00, 0.5509268385614666e+00, 0.5231355605450990e-04, 0.6589740256453697e+00, 0.2117869221381033e-04, 0.7349224826832024e-04, 0.0000000000000000e+00, 0.0000000000000000e+00}, + pp: 0, + dmin: -0.0000000000000000, + desig: 0.0000000000000000, + qmax: 2.1637041623952107, + ttype: 0, + dmin1: 0.0000000000000000, + dmin2: 0.0000000000000000, + dn: 0.0000000000000000, + dn1: 0.0000000000000000, + dn2: 0.0000000000000000, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 2, + sigma: 0.0000000000000000, + nDiv: 40, + zOut: []float64{0.1914365246180821e+01, 0.2163704162395211e+01, 0.2493389162143899e+00, 0.1516515751224039e+00, 0.1315996513131545e+01, 
0.1263329604128848e+01, 0.9898466611970759e-01, 0.4719916727467415e-01, 0.6023973979587287e+00, 0.5574082640946934e+00, 0.2210033410638781e-02, 0.3910066531356214e-03, 0.9861857233678967e-01, 0.5738597141291359e+00, 0.4756321484454819e+00, 0.2145632131068746e+00, 0.2588748143677115e+00, 0.1521727389298373e+00, 0.1078611376690004e+00, 0.4566771620366771e+00, 0.6442896492255246e+00, 0.8079355358528180e+00, 0.6203230486639705e+00, 0.2837483186776231e+00, 0.3695660678607585e+00, 0.6237015546083620e+00, 0.5378838054252265e+00, 0.3072349091217998e+00, 0.3562533181264623e+00, 0.7123973396902394e-01, 0.2222132496436145e-01, 0.3977314805803597e+00, 0.1275094303021685e+01, 0.1042095257923447e+01, 0.1647324354821218e+00, 0.1592685164190333e+00, 0.1007530576543866e+01, 0.1193650220303144e+01, 0.3453881601783118e+00, 0.7752942700755104e-01, 0.2679391719153404e+00, 0.9232775185761617e+00, 0.7328677736683723e+00, 0.6636554427529671e+00, 0.8360828138307410e+00, 0.6537934420370561e+00, 0.4813660709592822e+00, 0.4782322339990674e+00, 0.6495370513676459e+00, 0.4038524053908432e+00, 0.2325475880222648e+00, 0.1147975431483785e+00, 0.1993624802893807e+00, 0.4627968288321279e+00, 0.3782318916911257e+00, 0.8034172324482011e+00, 0.9830449403503746e+00, 0.6226010943062101e+00, 0.4429733864040367e+00, 0.8347746582554776e+00, 0.1173279550602403e+01, 0.8420572992613844e+00, 0.5035524069144587e+00, 0.1829278057427913e+00, 0.3058980395058521e+00, 0.4244336771046062e+00, 0.3014634433415453e+00, 0.9158407747236312e+00, 0.1289422237567578e+01, 0.1131917893423890e+01, 0.7583364305799440e+00, 0.3334922359541972e+00, 0.4977814779461571e+00, 0.4484361267782198e+00, 0.2841468847862598e+00, 0.5865943745895725e+00, 0.9257539794205765e+00, 0.3392119183870583e+00, 0.5231355605450990e-04, 0.3266196269153995e-08, 0.2117869221381033e-04, 0.2117542601754118e-04, 0.0000000000000000e+00, 0.3910066531356214e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 2.1175426017541180e-005, + desigOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + qmaxOut: 2.1637041623952107, + nFailOut: 0, + iterOut: 3, + nDivOut: 62, + ttypeOut: -1, + dmin1Out: 4.4311601260836921e-002, + dmin2Out: 4.4311601260836921e-002, + dnOut: 2.1175426017541180e-005, + dn1Out: 0.33915960483100382, + dn2Out: 0.16428924199195991, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.1914365246180821e+01, 0.2163704162395211e+01, 0.2493389162143899e+00, 0.1516515751224039e+00, 0.1315996513131545e+01, 0.1263329604128848e+01, 0.9898466611970759e-01, 0.4719916727467415e-01, 0.6023973979587287e+00, 0.5574082640946934e+00, 0.2210033410638781e-02, 0.3910066531356214e-03, 0.9861857233678967e-01, 0.5738597141291359e+00, 0.4756321484454819e+00, 0.2145632131068746e+00, 0.2588748143677115e+00, 0.1521727389298373e+00, 0.1078611376690004e+00, 0.4566771620366771e+00, 0.6442896492255246e+00, 0.8079355358528180e+00, 0.6203230486639705e+00, 0.2837483186776231e+00, 0.3695660678607585e+00, 0.6237015546083620e+00, 0.5378838054252265e+00, 0.3072349091217998e+00, 0.3562533181264623e+00, 0.7123973396902394e-01, 0.2222132496436145e-01, 0.3977314805803597e+00, 0.1275094303021685e+01, 0.1042095257923447e+01, 0.1647324354821218e+00, 0.1592685164190333e+00, 0.1007530576543866e+01, 0.1193650220303144e+01, 0.3453881601783118e+00, 0.7752942700755104e-01, 0.2679391719153404e+00, 0.9232775185761617e+00, 0.7328677736683723e+00, 0.6636554427529671e+00, 0.8360828138307410e+00, 0.6537934420370561e+00, 0.4813660709592822e+00, 0.4782322339990674e+00, 
0.6495370513676459e+00, 0.4038524053908432e+00, 0.2325475880222648e+00, 0.1147975431483785e+00, 0.1993624802893807e+00, 0.4627968288321279e+00, 0.3782318916911257e+00, 0.8034172324482011e+00, 0.9830449403503746e+00, 0.6226010943062101e+00, 0.4429733864040367e+00, 0.8347746582554776e+00, 0.1173279550602403e+01, 0.8420572992613844e+00, 0.5035524069144587e+00, 0.1829278057427913e+00, 0.3058980395058521e+00, 0.4244336771046062e+00, 0.3014634433415453e+00, 0.9158407747236312e+00, 0.1289422237567578e+01, 0.1131917893423890e+01, 0.7583364305799440e+00, 0.3334922359541972e+00, 0.4977814779461571e+00, 0.4484361267782198e+00, 0.2841468847862598e+00, 0.5865943745895725e+00, 0.9257539794205765e+00, 0.3392119183870583e+00, 0.5231355605450990e-04, 0.3266196269153995e-08, 0.2117869221381033e-04, 0.2117542601754118e-04, 0.0000000000000000e+00, 0.3910066531356214e-03}, + pp: 1, + dmin: 2.1175426017541180e-005, + desig: 0.0000000000000000, + qmax: 2.1637041623952107, + ttype: -1, + dmin1: 4.4311601260836921e-002, + dmin2: 4.4311601260836921e-002, + dn: 2.1175426017541180e-005, + dn1: 0.33915960483100382, + dn2: 0.16428924199195991, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 3, + sigma: 0.0000000000000000, + nDiv: 62, + zOut: []float64{0.2315355737517615e+01, 0.2163704162395211e+01, 0.8274578340618610e-01, 0.1516515751224039e+00, 0.1227782987997336e+01, 0.1263329604128848e+01, 0.2142822156235013e-01, 0.4719916727467415e-01, 0.5363710491854788e+00, 0.5574082640946934e+00, 0.4183353417969536e-03, 0.3910066531356214e-03, 0.7880045918942136e+00, 0.5738597141291359e+00, 0.4143462125464707e-01, 0.2145632131068746e+00, 0.5674152797118673e+00, 0.1521727389298373e+00, 0.6502569120260687e+00, 0.4566771620366771e+00, 0.4414269425043723e+00, 0.8079355358528180e+00, 0.4009140594652070e+00, 0.2837483186776231e+00, 0.5300224042649548e+00, 0.6237015546083620e+00, 0.4129510944388858e-01, 0.3072349091217998e+00, 0.4276761051054951e+00, 0.7123973396902394e-01, 0.9691308092544145e+00, 0.3977314805803597e+00, 0.2322329650880660e+00, 0.1042095257923447e+01, 0.8186215063776209e+00, 0.1592685164190333e+00, 0.4525581409330741e+00, 0.1193650220303144e+01, 0.1581701233715052e+00, 0.7752942700755104e-01, 0.1428762837957623e+01, 0.9232775185761617e+00, 0.3036848136842134e+00, 0.6636554427529671e+00, 0.8283408623519102e+00, 0.6537934420370561e+00, 0.2331591338951825e+00, 0.4782322339990674e+00, 0.2854908146440392e+00, 0.4038524053908432e+00, 0.1860933389154074e+00, 0.1147975431483785e+00, 0.1080120722364922e+01, 0.4627968288321279e+00, 0.4631042046962229e+00, 0.8034172324482011e+00, 0.9942715478654648e+00, 0.6226010943062101e+00, 0.7069779837626068e+00, 0.8347746582554776e+00, 0.3180071212415688e+00, 0.8420572992613844e+00, 0.2441477440283845e+00, 0.1829278057427913e+00, 0.1096126707799853e+01, 0.4244336771046062e+00, 0.9457451890006905e+00, 0.9158407747236312e+00, 0.5196649403773971e+00, 0.1131917893423890e+01, 0.2877815203259632e+00, 0.3334922359541972e+00, 0.7472489810418290e+00, 0.4484361267782198e+00, 0.2662831374385604e+00, 0.5865943745895725e+00, 0.7292878421469419e-01, 0.3392119183870583e+00, 0.9483648767903632e-12, 0.3266196269153995e-08, 0.2117542506917630e-04, 0.2117542601754118e-04, 0.4183353417969536e-03, 0.3910066531356214e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 1, + dminOut: 2.1175425069176302e-005, + desigOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + qmaxOut: 2.1637041623952107, + nFailOut: 0, + iterOut: 4, + nDivOut: 84, + ttypeOut: -4, + dmin1Out: 2.9944624525135358e-002, + 
dmin2Out: 2.9944624525135358e-002, + dnOut: 2.1175425069176302e-005, + dn1Out: 7.2928780948497918e-002, + dn2Out: 0.16065460645225654, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2315355737517615e+01, 0.2163704162395211e+01, 0.8274578340618610e-01, 0.1516515751224039e+00, 0.1227782987997336e+01, 0.1263329604128848e+01, 0.2142822156235013e-01, 0.4719916727467415e-01, 0.5363710491854788e+00, 0.5574082640946934e+00, 0.4183353417969536e-03, 0.3910066531356214e-03, 0.7880045918942136e+00, 0.5738597141291359e+00, 0.4143462125464707e-01, 0.2145632131068746e+00, 0.5674152797118673e+00, 0.1521727389298373e+00, 0.6502569120260687e+00, 0.4566771620366771e+00, 0.4414269425043723e+00, 0.8079355358528180e+00, 0.4009140594652070e+00, 0.2837483186776231e+00, 0.5300224042649548e+00, 0.6237015546083620e+00, 0.4129510944388858e-01, 0.3072349091217998e+00, 0.4276761051054951e+00, 0.7123973396902394e-01, 0.9691308092544145e+00, 0.3977314805803597e+00, 0.2322329650880660e+00, 0.1042095257923447e+01, 0.8186215063776209e+00, 0.1592685164190333e+00, 0.4525581409330741e+00, 0.1193650220303144e+01, 0.1581701233715052e+00, 0.7752942700755104e-01, 0.1428762837957623e+01, 0.9232775185761617e+00, 0.3036848136842134e+00, 0.6636554427529671e+00, 0.8283408623519102e+00, 0.6537934420370561e+00, 0.2331591338951825e+00, 0.4782322339990674e+00, 0.2854908146440392e+00, 0.4038524053908432e+00, 0.1860933389154074e+00, 0.1147975431483785e+00, 0.1080120722364922e+01, 0.4627968288321279e+00, 0.4631042046962229e+00, 0.8034172324482011e+00, 0.9942715478654648e+00, 0.6226010943062101e+00, 0.7069779837626068e+00, 0.8347746582554776e+00, 0.3180071212415688e+00, 0.8420572992613844e+00, 0.2441477440283845e+00, 0.1829278057427913e+00, 0.1096126707799853e+01, 0.4244336771046062e+00, 0.9457451890006905e+00, 0.9158407747236312e+00, 0.5196649403773971e+00, 0.1131917893423890e+01, 0.2877815203259632e+00, 0.3334922359541972e+00, 0.7472489810418290e+00, 0.4484361267782198e+00, 0.2662831374385604e+00, 0.5865943745895725e+00, 0.7292878421469419e-01, 0.3392119183870583e+00, 0.9483648767903632e-12, 0.3266196269153995e-08, 0.2117542506917630e-04, 0.2117542601754118e-04, 0.4183353417969536e-03, 0.3910066531356214e-03}, + pp: 0, + dmin: 2.1175425069176302e-005, + desig: 0.0000000000000000, + qmax: 2.1637041623952107, + ttype: -4, + dmin1: 2.9944624525135358e-002, + dmin2: 2.9944624525135358e-002, + dn: 2.1175425069176302e-005, + dn1: 7.2928780948497918e-002, + dn2: 0.16065460645225654, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 4, + sigma: 0.0000000000000000, + nDiv: 84, + zOut: []float64{0.2315355737517615e+01, 0.2398080345610006e+01, 0.8274578340618610e-01, 0.4236466279397526e-01, 0.1227782987997336e+01, 0.1206825371451915e+01, 0.2142822156235013e-01, 0.9523728911788614e-02, 0.5363710491854788e+00, 0.5272444803016919e+00, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.7880045918942136e+00, 0.8287928057414093e+00, 0.4143462125464707e-01, 0.2836732781232222e-01, 0.5674152797118673e+00, 0.1189283688611819e+01, 0.6502569120260687e+00, 0.2413561400585997e+00, 0.4414269425043723e+00, 0.6009636865971842e+00, 0.4009140594652070e+00, 0.3535878097802652e+00, 0.5300224042649548e+00, 0.2177085286147829e+00, 0.4129510944388858e-01, 0.8112190955144877e-01, 0.4276761051054951e+00, 0.1315663829494665e+01, 0.9691308092544145e+00, 0.1710650671895379e+00, 0.2322329650880660e+00, 0.8797682289623537e+00, 0.8186215063776209e+00, 0.4211038940233675e+00, 0.4525581409330741e+00, 
0.1896031949674164e+00, 0.1581701233715052e+00, 0.1191897606932286e+01, 0.1428762837957623e+01, 0.5405288693957555e+00, 0.3036848136842134e+00, 0.4653859482687157e+00, 0.8283408623519102e+00, 0.5960928726645816e+00, 0.2331591338951825e+00, 0.1116684901463164e+00, 0.2854908146440392e+00, 0.3598944880993349e+00, 0.1860933389154074e+00, 0.5585061130503639e+00, 0.1080120722364922e+01, 0.9846976386969850e+00, 0.4631042046962229e+00, 0.4676068229793028e+00, 0.9942715478654648e+00, 0.1233621533334973e+01, 0.7069779837626068e+00, 0.1822471700779458e+00, 0.3180071212415688e+00, 0.3798865198782122e+00, 0.2441477440283845e+00, 0.7044652781161848e+00, 0.1096126707799853e+01, 0.1337385443370563e+01, 0.9457451890006905e+00, 0.3674861422265960e+00, 0.5196649403773971e+00, 0.4399391431629689e+00, 0.2877815203259632e+00, 0.4888049885267526e+00, 0.7472489810418290e+00, 0.5247059546398414e+00, 0.2662831374385604e+00, 0.3701064434002514e-01, 0.7292878421469419e-01, 0.3589696456182207e-01, 0.9483648767903632e-12, 0.5594353069081231e-15, 0.2117542506917630e-04, 0.1112732565966979e-09, 0.4183353417969536e-03, 0.6252320936560726e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 1.1127325659669794e-010, + desigOut: 0.0000000000000000, + sigmaOut: 2.1175313795360271e-005, + qmaxOut: 2.1637041623952107, + nFailOut: 0, + iterOut: 5, + nDivOut: 106, + ttypeOut: -4, + dmin1Out: 3.1433071595911154e-002, + dmin2Out: 3.1433071595911154e-002, + dnOut: 1.1127325659669794e-010, + dn1Out: 3.5896964560873705e-002, + dn2Out: 0.25842281720128102, + gOut: 0.0000000000000000, + tauOut: 2.1175313795360271e-005, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2315355737517615e+01, 0.2398080345610006e+01, 0.8274578340618610e-01, 0.4236466279397526e-01, 0.1227782987997336e+01, 0.1206825371451915e+01, 0.2142822156235013e-01, 0.9523728911788614e-02, 0.5363710491854788e+00, 0.5272444803016919e+00, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.7880045918942136e+00, 0.8287928057414093e+00, 0.4143462125464707e-01, 0.2836732781232222e-01, 0.5674152797118673e+00, 0.1189283688611819e+01, 0.6502569120260687e+00, 0.2413561400585997e+00, 0.4414269425043723e+00, 0.6009636865971842e+00, 0.4009140594652070e+00, 0.3535878097802652e+00, 0.5300224042649548e+00, 0.2177085286147829e+00, 0.4129510944388858e-01, 0.8112190955144877e-01, 0.4276761051054951e+00, 0.1315663829494665e+01, 0.9691308092544145e+00, 0.1710650671895379e+00, 0.2322329650880660e+00, 0.8797682289623537e+00, 0.8186215063776209e+00, 0.4211038940233675e+00, 0.4525581409330741e+00, 0.1896031949674164e+00, 0.1581701233715052e+00, 0.1191897606932286e+01, 0.1428762837957623e+01, 0.5405288693957555e+00, 0.3036848136842134e+00, 0.4653859482687157e+00, 0.8283408623519102e+00, 0.5960928726645816e+00, 0.2331591338951825e+00, 0.1116684901463164e+00, 0.2854908146440392e+00, 0.3598944880993349e+00, 0.1860933389154074e+00, 0.5585061130503639e+00, 0.1080120722364922e+01, 0.9846976386969850e+00, 0.4631042046962229e+00, 0.4676068229793028e+00, 0.9942715478654648e+00, 0.1233621533334973e+01, 0.7069779837626068e+00, 0.1822471700779458e+00, 0.3180071212415688e+00, 0.3798865198782122e+00, 0.2441477440283845e+00, 0.7044652781161848e+00, 0.1096126707799853e+01, 0.1337385443370563e+01, 0.9457451890006905e+00, 0.3674861422265960e+00, 0.5196649403773971e+00, 0.4399391431629689e+00, 0.2877815203259632e+00, 0.4888049885267526e+00, 0.7472489810418290e+00, 0.5247059546398414e+00, 0.2662831374385604e+00, 0.3701064434002514e-01, 0.7292878421469419e-01, 0.3589696456182207e-01, 0.9483648767903632e-12, 
0.5594353069081231e-15, 0.2117542506917630e-04, 0.1112732565966979e-09, 0.4183353417969536e-03, 0.6252320936560726e-03}, + pp: 1, + dmin: 1.1127325659669794e-010, + desig: 0.0000000000000000, + qmax: 2.1637041623952107, + ttype: -4, + dmin1: 3.1433071595911154e-002, + dmin2: 3.1433071595911154e-002, + dn: 1.1127325659669794e-010, + dn1: 3.5896964560873705e-002, + dn2: 0.25842281720128102, + g: 0.0000000000000000, + tau: 2.1175313795360271e-005, + nFail: 0, + iter: 5, + sigma: 2.1175313795360271e-005, + nDiv: 106, + zOut: []float64{0.2440445008292708e+01, 0.2398080345610006e+01, 0.2094976520226600e-01, 0.4236466279397526e-01, 0.1195399335050165e+01, 0.1206825371451915e+01, 0.4200549016048655e-02, 0.9523728911788614e-02, 0.5236691632680260e+00, 0.5272444803016919e+00, 0.9895328911616120e-03, 0.6252320936560726e-03, 0.8561706005512968e+00, 0.8287928057414093e+00, 0.3940429656773515e-01, 0.2836732781232222e-01, 0.1391235531991410e+01, 0.1189283688611819e+01, 0.1042571673718422e+00, 0.2413561400585997e+00, 0.8502943288943339e+00, 0.6009636865971842e+00, 0.9053227710395735e-01, 0.3535878097802652e+00, 0.2082981609510011e+00, 0.2177085286147829e+00, 0.5123864833424303e+00, 0.8112190955144877e-01, 0.9743424132304999e+00, 0.1315663829494665e+01, 0.1544607000116935e+00, 0.1710650671895379e+00, 0.1146411422862754e+01, 0.8797682289623537e+00, 0.6964571542795012e-01, 0.4211038940233675e+00, 0.1311855086360479e+01, 0.1896031949674164e+00, 0.4911023119923957e+00, 0.1191897606932286e+01, 0.5148125055608023e+00, 0.5405288693957555e+00, 0.5388626806938843e+00, 0.4653859482687157e+00, 0.1688986820057405e+00, 0.5960928726645816e+00, 0.2379466412690434e+00, 0.1116684901463164e+00, 0.6804539597693821e+00, 0.3598944880993349e+00, 0.8082246312519304e+00, 0.5585061130503639e+00, 0.6440798303130841e+00, 0.9846976386969850e+00, 0.8956185534970393e+00, 0.4676068229793028e+00, 0.5202501498046066e+00, 0.1233621533334973e+01, 0.1330768347199243e+00, 0.1822471700779458e+00, 0.9512749631631994e+00, 0.3798865198782122e+00, 0.9903988276741268e+00, 0.7044652781161848e+00, 0.7144727578117591e+00, 0.1337385443370563e+01, 0.2262808998212762e+00, 0.3674861422265960e+00, 0.7024632317571722e+00, 0.4399391431629689e+00, 0.3651136124179467e+00, 0.4888049885267526e+00, 0.1966029864506465e+00, 0.5247059546398414e+00, 0.6757627705811050e-02, 0.3701064434002514e-01, 0.2913933674473832e-01, 0.3589696456182207e-01, 0.2136293938333395e-23, 0.5594353069081231e-15, 0.0000000000000000e+00, 0.1112732565966979e-09, 0.9895328911616120e-03, 0.6252320936560726e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 1, + dminOut: 0.0000000000000000, + desigOut: -5.1698788284564230e-026, + sigmaOut: 2.1175425068616867e-005, + qmaxOut: 2.1637041623952107, + nFailOut: 1, + iterOut: 7, + nDivOut: 150, + ttypeOut: -15, + dmin1Out: 2.9139336744737766e-002, + dmin2Out: 4.9426557292086552e-002, + dnOut: -2.0808762284537102e-024, + dn1Out: 2.9139336744737766e-002, + dn2Out: 0.15959234211062134, + gOut: 0.0000000000000000, + tauOut: 1.1127325659669789e-010, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2440445008292708e+01, 0.2398080345610006e+01, 0.2094976520226600e-01, 0.4236466279397526e-01, 0.1195399335050165e+01, 0.1206825371451915e+01, 0.4200549016048655e-02, 0.9523728911788614e-02, 0.5236691632680260e+00, 0.5272444803016919e+00, 0.9895328911616120e-03, 0.6252320936560726e-03, 0.8561706005512968e+00, 0.8287928057414093e+00, 0.3940429656773515e-01, 0.2836732781232222e-01, 0.1391235531991410e+01, 0.1189283688611819e+01, 0.1042571673718422e+00, 0.2413561400585997e+00, 
0.8502943288943339e+00, 0.6009636865971842e+00, 0.9053227710395735e-01, 0.3535878097802652e+00, 0.2082981609510011e+00, 0.2177085286147829e+00, 0.5123864833424303e+00, 0.8112190955144877e-01, 0.9743424132304999e+00, 0.1315663829494665e+01, 0.1544607000116935e+00, 0.1710650671895379e+00, 0.1146411422862754e+01, 0.8797682289623537e+00, 0.6964571542795012e-01, 0.4211038940233675e+00, 0.1311855086360479e+01, 0.1896031949674164e+00, 0.4911023119923957e+00, 0.1191897606932286e+01, 0.5148125055608023e+00, 0.5405288693957555e+00, 0.5388626806938843e+00, 0.4653859482687157e+00, 0.1688986820057405e+00, 0.5960928726645816e+00, 0.2379466412690434e+00, 0.1116684901463164e+00, 0.6804539597693821e+00, 0.3598944880993349e+00, 0.8082246312519304e+00, 0.5585061130503639e+00, 0.6440798303130841e+00, 0.9846976386969850e+00, 0.8956185534970393e+00, 0.4676068229793028e+00, 0.5202501498046066e+00, 0.1233621533334973e+01, 0.1330768347199243e+00, 0.1822471700779458e+00, 0.9512749631631994e+00, 0.3798865198782122e+00, 0.9903988276741268e+00, 0.7044652781161848e+00, 0.7144727578117591e+00, 0.1337385443370563e+01, 0.2262808998212762e+00, 0.3674861422265960e+00, 0.7024632317571722e+00, 0.4399391431629689e+00, 0.3651136124179467e+00, 0.4888049885267526e+00, 0.1966029864506465e+00, 0.5247059546398414e+00, 0.6757627705811050e-02, 0.3701064434002514e-01, 0.2913933674473832e-01, 0.3589696456182207e-01, 0.2136293938333395e-23, 0.5594353069081231e-15, 0.0000000000000000e+00, 0.1112732565966979e-09, 0.9895328911616120e-03, 0.6252320936560726e-03}, + pp: 0, + dmin: 0.0000000000000000, + desig: -5.1698788284564230e-026, + qmax: 2.1637041623952107, + ttype: -15, + dmin1: 2.9139336744737766e-002, + dmin2: 4.9426557292086552e-002, + dn: -2.0808762284537102e-024, + dn1: 2.9139336744737766e-002, + dn2: 0.15959234211062134, + g: 0.0000000000000000, + tau: 1.1127325659669789e-010, + nFail: 1, + iter: 7, + sigma: 2.1175425068616867e-005, + nDiv: 150, + zOut: []float64{0.2440445008292708e+01, 0.2461394773494974e+01, 0.2094976520226600e-01, 0.1017444891892999e-01, 0.1195399335050165e+01, 0.1189425435147283e+01, 0.4200549016048655e-02, 0.1849378635683999e-02, 0.5236691632680260e+00, 0.5228093175235037e+00, 0.9895328911616120e-03, 0.1620493249248586e-02, 0.8561706005512968e+00, 0.8939544038697832e+00, 0.3940429656773515e-01, 0.6132377362967349e-01, 0.1391235531991410e+01, 0.1434168925733579e+01, 0.1042571673718422e+00, 0.6181229879703373e-01, 0.8502943288943339e+00, 0.8790143072012576e+00, 0.9053227710395735e-01, 0.2145324219750511e-01, 0.2082981609510011e+00, 0.6992314020959263e+00, 0.5123864833424303e+00, 0.7139837844669097e+00, 0.9743424132304999e+00, 0.4148193287752837e+00, 0.1544607000116935e+00, 0.4268738185358478e+00, 0.1146411422862754e+01, 0.7891833197548568e+00, 0.6964571542795012e-01, 0.1157716892137957e+00, 0.1311855086360479e+01, 0.1687185709139079e+01, 0.4911023119923957e+00, 0.1498504938454686e+00, 0.5148125055608023e+00, 0.9038246924092180e+00, 0.5388626806938843e+00, 0.1006978425303630e+00, 0.1688986820057405e+00, 0.3061474807444209e+00, 0.2379466412690434e+00, 0.5288684194677825e+00, 0.6804539597693821e+00, 0.9598101715535300e+00, 0.8082246312519304e+00, 0.5423584775195998e+00, 0.6440798303130841e+00, 0.9973399062905237e+00, 0.8956185534970393e+00, 0.4671884516860899e+00, 0.5202501498046066e+00, 0.1861385328384410e+00, 0.1330768347199243e+00, 0.6800991665489665e+00, 0.9512749631631994e+00, 0.1261574624288360e+01, 0.9903988276741268e+00, 0.5608966509936130e+00, 0.7144727578117591e+00, 0.3798570066394223e+00, 
0.2262808998212762e+00, 0.4184574968871406e+00, 0.7024632317571722e+00, 0.6491193472879784e+00, 0.3651136124179467e+00, 0.1105843276664904e+00, 0.1966029864506465e+00, 0.9277628648996712e-01, 0.6757627705811050e-02, 0.2122447413720272e-02, 0.2913933674473832e-01, 0.2701688933101806e-01, 0.2136293938333395e-23, 0.0000000000000000e+00, 0.0000000000000000e+00, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 0.0000000000000000, + desigOut: -5.1698788284564230e-026, + sigmaOut: 2.1175425068616867e-005, + qmaxOut: 2.1637041623952107, + nFailOut: 1, + iterOut: 8, + nDivOut: 172, + ttypeOut: -1, + dmin1Out: 2.7016889331018056e-002, + dmin2Out: 5.3061698118516694e-002, + dnOut: 0.0000000000000000, + dn1Out: 2.7016889331018056e-002, + dn2Out: 8.6018658784156071e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2440445008292708e+01, 0.2461394773494974e+01, 0.2094976520226600e-01, 0.1017444891892999e-01, 0.1195399335050165e+01, 0.1189425435147283e+01, 0.4200549016048655e-02, 0.1849378635683999e-02, 0.5236691632680260e+00, 0.5228093175235037e+00, 0.9895328911616120e-03, 0.1620493249248586e-02, 0.8561706005512968e+00, 0.8939544038697832e+00, 0.3940429656773515e-01, 0.6132377362967349e-01, 0.1391235531991410e+01, 0.1434168925733579e+01, 0.1042571673718422e+00, 0.6181229879703373e-01, 0.8502943288943339e+00, 0.8790143072012576e+00, 0.9053227710395735e-01, 0.2145324219750511e-01, 0.2082981609510011e+00, 0.6992314020959263e+00, 0.5123864833424303e+00, 0.7139837844669097e+00, 0.9743424132304999e+00, 0.4148193287752837e+00, 0.1544607000116935e+00, 0.4268738185358478e+00, 0.1146411422862754e+01, 0.7891833197548568e+00, 0.6964571542795012e-01, 0.1157716892137957e+00, 0.1311855086360479e+01, 0.1687185709139079e+01, 0.4911023119923957e+00, 0.1498504938454686e+00, 0.5148125055608023e+00, 0.9038246924092180e+00, 0.5388626806938843e+00, 0.1006978425303630e+00, 0.1688986820057405e+00, 0.3061474807444209e+00, 0.2379466412690434e+00, 0.5288684194677825e+00, 0.6804539597693821e+00, 0.9598101715535300e+00, 0.8082246312519304e+00, 0.5423584775195998e+00, 0.6440798303130841e+00, 0.9973399062905237e+00, 0.8956185534970393e+00, 0.4671884516860899e+00, 0.5202501498046066e+00, 0.1861385328384410e+00, 0.1330768347199243e+00, 0.6800991665489665e+00, 0.9512749631631994e+00, 0.1261574624288360e+01, 0.9903988276741268e+00, 0.5608966509936130e+00, 0.7144727578117591e+00, 0.3798570066394223e+00, 0.2262808998212762e+00, 0.4184574968871406e+00, 0.7024632317571722e+00, 0.6491193472879784e+00, 0.3651136124179467e+00, 0.1105843276664904e+00, 0.1966029864506465e+00, 0.9277628648996712e-01, 0.6757627705811050e-02, 0.2122447413720272e-02, 0.2913933674473832e-01, 0.2701688933101806e-01, 0.2136293938333395e-23, 0.0000000000000000e+00, 0.0000000000000000e+00, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 0.0000000000000000, + desig: -5.1698788284564230e-026, + qmax: 2.1637041623952107, + ttype: -1, + dmin1: 2.7016889331018056e-002, + dmin2: 5.3061698118516694e-002, + dn: 0.0000000000000000, + dn1: 2.7016889331018056e-002, + dn2: 8.6018658784156071e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 1, + iter: 8, + sigma: 2.1175425068616867e-005, + nDiv: 172, + zOut: []float64{0.2471569222413904e+01, 0.2461394773494974e+01, 0.4896382518051712e-02, 0.1017444891892999e-01, 0.1186378431264915e+01, 0.1189425435147283e+01, 0.8149780515932184e-03, 
0.1849378635683999e-02, 0.5236148327211592e+00, 0.5228093175235037e+00, 0.2766627272719901e-02, 0.1620493249248586e-02, 0.9525115502267366e+00, 0.8939544038697832e+00, 0.9233342160256496e-01, 0.6132377362967349e-01, 0.1403647802928048e+01, 0.1434168925733579e+01, 0.3870906568602875e-01, 0.6181229879703373e-01, 0.8617584837127339e+00, 0.8790143072012576e+00, 0.1740717486950262e-01, 0.2145324219750511e-01, 0.1395808011693333e+01, 0.6992314020959263e+00, 0.2121884039551361e+00, 0.7139837844669097e+00, 0.6295047433559955e+00, 0.4148193287752837e+00, 0.5351535485381410e+00, 0.4268738185358478e+00, 0.3698014604305115e+00, 0.7891833197548568e+00, 0.5281978587564573e+00, 0.1157716892137957e+00, 0.1308838344228090e+01, 0.1687185709139079e+01, 0.1034799882693896e+00, 0.1498504938454686e+00, 0.9010425466701916e+00, 0.9038246924092180e+00, 0.3421413441684364e-01, 0.1006978425303630e+00, 0.8008017657953598e+00, 0.3061474807444209e+00, 0.6338813300623194e+00, 0.5288684194677825e+00, 0.8682873190108105e+00, 0.9598101715535300e+00, 0.6229686202966810e+00, 0.5423584775195998e+00, 0.8415597376799326e+00, 0.9973399062905237e+00, 0.1033340463692495e+00, 0.4671884516860899e+00, 0.7629036530181579e+00, 0.1861385328384410e+00, 0.1124645093942705e+01, 0.6800991665489665e+00, 0.6978261813392677e+00, 0.1261574624288360e+01, 0.3053203341720497e+00, 0.5608966509936130e+00, 0.4929941693545132e+00, 0.3798570066394223e+00, 0.5509778292160957e+00, 0.4184574968871406e+00, 0.2087258457383731e+00, 0.6491193472879784e+00, 0.4915348757406203e-01, 0.1105843276664904e+00, 0.4574524632962537e-01, 0.9277628648996712e-01, 0.1253505697055357e-02, 0.2122447413720272e-02, 0.2576338363396270e-01, 0.2701688933101806e-01, 0.8149780515932184e-03, 0.0000000000000000e+00, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 2.5763383633962696e-002, + desigOut: -5.1698788284564230e-026, + sigmaOut: 2.1175425068616867e-005, + qmaxOut: 2.1637041623952107, + nFailOut: 1, + iterOut: 9, + nDivOut: 193, + ttypeOut: -1, + dmin1Out: 4.3622798915905092e-002, + dmin2Out: 7.4536672467372611e-002, + dnOut: 2.5763383633962696e-002, + dn1Out: 4.3622798915905092e-002, + dn2Out: 9.8141518071882677e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2471569222413904e+01, 0.2461394773494974e+01, 0.4896382518051712e-02, 0.1017444891892999e-01, 0.1186378431264915e+01, 0.1189425435147283e+01, 0.8149780515932184e-03, 0.1849378635683999e-02, 0.5236148327211592e+00, 0.5228093175235037e+00, 0.2766627272719901e-02, 0.1620493249248586e-02, 0.9525115502267366e+00, 0.8939544038697832e+00, 0.9233342160256496e-01, 0.6132377362967349e-01, 0.1403647802928048e+01, 0.1434168925733579e+01, 0.3870906568602875e-01, 0.6181229879703373e-01, 0.8617584837127339e+00, 0.8790143072012576e+00, 0.1740717486950262e-01, 0.2145324219750511e-01, 0.1395808011693333e+01, 0.6992314020959263e+00, 0.2121884039551361e+00, 0.7139837844669097e+00, 0.6295047433559955e+00, 0.4148193287752837e+00, 0.5351535485381410e+00, 0.4268738185358478e+00, 0.3698014604305115e+00, 0.7891833197548568e+00, 0.5281978587564573e+00, 0.1157716892137957e+00, 0.1308838344228090e+01, 0.1687185709139079e+01, 0.1034799882693896e+00, 0.1498504938454686e+00, 0.9010425466701916e+00, 0.9038246924092180e+00, 0.3421413441684364e-01, 0.1006978425303630e+00, 0.8008017657953598e+00, 0.3061474807444209e+00, 0.6338813300623194e+00, 0.5288684194677825e+00, 0.8682873190108105e+00, 
0.9598101715535300e+00, 0.6229686202966810e+00, 0.5423584775195998e+00, 0.8415597376799326e+00, 0.9973399062905237e+00, 0.1033340463692495e+00, 0.4671884516860899e+00, 0.7629036530181579e+00, 0.1861385328384410e+00, 0.1124645093942705e+01, 0.6800991665489665e+00, 0.6978261813392677e+00, 0.1261574624288360e+01, 0.3053203341720497e+00, 0.5608966509936130e+00, 0.4929941693545132e+00, 0.3798570066394223e+00, 0.5509778292160957e+00, 0.4184574968871406e+00, 0.2087258457383731e+00, 0.6491193472879784e+00, 0.4915348757406203e-01, 0.1105843276664904e+00, 0.4574524632962537e-01, 0.9277628648996712e-01, 0.1253505697055357e-02, 0.2122447413720272e-02, 0.2576338363396270e-01, 0.2701688933101806e-01, 0.8149780515932184e-03, 0.1620493249248586e-02, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 2.5763383633962696e-002, + desig: -5.1698788284564230e-026, + qmax: 2.4715692224139039, + ttype: -1, + dmin1: 4.3622798915905092e-002, + dmin2: 7.4536672467372611e-002, + dn: 2.5763383633962696e-002, + dn1: 4.3622798915905092e-002, + dn2: 9.8141518071882677e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 1, + iter: 9, + sigma: 2.1175425068616867e-005, + nDiv: 193, + zOut: []float64{0.2471569222413904e+01, 0.2471445466333236e+01, 0.4896382518051712e-02, 0.2350431231346416e-02, 0.1186378431264915e+01, 0.1179822839486443e+01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.5236148327211592e+00, 0.5209996276036221e+00, 0.2766627272719901e-02, 0.5058054349403302e-02, 0.9525115502267366e+00, 0.1034766778881179e+01, 0.9233342160256496e-01, 0.1252490967185870e+00, 0.1403647802928048e+01, 0.1312087633296770e+01, 0.3870906568602875e-01, 0.2542350442532051e-01, 0.8617584837127339e+00, 0.8487220155581966e+00, 0.1740717486950262e-01, 0.2862783537884150e-01, 0.1395808011693333e+01, 0.1574348441670908e+01, 0.2121884039551361e+00, 0.8484373804386666e-01, 0.6295047433559955e+00, 0.1074794415251550e+01, 0.5351535485381410e+00, 0.1841287608083240e+00, 0.3698014604305115e+00, 0.7088504197799252e+00, 0.5281978587564573e+00, 0.9752771411128711e+00, 0.1308838344228090e+01, 0.4320210527858890e+00, 0.1034799882693896e+00, 0.2158225196628609e+00, 0.9010425466701916e+00, 0.7144140228254550e+00, 0.3421413441684364e-01, 0.3835134583138245e-01, 0.8008017657953598e+00, 0.1391311611427577e+01, 0.6338813300623194e+00, 0.3955915526975877e+00, 0.8682873190108105e+00, 0.1090644248011184e+01, 0.6229686202966810e+00, 0.4806932321292802e+00, 0.8415597376799326e+00, 0.4591804133211825e+00, 0.1033340463692495e+00, 0.1716839812178710e+00, 0.7629036530181579e+00, 0.1710844627144272e+01, 0.1124645093942705e+01, 0.4587247601659613e+00, 0.6978261813392677e+00, 0.5394016167466366e+00, 0.3053203341720497e+00, 0.2790520826393697e+00, 0.4929941693545132e+00, 0.7598997773325197e+00, 0.5509778292160957e+00, 0.1513401067044909e+00, 0.2087258457383731e+00, 0.1015190880092246e+00, 0.4915348757406203e-01, 0.2214892234681356e-01, 0.4574524632962537e-01, 0.1982969108114764e-01, 0.1253505697055357e-02, 0.1628595626045726e-02, 0.2576338363396270e-01, 0.1911464940919745e-01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 1.8576185384092288e-002, + desigOut: 2.6427422784455342e-019, + sigmaOut: 5.0413140237881371e-003, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 11, + nDivOut: 235, + ttypeOut: -15, + dmin1Out: 
1.8576185384092288e-002, + dmin2Out: 5.2365600435162571e-002, + dnOut: 1.9114649409197451e-002, + dn1Out: 1.8576185384092288e-002, + dn2Out: 5.2365600435162571e-002, + gOut: 0.0000000000000000, + tauOut: 5.0201385987195205e-003, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2471569222413904e+01, 0.2471445466333236e+01, 0.4896382518051712e-02, 0.2350431231346416e-02, 0.1186378431264915e+01, 0.1179822839486443e+01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.5236148327211592e+00, 0.5209996276036221e+00, 0.2766627272719901e-02, 0.5058054349403302e-02, 0.9525115502267366e+00, 0.1034766778881179e+01, 0.9233342160256496e-01, 0.1252490967185870e+00, 0.1403647802928048e+01, 0.1312087633296770e+01, 0.3870906568602875e-01, 0.2542350442532051e-01, 0.8617584837127339e+00, 0.8487220155581966e+00, 0.1740717486950262e-01, 0.2862783537884150e-01, 0.1395808011693333e+01, 0.1574348441670908e+01, 0.2121884039551361e+00, 0.8484373804386666e-01, 0.6295047433559955e+00, 0.1074794415251550e+01, 0.5351535485381410e+00, 0.1841287608083240e+00, 0.3698014604305115e+00, 0.7088504197799252e+00, 0.5281978587564573e+00, 0.9752771411128711e+00, 0.1308838344228090e+01, 0.4320210527858890e+00, 0.1034799882693896e+00, 0.2158225196628609e+00, 0.9010425466701916e+00, 0.7144140228254550e+00, 0.3421413441684364e-01, 0.3835134583138245e-01, 0.8008017657953598e+00, 0.1391311611427577e+01, 0.6338813300623194e+00, 0.3955915526975877e+00, 0.8682873190108105e+00, 0.1090644248011184e+01, 0.6229686202966810e+00, 0.4806932321292802e+00, 0.8415597376799326e+00, 0.4591804133211825e+00, 0.1033340463692495e+00, 0.1716839812178710e+00, 0.7629036530181579e+00, 0.1710844627144272e+01, 0.1124645093942705e+01, 0.4587247601659613e+00, 0.6978261813392677e+00, 0.5394016167466366e+00, 0.3053203341720497e+00, 0.2790520826393697e+00, 0.4929941693545132e+00, 0.7598997773325197e+00, 0.5509778292160957e+00, 0.1513401067044909e+00, 0.2087258457383731e+00, 0.1015190880092246e+00, 0.4915348757406203e-01, 0.2214892234681356e-01, 0.4574524632962537e-01, 0.1982969108114764e-01, 0.1253505697055357e-02, 0.1628595626045726e-02, 0.2576338363396270e-01, 0.1911464940919745e-01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 1.8576185384092288e-002, + desig: 2.6427422784455342e-019, + qmax: 2.4715692224139039, + ttype: -15, + dmin1: 1.8576185384092288e-002, + dmin2: 5.2365600435162571e-002, + dn: 1.9114649409197451e-002, + dn1: 1.8576185384092288e-002, + dn2: 5.2365600435162571e-002, + g: 0.0000000000000000, + tau: 5.0201385987195205e-003, + nFail: 2, + iter: 11, + sigma: 5.0413140237881371e-003, + nDiv: 235, + zOut: []float64{0.2468318984233055e+01, 0.2471445466333236e+01, 0.1123474100024551e-02, 0.2350431231346416e-02, 0.1173584145846428e+01, 0.1179822839486443e+01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.5204201986859162e+00, 0.5209996276036221e+00, 0.1005707814522541e-01, 0.5058054349403302e-02, 0.1144481884123012e+01, 0.1034766778881179e+01, 0.1435914304680996e+00, 0.1252490967185870e+00, 0.1188442793922463e+01, 0.1312087633296770e+01, 0.1815610143690141e-01, 0.2542350442532051e-01, 0.8537168361686087e+00, 0.8487220155581966e+00, 0.5279290053521807e-01, 0.2862783537884150e-01, 0.1600922365848029e+01, 0.1574348441670908e+01, 0.5696064828871891e-01, 0.8484373804386666e-01, 0.1196485614439627e+01, 0.1074794415251550e+01, 0.1090859328498209e+00, 0.1841287608083240e+00, 0.1569564714711448e+01, 0.7088504197799252e+00, 
0.2684440171930437e+00, 0.9752771411128711e+00, 0.3739226419241781e+00, 0.4320210527858890e+00, 0.4123490187575627e+00, 0.2158225196628609e+00, 0.3349394365677468e+00, 0.7144140228254550e+00, 0.1593084209965356e+00, 0.3835134583138245e-01, 0.1622117829797102e+01, 0.1391311611427577e+01, 0.2659792301064862e+00, 0.3955915526975877e+00, 0.1299881336702450e+01, 0.1090644248011184e+01, 0.1698038973078534e+00, 0.4806932321292802e+00, 0.4555835838996722e+00, 0.4591804133211825e+00, 0.6447216871142054e+00, 0.1716839812178710e+00, 0.1519370786864500e+01, 0.1710844627144272e+01, 0.1628548339973444e+00, 0.4587247601659613e+00, 0.6501219520571339e+00, 0.5394016167466366e+00, 0.3261720586281595e+00, 0.2790520826393697e+00, 0.5795909120773233e+00, 0.7598997773325197e+00, 0.2650819619788820e-01, 0.1513401067044909e+00, 0.9168290082662192e-01, 0.1015190880092246e+00, 0.4790492926791300e-02, 0.2214892234681356e-01, 0.1119088044887405e-01, 0.1982969108114764e-01, 0.2781732372482683e-02, 0.1628595626045726e-02, 0.1085600370518675e-01, 0.1911464940919745e-01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 9.5622848228283271e-003, + desigOut: -6.0308751014385013e-019, + sigmaOut: 1.0518227355316156e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 12, + nDivOut: 256, + ttypeOut: -4, + dmin1Out: 9.5622848228283271e-003, + dmin2Out: 6.9533978479808370e-002, + dnOut: 1.0856003705186750e-002, + dn1Out: 9.5622848228283271e-003, + dn2Out: 6.9533978479808370e-002, + gOut: 0.0000000000000000, + tauOut: 5.4769133315280185e-003, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2468318984233055e+01, 0.2471445466333236e+01, 0.1123474100024551e-02, 0.2350431231346416e-02, 0.1173584145846428e+01, 0.1179822839486443e+01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.5204201986859162e+00, 0.5209996276036221e+00, 0.1005707814522541e-01, 0.5058054349403302e-02, 0.1144481884123012e+01, 0.1034766778881179e+01, 0.1435914304680996e+00, 0.1252490967185870e+00, 0.1188442793922463e+01, 0.1312087633296770e+01, 0.1815610143690141e-01, 0.2542350442532051e-01, 0.8537168361686087e+00, 0.8487220155581966e+00, 0.5279290053521807e-01, 0.2862783537884150e-01, 0.1600922365848029e+01, 0.1574348441670908e+01, 0.5696064828871891e-01, 0.8484373804386666e-01, 0.1196485614439627e+01, 0.1074794415251550e+01, 0.1090859328498209e+00, 0.1841287608083240e+00, 0.1569564714711448e+01, 0.7088504197799252e+00, 0.2684440171930437e+00, 0.9752771411128711e+00, 0.3739226419241781e+00, 0.4320210527858890e+00, 0.4123490187575627e+00, 0.2158225196628609e+00, 0.3349394365677468e+00, 0.7144140228254550e+00, 0.1593084209965356e+00, 0.3835134583138245e-01, 0.1622117829797102e+01, 0.1391311611427577e+01, 0.2659792301064862e+00, 0.3955915526975877e+00, 0.1299881336702450e+01, 0.1090644248011184e+01, 0.1698038973078534e+00, 0.4806932321292802e+00, 0.4555835838996722e+00, 0.4591804133211825e+00, 0.6447216871142054e+00, 0.1716839812178710e+00, 0.1519370786864500e+01, 0.1710844627144272e+01, 0.1628548339973444e+00, 0.4587247601659613e+00, 0.6501219520571339e+00, 0.5394016167466366e+00, 0.3261720586281595e+00, 0.2790520826393697e+00, 0.5795909120773233e+00, 0.7598997773325197e+00, 0.2650819619788820e-01, 0.1513401067044909e+00, 0.9168290082662192e-01, 0.1015190880092246e+00, 0.4790492926791300e-02, 0.2214892234681356e-01, 0.1119088044887405e-01, 0.1982969108114764e-01, 0.2781732372482683e-02, 0.1628595626045726e-02, 
0.1085600370518675e-01, 0.1911464940919745e-01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 9.5622848228283271e-003, + desig: -6.0308751014385013e-019, + qmax: 2.4715692224139039, + ttype: -4, + dmin1: 9.5622848228283271e-003, + dmin2: 6.9533978479808370e-002, + dn: 1.0856003705186750e-002, + dn1: 9.5622848228283271e-003, + dn2: 6.9533978479808370e-002, + g: 0.0000000000000000, + tau: 5.4769133315280185e-003, + nFail: 2, + iter: 12, + sigma: 1.0518227355316156e-002, + nDiv: 256, + zOut: []float64{0.2468318984233055e+01, 0.2464320851971913e+01, 0.1123474100024551e-02, 0.5350323562789559e-03, 0.1173584145846428e+01, 0.1168088077064565e+01, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.5204201986859162e+00, 0.5252841314829605e+00, 0.1005707814522541e-01, 0.2191222436498315e-01, 0.1144481884123012e+01, 0.1261039483864963e+01, 0.1435914304680996e+00, 0.1353250258951489e+00, 0.1188442793922463e+01, 0.1066152263103050e+01, 0.1815610143690141e-01, 0.1453842008528346e-01, 0.8537168361686087e+00, 0.8868497102573779e+00, 0.5279290053521807e-01, 0.9530062900995111e-01, 0.1600922365848029e+01, 0.1557460778765631e+01, 0.5696064828871891e-01, 0.4375878814786067e-01, 0.1196485614439627e+01, 0.1256691152780422e+01, 0.1090859328498209e+00, 0.1362446379077657e+00, 0.1569564714711448e+01, 0.1696642487635560e+01, 0.2684440171930437e+00, 0.5916231430550117e-01, 0.3739226419241781e+00, 0.7219877400150740e+00, 0.4123490187575627e+00, 0.1912940350054112e+00, 0.3349394365677468e+00, 0.2978322161977056e+00, 0.1593084209965356e+00, 0.8676597630518320e+00, 0.1622117829797102e+01, 0.1015315690490590e+01, 0.2659792301064862e+00, 0.3405260456467969e+00, 0.1299881336702450e+01, 0.1124037582002341e+01, 0.1698038973078534e+00, 0.6882320425428856e-01, 0.4555835838996722e+00, 0.1026360460398424e+01, 0.6447216871142054e+00, 0.9544125430154021e+00, 0.1519370786864500e+01, 0.7226914714852769e+00, 0.1628548339973444e+00, 0.1465016632377001e+00, 0.6501219520571339e+00, 0.8246707410864278e+00, 0.3261720586281595e+00, 0.2292385937027206e+00, 0.5795909120773233e+00, 0.3717389082113253e+00, 0.2650819619788820e-01, 0.6537783023029759e-02, 0.9168290082662192e-01, 0.8481400436921797e-01, 0.4790492926791300e-02, 0.6320870478125323e-03, 0.1119088044887405e-01, 0.8218919412378699e-02, 0.2781732372482683e-02, 0.3674266095981827e-02, 0.1085600370518675e-01, 0.2060131248039419e-02, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 2.0601312480394186e-003, + desigOut: 0.0000000000000000, + sigmaOut: 1.5639833716481661e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 13, + nDivOut: 277, + ttypeOut: -4, + dmin1Out: 5.4371870398960158e-003, + dmin2Out: 8.0023511442426670e-002, + dnOut: 2.0601312480394186e-003, + dn1Out: 5.4371870398960158e-003, + dn2Out: 8.0023511442426670e-002, + gOut: 0.0000000000000000, + tauOut: 5.1216063611655054e-003, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2468318984233055e+01, 0.2464320851971913e+01, 0.1123474100024551e-02, 0.5350323562789559e-03, 0.1173584145846428e+01, 0.1168088077064565e+01, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.5204201986859162e+00, 0.5252841314829605e+00, 0.1005707814522541e-01, 0.2191222436498315e-01, 0.1144481884123012e+01, 0.1261039483864963e+01, 0.1435914304680996e+00, 0.1353250258951489e+00, 
0.1188442793922463e+01, 0.1066152263103050e+01, 0.1815610143690141e-01, 0.1453842008528346e-01, 0.8537168361686087e+00, 0.8868497102573779e+00, 0.5279290053521807e-01, 0.9530062900995111e-01, 0.1600922365848029e+01, 0.1557460778765631e+01, 0.5696064828871891e-01, 0.4375878814786067e-01, 0.1196485614439627e+01, 0.1256691152780422e+01, 0.1090859328498209e+00, 0.1362446379077657e+00, 0.1569564714711448e+01, 0.1696642487635560e+01, 0.2684440171930437e+00, 0.5916231430550117e-01, 0.3739226419241781e+00, 0.7219877400150740e+00, 0.4123490187575627e+00, 0.1912940350054112e+00, 0.3349394365677468e+00, 0.2978322161977056e+00, 0.1593084209965356e+00, 0.8676597630518320e+00, 0.1622117829797102e+01, 0.1015315690490590e+01, 0.2659792301064862e+00, 0.3405260456467969e+00, 0.1299881336702450e+01, 0.1124037582002341e+01, 0.1698038973078534e+00, 0.6882320425428856e-01, 0.4555835838996722e+00, 0.1026360460398424e+01, 0.6447216871142054e+00, 0.9544125430154021e+00, 0.1519370786864500e+01, 0.7226914714852769e+00, 0.1628548339973444e+00, 0.1465016632377001e+00, 0.6501219520571339e+00, 0.8246707410864278e+00, 0.3261720586281595e+00, 0.2292385937027206e+00, 0.5795909120773233e+00, 0.3717389082113253e+00, 0.2650819619788820e-01, 0.6537783023029759e-02, 0.9168290082662192e-01, 0.8481400436921797e-01, 0.4790492926791300e-02, 0.6320870478125323e-03, 0.1119088044887405e-01, 0.8218919412378699e-02, 0.2781732372482683e-02, 0.3674266095981827e-02, 0.1085600370518675e-01, 0.2060131248039419e-02, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 2.0601312480394186e-003, + desig: 0.0000000000000000, + qmax: 2.4715692224139039, + ttype: -4, + dmin1: 5.4371870398960158e-003, + dmin2: 8.0023511442426670e-002, + dn: 2.0601312480394186e-003, + dn1: 5.4371870398960158e-003, + dn2: 8.0023511442426670e-002, + g: 0.0000000000000000, + tau: 5.1216063611655054e-003, + nFail: 2, + iter: 13, + sigma: 1.5639833716481661e-002, + nDiv: 277, + zOut: []float64{0.2463574096511276e+01, 0.2464320851971913e+01, 0.2536822079344948e-03, 0.5350323562789559e-03, 0.1166624146026729e+01, 0.1168088077064565e+01, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.5458823568901986e+00, 0.5252841314829605e+00, 0.5061929508212644e-01, 0.2191222436498315e-01, 0.1344463426861069e+01, 0.1261039483864963e+01, 0.1073120173669855e+00, 0.1353250258951489e+00, 0.9720968780044319e+00, 0.1066152263103050e+01, 0.1326348631702415e-01, 0.1453842008528346e-01, 0.9676050651333883e+00, 0.8868497102573779e+00, 0.1533962535161303e+00, 0.9530062900995111e-01, 0.1446541525580445e+01, 0.1557460778765631e+01, 0.3801569533217738e-01, 0.4375878814786067e-01, 0.1353638307539094e+01, 0.1256691152780422e+01, 0.1707682473962209e+00, 0.1362446379077657e+00, 0.1583754766727924e+01, 0.1696642487635560e+01, 0.2697037855661164e-01, 0.5916231430550117e-01, 0.8850296086469572e+00, 0.7219877400150740e+00, 0.6437471225190403e-01, 0.1912940350054112e+00, 0.1099835479180717e+01, 0.2978322161977056e+00, 0.8009821360646626e+00, 0.8676597630518320e+00, 0.5535778122558079e+00, 0.1015315690490590e+01, 0.6914368034330997e+00, 0.3405260456467969e+00, 0.5001421950066134e+00, 0.1124037582002341e+01, 0.1412346654806686e+00, 0.6882320425428856e-01, 0.1838256550116241e+01, 0.1026360460398424e+01, 0.3752173792456719e+00, 0.9544125430154021e+00, 0.4926939676603885e+00, 0.7226914714852769e+00, 0.2452143584512202e+00, 0.1465016632377001e+00, 0.8074131885210117e+00, 0.8246707410864278e+00, 
0.1055431169003394e+00, 0.2292385937027206e+00, 0.2714517865170992e+00, 0.3717389082113253e+00, 0.2042703660177667e-02, 0.6537783023029759e-02, 0.8212159993993635e-01, 0.8481400436921797e-01, 0.6326073178529442e-04, 0.6320870478125323e-03, 0.1054813695965874e-01, 0.8218919412378699e-02, 0.7176120699696391e-03, 0.3674266095981827e-02, 0.6073136115328898e-04, 0.2060131248039419e-02, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 6.0731361153288982e-005, + desigOut: 1.7347234759768071e-018, + sigmaOut: 1.6921621533398150e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 14, + nDivOut: 298, + ttypeOut: -2, + dmin1Out: 6.8738708636769136e-003, + dmin2Out: 8.1489512892123819e-002, + dnOut: 6.0731361153288982e-005, + dn1Out: 6.8738708636769136e-003, + dn2Out: 8.1489512892123819e-002, + gOut: 0.0000000000000000, + tauOut: 1.2817878169164906e-003, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2463574096511276e+01, 0.2464320851971913e+01, 0.2536822079344948e-03, 0.5350323562789559e-03, 0.1166624146026729e+01, 0.1168088077064565e+01, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.5458823568901986e+00, 0.5252841314829605e+00, 0.5061929508212644e-01, 0.2191222436498315e-01, 0.1344463426861069e+01, 0.1261039483864963e+01, 0.1073120173669855e+00, 0.1353250258951489e+00, 0.9720968780044319e+00, 0.1066152263103050e+01, 0.1326348631702415e-01, 0.1453842008528346e-01, 0.9676050651333883e+00, 0.8868497102573779e+00, 0.1533962535161303e+00, 0.9530062900995111e-01, 0.1446541525580445e+01, 0.1557460778765631e+01, 0.3801569533217738e-01, 0.4375878814786067e-01, 0.1353638307539094e+01, 0.1256691152780422e+01, 0.1707682473962209e+00, 0.1362446379077657e+00, 0.1583754766727924e+01, 0.1696642487635560e+01, 0.2697037855661164e-01, 0.5916231430550117e-01, 0.8850296086469572e+00, 0.7219877400150740e+00, 0.6437471225190403e-01, 0.1912940350054112e+00, 0.1099835479180717e+01, 0.2978322161977056e+00, 0.8009821360646626e+00, 0.8676597630518320e+00, 0.5535778122558079e+00, 0.1015315690490590e+01, 0.6914368034330997e+00, 0.3405260456467969e+00, 0.5001421950066134e+00, 0.1124037582002341e+01, 0.1412346654806686e+00, 0.6882320425428856e-01, 0.1838256550116241e+01, 0.1026360460398424e+01, 0.3752173792456719e+00, 0.9544125430154021e+00, 0.4926939676603885e+00, 0.7226914714852769e+00, 0.2452143584512202e+00, 0.1465016632377001e+00, 0.8074131885210117e+00, 0.8246707410864278e+00, 0.1055431169003394e+00, 0.2292385937027206e+00, 0.2714517865170992e+00, 0.3717389082113253e+00, 0.2042703660177667e-02, 0.6537783023029759e-02, 0.8212159993993635e-01, 0.8481400436921797e-01, 0.6326073178529442e-04, 0.6320870478125323e-03, 0.1054813695965874e-01, 0.8218919412378699e-02, 0.7176120699696391e-03, 0.3674266095981827e-02, 0.6073136115328898e-04, 0.2060131248039419e-02, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 6.0731361153288982e-005, + desig: 1.7347234759768071e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 6.8738708636769136e-003, + dmin2: 8.1489512892123819e-002, + dn: 6.0731361153288982e-005, + dn1: 6.8738708636769136e-003, + dn2: 8.1489512892123819e-002, + g: 0.0000000000000000, + tau: 1.2817878169164906e-003, + nFail: 2, + iter: 14, + sigma: 1.6921621533398150e-002, + nDiv: 298, + zOut: []float64{0.2463574096511276e+01, 0.2463770941477959e+01, 
0.2536822079344948e-03, 0.1201214707955848e-03, 0.1166624146026729e+01, 0.1166479398455512e+01, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.5458823568901986e+00, 0.5964297407456295e+00, 0.5061929508212644e-01, 0.1141052940222717e+00, 0.1344463426861069e+01, 0.1337613312964532e+01, 0.1073120173669855e+00, 0.7798791776646297e-01, 0.9720968780044319e+00, 0.9073156093137420e+00, 0.1326348631702415e-01, 0.1414482062243694e-01, 0.9676050651333883e+00, 0.1106799660785830e+01, 0.1533962535161303e+00, 0.2004825791345134e+00, 0.1446541525580445e+01, 0.1284017804536858e+01, 0.3801569533217738e-01, 0.4007693764646178e-01, 0.1353638307539094e+01, 0.1484272780047602e+01, 0.1707682473962209e+00, 0.1822138285193538e+00, 0.1583754766727924e+01, 0.1428454479523931e+01, 0.2697037855661164e-01, 0.1671007646458111e-01, 0.8850296086469572e+00, 0.9326374071930291e+00, 0.6437471225190403e-01, 0.7591545433480534e-01, 0.1099835479180717e+01, 0.1824845323669324e+01, 0.8009821360646626e+00, 0.2429827519008994e+00, 0.5535778122558079e+00, 0.1001975026546757e+01, 0.6914368034330997e+00, 0.3451350696526060e+00, 0.5001421950066134e+00, 0.2961849535934249e+00, 0.1412346654806686e+00, 0.8765656248686587e+00, 0.1838256550116241e+01, 0.1336851467252003e+01, 0.3752173792456719e+00, 0.1382856239786244e+00, 0.4926939676603885e+00, 0.5995658648917332e+00, 0.2452143584512202e+00, 0.3302211126778973e+00, 0.8074131885210117e+00, 0.5826783555022028e+00, 0.1055431169003394e+00, 0.4916926700063749e-01, 0.2714517865170992e+00, 0.2242683859353883e+00, 0.2042703660177667e-02, 0.7479881396448043e-03, 0.8212159993993635e-01, 0.8138003529082581e-01, 0.6326073178529442e-04, 0.8199589256196194e-05, 0.1054813695965874e-01, 0.1120071219912114e-01, 0.7176120699696391e-03, 0.3890963093641941e-05, 0.6073136115328898e-04, 0.3156808608191942e-08, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 3.1568086081919418e-009, + desigOut: 4.7433845046240819e-020, + sigmaOut: 1.6978458774649190e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 15, + nDivOut: 319, + ttypeOut: -2, + dmin1Out: 1.0483100129151506e-002, + dmin2Out: 8.1316774559040517e-002, + dnOut: 3.1568086081919418e-009, + dn1Out: 1.0483100129151506e-002, + dn2Out: 8.1316774559040517e-002, + gOut: 0.0000000000000000, + tauOut: 5.6837241251038845e-005, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2463574096511276e+01, 0.2463770941477959e+01, 0.2536822079344948e-03, 0.1201214707955848e-03, 0.1166624146026729e+01, 0.1166479398455512e+01, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.5458823568901986e+00, 0.5964297407456295e+00, 0.5061929508212644e-01, 0.1141052940222717e+00, 0.1344463426861069e+01, 0.1337613312964532e+01, 0.1073120173669855e+00, 0.7798791776646297e-01, 0.9720968780044319e+00, 0.9073156093137420e+00, 0.1326348631702415e-01, 0.1414482062243694e-01, 0.9676050651333883e+00, 0.1106799660785830e+01, 0.1533962535161303e+00, 0.2004825791345134e+00, 0.1446541525580445e+01, 0.1284017804536858e+01, 0.3801569533217738e-01, 0.4007693764646178e-01, 0.1353638307539094e+01, 0.1484272780047602e+01, 0.1707682473962209e+00, 0.1822138285193538e+00, 0.1583754766727924e+01, 0.1428454479523931e+01, 0.2697037855661164e-01, 0.1671007646458111e-01, 0.8850296086469572e+00, 0.9326374071930291e+00, 0.6437471225190403e-01, 0.7591545433480534e-01, 0.1099835479180717e+01, 0.1824845323669324e+01, 0.8009821360646626e+00, 0.2429827519008994e+00, 
0.5535778122558079e+00, 0.1001975026546757e+01, 0.6914368034330997e+00, 0.3451350696526060e+00, 0.5001421950066134e+00, 0.2961849535934249e+00, 0.1412346654806686e+00, 0.8765656248686587e+00, 0.1838256550116241e+01, 0.1336851467252003e+01, 0.3752173792456719e+00, 0.1382856239786244e+00, 0.4926939676603885e+00, 0.5995658648917332e+00, 0.2452143584512202e+00, 0.3302211126778973e+00, 0.8074131885210117e+00, 0.5826783555022028e+00, 0.1055431169003394e+00, 0.4916926700063749e-01, 0.2714517865170992e+00, 0.2242683859353883e+00, 0.2042703660177667e-02, 0.7479881396448043e-03, 0.8212159993993635e-01, 0.8138003529082581e-01, 0.6326073178529442e-04, 0.8199589256196194e-05, 0.1054813695965874e-01, 0.1120071219912114e-01, 0.7176120699696391e-03, 0.3890963093641941e-05, 0.6073136115328898e-04, 0.3156808608191942e-08, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 3.1568086081919418e-009, + desig: 4.7433845046240819e-020, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 1.0483100129151506e-002, + dmin2: 8.1316774559040517e-002, + dn: 3.1568086081919418e-009, + dn1: 1.0483100129151506e-002, + dn2: 8.1316774559040517e-002, + g: 0.0000000000000000, + tau: 5.6837241251038845e-005, + nFail: 2, + iter: 15, + sigma: 1.6978458774649190e-002, + nDiv: 319, + zOut: []float64{0.2463891059793043e+01, 0.2463770941477959e+01, 0.5686908130061341e-04, 0.1201214707955848e-03, 0.1166437600203943e+01, 0.1166479398455512e+01, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.7105273238932086e+00, 0.5964297407456295e+00, 0.2148105431436762e+00, 0.1141052940222717e+00, 0.1200790684431606e+01, 0.1337613312964532e+01, 0.5892755169139442e-01, 0.7798791776646297e-01, 0.8625328750890724e+00, 0.9073156093137420e+00, 0.1815059242254727e-01, 0.1414482062243694e-01, 0.1289131644342084e+01, 0.1106799660785830e+01, 0.1996872873596725e+00, 0.2004825791345134e+00, 0.1124407451667935e+01, 0.1284017804536858e+01, 0.5290351604133232e-01, 0.4007693764646178e-01, 0.1613583089369911e+01, 0.1484272780047602e+01, 0.1613081850537457e+00, 0.1822138285193538e+00, 0.1283856367779054e+01, 0.1428454479523931e+01, 0.1213877407087503e-01, 0.1671007646458111e-01, 0.9964140843012472e+00, 0.9326374071930291e+00, 0.1390325207358455e+00, 0.7591545433480534e-01, 0.1928795551678665e+01, 0.1824845323669324e+01, 0.1262252233392066e+00, 0.2429827519008994e+00, 0.1220884869704444e+01, 0.1001975026546757e+01, 0.8372928285471114e-01, 0.3451350696526060e+00, 0.1089021292451660e+01, 0.2961849535934249e+00, 0.1076046951396362e+01, 0.8765656248686587e+00, 0.3990901366785531e+00, 0.1336851467252003e+01, 0.2077509116934600e+00, 0.1382856239786244e+00, 0.7220360627204584e+00, 0.5995658648917332e+00, 0.2664862668525171e+00, 0.3302211126778973e+00, 0.3653613524946110e+00, 0.5826783555022028e+00, 0.3018138637972599e-01, 0.4916926700063749e-01, 0.1948349845395949e+00, 0.2242683859353883e+00, 0.3124249032854923e-03, 0.7479881396448043e-03, 0.8107580682108434e-01, 0.8138003529082581e-01, 0.1132782305976083e-05, 0.8199589256196194e-05, 0.1120346722419663e-01, 0.1120071219912114e-01, 0.1096359327194516e-11, 0.3890963093641941e-05, 0.6968497581336674e-16, 0.3156808608191942e-08, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 6.9684975813366743e-017, + desigOut: 1.5445815365207740e-018, + sigmaOut: 1.6978461930361368e-002, + 
qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 16, + nDivOut: 340, + ttypeOut: -2, + dmin1Out: 1.1199576261102989e-002, + dmin2Out: 8.1067607231828140e-002, + dnOut: 6.9684975813366743e-017, + dn1Out: 1.1199576261102989e-002, + dn2Out: 8.1067607231828140e-002, + gOut: 0.0000000000000000, + tauOut: 3.1557121791797713e-009, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2463891059793043e+01, 0.2463770941477959e+01, 0.5686908130061341e-04, 0.1201214707955848e-03, 0.1166437600203943e+01, 0.1166479398455512e+01, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.7105273238932086e+00, 0.5964297407456295e+00, 0.2148105431436762e+00, 0.1141052940222717e+00, 0.1200790684431606e+01, 0.1337613312964532e+01, 0.5892755169139442e-01, 0.7798791776646297e-01, 0.8625328750890724e+00, 0.9073156093137420e+00, 0.1815059242254727e-01, 0.1414482062243694e-01, 0.1289131644342084e+01, 0.1106799660785830e+01, 0.1996872873596725e+00, 0.2004825791345134e+00, 0.1124407451667935e+01, 0.1284017804536858e+01, 0.5290351604133232e-01, 0.4007693764646178e-01, 0.1613583089369911e+01, 0.1484272780047602e+01, 0.1613081850537457e+00, 0.1822138285193538e+00, 0.1283856367779054e+01, 0.1428454479523931e+01, 0.1213877407087503e-01, 0.1671007646458111e-01, 0.9964140843012472e+00, 0.9326374071930291e+00, 0.1390325207358455e+00, 0.7591545433480534e-01, 0.1928795551678665e+01, 0.1824845323669324e+01, 0.1262252233392066e+00, 0.2429827519008994e+00, 0.1220884869704444e+01, 0.1001975026546757e+01, 0.8372928285471114e-01, 0.3451350696526060e+00, 0.1089021292451660e+01, 0.2961849535934249e+00, 0.1076046951396362e+01, 0.8765656248686587e+00, 0.3990901366785531e+00, 0.1336851467252003e+01, 0.2077509116934600e+00, 0.1382856239786244e+00, 0.7220360627204584e+00, 0.5995658648917332e+00, 0.2664862668525171e+00, 0.3302211126778973e+00, 0.3653613524946110e+00, 0.5826783555022028e+00, 0.3018138637972599e-01, 0.4916926700063749e-01, 0.1948349845395949e+00, 0.2242683859353883e+00, 0.3124249032854923e-03, 0.7479881396448043e-03, 0.8107580682108434e-01, 0.8138003529082581e-01, 0.1132782305976083e-05, 0.8199589256196194e-05, 0.1120346722419663e-01, 0.1120071219912114e-01, 0.1096359327194516e-11, 0.3890963093641941e-05, 0.6968497581336674e-16, 0.3156808608191942e-08, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 6.9684975813366743e-017, + desig: 1.5445815365207740e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 1.1199576261102989e-002, + dmin2: 8.1067607231828140e-002, + dn: 6.9684975813366743e-017, + dn1: 1.1199576261102989e-002, + dn2: 8.1067607231828140e-002, + g: 0.0000000000000000, + tau: 3.1557121791797713e-009, + nFail: 2, + iter: 16, + sigma: 1.6978461930361368e-002, + nDiv: 340, + zOut: []float64{0.2463891059793043e+01, 0.2463947928874343e+01, 0.5686908130061341e-04, 0.2692193042748079e-04, 0.1166437600203943e+01, 0.1166418385992496e+01, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.7105273238932086e+00, 0.9253331718563164e+00, 0.2148105431436762e+00, 0.2787563517334627e+00, 0.1200790684431606e+01, 0.9809618843895378e+00, 0.5892755169139442e-01, 0.5181337969514327e-01, 0.8625328750890724e+00, 0.8288700878164763e+00, 0.1815059242254727e-01, 0.2822939734392020e-01, 0.1289131644342084e+01, 0.1460589534357837e+01, 0.1996872873596725e+00, 0.1537255119449346e+00, 0.1124407451667935e+01, 0.1023585455764333e+01, 0.5290351604133232e-01, 0.8339725654733963e-01, 0.1613583089369911e+01, 0.1691494017876317e+01, 
0.1613081850537457e+00, 0.1224340957564512e+00, 0.1283856367779054e+01, 0.1173561046093478e+01, 0.1213877407087503e-01, 0.1030644761994533e-01, 0.9964140843012472e+00, 0.1125140157417147e+01, 0.1390325207358455e+00, 0.2383394688796517e+00, 0.1928795551678665e+01, 0.1816681306138221e+01, 0.1262252233392066e+00, 0.8482856339700598e-01, 0.1220884869704444e+01, 0.1219785589162149e+01, 0.8372928285471114e-01, 0.7475327847832687e-01, 0.1089021292451660e+01, 0.2090314965369696e+01, 0.1076046951396362e+01, 0.2054425921547012e+00, 0.3990901366785531e+00, 0.4013984562173118e+00, 0.2077509116934600e+00, 0.3737026089221466e+00, 0.7220360627204584e+00, 0.6148197206508288e+00, 0.2664862668525171e+00, 0.1583615157552351e+00, 0.3653613524946110e+00, 0.2371812231191019e+00, 0.3018138637972599e-01, 0.2479281399828426e-01, 0.1948349845395949e+00, 0.1703545954445960e+00, 0.3124249032854923e-03, 0.1486904479375115e-03, 0.8107580682108434e-01, 0.8092824915545274e-01, 0.1132782305976083e-05, 0.1568190288260776e-06, 0.1120346722419663e-01, 0.1120331040626409e-01, 0.1096359327194516e-11, 0.6819392699821255e-26, 0.6968497581336674e-16, 0.6162975822039155e-31, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 6.1629758220391547e-032, + desigOut: -1.6288286479578371e-018, + sigmaOut: 1.6978461930361441e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 17, + nDivOut: 361, + ttypeOut: -2, + dmin1Out: 1.1203310405167735e-002, + dmin2Out: 8.0927116373146771e-002, + dnOut: 6.1629758220391547e-032, + dn1Out: 1.1203310405167735e-002, + dn2Out: 8.0927116373146771e-002, + gOut: 0.0000000000000000, + tauOut: 6.9684975806547287e-017, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2463891059793043e+01, 0.2463947928874343e+01, 0.5686908130061341e-04, 0.2692193042748079e-04, 0.1166437600203943e+01, 0.1166418385992496e+01, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.7105273238932086e+00, 0.9253331718563164e+00, 0.2148105431436762e+00, 0.2787563517334627e+00, 0.1200790684431606e+01, 0.9809618843895378e+00, 0.5892755169139442e-01, 0.5181337969514327e-01, 0.8625328750890724e+00, 0.8288700878164763e+00, 0.1815059242254727e-01, 0.2822939734392020e-01, 0.1289131644342084e+01, 0.1460589534357837e+01, 0.1996872873596725e+00, 0.1537255119449346e+00, 0.1124407451667935e+01, 0.1023585455764333e+01, 0.5290351604133232e-01, 0.8339725654733963e-01, 0.1613583089369911e+01, 0.1691494017876317e+01, 0.1613081850537457e+00, 0.1224340957564512e+00, 0.1283856367779054e+01, 0.1173561046093478e+01, 0.1213877407087503e-01, 0.1030644761994533e-01, 0.9964140843012472e+00, 0.1125140157417147e+01, 0.1390325207358455e+00, 0.2383394688796517e+00, 0.1928795551678665e+01, 0.1816681306138221e+01, 0.1262252233392066e+00, 0.8482856339700598e-01, 0.1220884869704444e+01, 0.1219785589162149e+01, 0.8372928285471114e-01, 0.7475327847832687e-01, 0.1089021292451660e+01, 0.2090314965369696e+01, 0.1076046951396362e+01, 0.2054425921547012e+00, 0.3990901366785531e+00, 0.4013984562173118e+00, 0.2077509116934600e+00, 0.3737026089221466e+00, 0.7220360627204584e+00, 0.6148197206508288e+00, 0.2664862668525171e+00, 0.1583615157552351e+00, 0.3653613524946110e+00, 0.2371812231191019e+00, 0.3018138637972599e-01, 0.2479281399828426e-01, 0.1948349845395949e+00, 0.1703545954445960e+00, 0.3124249032854923e-03, 0.1486904479375115e-03, 0.8107580682108434e-01, 0.8092824915545274e-01, 0.1132782305976083e-05, 0.1568190288260776e-06, 
0.1120346722419663e-01, 0.1120331040626409e-01, 0.1096359327194516e-11, 0.6819392699821255e-26, 0.6968497581336674e-16, 0.6162975822039155e-31, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 6.1629758220391547e-032, + desig: -1.6288286479578371e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 1.1203310405167735e-002, + dmin2: 8.0927116373146771e-002, + dn: 6.1629758220391547e-032, + dn1: 1.1203310405167735e-002, + dn2: 8.0927116373146771e-002, + g: 0.0000000000000000, + tau: 6.9684975806547287e-017, + nFail: 2, + iter: 17, + sigma: 1.6978461930361441e-002, + nDiv: 361, + zOut: []float64{0.2463974850804771e+01, 0.2463947928874343e+01, 0.1274454348702788e-04, 0.2692193042748079e-04, 0.1166410336629578e+01, 0.1166418385992496e+01, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.1204085798823124e+01, 0.9253331718563164e+00, 0.2271012218143261e+00, 0.2787563517334627e+00, 0.8056740422703550e+00, 0.9809618843895378e+00, 0.5330513126246473e-01, 0.5181337969514327e-01, 0.8037943538979316e+00, 0.8288700878164763e+00, 0.5129615818002433e-01, 0.2822939734392020e-01, 0.1563018888122747e+01, 0.1460589534357837e+01, 0.1006713350698832e+00, 0.1537255119449346e+00, 0.1006311377241790e+01, 0.1023585455764333e+01, 0.1401812239704283e+00, 0.8339725654733963e-01, 0.1673746889662340e+01, 0.1691494017876317e+01, 0.8584564749956700e-01, 0.1224340957564512e+00, 0.1098021846213856e+01, 0.1173561046093478e+01, 0.1056099032774466e-01, 0.1030644761994533e-01, 0.1352918635969054e+01, 0.1125140157417147e+01, 0.3200390963041470e+00, 0.2383394688796517e+00, 0.1581470773231080e+01, 0.1816681306138221e+01, 0.6542811978092533e-01, 0.8482856339700598e-01, 0.1229110747859551e+01, 0.1219785589162149e+01, 0.1271308521106110e+00, 0.7475327847832687e-01, 0.2168626705413786e+01, 0.2090314965369696e+01, 0.3802606466401751e-01, 0.2054425921547012e+00, 0.7370750004754409e+00, 0.4013984562173118e+00, 0.3117182559112661e+00, 0.3737026089221466e+00, 0.4614629804947978e+00, 0.6148197206508288e+00, 0.8139413038408401e-01, 0.1583615157552351e+00, 0.1805799067333021e+00, 0.2371812231191019e+00, 0.2338892446571373e-01, 0.2479281399828426e-01, 0.1471143614268198e+00, 0.1703545954445960e+00, 0.8179526119010886e-04, 0.1486904479375115e-03, 0.8084661071329148e-01, 0.8092824915545274e-01, 0.2173118009582292e-07, 0.1568190288260776e-06, 0.1120328867508400e-01, 0.1120331040626409e-01, 0.3751376363572422e-55, 0.6819392699821255e-26, 0.6162975822039155e-31, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 6.1629758220391547e-032, + desigOut: -1.6288286479578371e-018, + sigmaOut: 1.6978461930361441e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 18, + nDivOut: 382, + ttypeOut: -2, + dmin1Out: 1.1203288675083998e-002, + dmin2Out: 8.0846453894262649e-002, + dnOut: 6.1629758220391547e-032, + dn1Out: 1.1203288675083998e-002, + dn2Out: 8.0846453894262649e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2463974850804771e+01, 0.2463947928874343e+01, 0.1274454348702788e-04, 0.2692193042748079e-04, 0.1166410336629578e+01, 0.1166418385992496e+01, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.1204085798823124e+01, 0.9253331718563164e+00, 0.2271012218143261e+00, 0.2787563517334627e+00, 
0.8056740422703550e+00, 0.9809618843895378e+00, 0.5330513126246473e-01, 0.5181337969514327e-01, 0.8037943538979316e+00, 0.8288700878164763e+00, 0.5129615818002433e-01, 0.2822939734392020e-01, 0.1563018888122747e+01, 0.1460589534357837e+01, 0.1006713350698832e+00, 0.1537255119449346e+00, 0.1006311377241790e+01, 0.1023585455764333e+01, 0.1401812239704283e+00, 0.8339725654733963e-01, 0.1673746889662340e+01, 0.1691494017876317e+01, 0.8584564749956700e-01, 0.1224340957564512e+00, 0.1098021846213856e+01, 0.1173561046093478e+01, 0.1056099032774466e-01, 0.1030644761994533e-01, 0.1352918635969054e+01, 0.1125140157417147e+01, 0.3200390963041470e+00, 0.2383394688796517e+00, 0.1581470773231080e+01, 0.1816681306138221e+01, 0.6542811978092533e-01, 0.8482856339700598e-01, 0.1229110747859551e+01, 0.1219785589162149e+01, 0.1271308521106110e+00, 0.7475327847832687e-01, 0.2168626705413786e+01, 0.2090314965369696e+01, 0.3802606466401751e-01, 0.2054425921547012e+00, 0.7370750004754409e+00, 0.4013984562173118e+00, 0.3117182559112661e+00, 0.3737026089221466e+00, 0.4614629804947978e+00, 0.6148197206508288e+00, 0.8139413038408401e-01, 0.1583615157552351e+00, 0.1805799067333021e+00, 0.2371812231191019e+00, 0.2338892446571373e-01, 0.2479281399828426e-01, 0.1471143614268198e+00, 0.1703545954445960e+00, 0.8179526119010886e-04, 0.1486904479375115e-03, 0.8084661071329148e-01, 0.8092824915545274e-01, 0.2173118009582292e-07, 0.1568190288260776e-06, 0.1120328867508400e-01, 0.1120331040626409e-01, 0.3751376363572422e-55, 0.6819392699821255e-26, 0.6162975822039155e-31, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 6.1629758220391547e-032, + desig: -1.6288286479578371e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 1.1203288675083998e-002, + dmin2: 8.0846453894262649e-002, + dn: 6.1629758220391547e-032, + dn1: 1.1203288675083998e-002, + dn2: 8.0846453894262649e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 18, + sigma: 1.6978461930361441e-002, + nDiv: 382, + zOut: []float64{0.2463974850804771e+01, 0.2452784311062345e+01, 0.1274454348702788e-04, 0.6060609239813679e-05, 0.1166410336629578e+01, 0.1155204716501079e+01, 0.3724766654883956e-05, 0.3882375624867434e-05, 0.1204085798823124e+01, 0.1419979853975912e+01, 0.2271012218143261e+00, 0.1288536304732592e+00, 0.8056740422703550e+00, 0.7189222587736472e+00, 0.5330513126246473e-01, 0.5959804835594534e-01, 0.8037943538979316e+00, 0.7842891794360973e+00, 0.5129615818002433e-01, 0.1022287011292917e+00, 0.1563018888122747e+01, 0.1550258237777425e+01, 0.1006713350698832e+00, 0.6534828028921505e-01, 0.1006311377241790e+01, 0.1069941036637090e+01, 0.1401812239704283e+00, 0.2192904838448094e+00, 0.1673746889662340e+01, 0.1529098769031184e+01, 0.8584564749956700e-01, 0.6164441320989392e-01, 0.1098021846213856e+01, 0.1035735139045794e+01, 0.1056099032774466e-01, 0.1379518767882891e-01, 0.1352918635969054e+01, 0.1647959260308459e+01, 0.3200390963041470e+00, 0.3071268139247320e+00, 0.1581470773231080e+01, 0.1328568794801360e+01, 0.6542811978092533e-01, 0.6053010243026304e-01, 0.1229110747859551e+01, 0.1284508213253985e+01, 0.1271308521106110e+00, 0.2146341752620367e+00, 0.2168626705413786e+01, 0.1980815310529854e+01, 0.3802606466401751e-01, 0.1414976019284330e-01, 0.7370750004754409e+00, 0.1023440211907950e+01, 0.3117182559112661e+00, 0.1405518698344743e+00, 0.4614629804947978e+00, 0.3911019567584944e+00, 
0.8139413038408401e-01, 0.3758136265851575e-01, 0.1805799067333021e+00, 0.1551841842545868e+00, 0.2338892446571373e-01, 0.2217266343062855e-01, 0.1471143614268198e+00, 0.1138202089714681e+00, 0.8179526119010886e-04, 0.5809925758690545e-04, 0.8084661071329148e-01, 0.6958524890097138e-01, 0.2173118009582292e-07, 0.3498739858072780e-08, 0.1120328867508400e-01, 0.8904308505808256e-09, 0.3751376363572422e-55, 0.3882375624867434e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 19, + ppOut: 0, + dminOut: 8.9043085058082561e-010, + desigOut: 1.7347234759768071e-018, + sigmaOut: 2.8181746216274728e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 19, + nDivOut: 402, + ttypeOut: -7, + dmin1Out: 6.9585227169791292e-002, + dmin2Out: 0.11373841371027797, + dnOut: 8.9043085058082561e-010, + dn1Out: 6.9585227169791292e-002, + dn2Out: 0.11373841371027797, + gOut: 0.0000000000000000, + tauOut: 1.1203284285913290e-002, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2463974850804771e+01, 0.2452784311062345e+01, 0.1274454348702788e-04, 0.6060609239813679e-05, 0.1166410336629578e+01, 0.1155204716501079e+01, 0.3724766654883956e-05, 0.3882375624867434e-05, 0.1204085798823124e+01, 0.1419979853975912e+01, 0.2271012218143261e+00, 0.1288536304732592e+00, 0.8056740422703550e+00, 0.7189222587736472e+00, 0.5330513126246473e-01, 0.5959804835594534e-01, 0.8037943538979316e+00, 0.7842891794360973e+00, 0.5129615818002433e-01, 0.1022287011292917e+00, 0.1563018888122747e+01, 0.1550258237777425e+01, 0.1006713350698832e+00, 0.6534828028921505e-01, 0.1006311377241790e+01, 0.1069941036637090e+01, 0.1401812239704283e+00, 0.2192904838448094e+00, 0.1673746889662340e+01, 0.1529098769031184e+01, 0.8584564749956700e-01, 0.6164441320989392e-01, 0.1098021846213856e+01, 0.1035735139045794e+01, 0.1056099032774466e-01, 0.1379518767882891e-01, 0.1352918635969054e+01, 0.1647959260308459e+01, 0.3200390963041470e+00, 0.3071268139247320e+00, 0.1581470773231080e+01, 0.1328568794801360e+01, 0.6542811978092533e-01, 0.6053010243026304e-01, 0.1229110747859551e+01, 0.1284508213253985e+01, 0.1271308521106110e+00, 0.2146341752620367e+00, 0.2168626705413786e+01, 0.1980815310529854e+01, 0.3802606466401751e-01, 0.1414976019284330e-01, 0.7370750004754409e+00, 0.1023440211907950e+01, 0.3117182559112661e+00, 0.1405518698344743e+00, 0.4614629804947978e+00, 0.3911019567584944e+00, 0.8139413038408401e-01, 0.3758136265851575e-01, 0.1805799067333021e+00, 0.1551841842545868e+00, 0.2338892446571373e-01, 0.2217266343062855e-01, 0.1471143614268198e+00, 0.1138202089714681e+00, 0.8179526119010886e-04, 0.5809925758690545e-04, 0.8084661071329148e-01, 0.6958524890097138e-01, 0.2173118009582292e-07, 0.3498739858072780e-08, 0.1120328867508400e-01, 0.8904308505808256e-09, 0.3751376363572422e-55, 0.3882375624867434e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 8.9043085058082561e-010, + desig: 1.7347234759768071e-018, + qmax: 2.4715692224139039, + ttype: -7, + dmin1: 6.9585227169791292e-002, + dmin2: 0.11373841371027797, + dn: 8.9043085058082561e-010, + dn1: 6.9585227169791292e-002, + dn2: 0.11373841371027797, + g: 0.0000000000000000, + tau: 1.1203284285913290e-002, + nFail: 2, + iter: 19, + sigma: 2.8181746216274728e-002, + nDiv: 
402, + zOut: []float64{0.2452790370781154e+01, 0.2452784311062345e+01, 0.2854399814229969e-05, 0.6060609239813679e-05, 0.1155205743586459e+01, 0.1155204716501079e+01, 0.4772219324121025e-05, 0.3882375624867434e-05, 0.1548828711339416e+01, 0.1419979853975912e+01, 0.5981019230390531e-01, 0.1288536304732592e+00, 0.7187101139352565e+00, 0.7189222587736472e+00, 0.6503610222645050e-01, 0.5959804835594534e-01, 0.8214817774485077e+00, 0.7842891794360973e+00, 0.1929207566298143e+00, 0.1022287011292917e+00, 0.1422685760546395e+01, 0.1550258237777425e+01, 0.4914564318703864e-01, 0.6534828028921505e-01, 0.1240085876404429e+01, 0.1069941036637090e+01, 0.2703980549150243e+00, 0.2192904838448094e+00, 0.1320345126435623e+01, 0.1529098769031184e+01, 0.4835651195207321e-01, 0.6164441320989392e-01, 0.1001173813882118e+01, 0.1035735139045794e+01, 0.2270725319399537e-01, 0.1379518767882891e-01, 0.1932378820148765e+01, 0.1647959260308459e+01, 0.2111589595024383e+00, 0.3071268139247320e+00, 0.1177939936838754e+01, 0.1328568794801360e+01, 0.6600626338337756e-01, 0.6053010243026304e-01, 0.1433136124242214e+01, 0.1284508213253985e+01, 0.2966575563411978e+00, 0.2146341752620367e+00, 0.1698307513491068e+01, 0.1980815310529854e+01, 0.8526979628348903e-02, 0.1414976019284330e-01, 0.1155465101223645e+01, 0.1023440211907950e+01, 0.4757401262929915e-01, 0.1405518698344743e+00, 0.3811093058972801e+00, 0.3911019567584944e+00, 0.1530278326215801e-01, 0.3758136265851575e-01, 0.1620540635326265e+00, 0.1551841842545868e+00, 0.1557318051836492e-01, 0.2217266343062855e-01, 0.9830512682025924e-01, 0.1138202089714681e+00, 0.4112553872737890e-04, 0.5809925758690545e-04, 0.6954412597055305e-01, 0.6958524890097138e-01, 0.4479725446695418e-16, 0.3498739858072780e-08, 0.1396210563637444e-18, 0.8904308505808256e-09, 0.2854399814229969e-05, 0.3882375624867434e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 19, + ppOut: 1, + dminOut: 1.3962105636374437e-019, + desigOut: -1.5687885635568321e-018, + sigmaOut: 2.8181747106705537e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 20, + nDivOut: 422, + ttypeOut: -2, + dmin1Out: 6.9544122471813200e-002, + dmin2Out: 9.8247027562672340e-002, + dnOut: 1.3962105636374437e-019, + dn1Out: 6.9544122471813200e-002, + dn2Out: 9.8247027562672340e-002, + gOut: 0.0000000000000000, + tauOut: 8.9043080564395014e-010, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2452790370781154e+01, 0.2452784311062345e+01, 0.2854399814229969e-05, 0.6060609239813679e-05, 0.1155205743586459e+01, 0.1155204716501079e+01, 0.4772219324121025e-05, 0.3882375624867434e-05, 0.1548828711339416e+01, 0.1419979853975912e+01, 0.5981019230390531e-01, 0.1288536304732592e+00, 0.7187101139352565e+00, 0.7189222587736472e+00, 0.6503610222645050e-01, 0.5959804835594534e-01, 0.8214817774485077e+00, 0.7842891794360973e+00, 0.1929207566298143e+00, 0.1022287011292917e+00, 0.1422685760546395e+01, 0.1550258237777425e+01, 0.4914564318703864e-01, 0.6534828028921505e-01, 0.1240085876404429e+01, 0.1069941036637090e+01, 0.2703980549150243e+00, 0.2192904838448094e+00, 0.1320345126435623e+01, 0.1529098769031184e+01, 0.4835651195207321e-01, 0.6164441320989392e-01, 0.1001173813882118e+01, 0.1035735139045794e+01, 0.2270725319399537e-01, 0.1379518767882891e-01, 0.1932378820148765e+01, 0.1647959260308459e+01, 0.2111589595024383e+00, 0.3071268139247320e+00, 0.1177939936838754e+01, 
0.1328568794801360e+01, 0.6600626338337756e-01, 0.6053010243026304e-01, 0.1433136124242214e+01, 0.1284508213253985e+01, 0.2966575563411978e+00, 0.2146341752620367e+00, 0.1698307513491068e+01, 0.1980815310529854e+01, 0.8526979628348903e-02, 0.1414976019284330e-01, 0.1155465101223645e+01, 0.1023440211907950e+01, 0.4757401262929915e-01, 0.1405518698344743e+00, 0.3811093058972801e+00, 0.3911019567584944e+00, 0.1530278326215801e-01, 0.3758136265851575e-01, 0.1620540635326265e+00, 0.1551841842545868e+00, 0.1557318051836492e-01, 0.2217266343062855e-01, 0.9830512682025924e-01, 0.1138202089714681e+00, 0.4112553872737890e-04, 0.5809925758690545e-04, 0.6954412597055305e-01, 0.6958524890097138e-01, 0.4479725446695418e-16, 0.3498739858072780e-08, 0.1396210563637444e-18, 0.8904308505808256e-09, 0.2854399814229969e-05, 0.3882375624867434e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 1.3962105636374437e-019, + desig: -1.5687885635568321e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 6.9544122471813200e-002, + dmin2: 9.8247027562672340e-002, + dn: 1.3962105636374437e-019, + dn1: 6.9544122471813200e-002, + dn2: 9.8247027562672340e-002, + g: 0.0000000000000000, + tau: 8.9043080564395014e-010, + nFail: 2, + iter: 20, + sigma: 2.8181747106705537e-002, + nDiv: 422, + zOut: []float64{0.2452790370781154e+01, 0.2452793225180968e+01, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1155205743586459e+01, 0.1155209171453138e+01, 0.4772219324121025e-05, 0.6398278760815103e-05, 0.1548828711339416e+01, 0.1608632505364561e+01, 0.5981019230390531e-01, 0.2672219415054498e-01, 0.7187101139352565e+00, 0.7570240220111620e+00, 0.6503610222645050e-01, 0.7057368234283014e-01, 0.8214817774485077e+00, 0.9438288517354918e+00, 0.1929207566298143e+00, 0.2908001941945216e+00, 0.1422685760546395e+01, 0.1181031209538913e+01, 0.4914564318703864e-01, 0.5160305461093759e-01, 0.1240085876404429e+01, 0.1458880876708516e+01, 0.2703980549150243e+00, 0.2447209773632920e+00, 0.1320345126435623e+01, 0.1123980661024405e+01, 0.4835651195207321e-01, 0.4307304847484577e-01, 0.1001173813882118e+01, 0.9808080186012680e+00, 0.2270725319399537e-01, 0.4473761868138884e-01, 0.1932378820148765e+01, 0.2098800160969815e+01, 0.2111589595024383e+00, 0.1185117935689049e+00, 0.1177939936838754e+01, 0.1125434406653226e+01, 0.6600626338337756e-01, 0.8405284210411720e-01, 0.1433136124242214e+01, 0.1645740838479294e+01, 0.2966575563411978e+00, 0.3061331074057168e+00, 0.1698307513491068e+01, 0.1400701385713701e+01, 0.8526979628348903e-02, 0.7034066989504621e-02, 0.1155465101223645e+01, 0.1196005046863439e+01, 0.4757401262929915e-01, 0.1515955052150448e-01, 0.3811093058972801e+00, 0.3812525386379337e+00, 0.1530278326215801e-01, 0.6504555274179689e-02, 0.1620540635326265e+00, 0.1711226887768117e+00, 0.1557318051836492e-01, 0.8946350111698947e-02, 0.9830512682025924e-01, 0.8939990224728768e-01, 0.4112553872737890e-04, 0.3199152990069927e-04, 0.6954412597055305e-01, 0.6951213444065239e-01, 0.4479725446695418e-16, 0.8997910999570377e-34, 0.1396210563637444e-18, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 19, + ppOut: 0, + dminOut: 1.3962105636374430e-019, + 
desigOut: -1.5687885635568321e-018, + sigmaOut: 2.8181747106705537e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 21, + nDivOut: 442, + ttypeOut: -2, + dmin1Out: 6.9512134440652351e-002, + dmin2Out: 8.9358776708560295e-002, + dnOut: 1.3962105636374430e-019, + dn1Out: 6.9512134440652351e-002, + dn2Out: 8.9358776708560295e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2452790370781154e+01, 0.2452793225180968e+01, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1155205743586459e+01, 0.1155209171453138e+01, 0.4772219324121025e-05, 0.6398278760815103e-05, 0.1548828711339416e+01, 0.1608632505364561e+01, 0.5981019230390531e-01, 0.2672219415054498e-01, 0.7187101139352565e+00, 0.7570240220111620e+00, 0.6503610222645050e-01, 0.7057368234283014e-01, 0.8214817774485077e+00, 0.9438288517354918e+00, 0.1929207566298143e+00, 0.2908001941945216e+00, 0.1422685760546395e+01, 0.1181031209538913e+01, 0.4914564318703864e-01, 0.5160305461093759e-01, 0.1240085876404429e+01, 0.1458880876708516e+01, 0.2703980549150243e+00, 0.2447209773632920e+00, 0.1320345126435623e+01, 0.1123980661024405e+01, 0.4835651195207321e-01, 0.4307304847484577e-01, 0.1001173813882118e+01, 0.9808080186012680e+00, 0.2270725319399537e-01, 0.4473761868138884e-01, 0.1932378820148765e+01, 0.2098800160969815e+01, 0.2111589595024383e+00, 0.1185117935689049e+00, 0.1177939936838754e+01, 0.1125434406653226e+01, 0.6600626338337756e-01, 0.8405284210411720e-01, 0.1433136124242214e+01, 0.1645740838479294e+01, 0.2966575563411978e+00, 0.3061331074057168e+00, 0.1698307513491068e+01, 0.1400701385713701e+01, 0.8526979628348903e-02, 0.7034066989504621e-02, 0.1155465101223645e+01, 0.1196005046863439e+01, 0.4757401262929915e-01, 0.1515955052150448e-01, 0.3811093058972801e+00, 0.3812525386379337e+00, 0.1530278326215801e-01, 0.6504555274179689e-02, 0.1620540635326265e+00, 0.1711226887768117e+00, 0.1557318051836492e-01, 0.8946350111698947e-02, 0.9830512682025924e-01, 0.8939990224728768e-01, 0.4112553872737890e-04, 0.3199152990069927e-04, 0.6954412597055305e-01, 0.6951213444065239e-01, 0.4479725446695418e-16, 0.8997910999570377e-34, 0.1396210563637444e-18, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 1.3962105636374430e-019, + desig: -1.5687885635568321e-018, + qmax: 2.4715692224139039, + ttype: -2, + dmin1: 6.9512134440652351e-002, + dmin2: 8.9358776708560295e-002, + dn: 1.3962105636374430e-019, + dn1: 6.9512134440652351e-002, + dn2: 8.9358776708560295e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 21, + sigma: 2.8181747106705537e-002, + nDiv: 442, + zOut: []float64{0.2384705997428018e+01, 0.2452793225180968e+01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.1087126346389419e+01, 0.1155209171453138e+01, 0.9467601652019932e-05, 0.6398278760815103e-05, 0.1567256659807859e+01, 0.1608632505364561e+01, 0.1290748567965171e-01, 0.2672219415054498e-01, 0.7466016465687454e+00, 0.7570240220111620e+00, 0.8921689079377818e-01, 0.7057368234283014e-01, 0.1077323583030640e+01, 0.9438288517354918e+00, 0.3187938243378626e+00, 0.2908001941945216e+00, 0.8457518677063927e+00, 0.1181031209538913e+01, 0.8901276181133644e-01, 0.5160305461093759e-01, 0.1546500520154876e+01, 0.1458880876708516e+01, 0.1778606876095877e+00, 
0.2447209773632920e+00, 0.9211044497840675e+00, 0.1123980661024405e+01, 0.4586493023634136e-01, 0.4307304847484577e-01, 0.9115921349407204e+00, 0.9808080186012680e+00, 0.1030014605117353e+00, 0.4473761868138884e-01, 0.2046221921921390e+01, 0.2098800160969815e+01, 0.6518220171905387e-01, 0.1185117935689049e+00, 0.1076216474932694e+01, 0.1125434406653226e+01, 0.1285328723941422e+00, 0.8405284210411720e-01, 0.1755252501385274e+01, 0.1645740838479294e+01, 0.2442959445536252e+00, 0.3061331074057168e+00, 0.1095350936043985e+01, 0.1400701385713701e+01, 0.7680442260639305e-02, 0.7034066989504621e-02, 0.1135395583018709e+01, 0.1196005046863439e+01, 0.5090399511302622e-02, 0.1515955052150448e-01, 0.3145781222952156e+00, 0.3812525386379337e+00, 0.3538316586334169e-02, 0.6504555274179689e-02, 0.1084421501965814e+00, 0.1711226887768117e+00, 0.7375387005938499e-02, 0.8946350111698947e-02, 0.1396793466565477e-01, 0.8939990224728768e-01, 0.1592074691534444e-03, 0.3199152990069927e-04, 0.1264354865903830e-02, 0.6951213444065239e-01, 0.6512368848977837e-06, 0.8997910999570377e-34, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 18, + ppOut: 1, + dminOut: 1.2643548659038301e-003, + desigOut: -5.0382355155104463e-018, + sigmaOut: 9.6270319212300656e-002, + qmaxOut: 2.4715692224139039, + nFailOut: 2, + iterOut: 22, + nDivOut: 461, + ttypeOut: -8, + dmin1Out: 1.3935943135754067e-002, + dmin2Out: 9.9495800084882416e-002, + dnOut: 1.2643548659038301e-003, + dn1Out: 1.3935943135754067e-002, + dn2Out: 9.9495800084882416e-002, + gOut: 0.0000000000000000, + tauOut: 6.8088572105595116e-002, + }, + { + i0: 1, + n0: 18, + z: []float64{0.2384705997428018e+01, 0.2452793225180968e+01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.1087126346389419e+01, 0.1155209171453138e+01, 0.9467601652019932e-05, 0.6398278760815103e-05, 0.1567256659807859e+01, 0.1608632505364561e+01, 0.1290748567965171e-01, 0.2672219415054498e-01, 0.7466016465687454e+00, 0.7570240220111620e+00, 0.8921689079377818e-01, 0.7057368234283014e-01, 0.1077323583030640e+01, 0.9438288517354918e+00, 0.3187938243378626e+00, 0.2908001941945216e+00, 0.8457518677063927e+00, 0.1181031209538913e+01, 0.8901276181133644e-01, 0.5160305461093759e-01, 0.1546500520154876e+01, 0.1458880876708516e+01, 0.1778606876095877e+00, 0.2447209773632920e+00, 0.9211044497840675e+00, 0.1123980661024405e+01, 0.4586493023634136e-01, 0.4307304847484577e-01, 0.9115921349407204e+00, 0.9808080186012680e+00, 0.1030014605117353e+00, 0.4473761868138884e-01, 0.2046221921921390e+01, 0.2098800160969815e+01, 0.6518220171905387e-01, 0.1185117935689049e+00, 0.1076216474932694e+01, 0.1125434406653226e+01, 0.1285328723941422e+00, 0.8405284210411720e-01, 0.1755252501385274e+01, 0.1645740838479294e+01, 0.2442959445536252e+00, 0.3061331074057168e+00, 0.1095350936043985e+01, 0.1400701385713701e+01, 0.7680442260639305e-02, 0.7034066989504621e-02, 0.1135395583018709e+01, 0.1196005046863439e+01, 0.5090399511302622e-02, 0.1515955052150448e-01, 0.3145781222952156e+00, 0.3812525386379337e+00, 0.3538316586334169e-02, 0.6504555274179689e-02, 0.1084421501965814e+00, 0.1711226887768117e+00, 0.7375387005938499e-02, 0.8946350111698947e-02, 0.1396793466565477e-01, 0.8939990224728768e-01, 0.1592074691534444e-03, 0.3199152990069927e-04, 0.1264354865903830e-02, 
0.6951213444065239e-01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 1.2643548659038301e-003, + desig: -5.0382355155104463e-018, + qmax: 2.3847059974280183, + ttype: -8, + dmin1: 1.3935943135754067e-002, + dmin2: 9.9495800084882416e-002, + dn: 1.2643548659038301e-003, + dn1: 1.3935943135754067e-002, + dn2: 9.9495800084882416e-002, + g: 0.0000000000000000, + tau: 6.8088572105595116e-002, + nFail: 2, + iter: 22, + sigma: 9.6270319212300656e-002, + nDiv: 461, + zOut: []float64{0.2384705997428018e+01, 0.2383460331210544e+01, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.1087126346389419e+01, 0.1085889199499347e+01, 0.9467601652019932e-05, 0.1366452649899944e-04, 0.1567256659807859e+01, 0.1578904163506652e+01, 0.1290748567965171e-01, 0.6103442048115082e-02, 0.7466016465687454e+00, 0.8284687778600495e+00, 0.8921689079377818e-01, 0.1160157908485996e+00, 0.1077323583030640e+01, 0.1278855299065544e+01, 0.3187938243378626e+00, 0.2108295383723412e+00, 0.8457518677063927e+00, 0.7226887736910288e+00, 0.8901276181133644e-01, 0.1904807262171572e+00, 0.1546500520154876e+01, 0.1532634164092948e+01, 0.1778606876095877e+00, 0.1068932656188067e+00, 0.9211044497840675e+00, 0.8588297969472432e+00, 0.4586493023634136e-01, 0.4868264913684867e-01, 0.9115921349407204e+00, 0.9646646288612479e+00, 0.1030014605117353e+00, 0.2184840619043245e+00, 0.2046221921921390e+01, 0.1891673744281760e+01, 0.6518220171905387e-01, 0.3708364593761750e-01, 0.1076216474932694e+01, 0.1166419383934860e+01, 0.1285328723941422e+00, 0.1934189785315259e+00, 0.1755252501385274e+01, 0.1804883149953014e+01, 0.2442959445536252e+00, 0.1482587897978486e+00, 0.1095350936043985e+01, 0.9535262710524167e+00, 0.7680442260639305e-02, 0.9145359161143373e-02, 0.1135395583018709e+01, 0.1130094305914509e+01, 0.5090399511302622e-02, 0.1416986451145964e-02, 0.3145781222952156e+00, 0.3154531349760448e+00, 0.3538316586334169e-02, 0.1216353924418735e-02, 0.1084421501965814e+00, 0.1133548658237420e+00, 0.7375387005938499e-02, 0.9088178357782586e-03, 0.1396793466565477e-01, 0.1197200684467082e-01, 0.1592074691534444e-03, 0.1681378409852773e-04, 0.1264354865903830e-02, 0.1223627446170173e-05, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 18, + ppOut: 0, + dminOut: 1.2236274461701734e-006, + desigOut: -3.2526065174565133e-018, + sigmaOut: 9.7516636666659787e-002, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 23, + nDivOut: 480, + ttypeOut: -2, + dmin1Out: 1.1812799375517376e-002, + dmin2Out: 0.10597947881780349, + dnOut: 1.2236274461701734e-006, + dn1Out: 1.1812799375517376e-002, + dn2Out: 0.10597947881780349, + gOut: 0.0000000000000000, + tauOut: 1.2463174543591322e-003, + }, + { + i0: 1, + n0: 18, + z: []float64{0.2384705997428018e+01, 0.2383460331210544e+01, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.1087126346389419e+01, 0.1085889199499347e+01, 0.9467601652019932e-05, 0.1366452649899944e-04, 0.1567256659807859e+01, 
0.1578904163506652e+01, 0.1290748567965171e-01, 0.6103442048115082e-02, 0.7466016465687454e+00, 0.8284687778600495e+00, 0.8921689079377818e-01, 0.1160157908485996e+00, 0.1077323583030640e+01, 0.1278855299065544e+01, 0.3187938243378626e+00, 0.2108295383723412e+00, 0.8457518677063927e+00, 0.7226887736910288e+00, 0.8901276181133644e-01, 0.1904807262171572e+00, 0.1546500520154876e+01, 0.1532634164092948e+01, 0.1778606876095877e+00, 0.1068932656188067e+00, 0.9211044497840675e+00, 0.8588297969472432e+00, 0.4586493023634136e-01, 0.4868264913684867e-01, 0.9115921349407204e+00, 0.9646646288612479e+00, 0.1030014605117353e+00, 0.2184840619043245e+00, 0.2046221921921390e+01, 0.1891673744281760e+01, 0.6518220171905387e-01, 0.3708364593761750e-01, 0.1076216474932694e+01, 0.1166419383934860e+01, 0.1285328723941422e+00, 0.1934189785315259e+00, 0.1755252501385274e+01, 0.1804883149953014e+01, 0.2442959445536252e+00, 0.1482587897978486e+00, 0.1095350936043985e+01, 0.9535262710524167e+00, 0.7680442260639305e-02, 0.9145359161143373e-02, 0.1135395583018709e+01, 0.1130094305914509e+01, 0.5090399511302622e-02, 0.1416986451145964e-02, 0.3145781222952156e+00, 0.3154531349760448e+00, 0.3538316586334169e-02, 0.1216353924418735e-02, 0.1084421501965814e+00, 0.1133548658237420e+00, 0.7375387005938499e-02, 0.9088178357782586e-03, 0.1396793466565477e-01, 0.1197200684467082e-01, 0.1592074691534444e-03, 0.1681378409852773e-04, 0.1264354865903830e-02, 0.1223627446170173e-05, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 1.2236274461701734e-006, + desig: -3.2526065174565133e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 1.1812799375517376e-002, + dmin2: 0.10597947881780349, + dn: 1.2236274461701734e-006, + dn1: 1.1812799375517376e-002, + dn2: 0.10597947881780349, + g: 0.0000000000000000, + tau: 1.2463174543591322e-003, + nFail: 2, + iter: 23, + sigma: 9.7516636666659787e-002, + nDiv: 480, + zOut: []float64{0.2383459406360114e+01, 0.2383460331210544e+01, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.1085901506809686e+01, 0.1085889199499347e+01, 0.1986826396898849e-04, 0.1366452649899944e-04, 0.1584986515403004e+01, 0.1578904163506652e+01, 0.3190255011763210e-02, 0.6103442048115082e-02, 0.9412930918090910e+00, 0.8284687778600495e+00, 0.1576208411525267e+00, 0.1160157908485996e+00, 0.1332062774397564e+01, 0.1278855299065544e+01, 0.1143821023097510e+00, 0.2108295383723412e+00, 0.7987861757106404e+00, 0.7226887736910288e+00, 0.3654761154847582e+00, 0.1904807262171572e+00, 0.1274050092339202e+01, 0.1532634164092948e+01, 0.7205613198290631e-01, 0.1068932656188067e+00, 0.8354550922133908e+00, 0.8588297969472432e+00, 0.5621179414582513e-01, 0.4868264913684867e-01, 0.1126935674731953e+01, 0.9646646288612479e+00, 0.3667472533840468e+00, 0.2184840619043245e+00, 0.1562008914947536e+01, 0.1891673744281760e+01, 0.2769195683500124e-01, 0.3708364593761750e-01, 0.1332145183743590e+01, 0.1166419383934860e+01, 0.2620575140703801e+00, 0.1934189785315259e+00, 0.1691083203792688e+01, 0.1804883149953014e+01, 0.8359650824372877e-01, 0.1482587897978486e+00, 0.8790739000820365e+00, 0.9535262710524167e+00, 0.1175682535061811e-01, 0.9145359161143373e-02, 0.1119753245127243e+01, 0.1130094305914509e+01, 0.3991886785572840e-03, 
0.1416986451145964e-02, 0.3162690783341116e+00, 0.3154531349760448e+00, 0.4359567385560517e-03, 0.1216353924418735e-02, 0.1138265050331695e+00, 0.1133548658237420e+00, 0.9558734450579545e-04, 0.9088178357782586e-03, 0.1189201139646886e-01, 0.1197200684467082e-01, 0.1730052806966466e-08, 0.1681378409852773e-04, 0.9598672791582074e-11, 0.1223627446170173e-05, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 18, + ppOut: 1, + dminOut: 9.5986727915820745e-012, + desigOut: 6.4755668817591261e-018, + sigmaOut: 9.7517858554454467e-002, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 24, + nDivOut: 499, + ttypeOut: -2, + dmin1Out: 1.1875197612370336e-002, + dmin2Out: 0.11291768719739126, + dnOut: 9.5986727915820745e-012, + dn1Out: 1.1875197612370336e-002, + dn2Out: 0.11291768719739126, + gOut: 0.0000000000000000, + tauOut: 1.2218877946904154e-006, + }, + { + i0: 1, + n0: 18, + z: []float64{0.2383459406360114e+01, 0.2383460331210544e+01, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.1085901506809686e+01, 0.1085889199499347e+01, 0.1986826396898849e-04, 0.1366452649899944e-04, 0.1584986515403004e+01, 0.1578904163506652e+01, 0.3190255011763210e-02, 0.6103442048115082e-02, 0.9412930918090910e+00, 0.8284687778600495e+00, 0.1576208411525267e+00, 0.1160157908485996e+00, 0.1332062774397564e+01, 0.1278855299065544e+01, 0.1143821023097510e+00, 0.2108295383723412e+00, 0.7987861757106404e+00, 0.7226887736910288e+00, 0.3654761154847582e+00, 0.1904807262171572e+00, 0.1274050092339202e+01, 0.1532634164092948e+01, 0.7205613198290631e-01, 0.1068932656188067e+00, 0.8354550922133908e+00, 0.8588297969472432e+00, 0.5621179414582513e-01, 0.4868264913684867e-01, 0.1126935674731953e+01, 0.9646646288612479e+00, 0.3667472533840468e+00, 0.2184840619043245e+00, 0.1562008914947536e+01, 0.1891673744281760e+01, 0.2769195683500124e-01, 0.3708364593761750e-01, 0.1332145183743590e+01, 0.1166419383934860e+01, 0.2620575140703801e+00, 0.1934189785315259e+00, 0.1691083203792688e+01, 0.1804883149953014e+01, 0.8359650824372877e-01, 0.1482587897978486e+00, 0.8790739000820365e+00, 0.9535262710524167e+00, 0.1175682535061811e-01, 0.9145359161143373e-02, 0.1119753245127243e+01, 0.1130094305914509e+01, 0.3991886785572840e-03, 0.1416986451145964e-02, 0.3162690783341116e+00, 0.3154531349760448e+00, 0.4359567385560517e-03, 0.1216353924418735e-02, 0.1138265050331695e+00, 0.1133548658237420e+00, 0.9558734450579545e-04, 0.9088178357782586e-03, 0.1189201139646886e-01, 0.1197200684467082e-01, 0.1730052806966466e-08, 0.1681378409852773e-04, 0.9598672791582074e-11, 0.1223627446170173e-05, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 9.5986727915820745e-012, + desig: 6.4755668817591261e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 1.1875197612370336e-002, + dmin2: 0.11291768719739126, + dn: 9.5986727915820745e-012, + dn1: 1.1875197612370336e-002, + dn2: 0.11291768719739126, + g: 0.0000000000000000, + tau: 
1.2218877946904154e-006, + nFail: 2, + iter: 24, + sigma: 9.7517858554454467e-002, + nDiv: 499, + zOut: []float64{0.2383459406360114e+01, 0.2383459541678881e+01, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.1085901506809686e+01, 0.1085921313408604e+01, 0.1986826396898849e-04, 0.2899927470478232e-04, 0.1584986515403004e+01, 0.1588147771130463e+01, 0.3190255011763210e-02, 0.1890859942802736e-02, 0.9412930918090910e+00, 0.1097023073009216e+01, 0.1576208411525267e+00, 0.1913914667196325e+00, 0.1332062774397564e+01, 0.1255053409978084e+01, 0.1143821023097510e+00, 0.7279916643176543e-01, 0.7987861757106404e+00, 0.1091463124754034e+01, 0.3654761154847582e+00, 0.4266153094141972e+00, 0.1274050092339202e+01, 0.9194909148983121e+00, 0.7205613198290631e-01, 0.6547064404326042e-01, 0.8354550922133908e+00, 0.8261962423063568e+00, 0.5621179414582513e-01, 0.7667315937770838e-01, 0.1126935674731953e+01, 0.1417009768728692e+01, 0.3667472533840468e+00, 0.4042756034295816e+00, 0.1562008914947536e+01, 0.1185425268343357e+01, 0.2769195683500124e-01, 0.3111938635974581e-01, 0.1332145183743590e+01, 0.1563083311444625e+01, 0.2620575140703801e+00, 0.2835172362390010e+00, 0.1691083203792688e+01, 0.1491162475787817e+01, 0.8359650824372877e-01, 0.4928202642453804e-01, 0.8790739000820365e+00, 0.8415486989985177e+00, 0.1175682535061811e-01, 0.1564347179719430e-01, 0.1119753245127243e+01, 0.1104508961999007e+01, 0.3991886785572840e-03, 0.1143051254380294e-03, 0.3162690783341116e+00, 0.3165907299376309e+00, 0.4359567385560517e-03, 0.1567431614478116e-03, 0.1138265050331695e+00, 0.1137653492066288e+00, 0.9558734450579545e-04, 0.9991845479738400e-05, 0.1189201139646886e-01, 0.1188202127144326e-01, 0.1730052806966466e-08, 0.1397591405272086e-17, 0.9598672791582074e-11, 0.6617412588681544e-21, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 18, + ppOut: 0, + dminOut: 6.6174125886815435e-022, + desigOut: -5.0906815225379586e-018, + sigmaOut: 9.7517858564053150e-002, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 25, + nDivOut: 518, + ttypeOut: -2, + dmin1Out: 1.1882019541390456e-002, + dmin2Out: 0.11366976186212303, + dnOut: 6.6174125886815435e-022, + dn1Out: 1.1882019541390456e-002, + dn2Out: 0.11366976186212303, + gOut: 0.0000000000000000, + tauOut: 9.5986713933289272e-012, + }, + { + i0: 1, + n0: 18, + z: []float64{0.2383459406360114e+01, 0.2383459541678881e+01, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.1085901506809686e+01, 0.1085921313408604e+01, 0.1986826396898849e-04, 0.2899927470478232e-04, 0.1584986515403004e+01, 0.1588147771130463e+01, 0.3190255011763210e-02, 0.1890859942802736e-02, 0.9412930918090910e+00, 0.1097023073009216e+01, 0.1576208411525267e+00, 0.1913914667196325e+00, 0.1332062774397564e+01, 0.1255053409978084e+01, 0.1143821023097510e+00, 0.7279916643176543e-01, 0.7987861757106404e+00, 0.1091463124754034e+01, 0.3654761154847582e+00, 0.4266153094141972e+00, 0.1274050092339202e+01, 0.9194909148983121e+00, 0.7205613198290631e-01, 0.6547064404326042e-01, 0.8354550922133908e+00, 0.8261962423063568e+00, 0.5621179414582513e-01, 0.7667315937770838e-01, 0.1126935674731953e+01, 0.1417009768728692e+01, 0.3667472533840468e+00, 0.4042756034295816e+00, 0.1562008914947536e+01, 
0.1185425268343357e+01, 0.2769195683500124e-01, 0.3111938635974581e-01, 0.1332145183743590e+01, 0.1563083311444625e+01, 0.2620575140703801e+00, 0.2835172362390010e+00, 0.1691083203792688e+01, 0.1491162475787817e+01, 0.8359650824372877e-01, 0.4928202642453804e-01, 0.8790739000820365e+00, 0.8415486989985177e+00, 0.1175682535061811e-01, 0.1564347179719430e-01, 0.1119753245127243e+01, 0.1104508961999007e+01, 0.3991886785572840e-03, 0.1143051254380294e-03, 0.3162690783341116e+00, 0.3165907299376309e+00, 0.4359567385560517e-03, 0.1567431614478116e-03, 0.1138265050331695e+00, 0.1137653492066288e+00, 0.9558734450579545e-04, 0.9991845479738400e-05, 0.1189201139646886e-01, 0.1188202127144326e-01, 0.1730052806966466e-08, 0.1397591405272086e-17, 0.9598672791582074e-11, 0.6617412588681544e-21, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 6.6174125886815435e-022, + desig: -5.0906815225379586e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 1.1882019541390456e-002, + dmin2: 0.11366976186212303, + dn: 6.6174125886815435e-022, + dn1: 1.1882019541390456e-002, + dn2: 0.11366976186212303, + g: 0.0000000000000000, + tau: 9.5986713933289272e-012, + nFail: 2, + iter: 25, + sigma: 9.7517858564053150e-002, + nDiv: 518, + zOut: []float64{0.2383459603334333e+01, 0.2383459541678881e+01, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.1085950284592642e+01, 0.1085921313408604e+01, 0.4240998334843304e-04, 0.2899927470478232e-04, 0.1589996221089918e+01, 0.1588147771130463e+01, 0.1304604978030436e-02, 0.1890859942802736e-02, 0.1287109934750819e+01, 0.1097023073009216e+01, 0.1866247058326724e+00, 0.1913914667196325e+00, 0.1141227870577177e+01, 0.1255053409978084e+01, 0.6962466280543776e-01, 0.7279916643176543e-01, 0.1448453771362794e+01, 0.1091463124754034e+01, 0.2708190685256155e+00, 0.4266153094141972e+00, 0.7141424904159570e+00, 0.9194909148983121e+00, 0.7574342769943966e-01, 0.6547064404326042e-01, 0.8271259739846255e+00, 0.8261962423063568e+00, 0.1313543755784947e+00, 0.7667315937770838e-01, 0.1689930996579779e+01, 0.1417009768728692e+01, 0.2835846650840220e+00, 0.4042756034295816e+00, 0.9329599896190804e+00, 0.1185425268343357e+01, 0.5213749144931325e-01, 0.3111938635974581e-01, 0.1794463056234313e+01, 0.1563083311444625e+01, 0.2355970842920850e+00, 0.2835172362390010e+00, 0.1304847417920270e+01, 0.1491162475787817e+01, 0.3178396542921673e-01, 0.4928202642453804e-01, 0.8254082053664953e+00, 0.8415486989985177e+00, 0.2093310277804656e-01, 0.1564347179719430e-01, 0.1083690164346398e+01, 0.1104508961999007e+01, 0.3339325601415243e-04, 0.1143051254380294e-03, 0.3167140798430647e+00, 0.3165907299376309e+00, 0.5630296103885627e-04, 0.1567431614478116e-03, 0.1137190380910697e+00, 0.1137653492066288e+00, 0.1044005669799533e-05, 0.9991845479738400e-05, 0.1188097726577346e-01, 0.1188202127144326e-01, 0.7784240935906335e-37, 0.1397591405272086e-17, 0.6617412588681543e-21, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 
0.1620493249248586e-02}, + i0Out: 1, + n0Out: 18, + ppOut: 1, + dminOut: 6.6174125886815426e-022, + desigOut: -5.0906815225379586e-018, + sigmaOut: 9.7517858564053150e-002, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 26, + nDivOut: 537, + ttypeOut: -2, + dmin1Out: 1.1880977265773463e-002, + dmin2Out: 0.11370904624558997, + dnOut: 6.6174125886815426e-022, + dn1Out: 1.1880977265773463e-002, + dn2Out: 0.11370904624558997, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 18, + z: []float64{0.2383459603334333e+01, 0.2383459541678881e+01, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.1085950284592642e+01, 0.1085921313408604e+01, 0.4240998334843304e-04, 0.2899927470478232e-04, 0.1589996221089918e+01, 0.1588147771130463e+01, 0.1304604978030436e-02, 0.1890859942802736e-02, 0.1287109934750819e+01, 0.1097023073009216e+01, 0.1866247058326724e+00, 0.1913914667196325e+00, 0.1141227870577177e+01, 0.1255053409978084e+01, 0.6962466280543776e-01, 0.7279916643176543e-01, 0.1448453771362794e+01, 0.1091463124754034e+01, 0.2708190685256155e+00, 0.4266153094141972e+00, 0.7141424904159570e+00, 0.9194909148983121e+00, 0.7574342769943966e-01, 0.6547064404326042e-01, 0.8271259739846255e+00, 0.8261962423063568e+00, 0.1313543755784947e+00, 0.7667315937770838e-01, 0.1689930996579779e+01, 0.1417009768728692e+01, 0.2835846650840220e+00, 0.4042756034295816e+00, 0.9329599896190804e+00, 0.1185425268343357e+01, 0.5213749144931325e-01, 0.3111938635974581e-01, 0.1794463056234313e+01, 0.1563083311444625e+01, 0.2355970842920850e+00, 0.2835172362390010e+00, 0.1304847417920270e+01, 0.1491162475787817e+01, 0.3178396542921673e-01, 0.4928202642453804e-01, 0.8254082053664953e+00, 0.8415486989985177e+00, 0.2093310277804656e-01, 0.1564347179719430e-01, 0.1083690164346398e+01, 0.1104508961999007e+01, 0.3339325601415243e-04, 0.1143051254380294e-03, 0.3167140798430647e+00, 0.3165907299376309e+00, 0.5630296103885627e-04, 0.1567431614478116e-03, 0.1137190380910697e+00, 0.1137653492066288e+00, 0.1044005669799533e-05, 0.9991845479738400e-05, 0.1188097726577346e-01, 0.1188202127144326e-01, 0.7784240935906335e-37, 0.1397591405272086e-17, 0.6617412588681543e-21, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 6.6174125886815426e-022, + desig: -5.0906815225379586e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 1.1880977265773463e-002, + dmin2: 0.11370904624558997, + dn: 6.6174125886815426e-022, + dn1: 1.1880977265773463e-002, + dn2: 0.11370904624558997, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 26, + sigma: 9.7517858564053150e-002, + nDiv: 537, + zOut: []float64{0.2383459603334333e+01, 0.2371578799269292e+01, 0.2809066689524106e-07, 0.1286276792433928e-07, 0.1085950284592642e+01, 0.1074111849557515e+01, 0.4240998334843304e-04, 0.6277904232066118e-04, 0.1589996221089918e+01, 0.1579357214869920e+01, 0.1304604978030436e-02, 0.1063198377376995e-02, 0.1287109934750819e+01, 0.1460790610050406e+01, 0.1866247058326724e+00, 0.1457986614708343e+00, 0.1141227870577177e+01, 0.1053173039756072e+01, 0.6962466280543776e-01, 0.9575644420574685e-01, 0.1448453771362794e+01, 0.1611635563526955e+01, 0.2708190685256155e+00, 0.1200044280642223e+00, 
0.7141424904159570e+00, 0.6580006578954666e+00, 0.7574342769943966e-01, 0.9521169265880262e-01, 0.8271259739846255e+00, 0.8513878247486099e+00, 0.1313543755784947e+00, 0.2607270439790754e+00, 0.1689930996579779e+01, 0.1700907785529018e+01, 0.2835846650840220e+00, 0.1555482010511415e+00, 0.9329599896190804e+00, 0.8176684478615445e+00, 0.5213749144931325e-01, 0.1144214412274440e+00, 0.1794463056234313e+01, 0.1903757867143247e+01, 0.2355970842920850e+00, 0.1614796988702029e+00, 0.1304847417920270e+01, 0.1163270852323576e+01, 0.3178396542921673e-01, 0.2255256874351997e-01, 0.8254082053664953e+00, 0.8119079072453143e+00, 0.2093310277804656e-01, 0.2794035799797570e-01, 0.1083690164346398e+01, 0.1043902367448729e+01, 0.3339325601415243e-04, 0.1013132519024162e-04, 0.3167140798430647e+00, 0.3048794193232055e+00, 0.5630296103885627e-04, 0.2100082250625822e-04, 0.1137190380910697e+00, 0.1018182491185255e+00, 0.1044005669799533e-05, 0.1218230301111122e-06, 0.1188097726577346e-01, 0.2328703557243073e-07, 0.7784240935906335e-37, 0.1286276792433928e-07, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 17, + ppOut: 0, + dminOut: 2.3287035572430725e-008, + desigOut: 1.7347234759768071e-018, + sigmaOut: 0.10939869071976092, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 27, + nDivOut: 555, + ttypeOut: -7, + dmin1Out: 0.10181720511285566, + dmin2Out: 0.30482311636216664, + dnOut: 2.3287035572430725e-008, + dn1Out: 0.10181720511285566, + dn2Out: 0.30482311636216664, + gOut: 0.0000000000000000, + tauOut: 1.1880832155707781e-002, + }, + { + i0: 1, + n0: 17, + z: []float64{0.2383459603334333e+01, 0.2371578799269292e+01, 0.2809066689524106e-07, 0.1286276792433928e-07, 0.1085950284592642e+01, 0.1074111849557515e+01, 0.4240998334843304e-04, 0.6277904232066118e-04, 0.1589996221089918e+01, 0.1579357214869920e+01, 0.1304604978030436e-02, 0.1063198377376995e-02, 0.1287109934750819e+01, 0.1460790610050406e+01, 0.1866247058326724e+00, 0.1457986614708343e+00, 0.1141227870577177e+01, 0.1053173039756072e+01, 0.6962466280543776e-01, 0.9575644420574685e-01, 0.1448453771362794e+01, 0.1611635563526955e+01, 0.2708190685256155e+00, 0.1200044280642223e+00, 0.7141424904159570e+00, 0.6580006578954666e+00, 0.7574342769943966e-01, 0.9521169265880262e-01, 0.8271259739846255e+00, 0.8513878247486099e+00, 0.1313543755784947e+00, 0.2607270439790754e+00, 0.1689930996579779e+01, 0.1700907785529018e+01, 0.2835846650840220e+00, 0.1555482010511415e+00, 0.9329599896190804e+00, 0.8176684478615445e+00, 0.5213749144931325e-01, 0.1144214412274440e+00, 0.1794463056234313e+01, 0.1903757867143247e+01, 0.2355970842920850e+00, 0.1614796988702029e+00, 0.1304847417920270e+01, 0.1163270852323576e+01, 0.3178396542921673e-01, 0.2255256874351997e-01, 0.8254082053664953e+00, 0.8119079072453143e+00, 0.2093310277804656e-01, 0.2794035799797570e-01, 0.1083690164346398e+01, 0.1043902367448729e+01, 0.3339325601415243e-04, 0.1013132519024162e-04, 0.3167140798430647e+00, 0.3048794193232055e+00, 0.5630296103885627e-04, 0.2100082250625822e-04, 0.1137190380910697e+00, 0.1018182491185255e+00, 0.1044005669799533e-05, 0.1218230301111122e-06, 0.1188097726577346e-01, 0.2328703557243073e-07, 0.7784240935906335e-37, 
0.1286276792433928e-07, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 2.3287035572430725e-008, + desig: 1.7347234759768071e-018, + qmax: 2.3847059974280183, + ttype: -7, + dmin1: 0.10181720511285566, + dmin2: 0.30482311636216664, + dn: 2.3287035572430725e-008, + dn1: 0.10181720511285566, + dn2: 0.30482311636216664, + g: 0.0000000000000000, + tau: 1.1880832155707781e-002, + nFail: 2, + iter: 27, + sigma: 0.10939869071976092, + nDiv: 555, + zOut: []float64{0.2371578788845052e+01, 0.2371578799269292e+01, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.1074174599487151e+01, 0.1074111849557515e+01, 0.9230392664199863e-04, 0.6277904232066118e-04, 0.1580328086033647e+01, 0.1579357214869920e+01, 0.9827770701659698e-03, 0.1063198377376995e-02, 0.1605606471164067e+01, 0.1460790610050406e+01, 0.9563440497488795e-01, 0.1457986614708343e+00, 0.1053295055699924e+01, 0.1053173039756072e+01, 0.1465159169633776e+00, 0.9575644420574685e-01, 0.1585124051340792e+01, 0.1611635563526955e+01, 0.4981502397231050e-01, 0.1200044280642223e+00, 0.7033973032949510e+00, 0.6580006578954666e+00, 0.1152436546510613e+00, 0.9521169265880262e-01, 0.9968711907896164e+00, 0.8513878247486099e+00, 0.4448645553200346e+00, 0.2607270439790754e+00, 0.1411591407973117e+01, 0.1700907785529018e+01, 0.9010174998427344e-01, 0.1555482010511415e+00, 0.8419881158177074e+00, 0.8176684478615445e+00, 0.2587099684834221e+00, 0.1144214412274440e+00, 0.1806527574243020e+01, 0.1903757867143247e+01, 0.1039810460775319e+00, 0.1614796988702029e+00, 0.1081842351702556e+01, 0.1163270852323576e+01, 0.1692539477932339e-01, 0.2255256874351997e-01, 0.8229228471769590e+00, 0.8119079072453143e+00, 0.3544318396494814e-01, 0.2794035799797570e-01, 0.1008469291521964e+01, 0.1043902367448729e+01, 0.3062892015595061e-05, 0.1013132519024162e-04, 0.3048973339666884e+00, 0.3048794193232055e+00, 0.7013072071892081e-05, 0.2100082250625822e-04, 0.1018113345824760e+00, 0.1018182491185255e+00, 0.2786425742647189e-13, 0.1218230301111122e-06, 0.2696165428114542e-17, 0.2328703557243073e-07, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 17, + ppOut: 1, + dminOut: 2.6961654281145418e-018, + desigOut: -3.8251180717391650e-018, + sigmaOut: 0.10939871400676864, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 28, + nDivOut: 573, + ttypeOut: -2, + dmin1Out: 0.10181121275944585, + dmin2Out: 0.30487633314418217, + dnOut: 2.6961654281145418e-018, + dn1Out: 0.10181121275944585, + dn2Out: 0.30487633314418217, + gOut: 0.0000000000000000, + tauOut: 2.3287007705477136e-008, + }, + { + i0: 1, + n0: 17, + z: []float64{0.2371578788845052e+01, 0.2371578799269292e+01, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.1074174599487151e+01, 0.1074111849557515e+01, 0.9230392664199863e-04, 0.6277904232066118e-04, 0.1580328086033647e+01, 
0.1579357214869920e+01, 0.9827770701659698e-03, 0.1063198377376995e-02, 0.1605606471164067e+01, 0.1460790610050406e+01, 0.9563440497488795e-01, 0.1457986614708343e+00, 0.1053295055699924e+01, 0.1053173039756072e+01, 0.1465159169633776e+00, 0.9575644420574685e-01, 0.1585124051340792e+01, 0.1611635563526955e+01, 0.4981502397231050e-01, 0.1200044280642223e+00, 0.7033973032949510e+00, 0.6580006578954666e+00, 0.1152436546510613e+00, 0.9521169265880262e-01, 0.9968711907896164e+00, 0.8513878247486099e+00, 0.4448645553200346e+00, 0.2607270439790754e+00, 0.1411591407973117e+01, 0.1700907785529018e+01, 0.9010174998427344e-01, 0.1555482010511415e+00, 0.8419881158177074e+00, 0.8176684478615445e+00, 0.2587099684834221e+00, 0.1144214412274440e+00, 0.1806527574243020e+01, 0.1903757867143247e+01, 0.1039810460775319e+00, 0.1614796988702029e+00, 0.1081842351702556e+01, 0.1163270852323576e+01, 0.1692539477932339e-01, 0.2255256874351997e-01, 0.8229228471769590e+00, 0.8119079072453143e+00, 0.3544318396494814e-01, 0.2794035799797570e-01, 0.1008469291521964e+01, 0.1043902367448729e+01, 0.3062892015595061e-05, 0.1013132519024162e-04, 0.3048973339666884e+00, 0.3048794193232055e+00, 0.7013072071892081e-05, 0.2100082250625822e-04, 0.1018113345824760e+00, 0.1018182491185255e+00, 0.2786425742647189e-13, 0.1218230301111122e-06, 0.2696165428114542e-17, 0.2328703557243073e-07, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 2.6961654281145418e-018, + desig: -3.8251180717391650e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 0.10181121275944585, + dmin2: 0.30487633314418217, + dn: 2.6961654281145418e-018, + dn1: 0.10181121275944585, + dn2: 0.30487633314418217, + g: 0.0000000000000000, + tau: 2.3287007705477136e-008, + nFail: 2, + iter: 28, + sigma: 0.10939871400676864, + nDiv: 573, + zOut: []float64{0.2371578788845052e+01, 0.2371578794670729e+01, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.1074174599487151e+01, 0.1074266900775131e+01, 0.9230392664199863e-04, 0.1357860766428602e-03, 0.1580328086033647e+01, 0.1581175077027170e+01, 0.9827770701659698e-03, 0.9979623676695663e-03, 0.1605606471164067e+01, 0.1700242913771285e+01, 0.9563440497488795e-01, 0.5924520849284005e-01, 0.1053295055699924e+01, 0.1140565764170461e+01, 0.1465159169633776e+00, 0.2036234219705987e+00, 0.1585124051340792e+01, 0.1431315653342504e+01, 0.4981502397231050e-01, 0.2448080089382756e-01, 0.7033973032949510e+00, 0.7941601570521848e+00, 0.1152436546510613e+00, 0.1446598374682775e+00, 0.9968711907896164e+00, 0.1297075908641373e+01, 0.4448645553200346e+00, 0.4841405038964208e+00, 0.1411591407973117e+01, 0.1017552654060970e+01, 0.9010174998427344e-01, 0.7455594793877945e-01, 0.8419881158177074e+00, 0.1026142136362350e+01, 0.2587099684834221e+00, 0.4554599945126984e+00, 0.1806527574243020e+01, 0.1455048625807853e+01, 0.1039810460775319e+00, 0.7731088667813635e-01, 0.1081842351702556e+01, 0.1021456859803743e+01, 0.1692539477932339e-01, 0.1363571444815687e-01, 0.8229228471769590e+00, 0.8447303166937503e+00, 0.3544318396494814e-01, 0.4231334180394086e-01, 0.1008469291521964e+01, 0.9661590126100381e+00, 0.3062892015595061e-05, 0.9665775484099522e-06, 
0.3048973339666884e+00, 0.3049033804612119e+00, 0.7013072071892081e-05, 0.2341758973227438e-05, 0.1018113345824760e+00, 0.1018089928235306e+00, 0.2786425742647189e-13, 0.7379175991216932e-30, 0.2696165428114542e-17, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 17, + ppOut: 0, + dminOut: 2.6961654281138038e-018, + desigOut: -3.8251180717391650e-018, + sigmaOut: 0.10939871400676864, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 29, + nDivOut: 591, + ttypeOut: -2, + dmin1Out: 0.10180899282350273, + dmin2Out: 0.30489636738914000, + dnOut: 2.6961654281138038e-018, + dn1Out: 0.10180899282350273, + dn2Out: 0.30489636738914000, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 17, + z: []float64{0.2371578788845052e+01, 0.2371578794670729e+01, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.1074174599487151e+01, 0.1074266900775131e+01, 0.9230392664199863e-04, 0.1357860766428602e-03, 0.1580328086033647e+01, 0.1581175077027170e+01, 0.9827770701659698e-03, 0.9979623676695663e-03, 0.1605606471164067e+01, 0.1700242913771285e+01, 0.9563440497488795e-01, 0.5924520849284005e-01, 0.1053295055699924e+01, 0.1140565764170461e+01, 0.1465159169633776e+00, 0.2036234219705987e+00, 0.1585124051340792e+01, 0.1431315653342504e+01, 0.4981502397231050e-01, 0.2448080089382756e-01, 0.7033973032949510e+00, 0.7941601570521848e+00, 0.1152436546510613e+00, 0.1446598374682775e+00, 0.9968711907896164e+00, 0.1297075908641373e+01, 0.4448645553200346e+00, 0.4841405038964208e+00, 0.1411591407973117e+01, 0.1017552654060970e+01, 0.9010174998427344e-01, 0.7455594793877945e-01, 0.8419881158177074e+00, 0.1026142136362350e+01, 0.2587099684834221e+00, 0.4554599945126984e+00, 0.1806527574243020e+01, 0.1455048625807853e+01, 0.1039810460775319e+00, 0.7731088667813635e-01, 0.1081842351702556e+01, 0.1021456859803743e+01, 0.1692539477932339e-01, 0.1363571444815687e-01, 0.8229228471769590e+00, 0.8447303166937503e+00, 0.3544318396494814e-01, 0.4231334180394086e-01, 0.1008469291521964e+01, 0.9661590126100381e+00, 0.3062892015595061e-05, 0.9665775484099522e-06, 0.3048973339666884e+00, 0.3049033804612119e+00, 0.7013072071892081e-05, 0.2341758973227438e-05, 0.1018113345824760e+00, 0.1018089928235306e+00, 0.2786425742647189e-13, 0.7379175991216932e-30, 0.2696165428114542e-17, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 2.6961654281138038e-018, + desig: -3.8251180717391650e-018, + qmax: 2.3847059974280183, + ttype: -2, + dmin1: 0.10180899282350273, + dmin2: 0.30489636738914000, + dn: 2.6961654281138038e-018, + dn1: 0.10180899282350273, + dn2: 0.30489636738914000, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 29, + sigma: 
0.10939871400676864, + nDiv: 591, + zOut: []float64{0.2269772292606755e+01, 0.2371578794670729e+01, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.9725961809002787e+00, 0.1074266900775131e+01, 0.2207509801202943e-03, 0.1357860766428602e-03, 0.1480145783712083e+01, 0.1581175077027170e+01, 0.1146359002276934e-02, 0.9979623676695663e-03, 0.1656535258559212e+01, 0.1700242913771285e+01, 0.4079180092843103e-01, 0.5924520849284005e-01, 0.1201590880509993e+01, 0.1140565764170461e+01, 0.2425529321011353e+00, 0.2036234219705987e+00, 0.1111437017432560e+01, 0.1431315653342504e+01, 0.1749237822536809e-01, 0.2448080089382756e-01, 0.8195211115924584e+00, 0.7941601570521848e+00, 0.2289566278084960e+00, 0.1446598374682775e+00, 0.1450453280026662e+01, 0.1297075908641373e+01, 0.3396444831847066e+00, 0.4841405038964208e+00, 0.6506576141124067e+00, 0.1017552654060970e+01, 0.1175810411452473e+00, 0.7455594793877945e-01, 0.1262214585027166e+01, 0.1026142136362350e+01, 0.5250426092262996e+00, 0.4554599945126984e+00, 0.9055103985570538e+00, 0.1455048625807853e+01, 0.8721019179982008e-01, 0.7731088667813635e-01, 0.8460758777494440e+00, 0.1021456859803743e+01, 0.1361402882064932e-01, 0.1363571444815687e-01, 0.7716231249744060e+00, 0.8447303166937503e+00, 0.5298106707064092e-01, 0.4231334180394086e-01, 0.8113724074143097e+00, 0.9661590126100381e+00, 0.3632274887524200e-06, 0.9665775484099522e-06, 0.2030988542900605e+00, 0.3049033804612119e+00, 0.1173872316183806e-05, 0.2341758973227438e-05, 0.1314248578535016e-05, 0.1018089928235306e+00, 0.1248859578385863e-08, 0.7379175991216932e-30, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 16, + ppOut: 1, + dminOut: 1.3142485785350155e-006, + desigOut: 0.0000000000000000, + sigmaOut: 0.21120521870940451, + qmaxOut: 2.3847059974280183, + nFailOut: 2, + iterOut: 30, + nDivOut: 608, + ttypeOut: -7, + dmin1Out: 0.20309651253108729, + dmin2Out: 0.57610166617362735, + dnOut: 1.3142485785350155e-006, + dn1Out: 0.20309651253108729, + dn2Out: 0.81137144083676127, + gOut: 0.0000000000000000, + tauOut: 0.10180650470263587, + }, + { + i0: 1, + n0: 16, + z: []float64{0.2269772292606755e+01, 0.2371578794670729e+01, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.9725961809002787e+00, 0.1074266900775131e+01, 0.2207509801202943e-03, 0.1357860766428602e-03, 0.1480145783712083e+01, 0.1581175077027170e+01, 0.1146359002276934e-02, 0.9979623676695663e-03, 0.1656535258559212e+01, 0.1700242913771285e+01, 0.4079180092843103e-01, 0.5924520849284005e-01, 0.1201590880509993e+01, 0.1140565764170461e+01, 0.2425529321011353e+00, 0.2036234219705987e+00, 0.1111437017432560e+01, 0.1431315653342504e+01, 0.1749237822536809e-01, 0.2448080089382756e-01, 0.8195211115924584e+00, 0.7941601570521848e+00, 0.2289566278084960e+00, 0.1446598374682775e+00, 0.1450453280026662e+01, 0.1297075908641373e+01, 0.3396444831847066e+00, 0.4841405038964208e+00, 0.6506576141124067e+00, 0.1017552654060970e+01, 0.1175810411452473e+00, 0.7455594793877945e-01, 0.1262214585027166e+01, 0.1026142136362350e+01, 0.5250426092262996e+00, 0.4554599945126984e+00, 0.9055103985570538e+00, 
0.1455048625807853e+01, 0.8721019179982008e-01, 0.7731088667813635e-01, 0.8460758777494440e+00, 0.1021456859803743e+01, 0.1361402882064932e-01, 0.1363571444815687e-01, 0.7716231249744060e+00, 0.8447303166937503e+00, 0.5298106707064092e-01, 0.4231334180394086e-01, 0.8113724074143097e+00, 0.9661590126100381e+00, 0.3632274887524200e-06, 0.9665775484099522e-06, 0.2030988542900605e+00, 0.3049033804612119e+00, 0.1173872316183806e-05, 0.2341758973227438e-05, 0.1314248578535016e-05, 0.1018089928235306e+00, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 1.3142485785350155e-006, + desig: 0.0000000000000000, + qmax: 2.2697722926067549, + ttype: -7, + dmin1: 0.20309651253108729, + dmin2: 0.57610166617362735, + dn: 1.3142485785350155e-006, + dn1: 0.20309651253108729, + dn2: 0.81137144083676127, + g: 0.0000000000000000, + tau: 0.10180650470263587, + nFail: 2, + iter: 30, + sigma: 0.21120521870940451, + nDiv: 608, + zOut: []float64{0.2269772292606755e+01, 0.2269770979614632e+01, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.9725961809002787e+00, 0.9728156171042807e+00, 0.2207509801202943e-03, 0.3358741643642203e-03, 0.1480145783712083e+01, 0.1480954954309013e+01, 0.1146359002276934e-02, 0.1282269997958535e-02, 0.1656535258559212e+01, 0.1696043475248702e+01, 0.4079180092843103e-01, 0.2889964597634762e-01, 0.1201590880509993e+01, 0.1415242852393798e+01, 0.2425529321011353e+00, 0.1904848393814713e+00, 0.1111437017432560e+01, 0.9384432420354740e+00, 0.1749237822536809e-01, 0.1527569554079381e-01, 0.8195211115924584e+00, 0.1033200729619178e+01, 0.2289566278084960e+00, 0.3214195289148511e+00, 0.1450453280026662e+01, 0.1468676920055535e+01, 0.3396444831847066e+00, 0.1504703083827628e+00, 0.6506576141124067e+00, 0.6177670326339089e+00, 0.1175810411452473e+00, 0.2402402478867145e+00, 0.1262214585027166e+01, 0.1547015632125768e+01, 0.5250426092262996e+00, 0.3073217441808570e+00, 0.9055103985570538e+00, 0.6853975319350345e+00, 0.8721019179982008e-01, 0.1076549537133787e+00, 0.8460758777494440e+00, 0.7520336386157322e+00, 0.1361402882064932e-01, 0.1396865635082151e-01, 0.7716231249744060e+00, 0.8106342214532430e+00, 0.5298106707064092e-01, 0.5302931309687425e-01, 0.8113724074143097e+00, 0.7583421433039418e+00, 0.3632274887524200e-06, 0.9727942389020712e-07, 0.2030988542900605e+00, 0.2030986166419704e+00, 0.1173872316183806e-05, 0.7596112905317353e-11, 0.1314248578535016e-05, 0.8411884199182457e-17, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 16, + ppOut: 0, + dminOut: 8.4118841991824567e-018, + desigOut: 
1.0739530738237274e-017, + sigmaOut: 0.21120653295038691, + qmaxOut: 2.2697722926067549, + nFailOut: 2, + iterOut: 31, + nDivOut: 625, + ttypeOut: -2, + dmin1Out: 0.20309744276965425, + dmin2Out: 0.50018599148866161, + dnOut: 8.4118841991824567e-018, + dn1Out: 0.20309744276965425, + dn2Out: 0.75834178007645303, + gOut: 0.0000000000000000, + tauOut: 1.3142409824136984e-006, + }, + { + i0: 1, + n0: 16, + z: []float64{0.2269772292606755e+01, 0.2269770979614632e+01, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.9725961809002787e+00, 0.9728156171042807e+00, 0.2207509801202943e-03, 0.3358741643642203e-03, 0.1480145783712083e+01, 0.1480954954309013e+01, 0.1146359002276934e-02, 0.1282269997958535e-02, 0.1656535258559212e+01, 0.1696043475248702e+01, 0.4079180092843103e-01, 0.2889964597634762e-01, 0.1201590880509993e+01, 0.1415242852393798e+01, 0.2425529321011353e+00, 0.1904848393814713e+00, 0.1111437017432560e+01, 0.9384432420354740e+00, 0.1749237822536809e-01, 0.1527569554079381e-01, 0.8195211115924584e+00, 0.1033200729619178e+01, 0.2289566278084960e+00, 0.3214195289148511e+00, 0.1450453280026662e+01, 0.1468676920055535e+01, 0.3396444831847066e+00, 0.1504703083827628e+00, 0.6506576141124067e+00, 0.6177670326339089e+00, 0.1175810411452473e+00, 0.2402402478867145e+00, 0.1262214585027166e+01, 0.1547015632125768e+01, 0.5250426092262996e+00, 0.3073217441808570e+00, 0.9055103985570538e+00, 0.6853975319350345e+00, 0.8721019179982008e-01, 0.1076549537133787e+00, 0.8460758777494440e+00, 0.7520336386157322e+00, 0.1361402882064932e-01, 0.1396865635082151e-01, 0.7716231249744060e+00, 0.8106342214532430e+00, 0.5298106707064092e-01, 0.5302931309687425e-01, 0.8113724074143097e+00, 0.7583421433039418e+00, 0.3632274887524200e-06, 0.9727942389020712e-07, 0.2030988542900605e+00, 0.2030986166419704e+00, 0.1173872316183806e-05, 0.7596112905317353e-11, 0.1314248578535016e-05, 0.8411884199182457e-17, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 8.4118841991824567e-018, + desig: 1.0739530738237274e-017, + qmax: 2.2697722926067549, + ttype: -2, + dmin1: 0.20309744276965425, + dmin2: 0.50018599148866161, + dn: 8.4118841991824567e-018, + dn1: 0.20309744276965425, + dn2: 0.75834178007645303, + g: 0.0000000000000000, + tau: 1.3142409824136984e-006, + nFail: 2, + iter: 31, + sigma: 0.21120653295038691, + nDiv: 625, + zOut: []float64{0.2269770980149768e+01, 0.2269770979614632e+01, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.9731514910392876e+00, 0.9728156171042807e+00, 0.5111377954200868e-03, 0.3358741643642203e-03, 0.1481726086511552e+01, 0.1480954954309013e+01, 0.1467737987028945e-02, 0.1282269997958535e-02, 0.1723475383238021e+01, 0.1696043475248702e+01, 0.2373112943910766e-01, 0.2889964597634762e-01, 0.1581996562336162e+01, 0.1415242852393798e+01, 0.1129959536471923e+00, 0.1904848393814713e+00, 0.8407229839290754e+00, 0.9384432420354740e+00, 0.1877296098701645e-01, 0.1527569554079381e-01, 0.1335847297547013e+01, 0.1033200729619178e+01, 0.3533797947109679e+00, 0.3214195289148511e+00, 0.1265767433727330e+01, 
0.1468676920055535e+01, 0.7343813202351114e-01, 0.1504703083827628e+00, 0.7845691484971122e+00, 0.6177670326339089e+00, 0.4737063898809231e+00, 0.2402402478867145e+00, 0.1380630986425702e+01, 0.1547015632125768e+01, 0.1525661578238559e+00, 0.3073217441808570e+00, 0.6404863278245572e+00, 0.6853975319350345e+00, 0.1264041760751794e+00, 0.1076549537133787e+00, 0.6395981188913744e+00, 0.7520336386157322e+00, 0.1770404028911661e-01, 0.1396865635082151e-01, 0.8459594942610007e+00, 0.8106342214532430e+00, 0.4753698401003136e-01, 0.5302931309687425e-01, 0.7108052565733343e+00, 0.7583421433039418e+00, 0.2779568135873871e-07, 0.9727942389020712e-07, 0.2030985888538852e+00, 0.2030986166419704e+00, 0.3146138162949754e-27, 0.7596112905317353e-11, 0.8411884198867843e-17, 0.8411884199182457e-17, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 16, + ppOut: 1, + dminOut: 8.4118841988678429e-018, + desigOut: 1.0739530738237274e-017, + sigmaOut: 0.21120653295038691, + qmaxOut: 2.2697722926067549, + nFailOut: 2, + iterOut: 32, + nDivOut: 642, + ttypeOut: -2, + dmin1Out: 0.20309858884628909, + dmin2Out: 0.53283137411117854, + dnOut: 8.4118841988678429e-018, + dn1Out: 0.20309858884628909, + dn2Out: 0.71080515929391042, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 16, + z: []float64{0.2269770980149768e+01, 0.2269770979614632e+01, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.9731514910392876e+00, 0.9728156171042807e+00, 0.5111377954200868e-03, 0.3358741643642203e-03, 0.1481726086511552e+01, 0.1480954954309013e+01, 0.1467737987028945e-02, 0.1282269997958535e-02, 0.1723475383238021e+01, 0.1696043475248702e+01, 0.2373112943910766e-01, 0.2889964597634762e-01, 0.1581996562336162e+01, 0.1415242852393798e+01, 0.1129959536471923e+00, 0.1904848393814713e+00, 0.8407229839290754e+00, 0.9384432420354740e+00, 0.1877296098701645e-01, 0.1527569554079381e-01, 0.1335847297547013e+01, 0.1033200729619178e+01, 0.3533797947109679e+00, 0.3214195289148511e+00, 0.1265767433727330e+01, 0.1468676920055535e+01, 0.7343813202351114e-01, 0.1504703083827628e+00, 0.7845691484971122e+00, 0.6177670326339089e+00, 0.4737063898809231e+00, 0.2402402478867145e+00, 0.1380630986425702e+01, 0.1547015632125768e+01, 0.1525661578238559e+00, 0.3073217441808570e+00, 0.6404863278245572e+00, 0.6853975319350345e+00, 0.1264041760751794e+00, 0.1076549537133787e+00, 0.6395981188913744e+00, 0.7520336386157322e+00, 0.1770404028911661e-01, 0.1396865635082151e-01, 0.8459594942610007e+00, 0.8106342214532430e+00, 0.4753698401003136e-01, 0.5302931309687425e-01, 0.7108052565733343e+00, 0.7583421433039418e+00, 0.2779568135873871e-07, 0.9727942389020712e-07, 0.2030985888538852e+00, 0.2030986166419704e+00, 0.3146138162949754e-27, 0.7596112905317353e-11, 0.8411884198867843e-17, 0.8411884199182457e-17, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 
0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 8.4118841988678429e-018, + desig: 1.0739530738237274e-017, + qmax: 2.2697722926067549, + ttype: -2, + dmin1: 0.20309858884628909, + dmin2: 0.53283137411117854, + dn: 8.4118841988678429e-018, + dn1: 0.20309858884628909, + dn2: 0.71080515929391042, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 32, + sigma: 0.21120653295038691, + nDiv: 642, + zOut: []float64{0.2269770980149768e+01, 0.2269770980379126e+01, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.9731514910392876e+00, 0.9736626287363720e+00, 0.5111377954200868e-03, 0.7778528033461282e-03, 0.1481726086511552e+01, 0.1482415971695234e+01, 0.1467737987028945e-02, 0.1706410574351102e-02, 0.1723475383238021e+01, 0.1745500102102777e+01, 0.2373112943910766e-01, 0.2150819994097728e-01, 0.1581996562336162e+01, 0.1673484316042377e+01, 0.1129959536471923e+00, 0.5676676764251993e-01, 0.8407229839290754e+00, 0.8027291772735718e+00, 0.1877296098701645e-01, 0.3124068479314146e-01, 0.1335847297547013e+01, 0.1657986407464839e+01, 0.3533797947109679e+00, 0.2697830536296953e+00, 0.1265767433727330e+01, 0.1069422512121146e+01, 0.7343813202351114e-01, 0.5387701498318342e-01, 0.7845691484971122e+00, 0.1204398523394852e+01, 0.4737063898809231e+00, 0.5430210247136315e+00, 0.1380630986425702e+01, 0.9901761195359265e+00, 0.1525661578238559e+00, 0.9868601781741700e-01, 0.6404863278245572e+00, 0.6682044860823195e+00, 0.1264041760751794e+00, 0.1209927124430272e+00, 0.6395981188913744e+00, 0.5363094467374639e+00, 0.1770404028911661e-01, 0.2792585709699239e-01, 0.8459594942610007e+00, 0.8655706211740396e+00, 0.4753698401003136e-01, 0.3903729781186600e-01, 0.7108052565733343e+00, 0.6717679865571495e+00, 0.2779568135873871e-07, 0.8403591378512072e-08, 0.2030985888538852e+00, 0.2030985804502939e+00, 0.3146138162949754e-27, 0.1303059324279677e-43, 0.8411884198867843e-17, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 16, + ppOut: 0, + dminOut: 8.4118841988678429e-018, + desigOut: 1.0739530738237274e-017, + sigmaOut: 0.21120653295038691, + qmaxOut: 2.2697722926067549, + nFailOut: 2, + iterOut: 33, + nDivOut: 659, + ttypeOut: -2, + dmin1Out: 0.20309858045029386, + dmin2Out: 0.51860540644834729, + dnOut: 8.4118841988678429e-018, + dn1Out: 0.20309858045029386, + dn2Out: 0.67176795876146822, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 16, + z: []float64{0.2269770980149768e+01, 0.2269770980379126e+01, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.9731514910392876e+00, 0.9736626287363720e+00, 0.5111377954200868e-03, 0.7778528033461282e-03, 0.1481726086511552e+01, 0.1482415971695234e+01, 0.1467737987028945e-02, 0.1706410574351102e-02, 0.1723475383238021e+01, 0.1745500102102777e+01, 
0.2373112943910766e-01, 0.2150819994097728e-01, 0.1581996562336162e+01, 0.1673484316042377e+01, 0.1129959536471923e+00, 0.5676676764251993e-01, 0.8407229839290754e+00, 0.8027291772735718e+00, 0.1877296098701645e-01, 0.3124068479314146e-01, 0.1335847297547013e+01, 0.1657986407464839e+01, 0.3533797947109679e+00, 0.2697830536296953e+00, 0.1265767433727330e+01, 0.1069422512121146e+01, 0.7343813202351114e-01, 0.5387701498318342e-01, 0.7845691484971122e+00, 0.1204398523394852e+01, 0.4737063898809231e+00, 0.5430210247136315e+00, 0.1380630986425702e+01, 0.9901761195359265e+00, 0.1525661578238559e+00, 0.9868601781741700e-01, 0.6404863278245572e+00, 0.6682044860823195e+00, 0.1264041760751794e+00, 0.1209927124430272e+00, 0.6395981188913744e+00, 0.5363094467374639e+00, 0.1770404028911661e-01, 0.2792585709699239e-01, 0.8459594942610007e+00, 0.8655706211740396e+00, 0.4753698401003136e-01, 0.3903729781186600e-01, 0.7108052565733343e+00, 0.6717679865571495e+00, 0.2779568135873871e-07, 0.8403591378512072e-08, 0.2030985888538852e+00, 0.2030985804502939e+00, 0.3146138162949754e-27, 0.1303059324279677e-43, 0.8411884198867843e-17, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 8.4118841988678429e-018, + desig: 1.0739530738237274e-017, + qmax: 2.2697722926067549, + ttype: -2, + dmin1: 0.20309858045029386, + dmin2: 0.51860540644834729, + dn: 8.4118841988678429e-018, + dn1: 0.20309858045029386, + dn2: 0.67176795876146822, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 2, + iter: 33, + sigma: 0.21120653295038691, + nDiv: 659, + zOut: []float64{0.2168221690252314e+01, 0.2269770980379126e+01, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.8728911912704126e+00, 0.9736626287363720e+00, 0.1321013925721922e-02, 0.7778528033461282e-03, 0.1381252078118717e+01, 0.1482415971695234e+01, 0.2156405683614187e-02, 0.1706410574351102e-02, 0.1663302606134993e+01, 0.1745500102102777e+01, 0.2163985983955575e-01, 0.2150819994097728e-01, 0.1607061933620194e+01, 0.1673484316042377e+01, 0.2835506195054301e-01, 0.5676676764251993e-01, 0.7040655098910235e+00, 0.8027291772735718e+00, 0.7356791380810462e-01, 0.3124068479314146e-01, 0.1752652257061283e+01, 0.1657986407464839e+01, 0.1646145547572216e+00, 0.2697830536296953e+00, 0.8571356821219610e+00, 0.1069422512121146e+01, 0.7570493055431493e-01, 0.5387701498318342e-01, 0.1570165327329021e+01, 0.1204398523394852e+01, 0.3424393862982660e+00, 0.5430210247136315e+00, 0.6448734608299306e+00, 0.9901761195359265e+00, 0.1022564019526126e+00, 0.9868601781741700e-01, 0.5853915063475871e+00, 0.6682044860823195e+00, 0.1108480973262629e+00, 0.1209927124430272e+00, 0.3518379162830466e+00, 0.5363094467374639e+00, 0.6870152520689508e-01, 0.2792585709699239e-01, 0.7343571035538636e+00, 0.8655706211740396e+00, 0.3571015630515456e-01, 0.3903729781186600e-01, 0.5345085484304394e+00, 0.6717679865571495e+00, 0.3193134112956561e-08, 0.8403591378512072e-08, 0.1015492870320128e+00, 0.2030985804502939e+00, 0.4415865678637858e-10, 0.1303059324279677e-43, 0.2112065329503869e+00, 
0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 15, + ppOut: 1, + dminOut: 0.10154928703201281, + desigOut: 0.0000000000000000, + sigmaOut: 0.31275582317553385, + qmaxOut: 2.2697722926067549, + nFailOut: 2, + iterOut: 34, + nDivOut: 675, + ttypeOut: -9, + dmin1Out: 0.32391205918605420, + dmin2Out: 0.32391205918605420, + dnOut: 0.10154928703201281, + dn1Out: 0.53450854002684800, + dn2Out: 0.69531980574199759, + gOut: 0.0000000000000000, + tauOut: 0.10154929022514693, + }, + { + i0: 1, + n0: 15, + z: []float64{0.2168221690252314e+01, 0.2269770980379126e+01, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.8728911912704126e+00, 0.9736626287363720e+00, 0.1321013925721922e-02, 0.7778528033461282e-03, 0.1381252078118717e+01, 0.1482415971695234e+01, 0.2156405683614187e-02, 0.1706410574351102e-02, 0.1663302606134993e+01, 0.1745500102102777e+01, 0.2163985983955575e-01, 0.2150819994097728e-01, 0.1607061933620194e+01, 0.1673484316042377e+01, 0.2835506195054301e-01, 0.5676676764251993e-01, 0.7040655098910235e+00, 0.8027291772735718e+00, 0.7356791380810462e-01, 0.3124068479314146e-01, 0.1752652257061283e+01, 0.1657986407464839e+01, 0.1646145547572216e+00, 0.2697830536296953e+00, 0.8571356821219610e+00, 0.1069422512121146e+01, 0.7570493055431493e-01, 0.5387701498318342e-01, 0.1570165327329021e+01, 0.1204398523394852e+01, 0.3424393862982660e+00, 0.5430210247136315e+00, 0.6448734608299306e+00, 0.9901761195359265e+00, 0.1022564019526126e+00, 0.9868601781741700e-01, 0.5853915063475871e+00, 0.6682044860823195e+00, 0.1108480973262629e+00, 0.1209927124430272e+00, 0.3518379162830466e+00, 0.5363094467374639e+00, 0.6870152520689508e-01, 0.2792585709699239e-01, 0.7343571035538636e+00, 0.8655706211740396e+00, 0.3571015630515456e-01, 0.3903729781186600e-01, 0.5345085484304394e+00, 0.6717679865571495e+00, 0.3193134112956561e-08, 0.8403591378512072e-08, 0.1015492870320128e+00, 0.2030985804502939e+00, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 0.10154928703201281, + desig: 0.0000000000000000, + qmax: 2.1682216902523144, + ttype: -9, + dmin1: 0.32391205918605420, + dmin2: 0.32391205918605420, + dn: 0.10154928703201281, + dn1: 0.53450854002684800, + dn2: 0.69531980574199759, + g: 0.0000000000000000, + tau: 0.10154929022514693, + nFail: 2, + iter: 34, + sigma: 0.31275582317553385, + nDiv: 675, + zOut: []float64{0.2168221690252314e+01, 0.2066680684115085e+01, 0.4415865678637858e-10, 0.1865101988102620e-10, 
0.8728911912704126e+00, 0.7726711989960955e+00, 0.1321013925721922e-02, 0.2361487308570419e-02, 0.1381252078118717e+01, 0.1279505990312372e+01, 0.2156405683614187e-02, 0.2803234389363145e-02, 0.1663302606134993e+01, 0.1580598225403798e+01, 0.2163985983955575e-01, 0.2200217261925751e-01, 0.1607061933620194e+01, 0.1511873816770092e+01, 0.2835506195054301e-01, 0.1320468740761088e-01, 0.7040655098910235e+00, 0.6628877301101292e+00, 0.7356791380810462e-01, 0.1945110224949301e+00, 0.1752652257061283e+01, 0.1621214783142186e+01, 0.1646145547572216e+00, 0.8703165684534678e-01, 0.8571356821219610e+00, 0.7442679496495411e+00, 0.7570493055431493e-01, 0.1597129865933493e+00, 0.1570165327329021e+01, 0.1651350720852550e+01, 0.3424393862982660e+00, 0.1337269360034139e+00, 0.6448734608299306e+00, 0.5118619205977412e+00, 0.1022564019526126e+00, 0.1169456581236225e+00, 0.5853915063475871e+00, 0.4777529393688394e+00, 0.1108480973262629e+00, 0.8163333047984263e-01, 0.3518379162830466e+00, 0.2373651048287110e+00, 0.6870152520689508e-01, 0.2125478936639627e+00, 0.7343571035538636e+00, 0.4559783600136673e+00, 0.3571015630515456e-01, 0.4186028435717903e-01, 0.5345085484304394e+00, 0.3911072610850064e+00, 0.3193134112956561e-08, 0.8290832843879624e-09, 0.1015492870320128e+00, 0.8280021541434701e-05, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 15, + ppOut: 0, + dminOut: 8.2800215414347011e-006, + desigOut: -1.3877787807814457e-017, + sigmaOut: 0.41429682935692197, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 35, + nDivOut: 691, + ttypeOut: -4, + dmin1Out: 0.16866357962181588, + dmin2Out: 0.16866357962181588, + dnOut: 8.2800215414347011e-006, + dn1Out: 0.39110725789187228, + dn2Out: 0.42026820370851276, + gOut: 0.0000000000000000, + tauOut: 0.10154100618138810, + }, + { + i0: 1, + n0: 11, + z: []float64{0.1565539912114361e+01, 0.1565539887559990e+01, 0.1019378594629470e-16, 0.7719264500395369e-17, 0.1185508165627851e+01, 0.1209595044804978e+01, 0.2408690373149840e-01, 0.1773879145177052e-01, 0.8908058287637680e+00, 0.9017845275302323e+00, 0.2871751477260568e-01, 0.4376453959373496e-01, 0.1374289696466428e+01, 0.1388278114489740e+01, 0.5775298217141787e-01, 0.3977262621431848e-01, 0.9560626040960629e+00, 0.9795790109656241e+00, 0.6328905763825028e-01, 0.5927283946360358e-01, 0.9174165586530958e+00, 0.1200347187817445e+01, 0.3422034931823232e+00, 0.1225138368952072e+00, 0.4297417838091302e+00, 0.3072280530227507e+00, 0.1306631986684747e-06, 0.1577236754259593e-06, 0.3708552845063717e+00, 0.3753559465594062e+00, 0.4500844331080801e-02, 0.3315910790466669e-02, 0.2765362989488873e+00, 0.2810497954194954e+00, 0.7829431815445537e-02, 0.5406547261581845e-02, 0.1940765355151052e+00, 0.1886700618399915e+00, 0.9814083907923291e-07, 0.1277249852674422e-13, 0.2455438642568072e-07, 0.2764930279233778e-14, 0.1019378594629470e-16, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 
0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 2.7649302792337775e-015, + desig: 4.8519526217661568e-017, + qmax: 2.1682216902523144, + ttype: -2, + dmin1: 0.18866996369915248, + dmin2: 0.27322036360404983, + dn: 2.7649302792337775e-015, + dn1: 0.18866996369915248, + dn2: 0.27322036360404983, + g: 0.0000000000000000, + tau: 2.4554370888251911e-008, + nFail: 2, + iter: 57, + sigma: 0.91543762594185796, + nDiv: 1001, + zOut: []float64{0.1565539887559988e+01, 0.1565539887559990e+01, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.1227333836256746e+01, 0.1209595044804978e+01, 0.1303359134714337e-01, 0.1773879145177052e-01, 0.9325154757768211e+00, 0.9017845275302323e+00, 0.6515425651042277e-01, 0.4376453959373496e-01, 0.1362896484193633e+01, 0.1388278114489740e+01, 0.2858649229958118e-01, 0.3977262621431848e-01, 0.1010265358129644e+01, 0.9795790109656241e+00, 0.7042504782685149e-01, 0.5927283946360358e-01, 0.1252435976885798e+01, 0.1200347187817445e+01, 0.3005318297487194e-01, 0.1225138368952072e+00, 0.2771750277715515e+00, 0.3072280530227507e+00, 0.2135925446109612e-06, 0.1577236754259593e-06, 0.3786716437573254e+00, 0.3753559465594062e+00, 0.2461066374135985e-02, 0.3315910790466669e-02, 0.2839952763069384e+00, 0.2810497954194954e+00, 0.3591797791316158e-02, 0.5406547261581845e-02, 0.1850782640486854e+00, 0.1886700618399915e+00, 0.1908115364037247e-27, 0.1277249852674422e-13, 0.3470987982972452e-28, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 11, + ppOut: 1, + dminOut: 3.4709879829724519e-029, + desigOut: 3.7892243888322520e-017, + sigmaOut: 0.91543762594186073, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 58, + nDivOut: 1013, + ttypeOut: -2, + 
dmin1Out: 0.18507826404867261, + dmin2Out: 0.27717487004787600, + dnOut: 3.4709879829724519e-029, + dn1Out: 0.18507826404867261, + dn2Out: 0.27858872904535659, + gOut: 0.0000000000000000, + tauOut: 2.7649302792335523e-015, + }, + { + i0: 1, + n0: 11, + z: []float64{0.1565539887559988e+01, 0.1565539887559990e+01, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.1227333836256746e+01, 0.1209595044804978e+01, 0.1303359134714337e-01, 0.1773879145177052e-01, 0.9325154757768211e+00, 0.9017845275302323e+00, 0.6515425651042277e-01, 0.4376453959373496e-01, 0.1362896484193633e+01, 0.1388278114489740e+01, 0.2858649229958118e-01, 0.3977262621431848e-01, 0.1010265358129644e+01, 0.9795790109656241e+00, 0.7042504782685149e-01, 0.5927283946360358e-01, 0.1252435976885798e+01, 0.1200347187817445e+01, 0.3005318297487194e-01, 0.1225138368952072e+00, 0.2771750277715515e+00, 0.3072280530227507e+00, 0.2135925446109612e-06, 0.1577236754259593e-06, 0.3786716437573254e+00, 0.3753559465594062e+00, 0.2461066374135985e-02, 0.3315910790466669e-02, 0.2839952763069384e+00, 0.2810497954194954e+00, 0.3591797791316158e-02, 0.5406547261581845e-02, 0.1850782640486854e+00, 0.1886700618399915e+00, 0.1908115364037247e-27, 0.1277249852674422e-13, 0.3470987982972452e-28, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 3.4709879829724519e-029, + desig: 3.7892243888322520e-017, + qmax: 2.1682216902523144, + ttype: -2, + dmin1: 0.18507826404867261, + dmin2: 0.27717487004787600, + dn: 3.4709879829724519e-029, + dn1: 0.18507826404867261, + dn2: 0.27858872904535659, + g: 0.0000000000000000, + tau: 2.7649302792335523e-015, + nFail: 2, + iter: 58, + sigma: 0.91543762594186073, + nDiv: 1013, + zOut: []float64{0.1565539887559988e+01, 0.1473000755535651e+01, 0.5964194309842799e-17, 0.4969486576955697e-17, 0.1227333836256746e+01, 0.1147828295579553e+01, 0.1303359134714337e-01, 0.1058871408116432e-01, 0.9325154757768211e+00, 0.8945418861817434e+00, 0.6515425651042277e-01, 0.9926701980086396e-01, 0.1362896484193633e+01, 0.1199676824668014e+01, 0.2858649229958118e-01, 0.2407310226126826e-01, 0.1010265358129644e+01, 0.9640781716708908e+00, 0.7042504782685149e-01, 0.9148932748822959e-01, 0.1252435976885798e+01, 0.1098460700348104e+01, 0.3005318297487194e-01, 0.7583331677723075e-02, 0.2771750277715515e+00, 0.1770527776620367e+00, 0.2135925446109612e-06, 0.4568210735249311e-06, 0.3786716437573254e+00, 0.2885931212860515e+00, 0.2461066374135985e-02, 0.2421856840585221e-02, 0.2839952763069384e+00, 0.1926260852333330e+00, 0.3591797791316158e-02, 0.3451057520197492e-02, 
0.1850782640486854e+00, 0.8908807450415157e-01, 0.1908115364037247e-27, 0.4969486576955697e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 0, + dminOut: 8.9088074504151571e-002, + desigOut: -4.1633363423443370e-017, + sigmaOut: 1.0079767579661971, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 59, + nDivOut: 1024, + ttypeOut: -9, + dmin1Out: 0.17705256406949207, + dmin2Out: 0.17705256406949207, + dnOut: 8.9088074504151571e-002, + dn1Out: 0.18903428744201686, + dn2Out: 0.28613205491191551, + gOut: 0.0000000000000000, + tauOut: 9.2539132024336307e-002, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1565539887559988e+01, 0.1473000755535651e+01, 0.5964194309842799e-17, 0.4969486576955697e-17, 0.1227333836256746e+01, 0.1147828295579553e+01, 0.1303359134714337e-01, 0.1058871408116432e-01, 0.9325154757768211e+00, 0.8945418861817434e+00, 0.6515425651042277e-01, 0.9926701980086396e-01, 0.1362896484193633e+01, 0.1199676824668014e+01, 0.2858649229958118e-01, 0.2407310226126826e-01, 0.1010265358129644e+01, 0.9640781716708908e+00, 0.7042504782685149e-01, 0.9148932748822959e-01, 0.1252435976885798e+01, 0.1098460700348104e+01, 0.3005318297487194e-01, 0.7583331677723075e-02, 0.2771750277715515e+00, 0.1770527776620367e+00, 0.2135925446109612e-06, 0.4568210735249311e-06, 0.3786716437573254e+00, 0.2885931212860515e+00, 0.2461066374135985e-02, 0.2421856840585221e-02, 0.2839952763069384e+00, 0.1926260852333330e+00, 0.3591797791316158e-02, 0.3451057520197492e-02, 0.1850782640486854e+00, 0.8908807450415157e-01, 0.1908115364037247e-27, 0.4969486576955697e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 
0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 8.9088074504151571e-002, + desig: -4.1633363423443370e-017, + qmax: 2.1682216902523144, + ttype: -9, + dmin1: 0.17705256406949207, + dmin2: 0.17705256406949207, + dn: 8.9088074504151571e-002, + dn1: 0.18903428744201686, + dn2: 0.28613205491191551, + g: 0.0000000000000000, + tau: 9.2539132024336307e-002, + nFail: 2, + iter: 59, + sigma: 1.0079767579661971, + nDiv: 1024, + zOut: []float64{0.1397612833703614e+01, 0.1473000755535651e+01, 0.4081328655531061e-17, 0.4969486576955697e-17, 0.1083029087828680e+01, 0.1147828295579553e+01, 0.8745885380967966e-02, 0.1058871408116432e-01, 0.9096750987696016e+00, 0.8945418861817434e+00, 0.1309130515609722e+00, 0.9926701980086396e-01, 0.1017448953536272e+01, 0.1199676824668014e+01, 0.2281033592282574e-01, 0.2407310226126826e-01, 0.9573692414042568e+00, 0.9640781716708908e+00, 0.1049724875218357e+00, 0.9148932748822959e-01, 0.9256836226719531e+00, 0.1098460700348104e+01, 0.1450441494900679e-02, 0.7583331677723075e-02, 0.1002148711561719e+00, 0.1770527776620367e+00, 0.1315527505616969e-05, 0.4568210735249311e-06, 0.2156257407670935e+00, 0.2885931212860515e+00, 0.2163530200698068e-02, 0.2421856840585221e-02, 0.1185256907207948e+00, 0.1926260852333330e+00, 0.2593936112987583e-02, 0.3451057520197492e-02, 0.1110621655912630e-01, 0.8908807450415157e-01, 0.4081328655531061e-17, 0.4969486576955697e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 1, + dminOut: 1.1106216559126303e-002, + desigOut: -4.1633363423443370e-017, + sigmaOut: 1.0833646797982348, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 60, + nDivOut: 1035, + ttypeOut: -4, + dmin1Out: 0.10021441433509834, + dmin2Out: 0.10021441433509834, + dnOut: 1.1106216559126303e-002, + dn1Out: 0.11507463320059727, + dn2Out: 0.21320388392650824, + gOut: 0.0000000000000000, + tauOut: 7.5387921832037685e-002, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1397612833703614e+01, 0.1473000755535651e+01, 0.4081328655531061e-17, 0.4969486576955697e-17, 0.1083029087828680e+01, 0.1147828295579553e+01, 0.8745885380967966e-02, 0.1058871408116432e-01, 0.9096750987696016e+00, 0.8945418861817434e+00, 0.1309130515609722e+00, 0.9926701980086396e-01, 0.1017448953536272e+01, 0.1199676824668014e+01, 0.2281033592282574e-01, 0.2407310226126826e-01, 0.9573692414042568e+00, 0.9640781716708908e+00, 0.1049724875218357e+00, 0.9148932748822959e-01, 0.9256836226719531e+00, 
0.1098460700348104e+01, 0.1450441494900679e-02, 0.7583331677723075e-02, 0.1002148711561719e+00, 0.1770527776620367e+00, 0.1315527505616969e-05, 0.4568210735249311e-06, 0.2156257407670935e+00, 0.2885931212860515e+00, 0.2163530200698068e-02, 0.2421856840585221e-02, 0.1185256907207948e+00, 0.1926260852333330e+00, 0.2593936112987583e-02, 0.3451057520197492e-02, 0.1110621655912630e-01, 0.8908807450415157e-01, 0.4081328655531061e-17, 0.4969486576955697e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 1.1106216559126303e-002, + desig: -4.1633363423443370e-017, + qmax: 2.1682216902523144, + ttype: -4, + dmin1: 0.10021441433509834, + dmin2: 0.10021441433509834, + dn: 1.1106216559126303e-002, + dn1: 0.11507463320059727, + dn2: 0.21320388392650824, + g: 0.0000000000000000, + tau: 7.5387921832037685e-002, + nFail: 2, + iter: 60, + sigma: 1.0833646797982348, + nDiv: 1035, + zOut: []float64{0.1397612833703614e+01, 0.1388412169967495e+01, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.1083029087828680e+01, 0.1082574309473529e+01, 0.8745885380967966e-02, 0.7349069784991225e-02, 0.9096750987696016e+00, 0.1024038416809464e+01, 0.1309130515609722e+00, 0.1300706547025319e+00, 0.1017448953536272e+01, 0.9009879710204475e+00, 0.2281033592282574e-01, 0.2423774201322422e-01, 0.9573692414042568e+00, 0.1028903323176749e+01, 0.1049724875218357e+00, 0.9444163542020824e-01, 0.9256836226719531e+00, 0.8234917650105269e+00, 0.1450441494900679e-02, 0.1765115496075164e-03, 0.1002148711561719e+00, 0.9083901139795122e-01, 0.1315527505616969e-05, 0.3122684720284652e-05, 0.2156257407670935e+00, 0.2085854845469525e+00, 0.2163530200698068e-02, 0.1229394806594584e-02, 0.1185256907207948e+00, 0.1106895682910690e+00, 0.2593936112987583e-02, 0.2602667681892373e-03, 0.1110621655912630e-01, 0.1645286054818337e-02, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 
0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 0, + dminOut: 1.6452860548183366e-003, + desigOut: 7.9797279894933126e-017, + sigmaOut: 1.0925653435343534, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 61, + nDivOut: 1046, + ttypeOut: -4, + dmin1Out: 9.0837695870445614e-002, + dmin2Out: 9.0837695870445614e-002, + dnOut: 1.6452860548183366e-003, + dn1Out: 0.10809563217808144, + dn2Out: 0.20642195434625446, + gOut: 0.0000000000000000, + tauOut: 9.2006637361187298e-003, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1397612833703614e+01, 0.1388412169967495e+01, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.1083029087828680e+01, 0.1082574309473529e+01, 0.8745885380967966e-02, 0.7349069784991225e-02, 0.9096750987696016e+00, 0.1024038416809464e+01, 0.1309130515609722e+00, 0.1300706547025319e+00, 0.1017448953536272e+01, 0.9009879710204475e+00, 0.2281033592282574e-01, 0.2423774201322422e-01, 0.9573692414042568e+00, 0.1028903323176749e+01, 0.1049724875218357e+00, 0.9444163542020824e-01, 0.9256836226719531e+00, 0.8234917650105269e+00, 0.1450441494900679e-02, 0.1765115496075164e-03, 0.1002148711561719e+00, 0.9083901139795122e-01, 0.1315527505616969e-05, 0.3122684720284652e-05, 0.2156257407670935e+00, 0.2085854845469525e+00, 0.2163530200698068e-02, 0.1229394806594584e-02, 0.1185256907207948e+00, 0.1106895682910690e+00, 0.2593936112987583e-02, 0.2602667681892373e-03, 0.1110621655912630e-01, 0.1645286054818337e-02, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 1.6452860548183366e-003, + desig: 7.9797279894933126e-017, + qmax: 2.1682216902523144, + ttype: -4, + dmin1: 9.0837695870445614e-002, + dmin2: 9.0837695870445614e-002, + dn: 1.6452860548183366e-003, + dn1: 0.10809563217808144, + dn2: 0.20642195434625446, + g: 0.0000000000000000, + tau: 9.2006637361187298e-003, + nFail: 2, + iter: 61, + sigma: 1.0925653435343534, + nDiv: 1046, + zOut: []float64{0.1386852748028120e+01, 0.1388412169967495e+01, 0.2485138759635906e-17, 0.3183635051997810e-17, 0.1088363957319145e+01, 0.1082574309473529e+01, 0.6914717946174950e-02, 0.7349069784991225e-02, 0.1145634931626446e+01, 
0.1024038416809464e+01, 0.1022944500333619e+00, 0.1300706547025319e+00, 0.8213718410609353e+00, 0.9009879710204475e+00, 0.3036175828902931e-01, 0.2423774201322422e-01, 0.1091423778368554e+01, 0.1028903323176749e+01, 0.7125729765473905e-01, 0.9444163542020824e-01, 0.7508515569660207e+00, 0.8234917650105269e+00, 0.2135460001102832e-04, 0.1765115496075164e-03, 0.8926135754328580e-01, 0.9083901139795122e-01, 0.7297073710223142e-05, 0.3122684720284652e-05, 0.2082481603404622e+00, 0.2085854845469525e+00, 0.6534568189162364e-03, 0.1229394806594584e-02, 0.1087369563009673e+00, 0.1106895682910690e+00, 0.3938065757966957e-05, 0.2602667681892373e-03, 0.8192604968568760e-04, 0.1645286054818337e-02, 0.2485138759635906e-17, 0.3183635051997810e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 1, + dminOut: 8.1926049685687600e-005, + desigOut: -9.9312918999672206e-017, + sigmaOut: 1.0941247654737283, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 62, + nDivOut: 1057, + ttypeOut: -4, + dmin1Out: 8.9258234858565516e-002, + dmin2Out: 8.9258234858565516e-002, + dnOut: 8.1926049685687600e-005, + dn1Out: 0.10847668953277810, + dn2Out: 0.20701876553386761, + gOut: 0.0000000000000000, + tauOut: 1.5594219393746818e-003, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1386852748028120e+01, 0.1388412169967495e+01, 0.2485138759635906e-17, 0.3183635051997810e-17, 0.1088363957319145e+01, 0.1082574309473529e+01, 0.6914717946174950e-02, 0.7349069784991225e-02, 0.1145634931626446e+01, 0.1024038416809464e+01, 0.1022944500333619e+00, 0.1300706547025319e+00, 0.8213718410609353e+00, 0.9009879710204475e+00, 0.3036175828902931e-01, 0.2423774201322422e-01, 0.1091423778368554e+01, 0.1028903323176749e+01, 0.7125729765473905e-01, 0.9444163542020824e-01, 0.7508515569660207e+00, 0.8234917650105269e+00, 0.2135460001102832e-04, 0.1765115496075164e-03, 0.8926135754328580e-01, 0.9083901139795122e-01, 0.7297073710223142e-05, 0.3122684720284652e-05, 0.2082481603404622e+00, 0.2085854845469525e+00, 0.6534568189162364e-03, 0.1229394806594584e-02, 0.1087369563009673e+00, 0.1106895682910690e+00, 0.3938065757966957e-05, 0.2602667681892373e-03, 0.8192604968568760e-04, 0.1645286054818337e-02, 0.2485138759635906e-17, 0.3183635051997810e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 
0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 8.1926049685687600e-005, + desig: -9.9312918999672206e-017, + qmax: 2.1682216902523144, + ttype: -4, + dmin1: 8.9258234858565516e-002, + dmin2: 8.9258234858565516e-002, + dn: 8.1926049685687600e-005, + dn1: 0.10847668953277810, + dn2: 0.20701876553386761, + g: 0.0000000000000000, + tau: 1.5594219393746818e-003, + nFail: 2, + iter: 62, + sigma: 1.0941247654737283, + nDiv: 1057, + zOut: []float64{0.1386852748028120e+01, 0.1386771331083530e+01, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.1088363957319145e+01, 0.1095197258320730e+01, 0.6914717946174950e-02, 0.7233164949324962e-02, 0.1145634931626446e+01, 0.1240614799765893e+01, 0.1022944500333619e+00, 0.6772592167212049e-01, 0.8213718410609353e+00, 0.7839262607332537e+00, 0.3036175828902931e-01, 0.4227125255215914e-01, 0.1091423778368554e+01, 0.1120328406526543e+01, 0.7125729765473905e-01, 0.4775711530437247e-01, 0.7508515569660207e+00, 0.7030343793170689e+00, 0.2135460001102832e-04, 0.2711304941630149e-05, 0.8926135754328580e-01, 0.8918452636746399e-01, 0.7297073710223142e-05, 0.1703885458517271e-04, 0.2082481603404622e+00, 0.2088031613602029e+00, 0.6534568189162364e-03, 0.3402961195615630e-03, 0.1087369563009673e+00, 0.1083191813025733e+00, 0.3938065757966957e-05, 0.2978513750500819e-08, 0.8192604968568760e-04, 0.5061265815246250e-06, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 0, + dminOut: 5.0612658152462498e-007, + desigOut: -9.3553094958342964e-017, + sigmaOut: 1.0942061824183187, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 63, + nDivOut: 1068, + ttypeOut: -4, + dmin1Out: 8.9177229293753768e-002, + dmin2Out: 8.9177229293753768e-002, + dnOut: 
5.0612658152462498e-007, + dn1Out: 0.10831524323681536, + dn2Out: 0.20814970454128662, + gOut: 0.0000000000000000, + tauOut: 8.1416944590412474e-005, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1386852748028120e+01, 0.1386771331083530e+01, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.1088363957319145e+01, 0.1095197258320730e+01, 0.6914717946174950e-02, 0.7233164949324962e-02, 0.1145634931626446e+01, 0.1240614799765893e+01, 0.1022944500333619e+00, 0.6772592167212049e-01, 0.8213718410609353e+00, 0.7839262607332537e+00, 0.3036175828902931e-01, 0.4227125255215914e-01, 0.1091423778368554e+01, 0.1120328406526543e+01, 0.7125729765473905e-01, 0.4775711530437247e-01, 0.7508515569660207e+00, 0.7030343793170689e+00, 0.2135460001102832e-04, 0.2711304941630149e-05, 0.8926135754328580e-01, 0.8918452636746399e-01, 0.7297073710223142e-05, 0.1703885458517271e-04, 0.2082481603404622e+00, 0.2088031613602029e+00, 0.6534568189162364e-03, 0.3402961195615630e-03, 0.1087369563009673e+00, 0.1083191813025733e+00, 0.3938065757966957e-05, 0.2978513750500819e-08, 0.8192604968568760e-04, 0.5061265815246250e-06, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 5.0612658152462498e-007, + desig: -9.3553094958342964e-017, + qmax: 2.1682216902523144, + ttype: -4, + dmin1: 8.9177229293753768e-002, + dmin2: 8.9177229293753768e-002, + dn: 5.0612658152462498e-007, + dn1: 0.10831524323681536, + dn2: 0.20814970454128662, + g: 0.0000000000000000, + tau: 8.1416944590412474e-005, + nFail: 2, + iter: 63, + sigma: 1.0942061824183187, + nDiv: 1068, + zOut: []float64{0.1386770825043033e+01, 0.1386771331083530e+01, 0.1540308069724697e-17, 0.1950383162890474e-17, 0.1102429917229558e+01, 0.1095197258320730e+01, 0.8139811288713328e-02, 0.7233164949324962e-02, 0.1300200404108803e+01, 0.1240614799765893e+01, 0.4083380405309871e-01, 0.6772592167212049e-01, 0.7853632031918177e+00, 0.7839262607332537e+00, 0.6030036143936175e-01, 0.4227125255215914e-01, 0.1107784654351057e+01, 0.1120328406526543e+01, 0.3030814137396716e-01, 0.4775711530437247e-01, 0.6727284432075471e+00, 0.7030343793170689e+00, 0.3594413905024206e-06, 0.2711304941630149e-05, 0.8920069974016231e-01, 0.8918452636746399e-01, 0.3988496406087022e-04, 0.1703885458517271e-04, 0.2091030664752072e+00, 0.2088031613602029e+00, 0.1762795624794033e-03, 0.3402961195615630e-03, 0.1081423986781113e+00, 0.1083191813025733e+00, 0.1393999949133917e-13, 0.2978513750500819e-08, 0.8607124697139263e-10, 0.5061265815246250e-06, 
0.1540308069724697e-17, 0.1950383162890474e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 1, + dminOut: 8.6071246971392626e-011, + desigOut: 7.0901951641105278e-017, + sigmaOut: 1.0942066884588149, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 64, + nDivOut: 1079, + ttypeOut: -4, + dmin1Out: 8.9183660885577137e-002, + dmin2Out: 8.9183660885577137e-002, + dnOut: 8.6071246971392626e-011, + dn1Out: 0.10814239569959758, + dn2Out: 0.20876277035564564, + gOut: 0.0000000000000000, + tauOut: 5.0604049633765406e-007, + }, + { + i0: 1, + n0: 10, + z: []float64{0.1386770825043033e+01, 0.1386771331083530e+01, 0.1540308069724697e-17, 0.1950383162890474e-17, 0.1102429917229558e+01, 0.1095197258320730e+01, 0.8139811288713328e-02, 0.7233164949324962e-02, 0.1300200404108803e+01, 0.1240614799765893e+01, 0.4083380405309871e-01, 0.6772592167212049e-01, 0.7853632031918177e+00, 0.7839262607332537e+00, 0.6030036143936175e-01, 0.4227125255215914e-01, 0.1107784654351057e+01, 0.1120328406526543e+01, 0.3030814137396716e-01, 0.4775711530437247e-01, 0.6727284432075471e+00, 0.7030343793170689e+00, 0.3594413905024206e-06, 0.2711304941630149e-05, 0.8920069974016231e-01, 0.8918452636746399e-01, 0.3988496406087022e-04, 0.1703885458517271e-04, 0.2091030664752072e+00, 0.2088031613602029e+00, 0.1762795624794033e-03, 0.3402961195615630e-03, 0.1081423986781113e+00, 0.1083191813025733e+00, 0.1393999949133917e-13, 0.2978513750500819e-08, 0.8607124697139263e-10, 0.5061265815246250e-06, 0.1540308069724697e-17, 0.1950383162890474e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 
0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 8.6071246971392626e-011, + desig: 7.0901951641105278e-017, + qmax: 2.1682216902523144, + ttype: -4, + dmin1: 8.9183660885577137e-002, + dmin2: 8.9183660885577137e-002, + dn: 8.6071246971392626e-011, + dn1: 0.10814239569959758, + dn2: 0.20876277035564564, + g: 0.0000000000000000, + tau: 5.0604049633765406e-007, + nFail: 2, + iter: 64, + sigma: 1.0942066884588149, + nDiv: 1079, + zOut: []float64{0.1386770825043033e+01, 0.1386770824956962e+01, 0.1540308069724697e-17, 0.1224486171222500e-17, 0.1102429917229558e+01, 0.1110569728432201e+01, 0.8139811288713328e-02, 0.9529690622753703e-02, 0.1300200404108803e+01, 0.1331504517453077e+01, 0.4083380405309871e-01, 0.2408506071837550e-01, 0.7853632031918177e+00, 0.8215785038267327e+00, 0.6030036143936175e-01, 0.8130667336500198e-01, 0.1107784654351057e+01, 0.1056786122273951e+01, 0.3030814137396716e-01, 0.1929354325655847e-01, 0.6727284432075471e+00, 0.6534352593063080e+00, 0.3594413905024206e-06, 0.4906748310831983e-07, 0.8920069974016231e-01, 0.8924053555066887e-01, 0.3988496406087022e-04, 0.9345605379795243e-04, 0.2091030664752072e+00, 0.2091858898978174e+00, 0.1762795624794033e-03, 0.9113088236382798e-04, 0.1081423986781113e+00, 0.1080512677096902e+00, 0.1393999949133917e-13, 0.1110429488179469e-22, 0.8607124697139263e-10, 0.3167883469916549e-16, 0.1540308069724697e-17, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 10, + ppOut: 0, + dminOut: 3.1678834699165494e-017, + desigOut: -8.6057196331979189e-017, + sigmaOut: 1.0942066885448862, + qmaxOut: 2.1682216902523144, + nFailOut: 2, + iterOut: 65, + nDivOut: 1090, + ttypeOut: -4, + dmin1Out: 8.9200650586607991e-002, + dmin2Out: 8.9200650586607991e-002, + dnOut: 3.1678834699165494e-017, + dn1Out: 0.10805126770967630, + dn2Out: 0.20900961033533805, + gOut: 0.0000000000000000, + tauOut: 8.6071215292546838e-011, + }, + { + i0: 1, + n0: 3, + z: []float64{0.1143865217830237e+00, 0.1154808079639322e+00, 0.2932453102768040e-16, 0.1817901582187390e-16, 0.1834406364024603e+00, 0.1845166504084682e+00, 0.1101341475930520e-06, 0.1827217490071947e-04, 0.1127820009512990e-04, 0.1105674515151256e-02, 0.1845166504084682e+00, 0.2816548643828996e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 
0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 1.1278200095129901e-005, + desig: 1.1991276027689679e-016, + qmax: 0.41760242607758735, + ttype: -4, + dmin1: 0.11438652178302365, + dmin2: 0.11438652178302365, + dn: 1.1278200095129901e-005, + dn1: 0.18342236422755959, + dn2: 0.11438652178302365, + g: 0.0000000000000000, + tau: 1.0942861809085330e-003, + nFail: 3, + iter: 99, + sigma: 2.3665909917188244, + nDiv: 1347, + zOut: []float64{0.1143865217830237e+00, 0.1143752525446631e+00, 0.2932453102768040e-16, 0.4703212027287794e-16, 0.1834406364024603e+00, 0.1834294772982473e+00, 0.1101341475930520e-06, 0.6771621290952006e-11, 0.1127820009512990e-04, 0.8954962962003413e-08, 0.1845166504084682e+00, 0.1834406364024603e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 
0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 3, + ppOut: 0, + dminOut: 8.9549629620034135e-009, + desigOut: -3.3964327119002935e-017, + sigmaOut: 2.3666022609571851, + qmaxOut: 0.41760242607758735, + nFailOut: 3, + iterOut: 100, + nDivOut: 1351, + ttypeOut: -4, + dmin1Out: 0.11437525254466312, + dmin2Out: 0.11437525254466312, + dnOut: 8.9549629620034135e-009, + dn1Out: 0.18342936716409974, + dn2Out: 0.11437525254466312, + gOut: 0.0000000000000000, + tauOut: 1.1269238360546607e-005, + }, + { + i0: 1, + n0: 3, + z: []float64{0.1143865217830237e+00, 0.1143752525446631e+00, 0.2932453102768040e-16, 0.4703212027287794e-16, 0.1834406364024603e+00, 0.1834294772982473e+00, 0.1101341475930520e-06, 0.6771621290952006e-11, 0.1127820009512990e-04, 0.8954962962003413e-08, 0.1845166504084682e+00, 0.1834406364024603e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 8.9549629620034135e-009, + desig: -3.3964327119002935e-017, + qmax: 0.41760242607758735, + ttype: -4, + dmin1: 0.11437525254466312, + dmin2: 0.11437525254466312, + dn: 8.9549629620034135e-009, + dn1: 0.18342936716409974, + dn2: 0.11437525254466312, + g: 0.0000000000000000, + tau: 1.1269238360546607e-005, + nFail: 3, + iter: 100, + sigma: 2.3666022609571851, + nDiv: 1351, + zOut: []float64{0.1143752435897560e+00, 0.1143752525446631e+00, 0.7542783706608855e-16, 0.4703212027287794e-16, 0.1834294683501117e+00, 0.1834294772982473e+00, 0.3305882004599510e-18, 0.6771621290952006e-11, 0.5575326929115112e-13, 
0.8954962962003413e-08, 0.1834294772982473e+00, 0.1834406364024603e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 3, + ppOut: 1, + dminOut: 5.5753269291151117e-014, + desigOut: 1.3632246356731358e-016, + sigmaOut: 2.3666022699120921, + qmaxOut: 0.41760242607758735, + nFailOut: 3, + iterOut: 101, + nDivOut: 1355, + ttypeOut: -4, + dmin1Out: 0.11437524358975594, + dmin2Out: 0.11437524358975594, + dnOut: 5.5753269291151117e-014, + dn1Out: 0.18342946834334006, + dn2Out: 0.11437524358975594, + gOut: 0.0000000000000000, + tauOut: 8.9549072084035346e-009, + }, + { + i0: 1, + n0: 3, + z: []float64{0.1143752435897560e+00, 0.1143752525446631e+00, 0.7542783706608855e-16, 0.4703212027287794e-16, 0.1834294683501117e+00, 0.1834294772982473e+00, 0.3305882004599510e-18, 0.6771621290952006e-11, 0.5575326929115112e-13, 0.8954962962003413e-08, 0.1834294772982473e+00, 0.1834406364024603e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 
0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 0, + dmin: 5.5753269291151117e-014, + desig: 1.3632246356731358e-016, + qmax: 0.41760242607758735, + ttype: -4, + dmin1: 0.11437524358975594, + dmin2: 0.11437524358975594, + dn: 5.5753269291151117e-014, + dn1: 0.18342946834334006, + dn2: 0.11437524358975594, + g: 0.0000000000000000, + tau: 8.9549072084035346e-009, + nFail: 3, + iter: 101, + sigma: 2.3666022699120921, + nDiv: 1355, + zOut: []float64{0.1143752435897560e+00, 0.1143752435897003e+00, 0.7542783706608855e-16, 0.1209675067575323e-15, 0.1834294683501117e+00, 0.1834294683500558e+00, 0.3305882004599510e-18, 0.1004820715586787e-30, 0.5575326929115112e-13, 0.7669624420364386e-22, 0.1834294772982473e+00, 0.1834294683501117e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 3, + ppOut: 0, + dminOut: 7.6696244203643861e-023, + desigOut: -6.5648763085702813e-017, + sigmaOut: 2.3666022699121481, + qmaxOut: 0.41760242607758735, + nFailOut: 3, + iterOut: 102, + nDivOut: 1359, + ttypeOut: -4, + dmin1Out: 0.11437524358970023, + dmin2Out: 0.11437524358970023, + dnOut: 7.6696244203643861e-023, + dn1Out: 
0.18342946835005580, + dn2Out: 0.11437524358970023, + gOut: 0.0000000000000000, + tauOut: 5.5753269214454873e-014, + }, + { + i0: 1, + n0: 3, + z: []float64{0.1143752435897560e+00, 0.1143752435897003e+00, 0.7542783706608855e-16, 0.1209675067575323e-15, 0.1834294683501117e+00, 0.1834294683500558e+00, 0.3305882004599510e-18, 0.1004820715586787e-30, 0.5575326929115112e-13, 0.7669624420364386e-22, 0.1834294772982473e+00, 0.1834294683501117e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + pp: 1, + dmin: 7.6696244203643861e-023, + desig: -6.5648763085702813e-017, + qmax: 0.41760242607758735, + ttype: -4, + dmin1: 0.11437524358970023, + dmin2: 0.11437524358970023, + dn: 7.6696244203643861e-023, + dn1: 0.18342946835005580, + dn2: 0.11437524358970023, + g: 0.0000000000000000, + tau: 5.5753269214454873e-014, + nFail: 3, + iter: 102, + sigma: 2.3666022699121481, + nDiv: 1359, + zOut: []float64{0.2550031738262204e+01, 0.1834294683500561e+00, 0.7542783706608855e-16, 0.1209675067575323e-15, 0.2480977513501848e+01, 0.1143752435897001e+00, 0.3305882004599510e-18, 0.1004820715586787e-30, 0.2366602269912148e+01, 0.7669624420364386e-22, 0.1834294772982473e+00, 0.1834294683501117e+00, 0.2169993432366266e+01, 0.5067642455139780e-26, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.1810817848712033e+01, 0.1972152263052530e-29, 0.6115792910959321e-18, 0.6699979816704878e-18, 0.1681677309759878e+01, 0.5241368559131172e-22, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.1303743375476047e+01, 0.0000000000000000e+00, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1202161769544433e+01, 0.0000000000000000e+00, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1183377192742860e+01, 0.2033579915108999e-22, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1094206688544886e+01, 0.3167883469916549e-16, 0.9806070694382835e-18, 0.1224486171222500e-17, 
0.9154376259418607e+00, 0.2764930279233778e-14, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.8241395430971566e+00, 0.2245698748385924e-16, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.7318275341991307e+00, 0.4959852501050381e-23, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.5186198053161721e+00, 0.1503295986001297e-14, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.4143051093784424e+00, 0.1303656847202082e-15, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.2112065329503869e+00, 0.8411884198867843e-17, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.1093987140067686e+00, 0.2696165428113804e-17, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.9751785856405315e-01, 0.6617412588681544e-21, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.2818174710670554e-01, 0.1396210563637443e-18, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1697846193036144e-01, 0.6162975822039155e-31, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.2117542506861687e-04, 0.0000000000000000e+00, 0.9895328911616120e-03, 0.1620493249248586e-02}, + i0Out: 1, + n0Out: 0, + ppOut: 1, + dminOut: 7.6696244203643861e-023, + desigOut: -6.5648763085702813e-017, + sigmaOut: 2.3666022699121481, + qmaxOut: 0.41760242607758735, + nFailOut: 3, + iterOut: 102, + nDivOut: 1359, + ttypeOut: -4, + dmin1Out: 0.11437524358970023, + dmin2Out: 0.11437524358970023, + dnOut: 7.6696244203643861e-023, + dn1Out: 0.18342946835005580, + dn2Out: 0.11437524358970023, + gOut: 0.0000000000000000, + tauOut: 5.5753269214454873e-014, + }, + { + i0: 1, + n0: 21, + z: []float64{0.1648283185136998e+01, 0.1396221235720571e+01, 0.1712714336271993e+00, 0.2520619494164272e+00, 0.1510753432847732e+01, 0.1119977945086946e+01, 0.3792800633372563e+00, 0.5620469213879850e+00, 0.1328441621586708e+01, 0.1019485448443405e+01, 0.1388998759717073e+00, 0.6882362364805590e+00, 0.4466938077796418e+00, 0.2681061628164644e+00, 0.7093206047768255e+00, 0.3174875209348847e+00, 0.6843112870203156e+00, 0.9979892152967577e+00, 0.6490582981441884e+00, 0.3956426765003833e+00, 0.5085572738629487e+00, 0.1122623886995757e+01, 0.9750235054014829e-02, 0.3499168501137979e-01, 0.1364886053450573e+00, 0.1417066070690837e+00, 0.2301225778544498e-01, 0.4532233329988395e-02, 0.1646009972289452e+01, 0.6930161671496210e+00, 0.2362515608142310e+00, 0.9760060629252760e+00, 0.5818602562677768e+00, 0.3984323866837953e+00, 0.1797665269485310e-01, 0.4196794303982125e+00, 0.5600419521166516e+00, 0.2492354636952108e-01, 0.2195137569256029e+00, 0.5530950584419837e+00, 0.4184071984843414e+00, 0.2222708575473020e+00, 0.2727864547293006e+00, 0.4156500978626423e+00, 0.6774373914466536e-01, 0.2745959086613283e+00, 0.1050967099374242e+00, 0.6593428521263771e-01, 0.2040338718098096e+00, 0.1079809097801335e+00, 0.1271971985482246e+00, 0.2011496719671002e+00, 0.4444741998443960e-01, 0.1290210252363728e+00, 0.5776327498150620e+00, 0.4262359329629137e-01, 0.3402556968467140e+00, 0.6023491555328507e+00, 0.1086565805630269e+00, 0.3155392911289253e+00, 0.9382999256694983e+00, 0.1171677238805356e+00, 0.7901625299559836e+00, 0.9297887823519896e+00, 0.6095853796269167e+00, 0.7973955560628040e+00, 0.8018125008387630e+00, 0.6023523535200964e+00, 0.2443177602187348e-01, 0.8114406374558937e+00, 0.2277830378453201e+00, 0.1480363940474286e-01, 0.6969081780841352e+00, 0.3759308106650992e+00, 0.8012406122589412e-01, 0.5487604052643561e+00, 0.9781022865072954e-01, 0.1017549972519246e+00, 0.2961248981181939e-03, 0.7617929262469909e-01, 0.8408398800007548e-04, 0.3802088861182694e-03, 0.0000000000000000e+00, 
0.0000000000000000e+00}, + pp: 0, + dmin: -0.0000000000000000, + desig: 0.0000000000000000, + qmax: 1.8900334961849885, + ttype: 0, + dmin1: 0.0000000000000000, + dmin2: 0.0000000000000000, + dn: 0.0000000000000000, + dn1: 0.0000000000000000, + dn2: 0.0000000000000000, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 2, + sigma: 0.0000000000000000, + nDiv: 40, + zOut: []float64{0.1648283185136998e+01, 0.1819554618764197e+01, 0.1712714336271993e+00, 0.1422045283129674e+00, 0.1510753432847732e+01, 0.1747828967872021e+01, 0.3792800633372563e+00, 0.2882727267008810e+00, 0.1328441621586708e+01, 0.1179068770857534e+01, 0.1388998759717073e+00, 0.5262264257308433e-01, 0.4466938077796418e+00, 0.1103391769983383e+01, 0.7093206047768255e+00, 0.4399127392187891e+00, 0.6843112870203156e+00, 0.8934568459457148e+00, 0.6490582981441884e+00, 0.3694451726238032e+00, 0.5085572738629487e+00, 0.1488623362931603e+00, 0.9750235054014829e-02, 0.8939776288934402e-02, 0.1364886053450573e+00, 0.1505610868415679e+00, 0.2301225778544498e-01, 0.2515816443301624e+00, 0.1646009972289452e+01, 0.1630679888773521e+01, 0.2362515608142310e+00, 0.8429943526342391e-01, 0.5818602562677768e+00, 0.5155374736992060e+00, 0.1797665269485310e-01, 0.1952851185677853e-01, 0.5600419521166516e+00, 0.7600271971854760e+00, 0.2195137569256029e+00, 0.1208458544696003e+00, 0.4184071984843414e+00, 0.5703477987440417e+00, 0.2727864547293006e+00, 0.3240053608004366e-01, 0.6774373914466536e-01, 0.1404399130020459e+00, 0.1050967099374242e+00, 0.1526865702536626e+00, 0.2040338718098096e+00, 0.1785445001043715e+00, 0.1271971985482246e+00, 0.3166486394939177e-01, 0.4444741998443960e-01, 0.5904153058501098e+00, 0.5776327498150620e+00, 0.3328891237445398e+00, 0.3402556968467140e+00, 0.1160231536652011e+00, 0.1086565805630269e+00, 0.8787251358464724e+00, 0.9382999256694983e+00, 0.8497373197790092e+00, 0.7901625299559836e+00, 0.5668475593321608e+00, 0.6095853796269167e+00, 0.8445503211335190e+00, 0.8018125008387630e+00, 0.2319542476253924e-01, 0.2443177602187348e-01, 0.2290193891046544e+00, 0.2277830378453201e+00, 0.6931459494493321e+00, 0.6969081780841352e+00, 0.8388628986069724e-01, 0.8012406122589412e-01, 0.9342352322344821e-01, 0.9781022865072954e-01, 0.4682830325399513e-02, 0.2961248981181939e-03, 0.5317160915449039e-05, 0.8408398800007548e-04, 0.7876682708462645e-04, 0.0000000000000000e+00, 0.8939776288934402e-02}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 7.8766827084626452e-005, + desigOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 3, + nDivOut: 62, + ttypeOut: -1, + dmin1Out: 1.2363512593342330e-003, + dmin2Out: 1.2363512593342330e-003, + dnOut: 7.8766827084626452e-005, + dn1Out: 4.3867054272813191e-003, + dn2Out: 3.7622286348031123e-003, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.1648283185136998e+01, 0.1819554618764197e+01, 0.1712714336271993e+00, 0.1422045283129674e+00, 0.1510753432847732e+01, 0.1747828967872021e+01, 0.3792800633372563e+00, 0.2882727267008810e+00, 0.1328441621586708e+01, 0.1179068770857534e+01, 0.1388998759717073e+00, 0.5262264257308433e-01, 0.4466938077796418e+00, 0.1103391769983383e+01, 0.7093206047768255e+00, 0.4399127392187891e+00, 0.6843112870203156e+00, 0.8934568459457148e+00, 0.6490582981441884e+00, 0.3694451726238032e+00, 0.5085572738629487e+00, 0.1488623362931603e+00, 0.9750235054014829e-02, 0.8939776288934402e-02, 0.1364886053450573e+00, 0.1505610868415679e+00, 
0.2301225778544498e-01, 0.2515816443301624e+00, 0.1646009972289452e+01, 0.1630679888773521e+01, 0.2362515608142310e+00, 0.8429943526342391e-01, 0.5818602562677768e+00, 0.5155374736992060e+00, 0.1797665269485310e-01, 0.1952851185677853e-01, 0.5600419521166516e+00, 0.7600271971854760e+00, 0.2195137569256029e+00, 0.1208458544696003e+00, 0.4184071984843414e+00, 0.5703477987440417e+00, 0.2727864547293006e+00, 0.3240053608004366e-01, 0.6774373914466536e-01, 0.1404399130020459e+00, 0.1050967099374242e+00, 0.1526865702536626e+00, 0.2040338718098096e+00, 0.1785445001043715e+00, 0.1271971985482246e+00, 0.3166486394939177e-01, 0.4444741998443960e-01, 0.5904153058501098e+00, 0.5776327498150620e+00, 0.3328891237445398e+00, 0.3402556968467140e+00, 0.1160231536652011e+00, 0.1086565805630269e+00, 0.8787251358464724e+00, 0.9382999256694983e+00, 0.8497373197790092e+00, 0.7901625299559836e+00, 0.5668475593321608e+00, 0.6095853796269167e+00, 0.8445503211335190e+00, 0.8018125008387630e+00, 0.2319542476253924e-01, 0.2443177602187348e-01, 0.2290193891046544e+00, 0.2277830378453201e+00, 0.6931459494493321e+00, 0.6969081780841352e+00, 0.8388628986069724e-01, 0.8012406122589412e-01, 0.9342352322344821e-01, 0.9781022865072954e-01, 0.4682830325399513e-02, 0.2961248981181939e-03, 0.5317160915449039e-05, 0.8408398800007548e-04, 0.7876682708462645e-04, 0.0000000000000000e+00, 0.8939776288934402e-02}, + pp: 1, + dmin: 7.8766827084626452e-005, + desig: 0.0000000000000000, + qmax: 1.8900334961849885, + ttype: -1, + dmin1: 1.2363512593342330e-003, + dmin2: 1.2363512593342330e-003, + dn: 7.8766827084626452e-005, + dn1: 4.3867054272813191e-003, + dn2: 3.7622286348031123e-003, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 3, + sigma: 0.0000000000000000, + nDiv: 62, + zOut: []float64{0.1961759147077164e+01, 0.1819554618764197e+01, 0.1266970995487882e+00, 0.1422045283129674e+00, 0.1909404595024114e+01, 0.1747828967872021e+01, 0.1780101349021133e+00, 0.2882727267008810e+00, 0.1053681278528505e+01, 0.1179068770857534e+01, 0.5510526941411123e-01, 0.5262264257308433e-01, 0.1488199239788061e+01, 0.1103391769983383e+01, 0.2641064704009213e+00, 0.4399127392187891e+00, 0.9987955481685968e+00, 0.8934568459457148e+00, 0.5506279200968747e-01, 0.3694451726238032e+00, 0.1027393205724072e+00, 0.1488623362931603e+00, 0.1310094739466220e-01, 0.8939776288934402e-02, 0.3890417837770681e+00, 0.1505610868415679e+00, 0.1054511738587064e+01, 0.2515816443301624e+00, 0.6604675854498806e+00, 0.1630679888773521e+01, 0.6580113672099847e-01, 0.8429943526342391e-01, 0.4692648488349861e+00, 0.5155374736992060e+00, 0.3162862116895929e-01, 0.1952851185677853e-01, 0.8492444304861170e+00, 0.7600271971854760e+00, 0.8115939841327705e-01, 0.1208458544696003e+00, 0.5215889364108083e+00, 0.5703477987440417e+00, 0.8723974284448969e-02, 0.3240053608004366e-01, 0.2844025089712595e+00, 0.1404399130020459e+00, 0.9585480612390133e-01, 0.1526865702536626e+00, 0.1143545579298620e+00, 0.1785445001043715e+00, 0.1634864466429828e+00, 0.3166486394939177e-01, 0.7598179829516669e+00, 0.5904153058501098e+00, 0.5083170815153470e-01, 0.3328891237445398e+00, 0.9439165813601388e+00, 0.1160231536652011e+00, 0.7910503496831139e+00, 0.8787251358464724e+00, 0.6255345294280562e+00, 0.8497373197790092e+00, 0.7653155273545736e+00, 0.5668475593321608e+00, 0.1024302185414846e+00, 0.8445503211335190e+00, 0.5186166821452450e-01, 0.2319542476253924e-01, 0.8703036703394620e+00, 0.2290193891046544e+00, 0.6681052144545734e-01, 0.6931459494493321e+00, 0.1104992916386881e+00, 
0.8388628986069724e-01, 0.3959179295799719e-02, 0.9342352322344821e-01, 0.7289681905152429e-03, 0.4682830325399513e-02, 0.5745324691222600e-06, 0.5317160915449039e-05, 0.7819229461550419e-04, 0.7876682708462645e-04, 0.8723974284448969e-02, 0.8939776288934402e-02}, + i0Out: 1, + n0Out: 21, + ppOut: 1, + dminOut: 7.8192294615504193e-005, + desigOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 4, + nDivOut: 84, + ttypeOut: -4, + dmin1Out: 7.2365102959979382e-004, + dmin2Out: 1.7075768415239889e-002, + dnOut: 7.8192294615504193e-005, + dn1Out: 7.2365102959979382e-004, + dn2Out: 1.7075768415239889e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.1961759147077164e+01, 0.1819554618764197e+01, 0.1266970995487882e+00, 0.1422045283129674e+00, 0.1909404595024114e+01, 0.1747828967872021e+01, 0.1780101349021133e+00, 0.2882727267008810e+00, 0.1053681278528505e+01, 0.1179068770857534e+01, 0.5510526941411123e-01, 0.5262264257308433e-01, 0.1488199239788061e+01, 0.1103391769983383e+01, 0.2641064704009213e+00, 0.4399127392187891e+00, 0.9987955481685968e+00, 0.8934568459457148e+00, 0.5506279200968747e-01, 0.3694451726238032e+00, 0.1027393205724072e+00, 0.1488623362931603e+00, 0.1310094739466220e-01, 0.8939776288934402e-02, 0.3890417837770681e+00, 0.1505610868415679e+00, 0.1054511738587064e+01, 0.2515816443301624e+00, 0.6604675854498806e+00, 0.1630679888773521e+01, 0.6580113672099847e-01, 0.8429943526342391e-01, 0.4692648488349861e+00, 0.5155374736992060e+00, 0.3162862116895929e-01, 0.1952851185677853e-01, 0.8492444304861170e+00, 0.7600271971854760e+00, 0.8115939841327705e-01, 0.1208458544696003e+00, 0.5215889364108083e+00, 0.5703477987440417e+00, 0.8723974284448969e-02, 0.3240053608004366e-01, 0.2844025089712595e+00, 0.1404399130020459e+00, 0.9585480612390133e-01, 0.1526865702536626e+00, 0.1143545579298620e+00, 0.1785445001043715e+00, 0.1634864466429828e+00, 0.3166486394939177e-01, 0.7598179829516669e+00, 0.5904153058501098e+00, 0.5083170815153470e-01, 0.3328891237445398e+00, 0.9439165813601388e+00, 0.1160231536652011e+00, 0.7910503496831139e+00, 0.8787251358464724e+00, 0.6255345294280562e+00, 0.8497373197790092e+00, 0.7653155273545736e+00, 0.5668475593321608e+00, 0.1024302185414846e+00, 0.8445503211335190e+00, 0.5186166821452450e-01, 0.2319542476253924e-01, 0.8703036703394620e+00, 0.2290193891046544e+00, 0.6681052144545734e-01, 0.6931459494493321e+00, 0.1104992916386881e+00, 0.8388628986069724e-01, 0.3959179295799719e-02, 0.9342352322344821e-01, 0.7289681905152429e-03, 0.4682830325399513e-02, 0.5745324691222600e-06, 0.5317160915449039e-05, 0.7819229461550419e-04, 0.7876682708462645e-04, 0.8723974284448969e-02, 0.8939776288934402e-02}, + pp: 0, + dmin: 7.8192294615504193e-005, + desig: 0.0000000000000000, + qmax: 1.8900334961849885, + ttype: -4, + dmin1: 7.2365102959979382e-004, + dmin2: 1.7075768415239889e-002, + dn: 7.8192294615504193e-005, + dn1: 7.2365102959979382e-004, + dn2: 1.7075768415239889e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 4, + sigma: 0.0000000000000000, + nDiv: 84, + zOut: []float64{0.1961759147077164e+01, 0.2088378163269771e+01, 0.1266970995487882e+00, 0.1158391848322702e+00, 0.1909404595024114e+01, 0.1971497461737776e+01, 0.1780101349021133e+00, 0.9513882222772962e-01, 0.1053681278528505e+01, 0.1013569642358705e+01, 0.5510526941411123e-01, 0.8090970429970110e-01, 0.1488199239788061e+01, 0.1671317922533099e+01, 
0.2641064704009213e+00, 0.1578325483874163e+00, 0.9987955481685968e+00, 0.8959477084346864e+00, 0.5506279200968747e-01, 0.6314111623521673e-02, 0.1027393205724072e+00, 0.1094480729873660e+00, 0.1310094739466220e-01, 0.4656834793406785e-01, 0.3890417837770681e+00, 0.1396907091073883e+01, 0.1054511738587064e+01, 0.4985806330739840e+00, 0.6604675854498806e+00, 0.2276100057407134e+00, 0.6580113672099847e-01, 0.1356625794022653e+00, 0.4692648488349861e+00, 0.3651528072454984e+00, 0.3162862116895929e-01, 0.7355942454424362e-01, 0.8492444304861170e+00, 0.8567663209989688e+00, 0.8115939841327705e-01, 0.4940885660487250e-01, 0.5215889364108083e+00, 0.4808259707342031e+00, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.2844025089712595e+00, 0.3750191107685802e+00, 0.9585480612390133e-01, 0.2922900104287094e-01, 0.1143545579298620e+00, 0.2485339201737921e+00, 0.1634864466429828e+00, 0.4998108187459615e+00, 0.7598179829516669e+00, 0.3107607890010585e+00, 0.5083170815153470e-01, 0.1543981540828483e+00, 0.9439165813601388e+00, 0.1580490693604223e+01, 0.7910503496831139e+00, 0.3130858727896049e+00, 0.6255345294280562e+00, 0.1077686100636843e+01, 0.7653155273545736e+00, 0.7274051012980143e-01, 0.1024302185414846e+00, 0.8147329327002600e-01, 0.5186166821452450e-01, 0.5539901283655780e+00, 0.8703036703394620e+00, 0.3830459800631597e+00, 0.6681052144545734e-01, 0.1927318305890360e-01, 0.1104992916386881e+00, 0.9510720451940254e-01, 0.3959179295799719e-02, 0.3034592154998885e-04, 0.7289681905152429e-03, 0.6211134452527084e-03, 0.5745324691222600e-06, 0.7232819130731082e-07, 0.7819229461550419e-04, 0.3661024252896476e-07, 0.8723974284448969e-02, 0.5160120970399038e-02}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 3.6610242528964756e-008, + desigOut: 0.0000000000000000, + sigmaOut: 7.8083356181667918e-005, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 5, + nDivOut: 106, + ttypeOut: -2, + dmin1Out: 6.2053891278358614e-004, + dmin2Out: 2.9611625055501498e-002, + dnOut: 3.6610242528964756e-008, + dn1Out: 6.2053891278358614e-004, + dn2Out: 9.1148025223602810e-002, + gOut: 0.0000000000000000, + tauOut: 7.8083356181667918e-005, + }, + { + i0: 1, + n0: 21, + z: []float64{0.1961759147077164e+01, 0.2088378163269771e+01, 0.1266970995487882e+00, 0.1158391848322702e+00, 0.1909404595024114e+01, 0.1971497461737776e+01, 0.1780101349021133e+00, 0.9513882222772962e-01, 0.1053681278528505e+01, 0.1013569642358705e+01, 0.5510526941411123e-01, 0.8090970429970110e-01, 0.1488199239788061e+01, 0.1671317922533099e+01, 0.2641064704009213e+00, 0.1578325483874163e+00, 0.9987955481685968e+00, 0.8959477084346864e+00, 0.5506279200968747e-01, 0.6314111623521673e-02, 0.1027393205724072e+00, 0.1094480729873660e+00, 0.1310094739466220e-01, 0.4656834793406785e-01, 0.3890417837770681e+00, 0.1396907091073883e+01, 0.1054511738587064e+01, 0.4985806330739840e+00, 0.6604675854498806e+00, 0.2276100057407134e+00, 0.6580113672099847e-01, 0.1356625794022653e+00, 0.4692648488349861e+00, 0.3651528072454984e+00, 0.3162862116895929e-01, 0.7355942454424362e-01, 0.8492444304861170e+00, 0.8567663209989688e+00, 0.8115939841327705e-01, 0.4940885660487250e-01, 0.5215889364108083e+00, 0.4808259707342031e+00, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.2844025089712595e+00, 0.3750191107685802e+00, 0.9585480612390133e-01, 0.2922900104287094e-01, 0.1143545579298620e+00, 0.2485339201737921e+00, 0.1634864466429828e+00, 0.4998108187459615e+00, 0.7598179829516669e+00, 0.3107607890010585e+00, 0.5083170815153470e-01, 0.1543981540828483e+00, 
0.9439165813601388e+00, 0.1580490693604223e+01, 0.7910503496831139e+00, 0.3130858727896049e+00, 0.6255345294280562e+00, 0.1077686100636843e+01, 0.7653155273545736e+00, 0.7274051012980143e-01, 0.1024302185414846e+00, 0.8147329327002600e-01, 0.5186166821452450e-01, 0.5539901283655780e+00, 0.8703036703394620e+00, 0.3830459800631597e+00, 0.6681052144545734e-01, 0.1927318305890360e-01, 0.1104992916386881e+00, 0.9510720451940254e-01, 0.3959179295799719e-02, 0.3034592154998885e-04, 0.7289681905152429e-03, 0.6211134452527084e-03, 0.5745324691222600e-06, 0.7232819130731082e-07, 0.7819229461550419e-04, 0.3661024252896476e-07, 0.8723974284448969e-02, 0.5160120970399038e-02}, + pp: 1, + dmin: 3.6610242528964756e-008, + desig: 0.0000000000000000, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 6.2053891278358614e-004, + dmin2: 2.9611625055501498e-002, + dn: 3.6610242528964756e-008, + dn1: 6.2053891278358614e-004, + dn2: 9.1148025223602810e-002, + g: 0.0000000000000000, + tau: 7.8083356181667918e-005, + nFail: 0, + iter: 5, + sigma: 7.8083356181667918e-005, + nDiv: 106, + zOut: []float64{0.2204217311496068e+01, 0.2088378163269771e+01, 0.1036089580076783e+00, 0.1158391848322702e+00, 0.1963027289351853e+01, 0.1971497461737776e+01, 0.4912301654839819e-01, 0.9513882222772962e-01, 0.1045356293504034e+01, 0.1013569642358705e+01, 0.1293586117415210e+00, 0.8090970429970110e-01, 0.1699791822573021e+01, 0.1671317922533099e+01, 0.8319236989271818e-01, 0.1578325483874163e+00, 0.8190694135595162e+00, 0.8959477084346864e+00, 0.8437225690290790e-03, 0.6314111623521673e-02, 0.1551726617464312e+00, 0.1094480729873660e+00, 0.4192211096758560e+00, 0.4656834793406785e-01, 0.1476266577866037e+01, 0.1396907091073883e+01, 0.7687090018675194e-01, 0.4985806330739840e+00, 0.2864016483502532e+00, 0.2276100057407134e+00, 0.1729653861709652e+00, 0.1356625794022653e+00, 0.2657468090128033e+00, 0.3651528072454984e+00, 0.2371551996266359e+00, 0.7355942454424362e-01, 0.6690199413712319e+00, 0.8567663209989688e+00, 0.3551024412099299e-01, 0.4940885660487250e-01, 0.4504758109776356e+00, 0.4808259707342031e+00, 0.4295777776785939e-02, 0.5160120970399038e-02, 0.3999522974286917e+00, 0.3750191107685802e+00, 0.1816316160365039e-01, 0.2922900104287094e-01, 0.7301815407101296e+00, 0.2485339201737921e+00, 0.2127164214993764e+00, 0.4998108187459615e+00, 0.2524424849785568e+00, 0.3107607890010585e+00, 0.9666552191416621e+00, 0.1543981540828483e+00, 0.9269213106461920e+00, 0.1580490693604223e+01, 0.3640096408786760e+00, 0.3130858727896049e+00, 0.7864169332819952e+00, 0.1077686100636843e+01, 0.7535963003344312e-02, 0.7274051012980143e-01, 0.6279274220262862e+00, 0.8147329327002600e-01, 0.3379430237022286e+00, 0.5539901283655780e+00, 0.6437610281386123e-01, 0.3830459800631597e+00, 0.2847358698029713e-01, 0.1927318305890360e-01, 0.6666392685468185e-01, 0.9510720451940254e-01, 0.2827355178816361e-06, 0.3034592154998885e-04, 0.6208664319525956e-03, 0.6211134452527084e-03, 0.4264931214133004e-11, 0.7232819130731082e-07, 0.4059351646211308e-14, 0.3661024252896476e-07, 0.8437225690290790e-03, 0.5160120970399038e-02}, + i0Out: 1, + n0Out: 21, + ppOut: 1, + dminOut: 4.0593516462113082e-015, + desigOut: 4.3344264097778650e-021, + sigmaOut: 7.8119962155206313e-005, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 6, + nDivOut: 128, + ttypeOut: -2, + dmin1Out: 6.2079410376128833e-004, + dmin2Out: 4.5102919754957636e-002, + dnOut: 4.0593516462113082e-015, + dn1Out: 6.2079410376128833e-004, + dn2Out: 6.6633580933131861e-002, + gOut: 0.0000000000000000, 
+ tauOut: 3.6605973538398975e-008, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2204217311496068e+01, 0.2088378163269771e+01, 0.1036089580076783e+00, 0.1158391848322702e+00, 0.1963027289351853e+01, 0.1971497461737776e+01, 0.4912301654839819e-01, 0.9513882222772962e-01, 0.1045356293504034e+01, 0.1013569642358705e+01, 0.1293586117415210e+00, 0.8090970429970110e-01, 0.1699791822573021e+01, 0.1671317922533099e+01, 0.8319236989271818e-01, 0.1578325483874163e+00, 0.8190694135595162e+00, 0.8959477084346864e+00, 0.8437225690290790e-03, 0.6314111623521673e-02, 0.1551726617464312e+00, 0.1094480729873660e+00, 0.4192211096758560e+00, 0.4656834793406785e-01, 0.1476266577866037e+01, 0.1396907091073883e+01, 0.7687090018675194e-01, 0.4985806330739840e+00, 0.2864016483502532e+00, 0.2276100057407134e+00, 0.1729653861709652e+00, 0.1356625794022653e+00, 0.2657468090128033e+00, 0.3651528072454984e+00, 0.2371551996266359e+00, 0.7355942454424362e-01, 0.6690199413712319e+00, 0.8567663209989688e+00, 0.3551024412099299e-01, 0.4940885660487250e-01, 0.4504758109776356e+00, 0.4808259707342031e+00, 0.4295777776785939e-02, 0.5160120970399038e-02, 0.3999522974286917e+00, 0.3750191107685802e+00, 0.1816316160365039e-01, 0.2922900104287094e-01, 0.7301815407101296e+00, 0.2485339201737921e+00, 0.2127164214993764e+00, 0.4998108187459615e+00, 0.2524424849785568e+00, 0.3107607890010585e+00, 0.9666552191416621e+00, 0.1543981540828483e+00, 0.9269213106461920e+00, 0.1580490693604223e+01, 0.3640096408786760e+00, 0.3130858727896049e+00, 0.7864169332819952e+00, 0.1077686100636843e+01, 0.7535963003344312e-02, 0.7274051012980143e-01, 0.6279274220262862e+00, 0.8147329327002600e-01, 0.3379430237022286e+00, 0.5539901283655780e+00, 0.6437610281386123e-01, 0.3830459800631597e+00, 0.2847358698029713e-01, 0.1927318305890360e-01, 0.6666392685468185e-01, 0.9510720451940254e-01, 0.2827355178816361e-06, 0.3034592154998885e-04, 0.6208664319525956e-03, 0.6211134452527084e-03, 0.4264931214133004e-11, 0.7232819130731082e-07, 0.4059351646211308e-14, 0.3661024252896476e-07, 0.8437225690290790e-03, 0.5160120970399038e-02}, + pp: 0, + dmin: 4.0593516462113082e-015, + desig: 4.3344264097778650e-021, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 6.2079410376128833e-004, + dmin2: 4.5102919754957636e-002, + dn: 4.0593516462113082e-015, + dn1: 6.2079410376128833e-004, + dn2: 6.6633580933131861e-002, + g: 0.0000000000000000, + tau: 3.6605973538398975e-008, + nFail: 0, + iter: 6, + sigma: 7.8119962155206313e-005, + nDiv: 128, + zOut: []float64{0.2204217311496068e+01, 0.2307826269503742e+01, 0.1036089580076783e+00, 0.8812934261040264e-01, 0.1963027289351853e+01, 0.1924020963289845e+01, 0.4912301654839819e-01, 0.2668944646890268e-01, 0.1045356293504034e+01, 0.1148025458776649e+01, 0.1293586117415210e+00, 0.1915312145184878e+00, 0.1699791822573021e+01, 0.1591452977947247e+01, 0.8319236989271818e-01, 0.4281642408847453e-01, 0.8190694135595162e+00, 0.7770967120400667e+00, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1551726617464312e+00, 0.5742252947387723e+00, 0.4192211096758560e+00, 0.1077768810640642e+01, 0.1476266577866037e+01, 0.4753686674121431e+00, 0.7687090018675194e-01, 0.4631342794110950e-01, 0.2864016483502532e+00, 0.4130536065801048e+00, 0.1729653861709652e+00, 0.1112809541240190e+00, 0.2657468090128033e+00, 0.3916210545154161e+00, 0.2371551996266359e+00, 0.4051405202062471e+00, 0.6690199413712319e+00, 0.2993896652859737e+00, 0.3551024412099299e-01, 0.5343038813025977e-01, 0.4504758109776356e+00, 0.4013412006241577e+00, 0.4295777776785939e-02, 
0.4280911574482486e-02, 0.3999522974286917e+00, 0.4138345474578556e+00, 0.1816316160365039e-01, 0.3204760309498118e-01, 0.7301815407101296e+00, 0.9108503591145208e+00, 0.2127164214993764e+00, 0.5895442813598011e-01, 0.2524424849785568e+00, 0.1160143275984235e+01, 0.9666552191416621e+00, 0.7723298847804961e+00, 0.9269213106461920e+00, 0.5186010667443678e+00, 0.3640096408786760e+00, 0.5519914319921668e+00, 0.7864169332819952e+00, 0.2419614642931688e+00, 0.7535963003344312e-02, 0.1955698951896722e-01, 0.6279274220262862e+00, 0.9463134562095434e+00, 0.3379430237022286e+00, 0.2298969194226957e-01, 0.6437610281386123e-01, 0.6985999785188474e-01, 0.2847358698029713e-01, 0.2717093011896993e-01, 0.6666392685468185e-01, 0.3949327947122574e-01, 0.2827355178816361e-06, 0.4444831994804014e-08, 0.6208664319525956e-03, 0.6208619913814727e-03, 0.4264931214133004e-11, 0.2788519153273688e-22, 0.4059351646211308e-14, 0.3786532345060857e-28, 0.8437225690290790e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 0, + dminOut: 3.7865323450608567e-029, + desigOut: -5.4012504887129591e-021, + sigmaOut: 7.8119962159265674e-005, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 7, + nDivOut: 150, + ttypeOut: -2, + dmin1Out: 6.2086198711654151e-004, + dmin2Out: 3.9492996735707858e-002, + dnOut: 3.7865323450608567e-029, + dn1Out: 6.2086198711654151e-004, + dn2Out: 3.9492996735707858e-002, + gOut: 0.0000000000000000, + tauOut: 4.0593516183260787e-015, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2204217311496068e+01, 0.2307826269503742e+01, 0.1036089580076783e+00, 0.8812934261040264e-01, 0.1963027289351853e+01, 0.1924020963289845e+01, 0.4912301654839819e-01, 0.2668944646890268e-01, 0.1045356293504034e+01, 0.1148025458776649e+01, 0.1293586117415210e+00, 0.1915312145184878e+00, 0.1699791822573021e+01, 0.1591452977947247e+01, 0.8319236989271818e-01, 0.4281642408847453e-01, 0.8190694135595162e+00, 0.7770967120400667e+00, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1551726617464312e+00, 0.5742252947387723e+00, 0.4192211096758560e+00, 0.1077768810640642e+01, 0.1476266577866037e+01, 0.4753686674121431e+00, 0.7687090018675194e-01, 0.4631342794110950e-01, 0.2864016483502532e+00, 0.4130536065801048e+00, 0.1729653861709652e+00, 0.1112809541240190e+00, 0.2657468090128033e+00, 0.3916210545154161e+00, 0.2371551996266359e+00, 0.4051405202062471e+00, 0.6690199413712319e+00, 0.2993896652859737e+00, 0.3551024412099299e-01, 0.5343038813025977e-01, 0.4504758109776356e+00, 0.4013412006241577e+00, 0.4295777776785939e-02, 0.4280911574482486e-02, 0.3999522974286917e+00, 0.4138345474578556e+00, 0.1816316160365039e-01, 0.3204760309498118e-01, 0.7301815407101296e+00, 0.9108503591145208e+00, 0.2127164214993764e+00, 0.5895442813598011e-01, 0.2524424849785568e+00, 0.1160143275984235e+01, 0.9666552191416621e+00, 0.7723298847804961e+00, 0.9269213106461920e+00, 0.5186010667443678e+00, 0.3640096408786760e+00, 0.5519914319921668e+00, 0.7864169332819952e+00, 0.2419614642931688e+00, 0.7535963003344312e-02, 0.1955698951896722e-01, 0.6279274220262862e+00, 0.9463134562095434e+00, 0.3379430237022286e+00, 0.2298969194226957e-01, 0.6437610281386123e-01, 0.6985999785188474e-01, 0.2847358698029713e-01, 0.2717093011896993e-01, 0.6666392685468185e-01, 0.3949327947122574e-01, 0.2827355178816361e-06, 0.4444831994804014e-08, 0.6208664319525956e-03, 0.6208619913814727e-03, 0.4264931214133004e-11, 0.2788519153273688e-22, 0.4059351646211308e-14, 0.3786532345060857e-28, 0.8437225690290790e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 
3.7865323450608567e-029, + desig: -5.4012504887129591e-021, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 6.2086198711654151e-004, + dmin2: 3.9492996735707858e-002, + dn: 3.7865323450608567e-029, + dn1: 6.2086198711654151e-004, + dn2: 3.9492996735707858e-002, + g: 0.0000000000000000, + tau: 4.0593516183260787e-015, + nFail: 0, + iter: 7, + sigma: 7.8119962159265674e-005, + nDiv: 150, + zOut: []float64{0.2395955612114145e+01, 0.2307826269503742e+01, 0.7077038564739886e-01, 0.8812934261040264e-01, 0.1879940024111348e+01, 0.1924020963289845e+01, 0.1629847954401656e-01, 0.2668944646890268e-01, 0.1323258193751120e+01, 0.1148025458776649e+01, 0.2303502998543534e+00, 0.1915312145184878e+00, 0.1403919102181368e+01, 0.1591452977947247e+01, 0.2369972908607684e-01, 0.4281642408847453e-01, 0.7535654596375009e+00, 0.7770967120400667e+00, 0.1283811140869274e-03, 0.1684766835109566e-03, 0.1651865724265327e+01, 0.5742252947387723e+00, 0.3101568824672333e+00, 0.1077768810640642e+01, 0.2115252128860193e+00, 0.4753686674121431e+00, 0.9043805314343908e-01, 0.4631342794110950e-01, 0.4338965075606848e+00, 0.4130536065801048e+00, 0.1004386157577793e+00, 0.1112809541240190e+00, 0.6963229589638840e+00, 0.3916210545154161e+00, 0.1741934301847783e+00, 0.4051405202062471e+00, 0.1786266232314551e+00, 0.2993896652859737e+00, 0.1200482645536405e+00, 0.5343038813025977e-01, 0.2855738476449996e+00, 0.4013412006241577e+00, 0.6203611145567293e-02, 0.4280911574482486e-02, 0.4396785394072695e+00, 0.4138345474578556e+00, 0.6639071087521134e-01, 0.3204760309498118e-01, 0.9034140763752896e+00, 0.9108503591145208e+00, 0.7570790092830114e-01, 0.5895442813598011e-01, 0.1856765259836430e+01, 0.1160143275984235e+01, 0.2157144528657351e+00, 0.7723298847804961e+00, 0.8548780458707995e+00, 0.5186010667443678e+00, 0.1562335771835849e+00, 0.5519914319921668e+00, 0.1052848766285511e+00, 0.2419614642931688e+00, 0.1757806338135458e+00, 0.1955698951896722e-01, 0.7935225143382671e+00, 0.9463134562095434e+00, 0.2023962522401480e-02, 0.2298969194226957e-01, 0.9500696544845319e-01, 0.6985999785188474e-01, 0.1129463646814219e-01, 0.2717093011896993e-01, 0.2819864744791555e-01, 0.3949327947122574e-01, 0.9786381594178541e-10, 0.4444831994804014e-08, 0.6208618935176568e-03, 0.6208619913814727e-03, 0.1700670967075909e-47, 0.2788519153273688e-22, 0.3786532345060857e-28, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 21, + ppOut: 1, + dminOut: 3.7865323450608567e-029, + desigOut: -5.4012504887129591e-021, + sigmaOut: 7.8119962159265674e-005, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 8, + nDivOut: 172, + ttypeOut: -2, + dmin1Out: 6.2086189351765679e-004, + dmin2Out: 2.8198643003083550e-002, + dnOut: 3.7865323450608567e-029, + dn1Out: 6.2086189351765679e-004, + dn2Out: 2.8198643003083550e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 21, + z: []float64{0.2395955612114145e+01, 0.2307826269503742e+01, 0.7077038564739886e-01, 0.8812934261040264e-01, 0.1879940024111348e+01, 0.1924020963289845e+01, 0.1629847954401656e-01, 0.2668944646890268e-01, 0.1323258193751120e+01, 0.1148025458776649e+01, 0.2303502998543534e+00, 0.1915312145184878e+00, 0.1403919102181368e+01, 0.1591452977947247e+01, 0.2369972908607684e-01, 0.4281642408847453e-01, 0.7535654596375009e+00, 0.7770967120400667e+00, 0.1283811140869274e-03, 0.1684766835109566e-03, 0.1651865724265327e+01, 0.5742252947387723e+00, 0.3101568824672333e+00, 0.1077768810640642e+01, 0.2115252128860193e+00, 
0.4753686674121431e+00, 0.9043805314343908e-01, 0.4631342794110950e-01, 0.4338965075606848e+00, 0.4130536065801048e+00, 0.1004386157577793e+00, 0.1112809541240190e+00, 0.6963229589638840e+00, 0.3916210545154161e+00, 0.1741934301847783e+00, 0.4051405202062471e+00, 0.1786266232314551e+00, 0.2993896652859737e+00, 0.1200482645536405e+00, 0.5343038813025977e-01, 0.2855738476449996e+00, 0.4013412006241577e+00, 0.6203611145567293e-02, 0.4280911574482486e-02, 0.4396785394072695e+00, 0.4138345474578556e+00, 0.6639071087521134e-01, 0.3204760309498118e-01, 0.9034140763752896e+00, 0.9108503591145208e+00, 0.7570790092830114e-01, 0.5895442813598011e-01, 0.1856765259836430e+01, 0.1160143275984235e+01, 0.2157144528657351e+00, 0.7723298847804961e+00, 0.8548780458707995e+00, 0.5186010667443678e+00, 0.1562335771835849e+00, 0.5519914319921668e+00, 0.1052848766285511e+00, 0.2419614642931688e+00, 0.1757806338135458e+00, 0.1955698951896722e-01, 0.7935225143382671e+00, 0.9463134562095434e+00, 0.2023962522401480e-02, 0.2298969194226957e-01, 0.9500696544845319e-01, 0.6985999785188474e-01, 0.1129463646814219e-01, 0.2717093011896993e-01, 0.2819864744791555e-01, 0.3949327947122574e-01, 0.9786381594178541e-10, 0.4444831994804014e-08, 0.6208618935176568e-03, 0.6208619913814727e-03, 0.1700670967075909e-47, 0.2788519153273688e-22, 0.3786532345060857e-28, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 3.7865323450608567e-029, + desig: -5.4012504887129591e-021, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 6.2086189351765679e-004, + dmin2: 2.8198643003083550e-002, + dn: 3.7865323450608567e-029, + dn1: 6.2086189351765679e-004, + dn2: 2.8198643003083550e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 8, + sigma: 7.8119962159265674e-005, + nDiv: 172, + zOut: []float64{0.2395955612114145e+01, 0.2466725997761544e+01, 0.7077038564739886e-01, 0.5393549207373363e-01, 0.1879940024111348e+01, 0.1842303011581631e+01, 0.1629847954401656e-01, 0.1170659574821484e-01, 0.1323258193751120e+01, 0.1541901897857258e+01, 0.2303502998543534e+00, 0.2097365510789915e+00, 0.1403919102181368e+01, 0.1217882280188454e+01, 0.2369972908607684e-01, 0.1466422291592108e-01, 0.7535654596375009e+00, 0.7390296178356669e+00, 0.1283811140869274e-03, 0.2869551596920546e-03, 0.1651865724265327e+01, 0.1961735651572868e+01, 0.3101568824672333e+00, 0.3344283443049241e-01, 0.2115252128860193e+00, 0.2685204315989660e+00, 0.9043805314343908e-01, 0.1461369444993732e+00, 0.4338965075606848e+00, 0.3881981788190908e+00, 0.1004386157577793e+00, 0.1801598202532681e+00, 0.6963229589638840e+00, 0.6903565688953941e+00, 0.1741934301847783e+00, 0.4507175802324546e-01, 0.1786266232314551e+00, 0.2536031297618502e+00, 0.1200482645536405e+00, 0.1351822623162481e+00, 0.2855738476449996e+00, 0.1565951964743188e+00, 0.6203611145567293e-02, 0.1741812487831326e-01, 0.4396785394072695e+00, 0.4886511254041676e+00, 0.6639071087521134e-01, 0.1227425859208231e+00, 0.9034140763752896e+00, 0.8563793913827676e+00, 0.7570790092830114e-01, 0.1641466407918003e+00, 0.1856765259836430e+01, 0.1908333071910365e+01, 0.2157144528657351e+00, 0.9663383852973971e-01, 0.8548780458707995e+00, 0.9144777845246447e+00, 0.1562335771835849e+00, 0.1798735100772441e-01, 0.1052848766285511e+00, 0.2630781594343725e+00, 0.1757806338135458e+00, 0.5302070335887964e+00, 0.7935225143382671e+00, 0.2653394432718723e+00, 0.2023962522401480e-02, 0.7246963929058098e-03, 0.9500696544845319e-01, 0.1055769055236896e+00, 0.1129463646814219e-01, 
0.3016696409481782e-02, 0.2819864744791555e-01, 0.2518195113629758e-01, 0.9786381594178541e-10, 0.2412835834031154e-11, 0.6208618935176568e-03, 0.6208618911048210e-03, 0.1700670967075909e-47, 0.2869551596920546e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 6.2086189110482101e-004, + desigOut: -5.4012504887129591e-021, + sigmaOut: 7.8119962159265674e-005, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 9, + nDivOut: 193, + ttypeOut: -7, + dmin1Out: 2.5181951038433764e-002, + dmin2Out: 8.7297525620826724e-002, + dnOut: 6.2086189110482101e-004, + dn1Out: 2.5181951038433764e-002, + dn2Out: 9.4282269055547374e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2395955612114145e+01, 0.2466725997761544e+01, 0.7077038564739886e-01, 0.5393549207373363e-01, 0.1879940024111348e+01, 0.1842303011581631e+01, 0.1629847954401656e-01, 0.1170659574821484e-01, 0.1323258193751120e+01, 0.1541901897857258e+01, 0.2303502998543534e+00, 0.2097365510789915e+00, 0.1403919102181368e+01, 0.1217882280188454e+01, 0.2369972908607684e-01, 0.1466422291592108e-01, 0.7535654596375009e+00, 0.7390296178356669e+00, 0.1283811140869274e-03, 0.2869551596920546e-03, 0.1651865724265327e+01, 0.1961735651572868e+01, 0.3101568824672333e+00, 0.3344283443049241e-01, 0.2115252128860193e+00, 0.2685204315989660e+00, 0.9043805314343908e-01, 0.1461369444993732e+00, 0.4338965075606848e+00, 0.3881981788190908e+00, 0.1004386157577793e+00, 0.1801598202532681e+00, 0.6963229589638840e+00, 0.6903565688953941e+00, 0.1741934301847783e+00, 0.4507175802324546e-01, 0.1786266232314551e+00, 0.2536031297618502e+00, 0.1200482645536405e+00, 0.1351822623162481e+00, 0.2855738476449996e+00, 0.1565951964743188e+00, 0.6203611145567293e-02, 0.1741812487831326e-01, 0.4396785394072695e+00, 0.4886511254041676e+00, 0.6639071087521134e-01, 0.1227425859208231e+00, 0.9034140763752896e+00, 0.8563793913827676e+00, 0.7570790092830114e-01, 0.1641466407918003e+00, 0.1856765259836430e+01, 0.1908333071910365e+01, 0.2157144528657351e+00, 0.9663383852973971e-01, 0.8548780458707995e+00, 0.9144777845246447e+00, 0.1562335771835849e+00, 0.1798735100772441e-01, 0.1052848766285511e+00, 0.2630781594343725e+00, 0.1757806338135458e+00, 0.5302070335887964e+00, 0.7935225143382671e+00, 0.2653394432718723e+00, 0.2023962522401480e-02, 0.7246963929058098e-03, 0.9500696544845319e-01, 0.1055769055236896e+00, 0.1129463646814219e-01, 0.3016696409481782e-02, 0.2819864744791555e-01, 0.2518195113629758e-01, 0.9786381594178541e-10, 0.2412835834031154e-11, 0.6208618935176568e-03, 0.6208618911048210e-03, 0.1700670967075909e-47, 0.2869551596920546e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 6.2086189110482101e-004, + desig: -5.4012504887129591e-021, + qmax: 1.8900334961849885, + ttype: -7, + dmin1: 2.5181951038433764e-002, + dmin2: 8.7297525620826724e-002, + dn: 6.2086189110482101e-004, + dn1: 2.5181951038433764e-002, + dn2: 9.4282269055547374e-002, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 9, + sigma: 7.8119962159265674e-005, + nDiv: 193, + zOut: []float64{0.2520040627944239e+01, 0.2466725997761544e+01, 0.3943012599746679e-01, 0.5393549207373363e-01, 0.1813958619441340e+01, 0.1842303011581631e+01, 0.9950845630193767e-02, 0.1170659574821484e-01, 0.1741066741415017e+01, 0.1541901897857258e+01, 0.1467114516582776e+00, 
0.2097365510789915e+00, 0.1085214189555058e+01, 0.1217882280188454e+01, 0.9986318978978259e-02, 0.1466422291592108e-01, 0.7287093921253419e+00, 0.7390296178356669e+00, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.1993785121145770e+01, 0.1961735651572868e+01, 0.4504038193447841e-02, 0.3344283443049241e-01, 0.4095324760138526e+00, 0.2685204315989660e+00, 0.1385240464077977e+00, 0.1461369444993732e+00, 0.4292130907735224e+00, 0.3881981788190908e+00, 0.2897733504323247e+00, 0.1801598202532681e+00, 0.4450341145952761e+00, 0.6903565688953941e+00, 0.2568418582687495e-01, 0.4507175802324546e-01, 0.3624803443601846e+00, 0.2536031297618502e+00, 0.5840011260368079e-01, 0.1351822623162481e+00, 0.1149923468579125e+00, 0.1565951964743188e+00, 0.7401698075381481e-01, 0.1741812487831326e-01, 0.5367558686801371e+00, 0.4886511254041676e+00, 0.1958324578473516e+00, 0.1227425859208231e+00, 0.8240727124361776e+00, 0.8563793913827676e+00, 0.3801199318200257e+00, 0.1641466407918003e+00, 0.1624226116729040e+01, 0.1908333071910365e+01, 0.5440714051978934e-01, 0.9663383852973971e-01, 0.8774371331215411e+00, 0.9144777845246447e+00, 0.5393069221241477e-02, 0.1798735100772441e-01, 0.7872712619108886e+00, 0.2630781594343725e+00, 0.1786993199393658e+00, 0.5302070335887964e+00, 0.8674395783437358e-01, 0.2653394432718723e+00, 0.8820349511059155e-03, 0.7246963929058098e-03, 0.1070907050910267e+00, 0.1055769055236896e+00, 0.7093640994523618e-03, 0.3016696409481782e-02, 0.2385172514821930e-01, 0.2518195113629758e-01, 0.6280626703238275e-13, 0.2412835834031154e-11, 0.3257377007015450e-14, 0.6208618911048210e-03, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 3.2573770070154495e-015, + desigOut: 2.1703803823424652e-020, + sigmaOut: 6.9898185319802297e-004, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 10, + nDivOut: 214, + ttypeOut: -2, + dmin1Out: 2.3851725145806461e-002, + dmin2Out: 8.6019261441467765e-002, + dnOut: 3.2573770070154495e-015, + dn1Out: 2.3851725145806461e-002, + dn2Out: 0.10407400868154487, + gOut: 0.0000000000000000, + tauOut: 6.2086189103875732e-004, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2520040627944239e+01, 0.2466725997761544e+01, 0.3943012599746679e-01, 0.5393549207373363e-01, 0.1813958619441340e+01, 0.1842303011581631e+01, 0.9950845630193767e-02, 0.1170659574821484e-01, 0.1741066741415017e+01, 0.1541901897857258e+01, 0.1467114516582776e+00, 0.2097365510789915e+00, 0.1085214189555058e+01, 0.1217882280188454e+01, 0.9986318978978259e-02, 0.1466422291592108e-01, 0.7287093921253419e+00, 0.7390296178356669e+00, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.1993785121145770e+01, 0.1961735651572868e+01, 0.4504038193447841e-02, 0.3344283443049241e-01, 0.4095324760138526e+00, 0.2685204315989660e+00, 0.1385240464077977e+00, 0.1461369444993732e+00, 0.4292130907735224e+00, 0.3881981788190908e+00, 0.2897733504323247e+00, 0.1801598202532681e+00, 0.4450341145952761e+00, 0.6903565688953941e+00, 0.2568418582687495e-01, 0.4507175802324546e-01, 0.3624803443601846e+00, 0.2536031297618502e+00, 0.5840011260368079e-01, 0.1351822623162481e+00, 0.1149923468579125e+00, 0.1565951964743188e+00, 0.7401698075381481e-01, 0.1741812487831326e-01, 0.5367558686801371e+00, 0.4886511254041676e+00, 0.1958324578473516e+00, 0.1227425859208231e+00, 0.8240727124361776e+00, 0.8563793913827676e+00, 0.3801199318200257e+00, 0.1641466407918003e+00, 0.1624226116729040e+01, 
0.1908333071910365e+01, 0.5440714051978934e-01, 0.9663383852973971e-01, 0.8774371331215411e+00, 0.9144777845246447e+00, 0.5393069221241477e-02, 0.1798735100772441e-01, 0.7872712619108886e+00, 0.2630781594343725e+00, 0.1786993199393658e+00, 0.5302070335887964e+00, 0.8674395783437358e-01, 0.2653394432718723e+00, 0.8820349511059155e-03, 0.7246963929058098e-03, 0.1070907050910267e+00, 0.1055769055236896e+00, 0.7093640994523618e-03, 0.3016696409481782e-02, 0.2385172514821930e-01, 0.2518195113629758e-01, 0.6280626703238275e-13, 0.2412835834031154e-11, 0.3257377007015450e-14, 0.6208618911048210e-03, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 3.2573770070154495e-015, + desig: 2.1703803823424652e-020, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 2.3851725145806461e-002, + dmin2: 8.6019261441467765e-002, + dn: 3.2573770070154495e-015, + dn1: 2.3851725145806461e-002, + dn2: 0.10407400868154487, + g: 0.0000000000000000, + tau: 6.2086189103875732e-004, + nFail: 0, + iter: 10, + sigma: 6.9898185319802297e-004, + nDiv: 214, + zOut: []float64{0.2520040627944239e+01, 0.2559470753941703e+01, 0.3943012599746679e-01, 0.2794508075882943e-01, 0.1813958619441340e+01, 0.1795964384312701e+01, 0.9950845630193767e-02, 0.9646675917972324e-02, 0.1741066741415017e+01, 0.1878131517155319e+01, 0.1467114516582776e+00, 0.8477220453173254e-01, 0.1085214189555058e+01, 0.1010428304002300e+01, 0.9986318978978259e-02, 0.7202019583097946e-02, 0.7287093921253419e+00, 0.7222798755087921e+00, 0.7725029665513934e-03, 0.2132421202606784e-02, 0.1993785121145770e+01, 0.1996156738136608e+01, 0.4504038193447841e-02, 0.9240506410060376e-03, 0.4095324760138526e+00, 0.5471324717806409e+00, 0.1385240464077977e+00, 0.1086689918286978e+00, 0.4292130907735224e+00, 0.6103174493771462e+00, 0.2897733504323247e+00, 0.2112982785836522e+00, 0.4450341145952761e+00, 0.2594200218384955e+00, 0.2568418582687495e-01, 0.3588779484774172e-01, 0.3624803443601846e+00, 0.3849926621161204e+00, 0.5840011260368079e-01, 0.1744336104525046e-01, 0.1149923468579125e+00, 0.1715659665664736e+00, 0.7401698075381481e-01, 0.2315671901408358e+00, 0.5367558686801371e+00, 0.5010211363866497e+00, 0.1958324578473516e+00, 0.3221025481782661e+00, 0.8240727124361776e+00, 0.8820900960779340e+00, 0.3801199318200257e+00, 0.6999293195746292e+00, 0.1624226116729040e+01, 0.9787039376741966e+00, 0.5440714051978934e-01, 0.4877761656142095e-01, 0.8774371331215411e+00, 0.8340525857813585e+00, 0.5393069221241477e-02, 0.5090576402208483e-02, 0.7872712619108886e+00, 0.9608800054480426e+00, 0.1786993199393658e+00, 0.1613217694817542e-01, 0.8674395783437358e-01, 0.7149381583730083e-01, 0.8820349511059155e-03, 0.1321201613351008e-02, 0.1070907050910267e+00, 0.1064788675771248e+00, 0.7093640994523618e-03, 0.1589006148839502e-03, 0.2385172514821930e-01, 0.2369282453339490e-01, 0.6280626703238275e-13, 0.8634837515442557e-26, 0.3257377007015450e-14, 0.9466330862652142e-28, 0.7725029665513934e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 20, + ppOut: 0, + dminOut: 9.4663308626521417e-029, + desigOut: 2.1703795093889875e-020, + sigmaOut: 6.9898185320128035e-004, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 11, + nDivOut: 235, + ttypeOut: -2, + dmin1Out: 2.3692824533332088e-002, + dmin2Out: 7.0611780886194908e-002, + dnOut: 9.4663308626521417e-029, + dn1Out: 
2.3692824533332088e-002, + dn2Out: 0.10576950347767239, + gOut: 0.0000000000000000, + tauOut: 3.2573770070067200e-015, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2520040627944239e+01, 0.2559470753941703e+01, 0.3943012599746679e-01, 0.2794508075882943e-01, 0.1813958619441340e+01, 0.1795964384312701e+01, 0.9950845630193767e-02, 0.9646675917972324e-02, 0.1741066741415017e+01, 0.1878131517155319e+01, 0.1467114516582776e+00, 0.8477220453173254e-01, 0.1085214189555058e+01, 0.1010428304002300e+01, 0.9986318978978259e-02, 0.7202019583097946e-02, 0.7287093921253419e+00, 0.7222798755087921e+00, 0.7725029665513934e-03, 0.2132421202606784e-02, 0.1993785121145770e+01, 0.1996156738136608e+01, 0.4504038193447841e-02, 0.9240506410060376e-03, 0.4095324760138526e+00, 0.5471324717806409e+00, 0.1385240464077977e+00, 0.1086689918286978e+00, 0.4292130907735224e+00, 0.6103174493771462e+00, 0.2897733504323247e+00, 0.2112982785836522e+00, 0.4450341145952761e+00, 0.2594200218384955e+00, 0.2568418582687495e-01, 0.3588779484774172e-01, 0.3624803443601846e+00, 0.3849926621161204e+00, 0.5840011260368079e-01, 0.1744336104525046e-01, 0.1149923468579125e+00, 0.1715659665664736e+00, 0.7401698075381481e-01, 0.2315671901408358e+00, 0.5367558686801371e+00, 0.5010211363866497e+00, 0.1958324578473516e+00, 0.3221025481782661e+00, 0.8240727124361776e+00, 0.8820900960779340e+00, 0.3801199318200257e+00, 0.6999293195746292e+00, 0.1624226116729040e+01, 0.9787039376741966e+00, 0.5440714051978934e-01, 0.4877761656142095e-01, 0.8774371331215411e+00, 0.8340525857813585e+00, 0.5393069221241477e-02, 0.5090576402208483e-02, 0.7872712619108886e+00, 0.9608800054480426e+00, 0.1786993199393658e+00, 0.1613217694817542e-01, 0.8674395783437358e-01, 0.7149381583730083e-01, 0.8820349511059155e-03, 0.1321201613351008e-02, 0.1070907050910267e+00, 0.1064788675771248e+00, 0.7093640994523618e-03, 0.1589006148839502e-03, 0.2385172514821930e-01, 0.2369282453339490e-01, 0.6280626703238275e-13, 0.8634837515442557e-26, 0.3257377007015450e-14, 0.9466330862652142e-28, 0.7725029665513934e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 9.4663308626521417e-029, + desig: 2.1703795093889875e-020, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 2.3692824533332088e-002, + dmin2: 7.0611780886194908e-002, + dn: 9.4663308626521417e-029, + dn1: 2.3692824533332088e-002, + dn2: 0.10576950347767239, + g: 0.0000000000000000, + tau: 3.2573770070067200e-015, + nFail: 0, + iter: 11, + sigma: 6.9898185320128035e-004, + nDiv: 235, + zOut: []float64{0.2587415834700532e+01, 0.2559470753941703e+01, 0.1939710234687058e-01, 0.2794508075882943e-01, 0.1786213957883803e+01, 0.1795964384312701e+01, 0.1014308840067055e-01, 0.9646675917972324e-02, 0.1952760633286381e+01, 0.1878131517155319e+01, 0.4386417535844126e-01, 0.8477220453173254e-01, 0.9737661482269571e+00, 0.1010428304002300e+01, 0.5342015449359675e-02, 0.7202019583097946e-02, 0.7190702812620393e+00, 0.7222798755087921e+00, 0.5919653562455767e-02, 0.2132421202606784e-02, 0.1991161135215158e+01, 0.1996156738136608e+01, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.6555475524103290e+00, 0.5471324717806409e+00, 0.1011712753337584e+00, 0.1086689918286978e+00, 0.7204444526270400e+00, 0.6103174493771462e+00, 0.7608498315828412e-01, 0.2112982785836522e+00, 0.2192228335279531e+00, 0.2594200218384955e+00, 0.6302508481237896e-01, 0.3588779484774172e-01, 0.3394109383489919e+00, 0.3849926621161204e+00, 0.8817297145618809e-02, 
0.1744336104525046e-01, 0.3943158595616905e+00, 0.1715659665664736e+00, 0.2942312715577539e+00, 0.2315671901408358e+00, 0.5288924130071619e+00, 0.5010211363866497e+00, 0.5372046576619481e+00, 0.3221025481782661e+00, 0.1044814757990615e+01, 0.8820900960779340e+00, 0.6556411803358774e+00, 0.6999293195746292e+00, 0.3718403738997403e+00, 0.9787039376741966e+00, 0.1094101127175453e+00, 0.4877761656142095e-01, 0.7297330494660218e+00, 0.8340525857813585e+00, 0.6703044469024726e-02, 0.5090576402208483e-02, 0.9703091379271934e+00, 0.9608800054480426e+00, 0.1188642714683105e-02, 0.1613217694817542e-01, 0.7162637473596872e-01, 0.7149381583730083e-01, 0.1964081696850700e-02, 0.1321201613351008e-02, 0.1046736864951580e+00, 0.1064788675771248e+00, 0.3596705640885345e-04, 0.1589006148839502e-03, 0.2365685747698604e-01, 0.2369282453339490e-01, 0.3455244592226135e-52, 0.8634837515442557e-26, 0.9466330862652142e-28, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 20, + ppOut: 1, + dminOut: 9.4663308626521417e-029, + desigOut: 2.1703795093889875e-020, + sigmaOut: 6.9898185320128035e-004, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 12, + nDivOut: 256, + ttypeOut: -2, + dmin1Out: 2.3656857476986041e-002, + dmin2Out: 7.0305173122617720e-002, + dnOut: 9.4663308626521417e-029, + dn1Out: 2.3656857476986041e-002, + dn2Out: 0.10451478588027406, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 20, + z: []float64{0.2587415834700532e+01, 0.2559470753941703e+01, 0.1939710234687058e-01, 0.2794508075882943e-01, 0.1786213957883803e+01, 0.1795964384312701e+01, 0.1014308840067055e-01, 0.9646675917972324e-02, 0.1952760633286381e+01, 0.1878131517155319e+01, 0.4386417535844126e-01, 0.8477220453173254e-01, 0.9737661482269571e+00, 0.1010428304002300e+01, 0.5342015449359675e-02, 0.7202019583097946e-02, 0.7190702812620393e+00, 0.7222798755087921e+00, 0.5919653562455767e-02, 0.2132421202606784e-02, 0.1991161135215158e+01, 0.1996156738136608e+01, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.6555475524103290e+00, 0.5471324717806409e+00, 0.1011712753337584e+00, 0.1086689918286978e+00, 0.7204444526270400e+00, 0.6103174493771462e+00, 0.7608498315828412e-01, 0.2112982785836522e+00, 0.2192228335279531e+00, 0.2594200218384955e+00, 0.6302508481237896e-01, 0.3588779484774172e-01, 0.3394109383489919e+00, 0.3849926621161204e+00, 0.8817297145618809e-02, 0.1744336104525046e-01, 0.3943158595616905e+00, 0.1715659665664736e+00, 0.2942312715577539e+00, 0.2315671901408358e+00, 0.5288924130071619e+00, 0.5010211363866497e+00, 0.5372046576619481e+00, 0.3221025481782661e+00, 0.1044814757990615e+01, 0.8820900960779340e+00, 0.6556411803358774e+00, 0.6999293195746292e+00, 0.3718403738997403e+00, 0.9787039376741966e+00, 0.1094101127175453e+00, 0.4877761656142095e-01, 0.7297330494660218e+00, 0.8340525857813585e+00, 0.6703044469024726e-02, 0.5090576402208483e-02, 0.9703091379271934e+00, 0.9608800054480426e+00, 0.1188642714683105e-02, 0.1613217694817542e-01, 0.7162637473596872e-01, 0.7149381583730083e-01, 0.1964081696850700e-02, 0.1321201613351008e-02, 0.1046736864951580e+00, 0.1064788675771248e+00, 0.3596705640885345e-04, 0.1589006148839502e-03, 0.2365685747698604e-01, 0.2369282453339490e-01, 0.3455244592226135e-52, 0.8634837515442557e-26, 0.9466330862652142e-28, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 
0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 9.4663308626521417e-029, + desig: 2.1703795093889875e-020, + qmax: 1.8900334961849885, + ttype: -2, + dmin1: 2.3656857476986041e-002, + dmin2: 7.0305173122617720e-002, + dn: 9.4663308626521417e-029, + dn1: 2.3656857476986041e-002, + dn2: 0.10451478588027406, + g: 0.0000000000000000, + tau: 0.0000000000000000, + nFail: 0, + iter: 12, + sigma: 6.9898185320128035e-004, + nDiv: 256, + zOut: []float64{0.2587415834700532e+01, 0.2594984508308910e+01, 0.1939710234687058e-01, 0.1335166928493912e-01, 0.1786213957883803e+01, 0.1771176948261041e+01, 0.1014308840067055e-01, 0.1118297285215907e-01, 0.1952760633286381e+01, 0.1973613407054170e+01, 0.4386417535844126e-01, 0.2164225725832272e-01, 0.9737661482269571e+00, 0.9456374776795010e+00, 0.5342015449359675e-02, 0.4062111160297227e-02, 0.7190702812620393e+00, 0.7090993949257048e+00, 0.5919653562455767e-02, 0.1662247097070905e-01, 0.1991161135215158e+01, 0.1962964146704966e+01, 0.2539111990097616e-03, 0.8479567256479229e-04, 0.6555475524103290e+00, 0.7448056033330296e+00, 0.1011712753337584e+00, 0.9786215859981671e-01, 0.7204444526270400e+00, 0.6868388484470145e+00, 0.7608498315828412e-01, 0.2428454015756268e-01, 0.2192228335279531e+00, 0.2461349494442763e+00, 0.6302508481237896e-01, 0.8690924724014965e-01, 0.3394109383489919e+00, 0.2494905595159681e+00, 0.8817297145618809e-02, 0.1393559784278329e-01, 0.3943158595616905e+00, 0.6627831045381681e+00, 0.2942312715577539e+00, 0.2347927793131975e+00, 0.5288924130071619e+00, 0.8194758626174196e+00, 0.5372046576619481e+00, 0.6849248159595133e+00, 0.1044814757990615e+01, 0.1003702693628486e+01, 0.6556411803358774e+00, 0.2428944977310168e+00, 0.3718403738997403e+00, 0.2265275601477758e+00, 0.1094101127175453e+00, 0.3524523689025369e+00, 0.7297330494660218e+00, 0.3721552962940165e+00, 0.6703044469024726e-02, 0.1747664312451059e-01, 0.9703091379271934e+00, 0.9421927087788731e+00, 0.1188642714683105e-02, 0.9036173567869616e-04, 0.7162637473596872e-01, 0.6167166595864771e-01, 0.1964081696850700e-02, 0.3333583884775863e-02, 0.1046736864951580e+00, 0.8954764092829798e-01, 0.3596705640885345e-04, 0.9501841907954501e-05, 0.2365685747698604e-01, 0.1181892689658507e-01, 0.3455244592226135e-52, 0.8479567256479229e-04, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 19, + ppOut: 0, + dminOut: 1.1818926896585069e-002, + desigOut: 3.4696444683954120e-019, + sigmaOut: 1.2527410591694300e-002, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 13, + nDivOut: 276, + ttypeOut: -9, + dmin1Out: 5.9707584261797009e-002, + dmin2Out: 5.9707584261797009e-002, + dnOut: 1.1818926896585069e-002, + dn1Out: 8.9511673871889130e-002, + dn2Out: 5.9707584261797009e-002, + gOut: 0.0000000000000000, + tauOut: 1.1828428738493020e-002, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2587415834700532e+01, 0.2594984508308910e+01, 0.1939710234687058e-01, 0.1335166928493912e-01, 0.1786213957883803e+01, 0.1771176948261041e+01, 0.1014308840067055e-01, 0.1118297285215907e-01, 0.1952760633286381e+01, 0.1973613407054170e+01, 0.4386417535844126e-01, 0.2164225725832272e-01, 0.9737661482269571e+00, 0.9456374776795010e+00, 0.5342015449359675e-02, 0.4062111160297227e-02, 0.7190702812620393e+00, 0.7090993949257048e+00, 0.5919653562455767e-02, 0.1662247097070905e-01, 0.1991161135215158e+01, 
0.1962964146704966e+01, 0.2539111990097616e-03, 0.8479567256479229e-04, 0.6555475524103290e+00, 0.7448056033330296e+00, 0.1011712753337584e+00, 0.9786215859981671e-01, 0.7204444526270400e+00, 0.6868388484470145e+00, 0.7608498315828412e-01, 0.2428454015756268e-01, 0.2192228335279531e+00, 0.2461349494442763e+00, 0.6302508481237896e-01, 0.8690924724014965e-01, 0.3394109383489919e+00, 0.2494905595159681e+00, 0.8817297145618809e-02, 0.1393559784278329e-01, 0.3943158595616905e+00, 0.6627831045381681e+00, 0.2942312715577539e+00, 0.2347927793131975e+00, 0.5288924130071619e+00, 0.8194758626174196e+00, 0.5372046576619481e+00, 0.6849248159595133e+00, 0.1044814757990615e+01, 0.1003702693628486e+01, 0.6556411803358774e+00, 0.2428944977310168e+00, 0.3718403738997403e+00, 0.2265275601477758e+00, 0.1094101127175453e+00, 0.3524523689025369e+00, 0.7297330494660218e+00, 0.3721552962940165e+00, 0.6703044469024726e-02, 0.1747664312451059e-01, 0.9703091379271934e+00, 0.9421927087788731e+00, 0.1188642714683105e-02, 0.9036173567869616e-04, 0.7162637473596872e-01, 0.6167166595864771e-01, 0.1964081696850700e-02, 0.3333583884775863e-02, 0.1046736864951580e+00, 0.8954764092829798e-01, 0.3596705640885345e-04, 0.9501841907954501e-05, 0.2365685747698604e-01, 0.1181892689658507e-01, 0.3455244592226135e-52, 0.8479567256479229e-04, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 1.1818926896585069e-002, + desig: 3.4696444683954120e-019, + qmax: 1.8900334961849885, + ttype: -9, + dmin1: 5.9707584261797009e-002, + dmin2: 5.9707584261797009e-002, + dn: 1.1818926896585069e-002, + dn1: 8.9511673871889130e-002, + dn2: 5.9707584261797009e-002, + g: 0.0000000000000000, + tau: 1.1828428738493020e-002, + nFail: 0, + iter: 13, + sigma: 1.2527410591694300e-002, + nDiv: 276, + zOut: []float64{0.2596646703688871e+01, 0.2594984508308910e+01, 0.9107195378059658e-02, 0.1335166928493912e-01, 0.1761563251830163e+01, 0.1771176948261041e+01, 0.1252913577120413e-01, 0.1118297285215907e-01, 0.1971037054636311e+01, 0.1973613407054170e+01, 0.1038322923301279e-01, 0.2164225725832272e-01, 0.9276268857018078e+00, 0.9456374776795010e+00, 0.3105171497598932e-02, 0.4062111160297227e-02, 0.7109272204938374e+00, 0.7090993949257048e+00, 0.4589684232723624e-01, 0.1662247097070905e-01, 0.1905462626145317e+01, 0.1962964146704966e+01, 0.3314486004504485e-04, 0.8479567256479229e-04, 0.8309451431678238e+00, 0.7448056033330296e+00, 0.8089045693556918e-01, 0.9786215859981671e-01, 0.6185434577640304e+00, 0.6868388484470145e+00, 0.9663466631053550e-02, 0.2428454015756268e-01, 0.3116912561483949e+00, 0.2461349494442763e+00, 0.6956575230565126e-01, 0.8690924724014965e-01, 0.1821709311481225e+00, 0.2494905595159681e+00, 0.5070116699532772e-01, 0.1393559784278329e-01, 0.8351852429510604e+00, 0.6627831045381681e+00, 0.2303764547900405e+00, 0.2347927793131975e+00, 0.1262334749881915e+01, 0.8194758626174196e+00, 0.5445947541061251e+00, 0.6849248159595133e+00, 0.6903129633483998e+00, 0.1003702693628486e+01, 0.7970630839299049e-01, 0.2428944977310168e+00, 0.4875841467523446e+00, 0.2265275601477758e+00, 0.2690141110044630e+00, 0.3524523689025369e+00, 0.1089283545090865e+00, 0.3721552962940165e+00, 0.1511669372043137e+00, 0.1747664312451059e-01, 0.7794266594052606e+00, 0.9421927087788731e+00, 0.7149818024536710e-05, 0.9036173567869616e-04, 0.5330862612042146e-01, 0.6167166595864771e-01, 0.5599742376476567e-02, 
0.3333583884775863e-02, 0.7226792648875177e-01, 0.8954764092829798e-01, 0.1553961492315709e-05, 0.9501841907954501e-05, 0.1278990301151681e-03, 0.1181892689658507e-01, 0.7149818024536710e-05, 0.8479567256479229e-04, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 19, + ppOut: 1, + dminOut: 1.2789903011516807e-004, + desigOut: 0.0000000000000000, + sigmaOut: 2.4216884496671885e-002, + qmaxOut: 1.8900334961849885, + nFailOut: 0, + iterOut: 14, + nDivOut: 296, + ttypeOut: -4, + dmin1Out: 4.9975042235645591e-002, + dmin2Out: 4.9975042235645591e-002, + dnOut: 1.2789903011516807e-004, + dn1Out: 7.2258424646843816e-002, + dn2Out: 4.9975042235645591e-002, + gOut: 0.0000000000000000, + tauOut: 1.1689473904977585e-002, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2596646703688871e+01, 0.2594984508308910e+01, 0.9107195378059658e-02, 0.1335166928493912e-01, 0.1761563251830163e+01, 0.1771176948261041e+01, 0.1252913577120413e-01, 0.1118297285215907e-01, 0.1971037054636311e+01, 0.1973613407054170e+01, 0.1038322923301279e-01, 0.2164225725832272e-01, 0.9276268857018078e+00, 0.9456374776795010e+00, 0.3105171497598932e-02, 0.4062111160297227e-02, 0.7109272204938374e+00, 0.7090993949257048e+00, 0.4589684232723624e-01, 0.1662247097070905e-01, 0.1905462626145317e+01, 0.1962964146704966e+01, 0.3314486004504485e-04, 0.8479567256479229e-04, 0.8309451431678238e+00, 0.7448056033330296e+00, 0.8089045693556918e-01, 0.9786215859981671e-01, 0.6185434577640304e+00, 0.6868388484470145e+00, 0.9663466631053550e-02, 0.2428454015756268e-01, 0.3116912561483949e+00, 0.2461349494442763e+00, 0.6956575230565126e-01, 0.8690924724014965e-01, 0.1821709311481225e+00, 0.2494905595159681e+00, 0.5070116699532772e-01, 0.1393559784278329e-01, 0.8351852429510604e+00, 0.6627831045381681e+00, 0.2303764547900405e+00, 0.2347927793131975e+00, 0.1262334749881915e+01, 0.8194758626174196e+00, 0.5445947541061251e+00, 0.6849248159595133e+00, 0.6903129633483998e+00, 0.1003702693628486e+01, 0.7970630839299049e-01, 0.2428944977310168e+00, 0.4875841467523446e+00, 0.2265275601477758e+00, 0.2690141110044630e+00, 0.3524523689025369e+00, 0.1089283545090865e+00, 0.3721552962940165e+00, 0.1511669372043137e+00, 0.1747664312451059e-01, 0.7794266594052606e+00, 0.9421927087788731e+00, 0.7149818024536710e-05, 0.9036173567869616e-04, 0.5330862612042146e-01, 0.6167166595864771e-01, 0.5599742376476567e-02, 0.3333583884775863e-02, 0.7226792648875177e-01, 0.8954764092829798e-01, 0.1553961492315709e-05, 0.9501841907954501e-05, 0.1278990301151681e-03, 0.1181892689658507e-01, 0.7149818024536710e-05, 0.8479567256479229e-04, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 1.2789903011516807e-004, + desig: 0.0000000000000000, + qmax: 1.8900334961849885, + ttype: -4, + dmin1: 4.9975042235645591e-002, + dmin2: 4.9975042235645591e-002, + dn: 1.2789903011516807e-004, + dn1: 7.2258424646843816e-002, + dn2: 4.9975042235645591e-002, + g: 0.0000000000000000, + tau: 1.1689473904977585e-002, + nFail: 0, + iter: 14, + sigma: 2.4216884496671885e-002, + nDiv: 296, + zOut: []float64{0.2596646703688871e+01, 0.2605626003858251e+01, 0.9107195378059658e-02, 0.6157023564192275e-02, 0.1761563251830163e+01, 0.1767807468828494e+01, 0.1252913577120413e-01, 
0.1396950250695455e-01, 0.1971037054636311e+01, 0.1967322886153689e+01, 0.1038322923301279e-01, 0.4895872794820515e-02, 0.9276268857018078e+00, 0.9257082891959054e+00, 0.3105171497598932e-02, 0.2384715539127593e-02, 0.7109272204938374e+00, 0.7543114520732653e+00, 0.4589684232723624e-01, 0.1159397984377132e+00, 0.1905462626145317e+01, 0.1789428077358968e+01, 0.3314486004504485e-04, 0.1539126429493388e-04, 0.8309451431678238e+00, 0.9116923136304173e+00, 0.8089045693556918e-01, 0.5488064578914756e-01, 0.6185434577640304e+00, 0.5731983833972557e+00, 0.9663466631053550e-02, 0.5254756712901782e-02, 0.3116912561483949e+00, 0.3758743565324636e+00, 0.6956575230565126e-01, 0.3371567560620648e-01, 0.1821709311481225e+00, 0.1990285273285630e+00, 0.5070116699532772e-01, 0.2127577741907859e+00, 0.8351852429510604e+00, 0.8526760283416343e+00, 0.2303764547900405e+00, 0.3410582621885915e+00, 0.1262334749881915e+01, 0.1465743346590768e+01, 0.5445947541061251e+00, 0.2564847518533230e+00, 0.6903129633483998e+00, 0.5134066246793865e+00, 0.7970630839299049e-01, 0.7569737222001199e-01, 0.4875841467523446e+00, 0.6807729903281149e+00, 0.2690141110044630e+00, 0.4304410555024735e-01, 0.1089283545090865e+00, 0.2169232909544721e+00, 0.1511669372043137e+00, 0.5431576312495270e+00, 0.7794266594052606e+00, 0.2361482827650774e+00, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.5330862612042146e-01, 0.5877885927277143e-01, 0.5599742376476567e-02, 0.6884818375619963e-02, 0.7226792648875177e-01, 0.6525676686594341e-01, 0.1553961492315709e-05, 0.3045663725752605e-08, 0.1278990301151681e-03, 0.7757707209639971e-09, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 19, + ppOut: 0, + dminOut: 7.7577072096399712e-010, + desigOut: -5.4210108624275222e-020, + sigmaOut: 2.4344779705352607e-002, + qmaxOut: 1.8900334961849885, + nFailOut: 1, + iterOut: 16, + nDivOut: 336, + ttypeOut: -15, + dmin1Out: 5.3179116896294863e-002, + dmin2Out: 5.3179116896294863e-002, + dnOut: 7.7577072096399712e-010, + dn1Out: 6.5255212904451090e-002, + dn2Out: 5.3179116896294863e-002, + gOut: 0.0000000000000000, + tauOut: 1.2789520868072135e-004, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2596646703688871e+01, 0.2605626003858251e+01, 0.9107195378059658e-02, 0.6157023564192275e-02, 0.1761563251830163e+01, 0.1767807468828494e+01, 0.1252913577120413e-01, 0.1396950250695455e-01, 0.1971037054636311e+01, 0.1967322886153689e+01, 0.1038322923301279e-01, 0.4895872794820515e-02, 0.9276268857018078e+00, 0.9257082891959054e+00, 0.3105171497598932e-02, 0.2384715539127593e-02, 0.7109272204938374e+00, 0.7543114520732653e+00, 0.4589684232723624e-01, 0.1159397984377132e+00, 0.1905462626145317e+01, 0.1789428077358968e+01, 0.3314486004504485e-04, 0.1539126429493388e-04, 0.8309451431678238e+00, 0.9116923136304173e+00, 0.8089045693556918e-01, 0.5488064578914756e-01, 0.6185434577640304e+00, 0.5731983833972557e+00, 0.9663466631053550e-02, 0.5254756712901782e-02, 0.3116912561483949e+00, 0.3758743565324636e+00, 0.6956575230565126e-01, 0.3371567560620648e-01, 0.1821709311481225e+00, 0.1990285273285630e+00, 0.5070116699532772e-01, 0.2127577741907859e+00, 0.8351852429510604e+00, 0.8526760283416343e+00, 0.2303764547900405e+00, 0.3410582621885915e+00, 0.1262334749881915e+01, 0.1465743346590768e+01, 0.5445947541061251e+00, 0.2564847518533230e+00, 0.6903129633483998e+00, 
0.5134066246793865e+00, 0.7970630839299049e-01, 0.7569737222001199e-01, 0.4875841467523446e+00, 0.6807729903281149e+00, 0.2690141110044630e+00, 0.4304410555024735e-01, 0.1089283545090865e+00, 0.2169232909544721e+00, 0.1511669372043137e+00, 0.5431576312495270e+00, 0.7794266594052606e+00, 0.2361482827650774e+00, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.5330862612042146e-01, 0.5877885927277143e-01, 0.5599742376476567e-02, 0.6884818375619963e-02, 0.7226792648875177e-01, 0.6525676686594341e-01, 0.1553961492315709e-05, 0.3045663725752605e-08, 0.1278990301151681e-03, 0.7757707209639971e-09, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 7.7577072096399712e-010, + desig: -5.4210108624275222e-020, + qmax: 1.8900334961849885, + ttype: -15, + dmin1: 5.3179116896294863e-002, + dmin2: 5.3179116896294863e-002, + dn: 7.7577072096399712e-010, + dn1: 6.5255212904451090e-002, + dn2: 5.3179116896294863e-002, + g: 0.0000000000000000, + tau: 1.2789520868072135e-004, + nFail: 1, + iter: 16, + sigma: 2.4344779705352607e-002, + nDiv: 336, + zOut: []float64{0.2611783026646672e+01, 0.2605626003858251e+01, 0.4167433562238479e-02, 0.6157023564192275e-02, 0.1777609536997439e+01, 0.1767807468828494e+01, 0.1546038171944878e-01, 0.1396950250695455e-01, 0.1956758376453290e+01, 0.1967322886153689e+01, 0.2316152103168096e-02, 0.4895872794820515e-02, 0.9257768518560943e+00, 0.9257082891959054e+00, 0.1943036529261404e-02, 0.2384715539127593e-02, 0.8683082132059464e+00, 0.7543114520732653e+00, 0.2389312083572061e+00, 0.1159397984377132e+00, 0.1550512259490286e+01, 0.1789428077358968e+01, 0.9049975109102588e-05, 0.1539126429493388e-04, 0.9665639086686850e+00, 0.9116923136304173e+00, 0.3254569838994440e-01, 0.5488064578914756e-01, 0.5459074409444424e+00, 0.5731983833972557e+00, 0.3618064437406363e-02, 0.5254756712901782e-02, 0.4059719669254931e+00, 0.3758743565324636e+00, 0.1652917395900484e-01, 0.3371567560620648e-01, 0.3952571267845734e+00, 0.1990285273285630e+00, 0.4589757947481138e+00, 0.2127577741907859e+00, 0.7347584950063413e+00, 0.8526760283416343e+00, 0.6803648845168142e+00, 0.3410582621885915e+00, 0.1041863213151506e+01, 0.1465743346590768e+01, 0.1263898840735783e+00, 0.2564847518533230e+00, 0.4627141120500496e+00, 0.5134066246793865e+00, 0.1113705527974558e+00, 0.7569737222001199e-01, 0.6124465423051357e+00, 0.6807729903281149e+00, 0.1524585149425051e-01, 0.4304410555024735e-01, 0.7448350699339780e+00, 0.2169232909544721e+00, 0.1722069046798406e+00, 0.5431576312495270e+00, 0.6394299132491200e-01, 0.2361482827650774e+00, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6566219320748727e-01, 0.5877885927277143e-01, 0.6842308575232983e-02, 0.6884818375619963e-02, 0.5841446056060346e-01, 0.6525676686594341e-01, 0.4044780558898965e-16, 0.3045663725752605e-08, 0.1013559914197710e-18, 0.7757707209639971e-09, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 19, + ppOut: 1, + dminOut: 1.0135599141977102e-019, + desigOut: 2.7104744119407903e-020, + sigmaOut: 2.4344780481123287e-002, + qmaxOut: 1.8900334961849885, + nFailOut: 2, + iterOut: 18, + nDivOut: 376, + ttypeOut: -15, + dmin1Out: 
5.8414457514939733e-002, + dmin2Out: 5.8777374831867304e-002, + dnOut: 1.0135599141977102e-019, + dn1Out: 5.8414457514939733e-002, + dn2Out: 5.8777374831867304e-002, + gOut: 0.0000000000000000, + tauOut: 7.7577068041483555e-010, + }, + { + i0: 1, + n0: 19, + z: []float64{0.2611783026646672e+01, 0.2605626003858251e+01, 0.4167433562238479e-02, 0.6157023564192275e-02, 0.1777609536997439e+01, 0.1767807468828494e+01, 0.1546038171944878e-01, 0.1396950250695455e-01, 0.1956758376453290e+01, 0.1967322886153689e+01, 0.2316152103168096e-02, 0.4895872794820515e-02, 0.9257768518560943e+00, 0.9257082891959054e+00, 0.1943036529261404e-02, 0.2384715539127593e-02, 0.8683082132059464e+00, 0.7543114520732653e+00, 0.2389312083572061e+00, 0.1159397984377132e+00, 0.1550512259490286e+01, 0.1789428077358968e+01, 0.9049975109102588e-05, 0.1539126429493388e-04, 0.9665639086686850e+00, 0.9116923136304173e+00, 0.3254569838994440e-01, 0.5488064578914756e-01, 0.5459074409444424e+00, 0.5731983833972557e+00, 0.3618064437406363e-02, 0.5254756712901782e-02, 0.4059719669254931e+00, 0.3758743565324636e+00, 0.1652917395900484e-01, 0.3371567560620648e-01, 0.3952571267845734e+00, 0.1990285273285630e+00, 0.4589757947481138e+00, 0.2127577741907859e+00, 0.7347584950063413e+00, 0.8526760283416343e+00, 0.6803648845168142e+00, 0.3410582621885915e+00, 0.1041863213151506e+01, 0.1465743346590768e+01, 0.1263898840735783e+00, 0.2564847518533230e+00, 0.4627141120500496e+00, 0.5134066246793865e+00, 0.1113705527974558e+00, 0.7569737222001199e-01, 0.6124465423051357e+00, 0.6807729903281149e+00, 0.1524585149425051e-01, 0.4304410555024735e-01, 0.7448350699339780e+00, 0.2169232909544721e+00, 0.1722069046798406e+00, 0.5431576312495270e+00, 0.6394299132491200e-01, 0.2361482827650774e+00, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6566219320748727e-01, 0.5877885927277143e-01, 0.6842308575232983e-02, 0.6884818375619963e-02, 0.5841446056060346e-01, 0.6525676686594341e-01, 0.4044780558898965e-16, 0.3045663725752605e-08, 0.1013559914197710e-18, 0.7757707209639971e-09, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 1.0135599141977102e-019, + desig: 2.7104744119407903e-020, + qmax: 1.8900334961849885, + ttype: -15, + dmin1: 5.8414457514939733e-002, + dmin2: 5.8777374831867304e-002, + dn: 1.0135599141977102e-019, + dn1: 5.8414457514939733e-002, + dn2: 5.8777374831867304e-002, + g: 0.0000000000000000, + tau: 7.7577068041483555e-010, + nFail: 2, + iter: 18, + sigma: 2.4344780481123287e-002, + nDiv: 376, + zOut: []float64{0.2611783026646672e+01, 0.2615950460208911e+01, 0.4167433562238479e-02, 0.2831884532112553e-02, 0.1777609536997439e+01, 0.1790238034184775e+01, 0.1546038171944878e-01, 0.1689844079671380e-01, 0.1956758376453290e+01, 0.1942176087759744e+01, 0.2316152103168096e-02, 0.1104039956008399e-02, 0.9257768518560943e+00, 0.9266158484293474e+00, 0.1943036529261404e-02, 0.1820770257466081e-02, 0.8683082132059464e+00, 0.1105418651305687e+01, 0.2389312083572061e+00, 0.3351361651941490e+00, 0.1550512259490286e+01, 0.1215385144271246e+01, 0.9049975109102588e-05, 0.7197207696703830e-05, 0.9665639086686850e+00, 0.9991024098509327e+00, 0.3254569838994440e-01, 0.1778290067827487e-01, 0.5459074409444424e+00, 0.5317426047035739e+00, 0.3618064437406363e-02, 0.2762300261676148e-02, 0.4059719669254931e+00, 0.4197388406228219e+00, 
0.1652917395900484e-01, 0.1556509232613395e-01, 0.3952571267845734e+00, 0.8386678292065533e+00, 0.4589757947481138e+00, 0.4021095747913879e+00, 0.7347584950063413e+00, 0.1013013804731768e+01, 0.6803648845168142e+00, 0.6997408538631263e+00, 0.1041863213151506e+01, 0.4685122433619579e+00, 0.1263898840735783e+00, 0.1248257304047288e+00, 0.4627141120500496e+00, 0.4492589344427766e+00, 0.1113705527974558e+00, 0.1518244930621437e+00, 0.6124465423051357e+00, 0.4758679007372426e+00, 0.1524585149425051e-01, 0.2386301922514691e-01, 0.7448350699339780e+00, 0.8931789553886716e+00, 0.1722069046798406e+00, 0.1232835205710967e-01, 0.6394299132491200e-01, 0.5161612293293578e-01, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6566219320748727e-01, 0.7250261437426894e-01, 0.6842308575232983e-02, 0.5512763475647510e-02, 0.5841446056060346e-01, 0.5290169708495600e-01, 0.4044780558898965e-16, 0.7749519698096867e-34, 0.1013559914197710e-18, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 19, + ppOut: 0, + dminOut: 1.0135599141977093e-019, + desigOut: 2.7104744119407903e-020, + sigmaOut: 2.4344780481123287e-002, + qmaxOut: 1.8900334961849885, + nFailOut: 2, + iterOut: 19, + nDivOut: 396, + ttypeOut: -2, + dmin1Out: 5.1614639267802333e-002, + dmin2Out: 5.1614639267802333e-002, + dnOut: 1.0135599141977093e-019, + dn1Out: 5.2901697084955956e-002, + dn2Out: 6.5660305799035965e-002, + gOut: 0.0000000000000000, + tauOut: 0.0000000000000000, + }, + { + i0: 1, + n0: 4, + z: []float64{0.6482015258643838e+00, 0.6481999396438409e+00, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.8927011497345719e-01, 0.8926852875357447e-01, 0.6601400172354812e-12, 0.2332164496627946e-11, 0.3153708122363886e+00, 0.3200537757344777e+00, 0.4684549720964035e-02, 0.2356209328895529e-07, 0.1609789066298190e-05, 0.6430148075154844e-11, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 
0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 6.4301480751548441e-012, + desig: -2.0633404957759536e-016, + qmax: 1.6602870234255678, + ttype: -2, + dmin1: 8.9268528752914328e-002, + dmin2: 8.9268528752914328e-002, + dn: 6.4301480751548441e-012, + dn1: 0.31536922601351364, + dn2: 8.9268528752914328e-002, + g: 0.49975000000000003, + tau: 1.5862205428611591e-006, + nFail: 5, + iter: 102, + sigma: 2.0015106747272648, + nDiv: 1395, + zOut: []float64{0.6481999396374125e+00, 0.6481999396438409e+00, 0.4011480356653261e-18, 0.2912830939801045e-17, 0.8926852874947827e-01, 0.8926852875357447e-01, 0.8361491594360282e-11, 0.2332164496627946e-11, 0.3200537992817811e+00, 0.3200537757344777e+00, 0.4733821287189426e-18, 0.2356209328895529e-07, 0.1787791211067534e-14, 0.6430148075154844e-11, 0.4011480356653261e-18, 0.2912830939801045e-17, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 4, + ppOut: 1, + dminOut: 1.7877912110675335e-015, + desigOut: -3.7836067586147711e-017, + sigmaOut: 2.0015106747336930, + qmaxOut: 1.6602870234255678, + nFailOut: 5, + iterOut: 103, + nDivOut: 1400, + ttypeOut: -4, + dmin1Out: 8.9268528747146109e-002, + dmin2Out: 8.9268528747146109e-002, + dnOut: 1.7877912110675335e-015, + dn1Out: 0.32005377571968785, + dn2Out: 8.9268528747146109e-002, + gOut: 0.49975000000000003, + tauOut: 6.4283598105616478e-012, + }, + { + i0: 1, + n0: 4, + z: []float64{0.6481999396374125e+00, 0.6481999396438409e+00, 0.4011480356653261e-18, 0.2912830939801045e-17, 0.8926852874947827e-01, 0.8926852875357447e-01, 0.8361491594360282e-11, 0.2332164496627946e-11, 0.3200537992817811e+00, 0.3200537757344777e+00, 0.4733821287189426e-18, 0.2356209328895529e-07, 0.1787791211067534e-14, 0.6430148075154844e-11, 0.4011480356653261e-18, 0.2912830939801045e-17, 
0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 1.7877912110675335e-015, + desig: -3.7836067586147711e-017, + qmax: 1.6602870234255678, + ttype: -4, + dmin1: 8.9268528747146109e-002, + dmin2: 8.9268528747146109e-002, + dn: 1.7877912110675335e-015, + dn1: 0.32005377571968785, + dn2: 8.9268528747146109e-002, + g: 0.49975000000000003, + tau: 6.4283598105616478e-012, + nFail: 5, + iter: 103, + sigma: 2.0015106747336930, + nDiv: 1400, + zOut: []float64{0.6481999396374125e+00, 0.6481999396374107e+00, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.8926852874947827e-01, 0.8926852875783797e-01, 0.8361491594360282e-11, 0.2997839428604580e-10, 0.3200537992817811e+00, 0.3200537992518009e+00, 0.4733821287189426e-18, 0.2644269217171004e-32, 0.1787791211067534e-14, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 
0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 4, + ppOut: 0, + dminOut: 2.2279522444840209e-024, + desigOut: -2.6401698146816920e-017, + sigmaOut: 2.0015106747336948, + qmaxOut: 1.6602870234255678, + nFailOut: 5, + iterOut: 104, + nDivOut: 1405, + ttypeOut: -4, + dmin1Out: 8.9268528749476481e-002, + dmin2Out: 8.9268528749476481e-002, + dnOut: 2.2279522444840209e-024, + dn1Out: 0.32005379925180094, + dn2Out: 8.9268528749476481e-002, + gOut: 0.49975000000000003, + tauOut: 1.7877912088395813e-015, + }, + { + i0: 1, + n0: 4, + z: []float64{0.6481999396374125e+00, 0.6481999396374107e+00, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.8926852874947827e-01, 0.8926852875783797e-01, 0.8361491594360282e-11, 0.2997839428604580e-10, 0.3200537992817811e+00, 0.3200537992518009e+00, 0.4733821287189426e-18, 0.2644269217171004e-32, 0.1787791211067534e-14, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 2.2279522444840209e-024, + desig: -2.6401698146816920e-017, + qmax: 1.6602870234255678, + ttype: -4, + dmin1: 8.9268528749476481e-002, + dmin2: 8.9268528749476481e-002, + dn: 2.2279522444840209e-024, + dn1: 0.32005379925180094, + dn2: 8.9268528749476481e-002, + g: 0.49975000000000003, + tau: 1.7877912088395813e-015, + nFail: 5, + iter: 104, + sigma: 2.0015106747336948, + nDiv: 1405, + zOut: []float64{0.6258828074500417e+00, 0.6481999396374107e+00, 
0.7879513719234823e-20, 0.5524513774965514e-19, 0.6695139660044724e-01, 0.8926852875783797e-01, 0.1433084218388560e-09, 0.2997839428604580e-10, 0.2977366669211234e+00, 0.3200537992518009e+00, 0.8926852875783797e-01, 0.2644269217171004e-32, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 3, + ppOut: 1, + dminOut: 6.6951396570468849e-002, + desigOut: -1.0061396160665481e-016, + sigmaOut: 2.0238278069210640, + qmaxOut: 1.6602870234255678, + nFailOut: 5, + iterOut: 105, + nDivOut: 1409, + ttypeOut: -9, + dmin1Out: 6.6951396570468849e-002, + dmin2Out: 0.62588280745004166, + dnOut: 0.29773666692112338, + dn1Out: 6.6951396570468849e-002, + dn2Out: 0.62588280745004166, + gOut: 0.49975000000000003, + tauOut: 2.2317132187369120e-002, + }, + { + i0: 1, + n0: 3, + z: []float64{0.6258828074500417e+00, 0.6481999396374107e+00, 0.7879513719234823e-20, 0.5524513774965514e-19, 0.6695139660044724e-01, 0.8926852875783797e-01, 0.1433084218388560e-09, 0.2997839428604580e-10, 0.2977366669211234e+00, 0.3200537992518009e+00, 0.8926852875783797e-01, 0.2644269217171004e-32, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 
0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 6.6951396570468849e-002, + desig: -1.0061396160665481e-016, + qmax: 1.6602870234255678, + ttype: -9, + dmin1: 6.6951396570468849e-002, + dmin2: 0.62588280745004166, + dn: 0.29773666692112338, + dn1: 6.6951396570468849e-002, + dn2: 0.62588280745004166, + g: 0.49975000000000003, + tau: 2.2317132187369120e-002, + nFail: 5, + iter: 105, + sigma: 2.0238278069210640, + nDiv: 1409, + zOut: []float64{0.6258828074500417e+00, 0.5589320748538995e+00, 0.7879513719234823e-20, 0.9438435755776795e-21, 0.6695139660044724e-01, 0.6641476135588615e-06, 0.1433084218388560e-09, 0.6424501268835132e-04, 0.2977366669211234e+00, 0.2307216893122929e+00, 0.8926852875783797e-01, 0.6695139660044724e-01, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 3, + ppOut: 0, + dminOut: 6.6400430513702258e-007, + desigOut: 1.9428902930940239e-016, + sigmaOut: 2.0907785395172058, + qmaxOut: 
1.6602870234255678, + nFailOut: 5, + iterOut: 106, + nDivOut: 1413, + ttypeOut: -4, + dmin1Out: 6.6400430513702258e-007, + dmin2Out: 0.55893207485389951, + dnOut: 0.23072168931229292, + dn1Out: 6.6400430513702258e-007, + dn2Out: 0.55893207485389951, + gOut: 0.49975000000000003, + tauOut: 6.6950732596142107e-002, + }, + { + i0: 1, + n0: 3, + z: []float64{0.6258828074500417e+00, 0.5589320748538995e+00, 0.7879513719234823e-20, 0.9438435755776795e-21, 0.6695139660044724e-01, 0.6641476135588615e-06, 0.1433084218388560e-09, 0.6424501268835132e-04, 0.2977366669211234e+00, 0.2307216893122929e+00, 0.8926852875783797e-01, 0.6695139660044724e-01, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 6.6400430513702258e-007, + desig: 1.9428902930940239e-016, + qmax: 1.6602870234255678, + ttype: -4, + dmin1: 6.6400430513702258e-007, + dmin2: 0.55893207485389951, + dn: 0.23072168931229292, + dn1: 6.6400430513702258e-007, + dn2: 0.55893207485389951, + g: 0.49975000000000003, + tau: 6.6950732596142107e-002, + nFail: 5, + iter: 106, + sigma: 2.0907785395172058, + nDiv: 1413, + zOut: []float64{0.5589315065851642e+00, 0.5589320748538995e+00, 0.1121517486324177e-26, 0.9438435755776795e-21, 0.6434089156657428e-04, 0.6641476135588615e-06, 0.2303778747300831e+00, 0.6424501268835132e-04, 0.3432463134744483e-03, 0.2307216893122929e+00, 0.6641476135588615e-06, 0.6695139660044724e-01, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 
0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 3, + ppOut: 1, + dminOut: 9.5878878222950548e-008, + desigOut: -9.3233975295464906e-017, + sigmaOut: 2.0907791077859414, + qmaxOut: 1.6602870234255678, + nFailOut: 6, + iterOut: 108, + nDivOut: 1421, + ttypeOut: -15, + dmin1Out: 9.5878878222950548e-008, + dmin2Out: 0.55893150658516422, + dnOut: 3.4324631347444829e-004, + dn1Out: 9.5878878222950548e-008, + dn2Out: 0.55893150658516422, + gOut: 0.49975000000000003, + tauOut: 5.6826873533591094e-007, + }, + { + i0: 1, + n0: 3, + z: []float64{0.5589315065851642e+00, 0.5589320748538995e+00, 0.1121517486324177e-26, 0.9438435755776795e-21, 0.6434089156657428e-04, 0.6641476135588615e-06, 0.2303778747300831e+00, 0.6424501268835132e-04, 0.3432463134744483e-03, 0.2307216893122929e+00, 0.6641476135588615e-06, 0.6695139660044724e-01, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 
0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 0, + dmin: 9.5878878222950548e-008, + desig: -9.3233975295464906e-017, + qmax: 1.6602870234255678, + ttype: -15, + dmin1: 9.5878878222950548e-008, + dmin2: 0.55893150658516422, + dn: 3.4324631347444829e-004, + dn1: 9.5878878222950548e-008, + dn2: 0.55893150658516422, + g: 0.49975000000000003, + tau: 5.6826873533591094e-007, + nFail: 6, + iter: 108, + sigma: 2.0907791077859414, + nDiv: 1421, + zOut: []float64{0.5589315065851642e+00, 0.5589314123732620e+00, 0.1121517486324177e-26, 0.1291024862446124e-30, 0.6434089156657428e-04, 0.2304421214097475e+00, 0.2303778747300831e+00, 0.3431506172718059e-03, 0.3432463134744483e-03, 0.1484300435548105e-08, 0.6641476135588615e-06, 0.6434089156657428e-04, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 3, + ppOut: 0, + dminOut: 1.4843004355481051e-009, + desigOut: -1.6593817482031092e-016, + sigmaOut: 2.0907792019978437, + qmaxOut: 1.6602870234255678, + nFailOut: 6, + iterOut: 109, + nDivOut: 1425, + ttypeOut: -4, + dmin1Out: 6.4246679664367451e-005, + dmin2Out: 0.55893141237326205, + dnOut: 1.4843004355481051e-009, + dn1Out: 6.4246679664367451e-005, + dn2Out: 0.55893141237326205, + gOut: 0.49975000000000003, + tauOut: 9.4211902206835373e-008, + }, + { + i0: 1, + n0: 3, + z: []float64{0.5589315065851642e+00, 0.5589314123732620e+00, 0.1121517486324177e-26, 0.1291024862446124e-30, 0.6434089156657428e-04, 0.2304421214097475e+00, 0.2303778747300831e+00, 0.3431506172718059e-03, 0.3432463134744483e-03, 0.1484300435548105e-08, 0.6641476135588615e-06, 0.6434089156657428e-04, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 
0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + pp: 1, + dmin: 1.4843004355481051e-009, + desig: -1.6593817482031092e-016, + qmax: 1.6602870234255678, + ttype: -4, + dmin1: 6.4246679664367451e-005, + dmin2: 0.55893141237326205, + dn: 1.4843004355481051e-009, + dn1: 6.4246679664367451e-005, + dn2: 0.55893141237326205, + g: 0.49975000000000003, + tau: 9.4211902206835373e-008, + nFail: 6, + iter: 109, + sigma: 2.0907792019978437, + nDiv: 1425, + zOut: []float64{0.2649710614371106e+01, 0.5589314123732620e+00, 0.1121517486324177e-26, 0.1291024862446124e-30, 0.2321564474027070e+01, 0.2307852720292263e+00, 0.2303778747300831e+00, 0.3431506172718059e-03, 0.2090779203479937e+01, 0.1482093454966231e-08, 0.6641476135588615e-06, 0.6434089156657428e-04, 0.2001510674733695e+01, 0.2227952244484021e-23, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.1702794694134603e+01, 0.2545269924803487e-17, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.1077066053646038e+01, 0.7156177943897596e-01, 0.2292199980814605e-03, 0.1899018778701386e-03, 0.1060140274732043e+01, 0.8660746506696473e-01, 0.1357005210961402e-39, 0.1331360138522907e-23, 0.9894235909971354e+00, 0.8804208964992894e-17, 0.8660746506696473e-01, 0.8679736700028205e-01, 0.9539342071687115e+00, 0.2761013168273541e-29, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.8046649468928653e+00, 0.5098326619997980e-22, 0.1416795225784663e-10, 0.1216165317638265e-10, 0.7009142227469247e+00, 0.1183291357831518e-29, 0.4806926318247711e-11, 0.3824647564366386e-11, 0.5502651835254770e+00, 0.1368072596837427e-11, 0.1169503544861386e-10, 0.4068037876491279e-10, 0.4423863025187732e+00, 0.2477754901417239e-20, 0.1737857614720001e-07, 0.5295826057530262e-07, 0.3697086796938907e+00, 0.5143373102040997e-26, 0.1312431380925897e-05, 0.1614374370413396e-05, 0.1446051340026323e+00, 0.9928287808749566e-25, 0.3777515963415321e-05, 0.2773141909621761e-05, 0.1110032523123295e+00, 
0.0000000000000000e+00, 0.2532463507333992e-05, 0.2934028940292093e-05, 0.7513603923341917e-01, 0.5394210206791908e-19, 0.6032617175984252e-07, 0.2575740214720034e-06, 0.6860214953971246e-01, 0.7542232825258426e-19, 0.3784397549471832e-09, 0.1131370986389306e-09, 0.2434478048112329e-01, 0.1013559914197709e-18, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6989818532012803e-03, 0.9466330862652142e-28, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.7811996215926567e-04, 0.3786532345060857e-28, 0.1283811140869274e-03, 0.1684766835109566e-03}, + i0Out: 1, + n0Out: 0, + ppOut: 1, + dminOut: 1.4843004355481051e-009, + desigOut: -1.6593817482031092e-016, + sigmaOut: 2.0907792019978437, + qmaxOut: 1.6602870234255678, + nFailOut: 6, + iterOut: 109, + nDivOut: 1425, + ttypeOut: -4, + dmin1Out: 6.4246679664367451e-005, + dmin2Out: 0.55893141237326205, + dnOut: 1.4843004355481051e-009, + dn1Out: 6.4246679664367451e-005, + dn2Out: 0.55893141237326205, + gOut: 0.49975000000000003, + tauOut: 9.4211902206835373e-008, + }, + } { + z := make([]float64, len(test.z)) + copy(z, test.z) + i0 := test.i0 - 1 // zero index + n0 := test.n0 - 1 // zero index + + i0Out, n0Out, ppOut, dminOut, sigmaOut, desigOut, qmaxOut, nFailOut, iterOut, nDivOut, ttypeOut, dmin1Out, dmin2Out, dnOut, dn1Out, dn2Out, gOut, tauOut := + impl.Dlasq3(i0, n0, z, test.pp, test.dmin, test.sigma, test.desig, test.qmax, test.nFail, test.iter, test.nDiv, test.ttype, test.dmin1, test.dmin2, test.dn, test.dn1, test.dn2, test.g, test.tau) + + if !floats.EqualApprox(z, test.zOut, dTol) { + t.Error("Z mismatch") + } + if i0Out != test.i0Out-1 { + t.Errorf("i0 mismatch. Want %v, got %v", test.i0Out-1, i0Out) + } + if n0Out != test.n0Out-1 { + t.Errorf("n0 mismatch. Want %v, got %v", test.n0Out, n0Out) + } + if ppOut != test.ppOut { + t.Errorf("pp mismatch. Want %v, got %v", test.ppOut, ppOut) + } + if !floats.EqualWithinAbsOrRel(dminOut, test.dminOut, dTol, dTol) { + t.Errorf("dmin mismatch. Want %v, got %v", test.dminOut, dminOut) + } + if !floats.EqualWithinAbsOrRel(desigOut, test.desigOut, dTol, dTol) { + t.Errorf("desig mismatch. Want %v, got %v", test.desigOut, desigOut) + } + if !floats.EqualWithinAbsOrRel(sigmaOut, test.sigmaOut, dTol, dTol) { + t.Errorf("sigma mismatch. Want %v, got %v", test.sigmaOut, sigmaOut) + } + if !floats.EqualWithinAbsOrRel(qmaxOut, test.qmaxOut, dTol, dTol) { + t.Errorf("qmax mismatch. Want %v, got %v", test.qmaxOut, qmaxOut) + } + if nFailOut != test.nFailOut { + t.Errorf("nFail mismatch. Want %v, got %v", test.nFailOut, nFailOut) + } + if iterOut != test.iterOut { + t.Errorf("iter mismatch. Want %v, got %v", test.iterOut, iterOut) + } + if nDivOut != test.nDivOut { + t.Errorf("nDiv mismatch. Want %v, got %v", test.nDivOut, nDivOut) + } + if ttypeOut != test.ttypeOut { + t.Errorf("ttype mismatch. Want %v, got %v", test.ttypeOut, ttypeOut) + } + if !floats.EqualWithinAbsOrRel(dmin1Out, test.dmin1Out, dTol, dTol) { + t.Errorf("dmin1 mismatch. Want %v, got %v", test.dmin1Out, dmin1Out) + } + if !floats.EqualWithinAbsOrRel(dmin2Out, test.dmin2Out, dTol, dTol) { + t.Errorf("dmin2 mismatch. Want %v, got %v", test.dmin2Out, dmin2Out) + } + if !floats.EqualWithinAbsOrRel(dnOut, test.dnOut, dTol, dTol) { + t.Errorf("dn mismatch. Want %v, got %v", test.dnOut, dnOut) + } + if !floats.EqualWithinAbsOrRel(dn1Out, test.dn1Out, dTol, dTol) { + t.Errorf("dn1 mismatch. Want %v, got %v", test.dn1Out, dn1Out) + } + if !floats.EqualWithinAbsOrRel(dn2Out, test.dn2Out, dTol, dTol) { + t.Errorf("dn2 mismatch. 
Want %v, got %v", test.dn2Out, dn2Out) + } + if !floats.EqualWithinAbsOrRel(gOut, test.gOut, dTol, dTol) { + t.Errorf("g mismatch. Want %v, got %v", test.gOut, gOut) + } + if !floats.EqualWithinAbsOrRel(tauOut, test.tauOut, dTol, dTol) { + t.Errorf("tau mismatch. Want %v, got %v", test.tauOut, tauOut) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq4.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq4.go new file mode 100644 index 0000000..fb3aedb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq4.go @@ -0,0 +1,3101 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/floats" +) + +type dlasq4teststruct struct { + z []float64 + i0, n0, pp, n0in int + dmin, dmin1, dmin2, dn, dn1, dn2, tau float64 + ttype int + g float64 + + zOut []float64 + tauOut float64 + ttypeOut int + gOut float64 +} + +type Dlasq4er interface { + Dlasq4(i0, n0 int, z []float64, pp int, n0in int, dmin, dmin1, dmin2, dn, dn1, dn2, tau float64, ttype int, g float64) (tauOut float64, ttypeOut int, gOut float64) +} + +func Dlasq4Test(t *testing.T, impl Dlasq4er) { + dTol := 1e-14 + // Tests computed from calling the netlib Dlasq + for cas, test := range []dlasq4teststruct{ + { + z: []float64{0.1914365246180821e+01, 0.1564384297703890e+01, 0.2493389162143899e+00, 0.3499809484769305e+00, 0.1315996513131545e+01, 0.1363862112490627e+01, 0.9898466611970759e-01, 0.2014733168553078e+00, 0.6023973979587287e+00, 0.6465544792741794e+00, 0.2210033410638781e-02, 0.5482758480425683e-01, 0.9861857233678967e-01, 0.2428190810745492e-01, 0.4756321484454819e+00, 0.7654669763997353e-01, 0.2588748143677115e+00, 0.6127784069508770e+00, 0.1078611376690004e+00, 0.1217285558623164e+00, 0.6442896492255246e+00, 0.2293835804898155e+00, 0.6203230486639705e+00, 0.5227672064047094e+00, 0.3695660678607585e+00, 0.7645233184745865e+00, 0.5378838054252265e+00, 0.2253657980501426e+00, 0.3562533181264623e+00, 0.8820486722335483e+00, 0.2222132496436145e-01, 0.1208845131814035e-01, 0.1275094303021685e+01, 0.6548746852163357e+00, 0.1647324354821218e+00, 0.6424409427697111e+00, 0.1007530576543866e+01, 0.3269551736546701e+00, 0.3453881601783118e+00, 0.8453078383713172e+00, 0.2679391719153404e+00, 0.4116714838778281e+00, 0.7328677736683723e+00, 0.2016558482158241e+00, 0.8360828138307410e+00, 0.9737579452195326e+00, 0.4813660709592822e+00, 0.5951926422795808e+00, 0.6495370513676459e+00, 0.6761876248148171e+00, 0.2325475880222648e+00, 0.4547154975121112e+00, 0.1993624802893807e+00, 0.3321819367342255e+00, 0.3782318916911257e+00, 0.9972813157741996e-01, 0.9830449403503746e+00, 0.7561080996844842e+00, 0.4429733864040367e+00, 0.6051687323570161e+00, 0.1173279550602403e+01, 0.7195724480316686e+00, 0.5035524069144587e+00, 0.8966804889747714e+00, 0.3058980395058521e+00, 0.6588832353928662e+00, 0.3014634433415453e+00, 0.1505672110274446e+00, 0.1289422237567578e+01, 0.6124645310993601e+00, 0.7583364305799440e+00, 0.9784211498097629e+00, 0.4977814779461571e+00, 0.9993813577491869e+00, 0.2841468847862598e+00, 0.2567365507769143e+00, 0.9257539794205765e+00, 0.5509268385614666e+00, 0.5231355605450990e-04, 0.6589740256453697e+00, 0.2117869221381033e-04, 0.7349224826832024e-04, 0.0000000000000000e+00, 0.0000000000000000e+00}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: -0.0000000000000000, + dmin1: 0.0000000000000000, + dmin2: 0.0000000000000000, 
+ dn: 0.0000000000000000, + dn1: 0.0000000000000000, + dn2: 0.0000000000000000, + tau: 0.0000000000000000, + ttype: 0, + g: 0.0000000000000000, + zOut: []float64{0.1914365246180821e+01, 0.1564384297703890e+01, 0.2493389162143899e+00, 0.3499809484769305e+00, 0.1315996513131545e+01, 0.1363862112490627e+01, 0.9898466611970759e-01, 0.2014733168553078e+00, 0.6023973979587287e+00, 0.6465544792741794e+00, 0.2210033410638781e-02, 0.5482758480425683e-01, 0.9861857233678967e-01, 0.2428190810745492e-01, 0.4756321484454819e+00, 0.7654669763997353e-01, 0.2588748143677115e+00, 0.6127784069508770e+00, 0.1078611376690004e+00, 0.1217285558623164e+00, 0.6442896492255246e+00, 0.2293835804898155e+00, 0.6203230486639705e+00, 0.5227672064047094e+00, 0.3695660678607585e+00, 0.7645233184745865e+00, 0.5378838054252265e+00, 0.2253657980501426e+00, 0.3562533181264623e+00, 0.8820486722335483e+00, 0.2222132496436145e-01, 0.1208845131814035e-01, 0.1275094303021685e+01, 0.6548746852163357e+00, 0.1647324354821218e+00, 0.6424409427697111e+00, 0.1007530576543866e+01, 0.3269551736546701e+00, 0.3453881601783118e+00, 0.8453078383713172e+00, 0.2679391719153404e+00, 0.4116714838778281e+00, 0.7328677736683723e+00, 0.2016558482158241e+00, 0.8360828138307410e+00, 0.9737579452195326e+00, 0.4813660709592822e+00, 0.5951926422795808e+00, 0.6495370513676459e+00, 0.6761876248148171e+00, 0.2325475880222648e+00, 0.4547154975121112e+00, 0.1993624802893807e+00, 0.3321819367342255e+00, 0.3782318916911257e+00, 0.9972813157741996e-01, 0.9830449403503746e+00, 0.7561080996844842e+00, 0.4429733864040367e+00, 0.6051687323570161e+00, 0.1173279550602403e+01, 0.7195724480316686e+00, 0.5035524069144587e+00, 0.8966804889747714e+00, 0.3058980395058521e+00, 0.6588832353928662e+00, 0.3014634433415453e+00, 0.1505672110274446e+00, 0.1289422237567578e+01, 0.6124645310993601e+00, 0.7583364305799440e+00, 0.9784211498097629e+00, 0.4977814779461571e+00, 0.9993813577491869e+00, 0.2841468847862598e+00, 0.2567365507769143e+00, 0.9257539794205765e+00, 0.5509268385614666e+00, 0.5231355605450990e-04, 0.6589740256453697e+00, 0.2117869221381033e-04, 0.7349224826832024e-04, 0.0000000000000000e+00, 0.0000000000000000e+00}, + tauOut: 0.0000000000000000, + ttypeOut: -1, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1914365246180821e+01, 0.2163704162395211e+01, 0.2493389162143899e+00, 0.1516515751224039e+00, 0.1315996513131545e+01, 0.1263329604128848e+01, 0.9898466611970759e-01, 0.4719916727467415e-01, 0.6023973979587287e+00, 0.5574082640946934e+00, 0.2210033410638781e-02, 0.3910066531356214e-03, 0.9861857233678967e-01, 0.5738597141291359e+00, 0.4756321484454819e+00, 0.2145632131068746e+00, 0.2588748143677115e+00, 0.1521727389298373e+00, 0.1078611376690004e+00, 0.4566771620366771e+00, 0.6442896492255246e+00, 0.8079355358528180e+00, 0.6203230486639705e+00, 0.2837483186776231e+00, 0.3695660678607585e+00, 0.6237015546083620e+00, 0.5378838054252265e+00, 0.3072349091217998e+00, 0.3562533181264623e+00, 0.7123973396902394e-01, 0.2222132496436145e-01, 0.3977314805803597e+00, 0.1275094303021685e+01, 0.1042095257923447e+01, 0.1647324354821218e+00, 0.1592685164190333e+00, 0.1007530576543866e+01, 0.1193650220303144e+01, 0.3453881601783118e+00, 0.7752942700755104e-01, 0.2679391719153404e+00, 0.9232775185761617e+00, 0.7328677736683723e+00, 0.6636554427529671e+00, 0.8360828138307410e+00, 0.6537934420370561e+00, 0.4813660709592822e+00, 0.4782322339990674e+00, 0.6495370513676459e+00, 0.4038524053908432e+00, 0.2325475880222648e+00, 0.1147975431483785e+00, 0.1993624802893807e+00, 
0.4627968288321279e+00, 0.3782318916911257e+00, 0.8034172324482011e+00, 0.9830449403503746e+00, 0.6226010943062101e+00, 0.4429733864040367e+00, 0.8347746582554776e+00, 0.1173279550602403e+01, 0.8420572992613844e+00, 0.5035524069144587e+00, 0.1829278057427913e+00, 0.3058980395058521e+00, 0.4244336771046062e+00, 0.3014634433415453e+00, 0.9158407747236312e+00, 0.1289422237567578e+01, 0.1131917893423890e+01, 0.7583364305799440e+00, 0.3334922359541972e+00, 0.4977814779461571e+00, 0.4484361267782198e+00, 0.2841468847862598e+00, 0.5865943745895725e+00, 0.9257539794205765e+00, 0.3392119183870583e+00, 0.5231355605450990e-04, 0.3266196269153995e-08, 0.2117869221381033e-04, 0.2117542601754118e-04, 0.0000000000000000e+00, 0.3910066531356214e-03, 0.1037537856266618 - 321}, + i0: 1, + n0: 21, + pp: 1, + n0in: 21, + dmin: 2.1175426017541180e-005, + dmin1: 4.4311601260836921e-002, + dmin2: 4.4311601260836921e-002, + dn: 2.1175426017541180e-005, + dn1: 0.33915960483100382, + dn2: 0.16428924199195991, + tau: 0.0000000000000000, + ttype: -1, + g: 0.0000000000000000, + zOut: []float64{0.1914365246180821e+01, 0.2163704162395211e+01, 0.2493389162143899e+00, 0.1516515751224039e+00, 0.1315996513131545e+01, 0.1263329604128848e+01, 0.9898466611970759e-01, 0.4719916727467415e-01, 0.6023973979587287e+00, 0.5574082640946934e+00, 0.2210033410638781e-02, 0.3910066531356214e-03, 0.9861857233678967e-01, 0.5738597141291359e+00, 0.4756321484454819e+00, 0.2145632131068746e+00, 0.2588748143677115e+00, 0.1521727389298373e+00, 0.1078611376690004e+00, 0.4566771620366771e+00, 0.6442896492255246e+00, 0.8079355358528180e+00, 0.6203230486639705e+00, 0.2837483186776231e+00, 0.3695660678607585e+00, 0.6237015546083620e+00, 0.5378838054252265e+00, 0.3072349091217998e+00, 0.3562533181264623e+00, 0.7123973396902394e-01, 0.2222132496436145e-01, 0.3977314805803597e+00, 0.1275094303021685e+01, 0.1042095257923447e+01, 0.1647324354821218e+00, 0.1592685164190333e+00, 0.1007530576543866e+01, 0.1193650220303144e+01, 0.3453881601783118e+00, 0.7752942700755104e-01, 0.2679391719153404e+00, 0.9232775185761617e+00, 0.7328677736683723e+00, 0.6636554427529671e+00, 0.8360828138307410e+00, 0.6537934420370561e+00, 0.4813660709592822e+00, 0.4782322339990674e+00, 0.6495370513676459e+00, 0.4038524053908432e+00, 0.2325475880222648e+00, 0.1147975431483785e+00, 0.1993624802893807e+00, 0.4627968288321279e+00, 0.3782318916911257e+00, 0.8034172324482011e+00, 0.9830449403503746e+00, 0.6226010943062101e+00, 0.4429733864040367e+00, 0.8347746582554776e+00, 0.1173279550602403e+01, 0.8420572992613844e+00, 0.5035524069144587e+00, 0.1829278057427913e+00, 0.3058980395058521e+00, 0.4244336771046062e+00, 0.3014634433415453e+00, 0.9158407747236312e+00, 0.1289422237567578e+01, 0.1131917893423890e+01, 0.7583364305799440e+00, 0.3334922359541972e+00, 0.4977814779461571e+00, 0.4484361267782198e+00, 0.2841468847862598e+00, 0.5865943745895725e+00, 0.9257539794205765e+00, 0.3392119183870583e+00, 0.5231355605450990e-04, 0.3266196269153995e-08, 0.2117869221381033e-04, 0.2117542601754118e-04, 0.0000000000000000e+00, 0.3910066531356214e-03, 0.1037537856266618 - 321}, + tauOut: 0.0000000000000000, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2315355737517615e+01, 0.2163704162395211e+01, 0.8274578340618610e-01, 0.1516515751224039e+00, 0.1227782987997336e+01, 0.1263329604128848e+01, 0.2142822156235013e-01, 0.4719916727467415e-01, 0.5363710491854788e+00, 0.5574082640946934e+00, 0.4183353417969536e-03, 0.3910066531356214e-03, 0.7880045918942136e+00, 
0.5738597141291359e+00, 0.4143462125464707e-01, 0.2145632131068746e+00, 0.5674152797118673e+00, 0.1521727389298373e+00, 0.6502569120260687e+00, 0.4566771620366771e+00, 0.4414269425043723e+00, 0.8079355358528180e+00, 0.4009140594652070e+00, 0.2837483186776231e+00, 0.5300224042649548e+00, 0.6237015546083620e+00, 0.4129510944388858e-01, 0.3072349091217998e+00, 0.4276761051054951e+00, 0.7123973396902394e-01, 0.9691308092544145e+00, 0.3977314805803597e+00, 0.2322329650880660e+00, 0.1042095257923447e+01, 0.8186215063776209e+00, 0.1592685164190333e+00, 0.4525581409330741e+00, 0.1193650220303144e+01, 0.1581701233715052e+00, 0.7752942700755104e-01, 0.1428762837957623e+01, 0.9232775185761617e+00, 0.3036848136842134e+00, 0.6636554427529671e+00, 0.8283408623519102e+00, 0.6537934420370561e+00, 0.2331591338951825e+00, 0.4782322339990674e+00, 0.2854908146440392e+00, 0.4038524053908432e+00, 0.1860933389154074e+00, 0.1147975431483785e+00, 0.1080120722364922e+01, 0.4627968288321279e+00, 0.4631042046962229e+00, 0.8034172324482011e+00, 0.9942715478654648e+00, 0.6226010943062101e+00, 0.7069779837626068e+00, 0.8347746582554776e+00, 0.3180071212415688e+00, 0.8420572992613844e+00, 0.2441477440283845e+00, 0.1829278057427913e+00, 0.1096126707799853e+01, 0.4244336771046062e+00, 0.9457451890006905e+00, 0.9158407747236312e+00, 0.5196649403773971e+00, 0.1131917893423890e+01, 0.2877815203259632e+00, 0.3334922359541972e+00, 0.7472489810418290e+00, 0.4484361267782198e+00, 0.2662831374385604e+00, 0.5865943745895725e+00, 0.7292878421469419e-01, 0.3392119183870583e+00, 0.9483648767903632e-12, 0.3266196269153995e-08, 0.2117542506917630e-04, 0.2117542601754118e-04, 0.4183353417969536e-03, 0.3910066531356214e-03}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: 2.1175425069176302e-005, + dmin1: 2.9944624525135358e-002, + dmin2: 2.9944624525135358e-002, + dn: 2.1175425069176302e-005, + dn1: 7.2928780948497918e-002, + dn2: 0.16065460645225654, + tau: 0.0000000000000000, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2315355737517615e+01, 0.2163704162395211e+01, 0.8274578340618610e-01, 0.1516515751224039e+00, 0.1227782987997336e+01, 0.1263329604128848e+01, 0.2142822156235013e-01, 0.4719916727467415e-01, 0.5363710491854788e+00, 0.5574082640946934e+00, 0.4183353417969536e-03, 0.3910066531356214e-03, 0.7880045918942136e+00, 0.5738597141291359e+00, 0.4143462125464707e-01, 0.2145632131068746e+00, 0.5674152797118673e+00, 0.1521727389298373e+00, 0.6502569120260687e+00, 0.4566771620366771e+00, 0.4414269425043723e+00, 0.8079355358528180e+00, 0.4009140594652070e+00, 0.2837483186776231e+00, 0.5300224042649548e+00, 0.6237015546083620e+00, 0.4129510944388858e-01, 0.3072349091217998e+00, 0.4276761051054951e+00, 0.7123973396902394e-01, 0.9691308092544145e+00, 0.3977314805803597e+00, 0.2322329650880660e+00, 0.1042095257923447e+01, 0.8186215063776209e+00, 0.1592685164190333e+00, 0.4525581409330741e+00, 0.1193650220303144e+01, 0.1581701233715052e+00, 0.7752942700755104e-01, 0.1428762837957623e+01, 0.9232775185761617e+00, 0.3036848136842134e+00, 0.6636554427529671e+00, 0.8283408623519102e+00, 0.6537934420370561e+00, 0.2331591338951825e+00, 0.4782322339990674e+00, 0.2854908146440392e+00, 0.4038524053908432e+00, 0.1860933389154074e+00, 0.1147975431483785e+00, 0.1080120722364922e+01, 0.4627968288321279e+00, 0.4631042046962229e+00, 0.8034172324482011e+00, 0.9942715478654648e+00, 0.6226010943062101e+00, 0.7069779837626068e+00, 0.8347746582554776e+00, 0.3180071212415688e+00, 0.8420572992613844e+00, 0.2441477440283845e+00, 
0.1829278057427913e+00, 0.1096126707799853e+01, 0.4244336771046062e+00, 0.9457451890006905e+00, 0.9158407747236312e+00, 0.5196649403773971e+00, 0.1131917893423890e+01, 0.2877815203259632e+00, 0.3334922359541972e+00, 0.7472489810418290e+00, 0.4484361267782198e+00, 0.2662831374385604e+00, 0.5865943745895725e+00, 0.7292878421469419e-01, 0.3392119183870583e+00, 0.9483648767903632e-12, 0.3266196269153995e-08, 0.2117542506917630e-04, 0.2117542601754118e-04, 0.4183353417969536e-03, 0.3910066531356214e-03}, + tauOut: 2.1175313795360271e-005, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2315355737517615e+01, 0.2398080345610006e+01, 0.8274578340618610e-01, 0.4236466279397526e-01, 0.1227782987997336e+01, 0.1206825371451915e+01, 0.2142822156235013e-01, 0.9523728911788614e-02, 0.5363710491854788e+00, 0.5272444803016919e+00, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.7880045918942136e+00, 0.8287928057414093e+00, 0.4143462125464707e-01, 0.2836732781232222e-01, 0.5674152797118673e+00, 0.1189283688611819e+01, 0.6502569120260687e+00, 0.2413561400585997e+00, 0.4414269425043723e+00, 0.6009636865971842e+00, 0.4009140594652070e+00, 0.3535878097802652e+00, 0.5300224042649548e+00, 0.2177085286147829e+00, 0.4129510944388858e-01, 0.8112190955144877e-01, 0.4276761051054951e+00, 0.1315663829494665e+01, 0.9691308092544145e+00, 0.1710650671895379e+00, 0.2322329650880660e+00, 0.8797682289623537e+00, 0.8186215063776209e+00, 0.4211038940233675e+00, 0.4525581409330741e+00, 0.1896031949674164e+00, 0.1581701233715052e+00, 0.1191897606932286e+01, 0.1428762837957623e+01, 0.5405288693957555e+00, 0.3036848136842134e+00, 0.4653859482687157e+00, 0.8283408623519102e+00, 0.5960928726645816e+00, 0.2331591338951825e+00, 0.1116684901463164e+00, 0.2854908146440392e+00, 0.3598944880993349e+00, 0.1860933389154074e+00, 0.5585061130503639e+00, 0.1080120722364922e+01, 0.9846976386969850e+00, 0.4631042046962229e+00, 0.4676068229793028e+00, 0.9942715478654648e+00, 0.1233621533334973e+01, 0.7069779837626068e+00, 0.1822471700779458e+00, 0.3180071212415688e+00, 0.3798865198782122e+00, 0.2441477440283845e+00, 0.7044652781161848e+00, 0.1096126707799853e+01, 0.1337385443370563e+01, 0.9457451890006905e+00, 0.3674861422265960e+00, 0.5196649403773971e+00, 0.4399391431629689e+00, 0.2877815203259632e+00, 0.4888049885267526e+00, 0.7472489810418290e+00, 0.5247059546398414e+00, 0.2662831374385604e+00, 0.3701064434002514e-01, 0.7292878421469419e-01, 0.3589696456182207e-01, 0.9483648767903632e-12, 0.5594353069081231e-15, 0.2117542506917630e-04, 0.1112732565966979e-09, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.1037537856266618 - 321}, + i0: 1, + n0: 21, + pp: 1, + n0in: 21, + dmin: 1.1127325659669794e-010, + dmin1: 3.1433071595911154e-002, + dmin2: 3.1433071595911154e-002, + dn: 1.1127325659669794e-010, + dn1: 3.5896964560873705e-002, + dn2: 0.25842281720128102, + tau: 2.1175313795360271e-005, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2315355737517615e+01, 0.2398080345610006e+01, 0.8274578340618610e-01, 0.4236466279397526e-01, 0.1227782987997336e+01, 0.1206825371451915e+01, 0.2142822156235013e-01, 0.9523728911788614e-02, 0.5363710491854788e+00, 0.5272444803016919e+00, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.7880045918942136e+00, 0.8287928057414093e+00, 0.4143462125464707e-01, 0.2836732781232222e-01, 0.5674152797118673e+00, 0.1189283688611819e+01, 0.6502569120260687e+00, 0.2413561400585997e+00, 0.4414269425043723e+00, 0.6009636865971842e+00, 0.4009140594652070e+00, 0.3535878097802652e+00, 
0.5300224042649548e+00, 0.2177085286147829e+00, 0.4129510944388858e-01, 0.8112190955144877e-01, 0.4276761051054951e+00, 0.1315663829494665e+01, 0.9691308092544145e+00, 0.1710650671895379e+00, 0.2322329650880660e+00, 0.8797682289623537e+00, 0.8186215063776209e+00, 0.4211038940233675e+00, 0.4525581409330741e+00, 0.1896031949674164e+00, 0.1581701233715052e+00, 0.1191897606932286e+01, 0.1428762837957623e+01, 0.5405288693957555e+00, 0.3036848136842134e+00, 0.4653859482687157e+00, 0.8283408623519102e+00, 0.5960928726645816e+00, 0.2331591338951825e+00, 0.1116684901463164e+00, 0.2854908146440392e+00, 0.3598944880993349e+00, 0.1860933389154074e+00, 0.5585061130503639e+00, 0.1080120722364922e+01, 0.9846976386969850e+00, 0.4631042046962229e+00, 0.4676068229793028e+00, 0.9942715478654648e+00, 0.1233621533334973e+01, 0.7069779837626068e+00, 0.1822471700779458e+00, 0.3180071212415688e+00, 0.3798865198782122e+00, 0.2441477440283845e+00, 0.7044652781161848e+00, 0.1096126707799853e+01, 0.1337385443370563e+01, 0.9457451890006905e+00, 0.3674861422265960e+00, 0.5196649403773971e+00, 0.4399391431629689e+00, 0.2877815203259632e+00, 0.4888049885267526e+00, 0.7472489810418290e+00, 0.5247059546398414e+00, 0.2662831374385604e+00, 0.3701064434002514e-01, 0.7292878421469419e-01, 0.3589696456182207e-01, 0.9483648767903632e-12, 0.5594353069081231e-15, 0.2117542506917630e-04, 0.1112732565966979e-09, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.1037537856266618 - 321}, + tauOut: 2.1175313795360271e-005, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2440445008292708e+01, 0.2398080345610006e+01, 0.2094976520226600e-01, 0.4236466279397526e-01, 0.1195399335050165e+01, 0.1206825371451915e+01, 0.4200549016048655e-02, 0.9523728911788614e-02, 0.5236691632680260e+00, 0.5272444803016919e+00, 0.9895328911616120e-03, 0.6252320936560726e-03, 0.8561706005512968e+00, 0.8287928057414093e+00, 0.3940429656773515e-01, 0.2836732781232222e-01, 0.1391235531991410e+01, 0.1189283688611819e+01, 0.1042571673718422e+00, 0.2413561400585997e+00, 0.8502943288943339e+00, 0.6009636865971842e+00, 0.9053227710395735e-01, 0.3535878097802652e+00, 0.2082981609510011e+00, 0.2177085286147829e+00, 0.5123864833424303e+00, 0.8112190955144877e-01, 0.9743424132304999e+00, 0.1315663829494665e+01, 0.1544607000116935e+00, 0.1710650671895379e+00, 0.1146411422862754e+01, 0.8797682289623537e+00, 0.6964571542795012e-01, 0.4211038940233675e+00, 0.1311855086360479e+01, 0.1896031949674164e+00, 0.4911023119923957e+00, 0.1191897606932286e+01, 0.5148125055608023e+00, 0.5405288693957555e+00, 0.5388626806938843e+00, 0.4653859482687157e+00, 0.1688986820057405e+00, 0.5960928726645816e+00, 0.2379466412690434e+00, 0.1116684901463164e+00, 0.6804539597693821e+00, 0.3598944880993349e+00, 0.8082246312519304e+00, 0.5585061130503639e+00, 0.6440798303130841e+00, 0.9846976386969850e+00, 0.8956185534970393e+00, 0.4676068229793028e+00, 0.5202501498046066e+00, 0.1233621533334973e+01, 0.1330768347199243e+00, 0.1822471700779458e+00, 0.9512749631631994e+00, 0.3798865198782122e+00, 0.9903988276741268e+00, 0.7044652781161848e+00, 0.7144727578117591e+00, 0.1337385443370563e+01, 0.2262808998212762e+00, 0.3674861422265960e+00, 0.7024632317571722e+00, 0.4399391431629689e+00, 0.3651136124179467e+00, 0.4888049885267526e+00, 0.1966029864506465e+00, 0.5247059546398414e+00, 0.6757627705811050e-02, 0.3701064434002514e-01, 0.2913933674473832e-01, 0.3589696456182207e-01, 0.2136293938333395e-23, 0.5594353069081231e-15, 0.0000000000000000e+00, 0.1112732565966979e-09, 
0.9895328911616120e-03, 0.6252320936560726e-03}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: 0.0000000000000000, + dmin1: 2.9139336744737766e-002, + dmin2: 4.9426557292086552e-002, + dn: -2.0808762284537102e-024, + dn1: 2.9139336744737766e-002, + dn2: 0.15959234211062134, + tau: 1.1127325659669789e-010, + ttype: -15, + g: 0.0000000000000000, + zOut: []float64{0.2440445008292708e+01, 0.2398080345610006e+01, 0.2094976520226600e-01, 0.4236466279397526e-01, 0.1195399335050165e+01, 0.1206825371451915e+01, 0.4200549016048655e-02, 0.9523728911788614e-02, 0.5236691632680260e+00, 0.5272444803016919e+00, 0.9895328911616120e-03, 0.6252320936560726e-03, 0.8561706005512968e+00, 0.8287928057414093e+00, 0.3940429656773515e-01, 0.2836732781232222e-01, 0.1391235531991410e+01, 0.1189283688611819e+01, 0.1042571673718422e+00, 0.2413561400585997e+00, 0.8502943288943339e+00, 0.6009636865971842e+00, 0.9053227710395735e-01, 0.3535878097802652e+00, 0.2082981609510011e+00, 0.2177085286147829e+00, 0.5123864833424303e+00, 0.8112190955144877e-01, 0.9743424132304999e+00, 0.1315663829494665e+01, 0.1544607000116935e+00, 0.1710650671895379e+00, 0.1146411422862754e+01, 0.8797682289623537e+00, 0.6964571542795012e-01, 0.4211038940233675e+00, 0.1311855086360479e+01, 0.1896031949674164e+00, 0.4911023119923957e+00, 0.1191897606932286e+01, 0.5148125055608023e+00, 0.5405288693957555e+00, 0.5388626806938843e+00, 0.4653859482687157e+00, 0.1688986820057405e+00, 0.5960928726645816e+00, 0.2379466412690434e+00, 0.1116684901463164e+00, 0.6804539597693821e+00, 0.3598944880993349e+00, 0.8082246312519304e+00, 0.5585061130503639e+00, 0.6440798303130841e+00, 0.9846976386969850e+00, 0.8956185534970393e+00, 0.4676068229793028e+00, 0.5202501498046066e+00, 0.1233621533334973e+01, 0.1330768347199243e+00, 0.1822471700779458e+00, 0.9512749631631994e+00, 0.3798865198782122e+00, 0.9903988276741268e+00, 0.7044652781161848e+00, 0.7144727578117591e+00, 0.1337385443370563e+01, 0.2262808998212762e+00, 0.3674861422265960e+00, 0.7024632317571722e+00, 0.4399391431629689e+00, 0.3651136124179467e+00, 0.4888049885267526e+00, 0.1966029864506465e+00, 0.5247059546398414e+00, 0.6757627705811050e-02, 0.3701064434002514e-01, 0.2913933674473832e-01, 0.3589696456182207e-01, 0.2136293938333395e-23, 0.5594353069081231e-15, 0.0000000000000000e+00, 0.1112732565966979e-09, 0.9895328911616120e-03, 0.6252320936560726e-03}, + tauOut: -0.0000000000000000, + ttypeOut: -1, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2440445008292708e+01, 0.2461394773494974e+01, 0.2094976520226600e-01, 0.1017444891892999e-01, 0.1195399335050165e+01, 0.1189425435147283e+01, 0.4200549016048655e-02, 0.1849378635683999e-02, 0.5236691632680260e+00, 0.5228093175235037e+00, 0.9895328911616120e-03, 0.1620493249248586e-02, 0.8561706005512968e+00, 0.8939544038697832e+00, 0.3940429656773515e-01, 0.6132377362967349e-01, 0.1391235531991410e+01, 0.1434168925733579e+01, 0.1042571673718422e+00, 0.6181229879703373e-01, 0.8502943288943339e+00, 0.8790143072012576e+00, 0.9053227710395735e-01, 0.2145324219750511e-01, 0.2082981609510011e+00, 0.6992314020959263e+00, 0.5123864833424303e+00, 0.7139837844669097e+00, 0.9743424132304999e+00, 0.4148193287752837e+00, 0.1544607000116935e+00, 0.4268738185358478e+00, 0.1146411422862754e+01, 0.7891833197548568e+00, 0.6964571542795012e-01, 0.1157716892137957e+00, 0.1311855086360479e+01, 0.1687185709139079e+01, 0.4911023119923957e+00, 0.1498504938454686e+00, 0.5148125055608023e+00, 0.9038246924092180e+00, 0.5388626806938843e+00, 0.1006978425303630e+00, 
0.1688986820057405e+00, 0.3061474807444209e+00, 0.2379466412690434e+00, 0.5288684194677825e+00, 0.6804539597693821e+00, 0.9598101715535300e+00, 0.8082246312519304e+00, 0.5423584775195998e+00, 0.6440798303130841e+00, 0.9973399062905237e+00, 0.8956185534970393e+00, 0.4671884516860899e+00, 0.5202501498046066e+00, 0.1861385328384410e+00, 0.1330768347199243e+00, 0.6800991665489665e+00, 0.9512749631631994e+00, 0.1261574624288360e+01, 0.9903988276741268e+00, 0.5608966509936130e+00, 0.7144727578117591e+00, 0.3798570066394223e+00, 0.2262808998212762e+00, 0.4184574968871406e+00, 0.7024632317571722e+00, 0.6491193472879784e+00, 0.3651136124179467e+00, 0.1105843276664904e+00, 0.1966029864506465e+00, 0.9277628648996712e-01, 0.6757627705811050e-02, 0.2122447413720272e-02, 0.2913933674473832e-01, 0.2701688933101806e-01, 0.2136293938333395e-23, 0.0000000000000000e+00, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 21, + dmin: 0.0000000000000000, + dmin1: 2.7016889331018056e-002, + dmin2: 5.3061698118516694e-002, + dn: 0.0000000000000000, + dn1: 2.7016889331018056e-002, + dn2: 8.6018658784156071e-002, + tau: 0.0000000000000000, + ttype: -1, + g: 0.0000000000000000, + zOut: []float64{0.2440445008292708e+01, 0.2461394773494974e+01, 0.2094976520226600e-01, 0.1017444891892999e-01, 0.1195399335050165e+01, 0.1189425435147283e+01, 0.4200549016048655e-02, 0.1849378635683999e-02, 0.5236691632680260e+00, 0.5228093175235037e+00, 0.9895328911616120e-03, 0.1620493249248586e-02, 0.8561706005512968e+00, 0.8939544038697832e+00, 0.3940429656773515e-01, 0.6132377362967349e-01, 0.1391235531991410e+01, 0.1434168925733579e+01, 0.1042571673718422e+00, 0.6181229879703373e-01, 0.8502943288943339e+00, 0.8790143072012576e+00, 0.9053227710395735e-01, 0.2145324219750511e-01, 0.2082981609510011e+00, 0.6992314020959263e+00, 0.5123864833424303e+00, 0.7139837844669097e+00, 0.9743424132304999e+00, 0.4148193287752837e+00, 0.1544607000116935e+00, 0.4268738185358478e+00, 0.1146411422862754e+01, 0.7891833197548568e+00, 0.6964571542795012e-01, 0.1157716892137957e+00, 0.1311855086360479e+01, 0.1687185709139079e+01, 0.4911023119923957e+00, 0.1498504938454686e+00, 0.5148125055608023e+00, 0.9038246924092180e+00, 0.5388626806938843e+00, 0.1006978425303630e+00, 0.1688986820057405e+00, 0.3061474807444209e+00, 0.2379466412690434e+00, 0.5288684194677825e+00, 0.6804539597693821e+00, 0.9598101715535300e+00, 0.8082246312519304e+00, 0.5423584775195998e+00, 0.6440798303130841e+00, 0.9973399062905237e+00, 0.8956185534970393e+00, 0.4671884516860899e+00, 0.5202501498046066e+00, 0.1861385328384410e+00, 0.1330768347199243e+00, 0.6800991665489665e+00, 0.9512749631631994e+00, 0.1261574624288360e+01, 0.9903988276741268e+00, 0.5608966509936130e+00, 0.7144727578117591e+00, 0.3798570066394223e+00, 0.2262808998212762e+00, 0.4184574968871406e+00, 0.7024632317571722e+00, 0.6491193472879784e+00, 0.3651136124179467e+00, 0.1105843276664904e+00, 0.1966029864506465e+00, 0.9277628648996712e-01, 0.6757627705811050e-02, 0.2122447413720272e-02, 0.2913933674473832e-01, 0.2701688933101806e-01, 0.2136293938333395e-23, 0.0000000000000000e+00, 0.2117542506861687e-04}, + tauOut: -0.0000000000000000, + ttypeOut: -1, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2471569222413904e+01, 0.2461394773494974e+01, 0.4896382518051712e-02, 0.1017444891892999e-01, 0.1186378431264915e+01, 0.1189425435147283e+01, 0.8149780515932184e-03, 0.1849378635683999e-02, 0.5236148327211592e+00, 0.5228093175235037e+00, 0.2766627272719901e-02, 0.1620493249248586e-02, 0.9525115502267366e+00, 
0.8939544038697832e+00, 0.9233342160256496e-01, 0.6132377362967349e-01, 0.1403647802928048e+01, 0.1434168925733579e+01, 0.3870906568602875e-01, 0.6181229879703373e-01, 0.8617584837127339e+00, 0.8790143072012576e+00, 0.1740717486950262e-01, 0.2145324219750511e-01, 0.1395808011693333e+01, 0.6992314020959263e+00, 0.2121884039551361e+00, 0.7139837844669097e+00, 0.6295047433559955e+00, 0.4148193287752837e+00, 0.5351535485381410e+00, 0.4268738185358478e+00, 0.3698014604305115e+00, 0.7891833197548568e+00, 0.5281978587564573e+00, 0.1157716892137957e+00, 0.1308838344228090e+01, 0.1687185709139079e+01, 0.1034799882693896e+00, 0.1498504938454686e+00, 0.9010425466701916e+00, 0.9038246924092180e+00, 0.3421413441684364e-01, 0.1006978425303630e+00, 0.8008017657953598e+00, 0.3061474807444209e+00, 0.6338813300623194e+00, 0.5288684194677825e+00, 0.8682873190108105e+00, 0.9598101715535300e+00, 0.6229686202966810e+00, 0.5423584775195998e+00, 0.8415597376799326e+00, 0.9973399062905237e+00, 0.1033340463692495e+00, 0.4671884516860899e+00, 0.7629036530181579e+00, 0.1861385328384410e+00, 0.1124645093942705e+01, 0.6800991665489665e+00, 0.6978261813392677e+00, 0.1261574624288360e+01, 0.3053203341720497e+00, 0.5608966509936130e+00, 0.4929941693545132e+00, 0.3798570066394223e+00, 0.5509778292160957e+00, 0.4184574968871406e+00, 0.2087258457383731e+00, 0.6491193472879784e+00, 0.4915348757406203e-01, 0.1105843276664904e+00, 0.4574524632962537e-01, 0.9277628648996712e-01, 0.1253505697055357e-02, 0.2122447413720272e-02, 0.2576338363396270e-01, 0.2701688933101806e-01, 0.8149780515932184e-03, 0.1620493249248586e-02}, + i0: 1, + n0: 20, + pp: 0, + n0in: 20, + dmin: 2.5763383633962696e-002, + dmin1: 4.3622798915905092e-002, + dmin2: 7.4536672467372611e-002, + dn: 2.5763383633962696e-002, + dn1: 4.3622798915905092e-002, + dn2: 9.8141518071882677e-002, + tau: 0.0000000000000000, + ttype: -1, + g: 0.0000000000000000, + zOut: []float64{0.2471569222413904e+01, 0.2461394773494974e+01, 0.4896382518051712e-02, 0.1017444891892999e-01, 0.1186378431264915e+01, 0.1189425435147283e+01, 0.8149780515932184e-03, 0.1849378635683999e-02, 0.5236148327211592e+00, 0.5228093175235037e+00, 0.2766627272719901e-02, 0.1620493249248586e-02, 0.9525115502267366e+00, 0.8939544038697832e+00, 0.9233342160256496e-01, 0.6132377362967349e-01, 0.1403647802928048e+01, 0.1434168925733579e+01, 0.3870906568602875e-01, 0.6181229879703373e-01, 0.8617584837127339e+00, 0.8790143072012576e+00, 0.1740717486950262e-01, 0.2145324219750511e-01, 0.1395808011693333e+01, 0.6992314020959263e+00, 0.2121884039551361e+00, 0.7139837844669097e+00, 0.6295047433559955e+00, 0.4148193287752837e+00, 0.5351535485381410e+00, 0.4268738185358478e+00, 0.3698014604305115e+00, 0.7891833197548568e+00, 0.5281978587564573e+00, 0.1157716892137957e+00, 0.1308838344228090e+01, 0.1687185709139079e+01, 0.1034799882693896e+00, 0.1498504938454686e+00, 0.9010425466701916e+00, 0.9038246924092180e+00, 0.3421413441684364e-01, 0.1006978425303630e+00, 0.8008017657953598e+00, 0.3061474807444209e+00, 0.6338813300623194e+00, 0.5288684194677825e+00, 0.8682873190108105e+00, 0.9598101715535300e+00, 0.6229686202966810e+00, 0.5423584775195998e+00, 0.8415597376799326e+00, 0.9973399062905237e+00, 0.1033340463692495e+00, 0.4671884516860899e+00, 0.7629036530181579e+00, 0.1861385328384410e+00, 0.1124645093942705e+01, 0.6800991665489665e+00, 0.6978261813392677e+00, 0.1261574624288360e+01, 0.3053203341720497e+00, 0.5608966509936130e+00, 0.4929941693545132e+00, 0.3798570066394223e+00, 0.5509778292160957e+00, 
0.4184574968871406e+00, 0.2087258457383731e+00, 0.6491193472879784e+00, 0.4915348757406203e-01, 0.1105843276664904e+00, 0.4574524632962537e-01, 0.9277628648996712e-01, 0.1253505697055357e-02, 0.2122447413720272e-02, 0.2576338363396270e-01, 0.2701688933101806e-01, 0.8149780515932184e-03, 0.1620493249248586e-02}, + tauOut: 2.0080554394878082e-002, + ttypeOut: -3, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2471569222413904e+01, 0.2471445466333236e+01, 0.4896382518051712e-02, 0.2350431231346416e-02, 0.1186378431264915e+01, 0.1179822839486443e+01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.5236148327211592e+00, 0.5209996276036221e+00, 0.2766627272719901e-02, 0.5058054349403302e-02, 0.9525115502267366e+00, 0.1034766778881179e+01, 0.9233342160256496e-01, 0.1252490967185870e+00, 0.1403647802928048e+01, 0.1312087633296770e+01, 0.3870906568602875e-01, 0.2542350442532051e-01, 0.8617584837127339e+00, 0.8487220155581966e+00, 0.1740717486950262e-01, 0.2862783537884150e-01, 0.1395808011693333e+01, 0.1574348441670908e+01, 0.2121884039551361e+00, 0.8484373804386666e-01, 0.6295047433559955e+00, 0.1074794415251550e+01, 0.5351535485381410e+00, 0.1841287608083240e+00, 0.3698014604305115e+00, 0.7088504197799252e+00, 0.5281978587564573e+00, 0.9752771411128711e+00, 0.1308838344228090e+01, 0.4320210527858890e+00, 0.1034799882693896e+00, 0.2158225196628609e+00, 0.9010425466701916e+00, 0.7144140228254550e+00, 0.3421413441684364e-01, 0.3835134583138245e-01, 0.8008017657953598e+00, 0.1391311611427577e+01, 0.6338813300623194e+00, 0.3955915526975877e+00, 0.8682873190108105e+00, 0.1090644248011184e+01, 0.6229686202966810e+00, 0.4806932321292802e+00, 0.8415597376799326e+00, 0.4591804133211825e+00, 0.1033340463692495e+00, 0.1716839812178710e+00, 0.7629036530181579e+00, 0.1710844627144272e+01, 0.1124645093942705e+01, 0.4587247601659613e+00, 0.6978261813392677e+00, 0.5394016167466366e+00, 0.3053203341720497e+00, 0.2790520826393697e+00, 0.4929941693545132e+00, 0.7598997773325197e+00, 0.5509778292160957e+00, 0.1513401067044909e+00, 0.2087258457383731e+00, 0.1015190880092246e+00, 0.4915348757406203e-01, 0.2214892234681356e-01, 0.4574524632962537e-01, 0.1982969108114764e-01, 0.1253505697055357e-02, 0.1628595626045726e-02, 0.2576338363396270e-01, 0.1911464940919745e-01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 1.8576185384092288e-002, + dmin1: 1.8576185384092288e-002, + dmin2: 5.2365600435162571e-002, + dn: 1.9114649409197451e-002, + dn1: 1.8576185384092288e-002, + dn2: 5.2365600435162571e-002, + tau: 5.0201385987195205e-003, + ttype: -15, + g: 0.0000000000000000, + zOut: []float64{0.2471569222413904e+01, 0.2471445466333236e+01, 0.4896382518051712e-02, 0.2350431231346416e-02, 0.1186378431264915e+01, 0.1179822839486443e+01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.5236148327211592e+00, 0.5209996276036221e+00, 0.2766627272719901e-02, 0.5058054349403302e-02, 0.9525115502267366e+00, 0.1034766778881179e+01, 0.9233342160256496e-01, 0.1252490967185870e+00, 0.1403647802928048e+01, 0.1312087633296770e+01, 0.3870906568602875e-01, 0.2542350442532051e-01, 0.8617584837127339e+00, 0.8487220155581966e+00, 0.1740717486950262e-01, 0.2862783537884150e-01, 0.1395808011693333e+01, 0.1574348441670908e+01, 0.2121884039551361e+00, 0.8484373804386666e-01, 0.6295047433559955e+00, 0.1074794415251550e+01, 0.5351535485381410e+00, 0.1841287608083240e+00, 0.3698014604305115e+00, 0.7088504197799252e+00, 0.5281978587564573e+00, 0.9752771411128711e+00, 
0.1308838344228090e+01, 0.4320210527858890e+00, 0.1034799882693896e+00, 0.2158225196628609e+00, 0.9010425466701916e+00, 0.7144140228254550e+00, 0.3421413441684364e-01, 0.3835134583138245e-01, 0.8008017657953598e+00, 0.1391311611427577e+01, 0.6338813300623194e+00, 0.3955915526975877e+00, 0.8682873190108105e+00, 0.1090644248011184e+01, 0.6229686202966810e+00, 0.4806932321292802e+00, 0.8415597376799326e+00, 0.4591804133211825e+00, 0.1033340463692495e+00, 0.1716839812178710e+00, 0.7629036530181579e+00, 0.1710844627144272e+01, 0.1124645093942705e+01, 0.4587247601659613e+00, 0.6978261813392677e+00, 0.5394016167466366e+00, 0.3053203341720497e+00, 0.2790520826393697e+00, 0.4929941693545132e+00, 0.7598997773325197e+00, 0.5509778292160957e+00, 0.1513401067044909e+00, 0.2087258457383731e+00, 0.1015190880092246e+00, 0.4915348757406203e-01, 0.2214892234681356e-01, 0.4574524632962537e-01, 0.1982969108114764e-01, 0.1253505697055357e-02, 0.1628595626045726e-02, 0.2576338363396270e-01, 0.1911464940919745e-01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.2117542506861687e-04}, + tauOut: 5.4769133315280185e-003, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2468318984233055e+01, 0.2471445466333236e+01, 0.1123474100024551e-02, 0.2350431231346416e-02, 0.1173584145846428e+01, 0.1179822839486443e+01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.5204201986859162e+00, 0.5209996276036221e+00, 0.1005707814522541e-01, 0.5058054349403302e-02, 0.1144481884123012e+01, 0.1034766778881179e+01, 0.1435914304680996e+00, 0.1252490967185870e+00, 0.1188442793922463e+01, 0.1312087633296770e+01, 0.1815610143690141e-01, 0.2542350442532051e-01, 0.8537168361686087e+00, 0.8487220155581966e+00, 0.5279290053521807e-01, 0.2862783537884150e-01, 0.1600922365848029e+01, 0.1574348441670908e+01, 0.5696064828871891e-01, 0.8484373804386666e-01, 0.1196485614439627e+01, 0.1074794415251550e+01, 0.1090859328498209e+00, 0.1841287608083240e+00, 0.1569564714711448e+01, 0.7088504197799252e+00, 0.2684440171930437e+00, 0.9752771411128711e+00, 0.3739226419241781e+00, 0.4320210527858890e+00, 0.4123490187575627e+00, 0.2158225196628609e+00, 0.3349394365677468e+00, 0.7144140228254550e+00, 0.1593084209965356e+00, 0.3835134583138245e-01, 0.1622117829797102e+01, 0.1391311611427577e+01, 0.2659792301064862e+00, 0.3955915526975877e+00, 0.1299881336702450e+01, 0.1090644248011184e+01, 0.1698038973078534e+00, 0.4806932321292802e+00, 0.4555835838996722e+00, 0.4591804133211825e+00, 0.6447216871142054e+00, 0.1716839812178710e+00, 0.1519370786864500e+01, 0.1710844627144272e+01, 0.1628548339973444e+00, 0.4587247601659613e+00, 0.6501219520571339e+00, 0.5394016167466366e+00, 0.3261720586281595e+00, 0.2790520826393697e+00, 0.5795909120773233e+00, 0.7598997773325197e+00, 0.2650819619788820e-01, 0.1513401067044909e+00, 0.9168290082662192e-01, 0.1015190880092246e+00, 0.4790492926791300e-02, 0.2214892234681356e-01, 0.1119088044887405e-01, 0.1982969108114764e-01, 0.2781732372482683e-02, 0.1628595626045726e-02, 0.1085600370518675e-01, 0.1911464940919745e-01, 0.1605699355811189e-03, 0.3616937915375072e-03}, + i0: 1, + n0: 20, + pp: 0, + n0in: 20, + dmin: 9.5622848228283271e-003, + dmin1: 9.5622848228283271e-003, + dmin2: 6.9533978479808370e-002, + dn: 1.0856003705186750e-002, + dn1: 9.5622848228283271e-003, + dn2: 6.9533978479808370e-002, + tau: 5.4769133315280185e-003, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2468318984233055e+01, 0.2471445466333236e+01, 0.1123474100024551e-02, 0.2350431231346416e-02, 0.1173584145846428e+01, 
0.1179822839486443e+01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.5204201986859162e+00, 0.5209996276036221e+00, 0.1005707814522541e-01, 0.5058054349403302e-02, 0.1144481884123012e+01, 0.1034766778881179e+01, 0.1435914304680996e+00, 0.1252490967185870e+00, 0.1188442793922463e+01, 0.1312087633296770e+01, 0.1815610143690141e-01, 0.2542350442532051e-01, 0.8537168361686087e+00, 0.8487220155581966e+00, 0.5279290053521807e-01, 0.2862783537884150e-01, 0.1600922365848029e+01, 0.1574348441670908e+01, 0.5696064828871891e-01, 0.8484373804386666e-01, 0.1196485614439627e+01, 0.1074794415251550e+01, 0.1090859328498209e+00, 0.1841287608083240e+00, 0.1569564714711448e+01, 0.7088504197799252e+00, 0.2684440171930437e+00, 0.9752771411128711e+00, 0.3739226419241781e+00, 0.4320210527858890e+00, 0.4123490187575627e+00, 0.2158225196628609e+00, 0.3349394365677468e+00, 0.7144140228254550e+00, 0.1593084209965356e+00, 0.3835134583138245e-01, 0.1622117829797102e+01, 0.1391311611427577e+01, 0.2659792301064862e+00, 0.3955915526975877e+00, 0.1299881336702450e+01, 0.1090644248011184e+01, 0.1698038973078534e+00, 0.4806932321292802e+00, 0.4555835838996722e+00, 0.4591804133211825e+00, 0.6447216871142054e+00, 0.1716839812178710e+00, 0.1519370786864500e+01, 0.1710844627144272e+01, 0.1628548339973444e+00, 0.4587247601659613e+00, 0.6501219520571339e+00, 0.5394016167466366e+00, 0.3261720586281595e+00, 0.2790520826393697e+00, 0.5795909120773233e+00, 0.7598997773325197e+00, 0.2650819619788820e-01, 0.1513401067044909e+00, 0.9168290082662192e-01, 0.1015190880092246e+00, 0.4790492926791300e-02, 0.2214892234681356e-01, 0.1119088044887405e-01, 0.1982969108114764e-01, 0.2781732372482683e-02, 0.1628595626045726e-02, 0.1085600370518675e-01, 0.1911464940919745e-01, 0.1605699355811189e-03, 0.3616937915375072e-03}, + tauOut: 5.1216063611655054e-003, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2468318984233055e+01, 0.2464320851971913e+01, 0.1123474100024551e-02, 0.5350323562789559e-03, 0.1173584145846428e+01, 0.1168088077064565e+01, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.5204201986859162e+00, 0.5252841314829605e+00, 0.1005707814522541e-01, 0.2191222436498315e-01, 0.1144481884123012e+01, 0.1261039483864963e+01, 0.1435914304680996e+00, 0.1353250258951489e+00, 0.1188442793922463e+01, 0.1066152263103050e+01, 0.1815610143690141e-01, 0.1453842008528346e-01, 0.8537168361686087e+00, 0.8868497102573779e+00, 0.5279290053521807e-01, 0.9530062900995111e-01, 0.1600922365848029e+01, 0.1557460778765631e+01, 0.5696064828871891e-01, 0.4375878814786067e-01, 0.1196485614439627e+01, 0.1256691152780422e+01, 0.1090859328498209e+00, 0.1362446379077657e+00, 0.1569564714711448e+01, 0.1696642487635560e+01, 0.2684440171930437e+00, 0.5916231430550117e-01, 0.3739226419241781e+00, 0.7219877400150740e+00, 0.4123490187575627e+00, 0.1912940350054112e+00, 0.3349394365677468e+00, 0.2978322161977056e+00, 0.1593084209965356e+00, 0.8676597630518320e+00, 0.1622117829797102e+01, 0.1015315690490590e+01, 0.2659792301064862e+00, 0.3405260456467969e+00, 0.1299881336702450e+01, 0.1124037582002341e+01, 0.1698038973078534e+00, 0.6882320425428856e-01, 0.4555835838996722e+00, 0.1026360460398424e+01, 0.6447216871142054e+00, 0.9544125430154021e+00, 0.1519370786864500e+01, 0.7226914714852769e+00, 0.1628548339973444e+00, 0.1465016632377001e+00, 0.6501219520571339e+00, 0.8246707410864278e+00, 0.3261720586281595e+00, 0.2292385937027206e+00, 0.5795909120773233e+00, 0.3717389082113253e+00, 0.2650819619788820e-01, 0.6537783023029759e-02, 
0.9168290082662192e-01, 0.8481400436921797e-01, 0.4790492926791300e-02, 0.6320870478125323e-03, 0.1119088044887405e-01, 0.8218919412378699e-02, 0.2781732372482683e-02, 0.3674266095981827e-02, 0.1085600370518675e-01, 0.2060131248039419e-02, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 2.0601312480394186e-003, + dmin1: 5.4371870398960158e-003, + dmin2: 8.0023511442426670e-002, + dn: 2.0601312480394186e-003, + dn1: 5.4371870398960158e-003, + dn2: 8.0023511442426670e-002, + tau: 5.1216063611655054e-003, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2468318984233055e+01, 0.2464320851971913e+01, 0.1123474100024551e-02, 0.5350323562789559e-03, 0.1173584145846428e+01, 0.1168088077064565e+01, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.5204201986859162e+00, 0.5252841314829605e+00, 0.1005707814522541e-01, 0.2191222436498315e-01, 0.1144481884123012e+01, 0.1261039483864963e+01, 0.1435914304680996e+00, 0.1353250258951489e+00, 0.1188442793922463e+01, 0.1066152263103050e+01, 0.1815610143690141e-01, 0.1453842008528346e-01, 0.8537168361686087e+00, 0.8868497102573779e+00, 0.5279290053521807e-01, 0.9530062900995111e-01, 0.1600922365848029e+01, 0.1557460778765631e+01, 0.5696064828871891e-01, 0.4375878814786067e-01, 0.1196485614439627e+01, 0.1256691152780422e+01, 0.1090859328498209e+00, 0.1362446379077657e+00, 0.1569564714711448e+01, 0.1696642487635560e+01, 0.2684440171930437e+00, 0.5916231430550117e-01, 0.3739226419241781e+00, 0.7219877400150740e+00, 0.4123490187575627e+00, 0.1912940350054112e+00, 0.3349394365677468e+00, 0.2978322161977056e+00, 0.1593084209965356e+00, 0.8676597630518320e+00, 0.1622117829797102e+01, 0.1015315690490590e+01, 0.2659792301064862e+00, 0.3405260456467969e+00, 0.1299881336702450e+01, 0.1124037582002341e+01, 0.1698038973078534e+00, 0.6882320425428856e-01, 0.4555835838996722e+00, 0.1026360460398424e+01, 0.6447216871142054e+00, 0.9544125430154021e+00, 0.1519370786864500e+01, 0.7226914714852769e+00, 0.1628548339973444e+00, 0.1465016632377001e+00, 0.6501219520571339e+00, 0.8246707410864278e+00, 0.3261720586281595e+00, 0.2292385937027206e+00, 0.5795909120773233e+00, 0.3717389082113253e+00, 0.2650819619788820e-01, 0.6537783023029759e-02, 0.9168290082662192e-01, 0.8481400436921797e-01, 0.4790492926791300e-02, 0.6320870478125323e-03, 0.1119088044887405e-01, 0.8218919412378699e-02, 0.2781732372482683e-02, 0.3674266095981827e-02, 0.1085600370518675e-01, 0.2060131248039419e-02, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.2117542506861687e-04}, + tauOut: 1.2817878169164906e-003, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463574096511276e+01, 0.2464320851971913e+01, 0.2536822079344948e-03, 0.5350323562789559e-03, 0.1166624146026729e+01, 0.1168088077064565e+01, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.5458823568901986e+00, 0.5252841314829605e+00, 0.5061929508212644e-01, 0.2191222436498315e-01, 0.1344463426861069e+01, 0.1261039483864963e+01, 0.1073120173669855e+00, 0.1353250258951489e+00, 0.9720968780044319e+00, 0.1066152263103050e+01, 0.1326348631702415e-01, 0.1453842008528346e-01, 0.9676050651333883e+00, 0.8868497102573779e+00, 0.1533962535161303e+00, 0.9530062900995111e-01, 0.1446541525580445e+01, 0.1557460778765631e+01, 0.3801569533217738e-01, 0.4375878814786067e-01, 0.1353638307539094e+01, 0.1256691152780422e+01, 0.1707682473962209e+00, 0.1362446379077657e+00, 0.1583754766727924e+01, 0.1696642487635560e+01, 0.2697037855661164e-01, 0.5916231430550117e-01, 
0.8850296086469572e+00, 0.7219877400150740e+00, 0.6437471225190403e-01, 0.1912940350054112e+00, 0.1099835479180717e+01, 0.2978322161977056e+00, 0.8009821360646626e+00, 0.8676597630518320e+00, 0.5535778122558079e+00, 0.1015315690490590e+01, 0.6914368034330997e+00, 0.3405260456467969e+00, 0.5001421950066134e+00, 0.1124037582002341e+01, 0.1412346654806686e+00, 0.6882320425428856e-01, 0.1838256550116241e+01, 0.1026360460398424e+01, 0.3752173792456719e+00, 0.9544125430154021e+00, 0.4926939676603885e+00, 0.7226914714852769e+00, 0.2452143584512202e+00, 0.1465016632377001e+00, 0.8074131885210117e+00, 0.8246707410864278e+00, 0.1055431169003394e+00, 0.2292385937027206e+00, 0.2714517865170992e+00, 0.3717389082113253e+00, 0.2042703660177667e-02, 0.6537783023029759e-02, 0.8212159993993635e-01, 0.8481400436921797e-01, 0.6326073178529442e-04, 0.6320870478125323e-03, 0.1054813695965874e-01, 0.8218919412378699e-02, 0.7176120699696391e-03, 0.3674266095981827e-02, 0.6073136115328898e-04, 0.2060131248039419e-02, 0.3221114082852138e-04, 0.7153898701552432e-04}, + i0: 1, + n0: 20, + pp: 0, + n0in: 20, + dmin: 6.0731361153288982e-005, + dmin1: 6.8738708636769136e-003, + dmin2: 8.1489512892123819e-002, + dn: 6.0731361153288982e-005, + dn1: 6.8738708636769136e-003, + dn2: 8.1489512892123819e-002, + tau: 1.2817878169164906e-003, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2463574096511276e+01, 0.2464320851971913e+01, 0.2536822079344948e-03, 0.5350323562789559e-03, 0.1166624146026729e+01, 0.1168088077064565e+01, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.5458823568901986e+00, 0.5252841314829605e+00, 0.5061929508212644e-01, 0.2191222436498315e-01, 0.1344463426861069e+01, 0.1261039483864963e+01, 0.1073120173669855e+00, 0.1353250258951489e+00, 0.9720968780044319e+00, 0.1066152263103050e+01, 0.1326348631702415e-01, 0.1453842008528346e-01, 0.9676050651333883e+00, 0.8868497102573779e+00, 0.1533962535161303e+00, 0.9530062900995111e-01, 0.1446541525580445e+01, 0.1557460778765631e+01, 0.3801569533217738e-01, 0.4375878814786067e-01, 0.1353638307539094e+01, 0.1256691152780422e+01, 0.1707682473962209e+00, 0.1362446379077657e+00, 0.1583754766727924e+01, 0.1696642487635560e+01, 0.2697037855661164e-01, 0.5916231430550117e-01, 0.8850296086469572e+00, 0.7219877400150740e+00, 0.6437471225190403e-01, 0.1912940350054112e+00, 0.1099835479180717e+01, 0.2978322161977056e+00, 0.8009821360646626e+00, 0.8676597630518320e+00, 0.5535778122558079e+00, 0.1015315690490590e+01, 0.6914368034330997e+00, 0.3405260456467969e+00, 0.5001421950066134e+00, 0.1124037582002341e+01, 0.1412346654806686e+00, 0.6882320425428856e-01, 0.1838256550116241e+01, 0.1026360460398424e+01, 0.3752173792456719e+00, 0.9544125430154021e+00, 0.4926939676603885e+00, 0.7226914714852769e+00, 0.2452143584512202e+00, 0.1465016632377001e+00, 0.8074131885210117e+00, 0.8246707410864278e+00, 0.1055431169003394e+00, 0.2292385937027206e+00, 0.2714517865170992e+00, 0.3717389082113253e+00, 0.2042703660177667e-02, 0.6537783023029759e-02, 0.8212159993993635e-01, 0.8481400436921797e-01, 0.6326073178529442e-04, 0.6320870478125323e-03, 0.1054813695965874e-01, 0.8218919412378699e-02, 0.7176120699696391e-03, 0.3674266095981827e-02, 0.6073136115328898e-04, 0.2060131248039419e-02, 0.3221114082852138e-04, 0.7153898701552432e-04}, + tauOut: 5.6837241251038845e-005, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463574096511276e+01, 0.2463770941477959e+01, 0.2536822079344948e-03, 0.1201214707955848e-03, 0.1166624146026729e+01, 0.1166479398455512e+01, 
0.3221114082852138e-04, 0.1507398544447245e-04, 0.5458823568901986e+00, 0.5964297407456295e+00, 0.5061929508212644e-01, 0.1141052940222717e+00, 0.1344463426861069e+01, 0.1337613312964532e+01, 0.1073120173669855e+00, 0.7798791776646297e-01, 0.9720968780044319e+00, 0.9073156093137420e+00, 0.1326348631702415e-01, 0.1414482062243694e-01, 0.9676050651333883e+00, 0.1106799660785830e+01, 0.1533962535161303e+00, 0.2004825791345134e+00, 0.1446541525580445e+01, 0.1284017804536858e+01, 0.3801569533217738e-01, 0.4007693764646178e-01, 0.1353638307539094e+01, 0.1484272780047602e+01, 0.1707682473962209e+00, 0.1822138285193538e+00, 0.1583754766727924e+01, 0.1428454479523931e+01, 0.2697037855661164e-01, 0.1671007646458111e-01, 0.8850296086469572e+00, 0.9326374071930291e+00, 0.6437471225190403e-01, 0.7591545433480534e-01, 0.1099835479180717e+01, 0.1824845323669324e+01, 0.8009821360646626e+00, 0.2429827519008994e+00, 0.5535778122558079e+00, 0.1001975026546757e+01, 0.6914368034330997e+00, 0.3451350696526060e+00, 0.5001421950066134e+00, 0.2961849535934249e+00, 0.1412346654806686e+00, 0.8765656248686587e+00, 0.1838256550116241e+01, 0.1336851467252003e+01, 0.3752173792456719e+00, 0.1382856239786244e+00, 0.4926939676603885e+00, 0.5995658648917332e+00, 0.2452143584512202e+00, 0.3302211126778973e+00, 0.8074131885210117e+00, 0.5826783555022028e+00, 0.1055431169003394e+00, 0.4916926700063749e-01, 0.2714517865170992e+00, 0.2242683859353883e+00, 0.2042703660177667e-02, 0.7479881396448043e-03, 0.8212159993993635e-01, 0.8138003529082581e-01, 0.6326073178529442e-04, 0.8199589256196194e-05, 0.1054813695965874e-01, 0.1120071219912114e-01, 0.7176120699696391e-03, 0.3890963093641941e-05, 0.6073136115328898e-04, 0.3156808608191942e-08, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 3.1568086081919418e-009, + dmin1: 1.0483100129151506e-002, + dmin2: 8.1316774559040517e-002, + dn: 3.1568086081919418e-009, + dn1: 1.0483100129151506e-002, + dn2: 8.1316774559040517e-002, + tau: 5.6837241251038845e-005, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2463574096511276e+01, 0.2463770941477959e+01, 0.2536822079344948e-03, 0.1201214707955848e-03, 0.1166624146026729e+01, 0.1166479398455512e+01, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.5458823568901986e+00, 0.5964297407456295e+00, 0.5061929508212644e-01, 0.1141052940222717e+00, 0.1344463426861069e+01, 0.1337613312964532e+01, 0.1073120173669855e+00, 0.7798791776646297e-01, 0.9720968780044319e+00, 0.9073156093137420e+00, 0.1326348631702415e-01, 0.1414482062243694e-01, 0.9676050651333883e+00, 0.1106799660785830e+01, 0.1533962535161303e+00, 0.2004825791345134e+00, 0.1446541525580445e+01, 0.1284017804536858e+01, 0.3801569533217738e-01, 0.4007693764646178e-01, 0.1353638307539094e+01, 0.1484272780047602e+01, 0.1707682473962209e+00, 0.1822138285193538e+00, 0.1583754766727924e+01, 0.1428454479523931e+01, 0.2697037855661164e-01, 0.1671007646458111e-01, 0.8850296086469572e+00, 0.9326374071930291e+00, 0.6437471225190403e-01, 0.7591545433480534e-01, 0.1099835479180717e+01, 0.1824845323669324e+01, 0.8009821360646626e+00, 0.2429827519008994e+00, 0.5535778122558079e+00, 0.1001975026546757e+01, 0.6914368034330997e+00, 0.3451350696526060e+00, 0.5001421950066134e+00, 0.2961849535934249e+00, 0.1412346654806686e+00, 0.8765656248686587e+00, 0.1838256550116241e+01, 0.1336851467252003e+01, 0.3752173792456719e+00, 0.1382856239786244e+00, 0.4926939676603885e+00, 0.5995658648917332e+00, 0.2452143584512202e+00, 
0.3302211126778973e+00, 0.8074131885210117e+00, 0.5826783555022028e+00, 0.1055431169003394e+00, 0.4916926700063749e-01, 0.2714517865170992e+00, 0.2242683859353883e+00, 0.2042703660177667e-02, 0.7479881396448043e-03, 0.8212159993993635e-01, 0.8138003529082581e-01, 0.6326073178529442e-04, 0.8199589256196194e-05, 0.1054813695965874e-01, 0.1120071219912114e-01, 0.7176120699696391e-03, 0.3890963093641941e-05, 0.6073136115328898e-04, 0.3156808608191942e-08, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.2117542506861687e-04}, + tauOut: 3.1557121791797713e-009, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463891059793043e+01, 0.2463770941477959e+01, 0.5686908130061341e-04, 0.1201214707955848e-03, 0.1166437600203943e+01, 0.1166479398455512e+01, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.7105273238932086e+00, 0.5964297407456295e+00, 0.2148105431436762e+00, 0.1141052940222717e+00, 0.1200790684431606e+01, 0.1337613312964532e+01, 0.5892755169139442e-01, 0.7798791776646297e-01, 0.8625328750890724e+00, 0.9073156093137420e+00, 0.1815059242254727e-01, 0.1414482062243694e-01, 0.1289131644342084e+01, 0.1106799660785830e+01, 0.1996872873596725e+00, 0.2004825791345134e+00, 0.1124407451667935e+01, 0.1284017804536858e+01, 0.5290351604133232e-01, 0.4007693764646178e-01, 0.1613583089369911e+01, 0.1484272780047602e+01, 0.1613081850537457e+00, 0.1822138285193538e+00, 0.1283856367779054e+01, 0.1428454479523931e+01, 0.1213877407087503e-01, 0.1671007646458111e-01, 0.9964140843012472e+00, 0.9326374071930291e+00, 0.1390325207358455e+00, 0.7591545433480534e-01, 0.1928795551678665e+01, 0.1824845323669324e+01, 0.1262252233392066e+00, 0.2429827519008994e+00, 0.1220884869704444e+01, 0.1001975026546757e+01, 0.8372928285471114e-01, 0.3451350696526060e+00, 0.1089021292451660e+01, 0.2961849535934249e+00, 0.1076046951396362e+01, 0.8765656248686587e+00, 0.3990901366785531e+00, 0.1336851467252003e+01, 0.2077509116934600e+00, 0.1382856239786244e+00, 0.7220360627204584e+00, 0.5995658648917332e+00, 0.2664862668525171e+00, 0.3302211126778973e+00, 0.3653613524946110e+00, 0.5826783555022028e+00, 0.3018138637972599e-01, 0.4916926700063749e-01, 0.1948349845395949e+00, 0.2242683859353883e+00, 0.3124249032854923e-03, 0.7479881396448043e-03, 0.8107580682108434e-01, 0.8138003529082581e-01, 0.1132782305976083e-05, 0.8199589256196194e-05, 0.1120346722419663e-01, 0.1120071219912114e-01, 0.1096359327194516e-11, 0.3890963093641941e-05, 0.6968497581336674e-16, 0.3156808608191942e-08, 0.7707718980490818e-05, 0.1507398544447245e-04}, + i0: 1, + n0: 20, + pp: 0, + n0in: 20, + dmin: 6.9684975813366743e-017, + dmin1: 1.1199576261102989e-002, + dmin2: 8.1067607231828140e-002, + dn: 6.9684975813366743e-017, + dn1: 1.1199576261102989e-002, + dn2: 8.1067607231828140e-002, + tau: 3.1557121791797713e-009, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2463891059793043e+01, 0.2463770941477959e+01, 0.5686908130061341e-04, 0.1201214707955848e-03, 0.1166437600203943e+01, 0.1166479398455512e+01, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.7105273238932086e+00, 0.5964297407456295e+00, 0.2148105431436762e+00, 0.1141052940222717e+00, 0.1200790684431606e+01, 0.1337613312964532e+01, 0.5892755169139442e-01, 0.7798791776646297e-01, 0.8625328750890724e+00, 0.9073156093137420e+00, 0.1815059242254727e-01, 0.1414482062243694e-01, 0.1289131644342084e+01, 0.1106799660785830e+01, 0.1996872873596725e+00, 0.2004825791345134e+00, 0.1124407451667935e+01, 0.1284017804536858e+01, 0.5290351604133232e-01, 0.4007693764646178e-01, 
0.1613583089369911e+01, 0.1484272780047602e+01, 0.1613081850537457e+00, 0.1822138285193538e+00, 0.1283856367779054e+01, 0.1428454479523931e+01, 0.1213877407087503e-01, 0.1671007646458111e-01, 0.9964140843012472e+00, 0.9326374071930291e+00, 0.1390325207358455e+00, 0.7591545433480534e-01, 0.1928795551678665e+01, 0.1824845323669324e+01, 0.1262252233392066e+00, 0.2429827519008994e+00, 0.1220884869704444e+01, 0.1001975026546757e+01, 0.8372928285471114e-01, 0.3451350696526060e+00, 0.1089021292451660e+01, 0.2961849535934249e+00, 0.1076046951396362e+01, 0.8765656248686587e+00, 0.3990901366785531e+00, 0.1336851467252003e+01, 0.2077509116934600e+00, 0.1382856239786244e+00, 0.7220360627204584e+00, 0.5995658648917332e+00, 0.2664862668525171e+00, 0.3302211126778973e+00, 0.3653613524946110e+00, 0.5826783555022028e+00, 0.3018138637972599e-01, 0.4916926700063749e-01, 0.1948349845395949e+00, 0.2242683859353883e+00, 0.3124249032854923e-03, 0.7479881396448043e-03, 0.8107580682108434e-01, 0.8138003529082581e-01, 0.1132782305976083e-05, 0.8199589256196194e-05, 0.1120346722419663e-01, 0.1120071219912114e-01, 0.1096359327194516e-11, 0.3890963093641941e-05, 0.6968497581336674e-16, 0.3156808608191942e-08, 0.7707718980490818e-05, 0.1507398544447245e-04}, + tauOut: 6.9684975806547287e-017, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463891059793043e+01, 0.2463947928874343e+01, 0.5686908130061341e-04, 0.2692193042748079e-04, 0.1166437600203943e+01, 0.1166418385992496e+01, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.7105273238932086e+00, 0.9253331718563164e+00, 0.2148105431436762e+00, 0.2787563517334627e+00, 0.1200790684431606e+01, 0.9809618843895378e+00, 0.5892755169139442e-01, 0.5181337969514327e-01, 0.8625328750890724e+00, 0.8288700878164763e+00, 0.1815059242254727e-01, 0.2822939734392020e-01, 0.1289131644342084e+01, 0.1460589534357837e+01, 0.1996872873596725e+00, 0.1537255119449346e+00, 0.1124407451667935e+01, 0.1023585455764333e+01, 0.5290351604133232e-01, 0.8339725654733963e-01, 0.1613583089369911e+01, 0.1691494017876317e+01, 0.1613081850537457e+00, 0.1224340957564512e+00, 0.1283856367779054e+01, 0.1173561046093478e+01, 0.1213877407087503e-01, 0.1030644761994533e-01, 0.9964140843012472e+00, 0.1125140157417147e+01, 0.1390325207358455e+00, 0.2383394688796517e+00, 0.1928795551678665e+01, 0.1816681306138221e+01, 0.1262252233392066e+00, 0.8482856339700598e-01, 0.1220884869704444e+01, 0.1219785589162149e+01, 0.8372928285471114e-01, 0.7475327847832687e-01, 0.1089021292451660e+01, 0.2090314965369696e+01, 0.1076046951396362e+01, 0.2054425921547012e+00, 0.3990901366785531e+00, 0.4013984562173118e+00, 0.2077509116934600e+00, 0.3737026089221466e+00, 0.7220360627204584e+00, 0.6148197206508288e+00, 0.2664862668525171e+00, 0.1583615157552351e+00, 0.3653613524946110e+00, 0.2371812231191019e+00, 0.3018138637972599e-01, 0.2479281399828426e-01, 0.1948349845395949e+00, 0.1703545954445960e+00, 0.3124249032854923e-03, 0.1486904479375115e-03, 0.8107580682108434e-01, 0.8092824915545274e-01, 0.1132782305976083e-05, 0.1568190288260776e-06, 0.1120346722419663e-01, 0.1120331040626409e-01, 0.1096359327194516e-11, 0.6819392699821255e-26, 0.6968497581336674e-16, 0.6162975822039155e-31, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 6.1629758220391547e-032, + dmin1: 1.1203310405167735e-002, + dmin2: 8.0927116373146771e-002, + dn: 6.1629758220391547e-032, + dn1: 1.1203310405167735e-002, + dn2: 8.0927116373146771e-002, + tau: 
6.9684975806547287e-017, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2463891059793043e+01, 0.2463947928874343e+01, 0.5686908130061341e-04, 0.2692193042748079e-04, 0.1166437600203943e+01, 0.1166418385992496e+01, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.7105273238932086e+00, 0.9253331718563164e+00, 0.2148105431436762e+00, 0.2787563517334627e+00, 0.1200790684431606e+01, 0.9809618843895378e+00, 0.5892755169139442e-01, 0.5181337969514327e-01, 0.8625328750890724e+00, 0.8288700878164763e+00, 0.1815059242254727e-01, 0.2822939734392020e-01, 0.1289131644342084e+01, 0.1460589534357837e+01, 0.1996872873596725e+00, 0.1537255119449346e+00, 0.1124407451667935e+01, 0.1023585455764333e+01, 0.5290351604133232e-01, 0.8339725654733963e-01, 0.1613583089369911e+01, 0.1691494017876317e+01, 0.1613081850537457e+00, 0.1224340957564512e+00, 0.1283856367779054e+01, 0.1173561046093478e+01, 0.1213877407087503e-01, 0.1030644761994533e-01, 0.9964140843012472e+00, 0.1125140157417147e+01, 0.1390325207358455e+00, 0.2383394688796517e+00, 0.1928795551678665e+01, 0.1816681306138221e+01, 0.1262252233392066e+00, 0.8482856339700598e-01, 0.1220884869704444e+01, 0.1219785589162149e+01, 0.8372928285471114e-01, 0.7475327847832687e-01, 0.1089021292451660e+01, 0.2090314965369696e+01, 0.1076046951396362e+01, 0.2054425921547012e+00, 0.3990901366785531e+00, 0.4013984562173118e+00, 0.2077509116934600e+00, 0.3737026089221466e+00, 0.7220360627204584e+00, 0.6148197206508288e+00, 0.2664862668525171e+00, 0.1583615157552351e+00, 0.3653613524946110e+00, 0.2371812231191019e+00, 0.3018138637972599e-01, 0.2479281399828426e-01, 0.1948349845395949e+00, 0.1703545954445960e+00, 0.3124249032854923e-03, 0.1486904479375115e-03, 0.8107580682108434e-01, 0.8092824915545274e-01, 0.1132782305976083e-05, 0.1568190288260776e-06, 0.1120346722419663e-01, 0.1120331040626409e-01, 0.1096359327194516e-11, 0.6819392699821255e-26, 0.6968497581336674e-16, 0.6162975822039155e-31, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.2117542506861687e-04}, + tauOut: 6.1629758220391547e-032, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463974850804771e+01, 0.2463947928874343e+01, 0.1274454348702788e-04, 0.2692193042748079e-04, 0.1166410336629578e+01, 0.1166418385992496e+01, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.1204085798823124e+01, 0.9253331718563164e+00, 0.2271012218143261e+00, 0.2787563517334627e+00, 0.8056740422703550e+00, 0.9809618843895378e+00, 0.5330513126246473e-01, 0.5181337969514327e-01, 0.8037943538979316e+00, 0.8288700878164763e+00, 0.5129615818002433e-01, 0.2822939734392020e-01, 0.1563018888122747e+01, 0.1460589534357837e+01, 0.1006713350698832e+00, 0.1537255119449346e+00, 0.1006311377241790e+01, 0.1023585455764333e+01, 0.1401812239704283e+00, 0.8339725654733963e-01, 0.1673746889662340e+01, 0.1691494017876317e+01, 0.8584564749956700e-01, 0.1224340957564512e+00, 0.1098021846213856e+01, 0.1173561046093478e+01, 0.1056099032774466e-01, 0.1030644761994533e-01, 0.1352918635969054e+01, 0.1125140157417147e+01, 0.3200390963041470e+00, 0.2383394688796517e+00, 0.1581470773231080e+01, 0.1816681306138221e+01, 0.6542811978092533e-01, 0.8482856339700598e-01, 0.1229110747859551e+01, 0.1219785589162149e+01, 0.1271308521106110e+00, 0.7475327847832687e-01, 0.2168626705413786e+01, 0.2090314965369696e+01, 0.3802606466401751e-01, 0.2054425921547012e+00, 0.7370750004754409e+00, 0.4013984562173118e+00, 0.3117182559112661e+00, 0.3737026089221466e+00, 0.4614629804947978e+00, 0.6148197206508288e+00, 0.8139413038408401e-01, 
0.1583615157552351e+00, 0.1805799067333021e+00, 0.2371812231191019e+00, 0.2338892446571373e-01, 0.2479281399828426e-01, 0.1471143614268198e+00, 0.1703545954445960e+00, 0.8179526119010886e-04, 0.1486904479375115e-03, 0.8084661071329148e-01, 0.8092824915545274e-01, 0.2173118009582292e-07, 0.1568190288260776e-06, 0.1120328867508400e-01, 0.1120331040626409e-01, 0.3751376363572422e-55, 0.6819392699821255e-26}, + i0: 1, + n0: 19, + pp: 0, + n0in: 20, + dmin: 6.1629758220391547e-032, + dmin1: 1.1203288675083998e-002, + dmin2: 8.0846453894262649e-002, + dn: 6.1629758220391547e-032, + dn1: 1.1203288675083998e-002, + dn2: 8.0846453894262649e-002, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2463974850804771e+01, 0.2463947928874343e+01, 0.1274454348702788e-04, 0.2692193042748079e-04, 0.1166410336629578e+01, 0.1166418385992496e+01, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.1204085798823124e+01, 0.9253331718563164e+00, 0.2271012218143261e+00, 0.2787563517334627e+00, 0.8056740422703550e+00, 0.9809618843895378e+00, 0.5330513126246473e-01, 0.5181337969514327e-01, 0.8037943538979316e+00, 0.8288700878164763e+00, 0.5129615818002433e-01, 0.2822939734392020e-01, 0.1563018888122747e+01, 0.1460589534357837e+01, 0.1006713350698832e+00, 0.1537255119449346e+00, 0.1006311377241790e+01, 0.1023585455764333e+01, 0.1401812239704283e+00, 0.8339725654733963e-01, 0.1673746889662340e+01, 0.1691494017876317e+01, 0.8584564749956700e-01, 0.1224340957564512e+00, 0.1098021846213856e+01, 0.1173561046093478e+01, 0.1056099032774466e-01, 0.1030644761994533e-01, 0.1352918635969054e+01, 0.1125140157417147e+01, 0.3200390963041470e+00, 0.2383394688796517e+00, 0.1581470773231080e+01, 0.1816681306138221e+01, 0.6542811978092533e-01, 0.8482856339700598e-01, 0.1229110747859551e+01, 0.1219785589162149e+01, 0.1271308521106110e+00, 0.7475327847832687e-01, 0.2168626705413786e+01, 0.2090314965369696e+01, 0.3802606466401751e-01, 0.2054425921547012e+00, 0.7370750004754409e+00, 0.4013984562173118e+00, 0.3117182559112661e+00, 0.3737026089221466e+00, 0.4614629804947978e+00, 0.6148197206508288e+00, 0.8139413038408401e-01, 0.1583615157552351e+00, 0.1805799067333021e+00, 0.2371812231191019e+00, 0.2338892446571373e-01, 0.2479281399828426e-01, 0.1471143614268198e+00, 0.1703545954445960e+00, 0.8179526119010886e-04, 0.1486904479375115e-03, 0.8084661071329148e-01, 0.8092824915545274e-01, 0.2173118009582292e-07, 0.1568190288260776e-06, 0.1120328867508400e-01, 0.1120331040626409e-01, 0.3751376363572422e-55, 0.6819392699821255e-26}, + tauOut: 1.1203284285913290e-002, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2463974850804771e+01, 0.2452784311062345e+01, 0.1274454348702788e-04, 0.6060609239813679e-05, 0.1166410336629578e+01, 0.1155204716501079e+01, 0.3724766654883956e-05, 0.3882375624867434e-05, 0.1204085798823124e+01, 0.1419979853975912e+01, 0.2271012218143261e+00, 0.1288536304732592e+00, 0.8056740422703550e+00, 0.7189222587736472e+00, 0.5330513126246473e-01, 0.5959804835594534e-01, 0.8037943538979316e+00, 0.7842891794360973e+00, 0.5129615818002433e-01, 0.1022287011292917e+00, 0.1563018888122747e+01, 0.1550258237777425e+01, 0.1006713350698832e+00, 0.6534828028921505e-01, 0.1006311377241790e+01, 0.1069941036637090e+01, 0.1401812239704283e+00, 0.2192904838448094e+00, 0.1673746889662340e+01, 0.1529098769031184e+01, 0.8584564749956700e-01, 0.6164441320989392e-01, 0.1098021846213856e+01, 0.1035735139045794e+01, 0.1056099032774466e-01, 0.1379518767882891e-01, 0.1352918635969054e+01, 
0.1647959260308459e+01, 0.3200390963041470e+00, 0.3071268139247320e+00, 0.1581470773231080e+01, 0.1328568794801360e+01, 0.6542811978092533e-01, 0.6053010243026304e-01, 0.1229110747859551e+01, 0.1284508213253985e+01, 0.1271308521106110e+00, 0.2146341752620367e+00, 0.2168626705413786e+01, 0.1980815310529854e+01, 0.3802606466401751e-01, 0.1414976019284330e-01, 0.7370750004754409e+00, 0.1023440211907950e+01, 0.3117182559112661e+00, 0.1405518698344743e+00, 0.4614629804947978e+00, 0.3911019567584944e+00, 0.8139413038408401e-01, 0.3758136265851575e-01, 0.1805799067333021e+00, 0.1551841842545868e+00, 0.2338892446571373e-01, 0.2217266343062855e-01, 0.1471143614268198e+00, 0.1138202089714681e+00, 0.8179526119010886e-04, 0.5809925758690545e-04, 0.8084661071329148e-01, 0.6958524890097138e-01, 0.2173118009582292e-07, 0.3498739858072780e-08, 0.1120328867508400e-01, 0.8904308505808256e-09, 0.3751376363572422e-55, 0.3882375624867434e-05, 0.1697846193036144e-01}, + i0: 1, + n0: 19, + pp: 1, + n0in: 19, + dmin: 8.9043085058082561e-010, + dmin1: 6.9585227169791292e-002, + dmin2: 0.11373841371027797, + dn: 8.9043085058082561e-010, + dn1: 6.9585227169791292e-002, + dn2: 0.11373841371027797, + tau: 1.1203284285913290e-002, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.2463974850804771e+01, 0.2452784311062345e+01, 0.1274454348702788e-04, 0.6060609239813679e-05, 0.1166410336629578e+01, 0.1155204716501079e+01, 0.3724766654883956e-05, 0.3882375624867434e-05, 0.1204085798823124e+01, 0.1419979853975912e+01, 0.2271012218143261e+00, 0.1288536304732592e+00, 0.8056740422703550e+00, 0.7189222587736472e+00, 0.5330513126246473e-01, 0.5959804835594534e-01, 0.8037943538979316e+00, 0.7842891794360973e+00, 0.5129615818002433e-01, 0.1022287011292917e+00, 0.1563018888122747e+01, 0.1550258237777425e+01, 0.1006713350698832e+00, 0.6534828028921505e-01, 0.1006311377241790e+01, 0.1069941036637090e+01, 0.1401812239704283e+00, 0.2192904838448094e+00, 0.1673746889662340e+01, 0.1529098769031184e+01, 0.8584564749956700e-01, 0.6164441320989392e-01, 0.1098021846213856e+01, 0.1035735139045794e+01, 0.1056099032774466e-01, 0.1379518767882891e-01, 0.1352918635969054e+01, 0.1647959260308459e+01, 0.3200390963041470e+00, 0.3071268139247320e+00, 0.1581470773231080e+01, 0.1328568794801360e+01, 0.6542811978092533e-01, 0.6053010243026304e-01, 0.1229110747859551e+01, 0.1284508213253985e+01, 0.1271308521106110e+00, 0.2146341752620367e+00, 0.2168626705413786e+01, 0.1980815310529854e+01, 0.3802606466401751e-01, 0.1414976019284330e-01, 0.7370750004754409e+00, 0.1023440211907950e+01, 0.3117182559112661e+00, 0.1405518698344743e+00, 0.4614629804947978e+00, 0.3911019567584944e+00, 0.8139413038408401e-01, 0.3758136265851575e-01, 0.1805799067333021e+00, 0.1551841842545868e+00, 0.2338892446571373e-01, 0.2217266343062855e-01, 0.1471143614268198e+00, 0.1138202089714681e+00, 0.8179526119010886e-04, 0.5809925758690545e-04, 0.8084661071329148e-01, 0.6958524890097138e-01, 0.2173118009582292e-07, 0.3498739858072780e-08, 0.1120328867508400e-01, 0.8904308505808256e-09, 0.3751376363572422e-55, 0.3882375624867434e-05, 0.1697846193036144e-01}, + tauOut: 8.9043080564395014e-010, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2452790370781154e+01, 0.2452784311062345e+01, 0.2854399814229969e-05, 0.6060609239813679e-05, 0.1155205743586459e+01, 0.1155204716501079e+01, 0.4772219324121025e-05, 0.3882375624867434e-05, 0.1548828711339416e+01, 0.1419979853975912e+01, 0.5981019230390531e-01, 0.1288536304732592e+00, 0.7187101139352565e+00, 
0.7189222587736472e+00, 0.6503610222645050e-01, 0.5959804835594534e-01, 0.8214817774485077e+00, 0.7842891794360973e+00, 0.1929207566298143e+00, 0.1022287011292917e+00, 0.1422685760546395e+01, 0.1550258237777425e+01, 0.4914564318703864e-01, 0.6534828028921505e-01, 0.1240085876404429e+01, 0.1069941036637090e+01, 0.2703980549150243e+00, 0.2192904838448094e+00, 0.1320345126435623e+01, 0.1529098769031184e+01, 0.4835651195207321e-01, 0.6164441320989392e-01, 0.1001173813882118e+01, 0.1035735139045794e+01, 0.2270725319399537e-01, 0.1379518767882891e-01, 0.1932378820148765e+01, 0.1647959260308459e+01, 0.2111589595024383e+00, 0.3071268139247320e+00, 0.1177939936838754e+01, 0.1328568794801360e+01, 0.6600626338337756e-01, 0.6053010243026304e-01, 0.1433136124242214e+01, 0.1284508213253985e+01, 0.2966575563411978e+00, 0.2146341752620367e+00, 0.1698307513491068e+01, 0.1980815310529854e+01, 0.8526979628348903e-02, 0.1414976019284330e-01, 0.1155465101223645e+01, 0.1023440211907950e+01, 0.4757401262929915e-01, 0.1405518698344743e+00, 0.3811093058972801e+00, 0.3911019567584944e+00, 0.1530278326215801e-01, 0.3758136265851575e-01, 0.1620540635326265e+00, 0.1551841842545868e+00, 0.1557318051836492e-01, 0.2217266343062855e-01, 0.9830512682025924e-01, 0.1138202089714681e+00, 0.4112553872737890e-04, 0.5809925758690545e-04, 0.6954412597055305e-01, 0.6958524890097138e-01, 0.4479725446695418e-16, 0.3498739858072780e-08, 0.1396210563637444e-18, 0.8904308505808256e-09, 0.2854399814229969e-05, 0.3882375624867434e-05}, + i0: 1, + n0: 19, + pp: 0, + n0in: 19, + dmin: 1.3962105636374437e-019, + dmin1: 6.9544122471813200e-002, + dmin2: 9.8247027562672340e-002, + dn: 1.3962105636374437e-019, + dn1: 6.9544122471813200e-002, + dn2: 9.8247027562672340e-002, + tau: 8.9043080564395014e-010, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2452790370781154e+01, 0.2452784311062345e+01, 0.2854399814229969e-05, 0.6060609239813679e-05, 0.1155205743586459e+01, 0.1155204716501079e+01, 0.4772219324121025e-05, 0.3882375624867434e-05, 0.1548828711339416e+01, 0.1419979853975912e+01, 0.5981019230390531e-01, 0.1288536304732592e+00, 0.7187101139352565e+00, 0.7189222587736472e+00, 0.6503610222645050e-01, 0.5959804835594534e-01, 0.8214817774485077e+00, 0.7842891794360973e+00, 0.1929207566298143e+00, 0.1022287011292917e+00, 0.1422685760546395e+01, 0.1550258237777425e+01, 0.4914564318703864e-01, 0.6534828028921505e-01, 0.1240085876404429e+01, 0.1069941036637090e+01, 0.2703980549150243e+00, 0.2192904838448094e+00, 0.1320345126435623e+01, 0.1529098769031184e+01, 0.4835651195207321e-01, 0.6164441320989392e-01, 0.1001173813882118e+01, 0.1035735139045794e+01, 0.2270725319399537e-01, 0.1379518767882891e-01, 0.1932378820148765e+01, 0.1647959260308459e+01, 0.2111589595024383e+00, 0.3071268139247320e+00, 0.1177939936838754e+01, 0.1328568794801360e+01, 0.6600626338337756e-01, 0.6053010243026304e-01, 0.1433136124242214e+01, 0.1284508213253985e+01, 0.2966575563411978e+00, 0.2146341752620367e+00, 0.1698307513491068e+01, 0.1980815310529854e+01, 0.8526979628348903e-02, 0.1414976019284330e-01, 0.1155465101223645e+01, 0.1023440211907950e+01, 0.4757401262929915e-01, 0.1405518698344743e+00, 0.3811093058972801e+00, 0.3911019567584944e+00, 0.1530278326215801e-01, 0.3758136265851575e-01, 0.1620540635326265e+00, 0.1551841842545868e+00, 0.1557318051836492e-01, 0.2217266343062855e-01, 0.9830512682025924e-01, 0.1138202089714681e+00, 0.4112553872737890e-04, 0.5809925758690545e-04, 0.6954412597055305e-01, 0.6958524890097138e-01, 0.4479725446695418e-16, 
0.3498739858072780e-08, 0.1396210563637444e-18, 0.8904308505808256e-09, 0.2854399814229969e-05, 0.3882375624867434e-05}, + tauOut: 1.3962105636374427e-019, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2452790370781154e+01, 0.2452793225180968e+01, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1155205743586459e+01, 0.1155209171453138e+01, 0.4772219324121025e-05, 0.6398278760815103e-05, 0.1548828711339416e+01, 0.1608632505364561e+01, 0.5981019230390531e-01, 0.2672219415054498e-01, 0.7187101139352565e+00, 0.7570240220111620e+00, 0.6503610222645050e-01, 0.7057368234283014e-01, 0.8214817774485077e+00, 0.9438288517354918e+00, 0.1929207566298143e+00, 0.2908001941945216e+00, 0.1422685760546395e+01, 0.1181031209538913e+01, 0.4914564318703864e-01, 0.5160305461093759e-01, 0.1240085876404429e+01, 0.1458880876708516e+01, 0.2703980549150243e+00, 0.2447209773632920e+00, 0.1320345126435623e+01, 0.1123980661024405e+01, 0.4835651195207321e-01, 0.4307304847484577e-01, 0.1001173813882118e+01, 0.9808080186012680e+00, 0.2270725319399537e-01, 0.4473761868138884e-01, 0.1932378820148765e+01, 0.2098800160969815e+01, 0.2111589595024383e+00, 0.1185117935689049e+00, 0.1177939936838754e+01, 0.1125434406653226e+01, 0.6600626338337756e-01, 0.8405284210411720e-01, 0.1433136124242214e+01, 0.1645740838479294e+01, 0.2966575563411978e+00, 0.3061331074057168e+00, 0.1698307513491068e+01, 0.1400701385713701e+01, 0.8526979628348903e-02, 0.7034066989504621e-02, 0.1155465101223645e+01, 0.1196005046863439e+01, 0.4757401262929915e-01, 0.1515955052150448e-01, 0.3811093058972801e+00, 0.3812525386379337e+00, 0.1530278326215801e-01, 0.6504555274179689e-02, 0.1620540635326265e+00, 0.1711226887768117e+00, 0.1557318051836492e-01, 0.8946350111698947e-02, 0.9830512682025924e-01, 0.8939990224728768e-01, 0.4112553872737890e-04, 0.3199152990069927e-04, 0.6954412597055305e-01, 0.6951213444065239e-01, 0.4479725446695418e-16, 0.8997910999570377e-34, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + n0in: 19, + dmin: 1.3962105636374430e-019, + dmin1: 6.9512134440652351e-002, + dmin2: 8.9358776708560295e-002, + dn: 1.3962105636374430e-019, + dn1: 6.9512134440652351e-002, + dn2: 8.9358776708560295e-002, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2452790370781154e+01, 0.2452793225180968e+01, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1155205743586459e+01, 0.1155209171453138e+01, 0.4772219324121025e-05, 0.6398278760815103e-05, 0.1548828711339416e+01, 0.1608632505364561e+01, 0.5981019230390531e-01, 0.2672219415054498e-01, 0.7187101139352565e+00, 0.7570240220111620e+00, 0.6503610222645050e-01, 0.7057368234283014e-01, 0.8214817774485077e+00, 0.9438288517354918e+00, 0.1929207566298143e+00, 0.2908001941945216e+00, 0.1422685760546395e+01, 0.1181031209538913e+01, 0.4914564318703864e-01, 0.5160305461093759e-01, 0.1240085876404429e+01, 0.1458880876708516e+01, 0.2703980549150243e+00, 0.2447209773632920e+00, 0.1320345126435623e+01, 0.1123980661024405e+01, 0.4835651195207321e-01, 0.4307304847484577e-01, 0.1001173813882118e+01, 0.9808080186012680e+00, 0.2270725319399537e-01, 0.4473761868138884e-01, 0.1932378820148765e+01, 0.2098800160969815e+01, 0.2111589595024383e+00, 0.1185117935689049e+00, 0.1177939936838754e+01, 0.1125434406653226e+01, 0.6600626338337756e-01, 0.8405284210411720e-01, 0.1433136124242214e+01, 0.1645740838479294e+01, 0.2966575563411978e+00, 0.3061331074057168e+00, 0.1698307513491068e+01, 0.1400701385713701e+01, 0.8526979628348903e-02, 0.7034066989504621e-02, 
0.1155465101223645e+01, 0.1196005046863439e+01, 0.4757401262929915e-01, 0.1515955052150448e-01, 0.3811093058972801e+00, 0.3812525386379337e+00, 0.1530278326215801e-01, 0.6504555274179689e-02, 0.1620540635326265e+00, 0.1711226887768117e+00, 0.1557318051836492e-01, 0.8946350111698947e-02, 0.9830512682025924e-01, 0.8939990224728768e-01, 0.4112553872737890e-04, 0.3199152990069927e-04, 0.6954412597055305e-01, 0.6951213444065239e-01, 0.4479725446695418e-16, 0.8997910999570377e-34, 0.2818174710670554e-01}, + tauOut: 6.8088572105595116e-002, + ttypeOut: -8, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2384705997428018e+01, 0.2452793225180968e+01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.1087126346389419e+01, 0.1155209171453138e+01, 0.9467601652019932e-05, 0.6398278760815103e-05, 0.1567256659807859e+01, 0.1608632505364561e+01, 0.1290748567965171e-01, 0.2672219415054498e-01, 0.7466016465687454e+00, 0.7570240220111620e+00, 0.8921689079377818e-01, 0.7057368234283014e-01, 0.1077323583030640e+01, 0.9438288517354918e+00, 0.3187938243378626e+00, 0.2908001941945216e+00, 0.8457518677063927e+00, 0.1181031209538913e+01, 0.8901276181133644e-01, 0.5160305461093759e-01, 0.1546500520154876e+01, 0.1458880876708516e+01, 0.1778606876095877e+00, 0.2447209773632920e+00, 0.9211044497840675e+00, 0.1123980661024405e+01, 0.4586493023634136e-01, 0.4307304847484577e-01, 0.9115921349407204e+00, 0.9808080186012680e+00, 0.1030014605117353e+00, 0.4473761868138884e-01, 0.2046221921921390e+01, 0.2098800160969815e+01, 0.6518220171905387e-01, 0.1185117935689049e+00, 0.1076216474932694e+01, 0.1125434406653226e+01, 0.1285328723941422e+00, 0.8405284210411720e-01, 0.1755252501385274e+01, 0.1645740838479294e+01, 0.2442959445536252e+00, 0.3061331074057168e+00, 0.1095350936043985e+01, 0.1400701385713701e+01, 0.7680442260639305e-02, 0.7034066989504621e-02, 0.1135395583018709e+01, 0.1196005046863439e+01, 0.5090399511302622e-02, 0.1515955052150448e-01, 0.3145781222952156e+00, 0.3812525386379337e+00, 0.3538316586334169e-02, 0.6504555274179689e-02, 0.1084421501965814e+00, 0.1711226887768117e+00, 0.7375387005938499e-02, 0.8946350111698947e-02, 0.1396793466565477e-01, 0.8939990224728768e-01, 0.1592074691534444e-03, 0.3199152990069927e-04, 0.1264354865903830e-02, 0.6951213444065239e-01, 0.6512368848977837e-06, 0.1344352644992036e-05}, + i0: 1, + n0: 18, + pp: 0, + n0in: 18, + dmin: 1.2643548659038301e-003, + dmin1: 1.3935943135754067e-002, + dmin2: 9.9495800084882416e-002, + dn: 1.2643548659038301e-003, + dn1: 1.3935943135754067e-002, + dn2: 9.9495800084882416e-002, + tau: 6.8088572105595116e-002, + ttype: -8, + g: 0.0000000000000000, + zOut: []float64{0.2384705997428018e+01, 0.2452793225180968e+01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.1087126346389419e+01, 0.1155209171453138e+01, 0.9467601652019932e-05, 0.6398278760815103e-05, 0.1567256659807859e+01, 0.1608632505364561e+01, 0.1290748567965171e-01, 0.2672219415054498e-01, 0.7466016465687454e+00, 0.7570240220111620e+00, 0.8921689079377818e-01, 0.7057368234283014e-01, 0.1077323583030640e+01, 0.9438288517354918e+00, 0.3187938243378626e+00, 0.2908001941945216e+00, 0.8457518677063927e+00, 0.1181031209538913e+01, 0.8901276181133644e-01, 0.5160305461093759e-01, 0.1546500520154876e+01, 0.1458880876708516e+01, 0.1778606876095877e+00, 0.2447209773632920e+00, 0.9211044497840675e+00, 0.1123980661024405e+01, 0.4586493023634136e-01, 0.4307304847484577e-01, 0.9115921349407204e+00, 0.9808080186012680e+00, 0.1030014605117353e+00, 0.4473761868138884e-01, 0.2046221921921390e+01, 
0.2098800160969815e+01, 0.6518220171905387e-01, 0.1185117935689049e+00, 0.1076216474932694e+01, 0.1125434406653226e+01, 0.1285328723941422e+00, 0.8405284210411720e-01, 0.1755252501385274e+01, 0.1645740838479294e+01, 0.2442959445536252e+00, 0.3061331074057168e+00, 0.1095350936043985e+01, 0.1400701385713701e+01, 0.7680442260639305e-02, 0.7034066989504621e-02, 0.1135395583018709e+01, 0.1196005046863439e+01, 0.5090399511302622e-02, 0.1515955052150448e-01, 0.3145781222952156e+00, 0.3812525386379337e+00, 0.3538316586334169e-02, 0.6504555274179689e-02, 0.1084421501965814e+00, 0.1711226887768117e+00, 0.7375387005938499e-02, 0.8946350111698947e-02, 0.1396793466565477e-01, 0.8939990224728768e-01, 0.1592074691534444e-03, 0.3199152990069927e-04, 0.1264354865903830e-02, 0.6951213444065239e-01, 0.6512368848977837e-06, 0.1344352644992036e-05}, + tauOut: 1.2463174543591322e-003, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2384705997428018e+01, 0.2383460331210544e+01, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.1087126346389419e+01, 0.1085889199499347e+01, 0.9467601652019932e-05, 0.1366452649899944e-04, 0.1567256659807859e+01, 0.1578904163506652e+01, 0.1290748567965171e-01, 0.6103442048115082e-02, 0.7466016465687454e+00, 0.8284687778600495e+00, 0.8921689079377818e-01, 0.1160157908485996e+00, 0.1077323583030640e+01, 0.1278855299065544e+01, 0.3187938243378626e+00, 0.2108295383723412e+00, 0.8457518677063927e+00, 0.7226887736910288e+00, 0.8901276181133644e-01, 0.1904807262171572e+00, 0.1546500520154876e+01, 0.1532634164092948e+01, 0.1778606876095877e+00, 0.1068932656188067e+00, 0.9211044497840675e+00, 0.8588297969472432e+00, 0.4586493023634136e-01, 0.4868264913684867e-01, 0.9115921349407204e+00, 0.9646646288612479e+00, 0.1030014605117353e+00, 0.2184840619043245e+00, 0.2046221921921390e+01, 0.1891673744281760e+01, 0.6518220171905387e-01, 0.3708364593761750e-01, 0.1076216474932694e+01, 0.1166419383934860e+01, 0.1285328723941422e+00, 0.1934189785315259e+00, 0.1755252501385274e+01, 0.1804883149953014e+01, 0.2442959445536252e+00, 0.1482587897978486e+00, 0.1095350936043985e+01, 0.9535262710524167e+00, 0.7680442260639305e-02, 0.9145359161143373e-02, 0.1135395583018709e+01, 0.1130094305914509e+01, 0.5090399511302622e-02, 0.1416986451145964e-02, 0.3145781222952156e+00, 0.3154531349760448e+00, 0.3538316586334169e-02, 0.1216353924418735e-02, 0.1084421501965814e+00, 0.1133548658237420e+00, 0.7375387005938499e-02, 0.9088178357782586e-03, 0.1396793466565477e-01, 0.1197200684467082e-01, 0.1592074691534444e-03, 0.1681378409852773e-04, 0.1264354865903830e-02, 0.1223627446170173e-05, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + n0in: 18, + dmin: 1.2236274461701734e-006, + dmin1: 1.1812799375517376e-002, + dmin2: 0.10597947881780349, + dn: 1.2236274461701734e-006, + dn1: 1.1812799375517376e-002, + dn2: 0.10597947881780349, + tau: 1.2463174543591322e-003, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2384705997428018e+01, 0.2383460331210544e+01, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.1087126346389419e+01, 0.1085889199499347e+01, 0.9467601652019932e-05, 0.1366452649899944e-04, 0.1567256659807859e+01, 0.1578904163506652e+01, 0.1290748567965171e-01, 0.6103442048115082e-02, 0.7466016465687454e+00, 0.8284687778600495e+00, 0.8921689079377818e-01, 0.1160157908485996e+00, 0.1077323583030640e+01, 0.1278855299065544e+01, 0.3187938243378626e+00, 0.2108295383723412e+00, 0.8457518677063927e+00, 0.7226887736910288e+00, 
0.8901276181133644e-01, 0.1904807262171572e+00, 0.1546500520154876e+01, 0.1532634164092948e+01, 0.1778606876095877e+00, 0.1068932656188067e+00, 0.9211044497840675e+00, 0.8588297969472432e+00, 0.4586493023634136e-01, 0.4868264913684867e-01, 0.9115921349407204e+00, 0.9646646288612479e+00, 0.1030014605117353e+00, 0.2184840619043245e+00, 0.2046221921921390e+01, 0.1891673744281760e+01, 0.6518220171905387e-01, 0.3708364593761750e-01, 0.1076216474932694e+01, 0.1166419383934860e+01, 0.1285328723941422e+00, 0.1934189785315259e+00, 0.1755252501385274e+01, 0.1804883149953014e+01, 0.2442959445536252e+00, 0.1482587897978486e+00, 0.1095350936043985e+01, 0.9535262710524167e+00, 0.7680442260639305e-02, 0.9145359161143373e-02, 0.1135395583018709e+01, 0.1130094305914509e+01, 0.5090399511302622e-02, 0.1416986451145964e-02, 0.3145781222952156e+00, 0.3154531349760448e+00, 0.3538316586334169e-02, 0.1216353924418735e-02, 0.1084421501965814e+00, 0.1133548658237420e+00, 0.7375387005938499e-02, 0.9088178357782586e-03, 0.1396793466565477e-01, 0.1197200684467082e-01, 0.1592074691534444e-03, 0.1681378409852773e-04, 0.1264354865903830e-02, 0.1223627446170173e-05, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.2818174710670554e-01}, + tauOut: 1.2218877946904154e-006, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2383459406360114e+01, 0.2383460331210544e+01, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.1085901506809686e+01, 0.1085889199499347e+01, 0.1986826396898849e-04, 0.1366452649899944e-04, 0.1584986515403004e+01, 0.1578904163506652e+01, 0.3190255011763210e-02, 0.6103442048115082e-02, 0.9412930918090910e+00, 0.8284687778600495e+00, 0.1576208411525267e+00, 0.1160157908485996e+00, 0.1332062774397564e+01, 0.1278855299065544e+01, 0.1143821023097510e+00, 0.2108295383723412e+00, 0.7987861757106404e+00, 0.7226887736910288e+00, 0.3654761154847582e+00, 0.1904807262171572e+00, 0.1274050092339202e+01, 0.1532634164092948e+01, 0.7205613198290631e-01, 0.1068932656188067e+00, 0.8354550922133908e+00, 0.8588297969472432e+00, 0.5621179414582513e-01, 0.4868264913684867e-01, 0.1126935674731953e+01, 0.9646646288612479e+00, 0.3667472533840468e+00, 0.2184840619043245e+00, 0.1562008914947536e+01, 0.1891673744281760e+01, 0.2769195683500124e-01, 0.3708364593761750e-01, 0.1332145183743590e+01, 0.1166419383934860e+01, 0.2620575140703801e+00, 0.1934189785315259e+00, 0.1691083203792688e+01, 0.1804883149953014e+01, 0.8359650824372877e-01, 0.1482587897978486e+00, 0.8790739000820365e+00, 0.9535262710524167e+00, 0.1175682535061811e-01, 0.9145359161143373e-02, 0.1119753245127243e+01, 0.1130094305914509e+01, 0.3991886785572840e-03, 0.1416986451145964e-02, 0.3162690783341116e+00, 0.3154531349760448e+00, 0.4359567385560517e-03, 0.1216353924418735e-02, 0.1138265050331695e+00, 0.1133548658237420e+00, 0.9558734450579545e-04, 0.9088178357782586e-03, 0.1189201139646886e-01, 0.1197200684467082e-01, 0.1730052806966466e-08, 0.1681378409852773e-04, 0.9598672791582074e-11, 0.1223627446170173e-05, 0.1353283656358167e-06, 0.2970373645586866e-06}, + i0: 1, + n0: 18, + pp: 0, + n0in: 18, + dmin: 9.5986727915820745e-012, + dmin1: 1.1875197612370336e-002, + dmin2: 0.11291768719739126, + dn: 9.5986727915820745e-012, + dn1: 1.1875197612370336e-002, + dn2: 0.11291768719739126, + tau: 1.2218877946904154e-006, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2383459406360114e+01, 0.2383460331210544e+01, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.1085901506809686e+01, 0.1085889199499347e+01, 0.1986826396898849e-04, 
0.1366452649899944e-04, 0.1584986515403004e+01, 0.1578904163506652e+01, 0.3190255011763210e-02, 0.6103442048115082e-02, 0.9412930918090910e+00, 0.8284687778600495e+00, 0.1576208411525267e+00, 0.1160157908485996e+00, 0.1332062774397564e+01, 0.1278855299065544e+01, 0.1143821023097510e+00, 0.2108295383723412e+00, 0.7987861757106404e+00, 0.7226887736910288e+00, 0.3654761154847582e+00, 0.1904807262171572e+00, 0.1274050092339202e+01, 0.1532634164092948e+01, 0.7205613198290631e-01, 0.1068932656188067e+00, 0.8354550922133908e+00, 0.8588297969472432e+00, 0.5621179414582513e-01, 0.4868264913684867e-01, 0.1126935674731953e+01, 0.9646646288612479e+00, 0.3667472533840468e+00, 0.2184840619043245e+00, 0.1562008914947536e+01, 0.1891673744281760e+01, 0.2769195683500124e-01, 0.3708364593761750e-01, 0.1332145183743590e+01, 0.1166419383934860e+01, 0.2620575140703801e+00, 0.1934189785315259e+00, 0.1691083203792688e+01, 0.1804883149953014e+01, 0.8359650824372877e-01, 0.1482587897978486e+00, 0.8790739000820365e+00, 0.9535262710524167e+00, 0.1175682535061811e-01, 0.9145359161143373e-02, 0.1119753245127243e+01, 0.1130094305914509e+01, 0.3991886785572840e-03, 0.1416986451145964e-02, 0.3162690783341116e+00, 0.3154531349760448e+00, 0.4359567385560517e-03, 0.1216353924418735e-02, 0.1138265050331695e+00, 0.1133548658237420e+00, 0.9558734450579545e-04, 0.9088178357782586e-03, 0.1189201139646886e-01, 0.1197200684467082e-01, 0.1730052806966466e-08, 0.1681378409852773e-04, 0.9598672791582074e-11, 0.1223627446170173e-05, 0.1353283656358167e-06, 0.2970373645586866e-06}, + tauOut: 9.5986713933289272e-012, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2383459406360114e+01, 0.2383459541678881e+01, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.1085901506809686e+01, 0.1085921313408604e+01, 0.1986826396898849e-04, 0.2899927470478232e-04, 0.1584986515403004e+01, 0.1588147771130463e+01, 0.3190255011763210e-02, 0.1890859942802736e-02, 0.9412930918090910e+00, 0.1097023073009216e+01, 0.1576208411525267e+00, 0.1913914667196325e+00, 0.1332062774397564e+01, 0.1255053409978084e+01, 0.1143821023097510e+00, 0.7279916643176543e-01, 0.7987861757106404e+00, 0.1091463124754034e+01, 0.3654761154847582e+00, 0.4266153094141972e+00, 0.1274050092339202e+01, 0.9194909148983121e+00, 0.7205613198290631e-01, 0.6547064404326042e-01, 0.8354550922133908e+00, 0.8261962423063568e+00, 0.5621179414582513e-01, 0.7667315937770838e-01, 0.1126935674731953e+01, 0.1417009768728692e+01, 0.3667472533840468e+00, 0.4042756034295816e+00, 0.1562008914947536e+01, 0.1185425268343357e+01, 0.2769195683500124e-01, 0.3111938635974581e-01, 0.1332145183743590e+01, 0.1563083311444625e+01, 0.2620575140703801e+00, 0.2835172362390010e+00, 0.1691083203792688e+01, 0.1491162475787817e+01, 0.8359650824372877e-01, 0.4928202642453804e-01, 0.8790739000820365e+00, 0.8415486989985177e+00, 0.1175682535061811e-01, 0.1564347179719430e-01, 0.1119753245127243e+01, 0.1104508961999007e+01, 0.3991886785572840e-03, 0.1143051254380294e-03, 0.3162690783341116e+00, 0.3165907299376309e+00, 0.4359567385560517e-03, 0.1567431614478116e-03, 0.1138265050331695e+00, 0.1137653492066288e+00, 0.9558734450579545e-04, 0.9991845479738400e-05, 0.1189201139646886e-01, 0.1188202127144326e-01, 0.1730052806966466e-08, 0.1397591405272086e-17, 0.9598672791582074e-11, 0.6617412588681544e-21, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + n0in: 18, + dmin: 6.6174125886815435e-022, + dmin1: 1.1882019541390456e-002, + dmin2: 0.11366976186212303, 
+ dn: 6.6174125886815435e-022, + dn1: 1.1882019541390456e-002, + dn2: 0.11366976186212303, + tau: 9.5986713933289272e-012, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2383459406360114e+01, 0.2383459541678881e+01, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.1085901506809686e+01, 0.1085921313408604e+01, 0.1986826396898849e-04, 0.2899927470478232e-04, 0.1584986515403004e+01, 0.1588147771130463e+01, 0.3190255011763210e-02, 0.1890859942802736e-02, 0.9412930918090910e+00, 0.1097023073009216e+01, 0.1576208411525267e+00, 0.1913914667196325e+00, 0.1332062774397564e+01, 0.1255053409978084e+01, 0.1143821023097510e+00, 0.7279916643176543e-01, 0.7987861757106404e+00, 0.1091463124754034e+01, 0.3654761154847582e+00, 0.4266153094141972e+00, 0.1274050092339202e+01, 0.9194909148983121e+00, 0.7205613198290631e-01, 0.6547064404326042e-01, 0.8354550922133908e+00, 0.8261962423063568e+00, 0.5621179414582513e-01, 0.7667315937770838e-01, 0.1126935674731953e+01, 0.1417009768728692e+01, 0.3667472533840468e+00, 0.4042756034295816e+00, 0.1562008914947536e+01, 0.1185425268343357e+01, 0.2769195683500124e-01, 0.3111938635974581e-01, 0.1332145183743590e+01, 0.1563083311444625e+01, 0.2620575140703801e+00, 0.2835172362390010e+00, 0.1691083203792688e+01, 0.1491162475787817e+01, 0.8359650824372877e-01, 0.4928202642453804e-01, 0.8790739000820365e+00, 0.8415486989985177e+00, 0.1175682535061811e-01, 0.1564347179719430e-01, 0.1119753245127243e+01, 0.1104508961999007e+01, 0.3991886785572840e-03, 0.1143051254380294e-03, 0.3162690783341116e+00, 0.3165907299376309e+00, 0.4359567385560517e-03, 0.1567431614478116e-03, 0.1138265050331695e+00, 0.1137653492066288e+00, 0.9558734450579545e-04, 0.9991845479738400e-05, 0.1189201139646886e-01, 0.1188202127144326e-01, 0.1730052806966466e-08, 0.1397591405272086e-17, 0.9598672791582074e-11, 0.6617412588681544e-21, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.2818174710670554e-01}, + tauOut: 6.6174125886815426e-022, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2383459603334333e+01, 0.2383459541678881e+01, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.1085950284592642e+01, 0.1085921313408604e+01, 0.4240998334843304e-04, 0.2899927470478232e-04, 0.1589996221089918e+01, 0.1588147771130463e+01, 0.1304604978030436e-02, 0.1890859942802736e-02, 0.1287109934750819e+01, 0.1097023073009216e+01, 0.1866247058326724e+00, 0.1913914667196325e+00, 0.1141227870577177e+01, 0.1255053409978084e+01, 0.6962466280543776e-01, 0.7279916643176543e-01, 0.1448453771362794e+01, 0.1091463124754034e+01, 0.2708190685256155e+00, 0.4266153094141972e+00, 0.7141424904159570e+00, 0.9194909148983121e+00, 0.7574342769943966e-01, 0.6547064404326042e-01, 0.8271259739846255e+00, 0.8261962423063568e+00, 0.1313543755784947e+00, 0.7667315937770838e-01, 0.1689930996579779e+01, 0.1417009768728692e+01, 0.2835846650840220e+00, 0.4042756034295816e+00, 0.9329599896190804e+00, 0.1185425268343357e+01, 0.5213749144931325e-01, 0.3111938635974581e-01, 0.1794463056234313e+01, 0.1563083311444625e+01, 0.2355970842920850e+00, 0.2835172362390010e+00, 0.1304847417920270e+01, 0.1491162475787817e+01, 0.3178396542921673e-01, 0.4928202642453804e-01, 0.8254082053664953e+00, 0.8415486989985177e+00, 0.2093310277804656e-01, 0.1564347179719430e-01, 0.1083690164346398e+01, 0.1104508961999007e+01, 0.3339325601415243e-04, 0.1143051254380294e-03, 0.3167140798430647e+00, 0.3165907299376309e+00, 0.5630296103885627e-04, 0.1567431614478116e-03, 0.1137190380910697e+00, 0.1137653492066288e+00, 0.1044005669799533e-05, 
0.9991845479738400e-05, 0.1188097726577346e-01, 0.1188202127144326e-01, 0.7784240935906335e-37, 0.1397591405272086e-17}, + i0: 1, + n0: 17, + pp: 0, + n0in: 18, + dmin: 6.6174125886815426e-022, + dmin1: 1.1880977265773463e-002, + dmin2: 0.11370904624558997, + dn: 6.6174125886815426e-022, + dn1: 1.1880977265773463e-002, + dn2: 0.11370904624558997, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2383459603334333e+01, 0.2383459541678881e+01, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.1085950284592642e+01, 0.1085921313408604e+01, 0.4240998334843304e-04, 0.2899927470478232e-04, 0.1589996221089918e+01, 0.1588147771130463e+01, 0.1304604978030436e-02, 0.1890859942802736e-02, 0.1287109934750819e+01, 0.1097023073009216e+01, 0.1866247058326724e+00, 0.1913914667196325e+00, 0.1141227870577177e+01, 0.1255053409978084e+01, 0.6962466280543776e-01, 0.7279916643176543e-01, 0.1448453771362794e+01, 0.1091463124754034e+01, 0.2708190685256155e+00, 0.4266153094141972e+00, 0.7141424904159570e+00, 0.9194909148983121e+00, 0.7574342769943966e-01, 0.6547064404326042e-01, 0.8271259739846255e+00, 0.8261962423063568e+00, 0.1313543755784947e+00, 0.7667315937770838e-01, 0.1689930996579779e+01, 0.1417009768728692e+01, 0.2835846650840220e+00, 0.4042756034295816e+00, 0.9329599896190804e+00, 0.1185425268343357e+01, 0.5213749144931325e-01, 0.3111938635974581e-01, 0.1794463056234313e+01, 0.1563083311444625e+01, 0.2355970842920850e+00, 0.2835172362390010e+00, 0.1304847417920270e+01, 0.1491162475787817e+01, 0.3178396542921673e-01, 0.4928202642453804e-01, 0.8254082053664953e+00, 0.8415486989985177e+00, 0.2093310277804656e-01, 0.1564347179719430e-01, 0.1083690164346398e+01, 0.1104508961999007e+01, 0.3339325601415243e-04, 0.1143051254380294e-03, 0.3167140798430647e+00, 0.3165907299376309e+00, 0.5630296103885627e-04, 0.1567431614478116e-03, 0.1137190380910697e+00, 0.1137653492066288e+00, 0.1044005669799533e-05, 0.9991845479738400e-05, 0.1188097726577346e-01, 0.1188202127144326e-01, 0.7784240935906335e-37, 0.1397591405272086e-17}, + tauOut: 1.1880832155707781e-002, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2383459603334333e+01, 0.2371578799269292e+01, 0.2809066689524106e-07, 0.1286276792433928e-07, 0.1085950284592642e+01, 0.1074111849557515e+01, 0.4240998334843304e-04, 0.6277904232066118e-04, 0.1589996221089918e+01, 0.1579357214869920e+01, 0.1304604978030436e-02, 0.1063198377376995e-02, 0.1287109934750819e+01, 0.1460790610050406e+01, 0.1866247058326724e+00, 0.1457986614708343e+00, 0.1141227870577177e+01, 0.1053173039756072e+01, 0.6962466280543776e-01, 0.9575644420574685e-01, 0.1448453771362794e+01, 0.1611635563526955e+01, 0.2708190685256155e+00, 0.1200044280642223e+00, 0.7141424904159570e+00, 0.6580006578954666e+00, 0.7574342769943966e-01, 0.9521169265880262e-01, 0.8271259739846255e+00, 0.8513878247486099e+00, 0.1313543755784947e+00, 0.2607270439790754e+00, 0.1689930996579779e+01, 0.1700907785529018e+01, 0.2835846650840220e+00, 0.1555482010511415e+00, 0.9329599896190804e+00, 0.8176684478615445e+00, 0.5213749144931325e-01, 0.1144214412274440e+00, 0.1794463056234313e+01, 0.1903757867143247e+01, 0.2355970842920850e+00, 0.1614796988702029e+00, 0.1304847417920270e+01, 0.1163270852323576e+01, 0.3178396542921673e-01, 0.2255256874351997e-01, 0.8254082053664953e+00, 0.8119079072453143e+00, 0.2093310277804656e-01, 0.2794035799797570e-01, 0.1083690164346398e+01, 0.1043902367448729e+01, 0.3339325601415243e-04, 0.1013132519024162e-04, 0.3167140798430647e+00, 
0.3048794193232055e+00, 0.5630296103885627e-04, 0.2100082250625822e-04, 0.1137190380910697e+00, 0.1018182491185255e+00, 0.1044005669799533e-05, 0.1218230301111122e-06, 0.1188097726577346e-01, 0.2328703557243073e-07, 0.7784240935906335e-37, 0.1286276792433928e-07, 0.9751785856405315e-01}, + i0: 1, + n0: 17, + pp: 1, + n0in: 17, + dmin: 2.3287035572430725e-008, + dmin1: 0.10181720511285566, + dmin2: 0.30482311636216664, + dn: 2.3287035572430725e-008, + dn1: 0.10181720511285566, + dn2: 0.30482311636216664, + tau: 1.1880832155707781e-002, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.2383459603334333e+01, 0.2371578799269292e+01, 0.2809066689524106e-07, 0.1286276792433928e-07, 0.1085950284592642e+01, 0.1074111849557515e+01, 0.4240998334843304e-04, 0.6277904232066118e-04, 0.1589996221089918e+01, 0.1579357214869920e+01, 0.1304604978030436e-02, 0.1063198377376995e-02, 0.1287109934750819e+01, 0.1460790610050406e+01, 0.1866247058326724e+00, 0.1457986614708343e+00, 0.1141227870577177e+01, 0.1053173039756072e+01, 0.6962466280543776e-01, 0.9575644420574685e-01, 0.1448453771362794e+01, 0.1611635563526955e+01, 0.2708190685256155e+00, 0.1200044280642223e+00, 0.7141424904159570e+00, 0.6580006578954666e+00, 0.7574342769943966e-01, 0.9521169265880262e-01, 0.8271259739846255e+00, 0.8513878247486099e+00, 0.1313543755784947e+00, 0.2607270439790754e+00, 0.1689930996579779e+01, 0.1700907785529018e+01, 0.2835846650840220e+00, 0.1555482010511415e+00, 0.9329599896190804e+00, 0.8176684478615445e+00, 0.5213749144931325e-01, 0.1144214412274440e+00, 0.1794463056234313e+01, 0.1903757867143247e+01, 0.2355970842920850e+00, 0.1614796988702029e+00, 0.1304847417920270e+01, 0.1163270852323576e+01, 0.3178396542921673e-01, 0.2255256874351997e-01, 0.8254082053664953e+00, 0.8119079072453143e+00, 0.2093310277804656e-01, 0.2794035799797570e-01, 0.1083690164346398e+01, 0.1043902367448729e+01, 0.3339325601415243e-04, 0.1013132519024162e-04, 0.3167140798430647e+00, 0.3048794193232055e+00, 0.5630296103885627e-04, 0.2100082250625822e-04, 0.1137190380910697e+00, 0.1018182491185255e+00, 0.1044005669799533e-05, 0.1218230301111122e-06, 0.1188097726577346e-01, 0.2328703557243073e-07, 0.7784240935906335e-37, 0.1286276792433928e-07, 0.9751785856405315e-01}, + tauOut: 2.3287007705477136e-008, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2371578788845052e+01, 0.2371578799269292e+01, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.1074174599487151e+01, 0.1074111849557515e+01, 0.9230392664199863e-04, 0.6277904232066118e-04, 0.1580328086033647e+01, 0.1579357214869920e+01, 0.9827770701659698e-03, 0.1063198377376995e-02, 0.1605606471164067e+01, 0.1460790610050406e+01, 0.9563440497488795e-01, 0.1457986614708343e+00, 0.1053295055699924e+01, 0.1053173039756072e+01, 0.1465159169633776e+00, 0.9575644420574685e-01, 0.1585124051340792e+01, 0.1611635563526955e+01, 0.4981502397231050e-01, 0.1200044280642223e+00, 0.7033973032949510e+00, 0.6580006578954666e+00, 0.1152436546510613e+00, 0.9521169265880262e-01, 0.9968711907896164e+00, 0.8513878247486099e+00, 0.4448645553200346e+00, 0.2607270439790754e+00, 0.1411591407973117e+01, 0.1700907785529018e+01, 0.9010174998427344e-01, 0.1555482010511415e+00, 0.8419881158177074e+00, 0.8176684478615445e+00, 0.2587099684834221e+00, 0.1144214412274440e+00, 0.1806527574243020e+01, 0.1903757867143247e+01, 0.1039810460775319e+00, 0.1614796988702029e+00, 0.1081842351702556e+01, 0.1163270852323576e+01, 0.1692539477932339e-01, 0.2255256874351997e-01, 0.8229228471769590e+00, 
0.8119079072453143e+00, 0.3544318396494814e-01, 0.2794035799797570e-01, 0.1008469291521964e+01, 0.1043902367448729e+01, 0.3062892015595061e-05, 0.1013132519024162e-04, 0.3048973339666884e+00, 0.3048794193232055e+00, 0.7013072071892081e-05, 0.2100082250625822e-04, 0.1018113345824760e+00, 0.1018182491185255e+00, 0.2786425742647189e-13, 0.1218230301111122e-06, 0.2696165428114542e-17, 0.2328703557243073e-07, 0.5825676764620370e-08, 0.1286276792433928e-07}, + i0: 1, + n0: 17, + pp: 0, + n0in: 17, + dmin: 2.6961654281145418e-018, + dmin1: 0.10181121275944585, + dmin2: 0.30487633314418217, + dn: 2.6961654281145418e-018, + dn1: 0.10181121275944585, + dn2: 0.30487633314418217, + tau: 2.3287007705477136e-008, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2371578788845052e+01, 0.2371578799269292e+01, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.1074174599487151e+01, 0.1074111849557515e+01, 0.9230392664199863e-04, 0.6277904232066118e-04, 0.1580328086033647e+01, 0.1579357214869920e+01, 0.9827770701659698e-03, 0.1063198377376995e-02, 0.1605606471164067e+01, 0.1460790610050406e+01, 0.9563440497488795e-01, 0.1457986614708343e+00, 0.1053295055699924e+01, 0.1053173039756072e+01, 0.1465159169633776e+00, 0.9575644420574685e-01, 0.1585124051340792e+01, 0.1611635563526955e+01, 0.4981502397231050e-01, 0.1200044280642223e+00, 0.7033973032949510e+00, 0.6580006578954666e+00, 0.1152436546510613e+00, 0.9521169265880262e-01, 0.9968711907896164e+00, 0.8513878247486099e+00, 0.4448645553200346e+00, 0.2607270439790754e+00, 0.1411591407973117e+01, 0.1700907785529018e+01, 0.9010174998427344e-01, 0.1555482010511415e+00, 0.8419881158177074e+00, 0.8176684478615445e+00, 0.2587099684834221e+00, 0.1144214412274440e+00, 0.1806527574243020e+01, 0.1903757867143247e+01, 0.1039810460775319e+00, 0.1614796988702029e+00, 0.1081842351702556e+01, 0.1163270852323576e+01, 0.1692539477932339e-01, 0.2255256874351997e-01, 0.8229228471769590e+00, 0.8119079072453143e+00, 0.3544318396494814e-01, 0.2794035799797570e-01, 0.1008469291521964e+01, 0.1043902367448729e+01, 0.3062892015595061e-05, 0.1013132519024162e-04, 0.3048973339666884e+00, 0.3048794193232055e+00, 0.7013072071892081e-05, 0.2100082250625822e-04, 0.1018113345824760e+00, 0.1018182491185255e+00, 0.2786425742647189e-13, 0.1218230301111122e-06, 0.2696165428114542e-17, 0.2328703557243073e-07, 0.5825676764620370e-08, 0.1286276792433928e-07}, + tauOut: 2.6961654281138038e-018, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2371578788845052e+01, 0.2371578794670729e+01, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.1074174599487151e+01, 0.1074266900775131e+01, 0.9230392664199863e-04, 0.1357860766428602e-03, 0.1580328086033647e+01, 0.1581175077027170e+01, 0.9827770701659698e-03, 0.9979623676695663e-03, 0.1605606471164067e+01, 0.1700242913771285e+01, 0.9563440497488795e-01, 0.5924520849284005e-01, 0.1053295055699924e+01, 0.1140565764170461e+01, 0.1465159169633776e+00, 0.2036234219705987e+00, 0.1585124051340792e+01, 0.1431315653342504e+01, 0.4981502397231050e-01, 0.2448080089382756e-01, 0.7033973032949510e+00, 0.7941601570521848e+00, 0.1152436546510613e+00, 0.1446598374682775e+00, 0.9968711907896164e+00, 0.1297075908641373e+01, 0.4448645553200346e+00, 0.4841405038964208e+00, 0.1411591407973117e+01, 0.1017552654060970e+01, 0.9010174998427344e-01, 0.7455594793877945e-01, 0.8419881158177074e+00, 0.1026142136362350e+01, 0.2587099684834221e+00, 0.4554599945126984e+00, 0.1806527574243020e+01, 0.1455048625807853e+01, 0.1039810460775319e+00, 
0.7731088667813635e-01, 0.1081842351702556e+01, 0.1021456859803743e+01, 0.1692539477932339e-01, 0.1363571444815687e-01, 0.8229228471769590e+00, 0.8447303166937503e+00, 0.3544318396494814e-01, 0.4231334180394086e-01, 0.1008469291521964e+01, 0.9661590126100381e+00, 0.3062892015595061e-05, 0.9665775484099522e-06, 0.3048973339666884e+00, 0.3049033804612119e+00, 0.7013072071892081e-05, 0.2341758973227438e-05, 0.1018113345824760e+00, 0.1018089928235306e+00, 0.2786425742647189e-13, 0.7379175991216932e-30, 0.1093987140067686e+00}, + i0: 1, + n0: 16, + pp: 1, + n0in: 17, + dmin: 2.6961654281138038e-018, + dmin1: 0.10180899282350273, + dmin2: 0.30489636738914000, + dn: 2.6961654281138038e-018, + dn1: 0.10180899282350273, + dn2: 0.30489636738914000, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2371578788845052e+01, 0.2371578794670729e+01, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.1074174599487151e+01, 0.1074266900775131e+01, 0.9230392664199863e-04, 0.1357860766428602e-03, 0.1580328086033647e+01, 0.1581175077027170e+01, 0.9827770701659698e-03, 0.9979623676695663e-03, 0.1605606471164067e+01, 0.1700242913771285e+01, 0.9563440497488795e-01, 0.5924520849284005e-01, 0.1053295055699924e+01, 0.1140565764170461e+01, 0.1465159169633776e+00, 0.2036234219705987e+00, 0.1585124051340792e+01, 0.1431315653342504e+01, 0.4981502397231050e-01, 0.2448080089382756e-01, 0.7033973032949510e+00, 0.7941601570521848e+00, 0.1152436546510613e+00, 0.1446598374682775e+00, 0.9968711907896164e+00, 0.1297075908641373e+01, 0.4448645553200346e+00, 0.4841405038964208e+00, 0.1411591407973117e+01, 0.1017552654060970e+01, 0.9010174998427344e-01, 0.7455594793877945e-01, 0.8419881158177074e+00, 0.1026142136362350e+01, 0.2587099684834221e+00, 0.4554599945126984e+00, 0.1806527574243020e+01, 0.1455048625807853e+01, 0.1039810460775319e+00, 0.7731088667813635e-01, 0.1081842351702556e+01, 0.1021456859803743e+01, 0.1692539477932339e-01, 0.1363571444815687e-01, 0.8229228471769590e+00, 0.8447303166937503e+00, 0.3544318396494814e-01, 0.4231334180394086e-01, 0.1008469291521964e+01, 0.9661590126100381e+00, 0.3062892015595061e-05, 0.9665775484099522e-06, 0.3048973339666884e+00, 0.3049033804612119e+00, 0.7013072071892081e-05, 0.2341758973227438e-05, 0.1018113345824760e+00, 0.1018089928235306e+00, 0.2786425742647189e-13, 0.7379175991216932e-30, 0.1093987140067686e+00}, + tauOut: 0.10180650470263587, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2269772292606755e+01, 0.2371578794670729e+01, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.9725961809002787e+00, 0.1074266900775131e+01, 0.2207509801202943e-03, 0.1357860766428602e-03, 0.1480145783712083e+01, 0.1581175077027170e+01, 0.1146359002276934e-02, 0.9979623676695663e-03, 0.1656535258559212e+01, 0.1700242913771285e+01, 0.4079180092843103e-01, 0.5924520849284005e-01, 0.1201590880509993e+01, 0.1140565764170461e+01, 0.2425529321011353e+00, 0.2036234219705987e+00, 0.1111437017432560e+01, 0.1431315653342504e+01, 0.1749237822536809e-01, 0.2448080089382756e-01, 0.8195211115924584e+00, 0.7941601570521848e+00, 0.2289566278084960e+00, 0.1446598374682775e+00, 0.1450453280026662e+01, 0.1297075908641373e+01, 0.3396444831847066e+00, 0.4841405038964208e+00, 0.6506576141124067e+00, 0.1017552654060970e+01, 0.1175810411452473e+00, 0.7455594793877945e-01, 0.1262214585027166e+01, 0.1026142136362350e+01, 0.5250426092262996e+00, 0.4554599945126984e+00, 0.9055103985570538e+00, 0.1455048625807853e+01, 0.8721019179982008e-01, 0.7731088667813635e-01, 
0.8460758777494440e+00, 0.1021456859803743e+01, 0.1361402882064932e-01, 0.1363571444815687e-01, 0.7716231249744060e+00, 0.8447303166937503e+00, 0.5298106707064092e-01, 0.4231334180394086e-01, 0.8113724074143097e+00, 0.9661590126100381e+00, 0.3632274887524200e-06, 0.9665775484099522e-06, 0.2030988542900605e+00, 0.3049033804612119e+00, 0.1173872316183806e-05, 0.2341758973227438e-05, 0.1314248578535016e-05, 0.1018089928235306e+00, 0.1248859578385863e-08, 0.2638661645752538e-08}, + i0: 1, + n0: 16, + pp: 0, + n0in: 16, + dmin: 1.3142485785350155e-006, + dmin1: 0.20309651253108729, + dmin2: 0.57610166617362735, + dn: 1.3142485785350155e-006, + dn1: 0.20309651253108729, + dn2: 0.81137144083676127, + tau: 0.10180650470263587, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.2269772292606755e+01, 0.2371578794670729e+01, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.9725961809002787e+00, 0.1074266900775131e+01, 0.2207509801202943e-03, 0.1357860766428602e-03, 0.1480145783712083e+01, 0.1581175077027170e+01, 0.1146359002276934e-02, 0.9979623676695663e-03, 0.1656535258559212e+01, 0.1700242913771285e+01, 0.4079180092843103e-01, 0.5924520849284005e-01, 0.1201590880509993e+01, 0.1140565764170461e+01, 0.2425529321011353e+00, 0.2036234219705987e+00, 0.1111437017432560e+01, 0.1431315653342504e+01, 0.1749237822536809e-01, 0.2448080089382756e-01, 0.8195211115924584e+00, 0.7941601570521848e+00, 0.2289566278084960e+00, 0.1446598374682775e+00, 0.1450453280026662e+01, 0.1297075908641373e+01, 0.3396444831847066e+00, 0.4841405038964208e+00, 0.6506576141124067e+00, 0.1017552654060970e+01, 0.1175810411452473e+00, 0.7455594793877945e-01, 0.1262214585027166e+01, 0.1026142136362350e+01, 0.5250426092262996e+00, 0.4554599945126984e+00, 0.9055103985570538e+00, 0.1455048625807853e+01, 0.8721019179982008e-01, 0.7731088667813635e-01, 0.8460758777494440e+00, 0.1021456859803743e+01, 0.1361402882064932e-01, 0.1363571444815687e-01, 0.7716231249744060e+00, 0.8447303166937503e+00, 0.5298106707064092e-01, 0.4231334180394086e-01, 0.8113724074143097e+00, 0.9661590126100381e+00, 0.3632274887524200e-06, 0.9665775484099522e-06, 0.2030988542900605e+00, 0.3049033804612119e+00, 0.1173872316183806e-05, 0.2341758973227438e-05, 0.1314248578535016e-05, 0.1018089928235306e+00, 0.1248859578385863e-08, 0.2638661645752538e-08}, + tauOut: 1.3142409824136984e-006, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2269772292606755e+01, 0.2269770979614632e+01, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.9725961809002787e+00, 0.9728156171042807e+00, 0.2207509801202943e-03, 0.3358741643642203e-03, 0.1480145783712083e+01, 0.1480954954309013e+01, 0.1146359002276934e-02, 0.1282269997958535e-02, 0.1656535258559212e+01, 0.1696043475248702e+01, 0.4079180092843103e-01, 0.2889964597634762e-01, 0.1201590880509993e+01, 0.1415242852393798e+01, 0.2425529321011353e+00, 0.1904848393814713e+00, 0.1111437017432560e+01, 0.9384432420354740e+00, 0.1749237822536809e-01, 0.1527569554079381e-01, 0.8195211115924584e+00, 0.1033200729619178e+01, 0.2289566278084960e+00, 0.3214195289148511e+00, 0.1450453280026662e+01, 0.1468676920055535e+01, 0.3396444831847066e+00, 0.1504703083827628e+00, 0.6506576141124067e+00, 0.6177670326339089e+00, 0.1175810411452473e+00, 0.2402402478867145e+00, 0.1262214585027166e+01, 0.1547015632125768e+01, 0.5250426092262996e+00, 0.3073217441808570e+00, 0.9055103985570538e+00, 0.6853975319350345e+00, 0.8721019179982008e-01, 0.1076549537133787e+00, 0.8460758777494440e+00, 0.7520336386157322e+00, 0.1361402882064932e-01, 
0.1396865635082151e-01, 0.7716231249744060e+00, 0.8106342214532430e+00, 0.5298106707064092e-01, 0.5302931309687425e-01, 0.8113724074143097e+00, 0.7583421433039418e+00, 0.3632274887524200e-06, 0.9727942389020712e-07, 0.2030988542900605e+00, 0.2030986166419704e+00, 0.1173872316183806e-05, 0.7596112905317353e-11, 0.1314248578535016e-05, 0.8411884199182457e-17, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.1093987140067686e+00}, + i0: 1, + n0: 16, + pp: 1, + n0in: 16, + dmin: 8.4118841991824567e-018, + dmin1: 0.20309744276965425, + dmin2: 0.50018599148866161, + dn: 8.4118841991824567e-018, + dn1: 0.20309744276965425, + dn2: 0.75834178007645303, + tau: 1.3142409824136984e-006, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2269772292606755e+01, 0.2269770979614632e+01, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.9725961809002787e+00, 0.9728156171042807e+00, 0.2207509801202943e-03, 0.3358741643642203e-03, 0.1480145783712083e+01, 0.1480954954309013e+01, 0.1146359002276934e-02, 0.1282269997958535e-02, 0.1656535258559212e+01, 0.1696043475248702e+01, 0.4079180092843103e-01, 0.2889964597634762e-01, 0.1201590880509993e+01, 0.1415242852393798e+01, 0.2425529321011353e+00, 0.1904848393814713e+00, 0.1111437017432560e+01, 0.9384432420354740e+00, 0.1749237822536809e-01, 0.1527569554079381e-01, 0.8195211115924584e+00, 0.1033200729619178e+01, 0.2289566278084960e+00, 0.3214195289148511e+00, 0.1450453280026662e+01, 0.1468676920055535e+01, 0.3396444831847066e+00, 0.1504703083827628e+00, 0.6506576141124067e+00, 0.6177670326339089e+00, 0.1175810411452473e+00, 0.2402402478867145e+00, 0.1262214585027166e+01, 0.1547015632125768e+01, 0.5250426092262996e+00, 0.3073217441808570e+00, 0.9055103985570538e+00, 0.6853975319350345e+00, 0.8721019179982008e-01, 0.1076549537133787e+00, 0.8460758777494440e+00, 0.7520336386157322e+00, 0.1361402882064932e-01, 0.1396865635082151e-01, 0.7716231249744060e+00, 0.8106342214532430e+00, 0.5298106707064092e-01, 0.5302931309687425e-01, 0.8113724074143097e+00, 0.7583421433039418e+00, 0.3632274887524200e-06, 0.9727942389020712e-07, 0.2030988542900605e+00, 0.2030986166419704e+00, 0.1173872316183806e-05, 0.7596112905317353e-11, 0.1314248578535016e-05, 0.8411884199182457e-17, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.1093987140067686e+00}, + tauOut: 8.4118841988678429e-018, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2269770980149768e+01, 0.2269770979614632e+01, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.9731514910392876e+00, 0.9728156171042807e+00, 0.5111377954200868e-03, 0.3358741643642203e-03, 0.1481726086511552e+01, 0.1480954954309013e+01, 0.1467737987028945e-02, 0.1282269997958535e-02, 0.1723475383238021e+01, 0.1696043475248702e+01, 0.2373112943910766e-01, 0.2889964597634762e-01, 0.1581996562336162e+01, 0.1415242852393798e+01, 0.1129959536471923e+00, 0.1904848393814713e+00, 0.8407229839290754e+00, 0.9384432420354740e+00, 0.1877296098701645e-01, 0.1527569554079381e-01, 0.1335847297547013e+01, 0.1033200729619178e+01, 0.3533797947109679e+00, 0.3214195289148511e+00, 0.1265767433727330e+01, 0.1468676920055535e+01, 0.7343813202351114e-01, 0.1504703083827628e+00, 0.7845691484971122e+00, 0.6177670326339089e+00, 0.4737063898809231e+00, 0.2402402478867145e+00, 0.1380630986425702e+01, 0.1547015632125768e+01, 0.1525661578238559e+00, 0.3073217441808570e+00, 0.6404863278245572e+00, 0.6853975319350345e+00, 0.1264041760751794e+00, 0.1076549537133787e+00, 0.6395981188913744e+00, 0.7520336386157322e+00, 0.1770404028911661e-01, 
0.1396865635082151e-01, 0.8459594942610007e+00, 0.8106342214532430e+00, 0.4753698401003136e-01, 0.5302931309687425e-01, 0.7108052565733343e+00, 0.7583421433039418e+00, 0.2779568135873871e-07, 0.9727942389020712e-07, 0.2030985888538852e+00, 0.2030986166419704e+00, 0.3146138162949754e-27, 0.7596112905317353e-11, 0.8411884198867843e-17, 0.8411884199182457e-17, 0.2293573303077261e-09, 0.5351359530665278e-09}, + i0: 1, + n0: 16, + pp: 0, + n0in: 16, + dmin: 8.4118841988678429e-018, + dmin1: 0.20309858884628909, + dmin2: 0.53283137411117854, + dn: 8.4118841988678429e-018, + dn1: 0.20309858884628909, + dn2: 0.71080515929391042, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2269770980149768e+01, 0.2269770979614632e+01, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.9731514910392876e+00, 0.9728156171042807e+00, 0.5111377954200868e-03, 0.3358741643642203e-03, 0.1481726086511552e+01, 0.1480954954309013e+01, 0.1467737987028945e-02, 0.1282269997958535e-02, 0.1723475383238021e+01, 0.1696043475248702e+01, 0.2373112943910766e-01, 0.2889964597634762e-01, 0.1581996562336162e+01, 0.1415242852393798e+01, 0.1129959536471923e+00, 0.1904848393814713e+00, 0.8407229839290754e+00, 0.9384432420354740e+00, 0.1877296098701645e-01, 0.1527569554079381e-01, 0.1335847297547013e+01, 0.1033200729619178e+01, 0.3533797947109679e+00, 0.3214195289148511e+00, 0.1265767433727330e+01, 0.1468676920055535e+01, 0.7343813202351114e-01, 0.1504703083827628e+00, 0.7845691484971122e+00, 0.6177670326339089e+00, 0.4737063898809231e+00, 0.2402402478867145e+00, 0.1380630986425702e+01, 0.1547015632125768e+01, 0.1525661578238559e+00, 0.3073217441808570e+00, 0.6404863278245572e+00, 0.6853975319350345e+00, 0.1264041760751794e+00, 0.1076549537133787e+00, 0.6395981188913744e+00, 0.7520336386157322e+00, 0.1770404028911661e-01, 0.1396865635082151e-01, 0.8459594942610007e+00, 0.8106342214532430e+00, 0.4753698401003136e-01, 0.5302931309687425e-01, 0.7108052565733343e+00, 0.7583421433039418e+00, 0.2779568135873871e-07, 0.9727942389020712e-07, 0.2030985888538852e+00, 0.2030986166419704e+00, 0.3146138162949754e-27, 0.7596112905317353e-11, 0.8411884198867843e-17, 0.8411884199182457e-17, 0.2293573303077261e-09, 0.5351359530665278e-09}, + tauOut: 8.4118841988678429e-018, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2269770980149768e+01, 0.2269770980379126e+01, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.9731514910392876e+00, 0.9736626287363720e+00, 0.5111377954200868e-03, 0.7778528033461282e-03, 0.1481726086511552e+01, 0.1482415971695234e+01, 0.1467737987028945e-02, 0.1706410574351102e-02, 0.1723475383238021e+01, 0.1745500102102777e+01, 0.2373112943910766e-01, 0.2150819994097728e-01, 0.1581996562336162e+01, 0.1673484316042377e+01, 0.1129959536471923e+00, 0.5676676764251993e-01, 0.8407229839290754e+00, 0.8027291772735718e+00, 0.1877296098701645e-01, 0.3124068479314146e-01, 0.1335847297547013e+01, 0.1657986407464839e+01, 0.3533797947109679e+00, 0.2697830536296953e+00, 0.1265767433727330e+01, 0.1069422512121146e+01, 0.7343813202351114e-01, 0.5387701498318342e-01, 0.7845691484971122e+00, 0.1204398523394852e+01, 0.4737063898809231e+00, 0.5430210247136315e+00, 0.1380630986425702e+01, 0.9901761195359265e+00, 0.1525661578238559e+00, 0.9868601781741700e-01, 0.6404863278245572e+00, 0.6682044860823195e+00, 0.1264041760751794e+00, 0.1209927124430272e+00, 0.6395981188913744e+00, 0.5363094467374639e+00, 0.1770404028911661e-01, 0.2792585709699239e-01, 0.8459594942610007e+00, 0.8655706211740396e+00, 
0.4753698401003136e-01, 0.3903729781186600e-01, 0.7108052565733343e+00, 0.6717679865571495e+00, 0.2779568135873871e-07, 0.8403591378512072e-08, 0.2030985888538852e+00, 0.2030985804502939e+00, 0.3146138162949754e-27, 0.1303059324279677e-43, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + n0in: 16, + dmin: 8.4118841988678429e-018, + dmin1: 0.20309858045029386, + dmin2: 0.51860540644834729, + dn: 8.4118841988678429e-018, + dn1: 0.20309858045029386, + dn2: 0.67176795876146822, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2269770980149768e+01, 0.2269770980379126e+01, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.9731514910392876e+00, 0.9736626287363720e+00, 0.5111377954200868e-03, 0.7778528033461282e-03, 0.1481726086511552e+01, 0.1482415971695234e+01, 0.1467737987028945e-02, 0.1706410574351102e-02, 0.1723475383238021e+01, 0.1745500102102777e+01, 0.2373112943910766e-01, 0.2150819994097728e-01, 0.1581996562336162e+01, 0.1673484316042377e+01, 0.1129959536471923e+00, 0.5676676764251993e-01, 0.8407229839290754e+00, 0.8027291772735718e+00, 0.1877296098701645e-01, 0.3124068479314146e-01, 0.1335847297547013e+01, 0.1657986407464839e+01, 0.3533797947109679e+00, 0.2697830536296953e+00, 0.1265767433727330e+01, 0.1069422512121146e+01, 0.7343813202351114e-01, 0.5387701498318342e-01, 0.7845691484971122e+00, 0.1204398523394852e+01, 0.4737063898809231e+00, 0.5430210247136315e+00, 0.1380630986425702e+01, 0.9901761195359265e+00, 0.1525661578238559e+00, 0.9868601781741700e-01, 0.6404863278245572e+00, 0.6682044860823195e+00, 0.1264041760751794e+00, 0.1209927124430272e+00, 0.6395981188913744e+00, 0.5363094467374639e+00, 0.1770404028911661e-01, 0.2792585709699239e-01, 0.8459594942610007e+00, 0.8655706211740396e+00, 0.4753698401003136e-01, 0.3903729781186600e-01, 0.7108052565733343e+00, 0.6717679865571495e+00, 0.2779568135873871e-07, 0.8403591378512072e-08, 0.2030985888538852e+00, 0.2030985804502939e+00, 0.3146138162949754e-27, 0.1303059324279677e-43, 0.2112065329503869e+00}, + tauOut: 0.10154929022514693, + ttypeOut: -9, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2168221690252314e+01, 0.2269770980379126e+01, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.8728911912704126e+00, 0.9736626287363720e+00, 0.1321013925721922e-02, 0.7778528033461282e-03, 0.1381252078118717e+01, 0.1482415971695234e+01, 0.2156405683614187e-02, 0.1706410574351102e-02, 0.1663302606134993e+01, 0.1745500102102777e+01, 0.2163985983955575e-01, 0.2150819994097728e-01, 0.1607061933620194e+01, 0.1673484316042377e+01, 0.2835506195054301e-01, 0.5676676764251993e-01, 0.7040655098910235e+00, 0.8027291772735718e+00, 0.7356791380810462e-01, 0.3124068479314146e-01, 0.1752652257061283e+01, 0.1657986407464839e+01, 0.1646145547572216e+00, 0.2697830536296953e+00, 0.8571356821219610e+00, 0.1069422512121146e+01, 0.7570493055431493e-01, 0.5387701498318342e-01, 0.1570165327329021e+01, 0.1204398523394852e+01, 0.3424393862982660e+00, 0.5430210247136315e+00, 0.6448734608299306e+00, 0.9901761195359265e+00, 0.1022564019526126e+00, 0.9868601781741700e-01, 0.5853915063475871e+00, 0.6682044860823195e+00, 0.1108480973262629e+00, 0.1209927124430272e+00, 0.3518379162830466e+00, 0.5363094467374639e+00, 0.6870152520689508e-01, 0.2792585709699239e-01, 0.7343571035538636e+00, 0.8655706211740396e+00, 0.3571015630515456e-01, 0.3903729781186600e-01, 0.5345085484304394e+00, 0.6717679865571495e+00, 0.3193134112956561e-08, 0.8403591378512072e-08, 0.1015492870320128e+00, 0.2030985804502939e+00, 0.4415865678637858e-10, 
0.9833566024906726e-10}, + i0: 1, + n0: 15, + pp: 0, + n0in: 15, + dmin: 0.10154928703201281, + dmin1: 0.32391205918605420, + dmin2: 0.32391205918605420, + dn: 0.10154928703201281, + dn1: 0.53450854002684800, + dn2: 0.69531980574199759, + tau: 0.10154929022514693, + ttype: -9, + g: 0.0000000000000000, + zOut: []float64{0.2168221690252314e+01, 0.2269770980379126e+01, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.8728911912704126e+00, 0.9736626287363720e+00, 0.1321013925721922e-02, 0.7778528033461282e-03, 0.1381252078118717e+01, 0.1482415971695234e+01, 0.2156405683614187e-02, 0.1706410574351102e-02, 0.1663302606134993e+01, 0.1745500102102777e+01, 0.2163985983955575e-01, 0.2150819994097728e-01, 0.1607061933620194e+01, 0.1673484316042377e+01, 0.2835506195054301e-01, 0.5676676764251993e-01, 0.7040655098910235e+00, 0.8027291772735718e+00, 0.7356791380810462e-01, 0.3124068479314146e-01, 0.1752652257061283e+01, 0.1657986407464839e+01, 0.1646145547572216e+00, 0.2697830536296953e+00, 0.8571356821219610e+00, 0.1069422512121146e+01, 0.7570493055431493e-01, 0.5387701498318342e-01, 0.1570165327329021e+01, 0.1204398523394852e+01, 0.3424393862982660e+00, 0.5430210247136315e+00, 0.6448734608299306e+00, 0.9901761195359265e+00, 0.1022564019526126e+00, 0.9868601781741700e-01, 0.5853915063475871e+00, 0.6682044860823195e+00, 0.1108480973262629e+00, 0.1209927124430272e+00, 0.3518379162830466e+00, 0.5363094467374639e+00, 0.6870152520689508e-01, 0.2792585709699239e-01, 0.7343571035538636e+00, 0.8655706211740396e+00, 0.3571015630515456e-01, 0.3903729781186600e-01, 0.5345085484304394e+00, 0.6717679865571495e+00, 0.3193134112956561e-08, 0.8403591378512072e-08, 0.1015492870320128e+00, 0.2030985804502939e+00, 0.4415865678637858e-10, 0.9833566024906726e-10}, + tauOut: 0.10154100618138810, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2168221690252314e+01, 0.2066680684115085e+01, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.8728911912704126e+00, 0.7726711989960955e+00, 0.1321013925721922e-02, 0.2361487308570419e-02, 0.1381252078118717e+01, 0.1279505990312372e+01, 0.2156405683614187e-02, 0.2803234389363145e-02, 0.1663302606134993e+01, 0.1580598225403798e+01, 0.2163985983955575e-01, 0.2200217261925751e-01, 0.1607061933620194e+01, 0.1511873816770092e+01, 0.2835506195054301e-01, 0.1320468740761088e-01, 0.7040655098910235e+00, 0.6628877301101292e+00, 0.7356791380810462e-01, 0.1945110224949301e+00, 0.1752652257061283e+01, 0.1621214783142186e+01, 0.1646145547572216e+00, 0.8703165684534678e-01, 0.8571356821219610e+00, 0.7442679496495411e+00, 0.7570493055431493e-01, 0.1597129865933493e+00, 0.1570165327329021e+01, 0.1651350720852550e+01, 0.3424393862982660e+00, 0.1337269360034139e+00, 0.6448734608299306e+00, 0.5118619205977412e+00, 0.1022564019526126e+00, 0.1169456581236225e+00, 0.5853915063475871e+00, 0.4777529393688394e+00, 0.1108480973262629e+00, 0.8163333047984263e-01, 0.3518379162830466e+00, 0.2373651048287110e+00, 0.6870152520689508e-01, 0.2125478936639627e+00, 0.7343571035538636e+00, 0.4559783600136673e+00, 0.3571015630515456e-01, 0.4186028435717903e-01, 0.5345085484304394e+00, 0.3911072610850064e+00, 0.3193134112956561e-08, 0.8290832843879624e-09, 0.1015492870320128e+00, 0.8280021541434701e-05, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + n0in: 15, + dmin: 8.2800215414347011e-006, + dmin1: 0.16866357962181588, + dmin2: 0.16866357962181588, + dn: 8.2800215414347011e-006, + dn1: 0.39110725789187228, + dn2: 0.42026820370851276, + tau: 
0.10154100618138810, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2168221690252314e+01, 0.2066680684115085e+01, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.8728911912704126e+00, 0.7726711989960955e+00, 0.1321013925721922e-02, 0.2361487308570419e-02, 0.1381252078118717e+01, 0.1279505990312372e+01, 0.2156405683614187e-02, 0.2803234389363145e-02, 0.1663302606134993e+01, 0.1580598225403798e+01, 0.2163985983955575e-01, 0.2200217261925751e-01, 0.1607061933620194e+01, 0.1511873816770092e+01, 0.2835506195054301e-01, 0.1320468740761088e-01, 0.7040655098910235e+00, 0.6628877301101292e+00, 0.7356791380810462e-01, 0.1945110224949301e+00, 0.1752652257061283e+01, 0.1621214783142186e+01, 0.1646145547572216e+00, 0.8703165684534678e-01, 0.8571356821219610e+00, 0.7442679496495411e+00, 0.7570493055431493e-01, 0.1597129865933493e+00, 0.1570165327329021e+01, 0.1651350720852550e+01, 0.3424393862982660e+00, 0.1337269360034139e+00, 0.6448734608299306e+00, 0.5118619205977412e+00, 0.1022564019526126e+00, 0.1169456581236225e+00, 0.5853915063475871e+00, 0.4777529393688394e+00, 0.1108480973262629e+00, 0.8163333047984263e-01, 0.3518379162830466e+00, 0.2373651048287110e+00, 0.6870152520689508e-01, 0.2125478936639627e+00, 0.7343571035538636e+00, 0.4559783600136673e+00, 0.3571015630515456e-01, 0.4186028435717903e-01, 0.5345085484304394e+00, 0.3911072610850064e+00, 0.3193134112956561e-08, 0.8290832843879624e-09, 0.1015492870320128e+00, 0.8280021541434701e-05, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.2112065329503869e+00}, + tauOut: 8.2795951083136037e-006, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2066672404538628e+01, 0.2066680684115085e+01, 0.6973096395115281e-11, 0.1865101988102620e-10, 0.7750244067025847e+00, 0.7726711989960955e+00, 0.3898634844569491e-02, 0.2361487308570419e-02, 0.1278402310262057e+01, 0.1279505990312372e+01, 0.3465878671879142e-02, 0.2803234389363145e-02, 0.1599126239756068e+01, 0.1580598225403798e+01, 0.2080167773382635e-01, 0.2200217261925751e-01, 0.1504268546848768e+01, 0.1511873816770092e+01, 0.5818924606767697e-02, 0.1320468740761088e-01, 0.8515715484031834e+00, 0.6628877301101292e+00, 0.3703084558710277e+00, 0.1945110224949301e+00, 0.1337929704521397e+01, 0.1621214783142186e+01, 0.4841425717359337e-01, 0.8703165684534678e-01, 0.8555583994741887e+00, 0.7442679496495411e+00, 0.3082690272254148e+00, 0.1597129865933493e+00, 0.1476800350035441e+01, 0.1651350720852550e+01, 0.4635002036444261e-01, 0.1337269360034139e+00, 0.5824492787618127e+00, 0.5118619205977412e+00, 0.9592445892242578e-01, 0.1169456581236225e+00, 0.4634535313311479e+00, 0.4777529393688394e+00, 0.4180980990954932e-01, 0.8163333047984263e-01, 0.4080949089880160e+00, 0.2373651048287110e+00, 0.2374870106014949e+00, 0.2125478936639627e+00, 0.2603433541742431e+00, 0.4559783600136673e+00, 0.6288565043307552e-01, 0.4186028435717903e-01, 0.3282133318859058e+00, 0.3911072610850064e+00, 0.2091574834858362e-13, 0.8290832843879624e-09, 0.4264122053484397e-09, 0.8280021541434701e-05, 0.6973096395115281e-11, 0.1865101988102620e-10}, + i0: 1, + n0: 15, + pp: 0, + n0in: 15, + dmin: 4.2641220534843968e-010, + dmin1: 0.19554701532405336, + dmin2: 0.19554701532405336, + dn: 4.2641220534843968e-010, + dn1: 0.32821333105682254, + dn2: 0.21848306981706411, + tau: 8.2795951083136037e-006, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2066672404538628e+01, 0.2066680684115085e+01, 0.6973096395115281e-11, 0.1865101988102620e-10, 0.7750244067025847e+00, 0.7726711989960955e+00, 
0.3898634844569491e-02, 0.2361487308570419e-02, 0.1278402310262057e+01, 0.1279505990312372e+01, 0.3465878671879142e-02, 0.2803234389363145e-02, 0.1599126239756068e+01, 0.1580598225403798e+01, 0.2080167773382635e-01, 0.2200217261925751e-01, 0.1504268546848768e+01, 0.1511873816770092e+01, 0.5818924606767697e-02, 0.1320468740761088e-01, 0.8515715484031834e+00, 0.6628877301101292e+00, 0.3703084558710277e+00, 0.1945110224949301e+00, 0.1337929704521397e+01, 0.1621214783142186e+01, 0.4841425717359337e-01, 0.8703165684534678e-01, 0.8555583994741887e+00, 0.7442679496495411e+00, 0.3082690272254148e+00, 0.1597129865933493e+00, 0.1476800350035441e+01, 0.1651350720852550e+01, 0.4635002036444261e-01, 0.1337269360034139e+00, 0.5824492787618127e+00, 0.5118619205977412e+00, 0.9592445892242578e-01, 0.1169456581236225e+00, 0.4634535313311479e+00, 0.4777529393688394e+00, 0.4180980990954932e-01, 0.8163333047984263e-01, 0.4080949089880160e+00, 0.2373651048287110e+00, 0.2374870106014949e+00, 0.2125478936639627e+00, 0.2603433541742431e+00, 0.4559783600136673e+00, 0.6288565043307552e-01, 0.4186028435717903e-01, 0.3282133318859058e+00, 0.3911072610850064e+00, 0.2091574834858362e-13, 0.8290832843879624e-09, 0.4264122053484397e-09, 0.8280021541434701e-05, 0.6973096395115281e-11, 0.1865101988102620e-10}, + tauOut: 4.2641207498271701e-010, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2066672404538628e+01, 0.2066672404119188e+01, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.7750244067025847e+00, 0.7789230411181270e+00, 0.3898634844569491e-02, 0.6398608757305904e-02, 0.1278402310262057e+01, 0.1275469579750218e+01, 0.3465878671879142e-02, 0.4345362379476143e-02, 0.1599126239756068e+01, 0.1615582554684006e+01, 0.2080167773382635e-01, 0.1936843737632438e-01, 0.1504268546848768e+01, 0.1490719033652799e+01, 0.5818924606767697e-02, 0.3324054047451484e-02, 0.8515715484031834e+00, 0.1218555949800348e+01, 0.3703084558710277e+00, 0.4065850919906259e+00, 0.1337929704521397e+01, 0.9797588692779520e+00, 0.4841425717359337e-01, 0.4227695780870786e-01, 0.8555583994741887e+00, 0.1121550468464483e+01, 0.3082690272254148e+00, 0.4059129037098644e+00, 0.1476800350035441e+01, 0.1117237466263607e+01, 0.4635002036444261e-01, 0.2416365074306882e-01, 0.5824492787618127e+00, 0.6542100865147574e+00, 0.9592445892242578e-01, 0.6795451513972497e-01, 0.4634535313311479e+00, 0.4373088256745602e+00, 0.4180980990954932e-01, 0.3901675330591518e-01, 0.4080949089880160e+00, 0.6065651658571838e+00, 0.2374870106014949e+00, 0.1019316116273065e+00, 0.2603433541742431e+00, 0.2212973925536000e+00, 0.6288565043307552e-01, 0.9326774535516916e-01, 0.3282133318859058e+00, 0.2349455861043456e+00, 0.2091574834858362e-13, 0.3796083394336032e-22, 0.4264122053484397e-09, 0.1303656847202082e-15, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + n0in: 15, + dmin: 1.3036568472020817e-016, + dmin1: 0.15841174212052453, + dmin2: 0.15841174212052453, + dn: 1.3036568472020817e-016, + dn1: 0.23494558610432464, + dn2: 0.15841174212052453, + tau: 4.2641207498271701e-010, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2066672404538628e+01, 0.2066672404119188e+01, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.7750244067025847e+00, 0.7789230411181270e+00, 0.3898634844569491e-02, 0.6398608757305904e-02, 0.1278402310262057e+01, 0.1275469579750218e+01, 0.3465878671879142e-02, 0.4345362379476143e-02, 0.1599126239756068e+01, 0.1615582554684006e+01, 0.2080167773382635e-01, 
0.1936843737632438e-01, 0.1504268546848768e+01, 0.1490719033652799e+01, 0.5818924606767697e-02, 0.3324054047451484e-02, 0.8515715484031834e+00, 0.1218555949800348e+01, 0.3703084558710277e+00, 0.4065850919906259e+00, 0.1337929704521397e+01, 0.9797588692779520e+00, 0.4841425717359337e-01, 0.4227695780870786e-01, 0.8555583994741887e+00, 0.1121550468464483e+01, 0.3082690272254148e+00, 0.4059129037098644e+00, 0.1476800350035441e+01, 0.1117237466263607e+01, 0.4635002036444261e-01, 0.2416365074306882e-01, 0.5824492787618127e+00, 0.6542100865147574e+00, 0.9592445892242578e-01, 0.6795451513972497e-01, 0.4634535313311479e+00, 0.4373088256745602e+00, 0.4180980990954932e-01, 0.3901675330591518e-01, 0.4080949089880160e+00, 0.6065651658571838e+00, 0.2374870106014949e+00, 0.1019316116273065e+00, 0.2603433541742431e+00, 0.2212973925536000e+00, 0.6288565043307552e-01, 0.9326774535516916e-01, 0.3282133318859058e+00, 0.2349455861043456e+00, 0.2091574834858362e-13, 0.3796083394336032e-22, 0.4264122053484397e-09, 0.1303656847202082e-15, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.2112065329503869e+00}, + tauOut: 1.3036568471812905e-016, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2066672404121803e+01, 0.2066672404119188e+01, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.7853216498744473e+00, 0.7789230411181270e+00, 0.1039221422709001e-01, 0.6398608757305904e-02, 0.1269422727902604e+01, 0.1275469579750218e+01, 0.5530302475095172e-02, 0.4345362379476143e-02, 0.1629420689585235e+01, 0.1615582554684006e+01, 0.1771973219288664e-01, 0.1936843737632438e-01, 0.1476323355507364e+01, 0.1490719033652799e+01, 0.2743671176012720e-02, 0.3324054047451484e-02, 0.1622397370614960e+01, 0.1218555949800348e+01, 0.2455350071499520e+00, 0.4065850919906259e+00, 0.7765008199367077e+00, 0.9797588692779520e+00, 0.6106335063429082e-01, 0.4227695780870786e-01, 0.1466400021540057e+01, 0.1121550468464483e+01, 0.3092615230516922e+00, 0.4059129037098644e+00, 0.8321395939549830e+00, 0.1117237466263607e+01, 0.1899693772291602e-01, 0.2416365074306882e-01, 0.7031676639315663e+00, 0.6542100865147574e+00, 0.4226176876348708e-01, 0.6795451513972497e-01, 0.4340638102169883e+00, 0.4373088256745602e+00, 0.5452240634477346e-01, 0.3901675330591518e-01, 0.6539743711397168e+00, 0.6065651658571838e+00, 0.3449248298919955e-01, 0.1019316116273065e+00, 0.2800726549195695e+00, 0.2212973925536000e+00, 0.7823985923721809e-01, 0.9326774535516916e-01, 0.1567057268671274e+00, 0.2349455861043456e+00, 0.3158014839988858e-37, 0.3796083394336032e-22}, + i0: 1, + n0: 14, + pp: 0, + n0in: 15, + dmin: 2.0791168714198411e-027, + dmin1: 0.15670572686712736, + dmin2: 0.18680490956440032, + dn: 2.0791168714198411e-027, + dn1: 0.15670572686712736, + dn2: 0.18680490956440032, + tau: 1.3036568471812905e-016, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2066672404121803e+01, 0.2066672404119188e+01, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.7853216498744473e+00, 0.7789230411181270e+00, 0.1039221422709001e-01, 0.6398608757305904e-02, 0.1269422727902604e+01, 0.1275469579750218e+01, 0.5530302475095172e-02, 0.4345362379476143e-02, 0.1629420689585235e+01, 0.1615582554684006e+01, 0.1771973219288664e-01, 0.1936843737632438e-01, 0.1476323355507364e+01, 0.1490719033652799e+01, 0.2743671176012720e-02, 0.3324054047451484e-02, 0.1622397370614960e+01, 0.1218555949800348e+01, 0.2455350071499520e+00, 0.4065850919906259e+00, 0.7765008199367077e+00, 0.9797588692779520e+00, 0.6106335063429082e-01, 0.4227695780870786e-01, 
0.1466400021540057e+01, 0.1121550468464483e+01, 0.3092615230516922e+00, 0.4059129037098644e+00, 0.8321395939549830e+00, 0.1117237466263607e+01, 0.1899693772291602e-01, 0.2416365074306882e-01, 0.7031676639315663e+00, 0.6542100865147574e+00, 0.4226176876348708e-01, 0.6795451513972497e-01, 0.4340638102169883e+00, 0.4373088256745602e+00, 0.5452240634477346e-01, 0.3901675330591518e-01, 0.6539743711397168e+00, 0.6065651658571838e+00, 0.3449248298919955e-01, 0.1019316116273065e+00, 0.2800726549195695e+00, 0.2212973925536000e+00, 0.7823985923721809e-01, 0.9326774535516916e-01, 0.1567057268671274e+00, 0.2349455861043456e+00, 0.3158014839988858e-37, 0.3796083394336032e-22}, + tauOut: 5.2226904068357879e-002, + ttypeOut: -8, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2066672404121803e+01, 0.2014445500054431e+01, 0.9855809897129084e-12, 0.3842238913414725e-12, 0.7853216498744473e+00, 0.7434869600327952e+00, 0.1039221422709001e-01, 0.1774357001838869e-01, 0.1269422727902604e+01, 0.1204982556290953e+01, 0.5530302475095172e-02, 0.7478273627729330e-02, 0.1629420689585235e+01, 0.1587435244082034e+01, 0.1771973219288664e-01, 0.1647944669694028e-01, 0.1476323355507364e+01, 0.1410360675918078e+01, 0.2743671176012720e-02, 0.3156160674217245e-02, 0.1622397370614960e+01, 0.1812549313022337e+01, 0.2455350071499520e+00, 0.1051878329628395e+00, 0.7765008199367077e+00, 0.6801494335398012e+00, 0.6106335063429082e-01, 0.1316523902981273e+00, 0.1466400021540057e+01, 0.1591782250225264e+01, 0.3092615230516922e+00, 0.1616733433116970e+00, 0.8321395939549830e+00, 0.6372362842978442e+00, 0.1899693772291602e-01, 0.2096244776016672e-01, 0.7031676639315663e+00, 0.6722400808665288e+00, 0.4226176876348708e-01, 0.2728832287468248e-01, 0.4340638102169883e+00, 0.4090709896187213e+00, 0.5452240634477346e-01, 0.8716398206477828e-01, 0.6539743711397168e+00, 0.5490759679957803e+00, 0.3449248298919955e-01, 0.1759392479116377e-01, 0.2800726549195695e+00, 0.2884916852972659e+00, 0.7823985923721809e-01, 0.4249908970207047e-01, 0.1567057268671274e+00, 0.6197973309669901e-01, 0.3158014839988858e-37, 0.3842238913414725e-12, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + n0in: 14, + dmin: 6.1979733096699005e-002, + dmin1: 0.21025182606004778, + dmin2: 0.35454858327394784, + dn: 6.1979733096699005e-002, + dn1: 0.21025182606004778, + dn2: 0.51458348500658069, + tau: 5.2226904068357879e-002, + ttype: -8, + g: 0.0000000000000000, + zOut: []float64{0.2066672404121803e+01, 0.2014445500054431e+01, 0.9855809897129084e-12, 0.3842238913414725e-12, 0.7853216498744473e+00, 0.7434869600327952e+00, 0.1039221422709001e-01, 0.1774357001838869e-01, 0.1269422727902604e+01, 0.1204982556290953e+01, 0.5530302475095172e-02, 0.7478273627729330e-02, 0.1629420689585235e+01, 0.1587435244082034e+01, 0.1771973219288664e-01, 0.1647944669694028e-01, 0.1476323355507364e+01, 0.1410360675918078e+01, 0.2743671176012720e-02, 0.3156160674217245e-02, 0.1622397370614960e+01, 0.1812549313022337e+01, 0.2455350071499520e+00, 0.1051878329628395e+00, 0.7765008199367077e+00, 0.6801494335398012e+00, 0.6106335063429082e-01, 0.1316523902981273e+00, 0.1466400021540057e+01, 0.1591782250225264e+01, 0.3092615230516922e+00, 0.1616733433116970e+00, 0.8321395939549830e+00, 0.6372362842978442e+00, 0.1899693772291602e-01, 0.2096244776016672e-01, 0.7031676639315663e+00, 0.6722400808665288e+00, 0.4226176876348708e-01, 0.2728832287468248e-01, 0.4340638102169883e+00, 0.4090709896187213e+00, 0.5452240634477346e-01, 0.8716398206477828e-01, 0.6539743711397168e+00, 
0.5490759679957803e+00, 0.3449248298919955e-01, 0.1759392479116377e-01, 0.2800726549195695e+00, 0.2884916852972659e+00, 0.7823985923721809e-01, 0.4249908970207047e-01, 0.1567057268671274e+00, 0.6197973309669901e-01, 0.3158014839988858e-37, 0.3842238913414725e-12, 0.4143051093784424e+00}, + tauOut: 4.3992746597899146e-002, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1970452753456916e+01, 0.2014445500054431e+01, 0.1449745254963753e-12, 0.3842238913414725e-12, 0.7172377834531399e+00, 0.7434869600327952e+00, 0.2980976860358389e-01, 0.1774357001838869e-01, 0.1138658314717199e+01, 0.1204982556290953e+01, 0.1042566937606312e-01, 0.7478273627729330e-02, 0.1549496274805013e+01, 0.1587435244082034e+01, 0.1499968987352189e-01, 0.1647944669694028e-01, 0.1354524400120875e+01, 0.1410360675918078e+01, 0.4223398900256122e-02, 0.3156160674217245e-02, 0.1869521000487022e+01, 0.1812549313022337e+01, 0.3826832915293116e-01, 0.1051878329628395e+00, 0.7295407480870981e+00, 0.6801494335398012e+00, 0.2872518617030954e+00, 0.1316523902981273e+00, 0.1422210985235967e+01, 0.1591782250225264e+01, 0.7243940711431240e-01, 0.1616733433116970e+00, 0.5417665783457993e+00, 0.6372362842978442e+00, 0.2601082853889214e-01, 0.2096244776016672e-01, 0.6295248286044199e+00, 0.6722400808665288e+00, 0.1773220171177086e-01, 0.2728832287468248e-01, 0.4345100233738295e+00, 0.4090709896187213e+00, 0.1101462457758059e+00, 0.8716398206477828e-01, 0.4125309004132391e+00, 0.5490759679957803e+00, 0.1230380805149813e-01, 0.1759392479116377e-01, 0.2746942203499391e+00, 0.2884916852972659e+00, 0.9589143278047064e-02, 0.4249908970207047e-01, 0.8397843220752800e-02, 0.6197973309669901e-01, 0.1449745254963753e-12, 0.3842238913414725e-12}, + i0: 1, + n0: 14, + pp: 0, + n0in: 14, + dmin: 8.3978432207528000e-003, + dmin1: 0.23219513064786862, + dmin2: 0.34734604130905122, + dn: 8.3978432207528000e-003, + dn1: 0.23219513064786862, + dn2: 0.39493697562207530, + tau: 4.3992746597899146e-002, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1970452753456916e+01, 0.2014445500054431e+01, 0.1449745254963753e-12, 0.3842238913414725e-12, 0.7172377834531399e+00, 0.7434869600327952e+00, 0.2980976860358389e-01, 0.1774357001838869e-01, 0.1138658314717199e+01, 0.1204982556290953e+01, 0.1042566937606312e-01, 0.7478273627729330e-02, 0.1549496274805013e+01, 0.1587435244082034e+01, 0.1499968987352189e-01, 0.1647944669694028e-01, 0.1354524400120875e+01, 0.1410360675918078e+01, 0.4223398900256122e-02, 0.3156160674217245e-02, 0.1869521000487022e+01, 0.1812549313022337e+01, 0.3826832915293116e-01, 0.1051878329628395e+00, 0.7295407480870981e+00, 0.6801494335398012e+00, 0.2872518617030954e+00, 0.1316523902981273e+00, 0.1422210985235967e+01, 0.1591782250225264e+01, 0.7243940711431240e-01, 0.1616733433116970e+00, 0.5417665783457993e+00, 0.6372362842978442e+00, 0.2601082853889214e-01, 0.2096244776016672e-01, 0.6295248286044199e+00, 0.6722400808665288e+00, 0.1773220171177086e-01, 0.2728832287468248e-01, 0.4345100233738295e+00, 0.4090709896187213e+00, 0.1101462457758059e+00, 0.8716398206477828e-01, 0.4125309004132391e+00, 0.5490759679957803e+00, 0.1230380805149813e-01, 0.1759392479116377e-01, 0.2746942203499391e+00, 0.2884916852972659e+00, 0.9589143278047064e-02, 0.4249908970207047e-01, 0.8397843220752800e-02, 0.6197973309669901e-01, 0.1449745254963753e-12, 0.3842238913414725e-12}, + tauOut: 8.0121275381568099e-003, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1970452753456916e+01, 0.1962440625918904e+01, 
0.1449745254963753e-12, 0.5298565773193886e-13, 0.7172377834531399e+00, 0.7390354245185139e+00, 0.2980976860358389e-01, 0.4592897681783073e-01, 0.1138658314717199e+01, 0.1095142879737275e+01, 0.1042566937606312e-01, 0.1475107600976594e-01, 0.1549496274805013e+01, 0.1541732761130612e+01, 0.1499968987352189e-01, 0.1317831886314191e-01, 0.1354524400120875e+01, 0.1337557352619832e+01, 0.4223398900256122e-02, 0.5903098601340335e-02, 0.1869521000487022e+01, 0.1893874103500456e+01, 0.3826832915293116e-01, 0.1474137347708128e-01, 0.7295407480870981e+00, 0.9940391087749554e+00, 0.2872518617030954e+00, 0.4109825756725979e+00, 0.1422210985235967e+01, 0.1075655689139524e+01, 0.7243940711431240e-01, 0.3648495529374628e-01, 0.5417665783457993e+00, 0.5232803240527883e+00, 0.2601082853889214e-01, 0.3129195122603765e-01, 0.6295248286044199e+00, 0.6079529515519964e+00, 0.1773220171177086e-01, 0.1267338099203561e-01, 0.4345100233738295e+00, 0.5239707606194429e+00, 0.1101462457758059e+00, 0.8671997249104711e-01, 0.4125309004132391e+00, 0.3301026084355332e+00, 0.1230380805149813e-01, 0.1023858907404432e-01, 0.2746942203499391e+00, 0.2660326470157850e+00, 0.9589143278047064e-02, 0.3027001489241909e-03, 0.8397843220752800e-02, 0.8301553367179998e-04, 0.1449745254963753e-12, 0.5298565773193886e-13, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + n0in: 14, + dmin: 8.3015533671799979e-005, + dmin1: 0.25644350373773794, + dmin2: 0.31779880038403513, + dn: 8.3015533671799979e-005, + dn1: 0.25644350373773794, + dn2: 0.31779880038403513, + tau: 8.0121275381568099e-003, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1970452753456916e+01, 0.1962440625918904e+01, 0.1449745254963753e-12, 0.5298565773193886e-13, 0.7172377834531399e+00, 0.7390354245185139e+00, 0.2980976860358389e-01, 0.4592897681783073e-01, 0.1138658314717199e+01, 0.1095142879737275e+01, 0.1042566937606312e-01, 0.1475107600976594e-01, 0.1549496274805013e+01, 0.1541732761130612e+01, 0.1499968987352189e-01, 0.1317831886314191e-01, 0.1354524400120875e+01, 0.1337557352619832e+01, 0.4223398900256122e-02, 0.5903098601340335e-02, 0.1869521000487022e+01, 0.1893874103500456e+01, 0.3826832915293116e-01, 0.1474137347708128e-01, 0.7295407480870981e+00, 0.9940391087749554e+00, 0.2872518617030954e+00, 0.4109825756725979e+00, 0.1422210985235967e+01, 0.1075655689139524e+01, 0.7243940711431240e-01, 0.3648495529374628e-01, 0.5417665783457993e+00, 0.5232803240527883e+00, 0.2601082853889214e-01, 0.3129195122603765e-01, 0.6295248286044199e+00, 0.6079529515519964e+00, 0.1773220171177086e-01, 0.1267338099203561e-01, 0.4345100233738295e+00, 0.5239707606194429e+00, 0.1101462457758059e+00, 0.8671997249104711e-01, 0.4125309004132391e+00, 0.3301026084355332e+00, 0.1230380805149813e-01, 0.1023858907404432e-01, 0.2746942203499391e+00, 0.2660326470157850e+00, 0.9589143278047064e-02, 0.3027001489241909e-03, 0.8397843220752800e-02, 0.8301553367179998e-04, 0.1449745254963753e-12, 0.5298565773193886e-13, 0.4143051093784424e+00}, + tauOut: 8.2898056486573887e-005, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1962357727862470e+01, 0.1962440625918904e+01, 0.1995470932711637e-13, 0.5298565773193886e-13, 0.7848815032798381e+00, 0.7390354245185139e+00, 0.6408456783027588e-01, 0.4592897681783073e-01, 0.1045726489860278e+01, 0.1095142879737275e+01, 0.2174776805091989e-01, 0.1475107600976594e-01, 0.1533080413886347e+01, 0.1541732761130612e+01, 0.1149760777771624e-01, 0.1317831886314191e-01, 0.1331879945386970e+01, 0.1337557352619832e+01, 
0.8393943921304423e-02, 0.5903098601340335e-02, 0.1900138634999746e+01, 0.1893874103500456e+01, 0.7711806645770664e-02, 0.1474137347708128e-01, 0.1397226979745296e+01, 0.9940391087749554e+00, 0.3163950825942626e+00, 0.4109825756725979e+00, 0.7956626637825215e+00, 0.1075655689139524e+01, 0.2399491656225486e-01, 0.3648495529374628e-01, 0.5304944606600845e+00, 0.5232803240527883e+00, 0.3586094769777509e-01, 0.3129195122603765e-01, 0.5846824867897703e+00, 0.6079529515519964e+00, 0.1135741402906866e-01, 0.1267338099203561e-01, 0.5992504210249349e+00, 0.5239707606194429e+00, 0.4777049480214081e-01, 0.8671997249104711e-01, 0.2924878046509502e+00, 0.3301026084355332e+00, 0.9312521444528056e-02, 0.1023858907404432e-01, 0.2569399276636945e+00, 0.2660326470157850e+00, 0.9780034825247492e-07, 0.3027001489241909e-03, 0.1967683697362142e-07, 0.8301553367179998e-04, 0.1995470932711637e-13, 0.5298565773193886e-13}, + i0: 1, + n0: 14, + pp: 0, + n0in: 14, + dmin: 1.9676836973621424e-008, + dmin1: 0.25663722751477031, + dmin2: 0.28224921557690585, + dn: 1.9676836973621424e-008, + dn1: 0.25663722751477031, + dn2: 0.28224921557690585, + tau: 8.2898056486573887e-005, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1962357727862470e+01, 0.1962440625918904e+01, 0.1995470932711637e-13, 0.5298565773193886e-13, 0.7848815032798381e+00, 0.7390354245185139e+00, 0.6408456783027588e-01, 0.4592897681783073e-01, 0.1045726489860278e+01, 0.1095142879737275e+01, 0.2174776805091989e-01, 0.1475107600976594e-01, 0.1533080413886347e+01, 0.1541732761130612e+01, 0.1149760777771624e-01, 0.1317831886314191e-01, 0.1331879945386970e+01, 0.1337557352619832e+01, 0.8393943921304423e-02, 0.5903098601340335e-02, 0.1900138634999746e+01, 0.1893874103500456e+01, 0.7711806645770664e-02, 0.1474137347708128e-01, 0.1397226979745296e+01, 0.9940391087749554e+00, 0.3163950825942626e+00, 0.4109825756725979e+00, 0.7956626637825215e+00, 0.1075655689139524e+01, 0.2399491656225486e-01, 0.3648495529374628e-01, 0.5304944606600845e+00, 0.5232803240527883e+00, 0.3586094769777509e-01, 0.3129195122603765e-01, 0.5846824867897703e+00, 0.6079529515519964e+00, 0.1135741402906866e-01, 0.1267338099203561e-01, 0.5992504210249349e+00, 0.5239707606194429e+00, 0.4777049480214081e-01, 0.8671997249104711e-01, 0.2924878046509502e+00, 0.3301026084355332e+00, 0.9312521444528056e-02, 0.1023858907404432e-01, 0.2569399276636945e+00, 0.2660326470157850e+00, 0.9780034825247492e-07, 0.3027001489241909e-03, 0.1967683697362142e-07, 0.8301553367179998e-04, 0.1995470932711637e-13, 0.5298565773193886e-13}, + tauOut: 1.9676827722764296e-008, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1962357727862470e+01, 0.1962357708185662e+01, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.7848815032798381e+00, 0.8489660514332782e+00, 0.6408456783027588e-01, 0.7893711422056097e-01, 0.1045726489860278e+01, 0.9885371240138099e+00, 0.2174776805091989e-01, 0.3372769361380378e-01, 0.1533080413886347e+01, 0.1510850308373431e+01, 0.1149760777771624e-01, 0.1013563894066502e-01, 0.1331879945386970e+01, 0.1330138230690781e+01, 0.8393943921304423e-02, 0.1199097716077874e-01, 0.1900138634999746e+01, 0.1895859444807910e+01, 0.7711806645770664e-02, 0.5683514322519618e-02, 0.1397226979745296e+01, 0.1707938528340212e+01, 0.3163950825942626e+00, 0.1473962616612956e+00, 0.7956626637825215e+00, 0.6722612990066530e+00, 0.2399491656225486e-01, 0.1893485515094503e-01, 0.5304944606600845e+00, 0.5474205335300870e+00, 0.3586094769777509e-01, 0.3830193935796279e-01, 
0.5846824867897703e+00, 0.5577379417840483e+00, 0.1135741402906866e-01, 0.1220274725600271e-01, 0.5992504210249349e+00, 0.6348181488942451e+00, 0.4777049480214081e-01, 0.2200990500366971e-01, 0.2924878046509502e+00, 0.2797904014149808e+00, 0.9312521444528056e-02, 0.8551968095484232e-02, 0.2569399276636945e+00, 0.2483880376917308e+00, 0.9780034825247492e-07, 0.7747561140265878e-14, 0.1967683697362142e-07, 0.1503295986001297e-14, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + n0in: 14, + dmin: 1.5032959860012969e-015, + dmin1: 0.24838793989138258, + dmin2: 0.27047787997045275, + dn: 1.5032959860012969e-015, + dn1: 0.24838793989138258, + dn2: 0.27047787997045275, + tau: 1.9676827722764296e-008, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1962357727862470e+01, 0.1962357708185662e+01, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.7848815032798381e+00, 0.8489660514332782e+00, 0.6408456783027588e-01, 0.7893711422056097e-01, 0.1045726489860278e+01, 0.9885371240138099e+00, 0.2174776805091989e-01, 0.3372769361380378e-01, 0.1533080413886347e+01, 0.1510850308373431e+01, 0.1149760777771624e-01, 0.1013563894066502e-01, 0.1331879945386970e+01, 0.1330138230690781e+01, 0.8393943921304423e-02, 0.1199097716077874e-01, 0.1900138634999746e+01, 0.1895859444807910e+01, 0.7711806645770664e-02, 0.5683514322519618e-02, 0.1397226979745296e+01, 0.1707938528340212e+01, 0.3163950825942626e+00, 0.1473962616612956e+00, 0.7956626637825215e+00, 0.6722612990066530e+00, 0.2399491656225486e-01, 0.1893485515094503e-01, 0.5304944606600845e+00, 0.5474205335300870e+00, 0.3586094769777509e-01, 0.3830193935796279e-01, 0.5846824867897703e+00, 0.5577379417840483e+00, 0.1135741402906866e-01, 0.1220274725600271e-01, 0.5992504210249349e+00, 0.6348181488942451e+00, 0.4777049480214081e-01, 0.2200990500366971e-01, 0.2924878046509502e+00, 0.2797904014149808e+00, 0.9312521444528056e-02, 0.8551968095484232e-02, 0.2569399276636945e+00, 0.2483880376917308e+00, 0.9780034825247492e-07, 0.7747561140265878e-14, 0.1967683697362142e-07, 0.1503295986001297e-14, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.4143051093784424e+00}, + tauOut: 1.5032959860012393e-015, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1962357708185669e+01, 0.1962357708185662e+01, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.9279031656538340e+00, 0.8489660514332782e+00, 0.8409527066820451e-01, 0.7893711422056097e-01, 0.9381695469594078e+00, 0.9885371240138099e+00, 0.5431587122209677e-01, 0.3372769361380378e-01, 0.1466670076091998e+01, 0.1510850308373431e+01, 0.9192115573380727e-02, 0.1013563894066502e-01, 0.1332937092278178e+01, 0.1330138230690781e+01, 0.1705497388769041e-01, 0.1199097716077874e-01, 0.1884487985242738e+01, 0.1895859444807910e+01, 0.5151050664063701e-02, 0.5683514322519618e-02, 0.1850183739337442e+01, 0.1707938528340212e+01, 0.5355619565040129e-01, 0.1473962616612956e+00, 0.6376399585071952e+00, 0.6722612990066530e+00, 0.1625577000116482e-01, 0.1893485515094503e-01, 0.5694667028868834e+00, 0.5474205335300870e+00, 0.3751307094084999e-01, 0.3830193935796279e-01, 0.5324276180991995e+00, 0.5577379417840483e+00, 0.1454944326918194e-01, 0.1220274725600271e-01, 0.6422786106287314e+00, 0.6348181488942451e+00, 0.9587988848101409e-02, 0.2200990500366971e-01, 0.2787543806623621e+00, 0.2797904014149808e+00, 0.7620352256320365e-02, 0.8551968095484232e-02, 0.2407676854354167e+00, 0.2483880376917308e+00, 0.4837392336267432e-28, 0.7747561140265878e-14}, + i0: 1, + n0: 13, + 
pp: 0, + n0in: 14, + dmin: 9.2691156363468887e-030, + dmin1: 0.24076768543540897, + dmin2: 0.27020241256687788, + dn: 9.2691156363468887e-030, + dn1: 0.24076768543540897, + dn2: 0.27020241256687788, + tau: 1.5032959860012393e-015, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1962357708185669e+01, 0.1962357708185662e+01, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.9279031656538340e+00, 0.8489660514332782e+00, 0.8409527066820451e-01, 0.7893711422056097e-01, 0.9381695469594078e+00, 0.9885371240138099e+00, 0.5431587122209677e-01, 0.3372769361380378e-01, 0.1466670076091998e+01, 0.1510850308373431e+01, 0.9192115573380727e-02, 0.1013563894066502e-01, 0.1332937092278178e+01, 0.1330138230690781e+01, 0.1705497388769041e-01, 0.1199097716077874e-01, 0.1884487985242738e+01, 0.1895859444807910e+01, 0.5151050664063701e-02, 0.5683514322519618e-02, 0.1850183739337442e+01, 0.1707938528340212e+01, 0.5355619565040129e-01, 0.1473962616612956e+00, 0.6376399585071952e+00, 0.6722612990066530e+00, 0.1625577000116482e-01, 0.1893485515094503e-01, 0.5694667028868834e+00, 0.5474205335300870e+00, 0.3751307094084999e-01, 0.3830193935796279e-01, 0.5324276180991995e+00, 0.5577379417840483e+00, 0.1454944326918194e-01, 0.1220274725600271e-01, 0.6422786106287314e+00, 0.6348181488942451e+00, 0.9587988848101409e-02, 0.2200990500366971e-01, 0.2787543806623621e+00, 0.2797904014149808e+00, 0.7620352256320365e-02, 0.8551968095484232e-02, 0.2407676854354167e+00, 0.2483880376917308e+00, 0.4837392336267432e-28, 0.7747561140265878e-14}, + tauOut: 0.19361025134591583, + ttypeOut: -8, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1962357708185669e+01, 0.1768747456839757e+01, 0.3452895805257589e-14, 0.1811424766142649e-14, 0.9279031656538340e+00, 0.8183881849761209e+00, 0.8409527066820451e-01, 0.9640366690596863e-01, 0.9381695469594078e+00, 0.7024714999296202e+00, 0.5431587122209677e-01, 0.1134045480653624e+00, 0.1466670076091998e+01, 0.1168847392254101e+01, 0.9192115573380727e-02, 0.1048255904531585e-01, 0.1332937092278178e+01, 0.1145899255774637e+01, 0.1705497388769041e-01, 0.2804774784346500e-01, 0.1884487985242738e+01, 0.1667981036717421e+01, 0.5151050664063701e-02, 0.5713728135608638e-02, 0.1850183739337442e+01, 0.1704415955506319e+01, 0.5355619565040129e-01, 0.2003593680404179e-01, 0.6376399585071952e+00, 0.4402495403584025e+00, 0.1625577000116482e-01, 0.2102698332839761e-01, 0.5694667028868834e+00, 0.3923425391534199e+00, 0.3751307094084999e-01, 0.5090703407211448e-01, 0.5324276180991995e+00, 0.3024597759503511e+00, 0.1454944326918194e-01, 0.3089599659653809e-01, 0.6422786106287314e+00, 0.4273603515343789e+00, 0.9587988848101409e-02, 0.6253958476854957e-02, 0.2787543806623621e+00, 0.8651052309591171e-01, 0.7620352256320365e-02, 0.2120822426333837e-01, 0.2407676854354167e+00, 0.2594920982616250e-01, 0.4837392336267432e-28, 0.1811424766142649e-14, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + n0in: 13, + dmin: 2.5949209826162500e-002, + dmin1: 7.8890170839591350e-002, + dmin2: 0.28791033268116917, + dn: 2.5949209826162500e-002, + dn1: 7.8890170839591350e-002, + dn2: 0.41777236268627749, + tau: 0.19361025134591583, + ttype: -8, + g: 0.0000000000000000, + zOut: []float64{0.1962357708185669e+01, 0.1768747456839757e+01, 0.3452895805257589e-14, 0.1811424766142649e-14, 0.9279031656538340e+00, 0.8183881849761209e+00, 0.8409527066820451e-01, 0.9640366690596863e-01, 0.9381695469594078e+00, 0.7024714999296202e+00, 0.5431587122209677e-01, 0.1134045480653624e+00, 0.1466670076091998e+01, 
0.1168847392254101e+01, 0.9192115573380727e-02, 0.1048255904531585e-01, 0.1332937092278178e+01, 0.1145899255774637e+01, 0.1705497388769041e-01, 0.2804774784346500e-01, 0.1884487985242738e+01, 0.1667981036717421e+01, 0.5151050664063701e-02, 0.5713728135608638e-02, 0.1850183739337442e+01, 0.1704415955506319e+01, 0.5355619565040129e-01, 0.2003593680404179e-01, 0.6376399585071952e+00, 0.4402495403584025e+00, 0.1625577000116482e-01, 0.2102698332839761e-01, 0.5694667028868834e+00, 0.3923425391534199e+00, 0.3751307094084999e-01, 0.5090703407211448e-01, 0.5324276180991995e+00, 0.3024597759503511e+00, 0.1454944326918194e-01, 0.3089599659653809e-01, 0.6422786106287314e+00, 0.4273603515343789e+00, 0.9587988848101409e-02, 0.6253958476854957e-02, 0.2787543806623621e+00, 0.8651052309591171e-01, 0.7620352256320365e-02, 0.2120822426333837e-01, 0.2407676854354167e+00, 0.2594920982616250e-01, 0.4837392336267432e-28, 0.1811424766142649e-14, 0.5186198053161721e+00}, + tauOut: 1.8780556192507153e-002, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749966900647251e+01, 0.1768747456839757e+01, 0.8471295234418270e-15, 0.1811424766142649e-14, 0.8960112956895816e+00, 0.8183881849761209e+00, 0.7558032897122403e-01, 0.9640366690596863e-01, 0.7215151628312513e+00, 0.7024714999296202e+00, 0.1837142406762631e+00, 0.1134045480653624e+00, 0.9768351544306461e+00, 0.1168847392254101e+01, 0.1229681032071614e-01, 0.1048255904531585e-01, 0.1142869637104878e+01, 0.1145899255774637e+01, 0.4093477506677201e-01, 0.2804774784346500e-01, 0.1613979433593750e+01, 0.1667981036717421e+01, 0.6033886923870183e-02, 0.5713728135608638e-02, 0.1699637449193983e+01, 0.1704415955506319e+01, 0.5189819730562233e-02, 0.2003593680404179e-01, 0.4373061477637308e+00, 0.4402495403584025e+00, 0.1886499897608887e-01, 0.2102698332839761e-01, 0.4056040180569384e+00, 0.3923425391534199e+00, 0.3796148320598534e-01, 0.5090703407211448e-01, 0.2766137331483968e+00, 0.3024597759503511e+00, 0.4773343613933298e-01, 0.3089599659653809e-01, 0.3671003176793937e+00, 0.4273603515343789e+00, 0.1473802100398464e-02, 0.6253958476854957e-02, 0.8746438906634448e-01, 0.8651052309591171e-01, 0.6292122626412339e-02, 0.2120822426333837e-01, 0.8765310072430081e-03, 0.2594920982616250e-01, 0.8471295234418270e-15, 0.1811424766142649e-14}, + i0: 1, + n0: 13, + pp: 0, + n0in: 13, + dmin: 8.7653100724300811e-004, + dmin1: 6.6256164803006098e-002, + dmin2: 0.24571773655185866, + dn: 8.7653100724300811e-004, + dn1: 6.6256164803006098e-002, + dn2: 0.36084635920253871, + tau: 1.8780556192507153e-002, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1749966900647251e+01, 0.1768747456839757e+01, 0.8471295234418270e-15, 0.1811424766142649e-14, 0.8960112956895816e+00, 0.8183881849761209e+00, 0.7558032897122403e-01, 0.9640366690596863e-01, 0.7215151628312513e+00, 0.7024714999296202e+00, 0.1837142406762631e+00, 0.1134045480653624e+00, 0.9768351544306461e+00, 0.1168847392254101e+01, 0.1229681032071614e-01, 0.1048255904531585e-01, 0.1142869637104878e+01, 0.1145899255774637e+01, 0.4093477506677201e-01, 0.2804774784346500e-01, 0.1613979433593750e+01, 0.1667981036717421e+01, 0.6033886923870183e-02, 0.5713728135608638e-02, 0.1699637449193983e+01, 0.1704415955506319e+01, 0.5189819730562233e-02, 0.2003593680404179e-01, 0.4373061477637308e+00, 0.4402495403584025e+00, 0.1886499897608887e-01, 0.2102698332839761e-01, 0.4056040180569384e+00, 0.3923425391534199e+00, 0.3796148320598534e-01, 0.5090703407211448e-01, 0.2766137331483968e+00, 0.3024597759503511e+00, 
0.4773343613933298e-01, 0.3089599659653809e-01, 0.3671003176793937e+00, 0.4273603515343789e+00, 0.1473802100398464e-02, 0.6253958476854957e-02, 0.8746438906634448e-01, 0.8651052309591171e-01, 0.6292122626412339e-02, 0.2120822426333837e-01, 0.8765310072430081e-03, 0.2594920982616250e-01, 0.8471295234418270e-15, 0.1811424766142649e-14}, + tauOut: 8.1622622999092049e-004, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749966900647251e+01, 0.1749150674417261e+01, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.8960112956895816e+00, 0.9707753984308143e+00, 0.7558032897122403e-01, 0.5617401661873561e-01, 0.7215151628312513e+00, 0.8482391606587878e+00, 0.1837142406762631e+00, 0.2115659556707206e+00, 0.9768351544306461e+00, 0.7767497828506508e+00, 0.1229681032071614e-01, 0.1809289356632693e-01, 0.1142869637104878e+01, 0.1164895292375332e+01, 0.4093477506677201e-01, 0.5671572845129930e-01, 0.1613979433593750e+01, 0.1562481365836330e+01, 0.6033886923870183e-02, 0.6563547191183529e-02, 0.1699637449193983e+01, 0.1697447495503371e+01, 0.5189819730562233e-02, 0.1337031089310571e-02, 0.4373061477637308e+00, 0.4540178894205181e+00, 0.1886499897608887e-01, 0.1685334336738995e-01, 0.4056040180569384e+00, 0.4258959316655428e+00, 0.3796148320598534e-01, 0.2465547755855056e-01, 0.2766137331483968e+00, 0.2988754654991882e+00, 0.4773343613933298e-01, 0.5862963539483231e-01, 0.3671003176793937e+00, 0.3091282581549689e+00, 0.1473802100398464e-02, 0.4169958485368410e-03, 0.8746438906634448e-01, 0.9252328961422907e-01, 0.6292122626412339e-02, 0.5960921413863723e-04, 0.8765310072430081e-03, 0.6955631134505013e-06, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + n0in: 13, + dmin: 6.9556311345050133e-007, + dmin1: 8.6231166987816729e-002, + dmin2: 0.25114202935985525, + dn: 6.9556311345050133e-007, + dn1: 8.6231166987816729e-002, + dn2: 0.30765445605457048, + tau: 8.1622622999092049e-004, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1749966900647251e+01, 0.1749150674417261e+01, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.8960112956895816e+00, 0.9707753984308143e+00, 0.7558032897122403e-01, 0.5617401661873561e-01, 0.7215151628312513e+00, 0.8482391606587878e+00, 0.1837142406762631e+00, 0.2115659556707206e+00, 0.9768351544306461e+00, 0.7767497828506508e+00, 0.1229681032071614e-01, 0.1809289356632693e-01, 0.1142869637104878e+01, 0.1164895292375332e+01, 0.4093477506677201e-01, 0.5671572845129930e-01, 0.1613979433593750e+01, 0.1562481365836330e+01, 0.6033886923870183e-02, 0.6563547191183529e-02, 0.1699637449193983e+01, 0.1697447495503371e+01, 0.5189819730562233e-02, 0.1337031089310571e-02, 0.4373061477637308e+00, 0.4540178894205181e+00, 0.1886499897608887e-01, 0.1685334336738995e-01, 0.4056040180569384e+00, 0.4258959316655428e+00, 0.3796148320598534e-01, 0.2465547755855056e-01, 0.2766137331483968e+00, 0.2988754654991882e+00, 0.4773343613933298e-01, 0.5862963539483231e-01, 0.3671003176793937e+00, 0.3091282581549689e+00, 0.1473802100398464e-02, 0.4169958485368410e-03, 0.8746438906634448e-01, 0.9252328961422907e-01, 0.6292122626412339e-02, 0.5960921413863723e-04, 0.8765310072430081e-03, 0.6955631134505013e-06, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.5186198053161721e+00}, + tauOut: 6.9511331676175615e-007, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749149979303945e+01, 0.1749150674417261e+01, 0.2408395422307052e-15, 0.4339463906783712e-15, 0.1026948719936233e+01, 0.9707753984308143e+00, 
0.4639861736277134e-01, 0.5617401661873561e-01, 0.1013405803853420e+01, 0.8482391606587878e+00, 0.1621599259654447e+00, 0.2115659556707206e+00, 0.6326820553382161e+00, 0.7767497828506508e+00, 0.3331266686486833e-01, 0.1809289356632693e-01, 0.1188297658848447e+01, 0.1164895292375332e+01, 0.7457497555020480e-01, 0.5671572845129930e-01, 0.1494469242363992e+01, 0.1562481365836330e+01, 0.7455005714048079e-02, 0.6563547191183529e-02, 0.1691328825765317e+01, 0.1697447495503371e+01, 0.3589107121045615e-03, 0.1337031089310571e-02, 0.4705116269624868e+00, 0.4540178894205181e+00, 0.1525524548983380e-01, 0.1685334336738995e-01, 0.4352954686209429e+00, 0.4258959316655428e+00, 0.1692854133253905e-01, 0.2465547755855056e-01, 0.3405758644481647e+00, 0.2988754654991882e+00, 0.5321597610926388e-01, 0.5862963539483231e-01, 0.2563285827809251e+00, 0.3091282581549689e+00, 0.1505170716567330e-03, 0.4169958485368410e-03, 0.9243168664339420e-01, 0.9252328961422907e-01, 0.4485687980202113e-09, 0.5960921413863723e-04, 0.1227890724922389e-11, 0.6955631134505013e-06, 0.2408395422307052e-15, 0.4339463906783712e-15}, + i0: 1, + n0: 13, + pp: 0, + n0in: 13, + dmin: 1.2278907249223888e-012, + dmin1: 9.2372077429255559e-002, + dmin2: 0.25591158693238830, + dn: 1.2278907249223888e-012, + dn1: 9.2372077429255559e-002, + dn2: 0.25591158693238830, + tau: 6.9511331676175615e-007, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1749149979303945e+01, 0.1749150674417261e+01, 0.2408395422307052e-15, 0.4339463906783712e-15, 0.1026948719936233e+01, 0.9707753984308143e+00, 0.4639861736277134e-01, 0.5617401661873561e-01, 0.1013405803853420e+01, 0.8482391606587878e+00, 0.1621599259654447e+00, 0.2115659556707206e+00, 0.6326820553382161e+00, 0.7767497828506508e+00, 0.3331266686486833e-01, 0.1809289356632693e-01, 0.1188297658848447e+01, 0.1164895292375332e+01, 0.7457497555020480e-01, 0.5671572845129930e-01, 0.1494469242363992e+01, 0.1562481365836330e+01, 0.7455005714048079e-02, 0.6563547191183529e-02, 0.1691328825765317e+01, 0.1697447495503371e+01, 0.3589107121045615e-03, 0.1337031089310571e-02, 0.4705116269624868e+00, 0.4540178894205181e+00, 0.1525524548983380e-01, 0.1685334336738995e-01, 0.4352954686209429e+00, 0.4258959316655428e+00, 0.1692854133253905e-01, 0.2465547755855056e-01, 0.3405758644481647e+00, 0.2988754654991882e+00, 0.5321597610926388e-01, 0.5862963539483231e-01, 0.2563285827809251e+00, 0.3091282581549689e+00, 0.1505170716567330e-03, 0.4169958485368410e-03, 0.9243168664339420e-01, 0.9252328961422907e-01, 0.4485687980202113e-09, 0.5960921413863723e-04, 0.1227890724922389e-11, 0.6955631134505013e-06, 0.2408395422307052e-15, 0.4339463906783712e-15}, + tauOut: 1.2278907189544363e-012, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749149979303945e+01, 0.1749149979302717e+01, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.1026948719936233e+01, 0.1073347337297776e+01, 0.4639861736277134e-01, 0.4380746706334982e-01, 0.1013405803853420e+01, 0.1131758262754287e+01, 0.1621599259654447e+00, 0.9065158049178278e-01, 0.6326820553382161e+00, 0.5753431417100738e+00, 0.3331266686486833e-01, 0.6880305191066147e-01, 0.1188297658848447e+01, 0.1194069582486762e+01, 0.7457497555020480e-01, 0.9333627524262254e-01, 0.1494469242363992e+01, 0.1408587972834190e+01, 0.7455005714048079e-02, 0.8951422490882578e-02, 0.1691328825765317e+01, 0.1682736313985311e+01, 0.3589107121045615e-03, 0.1003553923945662e-03, 0.4705116269624868e+00, 0.4856665170586981e+00, 0.1525524548983380e-01, 0.1367304312976171e-01, 
0.4352954686209429e+00, 0.4385509668224923e+00, 0.1692854133253905e-01, 0.1314659648329904e-01, 0.3405758644481647e+00, 0.3806452440729016e+00, 0.5321597610926388e-01, 0.3583592846566260e-01, 0.2563285827809251e+00, 0.2206431713856914e+00, 0.1505170716567330e-03, 0.6305450884558253e-04, 0.9243168664339420e-01, 0.9236863258188953e-01, 0.4485687980202113e-09, 0.5962992535266723e-20, 0.1227890724922389e-11, 0.4959852501050381e-23, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + n0in: 13, + dmin: 4.9598525010503808e-024, + dmin1: 9.2368632133320736e-002, + dmin2: 0.22049265431403467, + dn: 4.9598525010503808e-024, + dn1: 9.2368632133320736e-002, + dn2: 0.22049265431403467, + tau: 1.2278907189544363e-012, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1749149979303945e+01, 0.1749149979302717e+01, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.1026948719936233e+01, 0.1073347337297776e+01, 0.4639861736277134e-01, 0.4380746706334982e-01, 0.1013405803853420e+01, 0.1131758262754287e+01, 0.1621599259654447e+00, 0.9065158049178278e-01, 0.6326820553382161e+00, 0.5753431417100738e+00, 0.3331266686486833e-01, 0.6880305191066147e-01, 0.1188297658848447e+01, 0.1194069582486762e+01, 0.7457497555020480e-01, 0.9333627524262254e-01, 0.1494469242363992e+01, 0.1408587972834190e+01, 0.7455005714048079e-02, 0.8951422490882578e-02, 0.1691328825765317e+01, 0.1682736313985311e+01, 0.3589107121045615e-03, 0.1003553923945662e-03, 0.4705116269624868e+00, 0.4856665170586981e+00, 0.1525524548983380e-01, 0.1367304312976171e-01, 0.4352954686209429e+00, 0.4385509668224923e+00, 0.1692854133253905e-01, 0.1314659648329904e-01, 0.3405758644481647e+00, 0.3806452440729016e+00, 0.5321597610926388e-01, 0.3583592846566260e-01, 0.2563285827809251e+00, 0.2206431713856914e+00, 0.1505170716567330e-03, 0.6305450884558253e-04, 0.9243168664339420e-01, 0.9236863258188953e-01, 0.4485687980202113e-09, 0.5962992535266723e-20, 0.1227890724922389e-11, 0.4959852501050381e-23, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.5186198053161721e+00}, + tauOut: 4.9598525010503808e-024, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749149979302717e+01, 0.1749149979302717e+01, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.1117154804361126e+01, 0.1073347337297776e+01, 0.4438011869593646e-01, 0.4380746706334982e-01, 0.1178029724550133e+01, 0.1131758262754287e+01, 0.4427372589519608e-01, 0.9065158049178278e-01, 0.5998724677255393e+00, 0.5753431417100738e+00, 0.1369551627869799e+00, 0.6880305191066147e-01, 0.1150450694942405e+01, 0.1194069582486762e+01, 0.1142789997988411e+00, 0.9333627524262254e-01, 0.1303260395526232e+01, 0.1408587972834190e+01, 0.1155784656615063e-01, 0.8951422490882578e-02, 0.1671278822811555e+01, 0.1682736313985311e+01, 0.2916285016424428e-04, 0.1003553923945662e-03, 0.4993103973382956e+00, 0.4856665170586981e+00, 0.1200921574220688e-01, 0.1367304312976171e-01, 0.4396883475635844e+00, 0.4385509668224923e+00, 0.1138121911768345e-01, 0.1314659648329904e-01, 0.4050999534208807e+00, 0.3806452440729016e+00, 0.1951852336551518e-01, 0.3583592846566260e-01, 0.2011877025290218e+00, 0.2206431713856914e+00, 0.2894937755625969e-04, 0.6305450884558253e-04, 0.9233968320433328e-01, 0.9236863258188953e-01, 0.3202909346606844e-42, 0.5962992535266723e-20}, + i0: 1, + n0: 12, + pp: 0, + n0in: 13, + dmin: 4.9598525010503808e-024, + dmin1: 9.2339683204333278e-002, + dmin2: 0.20112464802017624, + dn: 4.9598525010503808e-024, + dn1: 9.2339683204333278e-002, + 
dn2: 0.20112464802017624, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1749149979302717e+01, 0.1749149979302717e+01, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.1117154804361126e+01, 0.1073347337297776e+01, 0.4438011869593646e-01, 0.4380746706334982e-01, 0.1178029724550133e+01, 0.1131758262754287e+01, 0.4427372589519608e-01, 0.9065158049178278e-01, 0.5998724677255393e+00, 0.5753431417100738e+00, 0.1369551627869799e+00, 0.6880305191066147e-01, 0.1150450694942405e+01, 0.1194069582486762e+01, 0.1142789997988411e+00, 0.9333627524262254e-01, 0.1303260395526232e+01, 0.1408587972834190e+01, 0.1155784656615063e-01, 0.8951422490882578e-02, 0.1671278822811555e+01, 0.1682736313985311e+01, 0.2916285016424428e-04, 0.1003553923945662e-03, 0.4993103973382956e+00, 0.4856665170586981e+00, 0.1200921574220688e-01, 0.1367304312976171e-01, 0.4396883475635844e+00, 0.4385509668224923e+00, 0.1138121911768345e-01, 0.1314659648329904e-01, 0.4050999534208807e+00, 0.3806452440729016e+00, 0.1951852336551518e-01, 0.3583592846566260e-01, 0.2011877025290218e+00, 0.2206431713856914e+00, 0.2894937755625969e-04, 0.6305450884558253e-04, 0.9233968320433328e-01, 0.9236863258188953e-01, 0.3202909346606844e-42, 0.5962992535266723e-20}, + tauOut: 9.2159326345418235e-002, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1749149979302717e+01, 0.1656990652957299e+01, 0.8676862906242004e-16, 0.5850002270797901e-16, 0.1117154804361126e+01, 0.1069375596711644e+01, 0.4438011869593646e-01, 0.4888936980013561e-01, 0.1178029724550133e+01, 0.1081254754299776e+01, 0.4427372589519608e-01, 0.2456274906772994e-01, 0.5998724677255393e+00, 0.6201055550993710e+00, 0.1369551627869799e+00, 0.2540860356894923e+00, 0.1150450694942405e+01, 0.9184843327063353e+00, 0.1142789997988411e+00, 0.1621533314992303e+00, 0.1303260395526232e+01, 0.1060505584247734e+01, 0.1155784656615063e-01, 0.1821431635083262e-01, 0.1671278822811555e+01, 0.1560934342965469e+01, 0.2916285016424428e-04, 0.9328588590960435e-05, 0.4993103973382956e+00, 0.4191509581464933e+00, 0.1200921574220688e-01, 0.1259763844648080e-01, 0.4396883475635844e+00, 0.3463126018893689e+00, 0.1138121911768345e-01, 0.1331320693873929e-01, 0.4050999534208807e+00, 0.3191459435022383e+00, 0.1951852336551518e-01, 0.1230436091267282e-01, 0.2011877025290218e+00, 0.9675296464848704e-01, 0.2894937755625969e-04, 0.2762888312745317e-04, 0.9233968320433328e-01, 0.1527279757875810e-03, 0.3202909346606844e-42, 0.5850002270797901e-16, 0.7318275341991307e+00}, + i0: 1, + n0: 12, + pp: 1, + n0in: 12, + dmin: 1.5272797578758102e-004, + dmin1: 9.6724015270930774e-002, + dmin2: 0.29962742013672317, + dn: 1.5272797578758102e-004, + dn1: 9.6724015270930774e-002, + dn2: 0.29962742013672317, + tau: 9.2159326345418235e-002, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.1749149979302717e+01, 0.1656990652957299e+01, 0.8676862906242004e-16, 0.5850002270797901e-16, 0.1117154804361126e+01, 0.1069375596711644e+01, 0.4438011869593646e-01, 0.4888936980013561e-01, 0.1178029724550133e+01, 0.1081254754299776e+01, 0.4427372589519608e-01, 0.2456274906772994e-01, 0.5998724677255393e+00, 0.6201055550993710e+00, 0.1369551627869799e+00, 0.2540860356894923e+00, 0.1150450694942405e+01, 0.9184843327063353e+00, 0.1142789997988411e+00, 0.1621533314992303e+00, 0.1303260395526232e+01, 0.1060505584247734e+01, 0.1155784656615063e-01, 0.1821431635083262e-01, 0.1671278822811555e+01, 0.1560934342965469e+01, 0.2916285016424428e-04, 0.9328588590960435e-05, 0.4993103973382956e+00, 
0.4191509581464933e+00, 0.1200921574220688e-01, 0.1259763844648080e-01, 0.4396883475635844e+00, 0.3463126018893689e+00, 0.1138121911768345e-01, 0.1331320693873929e-01, 0.4050999534208807e+00, 0.3191459435022383e+00, 0.1951852336551518e-01, 0.1230436091267282e-01, 0.2011877025290218e+00, 0.9675296464848704e-01, 0.2894937755625969e-04, 0.2762888312745317e-04, 0.9233968320433328e-01, 0.1527279757875810e-03, 0.3202909346606844e-42, 0.5850002270797901e-16, 0.7318275341991307e+00}, + tauOut: 1.5267965277267402e-004, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1656837973304527e+01, 0.1656990652957299e+01, 0.3775776370348287e-16, 0.5850002270797901e-16, 0.1118112286859007e+01, 0.1069375596711644e+01, 0.4727777715386321e-01, 0.4888936980013561e-01, 0.1058387046560870e+01, 0.1081254754299776e+01, 0.1439123541326830e-01, 0.2456274906772994e-01, 0.8596476757228223e+00, 0.6201055550993710e+00, 0.2714763844897646e+00, 0.2540860356894923e+00, 0.8090086000630283e+00, 0.9184843327063353e+00, 0.2125620340079330e+00, 0.1621533314992303e+00, 0.8660051869378607e+00, 0.1060505584247734e+01, 0.3283046378299830e-01, 0.1821431635083262e-01, 0.1527960528118289e+01, 0.1560934342965469e+01, 0.2559023465659061e-05, 0.9328588590960435e-05, 0.4315933579167358e+00, 0.4191509581464933e+00, 0.1010840613748274e-01, 0.1259763844648080e-01, 0.3493647230378528e+00, 0.3463126018893689e+00, 0.1216166289646864e-01, 0.1331320693873929e-01, 0.3191359618656699e+00, 0.3191459435022383e+00, 0.3730332957296599e-02, 0.1230436091267282e-01, 0.9289758092154521e-01, 0.9675296464848704e-01, 0.4542317842367971e-07, 0.2762888312745317e-04, 0.2899836483324881e-08, 0.1527279757875810e-03, 0.3775776370348287e-16, 0.5850002270797901e-16}, + i0: 1, + n0: 12, + pp: 0, + n0in: 12, + dmin: 2.8998364833248812e-009, + dmin1: 9.2869952038417761e-002, + dmin2: 0.30683160095299705, + dn: 2.8998364833248812e-009, + dn1: 9.2869952038417761e-002, + dn2: 0.30683160095299705, + tau: 1.5267965277267402e-004, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1656837973304527e+01, 0.1656990652957299e+01, 0.3775776370348287e-16, 0.5850002270797901e-16, 0.1118112286859007e+01, 0.1069375596711644e+01, 0.4727777715386321e-01, 0.4888936980013561e-01, 0.1058387046560870e+01, 0.1081254754299776e+01, 0.1439123541326830e-01, 0.2456274906772994e-01, 0.8596476757228223e+00, 0.6201055550993710e+00, 0.2714763844897646e+00, 0.2540860356894923e+00, 0.8090086000630283e+00, 0.9184843327063353e+00, 0.2125620340079330e+00, 0.1621533314992303e+00, 0.8660051869378607e+00, 0.1060505584247734e+01, 0.3283046378299830e-01, 0.1821431635083262e-01, 0.1527960528118289e+01, 0.1560934342965469e+01, 0.2559023465659061e-05, 0.9328588590960435e-05, 0.4315933579167358e+00, 0.4191509581464933e+00, 0.1010840613748274e-01, 0.1259763844648080e-01, 0.3493647230378528e+00, 0.3463126018893689e+00, 0.1216166289646864e-01, 0.1331320693873929e-01, 0.3191359618656699e+00, 0.3191459435022383e+00, 0.3730332957296599e-02, 0.1230436091267282e-01, 0.9289758092154521e-01, 0.9675296464848704e-01, 0.4542317842367971e-07, 0.2762888312745317e-04, 0.2899836483324881e-08, 0.1527279757875810e-03, 0.3775776370348287e-16, 0.5850002270797901e-16}, + tauOut: 2.8998350258011044e-009, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1656837973304527e+01, 0.1656837970404692e+01, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.1118112286859007e+01, 0.1165390061113035e+01, 0.4727777715386321e-01, 0.4293685745187325e-01, 0.1058387046560870e+01, 0.1029841421622430e+01, 
0.1439123541326830e-01, 0.1201290976848257e-01, 0.8596476757228223e+00, 0.1119111147544269e+01, 0.2714763844897646e+00, 0.1962510428460807e+00, 0.8090086000630283e+00, 0.8253195883250457e+00, 0.2125620340079330e+00, 0.2230406579474441e+00, 0.8660051869378607e+00, 0.6757949898735797e+00, 0.3283046378299830e-01, 0.7422909836846008e-01, 0.1527960528118289e+01, 0.1453733985873459e+01, 0.2559023465659061e-05, 0.7597383986781573e-06, 0.4315933579167358e+00, 0.4417010014159849e+00, 0.1010840613748274e-01, 0.7995273950601426e-02, 0.3493647230378528e+00, 0.3535311090838850e+00, 0.1216166289646864e-01, 0.1097845108004235e-01, 0.3191359618656699e+00, 0.3118878408430891e+00, 0.3730332957296599e-02, 0.1111101051031713e-02, 0.9289758092154521e-01, 0.9178652239385691e-01, 0.4542317842367971e-07, 0.1435066789177947e-14, 0.2899836483324881e-08, 0.2245698748385924e-16, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.7318275341991307e+00}, + i0: 1, + n0: 12, + pp: 1, + n0in: 12, + dmin: 2.2456987483859239e-017, + dmin1: 9.1786476970678488e-002, + dmin2: 0.30815750788579249, + dn: 2.2456987483859239e-017, + dn1: 9.1786476970678488e-002, + dn2: 0.30815750788579249, + tau: 2.8998350258011044e-009, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1656837973304527e+01, 0.1656837970404692e+01, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.1118112286859007e+01, 0.1165390061113035e+01, 0.4727777715386321e-01, 0.4293685745187325e-01, 0.1058387046560870e+01, 0.1029841421622430e+01, 0.1439123541326830e-01, 0.1201290976848257e-01, 0.8596476757228223e+00, 0.1119111147544269e+01, 0.2714763844897646e+00, 0.1962510428460807e+00, 0.8090086000630283e+00, 0.8253195883250457e+00, 0.2125620340079330e+00, 0.2230406579474441e+00, 0.8660051869378607e+00, 0.6757949898735797e+00, 0.3283046378299830e-01, 0.7422909836846008e-01, 0.1527960528118289e+01, 0.1453733985873459e+01, 0.2559023465659061e-05, 0.7597383986781573e-06, 0.4315933579167358e+00, 0.4417010014159849e+00, 0.1010840613748274e-01, 0.7995273950601426e-02, 0.3493647230378528e+00, 0.3535311090838850e+00, 0.1216166289646864e-01, 0.1097845108004235e-01, 0.3191359618656699e+00, 0.3118878408430891e+00, 0.3730332957296599e-02, 0.1111101051031713e-02, 0.9289758092154521e-01, 0.9178652239385691e-01, 0.4542317842367971e-07, 0.1435066789177947e-14, 0.2899836483324881e-08, 0.2245698748385924e-16, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.7318275341991307e+00}, + tauOut: 2.2456987483858885e-017, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1656837970404692e+01, 0.1656837970404692e+01, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.1208326918564908e+01, 0.1165390061113035e+01, 0.3659452888027460e-01, 0.4293685745187325e-01, 0.1005259802510638e+01, 0.1029841421622430e+01, 0.1337343958524596e-01, 0.1201290976848257e-01, 0.1301988750805104e+01, 0.1119111147544269e+01, 0.1244018658302015e+00, 0.1962510428460807e+00, 0.9239583804422884e+00, 0.8253195883250457e+00, 0.1631347930486186e+00, 0.2230406579474441e+00, 0.5868892951934211e+00, 0.6757949898735797e+00, 0.1838666404767374e+00, 0.7422909836846008e-01, 0.1269868105135121e+01, 0.1453733985873459e+01, 0.2642614694812039e-06, 0.7597383986781573e-06, 0.4496960111051168e+00, 0.4417010014159849e+00, 0.6285530663790794e-02, 0.7995273950601426e-02, 0.3582240295001366e+00, 0.3535311090838850e+00, 0.9558391177537082e-02, 0.1097845108004235e-01, 0.3034405507165837e+00, 0.3118878408430891e+00, 0.3360925270585024e-03, 0.1111101051031713e-02, 0.9145042986679984e-01, 0.9178652239385691e-01, 
0.3524015903480299e-30, 0.1435066789177947e-14}, + i0: 1, + n0: 11, + pp: 0, + n0in: 12, + dmin: 2.2456987483858888e-017, + dmin1: 9.1450429866798411e-002, + dmin2: 0.30232944966555197, + dn: 2.2456987483858888e-017, + dn1: 9.1450429866798411e-002, + dn2: 0.30232944966555197, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1656837970404692e+01, 0.1656837970404692e+01, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.1208326918564908e+01, 0.1165390061113035e+01, 0.3659452888027460e-01, 0.4293685745187325e-01, 0.1005259802510638e+01, 0.1029841421622430e+01, 0.1337343958524596e-01, 0.1201290976848257e-01, 0.1301988750805104e+01, 0.1119111147544269e+01, 0.1244018658302015e+00, 0.1962510428460807e+00, 0.9239583804422884e+00, 0.8253195883250457e+00, 0.1631347930486186e+00, 0.2230406579474441e+00, 0.5868892951934211e+00, 0.6757949898735797e+00, 0.1838666404767374e+00, 0.7422909836846008e-01, 0.1269868105135121e+01, 0.1453733985873459e+01, 0.2642614694812039e-06, 0.7597383986781573e-06, 0.4496960111051168e+00, 0.4417010014159849e+00, 0.6285530663790794e-02, 0.7995273950601426e-02, 0.3582240295001366e+00, 0.3535311090838850e+00, 0.9558391177537082e-02, 0.1097845108004235e-01, 0.3034405507165837e+00, 0.3118878408430891e+00, 0.3360925270585024e-03, 0.1111101051031713e-02, 0.9145042986679984e-01, 0.9178652239385691e-01, 0.3524015903480299e-30, 0.1435066789177947e-14}, + tauOut: 9.1173077708044642e-002, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1656837970404692e+01, 0.1565664892696647e+01, 0.1792267857826344e-16, 0.1383211380667930e-16, 0.1208326918564908e+01, 0.1153748369737138e+01, 0.3659452888027460e-01, 0.3188477647299814e-01, 0.1005259802510638e+01, 0.8955753879148407e+00, 0.1337343958524596e-01, 0.1944232516271161e-01, 0.1301988750805104e+01, 0.1315775213764550e+01, 0.1244018658302015e+00, 0.8735697805677015e-01, 0.9239583804422884e+00, 0.9085631177260922e+00, 0.1631347930486186e+00, 0.1053774491236745e+00, 0.5868892951934211e+00, 0.5742054088384394e+00, 0.1838666404767374e+00, 0.4066251880351924e+00, 0.1269868105135121e+01, 0.7720701036533529e+00, 0.2642614694812039e-06, 0.1539203864417861e-06, 0.4496960111051168e+00, 0.3648083101404764e+00, 0.6285530663790794e-02, 0.6172085611379779e-02, 0.3582240295001366e+00, 0.2704372573582493e+00, 0.9558391177537082e-02, 0.1072486650400470e-01, 0.3034405507165837e+00, 0.2018786990315928e+00, 0.3360925270585024e-03, 0.1522488812438262e-03, 0.9145042986679984e-01, 0.1251032775113764e-03, 0.3524015903480299e-30, 0.1383211380667930e-16, 0.8241395430971566e+00}, + i0: 1, + n0: 11, + pp: 1, + n0in: 11, + dmin: 1.2510327751137640e-004, + dmin1: 0.20154260650453434, + dmin2: 0.26087886618071221, + dn: 1.2510327751137640e-004, + dn1: 0.20154260650453434, + dn2: 0.26087886618071221, + tau: 9.1173077708044642e-002, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.1656837970404692e+01, 0.1565664892696647e+01, 0.1792267857826344e-16, 0.1383211380667930e-16, 0.1208326918564908e+01, 0.1153748369737138e+01, 0.3659452888027460e-01, 0.3188477647299814e-01, 0.1005259802510638e+01, 0.8955753879148407e+00, 0.1337343958524596e-01, 0.1944232516271161e-01, 0.1301988750805104e+01, 0.1315775213764550e+01, 0.1244018658302015e+00, 0.8735697805677015e-01, 0.9239583804422884e+00, 0.9085631177260922e+00, 0.1631347930486186e+00, 0.1053774491236745e+00, 0.5868892951934211e+00, 0.5742054088384394e+00, 0.1838666404767374e+00, 0.4066251880351924e+00, 0.1269868105135121e+01, 0.7720701036533529e+00, 
0.2642614694812039e-06, 0.1539203864417861e-06, 0.4496960111051168e+00, 0.3648083101404764e+00, 0.6285530663790794e-02, 0.6172085611379779e-02, 0.3582240295001366e+00, 0.2704372573582493e+00, 0.9558391177537082e-02, 0.1072486650400470e-01, 0.3034405507165837e+00, 0.2018786990315928e+00, 0.3360925270585024e-03, 0.1522488812438262e-03, 0.9145042986679984e-01, 0.1251032775113764e-03, 0.3524015903480299e-30, 0.1383211380667930e-16, 0.8241395430971566e+00}, + tauOut: 1.2498058228587147e-004, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1565539912114361e+01, 0.1565664892696647e+01, 0.1019378594629470e-16, 0.1383211380667930e-16, 0.1185508165627851e+01, 0.1153748369737138e+01, 0.2408690373149840e-01, 0.3188477647299814e-01, 0.8908058287637680e+00, 0.8955753879148407e+00, 0.2871751477260568e-01, 0.1944232516271161e-01, 0.1374289696466428e+01, 0.1315775213764550e+01, 0.5775298217141787e-01, 0.8735697805677015e-01, 0.9560626040960629e+00, 0.9085631177260922e+00, 0.6328905763825028e-01, 0.1053774491236745e+00, 0.9174165586530958e+00, 0.5742054088384394e+00, 0.3422034931823232e+00, 0.4066251880351924e+00, 0.4297417838091302e+00, 0.7720701036533529e+00, 0.1306631986684747e-06, 0.1539203864417861e-06, 0.3708552845063717e+00, 0.3648083101404764e+00, 0.4500844331080801e-02, 0.6172085611379779e-02, 0.2765362989488873e+00, 0.2704372573582493e+00, 0.7829431815445537e-02, 0.1072486650400470e-01, 0.1940765355151052e+00, 0.2018786990315928e+00, 0.9814083907923291e-07, 0.1522488812438262e-03, 0.2455438642568072e-07, 0.1251032775113764e-03, 0.1019378594629470e-16, 0.1383211380667930e-16}, + i0: 1, + n0: 11, + pp: 0, + n0in: 11, + dmin: 2.4554386425680716e-008, + dmin1: 0.19392428663386141, + dmin2: 0.26581143244488259, + dn: 2.4554386425680716e-008, + dn1: 0.19392428663386141, + dn2: 0.26581143244488259, + tau: 1.2498058228587147e-004, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1565539912114361e+01, 0.1565664892696647e+01, 0.1019378594629470e-16, 0.1383211380667930e-16, 0.1185508165627851e+01, 0.1153748369737138e+01, 0.2408690373149840e-01, 0.3188477647299814e-01, 0.8908058287637680e+00, 0.8955753879148407e+00, 0.2871751477260568e-01, 0.1944232516271161e-01, 0.1374289696466428e+01, 0.1315775213764550e+01, 0.5775298217141787e-01, 0.8735697805677015e-01, 0.9560626040960629e+00, 0.9085631177260922e+00, 0.6328905763825028e-01, 0.1053774491236745e+00, 0.9174165586530958e+00, 0.5742054088384394e+00, 0.3422034931823232e+00, 0.4066251880351924e+00, 0.4297417838091302e+00, 0.7720701036533529e+00, 0.1306631986684747e-06, 0.1539203864417861e-06, 0.3708552845063717e+00, 0.3648083101404764e+00, 0.4500844331080801e-02, 0.6172085611379779e-02, 0.2765362989488873e+00, 0.2704372573582493e+00, 0.7829431815445537e-02, 0.1072486650400470e-01, 0.1940765355151052e+00, 0.2018786990315928e+00, 0.9814083907923291e-07, 0.1522488812438262e-03, 0.2455438642568072e-07, 0.1251032775113764e-03, 0.1019378594629470e-16, 0.1383211380667930e-16}, + tauOut: 2.4554370888251911e-008, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1565539912114361e+01, 0.1565539887559990e+01, 0.1019378594629470e-16, 0.7719264500395369e-17, 0.1185508165627851e+01, 0.1209595044804978e+01, 0.2408690373149840e-01, 0.1773879145177052e-01, 0.8908058287637680e+00, 0.9017845275302323e+00, 0.2871751477260568e-01, 0.4376453959373496e-01, 0.1374289696466428e+01, 0.1388278114489740e+01, 0.5775298217141787e-01, 0.3977262621431848e-01, 0.9560626040960629e+00, 0.9795790109656241e+00, 0.6328905763825028e-01, 
[Vendored numerical test fixtures, apparently for a LAPACK dqds-family (dlasq) routine: each case records an input slice z together with the parameters i0, n0, pp, n0in, dmin, dmin1, dmin2, dn, dn1, dn2, tau, ttype and g, plus the expected zOut, tauOut, ttypeOut and gOut values for one iteration step. The bulk float64 tables are elided.]
5.0676424551397798e-027, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.3109840811355817e+00, 0.3109840811355817e+00, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.3798416222158761e+00, 0.3799365009660947e+00, 0.9487875021856221e-04, 0.4912310311151891e-04, 0.1967106424757246e+00, 0.1966615193726131e+00, 0.1108289770587888e-23, 0.2855879641297252e-49, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + n0in: 4, + dmin: 5.0676424551397798e-027, + dmin1: 0.19666151937261311, + dmin2: 0.31098408113558174, + dn: 5.0676424551397798e-027, + dn1: 0.19666151937261311, + dn2: 0.37984162221587608, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.3109840811355817e+00, 0.3109840811355817e+00, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.3798416222158761e+00, 0.3799365009660947e+00, 0.9487875021856221e-04, 0.4912310311151891e-04, 0.1967106424757246e+00, 0.1966615193726131e+00, 0.1108289770587888e-23, 0.2855879641297252e-49, 0.2169993432366266e+01}, + tauOut: 9.8330759686306557e-002, + ttypeOut: -9, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2126533214492752e+00, 0.3109840811355817e+00, 0.7453545812882342e-17, 0.4171805735046273e-17, 0.2816548643828996e+00, 0.3799365009660947e+00, 0.3429951091160839e-04, 0.4912310311151891e-04, 0.9829646017539498e-01, 0.1966615193726131e+00, 0.3799365009660947e+00, 0.2855879641297252e-49}, + i0: 1, + n0: 3, + pp: 0, + n0in: 3, + dmin: 9.8296460175394978e-002, + dmin1: 0.21265332144927518, + dmin2: 0.21265332144927518, + dn: 9.8296460175394978e-002, + dn1: 0.28160574127978810, + dn2: 0.21265332144927518, + tau: 9.8330759686306557e-002, + ttype: -9, + g: 0.0000000000000000, + zOut: []float64{0.2126533214492752e+00, 0.3109840811355817e+00, 0.7453545812882342e-17, 0.4171805735046273e-17, 0.2816548643828996e+00, 0.3799365009660947e+00, 0.3429951091160839e-04, 0.4912310311151891e-04, 0.9829646017539498e-01, 0.1966615193726131e+00, 0.3799365009660947e+00, 0.2855879641297252e-49}, + tauOut: 9.7172513485343004e-002, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2126533214492752e+00, 0.1154808079639322e+00, 0.7453545812882342e-17, 0.1817901582187390e-16, 0.2816548643828996e+00, 0.1845166504084682e+00, 0.3429951091160839e-04, 0.1827217490071947e-04, 0.9829646017539498e-01, 0.1105674515151256e-02, 0.3799365009660947e+00, 0.2816548643828996e+00, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + n0in: 3, + dmin: 1.1056745151512559e-003, + dmin1: 0.11548080796393217, + dmin2: 0.11548080796393217, + dn: 1.1056745151512559e-003, + dn1: 0.18448235089755655, + dn2: 0.11548080796393217, + tau: 9.7172513485343004e-002, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2126533214492752e+00, 0.1154808079639322e+00, 0.7453545812882342e-17, 0.1817901582187390e-16, 0.2816548643828996e+00, 0.1845166504084682e+00, 0.3429951091160839e-04, 0.1827217490071947e-04, 0.9829646017539498e-01, 0.1105674515151256e-02, 0.3799365009660947e+00, 0.2816548643828996e+00, 0.2169993432366266e+01}, + tauOut: 1.0942861809085330e-003, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1143865217830237e+00, 0.1154808079639322e+00, 0.2932453102768040e-16, 0.1817901582187390e-16, 0.1834406364024603e+00, 0.1845166504084682e+00, 0.1101341475930520e-06, 0.1827217490071947e-04, 0.1127820009512990e-04, 0.1105674515151256e-02, 0.1845166504084682e+00, 0.2816548643828996e+00}, + i0: 1, + n0: 3, + pp: 0, + n0in: 3, + dmin: 1.1278200095129901e-005, + dmin1: 0.11438652178302365, + dmin2: 
0.11438652178302365, + dn: 1.1278200095129901e-005, + dn1: 0.18342236422755959, + dn2: 0.11438652178302365, + tau: 1.0942861809085330e-003, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1143865217830237e+00, 0.1154808079639322e+00, 0.2932453102768040e-16, 0.1817901582187390e-16, 0.1834406364024603e+00, 0.1845166504084682e+00, 0.1101341475930520e-06, 0.1827217490071947e-04, 0.1127820009512990e-04, 0.1105674515151256e-02, 0.1845166504084682e+00, 0.2816548643828996e+00}, + tauOut: 1.1269238360546607e-005, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1143865217830237e+00, 0.1143752525446631e+00, 0.2932453102768040e-16, 0.4703212027287794e-16, 0.1834406364024603e+00, 0.1834294772982473e+00, 0.1101341475930520e-06, 0.6771621290952006e-11, 0.1127820009512990e-04, 0.8954962962003413e-08, 0.1845166504084682e+00, 0.1834406364024603e+00, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + n0in: 3, + dmin: 8.9549629620034135e-009, + dmin1: 0.11437525254466312, + dmin2: 0.11437525254466312, + dn: 8.9549629620034135e-009, + dn1: 0.18342936716409974, + dn2: 0.11437525254466312, + tau: 1.1269238360546607e-005, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1143865217830237e+00, 0.1143752525446631e+00, 0.2932453102768040e-16, 0.4703212027287794e-16, 0.1834406364024603e+00, 0.1834294772982473e+00, 0.1101341475930520e-06, 0.6771621290952006e-11, 0.1127820009512990e-04, 0.8954962962003413e-08, 0.1845166504084682e+00, 0.1834406364024603e+00, 0.2169993432366266e+01}, + tauOut: 8.9549072084035346e-009, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1143752435897560e+00, 0.1143752525446631e+00, 0.7542783706608855e-16, 0.4703212027287794e-16, 0.1834294683501117e+00, 0.1834294772982473e+00, 0.3305882004599510e-18, 0.6771621290952006e-11, 0.5575326929115112e-13, 0.8954962962003413e-08, 0.1834294772982473e+00, 0.1834406364024603e+00}, + i0: 1, + n0: 3, + pp: 0, + n0in: 3, + dmin: 5.5753269291151117e-014, + dmin1: 0.11437524358975594, + dmin2: 0.11437524358975594, + dn: 5.5753269291151117e-014, + dn1: 0.18342946834334006, + dn2: 0.11437524358975594, + tau: 8.9549072084035346e-009, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1143752435897560e+00, 0.1143752525446631e+00, 0.7542783706608855e-16, 0.4703212027287794e-16, 0.1834294683501117e+00, 0.1834294772982473e+00, 0.3305882004599510e-18, 0.6771621290952006e-11, 0.5575326929115112e-13, 0.8954962962003413e-08, 0.1834294772982473e+00, 0.1834406364024603e+00}, + tauOut: 5.5753269214454873e-014, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1648283185136998e+01, 0.1396221235720571e+01, 0.1712714336271993e+00, 0.2520619494164272e+00, 0.1510753432847732e+01, 0.1119977945086946e+01, 0.3792800633372563e+00, 0.5620469213879850e+00, 0.1328441621586708e+01, 0.1019485448443405e+01, 0.1388998759717073e+00, 0.6882362364805590e+00, 0.4466938077796418e+00, 0.2681061628164644e+00, 0.7093206047768255e+00, 0.3174875209348847e+00, 0.6843112870203156e+00, 0.9979892152967577e+00, 0.6490582981441884e+00, 0.3956426765003833e+00, 0.5085572738629487e+00, 0.1122623886995757e+01, 0.9750235054014829e-02, 0.3499168501137979e-01, 0.1364886053450573e+00, 0.1417066070690837e+00, 0.2301225778544498e-01, 0.4532233329988395e-02, 0.1646009972289452e+01, 0.6930161671496210e+00, 0.2362515608142310e+00, 0.9760060629252760e+00, 0.5818602562677768e+00, 0.3984323866837953e+00, 0.1797665269485310e-01, 0.4196794303982125e+00, 0.5600419521166516e+00, 0.2492354636952108e-01, 0.2195137569256029e+00, 
0.5530950584419837e+00, 0.4184071984843414e+00, 0.2222708575473020e+00, 0.2727864547293006e+00, 0.4156500978626423e+00, 0.6774373914466536e-01, 0.2745959086613283e+00, 0.1050967099374242e+00, 0.6593428521263771e-01, 0.2040338718098096e+00, 0.1079809097801335e+00, 0.1271971985482246e+00, 0.2011496719671002e+00, 0.4444741998443960e-01, 0.1290210252363728e+00, 0.5776327498150620e+00, 0.4262359329629137e-01, 0.3402556968467140e+00, 0.6023491555328507e+00, 0.1086565805630269e+00, 0.3155392911289253e+00, 0.9382999256694983e+00, 0.1171677238805356e+00, 0.7901625299559836e+00, 0.9297887823519896e+00, 0.6095853796269167e+00, 0.7973955560628040e+00, 0.8018125008387630e+00, 0.6023523535200964e+00, 0.2443177602187348e-01, 0.8114406374558937e+00, 0.2277830378453201e+00, 0.1480363940474286e-01, 0.6969081780841352e+00, 0.3759308106650992e+00, 0.8012406122589412e-01, 0.5487604052643561e+00, 0.9781022865072954e-01, 0.1017549972519246e+00, 0.2961248981181939e-03, 0.7617929262469909e-01, 0.8408398800007548e-04, 0.3802088861182694e-03, 0.0000000000000000e+00, 0.0000000000000000e+00}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: -0.0000000000000000, + dmin1: 0.0000000000000000, + dmin2: 0.0000000000000000, + dn: 0.0000000000000000, + dn1: 0.0000000000000000, + dn2: 0.0000000000000000, + tau: 0.0000000000000000, + ttype: 0, + g: 0.0000000000000000, + zOut: []float64{0.1648283185136998e+01, 0.1396221235720571e+01, 0.1712714336271993e+00, 0.2520619494164272e+00, 0.1510753432847732e+01, 0.1119977945086946e+01, 0.3792800633372563e+00, 0.5620469213879850e+00, 0.1328441621586708e+01, 0.1019485448443405e+01, 0.1388998759717073e+00, 0.6882362364805590e+00, 0.4466938077796418e+00, 0.2681061628164644e+00, 0.7093206047768255e+00, 0.3174875209348847e+00, 0.6843112870203156e+00, 0.9979892152967577e+00, 0.6490582981441884e+00, 0.3956426765003833e+00, 0.5085572738629487e+00, 0.1122623886995757e+01, 0.9750235054014829e-02, 0.3499168501137979e-01, 0.1364886053450573e+00, 0.1417066070690837e+00, 0.2301225778544498e-01, 0.4532233329988395e-02, 0.1646009972289452e+01, 0.6930161671496210e+00, 0.2362515608142310e+00, 0.9760060629252760e+00, 0.5818602562677768e+00, 0.3984323866837953e+00, 0.1797665269485310e-01, 0.4196794303982125e+00, 0.5600419521166516e+00, 0.2492354636952108e-01, 0.2195137569256029e+00, 0.5530950584419837e+00, 0.4184071984843414e+00, 0.2222708575473020e+00, 0.2727864547293006e+00, 0.4156500978626423e+00, 0.6774373914466536e-01, 0.2745959086613283e+00, 0.1050967099374242e+00, 0.6593428521263771e-01, 0.2040338718098096e+00, 0.1079809097801335e+00, 0.1271971985482246e+00, 0.2011496719671002e+00, 0.4444741998443960e-01, 0.1290210252363728e+00, 0.5776327498150620e+00, 0.4262359329629137e-01, 0.3402556968467140e+00, 0.6023491555328507e+00, 0.1086565805630269e+00, 0.3155392911289253e+00, 0.9382999256694983e+00, 0.1171677238805356e+00, 0.7901625299559836e+00, 0.9297887823519896e+00, 0.6095853796269167e+00, 0.7973955560628040e+00, 0.8018125008387630e+00, 0.6023523535200964e+00, 0.2443177602187348e-01, 0.8114406374558937e+00, 0.2277830378453201e+00, 0.1480363940474286e-01, 0.6969081780841352e+00, 0.3759308106650992e+00, 0.8012406122589412e-01, 0.5487604052643561e+00, 0.9781022865072954e-01, 0.1017549972519246e+00, 0.2961248981181939e-03, 0.7617929262469909e-01, 0.8408398800007548e-04, 0.3802088861182694e-03, 0.0000000000000000e+00, 0.0000000000000000e+00}, + tauOut: 0.0000000000000000, + ttypeOut: -1, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1648283185136998e+01, 0.1819554618764197e+01, 
0.1712714336271993e+00, 0.1422045283129674e+00, 0.1510753432847732e+01, 0.1747828967872021e+01, 0.3792800633372563e+00, 0.2882727267008810e+00, 0.1328441621586708e+01, 0.1179068770857534e+01, 0.1388998759717073e+00, 0.5262264257308433e-01, 0.4466938077796418e+00, 0.1103391769983383e+01, 0.7093206047768255e+00, 0.4399127392187891e+00, 0.6843112870203156e+00, 0.8934568459457148e+00, 0.6490582981441884e+00, 0.3694451726238032e+00, 0.5085572738629487e+00, 0.1488623362931603e+00, 0.9750235054014829e-02, 0.8939776288934402e-02, 0.1364886053450573e+00, 0.1505610868415679e+00, 0.2301225778544498e-01, 0.2515816443301624e+00, 0.1646009972289452e+01, 0.1630679888773521e+01, 0.2362515608142310e+00, 0.8429943526342391e-01, 0.5818602562677768e+00, 0.5155374736992060e+00, 0.1797665269485310e-01, 0.1952851185677853e-01, 0.5600419521166516e+00, 0.7600271971854760e+00, 0.2195137569256029e+00, 0.1208458544696003e+00, 0.4184071984843414e+00, 0.5703477987440417e+00, 0.2727864547293006e+00, 0.3240053608004366e-01, 0.6774373914466536e-01, 0.1404399130020459e+00, 0.1050967099374242e+00, 0.1526865702536626e+00, 0.2040338718098096e+00, 0.1785445001043715e+00, 0.1271971985482246e+00, 0.3166486394939177e-01, 0.4444741998443960e-01, 0.5904153058501098e+00, 0.5776327498150620e+00, 0.3328891237445398e+00, 0.3402556968467140e+00, 0.1160231536652011e+00, 0.1086565805630269e+00, 0.8787251358464724e+00, 0.9382999256694983e+00, 0.8497373197790092e+00, 0.7901625299559836e+00, 0.5668475593321608e+00, 0.6095853796269167e+00, 0.8445503211335190e+00, 0.8018125008387630e+00, 0.2319542476253924e-01, 0.2443177602187348e-01, 0.2290193891046544e+00, 0.2277830378453201e+00, 0.6931459494493321e+00, 0.6969081780841352e+00, 0.8388628986069724e-01, 0.8012406122589412e-01, 0.9342352322344821e-01, 0.9781022865072954e-01, 0.4682830325399513e-02, 0.2961248981181939e-03, 0.5317160915449039e-05, 0.8408398800007548e-04, 0.7876682708462645e-04, 0.0000000000000000e+00, 0.8939776288934402e-02, 0.1037537856266618 - 321}, + i0: 1, + n0: 21, + pp: 1, + n0in: 21, + dmin: 7.8766827084626452e-005, + dmin1: 1.2363512593342330e-003, + dmin2: 1.2363512593342330e-003, + dn: 7.8766827084626452e-005, + dn1: 4.3867054272813191e-003, + dn2: 3.7622286348031123e-003, + tau: 0.0000000000000000, + ttype: -1, + g: 0.0000000000000000, + zOut: []float64{0.1648283185136998e+01, 0.1819554618764197e+01, 0.1712714336271993e+00, 0.1422045283129674e+00, 0.1510753432847732e+01, 0.1747828967872021e+01, 0.3792800633372563e+00, 0.2882727267008810e+00, 0.1328441621586708e+01, 0.1179068770857534e+01, 0.1388998759717073e+00, 0.5262264257308433e-01, 0.4466938077796418e+00, 0.1103391769983383e+01, 0.7093206047768255e+00, 0.4399127392187891e+00, 0.6843112870203156e+00, 0.8934568459457148e+00, 0.6490582981441884e+00, 0.3694451726238032e+00, 0.5085572738629487e+00, 0.1488623362931603e+00, 0.9750235054014829e-02, 0.8939776288934402e-02, 0.1364886053450573e+00, 0.1505610868415679e+00, 0.2301225778544498e-01, 0.2515816443301624e+00, 0.1646009972289452e+01, 0.1630679888773521e+01, 0.2362515608142310e+00, 0.8429943526342391e-01, 0.5818602562677768e+00, 0.5155374736992060e+00, 0.1797665269485310e-01, 0.1952851185677853e-01, 0.5600419521166516e+00, 0.7600271971854760e+00, 0.2195137569256029e+00, 0.1208458544696003e+00, 0.4184071984843414e+00, 0.5703477987440417e+00, 0.2727864547293006e+00, 0.3240053608004366e-01, 0.6774373914466536e-01, 0.1404399130020459e+00, 0.1050967099374242e+00, 0.1526865702536626e+00, 0.2040338718098096e+00, 0.1785445001043715e+00, 0.1271971985482246e+00, 
0.3166486394939177e-01, 0.4444741998443960e-01, 0.5904153058501098e+00, 0.5776327498150620e+00, 0.3328891237445398e+00, 0.3402556968467140e+00, 0.1160231536652011e+00, 0.1086565805630269e+00, 0.8787251358464724e+00, 0.9382999256694983e+00, 0.8497373197790092e+00, 0.7901625299559836e+00, 0.5668475593321608e+00, 0.6095853796269167e+00, 0.8445503211335190e+00, 0.8018125008387630e+00, 0.2319542476253924e-01, 0.2443177602187348e-01, 0.2290193891046544e+00, 0.2277830378453201e+00, 0.6931459494493321e+00, 0.6969081780841352e+00, 0.8388628986069724e-01, 0.8012406122589412e-01, 0.9342352322344821e-01, 0.9781022865072954e-01, 0.4682830325399513e-02, 0.2961248981181939e-03, 0.5317160915449039e-05, 0.8408398800007548e-04, 0.7876682708462645e-04, 0.0000000000000000e+00, 0.8939776288934402e-02, 0.1037537856266618 - 321}, + tauOut: 0.0000000000000000, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1961759147077164e+01, 0.1819554618764197e+01, 0.1266970995487882e+00, 0.1422045283129674e+00, 0.1909404595024114e+01, 0.1747828967872021e+01, 0.1780101349021133e+00, 0.2882727267008810e+00, 0.1053681278528505e+01, 0.1179068770857534e+01, 0.5510526941411123e-01, 0.5262264257308433e-01, 0.1488199239788061e+01, 0.1103391769983383e+01, 0.2641064704009213e+00, 0.4399127392187891e+00, 0.9987955481685968e+00, 0.8934568459457148e+00, 0.5506279200968747e-01, 0.3694451726238032e+00, 0.1027393205724072e+00, 0.1488623362931603e+00, 0.1310094739466220e-01, 0.8939776288934402e-02, 0.3890417837770681e+00, 0.1505610868415679e+00, 0.1054511738587064e+01, 0.2515816443301624e+00, 0.6604675854498806e+00, 0.1630679888773521e+01, 0.6580113672099847e-01, 0.8429943526342391e-01, 0.4692648488349861e+00, 0.5155374736992060e+00, 0.3162862116895929e-01, 0.1952851185677853e-01, 0.8492444304861170e+00, 0.7600271971854760e+00, 0.8115939841327705e-01, 0.1208458544696003e+00, 0.5215889364108083e+00, 0.5703477987440417e+00, 0.8723974284448969e-02, 0.3240053608004366e-01, 0.2844025089712595e+00, 0.1404399130020459e+00, 0.9585480612390133e-01, 0.1526865702536626e+00, 0.1143545579298620e+00, 0.1785445001043715e+00, 0.1634864466429828e+00, 0.3166486394939177e-01, 0.7598179829516669e+00, 0.5904153058501098e+00, 0.5083170815153470e-01, 0.3328891237445398e+00, 0.9439165813601388e+00, 0.1160231536652011e+00, 0.7910503496831139e+00, 0.8787251358464724e+00, 0.6255345294280562e+00, 0.8497373197790092e+00, 0.7653155273545736e+00, 0.5668475593321608e+00, 0.1024302185414846e+00, 0.8445503211335190e+00, 0.5186166821452450e-01, 0.2319542476253924e-01, 0.8703036703394620e+00, 0.2290193891046544e+00, 0.6681052144545734e-01, 0.6931459494493321e+00, 0.1104992916386881e+00, 0.8388628986069724e-01, 0.3959179295799719e-02, 0.9342352322344821e-01, 0.7289681905152429e-03, 0.4682830325399513e-02, 0.5745324691222600e-06, 0.5317160915449039e-05, 0.7819229461550419e-04, 0.7876682708462645e-04, 0.8723974284448969e-02, 0.8939776288934402e-02}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: 7.8192294615504193e-005, + dmin1: 7.2365102959979382e-004, + dmin2: 1.7075768415239889e-002, + dn: 7.8192294615504193e-005, + dn1: 7.2365102959979382e-004, + dn2: 1.7075768415239889e-002, + tau: 0.0000000000000000, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1961759147077164e+01, 0.1819554618764197e+01, 0.1266970995487882e+00, 0.1422045283129674e+00, 0.1909404595024114e+01, 0.1747828967872021e+01, 0.1780101349021133e+00, 0.2882727267008810e+00, 0.1053681278528505e+01, 0.1179068770857534e+01, 0.5510526941411123e-01, 0.5262264257308433e-01, 
0.1488199239788061e+01, 0.1103391769983383e+01, 0.2641064704009213e+00, 0.4399127392187891e+00, 0.9987955481685968e+00, 0.8934568459457148e+00, 0.5506279200968747e-01, 0.3694451726238032e+00, 0.1027393205724072e+00, 0.1488623362931603e+00, 0.1310094739466220e-01, 0.8939776288934402e-02, 0.3890417837770681e+00, 0.1505610868415679e+00, 0.1054511738587064e+01, 0.2515816443301624e+00, 0.6604675854498806e+00, 0.1630679888773521e+01, 0.6580113672099847e-01, 0.8429943526342391e-01, 0.4692648488349861e+00, 0.5155374736992060e+00, 0.3162862116895929e-01, 0.1952851185677853e-01, 0.8492444304861170e+00, 0.7600271971854760e+00, 0.8115939841327705e-01, 0.1208458544696003e+00, 0.5215889364108083e+00, 0.5703477987440417e+00, 0.8723974284448969e-02, 0.3240053608004366e-01, 0.2844025089712595e+00, 0.1404399130020459e+00, 0.9585480612390133e-01, 0.1526865702536626e+00, 0.1143545579298620e+00, 0.1785445001043715e+00, 0.1634864466429828e+00, 0.3166486394939177e-01, 0.7598179829516669e+00, 0.5904153058501098e+00, 0.5083170815153470e-01, 0.3328891237445398e+00, 0.9439165813601388e+00, 0.1160231536652011e+00, 0.7910503496831139e+00, 0.8787251358464724e+00, 0.6255345294280562e+00, 0.8497373197790092e+00, 0.7653155273545736e+00, 0.5668475593321608e+00, 0.1024302185414846e+00, 0.8445503211335190e+00, 0.5186166821452450e-01, 0.2319542476253924e-01, 0.8703036703394620e+00, 0.2290193891046544e+00, 0.6681052144545734e-01, 0.6931459494493321e+00, 0.1104992916386881e+00, 0.8388628986069724e-01, 0.3959179295799719e-02, 0.9342352322344821e-01, 0.7289681905152429e-03, 0.4682830325399513e-02, 0.5745324691222600e-06, 0.5317160915449039e-05, 0.7819229461550419e-04, 0.7876682708462645e-04, 0.8723974284448969e-02, 0.8939776288934402e-02}, + tauOut: 7.8083356181667918e-005, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1961759147077164e+01, 0.2088378163269771e+01, 0.1266970995487882e+00, 0.1158391848322702e+00, 0.1909404595024114e+01, 0.1971497461737776e+01, 0.1780101349021133e+00, 0.9513882222772962e-01, 0.1053681278528505e+01, 0.1013569642358705e+01, 0.5510526941411123e-01, 0.8090970429970110e-01, 0.1488199239788061e+01, 0.1671317922533099e+01, 0.2641064704009213e+00, 0.1578325483874163e+00, 0.9987955481685968e+00, 0.8959477084346864e+00, 0.5506279200968747e-01, 0.6314111623521673e-02, 0.1027393205724072e+00, 0.1094480729873660e+00, 0.1310094739466220e-01, 0.4656834793406785e-01, 0.3890417837770681e+00, 0.1396907091073883e+01, 0.1054511738587064e+01, 0.4985806330739840e+00, 0.6604675854498806e+00, 0.2276100057407134e+00, 0.6580113672099847e-01, 0.1356625794022653e+00, 0.4692648488349861e+00, 0.3651528072454984e+00, 0.3162862116895929e-01, 0.7355942454424362e-01, 0.8492444304861170e+00, 0.8567663209989688e+00, 0.8115939841327705e-01, 0.4940885660487250e-01, 0.5215889364108083e+00, 0.4808259707342031e+00, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.2844025089712595e+00, 0.3750191107685802e+00, 0.9585480612390133e-01, 0.2922900104287094e-01, 0.1143545579298620e+00, 0.2485339201737921e+00, 0.1634864466429828e+00, 0.4998108187459615e+00, 0.7598179829516669e+00, 0.3107607890010585e+00, 0.5083170815153470e-01, 0.1543981540828483e+00, 0.9439165813601388e+00, 0.1580490693604223e+01, 0.7910503496831139e+00, 0.3130858727896049e+00, 0.6255345294280562e+00, 0.1077686100636843e+01, 0.7653155273545736e+00, 0.7274051012980143e-01, 0.1024302185414846e+00, 0.8147329327002600e-01, 0.5186166821452450e-01, 0.5539901283655780e+00, 0.8703036703394620e+00, 0.3830459800631597e+00, 0.6681052144545734e-01, 
0.1927318305890360e-01, 0.1104992916386881e+00, 0.9510720451940254e-01, 0.3959179295799719e-02, 0.3034592154998885e-04, 0.7289681905152429e-03, 0.6211134452527084e-03, 0.5745324691222600e-06, 0.7232819130731082e-07, 0.7819229461550419e-04, 0.3661024252896476e-07, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.1037537856266618 - 321}, + i0: 1, + n0: 21, + pp: 1, + n0in: 21, + dmin: 3.6610242528964756e-008, + dmin1: 6.2053891278358614e-004, + dmin2: 2.9611625055501498e-002, + dn: 3.6610242528964756e-008, + dn1: 6.2053891278358614e-004, + dn2: 9.1148025223602810e-002, + tau: 7.8083356181667918e-005, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.1961759147077164e+01, 0.2088378163269771e+01, 0.1266970995487882e+00, 0.1158391848322702e+00, 0.1909404595024114e+01, 0.1971497461737776e+01, 0.1780101349021133e+00, 0.9513882222772962e-01, 0.1053681278528505e+01, 0.1013569642358705e+01, 0.5510526941411123e-01, 0.8090970429970110e-01, 0.1488199239788061e+01, 0.1671317922533099e+01, 0.2641064704009213e+00, 0.1578325483874163e+00, 0.9987955481685968e+00, 0.8959477084346864e+00, 0.5506279200968747e-01, 0.6314111623521673e-02, 0.1027393205724072e+00, 0.1094480729873660e+00, 0.1310094739466220e-01, 0.4656834793406785e-01, 0.3890417837770681e+00, 0.1396907091073883e+01, 0.1054511738587064e+01, 0.4985806330739840e+00, 0.6604675854498806e+00, 0.2276100057407134e+00, 0.6580113672099847e-01, 0.1356625794022653e+00, 0.4692648488349861e+00, 0.3651528072454984e+00, 0.3162862116895929e-01, 0.7355942454424362e-01, 0.8492444304861170e+00, 0.8567663209989688e+00, 0.8115939841327705e-01, 0.4940885660487250e-01, 0.5215889364108083e+00, 0.4808259707342031e+00, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.2844025089712595e+00, 0.3750191107685802e+00, 0.9585480612390133e-01, 0.2922900104287094e-01, 0.1143545579298620e+00, 0.2485339201737921e+00, 0.1634864466429828e+00, 0.4998108187459615e+00, 0.7598179829516669e+00, 0.3107607890010585e+00, 0.5083170815153470e-01, 0.1543981540828483e+00, 0.9439165813601388e+00, 0.1580490693604223e+01, 0.7910503496831139e+00, 0.3130858727896049e+00, 0.6255345294280562e+00, 0.1077686100636843e+01, 0.7653155273545736e+00, 0.7274051012980143e-01, 0.1024302185414846e+00, 0.8147329327002600e-01, 0.5186166821452450e-01, 0.5539901283655780e+00, 0.8703036703394620e+00, 0.3830459800631597e+00, 0.6681052144545734e-01, 0.1927318305890360e-01, 0.1104992916386881e+00, 0.9510720451940254e-01, 0.3959179295799719e-02, 0.3034592154998885e-04, 0.7289681905152429e-03, 0.6211134452527084e-03, 0.5745324691222600e-06, 0.7232819130731082e-07, 0.7819229461550419e-04, 0.3661024252896476e-07, 0.8723974284448969e-02, 0.5160120970399038e-02, 0.1037537856266618 - 321}, + tauOut: 3.6605973538398975e-008, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2204217311496068e+01, 0.2088378163269771e+01, 0.1036089580076783e+00, 0.1158391848322702e+00, 0.1963027289351853e+01, 0.1971497461737776e+01, 0.4912301654839819e-01, 0.9513882222772962e-01, 0.1045356293504034e+01, 0.1013569642358705e+01, 0.1293586117415210e+00, 0.8090970429970110e-01, 0.1699791822573021e+01, 0.1671317922533099e+01, 0.8319236989271818e-01, 0.1578325483874163e+00, 0.8190694135595162e+00, 0.8959477084346864e+00, 0.8437225690290790e-03, 0.6314111623521673e-02, 0.1551726617464312e+00, 0.1094480729873660e+00, 0.4192211096758560e+00, 0.4656834793406785e-01, 0.1476266577866037e+01, 0.1396907091073883e+01, 0.7687090018675194e-01, 0.4985806330739840e+00, 0.2864016483502532e+00, 0.2276100057407134e+00, 0.1729653861709652e+00, 
0.1356625794022653e+00, 0.2657468090128033e+00, 0.3651528072454984e+00, 0.2371551996266359e+00, 0.7355942454424362e-01, 0.6690199413712319e+00, 0.8567663209989688e+00, 0.3551024412099299e-01, 0.4940885660487250e-01, 0.4504758109776356e+00, 0.4808259707342031e+00, 0.4295777776785939e-02, 0.5160120970399038e-02, 0.3999522974286917e+00, 0.3750191107685802e+00, 0.1816316160365039e-01, 0.2922900104287094e-01, 0.7301815407101296e+00, 0.2485339201737921e+00, 0.2127164214993764e+00, 0.4998108187459615e+00, 0.2524424849785568e+00, 0.3107607890010585e+00, 0.9666552191416621e+00, 0.1543981540828483e+00, 0.9269213106461920e+00, 0.1580490693604223e+01, 0.3640096408786760e+00, 0.3130858727896049e+00, 0.7864169332819952e+00, 0.1077686100636843e+01, 0.7535963003344312e-02, 0.7274051012980143e-01, 0.6279274220262862e+00, 0.8147329327002600e-01, 0.3379430237022286e+00, 0.5539901283655780e+00, 0.6437610281386123e-01, 0.3830459800631597e+00, 0.2847358698029713e-01, 0.1927318305890360e-01, 0.6666392685468185e-01, 0.9510720451940254e-01, 0.2827355178816361e-06, 0.3034592154998885e-04, 0.6208664319525956e-03, 0.6211134452527084e-03, 0.4264931214133004e-11, 0.7232819130731082e-07, 0.4059351646211308e-14, 0.3661024252896476e-07, 0.8437225690290790e-03, 0.5160120970399038e-02}, + i0: 1, + n0: 21, + pp: 0, + n0in: 21, + dmin: 4.0593516462113082e-015, + dmin1: 6.2079410376128833e-004, + dmin2: 4.5102919754957636e-002, + dn: 4.0593516462113082e-015, + dn1: 6.2079410376128833e-004, + dn2: 6.6633580933131861e-002, + tau: 3.6605973538398975e-008, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2204217311496068e+01, 0.2088378163269771e+01, 0.1036089580076783e+00, 0.1158391848322702e+00, 0.1963027289351853e+01, 0.1971497461737776e+01, 0.4912301654839819e-01, 0.9513882222772962e-01, 0.1045356293504034e+01, 0.1013569642358705e+01, 0.1293586117415210e+00, 0.8090970429970110e-01, 0.1699791822573021e+01, 0.1671317922533099e+01, 0.8319236989271818e-01, 0.1578325483874163e+00, 0.8190694135595162e+00, 0.8959477084346864e+00, 0.8437225690290790e-03, 0.6314111623521673e-02, 0.1551726617464312e+00, 0.1094480729873660e+00, 0.4192211096758560e+00, 0.4656834793406785e-01, 0.1476266577866037e+01, 0.1396907091073883e+01, 0.7687090018675194e-01, 0.4985806330739840e+00, 0.2864016483502532e+00, 0.2276100057407134e+00, 0.1729653861709652e+00, 0.1356625794022653e+00, 0.2657468090128033e+00, 0.3651528072454984e+00, 0.2371551996266359e+00, 0.7355942454424362e-01, 0.6690199413712319e+00, 0.8567663209989688e+00, 0.3551024412099299e-01, 0.4940885660487250e-01, 0.4504758109776356e+00, 0.4808259707342031e+00, 0.4295777776785939e-02, 0.5160120970399038e-02, 0.3999522974286917e+00, 0.3750191107685802e+00, 0.1816316160365039e-01, 0.2922900104287094e-01, 0.7301815407101296e+00, 0.2485339201737921e+00, 0.2127164214993764e+00, 0.4998108187459615e+00, 0.2524424849785568e+00, 0.3107607890010585e+00, 0.9666552191416621e+00, 0.1543981540828483e+00, 0.9269213106461920e+00, 0.1580490693604223e+01, 0.3640096408786760e+00, 0.3130858727896049e+00, 0.7864169332819952e+00, 0.1077686100636843e+01, 0.7535963003344312e-02, 0.7274051012980143e-01, 0.6279274220262862e+00, 0.8147329327002600e-01, 0.3379430237022286e+00, 0.5539901283655780e+00, 0.6437610281386123e-01, 0.3830459800631597e+00, 0.2847358698029713e-01, 0.1927318305890360e-01, 0.6666392685468185e-01, 0.9510720451940254e-01, 0.2827355178816361e-06, 0.3034592154998885e-04, 0.6208664319525956e-03, 0.6211134452527084e-03, 0.4264931214133004e-11, 0.7232819130731082e-07, 0.4059351646211308e-14, 
0.3661024252896476e-07, 0.8437225690290790e-03, 0.5160120970399038e-02}, + tauOut: 4.0593516183260787e-015, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2204217311496068e+01, 0.2307826269503742e+01, 0.1036089580076783e+00, 0.8812934261040264e-01, 0.1963027289351853e+01, 0.1924020963289845e+01, 0.4912301654839819e-01, 0.2668944646890268e-01, 0.1045356293504034e+01, 0.1148025458776649e+01, 0.1293586117415210e+00, 0.1915312145184878e+00, 0.1699791822573021e+01, 0.1591452977947247e+01, 0.8319236989271818e-01, 0.4281642408847453e-01, 0.8190694135595162e+00, 0.7770967120400667e+00, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1551726617464312e+00, 0.5742252947387723e+00, 0.4192211096758560e+00, 0.1077768810640642e+01, 0.1476266577866037e+01, 0.4753686674121431e+00, 0.7687090018675194e-01, 0.4631342794110950e-01, 0.2864016483502532e+00, 0.4130536065801048e+00, 0.1729653861709652e+00, 0.1112809541240190e+00, 0.2657468090128033e+00, 0.3916210545154161e+00, 0.2371551996266359e+00, 0.4051405202062471e+00, 0.6690199413712319e+00, 0.2993896652859737e+00, 0.3551024412099299e-01, 0.5343038813025977e-01, 0.4504758109776356e+00, 0.4013412006241577e+00, 0.4295777776785939e-02, 0.4280911574482486e-02, 0.3999522974286917e+00, 0.4138345474578556e+00, 0.1816316160365039e-01, 0.3204760309498118e-01, 0.7301815407101296e+00, 0.9108503591145208e+00, 0.2127164214993764e+00, 0.5895442813598011e-01, 0.2524424849785568e+00, 0.1160143275984235e+01, 0.9666552191416621e+00, 0.7723298847804961e+00, 0.9269213106461920e+00, 0.5186010667443678e+00, 0.3640096408786760e+00, 0.5519914319921668e+00, 0.7864169332819952e+00, 0.2419614642931688e+00, 0.7535963003344312e-02, 0.1955698951896722e-01, 0.6279274220262862e+00, 0.9463134562095434e+00, 0.3379430237022286e+00, 0.2298969194226957e-01, 0.6437610281386123e-01, 0.6985999785188474e-01, 0.2847358698029713e-01, 0.2717093011896993e-01, 0.6666392685468185e-01, 0.3949327947122574e-01, 0.2827355178816361e-06, 0.4444831994804014e-08, 0.6208664319525956e-03, 0.6208619913814727e-03, 0.4264931214133004e-11, 0.2788519153273688e-22, 0.4059351646211308e-14, 0.3786532345060857e-28, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1037537856266618 - 321}, + i0: 1, + n0: 21, + pp: 1, + n0in: 21, + dmin: 3.7865323450608567e-029, + dmin1: 6.2086198711654151e-004, + dmin2: 3.9492996735707858e-002, + dn: 3.7865323450608567e-029, + dn1: 6.2086198711654151e-004, + dn2: 3.9492996735707858e-002, + tau: 4.0593516183260787e-015, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2204217311496068e+01, 0.2307826269503742e+01, 0.1036089580076783e+00, 0.8812934261040264e-01, 0.1963027289351853e+01, 0.1924020963289845e+01, 0.4912301654839819e-01, 0.2668944646890268e-01, 0.1045356293504034e+01, 0.1148025458776649e+01, 0.1293586117415210e+00, 0.1915312145184878e+00, 0.1699791822573021e+01, 0.1591452977947247e+01, 0.8319236989271818e-01, 0.4281642408847453e-01, 0.8190694135595162e+00, 0.7770967120400667e+00, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1551726617464312e+00, 0.5742252947387723e+00, 0.4192211096758560e+00, 0.1077768810640642e+01, 0.1476266577866037e+01, 0.4753686674121431e+00, 0.7687090018675194e-01, 0.4631342794110950e-01, 0.2864016483502532e+00, 0.4130536065801048e+00, 0.1729653861709652e+00, 0.1112809541240190e+00, 0.2657468090128033e+00, 0.3916210545154161e+00, 0.2371551996266359e+00, 0.4051405202062471e+00, 0.6690199413712319e+00, 0.2993896652859737e+00, 0.3551024412099299e-01, 0.5343038813025977e-01, 0.4504758109776356e+00, 0.4013412006241577e+00, 
0.4295777776785939e-02, 0.4280911574482486e-02, 0.3999522974286917e+00, 0.4138345474578556e+00, 0.1816316160365039e-01, 0.3204760309498118e-01, 0.7301815407101296e+00, 0.9108503591145208e+00, 0.2127164214993764e+00, 0.5895442813598011e-01, 0.2524424849785568e+00, 0.1160143275984235e+01, 0.9666552191416621e+00, 0.7723298847804961e+00, 0.9269213106461920e+00, 0.5186010667443678e+00, 0.3640096408786760e+00, 0.5519914319921668e+00, 0.7864169332819952e+00, 0.2419614642931688e+00, 0.7535963003344312e-02, 0.1955698951896722e-01, 0.6279274220262862e+00, 0.9463134562095434e+00, 0.3379430237022286e+00, 0.2298969194226957e-01, 0.6437610281386123e-01, 0.6985999785188474e-01, 0.2847358698029713e-01, 0.2717093011896993e-01, 0.6666392685468185e-01, 0.3949327947122574e-01, 0.2827355178816361e-06, 0.4444831994804014e-08, 0.6208664319525956e-03, 0.6208619913814727e-03, 0.4264931214133004e-11, 0.2788519153273688e-22, 0.4059351646211308e-14, 0.3786532345060857e-28, 0.8437225690290790e-03, 0.1684766835109566e-03, 0.1037537856266618 - 321}, + tauOut: 3.7865323450608567e-029, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2395955612114145e+01, 0.2307826269503742e+01, 0.7077038564739886e-01, 0.8812934261040264e-01, 0.1879940024111348e+01, 0.1924020963289845e+01, 0.1629847954401656e-01, 0.2668944646890268e-01, 0.1323258193751120e+01, 0.1148025458776649e+01, 0.2303502998543534e+00, 0.1915312145184878e+00, 0.1403919102181368e+01, 0.1591452977947247e+01, 0.2369972908607684e-01, 0.4281642408847453e-01, 0.7535654596375009e+00, 0.7770967120400667e+00, 0.1283811140869274e-03, 0.1684766835109566e-03, 0.1651865724265327e+01, 0.5742252947387723e+00, 0.3101568824672333e+00, 0.1077768810640642e+01, 0.2115252128860193e+00, 0.4753686674121431e+00, 0.9043805314343908e-01, 0.4631342794110950e-01, 0.4338965075606848e+00, 0.4130536065801048e+00, 0.1004386157577793e+00, 0.1112809541240190e+00, 0.6963229589638840e+00, 0.3916210545154161e+00, 0.1741934301847783e+00, 0.4051405202062471e+00, 0.1786266232314551e+00, 0.2993896652859737e+00, 0.1200482645536405e+00, 0.5343038813025977e-01, 0.2855738476449996e+00, 0.4013412006241577e+00, 0.6203611145567293e-02, 0.4280911574482486e-02, 0.4396785394072695e+00, 0.4138345474578556e+00, 0.6639071087521134e-01, 0.3204760309498118e-01, 0.9034140763752896e+00, 0.9108503591145208e+00, 0.7570790092830114e-01, 0.5895442813598011e-01, 0.1856765259836430e+01, 0.1160143275984235e+01, 0.2157144528657351e+00, 0.7723298847804961e+00, 0.8548780458707995e+00, 0.5186010667443678e+00, 0.1562335771835849e+00, 0.5519914319921668e+00, 0.1052848766285511e+00, 0.2419614642931688e+00, 0.1757806338135458e+00, 0.1955698951896722e-01, 0.7935225143382671e+00, 0.9463134562095434e+00, 0.2023962522401480e-02, 0.2298969194226957e-01, 0.9500696544845319e-01, 0.6985999785188474e-01, 0.1129463646814219e-01, 0.2717093011896993e-01, 0.2819864744791555e-01, 0.3949327947122574e-01, 0.9786381594178541e-10, 0.4444831994804014e-08, 0.6208618935176568e-03, 0.6208619913814727e-03, 0.1700670967075909e-47, 0.2788519153273688e-22}, + i0: 1, + n0: 20, + pp: 0, + n0in: 21, + dmin: 3.7865323450608567e-029, + dmin1: 6.2086189351765679e-004, + dmin2: 2.8198643003083550e-002, + dn: 3.7865323450608567e-029, + dn1: 6.2086189351765679e-004, + dn2: 2.8198643003083550e-002, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2395955612114145e+01, 0.2307826269503742e+01, 0.7077038564739886e-01, 0.8812934261040264e-01, 0.1879940024111348e+01, 0.1924020963289845e+01, 0.1629847954401656e-01, 
0.2668944646890268e-01, 0.1323258193751120e+01, 0.1148025458776649e+01, 0.2303502998543534e+00, 0.1915312145184878e+00, 0.1403919102181368e+01, 0.1591452977947247e+01, 0.2369972908607684e-01, 0.4281642408847453e-01, 0.7535654596375009e+00, 0.7770967120400667e+00, 0.1283811140869274e-03, 0.1684766835109566e-03, 0.1651865724265327e+01, 0.5742252947387723e+00, 0.3101568824672333e+00, 0.1077768810640642e+01, 0.2115252128860193e+00, 0.4753686674121431e+00, 0.9043805314343908e-01, 0.4631342794110950e-01, 0.4338965075606848e+00, 0.4130536065801048e+00, 0.1004386157577793e+00, 0.1112809541240190e+00, 0.6963229589638840e+00, 0.3916210545154161e+00, 0.1741934301847783e+00, 0.4051405202062471e+00, 0.1786266232314551e+00, 0.2993896652859737e+00, 0.1200482645536405e+00, 0.5343038813025977e-01, 0.2855738476449996e+00, 0.4013412006241577e+00, 0.6203611145567293e-02, 0.4280911574482486e-02, 0.4396785394072695e+00, 0.4138345474578556e+00, 0.6639071087521134e-01, 0.3204760309498118e-01, 0.9034140763752896e+00, 0.9108503591145208e+00, 0.7570790092830114e-01, 0.5895442813598011e-01, 0.1856765259836430e+01, 0.1160143275984235e+01, 0.2157144528657351e+00, 0.7723298847804961e+00, 0.8548780458707995e+00, 0.5186010667443678e+00, 0.1562335771835849e+00, 0.5519914319921668e+00, 0.1052848766285511e+00, 0.2419614642931688e+00, 0.1757806338135458e+00, 0.1955698951896722e-01, 0.7935225143382671e+00, 0.9463134562095434e+00, 0.2023962522401480e-02, 0.2298969194226957e-01, 0.9500696544845319e-01, 0.6985999785188474e-01, 0.1129463646814219e-01, 0.2717093011896993e-01, 0.2819864744791555e-01, 0.3949327947122574e-01, 0.9786381594178541e-10, 0.4444831994804014e-08, 0.6208618935176568e-03, 0.6208619913814727e-03, 0.1700670967075909e-47, 0.2788519153273688e-22}, + tauOut: 0.0000000000000000, + ttypeOut: -7, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2395955612114145e+01, 0.2466725997761544e+01, 0.7077038564739886e-01, 0.5393549207373363e-01, 0.1879940024111348e+01, 0.1842303011581631e+01, 0.1629847954401656e-01, 0.1170659574821484e-01, 0.1323258193751120e+01, 0.1541901897857258e+01, 0.2303502998543534e+00, 0.2097365510789915e+00, 0.1403919102181368e+01, 0.1217882280188454e+01, 0.2369972908607684e-01, 0.1466422291592108e-01, 0.7535654596375009e+00, 0.7390296178356669e+00, 0.1283811140869274e-03, 0.2869551596920546e-03, 0.1651865724265327e+01, 0.1961735651572868e+01, 0.3101568824672333e+00, 0.3344283443049241e-01, 0.2115252128860193e+00, 0.2685204315989660e+00, 0.9043805314343908e-01, 0.1461369444993732e+00, 0.4338965075606848e+00, 0.3881981788190908e+00, 0.1004386157577793e+00, 0.1801598202532681e+00, 0.6963229589638840e+00, 0.6903565688953941e+00, 0.1741934301847783e+00, 0.4507175802324546e-01, 0.1786266232314551e+00, 0.2536031297618502e+00, 0.1200482645536405e+00, 0.1351822623162481e+00, 0.2855738476449996e+00, 0.1565951964743188e+00, 0.6203611145567293e-02, 0.1741812487831326e-01, 0.4396785394072695e+00, 0.4886511254041676e+00, 0.6639071087521134e-01, 0.1227425859208231e+00, 0.9034140763752896e+00, 0.8563793913827676e+00, 0.7570790092830114e-01, 0.1641466407918003e+00, 0.1856765259836430e+01, 0.1908333071910365e+01, 0.2157144528657351e+00, 0.9663383852973971e-01, 0.8548780458707995e+00, 0.9144777845246447e+00, 0.1562335771835849e+00, 0.1798735100772441e-01, 0.1052848766285511e+00, 0.2630781594343725e+00, 0.1757806338135458e+00, 0.5302070335887964e+00, 0.7935225143382671e+00, 0.2653394432718723e+00, 0.2023962522401480e-02, 0.7246963929058098e-03, 0.9500696544845319e-01, 0.1055769055236896e+00, 0.1129463646814219e-01, 
0.3016696409481782e-02, 0.2819864744791555e-01, 0.2518195113629758e-01, 0.9786381594178541e-10, 0.2412835834031154e-11, 0.6208618935176568e-03, 0.6208618911048210e-03, 0.1700670967075909e-47, 0.2869551596920546e-03, 0.7811996215926567e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 6.2086189110482101e-004, + dmin1: 2.5181951038433764e-002, + dmin2: 8.7297525620826724e-002, + dn: 6.2086189110482101e-004, + dn1: 2.5181951038433764e-002, + dn2: 9.4282269055547374e-002, + tau: 0.0000000000000000, + ttype: -7, + g: 0.0000000000000000, + zOut: []float64{0.2395955612114145e+01, 0.2466725997761544e+01, 0.7077038564739886e-01, 0.5393549207373363e-01, 0.1879940024111348e+01, 0.1842303011581631e+01, 0.1629847954401656e-01, 0.1170659574821484e-01, 0.1323258193751120e+01, 0.1541901897857258e+01, 0.2303502998543534e+00, 0.2097365510789915e+00, 0.1403919102181368e+01, 0.1217882280188454e+01, 0.2369972908607684e-01, 0.1466422291592108e-01, 0.7535654596375009e+00, 0.7390296178356669e+00, 0.1283811140869274e-03, 0.2869551596920546e-03, 0.1651865724265327e+01, 0.1961735651572868e+01, 0.3101568824672333e+00, 0.3344283443049241e-01, 0.2115252128860193e+00, 0.2685204315989660e+00, 0.9043805314343908e-01, 0.1461369444993732e+00, 0.4338965075606848e+00, 0.3881981788190908e+00, 0.1004386157577793e+00, 0.1801598202532681e+00, 0.6963229589638840e+00, 0.6903565688953941e+00, 0.1741934301847783e+00, 0.4507175802324546e-01, 0.1786266232314551e+00, 0.2536031297618502e+00, 0.1200482645536405e+00, 0.1351822623162481e+00, 0.2855738476449996e+00, 0.1565951964743188e+00, 0.6203611145567293e-02, 0.1741812487831326e-01, 0.4396785394072695e+00, 0.4886511254041676e+00, 0.6639071087521134e-01, 0.1227425859208231e+00, 0.9034140763752896e+00, 0.8563793913827676e+00, 0.7570790092830114e-01, 0.1641466407918003e+00, 0.1856765259836430e+01, 0.1908333071910365e+01, 0.2157144528657351e+00, 0.9663383852973971e-01, 0.8548780458707995e+00, 0.9144777845246447e+00, 0.1562335771835849e+00, 0.1798735100772441e-01, 0.1052848766285511e+00, 0.2630781594343725e+00, 0.1757806338135458e+00, 0.5302070335887964e+00, 0.7935225143382671e+00, 0.2653394432718723e+00, 0.2023962522401480e-02, 0.7246963929058098e-03, 0.9500696544845319e-01, 0.1055769055236896e+00, 0.1129463646814219e-01, 0.3016696409481782e-02, 0.2819864744791555e-01, 0.2518195113629758e-01, 0.9786381594178541e-10, 0.2412835834031154e-11, 0.6208618935176568e-03, 0.6208618911048210e-03, 0.1700670967075909e-47, 0.2869551596920546e-03, 0.7811996215926567e-04}, + tauOut: 6.2086189103875732e-004, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2520040627944239e+01, 0.2466725997761544e+01, 0.3943012599746679e-01, 0.5393549207373363e-01, 0.1813958619441340e+01, 0.1842303011581631e+01, 0.9950845630193767e-02, 0.1170659574821484e-01, 0.1741066741415017e+01, 0.1541901897857258e+01, 0.1467114516582776e+00, 0.2097365510789915e+00, 0.1085214189555058e+01, 0.1217882280188454e+01, 0.9986318978978259e-02, 0.1466422291592108e-01, 0.7287093921253419e+00, 0.7390296178356669e+00, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.1993785121145770e+01, 0.1961735651572868e+01, 0.4504038193447841e-02, 0.3344283443049241e-01, 0.4095324760138526e+00, 0.2685204315989660e+00, 0.1385240464077977e+00, 0.1461369444993732e+00, 0.4292130907735224e+00, 0.3881981788190908e+00, 0.2897733504323247e+00, 0.1801598202532681e+00, 0.4450341145952761e+00, 0.6903565688953941e+00, 0.2568418582687495e-01, 0.4507175802324546e-01, 0.3624803443601846e+00, 0.2536031297618502e+00, 0.5840011260368079e-01, 
0.1351822623162481e+00, 0.1149923468579125e+00, 0.1565951964743188e+00, 0.7401698075381481e-01, 0.1741812487831326e-01, 0.5367558686801371e+00, 0.4886511254041676e+00, 0.1958324578473516e+00, 0.1227425859208231e+00, 0.8240727124361776e+00, 0.8563793913827676e+00, 0.3801199318200257e+00, 0.1641466407918003e+00, 0.1624226116729040e+01, 0.1908333071910365e+01, 0.5440714051978934e-01, 0.9663383852973971e-01, 0.8774371331215411e+00, 0.9144777845246447e+00, 0.5393069221241477e-02, 0.1798735100772441e-01, 0.7872712619108886e+00, 0.2630781594343725e+00, 0.1786993199393658e+00, 0.5302070335887964e+00, 0.8674395783437358e-01, 0.2653394432718723e+00, 0.8820349511059155e-03, 0.7246963929058098e-03, 0.1070907050910267e+00, 0.1055769055236896e+00, 0.7093640994523618e-03, 0.3016696409481782e-02, 0.2385172514821930e-01, 0.2518195113629758e-01, 0.6280626703238275e-13, 0.2412835834031154e-11, 0.3257377007015450e-14, 0.6208618911048210e-03, 0.7725029665513934e-03, 0.2869551596920546e-03}, + i0: 1, + n0: 20, + pp: 0, + n0in: 20, + dmin: 3.2573770070154495e-015, + dmin1: 2.3851725145806461e-002, + dmin2: 8.6019261441467765e-002, + dn: 3.2573770070154495e-015, + dn1: 2.3851725145806461e-002, + dn2: 0.10407400868154487, + tau: 6.2086189103875732e-004, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2520040627944239e+01, 0.2466725997761544e+01, 0.3943012599746679e-01, 0.5393549207373363e-01, 0.1813958619441340e+01, 0.1842303011581631e+01, 0.9950845630193767e-02, 0.1170659574821484e-01, 0.1741066741415017e+01, 0.1541901897857258e+01, 0.1467114516582776e+00, 0.2097365510789915e+00, 0.1085214189555058e+01, 0.1217882280188454e+01, 0.9986318978978259e-02, 0.1466422291592108e-01, 0.7287093921253419e+00, 0.7390296178356669e+00, 0.7725029665513934e-03, 0.2869551596920546e-03, 0.1993785121145770e+01, 0.1961735651572868e+01, 0.4504038193447841e-02, 0.3344283443049241e-01, 0.4095324760138526e+00, 0.2685204315989660e+00, 0.1385240464077977e+00, 0.1461369444993732e+00, 0.4292130907735224e+00, 0.3881981788190908e+00, 0.2897733504323247e+00, 0.1801598202532681e+00, 0.4450341145952761e+00, 0.6903565688953941e+00, 0.2568418582687495e-01, 0.4507175802324546e-01, 0.3624803443601846e+00, 0.2536031297618502e+00, 0.5840011260368079e-01, 0.1351822623162481e+00, 0.1149923468579125e+00, 0.1565951964743188e+00, 0.7401698075381481e-01, 0.1741812487831326e-01, 0.5367558686801371e+00, 0.4886511254041676e+00, 0.1958324578473516e+00, 0.1227425859208231e+00, 0.8240727124361776e+00, 0.8563793913827676e+00, 0.3801199318200257e+00, 0.1641466407918003e+00, 0.1624226116729040e+01, 0.1908333071910365e+01, 0.5440714051978934e-01, 0.9663383852973971e-01, 0.8774371331215411e+00, 0.9144777845246447e+00, 0.5393069221241477e-02, 0.1798735100772441e-01, 0.7872712619108886e+00, 0.2630781594343725e+00, 0.1786993199393658e+00, 0.5302070335887964e+00, 0.8674395783437358e-01, 0.2653394432718723e+00, 0.8820349511059155e-03, 0.7246963929058098e-03, 0.1070907050910267e+00, 0.1055769055236896e+00, 0.7093640994523618e-03, 0.3016696409481782e-02, 0.2385172514821930e-01, 0.2518195113629758e-01, 0.6280626703238275e-13, 0.2412835834031154e-11, 0.3257377007015450e-14, 0.6208618911048210e-03, 0.7725029665513934e-03, 0.2869551596920546e-03}, + tauOut: 3.2573770070067200e-015, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2520040627944239e+01, 0.2559470753941703e+01, 0.3943012599746679e-01, 0.2794508075882943e-01, 0.1813958619441340e+01, 0.1795964384312701e+01, 0.9950845630193767e-02, 0.9646675917972324e-02, 0.1741066741415017e+01, 
0.1878131517155319e+01, 0.1467114516582776e+00, 0.8477220453173254e-01, 0.1085214189555058e+01, 0.1010428304002300e+01, 0.9986318978978259e-02, 0.7202019583097946e-02, 0.7287093921253419e+00, 0.7222798755087921e+00, 0.7725029665513934e-03, 0.2132421202606784e-02, 0.1993785121145770e+01, 0.1996156738136608e+01, 0.4504038193447841e-02, 0.9240506410060376e-03, 0.4095324760138526e+00, 0.5471324717806409e+00, 0.1385240464077977e+00, 0.1086689918286978e+00, 0.4292130907735224e+00, 0.6103174493771462e+00, 0.2897733504323247e+00, 0.2112982785836522e+00, 0.4450341145952761e+00, 0.2594200218384955e+00, 0.2568418582687495e-01, 0.3588779484774172e-01, 0.3624803443601846e+00, 0.3849926621161204e+00, 0.5840011260368079e-01, 0.1744336104525046e-01, 0.1149923468579125e+00, 0.1715659665664736e+00, 0.7401698075381481e-01, 0.2315671901408358e+00, 0.5367558686801371e+00, 0.5010211363866497e+00, 0.1958324578473516e+00, 0.3221025481782661e+00, 0.8240727124361776e+00, 0.8820900960779340e+00, 0.3801199318200257e+00, 0.6999293195746292e+00, 0.1624226116729040e+01, 0.9787039376741966e+00, 0.5440714051978934e-01, 0.4877761656142095e-01, 0.8774371331215411e+00, 0.8340525857813585e+00, 0.5393069221241477e-02, 0.5090576402208483e-02, 0.7872712619108886e+00, 0.9608800054480426e+00, 0.1786993199393658e+00, 0.1613217694817542e-01, 0.8674395783437358e-01, 0.7149381583730083e-01, 0.8820349511059155e-03, 0.1321201613351008e-02, 0.1070907050910267e+00, 0.1064788675771248e+00, 0.7093640994523618e-03, 0.1589006148839502e-03, 0.2385172514821930e-01, 0.2369282453339490e-01, 0.6280626703238275e-13, 0.8634837515442557e-26, 0.3257377007015450e-14, 0.9466330862652142e-28, 0.7725029665513934e-03, 0.9240506410060376e-03, 0.7811996215926567e-04}, + i0: 1, + n0: 20, + pp: 1, + n0in: 20, + dmin: 9.4663308626521417e-029, + dmin1: 2.3692824533332088e-002, + dmin2: 7.0611780886194908e-002, + dn: 9.4663308626521417e-029, + dn1: 2.3692824533332088e-002, + dn2: 0.10576950347767239, + tau: 3.2573770070067200e-015, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2520040627944239e+01, 0.2559470753941703e+01, 0.3943012599746679e-01, 0.2794508075882943e-01, 0.1813958619441340e+01, 0.1795964384312701e+01, 0.9950845630193767e-02, 0.9646675917972324e-02, 0.1741066741415017e+01, 0.1878131517155319e+01, 0.1467114516582776e+00, 0.8477220453173254e-01, 0.1085214189555058e+01, 0.1010428304002300e+01, 0.9986318978978259e-02, 0.7202019583097946e-02, 0.7287093921253419e+00, 0.7222798755087921e+00, 0.7725029665513934e-03, 0.2132421202606784e-02, 0.1993785121145770e+01, 0.1996156738136608e+01, 0.4504038193447841e-02, 0.9240506410060376e-03, 0.4095324760138526e+00, 0.5471324717806409e+00, 0.1385240464077977e+00, 0.1086689918286978e+00, 0.4292130907735224e+00, 0.6103174493771462e+00, 0.2897733504323247e+00, 0.2112982785836522e+00, 0.4450341145952761e+00, 0.2594200218384955e+00, 0.2568418582687495e-01, 0.3588779484774172e-01, 0.3624803443601846e+00, 0.3849926621161204e+00, 0.5840011260368079e-01, 0.1744336104525046e-01, 0.1149923468579125e+00, 0.1715659665664736e+00, 0.7401698075381481e-01, 0.2315671901408358e+00, 0.5367558686801371e+00, 0.5010211363866497e+00, 0.1958324578473516e+00, 0.3221025481782661e+00, 0.8240727124361776e+00, 0.8820900960779340e+00, 0.3801199318200257e+00, 0.6999293195746292e+00, 0.1624226116729040e+01, 0.9787039376741966e+00, 0.5440714051978934e-01, 0.4877761656142095e-01, 0.8774371331215411e+00, 0.8340525857813585e+00, 0.5393069221241477e-02, 0.5090576402208483e-02, 0.7872712619108886e+00, 0.9608800054480426e+00, 
0.1786993199393658e+00, 0.1613217694817542e-01, 0.8674395783437358e-01, 0.7149381583730083e-01, 0.8820349511059155e-03, 0.1321201613351008e-02, 0.1070907050910267e+00, 0.1064788675771248e+00, 0.7093640994523618e-03, 0.1589006148839502e-03, 0.2385172514821930e-01, 0.2369282453339490e-01, 0.6280626703238275e-13, 0.8634837515442557e-26, 0.3257377007015450e-14, 0.9466330862652142e-28, 0.7725029665513934e-03, 0.9240506410060376e-03, 0.7811996215926567e-04}, + tauOut: 9.4663308626521417e-029, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2587415834700532e+01, 0.2559470753941703e+01, 0.1939710234687058e-01, 0.2794508075882943e-01, 0.1786213957883803e+01, 0.1795964384312701e+01, 0.1014308840067055e-01, 0.9646675917972324e-02, 0.1952760633286381e+01, 0.1878131517155319e+01, 0.4386417535844126e-01, 0.8477220453173254e-01, 0.9737661482269571e+00, 0.1010428304002300e+01, 0.5342015449359675e-02, 0.7202019583097946e-02, 0.7190702812620393e+00, 0.7222798755087921e+00, 0.5919653562455767e-02, 0.2132421202606784e-02, 0.1991161135215158e+01, 0.1996156738136608e+01, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.6555475524103290e+00, 0.5471324717806409e+00, 0.1011712753337584e+00, 0.1086689918286978e+00, 0.7204444526270400e+00, 0.6103174493771462e+00, 0.7608498315828412e-01, 0.2112982785836522e+00, 0.2192228335279531e+00, 0.2594200218384955e+00, 0.6302508481237896e-01, 0.3588779484774172e-01, 0.3394109383489919e+00, 0.3849926621161204e+00, 0.8817297145618809e-02, 0.1744336104525046e-01, 0.3943158595616905e+00, 0.1715659665664736e+00, 0.2942312715577539e+00, 0.2315671901408358e+00, 0.5288924130071619e+00, 0.5010211363866497e+00, 0.5372046576619481e+00, 0.3221025481782661e+00, 0.1044814757990615e+01, 0.8820900960779340e+00, 0.6556411803358774e+00, 0.6999293195746292e+00, 0.3718403738997403e+00, 0.9787039376741966e+00, 0.1094101127175453e+00, 0.4877761656142095e-01, 0.7297330494660218e+00, 0.8340525857813585e+00, 0.6703044469024726e-02, 0.5090576402208483e-02, 0.9703091379271934e+00, 0.9608800054480426e+00, 0.1188642714683105e-02, 0.1613217694817542e-01, 0.7162637473596872e-01, 0.7149381583730083e-01, 0.1964081696850700e-02, 0.1321201613351008e-02, 0.1046736864951580e+00, 0.1064788675771248e+00, 0.3596705640885345e-04, 0.1589006148839502e-03, 0.2365685747698604e-01, 0.2369282453339490e-01, 0.3455244592226135e-52, 0.8634837515442557e-26}, + i0: 1, + n0: 19, + pp: 0, + n0in: 20, + dmin: 9.4663308626521417e-029, + dmin1: 2.3656857476986041e-002, + dmin2: 7.0305173122617720e-002, + dn: 9.4663308626521417e-029, + dn1: 2.3656857476986041e-002, + dn2: 0.10451478588027406, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2587415834700532e+01, 0.2559470753941703e+01, 0.1939710234687058e-01, 0.2794508075882943e-01, 0.1786213957883803e+01, 0.1795964384312701e+01, 0.1014308840067055e-01, 0.9646675917972324e-02, 0.1952760633286381e+01, 0.1878131517155319e+01, 0.4386417535844126e-01, 0.8477220453173254e-01, 0.9737661482269571e+00, 0.1010428304002300e+01, 0.5342015449359675e-02, 0.7202019583097946e-02, 0.7190702812620393e+00, 0.7222798755087921e+00, 0.5919653562455767e-02, 0.2132421202606784e-02, 0.1991161135215158e+01, 0.1996156738136608e+01, 0.2539111990097616e-03, 0.9240506410060376e-03, 0.6555475524103290e+00, 0.5471324717806409e+00, 0.1011712753337584e+00, 0.1086689918286978e+00, 0.7204444526270400e+00, 0.6103174493771462e+00, 0.7608498315828412e-01, 0.2112982785836522e+00, 0.2192228335279531e+00, 0.2594200218384955e+00, 0.6302508481237896e-01, 
0.3588779484774172e-01, 0.3394109383489919e+00, 0.3849926621161204e+00, 0.8817297145618809e-02, 0.1744336104525046e-01, 0.3943158595616905e+00, 0.1715659665664736e+00, 0.2942312715577539e+00, 0.2315671901408358e+00, 0.5288924130071619e+00, 0.5010211363866497e+00, 0.5372046576619481e+00, 0.3221025481782661e+00, 0.1044814757990615e+01, 0.8820900960779340e+00, 0.6556411803358774e+00, 0.6999293195746292e+00, 0.3718403738997403e+00, 0.9787039376741966e+00, 0.1094101127175453e+00, 0.4877761656142095e-01, 0.7297330494660218e+00, 0.8340525857813585e+00, 0.6703044469024726e-02, 0.5090576402208483e-02, 0.9703091379271934e+00, 0.9608800054480426e+00, 0.1188642714683105e-02, 0.1613217694817542e-01, 0.7162637473596872e-01, 0.7149381583730083e-01, 0.1964081696850700e-02, 0.1321201613351008e-02, 0.1046736864951580e+00, 0.1064788675771248e+00, 0.3596705640885345e-04, 0.1589006148839502e-03, 0.2365685747698604e-01, 0.2369282453339490e-01, 0.3455244592226135e-52, 0.8634837515442557e-26}, + tauOut: 1.1828428738493020e-002, + ttypeOut: -9, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2587415834700532e+01, 0.2594984508308910e+01, 0.1939710234687058e-01, 0.1335166928493912e-01, 0.1786213957883803e+01, 0.1771176948261041e+01, 0.1014308840067055e-01, 0.1118297285215907e-01, 0.1952760633286381e+01, 0.1973613407054170e+01, 0.4386417535844126e-01, 0.2164225725832272e-01, 0.9737661482269571e+00, 0.9456374776795010e+00, 0.5342015449359675e-02, 0.4062111160297227e-02, 0.7190702812620393e+00, 0.7090993949257048e+00, 0.5919653562455767e-02, 0.1662247097070905e-01, 0.1991161135215158e+01, 0.1962964146704966e+01, 0.2539111990097616e-03, 0.8479567256479229e-04, 0.6555475524103290e+00, 0.7448056033330296e+00, 0.1011712753337584e+00, 0.9786215859981671e-01, 0.7204444526270400e+00, 0.6868388484470145e+00, 0.7608498315828412e-01, 0.2428454015756268e-01, 0.2192228335279531e+00, 0.2461349494442763e+00, 0.6302508481237896e-01, 0.8690924724014965e-01, 0.3394109383489919e+00, 0.2494905595159681e+00, 0.8817297145618809e-02, 0.1393559784278329e-01, 0.3943158595616905e+00, 0.6627831045381681e+00, 0.2942312715577539e+00, 0.2347927793131975e+00, 0.5288924130071619e+00, 0.8194758626174196e+00, 0.5372046576619481e+00, 0.6849248159595133e+00, 0.1044814757990615e+01, 0.1003702693628486e+01, 0.6556411803358774e+00, 0.2428944977310168e+00, 0.3718403738997403e+00, 0.2265275601477758e+00, 0.1094101127175453e+00, 0.3524523689025369e+00, 0.7297330494660218e+00, 0.3721552962940165e+00, 0.6703044469024726e-02, 0.1747664312451059e-01, 0.9703091379271934e+00, 0.9421927087788731e+00, 0.1188642714683105e-02, 0.9036173567869616e-04, 0.7162637473596872e-01, 0.6167166595864771e-01, 0.1964081696850700e-02, 0.3333583884775863e-02, 0.1046736864951580e+00, 0.8954764092829798e-01, 0.3596705640885345e-04, 0.9501841907954501e-05, 0.2365685747698604e-01, 0.1181892689658507e-01, 0.3455244592226135e-52, 0.8479567256479229e-04, 0.6989818532012803e-03}, + i0: 1, + n0: 19, + pp: 1, + n0in: 19, + dmin: 1.1818926896585069e-002, + dmin1: 5.9707584261797009e-002, + dmin2: 5.9707584261797009e-002, + dn: 1.1818926896585069e-002, + dn1: 8.9511673871889130e-002, + dn2: 5.9707584261797009e-002, + tau: 1.1828428738493020e-002, + ttype: -9, + g: 0.0000000000000000, + zOut: []float64{0.2587415834700532e+01, 0.2594984508308910e+01, 0.1939710234687058e-01, 0.1335166928493912e-01, 0.1786213957883803e+01, 0.1771176948261041e+01, 0.1014308840067055e-01, 0.1118297285215907e-01, 0.1952760633286381e+01, 0.1973613407054170e+01, 0.4386417535844126e-01, 0.2164225725832272e-01, 
0.9737661482269571e+00, 0.9456374776795010e+00, 0.5342015449359675e-02, 0.4062111160297227e-02, 0.7190702812620393e+00, 0.7090993949257048e+00, 0.5919653562455767e-02, 0.1662247097070905e-01, 0.1991161135215158e+01, 0.1962964146704966e+01, 0.2539111990097616e-03, 0.8479567256479229e-04, 0.6555475524103290e+00, 0.7448056033330296e+00, 0.1011712753337584e+00, 0.9786215859981671e-01, 0.7204444526270400e+00, 0.6868388484470145e+00, 0.7608498315828412e-01, 0.2428454015756268e-01, 0.2192228335279531e+00, 0.2461349494442763e+00, 0.6302508481237896e-01, 0.8690924724014965e-01, 0.3394109383489919e+00, 0.2494905595159681e+00, 0.8817297145618809e-02, 0.1393559784278329e-01, 0.3943158595616905e+00, 0.6627831045381681e+00, 0.2942312715577539e+00, 0.2347927793131975e+00, 0.5288924130071619e+00, 0.8194758626174196e+00, 0.5372046576619481e+00, 0.6849248159595133e+00, 0.1044814757990615e+01, 0.1003702693628486e+01, 0.6556411803358774e+00, 0.2428944977310168e+00, 0.3718403738997403e+00, 0.2265275601477758e+00, 0.1094101127175453e+00, 0.3524523689025369e+00, 0.7297330494660218e+00, 0.3721552962940165e+00, 0.6703044469024726e-02, 0.1747664312451059e-01, 0.9703091379271934e+00, 0.9421927087788731e+00, 0.1188642714683105e-02, 0.9036173567869616e-04, 0.7162637473596872e-01, 0.6167166595864771e-01, 0.1964081696850700e-02, 0.3333583884775863e-02, 0.1046736864951580e+00, 0.8954764092829798e-01, 0.3596705640885345e-04, 0.9501841907954501e-05, 0.2365685747698604e-01, 0.1181892689658507e-01, 0.3455244592226135e-52, 0.8479567256479229e-04, 0.6989818532012803e-03}, + tauOut: 1.1689473904977585e-002, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2596646703688871e+01, 0.2594984508308910e+01, 0.9107195378059658e-02, 0.1335166928493912e-01, 0.1761563251830163e+01, 0.1771176948261041e+01, 0.1252913577120413e-01, 0.1118297285215907e-01, 0.1971037054636311e+01, 0.1973613407054170e+01, 0.1038322923301279e-01, 0.2164225725832272e-01, 0.9276268857018078e+00, 0.9456374776795010e+00, 0.3105171497598932e-02, 0.4062111160297227e-02, 0.7109272204938374e+00, 0.7090993949257048e+00, 0.4589684232723624e-01, 0.1662247097070905e-01, 0.1905462626145317e+01, 0.1962964146704966e+01, 0.3314486004504485e-04, 0.8479567256479229e-04, 0.8309451431678238e+00, 0.7448056033330296e+00, 0.8089045693556918e-01, 0.9786215859981671e-01, 0.6185434577640304e+00, 0.6868388484470145e+00, 0.9663466631053550e-02, 0.2428454015756268e-01, 0.3116912561483949e+00, 0.2461349494442763e+00, 0.6956575230565126e-01, 0.8690924724014965e-01, 0.1821709311481225e+00, 0.2494905595159681e+00, 0.5070116699532772e-01, 0.1393559784278329e-01, 0.8351852429510604e+00, 0.6627831045381681e+00, 0.2303764547900405e+00, 0.2347927793131975e+00, 0.1262334749881915e+01, 0.8194758626174196e+00, 0.5445947541061251e+00, 0.6849248159595133e+00, 0.6903129633483998e+00, 0.1003702693628486e+01, 0.7970630839299049e-01, 0.2428944977310168e+00, 0.4875841467523446e+00, 0.2265275601477758e+00, 0.2690141110044630e+00, 0.3524523689025369e+00, 0.1089283545090865e+00, 0.3721552962940165e+00, 0.1511669372043137e+00, 0.1747664312451059e-01, 0.7794266594052606e+00, 0.9421927087788731e+00, 0.7149818024536710e-05, 0.9036173567869616e-04, 0.5330862612042146e-01, 0.6167166595864771e-01, 0.5599742376476567e-02, 0.3333583884775863e-02, 0.7226792648875177e-01, 0.8954764092829798e-01, 0.1553961492315709e-05, 0.9501841907954501e-05, 0.1278990301151681e-03, 0.1181892689658507e-01, 0.7149818024536710e-05, 0.8479567256479229e-04}, + i0: 1, + n0: 19, + pp: 0, + n0in: 19, + dmin: 
1.2789903011516807e-004, + dmin1: 4.9975042235645591e-002, + dmin2: 4.9975042235645591e-002, + dn: 1.2789903011516807e-004, + dn1: 7.2258424646843816e-002, + dn2: 4.9975042235645591e-002, + tau: 1.1689473904977585e-002, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2596646703688871e+01, 0.2594984508308910e+01, 0.9107195378059658e-02, 0.1335166928493912e-01, 0.1761563251830163e+01, 0.1771176948261041e+01, 0.1252913577120413e-01, 0.1118297285215907e-01, 0.1971037054636311e+01, 0.1973613407054170e+01, 0.1038322923301279e-01, 0.2164225725832272e-01, 0.9276268857018078e+00, 0.9456374776795010e+00, 0.3105171497598932e-02, 0.4062111160297227e-02, 0.7109272204938374e+00, 0.7090993949257048e+00, 0.4589684232723624e-01, 0.1662247097070905e-01, 0.1905462626145317e+01, 0.1962964146704966e+01, 0.3314486004504485e-04, 0.8479567256479229e-04, 0.8309451431678238e+00, 0.7448056033330296e+00, 0.8089045693556918e-01, 0.9786215859981671e-01, 0.6185434577640304e+00, 0.6868388484470145e+00, 0.9663466631053550e-02, 0.2428454015756268e-01, 0.3116912561483949e+00, 0.2461349494442763e+00, 0.6956575230565126e-01, 0.8690924724014965e-01, 0.1821709311481225e+00, 0.2494905595159681e+00, 0.5070116699532772e-01, 0.1393559784278329e-01, 0.8351852429510604e+00, 0.6627831045381681e+00, 0.2303764547900405e+00, 0.2347927793131975e+00, 0.1262334749881915e+01, 0.8194758626174196e+00, 0.5445947541061251e+00, 0.6849248159595133e+00, 0.6903129633483998e+00, 0.1003702693628486e+01, 0.7970630839299049e-01, 0.2428944977310168e+00, 0.4875841467523446e+00, 0.2265275601477758e+00, 0.2690141110044630e+00, 0.3524523689025369e+00, 0.1089283545090865e+00, 0.3721552962940165e+00, 0.1511669372043137e+00, 0.1747664312451059e-01, 0.7794266594052606e+00, 0.9421927087788731e+00, 0.7149818024536710e-05, 0.9036173567869616e-04, 0.5330862612042146e-01, 0.6167166595864771e-01, 0.5599742376476567e-02, 0.3333583884775863e-02, 0.7226792648875177e-01, 0.8954764092829798e-01, 0.1553961492315709e-05, 0.9501841907954501e-05, 0.1278990301151681e-03, 0.1181892689658507e-01, 0.7149818024536710e-05, 0.8479567256479229e-04}, + tauOut: 1.1689473904977585e-002, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2596646703688871e+01, 0.2605626003858251e+01, 0.9107195378059658e-02, 0.6157023564192275e-02, 0.1761563251830163e+01, 0.1767807468828494e+01, 0.1252913577120413e-01, 0.1396950250695455e-01, 0.1971037054636311e+01, 0.1967322886153689e+01, 0.1038322923301279e-01, 0.4895872794820515e-02, 0.9276268857018078e+00, 0.9257082891959054e+00, 0.3105171497598932e-02, 0.2384715539127593e-02, 0.7109272204938374e+00, 0.7543114520732653e+00, 0.4589684232723624e-01, 0.1159397984377132e+00, 0.1905462626145317e+01, 0.1789428077358968e+01, 0.3314486004504485e-04, 0.1539126429493388e-04, 0.8309451431678238e+00, 0.9116923136304173e+00, 0.8089045693556918e-01, 0.5488064578914756e-01, 0.6185434577640304e+00, 0.5731983833972557e+00, 0.9663466631053550e-02, 0.5254756712901782e-02, 0.3116912561483949e+00, 0.3758743565324636e+00, 0.6956575230565126e-01, 0.3371567560620648e-01, 0.1821709311481225e+00, 0.1990285273285630e+00, 0.5070116699532772e-01, 0.2127577741907859e+00, 0.8351852429510604e+00, 0.8526760283416343e+00, 0.2303764547900405e+00, 0.3410582621885915e+00, 0.1262334749881915e+01, 0.1465743346590768e+01, 0.5445947541061251e+00, 0.2564847518533230e+00, 0.6903129633483998e+00, 0.5134066246793865e+00, 0.7970630839299049e-01, 0.7569737222001199e-01, 0.4875841467523446e+00, 0.6807729903281149e+00, 0.2690141110044630e+00, 0.4304410555024735e-01, 
0.1089283545090865e+00, 0.2169232909544721e+00, 0.1511669372043137e+00, 0.5431576312495270e+00, 0.7794266594052606e+00, 0.2361482827650774e+00, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.5330862612042146e-01, 0.5877885927277143e-01, 0.5599742376476567e-02, 0.6884818375619963e-02, 0.7226792648875177e-01, 0.6525676686594341e-01, 0.1553961492315709e-05, 0.3045663725752605e-08, 0.1278990301151681e-03, 0.7757707209639971e-09, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.6989818532012803e-03}, + i0: 1, + n0: 19, + pp: 1, + n0in: 19, + dmin: 7.7577072096399712e-010, + dmin1: 5.3179116896294863e-002, + dmin2: 5.3179116896294863e-002, + dn: 7.7577072096399712e-010, + dn1: 6.5255212904451090e-002, + dn2: 5.3179116896294863e-002, + tau: 1.2789520868072135e-004, + ttype: -15, + g: 0.0000000000000000, + zOut: []float64{0.2596646703688871e+01, 0.2605626003858251e+01, 0.9107195378059658e-02, 0.6157023564192275e-02, 0.1761563251830163e+01, 0.1767807468828494e+01, 0.1252913577120413e-01, 0.1396950250695455e-01, 0.1971037054636311e+01, 0.1967322886153689e+01, 0.1038322923301279e-01, 0.4895872794820515e-02, 0.9276268857018078e+00, 0.9257082891959054e+00, 0.3105171497598932e-02, 0.2384715539127593e-02, 0.7109272204938374e+00, 0.7543114520732653e+00, 0.4589684232723624e-01, 0.1159397984377132e+00, 0.1905462626145317e+01, 0.1789428077358968e+01, 0.3314486004504485e-04, 0.1539126429493388e-04, 0.8309451431678238e+00, 0.9116923136304173e+00, 0.8089045693556918e-01, 0.5488064578914756e-01, 0.6185434577640304e+00, 0.5731983833972557e+00, 0.9663466631053550e-02, 0.5254756712901782e-02, 0.3116912561483949e+00, 0.3758743565324636e+00, 0.6956575230565126e-01, 0.3371567560620648e-01, 0.1821709311481225e+00, 0.1990285273285630e+00, 0.5070116699532772e-01, 0.2127577741907859e+00, 0.8351852429510604e+00, 0.8526760283416343e+00, 0.2303764547900405e+00, 0.3410582621885915e+00, 0.1262334749881915e+01, 0.1465743346590768e+01, 0.5445947541061251e+00, 0.2564847518533230e+00, 0.6903129633483998e+00, 0.5134066246793865e+00, 0.7970630839299049e-01, 0.7569737222001199e-01, 0.4875841467523446e+00, 0.6807729903281149e+00, 0.2690141110044630e+00, 0.4304410555024735e-01, 0.1089283545090865e+00, 0.2169232909544721e+00, 0.1511669372043137e+00, 0.5431576312495270e+00, 0.7794266594052606e+00, 0.2361482827650774e+00, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.5330862612042146e-01, 0.5877885927277143e-01, 0.5599742376476567e-02, 0.6884818375619963e-02, 0.7226792648875177e-01, 0.6525676686594341e-01, 0.1553961492315709e-05, 0.3045663725752605e-08, 0.1278990301151681e-03, 0.7757707209639971e-09, 0.7149818024536710e-05, 0.1614015445872399e-05, 0.6989818532012803e-03}, + tauOut: 1.2789520868072135e-004, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2611783026646672e+01, 0.2605626003858251e+01, 0.4167433562238479e-02, 0.6157023564192275e-02, 0.1777609536997439e+01, 0.1767807468828494e+01, 0.1546038171944878e-01, 0.1396950250695455e-01, 0.1956758376453290e+01, 0.1967322886153689e+01, 0.2316152103168096e-02, 0.4895872794820515e-02, 0.9257768518560943e+00, 0.9257082891959054e+00, 0.1943036529261404e-02, 0.2384715539127593e-02, 0.8683082132059464e+00, 0.7543114520732653e+00, 0.2389312083572061e+00, 0.1159397984377132e+00, 0.1550512259490286e+01, 0.1789428077358968e+01, 0.9049975109102588e-05, 0.1539126429493388e-04, 0.9665639086686850e+00, 0.9116923136304173e+00, 0.3254569838994440e-01, 0.5488064578914756e-01, 0.5459074409444424e+00, 0.5731983833972557e+00, 0.3618064437406363e-02, 0.5254756712901782e-02, 
0.4059719669254931e+00, 0.3758743565324636e+00, 0.1652917395900484e-01, 0.3371567560620648e-01, 0.3952571267845734e+00, 0.1990285273285630e+00, 0.4589757947481138e+00, 0.2127577741907859e+00, 0.7347584950063413e+00, 0.8526760283416343e+00, 0.6803648845168142e+00, 0.3410582621885915e+00, 0.1041863213151506e+01, 0.1465743346590768e+01, 0.1263898840735783e+00, 0.2564847518533230e+00, 0.4627141120500496e+00, 0.5134066246793865e+00, 0.1113705527974558e+00, 0.7569737222001199e-01, 0.6124465423051357e+00, 0.6807729903281149e+00, 0.1524585149425051e-01, 0.4304410555024735e-01, 0.7448350699339780e+00, 0.2169232909544721e+00, 0.1722069046798406e+00, 0.5431576312495270e+00, 0.6394299132491200e-01, 0.2361482827650774e+00, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6566219320748727e-01, 0.5877885927277143e-01, 0.6842308575232983e-02, 0.6884818375619963e-02, 0.5841446056060346e-01, 0.6525676686594341e-01, 0.4044780558898965e-16, 0.3045663725752605e-08, 0.1013559914197710e-18, 0.7757707209639971e-09, 0.1483665133446019e-05, 0.1614015445872399e-05}, + i0: 1, + n0: 19, + pp: 0, + n0in: 19, + dmin: 1.0135599141977102e-019, + dmin1: 5.8414457514939733e-002, + dmin2: 5.8777374831867304e-002, + dn: 1.0135599141977102e-019, + dn1: 5.8414457514939733e-002, + dn2: 5.8777374831867304e-002, + tau: 7.7577068041483555e-010, + ttype: -15, + g: 0.0000000000000000, + zOut: []float64{0.2611783026646672e+01, 0.2605626003858251e+01, 0.4167433562238479e-02, 0.6157023564192275e-02, 0.1777609536997439e+01, 0.1767807468828494e+01, 0.1546038171944878e-01, 0.1396950250695455e-01, 0.1956758376453290e+01, 0.1967322886153689e+01, 0.2316152103168096e-02, 0.4895872794820515e-02, 0.9257768518560943e+00, 0.9257082891959054e+00, 0.1943036529261404e-02, 0.2384715539127593e-02, 0.8683082132059464e+00, 0.7543114520732653e+00, 0.2389312083572061e+00, 0.1159397984377132e+00, 0.1550512259490286e+01, 0.1789428077358968e+01, 0.9049975109102588e-05, 0.1539126429493388e-04, 0.9665639086686850e+00, 0.9116923136304173e+00, 0.3254569838994440e-01, 0.5488064578914756e-01, 0.5459074409444424e+00, 0.5731983833972557e+00, 0.3618064437406363e-02, 0.5254756712901782e-02, 0.4059719669254931e+00, 0.3758743565324636e+00, 0.1652917395900484e-01, 0.3371567560620648e-01, 0.3952571267845734e+00, 0.1990285273285630e+00, 0.4589757947481138e+00, 0.2127577741907859e+00, 0.7347584950063413e+00, 0.8526760283416343e+00, 0.6803648845168142e+00, 0.3410582621885915e+00, 0.1041863213151506e+01, 0.1465743346590768e+01, 0.1263898840735783e+00, 0.2564847518533230e+00, 0.4627141120500496e+00, 0.5134066246793865e+00, 0.1113705527974558e+00, 0.7569737222001199e-01, 0.6124465423051357e+00, 0.6807729903281149e+00, 0.1524585149425051e-01, 0.4304410555024735e-01, 0.7448350699339780e+00, 0.2169232909544721e+00, 0.1722069046798406e+00, 0.5431576312495270e+00, 0.6394299132491200e-01, 0.2361482827650774e+00, 0.1483665133446019e-05, 0.1614015445872399e-05, 0.6566219320748727e-01, 0.5877885927277143e-01, 0.6842308575232983e-02, 0.6884818375619963e-02, 0.5841446056060346e-01, 0.6525676686594341e-01, 0.4044780558898965e-16, 0.3045663725752605e-08, 0.1013559914197710e-18, 0.7757707209639971e-09, 0.1483665133446019e-05, 0.1614015445872399e-05}, + tauOut: 1.0135599141977091e-019, + ttypeOut: -2, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2611783026646672e+01, 0.2615950460208911e+01, 0.4167433562238479e-02, 0.2831884532112553e-02, 0.1777609536997439e+01, 0.1790238034184775e+01, 0.1546038171944878e-01, 0.1689844079671380e-01, 0.1956758376453290e+01, 0.1942176087759744e+01, 
0.2316152103168096e-02, 0.1104039956008399e-02, 0.9257768518560943e+00, 0.9266158484293474e+00, 0.1943036529261404e-02, 0.1820770257466081e-02, 0.8683082132059464e+00, 0.1105418651305687e+01, 0.2389312083572061e+00, 0.3351361651941490e+00, 0.1550512259490286e+01, 0.1215385144271246e+01, 0.9049975109102588e-05, 0.7197207696703830e-05, 0.9665639086686850e+00, 0.9991024098509327e+00, 0.3254569838994440e-01, 0.1778290067827487e-01, 0.5459074409444424e+00, 0.5317426047035739e+00, 0.3618064437406363e-02, 0.2762300261676148e-02, 0.4059719669254931e+00, 0.4197388406228219e+00, 0.1652917395900484e-01, 0.1556509232613395e-01, 0.3952571267845734e+00, 0.8386678292065533e+00, 0.4589757947481138e+00, 0.4021095747913879e+00, 0.7347584950063413e+00, 0.1013013804731768e+01, 0.6803648845168142e+00, 0.6997408538631263e+00, 0.1041863213151506e+01, 0.4685122433619579e+00, 0.1263898840735783e+00, 0.1248257304047288e+00, 0.4627141120500496e+00, 0.4492589344427766e+00, 0.1113705527974558e+00, 0.1518244930621437e+00, 0.6124465423051357e+00, 0.4758679007372426e+00, 0.1524585149425051e-01, 0.2386301922514691e-01, 0.7448350699339780e+00, 0.8931789553886716e+00, 0.1722069046798406e+00, 0.1232835205710967e-01, 0.6394299132491200e-01, 0.5161612293293578e-01, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6566219320748727e-01, 0.7250261437426894e-01, 0.6842308575232983e-02, 0.5512763475647510e-02, 0.5841446056060346e-01, 0.5290169708495600e-01, 0.4044780558898965e-16, 0.7749519698096867e-34, 0.2434478048112329e-01}, + i0: 1, + n0: 18, + pp: 1, + n0in: 19, + dmin: 1.0135599141977093e-019, + dmin1: 5.1614639267802333e-002, + dmin2: 5.1614639267802333e-002, + dn: 1.0135599141977093e-019, + dn1: 5.2901697084955956e-002, + dn2: 6.5660305799035965e-002, + tau: 0.0000000000000000, + ttype: -2, + g: 0.0000000000000000, + zOut: []float64{0.2611783026646672e+01, 0.2615950460208911e+01, 0.4167433562238479e-02, 0.2831884532112553e-02, 0.1777609536997439e+01, 0.1790238034184775e+01, 0.1546038171944878e-01, 0.1689844079671380e-01, 0.1956758376453290e+01, 0.1942176087759744e+01, 0.2316152103168096e-02, 0.1104039956008399e-02, 0.9257768518560943e+00, 0.9266158484293474e+00, 0.1943036529261404e-02, 0.1820770257466081e-02, 0.8683082132059464e+00, 0.1105418651305687e+01, 0.2389312083572061e+00, 0.3351361651941490e+00, 0.1550512259490286e+01, 0.1215385144271246e+01, 0.9049975109102588e-05, 0.7197207696703830e-05, 0.9665639086686850e+00, 0.9991024098509327e+00, 0.3254569838994440e-01, 0.1778290067827487e-01, 0.5459074409444424e+00, 0.5317426047035739e+00, 0.3618064437406363e-02, 0.2762300261676148e-02, 0.4059719669254931e+00, 0.4197388406228219e+00, 0.1652917395900484e-01, 0.1556509232613395e-01, 0.3952571267845734e+00, 0.8386678292065533e+00, 0.4589757947481138e+00, 0.4021095747913879e+00, 0.7347584950063413e+00, 0.1013013804731768e+01, 0.6803648845168142e+00, 0.6997408538631263e+00, 0.1041863213151506e+01, 0.4685122433619579e+00, 0.1263898840735783e+00, 0.1248257304047288e+00, 0.4627141120500496e+00, 0.4492589344427766e+00, 0.1113705527974558e+00, 0.1518244930621437e+00, 0.6124465423051357e+00, 0.4758679007372426e+00, 0.1524585149425051e-01, 0.2386301922514691e-01, 0.7448350699339780e+00, 0.8931789553886716e+00, 0.1722069046798406e+00, 0.1232835205710967e-01, 0.6394299132491200e-01, 0.5161612293293578e-01, 0.1483665133446019e-05, 0.1887408451311279e-05, 0.6566219320748727e-01, 0.7250261437426894e-01, 0.6842308575232983e-02, 0.5512763475647510e-02, 0.5841446056060346e-01, 0.5290169708495600e-01, 0.4044780558898965e-16, 
0.7749519698096867e-34, 0.2434478048112329e-01}, + tauOut: 1.2903659816950583e-002, + ttypeOut: -9, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2605878684924073e+01, 0.2615950460208911e+01, 0.1945503997226627e-02, 0.2831884532112553e-02, 0.1792287311167312e+01, 0.1790238034184775e+01, 0.1831165540887852e-01, 0.1689844079671380e-01, 0.1912064812489923e+01, 0.1942176087759744e+01, 0.5350346462390188e-03, 0.1104039956008399e-02, 0.9149979242236238e+00, 0.9266158484293474e+00, 0.2199691768758329e-02, 0.1820770257466081e-02, 0.1425451464914127e+01, 0.1105418651305687e+01, 0.2857477273065492e+00, 0.3351361651941490e+00, 0.9167409543554429e+00, 0.1215385144271246e+01, 0.7843816205451699e-05, 0.7197207696703830e-05, 0.1003973806896052e+01, 0.9991024098509327e+00, 0.9418498631040351e-02, 0.1778290067827487e-01, 0.5121827465172590e+00, 0.5317426047035739e+00, 0.2263732461064059e-02, 0.2762300261676148e-02, 0.4201365406709412e+00, 0.4197388406228219e+00, 0.3107071375346623e-01, 0.1556509232613395e-01, 0.1196803030427524e+01, 0.8386678292065533e+00, 0.3403588894097180e+00, 0.4021095747913879e+00, 0.1359492109368225e+01, 0.1013013804731768e+01, 0.2411467892724848e+00, 0.6997408538631263e+00, 0.3392875246772514e+00, 0.4685122433619579e+00, 0.1652848117124708e+00, 0.1248257304047288e+00, 0.4228949559754990e+00, 0.4492589344427766e+00, 0.1708424320817961e+00, 0.1518244930621437e+00, 0.3159848280636428e+00, 0.4758679007372426e+00, 0.6745243660763246e-01, 0.2386301922514691e-01, 0.8251512110211983e+00, 0.8931789553886716e+00, 0.7711819686391232e-03, 0.1232835205710967e-01, 0.3794316855579739e-01, 0.5161612293293578e-01, 0.3606500256058598e-05, 0.1887408451311279e-05, 0.6510811153270980e-01, 0.7250261437426894e-01, 0.4479235177066975e-02, 0.5512763475647510e-02, 0.3551880209093845e-01, 0.5290169708495600e-01, 0.7843816205451699e-05, 0.7197207696703830e-05}, + i0: 1, + n0: 18, + pp: 0, + n0in: 18, + dmin: 3.5518802090938446e-002, + dmin1: 3.7941281147346073e-002, + dmin2: 3.7941281147346073e-002, + dn: 3.5518802090938446e-002, + dn1: 5.9595348057062299e-002, + dn2: 3.7941281147346073e-002, + tau: 1.2903659816950583e-002, + ttype: -9, + g: 0.0000000000000000, + zOut: []float64{0.2605878684924073e+01, 0.2615950460208911e+01, 0.1945503997226627e-02, 0.2831884532112553e-02, 0.1792287311167312e+01, 0.1790238034184775e+01, 0.1831165540887852e-01, 0.1689844079671380e-01, 0.1912064812489923e+01, 0.1942176087759744e+01, 0.5350346462390188e-03, 0.1104039956008399e-02, 0.9149979242236238e+00, 0.9266158484293474e+00, 0.2199691768758329e-02, 0.1820770257466081e-02, 0.1425451464914127e+01, 0.1105418651305687e+01, 0.2857477273065492e+00, 0.3351361651941490e+00, 0.9167409543554429e+00, 0.1215385144271246e+01, 0.7843816205451699e-05, 0.7197207696703830e-05, 0.1003973806896052e+01, 0.9991024098509327e+00, 0.9418498631040351e-02, 0.1778290067827487e-01, 0.5121827465172590e+00, 0.5317426047035739e+00, 0.2263732461064059e-02, 0.2762300261676148e-02, 0.4201365406709412e+00, 0.4197388406228219e+00, 0.3107071375346623e-01, 0.1556509232613395e-01, 0.1196803030427524e+01, 0.8386678292065533e+00, 0.3403588894097180e+00, 0.4021095747913879e+00, 0.1359492109368225e+01, 0.1013013804731768e+01, 0.2411467892724848e+00, 0.6997408538631263e+00, 0.3392875246772514e+00, 0.4685122433619579e+00, 0.1652848117124708e+00, 0.1248257304047288e+00, 0.4228949559754990e+00, 0.4492589344427766e+00, 0.1708424320817961e+00, 0.1518244930621437e+00, 0.3159848280636428e+00, 0.4758679007372426e+00, 0.6745243660763246e-01, 0.2386301922514691e-01, 
0.8251512110211983e+00, 0.8931789553886716e+00, 0.7711819686391232e-03, 0.1232835205710967e-01, 0.3794316855579739e-01, 0.5161612293293578e-01, 0.3606500256058598e-05, 0.1887408451311279e-05, 0.6510811153270980e-01, 0.7250261437426894e-01, 0.4479235177066975e-02, 0.5512763475647510e-02, 0.3551880209093845e-01, 0.5290169708495600e-01, 0.7843816205451699e-05, 0.7197207696703830e-05}, + tauOut: 2.4222106054237202e-002, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.2605878684924073e+01, 0.2583602082867062e+01, 0.1945503997226627e-02, 0.1349628161076996e-02, 0.1792287311167312e+01, 0.1785027232360876e+01, 0.1831165540887852e-01, 0.1961486711855321e-01, 0.1912064812489923e+01, 0.1868762873963372e+01, 0.5350346462390188e-03, 0.2619677421449130e-03, 0.9149979242236238e+00, 0.8927135421960001e+00, 0.2199691768758329e-02, 0.3512385223173503e-02, 0.1425451464914127e+01, 0.1683464700943265e+01, 0.2857477273065492e+00, 0.1556056649653108e+00, 0.9167409543554429e+00, 0.7369210271521004e+00, 0.7843816205451699e-05, 0.1068633642713914e-04, 0.1003973806896052e+01, 0.9891595131364276e+00, 0.9418498631040351e-02, 0.4876860034049891e-02, 0.5121827465172590e+00, 0.4853475128900360e+00, 0.2263732461064059e-02, 0.1959578858316830e-02, 0.4201365406709412e+00, 0.4250255695118534e+00, 0.3107071375346623e-01, 0.8749008776201990e-01, 0.1196803030427524e+01, 0.1425449726020986e+01, 0.3403588894097180e+00, 0.3246099922425689e+00, 0.1359492109368225e+01, 0.1251806800343904e+01, 0.2411467892724848e+00, 0.6536000379104072e-01, 0.3392875246772514e+00, 0.4149902265444443e+00, 0.1652848117124708e+00, 0.1684331550518530e+00, 0.4228949559754990e+00, 0.4010821269512049e+00, 0.1708424320817961e+00, 0.1345949193440586e+00, 0.3159848280636428e+00, 0.2246202392729794e+00, 0.6745243660763246e-01, 0.2477891570824889e+00, 0.8251512110211983e+00, 0.5539111298531113e+00, 0.7711819686391232e-03, 0.5282632149136541e-04, 0.3794316855579739e-01, 0.1367184268032488e-01, 0.3606500256058598e-05, 0.1717489195894037e-04, 0.6510811153270980e-01, 0.4534806576358064e-01, 0.4479235177066975e-02, 0.3508353996892708e-02, 0.3551880209093845e-01, 0.7788342039808532e-02, 0.7843816205451699e-05, 0.1068633642713914e-04, 0.2434478048112329e-01}, + i0: 1, + n0: 18, + pp: 1, + n0in: 18, + dmin: 7.7883420398085317e-003, + dmin1: 1.3668236180068825e-002, + dmin2: 1.3668236180068825e-002, + dn: 7.7883420398085317e-003, + dn1: 4.0868830586513666e-002, + dn2: 1.3668236180068825e-002, + tau: 2.4222106054237202e-002, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.2605878684924073e+01, 0.2583602082867062e+01, 0.1945503997226627e-02, 0.1349628161076996e-02, 0.1792287311167312e+01, 0.1785027232360876e+01, 0.1831165540887852e-01, 0.1961486711855321e-01, 0.1912064812489923e+01, 0.1868762873963372e+01, 0.5350346462390188e-03, 0.2619677421449130e-03, 0.9149979242236238e+00, 0.8927135421960001e+00, 0.2199691768758329e-02, 0.3512385223173503e-02, 0.1425451464914127e+01, 0.1683464700943265e+01, 0.2857477273065492e+00, 0.1556056649653108e+00, 0.9167409543554429e+00, 0.7369210271521004e+00, 0.7843816205451699e-05, 0.1068633642713914e-04, 0.1003973806896052e+01, 0.9891595131364276e+00, 0.9418498631040351e-02, 0.4876860034049891e-02, 0.5121827465172590e+00, 0.4853475128900360e+00, 0.2263732461064059e-02, 0.1959578858316830e-02, 0.4201365406709412e+00, 0.4250255695118534e+00, 0.3107071375346623e-01, 0.8749008776201990e-01, 0.1196803030427524e+01, 0.1425449726020986e+01, 0.3403588894097180e+00, 0.3246099922425689e+00, 0.1359492109368225e+01, 
0.1251806800343904e+01, 0.2411467892724848e+00, 0.6536000379104072e-01, 0.3392875246772514e+00, 0.4149902265444443e+00, 0.1652848117124708e+00, 0.1684331550518530e+00, 0.4228949559754990e+00, 0.4010821269512049e+00, 0.1708424320817961e+00, 0.1345949193440586e+00, 0.3159848280636428e+00, 0.2246202392729794e+00, 0.6745243660763246e-01, 0.2477891570824889e+00, 0.8251512110211983e+00, 0.5539111298531113e+00, 0.7711819686391232e-03, 0.5282632149136541e-04, 0.3794316855579739e-01, 0.1367184268032488e-01, 0.3606500256058598e-05, 0.1717489195894037e-04, 0.6510811153270980e-01, 0.4534806576358064e-01, 0.4479235177066975e-02, 0.3508353996892708e-02, 0.3551880209093845e-01, 0.7788342039808532e-02, 0.7843816205451699e-05, 0.1068633642713914e-04, 0.2434478048112329e-01}, + tauOut: 5.1484099711571517e-003, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + + { + z: []float64{0.1845045664413523e+01, 0.1845045673681623e+01, 0.9284324334305927e-09, 0.1331924177594376e-08, 0.1286110617891388e+01, 0.1286109423357647e+01, 0.8600075793510468e-06, 0.1206062196852843e-05, 0.9228692597002398e+00, 0.9170877606119078e+00, 0.7344017075309068e-02, 0.5782369695935185e-02, 0.1164764979181097e+01, 0.1172109006845551e+01, 0.1416795225784663e-10, 0.1087898982148097e-10, 0.1516899526939413e+01, 0.1516899536394917e+01, 0.1175479719901021e-09, 0.1158688024893155e-08, 0.1569402316257081e+00, 0.1538882419371820e+00, 0.4821181617659422e-02, 0.3052000406097992e-02, 0.2431018491557045e+00, 0.2479152225121892e+00, 0.8755391955186837e-05, 0.7818861198543554e-05, 0.2722314297289909e+00, 0.2722202019375114e+00, 0.1357252989661879e-04, 0.1999378345859679e-04, 0.1847873189363651e+00, 0.1848009020627515e+00, 0.2027258571663375e-18, 0.3534048617228375e-11, 0.4750110334503861e-13, 0.1060007138617788e-07, 0.1416795225784663e-10, 0.1087898982148097e-10}, + i0: 1, + n0: 10, + pp: 0, + n0in: 10, + dmin: 4.7501103345038606e-014, + dmin1: 0.15388823121961009, + dmin2: 0.15388823121961009, + dn: 4.7501103345038606e-014, + dn1: 0.18478731893283101, + dn2: 0.27221143594553232, + tau: 1.0600023884871808e-008, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1845045664413523e+01, 0.1845045673681623e+01, 0.9284324334305927e-09, 0.1331924177594376e-08, 0.1286110617891388e+01, 0.1286109423357647e+01, 0.8600075793510468e-06, 0.1206062196852843e-05, 0.9228692597002398e+00, 0.9170877606119078e+00, 0.7344017075309068e-02, 0.5782369695935185e-02, 0.1164764979181097e+01, 0.1172109006845551e+01, 0.1416795225784663e-10, 0.1087898982148097e-10, 0.1516899526939413e+01, 0.1516899536394917e+01, 0.1175479719901021e-09, 0.1158688024893155e-08, 0.1569402316257081e+00, 0.1538882419371820e+00, 0.4821181617659422e-02, 0.3052000406097992e-02, 0.2431018491557045e+00, 0.2479152225121892e+00, 0.8755391955186837e-05, 0.7818861198543554e-05, 0.2722314297289909e+00, 0.2722202019375114e+00, 0.1357252989661879e-04, 0.1999378345859679e-04, 0.1847873189363651e+00, 0.1848009020627515e+00, 0.2027258571663375e-18, 0.3534048617228375e-11, 0.4750110334503861e-13, 0.1060007138617788e-07, 0.1416795225784663e-10, 0.1087898982148097e-10}, + tauOut: 4.7501103294055340e-014, + ttypeOut: -4, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1845045664413523e+01, 0.1845045665341908e+01, 0.9284324334305927e-09, 0.6471746651368383e-09, 0.1286110617891388e+01, 0.1286111477251745e+01, 0.8600075793510468e-06, 0.6171117917307419e-06, 0.9228692597002398e+00, 0.9302126596637096e+00, 0.7344017075309068e-02, 0.9195804644198721e-02, 0.1164764979181097e+01, 0.1155569174551018e+01, 
0.1416795225784663e-10, 0.1859807318413278e-10, 0.1516899526939413e+01, 0.1516899527038316e+01, 0.1175479719901021e-09, 0.1216165317638265e-10, 0.1569402316257081e+00, 0.1617614132311584e+00, 0.4821181617659422e-02, 0.7245474325163344e-02, 0.2431018491557045e+00, 0.2358651302224488e+00, 0.8755391955186837e-05, 0.1010532106865606e-04, 0.2722314297289909e+00, 0.2722348969377714e+00, 0.1357252989661879e-04, 0.9212747663842461e-05, 0.1847873189363651e+00, 0.1847781061886537e+00, 0.2027258571663375e-18, 0.5211495068651724e-31, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 10, + dmin: 5.0983266199979805e-023, + dmin1: 0.15694023161349893, + dmin2: 0.15694023161349893, + dn: 5.0983266199979805e-023, + dn1: 0.18477810618865373, + dn2: 0.27222132440787472, + tau: 4.7501103294055340e-014, + ttype: -4, + g: 0.0000000000000000, + zOut: []float64{0.1845045664413523e+01, 0.1845045665341908e+01, 0.9284324334305927e-09, 0.6471746651368383e-09, 0.1286110617891388e+01, 0.1286111477251745e+01, 0.8600075793510468e-06, 0.6171117917307419e-06, 0.9228692597002398e+00, 0.9302126596637096e+00, 0.7344017075309068e-02, 0.9195804644198721e-02, 0.1164764979181097e+01, 0.1155569174551018e+01, 0.1416795225784663e-10, 0.1859807318413278e-10, 0.1516899526939413e+01, 0.1516899527038316e+01, 0.1175479719901021e-09, 0.1216165317638265e-10, 0.1569402316257081e+00, 0.1617614132311584e+00, 0.4821181617659422e-02, 0.7245474325163344e-02, 0.2431018491557045e+00, 0.2358651302224488e+00, 0.8755391955186837e-05, 0.1010532106865606e-04, 0.2722314297289909e+00, 0.2722348969377714e+00, 0.1357252989661879e-04, 0.9212747663842461e-05, 0.1847873189363651e+00, 0.1847781061886537e+00, 0.2027258571663375e-18, 0.5211495068651724e-31, 0.8046649468928653e+00}, + tauOut: 3.9235057903374733e-002, + ttypeOut: -9, + gOut: 0.0000000000000000, + }, + { + z: []float64{0.1805810608085708e+01, 0.1845045665341908e+01, 0.4609225136302542e-09, 0.6471746651368383e-09, 0.1246877035999240e+01, 0.1286111477251745e+01, 0.4603863769418564e-06, 0.6171117917307419e-06, 0.9001729460181566e+00, 0.9302126596637096e+00, 0.1180482975969687e-01, 0.9195804644198721e-02, 0.1104529286906545e+01, 0.1155569174551018e+01, 0.2554156666668993e-10, 0.1859807318413278e-10, 0.1477664469121561e+01, 0.1516899527038316e+01, 0.1331348385339719e-11, 0.1216165317638265e-10, 0.1297718296516156e+00, 0.1617614132311584e+00, 0.1316891924708087e-01, 0.7245474325163344e-02, 0.1834712583930619e+00, 0.2358651302224488e+00, 0.1499428882618218e-04, 0.1010532106865606e-04, 0.2329940574932343e+00, 0.2722348969377714e+00, 0.7306255294378852e-05, 0.9212747663842461e-05, 0.1455357420299846e+00, 0.1847781061886537e+00, 0.1331348385339719e-11, 0.1216165317638265e-10}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 0.12252635532645229, + dmin1: 0.12252635532645229, + dmin2: 0.12252635532645229, + dn: 0.14553574202998462, + dn1: 0.23298484474557046, + dn2: 0.18346115307199323, + tau: 3.9235057903374733e-002, + ttype: -9, + g: 0.0000000000000000, + zOut: []float64{0.1805810608085708e+01, 0.1845045665341908e+01, 0.4609225136302542e-09, 0.6471746651368383e-09, 0.1246877035999240e+01, 0.1286111477251745e+01, 0.4603863769418564e-06, 0.6171117917307419e-06, 0.9001729460181566e+00, 0.9302126596637096e+00, 0.1180482975969687e-01, 0.9195804644198721e-02, 0.1104529286906545e+01, 0.1155569174551018e+01, 0.2554156666668993e-10, 0.1859807318413278e-10, 0.1477664469121561e+01, 0.1516899527038316e+01, 0.1331348385339719e-11, 0.1216165317638265e-10, 0.1297718296516156e+00, 0.1617614132311584e+00, 
0.1316891924708087e-01, 0.7245474325163344e-02, 0.1834712583930619e+00, 0.2358651302224488e+00, 0.1499428882618218e-04, 0.1010532106865606e-04, 0.2329940574932343e+00, 0.2722348969377714e+00, 0.7306255294378852e-05, 0.9212747663842461e-05, 0.1455357420299846e+00, 0.1847781061886537e+00, 0.1331348385339719e-11, 0.1216165317638265e-10}, + tauOut: 3.0631588831613071e-002, + ttypeOut: -6, + gOut: 0.25000000000000000, + }, + { + z: []float64{0.1805810608085708e+01, 0.1775179019715018e+01, 0.4609225136302542e-09, 0.3237497126982006e-09, 0.1246877035999240e+01, 0.1216245907230254e+01, 0.4603863769418564e-06, 0.3407430674789675e-06, 0.9001729460181566e+00, 0.8813458462031728e+00, 0.1180482975969687e-01, 0.1479416990810367e-01, 0.1104529286906545e+01, 0.1059103528192370e+01, 0.2554156666668993e-10, 0.3563567162644001e-10, 0.1477664469121561e+01, 0.1447032880255644e+01, 0.1331348385339719e-11, 0.1193970905752580e-12, 0.1297718296516156e+00, 0.1123091600669640e+00, 0.1316891924708087e-01, 0.2151309995104528e-01, 0.1834712583930619e+00, 0.1313415638992297e+00, 0.1499428882618218e-04, 0.2659919745982362e-04, 0.2329940574932343e+00, 0.2023431757194558e+00, 0.7306255294378852e-05, 0.5255039029347846e-05, 0.1455357420299846e+00, 0.1148988981593422e+00, 0.1331348385339719e-11, 0.1193970905752580e-12, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 9, + dmin: 9.9140240819883152e-002, + dmin1: 9.9140240819883152e-002, + dmin2: 9.9140240819883152e-002, + dn: 0.11489889815934221, + dn1: 0.20233586946416143, + dn2: 0.13132656961040354, + tau: 3.0631588831613071e-002, + ttype: -6, + g: 0.25000000000000000, + zOut: []float64{0.1805810608085708e+01, 0.1775179019715018e+01, 0.4609225136302542e-09, 0.3237497126982006e-09, 0.1246877035999240e+01, 0.1216245907230254e+01, 0.4603863769418564e-06, 0.3407430674789675e-06, 0.9001729460181566e+00, 0.8813458462031728e+00, 0.1180482975969687e-01, 0.1479416990810367e-01, 0.1104529286906545e+01, 0.1059103528192370e+01, 0.2554156666668993e-10, 0.3563567162644001e-10, 0.1477664469121561e+01, 0.1447032880255644e+01, 0.1331348385339719e-11, 0.1193970905752580e-12, 0.1297718296516156e+00, 0.1123091600669640e+00, 0.1316891924708087e-01, 0.2151309995104528e-01, 0.1834712583930619e+00, 0.1313415638992297e+00, 0.1499428882618218e-04, 0.2659919745982362e-04, 0.2329940574932343e+00, 0.2023431757194558e+00, 0.7306255294378852e-05, 0.5255039029347846e-05, 0.1455357420299846e+00, 0.1148988981593422e+00, 0.1331348385339719e-11, 0.1193970905752580e-12, 0.8046649468928653e+00}, + tauOut: 4.9545335349736611e-002, + ttypeOut: -6, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1725633684689031e+01, 0.1775179019715018e+01, 0.2281824158451768e-09, 0.3237497126982006e-09, 0.1166700912395402e+01, 0.1216245907230254e+01, 0.2574031475886406e-06, 0.3407430674789675e-06, 0.8465944233583922e+00, 0.8813458462031728e+00, 0.1850774953630535e-01, 0.1479416990810367e-01, 0.9910504433419636e+00, 0.1059103528192370e+01, 0.5203164874187830e-10, 0.3563567162644001e-10, 0.1397487544853995e+01, 0.1447032880255644e+01, 0.9595353465813834e-14, 0.1193970905752580e-12, 0.8427692466826309e-01, 0.1123091600669640e+00, 0.3352713928531350e-01, 0.2151309995104528e-01, 0.4829568846163943e-01, 0.1313415638992297e+00, 0.1114419580100731e-03, 0.2659919745982362e-04, 0.1526916534507385e+00, 0.2023431757194558e+00, 0.3954362799871079e-05, 0.5255039029347846e-05, 0.6534960844680572e-01, 0.1148988981593422e+00, 0.9595353465813834e-14, 0.1193970905752580e-12}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 
4.8269089264179610e-002, + dmin1: 4.8269089264179610e-002, + dmin2: 4.8269089264179610e-002, + dn: 6.5349608446805721e-002, + dn1: 0.15268639841170917, + dn2: 4.8269089264179610e-002, + tau: 4.9545335349736611e-002, + ttype: -6, + g: 0.49975000000000003, + zOut: []float64{0.1725633684689031e+01, 0.1775179019715018e+01, 0.2281824158451768e-09, 0.3237497126982006e-09, 0.1166700912395402e+01, 0.1216245907230254e+01, 0.2574031475886406e-06, 0.3407430674789675e-06, 0.8465944233583922e+00, 0.8813458462031728e+00, 0.1850774953630535e-01, 0.1479416990810367e-01, 0.9910504433419636e+00, 0.1059103528192370e+01, 0.5203164874187830e-10, 0.3563567162644001e-10, 0.1397487544853995e+01, 0.1447032880255644e+01, 0.9595353465813834e-14, 0.1193970905752580e-12, 0.8427692466826309e-01, 0.1123091600669640e+00, 0.3352713928531350e-01, 0.2151309995104528e-01, 0.4829568846163943e-01, 0.1313415638992297e+00, 0.1114419580100731e-03, 0.2659919745982362e-04, 0.1526916534507385e+00, 0.2023431757194558e+00, 0.3954362799871079e-05, 0.5255039029347846e-05, 0.6534960844680572e-01, 0.1148988981593422e+00, 0.9595353465813834e-14, 0.1193970905752580e-12}, + tauOut: 1.2037453975339630e-002, + ttypeOut: -5, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1725633684689031e+01, 0.1713596230941874e+01, 0.2281824158451768e-09, 0.1553578538234923e-09, 0.1166700912395402e+01, 0.1154663715667853e+01, 0.2574031475886406e-06, 0.1887268702969492e-06, 0.8465944233583922e+00, 0.8530645301924875e+00, 0.1850774953630535e-01, 0.2150143715280096e-01, 0.9910504433419636e+00, 0.9575115522658546e+00, 0.5203164874187830e-10, 0.7594016059954953e-10, 0.1397487544853995e+01, 0.1385450090802725e+01, 0.9595353465813834e-14, 0.5836853211617396e-15, 0.8427692466826309e-01, 0.1057666099782364e+00, 0.3352713928531350e-01, 0.1530933320323569e-01, 0.4829568846163943e-01, 0.2106034324107419e-01, 0.1114419580100731e-03, 0.8079762346493418e-03, 0.1526916534507385e+00, 0.1398501776035494e+00, 0.3954362799871079e-05, 0.1847806452993955e-05, 0.6534960844680572e-01, 0.5331030666501309e-01, 0.9595353465813834e-14, 0.5836853211617396e-15, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 9, + dmin: 2.0948901283064113e-002, + dmin1: 2.0948901283064113e-002, + dmin2: 2.0948901283064113e-002, + dn: 5.3310306665013088e-002, + dn1: 0.13984622324074955, + dn2: 2.0948901283064113e-002, + tau: 1.2037453975339630e-002, + ttype: -5, + g: 0.49975000000000003, + zOut: []float64{0.1725633684689031e+01, 0.1713596230941874e+01, 0.2281824158451768e-09, 0.1553578538234923e-09, 0.1166700912395402e+01, 0.1154663715667853e+01, 0.2574031475886406e-06, 0.1887268702969492e-06, 0.8465944233583922e+00, 0.8530645301924875e+00, 0.1850774953630535e-01, 0.2150143715280096e-01, 0.9910504433419636e+00, 0.9575115522658546e+00, 0.5203164874187830e-10, 0.7594016059954953e-10, 0.1397487544853995e+01, 0.1385450090802725e+01, 0.9595353465813834e-14, 0.5836853211617396e-15, 0.8427692466826309e-01, 0.1057666099782364e+00, 0.3352713928531350e-01, 0.1530933320323569e-01, 0.4829568846163943e-01, 0.2106034324107419e-01, 0.1114419580100731e-03, 0.8079762346493418e-03, 0.1526916534507385e+00, 0.1398501776035494e+00, 0.3954362799871079e-05, 0.1847806452993955e-05, 0.6534960844680572e-01, 0.5331030666501309e-01, 0.9595353465813834e-14, 0.5836853211617396e-15, 0.8046649468928653e+00}, + tauOut: 1.1070386405955311e-002, + ttypeOut: -5, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1702525844691276e+01, 0.1713596230941874e+01, 0.1053646717395619e-09, 0.1553578538234923e-09, 
0.1143593517883403e+01, 0.1154663715667853e+01, 0.1407809649380857e-06, 0.1887268702969492e-06, 0.8634954401583683e+00, 0.8530645301924875e+00, 0.2384248197112572e-01, 0.2150143715280096e-01, 0.9225986839647138e+00, 0.9575115522658546e+00, 0.1140379931457212e-09, 0.7594016059954953e-10, 0.1374379704282732e+01, 0.1385450090802725e+01, 0.4491802194179927e-16, 0.5836853211617396e-15, 0.1100055567755167e+00, 0.1057666099782364e+00, 0.2930941140637693e-02, 0.1530933320323569e-01, 0.7866991929130526e-02, 0.2106034324107419e-01, 0.1436325611276508e-01, 0.8079762346493418e-03, 0.1144183828912820e+00, 0.1398501776035494e+00, 0.8609379557504958e-06, 0.1847806452993955e-05, 0.4223905932110202e-01, 0.5331030666501309e-01, 0.4491802194179927e-16, 0.5836853211617396e-15}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 7.0590156944811844e-003, + dmin1: 7.0590156944811844e-003, + dmin2: 7.0590156944811844e-003, + dn: 4.2239059321102022e-002, + dn1: 0.11441653508482905, + dn2: 7.0590156944811844e-003, + tau: 1.1070386405955311e-002, + ttype: -5, + g: 0.49975000000000003, + zOut: []float64{0.1702525844691276e+01, 0.1713596230941874e+01, 0.1053646717395619e-09, 0.1553578538234923e-09, 0.1143593517883403e+01, 0.1154663715667853e+01, 0.1407809649380857e-06, 0.1887268702969492e-06, 0.8634954401583683e+00, 0.8530645301924875e+00, 0.2384248197112572e-01, 0.2150143715280096e-01, 0.9225986839647138e+00, 0.9575115522658546e+00, 0.1140379931457212e-09, 0.7594016059954953e-10, 0.1374379704282732e+01, 0.1385450090802725e+01, 0.4491802194179927e-16, 0.5836853211617396e-15, 0.1100055567755167e+00, 0.1057666099782364e+00, 0.2930941140637693e-02, 0.1530933320323569e-01, 0.7866991929130526e-02, 0.2106034324107419e-01, 0.1436325611276508e-01, 0.8079762346493418e-03, 0.1144183828912820e+00, 0.1398501776035494e+00, 0.8609379557504958e-06, 0.1847806452993955e-05, 0.4223905932110202e-01, 0.5331030666501309e-01, 0.4491802194179927e-16, 0.5836853211617396e-15}, + tauOut: 5.5670727120955849e-003, + ttypeOut: -5, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1702525844691276e+01, 0.1696958772084545e+01, 0.1053646717395619e-09, 0.7100605954454643e-10, 0.1143593517883403e+01, 0.1138026585881266e+01, 0.1407809649380857e-06, 0.1068197551738174e-06, 0.8634954401583683e+00, 0.8817707425976433e+00, 0.2384248197112572e-01, 0.2494644177488929e-01, 0.9225986839647138e+00, 0.8920851695917670e+00, 0.1140379931457212e-09, 0.1756911880603681e-09, 0.1374379704282732e+01, 0.1368812631394945e+01, 0.4491802194179927e-16, 0.3609867340226789e-17, 0.1100055567755167e+00, 0.1073694252040588e+00, 0.2930941140637693e-02, 0.2147509894397918e-03, 0.7866991929130526e-02, 0.1644842434036023e-01, 0.1436325611276508e-01, 0.9991355423895336e-01, 0.1144183828912820e+00, 0.8938616878188867e-02, 0.8609379557504958e-06, 0.4068326216494222e-05, 0.4223905932110202e-01, 0.3666791828278994e-01, 0.4491802194179927e-16, 0.3609867340226789e-17, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 9, + dmin: 2.0851682275951503e-003, + dmin1: 2.0851682275951503e-003, + dmin2: 2.0851682275951503e-003, + dn: 3.6667918282789938e-002, + dn1: 8.9377559402331157e-003, + dn2: 2.0851682275951503e-003, + tau: 5.5670727120955849e-003, + ttype: -5, + g: 0.49975000000000003, + zOut: []float64{0.1702525844691276e+01, 0.1696958772084545e+01, 0.1053646717395619e-09, 0.7100605954454643e-10, 0.1143593517883403e+01, 0.1138026585881266e+01, 0.1407809649380857e-06, 0.1068197551738174e-06, 0.8634954401583683e+00, 0.8817707425976433e+00, 0.2384248197112572e-01, 
0.2494644177488929e-01, 0.9225986839647138e+00, 0.8920851695917670e+00, 0.1140379931457212e-09, 0.1756911880603681e-09, 0.1374379704282732e+01, 0.1368812631394945e+01, 0.4491802194179927e-16, 0.3609867340226789e-17, 0.1100055567755167e+00, 0.1073694252040588e+00, 0.2930941140637693e-02, 0.2147509894397918e-03, 0.7866991929130526e-02, 0.1644842434036023e-01, 0.1436325611276508e-01, 0.9991355423895336e-01, 0.1144183828912820e+00, 0.8938616878188867e-02, 0.8609379557504958e-06, 0.4068326216494222e-05, 0.4223905932110202e-01, 0.3666791828278994e-01, 0.4491802194179927e-16, 0.3609867340226789e-17, 0.8046649468928653e+00}, + tauOut: 1.1659821590613959e-003, + ttypeOut: -5, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695792789996490e+01, 0.1696958772084545e+01, 0.4765133098633433e-10, 0.7100605954454643e-10, 0.1136860710494309e+01, 0.1138026585881266e+01, 0.8285142935651389e-07, 0.1068197551738174e-06, 0.9055511193620417e+00, 0.8817707425976433e+00, 0.2457547703893446e-01, 0.2494644177488929e-01, 0.8663437105694624e+00, 0.8920851695917670e+00, 0.2775899617066991e-09, 0.1756911880603681e-09, 0.1367646648958294e+01, 0.1368812631394945e+01, 0.2833987723936393e-18, 0.3609867340226789e-17, 0.1064181940344372e+00, 0.1073694252040588e+00, 0.3319277717374952e-04, 0.2147509894397918e-03, 0.1151628036430784e+00, 0.1644842434036023e-01, 0.7755012504281143e-02, 0.9991355423895336e-01, 0.2169054106282214e-04, 0.8938616878188867e-02, 0.6877516463147774e-02, 0.4068326216494222e-05, 0.2862441966058077e-01, 0.3666791828278994e-01, 0.2833987723936393e-18, 0.3609867340226789e-17}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 1.7622214846327918e-005, + dmin1: 1.7622214846327918e-005, + dmin2: 1.5249249404125084e-002, + dn: 2.8624419660580765e-002, + dn1: 1.7622214846327918e-005, + dn2: 1.5249249404125084e-002, + tau: 1.1659821590613959e-003, + ttype: -5, + g: 0.49975000000000003, + zOut: []float64{0.1695792789996490e+01, 0.1696958772084545e+01, 0.4765133098633433e-10, 0.7100605954454643e-10, 0.1136860710494309e+01, 0.1138026585881266e+01, 0.8285142935651389e-07, 0.1068197551738174e-06, 0.9055511193620417e+00, 0.8817707425976433e+00, 0.2457547703893446e-01, 0.2494644177488929e-01, 0.8663437105694624e+00, 0.8920851695917670e+00, 0.2775899617066991e-09, 0.1756911880603681e-09, 0.1367646648958294e+01, 0.1368812631394945e+01, 0.2833987723936393e-18, 0.3609867340226789e-17, 0.1064181940344372e+00, 0.1073694252040588e+00, 0.3319277717374952e-04, 0.2147509894397918e-03, 0.1151628036430784e+00, 0.1644842434036023e-01, 0.7755012504281143e-02, 0.9991355423895336e-01, 0.2169054106282214e-04, 0.8938616878188867e-02, 0.6877516463147774e-02, 0.4068326216494222e-05, 0.2862441966058077e-01, 0.3666791828278994e-01, 0.2833987723936393e-18, 0.3609867340226789e-17}, + tauOut: 1.2076215031173109e-005, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695792789996490e+01, 0.1695780713829110e+01, 0.4765133098633433e-10, 0.3194571418305606e-10, 0.1136860710494309e+01, 0.1136848717098761e+01, 0.8285142935651389e-07, 0.6599488873594650e-07, 0.9055511193620417e+00, 0.9301144541910563e+00, 0.2457547703893446e-01, 0.2289052693567938e-01, 0.8663437105694624e+00, 0.8434411076963417e+00, 0.2775899617066991e-09, 0.4501143914475995e-09, 0.1367646648958294e+01, 0.1367634572293148e+01, 0.2833987723936393e-18, 0.2205178646452290e-19, 0.1064181940344372e+00, 0.1064393105965798e+00, 0.3319277717374952e-04, 0.3591317210346347e-04, 0.1151628036430784e+00, 0.1228698267602250e+00, 0.7755012504281143e-02, 
0.1369013219942635e-05, 0.2169054106282214e-04, 0.6885761775959480e-02, 0.6877516463147774e-02, 0.2859014352646020e-01, 0.2862441966058077e-01, 0.2219991908939190e-04, 0.2833987723936393e-18, 0.2205178646452290e-19, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 9, + dmin: 8.2453128117063980e-006, + dmin1: 8.2453128117063980e-006, + dmin2: 0.10640611781940604, + dn: 2.2199919089391903e-005, + dn1: 8.2453128117063980e-006, + dn2: 0.11511481425594380, + tau: 1.2076215031173109e-005, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1695792789996490e+01, 0.1695780713829110e+01, 0.4765133098633433e-10, 0.3194571418305606e-10, 0.1136860710494309e+01, 0.1136848717098761e+01, 0.8285142935651389e-07, 0.6599488873594650e-07, 0.9055511193620417e+00, 0.9301144541910563e+00, 0.2457547703893446e-01, 0.2289052693567938e-01, 0.8663437105694624e+00, 0.8434411076963417e+00, 0.2775899617066991e-09, 0.4501143914475995e-09, 0.1367646648958294e+01, 0.1367634572293148e+01, 0.2833987723936393e-18, 0.2205178646452290e-19, 0.1064181940344372e+00, 0.1064393105965798e+00, 0.3319277717374952e-04, 0.3591317210346347e-04, 0.1151628036430784e+00, 0.1228698267602250e+00, 0.7755012504281143e-02, 0.1369013219942635e-05, 0.2169054106282214e-04, 0.6885761775959480e-02, 0.6877516463147774e-02, 0.2859014352646020e-01, 0.2862441966058077e-01, 0.2219991908939190e-04, 0.2833987723936393e-18, 0.2205178646452290e-19, 0.8046649468928653e+00}, + tauOut: 3.2770273787704380e-006, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695777436833677e+01, 0.1695780713829110e+01, 0.2141639781080129e-10, 0.3194571418305606e-10, 0.1136845506044855e+01, 0.1136848717098761e+01, 0.5399396803668456e-07, 0.6599488873594650e-07, 0.9530016501053890e+00, 0.9301144541910563e+00, 0.2025894854667596e-01, 0.2289052693567938e-01, 0.8231788825724013e+00, 0.8434411076963417e+00, 0.7478228806194929e-09, 0.4501143914475995e-09, 0.1367631294517947e+01, 0.1367634572293148e+01, 0.1716235185693175e-20, 0.2205178646452290e-19, 0.1064719467413045e+00, 0.1064393105965798e+00, 0.4144420544393849e-04, 0.3591317210346347e-04, 0.1228264745406222e+00, 0.1228698267602250e+00, 0.7674810284932935e-07, 0.1369013219942635e-05, 0.3547255152693805e-01, 0.6885761775959480e-02, 0.1789267604726782e-04, 0.2859014352646020e-01, 0.1030215663353646e-05, 0.2219991908939190e-04, 0.1716235185693175e-20, 0.2205178646452290e-19}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 1.0302156633536465e-006, + dmin1: 6.8824080004778604e-003, + dmin2: 0.10643603356920101, + dn: 1.0302156633536465e-006, + dn1: 6.8824080004778604e-003, + dn2: 0.12282510552740224, + tau: 3.2770273787704380e-006, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1695777436833677e+01, 0.1695780713829110e+01, 0.2141639781080129e-10, 0.3194571418305606e-10, 0.1136845506044855e+01, 0.1136848717098761e+01, 0.5399396803668456e-07, 0.6599488873594650e-07, 0.9530016501053890e+00, 0.9301144541910563e+00, 0.2025894854667596e-01, 0.2289052693567938e-01, 0.8231788825724013e+00, 0.8434411076963417e+00, 0.7478228806194929e-09, 0.4501143914475995e-09, 0.1367631294517947e+01, 0.1367634572293148e+01, 0.1716235185693175e-20, 0.2205178646452290e-19, 0.1064719467413045e+00, 0.1064393105965798e+00, 0.4144420544393849e-04, 0.3591317210346347e-04, 0.1228264745406222e+00, 0.1228698267602250e+00, 0.7674810284932935e-07, 0.1369013219942635e-05, 0.3547255152693805e-01, 0.6885761775959480e-02, 0.1789267604726782e-04, 0.2859014352646020e-01, 0.1030215663353646e-05, 0.2219991908939190e-04, 
0.1716235185693175e-20, 0.2205178646452290e-19}, + tauOut: 1.0296962592568820e-006, + ttypeOut: -2, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695777436833677e+01, 0.1695776407158834e+01, 0.2141639781080129e-10, 0.1435751523850387e-10, 0.1136845506044855e+01, 0.1136844530328206e+01, 0.5399396803668456e-07, 0.4526242530264241e-07, 0.9530016501053890e+00, 0.9732595236933804e+00, 0.2025894854667596e-01, 0.1713493494875720e-01, 0.8231788825724013e+00, 0.8060429186752077e+00, 0.7478228806194929e-09, 0.1268848035998799e-08, 0.1367631294517947e+01, 0.1367630263552839e+01, 0.1716235185693175e-20, 0.1336113320657127e-21, 0.1064719467413045e+00, 0.1065123612504892e+00, 0.4144420544393849e-04, 0.4779206455525695e-04, 0.1228264745406222e+00, 0.1227777295279105e+00, 0.7674810284932935e-07, 0.2217381803186623e-07, 0.3547255152693805e-01, 0.3548939233290803e-01, 0.1789267604726782e-04, 0.5194035150079302e-09, 0.1030215663353646e-05, 0.5817566277343568e-15, 0.1716235185693175e-20, 0.1336113320657127e-21, 0.8046649468928653e+00}, + i0: 1, + n0: 9, + pp: 1, + n0in: 9, + dmin: 5.8175662773435680e-016, + dmin1: 3.5471499656860764e-002, + dmin2: 0.10647091704504522, + dn: 5.8175662773435680e-016, + dn1: 3.5471499656860764e-002, + dn2: 0.12277765277980766, + tau: 1.0296962592568820e-006, + ttype: -2, + g: 0.49975000000000003, + zOut: []float64{0.1695777436833677e+01, 0.1695776407158834e+01, 0.2141639781080129e-10, 0.1435751523850387e-10, 0.1136845506044855e+01, 0.1136844530328206e+01, 0.5399396803668456e-07, 0.4526242530264241e-07, 0.9530016501053890e+00, 0.9732595236933804e+00, 0.2025894854667596e-01, 0.1713493494875720e-01, 0.8231788825724013e+00, 0.8060429186752077e+00, 0.7478228806194929e-09, 0.1268848035998799e-08, 0.1367631294517947e+01, 0.1367630263552839e+01, 0.1716235185693175e-20, 0.1336113320657127e-21, 0.1064719467413045e+00, 0.1065123612504892e+00, 0.4144420544393849e-04, 0.4779206455525695e-04, 0.1228264745406222e+00, 0.1227777295279105e+00, 0.7674810284932935e-07, 0.2217381803186623e-07, 0.3547255152693805e-01, 0.3548939233290803e-01, 0.1789267604726782e-04, 0.5194035150079302e-09, 0.1030215663353646e-05, 0.5817566277343568e-15, 0.1716235185693175e-20, 0.1336113320657127e-21, 0.8046649468928653e+00}, + tauOut: 5.8175661922007790e-016, + ttypeOut: -2, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695776407173191e+01, 0.1695776407158834e+01, 0.9625244577618415e-11, 0.1435751523850387e-10, 0.1136844575581005e+01, 0.1136844530328206e+01, 0.3874943632355666e-07, 0.4526242530264241e-07, 0.9903944198927007e+00, 0.9732595236933804e+00, 0.1394544708652782e-01, 0.1713493494875720e-01, 0.7920974728575274e+00, 0.8060429186752077e+00, 0.2190784636165185e-08, 0.1268848035998799e-08, 0.1367630261362054e+01, 0.1367630263552839e+01, 0.1040577915698430e-22, 0.1336113320657127e-21, 0.1065601533150438e+00, 0.1065123612504892e+00, 0.5506562249584694e-04, 0.4779206455525695e-04, 0.1227226860792321e+00, 0.1227777295279105e+00, 0.6412305277798032e-08, 0.2217381803186623e-07, 0.3548938644000568e-01, 0.3548939233290803e-01, 0.8514276171981532e-23, 0.5194035150079302e-09, 0.2761013168273541e-29, 0.5817566277343568e-15, 0.1040577915698430e-22, 0.1336113320657127e-21}, + i0: 1, + n0: 9, + pp: 0, + n0in: 9, + dmin: 2.7610131682735413e-030, + dmin1: 3.5489385920602169e-002, + dmin2: 0.10651236125048857, + dn: 2.7610131682735413e-030, + dn1: 3.5489385920602169e-002, + dn2: 0.12272266390541409, + tau: 5.8175661922007790e-016, + ttype: -2, + g: 0.49975000000000003, + zOut: []float64{0.1695776407173191e+01, 
0.1695776407158834e+01, 0.9625244577618415e-11, 0.1435751523850387e-10, 0.1136844575581005e+01, 0.1136844530328206e+01, 0.3874943632355666e-07, 0.4526242530264241e-07, 0.9903944198927007e+00, 0.9732595236933804e+00, 0.1394544708652782e-01, 0.1713493494875720e-01, 0.7920974728575274e+00, 0.8060429186752077e+00, 0.2190784636165185e-08, 0.1268848035998799e-08, 0.1367630261362054e+01, 0.1367630263552839e+01, 0.1040577915698430e-22, 0.1336113320657127e-21, 0.1065601533150438e+00, 0.1065123612504892e+00, 0.5506562249584694e-04, 0.4779206455525695e-04, 0.1227226860792321e+00, 0.1227777295279105e+00, 0.6412305277798032e-08, 0.2217381803186623e-07, 0.3548938644000568e-01, 0.3548939233290803e-01, 0.8514276171981532e-23, 0.5194035150079302e-09, 0.2761013168273541e-29, 0.5817566277343568e-15, 0.1040577915698430e-22, 0.1336113320657127e-21}, + tauOut: 2.7610131682735413e-030, + ttypeOut: -2, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1695776407173191e+01, 0.1695776407182817e+01, 0.9625244577618415e-11, 0.6452741670633652e-11, 0.1136844575581005e+01, 0.1136844614323989e+01, 0.3874943632355666e-07, 0.3375767015588020e-07, 0.9903944198927007e+00, 0.1004339833221559e+01, 0.1394544708652782e-01, 0.1099842207759001e-01, 0.7920974728575274e+00, 0.7810990529707220e+00, 0.2190784636165185e-08, 0.3835855840755795e-08, 0.1367630261362054e+01, 0.1367630257526198e+01, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.1065601533150438e+00, 0.1066152189375397e+00, 0.5506562249584694e-04, 0.6338495733216447e-04, 0.1227226860792321e+00, 0.1226593075342052e+00, 0.6412305277798032e-08, 0.1855291575909163e-08, 0.3548938644000568e-01, 0.3548938458471410e-01, 0.8514276171981532e-23, 0.6623960630549781e-51, 0.9539342071687115e+00}, + i0: 1, + n0: 8, + pp: 1, + n0in: 9, + dmin: 2.7610131682735413e-030, + dmin1: 3.5489384584714102e-002, + dmin2: 0.10656015331504384, + dn: 2.7610131682735413e-030, + dn1: 3.5489384584714102e-002, + dn2: 0.12265930112189996, + tau: 0.0000000000000000, + ttype: -2, + g: 0.49975000000000003, + zOut: []float64{0.1695776407173191e+01, 0.1695776407182817e+01, 0.9625244577618415e-11, 0.6452741670633652e-11, 0.1136844575581005e+01, 0.1136844614323989e+01, 0.3874943632355666e-07, 0.3375767015588020e-07, 0.9903944198927007e+00, 0.1004339833221559e+01, 0.1394544708652782e-01, 0.1099842207759001e-01, 0.7920974728575274e+00, 0.7810990529707220e+00, 0.2190784636165185e-08, 0.3835855840755795e-08, 0.1367630261362054e+01, 0.1367630257526198e+01, 0.1040577915698430e-22, 0.8107757314001177e-24, 0.1065601533150438e+00, 0.1066152189375397e+00, 0.5506562249584694e-04, 0.6338495733216447e-04, 0.1227226860792321e+00, 0.1226593075342052e+00, 0.6412305277798032e-08, 0.1855291575909163e-08, 0.3548938644000568e-01, 0.3548938458471410e-01, 0.8514276171981532e-23, 0.6623960630549781e-51, 0.9539342071687115e+00}, + tauOut: 1.7744692292357051e-002, + ttypeOut: -9, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1678031714896912e+01, 0.1695776407182817e+01, 0.4371648372768990e-11, 0.6452741670633652e-11, 0.1119099955784930e+01, 0.1136844614323989e+01, 0.3029592900888367e-07, 0.3375767015588020e-07, 0.9975935327108624e+00, 0.1004339833221559e+01, 0.8611580555893368e-02, 0.1099842207759001e-01, 0.7547427839583274e+00, 0.7810990529707220e+00, 0.6950755439903452e-08, 0.3835855840755795e-08, 0.1349885558283086e+01, 0.1367630257526198e+01, 0.6403582257922030e-25, 0.8107757314001177e-24, 0.8893391160251481e-01, 0.1066152189375397e+00, 0.8742171388117141e-04, 0.6338495733216447e-04, 0.1048271953832586e+00, 
0.1226593075342052e+00, 0.6281113981299557e-09, 0.1855291575909163e-08, 0.1774469166424565e-01, 0.3548938458471410e-01, 0.6403582257922030e-25, 0.8107757314001177e-24}, + i0: 1, + n0: 8, + pp: 0, + n0in: 8, + dmin: 1.7744691664245655e-002, + dmin1: 8.8870526645182649e-002, + dmin2: 8.8870526645182649e-002, + dn: 1.7744691664245655e-002, + dn1: 0.10482719352796703, + dn2: 8.8870526645182649e-002, + tau: 1.7744692292357051e-002, + ttype: -9, + g: 0.49975000000000003, + zOut: []float64{0.1678031714896912e+01, 0.1695776407182817e+01, 0.4371648372768990e-11, 0.6452741670633652e-11, 0.1119099955784930e+01, 0.1136844614323989e+01, 0.3029592900888367e-07, 0.3375767015588020e-07, 0.9975935327108624e+00, 0.1004339833221559e+01, 0.8611580555893368e-02, 0.1099842207759001e-01, 0.7547427839583274e+00, 0.7810990529707220e+00, 0.6950755439903452e-08, 0.3835855840755795e-08, 0.1349885558283086e+01, 0.1367630257526198e+01, 0.6403582257922030e-25, 0.8107757314001177e-24, 0.8893391160251481e-01, 0.1066152189375397e+00, 0.8742171388117141e-04, 0.6338495733216447e-04, 0.1048271953832586e+00, 0.1226593075342052e+00, 0.6281113981299557e-09, 0.1855291575909163e-08, 0.1774469166424565e-01, 0.3548938458471410e-01, 0.6403582257922030e-25, 0.8107757314001177e-24}, + tauOut: 1.7743283373674822e-002, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1678031714896912e+01, 0.1660288431527609e+01, 0.4371648372768990e-11, 0.2946663608425970e-11, 0.1119099955784930e+01, 0.1101356702704238e+01, 0.3029592900888367e-07, 0.2744162973950316e-07, 0.9975935327108624e+00, 0.9884618024514511e+00, 0.8611580555893368e-02, 0.6575396506892930e-02, 0.7547427839583274e+00, 0.7304241110285150e+00, 0.6950755439903452e-08, 0.1284558415558242e-07, 0.1349885558283086e+01, 0.1332142262063827e+01, 0.6403582257922030e-25, 0.4275036042946169e-26, 0.8893391160251481e-01, 0.7127804994272115e-01, 0.8742171388117141e-04, 0.1285693574547170e-03, 0.1048271953832586e+00, 0.8695534328024046e-01, 0.6281113981299557e-09, 0.1281766326273249e-09, 0.1774469166424565e-01, 0.1408162394200135e-05, 0.6403582257922030e-25, 0.4275036042946169e-26, 0.9539342071687115e+00}, + i0: 1, + n0: 8, + pp: 1, + n0in: 8, + dmin: 1.4081623942001353e-006, + dmin1: 7.1190628228839981e-002, + dmin2: 7.1190628228839981e-002, + dn: 1.4081623942001353e-006, + dn1: 8.6955342652129064e-002, + dn2: 7.1190628228839981e-002, + tau: 1.7743283373674822e-002, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1678031714896912e+01, 0.1660288431527609e+01, 0.4371648372768990e-11, 0.2946663608425970e-11, 0.1119099955784930e+01, 0.1101356702704238e+01, 0.3029592900888367e-07, 0.2744162973950316e-07, 0.9975935327108624e+00, 0.9884618024514511e+00, 0.8611580555893368e-02, 0.6575396506892930e-02, 0.7547427839583274e+00, 0.7304241110285150e+00, 0.6950755439903452e-08, 0.1284558415558242e-07, 0.1349885558283086e+01, 0.1332142262063827e+01, 0.6403582257922030e-25, 0.4275036042946169e-26, 0.8893391160251481e-01, 0.7127804994272115e-01, 0.8742171388117141e-04, 0.1285693574547170e-03, 0.1048271953832586e+00, 0.8695534328024046e-01, 0.6281113981299557e-09, 0.1281766326273249e-09, 0.1774469166424565e-01, 0.1408162394200135e-05, 0.6403582257922030e-25, 0.4275036042946169e-26, 0.9539342071687115e+00}, + tauOut: 1.4081069428512511e-006, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 
0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26, 0.7140521119323301e-01, 0.7127804994272115e-01, 0.1565683011922014e-03, 0.1285693574547170e-03, 0.8679736700028205e-01, 0.8695534328024046e-01, 0.2079481441878513e-14, 0.1281766326273249e-09, 0.5544926940271698e-10, 0.1408162394200135e-05, 0.1565683011922014e-03, 0.1285693574547170e-03}, + i0: 6, + n0: 8, + pp: 0, + n0in: 8, + dmin: 5.5449269402716976e-011, + dmin1: 7.1276641835778295e-002, + dmin2: 7.1276641835778295e-002, + dn: 5.5449269402716976e-011, + dn1: 8.6797366872105416e-002, + dn2: 7.1276641835778295e-002, + tau: 1.4081069428512511e-006, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26, 0.7140521119323301e-01, 0.7127804994272115e-01, 0.1565683011922014e-03, 0.1285693574547170e-03, 0.8679736700028205e-01, 0.8695534328024046e-01, 0.2079481441878513e-14, 0.1281766326273249e-09, 0.5544926940271698e-10, 0.1408162394200135e-05, 0.1565683011922014e-03, 0.1285693574547170e-03}, + tauOut: 5.5449260598506680e-011, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26, 0.7140521119323301e-01, 0.7156177943897596e-01, 0.1565683011922014e-03, 0.1899018778701386e-03, 0.8679736700028205e-01, 0.8660746506696473e-01, 0.2079481441878513e-14, 0.1331360138522907e-23, 0.5544926940271698e-10, 0.8804208964992894e-17, 0.1565683011922014e-03, 0.8679736700028205e-01, 0.9539342071687115e+00}, + i0: 6, + n0: 8, + pp: 1, + n0in: 8, + dmin: 8.8042089649928937e-018, + dmin1: 7.1405211137783753e-002, + dmin2: 7.1405211137783753e-002, + dn: 8.8042089649928937e-018, + dn1: 8.6607465066962652e-002, + dn2: 7.1405211137783753e-002, + tau: 5.5449260598506680e-011, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26, 0.7140521119323301e-01, 0.7156177943897596e-01, 0.1565683011922014e-03, 0.1899018778701386e-03, 0.8679736700028205e-01, 0.8660746506696473e-01, 
0.2079481441878513e-14, 0.1331360138522907e-23, 0.5544926940271698e-10, 0.8804208964992894e-17, 0.1565683011922014e-03, 0.8679736700028205e-01, 0.9539342071687115e+00}, + tauOut: 8.8042089649574750e-018, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26}, + i0: 1, + n0: 5, + pp: 0, + n0in: 5, + dmin: -0.60723548073666500, + dmin1: 7.1561779438975959e-002, + dmin2: 7.1561779438975959e-002, + dn: 8.8042089649928937e-018, + dn1: 8.6378245068883266e-002, + dn2: 7.1561779438975959e-002, + tau: 0.0000000000000000, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.1660287023423613e+01, 0.1660288431527609e+01, 0.1954678721190348e-11, 0.2946663608425970e-11, 0.1101355322036970e+01, 0.1101356702704238e+01, 0.2462874809952034e-07, 0.2744162973950316e-07, 0.9950357662226532e+00, 0.9884618024514511e+00, 0.4826789459478154e-02, 0.6575396506892930e-02, 0.7255959263076781e+00, 0.7304241110285150e+00, 0.2358357443050565e-07, 0.1284558415558242e-07, 0.1332140830373310e+01, 0.1332142262063827e+01, -0.9894235909416862e+00, 0.4275036042946169e-26}, + tauOut: 0.60723548073666500, + ttypeOut: -1, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.1660287023423613e+01, 0.1053051542688903e+01, 0.1954678721190348e-11, 0.2044340400431280e-11, 0.1101355322036970e+01, 0.4941198659270088e+00, 0.2462874809952034e-07, 0.4959623550114665e-07, 0.9950357662226532e+00, 0.3926270253492309e+00, 0.4826789459478154e-02, 0.8920167341580701e-02, 0.7255959263076781e+00, 0.1094403018130068e+00, 0.2358357443050565e-07, 0.2870664819501689e-06, 0.1332140830373310e+01, 0.7249050625701626e+00, -0.9894235909416862e+00, 0.2044340400431280e-11, 0.1077066053646038e+01}, + i0: 1, + n0: 5, + pp: 1, + n0in: 5, + dmin: 0.10944027822943236, + dmin1: 0.10944027822943236, + dmin2: 0.38780023588975276, + dn: 0.72490506257016263, + dn1: 0.10944027822943236, + dn2: 0.38780023588975276, + tau: 0.60723548073666500, + ttype: -1, + g: 0.49975000000000003, + zOut: []float64{0.1660287023423613e+01, 0.1053051542688903e+01, 0.1954678721190348e-11, 0.2044340400431280e-11, 0.1101355322036970e+01, 0.4941198659270088e+00, 0.2462874809952034e-07, 0.4959623550114665e-07, 0.9950357662226532e+00, 0.3926270253492309e+00, 0.4826789459478154e-02, 0.8920167341580701e-02, 0.7255959263076781e+00, 0.1094403018130068e+00, 0.2358357443050565e-07, 0.2870664819501689e-06, 0.1332140830373310e+01, 0.7249050625701626e+00, -0.9894235909416862e+00, 0.2044340400431280e-11, 0.1077066053646038e+01}, + tauOut: 9.0381042476589277e-002, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9626705002143580e+00, 0.1053051542688903e+01, 0.1049319787347115e-11, 0.2044340400431280e-11, 0.4037388730456057e+00, 0.4941198659270088e+00, 0.4823122991958101e-07, 0.4959623550114665e-07, 0.3111661019829924e+00, 0.3926270253492309e+00, 0.3137314122148422e-02, 0.8920167341580701e-02, 0.1592223228075105e-01, 0.1094403018130068e+00, 0.1306952080528672e-04, 0.2870664819501689e-06, 0.6345109505727681e+00, 0.7249050625701626e+00, 0.1049319787347115e-11, 0.2044340400431280e-11}, + i0: 1, + 
n0: 5, + pp: 0, + n0in: 5, + dmin: 1.5921945214269095e-002, + dmin1: 1.5921945214269095e-002, + dmin2: 0.30224593464141175, + dn: 0.63451095057276807, + dn1: 1.5921945214269095e-002, + dn2: 0.30224593464141175, + tau: 9.0381042476589277e-002, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.9626705002143580e+00, 0.1053051542688903e+01, 0.1049319787347115e-11, 0.2044340400431280e-11, 0.4037388730456057e+00, 0.4941198659270088e+00, 0.4823122991958101e-07, 0.4959623550114665e-07, 0.3111661019829924e+00, 0.3926270253492309e+00, 0.3137314122148422e-02, 0.8920167341580701e-02, 0.1592223228075105e-01, 0.1094403018130068e+00, 0.1306952080528672e-04, 0.2870664819501689e-06, 0.6345109505727681e+00, 0.7249050625701626e+00, 0.1049319787347115e-11, 0.2044340400431280e-11}, + tauOut: 1.4134050686447828e-002, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9626705002143580e+00, 0.9485364495289594e+00, 0.1049319787347115e-11, 0.4466366986934060e-12, 0.4037388730456057e+00, 0.3896048705899411e+00, 0.4823122991958101e-07, 0.3852088344069326e-07, 0.3111661019829924e+00, 0.3001693268978096e+00, 0.3137314122148422e-02, 0.1664162181618706e-03, 0.1592223228075105e-01, 0.1634834896946635e-02, 0.1306952080528672e-04, 0.5072533064458891e-02, 0.6345109505727681e+00, 0.6153043668218613e+00, 0.1049319787347115e-11, 0.4466366986934060e-12, 0.1077066053646038e+01}, + i0: 1, + n0: 5, + pp: 1, + n0in: 5, + dmin: 1.6217653761413479e-003, + dmin1: 1.6217653761413479e-003, + dmin2: 0.29703201277566116, + dn: 0.61530436682186129, + dn1: 1.6217653761413479e-003, + dn2: 0.29703201277566116, + tau: 1.4134050686447828e-002, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.9626705002143580e+00, 0.9485364495289594e+00, 0.1049319787347115e-11, 0.4466366986934060e-12, 0.4037388730456057e+00, 0.3896048705899411e+00, 0.4823122991958101e-07, 0.3852088344069326e-07, 0.3111661019829924e+00, 0.3001693268978096e+00, 0.3137314122148422e-02, 0.1664162181618706e-03, 0.1592223228075105e-01, 0.1634834896946635e-02, 0.1306952080528672e-04, 0.5072533064458891e-02, 0.6345109505727681e+00, 0.6153043668218613e+00, 0.1049319787347115e-11, 0.4466366986934060e-12, 0.1077066053646038e+01}, + tauOut: 1.5809617416939919e-003, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469554877877121e+00, 0.9485364495289594e+00, 0.1837592531426069e-12, 0.4466366986934060e-12, 0.3880239473689468e+00, 0.3896048705899411e+00, 0.2979915990315817e-07, 0.3852088344069326e-07, 0.2987547515751175e+00, 0.3001693268978096e+00, 0.9106567826436868e-06, 0.1664162181618706e-03, 0.5125495562928890e-02, 0.1634834896946635e-02, 0.6089463364253304e+00, 0.5072533064458891e-02, 0.4777068654836977e-02, 0.6153043668218613e+00, 0.1837592531426069e-12, 0.4466366986934060e-12}, + i0: 1, + n0: 5, + pp: 0, + n0in: 5, + dmin: 5.2962498469998932e-005, + dmin1: 5.2962498469998932e-005, + dmin2: 0.29858833535695567, + dn: 4.7770686548369769e-003, + dn1: 5.2962498469998932e-005, + dn2: 0.29858833535695567, + tau: 1.5809617416939919e-003, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.9469554877877121e+00, 0.9485364495289594e+00, 0.1837592531426069e-12, 0.4466366986934060e-12, 0.3880239473689468e+00, 0.3896048705899411e+00, 0.2979915990315817e-07, 0.3852088344069326e-07, 0.2987547515751175e+00, 0.3001693268978096e+00, 0.9106567826436868e-06, 0.1664162181618706e-03, 0.5125495562928890e-02, 0.1634834896946635e-02, 0.6089463364253304e+00, 0.5072533064458891e-02, 0.4777068654836977e-02, 0.6153043668218613e+00, 
0.1837592531426069e-12, 0.4466366986934060e-12}, + tauOut: 4.7621670852039755e-005, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469554877877121e+00, 0.9469159823779022e+00, 0.1837592531426069e-12, 0.7530022947855128e-13, 0.3880239473689468e+00, 0.3879844717580378e+00, 0.2979915990315817e-07, 0.2294586835827609e-07, 0.2987547515751175e+00, 0.2987161338760382e+00, 0.9106567826436868e-06, 0.1562542751952015e-07, 0.5125495562928890e-02, 0.6140323109528382e+00, 0.6089463364253304e+00, 0.4737500623869755e-02, 0.4777068654836977e-02, 0.6262097360996939e-07, 0.1837592531426069e-12, 0.7530022947855128e-13, 0.1077066053646038e+01}, + i0: 1, + n0: 5, + pp: 1, + n0in: 5, + dmin: 6.2620973609969386e-008, + dmin1: 5.0859745275077589e-003, + dmin2: 0.29871522321925559, + dn: 6.2620973609969386e-008, + dn1: 5.0859745275077589e-003, + dn2: 0.29871522321925559, + tau: 3.9505409993611756e-005, + ttype: -15, + g: 0.49975000000000003, + zOut: []float64{0.9469554877877121e+00, 0.9469159823779022e+00, 0.1837592531426069e-12, 0.7530022947855128e-13, 0.3880239473689468e+00, 0.3879844717580378e+00, 0.2979915990315817e-07, 0.2294586835827609e-07, 0.2987547515751175e+00, 0.2987161338760382e+00, 0.9106567826436868e-06, 0.1562542751952015e-07, 0.5125495562928890e-02, 0.6140323109528382e+00, 0.6089463364253304e+00, 0.4737500623869755e-02, 0.4777068654836977e-02, 0.6262097360996939e-07, 0.1837592531426069e-12, 0.7530022947855128e-13, 0.1077066053646038e+01}, + tauOut: 6.2141437994562936e-008, + ttypeOut: -2, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469159202365395e+00, 0.9469159823779022e+00, 0.3085312975855001e-13, 0.7530022947855128e-13, 0.3879844325624373e+00, 0.3879844717580378e+00, 0.1766643326162244e-07, 0.2294586835827609e-07, 0.2987160696935945e+00, 0.2987161338760382e+00, 0.3211918722443868e-07, 0.1562542751952015e-07, 0.6187697173160828e+00, 0.6140323109528382e+00, 0.4794463808464251e-09, 0.4737500623869755e-02, 0.8923456002485611e-13, 0.6262097360996939e-07, 0.3085312975855001e-13, 0.7530022947855128e-13}, + i0: 1, + n0: 5, + pp: 0, + n0in: 5, + dmin: 8.9234560024856112e-014, + dmin1: 0.29871605406816698, + dmin2: 0.29871605406816698, + dn: 8.9234560024856112e-014, + dn1: 0.61403221669221297, + dn2: 0.29871605406816698, + tau: 6.2141437994562936e-008, + ttype: -2, + g: 0.49975000000000003, + zOut: []float64{0.9469159202365395e+00, 0.9469159823779022e+00, 0.3085312975855001e-13, 0.7530022947855128e-13, 0.3879844325624373e+00, 0.3879844717580378e+00, 0.1766643326162244e-07, 0.2294586835827609e-07, 0.2987160696935945e+00, 0.2987161338760382e+00, 0.3211918722443868e-07, 0.1562542751952015e-07, 0.6187697173160828e+00, 0.6140323109528382e+00, 0.4794463808464251e-09, 0.4737500623869755e-02, 0.8923456002485611e-13, 0.6262097360996939e-07, 0.3085312975855001e-13, 0.7530022947855128e-13}, + tauOut: 8.9232014685788963e-014, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469159202365395e+00, 0.9469159202364811e+00, 0.3085312975855001e-13, 0.1264160184270297e-13, 0.3879844325624373e+00, 0.3879844502287687e+00, 0.1766643326162244e-07, 0.1360169848638109e-07, 0.2987160696935945e+00, 0.2987160882109940e+00, 0.3211918722443868e-07, 0.6653267495003571e-07, 0.6187697173160828e+00, 0.6187696512627651e+00, 0.4794463808464251e-09, 0.6914234847011291e-22, 0.8923456002485611e-13, 0.2545269924803487e-17, 0.3085312975855001e-13, 0.1264160184270297e-13, 0.1077066053646038e+01}, + i0: 1, + n0: 5, + pp: 1, + n0in: 5, + dmin: 2.5452699248034865e-018, + dmin1: 
0.29871605609180679, + dmin2: 0.29871605609180679, + dn: 2.5452699248034865e-018, + dn1: 0.61876965078331869, + dn2: 0.29871605609180679, + tau: 8.9232014685788963e-014, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.9469159202365395e+00, 0.9469159202364811e+00, 0.3085312975855001e-13, 0.1264160184270297e-13, 0.3879844325624373e+00, 0.3879844502287687e+00, 0.1766643326162244e-07, 0.1360169848638109e-07, 0.2987160696935945e+00, 0.2987160882109940e+00, 0.3211918722443868e-07, 0.6653267495003571e-07, 0.6187697173160828e+00, 0.6187696512627651e+00, 0.4794463808464251e-09, 0.6914234847011291e-22, 0.8923456002485611e-13, 0.2545269924803487e-17, 0.3085312975855001e-13, 0.1264160184270297e-13, 0.1077066053646038e+01}, + tauOut: 2.5452699247759164e-018, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469159202364937e+00, 0.9469159202364811e+00, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.3879844638304620e+00, 0.3879844502287687e+00, 0.1047218779010850e-07, 0.1360169848638109e-07, 0.2987161442714811e+00, 0.2987160882109940e+00, 0.1378177941363543e-06, 0.6653267495003571e-07, 0.6187695134449710e+00, 0.6187696512627651e+00, 0.2844127518685707e-39, 0.6914234847011291e-22}, + i0: 1, + n0: 4, + pp: 0, + n0in: 5, + dmin: 2.5452699248034865e-018, + dmin1: 0.29871607773880621, + dmin2: 0.29871607773880621, + dn: 2.5452699248034865e-018, + dn1: 0.61876951344497100, + dn2: 0.29871607773880621, + tau: 0.0000000000000000, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.9469159202364937e+00, 0.9469159202364811e+00, 0.5179704803914515e-14, 0.1264160184270297e-13, 0.3879844638304620e+00, 0.3879844502287687e+00, 0.1047218779010850e-07, 0.1360169848638109e-07, 0.2987161442714811e+00, 0.2987160882109940e+00, 0.1378177941363543e-06, 0.6653267495003571e-07, 0.6187695134449710e+00, 0.6187696512627651e+00, 0.2844127518685707e-39, 0.6914234847011291e-22}, + tauOut: 7.4679019434701552e-002, + ttypeOut: -9, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.9469159202364937e+00, 0.8722369008017974e+00, 0.5179704803914515e-14, 0.2304012808102351e-14, 0.3879844638304620e+00, 0.3133054548679459e+00, 0.1047218779010850e-07, 0.9984542273822185e-08, 0.2987161442714811e+00, 0.2240372526700314e+00, 0.1378177941363543e-06, 0.3806395963416417e-06, 0.6187695134449710e+00, 0.5440901133706730e+00, 0.2844127518685707e-39, 0.2304012808102351e-14, 0.1702794694134603e+01}, + i0: 1, + n0: 4, + pp: 1, + n0in: 4, + dmin: 0.22403711485223726, + dmin1: 0.22403711485223726, + dmin2: 0.31330544439575814, + dn: 0.54409011337067303, + dn1: 0.22403711485223726, + dn2: 0.31330544439575814, + tau: 7.4679019434701552e-002, + ttype: -9, + g: 0.49975000000000003, + zOut: []float64{0.9469159202364937e+00, 0.8722369008017974e+00, 0.5179704803914515e-14, 0.2304012808102351e-14, 0.3879844638304620e+00, 0.3133054548679459e+00, 0.1047218779010850e-07, 0.9984542273822185e-08, 0.2987161442714811e+00, 0.2240372526700314e+00, 0.1378177941363543e-06, 0.3806395963416417e-06, 0.6187695134449710e+00, 0.5440901133706730e+00, 0.2844127518685707e-39, 0.2304012808102351e-14, 0.1702794694134603e+01}, + tauOut: 0.22392121955733330, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6483156812444665e+00, 0.8722369008017974e+00, 0.1113438717814820e-14, 0.2304012808102351e-14, 0.8938424529515379e-01, 0.3133054548679459e+00, 0.2502576838690600e-07, 0.9984542273822185e-08, 0.1163887265260836e-03, 0.2240372526700314e+00, 0.1779401212715201e-02, 0.3806395963416417e-06, 0.3183894926006245e+00, 
0.5440901133706730e+00, 0.1113438717814820e-14, 0.2304012808102351e-14}, + i0: 1, + n0: 4, + pp: 0, + n0in: 4, + dmin: 1.1600808692974196e-004, + dmin1: 1.1600808692974196e-004, + dmin2: 8.9384235310611515e-002, + dn: 0.31838949260062449, + dn1: 1.1600808692974196e-004, + dn2: 8.9384235310611515e-002, + tau: 0.22392121955733330, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.6483156812444665e+00, 0.8722369008017974e+00, 0.1113438717814820e-14, 0.2304012808102351e-14, 0.8938424529515379e-01, 0.3133054548679459e+00, 0.2502576838690600e-07, 0.9984542273822185e-08, 0.1163887265260836e-03, 0.2240372526700314e+00, 0.1779401212715201e-02, 0.3806395963416417e-06, 0.3183894926006245e+00, 0.5440901133706730e+00, 0.1113438717814820e-14, 0.2304012808102351e-14}, + tauOut: 1.1589031543524988e-004, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6483156812444665e+00, 0.6482265310101248e+00, 0.1113438717814820e-14, 0.1535325610927932e-15, 0.8938424529515379e-01, 0.8929512008657929e-01, 0.2502576838690600e-07, 0.3261899765703413e-10, 0.1163887265260836e-03, 0.1806639672279546e-02, 0.1779401212715201e-02, 0.3135891777105103e+00, 0.3183894926006245e+00, 0.4711164655771408e-02, 0.1113438717814820e-14, 0.1535325610927932e-15, 0.1702794694134603e+01}, + i0: 1, + n0: 4, + pp: 1, + n0in: 4, + dmin: 2.7238459564345061e-005, + dmin1: 2.7238459564345061e-005, + dmin2: 8.9295095060810903e-002, + dn: 4.7111646557714080e-003, + dn1: 2.7238459564345061e-005, + dn2: 8.9295095060810903e-002, + tau: 8.9150234342740890e-005, + ttype: -15, + g: 0.49975000000000003, + zOut: []float64{0.6483156812444665e+00, 0.6482265310101248e+00, 0.1113438717814820e-14, 0.1535325610927932e-15, 0.8938424529515379e-01, 0.8929512008657929e-01, 0.2502576838690600e-07, 0.3261899765703413e-10, 0.1163887265260836e-03, 0.1806639672279546e-02, 0.1779401212715201e-02, 0.3135891777105103e+00, 0.3183894926006245e+00, 0.4711164655771408e-02, 0.1113438717814820e-14, 0.1535325610927932e-15, 0.1702794694134603e+01}, + tauOut: 2.5005145741075717e-005, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6482015258643838e+00, 0.6482265310101248e+00, 0.2115037983241246e-16, 0.1535325610927932e-15, 0.8927011497345719e-01, 0.8929512008657929e-01, 0.6601400172354812e-12, 0.3261899765703413e-10, 0.3153708122363886e+00, 0.1806639672279546e-02, 0.4684549720964035e-02, 0.3135891777105103e+00, 0.1609789066298190e-05, 0.4711164655771408e-02, 0.2115037983241246e-16, 0.1535325610927932e-15}, + i0: 1, + n0: 4, + pp: 0, + n0in: 4, + dmin: 1.6097890662981897e-006, + dmin1: 1.7816345258783307e-003, + dmin2: 8.9270114940838197e-002, + dn: 1.6097890662981897e-006, + dn1: 1.7816345258783307e-003, + dn2: 8.9270114940838197e-002, + tau: 2.5005145741075717e-005, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.6482015258643838e+00, 0.6482265310101248e+00, 0.2115037983241246e-16, 0.1535325610927932e-15, 0.8927011497345719e-01, 0.8929512008657929e-01, 0.6601400172354812e-12, 0.3261899765703413e-10, 0.3153708122363886e+00, 0.1806639672279546e-02, 0.4684549720964035e-02, 0.3135891777105103e+00, 0.1609789066298190e-05, 0.4711164655771408e-02, 0.2115037983241246e-16, 0.1535325610927932e-15}, + tauOut: 1.5862205428611591e-006, + ttypeOut: -2, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6482015258643838e+00, 0.6481999396438409e+00, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.8927011497345719e-01, 0.8926852875357447e-01, 0.6601400172354812e-12, 0.2332164496627946e-11, 0.3153708122363886e+00, 
0.3200537757344777e+00, 0.4684549720964035e-02, 0.2356209328895529e-07, 0.1609789066298190e-05, 0.6430148075154844e-11, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.1702794694134603e+01}, + i0: 1, + n0: 4, + pp: 1, + n0in: 4, + dmin: 6.4301480751548441e-012, + dmin1: 8.9268528752914328e-002, + dmin2: 8.9268528752914328e-002, + dn: 6.4301480751548441e-012, + dn1: 0.31536922601351364, + dn2: 8.9268528752914328e-002, + tau: 1.5862205428611591e-006, + ttype: -2, + g: 0.49975000000000003, + zOut: []float64{0.6482015258643838e+00, 0.6481999396438409e+00, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.8927011497345719e-01, 0.8926852875357447e-01, 0.6601400172354812e-12, 0.2332164496627946e-11, 0.3153708122363886e+00, 0.3200537757344777e+00, 0.4684549720964035e-02, 0.2356209328895529e-07, 0.1609789066298190e-05, 0.6430148075154844e-11, 0.2115037983241246e-16, 0.2912830939801045e-17, 0.1702794694134603e+01}, + tauOut: 6.4283598105616478e-012, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6481999396374125e+00, 0.6481999396438409e+00, 0.4011480356653261e-18, 0.2912830939801045e-17, 0.8926852874947827e-01, 0.8926852875357447e-01, 0.8361491594360282e-11, 0.2332164496627946e-11, 0.3200537992817811e+00, 0.3200537757344777e+00, 0.4733821287189426e-18, 0.2356209328895529e-07, 0.1787791211067534e-14, 0.6430148075154844e-11, 0.4011480356653261e-18, 0.2912830939801045e-17}, + i0: 1, + n0: 4, + pp: 0, + n0in: 4, + dmin: 1.7877912110675335e-015, + dmin1: 8.9268528747146109e-002, + dmin2: 8.9268528747146109e-002, + dn: 1.7877912110675335e-015, + dn1: 0.32005377571968785, + dn2: 8.9268528747146109e-002, + tau: 6.4283598105616478e-012, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.6481999396374125e+00, 0.6481999396438409e+00, 0.4011480356653261e-18, 0.2912830939801045e-17, 0.8926852874947827e-01, 0.8926852875357447e-01, 0.8361491594360282e-11, 0.2332164496627946e-11, 0.3200537992817811e+00, 0.3200537757344777e+00, 0.4733821287189426e-18, 0.2356209328895529e-07, 0.1787791211067534e-14, 0.6430148075154844e-11, 0.4011480356653261e-18, 0.2912830939801045e-17}, + tauOut: 1.7877912088395813e-015, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6481999396374125e+00, 0.6481999396374107e+00, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.8926852874947827e-01, 0.8926852875783797e-01, 0.8361491594360282e-11, 0.2997839428604580e-10, 0.3200537992817811e+00, 0.3200537992518009e+00, 0.4733821287189426e-18, 0.2644269217171004e-32, 0.2001510674733695e+01}, + i0: 1, + n0: 3, + pp: 1, + n0in: 4, + dmin: 2.2279522444840209e-024, + dmin1: 8.9268528749476481e-002, + dmin2: 8.9268528749476481e-002, + dn: 2.2279522444840209e-024, + dn1: 0.32005379925180094, + dn2: 8.9268528749476481e-002, + tau: 1.7877912088395813e-015, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.6481999396374125e+00, 0.6481999396374107e+00, 0.4011480356653261e-18, 0.5524513774965514e-19, 0.8926852874947827e-01, 0.8926852875783797e-01, 0.8361491594360282e-11, 0.2997839428604580e-10, 0.3200537992817811e+00, 0.3200537992518009e+00, 0.4733821287189426e-18, 0.2644269217171004e-32, 0.2001510674733695e+01}, + tauOut: 2.2317132187369120e-002, + ttypeOut: -9, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6258828074500417e+00, 0.6481999396374107e+00, 0.7879513719234823e-20, 0.5524513774965514e-19, 0.6695139660044724e-01, 0.8926852875783797e-01, 0.1433084218388560e-09, 0.2997839428604580e-10, 0.2977366669211234e+00, 0.3200537992518009e+00, 0.8926852875783797e-01, 
0.2644269217171004e-32}, + i0: 1, + n0: 3, + pp: 0, + n0in: 3, + dmin: 6.6951396570468849e-002, + dmin1: 6.6951396570468849e-002, + dmin2: 0.62588280745004166, + dn: 0.29773666692112338, + dn1: 6.6951396570468849e-002, + dn2: 0.62588280745004166, + tau: 2.2317132187369120e-002, + ttype: -9, + g: 0.49975000000000003, + zOut: []float64{0.6258828074500417e+00, 0.6481999396374107e+00, 0.7879513719234823e-20, 0.5524513774965514e-19, 0.6695139660044724e-01, 0.8926852875783797e-01, 0.1433084218388560e-09, 0.2997839428604580e-10, 0.2977366669211234e+00, 0.3200537992518009e+00, 0.8926852875783797e-01, 0.2644269217171004e-32}, + tauOut: 6.6950732596142107e-002, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.6258828074500417e+00, 0.5589320748538995e+00, 0.7879513719234823e-20, 0.9438435755776795e-21, 0.6695139660044724e-01, 0.6641476135588615e-06, 0.1433084218388560e-09, 0.6424501268835132e-04, 0.2977366669211234e+00, 0.2307216893122929e+00, 0.8926852875783797e-01, 0.6695139660044724e-01, 0.2001510674733695e+01}, + i0: 1, + n0: 3, + pp: 1, + n0in: 3, + dmin: 6.6400430513702258e-007, + dmin1: 6.6400430513702258e-007, + dmin2: 0.55893207485389951, + dn: 0.23072168931229292, + dn1: 6.6400430513702258e-007, + dn2: 0.55893207485389951, + tau: 6.6950732596142107e-002, + ttype: -4, + g: 0.49975000000000003, + zOut: []float64{0.6258828074500417e+00, 0.5589320748538995e+00, 0.7879513719234823e-20, 0.9438435755776795e-21, 0.6695139660044724e-01, 0.6641476135588615e-06, 0.1433084218388560e-09, 0.6424501268835132e-04, 0.2977366669211234e+00, 0.2307216893122929e+00, 0.8926852875783797e-01, 0.6695139660044724e-01, 0.2001510674733695e+01}, + tauOut: 6.6398937736467640e-007, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + { + z: []float64{0.5589315065851642e+00, 0.5589320748538995e+00, 0.1121517486324177e-26, 0.9438435755776795e-21, 0.6434089156657428e-04, 0.6641476135588615e-06, 0.2303778747300831e+00, 0.6424501268835132e-04, 0.3432463134744483e-03, 0.2307216893122929e+00, 0.6641476135588615e-06, 0.6695139660044724e-01}, + i0: 1, + n0: 3, + pp: 0, + n0in: 3, + dmin: 9.5878878222950548e-008, + dmin1: 9.5878878222950548e-008, + dmin2: 0.55893150658516422, + dn: 3.4324631347444829e-004, + dn1: 9.5878878222950548e-008, + dn2: 0.55893150658516422, + tau: 5.6826873533591094e-007, + ttype: -15, + g: 0.49975000000000003, + zOut: []float64{0.5589315065851642e+00, 0.5589320748538995e+00, 0.1121517486324177e-26, 0.9438435755776795e-21, 0.6434089156657428e-04, 0.6641476135588615e-06, 0.2303778747300831e+00, 0.6424501268835132e-04, 0.3432463134744483e-03, 0.2307216893122929e+00, 0.6641476135588615e-06, 0.6695139660044724e-01}, + tauOut: 9.4211902206835373e-008, + ttypeOut: -4, + gOut: 0.49975000000000003, + }, + } { + z := make([]float64, len(test.z)) + copy(z, test.z) + + // Print for fortran input + //printDlasq4FortranInput(test) + + i0 := test.i0 - 1 // zero index + n0 := test.n0 - 1 // zero index + n0in := test.n0in - 1 + + tauOut, ttypeOut, gOut := impl.Dlasq4(i0, n0, z, test.pp, n0in, test.dmin, test.dmin1, test.dmin2, test.dn, test.dn1, test.dn2, test.tau, test.ttype, test.g) + + if !floats.EqualApprox(z, test.zOut, dTol) { + t.Errorf("Z mismatch") + } + if !floats.EqualWithinAbsOrRel(tauOut, test.tauOut, dTol, dTol) { + t.Errorf("Case %d, wrong tau. Want %v, got %v", cas, test.tauOut, tauOut) + } + if ttypeOut != test.ttypeOut { + t.Errorf("Wrong ttype. Want %v, got %v", test.ttypeOut, ttypeOut) + } + if !floats.EqualWithinAbsOrRel(gOut, test.gOut, dTol, dTol) { + t.Errorf("Wrong g. 
Want %v, got %v", test.gOut, gOut) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq5.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq5.go new file mode 100644 index 0000000..e8dc121 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasq5.go @@ -0,0 +1,1992 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/floats" +) + +type Dlasq5er interface { + Dlasq5(i0, n0 int, z []float64, pp int, tau, sigma float64) (i0Out, n0Out, ppOut int, tauOut, sigmaOut, dmin, dmin1, dmin2, dn, dnm1, dnm2 float64) +} + +func Dlasq5Test(t *testing.T, impl Dlasq5er) { + dTol := 1e-14 + for _, test := range []struct { + z []float64 + i0, n0, pp int + tau, sigma float64 + + i0Out, n0Out, ppOut int + tauOut, sigmaOut float64 + dminOut, dmin1Out, dmin2Out float64 + dnOut, dnm1Out, dnm2Out float64 + }{ + { + z: []float64{0.1914365246180821e+01, 0.1564384297703890e+01, 0.2493389162143899e+00, 0.3499809484769305e+00, 0.1315996513131545e+01, 0.1363862112490627e+01, 0.9898466611970759e-01, 0.2014733168553078e+00, 0.6023973979587287e+00, 0.6465544792741794e+00, 0.2210033410638781e-02, 0.5482758480425683e-01, 0.9861857233678967e-01, 0.2428190810745492e-01, 0.4756321484454819e+00, 0.7654669763997353e-01, 0.2588748143677115e+00, 0.6127784069508770e+00, 0.1078611376690004e+00, 0.1217285558623164e+00, 0.6442896492255246e+00, 0.2293835804898155e+00, 0.6203230486639705e+00, 0.5227672064047094e+00, 0.3695660678607585e+00, 0.7645233184745865e+00, 0.5378838054252265e+00, 0.2253657980501426e+00, 0.3562533181264623e+00, 0.8820486722335483e+00, 0.2222132496436145e-01, 0.1208845131814035e-01, 0.1275094303021685e+01, 0.6548746852163357e+00, 0.1647324354821218e+00, 0.6424409427697111e+00, 0.1007530576543866e+01, 0.3269551736546701e+00, 0.3453881601783118e+00, 0.8453078383713172e+00, 0.2679391719153404e+00, 0.4116714838778281e+00, 0.7328677736683723e+00, 0.2016558482158241e+00, 0.8360828138307410e+00, 0.9737579452195326e+00, 0.4813660709592822e+00, 0.5951926422795808e+00, 0.6495370513676459e+00, 0.6761876248148171e+00, 0.2325475880222648e+00, 0.4547154975121112e+00, 0.1993624802893807e+00, 0.3321819367342255e+00, 0.3782318916911257e+00, 0.9972813157741996e-01, 0.9830449403503746e+00, 0.7561080996844842e+00, 0.4429733864040367e+00, 0.6051687323570161e+00, 0.1173279550602403e+01, 0.7195724480316686e+00, 0.5035524069144587e+00, 0.8966804889747714e+00, 0.3058980395058521e+00, 0.6588832353928662e+00, 0.3014634433415453e+00, 0.1505672110274446e+00, 0.1289422237567578e+01, 0.6124645310993601e+00, 0.7583364305799440e+00, 0.9784211498097629e+00, 0.4977814779461571e+00, 0.9993813577491869e+00, 0.2841468847862598e+00, 0.2567365507769143e+00, 0.9257539794205765e+00, 0.5509268385614666e+00, 0.5231355605450990e-04, 0.6589740256453697e+00, 0.2117869221381033e-04, 0.7349224826832024e-04, 0.0000000000000000e+00, 0.0000000000000000e+00}, + i0: 1, + n0: 21, + pp: 0, + tau: 0.0000000000000000, + sigma: 0.0000000000000000, + i0Out: 1, + n0Out: 21, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + dminOut: 2.1175426017541180e-005, + dmin1Out: 4.4311601260836921e-002, + dmin2Out: 4.4311601260836921e-002, + dnOut: 2.1175426017541180e-005, + dnm1Out: 0.33915960483100382, + dnm2Out: 0.16428924199195991, + }, + { + z: []float64{0.1914365246180821e+01, 0.2163704162395211e+01, 0.2493389162143899e+00, 
0.1516515751224039e+00, 0.1315996513131545e+01, 0.1263329604128848e+01, 0.9898466611970759e-01, 0.4719916727467415e-01, 0.6023973979587287e+00, 0.5574082640946934e+00, 0.2210033410638781e-02, 0.3910066531356214e-03, 0.9861857233678967e-01, 0.5738597141291359e+00, 0.4756321484454819e+00, 0.2145632131068746e+00, 0.2588748143677115e+00, 0.1521727389298373e+00, 0.1078611376690004e+00, 0.4566771620366771e+00, 0.6442896492255246e+00, 0.8079355358528180e+00, 0.6203230486639705e+00, 0.2837483186776231e+00, 0.3695660678607585e+00, 0.6237015546083620e+00, 0.5378838054252265e+00, 0.3072349091217998e+00, 0.3562533181264623e+00, 0.7123973396902394e-01, 0.2222132496436145e-01, 0.3977314805803597e+00, 0.1275094303021685e+01, 0.1042095257923447e+01, 0.1647324354821218e+00, 0.1592685164190333e+00, 0.1007530576543866e+01, 0.1193650220303144e+01, 0.3453881601783118e+00, 0.7752942700755104e-01, 0.2679391719153404e+00, 0.9232775185761617e+00, 0.7328677736683723e+00, 0.6636554427529671e+00, 0.8360828138307410e+00, 0.6537934420370561e+00, 0.4813660709592822e+00, 0.4782322339990674e+00, 0.6495370513676459e+00, 0.4038524053908432e+00, 0.2325475880222648e+00, 0.1147975431483785e+00, 0.1993624802893807e+00, 0.4627968288321279e+00, 0.3782318916911257e+00, 0.8034172324482011e+00, 0.9830449403503746e+00, 0.6226010943062101e+00, 0.4429733864040367e+00, 0.8347746582554776e+00, 0.1173279550602403e+01, 0.8420572992613844e+00, 0.5035524069144587e+00, 0.1829278057427913e+00, 0.3058980395058521e+00, 0.4244336771046062e+00, 0.3014634433415453e+00, 0.9158407747236312e+00, 0.1289422237567578e+01, 0.1131917893423890e+01, 0.7583364305799440e+00, 0.3334922359541972e+00, 0.4977814779461571e+00, 0.4484361267782198e+00, 0.2841468847862598e+00, 0.5865943745895725e+00, 0.9257539794205765e+00, 0.3392119183870583e+00, 0.5231355605450990e-04, 0.3266196269153995e-08, 0.2117869221381033e-04, 0.2117542601754118e-04, 0.0000000000000000e+00, 0.3910066531356214e-03, 0.1037537856266618e-321}, + i0: 1, + n0: 21, + pp: 1, + tau: 0.0000000000000000, + sigma: 0.0000000000000000, + i0Out: 1, + n0Out: 21, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 0.0000000000000000, + dminOut: 2.1175425069176302e-005, + dmin1Out: 2.9944624525135358e-002, + dmin2Out: 2.9944624525135358e-002, + dnOut: 2.1175425069176302e-005, + dnm1Out: 7.2928780948497918e-002, + dnm2Out: 0.16065460645225654, + }, + { + z: []float64{0.2315355737517615e+01, 0.2163704162395211e+01, 0.8274578340618610e-01, 0.1516515751224039e+00, 0.1227782987997336e+01, 0.1263329604128848e+01, 0.2142822156235013e-01, 0.4719916727467415e-01, 0.5363710491854788e+00, 0.5574082640946934e+00, 0.4183353417969536e-03, 0.3910066531356214e-03, 0.7880045918942136e+00, 0.5738597141291359e+00, 0.4143462125464707e-01, 0.2145632131068746e+00, 0.5674152797118673e+00, 0.1521727389298373e+00, 0.6502569120260687e+00, 0.4566771620366771e+00, 0.4414269425043723e+00, 0.8079355358528180e+00, 0.4009140594652070e+00, 0.2837483186776231e+00, 0.5300224042649548e+00, 0.6237015546083620e+00, 0.4129510944388858e-01, 0.3072349091217998e+00, 0.4276761051054951e+00, 0.7123973396902394e-01, 0.9691308092544145e+00, 0.3977314805803597e+00, 0.2322329650880660e+00, 0.1042095257923447e+01, 0.8186215063776209e+00, 0.1592685164190333e+00, 0.4525581409330741e+00, 0.1193650220303144e+01, 0.1581701233715052e+00, 0.7752942700755104e-01, 0.1428762837957623e+01, 0.9232775185761617e+00, 0.3036848136842134e+00, 0.6636554427529671e+00, 0.8283408623519102e+00, 0.6537934420370561e+00, 0.2331591338951825e+00, 0.4782322339990674e+00, 
0.2854908146440392e+00, 0.4038524053908432e+00, 0.1860933389154074e+00, 0.1147975431483785e+00, 0.1080120722364922e+01, 0.4627968288321279e+00, 0.4631042046962229e+00, 0.8034172324482011e+00, 0.9942715478654648e+00, 0.6226010943062101e+00, 0.7069779837626068e+00, 0.8347746582554776e+00, 0.3180071212415688e+00, 0.8420572992613844e+00, 0.2441477440283845e+00, 0.1829278057427913e+00, 0.1096126707799853e+01, 0.4244336771046062e+00, 0.9457451890006905e+00, 0.9158407747236312e+00, 0.5196649403773971e+00, 0.1131917893423890e+01, 0.2877815203259632e+00, 0.3334922359541972e+00, 0.7472489810418290e+00, 0.4484361267782198e+00, 0.2662831374385604e+00, 0.5865943745895725e+00, 0.7292878421469419e-01, 0.3392119183870583e+00, 0.9483648767903632e-12, 0.3266196269153995e-08, 0.2117542506917630e-04, 0.2117542601754118e-04, 0.4183353417969536e-03, 0.3910066531356214e-03}, + i0: 1, + n0: 21, + pp: 0, + tau: 2.1175313795360271e-005, + sigma: 0.0000000000000000, + i0Out: 1, + n0Out: 21, + ppOut: 0, + tauOut: 2.1175313795360271e-005, + sigmaOut: 0.0000000000000000, + dminOut: 1.1127325659669794e-010, + dmin1Out: 3.1433071595911154e-002, + dmin2Out: 3.1433071595911154e-002, + dnOut: 1.1127325659669794e-010, + dnm1Out: 3.5896964560873705e-002, + dnm2Out: 0.25842281720128102, + }, + { + z: []float64{0.2315355737517615e+01, 0.2398080345610006e+01, 0.8274578340618610e-01, 0.4236466279397526e-01, 0.1227782987997336e+01, 0.1206825371451915e+01, 0.2142822156235013e-01, 0.9523728911788614e-02, 0.5363710491854788e+00, 0.5272444803016919e+00, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.7880045918942136e+00, 0.8287928057414093e+00, 0.4143462125464707e-01, 0.2836732781232222e-01, 0.5674152797118673e+00, 0.1189283688611819e+01, 0.6502569120260687e+00, 0.2413561400585997e+00, 0.4414269425043723e+00, 0.6009636865971842e+00, 0.4009140594652070e+00, 0.3535878097802652e+00, 0.5300224042649548e+00, 0.2177085286147829e+00, 0.4129510944388858e-01, 0.8112190955144877e-01, 0.4276761051054951e+00, 0.1315663829494665e+01, 0.9691308092544145e+00, 0.1710650671895379e+00, 0.2322329650880660e+00, 0.8797682289623537e+00, 0.8186215063776209e+00, 0.4211038940233675e+00, 0.4525581409330741e+00, 0.1896031949674164e+00, 0.1581701233715052e+00, 0.1191897606932286e+01, 0.1428762837957623e+01, 0.5405288693957555e+00, 0.3036848136842134e+00, 0.4653859482687157e+00, 0.8283408623519102e+00, 0.5960928726645816e+00, 0.2331591338951825e+00, 0.1116684901463164e+00, 0.2854908146440392e+00, 0.3598944880993349e+00, 0.1860933389154074e+00, 0.5585061130503639e+00, 0.1080120722364922e+01, 0.9846976386969850e+00, 0.4631042046962229e+00, 0.4676068229793028e+00, 0.9942715478654648e+00, 0.1233621533334973e+01, 0.7069779837626068e+00, 0.1822471700779458e+00, 0.3180071212415688e+00, 0.3798865198782122e+00, 0.2441477440283845e+00, 0.7044652781161848e+00, 0.1096126707799853e+01, 0.1337385443370563e+01, 0.9457451890006905e+00, 0.3674861422265960e+00, 0.5196649403773971e+00, 0.4399391431629689e+00, 0.2877815203259632e+00, 0.4888049885267526e+00, 0.7472489810418290e+00, 0.5247059546398414e+00, 0.2662831374385604e+00, 0.3701064434002514e-01, 0.7292878421469419e-01, 0.3589696456182207e-01, 0.9483648767903632e-12, 0.5594353069081231e-15, 0.2117542506917630e-04, 0.1112732565966979e-09, 0.4183353417969536e-03, 0.6252320936560726e-03, 0.1037537856266618e-321}, + i0: 1, + n0: 21, + pp: 1, + tau: 2.1175313795360271e-005, + sigma: 2.1175313795360271e-005, + i0Out: 1, + n0Out: 21, + ppOut: 1, + tauOut: 2.1175313795360271e-005, + sigmaOut: 2.1175313795360271e-005, + dminOut: 
-2.1175202522103674e-005, + dmin1Out: 2.9116497146097618e-002, + dmin2Out: 4.9396687496051764e-002, + dnOut: -2.1175202522103674e-005, + dnm1Out: 2.9116497146097618e-002, + dnm2Out: 0.15954393093937583, + }, + { + z: []float64{0.2440423833090186e+01, 0.2398080345610006e+01, 0.2094994698033050e-01, 0.4236466279397526e-01, 0.1195377978069578e+01, 0.1206825371451915e+01, 0.4200624064314086e-02, 0.9523728911788614e-02, 0.5236479130172386e+00, 0.5272444803016919e+00, 0.9895730475750664e-03, 0.6252320936560726e-03, 0.8561493851923613e+00, 0.8287928057414093e+00, 0.3940527300515336e-01, 0.2836732781232222e-01, 0.1391213380351470e+01, 0.1189283688611819e+01, 0.1042588274099539e+00, 0.2413561400585997e+00, 0.8502714936537001e+00, 0.6009636865971842e+00, 0.9053470847599347e-01, 0.3535878097802652e+00, 0.2082745543764428e+00, 0.2177085286147829e+00, 0.5124445590385125e+00, 0.8112190955144877e-01, 0.9742631623318954e+00, 0.1315663829494665e+01, 0.1544732645319877e+00, 0.1710650671895379e+00, 0.1146377683139938e+01, 0.8797682289623537e+00, 0.6964776521238716e-01, 0.4211038940233675e+00, 0.1311831861373520e+01, 0.1896031949674164e+00, 0.4911110065859084e+00, 0.1191897606932286e+01, 0.5147826357647675e+00, 0.5405288693957555e+00, 0.5388939477127089e+00, 0.4653859482687157e+00, 0.1688462397843937e+00, 0.5960928726645816e+00, 0.2380205454936569e+00, 0.1116684901463164e+00, 0.6803588803422466e+00, 0.3598944880993349e+00, 0.8083375797812383e+00, 0.5585061130503639e+00, 0.6439457065812542e+00, 0.9846976386969850e+00, 0.8958050967125053e+00, 0.4676068229793028e+00, 0.5200424313866183e+00, 0.1233621533334973e+01, 0.1331299890548604e+00, 0.1822471700779458e+00, 0.9512006336257413e+00, 0.3798865198782122e+00, 0.9904762202705547e+00, 0.7044652781161848e+00, 0.7143741900128092e+00, 0.1337385443370563e+01, 0.2263121215682984e+00, 0.3674861422265960e+00, 0.7024108348076278e+00, 0.4399391431629689e+00, 0.3651408483866702e+00, 0.4888049885267526e+00, 0.1965545752794010e+00, 0.5247059546398414e+00, 0.6759292101929097e-02, 0.3701064434002514e-01, 0.2911649714609818e-01, 0.3589696456182207e-01, 0.2137969692662087e-23, 0.5594353069081231e-15, -0.2117520252210367e-04, 0.1112732565966979e-09, 0.9895730475750664e-03, 0.6252320936560726e-03, 0.1037537856266618e-321}, + i0: 1, + n0: 21, + pp: 1, + tau: 1.1127325659669789e-010, + sigma: 2.1175313795360271e-005, + i0Out: 1, + n0Out: 21, + ppOut: 1, + tauOut: 1.1127325659669789e-010, + sigmaOut: 2.1175313795360271e-005, + dminOut: 
0.4211038940233675e+00, 0.1311855086360479e+01, 0.1896031949674164e+00, 0.4911023119923957e+00, 0.1191897606932286e+01, 0.5148125055608023e+00, 0.5405288693957555e+00, 0.5388626806938843e+00, 0.4653859482687157e+00, 0.1688986820057405e+00, 0.5960928726645816e+00, 0.2379466412690434e+00, 0.1116684901463164e+00, 0.6804539597693821e+00, 0.3598944880993349e+00, 0.8082246312519304e+00, 0.5585061130503639e+00, 0.6440798303130841e+00, 0.9846976386969850e+00, 0.8956185534970393e+00, 0.4676068229793028e+00, 0.5202501498046066e+00, 0.1233621533334973e+01, 0.1330768347199243e+00, 0.1822471700779458e+00, 0.9512749631631994e+00, 0.3798865198782122e+00, 0.9903988276741268e+00, 0.7044652781161848e+00, 0.7144727578117591e+00, 0.1337385443370563e+01, 0.2262808998212762e+00, 0.3674861422265960e+00, 0.7024632317571722e+00, 0.4399391431629689e+00, 0.3651136124179467e+00, 0.4888049885267526e+00, 0.1966029864506465e+00, 0.5247059546398414e+00, 0.6757627705811050e-02, 0.3701064434002514e-01, 0.2913933674473832e-01, 0.3589696456182207e-01, 0.2136293938333395e-23, 0.5594353069081231e-15, 0.0000000000000000e+00, 0.1112732565966979e-09, 0.9895328911616120e-03, 0.6252320936560726e-03}, + i0: 1, + n0: 21, + pp: 0, + tau: -0.0000000000000000, + sigma: 2.1175425068616867e-005, + i0Out: 1, + n0Out: 21, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 2.1175425068616867e-005, + dminOut: 0.0000000000000000, + dmin1Out: 2.7016889331018056e-002, + dmin2Out: 5.3061698118516694e-002, + dnOut: 0.0000000000000000, + dnm1Out: 2.7016889331018056e-002, + dnm2Out: 8.6018658784156071e-002, + }, + { + z: []float64{0.2440445008292708e+01, 0.2461394773494974e+01, 0.2094976520226600e-01, 0.1017444891892999e-01, 0.1195399335050165e+01, 0.1189425435147283e+01, 0.4200549016048655e-02, 0.1849378635683999e-02, 0.5236691632680260e+00, 0.5228093175235037e+00, 0.9895328911616120e-03, 0.1620493249248586e-02, 0.8561706005512968e+00, 0.8939544038697832e+00, 0.3940429656773515e-01, 0.6132377362967349e-01, 0.1391235531991410e+01, 0.1434168925733579e+01, 0.1042571673718422e+00, 0.6181229879703373e-01, 0.8502943288943339e+00, 0.8790143072012576e+00, 0.9053227710395735e-01, 0.2145324219750511e-01, 0.2082981609510011e+00, 0.6992314020959263e+00, 0.5123864833424303e+00, 0.7139837844669097e+00, 0.9743424132304999e+00, 0.4148193287752837e+00, 0.1544607000116935e+00, 0.4268738185358478e+00, 0.1146411422862754e+01, 0.7891833197548568e+00, 0.6964571542795012e-01, 0.1157716892137957e+00, 0.1311855086360479e+01, 0.1687185709139079e+01, 0.4911023119923957e+00, 0.1498504938454686e+00, 0.5148125055608023e+00, 0.9038246924092180e+00, 0.5388626806938843e+00, 0.1006978425303630e+00, 0.1688986820057405e+00, 0.3061474807444209e+00, 0.2379466412690434e+00, 0.5288684194677825e+00, 0.6804539597693821e+00, 0.9598101715535300e+00, 0.8082246312519304e+00, 0.5423584775195998e+00, 0.6440798303130841e+00, 0.9973399062905237e+00, 0.8956185534970393e+00, 0.4671884516860899e+00, 0.5202501498046066e+00, 0.1861385328384410e+00, 0.1330768347199243e+00, 0.6800991665489665e+00, 0.9512749631631994e+00, 0.1261574624288360e+01, 0.9903988276741268e+00, 0.5608966509936130e+00, 0.7144727578117591e+00, 0.3798570066394223e+00, 0.2262808998212762e+00, 0.4184574968871406e+00, 0.7024632317571722e+00, 0.6491193472879784e+00, 0.3651136124179467e+00, 0.1105843276664904e+00, 0.1966029864506465e+00, 0.9277628648996712e-01, 0.6757627705811050e-02, 0.2122447413720272e-02, 0.2913933674473832e-01, 0.2701688933101806e-01, 0.2136293938333395e-23, 0.0000000000000000e+00, 0.2117542506861687e-04}, + i0: 1, 
+ n0: 20, + pp: 1, + tau: -0.0000000000000000, + sigma: 2.1175425068616867e-005, + i0Out: 1, + n0Out: 20, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 2.1175425068616867e-005, + dminOut: 2.5763383633962696e-002, + dmin1Out: 4.3622798915905092e-002, + dmin2Out: 7.4536672467372611e-002, + dnOut: 2.5763383633962696e-002, + dnm1Out: 4.3622798915905092e-002, + dnm2Out: 9.8141518071882677e-002, + }, + { + z: []float64{0.2471569222413904e+01, 0.2461394773494974e+01, 0.4896382518051712e-02, 0.1017444891892999e-01, 0.1186378431264915e+01, 0.1189425435147283e+01, 0.8149780515932184e-03, 0.1849378635683999e-02, 0.5236148327211592e+00, 0.5228093175235037e+00, 0.2766627272719901e-02, 0.1620493249248586e-02, 0.9525115502267366e+00, 0.8939544038697832e+00, 0.9233342160256496e-01, 0.6132377362967349e-01, 0.1403647802928048e+01, 0.1434168925733579e+01, 0.3870906568602875e-01, 0.6181229879703373e-01, 0.8617584837127339e+00, 0.8790143072012576e+00, 0.1740717486950262e-01, 0.2145324219750511e-01, 0.1395808011693333e+01, 0.6992314020959263e+00, 0.2121884039551361e+00, 0.7139837844669097e+00, 0.6295047433559955e+00, 0.4148193287752837e+00, 0.5351535485381410e+00, 0.4268738185358478e+00, 0.3698014604305115e+00, 0.7891833197548568e+00, 0.5281978587564573e+00, 0.1157716892137957e+00, 0.1308838344228090e+01, 0.1687185709139079e+01, 0.1034799882693896e+00, 0.1498504938454686e+00, 0.9010425466701916e+00, 0.9038246924092180e+00, 0.3421413441684364e-01, 0.1006978425303630e+00, 0.8008017657953598e+00, 0.3061474807444209e+00, 0.6338813300623194e+00, 0.5288684194677825e+00, 0.8682873190108105e+00, 0.9598101715535300e+00, 0.6229686202966810e+00, 0.5423584775195998e+00, 0.8415597376799326e+00, 0.9973399062905237e+00, 0.1033340463692495e+00, 0.4671884516860899e+00, 0.7629036530181579e+00, 0.1861385328384410e+00, 0.1124645093942705e+01, 0.6800991665489665e+00, 0.6978261813392677e+00, 0.1261574624288360e+01, 0.3053203341720497e+00, 0.5608966509936130e+00, 0.4929941693545132e+00, 0.3798570066394223e+00, 0.5509778292160957e+00, 0.4184574968871406e+00, 0.2087258457383731e+00, 0.6491193472879784e+00, 0.4915348757406203e-01, 0.1105843276664904e+00, 0.4574524632962537e-01, 0.9277628648996712e-01, 0.1253505697055357e-02, 0.2122447413720272e-02, 0.2576338363396270e-01, 0.2701688933101806e-01, 0.8149780515932184e-03, 0.1620493249248586e-02}, + i0: 1, + n0: 20, + pp: 0, + tau: 2.0080554394878082e-002, + sigma: 2.1175425068616867e-005, + i0Out: 1, + n0Out: 20, + ppOut: 0, + tauOut: 2.0080554394878082e-002, + sigmaOut: 2.1175425068616867e-005, + dminOut: -2.1187919252575148e-003, + dmin1Out: -2.1187919252575148e-003, + dmin2Out: 3.1777270007152948e-002, + dnOut: 4.3005217031728403e-002, + dnm1Out: -2.1187919252575148e-003, + dnm2Out: 3.1777270007152948e-002, + }, + { + z: []float64{0.2471569222413904e+01, 0.2456385050537078e+01, 0.4896382518051712e-02, 0.2364842030515144e-02, 0.1186378431264915e+01, 0.1164748012891115e+01, 0.8149780515932184e-03, 0.3663750368606913e-03, 0.5236148327211592e+00, 0.5059345305621403e+00, 0.2766627272719901e-02, 0.5208666879309463e-02, 0.9525115502267366e+00, 0.1019555750555114e+01, 0.9233342160256496e-01, 0.1271177219084926e+00, 0.1403647802928048e+01, 0.1295158592310706e+01, 0.3870906568602875e-01, 0.2575581550365553e-01, 0.8617584837127339e+00, 0.8333292886837029e+00, 0.1740717486950262e-01, 0.2915663048658400e-01, 0.1395808011693333e+01, 0.1558759230767007e+01, 0.2121884039551361e+00, 0.8569226352498945e-01, 0.6295047433559955e+00, 0.1058885473974269e+01, 0.5351535485381410e+00, 
0.1868951540728986e+00, 0.3698014604305115e+00, 0.6910236107191923e+00, 0.5281978587564573e+00, 0.1000437033056103e+01, 0.1308838344228090e+01, 0.3918007450464985e+00, 0.1034799882693896e+00, 0.2379777816619179e+00, 0.9010425466701916e+00, 0.6771983450302391e+00, 0.3421413441684364e-01, 0.4045895778871807e-01, 0.8008017657953598e+00, 0.1374143583674083e+01, 0.6338813300623194e+00, 0.4005339232303680e+00, 0.8682873190108105e+00, 0.1070641461682245e+01, 0.6229686202966810e+00, 0.4896740201485869e+00, 0.8415597376799326e+00, 0.4351392095057171e+00, 0.1033340463692495e+00, 0.1811694274708021e+00, 0.7629036530181579e+00, 0.1686298765095183e+01, 0.1124645093942705e+01, 0.4654019842229331e+00, 0.6978261813392677e+00, 0.5176639768935063e+00, 0.3053203341720497e+00, 0.2907699806261721e+00, 0.4929941693545132e+00, 0.7331214635495586e+00, 0.5509778292160957e+00, 0.1568680213363420e+00, 0.2087258457383731e+00, 0.8093075758121498e-01, 0.4915348757406203e-01, 0.2778348386000479e-01, 0.4574524632962537e-01, -0.8652862282021575e-03, 0.1253505697055357e-02, -0.3732238779264379e-01, 0.2576338363396270e-01, 0.4300521703172840e-01, 0.8149780515932184e-03, 0.3663750368606913e-03}, + i0: 1, + n0: 20, + pp: 0, + tau: 5.0201385987195205e-003, + sigma: 2.1175425068616867e-005, + i0Out: 1, + n0Out: 20, + ppOut: 0, + tauOut: 5.0201385987195205e-003, + sigmaOut: 2.1175425068616867e-005, + dminOut: 1.8576185384092288e-002, + dmin1Out: 1.8576185384092288e-002, + dmin2Out: 5.2365600435162571e-002, + dnOut: 1.9114649409197451e-002, + dnm1Out: 1.8576185384092288e-002, + dnm2Out: 5.2365600435162571e-002, + }, + { + z: []float64{0.2471569222413904e+01, 0.2471445466333236e+01, 0.4896382518051712e-02, 0.2350431231346416e-02, 0.1186378431264915e+01, 0.1179822839486443e+01, 0.8149780515932184e-03, 0.3616937915375072e-03, 0.5236148327211592e+00, 0.5209996276036221e+00, 0.2766627272719901e-02, 0.5058054349403302e-02, 0.9525115502267366e+00, 0.1034766778881179e+01, 0.9233342160256496e-01, 0.1252490967185870e+00, 0.1403647802928048e+01, 0.1312087633296770e+01, 0.3870906568602875e-01, 0.2542350442532051e-01, 0.8617584837127339e+00, 0.8487220155581966e+00, 0.1740717486950262e-01, 0.2862783537884150e-01, 0.1395808011693333e+01, 0.1574348441670908e+01, 0.2121884039551361e+00, 0.8484373804386666e-01, 0.6295047433559955e+00, 0.1074794415251550e+01, 0.5351535485381410e+00, 0.1841287608083240e+00, 0.3698014604305115e+00, 0.7088504197799252e+00, 0.5281978587564573e+00, 0.9752771411128711e+00, 0.1308838344228090e+01, 0.4320210527858890e+00, 0.1034799882693896e+00, 0.2158225196628609e+00, 0.9010425466701916e+00, 0.7144140228254550e+00, 0.3421413441684364e-01, 0.3835134583138245e-01, 0.8008017657953598e+00, 0.1391311611427577e+01, 0.6338813300623194e+00, 0.3955915526975877e+00, 0.8682873190108105e+00, 0.1090644248011184e+01, 0.6229686202966810e+00, 0.4806932321292802e+00, 0.8415597376799326e+00, 0.4591804133211825e+00, 0.1033340463692495e+00, 0.1716839812178710e+00, 0.7629036530181579e+00, 0.1710844627144272e+01, 0.1124645093942705e+01, 0.4587247601659613e+00, 0.6978261813392677e+00, 0.5394016167466366e+00, 0.3053203341720497e+00, 0.2790520826393697e+00, 0.4929941693545132e+00, 0.7598997773325197e+00, 0.5509778292160957e+00, 0.1513401067044909e+00, 0.2087258457383731e+00, 0.1015190880092246e+00, 0.4915348757406203e-01, 0.2214892234681356e-01, 0.4574524632962537e-01, 0.1982969108114764e-01, 0.1253505697055357e-02, 0.1628595626045726e-02, 0.2576338363396270e-01, 0.1911464940919745e-01, 0.8149780515932184e-03, 0.3616937915375072e-03, 
0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + tau: 5.4769133315280185e-003, + sigma: 5.0413140237881371e-003, + i0Out: 1, + n0Out: 20, + ppOut: 1, + tauOut: 5.4769133315280185e-003, + sigmaOut: 5.0413140237881371e-003, + dminOut: 9.5622848228283271e-003, + dmin1Out: 9.5622848228283271e-003, + dmin2Out: 6.9533978479808370e-002, + dnOut: 1.0856003705186750e-002, + dnm1Out: 9.5622848228283271e-003, + dnm2Out: 6.9533978479808370e-002, + }, + { + z: []float64{0.2468318984233055e+01, 0.2471445466333236e+01, 0.1123474100024551e-02, 0.2350431231346416e-02, 0.1173584145846428e+01, 0.1179822839486443e+01, 0.1605699355811189e-03, 0.3616937915375072e-03, 0.5204201986859162e+00, 0.5209996276036221e+00, 0.1005707814522541e-01, 0.5058054349403302e-02, 0.1144481884123012e+01, 0.1034766778881179e+01, 0.1435914304680996e+00, 0.1252490967185870e+00, 0.1188442793922463e+01, 0.1312087633296770e+01, 0.1815610143690141e-01, 0.2542350442532051e-01, 0.8537168361686087e+00, 0.8487220155581966e+00, 0.5279290053521807e-01, 0.2862783537884150e-01, 0.1600922365848029e+01, 0.1574348441670908e+01, 0.5696064828871891e-01, 0.8484373804386666e-01, 0.1196485614439627e+01, 0.1074794415251550e+01, 0.1090859328498209e+00, 0.1841287608083240e+00, 0.1569564714711448e+01, 0.7088504197799252e+00, 0.2684440171930437e+00, 0.9752771411128711e+00, 0.3739226419241781e+00, 0.4320210527858890e+00, 0.4123490187575627e+00, 0.2158225196628609e+00, 0.3349394365677468e+00, 0.7144140228254550e+00, 0.1593084209965356e+00, 0.3835134583138245e-01, 0.1622117829797102e+01, 0.1391311611427577e+01, 0.2659792301064862e+00, 0.3955915526975877e+00, 0.1299881336702450e+01, 0.1090644248011184e+01, 0.1698038973078534e+00, 0.4806932321292802e+00, 0.4555835838996722e+00, 0.4591804133211825e+00, 0.6447216871142054e+00, 0.1716839812178710e+00, 0.1519370786864500e+01, 0.1710844627144272e+01, 0.1628548339973444e+00, 0.4587247601659613e+00, 0.6501219520571339e+00, 0.5394016167466366e+00, 0.3261720586281595e+00, 0.2790520826393697e+00, 0.5795909120773233e+00, 0.7598997773325197e+00, 0.2650819619788820e-01, 0.1513401067044909e+00, 0.9168290082662192e-01, 0.1015190880092246e+00, 0.4790492926791300e-02, 0.2214892234681356e-01, 0.1119088044887405e-01, 0.1982969108114764e-01, 0.2781732372482683e-02, 0.1628595626045726e-02, 0.1085600370518675e-01, 0.1911464940919745e-01, 0.1605699355811189e-03, 0.3616937915375072e-03}, + i0: 1, + n0: 20, + pp: 0, + tau: 5.1216063611655054e-003, + sigma: 1.0518227355316156e-002, + i0Out: 1, + n0Out: 20, + ppOut: 0, + tauOut: 5.1216063611655054e-003, + sigmaOut: 1.0518227355316156e-002, + dminOut: 2.0601312480394186e-003, + dmin1Out: 5.4371870398960158e-003, + dmin2Out: 8.0023511442426670e-002, + dnOut: 2.0601312480394186e-003, + dnm1Out: 5.4371870398960158e-003, + dnm2Out: 8.0023511442426670e-002, + }, + { + z: []float64{0.2468318984233055e+01, 0.2464320851971913e+01, 0.1123474100024551e-02, 0.5350323562789559e-03, 0.1173584145846428e+01, 0.1168088077064565e+01, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.5204201986859162e+00, 0.5252841314829605e+00, 0.1005707814522541e-01, 0.2191222436498315e-01, 0.1144481884123012e+01, 0.1261039483864963e+01, 0.1435914304680996e+00, 0.1353250258951489e+00, 0.1188442793922463e+01, 0.1066152263103050e+01, 0.1815610143690141e-01, 0.1453842008528346e-01, 0.8537168361686087e+00, 0.8868497102573779e+00, 0.5279290053521807e-01, 0.9530062900995111e-01, 0.1600922365848029e+01, 0.1557460778765631e+01, 0.5696064828871891e-01, 0.4375878814786067e-01, 0.1196485614439627e+01, 0.1256691152780422e+01, 
0.1090859328498209e+00, 0.1362446379077657e+00, 0.1569564714711448e+01, 0.1696642487635560e+01, 0.2684440171930437e+00, 0.5916231430550117e-01, 0.3739226419241781e+00, 0.7219877400150740e+00, 0.4123490187575627e+00, 0.1912940350054112e+00, 0.3349394365677468e+00, 0.2978322161977056e+00, 0.1593084209965356e+00, 0.8676597630518320e+00, 0.1622117829797102e+01, 0.1015315690490590e+01, 0.2659792301064862e+00, 0.3405260456467969e+00, 0.1299881336702450e+01, 0.1124037582002341e+01, 0.1698038973078534e+00, 0.6882320425428856e-01, 0.4555835838996722e+00, 0.1026360460398424e+01, 0.6447216871142054e+00, 0.9544125430154021e+00, 0.1519370786864500e+01, 0.7226914714852769e+00, 0.1628548339973444e+00, 0.1465016632377001e+00, 0.6501219520571339e+00, 0.8246707410864278e+00, 0.3261720586281595e+00, 0.2292385937027206e+00, 0.5795909120773233e+00, 0.3717389082113253e+00, 0.2650819619788820e-01, 0.6537783023029759e-02, 0.9168290082662192e-01, 0.8481400436921797e-01, 0.4790492926791300e-02, 0.6320870478125323e-03, 0.1119088044887405e-01, 0.8218919412378699e-02, 0.2781732372482683e-02, 0.3674266095981827e-02, 0.1085600370518675e-01, 0.2060131248039419e-02, 0.1605699355811189e-03, 0.7153898701552432e-04, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + tau: 1.2817878169164906e-003, + sigma: 1.5639833716481661e-002, + i0Out: 1, + n0Out: 20, + ppOut: 1, + tauOut: 1.2817878169164906e-003, + sigmaOut: 1.5639833716481661e-002, + dminOut: 6.0731361153288982e-005, + dmin1Out: 6.8738708636769136e-003, + dmin2Out: 8.1489512892123819e-002, + dnOut: 6.0731361153288982e-005, + dnm1Out: 6.8738708636769136e-003, + dnm2Out: 8.1489512892123819e-002, + }, + { + z: []float64{0.2463574096511276e+01, 0.2464320851971913e+01, 0.2536822079344948e-03, 0.5350323562789559e-03, 0.1166624146026729e+01, 0.1168088077064565e+01, 0.3221114082852138e-04, 0.7153898701552432e-04, 0.5458823568901986e+00, 0.5252841314829605e+00, 0.5061929508212644e-01, 0.2191222436498315e-01, 0.1344463426861069e+01, 0.1261039483864963e+01, 0.1073120173669855e+00, 0.1353250258951489e+00, 0.9720968780044319e+00, 0.1066152263103050e+01, 0.1326348631702415e-01, 0.1453842008528346e-01, 0.9676050651333883e+00, 0.8868497102573779e+00, 0.1533962535161303e+00, 0.9530062900995111e-01, 0.1446541525580445e+01, 0.1557460778765631e+01, 0.3801569533217738e-01, 0.4375878814786067e-01, 0.1353638307539094e+01, 0.1256691152780422e+01, 0.1707682473962209e+00, 0.1362446379077657e+00, 0.1583754766727924e+01, 0.1696642487635560e+01, 0.2697037855661164e-01, 0.5916231430550117e-01, 0.8850296086469572e+00, 0.7219877400150740e+00, 0.6437471225190403e-01, 0.1912940350054112e+00, 0.1099835479180717e+01, 0.2978322161977056e+00, 0.8009821360646626e+00, 0.8676597630518320e+00, 0.5535778122558079e+00, 0.1015315690490590e+01, 0.6914368034330997e+00, 0.3405260456467969e+00, 0.5001421950066134e+00, 0.1124037582002341e+01, 0.1412346654806686e+00, 0.6882320425428856e-01, 0.1838256550116241e+01, 0.1026360460398424e+01, 0.3752173792456719e+00, 0.9544125430154021e+00, 0.4926939676603885e+00, 0.7226914714852769e+00, 0.2452143584512202e+00, 0.1465016632377001e+00, 0.8074131885210117e+00, 0.8246707410864278e+00, 0.1055431169003394e+00, 0.2292385937027206e+00, 0.2714517865170992e+00, 0.3717389082113253e+00, 0.2042703660177667e-02, 0.6537783023029759e-02, 0.8212159993993635e-01, 0.8481400436921797e-01, 0.6326073178529442e-04, 0.6320870478125323e-03, 0.1054813695965874e-01, 0.8218919412378699e-02, 0.7176120699696391e-03, 0.3674266095981827e-02, 0.6073136115328898e-04, 0.2060131248039419e-02, 
0.3221114082852138e-04, 0.7153898701552432e-04}, + i0: 1, + n0: 20, + pp: 0, + tau: 5.6837241251038845e-005, + sigma: 1.6921621533398150e-002, + i0Out: 1, + n0Out: 20, + ppOut: 0, + tauOut: 5.6837241251038845e-005, + sigmaOut: 1.6921621533398150e-002, + dminOut: 3.1568086081919418e-009, + dmin1Out: 1.0483100129151506e-002, + dmin2Out: 8.1316774559040517e-002, + dnOut: 3.1568086081919418e-009, + dnm1Out: 1.0483100129151506e-002, + dnm2Out: 8.1316774559040517e-002, + }, + { + z: []float64{0.2463574096511276e+01, 0.2463770941477959e+01, 0.2536822079344948e-03, 0.1201214707955848e-03, 0.1166624146026729e+01, 0.1166479398455512e+01, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.5458823568901986e+00, 0.5964297407456295e+00, 0.5061929508212644e-01, 0.1141052940222717e+00, 0.1344463426861069e+01, 0.1337613312964532e+01, 0.1073120173669855e+00, 0.7798791776646297e-01, 0.9720968780044319e+00, 0.9073156093137420e+00, 0.1326348631702415e-01, 0.1414482062243694e-01, 0.9676050651333883e+00, 0.1106799660785830e+01, 0.1533962535161303e+00, 0.2004825791345134e+00, 0.1446541525580445e+01, 0.1284017804536858e+01, 0.3801569533217738e-01, 0.4007693764646178e-01, 0.1353638307539094e+01, 0.1484272780047602e+01, 0.1707682473962209e+00, 0.1822138285193538e+00, 0.1583754766727924e+01, 0.1428454479523931e+01, 0.2697037855661164e-01, 0.1671007646458111e-01, 0.8850296086469572e+00, 0.9326374071930291e+00, 0.6437471225190403e-01, 0.7591545433480534e-01, 0.1099835479180717e+01, 0.1824845323669324e+01, 0.8009821360646626e+00, 0.2429827519008994e+00, 0.5535778122558079e+00, 0.1001975026546757e+01, 0.6914368034330997e+00, 0.3451350696526060e+00, 0.5001421950066134e+00, 0.2961849535934249e+00, 0.1412346654806686e+00, 0.8765656248686587e+00, 0.1838256550116241e+01, 0.1336851467252003e+01, 0.3752173792456719e+00, 0.1382856239786244e+00, 0.4926939676603885e+00, 0.5995658648917332e+00, 0.2452143584512202e+00, 0.3302211126778973e+00, 0.8074131885210117e+00, 0.5826783555022028e+00, 0.1055431169003394e+00, 0.4916926700063749e-01, 0.2714517865170992e+00, 0.2242683859353883e+00, 0.2042703660177667e-02, 0.7479881396448043e-03, 0.8212159993993635e-01, 0.8138003529082581e-01, 0.6326073178529442e-04, 0.8199589256196194e-05, 0.1054813695965874e-01, 0.1120071219912114e-01, 0.7176120699696391e-03, 0.3890963093641941e-05, 0.6073136115328898e-04, 0.3156808608191942e-08, 0.3221114082852138e-04, 0.1507398544447245e-04, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + tau: 3.1557121791797713e-009, + sigma: 1.6978458774649190e-002, + i0Out: 1, + n0Out: 20, + ppOut: 1, + tauOut: 3.1557121791797713e-009, + sigmaOut: 1.6978458774649190e-002, + dminOut: 6.9684975813366743e-017, + dmin1Out: 1.1199576261102989e-002, + dmin2Out: 8.1067607231828140e-002, + dnOut: 6.9684975813366743e-017, + dnm1Out: 1.1199576261102989e-002, + dnm2Out: 8.1067607231828140e-002, + }, + { + z: []float64{0.2463891059793043e+01, 0.2463770941477959e+01, 0.5686908130061341e-04, 0.1201214707955848e-03, 0.1166437600203943e+01, 0.1166479398455512e+01, 0.7707718980490818e-05, 0.1507398544447245e-04, 0.7105273238932086e+00, 0.5964297407456295e+00, 0.2148105431436762e+00, 0.1141052940222717e+00, 0.1200790684431606e+01, 0.1337613312964532e+01, 0.5892755169139442e-01, 0.7798791776646297e-01, 0.8625328750890724e+00, 0.9073156093137420e+00, 0.1815059242254727e-01, 0.1414482062243694e-01, 0.1289131644342084e+01, 0.1106799660785830e+01, 0.1996872873596725e+00, 0.2004825791345134e+00, 0.1124407451667935e+01, 0.1284017804536858e+01, 0.5290351604133232e-01, 0.4007693764646178e-01, 
0.1613583089369911e+01, 0.1484272780047602e+01, 0.1613081850537457e+00, 0.1822138285193538e+00, 0.1283856367779054e+01, 0.1428454479523931e+01, 0.1213877407087503e-01, 0.1671007646458111e-01, 0.9964140843012472e+00, 0.9326374071930291e+00, 0.1390325207358455e+00, 0.7591545433480534e-01, 0.1928795551678665e+01, 0.1824845323669324e+01, 0.1262252233392066e+00, 0.2429827519008994e+00, 0.1220884869704444e+01, 0.1001975026546757e+01, 0.8372928285471114e-01, 0.3451350696526060e+00, 0.1089021292451660e+01, 0.2961849535934249e+00, 0.1076046951396362e+01, 0.8765656248686587e+00, 0.3990901366785531e+00, 0.1336851467252003e+01, 0.2077509116934600e+00, 0.1382856239786244e+00, 0.7220360627204584e+00, 0.5995658648917332e+00, 0.2664862668525171e+00, 0.3302211126778973e+00, 0.3653613524946110e+00, 0.5826783555022028e+00, 0.3018138637972599e-01, 0.4916926700063749e-01, 0.1948349845395949e+00, 0.2242683859353883e+00, 0.3124249032854923e-03, 0.7479881396448043e-03, 0.8107580682108434e-01, 0.8138003529082581e-01, 0.1132782305976083e-05, 0.8199589256196194e-05, 0.1120346722419663e-01, 0.1120071219912114e-01, 0.1096359327194516e-11, 0.3890963093641941e-05, 0.6968497581336674e-16, 0.3156808608191942e-08, 0.7707718980490818e-05, 0.1507398544447245e-04}, + i0: 1, + n0: 20, + pp: 0, + tau: 6.9684975806547287e-017, + sigma: 1.6978461930361368e-002, + i0Out: 1, + n0Out: 20, + ppOut: 0, + tauOut: 6.9684975806547287e-017, + sigmaOut: 1.6978461930361368e-002, + dminOut: 6.1629758220391547e-032, + dmin1Out: 1.1203310405167735e-002, + dmin2Out: 8.0927116373146771e-002, + dnOut: 6.1629758220391547e-032, + dnm1Out: 1.1203310405167735e-002, + dnm2Out: 8.0927116373146771e-002, + }, + { + z: []float64{0.2463891059793043e+01, 0.2463947928874343e+01, 0.5686908130061341e-04, 0.2692193042748079e-04, 0.1166437600203943e+01, 0.1166418385992496e+01, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.7105273238932086e+00, 0.9253331718563164e+00, 0.2148105431436762e+00, 0.2787563517334627e+00, 0.1200790684431606e+01, 0.9809618843895378e+00, 0.5892755169139442e-01, 0.5181337969514327e-01, 0.8625328750890724e+00, 0.8288700878164763e+00, 0.1815059242254727e-01, 0.2822939734392020e-01, 0.1289131644342084e+01, 0.1460589534357837e+01, 0.1996872873596725e+00, 0.1537255119449346e+00, 0.1124407451667935e+01, 0.1023585455764333e+01, 0.5290351604133232e-01, 0.8339725654733963e-01, 0.1613583089369911e+01, 0.1691494017876317e+01, 0.1613081850537457e+00, 0.1224340957564512e+00, 0.1283856367779054e+01, 0.1173561046093478e+01, 0.1213877407087503e-01, 0.1030644761994533e-01, 0.9964140843012472e+00, 0.1125140157417147e+01, 0.1390325207358455e+00, 0.2383394688796517e+00, 0.1928795551678665e+01, 0.1816681306138221e+01, 0.1262252233392066e+00, 0.8482856339700598e-01, 0.1220884869704444e+01, 0.1219785589162149e+01, 0.8372928285471114e-01, 0.7475327847832687e-01, 0.1089021292451660e+01, 0.2090314965369696e+01, 0.1076046951396362e+01, 0.2054425921547012e+00, 0.3990901366785531e+00, 0.4013984562173118e+00, 0.2077509116934600e+00, 0.3737026089221466e+00, 0.7220360627204584e+00, 0.6148197206508288e+00, 0.2664862668525171e+00, 0.1583615157552351e+00, 0.3653613524946110e+00, 0.2371812231191019e+00, 0.3018138637972599e-01, 0.2479281399828426e-01, 0.1948349845395949e+00, 0.1703545954445960e+00, 0.3124249032854923e-03, 0.1486904479375115e-03, 0.8107580682108434e-01, 0.8092824915545274e-01, 0.1132782305976083e-05, 0.1568190288260776e-06, 0.1120346722419663e-01, 0.1120331040626409e-01, 0.1096359327194516e-11, 0.6819392699821255e-26, 0.6968497581336674e-16, 
0.6162975822039155e-31, 0.7707718980490818e-05, 0.4695180568393632e-05, 0.2117542506861687e-04}, + i0: 1, + n0: 20, + pp: 1, + tau: 6.1629758220391547e-032, + sigma: 1.6978461930361441e-002, + i0Out: 1, + n0Out: 20, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 1.6978461930361441e-002, + dminOut: 6.1629758220391547e-032, + dmin1Out: 1.1203288675083998e-002, + dmin2Out: 8.0846453894262649e-002, + dnOut: 6.1629758220391547e-032, + dnm1Out: 1.1203288675083998e-002, + dnm2Out: 8.0846453894262649e-002, + }, + { + z: []float64{0.2463974850804771e+01, 0.2463947928874343e+01, 0.1274454348702788e-04, 0.2692193042748079e-04, 0.1166410336629578e+01, 0.1166418385992496e+01, 0.3724766654883956e-05, 0.4695180568393632e-05, 0.1204085798823124e+01, 0.9253331718563164e+00, 0.2271012218143261e+00, 0.2787563517334627e+00, 0.8056740422703550e+00, 0.9809618843895378e+00, 0.5330513126246473e-01, 0.5181337969514327e-01, 0.8037943538979316e+00, 0.8288700878164763e+00, 0.5129615818002433e-01, 0.2822939734392020e-01, 0.1563018888122747e+01, 0.1460589534357837e+01, 0.1006713350698832e+00, 0.1537255119449346e+00, 0.1006311377241790e+01, 0.1023585455764333e+01, 0.1401812239704283e+00, 0.8339725654733963e-01, 0.1673746889662340e+01, 0.1691494017876317e+01, 0.8584564749956700e-01, 0.1224340957564512e+00, 0.1098021846213856e+01, 0.1173561046093478e+01, 0.1056099032774466e-01, 0.1030644761994533e-01, 0.1352918635969054e+01, 0.1125140157417147e+01, 0.3200390963041470e+00, 0.2383394688796517e+00, 0.1581470773231080e+01, 0.1816681306138221e+01, 0.6542811978092533e-01, 0.8482856339700598e-01, 0.1229110747859551e+01, 0.1219785589162149e+01, 0.1271308521106110e+00, 0.7475327847832687e-01, 0.2168626705413786e+01, 0.2090314965369696e+01, 0.3802606466401751e-01, 0.2054425921547012e+00, 0.7370750004754409e+00, 0.4013984562173118e+00, 0.3117182559112661e+00, 0.3737026089221466e+00, 0.4614629804947978e+00, 0.6148197206508288e+00, 0.8139413038408401e-01, 0.1583615157552351e+00, 0.1805799067333021e+00, 0.2371812231191019e+00, 0.2338892446571373e-01, 0.2479281399828426e-01, 0.1471143614268198e+00, 0.1703545954445960e+00, 0.8179526119010886e-04, 0.1486904479375115e-03, 0.8084661071329148e-01, 0.8092824915545274e-01, 0.2173118009582292e-07, 0.1568190288260776e-06, 0.1120328867508400e-01, 0.1120331040626409e-01, 0.3751376363572422e-55, 0.6819392699821255e-26}, + i0: 1, + n0: 19, + pp: 0, + tau: 1.1203284285913290e-002, + sigma: 1.6978461930361441e-002, + i0Out: 1, + n0Out: 19, + ppOut: 0, + tauOut: 1.1203284285913290e-002, + sigmaOut: 1.6978461930361441e-002, + dminOut: 8.9043085058082561e-010, + dmin1Out: 6.9585227169791292e-002, + dmin2Out: 0.11373841371027797, + dnOut: 8.9043085058082561e-010, + dnm1Out: 6.9585227169791292e-002, + dnm2Out: 0.11373841371027797, + }, + { + z: []float64{0.2463974850804771e+01, 0.2452784311062345e+01, 0.1274454348702788e-04, 0.6060609239813679e-05, 0.1166410336629578e+01, 0.1155204716501079e+01, 0.3724766654883956e-05, 0.3882375624867434e-05, 0.1204085798823124e+01, 0.1419979853975912e+01, 0.2271012218143261e+00, 0.1288536304732592e+00, 0.8056740422703550e+00, 0.7189222587736472e+00, 0.5330513126246473e-01, 0.5959804835594534e-01, 0.8037943538979316e+00, 0.7842891794360973e+00, 0.5129615818002433e-01, 0.1022287011292917e+00, 0.1563018888122747e+01, 0.1550258237777425e+01, 0.1006713350698832e+00, 0.6534828028921505e-01, 0.1006311377241790e+01, 0.1069941036637090e+01, 0.1401812239704283e+00, 0.2192904838448094e+00, 0.1673746889662340e+01, 0.1529098769031184e+01, 0.8584564749956700e-01, 
0.6164441320989392e-01, 0.1098021846213856e+01, 0.1035735139045794e+01, 0.1056099032774466e-01, 0.1379518767882891e-01, 0.1352918635969054e+01, 0.1647959260308459e+01, 0.3200390963041470e+00, 0.3071268139247320e+00, 0.1581470773231080e+01, 0.1328568794801360e+01, 0.6542811978092533e-01, 0.6053010243026304e-01, 0.1229110747859551e+01, 0.1284508213253985e+01, 0.1271308521106110e+00, 0.2146341752620367e+00, 0.2168626705413786e+01, 0.1980815310529854e+01, 0.3802606466401751e-01, 0.1414976019284330e-01, 0.7370750004754409e+00, 0.1023440211907950e+01, 0.3117182559112661e+00, 0.1405518698344743e+00, 0.4614629804947978e+00, 0.3911019567584944e+00, 0.8139413038408401e-01, 0.3758136265851575e-01, 0.1805799067333021e+00, 0.1551841842545868e+00, 0.2338892446571373e-01, 0.2217266343062855e-01, 0.1471143614268198e+00, 0.1138202089714681e+00, 0.8179526119010886e-04, 0.5809925758690545e-04, 0.8084661071329148e-01, 0.6958524890097138e-01, 0.2173118009582292e-07, 0.3498739858072780e-08, 0.1120328867508400e-01, 0.8904308505808256e-09, 0.3751376363572422e-55, 0.3882375624867434e-05, 0.1697846193036144e-01}, + i0: 1, + n0: 19, + pp: 1, + tau: 8.9043080564395014e-010, + sigma: 2.8181746216274728e-002, + i0Out: 1, + n0Out: 19, + ppOut: 1, + tauOut: 8.9043080564395014e-010, + sigmaOut: 2.8181746216274728e-002, + dminOut: 1.3962105636374437e-019, + dmin1Out: 6.9544122471813200e-002, + dmin2Out: 9.8247027562672340e-002, + dnOut: 1.3962105636374437e-019, + dnm1Out: 6.9544122471813200e-002, + dnm2Out: 9.8247027562672340e-002, + }, + { + z: []float64{0.2452790370781154e+01, 0.2452784311062345e+01, 0.2854399814229969e-05, 0.6060609239813679e-05, 0.1155205743586459e+01, 0.1155204716501079e+01, 0.4772219324121025e-05, 0.3882375624867434e-05, 0.1548828711339416e+01, 0.1419979853975912e+01, 0.5981019230390531e-01, 0.1288536304732592e+00, 0.7187101139352565e+00, 0.7189222587736472e+00, 0.6503610222645050e-01, 0.5959804835594534e-01, 0.8214817774485077e+00, 0.7842891794360973e+00, 0.1929207566298143e+00, 0.1022287011292917e+00, 0.1422685760546395e+01, 0.1550258237777425e+01, 0.4914564318703864e-01, 0.6534828028921505e-01, 0.1240085876404429e+01, 0.1069941036637090e+01, 0.2703980549150243e+00, 0.2192904838448094e+00, 0.1320345126435623e+01, 0.1529098769031184e+01, 0.4835651195207321e-01, 0.6164441320989392e-01, 0.1001173813882118e+01, 0.1035735139045794e+01, 0.2270725319399537e-01, 0.1379518767882891e-01, 0.1932378820148765e+01, 0.1647959260308459e+01, 0.2111589595024383e+00, 0.3071268139247320e+00, 0.1177939936838754e+01, 0.1328568794801360e+01, 0.6600626338337756e-01, 0.6053010243026304e-01, 0.1433136124242214e+01, 0.1284508213253985e+01, 0.2966575563411978e+00, 0.2146341752620367e+00, 0.1698307513491068e+01, 0.1980815310529854e+01, 0.8526979628348903e-02, 0.1414976019284330e-01, 0.1155465101223645e+01, 0.1023440211907950e+01, 0.4757401262929915e-01, 0.1405518698344743e+00, 0.3811093058972801e+00, 0.3911019567584944e+00, 0.1530278326215801e-01, 0.3758136265851575e-01, 0.1620540635326265e+00, 0.1551841842545868e+00, 0.1557318051836492e-01, 0.2217266343062855e-01, 0.9830512682025924e-01, 0.1138202089714681e+00, 0.4112553872737890e-04, 0.5809925758690545e-04, 0.6954412597055305e-01, 0.6958524890097138e-01, 0.4479725446695418e-16, 0.3498739858072780e-08, 0.1396210563637444e-18, 0.8904308505808256e-09, 0.2854399814229969e-05, 0.3882375624867434e-05}, + i0: 1, + n0: 19, + pp: 0, + tau: 1.3962105636374427e-019, + sigma: 2.8181747106705537e-002, + i0Out: 1, + n0Out: 19, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 
2.8181747106705537e-002, + dminOut: 1.3962105636374430e-019, + dmin1Out: 6.9512134440652351e-002, + dmin2Out: 8.9358776708560295e-002, + dnOut: 1.3962105636374430e-019, + dnm1Out: 6.9512134440652351e-002, + dnm2Out: 8.9358776708560295e-002, + }, + { + z: []float64{0.2452790370781154e+01, 0.2452793225180968e+01, 0.2854399814229969e-05, 0.1344352644992036e-05, 0.1155205743586459e+01, 0.1155209171453138e+01, 0.4772219324121025e-05, 0.6398278760815103e-05, 0.1548828711339416e+01, 0.1608632505364561e+01, 0.5981019230390531e-01, 0.2672219415054498e-01, 0.7187101139352565e+00, 0.7570240220111620e+00, 0.6503610222645050e-01, 0.7057368234283014e-01, 0.8214817774485077e+00, 0.9438288517354918e+00, 0.1929207566298143e+00, 0.2908001941945216e+00, 0.1422685760546395e+01, 0.1181031209538913e+01, 0.4914564318703864e-01, 0.5160305461093759e-01, 0.1240085876404429e+01, 0.1458880876708516e+01, 0.2703980549150243e+00, 0.2447209773632920e+00, 0.1320345126435623e+01, 0.1123980661024405e+01, 0.4835651195207321e-01, 0.4307304847484577e-01, 0.1001173813882118e+01, 0.9808080186012680e+00, 0.2270725319399537e-01, 0.4473761868138884e-01, 0.1932378820148765e+01, 0.2098800160969815e+01, 0.2111589595024383e+00, 0.1185117935689049e+00, 0.1177939936838754e+01, 0.1125434406653226e+01, 0.6600626338337756e-01, 0.8405284210411720e-01, 0.1433136124242214e+01, 0.1645740838479294e+01, 0.2966575563411978e+00, 0.3061331074057168e+00, 0.1698307513491068e+01, 0.1400701385713701e+01, 0.8526979628348903e-02, 0.7034066989504621e-02, 0.1155465101223645e+01, 0.1196005046863439e+01, 0.4757401262929915e-01, 0.1515955052150448e-01, 0.3811093058972801e+00, 0.3812525386379337e+00, 0.1530278326215801e-01, 0.6504555274179689e-02, 0.1620540635326265e+00, 0.1711226887768117e+00, 0.1557318051836492e-01, 0.8946350111698947e-02, 0.9830512682025924e-01, 0.8939990224728768e-01, 0.4112553872737890e-04, 0.3199152990069927e-04, 0.6954412597055305e-01, 0.6951213444065239e-01, 0.4479725446695418e-16, 0.8997910999570377e-34, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + tau: 6.8088572105595116e-002, + sigma: 2.8181747106705537e-002, + i0Out: 1, + n0Out: 18, + ppOut: 1, + tauOut: 6.8088572105595116e-002, + sigmaOut: 2.8181747106705537e-002, + dminOut: 1.2643548659038301e-003, + dmin1Out: 1.3935943135754067e-002, + dmin2Out: 9.9495800084882416e-002, + dnOut: 1.2643548659038301e-003, + dnm1Out: 1.3935943135754067e-002, + dnm2Out: 9.9495800084882416e-002, + }, + { + z: []float64{0.2384705997428018e+01, 0.2452793225180968e+01, 0.6512368848977837e-06, 0.1344352644992036e-05, 0.1087126346389419e+01, 0.1155209171453138e+01, 0.9467601652019932e-05, 0.6398278760815103e-05, 0.1567256659807859e+01, 0.1608632505364561e+01, 0.1290748567965171e-01, 0.2672219415054498e-01, 0.7466016465687454e+00, 0.7570240220111620e+00, 0.8921689079377818e-01, 0.7057368234283014e-01, 0.1077323583030640e+01, 0.9438288517354918e+00, 0.3187938243378626e+00, 0.2908001941945216e+00, 0.8457518677063927e+00, 0.1181031209538913e+01, 0.8901276181133644e-01, 0.5160305461093759e-01, 0.1546500520154876e+01, 0.1458880876708516e+01, 0.1778606876095877e+00, 0.2447209773632920e+00, 0.9211044497840675e+00, 0.1123980661024405e+01, 0.4586493023634136e-01, 0.4307304847484577e-01, 0.9115921349407204e+00, 0.9808080186012680e+00, 0.1030014605117353e+00, 0.4473761868138884e-01, 0.2046221921921390e+01, 0.2098800160969815e+01, 0.6518220171905387e-01, 0.1185117935689049e+00, 0.1076216474932694e+01, 0.1125434406653226e+01, 0.1285328723941422e+00, 0.8405284210411720e-01, 0.1755252501385274e+01, 
0.1645740838479294e+01, 0.2442959445536252e+00, 0.3061331074057168e+00, 0.1095350936043985e+01, 0.1400701385713701e+01, 0.7680442260639305e-02, 0.7034066989504621e-02, 0.1135395583018709e+01, 0.1196005046863439e+01, 0.5090399511302622e-02, 0.1515955052150448e-01, 0.3145781222952156e+00, 0.3812525386379337e+00, 0.3538316586334169e-02, 0.6504555274179689e-02, 0.1084421501965814e+00, 0.1711226887768117e+00, 0.7375387005938499e-02, 0.8946350111698947e-02, 0.1396793466565477e-01, 0.8939990224728768e-01, 0.1592074691534444e-03, 0.3199152990069927e-04, 0.1264354865903830e-02, 0.6951213444065239e-01, 0.6512368848977837e-06, 0.1344352644992036e-05}, + i0: 1, + n0: 18, + pp: 0, + tau: 1.2463174543591322e-003, + sigma: 9.6270319212300656e-002, + i0Out: 1, + n0Out: 18, + ppOut: 0, + tauOut: 1.2463174543591322e-003, + sigmaOut: 9.6270319212300656e-002, + dminOut: 1.2236274461701734e-006, + dmin1Out: 1.1812799375517376e-002, + dmin2Out: 0.10597947881780349, + dnOut: 1.2236274461701734e-006, + dnm1Out: 1.1812799375517376e-002, + dnm2Out: 0.10597947881780349, + }, + { + z: []float64{0.2384705997428018e+01, 0.2383460331210544e+01, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.1087126346389419e+01, 0.1085889199499347e+01, 0.9467601652019932e-05, 0.1366452649899944e-04, 0.1567256659807859e+01, 0.1578904163506652e+01, 0.1290748567965171e-01, 0.6103442048115082e-02, 0.7466016465687454e+00, 0.8284687778600495e+00, 0.8921689079377818e-01, 0.1160157908485996e+00, 0.1077323583030640e+01, 0.1278855299065544e+01, 0.3187938243378626e+00, 0.2108295383723412e+00, 0.8457518677063927e+00, 0.7226887736910288e+00, 0.8901276181133644e-01, 0.1904807262171572e+00, 0.1546500520154876e+01, 0.1532634164092948e+01, 0.1778606876095877e+00, 0.1068932656188067e+00, 0.9211044497840675e+00, 0.8588297969472432e+00, 0.4586493023634136e-01, 0.4868264913684867e-01, 0.9115921349407204e+00, 0.9646646288612479e+00, 0.1030014605117353e+00, 0.2184840619043245e+00, 0.2046221921921390e+01, 0.1891673744281760e+01, 0.6518220171905387e-01, 0.3708364593761750e-01, 0.1076216474932694e+01, 0.1166419383934860e+01, 0.1285328723941422e+00, 0.1934189785315259e+00, 0.1755252501385274e+01, 0.1804883149953014e+01, 0.2442959445536252e+00, 0.1482587897978486e+00, 0.1095350936043985e+01, 0.9535262710524167e+00, 0.7680442260639305e-02, 0.9145359161143373e-02, 0.1135395583018709e+01, 0.1130094305914509e+01, 0.5090399511302622e-02, 0.1416986451145964e-02, 0.3145781222952156e+00, 0.3154531349760448e+00, 0.3538316586334169e-02, 0.1216353924418735e-02, 0.1084421501965814e+00, 0.1133548658237420e+00, 0.7375387005938499e-02, 0.9088178357782586e-03, 0.1396793466565477e-01, 0.1197200684467082e-01, 0.1592074691534444e-03, 0.1681378409852773e-04, 0.1264354865903830e-02, 0.1223627446170173e-05, 0.6512368848977837e-06, 0.2970373645586866e-06, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + tau: 1.2218877946904154e-006, + sigma: 9.7516636666659787e-002, + i0Out: 1, + n0Out: 18, + ppOut: 1, + tauOut: 1.2218877946904154e-006, + sigmaOut: 9.7516636666659787e-002, + dminOut: 9.5986727915820745e-012, + dmin1Out: 1.1875197612370336e-002, + dmin2Out: 0.11291768719739126, + dnOut: 9.5986727915820745e-012, + dnm1Out: 1.1875197612370336e-002, + dnm2Out: 0.11291768719739126, + }, + { + z: []float64{0.2383459406360114e+01, 0.2383460331210544e+01, 0.1353283656358167e-06, 0.2970373645586866e-06, 0.1085901506809686e+01, 0.1085889199499347e+01, 0.1986826396898849e-04, 0.1366452649899944e-04, 0.1584986515403004e+01, 0.1578904163506652e+01, 0.3190255011763210e-02, 
0.6103442048115082e-02, 0.9412930918090910e+00, 0.8284687778600495e+00, 0.1576208411525267e+00, 0.1160157908485996e+00, 0.1332062774397564e+01, 0.1278855299065544e+01, 0.1143821023097510e+00, 0.2108295383723412e+00, 0.7987861757106404e+00, 0.7226887736910288e+00, 0.3654761154847582e+00, 0.1904807262171572e+00, 0.1274050092339202e+01, 0.1532634164092948e+01, 0.7205613198290631e-01, 0.1068932656188067e+00, 0.8354550922133908e+00, 0.8588297969472432e+00, 0.5621179414582513e-01, 0.4868264913684867e-01, 0.1126935674731953e+01, 0.9646646288612479e+00, 0.3667472533840468e+00, 0.2184840619043245e+00, 0.1562008914947536e+01, 0.1891673744281760e+01, 0.2769195683500124e-01, 0.3708364593761750e-01, 0.1332145183743590e+01, 0.1166419383934860e+01, 0.2620575140703801e+00, 0.1934189785315259e+00, 0.1691083203792688e+01, 0.1804883149953014e+01, 0.8359650824372877e-01, 0.1482587897978486e+00, 0.8790739000820365e+00, 0.9535262710524167e+00, 0.1175682535061811e-01, 0.9145359161143373e-02, 0.1119753245127243e+01, 0.1130094305914509e+01, 0.3991886785572840e-03, 0.1416986451145964e-02, 0.3162690783341116e+00, 0.3154531349760448e+00, 0.4359567385560517e-03, 0.1216353924418735e-02, 0.1138265050331695e+00, 0.1133548658237420e+00, 0.9558734450579545e-04, 0.9088178357782586e-03, 0.1189201139646886e-01, 0.1197200684467082e-01, 0.1730052806966466e-08, 0.1681378409852773e-04, 0.9598672791582074e-11, 0.1223627446170173e-05, 0.1353283656358167e-06, 0.2970373645586866e-06}, + i0: 1, + n0: 18, + pp: 0, + tau: 9.5986713933289272e-012, + sigma: 9.7517858554454467e-002, + i0Out: 1, + n0Out: 18, + ppOut: 0, + tauOut: 9.5986713933289272e-012, + sigmaOut: 9.7517858554454467e-002, + dminOut: 6.6174125886815435e-022, + dmin1Out: 1.1882019541390456e-002, + dmin2Out: 0.11366976186212303, + dnOut: 6.6174125886815435e-022, + dnm1Out: 1.1882019541390456e-002, + dnm2Out: 0.11366976186212303, + }, + { + z: []float64{0.2383459406360114e+01, 0.2383459541678881e+01, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.1085901506809686e+01, 0.1085921313408604e+01, 0.1986826396898849e-04, 0.2899927470478232e-04, 0.1584986515403004e+01, 0.1588147771130463e+01, 0.3190255011763210e-02, 0.1890859942802736e-02, 0.9412930918090910e+00, 0.1097023073009216e+01, 0.1576208411525267e+00, 0.1913914667196325e+00, 0.1332062774397564e+01, 0.1255053409978084e+01, 0.1143821023097510e+00, 0.7279916643176543e-01, 0.7987861757106404e+00, 0.1091463124754034e+01, 0.3654761154847582e+00, 0.4266153094141972e+00, 0.1274050092339202e+01, 0.9194909148983121e+00, 0.7205613198290631e-01, 0.6547064404326042e-01, 0.8354550922133908e+00, 0.8261962423063568e+00, 0.5621179414582513e-01, 0.7667315937770838e-01, 0.1126935674731953e+01, 0.1417009768728692e+01, 0.3667472533840468e+00, 0.4042756034295816e+00, 0.1562008914947536e+01, 0.1185425268343357e+01, 0.2769195683500124e-01, 0.3111938635974581e-01, 0.1332145183743590e+01, 0.1563083311444625e+01, 0.2620575140703801e+00, 0.2835172362390010e+00, 0.1691083203792688e+01, 0.1491162475787817e+01, 0.8359650824372877e-01, 0.4928202642453804e-01, 0.8790739000820365e+00, 0.8415486989985177e+00, 0.1175682535061811e-01, 0.1564347179719430e-01, 0.1119753245127243e+01, 0.1104508961999007e+01, 0.3991886785572840e-03, 0.1143051254380294e-03, 0.3162690783341116e+00, 0.3165907299376309e+00, 0.4359567385560517e-03, 0.1567431614478116e-03, 0.1138265050331695e+00, 0.1137653492066288e+00, 0.9558734450579545e-04, 0.9991845479738400e-05, 0.1189201139646886e-01, 0.1188202127144326e-01, 0.1730052806966466e-08, 0.1397591405272086e-17, 
0.9598672791582074e-11, 0.6617412588681544e-21, 0.1353283656358167e-06, 0.6165545233233256e-07, 0.2818174710670554e-01}, + i0: 1, + n0: 18, + pp: 1, + tau: 6.6174125886815426e-022, + sigma: 9.7517858564053150e-002, + i0Out: 1, + n0Out: 18, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 9.7517858564053150e-002, + dminOut: 6.6174125886815426e-022, + dmin1Out: 1.1880977265773463e-002, + dmin2Out: 0.11370904624558997, + dnOut: 6.6174125886815426e-022, + dnm1Out: 1.1880977265773463e-002, + dnm2Out: 0.11370904624558997, + }, + { + z: []float64{0.2383459603334333e+01, 0.2383459541678881e+01, 0.2809066689524106e-07, 0.6165545233233256e-07, 0.1085950284592642e+01, 0.1085921313408604e+01, 0.4240998334843304e-04, 0.2899927470478232e-04, 0.1589996221089918e+01, 0.1588147771130463e+01, 0.1304604978030436e-02, 0.1890859942802736e-02, 0.1287109934750819e+01, 0.1097023073009216e+01, 0.1866247058326724e+00, 0.1913914667196325e+00, 0.1141227870577177e+01, 0.1255053409978084e+01, 0.6962466280543776e-01, 0.7279916643176543e-01, 0.1448453771362794e+01, 0.1091463124754034e+01, 0.2708190685256155e+00, 0.4266153094141972e+00, 0.7141424904159570e+00, 0.9194909148983121e+00, 0.7574342769943966e-01, 0.6547064404326042e-01, 0.8271259739846255e+00, 0.8261962423063568e+00, 0.1313543755784947e+00, 0.7667315937770838e-01, 0.1689930996579779e+01, 0.1417009768728692e+01, 0.2835846650840220e+00, 0.4042756034295816e+00, 0.9329599896190804e+00, 0.1185425268343357e+01, 0.5213749144931325e-01, 0.3111938635974581e-01, 0.1794463056234313e+01, 0.1563083311444625e+01, 0.2355970842920850e+00, 0.2835172362390010e+00, 0.1304847417920270e+01, 0.1491162475787817e+01, 0.3178396542921673e-01, 0.4928202642453804e-01, 0.8254082053664953e+00, 0.8415486989985177e+00, 0.2093310277804656e-01, 0.1564347179719430e-01, 0.1083690164346398e+01, 0.1104508961999007e+01, 0.3339325601415243e-04, 0.1143051254380294e-03, 0.3167140798430647e+00, 0.3165907299376309e+00, 0.5630296103885627e-04, 0.1567431614478116e-03, 0.1137190380910697e+00, 0.1137653492066288e+00, 0.1044005669799533e-05, 0.9991845479738400e-05, 0.1188097726577346e-01, 0.1188202127144326e-01, 0.7784240935906335e-37, 0.1397591405272086e-17}, + i0: 1, + n0: 17, + pp: 0, + tau: 1.1880832155707781e-002, + sigma: 9.7517858564053150e-002, + i0Out: 1, + n0Out: 17, + ppOut: 0, + tauOut: 1.1880832155707781e-002, + sigmaOut: 9.7517858564053150e-002, + dminOut: 2.3287035572430725e-008, + dmin1Out: 0.10181720511285566, + dmin2Out: 0.30482311636216664, + dnOut: 2.3287035572430725e-008, + dnm1Out: 0.10181720511285566, + dnm2Out: 0.30482311636216664, + }, + { + z: []float64{0.2383459603334333e+01, 0.2371578799269292e+01, 0.2809066689524106e-07, 0.1286276792433928e-07, 0.1085950284592642e+01, 0.1074111849557515e+01, 0.4240998334843304e-04, 0.6277904232066118e-04, 0.1589996221089918e+01, 0.1579357214869920e+01, 0.1304604978030436e-02, 0.1063198377376995e-02, 0.1287109934750819e+01, 0.1460790610050406e+01, 0.1866247058326724e+00, 0.1457986614708343e+00, 0.1141227870577177e+01, 0.1053173039756072e+01, 0.6962466280543776e-01, 0.9575644420574685e-01, 0.1448453771362794e+01, 0.1611635563526955e+01, 0.2708190685256155e+00, 0.1200044280642223e+00, 0.7141424904159570e+00, 0.6580006578954666e+00, 0.7574342769943966e-01, 0.9521169265880262e-01, 0.8271259739846255e+00, 0.8513878247486099e+00, 0.1313543755784947e+00, 0.2607270439790754e+00, 0.1689930996579779e+01, 0.1700907785529018e+01, 0.2835846650840220e+00, 0.1555482010511415e+00, 0.9329599896190804e+00, 0.8176684478615445e+00, 0.5213749144931325e-01, 
0.1144214412274440e+00, 0.1794463056234313e+01, 0.1903757867143247e+01, 0.2355970842920850e+00, 0.1614796988702029e+00, 0.1304847417920270e+01, 0.1163270852323576e+01, 0.3178396542921673e-01, 0.2255256874351997e-01, 0.8254082053664953e+00, 0.8119079072453143e+00, 0.2093310277804656e-01, 0.2794035799797570e-01, 0.1083690164346398e+01, 0.1043902367448729e+01, 0.3339325601415243e-04, 0.1013132519024162e-04, 0.3167140798430647e+00, 0.3048794193232055e+00, 0.5630296103885627e-04, 0.2100082250625822e-04, 0.1137190380910697e+00, 0.1018182491185255e+00, 0.1044005669799533e-05, 0.1218230301111122e-06, 0.1188097726577346e-01, 0.2328703557243073e-07, 0.7784240935906335e-37, 0.1286276792433928e-07, 0.9751785856405315e-01}, + i0: 1, + n0: 17, + pp: 1, + tau: 2.3287007705477136e-008, + sigma: 0.10939869071976092, + i0Out: 1, + n0Out: 17, + ppOut: 1, + tauOut: 2.3287007705477136e-008, + sigmaOut: 0.10939869071976092, + dminOut: 2.6961654281145418e-018, + dmin1Out: 0.10181121275944585, + dmin2Out: 0.30487633314418217, + dnOut: 2.6961654281145418e-018, + dnm1Out: 0.10181121275944585, + dnm2Out: 0.30487633314418217, + }, + { + z: []float64{0.2371578788845052e+01, 0.2371578799269292e+01, 0.5825676764620370e-08, 0.1286276792433928e-07, 0.1074174599487151e+01, 0.1074111849557515e+01, 0.9230392664199863e-04, 0.6277904232066118e-04, 0.1580328086033647e+01, 0.1579357214869920e+01, 0.9827770701659698e-03, 0.1063198377376995e-02, 0.1605606471164067e+01, 0.1460790610050406e+01, 0.9563440497488795e-01, 0.1457986614708343e+00, 0.1053295055699924e+01, 0.1053173039756072e+01, 0.1465159169633776e+00, 0.9575644420574685e-01, 0.1585124051340792e+01, 0.1611635563526955e+01, 0.4981502397231050e-01, 0.1200044280642223e+00, 0.7033973032949510e+00, 0.6580006578954666e+00, 0.1152436546510613e+00, 0.9521169265880262e-01, 0.9968711907896164e+00, 0.8513878247486099e+00, 0.4448645553200346e+00, 0.2607270439790754e+00, 0.1411591407973117e+01, 0.1700907785529018e+01, 0.9010174998427344e-01, 0.1555482010511415e+00, 0.8419881158177074e+00, 0.8176684478615445e+00, 0.2587099684834221e+00, 0.1144214412274440e+00, 0.1806527574243020e+01, 0.1903757867143247e+01, 0.1039810460775319e+00, 0.1614796988702029e+00, 0.1081842351702556e+01, 0.1163270852323576e+01, 0.1692539477932339e-01, 0.2255256874351997e-01, 0.8229228471769590e+00, 0.8119079072453143e+00, 0.3544318396494814e-01, 0.2794035799797570e-01, 0.1008469291521964e+01, 0.1043902367448729e+01, 0.3062892015595061e-05, 0.1013132519024162e-04, 0.3048973339666884e+00, 0.3048794193232055e+00, 0.7013072071892081e-05, 0.2100082250625822e-04, 0.1018113345824760e+00, 0.1018182491185255e+00, 0.2786425742647189e-13, 0.1218230301111122e-06, 0.2696165428114542e-17, 0.2328703557243073e-07, 0.5825676764620370e-08, 0.1286276792433928e-07}, + i0: 1, + n0: 17, + pp: 0, + tau: 2.6961654281138038e-018, + sigma: 0.10939871400676864, + i0Out: 1, + n0Out: 17, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 0.10939871400676864, + dminOut: 2.6961654281138038e-018, + dmin1Out: 0.10180899282350273, + dmin2Out: 0.30489636738914000, + dnOut: 2.6961654281138038e-018, + dnm1Out: 0.10180899282350273, + dnm2Out: 0.30489636738914000, + }, + { + z: []float64{0.2371578788845052e+01, 0.2371578794670729e+01, 0.5825676764620370e-08, 0.2638661645752538e-08, 0.1074174599487151e+01, 0.1074266900775131e+01, 0.9230392664199863e-04, 0.1357860766428602e-03, 0.1580328086033647e+01, 0.1581175077027170e+01, 0.9827770701659698e-03, 0.9979623676695663e-03, 0.1605606471164067e+01, 0.1700242913771285e+01, 0.9563440497488795e-01, 
0.5924520849284005e-01, 0.1053295055699924e+01, 0.1140565764170461e+01, 0.1465159169633776e+00, 0.2036234219705987e+00, 0.1585124051340792e+01, 0.1431315653342504e+01, 0.4981502397231050e-01, 0.2448080089382756e-01, 0.7033973032949510e+00, 0.7941601570521848e+00, 0.1152436546510613e+00, 0.1446598374682775e+00, 0.9968711907896164e+00, 0.1297075908641373e+01, 0.4448645553200346e+00, 0.4841405038964208e+00, 0.1411591407973117e+01, 0.1017552654060970e+01, 0.9010174998427344e-01, 0.7455594793877945e-01, 0.8419881158177074e+00, 0.1026142136362350e+01, 0.2587099684834221e+00, 0.4554599945126984e+00, 0.1806527574243020e+01, 0.1455048625807853e+01, 0.1039810460775319e+00, 0.7731088667813635e-01, 0.1081842351702556e+01, 0.1021456859803743e+01, 0.1692539477932339e-01, 0.1363571444815687e-01, 0.8229228471769590e+00, 0.8447303166937503e+00, 0.3544318396494814e-01, 0.4231334180394086e-01, 0.1008469291521964e+01, 0.9661590126100381e+00, 0.3062892015595061e-05, 0.9665775484099522e-06, 0.3048973339666884e+00, 0.3049033804612119e+00, 0.7013072071892081e-05, 0.2341758973227438e-05, 0.1018113345824760e+00, 0.1018089928235306e+00, 0.2786425742647189e-13, 0.7379175991216932e-30, 0.1093987140067686e+00}, + i0: 1, + n0: 16, + pp: 1, + tau: 0.10180650470263587, + sigma: 0.10939871400676864, + i0Out: 1, + n0Out: 16, + ppOut: 1, + tauOut: 0.10180650470263587, + sigmaOut: 0.10939871400676864, + dminOut: 1.3142485785350155e-006, + dmin1Out: 0.20309651253108729, + dmin2Out: 0.57610166617362735, + dnOut: 1.3142485785350155e-006, + dnm1Out: 0.20309651253108729, + dnm2Out: 0.81137144083676127, + }, + { + z: []float64{0.2269772292606755e+01, 0.2371578794670729e+01, 0.1248859578385863e-08, 0.2638661645752538e-08, 0.9725961809002787e+00, 0.1074266900775131e+01, 0.2207509801202943e-03, 0.1357860766428602e-03, 0.1480145783712083e+01, 0.1581175077027170e+01, 0.1146359002276934e-02, 0.9979623676695663e-03, 0.1656535258559212e+01, 0.1700242913771285e+01, 0.4079180092843103e-01, 0.5924520849284005e-01, 0.1201590880509993e+01, 0.1140565764170461e+01, 0.2425529321011353e+00, 0.2036234219705987e+00, 0.1111437017432560e+01, 0.1431315653342504e+01, 0.1749237822536809e-01, 0.2448080089382756e-01, 0.8195211115924584e+00, 0.7941601570521848e+00, 0.2289566278084960e+00, 0.1446598374682775e+00, 0.1450453280026662e+01, 0.1297075908641373e+01, 0.3396444831847066e+00, 0.4841405038964208e+00, 0.6506576141124067e+00, 0.1017552654060970e+01, 0.1175810411452473e+00, 0.7455594793877945e-01, 0.1262214585027166e+01, 0.1026142136362350e+01, 0.5250426092262996e+00, 0.4554599945126984e+00, 0.9055103985570538e+00, 0.1455048625807853e+01, 0.8721019179982008e-01, 0.7731088667813635e-01, 0.8460758777494440e+00, 0.1021456859803743e+01, 0.1361402882064932e-01, 0.1363571444815687e-01, 0.7716231249744060e+00, 0.8447303166937503e+00, 0.5298106707064092e-01, 0.4231334180394086e-01, 0.8113724074143097e+00, 0.9661590126100381e+00, 0.3632274887524200e-06, 0.9665775484099522e-06, 0.2030988542900605e+00, 0.3049033804612119e+00, 0.1173872316183806e-05, 0.2341758973227438e-05, 0.1314248578535016e-05, 0.1018089928235306e+00, 0.1248859578385863e-08, 0.2638661645752538e-08}, + i0: 1, + n0: 16, + pp: 0, + tau: 1.3142409824136984e-006, + sigma: 0.21120521870940451, + i0Out: 1, + n0Out: 16, + ppOut: 0, + tauOut: 1.3142409824136984e-006, + sigmaOut: 0.21120521870940451, + dminOut: 8.4118841991824567e-018, + dmin1Out: 0.20309744276965425, + dmin2Out: 0.50018599148866161, + dnOut: 8.4118841991824567e-018, + dnm1Out: 0.20309744276965425, + dnm2Out: 0.75834178007645303, + }, + { + 
z: []float64{0.2269772292606755e+01, 0.2269770979614632e+01, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.9725961809002787e+00, 0.9728156171042807e+00, 0.2207509801202943e-03, 0.3358741643642203e-03, 0.1480145783712083e+01, 0.1480954954309013e+01, 0.1146359002276934e-02, 0.1282269997958535e-02, 0.1656535258559212e+01, 0.1696043475248702e+01, 0.4079180092843103e-01, 0.2889964597634762e-01, 0.1201590880509993e+01, 0.1415242852393798e+01, 0.2425529321011353e+00, 0.1904848393814713e+00, 0.1111437017432560e+01, 0.9384432420354740e+00, 0.1749237822536809e-01, 0.1527569554079381e-01, 0.8195211115924584e+00, 0.1033200729619178e+01, 0.2289566278084960e+00, 0.3214195289148511e+00, 0.1450453280026662e+01, 0.1468676920055535e+01, 0.3396444831847066e+00, 0.1504703083827628e+00, 0.6506576141124067e+00, 0.6177670326339089e+00, 0.1175810411452473e+00, 0.2402402478867145e+00, 0.1262214585027166e+01, 0.1547015632125768e+01, 0.5250426092262996e+00, 0.3073217441808570e+00, 0.9055103985570538e+00, 0.6853975319350345e+00, 0.8721019179982008e-01, 0.1076549537133787e+00, 0.8460758777494440e+00, 0.7520336386157322e+00, 0.1361402882064932e-01, 0.1396865635082151e-01, 0.7716231249744060e+00, 0.8106342214532430e+00, 0.5298106707064092e-01, 0.5302931309687425e-01, 0.8113724074143097e+00, 0.7583421433039418e+00, 0.3632274887524200e-06, 0.9727942389020712e-07, 0.2030988542900605e+00, 0.2030986166419704e+00, 0.1173872316183806e-05, 0.7596112905317353e-11, 0.1314248578535016e-05, 0.8411884199182457e-17, 0.1248859578385863e-08, 0.5351359530665278e-09, 0.1093987140067686e+00}, + i0: 1, + n0: 16, + pp: 1, + tau: 8.4118841988678429e-018, + sigma: 0.21120653295038691, + i0Out: 1, + n0Out: 16, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 0.21120653295038691, + dminOut: 8.4118841988678429e-018, + dmin1Out: 0.20309858884628909, + dmin2Out: 0.53283137411117854, + dnOut: 8.4118841988678429e-018, + dnm1Out: 0.20309858884628909, + dnm2Out: 0.71080515929391042, + }, + { + z: []float64{0.2269770980149768e+01, 0.2269770979614632e+01, 0.2293573303077261e-09, 0.5351359530665278e-09, 0.9731514910392876e+00, 0.9728156171042807e+00, 0.5111377954200868e-03, 0.3358741643642203e-03, 0.1481726086511552e+01, 0.1480954954309013e+01, 0.1467737987028945e-02, 0.1282269997958535e-02, 0.1723475383238021e+01, 0.1696043475248702e+01, 0.2373112943910766e-01, 0.2889964597634762e-01, 0.1581996562336162e+01, 0.1415242852393798e+01, 0.1129959536471923e+00, 0.1904848393814713e+00, 0.8407229839290754e+00, 0.9384432420354740e+00, 0.1877296098701645e-01, 0.1527569554079381e-01, 0.1335847297547013e+01, 0.1033200729619178e+01, 0.3533797947109679e+00, 0.3214195289148511e+00, 0.1265767433727330e+01, 0.1468676920055535e+01, 0.7343813202351114e-01, 0.1504703083827628e+00, 0.7845691484971122e+00, 0.6177670326339089e+00, 0.4737063898809231e+00, 0.2402402478867145e+00, 0.1380630986425702e+01, 0.1547015632125768e+01, 0.1525661578238559e+00, 0.3073217441808570e+00, 0.6404863278245572e+00, 0.6853975319350345e+00, 0.1264041760751794e+00, 0.1076549537133787e+00, 0.6395981188913744e+00, 0.7520336386157322e+00, 0.1770404028911661e-01, 0.1396865635082151e-01, 0.8459594942610007e+00, 0.8106342214532430e+00, 0.4753698401003136e-01, 0.5302931309687425e-01, 0.7108052565733343e+00, 0.7583421433039418e+00, 0.2779568135873871e-07, 0.9727942389020712e-07, 0.2030985888538852e+00, 0.2030986166419704e+00, 0.3146138162949754e-27, 0.7596112905317353e-11, 0.8411884198867843e-17, 0.8411884199182457e-17, 0.2293573303077261e-09, 0.5351359530665278e-09}, + i0: 1, + n0: 16, + pp: 0, + 
tau: 8.4118841988678429e-018, + sigma: 0.21120653295038691, + i0Out: 1, + n0Out: 16, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 0.21120653295038691, + dminOut: 8.4118841988678429e-018, + dmin1Out: 0.20309858045029386, + dmin2Out: 0.51860540644834729, + dnOut: 8.4118841988678429e-018, + dnm1Out: 0.20309858045029386, + dnm2Out: 0.67176795876146822, + }, + { + z: []float64{0.2269770980149768e+01, 0.2269770980379126e+01, 0.2293573303077261e-09, 0.9833566024906726e-10, 0.9731514910392876e+00, 0.9736626287363720e+00, 0.5111377954200868e-03, 0.7778528033461282e-03, 0.1481726086511552e+01, 0.1482415971695234e+01, 0.1467737987028945e-02, 0.1706410574351102e-02, 0.1723475383238021e+01, 0.1745500102102777e+01, 0.2373112943910766e-01, 0.2150819994097728e-01, 0.1581996562336162e+01, 0.1673484316042377e+01, 0.1129959536471923e+00, 0.5676676764251993e-01, 0.8407229839290754e+00, 0.8027291772735718e+00, 0.1877296098701645e-01, 0.3124068479314146e-01, 0.1335847297547013e+01, 0.1657986407464839e+01, 0.3533797947109679e+00, 0.2697830536296953e+00, 0.1265767433727330e+01, 0.1069422512121146e+01, 0.7343813202351114e-01, 0.5387701498318342e-01, 0.7845691484971122e+00, 0.1204398523394852e+01, 0.4737063898809231e+00, 0.5430210247136315e+00, 0.1380630986425702e+01, 0.9901761195359265e+00, 0.1525661578238559e+00, 0.9868601781741700e-01, 0.6404863278245572e+00, 0.6682044860823195e+00, 0.1264041760751794e+00, 0.1209927124430272e+00, 0.6395981188913744e+00, 0.5363094467374639e+00, 0.1770404028911661e-01, 0.2792585709699239e-01, 0.8459594942610007e+00, 0.8655706211740396e+00, 0.4753698401003136e-01, 0.3903729781186600e-01, 0.7108052565733343e+00, 0.6717679865571495e+00, 0.2779568135873871e-07, 0.8403591378512072e-08, 0.2030985888538852e+00, 0.2030985804502939e+00, 0.3146138162949754e-27, 0.1303059324279677e-43, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + tau: 0.10154929022514693, + sigma: 0.21120653295038691, + i0Out: 1, + n0Out: 15, + ppOut: 1, + tauOut: 0.10154929022514693, + sigmaOut: 0.21120653295038691, + dminOut: 0.10154928703201281, + dmin1Out: 0.32391205918605420, + dmin2Out: 0.32391205918605420, + dnOut: 0.10154928703201281, + dnm1Out: 0.53450854002684800, + dnm2Out: 0.69531980574199759, + }, + { + z: []float64{0.2168221690252314e+01, 0.2269770980379126e+01, 0.4415865678637858e-10, 0.9833566024906726e-10, 0.8728911912704126e+00, 0.9736626287363720e+00, 0.1321013925721922e-02, 0.7778528033461282e-03, 0.1381252078118717e+01, 0.1482415971695234e+01, 0.2156405683614187e-02, 0.1706410574351102e-02, 0.1663302606134993e+01, 0.1745500102102777e+01, 0.2163985983955575e-01, 0.2150819994097728e-01, 0.1607061933620194e+01, 0.1673484316042377e+01, 0.2835506195054301e-01, 0.5676676764251993e-01, 0.7040655098910235e+00, 0.8027291772735718e+00, 0.7356791380810462e-01, 0.3124068479314146e-01, 0.1752652257061283e+01, 0.1657986407464839e+01, 0.1646145547572216e+00, 0.2697830536296953e+00, 0.8571356821219610e+00, 0.1069422512121146e+01, 0.7570493055431493e-01, 0.5387701498318342e-01, 0.1570165327329021e+01, 0.1204398523394852e+01, 0.3424393862982660e+00, 0.5430210247136315e+00, 0.6448734608299306e+00, 0.9901761195359265e+00, 0.1022564019526126e+00, 0.9868601781741700e-01, 0.5853915063475871e+00, 0.6682044860823195e+00, 0.1108480973262629e+00, 0.1209927124430272e+00, 0.3518379162830466e+00, 0.5363094467374639e+00, 0.6870152520689508e-01, 0.2792585709699239e-01, 0.7343571035538636e+00, 0.8655706211740396e+00, 0.3571015630515456e-01, 0.3903729781186600e-01, 0.5345085484304394e+00, 0.6717679865571495e+00, 
0.3193134112956561e-08, 0.8403591378512072e-08, 0.1015492870320128e+00, 0.2030985804502939e+00, 0.4415865678637858e-10, 0.9833566024906726e-10}, + i0: 1, + n0: 15, + pp: 0, + tau: 0.10154100618138810, + sigma: 0.31275582317553385, + i0Out: 1, + n0Out: 15, + ppOut: 0, + tauOut: 0.10154100618138810, + sigmaOut: 0.31275582317553385, + dminOut: 8.2800215414347011e-006, + dmin1Out: 0.16866357962181588, + dmin2Out: 0.16866357962181588, + dnOut: 8.2800215414347011e-006, + dnm1Out: 0.39110725789187228, + dnm2Out: 0.42026820370851276, + }, + { + z: []float64{0.2168221690252314e+01, 0.2066680684115085e+01, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.8728911912704126e+00, 0.7726711989960955e+00, 0.1321013925721922e-02, 0.2361487308570419e-02, 0.1381252078118717e+01, 0.1279505990312372e+01, 0.2156405683614187e-02, 0.2803234389363145e-02, 0.1663302606134993e+01, 0.1580598225403798e+01, 0.2163985983955575e-01, 0.2200217261925751e-01, 0.1607061933620194e+01, 0.1511873816770092e+01, 0.2835506195054301e-01, 0.1320468740761088e-01, 0.7040655098910235e+00, 0.6628877301101292e+00, 0.7356791380810462e-01, 0.1945110224949301e+00, 0.1752652257061283e+01, 0.1621214783142186e+01, 0.1646145547572216e+00, 0.8703165684534678e-01, 0.8571356821219610e+00, 0.7442679496495411e+00, 0.7570493055431493e-01, 0.1597129865933493e+00, 0.1570165327329021e+01, 0.1651350720852550e+01, 0.3424393862982660e+00, 0.1337269360034139e+00, 0.6448734608299306e+00, 0.5118619205977412e+00, 0.1022564019526126e+00, 0.1169456581236225e+00, 0.5853915063475871e+00, 0.4777529393688394e+00, 0.1108480973262629e+00, 0.8163333047984263e-01, 0.3518379162830466e+00, 0.2373651048287110e+00, 0.6870152520689508e-01, 0.2125478936639627e+00, 0.7343571035538636e+00, 0.4559783600136673e+00, 0.3571015630515456e-01, 0.4186028435717903e-01, 0.5345085484304394e+00, 0.3911072610850064e+00, 0.3193134112956561e-08, 0.8290832843879624e-09, 0.1015492870320128e+00, 0.8280021541434701e-05, 0.4415865678637858e-10, 0.1865101988102620e-10, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + tau: 8.2795951083136037e-006, + sigma: 0.41429682935692197, + i0Out: 1, + n0Out: 15, + ppOut: 1, + tauOut: 8.2795951083136037e-006, + sigmaOut: 0.41429682935692197, + dminOut: 4.2641220534843968e-010, + dmin1Out: 0.19554701532405336, + dmin2Out: 0.19554701532405336, + dnOut: 4.2641220534843968e-010, + dnm1Out: 0.32821333105682254, + dnm2Out: 0.21848306981706411, + }, + { + z: []float64{0.2066672404538628e+01, 0.2066680684115085e+01, 0.6973096395115281e-11, 0.1865101988102620e-10, 0.7750244067025847e+00, 0.7726711989960955e+00, 0.3898634844569491e-02, 0.2361487308570419e-02, 0.1278402310262057e+01, 0.1279505990312372e+01, 0.3465878671879142e-02, 0.2803234389363145e-02, 0.1599126239756068e+01, 0.1580598225403798e+01, 0.2080167773382635e-01, 0.2200217261925751e-01, 0.1504268546848768e+01, 0.1511873816770092e+01, 0.5818924606767697e-02, 0.1320468740761088e-01, 0.8515715484031834e+00, 0.6628877301101292e+00, 0.3703084558710277e+00, 0.1945110224949301e+00, 0.1337929704521397e+01, 0.1621214783142186e+01, 0.4841425717359337e-01, 0.8703165684534678e-01, 0.8555583994741887e+00, 0.7442679496495411e+00, 0.3082690272254148e+00, 0.1597129865933493e+00, 0.1476800350035441e+01, 0.1651350720852550e+01, 0.4635002036444261e-01, 0.1337269360034139e+00, 0.5824492787618127e+00, 0.5118619205977412e+00, 0.9592445892242578e-01, 0.1169456581236225e+00, 0.4634535313311479e+00, 0.4777529393688394e+00, 0.4180980990954932e-01, 0.8163333047984263e-01, 0.4080949089880160e+00, 0.2373651048287110e+00, 
0.2374870106014949e+00, 0.2125478936639627e+00, 0.2603433541742431e+00, 0.4559783600136673e+00, 0.6288565043307552e-01, 0.4186028435717903e-01, 0.3282133318859058e+00, 0.3911072610850064e+00, 0.2091574834858362e-13, 0.8290832843879624e-09, 0.4264122053484397e-09, 0.8280021541434701e-05, 0.6973096395115281e-11, 0.1865101988102620e-10}, + i0: 1, + n0: 15, + pp: 0, + tau: 4.2641207498271701e-010, + sigma: 0.41430510895203027, + i0Out: 1, + n0Out: 15, + ppOut: 0, + tauOut: 4.2641207498271701e-010, + sigmaOut: 0.41430510895203027, + dminOut: 1.3036568472020817e-016, + dmin1Out: 0.15841174212052453, + dmin2Out: 0.15841174212052453, + dnOut: 1.3036568472020817e-016, + dnm1Out: 0.23494558610432464, + dnm2Out: 0.15841174212052453, + }, + { + z: []float64{0.2066672404538628e+01, 0.2066672404119188e+01, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.7750244067025847e+00, 0.7789230411181270e+00, 0.3898634844569491e-02, 0.6398608757305904e-02, 0.1278402310262057e+01, 0.1275469579750218e+01, 0.3465878671879142e-02, 0.4345362379476143e-02, 0.1599126239756068e+01, 0.1615582554684006e+01, 0.2080167773382635e-01, 0.1936843737632438e-01, 0.1504268546848768e+01, 0.1490719033652799e+01, 0.5818924606767697e-02, 0.3324054047451484e-02, 0.8515715484031834e+00, 0.1218555949800348e+01, 0.3703084558710277e+00, 0.4065850919906259e+00, 0.1337929704521397e+01, 0.9797588692779520e+00, 0.4841425717359337e-01, 0.4227695780870786e-01, 0.8555583994741887e+00, 0.1121550468464483e+01, 0.3082690272254148e+00, 0.4059129037098644e+00, 0.1476800350035441e+01, 0.1117237466263607e+01, 0.4635002036444261e-01, 0.2416365074306882e-01, 0.5824492787618127e+00, 0.6542100865147574e+00, 0.9592445892242578e-01, 0.6795451513972497e-01, 0.4634535313311479e+00, 0.4373088256745602e+00, 0.4180980990954932e-01, 0.3901675330591518e-01, 0.4080949089880160e+00, 0.6065651658571838e+00, 0.2374870106014949e+00, 0.1019316116273065e+00, 0.2603433541742431e+00, 0.2212973925536000e+00, 0.6288565043307552e-01, 0.9326774535516916e-01, 0.3282133318859058e+00, 0.2349455861043456e+00, 0.2091574834858362e-13, 0.3796083394336032e-22, 0.4264122053484397e-09, 0.1303656847202082e-15, 0.6973096395115281e-11, 0.2614986238618434e-11, 0.2112065329503869e+00}, + i0: 1, + n0: 15, + pp: 1, + tau: 1.3036568471812905e-016, + sigma: 0.41430510937844234, + i0Out: 1, + n0Out: 15, + ppOut: 1, + tauOut: 1.3036568471812905e-016, + sigmaOut: 0.41430510937844234, + dminOut: 2.0791168714198411e-027, + dmin1Out: 0.15670572686712736, + dmin2Out: 0.18680490956440032, + dnOut: 2.0791168714198411e-027, + dnm1Out: 0.15670572686712736, + dnm2Out: 0.18680490956440032, + }, + { + z: []float64{0.2066672404121803e+01, 0.2066672404119188e+01, 0.9855809897129084e-12, 0.2614986238618434e-11, 0.7853216498744473e+00, 0.7789230411181270e+00, 0.1039221422709001e-01, 0.6398608757305904e-02, 0.1269422727902604e+01, 0.1275469579750218e+01, 0.5530302475095172e-02, 0.4345362379476143e-02, 0.1629420689585235e+01, 0.1615582554684006e+01, 0.1771973219288664e-01, 0.1936843737632438e-01, 0.1476323355507364e+01, 0.1490719033652799e+01, 0.2743671176012720e-02, 0.3324054047451484e-02, 0.1622397370614960e+01, 0.1218555949800348e+01, 0.2455350071499520e+00, 0.4065850919906259e+00, 0.7765008199367077e+00, 0.9797588692779520e+00, 0.6106335063429082e-01, 0.4227695780870786e-01, 0.1466400021540057e+01, 0.1121550468464483e+01, 0.3092615230516922e+00, 0.4059129037098644e+00, 0.8321395939549830e+00, 0.1117237466263607e+01, 0.1899693772291602e-01, 0.2416365074306882e-01, 0.7031676639315663e+00, 0.6542100865147574e+00, 
0.4226176876348708e-01, 0.6795451513972497e-01, 0.4340638102169883e+00, 0.4373088256745602e+00, 0.5452240634477346e-01, 0.3901675330591518e-01, 0.6539743711397168e+00, 0.6065651658571838e+00, 0.3449248298919955e-01, 0.1019316116273065e+00, 0.2800726549195695e+00, 0.2212973925536000e+00, 0.7823985923721809e-01, 0.9326774535516916e-01, 0.1567057268671274e+00, 0.2349455861043456e+00, 0.3158014839988858e-37, 0.3796083394336032e-22}, + i0: 1, + n0: 14, + pp: 0, + tau: 5.2226904068357879e-002, + sigma: 0.41430510937844245, + i0Out: 1, + n0Out: 14, + ppOut: 0, + tauOut: 5.2226904068357879e-002, + sigmaOut: 0.41430510937844245, + dminOut: 6.1979733096699005e-002, + dmin1Out: 0.21025182606004778, + dmin2Out: 0.35454858327394784, + dnOut: 6.1979733096699005e-002, + dnm1Out: 0.21025182606004778, + dnm2Out: 0.51458348500658069, + }, + { + z: []float64{0.2066672404121803e+01, 0.2014445500054431e+01, 0.9855809897129084e-12, 0.3842238913414725e-12, 0.7853216498744473e+00, 0.7434869600327952e+00, 0.1039221422709001e-01, 0.1774357001838869e-01, 0.1269422727902604e+01, 0.1204982556290953e+01, 0.5530302475095172e-02, 0.7478273627729330e-02, 0.1629420689585235e+01, 0.1587435244082034e+01, 0.1771973219288664e-01, 0.1647944669694028e-01, 0.1476323355507364e+01, 0.1410360675918078e+01, 0.2743671176012720e-02, 0.3156160674217245e-02, 0.1622397370614960e+01, 0.1812549313022337e+01, 0.2455350071499520e+00, 0.1051878329628395e+00, 0.7765008199367077e+00, 0.6801494335398012e+00, 0.6106335063429082e-01, 0.1316523902981273e+00, 0.1466400021540057e+01, 0.1591782250225264e+01, 0.3092615230516922e+00, 0.1616733433116970e+00, 0.8321395939549830e+00, 0.6372362842978442e+00, 0.1899693772291602e-01, 0.2096244776016672e-01, 0.7031676639315663e+00, 0.6722400808665288e+00, 0.4226176876348708e-01, 0.2728832287468248e-01, 0.4340638102169883e+00, 0.4090709896187213e+00, 0.5452240634477346e-01, 0.8716398206477828e-01, 0.6539743711397168e+00, 0.5490759679957803e+00, 0.3449248298919955e-01, 0.1759392479116377e-01, 0.2800726549195695e+00, 0.2884916852972659e+00, 0.7823985923721809e-01, 0.4249908970207047e-01, 0.1567057268671274e+00, 0.6197973309669901e-01, 0.3158014839988858e-37, 0.3842238913414725e-12, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + tau: 4.3992746597899146e-002, + sigma: 0.46653201344680034, + i0Out: 1, + n0Out: 14, + ppOut: 1, + tauOut: 4.3992746597899146e-002, + sigmaOut: 0.46653201344680034, + dminOut: 8.3978432207528000e-003, + dmin1Out: 0.23219513064786862, + dmin2Out: 0.34734604130905122, + dnOut: 8.3978432207528000e-003, + dnm1Out: 0.23219513064786862, + dnm2Out: 0.39493697562207530, + }, + { + z: []float64{0.1970452753456916e+01, 0.2014445500054431e+01, 0.1449745254963753e-12, 0.3842238913414725e-12, 0.7172377834531399e+00, 0.7434869600327952e+00, 0.2980976860358389e-01, 0.1774357001838869e-01, 0.1138658314717199e+01, 0.1204982556290953e+01, 0.1042566937606312e-01, 0.7478273627729330e-02, 0.1549496274805013e+01, 0.1587435244082034e+01, 0.1499968987352189e-01, 0.1647944669694028e-01, 0.1354524400120875e+01, 0.1410360675918078e+01, 0.4223398900256122e-02, 0.3156160674217245e-02, 0.1869521000487022e+01, 0.1812549313022337e+01, 0.3826832915293116e-01, 0.1051878329628395e+00, 0.7295407480870981e+00, 0.6801494335398012e+00, 0.2872518617030954e+00, 0.1316523902981273e+00, 0.1422210985235967e+01, 0.1591782250225264e+01, 0.7243940711431240e-01, 0.1616733433116970e+00, 0.5417665783457993e+00, 0.6372362842978442e+00, 0.2601082853889214e-01, 0.2096244776016672e-01, 0.6295248286044199e+00, 0.6722400808665288e+00, 
0.1773220171177086e-01, 0.2728832287468248e-01, 0.4345100233738295e+00, 0.4090709896187213e+00, 0.1101462457758059e+00, 0.8716398206477828e-01, 0.4125309004132391e+00, 0.5490759679957803e+00, 0.1230380805149813e-01, 0.1759392479116377e-01, 0.2746942203499391e+00, 0.2884916852972659e+00, 0.9589143278047064e-02, 0.4249908970207047e-01, 0.8397843220752800e-02, 0.6197973309669901e-01, 0.1449745254963753e-12, 0.3842238913414725e-12}, + i0: 1, + n0: 14, + pp: 0, + tau: 8.0121275381568099e-003, + sigma: 0.51052476004469949, + i0Out: 1, + n0Out: 14, + ppOut: 0, + tauOut: 8.0121275381568099e-003, + sigmaOut: 0.51052476004469949, + dminOut: 8.3015533671799979e-005, + dmin1Out: 0.25644350373773794, + dmin2Out: 0.31779880038403513, + dnOut: 8.3015533671799979e-005, + dnm1Out: 0.25644350373773794, + dnm2Out: 0.31779880038403513, + }, + { + z: []float64{0.1970452753456916e+01, 0.1962440625918904e+01, 0.1449745254963753e-12, 0.5298565773193886e-13, 0.7172377834531399e+00, 0.7390354245185139e+00, 0.2980976860358389e-01, 0.4592897681783073e-01, 0.1138658314717199e+01, 0.1095142879737275e+01, 0.1042566937606312e-01, 0.1475107600976594e-01, 0.1549496274805013e+01, 0.1541732761130612e+01, 0.1499968987352189e-01, 0.1317831886314191e-01, 0.1354524400120875e+01, 0.1337557352619832e+01, 0.4223398900256122e-02, 0.5903098601340335e-02, 0.1869521000487022e+01, 0.1893874103500456e+01, 0.3826832915293116e-01, 0.1474137347708128e-01, 0.7295407480870981e+00, 0.9940391087749554e+00, 0.2872518617030954e+00, 0.4109825756725979e+00, 0.1422210985235967e+01, 0.1075655689139524e+01, 0.7243940711431240e-01, 0.3648495529374628e-01, 0.5417665783457993e+00, 0.5232803240527883e+00, 0.2601082853889214e-01, 0.3129195122603765e-01, 0.6295248286044199e+00, 0.6079529515519964e+00, 0.1773220171177086e-01, 0.1267338099203561e-01, 0.4345100233738295e+00, 0.5239707606194429e+00, 0.1101462457758059e+00, 0.8671997249104711e-01, 0.4125309004132391e+00, 0.3301026084355332e+00, 0.1230380805149813e-01, 0.1023858907404432e-01, 0.2746942203499391e+00, 0.2660326470157850e+00, 0.9589143278047064e-02, 0.3027001489241909e-03, 0.8397843220752800e-02, 0.8301553367179998e-04, 0.1449745254963753e-12, 0.5298565773193886e-13, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + tau: 8.2898056486573887e-005, + sigma: 0.51853688758285632, + i0Out: 1, + n0Out: 14, + ppOut: 1, + tauOut: 8.2898056486573887e-005, + sigmaOut: 0.51853688758285632, + dminOut: 1.9676836973621424e-008, + dmin1Out: 0.25663722751477031, + dmin2Out: 0.28224921557690585, + dnOut: 1.9676836973621424e-008, + dnm1Out: 0.25663722751477031, + dnm2Out: 0.28224921557690585, + }, + { + z: []float64{0.1962357727862470e+01, 0.1962440625918904e+01, 0.1995470932711637e-13, 0.5298565773193886e-13, 0.7848815032798381e+00, 0.7390354245185139e+00, 0.6408456783027588e-01, 0.4592897681783073e-01, 0.1045726489860278e+01, 0.1095142879737275e+01, 0.2174776805091989e-01, 0.1475107600976594e-01, 0.1533080413886347e+01, 0.1541732761130612e+01, 0.1149760777771624e-01, 0.1317831886314191e-01, 0.1331879945386970e+01, 0.1337557352619832e+01, 0.8393943921304423e-02, 0.5903098601340335e-02, 0.1900138634999746e+01, 0.1893874103500456e+01, 0.7711806645770664e-02, 0.1474137347708128e-01, 0.1397226979745296e+01, 0.9940391087749554e+00, 0.3163950825942626e+00, 0.4109825756725979e+00, 0.7956626637825215e+00, 0.1075655689139524e+01, 0.2399491656225486e-01, 0.3648495529374628e-01, 0.5304944606600845e+00, 0.5232803240527883e+00, 0.3586094769777509e-01, 0.3129195122603765e-01, 0.5846824867897703e+00, 0.6079529515519964e+00, 
0.1135741402906866e-01, 0.1267338099203561e-01, 0.5992504210249349e+00, 0.5239707606194429e+00, 0.4777049480214081e-01, 0.8671997249104711e-01, 0.2924878046509502e+00, 0.3301026084355332e+00, 0.9312521444528056e-02, 0.1023858907404432e-01, 0.2569399276636945e+00, 0.2660326470157850e+00, 0.9780034825247492e-07, 0.3027001489241909e-03, 0.1967683697362142e-07, 0.8301553367179998e-04, 0.1995470932711637e-13, 0.5298565773193886e-13}, + i0: 1, + n0: 14, + pp: 0, + tau: 1.9676827722764296e-008, + sigma: 0.51861978563934286, + i0Out: 1, + n0Out: 14, + ppOut: 0, + tauOut: 1.9676827722764296e-008, + sigmaOut: 0.51861978563934286, + dminOut: 1.5032959860012969e-015, + dmin1Out: 0.24838793989138258, + dmin2Out: 0.27047787997045275, + dnOut: 1.5032959860012969e-015, + dnm1Out: 0.24838793989138258, + dnm2Out: 0.27047787997045275, + }, + { + z: []float64{0.1962357727862470e+01, 0.1962357708185662e+01, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.7848815032798381e+00, 0.8489660514332782e+00, 0.6408456783027588e-01, 0.7893711422056097e-01, 0.1045726489860278e+01, 0.9885371240138099e+00, 0.2174776805091989e-01, 0.3372769361380378e-01, 0.1533080413886347e+01, 0.1510850308373431e+01, 0.1149760777771624e-01, 0.1013563894066502e-01, 0.1331879945386970e+01, 0.1330138230690781e+01, 0.8393943921304423e-02, 0.1199097716077874e-01, 0.1900138634999746e+01, 0.1895859444807910e+01, 0.7711806645770664e-02, 0.5683514322519618e-02, 0.1397226979745296e+01, 0.1707938528340212e+01, 0.3163950825942626e+00, 0.1473962616612956e+00, 0.7956626637825215e+00, 0.6722612990066530e+00, 0.2399491656225486e-01, 0.1893485515094503e-01, 0.5304944606600845e+00, 0.5474205335300870e+00, 0.3586094769777509e-01, 0.3830193935796279e-01, 0.5846824867897703e+00, 0.5577379417840483e+00, 0.1135741402906866e-01, 0.1220274725600271e-01, 0.5992504210249349e+00, 0.6348181488942451e+00, 0.4777049480214081e-01, 0.2200990500366971e-01, 0.2924878046509502e+00, 0.2797904014149808e+00, 0.9312521444528056e-02, 0.8551968095484232e-02, 0.2569399276636945e+00, 0.2483880376917308e+00, 0.9780034825247492e-07, 0.7747561140265878e-14, 0.1967683697362142e-07, 0.1503295986001297e-14, 0.1995470932711637e-13, 0.7981257539768321e-14, 0.4143051093784424e+00}, + i0: 1, + n0: 14, + pp: 1, + tau: 1.5032959860012393e-015, + sigma: 0.51861980531617058, + i0Out: 1, + n0Out: 14, + ppOut: 1, + tauOut: 1.5032959860012393e-015, + sigmaOut: 0.51861980531617058, + dminOut: 9.2691156363468887e-030, + dmin1Out: 0.24076768543540897, + dmin2Out: 0.27020241256687788, + dnOut: 9.2691156363468887e-030, + dnm1Out: 0.24076768543540897, + dnm2Out: 0.27020241256687788, + }, + { + z: []float64{0.1962357708185669e+01, 0.1962357708185662e+01, 0.3452895805257589e-14, 0.7981257539768321e-14, 0.9279031656538340e+00, 0.8489660514332782e+00, 0.8409527066820451e-01, 0.7893711422056097e-01, 0.9381695469594078e+00, 0.9885371240138099e+00, 0.5431587122209677e-01, 0.3372769361380378e-01, 0.1466670076091998e+01, 0.1510850308373431e+01, 0.9192115573380727e-02, 0.1013563894066502e-01, 0.1332937092278178e+01, 0.1330138230690781e+01, 0.1705497388769041e-01, 0.1199097716077874e-01, 0.1884487985242738e+01, 0.1895859444807910e+01, 0.5151050664063701e-02, 0.5683514322519618e-02, 0.1850183739337442e+01, 0.1707938528340212e+01, 0.5355619565040129e-01, 0.1473962616612956e+00, 0.6376399585071952e+00, 0.6722612990066530e+00, 0.1625577000116482e-01, 0.1893485515094503e-01, 0.5694667028868834e+00, 0.5474205335300870e+00, 0.3751307094084999e-01, 0.3830193935796279e-01, 0.5324276180991995e+00, 0.5577379417840483e+00, 
0.1454944326918194e-01, 0.1220274725600271e-01, 0.6422786106287314e+00, 0.6348181488942451e+00, 0.9587988848101409e-02, 0.2200990500366971e-01, 0.2787543806623621e+00, 0.2797904014149808e+00, 0.7620352256320365e-02, 0.8551968095484232e-02, 0.2407676854354167e+00, 0.2483880376917308e+00, 0.4837392336267432e-28, 0.7747561140265878e-14}, + i0: 1, + n0: 13, + pp: 0, + tau: 0.19361025134591583, + sigma: 0.51861980531617213, + i0Out: 1, + n0Out: 13, + ppOut: 0, + tauOut: 0.19361025134591583, + sigmaOut: 0.51861980531617213, + dminOut: 2.5949209826162500e-002, + dmin1Out: 7.8890170839591350e-002, + dmin2Out: 0.28791033268116917, + dnOut: 2.5949209826162500e-002, + dnm1Out: 7.8890170839591350e-002, + dnm2Out: 0.41777236268627749, + }, + { + z: []float64{0.1962357708185669e+01, 0.1768747456839757e+01, 0.3452895805257589e-14, 0.1811424766142649e-14, 0.9279031656538340e+00, 0.8183881849761209e+00, 0.8409527066820451e-01, 0.9640366690596863e-01, 0.9381695469594078e+00, 0.7024714999296202e+00, 0.5431587122209677e-01, 0.1134045480653624e+00, 0.1466670076091998e+01, 0.1168847392254101e+01, 0.9192115573380727e-02, 0.1048255904531585e-01, 0.1332937092278178e+01, 0.1145899255774637e+01, 0.1705497388769041e-01, 0.2804774784346500e-01, 0.1884487985242738e+01, 0.1667981036717421e+01, 0.5151050664063701e-02, 0.5713728135608638e-02, 0.1850183739337442e+01, 0.1704415955506319e+01, 0.5355619565040129e-01, 0.2003593680404179e-01, 0.6376399585071952e+00, 0.4402495403584025e+00, 0.1625577000116482e-01, 0.2102698332839761e-01, 0.5694667028868834e+00, 0.3923425391534199e+00, 0.3751307094084999e-01, 0.5090703407211448e-01, 0.5324276180991995e+00, 0.3024597759503511e+00, 0.1454944326918194e-01, 0.3089599659653809e-01, 0.6422786106287314e+00, 0.4273603515343789e+00, 0.9587988848101409e-02, 0.6253958476854957e-02, 0.2787543806623621e+00, 0.8651052309591171e-01, 0.7620352256320365e-02, 0.2120822426333837e-01, 0.2407676854354167e+00, 0.2594920982616250e-01, 0.4837392336267432e-28, 0.1811424766142649e-14, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + tau: 1.8780556192507153e-002, + sigma: 0.71223005666208794, + i0Out: 1, + n0Out: 13, + ppOut: 1, + tauOut: 1.8780556192507153e-002, + sigmaOut: 0.71223005666208794, + dminOut: 8.7653100724300811e-004, + dmin1Out: 6.6256164803006098e-002, + dmin2Out: 0.24571773655185866, + dnOut: 8.7653100724300811e-004, + dnm1Out: 6.6256164803006098e-002, + dnm2Out: 0.36084635920253871, + }, + { + z: []float64{0.1749966900647251e+01, 0.1768747456839757e+01, 0.8471295234418270e-15, 0.1811424766142649e-14, 0.8960112956895816e+00, 0.8183881849761209e+00, 0.7558032897122403e-01, 0.9640366690596863e-01, 0.7215151628312513e+00, 0.7024714999296202e+00, 0.1837142406762631e+00, 0.1134045480653624e+00, 0.9768351544306461e+00, 0.1168847392254101e+01, 0.1229681032071614e-01, 0.1048255904531585e-01, 0.1142869637104878e+01, 0.1145899255774637e+01, 0.4093477506677201e-01, 0.2804774784346500e-01, 0.1613979433593750e+01, 0.1667981036717421e+01, 0.6033886923870183e-02, 0.5713728135608638e-02, 0.1699637449193983e+01, 0.1704415955506319e+01, 0.5189819730562233e-02, 0.2003593680404179e-01, 0.4373061477637308e+00, 0.4402495403584025e+00, 0.1886499897608887e-01, 0.2102698332839761e-01, 0.4056040180569384e+00, 0.3923425391534199e+00, 0.3796148320598534e-01, 0.5090703407211448e-01, 0.2766137331483968e+00, 0.3024597759503511e+00, 0.4773343613933298e-01, 0.3089599659653809e-01, 0.3671003176793937e+00, 0.4273603515343789e+00, 0.1473802100398464e-02, 0.6253958476854957e-02, 0.8746438906634448e-01, 
0.8651052309591171e-01, 0.6292122626412339e-02, 0.2120822426333837e-01, 0.8765310072430081e-03, 0.2594920982616250e-01, 0.8471295234418270e-15, 0.1811424766142649e-14}, + i0: 1, + n0: 13, + pp: 0, + tau: 8.1622622999092049e-004, + sigma: 0.73101061285459512, + i0Out: 1, + n0Out: 13, + ppOut: 0, + tauOut: 8.1622622999092049e-004, + sigmaOut: 0.73101061285459512, + dminOut: 6.9556311345050133e-007, + dmin1Out: 8.6231166987816729e-002, + dmin2Out: 0.25114202935985525, + dnOut: 6.9556311345050133e-007, + dnm1Out: 8.6231166987816729e-002, + dnm2Out: 0.30765445605457048, + }, + { + z: []float64{0.1749966900647251e+01, 0.1749150674417261e+01, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.8960112956895816e+00, 0.9707753984308143e+00, 0.7558032897122403e-01, 0.5617401661873561e-01, 0.7215151628312513e+00, 0.8482391606587878e+00, 0.1837142406762631e+00, 0.2115659556707206e+00, 0.9768351544306461e+00, 0.7767497828506508e+00, 0.1229681032071614e-01, 0.1809289356632693e-01, 0.1142869637104878e+01, 0.1164895292375332e+01, 0.4093477506677201e-01, 0.5671572845129930e-01, 0.1613979433593750e+01, 0.1562481365836330e+01, 0.6033886923870183e-02, 0.6563547191183529e-02, 0.1699637449193983e+01, 0.1697447495503371e+01, 0.5189819730562233e-02, 0.1337031089310571e-02, 0.4373061477637308e+00, 0.4540178894205181e+00, 0.1886499897608887e-01, 0.1685334336738995e-01, 0.4056040180569384e+00, 0.4258959316655428e+00, 0.3796148320598534e-01, 0.2465547755855056e-01, 0.2766137331483968e+00, 0.2988754654991882e+00, 0.4773343613933298e-01, 0.5862963539483231e-01, 0.3671003176793937e+00, 0.3091282581549689e+00, 0.1473802100398464e-02, 0.4169958485368410e-03, 0.8746438906634448e-01, 0.9252328961422907e-01, 0.6292122626412339e-02, 0.5960921413863723e-04, 0.8765310072430081e-03, 0.6955631134505013e-06, 0.8471295234418270e-15, 0.4339463906783712e-15, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + tau: 6.9511331676175615e-007, + sigma: 0.73182683908458601, + i0Out: 1, + n0Out: 13, + ppOut: 1, + tauOut: 6.9511331676175615e-007, + sigmaOut: 0.73182683908458601, + dminOut: 1.2278907249223888e-012, + dmin1Out: 9.2372077429255559e-002, + dmin2Out: 0.25591158693238830, + dnOut: 1.2278907249223888e-012, + dnm1Out: 9.2372077429255559e-002, + dnm2Out: 0.25591158693238830, + }, + { + z: []float64{0.1749149979303945e+01, 0.1749150674417261e+01, 0.2408395422307052e-15, 0.4339463906783712e-15, 0.1026948719936233e+01, 0.9707753984308143e+00, 0.4639861736277134e-01, 0.5617401661873561e-01, 0.1013405803853420e+01, 0.8482391606587878e+00, 0.1621599259654447e+00, 0.2115659556707206e+00, 0.6326820553382161e+00, 0.7767497828506508e+00, 0.3331266686486833e-01, 0.1809289356632693e-01, 0.1188297658848447e+01, 0.1164895292375332e+01, 0.7457497555020480e-01, 0.5671572845129930e-01, 0.1494469242363992e+01, 0.1562481365836330e+01, 0.7455005714048079e-02, 0.6563547191183529e-02, 0.1691328825765317e+01, 0.1697447495503371e+01, 0.3589107121045615e-03, 0.1337031089310571e-02, 0.4705116269624868e+00, 0.4540178894205181e+00, 0.1525524548983380e-01, 0.1685334336738995e-01, 0.4352954686209429e+00, 0.4258959316655428e+00, 0.1692854133253905e-01, 0.2465547755855056e-01, 0.3405758644481647e+00, 0.2988754654991882e+00, 0.5321597610926388e-01, 0.5862963539483231e-01, 0.2563285827809251e+00, 0.3091282581549689e+00, 0.1505170716567330e-03, 0.4169958485368410e-03, 0.9243168664339420e-01, 0.9252328961422907e-01, 0.4485687980202113e-09, 0.5960921413863723e-04, 0.1227890724922389e-11, 0.6955631134505013e-06, 0.2408395422307052e-15, 0.4339463906783712e-15}, + i0: 1, 
+ n0: 13, + pp: 0, + tau: 1.2278907189544363e-012, + sigma: 0.73182753419790281, + i0Out: 1, + n0Out: 13, + ppOut: 0, + tauOut: 1.2278907189544363e-012, + sigmaOut: 0.73182753419790281, + dminOut: 4.9598525010503808e-024, + dmin1Out: 9.2368632133320736e-002, + dmin2Out: 0.22049265431403467, + dnOut: 4.9598525010503808e-024, + dnm1Out: 9.2368632133320736e-002, + dnm2Out: 0.22049265431403467, + }, + { + z: []float64{0.1749149979303945e+01, 0.1749149979302717e+01, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.1026948719936233e+01, 0.1073347337297776e+01, 0.4639861736277134e-01, 0.4380746706334982e-01, 0.1013405803853420e+01, 0.1131758262754287e+01, 0.1621599259654447e+00, 0.9065158049178278e-01, 0.6326820553382161e+00, 0.5753431417100738e+00, 0.3331266686486833e-01, 0.6880305191066147e-01, 0.1188297658848447e+01, 0.1194069582486762e+01, 0.7457497555020480e-01, 0.9333627524262254e-01, 0.1494469242363992e+01, 0.1408587972834190e+01, 0.7455005714048079e-02, 0.8951422490882578e-02, 0.1691328825765317e+01, 0.1682736313985311e+01, 0.3589107121045615e-03, 0.1003553923945662e-03, 0.4705116269624868e+00, 0.4856665170586981e+00, 0.1525524548983380e-01, 0.1367304312976171e-01, 0.4352954686209429e+00, 0.4385509668224923e+00, 0.1692854133253905e-01, 0.1314659648329904e-01, 0.3405758644481647e+00, 0.3806452440729016e+00, 0.5321597610926388e-01, 0.3583592846566260e-01, 0.2563285827809251e+00, 0.2206431713856914e+00, 0.1505170716567330e-03, 0.6305450884558253e-04, 0.9243168664339420e-01, 0.9236863258188953e-01, 0.4485687980202113e-09, 0.5962992535266723e-20, 0.1227890724922389e-11, 0.4959852501050381e-23, 0.2408395422307052e-15, 0.1414000300319855e-15, 0.5186198053161721e+00}, + i0: 1, + n0: 13, + pp: 1, + tau: 4.9598525010503808e-024, + sigma: 0.73182753419913071, + i0Out: 1, + n0Out: 13, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 0.73182753419913071, + dminOut: 4.9598525010503808e-024, + dmin1Out: 9.2339683204333278e-002, + dmin2Out: 0.20112464802017624, + dnOut: 4.9598525010503808e-024, + dnm1Out: 9.2339683204333278e-002, + dnm2Out: 0.20112464802017624, + }, + { + z: []float64{0.1749149979302717e+01, 0.1749149979302717e+01, 0.8676862906242004e-16, 0.1414000300319855e-15, 0.1117154804361126e+01, 0.1073347337297776e+01, 0.4438011869593646e-01, 0.4380746706334982e-01, 0.1178029724550133e+01, 0.1131758262754287e+01, 0.4427372589519608e-01, 0.9065158049178278e-01, 0.5998724677255393e+00, 0.5753431417100738e+00, 0.1369551627869799e+00, 0.6880305191066147e-01, 0.1150450694942405e+01, 0.1194069582486762e+01, 0.1142789997988411e+00, 0.9333627524262254e-01, 0.1303260395526232e+01, 0.1408587972834190e+01, 0.1155784656615063e-01, 0.8951422490882578e-02, 0.1671278822811555e+01, 0.1682736313985311e+01, 0.2916285016424428e-04, 0.1003553923945662e-03, 0.4993103973382956e+00, 0.4856665170586981e+00, 0.1200921574220688e-01, 0.1367304312976171e-01, 0.4396883475635844e+00, 0.4385509668224923e+00, 0.1138121911768345e-01, 0.1314659648329904e-01, 0.4050999534208807e+00, 0.3806452440729016e+00, 0.1951852336551518e-01, 0.3583592846566260e-01, 0.2011877025290218e+00, 0.2206431713856914e+00, 0.2894937755625969e-04, 0.6305450884558253e-04, 0.9233968320433328e-01, 0.9236863258188953e-01, 0.3202909346606844e-42, 0.5962992535266723e-20}, + i0: 1, + n0: 12, + pp: 0, + tau: 9.2159326345418235e-002, + sigma: 0.73182753419913071, + i0Out: 1, + n0Out: 12, + ppOut: 0, + tauOut: 9.2159326345418235e-002, + sigmaOut: 0.73182753419913071, + dminOut: 1.5272797578758102e-004, + dmin1Out: 9.6724015270930774e-002, + dmin2Out: 
0.29962742013672317, + dnOut: 1.5272797578758102e-004, + dnm1Out: 9.6724015270930774e-002, + dnm2Out: 0.29962742013672317, + }, + { + z: []float64{0.1749149979302717e+01, 0.1656990652957299e+01, 0.8676862906242004e-16, 0.5850002270797901e-16, 0.1117154804361126e+01, 0.1069375596711644e+01, 0.4438011869593646e-01, 0.4888936980013561e-01, 0.1178029724550133e+01, 0.1081254754299776e+01, 0.4427372589519608e-01, 0.2456274906772994e-01, 0.5998724677255393e+00, 0.6201055550993710e+00, 0.1369551627869799e+00, 0.2540860356894923e+00, 0.1150450694942405e+01, 0.9184843327063353e+00, 0.1142789997988411e+00, 0.1621533314992303e+00, 0.1303260395526232e+01, 0.1060505584247734e+01, 0.1155784656615063e-01, 0.1821431635083262e-01, 0.1671278822811555e+01, 0.1560934342965469e+01, 0.2916285016424428e-04, 0.9328588590960435e-05, 0.4993103973382956e+00, 0.4191509581464933e+00, 0.1200921574220688e-01, 0.1259763844648080e-01, 0.4396883475635844e+00, 0.3463126018893689e+00, 0.1138121911768345e-01, 0.1331320693873929e-01, 0.4050999534208807e+00, 0.3191459435022383e+00, 0.1951852336551518e-01, 0.1230436091267282e-01, 0.2011877025290218e+00, 0.9675296464848704e-01, 0.2894937755625969e-04, 0.2762888312745317e-04, 0.9233968320433328e-01, 0.1527279757875810e-03, 0.3202909346606844e-42, 0.5850002270797901e-16, 0.7318275341991307e+00}, + i0: 1, + n0: 12, + pp: 1, + tau: 1.5267965277267402e-004, + sigma: 0.82398686054454895, + i0Out: 1, + n0Out: 12, + ppOut: 1, + tauOut: 1.5267965277267402e-004, + sigmaOut: 0.82398686054454895, + dminOut: 2.8998364833248812e-009, + dmin1Out: 9.2869952038417761e-002, + dmin2Out: 0.30683160095299705, + dnOut: 2.8998364833248812e-009, + dnm1Out: 9.2869952038417761e-002, + dnm2Out: 0.30683160095299705, + }, + { + z: []float64{0.1656837973304527e+01, 0.1656990652957299e+01, 0.3775776370348287e-16, 0.5850002270797901e-16, 0.1118112286859007e+01, 0.1069375596711644e+01, 0.4727777715386321e-01, 0.4888936980013561e-01, 0.1058387046560870e+01, 0.1081254754299776e+01, 0.1439123541326830e-01, 0.2456274906772994e-01, 0.8596476757228223e+00, 0.6201055550993710e+00, 0.2714763844897646e+00, 0.2540860356894923e+00, 0.8090086000630283e+00, 0.9184843327063353e+00, 0.2125620340079330e+00, 0.1621533314992303e+00, 0.8660051869378607e+00, 0.1060505584247734e+01, 0.3283046378299830e-01, 0.1821431635083262e-01, 0.1527960528118289e+01, 0.1560934342965469e+01, 0.2559023465659061e-05, 0.9328588590960435e-05, 0.4315933579167358e+00, 0.4191509581464933e+00, 0.1010840613748274e-01, 0.1259763844648080e-01, 0.3493647230378528e+00, 0.3463126018893689e+00, 0.1216166289646864e-01, 0.1331320693873929e-01, 0.3191359618656699e+00, 0.3191459435022383e+00, 0.3730332957296599e-02, 0.1230436091267282e-01, 0.9289758092154521e-01, 0.9675296464848704e-01, 0.4542317842367971e-07, 0.2762888312745317e-04, 0.2899836483324881e-08, 0.1527279757875810e-03, 0.3775776370348287e-16, 0.5850002270797901e-16}, + i0: 1, + n0: 12, + pp: 0, + tau: 2.8998350258011044e-009, + sigma: 0.82413954019732161, + i0Out: 1, + n0Out: 12, + ppOut: 0, + tauOut: 2.8998350258011044e-009, + sigmaOut: 0.82413954019732161, + dminOut: 2.2456987483859239e-017, + dmin1Out: 9.1786476970678488e-002, + dmin2Out: 0.30815750788579249, + dnOut: 2.2456987483859239e-017, + dnm1Out: 9.1786476970678488e-002, + dnm2Out: 0.30815750788579249, + }, + { + z: []float64{0.1656837973304527e+01, 0.1656837970404692e+01, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.1118112286859007e+01, 0.1165390061113035e+01, 0.4727777715386321e-01, 0.4293685745187325e-01, 0.1058387046560870e+01, 
0.1029841421622430e+01, 0.1439123541326830e-01, 0.1201290976848257e-01, 0.8596476757228223e+00, 0.1119111147544269e+01, 0.2714763844897646e+00, 0.1962510428460807e+00, 0.8090086000630283e+00, 0.8253195883250457e+00, 0.2125620340079330e+00, 0.2230406579474441e+00, 0.8660051869378607e+00, 0.6757949898735797e+00, 0.3283046378299830e-01, 0.7422909836846008e-01, 0.1527960528118289e+01, 0.1453733985873459e+01, 0.2559023465659061e-05, 0.7597383986781573e-06, 0.4315933579167358e+00, 0.4417010014159849e+00, 0.1010840613748274e-01, 0.7995273950601426e-02, 0.3493647230378528e+00, 0.3535311090838850e+00, 0.1216166289646864e-01, 0.1097845108004235e-01, 0.3191359618656699e+00, 0.3118878408430891e+00, 0.3730332957296599e-02, 0.1111101051031713e-02, 0.9289758092154521e-01, 0.9178652239385691e-01, 0.4542317842367971e-07, 0.1435066789177947e-14, 0.2899836483324881e-08, 0.2245698748385924e-16, 0.3775776370348287e-16, 0.2548071704976161e-16, 0.7318275341991307e+00}, + i0: 1, + n0: 12, + pp: 1, + tau: 2.2456987483858885e-017, + sigma: 0.82413954309715665, + i0Out: 1, + n0Out: 12, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 0.82413954309715665, + dminOut: 2.2456987483858888e-017, + dmin1Out: 9.1450429866798411e-002, + dmin2Out: 0.30232944966555197, + dnOut: 2.2456987483858888e-017, + dnm1Out: 9.1450429866798411e-002, + dnm2Out: 0.30232944966555197, + }, + { + z: []float64{0.1656837970404692e+01, 0.1656837970404692e+01, 0.1792267857826344e-16, 0.2548071704976161e-16, 0.1208326918564908e+01, 0.1165390061113035e+01, 0.3659452888027460e-01, 0.4293685745187325e-01, 0.1005259802510638e+01, 0.1029841421622430e+01, 0.1337343958524596e-01, 0.1201290976848257e-01, 0.1301988750805104e+01, 0.1119111147544269e+01, 0.1244018658302015e+00, 0.1962510428460807e+00, 0.9239583804422884e+00, 0.8253195883250457e+00, 0.1631347930486186e+00, 0.2230406579474441e+00, 0.5868892951934211e+00, 0.6757949898735797e+00, 0.1838666404767374e+00, 0.7422909836846008e-01, 0.1269868105135121e+01, 0.1453733985873459e+01, 0.2642614694812039e-06, 0.7597383986781573e-06, 0.4496960111051168e+00, 0.4417010014159849e+00, 0.6285530663790794e-02, 0.7995273950601426e-02, 0.3582240295001366e+00, 0.3535311090838850e+00, 0.9558391177537082e-02, 0.1097845108004235e-01, 0.3034405507165837e+00, 0.3118878408430891e+00, 0.3360925270585024e-03, 0.1111101051031713e-02, 0.9145042986679984e-01, 0.9178652239385691e-01, 0.3524015903480299e-30, 0.1435066789177947e-14}, + i0: 1, + n0: 11, + pp: 0, + tau: 9.1173077708044642e-002, + sigma: 0.82413954309715665, + i0Out: 1, + n0Out: 11, + ppOut: 0, + tauOut: 9.1173077708044642e-002, + sigmaOut: 0.82413954309715665, + dminOut: 1.2510327751137640e-004, + dmin1Out: 0.20154260650453434, + dmin2Out: 0.26087886618071221, + dnOut: 1.2510327751137640e-004, + dnm1Out: 0.20154260650453434, + dnm2Out: 0.26087886618071221, + }, + { + z: []float64{0.1656837970404692e+01, 0.1565664892696647e+01, 0.1792267857826344e-16, 0.1383211380667930e-16, 0.1208326918564908e+01, 0.1153748369737138e+01, 0.3659452888027460e-01, 0.3188477647299814e-01, 0.1005259802510638e+01, 0.8955753879148407e+00, 0.1337343958524596e-01, 0.1944232516271161e-01, 0.1301988750805104e+01, 0.1315775213764550e+01, 0.1244018658302015e+00, 0.8735697805677015e-01, 0.9239583804422884e+00, 0.9085631177260922e+00, 0.1631347930486186e+00, 0.1053774491236745e+00, 0.5868892951934211e+00, 0.5742054088384394e+00, 0.1838666404767374e+00, 0.4066251880351924e+00, 0.1269868105135121e+01, 0.7720701036533529e+00, 0.2642614694812039e-06, 0.1539203864417861e-06, 0.4496960111051168e+00, 
0.3648083101404764e+00, 0.6285530663790794e-02, 0.6172085611379779e-02, 0.3582240295001366e+00, 0.2704372573582493e+00, 0.9558391177537082e-02, 0.1072486650400470e-01, 0.3034405507165837e+00, 0.2018786990315928e+00, 0.3360925270585024e-03, 0.1522488812438262e-03, 0.9145042986679984e-01, 0.1251032775113764e-03, 0.3524015903480299e-30, 0.1383211380667930e-16, 0.8241395430971566e+00}, + i0: 1, + n0: 11, + pp: 1, + tau: 1.2498058228587147e-004, + sigma: 0.91531262080520126, + i0Out: 1, + n0Out: 11, + ppOut: 1, + tauOut: 1.2498058228587147e-004, + sigmaOut: 0.91531262080520126, + dminOut: 2.4554386425680716e-008, + dmin1Out: 0.19392428663386141, + dmin2Out: 0.26581143244488259, + dnOut: 2.4554386425680716e-008, + dnm1Out: 0.19392428663386141, + dnm2Out: 0.26581143244488259, + }, + { + z: []float64{0.1565539912114361e+01, 0.1565664892696647e+01, 0.1019378594629470e-16, 0.1383211380667930e-16, 0.1185508165627851e+01, 0.1153748369737138e+01, 0.2408690373149840e-01, 0.3188477647299814e-01, 0.8908058287637680e+00, 0.8955753879148407e+00, 0.2871751477260568e-01, 0.1944232516271161e-01, 0.1374289696466428e+01, 0.1315775213764550e+01, 0.5775298217141787e-01, 0.8735697805677015e-01, 0.9560626040960629e+00, 0.9085631177260922e+00, 0.6328905763825028e-01, 0.1053774491236745e+00, 0.9174165586530958e+00, 0.5742054088384394e+00, 0.3422034931823232e+00, 0.4066251880351924e+00, 0.4297417838091302e+00, 0.7720701036533529e+00, 0.1306631986684747e-06, 0.1539203864417861e-06, 0.3708552845063717e+00, 0.3648083101404764e+00, 0.4500844331080801e-02, 0.6172085611379779e-02, 0.2765362989488873e+00, 0.2704372573582493e+00, 0.7829431815445537e-02, 0.1072486650400470e-01, 0.1940765355151052e+00, 0.2018786990315928e+00, 0.9814083907923291e-07, 0.1522488812438262e-03, 0.2455438642568072e-07, 0.1251032775113764e-03, 0.1019378594629470e-16, 0.1383211380667930e-16}, + i0: 1, + n0: 11, + pp: 0, + tau: 2.4554370888251911e-008, + sigma: 0.91543760138748709, + i0Out: 1, + n0Out: 11, + ppOut: 0, + tauOut: 2.4554370888251911e-008, + sigmaOut: 0.91543760138748709, + dminOut: 2.7649302792337775e-015, + dmin1Out: 0.18866996369915248, + dmin2Out: 0.27322036360404983, + dnOut: 2.7649302792337775e-015, + dnm1Out: 0.18866996369915248, + dnm2Out: 0.27322036360404983, + }, + { + z: []float64{0.1565539912114361e+01, 0.1565539887559990e+01, 0.1019378594629470e-16, 0.7719264500395369e-17, 0.1185508165627851e+01, 0.1209595044804978e+01, 0.2408690373149840e-01, 0.1773879145177052e-01, 0.8908058287637680e+00, 0.9017845275302323e+00, 0.2871751477260568e-01, 0.4376453959373496e-01, 0.1374289696466428e+01, 0.1388278114489740e+01, 0.5775298217141787e-01, 0.3977262621431848e-01, 0.9560626040960629e+00, 0.9795790109656241e+00, 0.6328905763825028e-01, 0.5927283946360358e-01, 0.9174165586530958e+00, 0.1200347187817445e+01, 0.3422034931823232e+00, 0.1225138368952072e+00, 0.4297417838091302e+00, 0.3072280530227507e+00, 0.1306631986684747e-06, 0.1577236754259593e-06, 0.3708552845063717e+00, 0.3753559465594062e+00, 0.4500844331080801e-02, 0.3315910790466669e-02, 0.2765362989488873e+00, 0.2810497954194954e+00, 0.7829431815445537e-02, 0.5406547261581845e-02, 0.1940765355151052e+00, 0.1886700618399915e+00, 0.9814083907923291e-07, 0.1277249852674422e-13, 0.2455438642568072e-07, 0.2764930279233778e-14, 0.1019378594629470e-16, 0.7719264500395369e-17, 0.8241395430971566e+00}, + i0: 1, + n0: 11, + pp: 1, + tau: 2.7649302792335523e-015, + sigma: 0.91543762594185796, + i0Out: 1, + n0Out: 11, + ppOut: 1, + tauOut: 2.7649302792335523e-015, + sigmaOut: 
0.91543762594185796, + dminOut: 3.4709879829724519e-029, + dmin1Out: 0.18507826404867261, + dmin2Out: 0.27717487004787600, + dnOut: 3.4709879829724519e-029, + dnm1Out: 0.18507826404867261, + dnm2Out: 0.27858872904535659, + }, + { + z: []float64{0.1565539887559988e+01, 0.1565539887559990e+01, 0.5964194309842799e-17, 0.7719264500395369e-17, 0.1227333836256746e+01, 0.1209595044804978e+01, 0.1303359134714337e-01, 0.1773879145177052e-01, 0.9325154757768211e+00, 0.9017845275302323e+00, 0.6515425651042277e-01, 0.4376453959373496e-01, 0.1362896484193633e+01, 0.1388278114489740e+01, 0.2858649229958118e-01, 0.3977262621431848e-01, 0.1010265358129644e+01, 0.9795790109656241e+00, 0.7042504782685149e-01, 0.5927283946360358e-01, 0.1252435976885798e+01, 0.1200347187817445e+01, 0.3005318297487194e-01, 0.1225138368952072e+00, 0.2771750277715515e+00, 0.3072280530227507e+00, 0.2135925446109612e-06, 0.1577236754259593e-06, 0.3786716437573254e+00, 0.3753559465594062e+00, 0.2461066374135985e-02, 0.3315910790466669e-02, 0.2839952763069384e+00, 0.2810497954194954e+00, 0.3591797791316158e-02, 0.5406547261581845e-02, 0.1850782640486854e+00, 0.1886700618399915e+00, 0.1908115364037247e-27, 0.1277249852674422e-13}, + i0: 1, + n0: 10, + pp: 0, + tau: 9.2539132024336307e-002, + sigma: 0.91543762594186073, + i0Out: 1, + n0Out: 10, + ppOut: 0, + tauOut: 9.2539132024336307e-002, + sigmaOut: 0.91543762594186073, + dminOut: 8.9088074504151571e-002, + dmin1Out: 0.17705256406949207, + dmin2Out: 0.17705256406949207, + dnOut: 8.9088074504151571e-002, + dnm1Out: 0.18903428744201686, + dnm2Out: 0.28613205491191551, + }, + { + z: []float64{0.1565539887559988e+01, 0.1473000755535651e+01, 0.5964194309842799e-17, 0.4969486576955697e-17, 0.1227333836256746e+01, 0.1147828295579553e+01, 0.1303359134714337e-01, 0.1058871408116432e-01, 0.9325154757768211e+00, 0.8945418861817434e+00, 0.6515425651042277e-01, 0.9926701980086396e-01, 0.1362896484193633e+01, 0.1199676824668014e+01, 0.2858649229958118e-01, 0.2407310226126826e-01, 0.1010265358129644e+01, 0.9640781716708908e+00, 0.7042504782685149e-01, 0.9148932748822959e-01, 0.1252435976885798e+01, 0.1098460700348104e+01, 0.3005318297487194e-01, 0.7583331677723075e-02, 0.2771750277715515e+00, 0.1770527776620367e+00, 0.2135925446109612e-06, 0.4568210735249311e-06, 0.3786716437573254e+00, 0.2885931212860515e+00, 0.2461066374135985e-02, 0.2421856840585221e-02, 0.2839952763069384e+00, 0.1926260852333330e+00, 0.3591797791316158e-02, 0.3451057520197492e-02, 0.1850782640486854e+00, 0.8908807450415157e-01, 0.1908115364037247e-27, 0.4969486576955697e-17, 0.9154376259418607e+00}, + i0: 1, + n0: 10, + pp: 1, + tau: 7.5387921832037685e-002, + sigma: 1.0079767579661971, + i0Out: 1, + n0Out: 10, + ppOut: 1, + tauOut: 7.5387921832037685e-002, + sigmaOut: 1.0079767579661971, + dminOut: 1.1106216559126303e-002, + dmin1Out: 0.10021441433509834, + dmin2Out: 0.10021441433509834, + dnOut: 1.1106216559126303e-002, + dnm1Out: 0.11507463320059727, + dnm2Out: 0.21320388392650824, + }, + { + z: []float64{0.1397612833703614e+01, 0.1473000755535651e+01, 0.4081328655531061e-17, 0.4969486576955697e-17, 0.1083029087828680e+01, 0.1147828295579553e+01, 0.8745885380967966e-02, 0.1058871408116432e-01, 0.9096750987696016e+00, 0.8945418861817434e+00, 0.1309130515609722e+00, 0.9926701980086396e-01, 0.1017448953536272e+01, 0.1199676824668014e+01, 0.2281033592282574e-01, 0.2407310226126826e-01, 0.9573692414042568e+00, 0.9640781716708908e+00, 0.1049724875218357e+00, 0.9148932748822959e-01, 0.9256836226719531e+00, 0.1098460700348104e+01, 
0.1450441494900679e-02, 0.7583331677723075e-02, 0.1002148711561719e+00, 0.1770527776620367e+00, 0.1315527505616969e-05, 0.4568210735249311e-06, 0.2156257407670935e+00, 0.2885931212860515e+00, 0.2163530200698068e-02, 0.2421856840585221e-02, 0.1185256907207948e+00, 0.1926260852333330e+00, 0.2593936112987583e-02, 0.3451057520197492e-02, 0.1110621655912630e-01, 0.8908807450415157e-01, 0.4081328655531061e-17, 0.4969486576955697e-17}, + i0: 1, + n0: 10, + pp: 0, + tau: 9.2006637361187298e-003, + sigma: 1.0833646797982348, + i0Out: 1, + n0Out: 10, + ppOut: 0, + tauOut: 9.2006637361187298e-003, + sigmaOut: 1.0833646797982348, + dminOut: 1.6452860548183366e-003, + dmin1Out: 9.0837695870445614e-002, + dmin2Out: 9.0837695870445614e-002, + dnOut: 1.6452860548183366e-003, + dnm1Out: 0.10809563217808144, + dnm2Out: 0.20642195434625446, + }, + { + z: []float64{0.1397612833703614e+01, 0.1388412169967495e+01, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.1083029087828680e+01, 0.1082574309473529e+01, 0.8745885380967966e-02, 0.7349069784991225e-02, 0.9096750987696016e+00, 0.1024038416809464e+01, 0.1309130515609722e+00, 0.1300706547025319e+00, 0.1017448953536272e+01, 0.9009879710204475e+00, 0.2281033592282574e-01, 0.2423774201322422e-01, 0.9573692414042568e+00, 0.1028903323176749e+01, 0.1049724875218357e+00, 0.9444163542020824e-01, 0.9256836226719531e+00, 0.8234917650105269e+00, 0.1450441494900679e-02, 0.1765115496075164e-03, 0.1002148711561719e+00, 0.9083901139795122e-01, 0.1315527505616969e-05, 0.3122684720284652e-05, 0.2156257407670935e+00, 0.2085854845469525e+00, 0.2163530200698068e-02, 0.1229394806594584e-02, 0.1185256907207948e+00, 0.1106895682910690e+00, 0.2593936112987583e-02, 0.2602667681892373e-03, 0.1110621655912630e-01, 0.1645286054818337e-02, 0.4081328655531061e-17, 0.3183635051997810e-17, 0.9154376259418607e+00}, + i0: 1, + n0: 10, + pp: 1, + tau: 1.5594219393746818e-003, + sigma: 1.0925653435343534, + i0Out: 1, + n0Out: 10, + ppOut: 1, + tauOut: 1.5594219393746818e-003, + sigmaOut: 1.0925653435343534, + dminOut: 8.1926049685687600e-005, + dmin1Out: 8.9258234858565516e-002, + dmin2Out: 8.9258234858565516e-002, + dnOut: 8.1926049685687600e-005, + dnm1Out: 0.10847668953277810, + dnm2Out: 0.20701876553386761, + }, + { + z: []float64{0.1386852748028120e+01, 0.1388412169967495e+01, 0.2485138759635906e-17, 0.3183635051997810e-17, 0.1088363957319145e+01, 0.1082574309473529e+01, 0.6914717946174950e-02, 0.7349069784991225e-02, 0.1145634931626446e+01, 0.1024038416809464e+01, 0.1022944500333619e+00, 0.1300706547025319e+00, 0.8213718410609353e+00, 0.9009879710204475e+00, 0.3036175828902931e-01, 0.2423774201322422e-01, 0.1091423778368554e+01, 0.1028903323176749e+01, 0.7125729765473905e-01, 0.9444163542020824e-01, 0.7508515569660207e+00, 0.8234917650105269e+00, 0.2135460001102832e-04, 0.1765115496075164e-03, 0.8926135754328580e-01, 0.9083901139795122e-01, 0.7297073710223142e-05, 0.3122684720284652e-05, 0.2082481603404622e+00, 0.2085854845469525e+00, 0.6534568189162364e-03, 0.1229394806594584e-02, 0.1087369563009673e+00, 0.1106895682910690e+00, 0.3938065757966957e-05, 0.2602667681892373e-03, 0.8192604968568760e-04, 0.1645286054818337e-02, 0.2485138759635906e-17, 0.3183635051997810e-17}, + i0: 1, + n0: 10, + pp: 0, + tau: 8.1416944590412474e-005, + sigma: 1.0941247654737283, + i0Out: 1, + n0Out: 10, + ppOut: 0, + tauOut: 8.1416944590412474e-005, + sigmaOut: 1.0941247654737283, + dminOut: 5.0612658152462498e-007, + dmin1Out: 8.9177229293753768e-002, + dmin2Out: 8.9177229293753768e-002, + dnOut: 
5.0612658152462498e-007, + dnm1Out: 0.10831524323681536, + dnm2Out: 0.20814970454128662, + }, + { + z: []float64{0.1386852748028120e+01, 0.1386771331083530e+01, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.1088363957319145e+01, 0.1095197258320730e+01, 0.6914717946174950e-02, 0.7233164949324962e-02, 0.1145634931626446e+01, 0.1240614799765893e+01, 0.1022944500333619e+00, 0.6772592167212049e-01, 0.8213718410609353e+00, 0.7839262607332537e+00, 0.3036175828902931e-01, 0.4227125255215914e-01, 0.1091423778368554e+01, 0.1120328406526543e+01, 0.7125729765473905e-01, 0.4775711530437247e-01, 0.7508515569660207e+00, 0.7030343793170689e+00, 0.2135460001102832e-04, 0.2711304941630149e-05, 0.8926135754328580e-01, 0.8918452636746399e-01, 0.7297073710223142e-05, 0.1703885458517271e-04, 0.2082481603404622e+00, 0.2088031613602029e+00, 0.6534568189162364e-03, 0.3402961195615630e-03, 0.1087369563009673e+00, 0.1083191813025733e+00, 0.3938065757966957e-05, 0.2978513750500819e-08, 0.8192604968568760e-04, 0.5061265815246250e-06, 0.2485138759635906e-17, 0.1950383162890474e-17, 0.9154376259418607e+00}, + i0: 1, + n0: 10, + pp: 1, + tau: 5.0604049633765406e-007, + sigma: 1.0942061824183187, + i0Out: 1, + n0Out: 10, + ppOut: 1, + tauOut: 5.0604049633765406e-007, + sigmaOut: 1.0942061824183187, + dminOut: 8.6071246971392626e-011, + dmin1Out: 8.9183660885577137e-002, + dmin2Out: 8.9183660885577137e-002, + dnOut: 8.6071246971392626e-011, + dnm1Out: 0.10814239569959758, + dnm2Out: 0.20876277035564564, + }, + { + z: []float64{0.1386770825043033e+01, 0.1386771331083530e+01, 0.1540308069724697e-17, 0.1950383162890474e-17, 0.1102429917229558e+01, 0.1095197258320730e+01, 0.8139811288713328e-02, 0.7233164949324962e-02, 0.1300200404108803e+01, 0.1240614799765893e+01, 0.4083380405309871e-01, 0.6772592167212049e-01, 0.7853632031918177e+00, 0.7839262607332537e+00, 0.6030036143936175e-01, 0.4227125255215914e-01, 0.1107784654351057e+01, 0.1120328406526543e+01, 0.3030814137396716e-01, 0.4775711530437247e-01, 0.6727284432075471e+00, 0.7030343793170689e+00, 0.3594413905024206e-06, 0.2711304941630149e-05, 0.8920069974016231e-01, 0.8918452636746399e-01, 0.3988496406087022e-04, 0.1703885458517271e-04, 0.2091030664752072e+00, 0.2088031613602029e+00, 0.1762795624794033e-03, 0.3402961195615630e-03, 0.1081423986781113e+00, 0.1083191813025733e+00, 0.1393999949133917e-13, 0.2978513750500819e-08, 0.8607124697139263e-10, 0.5061265815246250e-06, 0.1540308069724697e-17, 0.1950383162890474e-17}, + i0: 1, + n0: 10, + pp: 0, + tau: 8.6071215292546838e-011, + sigma: 1.0942066884588149, + i0Out: 1, + n0Out: 10, + ppOut: 0, + tauOut: 8.6071215292546838e-011, + sigmaOut: 1.0942066884588149, + dminOut: 3.1678834699165494e-017, + dmin1Out: 8.9200650586607991e-002, + dmin2Out: 8.9200650586607991e-002, + dnOut: 3.1678834699165494e-017, + dnm1Out: 0.10805126770967630, + dnm2Out: 0.20900961033533805, + }, + { + z: []float64{0.1386770825043033e+01, 0.1386770824956962e+01, 0.1540308069724697e-17, 0.1224486171222500e-17, 0.1102429917229558e+01, 0.1110569728432201e+01, 0.8139811288713328e-02, 0.9529690622753703e-02, 0.1300200404108803e+01, 0.1331504517453077e+01, 0.4083380405309871e-01, 0.2408506071837550e-01, 0.7853632031918177e+00, 0.8215785038267327e+00, 0.6030036143936175e-01, 0.8130667336500198e-01, 0.1107784654351057e+01, 0.1056786122273951e+01, 0.3030814137396716e-01, 0.1929354325655847e-01, 0.6727284432075471e+00, 0.6534352593063080e+00, 0.3594413905024206e-06, 0.4906748310831983e-07, 0.8920069974016231e-01, 0.8924053555066887e-01, 
0.3988496406087022e-04, 0.9345605379795243e-04, 0.2091030664752072e+00, 0.2091858898978174e+00, 0.1762795624794033e-03, 0.9113088236382798e-04, 0.1081423986781113e+00, 0.1080512677096902e+00, 0.1393999949133917e-13, 0.1110429488179469e-22, 0.8607124697139263e-10, 0.3167883469916549e-16, 0.1540308069724697e-17, 0.1224486171222500e-17, 0.9154376259418607e+00}, + i0: 1, + n0: 10, + pp: 1, + tau: 3.1678834698836348e-017, + sigma: 1.0942066885448862, + i0Out: 1, + n0Out: 10, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 1.0942066885448862, + dminOut: 3.1678834699165494e-017, + dmin1Out: 8.9240528714785800e-002, + dmin2Out: 8.9240528714785800e-002, + dnOut: 3.1678834699165494e-017, + dnm1Out: 0.10800416691102718, + dnm2Out: 0.20896705170504334, + }, + { + z: []float64{0.1386770824956962e+01, 0.1386770824956962e+01, 0.9806070694382835e-18, 0.1224486171222500e-17, 0.1120099419054954e+01, 0.1110569728432201e+01, 0.1132830345080667e-01, 0.9529690622753703e-02, 0.1344261274720646e+01, 0.1331504517453077e+01, 0.1472018016266302e-01, 0.2408506071837550e-01, 0.8881649970290716e+00, 0.8215785038267327e+00, 0.9674301998819117e-01, 0.8130667336500198e-01, 0.9793366455423188e+00, 0.1056786122273951e+01, 0.1287308250760436e-01, 0.1929354325655847e-01, 0.6405622258661866e+00, 0.6534352593063080e+00, 0.6835883063177366e-08, 0.4906748310831983e-07, 0.8933398476858376e-01, 0.8924053555066887e-01, 0.2188381927740679e-03, 0.9345605379795243e-04, 0.2090581825874072e+00, 0.2091858898978174e+00, 0.4710079866305792e-04, 0.9113088236382798e-04, 0.1080041669110272e+00, 0.1080512677096902e+00, 0.3257014354834561e-38, 0.1110429488179469e-22}, + i0: 1, + n0: 9, + pp: 0, + tau: 2.2310132178696450e-002, + sigma: 1.0942066885448862, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 2.2310132178696450e-002, + sigmaOut: 1.0942066885448862, + dminOut: 6.7023851586769906e-002, + dmin1Out: 6.7023851586769906e-002, + dmin2Out: 6.7023851586769906e-002, + dnOut: 8.5666701693601133e-002, + dnm1Out: 0.18606768031923254, + dnm2Out: 6.7023851586769906e-002, + }, + { + z: []float64{0.1386770824956962e+01, 0.1364460692778266e+01, 0.9806070694382835e-18, 0.8049901434408684e-18, 0.1120099419054954e+01, 0.1109117590327065e+01, 0.1132830345080667e-01, 0.1373001363427395e-01, 0.1344261274720646e+01, 0.1322941309070339e+01, 0.1472018016266302e-01, 0.9882485852396858e-02, 0.8881649970290716e+00, 0.9527153989861693e+00, 0.9674301998819117e-01, 0.9944626147083412e-01, 0.9793366455423188e+00, 0.8704533344003925e+00, 0.1287308250760436e-01, 0.9473236598617136e-02, 0.6405622258661866e+00, 0.6087788639247561e+00, 0.6835883063177366e-08, 0.1003117403762534e-08, 0.8933398476858376e-01, 0.6724268977954398e-01, 0.2188381927740679e-03, 0.6803700894781421e-03, 0.2090581825874072e+00, 0.1861147811178956e+00, 0.4710079866305792e-04, 0.2733303872960605e-04, 0.1080041669110272e+00, 0.8566670169360113e-01, 0.3257014354834561e-38, 0.8049901434408684e-18, 0.1094206688544886e+01}, + i0: 1, + n0: 9, + pp: 1, + tau: 6.4730147312741043e-002, + sigma: 1.1165168207235825, + i0Out: 1, + n0Out: 9, + ppOut: 1, + tauOut: 6.4730147312741043e-002, + sigmaOut: 1.1165168207235825, + dminOut: 2.5125423409859404e-003, + dmin1Out: 2.5125423409859404e-003, + dmin2Out: 2.5125423409859404e-003, + dnOut: 2.0907912921905053e-002, + dnm1Out: 8.1725879285544201e-002, + dnm2Out: 2.5125423409859404e-003, + }, + { + z: []float64{0.1299730545465525e+01, 0.1364460692778266e+01, 0.6869337119490330e-18, 0.8049901434408684e-18, 0.1058117456648598e+01, 0.1109117590327065e+01, 
0.1716633828952343e-01, 0.1373001363427395e-01, 0.1250927309320471e+01, 0.1322941309070339e+01, 0.7526573591998700e-02, 0.9882485852396858e-02, 0.9799049395522637e+00, 0.9527153989861693e+00, 0.8833849733474472e-01, 0.9944626147083412e-01, 0.7268579263515238e+00, 0.8704533344003925e+00, 0.7934296380510910e-02, 0.9473236598617136e-02, 0.5361144212346215e+00, 0.6087788639247561e+00, 0.1258170079408221e-09, 0.1003117403762534e-08, 0.3192912430464083e-02, 0.6724268977954398e-01, 0.3965875451961033e-01, 0.6803700894781421e-03, 0.8175321232427381e-01, 0.1861147811178956e+00, 0.2864145895504550e-04, 0.2733303872960605e-04, 0.2090791292190505e-01, 0.8566670169360113e-01, 0.6869337119490330e-18, 0.8049901434408684e-18}, + i0: 1, + n0: 9, + pp: 0, + tau: 2.3478378904869292e-003, + sigma: 1.1812469680363236, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 2.3478378904869292e-003, + sigmaOut: 1.1812469680363236, + dminOut: -6.4213352031532861e-004, + dmin1Out: -6.4213352031532861e-004, + dmin2Out: 8.4507453921550072e-004, + dnOut: 1.9536180783236046e-002, + dnm1Out: -6.4213352031532861e-004, + dnm2Out: 8.4507453921550072e-004, + }, + { + z: []float64{0.1299730545465525e+01, 0.1297382707575038e+01, 0.6869337119490330e-18, 0.5602483738451179e-18, 0.1058117456648598e+01, 0.1072935957047634e+01, 0.1716633828952343e-01, 0.2001409424891253e-01, 0.1250927309320471e+01, 0.1236091950773070e+01, 0.7526573591998700e-02, 0.5966648869520194e-02, 0.9799049395522637e+00, 0.1059928950127001e+01, 0.8833849733474472e-01, 0.6057909540261972e-01, 0.7268579263515238e+00, 0.6718652894389281e+00, 0.7934296380510910e-02, 0.6331166051893822e-02, 0.5361144212346215e+00, 0.5274354174180577e+00, 0.1258170079408221e-09, 0.7616528495272331e-12, 0.3192912430464083e-02, 0.4050382905882583e-01, 0.3965875451961033e-01, 0.8004750795410222e-01, 0.8175321232427381e-01, -0.6134920613602832e-03, 0.2864145895504550e-04, -0.9761057518179271e-03, 0.2090791292190505e-01, 0.1953618078323605e-01, 0.6869337119490330e-18, 0.5602483738451179e-18}, + i0: 1, + n0: 9, + pp: 0, + tau: 5.8695947262173229e-004, + sigma: 1.1812469680363236, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 5.8695947262173229e-004, + sigmaOut: 1.1812469680363236, + dminOut: 2.6059529570832572e-003, + dmin1Out: 2.6059529570832572e-003, + dmin2Out: 2.6059529570832572e-003, + dnOut: 2.0187357299561493e-002, + dnm1Out: 4.4537716273149721e-003, + dnm2Out: 2.6059529570832572e-003, + }, + { + z: []float64{0.1299730545465525e+01, 0.1299143585992903e+01, 0.6869337119490330e-18, 0.5594890049187079e-18, 0.1058117456648598e+01, 0.1074696835465499e+01, 0.1716633828952343e-01, 0.1998130138542489e-01, 0.1250927309320471e+01, 0.1237885622054423e+01, 0.7526573591998700e-02, 0.5958003315736791e-02, 0.9799049395522637e+00, 0.1061698474098650e+01, 0.8833849733474472e-01, 0.6047812873071532e-01, 0.7268579263515238e+00, 0.6737271345286977e+00, 0.7934296380510910e-02, 0.6313669873066943e-02, 0.5361144212346215e+00, 0.5292137920147498e+00, 0.1258170079408221e-09, 0.7590933847144573e-12, 0.3192912430464083e-02, 0.4226470747669359e-01, 0.3965875451961033e-01, 0.7671248122433710e-01, 0.8175321232427381e-01, 0.4482413086270017e-02, 0.2864145895504550e-04, 0.1335961497218277e-03, 0.2090791292190505e-01, 0.2018735729956149e-01, 0.6869337119490330e-18, 0.5594890049187079e-18, 0.1094206688544886e+01}, + i0: 1, + n0: 9, + pp: 1, + tau: 4.9324499328963489e-004, + sigma: 1.1818339275089453, + i0Out: 1, + n0Out: 9, + ppOut: 1, + tauOut: 4.9324499328963489e-004, + sigmaOut: 1.1818339275089453, + dminOut: 
1.0870277779300500e-003, + dmin1Out: 1.0870277779300500e-003, + dmin2Out: 4.1771462483342674e-002, + dnOut: 1.7484624889890960e-002, + dnm1Out: 1.0870277779300500e-003, + dnm2Out: 4.1771462483342674e-002, + }, + { + z: []float64{0.1298650340999613e+01, 0.1299143585992903e+01, 0.4630045856693428e-18, 0.5594890049187079e-18, 0.1094184891857634e+01, 0.1074696835465499e+01, 0.2260547177996662e-01, 0.1998130138542489e-01, 0.1220744908596903e+01, 0.1237885622054423e+01, 0.5181756634367578e-02, 0.5958003315736791e-02, 0.1116501601201708e+01, 0.1061698474098650e+01, 0.3649413160495897e-01, 0.6047812873071532e-01, 0.6430534278035160e+00, 0.6737271345286977e+00, 0.5195962000339362e-02, 0.6313669873066943e-02, 0.5235245850218799e+00, 0.5292137920147498e+00, 0.6128243213469890e-13, 0.7590933847144573e-12, 0.1184839437076798e+00, 0.4226470747669359e-01, 0.2902140315050332e-02, 0.7671248122433710e-01, 0.1220623927651878e-02, 0.4482413086270017e-02, 0.2209487416380896e-02, 0.1335961497218277e-03, 0.1748462488989096e-01, 0.2018735729956149e-01, 0.4630045856693428e-18, 0.5594890049187079e-18}, + i0: 1, + n0: 9, + pp: 0, + tau: 8.6238530354903250e-004, + sigma: 1.1823271725022351, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 8.6238530354903250e-004, + sigmaOut: 1.1823271725022351, + dminOut: 3.2884671240747138e-004, + dmin1Out: 3.2884671240747138e-004, + dmin2Out: 0.11762155840411674, + dnOut: 1.4027859154437344e-003, + dnm1Out: 3.2884671240747138e-004, + dnm2Out: 0.11762155840411674, + }, + { + z: []float64{0.1298650340999613e+01, 0.1297787955696064e+01, 0.4630045856693428e-18, 0.3903662538064461e-18, 0.1094184891857634e+01, 0.1115927978334052e+01, 0.2260547177996662e-01, 0.2472875948770642e-01, 0.1220744908596903e+01, 0.1200335520440015e+01, 0.5181756634367578e-02, 0.4819852017032840e-02, 0.1116501601201708e+01, 0.1147313495486085e+01, 0.3649413160495897e-01, 0.2045445862496274e-01, 0.6430534278035160e+00, 0.6269325458753436e+00, 0.5195962000339362e-02, 0.4338925882718484e-02, 0.5235245850218799e+00, 0.5183232738356738e+00, 0.6128243213469890e-13, 0.1400860159256393e-13, 0.1184839437076798e+00, 0.1205236987191671e+00, 0.2902140315050332e-02, 0.2939191169537379e-04, 0.1220623927651878e-02, 0.2538334128788367e-02, 0.2209487416380896e-02, 0.1521945367089819e-01, 0.1748462488989096e-01, 0.1402785915443734e-02, 0.4630045856693428e-18, 0.3903662538064461e-18, 0.1094206688544886e+01}, + i0: 1, + n0: 9, + pp: 1, + tau: 1.8442717265434725e-004, + sigma: 1.1831895578057841, + i0Out: 1, + n0Out: 9, + ppOut: 1, + tauOut: 1.8442717265434725e-004, + sigmaOut: 1.1831895578057841, + dminOut: 3.4295817345512611e-006, + dmin1Out: 2.3532871395603098e-003, + dmin2Out: 0.12033927154650945, + dnOut: 3.4295817345512611e-006, + dnm1Out: 2.3532871395603098e-003, + dnm2Out: 0.12033927154650945, + }, + { + z: []float64{0.1297603528523410e+01, 0.1297787955696064e+01, 0.3357116521683426e-18, 0.3903662538064461e-18, 0.1140472310649104e+01, 0.1115927978334052e+01, 0.2602676813137004e-01, 0.2472875948770642e-01, 0.1178944177153024e+01, 0.1200335520440015e+01, 0.4690536984322235e-02, 0.4819852017032840e-02, 0.1162892989954071e+01, 0.1147313495486085e+01, 0.1102729651913737e-01, 0.2045445862496274e-01, 0.6200597480662705e+00, 0.6269325458753436e+00, 0.3627015421456806e-02, 0.4338925882718484e-02, 0.5145118312415766e+00, 0.5183232738356738e+00, 0.3281495925457712e-14, 0.1400860159256393e-13, 0.1203686634582048e+00, 0.1205236987191671e+00, 0.6198165737098726e-06, 0.2939191169537379e-04, 0.1757274081045850e-01, 0.2538334128788367e-02, 
0.1214929161054836e-02, 0.1521945367089819e-01, 0.3429581734551261e-05, 0.1402785915443734e-02, 0.3357116521683426e-18, 0.3903662538064461e-18}, + i0: 1, + n0: 9, + pp: 0, + tau: 3.2077610710809750e-006, + sigma: 1.1833739849784384, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 3.2077610710809750e-006, + sigmaOut: 1.1833739849784384, + dminOut: 3.3505760549073344e-012, + dmin1Out: 1.7569442559804263e-002, + dmin2Out: 0.12036545569713296, + dnOut: 3.3505760549073344e-012, + dnm1Out: 1.7569442559804263e-002, + dnm2Out: 0.12036545569713296, + }, + { + z: []float64{0.1297603528523410e+01, 0.1297600320762339e+01, 0.3357116521683426e-18, 0.2950599175525191e-18, 0.1140472310649104e+01, 0.1166495871019403e+01, 0.2602676813137004e-01, 0.2630451380147252e-01, 0.1178944177153024e+01, 0.1157326992574803e+01, 0.4690536984322235e-02, 0.4713095445958055e-02, 0.1162892989954071e+01, 0.1169203983266179e+01, 0.1102729651913737e-01, 0.5848066547299594e-02, 0.6200597480662705e+00, 0.6178354891793567e+00, 0.3627015421456806e-02, 0.3020451850239120e-02, 0.5145118312415766e+00, 0.5114881716302697e+00, 0.3281495925457712e-14, 0.7722354114894537e-15, 0.1203686634582048e+00, 0.1203660755137067e+00, 0.6198165737098726e-06, 0.9048958315991394e-07, 0.1757274081045850e-01, 0.1878437172085910e-01, 0.1214929161054836e-02, 0.2218173128942313e-06, 0.3429581734551261e-05, 0.3350576054907334e-11, 0.3357116521683426e-18, 0.2950599175525191e-18, 0.1094206688544886e+01}, + i0: 1, + n0: 9, + pp: 1, + tau: 3.3505364896797715e-012, + sigma: 1.1833771927395094, + i0Out: 1, + n0Out: 9, + ppOut: 1, + tauOut: 3.3505364896797715e-012, + sigmaOut: 1.1833771927395094, + dminOut: 2.0335799151090034e-023, + dmin1Out: 1.8784357595683275e-002, + dmin2Out: 0.12036607551035594, + dnOut: 2.0335799151090034e-023, + dnm1Out: 1.8784357595683275e-002, + dnm2Out: 0.12036607551035594, + }, + { + z: []float64{0.1297600320758988e+01, 0.1297600320762339e+01, 0.2652482201353177e-18, 0.2950599175525191e-18, 0.1192800384817525e+01, 0.1166495871019403e+01, 0.2552222839336001e-01, 0.2630451380147252e-01, 0.1136517859624050e+01, 0.1157326992574803e+01, 0.4848643531876122e-02, 0.4713095445958055e-02, 0.1170203406278252e+01, 0.1169203983266179e+01, 0.3087619670750762e-02, 0.5848066547299594e-02, 0.6177683213554945e+00, 0.6178354891793567e+00, 0.2500816796475141e-02, 0.3020451850239120e-02, 0.5089873548304448e+00, 0.5114881716302697e+00, 0.1826193617023393e-15, 0.7722354114894537e-15, 0.1203661659999391e+00, 0.1203660755137067e+00, 0.1412182528886294e-07, 0.9048958315991394e-07, 0.1878457941299617e-01, 0.1878437172085910e-01, 0.3956520722700361e-16, 0.2218173128942313e-06, 0.2033579915109003e-22, 0.3350576054907334e-11, 0.2652482201353177e-18, 0.2950599175525191e-18}, + i0: 1, + n0: 9, + pp: 0, + tau: 2.0335799151089990e-023, + sigma: 1.1833771927428600, + i0Out: 1, + n0Out: 9, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 1.1833771927428600, + dminOut: 2.0335799151089993e-023, + dmin1Out: 1.8784577209116738e-002, + dmin2Out: 0.12036616599993906, + dnOut: 2.0335799151089993e-023, + dnm1Out: 1.8784577209116738e-002, + dnm2Out: 0.12036616599993906, + }, + { + z: []float64{0.1297600320758988e+01, 0.1297600320758988e+01, 0.2652482201353177e-18, 0.2438256017573345e-18, 0.1192800384817525e+01, 0.1218322613210885e+01, 0.2552222839336001e-01, 0.2380852827644004e-01, 0.1136517859624050e+01, 0.1117557974879486e+01, 0.4848643531876122e-02, 0.5077051306839188e-02, 0.1170203406278252e+01, 0.1168213974642164e+01, 0.3087619670750762e-02, 0.1632777609571200e-02, 
0.6177683213554945e+00, 0.6186363605423986e+00, 0.2500816796475141e-02, 0.2057564358223963e-02, 0.5089873548304448e+00, 0.5069297904722210e+00, 0.1826193617023393e-15, 0.4336141378669131e-16, 0.1203661659999391e+00, 0.1203661801217644e+00, 0.1412182528886294e-07, 0.2203879431304952e-08, 0.1878457941299617e-01, 0.1878457720911678e-01, 0.3956520722700361e-16, 0.4283248425464200e-37, 0.1183377192742860e+01}, + i0: 1, + n0: 8, + pp: 1, + tau: 1.8784576682472597e-002, + sigma: 1.1833771927428600, + i0Out: 1, + n0Out: 8, + ppOut: 1, + tauOut: 1.8784576682472597e-002, + sigmaOut: 1.1833771927428600, + dminOut: 1.1910047947871760e-010, + dmin1Out: 0.10158160343929173, + dmin2Out: 0.48640978493259379, + dnOut: 1.1910047947871760e-010, + dnm1Out: 0.10158160343929173, + dnm2Out: 0.48640978493259379, + }, + { + z: []float64{0.1278815744076516e+01, 0.1297600320758988e+01, 0.2322916695987584e-18, 0.2438256017573345e-18, 0.1223346564804853e+01, 0.1218322613210885e+01, 0.2174969171530204e-01, 0.2380852827644004e-01, 0.1082100757788551e+01, 0.1117557974879486e+01, 0.5481081353963683e-02, 0.5077051306839188e-02, 0.1145581094215299e+01, 0.1168213974642164e+01, 0.8817320773368219e-03, 0.1632777609571200e-02, 0.6010276161408131e+00, 0.6186363605423986e+00, 0.1735428857154564e-02, 0.2057564358223963e-02, 0.4864097849325938e+00, 0.5069297904722210e+00, 0.1073014545319338e-16, 0.4336141378669131e-16, 0.1015816056431712e+00, 0.1203661801217644e+00, 0.4075436992240071e-09, 0.2203879431304952e-08, 0.1191004794787176e-09, 0.1878457720911678e-01, 0.2322916695987584e-18, 0.2438256017573345e-18}, + i0: 1, + n0: 8, + pp: 0, + tau: 1.1910047900088848e-010, + sigma: 1.2021617694253326, + i0Out: 1, + n0Out: 8, + ppOut: 0, + tauOut: 1.1910047900088848e-010, + sigmaOut: 1.2021617694253326, + dminOut: 0.0000000000000000, + dmin1Out: 0.10158160552407068, + dmin2Out: 0.48500827096381138, + dnOut: 0.0000000000000000, + dnm1Out: 0.10158160552407068, + dnm2Out: 0.48500827096381138, + }, + { + z: []float64{0.1278815744076516e+01, 0.1278815743957415e+01, 0.2322916695987584e-18, 0.2222159192042978e-18, 0.1223346564804853e+01, 0.1245096256401054e+01, 0.2174969171530204e-01, 0.1890244048666934e-01, 0.1082100757788551e+01, 0.1068679398536745e+01, 0.5481081353963683e-02, 0.5875497537946494e-02, 0.1145581094215299e+01, 0.1140587328635589e+01, 0.8817320773368219e-03, 0.4646249482278370e-03, 0.6010276161408131e+00, 0.6022984199306394e+00, 0.1735428857154564e-02, 0.1401513849681988e-02, 0.4864097849325938e+00, 0.4850082709638114e+00, 0.1073014545319338e-16, 0.2247354260070927e-17, 0.1015816056431712e+00, 0.1015816059316144e+00, 0.4075436992240071e-09, 0.4778291260603437e-18, 0.1191004794787176e-09, 0.0000000000000000e+00, 0.2322916695987584e-18, 0.2222159192042978e-18, 0.1183377192742860e+01}, + i0: 1, + n0: 8, + pp: 1, + tau: -0.0000000000000000, + sigma: 1.2021617695444331, + i0Out: 1, + n0Out: 8, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 1.2021617695444331, + dminOut: 0.0000000000000000, + dmin1Out: 0.10158160593161437, + dmin2Out: 0.48388184445926297, + dnOut: 0.0000000000000000, + dnm1Out: 0.10158160593161437, + dnm2Out: 0.48388184445926297, + }, + { + z: []float64{0.1278815743957415e+01, 0.1278815743957415e+01, 0.2163565864913247e-18, 0.2222159192042978e-18, 0.1263998696887723e+01, 0.1245096256401054e+01, 0.1598154237018549e-01, 0.1890244048666934e-01, 0.1058573353704506e+01, 0.1068679398536745e+01, 0.6330707284251229e-02, 0.5875497537946494e-02, 0.1134721246299565e+01, 0.1140587328635589e+01, 0.2466181655543824e-03, 
0.4646249482278370e-03, 0.6034533156147670e+00, 0.6022984199306394e+00, 0.1126426504548419e-02, 0.1401513849681988e-02, 0.4838818444592630e+00, 0.4850082709638114e+00, 0.4717884282068346e-18, 0.2247354260070927e-17, 0.1015816059316144e+00, 0.1015816059316144e+00, 0.0000000000000000e+00, 0.4778291260603437e-18}, + i0: 1, + n0: 7, + pp: 0, + tau: -0.0000000000000000, + sigma: 1.2021617695444331, + i0Out: 1, + n0Out: 7, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 1.2021617695444331, + dminOut: 0.10158160593161437, + dmin1Out: 0.48298010023310722, + dmin2Out: 0.60332139679048247, + dnOut: 0.10158160593161437, + dnm1Out: 0.48298010023310722, + dnm2Out: 0.60332139679048247, + }, + { + z: []float64{0.1278815743957415e+01, 0.1278815743957415e+01, 0.2163565864913247e-18, 0.2138497627045302e-18, 0.1263998696887723e+01, 0.1279980239257909e+01, 0.1598154237018549e-01, 0.1321710631563048e-01, 0.1058573353704506e+01, 0.1051686954673127e+01, 0.6330707284251229e-02, 0.6830538334266978e-02, 0.1134721246299565e+01, 0.1128137326130852e+01, 0.2466181655543824e-03, 0.1319188242844840e-03, 0.6034533156147670e+00, 0.6044478232950309e+00, 0.1126426504548419e-02, 0.9017442261557238e-03, 0.4838818444592630e+00, 0.4829801002331072e+00, 0.4717884282068346e-18, 0.9922774494036444e-19, 0.1015816059316144e+00, 0.1015816059316144e+00, 0.0000000000000000e+00, 0.2138497627045302e-18, 0.1202161769544433e+01}, + i0: 1, + n0: 7, + pp: 1, + tau: 0.10158160593161437, + sigma: 1.2021617695444331, + i0Out: 1, + n0Out: 7, + ppOut: 1, + tauOut: 0.10158160593161437, + sigmaOut: 1.2021617695444331, + dminOut: 0.0000000000000000, + dmin1Out: 0.38053382595775820, + dmin2Out: 0.50278793042091730, + dnOut: 0.0000000000000000, + dnm1Out: 0.38053382595775820, + dnm2Out: 0.50278793042091730, + }, + { + z: []float64{0.1177234138025801e+01, 0.1278815743957415e+01, 0.2325140442247288e-18, 0.2138497627045302e-18, 0.1191615739641925e+01, 0.1279980239257909e+01, 0.1166505092896250e-01, 0.1321710631563048e-01, 0.9452708361468171e+00, 0.1051686954673127e+01, 0.8151933771558135e-02, 0.6830538334266978e-02, 0.1018535705251965e+01, 0.1128137326130852e+01, 0.7828694249915420e-04, 0.1319188242844840e-03, 0.5036896746470730e+00, 0.6044478232950309e+00, 0.8646683437346259e-03, 0.9017442261557238e-03, 0.3805338259577582e+00, 0.4829801002331072e+00, 0.2648835135390502e-19, 0.9922774494036444e-19, 0.0000000000000000e+00, 0.1015816059316144e+00, 0.2325140442247288e-18, 0.2138497627045302e-18}, + i0: 1, + n0: 7, + pp: 0, + tau: -0.0000000000000000, + sigma: 1.3037433754760475, + i0Out: 1, + n0Out: 7, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 1.3037433754760475, + dminOut: 0.0000000000000000, + dmin1Out: 0.37988164443454930, + dmin2Out: 0.50365062581387521, + dnOut: 0.0000000000000000, + dnm1Out: 0.37988164443454930, + dnm2Out: 0.50365062581387521, + }, + { + z: []float64{0.1177234138025801e+01, 0.1177234138025801e+01, 0.2325140442247288e-18, 0.2353545363971710e-18, 0.1191615739641925e+01, 0.1203280790570888e+01, 0.1166505092896250e-01, 0.9163806595868684e-02, 0.9452708361468171e+00, 0.9442589633225065e+00, 0.8151933771558135e-02, 0.8793176380307672e-02, 0.1018535705251965e+01, 0.1009820815814156e+01, 0.7828694249915420e-04, 0.3904883319791864e-04, 0.5036896746470730e+00, 0.5045152941576099e+00, 0.8646683437346259e-03, 0.6521815232088979e-03, 0.3805338259577582e+00, 0.3798816444345493e+00, 0.2648835135390502e-19, 0.0000000000000000e+00, 0.1303743375476047e+01}, + i0: 1, + n0: 6, + pp: 1, + tau: -0.0000000000000000, + sigma: 
1.3037433754760475, + i0Out: 1, + n0Out: 6, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 1.3037433754760475, + dminOut: 0.37939119035907048, + dmin1Out: 0.50449560273072791, + dmin2Out: 0.93712213699945368, + dnOut: 0.37939119035907048, + dnm1Out: 0.50449560273072791, + dnm2Out: 1.0004335774214996, + }, + { + z: []float64{0.1177234138025801e+01, 0.1177234138025801e+01, 0.2405618249359890e-18, 0.2353545363971710e-18, 0.1212444597166756e+01, 0.1203280790570888e+01, 0.7136826323052847e-02, 0.9163806595868684e-02, 0.9459153133797613e+00, 0.9442589633225065e+00, 0.9387238392656353e-02, 0.8793176380307672e-02, 0.1000472626254697e+01, 0.1009820815814156e+01, 0.1969142688202247e-04, 0.3904883319791864e-04, 0.5051477842539368e+00, 0.5045152941576099e+00, 0.4904540754787929e-03, 0.6521815232088979e-03, 0.3793911903590705e+00, 0.3798816444345493e+00, 0.2405618249359890e-18, 0.2353545363971710e-18}, + i0: 1, + n0: 6, + pp: 0, + tau: 0.37791671367087804, + sigma: 1.3037433754760475, + i0Out: 1, + n0Out: 6, + ppOut: 0, + tauOut: 0.37791671367087804, + sigmaOut: 1.3037433754760475, + dminOut: 1.7417124602014944e-005, + dmin1Out: 0.12721465844156904, + dmin2Out: 0.55997778901175921, + dnOut: 1.7417124602014944e-005, + dnm1Out: 0.12721465844156904, + dnm2Out: 0.60606091447901145, + }, + { + z: []float64{0.1177234138025801e+01, 0.7993174243549228e+00, 0.2405618249359890e-18, 0.3648961927279404e-18, 0.1212444597166756e+01, 0.8416647098189309e+00, 0.7136826323052847e-02, 0.8020810697124018e-02, 0.9459153133797613e+00, 0.5693650274044155e+00, 0.9387238392656353e-02, 0.1649499810480807e-01, 0.1000472626254697e+01, 0.6060806059058935e+00, 0.1969142688202247e-04, 0.1641214148963635e-04, 0.5051477842539368e+00, 0.1277051125170478e+00, 0.4904540754787929e-03, 0.1457059563590431e-02, 0.3793911903590705e+00, 0.1741712460201494e-04, 0.2405618249359890e-18, 0.3648961927279404e-18, 0.1303743375476047e+01}, + i0: 1, + n0: 6, + pp: 1, + tau: 1.7220607103034587e-005, + sigma: 1.6816600891469256, + i0Out: 1, + n0Out: 6, + ppOut: 1, + tauOut: 1.7220607103034587e-005, + sigmaOut: 1.6816600891469256, + dminOut: 5.8496704963934779e-012, + dmin1Out: 0.12768433261716639, + dmin2Out: 0.56397303984076830, + dnOut: 5.8496704963934779e-012, + dnm1Out: 0.12768433261716639, + dnm2Out: 0.58884056200405466, + }, + { + z: []float64{0.7993002037478197e+00, 0.7993174243549228e+00, 0.3842364192156412e-18, 0.3648961927279404e-18, 0.8496682999089519e+00, 0.8416647098189309e+00, 0.5374766956544110e-02, 0.8020810697124018e-02, 0.5804680379455763e+00, 0.5693650274044155e+00, 0.1722282329473577e-01, 0.1649499810480807e-01, 0.5888569741455443e+00, 0.6060806059058935e+00, 0.3559292778388127e-05, 0.1641214148963635e-04, 0.1291413921807568e+00, 0.1277051125170478e+00, 0.1965116493098606e-06, 0.1457059563590431e-02, 0.5849670496393478e-11, 0.1741712460201494e-04, 0.3842364192156412e-18, 0.3648961927279404e-18}, + i0: 1, + n0: 6, + pp: 0, + tau: 5.8496615949827599e-012, + sigma: 1.6816773097540285, + i0Out: 1, + n0Out: 6, + ppOut: 0, + tauOut: 5.8496615949827599e-012, + sigmaOut: 1.6816773097540285, + dminOut: 5.2413685591311719e-023, + dmin1Out: 0.12914058828956335, + dmin2Out: 0.57178448001987381, + dnOut: 5.2413685591311719e-023, + dnm1Out: 0.12914058828956335, + dnm2Out: 0.57178448001987381, + }, + { + z: []float64{0.7993002037478197e+00, 0.7993002037419701e+00, 0.3842364192156412e-18, 0.4084491703488284e-18, 0.8496682999089519e+00, 0.8550430668596465e+00, 0.5374766956544110e-02, 0.3648799166500929e-02, 0.5804680379455763e+00, 
0.5940420620679615e+00, 0.1722282329473577e-01, 0.1707249411982080e-01, 0.5888569741455443e+00, 0.5717880393126522e+00, 0.3559292778388127e-05, 0.8038853438286423e-06, 0.1291413921807568e+00, 0.1291407848012127e+00, 0.1965116493098606e-06, 0.8901358303923820e-17, 0.5849670496393478e-11, 0.5241368559131172e-22, 0.3842364192156412e-18, 0.4084491703488284e-18, 0.1303743375476047e+01}, + i0: 1, + n0: 6, + pp: 1, + tau: 5.2413685591311714e-023, + sigma: 1.6816773097598783, + i0Out: 1, + n0Out: 6, + ppOut: 1, + tauOut: 0.0000000000000000, + sigmaOut: 1.6816773097598783, + dminOut: 5.2413685591311719e-023, + dmin1Out: 0.12914059800027977, + dmin2Out: 0.55574794205624078, + dnOut: 5.2413685591311719e-023, + dnm1Out: 0.12914059800027977, + dnm2Out: 0.55574794205624078, + }, + { + z: []float64{0.7993002037419701e+00, 0.7993002037419701e+00, 0.4369342452764876e-18, 0.4084491703488284e-18, 0.8586918660261474e+00, 0.8550430668596465e+00, 0.2524235138002424e-02, 0.3648799166500929e-02, 0.6085903210497798e+00, 0.5940420620679615e+00, 0.1604009725641142e-01, 0.1707249411982080e-01, 0.5557487459415846e+00, 0.5717880393126522e+00, 0.1868009328861996e-06, 0.8038853438286423e-06, 0.1291405980002798e+00, 0.1291407848012127e+00, 0.3612752323451872e-38, 0.8901358303923820e-17}, + i0: 1, + n0: 5, + pp: 0, + tau: 0.12914051019182277, + sigma: 1.6816773097598783, + i0Out: 1, + n0Out: 5, + ppOut: 0, + tauOut: 0.12914051019182277, + sigmaOut: 1.6816773097598783, + dminOut: 2.8760335918365243e-008, + dmin1Out: 0.40854091047624980, + dmin2Out: 0.47735135942481804, + dnOut: 2.8760335918365243e-008, + dnm1Out: 0.40854091047624980, + dnm2Out: 0.47735135942481804, + }, + { + z: []float64{0.7993002037419701e+00, 0.6701596935501473e+00, 0.4369342452764876e-18, 0.5598544436768911e-18, 0.8586918660261474e+00, 0.7320755909723271e+00, 0.2524235138002424e-02, 0.2098451433139096e-02, 0.6085903210497798e+00, 0.4933914566812295e+00, 0.1604009725641142e-01, 0.1806732527351204e-01, 0.5557487459415846e+00, 0.4085410972771827e+00, 0.1868009328861996e-06, 0.5904812108429529e-07, 0.1291405980002798e+00, 0.2876033591836524e-07, 0.3612752323451872e-38, 0.5598544436768911e-18, 0.1681677309759878e+01}, + i0: 1, + n0: 5, + pp: 1, + tau: 2.8760330654564486e-008, + sigma: 1.8108178199517009, + i0Out: 1, + n0Out: 5, + ppOut: 1, + tauOut: 2.8760330654564486e-008, + sigmaOut: 1.8108178199517009, + dminOut: 9.5429721930147733e-016, + dmin1Out: 0.39406941605409296, + dmin2Out: 0.49198119275848701, + dnOut: 9.5429721930147733e-016, + dnm1Out: 0.39406941605409296, + dnm2Out: 0.49198119275848701, + }, + { + z: []float64{0.6701596647898166e+00, 0.6701596935501473e+00, 0.6115792910959321e-18, 0.5598544436768911e-18, 0.7341740136451356e+00, 0.7320755909723271e+00, 0.1410235162411720e-02, 0.2098451433139096e-02, 0.5100485180319990e+00, 0.4933914566812295e+00, 0.1447165246275905e-01, 0.1806732527351204e-01, 0.3940694751022140e+00, 0.4085410972771827e+00, 0.4309503539425748e-14, 0.5904812108429529e-07, 0.9542972193014773e-15, 0.2876033591836524e-07, 0.6115792910959321e-18, 0.5598544436768911e-18}, + i0: 1, + n0: 5, + pp: 0, + tau: 9.5429721930146451e-016, + sigma: 1.8108178487120317, + i0Out: 1, + n0Out: 5, + ppOut: 0, + tauOut: 9.5429721930146451e-016, + sigmaOut: 1.8108178487120317, + dminOut: 1.9721522630525295e-030, + dmin1Out: 0.38317668529976806, + dmin2Out: 0.50907067170667131, + dnOut: 1.9721522630525295e-030, + dnm1Out: 0.38317668529976806, + dnm2Out: 0.50907067170667131, + }, + { + z: []float64{0.6701596647898166e+00, 0.6701596647898156e+00, 
0.6115792910959321e-18, 0.6699979816704878e-18, 0.7341740136451356e+00, 0.7355842488075462e+00, 0.1410235162411720e-02, 0.9778463253267723e-03, 0.5100485180319990e+00, 0.5235423241694304e+00, 0.1447165246275905e-01, 0.1089278980244500e-01, 0.3940694751022140e+00, 0.3831766852997724e+00, 0.4309503539425748e-14, 0.1073277003016631e-28, 0.1810817848712033e+01}, + i0: 1, + n0: 4, + pp: 1, + tau: 0.31895966905528556, + sigma: 1.8108178487120326, + i0Out: 1, + n0Out: 4, + ppOut: 1, + tauOut: 0.31895966905528556, + sigmaOut: 1.8108178487120326, + dminOut: 4.4735699115826311e-002, + dmin1Out: 0.20335674284478694, + dmin2Out: 0.35119999573453003, + dnOut: 4.4735699115826311e-002, + dnm1Out: 0.20335674284478694, + dnm2Out: 0.41662457975226058, + }, + { + z: []float64{0.3511999957345300e+00, 0.6701596647898156e+00, 0.1403302870260262e-17, 0.6699979816704878e-18, 0.4176024260775874e+00, 0.7355842488075462e+00, 0.1225912269357841e-02, 0.9778463253267723e-03, 0.2142495326472319e+00, 0.5235423241694304e+00, 0.1948131712866055e-01, 0.1089278980244500e-01, 0.4473569911582631e-01, 0.3831766852997724e+00, 0.1403302870260262e-17, 0.6699979816704878e-18}, + i0: 1, + n0: 4, + pp: 0, + tau: 3.9897804510299034e-002, + sigma: 2.1297775177673182, + i0Out: 1, + n0Out: 4, + ppOut: 0, + tauOut: 3.9897804510299034e-002, + sigmaOut: 2.1297775177673182, + dminOut: 3.2556800493375310e-004, + dmin1Out: 0.17365859019389857, + dmin2Out: 0.31130219122423097, + dnOut: 3.2556800493375310e-004, + dnm1Out: 0.17365859019389857, + dnm2Out: 0.37770462156728835, + }, + { + z: []float64{0.3511999957345300e+00, 0.3113021912242310e+00, 0.1403302870260262e-17, 0.1882488140663987e-17, 0.4176024260775874e+00, 0.3789305338366462e+00, 0.1225912269357841e-02, 0.6931379430343076e-03, 0.2142495326472319e+00, 0.1931399073225591e+00, 0.1948131712866055e-01, 0.4512326600593524e-02, 0.4473569911582631e-01, 0.3255680049337531e-03, 0.1403302870260262e-17, 0.1882488140663987e-17, 0.1810817848712033e+01}, + i0: 1, + n0: 4, + pp: 1, + tau: 3.1797943619548145e-004, + sigma: 2.1696753222776173, + i0Out: 1, + n0Out: 4, + ppOut: 1, + tauOut: 3.1797943619548145e-004, + sigmaOut: 2.1696753222776173, + dminOut: 1.3065740500455263e-007, + dmin1Out: 0.19246898670783216, + dmin2Out: 0.31098421178803548, + dnOut: 1.3065740500455263e-007, + dnm1Out: 0.19246898670783216, + dnm2Out: 0.37861255440045072, + }, + { + z: []float64{0.3109842117880355e+00, 0.3113021912242310e+00, 0.2293789231233262e-17, 0.1882488140663987e-17, 0.3793056923434850e+00, 0.3789305338366462e+00, 0.3529411785314453e-03, 0.6931379430343076e-03, 0.1969813133084257e+00, 0.1931399073225591e+00, 0.7457911333267062e-05, 0.4512326600593524e-02, 0.1306574050045526e-06, 0.3255680049337531e-03, 0.2293789231233262e-17, 0.1882488140663987e-17}, + i0: 1, + n0: 4, + pp: 0, + tau: 1.3065240973281283e-007, + sigma: 2.1699933017138124, + i0Out: 1, + n0Out: 4, + ppOut: 0, + tauOut: 1.3065240973281283e-007, + sigmaOut: 2.1699933017138124, + dminOut: 4.4031878668096113e-014, + dmin1Out: 0.19679806330970273, + dmin2Out: 0.31098408113562576, + dnOut: 4.4031878668096113e-014, + dnm1Out: 0.19679806330970273, + dnm2Out: 0.37930556169107532, + }, + { + z: []float64{0.3109842117880355e+00, 0.3109840811356258e+00, 0.2293789231233262e-17, 0.2797722987188916e-17, 0.3793056923434850e+00, 0.3796585028696068e+00, 0.3529411785314453e-03, 0.1831193463132450e-03, 0.1969813133084257e+00, 0.1968055212210360e+00, 0.7457911333267062e-05, 0.4951239861123181e-11, 0.1306574050045526e-06, 0.4403187866809611e-13, 0.2293789231233262e-17, 
0.2797722987188916e-17, 0.1810817848712033e+01}, + i0: 1, + n0: 4, + pp: 1, + tau: 4.4031878666982759e-014, + sigma: 2.1699934323662222, + i0Out: 1, + n0Out: 4, + ppOut: 1, + tauOut: 4.4031878666982759e-014, + sigmaOut: 2.1699934323662222, + dminOut: 5.0676424551397798e-027, + dmin1Out: 0.19671064247077341, + dmin2Out: 0.31098408113558174, + dnOut: 5.0676424551397798e-027, + dnm1Out: 0.19671064247077341, + dnm2Out: 0.37965850286956282, + }, + { + z: []float64{0.3109840811355817e+00, 0.3109840811356258e+00, 0.3415542419024794e-17, 0.2797722987188916e-17, 0.3798416222158761e+00, 0.3796585028696068e+00, 0.9487875021856221e-04, 0.1831193463132450e-03, 0.1967106424757246e+00, 0.1968055212210360e+00, 0.1108289770587888e-23, 0.4951239861123181e-11, 0.5067642455139780e-26, 0.4403187866809611e-13, 0.3415542419024794e-17, 0.2797722987188916e-17}, + i0: 1, + n0: 4, + pp: 0, + tau: 5.0676424551397798e-027, + sigma: 2.1699934323662662, + i0Out: 1, + n0Out: 4, + ppOut: 0, + tauOut: 0.0000000000000000, + sigmaOut: 2.1699934323662662, + dminOut: 5.0676424551397798e-027, + dmin1Out: 0.19666151937261311, + dmin2Out: 0.31098408113558174, + dnOut: 5.0676424551397798e-027, + dnm1Out: 0.19666151937261311, + dnm2Out: 0.37984162221587608, + }, + { + z: []float64{0.3109840811355817e+00, 0.3109840811355817e+00, 0.3415542419024794e-17, 0.4171805735046273e-17, 0.3798416222158761e+00, 0.3799365009660947e+00, 0.9487875021856221e-04, 0.4912310311151891e-04, 0.1967106424757246e+00, 0.1966615193726131e+00, 0.1108289770587888e-23, 0.2855879641297252e-49, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + tau: 9.8330759686306557e-002, + sigma: 2.1699934323662662, + i0Out: 1, + n0Out: 3, + ppOut: 1, + tauOut: 9.8330759686306557e-002, + sigmaOut: 2.1699934323662662, + dminOut: 9.8296460175394978e-002, + dmin1Out: 0.21265332144927518, + dmin2Out: 0.21265332144927518, + dnOut: 9.8296460175394978e-002, + dnm1Out: 0.28160574127978810, + dnm2Out: 0.21265332144927518, + }, + { + z: []float64{0.2126533214492752e+00, 0.3109840811355817e+00, 0.7453545812882342e-17, 0.4171805735046273e-17, 0.2816548643828996e+00, 0.3799365009660947e+00, 0.3429951091160839e-04, 0.4912310311151891e-04, 0.9829646017539498e-01, 0.1966615193726131e+00, 0.3799365009660947e+00, 0.2855879641297252e-49}, + i0: 1, + n0: 3, + pp: 0, + tau: 9.7172513485343004e-002, + sigma: 2.2683241920525727, + i0Out: 1, + n0Out: 3, + ppOut: 0, + tauOut: 9.7172513485343004e-002, + sigmaOut: 2.2683241920525727, + dminOut: 1.1056745151512559e-003, + dmin1Out: 0.11548080796393217, + dmin2Out: 0.11548080796393217, + dnOut: 1.1056745151512559e-003, + dnm1Out: 0.18448235089755655, + dnm2Out: 0.11548080796393217, + }, + { + z: []float64{0.2126533214492752e+00, 0.1154808079639322e+00, 0.7453545812882342e-17, 0.1817901582187390e-16, 0.2816548643828996e+00, 0.1845166504084682e+00, 0.3429951091160839e-04, 0.1827217490071947e-04, 0.9829646017539498e-01, 0.1105674515151256e-02, 0.3799365009660947e+00, 0.2816548643828996e+00, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + tau: 1.0942861809085330e-003, + sigma: 2.3654967055379159, + i0Out: 1, + n0Out: 3, + ppOut: 1, + tauOut: 1.0942861809085330e-003, + sigmaOut: 2.3654967055379159, + dminOut: 1.1278200095129901e-005, + dmin1Out: 0.11438652178302365, + dmin2Out: 0.11438652178302365, + dnOut: 1.1278200095129901e-005, + dnm1Out: 0.18342236422755959, + dnm2Out: 0.11438652178302365, + }, + { + z: []float64{0.1143865217830237e+00, 0.1154808079639322e+00, 0.2932453102768040e-16, 0.1817901582187390e-16, 0.1834406364024603e+00, 
0.1845166504084682e+00, 0.1101341475930520e-06, 0.1827217490071947e-04, 0.1127820009512990e-04, 0.1105674515151256e-02, 0.1845166504084682e+00, 0.2816548643828996e+00}, + i0: 1, + n0: 3, + pp: 0, + tau: 1.1269238360546607e-005, + sigma: 2.3665909917188244, + i0Out: 1, + n0Out: 3, + ppOut: 0, + tauOut: 1.1269238360546607e-005, + sigmaOut: 2.3665909917188244, + dminOut: 8.9549629620034135e-009, + dmin1Out: 0.11437525254466312, + dmin2Out: 0.11437525254466312, + dnOut: 8.9549629620034135e-009, + dnm1Out: 0.18342936716409974, + dnm2Out: 0.11437525254466312, + }, + { + z: []float64{0.1143865217830237e+00, 0.1143752525446631e+00, 0.2932453102768040e-16, 0.4703212027287794e-16, 0.1834406364024603e+00, 0.1834294772982473e+00, 0.1101341475930520e-06, 0.6771621290952006e-11, 0.1127820009512990e-04, 0.8954962962003413e-08, 0.1845166504084682e+00, 0.1834406364024603e+00, 0.2169993432366266e+01}, + i0: 1, + n0: 3, + pp: 1, + tau: 8.9549072084035346e-009, + sigma: 2.3666022609571851, + i0Out: 1, + n0Out: 3, + ppOut: 1, + tauOut: 8.9549072084035346e-009, + sigmaOut: 2.3666022609571851, + dminOut: 5.5753269291151117e-014, + dmin1Out: 0.11437524358975594, + dmin2Out: 0.11437524358975594, + dnOut: 5.5753269291151117e-014, + dnm1Out: 0.18342946834334006, + dnm2Out: 0.11437524358975594, + }, + { + z: []float64{0.1143752435897560e+00, 0.1143752525446631e+00, 0.7542783706608855e-16, 0.4703212027287794e-16, 0.1834294683501117e+00, 0.1834294772982473e+00, 0.3305882004599510e-18, 0.6771621290952006e-11, 0.5575326929115112e-13, 0.8954962962003413e-08, 0.1834294772982473e+00, 0.1834406364024603e+00}, + i0: 1, + n0: 3, + pp: 0, + tau: 5.5753269214454873e-014, + sigma: 2.3666022699120921, + i0Out: 1, + n0Out: 3, + ppOut: 0, + tauOut: 5.5753269214454873e-014, + sigmaOut: 2.3666022699120921, + dminOut: 7.6696244203643861e-023, + dmin1Out: 0.11437524358970023, + dmin2Out: 0.11437524358970023, + dnOut: 7.6696244203643861e-023, + dnm1Out: 0.18342946835005580, + dnm2Out: 0.11437524358970023, + }, + { + z: []float64{0.1648283185136998e+01, 0.1396221235720571e+01, 0.1712714336271993e+00, 0.2520619494164272e+00, 0.1510753432847732e+01, 0.1119977945086946e+01, 0.3792800633372563e+00, 0.5620469213879850e+00, 0.1328441621586708e+01, 0.1019485448443405e+01, 0.1388998759717073e+00, 0.6882362364805590e+00, 0.4466938077796418e+00, 0.2681061628164644e+00, 0.7093206047768255e+00, 0.3174875209348847e+00, 0.6843112870203156e+00, 0.9979892152967577e+00, 0.6490582981441884e+00, 0.3956426765003833e+00, 0.5085572738629487e+00, 0.1122623886995757e+01, 0.9750235054014829e-02, 0.3499168501137979e-01, 0.1364886053450573e+00, 0.1417066070690837e+00, 0.2301225778544498e-01, 0.4532233329988395e-02, 0.1646009972289452e+01, 0.6930161671496210e+00, 0.2362515608142310e+00, 0.9760060629252760e+00, 0.5818602562677768e+00, 0.3984323866837953e+00, 0.1797665269485310e-01, 0.4196794303982125e+00, 0.5600419521166516e+00, 0.2492354636952108e-01, 0.2195137569256029e+00, 0.5530950584419837e+00, 0.4184071984843414e+00, 0.2222708575473020e+00, 0.2727864547293006e+00, 0.4156500978626423e+00, 0.6774373914466536e-01, 0.2745959086613283e+00, 0.1050967099374242e+00, 0.6593428521263771e-01, 0.2040338718098096e+00, 0.1079809097801335e+00, 0.1271971985482246e+00, 0.2011496719671002e+00, 0.4444741998443960e-01, 0.1290210252363728e+00, 0.5776327498150620e+00, 0.4262359329629137e-01, 0.3402556968467140e+00, 0.6023491555328507e+00, 0.1086565805630269e+00, 0.3155392911289253e+00, 0.9382999256694983e+00, 0.1171677238805356e+00, 0.7901625299559836e+00, 0.9297887823519896e+00, 
0.6095853796269167e+00, 0.7973955560628040e+00, 0.8018125008387630e+00, 0.6023523535200964e+00, 0.2443177602187348e-01, 0.8114406374558937e+00, 0.2277830378453201e+00, 0.1480363940474286e-01, 0.6969081780841352e+00, 0.3759308106650992e+00, 0.8012406122589412e-01, 0.5487604052643561e+00, 0.9781022865072954e-01, 0.1017549972519246e+00, 0.2961248981181939e-03, 0.7617929262469909e-01, 0.8408398800007548e-04, 0.3802088861182694e-03, 0.0000000000000000e+00, 0.0000000000000000e+00},
+ i0: 1,
+ n0: 21,
+ pp: 0,
+ tau: 0.0000000000000000,
+ sigma: 0.0000000000000000,
+ i0Out: 1,
+ n0Out: 21,
+ ppOut: 0,
+ tauOut: 0.0000000000000000,
+ sigmaOut: 0.0000000000000000,
+ dminOut: 7.8766827084626452e-005,
+ dmin1Out: 1.2363512593342330e-003,
+ dmin2Out: 1.2363512593342330e-003,
+ dnOut: 7.8766827084626452e-005,
+ dnm1Out: 4.3867054272813191e-003,
+ dnm2Out: 3.7622286348031123e-003,
+ },
+ } {
+ // TODO(btracey): Check other outputs
+ i0 := test.i0 - 1
+ n0 := test.n0 - 1
+ z := make([]float64, len(test.z))
+ copy(z, test.z)
+
+ i0Out, n0Out, ppOut, tauOut, sigmaOut, dminOut, dmin1Out, dmin2Out, dnOut, dnm1Out, dnm2Out := impl.Dlasq5(i0, n0, z, test.pp, test.tau, test.sigma)
+
+ if i0Out != test.i0Out-1 {
+ t.Errorf("Wrong i0. Want %v, got %v", test.i0Out, i0Out)
+ }
+ if n0Out != test.n0Out-1 {
+ t.Errorf("Wrong n0. Want %v, got %v", test.n0Out, n0Out)
+ }
+ if ppOut != test.ppOut {
+ t.Errorf("Wrong pp. Want %v, got %v", test.ppOut, ppOut)
+ }
+ if !floats.EqualWithinAbsOrRel(tauOut, test.tauOut, dTol, dTol) {
+ t.Errorf("Wrong tau. Want %v, got %v", test.tauOut, tauOut)
+ }
+ if !floats.EqualWithinAbsOrRel(sigmaOut, test.sigmaOut, dTol, dTol) {
+ t.Errorf("Wrong sigma. Want %v, got %v", test.sigmaOut, sigmaOut)
+ }
+
+ if !floats.EqualWithinAbsOrRel(dminOut, test.dminOut, dTol, dTol) {
+ t.Errorf("Wrong dmin. Want %v, got %v", test.dminOut, dminOut)
+ }
+ if !floats.EqualWithinAbsOrRel(dmin1Out, test.dmin1Out, dTol, dTol) {
+ t.Errorf("Wrong dmin1. Want %v, got %v", test.dmin1Out, dmin1Out)
+ }
+ if !floats.EqualWithinAbsOrRel(dmin2Out, test.dmin2Out, dTol, dTol) {
+ t.Errorf("Wrong dmin2. Want %v, got %v", test.dmin2Out, dmin2Out)
+ }
+ if !floats.EqualWithinAbsOrRel(dnOut, test.dnOut, dTol, dTol) {
+ t.Errorf("Wrong dn. Want %v, got %v", test.dnOut, dnOut)
+ }
+ if !floats.EqualWithinAbsOrRel(dnm1Out, test.dnm1Out, dTol, dTol) {
+ t.Errorf("Wrong dnm1. Want %v, got %v", test.dnm1Out, dnm1Out)
+ }
+ if !floats.EqualWithinAbsOrRel(dnm2Out, test.dnm2Out, dTol, dTol) {
+ t.Errorf("Wrong dnm2. Want %v, got %v", test.dnm2Out, dnm2Out)
+ }
+ }
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasr.go
new file mode 100644
index 0000000..fa4fce0
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasr.go
@@ -0,0 +1,168 @@
+// Copyright ©2015 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package testlapack
+
+import (
+ "math"
+ "testing"
+
+ "golang.org/x/exp/rand"
+
+ "gonum.org/v1/gonum/blas"
+ "gonum.org/v1/gonum/blas/blas64"
+ "gonum.org/v1/gonum/floats"
+ "gonum.org/v1/gonum/lapack"
+)
+
+type Dlasrer interface {
+ Dlasr(side blas.Side, pivot lapack.Pivot, direct lapack.Direct, m, n int, c, s, a []float64, lda int)
+}
+
+func DlasrTest(t *testing.T, impl Dlasrer) {
+ rnd := rand.New(rand.NewSource(1))
+ for _, side := range []blas.Side{blas.Left, blas.Right} {
+ for _, pivot := range []lapack.Pivot{lapack.Variable, lapack.Top, lapack.Bottom} {
+ for _, direct := range []lapack.Direct{lapack.Forward, lapack.Backward} {
+ for _, test := range []struct {
+ m, n, lda int
+ }{
+ {5, 5, 0},
+ {5, 10, 0},
+ {10, 5, 0},
+
+ {5, 5, 20},
+ {5, 10, 20},
+ {10, 5, 20},
+ } {
+ m := test.m
+ n := test.n
+ lda := test.lda
+ if lda == 0 {
+ lda = n
+ }
+ // Allocate m×n matrix A and fill it with random numbers.
+ a := make([]float64, m*lda)
+ for i := range a {
+ a[i] = rnd.Float64()
+ }
+
+ // Allocate slices for implicitly
+ // represented rotation matrices.
+ var s, c []float64
+ if side == blas.Left {
+ s = make([]float64, m-1)
+ c = make([]float64, m-1)
+ } else {
+ s = make([]float64, n-1)
+ c = make([]float64, n-1)
+ }
+ for k := range s {
+ // Generate a random number in [0,2*pi).
+ theta := rnd.Float64() * 2 * math.Pi
+ s[k] = math.Sin(theta)
+ c[k] = math.Cos(theta)
+ }
+ // Save a copy of the original A for the reference computation below.
+ aCopy := make([]float64, len(a))
+ copy(aCopy, a)
+
+ // Apply the sequence of plane rotations
+ // in s and c to the matrix A.
+ impl.Dlasr(side, pivot, direct, m, n, c, s, a, lda)
+
+ // Compute a reference solution by multiplying A
+ // by explicitly formed rotation matrix P.
+ pSize := m
+ if side == blas.Right {
+ pSize = n
+ }
+ // Allocate matrix P.
+ p := blas64.General{
+ Rows: pSize,
+ Cols: pSize,
+ Stride: pSize,
+ Data: make([]float64, pSize*pSize),
+ }
+ // Allocate matrix P_k.
+ pk := blas64.General{
+ Rows: pSize,
+ Cols: pSize,
+ Stride: pSize,
+ Data: make([]float64, pSize*pSize),
+ }
+ ptmp := blas64.General{
+ Rows: pSize,
+ Cols: pSize,
+ Stride: pSize,
+ Data: make([]float64, pSize*pSize),
+ }
+ // Initialize P to the identity matrix.
+ for i := 0; i < pSize; i++ {
+ p.Data[i*p.Stride+i] = 1
+ ptmp.Data[i*p.Stride+i] = 1
+ }
+ // Iterate over the sequence of plane rotations.
+ for k := range s {
+ // Set P_k to the identity matrix.
+ for i := range p.Data {
+ pk.Data[i] = 0
+ }
+ for i := 0; i < pSize; i++ {
+ pk.Data[i*p.Stride+i] = 1
+ }
+ // Set the corresponding elements of P_k.
+ switch pivot {
+ case lapack.Variable:
+ pk.Data[k*p.Stride+k] = c[k]
+ pk.Data[k*p.Stride+k+1] = s[k]
+ pk.Data[(k+1)*p.Stride+k] = -s[k]
+ pk.Data[(k+1)*p.Stride+k+1] = c[k]
+ case lapack.Top:
+ pk.Data[0] = c[k]
+ pk.Data[k+1] = s[k]
+ pk.Data[(k+1)*p.Stride] = -s[k]
+ pk.Data[(k+1)*p.Stride+k+1] = c[k]
+ case lapack.Bottom:
+ // For the bottom pivot, P_k is a rotation in the (k, pSize-1) plane.
+ pk.Data[k*p.Stride+k] = c[k]
+ pk.Data[k*p.Stride+pSize-1] = s[k]
+ pk.Data[(pSize-1)*p.Stride+k] = -s[k]
+ pk.Data[(pSize-1)*p.Stride+pSize-1] = c[k]
+ }
+ // Compute P <- P_k * P or P <- P * P_k.
+ if direct == lapack.Forward {
+ blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, pk, ptmp, 0, p)
+ } else {
+ blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, ptmp, pk, 0, p)
+ }
+ copy(ptmp.Data, p.Data)
+ }
+
+ // Build a dense copy of the original A for the reference product.
+ aMat := blas64.General{
+ Rows: m,
+ Cols: n,
+ Stride: lda,
+ Data: make([]float64, m*lda),
+ }
+ copy(aMat.Data, aCopy)
+ newA := blas64.General{
+ Rows: m,
+ Cols: n,
+ Stride: lda,
+ Data: make([]float64, m*lda),
+ }
+ // Pre-fill newA with the original A so that any padding beyond the
+ // m×n block matches a; Gemm below overwrites only the m×n block.
+ copy(newA.Data, aCopy)
+ // Compute P * A or A * P.
+ if side == blas.Left {
+ blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, p, aMat, 0, newA)
+ } else {
+ blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aMat, p, 0, newA)
+ }
+ // Compare the result from Dlasr with the reference solution.
+ if !floats.EqualApprox(newA.Data, a, 1e-12) {
+ t.Errorf("A update mismatch")
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasrt.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasrt.go
new file mode 100644
index 0000000..9fb5f9c
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasrt.go
@@ -0,0 +1,80 @@
+// Copyright ©2016 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package testlapack
+
+import (
+ "testing"
+
+ "gonum.org/v1/gonum/floats"
+ "gonum.org/v1/gonum/lapack"
+)
+
+type Dlasrter interface {
+ Dlasrt(s lapack.Sort, n int, d []float64)
+}
+
+func DlasrtTest(t *testing.T, impl Dlasrter) {
+ for ti, test := range []struct {
+ data []float64
+ wantInc []float64
+ wantDec []float64
+ }{
+ {
+ data: nil,
+ wantInc: nil,
+ wantDec: nil,
+ },
+ {
+ data: []float64{},
+ wantInc: []float64{},
+ wantDec: []float64{},
+ },
+ {
+ data: []float64{1},
+ wantInc: []float64{1},
+ wantDec: []float64{1},
+ },
+ {
+ data: []float64{1, 2},
+ wantInc: []float64{1, 2},
+ wantDec: []float64{2, 1},
+ },
+ {
+ data: []float64{1, 2, -3},
+ wantInc: []float64{-3, 1, 2},
+ wantDec: []float64{2, 1, -3},
+ },
+ {
+ data: []float64{-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5},
+ wantInc: []float64{-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5},
+ wantDec: []float64{5, 4, 3, 2, 1, 0, -1, -2, -3, -4, -5},
+ },
+ {
+ data: []float64{5, 4, 3, 2, 1, 0, -1, -2, -3, -4, -5},
+ wantInc: []float64{-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5},
+ wantDec: []float64{5, 4, 3, 2, 1, 0, -1, -2, -3, -4, -5},
+ },
+ {
+ data: []float64{-2, 4, -1, 2, -4, 0, 3, 5, -5, 1, -3},
+ wantInc: []float64{-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5},
+ wantDec: []float64{5, 4, 3, 2, 1, 0, -1, -2, -3, -4, -5},
+ },
+ } {
+ n := len(test.data)
+ ds := make([]float64, n)
+
+ copy(ds, test.data)
+ impl.Dlasrt(lapack.SortIncreasing, n, ds)
+ if !floats.Equal(ds, test.wantInc) {
+ t.Errorf("Case #%v: unexpected result of SortIncreasing", ti)
+ }
+
+ copy(ds, test.data)
+ impl.Dlasrt(lapack.SortDecreasing, n, ds)
+ if !floats.Equal(ds, test.wantDec) {
+ t.Errorf("Case #%v: unexpected result of SortDecreasing", ti)
+ }
+ }
+}
diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasv2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasv2.go
new file mode 100644
index 0000000..ca62f67
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasv2.go
@@ -0,0 +1,49 @@
+// Copyright ©2015 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dlasv2er interface { + Dlasv2(f, g, h float64) (ssmin, ssmax, snr, csr, snl, csl float64) +} + +func Dlasv2Test(t *testing.T, impl Dlasv2er) { + rnd := rand.New(rand.NewSource(1)) + for i := 0; i < 100; i++ { + f := rnd.NormFloat64() + g := rnd.NormFloat64() + h := rnd.NormFloat64() + + ssmin, ssmax, snr, csr, snl, csl := impl.Dlasv2(f, g, h) + + // tmp = + // [ csl snl] [f g] + // [-snl csl] [0 h] + tmp11 := csl * f + tmp12 := csl*g + snl*h + tmp21 := -snl * f + tmp22 := -snl*g + csl*h + // lhs = + // [tmp11 tmp12] [csr -snr] + // [tmp21 tmp22] [snr csr] + ans11 := tmp11*csr + tmp12*snr + ans12 := tmp11*-snr + tmp12*csr + ans21 := tmp21*csr + tmp22*snr + ans22 := tmp21*-snr + tmp22*csr + + lhs := []float64{ans11, ans12, ans21, ans22} + rhs := []float64{ssmax, 0, 0, ssmin} + if !floats.EqualApprox(rhs, lhs, 1e-12) { + t.Errorf("SVD mismatch. f = %v, g = %v, h = %v.\nLHS: %v\nRHS: %v", f, g, h, lhs, rhs) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaswp.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaswp.go new file mode 100644 index 0000000..b63b6cd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlaswp.go @@ -0,0 +1,130 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlaswper interface { + Dlaswp(n int, a []float64, lda, k1, k2 int, ipiv []int, incX int) +} + +func DlaswpTest(t *testing.T, impl Dlaswper) { + for ti, test := range []struct { + k1, k2 int + ipiv []int + incX int + + want blas64.General + }{ + { + k1: 0, + k2: 2, + ipiv: []int{0, 1, 2}, + incX: 1, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12, + }, + }, + }, + { + k1: 0, + k2: 2, + ipiv: []int{0, 1, 2}, + incX: -1, + want: blas64.General{ + Rows: 4, + Cols: 3, + Stride: 3, + Data: []float64{ + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 10, 11, 12, + }, + }, + }, + { + k1: 0, + k2: 2, + ipiv: []int{1, 2, 3}, + incX: 1, + want: blas64.General{ + Rows: 5, + Cols: 3, + Stride: 3, + Data: []float64{ + 4, 5, 6, + 7, 8, 9, + 10, 11, 12, + 1, 2, 3, + 13, 14, 15, + }, + }, + }, + { + k1: 0, + k2: 2, + ipiv: []int{1, 2, 3}, + incX: -1, + want: blas64.General{ + Rows: 5, + Cols: 3, + Stride: 3, + Data: []float64{ + 10, 11, 12, + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + 13, 14, 15, + }, + }, + }, + } { + m := test.want.Rows + n := test.want.Cols + k1 := test.k1 + k2 := test.k2 + if len(test.ipiv) != k2+1 { + panic("bad length of ipiv") + } + incX := test.incX + for _, extra := range []int{0, 11} { + a := zeros(m, n, n+extra) + c := 1 + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a.Data[i*a.Stride+j] = float64(c) + c++ + } + } + + ipiv := make([]int, len(test.ipiv)) + copy(ipiv, test.ipiv) + + impl.Dlaswp(n, a.Data, a.Stride, k1, k2, ipiv, incX) + + prefix := fmt.Sprintf("Case %v (m=%v,n=%v,k1=%v,k2=%v,extra=%v)", ti, m, n, k1, k2, extra) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + + if !equalApproxGeneral(a, test.want, 0) { + t.Errorf("%v: unexpected A\n%v\n%v", prefix, a, test.want) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasy2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasy2.go new file mode 100644 index 
0000000..1128fd9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlasy2.go @@ -0,0 +1,105 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlasy2er interface { + Dlasy2(tranl, tranr bool, isgn, n1, n2 int, tl []float64, ldtl int, tr []float64, ldtr int, b []float64, ldb int, x []float64, ldx int) (scale, xnorm float64, ok bool) +} + +func Dlasy2Test(t *testing.T, impl Dlasy2er) { + rnd := rand.New(rand.NewSource(1)) + for _, tranl := range []bool{true, false} { + for _, tranr := range []bool{true, false} { + for _, isgn := range []int{1, -1} { + for _, n1 := range []int{0, 1, 2} { + for _, n2 := range []int{0, 1, 2} { + for _, extra := range []int{0, 1, 2, 13} { + for cas := 0; cas < 1000; cas++ { + testDlasy2(t, impl, tranl, tranr, isgn, n1, n2, extra, rnd) + } + } + } + } + } + } + } +} + +func testDlasy2(t *testing.T, impl Dlasy2er, tranl, tranr bool, isgn, n1, n2, extra int, rnd *rand.Rand) { + const tol = 1e-10 + + tl := randomGeneral(n1, n1, n1+extra, rnd) + tr := randomGeneral(n2, n2, n2+extra, rnd) + b := randomGeneral(n1, n2, n2+extra, rnd) + x := randomGeneral(n1, n2, n2+extra, rnd) + + scale, xnorm, ok := impl.Dlasy2(tranl, tranr, isgn, n1, n2, tl.Data, tl.Stride, tr.Data, tr.Stride, b.Data, b.Stride, x.Data, x.Stride) + if scale > 1 { + t.Errorf("invalid value of scale, want <= 1, got %v", scale) + } + if n1 == 0 || n2 == 0 { + return + } + + prefix := fmt.Sprintf("Case n1=%v, n2=%v, isgn=%v", n1, n2, isgn) + + // Check any invalid modifications of x. + if !generalOutsideAllNaN(x) { + t.Errorf("%v: out-of-range write to x\n%v", prefix, x.Data) + } + + var xnormWant float64 + for i := 0; i < n1; i++ { + var rowsum float64 + for j := 0; j < n2; j++ { + rowsum += math.Abs(x.Data[i*x.Stride+j]) + } + if rowsum > xnormWant { + xnormWant = rowsum + } + } + if xnormWant != xnorm { + t.Errorf("%v: unexpected xnorm: want %v, got %v", prefix, xnormWant, xnorm) + } + + // Multiply b by scale to get the wanted right-hand side. + for i := 0; i < n1; i++ { + for j := 0; j < n2; j++ { + b.Data[i*b.Stride+j] *= scale + } + } + // Compute the wanted left-hand side. + lhsWant := randomGeneral(n1, n2, n2, rnd) + if tranl { + blas64.Gemm(blas.Trans, blas.NoTrans, 1, tl, x, 0, lhsWant) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tl, x, 0, lhsWant) + } + if tranr { + blas64.Gemm(blas.NoTrans, blas.Trans, float64(isgn), x, tr, 1, lhsWant) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, float64(isgn), x, tr, 1, lhsWant) + } + // Compare them. + for i := 0; i < n1; i++ { + for j := 0; j < n2; j++ { + diff := lhsWant.Data[i*lhsWant.Stride+j] - b.Data[i*b.Stride+j] + if math.Abs(diff) > tol && ok { + t.Errorf("%v: unexpected result, diff[%v,%v]=%v", prefix, i, j, diff) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrd.go new file mode 100644 index 0000000..994a31a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrd.go @@ -0,0 +1,272 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlatrder interface { + Dlatrd(uplo blas.Uplo, n, nb int, a []float64, lda int, e, tau, w []float64, ldw int) +} + +func DlatrdTest(t *testing.T, impl Dlatrder) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, test := range []struct { + n, nb, lda, ldw int + }{ + {5, 2, 0, 0}, + {5, 5, 0, 0}, + + {5, 3, 10, 11}, + {5, 5, 10, 11}, + } { + n := test.n + nb := test.nb + lda := test.lda + if lda == 0 { + lda = n + } + ldw := test.ldw + if ldw == 0 { + ldw = nb + } + + // Allocate n×n matrix A and fill it with random numbers. + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + + // Allocate output slices and matrix W and fill them + // with NaN. All their elements should be overwritten by + // Dlatrd. + e := make([]float64, n-1) + for i := range e { + e[i] = math.NaN() + } + tau := make([]float64, n-1) + for i := range tau { + tau[i] = math.NaN() + } + w := make([]float64, n*ldw) + for i := range w { + w[i] = math.NaN() + } + + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Reduce nb rows and columns of the symmetric matrix A + // defined by uplo triangle to symmetric tridiagonal + // form. + impl.Dlatrd(uplo, n, nb, a, lda, e, tau, w, ldw) + + // Construct Q from elementary reflectors stored in + // columns of A. + q := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + // Initialize Q to the identity matrix. + for i := 0; i < n; i++ { + q.Data[i*q.Stride+i] = 1 + } + if uplo == blas.Upper { + for i := n - 1; i >= n-nb; i-- { + if i == 0 { + continue + } + + // Extract the elementary reflector v from A. + v := blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + for j := 0; j < i-1; j++ { + v.Data[j] = a[j*lda+i] + } + v.Data[i-1] = 1 + + // Compute H = I - tau[i-1] * v * v^T. + h := blas64.General{ + Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n), + } + for j := 0; j < n; j++ { + h.Data[j*n+j] = 1 + } + blas64.Ger(-tau[i-1], v, v, h) + + // Update Q <- Q * H. + qTmp := blas64.General{ + Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n), + } + copy(qTmp.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qTmp, h, 0, q) + } + } else { + for i := 0; i < nb; i++ { + if i == n-1 { + continue + } + + // Extract the elementary reflector v from A. + v := blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + v.Data[i+1] = 1 + for j := i + 2; j < n; j++ { + v.Data[j] = a[j*lda+i] + } + + // Compute H = I - tau[i] * v * v^T. + h := blas64.General{ + Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n), + } + for j := 0; j < n; j++ { + h.Data[j*n+j] = 1 + } + blas64.Ger(-tau[i], v, v, h) + + // Update Q <- Q * H. + qTmp := blas64.General{ + Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n), + } + copy(qTmp.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qTmp, h, 0, q) + } + } + errStr := fmt.Sprintf("isUpper = %v, n = %v, nb = %v", uplo == blas.Upper, n, nb) + if !isOrthogonal(q) { + t.Errorf("Case %v: Q not orthogonal", errStr) + } + aGen := genFromSym(blas64.Symmetric{N: n, Stride: lda, Uplo: uplo, Data: aCopy}) + if !dlatrdCheckDecomposition(t, uplo, n, nb, e, a, lda, aGen, q) { + t.Errorf("Case %v: Decomposition mismatch", errStr) + } + } + } +} + +// dlatrdCheckDecomposition checks that the first nb rows have been successfully +// reduced. 
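+// It forms Q^T * A * Q explicitly and verifies that the reduced block is
+// tridiagonal: for blas.Upper the last nb rows and columns are checked and
+// for blas.Lower the first nb, with the diagonal compared against a and the
+// off-diagonal compared against e.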
+func dlatrdCheckDecomposition(t *testing.T, uplo blas.Uplo, n, nb int, e, a []float64, lda int, aGen, q blas64.General) bool { + // Compute ans = Q^T * A * Q. + // ans should be a tridiagonal matrix in the first or last nb rows and + // columns, depending on uplo. + tmp := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + ans := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, aGen, 0, tmp) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp, q, 0, ans) + + // Compare the output of Dlatrd (stored in a and e) with the explicit + // reduction to tridiagonal matrix Q^T * A * Q (stored in ans). + if uplo == blas.Upper { + for i := n - nb; i < n; i++ { + for j := 0; j < n; j++ { + v := ans.Data[i*ans.Stride+j] + switch { + case i == j: + // Diagonal elements of a and ans should match. + if math.Abs(v-a[i*lda+j]) > 1e-10 { + return false + } + case i == j-1: + // Superdiagonal elements in a should be 1. + if math.Abs(a[i*lda+j]-1) > 1e-10 { + return false + } + // Superdiagonal elements of ans should match e. + if math.Abs(v-e[i]) > 1e-10 { + return false + } + case i == j+1: + default: + // All other elements should be 0. + if math.Abs(v) > 1e-10 { + return false + } + } + } + } + } else { + for i := 0; i < nb; i++ { + for j := 0; j < n; j++ { + v := ans.Data[i*ans.Stride+j] + switch { + case i == j: + // Diagonal elements of a and ans should match. + if math.Abs(v-a[i*lda+j]) > 1e-10 { + return false + } + case i == j-1: + case i == j+1: + // Subdiagonal elements in a should be 1. + if math.Abs(a[i*lda+j]-1) > 1e-10 { + return false + } + // Subdiagonal elements of ans should match e. + if math.Abs(v-e[i-1]) > 1e-10 { + return false + } + default: + // All other elements should be 0. + if math.Abs(v) > 1e-10 { + return false + } + } + } + } + } + return true +} + +// genFromSym constructs a (symmetric) general matrix from the data in the +// symmetric. +// TODO(btracey): Replace other constructions of this with a call to this function. +func genFromSym(a blas64.Symmetric) blas64.General { + n := a.N + lda := a.Stride + uplo := a.Uplo + b := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := a.Data[i*lda+j] + if uplo == blas.Lower { + v = a.Data[j*lda+i] + } + b.Data[i*n+j] = v + b.Data[j*n+i] = v + } + } + return b +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrs.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrs.go new file mode 100644 index 0000000..6d0bbda --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlatrs.go @@ -0,0 +1,142 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlatrser interface { + Dlatrs(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, normin bool, n int, a []float64, lda int, x []float64, cnorm []float64) (scale float64) +} + +func DlatrsTest(t *testing.T, impl Dlatrser) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, trans := range []blas.Transpose{blas.Trans, blas.NoTrans} { + for _, n := range []int{0, 1, 2, 3, 4, 5, 6, 7, 10, 20, 50, 100} { + for _, lda := range []int{n, 2*n + 1} { + lda = max(1, lda) + imats := []int{7, 11, 12, 13, 14, 15, 16, 17, 18} + if n < 6 { + imats = append(imats, 19) + } + for _, imat := range imats { + testDlatrs(t, impl, imat, uplo, trans, n, lda, rnd) + } + } + } + } + } +} + +func testDlatrs(t *testing.T, impl Dlatrser, imat int, uplo blas.Uplo, trans blas.Transpose, n, lda int, rnd *rand.Rand) { + const tol = 1e-14 + + a := nanSlice(n * lda) + b := nanSlice(n) + work := make([]float64, 3*n) + + // Generate triangular test matrix and right hand side. + diag := dlattr(imat, uplo, trans, n, a, lda, b, work, rnd) + if imat <= 10 { + // b has not been generated. + dlarnv(b, 3, rnd) + } + + cnorm := nanSlice(n) + x := make([]float64, n) + + // Call Dlatrs with normin=false. + copy(x, b) + scale := impl.Dlatrs(uplo, trans, diag, false, n, a, lda, x, cnorm) + prefix := fmt.Sprintf("Case imat=%v (n=%v,lda=%v,trans=%v,uplo=%v,diag=%v", imat, n, lda, trans, uplo, diag) + for i, v := range cnorm { + if math.IsNaN(v) { + t.Errorf("%v: cnorm[%v] not computed (scale=%v,normin=false)", prefix, i, scale) + } + } + resid, hasNaN := dlatrsResidual(uplo, trans, diag, n, a, lda, scale, cnorm, x, b, work[:n]) + if hasNaN { + t.Errorf("%v: unexpected NaN (scale=%v,normin=false)", prefix, scale) + } else if resid > tol { + t.Errorf("%v: residual %v too large (scale=%v,normin=false)", prefix, resid, scale) + } + + // Call Dlatrs with normin=true because cnorm has been filled. + copy(x, b) + scale = impl.Dlatrs(uplo, trans, diag, true, n, a, lda, x, cnorm) + resid, hasNaN = dlatrsResidual(uplo, trans, diag, n, a, lda, scale, cnorm, x, b, work[:n]) + if hasNaN { + t.Errorf("%v: unexpected NaN (scale=%v,normin=true)", prefix, scale) + } else if resid > tol { + t.Errorf("%v: residual %v too large (scale=%v,normin=true)", prefix, resid, scale) + } +} + +// dlatrsResidual returns norm(trans(A)*x-scale*b) / (norm(trans(A))*norm(x)*eps) +// and whether NaN has been encountered in the process. +func dlatrsResidual(uplo blas.Uplo, trans blas.Transpose, diag blas.Diag, n int, a []float64, lda int, scale float64, cnorm []float64, x, b, work []float64) (resid float64, hasNaN bool) { + if n == 0 { + return 0, false + } + + // Compute the norm of the triangular matrix A using the column norms + // already computed by Dlatrs. 
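+ // cnorm[j] holds the norm of the off-diagonal part of column j as
+ // computed by Dlatrs, so |A[j,j]|+cnorm[j] (or 1+cnorm[j] for a unit
+ // diagonal) bounds the norm of column j; tnorm takes the maximum over
+ // all columns as an estimate of norm(A).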
+ var tnorm float64 + if diag == blas.NonUnit { + for j := 0; j < n; j++ { + tnorm = math.Max(tnorm, math.Abs(a[j*lda+j])+cnorm[j]) + } + } else { + for j := 0; j < n; j++ { + tnorm = math.Max(tnorm, 1+cnorm[j]) + } + } + + eps := dlamchE + smlnum := dlamchS + bi := blas64.Implementation() + + // Compute norm(trans(A)*x-scale*b) / (norm(trans(A))*norm(x)*eps) + copy(work, x) + ix := bi.Idamax(n, work, 1) + xnorm := math.Max(1, math.Abs(work[ix])) + xscal := 1 / xnorm / float64(n) + bi.Dscal(n, xscal, work, 1) + bi.Dtrmv(uplo, trans, diag, n, a, lda, work, 1) + bi.Daxpy(n, -scale*xscal, b, 1, work, 1) + for _, v := range work { + if math.IsNaN(v) { + return 1 / eps, true + } + } + ix = bi.Idamax(n, work, 1) + resid = math.Abs(work[ix]) + ix = bi.Idamax(n, x, 1) + xnorm = math.Abs(x[ix]) + if resid*smlnum <= xnorm { + if xnorm > 0 { + resid /= xnorm + } + } else if resid > 0 { + resid = 1 / eps + } + if resid*smlnum <= tnorm { + if tnorm > 0 { + resid /= tnorm + } + } else if resid > 0 { + resid = 1 / eps + } + return resid, false +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauu2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauu2.go new file mode 100644 index 0000000..70ba940 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauu2.go @@ -0,0 +1,112 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dlauu2er interface { + Dlauu2(uplo blas.Uplo, n int, a []float64, lda int) +} + +func Dlauu2Test(t *testing.T, impl Dlauu2er) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + name := "Upper" + if uplo == blas.Lower { + name = "Lower" + } + t.Run(name, func(t *testing.T) { + ns := []int{0, 1, 2, 3, 4, 5, 10, 25} + dlauuTest(t, impl.Dlauu2, uplo, ns) + }) + } +} + +func dlauuTest(t *testing.T, dlauu func(blas.Uplo, int, []float64, int), uplo blas.Uplo, ns []int) { + const tol = 1e-13 + + bi := blas64.Implementation() + rnd := rand.New(rand.NewSource(1)) + + for _, n := range ns { + for _, lda := range []int{max(1, n), n + 11} { + prefix := fmt.Sprintf("n=%v,lda=%v", n, lda) + + // Allocate n×n matrix A and fill it with random numbers. + // Only its uplo triangle will be used below. + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + // Create a copy of A. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Compute U*U^T or L^T*L using Dlauu?. + dlauu(uplo, n, a, lda) + + if n == 0 { + continue + } + + // * Check that the triangle of A opposite to uplo has not been modified. + // * Convert the result of Dlauu? into a dense symmetric matrix. + // * Zero out the triangle in aCopy opposite to uplo. + if uplo == blas.Upper { + if !sameLowerTri(n, aCopy, lda, a, lda) { + t.Errorf("%v: unexpected modification in lower triangle", prefix) + continue + } + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = a[j*lda+i] + aCopy[i*lda+j] = 0 + } + } + } else { + if !sameUpperTri(n, aCopy, lda, a, lda) { + t.Errorf("%v: unexpected modification in upper triangle", prefix) + continue + } + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + a[i*lda+j] = a[j*lda+i] + aCopy[i*lda+j] = 0 + } + } + } + + // Compute U*U^T or L^T*L using Dgemm with U and L + // represented as dense triangular matrices. 
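+ // The triangle of aCopy opposite to uplo was zeroed above, so aCopy can
+ // be passed to Dgemm as a dense representation of U or L.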
+ ldwant := n + want := make([]float64, n*ldwant) + if uplo == blas.Upper { + // Use aCopy as a dense representation of the upper triangular U. + u := aCopy + ldu := lda + // Compute U * U^T and store the result into want. + bi.Dgemm(blas.NoTrans, blas.Trans, n, n, n, + 1, u, ldu, u, ldu, 0, want, ldwant) + } else { + // Use aCopy as a dense representation of the lower triangular L. + l := aCopy + ldl := lda + // Compute L^T * L and store the result into want. + bi.Dgemm(blas.Trans, blas.NoTrans, n, n, n, + 1, l, ldl, l, ldl, 0, want, ldwant) + } + if !equalApprox(n, n, a, lda, want, tol) { + t.Errorf("%v: unexpected result", prefix) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauum.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauum.go new file mode 100644 index 0000000..604ae7d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dlauum.go @@ -0,0 +1,30 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/blas" +) + +type Dlauumer interface { + Dlauum(uplo blas.Uplo, n int, a []float64, lda int) +} + +func DlauumTest(t *testing.T, impl Dlauumer) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + name := "Upper" + if uplo == blas.Lower { + name = "Lower" + } + t.Run(name, func(t *testing.T) { + // Include small and large sizes to make sure that both + // unblocked and blocked paths are taken. + ns := []int{0, 1, 2, 3, 4, 5, 10, 25, 31, 32, 33, 63, 64, 65, 127, 128, 129} + dlauuTest(t, impl.Dlauum, uplo, ns) + }) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/doc.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/doc.go new file mode 100644 index 0000000..7ef9dc5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package testlapack implements a set of testing routines for Lapack functions. +package testlapack // import "gonum.org/v1/gonum/lapack/testlapack" diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2l.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2l.go new file mode 100644 index 0000000..aae30bf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2l.go @@ -0,0 +1,55 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" +) + +type Dorg2ler interface { + Dorg2l(m, n, k int, a []float64, lda int, tau, work []float64) + Dgeql2er +} + +func Dorg2lTest(t *testing.T, impl Dorg2ler) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, k, lda int + }{ + {5, 4, 3, 0}, + {5, 4, 4, 0}, + {3, 3, 2, 0}, + {5, 5, 5, 0}, + {5, 4, 3, 11}, + {5, 4, 4, 11}, + {3, 3, 2, 11}, + {5, 5, 5, 11}, + } { + m := test.m + n := test.n + k := test.k + lda := test.lda + if lda == 0 { + lda = n + } + + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + tau := nanSlice(max(m, n)) + work := make([]float64, n) + impl.Dgeql2(m, n, a, lda, tau, work) + + impl.Dorg2l(m, n, k, a, lda, tau[n-k:], work) + if !hasOrthonormalColumns(blas64.General{Rows: m, Cols: n, Data: a, Stride: lda}) { + t.Errorf("Case m=%v, n=%v, k=%v: columns of Q not orthonormal", m, n, k) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2r.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2r.go new file mode 100644 index 0000000..04bbf4a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorg2r.go @@ -0,0 +1,79 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dorg2rer interface { + Dgeqrfer + Dorg2r(m, n, k int, a []float64, lda int, tau []float64, work []float64) +} + +func Dorg2rTest(t *testing.T, impl Dorg2rer) { + rnd := rand.New(rand.NewSource(1)) + for ti, test := range []struct { + m, n, k, lda int + }{ + {3, 3, 0, 0}, + {4, 3, 0, 0}, + {3, 3, 2, 0}, + {4, 3, 2, 0}, + + {5, 5, 0, 20}, + {5, 5, 3, 20}, + {10, 5, 0, 20}, + {10, 5, 2, 20}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = test.n + } + // Allocate m×n matrix A and fill it with random numbers. + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + + // Compute the QR decomposition of A. + tau := make([]float64, min(m, n)) + work := make([]float64, 1) + impl.Dgeqrf(m, n, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgeqrf(m, n, a, lda, tau, work, len(work)) + + // Compute the matrix Q explicitly using the first k elementary reflectors. + k := test.k + if k == 0 { + k = n + } + q := constructQK("QR", m, n, k, a, lda, tau) + + // Compute the matrix Q using Dorg2r. + impl.Dorg2r(m, n, k, a, lda, tau, work) + + // Check that the first n columns of both results match. + same := true + loop: + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + if !floats.EqualWithinAbsOrRel(q.Data[i*q.Stride+j], a[i*lda+j], 1e-12, 1e-12) { + same = false + break loop + } + } + } + if !same { + t.Errorf("Case %v: Q mismatch", ti) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgbr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgbr.go new file mode 100644 index 0000000..ba6f3d2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgbr.go @@ -0,0 +1,156 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dorgbrer interface { + Dorgbr(vect lapack.GenOrtho, m, n, k int, a []float64, lda int, tau, work []float64, lwork int) + Dgebrder +} + +func DorgbrTest(t *testing.T, impl Dorgbrer) { + rnd := rand.New(rand.NewSource(1)) + for _, vect := range []lapack.GenOrtho{lapack.GenerateQ, lapack.GeneratePT} { + for _, test := range []struct { + m, n, k, lda int + }{ + {5, 5, 5, 0}, + {5, 5, 3, 0}, + {5, 3, 5, 0}, + {3, 5, 5, 0}, + {3, 4, 5, 0}, + {3, 5, 4, 0}, + {4, 3, 5, 0}, + {4, 5, 3, 0}, + {5, 3, 4, 0}, + {5, 4, 3, 0}, + + {5, 5, 5, 10}, + {5, 5, 3, 10}, + {5, 3, 5, 10}, + {3, 5, 5, 10}, + {3, 4, 5, 10}, + {3, 5, 4, 10}, + {4, 3, 5, 10}, + {4, 5, 3, 10}, + {5, 3, 4, 10}, + {5, 4, 3, 10}, + } { + m := test.m + n := test.n + k := test.k + lda := test.lda + // Filter out bad tests + if vect == lapack.GenerateQ { + if m < n || n < min(m, k) || m < min(m, k) { + continue + } + } else { + if n < m || m < min(n, k) || n < min(n, k) { + continue + } + } + // Sizes for Dorgbr. + var ma, na int + if vect == lapack.GenerateQ { + if m >= k { + ma = m + na = k + } else { + ma = m + na = m + } + } else { + if n >= k { + ma = k + na = n + } else { + ma = n + na = n + } + } + // a eventually needs to store either P or Q, so it must be + // sufficiently big. + var a []float64 + if vect == lapack.GenerateQ { + lda = max(m, lda) + a = make([]float64, m*lda) + } else { + lda = max(n, lda) + a = make([]float64, n*lda) + } + for i := range a { + a[i] = rnd.NormFloat64() + } + + nTau := min(ma, na) + tauP := make([]float64, nTau) + tauQ := make([]float64, nTau) + d := make([]float64, nTau) + e := make([]float64, nTau) + lwork := -1 + work := make([]float64, 1) + impl.Dgebrd(ma, na, a, lda, d, e, tauQ, tauP, work, lwork) + work = make([]float64, int(work[0])) + lwork = len(work) + impl.Dgebrd(ma, na, a, lda, d, e, tauQ, tauP, work, lwork) + + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + var tau []float64 + if vect == lapack.GenerateQ { + tau = tauQ + } else { + tau = tauP + } + + impl.Dorgbr(vect, m, n, k, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + lwork = len(work) + impl.Dorgbr(vect, m, n, k, a, lda, tau, work, lwork) + + var ans blas64.General + var nRows, nCols int + equal := true + if vect == lapack.GenerateQ { + nRows = m + nCols = m + if m >= k { + nCols = n + } + ans = constructQPBidiagonal(lapack.ApplyQ, ma, na, min(m, k), aCopy, lda, tau) + } else { + nRows = n + if k < n { + nRows = m + } + nCols = n + ansTmp := constructQPBidiagonal(lapack.ApplyP, ma, na, min(k, n), aCopy, lda, tau) + // Dorgbr actually computes P^T + ans = transposeGeneral(ansTmp) + } + for i := 0; i < nRows; i++ { + for j := 0; j < nCols; j++ { + if !floats.EqualWithinAbsOrRel(a[i*lda+j], ans.Data[i*ans.Stride+j], 1e-8, 1e-8) { + equal = false + } + } + } + if !equal { + t.Errorf("Extracted matrix mismatch. gen = %v, m = %v, n = %v, k = %v", string(vect), m, n, k) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorghr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorghr.go new file mode 100644 index 0000000..b2eeb1d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorghr.go @@ -0,0 +1,100 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dorghrer interface { + Dorghr(n, ilo, ihi int, a []float64, lda int, tau, work []float64, lwork int) + + Dgehrder +} + +func DorghrTest(t *testing.T, impl Dorghrer) { + rnd := rand.New(rand.NewSource(1)) + + for _, n := range []int{1, 2, 3, 4, 5, 6, 7, 8, 23, 34} { + for _, extra := range []int{0, 1, 13} { + for _, optwork := range []bool{true, false} { + for cas := 0; cas < 100; cas++ { + ilo := rnd.Intn(n) + ihi := rnd.Intn(n) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + testDorghr(t, impl, n, ilo, ihi, extra, optwork, rnd) + } + } + } + } + testDorghr(t, impl, 0, 0, -1, 0, false, rnd) + testDorghr(t, impl, 0, 0, -1, 0, true, rnd) +} + +func testDorghr(t *testing.T, impl Dorghrer, n, ilo, ihi, extra int, optwork bool, rnd *rand.Rand) { + const tol = 1e-14 + + // Construct the matrix A with elementary reflectors and scalar factors tau. + a := randomGeneral(n, n, n+extra, rnd) + var tau []float64 + if n > 1 { + tau = nanSlice(n - 1) + } + work := nanSlice(max(1, n)) // Minimum work for Dgehrd. + impl.Dgehrd(n, ilo, ihi, a.Data, a.Stride, tau, work, len(work)) + + // Extract Q for later comparison. + q := eye(n, n) + qCopy := cloneGeneral(q) + for j := ilo; j < ihi; j++ { + h := eye(n, n) + v := blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + v.Data[j+1] = 1 + for i := j + 2; i < ihi+1; i++ { + v.Data[i] = a.Data[i*a.Stride+j] + } + blas64.Ger(-tau[j], v, v, h) + copy(qCopy.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, h, 0, q) + } + + if optwork { + work = nanSlice(1) + impl.Dorghr(n, ilo, ihi, a.Data, a.Stride, tau, work, -1) + work = nanSlice(int(work[0])) + } else { + work = nanSlice(max(1, ihi-ilo)) + } + impl.Dorghr(n, ilo, ihi, a.Data, a.Stride, tau, work, len(work)) + + prefix := fmt.Sprintf("Case n=%v, ilo=%v, ihi=%v, extra=%v, optwork=%v", n, ilo, ihi, extra, optwork) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A\n%v", prefix, a.Data) + } + if !isOrthogonal(a) { + t.Errorf("%v: A is not orthogonal\n%v", prefix, a.Data) + } + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + aij := a.Data[i*a.Stride+j] + qij := q.Data[i*q.Stride+j] + if math.Abs(aij-qij) > tol { + t.Errorf("%v: unexpected value of A[%v,%v]. want %v, got %v", prefix, i, j, qij, aij) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgl2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgl2.go new file mode 100644 index 0000000..3eb6049 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgl2.go @@ -0,0 +1,66 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dorgl2er interface { + Dgelqfer + Dorgl2(m, n, k int, a []float64, lda int, tau []float64, work []float64) +} + +func Dorgl2Test(t *testing.T, impl Dorgl2er) { + rnd := rand.New(rand.NewSource(1)) + for _, test := range []struct { + m, n, lda int + }{ + {3, 3, 0}, + {3, 4, 0}, + + {5, 5, 20}, + {5, 10, 20}, + } { + m := test.m + n := test.n + lda := test.lda + if lda == 0 { + lda = test.n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + k := min(m, n) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgelqf(m, n, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgelqf(m, n, a, lda, tau, work, len(work)) + + q := constructQ("LQ", m, n, a, lda, tau) + + impl.Dorgl2(m, n, k, a, lda, tau, work) + + // Check that the first m rows match. + same := true + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + if !floats.EqualWithinAbsOrRel(q.Data[i*q.Stride+j], a[i*lda+j], 1e-12, 1e-12) { + same = false + break + } + } + } + if !same { + t.Errorf("Q mismatch") + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorglq.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorglq.go new file mode 100644 index 0000000..41cbc8a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorglq.go @@ -0,0 +1,84 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dorglqer interface { + Dorgl2er + Dorglq(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DorglqTest(t *testing.T, impl Dorglqer) { + rnd := rand.New(rand.NewSource(1)) + // TODO(btracey): Base tests off of nb and nx. + for _, test := range []struct{ m, n, k, lda int }{ + {10, 10, 10, 0}, + {10, 10, 10, 20}, + {10, 30, 10, 0}, + {20, 30, 10, 0}, + + {100, 100, 100, 0}, + {100, 100, 50, 0}, + {100, 130, 100, 0}, + {100, 130, 50, 0}, + {100, 100, 100, 150}, + {100, 100, 50, 150}, + {100, 130, 100, 150}, + {100, 130, 50, 150}, + + {200, 200, 200, 0}, + {200, 200, 150, 0}, + {200, 230, 200, 0}, + {200, 230, 150, 0}, + {200, 200, 200, 250}, + {200, 200, 150, 250}, + {200, 230, 200, 250}, + {200, 230, 150, 250}, + } { + m := test.m + n := test.n + k := test.k + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + work := make([]float64, 1) + tau := make([]float64, m) + for i := range tau { + tau[i] = math.NaN() + } + // Compute LQ factorization. + impl.Dgelqf(m, n, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgelqf(m, n, a, lda, tau, work, len(work)) + + aUnblocked := make([]float64, len(a)) + copy(aUnblocked, a) + for i := range work { + work[i] = math.NaN() + } + impl.Dorgl2(m, n, k, aUnblocked, lda, tau, work) + // make sure work isn't used before initialized + for i := range work { + work[i] = math.NaN() + } + impl.Dorglq(m, n, k, a, lda, tau, work, len(work)) + if !floats.EqualApprox(a, aUnblocked, 1e-10) { + t.Errorf("Q Mismatch. 
m = %d, n = %d, k = %d, lda = %d", m, n, k, lda) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgql.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgql.go new file mode 100644 index 0000000..c8d2a4f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgql.go @@ -0,0 +1,131 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dorgqler interface { + Dorgql(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) + + Dlarfger +} + +func DorgqlTest(t *testing.T, impl Dorgqler) { + const tol = 1e-14 + + type Dorg2ler interface { + Dorg2l(m, n, k int, a []float64, lda int, tau, work []float64) + } + dorg2ler, hasDorg2l := impl.(Dorg2ler) + + rnd := rand.New(rand.NewSource(1)) + for _, m := range []int{0, 1, 2, 3, 4, 5, 7, 10, 15, 30, 50, 150} { + for _, extra := range []int{0, 11} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + var k int + if m >= 129 { + // For large matrices make sure that k + // is large enough to trigger blocked + // path. + k = 129 + rnd.Intn(m-129+1) + } else { + k = rnd.Intn(m + 1) + } + n := k + rnd.Intn(m-k+1) + if m == 0 || n == 0 { + m = 0 + n = 0 + k = 0 + } + + // Generate k elementary reflectors in the last + // k columns of A. + a := nanGeneral(m, n, n+extra) + tau := make([]float64, k) + for l := 0; l < k; l++ { + jj := m - k + l + v := randomSlice(jj, rnd) + _, tau[l] = impl.Dlarfg(len(v)+1, rnd.NormFloat64(), v, 1) + j := n - k + l + for i := 0; i < jj; i++ { + a.Data[i*a.Stride+j] = v[i] + } + } + aCopy := cloneGeneral(a) + + // Compute the full matrix Q by forming the + // Householder reflectors explicitly. + q := eye(m, m) + qCopy := eye(m, m) + for l := 0; l < k; l++ { + h := eye(m, m) + jj := m - k + l + j := n - k + l + v := blas64.Vector{Data: make([]float64, m), Inc: 1} + for i := 0; i < jj; i++ { + v.Data[i] = a.Data[i*a.Stride+j] + } + v.Data[jj] = 1 + blas64.Ger(-tau[l], v, v, h) + copy(qCopy.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, h, qCopy, 0, q) + } + // View the last n columns of Q as 'want'. + want := blas64.General{ + Rows: m, + Cols: n, + Stride: q.Stride, + Data: q.Data[m-n:], + } + + var lwork int + switch wl { + case minimumWork: + lwork = max(1, n) + case mediumWork: + work := make([]float64, 1) + impl.Dorgql(m, n, k, a.Data, a.Stride, tau, work, -1) + lwork = (int(work[0]) + n) / 2 + lwork = max(1, lwork) + case optimumWork: + work := make([]float64, 1) + impl.Dorgql(m, n, k, a.Data, a.Stride, tau, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + // Compute the last n columns of Q by a call to + // Dorgql. + impl.Dorgql(m, n, k, a.Data, a.Stride, tau, work, len(work)) + + prefix := fmt.Sprintf("Case m=%v,n=%v,k=%v,wl=%v", m, n, k, wl) + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + if !equalApproxGeneral(want, a, tol) { + t.Errorf("%v: unexpected Q", prefix) + } + + // Compute the last n columns of Q by a call to + // Dorg2l and check that we get the same result. 
+ if !hasDorg2l { + continue + } + dorg2ler.Dorg2l(m, n, k, aCopy.Data, aCopy.Stride, tau, work) + if !equalApproxGeneral(aCopy, a, tol) { + t.Errorf("%v: mismatch between Dorgql and Dorg2l", prefix) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgqr.go new file mode 100644 index 0000000..9f9d2ad --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgqr.go @@ -0,0 +1,84 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dorgqrer interface { + Dorg2rer + Dorgqr(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DorgqrTest(t *testing.T, impl Dorgqrer) { + rnd := rand.New(rand.NewSource(1)) + // TODO(btracey): Base tests off of nb and nx. + for _, test := range []struct{ m, n, k, lda int }{ + {10, 10, 10, 0}, + {10, 10, 10, 20}, + {30, 10, 10, 0}, + {30, 20, 10, 20}, + + {100, 100, 100, 0}, + {100, 100, 50, 0}, + {130, 100, 100, 0}, + {130, 100, 50, 0}, + {100, 100, 100, 150}, + {100, 100, 50, 150}, + {130, 100, 100, 150}, + {130, 100, 50, 150}, + + {200, 200, 200, 0}, + {200, 200, 150, 0}, + {230, 200, 200, 0}, + {230, 200, 150, 0}, + {200, 200, 200, 250}, + {200, 200, 150, 250}, + {230, 200, 200, 250}, + {230, 200, 150, 250}, + } { + m := test.m + n := test.n + k := test.k + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, m*lda) + for i := range a { + a[i] = rnd.Float64() + } + work := make([]float64, 1) + tau := make([]float64, n) + for i := range tau { + tau[i] = math.NaN() + } + // Compute QR factorization. + impl.Dgeqrf(m, n, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgeqrf(m, n, a, lda, tau, work, len(work)) + + aUnblocked := make([]float64, len(a)) + copy(aUnblocked, a) + for i := range work { + work[i] = math.NaN() + } + impl.Dorg2r(m, n, k, aUnblocked, lda, tau, work) + // make sure work isn't used before initialized + for i := range work { + work[i] = math.NaN() + } + impl.Dorgqr(m, n, k, a, lda, tau, work, len(work)) + if !floats.EqualApprox(a, aUnblocked, 1e-10) { + t.Errorf("Q Mismatch. m = %d, n = %d, k = %d, lda = %d", m, n, k, lda) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgtr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgtr.go new file mode 100644 index 0000000..a44a29f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorgtr.go @@ -0,0 +1,161 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dorgtrer interface { + Dorgtr(uplo blas.Uplo, n int, a []float64, lda int, tau, work []float64, lwork int) + Dsytrder +} + +func DorgtrTest(t *testing.T, impl Dorgtrer) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + for _, test := range []struct { + n, lda int + }{ + {1, 0}, + {2, 0}, + {3, 0}, + {6, 0}, + {33, 0}, + {100, 0}, + + {1, 3}, + {2, 5}, + {3, 7}, + {6, 10}, + {33, 50}, + {100, 120}, + } { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate n×n matrix A and fill it with random numbers. + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + // Allocate slices for the main diagonal and the + // first off-diagonal of the tri-diagonal matrix. + d := make([]float64, n) + e := make([]float64, n-1) + // Allocate slice for elementary reflector scales. + tau := make([]float64, n-1) + + // Compute optimum workspace size for Dorgtr call. + work := make([]float64, 1) + impl.Dsytrd(uplo, n, a, lda, d, e, tau, work, -1) + work = make([]float64, int(work[0])) + + // Compute elementary reflectors that reduce the + // symmetric matrix defined by the uplo triangle + // of A to a tridiagonal matrix. + impl.Dsytrd(uplo, n, a, lda, d, e, tau, work, len(work)) + + // Compute workspace size for Dorgtr call. + var lwork int + switch wl { + case minimumWork: + lwork = max(1, n-1) + case mediumWork: + work := make([]float64, 1) + impl.Dorgtr(uplo, n, a, lda, tau, work, -1) + lwork = (int(work[0]) + n - 1) / 2 + lwork = max(1, lwork) + case optimumWork: + work := make([]float64, 1) + impl.Dorgtr(uplo, n, a, lda, tau, work, -1) + lwork = int(work[0]) + } + work = nanSlice(lwork) + + // Generate an orthogonal matrix Q that reduces + // the uplo triangle of A to a tridiagonal matrix. + impl.Dorgtr(uplo, n, a, lda, tau, work, len(work)) + q := blas64.General{ + Rows: n, + Cols: n, + Stride: lda, + Data: a, + } + + if !isOrthogonal(q) { + t.Errorf("Case uplo=%v,n=%v: Q is not orthogonal", uplo, n) + continue + } + + // Create the tridiagonal matrix explicitly in + // dense representation from the diagonals d and e. + tri := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < n; i++ { + tri.Data[i*tri.Stride+i] = d[i] + if i != n-1 { + tri.Data[i*tri.Stride+i+1] = e[i] + tri.Data[(i+1)*tri.Stride+i] = e[i] + } + } + + // Create the symmetric matrix A from the uplo + // triangle of aCopy, storing it explicitly in dense form. + aMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := aCopy[i*lda+j] + aMat.Data[i*aMat.Stride+j] = v + aMat.Data[j*aMat.Stride+i] = v + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + v := aCopy[i*lda+j] + aMat.Data[i*aMat.Stride+j] = v + aMat.Data[j*aMat.Stride+i] = v + } + } + } + + // Compute Q^T * A * Q and store the result in ans. 
+ tmp := blas64.General{Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n)} + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, aMat, q, 0, tmp) + ans := blas64.General{Rows: n, Cols: n, Stride: n, Data: make([]float64, n*n)} + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, tmp, 0, ans) + + // Compare the tridiagonal matrix tri from + // Dorgtr with the explicit computation ans. + if !floats.EqualApprox(ans.Data, tri.Data, 1e-13) { + t.Errorf("Recombination mismatch. n = %v, isUpper = %v", n, uplo == blas.Upper) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorm2r.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorm2r.go new file mode 100644 index 0000000..d819f91 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorm2r.go @@ -0,0 +1,140 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dorm2rer interface { + Dgeqrfer + Dorm2r(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) +} + +func Dorm2rTest(t *testing.T, impl Dorm2rer) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + common, adim, cdim, lda, ldc int + }{ + {3, 4, 5, 0, 0}, + {3, 5, 4, 0, 0}, + {4, 3, 5, 0, 0}, + {4, 5, 3, 0, 0}, + {5, 3, 4, 0, 0}, + {5, 4, 3, 0, 0}, + {3, 4, 5, 6, 20}, + {3, 5, 4, 6, 20}, + {4, 3, 5, 6, 20}, + {4, 5, 3, 6, 20}, + {5, 3, 4, 6, 20}, + {5, 4, 3, 6, 20}, + {3, 4, 5, 20, 6}, + {3, 5, 4, 20, 6}, + {4, 3, 5, 20, 6}, + {4, 5, 3, 20, 6}, + {5, 3, 4, 20, 6}, + {5, 4, 3, 20, 6}, + } { + var ma, na, mc, nc int + if side == blas.Left { + ma = test.common + na = test.adim + mc = test.common + nc = test.cdim + } else { + ma = test.common + na = test.adim + mc = test.cdim + nc = test.common + } + + // Generate a random matrix + lda := test.lda + if lda == 0 { + lda = na + } + a := make([]float64, ma*lda) + for i := range a { + a[i] = rnd.Float64() + } + ldc := test.ldc + if ldc == 0 { + ldc = nc + } + // Compute random C matrix + c := make([]float64, mc*ldc) + for i := range c { + c[i] = rnd.Float64() + } + + // Compute QR + k := min(ma, na) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgeqrf(ma, na, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgeqrf(ma, na, a, lda, tau, work, len(work)) + + // Build Q from result + q := constructQ("QR", ma, na, a, lda, tau) + + cMat := blas64.General{ + Rows: mc, + Cols: nc, + Stride: ldc, + Data: make([]float64, len(c)), + } + copy(cMat.Data, c) + cMatCopy := blas64.General{ + Rows: cMat.Rows, + Cols: cMat.Cols, + Stride: cMat.Stride, + Data: make([]float64, len(cMat.Data)), + } + copy(cMatCopy.Data, cMat.Data) + switch { + default: + panic("bad test") + case side == blas.Left && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Left && trans == blas.Trans: + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Right && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, cMatCopy, q, 0, cMat) + case side == blas.Right && trans == blas.Trans: + blas64.Gemm(blas.NoTrans, blas.Trans, 1, 
cMatCopy, q, 0, cMat) + } + // Do Dorm2r ard compare + if side == blas.Left { + work = make([]float64, nc) + } else { + work = make([]float64, mc) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + tauCopy := make([]float64, len(tau)) + copy(tauCopy, tau) + impl.Dorm2r(side, trans, mc, nc, k, a, lda, tau, c, ldc, work) + if !floats.Equal(a, aCopy) { + t.Errorf("a changed in call") + } + if !floats.Equal(tau, tauCopy) { + t.Errorf("tau changed in call") + } + if !floats.EqualApprox(cMat.Data, c, 1e-14) { + t.Errorf("Multiplication mismatch.\n Want %v \n got %v.", cMat.Data, c) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dormbr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormbr.go new file mode 100644 index 0000000..f3baed9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormbr.go @@ -0,0 +1,165 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dormbrer interface { + Dormbr(vect lapack.ApplyOrtho, side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) + Dgebrder +} + +func DormbrTest(t *testing.T, impl Dormbrer) { + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, vect := range []lapack.ApplyOrtho{lapack.ApplyQ, lapack.ApplyP} { + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + for _, test := range []struct { + m, n, k, lda, ldc int + }{ + {3, 4, 5, 0, 0}, + {3, 5, 4, 0, 0}, + {4, 3, 5, 0, 0}, + {4, 5, 3, 0, 0}, + {5, 3, 4, 0, 0}, + {5, 4, 3, 0, 0}, + + {3, 4, 5, 10, 12}, + {3, 5, 4, 10, 12}, + {4, 3, 5, 10, 12}, + {4, 5, 3, 10, 12}, + {5, 3, 4, 10, 12}, + {5, 4, 3, 10, 12}, + + {150, 140, 130, 0, 0}, + } { + m := test.m + n := test.n + k := test.k + ldc := test.ldc + if ldc == 0 { + ldc = n + } + nq := n + nw := m + if side == blas.Left { + nq = m + nw = n + } + + // Compute a decomposition. + var ma, na int + var a []float64 + if vect == lapack.ApplyQ { + ma = nq + na = k + } else { + ma = k + na = nq + } + lda := test.lda + if lda == 0 { + lda = na + } + a = make([]float64, ma*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + nTau := min(nq, k) + tauP := make([]float64, nTau) + tauQ := make([]float64, nTau) + d := make([]float64, nTau) + e := make([]float64, nTau) + + work := make([]float64, 1) + impl.Dgebrd(ma, na, a, lda, d, e, tauQ, tauP, work, -1) + work = make([]float64, int(work[0])) + impl.Dgebrd(ma, na, a, lda, d, e, tauQ, tauP, work, len(work)) + + // Apply and compare update. 
+ c := make([]float64, m*ldc) + for i := range c { + c[i] = rnd.NormFloat64() + } + cCopy := make([]float64, len(c)) + copy(cCopy, c) + + var lwork int + switch wl { + case minimumWork: + lwork = nw + case optimumWork: + impl.Dormbr(vect, side, trans, m, n, k, a, lda, tauQ, c, ldc, work, -1) + lwork = int(work[0]) + case mediumWork: + work := make([]float64, 1) + impl.Dormbr(vect, side, trans, m, n, k, a, lda, tauQ, c, ldc, work, -1) + lwork = (int(work[0]) + nw) / 2 + } + lwork = max(1, lwork) + work = make([]float64, lwork) + + if vect == lapack.ApplyQ { + impl.Dormbr(vect, side, trans, m, n, k, a, lda, tauQ, c, ldc, work, lwork) + } else { + impl.Dormbr(vect, side, trans, m, n, k, a, lda, tauP, c, ldc, work, lwork) + } + + // Check that the multiplication was correct. + cOrig := blas64.General{ + Rows: m, + Cols: n, + Stride: ldc, + Data: make([]float64, len(cCopy)), + } + copy(cOrig.Data, cCopy) + cAns := blas64.General{ + Rows: m, + Cols: n, + Stride: ldc, + Data: make([]float64, len(cCopy)), + } + copy(cAns.Data, cCopy) + nb := min(ma, na) + var mulMat blas64.General + if vect == lapack.ApplyQ { + mulMat = constructQPBidiagonal(lapack.ApplyQ, ma, na, nb, a, lda, tauQ) + } else { + mulMat = constructQPBidiagonal(lapack.ApplyP, ma, na, nb, a, lda, tauP) + } + + mulTrans := trans + + if side == blas.Left { + bi.Dgemm(mulTrans, blas.NoTrans, m, n, m, 1, mulMat.Data, mulMat.Stride, cOrig.Data, cOrig.Stride, 0, cAns.Data, cAns.Stride) + } else { + bi.Dgemm(blas.NoTrans, mulTrans, m, n, n, 1, cOrig.Data, cOrig.Stride, mulMat.Data, mulMat.Stride, 0, cAns.Data, cAns.Stride) + } + + if !floats.EqualApprox(cAns.Data, c, 1e-13) { + isApplyQ := vect == lapack.ApplyQ + isLeft := side == blas.Left + isTrans := trans == blas.Trans + + t.Errorf("C mismatch. isApplyQ: %v, isLeft: %v, isTrans: %v, m = %v, n = %v, k = %v, lda = %v, ldc = %v", + isApplyQ, isLeft, isTrans, m, n, k, lda, ldc) + } + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dormhr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormhr.go new file mode 100644 index 0000000..af13d21 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormhr.go @@ -0,0 +1,133 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dormhrer interface { + Dormhr(side blas.Side, trans blas.Transpose, m, n, ilo, ihi int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) + + Dgehrder +} + +func DormhrTest(t *testing.T, impl Dormhrer) { + rnd := rand.New(rand.NewSource(1)) + + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, m := range []int{1, 2, 3, 4, 5, 8, 9, 10, 23} { + for _, n := range []int{1, 2, 3, 4, 5, 8, 9, 10, 23} { + for _, extra := range []int{0, 1, 13} { + for cas := 0; cas < 10; cas++ { + nq := m + if side == blas.Right { + nq = n + } + ilo := rnd.Intn(nq) + ihi := rnd.Intn(nq) + if ilo > ihi { + ilo, ihi = ihi, ilo + } + testDormhr(t, impl, side, trans, m, n, ilo, ihi, extra, true, rnd) + testDormhr(t, impl, side, trans, m, n, ilo, ihi, extra, false, rnd) + } + } + } + } + } + } + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + testDormhr(t, impl, side, trans, 0, 0, 0, -1, 0, true, rnd) + testDormhr(t, impl, side, trans, 0, 0, 0, -1, 0, false, rnd) + } + } +} + +func testDormhr(t *testing.T, impl Dormhrer, side blas.Side, trans blas.Transpose, m, n, ilo, ihi, extra int, optwork bool, rnd *rand.Rand) { + const tol = 1e-14 + + var nq, nw int + switch side { + case blas.Left: + nq = m + nw = n + case blas.Right: + nq = n + nw = m + } + + // Compute the elementary reflectors and tau. + a := randomGeneral(nq, nq, nq+extra, rnd) + var tau []float64 + if nq > 1 { + tau = nanSlice(nq - 1) + } + work := nanSlice(max(1, nq)) // Minimum work for Dgehrd. + impl.Dgehrd(nq, ilo, ihi, a.Data, a.Stride, tau, work, len(work)) + + // Construct Q from the elementary reflectors in a and from tau. + q := eye(nq, nq) + qCopy := eye(nq, nq) + for j := ilo; j < ihi; j++ { + h := eye(nq, nq) + v := blas64.Vector{ + Inc: 1, + Data: make([]float64, nq), + } + v.Data[j+1] = 1 + for i := j + 2; i < ihi+1; i++ { + v.Data[i] = a.Data[i*a.Stride+j] + } + blas64.Ger(-tau[j], v, v, h) + copy(qCopy.Data, q.Data) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, h, 0, q) + } + + c := randomGeneral(m, n, n+extra, rnd) + + // Compute the product of Q and C explicitly. + qc := randomGeneral(m, n, n+extra, rnd) + if side == blas.Left { + blas64.Gemm(trans, blas.NoTrans, 1, q, c, 0, qc) + } else { + blas64.Gemm(blas.NoTrans, trans, 1, c, q, 0, qc) + } + + // Compute the product of Q and C using Dormhr. + if optwork { + work = nanSlice(1) + impl.Dormhr(side, trans, m, n, ilo, ihi, nil, a.Stride, nil, nil, c.Stride, work, -1) + work = nanSlice(int(work[0])) + } else { + work = nanSlice(max(1, nw)) + } + impl.Dormhr(side, trans, m, n, ilo, ihi, a.Data, a.Stride, tau, c.Data, c.Stride, work, len(work)) + + // Compare the two answers. 
+ prefix := fmt.Sprintf("Case side=%v, trans=%v, m=%v, n=%v, ilo=%v, ihi=%v, extra=%v, optwork=%v", + side, trans, m, n, ilo, ihi, extra, optwork) + if !generalOutsideAllNaN(c) { + t.Errorf("%v: out-of-range write to C\n%v", prefix, c.Data) + } + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + cij := c.Data[i*c.Stride+j] + qcij := qc.Data[i*qc.Stride+j] + if math.Abs(cij-qcij) > tol { + t.Errorf("%v: unexpected value of the QC product at [%v,%v]: want %v, got %v", prefix, i, j, qcij, cij) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dorml2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorml2.go new file mode 100644 index 0000000..17fc112 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dorml2.go @@ -0,0 +1,145 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dorml2er interface { + Dgelqfer + Dorml2(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) +} + +func Dorml2Test(t *testing.T, impl Dorml2er) { + rnd := rand.New(rand.NewSource(1)) + // TODO(btracey): This test is not complete, because it + // doesn't test individual values of m, n, and k, instead only testing + // a specific subset of possible k values. + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + common, adim, cdim, lda, ldc int + }{ + {3, 4, 5, 0, 0}, + {3, 5, 4, 0, 0}, + {4, 3, 5, 0, 0}, + {4, 5, 3, 0, 0}, + {5, 3, 4, 0, 0}, + {5, 4, 3, 0, 0}, + + {3, 4, 5, 6, 20}, + {3, 5, 4, 6, 20}, + {4, 3, 5, 6, 20}, + {4, 5, 3, 6, 20}, + {5, 3, 4, 6, 20}, + {5, 4, 3, 6, 20}, + {3, 4, 5, 20, 6}, + {3, 5, 4, 20, 6}, + {4, 3, 5, 20, 6}, + {4, 5, 3, 20, 6}, + {5, 3, 4, 20, 6}, + {5, 4, 3, 20, 6}, + } { + var ma, na, mc, nc int + if side == blas.Left { + ma = test.adim + na = test.common + mc = test.common + nc = test.cdim + } else { + ma = test.adim + na = test.common + mc = test.cdim + nc = test.common + } + // Generate a random matrix + lda := test.lda + if lda == 0 { + lda = na + } + a := make([]float64, ma*lda) + for i := range a { + a[i] = rnd.Float64() + } + ldc := test.ldc + if ldc == 0 { + ldc = nc + } + // Compute random C matrix + c := make([]float64, mc*ldc) + for i := range c { + c[i] = rnd.Float64() + } + + // Compute LQ + k := min(ma, na) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgelqf(ma, na, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgelqf(ma, na, a, lda, tau, work, len(work)) + + // Build Q from result + q := constructQ("LQ", ma, na, a, lda, tau) + + cMat := blas64.General{ + Rows: mc, + Cols: nc, + Stride: ldc, + Data: make([]float64, len(c)), + } + copy(cMat.Data, c) + cMatCopy := blas64.General{ + Rows: cMat.Rows, + Cols: cMat.Cols, + Stride: cMat.Stride, + Data: make([]float64, len(cMat.Data)), + } + copy(cMatCopy.Data, cMat.Data) + switch { + default: + panic("bad test") + case side == blas.Left && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Left && trans == blas.Trans: + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Right && trans == blas.NoTrans: 
+ blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, cMatCopy, q, 0, cMat) + case side == blas.Right && trans == blas.Trans: + blas64.Gemm(blas.NoTrans, blas.Trans, 1, cMatCopy, q, 0, cMat) + } + // Do Dorm2r ard compare + if side == blas.Left { + work = make([]float64, nc) + } else { + work = make([]float64, mc) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + tauCopy := make([]float64, len(tau)) + copy(tauCopy, tau) + impl.Dorml2(side, trans, mc, nc, k, a, lda, tau, c, ldc, work) + if !floats.Equal(a, aCopy) { + t.Errorf("a changed in call") + } + if !floats.Equal(tau, tauCopy) { + t.Errorf("tau changed in call") + } + if !floats.EqualApprox(cMat.Data, c, 1e-14) { + isLeft := side == blas.Left + isTrans := trans == blas.Trans + t.Errorf("Multiplication mismatch. IsLeft = %v. IsTrans = %v", isLeft, isTrans) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dormlq.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormlq.go new file mode 100644 index 0000000..5a84c6f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormlq.go @@ -0,0 +1,131 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dormlqer interface { + Dorml2er + Dormlq(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) +} + +func DormlqTest(t *testing.T, impl Dormlqer) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + for _, test := range []struct { + common, adim, cdim, lda, ldc int + }{ + {0, 0, 0, 0, 0}, + {6, 7, 8, 0, 0}, + {6, 8, 7, 0, 0}, + {7, 6, 8, 0, 0}, + {7, 8, 6, 0, 0}, + {8, 6, 7, 0, 0}, + {8, 7, 6, 0, 0}, + {100, 200, 300, 0, 0}, + {100, 300, 200, 0, 0}, + {200, 100, 300, 0, 0}, + {200, 300, 100, 0, 0}, + {300, 100, 200, 0, 0}, + {300, 200, 100, 0, 0}, + {100, 200, 300, 400, 500}, + {100, 300, 200, 400, 500}, + {200, 100, 300, 400, 500}, + {200, 300, 100, 400, 500}, + {300, 100, 200, 400, 500}, + {300, 200, 100, 400, 500}, + {100, 200, 300, 500, 400}, + {100, 300, 200, 500, 400}, + {200, 100, 300, 500, 400}, + {200, 300, 100, 500, 400}, + {300, 100, 200, 500, 400}, + {300, 200, 100, 500, 400}, + } { + var ma, na, mc, nc int + if side == blas.Left { + ma = test.adim + na = test.common + mc = test.common + nc = test.cdim + } else { + ma = test.adim + na = test.common + mc = test.cdim + nc = test.common + } + // Generate a random matrix + lda := test.lda + if lda == 0 { + lda = max(1, na) + } + a := make([]float64, ma*lda) + for i := range a { + a[i] = rnd.Float64() + } + // Compute random C matrix + ldc := test.ldc + if ldc == 0 { + ldc = nc + } + c := make([]float64, mc*ldc) + for i := range c { + c[i] = rnd.Float64() + } + + // Compute LQ + k := min(ma, na) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgelqf(ma, na, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgelqf(ma, na, a, lda, tau, work, len(work)) + + cCopy := make([]float64, len(c)) + copy(cCopy, c) + ans := make([]float64, len(c)) + copy(ans, cCopy) + + var nw int + if side == blas.Left { + nw = nc + } else { + nw = mc + } + work = make([]float64, max(1, 
nw)) + impl.Dorml2(side, trans, mc, nc, k, a, lda, tau, ans, ldc, work) + + var lwork int + switch wl { + case minimumWork: + lwork = nw + case optimumWork: + impl.Dormlq(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, -1) + lwork = int(work[0]) + case mediumWork: + work := make([]float64, 1) + impl.Dormlq(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, -1) + lwork = (int(work[0]) + nw) / 2 + } + lwork = max(1, lwork) + work = make([]float64, lwork) + + impl.Dormlq(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, lwork) + if !floats.EqualApprox(c, ans, 1e-13) { + t.Errorf("Dormqr and Dorm2r results mismatch") + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dormqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormqr.go new file mode 100644 index 0000000..b3f03d6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormqr.go @@ -0,0 +1,155 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dormqrer interface { + Dorm2rer + Dormqr(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64, lwork int) +} + +func DormqrTest(t *testing.T, impl Dormqrer) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + common, adim, cdim, lda, ldc int + }{ + {6, 7, 8, 0, 0}, + {6, 8, 7, 0, 0}, + {7, 6, 8, 0, 0}, + {7, 8, 6, 0, 0}, + {8, 6, 7, 0, 0}, + {8, 7, 6, 0, 0}, + {100, 200, 300, 0, 0}, + {100, 300, 200, 0, 0}, + {200, 100, 300, 0, 0}, + {200, 300, 100, 0, 0}, + {300, 100, 200, 0, 0}, + {300, 200, 100, 0, 0}, + {100, 200, 300, 400, 500}, + {100, 300, 200, 400, 500}, + {200, 100, 300, 400, 500}, + {200, 300, 100, 400, 500}, + {300, 100, 200, 400, 500}, + {300, 200, 100, 400, 500}, + {100, 200, 300, 500, 400}, + {100, 300, 200, 500, 400}, + {200, 100, 300, 500, 400}, + {200, 300, 100, 500, 400}, + {300, 100, 200, 500, 400}, + {300, 200, 100, 500, 400}, + } { + var ma, na, mc, nc int + if side == blas.Left { + ma = test.common + na = test.adim + mc = test.common + nc = test.cdim + } else { + ma = test.common + na = test.adim + mc = test.cdim + nc = test.common + } + // Generate a random matrix + lda := test.lda + if lda == 0 { + lda = na + } + a := make([]float64, ma*lda) + for i := range a { + a[i] = rnd.Float64() + } + // Compute random C matrix + ldc := test.ldc + if ldc == 0 { + ldc = nc + } + c := make([]float64, mc*ldc) + for i := range c { + c[i] = rnd.Float64() + } + + // Compute QR + k := min(ma, na) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgeqrf(ma, na, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgeqrf(ma, na, a, lda, tau, work, len(work)) + + cCopy := make([]float64, len(c)) + copy(cCopy, c) + ans := make([]float64, len(c)) + copy(ans, cCopy) + + if side == blas.Left { + work = make([]float64, nc) + } else { + work = make([]float64, mc) + } + impl.Dorm2r(side, trans, mc, nc, k, a, lda, tau, ans, ldc, work) + + // Make sure Dorm2r and Dormqr match with small work + for i := range work { + work[i] = rnd.Float64() + } + copy(c, cCopy) + impl.Dormqr(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, len(work)) + if 
!floats.EqualApprox(c, ans, 1e-12) { + t.Errorf("Dormqr and Dorm2r mismatch for small work") + } + + // Try with the optimum amount of work + copy(c, cCopy) + impl.Dormqr(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, -1) + work = make([]float64, int(work[0])) + for i := range work { + work[i] = rnd.Float64() + } + impl.Dormqr(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, len(work)) + if !floats.EqualApprox(c, ans, 1e-12) { + t.Errorf("Dormqr and Dorm2r mismatch for full work") + fmt.Println("ccopy") + for i := 0; i < mc; i++ { + fmt.Println(cCopy[i*ldc : (i+1)*ldc]) + } + fmt.Println("ans =") + for i := 0; i < mc; i++ { + fmt.Println(ans[i*ldc : (i+1)*ldc]) + } + fmt.Println("c =") + for i := 0; i < mc; i++ { + fmt.Println(c[i*ldc : (i+1)*ldc]) + } + } + + // Try with amount of work that is less than + // optimal but still long enough to use the + // blocked code. + copy(c, cCopy) + if side == blas.Left { + work = make([]float64, 3*nc) + } else { + work = make([]float64, 3*mc) + } + impl.Dormqr(side, trans, mc, nc, k, a, lda, tau, c, ldc, work, len(work)) + if !floats.EqualApprox(c, ans, 1e-12) { + t.Errorf("Dormqr and Dorm2r mismatch for medium work") + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dormr2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormr2.go new file mode 100644 index 0000000..a05b0a1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dormr2.go @@ -0,0 +1,138 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dormr2er interface { + Dgerqf(m, n int, a []float64, lda int, tau, work []float64, lwork int) + Dormr2(side blas.Side, trans blas.Transpose, m, n, k int, a []float64, lda int, tau, c []float64, ldc int, work []float64) +} + +func Dormr2Test(t *testing.T, impl Dormr2er) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []blas.Side{blas.Left, blas.Right} { + for _, trans := range []blas.Transpose{blas.NoTrans, blas.Trans} { + for _, test := range []struct { + common, adim, cdim, lda, ldc int + }{ + {3, 4, 5, 0, 0}, + {3, 5, 4, 0, 0}, + {4, 3, 5, 0, 0}, + {4, 5, 3, 0, 0}, + {5, 3, 4, 0, 0}, + {5, 4, 3, 0, 0}, + {3, 4, 5, 6, 20}, + {3, 5, 4, 6, 20}, + {4, 3, 5, 6, 20}, + {4, 5, 3, 6, 20}, + {5, 3, 4, 6, 20}, + {5, 4, 3, 6, 20}, + {3, 4, 5, 20, 6}, + {3, 5, 4, 20, 6}, + {4, 3, 5, 20, 6}, + {4, 5, 3, 20, 6}, + {5, 3, 4, 20, 6}, + {5, 4, 3, 20, 6}, + } { + ma := test.adim + na := test.common + var mc, nc int + if side == blas.Left { + mc = test.common + nc = test.cdim + } else { + mc = test.cdim + nc = test.common + } + + // Generate a random matrix + lda := test.lda + if lda == 0 { + lda = na + } + a := make([]float64, ma*lda) + for i := range a { + a[i] = rnd.Float64() + } + ldc := test.ldc + if ldc == 0 { + ldc = nc + } + // Compute random C matrix + c := make([]float64, mc*ldc) + for i := range c { + c[i] = rnd.Float64() + } + + // Compute RQ + k := min(ma, na) + tau := make([]float64, k) + work := make([]float64, 1) + impl.Dgerqf(ma, na, a, lda, tau, work, -1) + work = make([]float64, int(work[0])) + impl.Dgerqf(ma, na, a, lda, tau, work, len(work)) + + // Build Q from result + q := constructQ("RQ", ma, na, a, lda, tau) + + cMat := blas64.General{ + Rows: mc, + Cols: nc, + Stride: ldc, + Data: make([]float64, 
len(c)), + } + copy(cMat.Data, c) + cMatCopy := blas64.General{ + Rows: cMat.Rows, + Cols: cMat.Cols, + Stride: cMat.Stride, + Data: make([]float64, len(cMat.Data)), + } + copy(cMatCopy.Data, cMat.Data) + switch { + default: + panic("bad test") + case side == blas.Left && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Left && trans == blas.Trans: + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, cMatCopy, 0, cMat) + case side == blas.Right && trans == blas.NoTrans: + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, cMatCopy, q, 0, cMat) + case side == blas.Right && trans == blas.Trans: + blas64.Gemm(blas.NoTrans, blas.Trans, 1, cMatCopy, q, 0, cMat) + } + // Do Dorm2r ard compare + if side == blas.Left { + work = make([]float64, nc) + } else { + work = make([]float64, mc) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + tauCopy := make([]float64, len(tau)) + copy(tauCopy, tau) + impl.Dormr2(side, trans, mc, nc, k, a[(ma-k)*lda:], lda, tau, c, ldc, work) + if !floats.Equal(a, aCopy) { + t.Errorf("a changed in call") + } + if !floats.Equal(tau, tauCopy) { + t.Errorf("tau changed in call") + } + if !floats.EqualApprox(cMat.Data, c, 1e-14) { + t.Errorf("Multiplication mismatch.\n Want %v \n got %v.", cMat.Data, c) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpbtf2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpbtf2.go new file mode 100644 index 0000000..003fcab --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpbtf2.go @@ -0,0 +1,52 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" +) + +type Dpbtf2er interface { + Dpbtf2(ul blas.Uplo, n, kd int, ab []float64, ldab int) (ok bool) + Dpotrfer +} + +func Dpbtf2Test(t *testing.T, impl Dpbtf2er) { + // Test random symmetric banded matrices against the full version. + rnd := rand.New(rand.NewSource(1)) + + for _, n := range []int{5, 10, 20} { + for _, kb := range []int{0, 1, 3, n - 1} { + for _, ldoff := range []int{0, 4} { + for _, ul := range []blas.Uplo{blas.Upper, blas.Lower} { + ldab := kb + 1 + ldoff + sym, band := randSymBand(ul, n, ldab, kb, rnd) + + // Compute the Cholesky decomposition of the symmetric matrix. + ok := impl.Dpotrf(ul, sym.N, sym.Data, sym.Stride) + if !ok { + panic("bad test: symmetric cholesky decomp failed") + } + + // Compute the Cholesky decomposition of the banded matrix. + ok = impl.Dpbtf2(band.Uplo, band.N, band.K, band.Data, band.Stride) + if !ok { + t.Errorf("SymBand cholesky decomp failed") + } + + // Compare the result to the Symmetric decomposition. + sb := symBandToSym(ul, band.Data, n, kb, ldab) + if !equalApproxSymmetric(sym, sb, 1e-10) { + t.Errorf("chol mismatch banded and sym. n = %v, kb = %v, ldoff = %v", n, kb, ldoff) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpocon.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpocon.go new file mode 100644 index 0000000..7665cd2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpocon.go @@ -0,0 +1,158 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "log" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dpoconer interface { + Dpotrfer + Dgeconer + Dlansy(norm lapack.MatrixNorm, uplo blas.Uplo, n int, a []float64, lda int, work []float64) float64 + Dpocon(uplo blas.Uplo, n int, a []float64, lda int, anorm float64, work []float64, iwork []int) float64 +} + +func DpoconTest(t *testing.T, impl Dpoconer) { + for _, test := range []struct { + a []float64 + n int + cond float64 + uplo blas.Uplo + }{ + { + a: []float64{ + 89, 59, 77, + 0, 107, 59, + 0, 0, 89, + }, + uplo: blas.Upper, + n: 3, + cond: 0.050052137643379, + }, + { + a: []float64{ + 89, 0, 0, + 59, 107, 0, + 77, 59, 89, + }, + uplo: blas.Lower, + n: 3, + cond: 0.050052137643379, + }, + // Dgecon does not match Dpocon for this case. https://github.com/xianyi/OpenBLAS/issues/664. + { + a: []float64{ + 2.9995576045549965, -2.0898894566158663, 3.965560740124006, + 0, 1.9634729526261008, -2.8681002706874104, + 0, 0, 5.502416670471008, + }, + uplo: blas.Upper, + n: 3, + cond: 0.024054837369015203, + }, + } { + n := test.n + a := make([]float64, len(test.a)) + copy(a, test.a) + lda := n + uplo := test.uplo + work := make([]float64, 3*n) + anorm := impl.Dlansy(lapack.MaxColumnSum, uplo, n, a, lda, work) + // Compute cholesky decomposition + ok := impl.Dpotrf(uplo, n, a, lda) + if !ok { + t.Errorf("Bad test, matrix not positive definite") + continue + } + iwork := make([]int, n) + cond := impl.Dpocon(uplo, n, a, lda, anorm, work, iwork) + // Error if not the same order, otherwise log the difference. + if !floats.EqualWithinAbsOrRel(cond, test.cond, 1e0, 1e0) { + t.Errorf("Cond mismatch. Want %v, got %v.", test.cond, cond) + } else if !floats.EqualWithinAbsOrRel(cond, test.cond, 1e-14, 1e-14) { + log.Printf("Dpocon cond mismatch. Want %v, got %v.", test.cond, cond) + } + } + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + // Randomized tests compared against Dgecon. + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, test := range []struct { + n, lda int + }{ + {3, 0}, + {3, 5}, + } { + for trial := 0; trial < 100; trial++ { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + + // Multiply a by itself to make it symmetric positive definite. 
+ aCopy := make([]float64, len(a)) + copy(aCopy, a) + bi.Dgemm(blas.Trans, blas.NoTrans, n, n, n, 1, aCopy, lda, aCopy, lda, 0, a, lda) + + aDat := make([]float64, len(aCopy)) + copy(aDat, a) + + aDense := make([]float64, len(a)) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := a[i*lda+j] + aDense[i*lda+j] = v + aDense[j*lda+i] = v + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + v := a[i*lda+j] + aDense[i*lda+j] = v + aDense[j*lda+i] = v + } + } + } + work := make([]float64, 4*n) + iwork := make([]int, n) + + anorm := impl.Dlansy(lapack.MaxColumnSum, uplo, n, a, lda, work) + ok := impl.Dpotrf(uplo, n, a, lda) + if !ok { + t.Errorf("Bad test, matrix not positive definite") + continue + } + got := impl.Dpocon(uplo, n, a, lda, anorm, work, iwork) + + denseNorm := impl.Dlange(lapack.MaxColumnSum, n, n, aDense, lda, work) + ipiv := make([]int, n) + impl.Dgetrf(n, n, aDense, lda, ipiv) + want := impl.Dgecon(lapack.MaxColumnSum, n, aDense, lda, denseNorm, work, iwork) + // Error if not the same order, otherwise log the difference. + if !floats.EqualWithinAbsOrRel(want, got, 1e0, 1e0) { + t.Errorf("Dpocon and Dgecon mismatch. Dpocon %v, Dgecon %v.", got, want) + } else if !floats.EqualWithinAbsOrRel(want, got, 1e-14, 1e-14) { + log.Printf("Dpocon and Dgecon mismatch. Dpocon %v, Dgecon %v.", got, want) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotf2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotf2.go new file mode 100644 index 0000000..29612c7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotf2.go @@ -0,0 +1,117 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" +) + +type Dpotf2er interface { + Dpotf2(ul blas.Uplo, n int, a []float64, lda int) (ok bool) +} + +func Dpotf2Test(t *testing.T, impl Dpotf2er) { + for _, test := range []struct { + a [][]float64 + pos bool + U [][]float64 + }{ + { + a: [][]float64{ + {23, 37, 34, 32}, + {108, 71, 48, 48}, + {109, 109, 67, 58}, + {106, 107, 106, 63}, + }, + pos: true, + U: [][]float64{ + {4.795831523312719, 7.715033320111766, 7.089490077940543, 6.672461249826393}, + {0, 3.387958215439679, -1.976308959006481, -1.026654004678691}, + {0, 0, 3.582364210034111, 2.419258947036024}, + {0, 0, 0, 3.401680257083044}, + }, + }, + { + a: [][]float64{ + {8, 2}, + {2, 4}, + }, + pos: true, + U: [][]float64{ + {2.82842712474619, 0.707106781186547}, + {0, 1.870828693386971}, + }, + }, + } { + testDpotf2(t, impl, test.pos, test.a, test.U, len(test.a[0]), blas.Upper) + testDpotf2(t, impl, test.pos, test.a, test.U, len(test.a[0])+5, blas.Upper) + aT := transpose(test.a) + L := transpose(test.U) + testDpotf2(t, impl, test.pos, aT, L, len(test.a[0]), blas.Lower) + testDpotf2(t, impl, test.pos, aT, L, len(test.a[0])+5, blas.Lower) + } +} + +func testDpotf2(t *testing.T, impl Dpotf2er, testPos bool, a, ans [][]float64, stride int, ul blas.Uplo) { + aFlat := flattenTri(a, stride, ul) + ansFlat := flattenTri(ans, stride, ul) + pos := impl.Dpotf2(ul, len(a[0]), aFlat, stride) + if pos != testPos { + t.Errorf("Positive definite mismatch: Want %v, Got %v", testPos, pos) + return + } + if testPos && !floats.EqualApprox(ansFlat, aFlat, 1e-14) { + t.Errorf("Result mismatch: Want %v, Got %v", ansFlat, aFlat) + } +} + +// flattenTri with a certain stride. stride must be >= dimension. Puts repeatable +// nonce values in non-accessed places +func flattenTri(a [][]float64, stride int, ul blas.Uplo) []float64 { + m := len(a) + n := len(a[0]) + if stride < n { + panic("bad stride") + } + upper := ul == blas.Upper + v := make([]float64, m*stride) + count := 1000.0 + for i := 0; i < m; i++ { + for j := 0; j < stride; j++ { + if j >= n || (upper && j < i) || (!upper && j > i) { + // not accessed, so give a unique crazy number + v[i*stride+j] = count + count++ + continue + } + v[i*stride+j] = a[i][j] + } + } + return v +} + +func transpose(a [][]float64) [][]float64 { + m := len(a) + n := len(a[0]) + if m != n { + panic("not square") + } + aNew := make([][]float64, m) + for i := 0; i < m; i++ { + aNew[i] = make([]float64, n) + } + for i := 0; i < m; i++ { + if len(a[i]) != n { + panic("bad n size") + } + for j := 0; j < n; j++ { + aNew[j][i] = a[i][j] + } + } + return aNew +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrf.go new file mode 100644 index 0000000..b7ce0a5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrf.go @@ -0,0 +1,136 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dpotrfer interface { + Dpotrf(ul blas.Uplo, n int, a []float64, lda int) (ok bool) +} + +func DpotrfTest(t *testing.T, impl Dpotrfer) { + const tol = 1e-13 + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for tc, test := range []struct { + n int + lda int + }{ + {1, 0}, + {2, 0}, + {3, 0}, + {10, 0}, + {30, 0}, + {63, 0}, + {65, 0}, + {127, 0}, + {129, 0}, + {500, 0}, + {1, 10}, + {2, 10}, + {3, 10}, + {10, 20}, + {30, 50}, + {63, 100}, + {65, 100}, + {127, 200}, + {129, 200}, + {500, 600}, + } { + n := test.n + + // Random diagonal matrix D with positive entries. + d := make([]float64, n) + Dlatm1(d, 4, 10000, false, 1, rnd) + + // Construct a positive definite matrix A as + // A = U * D * U^T + // where U is a random orthogonal matrix. + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, n*lda) + Dlagsy(n, 0, d, a, lda, rnd, make([]float64, 2*n)) + + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + ok := impl.Dpotrf(uplo, n, a, lda) + if !ok { + t.Errorf("Case %v: unexpected failure for positive definite matrix", tc) + continue + } + + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = 0 + } + } + case blas.Lower: + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + a[i*lda+j] = 0 + } + } + default: + panic("bad uplo") + } + + ans := make([]float64, len(a)) + switch uplo { + case blas.Upper: + // Multiply U^T * U. + bi.Dsyrk(uplo, blas.Trans, n, n, 1, a, lda, 0, ans, lda) + case blas.Lower: + // Multiply L * L^T. + bi.Dsyrk(uplo, blas.NoTrans, n, n, 1, a, lda, 0, ans, lda) + } + + match := true + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + if !floats.EqualWithinAbsOrRel(ans[i*lda+j], aCopy[i*lda+j], tol, tol) { + match = false + } + } + } + case blas.Lower: + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + if !floats.EqualWithinAbsOrRel(ans[i*lda+j], aCopy[i*lda+j], tol, tol) { + match = false + } + } + } + } + if !match { + t.Errorf("Case %v (uplo=%v,n=%v,lda=%v): unexpected result", tc, uplo, n, lda) + } + + // Make one element of D negative so that A is not + // positive definite, and check that Dpotrf fails. + d[0] *= -1 + Dlagsy(n, 0, d, a, lda, rnd, make([]float64, 2*n)) + ok = impl.Dpotrf(uplo, n, a, lda) + if ok { + t.Errorf("Case %v: unexpected success for not positive definite matrix", tc) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotri.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotri.go new file mode 100644 index 0000000..7f21803 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotri.go @@ -0,0 +1,109 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dpotrier interface { + Dpotri(uplo blas.Uplo, n int, a []float64, lda int) bool + + Dpotrf(uplo blas.Uplo, n int, a []float64, lda int) bool +} + +func DpotriTest(t *testing.T, impl Dpotrier) { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + name := "Upper" + if uplo == blas.Lower { + name = "Lower" + } + t.Run(name, func(t *testing.T) { + // Include small and large sizes to make sure that both + // unblocked and blocked paths are taken. + ns := []int{0, 1, 2, 3, 4, 5, 10, 25, 31, 32, 33, 63, 64, 65, 127, 128, 129} + const tol = 1e-12 + + bi := blas64.Implementation() + rnd := rand.New(rand.NewSource(1)) + for _, n := range ns { + for _, lda := range []int{max(1, n), n + 11} { + prefix := fmt.Sprintf("n=%v,lda=%v", n, lda) + + // Generate a random diagonal matrix D with positive entries. + d := make([]float64, n) + Dlatm1(d, 3, 10000, false, 2, rnd) + + // Construct a positive definite matrix A as + // A = U * D * U^T + // where U is a random orthogonal matrix. + a := make([]float64, n*lda) + Dlagsy(n, 0, d, a, lda, rnd, make([]float64, 2*n)) + // Create a copy of A. + aCopy := make([]float64, len(a)) + copy(aCopy, a) + // Compute the Cholesky factorization of A. + ok := impl.Dpotrf(uplo, n, a, lda) + if !ok { + t.Fatalf("%v: unexpected Cholesky failure", prefix) + } + + // Compute the inverse inv(A). + ok = impl.Dpotri(uplo, n, a, lda) + if !ok { + t.Errorf("%v: unexpected failure", prefix) + continue + } + + // Check that the triangle of A opposite to uplo has not been modified. + if uplo == blas.Upper && !sameLowerTri(n, aCopy, lda, a, lda) { + t.Errorf("%v: unexpected modification in lower triangle", prefix) + continue + } + if uplo == blas.Lower && !sameUpperTri(n, aCopy, lda, a, lda) { + t.Errorf("%v: unexpected modification in upper triangle", prefix) + continue + } + + // Change notation for the sake of clarity. + ainv := a + ldainv := lda + + // Expand ainv into a full dense matrix so that we can call Dsymm below. + if uplo == blas.Upper { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + ainv[i*ldainv+j] = ainv[j*ldainv+i] + } + } + } else { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + ainv[i*ldainv+j] = ainv[j*ldainv+i] + } + } + } + + // Compute A*inv(A) and store the result into want. + ldwant := max(1, n) + want := make([]float64, n*ldwant) + bi.Dsymm(blas.Left, uplo, n, n, 1, aCopy, lda, ainv, ldainv, 0, want, ldwant) + + // Check that want is close to the identity matrix. + dist := distFromIdentity(n, want, ldwant) + if dist > tol { + t.Errorf("%v: |A * inv(A) - I| = %v is too large", prefix, dist) + } + } + } + }) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrs.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrs.go new file mode 100644 index 0000000..e93619c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dpotrs.go @@ -0,0 +1,93 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dpotrser interface { + Dpotrs(uplo blas.Uplo, n, nrhs int, a []float64, lda int, b []float64, ldb int) + + Dpotrf(uplo blas.Uplo, n int, a []float64, lda int) bool +} + +func DpotrsTest(t *testing.T, impl Dpotrser) { + const tol = 1e-14 + + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, n := range []int{1, 2, 5} { + for _, nrhs := range []int{1, 2, 5, 10} { + for _, ld := range []struct{ a, b int }{ + {n, nrhs}, + {n + 7, nrhs}, + {n, nrhs + 3}, + {n + 7, nrhs + 3}, + } { + // Construct a random SPD matrix A by first making a symmetric matrix + // and then ensuring that it is diagonally dominant. + a := nanGeneral(n, n, ld.a) + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := rnd.Float64() + a.Data[i*a.Stride+j] = v + a.Data[j*a.Stride+i] = v + } + } + for i := 0; i < n; i++ { + a.Data[i*a.Stride+i] += float64(n) + } + + // Generate a random solution X. + want := nanGeneral(n, nrhs, ld.b) + for i := 0; i < n; i++ { + for j := 0; j < nrhs; j++ { + want.Data[i*want.Stride+j] = rnd.NormFloat64() + } + } + + // Compute the right-hand side matrix as A * X. + b := nanGeneral(n, nrhs, ld.b) + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, nrhs, n, 1, a.Data, a.Stride, want.Data, want.Stride, 0, b.Data, b.Stride) + + // Compute the Cholesky decomposition of A. + ok := impl.Dpotrf(uplo, n, a.Data, a.Stride) + if !ok { + panic("bad test") + } + + aCopy := cloneGeneral(a) + + // Solve A * X = B. + impl.Dpotrs(uplo, n, nrhs, a.Data, a.Stride, b.Data, b.Stride) + + name := fmt.Sprintf("uplo=%v,n=%v,nrhs=%v,lda=%v,ldb=%v", uplo, n, nrhs, a.Stride, b.Stride) + + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range modification of A", name) + } + if !equalApproxGeneral(a, aCopy, 0) { + t.Errorf("%v: unexpected modification of A", name) + } + if !generalOutsideAllNaN(b) { + t.Errorf("%v: out-of-range modification of B", name) + } + if !equalApproxGeneral(b, want, tol) { + t.Errorf("%v: unexpected result\ngot %v\nwant %v", name, b, want) + } + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/drscl.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/drscl.go new file mode 100644 index 0000000..41e1fc7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/drscl.go @@ -0,0 +1,52 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "gonum.org/v1/gonum/floats" +) + +type Drscler interface { + Drscl(n int, a float64, x []float64, incX int) +} + +func DrsclTest(t *testing.T, impl Drscler) { + for _, test := range []struct { + x []float64 + a float64 + }{ + { + x: []float64{1, 2, 3, 4, 5}, + a: 4, + }, + { + x: []float64{1, 2, 3, 4, 5}, + a: math.MaxFloat64, + }, + { + x: []float64{1, 2, 3, 4, 5}, + a: 1e-307, + }, + } { + xcopy := make([]float64, len(test.x)) + copy(xcopy, test.x) + + // Cannot test the scaling directly because of floating point scaling issues + // (the purpose of Drscl). Instead, check that scaling and scaling back + // yeilds approximately x. If overflow or underflow occurs then the scaling + // won't match. + impl.Drscl(len(test.x), test.a, xcopy, 1) + if floats.Equal(xcopy, test.x) { + t.Errorf("x unchanged during call to drscl. 
a = %v, x = %v.", test.a, test.x) + } + impl.Drscl(len(test.x), 1/test.a, xcopy, 1) + if !floats.EqualApprox(xcopy, test.x, 1e-14) { + t.Errorf("x not equal after scaling and unscaling. a = %v, x = %v.", test.a, test.x) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dsteqr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsteqr.go new file mode 100644 index 0000000..e365d52 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsteqr.go @@ -0,0 +1,173 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dsteqrer interface { + Dsteqr(compz lapack.EVComp, n int, d, e, z []float64, ldz int, work []float64) (ok bool) + Dorgtrer +} + +func DsteqrTest(t *testing.T, impl Dsteqrer) { + rnd := rand.New(rand.NewSource(1)) + for _, compz := range []lapack.EVComp{lapack.EVOrig, lapack.EVTridiag} { + for _, test := range []struct { + n, lda int + }{ + {1, 0}, + {4, 0}, + {8, 0}, + {10, 0}, + + {2, 10}, + {8, 10}, + {10, 20}, + } { + for cas := 0; cas < 100; cas++ { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + d := make([]float64, n) + for i := range d { + d[i] = rnd.Float64() + } + e := make([]float64, n-1) + for i := range e { + e[i] = rnd.Float64() + } + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.Float64() + } + dCopy := make([]float64, len(d)) + copy(dCopy, d) + eCopy := make([]float64, len(e)) + copy(eCopy, e) + aCopy := make([]float64, len(a)) + copy(aCopy, a) + if compz == lapack.EVOrig { + uplo := blas.Upper + tau := make([]float64, n) + work := make([]float64, 1) + impl.Dsytrd(blas.Upper, n, a, lda, d, e, tau, work, -1) + work = make([]float64, int(work[0])) + // Reduce A to symmetric tridiagonal form. + impl.Dsytrd(uplo, n, a, lda, d, e, tau, work, len(work)) + // Compute the orthogonal matrix Q. + impl.Dorgtr(uplo, n, a, lda, tau, work, len(work)) + } else { + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + if i == j { + a[i*lda+j] = 1 + } + } + } + } + work := make([]float64, 2*n) + + aDecomp := make([]float64, len(a)) + copy(aDecomp, a) + dDecomp := make([]float64, len(d)) + copy(dDecomp, d) + eDecomp := make([]float64, len(e)) + copy(eDecomp, e) + impl.Dsteqr(compz, n, d, e, a, lda, work) + dAns := make([]float64, len(d)) + copy(dAns, d) + + var truth blas64.General + if compz == lapack.EVOrig { + truth = blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := aCopy[i*lda+j] + truth.Data[i*truth.Stride+j] = v + truth.Data[j*truth.Stride+i] = v + } + } + } else { + truth = blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < n; i++ { + truth.Data[i*truth.Stride+i] = dCopy[i] + if i != n-1 { + truth.Data[(i+1)*truth.Stride+i] = eCopy[i] + truth.Data[i*truth.Stride+i+1] = eCopy[i] + } + } + } + + V := blas64.General{ + Rows: n, + Cols: n, + Stride: lda, + Data: a, + } + if !eigenDecompCorrect(d, truth, V) { + t.Errorf("Eigen reconstruction mismatch. fromFull = %v, n = %v", + compz == lapack.EVOrig, n) + } + + // Compare eigenvalues when not computing eigenvectors. 
+ for i := range work { + work[i] = rnd.Float64() + } + impl.Dsteqr(lapack.EVCompNone, n, dDecomp, eDecomp, aDecomp, lda, work) + if !floats.EqualApprox(d, dAns, 1e-8) { + t.Errorf("Eigenvalue mismatch when eigenvectors not computed") + } + } + } + } +} + +// eigenDecompCorrect returns whether the eigen decomposition is correct. +// It checks if +// A * v ≈ λ * v +// where the eigenvalues λ are stored in values, and the eigenvectors are stored +// in the columns of v. +func eigenDecompCorrect(values []float64, A, V blas64.General) bool { + n := A.Rows + for i := 0; i < n; i++ { + lambda := values[i] + vector := make([]float64, n) + ans2 := make([]float64, n) + for j := range vector { + v := V.Data[j*V.Stride+i] + vector[j] = v + ans2[j] = lambda * v + } + v := blas64.Vector{Inc: 1, Data: vector} + ans1 := blas64.Vector{Inc: 1, Data: make([]float64, n)} + blas64.Gemv(blas.NoTrans, 1, A, v, 0, ans1) + if !floats.EqualApprox(ans1.Data, ans2, 1e-8) { + return false + } + } + return true +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dsterf.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsterf.go new file mode 100644 index 0000000..a6ca01e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsterf.go @@ -0,0 +1,127 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "sort" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +type Dsterfer interface { + Dgetrfer + Dsterf(n int, d, e []float64) (ok bool) +} + +func DsterfTest(t *testing.T, impl Dsterfer) { + // Hand coded tests. + for cas, test := range []struct { + d []float64 + e []float64 + n int + + ans []float64 + }{ + // Computed from Fortran code. + { + d: []float64{1, 3, 4, 6}, + e: []float64{2, 4, 5}, + n: 4, + ans: []float64{11.046227528488854, 4.795922173417400, -2.546379458290125, 0.704229756383872}, + }, + } { + n := test.n + d := make([]float64, len(test.d)) + copy(d, test.d) + e := make([]float64, len(test.e)) + copy(e, test.e) + ok := impl.Dsterf(n, d, e) + if !ok { + t.Errorf("Case %d, Eigenvalue decomposition failed", cas) + continue + } + ans := make([]float64, len(test.ans)) + copy(ans, test.ans) + sort.Float64s(ans) + if !floats.EqualApprox(ans, d, 1e-10) { + t.Errorf("eigenvalue mismatch") + } + } + + rnd := rand.New(rand.NewSource(1)) + // Probabilistic tests. + for _, n := range []int{4, 6, 10} { + for cas := 0; cas < 10; cas++ { + d := make([]float64, n) + for i := range d { + d[i] = rnd.NormFloat64() + } + dCopy := make([]float64, len(d)) + copy(dCopy, d) + e := make([]float64, n-1) + for i := range e { + e[i] = rnd.NormFloat64() + } + eCopy := make([]float64, len(e)) + copy(eCopy, e) + + ok := impl.Dsterf(n, d, e) + if !ok { + t.Errorf("Eigenvalue decomposition failed") + continue + } + + // Test that the eigenvalues are sorted. + if !sort.Float64sAreSorted(d) { + t.Errorf("Values are not sorted") + } + + // Construct original tridagional matrix. + lda := n + a := make([]float64, n*lda) + for i := 0; i < n; i++ { + a[i*lda+i] = dCopy[i] + if i != n-1 { + a[i*lda+i+1] = eCopy[i] + a[(i+1)*lda+i] = eCopy[i] + } + } + + asub := make([]float64, len(a)) + ipiv := make([]int, n) + + // Test that they are actually eigenvalues by computing the + // determinant of A - λI. + // TODO(btracey): Replace this test with a more numerically stable + // test. 
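+ // For each eigenvalue lambda returned by Dsterf, A - lambda*I should be
+ // numerically singular. Factor it with Dgetrf and use the diagonal of U to
+ // bound |det(A - lambda*I)|; a determinant that is not small means lambda
+ // is not an eigenvalue of A.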
+ for _, lambda := range d { + copy(asub, a) + for i := 0; i < n; i++ { + asub[i*lda+i] -= lambda + } + + // Compute LU. + ok := impl.Dgetrf(n, n, asub, lda, ipiv) + if !ok { + // Definitely singular. + continue + } + // Compute determinant. + var logdet float64 + for i := 0; i < n; i++ { + v := asub[i*lda+i] + logdet += math.Log(math.Abs(v)) + } + if math.Exp(logdet) > 2 { + t.Errorf("Incorrect singular value. n = %d, cas = %d, det = %v", n, cas, math.Exp(logdet)) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dsyev.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsyev.go new file mode 100644 index 0000000..65ccc88 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsyev.go @@ -0,0 +1,116 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dsyever interface { + Dsyev(jobz lapack.EVJob, uplo blas.Uplo, n int, a []float64, lda int, w, work []float64, lwork int) (ok bool) +} + +func DsyevTest(t *testing.T, impl Dsyever) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, test := range []struct { + n, lda int + }{ + {1, 0}, + {2, 0}, + {5, 0}, + {10, 0}, + {100, 0}, + + {1, 5}, + {2, 5}, + {5, 10}, + {10, 20}, + {100, 110}, + } { + for cas := 0; cas < 10; cas++ { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + w := make([]float64, n) + for i := range w { + w[i] = rnd.NormFloat64() + } + + work := make([]float64, 1) + impl.Dsyev(lapack.EVCompute, uplo, n, a, lda, w, work, -1) + work = make([]float64, int(work[0])) + impl.Dsyev(lapack.EVCompute, uplo, n, a, lda, w, work, len(work)) + + // Check that the decomposition is correct + orig := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := aCopy[i*lda+j] + orig.Data[i*orig.Stride+j] = v + orig.Data[j*orig.Stride+i] = v + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + v := aCopy[i*lda+j] + orig.Data[i*orig.Stride+j] = v + orig.Data[j*orig.Stride+i] = v + } + } + } + + V := blas64.General{ + Rows: n, + Cols: n, + Stride: lda, + Data: a, + } + + if !eigenDecompCorrect(w, orig, V) { + t.Errorf("Decomposition mismatch") + } + + // Check that the decomposition is correct when the eigenvectors + // are not computed. + wAns := make([]float64, len(w)) + copy(wAns, w) + copy(a, aCopy) + for i := range w { + w[i] = rnd.Float64() + } + for i := range work { + work[i] = rnd.Float64() + } + impl.Dsyev(lapack.EVNone, uplo, n, a, lda, w, work, len(work)) + if !floats.EqualApprox(w, wAns, 1e-8) { + t.Errorf("Eigenvalue mismatch when vectors not computed") + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytd2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytd2.go new file mode 100644 index 0000000..ecfb4f1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytd2.go @@ -0,0 +1,188 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dsytd2er interface { + Dsytd2(uplo blas.Uplo, n int, a []float64, lda int, d, e, tau []float64) +} + +func Dsytd2Test(t *testing.T, impl Dsytd2er) { + rnd := rand.New(rand.NewSource(1)) + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, test := range []struct { + n, lda int + }{ + {3, 0}, + {4, 0}, + {5, 0}, + + {3, 10}, + {4, 10}, + {5, 10}, + } { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.NormFloat64() + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + + d := make([]float64, n) + for i := range d { + d[i] = math.NaN() + } + e := make([]float64, n-1) + for i := range e { + e[i] = math.NaN() + } + tau := make([]float64, n-1) + for i := range tau { + tau[i] = math.NaN() + } + + impl.Dsytd2(uplo, n, a, lda, d, e, tau) + + // Construct Q + qMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + qCopy := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, len(qMat.Data)), + } + // Set Q to I. + for i := 0; i < n; i++ { + qMat.Data[i*qMat.Stride+i] = 1 + } + for i := 0; i < n-1; i++ { + hMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + // Set H to I. + for i := 0; i < n; i++ { + hMat.Data[i*hMat.Stride+i] = 1 + } + var vi blas64.Vector + if uplo == blas.Upper { + vi = blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + for j := 0; j < i; j++ { + vi.Data[j] = a[j*lda+i+1] + } + vi.Data[i] = 1 + } else { + vi = blas64.Vector{ + Inc: 1, + Data: make([]float64, n), + } + vi.Data[i+1] = 1 + for j := i + 2; j < n; j++ { + vi.Data[j] = a[j*lda+i] + } + } + blas64.Ger(-tau[i], vi, vi, hMat) + copy(qCopy.Data, qMat.Data) + + // Multiply q by the new h. + if uplo == blas.Upper { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, hMat, qCopy, 0, qMat) + } else { + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, hMat, 0, qMat) + } + } + + if !isOrthogonal(qMat) { + t.Errorf("Q not orthogonal") + } + + // Compute Q^T * A * Q. + aMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, len(a)), + } + + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := aCopy[i*lda+j] + if uplo == blas.Lower { + v = aCopy[j*lda+i] + } + aMat.Data[i*aMat.Stride+j] = v + aMat.Data[j*aMat.Stride+i] = v + } + } + + tmp := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + + ans := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + + blas64.Gemm(blas.Trans, blas.NoTrans, 1, qMat, aMat, 0, tmp) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp, qMat, 0, ans) + + // Compare with T. 
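+ // Dsytd2 reduces A to Q*T*Q^T with T symmetric tridiagonal, so the product
+ // Q^T*A*Q formed above must equal the tridiagonal matrix assembled from
+ // d (diagonal) and e (off-diagonal).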
+ tMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + for i := 0; i < n-1; i++ { + tMat.Data[i*tMat.Stride+i] = d[i] + tMat.Data[i*tMat.Stride+i+1] = e[i] + tMat.Data[(i+1)*tMat.Stride+i] = e[i] + } + tMat.Data[(n-1)*tMat.Stride+n-1] = d[n-1] + + same := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if math.Abs(ans.Data[i*ans.Stride+j]-tMat.Data[i*tMat.Stride+j]) > 1e-10 { + same = false + } + } + } + if !same { + t.Errorf("Matrix answer mismatch") + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytrd.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytrd.go new file mode 100644 index 0000000..052e9ed --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dsytrd.go @@ -0,0 +1,161 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dsytrder interface { + Dsytrd(uplo blas.Uplo, n int, a []float64, lda int, d, e, tau, work []float64, lwork int) + + Dorgqr(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) + Dorgql(m, n, k int, a []float64, lda int, tau, work []float64, lwork int) +} + +func DsytrdTest(t *testing.T, impl Dsytrder) { + const tol = 1e-13 + rnd := rand.New(rand.NewSource(1)) + for tc, test := range []struct { + n, lda int + }{ + {1, 0}, + {2, 0}, + {3, 0}, + {4, 0}, + {10, 0}, + {50, 0}, + {100, 0}, + {150, 0}, + {300, 0}, + + {1, 3}, + {2, 3}, + {3, 7}, + {4, 9}, + {10, 20}, + {50, 70}, + {100, 120}, + {150, 170}, + {300, 320}, + } { + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, wl := range []worklen{minimumWork, mediumWork, optimumWork} { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := randomGeneral(n, n, lda, rnd) + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + a.Data[i*a.Stride+j] = a.Data[j*a.Stride+i] + } + } + aCopy := cloneGeneral(a) + + d := nanSlice(n) + e := nanSlice(n - 1) + tau := nanSlice(n - 1) + + var lwork int + switch wl { + case minimumWork: + lwork = 1 + case mediumWork: + work := make([]float64, 1) + impl.Dsytrd(uplo, n, a.Data, a.Stride, d, e, tau, work, -1) + lwork = (int(work[0]) + 1) / 2 + lwork = max(1, lwork) + case optimumWork: + work := make([]float64, 1) + impl.Dsytrd(uplo, n, a.Data, a.Stride, d, e, tau, work, -1) + lwork = int(work[0]) + } + work := make([]float64, lwork) + + impl.Dsytrd(uplo, n, a.Data, a.Stride, d, e, tau, work, lwork) + + prefix := fmt.Sprintf("Case #%v: uplo=%v,n=%v,lda=%v,work=%v", + tc, uplo, n, lda, wl) + + if !generalOutsideAllNaN(a) { + t.Errorf("%v: out-of-range write to A", prefix) + } + + // Extract Q by doing what Dorgtr does. 
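+ // The elementary reflectors are stored in A in the packed form expected by
+ // Dorgtr: shift them into place and generate the explicit orthogonal matrix
+ // with Dorgql for the upper-triangular storage or Dorgqr for the lower one,
+ // acting on the (n-1)×(n-1) submatrix.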
+ q := cloneGeneral(a) + if uplo == blas.Upper { + for j := 0; j < n-1; j++ { + for i := 0; i < j; i++ { + q.Data[i*q.Stride+j] = q.Data[i*q.Stride+j+1] + } + q.Data[(n-1)*q.Stride+j] = 0 + } + for i := 0; i < n-1; i++ { + q.Data[i*q.Stride+n-1] = 0 + } + q.Data[(n-1)*q.Stride+n-1] = 1 + if n > 1 { + work = make([]float64, n-1) + impl.Dorgql(n-1, n-1, n-1, q.Data, q.Stride, tau, work, len(work)) + } + } else { + for j := n - 1; j > 0; j-- { + q.Data[j] = 0 + for i := j + 1; i < n; i++ { + q.Data[i*q.Stride+j] = q.Data[i*q.Stride+j-1] + } + } + q.Data[0] = 1 + for i := 1; i < n; i++ { + q.Data[i*q.Stride] = 0 + } + if n > 1 { + work = make([]float64, n-1) + impl.Dorgqr(n-1, n-1, n-1, q.Data[q.Stride+1:], q.Stride, tau, work, len(work)) + } + } + if !isOrthogonal(q) { + t.Errorf("%v: Q not orthogonal", prefix) + } + + // Contruct symmetric tridiagonal T from d and e. + tMat := zeros(n, n, n) + for i := 0; i < n; i++ { + tMat.Data[i*tMat.Stride+i] = d[i] + } + if uplo == blas.Upper { + for j := 1; j < n; j++ { + tMat.Data[(j-1)*tMat.Stride+j] = e[j-1] + tMat.Data[j*tMat.Stride+j-1] = e[j-1] + } + } else { + for j := 0; j < n-1; j++ { + tMat.Data[(j+1)*tMat.Stride+j] = e[j] + tMat.Data[j*tMat.Stride+j+1] = e[j] + } + } + + // Compute Q^T * A * Q. + tmp := zeros(n, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, aCopy, 0, tmp) + got := zeros(n, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp, q, 0, got) + + // Compare with T. + if !equalApproxGeneral(got, tMat, tol) { + t.Errorf("%v: Q^T*A*Q != T", prefix) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtgsja.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtgsja.go new file mode 100644 index 0000000..0c4c73b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtgsja.go @@ -0,0 +1,166 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dtgsjaer interface { + Dlanger + Dtgsja(jobU, jobV, jobQ lapack.GSVDJob, m, p, n, k, l int, a []float64, lda int, b []float64, ldb int, tola, tolb float64, alpha, beta, u []float64, ldu int, v []float64, ldv int, q []float64, ldq int, work []float64) (cycles int, ok bool) +} + +func DtgsjaTest(t *testing.T, impl Dtgsjaer) { + rnd := rand.New(rand.NewSource(1)) + for cas, test := range []struct { + m, p, n, k, l, lda, ldb, ldu, ldv, ldq int + + ok bool + }{ + {m: 5, p: 5, n: 5, k: 2, l: 2, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 5, k: 4, l: 1, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, k: 2, l: 2, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, k: 4, l: 1, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 10, k: 4, l: 2, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 5, n: 5, k: 2, l: 2, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 5, n: 5, k: 4, l: 1, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 10, n: 10, k: 5, l: 3, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 10, p: 10, n: 10, k: 6, l: 4, lda: 0, ldb: 0, ldu: 0, ldv: 0, ldq: 0, ok: true}, + {m: 5, p: 5, n: 5, k: 2, l: 2, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10, ok: true}, + {m: 5, p: 5, n: 5, k: 4, l: 1, lda: 10, ldb: 10, ldu: 10, ldv: 10, ldq: 10, ok: true}, + {m: 5, p: 5, n: 10, k: 2, l: 2, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 5, p: 5, n: 10, k: 4, l: 1, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 5, p: 5, n: 10, k: 4, l: 2, lda: 20, ldb: 20, ldu: 10, ldv: 10, ldq: 20, ok: true}, + {m: 10, p: 5, n: 5, k: 2, l: 2, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10, ok: true}, + {m: 10, p: 5, n: 5, k: 4, l: 1, lda: 10, ldb: 10, ldu: 20, ldv: 10, ldq: 10, ok: true}, + {m: 10, p: 10, n: 10, k: 5, l: 3, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20, ok: true}, + {m: 10, p: 10, n: 10, k: 6, l: 4, lda: 20, ldb: 20, ldu: 20, ldv: 20, ldq: 20, ok: true}, + } { + m := test.m + p := test.p + n := test.n + k := test.k + l := test.l + lda := test.lda + if lda == 0 { + lda = n + } + ldb := test.ldb + if ldb == 0 { + ldb = n + } + ldu := test.ldu + if ldu == 0 { + ldu = m + } + ldv := test.ldv + if ldv == 0 { + ldv = p + } + ldq := test.ldq + if ldq == 0 { + ldq = n + } + + a := blockedUpperTriGeneral(m, n, k, l, lda, true, rnd) + aCopy := cloneGeneral(a) + b := blockedUpperTriGeneral(p, n, k, l, ldb, false, rnd) + bCopy := cloneGeneral(b) + + tola := float64(max(m, n)) * impl.Dlange(lapack.Frobenius, m, n, a.Data, a.Stride, nil) * dlamchE + tolb := float64(max(p, n)) * impl.Dlange(lapack.Frobenius, p, n, b.Data, b.Stride, nil) * dlamchE + + alpha := make([]float64, n) + beta := make([]float64, n) + + work := make([]float64, 2*n) + + u := nanGeneral(m, m, ldu) + v := nanGeneral(p, p, ldv) + q := nanGeneral(n, n, ldq) + + _, ok := impl.Dtgsja(lapack.GSVDUnit, lapack.GSVDUnit, lapack.GSVDUnit, + m, p, n, k, l, + a.Data, a.Stride, + b.Data, b.Stride, + tola, tolb, + alpha, beta, + u.Data, u.Stride, + v.Data, v.Stride, + q.Data, q.Stride, + work) + + if !ok { + if test.ok { + t.Errorf("test %d unexpectedly did not converge", cas) + } + continue + } + + // Check orthogonality of U, V and Q. 
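+ // Beyond orthogonality of U, V and Q, the GSVD factors must satisfy
+ // U^T*A*Q = D1*[ 0 R ], V^T*B*Q = D2*[ 0 R ] and alpha_i^2 + beta_i^2 = 1;
+ // these identities are verified below.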
+ if !isOrthogonal(u) { + t.Errorf("test %d: U is not orthogonal\n%+v", cas, u) + } + if !isOrthogonal(v) { + t.Errorf("test %d: V is not orthogonal\n%+v", cas, v) + } + if !isOrthogonal(q) { + t.Errorf("test %d: Q is not orthogonal\n%+v", cas, q) + } + + // Check C^2 + S^2 = I. + var elements []float64 + if m-k-l >= 0 { + elements = alpha[k : k+l] + } else { + elements = alpha[k:m] + } + for i := range elements { + i += k + d := alpha[i]*alpha[i] + beta[i]*beta[i] + if !floats.EqualWithinAbsOrRel(d, 1, 1e-14, 1e-14) { + t.Errorf("test %d: alpha_%d^2 + beta_%d^2 != 1: got: %v", cas, i, i, d) + } + } + + zeroR, d1, d2 := constructGSVDresults(n, p, m, k, l, a, b, alpha, beta) + + // Check U^T*A*Q = D1*[ 0 R ]. + uTmp := nanGeneral(m, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, u, aCopy, 0, uTmp) + uAns := nanGeneral(m, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, uTmp, q, 0, uAns) + + d10r := nanGeneral(m, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, d1, zeroR, 0, d10r) + + if !equalApproxGeneral(uAns, d10r, 1e-14) { + t.Errorf("test %d: U^T*A*Q != D1*[ 0 R ]\nU^T*A*Q:\n%+v\nD1*[ 0 R ]:\n%+v", + cas, uAns, d10r) + } + + // Check V^T*B*Q = D2*[ 0 R ]. + vTmp := nanGeneral(p, n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, v, bCopy, 0, vTmp) + vAns := nanGeneral(p, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, vTmp, q, 0, vAns) + + d20r := nanGeneral(p, n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, d2, zeroR, 0, d20r) + + if !equalApproxGeneral(vAns, d20r, 1e-14) { + t.Errorf("test %d: V^T*B*Q != D2*[ 0 R ]\nV^T*B*Q:\n%+v\nD2*[ 0 R ]:\n%+v", + cas, vAns, d20r) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrcon.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrcon.go new file mode 100644 index 0000000..c533706 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrcon.go @@ -0,0 +1,185 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dtrconer interface { + Dgeconer + Dtrcon(norm lapack.MatrixNorm, uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int, work []float64, iwork []int) float64 +} + +func DtrconTest(t *testing.T, impl Dtrconer) { + rnd := rand.New(rand.NewSource(1)) + // Hand crafted tests. 
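+ // Each case lists the expected reciprocal condition number
+ // 1/(||A|| * ||inv(A)||) of a small triangular matrix in the one and
+ // infinity norms, with unit and non-unit diagonal variants.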
+ for _, test := range []struct { + a []float64 + n int + uplo blas.Uplo + diag blas.Diag + condOne float64 + condInf float64 + }{ + { + a: []float64{ + 8, 5, 6, + 0, 7, 8, + 0, 0, 6, + }, + n: 3, + uplo: blas.Upper, + diag: blas.Unit, + condOne: 1.0 / 645, + condInf: 1.0 / 480, + }, + { + a: []float64{ + 8, 5, 6, + 0, 7, 8, + 0, 0, 6, + }, + n: 3, + uplo: blas.Upper, + diag: blas.NonUnit, + condOne: 0.137704918032787, + condInf: 0.157894736842105, + }, + { + a: []float64{ + 8, 0, 0, + 5, 7, 0, + 6, 8, 6, + }, + n: 3, + uplo: blas.Lower, + diag: blas.Unit, + condOne: 1.0 / 480, + condInf: 1.0 / 645, + }, + { + a: []float64{ + 8, 0, 0, + 5, 7, 0, + 6, 8, 6, + }, + n: 3, + uplo: blas.Lower, + diag: blas.NonUnit, + condOne: 0.157894736842105, + condInf: 0.137704918032787, + }, + } { + lda := test.n + work := make([]float64, 3*test.n) + for i := range work { + work[i] = rnd.Float64() + } + iwork := make([]int, test.n) + for i := range iwork { + iwork[i] = int(rnd.Int31()) + } + aCopy := make([]float64, len(test.a)) + copy(aCopy, test.a) + condOne := impl.Dtrcon(lapack.MaxColumnSum, test.uplo, test.diag, test.n, test.a, lda, work, iwork) + if math.Abs(condOne-test.condOne) > 1e-14 { + t.Errorf("One norm mismatch. Want %v, got %v.", test.condOne, condOne) + } + if !floats.Equal(aCopy, test.a) { + t.Errorf("a modified during call") + } + condInf := impl.Dtrcon(lapack.MaxRowSum, test.uplo, test.diag, test.n, test.a, lda, work, iwork) + if math.Abs(condInf-test.condInf) > 1e-14 { + t.Errorf("Inf norm mismatch. Want %v, got %v.", test.condInf, condInf) + } + if !floats.Equal(aCopy, test.a) { + t.Errorf("a modified during call") + } + } + + // Dtrcon does not match the Dgecon output in many cases. See + // https://github.com/xianyi/OpenBLAS/issues/636 + // TODO(btracey): Uncomment this when the mismatch between Dgecon and Dtrcon + // is understood. + /* + // Randomized tests against Dgecon. + for _, uplo := range []blas.Uplo{blas.Lower, blas.Upper} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, test := range []struct { + n, lda int + }{ + {3, 0}, + {4, 9}, + } { + for trial := 0; trial < 1; trial++ { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + a := make([]float64, n*lda) + if trial == 0 { + for i := range a { + a[i] = float64(i + 2) + } + } else { + for i := range a { + a[i] = rnd.NormFloat64() + } + } + + aDense := make([]float64, len(a)) + if uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + aDense[i*lda+j] = a[i*lda+j] + } + } + } else { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + aDense[i*lda+j] = a[i*lda+j] + } + } + } + if diag == blas.Unit { + for i := 0; i < n; i++ { + aDense[i*lda+i] = 1 + } + } + + ipiv := make([]int, n) + work := make([]float64, 4*n) + denseOne := impl.Dlange(lapack.MaxColumnSum, n, n, aDense, lda, work) + denseInf := impl.Dlange(lapack.MaxRowSum, n, n, aDense, lda, work) + + aDenseLU := make([]float64, len(aDense)) + copy(aDenseLU, aDense) + impl.Dgetrf(n, n, aDenseLU, lda, ipiv) + iwork := make([]int, n) + want := impl.Dgecon(lapack.MaxColumnSum, n, aDenseLU, lda, denseOne, work, iwork) + got := impl.Dtrcon(lapack.MaxColumnSum, uplo, diag, n, a, lda, work, iwork) + if math.Abs(want-got) > 1e-14 { + t.Errorf("One norm mismatch. 
Upper = %v, unit = %v, want %v, got %v", uplo == blas.Upper, diag == blas.Unit, want, got) + } + want = impl.Dgecon(lapack.MaxRowSum, n, aDenseLU, lda, denseInf, work, iwork) + got = impl.Dtrcon(lapack.MaxRowSum, uplo, diag, n, a, lda, work, iwork) + if math.Abs(want-got) > 1e-14 { + t.Errorf("Inf norm mismatch. Upper = %v, unit = %v, want %v, got %v", uplo == blas.Upper, diag == blas.Unit, want, got) + } + } + } + } + } + */ +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrevc3.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrevc3.go new file mode 100644 index 0000000..0715e6d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrevc3.go @@ -0,0 +1,223 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +type Dtrevc3er interface { + Dtrevc3(side lapack.EVSide, howmny lapack.EVHowMany, selected []bool, n int, t []float64, ldt int, vl []float64, ldvl int, vr []float64, ldvr int, mm int, work []float64, lwork int) int +} + +func Dtrevc3Test(t *testing.T, impl Dtrevc3er) { + rnd := rand.New(rand.NewSource(1)) + for _, side := range []lapack.EVSide{lapack.EVRight, lapack.EVLeft, lapack.EVBoth} { + for _, howmny := range []lapack.EVHowMany{lapack.EVAll, lapack.EVAllMulQ, lapack.EVSelected} { + for _, n := range []int{0, 1, 2, 3, 4, 5, 10, 34, 100} { + for _, extra := range []int{0, 11} { + for _, optwork := range []bool{true, false} { + for cas := 0; cas < 10; cas++ { + tmat := randomSchurCanonical(n, n+extra, rnd) + testDtrevc3(t, impl, side, howmny, tmat, optwork, rnd) + } + } + } + } + } + } +} + +func testDtrevc3(t *testing.T, impl Dtrevc3er, side lapack.EVSide, howmny lapack.EVHowMany, tmat blas64.General, optwork bool, rnd *rand.Rand) { + const tol = 1e-14 + + n := tmat.Rows + extra := tmat.Stride - tmat.Cols + right := side != lapack.EVLeft + left := side != lapack.EVRight + + var selected, selectedWant []bool + var mWant int // How many columns will the eigenvectors occupy. + if howmny == lapack.EVSelected { + selected = make([]bool, n) + selectedWant = make([]bool, n) + // Dtrevc3 will compute only selected eigenvectors. Pick them + // randomly disregarding whether they are real or complex. + for i := range selected { + if rnd.Float64() < 0.5 { + selected[i] = true + } + } + // Dtrevc3 will modify (standardize) the slice selected based on + // whether the corresponding eigenvalues are real or complex. Do + // the same process here to fill selectedWant. + for i := 0; i < n; { + if i == n-1 || tmat.Data[(i+1)*tmat.Stride+i] == 0 { + // Real eigenvalue. + if selected[i] { + selectedWant[i] = true + mWant++ // Real eigenvectors occupy one column. + } + i++ + } else { + // Complex eigenvalue. + if selected[i] || selected[i+1] { + // Dtrevc3 will modify selected so that + // only the first element of the pair is + // true. + selectedWant[i] = true + mWant += 2 // Complex eigenvectors occupy two columns. + } + i += 2 + } + } + } else { + // All eigenvectors occupy n columns. + mWant = n + } + + var vr blas64.General + if right { + if howmny == lapack.EVAllMulQ { + vr = eye(n, n+extra) + } else { + // VR will be overwritten. 
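+ // Allocate VR filled with NaN so that any out-of-range write by Dtrevc3
+ // is detected; its n×mWant block is completely overwritten with the
+ // computed right eigenvectors.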
+ vr = nanGeneral(n, mWant, n+extra) + } + } + + var vl blas64.General + if left { + if howmny == lapack.EVAllMulQ { + vl = eye(n, n+extra) + } else { + // VL will be overwritten. + vl = nanGeneral(n, mWant, n+extra) + } + } + + work := make([]float64, max(1, 3*n)) + if optwork { + impl.Dtrevc3(side, howmny, selected, n, tmat.Data, tmat.Stride, + vl.Data, max(1, vl.Stride), vr.Data, max(1, vr.Stride), mWant, work, -1) + work = make([]float64, int(work[0])) + } + + m := impl.Dtrevc3(side, howmny, selected, n, tmat.Data, tmat.Stride, + vl.Data, max(1, vl.Stride), vr.Data, max(1, vr.Stride), mWant, work, len(work)) + + prefix := fmt.Sprintf("Case side=%v, howmny=%v, n=%v, extra=%v, optwk=%v", + side, howmny, n, extra, optwork) + + if !generalOutsideAllNaN(tmat) { + t.Errorf("%v: out-of-range write to T", prefix) + } + if !generalOutsideAllNaN(vl) { + t.Errorf("%v: out-of-range write to VL", prefix) + } + if !generalOutsideAllNaN(vr) { + t.Errorf("%v: out-of-range write to VR", prefix) + } + + if m != mWant { + t.Errorf("%v: unexpected value of m. Want %v, got %v", prefix, mWant, m) + } + + if howmny == lapack.EVSelected { + for i := range selected { + if selected[i] != selectedWant[i] { + t.Errorf("%v: unexpected selected[%v]", prefix, i) + } + } + } + + // Check that the columns of VR and VL are actually eigenvectors and + // that the magnitude of their largest element is 1. + var k int + for j := 0; j < n; { + re := tmat.Data[j*tmat.Stride+j] + if j == n-1 || tmat.Data[(j+1)*tmat.Stride+j] == 0 { + if howmny == lapack.EVSelected && !selected[j] { + j++ + continue + } + if right { + ev := columnOf(vr, k) + norm := floats.Norm(ev, math.Inf(1)) + if math.Abs(norm-1) > tol { + t.Errorf("%v: magnitude of largest element of VR[:,%v] not 1", prefix, k) + } + if !isRightEigenvectorOf(tmat, ev, nil, complex(re, 0), tol) { + t.Errorf("%v: VR[:,%v] is not real right eigenvector", prefix, k) + } + } + if left { + ev := columnOf(vl, k) + norm := floats.Norm(ev, math.Inf(1)) + if math.Abs(norm-1) > tol { + t.Errorf("%v: magnitude of largest element of VL[:,%v] not 1", prefix, k) + } + if !isLeftEigenvectorOf(tmat, ev, nil, complex(re, 0), tol) { + t.Errorf("%v: VL[:,%v] is not real left eigenvector", prefix, k) + } + } + k++ + j++ + continue + } + if howmny == lapack.EVSelected && !selected[j] { + j += 2 + continue + } + im := math.Sqrt(math.Abs(tmat.Data[(j+1)*tmat.Stride+j])) * + math.Sqrt(math.Abs(tmat.Data[j*tmat.Stride+j+1])) + if right { + evre := columnOf(vr, k) + evim := columnOf(vr, k+1) + var evmax float64 + for i, v := range evre { + evmax = math.Max(evmax, math.Abs(v)+math.Abs(evim[i])) + } + if math.Abs(evmax-1) > tol { + t.Errorf("%v: magnitude of largest element of VR[:,%v] not 1", prefix, k) + } + if !isRightEigenvectorOf(tmat, evre, evim, complex(re, im), tol) { + t.Errorf("%v: VR[:,%v:%v] is not complex right eigenvector", prefix, k, k+1) + } + floats.Scale(-1, evim) + if !isRightEigenvectorOf(tmat, evre, evim, complex(re, -im), tol) { + t.Errorf("%v: VR[:,%v:%v] is not complex right eigenvector", prefix, k, k+1) + } + } + if left { + evre := columnOf(vl, k) + evim := columnOf(vl, k+1) + var evmax float64 + for i, v := range evre { + evmax = math.Max(evmax, math.Abs(v)+math.Abs(evim[i])) + } + if math.Abs(evmax-1) > tol { + t.Errorf("%v: magnitude of largest element of VL[:,%v] not 1", prefix, k) + } + if !isLeftEigenvectorOf(tmat, evre, evim, complex(re, im), tol) { + t.Errorf("%v: VL[:,%v:%v] is not complex left eigenvector", prefix, k, k+1) + } + floats.Scale(-1, evim) + if 
!isLeftEigenvectorOf(tmat, evre, evim, complex(re, -im), tol) { + t.Errorf("%v: VL[:,%v:%v] is not complex left eigenvector", prefix, k, k+1) + } + } + k += 2 + j += 2 + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrexc.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrexc.go new file mode 100644 index 0000000..92c29ef --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrexc.go @@ -0,0 +1,220 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" +) + +type Dtrexcer interface { + Dtrexc(compq lapack.UpdateSchurComp, n int, t []float64, ldt int, q []float64, ldq int, ifst, ilst int, work []float64) (ifstOut, ilstOut int, ok bool) +} + +func DtrexcTest(t *testing.T, impl Dtrexcer) { + rnd := rand.New(rand.NewSource(1)) + + for _, compq := range []lapack.UpdateSchurComp{lapack.UpdateSchurNone, lapack.UpdateSchur} { + for _, n := range []int{1, 2, 3, 4, 5, 6, 10, 18, 31, 53} { + for _, extra := range []int{0, 1, 11} { + for cas := 0; cas < 100; cas++ { + tmat := randomSchurCanonical(n, n+extra, rnd) + ifst := rnd.Intn(n) + ilst := rnd.Intn(n) + testDtrexc(t, impl, compq, tmat, ifst, ilst, extra, rnd) + } + } + } + } + + for _, compq := range []lapack.UpdateSchurComp{lapack.UpdateSchurNone, lapack.UpdateSchur} { + for _, extra := range []int{0, 1, 11} { + tmat := randomSchurCanonical(0, extra, rnd) + testDtrexc(t, impl, compq, tmat, 0, 0, extra, rnd) + } + } +} + +func testDtrexc(t *testing.T, impl Dtrexcer, compq lapack.UpdateSchurComp, tmat blas64.General, ifst, ilst, extra int, rnd *rand.Rand) { + const tol = 1e-13 + + n := tmat.Rows + fstSize, fstFirst := schurBlockSize(tmat, ifst) + lstSize, lstFirst := schurBlockSize(tmat, ilst) + + tmatCopy := cloneGeneral(tmat) + + var wantq bool + var q, qCopy blas64.General + if compq == lapack.UpdateSchur { + wantq = true + q = eye(n, n+extra) + qCopy = cloneGeneral(q) + } + + work := nanSlice(n) + + ifstGot, ilstGot, ok := impl.Dtrexc(compq, n, tmat.Data, tmat.Stride, q.Data, max(1, q.Stride), ifst, ilst, work) + + prefix := fmt.Sprintf("Case compq=%v, n=%v, ifst=%v, nbf=%v, ilst=%v, nbl=%v, extra=%v", + compq, n, ifst, fstSize, ilst, lstSize, extra) + + if !generalOutsideAllNaN(tmat) { + t.Errorf("%v: out-of-range write to T", prefix) + } + if wantq && !generalOutsideAllNaN(q) { + t.Errorf("%v: out-of-range write to Q", prefix) + } + + if !ok { + t.Logf("%v: Dtrexc returned ok=false", prefix) + } + + // Check that the index of the first block was correctly updated (if + // necessary). + ifstWant := ifst + if !fstFirst { + ifstWant = ifst - 1 + } + if ifstWant != ifstGot { + t.Errorf("%v: unexpected ifst index. Want %v, got %v ", prefix, ifstWant, ifstGot) + } + + // Check that the index of the last block is as expected when ok=true. + // When ok=false, we don't know at which block the algorithm failed, so + // we don't check. + ilstWant := ilst + if !lstFirst { + ilstWant-- + } + if ok { + if ifstWant < ilstWant { + // If the blocks are swapped backwards, these + // adjustments are not necessary, the first row of the + // last block will end up at ifst. 
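+ // When the two blocks have different sizes the destination index shifts
+ // by one: moving a 2×2 block into the place of a 1×1 block lowers ilst,
+ // and moving a 1×1 block into the place of a 2×2 block raises it.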
+ switch { + case fstSize == 2 && lstSize == 1: + ilstWant-- + case fstSize == 1 && lstSize == 2: + ilstWant++ + } + } + if ilstWant != ilstGot { + t.Errorf("%v: unexpected ilst index. Want %v, got %v", prefix, ilstWant, ilstGot) + } + } + + if n <= 1 || ifstGot == ilstGot { + // Too small matrix or no swapping. + // Check that T was not modified. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if tmat.Data[i*tmat.Stride+j] != tmatCopy.Data[i*tmatCopy.Stride+j] { + t.Errorf("%v: unexpected modification at T[%v,%v]", prefix, i, j) + } + } + } + if !wantq { + return + } + // Check that Q was not modified. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if q.Data[i*q.Stride+j] != qCopy.Data[i*qCopy.Stride+j] { + t.Errorf("%v: unexpected modification at Q[%v,%v]", prefix, i, j) + } + } + } + return + } + + if !isSchurCanonicalGeneral(tmat) { + t.Errorf("%v: T is not in Schur canonical form", prefix) + } + + // Check that T was not modified except above the second subdiagonal in + // rows and columns [modMin,modMax]. + modMin := min(ifstGot, ilstGot) + modMax := max(ifstGot, ilstGot) + fstSize + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if modMin <= i && i < modMax && j+1 >= i { + continue + } + if modMin <= j && j < modMax && j+1 >= i { + continue + } + diff := tmat.Data[i*tmat.Stride+j] - tmatCopy.Data[i*tmatCopy.Stride+j] + if diff != 0 { + t.Errorf("%v: unexpected modification at T[%v,%v]", prefix, i, j) + } + } + } + + // Check that the block at ifstGot was delivered to ilstGot correctly. + if fstSize == 1 { + // 1×1 blocks are swapped exactly. + got := tmat.Data[ilstGot*tmat.Stride+ilstGot] + want := tmatCopy.Data[ifstGot*tmatCopy.Stride+ifstGot] + if want != got { + t.Errorf("%v: unexpected 1×1 block at T[%v,%v]. Want %v, got %v", + prefix, want, got, ilstGot, ilstGot) + } + } else { + // Check that the swapped 2×2 block is in Schur canonical form. + a, b, c, d := extract2x2Block(tmat.Data[ilstGot*tmat.Stride+ilstGot:], tmat.Stride) + if !isSchurCanonical(a, b, c, d) { + t.Errorf("%v: 2×2 block at T[%v,%v] not in Schur canonical form", prefix, ilstGot, ilstGot) + } + ev1Got, ev2Got := schurBlockEigenvalues(a, b, c, d) + + // Check that the swapped 2×2 block has the same eigenvalues. + // The block was originally located at T[ifstGot,ifstGot]. + a, b, c, d = extract2x2Block(tmatCopy.Data[ifstGot*tmatCopy.Stride+ifstGot:], tmatCopy.Stride) + ev1Want, ev2Want := schurBlockEigenvalues(a, b, c, d) + if cmplx.Abs(ev1Got-ev1Want) > tol { + t.Errorf("%v: unexpected first eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, ilstGot, ilstGot, ev1Want, ev1Got) + } + if cmplx.Abs(ev2Got-ev2Want) > tol { + t.Errorf("%v: unexpected second eigenvalue of 2×2 block at T[%v,%v]. Want %v, got %v", + prefix, ilstGot, ilstGot, ev2Want, ev2Got) + } + } + + if !wantq { + return + } + + if !isOrthogonal(q) { + t.Errorf("%v: Q is not orthogonal", prefix) + } + // Check that Q is unchanged outside of columns [modMin,modMax]. + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if modMin <= j && j < modMax { + continue + } + if q.Data[i*q.Stride+j]-qCopy.Data[i*qCopy.Stride+j] != 0 { + t.Errorf("%v: unexpected modification of Q[%v,%v]", prefix, i, j) + } + } + } + // Check that Q^T TOrig Q == T. 
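+ // Dtrexc reorders T by an orthogonal similarity transformation, so the
+ // returned Q must satisfy Q^T * T_orig * Q = T_new up to rounding error;
+ // form the product with two Gemm calls and compare against the updated T.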
+ tq := eye(n, n) + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmatCopy, q, 0, tq) + qtq := eye(n, n) + blas64.Gemm(blas.Trans, blas.NoTrans, 1, q, tq, 0, qtq) + if !equalApproxGeneral(qtq, tmat, tol) { + t.Errorf("%v: Q^T (initial T) Q and (final T) are not equal", prefix) + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrti2.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrti2.go new file mode 100644 index 0000000..bfaf8a9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrti2.go @@ -0,0 +1,158 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +type Dtrti2er interface { + Dtrti2(uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int) +} + +func Dtrti2Test(t *testing.T, impl Dtrti2er) { + const tol = 1e-14 + for _, test := range []struct { + a []float64 + n int + uplo blas.Uplo + diag blas.Diag + ans []float64 + }{ + { + a: []float64{ + 2, 3, 4, + 0, 5, 6, + 8, 0, 8}, + n: 3, + uplo: blas.Upper, + diag: blas.NonUnit, + ans: []float64{ + 0.5, -0.3, -0.025, + 0, 0.2, -0.15, + 8, 0, 0.125, + }, + }, + { + a: []float64{ + 5, 3, 4, + 0, 7, 6, + 10, 0, 8}, + n: 3, + uplo: blas.Upper, + diag: blas.Unit, + ans: []float64{ + 5, -3, 14, + 0, 7, -6, + 10, 0, 8, + }, + }, + { + a: []float64{ + 2, 0, 0, + 3, 5, 0, + 4, 6, 8}, + n: 3, + uplo: blas.Lower, + diag: blas.NonUnit, + ans: []float64{ + 0.5, 0, 0, + -0.3, 0.2, 0, + -0.025, -0.15, 0.125, + }, + }, + { + a: []float64{ + 1, 0, 0, + 3, 1, 0, + 4, 6, 1}, + n: 3, + uplo: blas.Lower, + diag: blas.Unit, + ans: []float64{ + 1, 0, 0, + -3, 1, 0, + 14, -6, 1, + }, + }, + } { + impl.Dtrti2(test.uplo, test.diag, test.n, test.a, test.n) + if !floats.EqualApprox(test.ans, test.a, tol) { + t.Errorf("Matrix inverse mismatch. Want %v, got %v.", test.ans, test.a) + } + } + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, test := range []struct { + n, lda int + }{ + {1, 0}, + {2, 0}, + {3, 0}, + {1, 5}, + {2, 5}, + {3, 5}, + } { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate n×n matrix A and fill it with random numbers. + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.Float64() + } + for i := 0; i < n; i++ { + // This keeps the matrices well conditioned. + a[i*lda+i] += float64(n) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + // Compute the inverse of the uplo triangle. + impl.Dtrti2(uplo, diag, n, a, lda) + // Zero out the opposite triangle. + if uplo == blas.Upper { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + aCopy[i*lda+j] = 0 + a[i*lda+j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + aCopy[i*lda+j] = 0 + a[i*lda+j] = 0 + } + } + } + if diag == blas.Unit { + // Set the diagonal of A^{-1} and A explicitly to 1. + for i := 0; i < n; i++ { + a[i*lda+i] = 1 + aCopy[i*lda+i] = 1 + } + } + // Compute A^{-1} * A and store the result in ans. + ans := make([]float64, len(a)) + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, a, lda, aCopy, lda, 0, ans, lda) + // Check that ans is close to the identity matrix. 
+ dist := distFromIdentity(n, ans, lda) + if dist > tol { + t.Errorf("|inv(A) * A - I| = %v. Upper = %v, unit = %v, ans = %v", dist, uplo == blas.Upper, diag == blas.Unit, ans) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrtri.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrtri.go new file mode 100644 index 0000000..eb8ae64 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/dtrtri.go @@ -0,0 +1,90 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +type Dtrtrier interface { + Dtrconer + Dtrtri(uplo blas.Uplo, diag blas.Diag, n int, a []float64, lda int) bool +} + +func DtrtriTest(t *testing.T, impl Dtrtrier) { + const tol = 1e-10 + rnd := rand.New(rand.NewSource(1)) + bi := blas64.Implementation() + for _, uplo := range []blas.Uplo{blas.Upper, blas.Lower} { + for _, diag := range []blas.Diag{blas.NonUnit, blas.Unit} { + for _, test := range []struct { + n, lda int + }{ + {3, 0}, + {70, 0}, + {200, 0}, + {3, 5}, + {70, 92}, + {200, 205}, + } { + n := test.n + lda := test.lda + if lda == 0 { + lda = n + } + // Allocate n×n matrix A and fill it with random numbers. + a := make([]float64, n*lda) + for i := range a { + a[i] = rnd.Float64() + } + for i := 0; i < n; i++ { + // This keeps the matrices well conditioned. + a[i*lda+i] += float64(n) + } + aCopy := make([]float64, len(a)) + copy(aCopy, a) + // Compute the inverse of the uplo triangle. + impl.Dtrtri(uplo, diag, n, a, lda) + // Zero out the opposite triangle. + if uplo == blas.Upper { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + aCopy[i*lda+j] = 0 + a[i*lda+j] = 0 + } + } + } else { + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + aCopy[i*lda+j] = 0 + a[i*lda+j] = 0 + } + } + } + if diag == blas.Unit { + // Set the diagonal explicitly to 1. + for i := 0; i < n; i++ { + a[i*lda+i] = 1 + aCopy[i*lda+i] = 1 + } + } + // Compute A^{-1} * A and store the result in ans. + ans := make([]float64, len(a)) + bi.Dgemm(blas.NoTrans, blas.NoTrans, n, n, n, 1, a, lda, aCopy, lda, 0, ans, lda) + // Check that ans is the identity matrix. + dist := distFromIdentity(n, ans, lda) + if dist > tol { + t.Errorf("|inv(A) * A - I| = %v is too large. Upper = %v, unit = %v, n = %v, lda = %v", + dist, uplo == blas.Upper, diag == blas.Unit, n, lda) + } + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/fortran.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/fortran.go new file mode 100644 index 0000000..be1c5c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/fortran.go @@ -0,0 +1,33 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "strings" +) + +// This file implements types for helping to convert to Fortran testing capabilities. + +// fortran64 is a float64 type that prints as a double precision constant in +// Fortran format. +type fortran64 float64 + +func (f fortran64) String() string { + // Replace exponent with D + s := fmt.Sprintf("%0.16E", f) + s = strings.Replace(s, "E", "D", 1) + return s +} + +// printFortranArray prints a Go slice as an array that can be copied into a +// fortran script. 
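+// For example, printFortranArray([]float64{1, 2, 0.5}, "x") prints
+//
+//	x(1:3) = (/1.0000000000000000D+00, &
+//	2.0000000000000000D+00, &
+//	5.0000000000000000D-01/)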
+func printFortranArray(z []float64, name string) { + fmt.Printf("%s(1:%d) = (/%v, &\n", name, len(z), fortran64(z[0])) + for i := 1; i < len(z)-1; i++ { + fmt.Printf("%v, &\n", fortran64(z[i])) + } + fmt.Printf("%s/)\n", fortran64(z[len(z)-1])) +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/general.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/general.go new file mode 100644 index 0000000..a274631 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/general.go @@ -0,0 +1,1578 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "fmt" + "math" + "math/cmplx" + "testing" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" +) + +const ( + // dlamchE is the machine epsilon. For IEEE this is 2^{-53}. + dlamchE = 1.0 / (1 << 53) + dlamchB = 2 + dlamchP = dlamchB * dlamchE + // dlamchS is the smallest normal number. For IEEE this is 2^{-1022}. + dlamchS = 1.0 / (1 << 256) / (1 << 256) / (1 << 256) / (1 << 254) +) + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// worklen describes how much workspace a test should use. +type worklen int + +const ( + minimumWork worklen = iota + mediumWork + optimumWork +) + +func (wl worklen) String() string { + switch wl { + case minimumWork: + return "minimum" + case mediumWork: + return "medium" + case optimumWork: + return "optimum" + } + return "" +} + +// nanSlice allocates a new slice of length n filled with NaN. +func nanSlice(n int) []float64 { + s := make([]float64, n) + for i := range s { + s[i] = math.NaN() + } + return s +} + +// randomSlice allocates a new slice of length n filled with random values. +func randomSlice(n int, rnd *rand.Rand) []float64 { + s := make([]float64, n) + for i := range s { + s[i] = rnd.NormFloat64() + } + return s +} + +// nanGeneral allocates a new r×c general matrix filled with NaN values. +func nanGeneral(r, c, stride int) blas64.General { + if r < 0 || c < 0 { + panic("bad matrix size") + } + if r == 0 || c == 0 { + return blas64.General{Stride: max(1, stride)} + } + if stride < c { + panic("bad stride") + } + return blas64.General{ + Rows: r, + Cols: c, + Stride: stride, + Data: nanSlice((r-1)*stride + c), + } +} + +// randomGeneral allocates a new r×c general matrix filled with random +// numbers. Out-of-range elements are filled with NaN values. +func randomGeneral(r, c, stride int, rnd *rand.Rand) blas64.General { + ans := nanGeneral(r, c, stride) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + ans.Data[i*ans.Stride+j] = rnd.NormFloat64() + } + } + return ans +} + +// randomHessenberg allocates a new n×n Hessenberg matrix filled with zeros +// under the first subdiagonal and with random numbers elsewhere. Out-of-range +// elements are filled with NaN values. 
+func randomHessenberg(n, stride int, rnd *rand.Rand) blas64.General { + ans := nanGeneral(n, n, stride) + for i := 0; i < n; i++ { + for j := 0; j < i-1; j++ { + ans.Data[i*ans.Stride+j] = 0 + } + for j := max(0, i-1); j < n; j++ { + ans.Data[i*ans.Stride+j] = rnd.NormFloat64() + } + } + return ans +} + +// randomSchurCanonical returns a random, general matrix in Schur canonical +// form, that is, block upper triangular with 1×1 and 2×2 diagonal blocks where +// each 2×2 diagonal block has its diagonal elements equal and its off-diagonal +// elements of opposite sign. +func randomSchurCanonical(n, stride int, rnd *rand.Rand) blas64.General { + t := randomGeneral(n, n, stride, rnd) + // Zero out the lower triangle. + for i := 0; i < t.Rows; i++ { + for j := 0; j < i; j++ { + t.Data[i*t.Stride+j] = 0 + } + } + // Randomly create 2×2 diagonal blocks. + for i := 0; i < t.Rows; { + if i == t.Rows-1 || rnd.Float64() < 0.5 { + // 1×1 block. + i++ + continue + } + // 2×2 block. + // Diagonal elements equal. + t.Data[(i+1)*t.Stride+i+1] = t.Data[i*t.Stride+i] + // Off-diagonal elements of opposite sign. + c := rnd.NormFloat64() + if math.Signbit(c) == math.Signbit(t.Data[i*t.Stride+i+1]) { + c *= -1 + } + t.Data[(i+1)*t.Stride+i] = c + i += 2 + } + return t +} + +// blockedUpperTriGeneral returns a normal random, general matrix in the form +// +// c-k-l k l +// A = k [ 0 A12 A13 ] if r-k-l >= 0; +// l [ 0 0 A23 ] +// r-k-l [ 0 0 0 ] +// +// c-k-l k l +// A = k [ 0 A12 A13 ] if r-k-l < 0; +// r-k [ 0 0 A23 ] +// +// where the k×k matrix A12 and l×l matrix is non-singular +// upper triangular. A23 is l×l upper triangular if r-k-l >= 0, +// otherwise A23 is (r-k)×l upper trapezoidal. +func blockedUpperTriGeneral(r, c, k, l, stride int, kblock bool, rnd *rand.Rand) blas64.General { + t := l + if kblock { + t += k + } + ans := zeros(r, c, stride) + for i := 0; i < min(r, t); i++ { + var v float64 + for v == 0 { + v = rnd.NormFloat64() + } + ans.Data[i*ans.Stride+i+(c-t)] = v + } + for i := 0; i < min(r, t); i++ { + for j := i + (c - t) + 1; j < c; j++ { + ans.Data[i*ans.Stride+j] = rnd.NormFloat64() + } + } + return ans +} + +// nanTriangular allocates a new r×c triangular matrix filled with NaN values. +func nanTriangular(uplo blas.Uplo, n, stride int) blas64.Triangular { + if n < 0 { + panic("bad matrix size") + } + if n == 0 { + return blas64.Triangular{ + Stride: max(1, stride), + Uplo: uplo, + Diag: blas.NonUnit, + } + } + if stride < n { + panic("bad stride") + } + return blas64.Triangular{ + N: n, + Stride: stride, + Data: nanSlice((n-1)*stride + n), + Uplo: uplo, + Diag: blas.NonUnit, + } +} + +// generalOutsideAllNaN returns whether all out-of-range elements have NaN +// values. +func generalOutsideAllNaN(a blas64.General) bool { + // Check after last column. + for i := 0; i < a.Rows-1; i++ { + for _, v := range a.Data[i*a.Stride+a.Cols : i*a.Stride+a.Stride] { + if !math.IsNaN(v) { + return false + } + } + } + // Check after last element. + last := (a.Rows-1)*a.Stride + a.Cols + if a.Rows == 0 || a.Cols == 0 { + last = 0 + } + for _, v := range a.Data[last:] { + if !math.IsNaN(v) { + return false + } + } + return true +} + +// triangularOutsideAllNaN returns whether all out-of-triangle elements have NaN +// values. +func triangularOutsideAllNaN(a blas64.Triangular) bool { + if a.Uplo == blas.Upper { + // Check below diagonal. + for i := 0; i < a.N; i++ { + for _, v := range a.Data[i*a.Stride : i*a.Stride+i] { + if !math.IsNaN(v) { + return false + } + } + } + // Check after last column. 
+ for i := 0; i < a.N-1; i++ { + for _, v := range a.Data[i*a.Stride+a.N : i*a.Stride+a.Stride] { + if !math.IsNaN(v) { + return false + } + } + } + } else { + // Check above diagonal. + for i := 0; i < a.N-1; i++ { + for _, v := range a.Data[i*a.Stride+i+1 : i*a.Stride+a.Stride] { + if !math.IsNaN(v) { + return false + } + } + } + } + // Check after last element. + for _, v := range a.Data[max(0, a.N-1)*a.Stride+a.N:] { + if !math.IsNaN(v) { + return false + } + } + return true +} + +// transposeGeneral returns a new general matrix that is the transpose of the +// input. Nothing is done with data outside the {rows, cols} limit of the general. +func transposeGeneral(a blas64.General) blas64.General { + ans := blas64.General{ + Rows: a.Cols, + Cols: a.Rows, + Stride: a.Rows, + Data: make([]float64, a.Cols*a.Rows), + } + for i := 0; i < a.Rows; i++ { + for j := 0; j < a.Cols; j++ { + ans.Data[j*ans.Stride+i] = a.Data[i*a.Stride+j] + } + } + return ans +} + +// columnNorms returns the column norms of a. +func columnNorms(m, n int, a []float64, lda int) []float64 { + bi := blas64.Implementation() + norms := make([]float64, n) + for j := 0; j < n; j++ { + norms[j] = bi.Dnrm2(m, a[j:], lda) + } + return norms +} + +// extractVMat collects the single reflectors from a into a matrix. +func extractVMat(m, n int, a []float64, lda int, direct lapack.Direct, store lapack.StoreV) blas64.General { + k := min(m, n) + switch { + default: + panic("not implemented") + case direct == lapack.Forward && store == lapack.ColumnWise: + v := blas64.General{ + Rows: m, + Cols: k, + Stride: k, + Data: make([]float64, m*k), + } + for i := 0; i < k; i++ { + for j := 0; j < i; j++ { + v.Data[j*v.Stride+i] = 0 + } + v.Data[i*v.Stride+i] = 1 + for j := i + 1; j < m; j++ { + v.Data[j*v.Stride+i] = a[j*lda+i] + } + } + return v + case direct == lapack.Forward && store == lapack.RowWise: + v := blas64.General{ + Rows: k, + Cols: n, + Stride: n, + Data: make([]float64, k*n), + } + for i := 0; i < k; i++ { + for j := 0; j < i; j++ { + v.Data[i*v.Stride+j] = 0 + } + v.Data[i*v.Stride+i] = 1 + for j := i + 1; j < n; j++ { + v.Data[i*v.Stride+j] = a[i*lda+j] + } + } + return v + } +} + +// constructBidiagonal constructs a bidiagonal matrix with the given diagonal +// and off-diagonal elements. +func constructBidiagonal(uplo blas.Uplo, n int, d, e []float64) blas64.General { + bMat := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: make([]float64, n*n), + } + + for i := 0; i < n-1; i++ { + bMat.Data[i*bMat.Stride+i] = d[i] + if uplo == blas.Upper { + bMat.Data[i*bMat.Stride+i+1] = e[i] + } else { + bMat.Data[(i+1)*bMat.Stride+i] = e[i] + } + } + bMat.Data[(n-1)*bMat.Stride+n-1] = d[n-1] + return bMat +} + +// constructVMat transforms the v matrix based on the storage. 
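+// The implicit unit and zero entries of the Householder vectors are filled in
+// explicitly, at positions determined by the store and direct parameters.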
+func constructVMat(vMat blas64.General, store lapack.StoreV, direct lapack.Direct) blas64.General { + m := vMat.Rows + k := vMat.Cols + switch { + default: + panic("not implemented") + case store == lapack.ColumnWise && direct == lapack.Forward: + ldv := k + v := make([]float64, m*k) + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + if j > i { + v[i*ldv+j] = 0 + } else if j == i { + v[i*ldv+i] = 1 + } else { + v[i*ldv+j] = vMat.Data[i*vMat.Stride+j] + } + } + } + return blas64.General{ + Rows: m, + Cols: k, + Stride: k, + Data: v, + } + case store == lapack.RowWise && direct == lapack.Forward: + ldv := m + v := make([]float64, m*k) + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + if j > i { + v[j*ldv+i] = 0 + } else if j == i { + v[j*ldv+i] = 1 + } else { + v[j*ldv+i] = vMat.Data[i*vMat.Stride+j] + } + } + } + return blas64.General{ + Rows: k, + Cols: m, + Stride: m, + Data: v, + } + case store == lapack.ColumnWise && direct == lapack.Backward: + rowsv := m + ldv := k + v := make([]float64, m*k) + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + vrow := rowsv - i - 1 + vcol := k - j - 1 + if j > i { + v[vrow*ldv+vcol] = 0 + } else if j == i { + v[vrow*ldv+vcol] = 1 + } else { + v[vrow*ldv+vcol] = vMat.Data[i*vMat.Stride+j] + } + } + } + return blas64.General{ + Rows: rowsv, + Cols: ldv, + Stride: ldv, + Data: v, + } + case store == lapack.RowWise && direct == lapack.Backward: + rowsv := k + ldv := m + v := make([]float64, m*k) + for i := 0; i < m; i++ { + for j := 0; j < k; j++ { + vcol := ldv - i - 1 + vrow := k - j - 1 + if j > i { + v[vrow*ldv+vcol] = 0 + } else if j == i { + v[vrow*ldv+vcol] = 1 + } else { + v[vrow*ldv+vcol] = vMat.Data[i*vMat.Stride+j] + } + } + } + return blas64.General{ + Rows: rowsv, + Cols: ldv, + Stride: ldv, + Data: v, + } + } +} + +func constructH(tau []float64, v blas64.General, store lapack.StoreV, direct lapack.Direct) blas64.General { + m := v.Rows + k := v.Cols + if store == lapack.RowWise { + m, k = k, m + } + h := blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: make([]float64, m*m), + } + for i := 0; i < m; i++ { + h.Data[i*m+i] = 1 + } + for i := 0; i < k; i++ { + vecData := make([]float64, m) + if store == lapack.ColumnWise { + for j := 0; j < m; j++ { + vecData[j] = v.Data[j*v.Cols+i] + } + } else { + for j := 0; j < m; j++ { + vecData[j] = v.Data[i*v.Cols+j] + } + } + vec := blas64.Vector{ + Inc: 1, + Data: vecData, + } + + hi := blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: make([]float64, m*m), + } + for i := 0; i < m; i++ { + hi.Data[i*m+i] = 1 + } + // hi = I - tau * v * v^T + blas64.Ger(-tau[i], vec, vec, hi) + + hcopy := blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: make([]float64, m*m), + } + copy(hcopy.Data, h.Data) + if direct == lapack.Forward { + // H = H * H_I in forward mode + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, hcopy, hi, 0, h) + } else { + // H = H_I * H in backward mode + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, hi, hcopy, 0, h) + } + } + return h +} + +// constructQ constructs the Q matrix from the result of dgeqrf and dgeqr2. +func constructQ(kind string, m, n int, a []float64, lda int, tau []float64) blas64.General { + k := min(m, n) + return constructQK(kind, m, n, k, a, lda, tau) +} + +// constructQK constructs the Q matrix from the result of dgeqrf and dgeqr2 using +// the first k reflectors. 
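+// Q is accumulated as the product of the k Householder transformations
+// H_i = I - tau[i]*v_i*v_i^T, with the vectors v_i read from the packed
+// storage produced by the QR, LQ or RQ factorization selected by kind.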
+func constructQK(kind string, m, n, k int, a []float64, lda int, tau []float64) blas64.General { + var sz int + switch kind { + case "QR": + sz = m + case "LQ", "RQ": + sz = n + } + + q := blas64.General{ + Rows: sz, + Cols: sz, + Stride: sz, + Data: make([]float64, sz*sz), + } + for i := 0; i < sz; i++ { + q.Data[i*sz+i] = 1 + } + qCopy := blas64.General{ + Rows: q.Rows, + Cols: q.Cols, + Stride: q.Stride, + Data: make([]float64, len(q.Data)), + } + for i := 0; i < k; i++ { + h := blas64.General{ + Rows: sz, + Cols: sz, + Stride: sz, + Data: make([]float64, sz*sz), + } + for j := 0; j < sz; j++ { + h.Data[j*sz+j] = 1 + } + vVec := blas64.Vector{ + Inc: 1, + Data: make([]float64, sz), + } + switch kind { + case "QR": + vVec.Data[i] = 1 + for j := i + 1; j < sz; j++ { + vVec.Data[j] = a[lda*j+i] + } + case "LQ": + vVec.Data[i] = 1 + for j := i + 1; j < sz; j++ { + vVec.Data[j] = a[i*lda+j] + } + case "RQ": + for j := 0; j < n-k+i; j++ { + vVec.Data[j] = a[(m-k+i)*lda+j] + } + vVec.Data[n-k+i] = 1 + } + blas64.Ger(-tau[i], vVec, vVec, h) + copy(qCopy.Data, q.Data) + // Multiply q by the new h. + switch kind { + case "QR", "RQ": + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, h, 0, q) + case "LQ": + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, h, qCopy, 0, q) + } + } + return q +} + +// checkBidiagonal checks the bidiagonal decomposition from dlabrd and dgebd2. +// The input to this function is the answer returned from the routines, stored +// in a, d, e, tauP, and tauQ. The data of original A matrix (before +// decomposition) is input in aCopy. +// +// checkBidiagonal constructs the V and U matrices, and from them constructs Q +// and P. Using these constructions, it checks that Q^T * A * P and checks that +// the result is bidiagonal. +func checkBidiagonal(t *testing.T, m, n, nb int, a []float64, lda int, d, e, tauP, tauQ, aCopy []float64) { + // Check the answer. + // Construct V and U. + qMat := constructQPBidiagonal(lapack.ApplyQ, m, n, nb, a, lda, tauQ) + pMat := constructQPBidiagonal(lapack.ApplyP, m, n, nb, a, lda, tauP) + + // Compute Q^T * A * P. + aMat := blas64.General{ + Rows: m, + Cols: n, + Stride: lda, + Data: make([]float64, len(aCopy)), + } + copy(aMat.Data, aCopy) + + tmp1 := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + blas64.Gemm(blas.Trans, blas.NoTrans, 1, qMat, aMat, 0, tmp1) + tmp2 := blas64.General{ + Rows: m, + Cols: n, + Stride: n, + Data: make([]float64, m*n), + } + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, tmp1, pMat, 0, tmp2) + + // Check that the first nb rows and cols of tm2 are upper bidiagonal + // if m >= n, and lower bidiagonal otherwise. + correctDiag := true + matchD := true + matchE := true + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + if i >= nb && j >= nb { + continue + } + v := tmp2.Data[i*tmp2.Stride+j] + if i == j { + if math.Abs(d[i]-v) > 1e-12 { + matchD = false + } + continue + } + if m >= n && i == j-1 { + if math.Abs(e[j-1]-v) > 1e-12 { + matchE = false + } + continue + } + if m < n && i-1 == j { + if math.Abs(e[i-1]-v) > 1e-12 { + matchE = false + } + continue + } + if math.Abs(v) > 1e-12 { + correctDiag = false + } + } + } + if !correctDiag { + t.Errorf("Updated A not bi-diagonal") + } + if !matchD { + fmt.Println("d = ", d) + t.Errorf("D Mismatch") + } + if !matchE { + t.Errorf("E mismatch") + } +} + +// constructQPBidiagonal constructs Q or P from the Bidiagonal decomposition +// computed by dlabrd and bgebd2. 
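+// The underlying factorization has the form A = Q * B * P^T with B bidiagonal;
+// the orthogonal factor selected by vect is accumulated from the elementary
+// reflectors stored in a and tau, as used by checkBidiagonal above.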
+func constructQPBidiagonal(vect lapack.ApplyOrtho, m, n, nb int, a []float64, lda int, tau []float64) blas64.General { + sz := n + if vect == lapack.ApplyQ { + sz = m + } + + var ldv int + var v blas64.General + if vect == lapack.ApplyQ { + ldv = nb + v = blas64.General{ + Rows: m, + Cols: nb, + Stride: ldv, + Data: make([]float64, m*ldv), + } + } else { + ldv = n + v = blas64.General{ + Rows: nb, + Cols: n, + Stride: ldv, + Data: make([]float64, m*ldv), + } + } + + if vect == lapack.ApplyQ { + if m >= n { + for i := 0; i < m; i++ { + for j := 0; j <= min(nb-1, i); j++ { + if i == j { + v.Data[i*ldv+j] = 1 + continue + } + v.Data[i*ldv+j] = a[i*lda+j] + } + } + } else { + for i := 1; i < m; i++ { + for j := 0; j <= min(nb-1, i-1); j++ { + if i-1 == j { + v.Data[i*ldv+j] = 1 + continue + } + v.Data[i*ldv+j] = a[i*lda+j] + } + } + } + } else { + if m < n { + for i := 0; i < nb; i++ { + for j := i; j < n; j++ { + if i == j { + v.Data[i*ldv+j] = 1 + continue + } + v.Data[i*ldv+j] = a[i*lda+j] + } + } + } else { + for i := 0; i < nb; i++ { + for j := i + 1; j < n; j++ { + if j-1 == i { + v.Data[i*ldv+j] = 1 + continue + } + v.Data[i*ldv+j] = a[i*lda+j] + } + } + } + } + + // The variable name is a computation of Q, but the algorithm is mostly the + // same for computing P (just with different data). + qMat := blas64.General{ + Rows: sz, + Cols: sz, + Stride: sz, + Data: make([]float64, sz*sz), + } + hMat := blas64.General{ + Rows: sz, + Cols: sz, + Stride: sz, + Data: make([]float64, sz*sz), + } + // set Q to I + for i := 0; i < sz; i++ { + qMat.Data[i*qMat.Stride+i] = 1 + } + for i := 0; i < nb; i++ { + qCopy := blas64.General{Rows: qMat.Rows, Cols: qMat.Cols, Stride: qMat.Stride, Data: make([]float64, len(qMat.Data))} + copy(qCopy.Data, qMat.Data) + + // Set g and h to I + for i := 0; i < sz; i++ { + for j := 0; j < sz; j++ { + if i == j { + hMat.Data[i*sz+j] = 1 + } else { + hMat.Data[i*sz+j] = 0 + } + } + } + var vi blas64.Vector + // H -= tauQ[i] * v[i] * v[i]^t + if vect == lapack.ApplyQ { + vi = blas64.Vector{ + Inc: v.Stride, + Data: v.Data[i:], + } + } else { + vi = blas64.Vector{ + Inc: 1, + Data: v.Data[i*v.Stride:], + } + } + blas64.Ger(-tau[i], vi, vi, hMat) + // Q = Q * G[1] + blas64.Gemm(blas.NoTrans, blas.NoTrans, 1, qCopy, hMat, 0, qMat) + } + return qMat +} + +// printRowise prints the matrix with one row per line. This is useful for debugging. +// If beyond is true, it prints beyond the final column to lda. If false, only +// the columns are printed. +func printRowise(a []float64, m, n, lda int, beyond bool) { + for i := 0; i < m; i++ { + end := n + if beyond { + end = lda + } + fmt.Println(a[i*lda : i*lda+end]) + } +} + +// isOrthogonal returns whether a square matrix Q is orthogonal. +func isOrthogonal(q blas64.General) bool { + n := q.Rows + if n != q.Cols { + panic("matrix not square") + } + // A real square matrix is orthogonal if and only if its rows form + // an orthonormal basis of the Euclidean space R^n. + const tol = 1e-13 + for i := 0; i < n; i++ { + nrm := blas64.Nrm2(blas64.Vector{N: n, Data: q.Data[i*q.Stride:], Inc: 1}) + if math.IsNaN(nrm) { + return false + } + if math.Abs(nrm-1) > tol { + return false + } + for j := i + 1; j < n; j++ { + dot := blas64.Dot(blas64.Vector{N: n, Data: q.Data[i*q.Stride:], Inc: 1}, + blas64.Vector{N: n, Data: q.Data[j*q.Stride:], Inc: 1}) + if math.IsNaN(dot) { + return false + } + if math.Abs(dot) > tol { + return false + } + } + } + return true +} + +// hasOrthonormalColumns returns whether the columns of Q are orthonormal. 
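+// A column is accepted when its Euclidean norm is within tol of 1 and its dot
+// product with every later column is within tol of 0; any NaN makes the check
+// fail.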
+func hasOrthonormalColumns(q blas64.General) bool { + m, n := q.Rows, q.Cols + if n > m { + // Wide matrix cannot have all columns orthogonal. + return false + } + ldq := q.Stride + const tol = 1e-13 + for i := 0; i < n; i++ { + nrm := blas64.Nrm2(blas64.Vector{N: m, Data: q.Data[i:], Inc: ldq}) + if math.IsNaN(nrm) { + return false + } + if math.Abs(nrm-1) > tol { + return false + } + for j := i + 1; j < n; j++ { + dot := blas64.Dot(blas64.Vector{N: m, Data: q.Data[i:], Inc: ldq}, + blas64.Vector{N: m, Data: q.Data[j:], Inc: ldq}) + if math.IsNaN(dot) { + return false + } + if math.Abs(dot) > tol { + return false + } + } + } + return true +} + +// hasOrthonormalRows returns whether the rows of Q are orthonormal. +func hasOrthonormalRows(q blas64.General) bool { + m, n := q.Rows, q.Cols + if m > n { + // Tall matrix cannot have all rows orthogonal. + return false + } + ldq := q.Stride + const tol = 1e-13 + for i1 := 0; i1 < m; i1++ { + nrm := blas64.Nrm2(blas64.Vector{N: n, Data: q.Data[i1*ldq:], Inc: 1}) + if math.IsNaN(nrm) { + return false + } + if math.Abs(nrm-1) > tol { + return false + } + for i2 := i1 + 1; i2 < m; i2++ { + dot := blas64.Dot( + blas64.Vector{N: n, Data: q.Data[i1*ldq:], Inc: 1}, + blas64.Vector{N: n, Data: q.Data[i2*ldq:], Inc: 1}) + if math.IsNaN(dot) { + return false + } + if math.Abs(dot) > tol { + return false + } + } + } + return true +} + +// copyMatrix copies an m×n matrix src of stride n into an m×n matrix dst of stride ld. +func copyMatrix(m, n int, dst []float64, ld int, src []float64) { + for i := 0; i < m; i++ { + copy(dst[i*ld:i*ld+n], src[i*n:i*n+n]) + } +} + +func copyGeneral(dst, src blas64.General) { + r := min(dst.Rows, src.Rows) + c := min(dst.Cols, src.Cols) + for i := 0; i < r; i++ { + copy(dst.Data[i*dst.Stride:i*dst.Stride+c], src.Data[i*src.Stride:i*src.Stride+c]) + } +} + +// cloneGeneral allocates and returns an exact copy of the given general matrix. +func cloneGeneral(a blas64.General) blas64.General { + c := a + c.Data = make([]float64, len(a.Data)) + copy(c.Data, a.Data) + return c +} + +// equalApprox returns whether the matrices A and B of order n are approximately +// equal within given tolerance. +func equalApprox(m, n int, a []float64, lda int, b []float64, tol float64) bool { + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + diff := a[i*lda+j] - b[i*n+j] + if math.IsNaN(diff) || math.Abs(diff) > tol { + return false + } + } + } + return true +} + +// equalApproxGeneral returns whether the general matrices a and b are +// approximately equal within given tolerance. +func equalApproxGeneral(a, b blas64.General, tol float64) bool { + if a.Rows != b.Rows || a.Cols != b.Cols { + panic("bad input") + } + for i := 0; i < a.Rows; i++ { + for j := 0; j < a.Cols; j++ { + diff := a.Data[i*a.Stride+j] - b.Data[i*b.Stride+j] + if math.IsNaN(diff) || math.Abs(diff) > tol { + return false + } + } + } + return true +} + +// equalApproxTriangular returns whether the triangular matrices A and B of +// order n are approximately equal within given tolerance. 
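+// A is stored with leading dimension lda while B is stored with leading
+// dimension n; only the triangle selected by upper is compared.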
+func equalApproxTriangular(upper bool, n int, a []float64, lda int, b []float64, tol float64) bool { + if upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + diff := a[i*lda+j] - b[i*n+j] + if math.IsNaN(diff) || math.Abs(diff) > tol { + return false + } + } + } + return true + } + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + diff := a[i*lda+j] - b[i*n+j] + if math.IsNaN(diff) || math.Abs(diff) > tol { + return false + } + } + } + return true +} + +func equalApproxSymmetric(a, b blas64.Symmetric, tol float64) bool { + if a.Uplo != b.Uplo { + return false + } + if a.N != b.N { + return false + } + if a.Uplo == blas.Upper { + for i := 0; i < a.N; i++ { + for j := i; j < a.N; j++ { + if !floats.EqualWithinAbsOrRel(a.Data[i*a.Stride+j], b.Data[i*b.Stride+j], tol, tol) { + return false + } + } + } + return true + } + for i := 0; i < a.N; i++ { + for j := 0; j <= i; j++ { + if !floats.EqualWithinAbsOrRel(a.Data[i*a.Stride+j], b.Data[i*b.Stride+j], tol, tol) { + return false + } + } + } + return true +} + +// randSymBand creates a random symmetric banded matrix, and returns both the +// random matrix and the equivalent Symmetric matrix for testing. rnder +// specifies the random number +func randSymBand(ul blas.Uplo, n, ldab, kb int, rnd *rand.Rand) (blas64.Symmetric, blas64.SymmetricBand) { + // A matrix is positive definite if and only if it has a Cholesky + // decomposition. Generate a random banded lower triangular matrix + // to construct the random symmetric matrix. + a := make([]float64, n*n) + for i := 0; i < n; i++ { + for j := max(0, i-kb); j <= i; j++ { + a[i*n+j] = rnd.NormFloat64() + } + a[i*n+i] = math.Abs(a[i*n+i]) + // Add an extra amound to the diagonal in order to improve the condition number. + a[i*n+i] += 1.5 * rnd.Float64() + } + agen := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: a, + } + + // Construct the SymDense from a*a^T + c := make([]float64, n*n) + cgen := blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: c, + } + blas64.Gemm(blas.NoTrans, blas.Trans, 1, agen, agen, 0, cgen) + sym := blas64.Symmetric{ + N: n, + Stride: n, + Data: c, + Uplo: ul, + } + + b := symToSymBand(ul, c, n, n, kb, ldab) + band := blas64.SymmetricBand{ + N: n, + K: kb, + Stride: ldab, + Data: b, + Uplo: ul, + } + + return sym, band +} + +// symToSymBand takes the data in a Symmetric matrix and returns a +// SymmetricBanded matrix. +func symToSymBand(ul blas.Uplo, a []float64, n, lda, kb, ldab int) []float64 { + if ul == blas.Upper { + band := make([]float64, (n-1)*ldab+kb+1) + for i := 0; i < n; i++ { + for j := i; j < min(i+kb+1, n); j++ { + band[i*ldab+j-i] = a[i*lda+j] + } + } + return band + } + band := make([]float64, (n-1)*ldab+kb+1) + for i := 0; i < n; i++ { + for j := max(0, i-kb); j <= i; j++ { + band[i*ldab+j-i+kb] = a[i*lda+j] + } + } + return band +} + +// symBandToSym takes a banded symmetric matrix and returns the same data as +// a Symmetric matrix. +func symBandToSym(ul blas.Uplo, band []float64, n, kb, ldab int) blas64.Symmetric { + sym := make([]float64, n*n) + if ul == blas.Upper { + for i := 0; i < n; i++ { + for j := 0; j < min(kb+1+i, n)-i; j++ { + sym[i*n+i+j] = band[i*ldab+j] + } + } + } else { + for i := 0; i < n; i++ { + for j := kb - min(i, kb); j < kb+1; j++ { + sym[i*n+i-kb+j] = band[i*ldab+j] + } + } + } + return blas64.Symmetric{ + N: n, + Stride: n, + Data: sym, + Uplo: ul, + } +} + +// eye returns an identity matrix of given order and stride. 
+func eye(n, stride int) blas64.General { + ans := nanGeneral(n, n, stride) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + ans.Data[i*ans.Stride+j] = 0 + } + ans.Data[i*ans.Stride+i] = 1 + } + return ans +} + +// zeros returns an m×n matrix with given stride filled with zeros. +func zeros(m, n, stride int) blas64.General { + a := nanGeneral(m, n, stride) + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a.Data[i*a.Stride+j] = 0 + } + } + return a +} + +// extract2x2Block returns the elements of T at [0,0], [0,1], [1,0], and [1,1]. +func extract2x2Block(t []float64, ldt int) (a, b, c, d float64) { + return t[0], t[1], t[ldt], t[ldt+1] +} + +// isSchurCanonical returns whether the 2×2 matrix [a b; c d] is in Schur +// canonical form. +func isSchurCanonical(a, b, c, d float64) bool { + return c == 0 || (a == d && math.Signbit(b) != math.Signbit(c)) +} + +// isSchurCanonicalGeneral returns whether T is block upper triangular with 1×1 +// and 2×2 diagonal blocks, each 2×2 block in Schur canonical form. The function +// checks only along the diagonal and the first subdiagonal, otherwise the lower +// triangle is not accessed. +func isSchurCanonicalGeneral(t blas64.General) bool { + if t.Rows != t.Cols { + panic("invalid matrix") + } + for i := 0; i < t.Rows-1; { + if t.Data[(i+1)*t.Stride+i] == 0 { + // 1×1 block. + i++ + continue + } + // 2×2 block. + a, b, c, d := extract2x2Block(t.Data[i*t.Stride+i:], t.Stride) + if !isSchurCanonical(a, b, c, d) { + return false + } + i += 2 + } + return true +} + +// schurBlockEigenvalues returns the two eigenvalues of the 2×2 matrix [a b; c d] +// that must be in Schur canonical form. +func schurBlockEigenvalues(a, b, c, d float64) (ev1, ev2 complex128) { + if !isSchurCanonical(a, b, c, d) { + panic("block not in Schur canonical form") + } + if c == 0 { + return complex(a, 0), complex(d, 0) + } + im := math.Sqrt(-b * c) + return complex(a, im), complex(a, -im) +} + +// schurBlockSize returns the size of the diagonal block at i-th row in the +// upper quasi-triangular matrix t in Schur canonical form, and whether i points +// to the first row of the block. For zero-sized matrices the function returns 0 +// and true. +func schurBlockSize(t blas64.General, i int) (size int, first bool) { + if t.Rows != t.Cols { + panic("matrix not square") + } + if t.Rows == 0 { + return 0, true + } + if i < 0 || t.Rows <= i { + panic("index out of range") + } + + first = true + if i > 0 && t.Data[i*t.Stride+i-1] != 0 { + // There is a non-zero element to the left, therefore i must + // point to the second row in a 2×2 diagonal block. + first = false + i-- + } + size = 1 + if i+1 < t.Rows && t.Data[(i+1)*t.Stride+i] != 0 { + // There is a non-zero element below, this must be a 2×2 + // diagonal block. + size = 2 + } + return size, first +} + +// containsComplex returns whether z is approximately equal to one of the complex +// numbers in v. If z is found, its index in v will be also returned. +func containsComplex(v []complex128, z complex128, tol float64) (found bool, index int) { + for i := range v { + if cmplx.Abs(v[i]-z) < tol { + return true, i + } + } + return false, -1 +} + +// isAllNaN returns whether x contains only NaN values. +func isAllNaN(x []float64) bool { + for _, v := range x { + if !math.IsNaN(v) { + return false + } + } + return true +} + +// isUpperHessenberg returns whether h contains only zeros below the +// subdiagonal. 
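+// That is, h[i,j] must be zero whenever i > j+1.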
+func isUpperHessenberg(h blas64.General) bool { + if h.Rows != h.Cols { + panic("matrix not square") + } + n := h.Rows + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + if i > j+1 && h.Data[i*h.Stride+j] != 0 { + return false + } + } + } + return true +} + +// isUpperTriangular returns whether a contains only zeros below the diagonal. +func isUpperTriangular(a blas64.General) bool { + n := a.Rows + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + if a.Data[i*a.Stride+j] != 0 { + return false + } + } + } + return true +} + +// unbalancedSparseGeneral returns an m×n dense matrix with a random sparse +// structure consisting of nz nonzero elements. The matrix will be unbalanced by +// multiplying each element randomly by its row or column index. +func unbalancedSparseGeneral(m, n, stride int, nonzeros int, rnd *rand.Rand) blas64.General { + a := zeros(m, n, stride) + for k := 0; k < nonzeros; k++ { + i := rnd.Intn(n) + j := rnd.Intn(n) + if rnd.Float64() < 0.5 { + a.Data[i*stride+j] = float64(i+1) * rnd.NormFloat64() + } else { + a.Data[i*stride+j] = float64(j+1) * rnd.NormFloat64() + } + } + return a +} + +// columnOf returns a copy of the j-th column of a. +func columnOf(a blas64.General, j int) []float64 { + if j < 0 || a.Cols <= j { + panic("bad column index") + } + col := make([]float64, a.Rows) + for i := range col { + col[i] = a.Data[i*a.Stride+j] + } + return col +} + +// isRightEigenvectorOf returns whether the vector xRe+i*xIm, where i is the +// imaginary unit, is the right eigenvector of A corresponding to the eigenvalue +// lambda. +// +// A right eigenvector corresponding to a complex eigenvalue λ is a complex +// non-zero vector x such that +// A x = λ x. +func isRightEigenvectorOf(a blas64.General, xRe, xIm []float64, lambda complex128, tol float64) bool { + if a.Rows != a.Cols { + panic("matrix not square") + } + + if imag(lambda) != 0 && xIm == nil { + // Complex eigenvalue of a real matrix cannot have a real + // eigenvector. + return false + } + + n := a.Rows + + // Compute A real(x) and store the result into xReAns. + xReAns := make([]float64, n) + blas64.Gemv(blas.NoTrans, 1, a, blas64.Vector{Data: xRe, Inc: 1}, 0, blas64.Vector{Data: xReAns, Inc: 1}) + + if imag(lambda) == 0 && xIm == nil { + // Real eigenvalue and eigenvector. + + // Compute λx and store the result into lambdax. + lambdax := make([]float64, n) + floats.AddScaled(lambdax, real(lambda), xRe) + + // This is expressed as the inverse to catch the case + // xReAns_i = Inf and lambdax_i = Inf of the same sign. + return !(floats.Distance(xReAns, lambdax, math.Inf(1)) > tol) + } + + // Complex eigenvector, and real or complex eigenvalue. + + // Compute A imag(x) and store the result into xImAns. + xImAns := make([]float64, n) + blas64.Gemv(blas.NoTrans, 1, a, blas64.Vector{Data: xIm, Inc: 1}, 0, blas64.Vector{Data: xImAns, Inc: 1}) + + // Compute λx and store the result into lambdax. + lambdax := make([]complex128, n) + for i := range lambdax { + lambdax[i] = lambda * complex(xRe[i], xIm[i]) + } + + for i, v := range lambdax { + ax := complex(xReAns[i], xImAns[i]) + if cmplx.Abs(v-ax) > tol { + return false + } + } + return true +} + +// isLeftEigenvectorOf returns whether the vector yRe+i*yIm, where i is the +// imaginary unit, is the left eigenvector of A corresponding to the eigenvalue +// lambda. 
+// +// A left eigenvector corresponding to a complex eigenvalue λ is a complex +// non-zero vector y such that +// y^H A = λ y^H, +// which is equivalent for real A to +// A^T y = conj(λ) y, +func isLeftEigenvectorOf(a blas64.General, yRe, yIm []float64, lambda complex128, tol float64) bool { + if a.Rows != a.Cols { + panic("matrix not square") + } + + if imag(lambda) != 0 && yIm == nil { + // Complex eigenvalue of a real matrix cannot have a real + // eigenvector. + return false + } + + n := a.Rows + + // Compute A^T real(y) and store the result into yReAns. + yReAns := make([]float64, n) + blas64.Gemv(blas.Trans, 1, a, blas64.Vector{Data: yRe, Inc: 1}, 0, blas64.Vector{Data: yReAns, Inc: 1}) + + if imag(lambda) == 0 && yIm == nil { + // Real eigenvalue and eigenvector. + + // Compute λy and store the result into lambday. + lambday := make([]float64, n) + floats.AddScaled(lambday, real(lambda), yRe) + + // This is expressed as the inverse to catch the case + // yReAns_i = Inf and lambday_i = Inf of the same sign. + return !(floats.Distance(yReAns, lambday, math.Inf(1)) > tol) + } + + // Complex eigenvector, and real or complex eigenvalue. + + // Compute A^T imag(y) and store the result into yImAns. + yImAns := make([]float64, n) + blas64.Gemv(blas.Trans, 1, a, blas64.Vector{Data: yIm, Inc: 1}, 0, blas64.Vector{Data: yImAns, Inc: 1}) + + // Compute conj(λ)y and store the result into lambday. + lambda = cmplx.Conj(lambda) + lambday := make([]complex128, n) + for i := range lambday { + lambday[i] = lambda * complex(yRe[i], yIm[i]) + } + + for i, v := range lambday { + ay := complex(yReAns[i], yImAns[i]) + if cmplx.Abs(v-ay) > tol { + return false + } + } + return true +} + +// rootsOfUnity returns the n complex numbers whose n-th power is equal to 1. +func rootsOfUnity(n int) []complex128 { + w := make([]complex128, n) + for i := 0; i < n; i++ { + angle := math.Pi * float64(2*i) / float64(n) + w[i] = complex(math.Cos(angle), math.Sin(angle)) + } + return w +} + +// constructGSVDresults returns the matrices [ 0 R ], D1 and D2 described +// in the documentation of Dtgsja and Dggsvd3, and the result matrix in +// the documentation for Dggsvp3. 
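+// In the notation of those routines the factorizations satisfy
+//  U^T*A*Q = D1*[ 0 R ],   V^T*B*Q = D2*[ 0 R ],
+// so the returned matrices can be used to verify the computed U, V and Q.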
+func constructGSVDresults(n, p, m, k, l int, a, b blas64.General, alpha, beta []float64) (zeroR, d1, d2 blas64.General) { + // [ 0 R ] + zeroR = zeros(k+l, n, n) + dst := zeroR + dst.Rows = min(m, k+l) + dst.Cols = k + l + dst.Data = zeroR.Data[n-k-l:] + src := a + src.Rows = min(m, k+l) + src.Cols = k + l + src.Data = a.Data[n-k-l:] + copyGeneral(dst, src) + if m < k+l { + // [ 0 R ] + dst.Rows = k + l - m + dst.Cols = k + l - m + dst.Data = zeroR.Data[m*zeroR.Stride+n-(k+l-m):] + src = b + src.Rows = k + l - m + src.Cols = k + l - m + src.Data = b.Data[(m-k)*b.Stride+n+m-k-l:] + copyGeneral(dst, src) + } + + // D1 + d1 = zeros(m, k+l, k+l) + for i := 0; i < k; i++ { + d1.Data[i*d1.Stride+i] = 1 + } + for i := k; i < min(m, k+l); i++ { + d1.Data[i*d1.Stride+i] = alpha[i] + } + + // D2 + d2 = zeros(p, k+l, k+l) + for i := 0; i < min(l, m-k); i++ { + d2.Data[i*d2.Stride+i+k] = beta[k+i] + } + for i := m - k; i < l; i++ { + d2.Data[i*d2.Stride+i+k] = 1 + } + + return zeroR, d1, d2 +} + +func constructGSVPresults(n, p, m, k, l int, a, b blas64.General) (zeroA, zeroB blas64.General) { + zeroA = zeros(m, n, n) + dst := zeroA + dst.Rows = min(m, k+l) + dst.Cols = k + l + dst.Data = zeroA.Data[n-k-l:] + src := a + dst.Rows = min(m, k+l) + src.Cols = k + l + src.Data = a.Data[n-k-l:] + copyGeneral(dst, src) + + zeroB = zeros(p, n, n) + dst = zeroB + dst.Rows = l + dst.Cols = l + dst.Data = zeroB.Data[n-l:] + src = b + dst.Rows = l + src.Cols = l + src.Data = b.Data[n-l:] + copyGeneral(dst, src) + + return zeroA, zeroB +} + +// distFromIdentity returns the L-infinity distance of an n×n matrix A from the +// identity. If A contains NaN elements, distFromIdentity will return +inf. +func distFromIdentity(n int, a []float64, lda int) float64 { + var dist float64 + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + aij := a[i*lda+j] + if math.IsNaN(aij) { + return math.Inf(1) + } + if i == j { + dist = math.Max(dist, math.Abs(aij-1)) + } else { + dist = math.Max(dist, math.Abs(aij)) + } + } + } + return dist +} + +func sameFloat64(a, b float64) bool { + return a == b || math.IsNaN(a) && math.IsNaN(b) +} + +// sameLowerTri returns whether n×n matrices A and B are same under the diagonal. +func sameLowerTri(n int, a []float64, lda int, b []float64, ldb int) bool { + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + aij := a[i*lda+j] + bij := b[i*ldb+j] + if !sameFloat64(aij, bij) { + return false + } + } + } + return true +} + +// sameUpperTri returns whether n×n matrices A and B are same above the diagonal. +func sameUpperTri(n int, a []float64, lda int, b []float64, ldb int) bool { + for i := 0; i < n-1; i++ { + for j := i + 1; j < n; j++ { + aij := a[i*lda+j] + bij := b[i*ldb+j] + if !sameFloat64(aij, bij) { + return false + } + } + } + return true +} + +// svdJobString returns a string representation of job. +func svdJobString(job lapack.SVDJob) string { + switch job { + case lapack.SVDAll: + return "All" + case lapack.SVDStore: + return "Store" + case lapack.SVDOverwrite: + return "Overwrite" + case lapack.SVDNone: + return "None" + } + return "unknown SVD job" +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlc.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlc.go new file mode 100644 index 0000000..c82917e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlc.go @@ -0,0 +1,83 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package testlapack + +import "testing" + +type Iladlcer interface { + Iladlc(m, n int, a []float64, lda int) int +} + +func IladlcTest(t *testing.T, impl Iladlcer) { + for i, test := range []struct { + a []float64 + m, n, lda int + ans int + }{ + { + a: []float64{0, 0, 0, 0}, + m: 1, + n: 1, + lda: 2, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 2, + n: 2, + lda: 2, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 4, + n: 1, + lda: 1, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 1, + n: 4, + lda: 4, + ans: -1, + }, + { + a: []float64{ + 1, 2, 3, 4, + 5, 6, 7, 8, + }, + m: 2, + n: 4, + lda: 4, + ans: 3, + }, + { + a: []float64{ + 1, 2, 3, 0, + 0, 0, 0, 0, + }, + m: 2, + n: 4, + lda: 4, + ans: 2, + }, + { + a: []float64{ + 0, 0, 3, 4, + 0, 0, 0, 0, + }, + m: 2, + n: 2, + lda: 4, + ans: -1, + }, + } { + ans := impl.Iladlc(test.m, test.n, test.a, test.lda) + if ans != test.ans { + t.Errorf("Column mismatch case %v. Want: %v, got: %v", i, test.ans, ans) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlr.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlr.go new file mode 100644 index 0000000..b962c2b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/iladlr.go @@ -0,0 +1,83 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import "testing" + +type Iladlrer interface { + Iladlr(m, n int, a []float64, lda int) int +} + +func IladlrTest(t *testing.T, impl Iladlrer) { + for i, test := range []struct { + a []float64 + m, n, lda int + ans int + }{ + { + a: []float64{0, 0, 0, 0}, + m: 1, + n: 1, + lda: 2, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 2, + n: 2, + lda: 2, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 4, + n: 1, + lda: 1, + ans: -1, + }, + { + a: []float64{0, 0, 0, 0}, + m: 1, + n: 4, + lda: 4, + ans: -1, + }, + { + a: []float64{ + 1, 2, 3, 4, + 5, 6, 7, 8, + }, + m: 2, + n: 4, + lda: 4, + ans: 1, + }, + { + a: []float64{ + 1, 2, 3, 0, + 0, 0, 0, 0, + }, + m: 2, + n: 4, + lda: 4, + ans: 0, + }, + { + a: []float64{ + 0, 0, 3, 4, + 0, 0, 0, 0, + }, + m: 2, + n: 2, + lda: 4, + ans: -1, + }, + } { + ans := impl.Iladlr(test.m, test.n, test.a, test.lda) + if ans != test.ans { + t.Errorf("Column mismatch case %v. Want: %v, got: %v", i, test.ans, ans) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/matgen.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/matgen.go new file mode 100644 index 0000000..eca472c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/matgen.go @@ -0,0 +1,740 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" +) + +// Dlatm1 computes the entries of dst as specified by mode, cond and rsign. 
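+// It is modeled on the LAPACK test-matrix routine DLATM1.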
+// +// mode describes how dst will be computed: +// |mode| == 1: dst[0] = 1 and dst[1:n] = 1/cond +// |mode| == 2: dst[:n-1] = 1/cond and dst[n-1] = 1 +// |mode| == 3: dst[i] = cond^{-i/(n-1)}, i=0,...,n-1 +// |mode| == 4: dst[i] = 1 - i*(1-1/cond)/(n-1) +// |mode| == 5: dst[i] = random number in the range (1/cond, 1) such that +// their logarithms are uniformly distributed +// |mode| == 6: dst[i] = random number from the distribution given by dist +// If mode is negative, the order of the elements of dst will be reversed. +// For other values of mode Dlatm1 will panic. +// +// If rsign is true and mode is not ±6, each entry of dst will be multiplied by 1 +// or -1 with probability 0.5 +// +// dist specifies the type of distribution to be used when mode == ±6: +// dist == 1: Uniform[0,1) +// dist == 2: Uniform[-1,1) +// dist == 3: Normal(0,1) +// For other values of dist Dlatm1 will panic. +// +// rnd is used as a source of random numbers. +func Dlatm1(dst []float64, mode int, cond float64, rsign bool, dist int, rnd *rand.Rand) { + amode := mode + if amode < 0 { + amode = -amode + } + if amode < 1 || 6 < amode { + panic("testlapack: invalid mode") + } + if cond < 1 { + panic("testlapack: cond < 1") + } + if amode == 6 && (dist < 1 || 3 < dist) { + panic("testlapack: invalid dist") + } + + n := len(dst) + if n == 0 { + return + } + + switch amode { + case 1: + dst[0] = 1 + for i := 1; i < n; i++ { + dst[i] = 1 / cond + } + case 2: + for i := 0; i < n-1; i++ { + dst[i] = 1 + } + dst[n-1] = 1 / cond + case 3: + dst[0] = 1 + if n > 1 { + alpha := math.Pow(cond, -1/float64(n-1)) + for i := 1; i < n; i++ { + dst[i] = math.Pow(alpha, float64(i)) + } + } + case 4: + dst[0] = 1 + if n > 1 { + condInv := 1 / cond + alpha := (1 - condInv) / float64(n-1) + for i := 1; i < n; i++ { + dst[i] = float64(n-i-1)*alpha + condInv + } + } + case 5: + alpha := math.Log(1 / cond) + for i := range dst { + dst[i] = math.Exp(alpha * rnd.Float64()) + } + case 6: + switch dist { + case 1: + for i := range dst { + dst[i] = rnd.Float64() + } + case 2: + for i := range dst { + dst[i] = 2*rnd.Float64() - 1 + } + case 3: + for i := range dst { + dst[i] = rnd.NormFloat64() + } + } + } + + if rsign && amode != 6 { + for i, v := range dst { + if rnd.Float64() < 0.5 { + dst[i] = -v + } + } + } + + if mode < 0 { + for i := 0; i < n/2; i++ { + dst[i], dst[n-i-1] = dst[n-i-1], dst[i] + } + } +} + +// Dlagsy generates an n×n symmetric matrix A, by pre- and post- multiplying a +// real diagonal matrix D with a random orthogonal matrix: +// A = U * D * U^T. +// +// work must have length at least 2*n, otherwise Dlagsy will panic. +// +// The parameter k is unused but it must satisfy +// 0 <= k <= n-1. +func Dlagsy(n, k int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) { + checkMatrix(n, n, a, lda) + if k < 0 || max(0, n-1) < k { + panic("testlapack: invalid value of k") + } + if len(d) != n { + panic("testlapack: bad length of d") + } + if len(work) < 2*n { + panic("testlapack: insufficient work length") + } + + // Initialize lower triangle of A to diagonal matrix. + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = 0 + } + } + for i := 0; i < n; i++ { + a[i*lda+i] = d[i] + } + + bi := blas64.Implementation() + + // Generate lower triangle of symmetric matrix. 
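+	// Each pass applies a random Householder reflection U = I - tau*u*u^T to
+	// A[i:n,i:n] from both sides. The two-sided product U*A*U is formed as a
+	// symmetric rank-2 update A - u*v^T - v*u^T, as computed below.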
+ for i := n - 2; i >= 0; i-- { + for j := 0; j < n-i; j++ { + work[j] = rnd.NormFloat64() + } + wn := bi.Dnrm2(n-i, work[:n-i], 1) + wa := math.Copysign(wn, work[0]) + var tau float64 + if wn != 0 { + wb := work[0] + wa + bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1) + work[0] = 1 + tau = wb / wa + } + + // Apply random reflection to A[i:n,i:n] from the left and the + // right. + // + // Compute y := tau * A * u. + bi.Dsymv(blas.Lower, n-i, tau, a[i*lda+i:], lda, work[:n-i], 1, 0, work[n:2*n-i], 1) + + // Compute v := y - 1/2 * tau * ( y, u ) * u. + alpha := -0.5 * tau * bi.Ddot(n-i, work[n:2*n-i], 1, work[:n-i], 1) + bi.Daxpy(n-i, alpha, work[:n-i], 1, work[n:2*n-i], 1) + + // Apply the transformation as a rank-2 update to A[i:n,i:n]. + bi.Dsyr2(blas.Lower, n-i, -1, work[:n-i], 1, work[n:2*n-i], 1, a[i*lda+i:], lda) + } + + // Store full symmetric matrix. + for i := 1; i < n; i++ { + for j := 0; j < i; j++ { + a[j*lda+i] = a[i*lda+j] + } + } +} + +// Dlagge generates a real general m×n matrix A, by pre- and post-multiplying +// a real diagonal matrix D with random orthogonal matrices: +// A = U*D*V. +// +// d must have length min(m,n), and work must have length m+n, otherwise Dlagge +// will panic. +// +// The parameters ku and kl are unused but they must satisfy +// 0 <= kl <= m-1, +// 0 <= ku <= n-1. +func Dlagge(m, n, kl, ku int, d []float64, a []float64, lda int, rnd *rand.Rand, work []float64) { + checkMatrix(m, n, a, lda) + if kl < 0 || max(0, m-1) < kl { + panic("testlapack: invalid value of kl") + } + if ku < 0 || max(0, n-1) < ku { + panic("testlapack: invalid value of ku") + } + if len(d) != min(m, n) { + panic("testlapack: bad length of d") + } + if len(work) < m+n { + panic("testlapack: insufficient work length") + } + + // Initialize A to diagonal matrix. + for i := 0; i < m; i++ { + for j := 0; j < n; j++ { + a[i*lda+j] = 0 + } + } + for i := 0; i < min(m, n); i++ { + a[i*lda+i] = d[i] + } + + // Quick exit if the user wants a diagonal matrix. + // if kl == 0 && ku == 0 { + // return + // } + + bi := blas64.Implementation() + + // Pre- and post-multiply A by random orthogonal matrices. + for i := min(m, n) - 1; i >= 0; i-- { + if i < m-1 { + for j := 0; j < m-i; j++ { + work[j] = rnd.NormFloat64() + } + wn := bi.Dnrm2(m-i, work[:m-i], 1) + wa := math.Copysign(wn, work[0]) + var tau float64 + if wn != 0 { + wb := work[0] + wa + bi.Dscal(m-i-1, 1/wb, work[1:m-i], 1) + work[0] = 1 + tau = wb / wa + } + + // Multiply A[i:m,i:n] by random reflection from the left. + bi.Dgemv(blas.Trans, m-i, n-i, + 1, a[i*lda+i:], lda, work[:m-i], 1, + 0, work[m:m+n-i], 1) + bi.Dger(m-i, n-i, + -tau, work[:m-i], 1, work[m:m+n-i], 1, + a[i*lda+i:], lda) + } + if i < n-1 { + for j := 0; j < n-i; j++ { + work[j] = rnd.NormFloat64() + } + wn := bi.Dnrm2(n-i, work[:n-i], 1) + wa := math.Copysign(wn, work[0]) + var tau float64 + if wn != 0 { + wb := work[0] + wa + bi.Dscal(n-i-1, 1/wb, work[1:n-i], 1) + work[0] = 1 + tau = wb / wa + } + + // Multiply A[i:m,i:n] by random reflection from the right. + bi.Dgemv(blas.NoTrans, m-i, n-i, + 1, a[i*lda+i:], lda, work[:n-i], 1, + 0, work[n:n+m-i], 1) + bi.Dger(m-i, n-i, + -tau, work[n:n+m-i], 1, work[:n-i], 1, + a[i*lda+i:], lda) + } + } + + // TODO(vladimir-ch): Reduce number of subdiagonals to kl and number of + // superdiagonals to ku. +} + +// dlarnv fills dst with random numbers from a uniform or normal distribution +// specified by dist: +// dist=1: uniform(0,1), +// dist=2: uniform(-1,1), +// dist=3: normal(0,1). +// For other values of dist dlarnv will panic. 
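+// The distributions match those of the LAPACK routine DLARNV, with rnd used
+// as the source of randomness.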
+func dlarnv(dst []float64, dist int, rnd *rand.Rand) { + switch dist { + default: + panic("testlapack: invalid dist") + case 1: + for i := range dst { + dst[i] = rnd.Float64() + } + case 2: + for i := range dst { + dst[i] = 2*rnd.Float64() - 1 + } + case 3: + for i := range dst { + dst[i] = rnd.NormFloat64() + } + } +} + +// dlattr generates an n×n triangular test matrix A with its properties uniquely +// determined by imat and uplo, and returns whether A has unit diagonal. If diag +// is blas.Unit, the diagonal elements are set so that A[k,k]=k. +// +// trans specifies whether the matrix A or its transpose will be used. +// +// If imat is greater than 10, dlattr also generates the right hand side of the +// linear system A*x=b, or A^T*x=b. Valid values of imat are 7, and all between 11 +// and 19, inclusive. +// +// b mush have length n, and work must have length 3*n, and dlattr will panic +// otherwise. +func dlattr(imat int, uplo blas.Uplo, trans blas.Transpose, n int, a []float64, lda int, b, work []float64, rnd *rand.Rand) (diag blas.Diag) { + checkMatrix(n, n, a, lda) + if len(b) != n { + panic("testlapack: bad length of b") + } + if len(work) < 3*n { + panic("testlapack: insufficient length of work") + } + if uplo != blas.Upper && uplo != blas.Lower { + panic("testlapack: bad uplo") + } + if trans != blas.Trans && trans != blas.NoTrans { + panic("testlapack: bad trans") + } + + if n == 0 { + return blas.NonUnit + } + + ulp := dlamchE * dlamchB + smlnum := dlamchS + bignum := (1 - ulp) / smlnum + + bi := blas64.Implementation() + + switch imat { + default: + // TODO(vladimir-ch): Implement the remaining cases. + panic("testlapack: invalid or unimplemented imat") + case 7: + // Identity matrix. The diagonal is set to NaN. + diag = blas.Unit + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + a[i*lda+i] = math.NaN() + for j := i + 1; j < n; j++ { + a[i*lda+j] = 0 + } + } + case blas.Lower: + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = 0 + } + a[i*lda+i] = math.NaN() + } + } + case 11: + // Generate a triangular matrix with elements between -1 and 1, + // give the diagonal norm 2 to make it well-conditioned, and + // make the right hand side large so that it requires scaling. + diag = blas.NonUnit + switch uplo { + case blas.Upper: + for i := 0; i < n-1; i++ { + dlarnv(a[i*lda+i:i*lda+n], 2, rnd) + } + case blas.Lower: + for i := 1; i < n; i++ { + dlarnv(a[i*lda:i*lda+i+1], 2, rnd) + } + } + for i := 0; i < n; i++ { + a[i*lda+i] = math.Copysign(2, a[i*lda+i]) + } + // Set the right hand side so that the largest value is bignum. + dlarnv(b, 2, rnd) + imax := bi.Idamax(n, b, 1) + bscal := bignum / math.Max(1, b[imax]) + bi.Dscal(n, bscal, b, 1) + case 12: + // Make the first diagonal element in the solve small to cause + // immediate overflow when dividing by T[j,j]. The off-diagonal + // elements are small (cnorm[j] < 1). + diag = blas.NonUnit + tscal := 1 / math.Max(1, float64(n-1)) + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + dlarnv(a[i*lda+i:i*lda+n], 2, rnd) + bi.Dscal(n-i-1, tscal, a[i*lda+i+1:], 1) + a[i*lda+i] = math.Copysign(1, a[i*lda+i]) + } + a[(n-1)*lda+n-1] *= smlnum + case blas.Lower: + for i := 0; i < n; i++ { + dlarnv(a[i*lda:i*lda+i+1], 2, rnd) + bi.Dscal(i, tscal, a[i*lda:], 1) + a[i*lda+i] = math.Copysign(1, a[i*lda+i]) + } + a[0] *= smlnum + } + dlarnv(b, 2, rnd) + case 13: + // Make the first diagonal element in the solve small to cause + // immediate overflow when dividing by T[j,j]. 
The off-diagonal + // elements are O(1) (cnorm[j] > 1). + diag = blas.NonUnit + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + dlarnv(a[i*lda+i:i*lda+n], 2, rnd) + a[i*lda+i] = math.Copysign(1, a[i*lda+i]) + } + a[(n-1)*lda+n-1] *= smlnum + case blas.Lower: + for i := 0; i < n; i++ { + dlarnv(a[i*lda:i*lda+i+1], 2, rnd) + a[i*lda+i] = math.Copysign(1, a[i*lda+i]) + } + a[0] *= smlnum + } + dlarnv(b, 2, rnd) + case 14: + // T is diagonal with small numbers on the diagonal to + // make the growth factor underflow, but a small right hand side + // chosen so that the solution does not overflow. + diag = blas.NonUnit + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + a[i*lda+j] = 0 + } + if (n-1-i)&0x2 == 0 { + a[i*lda+i] = smlnum + } else { + a[i*lda+i] = 1 + } + } + case blas.Lower: + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + a[i*lda+j] = 0 + } + if i&0x2 == 0 { + a[i*lda+i] = smlnum + } else { + a[i*lda+i] = 1 + } + } + } + // Set the right hand side alternately zero and small. + switch uplo { + case blas.Upper: + b[0] = 0 + for i := n - 1; i > 0; i -= 2 { + b[i] = 0 + b[i-1] = smlnum + } + case blas.Lower: + for i := 0; i < n-1; i += 2 { + b[i] = 0 + b[i+1] = smlnum + } + b[n-1] = 0 + } + case 15: + // Make the diagonal elements small to cause gradual overflow + // when dividing by T[j,j]. To control the amount of scaling + // needed, the matrix is bidiagonal. + diag = blas.NonUnit + texp := 1 / math.Max(1, float64(n-1)) + tscal := math.Pow(smlnum, texp) + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + a[i*lda+i] = tscal + if i < n-1 { + a[i*lda+i+1] = -1 + } + for j := i + 2; j < n; j++ { + a[i*lda+j] = 0 + } + } + case blas.Lower: + for i := 0; i < n; i++ { + for j := 0; j < i-1; j++ { + a[i*lda+j] = 0 + } + if i > 0 { + a[i*lda+i-1] = -1 + } + a[i*lda+i] = tscal + } + } + dlarnv(b, 2, rnd) + case 16: + // One zero diagonal element. + diag = blas.NonUnit + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + dlarnv(a[i*lda+i:i*lda+n], 2, rnd) + a[i*lda+i] = math.Copysign(2, a[i*lda+i]) + } + case blas.Lower: + for i := 0; i < n; i++ { + dlarnv(a[i*lda:i*lda+i+1], 2, rnd) + a[i*lda+i] = math.Copysign(2, a[i*lda+i]) + } + } + iy := n / 2 + a[iy*lda+iy] = 0 + dlarnv(b, 2, rnd) + bi.Dscal(n, 2, b, 1) + case 17: + // Make the offdiagonal elements large to cause overflow when + // adding a column of T. In the non-transposed case, the matrix + // is constructed to cause overflow when adding a column in + // every other step. 
+ diag = blas.NonUnit + tscal := (1 - ulp) / (dlamchS / ulp) + texp := 1.0 + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + a[i*lda+j] = 0 + } + } + for j := n - 1; j >= 1; j -= 2 { + a[j] = -tscal / float64(n+1) + a[j*lda+j] = 1 + b[j] = texp * (1 - ulp) + a[j-1] = -tscal / float64(n+1) / float64(n+2) + a[(j-1)*lda+j-1] = 1 + b[j-1] = texp * float64(n*n+n-1) + texp *= 2 + } + b[0] = float64(n+1) / float64(n+2) * tscal + case blas.Lower: + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + a[i*lda+j] = 0 + } + } + for j := 0; j < n-1; j += 2 { + a[(n-1)*lda+j] = -tscal / float64(n+1) + a[j*lda+j] = 1 + b[j] = texp * (1 - ulp) + a[(n-1)*lda+j+1] = -tscal / float64(n+1) / float64(n+2) + a[(j+1)*lda+j+1] = 1 + b[j+1] = texp * float64(n*n+n-1) + texp *= 2 + } + b[n-1] = float64(n+1) / float64(n+2) * tscal + } + case 18: + // Generate a unit triangular matrix with elements between -1 + // and 1, and make the right hand side large so that it requires + // scaling. The diagonal is set to NaN. + diag = blas.Unit + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + a[i*lda+i] = math.NaN() + dlarnv(a[i*lda+i+1:i*lda+n], 2, rnd) + } + case blas.Lower: + for i := 0; i < n; i++ { + dlarnv(a[i*lda:i*lda+i], 2, rnd) + a[i*lda+i] = math.NaN() + } + } + // Set the right hand side so that the largest value is bignum. + dlarnv(b, 2, rnd) + iy := bi.Idamax(n, b, 1) + bnorm := math.Abs(b[iy]) + bscal := bignum / math.Max(1, bnorm) + bi.Dscal(n, bscal, b, 1) + case 19: + // Generate a triangular matrix with elements between + // bignum/(n-1) and bignum so that at least one of the column + // norms will exceed bignum. + // Dlatrs cannot handle this case for (typically) n>5. + diag = blas.NonUnit + tleft := bignum / math.Max(1, float64(n-1)) + tscal := bignum * (float64(n-1) / math.Max(1, float64(n))) + switch uplo { + case blas.Upper: + for i := 0; i < n; i++ { + dlarnv(a[i*lda+i:i*lda+n], 2, rnd) + for j := i; j < n; j++ { + aij := a[i*lda+j] + a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij + } + } + case blas.Lower: + for i := 0; i < n; i++ { + dlarnv(a[i*lda:i*lda+i+1], 2, rnd) + for j := 0; j <= i; j++ { + aij := a[i*lda+j] + a[i*lda+j] = math.Copysign(tleft, aij) + tscal*aij + } + } + } + dlarnv(b, 2, rnd) + bi.Dscal(n, 2, b, 1) + } + + // Flip the matrix if the transpose will be used. + if trans == blas.Trans { + switch uplo { + case blas.Upper: + for j := 0; j < n/2; j++ { + bi.Dswap(n-2*j-1, a[j*lda+j:], 1, a[(j+1)*lda+n-j-1:], -lda) + } + case blas.Lower: + for j := 0; j < n/2; j++ { + bi.Dswap(n-2*j-1, a[j*lda+j:], lda, a[(n-j-1)*lda+j+1:], -1) + } + } + } + + return diag +} + +func checkMatrix(m, n int, a []float64, lda int) { + if m < 0 { + panic("testlapack: m < 0") + } + if n < 0 { + panic("testlapack: n < 0") + } + if lda < max(1, n) { + panic("testlapack: lda < max(1, n)") + } + if len(a) < (m-1)*lda+n { + panic("testlapack: insufficient matrix slice length") + } +} + +// randomOrthogonal returns an n×n random orthogonal matrix. +func randomOrthogonal(n int, rnd *rand.Rand) blas64.General { + q := eye(n, n) + x := make([]float64, n) + v := make([]float64, n) + for j := 0; j < n-1; j++ { + // x represents the j-th column of a random matrix. + for i := 0; i < j; i++ { + x[i] = 0 + } + for i := j; i < n; i++ { + x[i] = rnd.NormFloat64() + } + // Compute v that represents the elementary reflector that + // annihilates the subdiagonal elements of x. + reflector(v, x, j) + // Compute Q * H_j and store the result into Q. 
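+		// H_j = I - 2*v*v^T/(v^T*v) is the Householder matrix defined by the
+		// reflector v.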
+ applyReflector(q, q, v) + } + return q +} + +// reflector generates a Householder reflector v that zeros out subdiagonal +// entries in the j-th column of a matrix. +func reflector(v, col []float64, j int) { + n := len(col) + if len(v) != n { + panic("slice length mismatch") + } + if j < 0 || n <= j { + panic("invalid column index") + } + + for i := range v { + v[i] = 0 + } + if j == n-1 { + return + } + s := floats.Norm(col[j:], 2) + if s == 0 { + return + } + v[j] = col[j] + math.Copysign(s, col[j]) + copy(v[j+1:], col[j+1:]) + s = floats.Norm(v[j:], 2) + floats.Scale(1/s, v[j:]) +} + +// applyReflector computes Q*H where H is a Householder matrix represented by +// the Householder reflector v. +func applyReflector(qh blas64.General, q blas64.General, v []float64) { + n := len(v) + if qh.Rows != n || qh.Cols != n { + panic("bad size of qh") + } + if q.Rows != n || q.Cols != n { + panic("bad size of q") + } + qv := make([]float64, n) + blas64.Gemv(blas.NoTrans, 1, q, blas64.Vector{Data: v, Inc: 1}, 0, blas64.Vector{Data: qv, Inc: 1}) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + qh.Data[i*qh.Stride+j] = q.Data[i*q.Stride+j] + } + } + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + qh.Data[i*qh.Stride+j] -= 2 * qv[i] * v[j] + } + } + var norm2 float64 + for _, vi := range v { + norm2 += vi * vi + } + norm2inv := 1 / norm2 + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + qh.Data[i*qh.Stride+j] *= norm2inv + } + } +} diff --git a/vendor/gonum.org/v1/gonum/lapack/testlapack/test_matrices.go b/vendor/gonum.org/v1/gonum/lapack/testlapack/test_matrices.go new file mode 100644 index 0000000..dddafe1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/lapack/testlapack/test_matrices.go @@ -0,0 +1,616 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testlapack + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/blas/blas64" +) + +// A123 is the non-symmetric singular matrix +// [ 1 2 3 ] +// A = [ 4 5 6 ] +// [ 7 8 9 ] +// It has three distinct real eigenvalues. +type A123 struct{} + +func (A123) Matrix() blas64.General { + return blas64.General{ + Rows: 3, + Cols: 3, + Stride: 3, + Data: []float64{ + 1, 2, 3, + 4, 5, 6, + 7, 8, 9, + }, + } +} + +func (A123) Eigenvalues() []complex128 { + return []complex128{16.116843969807043, -1.116843969807043, 0} +} + +func (A123) LeftEV() blas64.General { + return blas64.General{ + Rows: 3, + Cols: 3, + Stride: 3, + Data: []float64{ + -0.464547273387671, -0.570795531228578, -0.677043789069485, + -0.882905959653586, -0.239520420054206, 0.403865119545174, + 0.408248290463862, -0.816496580927726, 0.408248290463863, + }, + } +} + +func (A123) RightEV() blas64.General { + return blas64.General{ + Rows: 3, + Cols: 3, + Stride: 3, + Data: []float64{ + -0.231970687246286, -0.785830238742067, 0.408248290463864, + -0.525322093301234, -0.086751339256628, -0.816496580927726, + -0.818673499356181, 0.612327560228810, 0.408248290463863, + }, + } +} + +// AntisymRandom is a anti-symmetric random matrix. All its eigenvalues are +// imaginary with one zero if the order is odd. 
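+// This follows from A^T = -A: the spectrum is purely imaginary and closed
+// under conjugation, so an odd order forces a zero eigenvalue.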
+type AntisymRandom struct { + mat blas64.General +} + +func NewAntisymRandom(n int, rnd *rand.Rand) AntisymRandom { + a := zeros(n, n, n) + for i := 0; i < n; i++ { + for j := i + 1; j < n; j++ { + r := rnd.NormFloat64() + a.Data[i*a.Stride+j] = r + a.Data[j*a.Stride+i] = -r + } + } + return AntisymRandom{a} +} + +func (a AntisymRandom) Matrix() blas64.General { + return cloneGeneral(a.mat) +} + +func (AntisymRandom) Eigenvalues() []complex128 { + return nil +} + +// Circulant is a generally non-symmetric matrix given by +// A[i,j] = 1 + (j-i+n)%n. +// For example, for n=5, +// [ 1 2 3 4 5 ] +// [ 5 1 2 3 4 ] +// A = [ 4 5 1 2 3 ] +// [ 3 4 5 1 2 ] +// [ 2 3 4 5 1 ] +// It has real and complex eigenvalues, some possibly repeated. +type Circulant int + +func (c Circulant) Matrix() blas64.General { + n := int(c) + a := zeros(n, n, n) + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + a.Data[i*a.Stride+j] = float64(1 + (j-i+n)%n) + } + } + return a +} + +func (c Circulant) Eigenvalues() []complex128 { + n := int(c) + w := rootsOfUnity(n) + ev := make([]complex128, n) + for k := 0; k < n; k++ { + ev[k] = complex(float64(n), 0) + } + for i := n - 1; i > 0; i-- { + for k := 0; k < n; k++ { + ev[k] = ev[k]*w[k] + complex(float64(i), 0) + } + } + return ev +} + +// Clement is a generally non-symmetric matrix given by +// A[i,j] = i+1, if j == i+1, +// = n-i, if j == i-1, +// = 0, otherwise. +// For example, for n=5, +// [ . 1 . . . ] +// [ 4 . 2 . . ] +// A = [ . 3 . 3 . ] +// [ . . 2 . 4 ] +// [ . . . 1 . ] +// It has n distinct real eigenvalues. +type Clement int + +func (c Clement) Matrix() blas64.General { + n := int(c) + a := zeros(n, n, n) + for i := 0; i < n; i++ { + if i < n-1 { + a.Data[i*a.Stride+i+1] = float64(i + 1) + } + if i > 0 { + a.Data[i*a.Stride+i-1] = float64(n - i) + } + } + return a +} + +func (c Clement) Eigenvalues() []complex128 { + n := int(c) + ev := make([]complex128, n) + for i := range ev { + ev[i] = complex(float64(-n+2*i+1), 0) + } + return ev +} + +// Creation is a singular non-symmetric matrix given by +// A[i,j] = i, if j == i-1, +// = 0, otherwise. +// For example, for n=5, +// [ . . . . . ] +// [ 1 . . . . ] +// A = [ . 2 . . . ] +// [ . . 3 . . ] +// [ . . . 4 . ] +// Zero is its only eigenvalue. +type Creation int + +func (c Creation) Matrix() blas64.General { + n := int(c) + a := zeros(n, n, n) + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = float64(i) + } + return a +} + +func (c Creation) Eigenvalues() []complex128 { + return make([]complex128, int(c)) +} + +// Diagonal is a diagonal matrix given by +// A[i,j] = i+1, if i == j, +// = 0, otherwise. +// For example, for n=5, +// [ 1 . . . . ] +// [ . 2 . . . ] +// A = [ . . 3 . . ] +// [ . . . 4 . ] +// [ . . . . 5 ] +// It has n real eigenvalues {1,...,n}. +type Diagonal int + +func (d Diagonal) Matrix() blas64.General { + n := int(d) + a := zeros(n, n, n) + for i := 0; i < n; i++ { + a.Data[i*a.Stride+i] = float64(i) + } + return a +} + +func (d Diagonal) Eigenvalues() []complex128 { + n := int(d) + ev := make([]complex128, n) + for i := range ev { + ev[i] = complex(float64(i), 0) + } + return ev +} + +// Downshift is a non-singular upper Hessenberg matrix given by +// A[i,j] = 1, if (i-j+n)%n == 1, +// = 0, otherwise. +// For example, for n=5, +// [ . . . . 1 ] +// [ 1 . . . . ] +// A = [ . 1 . . . ] +// [ . . 1 . . ] +// [ . . . 1 . ] +// Its eigenvalues are the complex roots of unity. 
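+// They are exp(2πik/n) for k = 0, ..., n-1, as returned by rootsOfUnity.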
+type Downshift int + +func (d Downshift) Matrix() blas64.General { + n := int(d) + a := zeros(n, n, n) + a.Data[n-1] = 1 + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = 1 + } + return a +} + +func (d Downshift) Eigenvalues() []complex128 { + return rootsOfUnity(int(d)) +} + +// Fibonacci is an upper Hessenberg matrix with 3 distinct real eigenvalues. For +// example, for n=5, +// [ . 1 . . . ] +// [ 1 1 . . . ] +// A = [ . 1 1 . . ] +// [ . . 1 1 . ] +// [ . . . 1 1 ] +type Fibonacci int + +func (f Fibonacci) Matrix() blas64.General { + n := int(f) + a := zeros(n, n, n) + if n > 1 { + a.Data[1] = 1 + } + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = 1 + a.Data[i*a.Stride+i] = 1 + } + return a +} + +func (f Fibonacci) Eigenvalues() []complex128 { + n := int(f) + ev := make([]complex128, n) + if n == 0 || n == 1 { + return ev + } + phi := 0.5 * (1 + math.Sqrt(5)) + ev[0] = complex(phi, 0) + for i := 1; i < n-1; i++ { + ev[i] = 1 + 0i + } + ev[n-1] = complex(1-phi, 0) + return ev +} + +// Gear is a singular non-symmetric matrix with real eigenvalues. For example, +// for n=5, +// [ . 1 . . 1 ] +// [ 1 . 1 . . ] +// A = [ . 1 . 1 . ] +// [ . . 1 . 1 ] +// [-1 . . 1 . ] +type Gear int + +func (g Gear) Matrix() blas64.General { + n := int(g) + a := zeros(n, n, n) + if n == 1 { + return a + } + for i := 0; i < n-1; i++ { + a.Data[i*a.Stride+i+1] = 1 + } + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = 1 + } + a.Data[n-1] = 1 + a.Data[(n-1)*a.Stride] = -1 + return a +} + +func (g Gear) Eigenvalues() []complex128 { + n := int(g) + ev := make([]complex128, n) + if n == 0 || n == 1 { + return ev + } + if n == 2 { + ev[0] = complex(0, 1) + ev[1] = complex(0, -1) + return ev + } + w := 0 + ev[w] = math.Pi / 2 + w++ + phi := (n - 1) / 2 + for p := 1; p <= phi; p++ { + ev[w] = complex(float64(2*p)*math.Pi/float64(n), 0) + w++ + } + phi = n / 2 + for p := 1; p <= phi; p++ { + ev[w] = complex(float64(2*p-1)*math.Pi/float64(n), 0) + w++ + } + for i, v := range ev { + ev[i] = complex(2*math.Cos(real(v)), 0) + } + return ev +} + +// Grcar is an upper Hessenberg matrix given by +// A[i,j] = -1 if i == j+1, +// = 1 if i <= j and j <= i+k, +// = 0 otherwise. +// For example, for n=5 and k=2, +// [ 1 1 1 . . ] +// [ -1 1 1 1 . ] +// A = [ . -1 1 1 1 ] +// [ . . -1 1 1 ] +// [ . . . -1 1 ] +// The matrix has sensitive eigenvalues but they are not given explicitly. +type Grcar struct { + N int + K int +} + +func (g Grcar) Matrix() blas64.General { + n := g.N + a := zeros(n, n, n) + for k := 0; k <= g.K; k++ { + for i := 0; i < n-k; i++ { + a.Data[i*a.Stride+i+k] = 1 + } + } + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = -1 + } + return a +} + +func (Grcar) Eigenvalues() []complex128 { + return nil +} + +// Hanowa is a non-symmetric non-singular matrix of even order given by +// A[i,j] = alpha if i == j, +// = -i-1 if i < n/2 and j == i + n/2, +// = i+1-n/2 if i >= n/2 and j == i - n/2, +// = 0 otherwise. +// The matrix has complex eigenvalues. +type Hanowa struct { + N int // Order of the matrix, must be even. 
+ Alpha float64 +} + +func (h Hanowa) Matrix() blas64.General { + if h.N&0x1 != 0 { + panic("lapack: matrix order must be even") + } + n := h.N + a := zeros(n, n, n) + for i := 0; i < n; i++ { + a.Data[i*a.Stride+i] = h.Alpha + } + for i := 0; i < n/2; i++ { + a.Data[i*a.Stride+i+n/2] = float64(-i - 1) + } + for i := n / 2; i < n; i++ { + a.Data[i*a.Stride+i-n/2] = float64(i + 1 - n/2) + } + return a +} + +func (h Hanowa) Eigenvalues() []complex128 { + if h.N&0x1 != 0 { + panic("lapack: matrix order must be even") + } + n := int(h.N) + ev := make([]complex128, n) + for i := 0; i < n/2; i++ { + ev[2*i] = complex(h.Alpha, float64(-i-1)) + ev[2*i+1] = complex(h.Alpha, float64(i+1)) + } + return ev +} + +// Lesp is a tridiagonal, generally non-symmetric matrix given by +// A[i,j] = -2*i-5 if i == j, +// = 1/(i+1) if i == j-1, +// = j+1 if i == j+1. +// For example, for n=5, +// [ -5 2 . . . ] +// [ 1/2 -7 3 . . ] +// A = [ . 1/3 -9 4 . ] +// [ . . 1/4 -11 5 ] +// [ . . . 1/5 -13 ]. +// The matrix has sensitive eigenvalues but they are not given explicitly. +type Lesp int + +func (l Lesp) Matrix() blas64.General { + n := int(l) + a := zeros(n, n, n) + for i := 0; i < n; i++ { + a.Data[i*a.Stride+i] = float64(-2*i - 5) + } + for i := 0; i < n-1; i++ { + a.Data[i*a.Stride+i+1] = float64(i + 2) + } + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = 1 / float64(i+1) + } + return a +} + +func (Lesp) Eigenvalues() []complex128 { + return nil +} + +// Rutis is the 4×4 non-symmetric matrix +// [ 4 -5 0 3 ] +// A = [ 0 4 -3 -5 ] +// [ 5 -3 4 0 ] +// [ 3 0 5 4 ] +// It has two distinct real eigenvalues and a pair of complex eigenvalues. +type Rutis struct{} + +func (Rutis) Matrix() blas64.General { + return blas64.General{ + Rows: 4, + Cols: 4, + Stride: 4, + Data: []float64{ + 4, -5, 0, 3, + 0, 4, -3, -5, + 5, -3, 4, 0, + 3, 0, 5, 4, + }, + } +} + +func (Rutis) Eigenvalues() []complex128 { + return []complex128{12, 1 + 5i, 1 - 5i, 2} +} + +// Tris is a tridiagonal matrix given by +// A[i,j] = x if i == j-1, +// = y if i == j, +// = z if i == j+1. +// If x*z is negative, the matrix has complex eigenvalues. +type Tris struct { + N int + X, Y, Z float64 +} + +func (t Tris) Matrix() blas64.General { + n := t.N + a := zeros(n, n, n) + for i := 1; i < n; i++ { + a.Data[i*a.Stride+i-1] = t.X + } + for i := 0; i < n; i++ { + a.Data[i*a.Stride+i] = t.Y + } + for i := 0; i < n-1; i++ { + a.Data[i*a.Stride+i+1] = t.Z + } + return a +} + +func (t Tris) Eigenvalues() []complex128 { + n := int(t.N) + ev := make([]complex128, n) + for i := range ev { + angle := float64(i+1) * math.Pi / float64(n+1) + arg := t.X * t.Z + if arg >= 0 { + ev[i] = complex(t.Y+2*math.Sqrt(arg)*math.Cos(angle), 0) + } else { + ev[i] = complex(t.Y, 2*math.Sqrt(-arg)*math.Cos(angle)) + } + } + return ev +} + +// Wilk4 is a 4×4 lower triangular matrix with 4 distinct real eigenvalues. +type Wilk4 struct{} + +func (Wilk4) Matrix() blas64.General { + return blas64.General{ + Rows: 4, + Cols: 4, + Stride: 4, + Data: []float64{ + 0.9143e-4, 0.0, 0.0, 0.0, + 0.8762, 0.7156e-4, 0.0, 0.0, + 0.7943, 0.8143, 0.9504e-4, 0.0, + 0.8017, 0.6123, 0.7165, 0.7123e-4, + }, + } +} + +func (Wilk4) Eigenvalues() []complex128 { + return []complex128{ + 0.9504e-4, 0.9143e-4, 0.7156e-4, 0.7123e-4, + } +} + +// Wilk12 is a 12×12 lower Hessenberg matrix with 12 distinct real eigenvalues. 
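+// Its eigenvalues, listed in Eigenvalues below, span about three orders of
+// magnitude, from roughly 32.2 down to roughly 0.031.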
+type Wilk12 struct{} + +func (Wilk12) Matrix() blas64.General { + return blas64.General{ + Rows: 12, + Cols: 12, + Stride: 12, + Data: []float64{ + 12, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11, 11, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10, 10, 10, 9, 0, 0, 0, 0, 0, 0, 0, 0, + 9, 9, 9, 9, 8, 0, 0, 0, 0, 0, 0, 0, + 8, 8, 8, 8, 8, 7, 0, 0, 0, 0, 0, 0, + 7, 7, 7, 7, 7, 7, 6, 0, 0, 0, 0, 0, + 6, 6, 6, 6, 6, 6, 6, 5, 0, 0, 0, 0, + 5, 5, 5, 5, 5, 5, 5, 5, 4, 0, 0, 0, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 3, 0, 0, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 0, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + }, + } +} + +func (Wilk12) Eigenvalues() []complex128 { + return []complex128{ + 32.2288915015722210, + 20.1989886458770691, + 12.3110774008685340, + 6.9615330855671154, + 3.5118559485807528, + 1.5539887091319704, + 0.6435053190136506, + 0.2847497205488856, + 0.1436465181918488, + 0.0812276683076552, + 0.0495074140194613, + 0.0310280683208907, + } +} + +// Wilk20 is a 20×20 lower Hessenberg matrix. If the parameter is 0, the matrix +// has 20 distinct real eigenvalues. If the parameter is 1e-10, the matrix has 6 +// real eigenvalues and 7 pairs of complex eigenvalues. +type Wilk20 float64 + +func (w Wilk20) Matrix() blas64.General { + a := zeros(20, 20, 20) + for i := 0; i < 20; i++ { + a.Data[i*a.Stride+i] = float64(i + 1) + } + for i := 0; i < 19; i++ { + a.Data[i*a.Stride+i+1] = 20 + } + a.Data[19*a.Stride] = float64(w) + return a +} + +func (w Wilk20) Eigenvalues() []complex128 { + if float64(w) == 0 { + ev := make([]complex128, 20) + for i := range ev { + ev[i] = complex(float64(i+1), 0) + } + return ev + } + return nil +} + +// Zero is a matrix with all elements equal to zero. +type Zero int + +func (z Zero) Matrix() blas64.General { + n := int(z) + return zeros(n, n, n) +} + +func (z Zero) Eigenvalues() []complex128 { + n := int(z) + return make([]complex128, n) +} diff --git a/vendor/gonum.org/v1/gonum/mat/band.go b/vendor/gonum.org/v1/gonum/mat/band.go new file mode 100644 index 0000000..17c1f86 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/band.go @@ -0,0 +1,277 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + bandDense *BandDense + _ Matrix = bandDense + _ Banded = bandDense + _ RawBander = bandDense + + _ NonZeroDoer = bandDense + _ RowNonZeroDoer = bandDense + _ ColNonZeroDoer = bandDense +) + +// BandDense represents a band matrix in dense storage format. +type BandDense struct { + mat blas64.Band +} + +// Banded is a band matrix representation. +type Banded interface { + Matrix + // Bandwidth returns the lower and upper bandwidth values for + // the matrix. The total bandwidth of the matrix is kl+ku+1. + Bandwidth() (kl, ku int) + + // TBand is the equivalent of the T() method in the Matrix + // interface but guarantees the transpose is of banded type. + TBand() Banded +} + +// A RawBander can return a blas64.Band representation of the receiver. +// Changes to the blas64.Band.Data slice will be reflected in the original +// matrix, changes to the Rows, Cols, KL, KU and Stride fields will not. +type RawBander interface { + RawBand() blas64.Band +} + +// A MutableBanded can set elements of a band matrix. 
+type MutableBanded interface {
+	Banded
+	SetBand(i, j int, v float64)
+}
+
+var (
+	_ Matrix            = TransposeBand{}
+	_ Banded            = TransposeBand{}
+	_ UntransposeBander = TransposeBand{}
+)
+
+// TransposeBand is a type for performing an implicit transpose of a band
+// matrix. It implements the Banded interface, returning values from the
+// transpose of the matrix within.
+type TransposeBand struct {
+	Banded Banded
+}
+
+// At returns the value of the element at row i and column j of the transposed
+// matrix, that is, row j and column i of the Banded field.
+func (t TransposeBand) At(i, j int) float64 {
+	return t.Banded.At(j, i)
+}
+
+// Dims returns the dimensions of the transposed matrix.
+func (t TransposeBand) Dims() (r, c int) {
+	c, r = t.Banded.Dims()
+	return r, c
+}
+
+// T performs an implicit transpose by returning the Banded field.
+func (t TransposeBand) T() Matrix {
+	return t.Banded
+}
+
+// Bandwidth returns the lower and upper bandwidth values for
+// the transposed matrix.
+func (t TransposeBand) Bandwidth() (kl, ku int) {
+	kl, ku = t.Banded.Bandwidth()
+	return ku, kl
+}
+
+// TBand performs an implicit transpose by returning the Banded field.
+func (t TransposeBand) TBand() Banded {
+	return t.Banded
+}
+
+// Untranspose returns the Banded field.
+func (t TransposeBand) Untranspose() Matrix {
+	return t.Banded
+}
+
+// UntransposeBand returns the Banded field.
+func (t TransposeBand) UntransposeBand() Banded {
+	return t.Banded
+}
+
+// NewBandDense creates a new Band matrix with r rows and c columns. If data == nil,
+// a new slice is allocated for the backing slice. If len(data) == min(r, c+kl)*(kl+ku+1),
+// data is used as the backing slice, and changes to the elements of the returned
+// BandDense will be reflected in data. If neither of these is true, NewBandDense
+// will panic. kl must be at least zero and less than r, and ku must be at least
+// zero and less than c, otherwise NewBandDense will panic.
+// NewBandDense will panic if either r or c is zero.
+//
+// The data must be arranged in row-major order constructed by removing the zeros
+// from the rows outside the band and aligning the diagonals. For example, the matrix
+//    1  2  3  0  0  0
+//    4  5  6  7  0  0
+//    0  8  9 10 11  0
+//    0  0 12 13 14 15
+//    0  0  0 16 17 18
+//    0  0  0  0 19 20
+// becomes (* entries are never accessed)
+//     *  1  2  3
+//     4  5  6  7
+//     8  9 10 11
+//    12 13 14 15
+//    16 17 18  *
+//    19 20  *  *
+// which is passed to NewBandDense as []float64{*, 1, 2, 3, 4, ...} with kl=1 and ku=2.
+// Only the values in the band portion of the matrix are used.
+func NewBandDense(r, c, kl, ku int, data []float64) *BandDense {
+	if r <= 0 || c <= 0 || kl < 0 || ku < 0 {
+		if r == 0 || c == 0 {
+			panic(ErrZeroLength)
+		}
+		panic("mat: negative dimension")
+	}
+	if kl+1 > r || ku+1 > c {
+		panic("mat: band out of range")
+	}
+	bc := kl + ku + 1
+	if data != nil && len(data) != min(r, c+kl)*bc {
+		panic(ErrShape)
+	}
+	if data == nil {
+		data = make([]float64, min(r, c+kl)*bc)
+	}
+	return &BandDense{
+		mat: blas64.Band{
+			Rows:   r,
+			Cols:   c,
+			KL:     kl,
+			KU:     ku,
+			Stride: bc,
+			Data:   data,
+		},
+	}
+}
+
+// NewDiagonalRect is a convenience function that returns a diagonal matrix represented by a
+// BandDense. The length of data must be min(r, c) otherwise NewDiagonalRect will panic.
+func NewDiagonalRect(r, c int, data []float64) *BandDense {
+	return NewBandDense(r, c, 0, 0, data)
+}
+
+// Dims returns the number of rows and columns in the matrix.
+func (b *BandDense) Dims() (r, c int) { + return b.mat.Rows, b.mat.Cols +} + +// Bandwidth returns the upper and lower bandwidths of the matrix. +func (b *BandDense) Bandwidth() (kl, ku int) { + return b.mat.KL, b.mat.KU +} + +// T performs an implicit transpose by returning the receiver inside a Transpose. +func (b *BandDense) T() Matrix { + return Transpose{b} +} + +// TBand performs an implicit transpose by returning the receiver inside a TransposeBand. +func (b *BandDense) TBand() Banded { + return TransposeBand{b} +} + +// RawBand returns the underlying blas64.Band used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in returned blas64.Band. +func (b *BandDense) RawBand() blas64.Band { + return b.mat +} + +// SetRawBand sets the underlying blas64.Band used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in the input. +func (b *BandDense) SetRawBand(mat blas64.Band) { + b.mat = mat +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (b *BandDense) DiagView() Diagonal { + n := min(b.mat.Rows, b.mat.Cols) + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: b.mat.Stride, + Data: b.mat.Data[b.mat.KL : (n-1)*b.mat.Stride+b.mat.KL+1], + }, + } +} + +// DoNonZero calls the function fn for each of the non-zero elements of b. The function fn +// takes a row/column index and the element value of b at (i, j). +func (b *BandDense) DoNonZero(fn func(i, j int, v float64)) { + for i := 0; i < min(b.mat.Rows, b.mat.Cols+b.mat.KL); i++ { + for j := max(0, i-b.mat.KL); j < min(b.mat.Cols, i+b.mat.KU+1); j++ { + v := b.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } +} + +// DoRowNonZero calls the function fn for each of the non-zero elements of row i of b. The function fn +// takes a row/column index and the element value of b at (i, j). +func (b *BandDense) DoRowNonZero(i int, fn func(i, j int, v float64)) { + if i < 0 || b.mat.Rows <= i { + panic(ErrRowAccess) + } + for j := max(0, i-b.mat.KL); j < min(b.mat.Cols, i+b.mat.KU+1); j++ { + v := b.at(i, j) + if v != 0 { + fn(i, j, v) + } + } +} + +// DoColNonZero calls the function fn for each of the non-zero elements of column j of b. The function fn +// takes a row/column index and the element value of b at (i, j). +func (b *BandDense) DoColNonZero(j int, fn func(i, j int, v float64)) { + if j < 0 || b.mat.Cols <= j { + panic(ErrColAccess) + } + for i := 0; i < min(b.mat.Rows, b.mat.Cols+b.mat.KL); i++ { + if i-b.mat.KL <= j && j < i+b.mat.KU+1 { + v := b.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } +} + +// Zero sets all of the matrix elements to zero. +func (b *BandDense) Zero() { + m := b.mat.Rows + kL := b.mat.KL + nCol := b.mat.KU + 1 + kL + for i := 0; i < m; i++ { + l := max(0, kL-i) + u := min(nCol, m+kL-i) + zero(b.mat.Data[i*b.mat.Stride+l : i*b.mat.Stride+u]) + } +} + +// Trace computes the trace of the matrix. +func (b *BandDense) Trace() float64 { + r, c := b.Dims() + if r != c { + panic(ErrShape) + } + rb := b.RawBand() + var tr float64 + for i := 0; i < r; i++ { + tr += rb.Data[rb.KL+i*rb.Stride] + } + return tr +} diff --git a/vendor/gonum.org/v1/gonum/mat/cdense.go b/vendor/gonum.org/v1/gonum/mat/cdense.go new file mode 100644 index 0000000..9c29d1a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/cdense.go @@ -0,0 +1,168 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+
+package mat
+
+import "gonum.org/v1/gonum/blas/cblas128"
+
+// CDense is a dense matrix representation with complex data.
+type CDense struct {
+	mat cblas128.General
+
+	capRows, capCols int
+}
+
+// Dims returns the number of rows and columns in the matrix.
+func (m *CDense) Dims() (r, c int) {
+	return m.mat.Rows, m.mat.Cols
+}
+
+// H performs an implicit conjugate transpose by returning the receiver inside a
+// Conjugate.
+func (m *CDense) H() CMatrix {
+	return Conjugate{m}
+}
+
+// NewCDense creates a new complex Dense matrix with r rows and c columns.
+// If data == nil, a new slice is allocated for the backing slice.
+// If len(data) == r*c, data is used as the backing slice, and changes to the
+// elements of the returned CDense will be reflected in data.
+// If neither of these is true, NewCDense will panic.
+// NewCDense will panic if either r or c is zero.
+//
+// The data must be arranged in row-major order, i.e. the (i*c + j)-th
+// element in the data slice is the {i, j}-th element in the matrix.
+func NewCDense(r, c int, data []complex128) *CDense {
+	if r <= 0 || c <= 0 {
+		if r == 0 || c == 0 {
+			panic(ErrZeroLength)
+		}
+		panic("mat: negative dimension")
+	}
+	if data != nil && r*c != len(data) {
+		panic(ErrShape)
+	}
+	if data == nil {
+		data = make([]complex128, r*c)
+	}
+	return &CDense{
+		mat: cblas128.General{
+			Rows:   r,
+			Cols:   c,
+			Stride: c,
+			Data:   data,
+		},
+		capRows: r,
+		capCols: c,
+	}
+}
+
+// reuseAs resizes an empty matrix to a r×c matrix,
+// or checks that a non-empty matrix is r×c.
+//
+// reuseAs must be kept in sync with reuseAsZeroed.
+func (m *CDense) reuseAs(r, c int) {
+	if m.mat.Rows > m.capRows || m.mat.Cols > m.capCols {
+		// Panic as a string, not a mat.Error.
+		panic("mat: caps not correctly set")
+	}
+	if r == 0 || c == 0 {
+		panic(ErrZeroLength)
+	}
+	if m.IsZero() {
+		m.mat = cblas128.General{
+			Rows:   r,
+			Cols:   c,
+			Stride: c,
+			Data:   useC(m.mat.Data, r*c),
+		}
+		m.capRows = r
+		m.capCols = c
+		return
+	}
+	if r != m.mat.Rows || c != m.mat.Cols {
+		panic(ErrShape)
+	}
+}
+
+func (m *CDense) reuseAsZeroed(r, c int) {
+	// This must be kept in-sync with reuseAs.
+	if m.mat.Rows > m.capRows || m.mat.Cols > m.capCols {
+		// Panic as a string, not a mat.Error.
+		panic("mat: caps not correctly set")
+	}
+	if r == 0 || c == 0 {
+		panic(ErrZeroLength)
+	}
+	if m.IsZero() {
+		m.mat = cblas128.General{
+			Rows:   r,
+			Cols:   c,
+			Stride: c,
+			Data:   useZeroedC(m.mat.Data, r*c),
+		}
+		m.capRows = r
+		m.capCols = c
+		return
+	}
+	if r != m.mat.Rows || c != m.mat.Cols {
+		panic(ErrShape)
+	}
+	m.Zero()
+}
+
+// Reset zeros the dimensions of the matrix so that it can be reused as the
+// receiver of a dimensionally restricted operation.
+//
+// See the Reseter interface for more information.
+func (m *CDense) Reset() {
+	// Row, Cols and Stride must be zeroed in unison.
+	m.mat.Rows, m.mat.Cols, m.mat.Stride = 0, 0, 0
+	m.capRows, m.capCols = 0, 0
+	m.mat.Data = m.mat.Data[:0]
+}
+
+// IsZero returns whether the receiver is zero-sized. Zero-sized matrices can be the
+// receiver for size-restricted operations. CDense matrices can be zeroed using Reset.
+func (m *CDense) IsZero() bool {
+	// It must be the case that m.Dims() returns
+	// zeros in this case. See comment in Reset().
+	return m.mat.Stride == 0
+}
+
+// Zero sets all of the matrix elements to zero.
+func (m *CDense) Zero() { + r := m.mat.Rows + c := m.mat.Cols + for i := 0; i < r; i++ { + zeroC(m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+c]) + } +} + +// Copy makes a copy of elements of a into the receiver. It is similar to the +// built-in copy; it copies as much as the overlap between the two matrices and +// returns the number of rows and columns it copied. If a aliases the receiver +// and is a transposed Dense or VecDense, with a non-unitary increment, Copy will +// panic. +// +// See the Copier interface for more information. +func (m *CDense) Copy(a CMatrix) (r, c int) { + r, c = a.Dims() + if a == m { + return r, c + } + r = min(r, m.mat.Rows) + c = min(c, m.mat.Cols) + if r == 0 || c == 0 { + return 0, 0 + } + // TODO(btracey): Check for overlap when complex version exists. + // TODO(btracey): Add fast-paths. + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + m.set(i, j, a.At(i, j)) + } + } + return r, c +} diff --git a/vendor/gonum.org/v1/gonum/mat/cholesky.go b/vendor/gonum.org/v1/gonum/mat/cholesky.go new file mode 100644 index 0000000..8f54e10 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/cholesky.go @@ -0,0 +1,673 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack/lapack64" +) + +const ( + badTriangle = "mat: invalid triangle" + badCholesky = "mat: invalid Cholesky factorization" +) + +var ( + _ Matrix = (*Cholesky)(nil) + _ Symmetric = (*Cholesky)(nil) +) + +// Cholesky is a symmetric positive definite matrix represented by its +// Cholesky decomposition. +// +// The decomposition can be constructed using the Factorize method. The +// factorization itself can be extracted using the UTo or LTo methods, and the +// original symmetric matrix can be recovered with ToSym. +// +// Note that this matrix representation is useful for certain operations, in +// particular finding solutions to linear equations. It is very inefficient +// at other operations, in particular At is slow. +// +// Cholesky methods may only be called on a value that has been successfully +// initialized by a call to Factorize that has returned true. Calls to methods +// of an unsuccessful Cholesky factorization will panic. +type Cholesky struct { + // The chol pointer must never be retained as a pointer outside the Cholesky + // struct, either by returning chol outside the struct or by setting it to + // a pointer coming from outside. The same prohibition applies to the data + // slice within chol. + chol *TriDense + cond float64 +} + +// updateCond updates the condition number of the Cholesky decomposition. If +// norm > 0, then that norm is used as the norm of the original matrix A, otherwise +// the norm is estimated from the decomposition. +func (c *Cholesky) updateCond(norm float64) { + n := c.chol.mat.N + work := getFloats(3*n, false) + defer putFloats(work) + if norm < 0 { + // This is an approximation. By the definition of a norm, + // |AB| <= |A| |B|. + // Since A = U^T*U, we get for the condition number κ that + // κ(A) := |A| |A^-1| = |U^T*U| |A^-1| <= |U^T| |U| |A^-1|, + // so this will overestimate the condition number somewhat. + // The norm of the original factorized matrix cannot be stored + // because of update possibilities. 
+		unorm := lapack64.Lantr(CondNorm, c.chol.mat, work)
+		lnorm := lapack64.Lantr(CondNormTrans, c.chol.mat, work)
+		norm = unorm * lnorm
+	}
+	sym := c.chol.asSymBlas()
+	iwork := getInts(n, false)
+	v := lapack64.Pocon(sym, norm, work, iwork)
+	putInts(iwork)
+	c.cond = 1 / v
+}
+
+// Dims returns the dimensions of the matrix.
+func (ch *Cholesky) Dims() (r, c int) {
+	if !ch.valid() {
+		panic(badCholesky)
+	}
+	r, c = ch.chol.Dims()
+	return r, c
+}
+
+// At returns the element at row i, column j.
+func (c *Cholesky) At(i, j int) float64 {
+	if !c.valid() {
+		panic(badCholesky)
+	}
+	n := c.Symmetric()
+	if uint(i) >= uint(n) {
+		panic(ErrRowAccess)
+	}
+	if uint(j) >= uint(n) {
+		panic(ErrColAccess)
+	}
+
+	var val float64
+	for k := 0; k <= min(i, j); k++ {
+		val += c.chol.at(k, i) * c.chol.at(k, j)
+	}
+	return val
+}
+
+// T returns the receiver, the transpose of a symmetric matrix.
+func (c *Cholesky) T() Matrix {
+	return c
+}
+
+// Symmetric implements the Symmetric interface and returns the number of rows
+// in the matrix (this is also the number of columns).
+func (c *Cholesky) Symmetric() int {
+	r, _ := c.chol.Dims()
+	return r
+}
+
+// Cond returns the condition number of the factorized matrix.
+func (c *Cholesky) Cond() float64 {
+	if !c.valid() {
+		panic(badCholesky)
+	}
+	return c.cond
+}
+
+// Factorize calculates the Cholesky decomposition of the matrix A and returns
+// whether the matrix is positive definite. If Factorize returns false, the
+// factorization must not be used.
+func (c *Cholesky) Factorize(a Symmetric) (ok bool) {
+	n := a.Symmetric()
+	if c.chol == nil {
+		c.chol = NewTriDense(n, Upper, nil)
+	} else {
+		c.chol = NewTriDense(n, Upper, use(c.chol.mat.Data, n*n))
+	}
+	copySymIntoTriangle(c.chol, a)
+
+	sym := c.chol.asSymBlas()
+	work := getFloats(c.chol.mat.N, false)
+	norm := lapack64.Lansy(CondNorm, sym, work)
+	putFloats(work)
+	_, ok = lapack64.Potrf(sym)
+	if ok {
+		c.updateCond(norm)
+	} else {
+		c.Reset()
+	}
+	return ok
+}
+
+// Reset resets the factorization so that it can be reused as the receiver of a
+// dimensionally restricted operation.
+func (c *Cholesky) Reset() {
+	if c.chol != nil {
+		c.chol.Reset()
+	}
+	c.cond = math.Inf(1)
+}
+
+// SetFromU sets the Cholesky decomposition from the given triangular matrix.
+// SetFromU panics if t is not upper triangular. Note that t is copied into,
+// not stored inside, the receiver.
+func (c *Cholesky) SetFromU(t Triangular) {
+	n, kind := t.Triangle()
+	if kind != Upper {
+		panic("cholesky: matrix must be upper triangular")
+	}
+	if c.chol == nil {
+		c.chol = NewTriDense(n, Upper, nil)
+	} else {
+		c.chol = NewTriDense(n, Upper, use(c.chol.mat.Data, n*n))
+	}
+	c.chol.Copy(t)
+	c.updateCond(-1)
+}
+
+// Clone makes a copy of the input Cholesky into the receiver, overwriting the
+// previous value of the receiver. Clone does not place any restrictions on receiver
+// shape. Clone panics if the input Cholesky is not the result of a valid decomposition.
+func (c *Cholesky) Clone(chol *Cholesky) {
+	if !chol.valid() {
+		panic(badCholesky)
+	}
+	n := chol.Symmetric()
+	if c.chol == nil {
+		c.chol = NewTriDense(n, Upper, nil)
+	} else {
+		c.chol = NewTriDense(n, Upper, use(c.chol.mat.Data, n*n))
+	}
+	c.chol.Copy(chol.chol)
+	c.cond = chol.cond
+}
+
+// Det returns the determinant of the matrix that has been factorized.
+func (c *Cholesky) Det() float64 { + if !c.valid() { + panic(badCholesky) + } + return math.Exp(c.LogDet()) +} + +// LogDet returns the log of the determinant of the matrix that has been factorized. +func (c *Cholesky) LogDet() float64 { + if !c.valid() { + panic(badCholesky) + } + var det float64 + for i := 0; i < c.chol.mat.N; i++ { + det += 2 * math.Log(c.chol.mat.Data[i*c.chol.mat.Stride+i]) + } + return det +} + +// SolveTo finds the matrix X that solves A * X = B where A is represented +// by the Cholesky decomposition. The result is stored in-place into dst. +func (c *Cholesky) SolveTo(dst *Dense, b Matrix) error { + if !c.valid() { + panic(badCholesky) + } + n := c.chol.mat.N + bm, bn := b.Dims() + if n != bm { + panic(ErrShape) + } + + dst.reuseAs(bm, bn) + if b != dst { + dst.Copy(b) + } + lapack64.Potrs(c.chol.mat, dst.mat) + if c.cond > ConditionTolerance { + return Condition(c.cond) + } + return nil +} + +// SolveCholTo finds the matrix X that solves A * X = B where A and B are represented +// by their Cholesky decompositions a and b. The result is stored in-place into +// dst. +func (a *Cholesky) SolveCholTo(dst *Dense, b *Cholesky) error { + if !a.valid() || !b.valid() { + panic(badCholesky) + } + bn := b.chol.mat.N + if a.chol.mat.N != bn { + panic(ErrShape) + } + + dst.reuseAsZeroed(bn, bn) + dst.Copy(b.chol.T()) + blas64.Trsm(blas.Left, blas.Trans, 1, a.chol.mat, dst.mat) + blas64.Trsm(blas.Left, blas.NoTrans, 1, a.chol.mat, dst.mat) + blas64.Trmm(blas.Right, blas.NoTrans, 1, b.chol.mat, dst.mat) + if a.cond > ConditionTolerance { + return Condition(a.cond) + } + return nil +} + +// SolveVecTo finds the vector X that solves A * x = b where A is represented +// by the Cholesky decomposition. The result is stored in-place into +// dst. +func (c *Cholesky) SolveVecTo(dst *VecDense, b Vector) error { + if !c.valid() { + panic(badCholesky) + } + n := c.chol.mat.N + if br, bc := b.Dims(); br != n || bc != 1 { + panic(ErrShape) + } + switch rv := b.(type) { + default: + dst.reuseAs(n) + return c.SolveTo(dst.asDense(), b) + case RawVectorer: + bmat := rv.RawVector() + if dst != b { + dst.checkOverlap(bmat) + } + dst.reuseAs(n) + if dst != b { + dst.CopyVec(b) + } + lapack64.Potrs(c.chol.mat, dst.asGeneral()) + if c.cond > ConditionTolerance { + return Condition(c.cond) + } + return nil + } +} + +// RawU returns the Triangular matrix used to store the Cholesky decomposition of +// the original matrix A. The returned matrix should not be modified. If it is +// modified, the decomposition is invalid and should not be used. +func (c *Cholesky) RawU() Triangular { + return c.chol +} + +// UTo extracts the n×n upper triangular matrix U from a Cholesky +// decomposition into dst and returns the result. If dst is nil a new +// TriDense is allocated. +// A = U^T * U. +func (c *Cholesky) UTo(dst *TriDense) *TriDense { + if !c.valid() { + panic(badCholesky) + } + n := c.chol.mat.N + if dst == nil { + dst = NewTriDense(n, Upper, make([]float64, n*n)) + } else { + dst.reuseAs(n, Upper) + } + dst.Copy(c.chol) + return dst +} + +// LTo extracts the n×n lower triangular matrix L from a Cholesky +// decomposition into dst and returns the result. If dst is nil a new +// TriDense is allocated. +// A = L * L^T. 
+func (c *Cholesky) LTo(dst *TriDense) *TriDense {
+	if !c.valid() {
+		panic(badCholesky)
+	}
+	n := c.chol.mat.N
+	if dst == nil {
+		dst = NewTriDense(n, Lower, make([]float64, n*n))
+	} else {
+		dst.reuseAs(n, Lower)
+	}
+	dst.Copy(c.chol.TTri())
+	return dst
+}
+
+// ToSym reconstructs the original positive definite matrix given its
+// Cholesky decomposition into dst and returns the result. If dst is nil
+// a new SymDense is allocated.
+func (c *Cholesky) ToSym(dst *SymDense) *SymDense {
+	if !c.valid() {
+		panic(badCholesky)
+	}
+	n := c.chol.mat.N
+	if dst == nil {
+		dst = NewSymDense(n, nil)
+	} else {
+		dst.reuseAs(n)
+	}
+	// Create a TriDense representing the Cholesky factor U with dst's
+	// backing slice.
+	// Operations on u are reflected in s.
+	u := &TriDense{
+		mat: blas64.Triangular{
+			Uplo:   blas.Upper,
+			Diag:   blas.NonUnit,
+			N:      n,
+			Data:   dst.mat.Data,
+			Stride: dst.mat.Stride,
+		},
+		cap: n,
+	}
+	u.Copy(c.chol)
+	// Compute the product U^T*U using the algorithm from LAPACK/TESTING/LIN/dpot01.f
+	a := u.mat.Data
+	lda := u.mat.Stride
+	bi := blas64.Implementation()
+	for k := n - 1; k >= 0; k-- {
+		a[k*lda+k] = bi.Ddot(k+1, a[k:], lda, a[k:], lda)
+		if k > 0 {
+			bi.Dtrmv(blas.Upper, blas.Trans, blas.NonUnit, k, a, lda, a[k:], lda)
+		}
+	}
+	return dst
+}
+
+// InverseTo computes the inverse of the matrix represented by its Cholesky
+// factorization and stores the result into s. If the factorized
+// matrix is ill-conditioned, a Condition error will be returned.
+// Note that matrix inversion is numerically unstable, and should generally be
+// avoided where possible, for example by using the Solve routines.
+func (c *Cholesky) InverseTo(s *SymDense) error {
+	if !c.valid() {
+		panic(badCholesky)
+	}
+	s.reuseAs(c.chol.mat.N)
+	// Create a TriDense representing the Cholesky factor U with the backing
+	// slice from s.
+	// Operations on u are reflected in s.
+	u := &TriDense{
+		mat: blas64.Triangular{
+			Uplo:   blas.Upper,
+			Diag:   blas.NonUnit,
+			N:      s.mat.N,
+			Data:   s.mat.Data,
+			Stride: s.mat.Stride,
+		},
+		cap: s.mat.N,
+	}
+	u.Copy(c.chol)
+
+	_, ok := lapack64.Potri(u.mat)
+	if !ok {
+		return Condition(math.Inf(1))
+	}
+	if c.cond > ConditionTolerance {
+		return Condition(c.cond)
+	}
+	return nil
+}
+
+// Scale multiplies the original matrix A by a positive constant using
+// its Cholesky decomposition, storing the result in-place into the receiver.
+// That is, if the original Cholesky factorization is
+//  U^T * U = A
+// the updated factorization is
+//  U'^T * U' = f A = A'
+// Scale panics if the constant is non-positive, or if the receiver is non-zero
+// and is of a different size from the input.
+func (c *Cholesky) Scale(f float64, orig *Cholesky) {
+	if !orig.valid() {
+		panic(badCholesky)
+	}
+	if f <= 0 {
+		panic("cholesky: scaling by a non-positive constant")
+	}
+	n := orig.Symmetric()
+	if c.chol == nil {
+		c.chol = NewTriDense(n, Upper, nil)
+	} else if c.chol.mat.N != n {
+		panic(ErrShape)
+	}
+	c.chol.ScaleTri(math.Sqrt(f), orig.chol)
+	c.cond = orig.cond // Scaling by a positive constant does not change the condition number.
+}
+
+// ExtendVecSym computes the Cholesky decomposition of the original matrix A,
+// whose Cholesky decomposition is in a, extended by the n×1 vector v according to
+//  [A  w]
+//  [w' k]
+// where k = v[n-1] and w = v[:n-1]. The result is stored into the receiver.
+// In order for the updated matrix to be positive definite, it must be the case
+// that k > w' A^-1 w.
If this condition does not hold then ExtendVecSym will +// return false and the receiver will not be updated. +// +// ExtendVecSym will panic if v.Len() != a.Symmetric()+1 or if a does not contain +// a valid decomposition. +func (c *Cholesky) ExtendVecSym(a *Cholesky, v Vector) (ok bool) { + n := a.Symmetric() + + if v.Len() != n+1 { + panic(badSliceLength) + } + if !a.valid() { + panic(badCholesky) + } + + // The algorithm is commented here, but see also + // https://math.stackexchange.com/questions/955874/cholesky-factor-when-adding-a-row-and-column-to-already-factorized-matrix + // We have A and want to compute the Cholesky of + // [A w] + // [w' k] + // We want + // [U c] + // [0 d] + // to be the updated Cholesky, and so it must be that + // [A w] = [U' 0] [U c] + // [w' k] [c' d] [0 d] + // Thus, we need + // 1) A = U'U (true by the original decomposition being valid), + // 2) U' * c = w => c = U'^-1 w + // 3) c'*c + d'*d = k => d = sqrt(k-c'*c) + + // First, compute c = U'^-1 a + // TODO(btracey): Replace this with CopyVec when issue 167 is fixed. + w := NewVecDense(n, nil) + for i := 0; i < n; i++ { + w.SetVec(i, v.At(i, 0)) + } + k := v.At(n, 0) + + var t VecDense + t.SolveVec(a.chol.T(), w) + + dot := Dot(&t, &t) + if dot >= k { + return false + } + d := math.Sqrt(k - dot) + + newU := NewTriDense(n+1, Upper, nil) + newU.Copy(a.chol) + for i := 0; i < n; i++ { + newU.SetTri(i, n, t.At(i, 0)) + } + newU.SetTri(n, n, d) + c.chol = newU + c.updateCond(-1) + return true +} + +// SymRankOne performs a rank-1 update of the original matrix A and refactorizes +// its Cholesky factorization, storing the result into the receiver. That is, if +// in the original Cholesky factorization +// U^T * U = A, +// in the updated factorization +// U'^T * U' = A + alpha * x * x^T = A'. +// +// Note that when alpha is negative, the updating problem may be ill-conditioned +// and the results may be inaccurate, or the updated matrix A' may not be +// positive definite and not have a Cholesky factorization. SymRankOne returns +// whether the updated matrix A' is positive definite. +// +// SymRankOne updates a Cholesky factorization in O(n²) time. The Cholesky +// factorization computation from scratch is O(n³). +func (c *Cholesky) SymRankOne(orig *Cholesky, alpha float64, x Vector) (ok bool) { + if !orig.valid() { + panic(badCholesky) + } + n := orig.Symmetric() + if r, c := x.Dims(); r != n || c != 1 { + panic(ErrShape) + } + if orig != c { + if c.chol == nil { + c.chol = NewTriDense(n, Upper, nil) + } else if c.chol.mat.N != n { + panic(ErrShape) + } + c.chol.Copy(orig.chol) + } + + if alpha == 0 { + return true + } + + // Algorithms for updating and downdating the Cholesky factorization are + // described, for example, in + // - J. J. Dongarra, J. R. Bunch, C. B. Moler, G. W. Stewart: LINPACK + // Users' Guide. SIAM (1979), pages 10.10--10.14 + // or + // - P. E. Gill, G. H. Golub, W. Murray, and M. A. Saunders: Methods for + // modifying matrix factorizations. Mathematics of Computation 28(126) + // (1974), Method C3 on page 521 + // + // The implementation is based on LINPACK code + // http://www.netlib.org/linpack/dchud.f + // http://www.netlib.org/linpack/dchdd.f + // and + // https://icl.cs.utk.edu/lapack-forum/viewtopic.php?f=2&t=2646 + // + // According to http://icl.cs.utk.edu/lapack-forum/archives/lapack/msg00301.html + // LINPACK is released under BSD license. + // + // See also: + // - M. A. Saunders: Large-scale Linear Programming Using the Cholesky + // Factorization. 
Technical Report Stanford University (1972)
+	//    http://i.stanford.edu/pub/cstr/reports/cs/tr/72/252/CS-TR-72-252.pdf
+	//  - Matthias Seeger: Low rank updates for the Cholesky decomposition.
+	//    EPFL Technical Report 161468 (2004)
+	//    http://infoscience.epfl.ch/record/161468
+
+	work := getFloats(n, false)
+	defer putFloats(work)
+	var xmat blas64.Vector
+	if rv, ok := x.(RawVectorer); ok {
+		xmat = rv.RawVector()
+	} else {
+		var tmp VecDense
+		tmp.CopyVec(x)
+		xmat = tmp.RawVector()
+	}
+	blas64.Copy(xmat, blas64.Vector{N: n, Data: work, Inc: 1})
+
+	if alpha > 0 {
+		// Compute rank-1 update.
+		if alpha != 1 {
+			blas64.Scal(math.Sqrt(alpha), blas64.Vector{N: n, Data: work, Inc: 1})
+		}
+		umat := c.chol.mat
+		stride := umat.Stride
+		for i := 0; i < n; i++ {
+			// Compute parameters of the Givens matrix that zeroes
+			// the i-th element of x.
+			c, s, r, _ := blas64.Rotg(umat.Data[i*stride+i], work[i])
+			if r < 0 {
+				// Multiply by -1 to have positive diagonal
+				// elements.
+				r *= -1
+				c *= -1
+				s *= -1
+			}
+			umat.Data[i*stride+i] = r
+			if i < n-1 {
+				// Multiply the extended factorization matrix by
+				// the Givens matrix from the left. Only
+				// the i-th row and x are modified.
+				blas64.Rot(
+					blas64.Vector{N: n - i - 1, Data: umat.Data[i*stride+i+1 : i*stride+n], Inc: 1},
+					blas64.Vector{N: n - i - 1, Data: work[i+1 : n], Inc: 1},
+					c, s)
+			}
+		}
+		c.updateCond(-1)
+		return true
+	}
+
+	// Compute rank-1 downdate.
+	alpha = math.Sqrt(-alpha)
+	if alpha != 1 {
+		blas64.Scal(alpha, blas64.Vector{N: n, Data: work, Inc: 1})
+	}
+	// Solve U^T * p = x storing the result into work.
+	ok = lapack64.Trtrs(blas.Trans, c.chol.RawTriangular(), blas64.General{
+		Rows:   n,
+		Cols:   1,
+		Stride: 1,
+		Data:   work,
+	})
+	if !ok {
+		// The original matrix is singular. Should not happen, because
+		// the factorization is valid.
+		panic(badCholesky)
+	}
+	norm := blas64.Nrm2(blas64.Vector{N: n, Data: work, Inc: 1})
+	if norm >= 1 {
+		// The updated matrix is not positive definite.
+		return false
+	}
+	norm = math.Sqrt((1 + norm) * (1 - norm))
+	cos := getFloats(n, false)
+	defer putFloats(cos)
+	sin := getFloats(n, false)
+	defer putFloats(sin)
+	for i := n - 1; i >= 0; i-- {
+		// Compute parameters of Givens matrices that zero elements of p
+		// backwards.
+		cos[i], sin[i], norm, _ = blas64.Rotg(norm, work[i])
+		if norm < 0 {
+			norm *= -1
+			cos[i] *= -1
+			sin[i] *= -1
+		}
+	}
+	umat := c.chol.mat
+	stride := umat.Stride
+	for i := n - 1; i >= 0; i-- {
+		work[i] = 0
+		// Apply Givens matrices to U.
+		// TODO(vladimir-ch): Use workspace to avoid modifying the
+		// receiver in case an invalid factorization is created.
+		blas64.Rot(
+			blas64.Vector{N: n - i, Data: work[i:n], Inc: 1},
+			blas64.Vector{N: n - i, Data: umat.Data[i*stride+i : i*stride+n], Inc: 1},
+			cos[i], sin[i])
+		if umat.Data[i*stride+i] == 0 {
+			// The matrix is singular (may rarely happen due to
+			// floating-point effects?).
+			ok = false
+		} else if umat.Data[i*stride+i] < 0 {
+			// Diagonal elements should be positive. If it happens
+			// that on the i-th row the diagonal is negative,
+			// multiply U from the left by an identity matrix that
+			// has -1 on the i-th row.
+ blas64.Scal(-1, blas64.Vector{N: n - i, Data: umat.Data[i*stride+i : i*stride+n], Inc: 1}) + } + } + if ok { + c.updateCond(-1) + } else { + c.Reset() + } + return ok +} + +func (c *Cholesky) valid() bool { + return c.chol != nil && !c.chol.IsZero() +} diff --git a/vendor/gonum.org/v1/gonum/mat/cmatrix.go b/vendor/gonum.org/v1/gonum/mat/cmatrix.go new file mode 100644 index 0000000..6219c28 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/cmatrix.go @@ -0,0 +1,210 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + "math/cmplx" + + "gonum.org/v1/gonum/floats" +) + +// CMatrix is the basic matrix interface type for complex matrices. +type CMatrix interface { + // Dims returns the dimensions of a Matrix. + Dims() (r, c int) + + // At returns the value of a matrix element at row i, column j. + // It will panic if i or j are out of bounds for the matrix. + At(i, j int) complex128 + + // H returns the conjugate transpose of the Matrix. Whether H + // returns a copy of the underlying data is implementation dependent. + // This method may be implemented using the Conjugate type, which + // provides an implicit matrix conjugate transpose. + H() CMatrix +} + +var ( + _ CMatrix = Conjugate{} + _ Unconjugator = Conjugate{} +) + +// Conjugate is a type for performing an implicit matrix conjugate transpose. +// It implements the Matrix interface, returning values from the conjugate +// transpose of the matrix within. +type Conjugate struct { + CMatrix CMatrix +} + +// At returns the value of the element at row i and column j of the conjugate +// transposed matrix, that is, row j and column i of the Matrix field. +func (t Conjugate) At(i, j int) complex128 { + z := t.CMatrix.At(j, i) + return cmplx.Conj(z) +} + +// Dims returns the dimensions of the transposed matrix. The number of rows returned +// is the number of columns in the Matrix field, and the number of columns is +// the number of rows in the Matrix field. +func (t Conjugate) Dims() (r, c int) { + c, r = t.CMatrix.Dims() + return r, c +} + +// H performs an implicit conjugate transpose by returning the Matrix field. +func (t Conjugate) H() CMatrix { + return t.CMatrix +} + +// Unconjugate returns the Matrix field. +func (t Conjugate) Unconjugate() CMatrix { + return t.CMatrix +} + +// Unconjugator is a type that can undo an implicit conjugate transpose. +type Unconjugator interface { + // Note: This interface is needed to unify all of the Conjugate types. In + // the cmat128 methods, we need to test if the Matrix has been implicitly + // transposed. If this is checked by testing for the specific Conjugate type + // then the behavior will be different if the user uses H() or HTri() for a + // triangular matrix. + + // Unconjugate returns the underlying Matrix stored for the implicit + // conjugate transpose. + Unconjugate() CMatrix +} + +// useC returns a complex128 slice with l elements, using c if it +// has the necessary capacity, otherwise creating a new slice. +func useC(c []complex128, l int) []complex128 { + if l <= cap(c) { + return c[:l] + } + return make([]complex128, l) +} + +// useZeroedC returns a complex128 slice with l elements, using c if it +// has the necessary capacity, otherwise creating a new slice. The +// elements of the returned slice are guaranteed to be zero. 
+func useZeroedC(c []complex128, l int) []complex128 { + if l <= cap(c) { + c = c[:l] + zeroC(c) + return c + } + return make([]complex128, l) +} + +// zeroC zeros the given slice's elements. +func zeroC(c []complex128) { + for i := range c { + c[i] = 0 + } +} + +// unconjugate unconjugates a matrix if applicable. If a is an Unconjugator, then +// unconjugate returns the underlying matrix and true. If it is not, then it returns +// the input matrix and false. +func unconjugate(a CMatrix) (CMatrix, bool) { + if ut, ok := a.(Unconjugator); ok { + return ut.Unconjugate(), true + } + return a, false +} + +// CEqual returns whether the matrices a and b have the same size +// and are element-wise equal. +func CEqual(a, b CMatrix) bool { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + return false + } + // TODO(btracey): Add in fast-paths. + for i := 0; i < ar; i++ { + for j := 0; j < ac; j++ { + if a.At(i, j) != b.At(i, j) { + return false + } + } + } + return true +} + +// CEqualApprox returns whether the matrices a and b have the same size and contain all equal +// elements with tolerance for element-wise equality specified by epsilon. Matrices +// with non-equal shapes are not equal. +func CEqualApprox(a, b CMatrix, epsilon float64) bool { + // TODO(btracey): + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + return false + } + for i := 0; i < ar; i++ { + for j := 0; j < ac; j++ { + if !cEqualWithinAbsOrRel(a.At(i, j), b.At(i, j), epsilon, epsilon) { + return false + } + } + } + return true +} + +// TODO(btracey): Move these into a cmplxs if/when we have one. + +func cEqualWithinAbsOrRel(a, b complex128, absTol, relTol float64) bool { + if cEqualWithinAbs(a, b, absTol) { + return true + } + return cEqualWithinRel(a, b, relTol) +} + +// cEqualWithinAbs returns true if a and b have an absolute +// difference of less than tol. +func cEqualWithinAbs(a, b complex128, tol float64) bool { + return a == b || cmplx.Abs(a-b) <= tol +} + +const minNormalFloat64 = 2.2250738585072014e-308 + +// cEqualWithinRel returns true if the difference between a and b +// is not greater than tol times the greater value. +func cEqualWithinRel(a, b complex128, tol float64) bool { + if a == b { + return true + } + if cmplx.IsNaN(a) || cmplx.IsNaN(b) { + return false + } + // Cannot play the same trick as in floats because there are multiple + // possible infinities. + if cmplx.IsInf(a) { + if !cmplx.IsInf(b) { + return false + } + ra := real(a) + if math.IsInf(ra, 0) { + if ra == real(b) { + return floats.EqualWithinRel(imag(a), imag(b), tol) + } + return false + } + if imag(a) == imag(b) { + return floats.EqualWithinRel(ra, real(b), tol) + } + return false + } + if cmplx.IsInf(b) { + return false + } + + delta := cmplx.Abs(a - b) + if delta <= minNormalFloat64 { + return delta <= tol*minNormalFloat64 + } + return delta/math.Max(cmplx.Abs(a), cmplx.Abs(b)) <= tol +} diff --git a/vendor/gonum.org/v1/gonum/mat/consts.go b/vendor/gonum.org/v1/gonum/mat/consts.go new file mode 100644 index 0000000..3de3f5b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/consts.go @@ -0,0 +1,15 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +// TriKind represents the triangularity of the matrix. +type TriKind bool + +const ( + // Upper specifies an upper triangular matrix. + Upper TriKind = true + // Lower specifies a lower triangular matrix. 
+ Lower TriKind = false +) diff --git a/vendor/gonum.org/v1/gonum/mat/dense.go b/vendor/gonum.org/v1/gonum/mat/dense.go new file mode 100644 index 0000000..87b1105 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/dense.go @@ -0,0 +1,558 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + dense *Dense + + _ Matrix = dense + _ Mutable = dense + + _ Cloner = dense + _ RowViewer = dense + _ ColViewer = dense + _ RawRowViewer = dense + _ Grower = dense + + _ RawMatrixSetter = dense + _ RawMatrixer = dense + + _ Reseter = dense +) + +// Dense is a dense matrix representation. +type Dense struct { + mat blas64.General + + capRows, capCols int +} + +// NewDense creates a new Dense matrix with r rows and c columns. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == r*c, data is +// used as the backing slice, and changes to the elements of the returned Dense +// will be reflected in data. If neither of these is true, NewDense will panic. +// NewDense will panic if either r or c is zero. +// +// The data must be arranged in row-major order, i.e. the (i*c + j)-th +// element in the data slice is the {i, j}-th element in the matrix. +func NewDense(r, c int, data []float64) *Dense { + if r <= 0 || c <= 0 { + if r == 0 || c == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if data != nil && r*c != len(data) { + panic(ErrShape) + } + if data == nil { + data = make([]float64, r*c) + } + return &Dense{ + mat: blas64.General{ + Rows: r, + Cols: c, + Stride: c, + Data: data, + }, + capRows: r, + capCols: c, + } +} + +// reuseAs resizes an empty matrix to a r×c matrix, +// or checks that a non-empty matrix is r×c. +// +// reuseAs must be kept in sync with reuseAsZeroed. +func (m *Dense) reuseAs(r, c int) { + if m.mat.Rows > m.capRows || m.mat.Cols > m.capCols { + // Panic as a string, not a mat.Error. + panic("mat: caps not correctly set") + } + if r == 0 || c == 0 { + panic(ErrZeroLength) + } + if m.IsZero() { + m.mat = blas64.General{ + Rows: r, + Cols: c, + Stride: c, + Data: use(m.mat.Data, r*c), + } + m.capRows = r + m.capCols = c + return + } + if r != m.mat.Rows || c != m.mat.Cols { + panic(ErrShape) + } +} + +// reuseAsZeroed resizes an empty matrix to a r×c matrix, +// or checks that a non-empty matrix is r×c. It zeroes +// all the elements of the matrix. +// +// reuseAsZeroed must be kept in sync with reuseAs. +func (m *Dense) reuseAsZeroed(r, c int) { + if m.mat.Rows > m.capRows || m.mat.Cols > m.capCols { + // Panic as a string, not a mat.Error. + panic("mat: caps not correctly set") + } + if r == 0 || c == 0 { + panic(ErrZeroLength) + } + if m.IsZero() { + m.mat = blas64.General{ + Rows: r, + Cols: c, + Stride: c, + Data: useZeroed(m.mat.Data, r*c), + } + m.capRows = r + m.capCols = c + return + } + if r != m.mat.Rows || c != m.mat.Cols { + panic(ErrShape) + } + m.Zero() +} + +// Zero sets all of the matrix elements to zero. +func (m *Dense) Zero() { + r := m.mat.Rows + c := m.mat.Cols + for i := 0; i < r; i++ { + zero(m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+c]) + } +} + +// isolatedWorkspace returns a new dense matrix w with the size of a and +// returns a callback to defer which performs cleanup at the return of the call. +// This should be used when a method receiver is the same pointer as an input argument. 
+func (m *Dense) isolatedWorkspace(a Matrix) (w *Dense, restore func()) { + r, c := a.Dims() + if r == 0 || c == 0 { + panic(ErrZeroLength) + } + w = getWorkspace(r, c, false) + return w, func() { + m.Copy(w) + putWorkspace(w) + } +} + +// Reset zeros the dimensions of the matrix so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (m *Dense) Reset() { + // Row, Cols and Stride must be zeroed in unison. + m.mat.Rows, m.mat.Cols, m.mat.Stride = 0, 0, 0 + m.capRows, m.capCols = 0, 0 + m.mat.Data = m.mat.Data[:0] +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized matrices can be the +// receiver for size-restricted operations. Dense matrices can be zeroed using Reset. +func (m *Dense) IsZero() bool { + // It must be the case that m.Dims() returns + // zeros in this case. See comment in Reset(). + return m.mat.Stride == 0 +} + +// asTriDense returns a TriDense with the given size and side. The backing data +// of the TriDense is the same as the receiver. +func (m *Dense) asTriDense(n int, diag blas.Diag, uplo blas.Uplo) *TriDense { + return &TriDense{ + mat: blas64.Triangular{ + N: n, + Stride: m.mat.Stride, + Data: m.mat.Data, + Uplo: uplo, + Diag: diag, + }, + cap: n, + } +} + +// DenseCopyOf returns a newly allocated copy of the elements of a. +func DenseCopyOf(a Matrix) *Dense { + d := &Dense{} + d.Clone(a) + return d +} + +// SetRawMatrix sets the underlying blas64.General used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in b. +func (m *Dense) SetRawMatrix(b blas64.General) { + m.capRows, m.capCols = b.Rows, b.Cols + m.mat = b +} + +// RawMatrix returns the underlying blas64.General used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in returned blas64.General. +func (m *Dense) RawMatrix() blas64.General { return m.mat } + +// Dims returns the number of rows and columns in the matrix. +func (m *Dense) Dims() (r, c int) { return m.mat.Rows, m.mat.Cols } + +// Caps returns the number of rows and columns in the backing matrix. +func (m *Dense) Caps() (r, c int) { return m.capRows, m.capCols } + +// T performs an implicit transpose by returning the receiver inside a Transpose. +func (m *Dense) T() Matrix { + return Transpose{m} +} + +// ColView returns a Vector reflecting the column j, backed by the matrix data. +// +// See ColViewer for more information. +func (m *Dense) ColView(j int) Vector { + var v VecDense + v.ColViewOf(m, j) + return &v +} + +// SetCol sets the values in the specified column of the matrix to the values +// in src. len(src) must equal the number of rows in the receiver. +func (m *Dense) SetCol(j int, src []float64) { + if j >= m.mat.Cols || j < 0 { + panic(ErrColAccess) + } + if len(src) != m.mat.Rows { + panic(ErrColLength) + } + + blas64.Copy( + blas64.Vector{N: m.mat.Rows, Inc: 1, Data: src}, + blas64.Vector{N: m.mat.Rows, Inc: m.mat.Stride, Data: m.mat.Data[j:]}, + ) +} + +// SetRow sets the values in the specified rows of the matrix to the values +// in src. len(src) must equal the number of columns in the receiver. +func (m *Dense) SetRow(i int, src []float64) { + if i >= m.mat.Rows || i < 0 { + panic(ErrRowAccess) + } + if len(src) != m.mat.Cols { + panic(ErrRowLength) + } + + copy(m.rawRowView(i), src) +} + +// RowView returns row i of the matrix data represented as a column vector, +// backed by the matrix data. 
+// +// See RowViewer for more information. +func (m *Dense) RowView(i int) Vector { + var v VecDense + v.RowViewOf(m, i) + return &v +} + +// RawRowView returns a slice backed by the same array as backing the +// receiver. +func (m *Dense) RawRowView(i int) []float64 { + if i >= m.mat.Rows || i < 0 { + panic(ErrRowAccess) + } + return m.rawRowView(i) +} + +func (m *Dense) rawRowView(i int) []float64 { + return m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+m.mat.Cols] +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (m *Dense) DiagView() Diagonal { + n := min(m.mat.Rows, m.mat.Cols) + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: m.mat.Stride + 1, + Data: m.mat.Data[:(n-1)*m.mat.Stride+n], + }, + } +} + +// Slice returns a new Matrix that shares backing data with the receiver. +// The returned matrix starts at {i,j} of the receiver and extends k-i rows +// and l-j columns. The final row in the resulting matrix is k-1 and the +// final column is l-1. +// Slice panics with ErrIndexOutOfRange if the slice is outside the capacity +// of the receiver. +func (m *Dense) Slice(i, k, j, l int) Matrix { + mr, mc := m.Caps() + if i < 0 || mr <= i || j < 0 || mc <= j || k < i || mr < k || l < j || mc < l { + if i == k || j == l { + panic(ErrZeroLength) + } + panic(ErrIndexOutOfRange) + } + t := *m + t.mat.Data = t.mat.Data[i*t.mat.Stride+j : (k-1)*t.mat.Stride+l] + t.mat.Rows = k - i + t.mat.Cols = l - j + t.capRows -= i + t.capCols -= j + return &t +} + +// Grow returns the receiver expanded by r rows and c columns. If the dimensions +// of the expanded matrix are outside the capacities of the receiver a new +// allocation is made, otherwise not. Note the receiver itself is not modified +// during the call to Grow. +func (m *Dense) Grow(r, c int) Matrix { + if r < 0 || c < 0 { + panic(ErrIndexOutOfRange) + } + if r == 0 && c == 0 { + return m + } + + r += m.mat.Rows + c += m.mat.Cols + + var t Dense + switch { + case m.mat.Rows == 0 || m.mat.Cols == 0: + t.mat = blas64.General{ + Rows: r, + Cols: c, + Stride: c, + // We zero because we don't know how the matrix will be used. + // In other places, the mat is immediately filled with a result; + // this is not the case here. + Data: useZeroed(m.mat.Data, r*c), + } + case r > m.capRows || c > m.capCols: + cr := max(r, m.capRows) + cc := max(c, m.capCols) + t.mat = blas64.General{ + Rows: r, + Cols: c, + Stride: cc, + Data: make([]float64, cr*cc), + } + t.capRows = cr + t.capCols = cc + // Copy the complete matrix over to the new matrix. + // Including elements not currently visible. Use a temporary structure + // to avoid modifying the receiver. + var tmp Dense + tmp.mat = blas64.General{ + Rows: m.mat.Rows, + Cols: m.mat.Cols, + Stride: m.mat.Stride, + Data: m.mat.Data, + } + tmp.capRows = m.capRows + tmp.capCols = m.capCols + t.Copy(&tmp) + return &t + default: + t.mat = blas64.General{ + Data: m.mat.Data[:(r-1)*m.mat.Stride+c], + Rows: r, + Cols: c, + Stride: m.mat.Stride, + } + } + t.capRows = r + t.capCols = c + return &t +} + +// Clone makes a copy of a into the receiver, overwriting the previous value of +// the receiver. The clone operation does not make any restriction on shape and +// will not cause shadowing. +// +// See the Cloner interface for more information. 
+func (m *Dense) Clone(a Matrix) { + r, c := a.Dims() + mat := blas64.General{ + Rows: r, + Cols: c, + Stride: c, + } + m.capRows, m.capCols = r, c + + aU, trans := untranspose(a) + switch aU := aU.(type) { + case RawMatrixer: + amat := aU.RawMatrix() + mat.Data = make([]float64, r*c) + if trans { + for i := 0; i < r; i++ { + blas64.Copy(blas64.Vector{N: c, Inc: amat.Stride, Data: amat.Data[i : i+(c-1)*amat.Stride+1]}, + blas64.Vector{N: c, Inc: 1, Data: mat.Data[i*c : (i+1)*c]}) + } + } else { + for i := 0; i < r; i++ { + copy(mat.Data[i*c:(i+1)*c], amat.Data[i*amat.Stride:i*amat.Stride+c]) + } + } + case *VecDense: + amat := aU.mat + mat.Data = make([]float64, aU.mat.N) + blas64.Copy(blas64.Vector{N: aU.mat.N, Inc: amat.Inc, Data: amat.Data}, + blas64.Vector{N: aU.mat.N, Inc: 1, Data: mat.Data}) + default: + mat.Data = make([]float64, r*c) + w := *m + w.mat = mat + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + w.set(i, j, a.At(i, j)) + } + } + *m = w + return + } + m.mat = mat +} + +// Copy makes a copy of elements of a into the receiver. It is similar to the +// built-in copy; it copies as much as the overlap between the two matrices and +// returns the number of rows and columns it copied. If a aliases the receiver +// and is a transposed Dense or VecDense, with a non-unitary increment, Copy will +// panic. +// +// See the Copier interface for more information. +func (m *Dense) Copy(a Matrix) (r, c int) { + r, c = a.Dims() + if a == m { + return r, c + } + r = min(r, m.mat.Rows) + c = min(c, m.mat.Cols) + if r == 0 || c == 0 { + return 0, 0 + } + + aU, trans := untranspose(a) + switch aU := aU.(type) { + case RawMatrixer: + amat := aU.RawMatrix() + if trans { + if amat.Stride != 1 { + m.checkOverlap(amat) + } + for i := 0; i < r; i++ { + blas64.Copy(blas64.Vector{N: c, Inc: amat.Stride, Data: amat.Data[i : i+(c-1)*amat.Stride+1]}, + blas64.Vector{N: c, Inc: 1, Data: m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+c]}) + } + } else { + switch o := offset(m.mat.Data, amat.Data); { + case o < 0: + for i := r - 1; i >= 0; i-- { + copy(m.mat.Data[i*m.mat.Stride:i*m.mat.Stride+c], amat.Data[i*amat.Stride:i*amat.Stride+c]) + } + case o > 0: + for i := 0; i < r; i++ { + copy(m.mat.Data[i*m.mat.Stride:i*m.mat.Stride+c], amat.Data[i*amat.Stride:i*amat.Stride+c]) + } + default: + // Nothing to do. + } + } + case *VecDense: + var n, stride int + amat := aU.mat + if trans { + if amat.Inc != 1 { + m.checkOverlap(aU.asGeneral()) + } + n = c + stride = 1 + } else { + n = r + stride = m.mat.Stride + } + if amat.Inc == 1 && stride == 1 { + copy(m.mat.Data, amat.Data[:n]) + break + } + switch o := offset(m.mat.Data, amat.Data); { + case o < 0: + blas64.Copy(blas64.Vector{N: n, Inc: -amat.Inc, Data: amat.Data}, + blas64.Vector{N: n, Inc: -stride, Data: m.mat.Data}) + case o > 0: + blas64.Copy(blas64.Vector{N: n, Inc: amat.Inc, Data: amat.Data}, + blas64.Vector{N: n, Inc: stride, Data: m.mat.Data}) + default: + // Nothing to do. + } + default: + m.checkOverlapMatrix(aU) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + m.set(i, j, a.At(i, j)) + } + } + } + + return r, c +} + +// Stack appends the rows of b onto the rows of a, placing the result into the +// receiver with b placed in the greater indexed rows. Stack will panic if the +// two input matrices do not have the same number of columns or the constructed +// stacked matrix is not the same shape as the receiver. 
+func (m *Dense) Stack(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ac != bc || m == a || m == b { + panic(ErrShape) + } + + m.reuseAs(ar+br, ac) + + m.Copy(a) + w := m.Slice(ar, ar+br, 0, bc).(*Dense) + w.Copy(b) +} + +// Augment creates the augmented matrix of a and b, where b is placed in the +// greater indexed columns. Augment will panic if the two input matrices do +// not have the same number of rows or the constructed augmented matrix is +// not the same shape as the receiver. +func (m *Dense) Augment(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || m == a || m == b { + panic(ErrShape) + } + + m.reuseAs(ar, ac+bc) + + m.Copy(a) + w := m.Slice(0, br, ac, ac+bc).(*Dense) + w.Copy(b) +} + +// Trace returns the trace of the matrix. The matrix must be square or Trace +// will panic. +func (m *Dense) Trace() float64 { + if m.mat.Rows != m.mat.Cols { + panic(ErrSquare) + } + // TODO(btracey): could use internal asm sum routine. + var v float64 + for i := 0; i < m.mat.Rows; i++ { + v += m.mat.Data[i*m.mat.Stride+i] + } + return v +} diff --git a/vendor/gonum.org/v1/gonum/mat/dense_arithmetic.go b/vendor/gonum.org/v1/gonum/mat/dense_arithmetic.go new file mode 100644 index 0000000..dd4526f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/dense_arithmetic.go @@ -0,0 +1,886 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack/lapack64" +) + +// Add adds a and b element-wise, placing the result in the receiver. Add +// will panic if the two matrices do not have the same shape. +func (m *Dense) Add(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + panic(ErrShape) + } + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + m.reuseAs(ar, ac) + + if arm, ok := a.(RawMatrixer); ok { + if brm, ok := b.(RawMatrixer); ok { + amat, bmat := arm.RawMatrix(), brm.RawMatrix() + if m != aU { + m.checkOverlap(amat) + } + if m != bU { + m.checkOverlap(bmat) + } + for ja, jb, jm := 0, 0, 0; ja < ar*amat.Stride; ja, jb, jm = ja+amat.Stride, jb+bmat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = v + bmat.Data[i+jb] + } + } + return + } + } + + m.checkOverlapMatrix(aU) + m.checkOverlapMatrix(bU) + var restore func() + if m == aU { + m, restore = m.isolatedWorkspace(aU) + defer restore() + } else if m == bU { + m, restore = m.isolatedWorkspace(bU) + defer restore() + } + + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, a.At(r, c)+b.At(r, c)) + } + } +} + +// Sub subtracts the matrix b from a, placing the result in the receiver. Sub +// will panic if the two matrices do not have the same shape. 
+func (m *Dense) Sub(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + panic(ErrShape) + } + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + m.reuseAs(ar, ac) + + if arm, ok := a.(RawMatrixer); ok { + if brm, ok := b.(RawMatrixer); ok { + amat, bmat := arm.RawMatrix(), brm.RawMatrix() + if m != aU { + m.checkOverlap(amat) + } + if m != bU { + m.checkOverlap(bmat) + } + for ja, jb, jm := 0, 0, 0; ja < ar*amat.Stride; ja, jb, jm = ja+amat.Stride, jb+bmat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = v - bmat.Data[i+jb] + } + } + return + } + } + + m.checkOverlapMatrix(aU) + m.checkOverlapMatrix(bU) + var restore func() + if m == aU { + m, restore = m.isolatedWorkspace(aU) + defer restore() + } else if m == bU { + m, restore = m.isolatedWorkspace(bU) + defer restore() + } + + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, a.At(r, c)-b.At(r, c)) + } + } +} + +// MulElem performs element-wise multiplication of a and b, placing the result +// in the receiver. MulElem will panic if the two matrices do not have the same +// shape. +func (m *Dense) MulElem(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + panic(ErrShape) + } + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + m.reuseAs(ar, ac) + + if arm, ok := a.(RawMatrixer); ok { + if brm, ok := b.(RawMatrixer); ok { + amat, bmat := arm.RawMatrix(), brm.RawMatrix() + if m != aU { + m.checkOverlap(amat) + } + if m != bU { + m.checkOverlap(bmat) + } + for ja, jb, jm := 0, 0, 0; ja < ar*amat.Stride; ja, jb, jm = ja+amat.Stride, jb+bmat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = v * bmat.Data[i+jb] + } + } + return + } + } + + m.checkOverlapMatrix(aU) + m.checkOverlapMatrix(bU) + var restore func() + if m == aU { + m, restore = m.isolatedWorkspace(aU) + defer restore() + } else if m == bU { + m, restore = m.isolatedWorkspace(bU) + defer restore() + } + + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, a.At(r, c)*b.At(r, c)) + } + } +} + +// DivElem performs element-wise division of a by b, placing the result +// in the receiver. DivElem will panic if the two matrices do not have the same +// shape. +func (m *Dense) DivElem(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + panic(ErrShape) + } + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + m.reuseAs(ar, ac) + + if arm, ok := a.(RawMatrixer); ok { + if brm, ok := b.(RawMatrixer); ok { + amat, bmat := arm.RawMatrix(), brm.RawMatrix() + if m != aU { + m.checkOverlap(amat) + } + if m != bU { + m.checkOverlap(bmat) + } + for ja, jb, jm := 0, 0, 0; ja < ar*amat.Stride; ja, jb, jm = ja+amat.Stride, jb+bmat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = v / bmat.Data[i+jb] + } + } + return + } + } + + m.checkOverlapMatrix(aU) + m.checkOverlapMatrix(bU) + var restore func() + if m == aU { + m, restore = m.isolatedWorkspace(aU) + defer restore() + } else if m == bU { + m, restore = m.isolatedWorkspace(bU) + defer restore() + } + + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, a.At(r, c)/b.At(r, c)) + } + } +} + +// Inverse computes the inverse of the matrix a, storing the result into the +// receiver. If a is ill-conditioned, a Condition error will be returned. 
+// Note that matrix inversion is numerically unstable, and should generally +// be avoided where possible, for example by using the Solve routines. +func (m *Dense) Inverse(a Matrix) error { + // TODO(btracey): Special case for RawTriangular, etc. + r, c := a.Dims() + if r != c { + panic(ErrSquare) + } + m.reuseAs(a.Dims()) + aU, aTrans := untranspose(a) + switch rm := aU.(type) { + case RawMatrixer: + if m != aU || aTrans { + if m == aU || m.checkOverlap(rm.RawMatrix()) { + tmp := getWorkspace(r, c, false) + tmp.Copy(a) + m.Copy(tmp) + putWorkspace(tmp) + break + } + m.Copy(a) + } + default: + m.Copy(a) + } + ipiv := getInts(r, false) + defer putInts(ipiv) + ok := lapack64.Getrf(m.mat, ipiv) + if !ok { + return Condition(math.Inf(1)) + } + work := getFloats(4*r, false) // must be at least 4*r for cond. + lapack64.Getri(m.mat, ipiv, work, -1) + if int(work[0]) > 4*r { + l := int(work[0]) + putFloats(work) + work = getFloats(l, false) + } else { + work = work[:4*r] + } + defer putFloats(work) + lapack64.Getri(m.mat, ipiv, work, len(work)) + norm := lapack64.Lange(CondNorm, m.mat, work) + rcond := lapack64.Gecon(CondNorm, m.mat, norm, work, ipiv) // reuse ipiv + if rcond == 0 { + return Condition(math.Inf(1)) + } + cond := 1 / rcond + if cond > ConditionTolerance { + return Condition(cond) + } + return nil +} + +// Mul takes the matrix product of a and b, placing the result in the receiver. +// If the number of columns in a does not equal the number of rows in b, Mul will panic. +func (m *Dense) Mul(a, b Matrix) { + ar, ac := a.Dims() + br, bc := b.Dims() + + if ac != br { + panic(ErrShape) + } + + aU, aTrans := untranspose(a) + bU, bTrans := untranspose(b) + m.reuseAs(ar, bc) + var restore func() + if m == aU { + m, restore = m.isolatedWorkspace(aU) + defer restore() + } else if m == bU { + m, restore = m.isolatedWorkspace(bU) + defer restore() + } + aT := blas.NoTrans + if aTrans { + aT = blas.Trans + } + bT := blas.NoTrans + if bTrans { + bT = blas.Trans + } + + // Some of the cases do not have a transpose option, so create + // temporary memory. + // C = A^T * B = (B^T * A)^T + // C^T = B^T * A. + if aUrm, ok := aU.(RawMatrixer); ok { + amat := aUrm.RawMatrix() + if restore == nil { + m.checkOverlap(amat) + } + if bUrm, ok := bU.(RawMatrixer); ok { + bmat := bUrm.RawMatrix() + if restore == nil { + m.checkOverlap(bmat) + } + blas64.Gemm(aT, bT, 1, amat, bmat, 0, m.mat) + return + } + if bU, ok := bU.(RawSymmetricer); ok { + bmat := bU.RawSymmetric() + if aTrans { + c := getWorkspace(ac, ar, false) + blas64.Symm(blas.Left, 1, bmat, amat, 0, c.mat) + strictCopy(m, c.T()) + putWorkspace(c) + return + } + blas64.Symm(blas.Right, 1, bmat, amat, 0, m.mat) + return + } + if bU, ok := bU.(RawTriangular); ok { + // Trmm updates in place, so copy aU first. + bmat := bU.RawTriangular() + if aTrans { + c := getWorkspace(ac, ar, false) + var tmp Dense + tmp.SetRawMatrix(amat) + c.Copy(&tmp) + bT := blas.Trans + if bTrans { + bT = blas.NoTrans + } + blas64.Trmm(blas.Left, bT, 1, bmat, c.mat) + strictCopy(m, c.T()) + putWorkspace(c) + return + } + m.Copy(a) + blas64.Trmm(blas.Right, bT, 1, bmat, m.mat) + return + } + if bU, ok := bU.(*VecDense); ok { + m.checkOverlap(bU.asGeneral()) + bvec := bU.RawVector() + if bTrans { + // {ar,1} x {1,bc}, which is not a vector. + // Instead, construct B as a General. 
+ bmat := blas64.General{ + Rows: bc, + Cols: 1, + Stride: bvec.Inc, + Data: bvec.Data, + } + blas64.Gemm(aT, bT, 1, amat, bmat, 0, m.mat) + return + } + cvec := blas64.Vector{ + Inc: m.mat.Stride, + Data: m.mat.Data, + } + blas64.Gemv(aT, 1, amat, bvec, 0, cvec) + return + } + } + if bUrm, ok := bU.(RawMatrixer); ok { + bmat := bUrm.RawMatrix() + if restore == nil { + m.checkOverlap(bmat) + } + if aU, ok := aU.(RawSymmetricer); ok { + amat := aU.RawSymmetric() + if bTrans { + c := getWorkspace(bc, br, false) + blas64.Symm(blas.Right, 1, amat, bmat, 0, c.mat) + strictCopy(m, c.T()) + putWorkspace(c) + return + } + blas64.Symm(blas.Left, 1, amat, bmat, 0, m.mat) + return + } + if aU, ok := aU.(RawTriangular); ok { + // Trmm updates in place, so copy bU first. + amat := aU.RawTriangular() + if bTrans { + c := getWorkspace(bc, br, false) + var tmp Dense + tmp.SetRawMatrix(bmat) + c.Copy(&tmp) + aT := blas.Trans + if aTrans { + aT = blas.NoTrans + } + blas64.Trmm(blas.Right, aT, 1, amat, c.mat) + strictCopy(m, c.T()) + putWorkspace(c) + return + } + m.Copy(b) + blas64.Trmm(blas.Left, aT, 1, amat, m.mat) + return + } + if aU, ok := aU.(*VecDense); ok { + m.checkOverlap(aU.asGeneral()) + avec := aU.RawVector() + if aTrans { + // {1,ac} x {ac, bc} + // Transpose B so that the vector is on the right. + cvec := blas64.Vector{ + Inc: 1, + Data: m.mat.Data, + } + bT := blas.Trans + if bTrans { + bT = blas.NoTrans + } + blas64.Gemv(bT, 1, bmat, avec, 0, cvec) + return + } + // {ar,1} x {1,bc} which is not a vector result. + // Instead, construct A as a General. + amat := blas64.General{ + Rows: ar, + Cols: 1, + Stride: avec.Inc, + Data: avec.Data, + } + blas64.Gemm(aT, bT, 1, amat, bmat, 0, m.mat) + return + } + } + + m.checkOverlapMatrix(aU) + m.checkOverlapMatrix(bU) + row := getFloats(ac, false) + defer putFloats(row) + for r := 0; r < ar; r++ { + for i := range row { + row[i] = a.At(r, i) + } + for c := 0; c < bc; c++ { + var v float64 + for i, e := range row { + v += e * b.At(i, c) + } + m.mat.Data[r*m.mat.Stride+c] = v + } + } +} + +// strictCopy copies a into m panicking if the shape of a and m differ. +func strictCopy(m *Dense, a Matrix) { + r, c := m.Copy(a) + if r != m.mat.Rows || c != m.mat.Cols { + // Panic with a string since this + // is not a user-facing panic. + panic(ErrShape.Error()) + } +} + +// Exp calculates the exponential of the matrix a, e^a, placing the result +// in the receiver. Exp will panic with matrix.ErrShape if a is not square. +func (m *Dense) Exp(a Matrix) { + // The implementation used here is from Functions of Matrices: Theory and Computation + // Chapter 10, Algorithm 10.20. 
https://doi.org/10.1137/1.9780898717778.ch10 + + r, c := a.Dims() + if r != c { + panic(ErrShape) + } + + m.reuseAs(r, r) + if r == 1 { + m.mat.Data[0] = math.Exp(a.At(0, 0)) + return + } + + pade := []struct { + theta float64 + b []float64 + }{ + {theta: 0.015, b: []float64{ + 120, 60, 12, 1, + }}, + {theta: 0.25, b: []float64{ + 30240, 15120, 3360, 420, 30, 1, + }}, + {theta: 0.95, b: []float64{ + 17297280, 8648640, 1995840, 277200, 25200, 1512, 56, 1, + }}, + {theta: 2.1, b: []float64{ + 17643225600, 8821612800, 2075673600, 302702400, 30270240, 2162160, 110880, 3960, 90, 1, + }}, + } + + a1 := m + a1.Copy(a) + v := getWorkspace(r, r, true) + vraw := v.RawMatrix() + n := r * r + vvec := blas64.Vector{N: n, Inc: 1, Data: vraw.Data} + defer putWorkspace(v) + + u := getWorkspace(r, r, true) + uraw := u.RawMatrix() + uvec := blas64.Vector{N: n, Inc: 1, Data: uraw.Data} + defer putWorkspace(u) + + a2 := getWorkspace(r, r, false) + defer putWorkspace(a2) + + n1 := Norm(a, 1) + for i, t := range pade { + if n1 > t.theta { + continue + } + + // This loop only executes once, so + // this is not as horrible as it looks. + p := getWorkspace(r, r, true) + praw := p.RawMatrix() + pvec := blas64.Vector{N: n, Inc: 1, Data: praw.Data} + defer putWorkspace(p) + + for k := 0; k < r; k++ { + p.set(k, k, 1) + v.set(k, k, t.b[0]) + u.set(k, k, t.b[1]) + } + + a2.Mul(a1, a1) + for j := 0; j <= i; j++ { + p.Mul(p, a2) + blas64.Axpy(t.b[2*j+2], pvec, vvec) + blas64.Axpy(t.b[2*j+3], pvec, uvec) + } + u.Mul(a1, u) + + // Use p as a workspace here and + // rename u for the second call's + // receiver. + vmu, vpu := u, p + vpu.Add(v, u) + vmu.Sub(v, u) + + m.Solve(vmu, vpu) + return + } + + // Remaining Padé table line. + const theta13 = 5.4 + b := [...]float64{ + 64764752532480000, 32382376266240000, 7771770303897600, 1187353796428800, + 129060195264000, 10559470521600, 670442572800, 33522128640, + 1323241920, 40840800, 960960, 16380, 182, 1, + } + + s := math.Log2(n1 / theta13) + if s >= 0 { + s = math.Ceil(s) + a1.Scale(1/math.Pow(2, s), a1) + } + a2.Mul(a1, a1) + + i := getWorkspace(r, r, true) + for j := 0; j < r; j++ { + i.set(j, j, 1) + } + iraw := i.RawMatrix() + ivec := blas64.Vector{N: n, Inc: 1, Data: iraw.Data} + defer putWorkspace(i) + + a2raw := a2.RawMatrix() + a2vec := blas64.Vector{N: n, Inc: 1, Data: a2raw.Data} + + a4 := getWorkspace(r, r, false) + a4raw := a4.RawMatrix() + a4vec := blas64.Vector{N: n, Inc: 1, Data: a4raw.Data} + defer putWorkspace(a4) + a4.Mul(a2, a2) + + a6 := getWorkspace(r, r, false) + a6raw := a6.RawMatrix() + a6vec := blas64.Vector{N: n, Inc: 1, Data: a6raw.Data} + defer putWorkspace(a6) + a6.Mul(a2, a4) + + // V = A_6(b_12*A_6 + b_10*A_4 + b_8*A_2) + b_6*A_6 + b_4*A_4 + b_2*A_2 +b_0*I + blas64.Axpy(b[12], a6vec, vvec) + blas64.Axpy(b[10], a4vec, vvec) + blas64.Axpy(b[8], a2vec, vvec) + v.Mul(v, a6) + blas64.Axpy(b[6], a6vec, vvec) + blas64.Axpy(b[4], a4vec, vvec) + blas64.Axpy(b[2], a2vec, vvec) + blas64.Axpy(b[0], ivec, vvec) + + // U = A(A_6(b_13*A_6 + b_11*A_4 + b_9*A_2) + b_7*A_6 + b_5*A_4 + b_2*A_3 +b_1*I) + blas64.Axpy(b[13], a6vec, uvec) + blas64.Axpy(b[11], a4vec, uvec) + blas64.Axpy(b[9], a2vec, uvec) + u.Mul(u, a6) + blas64.Axpy(b[7], a6vec, uvec) + blas64.Axpy(b[5], a4vec, uvec) + blas64.Axpy(b[3], a2vec, uvec) + blas64.Axpy(b[1], ivec, uvec) + u.Mul(u, a1) + + // Use i as a workspace here and + // rename u for the second call's + // receiver. 
+ vmu, vpu := u, i + vpu.Add(v, u) + vmu.Sub(v, u) + + m.Solve(vmu, vpu) + + for ; s > 0; s-- { + m.Mul(m, m) + } +} + +// Pow calculates the integral power of the matrix a to n, placing the result +// in the receiver. Pow will panic if n is negative or if a is not square. +func (m *Dense) Pow(a Matrix, n int) { + if n < 0 { + panic("matrix: illegal power") + } + r, c := a.Dims() + if r != c { + panic(ErrShape) + } + + m.reuseAs(r, c) + + // Take possible fast paths. + switch n { + case 0: + for i := 0; i < r; i++ { + zero(m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+c]) + m.mat.Data[i*m.mat.Stride+i] = 1 + } + return + case 1: + m.Copy(a) + return + case 2: + m.Mul(a, a) + return + } + + // Perform iterative exponentiation by squaring in work space. + w := getWorkspace(r, r, false) + w.Copy(a) + s := getWorkspace(r, r, false) + s.Copy(a) + x := getWorkspace(r, r, false) + for n--; n > 0; n >>= 1 { + if n&1 != 0 { + x.Mul(w, s) + w, x = x, w + } + if n != 1 { + x.Mul(s, s) + s, x = x, s + } + } + m.Copy(w) + putWorkspace(w) + putWorkspace(s) + putWorkspace(x) +} + +// Scale multiplies the elements of a by f, placing the result in the receiver. +// +// See the Scaler interface for more information. +func (m *Dense) Scale(f float64, a Matrix) { + ar, ac := a.Dims() + + m.reuseAs(ar, ac) + + aU, aTrans := untranspose(a) + if rm, ok := aU.(RawMatrixer); ok { + amat := rm.RawMatrix() + if m == aU || m.checkOverlap(amat) { + var restore func() + m, restore = m.isolatedWorkspace(a) + defer restore() + } + if !aTrans { + for ja, jm := 0, 0; ja < ar*amat.Stride; ja, jm = ja+amat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = v * f + } + } + } else { + for ja, jm := 0, 0; ja < ac*amat.Stride; ja, jm = ja+amat.Stride, jm+1 { + for i, v := range amat.Data[ja : ja+ar] { + m.mat.Data[i*m.mat.Stride+jm] = v * f + } + } + } + return + } + + m.checkOverlapMatrix(a) + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, f*a.At(r, c)) + } + } +} + +// Apply applies the function fn to each of the elements of a, placing the +// resulting matrix in the receiver. The function fn takes a row/column +// index and element value and returns some function of that tuple. +func (m *Dense) Apply(fn func(i, j int, v float64) float64, a Matrix) { + ar, ac := a.Dims() + + m.reuseAs(ar, ac) + + aU, aTrans := untranspose(a) + if rm, ok := aU.(RawMatrixer); ok { + amat := rm.RawMatrix() + if m == aU || m.checkOverlap(amat) { + var restore func() + m, restore = m.isolatedWorkspace(a) + defer restore() + } + if !aTrans { + for j, ja, jm := 0, 0, 0; ja < ar*amat.Stride; j, ja, jm = j+1, ja+amat.Stride, jm+m.mat.Stride { + for i, v := range amat.Data[ja : ja+ac] { + m.mat.Data[i+jm] = fn(j, i, v) + } + } + } else { + for j, ja, jm := 0, 0, 0; ja < ac*amat.Stride; j, ja, jm = j+1, ja+amat.Stride, jm+1 { + for i, v := range amat.Data[ja : ja+ar] { + m.mat.Data[i*m.mat.Stride+jm] = fn(i, j, v) + } + } + } + return + } + + m.checkOverlapMatrix(a) + for r := 0; r < ar; r++ { + for c := 0; c < ac; c++ { + m.set(r, c, fn(r, c, a.At(r, c))) + } + } +} + +// RankOne performs a rank-one update to the matrix a and stores the result +// in the receiver. If a is zero, see Outer. 
+// m = a + alpha * x * y' +func (m *Dense) RankOne(a Matrix, alpha float64, x, y Vector) { + ar, ac := a.Dims() + xr, xc := x.Dims() + if xr != ar || xc != 1 { + panic(ErrShape) + } + yr, yc := y.Dims() + if yr != ac || yc != 1 { + panic(ErrShape) + } + + if a != m { + aU, _ := untranspose(a) + if rm, ok := aU.(RawMatrixer); ok { + m.checkOverlap(rm.RawMatrix()) + } + } + + var xmat, ymat blas64.Vector + fast := true + xU, _ := untranspose(x) + if rv, ok := xU.(RawVectorer); ok { + xmat = rv.RawVector() + m.checkOverlap((&VecDense{mat: xmat}).asGeneral()) + } else { + fast = false + } + yU, _ := untranspose(y) + if rv, ok := yU.(RawVectorer); ok { + ymat = rv.RawVector() + m.checkOverlap((&VecDense{mat: ymat}).asGeneral()) + } else { + fast = false + } + + if fast { + if m != a { + m.reuseAs(ar, ac) + m.Copy(a) + } + blas64.Ger(alpha, xmat, ymat, m.mat) + return + } + + m.reuseAs(ar, ac) + for i := 0; i < ar; i++ { + for j := 0; j < ac; j++ { + m.set(i, j, a.At(i, j)+alpha*x.AtVec(i)*y.AtVec(j)) + } + } +} + +// Outer calculates the outer product of the column vectors x and y, +// and stores the result in the receiver. +// m = alpha * x * y' +// In order to update an existing matrix, see RankOne. +func (m *Dense) Outer(alpha float64, x, y Vector) { + xr, xc := x.Dims() + if xc != 1 { + panic(ErrShape) + } + yr, yc := y.Dims() + if yc != 1 { + panic(ErrShape) + } + + r := xr + c := yr + + // Copied from reuseAs with use replaced by useZeroed + // and a final zero of the matrix elements if we pass + // the shape checks. + // TODO(kortschak): Factor out into reuseZeroedAs if + // we find another case that needs it. + if m.mat.Rows > m.capRows || m.mat.Cols > m.capCols { + // Panic as a string, not a mat.Error. + panic("mat: caps not correctly set") + } + if m.IsZero() { + m.mat = blas64.General{ + Rows: r, + Cols: c, + Stride: c, + Data: useZeroed(m.mat.Data, r*c), + } + m.capRows = r + m.capCols = c + } else if r != m.mat.Rows || c != m.mat.Cols { + panic(ErrShape) + } + + var xmat, ymat blas64.Vector + fast := true + xU, _ := untranspose(x) + if rv, ok := xU.(RawVectorer); ok { + xmat = rv.RawVector() + m.checkOverlap((&VecDense{mat: xmat}).asGeneral()) + + } else { + fast = false + } + yU, _ := untranspose(y) + if rv, ok := yU.(RawVectorer); ok { + ymat = rv.RawVector() + m.checkOverlap((&VecDense{mat: ymat}).asGeneral()) + } else { + fast = false + } + + if fast { + for i := 0; i < r; i++ { + zero(m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+c]) + } + blas64.Ger(alpha, xmat, ymat, m.mat) + return + } + + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + m.set(i, j, alpha*x.AtVec(i)*y.AtVec(j)) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/mat/diagonal.go b/vendor/gonum.org/v1/gonum/mat/diagonal.go new file mode 100644 index 0000000..beb2e43 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/diagonal.go @@ -0,0 +1,322 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
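[Editor's illustrative sketch, not part of the vendored file or of this patch.] The Dense arithmetic methods vendored above (Add, Mul, Inverse, Scale, Exp, ...) follow the receiver-based convention described in their doc comments. The short program below shows a typical call sequence; the import path, mat.Formatted and the method names come from the vendored sources, while the concrete values, variable names and the package main wrapper are assumptions made purely for illustration.

// Illustrative only: typical calls into the vendored Dense arithmetic API.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 2, []float64{4, 2, 1, 3})
	b := mat.NewDense(2, 2, []float64{1, 0, 0, 1})

	var sum, prod, inv mat.Dense
	sum.Add(a, b)  // element-wise addition; the zero-value receiver is sized automatically
	prod.Mul(a, b) // matrix product; panics with ErrShape if the inner dimensions disagree
	if err := inv.Inverse(a); err != nil {
		fmt.Println("inverse:", err) // a Condition error flags an ill-conditioned input
	}

	fmt.Printf("a+b =\n%v\na*b =\n%v\n", mat.Formatted(&sum), mat.Formatted(&prod))
}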
+ +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + diagDense *DiagDense + _ Matrix = diagDense + _ Diagonal = diagDense + _ MutableDiagonal = diagDense + _ Triangular = diagDense + _ TriBanded = diagDense + _ Symmetric = diagDense + _ SymBanded = diagDense + _ Banded = diagDense + _ RawBander = diagDense + _ RawSymBander = diagDense + + diag Diagonal + _ Matrix = diag + _ Diagonal = diag + _ Triangular = diag + _ TriBanded = diag + _ Symmetric = diag + _ SymBanded = diag + _ Banded = diag +) + +// Diagonal represents a diagonal matrix, that is a square matrix that only +// has non-zero terms on the diagonal. +type Diagonal interface { + Matrix + // Diag returns the number of rows/columns in the matrix. + Diag() int + + // Bandwidth and TBand are included in the Diagonal interface + // to allow the use of Diagonal types in banded functions. + // Bandwidth will always return (0, 0). + Bandwidth() (kl, ku int) + TBand() Banded + + // Triangle and TTri are included in the Diagonal interface + // to allow the use of Diagonal types in triangular functions. + Triangle() (int, TriKind) + TTri() Triangular + + // Symmetric and SymBand are included in the Diagonal interface + // to allow the use of Diagonal types in symmetric and banded symmetric + // functions respectively. + Symmetric() int + SymBand() (n, k int) + + // TriBand and TTriBand are included in the Diagonal interface + // to allow the use of Diagonal types in triangular banded functions. + TriBand() (n, k int, kind TriKind) + TTriBand() TriBanded +} + +// MutableDiagonal is a Diagonal matrix whose elements can be set. +type MutableDiagonal interface { + Diagonal + SetDiag(i int, v float64) +} + +// DiagDense represents a diagonal matrix in dense storage format. +type DiagDense struct { + mat blas64.Vector +} + +// NewDiagDense creates a new Diagonal matrix with n rows and n columns. +// The length of data must be n or data must be nil, otherwise NewDiagDense +// will panic. NewDiagDense will panic if n is zero. +func NewDiagDense(n int, data []float64) *DiagDense { + if n <= 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if data == nil { + data = make([]float64, n) + } + if len(data) != n { + panic(ErrShape) + } + return &DiagDense{ + mat: blas64.Vector{N: n, Data: data, Inc: 1}, + } +} + +// Diag returns the dimension of the receiver. +func (d *DiagDense) Diag() int { + return d.mat.N +} + +// Dims returns the dimensions of the matrix. +func (d *DiagDense) Dims() (r, c int) { + return d.mat.N, d.mat.N +} + +// T returns the transpose of the matrix. +func (d *DiagDense) T() Matrix { + return d +} + +// TTri returns the transpose of the matrix. Note that Diagonal matrices are +// Upper by default. +func (d *DiagDense) TTri() Triangular { + return TransposeTri{d} +} + +// TBand performs an implicit transpose by returning the receiver inside a +// TransposeBand. +func (d *DiagDense) TBand() Banded { + return TransposeBand{d} +} + +// TTriBand performs an implicit transpose by returning the receiver inside a +// TransposeTriBand. Note that Diagonal matrices are Upper by default. +func (d *DiagDense) TTriBand() TriBanded { + return TransposeTriBand{d} +} + +// Bandwidth returns the upper and lower bandwidths of the matrix. +// These values are always zero for diagonal matrices. +func (d *DiagDense) Bandwidth() (kl, ku int) { + return 0, 0 +} + +// Symmetric implements the Symmetric interface. 
+func (d *DiagDense) Symmetric() int { + return d.mat.N +} + +// SymBand returns the number of rows/columns in the matrix, and the size of +// the bandwidth. +func (d *DiagDense) SymBand() (n, k int) { + return d.mat.N, 0 +} + +// Triangle implements the Triangular interface. +func (d *DiagDense) Triangle() (int, TriKind) { + return d.mat.N, Upper +} + +// TriBand returns the number of rows/columns in the matrix, the +// size of the bandwidth, and the orientation. Note that Diagonal matrices are +// Upper by default. +func (d *DiagDense) TriBand() (n, k int, kind TriKind) { + return d.mat.N, 0, Upper +} + +// Reset zeros the length of the matrix so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (d *DiagDense) Reset() { + // No change of Inc or n to 0 may be + // made unless both are set to 0. + d.mat.Inc = 0 + d.mat.N = 0 + d.mat.Data = d.mat.Data[:0] +} + +// Zero sets all of the matrix elements to zero. +func (d *DiagDense) Zero() { + for i := 0; i < d.mat.N; i++ { + d.mat.Data[d.mat.Inc*i] = 0 + } +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (d *DiagDense) DiagView() Diagonal { + return d +} + +// DiagFrom copies the diagonal of m into the receiver. The receiver must +// be min(r, c) long or zero. Otherwise DiagFrom will panic. +func (d *DiagDense) DiagFrom(m Matrix) { + n := min(m.Dims()) + d.reuseAs(n) + + var vec blas64.Vector + switch r := m.(type) { + case *DiagDense: + vec = r.mat + case RawBander: + mat := r.RawBand() + vec = blas64.Vector{ + N: n, + Inc: mat.Stride, + Data: mat.Data[mat.KL : (n-1)*mat.Stride+mat.KL+1], + } + case RawMatrixer: + mat := r.RawMatrix() + vec = blas64.Vector{ + N: n, + Inc: mat.Stride + 1, + Data: mat.Data[:(n-1)*mat.Stride+n], + } + case RawSymBander: + mat := r.RawSymBand() + vec = blas64.Vector{ + N: n, + Inc: mat.Stride, + Data: mat.Data[:(n-1)*mat.Stride+1], + } + case RawSymmetricer: + mat := r.RawSymmetric() + vec = blas64.Vector{ + N: n, + Inc: mat.Stride + 1, + Data: mat.Data[:(n-1)*mat.Stride+n], + } + case RawTriBander: + mat := r.RawTriBand() + data := mat.Data + if mat.Uplo == blas.Lower { + data = data[mat.K:] + } + vec = blas64.Vector{ + N: n, + Inc: mat.Stride, + Data: data[:(n-1)*mat.Stride+1], + } + case RawTriangular: + mat := r.RawTriangular() + if mat.Diag == blas.Unit { + for i := 0; i < n; i += d.mat.Inc { + d.mat.Data[i] = 1 + } + return + } + vec = blas64.Vector{ + N: n, + Inc: mat.Stride + 1, + Data: mat.Data[:(n-1)*mat.Stride+n], + } + case RawVectorer: + d.mat.Data[0] = r.RawVector().Data[0] + return + default: + for i := 0; i < n; i++ { + d.setDiag(i, m.At(i, i)) + } + return + } + blas64.Copy(vec, d.mat) +} + +// RawBand returns the underlying data used by the receiver represented +// as a blas64.Band. +// Changes to elements in the receiver following the call will be reflected +// in returned blas64.Band. +func (d *DiagDense) RawBand() blas64.Band { + return blas64.Band{ + Rows: d.mat.N, + Cols: d.mat.N, + KL: 0, + KU: 0, + Stride: d.mat.Inc, + Data: d.mat.Data, + } +} + +// RawSymBand returns the underlying data used by the receiver represented +// as a blas64.SymmetricBand. +// Changes to elements in the receiver following the call will be reflected +// in returned blas64.Band. 
+func (d *DiagDense) RawSymBand() blas64.SymmetricBand { + return blas64.SymmetricBand{ + N: d.mat.N, + K: 0, + Stride: d.mat.Inc, + Uplo: blas.Upper, + Data: d.mat.Data, + } +} + +// reuseAs resizes an empty diagonal to a r×r diagonal, +// or checks that a non-empty matrix is r×r. +func (d *DiagDense) reuseAs(r int) { + if r == 0 { + panic(ErrZeroLength) + } + if d.IsZero() { + d.mat = blas64.Vector{ + Inc: 1, + Data: use(d.mat.Data, r), + } + d.mat.N = r + return + } + if r != d.mat.N { + panic(ErrShape) + } +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized vectors can be the +// receiver for size-restricted operations. DiagDenses can be zeroed using Reset. +func (d *DiagDense) IsZero() bool { + // It must be the case that d.Dims() returns + // zeros in this case. See comment in Reset(). + return d.mat.Inc == 0 +} + +// Trace returns the trace. +func (d *DiagDense) Trace() float64 { + rb := d.RawBand() + var tr float64 + for i := 0; i < rb.Rows; i++ { + tr += rb.Data[rb.KL+i*rb.Stride] + } + return tr + +} diff --git a/vendor/gonum.org/v1/gonum/mat/doc.go b/vendor/gonum.org/v1/gonum/mat/doc.go new file mode 100644 index 0000000..2cc9100 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/doc.go @@ -0,0 +1,169 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package mat provides implementations of float64 and complex128 matrix +// structures and linear algebra operations on them. +// +// Overview +// +// This section provides a quick overview of the mat package. The following +// sections provide more in depth commentary. +// +// mat provides: +// - Interfaces for Matrix classes (Matrix, Symmetric, Triangular) +// - Concrete implementations (Dense, SymDense, TriDense) +// - Methods and functions for using matrix data (Add, Trace, SymRankOne) +// - Types for constructing and using matrix factorizations (QR, LU) +// - The complementary types for complex matrices, CMatrix, CSymDense, etc. +// +// A matrix may be constructed through the corresponding New function. If no +// backing array is provided the matrix will be initialized to all zeros. +// // Allocate a zeroed real matrix of size 3×5 +// zero := mat.NewDense(3, 5, nil) +// If a backing data slice is provided, the matrix will have those elements. +// Matrices are all stored in row-major format. +// // Generate a 6×6 matrix of random values. +// data := make([]float64, 36) +// for i := range data { +// data[i] = rand.NormFloat64() +// } +// a := mat.NewDense(6, 6, data) +// Operations involving matrix data are implemented as functions when the values +// of the matrix remain unchanged +// tr := mat.Trace(a) +// and are implemented as methods when the operation modifies the receiver. +// zero.Copy(a) +// +// Receivers must be the correct size for the matrix operations, otherwise the +// operation will panic. As a special case for convenience, a zero-value matrix +// will be modified to have the correct size, allocating data if necessary. +// var c mat.Dense // construct a new zero-sized matrix +// c.Mul(a, a) // c is automatically adjusted to be 6×6 +// +// Zero-value of a matrix +// +// A zero-value matrix is either the Go language definition of a zero-value or +// is a zero-sized matrix with zero-length stride. Matrix implementations may have +// a Reset method to revert the receiver into a zero-valued matrix and an IsZero +// method that returns whether the matrix is zero-valued. 
+// So the following will all result in a zero-value matrix. +// - var a mat.Dense +// - a := NewDense(0, 0, make([]float64, 0, 100)) +// - a.Reset() +// A zero-value matrix can not be sliced even if it does have an adequately sized +// backing data slice, but can be expanded using its Grow method if it exists. +// +// The Matrix Interfaces +// +// The Matrix interface is the common link between the concrete types of real +// matrices, The Matrix interface is defined by three functions: Dims, which +// returns the dimensions of the Matrix, At, which returns the element in the +// specified location, and T for returning a Transpose (discussed later). All of +// the concrete types can perform these behaviors and so implement the interface. +// Methods and functions are designed to use this interface, so in particular the method +// func (m *Dense) Mul(a, b Matrix) +// constructs a *Dense from the result of a multiplication with any Matrix types, +// not just *Dense. Where more restrictive requirements must be met, there are also the +// Symmetric and Triangular interfaces. For example, in +// func (s *SymDense) AddSym(a, b Symmetric) +// the Symmetric interface guarantees a symmetric result. +// +// The CMatrix interface plays the same role for complex matrices. The difference +// is that the CMatrix type has the H method instead T, for returning the conjugate +// transpose. +// +// (Conjugate) Transposes +// +// The T method is used for transposition on real matrices, and H is used for +// conjugate transposition on complex matrices. For example, c.Mul(a.T(), b) computes +// c = a^T * b. The mat types implement this method implicitly — +// see the Transpose and Conjugate types for more details. Note that some +// operations have a transpose as part of their definition, as in *SymDense.SymOuterK. +// +// Matrix Factorization +// +// Matrix factorizations, such as the LU decomposition, typically have their own +// specific data storage, and so are each implemented as a specific type. The +// factorization can be computed through a call to Factorize +// var lu mat.LU +// lu.Factorize(a) +// The elements of the factorization can be extracted through methods on the +// factorized type, i.e. *LU.UTo. The factorization types can also be used directly, +// as in *Dense.SolveCholesky. Some factorizations can be updated directly, +// without needing to update the original matrix and refactorize, +// as in *LU.RankOne. +// +// BLAS and LAPACK +// +// BLAS and LAPACK are the standard APIs for linear algebra routines. Many +// operations in mat are implemented using calls to the wrapper functions +// in gonum/blas/blas64 and gonum/lapack/lapack64 and their complex equivalents. +// By default, blas64 and lapack64 call the native Go implementations of the +// routines. Alternatively, it is possible to use C-based implementations of the +// APIs through the respective cgo packages and "Use" functions. The Go +// implementation of LAPACK (used by default) makes calls +// through blas64, so if a cgo BLAS implementation is registered, the lapack64 +// calls will be partially executed in Go and partially executed in C. +// +// Type Switching +// +// The Matrix abstraction enables efficiency as well as interoperability. Go's +// type reflection capabilities are used to choose the most efficient routine +// given the specific concrete types. 
For example, in +// c.Mul(a, b) +// if a and b both implement RawMatrixer, that is, they can be represented as a +// blas64.General, blas64.Gemm (general matrix multiplication) is called, while +// instead if b is a RawSymmetricer blas64.Symm is used (general-symmetric +// multiplication), and if b is a *VecDense blas64.Gemv is used. +// +// There are many possible type combinations and special cases. No specific guarantees +// are made about the performance of any method, and in particular, note that an +// abstract matrix type may be copied into a concrete type of the corresponding +// value. If there are specific special cases that are needed, please submit a +// pull-request or file an issue. +// +// Invariants +// +// Matrix input arguments to functions are never directly modified. If an operation +// changes Matrix data, the mutated matrix will be the receiver of a function. +// +// For convenience, a matrix may be used as both a receiver and as an input, e.g. +// a.Pow(a, 6) +// v.SolveVec(a.T(), v) +// though in many cases this will cause an allocation (see Element Aliasing). +// An exception to this rule is Copy, which does not allow a.Copy(a.T()). +// +// Element Aliasing +// +// Most methods in mat modify receiver data. It is forbidden for the modified +// data region of the receiver to overlap the used data area of the input +// arguments. The exception to this rule is when the method receiver is equal to one +// of the input arguments, as in the a.Pow(a, 6) call above, or its implicit transpose. +// +// This prohibition is to help avoid subtle mistakes when the method needs to read +// from and write to the same data region. There are ways to make mistakes using the +// mat API, and mat functions will detect and complain about those. +// There are many ways to make mistakes by excursion from the mat API via +// interaction with raw matrix values. +// +// If you need to read the rest of this section to understand the behavior of +// your program, you are being clever. Don't be clever. If you must be clever, +// blas64 and lapack64 may be used to call the behavior directly. +// +// mat will use the following rules to detect overlap between the receiver and one +// of the inputs: +// - the input implements one of the Raw methods, and +// - the address ranges of the backing data slices overlap, and +// - the strides differ or there is an overlap in the used data elements. +// If such an overlap is detected, the method will panic. +// +// The following cases will not panic: +// - the data slices do not overlap, +// - there is pointer identity between the receiver and input values after +// the value has been untransposed if necessary. +// +// mat will not attempt to detect element overlap if the input does not implement a +// Raw method. Method behavior is undefined if there is undetected overlap. +// +package mat // import "gonum.org/v1/gonum/mat" diff --git a/vendor/gonum.org/v1/gonum/mat/eigen.go b/vendor/gonum.org/v1/gonum/mat/eigen.go new file mode 100644 index 0000000..ee971e4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/eigen.go @@ -0,0 +1,350 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
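[Editor's illustrative sketch, not part of the vendored file or of this patch.] The package documentation above explains zero-value receivers, the Matrix interface and the function-versus-method convention; the snippet below exercises exactly those points. The import path and the mat.Trace and mat.Formatted helpers appear in the vendored sources; the data values and the package main wrapper are assumptions for the example.

// Illustrative only: zero-value Dense receiver and non-mutating package functions.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(3, 3, []float64{
		2, 1, 0,
		1, 2, 1,
		0, 1, 2,
	})

	var c mat.Dense // zero-value matrix, legal as a receiver
	c.Mul(a, a.T()) // c is automatically resized to 3×3; a and a.T() are left unchanged

	fmt.Println("trace:", mat.Trace(&c)) // functions never modify their Matrix arguments
	fmt.Printf("c =\n%v\n", mat.Formatted(&c))
}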
+ +package mat + +import ( + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +const ( + badFact = "mat: use without successful factorization" + badNoVect = "mat: eigenvectors not computed" +) + +// EigenSym is a type for creating and manipulating the Eigen decomposition of +// symmetric matrices. +type EigenSym struct { + vectorsComputed bool + + values []float64 + vectors *Dense +} + +// Factorize computes the eigenvalue decomposition of the symmetric matrix a. +// The Eigen decomposition is defined as +// A = P * D * P^-1 +// where D is a diagonal matrix containing the eigenvalues of the matrix, and +// P is a matrix of the eigenvectors of A. Factorize computes the eigenvalues +// in ascending order. If the vectors input argument is false, the eigenvectors +// are not computed. +// +// Factorize returns whether the decomposition succeeded. If the decomposition +// failed, methods that require a successful factorization will panic. +func (e *EigenSym) Factorize(a Symmetric, vectors bool) (ok bool) { + // kill previous decomposition + e.vectorsComputed = false + e.values = e.values[:] + + n := a.Symmetric() + sd := NewSymDense(n, nil) + sd.CopySym(a) + + jobz := lapack.EVNone + if vectors { + jobz = lapack.EVCompute + } + w := make([]float64, n) + work := []float64{0} + lapack64.Syev(jobz, sd.mat, w, work, -1) + + work = getFloats(int(work[0]), false) + ok = lapack64.Syev(jobz, sd.mat, w, work, len(work)) + putFloats(work) + if !ok { + e.vectorsComputed = false + e.values = nil + e.vectors = nil + return false + } + e.vectorsComputed = vectors + e.values = w + e.vectors = NewDense(n, n, sd.mat.Data) + return true +} + +// succFact returns whether the receiver contains a successful factorization. +func (e *EigenSym) succFact() bool { + return len(e.values) != 0 +} + +// Values extracts the eigenvalues of the factorized matrix. If dst is +// non-nil, the values are stored in-place into dst. In this case +// dst must have length n, otherwise Values will panic. If dst is +// nil, then a new slice will be allocated of the proper length and filled +// with the eigenvalues. +// +// Values panics if the Eigen decomposition was not successful. +func (e *EigenSym) Values(dst []float64) []float64 { + if !e.succFact() { + panic(badFact) + } + if dst == nil { + dst = make([]float64, len(e.values)) + } + if len(dst) != len(e.values) { + panic(ErrSliceLengthMismatch) + } + copy(dst, e.values) + return dst +} + +// VectorsTo returns the eigenvectors of the decomposition. VectorsTo +// will panic if the eigenvectors were not computed during the factorization, +// or if the factorization was not successful. +// +// If dst is not nil, the eigenvectors are stored in-place into dst, and dst +// must have size n×n and panics otherwise. If dst is nil, a new matrix +// is allocated and returned. +func (e *EigenSym) VectorsTo(dst *Dense) *Dense { + if !e.succFact() { + panic(badFact) + } + if !e.vectorsComputed { + panic(badNoVect) + } + r, c := e.vectors.Dims() + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + dst.Copy(e.vectors) + return dst +} + +// EigenKind specifies the computation of eigenvectors during factorization. +type EigenKind int + +const ( + // EigenNone specifies to not compute any eigenvectors. + EigenNone EigenKind = 0 + // EigenLeft specifies to compute the left eigenvectors. + EigenLeft EigenKind = 1 << iota + // EigenRight specifies to compute the right eigenvectors. 
+ EigenRight + // EigenBoth is a convenience value for computing both eigenvectors. + EigenBoth EigenKind = EigenLeft | EigenRight +) + +// Eigen is a type for creating and using the eigenvalue decomposition of a dense matrix. +type Eigen struct { + n int // The size of the factorized matrix. + + kind EigenKind + + values []complex128 + rVectors *CDense + lVectors *CDense +} + +// succFact returns whether the receiver contains a successful factorization. +func (e *Eigen) succFact() bool { + return e.n != 0 +} + +// Factorize computes the eigenvalues of the square matrix a, and optionally +// the eigenvectors. +// +// A right eigenvalue/eigenvector combination is defined by +// A * x_r = λ * x_r +// where x_r is the column vector called an eigenvector, and λ is the corresponding +// eigenvalue. +// +// Similarly, a left eigenvalue/eigenvector combination is defined by +// x_l * A = λ * x_l +// The eigenvalues, but not the eigenvectors, are the same for both decompositions. +// +// Typically eigenvectors refer to right eigenvectors. +// +// In all cases, Factorize computes the eigenvalues of the matrix. kind +// specifies which of the eigenvectors, if any, to compute. See the EigenKind +// documentation for more information. +// Eigen panics if the input matrix is not square. +// +// Factorize returns whether the decomposition succeeded. If the decomposition +// failed, methods that require a successful factorization will panic. +func (e *Eigen) Factorize(a Matrix, kind EigenKind) (ok bool) { + // kill previous factorization. + e.n = 0 + e.kind = 0 + // Copy a because it is modified during the Lapack call. + r, c := a.Dims() + if r != c { + panic(ErrShape) + } + var sd Dense + sd.Clone(a) + + left := kind&EigenLeft != 0 + right := kind&EigenRight != 0 + + var vl, vr Dense + jobvl := lapack.LeftEVNone + jobvr := lapack.RightEVNone + if left { + vl = *NewDense(r, r, nil) + jobvl = lapack.LeftEVCompute + } + if right { + vr = *NewDense(c, c, nil) + jobvr = lapack.RightEVCompute + } + + wr := getFloats(c, false) + defer putFloats(wr) + wi := getFloats(c, false) + defer putFloats(wi) + + work := []float64{0} + lapack64.Geev(jobvl, jobvr, sd.mat, wr, wi, vl.mat, vr.mat, work, -1) + work = getFloats(int(work[0]), false) + first := lapack64.Geev(jobvl, jobvr, sd.mat, wr, wi, vl.mat, vr.mat, work, len(work)) + putFloats(work) + + if first != 0 { + e.values = nil + return false + } + e.n = r + e.kind = kind + + // Construct complex eigenvalues from float64 data. + values := make([]complex128, r) + for i, v := range wr { + values[i] = complex(v, wi[i]) + } + e.values = values + + // Construct complex eigenvectors from float64 data. + var cvl, cvr CDense + if left { + cvl = *NewCDense(r, r, nil) + e.complexEigenTo(&cvl, &vl) + e.lVectors = &cvl + } else { + e.lVectors = nil + } + if right { + cvr = *NewCDense(c, c, nil) + e.complexEigenTo(&cvr, &vr) + e.rVectors = &cvr + } else { + e.rVectors = nil + } + return true +} + +// Kind returns the EigenKind of the decomposition. If no decomposition has been +// computed, Kind returns -1. +func (e *Eigen) Kind() EigenKind { + if !e.succFact() { + return -1 + } + return e.kind +} + +// Values extracts the eigenvalues of the factorized matrix. If dst is +// non-nil, the values are stored in-place into dst. In this case +// dst must have length n, otherwise Values will panic. If dst is +// nil, then a new slice will be allocated of the proper length and +// filed with the eigenvalues. +// +// Values panics if the Eigen decomposition was not successful. 
+func (e *Eigen) Values(dst []complex128) []complex128 { + if !e.succFact() { + panic(badFact) + } + if dst == nil { + dst = make([]complex128, e.n) + } + if len(dst) != e.n { + panic(ErrSliceLengthMismatch) + } + copy(dst, e.values) + return dst +} + +// complexEigenTo extracts the complex eigenvectors from the real matrix d +// and stores them into the complex matrix dst. +// +// The columns of the returned n×n dense matrix contain the eigenvectors of the +// decomposition in the same order as the eigenvalues. +// If the j-th eigenvalue is real, then +// dst[:,j] = d[:,j], +// and if it is not real, then the elements of the j-th and (j+1)-th columns of d +// form complex conjugate pairs and the eigenvectors are recovered as +// dst[:,j] = d[:,j] + i*d[:,j+1], +// dst[:,j+1] = d[:,j] - i*d[:,j+1], +// where i is the imaginary unit. +func (e *Eigen) complexEigenTo(dst *CDense, d *Dense) { + r, c := d.Dims() + cr, cc := dst.Dims() + if r != cr { + panic("size mismatch") + } + if c != cc { + panic("size mismatch") + } + for j := 0; j < c; j++ { + if imag(e.values[j]) == 0 { + for i := 0; i < r; i++ { + dst.set(i, j, complex(d.at(i, j), 0)) + } + continue + } + for i := 0; i < r; i++ { + real := d.at(i, j) + imag := d.at(i, j+1) + dst.set(i, j, complex(real, imag)) + dst.set(i, j+1, complex(real, -imag)) + } + j++ + } +} + +// VectorsTo returns the right eigenvectors of the decomposition. VectorsTo +// will panic if the right eigenvectors were not computed during the factorization, +// or if the factorization was not successful. +// +// The computed eigenvectors are normalized to have Euclidean norm equal to 1 +// and largest component real. +func (e *Eigen) VectorsTo(dst *CDense) *CDense { + if !e.succFact() { + panic(badFact) + } + if e.kind&EigenRight == 0 { + panic(badNoVect) + } + if dst == nil { + dst = NewCDense(e.n, e.n, nil) + } else { + dst.reuseAs(e.n, e.n) + } + dst.Copy(e.rVectors) + return dst +} + +// LeftVectorsTo returns the left eigenvectors of the decomposition. LeftVectorsTo +// will panic if the left eigenvectors were not computed during the factorization, +// or if the factorization was not successful. +// +// The computed eigenvectors are normalized to have Euclidean norm equal to 1 +// and largest component real. +func (e *Eigen) LeftVectorsTo(dst *CDense) *CDense { + if !e.succFact() { + panic(badFact) + } + if e.kind&EigenLeft == 0 { + panic(badNoVect) + } + if dst == nil { + dst = NewCDense(e.n, e.n, nil) + } else { + dst.reuseAs(e.n, e.n) + } + dst.Copy(e.lVectors) + return dst +} diff --git a/vendor/gonum.org/v1/gonum/mat/errors.go b/vendor/gonum.org/v1/gonum/mat/errors.go new file mode 100644 index 0000000..0430d12 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/errors.go @@ -0,0 +1,149 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "fmt" + "runtime" + + "gonum.org/v1/gonum/lapack" +) + +// Condition is the condition number of a matrix. The condition +// number is defined as |A| * |A^-1|. +// +// One important use of Condition is during linear solve routines (finding x such +// that A * x = b). The condition number of A indicates the accuracy of +// the computed solution. A Condition error will be returned if the condition +// number of A is sufficiently large. If A is exactly singular to working precision, +// Condition == ∞, and the solve algorithm may have completed early. 
If Condition +// is large and finite the solve algorithm will be performed, but the computed +// solution may be innacurate. Due to the nature of finite precision arithmetic, +// the value of Condition is only an approximate test of singularity. +type Condition float64 + +func (c Condition) Error() string { + return fmt.Sprintf("matrix singular or near-singular with condition number %.4e", c) +} + +// ConditionTolerance is the tolerance limit of the condition number. If the +// condition number is above this value, the matrix is considered singular. +const ConditionTolerance = 1e16 + +const ( + // CondNorm is the matrix norm used for computing the condition number by routines + // in the matrix packages. + CondNorm = lapack.MaxRowSum + + // CondNormTrans is the norm used to compute on A^T to get the same result as + // computing CondNorm on A. + CondNormTrans = lapack.MaxColumnSum +) + +const stackTraceBufferSize = 1 << 20 + +// Maybe will recover a panic with a type mat.Error from fn, and return this error +// as the Err field of an ErrorStack. The stack trace for the panicking function will be +// recovered and placed in the StackTrace field. Any other error is re-panicked. +func Maybe(fn func()) (err error) { + defer func() { + if r := recover(); r != nil { + if e, ok := r.(Error); ok { + if e.string == "" { + panic("mat: invalid error") + } + buf := make([]byte, stackTraceBufferSize) + n := runtime.Stack(buf, false) + err = ErrorStack{Err: e, StackTrace: string(buf[:n])} + return + } + panic(r) + } + }() + fn() + return +} + +// MaybeFloat will recover a panic with a type mat.Error from fn, and return this error +// as the Err field of an ErrorStack. The stack trace for the panicking function will be +// recovered and placed in the StackTrace field. Any other error is re-panicked. +func MaybeFloat(fn func() float64) (f float64, err error) { + defer func() { + if r := recover(); r != nil { + if e, ok := r.(Error); ok { + if e.string == "" { + panic("mat: invalid error") + } + buf := make([]byte, stackTraceBufferSize) + n := runtime.Stack(buf, false) + err = ErrorStack{Err: e, StackTrace: string(buf[:n])} + return + } + panic(r) + } + }() + return fn(), nil +} + +// MaybeComplex will recover a panic with a type mat.Error from fn, and return this error +// as the Err field of an ErrorStack. The stack trace for the panicking function will be +// recovered and placed in the StackTrace field. Any other error is re-panicked. +func MaybeComplex(fn func() complex128) (f complex128, err error) { + defer func() { + if r := recover(); r != nil { + if e, ok := r.(Error); ok { + if e.string == "" { + panic("mat: invalid error") + } + buf := make([]byte, stackTraceBufferSize) + n := runtime.Stack(buf, false) + err = ErrorStack{Err: e, StackTrace: string(buf[:n])} + return + } + panic(r) + } + }() + return fn(), nil +} + +// Error represents matrix handling errors. These errors can be recovered by Maybe wrappers. 
+type Error struct{ string } + +func (err Error) Error() string { return err.string } + +var ( + ErrIndexOutOfRange = Error{"matrix: index out of range"} + ErrRowAccess = Error{"matrix: row index out of range"} + ErrColAccess = Error{"matrix: column index out of range"} + ErrVectorAccess = Error{"matrix: vector index out of range"} + ErrZeroLength = Error{"matrix: zero length in matrix dimension"} + ErrRowLength = Error{"matrix: row length mismatch"} + ErrColLength = Error{"matrix: col length mismatch"} + ErrSquare = Error{"matrix: expect square matrix"} + ErrNormOrder = Error{"matrix: invalid norm order for matrix"} + ErrSingular = Error{"matrix: matrix is singular"} + ErrShape = Error{"matrix: dimension mismatch"} + ErrIllegalStride = Error{"matrix: illegal stride"} + ErrPivot = Error{"matrix: malformed pivot list"} + ErrTriangle = Error{"matrix: triangular storage mismatch"} + ErrTriangleSet = Error{"matrix: triangular set out of bounds"} + ErrBandSet = Error{"matrix: band set out of bounds"} + ErrDiagSet = Error{"matrix: diagonal set out of bounds"} + ErrSliceLengthMismatch = Error{"matrix: input slice length mismatch"} + ErrNotPSD = Error{"matrix: input not positive symmetric definite"} + ErrFailedEigen = Error{"matrix: eigendecomposition not successful"} +) + +// ErrorStack represents matrix handling errors that have been recovered by Maybe wrappers. +type ErrorStack struct { + Err error + + // StackTrace is the stack trace + // recovered by Maybe, MaybeFloat + // or MaybeComplex. + StackTrace string +} + +func (err ErrorStack) Error() string { return err.Err.Error() } diff --git a/vendor/gonum.org/v1/gonum/mat/format.go b/vendor/gonum.org/v1/gonum/mat/format.go new file mode 100644 index 0000000..9b60cb3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/format.go @@ -0,0 +1,238 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "fmt" + "strconv" +) + +// Formatted returns a fmt.Formatter for the matrix m using the given options. +func Formatted(m Matrix, options ...FormatOption) fmt.Formatter { + f := formatter{ + matrix: m, + dot: '.', + } + for _, o := range options { + o(&f) + } + return f +} + +type formatter struct { + matrix Matrix + prefix string + margin int + dot byte + squeeze bool +} + +// FormatOption is a functional option for matrix formatting. +type FormatOption func(*formatter) + +// Prefix sets the formatted prefix to the string p. Prefix is a string that is prepended to +// each line of output. +func Prefix(p string) FormatOption { + return func(f *formatter) { f.prefix = p } +} + +// Excerpt sets the maximum number of rows and columns to print at the margins of the matrix +// to m. If m is zero or less all elements are printed. +func Excerpt(m int) FormatOption { + return func(f *formatter) { f.margin = m } +} + +// DotByte sets the dot character to b. The dot character is used to replace zero elements +// if the result is printed with the fmt ' ' verb flag. Without a DotByte option, the default +// dot character is '.'. +func DotByte(b byte) FormatOption { + return func(f *formatter) { f.dot = b } +} + +// Squeeze sets the printing behaviour to minimise column width for each individual column. +func Squeeze() FormatOption { + return func(f *formatter) { f.squeeze = true } +} + +// Format satisfies the fmt.Formatter interface. 
+func (f formatter) Format(fs fmt.State, c rune) { + if c == 'v' && fs.Flag('#') { + fmt.Fprintf(fs, "%#v", f.matrix) + return + } + format(f.matrix, f.prefix, f.margin, f.dot, f.squeeze, fs, c) +} + +// format prints a pretty representation of m to the fs io.Writer. The format character c +// specifies the numerical representation of elements; valid values are those for float64 +// specified in the fmt package, with their associated flags. In addition to this, a space +// preceding a verb indicates that zero values should be represented by the dot character. +// The printed range of the matrix can be limited by specifying a positive value for margin; +// If margin is greater than zero, only the first and last margin rows/columns of the matrix +// are output. If squeeze is true, column widths are determined on a per-column basis. +// +// format will not provide Go syntax output. +func format(m Matrix, prefix string, margin int, dot byte, squeeze bool, fs fmt.State, c rune) { + rows, cols := m.Dims() + + var printed int + if margin <= 0 { + printed = rows + if cols > printed { + printed = cols + } + } else { + printed = margin + } + + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + + var ( + maxWidth int + widths widther + buf, pad []byte + ) + if squeeze { + widths = make(columnWidth, cols) + } else { + widths = new(uniformWidth) + } + switch c { + case 'v', 'e', 'E', 'f', 'F', 'g', 'G': + if c == 'v' { + buf, maxWidth = maxCellWidth(m, 'g', printed, prec, widths) + } else { + buf, maxWidth = maxCellWidth(m, c, printed, prec, widths) + } + default: + fmt.Fprintf(fs, "%%!%c(%T=Dims(%d, %d))", c, m, rows, cols) + return + } + width, _ := fs.Width() + width = max(width, maxWidth) + pad = make([]byte, max(width, 2)) + for i := range pad { + pad[i] = ' ' + } + + first := true + if rows > 2*printed || cols > 2*printed { + first = false + fmt.Fprintf(fs, "Dims(%d, %d)\n", rows, cols) + } + + skipZero := fs.Flag(' ') + for i := 0; i < rows; i++ { + if !first { + fmt.Fprint(fs, prefix) + } + first = false + var el string + switch { + case rows == 1: + fmt.Fprint(fs, "[") + el = "]" + case i == 0: + fmt.Fprint(fs, "⎡") + el = "⎤\n" + case i < rows-1: + fmt.Fprint(fs, "⎢") + el = "⎥\n" + default: + fmt.Fprint(fs, "⎣") + el = "⎦" + } + + for j := 0; j < cols; j++ { + if j >= printed && j < cols-printed { + j = cols - printed - 1 + if i == 0 || i == rows-1 { + fmt.Fprint(fs, "... ... 
") + } else { + fmt.Fprint(fs, " ") + } + continue + } + + v := m.At(i, j) + if v == 0 && skipZero { + buf = buf[:1] + buf[0] = dot + } else { + if c == 'v' { + buf = strconv.AppendFloat(buf[:0], v, 'g', prec, 64) + } else { + buf = strconv.AppendFloat(buf[:0], v, byte(c), prec, 64) + } + } + if fs.Flag('-') { + fs.Write(buf) + fs.Write(pad[:widths.width(j)-len(buf)]) + } else { + fs.Write(pad[:widths.width(j)-len(buf)]) + fs.Write(buf) + } + + if j < cols-1 { + fs.Write(pad[:2]) + } + } + + fmt.Fprint(fs, el) + + if i >= printed-1 && i < rows-printed && 2*printed < rows { + i = rows - printed - 1 + fmt.Fprintf(fs, "%s .\n%[1]s .\n%[1]s .\n", prefix) + continue + } + } +} + +func maxCellWidth(m Matrix, c rune, printed, prec int, w widther) ([]byte, int) { + var ( + buf = make([]byte, 0, 64) + rows, cols = m.Dims() + max int + ) + for i := 0; i < rows; i++ { + if i >= printed-1 && i < rows-printed && 2*printed < rows { + i = rows - printed - 1 + continue + } + for j := 0; j < cols; j++ { + if j >= printed && j < cols-printed { + continue + } + + buf = strconv.AppendFloat(buf, m.At(i, j), byte(c), prec, 64) + if len(buf) > max { + max = len(buf) + } + if len(buf) > w.width(j) { + w.setWidth(j, len(buf)) + } + buf = buf[:0] + } + } + return buf, max +} + +type widther interface { + width(i int) int + setWidth(i, w int) +} + +type uniformWidth int + +func (u *uniformWidth) width(_ int) int { return int(*u) } +func (u *uniformWidth) setWidth(_, w int) { *u = uniformWidth(w) } + +type columnWidth []int + +func (c columnWidth) width(i int) int { return c[i] } +func (c columnWidth) setWidth(i, w int) { c[i] = w } diff --git a/vendor/gonum.org/v1/gonum/mat/gsvd.go b/vendor/gonum.org/v1/gonum/mat/gsvd.go new file mode 100644 index 0000000..2de511a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/gsvd.go @@ -0,0 +1,415 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +// GSVDKind specifies the treatment of singular vectors during a GSVD +// factorization. +type GSVDKind int + +const ( + // GSVDNone specifies that no singular vectors should be computed during + // the decomposition. + GSVDNone GSVDKind = 0 + + // GSVDU specifies that the U singular vectors should be computed during + // the decomposition. + GSVDU GSVDKind = 1 << iota + // GSVDV specifies that the V singular vectors should be computed during + // the decomposition. + GSVDV + // GSVDQ specifies that the Q singular vectors should be computed during + // the decomposition. + GSVDQ + + // GSVDAll is a convenience value for computing all of the singular vectors. + GSVDAll = GSVDU | GSVDV | GSVDQ +) + +// GSVD is a type for creating and using the Generalized Singular Value Decomposition +// (GSVD) of a matrix. +// +// The factorization is a linear transformation of the data sets from the given +// variable×sample spaces to reduced and diagonalized "eigenvariable"×"eigensample" +// spaces. +type GSVD struct { + kind GSVDKind + + r, p, c, k, l int + s1, s2 []float64 + a, b, u, v, q blas64.General + + work []float64 + iwork []int +} + +// succFact returns whether the receiver contains a successful factorization. 
+func (gsvd *GSVD) succFact() bool { + return gsvd.r != 0 +} + +// Factorize computes the generalized singular value decomposition (GSVD) of the input +// the r×c matrix A and the p×c matrix B. The singular values of A and B are computed +// in all cases, while the singular vectors are optionally computed depending on the +// input kind. +// +// The full singular value decomposition (kind == GSVDAll) deconstructs A and B as +// A = U * Σ₁ * [ 0 R ] * Q^T +// +// B = V * Σ₂ * [ 0 R ] * Q^T +// where Σ₁ and Σ₂ are r×(k+l) and p×(k+l) diagonal matrices of singular values, and +// U, V and Q are r×r, p×p and c×c orthogonal matrices of singular vectors. k+l is the +// effective numerical rank of the matrix [ A^T B^T ]^T. +// +// It is frequently not necessary to compute the full GSVD. Computation time and +// storage costs can be reduced using the appropriate kind. Either only the singular +// values can be computed (kind == SVDNone), or in conjunction with specific singular +// vectors (kind bit set according to matrix.GSVDU, matrix.GSVDV and matrix.GSVDQ). +// +// Factorize returns whether the decomposition succeeded. If the decomposition +// failed, routines that require a successful factorization will panic. +func (gsvd *GSVD) Factorize(a, b Matrix, kind GSVDKind) (ok bool) { + // kill the previous decomposition + gsvd.r = 0 + gsvd.kind = 0 + + r, c := a.Dims() + gsvd.r, gsvd.c = r, c + p, c := b.Dims() + gsvd.p = p + if gsvd.c != c { + panic(ErrShape) + } + var jobU, jobV, jobQ lapack.GSVDJob + switch { + default: + panic("gsvd: bad input kind") + case kind == GSVDNone: + jobU = lapack.GSVDNone + jobV = lapack.GSVDNone + jobQ = lapack.GSVDNone + case GSVDAll&kind != 0: + if GSVDU&kind != 0 { + jobU = lapack.GSVDU + gsvd.u = blas64.General{ + Rows: r, + Cols: r, + Stride: r, + Data: use(gsvd.u.Data, r*r), + } + } + if GSVDV&kind != 0 { + jobV = lapack.GSVDV + gsvd.v = blas64.General{ + Rows: p, + Cols: p, + Stride: p, + Data: use(gsvd.v.Data, p*p), + } + } + if GSVDQ&kind != 0 { + jobQ = lapack.GSVDQ + gsvd.q = blas64.General{ + Rows: c, + Cols: c, + Stride: c, + Data: use(gsvd.q.Data, c*c), + } + } + } + + // A and B are destroyed on call, so copy the matrices. + aCopy := DenseCopyOf(a) + bCopy := DenseCopyOf(b) + + gsvd.s1 = use(gsvd.s1, c) + gsvd.s2 = use(gsvd.s2, c) + + gsvd.iwork = useInt(gsvd.iwork, c) + + gsvd.work = use(gsvd.work, 1) + lapack64.Ggsvd3(jobU, jobV, jobQ, aCopy.mat, bCopy.mat, gsvd.s1, gsvd.s2, gsvd.u, gsvd.v, gsvd.q, gsvd.work, -1, gsvd.iwork) + gsvd.work = use(gsvd.work, int(gsvd.work[0])) + gsvd.k, gsvd.l, ok = lapack64.Ggsvd3(jobU, jobV, jobQ, aCopy.mat, bCopy.mat, gsvd.s1, gsvd.s2, gsvd.u, gsvd.v, gsvd.q, gsvd.work, len(gsvd.work), gsvd.iwork) + if ok { + gsvd.a = aCopy.mat + gsvd.b = bCopy.mat + gsvd.kind = kind + } + return ok +} + +// Kind returns the GSVDKind of the decomposition. If no decomposition has been +// computed, Kind returns -1. +func (gsvd *GSVD) Kind() GSVDKind { + if !gsvd.succFact() { + return -1 + } + return gsvd.kind +} + +// Rank returns the k and l terms of the rank of [ A^T B^T ]^T. +func (gsvd *GSVD) Rank() (k, l int) { + return gsvd.k, gsvd.l +} + +// GeneralizedValues returns the generalized singular values of the factorized matrices. +// If the input slice is non-nil, the values will be stored in-place into the slice. +// In this case, the slice must have length min(r,c)-k, and GeneralizedValues will +// panic with matrix.ErrSliceLengthMismatch otherwise. 
If the input slice is nil, +// a new slice of the appropriate length will be allocated and returned. +// +// GeneralizedValues will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) GeneralizedValues(v []float64) []float64 { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + c := gsvd.c + k := gsvd.k + d := min(r, c) + if v == nil { + v = make([]float64, d-k) + } + if len(v) != d-k { + panic(ErrSliceLengthMismatch) + } + floats.DivTo(v, gsvd.s1[k:d], gsvd.s2[k:d]) + return v +} + +// ValuesA returns the singular values of the factorized A matrix. +// If the input slice is non-nil, the values will be stored in-place into the slice. +// In this case, the slice must have length min(r,c)-k, and ValuesA will panic with +// matrix.ErrSliceLengthMismatch otherwise. If the input slice is nil, +// a new slice of the appropriate length will be allocated and returned. +// +// ValuesA will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) ValuesA(s []float64) []float64 { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + c := gsvd.c + k := gsvd.k + d := min(r, c) + if s == nil { + s = make([]float64, d-k) + } + if len(s) != d-k { + panic(ErrSliceLengthMismatch) + } + copy(s, gsvd.s1[k:min(r, c)]) + return s +} + +// ValuesB returns the singular values of the factorized B matrix. +// If the input slice is non-nil, the values will be stored in-place into the slice. +// In this case, the slice must have length min(r,c)-k, and ValuesB will panic with +// matrix.ErrSliceLengthMismatch otherwise. If the input slice is nil, +// a new slice of the appropriate length will be allocated and returned. +// +// ValuesB will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) ValuesB(s []float64) []float64 { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + c := gsvd.c + k := gsvd.k + d := min(r, c) + if s == nil { + s = make([]float64, d-k) + } + if len(s) != d-k { + panic(ErrSliceLengthMismatch) + } + copy(s, gsvd.s2[k:d]) + return s +} + +// ZeroRTo extracts the matrix [ 0 R ] from the singular value decomposition, storing +// the result in-place into dst. [ 0 R ] is size (k+l)×c. +// If dst is nil, a new matrix is allocated. The resulting ZeroR matrix is returned. +// +// ZeroRTo will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) ZeroRTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + c := gsvd.c + k := gsvd.k + l := gsvd.l + h := min(k+l, r) + if dst == nil { + dst = NewDense(k+l, c, nil) + } else { + dst.reuseAsZeroed(k+l, c) + } + a := Dense{ + mat: gsvd.a, + capRows: r, + capCols: c, + } + dst.Slice(0, h, c-k-l, c).(*Dense). + Copy(a.Slice(0, h, c-k-l, c)) + if r < k+l { + b := Dense{ + mat: gsvd.b, + capRows: gsvd.p, + capCols: c, + } + dst.Slice(r, k+l, c+r-k-l, c).(*Dense). + Copy(b.Slice(r-k, l, c+r-k-l, c)) + } + return dst +} + +// SigmaATo extracts the matrix Σ₁ from the singular value decomposition, storing +// the result in-place into dst. Σ₁ is size r×(k+l). +// If dst is nil, a new matrix is allocated. The resulting SigmaA matrix is returned. +// +// SigmaATo will panic if the receiver does not contain a successful factorization. 
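+//
+// As an illustrative sketch only, Σ₁ combines with the other extracted factors to
+// reassemble the original matrix; here a and b are placeholders for the r×c and p×c
+// inputs to Factorize, and only the U and Q vectors are requested:
+//
+//  var gsvd mat.GSVD
+//  if ok := gsvd.Factorize(a, b, mat.GSVDU|mat.GSVDQ); ok {
+//      u := gsvd.UTo(nil)       // r×r
+//      s1 := gsvd.SigmaATo(nil) // r×(k+l)
+//      zr := gsvd.ZeroRTo(nil)  // (k+l)×c
+//      q := gsvd.QTo(nil)       // c×c
+//
+//      // Reassemble A ≈ U * Σ₁ * [ 0 R ] * Qᵀ.
+//      var us1, us1r, approxA mat.Dense
+//      us1.Mul(u, s1)
+//      us1r.Mul(&us1, zr)
+//      approxA.Mul(&us1r, q.T())
+//  }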
+func (gsvd *GSVD) SigmaATo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + k := gsvd.k + l := gsvd.l + if dst == nil { + dst = NewDense(r, k+l, nil) + } else { + dst.reuseAsZeroed(r, k+l) + } + for i := 0; i < k; i++ { + dst.set(i, i, 1) + } + for i := k; i < min(r, k+l); i++ { + dst.set(i, i, gsvd.s1[i]) + } + return dst +} + +// SigmaBTo extracts the matrix Σ₂ from the singular value decomposition, storing +// the result in-place into dst. Σ₂ is size p×(k+l). +// If dst is nil, a new matrix is allocated. The resulting SigmaB matrix is returned. +// +// SigmaBTo will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) SigmaBTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + r := gsvd.r + p := gsvd.p + k := gsvd.k + l := gsvd.l + if dst == nil { + dst = NewDense(p, k+l, nil) + } else { + dst.reuseAsZeroed(p, k+l) + } + for i := 0; i < min(l, r-k); i++ { + dst.set(i, i+k, gsvd.s2[k+i]) + } + for i := r - k; i < l; i++ { + dst.set(i, i+k, 1) + } + return dst +} + +// UTo extracts the matrix U from the singular value decomposition, storing +// the result in-place into dst. U is size r×r. +// If dst is nil, a new matrix is allocated. The resulting U matrix is returned. +// +// UTo will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) UTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + if gsvd.kind&GSVDU == 0 { + panic("mat: improper GSVD kind") + } + r := gsvd.u.Rows + c := gsvd.u.Cols + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + tmp := &Dense{ + mat: gsvd.u, + capRows: r, + capCols: c, + } + dst.Copy(tmp) + return dst +} + +// VTo extracts the matrix V from the singular value decomposition, storing +// the result in-place into dst. V is size p×p. +// If dst is nil, a new matrix is allocated. The resulting V matrix is returned. +// +// VTo will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) VTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + if gsvd.kind&GSVDV == 0 { + panic("mat: improper GSVD kind") + } + r := gsvd.v.Rows + c := gsvd.v.Cols + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + tmp := &Dense{ + mat: gsvd.v, + capRows: r, + capCols: c, + } + dst.Copy(tmp) + return dst +} + +// QTo extracts the matrix Q from the singular value decomposition, storing +// the result in-place into dst. Q is size c×c. +// If dst is nil, a new matrix is allocated. The resulting Q matrix is returned. +// +// QTo will panic if the receiver does not contain a successful factorization. +func (gsvd *GSVD) QTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + if gsvd.kind&GSVDQ == 0 { + panic("mat: improper GSVD kind") + } + r := gsvd.q.Rows + c := gsvd.q.Cols + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + tmp := &Dense{ + mat: gsvd.q, + capRows: r, + capCols: c, + } + dst.Copy(tmp) + return dst +} diff --git a/vendor/gonum.org/v1/gonum/mat/hogsvd.go b/vendor/gonum.org/v1/gonum/mat/hogsvd.go new file mode 100644 index 0000000..bd843e6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/hogsvd.go @@ -0,0 +1,233 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package mat + +import ( + "errors" + + "gonum.org/v1/gonum/blas/blas64" +) + +// HOGSVD is a type for creating and using the Higher Order Generalized Singular Value +// Decomposition (HOGSVD) of a set of matrices. +// +// The factorization is a linear transformation of the data sets from the given +// variable×sample spaces to reduced and diagonalized "eigenvariable"×"eigensample" +// spaces. +type HOGSVD struct { + n int + v *Dense + b []Dense + + err error +} + +// succFact returns whether the receiver contains a successful factorization. +func (gsvd *HOGSVD) succFact() bool { + return gsvd.n != 0 +} + +// Factorize computes the higher order generalized singular value decomposition (HOGSVD) +// of the n input r_i×c column tall matrices in m. HOGSV extends the GSVD case from 2 to n +// input matrices. +// +// M_0 = U_0 * Σ_0 * V^T +// M_1 = U_1 * Σ_1 * V^T +// . +// . +// . +// M_{n-1} = U_{n-1} * Σ_{n-1} * V^T +// +// where U_i are r_i×c matrices of singular vectors, Σ are c×c matrices singular values, and V +// is a c×c matrix of singular vectors. +// +// Factorize returns whether the decomposition succeeded. If the decomposition +// failed, routines that require a successful factorization will panic. +func (gsvd *HOGSVD) Factorize(m ...Matrix) (ok bool) { + // Factorize performs the HOGSVD factorisation + // essentially as described by Ponnapalli et al. + // https://doi.org/10.1371/journal.pone.0028072 + + if len(m) < 2 { + panic("hogsvd: too few matrices") + } + gsvd.n = 0 + + r, c := m[0].Dims() + a := make([]Cholesky, len(m)) + var ts SymDense + for i, d := range m { + rd, cd := d.Dims() + if rd < cd { + gsvd.err = ErrShape + return false + } + if rd > r { + r = rd + } + if cd != c { + panic(ErrShape) + } + ts.Reset() + ts.SymOuterK(1, d.T()) + ok = a[i].Factorize(&ts) + if !ok { + gsvd.err = errors.New("hogsvd: cholesky decomposition failed") + return false + } + } + + s := getWorkspace(c, c, true) + defer putWorkspace(s) + sij := getWorkspace(c, c, false) + defer putWorkspace(sij) + for i, ai := range a { + for _, aj := range a[i+1:] { + gsvd.err = ai.SolveCholTo(sij, &aj) + if gsvd.err != nil { + return false + } + s.Add(s, sij) + + gsvd.err = aj.SolveCholTo(sij, &ai) + if gsvd.err != nil { + return false + } + s.Add(s, sij) + } + } + s.Scale(1/float64(len(m)*(len(m)-1)), s) + + var eig Eigen + ok = eig.Factorize(s.T(), EigenRight) + if !ok { + gsvd.err = errors.New("hogsvd: eigen decomposition failed") + return false + } + vc := eig.VectorsTo(nil) + // vc is guaranteed to have real eigenvalues. + rc, cc := vc.Dims() + v := NewDense(rc, cc, nil) + for i := 0; i < rc; i++ { + for j := 0; j < cc; j++ { + a := vc.At(i, j) + v.set(i, j, real(a)) + } + } + // Rescale the columns of v by their Frobenius norms. + // Work done in cv is reflected in v. + var cv VecDense + for j := 0; j < c; j++ { + cv.ColViewOf(v, j) + cv.ScaleVec(1/blas64.Nrm2(cv.mat), &cv) + } + + b := make([]Dense, len(m)) + biT := getWorkspace(c, r, false) + defer putWorkspace(biT) + for i, d := range m { + // All calls to reset will leave a zeroed + // matrix with capacity to store the result + // without additional allocation. + biT.Reset() + gsvd.err = biT.Solve(v, d.T()) + if gsvd.err != nil { + return false + } + b[i].Clone(biT.T()) + } + + gsvd.n = len(m) + gsvd.v = v + gsvd.b = b + return true +} + +// Err returns the reason for a factorization failure. +func (gsvd *HOGSVD) Err() error { + return gsvd.err +} + +// Len returns the number of matrices that have been factorized. 
If Len returns +// zero, the factorization was not successful. +func (gsvd *HOGSVD) Len() int { + return gsvd.n +} + +// UTo extracts the matrix U_n from the singular value decomposition, storing +// the result in-place into dst. U_n is size r×c. +// If dst is nil, a new matrix is allocated. The resulting U matrix is returned. +// +// UTo will panic if the receiver does not contain a successful factorization. +func (gsvd *HOGSVD) UTo(dst *Dense, n int) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + if n < 0 || gsvd.n <= n { + panic("hogsvd: invalid index") + } + + if dst == nil { + r, c := gsvd.b[n].Dims() + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(gsvd.b[n].Dims()) + } + dst.Copy(&gsvd.b[n]) + var v VecDense + for j, f := range gsvd.Values(nil, n) { + v.ColViewOf(dst, j) + v.ScaleVec(1/f, &v) + } + return dst +} + +// Values returns the nth set of singular values of the factorized system. +// If the input slice is non-nil, the values will be stored in-place into the slice. +// In this case, the slice must have length c, and Values will panic with +// matrix.ErrSliceLengthMismatch otherwise. If the input slice is nil, +// a new slice of the appropriate length will be allocated and returned. +// +// Values will panic if the receiver does not contain a successful factorization. +func (gsvd *HOGSVD) Values(s []float64, n int) []float64 { + if !gsvd.succFact() { + panic(badFact) + } + if n < 0 || gsvd.n <= n { + panic("hogsvd: invalid index") + } + + _, c := gsvd.b[n].Dims() + if s == nil { + s = make([]float64, c) + } else if len(s) != c { + panic(ErrSliceLengthMismatch) + } + var v VecDense + for j := 0; j < c; j++ { + v.ColViewOf(&gsvd.b[n], j) + s[j] = blas64.Nrm2(v.mat) + } + return s +} + +// VTo extracts the matrix V from the singular value decomposition, storing +// the result in-place into dst. V is size c×c. +// If dst is nil, a new matrix is allocated. The resulting V matrix is returned. +// +// VTo will panic if the receiver does not contain a successful factorization. +func (gsvd *HOGSVD) VTo(dst *Dense) *Dense { + if !gsvd.succFact() { + panic(badFact) + } + if dst == nil { + r, c := gsvd.v.Dims() + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(gsvd.v.Dims()) + } + dst.Copy(gsvd.v) + return dst +} diff --git a/vendor/gonum.org/v1/gonum/mat/index_bound_checks.go b/vendor/gonum.org/v1/gonum/mat/index_bound_checks.go new file mode 100644 index 0000000..59815a6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/index_bound_checks.go @@ -0,0 +1,348 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file must be kept in sync with index_no_bound_checks.go. + +// +build bounds + +package mat + +// At returns the element at row i, column j. +func (m *Dense) At(i, j int) float64 { + return m.at(i, j) +} + +func (m *Dense) at(i, j int) float64 { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + return m.mat.Data[i*m.mat.Stride+j] +} + +// Set sets the element at row i, column j to the value v. +func (m *Dense) Set(i, j int, v float64) { + m.set(i, j, v) +} + +func (m *Dense) set(i, j int, v float64) { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + m.mat.Data[i*m.mat.Stride+j] = v +} + +// At returns the element at row i, column j. 
+func (m *CDense) At(i, j int) complex128 { + return m.at(i, j) +} + +func (m *CDense) at(i, j int) complex128 { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + return m.mat.Data[i*m.mat.Stride+j] +} + +// Set sets the element at row i, column j to the value v. +func (m *CDense) Set(i, j int, v complex128) { + m.set(i, j, v) +} + +func (m *CDense) set(i, j int, v complex128) { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + m.mat.Data[i*m.mat.Stride+j] = v +} + +// At returns the element at row i. +// It panics if i is out of bounds or if j is not zero. +func (v *VecDense) At(i, j int) float64 { + if j != 0 { + panic(ErrColAccess) + } + return v.at(i) +} + +// AtVec returns the element at row i. +// It panics if i is out of bounds. +func (v *VecDense) AtVec(i int) float64 { + return v.at(i) +} + +func (v *VecDense) at(i int) float64 { + if uint(i) >= uint(v.mat.N) { + panic(ErrRowAccess) + } + return v.mat.Data[i*v.mat.Inc] +} + +// SetVec sets the element at row i to the value val. +// It panics if i is out of bounds. +func (v *VecDense) SetVec(i int, val float64) { + v.setVec(i, val) +} + +func (v *VecDense) setVec(i int, val float64) { + if uint(i) >= uint(v.mat.N) { + panic(ErrVectorAccess) + } + v.mat.Data[i*v.mat.Inc] = val +} + +// At returns the element at row i and column j. +func (t *SymDense) At(i, j int) float64 { + return t.at(i, j) +} + +func (t *SymDense) at(i, j int) float64 { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + if i > j { + i, j = j, i + } + return t.mat.Data[i*t.mat.Stride+j] +} + +// SetSym sets the elements at (i,j) and (j,i) to the value v. +func (t *SymDense) SetSym(i, j int, v float64) { + t.set(i, j, v) +} + +func (t *SymDense) set(i, j int, v float64) { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + if i > j { + i, j = j, i + } + t.mat.Data[i*t.mat.Stride+j] = v +} + +// At returns the element at row i, column j. +func (t *TriDense) At(i, j int) float64 { + return t.at(i, j) +} + +func (t *TriDense) at(i, j int) float64 { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + return 0 + } + return t.mat.Data[i*t.mat.Stride+j] +} + +// SetTri sets the element of the triangular matrix at row i, column j to the value v. +// It panics if the location is outside the appropriate half of the matrix. +func (t *TriDense) SetTri(i, j int, v float64) { + t.set(i, j, v) +} + +func (t *TriDense) set(i, j int, v float64) { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + panic(ErrTriangleSet) + } + t.mat.Data[i*t.mat.Stride+j] = v +} + +// At returns the element at row i, column j. +func (b *BandDense) At(i, j int) float64 { + return b.at(i, j) +} + +func (b *BandDense) at(i, j int) float64 { + if uint(i) >= uint(b.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(b.mat.Cols) { + panic(ErrColAccess) + } + pj := j + b.mat.KL - i + if pj < 0 || b.mat.KL+b.mat.KU+1 <= pj { + return 0 + } + return b.mat.Data[i*b.mat.Stride+pj] +} + +// SetBand sets the element at row i, column j to the value v. 
+// It panics if the location is outside the appropriate region of the matrix. +func (b *BandDense) SetBand(i, j int, v float64) { + b.set(i, j, v) +} + +func (b *BandDense) set(i, j int, v float64) { + if uint(i) >= uint(b.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(b.mat.Cols) { + panic(ErrColAccess) + } + pj := j + b.mat.KL - i + if pj < 0 || b.mat.KL+b.mat.KU+1 <= pj { + panic(ErrBandSet) + } + b.mat.Data[i*b.mat.Stride+pj] = v +} + +// At returns the element at row i, column j. +func (s *SymBandDense) At(i, j int) float64 { + return s.at(i, j) +} + +func (s *SymBandDense) at(i, j int) float64 { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + if i > j { + i, j = j, i + } + pj := j - i + if s.mat.K+1 <= pj { + return 0 + } + return s.mat.Data[i*s.mat.Stride+pj] +} + +// SetSymBand sets the element at row i, column j to the value v. +// It panics if the location is outside the appropriate region of the matrix. +func (s *SymBandDense) SetSymBand(i, j int, v float64) { + s.set(i, j, v) +} + +func (s *SymBandDense) set(i, j int, v float64) { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + if i > j { + i, j = j, i + } + pj := j - i + if s.mat.K+1 <= pj { + panic(ErrBandSet) + } + s.mat.Data[i*s.mat.Stride+pj] = v +} + +func (t *TriBandDense) At(i, j int) float64 { + return t.at(i, j) +} + +func (t *TriBandDense) at(i, j int) float64 { + // TODO(btracey): Support Diag field, see #692. + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + return 0 + } + kl, ku := t.mat.K, 0 + if isUpper { + kl, ku = 0, t.mat.K + } + pj := j + kl - i + if pj < 0 || kl+ku+1 <= pj { + return 0 + } + return t.mat.Data[i*t.mat.Stride+pj] +} + +func (t *TriBandDense) SetTriBand(i, j int, v float64) { + t.setTriBand(i, j, v) +} + +func (t *TriBandDense) setTriBand(i, j int, v float64) { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + panic(ErrTriangleSet) + } + kl, ku := t.mat.K, 0 + if isUpper { + kl, ku = 0, t.mat.K + } + pj := j + kl - i + if pj < 0 || kl+ku+1 <= pj { + panic(ErrBandSet) + } + // TODO(btracey): Support Diag field, see #692. + t.mat.Data[i*t.mat.Stride+pj] = v +} + +// At returns the element at row i, column j. +func (d *DiagDense) At(i, j int) float64 { + return d.at(i, j) +} + +func (d *DiagDense) at(i, j int) float64 { + if uint(i) >= uint(d.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(d.mat.N) { + panic(ErrColAccess) + } + if i != j { + return 0 + } + return d.mat.Data[i*d.mat.Inc] +} + +// SetDiag sets the element at row i, column i to the value v. +// It panics if the location is outside the appropriate region of the matrix. +func (d *DiagDense) SetDiag(i int, v float64) { + d.setDiag(i, v) +} + +func (d *DiagDense) setDiag(i int, v float64) { + if uint(i) >= uint(d.mat.N) { + panic(ErrRowAccess) + } + d.mat.Data[i*d.mat.Inc] = v +} diff --git a/vendor/gonum.org/v1/gonum/mat/index_no_bound_checks.go b/vendor/gonum.org/v1/gonum/mat/index_no_bound_checks.go new file mode 100644 index 0000000..051f843 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/index_no_bound_checks.go @@ -0,0 +1,359 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file must be kept in sync with index_bound_checks.go. + +// +build !bounds + +package mat + +// At returns the element at row i, column j. +func (m *Dense) At(i, j int) float64 { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + return m.at(i, j) +} + +func (m *Dense) at(i, j int) float64 { + return m.mat.Data[i*m.mat.Stride+j] +} + +// Set sets the element at row i, column j to the value v. +func (m *Dense) Set(i, j int, v float64) { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + m.set(i, j, v) +} + +func (m *Dense) set(i, j int, v float64) { + m.mat.Data[i*m.mat.Stride+j] = v +} + +// At returns the element at row i, column j. +func (m *CDense) At(i, j int) complex128 { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + return m.at(i, j) +} + +func (m *CDense) at(i, j int) complex128 { + return m.mat.Data[i*m.mat.Stride+j] +} + +// Set sets the element at row i, column j to the value v. +func (m *CDense) Set(i, j int, v complex128) { + if uint(i) >= uint(m.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(m.mat.Cols) { + panic(ErrColAccess) + } + m.set(i, j, v) +} + +func (m *CDense) set(i, j int, v complex128) { + m.mat.Data[i*m.mat.Stride+j] = v +} + +// At returns the element at row i. +// It panics if i is out of bounds or if j is not zero. +func (v *VecDense) At(i, j int) float64 { + if uint(i) >= uint(v.mat.N) { + panic(ErrRowAccess) + } + if j != 0 { + panic(ErrColAccess) + } + return v.at(i) +} + +// AtVec returns the element at row i. +// It panics if i is out of bounds. +func (v *VecDense) AtVec(i int) float64 { + if uint(i) >= uint(v.mat.N) { + panic(ErrRowAccess) + } + return v.at(i) +} + +func (v *VecDense) at(i int) float64 { + return v.mat.Data[i*v.mat.Inc] +} + +// SetVec sets the element at row i to the value val. +// It panics if i is out of bounds. +func (v *VecDense) SetVec(i int, val float64) { + if uint(i) >= uint(v.mat.N) { + panic(ErrVectorAccess) + } + v.setVec(i, val) +} + +func (v *VecDense) setVec(i int, val float64) { + v.mat.Data[i*v.mat.Inc] = val +} + +// At returns the element at row i and column j. +func (s *SymDense) At(i, j int) float64 { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + return s.at(i, j) +} + +func (s *SymDense) at(i, j int) float64 { + if i > j { + i, j = j, i + } + return s.mat.Data[i*s.mat.Stride+j] +} + +// SetSym sets the elements at (i,j) and (j,i) to the value v. +func (s *SymDense) SetSym(i, j int, v float64) { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + s.set(i, j, v) +} + +func (s *SymDense) set(i, j int, v float64) { + if i > j { + i, j = j, i + } + s.mat.Data[i*s.mat.Stride+j] = v +} + +// At returns the element at row i, column j. 
+func (t *TriDense) At(i, j int) float64 { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + return t.at(i, j) +} + +func (t *TriDense) at(i, j int) float64 { + isUpper := t.triKind() + if (isUpper && i > j) || (!isUpper && i < j) { + return 0 + } + return t.mat.Data[i*t.mat.Stride+j] +} + +// SetTri sets the element at row i, column j to the value v. +// It panics if the location is outside the appropriate half of the matrix. +func (t *TriDense) SetTri(i, j int, v float64) { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + panic(ErrTriangleSet) + } + t.set(i, j, v) +} + +func (t *TriDense) set(i, j int, v float64) { + t.mat.Data[i*t.mat.Stride+j] = v +} + +// At returns the element at row i, column j. +func (b *BandDense) At(i, j int) float64 { + if uint(i) >= uint(b.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(b.mat.Cols) { + panic(ErrColAccess) + } + return b.at(i, j) +} + +func (b *BandDense) at(i, j int) float64 { + pj := j + b.mat.KL - i + if pj < 0 || b.mat.KL+b.mat.KU+1 <= pj { + return 0 + } + return b.mat.Data[i*b.mat.Stride+pj] +} + +// SetBand sets the element at row i, column j to the value v. +// It panics if the location is outside the appropriate region of the matrix. +func (b *BandDense) SetBand(i, j int, v float64) { + if uint(i) >= uint(b.mat.Rows) { + panic(ErrRowAccess) + } + if uint(j) >= uint(b.mat.Cols) { + panic(ErrColAccess) + } + pj := j + b.mat.KL - i + if pj < 0 || b.mat.KL+b.mat.KU+1 <= pj { + panic(ErrBandSet) + } + b.set(i, j, v) +} + +func (b *BandDense) set(i, j int, v float64) { + pj := j + b.mat.KL - i + b.mat.Data[i*b.mat.Stride+pj] = v +} + +// At returns the element at row i, column j. +func (s *SymBandDense) At(i, j int) float64 { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + return s.at(i, j) +} + +func (s *SymBandDense) at(i, j int) float64 { + if i > j { + i, j = j, i + } + pj := j - i + if s.mat.K+1 <= pj { + return 0 + } + return s.mat.Data[i*s.mat.Stride+pj] +} + +// SetSymBand sets the element at row i, column j to the value v. +// It panics if the location is outside the appropriate region of the matrix. +func (s *SymBandDense) SetSymBand(i, j int, v float64) { + if uint(i) >= uint(s.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(s.mat.N) { + panic(ErrColAccess) + } + s.set(i, j, v) +} + +func (s *SymBandDense) set(i, j int, v float64) { + if i > j { + i, j = j, i + } + pj := j - i + if s.mat.K+1 <= pj { + panic(ErrBandSet) + } + s.mat.Data[i*s.mat.Stride+pj] = v +} + +func (t *TriBandDense) At(i, j int) float64 { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + return t.at(i, j) +} + +func (t *TriBandDense) at(i, j int) float64 { + // TODO(btracey): Support Diag field, see #692. 
+ isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + return 0 + } + kl := t.mat.K + ku := 0 + if isUpper { + ku = t.mat.K + kl = 0 + } + pj := j + kl - i + if pj < 0 || kl+ku+1 <= pj { + return 0 + } + return t.mat.Data[i*t.mat.Stride+pj] +} + +func (t *TriBandDense) SetTriBand(i, j int, v float64) { + if uint(i) >= uint(t.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(t.mat.N) { + panic(ErrColAccess) + } + isUpper := t.isUpper() + if (isUpper && i > j) || (!isUpper && i < j) { + panic(ErrTriangleSet) + } + kl, ku := t.mat.K, 0 + if isUpper { + kl, ku = 0, t.mat.K + } + pj := j + kl - i + if pj < 0 || kl+ku+1 <= pj { + panic(ErrBandSet) + } + // TODO(btracey): Support Diag field, see #692. + t.mat.Data[i*t.mat.Stride+pj] = v +} + +func (t *TriBandDense) setTriBand(i, j int, v float64) { + var kl int + if !t.isUpper() { + kl = t.mat.K + } + pj := j + kl - i + t.mat.Data[i*t.mat.Stride+pj] = v +} + +// At returns the element at row i, column j. +func (d *DiagDense) At(i, j int) float64 { + if uint(i) >= uint(d.mat.N) { + panic(ErrRowAccess) + } + if uint(j) >= uint(d.mat.N) { + panic(ErrColAccess) + } + return d.at(i, j) +} + +func (d *DiagDense) at(i, j int) float64 { + if i != j { + return 0 + } + return d.mat.Data[i*d.mat.Inc] +} + +// SetDiag sets the element at row i, column i to the value v. +// It panics if the location is outside the appropriate region of the matrix. +func (d *DiagDense) SetDiag(i int, v float64) { + if uint(i) >= uint(d.mat.N) { + panic(ErrRowAccess) + } + d.setDiag(i, v) +} + +func (d *DiagDense) setDiag(i int, v float64) { + d.mat.Data[i*d.mat.Inc] = v +} diff --git a/vendor/gonum.org/v1/gonum/mat/inner.go b/vendor/gonum.org/v1/gonum/mat/inner.go new file mode 100644 index 0000000..fba3e0b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/inner.go @@ -0,0 +1,121 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/internal/asm/f64" +) + +// Inner computes the generalized inner product +// x^T A y +// between column vectors x and y with matrix A. This is only a true inner product if +// A is symmetric positive definite, though the operation works for any matrix A. +// +// Inner panics if x.Len != m or y.Len != n when A is an m x n matrix. +func Inner(x Vector, a Matrix, y Vector) float64 { + m, n := a.Dims() + if x.Len() != m { + panic(ErrShape) + } + if y.Len() != n { + panic(ErrShape) + } + if m == 0 || n == 0 { + return 0 + } + + var sum float64 + + switch a := a.(type) { + case RawSymmetricer: + amat := a.RawSymmetric() + if amat.Uplo != blas.Upper { + // Panic as a string not a mat.Error. 
+ panic(badSymTriangle) + } + var xmat, ymat blas64.Vector + if xrv, ok := x.(RawVectorer); ok { + xmat = xrv.RawVector() + } else { + break + } + if yrv, ok := y.(RawVectorer); ok { + ymat = yrv.RawVector() + } else { + break + } + for i := 0; i < x.Len(); i++ { + xi := x.AtVec(i) + if xi != 0 { + if ymat.Inc == 1 { + sum += xi * f64.DotUnitary( + amat.Data[i*amat.Stride+i:i*amat.Stride+n], + ymat.Data[i:], + ) + } else { + sum += xi * f64.DotInc( + amat.Data[i*amat.Stride+i:i*amat.Stride+n], + ymat.Data[i*ymat.Inc:], uintptr(n-i), + 1, uintptr(ymat.Inc), + 0, 0, + ) + } + } + yi := y.AtVec(i) + if i != n-1 && yi != 0 { + if xmat.Inc == 1 { + sum += yi * f64.DotUnitary( + amat.Data[i*amat.Stride+i+1:i*amat.Stride+n], + xmat.Data[i+1:], + ) + } else { + sum += yi * f64.DotInc( + amat.Data[i*amat.Stride+i+1:i*amat.Stride+n], + xmat.Data[(i+1)*xmat.Inc:], uintptr(n-i-1), + 1, uintptr(xmat.Inc), + 0, 0, + ) + } + } + } + return sum + case RawMatrixer: + amat := a.RawMatrix() + var ymat blas64.Vector + if yrv, ok := y.(RawVectorer); ok { + ymat = yrv.RawVector() + } else { + break + } + for i := 0; i < x.Len(); i++ { + xi := x.AtVec(i) + if xi != 0 { + if ymat.Inc == 1 { + sum += xi * f64.DotUnitary( + amat.Data[i*amat.Stride:i*amat.Stride+n], + ymat.Data, + ) + } else { + sum += xi * f64.DotInc( + amat.Data[i*amat.Stride:i*amat.Stride+n], + ymat.Data, uintptr(n), + 1, uintptr(ymat.Inc), + 0, 0, + ) + } + } + } + return sum + } + for i := 0; i < x.Len(); i++ { + xi := x.AtVec(i) + for j := 0; j < y.Len(); j++ { + sum += xi * a.At(i, j) * y.AtVec(j) + } + } + return sum +} diff --git a/vendor/gonum.org/v1/gonum/mat/io.go b/vendor/gonum.org/v1/gonum/mat/io.go new file mode 100644 index 0000000..7f7ef07 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/io.go @@ -0,0 +1,492 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "bytes" + "encoding/binary" + "errors" + "fmt" + "io" + "math" +) + +// version is the current on-disk codec version. +const version uint32 = 0x1 + +// maxLen is the biggest slice/array len one can create on a 32/64b platform. +const maxLen = int64(int(^uint(0) >> 1)) + +var ( + headerSize = binary.Size(storage{}) + sizeInt64 = binary.Size(int64(0)) + sizeFloat64 = binary.Size(float64(0)) + + errWrongType = errors.New("mat: wrong data type") + + errTooBig = errors.New("mat: resulting data slice too big") + errTooSmall = errors.New("mat: input slice too small") + errBadBuffer = errors.New("mat: data buffer size mismatch") + errBadSize = errors.New("mat: invalid dimension") +) + +// Type encoding scheme: +// +// Type Form Packing Uplo Unit Rows Columns kU kL +// uint8 [GST] uint8 [BPF] uint8 [AUL] bool int64 int64 int64 int64 +// General 'G' 'F' 'A' false r c 0 0 +// Band 'G' 'B' 'A' false r c kU kL +// Symmetric 'S' 'F' ul false n n 0 0 +// SymmetricBand 'S' 'B' ul false n n k k +// SymmetricPacked 'S' 'P' ul false n n 0 0 +// Triangular 'T' 'F' ul Diag==Unit n n 0 0 +// TriangularBand 'T' 'B' ul Diag==Unit n n k k +// TriangularPacked 'T' 'P' ul Diag==Unit n n 0 0 +// +// G - general, S - symmetric, T - triangular +// F - full, B - band, P - packed +// A - all, U - upper, L - lower + +// MarshalBinary encodes the receiver into a binary form and returns the result. 
+// +// Dense is little-endian encoded as follows: +// 0 - 3 Version = 1 (uint32) +// 4 'G' (byte) +// 5 'F' (byte) +// 6 'A' (byte) +// 7 0 (byte) +// 8 - 15 number of rows (int64) +// 16 - 23 number of columns (int64) +// 24 - 31 0 (int64) +// 32 - 39 0 (int64) +// 40 - .. matrix data elements (float64) +// [0,0] [0,1] ... [0,ncols-1] +// [1,0] [1,1] ... [1,ncols-1] +// ... +// [nrows-1,0] ... [nrows-1,ncols-1] +func (m Dense) MarshalBinary() ([]byte, error) { + bufLen := int64(headerSize) + int64(m.mat.Rows)*int64(m.mat.Cols)*int64(sizeFloat64) + if bufLen <= 0 { + // bufLen is too big and has wrapped around. + return nil, errTooBig + } + + header := storage{ + Form: 'G', Packing: 'F', Uplo: 'A', + Rows: int64(m.mat.Rows), Cols: int64(m.mat.Cols), + Version: version, + } + buf := make([]byte, bufLen) + n, err := header.marshalBinaryTo(bytes.NewBuffer(buf[:0])) + if err != nil { + return buf[:n], err + } + + p := headerSize + r, c := m.Dims() + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + binary.LittleEndian.PutUint64(buf[p:p+sizeFloat64], math.Float64bits(m.at(i, j))) + p += sizeFloat64 + } + } + + return buf, nil +} + +// MarshalBinaryTo encodes the receiver into a binary form and writes it into w. +// MarshalBinaryTo returns the number of bytes written into w and an error, if any. +// +// See MarshalBinary for the on-disk layout. +func (m Dense) MarshalBinaryTo(w io.Writer) (int, error) { + header := storage{ + Form: 'G', Packing: 'F', Uplo: 'A', + Rows: int64(m.mat.Rows), Cols: int64(m.mat.Cols), + Version: version, + } + n, err := header.marshalBinaryTo(w) + if err != nil { + return n, err + } + + r, c := m.Dims() + var b [8]byte + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + binary.LittleEndian.PutUint64(b[:], math.Float64bits(m.at(i, j))) + nn, err := w.Write(b[:]) + n += nn + if err != nil { + return n, err + } + } + } + + return n, nil +} + +// UnmarshalBinary decodes the binary form into the receiver. +// It panics if the receiver is a non-zero Dense matrix. +// +// See MarshalBinary for the on-disk layout. +// +// Limited checks on the validity of the binary input are performed: +// - matrix.ErrShape is returned if the number of rows or columns is negative, +// - an error is returned if the resulting Dense matrix is too +// big for the current architecture (e.g. a 16GB matrix written by a +// 64b application and read back from a 32b application.) +// UnmarshalBinary does not limit the size of the unmarshaled matrix, and so +// it should not be used on untrusted data. 
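+//
+// An illustrative round trip, with error handling elided:
+//
+//  m := mat.NewDense(2, 2, []float64{1, 2, 3, 4})
+//  buf, _ := m.MarshalBinary()
+//  var got mat.Dense
+//  _ = got.UnmarshalBinary(buf) // got now holds the same 2×2 data as m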
+func (m *Dense) UnmarshalBinary(data []byte) error { + if !m.IsZero() { + panic("mat: unmarshal into non-zero matrix") + } + + if len(data) < headerSize { + return errTooSmall + } + + var header storage + err := header.unmarshalBinary(data[:headerSize]) + if err != nil { + return err + } + rows := header.Rows + cols := header.Cols + header.Version = 0 + header.Rows = 0 + header.Cols = 0 + if (header != storage{Form: 'G', Packing: 'F', Uplo: 'A'}) { + return errWrongType + } + if rows < 0 || cols < 0 { + return errBadSize + } + size := rows * cols + if size == 0 { + return ErrZeroLength + } + if int(size) < 0 || size > maxLen { + return errTooBig + } + if len(data) != headerSize+int(rows*cols)*sizeFloat64 { + return errBadBuffer + } + + p := headerSize + m.reuseAs(int(rows), int(cols)) + for i := range m.mat.Data { + m.mat.Data[i] = math.Float64frombits(binary.LittleEndian.Uint64(data[p : p+sizeFloat64])) + p += sizeFloat64 + } + + return nil +} + +// UnmarshalBinaryFrom decodes the binary form into the receiver and returns +// the number of bytes read and an error if any. +// It panics if the receiver is a non-zero Dense matrix. +// +// See MarshalBinary for the on-disk layout. +// +// Limited checks on the validity of the binary input are performed: +// - matrix.ErrShape is returned if the number of rows or columns is negative, +// - an error is returned if the resulting Dense matrix is too +// big for the current architecture (e.g. a 16GB matrix written by a +// 64b application and read back from a 32b application.) +// UnmarshalBinary does not limit the size of the unmarshaled matrix, and so +// it should not be used on untrusted data. +func (m *Dense) UnmarshalBinaryFrom(r io.Reader) (int, error) { + if !m.IsZero() { + panic("mat: unmarshal into non-zero matrix") + } + + var header storage + n, err := header.unmarshalBinaryFrom(r) + if err != nil { + return n, err + } + rows := header.Rows + cols := header.Cols + header.Version = 0 + header.Rows = 0 + header.Cols = 0 + if (header != storage{Form: 'G', Packing: 'F', Uplo: 'A'}) { + return n, errWrongType + } + if rows < 0 || cols < 0 { + return n, errBadSize + } + size := rows * cols + if size == 0 { + return n, ErrZeroLength + } + if int(size) < 0 || size > maxLen { + return n, errTooBig + } + + m.reuseAs(int(rows), int(cols)) + var b [8]byte + for i := range m.mat.Data { + nn, err := readFull(r, b[:]) + n += nn + if err != nil { + if err == io.EOF { + return n, io.ErrUnexpectedEOF + } + return n, err + } + m.mat.Data[i] = math.Float64frombits(binary.LittleEndian.Uint64(b[:])) + } + + return n, nil +} + +// MarshalBinary encodes the receiver into a binary form and returns the result. +// +// VecDense is little-endian encoded as follows: +// +// 0 - 3 Version = 1 (uint32) +// 4 'G' (byte) +// 5 'F' (byte) +// 6 'A' (byte) +// 7 0 (byte) +// 8 - 15 number of elements (int64) +// 16 - 23 1 (int64) +// 24 - 31 0 (int64) +// 32 - 39 0 (int64) +// 40 - .. vector's data elements (float64) +func (v VecDense) MarshalBinary() ([]byte, error) { + bufLen := int64(headerSize) + int64(v.mat.N)*int64(sizeFloat64) + if bufLen <= 0 { + // bufLen is too big and has wrapped around. 
+ return nil, errTooBig + } + + header := storage{ + Form: 'G', Packing: 'F', Uplo: 'A', + Rows: int64(v.mat.N), Cols: 1, + Version: version, + } + buf := make([]byte, bufLen) + n, err := header.marshalBinaryTo(bytes.NewBuffer(buf[:0])) + if err != nil { + return buf[:n], err + } + + p := headerSize + for i := 0; i < v.mat.N; i++ { + binary.LittleEndian.PutUint64(buf[p:p+sizeFloat64], math.Float64bits(v.at(i))) + p += sizeFloat64 + } + + return buf, nil +} + +// MarshalBinaryTo encodes the receiver into a binary form, writes it to w and +// returns the number of bytes written and an error if any. +// +// See MarshalBainry for the on-disk format. +func (v VecDense) MarshalBinaryTo(w io.Writer) (int, error) { + header := storage{ + Form: 'G', Packing: 'F', Uplo: 'A', + Rows: int64(v.mat.N), Cols: 1, + Version: version, + } + n, err := header.marshalBinaryTo(w) + if err != nil { + return n, err + } + + var buf [8]byte + for i := 0; i < v.mat.N; i++ { + binary.LittleEndian.PutUint64(buf[:], math.Float64bits(v.at(i))) + nn, err := w.Write(buf[:]) + n += nn + if err != nil { + return n, err + } + } + + return n, nil +} + +// UnmarshalBinary decodes the binary form into the receiver. +// It panics if the receiver is a non-zero VecDense. +// +// See MarshalBinary for the on-disk layout. +// +// Limited checks on the validity of the binary input are performed: +// - matrix.ErrShape is returned if the number of rows is negative, +// - an error is returned if the resulting VecDense is too +// big for the current architecture (e.g. a 16GB vector written by a +// 64b application and read back from a 32b application.) +// UnmarshalBinary does not limit the size of the unmarshaled vector, and so +// it should not be used on untrusted data. +func (v *VecDense) UnmarshalBinary(data []byte) error { + if !v.IsZero() { + panic("mat: unmarshal into non-zero vector") + } + + if len(data) < headerSize { + return errTooSmall + } + + var header storage + err := header.unmarshalBinary(data[:headerSize]) + if err != nil { + return err + } + if header.Cols != 1 { + return ErrShape + } + n := header.Rows + header.Version = 0 + header.Rows = 0 + header.Cols = 0 + if (header != storage{Form: 'G', Packing: 'F', Uplo: 'A'}) { + return errWrongType + } + if n == 0 { + return ErrZeroLength + } + if n < 0 { + return errBadSize + } + if int64(maxLen) < n { + return errTooBig + } + if len(data) != headerSize+int(n)*sizeFloat64 { + return errBadBuffer + } + + p := headerSize + v.reuseAs(int(n)) + for i := range v.mat.Data { + v.mat.Data[i] = math.Float64frombits(binary.LittleEndian.Uint64(data[p : p+sizeFloat64])) + p += sizeFloat64 + } + + return nil +} + +// UnmarshalBinaryFrom decodes the binary form into the receiver, from the +// io.Reader and returns the number of bytes read and an error if any. +// It panics if the receiver is a non-zero VecDense. +// +// See MarshalBinary for the on-disk layout. +// See UnmarshalBinary for the list of sanity checks performed on the input. 
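+//
+// An illustrative streaming round trip through a bytes.Buffer, with error
+// handling elided:
+//
+//  v := mat.NewVecDense(3, []float64{1, 2, 3})
+//  var buf bytes.Buffer
+//  v.MarshalBinaryTo(&buf)
+//  var got mat.VecDense
+//  got.UnmarshalBinaryFrom(&buf) // got now holds the same elements as v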
+func (v *VecDense) UnmarshalBinaryFrom(r io.Reader) (int, error) { + if !v.IsZero() { + panic("mat: unmarshal into non-zero vector") + } + + var header storage + n, err := header.unmarshalBinaryFrom(r) + if err != nil { + return n, err + } + if header.Cols != 1 { + return n, ErrShape + } + l := header.Rows + header.Version = 0 + header.Rows = 0 + header.Cols = 0 + if (header != storage{Form: 'G', Packing: 'F', Uplo: 'A'}) { + return n, errWrongType + } + if l == 0 { + return n, ErrZeroLength + } + if l < 0 { + return n, errBadSize + } + if int64(maxLen) < l { + return n, errTooBig + } + + v.reuseAs(int(l)) + var b [8]byte + for i := range v.mat.Data { + nn, err := readFull(r, b[:]) + n += nn + if err != nil { + if err == io.EOF { + return n, io.ErrUnexpectedEOF + } + return n, err + } + v.mat.Data[i] = math.Float64frombits(binary.LittleEndian.Uint64(b[:])) + } + + return n, nil +} + +// storage is the internal representation of the storage format of a +// serialised matrix. +type storage struct { + Version uint32 // Keep this first. + Form byte // [GST] + Packing byte // [BPF] + Uplo byte // [AUL] + Unit bool + Rows int64 + Cols int64 + KU int64 + KL int64 +} + +// TODO(kortschak): Consider replacing these with calls to direct +// encoding/decoding of fields rather than to binary.Write/binary.Read. + +func (s storage) marshalBinaryTo(w io.Writer) (int, error) { + buf := bytes.NewBuffer(make([]byte, 0, headerSize)) + err := binary.Write(buf, binary.LittleEndian, s) + if err != nil { + return 0, err + } + return w.Write(buf.Bytes()) +} + +func (s *storage) unmarshalBinary(buf []byte) error { + err := binary.Read(bytes.NewReader(buf), binary.LittleEndian, s) + if err != nil { + return err + } + if s.Version != version { + return fmt.Errorf("mat: incorrect version: %d", s.Version) + } + return nil +} + +func (s *storage) unmarshalBinaryFrom(r io.Reader) (int, error) { + buf := make([]byte, headerSize) + n, err := readFull(r, buf) + if err != nil { + return n, err + } + return n, s.unmarshalBinary(buf[:n]) +} + +// readFull reads from r into buf until it has read len(buf). +// It returns the number of bytes copied and an error if fewer bytes were read. +// If an EOF happens after reading fewer than len(buf) bytes, io.ErrUnexpectedEOF is returned. +func readFull(r io.Reader, buf []byte) (int, error) { + var n int + var err error + for n < len(buf) && err == nil { + var nn int + nn, err = r.Read(buf[n:]) + n += nn + } + if n == len(buf) { + return n, nil + } + if err == io.EOF { + return n, io.ErrUnexpectedEOF + } + return n, err +} diff --git a/vendor/gonum.org/v1/gonum/mat/lq.go b/vendor/gonum.org/v1/gonum/mat/lq.go new file mode 100644 index 0000000..d788457 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/lq.go @@ -0,0 +1,262 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +const badLQ = "mat: invalid LQ factorization" + +// LQ is a type for creating and using the LQ factorization of a matrix. 
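+//
+// A minimal usage sketch, where a stands in for an m×n matrix with m <= n
+// (Factorize panics otherwise) and b for a right-hand side with m rows:
+//
+//  var lq mat.LQ
+//  lq.Factorize(a)
+//  var x mat.Dense
+//  // Minimum-norm solution of A * X = B.
+//  if err := lq.SolveTo(&x, false, b); err != nil {
+//      // A is singular or near-singular; x may be unreliable.
+//  }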
+type LQ struct { + lq *Dense + tau []float64 + cond float64 +} + +func (lq *LQ) updateCond(norm lapack.MatrixNorm) { + // Since A = L*Q, and Q is orthogonal, we get for the condition number κ + // κ(A) := |A| |A^-1| = |L*Q| |(L*Q)^-1| = |L| |Q^T * L^-1| + // = |L| |L^-1| = κ(L), + // where we used that fact that Q^-1 = Q^T. However, this assumes that + // the matrix norm is invariant under orthogonal transformations which + // is not the case for CondNorm. Hopefully the error is negligible: κ + // is only a qualitative measure anyway. + m := lq.lq.mat.Rows + work := getFloats(3*m, false) + iwork := getInts(m, false) + l := lq.lq.asTriDense(m, blas.NonUnit, blas.Lower) + v := lapack64.Trcon(norm, l.mat, work, iwork) + lq.cond = 1 / v + putFloats(work) + putInts(iwork) +} + +// Factorize computes the LQ factorization of an m×n matrix a where n <= m. The LQ +// factorization always exists even if A is singular. +// +// The LQ decomposition is a factorization of the matrix A such that A = L * Q. +// The matrix Q is an orthonormal n×n matrix, and L is an m×n upper triangular matrix. +// L and Q can be extracted from the LTo and QTo methods. +func (lq *LQ) Factorize(a Matrix) { + lq.factorize(a, CondNorm) +} + +func (lq *LQ) factorize(a Matrix, norm lapack.MatrixNorm) { + m, n := a.Dims() + if m > n { + panic(ErrShape) + } + k := min(m, n) + if lq.lq == nil { + lq.lq = &Dense{} + } + lq.lq.Clone(a) + work := []float64{0} + lq.tau = make([]float64, k) + lapack64.Gelqf(lq.lq.mat, lq.tau, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Gelqf(lq.lq.mat, lq.tau, work, len(work)) + putFloats(work) + lq.updateCond(norm) +} + +// isValid returns whether the receiver contains a factorization. +func (lq *LQ) isValid() bool { + return lq.lq != nil && !lq.lq.IsZero() +} + +// Cond returns the condition number for the factorized matrix. +// Cond will panic if the receiver does not contain a factorization. +func (lq *LQ) Cond() float64 { + if !lq.isValid() { + panic(badLQ) + } + return lq.cond +} + +// TODO(btracey): Add in the "Reduced" forms for extracting the m×m orthogonal +// and upper triangular matrices. + +// LTo extracts the m×n lower trapezoidal matrix from a LQ decomposition. +// If dst is nil, a new matrix is allocated. The resulting L matrix is returned. +// LTo will panic if the receiver does not contain a factorization. +func (lq *LQ) LTo(dst *Dense) *Dense { + if !lq.isValid() { + panic(badLQ) + } + + r, c := lq.lq.Dims() + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + // Disguise the LQ as a lower triangular. + t := &TriDense{ + mat: blas64.Triangular{ + N: r, + Stride: lq.lq.mat.Stride, + Data: lq.lq.mat.Data, + Uplo: blas.Lower, + Diag: blas.NonUnit, + }, + cap: lq.lq.capCols, + } + dst.Copy(t) + + if r == c { + return dst + } + // Zero right of the triangular. + for i := 0; i < r; i++ { + zero(dst.mat.Data[i*dst.mat.Stride+r : i*dst.mat.Stride+c]) + } + + return dst +} + +// QTo extracts the n×n orthonormal matrix Q from an LQ decomposition. +// If dst is nil, a new matrix is allocated. The resulting Q matrix is returned. +// QTo will panic if the receiver does not contain a factorization. +func (lq *LQ) QTo(dst *Dense) *Dense { + if !lq.isValid() { + panic(badLQ) + } + + _, c := lq.lq.Dims() + if dst == nil { + dst = NewDense(c, c, nil) + } else { + dst.reuseAsZeroed(c, c) + } + q := dst.mat + + // Set Q = I. + ldq := q.Stride + for i := 0; i < c; i++ { + q.Data[i*ldq+i] = 1 + } + + // Construct Q from the elementary reflectors. 
+ work := []float64{0} + lapack64.Ormlq(blas.Left, blas.NoTrans, lq.lq.mat, lq.tau, q, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormlq(blas.Left, blas.NoTrans, lq.lq.mat, lq.tau, q, work, len(work)) + putFloats(work) + + return dst +} + +// SolveTo finds a minimum-norm solution to a system of linear equations defined +// by the matrices A and b, where A is an m×n matrix represented in its LQ factorized +// form. If A is singular or near-singular a Condition error is returned. +// See the documentation for Condition for more information. +// +// The minimization problem solved depends on the input parameters. +// If trans == false, find the minimum norm solution of A * X = B. +// If trans == true, find X such that ||A*X - B||_2 is minimized. +// The solution matrix, X, is stored in place into dst. +// SolveTo will panic if the receiver does not contain a factorization. +func (lq *LQ) SolveTo(dst *Dense, trans bool, b Matrix) error { + if !lq.isValid() { + panic(badLQ) + } + + r, c := lq.lq.Dims() + br, bc := b.Dims() + + // The LQ solve algorithm stores the result in-place into the right hand side. + // The storage for the answer must be large enough to hold both b and x. + // However, this method's receiver must be the size of x. Copy b, and then + // copy the result into x at the end. + if trans { + if c != br { + panic(ErrShape) + } + dst.reuseAs(r, bc) + } else { + if r != br { + panic(ErrShape) + } + dst.reuseAs(c, bc) + } + // Do not need to worry about overlap between x and b because w has its own + // independent storage. + w := getWorkspace(max(r, c), bc, false) + w.Copy(b) + t := lq.lq.asTriDense(lq.lq.mat.Rows, blas.NonUnit, blas.Lower).mat + if trans { + work := []float64{0} + lapack64.Ormlq(blas.Left, blas.NoTrans, lq.lq.mat, lq.tau, w.mat, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormlq(blas.Left, blas.NoTrans, lq.lq.mat, lq.tau, w.mat, work, len(work)) + putFloats(work) + + ok := lapack64.Trtrs(blas.Trans, t, w.mat) + if !ok { + return Condition(math.Inf(1)) + } + } else { + ok := lapack64.Trtrs(blas.NoTrans, t, w.mat) + if !ok { + return Condition(math.Inf(1)) + } + for i := r; i < c; i++ { + zero(w.mat.Data[i*w.mat.Stride : i*w.mat.Stride+bc]) + } + work := []float64{0} + lapack64.Ormlq(blas.Left, blas.Trans, lq.lq.mat, lq.tau, w.mat, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormlq(blas.Left, blas.Trans, lq.lq.mat, lq.tau, w.mat, work, len(work)) + putFloats(work) + } + // x was set above to be the correct size for the result. + dst.Copy(w) + putWorkspace(w) + if lq.cond > ConditionTolerance { + return Condition(lq.cond) + } + return nil +} + +// SolveVecTo finds a minimum-norm solution to a system of linear equations. +// See LQ.SolveTo for the full documentation. +// SolveToVec will panic if the receiver does not contain a factorization. +func (lq *LQ) SolveVecTo(dst *VecDense, trans bool, b Vector) error { + if !lq.isValid() { + panic(badLQ) + } + + r, c := lq.lq.Dims() + if _, bc := b.Dims(); bc != 1 { + panic(ErrShape) + } + + // The Solve implementation is non-trivial, so rather than duplicate the code, + // instead recast the VecDenses as Dense and call the matrix code. 
+ bm := Matrix(b) + if rv, ok := b.(RawVectorer); ok { + bmat := rv.RawVector() + if dst != b { + dst.checkOverlap(bmat) + } + b := VecDense{mat: bmat} + bm = b.asDense() + } + if trans { + dst.reuseAs(r) + } else { + dst.reuseAs(c) + } + return lq.SolveTo(dst.asDense(), trans, bm) +} diff --git a/vendor/gonum.org/v1/gonum/mat/lu.go b/vendor/gonum.org/v1/gonum/mat/lu.go new file mode 100644 index 0000000..e043716 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/lu.go @@ -0,0 +1,422 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +const ( + badSliceLength = "mat: improper slice length" + badLU = "mat: invalid LU factorization" +) + +// LU is a type for creating and using the LU factorization of a matrix. +type LU struct { + lu *Dense + pivot []int + cond float64 +} + +// updateCond updates the stored condition number of the matrix. anorm is the +// norm of the original matrix. If anorm is negative it will be estimated. +func (lu *LU) updateCond(anorm float64, norm lapack.MatrixNorm) { + n := lu.lu.mat.Cols + work := getFloats(4*n, false) + defer putFloats(work) + iwork := getInts(n, false) + defer putInts(iwork) + if anorm < 0 { + // This is an approximation. By the definition of a norm, + // |AB| <= |A| |B|. + // Since A = L*U, we get for the condition number κ that + // κ(A) := |A| |A^-1| = |L*U| |A^-1| <= |L| |U| |A^-1|, + // so this will overestimate the condition number somewhat. + // The norm of the original factorized matrix cannot be stored + // because of update possibilities. + u := lu.lu.asTriDense(n, blas.NonUnit, blas.Upper) + l := lu.lu.asTriDense(n, blas.Unit, blas.Lower) + unorm := lapack64.Lantr(norm, u.mat, work) + lnorm := lapack64.Lantr(norm, l.mat, work) + anorm = unorm * lnorm + } + v := lapack64.Gecon(norm, lu.lu.mat, anorm, work, iwork) + lu.cond = 1 / v +} + +// Factorize computes the LU factorization of the square matrix a and stores the +// result. The LU decomposition will complete regardless of the singularity of a. +// +// The LU factorization is computed with pivoting, and so really the decomposition +// is a PLU decomposition where P is a permutation matrix. The individual matrix +// factors can be extracted from the factorization using the Permutation method +// on Dense, and the LU LTo and UTo methods. +func (lu *LU) Factorize(a Matrix) { + lu.factorize(a, CondNorm) +} + +func (lu *LU) factorize(a Matrix, norm lapack.MatrixNorm) { + r, c := a.Dims() + if r != c { + panic(ErrSquare) + } + if lu.lu == nil { + lu.lu = NewDense(r, r, nil) + } else { + lu.lu.Reset() + lu.lu.reuseAs(r, r) + } + lu.lu.Copy(a) + if cap(lu.pivot) < r { + lu.pivot = make([]int, r) + } + lu.pivot = lu.pivot[:r] + work := getFloats(r, false) + anorm := lapack64.Lange(norm, lu.lu.mat, work) + putFloats(work) + lapack64.Getrf(lu.lu.mat, lu.pivot) + lu.updateCond(anorm, norm) +} + +// isValid returns whether the receiver contains a factorization. +func (lu *LU) isValid() bool { + return lu.lu != nil && !lu.lu.IsZero() +} + +// Cond returns the condition number for the factorized matrix. +// Cond will panic if the receiver does not contain a factorization. 
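+//
+// For example, with a as a placeholder square matrix, the condition number can
+// be checked after factorization before trusting a subsequent solve:
+//
+//  var lu mat.LU
+//  lu.Factorize(a)
+//  if lu.Cond() > mat.ConditionTolerance {
+//      // a is poorly conditioned; solutions computed from lu may be inaccurate.
+//  }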
+func (lu *LU) Cond() float64 { + if !lu.isValid() { + panic(badLU) + } + return lu.cond +} + +// Reset resets the factorization so that it can be reused as the receiver of a +// dimensionally restricted operation. +func (lu *LU) Reset() { + if lu.lu != nil { + lu.lu.Reset() + } + lu.pivot = lu.pivot[:0] +} + +func (lu *LU) isZero() bool { + return len(lu.pivot) == 0 +} + +// Det returns the determinant of the matrix that has been factorized. In many +// expressions, using LogDet will be more numerically stable. +// Det will panic if the receiver does not contain a factorization. +func (lu *LU) Det() float64 { + det, sign := lu.LogDet() + return math.Exp(det) * sign +} + +// LogDet returns the log of the determinant and the sign of the determinant +// for the matrix that has been factorized. Numerical stability in product and +// division expressions is generally improved by working in log space. +// LogDet will panic if the receiver does not contain a factorization. +func (lu *LU) LogDet() (det float64, sign float64) { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + logDiag := getFloats(n, false) + defer putFloats(logDiag) + sign = 1.0 + for i := 0; i < n; i++ { + v := lu.lu.at(i, i) + if v < 0 { + sign *= -1 + } + if lu.pivot[i] != i { + sign *= -1 + } + logDiag[i] = math.Log(math.Abs(v)) + } + return floats.Sum(logDiag), sign +} + +// Pivot returns pivot indices that enable the construction of the permutation +// matrix P (see Dense.Permutation). If swaps == nil, then new memory will be +// allocated, otherwise the length of the input must be equal to the size of the +// factorized matrix. +// Pivot will panic if the receiver does not contain a factorization. +func (lu *LU) Pivot(swaps []int) []int { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + if swaps == nil { + swaps = make([]int, n) + } + if len(swaps) != n { + panic(badSliceLength) + } + // Perform the inverse of the row swaps in order to find the final + // row swap position. + for i := range swaps { + swaps[i] = i + } + for i := n - 1; i >= 0; i-- { + v := lu.pivot[i] + swaps[i], swaps[v] = swaps[v], swaps[i] + } + return swaps +} + +// RankOne updates an LU factorization as if a rank-one update had been applied to +// the original matrix A, storing the result into the receiver. That is, if in +// the original LU decomposition P * L * U = A, in the updated decomposition +// P * L * U = A + alpha * x * y^T. +// RankOne will panic if orig does not contain a factorization. +func (lu *LU) RankOne(orig *LU, alpha float64, x, y Vector) { + if !orig.isValid() { + panic(badLU) + } + + // RankOne uses algorithm a1 on page 28 of "Multiple-Rank Updates to Matrix + // Factorizations for Nonlinear Analysis and Circuit Design" by Linzhong Deng. 
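A short sketch of the determinant and pivot accessors above; Dense.Permutation (defined later in this file) turns the pivot slice into an explicit permutation matrix. NewDense and Formatted are assumed from elsewhere in the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(3, 3, []float64{2, 1, 1, 4, -6, 0, -2, 7, 2})

	var lu mat.LU
	lu.Factorize(a)

	// Det is exp(LogDet)*sign; LogDet is the numerically safer form inside
	// larger products and ratios.
	logDet, sign := lu.LogDet()
	fmt.Printf("det(A) = %.1f, sign = %.0f, log|det| = %.3f\n", lu.Det(), sign, logDet)

	// Pivot(nil) allocates the swap slice; Permutation builds P from it.
	var p mat.Dense
	p.Permutation(3, lu.Pivot(nil))
	fmt.Println(mat.Formatted(&p))
}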
+ // http://web.stanford.edu/group/SOL/dissertations/Linzhong-Deng-thesis.pdf + _, n := orig.lu.Dims() + if r, c := x.Dims(); r != n || c != 1 { + panic(ErrShape) + } + if r, c := y.Dims(); r != n || c != 1 { + panic(ErrShape) + } + if orig != lu { + if lu.isZero() { + if cap(lu.pivot) < n { + lu.pivot = make([]int, n) + } + lu.pivot = lu.pivot[:n] + if lu.lu == nil { + lu.lu = NewDense(n, n, nil) + } else { + lu.lu.reuseAs(n, n) + } + } else if len(lu.pivot) != n { + panic(ErrShape) + } + copy(lu.pivot, orig.pivot) + lu.lu.Copy(orig.lu) + } + + xs := getFloats(n, false) + defer putFloats(xs) + ys := getFloats(n, false) + defer putFloats(ys) + for i := 0; i < n; i++ { + xs[i] = x.AtVec(i) + ys[i] = y.AtVec(i) + } + + // Adjust for the pivoting in the LU factorization + for i, v := range lu.pivot { + xs[i], xs[v] = xs[v], xs[i] + } + + lum := lu.lu.mat + omega := alpha + for j := 0; j < n; j++ { + ujj := lum.Data[j*lum.Stride+j] + ys[j] /= ujj + theta := 1 + xs[j]*ys[j]*omega + beta := omega * ys[j] / theta + gamma := omega * xs[j] + omega -= beta * gamma + lum.Data[j*lum.Stride+j] *= theta + for i := j + 1; i < n; i++ { + xs[i] -= lum.Data[i*lum.Stride+j] * xs[j] + tmp := ys[i] + ys[i] -= lum.Data[j*lum.Stride+i] * ys[j] + lum.Data[i*lum.Stride+j] += beta * xs[i] + lum.Data[j*lum.Stride+i] += gamma * tmp + } + } + lu.updateCond(-1, CondNorm) +} + +// LTo extracts the lower triangular matrix from an LU factorization. +// If dst is nil, a new matrix is allocated. The resulting L matrix is returned. +// LTo will panic if the receiver does not contain a factorization. +func (lu *LU) LTo(dst *TriDense) *TriDense { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + if dst == nil { + dst = NewTriDense(n, Lower, nil) + } else { + dst.reuseAs(n, Lower) + } + // Extract the lower triangular elements. + for i := 0; i < n; i++ { + for j := 0; j < i; j++ { + dst.mat.Data[i*dst.mat.Stride+j] = lu.lu.mat.Data[i*lu.lu.mat.Stride+j] + } + } + // Set ones on the diagonal. + for i := 0; i < n; i++ { + dst.mat.Data[i*dst.mat.Stride+i] = 1 + } + return dst +} + +// UTo extracts the upper triangular matrix from an LU factorization. +// If dst is nil, a new matrix is allocated. The resulting U matrix is returned. +// UTo will panic if the receiver does not contain a factorization. +func (lu *LU) UTo(dst *TriDense) *TriDense { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + if dst == nil { + dst = NewTriDense(n, Upper, nil) + } else { + dst.reuseAs(n, Upper) + } + // Extract the upper triangular elements. + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + dst.mat.Data[i*dst.mat.Stride+j] = lu.lu.mat.Data[i*lu.lu.mat.Stride+j] + } + } + return dst +} + +// Permutation constructs an r×r permutation matrix with the given row swaps. +// A permutation matrix has exactly one element equal to one in each row and column +// and all other elements equal to zero. swaps[i] specifies the row with which +// i will be swapped, which is equivalent to the non-zero column of row i. +func (m *Dense) Permutation(r int, swaps []int) { + m.reuseAs(r, r) + for i := 0; i < r; i++ { + zero(m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+r]) + v := swaps[i] + if v < 0 || v >= r { + panic(ErrRowAccess) + } + m.mat.Data[i*m.mat.Stride+v] = 1 + } +} + +// SolveTo solves a system of linear equations using the LU decomposition of a matrix. 
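A sketch of reassembling A from the extracted factors, under the P * L * U = A convention stated in the Factorize and RankOne comments above; NewDense, Mul and EqualApprox are assumed from elsewhere in this package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(3, 3, []float64{2, 1, 1, 4, -6, 0, -2, 7, 2})

	var lu mat.LU
	lu.Factorize(a)

	l := lu.LTo(nil) // unit lower triangular factor
	u := lu.UTo(nil) // upper triangular factor

	var p mat.Dense
	p.Permutation(3, lu.Pivot(nil))

	// Rebuild P*L*U and compare against the original matrix.
	var pl, plu mat.Dense
	pl.Mul(&p, l)
	plu.Mul(&pl, u)
	fmt.Println("P*L*U == A:", mat.EqualApprox(&plu, a, 1e-12))
}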
+// It computes +// A * X = B if trans == false +// A^T * X = B if trans == true +// In both cases, A is represented in LU factorized form, and the matrix X is +// stored into dst. +// +// If A is singular or near-singular a Condition error is returned. See +// the documentation for Condition for more information. +// SolveTo will panic if the receiver does not contain a factorization. +func (lu *LU) SolveTo(dst *Dense, trans bool, b Matrix) error { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + br, bc := b.Dims() + if br != n { + panic(ErrShape) + } + // TODO(btracey): Should test the condition number instead of testing that + // the determinant is exactly zero. + if lu.Det() == 0 { + return Condition(math.Inf(1)) + } + + dst.reuseAs(n, bc) + bU, _ := untranspose(b) + var restore func() + if dst == bU { + dst, restore = dst.isolatedWorkspace(bU) + defer restore() + } else if rm, ok := bU.(RawMatrixer); ok { + dst.checkOverlap(rm.RawMatrix()) + } + + dst.Copy(b) + t := blas.NoTrans + if trans { + t = blas.Trans + } + lapack64.Getrs(t, lu.lu.mat, dst.mat, lu.pivot) + if lu.cond > ConditionTolerance { + return Condition(lu.cond) + } + return nil +} + +// SolveVecTo solves a system of linear equations using the LU decomposition of a matrix. +// It computes +// A * x = b if trans == false +// A^T * x = b if trans == true +// In both cases, A is represented in LU factorized form, and the vector x is +// stored into dst. +// +// If A is singular or near-singular a Condition error is returned. See +// the documentation for Condition for more information. +// SolveVecTo will panic if the receiver does not contain a factorization. +func (lu *LU) SolveVecTo(dst *VecDense, trans bool, b Vector) error { + if !lu.isValid() { + panic(badLU) + } + + _, n := lu.lu.Dims() + if br, bc := b.Dims(); br != n || bc != 1 { + panic(ErrShape) + } + switch rv := b.(type) { + default: + dst.reuseAs(n) + return lu.SolveTo(dst.asDense(), trans, b) + case RawVectorer: + if dst != b { + dst.checkOverlap(rv.RawVector()) + } + // TODO(btracey): Should test the condition number instead of testing that + // the determinant is exactly zero. + if lu.Det() == 0 { + return Condition(math.Inf(1)) + } + + dst.reuseAs(n) + var restore func() + if dst == b { + dst, restore = dst.isolatedWorkspace(b) + defer restore() + } + dst.CopyVec(b) + vMat := blas64.General{ + Rows: n, + Cols: 1, + Stride: dst.mat.Inc, + Data: dst.mat.Data, + } + t := blas.NoTrans + if trans { + t = blas.Trans + } + lapack64.Getrs(t, lu.lu.mat, vMat, lu.pivot) + if lu.cond > ConditionTolerance { + return Condition(lu.cond) + } + return nil + } +} diff --git a/vendor/gonum.org/v1/gonum/mat/matrix.go b/vendor/gonum.org/v1/gonum/mat/matrix.go new file mode 100644 index 0000000..259739e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/matrix.go @@ -0,0 +1,985 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +// Matrix is the basic matrix interface type. +type Matrix interface { + // Dims returns the dimensions of a Matrix. + Dims() (r, c int) + + // At returns the value of a matrix element at row i, column j. + // It will panic if i or j are out of bounds for the matrix. 
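A small sketch of the vector form of the LU solver above; NewDense, NewVecDense and Formatted are assumed from elsewhere in the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Solve the square system A*x = b through the LU factorization.
	a := mat.NewDense(2, 2, []float64{3, 1, 1, 2})
	b := mat.NewVecDense(2, []float64{9, 8})

	var lu mat.LU
	lu.Factorize(a)

	var x mat.VecDense
	if err := lu.SolveVecTo(&x, false, b); err != nil {
		fmt.Println("near-singular:", err)
	}
	fmt.Printf("x = %v\n", mat.Formatted(&x)) // approximately (2, 3)
}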
+ At(i, j int) float64 + + // T returns the transpose of the Matrix. Whether T returns a copy of the + // underlying data is implementation dependent. + // This method may be implemented using the Transpose type, which + // provides an implicit matrix transpose. + T() Matrix +} + +var ( + _ Matrix = Transpose{} + _ Untransposer = Transpose{} +) + +// Transpose is a type for performing an implicit matrix transpose. It implements +// the Matrix interface, returning values from the transpose of the matrix within. +type Transpose struct { + Matrix Matrix +} + +// At returns the value of the element at row i and column j of the transposed +// matrix, that is, row j and column i of the Matrix field. +func (t Transpose) At(i, j int) float64 { + return t.Matrix.At(j, i) +} + +// Dims returns the dimensions of the transposed matrix. The number of rows returned +// is the number of columns in the Matrix field, and the number of columns is +// the number of rows in the Matrix field. +func (t Transpose) Dims() (r, c int) { + c, r = t.Matrix.Dims() + return r, c +} + +// T performs an implicit transpose by returning the Matrix field. +func (t Transpose) T() Matrix { + return t.Matrix +} + +// Untranspose returns the Matrix field. +func (t Transpose) Untranspose() Matrix { + return t.Matrix +} + +// Untransposer is a type that can undo an implicit transpose. +type Untransposer interface { + // Note: This interface is needed to unify all of the Transpose types. In + // the mat methods, we need to test if the Matrix has been implicitly + // transposed. If this is checked by testing for the specific Transpose type + // then the behavior will be different if the user uses T() or TTri() for a + // triangular matrix. + + // Untranspose returns the underlying Matrix stored for the implicit transpose. + Untranspose() Matrix +} + +// UntransposeBander is a type that can undo an implicit band transpose. +type UntransposeBander interface { + // Untranspose returns the underlying Banded stored for the implicit transpose. + UntransposeBand() Banded +} + +// UntransposeTrier is a type that can undo an implicit triangular transpose. +type UntransposeTrier interface { + // Untranspose returns the underlying Triangular stored for the implicit transpose. + UntransposeTri() Triangular +} + +// UntransposeTriBander is a type that can undo an implicit triangular banded +// transpose. +type UntransposeTriBander interface { + // Untranspose returns the underlying Triangular stored for the implicit transpose. + UntransposeTriBand() TriBanded +} + +// Mutable is a matrix interface type that allows elements to be altered. +type Mutable interface { + // Set alters the matrix element at row i, column j to v. + // It will panic if i or j are out of bounds for the matrix. + Set(i, j int, v float64) + + Matrix +} + +// A RowViewer can return a Vector reflecting a row that is backed by the matrix +// data. The Vector returned will have length equal to the number of columns. +type RowViewer interface { + RowView(i int) Vector +} + +// A RawRowViewer can return a slice of float64 reflecting a row that is backed by the matrix +// data. +type RawRowViewer interface { + RawRowView(i int) []float64 +} + +// A ColViewer can return a Vector reflecting a column that is backed by the matrix +// data. The Vector returned will have length equal to the number of rows. +type ColViewer interface { + ColView(j int) Vector +} + +// A RawColViewer can return a slice of float64 reflecting a column that is backed by the matrix +// data. 
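The Transpose wrapper above never copies data; a tiny sketch of how it behaves, assuming NewDense from elsewhere in the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 3, []float64{1, 2, 3, 4, 5, 6})

	t := a.T() // implicit transpose, backed by the same data
	r, c := t.Dims()
	fmt.Println(r, c)       // 3 2
	fmt.Println(t.At(2, 1)) // 6, i.e. a.At(1, 2)

	// The wrapped operand is recoverable through the Untransposer interface.
	if ut, ok := t.(mat.Untransposer); ok {
		fmt.Println(ut.Untranspose() == mat.Matrix(a)) // true
	}
}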
+type RawColViewer interface { + RawColView(j int) []float64 +} + +// A Cloner can make a copy of a into the receiver, overwriting the previous value of the +// receiver. The clone operation does not make any restriction on shape and will not cause +// shadowing. +type Cloner interface { + Clone(a Matrix) +} + +// A Reseter can reset the matrix so that it can be reused as the receiver of a dimensionally +// restricted operation. This is commonly used when the matrix is being used as a workspace +// or temporary matrix. +// +// If the matrix is a view, using the reset matrix may result in data corruption in elements +// outside the view. +type Reseter interface { + Reset() +} + +// A Copier can make a copy of elements of a into the receiver. The submatrix copied +// starts at row and column 0 and has dimensions equal to the minimum dimensions of +// the two matrices. The number of row and columns copied is returned. +// Copy will copy from a source that aliases the receiver unless the source is transposed; +// an aliasing transpose copy will panic with the exception for a special case when +// the source data has a unitary increment or stride. +type Copier interface { + Copy(a Matrix) (r, c int) +} + +// A Grower can grow the size of the represented matrix by the given number of rows and columns. +// Growing beyond the size given by the Caps method will result in the allocation of a new +// matrix and copying of the elements. If Grow is called with negative increments it will +// panic with ErrIndexOutOfRange. +type Grower interface { + Caps() (r, c int) + Grow(r, c int) Matrix +} + +// A BandWidther represents a banded matrix and can return the left and right half-bandwidths, k1 and +// k2. +type BandWidther interface { + BandWidth() (k1, k2 int) +} + +// A RawMatrixSetter can set the underlying blas64.General used by the receiver. There is no restriction +// on the shape of the receiver. Changes to the receiver's elements will be reflected in the blas64.General.Data. +type RawMatrixSetter interface { + SetRawMatrix(a blas64.General) +} + +// A RawMatrixer can return a blas64.General representation of the receiver. Changes to the blas64.General.Data +// slice will be reflected in the original matrix, changes to the Rows, Cols and Stride fields will not. +type RawMatrixer interface { + RawMatrix() blas64.General +} + +// A RawVectorer can return a blas64.Vector representation of the receiver. Changes to the blas64.Vector.Data +// slice will be reflected in the original matrix, changes to the Inc field will not. +type RawVectorer interface { + RawVector() blas64.Vector +} + +// A NonZeroDoer can call a function for each non-zero element of the receiver. +// The parameters of the function are the element indices and its value. +type NonZeroDoer interface { + DoNonZero(func(i, j int, v float64)) +} + +// A RowNonZeroDoer can call a function for each non-zero element of a row of the receiver. +// The parameters of the function are the element indices and its value. +type RowNonZeroDoer interface { + DoRowNonZero(i int, fn func(i, j int, v float64)) +} + +// A ColNonZeroDoer can call a function for each non-zero element of a column of the receiver. +// The parameters of the function are the element indices and its value. +type ColNonZeroDoer interface { + DoColNonZero(j int, fn func(i, j int, v float64)) +} + +// untranspose untransposes a matrix if applicable. If a is an Untransposer, then +// untranspose returns the underlying matrix and true. 
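A brief sketch of the aliasing contract stated for RawMatrixer above: the returned blas64.General shares its Data slice with the receiver, so writes through the raw view are visible in the matrix. NewDense is assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	d := mat.NewDense(2, 2, []float64{1, 2, 3, 4})

	// *Dense satisfies RawMatrixer; rm.Data aliases d's backing storage.
	rm := d.RawMatrix()
	rm.Data[3] = 40

	fmt.Println(d.At(1, 1)) // 40
}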
If it is not, then it returns +// the input matrix and false. +func untranspose(a Matrix) (Matrix, bool) { + if ut, ok := a.(Untransposer); ok { + return ut.Untranspose(), true + } + return a, false +} + +// untransposeExtract returns an untransposed matrix in a built-in matrix type. +// +// The untransposed matrix is returned unaltered if it is a built-in matrix type. +// Otherwise, if it implements a Raw method, an appropriate built-in type value +// is returned holding the raw matrix value of the input. If neither of these +// is possible, the untransposed matrix is returned. +func untransposeExtract(a Matrix) (Matrix, bool) { + ut, trans := untranspose(a) + switch m := ut.(type) { + case *DiagDense, *SymBandDense, *TriBandDense, *BandDense, *TriDense, *SymDense, *Dense: + return m, trans + // TODO(btracey): Add here if we ever have an equivalent of RawDiagDense. + case RawSymBander: + rsb := m.RawSymBand() + if rsb.Uplo != blas.Upper { + return ut, trans + } + var sb SymBandDense + sb.SetRawSymBand(rsb) + return &sb, trans + case RawTriBander: + rtb := m.RawTriBand() + if rtb.Diag == blas.Unit { + return ut, trans + } + var tb TriBandDense + tb.SetRawTriBand(rtb) + return &tb, trans + case RawBander: + var b BandDense + b.SetRawBand(m.RawBand()) + return &b, trans + case RawTriangular: + rt := m.RawTriangular() + if rt.Diag == blas.Unit { + return ut, trans + } + var t TriDense + t.SetRawTriangular(rt) + return &t, trans + case RawSymmetricer: + rs := m.RawSymmetric() + if rs.Uplo != blas.Upper { + return ut, trans + } + var s SymDense + s.SetRawSymmetric(rs) + return &s, trans + case RawMatrixer: + var d Dense + d.SetRawMatrix(m.RawMatrix()) + return &d, trans + default: + return ut, trans + } +} + +// TODO(btracey): Consider adding CopyCol/CopyRow if the behavior seems useful. +// TODO(btracey): Add in fast paths to Row/Col for the other concrete types +// (TriDense, etc.) as well as relevant interfaces (RowColer, RawRowViewer, etc.) + +// Col copies the elements in the jth column of the matrix into the slice dst. +// The length of the provided slice must equal the number of rows, unless the +// slice is nil in which case a new slice is first allocated. +func Col(dst []float64, j int, a Matrix) []float64 { + r, c := a.Dims() + if j < 0 || j >= c { + panic(ErrColAccess) + } + if dst == nil { + dst = make([]float64, r) + } else { + if len(dst) != r { + panic(ErrColLength) + } + } + aU, aTrans := untranspose(a) + if rm, ok := aU.(RawMatrixer); ok { + m := rm.RawMatrix() + if aTrans { + copy(dst, m.Data[j*m.Stride:j*m.Stride+m.Cols]) + return dst + } + blas64.Copy(blas64.Vector{N: r, Inc: m.Stride, Data: m.Data[j:]}, + blas64.Vector{N: r, Inc: 1, Data: dst}, + ) + return dst + } + for i := 0; i < r; i++ { + dst[i] = a.At(i, j) + } + return dst +} + +// Row copies the elements in the ith row of the matrix into the slice dst. +// The length of the provided slice must equal the number of columns, unless the +// slice is nil in which case a new slice is first allocated. 
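A short sketch of the Col helper above (Row, defined just below, mirrors it for rows); NewDense is assumed from elsewhere in the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 3, []float64{1, 2, 3, 4, 5, 6})

	// A nil dst asks Col to allocate the result slice.
	fmt.Println(mat.Col(nil, 1, a)) // [2 5]

	// A caller-provided slice must have length equal to the number of rows.
	dst := make([]float64, 2)
	mat.Col(dst, 2, a)
	fmt.Println(dst) // [3 6]
}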
+func Row(dst []float64, i int, a Matrix) []float64 { + r, c := a.Dims() + if i < 0 || i >= r { + panic(ErrColAccess) + } + if dst == nil { + dst = make([]float64, c) + } else { + if len(dst) != c { + panic(ErrRowLength) + } + } + aU, aTrans := untranspose(a) + if rm, ok := aU.(RawMatrixer); ok { + m := rm.RawMatrix() + if aTrans { + blas64.Copy(blas64.Vector{N: c, Inc: m.Stride, Data: m.Data[i:]}, + blas64.Vector{N: c, Inc: 1, Data: dst}, + ) + return dst + } + copy(dst, m.Data[i*m.Stride:i*m.Stride+m.Cols]) + return dst + } + for j := 0; j < c; j++ { + dst[j] = a.At(i, j) + } + return dst +} + +// Cond returns the condition number of the given matrix under the given norm. +// The condition number must be based on the 1-norm, 2-norm or ∞-norm. +// Cond will panic with matrix.ErrShape if the matrix has zero size. +// +// BUG(btracey): The computation of the 1-norm and ∞-norm for non-square matrices +// is innacurate, although is typically the right order of magnitude. See +// https://github.com/xianyi/OpenBLAS/issues/636. While the value returned will +// change with the resolution of this bug, the result from Cond will match the +// condition number used internally. +func Cond(a Matrix, norm float64) float64 { + m, n := a.Dims() + if m == 0 || n == 0 { + panic(ErrShape) + } + var lnorm lapack.MatrixNorm + switch norm { + default: + panic("mat: bad norm value") + case 1: + lnorm = lapack.MaxColumnSum + case 2: + var svd SVD + ok := svd.Factorize(a, SVDNone) + if !ok { + return math.Inf(1) + } + return svd.Cond() + case math.Inf(1): + lnorm = lapack.MaxRowSum + } + + if m == n { + // Use the LU decomposition to compute the condition number. + var lu LU + lu.factorize(a, lnorm) + return lu.Cond() + } + if m > n { + // Use the QR factorization to compute the condition number. + var qr QR + qr.factorize(a, lnorm) + return qr.Cond() + } + // Use the LQ factorization to compute the condition number. + var lq LQ + lq.factorize(a, lnorm) + return lq.Cond() +} + +// Det returns the determinant of the matrix a. In many expressions using LogDet +// will be more numerically stable. +func Det(a Matrix) float64 { + det, sign := LogDet(a) + return math.Exp(det) * sign +} + +// Dot returns the sum of the element-wise product of a and b. +// Dot panics if the matrix sizes are unequal. +func Dot(a, b Vector) float64 { + la := a.Len() + lb := b.Len() + if la != lb { + panic(ErrShape) + } + if arv, ok := a.(RawVectorer); ok { + if brv, ok := b.(RawVectorer); ok { + return blas64.Dot(arv.RawVector(), brv.RawVector()) + } + } + var sum float64 + for i := 0; i < la; i++ { + sum += a.At(i, 0) * b.At(i, 0) + } + return sum +} + +// Equal returns whether the matrices a and b have the same size +// and are element-wise equal. 
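A sketch of the package-level Dot, Det and Cond helpers above; NewDense and NewVecDense are assumed from elsewhere in the package:

package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/mat"
)

func main() {
	u := mat.NewVecDense(3, []float64{1, 2, 3})
	v := mat.NewVecDense(3, []float64{4, 5, 6})
	fmt.Println(mat.Dot(u, v)) // 32

	a := mat.NewDense(2, 2, []float64{1, 0, 0, 1e-8})
	fmt.Println("det =", mat.Det(a))
	fmt.Println("cond1 =", mat.Cond(a, 1), "condInf =", mat.Cond(a, math.Inf(1)))
}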
+func Equal(a, b Matrix) bool { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + return false + } + aU, aTrans := untranspose(a) + bU, bTrans := untranspose(b) + if rma, ok := aU.(RawMatrixer); ok { + if rmb, ok := bU.(RawMatrixer); ok { + ra := rma.RawMatrix() + rb := rmb.RawMatrix() + if aTrans == bTrans { + for i := 0; i < ra.Rows; i++ { + for j := 0; j < ra.Cols; j++ { + if ra.Data[i*ra.Stride+j] != rb.Data[i*rb.Stride+j] { + return false + } + } + } + return true + } + for i := 0; i < ra.Rows; i++ { + for j := 0; j < ra.Cols; j++ { + if ra.Data[i*ra.Stride+j] != rb.Data[j*rb.Stride+i] { + return false + } + } + } + return true + } + } + if rma, ok := aU.(RawSymmetricer); ok { + if rmb, ok := bU.(RawSymmetricer); ok { + ra := rma.RawSymmetric() + rb := rmb.RawSymmetric() + // Symmetric matrices are always upper and equal to their transpose. + for i := 0; i < ra.N; i++ { + for j := i; j < ra.N; j++ { + if ra.Data[i*ra.Stride+j] != rb.Data[i*rb.Stride+j] { + return false + } + } + } + return true + } + } + if ra, ok := aU.(*VecDense); ok { + if rb, ok := bU.(*VecDense); ok { + // If the raw vectors are the same length they must either both be + // transposed or both not transposed (or have length 1). + for i := 0; i < ra.mat.N; i++ { + if ra.mat.Data[i*ra.mat.Inc] != rb.mat.Data[i*rb.mat.Inc] { + return false + } + } + return true + } + } + for i := 0; i < ar; i++ { + for j := 0; j < ac; j++ { + if a.At(i, j) != b.At(i, j) { + return false + } + } + } + return true +} + +// EqualApprox returns whether the matrices a and b have the same size and contain all equal +// elements with tolerance for element-wise equality specified by epsilon. Matrices +// with non-equal shapes are not equal. +func EqualApprox(a, b Matrix, epsilon float64) bool { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br || ac != bc { + return false + } + aU, aTrans := untranspose(a) + bU, bTrans := untranspose(b) + if rma, ok := aU.(RawMatrixer); ok { + if rmb, ok := bU.(RawMatrixer); ok { + ra := rma.RawMatrix() + rb := rmb.RawMatrix() + if aTrans == bTrans { + for i := 0; i < ra.Rows; i++ { + for j := 0; j < ra.Cols; j++ { + if !floats.EqualWithinAbsOrRel(ra.Data[i*ra.Stride+j], rb.Data[i*rb.Stride+j], epsilon, epsilon) { + return false + } + } + } + return true + } + for i := 0; i < ra.Rows; i++ { + for j := 0; j < ra.Cols; j++ { + if !floats.EqualWithinAbsOrRel(ra.Data[i*ra.Stride+j], rb.Data[j*rb.Stride+i], epsilon, epsilon) { + return false + } + } + } + return true + } + } + if rma, ok := aU.(RawSymmetricer); ok { + if rmb, ok := bU.(RawSymmetricer); ok { + ra := rma.RawSymmetric() + rb := rmb.RawSymmetric() + // Symmetric matrices are always upper and equal to their transpose. + for i := 0; i < ra.N; i++ { + for j := i; j < ra.N; j++ { + if !floats.EqualWithinAbsOrRel(ra.Data[i*ra.Stride+j], rb.Data[i*rb.Stride+j], epsilon, epsilon) { + return false + } + } + } + return true + } + } + if ra, ok := aU.(*VecDense); ok { + if rb, ok := bU.(*VecDense); ok { + // If the raw vectors are the same length they must either both be + // transposed or both not transposed (or have length 1). 
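A sketch contrasting exact and tolerant comparison as documented above; NewDense is assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 2, []float64{1, 2, 3, 4})
	b := mat.NewDense(2, 2, []float64{1, 2, 3, 4 + 1e-10})

	fmt.Println(mat.Equal(a, b))             // false: exact element-wise test
	fmt.Println(mat.EqualApprox(a, b, 1e-8)) // true: equal within the tolerance
}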
+ for i := 0; i < ra.mat.N; i++ { + if !floats.EqualWithinAbsOrRel(ra.mat.Data[i*ra.mat.Inc], rb.mat.Data[i*rb.mat.Inc], epsilon, epsilon) { + return false + } + } + return true + } + } + for i := 0; i < ar; i++ { + for j := 0; j < ac; j++ { + if !floats.EqualWithinAbsOrRel(a.At(i, j), b.At(i, j), epsilon, epsilon) { + return false + } + } + } + return true +} + +// LogDet returns the log of the determinant and the sign of the determinant +// for the matrix that has been factorized. Numerical stability in product and +// division expressions is generally improved by working in log space. +func LogDet(a Matrix) (det float64, sign float64) { + // TODO(btracey): Add specialized routines for TriDense, etc. + var lu LU + lu.Factorize(a) + return lu.LogDet() +} + +// Max returns the largest element value of the matrix A. +// Max will panic with matrix.ErrShape if the matrix has zero size. +func Max(a Matrix) float64 { + r, c := a.Dims() + if r == 0 || c == 0 { + panic(ErrShape) + } + // Max(A) = Max(A^T) + aU, _ := untranspose(a) + switch m := aU.(type) { + case RawMatrixer: + rm := m.RawMatrix() + max := math.Inf(-1) + for i := 0; i < rm.Rows; i++ { + for _, v := range rm.Data[i*rm.Stride : i*rm.Stride+rm.Cols] { + if v > max { + max = v + } + } + } + return max + case RawTriangular: + rm := m.RawTriangular() + // The max of a triangular is at least 0 unless the size is 1. + if rm.N == 1 { + return rm.Data[0] + } + max := 0.0 + if rm.Uplo == blas.Upper { + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride+i : i*rm.Stride+rm.N] { + if v > max { + max = v + } + } + } + return max + } + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride : i*rm.Stride+i+1] { + if v > max { + max = v + } + } + } + return max + case RawSymmetricer: + rm := m.RawSymmetric() + if rm.Uplo != blas.Upper { + panic(badSymTriangle) + } + max := math.Inf(-1) + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride+i : i*rm.Stride+rm.N] { + if v > max { + max = v + } + } + } + return max + default: + r, c := aU.Dims() + max := math.Inf(-1) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + v := aU.At(i, j) + if v > max { + max = v + } + } + } + return max + } +} + +// Min returns the smallest element value of the matrix A. +// Min will panic with matrix.ErrShape if the matrix has zero size. +func Min(a Matrix) float64 { + r, c := a.Dims() + if r == 0 || c == 0 { + panic(ErrShape) + } + // Min(A) = Min(A^T) + aU, _ := untranspose(a) + switch m := aU.(type) { + case RawMatrixer: + rm := m.RawMatrix() + min := math.Inf(1) + for i := 0; i < rm.Rows; i++ { + for _, v := range rm.Data[i*rm.Stride : i*rm.Stride+rm.Cols] { + if v < min { + min = v + } + } + } + return min + case RawTriangular: + rm := m.RawTriangular() + // The min of a triangular is at most 0 unless the size is 1. 
+ if rm.N == 1 { + return rm.Data[0] + } + min := 0.0 + if rm.Uplo == blas.Upper { + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride+i : i*rm.Stride+rm.N] { + if v < min { + min = v + } + } + } + return min + } + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride : i*rm.Stride+i+1] { + if v < min { + min = v + } + } + } + return min + case RawSymmetricer: + rm := m.RawSymmetric() + if rm.Uplo != blas.Upper { + panic(badSymTriangle) + } + min := math.Inf(1) + for i := 0; i < rm.N; i++ { + for _, v := range rm.Data[i*rm.Stride+i : i*rm.Stride+rm.N] { + if v < min { + min = v + } + } + } + return min + default: + r, c := aU.Dims() + min := math.Inf(1) + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + v := aU.At(i, j) + if v < min { + min = v + } + } + } + return min + } +} + +// Norm returns the specified (induced) norm of the matrix a. See +// https://en.wikipedia.org/wiki/Matrix_norm for the definition of an induced norm. +// +// Valid norms are: +// 1 - The maximum absolute column sum +// 2 - Frobenius norm, the square root of the sum of the squares of the elements. +// Inf - The maximum absolute row sum. +// Norm will panic with ErrNormOrder if an illegal norm order is specified and +// with matrix.ErrShape if the matrix has zero size. +func Norm(a Matrix, norm float64) float64 { + r, c := a.Dims() + if r == 0 || c == 0 { + panic(ErrShape) + } + aU, aTrans := untranspose(a) + var work []float64 + switch rma := aU.(type) { + case RawMatrixer: + rm := rma.RawMatrix() + n := normLapack(norm, aTrans) + if n == lapack.MaxColumnSum { + work = getFloats(rm.Cols, false) + defer putFloats(work) + } + return lapack64.Lange(n, rm, work) + case RawTriangular: + rm := rma.RawTriangular() + n := normLapack(norm, aTrans) + if n == lapack.MaxRowSum || n == lapack.MaxColumnSum { + work = getFloats(rm.N, false) + defer putFloats(work) + } + return lapack64.Lantr(n, rm, work) + case RawSymmetricer: + rm := rma.RawSymmetric() + n := normLapack(norm, aTrans) + if n == lapack.MaxRowSum || n == lapack.MaxColumnSum { + work = getFloats(rm.N, false) + defer putFloats(work) + } + return lapack64.Lansy(n, rm, work) + case *VecDense: + rv := rma.RawVector() + switch norm { + default: + panic("unreachable") + case 1: + if aTrans { + imax := blas64.Iamax(rv) + return math.Abs(rma.At(imax, 0)) + } + return blas64.Asum(rv) + case 2: + return blas64.Nrm2(rv) + case math.Inf(1): + if aTrans { + return blas64.Asum(rv) + } + imax := blas64.Iamax(rv) + return math.Abs(rma.At(imax, 0)) + } + } + switch norm { + default: + panic("unreachable") + case 1: + var max float64 + for j := 0; j < c; j++ { + var sum float64 + for i := 0; i < r; i++ { + sum += math.Abs(a.At(i, j)) + } + if sum > max { + max = sum + } + } + return max + case 2: + var sum float64 + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + v := a.At(i, j) + sum += v * v + } + } + return math.Sqrt(sum) + case math.Inf(1): + var max float64 + for i := 0; i < r; i++ { + var sum float64 + for j := 0; j < c; j++ { + sum += math.Abs(a.At(i, j)) + } + if sum > max { + max = sum + } + } + return max + } +} + +// normLapack converts the float64 norm input in Norm to a lapack.MatrixNorm. 
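A sketch of the element extrema and norm helpers above, using the three norm orders the documentation lists; NewDense is assumed:

package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 2, []float64{-1, 2, 3, -4})

	fmt.Println(mat.Max(a), mat.Min(a)) // 3 -4

	fmt.Println(mat.Norm(a, 1))           // maximum absolute column sum: 6
	fmt.Println(mat.Norm(a, 2))           // Frobenius norm: sqrt(30)
	fmt.Println(mat.Norm(a, math.Inf(1))) // maximum absolute row sum: 7
}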
+func normLapack(norm float64, aTrans bool) lapack.MatrixNorm { + switch norm { + case 1: + n := lapack.MaxColumnSum + if aTrans { + n = lapack.MaxRowSum + } + return n + case 2: + return lapack.Frobenius + case math.Inf(1): + n := lapack.MaxRowSum + if aTrans { + n = lapack.MaxColumnSum + } + return n + default: + panic(ErrNormOrder) + } +} + +// Sum returns the sum of the elements of the matrix. +func Sum(a Matrix) float64 { + + var sum float64 + aU, _ := untranspose(a) + switch rma := aU.(type) { + case RawSymmetricer: + rm := rma.RawSymmetric() + for i := 0; i < rm.N; i++ { + // Diagonals count once while off-diagonals count twice. + sum += rm.Data[i*rm.Stride+i] + var s float64 + for _, v := range rm.Data[i*rm.Stride+i+1 : i*rm.Stride+rm.N] { + s += v + } + sum += 2 * s + } + return sum + case RawTriangular: + rm := rma.RawTriangular() + var startIdx, endIdx int + for i := 0; i < rm.N; i++ { + // Start and end index for this triangle-row. + switch rm.Uplo { + case blas.Upper: + startIdx = i + endIdx = rm.N + case blas.Lower: + startIdx = 0 + endIdx = i + 1 + default: + panic(badTriangle) + } + for _, v := range rm.Data[i*rm.Stride+startIdx : i*rm.Stride+endIdx] { + sum += v + } + } + return sum + case RawMatrixer: + rm := rma.RawMatrix() + for i := 0; i < rm.Rows; i++ { + for _, v := range rm.Data[i*rm.Stride : i*rm.Stride+rm.Cols] { + sum += v + } + } + return sum + case *VecDense: + rm := rma.RawVector() + for i := 0; i < rm.N; i++ { + sum += rm.Data[i*rm.Inc] + } + return sum + default: + r, c := a.Dims() + for i := 0; i < r; i++ { + for j := 0; j < c; j++ { + sum += a.At(i, j) + } + } + return sum + } +} + +// A Tracer can compute the trace of the matrix. Trace must panic if the +// matrix is not square. +type Tracer interface { + Trace() float64 +} + +// Trace returns the trace of the matrix. Trace will panic if the +// matrix is not square. +func Trace(a Matrix) float64 { + m, _ := untransposeExtract(a) + if t, ok := m.(Tracer); ok { + return t.Trace() + } + r, c := a.Dims() + if r != c { + panic(ErrSquare) + } + var v float64 + for i := 0; i < r; i++ { + v += a.At(i, i) + } + return v +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +// use returns a float64 slice with l elements, using f if it +// has the necessary capacity, otherwise creating a new slice. +func use(f []float64, l int) []float64 { + if l <= cap(f) { + return f[:l] + } + return make([]float64, l) +} + +// useZeroed returns a float64 slice with l elements, using f if it +// has the necessary capacity, otherwise creating a new slice. The +// elements of the returned slice are guaranteed to be zero. +func useZeroed(f []float64, l int) []float64 { + if l <= cap(f) { + f = f[:l] + zero(f) + return f + } + return make([]float64, l) +} + +// zero zeros the given slice's elements. +func zero(f []float64) { + for i := range f { + f[i] = 0 + } +} + +// useInt returns an int slice with l elements, using i if it +// has the necessary capacity, otherwise creating a new slice. +func useInt(i []int, l int) []int { + if l <= cap(i) { + return i[:l] + } + return make([]int, l) +} diff --git a/vendor/gonum.org/v1/gonum/mat/offset.go b/vendor/gonum.org/v1/gonum/mat/offset.go new file mode 100644 index 0000000..af2c03b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/offset.go @@ -0,0 +1,20 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. 
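A sketch of Sum and Trace above; Trace panics for non-square input, so only a square example is shown. NewDense is assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(2, 2, []float64{1, 2, 3, 4})

	fmt.Println(mat.Sum(a))   // 10: sum over every element
	fmt.Println(mat.Trace(a)) // 5: sum of the diagonal
}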
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !appengine,!safe + +package mat + +import "unsafe" + +// offset returns the number of float64 values b[0] is after a[0]. +func offset(a, b []float64) int { + if &a[0] == &b[0] { + return 0 + } + // This expression must be atomic with respect to GC moves. + // At this stage this is true, because the GC does not + // move. See https://golang.org/issue/12445. + return int(uintptr(unsafe.Pointer(&b[0]))-uintptr(unsafe.Pointer(&a[0]))) / int(unsafe.Sizeof(float64(0))) +} diff --git a/vendor/gonum.org/v1/gonum/mat/offset_appengine.go b/vendor/gonum.org/v1/gonum/mat/offset_appengine.go new file mode 100644 index 0000000..df61747 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/offset_appengine.go @@ -0,0 +1,24 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine safe + +package mat + +import "reflect" + +var sizeOfFloat64 = int(reflect.TypeOf(float64(0)).Size()) + +// offset returns the number of float64 values b[0] is after a[0]. +func offset(a, b []float64) int { + va0 := reflect.ValueOf(a).Index(0) + vb0 := reflect.ValueOf(b).Index(0) + if va0.Addr() == vb0.Addr() { + return 0 + } + // This expression must be atomic with respect to GC moves. + // At this stage this is true, because the GC does not + // move. See https://golang.org/issue/12445. + return int(vb0.UnsafeAddr()-va0.UnsafeAddr()) / sizeOfFloat64 +} diff --git a/vendor/gonum.org/v1/gonum/mat/pool.go b/vendor/gonum.org/v1/gonum/mat/pool.go new file mode 100644 index 0000000..25ca29f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/pool.go @@ -0,0 +1,236 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "sync" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var tab64 = [64]byte{ + 0x3f, 0x00, 0x3a, 0x01, 0x3b, 0x2f, 0x35, 0x02, + 0x3c, 0x27, 0x30, 0x1b, 0x36, 0x21, 0x2a, 0x03, + 0x3d, 0x33, 0x25, 0x28, 0x31, 0x12, 0x1c, 0x14, + 0x37, 0x1e, 0x22, 0x0b, 0x2b, 0x0e, 0x16, 0x04, + 0x3e, 0x39, 0x2e, 0x34, 0x26, 0x1a, 0x20, 0x29, + 0x32, 0x24, 0x11, 0x13, 0x1d, 0x0a, 0x0d, 0x15, + 0x38, 0x2d, 0x19, 0x1f, 0x23, 0x10, 0x09, 0x0c, + 0x2c, 0x18, 0x0f, 0x08, 0x17, 0x07, 0x06, 0x05, +} + +// bits returns the ceiling of base 2 log of v. +// Approach based on http://stackoverflow.com/a/11398748. +func bits(v uint64) byte { + if v == 0 { + return 0 + } + v <<= 2 + v-- + v |= v >> 1 + v |= v >> 2 + v |= v >> 4 + v |= v >> 8 + v |= v >> 16 + v |= v >> 32 + return tab64[((v-(v>>1))*0x07EDD5E59A4E28C2)>>58] - 1 +} + +var ( + // pool contains size stratified workspace Dense pools. + // Each pool element i returns sized matrices with a data + // slice capped at 1< 2. + if !m.IsZero() { + if fr != r { + panic(ErrShape) + } + if _, lc := factors[len(factors)-1].Dims(); lc != c { + panic(ErrShape) + } + } + + dims := make([]int, len(factors)+1) + dims[0] = r + dims[len(dims)-1] = c + pc := fc + for i, f := range factors[1:] { + cr, cc := f.Dims() + dims[i+1] = cr + if pc != cr { + panic(ErrShape) + } + pc = cc + } + + return &multiplier{ + factors: factors, + dims: dims, + table: newTable(len(factors)), + } +} + +// optimize determines an optimal matrix multiply operation order. 
+func (p *multiplier) optimize() { + if debugProductWalk { + fmt.Printf("chain dims: %v\n", p.dims) + } + const maxInt = int(^uint(0) >> 1) + for f := 1; f < len(p.factors); f++ { + for i := 0; i < len(p.factors)-f; i++ { + j := i + f + p.table.set(i, j, entry{cost: maxInt}) + for k := i; k < j; k++ { + cost := p.table.at(i, k).cost + p.table.at(k+1, j).cost + p.dims[i]*p.dims[k+1]*p.dims[j+1] + if cost < p.table.at(i, j).cost { + p.table.set(i, j, entry{cost: cost, k: k}) + } + } + } + } +} + +// multiply walks the optimal operation tree found by optimize, +// leaving the final result in the stack. It returns the +// product, which may be copied but should be returned to +// the workspace pool. +func (p *multiplier) multiply() *Dense { + result, _ := p.multiplySubchain(0, len(p.factors)-1) + if debugProductWalk { + r, c := result.Dims() + fmt.Printf("\tpop result (%d×%d) cost=%d\n", r, c, p.table.at(0, len(p.factors)-1).cost) + } + return result.(*Dense) +} + +func (p *multiplier) multiplySubchain(i, j int) (m Matrix, intermediate bool) { + if i == j { + return p.factors[i], false + } + + a, aTmp := p.multiplySubchain(i, p.table.at(i, j).k) + b, bTmp := p.multiplySubchain(p.table.at(i, j).k+1, j) + + ar, ac := a.Dims() + br, bc := b.Dims() + if ac != br { + // Panic with a string since this + // is not a user-facing panic. + panic(ErrShape.Error()) + } + + if debugProductWalk { + fmt.Printf("\tpush f[%d] (%d×%d)%s * f[%d] (%d×%d)%s\n", + i, ar, ac, result(aTmp), j, br, bc, result(bTmp)) + } + + r := getWorkspace(ar, bc, false) + r.Mul(a, b) + if aTmp { + putWorkspace(a.(*Dense)) + } + if bTmp { + putWorkspace(b.(*Dense)) + } + return r, true +} + +type entry struct { + k int // is the chain subdivision index. + cost int // cost is the cost of the operation. +} + +// table is a row major n×n dynamic programming table. +type table struct { + n int + entries []entry +} + +func newTable(n int) table { + return table{n: n, entries: make([]entry, n*n)} +} + +func (t table) at(i, j int) entry { return t.entries[i*t.n+j] } +func (t table) set(i, j int, e entry) { t.entries[i*t.n+j] = e } + +type result bool + +func (r result) String() string { + if r { + return " (popped result)" + } + return "" +} diff --git a/vendor/gonum.org/v1/gonum/mat/qr.go b/vendor/gonum.org/v1/gonum/mat/qr.go new file mode 100644 index 0000000..bf38ee4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/qr.go @@ -0,0 +1,260 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +const badQR = "mat: invalid QR factorization" + +// QR is a type for creating and using the QR factorization of a matrix. +type QR struct { + qr *Dense + tau []float64 + cond float64 +} + +func (qr *QR) updateCond(norm lapack.MatrixNorm) { + // Since A = Q*R, and Q is orthogonal, we get for the condition number κ + // κ(A) := |A| |A^-1| = |Q*R| |(Q*R)^-1| = |R| |R^-1 * Q^T| + // = |R| |R^-1| = κ(R), + // where we used that fact that Q^-1 = Q^T. However, this assumes that + // the matrix norm is invariant under orthogonal transformations which + // is not the case for CondNorm. Hopefully the error is negligible: κ + // is only a qualitative measure anyway. 
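The optimize/multiplySubchain machinery above is the classic dynamic-programming matrix-chain ordering; in gonum it backs the exported Dense.Product method, whose definition is not shown in this hunk, so the call below should be read as an assumption. NewDense is also assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Chain with dimensions 2×10, 10×3 and 3×4: the cost table chooses the
	// cheapest association before any multiplication is issued.
	a := mat.NewDense(2, 10, nil)
	b := mat.NewDense(10, 3, nil)
	c := mat.NewDense(3, 4, nil)

	var p mat.Dense
	p.Product(a, b, c)

	r, cols := p.Dims()
	fmt.Println(r, cols) // 2 4
}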
+ n := qr.qr.mat.Cols + work := getFloats(3*n, false) + iwork := getInts(n, false) + r := qr.qr.asTriDense(n, blas.NonUnit, blas.Upper) + v := lapack64.Trcon(norm, r.mat, work, iwork) + putFloats(work) + putInts(iwork) + qr.cond = 1 / v +} + +// Factorize computes the QR factorization of an m×n matrix a where m >= n. The QR +// factorization always exists even if A is singular. +// +// The QR decomposition is a factorization of the matrix A such that A = Q * R. +// The matrix Q is an orthonormal m×m matrix, and R is an m×n upper triangular matrix. +// Q and R can be extracted using the QTo and RTo methods. +func (qr *QR) Factorize(a Matrix) { + qr.factorize(a, CondNorm) +} + +func (qr *QR) factorize(a Matrix, norm lapack.MatrixNorm) { + m, n := a.Dims() + if m < n { + panic(ErrShape) + } + k := min(m, n) + if qr.qr == nil { + qr.qr = &Dense{} + } + qr.qr.Clone(a) + work := []float64{0} + qr.tau = make([]float64, k) + lapack64.Geqrf(qr.qr.mat, qr.tau, work, -1) + + work = getFloats(int(work[0]), false) + lapack64.Geqrf(qr.qr.mat, qr.tau, work, len(work)) + putFloats(work) + qr.updateCond(norm) +} + +// isValid returns whether the receiver contains a factorization. +func (qr *QR) isValid() bool { + return qr.qr != nil && !qr.qr.IsZero() +} + +// Cond returns the condition number for the factorized matrix. +// Cond will panic if the receiver does not contain a factorization. +func (qr *QR) Cond() float64 { + if !qr.isValid() { + panic(badQR) + } + return qr.cond +} + +// TODO(btracey): Add in the "Reduced" forms for extracting the n×n orthogonal +// and upper triangular matrices. + +// RTo extracts the m×n upper trapezoidal matrix from a QR decomposition. +// If dst is nil, a new matrix is allocated. The resulting dst matrix is returned. +// RTo will panic if the receiver does not contain a factorization. +func (qr *QR) RTo(dst *Dense) *Dense { + if !qr.isValid() { + panic(badQR) + } + + r, c := qr.qr.Dims() + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + // Disguise the QR as an upper triangular + t := &TriDense{ + mat: blas64.Triangular{ + N: c, + Stride: qr.qr.mat.Stride, + Data: qr.qr.mat.Data, + Uplo: blas.Upper, + Diag: blas.NonUnit, + }, + cap: qr.qr.capCols, + } + dst.Copy(t) + + // Zero below the triangular. + for i := r; i < c; i++ { + zero(dst.mat.Data[i*dst.mat.Stride : i*dst.mat.Stride+c]) + } + + return dst +} + +// QTo extracts the m×m orthonormal matrix Q from a QR decomposition. +// If dst is nil, a new matrix is allocated. The resulting Q matrix is returned. +// QTo will panic if the receiver does not contain a factorization. +func (qr *QR) QTo(dst *Dense) *Dense { + if !qr.isValid() { + panic(badQR) + } + + r, _ := qr.qr.Dims() + if dst == nil { + dst = NewDense(r, r, nil) + } else { + dst.reuseAsZeroed(r, r) + } + + // Set Q = I. + for i := 0; i < r*r; i += r + 1 { + dst.mat.Data[i] = 1 + } + + // Construct Q from the elementary reflectors. + work := []float64{0} + lapack64.Ormqr(blas.Left, blas.NoTrans, qr.qr.mat, qr.tau, dst.mat, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormqr(blas.Left, blas.NoTrans, qr.qr.mat, qr.tau, dst.mat, work, len(work)) + putFloats(work) + + return dst +} + +// SolveTo finds a minimum-norm solution to a system of linear equations defined +// by the matrices A and b, where A is an m×n matrix represented in its QR factorized +// form. If A is singular or near-singular a Condition error is returned. +// See the documentation for Condition for more information. 
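A sketch of extracting the factors via QTo and RTo above and rebuilding the input; NewDense, Mul and EqualApprox are assumed from elsewhere in the package:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewDense(3, 2, []float64{1, 2, 3, 4, 5, 6})

	var qr mat.QR
	qr.Factorize(a)

	q := qr.QTo(nil) // 3×3 orthonormal Q
	r := qr.RTo(nil) // 3×2 upper trapezoidal R

	var prod mat.Dense
	prod.Mul(q, r)
	fmt.Println("Q*R == A:", mat.EqualApprox(&prod, a, 1e-12))
}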
+// +// The minimization problem solved depends on the input parameters. +// If trans == false, find X such that ||A*X - B||_2 is minimized. +// If trans == true, find the minimum norm solution of A^T * X = B. +// The solution matrix, X, is stored in place into dst. +// SolveTo will panic if the receiver does not contain a factorization. +func (qr *QR) SolveTo(dst *Dense, trans bool, b Matrix) error { + if !qr.isValid() { + panic(badQR) + } + + r, c := qr.qr.Dims() + br, bc := b.Dims() + + // The QR solve algorithm stores the result in-place into the right hand side. + // The storage for the answer must be large enough to hold both b and x. + // However, this method's receiver must be the size of x. Copy b, and then + // copy the result into m at the end. + if trans { + if c != br { + panic(ErrShape) + } + dst.reuseAs(r, bc) + } else { + if r != br { + panic(ErrShape) + } + dst.reuseAs(c, bc) + } + // Do not need to worry about overlap between m and b because x has its own + // independent storage. + w := getWorkspace(max(r, c), bc, false) + w.Copy(b) + t := qr.qr.asTriDense(qr.qr.mat.Cols, blas.NonUnit, blas.Upper).mat + if trans { + ok := lapack64.Trtrs(blas.Trans, t, w.mat) + if !ok { + return Condition(math.Inf(1)) + } + for i := c; i < r; i++ { + zero(w.mat.Data[i*w.mat.Stride : i*w.mat.Stride+bc]) + } + work := []float64{0} + lapack64.Ormqr(blas.Left, blas.NoTrans, qr.qr.mat, qr.tau, w.mat, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormqr(blas.Left, blas.NoTrans, qr.qr.mat, qr.tau, w.mat, work, len(work)) + putFloats(work) + } else { + work := []float64{0} + lapack64.Ormqr(blas.Left, blas.Trans, qr.qr.mat, qr.tau, w.mat, work, -1) + work = getFloats(int(work[0]), false) + lapack64.Ormqr(blas.Left, blas.Trans, qr.qr.mat, qr.tau, w.mat, work, len(work)) + putFloats(work) + + ok := lapack64.Trtrs(blas.NoTrans, t, w.mat) + if !ok { + return Condition(math.Inf(1)) + } + } + // X was set above to be the correct size for the result. + dst.Copy(w) + putWorkspace(w) + if qr.cond > ConditionTolerance { + return Condition(qr.cond) + } + return nil +} + +// SolveVecTo finds a minimum-norm solution to a system of linear equations, +// Ax = b. +// See QR.SolveTo for the full documentation. +// SolveVecTo will panic if the receiver does not contain a factorization. +func (qr *QR) SolveVecTo(dst *VecDense, trans bool, b Vector) error { + if !qr.isValid() { + panic(badQR) + } + + r, c := qr.qr.Dims() + if _, bc := b.Dims(); bc != 1 { + panic(ErrShape) + } + + // The Solve implementation is non-trivial, so rather than duplicate the code, + // instead recast the VecDenses as Dense and call the matrix code. + bm := Matrix(b) + if rv, ok := b.(RawVectorer); ok { + bmat := rv.RawVector() + if dst != b { + dst.checkOverlap(bmat) + } + b := VecDense{mat: bmat} + bm = b.asDense() + } + if trans { + dst.reuseAs(r) + } else { + dst.reuseAs(c) + } + return qr.SolveTo(dst.asDense(), trans, bm) + +} diff --git a/vendor/gonum.org/v1/gonum/mat/shadow.go b/vendor/gonum.org/v1/gonum/mat/shadow.go new file mode 100644 index 0000000..cc62e44 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/shadow.go @@ -0,0 +1,226 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas/blas64" +) + +const ( + // regionOverlap is the panic string used for the general case + // of a matrix region overlap between a source and destination. 
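A least-squares sketch of the QR solver above for an overdetermined system; NewDense and Formatted are assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Fit y ~ c0 + c1*x through three points; trans == false minimizes
	// ||A*X - B||_2 as documented above.
	a := mat.NewDense(3, 2, []float64{1, 0, 1, 1, 1, 2})
	y := mat.NewDense(3, 1, []float64{0.1, 0.9, 2.1})

	var qr mat.QR
	qr.Factorize(a)

	var c mat.Dense
	if err := qr.SolveTo(&c, false, y); err != nil {
		fmt.Println("near-singular:", err)
	}
	fmt.Printf("coefficients:\n%v\n", mat.Formatted(&c))
}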
+ regionOverlap = "mat: bad region: overlap" + + // regionIdentity is the panic string used for the specific + // case of complete agreement between a source and a destination. + regionIdentity = "mat: bad region: identical" + + // mismatchedStrides is the panic string used for overlapping + // data slices with differing strides. + mismatchedStrides = "mat: bad region: different strides" +) + +// checkOverlap returns false if the receiver does not overlap data elements +// referenced by the parameter and panics otherwise. +// +// checkOverlap methods return a boolean to allow the check call to be added to a +// boolean expression, making use of short-circuit operators. +func checkOverlap(a, b blas64.General) bool { + if cap(a.Data) == 0 || cap(b.Data) == 0 { + return false + } + + off := offset(a.Data[:1], b.Data[:1]) + + if off == 0 { + // At least one element overlaps. + if a.Cols == b.Cols && a.Rows == b.Rows && a.Stride == b.Stride { + panic(regionIdentity) + } + panic(regionOverlap) + } + + if off > 0 && len(a.Data) <= off { + // We know a is completely before b. + return false + } + if off < 0 && len(b.Data) <= -off { + // We know a is completely after b. + return false + } + + if a.Stride != b.Stride { + // Too hard, so assume the worst. + panic(mismatchedStrides) + } + + if off < 0 { + off = -off + a.Cols, b.Cols = b.Cols, a.Cols + } + if rectanglesOverlap(off, a.Cols, b.Cols, a.Stride) { + panic(regionOverlap) + } + return false +} + +func (m *Dense) checkOverlap(a blas64.General) bool { + return checkOverlap(m.RawMatrix(), a) +} + +func (m *Dense) checkOverlapMatrix(a Matrix) bool { + if m == a { + return false + } + var amat blas64.General + switch a := a.(type) { + default: + return false + case RawMatrixer: + amat = a.RawMatrix() + case RawSymmetricer: + amat = generalFromSymmetric(a.RawSymmetric()) + case RawTriangular: + amat = generalFromTriangular(a.RawTriangular()) + } + return m.checkOverlap(amat) +} + +func (s *SymDense) checkOverlap(a blas64.General) bool { + return checkOverlap(generalFromSymmetric(s.RawSymmetric()), a) +} + +func (s *SymDense) checkOverlapMatrix(a Matrix) bool { + if s == a { + return false + } + var amat blas64.General + switch a := a.(type) { + default: + return false + case RawMatrixer: + amat = a.RawMatrix() + case RawSymmetricer: + amat = generalFromSymmetric(a.RawSymmetric()) + case RawTriangular: + amat = generalFromTriangular(a.RawTriangular()) + } + return s.checkOverlap(amat) +} + +// generalFromSymmetric returns a blas64.General with the backing +// data and dimensions of a. +func generalFromSymmetric(a blas64.Symmetric) blas64.General { + return blas64.General{ + Rows: a.N, + Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } +} + +func (t *TriDense) checkOverlap(a blas64.General) bool { + return checkOverlap(generalFromTriangular(t.RawTriangular()), a) +} + +func (t *TriDense) checkOverlapMatrix(a Matrix) bool { + if t == a { + return false + } + var amat blas64.General + switch a := a.(type) { + default: + return false + case RawMatrixer: + amat = a.RawMatrix() + case RawSymmetricer: + amat = generalFromSymmetric(a.RawSymmetric()) + case RawTriangular: + amat = generalFromTriangular(a.RawTriangular()) + } + return t.checkOverlap(amat) +} + +// generalFromTriangular returns a blas64.General with the backing +// data and dimensions of a. 
+func generalFromTriangular(a blas64.Triangular) blas64.General { + return blas64.General{ + Rows: a.N, + Cols: a.N, + Stride: a.Stride, + Data: a.Data, + } +} + +func (v *VecDense) checkOverlap(a blas64.Vector) bool { + mat := v.mat + if cap(mat.Data) == 0 || cap(a.Data) == 0 { + return false + } + + off := offset(mat.Data[:1], a.Data[:1]) + + if off == 0 { + // At least one element overlaps. + if mat.Inc == a.Inc && len(mat.Data) == len(a.Data) { + panic(regionIdentity) + } + panic(regionOverlap) + } + + if off > 0 && len(mat.Data) <= off { + // We know v is completely before a. + return false + } + if off < 0 && len(a.Data) <= -off { + // We know v is completely after a. + return false + } + + if mat.Inc != a.Inc { + // Too hard, so assume the worst. + panic(mismatchedStrides) + } + + if mat.Inc == 1 || off&mat.Inc == 0 { + panic(regionOverlap) + } + return false +} + +// rectanglesOverlap returns whether the strided rectangles a and b overlap +// when b is offset by off elements after a but has at least one element before +// the end of a. off must be positive. a and b have aCols and bCols respectively. +// +// rectanglesOverlap works by shifting both matrices left such that the left +// column of a is at 0. The column indexes are flattened by obtaining the shifted +// relative left and right column positions modulo the common stride. This allows +// direct comparison of the column offsets when the matrix backing data slices +// are known to overlap. +func rectanglesOverlap(off, aCols, bCols, stride int) bool { + if stride == 1 { + // Unit stride means overlapping data + // slices must overlap as matrices. + return true + } + + // Flatten the shifted matrix column positions + // so a starts at 0, modulo the common stride. + aTo := aCols + // The mod stride operations here make the from + // and to indexes comparable between a and b when + // the data slices of a and b overlap. + bFrom := off % stride + bTo := (bFrom + bCols) % stride + + if bTo == 0 || bFrom < bTo { + // b matrix is not wrapped: compare for + // simple overlap. + return bFrom < aTo + } + + // b strictly wraps and so must overlap with a. + return true +} diff --git a/vendor/gonum.org/v1/gonum/mat/solve.go b/vendor/gonum.org/v1/gonum/mat/solve.go new file mode 100644 index 0000000..1181328 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/solve.go @@ -0,0 +1,140 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack/lapack64" +) + +// Solve finds a minimum-norm solution to a system of linear equations defined +// by the matrices A and B. If A is singular or near-singular, a Condition error +// is returned. See the documentation for Condition for more information. +// +// The minimization problem solved depends on the input parameters: +// - if m >= n, find X such that ||A*X - B||_2 is minimized, +// - if m < n, find the minimum norm solution of A * X = B. +// The solution matrix, X, is stored in-place into the receiver. +func (m *Dense) Solve(a, b Matrix) error { + ar, ac := a.Dims() + br, bc := b.Dims() + if ar != br { + panic(ErrShape) + } + m.reuseAs(ac, bc) + + // TODO(btracey): Add special cases for SymDense, etc. 
+ aU, aTrans := untranspose(a) + bU, bTrans := untranspose(b) + switch rma := aU.(type) { + case RawTriangular: + side := blas.Left + tA := blas.NoTrans + if aTrans { + tA = blas.Trans + } + + switch rm := bU.(type) { + case RawMatrixer: + if m != bU || bTrans { + if m == bU || m.checkOverlap(rm.RawMatrix()) { + tmp := getWorkspace(br, bc, false) + tmp.Copy(b) + m.Copy(tmp) + putWorkspace(tmp) + break + } + m.Copy(b) + } + default: + if m != bU { + m.Copy(b) + } else if bTrans { + // m and b share data so Copy cannot be used directly. + tmp := getWorkspace(br, bc, false) + tmp.Copy(b) + m.Copy(tmp) + putWorkspace(tmp) + } + } + + rm := rma.RawTriangular() + blas64.Trsm(side, tA, 1, rm, m.mat) + work := getFloats(3*rm.N, false) + iwork := getInts(rm.N, false) + cond := lapack64.Trcon(CondNorm, rm, work, iwork) + putFloats(work) + putInts(iwork) + if cond > ConditionTolerance { + return Condition(cond) + } + return nil + } + + switch { + case ar == ac: + if a == b { + // x = I. + if ar == 1 { + m.mat.Data[0] = 1 + return nil + } + for i := 0; i < ar; i++ { + v := m.mat.Data[i*m.mat.Stride : i*m.mat.Stride+ac] + zero(v) + v[i] = 1 + } + return nil + } + var lu LU + lu.Factorize(a) + return lu.SolveTo(m, false, b) + case ar > ac: + var qr QR + qr.Factorize(a) + return qr.SolveTo(m, false, b) + default: + var lq LQ + lq.Factorize(a) + return lq.SolveTo(m, false, b) + } +} + +// SolveVec finds a minimum-norm solution to a system of linear equations defined +// by the matrix a and the right-hand side column vector b. If A is singular or +// near-singular, a Condition error is returned. See the documentation for +// Dense.Solve for more information. +func (v *VecDense) SolveVec(a Matrix, b Vector) error { + if _, bc := b.Dims(); bc != 1 { + panic(ErrShape) + } + _, c := a.Dims() + + // The Solve implementation is non-trivial, so rather than duplicate the code, + // instead recast the VecDenses as Dense and call the matrix code. + + if rv, ok := b.(RawVectorer); ok { + bmat := rv.RawVector() + if v != b { + v.checkOverlap(bmat) + } + v.reuseAs(c) + m := v.asDense() + // We conditionally create bm as m when b and v are identical + // to prevent the overlap detection code from identifying m + // and bm as overlapping but not identical. + bm := m + if v != b { + b := VecDense{mat: bmat} + bm = b.asDense() + } + return m.Solve(a, bm) + } + + v.reuseAs(c) + m := v.asDense() + return m.Solve(a, b) +} diff --git a/vendor/gonum.org/v1/gonum/mat/svd.go b/vendor/gonum.org/v1/gonum/mat/svd.go new file mode 100644 index 0000000..2f55c41 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/svd.go @@ -0,0 +1,247 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack" + "gonum.org/v1/gonum/lapack/lapack64" +) + +// SVD is a type for creating and using the Singular Value Decomposition (SVD) +// of a matrix. +type SVD struct { + kind SVDKind + + s []float64 + u blas64.General + vt blas64.General +} + +// SVDKind specifies the treatment of singular vectors during an SVD +// factorization. +type SVDKind int + +const ( + // SVDNone specifies that no singular vectors should be computed during + // the decomposition. + SVDNone SVDKind = 0 + + // SVDThinU specifies the thin decomposition for U should be computed. + SVDThinU SVDKind = 1 << (iota - 1) + // SVDFullU specifies the full decomposition for U should be computed. 
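A sketch of the high-level solvers above, which dispatch to LU, QR or LQ according to the shape of a; NewDense, NewVecDense and Formatted are assumed:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Square system: Dense.Solve/VecDense.SolveVec route through the LU path.
	a := mat.NewDense(2, 2, []float64{2, 1, 1, 3})
	b := mat.NewVecDense(2, []float64{3, 5})

	var x mat.VecDense
	if err := x.SolveVec(a, b); err != nil {
		fmt.Println("near-singular:", err)
	}
	fmt.Printf("x = %v\n", mat.Formatted(&x))

	// A tall a would go through QR (least squares) and a wide a through LQ
	// (minimum norm), matching the dispatch at the end of Dense.Solve.
}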
+ SVDFullU + // SVDThinV specifies the thin decomposition for V should be computed. + SVDThinV + // SVDFullV specifies the full decomposition for V should be computed. + SVDFullV + + // SVDThin is a convenience value for computing both thin vectors. + SVDThin SVDKind = SVDThinU | SVDThinV + // SVDThin is a convenience value for computing both full vectors. + SVDFull SVDKind = SVDFullU | SVDFullV +) + +// succFact returns whether the receiver contains a successful factorization. +func (svd *SVD) succFact() bool { + return len(svd.s) != 0 +} + +// Factorize computes the singular value decomposition (SVD) of the input matrix A. +// The singular values of A are computed in all cases, while the singular +// vectors are optionally computed depending on the input kind. +// +// The full singular value decomposition (kind == SVDFull) is a factorization +// of an m×n matrix A of the form +// A = U * Σ * V^T +// where Σ is an m×n diagonal matrix, U is an m×m orthogonal matrix, and V is an +// n×n orthogonal matrix. The diagonal elements of Σ are the singular values of A. +// The first min(m,n) columns of U and V are, respectively, the left and right +// singular vectors of A. +// +// Significant storage space can be saved by using the thin representation of +// the SVD (kind == SVDThin) instead of the full SVD, especially if +// m >> n or m << n. The thin SVD finds +// A = U~ * Σ * V~^T +// where U~ is of size m×min(m,n), Σ is a diagonal matrix of size min(m,n)×min(m,n) +// and V~ is of size n×min(m,n). +// +// Factorize returns whether the decomposition succeeded. If the decomposition +// failed, routines that require a successful factorization will panic. +func (svd *SVD) Factorize(a Matrix, kind SVDKind) (ok bool) { + // kill previous factorization + svd.s = svd.s[:0] + svd.kind = kind + + m, n := a.Dims() + var jobU, jobVT lapack.SVDJob + + // TODO(btracey): This code should be modified to have the smaller + // matrix written in-place into aCopy when the lapack/native/dgesvd + // implementation is complete. + switch { + case kind&SVDFullU != 0: + jobU = lapack.SVDAll + svd.u = blas64.General{ + Rows: m, + Cols: m, + Stride: m, + Data: use(svd.u.Data, m*m), + } + case kind&SVDThinU != 0: + jobU = lapack.SVDStore + svd.u = blas64.General{ + Rows: m, + Cols: min(m, n), + Stride: min(m, n), + Data: use(svd.u.Data, m*min(m, n)), + } + default: + jobU = lapack.SVDNone + } + switch { + case kind&SVDFullV != 0: + svd.vt = blas64.General{ + Rows: n, + Cols: n, + Stride: n, + Data: use(svd.vt.Data, n*n), + } + jobVT = lapack.SVDAll + case kind&SVDThinV != 0: + svd.vt = blas64.General{ + Rows: min(m, n), + Cols: n, + Stride: n, + Data: use(svd.vt.Data, min(m, n)*n), + } + jobVT = lapack.SVDStore + default: + jobVT = lapack.SVDNone + } + + // A is destroyed on call, so copy the matrix. + aCopy := DenseCopyOf(a) + svd.kind = kind + svd.s = use(svd.s, min(m, n)) + + work := []float64{0} + lapack64.Gesvd(jobU, jobVT, aCopy.mat, svd.u, svd.vt, svd.s, work, -1) + work = getFloats(int(work[0]), false) + ok = lapack64.Gesvd(jobU, jobVT, aCopy.mat, svd.u, svd.vt, svd.s, work, len(work)) + putFloats(work) + if !ok { + svd.kind = 0 + } + return ok +} + +// Kind returns the SVDKind of the decomposition. If no decomposition has been +// computed, Kind returns -1. +func (svd *SVD) Kind() SVDKind { + if !svd.succFact() { + return -1 + } + return svd.kind +} + +// Cond returns the 2-norm condition number for the factorized matrix. Cond will +// panic if the receiver does not contain a successful factorization. 
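For orientation, a minimal usage sketch of the SVD type vendored above: factorize, read the singular values, and extract the thin factors. The 3×2 input, the `main` wrapper, and the printed checks are illustrative assumptions and are not part of this patch.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// A small 3×2 matrix whose thin SVD we want.
	a := mat.NewDense(3, 2, []float64{
		2, 0,
		0, 3,
		0, 0,
	})

	var svd mat.SVD
	if ok := svd.Factorize(a, mat.SVDThin); !ok {
		panic("svd: factorization failed")
	}

	// Singular values in descending order.
	fmt.Println(svd.Values(nil)) // [3 2]

	// Passing nil asks UTo/VTo to allocate the destination.
	u := svd.UTo(nil)
	v := svd.VTo(nil)
	fmt.Println(u.Dims()) // 3 2 (thin U)
	fmt.Println(v.Dims()) // 2 2 (V)
}
```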
+func (svd *SVD) Cond() float64 { + if !svd.succFact() { + panic(badFact) + } + return svd.s[0] / svd.s[len(svd.s)-1] +} + +// Values returns the singular values of the factorized matrix in descending order. +// +// If the input slice is non-nil, the values will be stored in-place into +// the slice. In this case, the slice must have length min(m,n), and Values will +// panic with ErrSliceLengthMismatch otherwise. If the input slice is nil, a new +// slice of the appropriate length will be allocated and returned. +// +// Values will panic if the receiver does not contain a successful factorization. +func (svd *SVD) Values(s []float64) []float64 { + if !svd.succFact() { + panic(badFact) + } + if s == nil { + s = make([]float64, len(svd.s)) + } + if len(s) != len(svd.s) { + panic(ErrSliceLengthMismatch) + } + copy(s, svd.s) + return s +} + +// UTo extracts the matrix U from the singular value decomposition. The first +// min(m,n) columns are the left singular vectors and correspond to the singular +// values as returned from SVD.Values. +// +// If dst is not nil, U is stored in-place into dst, and dst must have size +// m×m if the full U was computed, size m×min(m,n) if the thin U was computed, +// and UTo panics otherwise. If dst is nil, a new matrix of the appropriate size +// is allocated and returned. +func (svd *SVD) UTo(dst *Dense) *Dense { + if !svd.succFact() { + panic(badFact) + } + kind := svd.kind + if kind&SVDThinU == 0 && kind&SVDFullU == 0 { + panic("svd: u not computed during factorization") + } + r := svd.u.Rows + c := svd.u.Cols + if dst == nil { + dst = NewDense(r, c, nil) + } else { + dst.reuseAs(r, c) + } + + tmp := &Dense{ + mat: svd.u, + capRows: r, + capCols: c, + } + dst.Copy(tmp) + + return dst +} + +// VTo extracts the matrix V from the singular value decomposition. The first +// min(m,n) columns are the right singular vectors and correspond to the singular +// values as returned from SVD.Values. +// +// If dst is not nil, V is stored in-place into dst, and dst must have size +// n×n if the full V was computed, size n×min(m,n) if the thin V was computed, +// and VTo panics otherwise. If dst is nil, a new matrix of the appropriate size +// is allocated and returned. +func (svd *SVD) VTo(dst *Dense) *Dense { + if !svd.succFact() { + panic(badFact) + } + kind := svd.kind + if kind&SVDThinU == 0 && kind&SVDFullV == 0 { + panic("svd: v not computed during factorization") + } + r := svd.vt.Rows + c := svd.vt.Cols + if dst == nil { + dst = NewDense(c, r, nil) + } else { + dst.reuseAs(c, r) + } + + tmp := &Dense{ + mat: svd.vt, + capRows: r, + capCols: c, + } + dst.Copy(tmp.T()) + + return dst +} diff --git a/vendor/gonum.org/v1/gonum/mat/symband.go b/vendor/gonum.org/v1/gonum/mat/symband.go new file mode 100644 index 0000000..cc4be19 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/symband.go @@ -0,0 +1,231 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + symBandDense *SymBandDense + _ Matrix = symBandDense + _ Symmetric = symBandDense + _ Banded = symBandDense + _ SymBanded = symBandDense + _ RawSymBander = symBandDense + _ MutableSymBanded = symBandDense + + _ NonZeroDoer = symBandDense + _ RowNonZeroDoer = symBandDense + _ ColNonZeroDoer = symBandDense +) + +// SymBandDense represents a symmetric band matrix in dense storage format. 
+type SymBandDense struct { + mat blas64.SymmetricBand +} + +// SymBanded is a symmetric band matrix interface type. +type SymBanded interface { + Banded + + // Symmetric returns the number of rows/columns in the matrix. + Symmetric() int + + // SymBand returns the number of rows/columns in the matrix, and the size of + // the bandwidth. + SymBand() (n, k int) +} + +// MutableSymBanded is a symmetric band matrix interface type that allows elements +// to be altered. +type MutableSymBanded interface { + SymBanded + SetSymBand(i, j int, v float64) +} + +// A RawSymBander can return a blas64.SymmetricBand representation of the receiver. +// Changes to the blas64.SymmetricBand.Data slice will be reflected in the original +// matrix, changes to the N, K, Stride and Uplo fields will not. +type RawSymBander interface { + RawSymBand() blas64.SymmetricBand +} + +// NewSymBandDense creates a new SymBand matrix with n rows and columns. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == n*(k+1), +// data is used as the backing slice, and changes to the elements of the returned +// SymBandDense will be reflected in data. If neither of these is true, NewSymBandDense +// will panic. k must be at least zero and less than n, otherwise NewSymBandDense will panic. +// +// The data must be arranged in row-major order constructed by removing the zeros +// from the rows outside the band and aligning the diagonals. SymBandDense matrices +// are stored in the upper triangle. For example, the matrix +// 1 2 3 0 0 0 +// 2 4 5 6 0 0 +// 3 5 7 8 9 0 +// 0 6 8 10 11 12 +// 0 0 9 11 13 14 +// 0 0 0 12 14 15 +// becomes (* entries are never accessed) +// 1 2 3 +// 4 5 6 +// 7 8 9 +// 10 11 12 +// 13 14 * +// 15 * * +// which is passed to NewSymBandDense as []float64{1, 2, ..., 15, *, *, *} with k=2. +// Only the values in the band portion of the matrix are used. +func NewSymBandDense(n, k int, data []float64) *SymBandDense { + if n <= 0 || k < 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if k+1 > n { + panic("mat: band out of range") + } + bc := k + 1 + if data != nil && len(data) != n*bc { + panic(ErrShape) + } + if data == nil { + data = make([]float64, n*bc) + } + return &SymBandDense{ + mat: blas64.SymmetricBand{ + N: n, + K: k, + Stride: bc, + Uplo: blas.Upper, + Data: data, + }, + } +} + +// Dims returns the number of rows and columns in the matrix. +func (s *SymBandDense) Dims() (r, c int) { + return s.mat.N, s.mat.N +} + +// Symmetric returns the size of the receiver. +func (s *SymBandDense) Symmetric() int { + return s.mat.N +} + +// Bandwidth returns the bandwidths of the matrix. +func (s *SymBandDense) Bandwidth() (kl, ku int) { + return s.mat.K, s.mat.K +} + +// SymBand returns the number of rows/columns in the matrix, and the size of +// the bandwidth. +func (s *SymBandDense) SymBand() (n, k int) { + return s.mat.N, s.mat.K +} + +// T implements the Matrix interface. Symmetric matrices, by definition, are +// equal to their transpose, and this is a no-op. +func (s *SymBandDense) T() Matrix { + return s +} + +// TBand implements the Banded interface. +func (s *SymBandDense) TBand() Banded { + return s +} + +// RawSymBand returns the underlying blas64.SymBand used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in returned blas64.SymBand. 
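To make the packed band layout described in the NewSymBandDense documentation above concrete, here is a short sketch constructing that 6×6, k=2 matrix. The zero padding values and the printed checks are assumptions for illustration; the padding entries are never read.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Row-major packed upper band storage with diagonals aligned,
	// matching the 6×6, k=2 example in the NewSymBandDense docs.
	data := []float64{
		1, 2, 3,
		4, 5, 6,
		7, 8, 9,
		10, 11, 12,
		13, 14, 0,
		15, 0, 0,
	}
	s := mat.NewSymBandDense(6, 2, data)

	fmt.Println(s.At(0, 2)) // 3, inside the band
	fmt.Println(s.At(2, 0)) // 3, by symmetry
	fmt.Println(s.At(0, 3)) // 0, outside the band
	fmt.Println(s.Trace())  // 50 = 1+4+7+10+13+15
}
```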
+func (s *SymBandDense) RawSymBand() blas64.SymmetricBand { + return s.mat +} + +// SetRawSymBand sets the underlying blas64.SymmetricBand used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in the input. +// +// The supplied SymmetricBand must use blas.Upper storage format. +func (s *SymBandDense) SetRawSymBand(mat blas64.SymmetricBand) { + if mat.Uplo != blas.Upper { + panic("mat: blas64.SymmetricBand does not have blas.Upper storage") + } + s.mat = mat +} + +// Zero sets all of the matrix elements to zero. +func (s *SymBandDense) Zero() { + for i := 0; i < s.mat.N; i++ { + u := min(1+s.mat.K, s.mat.N-i) + zero(s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+u]) + } +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (s *SymBandDense) DiagView() Diagonal { + n := s.mat.N + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: s.mat.Stride, + Data: s.mat.Data[:(n-1)*s.mat.Stride+1], + }, + } +} + +// DoNonZero calls the function fn for each of the non-zero elements of s. The function fn +// takes a row/column index and the element value of s at (i, j). +func (s *SymBandDense) DoNonZero(fn func(i, j int, v float64)) { + for i := 0; i < s.mat.N; i++ { + for j := max(0, i-s.mat.K); j < min(s.mat.N, i+s.mat.K+1); j++ { + v := s.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } +} + +// DoRowNonZero calls the function fn for each of the non-zero elements of row i of s. The function fn +// takes a row/column index and the element value of s at (i, j). +func (s *SymBandDense) DoRowNonZero(i int, fn func(i, j int, v float64)) { + if i < 0 || s.mat.N <= i { + panic(ErrRowAccess) + } + for j := max(0, i-s.mat.K); j < min(s.mat.N, i+s.mat.K+1); j++ { + v := s.at(i, j) + if v != 0 { + fn(i, j, v) + } + } +} + +// DoColNonZero calls the function fn for each of the non-zero elements of column j of s. The function fn +// takes a row/column index and the element value of s at (i, j). +func (s *SymBandDense) DoColNonZero(j int, fn func(i, j int, v float64)) { + if j < 0 || s.mat.N <= j { + panic(ErrColAccess) + } + for i := 0; i < s.mat.N; i++ { + if i-s.mat.K <= j && j < i+s.mat.K+1 { + v := s.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } +} + +// Trace returns the trace. +func (s *SymBandDense) Trace() float64 { + rb := s.RawSymBand() + var tr float64 + for i := 0; i < rb.N; i++ { + tr += rb.Data[i*rb.Stride] + } + return tr +} diff --git a/vendor/gonum.org/v1/gonum/mat/symmetric.go b/vendor/gonum.org/v1/gonum/mat/symmetric.go new file mode 100644 index 0000000..2ea5bdb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/symmetric.go @@ -0,0 +1,602 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + symDense *SymDense + + _ Matrix = symDense + _ Symmetric = symDense + _ RawSymmetricer = symDense + _ MutableSymmetric = symDense +) + +const ( + badSymTriangle = "mat: blas64.Symmetric not upper" + badSymCap = "mat: bad capacity for SymDense" +) + +// SymDense is a symmetric matrix that uses dense storage. SymDense +// matrices are stored in the upper triangle. +type SymDense struct { + mat blas64.Symmetric + cap int +} + +// Symmetric represents a symmetric matrix (where the element at {i, j} equals +// the element at {j, i}). Symmetric matrices are always square. 
+type Symmetric interface { + Matrix + // Symmetric returns the number of rows/columns in the matrix. + Symmetric() int +} + +// A RawSymmetricer can return a view of itself as a BLAS Symmetric matrix. +type RawSymmetricer interface { + RawSymmetric() blas64.Symmetric +} + +// A MutableSymmetric can set elements of a symmetric matrix. +type MutableSymmetric interface { + Symmetric + SetSym(i, j int, v float64) +} + +// NewSymDense creates a new Symmetric matrix with n rows and columns. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == n*n, data is +// used as the backing slice, and changes to the elements of the returned SymDense +// will be reflected in data. If neither of these is true, NewSymDense will panic. +// NewSymDense will panic if n is zero. +// +// The data must be arranged in row-major order, i.e. the (i*c + j)-th +// element in the data slice is the {i, j}-th element in the matrix. +// Only the values in the upper triangular portion of the matrix are used. +func NewSymDense(n int, data []float64) *SymDense { + if n <= 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if data != nil && n*n != len(data) { + panic(ErrShape) + } + if data == nil { + data = make([]float64, n*n) + } + return &SymDense{ + mat: blas64.Symmetric{ + N: n, + Stride: n, + Data: data, + Uplo: blas.Upper, + }, + cap: n, + } +} + +// Dims returns the number of rows and columns in the matrix. +func (s *SymDense) Dims() (r, c int) { + return s.mat.N, s.mat.N +} + +// Caps returns the number of rows and columns in the backing matrix. +func (s *SymDense) Caps() (r, c int) { + return s.cap, s.cap +} + +// T returns the receiver, the transpose of a symmetric matrix. +func (s *SymDense) T() Matrix { + return s +} + +// Symmetric implements the Symmetric interface and returns the number of rows +// and columns in the matrix. +func (s *SymDense) Symmetric() int { + return s.mat.N +} + +// RawSymmetric returns the matrix as a blas64.Symmetric. The returned +// value must be stored in upper triangular format. +func (s *SymDense) RawSymmetric() blas64.Symmetric { + return s.mat +} + +// SetRawSymmetric sets the underlying blas64.Symmetric used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in the input. +// +// The supplied Symmetric must use blas.Upper storage format. +func (s *SymDense) SetRawSymmetric(mat blas64.Symmetric) { + if mat.Uplo != blas.Upper { + panic(badSymTriangle) + } + s.mat = mat +} + +// Reset zeros the dimensions of the matrix so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (s *SymDense) Reset() { + // N and Stride must be zeroed in unison. + s.mat.N, s.mat.Stride = 0, 0 + s.mat.Data = s.mat.Data[:0] +} + +// Zero sets all of the matrix elements to zero. +func (s *SymDense) Zero() { + for i := 0; i < s.mat.N; i++ { + zero(s.mat.Data[i*s.mat.Stride+i : i*s.mat.Stride+s.mat.N]) + } +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized matrices can be the +// receiver for size-restricted operations. SymDense matrices can be zeroed using Reset. +func (s *SymDense) IsZero() bool { + // It must be the case that m.Dims() returns + // zeros in this case. See comment in Reset(). + return s.mat.N == 0 +} + +// reuseAs resizes an empty matrix to a n×n matrix, +// or checks that a non-empty matrix is n×n. 
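As a small aside on the constructor documented above, a hedged sketch of NewSymDense and SetSym. The 3×3 values are assumptions chosen only to show that reads below the diagonal mirror the upper triangle.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Only the upper triangular portion of data is used; the zeros
	// written below the diagonal here are simply ignored.
	s := mat.NewSymDense(3, []float64{
		2, 1, 0,
		0, 2, 1,
		0, 0, 2,
	})
	fmt.Println(s.At(1, 0)) // 1, mirrored from the upper triangle

	// SetSym updates the element at {i, j} and, by symmetry, {j, i}.
	s.SetSym(0, 2, 5)
	fmt.Println(s.At(2, 0)) // 5
}
```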
+func (s *SymDense) reuseAs(n int) { + if n == 0 { + panic(ErrZeroLength) + } + if s.mat.N > s.cap { + panic(badSymCap) + } + if s.IsZero() { + s.mat = blas64.Symmetric{ + N: n, + Stride: n, + Data: use(s.mat.Data, n*n), + Uplo: blas.Upper, + } + s.cap = n + return + } + if s.mat.Uplo != blas.Upper { + panic(badSymTriangle) + } + if s.mat.N != n { + panic(ErrShape) + } +} + +func (s *SymDense) isolatedWorkspace(a Symmetric) (w *SymDense, restore func()) { + n := a.Symmetric() + if n == 0 { + panic(ErrZeroLength) + } + w = getWorkspaceSym(n, false) + return w, func() { + s.CopySym(w) + putWorkspaceSym(w) + } +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (s *SymDense) DiagView() Diagonal { + n := s.mat.N + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: s.mat.Stride + 1, + Data: s.mat.Data[:(n-1)*s.mat.Stride+n], + }, + } +} + +func (s *SymDense) AddSym(a, b Symmetric) { + n := a.Symmetric() + if n != b.Symmetric() { + panic(ErrShape) + } + s.reuseAs(n) + + if a, ok := a.(RawSymmetricer); ok { + if b, ok := b.(RawSymmetricer); ok { + amat, bmat := a.RawSymmetric(), b.RawSymmetric() + if s != a { + s.checkOverlap(generalFromSymmetric(amat)) + } + if s != b { + s.checkOverlap(generalFromSymmetric(bmat)) + } + for i := 0; i < n; i++ { + btmp := bmat.Data[i*bmat.Stride+i : i*bmat.Stride+n] + stmp := s.mat.Data[i*s.mat.Stride+i : i*s.mat.Stride+n] + for j, v := range amat.Data[i*amat.Stride+i : i*amat.Stride+n] { + stmp[j] = v + btmp[j] + } + } + return + } + } + + s.checkOverlapMatrix(a) + s.checkOverlapMatrix(b) + for i := 0; i < n; i++ { + stmp := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n] + for j := i; j < n; j++ { + stmp[j] = a.At(i, j) + b.At(i, j) + } + } +} + +func (s *SymDense) CopySym(a Symmetric) int { + n := a.Symmetric() + n = min(n, s.mat.N) + if n == 0 { + return 0 + } + switch a := a.(type) { + case RawSymmetricer: + amat := a.RawSymmetric() + if amat.Uplo != blas.Upper { + panic(badSymTriangle) + } + for i := 0; i < n; i++ { + copy(s.mat.Data[i*s.mat.Stride+i:i*s.mat.Stride+n], amat.Data[i*amat.Stride+i:i*amat.Stride+n]) + } + default: + for i := 0; i < n; i++ { + stmp := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n] + for j := i; j < n; j++ { + stmp[j] = a.At(i, j) + } + } + } + return n +} + +// SymRankOne performs a symetric rank-one update to the matrix a and stores +// the result in the receiver +// s = a + alpha * x * x' +func (s *SymDense) SymRankOne(a Symmetric, alpha float64, x Vector) { + n, c := x.Dims() + if a.Symmetric() != n || c != 1 { + panic(ErrShape) + } + s.reuseAs(n) + + if s != a { + if rs, ok := a.(RawSymmetricer); ok { + s.checkOverlap(generalFromSymmetric(rs.RawSymmetric())) + } + s.CopySym(a) + } + + xU, _ := untranspose(x) + if rv, ok := xU.(RawVectorer); ok { + xmat := rv.RawVector() + s.checkOverlap((&VecDense{mat: xmat}).asGeneral()) + blas64.Syr(alpha, xmat, s.mat) + return + } + + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + s.set(i, j, s.at(i, j)+alpha*x.AtVec(i)*x.AtVec(j)) + } + } +} + +// SymRankK performs a symmetric rank-k update to the matrix a and stores the +// result into the receiver. If a is zero, see SymOuterK. 
+// s = a + alpha * x * x' +func (s *SymDense) SymRankK(a Symmetric, alpha float64, x Matrix) { + n := a.Symmetric() + r, _ := x.Dims() + if r != n { + panic(ErrShape) + } + xMat, aTrans := untranspose(x) + var g blas64.General + if rm, ok := xMat.(RawMatrixer); ok { + g = rm.RawMatrix() + } else { + g = DenseCopyOf(x).mat + aTrans = false + } + if a != s { + if rs, ok := a.(RawSymmetricer); ok { + s.checkOverlap(generalFromSymmetric(rs.RawSymmetric())) + } + s.reuseAs(n) + s.CopySym(a) + } + t := blas.NoTrans + if aTrans { + t = blas.Trans + } + blas64.Syrk(t, alpha, g, 1, s.mat) +} + +// SymOuterK calculates the outer product of x with itself and stores +// the result into the receiver. It is equivalent to the matrix +// multiplication +// s = alpha * x * x'. +// In order to update an existing matrix, see SymRankOne. +func (s *SymDense) SymOuterK(alpha float64, x Matrix) { + n, _ := x.Dims() + switch { + case s.IsZero(): + s.mat = blas64.Symmetric{ + N: n, + Stride: n, + Data: useZeroed(s.mat.Data, n*n), + Uplo: blas.Upper, + } + s.cap = n + s.SymRankK(s, alpha, x) + case s.mat.Uplo != blas.Upper: + panic(badSymTriangle) + case s.mat.N == n: + if s == x { + w := getWorkspaceSym(n, true) + w.SymRankK(w, alpha, x) + s.CopySym(w) + putWorkspaceSym(w) + } else { + switch r := x.(type) { + case RawMatrixer: + s.checkOverlap(r.RawMatrix()) + case RawSymmetricer: + s.checkOverlap(generalFromSymmetric(r.RawSymmetric())) + case RawTriangular: + s.checkOverlap(generalFromTriangular(r.RawTriangular())) + } + // Only zero the upper triangle. + for i := 0; i < n; i++ { + ri := i * s.mat.Stride + zero(s.mat.Data[ri+i : ri+n]) + } + s.SymRankK(s, alpha, x) + } + default: + panic(ErrShape) + } +} + +// RankTwo performs a symmmetric rank-two update to the matrix a and stores +// the result in the receiver +// m = a + alpha * (x * y' + y * x') +func (s *SymDense) RankTwo(a Symmetric, alpha float64, x, y Vector) { + n := s.mat.N + xr, xc := x.Dims() + if xr != n || xc != 1 { + panic(ErrShape) + } + yr, yc := y.Dims() + if yr != n || yc != 1 { + panic(ErrShape) + } + + if s != a { + if rs, ok := a.(RawSymmetricer); ok { + s.checkOverlap(generalFromSymmetric(rs.RawSymmetric())) + } + } + + var xmat, ymat blas64.Vector + fast := true + xU, _ := untranspose(x) + if rv, ok := xU.(RawVectorer); ok { + xmat = rv.RawVector() + s.checkOverlap((&VecDense{mat: xmat}).asGeneral()) + } else { + fast = false + } + yU, _ := untranspose(y) + if rv, ok := yU.(RawVectorer); ok { + ymat = rv.RawVector() + s.checkOverlap((&VecDense{mat: ymat}).asGeneral()) + } else { + fast = false + } + + if s != a { + if rs, ok := a.(RawSymmetricer); ok { + s.checkOverlap(generalFromSymmetric(rs.RawSymmetric())) + } + s.reuseAs(n) + s.CopySym(a) + } + + if fast { + if s != a { + s.reuseAs(n) + s.CopySym(a) + } + blas64.Syr2(alpha, xmat, ymat, s.mat) + return + } + + for i := 0; i < n; i++ { + s.reuseAs(n) + for j := i; j < n; j++ { + s.set(i, j, a.At(i, j)+alpha*(x.AtVec(i)*y.AtVec(j)+y.AtVec(i)*x.AtVec(j))) + } + } +} + +// ScaleSym multiplies the elements of a by f, placing the result in the receiver. 
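A brief sketch of the rank-one update described above, s = a + alpha * x * x'. The 2×2 identity input and the scaling factor are assumptions for illustration; a zero-valued receiver is grown on first use.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	a := mat.NewSymDense(2, []float64{
		1, 0,
		0, 1,
	})
	x := mat.NewVecDense(2, []float64{1, 2})

	// s = a + 0.5 * x * x'
	var s mat.SymDense
	s.SymRankOne(a, 0.5, x)

	fmt.Println(s.At(0, 0)) // 1.5 = 1 + 0.5*1*1
	fmt.Println(s.At(0, 1)) // 1   = 0 + 0.5*1*2
	fmt.Println(s.At(1, 1)) // 3   = 1 + 0.5*2*2
}
```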
+func (s *SymDense) ScaleSym(f float64, a Symmetric) { + n := a.Symmetric() + s.reuseAs(n) + if a, ok := a.(RawSymmetricer); ok { + amat := a.RawSymmetric() + if s != a { + s.checkOverlap(generalFromSymmetric(amat)) + } + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + s.mat.Data[i*s.mat.Stride+j] = f * amat.Data[i*amat.Stride+j] + } + } + return + } + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + s.mat.Data[i*s.mat.Stride+j] = f * a.At(i, j) + } + } +} + +// SubsetSym extracts a subset of the rows and columns of the matrix a and stores +// the result in-place into the receiver. The resulting matrix size is +// len(set)×len(set). Specifically, at the conclusion of SubsetSym, +// s.At(i, j) equals a.At(set[i], set[j]). Note that the supplied set does not +// have to be a strict subset, dimension repeats are allowed. +func (s *SymDense) SubsetSym(a Symmetric, set []int) { + n := len(set) + na := a.Symmetric() + s.reuseAs(n) + var restore func() + if a == s { + s, restore = s.isolatedWorkspace(a) + defer restore() + } + + if a, ok := a.(RawSymmetricer); ok { + raw := a.RawSymmetric() + if s != a { + s.checkOverlap(generalFromSymmetric(raw)) + } + for i := 0; i < n; i++ { + ssub := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n] + r := set[i] + rsub := raw.Data[r*raw.Stride : r*raw.Stride+na] + for j := i; j < n; j++ { + c := set[j] + if r <= c { + ssub[j] = rsub[c] + } else { + ssub[j] = raw.Data[c*raw.Stride+r] + } + } + } + return + } + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + s.mat.Data[i*s.mat.Stride+j] = a.At(set[i], set[j]) + } + } +} + +// SliceSym returns a new Matrix that shares backing data with the receiver. +// The returned matrix starts at {i,i} of the receiver and extends k-i rows +// and columns. The final row and column in the resulting matrix is k-1. +// SliceSym panics with ErrIndexOutOfRange if the slice is outside the +// capacity of the receiver. +func (s *SymDense) SliceSym(i, k int) Symmetric { + sz := s.cap + if i < 0 || sz < i || k < i || sz < k { + panic(ErrIndexOutOfRange) + } + v := *s + v.mat.Data = s.mat.Data[i*s.mat.Stride+i : (k-1)*s.mat.Stride+k] + v.mat.N = k - i + v.cap = s.cap - i + return &v +} + +// Trace returns the trace of the matrix. +func (s *SymDense) Trace() float64 { + // TODO(btracey): could use internal asm sum routine. + var v float64 + for i := 0; i < s.mat.N; i++ { + v += s.mat.Data[i*s.mat.Stride+i] + } + return v +} + +// GrowSym returns the receiver expanded by n rows and n columns. If the +// dimensions of the expanded matrix are outside the capacity of the receiver +// a new allocation is made, otherwise not. Note that the receiver itself is +// not modified during the call to GrowSquare. +func (s *SymDense) GrowSym(n int) Symmetric { + if n < 0 { + panic(ErrIndexOutOfRange) + } + if n == 0 { + return s + } + var v SymDense + n += s.mat.N + if n > s.cap { + v.mat = blas64.Symmetric{ + N: n, + Stride: n, + Uplo: blas.Upper, + Data: make([]float64, n*n), + } + v.cap = n + // Copy elements, including those not currently visible. Use a temporary + // structure to avoid modifying the receiver. + var tmp SymDense + tmp.mat = blas64.Symmetric{ + N: s.cap, + Stride: s.mat.Stride, + Data: s.mat.Data, + Uplo: s.mat.Uplo, + } + tmp.cap = s.cap + v.CopySym(&tmp) + return &v + } + v.mat = blas64.Symmetric{ + N: n, + Stride: s.mat.Stride, + Uplo: blas.Upper, + Data: s.mat.Data[:(n-1)*s.mat.Stride+n], + } + v.cap = s.cap + return &v +} + +// PowPSD computes a^pow where a is a positive symmetric definite matrix. 
+// +// PowPSD returns an error if the matrix is not not positive symmetric definite +// or the Eigendecomposition is not successful. +func (s *SymDense) PowPSD(a Symmetric, pow float64) error { + dim := a.Symmetric() + s.reuseAs(dim) + + var eigen EigenSym + ok := eigen.Factorize(a, true) + if !ok { + return ErrFailedEigen + } + values := eigen.Values(nil) + for i, v := range values { + if v <= 0 { + return ErrNotPSD + } + values[i] = math.Pow(v, pow) + } + u := eigen.VectorsTo(nil) + + s.SymOuterK(values[0], u.ColView(0)) + + var v VecDense + for i := 1; i < dim; i++ { + v.ColViewOf(u, i) + s.SymRankOne(s, values[i], &v) + } + return nil +} diff --git a/vendor/gonum.org/v1/gonum/mat/triangular.go b/vendor/gonum.org/v1/gonum/mat/triangular.go new file mode 100644 index 0000000..c7d6e47 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/triangular.go @@ -0,0 +1,683 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "math" + + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/lapack/lapack64" +) + +var ( + triDense *TriDense + _ Matrix = triDense + _ Triangular = triDense + _ RawTriangular = triDense + _ MutableTriangular = triDense + + _ NonZeroDoer = triDense + _ RowNonZeroDoer = triDense + _ ColNonZeroDoer = triDense +) + +const badTriCap = "mat: bad capacity for TriDense" + +// TriDense represents an upper or lower triangular matrix in dense storage +// format. +type TriDense struct { + mat blas64.Triangular + cap int +} + +// Triangular represents a triangular matrix. Triangular matrices are always square. +type Triangular interface { + Matrix + // Triangle returns the number of rows/columns in the matrix and its + // orientation. + Triangle() (n int, kind TriKind) + + // TTri is the equivalent of the T() method in the Matrix interface but + // guarantees the transpose is of triangular type. + TTri() Triangular +} + +// A RawTriangular can return a blas64.Triangular representation of the receiver. +// Changes to the blas64.Triangular.Data slice will be reflected in the original +// matrix, changes to the N, Stride, Uplo and Diag fields will not. +type RawTriangular interface { + RawTriangular() blas64.Triangular +} + +// A MutableTriangular can set elements of a triangular matrix. +type MutableTriangular interface { + Triangular + SetTri(i, j int, v float64) +} + +var ( + _ Matrix = TransposeTri{} + _ Triangular = TransposeTri{} + _ UntransposeTrier = TransposeTri{} +) + +// TransposeTri is a type for performing an implicit transpose of a Triangular +// matrix. It implements the Triangular interface, returning values from the +// transpose of the matrix within. +type TransposeTri struct { + Triangular Triangular +} + +// At returns the value of the element at row i and column j of the transposed +// matrix, that is, row j and column i of the Triangular field. +func (t TransposeTri) At(i, j int) float64 { + return t.Triangular.At(j, i) +} + +// Dims returns the dimensions of the transposed matrix. Triangular matrices are +// square and thus this is the same size as the original Triangular. +func (t TransposeTri) Dims() (r, c int) { + c, r = t.Triangular.Dims() + return r, c +} + +// T performs an implicit transpose by returning the Triangular field. +func (t TransposeTri) T() Matrix { + return t.Triangular +} + +// Triangle returns the number of rows/columns in the matrix and its orientation. 
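Referring back to SymDense.PowPSD above, a hedged sketch computing a principal matrix square root by raising a positive definite matrix to the power 0.5. The diagonal input is an assumption chosen so the result is easy to check by eye.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// A positive definite symmetric matrix.
	a := mat.NewSymDense(2, []float64{
		4, 0,
		0, 9,
	})

	// s = a^0.5, the principal matrix square root.
	var s mat.SymDense
	if err := s.PowPSD(a, 0.5); err != nil {
		panic(err)
	}
	// Approximately 2 and 3, up to floating-point error.
	fmt.Println(s.At(0, 0), s.At(1, 1))
}
```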
+func (t TransposeTri) Triangle() (int, TriKind) { + n, upper := t.Triangular.Triangle() + return n, !upper +} + +// TTri performs an implicit transpose by returning the Triangular field. +func (t TransposeTri) TTri() Triangular { + return t.Triangular +} + +// Untranspose returns the Triangular field. +func (t TransposeTri) Untranspose() Matrix { + return t.Triangular +} + +func (t TransposeTri) UntransposeTri() Triangular { + return t.Triangular +} + +// NewTriDense creates a new Triangular matrix with n rows and columns. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == n*n, data is +// used as the backing slice, and changes to the elements of the returned TriDense +// will be reflected in data. If neither of these is true, NewTriDense will panic. +// NewTriDense will panic if n is zero. +// +// The data must be arranged in row-major order, i.e. the (i*c + j)-th +// element in the data slice is the {i, j}-th element in the matrix. +// Only the values in the triangular portion corresponding to kind are used. +func NewTriDense(n int, kind TriKind, data []float64) *TriDense { + if n <= 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if data != nil && len(data) != n*n { + panic(ErrShape) + } + if data == nil { + data = make([]float64, n*n) + } + uplo := blas.Lower + if kind == Upper { + uplo = blas.Upper + } + return &TriDense{ + mat: blas64.Triangular{ + N: n, + Stride: n, + Data: data, + Uplo: uplo, + Diag: blas.NonUnit, + }, + cap: n, + } +} + +func (t *TriDense) Dims() (r, c int) { + return t.mat.N, t.mat.N +} + +// Triangle returns the dimension of t and its orientation. The returned +// orientation is only valid when n is not zero. +func (t *TriDense) Triangle() (n int, kind TriKind) { + return t.mat.N, t.triKind() +} + +func (t *TriDense) isUpper() bool { + return isUpperUplo(t.mat.Uplo) +} + +func (t *TriDense) triKind() TriKind { + return TriKind(isUpperUplo(t.mat.Uplo)) +} + +func isUpperUplo(u blas.Uplo) bool { + switch u { + case blas.Upper: + return true + case blas.Lower: + return false + default: + panic(badTriangle) + } +} + +func uploToTriKind(u blas.Uplo) TriKind { + switch u { + case blas.Upper: + return Upper + case blas.Lower: + return Lower + default: + panic(badTriangle) + } +} + +// asSymBlas returns the receiver restructured as a blas64.Symmetric with the +// same backing memory. Panics if the receiver is unit. +// This returns a blas64.Symmetric and not a *SymDense because SymDense can only +// be upper triangular. +func (t *TriDense) asSymBlas() blas64.Symmetric { + if t.mat.Diag == blas.Unit { + panic("mat: cannot convert unit TriDense into blas64.Symmetric") + } + return blas64.Symmetric{ + N: t.mat.N, + Stride: t.mat.Stride, + Data: t.mat.Data, + Uplo: t.mat.Uplo, + } +} + +// T performs an implicit transpose by returning the receiver inside a Transpose. +func (t *TriDense) T() Matrix { + return Transpose{t} +} + +// TTri performs an implicit transpose by returning the receiver inside a TransposeTri. +func (t *TriDense) TTri() Triangular { + return TransposeTri{t} +} + +func (t *TriDense) RawTriangular() blas64.Triangular { + return t.mat +} + +// SetRawTriangular sets the underlying blas64.Triangular used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in the input. +// +// The supplied Triangular must not use blas.Unit storage format. 
+func (t *TriDense) SetRawTriangular(mat blas64.Triangular) { + if mat.Diag == blas.Unit { + panic("mat: cannot set TriDense with Unit storage format") + } + t.mat = mat +} + +// Reset zeros the dimensions of the matrix so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (t *TriDense) Reset() { + // N and Stride must be zeroed in unison. + t.mat.N, t.mat.Stride = 0, 0 + // Defensively zero Uplo to ensure + // it is set correctly later. + t.mat.Uplo = 0 + t.mat.Data = t.mat.Data[:0] +} + +// Zero sets all of the matrix elements to zero. +func (t *TriDense) Zero() { + if t.isUpper() { + for i := 0; i < t.mat.N; i++ { + zero(t.mat.Data[i*t.mat.Stride+i : i*t.mat.Stride+t.mat.N]) + } + return + } + for i := 0; i < t.mat.N; i++ { + zero(t.mat.Data[i*t.mat.Stride : i*t.mat.Stride+i+1]) + } +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized matrices can be the +// receiver for size-restricted operations. TriDense matrices can be zeroed using Reset. +func (t *TriDense) IsZero() bool { + // It must be the case that t.Dims() returns + // zeros in this case. See comment in Reset(). + return t.mat.Stride == 0 +} + +// untranspose untransposes a matrix if applicable. If a is an Untransposer, then +// untranspose returns the underlying matrix and true. If it is not, then it returns +// the input matrix and false. +func untransposeTri(a Triangular) (Triangular, bool) { + if ut, ok := a.(UntransposeTrier); ok { + return ut.UntransposeTri(), true + } + return a, false +} + +// reuseAs resizes a zero receiver to an n×n triangular matrix with the given +// orientation. If the receiver is non-zero, reuseAs checks that the receiver +// is the correct size and orientation. +func (t *TriDense) reuseAs(n int, kind TriKind) { + if n == 0 { + panic(ErrZeroLength) + } + ul := blas.Lower + if kind == Upper { + ul = blas.Upper + } + if t.mat.N > t.cap { + panic(badTriCap) + } + if t.IsZero() { + t.mat = blas64.Triangular{ + N: n, + Stride: n, + Diag: blas.NonUnit, + Data: use(t.mat.Data, n*n), + Uplo: ul, + } + t.cap = n + return + } + if t.mat.N != n { + panic(ErrShape) + } + if t.mat.Uplo != ul { + panic(ErrTriangle) + } +} + +// isolatedWorkspace returns a new TriDense matrix w with the size of a and +// returns a callback to defer which performs cleanup at the return of the call. +// This should be used when a method receiver is the same pointer as an input argument. +func (t *TriDense) isolatedWorkspace(a Triangular) (w *TriDense, restore func()) { + n, kind := a.Triangle() + if n == 0 { + panic(ErrZeroLength) + } + w = getWorkspaceTri(n, kind, false) + return w, func() { + t.Copy(w) + putWorkspaceTri(w) + } +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (t *TriDense) DiagView() Diagonal { + if t.mat.Diag == blas.Unit { + panic("mat: cannot take view of Unit diagonal") + } + n := t.mat.N + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: t.mat.Stride + 1, + Data: t.mat.Data[:(n-1)*t.mat.Stride+n], + }, + } +} + +// Copy makes a copy of elements of a into the receiver. It is similar to the +// built-in copy; it copies as much as the overlap between the two matrices and +// returns the number of rows and columns it copied. Only elements within the +// receiver's non-zero triangle are set. +// +// See the Copier interface for more information. 
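A short sketch of the TriDense constructor and DiagView documented above. The 3×3 values are illustrative assumptions; only the triangle named by the kind is ever read or written.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// An upper-triangular 3×3; entries below the diagonal in data are ignored.
	t := mat.NewTriDense(3, mat.Upper, []float64{
		1, 2, 3,
		0, 4, 5,
		0, 0, 6,
	})
	fmt.Println(t.At(0, 2)) // 3
	fmt.Println(t.At(2, 0)) // 0: below the diagonal of an Upper matrix

	// DiagView is backed by the same data as t; no copy is made.
	d := t.DiagView()
	fmt.Println(d.At(1, 1)) // 4
}
```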
+func (t *TriDense) Copy(a Matrix) (r, c int) { + r, c = a.Dims() + r = min(r, t.mat.N) + c = min(c, t.mat.N) + if r == 0 || c == 0 { + return 0, 0 + } + + switch a := a.(type) { + case RawMatrixer: + amat := a.RawMatrix() + if t.isUpper() { + for i := 0; i < r; i++ { + copy(t.mat.Data[i*t.mat.Stride+i:i*t.mat.Stride+c], amat.Data[i*amat.Stride+i:i*amat.Stride+c]) + } + } else { + for i := 0; i < r; i++ { + copy(t.mat.Data[i*t.mat.Stride:i*t.mat.Stride+i+1], amat.Data[i*amat.Stride:i*amat.Stride+i+1]) + } + } + case RawTriangular: + amat := a.RawTriangular() + aIsUpper := isUpperUplo(amat.Uplo) + tIsUpper := t.isUpper() + switch { + case tIsUpper && aIsUpper: + for i := 0; i < r; i++ { + copy(t.mat.Data[i*t.mat.Stride+i:i*t.mat.Stride+c], amat.Data[i*amat.Stride+i:i*amat.Stride+c]) + } + case !tIsUpper && !aIsUpper: + for i := 0; i < r; i++ { + copy(t.mat.Data[i*t.mat.Stride:i*t.mat.Stride+i+1], amat.Data[i*amat.Stride:i*amat.Stride+i+1]) + } + default: + for i := 0; i < r; i++ { + t.set(i, i, amat.Data[i*amat.Stride+i]) + } + } + default: + isUpper := t.isUpper() + for i := 0; i < r; i++ { + if isUpper { + for j := i; j < c; j++ { + t.set(i, j, a.At(i, j)) + } + } else { + for j := 0; j <= i; j++ { + t.set(i, j, a.At(i, j)) + } + } + } + } + + return r, c +} + +// InverseTri computes the inverse of the triangular matrix a, storing the result +// into the receiver. If a is ill-conditioned, a Condition error will be returned. +// Note that matrix inversion is numerically unstable, and should generally be +// avoided where possible, for example by using the Solve routines. +func (t *TriDense) InverseTri(a Triangular) error { + t.checkOverlapMatrix(a) + n, _ := a.Triangle() + t.reuseAs(a.Triangle()) + t.Copy(a) + work := getFloats(3*n, false) + iwork := getInts(n, false) + cond := lapack64.Trcon(CondNorm, t.mat, work, iwork) + putFloats(work) + putInts(iwork) + if math.IsInf(cond, 1) { + return Condition(cond) + } + ok := lapack64.Trtri(t.mat) + if !ok { + return Condition(math.Inf(1)) + } + if cond > ConditionTolerance { + return Condition(cond) + } + return nil +} + +// MulTri takes the product of triangular matrices a and b and places the result +// in the receiver. The size of a and b must match, and they both must have the +// same TriKind, or Mul will panic. +func (t *TriDense) MulTri(a, b Triangular) { + n, kind := a.Triangle() + nb, kindb := b.Triangle() + if n != nb { + panic(ErrShape) + } + if kind != kindb { + panic(ErrTriangle) + } + + aU, _ := untransposeTri(a) + bU, _ := untransposeTri(b) + t.checkOverlapMatrix(bU) + t.checkOverlapMatrix(aU) + t.reuseAs(n, kind) + var restore func() + if t == aU { + t, restore = t.isolatedWorkspace(aU) + defer restore() + } else if t == bU { + t, restore = t.isolatedWorkspace(bU) + defer restore() + } + + // Inspect types here, helps keep the loops later clean(er). + _, aDiag := aU.(Diagonal) + _, bDiag := bU.(Diagonal) + // If they are both diagonal only need 1 loop. + // All diagonal matrices are Upper. + // TODO: Add fast paths for DiagDense. + if aDiag && bDiag { + t.Zero() + for i := 0; i < n; i++ { + t.SetTri(i, i, a.At(i, i)*b.At(i, i)) + } + return + } + + // Now we know at least one matrix is non-diagonal. + // And all diagonal matrices are all Upper. + // The both-diagonal case is handled above. + // TODO: Add fast paths for Dense variants. 
+ if kind == Upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + switch { + case aDiag: + t.SetTri(i, j, a.At(i, i)*b.At(i, j)) + case bDiag: + t.SetTri(i, j, a.At(i, j)*b.At(j, j)) + default: + var v float64 + for k := i; k <= j; k++ { + v += a.At(i, k) * b.At(k, j) + } + t.SetTri(i, j, v) + } + } + } + return + } + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + var v float64 + for k := j; k <= i; k++ { + v += a.At(i, k) * b.At(k, j) + } + t.SetTri(i, j, v) + } + } +} + +// ScaleTri multiplies the elements of a by f, placing the result in the receiver. +// If the receiver is non-zero, the size and kind of the receiver must match +// the input, or ScaleTri will panic. +func (t *TriDense) ScaleTri(f float64, a Triangular) { + n, kind := a.Triangle() + t.reuseAs(n, kind) + + // TODO(btracey): Improve the set of fast-paths. + switch a := a.(type) { + case RawTriangular: + amat := a.RawTriangular() + if t != a { + t.checkOverlap(generalFromTriangular(amat)) + } + if kind == Upper { + for i := 0; i < n; i++ { + ts := t.mat.Data[i*t.mat.Stride+i : i*t.mat.Stride+n] + as := amat.Data[i*amat.Stride+i : i*amat.Stride+n] + for i, v := range as { + ts[i] = v * f + } + } + return + } + for i := 0; i < n; i++ { + ts := t.mat.Data[i*t.mat.Stride : i*t.mat.Stride+i+1] + as := amat.Data[i*amat.Stride : i*amat.Stride+i+1] + for i, v := range as { + ts[i] = v * f + } + } + return + default: + t.checkOverlapMatrix(a) + isUpper := kind == Upper + for i := 0; i < n; i++ { + if isUpper { + for j := i; j < n; j++ { + t.set(i, j, f*a.At(i, j)) + } + } else { + for j := 0; j <= i; j++ { + t.set(i, j, f*a.At(i, j)) + } + } + } + } +} + +// Trace returns the trace of the matrix. +func (t *TriDense) Trace() float64 { + // TODO(btracey): could use internal asm sum routine. + var v float64 + for i := 0; i < t.mat.N; i++ { + v += t.mat.Data[i*t.mat.Stride+i] + } + return v +} + +// copySymIntoTriangle copies a symmetric matrix into a TriDense +func copySymIntoTriangle(t *TriDense, s Symmetric) { + n, upper := t.Triangle() + ns := s.Symmetric() + if n != ns { + panic("mat: triangle size mismatch") + } + ts := t.mat.Stride + if rs, ok := s.(RawSymmetricer); ok { + sd := rs.RawSymmetric() + ss := sd.Stride + if upper { + if sd.Uplo == blas.Upper { + for i := 0; i < n; i++ { + copy(t.mat.Data[i*ts+i:i*ts+n], sd.Data[i*ss+i:i*ss+n]) + } + return + } + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + t.mat.Data[i*ts+j] = sd.Data[j*ss+i] + } + } + return + } + if sd.Uplo == blas.Upper { + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + t.mat.Data[i*ts+j] = sd.Data[j*ss+i] + } + } + return + } + for i := 0; i < n; i++ { + copy(t.mat.Data[i*ts:i*ts+i+1], sd.Data[i*ss:i*ss+i+1]) + } + return + } + if upper { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + t.mat.Data[i*ts+j] = s.At(i, j) + } + } + return + } + for i := 0; i < n; i++ { + for j := 0; j <= i; j++ { + t.mat.Data[i*ts+j] = s.At(i, j) + } + } +} + +// DoNonZero calls the function fn for each of the non-zero elements of t. The function fn +// takes a row/column index and the element value of t at (i, j). +func (t *TriDense) DoNonZero(fn func(i, j int, v float64)) { + if t.isUpper() { + for i := 0; i < t.mat.N; i++ { + for j := i; j < t.mat.N; j++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } + return + } + for i := 0; i < t.mat.N; i++ { + for j := 0; j <= i; j++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + } +} + +// DoRowNonZero calls the function fn for each of the non-zero elements of row i of t. 
The function fn +// takes a row/column index and the element value of t at (i, j). +func (t *TriDense) DoRowNonZero(i int, fn func(i, j int, v float64)) { + if i < 0 || t.mat.N <= i { + panic(ErrRowAccess) + } + if t.isUpper() { + for j := i; j < t.mat.N; j++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + return + } + for j := 0; j <= i; j++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } +} + +// DoColNonZero calls the function fn for each of the non-zero elements of column j of t. The function fn +// takes a row/column index and the element value of t at (i, j). +func (t *TriDense) DoColNonZero(j int, fn func(i, j int, v float64)) { + if j < 0 || t.mat.N <= j { + panic(ErrColAccess) + } + if t.isUpper() { + for i := 0; i <= j; i++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } + return + } + for i := j; i < t.mat.N; i++ { + v := t.at(i, j) + if v != 0 { + fn(i, j, v) + } + } +} diff --git a/vendor/gonum.org/v1/gonum/mat/triband.go b/vendor/gonum.org/v1/gonum/mat/triband.go new file mode 100644 index 0000000..698af89 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/triband.go @@ -0,0 +1,367 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" +) + +var ( + triBand TriBanded + _ Banded = triBand + _ Triangular = triBand + + triBandDense *TriBandDense + _ Matrix = triBandDense + _ Triangular = triBandDense + _ Banded = triBandDense + _ TriBanded = triBandDense + _ RawTriBander = triBandDense + _ MutableTriBanded = triBandDense +) + +// TriBanded is a triangular band matrix interface type. +type TriBanded interface { + Banded + + // Triangle returns the number of rows/columns in the matrix and its + // orientation. + Triangle() (n int, kind TriKind) + + // TTri is the equivalent of the T() method in the Matrix interface but + // guarantees the transpose is of triangular type. + TTri() Triangular + + // TriBand returns the number of rows/columns in the matrix, the + // size of the bandwidth, and the orientation. + TriBand() (n, k int, kind TriKind) + + // TTriBand is the equivalent of the T() method in the Matrix interface but + // guarantees the transpose is of banded triangular type. + TTriBand() TriBanded +} + +// A RawTriBander can return a blas64.TriangularBand representation of the receiver. +// Changes to the blas64.TriangularBand.Data slice will be reflected in the original +// matrix, changes to the N, K, Stride, Uplo and Diag fields will not. +type RawTriBander interface { + RawTriBand() blas64.TriangularBand +} + +// MutableTriBanded is a triangular band matrix interface type that allows +// elements to be altered. +type MutableTriBanded interface { + TriBanded + SetTriBand(i, j int, v float64) +} + +var ( + tTriBand TransposeTriBand + _ Matrix = tTriBand + _ TriBanded = tTriBand + _ Untransposer = tTriBand + _ UntransposeTrier = tTriBand + _ UntransposeBander = tTriBand + _ UntransposeTriBander = tTriBand +) + +// TransposeTriBand is a type for performing an implicit transpose of a TriBanded +// matrix. It implements the TriBanded interface, returning values from the +// transpose of the matrix within. +type TransposeTriBand struct { + TriBanded TriBanded +} + +// At returns the value of the element at row i and column j of the transposed +// matrix, that is, row j and column i of the TriBanded field. 
+func (t TransposeTriBand) At(i, j int) float64 { + return t.TriBanded.At(j, i) +} + +// Dims returns the dimensions of the transposed matrix. TriBanded matrices are +// square and thus this is the same size as the original TriBanded. +func (t TransposeTriBand) Dims() (r, c int) { + c, r = t.TriBanded.Dims() + return r, c +} + +// T performs an implicit transpose by returning the TriBand field. +func (t TransposeTriBand) T() Matrix { + return t.TriBanded +} + +// Triangle returns the number of rows/columns in the matrix and its orientation. +func (t TransposeTriBand) Triangle() (int, TriKind) { + n, upper := t.TriBanded.Triangle() + return n, !upper +} + +// TTri performs an implicit transpose by returning the TriBand field. +func (t TransposeTriBand) TTri() Triangular { + return t.TriBanded +} + +// Bandwidth returns the upper and lower bandwidths of the matrix. +func (t TransposeTriBand) Bandwidth() (kl, ku int) { + kl, ku = t.TriBanded.Bandwidth() + return ku, kl +} + +// TBand performs an implicit transpose by returning the TriBand field. +func (t TransposeTriBand) TBand() Banded { + return t.TriBanded +} + +// TriBand returns the number of rows/columns in the matrix, the +// size of the bandwidth, and the orientation. +func (t TransposeTriBand) TriBand() (n, k int, kind TriKind) { + n, k, kind = t.TriBanded.TriBand() + return n, k, !kind +} + +// TTriBand performs an implicit transpose by returning the TriBand field. +func (t TransposeTriBand) TTriBand() TriBanded { + return t.TriBanded +} + +// Untranspose returns the Triangular field. +func (t TransposeTriBand) Untranspose() Matrix { + return t.TriBanded +} + +// UntransposeTri returns the underlying Triangular matrix. +func (t TransposeTriBand) UntransposeTri() Triangular { + return t.TriBanded +} + +// UntransposeBand returns the underlying Banded matrix. +func (t TransposeTriBand) UntransposeBand() Banded { + return t.TriBanded +} + +// UntransposeTriBand returns the underlying TriBanded matrix. +func (t TransposeTriBand) UntransposeTriBand() TriBanded { + return t.TriBanded +} + +// TriBandDense represents a triangular band matrix in dense storage format. +type TriBandDense struct { + mat blas64.TriangularBand +} + +// NewTriBandDense creates a new triangular banded matrix with n rows and columns, +// k bands in the direction of the specified kind. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == n*(k+1), +// data is used as the backing slice, and changes to the elements of the returned +// TriBandDense will be reflected in data. If neither of these is true, NewTriBandDense +// will panic. k must be at least zero and less than n, otherwise NewTriBandDense will panic. +// +// The data must be arranged in row-major order constructed by removing the zeros +// from the rows outside the band and aligning the diagonals. For example, if +// the upper-triangular banded matrix +// 1 2 3 0 0 0 +// 0 4 5 6 0 0 +// 0 0 7 8 9 0 +// 0 0 0 10 11 12 +// 0 0 0 0 13 14 +// 0 0 0 0 0 15 +// becomes (* entries are never accessed) +// 1 2 3 +// 4 5 6 +// 7 8 9 +// 10 11 12 +// 13 14 * +// 15 * * +// which is passed to NewTriBandDense as []float64{1, 2, ..., 15, *, *, *} +// with k=2 and kind = mat.Upper. 
+// The lower triangular banded matrix +// 1 0 0 0 0 0 +// 2 3 0 0 0 0 +// 4 5 6 0 0 0 +// 0 7 8 9 0 0 +// 0 0 10 11 12 0 +// 0 0 0 13 14 15 +// becomes (* entries are never accessed) +// * * 1 +// * 2 3 +// 4 5 6 +// 7 8 9 +// 10 11 12 +// 13 14 15 +// which is passed to NewTriBandDense as []float64{*, *, *, 1, 2, ..., 15} +// with k=2 and kind = mat.Lower. +// Only the values in the band portion of the matrix are used. +func NewTriBandDense(n, k int, kind TriKind, data []float64) *TriBandDense { + if n <= 0 || k < 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if k+1 > n { + panic("mat: band out of range") + } + bc := k + 1 + if data != nil && len(data) != n*bc { + panic(ErrShape) + } + if data == nil { + data = make([]float64, n*bc) + } + uplo := blas.Lower + if kind { + uplo = blas.Upper + } + return &TriBandDense{ + mat: blas64.TriangularBand{ + Uplo: uplo, + Diag: blas.NonUnit, + N: n, + K: k, + Data: data, + Stride: bc, + }, + } +} + +// Dims returns the number of rows and columns in the matrix. +func (t *TriBandDense) Dims() (r, c int) { + return t.mat.N, t.mat.N +} + +// T performs an implicit transpose by returning the receiver inside a Transpose. +func (t *TriBandDense) T() Matrix { + return Transpose{t} +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized matrices can be the +// receiver for size-restricted operations. TriBandDense matrices can be zeroed using Reset. +func (t *TriBandDense) IsZero() bool { + // It must be the case that t.Dims() returns + // zeros in this case. See comment in Reset(). + return t.mat.Stride == 0 +} + +// Reset zeros the dimensions of the matrix so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (t *TriBandDense) Reset() { + t.mat.N = 0 + t.mat.Stride = 0 + t.mat.K = 0 + t.mat.Data = t.mat.Data[:0] +} + +// Zero sets all of the matrix elements to zero. +func (t *TriBandDense) Zero() { + if t.isUpper() { + for i := 0; i < t.mat.N; i++ { + u := min(1+t.mat.K, t.mat.N-i) + zero(t.mat.Data[i*t.mat.Stride : i*t.mat.Stride+u]) + } + return + } + for i := 0; i < t.mat.N; i++ { + l := max(0, t.mat.K-i) + zero(t.mat.Data[i*t.mat.Stride+l : i*t.mat.Stride+t.mat.K+1]) + } +} + +func (t *TriBandDense) isUpper() bool { + return isUpperUplo(t.mat.Uplo) +} + +func (t *TriBandDense) triKind() TriKind { + return TriKind(isUpperUplo(t.mat.Uplo)) +} + +// Triangle returns the dimension of t and its orientation. The returned +// orientation is only valid when n is not zero. +func (t *TriBandDense) Triangle() (n int, kind TriKind) { + return t.mat.N, t.triKind() +} + +// TTri performs an implicit transpose by returning the receiver inside a TransposeTri. +func (t *TriBandDense) TTri() Triangular { + return TransposeTri{t} +} + +// Bandwidth returns the upper and lower bandwidths of the matrix. +func (t *TriBandDense) Bandwidth() (kl, ku int) { + if t.isUpper() { + return 0, t.mat.K + } + return t.mat.K, 0 +} + +// TBand performs an implicit transpose by returning the receiver inside a TransposeBand. +func (t *TriBandDense) TBand() Banded { + return TransposeBand{t} +} + +// TriBand returns the number of rows/columns in the matrix, the +// size of the bandwidth, and the orientation. +func (t *TriBandDense) TriBand() (n, k int, kind TriKind) { + return t.mat.N, t.mat.K, TriKind(!t.IsZero()) && t.triKind() +} + +// TTriBand performs an implicit transpose by returning the receiver inside a TransposeTriBand. 
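To ground the upper-triangular band layout documented above, a sketch constructing the 6×6, k=2 example from the NewTriBandDense docs. The zero padding entries and the printed checks are assumptions for illustration; the padding is never accessed.

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

func main() {
	// Row-major packed upper band storage with diagonals aligned.
	data := []float64{
		1, 2, 3,
		4, 5, 6,
		7, 8, 9,
		10, 11, 12,
		13, 14, 0,
		15, 0, 0,
	}
	t := mat.NewTriBandDense(6, 2, mat.Upper, data)

	fmt.Println(t.At(0, 2))    // 3, inside the band
	fmt.Println(t.At(0, 3))    // 0, outside the band
	fmt.Println(t.At(2, 0))    // 0, below the diagonal
	fmt.Println(t.Bandwidth()) // 0 2
}
```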
+func (t *TriBandDense) TTriBand() TriBanded { + return TransposeTriBand{t} +} + +// RawTriBand returns the underlying blas64.TriangularBand used by the receiver. +// Changes to the blas64.TriangularBand.Data slice will be reflected in the original +// matrix, changes to the N, K, Stride, Uplo and Diag fields will not. +func (t *TriBandDense) RawTriBand() blas64.TriangularBand { + return t.mat +} + +// SetRawTriBand sets the underlying blas64.TriangularBand used by the receiver. +// Changes to elements in the receiver following the call will be reflected +// in the input. +// +// The supplied TriangularBand must not use blas.Unit storage format. +func (t *TriBandDense) SetRawTriBand(mat blas64.TriangularBand) { + if mat.Diag == blas.Unit { + panic("mat: cannot set TriBand with Unit storage") + } + t.mat = mat +} + +// DiagView returns the diagonal as a matrix backed by the original data. +func (t *TriBandDense) DiagView() Diagonal { + if t.mat.Diag == blas.Unit { + panic("mat: cannot take view of Unit diagonal") + } + n := t.mat.N + data := t.mat.Data + if !t.isUpper() { + data = data[t.mat.K:] + } + return &DiagDense{ + mat: blas64.Vector{ + N: n, + Inc: t.mat.Stride, + Data: data[:(n-1)*t.mat.Stride+1], + }, + } +} + +// Trace returns the trace. +func (t *TriBandDense) Trace() float64 { + rb := t.RawTriBand() + var tr float64 + var offsetIndex int + if rb.Uplo == blas.Lower { + offsetIndex = rb.K + } + for i := 0; i < rb.N; i++ { + tr += rb.Data[offsetIndex+i*rb.Stride] + } + return tr +} diff --git a/vendor/gonum.org/v1/gonum/mat/vector.go b/vendor/gonum.org/v1/gonum/mat/vector.go new file mode 100644 index 0000000..8191312 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mat/vector.go @@ -0,0 +1,741 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mat + +import ( + "gonum.org/v1/gonum/blas" + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/internal/asm/f64" +) + +var ( + vector *VecDense + + _ Matrix = vector + _ Vector = vector + _ Reseter = vector +) + +// Vector is a vector. +type Vector interface { + Matrix + AtVec(int) float64 + Len() int +} + +// TransposeVec is a type for performing an implicit transpose of a Vector. +// It implements the Vector interface, returning values from the transpose +// of the vector within. +type TransposeVec struct { + Vector Vector +} + +// At returns the value of the element at row i and column j of the transposed +// matrix, that is, row j and column i of the Vector field. +func (t TransposeVec) At(i, j int) float64 { + return t.Vector.At(j, i) +} + +// AtVec returns the element at position i. It panics if i is out of bounds. +func (t TransposeVec) AtVec(i int) float64 { + return t.Vector.AtVec(i) +} + +// Dims returns the dimensions of the transposed vector. +func (t TransposeVec) Dims() (r, c int) { + c, r = t.Vector.Dims() + return r, c +} + +// T performs an implicit transpose by returning the Vector field. +func (t TransposeVec) T() Matrix { + return t.Vector +} + +// Len returns the number of columns in the vector. +func (t TransposeVec) Len() int { + return t.Vector.Len() +} + +// TVec performs an implicit transpose by returning the Vector field. +func (t TransposeVec) TVec() Vector { + return t.Vector +} + +// Untranspose returns the Vector field. 
+func (t TransposeVec) Untranspose() Matrix { + return t.Vector +} + +func (t TransposeVec) UntransposeVec() Vector { + return t.Vector +} + +// VecDense represents a column vector. +type VecDense struct { + mat blas64.Vector + // A BLAS vector can have a negative increment, but allowing this + // in the mat type complicates a lot of code, and doesn't gain anything. + // VecDense must have positive increment in this package. +} + +// NewVecDense creates a new VecDense of length n. If data == nil, +// a new slice is allocated for the backing slice. If len(data) == n, data is +// used as the backing slice, and changes to the elements of the returned VecDense +// will be reflected in data. If neither of these is true, NewVecDense will panic. +// NewVecDense will panic if n is zero. +func NewVecDense(n int, data []float64) *VecDense { + if n <= 0 { + if n == 0 { + panic(ErrZeroLength) + } + panic("mat: negative dimension") + } + if len(data) != n && data != nil { + panic(ErrShape) + } + if data == nil { + data = make([]float64, n) + } + return &VecDense{ + mat: blas64.Vector{ + N: n, + Inc: 1, + Data: data, + }, + } +} + +// SliceVec returns a new Vector that shares backing data with the receiver. +// The returned matrix starts at i of the receiver and extends k-i elements. +// SliceVec panics with ErrIndexOutOfRange if the slice is outside the capacity +// of the receiver. +func (v *VecDense) SliceVec(i, k int) Vector { + if i < 0 || k <= i || v.Cap() < k { + panic(ErrIndexOutOfRange) + } + return &VecDense{ + mat: blas64.Vector{ + N: k - i, + Inc: v.mat.Inc, + Data: v.mat.Data[i*v.mat.Inc : (k-1)*v.mat.Inc+1], + }, + } +} + +// Dims returns the number of rows and columns in the matrix. Columns is always 1 +// for a non-Reset vector. +func (v *VecDense) Dims() (r, c int) { + if v.IsZero() { + return 0, 0 + } + return v.mat.N, 1 +} + +// Caps returns the number of rows and columns in the backing matrix. Columns is always 1 +// for a non-Reset vector. +func (v *VecDense) Caps() (r, c int) { + if v.IsZero() { + return 0, 0 + } + return v.Cap(), 1 +} + +// Len returns the length of the vector. +func (v *VecDense) Len() int { + return v.mat.N +} + +// Cap returns the capacity of the vector. +func (v *VecDense) Cap() int { + if v.IsZero() { + return 0 + } + return (cap(v.mat.Data)-1)/v.mat.Inc + 1 +} + +// T performs an implicit transpose by returning the receiver inside a Transpose. +func (v *VecDense) T() Matrix { + return Transpose{v} +} + +// TVec performs an implicit transpose by returning the receiver inside a TransposeVec. +func (v *VecDense) TVec() Vector { + return TransposeVec{v} +} + +// Reset zeros the length of the vector so that it can be reused as the +// receiver of a dimensionally restricted operation. +// +// See the Reseter interface for more information. +func (v *VecDense) Reset() { + // No change of Inc or N to 0 may be + // made unless both are set to 0. + v.mat.Inc = 0 + v.mat.N = 0 + v.mat.Data = v.mat.Data[:0] +} + +// Zero sets all of the matrix elements to zero. +func (v *VecDense) Zero() { + for i := 0; i < v.mat.N; i++ { + v.mat.Data[v.mat.Inc*i] = 0 + } +} + +// CloneVec makes a copy of a into the receiver, overwriting the previous value +// of the receiver. 
+func (v *VecDense) CloneVec(a Vector) { + if v == a { + return + } + n := a.Len() + v.mat = blas64.Vector{ + N: n, + Inc: 1, + Data: use(v.mat.Data, n), + } + if r, ok := a.(RawVectorer); ok { + blas64.Copy(r.RawVector(), v.mat) + return + } + for i := 0; i < a.Len(); i++ { + v.SetVec(i, a.AtVec(i)) + } +} + +// VecDenseCopyOf returns a newly allocated copy of the elements of a. +func VecDenseCopyOf(a Vector) *VecDense { + v := &VecDense{} + v.CloneVec(a) + return v +} + +func (v *VecDense) RawVector() blas64.Vector { + return v.mat +} + +// CopyVec makes a copy of elements of a into the receiver. It is similar to the +// built-in copy; it copies as much as the overlap between the two vectors and +// returns the number of elements it copied. +func (v *VecDense) CopyVec(a Vector) int { + n := min(v.Len(), a.Len()) + if v == a { + return n + } + if r, ok := a.(RawVectorer); ok { + blas64.Copy(r.RawVector(), v.mat) + return n + } + for i := 0; i < n; i++ { + v.setVec(i, a.AtVec(i)) + } + return n +} + +// ScaleVec scales the vector a by alpha, placing the result in the receiver. +func (v *VecDense) ScaleVec(alpha float64, a Vector) { + n := a.Len() + + if v == a { + if v.mat.Inc == 1 { + f64.ScalUnitary(alpha, v.mat.Data) + return + } + f64.ScalInc(alpha, v.mat.Data, uintptr(n), uintptr(v.mat.Inc)) + return + } + + v.reuseAs(n) + + if rv, ok := a.(RawVectorer); ok { + mat := rv.RawVector() + v.checkOverlap(mat) + if v.mat.Inc == 1 && mat.Inc == 1 { + f64.ScalUnitaryTo(v.mat.Data, alpha, mat.Data) + return + } + f64.ScalIncTo(v.mat.Data, uintptr(v.mat.Inc), + alpha, mat.Data, uintptr(n), uintptr(mat.Inc)) + return + } + + for i := 0; i < n; i++ { + v.setVec(i, alpha*a.AtVec(i)) + } +} + +// AddScaledVec adds the vectors a and alpha*b, placing the result in the receiver. +func (v *VecDense) AddScaledVec(a Vector, alpha float64, b Vector) { + if alpha == 1 { + v.AddVec(a, b) + return + } + if alpha == -1 { + v.SubVec(a, b) + return + } + + ar := a.Len() + br := b.Len() + + if ar != br { + panic(ErrShape) + } + + var amat, bmat blas64.Vector + fast := true + aU, _ := untranspose(a) + if rv, ok := aU.(RawVectorer); ok { + amat = rv.RawVector() + if v != a { + v.checkOverlap(amat) + } + } else { + fast = false + } + bU, _ := untranspose(b) + if rv, ok := bU.(RawVectorer); ok { + bmat = rv.RawVector() + if v != b { + v.checkOverlap(bmat) + } + } else { + fast = false + } + + v.reuseAs(ar) + + switch { + case alpha == 0: // v <- a + if v == a { + return + } + v.CopyVec(a) + case v == a && v == b: // v <- v + alpha * v = (alpha + 1) * v + blas64.Scal(alpha+1, v.mat) + case !fast: // v <- a + alpha * b without blas64 support. + for i := 0; i < ar; i++ { + v.setVec(i, a.AtVec(i)+alpha*b.AtVec(i)) + } + case v == a && v != b: // v <- v + alpha * b + if v.mat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + f64.AxpyUnitaryTo(v.mat.Data, alpha, bmat.Data, amat.Data) + } else { + f64.AxpyInc(alpha, bmat.Data, v.mat.Data, + uintptr(ar), uintptr(bmat.Inc), uintptr(v.mat.Inc), 0, 0) + } + default: // v <- a + alpha * b or v <- a + alpha * v + if v.mat.Inc == 1 && amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + f64.AxpyUnitaryTo(v.mat.Data, alpha, bmat.Data, amat.Data) + } else { + f64.AxpyIncTo(v.mat.Data, uintptr(v.mat.Inc), 0, + alpha, bmat.Data, amat.Data, + uintptr(ar), uintptr(bmat.Inc), uintptr(amat.Inc), 0, 0) + } + } +} + +// AddVec adds the vectors a and b, placing the result in the receiver. 
+func (v *VecDense) AddVec(a, b Vector) { + ar := a.Len() + br := b.Len() + + if ar != br { + panic(ErrShape) + } + + v.reuseAs(ar) + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + + if arv, ok := aU.(RawVectorer); ok { + if brv, ok := bU.(RawVectorer); ok { + amat := arv.RawVector() + bmat := brv.RawVector() + + if v != a { + v.checkOverlap(amat) + } + if v != b { + v.checkOverlap(bmat) + } + + if v.mat.Inc == 1 && amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + f64.AxpyUnitaryTo(v.mat.Data, 1, bmat.Data, amat.Data) + return + } + f64.AxpyIncTo(v.mat.Data, uintptr(v.mat.Inc), 0, + 1, bmat.Data, amat.Data, + uintptr(ar), uintptr(bmat.Inc), uintptr(amat.Inc), 0, 0) + return + } + } + + for i := 0; i < ar; i++ { + v.setVec(i, a.AtVec(i)+b.AtVec(i)) + } +} + +// SubVec subtracts the vector b from a, placing the result in the receiver. +func (v *VecDense) SubVec(a, b Vector) { + ar := a.Len() + br := b.Len() + + if ar != br { + panic(ErrShape) + } + + v.reuseAs(ar) + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + + if arv, ok := aU.(RawVectorer); ok { + if brv, ok := bU.(RawVectorer); ok { + amat := arv.RawVector() + bmat := brv.RawVector() + + if v != a { + v.checkOverlap(amat) + } + if v != b { + v.checkOverlap(bmat) + } + + if v.mat.Inc == 1 && amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + f64.AxpyUnitaryTo(v.mat.Data, -1, bmat.Data, amat.Data) + return + } + f64.AxpyIncTo(v.mat.Data, uintptr(v.mat.Inc), 0, + -1, bmat.Data, amat.Data, + uintptr(ar), uintptr(bmat.Inc), uintptr(amat.Inc), 0, 0) + return + } + } + + for i := 0; i < ar; i++ { + v.setVec(i, a.AtVec(i)-b.AtVec(i)) + } +} + +// MulElemVec performs element-wise multiplication of a and b, placing the result +// in the receiver. +func (v *VecDense) MulElemVec(a, b Vector) { + ar := a.Len() + br := b.Len() + + if ar != br { + panic(ErrShape) + } + + v.reuseAs(ar) + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + + if arv, ok := aU.(RawVectorer); ok { + if brv, ok := bU.(RawVectorer); ok { + amat := arv.RawVector() + bmat := brv.RawVector() + + if v != a { + v.checkOverlap(amat) + } + if v != b { + v.checkOverlap(bmat) + } + + if v.mat.Inc == 1 && amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + for i, a := range amat.Data { + v.mat.Data[i] = a * bmat.Data[i] + } + return + } + var ia, ib int + for i := 0; i < ar; i++ { + v.setVec(i, amat.Data[ia]*bmat.Data[ib]) + ia += amat.Inc + ib += bmat.Inc + } + return + } + } + + for i := 0; i < ar; i++ { + v.setVec(i, a.AtVec(i)*b.AtVec(i)) + } +} + +// DivElemVec performs element-wise division of a by b, placing the result +// in the receiver. +func (v *VecDense) DivElemVec(a, b Vector) { + ar := a.Len() + br := b.Len() + + if ar != br { + panic(ErrShape) + } + + v.reuseAs(ar) + + aU, _ := untranspose(a) + bU, _ := untranspose(b) + + if arv, ok := aU.(RawVectorer); ok { + if brv, ok := bU.(RawVectorer); ok { + amat := arv.RawVector() + bmat := brv.RawVector() + + if v != a { + v.checkOverlap(amat) + } + if v != b { + v.checkOverlap(bmat) + } + + if v.mat.Inc == 1 && amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + for i, a := range amat.Data { + v.setVec(i, a/bmat.Data[i]) + } + return + } + var ia, ib int + for i := 0; i < ar; i++ { + v.setVec(i, amat.Data[ia]/bmat.Data[ib]) + ia += amat.Inc + ib += bmat.Inc + } + } + } + + for i := 0; i < ar; i++ { + v.setVec(i, a.AtVec(i)/b.AtVec(i)) + } +} + +// MulVec computes a * b. The result is stored into the receiver. 
+// MulVec panics if the number of columns in a does not equal the number of rows in b +// or if the number of columns in b does not equal 1. +func (v *VecDense) MulVec(a Matrix, b Vector) { + r, c := a.Dims() + br, bc := b.Dims() + if c != br || bc != 1 { + panic(ErrShape) + } + + aU, trans := untranspose(a) + var bmat blas64.Vector + fast := true + bU, _ := untranspose(b) + if rv, ok := bU.(RawVectorer); ok { + bmat = rv.RawVector() + if v != b { + v.checkOverlap(bmat) + } + } else { + fast = false + } + + v.reuseAs(r) + var restore func() + if v == aU { + v, restore = v.isolatedWorkspace(aU.(*VecDense)) + defer restore() + } else if v == b { + v, restore = v.isolatedWorkspace(b) + defer restore() + } + + // TODO(kortschak): Improve the non-fast paths. + switch aU := aU.(type) { + case Vector: + if b.Len() == 1 { + // {n,1} x {1,1} + v.ScaleVec(b.AtVec(0), aU) + return + } + + // {1,n} x {n,1} + if fast { + if rv, ok := aU.(RawVectorer); ok { + amat := rv.RawVector() + if v != aU { + v.checkOverlap(amat) + } + + if amat.Inc == 1 && bmat.Inc == 1 { + // Fast path for a common case. + v.setVec(0, f64.DotUnitary(amat.Data, bmat.Data)) + return + } + v.setVec(0, f64.DotInc(amat.Data, bmat.Data, + uintptr(c), uintptr(amat.Inc), uintptr(bmat.Inc), 0, 0)) + return + } + } + var sum float64 + for i := 0; i < c; i++ { + sum += aU.AtVec(i) * b.AtVec(i) + } + v.setVec(0, sum) + return + case RawSymmetricer: + if fast { + amat := aU.RawSymmetric() + // We don't know that a is a *SymDense, so make + // a temporary SymDense to check overlap. + (&SymDense{mat: amat}).checkOverlap(v.asGeneral()) + blas64.Symv(1, amat, bmat, 0, v.mat) + return + } + case RawTriangular: + v.CopyVec(b) + amat := aU.RawTriangular() + // We don't know that a is a *TriDense, so make + // a temporary TriDense to check overlap. + (&TriDense{mat: amat}).checkOverlap(v.asGeneral()) + ta := blas.NoTrans + if trans { + ta = blas.Trans + } + blas64.Trmv(ta, amat, v.mat) + case RawMatrixer: + if fast { + amat := aU.RawMatrix() + // We don't know that a is a *Dense, so make + // a temporary Dense to check overlap. + (&Dense{mat: amat}).checkOverlap(v.asGeneral()) + t := blas.NoTrans + if trans { + t = blas.Trans + } + blas64.Gemv(t, 1, amat, bmat, 0, v.mat) + return + } + default: + if fast { + for i := 0; i < r; i++ { + var f float64 + for j := 0; j < c; j++ { + f += a.At(i, j) * bmat.Data[j*bmat.Inc] + } + v.setVec(i, f) + } + return + } + } + + for i := 0; i < r; i++ { + var f float64 + for j := 0; j < c; j++ { + f += a.At(i, j) * b.AtVec(j) + } + v.setVec(i, f) + } +} + +// reuseAs resizes an empty vector to a r×1 vector, +// or checks that a non-empty matrix is r×1. +func (v *VecDense) reuseAs(r int) { + if r == 0 { + panic(ErrZeroLength) + } + if v.IsZero() { + v.mat = blas64.Vector{ + N: r, + Inc: 1, + Data: use(v.mat.Data, r), + } + return + } + if r != v.mat.N { + panic(ErrShape) + } +} + +// IsZero returns whether the receiver is zero-sized. Zero-sized vectors can be the +// receiver for size-restricted operations. VecDenses can be zeroed using Reset. +func (v *VecDense) IsZero() bool { + // It must be the case that v.Dims() returns + // zeros in this case. See comment in Reset(). 
+ return v.mat.Inc == 0 +} + +func (v *VecDense) isolatedWorkspace(a Vector) (n *VecDense, restore func()) { + l := a.Len() + if l == 0 { + panic(ErrZeroLength) + } + n = getWorkspaceVec(l, false) + return n, func() { + v.CopyVec(n) + putWorkspaceVec(n) + } +} + +// asDense returns a Dense representation of the receiver with the same +// underlying data. +func (v *VecDense) asDense() *Dense { + return &Dense{ + mat: v.asGeneral(), + capRows: v.mat.N, + capCols: 1, + } +} + +// asGeneral returns a blas64.General representation of the receiver with the +// same underlying data. +func (v *VecDense) asGeneral() blas64.General { + return blas64.General{ + Rows: v.mat.N, + Cols: 1, + Stride: v.mat.Inc, + Data: v.mat.Data, + } +} + +// ColViewOf reflects the column j of the RawMatrixer m, into the receiver +// backed by the same underlying data. The length of the receiver must either be +// zero or match the number of rows in m. +func (v *VecDense) ColViewOf(m RawMatrixer, j int) { + rm := m.RawMatrix() + + if j >= rm.Cols || j < 0 { + panic(ErrColAccess) + } + if !v.IsZero() && v.mat.N != rm.Rows { + panic(ErrShape) + } + + v.mat.Inc = rm.Stride + v.mat.Data = rm.Data[j : (rm.Rows-1)*rm.Stride+j+1] + v.mat.N = rm.Rows +} + +// RowViewOf reflects the row i of the RawMatrixer m, into the receiver +// backed by the same underlying data. The length of the receiver must either be +// zero or match the number of columns in m. +func (v *VecDense) RowViewOf(m RawMatrixer, i int) { + rm := m.RawMatrix() + + if i >= rm.Rows || i < 0 { + panic(ErrRowAccess) + } + if !v.IsZero() && v.mat.N != rm.Cols { + panic(ErrShape) + } + + v.mat.Inc = 1 + v.mat.Data = rm.Data[i*rm.Stride : i*rm.Stride+rm.Cols] + v.mat.N = rm.Cols +} diff --git a/vendor/gonum.org/v1/gonum/mathext/airy.go b/vendor/gonum.org/v1/gonum/mathext/airy.go new file mode 100644 index 0000000..c94c5d8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/airy.go @@ -0,0 +1,37 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "gonum.org/v1/gonum/mathext/internal/amos" + +// AiryAi returns the value of the Airy function at z. The Airy function here, +// Ai(z), is one of the two linearly independent solutions to +// y'' - y*z = 0. +// See http://mathworld.wolfram.com/AiryFunctions.html for more detailed information. +func AiryAi(z complex128) complex128 { + // id specifies the order of the derivative to compute, + // 0 for the function itself and 1 for the derivative. + // kode specifies the scaling option. See the function + // documentation for the exact behavior. + id := 0 + kode := 1 + air, aii, _ := amos.Zairy(real(z), imag(z), id, kode) + return complex(air, aii) +} + +// AiryAiDeriv returns the value of the derivative of the Airy function at z. The +// Airy function here, Ai(z), is one of the two linearly independent solutions to +// y'' - y*z = 0. +// See http://mathworld.wolfram.com/AiryFunctions.html for more detailed information. +func AiryAiDeriv(z complex128) complex128 { + // id specifies the order of the derivative to compute, + // 0 for the function itself and 1 for the derivative. + // kode specifies the scaling option. See the function + // documentation for the exact behavior. 
+ id := 1 + kode := 1 + air, aii, _ := amos.Zairy(real(z), imag(z), id, kode) + return complex(air, aii) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/beta.go b/vendor/gonum.org/v1/gonum/mathext/beta.go new file mode 100644 index 0000000..c1c5348 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/beta.go @@ -0,0 +1,34 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "gonum.org/v1/gonum/mathext/internal/gonum" + +// Beta returns the value of the complete beta function B(a, b). It is defined as +// Γ(a)Γ(b) / Γ(a+b) +// Special cases are: +// B(a,b) returns NaN if a or b is Inf +// B(a,b) returns NaN if a and b are 0 +// B(a,b) returns NaN if a or b is NaN +// B(a,b) returns NaN if a or b is < 0 +// B(a,b) returns +Inf if a xor b is 0. +// +// See http://mathworld.wolfram.com/BetaFunction.html for more detailed informations. +func Beta(a, b float64) float64 { + return gonum.Beta(a, b) +} + +// Lbeta returns the natural logarithm of the complete beta function B(a,b). +// Lbeta is defined as: +// Ln(Γ(a)Γ(b)/Γ(a+b)) +// Special cases are: +// Lbeta(a,b) returns NaN if a or b is Inf +// Lbeta(a,b) returns NaN if a and b are 0 +// Lbeta(a,b) returns NaN if a or b is NaN +// Lbeta(a,b) returns NaN if a or b is < 0 +// Lbeta(a,b) returns +Inf if a xor b is 0. +func Lbeta(a, b float64) float64 { + return gonum.Lbeta(a, b) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/betainc.go b/vendor/gonum.org/v1/gonum/mathext/betainc.go new file mode 100644 index 0000000..76c11ff --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/betainc.go @@ -0,0 +1,29 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "gonum.org/v1/gonum/mathext/internal/cephes" + +// RegIncBeta returns the value of the regularized incomplete beta function +// I(x;a,b). It is defined as +// I(x;a,b) = B(x;a,b) / B(a,b) +// = Γ(a+b) / (Γ(a)*Γ(b)) * int_0^x u^(a-1) * (1-u)^(b-1) du. +// The domain of definition is 0 <= x <= 1, and the parameters a and b must be positive. +// For other values of x, a, and b RegIncBeta will panic. +func RegIncBeta(a, b float64, x float64) float64 { + return cephes.Incbet(a, b, x) +} + +// InvRegIncBeta computes the inverse of the regularized incomplete beta function. +// It returns the x for which +// y = I(x;a,b) +// The domain of definition is 0 <= y <= 1, and the parameters a and b must be +// positive. For other values of x, a, and b InvRegIncBeta will panic. +func InvRegIncBeta(a, b float64, y float64) float64 { + if y < 0 || y > 1 { + panic("mathext: parameter out of range") + } + return cephes.Incbi(a, b, y) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/digamma.go b/vendor/gonum.org/v1/gonum/mathext/digamma.go new file mode 100644 index 0000000..73092e4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/digamma.go @@ -0,0 +1,44 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import ( + "math" +) + +// Digamma returns the logorithmic derivative of the gamma function at x. +// ψ(x) = d/dx (Ln (Γ(x)). 
+func Digamma(x float64) float64 { + // This is adapted from + // http://web.science.mq.edu.au/~mjohnson/code/digamma.c + var result float64 + switch { + case math.IsNaN(x), math.IsInf(x, 1): + return x + case math.IsInf(x, -1): + return math.NaN() + case x == 0: + return math.Copysign(math.Inf(1), -x) + case x < 0: + if x == math.Floor(x) { + return math.NaN() + } + // Reflection formula, http://dlmf.nist.gov/5.5#E4 + _, r := math.Modf(x) + result = -math.Pi / math.Tan(math.Pi*r) + x = 1 - x + } + for ; x < 7; x++ { + // Recurrence relation, http://dlmf.nist.gov/5.5#E2 + result -= 1 / x + } + x -= 0.5 + xx := 1 / x + xx2 := xx * xx + xx4 := xx2 * xx2 + // Asymptotic expansion, http://dlmf.nist.gov/5.11#E2 + result += math.Log(x) + (1.0/24.0)*xx2 - (7.0/960.0)*xx4 + (31.0/8064.0)*xx4*xx2 - (127.0/30720.0)*xx4*xx4 + return result +} diff --git a/vendor/gonum.org/v1/gonum/mathext/doc.go b/vendor/gonum.org/v1/gonum/mathext/doc.go new file mode 100644 index 0000000..539622d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package mathext implements special math functions not implemented by the +// Go standard library. +package mathext // import "gonum.org/v1/gonum/mathext" diff --git a/vendor/gonum.org/v1/gonum/mathext/ell_carlson.go b/vendor/gonum.org/v1/gonum/mathext/ell_carlson.go new file mode 100644 index 0000000..dd698eb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/ell_carlson.go @@ -0,0 +1,156 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import ( + "math" +) + +// EllipticRF computes the symmetric elliptic integral R_F(x,y,z): +// R_F(x,y,z) = (1/2)\int_{0}^{\infty}{1/s(t)} dt, +// s(t) = \sqrt{(t+x)(t+y)(t+z)}. +// +// The arguments x, y, z must satisfy the following conditions, otherwise the function returns math.NaN(): +// 0 ≤ x,y,z ≤ upper, +// lower ≤ x+y,y+z,z+x, +// where: +// lower = 5/(2^1022) = 1.112536929253601e-307, +// upper = (2^1022)/5 = 8.988465674311580e+306. +// +// The definition of the symmetric elliptic integral R_F can be found in NIST +// Digital Library of Mathematical Functions (http://dlmf.nist.gov/19.16.E1). +func EllipticRF(x, y, z float64) float64 { + // The original Fortran code was published as Algorithm 577 in ACM TOMS (http://doi.org/10.1145/355958.355970). + // This code is also available as a part of SLATEC Common Mathematical Library (http://netlib.org/slatec/index.html). Later, Carlson described + // an improved version in http://dx.doi.org/10.1007/BF02198293 (also available at https://arxiv.org/abs/math/9409227). 
+ const ( + lower = 5.0 / (1 << 256) / (1 << 256) / (1 << 256) / (1 << 254) // 5*2^-1022 + upper = 1 / lower + tol = 1.2674918778210762260320167734407048051023273568443e-02 // (3ε)^(1/8) + ) + if x < 0 || y < 0 || z < 0 || math.IsNaN(x) || math.IsNaN(y) || math.IsNaN(z) { + return math.NaN() + } + if upper < x || upper < y || upper < z { + return math.NaN() + } + if x+y < lower || y+z < lower || z+x < lower { + return math.NaN() + } + + A0 := (x + y + z) / 3 + An := A0 + Q := math.Max(math.Max(math.Abs(A0-x), math.Abs(A0-y)), math.Abs(A0-z)) / tol + xn, yn, zn := x, y, z + mul := 1.0 + + for Q >= mul*math.Abs(An) { + xnsqrt, ynsqrt, znsqrt := math.Sqrt(xn), math.Sqrt(yn), math.Sqrt(zn) + lambda := xnsqrt*ynsqrt + ynsqrt*znsqrt + znsqrt*xnsqrt + An = (An + lambda) * 0.25 + xn = (xn + lambda) * 0.25 + yn = (yn + lambda) * 0.25 + zn = (zn + lambda) * 0.25 + mul *= 4 + } + + X := (A0 - x) / (mul * An) + Y := (A0 - y) / (mul * An) + Z := -(X + Y) + E2 := X*Y - Z*Z + E3 := X * Y * Z + + // http://dlmf.nist.gov/19.36.E1 + return (1 - 1/10.0*E2 + 1/14.0*E3 + 1/24.0*E2*E2 - 3/44.0*E2*E3 - 5/208.0*E2*E2*E2 + 3/104.0*E3*E3 + 1/16.0*E2*E2*E3) / math.Sqrt(An) +} + +// EllipticRD computes the symmetric elliptic integral R_D(x,y,z): +// R_D(x,y,z) = (1/2)\int_{0}^{\infty}{1/(s(t)(t+z))} dt, +// s(t) = \sqrt{(t+x)(t+y)(t+z)}. +// +// The arguments x, y, z must satisfy the following conditions, otherwise the function returns math.NaN(): +// 0 ≤ x,y ≤ upper, +// lower ≤ z ≤ upper, +// lower ≤ x+y, +// where: +// lower = (5/(2^1022))^(1/3) = 4.809554074311679e-103, +// upper = ((2^1022)/5)^(1/3) = 2.079194837087086e+102. +// +// The definition of the symmetric elliptic integral R_D can be found in NIST +// Digital Library of Mathematical Functions (http://dlmf.nist.gov/19.16.E5). +func EllipticRD(x, y, z float64) float64 { + // The original Fortran code was published as Algorithm 577 in ACM TOMS (http://doi.org/10.1145/355958.355970). + // This code is also available as a part of SLATEC Common Mathematical Library (http://netlib.org/slatec/index.html). Later, Carlson described + // an improved version in http://dx.doi.org/10.1007/BF02198293 (also available at https://arxiv.org/abs/math/9409227). 
+ const ( + lower = 4.8095540743116787026618007863123676393525016818363e-103 // (5*2^-1022)^(1/3) + upper = 1 / lower + tol = 9.0351169339315770474760122547068324993857488849382e-03 // (ε/5)^(1/8) + ) + if x < 0 || y < 0 || math.IsNaN(x) || math.IsNaN(y) || math.IsNaN(z) { + return math.NaN() + } + if upper < x || upper < y || upper < z { + return math.NaN() + } + if x+y < lower || z < lower { + return math.NaN() + } + + A0 := (x + y + 3*z) / 5 + An := A0 + Q := math.Max(math.Max(math.Abs(A0-x), math.Abs(A0-y)), math.Abs(A0-z)) / tol + xn, yn, zn := x, y, z + mul, s := 1.0, 0.0 + + for Q >= mul*math.Abs(An) { + xnsqrt, ynsqrt, znsqrt := math.Sqrt(xn), math.Sqrt(yn), math.Sqrt(zn) + lambda := xnsqrt*ynsqrt + ynsqrt*znsqrt + znsqrt*xnsqrt + s += 1 / (mul * znsqrt * (zn + lambda)) + An = (An + lambda) * 0.25 + xn = (xn + lambda) * 0.25 + yn = (yn + lambda) * 0.25 + zn = (zn + lambda) * 0.25 + mul *= 4 + } + + X := (A0 - x) / (mul * An) + Y := (A0 - y) / (mul * An) + Z := -(X + Y) / 3 + E2 := X*Y - 6*Z*Z + E3 := (3*X*Y - 8*Z*Z) * Z + E4 := 3 * (X*Y - Z*Z) * Z * Z + E5 := X * Y * Z * Z * Z + + // http://dlmf.nist.gov/19.36.E2 + return (1-3/14.0*E2+1/6.0*E3+9/88.0*E2*E2-3/22.0*E4-9/52.0*E2*E3+3/26.0*E5-1/16.0*E2*E2*E2+3/40.0*E3*E3+3/20.0*E2*E4+45/272.0*E2*E2*E3-9/68.0*(E3*E4+E2*E5))/(mul*An*math.Sqrt(An)) + 3*s +} + +// EllipticF computes the Legendre's elliptic integral of the 1st kind F(phi,m), 0≤m<1: +// F(\phi,m) = \int_{0}^{\phi} 1 / \sqrt{1-m\sin^2(\theta)} d\theta +// +// Legendre's elliptic integrals can be expressed as symmetric elliptic integrals, in this case: +// F(\phi,m) = \sin\phi R_F(\cos^2\phi,1-m\sin^2\phi,1) +// +// The definition of F(phi,k) where k=sqrt(m) can be found in NIST Digital Library of Mathematical +// Functions (http://dlmf.nist.gov/19.2.E4). +func EllipticF(phi, m float64) float64 { + s, c := math.Sincos(phi) + return s * EllipticRF(c*c, 1-m*s*s, 1) +} + +// EllipticE computes the Legendre's elliptic integral of the 2nd kind E(phi,m), 0≤m<1: +// E(\phi,m) = \int_{0}^{\phi} \sqrt{1-m\sin^2(\theta)} d\theta +// +// Legendre's elliptic integrals can be expressed as symmetric elliptic integrals, in this case: +// E(\phi,m) = \sin\phi R_F(\cos^2\phi,1-m\sin^2\phi,1)-(m/3)\sin^3\phi R_D(\cos^2\phi,1-m\sin^2\phi,1) +// +// The definition of E(phi,k) where k=sqrt(m) can be found in NIST Digital Library of Mathematical +// Functions (http://dlmf.nist.gov/19.2.E5). +func EllipticE(phi, m float64) float64 { + s, c := math.Sincos(phi) + x, y := c*c, 1-m*s*s + return s * (EllipticRF(x, y, 1) - (m/3)*s*s*EllipticRD(x, y, 1)) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/ell_complete.go b/vendor/gonum.org/v1/gonum/mathext/ell_complete.go new file mode 100644 index 0000000..f5a176a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/ell_complete.go @@ -0,0 +1,355 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import ( + "math" +) + +// CompleteK computes the complete elliptic integral of the 1st kind, 0≤m≤1. It returns math.NaN() if m is not in [0,1]. +// +// K(m) = \int_{0}^{π/2} 1/{\sqrt{1-m{\sin^2θ}}} dθ +func CompleteK(m float64) float64 { + // Reference: + // Toshio Fukushima, Precise and fast computation of complete elliptic integrals + // by piecewise minimax rational function approximation, + // Journal of Computational and Applied Mathematics, Volume 282, 2015, Pages 71-76. 
+ // https://doi.org/10.1016/j.cam.2014.12.038 + // Original Fortran code available at: + // https://www.researchgate.net/publication/295857819_xceitxt_F90_package_of_complete_elliptic_integral_computation + if m < 0 || 1 < m || math.IsNaN(m) { + return math.NaN() + } + + mc := 1 - m + + if mc > 0.592990 { + t := 2.45694208987494165*mc - 1.45694208987494165 + t2 := t * t + p := ((3703.75266375099019 + t2*(2744.82029097576810+t2*36.2381612593459565)) + t*(5462.47093231923466+t2*(543.839017382099411+t2*0.393188651542789784))) + q := ((2077.94377067058435 + t2*(1959.05960044399275+t2*43.5464368440078942)) + t*(3398.00069767755460+t2*(472.794455487539279+t2))) + return p / q + } + if mc > 0.350756 { + t := 4.12823963605439369*mc - 1.44800482178389491 + t2 := t * t + p := ((4264.28203103974630 + t2*(3214.59187442783167+t2*43.2589626155454993)) + t*(6341.90978213264024+t2*(642.790566685354573+t2*0.475223892294445943))) + q := ((2125.06914237062279 + t2*(2006.03187933518870+t2*44.1848041560412224)) + t*(3479.95663350926514+t2*(482.900172581418890+t2))) + return p / q + } + if mc > 0.206924 { + t := 6.95255575949719117*mc - 1.43865064797819679 + t2 := t * t + p := ((4870.25402224986382 + t2*(3738.29369283392307+t2*51.3609902253065926)) + t*(7307.18826377416591+t2*(754.928587580583704+t2*0.571948962277566451))) + q := ((2172.51745704102287 + t2*(2056.13612019430497+t2*44.9026847057686146)) + t*(3565.04737778032566+t2*(493.962405117599400+t2))) + return p / q + } + if mc > 0.121734 { + t := 11.7384669562155183*mc - 1.42897053644793990 + t2 := t * t + p := ((5514.8512729127464 + t2*(4313.60788246750934+t2*60.598720224393536)) + t*(8350.4595896779631+t2*(880.27903031894216+t2*0.68504458747933773))) + q := ((2218.41682813309737 + t2*(2107.97379949034285+t2*45.6911096775045314)) + t*(3650.41829123846319+t2*(505.74295207655096+t2))) + return p / q + } + if mc > 0.071412 { + t := 19.8720241643813839*mc - 1.41910098962680339 + t2 := t * t + p := ((6188.8743957372448 + t2*(4935.41351498551527+t2*70.981049144472361)) + t*(9459.3331440432847+t2*(1018.21910476032105+t2*0.81599895108245948))) + q := ((2260.73112539748448 + t2*(2159.68721749761492+t2*46.5298955058476510)) + t*(3732.66955095581621+t2*(517.86964191812384+t2))) + return p / q + } + if mc > 0.041770 { + t := 33.7359152553808785*mc - 1.40914918021725929 + t2 := t * t + p := ((6879.5170681289562 + t2*(5594.8381504799829+t2*82.452856129147838)) + t*(10615.0836403687221+t2*(1167.26108955935542+t2*0.96592719058503951))) + q := ((2296.88303450660439 + t2*(2208.74949754945558+t2*47.3844470709989137)) + t*(3807.37745652028212+t2*(529.79651353072921+t2))) + return p / q + } + if mc > 0.024360 { + t := 57.4382538770821367*mc - 1.39919586444572085 + t2 := t * t + p := ((7570.6827538712100 + t2*(6279.2661370014890+t2*94.886883830605940)) + t*(11792.9392624454532+t2*(1325.01058966228180+t2*1.13537029594409690))) + q := ((2324.04824540459984 + t2*(2252.22250562615338+t2*48.2089280211559345)) + t*(3869.56755306385732+t2*(540.85752251676412+t2))) + return p / q + } + if mc > 0.014165 { + t := 98.0872976949485042*mc - 1.38940657184894556 + t2 := t * t + p := ((8247.2601660137746 + t2*(6974.7495213178613+t2*108.098282908839979)) + t*(12967.7060124572914+t2*(1488.54008220335966+t2*1.32411616748380686))) + q := ((2340.47337508405427 + t2*(2287.70677154700516+t2*48.9575432570382154)) + t*(3915.63324533769906+t2*(550.45072377717361+t2))) + return p / q + } + if mc > 0.008213 { + t := 168.010752688172043*mc - 1.37987231182795699 + t2 := t * t + p := ((8894.2961573611293 + 
t2*(7666.5611739483371+t2*121.863474964652041)) + t*(14113.7038749808951+t2*(1654.60731579994159+t2*1.53112170837206117))) + q := ((2344.88618943372377 + t2*(2313.28396270968662+t2*49.5906602613891184)) + t*(3942.81065054556536+t2*(558.07615380622169+t2))) + return p / q + } + if mc > 0 { + t := 1.0 - 121.758188238159016*mc + p := -math.Log(mc*0.0625) * (34813.4518336350547 + t*(235.767716637974271+t*0.199792723884069485)) / (69483.5736412906324 + t*(614.265044703187382+t)) + q := -mc * (9382.53386835986099 + t*(51.6478985993381223+t*0.00410754154682816898)) / (37327.7262507318317 + t*(408.017247271148538+t)) + return p + q + } + + return math.Inf(1) +} + +// CompleteE computes the complete elliptic integral of the 2nd kind, 0≤m≤1. It returns math.NaN() if m is not in [0,1]. +// +// E(m) = \int_{0}^{π/2} {\sqrt{1-m{\sin^2θ}}} dθ +func CompleteE(m float64) float64 { + // Reference: + // Toshio Fukushima, Precise and fast computation of complete elliptic integrals + // by piecewise minimax rational function approximation, + // Journal of Computational and Applied Mathematics, Volume 282, 2015, Pages 71-76. + // https://doi.org/10.1016/j.cam.2014.12.038 + // Original Fortran code available at: + // https://www.researchgate.net/publication/295857819_xceitxt_F90_package_of_complete_elliptic_integral_computation + if m < 0 || 1 < m || math.IsNaN(m) { + return math.NaN() + } + + mc := 1 - m + + if mc > 0.566638 { + t := 2.30753965506897236*mc - 1.30753965506897236 + t2 := t * t + p := ((19702.2363352671642 + t2*(18177.1879313824040+t2*409.975559128654710)) + t*(31904.1559574281609+t2*(4362.94760768571862+t2*10.3244775335024885))) + q := ((14241.2135819448616 + t2*(10266.4884503526076+t2*117.162100771599098)) + t*(20909.9899599927367+t2*(1934.86289070792954+t2))) + return p / q + } + if mc > 0.315153 { + t := 3.97638030101198879*mc - 1.25316818100483130 + t2 := t * t + p := ((16317.0721393008221 + t2*(15129.4009798463159+t2*326.113727011739428)) + t*(26627.8852140835023+t2*(3574.15857605556033+t2*7.93163724081373477))) + q := ((13047.1505096551210 + t2*(9964.25173735060361+t2*117.670514069579649)) + t*(19753.5762165922376+t2*(1918.72232033637537+t2))) + return p / q + } + if mc > 0.171355 { + t := 6.95419964116329852*mc - 1.19163687951153702 + t2 := t * t + p := ((13577.3850240991520 + t2*(12871.9137872656293+t2*263.964361648520708)) + t*(22545.4744699553993+t2*(3000.74575264868572+t2*6.08522443139677663))) + q := ((11717.3306408059832 + t2*(9619.40382323874064+t2*118.690522739531267)) + t*(18431.1264424290258+t2*(1904.06010727307491+t2))) + return p / q + } + if mc > 0.090670 { + t := 12.3938774245522712*mc - 1.12375286608415443 + t2 := t * t + p := ((11307.9485341543712 + t2*(11208.6068472959372+t2*219.253495956962613)) + t*(19328.6173704569489+t2*(2596.54874477084334+t2*4.66931143174036616))) + q := ((10307.6837501971393 + t2*(9241.7604666150102+t2*120.498555754227847)) + t*(16982.2450249024383+t2*(1893.41905403040679+t2))) + return p / q + } + if mc > 0.046453 { + t := 22.6157360291290680*mc - 1.05056878576113260 + t2 := t * t + p := ((9383.1490856819874 + t2*(9977.2498973537718+t2*188.618148076418837)) + t*(16718.9730458676860+t2*(2323.49987246555537+t2*3.59313532204509922))) + q := ((8877.1964704758383 + t2*(8840.2771293410661+t2*123.422125687316355)) + t*(15450.0537230364062+t2*(1889.13672102820913+t2))) + return p / q + } + if mc > 0.022912 { + t := 42.4790790535661187*mc - 0.973280659275306911 + t2 := t * t + p := ((7719.1171817802054 + t2*(9045.3996063894006+t2*169.386557799782496)) + 
t*(14521.7363804934985+t2*(2149.92068078627829+t2*2.78515570453129137))) + q := ((7479.7539074698012 + t2*(8420.3848818926324+t2*127.802109608726363)) + t*(13874.4978011497847+t2*(1892.69753150329759+t2))) + return p / q + } + if mc > 0.010809 { + t := 82.6241427745187144*mc - 0.893084359249772784 + t2 := t * t + p := ((6261.6095608987273 + t2*(8304.3265605809870+t2*159.371262600702237)) + t*(12593.0874916293982+t2*(2048.68391263416822+t2*2.18867046462858104))) + q := ((6156.4532048239501 + t2*(7979.7435857665227+t2*133.911640385965187)) + t*(12283.8373999680518+t2*(1903.60556312663537+t2))) + return p / q + } + if mc > 0.004841 { + t := 167.560321715817694*mc - 0.811159517426273458 + t2 := t * t + p := ((4978.06146583586728 + t2*(7664.6703673290453+t2*156.689647694892782)) + t*(10831.7178150656694+t2*(1995.66437151562090+t2*1.75859085945198570))) + q := ((4935.56743322938333 + t2*(7506.8028283118051+t2*141.854303920116856)) + t*(10694.5510113880077+t2*(1918.38517009740321+t2))) + return p / q + } + if mc > 0 { + t := 1.0 - 206.568890725056806*mc + p := -mc * math.Log(mc*0.0625) * (41566.6612602868736 + t*(154.034981522913482+t*0.0618072471798575991)) / (165964.442527585615 + t*(917.589668642251803+t)) + q := (132232.803956682877 + t*(353.375480007017643-t*1.40105837312528026)) / (132393.665743088043 + t*(192.112635228732532-t)) + return p + q + } + + return 1 +} + +// CompleteB computes an associate complete elliptic integral of the 2nd kind, 0≤m≤1. It returns math.NaN() if m is not in [0,1]. +// +// B(m) = \int_{0}^{π/2} {\cos^2θ} / {\sqrt{1-m{\sin^2θ}}} dθ +func CompleteB(m float64) float64 { + // Reference: + // Toshio Fukushima, Precise and fast computation of complete elliptic integrals + // by piecewise minimax rational function approximation, + // Journal of Computational and Applied Mathematics, Volume 282, 2015, Pages 71-76. 
+ // https://doi.org/10.1016/j.cam.2014.12.038 + // Original Fortran code available at: + // https://www.researchgate.net/publication/295857819_xceitxt_F90_package_of_complete_elliptic_integral_computation + if m < 0 || 1 < m || math.IsNaN(m) { + return math.NaN() + } + + mc := 1 - m + + if mc > 0.555073 { + t := 2.24755971204264969*mc - 1.24755971204264969 + t2 := t * t + p := ((2030.25011505956379 + t2*(1727.60635612511943+t2*25.0715510300422010)) + t*(3223.16236100954529+t2*(361.164121995173076+t2*0.280355207707726826))) + q := ((2420.64907902774675 + t2*(2327.48464880306840+t2*47.9870997057202318)) + t*(4034.28168313496638+t2*(549.234220839203960+t2))) + return p / q + } + if mc > 0.302367 { + t := 3.95716761770595079*mc - 1.19651690106289522 + t2 := t * t + p := ((2209.26925068374373 + t2*(1981.37862223307242+t2*29.7612810087709299)) + t*(3606.58475322372526+t2*(422.693774742063054+t2*0.334623999861181980))) + q := ((2499.57898767250755 + t2*(2467.63998386656941+t2*50.0198090806651216)) + t*(4236.30953048456334+t2*(581.879599221457589+t2))) + return p / q + } + if mc > 0.161052 { + t := 7.07638962601280827*mc - 1.13966670204861480 + t2 := t * t + p := ((2359.14823394150129 + t2*(2254.30785457761760+t2*35.2259786264917876)) + t*(3983.28520266051676+t2*(492.601686517364701+t2*0.396605124984359783))) + q := ((2563.95563932625156 + t2*(2633.23323959119935+t2*52.6711647124832948)) + t*(4450.19076667898892+t2*(622.983787815718489+t2))) + return p / q + } + if mc > 0.083522 { + t := 12.8982329420869341*mc - 1.07728621178898491 + t2 := t * t + p := ((2464.65334987833736 + t2*(2541.68516994216007+t2*41.5832527504007778)) + t*(4333.38639187691528+t2*(571.53606797524881+t2*0.465975784547025267))) + q := ((2600.66956117247726 + t2*(2823.69445052534842+t2*56.136001230010910)) + t*(4661.64381841490914+t2*(674.25435972414302+t2))) + return p / q + } + if mc > 0.041966 { + t := 24.0639137549331023*mc - 1.00986620463952257 + t2 := t * t + p := ((2509.86724450741259 + t2*(2835.27071287535469+t2*48.9701196718008345)) + t*(4631.12336462339975+t2*(659.86172161727281+t2*0.54158304771955794))) + q := ((2594.15983397593723 + t2*(3034.20118545214106+t2*60.652838995496991)) + t*(4848.17491604384532+t2*(737.15143838356850+t2))) + return p / q + } + if mc > 0.020313 { + t := 46.1829769546944996*mc - 0.938114810880709371 + t2 := t * t + p := ((2480.58307884128017 + t2*(3122.00900554841322+t2*57.541132641218839)) + t*(4845.57861173250699+t2*(757.31633816400643+t2*0.62119950515996627))) + q := ((2528.85218300581396 + t2*(3253.86151324157460+t2*66.496093157522450)) + t*(4979.31783250484768+t2*(812.40556572486862+t2))) + return p / q + } + if mc > 0.009408 { + t := 91.7010545621274645*mc - 0.862723521320495186 + t2 := t * t + p := ((2365.25385348859592 + t2*(3381.09304915246175+t2*67.442026950538221)) + t*(4939.53925884558687+t2*(862.16657576129841+t2*0.70143698925710129))) + q := ((2390.48737882063755 + t2*(3462.34808443022907+t2*73.934680452209164)) + t*(5015.4675579215077+t2*(898.99542983710459+t2))) + return p / q + } + if mc > 0.004136 { + t := 189.681335356600910*mc - 0.784522003034901366 + t2 := t * t + p := ((2160.82916040868119 + t2*(3584.53058926175721+t2*78.769178005879162)) + t*(4877.14832623847052+t2*(970.53716686804832+t2*0.77797110431753920))) + q := ((2172.70451405048305 + t2*(3630.52345460629336+t2*83.173163222639080)) + t*(4916.35263668839769+t2*(993.36676027886685+t2))) + return p / q + } + if mc > 0 { + t := 1 - 106.292517006802721*mc + p := mc * math.Log(mc*0.0625) * (6607.46457640413908 + 
t*(19.0287633783211078-t*0.00625368946932704460)) / (26150.3443630974309 + t*(354.603981274536040+t)) + q := (26251.5678902584870 + t*(168.788023807915689+t*0.352150236262724288)) / (26065.7912239203873 + t*(353.916840382280456+t)) + return p + q + } + + return 1 +} + +// CompleteD computes an associate complete elliptic integral of the 2nd kind, 0≤m≤1. It returns math.NaN() if m is not in [0,1]. +// +// D(m) = \int_{0}^{π/2} {\sin^2θ} / {\sqrt{1-m{\sin^2θ}}} dθ +func CompleteD(m float64) float64 { + // Reference: + // Toshio Fukushima, Precise and fast computation of complete elliptic integrals + // by piecewise minimax rational function approximation, + // Journal of Computational and Applied Mathematics, Volume 282, 2015, Pages 71-76. + // https://doi.org/10.1016/j.cam.2014.12.038 + // Original Fortran code available at: + // https://www.researchgate.net/publication/295857819_xceitxt_F90_package_of_complete_elliptic_integral_computation + if m < 0 || 1 < m || math.IsNaN(m) { + return math.NaN() + } + + mc := 1 - m + + if mc > 0.599909 { + t := 2.49943137936119533*mc - 1.49943137936119533 + t2 := t * t + p := ((1593.39813781813498 + t2*(1058.56241259843217+t2*11.7584241242587571)) + t*(2233.25576544961714+t2*(195.247394601357872+t2*0.101486443490307517))) + q := ((1685.47865546030468 + t2*(1604.88100543517015+t2*38.6743012128666717)) + t*(2756.20968383181114+t2*(397.504162950935944+t2))) + return p / q + } + if mc > 0.359180 { + t := 4.15404874360795750*mc - 1.49205122772910617 + t2 := t * t + p := ((1967.01442513777287 + t2*(1329.30058268219177+t2*15.0447805948342760)) + t*(2779.87604145516343+t2*(247.475085945854673+t2*0.130547566005491628))) + q := ((1749.70634057327467 + t2*(1654.40804288486242+t2*39.1895256017535337)) + t*(2853.92630369567765+t2*(406.925098588378587+t2))) + return p / q + } + if mc > 0.214574 { + t := 6.91534237860116454*mc - 1.48385267554596628 + t2 := t * t + p := ((2409.64196912091452 + t2*(1659.30176823041376+t2*19.1942111405094383)) + t*(3436.40744503228691+t2*(312.186468430688790+t2*0.167847673021897479))) + q := ((1824.89205701262525 + t2*(1715.38574780156913+t2*39.8798253173462218)) + t*(2971.02216287936566+t2*(418.929791715319490+t2))) + return p / q + } + if mc > 0.127875 { + t := 11.5341584101316047*mc - 1.47493050669557896 + t2 := t * t + p := ((2926.81143179637839 + t2*(2056.45624281065334+t2*24.3811986813439843)) + t*(4214.52119721241319+t2*(391.420514384925370+t2*0.215574280659075512))) + q := ((1910.33091918583314 + t2*(1787.99942542734799+t2*40.7663012893484449)) + t*(3107.04531802441481+t2*(433.673494280825971+t2))) + return p / q + } + if mc > 0.076007 { + t := 19.2797100331611013*mc - 1.46539292049047582 + t2 := t * t + p := ((3520.63614251102960 + t2*(2526.67111759550923+t2*30.7739877519417978)) + t*(5121.2842239226937+t2*(486.926821696342529+t2*0.276315678908126399))) + q := ((2003.81997889501324 + t2*(1871.05914195570669+t2*41.8489850490387023)) + t*(3259.09205279874214+t2*(451.007555352632053+t2))) + return p / q + } + if mc > 0.045052 { + t := 32.3049588111775157*mc - 1.45540300436116944 + t2 := t * t + p := ((4188.00087087025347 + t2*(3072.05695847158556+t2*38.5070211470790031)) + t*(6156.0080960857764+t2*(599.76666155374012+t2*0.352955925261363680))) + q := ((2101.60113938424690 + t2*(1961.76794074710108+t2*43.0997999502743622)) + t*(3421.55151253792527+t2*(470.407158843118117+t2))) + return p / q + } + if mc > 0.026626 { + t := 54.2711386084880061*mc - 1.44502333658960165 + t2 := t * t + p := ((4916.74442376570733 + 
t2*(3688.12811638360551+t2*47.6447145147811350)) + t*(7304.6632479558695+t2*(729.75841970840314+t2*0.448422756936257635))) + q := ((2197.49982676612397 + t2*(2055.19657857622715+t2*44.4576261146308645)) + t*(3584.94502590860852+t2*(490.880160668822953+t2))) + return p / q + } + if mc > 0.015689 { + t := 91.4327512114839536*mc - 1.43448843375697175 + t2 := t * t + p := ((5688.7542903989517 + t2*(4364.21513060078954+t2*58.159468141567195)) + t*(8542.6096475195826+t2*(875.35992968472914+t2*0.56528145509695951))) + q := ((2285.44062680812883 + t2*(2145.80779422696555+t2*45.8427480379028781)) + t*(3739.30422133833258+t2*(511.23253971875808+t2))) + return p / q + } + if mc > 0.009216 { + t := 154.487872701992894*mc - 1.42376023482156651 + t2 := t * t + p := ((6475.3392225234969 + t2*(5081.2997108708577+t2*69.910123337464043)) + t*(9829.1138694605662+t2*(1033.32687775311981+t2*0.70526087421186325))) + q := ((2357.74885505777295 + t2*(2226.89527217032394+t2*47.1609071069631012)) + t*(3872.32565152553360+t2*(530.03943432061149+t2))) + return p / q + } + if mc > 0 { + t := 1 - 108.506944444444444*mc + p := -math.Log(mc*0.0625) * (6.2904323649908115e6 + t*(58565.284164780476+t*(131.176674599188545+t*0.0426826410911220304))) / (1.24937550257219890e7 + t*(203580.534005225410+t*(921.17729845011868+t))) + q := -(27356.1090344387530 + t*(107.767403612304371-t*0.0827769227048233593)) / (27104.0854889805978 + t*(358.708172147752755+t)) + return p + q + } + + return math.Inf(1) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/erf.go b/vendor/gonum.org/v1/gonum/mathext/erf.go new file mode 100644 index 0000000..793238b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/erf.go @@ -0,0 +1,91 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "math" + +/* +Copyright (c) 2012 The Probab Authors. All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: +* Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. +* Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*/ + +// NormalQuantile computes the quantile function (inverse CDF) of the standard +// normal. 
NormalQuantile panics if the input p is less than 0 or greater than 1. +func NormalQuantile(p float64) float64 { + switch { + case p < 0 || 1 < p: + panic("mathext: quantile out of bounds") + case p == 1: + return math.Inf(1) + case p == 0: + return math.Inf(-1) + } + // Compute rational approximation based on the value of p. + + dp := p - 0.5 + if math.Abs(dp) <= 0.425 { + z := 0.180625 - dp*dp + z1 := ((((((zQSA[0]*z+zQSA[1])*z+zQSA[2])*z+zQSA[3])*z+zQSA[4])*z+zQSA[5])*z+zQSA[6])*z + zQSA[7] + z2 := ((((((zQSB[0]*z+zQSB[1])*z+zQSB[2])*z+zQSB[3])*z+zQSB[4])*z+zQSB[5])*z+zQSB[6])*z + zQSB[7] + return dp * z1 / z2 + } + + if p < 0.5 { + r := math.Sqrt(-math.Log(p)) + if r <= 5.0 { + z := r - 1.6 + z1 := ((((((zQIA[0]*z+zQIA[1])*z+zQIA[2])*z+zQIA[3])*z+zQIA[4])*z+zQIA[5])*z+zQIA[6])*z + zQIA[7] + z2 := ((((((zQIB[0]*z+zQIB[1])*z+zQIB[2])*z+zQIB[3])*z+zQIB[4])*z+zQIB[5])*z+zQIB[6])*z + zQIB[7] + return -z1 / z2 + } + z := r - 5 + z1 := ((((((zQTA[0]*z+zQTA[1])*z+zQTA[2])*z+zQTA[3])*z+zQTA[4])*z+zQTA[5])*z+zQTA[6])*z + zQTA[7] + z2 := ((((((zQTB[0]*z+zQTB[1])*z+zQTB[2])*z+zQTB[3])*z+zQTB[4])*z+zQTB[5])*z+zQTB[6])*z + zQTB[7] + return -z1 / z2 + } + r := math.Sqrt(-math.Log(1 - p)) + if r <= 5.0 { + z := r - 1.6 + z1 := ((((((zQIA[0]*z+zQIA[1])*z+zQIA[2])*z+zQIA[3])*z+zQIA[4])*z+zQIA[5])*z+zQIA[6])*z + zQIA[7] + z2 := ((((((zQIB[0]*z+zQIB[1])*z+zQIB[2])*z+zQIB[3])*z+zQIB[4])*z+zQIB[5])*z+zQIB[6])*z + zQIB[7] + return z1 / z2 + } + + z := r - 5 + z1 := ((((((zQTA[0]*z+zQTA[1])*z+zQTA[2])*z+zQTA[3])*z+zQTA[4])*z+zQTA[5])*z+zQTA[6])*z + zQTA[7] + z2 := ((((((zQTB[0]*z+zQTB[1])*z+zQTB[2])*z+zQTB[3])*z+zQTB[4])*z+zQTB[5])*z+zQTB[6])*z + zQTB[7] + return z1 / z2 +} + +var ( + zQSA = [...]float64{2509.0809287301226727, 33430.575583588128105, 67265.770927008700853, 45921.953931549871457, 13731.693765509461125, 1971.5909503065514427, 133.14166789178437745, 3.387132872796366608} + zQSB = [...]float64{5226.495278852854561, 28729.085735721942674, 39307.89580009271061, 21213.794301586595867, 5394.1960214247511077, 687.1870074920579083, 42.313330701600911252, 1.0} + zQIA = [...]float64{7.7454501427834140764e-4, 0.0227238449892691845833, 0.24178072517745061177, 1.27045825245236838258, 3.64784832476320460504, 5.7694972214606914055, 4.6303378461565452959, 1.42343711074968357734} + zQIB = [...]float64{1.05075007164441684324e-9, 5.475938084995344946e-4, 0.0151986665636164571966, 0.14810397642748007459, 0.68976733498510000455, 1.6763848301838038494, 2.05319162663775882187, 1.0} + zQTA = [...]float64{2.01033439929228813265e-7, 2.71155556874348757815e-5, 0.0012426609473880784386, 0.026532189526576123093, 0.29656057182850489123, 1.7848265399172913358, 5.4637849111641143699, 6.6579046435011037772} + zQTB = [...]float64{2.04426310338993978564e-15, 1.4215117583164458887e-7, 1.8463183175100546818e-5, 7.868691311456132591e-4, 0.0148753612908506148525, 0.13692988092273580531, 0.59983220655588793769, 1.0} +) diff --git a/vendor/gonum.org/v1/gonum/mathext/gamma_inc.go b/vendor/gonum.org/v1/gonum/mathext/gamma_inc.go new file mode 100644 index 0000000..491d843 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/gamma_inc.go @@ -0,0 +1,50 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import ( + "gonum.org/v1/gonum/mathext/internal/cephes" +) + +// GammaIncReg computes the regularized incomplete Gamma integral. 
+// GammaIncReg(a,x) = (1/ Γ(a)) \int_0^x e^{-t} t^{a-1} dt +// The input argument a must be positive and x must be non-negative or GammaIncReg +// will panic. +// +// See http://mathworld.wolfram.com/IncompleteGammaFunction.html +// or https://en.wikipedia.org/wiki/Incomplete_gamma_function for more detailed +// information. +func GammaIncReg(a, x float64) float64 { + return cephes.Igam(a, x) +} + +// GammaIncRegComp computes the complemented regularized incomplete Gamma integral. +// GammaIncRegComp(a,x) = 1 - GammaIncReg(a,x) +// = (1/ Γ(a)) \int_0^\infty e^{-t} t^{a-1} dt +// The input argument a must be positive and x must be non-negative or +// GammaIncRegComp will panic. +func GammaIncRegComp(a, x float64) float64 { + return cephes.IgamC(a, x) +} + +// GammaIncRegInv computes the inverse of the regularized incomplete Gamma integral. That is, +// it returns the x such that: +// GammaIncReg(a, x) = y +// The input argument a must be positive and y must be between 0 and 1 +// inclusive or GammaIncRegInv will panic. GammaIncRegInv should return a positive +// number, but can return NaN if there is a failure to converge. +func GammaIncRegInv(a, y float64) float64 { + return gammaIncRegInv(a, y) +} + +// GammaIncRegCompInv computes the inverse of the complemented regularized incomplete Gamma +// integral. That is, it returns the x such that: +// GammaIncRegComp(a, x) = y +// The input argument a must be positive and y must be between 0 and 1 +// inclusive or GammaIncRegCompInv will panic. GammaIncRegCompInv should return a +// positive number, but can return 0 even with non-zero y due to underflow. +func GammaIncRegCompInv(a, y float64) float64 { + return cephes.IgamI(a, y) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/gamma_inc_inv.go b/vendor/gonum.org/v1/gonum/mathext/gamma_inc_inv.go new file mode 100644 index 0000000..24a0e6f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/gamma_inc_inv.go @@ -0,0 +1,56 @@ +// Derived from SciPy's special/c_misc/gammaincinv.c +// https://github.com/scipy/scipy/blob/master/scipy/special/c_misc/gammaincinv.c + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import ( + "math" + + "gonum.org/v1/gonum/mathext/internal/cephes" +) + +const ( + allowedATol = 1e-306 + allowedRTol = 1e-6 +) + +func gammaIncReg(x float64, params []float64) float64 { + return cephes.Igam(params[0], x) - params[1] +} + +// gammaIncRegInv is the inverse of the regularized incomplete Gamma integral. That is, it +// returns x such that: +// Igam(a, x) = y +// The input argument a must be positive and y must be between 0 and 1 +// inclusive or gammaIncRegInv will panic. gammaIncRegInv should return a +// positive number, but can return NaN if there is a failure to converge. +func gammaIncRegInv(a, y float64) float64 { + // For y not small, we just use + // IgamI(a, 1-y) + // (inverse of the complemented incomplete Gamma integral). For y small, + // however, 1-y is about 1, and we lose digits. + if a <= 0 || y <= 0 || y >= 0.25 { + return cephes.IgamI(a, 1-y) + } + + lo := 0.0 + flo := -y + hi := cephes.IgamI(a, 0.75) + fhi := 0.25 - y + + params := []float64{a, y} + + // Also, after we generate a small interval by bisection above, false + // position will do a large step from an interval of width ~1e-4 to ~1e-14 + // in one step (a=10, x=0.05, but similar for other values). 
+ result, bestX, _, errEst := falsePosition(lo, hi, flo, fhi, 2*machEp, 2*machEp, 1e-2*a, gammaIncReg, params) + if result == fSolveMaxIterations && errEst > allowedATol+allowedRTol*math.Abs(bestX) { + bestX = math.NaN() + } + + return bestX +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amos.go b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amos.go new file mode 100644 index 0000000..35648ce --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amos.go @@ -0,0 +1,2154 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package amos + +import ( + "math" + "math/cmplx" +) + +/* +The AMOS functions are included in SLATEC, and the SLATEC guide (http://www.netlib.org/slatec/guide) explicitly states: +"The Library is in the public domain and distributed by the Energy +Science and Technology Software Center." +Mention of AMOS's inclusion in SLATEC goes back at least to this 1985 technical report from Sandia National Labs: http://infoserve.sandia.gov/sand_doc/1985/851018.pdf +*/ + +// math.NaN() are for padding to keep indexing easy. +var imach = []int{-0, 5, 6, 0, 0, 32, 4, 2, 31, 2147483647, 2, 24, -125, 127, 53, -1021, 1023} + +var dmach = []float64{math.NaN(), 2.23e-308, 1.79e-308, 1.11e-16, 2.22e-16, 0.30103000998497009} + +func abs(a int) int { + if a >= 0 { + return a + } + return -a +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func Zairy(ZR, ZI float64, ID, KODE int) (AIR, AII float64, NZ int) { + // zairy is adapted from the original Netlib code by Donald Amos. + // http://www.netlib.no/netlib/amos/zairy.f + + // Original comment: + /* + C***BEGIN PROLOGUE ZAIRY + C***DATE WRITTEN 830501 (YYMMDD) + C***REVISION DATE 890801 (YYMMDD) + C***CATEGORY NO. B5K + C***KEYWORDS AIRY FUNCTION,BESSEL FUNCTIONS OF ORDER ONE THIRD + C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES + C***PURPOSE TO COMPUTE AIRY FUNCTIONS AI(Z) AND DAI(Z) FOR COMPLEX Z + C***DESCRIPTION + C + C ***A DOUBLE PRECISION ROUTINE*** + C ON KODE=1, ZAIRY COMPUTES THE COMPLEX AIRY FUNCTION AI(Z) OR + C ITS DERIVATIVE DAI(Z)/DZ ON ID=0 OR ID=1 RESPECTIVELY. ON + C KODE=2, A SCALING OPTION CEXP(ZTA)*AI(Z) OR CEXP(ZTA)* + C DAI(Z)/DZ IS PROVIDED TO REMOVE THE EXPONENTIAL DECAY IN + C -PI/31.0 FROM THE K BESSEL + C FUNCTIONS BY + C + C AI(Z)=C*SQRT(Z)*K(1/3,ZTA) , DAI(Z)=-C*Z*K(2/3,ZTA) + C C=1.0/(PI*SQRT(3.0)) + C ZTA=(2/3)*Z**(3/2) + C + C WITH THE POWER SERIES FOR CABS(Z)<=1.0. + C + C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- + C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z IS LARGE, LOSSES + C OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. CONSEQUENTLY, IF + C THE MAGNITUDE OF ZETA=(2/3)*Z**1.5 EXCEEDS U1=SQRT(0.5/UR), + C THEN LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR + C FLAG IERR=3 IS TRIGGERED WHERE UR=math.Max(dmach[4),1.0D-18) IS + C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. + C ALSO, if THE MAGNITUDE OF ZETA IS LARGER THAN U2=0.5/UR, THEN + C ALL SIGNIFICANCE IS LOST AND IERR=4. IN ORDER TO USE THE INT + C FUNCTION, ZETA MUST BE FURTHER RESTRICTED NOT TO EXCEED THE + C LARGEST INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF ZETA + C MUST BE RESTRICTED BY MIN(U2,U3). 
ON 32 BIT MACHINES, U1,U2, + C AND U3 ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE + C PRECISION ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE + C PRECISION ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMIT- + C ING IN THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT THE MAG- + C NITUDE OF Z CANNOT EXCEED 3.1E+4 IN SINGLE AND 2.1E+6 IN + C DOUBLE PRECISION ARITHMETIC. THIS ALSO MEANS THAT ONE CAN + C EXPECT TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, + C NO DIGITS IN SINGLE PRECISION AND ONLY 7 DIGITS IN DOUBLE + C PRECISION ARITHMETIC. SIMILAR CONSIDERATIONS HOLD FOR OTHER + C MACHINES. + C + C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX + C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT + C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- + C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE + C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), + C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF + C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY + C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN + C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY + C SEVERAL ORDERS OF MAGNITUDE. if ONE COMPONENT IS 10**K LARGER + C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, + C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS + C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER + C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY + C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER + C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE + C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, + C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, + C OR -PI/2+P. + C + C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ + C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF + C COMMERCE, 1955. + C + C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT + C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 + C + C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX + C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- + C 1018, MAY, 1985 + C + C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX + C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. + C MATH. SOFTWARE, 1986 + */ + var AI, CONE, CSQ, CY, S1, S2, TRM1, TRM2, Z, ZTA, Z3 complex128 + var AA, AD, AK, ALIM, ATRM, AZ, AZ3, BK, + CC, CK, COEF, CONEI, CONER, CSQI, CSQR, C1, C2, DIG, + DK, D1, D2, ELIM, FID, FNU, PTR, RL, R1M5, SFAC, STI, STR, + S1I, S1R, S2I, S2R, TOL, TRM1I, TRM1R, TRM2I, TRM2R, TTH, ZEROI, + ZEROR, ZTAI, ZTAR, Z3I, Z3R, ALAZ, BB float64 + var IERR, IFLAG, K, K1, K2, MR, NN int + var tmp complex128 + + // Extra element for padding. + CYR := []float64{math.NaN(), 0} + CYI := []float64{math.NaN(), 0} + + _ = AI + _ = CONE + _ = CSQ + _ = CY + _ = S1 + _ = S2 + _ = TRM1 + _ = TRM2 + _ = Z + _ = ZTA + _ = Z3 + + TTH = 6.66666666666666667e-01 + C1 = 3.55028053887817240e-01 + C2 = 2.58819403792806799e-01 + COEF = 1.83776298473930683e-01 + ZEROR = 0 + ZEROI = 0 + CONER = 1 + CONEI = 0 + + NZ = 0 + if ID < 0 || ID > 1 { + IERR = 1 + } + if KODE < 1 || KODE > 2 { + IERR = 1 + } + if IERR != 0 { + return + } + AZ = cmplx.Abs(complex(ZR, ZI)) + TOL = math.Max(dmach[4], 1.0e-18) + FID = float64(ID) + if AZ > 1.0e0 { + goto Seventy + } + + // POWER SERIES FOR CABS(Z)<=1. 
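+	// For |z| <= 1 this is the classical power series
+	// Ai(z) = c1*f(z) - c2*g(z), with c1 = Ai(0) ≈ 0.35502805 and
+	// c2 = -Ai'(0) ≈ 0.25881940 (the constants C1 and C2 above); on ID=0, S1
+	// accumulates f(z) and z*S2 accumulates g(z), while for ID=1 the FID terms
+	// shift the denominators so the analogous series for dAi/dz is summed.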
+ S1R = CONER + S1I = CONEI + S2R = CONER + S2I = CONEI + if AZ < TOL { + goto OneSeventy + } + AA = AZ * AZ + if AA < TOL/AZ { + goto Forty + } + TRM1R = CONER + TRM1I = CONEI + TRM2R = CONER + TRM2I = CONEI + ATRM = 1.0e0 + STR = ZR*ZR - ZI*ZI + STI = ZR*ZI + ZI*ZR + Z3R = STR*ZR - STI*ZI + Z3I = STR*ZI + STI*ZR + AZ3 = AZ * AA + AK = 2.0e0 + FID + BK = 3.0e0 - FID - FID + CK = 4.0e0 - FID + DK = 3.0e0 + FID + FID + D1 = AK * DK + D2 = BK * CK + AD = math.Min(D1, D2) + AK = 24.0e0 + 9.0e0*FID + BK = 30.0e0 - 9.0e0*FID + for K = 1; K <= 25; K++ { + STR = (TRM1R*Z3R - TRM1I*Z3I) / D1 + TRM1I = (TRM1R*Z3I + TRM1I*Z3R) / D1 + TRM1R = STR + S1R = S1R + TRM1R + S1I = S1I + TRM1I + STR = (TRM2R*Z3R - TRM2I*Z3I) / D2 + TRM2I = (TRM2R*Z3I + TRM2I*Z3R) / D2 + TRM2R = STR + S2R = S2R + TRM2R + S2I = S2I + TRM2I + ATRM = ATRM * AZ3 / AD + D1 = D1 + AK + D2 = D2 + BK + AD = math.Min(D1, D2) + if ATRM < TOL*AD { + goto Forty + } + AK = AK + 18.0e0 + BK = BK + 18.0e0 + } +Forty: + if ID == 1 { + goto Fifty + } + AIR = S1R*C1 - C2*(ZR*S2R-ZI*S2I) + AII = S1I*C1 - C2*(ZR*S2I+ZI*S2R) + if KODE == 1 { + return + } + tmp = cmplx.Sqrt(complex(ZR, ZI)) + STR = real(tmp) + STI = imag(tmp) + ZTAR = TTH * (ZR*STR - ZI*STI) + ZTAI = TTH * (ZR*STI + ZI*STR) + tmp = cmplx.Exp(complex(ZTAR, ZTAI)) + STR = real(tmp) + STI = imag(tmp) + PTR = AIR*STR - AII*STI + AII = AIR*STI + AII*STR + AIR = PTR + return + +Fifty: + AIR = -S2R * C2 + AII = -S2I * C2 + if AZ <= TOL { + goto Sixty + } + STR = ZR*S1R - ZI*S1I + STI = ZR*S1I + ZI*S1R + CC = C1 / (1.0e0 + FID) + AIR = AIR + CC*(STR*ZR-STI*ZI) + AII = AII + CC*(STR*ZI+STI*ZR) + +Sixty: + if KODE == 1 { + return + } + tmp = cmplx.Sqrt(complex(ZR, ZI)) + STR = real(tmp) + STI = imag(tmp) + ZTAR = TTH * (ZR*STR - ZI*STI) + ZTAI = TTH * (ZR*STI + ZI*STR) + tmp = cmplx.Exp(complex(ZTAR, ZTAI)) + STR = real(tmp) + STI = imag(tmp) + PTR = STR*AIR - STI*AII + AII = STR*AII + STI*AIR + AIR = PTR + return + + // CASE FOR CABS(Z)>1.0. +Seventy: + FNU = (1.0e0 + FID) / 3.0e0 + + /* + SET PARAMETERS RELATED TO MACHINE CONSTANTS. + TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0D-18. + ELIM IS THE APPROXIMATE EXPONENTIAL OVER-&&UNDERFLOW LIMIT. + EXP(-ELIM)EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR + UNDERFLOW&&OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. + RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LA>=Z. + DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). + */ + K1 = imach[15] + K2 = imach[16] + R1M5 = dmach[5] + + K = min(abs(K1), abs(K2)) + ELIM = 2.303e0 * (float64(K)*R1M5 - 3.0e0) + K1 = imach[14] - 1 + AA = R1M5 * float64(K1) + DIG = math.Min(AA, 18.0e0) + AA = AA * 2.303e0 + ALIM = ELIM + math.Max(-AA, -41.45e0) + RL = 1.2e0*DIG + 3.0e0 + ALAZ = math.Log(AZ) + + // TEST FOR PROPER RANGE. + AA = 0.5e0 / TOL + BB = float64(float32(imach[9])) * 0.5e0 + AA = math.Min(AA, BB) + AA = math.Pow(AA, TTH) + if AZ > AA { + goto TwoSixty + } + AA = math.Sqrt(AA) + if AZ > AA { + IERR = 3 + } + tmp = cmplx.Sqrt(complex(ZR, ZI)) + CSQR = real(tmp) + CSQI = imag(tmp) + ZTAR = TTH * (ZR*CSQR - ZI*CSQI) + ZTAI = TTH * (ZR*CSQI + ZI*CSQR) + + // RE(ZTA)<=0 WHEN RE(Z)<0, ESPECIALLY WHEN IM(Z) IS SMALL. + IFLAG = 0 + SFAC = 1.0e0 + AK = ZTAI + if ZR >= 0.0e0 { + goto Eighty + } + BK = ZTAR + CK = -math.Abs(BK) + ZTAR = CK + ZTAI = AK + +Eighty: + if ZI != 0.0e0 { + goto Ninety + } + if ZR > 0.0e0 { + goto Ninety + } + ZTAR = 0.0e0 + ZTAI = AK +Ninety: + AA = ZTAR + if AA >= 0.0e0 && ZR > 0.0e0 { + goto OneTen + } + if KODE == 2 { + goto OneHundred + } + + // OVERFLOW TEST. 
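+	// Here AA = Re(zta) with zta = (2/3)*z^(3/2); Ai is built from a K Bessel
+	// function of order 1/3 or 2/3, which carries a factor like exp(-zta), so a
+	// strongly negative Re(zta) would overflow the unscaled result. When -Re(zta)
+	// exceeds ALIM the code switches to scaled arithmetic (IFLAG=1, SFAC=TOL);
+	// if, after the 0.25*log|z| correction, it also exceeds ELIM, the routine
+	// gives up below with NZ=0 and IERR=2.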
+ if AA > (-ALIM) { + goto OneHundred + } + AA = -AA + 0.25e0*ALAZ + IFLAG = 1 + SFAC = TOL + if AA > ELIM { + goto TwoSeventy + } + +OneHundred: + // CBKNU AND CACON return EXP(ZTA)*K(FNU,ZTA) ON KODE=2. + MR = 1 + if ZI < 0.0e0 { + MR = -1 + } + ZTAR, ZTAI, FNU, KODE, MR, _, CYR, CYI, NN, RL, TOL, ELIM, ALIM = Zacai(ZTAR, ZTAI, FNU, KODE, MR, 1, CYR, CYI, NN, RL, TOL, ELIM, ALIM) + if NN < 0 { + goto TwoEighty + } + NZ = NZ + NN + goto OneThirty + +OneTen: + if KODE == 2 { + goto OneTwenty + } + + // UNDERFLOW TEST. + if AA < ALIM { + goto OneTwenty + } + AA = -AA - 0.25e0*ALAZ + IFLAG = 2 + SFAC = 1.0e0 / TOL + if AA < (-ELIM) { + goto TwoTen + } +OneTwenty: + ZTAR, ZTAI, FNU, KODE, _, CYR, CYI, NZ, TOL, ELIM, ALIM = Zbknu(ZTAR, ZTAI, FNU, KODE, 1, CYR, CYI, NZ, TOL, ELIM, ALIM) + +OneThirty: + S1R = CYR[1] * COEF + S1I = CYI[1] * COEF + if IFLAG != 0 { + goto OneFifty + } + if ID == 1 { + goto OneFourty + } + AIR = CSQR*S1R - CSQI*S1I + AII = CSQR*S1I + CSQI*S1R + return +OneFourty: + AIR = -(ZR*S1R - ZI*S1I) + AII = -(ZR*S1I + ZI*S1R) + return +OneFifty: + S1R = S1R * SFAC + S1I = S1I * SFAC + if ID == 1 { + goto OneSixty + } + STR = S1R*CSQR - S1I*CSQI + S1I = S1R*CSQI + S1I*CSQR + S1R = STR + AIR = S1R / SFAC + AII = S1I / SFAC + return +OneSixty: + STR = -(S1R*ZR - S1I*ZI) + S1I = -(S1R*ZI + S1I*ZR) + S1R = STR + AIR = S1R / SFAC + AII = S1I / SFAC + return +OneSeventy: + AA = 1.0e+3 * dmach[1] + S1R = ZEROR + S1I = ZEROI + if ID == 1 { + goto OneNinety + } + if AZ <= AA { + goto OneEighty + } + S1R = C2 * ZR + S1I = C2 * ZI +OneEighty: + AIR = C1 - S1R + AII = -S1I + return +OneNinety: + AIR = -C2 + AII = 0.0e0 + AA = math.Sqrt(AA) + if AZ <= AA { + goto TwoHundred + } + S1R = 0.5e0 * (ZR*ZR - ZI*ZI) + S1I = ZR * ZI +TwoHundred: + AIR = AIR + C1*S1R + AII = AII + C1*S1I + return +TwoTen: + NZ = 1 + AIR = ZEROR + AII = ZEROI + return +TwoSeventy: + NZ = 0 + IERR = 2 + return +TwoEighty: + if NN == (-1) { + goto TwoSeventy + } + NZ = 0 + IERR = 5 + return +TwoSixty: + IERR = 4 + NZ = 0 + return +} + +// sbknu computes the k bessel function in the right half z plane. +func Zbknu(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, TOL, ELIM, ALIM float64) (ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, TOLout, ELIMout, ALIMout float64) { + /* Old dimension comment. + DIMENSION YR(N), YI(N), CC(8), CSSR(3), CSRR(3), BRY(3), CYR(2), + * CYI(2) + */ + + // TODO(btracey): Find which of these are inputs/outputs/both and clean up + // the function call. 
+ // YR and YI have length n (but n+1 with better indexing) + var AA, AK, ASCLE, A1, A2, BB, BK, CAZ, + CBI, CBR, CCHI, CCHR, CKI, CKR, COEFI, COEFR, CONEI, CONER, + CRSCR, CSCLR, CSHI, CSHR, CSI, CSR, CTWOR, + CZEROI, CZEROR, CZI, CZR, DNU, DNU2, DPI, ETEST, FC, FHS, + FI, FK, FKS, FMUI, FMUR, FPI, FR, G1, G2, HPI, PI, PR, PTI, + PTR, P1I, P1R, P2I, P2M, P2R, QI, QR, RAK, RCAZ, RTHPI, RZI, + RZR, R1, S, SMUI, SMUR, SPI, STI, STR, S1I, S1R, S2I, S2R, TM, + TTH, T1, T2, ELM, CELMR, ZDR, ZDI, AS, ALAS, HELIM float64 + + var I, IFLAG, INU, K, KFLAG, KK, KMAX, KODED, IDUM, J, IC, INUB, NW int + + var sinh, cosh complex128 + //var sin, cos float64 + + var tmp, p complex128 + var CSSR, CSRR, BRY [4]float64 + var CYR, CYI [3]float64 + + KMAX = 30 + CZEROR = 0 + CZEROI = 0 + CONER = 1 + CONEI = 0 + CTWOR = 2 + R1 = 2 + + DPI = 3.14159265358979324e0 + RTHPI = 1.25331413731550025e0 + SPI = 1.90985931710274403e0 + HPI = 1.57079632679489662e0 + FPI = 1.89769999331517738e0 + TTH = 6.66666666666666666e-01 + + CC := [9]float64{math.NaN(), 5.77215664901532861e-01, -4.20026350340952355e-02, + -4.21977345555443367e-02, 7.21894324666309954e-03, + -2.15241674114950973e-04, -2.01348547807882387e-05, + 1.13302723198169588e-06, 6.11609510448141582e-09} + + CAZ = cmplx.Abs(complex(ZR, ZI)) + CSCLR = 1.0e0 / TOL + CRSCR = TOL + CSSR[1] = CSCLR + CSSR[2] = 1.0e0 + CSSR[3] = CRSCR + CSRR[1] = CRSCR + CSRR[2] = 1.0e0 + CSRR[3] = CSCLR + BRY[1] = 1.0e+3 * dmach[1] / TOL + BRY[2] = 1.0e0 / BRY[1] + BRY[3] = dmach[2] + NZ = 0 + IFLAG = 0 + KODED = KODE + RCAZ = 1.0e0 / CAZ + STR = ZR * RCAZ + STI = -ZI * RCAZ + RZR = (STR + STR) * RCAZ + RZI = (STI + STI) * RCAZ + INU = int(float32(FNU + 0.5)) + DNU = FNU - float64(INU) + if math.Abs(DNU) == 0.5e0 { + goto OneTen + } + DNU2 = 0.0e0 + if math.Abs(DNU) > TOL { + DNU2 = DNU * DNU + } + if CAZ > R1 { + goto OneTen + } + + // SERIES FOR CABS(Z)<=R1. + FC = 1.0e0 + tmp = cmplx.Log(complex(RZR, RZI)) + SMUR = real(tmp) + SMUI = imag(tmp) + FMUR = SMUR * DNU + FMUI = SMUI * DNU + tmp = complex(FMUR, FMUI) + sinh = cmplx.Sinh(tmp) + cosh = cmplx.Cosh(tmp) + CSHR = real(sinh) + CSHI = imag(sinh) + CCHR = real(cosh) + CCHI = imag(cosh) + if DNU == 0.0e0 { + goto Ten + } + FC = DNU * DPI + FC = FC / math.Sin(FC) + SMUR = CSHR / DNU + SMUI = CSHI / DNU +Ten: + A2 = 1.0e0 + DNU + + // GAM(1-Z)*GAM(1+Z)=PI*Z/SIN(PI*Z), T1=1/GAM(1-DNU), T2=1/GAM(1+DNU). + T2 = math.Exp(-dgamln(A2, IDUM)) + T1 = 1.0e0 / (T2 * FC) + if math.Abs(DNU) > 0.1e0 { + goto Forty + } + + // SERIES FOR F0 TO RESOLVE INDETERMINACY FOR SMALL ABS(DNU). + AK = 1.0e0 + S = CC[1] + for K = 2; K <= 8; K++ { + AK = AK * DNU2 + TM = CC[K] * AK + S = S + TM + if math.Abs(TM) < TOL { + goto Thirty + } + } +Thirty: + G1 = -S + goto Fifty +Forty: + G1 = (T1 - T2) / (DNU + DNU) +Fifty: + G2 = (T1 + T2) * 0.5e0 + FR = FC * (CCHR*G1 + SMUR*G2) + FI = FC * (CCHI*G1 + SMUI*G2) + tmp = cmplx.Exp(complex(FMUR, FMUI)) + STR = real(tmp) + STI = imag(tmp) + PR = 0.5e0 * STR / T2 + PI = 0.5e0 * STI / T2 + tmp = complex(0.5, 0) / complex(STR, STI) + PTR = real(tmp) + PTI = imag(tmp) + QR = PTR / T1 + QI = PTI / T1 + S1R = FR + S1I = FI + S2R = PR + S2I = PI + AK = 1.0e0 + A1 = 1.0e0 + CKR = CONER + CKI = CONEI + BK = 1.0e0 - DNU2 + if INU > 0 || N > 1 { + goto Eighty + } + + // GENERATE K(FNU,Z), 0.0E0 <= FNU < 0.5E0 AND N=1. 
+ if CAZ < TOL { + goto Seventy + } + tmp = complex(ZR, ZI) * complex(ZR, ZI) + CZR = real(tmp) + CZI = imag(tmp) + CZR = 0.25e0 * CZR + CZI = 0.25e0 * CZI + T1 = 0.25e0 * CAZ * CAZ +Sixty: + FR = (FR*AK + PR + QR) / BK + FI = (FI*AK + PI + QI) / BK + STR = 1.0e0 / (AK - DNU) + PR = PR * STR + PI = PI * STR + STR = 1.0e0 / (AK + DNU) + QR = QR * STR + QI = QI * STR + STR = CKR*CZR - CKI*CZI + RAK = 1.0e0 / AK + CKI = (CKR*CZI + CKI*CZR) * RAK + CKR = STR * RAK + S1R = CKR*FR - CKI*FI + S1R + S1I = CKR*FI + CKI*FR + S1I + A1 = A1 * T1 * RAK + BK = BK + AK + AK + 1.0e0 + AK = AK + 1.0e0 + if A1 > TOL { + goto Sixty + } +Seventy: + YR[1] = S1R + YI[1] = S1I + if KODED == 1 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + tmp = cmplx.Exp(complex(ZR, ZI)) + STR = real(tmp) + STI = imag(tmp) + tmp = complex(S1R, S1I) * complex(STR, STI) + YR[1] = real(tmp) + YI[1] = imag(tmp) + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + + // GENERATE K(DNU,Z) AND K(DNU+1,Z) FOR FORWARD RECURRENCE. +Eighty: + if CAZ < TOL { + goto OneHundred + } + tmp = complex(ZR, ZI) * complex(ZR, ZI) + CZR = real(tmp) + CZI = imag(tmp) + CZR = 0.25e0 * CZR + CZI = 0.25e0 * CZI + T1 = 0.25e0 * CAZ * CAZ +Ninety: + FR = (FR*AK + PR + QR) / BK + FI = (FI*AK + PI + QI) / BK + STR = 1.0e0 / (AK - DNU) + PR = PR * STR + PI = PI * STR + STR = 1.0e0 / (AK + DNU) + QR = QR * STR + QI = QI * STR + STR = CKR*CZR - CKI*CZI + RAK = 1.0e0 / AK + CKI = (CKR*CZI + CKI*CZR) * RAK + CKR = STR * RAK + S1R = CKR*FR - CKI*FI + S1R + S1I = CKR*FI + CKI*FR + S1I + STR = PR - FR*AK + STI = PI - FI*AK + S2R = CKR*STR - CKI*STI + S2R + S2I = CKR*STI + CKI*STR + S2I + A1 = A1 * T1 * RAK + BK = BK + AK + AK + 1.0e0 + AK = AK + 1.0e0 + if A1 > TOL { + goto Ninety + } +OneHundred: + KFLAG = 2 + A1 = FNU + 1.0e0 + AK = A1 * math.Abs(SMUR) + if AK > ALIM { + KFLAG = 3 + } + STR = CSSR[KFLAG] + P2R = S2R * STR + P2I = S2I * STR + tmp = complex(P2R, P2I) * complex(RZR, RZI) + S2R = real(tmp) + S2I = imag(tmp) + S1R = S1R * STR + S1I = S1I * STR + if KODED == 1 { + goto TwoTen + } + tmp = cmplx.Exp(complex(ZR, ZI)) + FR = real(tmp) + FI = imag(tmp) + tmp = complex(S1R, S1I) * complex(FR, FI) + S1R = real(tmp) + S1I = imag(tmp) + tmp = complex(S2R, S2I) * complex(FR, FI) + S2R = real(tmp) + S2I = imag(tmp) + goto TwoTen + + // IFLAG=0 MEANS NO UNDERFLOW OCCURRED + // IFLAG=1 MEANS AN UNDERFLOW OCCURRED- COMPUTATION PROCEEDS WITH + // KODED=2 AND A TEST FOR ON SCALE VALUES IS MADE DURING FORWARD RECURSION +OneTen: + tmp = cmplx.Sqrt(complex(ZR, ZI)) + STR = real(tmp) + STI = imag(tmp) + tmp = complex(RTHPI, CZEROI) / complex(STR, STI) + COEFR = real(tmp) + COEFI = imag(tmp) + KFLAG = 2 + if KODED == 2 { + goto OneTwenty + } + if ZR > ALIM { + goto TwoNinety + } + + STR = math.Exp(-ZR) * CSSR[KFLAG] + //sin, cos = math.Sincos(ZI) + STI = -STR * math.Sin(ZI) + STR = STR * math.Cos(ZI) + tmp = complex(COEFR, COEFI) * complex(STR, STI) + COEFR = real(tmp) + COEFI = imag(tmp) +OneTwenty: + if math.Abs(DNU) == 0.5e0 { + goto ThreeHundred + } + // MILLER ALGORITHM FOR CABS(Z)>R1. + AK = math.Cos(DPI * DNU) + AK = math.Abs(AK) + if AK == CZEROR { + goto ThreeHundred + } + FHS = math.Abs(0.25e0 - DNU2) + if FHS == CZEROR { + goto ThreeHundred + } + + // COMPUTE R2=F(E). if CABS(Z)>=R2, USE FORWARD RECURRENCE TO + // DETERMINE THE BACKWARD INDEX K. R2=F(E) IS A STRAIGHT LINE ON + // 12<=E<=60. E IS COMPUTED FROM 2**(-E)=B**(1-I1MACH(14))= + // TOL WHERE B IS THE BASE OF THE ARITHMETIC. 
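+	// Worked values for IEEE 754 double precision (imach[14] = 53, dmach[5] =
+	// log10(2)): T1 = 52*log10(2)*log2(10) ≈ 52, already inside [12, 60], so the
+	// crossover below is T2 = (2/3)*52 - 6 ≈ 28.7; for |z| at or above roughly
+	// that value the forward recurrence loop determines the backward index K.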
+ T1 = float64(imach[14] - 1) + T1 = T1 * dmach[5] * 3.321928094e0 + T1 = math.Max(T1, 12.0e0) + T1 = math.Min(T1, 60.0e0) + T2 = TTH*T1 - 6.0e0 + if ZR != 0.0e0 { + goto OneThirty + } + T1 = HPI + goto OneFourty +OneThirty: + T1 = math.Atan(ZI / ZR) + T1 = math.Abs(T1) +OneFourty: + if T2 > CAZ { + goto OneSeventy + } + // FORWARD RECURRENCE LOOP WHEN CABS(Z)>=R2. + ETEST = AK / (DPI * CAZ * TOL) + FK = CONER + if ETEST < CONER { + goto OneEighty + } + FKS = CTWOR + CKR = CAZ + CAZ + CTWOR + P1R = CZEROR + P2R = CONER + for I = 1; I <= KMAX; I++ { + AK = FHS / FKS + CBR = CKR / (FK + CONER) + PTR = P2R + P2R = CBR*P2R - P1R*AK + P1R = PTR + CKR = CKR + CTWOR + FKS = FKS + FK + FK + CTWOR + FHS = FHS + FK + FK + FK = FK + CONER + STR = math.Abs(P2R) * FK + if ETEST < STR { + goto OneSixty + } + } + goto ThreeTen +OneSixty: + FK = FK + SPI*T1*math.Sqrt(T2/CAZ) + FHS = math.Abs(0.25 - DNU2) + goto OneEighty +OneSeventy: + // COMPUTE BACKWARD INDEX K FOR CABS(Z) 0 || N > 1 { + goto TwoHundred + } + ZDR = ZR + ZDI = ZI + if IFLAG == 1 { + goto TwoSeventy + } + goto TwoFourty +TwoHundred: + // COMPUTE P1/P2=(P1/CABS(P2)*CONJG(P2)/CABS(P2) FOR SCALING. + TM = cmplx.Abs(complex(P2R, P2I)) + PTR = 1.0e0 / TM + P1R = P1R * PTR + P1I = P1I * PTR + P2R = P2R * PTR + P2I = -P2I * PTR + tmp = complex(P1R, P1I) * complex(P2R, P2I) + PTR = real(tmp) + PTI = imag(tmp) + STR = DNU + 0.5e0 - PTR + STI = -PTI + tmp = complex(STR, STI) / complex(ZR, ZI) + STR = real(tmp) + STI = imag(tmp) + STR = STR + 1.0e0 + tmp = complex(STR, STI) * complex(S1R, S1I) + S2R = real(tmp) + S2I = imag(tmp) + + // FORWARD RECURSION ON THE THREE TERM RECURSION WITH RELATION WITH + // SCALING NEAR EXPONENT EXTREMES ON KFLAG=1 OR KFLAG=3 +TwoTen: + STR = DNU + 1.0e0 + CKR = STR * RZR + CKI = STR * RZI + if N == 1 { + INU = INU - 1 + } + if INU > 0 { + goto TwoTwenty + } + if N > 1 { + goto TwoFifteen + } + S1R = S2R + S1I = S2I +TwoFifteen: + ZDR = ZR + ZDI = ZI + if IFLAG == 1 { + goto TwoSeventy + } + goto TwoFourty +TwoTwenty: + INUB = 1 + if IFLAG == 1 { + goto TwoSixtyOne + } +TwoTwentyFive: + P1R = CSRR[KFLAG] + ASCLE = BRY[KFLAG] + for I = INUB; I <= INU; I++ { + STR = S2R + STI = S2I + S2R = CKR*STR - CKI*STI + S1R + S2I = CKR*STI + CKI*STR + S1I + S1R = STR + S1I = STI + CKR = CKR + RZR + CKI = CKI + RZI + if KFLAG >= 3 { + continue + } + P2R = S2R * P1R + P2I = S2I * P1R + STR = math.Abs(P2R) + STI = math.Abs(P2I) + P2M = math.Max(STR, STI) + if P2M <= ASCLE { + continue + } + KFLAG = KFLAG + 1 + ASCLE = BRY[KFLAG] + S1R = S1R * P1R + S1I = S1I * P1R + S2R = P2R + S2I = P2I + STR = CSSR[KFLAG] + S1R = S1R * STR + S1I = S1I * STR + S2R = S2R * STR + S2I = S2I * STR + P1R = CSRR[KFLAG] + } + if N != 1 { + goto TwoFourty + } + S1R = S2R + S1I = S2I +TwoFourty: + STR = CSRR[KFLAG] + YR[1] = S1R * STR + YI[1] = S1I * STR + if N == 1 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + YR[2] = S2R * STR + YI[2] = S2I * STR + if N == 2 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + KK = 2 +TwoFifty: + KK = KK + 1 + if KK > N { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + P1R = CSRR[KFLAG] + ASCLE = BRY[KFLAG] + for I = KK; I <= N; I++ { + P2R = S2R + P2I = S2I + S2R = CKR*P2R - CKI*P2I + S1R + S2I = CKI*P2R + CKR*P2I + S1I + S1R = P2R + S1I = P2I + CKR = CKR + RZR + CKI = CKI + RZI + P2R = S2R * P1R + P2I = S2I * P1R + YR[I] = P2R + YI[I] = P2I + if KFLAG >= 3 { + continue + } + STR = math.Abs(P2R) + STI = math.Abs(P2I) + P2M = math.Max(STR, STI) + if P2M <= ASCLE { + 
continue + } + KFLAG = KFLAG + 1 + ASCLE = BRY[KFLAG] + S1R = S1R * P1R + S1I = S1I * P1R + S2R = P2R + S2I = P2I + STR = CSSR[KFLAG] + S1R = S1R * STR + S1I = S1I * STR + S2R = S2R * STR + S2I = S2I * STR + P1R = CSRR[KFLAG] + } + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + + // IFLAG=1 CASES, FORWARD RECURRENCE ON SCALED VALUES ON UNDERFLOW. +TwoSixtyOne: + HELIM = 0.5e0 * ELIM + ELM = math.Exp(-ELIM) + CELMR = ELM + ASCLE = BRY[1] + ZDR = ZR + ZDI = ZI + IC = -1 + J = 2 + for I = 1; I <= INU; I++ { + STR = S2R + STI = S2I + S2R = STR*CKR - STI*CKI + S1R + S2I = STI*CKR + STR*CKI + S1I + S1R = STR + S1I = STI + CKR = CKR + RZR + CKI = CKI + RZI + AS = cmplx.Abs(complex(S2R, S2I)) + ALAS = math.Log(AS) + P2R = -ZDR + ALAS + if P2R < (-ELIM) { + goto TwoSixtyThree + } + tmp = cmplx.Log(complex(S2R, S2I)) + STR = real(tmp) + STI = imag(tmp) + P2R = -ZDR + STR + P2I = -ZDI + STI + P2M = math.Exp(P2R) / TOL + // sin, cos = math.Sincos(P2I) + P1R = P2M * math.Cos(P2I) + P1I = P2M * math.Sin(P2I) + p = complex(P1R, P1I) + NW = Zuchk(p, ASCLE, TOL) + if NW != 0 { + goto TwoSixtyThree + } + J = 3 - J + CYR[J] = P1R + CYI[J] = P1I + if IC == (I - 1) { + goto TwoSixtyFour + } + IC = I + continue + TwoSixtyThree: + if ALAS < HELIM { + continue + } + ZDR = ZDR - ELIM + S1R = S1R * CELMR + S1I = S1I * CELMR + S2R = S2R * CELMR + S2I = S2I * CELMR + } + if N != 1 { + goto TwoSeventy + } + S1R = S2R + S1I = S2I + goto TwoSeventy +TwoSixtyFour: + KFLAG = 1 + INUB = I + 1 + S2R = CYR[J] + S2I = CYI[J] + J = 3 - J + S1R = CYR[J] + S1I = CYI[J] + if INUB <= INU { + goto TwoTwentyFive + } + if N != 1 { + goto TwoFourty + } + S1R = S2R + S1I = S2I + goto TwoFourty +TwoSeventy: + YR[1] = S1R + YI[1] = S1I + if N == 1 { + goto TwoEighty + } + YR[2] = S2R + YI[2] = S2I +TwoEighty: + ASCLE = BRY[1] + ZDR, ZDI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM = Zkscl(ZDR, ZDI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM) + INU = N - NZ + if INU <= 0 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + KK = NZ + 1 + S1R = YR[KK] + S1I = YI[KK] + YR[KK] = S1R * CSRR[1] + YI[KK] = S1I * CSRR[1] + if INU == 1 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + KK = NZ + 2 + S2R = YR[KK] + S2I = YI[KK] + YR[KK] = S2R * CSRR[1] + YI[KK] = S2I * CSRR[1] + if INU == 2 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM + } + T2 = FNU + float64(float32(KK-1)) + CKR = T2 * RZR + CKI = T2 * RZI + KFLAG = 1 + goto TwoFifty +TwoNinety: + + // SCALE BY math.Exp(Z), IFLAG = 1 CASES. + + KODED = 2 + IFLAG = 1 + KFLAG = 2 + goto OneTwenty + + // FNU=HALF ODD INTEGER CASE, DNU=-0.5 +ThreeHundred: + S1R = COEFR + S1I = COEFI + S2R = COEFR + S2I = COEFI + goto TwoTen + +ThreeTen: + NZ = -2 + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM +} + +// SET K FUNCTIONS TO ZERO ON UNDERFLOW, CONTINUE RECURRENCE +// ON SCALED FUNCTIONS UNTIL TWO MEMBERS COME ON SCALE, THEN +// return WITH MIN(NZ+2,N) VALUES SCALED BY 1/TOL. 
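+// In this port, when log|S2| reaches ELIM/2 during the scaled recurrence the
+// running values are multiplied by exp(-ELIM) (CELMR below) and the effective
+// argument ZD is shifted by ELIM, so the recursion can continue without
+// overflowing before the values come back on scale.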
+func Zkscl(ZRR, ZRI, FNU float64, N int, YR, YI []float64, NZ int, RZR, RZI, ASCLE, TOL, ELIM float64) ( + ZRRout, ZRIout, FNUout float64, Nout int, YRout, YIout []float64, NZout int, RZRout, RZIout, ASCLEout, TOLout, ELIMout float64) { + var ACS, AS, CKI, CKR, CSI, CSR, FN, STR, S1I, S1R, S2I, + S2R, ZEROI, ZEROR, ZDR, ZDI, CELMR, ELM, HELIM, ALAS float64 + + var I, IC, KK, NN, NW int + var tmp, c complex128 + var CYR, CYI [3]float64 + var sin, cos float64 + + // DIMENSION YR(N), YI(N), CYR(2), CYI(2) + ZEROR = 0 + ZEROI = 0 + NZ = 0 + IC = 0 + NN = min(2, N) + for I = 1; I <= NN; I++ { + S1R = YR[I] + S1I = YI[I] + CYR[I] = S1R + CYI[I] = S1I + AS = cmplx.Abs(complex(S1R, S1I)) + ACS = -ZRR + math.Log(AS) + NZ = NZ + 1 + YR[I] = ZEROR + YI[I] = ZEROI + if ACS < (-ELIM) { + continue + } + + tmp = cmplx.Log(complex(S1R, S1I)) + CSR = real(tmp) + CSI = imag(tmp) + CSR = CSR - ZRR + CSI = CSI - ZRI + STR = math.Exp(CSR) / TOL + // sin, cos = math.Sincos(CSI) + CSR = STR * math.Cos(CSI) + CSI = STR * math.Sin(CSI) + c = complex(CSR, CSI) + NW = Zuchk(c, ASCLE, TOL) + if NW != 0 { + continue + } + YR[I] = CSR + YI[I] = CSI + IC = I + NZ = NZ - 1 + } + if N == 1 { + return ZRR, ZRI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM + } + if IC > 1 { + goto Twenty + } + YR[1] = ZEROR + YI[1] = ZEROI + NZ = 2 +Twenty: + if N == 2 { + return ZRR, ZRI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM + } + if NZ == 0 { + return ZRR, ZRI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM + } + FN = FNU + 1.0e0 + CKR = FN * RZR + CKI = FN * RZI + S1R = CYR[1] + S1I = CYI[1] + S2R = CYR[2] + S2I = CYI[2] + HELIM = 0.5e0 * ELIM + ELM = math.Exp(-ELIM) + CELMR = ELM + ZDR = ZRR + ZDI = ZRI + + // FIND TWO CONSECUTIVE Y VALUES ON SCALE. SCALE RECURRENCE IF + // S2 GETS LARGER THAN EXP(ELIM/2) + for I = 3; I <= N; I++ { + KK = I + CSR = S2R + CSI = S2I + S2R = CKR*CSR - CKI*CSI + S1R + S2I = CKI*CSR + CKR*CSI + S1I + S1R = CSR + S1I = CSI + CKR = CKR + RZR + CKI = CKI + RZI + AS = cmplx.Abs(complex(S2R, S2I)) + ALAS = math.Log(AS) + ACS = -ZDR + ALAS + NZ = NZ + 1 + YR[I] = ZEROR + YI[I] = ZEROI + if ACS < (-ELIM) { + goto TwentyFive + } + tmp = cmplx.Log(complex(S2R, S2I)) + CSR = real(tmp) + CSI = imag(tmp) + CSR = CSR - ZDR + CSI = CSI - ZDI + STR = math.Exp(CSR) / TOL + sin, cos = math.Sincos(CSI) + CSR = STR * cos + CSI = STR * sin + c = complex(CSR, CSI) + NW = Zuchk(c, ASCLE, TOL) + if NW != 0 { + goto TwentyFive + } + YR[I] = CSR + YI[I] = CSI + NZ = NZ - 1 + if IC == KK-1 { + goto Forty + } + IC = KK + continue + TwentyFive: + if ALAS < HELIM { + continue + } + ZDR = ZDR - ELIM + S1R = S1R * CELMR + S1I = S1I * CELMR + S2R = S2R * CELMR + S2I = S2I * CELMR + } + NZ = N + if IC == N { + NZ = N - 1 + } + goto FourtyFive +Forty: + NZ = KK - 2 +FourtyFive: + for I = 1; I <= NZ; I++ { + YR[I] = ZEROR + YI[I] = ZEROI + } + return ZRR, ZRI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM +} + +// Zuchk tests whether the magnitude of the real or imaginary part would +// underflow when y is scaled by tol. +// +// y enters as a scaled quantity whose magnitude is greater than +// 1e3 + 3*dmach(1)/tol +// y is accepted if the underflow is at least one precision below the magnitude +// of the largest component. Otherwise an underflow is assumed as the phase angle +// does not have sufficient accuracy. 
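+//
+// For example (illustrative values only), with scale = 1e-250 and tol = 1e-14,
+// y = 3e-255 + 1e-256i is reported as an underflow (both parts are tiny and
+// within a factor 1/tol of each other, so 1 is returned), while
+// y = 1e-200 + 1e-300i is accepted (0 is returned) because the larger
+// component still carries full precision.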
+func Zuchk(y complex128, scale, tol float64) int { + absR := math.Abs(real(y)) + absI := math.Abs(imag(y)) + minAbs := math.Min(absR, absI) + if minAbs > scale { + return 0 + } + maxAbs := math.Max(absR, absI) + minAbs /= tol + if maxAbs < minAbs { + return 1 + } + return 0 +} + +// ZACAI APPLIES THE ANALYTIC CONTINUATION FORMULA +// +// K(FNU,ZN*EXP(MP))=K(FNU,ZN)*EXP(-MP*FNU) - MP*I(FNU,ZN) +// MP=PI*MR*CMPLX(0.0,1.0) +// +// TO CONTINUE THE K FUNCTION FROM THE RIGHT HALF TO THE LEFT +// HALF Z PLANE FOR USE WITH ZAIRY WHERE FNU=1/3 OR 2/3 AND N=1. +// ZACAI IS THE SAME AS ZACON WITH THE PARTS FOR LARGER ORDERS AND +// RECURRENCE REMOVED. A RECURSIVE CALL TO ZACON CAN RESULT if ZACON +// IS CALLED FROM ZAIRY. +func Zacai(ZR, ZI, FNU float64, KODE, MR, N int, YR, YI []float64, NZ int, RL, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, MRout, Nout int, YRout, YIout []float64, NZout int, RLout, TOLout, ELIMout, ALIMout float64) { + var ARG, ASCLE, AZ, CSGNR, CSGNI, CSPNR, + CSPNI, C1R, C1I, C2R, C2I, DFNU, FMR, PI, + SGN, YY, ZNR, ZNI float64 + var INU, IUF, NN, NW int + var zn, c1, c2, z complex128 + var y []complex128 + //var sin, cos float64 + + CYR := []float64{math.NaN(), 0, 0} + CYI := []float64{math.NaN(), 0, 0} + + PI = math.Pi + NZ = 0 + ZNR = -ZR + ZNI = -ZI + AZ = cmplx.Abs(complex(ZR, ZI)) + NN = N + DFNU = FNU + float64(float32(N-1)) + if AZ <= 2.0e0 { + goto Ten + } + if AZ*AZ*0.25 > DFNU+1.0e0 { + goto Twenty + } +Ten: + // POWER SERIES FOR THE I FUNCTION. + z = complex(ZNR, ZNI) + y = make([]complex128, len(YR)) + for i, v := range YR { + y[i] = complex(v, YI[i]) + } + NW = Zseri(z, FNU, KODE, NN, y[1:], TOL, ELIM, ALIM) + for i, v := range y { + YR[i] = real(v) + YI[i] = imag(v) + } + goto Forty +Twenty: + if AZ < RL { + goto Thirty + } + // ASYMPTOTIC EXPANSION FOR LARGE Z FOR THE I FUNCTION. + ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, RL, TOL, ELIM, ALIM = Zasyi(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, RL, TOL, ELIM, ALIM) + if NW < 0 { + goto Eighty + } + goto Forty +Thirty: + // MILLER ALGORITHM NORMALIZED BY THE SERIES FOR THE I FUNCTION + ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, TOL = Zmlri(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, TOL) + if NW < 0 { + goto Eighty + } +Forty: + // ANALYTIC CONTINUATION TO THE LEFT HALF PLANE FOR THE K FUNCTION. 
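+	// The Zbknu call below yields c1 = K(fnu, -z); the series or asymptotic value
+	// computed above is c2 = I(fnu, -z). At label Seventy these are combined as
+	// cspn*c1 + csgn*c2, where cspn = ±exp(±i*pi*fnu) and csgn = ±i*pi with signs
+	// taken from MR, which is the continuation formula quoted in the prologue.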
+ ZNR, ZNI, FNU, KODE, _, CYR, CYI, NW, TOL, ELIM, ALIM = Zbknu(ZNR, ZNI, FNU, KODE, 1, CYR, CYI, NW, TOL, ELIM, ALIM) + if NW != 0 { + goto Eighty + } + FMR = float64(float32(MR)) + SGN = -math.Copysign(PI, FMR) + CSGNR = 0.0e0 + CSGNI = SGN + if KODE == 1 { + goto Fifty + } + YY = -ZNI + //sin, cos = math.Sincos(YY) + CSGNR = -CSGNI * math.Sin(YY) + CSGNI = CSGNI * math.Cos(YY) +Fifty: + // CALCULATE CSPN=EXP(FNU*PI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE + // WHEN FNU IS LARGE + INU = int(float32(FNU)) + ARG = (FNU - float64(float32(INU))) * SGN + //sin, cos = math.Sincos(ARG) + CSPNR = math.Cos(ARG) + CSPNI = math.Sin(ARG) + if INU%2 == 0 { + goto Sixty + } + CSPNR = -CSPNR + CSPNI = -CSPNI +Sixty: + C1R = CYR[1] + C1I = CYI[1] + C2R = YR[1] + C2I = YI[1] + if KODE == 1 { + goto Seventy + } + IUF = 0 + ASCLE = 1.0e+3 * dmach[1] / TOL + zn = complex(ZNR, ZNI) + c1 = complex(C1R, C1I) + c2 = complex(C2R, C2I) + c1, c2, NW, IUF = Zs1s2(zn, c1, c2, ASCLE, ALIM, IUF) + C1R = real(c1) + C1I = imag(c1) + C2R = real(c2) + C2I = imag(c2) + NZ = NZ + NW +Seventy: + YR[1] = CSPNR*C1R - CSPNI*C1I + CSGNR*C2R - CSGNI*C2I + YI[1] = CSPNR*C1I + CSPNI*C1R + CSGNR*C2I + CSGNI*C2R + return ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +Eighty: + NZ = -1 + if NW == -2 { + NZ = -2 + } + return ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +} + +// ZASYI COMPUTES THE I BESSEL FUNCTION FOR REAL(Z)>=0.0 BY +// MEANS OF THE ASYMPTOTIC EXPANSION FOR LARGE CABS(Z) IN THE +// REGION CABS(Z)>MAX(RL,FNU*FNU/2). NZ=0 IS A NORMAL return. +// NZ<0 INDICATES AN OVERFLOW ON KODE=1. +func Zasyi(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, RL, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, RLout, TOLout, ELIMout, ALIMout float64) { + var AA, AEZ, AK, AK1I, AK1R, ARG, ARM, ATOL, + AZ, BB, BK, CKI, CKR, CONEI, CONER, CS1I, CS1R, CS2I, CS2R, CZI, + CZR, DFNU, DKI, DKR, DNU2, EZI, EZR, FDN, PI, P1I, + P1R, RAZ, RTPI, RTR1, RZI, RZR, S, SGN, SQK, STI, STR, S2I, + S2R, TZI, TZR, ZEROI, ZEROR float64 + + var I, IB, IL, INU, J, JL, K, KODED, M, NN int + var tmp complex128 + // var sin, cos float64 + + PI = math.Pi + RTPI = 0.159154943091895336e0 + ZEROR = 0 + ZEROI = 0 + CONER = 1 + CONEI = 0 + + NZ = 0 + AZ = cmplx.Abs(complex(ZR, ZI)) + ARM = 1.0e3 * dmach[1] + RTR1 = math.Sqrt(ARM) + IL = min(2, N) + DFNU = FNU + float64(float32(N-IL)) + + // OVERFLOW TEST + RAZ = 1.0e0 / AZ + STR = ZR * RAZ + STI = -ZI * RAZ + AK1R = RTPI * STR * RAZ + AK1I = RTPI * STI * RAZ + tmp = cmplx.Sqrt(complex(AK1R, AK1I)) + AK1R = real(tmp) + AK1I = imag(tmp) + CZR = ZR + CZI = ZI + if KODE != 2 { + goto Ten + } + CZR = ZEROR + CZI = ZI +Ten: + if math.Abs(CZR) > ELIM { + goto OneHundred + } + DNU2 = DFNU + DFNU + KODED = 1 + if (math.Abs(CZR) > ALIM) && (N > 2) { + goto Twenty + } + KODED = 0 + tmp = cmplx.Exp(complex(CZR, CZI)) + STR = real(tmp) + STI = imag(tmp) + tmp = complex(AK1R, AK1I) * complex(STR, STI) + AK1R = real(tmp) + AK1I = imag(tmp) +Twenty: + FDN = 0.0e0 + if DNU2 > RTR1 { + FDN = DNU2 * DNU2 + } + EZR = ZR * 8.0e0 + EZI = ZI * 8.0e0 + + // WHEN Z IS IMAGINARY, THE ERROR TEST MUST BE MADE RELATIVE TO THE + // FIRST RECIPROCAL POWER SINCE THIS IS THE LEADING TERM OF THE + // EXPANSION FOR THE IMAGINARY PART. 
+ AEZ = 8.0e0 * AZ + S = TOL / AEZ + JL = int(float32(RL+RL)) + 2 + P1R = ZEROR + P1I = ZEROI + if ZI == 0.0e0 { + goto Thirty + } + + // CALCULATE EXP(PI*(0.5+FNU+N-IL)*I) TO MINIMIZE LOSSES OF + // SIGNIFICANCE WHEN FNU OR N IS LARGE + INU = int(float32(FNU)) + ARG = (FNU - float64(float32(INU))) * PI + INU = INU + N - IL + //sin, cos = math.Sincos(ARG) + AK = -math.Sin(ARG) + BK = math.Cos(ARG) + if ZI < 0.0e0 { + BK = -BK + } + P1R = AK + P1I = BK + if INU%2 == 0 { + goto Thirty + } + P1R = -P1R + P1I = -P1I +Thirty: + for K = 1; K <= IL; K++ { + SQK = FDN - 1.0e0 + ATOL = S * math.Abs(SQK) + SGN = 1.0e0 + CS1R = CONER + CS1I = CONEI + CS2R = CONER + CS2I = CONEI + CKR = CONER + CKI = CONEI + AK = 0.0e0 + AA = 1.0e0 + BB = AEZ + DKR = EZR + DKI = EZI + // TODO(btracey): This loop is executed tens of thousands of times. Why? + // is that really necessary? + for J = 1; J <= JL; J++ { + tmp = complex(CKR, CKI) / complex(DKR, DKI) + STR = real(tmp) + STI = imag(tmp) + CKR = STR * SQK + CKI = STI * SQK + CS2R = CS2R + CKR + CS2I = CS2I + CKI + SGN = -SGN + CS1R = CS1R + CKR*SGN + CS1I = CS1I + CKI*SGN + DKR = DKR + EZR + DKI = DKI + EZI + AA = AA * math.Abs(SQK) / BB + BB = BB + AEZ + AK = AK + 8.0e0 + SQK = SQK - AK + if AA <= ATOL { + goto Fifty + } + } + goto OneTen + Fifty: + S2R = CS1R + S2I = CS1I + if ZR+ZR >= ELIM { + goto Sixty + } + TZR = ZR + ZR + TZI = ZI + ZI + tmp = cmplx.Exp(complex(-TZR, -TZI)) + STR = real(tmp) + STI = imag(tmp) + tmp = complex(STR, STI) * complex(P1R, P1I) + STR = real(tmp) + STI = imag(tmp) + tmp = complex(STR, STI) * complex(CS2R, CS2I) + STR = real(tmp) + STI = imag(tmp) + S2R = S2R + STR + S2I = S2I + STI + Sixty: + FDN = FDN + 8.0e0*DFNU + 4.0e0 + P1R = -P1R + P1I = -P1I + M = N - IL + K + YR[M] = S2R*AK1R - S2I*AK1I + YI[M] = S2R*AK1I + S2I*AK1R + } + if N <= 2 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM + } + NN = N + K = NN - 2 + AK = float64(float32(K)) + STR = ZR * RAZ + STI = -ZI * RAZ + RZR = (STR + STR) * RAZ + RZI = (STI + STI) * RAZ + IB = 3 + for I = IB; I <= NN; I++ { + YR[K] = (AK+FNU)*(RZR*YR[K+1]-RZI*YI[K+1]) + YR[K+2] + YI[K] = (AK+FNU)*(RZR*YI[K+1]+RZI*YR[K+1]) + YI[K+2] + AK = AK - 1.0e0 + K = K - 1 + } + if KODED == 0 { + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM + } + tmp = cmplx.Exp(complex(CZR, CZI)) + CKR = real(tmp) + CKI = imag(tmp) + for I = 1; I <= NN; I++ { + STR = YR[I]*CKR - YI[I]*CKI + YI[I] = YR[I]*CKI + YI[I]*CKR + YR[I] = STR + } + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +OneHundred: + NZ = -1 + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +OneTen: + NZ = -2 + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +} + +// ZMLRI COMPUTES THE I BESSEL FUNCTION FOR RE(Z)>=0.0 BY THE +// MILLER ALGORITHM NORMALIZED BY A NEUMANN SERIES. 
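+//
+// For order zero the normalizing identity is simply
+// exp(z) = I(0,z) + 2*(I(1,z) + I(2,z) + ...);
+// the general-order sum used below weights the recurrence terms with
+// gamma-function ratios (hence the dgamln calls), and a single division at the
+// end removes the arbitrary scale of the backward recurrence.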
+func Zmlri(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, TOL float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, TOLout float64) { + var ACK, AK, AP, AT, AZ, BK, CKI, CKR, CNORMI, + CNORMR, CONEI, CONER, FKAP, FKK, FLAM, FNF, PTI, PTR, P1I, + P1R, P2I, P2R, RAZ, RHO, RHO2, RZI, RZR, SCLE, STI, STR, SUMI, + SUMR, TFNF, TST, ZEROI, ZEROR float64 + var I, IAZ, IDUM, IFNU, INU, ITIME, K, KK, KM, M int + var tmp complex128 + ZEROR = 0 + ZEROI = 0 + CONER = 1 + CONEI = 0 + + SCLE = dmach[1] / TOL + NZ = 0 + AZ = cmplx.Abs(complex(ZR, ZI)) + IAZ = int(float32(AZ)) + IFNU = int(float32(FNU)) + INU = IFNU + N - 1 + AT = float64(float32(IAZ)) + 1.0e0 + RAZ = 1.0e0 / AZ + STR = ZR * RAZ + STI = -ZI * RAZ + CKR = STR * AT * RAZ + CKI = STI * AT * RAZ + RZR = (STR + STR) * RAZ + RZI = (STI + STI) * RAZ + P1R = ZEROR + P1I = ZEROI + P2R = CONER + P2I = CONEI + ACK = (AT + 1.0e0) * RAZ + RHO = ACK + math.Sqrt(ACK*ACK-1.0e0) + RHO2 = RHO * RHO + TST = (RHO2 + RHO2) / ((RHO2 - 1.0e0) * (RHO - 1.0e0)) + TST = TST / TOL + + // COMPUTE RELATIVE TRUNCATION ERROR INDEX FOR SERIES. + //fmt.Println("before loop", P2R, P2I, CKR, CKI, RZR, RZI, TST, AK) + AK = AT + for I = 1; I <= 80; I++ { + PTR = P2R + PTI = P2I + P2R = P1R - (CKR*PTR - CKI*PTI) + P2I = P1I - (CKI*PTR + CKR*PTI) + P1R = PTR + P1I = PTI + CKR = CKR + RZR + CKI = CKI + RZI + AP = cmplx.Abs(complex(P2R, P2I)) + if AP > TST*AK*AK { + goto Twenty + } + AK = AK + 1.0e0 + } + goto OneTen +Twenty: + I = I + 1 + K = 0 + if INU < IAZ { + goto Forty + } + // COMPUTE RELATIVE TRUNCATION ERROR FOR RATIOS. + P1R = ZEROR + P1I = ZEROI + P2R = CONER + P2I = CONEI + AT = float64(float32(INU)) + 1.0e0 + STR = ZR * RAZ + STI = -ZI * RAZ + CKR = STR * AT * RAZ + CKI = STI * AT * RAZ + ACK = AT * RAZ + TST = math.Sqrt(ACK / TOL) + ITIME = 1 + for K = 1; K <= 80; K++ { + PTR = P2R + PTI = P2I + P2R = P1R - (CKR*PTR - CKI*PTI) + P2I = P1I - (CKR*PTI + CKI*PTR) + P1R = PTR + P1I = PTI + CKR = CKR + RZR + CKI = CKI + RZI + AP = cmplx.Abs(complex(P2R, P2I)) + if AP < TST { + continue + } + if ITIME == 2 { + goto Forty + } + ACK = cmplx.Abs(complex(CKR, CKI)) + FLAM = ACK + math.Sqrt(ACK*ACK-1.0e0) + FKAP = AP / cmplx.Abs(complex(P1R, P1I)) + RHO = math.Min(FLAM, FKAP) + TST = TST * math.Sqrt(RHO/(RHO*RHO-1.0e0)) + ITIME = 2 + } + goto OneTen +Forty: + // BACKWARD RECURRENCE AND SUM NORMALIZING RELATION. + K = K + 1 + KK = max(I+IAZ, K+INU) + FKK = float64(float32(KK)) + P1R = ZEROR + P1I = ZEROI + + // SCALE P2 AND SUM BY SCLE. 
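+	// P2 is seeded with SCLE = dmach[1]/TOL, just above the underflow limit; the
+	// absolute size is immaterial because the result is normalized further down
+	// by the division involving (SUM + P2), so starting tiny leaves headroom for
+	// the backward recurrence to grow.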
+ P2R = SCLE + P2I = ZEROI + FNF = FNU - float64(float32(IFNU)) + TFNF = FNF + FNF + BK = dgamln(FKK+TFNF+1.0e0, IDUM) - dgamln(FKK+1.0e0, IDUM) - dgamln(TFNF+1.0e0, IDUM) + BK = math.Exp(BK) + SUMR = ZEROR + SUMI = ZEROI + KM = KK - INU + for I = 1; I <= KM; I++ { + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZI*PTR+RZR*PTI) + P1R = PTR + P1I = PTI + AK = 1.0e0 - TFNF/(FKK+TFNF) + ACK = BK * AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0e0 + } + YR[N] = P2R + YI[N] = P2I + if N == 1 { + goto Seventy + } + for I = 2; I <= N; I++ { + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZI*PTR+RZR*PTI) + P1R = PTR + P1I = PTI + AK = 1.0e0 - TFNF/(FKK+TFNF) + ACK = BK * AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0e0 + M = N - I + 1 + YR[M] = P2R + YI[M] = P2I + } +Seventy: + if IFNU <= 0 { + goto Ninety + } + for I = 1; I <= IFNU; I++ { + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZR*PTI+RZI*PTR) + P1R = PTR + P1I = PTI + AK = 1.0e0 - TFNF/(FKK+TFNF) + ACK = BK * AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0e0 + } +Ninety: + PTR = ZR + PTI = ZI + if KODE == 2 { + PTR = ZEROR + } + tmp = cmplx.Log(complex(RZR, RZI)) + STR = real(tmp) + STI = imag(tmp) + P1R = -FNF*STR + PTR + P1I = -FNF*STI + PTI + AP = dgamln(1.0e0+FNF, IDUM) + PTR = P1R - AP + PTI = P1I + + // THE DIVISION CEXP(PT)/(SUM+P2) IS ALTERED TO AVOID OVERFLOW + // IN THE DENOMINATOR BY SQUARING LARGE QUANTITIES. + P2R = P2R + SUMR + P2I = P2I + SUMI + AP = cmplx.Abs(complex(P2R, P2I)) + P1R = 1.0e0 / AP + tmp = cmplx.Exp(complex(PTR, PTI)) + STR = real(tmp) + STI = imag(tmp) + CKR = STR * P1R + CKI = STI * P1R + PTR = P2R * P1R + PTI = -P2I * P1R + tmp = complex(CKR, CKI) * complex(PTR, PTI) + CNORMR = real(tmp) + CNORMI = imag(tmp) + for I = 1; I <= N; I++ { + STR = YR[I]*CNORMR - YI[I]*CNORMI + YI[I] = YR[I]*CNORMI + YI[I]*CNORMR + YR[I] = STR + } + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL +OneTen: + NZ = -2 + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL +} + +// Zseri computes the I bessel function for real(z) >= 0 by means of the power +// series for large |z| in the region |z| <= 2*sqrt(fnu+1). +// +// nz = 0 is a normal return. nz > 0 means that the last nz components were set +// to zero due to underflow. nz < 0 means that underflow occurred, but the +// condition |z| <= 2*sqrt(fnu+1) was violated and the computation must be +// completed in another routine with n -= abs(nz). +func Zseri(z complex128, fnu float64, kode, n int, y []complex128, tol, elim, alim float64) (nz int) { + // TODO(btracey): The original fortran line is "ARM = 1.0D+3*D1MACH(1)". Evidently, in Fortran + // this is interpreted as one to the power of +3*D1MACH(1). While it is possible + // this was intentional, it seems unlikely. + arm := 1000 * dmach[1] + az := cmplx.Abs(z) + if az < arm { + for i := 0; i < n; i++ { + y[i] = 0 + } + if fnu == 0 { + y[0] = 1 + n-- + } + if az == 0 { + return 0 + } + return n + } + hz := 0.5 * z + var cz complex128 + var acz float64 + if az > math.Sqrt(arm) { + cz = hz * hz + acz = cmplx.Abs(cz) + } + NN := n + ck := cmplx.Log(hz) + var ak1 complex128 + for { + dfnu := fnu + float64(NN-1) + // Underflow test. 
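+		// real(ak1) below is the log of the magnitude of the leading series term,
+		// dfnu*log(|z|/2) - lgamma(dfnu+1), minus real(z) when kode == 2; if it is
+		// not above -elim, the whole order-dfnu component underflows, so that
+		// output is zeroed and NN is reduced.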
+ ak1 = ck * complex(dfnu, 0) + ak := dgamln(dfnu+1, 0) + ak1 -= complex(ak, 0) + if kode == 2 { + ak1 -= complex(real(z), 0) + } + if real(ak1) > -elim { + break + } + nz++ + y[NN-1] = 0 + if acz > dfnu { + // Return with nz < 0 if abs(Z*Z/4)>fnu+u-nz-1 complete the calculation + // in cbinu with n = n - abs(nz). + nz *= -1 + return nz + } + NN-- + if NN == 0 { + return nz + } + } + crscr := 1.0 + var flag int + var scale float64 + aa := real(ak1) + if aa <= -alim { + flag = 1 + crscr = tol + scale = arm / tol + aa -= math.Log(tol) + } + var w [2]complex128 + for { + coef := cmplx.Exp(complex(aa, imag(ak1))) + atol := tol * acz / (fnu + float64(NN)) + for i := 0; i < min(2, NN); i++ { + FNUP := fnu + float64(NN-i) + s1 := 1 + 0i + if acz >= tol*FNUP { + ak2 := 1 + 0i + ak := FNUP + 2 + S := FNUP + scl := 2.0 + first := true + for first || scl > atol { + ak2 = ak2 * cz * complex(1/S, 0) + scl *= acz / S + s1 += ak2 + S += ak + ak += 2 + first = false + } + } + s2 := s1 * coef + w[i] = s2 + if flag == 1 { + if Zuchk(s2, scale, tol) != 0 { + var full bool + var dfnu float64 + // This code is similar to the code that exists above. The + // code copying is here because the original Fortran used + // a goto to solve the loop-and-a-half problem. Removing the + // goto makes the behavior of the function and variable scoping + // much clearer, but requires copying this code due to Go's + // goto rules. + for { + if full { + dfnu = fnu + float64(NN-1) + // Underflow test. + ak1 = ck * complex(dfnu, 0) + ak1 -= complex(dgamln(dfnu+1, 0), 0) + if kode == 2 { + ak1 -= complex(real(z), 0) + } + if real(ak1) > -elim { + break + } + } else { + full = true + } + nz++ + y[NN-1] = 0 + if acz > dfnu { + // Return with nz < 0 if abs(Z*Z/4)>fnu+u-nz-1 complete the calculation + // in cbinu with n = n - abs(nz). + nz *= -1 + return nz + } + NN-- + if NN == 0 { + return nz + } + } + continue + } + } + y[NN-i-1] = s2 * complex(crscr, 0) + coef /= hz + coef *= complex(FNUP-1, 0) + } + break + } + if NN <= 2 { + return nz + } + rz := complex(2*real(z)/(az*az), -2*imag(z)/(az*az)) + if flag == 0 { + for i := NN - 3; i >= 0; i-- { + y[i] = complex(float64(i+1)+fnu, 0)*rz*y[i+1] + y[i+2] + } + return nz + } + + // exp(-alim)=exp(-elim)/tol=approximately one digit of precision above the + // underflow limit, which equals scale = dmach[1)*SS*1e3. + s1 := w[0] + s2 := w[1] + for K := NN - 3; K >= 0; K-- { + s1, s2 = s2, s1+complex(float64(K+1)+fnu, 0)*(rz*s2) + ck := s2 * complex(crscr, 0) + y[K] = ck + if cmplx.Abs(ck) > scale { + for ; K >= 0; K-- { + y[K] = complex(float64(K+1)+fnu, 0)*rz*y[K+1] + y[K+2] + } + return nz + } + } + return nz +} + +// Zs1s2 tests for a possible underflow resulting from the addition of the I and +// K functions in the analytic continuation formula where s1 == K function and +// s2 == I function. +// +// When kode == 1, the I and K functions are different orders of magnitude. +// +// When kode == 2, they may both be of the same order of magnitude, but the maximum +// must be at least one precision above the underflow limit. +func Zs1s2(zr, s1, s2 complex128, scale, lim float64, iuf int) (s1o, s2o complex128, nz, iufo int) { + if s1 == 0 || math.Log(cmplx.Abs(s1))-2*real(zr) < -lim { + if cmplx.Abs(s2) > scale { + return 0, s2, 0, iuf + } + return 0, 0, 1, 0 + } + // TODO(btracey): Written like this for numerical rounding reasons. + // Fix once we're sure other changes are correct. 
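+	// The next line computes s1*exp(-2*zr) by exponentiating log(s1) - zr - zr,
+	// which (per the TODO above) is meant to preserve accuracy when |s1| sits
+	// near the underflow limit.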
+ s1 = cmplx.Exp(cmplx.Log(s1) - zr - zr) + if math.Max(cmplx.Abs(s1), cmplx.Abs(s2)) > scale { + return s1, s2, 0, iuf + 1 + } + return 0, 0, 1, 0 +} + +func dgamln(z float64, ierr int) float64 { + //return amoslib.DgamlnFort(z) + // Go implementation. + if z < 0 { + return 0 + } + a2, _ := math.Lgamma(z) + return a2 +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/d1mach.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/d1mach.f new file mode 100644 index 0000000..0d344de --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/d1mach.f @@ -0,0 +1,97 @@ +*DECK D1MACH + DOUBLE PRECISION FUNCTION D1MACH(I) +C***BEGIN PROLOGUE D1MACH +C***DATE WRITTEN 750101 (YYMMDD) +C***REVISION DATE 890213 (YYMMDD) +C***CATEGORY NO. R1 +C***KEYWORDS LIBRARY=SLATEC,TYPE=DOUBLE PRECISION(R1MACH-S D1MACH-D), +C MACHINE CONSTANTS +C***AUTHOR FOX, P. A., (BELL LABS) +C HALL, A. D., (BELL LABS) +C SCHRYER, N. L., (BELL LABS) +C***PURPOSE Returns double precision machine dependent constants +C***DESCRIPTION +C +C D1MACH can be used to obtain machine-dependent parameters +C for the local machine environment. It is a function +C subprogram with one (input) argument, and can be called +C as follows, for example +C +C D = D1MACH(I) +C +C where I=1,...,5. The (output) value of D above is +C determined by the (input) value of I. The results for +C various values of I are discussed below. +C +C D1MACH( 1) = B**(EMIN-1), the smallest positive magnitude. +C D1MACH( 2) = B**EMAX*(1 - B**(-T)), the largest magnitude. +C D1MACH( 3) = B**(-T), the smallest relative spacing. +C D1MACH( 4) = B**(1-T), the largest relative spacing. +C D1MACH( 5) = LOG10(B) +C +C Assume double precision numbers are represented in the T-digit, +C base-B form +C +C sign (B**E)*( (X(1)/B) + ... + (X(T)/B**T) ) +C +C where 0 .LE. X(I) .LT. B for I=1,...,T, 0 .LT. X(1), and +C EMIN .LE. E .LE. EMAX. +C +C The values of B, T, EMIN and EMAX are provided in I1MACH as +C follows: +C I1MACH(10) = B, the base. +C I1MACH(14) = T, the number of base-B digits. +C I1MACH(15) = EMIN, the smallest exponent E. +C I1MACH(16) = EMAX, the largest exponent E. +C +C To alter this function for a particular environment, +C the desired set of DATA statements should be activated by +C removing the C from column 1. Also, the values of +C D1MACH(1) - D1MACH(4) should be checked for consistency +C with the local operating system. +C +C***REFERENCES FOX P.A., HALL A.D., SCHRYER N.L.,*FRAMEWORK FOR A +C PORTABLE LIBRARY*, ACM TRANSACTIONS ON MATHEMATICAL +C SOFTWARE, VOL. 4, NO. 2, JUNE 1978, PP. 177-188. +C***ROUTINES CALLED XERROR +C***END PROLOGUE D1MACH +C + INTEGER SMALL(4) + INTEGER LARGE(4) + INTEGER RIGHT(4) + INTEGER DIVER(4) + INTEGER LOG10(4) +C + DOUBLE PRECISION DMACH(5) + SAVE DMACH +C +C EQUIVALENCE (DMACH(1),SMALL(1)) +C EQUIVALENCE (DMACH(2),LARGE(1)) +C EQUIVALENCE (DMACH(3),RIGHT(1)) +C EQUIVALENCE (DMACH(4),DIVER(1)) +C EQUIVALENCE (DMACH(5),LOG10(1)) +C +C MACHINE CONSTANTS FOR THE IBM PC +C ASSUMES THAT ALL ARITHMETIC IS DONE IN DOUBLE PRECISION +C ON 8088, I.E., NOT IN 80 BIT FORM FOR THE 8087. 
+C
+      DATA DMACH(1) / 2.23D-308  /
+C     DATA SMALL(1),SMALL(2) / 2002288515, 1050897 /
+      DATA DMACH(2) / 1.79D+308  /
+C     DATA LARGE(1),LARGE(2) / 1487780761, 2146426097 /
+      DATA DMACH(3) / 1.11D-16   /
+C     DATA RIGHT(1),RIGHT(2) / -1209488034, 1017118298 /
+      DATA DMACH(4) / 2.22D-16   /
+C     DATA DIVER(1),DIVER(2) / -1209488034, 1018166874 /
+      DATA DMACH(5) / 0.3010299956639812 /
+C     DATA LOG10(1),LOG10(2) / 1352628735, 1070810131 /
+C
+C
+C***FIRST EXECUTABLE STATEMENT D1MACH
+      IF (I .LT. 1 .OR. I .GT. 5)
+     1   CALL XERROR ('D1MACH -- I OUT OF BOUNDS', 25, 1, 2)
+C
+      D1MACH = DMACH(I)
+      RETURN
+C
+      END
diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/dgamln.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/dgamln.f
new file mode 100644
index 0000000..792014b
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/dgamln.f
@@ -0,0 +1,189 @@
+      DOUBLE PRECISION FUNCTION DGAMLN(Z,IERR)
+C***BEGIN PROLOGUE DGAMLN
+C***DATE WRITTEN 830501 (YYMMDD)
+C***REVISION DATE 830501 (YYMMDD)
+C***CATEGORY NO. B5F
+C***KEYWORDS GAMMA FUNCTION,LOGARITHM OF GAMMA FUNCTION
+C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES
+C***PURPOSE TO COMPUTE THE LOGARITHM OF THE GAMMA FUNCTION
+C***DESCRIPTION
+C
+C     **** A DOUBLE PRECISION ROUTINE ****
+C         DGAMLN COMPUTES THE NATURAL LOG OF THE GAMMA FUNCTION FOR
+C         Z.GT.0. THE ASYMPTOTIC EXPANSION IS USED TO GENERATE VALUES
+C         GREATER THAN ZMIN WHICH ARE ADJUSTED BY THE RECURSION
+C         G(Z+1)=Z*G(Z) FOR Z.LE.ZMIN. THE FUNCTION WAS MADE AS
+C         PORTABLE AS POSSIBLE BY COMPUTING ZMIN FROM THE NUMBER OF BASE
+C         10 DIGITS IN A WORD, RLN=AMAX1(-ALOG10(R1MACH(4)),0.5E-18)
+C         LIMITED TO 18 DIGITS OF (RELATIVE) ACCURACY.
+C
+C         SINCE INTEGER ARGUMENTS ARE COMMON, A TABLE LOOK UP ON 100
+C         VALUES IS USED FOR SPEED OF EXECUTION.
+C
+C     DESCRIPTION OF ARGUMENTS
+C
+C         INPUT      Z IS DOUBLE PRECISION
+C           Z      - ARGUMENT, Z.GT.0.0D0
+C
+C         OUTPUT      DGAMLN IS DOUBLE PRECISION
+C           DGAMLN  - NATURAL LOG OF THE GAMMA FUNCTION AT Z.NE.0.0D0
+C           IERR    - ERROR FLAG
+C                     IERR=0, NORMAL RETURN, COMPUTATION COMPLETED
+C                     IERR=1, Z.LE.0.0D0, NO COMPUTATION
+C
+C
+C***REFERENCES COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT
+C                 BY D. E. AMOS, SAND83-0083, MAY, 1983.
+C***ROUTINES CALLED I1MACH,D1MACH +C***END PROLOGUE DGAMLN + DOUBLE PRECISION CF, CON, FLN, FZ, GLN, RLN, S, TLG, TRM, TST, + * T1, WDTOL, Z, ZDMY, ZINC, ZM, ZMIN, ZP, ZSQ, D1MACH + INTEGER I, IERR, I1M, K, MZ, NZ, I1MACH + DIMENSION CF(22), GLN(100) +C LNGAMMA(N), N=1,100 + DATA GLN(1), GLN(2), GLN(3), GLN(4), GLN(5), GLN(6), GLN(7), + 1 GLN(8), GLN(9), GLN(10), GLN(11), GLN(12), GLN(13), GLN(14), + 2 GLN(15), GLN(16), GLN(17), GLN(18), GLN(19), GLN(20), + 3 GLN(21), GLN(22)/ + 4 0.00000000000000000D+00, 0.00000000000000000D+00, + 5 6.93147180559945309D-01, 1.79175946922805500D+00, + 6 3.17805383034794562D+00, 4.78749174278204599D+00, + 7 6.57925121201010100D+00, 8.52516136106541430D+00, + 8 1.06046029027452502D+01, 1.28018274800814696D+01, + 9 1.51044125730755153D+01, 1.75023078458738858D+01, + A 1.99872144956618861D+01, 2.25521638531234229D+01, + B 2.51912211827386815D+01, 2.78992713838408916D+01, + C 3.06718601060806728D+01, 3.35050734501368889D+01, + D 3.63954452080330536D+01, 3.93398841871994940D+01, + E 4.23356164607534850D+01, 4.53801388984769080D+01/ + DATA GLN(23), GLN(24), GLN(25), GLN(26), GLN(27), GLN(28), + 1 GLN(29), GLN(30), GLN(31), GLN(32), GLN(33), GLN(34), + 2 GLN(35), GLN(36), GLN(37), GLN(38), GLN(39), GLN(40), + 3 GLN(41), GLN(42), GLN(43), GLN(44)/ + 4 4.84711813518352239D+01, 5.16066755677643736D+01, + 5 5.47847293981123192D+01, 5.80036052229805199D+01, + 6 6.12617017610020020D+01, 6.45575386270063311D+01, + 7 6.78897431371815350D+01, 7.12570389671680090D+01, + 8 7.46582363488301644D+01, 7.80922235533153106D+01, + 9 8.15579594561150372D+01, 8.50544670175815174D+01, + A 8.85808275421976788D+01, 9.21361756036870925D+01, + B 9.57196945421432025D+01, 9.93306124547874269D+01, + C 1.02968198614513813D+02, 1.06631760260643459D+02, + D 1.10320639714757395D+02, 1.14034211781461703D+02, + E 1.17771881399745072D+02, 1.21533081515438634D+02/ + DATA GLN(45), GLN(46), GLN(47), GLN(48), GLN(49), GLN(50), + 1 GLN(51), GLN(52), GLN(53), GLN(54), GLN(55), GLN(56), + 2 GLN(57), GLN(58), GLN(59), GLN(60), GLN(61), GLN(62), + 3 GLN(63), GLN(64), GLN(65), GLN(66)/ + 4 1.25317271149356895D+02, 1.29123933639127215D+02, + 5 1.32952575035616310D+02, 1.36802722637326368D+02, + 6 1.40673923648234259D+02, 1.44565743946344886D+02, + 7 1.48477766951773032D+02, 1.52409592584497358D+02, + 8 1.56360836303078785D+02, 1.60331128216630907D+02, + 9 1.64320112263195181D+02, 1.68327445448427652D+02, + A 1.72352797139162802D+02, 1.76395848406997352D+02, + B 1.80456291417543771D+02, 1.84533828861449491D+02, + C 1.88628173423671591D+02, 1.92739047287844902D+02, + D 1.96866181672889994D+02, 2.01009316399281527D+02, + E 2.05168199482641199D+02, 2.09342586752536836D+02/ + DATA GLN(67), GLN(68), GLN(69), GLN(70), GLN(71), GLN(72), + 1 GLN(73), GLN(74), GLN(75), GLN(76), GLN(77), GLN(78), + 2 GLN(79), GLN(80), GLN(81), GLN(82), GLN(83), GLN(84), + 3 GLN(85), GLN(86), GLN(87), GLN(88)/ + 4 2.13532241494563261D+02, 2.17736934113954227D+02, + 5 2.21956441819130334D+02, 2.26190548323727593D+02, + 6 2.30439043565776952D+02, 2.34701723442818268D+02, + 7 2.38978389561834323D+02, 2.43268849002982714D+02, + 8 2.47572914096186884D+02, 2.51890402209723194D+02, + 9 2.56221135550009525D+02, 2.60564940971863209D+02, + A 2.64921649798552801D+02, 2.69291097651019823D+02, + B 2.73673124285693704D+02, 2.78067573440366143D+02, + C 2.82474292687630396D+02, 2.86893133295426994D+02, + D 2.91323950094270308D+02, 2.95766601350760624D+02, + E 3.00220948647014132D+02, 3.04686856765668715D+02/ + DATA GLN(89), GLN(90), GLN(91), GLN(92), 
GLN(93), GLN(94), + 1 GLN(95), GLN(96), GLN(97), GLN(98), GLN(99), GLN(100)/ + 2 3.09164193580146922D+02, 3.13652829949879062D+02, + 3 3.18152639620209327D+02, 3.22663499126726177D+02, + 4 3.27185287703775217D+02, 3.31717887196928473D+02, + 5 3.36261181979198477D+02, 3.40815058870799018D+02, + 6 3.45379407062266854D+02, 3.49954118040770237D+02, + 7 3.54539085519440809D+02, 3.59134205369575399D+02/ +C COEFFICIENTS OF ASYMPTOTIC EXPANSION + DATA CF(1), CF(2), CF(3), CF(4), CF(5), CF(6), CF(7), CF(8), + 1 CF(9), CF(10), CF(11), CF(12), CF(13), CF(14), CF(15), + 2 CF(16), CF(17), CF(18), CF(19), CF(20), CF(21), CF(22)/ + 3 8.33333333333333333D-02, -2.77777777777777778D-03, + 4 7.93650793650793651D-04, -5.95238095238095238D-04, + 5 8.41750841750841751D-04, -1.91752691752691753D-03, + 6 6.41025641025641026D-03, -2.95506535947712418D-02, + 7 1.79644372368830573D-01, -1.39243221690590112D+00, + 8 1.34028640441683920D+01, -1.56848284626002017D+02, + 9 2.19310333333333333D+03, -3.61087712537249894D+04, + A 6.91472268851313067D+05, -1.52382215394074162D+07, + B 3.82900751391414141D+08, -1.08822660357843911D+10, + C 3.47320283765002252D+11, -1.23696021422692745D+13, + D 4.88788064793079335D+14, -2.13203339609193739D+16/ +C +C LN(2*PI) + DATA CON / 1.83787706640934548D+00/ +C +C***FIRST EXECUTABLE STATEMENT DGAMLN + IERR=0 + IF (Z.LE.0.0D0) GO TO 70 + IF (Z.GT.101.0D0) GO TO 10 + NZ = INT(SNGL(Z)) + FZ = Z - FLOAT(NZ) + IF (FZ.GT.0.0D0) GO TO 10 + IF (NZ.GT.100) GO TO 10 + DGAMLN = GLN(NZ) + RETURN + 10 CONTINUE + WDTOL = D1MACH(4) + WDTOL = DMAX1(WDTOL,0.5D-18) + I1M = I1MACH(14) + RLN = D1MACH(5)*FLOAT(I1M) + FLN = DMIN1(RLN,20.0D0) + FLN = DMAX1(FLN,3.0D0) + FLN = FLN - 3.0D0 + ZM = 1.8000D0 + 0.3875D0*FLN + MZ = INT(SNGL(ZM)) + 1 + ZMIN = FLOAT(MZ) + ZDMY = Z + ZINC = 0.0D0 + IF (Z.GE.ZMIN) GO TO 20 + ZINC = ZMIN - FLOAT(NZ) + ZDMY = Z + ZINC + 20 CONTINUE + ZP = 1.0D0/ZDMY + T1 = CF(1)*ZP + S = T1 + IF (ZP.LT.WDTOL) GO TO 40 + ZSQ = ZP*ZP + TST = T1*WDTOL + DO 30 K=2,22 + ZP = ZP*ZSQ + TRM = CF(K)*ZP + IF (DABS(TRM).LT.TST) GO TO 40 + S = S + TRM + 30 CONTINUE + 40 CONTINUE + IF (ZINC.NE.0.0D0) GO TO 50 + TLG = DLOG(Z) + DGAMLN = Z*(TLG-1.0D0) + 0.5D0*(CON-TLG) + S + RETURN + 50 CONTINUE + ZP = 1.0D0 + NZ = INT(SNGL(ZINC)) + DO 60 I=1,NZ + ZP = ZP*(Z+FLOAT(I-1)) + 60 CONTINUE + TLG = DLOG(ZDMY) + DGAMLN = ZDMY*(TLG-1.0D0) - DLOG(ZP) + 0.5D0*(CON-TLG) + S + RETURN +C +C + 70 CONTINUE + IERR=1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/fortran.go b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/fortran.go new file mode 100644 index 0000000..c7248ba --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/fortran.go @@ -0,0 +1,366 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build fortran +// TODO(jonlawlor): remove fortran build tag when gonum only supports go 1.7+. 
+ +package amoslib + +/* +double mzabs_(double * ar, double * ai); +void zs1s2_(double * ZRR, double * ZRI, double * S1R, double * S1I, double * S2R, double * S2I, int* NZ, double *ASCLE, double * ALIM, int * IUF); +void zacai_(double * ZR, double * ZI, double * FNU, int * KODE, int * N, int * MR, double * YR, double * YI, int * NZ, double * RL, double * tol, double * elim, double * alim); +void zseri_(double * ZR, double * ZI, double * FNU, int * KODE, int * N, double * YR, double * YI, int * NZ, double * tol, double * elim, double * alim); +void zmlri_(double * ZR, double * ZI, double * FNU, int * KODE, int * N, double * YR, double * YI, int * NZ, double * tol); +void zbknu_(double * ZR, double * ZI, double * FNU, int * KODE, int * N, double * YR, double * YI, int * NZ, double * tol, double * elim, double * alim); +void zasyi_(double * ZR, double * ZI, double * FNU, int * KODE, int * N, double * YR, double * YI, int * NZ,double * RL, double * tol, double * elim, double * alim); +void zkscl_(double * ZRR, double * ZRI, double * FNU, int * N, double * YR, double * YI, int * NZ, double * RZR, double * RZI, double * ASCLE, double * tol, double * elim); +void zuchk_(double * YR, double * YI, int * NZ, double * ASCLE, double * TOL); +void zairy_(double * ZR, double * ZI, int * ID, int * KODE, double * AIR, double * AII, int * NZ, int * IERR); +void zlog_(double * ar, double * ai, double * br, double * bi, int * ierr); +void zexp_(double * ar, double * ai, double * br, double * bi); +void zsqrt_(double * ar, double * ai, double * br, double * bi); +void zdiv_(double * ar, double * ai, double * br, double * bi, double * cr, double * ci); +void zmlt_(double * ar, double * ai, double * br, double * bi, double * cr, double * ci); +double dgamln_(double *z, int * ierr); +void zshch_(double * zr, double * zi, double * cshr, double * cshi, double * cchr, double * cchi); +double mysqrt_(double * A); +double myexp_(double * A); +double mycos_(double * A); +double mysin_(double * A); +double mylog_(double * A); +double mytan_(double * A); +double myatan_(double * A); +double myabs_(double * A); +double mymin_(double * A, double * B); +double mymax_(double * A, double * B); +*/ +import "C" +import "unsafe" + +func MinFort(a, b float64) float64 { + ans := C.mymin_((*C.double)(&a), (*C.double)(&b)) + return float64(ans) +} + +func MaxFort(a, b float64) float64 { + ans := C.mymax_((*C.double)(&a), (*C.double)(&b)) + return float64(ans) +} + +func AbsFort(a float64) float64 { + ans := C.myabs_((*C.double)(&a)) + return float64(ans) +} + +func AtanFort(a float64) float64 { + ans := C.myatan_((*C.double)(&a)) + return float64(ans) +} + +func TanFort(a float64) float64 { + ans := C.mytan_((*C.double)(&a)) + return float64(ans) +} + +func LogFort(a float64) float64 { + ans := C.mylog_((*C.double)(&a)) + return float64(ans) +} + +func SinFort(a float64) float64 { + ans := C.mysin_((*C.double)(&a)) + return float64(ans) +} + +func CosFort(a float64) float64 { + ans := C.mycos_((*C.double)(&a)) + return float64(ans) +} + +func ExpFort(a float64) float64 { + ans := C.myexp_((*C.double)(&a)) + return float64(ans) +} + +func SqrtFort(a float64) float64 { + ans := C.mysqrt_((*C.double)(&a)) + return float64(ans) +} + +func DgamlnFort(a float64) float64 { + var ierr int + pierr := (*C.int)(unsafe.Pointer(&ierr)) + pa := (*C.double)(&a) + ans := C.dgamln_(pa, pierr) + return (float64)(ans) +} + +func ZmltFort(a, b complex128) complex128 { + ar := real(a) + ai := imag(a) + br := real(b) + bi := imag(b) + var cr, ci float64 
+ C.zmlt_( + (*C.double)(&ar), (*C.double)(&ai), + (*C.double)(&br), (*C.double)(&bi), + (*C.double)(&cr), (*C.double)(&ci), + ) + return complex(cr, ci) +} + +func ZdivFort(a, b complex128) complex128 { + ar := real(a) + ai := imag(a) + br := real(b) + bi := imag(b) + var cr, ci float64 + C.zdiv_( + (*C.double)(&ar), (*C.double)(&ai), + (*C.double)(&br), (*C.double)(&bi), + (*C.double)(&cr), (*C.double)(&ci), + ) + return complex(cr, ci) +} + +func ZabsFort(a complex128) float64 { + ar := real(a) + ai := imag(a) + return float64(C.mzabs_((*C.double)(&ar), (*C.double)(&ai))) +} + +func ZsqrtFort(a complex128) (b complex128) { + ar := real(a) + ai := imag(a) + + var br, bi float64 + + par := (*C.double)(&ar) + pai := (*C.double)(&ai) + pbr := (*C.double)(&br) + pbi := (*C.double)(&bi) + + C.zsqrt_(par, pai, pbr, pbi) + return complex(br, bi) +} + +func ZexpFort(a complex128) (b complex128) { + ar := real(a) + ai := imag(a) + + var br, bi float64 + + par := (*C.double)(&ar) + pai := (*C.double)(&ai) + pbr := (*C.double)(&br) + pbi := (*C.double)(&bi) + + C.zexp_(par, pai, pbr, pbi) + return complex(br, bi) +} + +func ZlogFort(a complex128) (b complex128) { + ar := real(a) + ai := imag(a) + var ierr int + var br, bi float64 + + par := (*C.double)(&ar) + pai := (*C.double)(&ai) + pbr := (*C.double)(&br) + pbi := (*C.double)(&bi) + pierr := (*C.int)(unsafe.Pointer(&ierr)) + C.zlog_(par, pai, pbr, pbi, pierr) + return complex(br, bi) +} + +func Zshch(ZR, ZI, CSHR, CSHI, CCHR, CCHI float64) (ZRout, ZIout, CSHRout, CSHIout, CCHRout, CCHIout float64) { + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pcshr := (*C.double)(&CSHR) + pcshi := (*C.double)(&CSHI) + pcchr := (*C.double)(&CCHR) + pcchi := (*C.double)(&CCHI) + + C.zshch_(pzr, pzi, pcshr, pcshi, pcchr, pcchi) + return ZR, ZI, CSHR, CSHI, CCHR, CCHI +} + +func ZairyFort(ZR, ZI float64, ID, KODE int) (AIR, AII float64, NZ int) { + var IERR int + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pid := (*C.int)(unsafe.Pointer(&ID)) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + + pair := (*C.double)(&AIR) + paii := (*C.double)(&AII) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + pierr := (*C.int)(unsafe.Pointer(&IERR)) + C.zairy_(pzr, pzi, pid, pkode, pair, paii, pnz, pierr) + + NZ = int(*pnz) + return AIR, AII, NZ +} + +func ZksclFort(ZRR, ZRI, FNU float64, N int, YR, YI []float64, NZ int, RZR, RZI, ASCLE, TOL, ELIM float64) ( + ZRout, ZIout, FNUout float64, Nout int, YRout, YIout []float64, NZout int, RZRout, RZIout, ASCLEout, TOLout, ELIMout float64) { + + pzrr := (*C.double)(&ZRR) + pzri := (*C.double)(&ZRI) + pfnu := (*C.double)(&FNU) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + przr := (*C.double)(&RZR) + przi := (*C.double)(&RZI) + pascle := (*C.double)(&ASCLE) + ptol := (*C.double)(&TOL) + pelim := (*C.double)(&ELIM) + + C.zkscl_(pzrr, pzri, pfnu, pn, pyr, pyi, pnz, przr, przi, pascle, ptol, pelim) + N = int(*pn) + NZ = int(*pnz) + return ZRR, ZRI, FNU, N, YR, YI, NZ, RZR, RZI, ASCLE, TOL, ELIM +} + +func ZbknuFort(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, TOLout, ELIMout, ALIMout float64) { + + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pfnu := (*C.double)(&FNU) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := 
(*C.int)(unsafe.Pointer(&NZ)) + ptol := (*C.double)(&TOL) + pelim := (*C.double)(&ELIM) + palim := (*C.double)(&ALIM) + + C.zbknu_(pzr, pzi, pfnu, pkode, pn, pyr, pyi, pnz, ptol, pelim, palim) + KODE = int(*pkode) + N = int(*pn) + NZ = int(*pnz) + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM +} + +func ZasyiFort(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, RL, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, RLout, TOLout, ELIMout, ALIMout float64) { + + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pfnu := (*C.double)(&FNU) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + prl := (*C.double)(&RL) + ptol := (*C.double)(&TOL) + pelim := (*C.double)(&ELIM) + palim := (*C.double)(&ALIM) + + C.zasyi_(pzr, pzi, pfnu, pkode, pn, pyr, pyi, pnz, prl, ptol, pelim, palim) + KODE = int(*pkode) + N = int(*pn) + NZ = int(*pnz) + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +} + +func ZuchkFort(YR, YI float64, NZ int, ASCLE, TOL float64) (YRout, YIout float64, NZout int, ASCLEout, TOLout float64) { + pyr := (*C.double)(&YR) + pyi := (*C.double)(&YI) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + pascle := (*C.double)(&ASCLE) + ptol := (*C.double)(&TOL) + + C.zuchk_(pyr, pyi, pnz, pascle, ptol) + return YR, YI, NZ, ASCLE, TOL +} + +func Zs1s2Fort(ZRR, ZRI, S1R, S1I, S2R, S2I float64, NZ int, ASCLE, ALIM float64, IUF int) ( + ZRRout, ZRIout, S1Rout, S1Iout, S2Rout, S2Iout float64, NZout int, ASCLEout, ALIMout float64, IUFout int) { + + pzrr := (*C.double)(&ZRR) + pzri := (*C.double)(&ZRI) + ps1r := (*C.double)(&S1R) + ps1i := (*C.double)(&S1I) + ps2r := (*C.double)(&S2R) + ps2i := (*C.double)(&S2I) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + pascle := (*C.double)(&ASCLE) + palim := (*C.double)(&ALIM) + piuf := (*C.int)(unsafe.Pointer(&IUF)) + + C.zs1s2_(pzrr, pzri, ps1r, ps1i, ps2r, ps2i, pnz, pascle, palim, piuf) + return ZRR, ZRI, S1R, S1I, S2R, S2I, NZ, ASCLE, ALIM, IUF +} + +func ZacaiFort(ZR, ZI, FNU float64, KODE, MR, N int, YR, YI []float64, NZ int, RL, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, MRout, Nout int, YRout, YIout []float64, NZout int, RLout, TOLout, ELIMout, ALIMout float64) { + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pfnu := (*C.double)(&FNU) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + pmr := (*C.int)(unsafe.Pointer(&MR)) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + prl := (*C.double)(&RL) + ptol := (*C.double)(&TOL) + pelim := (*C.double)(&ELIM) + palim := (*C.double)(&ALIM) + + C.zacai_(pzr, pzi, pfnu, pkode, pmr, pn, pyr, pyi, pnz, prl, ptol, pelim, palim) + KODE = int(*pkode) + MR = int(*pmr) + N = int(*pn) + NZ = int(*pnz) + return ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, RL, TOL, ELIM, ALIM +} + +func ZseriFort(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, TOL, ELIM, ALIM float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, TOLout, ELIMout, ALIMout float64) { + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pfnu := (*C.double)(&FNU) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + ptol := (*C.double)(&TOL) + pelim := (*C.double)(&ELIM) + 
palim := (*C.double)(&ALIM) + + C.zseri_(pzr, pzi, pfnu, pkode, pn, pyr, pyi, pnz, ptol, pelim, palim) + KODE = int(*pkode) + N = int(*pn) + NZ = int(*pnz) + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, ALIM +} + +func ZmlriFort(ZR, ZI, FNU float64, KODE, N int, YR, YI []float64, NZ int, TOL float64) ( + ZRout, ZIout, FNUout float64, KODEout, Nout int, YRout, YIout []float64, NZout int, TOLout float64) { + pzr := (*C.double)(&ZR) + pzi := (*C.double)(&ZI) + pfnu := (*C.double)(&FNU) + pkode := (*C.int)(unsafe.Pointer(&KODE)) + pn := (*C.int)(unsafe.Pointer(&N)) + pyr := (*C.double)(&YR[0]) + pyi := (*C.double)(&YI[0]) + pnz := (*C.int)(unsafe.Pointer(&NZ)) + ptol := (*C.double)(&TOL) + + C.zmlri_(pzr, pzi, pfnu, pkode, pn, pyr, pyi, pnz, ptol) + KODE = int(*pkode) + N = int(*pn) + NZ = int(*pnz) + return ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/i1mach.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/i1mach.f new file mode 100644 index 0000000..b968333 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/i1mach.f @@ -0,0 +1,113 @@ +*DECK I1MACH + INTEGER FUNCTION I1MACH(I) +C***BEGIN PROLOGUE I1MACH +C***DATE WRITTEN 750101 (YYMMDD) +C***REVISION DATE 890213 (YYMMDD) +C***CATEGORY NO. R1 +C***KEYWORDS LIBRARY=SLATEC,TYPE=INTEGER(I1MACH-I),MACHINE CONSTANTS +C***AUTHOR FOX, P. A., (BELL LABS) +C HALL, A. D., (BELL LABS) +C SCHRYER, N. L., (BELL LABS) +C***PURPOSE Returns integer machine dependent constants +C***DESCRIPTION +C +C I1MACH can be used to obtain machine-dependent parameters +C for the local machine environment. It is a function +C subroutine with one (input) argument, and can be called +C as follows, for example +C +C K = I1MACH(I) +C +C where I=1,...,16. The (output) value of K above is +C determined by the (input) value of I. The results for +C various values of I are discussed below. +C +C I/O unit numbers. +C I1MACH( 1) = the standard input unit. +C I1MACH( 2) = the standard output unit. +C I1MACH( 3) = the standard punch unit. +C I1MACH( 4) = the standard error message unit. +C +C Words. +C I1MACH( 5) = the number of bits per integer storage unit. +C I1MACH( 6) = the number of characters per integer storage unit. +C +C Integers. +C assume integers are represented in the S-digit, base-A form +C +C sign ( X(S-1)*A**(S-1) + ... + X(1)*A + X(0) ) +C +C where 0 .LE. X(I) .LT. A for I=0,...,S-1. +C I1MACH( 7) = A, the base. +C I1MACH( 8) = S, the number of base-A digits. +C I1MACH( 9) = A**S - 1, the largest magnitude. +C +C Floating-Point Numbers. +C Assume floating-point numbers are represented in the T-digit, +C base-B form +C sign (B**E)*( (X(1)/B) + ... + (X(T)/B**T) ) +C +C where 0 .LE. X(I) .LT. B for I=1,...,T, +C 0 .LT. X(1), and EMIN .LE. E .LE. EMAX. +C I1MACH(10) = B, the base. +C +C Single-Precision +C I1MACH(11) = T, the number of base-B digits. +C I1MACH(12) = EMIN, the smallest exponent E. +C I1MACH(13) = EMAX, the largest exponent E. +C +C Double-Precision +C I1MACH(14) = T, the number of base-B digits. +C I1MACH(15) = EMIN, the smallest exponent E. +C I1MACH(16) = EMAX, the largest exponent E. +C +C To alter this function for a particular environment, +C the desired set of DATA statements should be activated by +C removing the C from column 1. Also, the values of +C I1MACH(1) - I1MACH(4) should be checked for consistency +C with the local operating system. 
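The cgo shim above follows one pattern throughout: every argument is passed by reference to match the Fortran calling convention, with float64 values handed over as (*C.double)(&x) and Go ints reinterpreted through (*C.int)(unsafe.Pointer(&i)); the complex-valued helpers unpack a complex128 into real/imaginary doubles around the call. The int casts assume a C int can safely alias the start of a Go int for the small values used here (true on the common little-endian 64-bit targets), and building the package needs cgo plus a Fortran compiler for the accompanying .f files. A minimal usage sketch, assuming the caller sits inside the gonum module itself (amoslib is under internal/, so outside code cannot import it; treat this purely as an illustration of the call pattern):

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/mathext/internal/amos/amoslib"
    )

    func main() {
        // Elementary helpers round-trip one value through the Fortran
        // wrappers (MYSQRT, MYSIN, ...).
        fmt.Println(amoslib.SqrtFort(2.0)) // 1.4142135623730951

        // Complex helpers wrap ZSQRT, ZMLT and MZABS.
        z := complex(3.0, 4.0)
        fmt.Println(amoslib.ZsqrtFort(z))   // (2+1i)
        fmt.Println(amoslib.ZmltFort(z, z)) // (-7+24i)
        fmt.Println(amoslib.ZabsFort(z))    // 5
    }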
+C +C***REFERENCES FOX P.A., HALL A.D., SCHRYER N.L.,*FRAMEWORK FOR A +C PORTABLE LIBRARY*, ACM TRANSACTIONS ON MATHEMATICAL +C SOFTWARE, VOL. 4, NO. 2, JUNE 1978, PP. 177-188. +C***ROUTINES CALLED (NONE) +C***END PROLOGUE I1MACH +C + INTEGER IMACH(16),OUTPUT + SAVE IMACH + EQUIVALENCE (IMACH(4),OUTPUT) +C +C MACHINE CONSTANTS FOR THE IBM PC +C + DATA IMACH( 1) / 5 / + DATA IMACH( 2) / 6 / + DATA IMACH( 3) / 0 / + DATA IMACH( 4) / 0 / + DATA IMACH( 5) / 32 / + DATA IMACH( 6) / 4 / + DATA IMACH( 7) / 2 / + DATA IMACH( 8) / 31 / + DATA IMACH( 9) / 2147483647 / + DATA IMACH(10) / 2 / + DATA IMACH(11) / 24 / + DATA IMACH(12) / -125 / + DATA IMACH(13) / 127 / + DATA IMACH(14) / 53 / + DATA IMACH(15) / -1021 / + DATA IMACH(16) / 1023 / +C +C***FIRST EXECUTABLE STATEMENT I1MACH + IF (I .LT. 1 .OR. I .GT. 16) GO TO 10 +C + I1MACH = IMACH(I) + RETURN +C + 10 CONTINUE + WRITE (UNIT = OUTPUT, FMT = 9000) + 9000 FORMAT ('1ERROR 1 IN I1MACH - I OUT OF BOUNDS') +C +C CALL FDUMP +C +C + STOP + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myabs.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myabs.f new file mode 100644 index 0000000..5060195 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myabs.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYABS(A) + DOUBLE PRECISION A + MYABS = DABS(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myatan.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myatan.f new file mode 100644 index 0000000..0887fda --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myatan.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYATAN(A) + DOUBLE PRECISION A + MYATAN = DATAN(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mycos.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mycos.f new file mode 100644 index 0000000..c39e46d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mycos.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYCOS(A) + DOUBLE PRECISION A + MYCOS = DCOS(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myexp.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myexp.f new file mode 100644 index 0000000..44a2a80 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/myexp.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYEXP(A) + DOUBLE PRECISION A + MYEXP = DEXP(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mylog.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mylog.f new file mode 100644 index 0000000..ac7e3a3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mylog.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYLOG(A) + DOUBLE PRECISION A + MYLOG = DLOG(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymax.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymax.f new file mode 100644 index 0000000..0438eb4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymax.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYMAX(A, B) + DOUBLE PRECISION A, B + MYMAX = DMAX1(A,B) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymin.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymin.f new file mode 100644 index 0000000..950606c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mymin.f @@ 
-0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYMIN(A, B) + DOUBLE PRECISION A, B + MYMIN = DMIN1(A,B) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysin.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysin.f new file mode 100644 index 0000000..1663ed5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysin.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYSIN(A) + DOUBLE PRECISION A + MYSIN = DSIN(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysqrt.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysqrt.f new file mode 100644 index 0000000..a399838 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mysqrt.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYSQRT(A) + DOUBLE PRECISION A + MYSQRT = SQRT(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mytan.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mytan.f new file mode 100644 index 0000000..239aa68 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/mytan.f @@ -0,0 +1,5 @@ + DOUBLE PRECISION FUNCTION MYTAN(A) + DOUBLE PRECISION A + MYTAN = DTAN(A) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/xerror.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/xerror.f new file mode 100644 index 0000000..baa5506 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/xerror.f @@ -0,0 +1,22 @@ + SUBROUTINE XERROR(MESS,NMESS,L1,L2) +C +C THIS IS A DUMMY XERROR ROUTINE TO PRINT ERROR MESSAGES WITH NMESS +C CHARACTERS. L1 AND L2 ARE DUMMY PARAMETERS TO MAKE THIS CALL +C COMPATIBLE WITH THE SLATEC XERROR ROUTINE. THIS IS A FORTRAN 77 +C ROUTINE. 
+C + CHARACTER*(*) MESS + NN=NMESS/70 + NR=NMESS-70*NN + IF(NR.NE.0) NN=NN+1 + K=1 + PRINT 900 + 900 FORMAT(/) + DO 10 I=1,NN + KMIN=MIN0(K+69,NMESS) + PRINT *, MESS(K:KMIN) + K=K+70 + 10 CONTINUE + PRINT 900 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zabs.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zabs.f new file mode 100644 index 0000000..23e00a6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zabs.f @@ -0,0 +1,34 @@ + DOUBLE PRECISION FUNCTION MZABS(ZR, ZI) +C***BEGIN PROLOGUE ZABS +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C ZABS COMPUTES THE ABSOLUTE VALUE OR MAGNITUDE OF A DOUBLE +C PRECISION COMPLEX VARIABLE CMPLX(ZR,ZI) +C +C***ROUTINES CALLED (NONE) +C***END PROLOGUE ZABS + DOUBLE PRECISION ZR, ZI, U, V, Q, S + + MZABS = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + RETURN + END + +c U = DABS(ZR) +c V = DABS(ZI) +c S = U + V +C----------------------------------------------------------------------- +C S*1.0D0 MAKES AN UNNORMALIZED UNDERFLOW ON CDC MACHINES INTO A +C TRUE FLOATING ZERO +C----------------------------------------------------------------------- +c S = S*1.0D+0 +c IF (S.EQ.0.0D+0) GO TO 20 +c IF (U.GT.V) GO TO 10 +c Q = U/V +c ZABS = V*DSQRT(1.D+0+Q*Q) +c RETURN +c 10 Q = V/U +c ZABS = U*DSQRT(1.D+0+Q*Q) +c RETURN +c 20 ZABS = 0.0D+0 +c RETURN +c END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacai.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacai.f new file mode 100644 index 0000000..aa05a5c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacai.f @@ -0,0 +1,99 @@ + SUBROUTINE ZACAI(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, RL, TOL, + * ELIM, ALIM) +C***BEGIN PROLOGUE ZACAI +C***REFER TO ZAIRY +C +C ZACAI APPLIES THE ANALYTIC CONTINUATION FORMULA +C +C K(FNU,ZN*EXP(MP))=K(FNU,ZN)*EXP(-MP*FNU) - MP*I(FNU,ZN) +C MP=PI*MR*CMPLX(0.0,1.0) +C +C TO CONTINUE THE K FUNCTION FROM THE RIGHT HALF TO THE LEFT +C HALF Z PLANE FOR USE WITH ZAIRY WHERE FNU=1/3 OR 2/3 AND N=1. +C ZACAI IS THE SAME AS ZACON WITH THE PARTS FOR LARGER ORDERS AND +C RECURRENCE REMOVED. A RECURSIVE CALL TO ZACON CAN RESULT IF ZACON +C IS CALLED FROM ZAIRY. 
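In standard notation, the continuation formula quoted in this prologue (and used again by ZACON below) is

    K_\nu\!\left(z\,e^{m\pi i}\right) = e^{-m\pi i\,\nu}\,K_\nu(z) - m\pi i\,I_\nu(z), \qquad m = \mathrm{MR} = \pm 1,

which carries the K function from the right half plane into the left half plane; ZACAI only needs it for \nu = 1/3 and 2/3 with N = 1, as called from ZAIRY.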
+C +C***ROUTINES CALLED ZASYI,ZBKNU,ZMLRI,ZSERI,ZS1S2,D1MACH,ZABS +C***END PROLOGUE ZACAI +C COMPLEX CSGN,CSPN,C1,C2,Y,Z,ZN,CY + DOUBLE PRECISION ALIM, ARG, ASCLE, AZ, CSGNR, CSGNI, CSPNR, + * CSPNI, C1R, C1I, C2R, C2I, CYR, CYI, DFNU, ELIM, FMR, FNU, PI, + * RL, SGN, TOL, YY, YR, YI, ZR, ZI, ZNR, ZNI, D1MACH, ZABS + INTEGER INU, IUF, KODE, MR, N, NN, NW, NZ + DIMENSION YR(N), YI(N), CYR(2), CYI(2) + DATA PI / 3.14159265358979324D0 / + NZ = 0 + ZNR = -ZR + ZNI = -ZI + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + NN = N + DFNU = FNU + DBLE(FLOAT(N-1)) + IF (AZ.LE.2.0D0) GO TO 10 + IF (AZ*AZ*0.25D0.GT.DFNU+1.0D0) GO TO 20 + 10 CONTINUE +C----------------------------------------------------------------------- +C POWER SERIES FOR THE I FUNCTION +C----------------------------------------------------------------------- + CALL ZSERI(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, TOL, ELIM, ALIM) + GO TO 40 + 20 CONTINUE + IF (AZ.LT.RL) GO TO 30 +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR LARGE Z FOR THE I FUNCTION +C----------------------------------------------------------------------- + CALL ZASYI(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, RL, TOL, ELIM, + * ALIM) + IF (NW.LT.0) GO TO 80 + GO TO 40 + 30 CONTINUE +C----------------------------------------------------------------------- +C MILLER ALGORITHM NORMALIZED BY THE SERIES FOR THE I FUNCTION +C----------------------------------------------------------------------- + CALL ZMLRI(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, TOL) + IF(NW.LT.0) GO TO 80 + 40 CONTINUE +C----------------------------------------------------------------------- +C ANALYTIC CONTINUATION TO THE LEFT HALF PLANE FOR THE K FUNCTION +C----------------------------------------------------------------------- + CALL ZBKNU(ZNR, ZNI, FNU, KODE, 1, CYR, CYI, NW, TOL, ELIM, ALIM) + IF (NW.NE.0) GO TO 80 + FMR = DBLE(FLOAT(MR)) + SGN = -DSIGN(PI,FMR) + CSGNR = 0.0D0 + CSGNI = SGN + IF (KODE.EQ.1) GO TO 50 + YY = -ZNI + CSGNR = -CSGNI*DSIN(YY) + CSGNI = CSGNI*DCOS(YY) + 50 CONTINUE +C----------------------------------------------------------------------- +C CALCULATE CSPN=EXP(FNU*PI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE +C WHEN FNU IS LARGE +C----------------------------------------------------------------------- + INU = INT(SNGL(FNU)) + ARG = (FNU-DBLE(FLOAT(INU)))*SGN + CSPNR = DCOS(ARG) + CSPNI = DSIN(ARG) + IF (MOD(INU,2).EQ.0) GO TO 60 + CSPNR = -CSPNR + CSPNI = -CSPNI + 60 CONTINUE + C1R = CYR(1) + C1I = CYI(1) + C2R = YR(1) + C2I = YI(1) + IF (KODE.EQ.1) GO TO 70 + IUF = 0 + ASCLE = 1.0D+3*D1MACH(1)/TOL + CALL ZS1S2(ZNR, ZNI, C1R, C1I, C2R, C2I, NW, ASCLE, ALIM, IUF) + NZ = NZ + NW + 70 CONTINUE + YR(1) = CSPNR*C1R - CSPNI*C1I + CSGNR*C2R - CSGNI*C2I + YI(1) = CSPNR*C1I + CSPNI*C1R + CSGNR*C2I + CSGNI*C2R + RETURN + 80 CONTINUE + NZ = -1 + IF(NW.EQ.(-2)) NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacon.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacon.f new file mode 100644 index 0000000..ba59359 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zacon.f @@ -0,0 +1,203 @@ + SUBROUTINE ZACON(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, RL, FNUL, + * TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZACON +C***REFER TO ZBESK,ZBESH +C +C ZACON APPLIES THE ANALYTIC CONTINUATION FORMULA +C +C K(FNU,ZN*EXP(MP))=K(FNU,ZN)*EXP(-MP*FNU) - MP*I(FNU,ZN) +C MP=PI*MR*CMPLX(0.0,1.0) +C +C TO CONTINUE THE K FUNCTION FROM THE RIGHT HALF TO THE LEFT +C HALF Z PLANE +C +C***ROUTINES CALLED 
ZBINU,ZBKNU,ZS1S2,D1MACH,ZABS,ZMLT +C***END PROLOGUE ZACON +C COMPLEX CK,CONE,CSCL,CSCR,CSGN,CSPN,CY,CZERO,C1,C2,RZ,SC1,SC2,ST, +C *S1,S2,Y,Z,ZN + DOUBLE PRECISION ALIM, ARG, ASCLE, AS2, AZN, BRY, BSCLE, CKI, + * CKR, CONER, CPN, CSCL, CSCR, CSGNI, CSGNR, CSPNI, CSPNR, + * CSR, CSRR, CSSR, CYI, CYR, C1I, C1M, C1R, C2I, C2R, ELIM, FMR, + * FN, FNU, FNUL, PI, PTI, PTR, RAZN, RL, RZI, RZR, SC1I, SC1R, + * SC2I, SC2R, SGN, SPN, STI, STR, S1I, S1R, S2I, S2R, TOL, YI, YR, + * YY, ZEROR, ZI, ZNI, ZNR, ZR, D1MACH, ZABS + INTEGER I, INU, IUF, KFLAG, KODE, MR, N, NN, NW, NZ + DIMENSION YR(N), YI(N), CYR(2), CYI(2), CSSR(3), CSRR(3), BRY(3) + DATA PI / 3.14159265358979324D0 / + DATA ZEROR,CONER / 0.0D0,1.0D0 / + NZ = 0 + ZNR = -ZR + ZNI = -ZI + NN = N + CALL ZBINU(ZNR, ZNI, FNU, KODE, NN, YR, YI, NW, RL, FNUL, TOL, + * ELIM, ALIM) + IF (NW.LT.0) GO TO 90 +C----------------------------------------------------------------------- +C ANALYTIC CONTINUATION TO THE LEFT HALF PLANE FOR THE K FUNCTION +C----------------------------------------------------------------------- + NN = MIN0(2,N) + CALL ZBKNU(ZNR, ZNI, FNU, KODE, NN, CYR, CYI, NW, TOL, ELIM, ALIM) + IF (NW.NE.0) GO TO 90 + S1R = CYR(1) + S1I = CYI(1) + FMR = DBLE(FLOAT(MR)) + SGN = -DSIGN(PI,FMR) + CSGNR = ZEROR + CSGNI = SGN + IF (KODE.EQ.1) GO TO 10 + YY = -ZNI + CPN = DCOS(YY) + SPN = DSIN(YY) + CALL ZMLT(CSGNR, CSGNI, CPN, SPN, CSGNR, CSGNI) + 10 CONTINUE +C----------------------------------------------------------------------- +C CALCULATE CSPN=EXP(FNU*PI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE +C WHEN FNU IS LARGE +C----------------------------------------------------------------------- + INU = INT(SNGL(FNU)) + ARG = (FNU-DBLE(FLOAT(INU)))*SGN + CPN = DCOS(ARG) + SPN = DSIN(ARG) + CSPNR = CPN + CSPNI = SPN + IF (MOD(INU,2).EQ.0) GO TO 20 + CSPNR = -CSPNR + CSPNI = -CSPNI + 20 CONTINUE + IUF = 0 + C1R = S1R + C1I = S1I + C2R = YR(1) + C2I = YI(1) + ASCLE = 1.0D+3*D1MACH(1)/TOL + IF (KODE.EQ.1) GO TO 30 + CALL ZS1S2(ZNR, ZNI, C1R, C1I, C2R, C2I, NW, ASCLE, ALIM, IUF) + NZ = NZ + NW + SC1R = C1R + SC1I = C1I + 30 CONTINUE + CALL ZMLT(CSPNR, CSPNI, C1R, C1I, STR, STI) + CALL ZMLT(CSGNR, CSGNI, C2R, C2I, PTR, PTI) + YR(1) = STR + PTR + YI(1) = STI + PTI + IF (N.EQ.1) RETURN + CSPNR = -CSPNR + CSPNI = -CSPNI + S2R = CYR(2) + S2I = CYI(2) + C1R = S2R + C1I = S2I + C2R = YR(2) + C2I = YI(2) + IF (KODE.EQ.1) GO TO 40 + CALL ZS1S2(ZNR, ZNI, C1R, C1I, C2R, C2I, NW, ASCLE, ALIM, IUF) + NZ = NZ + NW + SC2R = C1R + SC2I = C1I + 40 CONTINUE + CALL ZMLT(CSPNR, CSPNI, C1R, C1I, STR, STI) + CALL ZMLT(CSGNR, CSGNI, C2R, C2I, PTR, PTI) + YR(2) = STR + PTR + YI(2) = STI + PTI + IF (N.EQ.2) RETURN + CSPNR = -CSPNR + CSPNI = -CSPNI + AZN = ZABS(CMPLX(ZNR,ZNI,kind=KIND(1.0D0))) + RAZN = 1.0D0/AZN + STR = ZNR*RAZN + STI = -ZNI*RAZN + RZR = (STR+STR)*RAZN + RZI = (STI+STI)*RAZN + FN = FNU + 1.0D0 + CKR = FN*RZR + CKI = FN*RZI +C----------------------------------------------------------------------- +C SCALE NEAR EXPONENT EXTREMES DURING RECURRENCE ON K FUNCTIONS +C----------------------------------------------------------------------- + CSCL = 1.0D0/TOL + CSCR = TOL + CSSR(1) = CSCL + CSSR(2) = CONER + CSSR(3) = CSCR + CSRR(1) = CSCR + CSRR(2) = CONER + CSRR(3) = CSCL + BRY(1) = ASCLE + BRY(2) = 1.0D0/ASCLE + BRY(3) = D1MACH(2) + AS2 = ZABS(CMPLX(S2R,S2I,kind=KIND(1.0D0))) + KFLAG = 2 + IF (AS2.GT.BRY(1)) GO TO 50 + KFLAG = 1 + GO TO 60 + 50 CONTINUE + IF (AS2.LT.BRY(2)) GO TO 60 + KFLAG = 3 + 60 CONTINUE + BSCLE = BRY(KFLAG) + S1R = S1R*CSSR(KFLAG) + S1I = 
S1I*CSSR(KFLAG) + S2R = S2R*CSSR(KFLAG) + S2I = S2I*CSSR(KFLAG) + CSR = CSRR(KFLAG) + DO 80 I=3,N + STR = S2R + STI = S2I + S2R = CKR*STR - CKI*STI + S1R + S2I = CKR*STI + CKI*STR + S1I + S1R = STR + S1I = STI + C1R = S2R*CSR + C1I = S2I*CSR + STR = C1R + STI = C1I + C2R = YR(I) + C2I = YI(I) + IF (KODE.EQ.1) GO TO 70 + IF (IUF.LT.0) GO TO 70 + CALL ZS1S2(ZNR, ZNI, C1R, C1I, C2R, C2I, NW, ASCLE, ALIM, IUF) + NZ = NZ + NW + SC1R = SC2R + SC1I = SC2I + SC2R = C1R + SC2I = C1I + IF (IUF.NE.3) GO TO 70 + IUF = -4 + S1R = SC1R*CSSR(KFLAG) + S1I = SC1I*CSSR(KFLAG) + S2R = SC2R*CSSR(KFLAG) + S2I = SC2I*CSSR(KFLAG) + STR = SC2R + STI = SC2I + 70 CONTINUE + PTR = CSPNR*C1R - CSPNI*C1I + PTI = CSPNR*C1I + CSPNI*C1R + YR(I) = PTR + CSGNR*C2R - CSGNI*C2I + YI(I) = PTI + CSGNR*C2I + CSGNI*C2R + CKR = CKR + RZR + CKI = CKI + RZI + CSPNR = -CSPNR + CSPNI = -CSPNI + IF (KFLAG.GE.3) GO TO 80 + PTR = DABS(C1R) + PTI = DABS(C1I) + C1M = DMAX1(PTR,PTI) + IF (C1M.LE.BSCLE) GO TO 80 + KFLAG = KFLAG + 1 + BSCLE = BRY(KFLAG) + S1R = S1R*CSR + S1I = S1I*CSR + S2R = STR + S2I = STI + S1R = S1R*CSSR(KFLAG) + S1I = S1I*CSSR(KFLAG) + S2R = S2R*CSSR(KFLAG) + S2I = S2I*CSSR(KFLAG) + CSR = CSRR(KFLAG) + 80 CONTINUE + RETURN + 90 CONTINUE + NZ = -1 + IF(NW.EQ.(-2)) NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zairy.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zairy.f new file mode 100644 index 0000000..6adab8f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zairy.f @@ -0,0 +1,395 @@ + SUBROUTINE ZAIRY(ZR, ZI, ID, KODE, AIR, AII, NZ, IERR) +C***BEGIN PROLOGUE ZAIRY +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS AIRY FUNCTION,BESSEL FUNCTIONS OF ORDER ONE THIRD +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE AIRY FUNCTIONS AI(Z) AND DAI(Z) FOR COMPLEX Z +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C ON KODE=1, ZAIRY COMPUTES THE COMPLEX AIRY FUNCTION AI(Z) OR +C ITS DERIVATIVE DAI(Z)/DZ ON ID=0 OR ID=1 RESPECTIVELY. ON +C KODE=2, A SCALING OPTION CEXP(ZTA)*AI(Z) OR CEXP(ZTA)* +C DAI(Z)/DZ IS PROVIDED TO REMOVE THE EXPONENTIAL DECAY IN +C -PI/3.LT.ARG(Z).LT.PI/3 AND THE EXPONENTIAL GROWTH IN +C PI/3.LT.ABS(ARG(Z)).LT.PI WHERE ZTA=(2/3)*Z*CSQRT(Z). +C +C WHILE THE AIRY FUNCTIONS AI(Z) AND DAI(Z)/DZ ARE ANALYTIC IN +C THE WHOLE Z PLANE, THE CORRESPONDING SCALED FUNCTIONS DEFINED +C FOR KODE=2 HAVE A CUT ALONG THE NEGATIVE REAL AXIS. +C DEFINTIONS AND NOTATION ARE FOUND IN THE NBS HANDBOOK OF +C MATHEMATICAL FUNCTIONS (REF. 1). 
+C +C INPUT ZR,ZI ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI) +C ID - ORDER OF DERIVATIVE, ID=0 OR ID=1 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C AI=AI(Z) ON ID=0 OR +C AI=DAI(Z)/DZ ON ID=1 +C = 2 RETURNS +C AI=CEXP(ZTA)*AI(Z) ON ID=0 OR +C AI=CEXP(ZTA)*DAI(Z)/DZ ON ID=1 WHERE +C ZTA=(2/3)*Z*CSQRT(Z) +C +C OUTPUT AIR,AII ARE DOUBLE PRECISION +C AIR,AII- COMPLEX ANSWER DEPENDING ON THE CHOICES FOR ID AND +C KODE +C NZ - UNDERFLOW INDICATOR +C NZ= 0 , NORMAL RETURN +C NZ= 1 , AI=CMPLX(0.0D0,0.0D0) DUE TO UNDERFLOW IN +C -PI/3.LT.ARG(Z).LT.PI/3 ON KODE=1 +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, REAL(ZTA) +C TOO LARGE ON KODE=1 +C IERR=3, CABS(Z) LARGE - COMPUTATION COMPLETED +C LOSSES OF SIGNIFCANCE BY ARGUMENT REDUCTION +C PRODUCE LESS THAN HALF OF MACHINE ACCURACY +C IERR=4, CABS(Z) TOO LARGE - NO COMPUTATION +C COMPLETE LOSS OF ACCURACY BY ARGUMENT +C REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C AI AND DAI ARE COMPUTED FOR CABS(Z).GT.1.0 FROM THE K BESSEL +C FUNCTIONS BY +C +C AI(Z)=C*SQRT(Z)*K(1/3,ZTA) , DAI(Z)=-C*Z*K(2/3,ZTA) +C C=1.0/(PI*SQRT(3.0)) +C ZTA=(2/3)*Z**(3/2) +C +C WITH THE POWER SERIES FOR CABS(Z).LE.1.0. +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z IS LARGE, LOSSES +C OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. CONSEQUENTLY, IF +C THE MAGNITUDE OF ZETA=(2/3)*Z**1.5 EXCEEDS U1=SQRT(0.5/UR), +C THEN LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR +C FLAG IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C ALSO, IF THE MAGNITUDE OF ZETA IS LARGER THAN U2=0.5/UR, THEN +C ALL SIGNIFICANCE IS LOST AND IERR=4. IN ORDER TO USE THE INT +C FUNCTION, ZETA MUST BE FURTHER RESTRICTED NOT TO EXCEED THE +C LARGEST INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF ZETA +C MUST BE RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, +C AND U3 ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE +C PRECISION ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE +C PRECISION ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMIT- +C ING IN THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT THE MAG- +C NITUDE OF Z CANNOT EXCEED 3.1E+4 IN SINGLE AND 2.1E+6 IN +C DOUBLE PRECISION ARITHMETIC. THIS ALSO MEANS THAT ONE CAN +C EXPECT TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, +C NO DIGITS IN SINGLE PRECISION AND ONLY 7 DIGITS IN DOUBLE +C PRECISION ARITHMETIC. SIMILAR CONSIDERATIONS HOLD FOR OTHER +C MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. 
HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZACAI,ZBKNU,ZEXP,ZSQRT,I1MACH,D1MACH +C***END PROLOGUE ZAIRY +C COMPLEX AI,CONE,CSQ,CY,S1,S2,TRM1,TRM2,Z,ZTA,Z3 + DOUBLE PRECISION AA, AD, AII, AIR, AK, ALIM, ATRM, AZ, AZ3, BK, + * CC, CK, COEF, CONEI, CONER, CSQI, CSQR, CYI, CYR, C1, C2, DIG, + * DK, D1, D2, ELIM, FID, FNU, PTR, RL, R1M5, SFAC, STI, STR, + * S1I, S1R, S2I, S2R, TOL, TRM1I, TRM1R, TRM2I, TRM2R, TTH, ZEROI, + * ZEROR, ZI, ZR, ZTAI, ZTAR, Z3I, Z3R, D1MACH, ZABS, ALAZ, BB + INTEGER ID, IERR, IFLAG, K, KODE, K1, K2, MR, NN, NZ, I1MACH + DIMENSION CYR(1), CYI(1) + DATA TTH, C1, C2, COEF /6.66666666666666667D-01, + * 3.55028053887817240D-01,2.58819403792806799D-01, + * 1.83776298473930683D-01/ + DATA ZEROR, ZEROI, CONER, CONEI /0.0D0,0.0D0,1.0D0,0.0D0/ +C***FIRST EXECUTABLE STATEMENT ZAIRY + IERR = 0 + NZ=0 + IF (ID.LT.0 .OR. ID.GT.1) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (IERR.NE.0) RETURN + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + TOL = DMAX1(D1MACH(4),1.0D-18) + FID = DBLE(FLOAT(ID)) + IF (AZ.GT.1.0D0) GO TO 70 +C----------------------------------------------------------------------- +C POWER SERIES FOR CABS(Z).LE.1. 
+C----------------------------------------------------------------------- + S1R = CONER + S1I = CONEI + S2R = CONER + S2I = CONEI + IF (AZ.LT.TOL) GO TO 170 + AA = AZ*AZ + IF (AA.LT.TOL/AZ) GO TO 40 + TRM1R = CONER + TRM1I = CONEI + TRM2R = CONER + TRM2I = CONEI + ATRM = 1.0D0 + STR = ZR*ZR - ZI*ZI + STI = ZR*ZI + ZI*ZR + Z3R = STR*ZR - STI*ZI + Z3I = STR*ZI + STI*ZR + AZ3 = AZ*AA + AK = 2.0D0 + FID + BK = 3.0D0 - FID - FID + CK = 4.0D0 - FID + DK = 3.0D0 + FID + FID + D1 = AK*DK + D2 = BK*CK + AD = DMIN1(D1,D2) + AK = 24.0D0 + 9.0D0*FID + BK = 30.0D0 - 9.0D0*FID + DO 30 K=1,25 + STR = (TRM1R*Z3R-TRM1I*Z3I)/D1 + TRM1I = (TRM1R*Z3I+TRM1I*Z3R)/D1 + TRM1R = STR + S1R = S1R + TRM1R + S1I = S1I + TRM1I + STR = (TRM2R*Z3R-TRM2I*Z3I)/D2 + TRM2I = (TRM2R*Z3I+TRM2I*Z3R)/D2 + TRM2R = STR + S2R = S2R + TRM2R + S2I = S2I + TRM2I + ATRM = ATRM*AZ3/AD + D1 = D1 + AK + D2 = D2 + BK + AD = DMIN1(D1,D2) + IF (ATRM.LT.TOL*AD) GO TO 40 + AK = AK + 18.0D0 + BK = BK + 18.0D0 + 30 CONTINUE + 40 CONTINUE + IF (ID.EQ.1) THEN + GO TO 50 + END IF + AIR = S1R*C1 - C2*(ZR*S2R-ZI*S2I) + AII = S1I*C1 - C2*(ZR*S2I+ZI*S2R) + IF (KODE.EQ.1) RETURN + CALL ZSQRT(ZR, ZI, STR, STI) + ZTAR = TTH*(ZR*STR-ZI*STI) + ZTAI = TTH*(ZR*STI+ZI*STR) + CALL ZEXP(ZTAR, ZTAI, STR, STI) + PTR = AIR*STR - AII*STI + AII = AIR*STI + AII*STR + AIR = PTR + RETURN + 50 CONTINUE + AIR = -S2R*C2 + AII = -S2I*C2 + IF (AZ.LE.TOL) GO TO 60 + STR = ZR*S1R - ZI*S1I + STI = ZR*S1I + ZI*S1R + CC = C1/(1.0D0+FID) + AIR = AIR + CC*(STR*ZR-STI*ZI) + AII = AII + CC*(STR*ZI+STI*ZR) + 60 CONTINUE + IF (KODE.EQ.1) RETURN + CALL ZSQRT(ZR, ZI, STR, STI) + ZTAR = TTH*(ZR*STR-ZI*STI) + ZTAI = TTH*(ZR*STI+ZI*STR) + CALL ZEXP(ZTAR, ZTAI, STR, STI) + PTR = STR*AIR - STI*AII + AII = STR*AII + STI*AIR + AIR = PTR + RETURN +C----------------------------------------------------------------------- +C CASE FOR CABS(Z).GT.1.0 +C----------------------------------------------------------------------- + 70 CONTINUE + FNU = (1.0D0+FID)/3.0D0 +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0D-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). 
+C----------------------------------------------------------------------- + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + RL = 1.2D0*DIG + 3.0D0 + ALAZ = DLOG(AZ) +C-------------------------------------------------------------------------- +C TEST FOR PROPER RANGE +C----------------------------------------------------------------------- + AA=0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA=DMIN1(AA,BB) + AA=AA**TTH + IF (AZ.GT.AA) GO TO 260 + AA=DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + CALL ZSQRT(ZR, ZI, CSQR, CSQI) + ZTAR = TTH*(ZR*CSQR-ZI*CSQI) + ZTAI = TTH*(ZR*CSQI+ZI*CSQR) +C----------------------------------------------------------------------- +C RE(ZTA).LE.0 WHEN RE(Z).LT.0, ESPECIALLY WHEN IM(Z) IS SMALL +C----------------------------------------------------------------------- + IFLAG = 0 + SFAC = 1.0D0 + AK = ZTAI + IF (ZR.GE.0.0D0) GO TO 80 + BK = ZTAR + CK = -DABS(BK) + ZTAR = CK + ZTAI = AK + 80 CONTINUE + IF (ZI.NE.0.0D0) GO TO 90 + IF (ZR.GT.0.0D0) GO TO 90 + ZTAR = 0.0D0 + ZTAI = AK + 90 CONTINUE + AA = ZTAR + IF (AA.GE.0.0D0 .AND. ZR.GT.0.0D0) GO TO 110 + IF (KODE.EQ.2) GO TO 100 +C----------------------------------------------------------------------- +C OVERFLOW TEST +C----------------------------------------------------------------------- + IF (AA.GT.(-ALIM)) GO TO 100 + AA = -AA + 0.25D0*ALAZ + IFLAG = 1 + SFAC = TOL + IF (AA.GT.ELIM) GO TO 270 + 100 CONTINUE +C----------------------------------------------------------------------- +C CBKNU AND CACON RETURN EXP(ZTA)*K(FNU,ZTA) ON KODE=2 +C----------------------------------------------------------------------- + MR = 1 + IF (ZI.LT.0.0D0) MR = -1 + CALL ZACAI(ZTAR, ZTAI, FNU, KODE, MR, 1, CYR, CYI, NN, RL, TOL, + * ELIM, ALIM) + IF (NN.LT.0) GO TO 280 + NZ = NZ + NN + GO TO 130 + 110 CONTINUE + IF (KODE.EQ.2) GO TO 120 +C----------------------------------------------------------------------- +C UNDERFLOW TEST +C----------------------------------------------------------------------- + IF (AA.LT.ALIM) GO TO 120 + AA = -AA - 0.25D0*ALAZ + IFLAG = 2 + SFAC = 1.0D0/TOL + IF (AA.LT.(-ELIM)) GO TO 210 + 120 CONTINUE + CALL ZBKNU(ZTAR, ZTAI, FNU, KODE, 1, CYR, CYI, NZ, TOL, ELIM, + * ALIM) + 130 CONTINUE + S1R = CYR(1)*COEF + S1I = CYI(1)*COEF + IF (IFLAG.NE.0) GO TO 150 + IF (ID.EQ.1) GO TO 140 + AIR = CSQR*S1R - CSQI*S1I + AII = CSQR*S1I + CSQI*S1R + RETURN + 140 CONTINUE + AIR = -(ZR*S1R-ZI*S1I) + AII = -(ZR*S1I+ZI*S1R) + RETURN + 150 CONTINUE + S1R = S1R*SFAC + S1I = S1I*SFAC + IF (ID.EQ.1) GO TO 160 + STR = S1R*CSQR - S1I*CSQI + S1I = S1R*CSQI + S1I*CSQR + S1R = STR + AIR = S1R/SFAC + AII = S1I/SFAC + RETURN + 160 CONTINUE + STR = -(S1R*ZR-S1I*ZI) + S1I = -(S1R*ZI+S1I*ZR) + S1R = STR + AIR = S1R/SFAC + AII = S1I/SFAC + RETURN + 170 CONTINUE + AA = 1.0D+3*D1MACH(1) + S1R = ZEROR + S1I = ZEROI + IF (ID.EQ.1) GO TO 190 + IF (AZ.LE.AA) GO TO 180 + S1R = C2*ZR + S1I = C2*ZI + 180 CONTINUE + AIR = C1 - S1R + AII = -S1I + RETURN + 190 CONTINUE + AIR = -C2 + AII = 0.0D0 + AA = DSQRT(AA) + IF (AZ.LE.AA) GO TO 200 + S1R = 0.5D0*(ZR*ZR-ZI*ZI) + S1I = ZR*ZI + 200 CONTINUE + AIR = AIR + C1*S1R + AII = AII + C1*S1I + RETURN + 210 CONTINUE + NZ = 1 + AIR = ZEROR + AII = ZEROI + RETURN + 270 CONTINUE + NZ = 0 + IERR=2 + RETURN + 280 CONTINUE + IF(NN.EQ.(-1)) GO TO 270 + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + IERR=4 + NZ=0 + RETURN + END 
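On the Go side this routine is reached through the ZairyFort wrapper in the amoslib shim above: ID selects Ai(z) (0) or dAi/dz (1), KODE=1 returns the unscaled value while KODE=2 multiplies it by exp(zta) with zta = (2/3)*z*sqrt(z), and NZ=1 signals the underflow case described in the prologue. Note that the wrapper keeps IERR local and never returns it, so the IERR=2..5 conditions listed above are not visible to the Go caller. Continuing the earlier sketch (argument values are illustrative only):

    // Ai(1 + 0.5i), unscaled (ID=0, KODE=1).
    air, aii, nz := amoslib.ZairyFort(1.0, 0.5, 0, 1)
    if nz != 0 {
        // result underflowed to zero (KODE=1, -pi/3 < arg(z) < pi/3)
    }
    fmt.Println(complex(air, aii))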
diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zasyi.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zasyi.f new file mode 100644 index 0000000..139d739 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zasyi.f @@ -0,0 +1,169 @@ + SUBROUTINE ZASYI(ZR, ZI, FNU, KODE, N, YR, YI, NZ, RL, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZASYI +C***REFER TO ZBESI,ZBESK +C +C ZASYI COMPUTES THE I BESSEL FUNCTION FOR REAL(Z).GE.0.0 BY +C MEANS OF THE ASYMPTOTIC EXPANSION FOR LARGE CABS(Z) IN THE +C REGION CABS(Z).GT.MAX(RL,FNU*FNU/2). NZ=0 IS A NORMAL RETURN. +C NZ.LT.0 INDICATES AN OVERFLOW ON KODE=1. +C +C***ROUTINES CALLED D1MACH,ZABS,ZDIV,ZEXP,ZMLT,ZSQRT +C***END PROLOGUE ZASYI +C COMPLEX AK1,CK,CONE,CS1,CS2,CZ,CZERO,DK,EZ,P1,RZ,S2,Y,Z + DOUBLE PRECISION AA, AEZ, AK, AK1I, AK1R, ALIM, ARG, ARM, ATOL, + * AZ, BB, BK, CKI, CKR, CONEI, CONER, CS1I, CS1R, CS2I, CS2R, CZI, + * CZR, DFNU, DKI, DKR, DNU2, ELIM, EZI, EZR, FDN, FNU, PI, P1I, + * P1R, RAZ, RL, RTPI, RTR1, RZI, RZR, S, SGN, SQK, STI, STR, S2I, + * S2R, TOL, TZI, TZR, YI, YR, ZEROI, ZEROR, ZI, ZR, D1MACH, ZABS + INTEGER I, IB, IL, INU, J, JL, K, KODE, KODED, M, N, NN, NZ + DIMENSION YR(N), YI(N) + DATA PI, RTPI /3.14159265358979324D0 , 0.159154943091895336D0 / + DATA ZEROR,ZEROI,CONER,CONEI / 0.0D0, 0.0D0, 1.0D0, 0.0D0 / +C + NZ = 0 + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + ARM = 1.0D+3*D1MACH(1) + RTR1 = DSQRT(ARM) + IL = MIN0(2,N) + DFNU = FNU + DBLE(FLOAT(N-IL)) +C----------------------------------------------------------------------- +C OVERFLOW TEST +C----------------------------------------------------------------------- + RAZ = 1.0D0/AZ + STR = ZR*RAZ + STI = -ZI*RAZ + AK1R = RTPI*STR*RAZ + AK1I = RTPI*STI*RAZ + CALL ZSQRT(AK1R, AK1I, AK1R, AK1I) + CZR = ZR + CZI = ZI + IF (KODE.NE.2) GO TO 10 + CZR = ZEROR + CZI = ZI + 10 CONTINUE + IF (DABS(CZR).GT.ELIM) GO TO 100 + DNU2 = DFNU + DFNU + KODED = 1 + IF ((DABS(CZR).GT.ALIM) .AND. (N.GT.2)) GO TO 20 + KODED = 0 + CALL ZEXP(CZR, CZI, STR, STI) + CALL ZMLT(AK1R, AK1I, STR, STI, AK1R, AK1I) + 20 CONTINUE + FDN = 0.0D0 + IF (DNU2.GT.RTR1) THEN + FDN = DNU2*DNU2 + END IF + EZR = ZR*8.0D0 + EZI = ZI*8.0D0 +C----------------------------------------------------------------------- +C WHEN Z IS IMAGINARY, THE ERROR TEST MUST BE MADE RELATIVE TO THE +C FIRST RECIPROCAL POWER SINCE THIS IS THE LEADING TERM OF THE +C EXPANSION FOR THE IMAGINARY PART. 
+C----------------------------------------------------------------------- + AEZ = 8.0D0*AZ + S = TOL/AEZ + JL = INT(SNGL(RL+RL)) + 2 + P1R = ZEROR + P1I = ZEROI + IF (ZI.EQ.0.0D0) GO TO 30 +C----------------------------------------------------------------------- +C CALCULATE EXP(PI*(0.5+FNU+N-IL)*I) TO MINIMIZE LOSSES OF +C SIGNIFICANCE WHEN FNU OR N IS LARGE +C----------------------------------------------------------------------- + INU = INT(SNGL(FNU)) + ARG = (FNU-DBLE(FLOAT(INU)))*PI + INU = INU + N - IL + AK = -DSIN(ARG) + BK = DCOS(ARG) + IF (ZI.LT.0.0D0) BK = -BK + P1R = AK + P1I = BK + IF (MOD(INU,2).EQ.0) GO TO 30 + P1R = -P1R + P1I = -P1I + 30 CONTINUE + DO 70 K=1,IL + SQK = FDN - 1.0D0 + ATOL = S*DABS(SQK) + SGN = 1.0D0 + CS1R = CONER + CS1I = CONEI + CS2R = CONER + CS2I = CONEI + CKR = CONER + CKI = CONEI + AK = 0.0D0 + AA = 1.0D0 + BB = AEZ + DKR = EZR + DKI = EZI + DO 40 J=1,JL + CALL ZDIV(CKR, CKI, DKR, DKI, STR, STI) + CKR = STR*SQK + CKI = STI*SQK + CS2R = CS2R + CKR + CS2I = CS2I + CKI + SGN = -SGN + CS1R = CS1R + CKR*SGN + CS1I = CS1I + CKI*SGN + DKR = DKR + EZR + DKI = DKI + EZI + AA = AA*DABS(SQK)/BB + BB = BB + AEZ + AK = AK + 8.0D0 + SQK = SQK - AK + IF (AA.LE.ATOL) THEN + GO TO 50 + END IF + 40 CONTINUE + GO TO 110 + 50 CONTINUE + S2R = CS1R + S2I = CS1I + IF (ZR+ZR.GE.ELIM) GO TO 60 + TZR = ZR + ZR + TZI = ZI + ZI + CALL ZEXP(-TZR, -TZI, STR, STI) + CALL ZMLT(STR, STI, P1R, P1I, STR, STI) + CALL ZMLT(STR, STI, CS2R, CS2I, STR, STI) + S2R = S2R + STR + S2I = S2I + STI + 60 CONTINUE + FDN = FDN + 8.0D0*DFNU + 4.0D0 + P1R = -P1R + P1I = -P1I + M = N - IL + K + YR(M) = S2R*AK1R - S2I*AK1I + YI(M) = S2R*AK1I + S2I*AK1R + 70 CONTINUE + IF (N.LE.2) RETURN + NN = N + K = NN - 2 + AK = DBLE(FLOAT(K)) + STR = ZR*RAZ + STI = -ZI*RAZ + RZR = (STR+STR)*RAZ + RZI = (STI+STI)*RAZ + IB = 3 + DO 80 I=IB,NN + YR(K) = (AK+FNU)*(RZR*YR(K+1)-RZI*YI(K+1)) + YR(K+2) + YI(K) = (AK+FNU)*(RZR*YI(K+1)+RZI*YR(K+1)) + YI(K+2) + AK = AK - 1.0D0 + K = K - 1 + 80 CONTINUE + IF (KODED.EQ.0) RETURN + CALL ZEXP(CZR, CZI, CKR, CKI) + DO 90 I=1,NN + STR = YR(I)*CKR - YI(I)*CKI + YI(I) = YR(I)*CKI + YI(I)*CKR + YR(I) = STR + 90 CONTINUE + RETURN + 100 CONTINUE + NZ = -1 + RETURN + 110 CONTINUE + NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesh.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesh.f new file mode 100644 index 0000000..a055ae4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesh.f @@ -0,0 +1,348 @@ + SUBROUTINE ZBESH(ZR, ZI, FNU, KODE, M, N, CYR, CYI, NZ, IERR) +C***BEGIN PROLOGUE ZBESH +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS H-BESSEL FUNCTIONS,BESSEL FUNCTIONS OF COMPLEX ARGUMENT, +C BESSEL FUNCTIONS OF THIRD KIND,HANKEL FUNCTIONS +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE THE H-BESSEL FUNCTIONS OF A COMPLEX ARGUMENT +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C ON KODE=1, ZBESH COMPUTES AN N MEMBER SEQUENCE OF COMPLEX +C HANKEL (BESSEL) FUNCTIONS CY(J)=H(M,FNU+J-1,Z) FOR KINDS M=1 +C OR 2, REAL, NONNEGATIVE ORDERS FNU+J-1, J=1,...,N, AND COMPLEX +C Z.NE.CMPLX(0.0,0.0) IN THE CUT PLANE -PI.LT.ARG(Z).LE.PI. +C ON KODE=2, ZBESH RETURNS THE SCALED HANKEL FUNCTIONS +C +C CY(I)=EXP(-MM*Z*I)*H(M,FNU+J-1,Z) MM=3-2*M, I**2=-1. +C +C WHICH REMOVES THE EXPONENTIAL BEHAVIOR IN BOTH THE UPPER AND +C LOWER HALF PLANES. 
DEFINITIONS AND NOTATION ARE FOUND IN THE +C NBS HANDBOOK OF MATHEMATICAL FUNCTIONS (REF. 1). +C +C INPUT ZR,ZI,FNU ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI), Z.NE.CMPLX(0.0D0,0.0D0), +C -PT.LT.ARG(Z).LE.PI +C FNU - ORDER OF INITIAL H FUNCTION, FNU.GE.0.0D0 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C CY(J)=H(M,FNU+J-1,Z), J=1,...,N +C = 2 RETURNS +C CY(J)=H(M,FNU+J-1,Z)*EXP(-I*Z*(3-2M)) +C J=1,...,N , I**2=-1 +C M - KIND OF HANKEL FUNCTION, M=1 OR 2 +C N - NUMBER OF MEMBERS IN THE SEQUENCE, N.GE.1 +C +C OUTPUT CYR,CYI ARE DOUBLE PRECISION +C CYR,CYI- DOUBLE PRECISION VECTORS WHOSE FIRST N COMPONENTS +C CONTAIN REAL AND IMAGINARY PARTS FOR THE SEQUENCE +C CY(J)=H(M,FNU+J-1,Z) OR +C CY(J)=H(M,FNU+J-1,Z)*EXP(-I*Z*(3-2M)) J=1,...,N +C DEPENDING ON KODE, I**2=-1. +C NZ - NUMBER OF COMPONENTS SET TO ZERO DUE TO UNDERFLOW, +C NZ= 0 , NORMAL RETURN +C NZ.GT.0 , FIRST NZ COMPONENTS OF CY SET TO ZERO DUE +C TO UNDERFLOW, CY(J)=CMPLX(0.0D0,0.0D0) +C J=1,...,NZ WHEN Y.GT.0.0 AND M=1 OR +C Y.LT.0.0 AND M=2. FOR THE COMPLMENTARY +C HALF PLANES, NZ STATES ONLY THE NUMBER +C OF UNDERFLOWS. +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, FNU TOO +C LARGE OR CABS(Z) TOO SMALL OR BOTH +C IERR=3, CABS(Z) OR FNU+N-1 LARGE - COMPUTATION DONE +C BUT LOSSES OF SIGNIFCANCE BY ARGUMENT +C REDUCTION PRODUCE LESS THAN HALF OF MACHINE +C ACCURACY +C IERR=4, CABS(Z) OR FNU+N-1 TOO LARGE - NO COMPUTA- +C TION BECAUSE OF COMPLETE LOSSES OF SIGNIFI- +C CANCE BY ARGUMENT REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C THE COMPUTATION IS CARRIED OUT BY THE RELATION +C +C H(M,FNU,Z)=(1/MP)*EXP(-MP*FNU)*K(FNU,Z*EXP(-MP)) +C MP=MM*HPI*I, MM=3-2*M, HPI=PI/2, I**2=-1 +C +C FOR M=1 OR 2 WHERE THE K BESSEL FUNCTION IS COMPUTED FOR THE +C RIGHT HALF PLANE RE(Z).GE.0.0. THE K FUNCTION IS CONTINUED +C TO THE LEFT HALF PLANE BY THE RELATION +C +C K(FNU,Z*EXP(MP)) = EXP(-MP*FNU)*K(FNU,Z)-MP*I(FNU,Z) +C MP=MR*PI*I, MR=+1 OR -1, RE(Z).GT.0, I**2=-1 +C +C WHERE I(FNU,Z) IS THE I BESSEL FUNCTION. +C +C EXPONENTIAL DECAY OF H(M,FNU,Z) OCCURS IN THE UPPER HALF Z +C PLANE FOR M=1 AND THE LOWER HALF Z PLANE FOR M=2. EXPONENTIAL +C GROWTH OCCURS IN THE COMPLEMENTARY HALF PLANES. SCALING +C BY EXP(-MM*Z*I) REMOVES THE EXPONENTIAL BEHAVIOR IN THE +C WHOLE Z PLANE FOR Z TO INFINITY. +C +C FOR NEGATIVE ORDERS,THE FORMULAE +C +C H(1,-FNU,Z) = H(1,FNU,Z)*CEXP( PI*FNU*I) +C H(2,-FNU,Z) = H(2,FNU,Z)*CEXP(-PI*FNU*I) +C I**2=-1 +C +C CAN BE USED. +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z OR FNU+N-1 IS +C LARGE, LOSSES OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. +C CONSEQUENTLY, IF EITHER ONE EXCEEDS U1=SQRT(0.5/UR), THEN +C LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR FLAG +C IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C IF EITHER IS LARGER THAN U2=0.5/UR, THEN ALL SIGNIFICANCE IS +C LOST AND IERR=4. IN ORDER TO USE THE INT FUNCTION, ARGUMENTS +C MUST BE FURTHER RESTRICTED NOT TO EXCEED THE LARGEST MACHINE +C INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF Z AND FNU+N-1 IS +C RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, AND U3 +C ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE PRECISION +C ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE PRECISION +C ARITHMETIC RESPECTIVELY. 
THIS MAKES U2 AND U3 LIMITING IN +C THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT ONE CAN EXPECT +C TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, NO DIGITS +C IN SINGLE AND ONLY 7 DIGITS IN DOUBLE PRECISION ARITHMETIC. +C SIMILAR CONSIDERATIONS HOLD FOR OTHER MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0D-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C BY D. E. AMOS, SAND83-0083, MAY, 1983. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZACON,ZBKNU,ZBUNK,ZUOIK,ZABS,I1MACH,D1MACH +C***END PROLOGUE ZBESH +C +C COMPLEX CY,Z,ZN,ZT,CSGN + DOUBLE PRECISION AA, ALIM, ALN, ARG, AZ, CYI, CYR, DIG, ELIM, + * FMM, FN, FNU, FNUL, HPI, RHPI, RL, R1M5, SGN, STR, TOL, UFL, ZI, + * ZNI, ZNR, ZR, ZTI, D1MACH, ZABS, BB, ASCLE, RTOL, ATOL, STI, + * CSGNR, CSGNI + INTEGER I, IERR, INU, INUH, IR, K, KODE, K1, K2, M, + * MM, MR, N, NN, NUF, NW, NZ, I1MACH + DIMENSION CYR(N), CYI(N) +C + DATA HPI /1.57079632679489662D0/ +C +C***FIRST EXECUTABLE STATEMENT ZBESH + IERR = 0 + NZ=0 + IF (ZR.EQ.0.0D0 .AND. ZI.EQ.0.0D0) IERR=1 + IF (FNU.LT.0.0D0) IERR=1 + IF (M.LT.1 .OR. M.GT.2) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (N.LT.1) IERR=1 + IF (IERR.NE.0) RETURN + NN = N +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0E-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). 
+C FNUL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC SERIES FOR LARGE FNU +C----------------------------------------------------------------------- + TOL = DMAX1(D1MACH(4),1.0D-18) + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + FNUL = 10.0D0 + 6.0D0*(DIG-3.0D0) + RL = 1.2D0*DIG + 3.0D0 + FN = FNU + DBLE(FLOAT(NN-1)) + MM = 3 - M - M + FMM = DBLE(FLOAT(MM)) + ZNR = FMM*ZI + ZNI = -FMM*ZR +C----------------------------------------------------------------------- +C TEST FOR PROPER RANGE +C----------------------------------------------------------------------- + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + AA = 0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA = DMIN1(AA,BB) + IF (AZ.GT.AA) GO TO 260 + IF (FN.GT.AA) GO TO 260 + AA = DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + IF (FN.GT.AA) IERR=3 +C----------------------------------------------------------------------- +C OVERFLOW TEST ON THE LAST MEMBER OF THE SEQUENCE +C----------------------------------------------------------------------- + UFL = D1MACH(1)*1.0D+3 + IF (AZ.LT.UFL) GO TO 230 + IF (FNU.GT.FNUL) GO TO 90 + IF (FN.LE.1.0D0) GO TO 70 + IF (FN.GT.2.0D0) GO TO 60 + IF (AZ.GT.TOL) GO TO 70 + ARG = 0.5D0*AZ + ALN = -FN*DLOG(ARG) + IF (ALN.GT.ELIM) GO TO 230 + GO TO 70 + 60 CONTINUE + CALL ZUOIK(ZNR, ZNI, FNU, KODE, 2, NN, CYR, CYI, NUF, TOL, ELIM, + * ALIM) + IF (NUF.LT.0) GO TO 230 + NZ = NZ + NUF + NN = NN - NUF +C----------------------------------------------------------------------- +C HERE NN=N OR NN=0 SINCE NUF=0,NN, OR -1 ON RETURN FROM CUOIK +C IF NUF=NN, THEN CY(I)=CZERO FOR ALL I +C----------------------------------------------------------------------- + IF (NN.EQ.0) GO TO 140 + 70 CONTINUE + IF ((ZNR.LT.0.0D0) .OR. (ZNR.EQ.0.0D0 .AND. ZNI.LT.0.0D0 .AND. + * M.EQ.2)) GO TO 80 +C----------------------------------------------------------------------- +C RIGHT HALF PLANE COMPUTATION, XN.GE.0. .AND. (XN.NE.0. .OR. +C YN.GE.0. .OR. M=1) +C----------------------------------------------------------------------- + CALL ZBKNU(ZNR, ZNI, FNU, KODE, NN, CYR, CYI, NZ, TOL, ELIM, ALIM) + GO TO 110 +C----------------------------------------------------------------------- +C LEFT HALF PLANE COMPUTATION +C----------------------------------------------------------------------- + 80 CONTINUE + MR = -MM + CALL ZACON(ZNR, ZNI, FNU, KODE, MR, NN, CYR, CYI, NW, RL, FNUL, + * TOL, ELIM, ALIM) + IF (NW.LT.0) GO TO 240 + NZ=NW + GO TO 110 + 90 CONTINUE +C----------------------------------------------------------------------- +C UNIFORM ASYMPTOTIC EXPANSIONS FOR FNU.GT.FNUL +C----------------------------------------------------------------------- + MR = 0 + IF ((ZNR.GE.0.0D0) .AND. (ZNR.NE.0.0D0 .OR. ZNI.GE.0.0D0 .OR. + * M.NE.2)) GO TO 100 + MR = -MM + IF (ZNR.NE.0.0D0 .OR. 
ZNI.GE.0.0D0) GO TO 100 + ZNR = -ZNR + ZNI = -ZNI + 100 CONTINUE + CALL ZBUNK(ZNR, ZNI, FNU, KODE, MR, NN, CYR, CYI, NW, TOL, ELIM, + * ALIM) + IF (NW.LT.0) GO TO 240 + NZ = NZ + NW + 110 CONTINUE +C----------------------------------------------------------------------- +C H(M,FNU,Z) = -FMM*(I/HPI)*(ZT**FNU)*K(FNU,-Z*ZT) +C +C ZT=EXP(-FMM*HPI*I) = CMPLX(0.0,-FMM), FMM=3-2*M, M=1,2 +C----------------------------------------------------------------------- + SGN = DSIGN(HPI,-FMM) +C----------------------------------------------------------------------- +C CALCULATE EXP(FNU*HPI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE +C WHEN FNU IS LARGE +C----------------------------------------------------------------------- + INU = INT(SNGL(FNU)) + INUH = INU/2 + IR = INU - 2*INUH + ARG = (FNU-DBLE(FLOAT(INU-IR)))*SGN + RHPI = 1.0D0/SGN +C ZNI = RHPI*DCOS(ARG) +C ZNR = -RHPI*DSIN(ARG) + CSGNI = RHPI*DCOS(ARG) + CSGNR = -RHPI*DSIN(ARG) + IF (MOD(INUH,2).EQ.0) GO TO 120 +C ZNR = -ZNR +C ZNI = -ZNI + CSGNR = -CSGNR + CSGNI = -CSGNI + 120 CONTINUE + ZTI = -FMM + RTOL = 1.0D0/TOL + ASCLE = UFL*RTOL + DO 130 I=1,NN +C STR = CYR(I)*ZNR - CYI(I)*ZNI +C CYI(I) = CYR(I)*ZNI + CYI(I)*ZNR +C CYR(I) = STR +C STR = -ZNI*ZTI +C ZNI = ZNR*ZTI +C ZNR = STR + AA = CYR(I) + BB = CYI(I) + ATOL = 1.0D0 + IF (DMAX1(DABS(AA),DABS(BB)).GT.ASCLE) GO TO 135 + AA = AA*RTOL + BB = BB*RTOL + ATOL = TOL + 135 CONTINUE + STR = AA*CSGNR - BB*CSGNI + STI = AA*CSGNI + BB*CSGNR + CYR(I) = STR*ATOL + CYI(I) = STI*ATOL + STR = -CSGNI*ZTI + CSGNI = CSGNR*ZTI + CSGNR = STR + 130 CONTINUE + RETURN + 140 CONTINUE + IF (ZNR.LT.0.0D0) GO TO 230 + RETURN + 230 CONTINUE + NZ=0 + IERR=2 + RETURN + 240 CONTINUE + IF(NW.EQ.(-1)) GO TO 230 + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + NZ=0 + IERR=4 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesi.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesi.f new file mode 100644 index 0000000..5f7c140 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesi.f @@ -0,0 +1,269 @@ + SUBROUTINE ZBESI(ZR, ZI, FNU, KODE, N, CYR, CYI, NZ, IERR) +C***BEGIN PROLOGUE ZBESI +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS I-BESSEL FUNCTION,COMPLEX BESSEL FUNCTION, +C MODIFIED BESSEL FUNCTION OF THE FIRST KIND +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE I-BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C ON KODE=1, ZBESI COMPUTES AN N MEMBER SEQUENCE OF COMPLEX +C BESSEL FUNCTIONS CY(J)=I(FNU+J-1,Z) FOR REAL, NONNEGATIVE +C ORDERS FNU+J-1, J=1,...,N AND COMPLEX Z IN THE CUT PLANE +C -PI.LT.ARG(Z).LE.PI. ON KODE=2, ZBESI RETURNS THE SCALED +C FUNCTIONS +C +C CY(J)=EXP(-ABS(X))*I(FNU+J-1,Z) J = 1,...,N , X=REAL(Z) +C +C WITH THE EXPONENTIAL GROWTH REMOVED IN BOTH THE LEFT AND +C RIGHT HALF PLANES FOR Z TO INFINITY. DEFINITIONS AND NOTATION +C ARE FOUND IN THE NBS HANDBOOK OF MATHEMATICAL FUNCTIONS +C (REF. 1). 
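In standard notation the KODE=2 option above returns

    CY_j = e^{-|\Re z|}\, I_{\nu+j-1}(z), \qquad j = 1,\dots,N,

i.e. the modified Bessel values with the dominant exponential factor divided out, which keeps the sequence representable as |Re z| grows in either half plane.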
+C +C INPUT ZR,ZI,FNU ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI), -PI.LT.ARG(Z).LE.PI +C FNU - ORDER OF INITIAL I FUNCTION, FNU.GE.0.0D0 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C CY(J)=I(FNU+J-1,Z), J=1,...,N +C = 2 RETURNS +C CY(J)=I(FNU+J-1,Z)*EXP(-ABS(X)), J=1,...,N +C N - NUMBER OF MEMBERS OF THE SEQUENCE, N.GE.1 +C +C OUTPUT CYR,CYI ARE DOUBLE PRECISION +C CYR,CYI- DOUBLE PRECISION VECTORS WHOSE FIRST N COMPONENTS +C CONTAIN REAL AND IMAGINARY PARTS FOR THE SEQUENCE +C CY(J)=I(FNU+J-1,Z) OR +C CY(J)=I(FNU+J-1,Z)*EXP(-ABS(X)) J=1,...,N +C DEPENDING ON KODE, X=REAL(Z) +C NZ - NUMBER OF COMPONENTS SET TO ZERO DUE TO UNDERFLOW, +C NZ= 0 , NORMAL RETURN +C NZ.GT.0 , LAST NZ COMPONENTS OF CY SET TO ZERO +C TO UNDERFLOW, CY(J)=CMPLX(0.0D0,0.0D0) +C J = N-NZ+1,...,N +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, REAL(Z) TOO +C LARGE ON KODE=1 +C IERR=3, CABS(Z) OR FNU+N-1 LARGE - COMPUTATION DONE +C BUT LOSSES OF SIGNIFCANCE BY ARGUMENT +C REDUCTION PRODUCE LESS THAN HALF OF MACHINE +C ACCURACY +C IERR=4, CABS(Z) OR FNU+N-1 TOO LARGE - NO COMPUTA- +C TION BECAUSE OF COMPLETE LOSSES OF SIGNIFI- +C CANCE BY ARGUMENT REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C THE COMPUTATION IS CARRIED OUT BY THE POWER SERIES FOR +C SMALL CABS(Z), THE ASYMPTOTIC EXPANSION FOR LARGE CABS(Z), +C THE MILLER ALGORITHM NORMALIZED BY THE WRONSKIAN AND A +C NEUMANN SERIES FOR IMTERMEDIATE MAGNITUDES, AND THE +C UNIFORM ASYMPTOTIC EXPANSIONS FOR I(FNU,Z) AND J(FNU,Z) +C FOR LARGE ORDERS. BACKWARD RECURRENCE IS USED TO GENERATE +C SEQUENCES OR REDUCE ORDERS WHEN NECESSARY. +C +C THE CALCULATIONS ABOVE ARE DONE IN THE RIGHT HALF PLANE AND +C CONTINUED INTO THE LEFT HALF PLANE BY THE FORMULA +C +C I(FNU,Z*EXP(M*PI)) = EXP(M*PI*FNU)*I(FNU,Z) REAL(Z).GT.0.0 +C M = +I OR -I, I**2=-1 +C +C FOR NEGATIVE ORDERS,THE FORMULA +C +C I(-FNU,Z) = I(FNU,Z) + (2/PI)*SIN(PI*FNU)*K(FNU,Z) +C +C CAN BE USED. HOWEVER,FOR LARGE ORDERS CLOSE TO INTEGERS, THE +C THE FUNCTION CHANGES RADICALLY. WHEN FNU IS A LARGE POSITIVE +C INTEGER,THE MAGNITUDE OF I(-FNU,Z)=I(FNU,Z) IS A LARGE +C NEGATIVE POWER OF TEN. BUT WHEN FNU IS NOT AN INTEGER, +C K(FNU,Z) DOMINATES IN MAGNITUDE WITH A LARGE POSITIVE POWER OF +C TEN AND THE MOST THAT THE SECOND TERM CAN BE REDUCED IS BY +C UNIT ROUNDOFF FROM THE COEFFICIENT. THUS, WIDE CHANGES CAN +C OCCUR WITHIN UNIT ROUNDOFF OF A LARGE INTEGER FOR FNU. HERE, +C LARGE MEANS FNU.GT.CABS(Z). +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z OR FNU+N-1 IS +C LARGE, LOSSES OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. +C CONSEQUENTLY, IF EITHER ONE EXCEEDS U1=SQRT(0.5/UR), THEN +C LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR FLAG +C IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C IF EITHER IS LARGER THAN U2=0.5/UR, THEN ALL SIGNIFICANCE IS +C LOST AND IERR=4. IN ORDER TO USE THE INT FUNCTION, ARGUMENTS +C MUST BE FURTHER RESTRICTED NOT TO EXCEED THE LARGEST MACHINE +C INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF Z AND FNU+N-1 IS +C RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, AND U3 +C ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE PRECISION +C ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE PRECISION +C ARITHMETIC RESPECTIVELY. 
THIS MAKES U2 AND U3 LIMITING IN +C THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT ONE CAN EXPECT +C TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, NO DIGITS +C IN SINGLE AND ONLY 7 DIGITS IN DOUBLE PRECISION ARITHMETIC. +C SIMILAR CONSIDERATIONS HOLD FOR OTHER MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C BY D. E. AMOS, SAND83-0083, MAY, 1983. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZBINU,I1MACH,D1MACH +C***END PROLOGUE ZBESI +C COMPLEX CONE,CSGN,CW,CY,CZERO,Z,ZN + DOUBLE PRECISION AA, ALIM, ARG, CONEI, CONER, CSGNI, CSGNR, CYI, + * CYR, DIG, ELIM, FNU, FNUL, PI, RL, R1M5, STR, TOL, ZI, ZNI, ZNR, + * ZR, D1MACH, AZ, BB, FN, ZABS, ASCLE, RTOL, ATOL, STI + INTEGER I, IERR, INU, K, KODE, K1,K2,N,NZ,NN, I1MACH + DIMENSION CYR(N), CYI(N) + DATA PI /3.14159265358979324D0/ + DATA CONER, CONEI /1.0D0,0.0D0/ +C +C***FIRST EXECUTABLE STATEMENT ZBESI + IERR = 0 + NZ=0 + IF (FNU.LT.0.0D0) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (N.LT.1) IERR=1 + IF (IERR.NE.0) RETURN +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0E-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). +C FNUL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC SERIES FOR LARGE FNU. 
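As a rough orientation for the machine-constant block above (the same block recurs in the other drivers added by this patch), the following standalone Go sketch evaluates the identical formulas for an IEEE 754 double-precision host. It is not part of the vendored sources; the exponent bounds (-1021, 1024) and the 53-bit significand are assumptions about the target arithmetic.

    package main

    import (
        "fmt"
        "math"
    )

    // Rough Go rendering of the AMOS machine-constant set-up, assuming IEEE 754
    // doubles: D1MACH(4)=2^-52, D1MACH(5)=log10(2), I1MACH(14)=53,
    // I1MACH(15)=-1021, I1MACH(16)=1024.
    func main() {
        eps := math.Pow(2, -52)        // D1MACH(4), unit roundoff
        r1m5 := math.Log10(2)          // D1MACH(5)
        k := math.Min(1021, 1024)      // MIN0(IABS(I1MACH(15)), IABS(I1MACH(16)))
        tol := math.Max(eps, 1.0e-18)
        elim := 2.303 * (k*r1m5 - 3.0) // exponential over/underflow limit
        aa := r1m5 * 52.0              // (I1MACH(14)-1) base-2 digits -> decimal digits
        dig := math.Min(aa, 18.0)
        alim := elim + math.Max(-2.303*aa, -41.45)
        rl := 1.2*dig + 3.0            // |z| boundary of the large-z expansion
        fnul := 10.0 + 6.0*(dig-3.0)   // order boundary of the uniform expansions
        fmt.Printf("TOL=%.3e ELIM=%.2f ALIM=%.2f DIG=%.2f RL=%.2f FNUL=%.2f\n",
            tol, elim, alim, dig, rl, fnul)
        // Approximately: TOL=2.220e-16, ELIM=700.92, ALIM=664.87,
        // DIG=15.65, RL=21.78, FNUL=85.92.
    }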
+C----------------------------------------------------------------------- + TOL = DMAX1(D1MACH(4),1.0D-18) + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + RL = 1.2D0*DIG + 3.0D0 + FNUL = 10.0D0 + 6.0D0*(DIG-3.0D0) +C----------------------------------------------------------------------------- +C TEST FOR PROPER RANGE +C----------------------------------------------------------------------- + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + FN = FNU+DBLE(FLOAT(N-1)) + AA = 0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA = DMIN1(AA,BB) + IF (AZ.GT.AA) GO TO 260 + IF (FN.GT.AA) GO TO 260 + AA = DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + IF (FN.GT.AA) IERR=3 + ZNR = ZR + ZNI = ZI + CSGNR = CONER + CSGNI = CONEI + IF (ZR.GE.0.0D0) GO TO 40 + ZNR = -ZR + ZNI = -ZI +C----------------------------------------------------------------------- +C CALCULATE CSGN=EXP(FNU*PI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE +C WHEN FNU IS LARGE +C----------------------------------------------------------------------- + INU = INT(SNGL(FNU)) + ARG = (FNU-DBLE(FLOAT(INU)))*PI + IF (ZI.LT.0.0D0) ARG = -ARG + CSGNR = DCOS(ARG) + CSGNI = DSIN(ARG) + IF (MOD(INU,2).EQ.0) GO TO 40 + CSGNR = -CSGNR + CSGNI = -CSGNI + 40 CONTINUE + CALL ZBINU(ZNR, ZNI, FNU, KODE, N, CYR, CYI, NZ, RL, FNUL, TOL, + * ELIM, ALIM) + IF (NZ.LT.0) GO TO 120 + IF (ZR.GE.0.0D0) RETURN +C----------------------------------------------------------------------- +C ANALYTIC CONTINUATION TO THE LEFT HALF PLANE +C----------------------------------------------------------------------- + NN = N - NZ + IF (NN.EQ.0) RETURN + RTOL = 1.0D0/TOL + ASCLE = D1MACH(1)*RTOL*1.0D+3 + DO 50 I=1,NN +C STR = CYR(I)*CSGNR - CYI(I)*CSGNI +C CYI(I) = CYR(I)*CSGNI + CYI(I)*CSGNR +C CYR(I) = STR + AA = CYR(I) + BB = CYI(I) + ATOL = 1.0D0 + IF (DMAX1(DABS(AA),DABS(BB)).GT.ASCLE) GO TO 55 + AA = AA*RTOL + BB = BB*RTOL + ATOL = TOL + 55 CONTINUE + STR = AA*CSGNR - BB*CSGNI + STI = AA*CSGNI + BB*CSGNR + CYR(I) = STR*ATOL + CYI(I) = STI*ATOL + CSGNR = -CSGNR + CSGNI = -CSGNI + 50 CONTINUE + RETURN + 120 CONTINUE + IF(NZ.EQ.(-2)) GO TO 130 + NZ = 0 + IERR=2 + RETURN + 130 CONTINUE + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + NZ=0 + IERR=4 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesj.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesj.f new file mode 100644 index 0000000..1fb217f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesj.f @@ -0,0 +1,266 @@ + SUBROUTINE ZBESJ(ZR, ZI, FNU, KODE, N, CYR, CYI, NZ, IERR) +C***BEGIN PROLOGUE ZBESJ +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS J-BESSEL FUNCTION,BESSEL FUNCTION OF COMPLEX ARGUMENT, +C BESSEL FUNCTION OF FIRST KIND +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE THE J-BESSEL FUNCTION OF A COMPLEX ARGUMENT +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C ON KODE=1, CBESJ COMPUTES AN N MEMBER SEQUENCE OF COMPLEX +C BESSEL FUNCTIONS CY(I)=J(FNU+I-1,Z) FOR REAL, NONNEGATIVE +C ORDERS FNU+I-1, I=1,...,N AND COMPLEX Z IN THE CUT PLANE +C -PI.LT.ARG(Z).LE.PI. 
ON KODE=2, CBESJ RETURNS THE SCALED +C FUNCTIONS +C +C CY(I)=EXP(-ABS(Y))*J(FNU+I-1,Z) I = 1,...,N , Y=AIMAG(Z) +C +C WHICH REMOVE THE EXPONENTIAL GROWTH IN BOTH THE UPPER AND +C LOWER HALF PLANES FOR Z TO INFINITY. DEFINITIONS AND NOTATION +C ARE FOUND IN THE NBS HANDBOOK OF MATHEMATICAL FUNCTIONS +C (REF. 1). +C +C INPUT ZR,ZI,FNU ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI), -PI.LT.ARG(Z).LE.PI +C FNU - ORDER OF INITIAL J FUNCTION, FNU.GE.0.0D0 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C CY(I)=J(FNU+I-1,Z), I=1,...,N +C = 2 RETURNS +C CY(I)=J(FNU+I-1,Z)EXP(-ABS(Y)), I=1,...,N +C N - NUMBER OF MEMBERS OF THE SEQUENCE, N.GE.1 +C +C OUTPUT CYR,CYI ARE DOUBLE PRECISION +C CYR,CYI- DOUBLE PRECISION VECTORS WHOSE FIRST N COMPONENTS +C CONTAIN REAL AND IMAGINARY PARTS FOR THE SEQUENCE +C CY(I)=J(FNU+I-1,Z) OR +C CY(I)=J(FNU+I-1,Z)EXP(-ABS(Y)) I=1,...,N +C DEPENDING ON KODE, Y=AIMAG(Z). +C NZ - NUMBER OF COMPONENTS SET TO ZERO DUE TO UNDERFLOW, +C NZ= 0 , NORMAL RETURN +C NZ.GT.0 , LAST NZ COMPONENTS OF CY SET ZERO DUE +C TO UNDERFLOW, CY(I)=CMPLX(0.0D0,0.0D0), +C I = N-NZ+1,...,N +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, AIMAG(Z) +C TOO LARGE ON KODE=1 +C IERR=3, CABS(Z) OR FNU+N-1 LARGE - COMPUTATION DONE +C BUT LOSSES OF SIGNIFCANCE BY ARGUMENT +C REDUCTION PRODUCE LESS THAN HALF OF MACHINE +C ACCURACY +C IERR=4, CABS(Z) OR FNU+N-1 TOO LARGE - NO COMPUTA- +C TION BECAUSE OF COMPLETE LOSSES OF SIGNIFI- +C CANCE BY ARGUMENT REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C THE COMPUTATION IS CARRIED OUT BY THE FORMULA +C +C J(FNU,Z)=EXP( FNU*PI*I/2)*I(FNU,-I*Z) AIMAG(Z).GE.0.0 +C +C J(FNU,Z)=EXP(-FNU*PI*I/2)*I(FNU, I*Z) AIMAG(Z).LT.0.0 +C +C WHERE I**2 = -1 AND I(FNU,Z) IS THE I BESSEL FUNCTION. +C +C FOR NEGATIVE ORDERS,THE FORMULA +C +C J(-FNU,Z) = J(FNU,Z)*COS(PI*FNU) - Y(FNU,Z)*SIN(PI*FNU) +C +C CAN BE USED. HOWEVER,FOR LARGE ORDERS CLOSE TO INTEGERS, THE +C THE FUNCTION CHANGES RADICALLY. WHEN FNU IS A LARGE POSITIVE +C INTEGER,THE MAGNITUDE OF J(-FNU,Z)=J(FNU,Z)*COS(PI*FNU) IS A +C LARGE NEGATIVE POWER OF TEN. BUT WHEN FNU IS NOT AN INTEGER, +C Y(FNU,Z) DOMINATES IN MAGNITUDE WITH A LARGE POSITIVE POWER OF +C TEN AND THE MOST THAT THE SECOND TERM CAN BE REDUCED IS BY +C UNIT ROUNDOFF FROM THE COEFFICIENT. THUS, WIDE CHANGES CAN +C OCCUR WITHIN UNIT ROUNDOFF OF A LARGE INTEGER FOR FNU. HERE, +C LARGE MEANS FNU.GT.CABS(Z). +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z OR FNU+N-1 IS +C LARGE, LOSSES OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. +C CONSEQUENTLY, IF EITHER ONE EXCEEDS U1=SQRT(0.5/UR), THEN +C LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR FLAG +C IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C IF EITHER IS LARGER THAN U2=0.5/UR, THEN ALL SIGNIFICANCE IS +C LOST AND IERR=4. IN ORDER TO USE THE INT FUNCTION, ARGUMENTS +C MUST BE FURTHER RESTRICTED NOT TO EXCEED THE LARGEST MACHINE +C INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF Z AND FNU+N-1 IS +C RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, AND U3 +C ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE PRECISION +C ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE PRECISION +C ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMITING IN +C THEIR RESPECTIVE ARITHMETICS. 
THIS MEANS THAT ONE CAN EXPECT +C TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, NO DIGITS +C IN SINGLE AND ONLY 7 DIGITS IN DOUBLE PRECISION ARITHMETIC. +C SIMILAR CONSIDERATIONS HOLD FOR OTHER MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C BY D. E. AMOS, SAND83-0083, MAY, 1983. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZBINU,I1MACH,D1MACH +C***END PROLOGUE ZBESJ +C +C COMPLEX CI,CSGN,CY,Z,ZN + DOUBLE PRECISION AA, ALIM, ARG, CII, CSGNI, CSGNR, CYI, CYR, DIG, + * ELIM, FNU, FNUL, HPI, RL, R1M5, STR, TOL, ZI, ZNI, ZNR, ZR, + * D1MACH, BB, FN, AZ, ZABS, ASCLE, RTOL, ATOL, STI + INTEGER I, IERR, INU, INUH, IR, K, KODE, K1, K2, N, NL, NZ, I1MACH + DIMENSION CYR(N), CYI(N) + DATA HPI /1.57079632679489662D0/ +C +C***FIRST EXECUTABLE STATEMENT ZBESJ + IERR = 0 + NZ=0 + IF (FNU.LT.0.0D0) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (N.LT.1) IERR=1 + IF (IERR.NE.0) RETURN +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0E-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). +C FNUL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC SERIES FOR LARGE FNU. 
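The executable part of ZBESJ that follows implements the J-from-I rotation quoted in the prologue: it forms CSGN = EXP(I*FNU*PI/2) from the fractional part of FNU only (to limit loss of significance for large orders) and maps Z into the right half plane via ZN = -I*Z, or +I*Z when AIMAG(Z) is negative. A rough standalone Go sketch of that set-up, using complex128 for readability where the vendored routine carries split real/imaginary arrays:

    package main

    import (
        "fmt"
        "math"
    )

    // jRotation mirrors the csgn/zn set-up in ZBESJ. For Im(z) >= 0 it returns
    // zn = -i*z and csgn = exp(i*fnu*pi/2); for Im(z) < 0 it returns zn = i*z
    // and the conjugate factor, so that J(fnu,z) = csgn * I(fnu,zn) for the
    // leading order. csgn is built from the fractional part of fnu only.
    func jRotation(z complex128, fnu float64) (zn, csgn complex128) {
        inu := int(fnu)
        inuh := inu / 2
        ir := inu - 2*inuh
        arg := (fnu - float64(inu-ir)) * math.Pi / 2 // fnu*pi/2 reduced mod pi
        csgn = complex(math.Cos(arg), math.Sin(arg))
        if inuh%2 != 0 { // restore the factor (-1)^inuh split off above
            csgn = -csgn
        }
        zn = complex(imag(z), -real(z)) // zn = -i*z
        if imag(z) < 0 {
            zn = -zn                                // zn = i*z instead
            csgn = complex(real(csgn), -imag(csgn)) // exp(-i*fnu*pi/2)
        }
        return zn, csgn
    }

    func main() {
        zn, csgn := jRotation(complex(1.5, 2.0), 7.25)
        fmt.Println(zn, csgn)
    }

The driver then multiplies the factor by I (or by -I when AIMAG(Z) is negative) for each successive order FNU+1,...,FNU+N-1, which is what the CII bookkeeping in the loop below does.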
+C----------------------------------------------------------------------- + TOL = DMAX1(D1MACH(4),1.0D-18) + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + RL = 1.2D0*DIG + 3.0D0 + FNUL = 10.0D0 + 6.0D0*(DIG-3.0D0) +C----------------------------------------------------------------------- +C TEST FOR PROPER RANGE +C----------------------------------------------------------------------- + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + FN = FNU+DBLE(FLOAT(N-1)) + AA = 0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA = DMIN1(AA,BB) + IF (AZ.GT.AA) GO TO 260 + IF (FN.GT.AA) GO TO 260 + AA = DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + IF (FN.GT.AA) IERR=3 +C----------------------------------------------------------------------- +C CALCULATE CSGN=EXP(FNU*HPI*I) TO MINIMIZE LOSSES OF SIGNIFICANCE +C WHEN FNU IS LARGE +C----------------------------------------------------------------------- + CII = 1.0D0 + INU = INT(SNGL(FNU)) + INUH = INU/2 + IR = INU - 2*INUH + ARG = (FNU-DBLE(FLOAT(INU-IR)))*HPI + CSGNR = DCOS(ARG) + CSGNI = DSIN(ARG) + IF (MOD(INUH,2).EQ.0) GO TO 40 + CSGNR = -CSGNR + CSGNI = -CSGNI + 40 CONTINUE +C----------------------------------------------------------------------- +C ZN IS IN THE RIGHT HALF PLANE +C----------------------------------------------------------------------- + ZNR = ZI + ZNI = -ZR + IF (ZI.GE.0.0D0) GO TO 50 + ZNR = -ZNR + ZNI = -ZNI + CSGNI = -CSGNI + CII = -CII + 50 CONTINUE + CALL ZBINU(ZNR, ZNI, FNU, KODE, N, CYR, CYI, NZ, RL, FNUL, TOL, + * ELIM, ALIM) + IF (NZ.LT.0) GO TO 130 + NL = N - NZ + IF (NL.EQ.0) RETURN + RTOL = 1.0D0/TOL + ASCLE = D1MACH(1)*RTOL*1.0D+3 + DO 60 I=1,NL +C STR = CYR(I)*CSGNR - CYI(I)*CSGNI +C CYI(I) = CYR(I)*CSGNI + CYI(I)*CSGNR +C CYR(I) = STR + AA = CYR(I) + BB = CYI(I) + ATOL = 1.0D0 + IF (DMAX1(DABS(AA),DABS(BB)).GT.ASCLE) GO TO 55 + AA = AA*RTOL + BB = BB*RTOL + ATOL = TOL + 55 CONTINUE + STR = AA*CSGNR - BB*CSGNI + STI = AA*CSGNI + BB*CSGNR + CYR(I) = STR*ATOL + CYI(I) = STI*ATOL + STR = -CSGNI*CII + CSGNI = CSGNR*CII + CSGNR = STR + 60 CONTINUE + RETURN + 130 CONTINUE + IF(NZ.EQ.(-2)) GO TO 140 + NZ = 0 + IERR = 2 + RETURN + 140 CONTINUE + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + NZ=0 + IERR=4 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesk.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesk.f new file mode 100644 index 0000000..c222828 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesk.f @@ -0,0 +1,281 @@ + SUBROUTINE ZBESK(ZR, ZI, FNU, KODE, N, CYR, CYI, NZ, IERR) +C***BEGIN PROLOGUE ZBESK +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS K-BESSEL FUNCTION,COMPLEX BESSEL FUNCTION, +C MODIFIED BESSEL FUNCTION OF THE SECOND KIND, +C BESSEL FUNCTION OF THE THIRD KIND +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE K-BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C +C ON KODE=1, CBESK COMPUTES AN N MEMBER SEQUENCE OF COMPLEX +C BESSEL FUNCTIONS CY(J)=K(FNU+J-1,Z) FOR REAL, NONNEGATIVE +C ORDERS FNU+J-1, J=1,...,N AND COMPLEX Z.NE.CMPLX(0.0,0.0) +C IN THE CUT PLANE -PI.LT.ARG(Z).LE.PI. 
ON KODE=2, CBESK +C RETURNS THE SCALED K FUNCTIONS, +C +C CY(J)=EXP(Z)*K(FNU+J-1,Z) , J=1,...,N, +C +C WHICH REMOVE THE EXPONENTIAL BEHAVIOR IN BOTH THE LEFT AND +C RIGHT HALF PLANES FOR Z TO INFINITY. DEFINITIONS AND +C NOTATION ARE FOUND IN THE NBS HANDBOOK OF MATHEMATICAL +C FUNCTIONS (REF. 1). +C +C INPUT ZR,ZI,FNU ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI), Z.NE.CMPLX(0.0D0,0.0D0), +C -PI.LT.ARG(Z).LE.PI +C FNU - ORDER OF INITIAL K FUNCTION, FNU.GE.0.0D0 +C N - NUMBER OF MEMBERS OF THE SEQUENCE, N.GE.1 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C CY(I)=K(FNU+I-1,Z), I=1,...,N +C = 2 RETURNS +C CY(I)=K(FNU+I-1,Z)*EXP(Z), I=1,...,N +C +C OUTPUT CYR,CYI ARE DOUBLE PRECISION +C CYR,CYI- DOUBLE PRECISION VECTORS WHOSE FIRST N COMPONENTS +C CONTAIN REAL AND IMAGINARY PARTS FOR THE SEQUENCE +C CY(I)=K(FNU+I-1,Z), I=1,...,N OR +C CY(I)=K(FNU+I-1,Z)*EXP(Z), I=1,...,N +C DEPENDING ON KODE +C NZ - NUMBER OF COMPONENTS SET TO ZERO DUE TO UNDERFLOW. +C NZ= 0 , NORMAL RETURN +C NZ.GT.0 , FIRST NZ COMPONENTS OF CY SET TO ZERO DUE +C TO UNDERFLOW, CY(I)=CMPLX(0.0D0,0.0D0), +C I=1,...,N WHEN X.GE.0.0. WHEN X.LT.0.0 +C NZ STATES ONLY THE NUMBER OF UNDERFLOWS +C IN THE SEQUENCE. +C +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, FNU IS +C TOO LARGE OR CABS(Z) IS TOO SMALL OR BOTH +C IERR=3, CABS(Z) OR FNU+N-1 LARGE - COMPUTATION DONE +C BUT LOSSES OF SIGNIFCANCE BY ARGUMENT +C REDUCTION PRODUCE LESS THAN HALF OF MACHINE +C ACCURACY +C IERR=4, CABS(Z) OR FNU+N-1 TOO LARGE - NO COMPUTA- +C TION BECAUSE OF COMPLETE LOSSES OF SIGNIFI- +C CANCE BY ARGUMENT REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C EQUATIONS OF THE REFERENCE ARE IMPLEMENTED FOR SMALL ORDERS +C DNU AND DNU+1.0 IN THE RIGHT HALF PLANE X.GE.0.0. FORWARD +C RECURRENCE GENERATES HIGHER ORDERS. K IS CONTINUED TO THE LEFT +C HALF PLANE BY THE RELATION +C +C K(FNU,Z*EXP(MP)) = EXP(-MP*FNU)*K(FNU,Z)-MP*I(FNU,Z) +C MP=MR*PI*I, MR=+1 OR -1, RE(Z).GT.0, I**2=-1 +C +C WHERE I(FNU,Z) IS THE I BESSEL FUNCTION. +C +C FOR LARGE ORDERS, FNU.GT.FNUL, THE K FUNCTION IS COMPUTED +C BY MEANS OF ITS UNIFORM ASYMPTOTIC EXPANSIONS. +C +C FOR NEGATIVE ORDERS, THE FORMULA +C +C K(-FNU,Z) = K(FNU,Z) +C +C CAN BE USED. +C +C CBESK ASSUMES THAT A SIGNIFICANT DIGIT SINH(X) FUNCTION IS +C AVAILABLE. +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z OR FNU+N-1 IS +C LARGE, LOSSES OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. +C CONSEQUENTLY, IF EITHER ONE EXCEEDS U1=SQRT(0.5/UR), THEN +C LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR FLAG +C IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C IF EITHER IS LARGER THAN U2=0.5/UR, THEN ALL SIGNIFICANCE IS +C LOST AND IERR=4. IN ORDER TO USE THE INT FUNCTION, ARGUMENTS +C MUST BE FURTHER RESTRICTED NOT TO EXCEED THE LARGEST MACHINE +C INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF Z AND FNU+N-1 IS +C RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, AND U3 +C ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE PRECISION +C ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE PRECISION +C ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMITING IN +C THEIR RESPECTIVE ARITHMETICS. 
THIS MEANS THAT ONE CAN EXPECT +C TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, NO DIGITS +C IN SINGLE AND ONLY 7 DIGITS IN DOUBLE PRECISION ARITHMETIC. +C SIMILAR CONSIDERATIONS HOLD FOR OTHER MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C BY D. E. AMOS, SAND83-0083, MAY, 1983. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983. +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZACON,ZBKNU,ZBUNK,ZUOIK,ZABS,I1MACH,D1MACH +C***END PROLOGUE ZBESK +C +C COMPLEX CY,Z + DOUBLE PRECISION AA, ALIM, ALN, ARG, AZ, CYI, CYR, DIG, ELIM, FN, + * FNU, FNUL, RL, R1M5, TOL, UFL, ZI, ZR, D1MACH, ZABS, BB + INTEGER IERR, K, KODE, K1, K2, MR, N, NN, NUF, NW, NZ, I1MACH + DIMENSION CYR(N), CYI(N) +C***FIRST EXECUTABLE STATEMENT ZBESK + IERR = 0 + NZ=0 + IF (ZI.EQ.0.0E0 .AND. ZR.EQ.0.0E0) IERR=1 + IF (FNU.LT.0.0D0) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (N.LT.1) IERR=1 + IF (IERR.NE.0) RETURN + NN = N +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0E-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). 
+C FNUL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC SERIES FOR LARGE FNU +C----------------------------------------------------------------------- + TOL = DMAX1(D1MACH(4),1.0D-18) + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + FNUL = 10.0D0 + 6.0D0*(DIG-3.0D0) + RL = 1.2D0*DIG + 3.0D0 +C----------------------------------------------------------------------------- +C TEST FOR PROPER RANGE +C----------------------------------------------------------------------- + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + FN = FNU + DBLE(FLOAT(NN-1)) + AA = 0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA = DMIN1(AA,BB) + IF (AZ.GT.AA) GO TO 260 + IF (FN.GT.AA) GO TO 260 + AA = DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + IF (FN.GT.AA) IERR=3 +C----------------------------------------------------------------------- +C OVERFLOW TEST ON THE LAST MEMBER OF THE SEQUENCE +C----------------------------------------------------------------------- +C UFL = DEXP(-ELIM) + UFL = D1MACH(1)*1.0D+3 + IF (AZ.LT.UFL) GO TO 180 + IF (FNU.GT.FNUL) GO TO 80 + IF (FN.LE.1.0D0) GO TO 60 + IF (FN.GT.2.0D0) GO TO 50 + IF (AZ.GT.TOL) GO TO 60 + ARG = 0.5D0*AZ + ALN = -FN*DLOG(ARG) + IF (ALN.GT.ELIM) GO TO 180 + GO TO 60 + 50 CONTINUE + CALL ZUOIK(ZR, ZI, FNU, KODE, 2, NN, CYR, CYI, NUF, TOL, ELIM, + * ALIM) + IF (NUF.LT.0) GO TO 180 + NZ = NZ + NUF + NN = NN - NUF +C----------------------------------------------------------------------- +C HERE NN=N OR NN=0 SINCE NUF=0,NN, OR -1 ON RETURN FROM CUOIK +C IF NUF=NN, THEN CY(I)=CZERO FOR ALL I +C----------------------------------------------------------------------- + IF (NN.EQ.0) GO TO 100 + 60 CONTINUE + IF (ZR.LT.0.0D0) GO TO 70 +C----------------------------------------------------------------------- +C RIGHT HALF PLANE COMPUTATION, REAL(Z).GE.0. +C----------------------------------------------------------------------- + CALL ZBKNU(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, TOL, ELIM, ALIM) + IF (NW.LT.0) GO TO 200 + NZ=NW + RETURN +C----------------------------------------------------------------------- +C LEFT HALF PLANE COMPUTATION +C PI/2.LT.ARG(Z).LE.PI AND -PI.LT.ARG(Z).LT.-PI/2. 
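For reference, the ZACON branch below carries K into the left half plane using the relation quoted in the long description above; written out for the two values of MR (with v = FNU) it reads

    K(v, Z*EXP(+PI*I)) = EXP(-v*PI*I)*K(v,Z) - PI*I * I(v,Z)
    K(v, Z*EXP(-PI*I)) = EXP(+v*PI*I)*K(v,Z) + PI*I * I(v,Z)        (RE(Z).GT.0)

MR = +1 or -1 selects between the two rows and is taken from the sign of AIMAG(Z), so that the rotation stays inside the cut plane -PI.LT.ARG(Z).LE.PI.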
+C----------------------------------------------------------------------- + 70 CONTINUE + IF (NZ.NE.0) GO TO 180 + MR = 1 + IF (ZI.LT.0.0D0) MR = -1 + CALL ZACON(ZR, ZI, FNU, KODE, MR, NN, CYR, CYI, NW, RL, FNUL, + * TOL, ELIM, ALIM) + IF (NW.LT.0) GO TO 200 + NZ=NW + RETURN +C----------------------------------------------------------------------- +C UNIFORM ASYMPTOTIC EXPANSIONS FOR FNU.GT.FNUL +C----------------------------------------------------------------------- + 80 CONTINUE + MR = 0 + IF (ZR.GE.0.0D0) GO TO 90 + MR = 1 + IF (ZI.LT.0.0D0) MR = -1 + 90 CONTINUE + CALL ZBUNK(ZR, ZI, FNU, KODE, MR, NN, CYR, CYI, NW, TOL, ELIM, + * ALIM) + IF (NW.LT.0) GO TO 200 + NZ = NZ + NW + RETURN + 100 CONTINUE + IF (ZR.LT.0.0D0) GO TO 180 + RETURN + 180 CONTINUE + NZ = 0 + IERR=2 + RETURN + 200 CONTINUE + IF(NW.EQ.(-1)) GO TO 180 + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + NZ=0 + IERR=4 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesy.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesy.f new file mode 100644 index 0000000..05ec40b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbesy.f @@ -0,0 +1,244 @@ + SUBROUTINE ZBESY(ZR, ZI, FNU, KODE, N, CYR, CYI, NZ, CWRKR, CWRKI, + * IERR) +C***BEGIN PROLOGUE ZBESY +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS Y-BESSEL FUNCTION,BESSEL FUNCTION OF COMPLEX ARGUMENT, +C BESSEL FUNCTION OF SECOND KIND +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE THE Y-BESSEL FUNCTION OF A COMPLEX ARGUMENT +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C +C ON KODE=1, CBESY COMPUTES AN N MEMBER SEQUENCE OF COMPLEX +C BESSEL FUNCTIONS CY(I)=Y(FNU+I-1,Z) FOR REAL, NONNEGATIVE +C ORDERS FNU+I-1, I=1,...,N AND COMPLEX Z IN THE CUT PLANE +C -PI.LT.ARG(Z).LE.PI. ON KODE=2, CBESY RETURNS THE SCALED +C FUNCTIONS +C +C CY(I)=EXP(-ABS(Y))*Y(FNU+I-1,Z) I = 1,...,N , Y=AIMAG(Z) +C +C WHICH REMOVE THE EXPONENTIAL GROWTH IN BOTH THE UPPER AND +C LOWER HALF PLANES FOR Z TO INFINITY. DEFINITIONS AND NOTATION +C ARE FOUND IN THE NBS HANDBOOK OF MATHEMATICAL FUNCTIONS +C (REF. 1). +C +C INPUT ZR,ZI,FNU ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI), Z.NE.CMPLX(0.0D0,0.0D0), +C -PI.LT.ARG(Z).LE.PI +C FNU - ORDER OF INITIAL Y FUNCTION, FNU.GE.0.0D0 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C CY(I)=Y(FNU+I-1,Z), I=1,...,N +C = 2 RETURNS +C CY(I)=Y(FNU+I-1,Z)*EXP(-ABS(Y)), I=1,...,N +C WHERE Y=AIMAG(Z) +C N - NUMBER OF MEMBERS OF THE SEQUENCE, N.GE.1 +C CWRKR, - DOUBLE PRECISION WORK VECTORS OF DIMENSION AT +C CWRKI AT LEAST N +C +C OUTPUT CYR,CYI ARE DOUBLE PRECISION +C CYR,CYI- DOUBLE PRECISION VECTORS WHOSE FIRST N COMPONENTS +C CONTAIN REAL AND IMAGINARY PARTS FOR THE SEQUENCE +C CY(I)=Y(FNU+I-1,Z) OR +C CY(I)=Y(FNU+I-1,Z)*EXP(-ABS(Y)) I=1,...,N +C DEPENDING ON KODE. 
+C NZ - NZ=0 , A NORMAL RETURN +C NZ.GT.0 , NZ COMPONENTS OF CY SET TO ZERO DUE TO +C UNDERFLOW (GENERALLY ON KODE=2) +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, FNU IS +C TOO LARGE OR CABS(Z) IS TOO SMALL OR BOTH +C IERR=3, CABS(Z) OR FNU+N-1 LARGE - COMPUTATION DONE +C BUT LOSSES OF SIGNIFCANCE BY ARGUMENT +C REDUCTION PRODUCE LESS THAN HALF OF MACHINE +C ACCURACY +C IERR=4, CABS(Z) OR FNU+N-1 TOO LARGE - NO COMPUTA- +C TION BECAUSE OF COMPLETE LOSSES OF SIGNIFI- +C CANCE BY ARGUMENT REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C THE COMPUTATION IS CARRIED OUT BY THE FORMULA +C +C Y(FNU,Z)=0.5*(H(1,FNU,Z)-H(2,FNU,Z))/I +C +C WHERE I**2 = -1 AND THE HANKEL BESSEL FUNCTIONS H(1,FNU,Z) +C AND H(2,FNU,Z) ARE CALCULATED IN CBESH. +C +C FOR NEGATIVE ORDERS,THE FORMULA +C +C Y(-FNU,Z) = Y(FNU,Z)*COS(PI*FNU) + J(FNU,Z)*SIN(PI*FNU) +C +C CAN BE USED. HOWEVER,FOR LARGE ORDERS CLOSE TO HALF ODD +C INTEGERS THE FUNCTION CHANGES RADICALLY. WHEN FNU IS A LARGE +C POSITIVE HALF ODD INTEGER,THE MAGNITUDE OF Y(-FNU,Z)=J(FNU,Z)* +C SIN(PI*FNU) IS A LARGE NEGATIVE POWER OF TEN. BUT WHEN FNU IS +C NOT A HALF ODD INTEGER, Y(FNU,Z) DOMINATES IN MAGNITUDE WITH A +C LARGE POSITIVE POWER OF TEN AND THE MOST THAT THE SECOND TERM +C CAN BE REDUCED IS BY UNIT ROUNDOFF FROM THE COEFFICIENT. THUS, +C WIDE CHANGES CAN OCCUR WITHIN UNIT ROUNDOFF OF A LARGE HALF +C ODD INTEGER. HERE, LARGE MEANS FNU.GT.CABS(Z). +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z OR FNU+N-1 IS +C LARGE, LOSSES OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. +C CONSEQUENTLY, IF EITHER ONE EXCEEDS U1=SQRT(0.5/UR), THEN +C LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR FLAG +C IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. +C IF EITHER IS LARGER THAN U2=0.5/UR, THEN ALL SIGNIFICANCE IS +C LOST AND IERR=4. IN ORDER TO USE THE INT FUNCTION, ARGUMENTS +C MUST BE FURTHER RESTRICTED NOT TO EXCEED THE LARGEST MACHINE +C INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF Z AND FNU+N-1 IS +C RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, AND U3 +C ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE PRECISION +C ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE PRECISION +C ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMITING IN +C THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT ONE CAN EXPECT +C TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, NO DIGITS +C IN SINGLE AND ONLY 7 DIGITS IN DOUBLE PRECISION ARITHMETIC. +C SIMILAR CONSIDERATIONS HOLD FOR OTHER MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. 
IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C BY D. E. AMOS, SAND83-0083, MAY, 1983. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZBESH,I1MACH,D1MACH +C***END PROLOGUE ZBESY +C +C COMPLEX CWRK,CY,C1,C2,EX,HCI,Z,ZU,ZV + DOUBLE PRECISION CWRKI, CWRKR, CYI, CYR, C1I, C1R, C2I, C2R, + * ELIM, EXI, EXR, EY, FNU, HCII, STI, STR, TAY, ZI, ZR, DEXP, + * D1MACH, ASCLE, RTOL, ATOL, AA, BB, TOL + INTEGER I, IERR, K, KODE, K1, K2, N, NZ, NZ1, NZ2, I1MACH + DIMENSION CYR(N), CYI(N), CWRKR(N), CWRKI(N) +C***FIRST EXECUTABLE STATEMENT ZBESY + IERR = 0 + NZ=0 + IF (ZR.EQ.0.0D0 .AND. ZI.EQ.0.0D0) IERR=1 + IF (FNU.LT.0.0D0) IERR=1 + IF (KODE.LT.1 .OR. KODE.GT.2) IERR=1 + IF (N.LT.1) IERR=1 + IF (IERR.NE.0) RETURN + HCII = 0.5D0 + CALL ZBESH(ZR, ZI, FNU, KODE, 1, N, CYR, CYI, NZ1, IERR) + IF (IERR.NE.0.AND.IERR.NE.3) GO TO 170 + CALL ZBESH(ZR, ZI, FNU, KODE, 2, N, CWRKR, CWRKI, NZ2, IERR) + IF (IERR.NE.0.AND.IERR.NE.3) GO TO 170 + NZ = MIN0(NZ1,NZ2) + IF (KODE.EQ.2) GO TO 60 + DO 50 I=1,N + STR = CWRKR(I) - CYR(I) + STI = CWRKI(I) - CYI(I) + CYR(I) = -STI*HCII + CYI(I) = STR*HCII + 50 CONTINUE + RETURN + 60 CONTINUE + TOL = DMAX1(D1MACH(4),1.0D-18) + K1 = I1MACH(15) + K2 = I1MACH(16) + K = MIN0(IABS(K1),IABS(K2)) + R1M5 = D1MACH(5) +C----------------------------------------------------------------------- +C ELIM IS THE APPROXIMATE EXPONENTIAL UNDER- AND OVERFLOW LIMIT +C----------------------------------------------------------------------- + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + EXR = DCOS(ZR) + EXI = DSIN(ZR) + EY = 0.0D0 + TAY = DABS(ZI+ZI) + IF (TAY.LT.ELIM) EY = DEXP(-TAY) + IF (ZI.LT.0.0D0) GO TO 90 + C1R = EXR*EY + C1I = EXI*EY + C2R = EXR + C2I = -EXI + 70 CONTINUE + NZ = 0 + RTOL = 1.0D0/TOL + ASCLE = D1MACH(1)*RTOL*1.0D+3 + DO 80 I=1,N +C STR = C1R*CYR(I) - C1I*CYI(I) +C STI = C1R*CYI(I) + C1I*CYR(I) +C STR = -STR + C2R*CWRKR(I) - C2I*CWRKI(I) +C STI = -STI + C2R*CWRKI(I) + C2I*CWRKR(I) +C CYR(I) = -STI*HCII +C CYI(I) = STR*HCII + AA = CWRKR(I) + BB = CWRKI(I) + ATOL = 1.0D0 + IF (DMAX1(DABS(AA),DABS(BB)).GT.ASCLE) GO TO 75 + AA = AA*RTOL + BB = BB*RTOL + ATOL = TOL + 75 CONTINUE + STR = (AA*C2R - BB*C2I)*ATOL + STI = (AA*C2I + BB*C2R)*ATOL + AA = CYR(I) + BB = CYI(I) + ATOL = 1.0D0 + IF (DMAX1(DABS(AA),DABS(BB)).GT.ASCLE) GO TO 85 + AA = AA*RTOL + BB = BB*RTOL + ATOL = TOL + 85 CONTINUE + STR = STR - (AA*C1R - BB*C1I)*ATOL + STI = STI - (AA*C1I + BB*C1R)*ATOL + CYR(I) = -STI*HCII + CYI(I) = STR*HCII + IF (STR.EQ.0.0D0 
.AND. STI.EQ.0.0D0 .AND. EY.EQ.0.0D0) NZ = NZ + * + 1 + 80 CONTINUE + RETURN + 90 CONTINUE + C1R = EXR + C1I = EXI + C2R = EXR*EY + C2I = -EXI*EY + GO TO 70 + 170 CONTINUE + NZ = 0 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbinu.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbinu.f new file mode 100644 index 0000000..b740e70 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbinu.f @@ -0,0 +1,110 @@ + SUBROUTINE ZBINU(ZR, ZI, FNU, KODE, N, CYR, CYI, NZ, RL, FNUL, + * TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZBINU +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZAIRY,ZBIRY +C +C ZBINU COMPUTES THE I FUNCTION IN THE RIGHT HALF Z PLANE +C +C***ROUTINES CALLED ZABS,ZASYI,ZBUNI,ZMLRI,ZSERI,ZUOIK,ZWRSK +C***END PROLOGUE ZBINU + DOUBLE PRECISION ALIM, AZ, CWI, CWR, CYI, CYR, DFNU, ELIM, FNU, + * FNUL, RL, TOL, ZEROI, ZEROR, ZI, ZR, ZABS + INTEGER I, INW, KODE, N, NLAST, NN, NUI, NW, NZ + DIMENSION CYR(N), CYI(N), CWR(2), CWI(2) + DATA ZEROR,ZEROI / 0.0D0, 0.0D0 / +C + NZ = 0 + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + NN = N + DFNU = FNU + DBLE(FLOAT(N-1)) + IF (AZ.LE.2.0D0) GO TO 10 + IF (AZ*AZ*0.25D0.GT.DFNU+1.0D0) GO TO 20 + 10 CONTINUE +C----------------------------------------------------------------------- +C POWER SERIES +C----------------------------------------------------------------------- + CALL ZSERI(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, TOL, ELIM, ALIM) + INW = IABS(NW) + NZ = NZ + INW + NN = NN - INW + IF (NN.EQ.0) RETURN + IF (NW.GE.0) GO TO 120 + DFNU = FNU + DBLE(FLOAT(NN-1)) + 20 CONTINUE + IF (AZ.LT.RL) GO TO 40 + IF (DFNU.LE.1.0D0) GO TO 30 + IF (AZ+AZ.LT.DFNU*DFNU) GO TO 50 +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR LARGE Z +C----------------------------------------------------------------------- + 30 CONTINUE + CALL ZASYI(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, RL, TOL, ELIM, + * ALIM) + IF (NW.LT.0) GO TO 130 + GO TO 120 + 40 CONTINUE + IF (DFNU.LE.1.0D0) GO TO 70 + 50 CONTINUE +C----------------------------------------------------------------------- +C OVERFLOW AND UNDERFLOW TEST ON I SEQUENCE FOR MILLER ALGORITHM +C----------------------------------------------------------------------- + CALL ZUOIK(ZR, ZI, FNU, KODE, 1, NN, CYR, CYI, NW, TOL, ELIM, + * ALIM) + IF (NW.LT.0) GO TO 130 + NZ = NZ + NW + NN = NN - NW + IF (NN.EQ.0) RETURN + DFNU = FNU+DBLE(FLOAT(NN-1)) + IF (DFNU.GT.FNUL) GO TO 110 + IF (AZ.GT.FNUL) GO TO 110 + 60 CONTINUE + IF (AZ.GT.RL) GO TO 80 + 70 CONTINUE +C----------------------------------------------------------------------- +C MILLER ALGORITHM NORMALIZED BY THE SERIES +C----------------------------------------------------------------------- + CALL ZMLRI(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, TOL) + IF(NW.LT.0) GO TO 130 + GO TO 120 + 80 CONTINUE +C----------------------------------------------------------------------- +C MILLER ALGORITHM NORMALIZED BY THE WRONSKIAN +C----------------------------------------------------------------------- +C----------------------------------------------------------------------- +C OVERFLOW TEST ON K FUNCTIONS USED IN WRONSKIAN +C----------------------------------------------------------------------- + CALL ZUOIK(ZR, ZI, FNU, KODE, 2, 2, CWR, CWI, NW, TOL, ELIM, + * ALIM) + IF (NW.GE.0) GO TO 100 + NZ = NN + DO 90 I=1,NN + CYR(I) = ZEROR + CYI(I) = ZEROI + 90 CONTINUE + RETURN + 100 CONTINUE + IF (NW.GT.0) GO TO 130 + CALL ZWRSK(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, CWR, CWI, TOL, + * ELIM, ALIM) + 
IF (NW.LT.0) GO TO 130 + GO TO 120 + 110 CONTINUE +C----------------------------------------------------------------------- +C INCREMENT FNU+NN-1 UP TO FNUL, COMPUTE AND RECUR BACKWARD +C----------------------------------------------------------------------- + NUI = INT(SNGL(FNUL-DFNU)) + 1 + NUI = MAX0(NUI,0) + CALL ZBUNI(ZR, ZI, FNU, KODE, NN, CYR, CYI, NW, NUI, NLAST, FNUL, + * TOL, ELIM, ALIM) + IF (NW.LT.0) GO TO 130 + NZ = NZ + NW + IF (NLAST.EQ.0) GO TO 120 + NN = NLAST + GO TO 60 + 120 CONTINUE + RETURN + 130 CONTINUE + NZ = -1 + IF(NW.EQ.(-2)) NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbiry.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbiry.f new file mode 100644 index 0000000..f042d82 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbiry.f @@ -0,0 +1,364 @@ + SUBROUTINE ZBIRY(ZR, ZI, ID, KODE, BIR, BII, IERR) +C***BEGIN PROLOGUE ZBIRY +C***DATE WRITTEN 830501 (YYMMDD) +C***REVISION DATE 890801 (YYMMDD) +C***CATEGORY NO. B5K +C***KEYWORDS AIRY FUNCTION,BESSEL FUNCTIONS OF ORDER ONE THIRD +C***AUTHOR AMOS, DONALD E., SANDIA NATIONAL LABORATORIES +C***PURPOSE TO COMPUTE AIRY FUNCTIONS BI(Z) AND DBI(Z) FOR COMPLEX Z +C***DESCRIPTION +C +C ***A DOUBLE PRECISION ROUTINE*** +C ON KODE=1, CBIRY COMPUTES THE COMPLEX AIRY FUNCTION BI(Z) OR +C ITS DERIVATIVE DBI(Z)/DZ ON ID=0 OR ID=1 RESPECTIVELY. ON +C KODE=2, A SCALING OPTION CEXP(-AXZTA)*BI(Z) OR CEXP(-AXZTA)* +C DBI(Z)/DZ IS PROVIDED TO REMOVE THE EXPONENTIAL BEHAVIOR IN +C BOTH THE LEFT AND RIGHT HALF PLANES WHERE +C ZTA=(2/3)*Z*CSQRT(Z)=CMPLX(XZTA,YZTA) AND AXZTA=ABS(XZTA). +C DEFINTIONS AND NOTATION ARE FOUND IN THE NBS HANDBOOK OF +C MATHEMATICAL FUNCTIONS (REF. 1). +C +C INPUT ZR,ZI ARE DOUBLE PRECISION +C ZR,ZI - Z=CMPLX(ZR,ZI) +C ID - ORDER OF DERIVATIVE, ID=0 OR ID=1 +C KODE - A PARAMETER TO INDICATE THE SCALING OPTION +C KODE= 1 RETURNS +C BI=BI(Z) ON ID=0 OR +C BI=DBI(Z)/DZ ON ID=1 +C = 2 RETURNS +C BI=CEXP(-AXZTA)*BI(Z) ON ID=0 OR +C BI=CEXP(-AXZTA)*DBI(Z)/DZ ON ID=1 WHERE +C ZTA=(2/3)*Z*CSQRT(Z)=CMPLX(XZTA,YZTA) +C AND AXZTA=ABS(XZTA) +C +C OUTPUT BIR,BII ARE DOUBLE PRECISION +C BIR,BII- COMPLEX ANSWER DEPENDING ON THE CHOICES FOR ID AND +C KODE +C IERR - ERROR FLAG +C IERR=0, NORMAL RETURN - COMPUTATION COMPLETED +C IERR=1, INPUT ERROR - NO COMPUTATION +C IERR=2, OVERFLOW - NO COMPUTATION, REAL(Z) +C TOO LARGE ON KODE=1 +C IERR=3, CABS(Z) LARGE - COMPUTATION COMPLETED +C LOSSES OF SIGNIFCANCE BY ARGUMENT REDUCTION +C PRODUCE LESS THAN HALF OF MACHINE ACCURACY +C IERR=4, CABS(Z) TOO LARGE - NO COMPUTATION +C COMPLETE LOSS OF ACCURACY BY ARGUMENT +C REDUCTION +C IERR=5, ERROR - NO COMPUTATION, +C ALGORITHM TERMINATION CONDITION NOT MET +C +C***LONG DESCRIPTION +C +C BI AND DBI ARE COMPUTED FOR CABS(Z).GT.1.0 FROM THE I BESSEL +C FUNCTIONS BY +C +C BI(Z)=C*SQRT(Z)*( I(-1/3,ZTA) + I(1/3,ZTA) ) +C DBI(Z)=C * Z * ( I(-2/3,ZTA) + I(2/3,ZTA) ) +C C=1.0/SQRT(3.0) +C ZTA=(2/3)*Z**(3/2) +C +C WITH THE POWER SERIES FOR CABS(Z).LE.1.0. +C +C IN MOST COMPLEX VARIABLE COMPUTATION, ONE MUST EVALUATE ELE- +C MENTARY FUNCTIONS. WHEN THE MAGNITUDE OF Z IS LARGE, LOSSES +C OF SIGNIFICANCE BY ARGUMENT REDUCTION OCCUR. CONSEQUENTLY, IF +C THE MAGNITUDE OF ZETA=(2/3)*Z**1.5 EXCEEDS U1=SQRT(0.5/UR), +C THEN LOSSES EXCEEDING HALF PRECISION ARE LIKELY AND AN ERROR +C FLAG IERR=3 IS TRIGGERED WHERE UR=DMAX1(D1MACH(4),1.0D-18) IS +C DOUBLE PRECISION UNIT ROUNDOFF LIMITED TO 18 DIGITS PRECISION. 
+C ALSO, IF THE MAGNITUDE OF ZETA IS LARGER THAN U2=0.5/UR, THEN +C ALL SIGNIFICANCE IS LOST AND IERR=4. IN ORDER TO USE THE INT +C FUNCTION, ZETA MUST BE FURTHER RESTRICTED NOT TO EXCEED THE +C LARGEST INTEGER, U3=I1MACH(9). THUS, THE MAGNITUDE OF ZETA +C MUST BE RESTRICTED BY MIN(U2,U3). ON 32 BIT MACHINES, U1,U2, +C AND U3 ARE APPROXIMATELY 2.0E+3, 4.2E+6, 2.1E+9 IN SINGLE +C PRECISION ARITHMETIC AND 1.3E+8, 1.8E+16, 2.1E+9 IN DOUBLE +C PRECISION ARITHMETIC RESPECTIVELY. THIS MAKES U2 AND U3 LIMIT- +C ING IN THEIR RESPECTIVE ARITHMETICS. THIS MEANS THAT THE MAG- +C NITUDE OF Z CANNOT EXCEED 3.1E+4 IN SINGLE AND 2.1E+6 IN +C DOUBLE PRECISION ARITHMETIC. THIS ALSO MEANS THAT ONE CAN +C EXPECT TO RETAIN, IN THE WORST CASES ON 32 BIT MACHINES, +C NO DIGITS IN SINGLE PRECISION AND ONLY 7 DIGITS IN DOUBLE +C PRECISION ARITHMETIC. SIMILAR CONSIDERATIONS HOLD FOR OTHER +C MACHINES. +C +C THE APPROXIMATE RELATIVE ERROR IN THE MAGNITUDE OF A COMPLEX +C BESSEL FUNCTION CAN BE EXPRESSED BY P*10**S WHERE P=MAX(UNIT +C ROUNDOFF,1.0E-18) IS THE NOMINAL PRECISION AND 10**S REPRE- +C SENTS THE INCREASE IN ERROR DUE TO ARGUMENT REDUCTION IN THE +C ELEMENTARY FUNCTIONS. HERE, S=MAX(1,ABS(LOG10(CABS(Z))), +C ABS(LOG10(FNU))) APPROXIMATELY (I.E. S=MAX(1,ABS(EXPONENT OF +C CABS(Z),ABS(EXPONENT OF FNU)) ). HOWEVER, THE PHASE ANGLE MAY +C HAVE ONLY ABSOLUTE ACCURACY. THIS IS MOST LIKELY TO OCCUR WHEN +C ONE COMPONENT (IN ABSOLUTE VALUE) IS LARGER THAN THE OTHER BY +C SEVERAL ORDERS OF MAGNITUDE. IF ONE COMPONENT IS 10**K LARGER +C THAN THE OTHER, THEN ONE CAN EXPECT ONLY MAX(ABS(LOG10(P))-K, +C 0) SIGNIFICANT DIGITS; OR, STATED ANOTHER WAY, WHEN K EXCEEDS +C THE EXPONENT OF P, NO SIGNIFICANT DIGITS REMAIN IN THE SMALLER +C COMPONENT. HOWEVER, THE PHASE ANGLE RETAINS ABSOLUTE ACCURACY +C BECAUSE, IN COMPLEX ARITHMETIC WITH PRECISION P, THE SMALLER +C COMPONENT WILL NOT (AS A RULE) DECREASE BELOW P TIMES THE +C MAGNITUDE OF THE LARGER COMPONENT. IN THESE EXTREME CASES, +C THE PRINCIPAL PHASE ANGLE IS ON THE ORDER OF +P, -P, PI/2-P, +C OR -PI/2+P. +C +C***REFERENCES HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ +C AND I. A. STEGUN, NBS AMS SERIES 55, U.S. DEPT. OF +C COMMERCE, 1955. +C +C COMPUTATION OF BESSEL FUNCTIONS OF COMPLEX ARGUMENT +C AND LARGE ORDER BY D. E. AMOS, SAND83-0643, MAY, 1983 +C +C A SUBROUTINE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, SAND85- +C 1018, MAY, 1985 +C +C A PORTABLE PACKAGE FOR BESSEL FUNCTIONS OF A COMPLEX +C ARGUMENT AND NONNEGATIVE ORDER BY D. E. AMOS, TRANS. +C MATH. SOFTWARE, 1986 +C +C***ROUTINES CALLED ZBINU,ZABS,ZDIV,ZSQRT,D1MACH,I1MACH +C***END PROLOGUE ZBIRY +C COMPLEX BI,CONE,CSQ,CY,S1,S2,TRM1,TRM2,Z,ZTA,Z3 + DOUBLE PRECISION AA, AD, AK, ALIM, ATRM, AZ, AZ3, BB, BII, BIR, + * BK, CC, CK, COEF, CONEI, CONER, CSQI, CSQR, CYI, CYR, C1, C2, + * DIG, DK, D1, D2, EAA, ELIM, FID, FMR, FNU, FNUL, PI, RL, R1M5, + * SFAC, STI, STR, S1I, S1R, S2I, S2R, TOL, TRM1I, TRM1R, TRM2I, + * TRM2R, TTH, ZI, ZR, ZTAI, ZTAR, Z3I, Z3R, D1MACH, ZABS + INTEGER ID, IERR, K, KODE, K1, K2, NZ, I1MACH + DIMENSION CYR(2), CYI(2) + DATA TTH, C1, C2, COEF, PI /6.66666666666666667D-01, + * 6.14926627446000736D-01,4.48288357353826359D-01, + * 5.77350269189625765D-01,3.14159265358979324D+00/ + DATA CONER, CONEI /1.0D0,0.0D0/ +C***FIRST EXECUTABLE STATEMENT ZBIRY + IERR = 0 + NZ=0 + IF (ID.LT.0 .OR. ID.GT.1) IERR=1 + IF (KODE.LT.1 .OR. 
KODE.GT.2) IERR=1 + IF (IERR.NE.0) RETURN + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + TOL = DMAX1(D1MACH(4),1.0D-18) + FID = DBLE(FLOAT(ID)) + IF (AZ.GT.1.0E0) GO TO 70 +C----------------------------------------------------------------------- +C POWER SERIES FOR CABS(Z).LE.1. +C----------------------------------------------------------------------- + S1R = CONER + S1I = CONEI + S2R = CONER + S2I = CONEI + IF (AZ.LT.TOL) GO TO 130 + AA = AZ*AZ + IF (AA.LT.TOL/AZ) GO TO 40 + TRM1R = CONER + TRM1I = CONEI + TRM2R = CONER + TRM2I = CONEI + ATRM = 1.0D0 + STR = ZR*ZR - ZI*ZI + STI = ZR*ZI + ZI*ZR + Z3R = STR*ZR - STI*ZI + Z3I = STR*ZI + STI*ZR + AZ3 = AZ*AA + AK = 2.0D0 + FID + BK = 3.0D0 - FID - FID + CK = 4.0D0 - FID + DK = 3.0D0 + FID + FID + D1 = AK*DK + D2 = BK*CK + AD = DMIN1(D1,D2) + AK = 24.0D0 + 9.0D0*FID + BK = 30.0D0 - 9.0D0*FID + DO 30 K=1,25 + STR = (TRM1R*Z3R-TRM1I*Z3I)/D1 + TRM1I = (TRM1R*Z3I+TRM1I*Z3R)/D1 + TRM1R = STR + S1R = S1R + TRM1R + S1I = S1I + TRM1I + STR = (TRM2R*Z3R-TRM2I*Z3I)/D2 + TRM2I = (TRM2R*Z3I+TRM2I*Z3R)/D2 + TRM2R = STR + S2R = S2R + TRM2R + S2I = S2I + TRM2I + ATRM = ATRM*AZ3/AD + D1 = D1 + AK + D2 = D2 + BK + AD = DMIN1(D1,D2) + IF (ATRM.LT.TOL*AD) GO TO 40 + AK = AK + 18.0D0 + BK = BK + 18.0D0 + 30 CONTINUE + 40 CONTINUE + IF (ID.EQ.1) GO TO 50 + BIR = C1*S1R + C2*(ZR*S2R-ZI*S2I) + BII = C1*S1I + C2*(ZR*S2I+ZI*S2R) + IF (KODE.EQ.1) RETURN + CALL ZSQRT(ZR, ZI, STR, STI) + ZTAR = TTH*(ZR*STR-ZI*STI) + ZTAI = TTH*(ZR*STI+ZI*STR) + AA = ZTAR + AA = -DABS(AA) + EAA = DEXP(AA) + BIR = BIR*EAA + BII = BII*EAA + RETURN + 50 CONTINUE + BIR = S2R*C2 + BII = S2I*C2 + IF (AZ.LE.TOL) GO TO 60 + CC = C1/(1.0D0+FID) + STR = S1R*ZR - S1I*ZI + STI = S1R*ZI + S1I*ZR + BIR = BIR + CC*(STR*ZR-STI*ZI) + BII = BII + CC*(STR*ZI+STI*ZR) + 60 CONTINUE + IF (KODE.EQ.1) RETURN + CALL ZSQRT(ZR, ZI, STR, STI) + ZTAR = TTH*(ZR*STR-ZI*STI) + ZTAI = TTH*(ZR*STI+ZI*STR) + AA = ZTAR + AA = -DABS(AA) + EAA = DEXP(AA) + BIR = BIR*EAA + BII = BII*EAA + RETURN +C----------------------------------------------------------------------- +C CASE FOR CABS(Z).GT.1.0 +C----------------------------------------------------------------------- + 70 CONTINUE + FNU = (1.0D0+FID)/3.0D0 +C----------------------------------------------------------------------- +C SET PARAMETERS RELATED TO MACHINE CONSTANTS. +C TOL IS THE APPROXIMATE UNIT ROUNDOFF LIMITED TO 1.0E-18. +C ELIM IS THE APPROXIMATE EXPONENTIAL OVER- AND UNDERFLOW LIMIT. +C EXP(-ELIM).LT.EXP(-ALIM)=EXP(-ELIM)/TOL AND +C EXP(ELIM).GT.EXP(ALIM)=EXP(ELIM)*TOL ARE INTERVALS NEAR +C UNDERFLOW AND OVERFLOW LIMITS WHERE SCALED ARITHMETIC IS DONE. +C RL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC EXPANSION FOR LARGE Z. +C DIG = NUMBER OF BASE 10 DIGITS IN TOL = 10**(-DIG). +C FNUL IS THE LOWER BOUNDARY OF THE ASYMPTOTIC SERIES FOR LARGE FNU. 
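In the CABS(Z).GT.1 branch that follows, the two I values needed for the prologue relations BI(Z)=C*SQRT(Z)*(I(-1/3,ZTA)+I(1/3,ZTA)) and DBI(Z)=C*Z*(I(-2/3,ZTA)+I(2/3,ZTA)) are obtained from ZBINU at nonnegative orders, and the negative order is then reached by the single recurrence step flagged further down ("BACKWARD RECUR ONE STEP FOR ORDERS -1/3 OR -2/3"). A minimal Go sketch of that step, assuming complex128 in place of the split real/imaginary arrays used by the vendored code:

    package main

    import "fmt"

    // backwardStepI applies the modified-Bessel recurrence
    //     I(nu-1, z) = (2*nu/z)*I(nu, z) + I(nu+1, z)
    // which is the one backward step ZBIRY takes to reach order nu-1 (-1/3 or
    // -2/3) from the two nonnegative-order values returned by ZBINU.
    func backwardStepI(nu float64, z, iNu, iNuPlus1 complex128) complex128 {
        return complex(2*nu, 0)/z*iNu + iNuPlus1
    }

    func main() {
        // Placeholder inputs, only so the sketch runs; a real caller would pass
        // zeta = (2/3)*Z**1.5 and I(nu,zeta), I(nu+1,zeta) from ZBINU.
        fmt.Println(backwardStepI(2.0/3.0, complex(1.0, 0.5), complex(0.9, 0.1), complex(0.2, 0.05)))
    }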
+C----------------------------------------------------------------------- + K1 = I1MACH(15) + K2 = I1MACH(16) + R1M5 = D1MACH(5) + K = MIN0(IABS(K1),IABS(K2)) + ELIM = 2.303D0*(DBLE(FLOAT(K))*R1M5-3.0D0) + K1 = I1MACH(14) - 1 + AA = R1M5*DBLE(FLOAT(K1)) + DIG = DMIN1(AA,18.0D0) + AA = AA*2.303D0 + ALIM = ELIM + DMAX1(-AA,-41.45D0) + RL = 1.2D0*DIG + 3.0D0 + FNUL = 10.0D0 + 6.0D0*(DIG-3.0D0) +C----------------------------------------------------------------------- +C TEST FOR RANGE +C----------------------------------------------------------------------- + AA=0.5D0/TOL + BB=DBLE(FLOAT(I1MACH(9)))*0.5D0 + AA=DMIN1(AA,BB) + AA=AA**TTH + IF (AZ.GT.AA) GO TO 260 + AA=DSQRT(AA) + IF (AZ.GT.AA) IERR=3 + CALL ZSQRT(ZR, ZI, CSQR, CSQI) + ZTAR = TTH*(ZR*CSQR-ZI*CSQI) + ZTAI = TTH*(ZR*CSQI+ZI*CSQR) +C----------------------------------------------------------------------- +C RE(ZTA).LE.0 WHEN RE(Z).LT.0, ESPECIALLY WHEN IM(Z) IS SMALL +C----------------------------------------------------------------------- + SFAC = 1.0D0 + AK = ZTAI + IF (ZR.GE.0.0D0) GO TO 80 + BK = ZTAR + CK = -DABS(BK) + ZTAR = CK + ZTAI = AK + 80 CONTINUE + IF (ZI.NE.0.0D0 .OR. ZR.GT.0.0D0) GO TO 90 + ZTAR = 0.0D0 + ZTAI = AK + 90 CONTINUE + AA = ZTAR + IF (KODE.EQ.2) GO TO 100 +C----------------------------------------------------------------------- +C OVERFLOW TEST +C----------------------------------------------------------------------- + BB = DABS(AA) + IF (BB.LT.ALIM) GO TO 100 + BB = BB + 0.25D0*DLOG(AZ) + SFAC = TOL + IF (BB.GT.ELIM) GO TO 190 + 100 CONTINUE + FMR = 0.0D0 + IF (AA.GE.0.0D0 .AND. ZR.GT.0.0D0) GO TO 110 + FMR = PI + IF (ZI.LT.0.0D0) FMR = -PI + ZTAR = -ZTAR + ZTAI = -ZTAI + 110 CONTINUE +C----------------------------------------------------------------------- +C AA=FACTOR FOR ANALYTIC CONTINUATION OF I(FNU,ZTA) +C KODE=2 RETURNS EXP(-ABS(XZTA))*I(FNU,ZTA) FROM CBESI +C----------------------------------------------------------------------- + CALL ZBINU(ZTAR, ZTAI, FNU, KODE, 1, CYR, CYI, NZ, RL, FNUL, TOL, + * ELIM, ALIM) + IF (NZ.LT.0) GO TO 200 + AA = FMR*FNU + Z3R = SFAC + STR = DCOS(AA) + STI = DSIN(AA) + S1R = (STR*CYR(1)-STI*CYI(1))*Z3R + S1I = (STR*CYI(1)+STI*CYR(1))*Z3R + FNU = (2.0D0-FID)/3.0D0 + CALL ZBINU(ZTAR, ZTAI, FNU, KODE, 2, CYR, CYI, NZ, RL, FNUL, TOL, + * ELIM, ALIM) + CYR(1) = CYR(1)*Z3R + CYI(1) = CYI(1)*Z3R + CYR(2) = CYR(2)*Z3R + CYI(2) = CYI(2)*Z3R +C----------------------------------------------------------------------- +C BACKWARD RECUR ONE STEP FOR ORDERS -1/3 OR -2/3 +C----------------------------------------------------------------------- + CALL ZDIV(CYR(1), CYI(1), ZTAR, ZTAI, STR, STI) + S2R = (FNU+FNU)*STR + CYR(2) + S2I = (FNU+FNU)*STI + CYI(2) + AA = FMR*(FNU-1.0D0) + STR = DCOS(AA) + STI = DSIN(AA) + S1R = COEF*(S1R+S2R*STR-S2I*STI) + S1I = COEF*(S1I+S2R*STI+S2I*STR) + IF (ID.EQ.1) GO TO 120 + STR = CSQR*S1R - CSQI*S1I + S1I = CSQR*S1I + CSQI*S1R + S1R = STR + BIR = S1R/SFAC + BII = S1I/SFAC + RETURN + 120 CONTINUE + STR = ZR*S1R - ZI*S1I + S1I = ZR*S1I + ZI*S1R + S1R = STR + BIR = S1R/SFAC + BII = S1I/SFAC + RETURN + 130 CONTINUE + AA = C1*(1.0D0-FID) + FID*C2 + BIR = AA + BII = 0.0D0 + RETURN + 190 CONTINUE + IERR=2 + NZ=0 + RETURN + 200 CONTINUE + IF(NZ.EQ.(-1)) GO TO 190 + NZ=0 + IERR=5 + RETURN + 260 CONTINUE + IERR=4 + NZ=0 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbknu.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbknu.f new file mode 100644 index 0000000..ef932d4 --- /dev/null +++ 
b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbknu.f @@ -0,0 +1,568 @@ + SUBROUTINE ZBKNU(ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZBKNU +C***REFER TO ZBESI,ZBESK,ZAIRY,ZBESH +C +C ZBKNU COMPUTES THE K BESSEL FUNCTION IN THE RIGHT HALF Z PLANE. +C +C***ROUTINES CALLED DGAMLN,I1MACH,D1MACH,ZKSCL,ZSHCH,ZUCHK,ZABS,ZDIV, +C ZEXP,ZLOG,ZMLT,ZSQRT +C***END PROLOGUE ZBKNU +C + DOUBLE PRECISION AA, AK, ALIM, ASCLE, A1, A2, BB, BK, BRY, CAZ, + * CBI, CBR, CC, CCHI, CCHR, CKI, CKR, COEFI, COEFR, CONEI, CONER, + * CRSCR, CSCLR, CSHI, CSHR, CSI, CSR, CSRR, CSSR, CTWOR, + * CZEROI, CZEROR, CZI, CZR, DNU, DNU2, DPI, ELIM, ETEST, FC, FHS, + * FI, FK, FKS, FMUI, FMUR, FNU, FPI, FR, G1, G2, HPI, PI, PR, PTI, + * PTR, P1I, P1R, P2I, P2M, P2R, QI, QR, RAK, RCAZ, RTHPI, RZI, + * RZR, R1, S, SMUI, SMUR, SPI, STI, STR, S1I, S1R, S2I, S2R, TM, + * TOL, TTH, T1, T2, YI, YR, ZI, ZR, DGAMLN, D1MACH, ZABS, ELM, + * CELMR, ZDR, ZDI, AS, ALAS, HELIM, CYR, CYI + INTEGER I, IFLAG, INU, K, KFLAG, KK, KMAX, KODE, KODED, N, NZ, + * IDUM, I1MACH, J, IC, INUB, NW + DIMENSION YR(N), YI(N), CC(8), CSSR(3), CSRR(3), BRY(3), CYR(2), + * CYI(2) +C COMPLEX Z,Y,A,B,RZ,SMU,FU,FMU,F,FLRZ,CZ,S1,S2,CSH,CCH +C COMPLEX CK,P,Q,COEF,P1,P2,CBK,PT,CZERO,CONE,CTWO,ST,EZ,CS,DK +C + DATA KMAX / 30 / + DATA CZEROR,CZEROI,CONER,CONEI,CTWOR,R1/ + 1 0.0D0 , 0.0D0 , 1.0D0 , 0.0D0 , 2.0D0 , 2.0D0 / + DATA DPI, RTHPI, SPI ,HPI, FPI, TTH / + 1 3.14159265358979324D0, 1.25331413731550025D0, + 2 1.90985931710274403D0, 1.57079632679489662D0, + 3 1.89769999331517738D0, 6.66666666666666666D-01/ + DATA CC(1), CC(2), CC(3), CC(4), CC(5), CC(6), CC(7), CC(8)/ + 1 5.77215664901532861D-01, -4.20026350340952355D-02, + 2 -4.21977345555443367D-02, 7.21894324666309954D-03, + 3 -2.15241674114950973D-04, -2.01348547807882387D-05, + 4 1.13302723198169588D-06, 6.11609510448141582D-09/ +C + CAZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + CSCLR = 1.0D0/TOL + CRSCR = TOL + CSSR(1) = CSCLR + CSSR(2) = 1.0D0 + CSSR(3) = CRSCR + CSRR(1) = CRSCR + CSRR(2) = 1.0D0 + CSRR(3) = CSCLR + BRY(1) = 1.0D+3*D1MACH(1)/TOL + BRY(2) = 1.0D0/BRY(1) + BRY(3) = D1MACH(2) + NZ = 0 + IFLAG = 0 + KODED = KODE + RCAZ = 1.0D0/CAZ + STR = ZR*RCAZ + STI = -ZI*RCAZ + RZR = (STR+STR)*RCAZ + RZI = (STI+STI)*RCAZ + INU = INT(SNGL(FNU+0.5D0)) + DNU = FNU - DBLE(FLOAT(INU)) + IF (DABS(DNU).EQ.0.5D0) GO TO 110 + DNU2 = 0.0D0 + IF (DABS(DNU).GT.TOL) DNU2 = DNU*DNU + IF (CAZ.GT.R1) GO TO 110 +C----------------------------------------------------------------------- +C SERIES FOR CABS(Z).LE.R1 +C----------------------------------------------------------------------- + FC = 1.0D0 + CALL ZLOG(RZR, RZI, SMUR, SMUI, IDUM) + FMUR = SMUR*DNU + FMUI = SMUI*DNU + CALL ZSHCH(FMUR, FMUI, CSHR, CSHI, CCHR, CCHI) + IF (DNU.EQ.0.0D0) GO TO 10 + FC = DNU*DPI + FC = FC/DSIN(FC) + SMUR = CSHR/DNU + SMUI = CSHI/DNU + 10 CONTINUE + A2 = 1.0D0 + DNU +C----------------------------------------------------------------------- +C GAM(1-Z)*GAM(1+Z)=PI*Z/SIN(PI*Z), T1=1/GAM(1-DNU), T2=1/GAM(1+DNU) +C----------------------------------------------------------------------- + T2 = DEXP(-DGAMLN(A2,IDUM)) + T1 = 1.0D0/(T2*FC) + IF (DABS(DNU).GT.0.1D0) GO TO 40 +C----------------------------------------------------------------------- +C SERIES FOR F0 TO RESOLVE INDETERMINACY FOR SMALL ABS(DNU) +C----------------------------------------------------------------------- + AK = 1.0D0 + S = CC(1) + DO 20 K=2,8 + AK = AK*DNU2 + TM = CC(K)*AK + S = S + TM + IF (DABS(TM).LT.TOL) GO TO 30 + 20 CONTINUE + 30 G1 
= -S + GO TO 50 + 40 CONTINUE + G1 = (T1-T2)/(DNU+DNU) + 50 CONTINUE + G2 = (T1+T2)*0.5D0 + FR = FC*(CCHR*G1+SMUR*G2) + FI = FC*(CCHI*G1+SMUI*G2) + CALL ZEXP(FMUR, FMUI, STR, STI) + PR = 0.5D0*STR/T2 + PI = 0.5D0*STI/T2 + CALL ZDIV(0.5D0, 0.0D0, STR, STI, PTR, PTI) + QR = PTR/T1 + QI = PTI/T1 + S1R = FR + S1I = FI + S2R = PR + S2I = PI + AK = 1.0D0 + A1 = 1.0D0 + CKR = CONER + CKI = CONEI + BK = 1.0D0 - DNU2 + IF (INU.GT.0 .OR. N.GT.1) GO TO 80 +C----------------------------------------------------------------------- +C GENERATE K(FNU,Z), 0.0D0 .LE. FNU .LT. 0.5D0 AND N=1 +C----------------------------------------------------------------------- + IF (CAZ.LT.TOL) GO TO 70 + CALL ZMLT(ZR, ZI, ZR, ZI, CZR, CZI) + CZR = 0.25D0*CZR + CZI = 0.25D0*CZI + T1 = 0.25D0*CAZ*CAZ + 60 CONTINUE + FR = (FR*AK+PR+QR)/BK + FI = (FI*AK+PI+QI)/BK + STR = 1.0D0/(AK-DNU) + PR = PR*STR + PI = PI*STR + STR = 1.0D0/(AK+DNU) + QR = QR*STR + QI = QI*STR + STR = CKR*CZR - CKI*CZI + RAK = 1.0D0/AK + CKI = (CKR*CZI+CKI*CZR)*RAK + CKR = STR*RAK + S1R = CKR*FR - CKI*FI + S1R + S1I = CKR*FI + CKI*FR + S1I + A1 = A1*T1*RAK + BK = BK + AK + AK + 1.0D0 + AK = AK + 1.0D0 + IF (A1.GT.TOL) GO TO 60 + 70 CONTINUE + YR(1) = S1R + YI(1) = S1I + IF (KODED.EQ.1) RETURN + CALL ZEXP(ZR, ZI, STR, STI) + CALL ZMLT(S1R, S1I, STR, STI, YR(1), YI(1)) + RETURN +C----------------------------------------------------------------------- +C GENERATE K(DNU,Z) AND K(DNU+1,Z) FOR FORWARD RECURRENCE +C----------------------------------------------------------------------- + 80 CONTINUE + IF (CAZ.LT.TOL) GO TO 100 + CALL ZMLT(ZR, ZI, ZR, ZI, CZR, CZI) + CZR = 0.25D0*CZR + CZI = 0.25D0*CZI + T1 = 0.25D0*CAZ*CAZ + 90 CONTINUE + FR = (FR*AK+PR+QR)/BK + FI = (FI*AK+PI+QI)/BK + STR = 1.0D0/(AK-DNU) + PR = PR*STR + PI = PI*STR + STR = 1.0D0/(AK+DNU) + QR = QR*STR + QI = QI*STR + STR = CKR*CZR - CKI*CZI + RAK = 1.0D0/AK + CKI = (CKR*CZI+CKI*CZR)*RAK + CKR = STR*RAK + S1R = CKR*FR - CKI*FI + S1R + S1I = CKR*FI + CKI*FR + S1I + STR = PR - FR*AK + STI = PI - FI*AK + S2R = CKR*STR - CKI*STI + S2R + S2I = CKR*STI + CKI*STR + S2I + A1 = A1*T1*RAK + BK = BK + AK + AK + 1.0D0 + AK = AK + 1.0D0 + IF (A1.GT.TOL) GO TO 90 + 100 CONTINUE + KFLAG = 2 + A1 = FNU + 1.0D0 + AK = A1*DABS(SMUR) + IF (AK.GT.ALIM) KFLAG = 3 + STR = CSSR(KFLAG) + P2R = S2R*STR + P2I = S2I*STR + CALL ZMLT(P2R, P2I, RZR, RZI, S2R, S2I) + S1R = S1R*STR + S1I = S1I*STR + IF (KODED.EQ.1) GO TO 210 + CALL ZEXP(ZR, ZI, FR, FI) + CALL ZMLT(S1R, S1I, FR, FI, S1R, S1I) + CALL ZMLT(S2R, S2I, FR, FI, S2R, S2I) + GO TO 210 +C----------------------------------------------------------------------- +C IFLAG=0 MEANS NO UNDERFLOW OCCURRED +C IFLAG=1 MEANS AN UNDERFLOW OCCURRED- COMPUTATION PROCEEDS WITH +C KODED=2 AND A TEST FOR ON SCALE VALUES IS MADE DURING FORWARD +C RECURSION +C----------------------------------------------------------------------- + 110 CONTINUE + CALL ZSQRT(ZR, ZI, STR, STI) + CALL ZDIV(RTHPI, CZEROI, STR, STI, COEFR, COEFI) + KFLAG = 2 + IF (KODED.EQ.2) GO TO 120 + IF (ZR.GT.ALIM) GO TO 290 +C BLANK LINE + STR = DEXP(-ZR)*CSSR(KFLAG) + STI = -STR*DSIN(ZI) + STR = STR*DCOS(ZI) + CALL ZMLT(COEFR, COEFI, STR, STI, COEFR, COEFI) + 120 CONTINUE + IF (DABS(DNU).EQ.0.5D0) GO TO 300 +C----------------------------------------------------------------------- +C MILLER ALGORITHM FOR CABS(Z).GT.R1 +C----------------------------------------------------------------------- + AK = DCOS(DPI*DNU) + AK = DABS(AK) + IF (AK.EQ.CZEROR) GO TO 300 + FHS = DABS(0.25D0-DNU2) + IF (FHS.EQ.CZEROR) GO TO 300 
+C----------------------------------------------------------------------- +C COMPUTE R2=F(E). IF CABS(Z).GE.R2, USE FORWARD RECURRENCE TO +C DETERMINE THE BACKWARD INDEX K. R2=F(E) IS A STRAIGHT LINE ON +C 12.LE.E.LE.60. E IS COMPUTED FROM 2**(-E)=B**(1-I1MACH(14))= +C TOL WHERE B IS THE BASE OF THE ARITHMETIC. +C----------------------------------------------------------------------- + T1 = DBLE(FLOAT(I1MACH(14)-1)) + T1 = T1*D1MACH(5)*3.321928094D0 + T1 = DMAX1(T1,12.0D0) + T1 = DMIN1(T1,60.0D0) + T2 = TTH*T1 - 6.0D0 + IF (ZR.NE.0.0D0) GO TO 130 + T1 = HPI + GO TO 140 + 130 CONTINUE + T1 = DATAN(ZI/ZR) + T1 = DABS(T1) + 140 CONTINUE + IF (T2.GT.CAZ) GO TO 170 +C----------------------------------------------------------------------- +C FORWARD RECURRENCE LOOP WHEN CABS(Z).GE.R2 +C----------------------------------------------------------------------- + ETEST = AK/(DPI*CAZ*TOL) + FK = CONER + IF (ETEST.LT.CONER) GO TO 180 + FKS = CTWOR + CKR = CAZ + CAZ + CTWOR + P1R = CZEROR + P2R = CONER + DO 150 I=1,KMAX + AK = FHS/FKS + CBR = CKR/(FK+CONER) + PTR = P2R + P2R = CBR*P2R - P1R*AK + P1R = PTR + CKR = CKR + CTWOR + FKS = FKS + FK + FK + CTWOR + FHS = FHS + FK + FK + FK = FK + CONER + STR = DABS(P2R)*FK + IF (ETEST.LT.STR) GO TO 160 + 150 CONTINUE + GO TO 310 + 160 CONTINUE + FK = FK + SPI*T1*DSQRT(T2/CAZ) + FHS = DABS(0.25D0-DNU2) + GO TO 180 + 170 CONTINUE +C----------------------------------------------------------------------- +C COMPUTE BACKWARD INDEX K FOR CABS(Z).LT.R2 +C----------------------------------------------------------------------- + A2 = DSQRT(CAZ) + AK = FPI*AK/(TOL*DSQRT(A2)) + AA = 3.0D0*T1/(1.0D0+CAZ) + BB = 14.7D0*T1/(28.0D0+CAZ) + AK = (DLOG(AK)+CAZ*DCOS(AA)/(1.0D0+0.008D0*CAZ))/DCOS(BB) + FK = 0.12125D0*AK*AK/CAZ + 1.5D0 + 180 CONTINUE +C----------------------------------------------------------------------- +C BACKWARD RECURRENCE LOOP FOR MILLER ALGORITHM +C----------------------------------------------------------------------- + K = INT(SNGL(FK)) + FK = DBLE(FLOAT(K)) + FKS = FK*FK + P1R = CZEROR + P1I = CZEROI + P2R = TOL + P2I = CZEROI + CSR = P2R + CSI = P2I + DO 190 I=1,K + A1 = FKS - FK + AK = (FKS+FK)/(A1+FHS) + RAK = 2.0D0/(FK+CONER) + CBR = (FK+ZR)*RAK + CBI = ZI*RAK + PTR = P2R + PTI = P2I + P2R = (PTR*CBR-PTI*CBI-P1R)*AK + P2I = (PTI*CBR+PTR*CBI-P1I)*AK + P1R = PTR + P1I = PTI + CSR = CSR + P2R + CSI = CSI + P2I + FKS = A1 - FK + CONER + FK = FK - CONER + 190 CONTINUE +C----------------------------------------------------------------------- +C COMPUTE (P2/CS)=(P2/CABS(CS))*(CONJG(CS)/CABS(CS)) FOR BETTER +C SCALING +C----------------------------------------------------------------------- + TM = ZABS(CMPLX(CSR,CSI,kind=KIND(1.0D0))) + PTR = 1.0D0/TM + S1R = P2R*PTR + S1I = P2I*PTR + CSR = CSR*PTR + CSI = -CSI*PTR + CALL ZMLT(COEFR, COEFI, S1R, S1I, STR, STI) + CALL ZMLT(STR, STI, CSR, CSI, S1R, S1I) + IF (INU.GT.0 .OR. 
N.GT.1) GO TO 200 + ZDR = ZR + ZDI = ZI + IF(IFLAG.EQ.1) GO TO 270 + GO TO 240 + 200 CONTINUE +C----------------------------------------------------------------------- +C COMPUTE P1/P2=(P1/CABS(P2)*CONJG(P2)/CABS(P2) FOR SCALING +C----------------------------------------------------------------------- + TM = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + PTR = 1.0D0/TM + P1R = P1R*PTR + P1I = P1I*PTR + P2R = P2R*PTR + P2I = -P2I*PTR + CALL ZMLT(P1R, P1I, P2R, P2I, PTR, PTI) + STR = DNU + 0.5D0 - PTR + STI = -PTI + CALL ZDIV(STR, STI, ZR, ZI, STR, STI) + STR = STR + 1.0D0 + CALL ZMLT(STR, STI, S1R, S1I, S2R, S2I) +C----------------------------------------------------------------------- +C FORWARD RECURSION ON THE THREE TERM RECURSION WITH RELATION WITH +C SCALING NEAR EXPONENT EXTREMES ON KFLAG=1 OR KFLAG=3 +C----------------------------------------------------------------------- + 210 CONTINUE + STR = DNU + 1.0D0 + CKR = STR*RZR + CKI = STR*RZI + IF (N.EQ.1) INU = INU - 1 + IF (INU.GT.0) GO TO 220 + IF (N.GT.1) GO TO 215 + S1R = S2R + S1I = S2I + 215 CONTINUE + ZDR = ZR + ZDI = ZI + IF(IFLAG.EQ.1) GO TO 270 + GO TO 240 + 220 CONTINUE + INUB = 1 + IF(IFLAG.EQ.1) GO TO 261 + 225 CONTINUE + P1R = CSRR(KFLAG) + ASCLE = BRY(KFLAG) + DO 230 I=INUB,INU + STR = S2R + STI = S2I + S2R = CKR*STR - CKI*STI + S1R + S2I = CKR*STI + CKI*STR + S1I + S1R = STR + S1I = STI + CKR = CKR + RZR + CKI = CKI + RZI + IF (KFLAG.GE.3) GO TO 230 + P2R = S2R*P1R + P2I = S2I*P1R + STR = DABS(P2R) + STI = DABS(P2I) + P2M = DMAX1(STR,STI) + IF (P2M.LE.ASCLE) GO TO 230 + KFLAG = KFLAG + 1 + ASCLE = BRY(KFLAG) + S1R = S1R*P1R + S1I = S1I*P1R + S2R = P2R + S2I = P2I + STR = CSSR(KFLAG) + S1R = S1R*STR + S1I = S1I*STR + S2R = S2R*STR + S2I = S2I*STR + P1R = CSRR(KFLAG) + 230 CONTINUE + IF (N.NE.1) GO TO 240 + S1R = S2R + S1I = S2I + 240 CONTINUE + STR = CSRR(KFLAG) + YR(1) = S1R*STR + YI(1) = S1I*STR + IF (N.EQ.1) RETURN + YR(2) = S2R*STR + YI(2) = S2I*STR + IF (N.EQ.2) RETURN + KK = 2 + 250 CONTINUE + KK = KK + 1 + IF (KK.GT.N) RETURN + P1R = CSRR(KFLAG) + ASCLE = BRY(KFLAG) + DO 260 I=KK,N + P2R = S2R + P2I = S2I + S2R = CKR*P2R - CKI*P2I + S1R + S2I = CKI*P2R + CKR*P2I + S1I + S1R = P2R + S1I = P2I + CKR = CKR + RZR + CKI = CKI + RZI + P2R = S2R*P1R + P2I = S2I*P1R + YR(I) = P2R + YI(I) = P2I + IF (KFLAG.GE.3) GO TO 260 + STR = DABS(P2R) + STI = DABS(P2I) + P2M = DMAX1(STR,STI) + IF (P2M.LE.ASCLE) GO TO 260 + KFLAG = KFLAG + 1 + ASCLE = BRY(KFLAG) + S1R = S1R*P1R + S1I = S1I*P1R + S2R = P2R + S2I = P2I + STR = CSSR(KFLAG) + S1R = S1R*STR + S1I = S1I*STR + S2R = S2R*STR + S2I = S2I*STR + P1R = CSRR(KFLAG) + 260 CONTINUE + RETURN +C----------------------------------------------------------------------- +C IFLAG=1 CASES, FORWARD RECURRENCE ON SCALED VALUES ON UNDERFLOW +C----------------------------------------------------------------------- + 261 CONTINUE + HELIM = 0.5D0*ELIM + ELM = DEXP(-ELIM) + CELMR = ELM + ASCLE = BRY(1) + ZDR = ZR + ZDI = ZI + IC = -1 + J = 2 + DO 262 I=1,INU + STR = S2R + STI = S2I + S2R = STR*CKR-STI*CKI+S1R + S2I = STI*CKR+STR*CKI+S1I + S1R = STR + S1I = STI + CKR = CKR+RZR + CKI = CKI+RZI + AS = ZABS(CMPLX(S2R,S2I,kind=KIND(1.0D0))) + ALAS = DLOG(AS) + P2R = -ZDR+ALAS + IF(P2R.LT.(-ELIM)) GO TO 263 + CALL ZLOG(S2R,S2I,STR,STI,IDUM) + P2R = -ZDR+STR + P2I = -ZDI+STI + P2M = DEXP(P2R)/TOL + P1R = P2M*DCOS(P2I) + P1I = P2M*DSIN(P2I) + CALL ZUCHK(P1R,P1I,NW,ASCLE,TOL) + IF(NW.NE.0) GO TO 263 + J = 3 - J + CYR(J) = P1R + CYI(J) = P1I + IF(IC.EQ.(I-1)) GO TO 264 + IC = I + GO TO 262 + 263 CONTINUE + 
IF(ALAS.LT.HELIM) GO TO 262 + ZDR = ZDR-ELIM + S1R = S1R*CELMR + S1I = S1I*CELMR + S2R = S2R*CELMR + S2I = S2I*CELMR + 262 CONTINUE + IF(N.NE.1) GO TO 270 + S1R = S2R + S1I = S2I + GO TO 270 + 264 CONTINUE + KFLAG = 1 + INUB = I+1 + S2R = CYR(J) + S2I = CYI(J) + J = 3 - J + S1R = CYR(J) + S1I = CYI(J) + IF(INUB.LE.INU) GO TO 225 + IF(N.NE.1) GO TO 240 + S1R = S2R + S1I = S2I + GO TO 240 + 270 CONTINUE + YR(1) = S1R + YI(1) = S1I + IF(N.EQ.1) GO TO 280 + YR(2) = S2R + YI(2) = S2I + 280 CONTINUE + ASCLE = BRY(1) + CALL ZKSCL(ZDR,ZDI,FNU,N,YR,YI,NZ,RZR,RZI,ASCLE,TOL,ELIM) + INU = N - NZ + IF (INU.LE.0) RETURN + KK = NZ + 1 + S1R = YR(KK) + S1I = YI(KK) + YR(KK) = S1R*CSRR(1) + YI(KK) = S1I*CSRR(1) + IF (INU.EQ.1) RETURN + KK = NZ + 2 + S2R = YR(KK) + S2I = YI(KK) + YR(KK) = S2R*CSRR(1) + YI(KK) = S2I*CSRR(1) + IF (INU.EQ.2) RETURN + T2 = FNU + DBLE(FLOAT(KK-1)) + CKR = T2*RZR + CKI = T2*RZI + KFLAG = 1 + GO TO 250 + 290 CONTINUE +C----------------------------------------------------------------------- +C SCALE BY DEXP(Z), IFLAG = 1 CASES +C----------------------------------------------------------------------- + KODED = 2 + IFLAG = 1 + KFLAG = 2 + GO TO 120 +C----------------------------------------------------------------------- +C FNU=HALF ODD INTEGER CASE, DNU=-0.5 +C----------------------------------------------------------------------- + 300 CONTINUE + S1R = COEFR + S1I = COEFI + S2R = COEFR + S2I = COEFI + GO TO 210 +C +C + 310 CONTINUE + NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbuni.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbuni.f new file mode 100644 index 0000000..3811b89 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbuni.f @@ -0,0 +1,174 @@ + SUBROUTINE ZBUNI(ZR, ZI, FNU, KODE, N, YR, YI, NZ, NUI, NLAST, + * FNUL, TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZBUNI +C***REFER TO ZBESI,ZBESK +C +C ZBUNI COMPUTES THE I BESSEL FUNCTION FOR LARGE CABS(Z).GT. +C FNUL AND FNU+N-1.LT.FNUL. 
THE ORDER IS INCREASED FROM +C FNU+N-1 GREATER THAN FNUL BY ADDING NUI AND COMPUTING +C ACCORDING TO THE UNIFORM ASYMPTOTIC EXPANSION FOR I(FNU,Z) +C ON IFORM=1 AND THE EXPANSION FOR J(FNU,Z) ON IFORM=2 +C +C***ROUTINES CALLED ZUNI1,ZUNI2,ZABS,D1MACH +C***END PROLOGUE ZBUNI +C COMPLEX CSCL,CSCR,CY,RZ,ST,S1,S2,Y,Z + DOUBLE PRECISION ALIM, AX, AY, CSCLR, CSCRR, CYI, CYR, DFNU, + * ELIM, FNU, FNUI, FNUL, GNU, RAZ, RZI, RZR, STI, STR, S1I, S1R, + * S2I, S2R, TOL, YI, YR, ZI, ZR, ZABS, ASCLE, BRY, C1R, C1I, C1M, + * D1MACH + INTEGER I, IFLAG, IFORM, K, KODE, N, NL, NLAST, NUI, NW, NZ + DIMENSION YR(N), YI(N), CYR(2), CYI(2), BRY(3) + NZ = 0 + AX = DABS(ZR)*1.7321D0 + AY = DABS(ZI) + IFORM = 1 + IF (AY.GT.AX) IFORM = 2 + IF (NUI.EQ.0) GO TO 60 + FNUI = DBLE(FLOAT(NUI)) + DFNU = FNU + DBLE(FLOAT(N-1)) + GNU = DFNU + FNUI + IF (IFORM.EQ.2) GO TO 10 +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR I(FNU,Z) FOR LARGE FNU APPLIED IN +C -PI/3.LE.ARG(Z).LE.PI/3 +C----------------------------------------------------------------------- + CALL ZUNI1(ZR, ZI, GNU, KODE, 2, CYR, CYI, NW, NLAST, FNUL, TOL, + * ELIM, ALIM) + GO TO 20 + 10 CONTINUE +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR J(FNU,Z*EXP(M*HPI)) FOR LARGE FNU +C APPLIED IN PI/3.LT.ABS(ARG(Z)).LE.PI/2 WHERE M=+I OR -I +C AND HPI=PI/2 +C----------------------------------------------------------------------- + CALL ZUNI2(ZR, ZI, GNU, KODE, 2, CYR, CYI, NW, NLAST, FNUL, TOL, + * ELIM, ALIM) + 20 CONTINUE + IF (NW.LT.0) GO TO 50 + IF (NW.NE.0) GO TO 90 + STR = ZABS(CMPLX(CYR(1),CYI(1),kind=KIND(1.0D0))) +C---------------------------------------------------------------------- +C SCALE BACKWARD RECURRENCE, BRY(3) IS DEFINED BUT NEVER USED +C---------------------------------------------------------------------- + BRY(1)=1.0D+3*D1MACH(1)/TOL + BRY(2) = 1.0D0/BRY(1) + BRY(3) = BRY(2) + IFLAG = 2 + ASCLE = BRY(2) + CSCLR = 1.0D0 + IF (STR.GT.BRY(1)) GO TO 21 + IFLAG = 1 + ASCLE = BRY(1) + CSCLR = 1.0D0/TOL + GO TO 25 + 21 CONTINUE + IF (STR.LT.BRY(2)) GO TO 25 + IFLAG = 3 + ASCLE=BRY(3) + CSCLR = TOL + 25 CONTINUE + CSCRR = 1.0D0/CSCLR + S1R = CYR(2)*CSCLR + S1I = CYI(2)*CSCLR + S2R = CYR(1)*CSCLR + S2I = CYI(1)*CSCLR + RAZ = 1.0D0/ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + STR = ZR*RAZ + STI = -ZI*RAZ + RZR = (STR+STR)*RAZ + RZI = (STI+STI)*RAZ + DO 30 I=1,NUI + STR = S2R + STI = S2I + S2R = (DFNU+FNUI)*(RZR*STR-RZI*STI) + S1R + S2I = (DFNU+FNUI)*(RZR*STI+RZI*STR) + S1I + S1R = STR + S1I = STI + FNUI = FNUI - 1.0D0 + IF (IFLAG.GE.3) GO TO 30 + STR = S2R*CSCRR + STI = S2I*CSCRR + C1R = DABS(STR) + C1I = DABS(STI) + C1M = DMAX1(C1R,C1I) + IF (C1M.LE.ASCLE) GO TO 30 + IFLAG = IFLAG+1 + ASCLE = BRY(IFLAG) + S1R = S1R*CSCRR + S1I = S1I*CSCRR + S2R = STR + S2I = STI + CSCLR = CSCLR*TOL + CSCRR = 1.0D0/CSCLR + S1R = S1R*CSCLR + S1I = S1I*CSCLR + S2R = S2R*CSCLR + S2I = S2I*CSCLR + 30 CONTINUE + YR(N) = S2R*CSCRR + YI(N) = S2I*CSCRR + IF (N.EQ.1) RETURN + NL = N - 1 + FNUI = DBLE(FLOAT(NL)) + K = NL + DO 40 I=1,NL + STR = S2R + STI = S2I + S2R = (FNU+FNUI)*(RZR*STR-RZI*STI) + S1R + S2I = (FNU+FNUI)*(RZR*STI+RZI*STR) + S1I + S1R = STR + S1I = STI + STR = S2R*CSCRR + STI = S2I*CSCRR + YR(K) = STR + YI(K) = STI + FNUI = FNUI - 1.0D0 + K = K - 1 + IF (IFLAG.GE.3) GO TO 40 + C1R = DABS(STR) + C1I = DABS(STI) + C1M = DMAX1(C1R,C1I) + IF (C1M.LE.ASCLE) GO TO 40 + IFLAG = IFLAG+1 + ASCLE = BRY(IFLAG) + S1R = S1R*CSCRR + S1I = S1I*CSCRR + S2R = STR + S2I = STI + CSCLR 
= CSCLR*TOL + CSCRR = 1.0D0/CSCLR + S1R = S1R*CSCLR + S1I = S1I*CSCLR + S2R = S2R*CSCLR + S2I = S2I*CSCLR + 40 CONTINUE + RETURN + 50 CONTINUE + NZ = -1 + IF(NW.EQ.(-2)) NZ=-2 + RETURN + 60 CONTINUE + IF (IFORM.EQ.2) GO TO 70 +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR I(FNU,Z) FOR LARGE FNU APPLIED IN +C -PI/3.LE.ARG(Z).LE.PI/3 +C----------------------------------------------------------------------- + CALL ZUNI1(ZR, ZI, FNU, KODE, N, YR, YI, NW, NLAST, FNUL, TOL, + * ELIM, ALIM) + GO TO 80 + 70 CONTINUE +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR J(FNU,Z*EXP(M*HPI)) FOR LARGE FNU +C APPLIED IN PI/3.LT.ABS(ARG(Z)).LE.PI/2 WHERE M=+I OR -I +C AND HPI=PI/2 +C----------------------------------------------------------------------- + CALL ZUNI2(ZR, ZI, FNU, KODE, N, YR, YI, NW, NLAST, FNUL, TOL, + * ELIM, ALIM) + 80 CONTINUE + IF (NW.LT.0) GO TO 50 + NZ = NW + RETURN + 90 CONTINUE + NLAST = N + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbunk.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbunk.f new file mode 100644 index 0000000..b20b79f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zbunk.f @@ -0,0 +1,35 @@ + SUBROUTINE ZBUNK(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZBUNK +C***REFER TO ZBESK,ZBESH +C +C ZBUNK COMPUTES THE K BESSEL FUNCTION FOR FNU.GT.FNUL. +C ACCORDING TO THE UNIFORM ASYMPTOTIC EXPANSION FOR K(FNU,Z) +C IN ZUNK1 AND THE EXPANSION FOR H(2,FNU,Z) IN ZUNK2 +C +C***ROUTINES CALLED ZUNK1,ZUNK2 +C***END PROLOGUE ZBUNK +C COMPLEX Y,Z + DOUBLE PRECISION ALIM, AX, AY, ELIM, FNU, TOL, YI, YR, ZI, ZR + INTEGER KODE, MR, N, NZ + DIMENSION YR(N), YI(N) + NZ = 0 + AX = DABS(ZR)*1.7321D0 + AY = DABS(ZI) + IF (AY.GT.AX) GO TO 10 +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR K(FNU,Z) FOR LARGE FNU APPLIED IN +C -PI/3.LE.ARG(Z).LE.PI/3 +C----------------------------------------------------------------------- + CALL ZUNK1(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, TOL, ELIM, ALIM) + GO TO 20 + 10 CONTINUE +C----------------------------------------------------------------------- +C ASYMPTOTIC EXPANSION FOR H(2,FNU,Z*EXP(M*HPI)) FOR LARGE FNU +C APPLIED IN PI/3.LT.ABS(ARG(Z)).LE.PI/2 WHERE M=+I OR -I +C AND HPI=PI/2 +C----------------------------------------------------------------------- + CALL ZUNK2(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, TOL, ELIM, ALIM) + 20 CONTINUE + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zdiv.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zdiv.f new file mode 100644 index 0000000..ac73b86 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zdiv.f @@ -0,0 +1,19 @@ + SUBROUTINE ZDIV(AR, AI, BR, BI, CR, CI) +C***BEGIN PROLOGUE ZDIV +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C DOUBLE PRECISION COMPLEX DIVIDE C=A/B. 
+C +C***ROUTINES CALLED ZABS +C***END PROLOGUE ZDIV + DOUBLE PRECISION AR, AI, BR, BI, CR, CI, BM, CA, CB, CC, CD + DOUBLE PRECISION ZABS + BM = 1.0D0/ZABS(CMPLX(BR,BI,kind=KIND(1.0D0))) + CC = BR*BM + CD = BI*BM + CA = (AR*CC+AI*CD)*BM + CB = (AI*CC-AR*CD)*BM + CR = CA + CI = CB + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zexp.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zexp.f new file mode 100644 index 0000000..fcb553c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zexp.f @@ -0,0 +1,16 @@ + SUBROUTINE ZEXP(AR, AI, BR, BI) +C***BEGIN PROLOGUE ZEXP +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C DOUBLE PRECISION COMPLEX EXPONENTIAL FUNCTION B=EXP(A) +C +C***ROUTINES CALLED (NONE) +C***END PROLOGUE ZEXP + DOUBLE PRECISION AR, AI, BR, BI, ZM, CA, CB + ZM = DEXP(AR) + CA = ZM*DCOS(AI) + CB = ZM*DSIN(AI) + BR = CA + BI = CB + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zkscl.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zkscl.f new file mode 100644 index 0000000..eeda471 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zkscl.f @@ -0,0 +1,121 @@ + SUBROUTINE ZKSCL(ZRR,ZRI,FNU,N,YR,YI,NZ,RZR,RZI,ASCLE,TOL,ELIM) +C***BEGIN PROLOGUE ZKSCL +C***REFER TO ZBESK +C +C SET K FUNCTIONS TO ZERO ON UNDERFLOW, CONTINUE RECURRENCE +C ON SCALED FUNCTIONS UNTIL TWO MEMBERS COME ON SCALE, THEN +C RETURN WITH MIN(NZ+2,N) VALUES SCALED BY 1/TOL. +C +C***ROUTINES CALLED ZUCHK,ZABS,ZLOG +C***END PROLOGUE ZKSCL +C COMPLEX CK,CS,CY,CZERO,RZ,S1,S2,Y,ZR,ZD,CELM + DOUBLE PRECISION ACS, AS, ASCLE, CKI, CKR, CSI, CSR, CYI, + * CYR, ELIM, FN, FNU, RZI, RZR, STR, S1I, S1R, S2I, + * S2R, TOL, YI, YR, ZEROI, ZEROR, ZRI, ZRR, ZABS, + * ZDR, ZDI, CELMR, ELM, HELIM, ALAS + INTEGER I, IC, IDUM, KK, N, NN, NW, NZ + DIMENSION YR(N), YI(N), CYR(2), CYI(2) + DATA ZEROR,ZEROI / 0.0D0 , 0.0D0 / +C + NZ = 0 + IC = 0 + NN = MIN0(2,N) + DO 10 I=1,NN + S1R = YR(I) + S1I = YI(I) + CYR(I) = S1R + CYI(I) = S1I + AS = ZABS(CMPLX(S1R,S1I,kind=KIND(1.0D0))) + ACS = -ZRR + DLOG(AS) + NZ = NZ + 1 + YR(I) = ZEROR + YI(I) = ZEROI + IF (ACS.LT.(-ELIM)) GO TO 10 + CALL ZLOG(S1R, S1I, CSR, CSI, IDUM) + CSR = CSR - ZRR + CSI = CSI - ZRI + STR = DEXP(CSR)/TOL + CSR = STR*DCOS(CSI) + CSI = STR*DSIN(CSI) + CALL ZUCHK(CSR, CSI, NW, ASCLE, TOL) + IF (NW.NE.0) GO TO 10 + YR(I) = CSR + YI(I) = CSI + IC = I + NZ = NZ - 1 + 10 CONTINUE + IF (N.EQ.1) RETURN + IF (IC.GT.1) GO TO 20 + YR(1) = ZEROR + YI(1) = ZEROI + NZ = 2 + 20 CONTINUE + IF (N.EQ.2) RETURN + IF (NZ.EQ.0) RETURN + FN = FNU + 1.0D0 + CKR = FN*RZR + CKI = FN*RZI + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + HELIM = 0.5D0*ELIM + ELM = DEXP(-ELIM) + CELMR = ELM + ZDR = ZRR + ZDI = ZRI +C +C FIND TWO CONSECUTIVE Y VALUES ON SCALE. 
SCALE RECURRENCE IF +C S2 GETS LARGER THAN EXP(ELIM/2) +C + DO 30 I=3,N + KK = I + CSR = S2R + CSI = S2I + S2R = CKR*CSR - CKI*CSI + S1R + S2I = CKI*CSR + CKR*CSI + S1I + S1R = CSR + S1I = CSI + CKR = CKR + RZR + CKI = CKI + RZI + AS = ZABS(CMPLX(S2R,S2I,kind=KIND(1.0D0))) + ALAS = DLOG(AS) + ACS = -ZDR + ALAS + NZ = NZ + 1 + YR(I) = ZEROR + YI(I) = ZEROI + IF (ACS.LT.(-ELIM)) GO TO 25 + CALL ZLOG(S2R, S2I, CSR, CSI, IDUM) + CSR = CSR - ZDR + CSI = CSI - ZDI + STR = DEXP(CSR)/TOL + CSR = STR*DCOS(CSI) + CSI = STR*DSIN(CSI) + CALL ZUCHK(CSR, CSI, NW, ASCLE, TOL) + IF (NW.NE.0) GO TO 25 + YR(I) = CSR + YI(I) = CSI + NZ = NZ - 1 + IF (IC.EQ.KK-1) GO TO 40 + IC = KK + GO TO 30 + 25 CONTINUE + IF(ALAS.LT.HELIM) GO TO 30 + ZDR = ZDR - ELIM + S1R = S1R*CELMR + S1I = S1I*CELMR + S2R = S2R*CELMR + S2I = S2I*CELMR + 30 CONTINUE + NZ = N + IF(IC.EQ.N) NZ=N-1 + GO TO 45 + 40 CONTINUE + NZ = KK - 2 + 45 CONTINUE + DO 50 I=1,NZ + YR(I) = ZEROR + YI(I) = ZEROI + 50 CONTINUE + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zlog.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zlog.f new file mode 100644 index 0000000..bb22cd0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zlog.f @@ -0,0 +1,41 @@ + SUBROUTINE ZLOG(AR, AI, BR, BI, IERR) +C***BEGIN PROLOGUE ZLOG +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C DOUBLE PRECISION COMPLEX LOGARITHM B=CLOG(A) +C IERR=0,NORMAL RETURN IERR=1, Z=CMPLX(0.0,0.0) +C***ROUTINES CALLED ZABS +C***END PROLOGUE ZLOG + DOUBLE PRECISION AR, AI, BR, BI, ZM, DTHETA, DPI, DHPI + DOUBLE PRECISION ZABS + DATA DPI , DHPI / 3.141592653589793238462643383D+0, + 1 1.570796326794896619231321696D+0/ +C + IERR=0 + IF (AR.EQ.0.0D+0) GO TO 10 + IF (AI.EQ.0.0D+0) GO TO 20 + DTHETA = DATAN(AI/AR) + IF (DTHETA.LE.0.0D+0) GO TO 40 + IF (AR.LT.0.0D+0) DTHETA = DTHETA - DPI + GO TO 50 + 10 IF (AI.EQ.0.0D+0) GO TO 60 + BI = DHPI + BR = DLOG(DABS(AI)) + IF (AI.LT.0.0D+0) BI = -BI + RETURN + 20 IF (AR.GT.0.0D+0) GO TO 30 + BR = DLOG(DABS(AR)) + BI = DPI + RETURN + 30 BR = DLOG(AR) + BI = 0.0D+0 + RETURN + 40 IF (AR.LT.0.0D+0) DTHETA = DTHETA + DPI + 50 ZM = ZABS(CMPLX(AR,AI,kind=KIND(1.0D0))) + BR = DLOG(ZM) + BI = DTHETA + RETURN + 60 CONTINUE + IERR=1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlri.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlri.f new file mode 100644 index 0000000..c112f79 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlri.f @@ -0,0 +1,206 @@ + SUBROUTINE ZMLRI(ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL) +C***BEGIN PROLOGUE ZMLRI +C***REFER TO ZBESI,ZBESK +C +C ZMLRI COMPUTES THE I BESSEL FUNCTION FOR RE(Z).GE.0.0 BY THE +C MILLER ALGORITHM NORMALIZED BY A NEUMANN SERIES. 
+C +C***ROUTINES CALLED DGAMLN,D1MACH,ZABS,ZEXP,ZLOG,ZMLT +C***END PROLOGUE ZMLRI +C COMPLEX CK,CNORM,CONE,CTWO,CZERO,PT,P1,P2,RZ,SUM,Y,Z + DOUBLE PRECISION ACK, AK, AP, AT, AZ, BK, CKI, CKR, CNORMI, + * CNORMR, CONEI, CONER, FKAP, FKK, FLAM, FNF, FNU, PTI, PTR, P1I, + * P1R, P2I, P2R, RAZ, RHO, RHO2, RZI, RZR, SCLE, STI, STR, SUMI, + * SUMR, TFNF, TOL, TST, YI, YR, ZEROI, ZEROR, ZI, ZR, DGAMLN, + * D1MACH, ZABS + INTEGER I, IAZ, IDUM, IFNU, INU, ITIME, K, KK, KM, KODE, M, N, NZ + DIMENSION YR(N), YI(N) + DATA ZEROR,ZEROI,CONER,CONEI / 0.0D0, 0.0D0, 1.0D0, 0.0D0 / + SCLE = D1MACH(1)/TOL + NZ=0 + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + IAZ = INT(SNGL(AZ)) + IFNU = INT(SNGL(FNU)) + INU = IFNU + N - 1 + AT = DBLE(FLOAT(IAZ)) + 1.0D0 + RAZ = 1.0D0/AZ + STR = ZR*RAZ + STI = -ZI*RAZ + CKR = STR*AT*RAZ + CKI = STI*AT*RAZ + RZR = (STR+STR)*RAZ + RZI = (STI+STI)*RAZ + P1R = ZEROR + P1I = ZEROI + P2R = CONER + P2I = CONEI + ACK = (AT+1.0D0)*RAZ + RHO = ACK + DSQRT(ACK*ACK-1.0D0) + RHO2 = RHO*RHO + TST = (RHO2+RHO2)/((RHO2-1.0D0)*(RHO-1.0D0)) + TST = TST/TOL +C----------------------------------------------------------------------- +C COMPUTE RELATIVE TRUNCATION ERROR INDEX FOR SERIES +C----------------------------------------------------------------------- + AK = AT + DO 10 I=1,80 + PTR = P2R + PTI = P2I + P2R = P1R - (CKR*PTR-CKI*PTI) + P2I = P1I - (CKI*PTR+CKR*PTI) + P1R = PTR + P1I = PTI + CKR = CKR + RZR + CKI = CKI + RZI + AP = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + IF (AP.GT.TST*AK*AK) THEN + GO TO 20 + END IF + AK = AK + 1.0D0 + 10 CONTINUE + GO TO 110 + 20 CONTINUE + I = I + 1 + K = 0 + IF (INU.LT.IAZ) GO TO 40 +C----------------------------------------------------------------------- +C COMPUTE RELATIVE TRUNCATION ERROR FOR RATIOS +C----------------------------------------------------------------------- + P1R = ZEROR + P1I = ZEROI + P2R = CONER + P2I = CONEI + AT = DBLE(FLOAT(INU)) + 1.0D0 + STR = ZR*RAZ + STI = -ZI*RAZ + CKR = STR*AT*RAZ + CKI = STI*AT*RAZ + ACK = AT*RAZ + TST = DSQRT(ACK/TOL) + ITIME = 1 + DO 30 K=1,80 + PTR = P2R + PTI = P2I + P2R = P1R - (CKR*PTR-CKI*PTI) + P2I = P1I - (CKR*PTI+CKI*PTR) + P1R = PTR + P1I = PTI + CKR = CKR + RZR + CKI = CKI + RZI + AP = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + IF (AP.LT.TST) GO TO 30 + IF (ITIME.EQ.2) GO TO 40 + ACK = ZABS(CMPLX(CKR,CKI,kind=KIND(1.0D0))) + FLAM = ACK + DSQRT(ACK*ACK-1.0D0) + FKAP = AP/ZABS(CMPLX(P1R,P1I,kind=KIND(1.0D0))) + RHO = DMIN1(FLAM,FKAP) + TST = TST*DSQRT(RHO/(RHO*RHO-1.0D0)) + ITIME = 2 + 30 CONTINUE + GO TO 110 + 40 CONTINUE +C----------------------------------------------------------------------- +C BACKWARD RECURRENCE AND SUM NORMALIZING RELATION +C----------------------------------------------------------------------- + K = K + 1 + KK = MAX0(I+IAZ,K+INU) + FKK = DBLE(FLOAT(KK)) + P1R = ZEROR + P1I = ZEROI +C----------------------------------------------------------------------- +C SCALE P2 AND SUM BY SCLE +C----------------------------------------------------------------------- + P2R = SCLE + P2I = ZEROI + FNF = FNU - DBLE(FLOAT(IFNU)) + TFNF = FNF + FNF + BK = DGAMLN(FKK+TFNF+1.0D0,IDUM) - DGAMLN(FKK+1.0D0,IDUM) - + * DGAMLN(TFNF+1.0D0,IDUM) + BK = DEXP(BK) + SUMR = ZEROR + SUMI = ZEROI + KM = KK - INU + DO 50 I=1,KM + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZI*PTR+RZR*PTI) + P1R = PTR + P1I = PTI + AK = 1.0D0 - TFNF/(FKK+TFNF) + ACK = BK*AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0D0 + 50 CONTINUE + YR(N) = 
P2R + YI(N) = P2I + IF (N.EQ.1) GO TO 70 + DO 60 I=2,N + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZI*PTR+RZR*PTI) + P1R = PTR + P1I = PTI + AK = 1.0D0 - TFNF/(FKK+TFNF) + ACK = BK*AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0D0 + M = N - I + 1 + YR(M) = P2R + YI(M) = P2I + 60 CONTINUE + 70 CONTINUE + IF (IFNU.LE.0) GO TO 90 + DO 80 I=1,IFNU + PTR = P2R + PTI = P2I + P2R = P1R + (FKK+FNF)*(RZR*PTR-RZI*PTI) + P2I = P1I + (FKK+FNF)*(RZR*PTI+RZI*PTR) + P1R = PTR + P1I = PTI + AK = 1.0D0 - TFNF/(FKK+TFNF) + ACK = BK*AK + SUMR = SUMR + (ACK+BK)*P1R + SUMI = SUMI + (ACK+BK)*P1I + BK = ACK + FKK = FKK - 1.0D0 + 80 CONTINUE + 90 CONTINUE + PTR = ZR + PTI = ZI + IF (KODE.EQ.2) PTR = ZEROR + CALL ZLOG(RZR, RZI, STR, STI, IDUM) + P1R = -FNF*STR + PTR + P1I = -FNF*STI + PTI + AP = DGAMLN(1.0D0+FNF,IDUM) + PTR = P1R - AP + PTI = P1I +C----------------------------------------------------------------------- +C THE DIVISION CEXP(PT)/(SUM+P2) IS ALTERED TO AVOID OVERFLOW +C IN THE DENOMINATOR BY SQUARING LARGE QUANTITIES +C----------------------------------------------------------------------- + P2R = P2R + SUMR + P2I = P2I + SUMI + AP = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + P1R = 1.0D0/AP + CALL ZEXP(PTR, PTI, STR, STI) + CKR = STR*P1R + CKI = STI*P1R + PTR = P2R*P1R + PTI = -P2I*P1R + CALL ZMLT(CKR, CKI, PTR, PTI, CNORMR, CNORMI) + DO 100 I=1,N + STR = YR(I)*CNORMR - YI(I)*CNORMI + YI(I) = YR(I)*CNORMI + YI(I)*CNORMR + YR(I) = STR + 100 CONTINUE + RETURN + 110 CONTINUE + NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlt.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlt.f new file mode 100644 index 0000000..3bde7d3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zmlt.f @@ -0,0 +1,15 @@ + SUBROUTINE ZMLT(AR, AI, BR, BI, CR, CI) +C***BEGIN PROLOGUE ZMLT +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C DOUBLE PRECISION COMPLEX MULTIPLY, C=A*B. +C +C***ROUTINES CALLED (NONE) +C***END PROLOGUE ZMLT + DOUBLE PRECISION AR, AI, BR, BI, CR, CI, CA, CB + CA = AR*BR - AI*BI + CB = AR*BI + AI*BR + CR = CA + CI = CB + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zrati.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zrati.f new file mode 100644 index 0000000..b66bd20 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zrati.f @@ -0,0 +1,132 @@ + SUBROUTINE ZRATI(ZR, ZI, FNU, N, CYR, CYI, TOL) +C***BEGIN PROLOGUE ZRATI +C***REFER TO ZBESI,ZBESK,ZBESH +C +C ZRATI COMPUTES RATIOS OF I BESSEL FUNCTIONS BY BACKWARD +C RECURRENCE. THE STARTING INDEX IS DETERMINED BY FORWARD +C RECURRENCE AS DESCRIBED IN J. RES. OF NAT. BUR. OF STANDARDS-B, +C MATHEMATICAL SCIENCES, VOL 77B, P111-114, SEPTEMBER, 1973, +C BESSEL FUNCTIONS I AND J OF COMPLEX ARGUMENT AND INTEGER ORDER, +C BY D. J. SOOKNE. 
+C +C***ROUTINES CALLED ZABS,ZDIV +C***END PROLOGUE ZRATI +C COMPLEX Z,CY(1),CONE,CZERO,P1,P2,T1,RZ,PT,CDFNU + DOUBLE PRECISION AK, AMAGZ, AP1, AP2, ARG, AZ, CDFNUI, CDFNUR, + * CONEI, CONER, CYI, CYR, CZEROI, CZEROR, DFNU, FDNU, FLAM, FNU, + * FNUP, PTI, PTR, P1I, P1R, P2I, P2R, RAK, RAP1, RHO, RT2, RZI, + * RZR, TEST, TEST1, TOL, TTI, TTR, T1I, T1R, ZI, ZR, ZABS + INTEGER I, ID, IDNU, INU, ITIME, K, KK, MAGZ, N + DIMENSION CYR(N), CYI(N) + DATA CZEROR,CZEROI,CONER,CONEI,RT2/ + 1 0.0D0, 0.0D0, 1.0D0, 0.0D0, 1.41421356237309505D0 / + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + INU = INT(SNGL(FNU)) + IDNU = INU + N - 1 + MAGZ = INT(SNGL(AZ)) + AMAGZ = DBLE(FLOAT(MAGZ+1)) + FDNU = DBLE(FLOAT(IDNU)) + FNUP = DMAX1(AMAGZ,FDNU) + ID = IDNU - MAGZ - 1 + ITIME = 1 + K = 1 + PTR = 1.0D0/AZ + RZR = PTR*(ZR+ZR)*PTR + RZI = -PTR*(ZI+ZI)*PTR + T1R = RZR*FNUP + T1I = RZI*FNUP + P2R = -T1R + P2I = -T1I + P1R = CONER + P1I = CONEI + T1R = T1R + RZR + T1I = T1I + RZI + IF (ID.GT.0) ID = 0 + AP2 = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + AP1 = ZABS(CMPLX(P1R,P1I,kind=KIND(1.0D0))) +C----------------------------------------------------------------------- +C THE OVERFLOW TEST ON K(FNU+I-1,Z) BEFORE THE CALL TO CBKNU +C GUARANTEES THAT P2 IS ON SCALE. SCALE TEST1 AND ALL SUBSEQUENT +C P2 VALUES BY AP1 TO ENSURE THAT AN OVERFLOW DOES NOT OCCUR +C PREMATURELY. +C----------------------------------------------------------------------- + ARG = (AP2+AP2)/(AP1*TOL) + TEST1 = DSQRT(ARG) + TEST = TEST1 + RAP1 = 1.0D0/AP1 + P1R = P1R*RAP1 + P1I = P1I*RAP1 + P2R = P2R*RAP1 + P2I = P2I*RAP1 + AP2 = AP2*RAP1 + 10 CONTINUE + K = K + 1 + AP1 = AP2 + PTR = P2R + PTI = P2I + P2R = P1R - (T1R*PTR-T1I*PTI) + P2I = P1I - (T1R*PTI+T1I*PTR) + P1R = PTR + P1I = PTI + T1R = T1R + RZR + T1I = T1I + RZI + AP2 = ZABS(CMPLX(P2R,P2I,kind=KIND(1.0D0))) + IF (AP1.LE.TEST) GO TO 10 + IF (ITIME.EQ.2) GO TO 20 + AK = ZABS(CMPLX(T1R,T1I,kind=KIND(1.0D0))*0.5D0) + FLAM = AK + DSQRT(AK*AK-1.0D0) + RHO = DMIN1(AP2/AP1,FLAM) + TEST = TEST1*DSQRT(RHO/(RHO*RHO-1.0D0)) + ITIME = 2 + GO TO 10 + 20 CONTINUE + KK = K + 1 - ID + AK = DBLE(FLOAT(KK)) + T1R = AK + T1I = CZEROI + DFNU = FNU + DBLE(FLOAT(N-1)) + P1R = 1.0D0/AP2 + P1I = CZEROI + P2R = CZEROR + P2I = CZEROI + DO 30 I=1,KK + PTR = P1R + PTI = P1I + RAP1 = DFNU + T1R + TTR = RZR*RAP1 + TTI = RZI*RAP1 + P1R = (PTR*TTR-PTI*TTI) + P2R + P1I = (PTR*TTI+PTI*TTR) + P2I + P2R = PTR + P2I = PTI + T1R = T1R - CONER + 30 CONTINUE + IF (P1R.NE.CZEROR .OR. 
P1I.NE.CZEROI) GO TO 40 + P1R = TOL + P1I = TOL + 40 CONTINUE + CALL ZDIV(P2R, P2I, P1R, P1I, CYR(N), CYI(N)) + IF (N.EQ.1) RETURN + K = N - 1 + AK = DBLE(FLOAT(K)) + T1R = AK + T1I = CZEROI + CDFNUR = FNU*RZR + CDFNUI = FNU*RZI + DO 60 I=2,N + PTR = CDFNUR + (T1R*RZR-T1I*RZI) + CYR(K+1) + PTI = CDFNUI + (T1R*RZI+T1I*RZR) + CYI(K+1) + AK = ZABS(CMPLX(PTR,PTI,kind=KIND(1.0D0))) + IF (AK.NE.CZEROR) GO TO 50 + PTR = TOL + PTI = TOL + AK = TOL*RT2 + 50 CONTINUE + RAK = CONER/AK + CYR(K) = RAK*PTR*RAK + CYI(K) = -RAK*PTI*RAK + T1R = T1R - CONER + K = K - 1 + 60 CONTINUE + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zs1s2.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zs1s2.f new file mode 100644 index 0000000..0a6da31 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zs1s2.f @@ -0,0 +1,51 @@ + SUBROUTINE ZS1S2(ZRR, ZRI, S1R, S1I, S2R, S2I, NZ, ASCLE, ALIM, + * IUF) +C***BEGIN PROLOGUE ZS1S2 +C***REFER TO ZBESK,ZAIRY +C +C ZS1S2 TESTS FOR A POSSIBLE UNDERFLOW RESULTING FROM THE +C ADDITION OF THE I AND K FUNCTIONS IN THE ANALYTIC CON- +C TINUATION FORMULA WHERE S1=K FUNCTION AND S2=I FUNCTION. +C ON KODE=1 THE I AND K FUNCTIONS ARE DIFFERENT ORDERS OF +C MAGNITUDE, BUT FOR KODE=2 THEY CAN BE OF THE SAME ORDER +C OF MAGNITUDE AND THE MAXIMUM MUST BE AT LEAST ONE +C PRECISION ABOVE THE UNDERFLOW LIMIT. +C +C***ROUTINES CALLED ZABS,ZEXP,ZLOG +C***END PROLOGUE ZS1S2 +C COMPLEX CZERO,C1,S1,S1D,S2,ZR + DOUBLE PRECISION AA, ALIM, ALN, ASCLE, AS1, AS2, C1I, C1R, S1DI, + * S1DR, S1I, S1R, S2I, S2R, ZEROI, ZEROR, ZRI, ZRR, ZABS + INTEGER IUF, IDUM, NZ + DATA ZEROR,ZEROI / 0.0D0 , 0.0D0 / + NZ = 0 + AS1 = ZABS(CMPLX(S1R,S1I,kind=KIND(1.0D0))) + AS2 = ZABS(CMPLX(S2R,S2I,kind=KIND(1.0D0))) + IF (S1R.EQ.0.0D0 .AND. S1I.EQ.0.0D0) GO TO 10 + IF (AS1.EQ.0.0D0) GO TO 10 + ALN = -ZRR - ZRR + DLOG(AS1) + S1DR = S1R + S1DI = S1I + S1R = ZEROR + S1I = ZEROI + AS1 = ZEROR + IF (ALN.LT.(-ALIM)) GO TO 10 + CALL ZLOG(S1DR, S1DI, C1R, C1I, IDUM) + C1R = C1R - ZRR - ZRR + C1I = C1I - ZRI - ZRI + CALL ZEXP(C1R, C1I, S1R, S1I) + AS1 = ZABS(CMPLX(S1R,S1I,kind=KIND(1.0D0))) + IUF = IUF + 1 + 10 CONTINUE + AA = DMAX1(AS1,AS2) + IF (AA.GT.ASCLE) THEN + RETURN + END IF + S1R = ZEROR + S1I = ZEROI + S2R = ZEROR + S2I = ZEROI + NZ = 1 + IUF = 0 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zseri.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zseri.f new file mode 100644 index 0000000..862fbf6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zseri.f @@ -0,0 +1,195 @@ + SUBROUTINE ZSERI(ZR, ZI, FNU, KODE, N, YR, YI, NZ, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZSERI +C***REFER TO ZBESI,ZBESK +C +C ZSERI COMPUTES THE I BESSEL FUNCTION FOR REAL(Z).GE.0.0 BY +C MEANS OF THE POWER SERIES FOR LARGE CABS(Z) IN THE +C REGION CABS(Z).LE.2*SQRT(FNU+1). NZ=0 IS A NORMAL RETURN. +C NZ.GT.0 MEANS THAT THE LAST NZ COMPONENTS WERE SET TO ZERO +C DUE TO UNDERFLOW. NZ.LT.0 MEANS UNDERFLOW OCCURRED, BUT THE +C CONDITION CABS(Z).LE.2*SQRT(FNU+1) WAS VIOLATED AND THE +C COMPUTATION MUST BE COMPLETED IN ANOTHER ROUTINE WITH N=N-ABS(NZ). 
+C +C***ROUTINES CALLED DGAMLN,D1MACH,ZUCHK,ZABS,ZDIV,ZLOG,ZMLT +C***END PROLOGUE ZSERI +C COMPLEX AK1,CK,COEF,CONE,CRSC,CSCL,CZ,CZERO,HZ,RZ,S1,S2,Y,Z + DOUBLE PRECISION AA, ACZ, AK, AK1I, AK1R, ALIM, ARM, ASCLE, ATOL, + * AZ, CKI, CKR, COEFI, COEFR, CONEI, CONER, CRSCR, CZI, CZR, DFNU, + * ELIM, FNU, FNUP, HZI, HZR, RAZ, RS, RTR1, RZI, RZR, S, SS, STI, + * STR, S1I, S1R, S2I, S2R, TOL, YI, YR, WI, WR, ZEROI, ZEROR, ZI, + * ZR, DGAMLN, D1MACH, ZABS + INTEGER I, IB, IDUM, IFLAG, IL, K, KODE, L, M, N, NN, NZ, NW + DIMENSION YR(N), YI(N), WR(2), WI(2) + DATA ZEROR,ZEROI,CONER,CONEI / 0.0D0, 0.0D0, 1.0D0, 0.0D0 / +C + + NZ = 0 + AZ = ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + IF (AZ.EQ.0.0D0) GO TO 160 + ARM = 1.0D+3*D1MACH(1) + RTR1 = DSQRT(ARM) + CRSCR = 1.0D0 + IFLAG = 0 + IF (AZ.LT.ARM) THEN + GO TO 150 + END IF + HZR = 0.5D0*ZR + HZI = 0.5D0*ZI + CZR = ZEROR + CZI = ZEROI + IF (AZ.LE.RTR1) GO TO 10 + CALL ZMLT(HZR, HZI, HZR, HZI, CZR, CZI) + 10 CONTINUE + ACZ = ZABS(CMPLX(CZR,CZI,kind=KIND(1.0D0))) + NN = N + CALL ZLOG(HZR, HZI, CKR, CKI, IDUM) + 20 CONTINUE + DFNU = FNU + DBLE(FLOAT(NN-1)) + FNUP = DFNU + 1.0D0 +C----------------------------------------------------------------------- +C UNDERFLOW TEST +C----------------------------------------------------------------------- + AK1R = CKR*DFNU + AK1I = CKI*DFNU + AK = DGAMLN(FNUP,IDUM) + AK1R = AK1R - AK + IF (KODE.EQ.2) AK1R = AK1R - ZR + IF (AK1R.GT.(-ELIM)) GO TO 40 + 30 CONTINUE + NZ = NZ + 1 + YR(NN) = ZEROR + YI(NN) = ZEROI + IF (ACZ.GT.DFNU) GO TO 190 + NN = NN - 1 + IF (NN.EQ.0) RETURN + GO TO 20 + 40 CONTINUE + IF (AK1R.GT.(-ALIM)) GO TO 50 + IFLAG = 1 + SS = 1.0D0/TOL + CRSCR = TOL + ASCLE = ARM*SS + 50 CONTINUE + AA = DEXP(AK1R) + IF (IFLAG.EQ.1) AA = AA*SS + COEFR = AA*DCOS(AK1I) + COEFI = AA*DSIN(AK1I) + ATOL = TOL*ACZ/FNUP + IL = MIN0(2,NN) + DO 90 I=1,IL + DFNU = FNU + DBLE(FLOAT(NN-I)) + FNUP = DFNU + 1.0D0 + S1R = CONER + S1I = CONEI + IF (ACZ.LT.TOL*FNUP) GO TO 70 + AK1R = CONER + AK1I = CONEI + AK = FNUP + 2.0D0 + S = FNUP + AA = 2.0D0 + 60 CONTINUE + RS = 1.0D0/S + STR = AK1R*CZR - AK1I*CZI + STI = AK1R*CZI + AK1I*CZR + AK1R = STR*RS + AK1I = STI*RS + S1R = S1R + AK1R + S1I = S1I + AK1I + S = S + AK + AK = AK + 2.0D0 + AA = AA*ACZ*RS + IF (AA.GT.ATOL) GO TO 60 + 70 CONTINUE + S2R = S1R*COEFR - S1I*COEFI + S2I = S1R*COEFI + S1I*COEFR + WR(I) = S2R + WI(I) = S2I + IF (IFLAG.EQ.0) GO TO 80 + CALL ZUCHK(S2R, S2I, NW, ASCLE, TOL) + IF (NW.NE.0) GO TO 30 + 80 CONTINUE + M = NN - I + 1 + YR(M) = S2R*CRSCR + YI(M) = S2I*CRSCR + IF (I.EQ.IL) GO TO 90 + CALL ZDIV(COEFR, COEFI, HZR, HZI, STR, STI) + COEFR = STR*DFNU + COEFI = STI*DFNU + 90 CONTINUE + IF (NN.LE.2) THEN + RETURN + END IF + K = NN - 2 + AK = DBLE(FLOAT(K)) + RAZ = 1.0D0/AZ + STR = ZR*RAZ + STI = -ZI*RAZ + RZR = (STR+STR)*RAZ + RZI = (STI+STI)*RAZ + IF (IFLAG.EQ.1) GO TO 120 + IB = 3 + 100 CONTINUE + DO 110 I=IB,NN + YR(K) = (AK+FNU)*(RZR*YR(K+1)-RZI*YI(K+1)) + YR(K+2) + YI(K) = (AK+FNU)*(RZR*YI(K+1)+RZI*YR(K+1)) + YI(K+2) + AK = AK - 1.0D0 + K = K - 1 + 110 CONTINUE + RETURN +C----------------------------------------------------------------------- +C RECUR BACKWARD WITH SCALED VALUES +C----------------------------------------------------------------------- + 120 CONTINUE +C----------------------------------------------------------------------- +C EXP(-ALIM)=EXP(-ELIM)/TOL=APPROX. 
ONE PRECISION ABOVE THE +C UNDERFLOW LIMIT = ASCLE = D1MACH(1)*SS*1.0D+3 +C----------------------------------------------------------------------- + S1R = WR(1) + S1I = WI(1) + S2R = WR(2) + S2I = WI(2) + DO 130 L=3,NN + CKR = S2R + CKI = S2I + S2R = S1R + (AK+FNU)*(RZR*CKR-RZI*CKI) + S2I = S1I + (AK+FNU)*(RZR*CKI+RZI*CKR) + S1R = CKR + S1I = CKI + CKR = S2R*CRSCR + CKI = S2I*CRSCR + YR(K) = CKR + YI(K) = CKI + AK = AK - 1.0D0 + K = K - 1 + IF (ZABS(CMPLX(CKR,CKI,kind=KIND(1.0D0))).GT.ASCLE) GO TO 140 + 130 CONTINUE + RETURN + 140 CONTINUE + IB = L + 1 + IF (IB.GT.NN) RETURN + GO TO 100 + 150 CONTINUE + NZ = N + IF (FNU.EQ.0.0D0) NZ = NZ - 1 + 160 CONTINUE + YR(1) = ZEROR + YI(1) = ZEROI + IF (FNU.NE.0.0D0) GO TO 170 + YR(1) = CONER + YI(1) = CONEI + 170 CONTINUE + IF (N.EQ.1) RETURN + DO 180 I=2,N + YR(I) = ZEROR + YI(I) = ZEROI + 180 CONTINUE + RETURN +C----------------------------------------------------------------------- +C RETURN WITH NZ.LT.0 IF CABS(Z*Z/4).GT.FNU+N-NZ-1 COMPLETE +C THE CALCULATION IN CBINU WITH N=N-IABS(NZ) +C----------------------------------------------------------------------- + 190 CONTINUE + NZ = -NZ + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zshch.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zshch.f new file mode 100644 index 0000000..168e62e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zshch.f @@ -0,0 +1,22 @@ + SUBROUTINE ZSHCH(ZR, ZI, CSHR, CSHI, CCHR, CCHI) +C***BEGIN PROLOGUE ZSHCH +C***REFER TO ZBESK,ZBESH +C +C ZSHCH COMPUTES THE COMPLEX HYPERBOLIC FUNCTIONS CSH=SINH(X+I*Y) +C AND CCH=COSH(X+I*Y), WHERE I**2=-1. +C +C***ROUTINES CALLED (NONE) +C***END PROLOGUE ZSHCH +C + DOUBLE PRECISION CCHI, CCHR, CH, CN, CSHI, CSHR, SH, SN, ZI, ZR, + * DCOSH, DSINH + SH = DSINH(ZR) + CH = DCOSH(ZR) + SN = DSIN(ZI) + CN = DCOS(ZI) + CSHR = SH*CN + CSHI = CH*SN + CCHR = CH*CN + CCHI = SH*SN + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zsqrt.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zsqrt.f new file mode 100644 index 0000000..289545c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zsqrt.f @@ -0,0 +1,45 @@ + SUBROUTINE ZSQRT(AR, AI, BR, BI) +C***BEGIN PROLOGUE ZSQRT +C***REFER TO ZBESH,ZBESI,ZBESJ,ZBESK,ZBESY,ZAIRY,ZBIRY +C +C DOUBLE PRECISION COMPLEX SQUARE ROOT, B=CSQRT(A) +C +C***ROUTINES CALLED ZABS +C***END PROLOGUE ZSQRT + DOUBLE PRECISION AR, AI, BR, BI, ZM, DTHETA, DPI, DRT + DOUBLE PRECISION ZABS + + DATA DRT , DPI / 7.071067811865475244008443621D-1, + 1 3.141592653589793238462643383D+0/ + ZM = ZABS(CMPLX(AR,AI,kind=KIND(1.0D0))) + ZM = DSQRT(ZM) + IF (AR.EQ.0.0D+0) GO TO 10 + IF (AI.EQ.0.0D+0) GO TO 20 + DTHETA = DATAN(AI/AR) + IF (DTHETA.LE.0.0D+0) GO TO 40 + IF (AR.LT.0.0D+0) DTHETA = DTHETA - DPI + GO TO 50 + 10 IF (AI.GT.0.0D+0) GO TO 60 + IF (AI.LT.0.0D+0) GO TO 70 + BR = 0.0D+0 + BI = 0.0D+0 + RETURN + 20 IF (AR.GT.0.0D+0) GO TO 30 + BR = 0.0D+0 + BI = DSQRT(DABS(AR)) + RETURN + 30 BR = DSQRT(AR) + BI = 0.0D+0 + RETURN + 40 IF (AR.LT.0.0D+0) DTHETA = DTHETA + DPI + 50 DTHETA = DTHETA*0.5D+0 + BR = ZM*DCOS(DTHETA) + BI = ZM*DSIN(DTHETA) + RETURN + 60 BR = ZM*DRT + BI = ZM*DRT + RETURN + 70 BR = ZM*DRT + BI = -ZM*DRT + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuchk.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuchk.f new file mode 100644 index 0000000..d15dc84 --- /dev/null +++ 
b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuchk.f @@ -0,0 +1,28 @@ + SUBROUTINE ZUCHK(YR, YI, NZ, ASCLE, TOL) +C***BEGIN PROLOGUE ZUCHK +C***REFER TO ZSERI,ZUOIK,ZUNK1,ZUNK2,ZUNI1,ZUNI2,ZKSCL +C +C Y ENTERS AS A SCALED QUANTITY WHOSE MAGNITUDE IS GREATER THAN +C EXP(-ALIM)=ASCLE=1.0E+3*D1MACH(1)/TOL. THE TEST IS MADE TO SEE +C IF THE MAGNITUDE OF THE REAL OR IMAGINARY PART WOULD UNDERFLOW +C WHEN Y IS SCALED (BY TOL) TO ITS PROPER VALUE. Y IS ACCEPTED +C IF THE UNDERFLOW IS AT LEAST ONE PRECISION BELOW THE MAGNITUDE +C OF THE LARGEST COMPONENT; OTHERWISE THE PHASE ANGLE DOES NOT HAVE +C ABSOLUTE ACCURACY AND AN UNDERFLOW IS ASSUMED. +C +C***ROUTINES CALLED (NONE) +C***END PROLOGUE ZUCHK +C +C COMPLEX Y + DOUBLE PRECISION ASCLE, SS, ST, TOL, WR, WI, YR, YI + INTEGER NZ + NZ = 0 + WR = DABS(YR) + WI = DABS(YI) + ST = DMIN1(WR,WI) + IF (ST.GT.ASCLE) RETURN + SS = DMAX1(WR,WI) + ST = ST/TOL + IF (SS.LT.ST) NZ = 1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunhj.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunhj.f new file mode 100644 index 0000000..9b4df7a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunhj.f @@ -0,0 +1,714 @@ + SUBROUTINE ZUNHJ(ZR, ZI, FNU, IPMTR, TOL, PHIR, PHII, ARGR, ARGI, + * ZETA1R, ZETA1I, ZETA2R, ZETA2I, ASUMR, ASUMI, BSUMR, BSUMI) +C***BEGIN PROLOGUE ZUNHJ +C***REFER TO ZBESI,ZBESK +C +C REFERENCES +C HANDBOOK OF MATHEMATICAL FUNCTIONS BY M. ABRAMOWITZ AND I.A. +C STEGUN, AMS55, NATIONAL BUREAU OF STANDARDS, 1965, CHAPTER 9. +C +C ASYMPTOTICS AND SPECIAL FUNCTIONS BY F.W.J. OLVER, ACADEMIC +C PRESS, N.Y., 1974, PAGE 420 +C +C ABSTRACT +C ZUNHJ COMPUTES PARAMETERS FOR BESSEL FUNCTIONS C(FNU,Z) = +C J(FNU,Z), Y(FNU,Z) OR H(I,FNU,Z) I=1,2 FOR LARGE ORDERS FNU +C BY MEANS OF THE UNIFORM ASYMPTOTIC EXPANSION +C +C C(FNU,Z)=C1*PHI*( ASUM*AIRY(ARG) + C2*BSUM*DAIRY(ARG) ) +C +C FOR PROPER CHOICES OF C1, C2, AIRY AND DAIRY WHERE AIRY IS +C AN AIRY FUNCTION AND DAIRY IS ITS DERIVATIVE. +C +C (2/3)*FNU*ZETA**1.5 = ZETA1-ZETA2, +C +C ZETA1=0.5*FNU*CLOG((1+W)/(1-W)), ZETA2=FNU*W FOR SCALING +C PURPOSES IN AIRY FUNCTIONS FROM CAIRY OR CBIRY. +C +C MCONJ=SIGN OF AIMAG(Z), BUT IS AMBIGUOUS WHEN Z IS REAL AND +C MUST BE SPECIFIED. IPMTR=0 RETURNS ALL PARAMETERS. IPMTR= +C 1 COMPUTES ALL EXCEPT ASUM AND BSUM. 
+C +C***ROUTINES CALLED ZABS,ZDIV,ZLOG,ZSQRT,D1MACH +C***END PROLOGUE ZUNHJ +C COMPLEX ARG,ASUM,BSUM,CFNU,CONE,CR,CZERO,DR,P,PHI,PRZTH,PTFN, +C *RFN13,RTZTA,RZTH,SUMA,SUMB,TFN,T2,UP,W,W2,Z,ZA,ZB,ZC,ZETA,ZETA1, +C *ZETA2,ZTH + DOUBLE PRECISION ALFA, ANG, AP, AR, ARGI, ARGR, ASUMI, ASUMR, + * ATOL, AW2, AZTH, BETA, BR, BSUMI, BSUMR, BTOL, C, CONEI, CONER, + * CRI, CRR, DRI, DRR, EX1, EX2, FNU, FN13, FN23, GAMA, GPI, HPI, + * PHII, PHIR, PI, PP, PR, PRZTHI, PRZTHR, PTFNI, PTFNR, RAW, RAW2, + * RAZTH, RFNU, RFNU2, RFN13, RTZTI, RTZTR, RZTHI, RZTHR, STI, STR, + * SUMAI, SUMAR, SUMBI, SUMBR, TEST, TFNI, TFNR, THPI, TOL, TZAI, + * TZAR, T2I, T2R, UPI, UPR, WI, WR, W2I, W2R, ZAI, ZAR, ZBI, ZBR, + * ZCI, ZCR, ZEROI, ZEROR, ZETAI, ZETAR, ZETA1I, ZETA1R, ZETA2I, + * ZETA2R, ZI, ZR, ZTHI, ZTHR, ZABS, AC, D1MACH + INTEGER IAS, IBS, IPMTR, IS, J, JR, JU, K, KMAX, KP1, KS, L, LR, + * LRP1, L1, L2, M, IDUM + DIMENSION AR(14), BR(14), C(105), ALFA(180), BETA(210), GAMA(30), + * AP(30), PR(30), PI(30), UPR(14), UPI(14), CRR(14), CRI(14), + * DRR(14), DRI(14) + DATA AR(1), AR(2), AR(3), AR(4), AR(5), AR(6), AR(7), AR(8), + 1 AR(9), AR(10), AR(11), AR(12), AR(13), AR(14)/ + 2 1.00000000000000000D+00, 1.04166666666666667D-01, + 3 8.35503472222222222D-02, 1.28226574556327160D-01, + 4 2.91849026464140464D-01, 8.81627267443757652D-01, + 5 3.32140828186276754D+00, 1.49957629868625547D+01, + 6 7.89230130115865181D+01, 4.74451538868264323D+02, + 7 3.20749009089066193D+03, 2.40865496408740049D+04, + 8 1.98923119169509794D+05, 1.79190200777534383D+06/ + DATA BR(1), BR(2), BR(3), BR(4), BR(5), BR(6), BR(7), BR(8), + 1 BR(9), BR(10), BR(11), BR(12), BR(13), BR(14)/ + 2 1.00000000000000000D+00, -1.45833333333333333D-01, + 3 -9.87413194444444444D-02, -1.43312053915895062D-01, + 4 -3.17227202678413548D-01, -9.42429147957120249D-01, + 5 -3.51120304082635426D+00, -1.57272636203680451D+01, + 6 -8.22814390971859444D+01, -4.92355370523670524D+02, + 7 -3.31621856854797251D+03, -2.48276742452085896D+04, + 8 -2.04526587315129788D+05, -1.83844491706820990D+06/ + DATA C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8), C(9), C(10), + 1 C(11), C(12), C(13), C(14), C(15), C(16), C(17), C(18), + 2 C(19), C(20), C(21), C(22), C(23), C(24)/ + 3 1.00000000000000000D+00, -2.08333333333333333D-01, + 4 1.25000000000000000D-01, 3.34201388888888889D-01, + 5 -4.01041666666666667D-01, 7.03125000000000000D-02, + 6 -1.02581259645061728D+00, 1.84646267361111111D+00, + 7 -8.91210937500000000D-01, 7.32421875000000000D-02, + 8 4.66958442342624743D+00, -1.12070026162229938D+01, + 9 8.78912353515625000D+00, -2.36408691406250000D+00, + A 1.12152099609375000D-01, -2.82120725582002449D+01, + B 8.46362176746007346D+01, -9.18182415432400174D+01, + C 4.25349987453884549D+01, -7.36879435947963170D+00, + D 2.27108001708984375D-01, 2.12570130039217123D+02, + E -7.65252468141181642D+02, 1.05999045252799988D+03/ + DATA C(25), C(26), C(27), C(28), C(29), C(30), C(31), C(32), + 1 C(33), C(34), C(35), C(36), C(37), C(38), C(39), C(40), + 2 C(41), C(42), C(43), C(44), C(45), C(46), C(47), C(48)/ + 3 -6.99579627376132541D+02, 2.18190511744211590D+02, + 4 -2.64914304869515555D+01, 5.72501420974731445D-01, + 5 -1.91945766231840700D+03, 8.06172218173730938D+03, + 6 -1.35865500064341374D+04, 1.16553933368645332D+04, + 7 -5.30564697861340311D+03, 1.20090291321635246D+03, + 8 -1.08090919788394656D+02, 1.72772750258445740D+00, + 9 2.02042913309661486D+04, -9.69805983886375135D+04, + A 1.92547001232531532D+05, -2.03400177280415534D+05, + B 1.22200464983017460D+05, 
-4.11926549688975513D+04, + C 7.10951430248936372D+03, -4.93915304773088012D+02, + D 6.07404200127348304D+00, -2.42919187900551333D+05, + E 1.31176361466297720D+06, -2.99801591853810675D+06/ + DATA C(49), C(50), C(51), C(52), C(53), C(54), C(55), C(56), + 1 C(57), C(58), C(59), C(60), C(61), C(62), C(63), C(64), + 2 C(65), C(66), C(67), C(68), C(69), C(70), C(71), C(72)/ + 3 3.76327129765640400D+06, -2.81356322658653411D+06, + 4 1.26836527332162478D+06, -3.31645172484563578D+05, + 5 4.52187689813627263D+04, -2.49983048181120962D+03, + 6 2.43805296995560639D+01, 3.28446985307203782D+06, + 7 -1.97068191184322269D+07, 5.09526024926646422D+07, + 8 -7.41051482115326577D+07, 6.63445122747290267D+07, + 9 -3.75671766607633513D+07, 1.32887671664218183D+07, + A -2.78561812808645469D+06, 3.08186404612662398D+05, + B -1.38860897537170405D+04, 1.10017140269246738D+02, + C -4.93292536645099620D+07, 3.25573074185765749D+08, + D -9.39462359681578403D+08, 1.55359689957058006D+09, + E -1.62108055210833708D+09, 1.10684281682301447D+09/ + DATA C(73), C(74), C(75), C(76), C(77), C(78), C(79), C(80), + 1 C(81), C(82), C(83), C(84), C(85), C(86), C(87), C(88), + 2 C(89), C(90), C(91), C(92), C(93), C(94), C(95), C(96)/ + 3 -4.95889784275030309D+08, 1.42062907797533095D+08, + 4 -2.44740627257387285D+07, 2.24376817792244943D+06, + 5 -8.40054336030240853D+04, 5.51335896122020586D+02, + 6 8.14789096118312115D+08, -5.86648149205184723D+09, + 7 1.86882075092958249D+10, -3.46320433881587779D+10, + 8 4.12801855797539740D+10, -3.30265997498007231D+10, + 9 1.79542137311556001D+10, -6.56329379261928433D+09, + A 1.55927986487925751D+09, -2.25105661889415278D+08, + B 1.73951075539781645D+07, -5.49842327572288687D+05, + C 3.03809051092238427D+03, -1.46792612476956167D+10, + D 1.14498237732025810D+11, -3.99096175224466498D+11, + E 8.19218669548577329D+11, -1.09837515608122331D+12/ + DATA C(97), C(98), C(99), C(100), C(101), C(102), C(103), C(104), + 1 C(105)/ + 2 1.00815810686538209D+12, -6.45364869245376503D+11, + 3 2.87900649906150589D+11, -8.78670721780232657D+10, + 4 1.76347306068349694D+10, -2.16716498322379509D+09, + 5 1.43157876718888981D+08, -3.87183344257261262D+06, + 6 1.82577554742931747D+04/ + DATA ALFA(1), ALFA(2), ALFA(3), ALFA(4), ALFA(5), ALFA(6), + 1 ALFA(7), ALFA(8), ALFA(9), ALFA(10), ALFA(11), ALFA(12), + 2 ALFA(13), ALFA(14), ALFA(15), ALFA(16), ALFA(17), ALFA(18), + 3 ALFA(19), ALFA(20), ALFA(21), ALFA(22)/ + 4 -4.44444444444444444D-03, -9.22077922077922078D-04, + 5 -8.84892884892884893D-05, 1.65927687832449737D-04, + 6 2.46691372741792910D-04, 2.65995589346254780D-04, + 7 2.61824297061500945D-04, 2.48730437344655609D-04, + 8 2.32721040083232098D-04, 2.16362485712365082D-04, + 9 2.00738858762752355D-04, 1.86267636637545172D-04, + A 1.73060775917876493D-04, 1.61091705929015752D-04, + B 1.50274774160908134D-04, 1.40503497391269794D-04, + C 1.31668816545922806D-04, 1.23667445598253261D-04, + D 1.16405271474737902D-04, 1.09798298372713369D-04, + E 1.03772410422992823D-04, 9.82626078369363448D-05/ + DATA ALFA(23), ALFA(24), ALFA(25), ALFA(26), ALFA(27), ALFA(28), + 1 ALFA(29), ALFA(30), ALFA(31), ALFA(32), ALFA(33), ALFA(34), + 2 ALFA(35), ALFA(36), ALFA(37), ALFA(38), ALFA(39), ALFA(40), + 3 ALFA(41), ALFA(42), ALFA(43), ALFA(44)/ + 4 9.32120517249503256D-05, 8.85710852478711718D-05, + 5 8.42963105715700223D-05, 8.03497548407791151D-05, + 6 7.66981345359207388D-05, 7.33122157481777809D-05, + 7 7.01662625163141333D-05, 6.72375633790160292D-05, + 8 6.93735541354588974D-04, 2.32241745182921654D-04, + 9 
-1.41986273556691197D-05, -1.16444931672048640D-04, + A -1.50803558053048762D-04, -1.55121924918096223D-04, + B -1.46809756646465549D-04, -1.33815503867491367D-04, + C -1.19744975684254051D-04, -1.06184319207974020D-04, + D -9.37699549891194492D-05, -8.26923045588193274D-05, + E -7.29374348155221211D-05, -6.44042357721016283D-05/ + DATA ALFA(45), ALFA(46), ALFA(47), ALFA(48), ALFA(49), ALFA(50), + 1 ALFA(51), ALFA(52), ALFA(53), ALFA(54), ALFA(55), ALFA(56), + 2 ALFA(57), ALFA(58), ALFA(59), ALFA(60), ALFA(61), ALFA(62), + 3 ALFA(63), ALFA(64), ALFA(65), ALFA(66)/ + 4 -5.69611566009369048D-05, -5.04731044303561628D-05, + 5 -4.48134868008882786D-05, -3.98688727717598864D-05, + 6 -3.55400532972042498D-05, -3.17414256609022480D-05, + 7 -2.83996793904174811D-05, -2.54522720634870566D-05, + 8 -2.28459297164724555D-05, -2.05352753106480604D-05, + 9 -1.84816217627666085D-05, -1.66519330021393806D-05, + A -1.50179412980119482D-05, -1.35554031379040526D-05, + B -1.22434746473858131D-05, -1.10641884811308169D-05, + C -3.54211971457743841D-04, -1.56161263945159416D-04, + D 3.04465503594936410D-05, 1.30198655773242693D-04, + E 1.67471106699712269D-04, 1.70222587683592569D-04/ + DATA ALFA(67), ALFA(68), ALFA(69), ALFA(70), ALFA(71), ALFA(72), + 1 ALFA(73), ALFA(74), ALFA(75), ALFA(76), ALFA(77), ALFA(78), + 2 ALFA(79), ALFA(80), ALFA(81), ALFA(82), ALFA(83), ALFA(84), + 3 ALFA(85), ALFA(86), ALFA(87), ALFA(88)/ + 4 1.56501427608594704D-04, 1.36339170977445120D-04, + 5 1.14886692029825128D-04, 9.45869093034688111D-05, + 6 7.64498419250898258D-05, 6.07570334965197354D-05, + 7 4.74394299290508799D-05, 3.62757512005344297D-05, + 8 2.69939714979224901D-05, 1.93210938247939253D-05, + 9 1.30056674793963203D-05, 7.82620866744496661D-06, + A 3.59257485819351583D-06, 1.44040049814251817D-07, + B -2.65396769697939116D-06, -4.91346867098485910D-06, + C -6.72739296091248287D-06, -8.17269379678657923D-06, + D -9.31304715093561232D-06, -1.02011418798016441D-05, + E -1.08805962510592880D-05, -1.13875481509603555D-05/ + DATA ALFA(89), ALFA(90), ALFA(91), ALFA(92), ALFA(93), ALFA(94), + 1 ALFA(95), ALFA(96), ALFA(97), ALFA(98), ALFA(99), ALFA(100), + 2 ALFA(101), ALFA(102), ALFA(103), ALFA(104), ALFA(105), + 3 ALFA(106), ALFA(107), ALFA(108), ALFA(109), ALFA(110)/ + 4 -1.17519675674556414D-05, -1.19987364870944141D-05, + 5 3.78194199201772914D-04, 2.02471952761816167D-04, + 6 -6.37938506318862408D-05, -2.38598230603005903D-04, + 7 -3.10916256027361568D-04, -3.13680115247576316D-04, + 8 -2.78950273791323387D-04, -2.28564082619141374D-04, + 9 -1.75245280340846749D-04, -1.25544063060690348D-04, + A -8.22982872820208365D-05, -4.62860730588116458D-05, + B -1.72334302366962267D-05, 5.60690482304602267D-06, + C 2.31395443148286800D-05, 3.62642745856793957D-05, + D 4.58006124490188752D-05, 5.24595294959114050D-05, + E 5.68396208545815266D-05, 5.94349820393104052D-05/ + DATA ALFA(111), ALFA(112), ALFA(113), ALFA(114), ALFA(115), + 1 ALFA(116), ALFA(117), ALFA(118), ALFA(119), ALFA(120), + 2 ALFA(121), ALFA(122), ALFA(123), ALFA(124), ALFA(125), + 3 ALFA(126), ALFA(127), ALFA(128), ALFA(129), ALFA(130)/ + 4 6.06478527578421742D-05, 6.08023907788436497D-05, + 5 6.01577894539460388D-05, 5.89199657344698500D-05, + 6 5.72515823777593053D-05, 5.52804375585852577D-05, + 7 5.31063773802880170D-05, 5.08069302012325706D-05, + 8 4.84418647620094842D-05, 4.60568581607475370D-05, + 9 -6.91141397288294174D-04, -4.29976633058871912D-04, + A 1.83067735980039018D-04, 6.60088147542014144D-04, + B 8.75964969951185931D-04, 8.77335235958235514D-04, 
+ C 7.49369585378990637D-04, 5.63832329756980918D-04, + D 3.68059319971443156D-04, 1.88464535514455599D-04/ + DATA ALFA(131), ALFA(132), ALFA(133), ALFA(134), ALFA(135), + 1 ALFA(136), ALFA(137), ALFA(138), ALFA(139), ALFA(140), + 2 ALFA(141), ALFA(142), ALFA(143), ALFA(144), ALFA(145), + 3 ALFA(146), ALFA(147), ALFA(148), ALFA(149), ALFA(150)/ + 4 3.70663057664904149D-05, -8.28520220232137023D-05, + 5 -1.72751952869172998D-04, -2.36314873605872983D-04, + 6 -2.77966150694906658D-04, -3.02079514155456919D-04, + 7 -3.12594712643820127D-04, -3.12872558758067163D-04, + 8 -3.05678038466324377D-04, -2.93226470614557331D-04, + 9 -2.77255655582934777D-04, -2.59103928467031709D-04, + A -2.39784014396480342D-04, -2.20048260045422848D-04, + B -2.00443911094971498D-04, -1.81358692210970687D-04, + C -1.63057674478657464D-04, -1.45712672175205844D-04, + D -1.29425421983924587D-04, -1.14245691942445952D-04/ + DATA ALFA(151), ALFA(152), ALFA(153), ALFA(154), ALFA(155), + 1 ALFA(156), ALFA(157), ALFA(158), ALFA(159), ALFA(160), + 2 ALFA(161), ALFA(162), ALFA(163), ALFA(164), ALFA(165), + 3 ALFA(166), ALFA(167), ALFA(168), ALFA(169), ALFA(170)/ + 4 1.92821964248775885D-03, 1.35592576302022234D-03, + 5 -7.17858090421302995D-04, -2.58084802575270346D-03, + 6 -3.49271130826168475D-03, -3.46986299340960628D-03, + 7 -2.82285233351310182D-03, -1.88103076404891354D-03, + 8 -8.89531718383947600D-04, 3.87912102631035228D-06, + 9 7.28688540119691412D-04, 1.26566373053457758D-03, + A 1.62518158372674427D-03, 1.83203153216373172D-03, + B 1.91588388990527909D-03, 1.90588846755546138D-03, + C 1.82798982421825727D-03, 1.70389506421121530D-03, + D 1.55097127171097686D-03, 1.38261421852276159D-03/ + DATA ALFA(171), ALFA(172), ALFA(173), ALFA(174), ALFA(175), + 1 ALFA(176), ALFA(177), ALFA(178), ALFA(179), ALFA(180)/ + 2 1.20881424230064774D-03, 1.03676532638344962D-03, + 3 8.71437918068619115D-04, 7.16080155297701002D-04, + 4 5.72637002558129372D-04, 4.42089819465802277D-04, + 5 3.24724948503090564D-04, 2.20342042730246599D-04, + 6 1.28412898401353882D-04, 4.82005924552095464D-05/ + DATA BETA(1), BETA(2), BETA(3), BETA(4), BETA(5), BETA(6), + 1 BETA(7), BETA(8), BETA(9), BETA(10), BETA(11), BETA(12), + 2 BETA(13), BETA(14), BETA(15), BETA(16), BETA(17), BETA(18), + 3 BETA(19), BETA(20), BETA(21), BETA(22)/ + 4 1.79988721413553309D-02, 5.59964911064388073D-03, + 5 2.88501402231132779D-03, 1.80096606761053941D-03, + 6 1.24753110589199202D-03, 9.22878876572938311D-04, + 7 7.14430421727287357D-04, 5.71787281789704872D-04, + 8 4.69431007606481533D-04, 3.93232835462916638D-04, + 9 3.34818889318297664D-04, 2.88952148495751517D-04, + A 2.52211615549573284D-04, 2.22280580798883327D-04, + B 1.97541838033062524D-04, 1.76836855019718004D-04, + C 1.59316899661821081D-04, 1.44347930197333986D-04, + D 1.31448068119965379D-04, 1.20245444949302884D-04, + E 1.10449144504599392D-04, 1.01828770740567258D-04/ + DATA BETA(23), BETA(24), BETA(25), BETA(26), BETA(27), BETA(28), + 1 BETA(29), BETA(30), BETA(31), BETA(32), BETA(33), BETA(34), + 2 BETA(35), BETA(36), BETA(37), BETA(38), BETA(39), BETA(40), + 3 BETA(41), BETA(42), BETA(43), BETA(44)/ + 4 9.41998224204237509D-05, 8.74130545753834437D-05, + 5 8.13466262162801467D-05, 7.59002269646219339D-05, + 6 7.09906300634153481D-05, 6.65482874842468183D-05, + 7 6.25146958969275078D-05, 5.88403394426251749D-05, + 8 -1.49282953213429172D-03, -8.78204709546389328D-04, + 9 -5.02916549572034614D-04, -2.94822138512746025D-04, + A -1.75463996970782828D-04, -1.04008550460816434D-04, + B 
-5.96141953046457895D-05, -3.12038929076098340D-05, + C -1.26089735980230047D-05, -2.42892608575730389D-07, + D 8.05996165414273571D-06, 1.36507009262147391D-05, + E 1.73964125472926261D-05, 1.98672978842133780D-05/ + DATA BETA(45), BETA(46), BETA(47), BETA(48), BETA(49), BETA(50), + 1 BETA(51), BETA(52), BETA(53), BETA(54), BETA(55), BETA(56), + 2 BETA(57), BETA(58), BETA(59), BETA(60), BETA(61), BETA(62), + 3 BETA(63), BETA(64), BETA(65), BETA(66)/ + 4 2.14463263790822639D-05, 2.23954659232456514D-05, + 5 2.28967783814712629D-05, 2.30785389811177817D-05, + 6 2.30321976080909144D-05, 2.28236073720348722D-05, + 7 2.25005881105292418D-05, 2.20981015361991429D-05, + 8 2.16418427448103905D-05, 2.11507649256220843D-05, + 9 2.06388749782170737D-05, 2.01165241997081666D-05, + A 1.95913450141179244D-05, 1.90689367910436740D-05, + B 1.85533719641636667D-05, 1.80475722259674218D-05, + C 5.52213076721292790D-04, 4.47932581552384646D-04, + D 2.79520653992020589D-04, 1.52468156198446602D-04, + E 6.93271105657043598D-05, 1.76258683069991397D-05/ + DATA BETA(67), BETA(68), BETA(69), BETA(70), BETA(71), BETA(72), + 1 BETA(73), BETA(74), BETA(75), BETA(76), BETA(77), BETA(78), + 2 BETA(79), BETA(80), BETA(81), BETA(82), BETA(83), BETA(84), + 3 BETA(85), BETA(86), BETA(87), BETA(88)/ + 4 -1.35744996343269136D-05, -3.17972413350427135D-05, + 5 -4.18861861696693365D-05, -4.69004889379141029D-05, + 6 -4.87665447413787352D-05, -4.87010031186735069D-05, + 7 -4.74755620890086638D-05, -4.55813058138628452D-05, + 8 -4.33309644511266036D-05, -4.09230193157750364D-05, + 9 -3.84822638603221274D-05, -3.60857167535410501D-05, + A -3.37793306123367417D-05, -3.15888560772109621D-05, + B -2.95269561750807315D-05, -2.75978914828335759D-05, + C -2.58006174666883713D-05, -2.41308356761280200D-05, + D -2.25823509518346033D-05, -2.11479656768912971D-05, + E -1.98200638885294927D-05, -1.85909870801065077D-05/ + DATA BETA(89), BETA(90), BETA(91), BETA(92), BETA(93), BETA(94), + 1 BETA(95), BETA(96), BETA(97), BETA(98), BETA(99), BETA(100), + 2 BETA(101), BETA(102), BETA(103), BETA(104), BETA(105), + 3 BETA(106), BETA(107), BETA(108), BETA(109), BETA(110)/ + 4 -1.74532699844210224D-05, -1.63997823854497997D-05, + 5 -4.74617796559959808D-04, -4.77864567147321487D-04, + 6 -3.20390228067037603D-04, -1.61105016119962282D-04, + 7 -4.25778101285435204D-05, 3.44571294294967503D-05, + 8 7.97092684075674924D-05, 1.03138236708272200D-04, + 9 1.12466775262204158D-04, 1.13103642108481389D-04, + A 1.08651634848774268D-04, 1.01437951597661973D-04, + B 9.29298396593363896D-05, 8.40293133016089978D-05, + C 7.52727991349134062D-05, 6.69632521975730872D-05, + D 5.92564547323194704D-05, 5.22169308826975567D-05, + E 4.58539485165360646D-05, 4.01445513891486808D-05/ + DATA BETA(111), BETA(112), BETA(113), BETA(114), BETA(115), + 1 BETA(116), BETA(117), BETA(118), BETA(119), BETA(120), + 2 BETA(121), BETA(122), BETA(123), BETA(124), BETA(125), + 3 BETA(126), BETA(127), BETA(128), BETA(129), BETA(130)/ + 4 3.50481730031328081D-05, 3.05157995034346659D-05, + 5 2.64956119950516039D-05, 2.29363633690998152D-05, + 6 1.97893056664021636D-05, 1.70091984636412623D-05, + 7 1.45547428261524004D-05, 1.23886640995878413D-05, + 8 1.04775876076583236D-05, 8.79179954978479373D-06, + 9 7.36465810572578444D-04, 8.72790805146193976D-04, + A 6.22614862573135066D-04, 2.85998154194304147D-04, + B 3.84737672879366102D-06, -1.87906003636971558D-04, + C -2.97603646594554535D-04, -3.45998126832656348D-04, + D -3.53382470916037712D-04, -3.35715635775048757D-04/ + DATA 
BETA(131), BETA(132), BETA(133), BETA(134), BETA(135), + 1 BETA(136), BETA(137), BETA(138), BETA(139), BETA(140), + 2 BETA(141), BETA(142), BETA(143), BETA(144), BETA(145), + 3 BETA(146), BETA(147), BETA(148), BETA(149), BETA(150)/ + 4 -3.04321124789039809D-04, -2.66722723047612821D-04, + 5 -2.27654214122819527D-04, -1.89922611854562356D-04, + 6 -1.55058918599093870D-04, -1.23778240761873630D-04, + 7 -9.62926147717644187D-05, -7.25178327714425337D-05, + 8 -5.22070028895633801D-05, -3.50347750511900522D-05, + 9 -2.06489761035551757D-05, -8.70106096849767054D-06, + A 1.13698686675100290D-06, 9.16426474122778849D-06, + B 1.56477785428872620D-05, 2.08223629482466847D-05, + C 2.48923381004595156D-05, 2.80340509574146325D-05, + D 3.03987774629861915D-05, 3.21156731406700616D-05/ + DATA BETA(151), BETA(152), BETA(153), BETA(154), BETA(155), + 1 BETA(156), BETA(157), BETA(158), BETA(159), BETA(160), + 2 BETA(161), BETA(162), BETA(163), BETA(164), BETA(165), + 3 BETA(166), BETA(167), BETA(168), BETA(169), BETA(170)/ + 4 -1.80182191963885708D-03, -2.43402962938042533D-03, + 5 -1.83422663549856802D-03, -7.62204596354009765D-04, + 6 2.39079475256927218D-04, 9.49266117176881141D-04, + 7 1.34467449701540359D-03, 1.48457495259449178D-03, + 8 1.44732339830617591D-03, 1.30268261285657186D-03, + 9 1.10351597375642682D-03, 8.86047440419791759D-04, + A 6.73073208165665473D-04, 4.77603872856582378D-04, + B 3.05991926358789362D-04, 1.60315694594721630D-04, + C 4.00749555270613286D-05, -5.66607461635251611D-05, + D -1.32506186772982638D-04, -1.90296187989614057D-04/ + DATA BETA(171), BETA(172), BETA(173), BETA(174), BETA(175), + 1 BETA(176), BETA(177), BETA(178), BETA(179), BETA(180), + 2 BETA(181), BETA(182), BETA(183), BETA(184), BETA(185), + 3 BETA(186), BETA(187), BETA(188), BETA(189), BETA(190)/ + 4 -2.32811450376937408D-04, -2.62628811464668841D-04, + 5 -2.82050469867598672D-04, -2.93081563192861167D-04, + 6 -2.97435962176316616D-04, -2.96557334239348078D-04, + 7 -2.91647363312090861D-04, -2.83696203837734166D-04, + 8 -2.73512317095673346D-04, -2.61750155806768580D-04, + 9 6.38585891212050914D-03, 9.62374215806377941D-03, + A 7.61878061207001043D-03, 2.83219055545628054D-03, + B -2.09841352012720090D-03, -5.73826764216626498D-03, + C -7.70804244495414620D-03, -8.21011692264844401D-03, + D -7.65824520346905413D-03, -6.47209729391045177D-03/ + DATA BETA(191), BETA(192), BETA(193), BETA(194), BETA(195), + 1 BETA(196), BETA(197), BETA(198), BETA(199), BETA(200), + 2 BETA(201), BETA(202), BETA(203), BETA(204), BETA(205), + 3 BETA(206), BETA(207), BETA(208), BETA(209), BETA(210)/ + 4 -4.99132412004966473D-03, -3.45612289713133280D-03, + 5 -2.01785580014170775D-03, -7.59430686781961401D-04, + 6 2.84173631523859138D-04, 1.10891667586337403D-03, + 7 1.72901493872728771D-03, 2.16812590802684701D-03, + 8 2.45357710494539735D-03, 2.61281821058334862D-03, + 9 2.67141039656276912D-03, 2.65203073395980430D-03, + A 2.57411652877287315D-03, 2.45389126236094427D-03, + B 2.30460058071795494D-03, 2.13684837686712662D-03, + C 1.95896528478870911D-03, 1.77737008679454412D-03, + D 1.59690280765839059D-03, 1.42111975664438546D-03/ + DATA GAMA(1), GAMA(2), GAMA(3), GAMA(4), GAMA(5), GAMA(6), + 1 GAMA(7), GAMA(8), GAMA(9), GAMA(10), GAMA(11), GAMA(12), + 2 GAMA(13), GAMA(14), GAMA(15), GAMA(16), GAMA(17), GAMA(18), + 3 GAMA(19), GAMA(20), GAMA(21), GAMA(22)/ + 4 6.29960524947436582D-01, 2.51984209978974633D-01, + 5 1.54790300415655846D-01, 1.10713062416159013D-01, + 6 8.57309395527394825D-02, 6.97161316958684292D-02, + 7 
5.86085671893713576D-02, 5.04698873536310685D-02, + 8 4.42600580689154809D-02, 3.93720661543509966D-02, + 9 3.54283195924455368D-02, 3.21818857502098231D-02, + A 2.94646240791157679D-02, 2.71581677112934479D-02, + B 2.51768272973861779D-02, 2.34570755306078891D-02, + C 2.19508390134907203D-02, 2.06210828235646240D-02, + D 1.94388240897880846D-02, 1.83810633800683158D-02, + E 1.74293213231963172D-02, 1.65685837786612353D-02/ + DATA GAMA(23), GAMA(24), GAMA(25), GAMA(26), GAMA(27), GAMA(28), + 1 GAMA(29), GAMA(30)/ + 2 1.57865285987918445D-02, 1.50729501494095594D-02, + 3 1.44193250839954639D-02, 1.38184805735341786D-02, + 4 1.32643378994276568D-02, 1.27517121970498651D-02, + 5 1.22761545318762767D-02, 1.18338262398482403D-02/ + DATA EX1, EX2, HPI, GPI, THPI / + 1 3.33333333333333333D-01, 6.66666666666666667D-01, + 2 1.57079632679489662D+00, 3.14159265358979324D+00, + 3 4.71238898038468986D+00/ + DATA ZEROR,ZEROI,CONER,CONEI / 0.0D0, 0.0D0, 1.0D0, 0.0D0 / +C + RFNU = 1.0D0/FNU +C----------------------------------------------------------------------- +C OVERFLOW TEST (Z/FNU TOO SMALL) +C----------------------------------------------------------------------- + TEST = D1MACH(1)*1.0D+3 + AC = FNU*TEST + IF (DABS(ZR).GT.AC .OR. DABS(ZI).GT.AC) GO TO 15 + ZETA1R = 2.0D0*DABS(DLOG(TEST))+FNU + ZETA1I = 0.0D0 + ZETA2R = FNU + ZETA2I = 0.0D0 + PHIR = 1.0D0 + PHII = 0.0D0 + ARGR = 1.0D0 + ARGI = 0.0D0 + RETURN + 15 CONTINUE + ZBR = ZR*RFNU + ZBI = ZI*RFNU + RFNU2 = RFNU*RFNU +C----------------------------------------------------------------------- +C COMPUTE IN THE FOURTH QUADRANT +C----------------------------------------------------------------------- + FN13 = FNU**EX1 + FN23 = FN13*FN13 + RFN13 = 1.0D0/FN13 + W2R = CONER - ZBR*ZBR + ZBI*ZBI + W2I = CONEI - ZBR*ZBI - ZBR*ZBI + AW2 = ZABS(CMPLX(W2R,W2I,kind=KIND(1.0D0))) + IF (AW2.GT.0.25D0) GO TO 130 +C----------------------------------------------------------------------- +C POWER SERIES FOR CABS(W2).LE.0.25D0 +C----------------------------------------------------------------------- + K = 1 + PR(1) = CONER + PI(1) = CONEI + SUMAR = GAMA(1) + SUMAI = ZEROI + AP(1) = 1.0D0 + IF (AW2.LT.TOL) GO TO 20 + DO 10 K=2,30 + PR(K) = PR(K-1)*W2R - PI(K-1)*W2I + PI(K) = PR(K-1)*W2I + PI(K-1)*W2R + SUMAR = SUMAR + PR(K)*GAMA(K) + SUMAI = SUMAI + PI(K)*GAMA(K) + AP(K) = AP(K-1)*AW2 + IF (AP(K).LT.TOL) GO TO 20 + 10 CONTINUE + K = 30 + 20 CONTINUE + KMAX = K + ZETAR = W2R*SUMAR - W2I*SUMAI + ZETAI = W2R*SUMAI + W2I*SUMAR + ARGR = ZETAR*FN23 + ARGI = ZETAI*FN23 + CALL ZSQRT(SUMAR, SUMAI, ZAR, ZAI) + CALL ZSQRT(W2R, W2I, STR, STI) + ZETA2R = STR*FNU + ZETA2I = STI*FNU + STR = CONER + EX2*(ZETAR*ZAR-ZETAI*ZAI) + STI = CONEI + EX2*(ZETAR*ZAI+ZETAI*ZAR) + ZETA1R = STR*ZETA2R - STI*ZETA2I + ZETA1I = STR*ZETA2I + STI*ZETA2R + ZAR = ZAR + ZAR + ZAI = ZAI + ZAI + CALL ZSQRT(ZAR, ZAI, STR, STI) + PHIR = STR*RFN13 + PHII = STI*RFN13 + IF (IPMTR.EQ.1) GO TO 120 +C----------------------------------------------------------------------- +C SUM SERIES FOR ASUM AND BSUM +C----------------------------------------------------------------------- + SUMBR = ZEROR + SUMBI = ZEROI + DO 30 K=1,KMAX + SUMBR = SUMBR + PR(K)*BETA(K) + SUMBI = SUMBI + PI(K)*BETA(K) + 30 CONTINUE + ASUMR = ZEROR + ASUMI = ZEROI + BSUMR = SUMBR + BSUMI = SUMBI + L1 = 0 + L2 = 30 + BTOL = TOL*(DABS(BSUMR)+DABS(BSUMI)) + ATOL = TOL + PP = 1.0D0 + IAS = 0 + IBS = 0 + IF (RFNU2.LT.TOL) GO TO 110 + DO 100 IS=2,7 + ATOL = ATOL/RFNU2 + PP = PP*RFNU2 + IF (IAS.EQ.1) GO TO 60 + SUMAR = ZEROR + SUMAI = ZEROI + DO 40 
K=1,KMAX + M = L1 + K + SUMAR = SUMAR + PR(K)*ALFA(M) + SUMAI = SUMAI + PI(K)*ALFA(M) + IF (AP(K).LT.ATOL) GO TO 50 + 40 CONTINUE + 50 CONTINUE + ASUMR = ASUMR + SUMAR*PP + ASUMI = ASUMI + SUMAI*PP + IF (PP.LT.TOL) IAS = 1 + 60 CONTINUE + IF (IBS.EQ.1) GO TO 90 + SUMBR = ZEROR + SUMBI = ZEROI + DO 70 K=1,KMAX + M = L2 + K + SUMBR = SUMBR + PR(K)*BETA(M) + SUMBI = SUMBI + PI(K)*BETA(M) + IF (AP(K).LT.ATOL) GO TO 80 + 70 CONTINUE + 80 CONTINUE + BSUMR = BSUMR + SUMBR*PP + BSUMI = BSUMI + SUMBI*PP + IF (PP.LT.BTOL) IBS = 1 + 90 CONTINUE + IF (IAS.EQ.1 .AND. IBS.EQ.1) GO TO 110 + L1 = L1 + 30 + L2 = L2 + 30 + 100 CONTINUE + 110 CONTINUE + ASUMR = ASUMR + CONER + PP = RFNU*RFN13 + BSUMR = BSUMR*PP + BSUMI = BSUMI*PP + 120 CONTINUE + RETURN +C----------------------------------------------------------------------- +C CABS(W2).GT.0.25D0 +C----------------------------------------------------------------------- + 130 CONTINUE + CALL ZSQRT(W2R, W2I, WR, WI) + IF (WR.LT.0.0D0) WR = 0.0D0 + IF (WI.LT.0.0D0) WI = 0.0D0 + STR = CONER + WR + STI = WI + CALL ZDIV(STR, STI, ZBR, ZBI, ZAR, ZAI) + CALL ZLOG(ZAR, ZAI, ZCR, ZCI, IDUM) + IF (ZCI.LT.0.0D0) ZCI = 0.0D0 + IF (ZCI.GT.HPI) ZCI = HPI + IF (ZCR.LT.0.0D0) ZCR = 0.0D0 + ZTHR = (ZCR-WR)*1.5D0 + ZTHI = (ZCI-WI)*1.5D0 + ZETA1R = ZCR*FNU + ZETA1I = ZCI*FNU + ZETA2R = WR*FNU + ZETA2I = WI*FNU + AZTH = ZABS(CMPLX(ZTHR,ZTHI,kind=KIND(1.0D0))) + ANG = THPI + IF (ZTHR.GE.0.0D0 .AND. ZTHI.LT.0.0D0) GO TO 140 + ANG = HPI + IF (ZTHR.EQ.0.0D0) GO TO 140 + ANG = DATAN(ZTHI/ZTHR) + IF (ZTHR.LT.0.0D0) ANG = ANG + GPI + 140 CONTINUE + PP = AZTH**EX2 + ANG = ANG*EX2 + ZETAR = PP*DCOS(ANG) + ZETAI = PP*DSIN(ANG) + IF (ZETAI.LT.0.0D0) ZETAI = 0.0D0 + ARGR = ZETAR*FN23 + ARGI = ZETAI*FN23 + CALL ZDIV(ZTHR, ZTHI, ZETAR, ZETAI, RTZTR, RTZTI) + CALL ZDIV(RTZTR, RTZTI, WR, WI, ZAR, ZAI) + TZAR = ZAR + ZAR + TZAI = ZAI + ZAI + CALL ZSQRT(TZAR, TZAI, STR, STI) + PHIR = STR*RFN13 + PHII = STI*RFN13 + IF (IPMTR.EQ.1) GO TO 120 + RAW = 1.0D0/DSQRT(AW2) + STR = WR*RAW + STI = -WI*RAW + TFNR = STR*RFNU*RAW + TFNI = STI*RFNU*RAW + RAZTH = 1.0D0/AZTH + STR = ZTHR*RAZTH + STI = -ZTHI*RAZTH + RZTHR = STR*RAZTH*RFNU + RZTHI = STI*RAZTH*RFNU + ZCR = RZTHR*AR(2) + ZCI = RZTHI*AR(2) + RAW2 = 1.0D0/AW2 + STR = W2R*RAW2 + STI = -W2I*RAW2 + T2R = STR*RAW2 + T2I = STI*RAW2 + STR = T2R*C(2) + C(3) + STI = T2I*C(2) + UPR(2) = STR*TFNR - STI*TFNI + UPI(2) = STR*TFNI + STI*TFNR + BSUMR = UPR(2) + ZCR + BSUMI = UPI(2) + ZCI + ASUMR = ZEROR + ASUMI = ZEROI + IF (RFNU.LT.TOL) GO TO 220 + PRZTHR = RZTHR + PRZTHI = RZTHI + PTFNR = TFNR + PTFNI = TFNI + UPR(1) = CONER + UPI(1) = CONEI + PP = 1.0D0 + BTOL = TOL*(DABS(BSUMR)+DABS(BSUMI)) + KS = 0 + KP1 = 2 + L = 3 + IAS = 0 + IBS = 0 + DO 210 LR=2,12,2 + LRP1 = LR + 1 +C----------------------------------------------------------------------- +C COMPUTE TWO ADDITIONAL CR, DR, AND UP FOR TWO MORE TERMS IN +C NEXT SUMA AND SUMB +C----------------------------------------------------------------------- + DO 160 K=LR,LRP1 + KS = KS + 1 + KP1 = KP1 + 1 + L = L + 1 + ZAR = C(L) + ZAI = ZEROI + DO 150 J=2,KP1 + L = L + 1 + STR = ZAR*T2R - T2I*ZAI + C(L) + ZAI = ZAR*T2I + ZAI*T2R + ZAR = STR + 150 CONTINUE + STR = PTFNR*TFNR - PTFNI*TFNI + PTFNI = PTFNR*TFNI + PTFNI*TFNR + PTFNR = STR + UPR(KP1) = PTFNR*ZAR - PTFNI*ZAI + UPI(KP1) = PTFNI*ZAR + PTFNR*ZAI + CRR(KS) = PRZTHR*BR(KS+1) + CRI(KS) = PRZTHI*BR(KS+1) + STR = PRZTHR*RZTHR - PRZTHI*RZTHI + PRZTHI = PRZTHR*RZTHI + PRZTHI*RZTHR + PRZTHR = STR + DRR(KS) = PRZTHR*AR(KS+2) + DRI(KS) = PRZTHI*AR(KS+2) + 160 CONTINUE + 
PP = PP*RFNU2 + IF (IAS.EQ.1) GO TO 180 + SUMAR = UPR(LRP1) + SUMAI = UPI(LRP1) + JU = LRP1 + DO 170 JR=1,LR + JU = JU - 1 + SUMAR = SUMAR + CRR(JR)*UPR(JU) - CRI(JR)*UPI(JU) + SUMAI = SUMAI + CRR(JR)*UPI(JU) + CRI(JR)*UPR(JU) + 170 CONTINUE + ASUMR = ASUMR + SUMAR + ASUMI = ASUMI + SUMAI + TEST = DABS(SUMAR) + DABS(SUMAI) + IF (PP.LT.TOL .AND. TEST.LT.TOL) IAS = 1 + 180 CONTINUE + IF (IBS.EQ.1) GO TO 200 + SUMBR = UPR(LR+2) + UPR(LRP1)*ZCR - UPI(LRP1)*ZCI + SUMBI = UPI(LR+2) + UPR(LRP1)*ZCI + UPI(LRP1)*ZCR + JU = LRP1 + DO 190 JR=1,LR + JU = JU - 1 + SUMBR = SUMBR + DRR(JR)*UPR(JU) - DRI(JR)*UPI(JU) + SUMBI = SUMBI + DRR(JR)*UPI(JU) + DRI(JR)*UPR(JU) + 190 CONTINUE + BSUMR = BSUMR + SUMBR + BSUMI = BSUMI + SUMBI + TEST = DABS(SUMBR) + DABS(SUMBI) + IF (PP.LT.BTOL .AND. TEST.LT.BTOL) IBS = 1 + 200 CONTINUE + IF (IAS.EQ.1 .AND. IBS.EQ.1) GO TO 220 + 210 CONTINUE + 220 CONTINUE + ASUMR = ASUMR + CONER + STR = -BSUMR*RFN13 + STI = -BSUMI*RFN13 + CALL ZDIV(STR, STI, RTZTR, RTZTI, BSUMR, BSUMI) + GO TO 120 + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni1.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni1.f new file mode 100644 index 0000000..f3a0f24 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni1.f @@ -0,0 +1,204 @@ + SUBROUTINE ZUNI1(ZR, ZI, FNU, KODE, N, YR, YI, NZ, NLAST, FNUL, + * TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZUNI1 +C***REFER TO ZBESI,ZBESK +C +C ZUNI1 COMPUTES I(FNU,Z) BY MEANS OF THE UNIFORM ASYMPTOTIC +C EXPANSION FOR I(FNU,Z) IN -PI/3.LE.ARG Z.LE.PI/3. +C +C FNUL IS THE SMALLEST ORDER PERMITTED FOR THE ASYMPTOTIC +C EXPANSION. NLAST=0 MEANS ALL OF THE Y VALUES WERE SET. +C NLAST.NE.0 IS THE NUMBER LEFT TO BE COMPUTED BY ANOTHER +C FORMULA FOR ORDERS FNU TO FNU+NLAST-1 BECAUSE FNU+NLAST-1.LT.FNUL. 
+C Y(I)=CZERO FOR I=NLAST+1,N +C +C***ROUTINES CALLED ZUCHK,ZUNIK,ZUOIK,D1MACH,ZABS +C***END PROLOGUE ZUNI1 +C COMPLEX CFN,CONE,CRSC,CSCL,CSR,CSS,CWRK,CZERO,C1,C2,PHI,RZ,SUM,S1, +C *S2,Y,Z,ZETA1,ZETA2 + DOUBLE PRECISION ALIM, APHI, ASCLE, BRY, CONER, CRSC, + * CSCL, CSRR, CSSR, CWRKI, CWRKR, C1R, C2I, C2M, C2R, ELIM, FN, + * FNU, FNUL, PHII, PHIR, RAST, RS1, RZI, RZR, STI, STR, SUMI, + * SUMR, S1I, S1R, S2I, S2R, TOL, YI, YR, ZEROI, ZEROR, ZETA1I, + * ZETA1R, ZETA2I, ZETA2R, ZI, ZR, CYR, CYI, D1MACH, ZABS + INTEGER I, IFLAG, INIT, K, KODE, M, N, ND, NLAST, NN, NUF, NW, NZ + DIMENSION BRY(3), YR(N), YI(N), CWRKR(16), CWRKI(16), CSSR(3), + * CSRR(3), CYR(2), CYI(2) + DATA ZEROR,ZEROI,CONER / 0.0D0, 0.0D0, 1.0D0 / +C + NZ = 0 + ND = N + NLAST = 0 +C----------------------------------------------------------------------- +C COMPUTED VALUES WITH EXPONENTS BETWEEN ALIM AND ELIM IN MAG- +C NITUDE ARE SCALED TO KEEP INTERMEDIATE ARITHMETIC ON SCALE, +C EXP(ALIM)=EXP(ELIM)*TOL +C----------------------------------------------------------------------- + CSCL = 1.0D0/TOL + CRSC = TOL + CSSR(1) = CSCL + CSSR(2) = CONER + CSSR(3) = CRSC + CSRR(1) = CRSC + CSRR(2) = CONER + CSRR(3) = CSCL + BRY(1) = 1.0D+3*D1MACH(1)/TOL +C----------------------------------------------------------------------- +C CHECK FOR UNDERFLOW AND OVERFLOW ON FIRST MEMBER +C----------------------------------------------------------------------- + FN = DMAX1(FNU,1.0D0) + INIT = 0 + CALL ZUNIK(ZR, ZI, FN, 1, 1, TOL, INIT, PHIR, PHII, ZETA1R, + * ZETA1I, ZETA2R, ZETA2I, SUMR, SUMI, CWRKR, CWRKI) + IF (KODE.EQ.1) GO TO 10 + STR = ZR + ZETA2R + STI = ZI + ZETA2I + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZETA1R + STR + S1I = -ZETA1I + STI + GO TO 20 + 10 CONTINUE + S1R = -ZETA1R + ZETA2R + S1I = -ZETA1I + ZETA2I + 20 CONTINUE + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 130 + 30 CONTINUE + NN = MIN0(2,ND) + DO 80 I=1,NN + FN = FNU + DBLE(FLOAT(ND-I)) + INIT = 0 + CALL ZUNIK(ZR, ZI, FN, 1, 0, TOL, INIT, PHIR, PHII, ZETA1R, + * ZETA1I, ZETA2R, ZETA2I, SUMR, SUMI, CWRKR, CWRKI) + IF (KODE.EQ.1) GO TO 40 + STR = ZR + ZETA2R + STI = ZI + ZETA2I + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZETA1R + STR + S1I = -ZETA1I + STI + ZI + GO TO 50 + 40 CONTINUE + S1R = -ZETA1R + ZETA2R + S1I = -ZETA1I + ZETA2I + 50 CONTINUE +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW +C----------------------------------------------------------------------- + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 110 + IF (I.EQ.1) IFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 60 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIR,PHII,kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) + IF (DABS(RS1).GT.ELIM) GO TO 110 + IF (I.EQ.1) IFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 60 + IF (I.EQ.1) IFLAG = 3 + 60 CONTINUE +C----------------------------------------------------------------------- +C SCALE S1 IF CABS(S1).LT.ASCLE +C----------------------------------------------------------------------- + S2R = PHIR*SUMR - PHII*SUMI + S2I = PHIR*SUMI + PHII*SUMR + STR = DEXP(S1R)*CSSR(IFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S2R*S1I + S2I*S1R + S2R = STR + IF (IFLAG.NE.1) GO TO 70 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.NE.0) GO TO 110 + 
70 CONTINUE + CYR(I) = S2R + CYI(I) = S2I + M = ND - I + 1 + YR(M) = S2R*CSRR(IFLAG) + YI(M) = S2I*CSRR(IFLAG) + 80 CONTINUE + IF (ND.LE.2) GO TO 100 + RAST = 1.0D0/ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + STR = ZR*RAST + STI = -ZI*RAST + RZR = (STR+STR)*RAST + RZI = (STI+STI)*RAST + BRY(2) = 1.0D0/BRY(1) + BRY(3) = D1MACH(2) + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + C1R = CSRR(IFLAG) + ASCLE = BRY(IFLAG) + K = ND - 2 + FN = DBLE(FLOAT(K)) + DO 90 I=3,ND + C2R = S2R + C2I = S2I + S2R = S1R + (FNU+FN)*(RZR*C2R-RZI*C2I) + S2I = S1I + (FNU+FN)*(RZR*C2I+RZI*C2R) + S1R = C2R + S1I = C2I + C2R = S2R*C1R + C2I = S2I*C1R + YR(K) = C2R + YI(K) = C2I + K = K - 1 + FN = FN - 1.0D0 + IF (IFLAG.GE.3) GO TO 90 + STR = DABS(C2R) + STI = DABS(C2I) + C2M = DMAX1(STR,STI) + IF (C2M.LE.ASCLE) GO TO 90 + IFLAG = IFLAG + 1 + ASCLE = BRY(IFLAG) + S1R = S1R*C1R + S1I = S1I*C1R + S2R = C2R + S2I = C2I + S1R = S1R*CSSR(IFLAG) + S1I = S1I*CSSR(IFLAG) + S2R = S2R*CSSR(IFLAG) + S2I = S2I*CSSR(IFLAG) + C1R = CSRR(IFLAG) + 90 CONTINUE + 100 CONTINUE + RETURN +C----------------------------------------------------------------------- +C SET UNDERFLOW AND UPDATE PARAMETERS +C----------------------------------------------------------------------- + 110 CONTINUE + IF (RS1.GT.0.0D0) GO TO 120 + YR(ND) = ZEROR + YI(ND) = ZEROI + NZ = NZ + 1 + ND = ND - 1 + IF (ND.EQ.0) GO TO 100 + CALL ZUOIK(ZR, ZI, FNU, KODE, 1, ND, YR, YI, NUF, TOL, ELIM, ALIM) + IF (NUF.LT.0) GO TO 120 + ND = ND - NUF + NZ = NZ + NUF + IF (ND.EQ.0) GO TO 100 + FN = FNU + DBLE(FLOAT(ND-1)) + IF (FN.GE.FNUL) GO TO 30 + NLAST = ND + RETURN + 120 CONTINUE + NZ = -1 + RETURN + 130 CONTINUE + IF (RS1.GT.0.0D0) GO TO 120 + NZ = N + DO 140 I=1,N + YR(I) = ZEROR + YI(I) = ZEROI + 140 CONTINUE + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni2.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni2.f new file mode 100644 index 0000000..9e5cfcd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuni2.f @@ -0,0 +1,267 @@ + SUBROUTINE ZUNI2(ZR, ZI, FNU, KODE, N, YR, YI, NZ, NLAST, FNUL, + * TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZUNI2 +C***REFER TO ZBESI,ZBESK +C +C ZUNI2 COMPUTES I(FNU,Z) IN THE RIGHT HALF PLANE BY MEANS OF +C UNIFORM ASYMPTOTIC EXPANSION FOR J(FNU,ZN) WHERE ZN IS Z*I +C OR -Z*I AND ZN IS IN THE RIGHT HALF PLANE ALSO. +C +C FNUL IS THE SMALLEST ORDER PERMITTED FOR THE ASYMPTOTIC +C EXPANSION. NLAST=0 MEANS ALL OF THE Y VALUES WERE SET. +C NLAST.NE.0 IS THE NUMBER LEFT TO BE COMPUTED BY ANOTHER +C FORMULA FOR ORDERS FNU TO FNU+NLAST-1 BECAUSE FNU+NLAST-1.LT.FNUL. 
+C Y(I)=CZERO FOR I=NLAST+1,N +C +C***ROUTINES CALLED ZAIRY,ZUCHK,ZUNHJ,ZUOIK,D1MACH,ZABS +C***END PROLOGUE ZUNI2 +C COMPLEX AI,ARG,ASUM,BSUM,CFN,CI,CID,CIP,CONE,CRSC,CSCL,CSR,CSS, +C *CZERO,C1,C2,DAI,PHI,RZ,S1,S2,Y,Z,ZB,ZETA1,ZETA2,ZN + DOUBLE PRECISION AARG, AIC, AII, AIR, ALIM, ANG, APHI, ARGI, + * ARGR, ASCLE, ASUMI, ASUMR, BRY, BSUMI, BSUMR, CIDI, CIPI, CIPR, + * CONER, CRSC, CSCL, CSRR, CSSR, C1R, C2I, C2M, C2R, DAII, + * DAIR, ELIM, FN, FNU, FNUL, HPI, PHII, PHIR, RAST, RAZ, RS1, RZI, + * RZR, STI, STR, S1I, S1R, S2I, S2R, TOL, YI, YR, ZBI, ZBR, ZEROI, + * ZEROR, ZETA1I, ZETA1R, ZETA2I, ZETA2R, ZI, ZNI, ZNR, ZR, CYR, + * CYI, D1MACH, ZABS, CAR, SAR + INTEGER I, IFLAG, IN, INU, J, K, KODE, N, NAI, ND, NDAI, NLAST, + * NN, NUF, NW, NZ, IDUM + DIMENSION BRY(3), YR(N), YI(N), CIPR(4), CIPI(4), CSSR(3), + * CSRR(3), CYR(2), CYI(2) + DATA ZEROR,ZEROI,CONER / 0.0D0, 0.0D0, 1.0D0 / + DATA CIPR(1),CIPI(1),CIPR(2),CIPI(2),CIPR(3),CIPI(3),CIPR(4), + * CIPI(4)/ 1.0D0,0.0D0, 0.0D0,1.0D0, -1.0D0,0.0D0, 0.0D0,-1.0D0/ + DATA HPI, AIC / + 1 1.57079632679489662D+00, 1.265512123484645396D+00/ +C + NZ = 0 + ND = N + NLAST = 0 +C----------------------------------------------------------------------- +C COMPUTED VALUES WITH EXPONENTS BETWEEN ALIM AND ELIM IN MAG- +C NITUDE ARE SCALED TO KEEP INTERMEDIATE ARITHMETIC ON SCALE, +C EXP(ALIM)=EXP(ELIM)*TOL +C----------------------------------------------------------------------- + CSCL = 1.0D0/TOL + CRSC = TOL + CSSR(1) = CSCL + CSSR(2) = CONER + CSSR(3) = CRSC + CSRR(1) = CRSC + CSRR(2) = CONER + CSRR(3) = CSCL + BRY(1) = 1.0D+3*D1MACH(1)/TOL +C----------------------------------------------------------------------- +C ZN IS IN THE RIGHT HALF PLANE AFTER ROTATION BY CI OR -CI +C----------------------------------------------------------------------- + ZNR = ZI + ZNI = -ZR + ZBR = ZR + ZBI = ZI + CIDI = -CONER + INU = INT(SNGL(FNU)) + ANG = HPI*(FNU-DBLE(FLOAT(INU))) + C2R = DCOS(ANG) + C2I = DSIN(ANG) + CAR = C2R + SAR = C2I + IN = INU + N - 1 + IN = MOD(IN,4) + 1 + STR = C2R*CIPR(IN) - C2I*CIPI(IN) + C2I = C2R*CIPI(IN) + C2I*CIPR(IN) + C2R = STR + IF (ZI.GT.0.0D0) GO TO 10 + ZNR = -ZNR + ZBI = -ZBI + CIDI = -CIDI + C2I = -C2I + 10 CONTINUE +C----------------------------------------------------------------------- +C CHECK FOR UNDERFLOW AND OVERFLOW ON FIRST MEMBER +C----------------------------------------------------------------------- + FN = DMAX1(FNU,1.0D0) + CALL ZUNHJ(ZNR, ZNI, FN, 1, TOL, PHIR, PHII, ARGR, ARGI, ZETA1R, + * ZETA1I, ZETA2R, ZETA2I, ASUMR, ASUMI, BSUMR, BSUMI) + IF (KODE.EQ.1) GO TO 20 + STR = ZBR + ZETA2R + STI = ZBI + ZETA2I + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZETA1R + STR + S1I = -ZETA1I + STI + GO TO 30 + 20 CONTINUE + S1R = -ZETA1R + ZETA2R + S1I = -ZETA1I + ZETA2I + 30 CONTINUE + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 150 + 40 CONTINUE + NN = MIN0(2,ND) + DO 90 I=1,NN + FN = FNU + DBLE(FLOAT(ND-I)) + CALL ZUNHJ(ZNR, ZNI, FN, 0, TOL, PHIR, PHII, ARGR, ARGI, + * ZETA1R, ZETA1I, ZETA2R, ZETA2I, ASUMR, ASUMI, BSUMR, BSUMI) + IF (KODE.EQ.1) GO TO 50 + STR = ZBR + ZETA2R + STI = ZBI + ZETA2I + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZETA1R + STR + S1I = -ZETA1I + STI + DABS(ZI) + GO TO 60 + 50 CONTINUE + S1R = -ZETA1R + ZETA2R + S1I = -ZETA1I + ZETA2I + 60 CONTINUE +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW 
+C----------------------------------------------------------------------- + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 120 + IF (I.EQ.1) IFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 70 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIR,PHII,kind=KIND(1.0D0))) + AARG = ZABS(CMPLX(ARGR,ARGI,kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) - 0.25D0*DLOG(AARG) - AIC + IF (DABS(RS1).GT.ELIM) GO TO 120 + IF (I.EQ.1) IFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 70 + IF (I.EQ.1) IFLAG = 3 + 70 CONTINUE +C----------------------------------------------------------------------- +C SCALE S1 TO KEEP INTERMEDIATE ARITHMETIC ON SCALE NEAR +C EXPONENT EXTREMES +C----------------------------------------------------------------------- + CALL ZAIRY(ARGR, ARGI, 0, 2, AIR, AII, NAI, IDUM) + CALL ZAIRY(ARGR, ARGI, 1, 2, DAIR, DAII, NDAI, IDUM) + STR = DAIR*BSUMR - DAII*BSUMI + STI = DAIR*BSUMI + DAII*BSUMR + STR = STR + (AIR*ASUMR-AII*ASUMI) + STI = STI + (AIR*ASUMI+AII*ASUMR) + S2R = PHIR*STR - PHII*STI + S2I = PHIR*STI + PHII*STR + STR = DEXP(S1R)*CSSR(IFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S2R*S1I + S2I*S1R + S2R = STR + IF (IFLAG.NE.1) GO TO 80 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.NE.0) GO TO 120 + 80 CONTINUE + IF (ZI.LE.0.0D0) S2I = -S2I + STR = S2R*C2R - S2I*C2I + S2I = S2R*C2I + S2I*C2R + S2R = STR + CYR(I) = S2R + CYI(I) = S2I + J = ND - I + 1 + YR(J) = S2R*CSRR(IFLAG) + YI(J) = S2I*CSRR(IFLAG) + STR = -C2I*CIDI + C2I = C2R*CIDI + C2R = STR + 90 CONTINUE + IF (ND.LE.2) GO TO 110 + RAZ = 1.0D0/ZABS(CMPLX(ZR,ZI,kind=KIND(1.0D0))) + STR = ZR*RAZ + STI = -ZI*RAZ + RZR = (STR+STR)*RAZ + RZI = (STI+STI)*RAZ + BRY(2) = 1.0D0/BRY(1) + BRY(3) = D1MACH(2) + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + C1R = CSRR(IFLAG) + ASCLE = BRY(IFLAG) + K = ND - 2 + FN = DBLE(FLOAT(K)) + DO 100 I=3,ND + C2R = S2R + C2I = S2I + S2R = S1R + (FNU+FN)*(RZR*C2R-RZI*C2I) + S2I = S1I + (FNU+FN)*(RZR*C2I+RZI*C2R) + S1R = C2R + S1I = C2I + C2R = S2R*C1R + C2I = S2I*C1R + YR(K) = C2R + YI(K) = C2I + K = K - 1 + FN = FN - 1.0D0 + IF (IFLAG.GE.3) GO TO 100 + STR = DABS(C2R) + STI = DABS(C2I) + C2M = DMAX1(STR,STI) + IF (C2M.LE.ASCLE) GO TO 100 + IFLAG = IFLAG + 1 + ASCLE = BRY(IFLAG) + S1R = S1R*C1R + S1I = S1I*C1R + S2R = C2R + S2I = C2I + S1R = S1R*CSSR(IFLAG) + S1I = S1I*CSSR(IFLAG) + S2R = S2R*CSSR(IFLAG) + S2I = S2I*CSSR(IFLAG) + C1R = CSRR(IFLAG) + 100 CONTINUE + 110 CONTINUE + RETURN + 120 CONTINUE + IF (RS1.GT.0.0D0) GO TO 140 +C----------------------------------------------------------------------- +C SET UNDERFLOW AND UPDATE PARAMETERS +C----------------------------------------------------------------------- + YR(ND) = ZEROR + YI(ND) = ZEROI + NZ = NZ + 1 + ND = ND - 1 + IF (ND.EQ.0) GO TO 110 + CALL ZUOIK(ZR, ZI, FNU, KODE, 1, ND, YR, YI, NUF, TOL, ELIM, ALIM) + IF (NUF.LT.0) GO TO 140 + ND = ND - NUF + NZ = NZ + NUF + IF (ND.EQ.0) GO TO 110 + FN = FNU + DBLE(FLOAT(ND-1)) + IF (FN.LT.FNUL) GO TO 130 +C FN = CIDI +C J = NUF + 1 +C K = MOD(J,4) + 1 +C S1R = CIPR(K) +C S1I = CIPI(K) +C IF (FN.LT.0.0D0) S1I = -S1I +C STR = C2R*S1R - C2I*S1I +C C2I = C2R*S1I + C2I*S1R +C C2R = STR + IN = INU + ND - 1 + IN = MOD(IN,4) + 1 + C2R = CAR*CIPR(IN) - SAR*CIPI(IN) + C2I = CAR*CIPI(IN) + SAR*CIPR(IN) + IF (ZI.LE.0.0D0) C2I = -C2I + GO TO 40 + 130 CONTINUE + NLAST = ND + 
RETURN + 140 CONTINUE + NZ = -1 + RETURN + 150 CONTINUE + IF (RS1.GT.0.0D0) GO TO 140 + NZ = N + DO 160 I=1,N + YR(I) = ZEROR + YI(I) = ZEROI + 160 CONTINUE + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunik.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunik.f new file mode 100644 index 0000000..3e8293e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunik.f @@ -0,0 +1,211 @@ + SUBROUTINE ZUNIK(ZRR, ZRI, FNU, IKFLG, IPMTR, TOL, INIT, PHIR, + * PHII, ZETA1R, ZETA1I, ZETA2R, ZETA2I, SUMR, SUMI, CWRKR, CWRKI) +C***BEGIN PROLOGUE ZUNIK +C***REFER TO ZBESI,ZBESK +C +C ZUNIK COMPUTES PARAMETERS FOR THE UNIFORM ASYMPTOTIC +C EXPANSIONS OF THE I AND K FUNCTIONS ON IKFLG= 1 OR 2 +C RESPECTIVELY BY +C +C W(FNU,ZR) = PHI*EXP(ZETA)*SUM +C +C WHERE ZETA=-ZETA1 + ZETA2 OR +C ZETA1 - ZETA2 +C +C THE FIRST CALL MUST HAVE INIT=0. SUBSEQUENT CALLS WITH THE +C SAME ZR AND FNU WILL RETURN THE I OR K FUNCTION ON IKFLG= +C 1 OR 2 WITH NO CHANGE IN INIT. CWRK IS A COMPLEX WORK +C ARRAY. IPMTR=0 COMPUTES ALL PARAMETERS. IPMTR=1 COMPUTES PHI, +C ZETA1,ZETA2. +C +C***ROUTINES CALLED ZDIV,ZLOG,ZSQRT,D1MACH +C***END PROLOGUE ZUNIK +C COMPLEX CFN,CON,CONE,CRFN,CWRK,CZERO,PHI,S,SR,SUM,T,T2,ZETA1, +C *ZETA2,ZN,ZR + DOUBLE PRECISION AC, C, CON, CONEI, CONER, CRFNI, CRFNR, CWRKI, + * CWRKR, FNU, PHII, PHIR, RFN, SI, SR, SRI, SRR, STI, STR, SUMI, + * SUMR, TEST, TI, TOL, TR, T2I, T2R, ZEROI, ZEROR, ZETA1I, ZETA1R, + * ZETA2I, ZETA2R, ZNI, ZNR, ZRI, ZRR, D1MACH + INTEGER I, IDUM, IKFLG, INIT, IPMTR, J, K, L + DIMENSION C(120), CWRKR(16), CWRKI(16), CON(2) + DATA ZEROR,ZEROI,CONER,CONEI / 0.0D0, 0.0D0, 1.0D0, 0.0D0 / + DATA CON(1), CON(2) / + 1 3.98942280401432678D-01, 1.25331413731550025D+00 / + DATA C(1), C(2), C(3), C(4), C(5), C(6), C(7), C(8), C(9), C(10), + 1 C(11), C(12), C(13), C(14), C(15), C(16), C(17), C(18), + 2 C(19), C(20), C(21), C(22), C(23), C(24)/ + 3 1.00000000000000000D+00, -2.08333333333333333D-01, + 4 1.25000000000000000D-01, 3.34201388888888889D-01, + 5 -4.01041666666666667D-01, 7.03125000000000000D-02, + 6 -1.02581259645061728D+00, 1.84646267361111111D+00, + 7 -8.91210937500000000D-01, 7.32421875000000000D-02, + 8 4.66958442342624743D+00, -1.12070026162229938D+01, + 9 8.78912353515625000D+00, -2.36408691406250000D+00, + A 1.12152099609375000D-01, -2.82120725582002449D+01, + B 8.46362176746007346D+01, -9.18182415432400174D+01, + C 4.25349987453884549D+01, -7.36879435947963170D+00, + D 2.27108001708984375D-01, 2.12570130039217123D+02, + E -7.65252468141181642D+02, 1.05999045252799988D+03/ + DATA C(25), C(26), C(27), C(28), C(29), C(30), C(31), C(32), + 1 C(33), C(34), C(35), C(36), C(37), C(38), C(39), C(40), + 2 C(41), C(42), C(43), C(44), C(45), C(46), C(47), C(48)/ + 3 -6.99579627376132541D+02, 2.18190511744211590D+02, + 4 -2.64914304869515555D+01, 5.72501420974731445D-01, + 5 -1.91945766231840700D+03, 8.06172218173730938D+03, + 6 -1.35865500064341374D+04, 1.16553933368645332D+04, + 7 -5.30564697861340311D+03, 1.20090291321635246D+03, + 8 -1.08090919788394656D+02, 1.72772750258445740D+00, + 9 2.02042913309661486D+04, -9.69805983886375135D+04, + A 1.92547001232531532D+05, -2.03400177280415534D+05, + B 1.22200464983017460D+05, -4.11926549688975513D+04, + C 7.10951430248936372D+03, -4.93915304773088012D+02, + D 6.07404200127348304D+00, -2.42919187900551333D+05, + E 1.31176361466297720D+06, -2.99801591853810675D+06/ + DATA C(49), C(50), C(51), C(52), C(53), C(54), C(55), C(56), + 1 C(57), C(58), C(59), C(60), C(61), C(62), 
C(63), C(64), + 2 C(65), C(66), C(67), C(68), C(69), C(70), C(71), C(72)/ + 3 3.76327129765640400D+06, -2.81356322658653411D+06, + 4 1.26836527332162478D+06, -3.31645172484563578D+05, + 5 4.52187689813627263D+04, -2.49983048181120962D+03, + 6 2.43805296995560639D+01, 3.28446985307203782D+06, + 7 -1.97068191184322269D+07, 5.09526024926646422D+07, + 8 -7.41051482115326577D+07, 6.63445122747290267D+07, + 9 -3.75671766607633513D+07, 1.32887671664218183D+07, + A -2.78561812808645469D+06, 3.08186404612662398D+05, + B -1.38860897537170405D+04, 1.10017140269246738D+02, + C -4.93292536645099620D+07, 3.25573074185765749D+08, + D -9.39462359681578403D+08, 1.55359689957058006D+09, + E -1.62108055210833708D+09, 1.10684281682301447D+09/ + DATA C(73), C(74), C(75), C(76), C(77), C(78), C(79), C(80), + 1 C(81), C(82), C(83), C(84), C(85), C(86), C(87), C(88), + 2 C(89), C(90), C(91), C(92), C(93), C(94), C(95), C(96)/ + 3 -4.95889784275030309D+08, 1.42062907797533095D+08, + 4 -2.44740627257387285D+07, 2.24376817792244943D+06, + 5 -8.40054336030240853D+04, 5.51335896122020586D+02, + 6 8.14789096118312115D+08, -5.86648149205184723D+09, + 7 1.86882075092958249D+10, -3.46320433881587779D+10, + 8 4.12801855797539740D+10, -3.30265997498007231D+10, + 9 1.79542137311556001D+10, -6.56329379261928433D+09, + A 1.55927986487925751D+09, -2.25105661889415278D+08, + B 1.73951075539781645D+07, -5.49842327572288687D+05, + C 3.03809051092238427D+03, -1.46792612476956167D+10, + D 1.14498237732025810D+11, -3.99096175224466498D+11, + E 8.19218669548577329D+11, -1.09837515608122331D+12/ + DATA C(97), C(98), C(99), C(100), C(101), C(102), C(103), C(104), + 1 C(105), C(106), C(107), C(108), C(109), C(110), C(111), + 2 C(112), C(113), C(114), C(115), C(116), C(117), C(118)/ + 3 1.00815810686538209D+12, -6.45364869245376503D+11, + 4 2.87900649906150589D+11, -8.78670721780232657D+10, + 5 1.76347306068349694D+10, -2.16716498322379509D+09, + 6 1.43157876718888981D+08, -3.87183344257261262D+06, + 7 1.82577554742931747D+04, 2.86464035717679043D+11, + 8 -2.40629790002850396D+12, 9.10934118523989896D+12, + 9 -2.05168994109344374D+13, 3.05651255199353206D+13, + A -3.16670885847851584D+13, 2.33483640445818409D+13, + B -1.23204913055982872D+13, 4.61272578084913197D+12, + C -1.19655288019618160D+12, 2.05914503232410016D+11, + D -2.18229277575292237D+10, 1.24700929351271032D+09/ + DATA C(119), C(120)/ + 1 -2.91883881222208134D+07, 1.18838426256783253D+05/ +C + IF (INIT.NE.0) GO TO 40 +C----------------------------------------------------------------------- +C INITIALIZE ALL VARIABLES +C----------------------------------------------------------------------- + RFN = 1.0D0/FNU +C----------------------------------------------------------------------- +C OVERFLOW TEST (ZR/FNU TOO SMALL) +C----------------------------------------------------------------------- + TEST = D1MACH(1)*1.0D+3 + AC = FNU*TEST + IF (DABS(ZRR).GT.AC .OR. 
DABS(ZRI).GT.AC) GO TO 15 + ZETA1R = 2.0D0*DABS(DLOG(TEST))+FNU + ZETA1I = 0.0D0 + ZETA2R = FNU + ZETA2I = 0.0D0 + PHIR = 1.0D0 + PHII = 0.0D0 + RETURN + 15 CONTINUE + TR = ZRR*RFN + TI = ZRI*RFN + SR = CONER + (TR*TR-TI*TI) + SI = CONEI + (TR*TI+TI*TR) + CALL ZSQRT(SR, SI, SRR, SRI) + STR = CONER + SRR + STI = CONEI + SRI + CALL ZDIV(STR, STI, TR, TI, ZNR, ZNI) + CALL ZLOG(ZNR, ZNI, STR, STI, IDUM) + ZETA1R = FNU*STR + ZETA1I = FNU*STI + ZETA2R = FNU*SRR + ZETA2I = FNU*SRI + CALL ZDIV(CONER, CONEI, SRR, SRI, TR, TI) + SRR = TR*RFN + SRI = TI*RFN + CALL ZSQRT(SRR, SRI, CWRKR(16), CWRKI(16)) + PHIR = CWRKR(16)*CON(IKFLG) + PHII = CWRKI(16)*CON(IKFLG) + IF (IPMTR.NE.0) RETURN + CALL ZDIV(CONER, CONEI, SR, SI, T2R, T2I) + CWRKR(1) = CONER + CWRKI(1) = CONEI + CRFNR = CONER + CRFNI = CONEI + AC = 1.0D0 + L = 1 + DO 20 K=2,15 + SR = ZEROR + SI = ZEROI + DO 10 J=1,K + L = L + 1 + STR = SR*T2R - SI*T2I + C(L) + SI = SR*T2I + SI*T2R + SR = STR + 10 CONTINUE + STR = CRFNR*SRR - CRFNI*SRI + CRFNI = CRFNR*SRI + CRFNI*SRR + CRFNR = STR + CWRKR(K) = CRFNR*SR - CRFNI*SI + CWRKI(K) = CRFNR*SI + CRFNI*SR + AC = AC*RFN + TEST = DABS(CWRKR(K)) + DABS(CWRKI(K)) + IF (AC.LT.TOL .AND. TEST.LT.TOL) GO TO 30 + 20 CONTINUE + K = 15 + 30 CONTINUE + INIT = K + 40 CONTINUE + IF (IKFLG.EQ.2) GO TO 60 +C----------------------------------------------------------------------- +C COMPUTE SUM FOR THE I FUNCTION +C----------------------------------------------------------------------- + SR = ZEROR + SI = ZEROI + DO 50 I=1,INIT + SR = SR + CWRKR(I) + SI = SI + CWRKI(I) + 50 CONTINUE + SUMR = SR + SUMI = SI + PHIR = CWRKR(16)*CON(1) + PHII = CWRKI(16)*CON(1) + RETURN + 60 CONTINUE +C----------------------------------------------------------------------- +C COMPUTE SUM FOR THE K FUNCTION +C----------------------------------------------------------------------- + SR = ZEROR + SI = ZEROI + TR = CONER + DO 70 I=1,INIT + SR = SR + TR*CWRKR(I) + SI = SI + TR*CWRKI(I) + TR = -TR + 70 CONTINUE + SUMR = SR + SUMI = SI + PHIR = CWRKR(16)*CON(2) + PHII = CWRKI(16)*CON(2) + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk1.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk1.f new file mode 100644 index 0000000..6b2418d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk1.f @@ -0,0 +1,426 @@ + SUBROUTINE ZUNK1(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZUNK1 +C***REFER TO ZBESK +C +C ZUNK1 COMPUTES K(FNU,Z) AND ITS ANALYTIC CONTINUATION FROM THE +C RIGHT HALF PLANE TO THE LEFT HALF PLANE BY MEANS OF THE +C UNIFORM ASYMPTOTIC EXPANSION. +C MR INDICATES THE DIRECTION OF ROTATION FOR ANALYTIC CONTINUATION. 
+C NZ=-1 MEANS AN OVERFLOW WILL OCCUR +C +C***ROUTINES CALLED ZKSCL,ZS1S2,ZUCHK,ZUNIK,D1MACH,ZABS +C***END PROLOGUE ZUNK1 +C COMPLEX CFN,CK,CONE,CRSC,CS,CSCL,CSGN,CSPN,CSR,CSS,CWRK,CY,CZERO, +C *C1,C2,PHI,PHID,RZ,SUM,SUMD,S1,S2,Y,Z,ZETA1,ZETA1D,ZETA2,ZETA2D,ZR + DOUBLE PRECISION ALIM, ANG, APHI, ASC, ASCLE, BRY, CKI, CKR, + * CONER, CRSC, CSCL, CSGNI, CSPNI, CSPNR, CSR, CSRR, CSSR, + * CWRKI, CWRKR, CYI, CYR, C1I, C1R, C2I, C2M, C2R, ELIM, FMR, FN, + * FNF, FNU, PHIDI, PHIDR, PHII, PHIR, PI, RAST, RAZR, RS1, RZI, + * RZR, SGN, STI, STR, SUMDI, SUMDR, SUMI, SUMR, S1I, S1R, S2I, + * S2R, TOL, YI, YR, ZEROI, ZEROR, ZETA1I, ZETA1R, ZETA2I, ZETA2R, + * ZET1DI, ZET1DR, ZET2DI, ZET2DR, ZI, ZR, ZRI, ZRR, D1MACH, ZABS + INTEGER I, IB, IFLAG, IFN, IL, INIT, INU, IUF, K, KDFLG, KFLAG, + * KK, KODE, MR, N, NW, NZ, INITD, IC, IPARD, J + DIMENSION BRY(3), INIT(2), YR(N), YI(N), SUMR(2), SUMI(2), + * ZETA1R(2), ZETA1I(2), ZETA2R(2), ZETA2I(2), CYR(2), CYI(2), + * CWRKR(16,3), CWRKI(16,3), CSSR(3), CSRR(3), PHIR(2), PHII(2) + DATA ZEROR,ZEROI,CONER / 0.0D0, 0.0D0, 1.0D0 / + DATA PI / 3.14159265358979324D0 / +C + KDFLG = 1 + NZ = 0 +C----------------------------------------------------------------------- +C EXP(-ALIM)=EXP(-ELIM)/TOL=APPROX. ONE PRECISION GREATER THAN +C THE UNDERFLOW LIMIT +C----------------------------------------------------------------------- + CSCL = 1.0D0/TOL + CRSC = TOL + CSSR(1) = CSCL + CSSR(2) = CONER + CSSR(3) = CRSC + CSRR(1) = CRSC + CSRR(2) = CONER + CSRR(3) = CSCL + BRY(1) = 1.0D+3*D1MACH(1)/TOL + BRY(2) = 1.0D0/BRY(1) + BRY(3) = D1MACH(2) + ZRR = ZR + ZRI = ZI + IF (ZR.GE.0.0D0) GO TO 10 + ZRR = -ZR + ZRI = -ZI + 10 CONTINUE + J = 2 + DO 70 I=1,N +C----------------------------------------------------------------------- +C J FLIP FLOPS BETWEEN 1 AND 2 IN J = 3 - J +C----------------------------------------------------------------------- + J = 3 - J + FN = FNU + DBLE(FLOAT(I-1)) + INIT(J) = 0 + CALL ZUNIK(ZRR, ZRI, FN, 2, 0, TOL, INIT(J), PHIR(J), PHII(J), + * ZETA1R(J), ZETA1I(J), ZETA2R(J), ZETA2I(J), SUMR(J), SUMI(J), + * CWRKR(1,J), CWRKI(1,J)) + IF (KODE.EQ.1) GO TO 20 + STR = ZRR + ZETA2R(J) + STI = ZRI + ZETA2I(J) + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = ZETA1R(J) - STR + S1I = ZETA1I(J) - STI + GO TO 30 + 20 CONTINUE + S1R = ZETA1R(J) - ZETA2R(J) + S1I = ZETA1I(J) - ZETA2I(J) + 30 CONTINUE + RS1 = S1R +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW +C----------------------------------------------------------------------- + IF (DABS(RS1).GT.ELIM) GO TO 60 + IF (KDFLG.EQ.1) KFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 40 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIR(J),PHII(J),kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) + IF (DABS(RS1).GT.ELIM) GO TO 60 + IF (KDFLG.EQ.1) KFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 40 + IF (KDFLG.EQ.1) KFLAG = 3 + 40 CONTINUE +C----------------------------------------------------------------------- +C SCALE S1 TO KEEP INTERMEDIATE ARITHMETIC ON SCALE NEAR +C EXPONENT EXTREMES +C----------------------------------------------------------------------- + S2R = PHIR(J)*SUMR(J) - PHII(J)*SUMI(J) + S2I = PHIR(J)*SUMI(J) + PHII(J)*SUMR(J) + STR = DEXP(S1R)*CSSR(KFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S1R*S2I + S2R*S1I + S2R = STR + IF 
(KFLAG.NE.1) GO TO 50 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.NE.0) GO TO 60 + 50 CONTINUE + CYR(KDFLG) = S2R + CYI(KDFLG) = S2I + YR(I) = S2R*CSRR(KFLAG) + YI(I) = S2I*CSRR(KFLAG) + IF (KDFLG.EQ.2) GO TO 75 + KDFLG = 2 + GO TO 70 + 60 CONTINUE + IF (RS1.GT.0.0D0) GO TO 300 +C----------------------------------------------------------------------- +C FOR ZR.LT.0.0, THE I FUNCTION TO BE ADDED WILL OVERFLOW +C----------------------------------------------------------------------- + IF (ZR.LT.0.0D0) GO TO 300 + KDFLG = 1 + YR(I)=ZEROR + YI(I)=ZEROI + NZ=NZ+1 + IF (I.EQ.1) GO TO 70 + IF ((YR(I-1).EQ.ZEROR).AND.(YI(I-1).EQ.ZEROI)) GO TO 70 + YR(I-1)=ZEROR + YI(I-1)=ZEROI + NZ=NZ+1 + 70 CONTINUE + I = N + 75 CONTINUE + RAZR = 1.0D0/ZABS(CMPLX(ZRR,ZRI,kind=KIND(1.0D0))) + STR = ZRR*RAZR + STI = -ZRI*RAZR + RZR = (STR+STR)*RAZR + RZI = (STI+STI)*RAZR + CKR = FN*RZR + CKI = FN*RZI + IB = I + 1 + IF (N.LT.IB) GO TO 160 +C----------------------------------------------------------------------- +C TEST LAST MEMBER FOR UNDERFLOW AND OVERFLOW. SET SEQUENCE TO ZERO +C ON UNDERFLOW. +C----------------------------------------------------------------------- + FN = FNU + DBLE(FLOAT(N-1)) + IPARD = 1 + IF (MR.NE.0) IPARD = 0 + INITD = 0 + CALL ZUNIK(ZRR, ZRI, FN, 2, IPARD, TOL, INITD, PHIDR, PHIDI, + * ZET1DR, ZET1DI, ZET2DR, ZET2DI, SUMDR, SUMDI, CWRKR(1,3), + * CWRKI(1,3)) + IF (KODE.EQ.1) GO TO 80 + STR = ZRR + ZET2DR + STI = ZRI + ZET2DI + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = ZET1DR - STR + S1I = ZET1DI - STI + GO TO 90 + 80 CONTINUE + S1R = ZET1DR - ZET2DR + S1I = ZET1DI - ZET2DI + 90 CONTINUE + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 95 + IF (DABS(RS1).LT.ALIM) GO TO 100 +C---------------------------------------------------------------------------- +C REFINE ESTIMATE AND TEST +C------------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIDR,PHIDI,kind=KIND(1.0D0))) + RS1 = RS1+DLOG(APHI) + IF (DABS(RS1).LT.ELIM) GO TO 100 + 95 CONTINUE + IF (DABS(RS1).GT.0.0D0) GO TO 300 +C----------------------------------------------------------------------- +C FOR ZR.LT.0.0, THE I FUNCTION TO BE ADDED WILL OVERFLOW +C----------------------------------------------------------------------- + IF (ZR.LT.0.0D0) GO TO 300 + NZ = N + DO 96 I=1,N + YR(I) = ZEROR + YI(I) = ZEROI + 96 CONTINUE + RETURN +C--------------------------------------------------------------------------- +C FORWARD RECUR FOR REMAINDER OF THE SEQUENCE +C---------------------------------------------------------------------------- + 100 CONTINUE + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + C1R = CSRR(KFLAG) + ASCLE = BRY(KFLAG) + DO 120 I=IB,N + C2R = S2R + C2I = S2I + S2R = CKR*C2R - CKI*C2I + S1R + S2I = CKR*C2I + CKI*C2R + S1I + S1R = C2R + S1I = C2I + CKR = CKR + RZR + CKI = CKI + RZI + C2R = S2R*C1R + C2I = S2I*C1R + YR(I) = C2R + YI(I) = C2I + IF (KFLAG.GE.3) GO TO 120 + STR = DABS(C2R) + STI = DABS(C2I) + C2M = DMAX1(STR,STI) + IF (C2M.LE.ASCLE) GO TO 120 + KFLAG = KFLAG + 1 + ASCLE = BRY(KFLAG) + S1R = S1R*C1R + S1I = S1I*C1R + S2R = C2R + S2I = C2I + S1R = S1R*CSSR(KFLAG) + S1I = S1I*CSSR(KFLAG) + S2R = S2R*CSSR(KFLAG) + S2I = S2I*CSSR(KFLAG) + C1R = CSRR(KFLAG) + 120 CONTINUE + 160 CONTINUE + IF (MR.EQ.0) RETURN +C----------------------------------------------------------------------- +C ANALYTIC CONTINUATION FOR RE(Z).LT.0.0D0 +C----------------------------------------------------------------------- + NZ = 0 + FMR = 
DBLE(FLOAT(MR)) + SGN = -DSIGN(PI,FMR) +C----------------------------------------------------------------------- +C CSPN AND CSGN ARE COEFF OF K AND I FUNCTIONS RESP. +C----------------------------------------------------------------------- + CSGNI = SGN + INU = INT(SNGL(FNU)) + FNF = FNU - DBLE(FLOAT(INU)) + IFN = INU + N - 1 + ANG = FNF*SGN + CSPNR = DCOS(ANG) + CSPNI = DSIN(ANG) + IF (MOD(IFN,2).EQ.0) GO TO 170 + CSPNR = -CSPNR + CSPNI = -CSPNI + 170 CONTINUE + ASC = BRY(1) + IUF = 0 + KK = N + KDFLG = 1 + IB = IB - 1 + IC = IB - 1 + DO 270 K=1,N + FN = FNU + DBLE(FLOAT(KK-1)) +C----------------------------------------------------------------------- +C LOGIC TO SORT OUT CASES WHOSE PARAMETERS WERE SET FOR THE K +C FUNCTION ABOVE +C----------------------------------------------------------------------- + M=3 + IF (N.GT.2) GO TO 175 + 172 CONTINUE + INITD = INIT(J) + PHIDR = PHIR(J) + PHIDI = PHII(J) + ZET1DR = ZETA1R(J) + ZET1DI = ZETA1I(J) + ZET2DR = ZETA2R(J) + ZET2DI = ZETA2I(J) + SUMDR = SUMR(J) + SUMDI = SUMI(J) + M = J + J = 3 - J + GO TO 180 + 175 CONTINUE + IF ((KK.EQ.N).AND.(IB.LT.N)) GO TO 180 + IF ((KK.EQ.IB).OR.(KK.EQ.IC)) GO TO 172 + INITD = 0 + 180 CONTINUE + CALL ZUNIK(ZRR, ZRI, FN, 1, 0, TOL, INITD, PHIDR, PHIDI, + * ZET1DR, ZET1DI, ZET2DR, ZET2DI, SUMDR, SUMDI, + * CWRKR(1,M), CWRKI(1,M)) + IF (KODE.EQ.1) GO TO 200 + STR = ZRR + ZET2DR + STI = ZRI + ZET2DI + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZET1DR + STR + S1I = -ZET1DI + STI + GO TO 210 + 200 CONTINUE + S1R = -ZET1DR + ZET2DR + S1I = -ZET1DI + ZET2DI + 210 CONTINUE +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW +C----------------------------------------------------------------------- + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 260 + IF (KDFLG.EQ.1) IFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 220 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIDR,PHIDI,kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) + IF (DABS(RS1).GT.ELIM) GO TO 260 + IF (KDFLG.EQ.1) IFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 220 + IF (KDFLG.EQ.1) IFLAG = 3 + 220 CONTINUE + STR = PHIDR*SUMDR - PHIDI*SUMDI + STI = PHIDR*SUMDI + PHIDI*SUMDR + S2R = -CSGNI*STI + S2I = CSGNI*STR + STR = DEXP(S1R)*CSSR(IFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S2R*S1I + S2I*S1R + S2R = STR + IF (IFLAG.NE.1) GO TO 230 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.EQ.0) GO TO 230 + S2R = ZEROR + S2I = ZEROI + 230 CONTINUE + CYR(KDFLG) = S2R + CYI(KDFLG) = S2I + C2R = S2R + C2I = S2I + S2R = S2R*CSRR(IFLAG) + S2I = S2I*CSRR(IFLAG) +C----------------------------------------------------------------------- +C ADD I AND K FUNCTIONS, K SEQUENCE IN Y(I), I=1,N +C----------------------------------------------------------------------- + S1R = YR(KK) + S1I = YI(KK) + IF (KODE.EQ.1) GO TO 250 + CALL ZS1S2(ZRR, ZRI, S1R, S1I, S2R, S2I, NW, ASC, ALIM, IUF) + NZ = NZ + NW + 250 CONTINUE + YR(KK) = S1R*CSPNR - S1I*CSPNI + S2R + YI(KK) = CSPNR*S1I + CSPNI*S1R + S2I + KK = KK - 1 + CSPNR = -CSPNR + CSPNI = -CSPNI + IF (C2R.NE.0.0D0 .OR. 
C2I.NE.0.0D0) GO TO 255 + KDFLG = 1 + GO TO 270 + 255 CONTINUE + IF (KDFLG.EQ.2) GO TO 275 + KDFLG = 2 + GO TO 270 + 260 CONTINUE + IF (RS1.GT.0.0D0) GO TO 300 + S2R = ZEROR + S2I = ZEROI + GO TO 230 + 270 CONTINUE + K = N + 275 CONTINUE + IL = N - K + IF (IL.EQ.0) RETURN +C----------------------------------------------------------------------- +C RECUR BACKWARD FOR REMAINDER OF I SEQUENCE AND ADD IN THE +C K FUNCTIONS, SCALING THE I SEQUENCE DURING RECURRENCE TO KEEP +C INTERMEDIATE ARITHMETIC ON SCALE NEAR EXPONENT EXTREMES. +C----------------------------------------------------------------------- + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + CSR = CSRR(IFLAG) + ASCLE = BRY(IFLAG) + FN = DBLE(FLOAT(INU+IL)) + DO 290 I=1,IL + C2R = S2R + C2I = S2I + S2R = S1R + (FN+FNF)*(RZR*C2R-RZI*C2I) + S2I = S1I + (FN+FNF)*(RZR*C2I+RZI*C2R) + S1R = C2R + S1I = C2I + FN = FN - 1.0D0 + C2R = S2R*CSR + C2I = S2I*CSR + CKR = C2R + CKI = C2I + C1R = YR(KK) + C1I = YI(KK) + IF (KODE.EQ.1) GO TO 280 + CALL ZS1S2(ZRR, ZRI, C1R, C1I, C2R, C2I, NW, ASC, ALIM, IUF) + NZ = NZ + NW + 280 CONTINUE + YR(KK) = C1R*CSPNR - C1I*CSPNI + C2R + YI(KK) = C1R*CSPNI + C1I*CSPNR + C2I + KK = KK - 1 + CSPNR = -CSPNR + CSPNI = -CSPNI + IF (IFLAG.GE.3) GO TO 290 + C2R = DABS(CKR) + C2I = DABS(CKI) + C2M = DMAX1(C2R,C2I) + IF (C2M.LE.ASCLE) GO TO 290 + IFLAG = IFLAG + 1 + ASCLE = BRY(IFLAG) + S1R = S1R*CSR + S1I = S1I*CSR + S2R = CKR + S2I = CKI + S1R = S1R*CSSR(IFLAG) + S1I = S1I*CSSR(IFLAG) + S2R = S2R*CSSR(IFLAG) + S2I = S2I*CSSR(IFLAG) + CSR = CSRR(IFLAG) + 290 CONTINUE + RETURN + 300 CONTINUE + NZ = -1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk2.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk2.f new file mode 100644 index 0000000..6859f5e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zunk2.f @@ -0,0 +1,505 @@ + SUBROUTINE ZUNK2(ZR, ZI, FNU, KODE, MR, N, YR, YI, NZ, TOL, ELIM, + * ALIM) +C***BEGIN PROLOGUE ZUNK2 +C***REFER TO ZBESK +C +C ZUNK2 COMPUTES K(FNU,Z) AND ITS ANALYTIC CONTINUATION FROM THE +C RIGHT HALF PLANE TO THE LEFT HALF PLANE BY MEANS OF THE +C UNIFORM ASYMPTOTIC EXPANSIONS FOR H(KIND,FNU,ZN) AND J(FNU,ZN) +C WHERE ZN IS IN THE RIGHT HALF PLANE, KIND=(3-MR)/2, MR=+1 OR +C -1. HERE ZN=ZR*I OR -ZR*I WHERE ZR=Z IF Z IS IN THE RIGHT +C HALF PLANE OR ZR=-Z IF Z IS IN THE LEFT HALF PLANE. MR INDIC- +C ATES THE DIRECTION OF ROTATION FOR ANALYTIC CONTINUATION. 
+C NZ=-1 MEANS AN OVERFLOW WILL OCCUR +C +C***ROUTINES CALLED ZAIRY,ZKSCL,ZS1S2,ZUCHK,ZUNHJ,D1MACH,ZABS +C***END PROLOGUE ZUNK2 +C COMPLEX AI,ARG,ARGD,ASUM,ASUMD,BSUM,BSUMD,CFN,CI,CIP,CK,CONE,CRSC, +C *CR1,CR2,CS,CSCL,CSGN,CSPN,CSR,CSS,CY,CZERO,C1,C2,DAI,PHI,PHID,RZ, +C *S1,S2,Y,Z,ZB,ZETA1,ZETA1D,ZETA2,ZETA2D,ZN,ZR + DOUBLE PRECISION AARG, AIC, AII, AIR, ALIM, ANG, APHI, ARGDI, + * ARGDR, ARGI, ARGR, ASC, ASCLE, ASUMDI, ASUMDR, ASUMI, ASUMR, + * BRY, BSUMDI, BSUMDR, BSUMI, BSUMR, CAR, CIPI, CIPR, CKI, CKR, + * CONER, CRSC, CR1I, CR1R, CR2I, CR2R, CSCL, CSGNI, CSI, + * CSPNI, CSPNR, CSR, CSRR, CSSR, CYI, CYR, C1I, C1R, C2I, C2M, + * C2R, DAII, DAIR, ELIM, FMR, FN, FNF, FNU, HPI, PHIDI, PHIDR, + * PHII, PHIR, PI, PTI, PTR, RAST, RAZR, RS1, RZI, RZR, SAR, SGN, + * STI, STR, S1I, S1R, S2I, S2R, TOL, YI, YR, YY, ZBI, ZBR, ZEROI, + * ZEROR, ZETA1I, ZETA1R, ZETA2I, ZETA2R, ZET1DI, ZET1DR, ZET2DI, + * ZET2DR, ZI, ZNI, ZNR, ZR, ZRI, ZRR, D1MACH, ZABS + INTEGER I, IB, IFLAG, IFN, IL, IN, INU, IUF, K, KDFLG, KFLAG, KK, + * KODE, MR, N, NAI, NDAI, NW, NZ, IDUM, J, IPARD, IC + DIMENSION BRY(3), YR(N), YI(N), ASUMR(2), ASUMI(2), BSUMR(2), + * BSUMI(2), PHIR(2), PHII(2), ARGR(2), ARGI(2), ZETA1R(2), + * ZETA1I(2), ZETA2R(2), ZETA2I(2), CYR(2), CYI(2), CIPR(4), + * CIPI(4), CSSR(3), CSRR(3) + DATA ZEROR,ZEROI,CONER,CR1R,CR1I,CR2R,CR2I / + 1 0.0D0, 0.0D0, 1.0D0, + 1 1.0D0,1.73205080756887729D0 , -0.5D0,-8.66025403784438647D-01 / + DATA HPI, PI, AIC / + 1 1.57079632679489662D+00, 3.14159265358979324D+00, + 1 1.26551212348464539D+00/ + DATA CIPR(1),CIPI(1),CIPR(2),CIPI(2),CIPR(3),CIPI(3),CIPR(4), + * CIPI(4) / + 1 1.0D0,0.0D0 , 0.0D0,-1.0D0 , -1.0D0,0.0D0 , 0.0D0,1.0D0 / +C + KDFLG = 1 + NZ = 0 +C----------------------------------------------------------------------- +C EXP(-ALIM)=EXP(-ELIM)/TOL=APPROX. ONE PRECISION GREATER THAN +C THE UNDERFLOW LIMIT +C----------------------------------------------------------------------- + CSCL = 1.0D0/TOL + CRSC = TOL + CSSR(1) = CSCL + CSSR(2) = CONER + CSSR(3) = CRSC + CSRR(1) = CRSC + CSRR(2) = CONER + CSRR(3) = CSCL + BRY(1) = 1.0D+3*D1MACH(1)/TOL + BRY(2) = 1.0D0/BRY(1) + BRY(3) = D1MACH(2) + ZRR = ZR + ZRI = ZI + IF (ZR.GE.0.0D0) GO TO 10 + ZRR = -ZR + ZRI = -ZI + 10 CONTINUE + YY = ZRI + ZNR = ZRI + ZNI = -ZRR + ZBR = ZRR + ZBI = ZRI + INU = INT(SNGL(FNU)) + FNF = FNU - DBLE(FLOAT(INU)) + ANG = -HPI*FNF + CAR = DCOS(ANG) + SAR = DSIN(ANG) + C2R = HPI*SAR + C2I = -HPI*CAR + KK = MOD(INU,4) + 1 + STR = C2R*CIPR(KK) - C2I*CIPI(KK) + STI = C2R*CIPI(KK) + C2I*CIPR(KK) + CSR = CR1R*STR - CR1I*STI + CSI = CR1R*STI + CR1I*STR + IF (YY.GT.0.0D0) GO TO 20 + ZNR = -ZNR + ZBI = -ZBI + 20 CONTINUE +C----------------------------------------------------------------------- +C K(FNU,Z) IS COMPUTED FROM H(2,FNU,-I*Z) WHERE Z IS IN THE FIRST +C QUADRANT. 
FOURTH QUADRANT VALUES (YY.LE.0.0E0) ARE COMPUTED BY +C CONJUGATION SINCE THE K FUNCTION IS REAL ON THE POSITIVE REAL AXIS +C----------------------------------------------------------------------- + J = 2 + DO 80 I=1,N +C----------------------------------------------------------------------- +C J FLIP FLOPS BETWEEN 1 AND 2 IN J = 3 - J +C----------------------------------------------------------------------- + J = 3 - J + FN = FNU + DBLE(FLOAT(I-1)) + CALL ZUNHJ(ZNR, ZNI, FN, 0, TOL, PHIR(J), PHII(J), ARGR(J), + * ARGI(J), ZETA1R(J), ZETA1I(J), ZETA2R(J), ZETA2I(J), ASUMR(J), + * ASUMI(J), BSUMR(J), BSUMI(J)) + IF (KODE.EQ.1) GO TO 30 + STR = ZBR + ZETA2R(J) + STI = ZBI + ZETA2I(J) + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = ZETA1R(J) - STR + S1I = ZETA1I(J) - STI + GO TO 40 + 30 CONTINUE + S1R = ZETA1R(J) - ZETA2R(J) + S1I = ZETA1I(J) - ZETA2I(J) + 40 CONTINUE +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW +C----------------------------------------------------------------------- + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 70 + IF (KDFLG.EQ.1) KFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 50 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIR(J),PHII(J),kind=KIND(1.0D0))) + AARG = ZABS(CMPLX(ARGR(J),ARGI(J),kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) - 0.25D0*DLOG(AARG) - AIC + IF (DABS(RS1).GT.ELIM) GO TO 70 + IF (KDFLG.EQ.1) KFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 50 + IF (KDFLG.EQ.1) KFLAG = 3 + 50 CONTINUE +C----------------------------------------------------------------------- +C SCALE S1 TO KEEP INTERMEDIATE ARITHMETIC ON SCALE NEAR +C EXPONENT EXTREMES +C----------------------------------------------------------------------- + C2R = ARGR(J)*CR2R - ARGI(J)*CR2I + C2I = ARGR(J)*CR2I + ARGI(J)*CR2R + CALL ZAIRY(C2R, C2I, 0, 2, AIR, AII, NAI, IDUM) + CALL ZAIRY(C2R, C2I, 1, 2, DAIR, DAII, NDAI, IDUM) + STR = DAIR*BSUMR(J) - DAII*BSUMI(J) + STI = DAIR*BSUMI(J) + DAII*BSUMR(J) + PTR = STR*CR2R - STI*CR2I + PTI = STR*CR2I + STI*CR2R + STR = PTR + (AIR*ASUMR(J)-AII*ASUMI(J)) + STI = PTI + (AIR*ASUMI(J)+AII*ASUMR(J)) + PTR = STR*PHIR(J) - STI*PHII(J) + PTI = STR*PHII(J) + STI*PHIR(J) + S2R = PTR*CSR - PTI*CSI + S2I = PTR*CSI + PTI*CSR + STR = DEXP(S1R)*CSSR(KFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S1R*S2I + S2R*S1I + S2R = STR + IF (KFLAG.NE.1) GO TO 60 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.NE.0) GO TO 70 + 60 CONTINUE + IF (YY.LE.0.0D0) S2I = -S2I + CYR(KDFLG) = S2R + CYI(KDFLG) = S2I + YR(I) = S2R*CSRR(KFLAG) + YI(I) = S2I*CSRR(KFLAG) + STR = CSI + CSI = -CSR + CSR = STR + IF (KDFLG.EQ.2) GO TO 85 + KDFLG = 2 + GO TO 80 + 70 CONTINUE + IF (RS1.GT.0.0D0) GO TO 320 +C----------------------------------------------------------------------- +C FOR ZR.LT.0.0, THE I FUNCTION TO BE ADDED WILL OVERFLOW +C----------------------------------------------------------------------- + IF (ZR.LT.0.0D0) GO TO 320 + KDFLG = 1 + YR(I)=ZEROR + YI(I)=ZEROI + NZ=NZ+1 + STR = CSI + CSI =-CSR + CSR = STR + IF (I.EQ.1) GO TO 80 + IF ((YR(I-1).EQ.ZEROR).AND.(YI(I-1).EQ.ZEROI)) GO TO 80 + YR(I-1)=ZEROR + YI(I-1)=ZEROI + NZ=NZ+1 + 80 CONTINUE + I = N + 85 CONTINUE + RAZR = 1.0D0/ZABS(CMPLX(ZRR,ZRI,kind=KIND(1.0D0))) + STR = ZRR*RAZR + STI = -ZRI*RAZR + RZR = (STR+STR)*RAZR + RZI = (STI+STI)*RAZR + CKR = FN*RZR + 
CKI = FN*RZI + IB = I + 1 + IF (N.LT.IB) GO TO 180 +C----------------------------------------------------------------------- +C TEST LAST MEMBER FOR UNDERFLOW AND OVERFLOW. SET SEQUENCE TO ZERO +C ON UNDERFLOW. +C----------------------------------------------------------------------- + FN = FNU + DBLE(FLOAT(N-1)) + IPARD = 1 + IF (MR.NE.0) IPARD = 0 + CALL ZUNHJ(ZNR, ZNI, FN, IPARD, TOL, PHIDR, PHIDI, ARGDR, ARGDI, + * ZET1DR, ZET1DI, ZET2DR, ZET2DI, ASUMDR, ASUMDI, BSUMDR, BSUMDI) + IF (KODE.EQ.1) GO TO 90 + STR = ZBR + ZET2DR + STI = ZBI + ZET2DI + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = ZET1DR - STR + S1I = ZET1DI - STI + GO TO 100 + 90 CONTINUE + S1R = ZET1DR - ZET2DR + S1I = ZET1DI - ZET2DI + 100 CONTINUE + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 105 + IF (DABS(RS1).LT.ALIM) GO TO 120 +C---------------------------------------------------------------------------- +C REFINE ESTIMATE AND TEST +C------------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIDR,PHIDI,kind=KIND(1.0D0))) + RS1 = RS1+DLOG(APHI) + IF (DABS(RS1).LT.ELIM) GO TO 120 + 105 CONTINUE + IF (RS1.GT.0.0D0) GO TO 320 +C----------------------------------------------------------------------- +C FOR ZR.LT.0.0, THE I FUNCTION TO BE ADDED WILL OVERFLOW +C----------------------------------------------------------------------- + IF (ZR.LT.0.0D0) GO TO 320 + NZ = N + DO 106 I=1,N + YR(I) = ZEROR + YI(I) = ZEROI + 106 CONTINUE + RETURN + 120 CONTINUE + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + C1R = CSRR(KFLAG) + ASCLE = BRY(KFLAG) + DO 130 I=IB,N + C2R = S2R + C2I = S2I + S2R = CKR*C2R - CKI*C2I + S1R + S2I = CKR*C2I + CKI*C2R + S1I + S1R = C2R + S1I = C2I + CKR = CKR + RZR + CKI = CKI + RZI + C2R = S2R*C1R + C2I = S2I*C1R + YR(I) = C2R + YI(I) = C2I + IF (KFLAG.GE.3) GO TO 130 + STR = DABS(C2R) + STI = DABS(C2I) + C2M = DMAX1(STR,STI) + IF (C2M.LE.ASCLE) GO TO 130 + KFLAG = KFLAG + 1 + ASCLE = BRY(KFLAG) + S1R = S1R*C1R + S1I = S1I*C1R + S2R = C2R + S2I = C2I + S1R = S1R*CSSR(KFLAG) + S1I = S1I*CSSR(KFLAG) + S2R = S2R*CSSR(KFLAG) + S2I = S2I*CSSR(KFLAG) + C1R = CSRR(KFLAG) + 130 CONTINUE + 180 CONTINUE + IF (MR.EQ.0) RETURN +C----------------------------------------------------------------------- +C ANALYTIC CONTINUATION FOR RE(Z).LT.0.0D0 +C----------------------------------------------------------------------- + NZ = 0 + FMR = DBLE(FLOAT(MR)) + SGN = -DSIGN(PI,FMR) +C----------------------------------------------------------------------- +C CSPN AND CSGN ARE COEFF OF K AND I FUNCIONS RESP. +C----------------------------------------------------------------------- + CSGNI = SGN + IF (YY.LE.0.0D0) CSGNI = -CSGNI + IFN = INU + N - 1 + ANG = FNF*SGN + CSPNR = DCOS(ANG) + CSPNI = DSIN(ANG) + IF (MOD(IFN,2).EQ.0) GO TO 190 + CSPNR = -CSPNR + CSPNI = -CSPNI + 190 CONTINUE +C----------------------------------------------------------------------- +C CS=COEFF OF THE J FUNCTION TO GET THE I FUNCTION. I(FNU,Z) IS +C COMPUTED FROM EXP(I*FNU*HPI)*J(FNU,-I*Z) WHERE Z IS IN THE FIRST +C QUADRANT. 
FOURTH QUADRANT VALUES (YY.LE.0.0E0) ARE COMPUTED BY +C CONJUGATION SINCE THE I FUNCTION IS REAL ON THE POSITIVE REAL AXIS +C----------------------------------------------------------------------- + CSR = SAR*CSGNI + CSI = CAR*CSGNI + IN = MOD(IFN,4) + 1 + C2R = CIPR(IN) + C2I = CIPI(IN) + STR = CSR*C2R + CSI*C2I + CSI = -CSR*C2I + CSI*C2R + CSR = STR + ASC = BRY(1) + IUF = 0 + KK = N + KDFLG = 1 + IB = IB - 1 + IC = IB - 1 + DO 290 K=1,N + FN = FNU + DBLE(FLOAT(KK-1)) +C----------------------------------------------------------------------- +C LOGIC TO SORT OUT CASES WHOSE PARAMETERS WERE SET FOR THE K +C FUNCTION ABOVE +C----------------------------------------------------------------------- + IF (N.GT.2) GO TO 175 + 172 CONTINUE + PHIDR = PHIR(J) + PHIDI = PHII(J) + ARGDR = ARGR(J) + ARGDI = ARGI(J) + ZET1DR = ZETA1R(J) + ZET1DI = ZETA1I(J) + ZET2DR = ZETA2R(J) + ZET2DI = ZETA2I(J) + ASUMDR = ASUMR(J) + ASUMDI = ASUMI(J) + BSUMDR = BSUMR(J) + BSUMDI = BSUMI(J) + J = 3 - J + GO TO 210 + 175 CONTINUE + IF ((KK.EQ.N).AND.(IB.LT.N)) GO TO 210 + IF ((KK.EQ.IB).OR.(KK.EQ.IC)) GO TO 172 + CALL ZUNHJ(ZNR, ZNI, FN, 0, TOL, PHIDR, PHIDI, ARGDR, + * ARGDI, ZET1DR, ZET1DI, ZET2DR, ZET2DI, ASUMDR, + * ASUMDI, BSUMDR, BSUMDI) + 210 CONTINUE + IF (KODE.EQ.1) GO TO 220 + STR = ZBR + ZET2DR + STI = ZBI + ZET2DI + RAST = FN/ZABS(CMPLX(STR,STI,kind=KIND(1.0D0))) + STR = STR*RAST*RAST + STI = -STI*RAST*RAST + S1R = -ZET1DR + STR + S1I = -ZET1DI + STI + GO TO 230 + 220 CONTINUE + S1R = -ZET1DR + ZET2DR + S1I = -ZET1DI + ZET2DI + 230 CONTINUE +C----------------------------------------------------------------------- +C TEST FOR UNDERFLOW AND OVERFLOW +C----------------------------------------------------------------------- + RS1 = S1R + IF (DABS(RS1).GT.ELIM) GO TO 280 + IF (KDFLG.EQ.1) IFLAG = 2 + IF (DABS(RS1).LT.ALIM) GO TO 240 +C----------------------------------------------------------------------- +C REFINE TEST AND SCALE +C----------------------------------------------------------------------- + APHI = ZABS(CMPLX(PHIDR,PHIDI,kind=KIND(1.0D0))) + AARG = ZABS(CMPLX(ARGDR,ARGDI,kind=KIND(1.0D0))) + RS1 = RS1 + DLOG(APHI) - 0.25D0*DLOG(AARG) - AIC + IF (DABS(RS1).GT.ELIM) GO TO 280 + IF (KDFLG.EQ.1) IFLAG = 1 + IF (RS1.LT.0.0D0) GO TO 240 + IF (KDFLG.EQ.1) IFLAG = 3 + 240 CONTINUE + CALL ZAIRY(ARGDR, ARGDI, 0, 2, AIR, AII, NAI, IDUM) + CALL ZAIRY(ARGDR, ARGDI, 1, 2, DAIR, DAII, NDAI, IDUM) + STR = DAIR*BSUMDR - DAII*BSUMDI + STI = DAIR*BSUMDI + DAII*BSUMDR + STR = STR + (AIR*ASUMDR-AII*ASUMDI) + STI = STI + (AIR*ASUMDI+AII*ASUMDR) + PTR = STR*PHIDR - STI*PHIDI + PTI = STR*PHIDI + STI*PHIDR + S2R = PTR*CSR - PTI*CSI + S2I = PTR*CSI + PTI*CSR + STR = DEXP(S1R)*CSSR(IFLAG) + S1R = STR*DCOS(S1I) + S1I = STR*DSIN(S1I) + STR = S2R*S1R - S2I*S1I + S2I = S2R*S1I + S2I*S1R + S2R = STR + IF (IFLAG.NE.1) GO TO 250 + CALL ZUCHK(S2R, S2I, NW, BRY(1), TOL) + IF (NW.EQ.0) GO TO 250 + S2R = ZEROR + S2I = ZEROI + 250 CONTINUE + IF (YY.LE.0.0D0) S2I = -S2I + CYR(KDFLG) = S2R + CYI(KDFLG) = S2I + C2R = S2R + C2I = S2I + S2R = S2R*CSRR(IFLAG) + S2I = S2I*CSRR(IFLAG) +C----------------------------------------------------------------------- +C ADD I AND K FUNCTIONS, K SEQUENCE IN Y(I), I=1,N +C----------------------------------------------------------------------- + S1R = YR(KK) + S1I = YI(KK) + IF (KODE.EQ.1) GO TO 270 + CALL ZS1S2(ZRR, ZRI, S1R, S1I, S2R, S2I, NW, ASC, ALIM, IUF) + NZ = NZ + NW + 270 CONTINUE + YR(KK) = S1R*CSPNR - S1I*CSPNI + S2R + YI(KK) = S1R*CSPNI + S1I*CSPNR + S2I + KK = KK - 1 + CSPNR = -CSPNR + CSPNI = 
-CSPNI + STR = CSI + CSI = -CSR + CSR = STR + IF (C2R.NE.0.0D0 .OR. C2I.NE.0.0D0) GO TO 255 + KDFLG = 1 + GO TO 290 + 255 CONTINUE + IF (KDFLG.EQ.2) GO TO 295 + KDFLG = 2 + GO TO 290 + 280 CONTINUE + IF (RS1.GT.0.0D0) GO TO 320 + S2R = ZEROR + S2I = ZEROI + GO TO 250 + 290 CONTINUE + K = N + 295 CONTINUE + IL = N - K + IF (IL.EQ.0) RETURN +C----------------------------------------------------------------------- +C RECUR BACKWARD FOR REMAINDER OF I SEQUENCE AND ADD IN THE +C K FUNCTIONS, SCALING THE I SEQUENCE DURING RECURRENCE TO KEEP +C INTERMEDIATE ARITHMETIC ON SCALE NEAR EXPONENT EXTREMES. +C----------------------------------------------------------------------- + S1R = CYR(1) + S1I = CYI(1) + S2R = CYR(2) + S2I = CYI(2) + CSR = CSRR(IFLAG) + ASCLE = BRY(IFLAG) + FN = DBLE(FLOAT(INU+IL)) + DO 310 I=1,IL + C2R = S2R + C2I = S2I + S2R = S1R + (FN+FNF)*(RZR*C2R-RZI*C2I) + S2I = S1I + (FN+FNF)*(RZR*C2I+RZI*C2R) + S1R = C2R + S1I = C2I + FN = FN - 1.0D0 + C2R = S2R*CSR + C2I = S2I*CSR + CKR = C2R + CKI = C2I + C1R = YR(KK) + C1I = YI(KK) + IF (KODE.EQ.1) GO TO 300 + CALL ZS1S2(ZRR, ZRI, C1R, C1I, C2R, C2I, NW, ASC, ALIM, IUF) + NZ = NZ + NW + 300 CONTINUE + YR(KK) = C1R*CSPNR - C1I*CSPNI + C2R + YI(KK) = C1R*CSPNI + C1I*CSPNR + C2I + KK = KK - 1 + CSPNR = -CSPNR + CSPNI = -CSPNI + IF (IFLAG.GE.3) GO TO 310 + C2R = DABS(CKR) + C2I = DABS(CKI) + C2M = DMAX1(C2R,C2I) + IF (C2M.LE.ASCLE) GO TO 310 + IFLAG = IFLAG + 1 + ASCLE = BRY(IFLAG) + S1R = S1R*CSR + S1I = S1I*CSR + S2R = CKR + S2I = CKI + S1R = S1R*CSSR(IFLAG) + S1I = S1I*CSSR(IFLAG) + S2R = S2R*CSSR(IFLAG) + S2I = S2I*CSSR(IFLAG) + CSR = CSRR(IFLAG) + 310 CONTINUE + RETURN + 320 CONTINUE + NZ = -1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuoik.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuoik.f new file mode 100644 index 0000000..3715d6d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zuoik.f @@ -0,0 +1,194 @@ + SUBROUTINE ZUOIK(ZR, ZI, FNU, KODE, IKFLG, N, YR, YI, NUF, TOL, + * ELIM, ALIM) +C***BEGIN PROLOGUE ZUOIK +C***REFER TO ZBESI,ZBESK,ZBESH +C +C ZUOIK COMPUTES THE LEADING TERMS OF THE UNIFORM ASYMPTOTIC +C EXPANSIONS FOR THE I AND K FUNCTIONS AND COMPARES THEM +C (IN LOGARITHMIC FORM) TO ALIM AND ELIM FOR OVER AND UNDERFLOW +C WHERE ALIM.LT.ELIM. IF THE MAGNITUDE, BASED ON THE LEADING +C EXPONENTIAL, IS LESS THAN ALIM OR GREATER THAN -ALIM, THEN +C THE RESULT IS ON SCALE. IF NOT, THEN A REFINED TEST USING OTHER +C MULTIPLIERS (IN LOGARITHMIC FORM) IS MADE BASED ON ELIM. HERE +C EXP(-ELIM)=SMALLEST MACHINE NUMBER*1.0E+3 AND EXP(-ALIM)= +C EXP(-ELIM)/TOL +C +C IKFLG=1 MEANS THE I SEQUENCE IS TESTED +C =2 MEANS THE K SEQUENCE IS TESTED +C NUF = 0 MEANS THE LAST MEMBER OF THE SEQUENCE IS ON SCALE +C =-1 MEANS AN OVERFLOW WOULD OCCUR +C IKFLG=1 AND NUF.GT.0 MEANS THE LAST NUF Y VALUES WERE SET TO ZERO +C THE FIRST N-NUF VALUES MUST BE SET BY ANOTHER ROUTINE +C IKFLG=2 AND NUF.EQ.N MEANS ALL Y VALUES WERE SET TO ZERO +C IKFLG=2 AND 0.LT.NUF.LT.N NOT CONSIDERED. 
Y MUST BE SET BY +C ANOTHER ROUTINE +C +C***ROUTINES CALLED ZUCHK,ZUNHJ,ZUNIK,D1MACH,ZABS,ZLOG +C***END PROLOGUE ZUOIK +C COMPLEX ARG,ASUM,BSUM,CWRK,CZ,CZERO,PHI,SUM,Y,Z,ZB,ZETA1,ZETA2,ZN, +C *ZR + DOUBLE PRECISION AARG, AIC, ALIM, APHI, ARGI, ARGR, ASUMI, ASUMR, + * ASCLE, AX, AY, BSUMI, BSUMR, CWRKI, CWRKR, CZI, CZR, ELIM, FNN, + * FNU, GNN, GNU, PHII, PHIR, RCZ, STR, STI, SUMI, SUMR, TOL, YI, + * YR, ZBI, ZBR, ZEROI, ZEROR, ZETA1I, ZETA1R, ZETA2I, ZETA2R, ZI, + * ZNI, ZNR, ZR, ZRI, ZRR, D1MACH, ZABS + INTEGER I, IDUM, IFORM, IKFLG, INIT, KODE, N, NN, NUF, NW + DIMENSION YR(N), YI(N), CWRKR(16), CWRKI(16) + DATA ZEROR,ZEROI / 0.0D0, 0.0D0 / + DATA AIC / 1.265512123484645396D+00 / + NUF = 0 + NN = N + ZRR = ZR + ZRI = ZI + IF (ZR.GE.0.0D0) GO TO 10 + ZRR = -ZR + ZRI = -ZI + 10 CONTINUE + ZBR = ZRR + ZBI = ZRI + AX = DABS(ZR)*1.7321D0 + AY = DABS(ZI) + IFORM = 1 + IF (AY.GT.AX) IFORM = 2 + GNU = DMAX1(FNU,1.0D0) + IF (IKFLG.EQ.1) GO TO 20 + FNN = DBLE(FLOAT(NN)) + GNN = FNU + FNN - 1.0D0 + GNU = DMAX1(GNN,FNN) + 20 CONTINUE +C----------------------------------------------------------------------- +C ONLY THE MAGNITUDE OF ARG AND PHI ARE NEEDED ALONG WITH THE +C REAL PARTS OF ZETA1, ZETA2 AND ZB. NO ATTEMPT IS MADE TO GET +C THE SIGN OF THE IMAGINARY PART CORRECT. +C----------------------------------------------------------------------- + IF (IFORM.EQ.2) GO TO 30 + INIT = 0 + CALL ZUNIK(ZRR, ZRI, GNU, IKFLG, 1, TOL, INIT, PHIR, PHII, + * ZETA1R, ZETA1I, ZETA2R, ZETA2I, SUMR, SUMI, CWRKR, CWRKI) + CZR = -ZETA1R + ZETA2R + CZI = -ZETA1I + ZETA2I + GO TO 50 + 30 CONTINUE + ZNR = ZRI + ZNI = -ZRR + IF (ZI.GT.0.0D0) GO TO 40 + ZNR = -ZNR + 40 CONTINUE + CALL ZUNHJ(ZNR, ZNI, GNU, 1, TOL, PHIR, PHII, ARGR, ARGI, ZETA1R, + * ZETA1I, ZETA2R, ZETA2I, ASUMR, ASUMI, BSUMR, BSUMI) + CZR = -ZETA1R + ZETA2R + CZI = -ZETA1I + ZETA2I + AARG = ZABS(CMPLX(ARGR,ARGI,kind=KIND(1.0D0))) + 50 CONTINUE + IF (KODE.EQ.1) GO TO 60 + CZR = CZR - ZBR + CZI = CZI - ZBI + 60 CONTINUE + IF (IKFLG.EQ.1) GO TO 70 + CZR = -CZR + CZI = -CZI + 70 CONTINUE + APHI = ZABS(CMPLX(PHIR,PHII,kind=KIND(1.0D0))) + RCZ = CZR +C----------------------------------------------------------------------- +C OVERFLOW TEST +C----------------------------------------------------------------------- + IF (RCZ.GT.ELIM) GO TO 210 + IF (RCZ.LT.ALIM) GO TO 80 + RCZ = RCZ + DLOG(APHI) + IF (IFORM.EQ.2) RCZ = RCZ - 0.25D0*DLOG(AARG) - AIC + IF (RCZ.GT.ELIM) GO TO 210 + GO TO 130 + 80 CONTINUE +C----------------------------------------------------------------------- +C UNDERFLOW TEST +C----------------------------------------------------------------------- + IF (RCZ.LT.(-ELIM)) GO TO 90 + IF (RCZ.GT.(-ALIM)) GO TO 130 + RCZ = RCZ + DLOG(APHI) + IF (IFORM.EQ.2) RCZ = RCZ - 0.25D0*DLOG(AARG) - AIC + IF (RCZ.GT.(-ELIM)) GO TO 110 + 90 CONTINUE + DO 100 I=1,NN + YR(I) = ZEROR + YI(I) = ZEROI + 100 CONTINUE + NUF = NN + RETURN + 110 CONTINUE + ASCLE = 1.0D+3*D1MACH(1)/TOL + CALL ZLOG(PHIR, PHII, STR, STI, IDUM) + CZR = CZR + STR + CZI = CZI + STI + IF (IFORM.EQ.1) GO TO 120 + CALL ZLOG(ARGR, ARGI, STR, STI, IDUM) + CZR = CZR - 0.25D0*STR - AIC + CZI = CZI - 0.25D0*STI + 120 CONTINUE + AX = DEXP(RCZ)/TOL + AY = CZI + CZR = AX*DCOS(AY) + CZI = AX*DSIN(AY) + CALL ZUCHK(CZR, CZI, NW, ASCLE, TOL) + IF (NW.NE.0) GO TO 90 + 130 CONTINUE + IF (IKFLG.EQ.2) RETURN + IF (N.EQ.1) RETURN +C----------------------------------------------------------------------- +C SET UNDERFLOWS ON I SEQUENCE +C----------------------------------------------------------------------- + 140 
CONTINUE + GNU = FNU + DBLE(FLOAT(NN-1)) + IF (IFORM.EQ.2) GO TO 150 + INIT = 0 + CALL ZUNIK(ZRR, ZRI, GNU, IKFLG, 1, TOL, INIT, PHIR, PHII, + * ZETA1R, ZETA1I, ZETA2R, ZETA2I, SUMR, SUMI, CWRKR, CWRKI) + CZR = -ZETA1R + ZETA2R + CZI = -ZETA1I + ZETA2I + GO TO 160 + 150 CONTINUE + CALL ZUNHJ(ZNR, ZNI, GNU, 1, TOL, PHIR, PHII, ARGR, ARGI, ZETA1R, + * ZETA1I, ZETA2R, ZETA2I, ASUMR, ASUMI, BSUMR, BSUMI) + CZR = -ZETA1R + ZETA2R + CZI = -ZETA1I + ZETA2I + AARG = ZABS(CMPLX(ARGR,ARGI,kind=KIND(1.0D0))) + 160 CONTINUE + IF (KODE.EQ.1) GO TO 170 + CZR = CZR - ZBR + CZI = CZI - ZBI + 170 CONTINUE + APHI = ZABS(CMPLX(PHIR,PHII,kind=KIND(1.0D0))) + RCZ = CZR + IF (RCZ.LT.(-ELIM)) GO TO 180 + IF (RCZ.GT.(-ALIM)) RETURN + RCZ = RCZ + DLOG(APHI) + IF (IFORM.EQ.2) RCZ = RCZ - 0.25D0*DLOG(AARG) - AIC + IF (RCZ.GT.(-ELIM)) GO TO 190 + 180 CONTINUE + YR(NN) = ZEROR + YI(NN) = ZEROI + NN = NN - 1 + NUF = NUF + 1 + IF (NN.EQ.0) RETURN + GO TO 140 + 190 CONTINUE + ASCLE = 1.0D+3*D1MACH(1)/TOL + CALL ZLOG(PHIR, PHII, STR, STI, IDUM) + CZR = CZR + STR + CZI = CZI + STI + IF (IFORM.EQ.1) GO TO 200 + CALL ZLOG(ARGR, ARGI, STR, STI, IDUM) + CZR = CZR - 0.25D0*STR - AIC + CZI = CZI - 0.25D0*STI + 200 CONTINUE + AX = DEXP(RCZ)/TOL + AY = CZI + CZR = AX*DCOS(AY) + CZI = AX*DSIN(AY) + CALL ZUCHK(CZR, CZI, NW, ASCLE, TOL) + IF (NW.NE.0) GO TO 180 + RETURN + 210 CONTINUE + NUF = -1 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zwrsk.f b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zwrsk.f new file mode 100644 index 0000000..37cb8d8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/amoslib/zwrsk.f @@ -0,0 +1,94 @@ + SUBROUTINE ZWRSK(ZRR, ZRI, FNU, KODE, N, YR, YI, NZ, CWR, CWI, + * TOL, ELIM, ALIM) +C***BEGIN PROLOGUE ZWRSK +C***REFER TO ZBESI,ZBESK +C +C ZWRSK COMPUTES THE I BESSEL FUNCTION FOR RE(Z).GE.0.0 BY +C NORMALIZING THE I FUNCTION RATIOS FROM ZRATI BY THE WRONSKIAN +C +C***ROUTINES CALLED D1MACH,ZBKNU,ZRATI,ZABS +C***END PROLOGUE ZWRSK +C COMPLEX CINU,CSCL,CT,CW,C1,C2,RCT,ST,Y,ZR + DOUBLE PRECISION ACT, ACW, ALIM, ASCLE, CINUI, CINUR, CSCLR, CTI, + * CTR, CWI, CWR, C1I, C1R, C2I, C2R, ELIM, FNU, PTI, PTR, RACT, + * STI, STR, TOL, YI, YR, ZRI, ZRR, ZABS, D1MACH + INTEGER I, KODE, N, NW, NZ + DIMENSION YR(N), YI(N), CWR(2), CWI(2) +C----------------------------------------------------------------------- +C I(FNU+I-1,Z) BY BACKWARD RECURRENCE FOR RATIOS +C Y(I)=I(FNU+I,Z)/I(FNU+I-1,Z) FROM CRATI NORMALIZED BY THE +C WRONSKIAN WITH K(FNU,Z) AND K(FNU+1,Z) FROM CBKNU. +C----------------------------------------------------------------------- + NZ = 0 + CALL ZBKNU(ZRR, ZRI, FNU, KODE, 2, CWR, CWI, NW, TOL, ELIM, ALIM) + IF (NW.NE.0) GO TO 50 + CALL ZRATI(ZRR, ZRI, FNU, N, YR, YI, TOL) +C----------------------------------------------------------------------- +C RECUR FORWARD ON I(FNU+1,Z) = R(FNU,Z)*I(FNU,Z), +C R(FNU+J-1,Z)=Y(J), J=1,...,N +C----------------------------------------------------------------------- + CINUR = 1.0D0 + CINUI = 0.0D0 + IF (KODE.EQ.1) GO TO 10 + CINUR = DCOS(ZRI) + CINUI = DSIN(ZRI) + 10 CONTINUE +C----------------------------------------------------------------------- +C ON LOW EXPONENT MACHINES THE K FUNCTIONS CAN BE CLOSE TO BOTH +C THE UNDER AND OVERFLOW LIMITS AND THE NORMALIZATION MUST BE +C SCALED TO PREVENT OVER OR UNDERFLOW. CUOIK HAS DETERMINED THAT +C THE RESULT IS ON SCALE. 
+C----------------------------------------------------------------------- + ACW = ZABS(CMPLX(CWR(2),CWI(2),kind=KIND(1.0D0))) + ASCLE = 1.0D+3*D1MACH(1)/TOL + CSCLR = 1.0D0 + IF (ACW.GT.ASCLE) GO TO 20 + CSCLR = 1.0D0/TOL + GO TO 30 + 20 CONTINUE + ASCLE = 1.0D0/ASCLE + IF (ACW.LT.ASCLE) GO TO 30 + CSCLR = TOL + 30 CONTINUE + C1R = CWR(1)*CSCLR + C1I = CWI(1)*CSCLR + C2R = CWR(2)*CSCLR + C2I = CWI(2)*CSCLR + STR = YR(1) + STI = YI(1) +C----------------------------------------------------------------------- +C CINU=CINU*(CONJG(CT)/CABS(CT))*(1.0D0/CABS(CT) PREVENTS +C UNDER- OR OVERFLOW PREMATURELY BY SQUARING CABS(CT) +C----------------------------------------------------------------------- + PTR = STR*C1R - STI*C1I + PTI = STR*C1I + STI*C1R + PTR = PTR + C2R + PTI = PTI + C2I + CTR = ZRR*PTR - ZRI*PTI + CTI = ZRR*PTI + ZRI*PTR + ACT = ZABS(CMPLX(CTR,CTI,kind=KIND(1.0D0))) + RACT = 1.0D0/ACT + CTR = CTR*RACT + CTI = -CTI*RACT + PTR = CINUR*RACT + PTI = CINUI*RACT + CINUR = PTR*CTR - PTI*CTI + CINUI = PTR*CTI + PTI*CTR + YR(1) = CINUR*CSCLR + YI(1) = CINUI*CSCLR + IF (N.EQ.1) RETURN + DO 40 I=2,N + PTR = STR*CINUR - STI*CINUI + CINUI = STR*CINUI + STI*CINUR + CINUR = PTR + STR = YR(I) + STI = YI(I) + YR(I) = CINUR*CSCLR + YI(I) = CINUI*CSCLR + 40 CONTINUE + RETURN + 50 CONTINUE + NZ = -1 + IF(NW.EQ.(-2)) NZ=-2 + RETURN + END diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/amos/doc.go b/vendor/gonum.org/v1/gonum/mathext/internal/amos/doc.go new file mode 100644 index 0000000..32f96c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/amos/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package amos implements functions originally in the Netlib code by Donald Amos. +package amos // import "gonum.org/v1/gonum/mathext/internal/amos" diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/cephes.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/cephes.go new file mode 100644 index 0000000..d01552a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/cephes.go @@ -0,0 +1,28 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cephes + +import "math" + +/* +Additional copyright information: + +Code in this package is adapted from the Cephes library (http://www.netlib.org/cephes/). +There is no explicit licence on Netlib, but the author has agreed to a BSD release. +See https://github.com/deepmind/torch-cephes/blob/master/LICENSE.txt and +https://lists.debian.org/debian-legal/2004/12/msg00295.html +*/ + +var ( + badParamOutOfBounds = "cephes: parameter out of bounds" + badParamFunctionSingularity = "cephes: function singularity" +) + +const ( + machEp = 1.0 / (1 << 53) + maxLog = 1024 * math.Ln2 + minLog = -1075 * math.Ln2 + maxIter = 2000 +) diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/doc.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/doc.go new file mode 100644 index 0000000..086c469 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cephes implements functions originally in the Netlib code by Stephen Mosher. 
+package cephes // import "gonum.org/v1/gonum/mathext/internal/cephes" diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igam.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igam.go new file mode 100644 index 0000000..9bd0445 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igam.go @@ -0,0 +1,311 @@ +// Derived from SciPy's special/cephes/igam.c and special/cephes/igam.h +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/igam.c +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/igam.h +// Made freely available by Stephen L. Moshier without support or guarantee. + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©1985, ©1987 by Stephen L. Moshier +// Portions Copyright ©2016 The Gonum Authors. All rights reserved. + +package cephes + +import "math" + +const ( + igamDimK = 25 + igamDimN = 25 + igam = 1 + igamC = 0 + igamSmall = 20 + igamLarge = 200 + igamSmallRatio = 0.3 + igamLargeRatio = 4.5 +) + +var igamCoefs = [igamDimK][igamDimN]float64{ + {-3.3333333333333333e-1, 8.3333333333333333e-2, -1.4814814814814815e-2, 1.1574074074074074e-3, 3.527336860670194e-4, -1.7875514403292181e-4, 3.9192631785224378e-5, -2.1854485106799922e-6, -1.85406221071516e-6, 8.296711340953086e-7, -1.7665952736826079e-7, 6.7078535434014986e-9, 1.0261809784240308e-8, -4.3820360184533532e-9, 9.1476995822367902e-10, -2.551419399494625e-11, -5.8307721325504251e-11, 2.4361948020667416e-11, -5.0276692801141756e-12, 1.1004392031956135e-13, 3.3717632624009854e-13, -1.3923887224181621e-13, 2.8534893807047443e-14, -5.1391118342425726e-16, -1.9752288294349443e-15}, + {-1.8518518518518519e-3, -3.4722222222222222e-3, 2.6455026455026455e-3, -9.9022633744855967e-4, 2.0576131687242798e-4, -4.0187757201646091e-7, -1.8098550334489978e-5, 7.6491609160811101e-6, -1.6120900894563446e-6, 4.6471278028074343e-9, 1.378633446915721e-7, -5.752545603517705e-8, 1.1951628599778147e-8, -1.7543241719747648e-11, -1.0091543710600413e-9, 4.1627929918425826e-10, -8.5639070264929806e-11, 6.0672151016047586e-14, 7.1624989648114854e-12, -2.9331866437714371e-12, 5.9966963656836887e-13, -2.1671786527323314e-16, -4.9783399723692616e-14, 2.0291628823713425e-14, -4.13125571381061e-15}, + {4.1335978835978836e-3, -2.6813271604938272e-3, 7.7160493827160494e-4, 2.0093878600823045e-6, -1.0736653226365161e-4, 5.2923448829120125e-5, -1.2760635188618728e-5, 3.4235787340961381e-8, 1.3721957309062933e-6, -6.298992138380055e-7, 1.4280614206064242e-7, -2.0477098421990866e-10, -1.4092529910867521e-8, 6.228974084922022e-9, -1.3670488396617113e-9, 9.4283561590146782e-13, 1.2872252400089318e-10, -5.5645956134363321e-11, 1.1975935546366981e-11, -4.1689782251838635e-15, -1.0940640427884594e-12, 4.6622399463901357e-13, -9.905105763906906e-14, 1.8931876768373515e-17, 8.8592218725911273e-15}, + {6.4943415637860082e-4, 2.2947209362139918e-4, -4.6918949439525571e-4, 2.6772063206283885e-4, -7.5618016718839764e-5, -2.3965051138672967e-7, 1.1082654115347302e-5, -5.6749528269915966e-6, 1.4230900732435884e-6, -2.7861080291528142e-11, -1.6958404091930277e-7, 8.0994649053880824e-8, -1.9111168485973654e-8, 2.3928620439808118e-12, 2.0620131815488798e-9, -9.4604966618551322e-10, 2.1541049775774908e-10, -1.388823336813903e-14, -2.1894761681963939e-11, 9.7909989511716851e-12, -2.1782191880180962e-12, 6.2088195734079014e-17, 2.126978363279737e-13, -9.3446887915174333e-14, 2.0453671226782849e-14}, + {-8.618882909167117e-4, 7.8403922172006663e-4, 
-2.9907248030319018e-4, -1.4638452578843418e-6, 6.6414982154651222e-5, -3.9683650471794347e-5, 1.1375726970678419e-5, 2.5074972262375328e-10, -1.6954149536558306e-6, 8.9075075322053097e-7, -2.2929348340008049e-7, 2.956794137544049e-11, 2.8865829742708784e-8, -1.4189739437803219e-8, 3.4463580499464897e-9, -2.3024517174528067e-13, -3.9409233028046405e-10, 1.8602338968504502e-10, -4.356323005056618e-11, 1.2786001016296231e-15, 4.6792750266579195e-12, -2.1492464706134829e-12, 4.9088156148096522e-13, -6.3385914848915603e-18, -5.0453320690800944e-14}, + {-3.3679855336635815e-4, -6.9728137583658578e-5, 2.7727532449593921e-4, -1.9932570516188848e-4, 6.7977804779372078e-5, 1.419062920643967e-7, -1.3594048189768693e-5, 8.0184702563342015e-6, -2.2914811765080952e-6, -3.252473551298454e-10, 3.4652846491085265e-7, -1.8447187191171343e-7, 4.8240967037894181e-8, -1.7989466721743515e-14, -6.3061945000135234e-9, 3.1624176287745679e-9, -7.8409242536974293e-10, 5.1926791652540407e-15, 9.3589442423067836e-11, -4.5134262161632782e-11, 1.0799129993116827e-11, -3.661886712685252e-17, -1.210902069055155e-12, 5.6807435849905643e-13, -1.3249659916340829e-13}, + {5.3130793646399222e-4, -5.9216643735369388e-4, 2.7087820967180448e-4, 7.9023532326603279e-7, -8.1539693675619688e-5, 5.6116827531062497e-5, -1.8329116582843376e-5, -3.0796134506033048e-9, 3.4651553688036091e-6, -2.0291327396058604e-6, 5.7887928631490037e-7, 2.338630673826657e-13, -8.8286007463304835e-8, 4.7435958880408128e-8, -1.2545415020710382e-8, 8.6496488580102925e-14, 1.6846058979264063e-9, -8.5754928235775947e-10, 2.1598224929232125e-10, -7.6132305204761539e-16, -2.6639822008536144e-11, 1.3065700536611057e-11, -3.1799163902367977e-12, 4.7109761213674315e-18, 3.6902800842763467e-13}, + {3.4436760689237767e-4, 5.1717909082605922e-5, -3.3493161081142236e-4, 2.812695154763237e-4, -1.0976582244684731e-4, -1.2741009095484485e-7, 2.7744451511563644e-5, -1.8263488805711333e-5, 5.7876949497350524e-6, 4.9387589339362704e-10, -1.0595367014026043e-6, 6.1667143761104075e-7, -1.7562973359060462e-7, -1.2974473287015439e-12, 2.695423606288966e-8, -1.4578352908731271e-8, 3.887645959386175e-9, -3.8810022510194121e-17, -5.3279941738772867e-10, 2.7437977643314845e-10, -6.9957960920705679e-11, 2.5899863874868481e-17, 8.8566890996696381e-12, -4.403168815871311e-12, 1.0865561947091654e-12}, + {-6.5262391859530942e-4, 8.3949872067208728e-4, -4.3829709854172101e-4, -6.969091458420552e-7, 1.6644846642067548e-4, -1.2783517679769219e-4, 4.6299532636913043e-5, 4.5579098679227077e-9, -1.0595271125805195e-5, 6.7833429048651666e-6, -2.1075476666258804e-6, -1.7213731432817145e-11, 3.7735877416110979e-7, -2.1867506700122867e-7, 6.2202288040189269e-8, 6.5977038267330006e-16, -9.5903864974256858e-9, 5.2132144922808078e-9, -1.3991589583935709e-9, 5.382058999060575e-16, 1.9484714275467745e-10, -1.0127287556389682e-10, 2.6077347197254926e-11, -5.0904186999932993e-18, -3.3721464474854592e-12}, + {-5.9676129019274625e-4, -7.2048954160200106e-5, 6.7823088376673284e-4, -6.4014752602627585e-4, 2.7750107634328704e-4, 1.8197008380465151e-7, -8.4795071170685032e-5, 6.105192082501531e-5, -2.1073920183404862e-5, -8.8585890141255994e-10, 4.5284535953805377e-6, -2.8427815022504408e-6, 8.7082341778646412e-7, 3.6886101871706965e-12, -1.5344695190702061e-7, 8.862466778790695e-8, -2.5184812301826817e-8, -1.0225912098215092e-14, 3.8969470758154777e-9, -2.1267304792235635e-9, 5.7370135528051385e-10, -1.887749850169741e-19, -8.0931538694657866e-11, 4.2382723283449199e-11, -1.1002224534207726e-11}, + 
{1.3324454494800656e-3, -1.9144384985654775e-3, 1.1089369134596637e-3, 9.932404122642299e-7, -5.0874501293093199e-4, 4.2735056665392884e-4, -1.6858853767910799e-4, -8.1301893922784998e-9, 4.5284402370562147e-5, -3.127053674781734e-5, 1.044986828530338e-5, 4.8435226265680926e-11, -2.1482565873456258e-6, 1.329369701097492e-6, -4.0295693092101029e-7, -1.7567877666323291e-13, 7.0145043163668257e-8, -4.040787734999483e-8, 1.1474026743371963e-8, 3.9642746853563325e-18, -1.7804938269892714e-9, 9.7480262548731646e-10, -2.6405338676507616e-10, 5.794875163403742e-18, 3.7647749553543836e-11}, + {1.579727660730835e-3, 1.6251626278391582e-4, -2.0633421035543276e-3, 2.1389686185689098e-3, -1.0108559391263003e-3, -3.9912705529919201e-7, 3.6235025084764691e-4, -2.8143901463712154e-4, 1.0449513336495887e-4, 2.1211418491830297e-9, -2.5779417251947842e-5, 1.7281818956040463e-5, -5.6413773872904282e-6, -1.1024320105776174e-11, 1.1223224418895175e-6, -6.8693396379526735e-7, 2.0653236975414887e-7, 4.6714772409838506e-14, -3.5609886164949055e-8, 2.0470855345905963e-8, -5.8091738633283358e-9, -1.332821287582869e-16, 9.0354604391335133e-10, -4.9598782517330834e-10, 1.3481607129399749e-10}, + {-4.0725121195140166e-3, 6.4033628338080698e-3, -4.0410161081676618e-3, -2.183732802866233e-6, 2.1740441801254639e-3, -1.9700440518418892e-3, 8.3595469747962458e-4, 1.9445447567109655e-8, -2.5779387120421696e-4, 1.9009987368139304e-4, -6.7696499937438965e-5, -1.4440629666426572e-10, 1.5712512518742269e-5, -1.0304008744776893e-5, 3.304517767401387e-6, 7.9829760242325709e-13, -6.4097794149313004e-7, 3.8894624761300056e-7, -1.1618347644948869e-7, -2.816808630596451e-15, 1.9878012911297093e-8, -1.1407719956357511e-8, 3.2355857064185555e-9, 4.1759468293455945e-20, -5.0423112718105824e-10}, + {-5.9475779383993003e-3, -5.4016476789260452e-4, 8.7910413550767898e-3, -9.8576315587856125e-3, 5.0134695031021538e-3, 1.2807521786221875e-6, -2.0626019342754683e-3, 1.7109128573523058e-3, -6.7695312714133799e-4, -6.9011545676562133e-9, 1.8855128143995902e-4, -1.3395215663491969e-4, 4.6263183033528039e-5, 4.0034230613321351e-11, -1.0255652921494033e-5, 6.612086372797651e-6, -2.0913022027253008e-6, -2.0951775649603837e-13, 3.9756029041993247e-7, -2.3956211978815887e-7, 7.1182883382145864e-8, 8.925574873053455e-16, -1.2101547235064676e-8, 6.9350618248334386e-9, -1.9661464453856102e-9}, + {1.7402027787522711e-2, -2.9527880945699121e-2, 2.0045875571402799e-2, 7.0289515966903407e-6, -1.2375421071343148e-2, 1.1976293444235254e-2, -5.4156038466518525e-3, -6.3290893396418616e-8, 1.8855118129005065e-3, -1.473473274825001e-3, 5.5515810097708387e-4, 5.2406834412550662e-10, -1.4357913535784836e-4, 9.9181293224943297e-5, -3.3460834749478311e-5, -3.5755837291098993e-12, 7.1560851960630076e-6, -4.5516802628155526e-6, 1.4236576649271475e-6, 1.8803149082089664e-14, -2.6623403898929211e-7, 1.5950642189595716e-7, -4.7187514673841102e-8, -6.5107872958755177e-17, 7.9795091026746235e-9}, + {3.0249124160905891e-2, 2.4817436002649977e-3, -4.9939134373457022e-2, 5.9915643009307869e-2, -3.2483207601623391e-2, -5.7212968652103441e-6, 1.5085251778569354e-2, -1.3261324005088445e-2, 5.5515262632426148e-3, 3.0263182257030016e-8, -1.7229548406756723e-3, 1.2893570099929637e-3, -4.6845138348319876e-4, -1.830259937893045e-10, 1.1449739014822654e-4, -7.7378565221244477e-5, 2.5625836246985201e-5, 1.0766165333192814e-12, -5.3246809282422621e-6, 3.349634863064464e-6, -1.0381253128684018e-6, -5.608909920621128e-15, 1.9150821930676591e-7, -1.1418365800203486e-7, 
3.3654425209171788e-8}, + {-9.9051020880159045e-2, 1.7954011706123486e-1, -1.2989606383463778e-1, -3.1478872752284357e-5, 9.0510635276848131e-2, -9.2828824411184397e-2, 4.4412112839877808e-2, 2.7779236316835888e-7, -1.7229543805449697e-2, 1.4182925050891573e-2, -5.6214161633747336e-3, -2.39598509186381e-9, 1.6029634366079908e-3, -1.1606784674435773e-3, 4.1001337768153873e-4, 1.8365800754090661e-11, -9.5844256563655903e-5, 6.3643062337764708e-5, -2.076250624489065e-5, -1.1806020912804483e-13, 4.2131808239120649e-6, -2.6262241337012467e-6, 8.0770620494930662e-7, 6.0125912123632725e-16, -1.4729737374018841e-7}, + {-1.9994542198219728e-1, -1.5056113040026424e-2, 3.6470239469348489e-1, -4.6435192311733545e-1, 2.6640934719197893e-1, 3.4038266027147191e-5, -1.3784338709329624e-1, 1.276467178337056e-1, -5.6213828755200985e-2, -1.753150885483011e-7, 1.9235592956768113e-2, -1.5088821281095315e-2, 5.7401854451350123e-3, 1.0622382710310225e-9, -1.5335082692563998e-3, 1.0819320643228214e-3, -3.7372510193945659e-4, -6.6170909729031985e-12, 8.4263617380909628e-5, -5.5150706827483479e-5, 1.7769536448348069e-5, 3.8827923210205533e-14, -3.53513697488768e-6, 2.1865832130045269e-6, -6.6812849447625594e-7}, + {7.2438608504029431e-1, -1.3918010932653375, 1.0654143352413968, 1.876173868950258e-4, -8.2705501176152696e-1, 8.9352433347828414e-1, -4.4971003995291339e-1, -1.6107401567546652e-6, 1.9235590165271091e-1, -1.6597702160042609e-1, 6.8882222681814333e-2, 1.3910091724608687e-8, -2.146911561508663e-2, 1.6228980898865892e-2, -5.9796016172584256e-3, -1.1287469112826745e-10, 1.5167451119784857e-3, -1.0478634293553899e-3, 3.5539072889126421e-4, 8.1704322111801517e-13, -7.7773013442452395e-5, 5.0291413897007722e-5, -1.6035083867000518e-5, 1.2469354315487605e-14, 3.1369106244517615e-6}, + {1.6668949727276811, 1.165462765994632e-1, -3.3288393225018906, 4.4692325482864037, -2.6977693045875807, -2.600667859891061e-4, 1.5389017615694539, -1.4937962361134612, 6.8881964633233148e-1, 1.3077482004552385e-6, -2.5762963325596288e-1, 2.1097676102125449e-1, -8.3714408359219882e-2, -7.7920428881354753e-9, 2.4267923064833599e-2, -1.7813678334552311e-2, 6.3970330388900056e-3, 4.9430807090480523e-11, -1.5554602758465635e-3, 1.0561196919903214e-3, -3.5277184460472902e-4, 9.3002334645022459e-14, 7.5285855026557172e-5, -4.8186515569156351e-5, 1.5227271505597605e-5}, + {-6.6188298861372935, 1.3397985455142589e+1, -1.0789350606845146e+1, -1.4352254537875018e-3, 9.2333694596189809, -1.0456552819547769e+1, 5.5105526029033471, 1.2024439690716742e-5, -2.5762961164755816, 2.3207442745387179, -1.0045728797216284, -1.0207833290021914e-7, 3.3975092171169466e-1, -2.6720517450757468e-1, 1.0235252851562706e-1, 8.4329730484871625e-10, -2.7998284958442595e-2, 2.0066274144976813e-2, -7.0554368915086242e-3, 1.9402238183698188e-12, 1.6562888105449611e-3, -1.1082898580743683e-3, 3.654545161310169e-4, -5.1290032026971794e-11, -7.6340103696869031e-5}, + {-1.7112706061976095e+1, -1.1208044642899116, 3.7131966511885444e+1, -5.2298271025348962e+1, 3.3058589696624618e+1, 2.4791298976200222e-3, -2.061089403411526e+1, 2.088672775145582e+1, -1.0045703956517752e+1, -1.2238783449063012e-5, 4.0770134274221141, -3.473667358470195, 1.4329352617312006, 7.1359914411879712e-8, -4.4797257159115612e-1, 3.4112666080644461e-1, -1.2699786326594923e-1, -2.8953677269081528e-10, 3.3125776278259863e-2, -2.3274087021036101e-2, 8.0399993503648882e-3, -1.177805216235265e-9, -1.8321624891071668e-3, 1.2108282933588665e-3, -3.9479941246822517e-4}, + {7.389033153567425e+1, 
-1.5680141270402273e+2, 1.322177542759164e+2, 1.3692876877324546e-2, -1.2366496885920151e+2, 1.4620689391062729e+2, -8.0365587724865346e+1, -1.1259851148881298e-4, 4.0770132196179938e+1, -3.8210340013273034e+1, 1.719522294277362e+1, 9.3519707955168356e-7, -6.2716159907747034, 5.1168999071852637, -2.0319658112299095, -4.9507215582761543e-9, 5.9626397294332597e-1, -4.4220765337238094e-1, 1.6079998700166273e-1, -2.4733786203223402e-8, -4.0307574759979762e-2, 2.7849050747097869e-2, -9.4751858992054221e-3, 6.419922235909132e-6, 2.1250180774699461e-3}, + {2.1216837098382522e+2, 1.3107863022633868e+1, -4.9698285932871748e+2, 7.3121595266969204e+2, -4.8213821720890847e+2, -2.8817248692894889e-2, 3.2616720302947102e+2, -3.4389340280087117e+2, 1.7195193870816232e+2, 1.4038077378096158e-4, -7.52594195897599e+1, 6.651969984520934e+1, -2.8447519748152462e+1, -7.613702615875391e-7, 9.5402237105304373, -7.5175301113311376, 2.8943997568871961, -4.6612194999538201e-7, -8.0615149598794088e-1, 5.8483006570631029e-1, -2.0845408972964956e-1, 1.4765818959305817e-4, 5.1000433863753019e-2, -3.3066252141883665e-2, 1.5109265210467774e-2}, + {-9.8959643098322368e+2, 2.1925555360905233e+3, -1.9283586782723356e+3, -1.5925738122215253e-1, 1.9569985945919857e+3, -2.4072514765081556e+3, 1.3756149959336496e+3, 1.2920735237496668e-3, -7.525941715948055e+2, 7.3171668742208716e+2, -3.4137023466220065e+2, -9.9857390260608043e-6, 1.3356313181291573e+2, -1.1276295161252794e+2, 4.6310396098204458e+1, -7.9237387133614756e-6, -1.4510726927018646e+1, 1.1111771248100563e+1, -4.1690817945270892, 3.1008219800117808e-3, 1.1220095449981468, -7.6052379926149916e-1, 3.6262236505085254e-1, 2.216867741940747e-1, 4.8683443692930507e-1}, +} + +// Igam computes the incomplete Gamma integral. +// Igam(a,x) = (1/ Γ(a)) \int_0^x e^{-t} t^{a-1} dt +// The input argument a must be positive and x must be non-negative or Igam +// will panic. +func Igam(a, x float64) float64 { + // The integral is evaluated by either a power series or continued fraction + // expansion, depending on the relative values of a and x. + // Sources: + // [1] "The Digital Library of Mathematical Functions", dlmf.nist.gov + // [2] Maddock et. al., "Incomplete Gamma Functions", + // http://www.boost.org/doc/libs/1_61_0/libs/math/doc/html/math_toolkit/sf_gamma/igamma.html + + // Check zero integration limit first + if x == 0 { + return 0 + } + + if x < 0 || a <= 0 { + panic(badParamOutOfBounds) + } + + // Asymptotic regime where a ~ x; see [2]. + absxmaA := math.Abs(x-a) / a + if (igamSmall < a && a < igamLarge && absxmaA < igamSmallRatio) || + (igamLarge < a && absxmaA < igamLargeRatio/math.Sqrt(a)) { + return asymptoticSeries(a, x, igam) + } + + if x > 1 && x > a { + return 1 - IgamC(a, x) + } + + return igamSeries(a, x) +} + +// IgamC computes the complemented incomplete Gamma integral. +// IgamC(a,x) = 1 - Igam(a,x) +// = (1/ Γ(a)) \int_0^\infty e^{-t} t^{a-1} dt +// The input argument a must be positive and x must be non-negative or +// IgamC will panic. +func IgamC(a, x float64) float64 { + // The integral is evaluated by either a power series or continued fraction + // expansion, depending on the relative values of a and x. + // Sources: + // [1] "The Digital Library of Mathematical Functions", dlmf.nist.gov + // [2] Maddock et. 
al., "Incomplete Gamma Functions", + // http://www.boost.org/doc/libs/1_61_0/libs/math/doc/html/math_toolkit/sf_gamma/igamma.html + + switch { + case x < 0, a <= 0: + panic(badParamOutOfBounds) + case x == 0: + return 1 + case math.IsInf(x, 0): + return 0 + } + + // Asymptotic regime where a ~ x; see [2]. + absxmaA := math.Abs(x-a) / a + if (igamSmall < a && a < igamLarge && absxmaA < igamSmallRatio) || + (igamLarge < a && absxmaA < igamLargeRatio/math.Sqrt(a)) { + return asymptoticSeries(a, x, igamC) + } + + // Everywhere else; see [2]. + if x > 1.1 { + if x < a { + return 1 - igamSeries(a, x) + } + return igamCContinuedFraction(a, x) + } else if x <= 0.5 { + if -0.4/math.Log(x) < a { + return 1 - igamSeries(a, x) + } + return igamCSeries(a, x) + } + + if x*1.1 < a { + return 1 - igamSeries(a, x) + } + return igamCSeries(a, x) +} + +// igamFac computes +// x^a * e^{-x} / Γ(a) +// corrected from (15) and (16) in [2] by replacing +// e^{x - a} +// with +// e^{a - x} +func igamFac(a, x float64) float64 { + if math.Abs(a-x) > 0.4*math.Abs(a) { + ax := a*math.Log(x) - x - lgam(a) + return math.Exp(ax) + } + + fac := a + lanczosG - 0.5 + res := math.Sqrt(fac/math.Exp(1)) / lanczosSumExpgScaled(a) + + if a < 200 && x < 200 { + res *= math.Exp(a-x) * math.Pow(x/fac, a) + } else { + num := x - a - lanczosG + 0.5 + res *= math.Exp(a*log1pmx(num/fac) + x*(0.5-lanczosG)/fac) + } + + return res +} + +// igamCContinuedFraction computes IgamC using DLMF 8.9.2. +func igamCContinuedFraction(a, x float64) float64 { + ax := igamFac(a, x) + if ax == 0 { + return 0 + } + + // Continued fraction + y := 1 - a + z := x + y + 1 + c := 0.0 + pkm2 := 1.0 + qkm2 := x + pkm1 := x + 1.0 + qkm1 := z * x + ans := pkm1 / qkm1 + + for i := 0; i < maxIter; i++ { + c += 1.0 + y += 1.0 + z += 2.0 + yc := y * c + pk := pkm1*z - pkm2*yc + qk := qkm1*z - qkm2*yc + var t float64 + if qk != 0 { + r := pk / qk + t = math.Abs((ans - r) / r) + ans = r + } else { + t = 1.0 + } + pkm2 = pkm1 + pkm1 = pk + qkm2 = qkm1 + qkm1 = qk + if math.Abs(pk) > big { + pkm2 *= biginv + pkm1 *= biginv + qkm2 *= biginv + qkm1 *= biginv + } + if t <= machEp { + break + } + } + + return ans * ax +} + +// igamSeries computes Igam using DLMF 8.11.4. +func igamSeries(a, x float64) float64 { + ax := igamFac(a, x) + if ax == 0 { + return 0 + } + + // Power series + r := a + c := 1.0 + ans := 1.0 + + for i := 0; i < maxIter; i++ { + r += 1.0 + c *= x / r + ans += c + if c <= machEp*ans { + break + } + } + + return ans * ax / a +} + +// igamCSeries computes IgamC using DLMF 8.7.3. This is related to the series +// in igamSeries but extra care is taken to avoid cancellation. +func igamCSeries(a, x float64) float64 { + fac := 1.0 + sum := 0.0 + + for n := 1; n < maxIter; n++ { + fac *= -x / float64(n) + term := fac / (a + float64(n)) + sum += term + if math.Abs(term) <= machEp*math.Abs(sum) { + break + } + } + + logx := math.Log(x) + term := -expm1(a*logx - lgam1p(a)) + return term - math.Exp(a*logx-lgam(a))*sum +} + +// asymptoticSeries computes Igam/IgamC using DLMF 8.12.3/8.12.4. 
+func asymptoticSeries(a, x float64, fun int) float64 { + maxpow := 0 + lambda := x / a + sigma := (x - a) / a + absoldterm := math.MaxFloat64 + etapow := [igamDimN]float64{1} + sum := 0.0 + afac := 1.0 + + var sgn float64 + if fun == igam { + sgn = -1 + } else { + sgn = 1 + } + + var eta float64 + if lambda > 1 { + eta = math.Sqrt(-2 * log1pmx(sigma)) + } else if lambda < 1 { + eta = -math.Sqrt(-2 * log1pmx(sigma)) + } else { + eta = 0 + } + res := 0.5 * math.Erfc(sgn*eta*math.Sqrt(a/2)) + + for k := 0; k < igamDimK; k++ { + ck := igamCoefs[k][0] + for n := 1; n < igamDimN; n++ { + if n > maxpow { + etapow[n] = eta * etapow[n-1] + maxpow++ + } + ckterm := igamCoefs[k][n] * etapow[n] + ck += ckterm + if math.Abs(ckterm) < machEp*math.Abs(ck) { + break + } + } + term := ck * afac + absterm := math.Abs(term) + if absterm > absoldterm { + break + } + sum += term + if absterm < machEp*math.Abs(sum) { + break + } + absoldterm = absterm + afac /= a + } + res += sgn * math.Exp(-0.5*a*eta*eta) * sum / math.Sqrt(2*math.Pi*a) + + return res +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igami.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igami.go new file mode 100644 index 0000000..446e8cc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/igami.go @@ -0,0 +1,153 @@ +// Derived from SciPy's special/cephes/igami.c +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/igami.c +// Made freely available by Stephen L. Moshier without support or guarantee. + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©1984, ©1987, ©1995 by Stephen L. Moshier +// Portions Copyright ©2017 The Gonum Authors. All rights reserved. + +package cephes + +import "math" + +// IgamI computes the inverse of the incomplete Gamma function. That is, it +// returns the x such that: +// IgamC(a, x) = p +// The input argument a must be positive and p must be between 0 and 1 +// inclusive or IgamI will panic. IgamI should return a positive number, but +// can return 0 even with non-zero y due to underflow. 
+func IgamI(a, p float64) float64 { + // Bound the solution + x0 := math.MaxFloat64 + yl := 0.0 + x1 := 0.0 + yh := 1.0 + dithresh := 5.0 * machEp + + if p < 0 || p > 1 || a <= 0 { + panic(badParamOutOfBounds) + } + + if p == 0 { + return math.Inf(1) + } + + if p == 1 { + return 0.0 + } + + // Starting with the approximate value + // x = a y^3 + // where + // y = 1 - d - ndtri(p) sqrt(d) + // and + // d = 1/9a + // the routine performs up to 10 Newton iterations to find the root of + // IgamC(a, x) - p = 0 + d := 1.0 / (9.0 * a) + y := 1.0 - d - Ndtri(p)*math.Sqrt(d) + x := a * y * y * y + + lgm := lgam(a) + + for i := 0; i < 10; i++ { + if x > x0 || x < x1 { + break + } + + y = IgamC(a, x) + + if y < yl || y > yh { + break + } + + if y < p { + x0 = x + yl = y + } else { + x1 = x + yh = y + } + + // Compute the derivative of the function at this point + d = (a-1)*math.Log(x) - x - lgm + if d < -maxLog { + break + } + d = -math.Exp(d) + + // Compute the step to the next approximation of x + d = (y - p) / d + if math.Abs(d/x) < machEp { + return x + } + x = x - d + } + + d = 0.0625 + if x0 == math.MaxFloat64 { + if x <= 0 { + x = 1 + } + for x0 == math.MaxFloat64 { + x = (1 + d) * x + y = IgamC(a, x) + if y < p { + x0 = x + yl = y + break + } + d = d + d + } + } + + d = 0.5 + dir := 0 + for i := 0; i < 400; i++ { + x = x1 + d*(x0-x1) + y = IgamC(a, x) + + lgm = (x0 - x1) / (x1 + x0) + if math.Abs(lgm) < dithresh { + break + } + + lgm = (y - p) / p + if math.Abs(lgm) < dithresh { + break + } + + if x <= 0 { + break + } + + if y >= p { + x1 = x + yh = y + if dir < 0 { + dir = 0 + d = 0.5 + } else if dir > 1 { + d = 0.5*d + 0.5 + } else { + d = (p - yl) / (yh - yl) + } + dir++ + } else { + x0 = x + yl = y + if dir > 0 { + dir = 0 + d = 0.5 + } else if dir < -1 { + d = 0.5 * d + } else { + d = (p - yl) / (yh - yl) + } + dir-- + } + } + + return x +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbeta.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbeta.go new file mode 100644 index 0000000..661846f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbeta.go @@ -0,0 +1,312 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* + * Cephes Math Library, Release 2.3: March, 1995 + * Copyright 1984, 1995 by Stephen L. Moshier + */ + +package cephes + +import ( + "math" + + "gonum.org/v1/gonum/mathext/internal/gonum" +) + +const ( + maxGam = 171.624376956302725 + big = 4.503599627370496e15 + biginv = 2.22044604925031308085e-16 +) + +// Incbet computes the regularized incomplete beta function. +func Incbet(aa, bb, xx float64) float64 { + if aa <= 0 || bb <= 0 { + panic(badParamOutOfBounds) + } + if xx <= 0 || xx >= 1 { + if xx == 0 { + return 0 + } + if xx == 1 { + return 1 + } + panic(badParamOutOfBounds) + } + + var flag int + if bb*xx <= 1 && xx <= 0.95 { + t := pseries(aa, bb, xx) + return transformT(t, flag) + } + + w := 1 - xx + + // Reverse a and b if x is greater than the mean. + var a, b, xc, x float64 + if xx > aa/(aa+bb) { + flag = 1 + a = bb + b = aa + xc = xx + x = w + } else { + a = aa + b = bb + xc = w + x = xx + } + + if flag == 1 && (b*x) <= 1.0 && x <= 0.95 { + t := pseries(a, b, x) + return transformT(t, flag) + } + + // Choose expansion for better convergence. 
+ y := x*(a+b-2.0) - (a - 1.0) + if y < 0.0 { + w = incbcf(a, b, x) + } else { + w = incbd(a, b, x) / xc + } + + // Multiply w by the factor + // x^a * (1-x)^b * Γ(a+b) / (a*Γ(a)*Γ(b)) + var t float64 + y = a * math.Log(x) + t = b * math.Log(xc) + if (a+b) < maxGam && math.Abs(y) < maxLog && math.Abs(t) < maxLog { + t = math.Pow(xc, b) + t *= math.Pow(x, a) + t /= a + t *= w + t *= 1.0 / gonum.Beta(a, b) + return transformT(t, flag) + } + + // Resort to logarithms. + y += t - gonum.Lbeta(a, b) + y += math.Log(w / a) + if y < minLog { + t = 0.0 + } else { + t = math.Exp(y) + } + + return transformT(t, flag) +} + +func transformT(t float64, flag int) float64 { + if flag == 1 { + if t <= machEp { + t = 1.0 - machEp + } else { + t = 1.0 - t + } + } + return t +} + +// incbcf returns the incomplete beta integral evaluated by a continued fraction +// expansion. +func incbcf(a, b, x float64) float64 { + var xk, pk, pkm1, pkm2, qk, qkm1, qkm2 float64 + var k1, k2, k3, k4, k5, k6, k7, k8 float64 + var r, t, ans, thresh float64 + var n int + + k1 = a + k2 = a + b + k3 = a + k4 = a + 1.0 + k5 = 1.0 + k6 = b - 1.0 + k7 = k4 + k8 = a + 2.0 + + pkm2 = 0.0 + qkm2 = 1.0 + pkm1 = 1.0 + qkm1 = 1.0 + ans = 1.0 + r = 1.0 + thresh = 3.0 * machEp + + for n = 0; n <= 300; n++ { + + xk = -(x * k1 * k2) / (k3 * k4) + pk = pkm1 + pkm2*xk + qk = qkm1 + qkm2*xk + pkm2 = pkm1 + pkm1 = pk + qkm2 = qkm1 + qkm1 = qk + + xk = (x * k5 * k6) / (k7 * k8) + pk = pkm1 + pkm2*xk + qk = qkm1 + qkm2*xk + pkm2 = pkm1 + pkm1 = pk + qkm2 = qkm1 + qkm1 = qk + + if qk != 0 { + r = pk / qk + } + if r != 0 { + t = math.Abs((ans - r) / r) + ans = r + } else { + t = 1.0 + } + + if t < thresh { + return ans + } + + k1 += 1.0 + k2 += 1.0 + k3 += 2.0 + k4 += 2.0 + k5 += 1.0 + k6 -= 1.0 + k7 += 2.0 + k8 += 2.0 + + if (math.Abs(qk) + math.Abs(pk)) > big { + pkm2 *= biginv + pkm1 *= biginv + qkm2 *= biginv + qkm1 *= biginv + } + if (math.Abs(qk) < biginv) || (math.Abs(pk) < biginv) { + pkm2 *= big + pkm1 *= big + qkm2 *= big + qkm1 *= big + } + } + + return ans +} + +// incbd returns the incomplete beta integral evaluated by a continued fraction +// expansion. +func incbd(a, b, x float64) float64 { + var xk, pk, pkm1, pkm2, qk, qkm1, qkm2 float64 + var k1, k2, k3, k4, k5, k6, k7, k8 float64 + var r, t, ans, z, thresh float64 + var n int + + k1 = a + k2 = b - 1.0 + k3 = a + k4 = a + 1.0 + k5 = 1.0 + k6 = a + b + k7 = a + 1.0 + k8 = a + 2.0 + + pkm2 = 0.0 + qkm2 = 1.0 + pkm1 = 1.0 + qkm1 = 1.0 + z = x / (1.0 - x) + ans = 1.0 + r = 1.0 + thresh = 3.0 * machEp + for n = 0; n <= 300; n++ { + + xk = -(z * k1 * k2) / (k3 * k4) + pk = pkm1 + pkm2*xk + qk = qkm1 + qkm2*xk + pkm2 = pkm1 + pkm1 = pk + qkm2 = qkm1 + qkm1 = qk + + xk = (z * k5 * k6) / (k7 * k8) + pk = pkm1 + pkm2*xk + qk = qkm1 + qkm2*xk + pkm2 = pkm1 + pkm1 = pk + qkm2 = qkm1 + qkm1 = qk + + if qk != 0 { + r = pk / qk + } + if r != 0 { + t = math.Abs((ans - r) / r) + ans = r + } else { + t = 1.0 + } + + if t < thresh { + return ans + } + + k1 += 1.0 + k2 -= 1.0 + k3 += 2.0 + k4 += 2.0 + k5 += 1.0 + k6 += 1.0 + k7 += 2.0 + k8 += 2.0 + + if (math.Abs(qk) + math.Abs(pk)) > big { + pkm2 *= biginv + pkm1 *= biginv + qkm2 *= biginv + qkm1 *= biginv + } + if (math.Abs(qk) < biginv) || (math.Abs(pk) < biginv) { + pkm2 *= big + pkm1 *= big + qkm2 *= big + qkm1 *= big + } + } + return ans +} + +// pseries returns the incomplete beta integral evaluated by a power series. Use +// when b*x is small and x not too close to 1. 
+func pseries(a, b, x float64) float64 { + var s, t, u, v, n, t1, z, ai float64 + ai = 1.0 / a + u = (1.0 - b) * x + v = u / (a + 1.0) + t1 = v + t = u + n = 2.0 + s = 0.0 + z = machEp * ai + for math.Abs(v) > z { + u = (n - b) * x / n + t *= u + v = t / (a + n) + s += v + n += 1.0 + } + s += t1 + s += ai + + u = a * math.Log(x) + if (a+b) < maxGam && math.Abs(u) < maxLog { + t = 1.0 / gonum.Beta(a, b) + s = s * t * math.Pow(x, a) + } else { + t = -gonum.Lbeta(a, b) + u + math.Log(s) + if t < minLog { + s = 0.0 + } else { + s = math.Exp(t) + } + } + return (s) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbi.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbi.go new file mode 100644 index 0000000..b878af0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/incbi.go @@ -0,0 +1,248 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* + * Cephes Math Library Release 2.4: March,1996 + * Copyright 1984, 1996 by Stephen L. Moshier + */ + +package cephes + +import "math" + +// Incbi computes the inverse of the regularized incomplete beta integral. +func Incbi(aa, bb, yy0 float64) float64 { + var a, b, y0, d, y, x, x0, x1, lgm, yp, di, dithresh, yl, yh, xt float64 + var i, rflg, dir, nflg int + + i = 0 + if yy0 <= 0 { + return (0.0) + } + if yy0 >= 1.0 { + return (1.0) + } + x0 = 0.0 + yl = 0.0 + x1 = 1.0 + yh = 1.0 + nflg = 0 + + if aa <= 1.0 || bb <= 1.0 { + dithresh = 1.0e-6 + rflg = 0 + a = aa + b = bb + y0 = yy0 + x = a / (a + b) + y = Incbet(a, b, x) + goto ihalve + } else { + dithresh = 1.0e-4 + } + // Approximation to inverse function + yp = -Ndtri(yy0) + + if yy0 > 0.5 { + rflg = 1 + a = bb + b = aa + y0 = 1.0 - yy0 + yp = -yp + } else { + rflg = 0 + a = aa + b = bb + y0 = yy0 + } + + lgm = (yp*yp - 3.0) / 6.0 + x = 2.0 / (1.0/(2.0*a-1.0) + 1.0/(2.0*b-1.0)) + d = yp*math.Sqrt(x+lgm)/x - (1.0/(2.0*b-1.0)-1.0/(2.0*a-1.0))*(lgm+5.0/6.0-2.0/(3.0*x)) + d = 2.0 * d + if d < minLog { + // mtherr("incbi", UNDERFLOW) + x = 0 + goto done + } + x = a / (a + b*math.Exp(d)) + y = Incbet(a, b, x) + yp = (y - y0) / y0 + if math.Abs(yp) < 0.2 { + goto newt + } + + /* Resort to interval halving if not close enough. 
*/ +ihalve: + + dir = 0 + di = 0.5 + for i = 0; i < 100; i++ { + if i != 0 { + x = x0 + di*(x1-x0) + if x == 1.0 { + x = 1.0 - machEp + } + if x == 0.0 { + di = 0.5 + x = x0 + di*(x1-x0) + if x == 0.0 { + // mtherr("incbi", UNDERFLOW) + goto done + } + } + y = Incbet(a, b, x) + yp = (x1 - x0) / (x1 + x0) + if math.Abs(yp) < dithresh { + goto newt + } + yp = (y - y0) / y0 + if math.Abs(yp) < dithresh { + goto newt + } + } + if y < y0 { + x0 = x + yl = y + if dir < 0 { + dir = 0 + di = 0.5 + } else if dir > 3 { + di = 1.0 - (1.0-di)*(1.0-di) + } else if dir > 1 { + di = 0.5*di + 0.5 + } else { + di = (y0 - y) / (yh - yl) + } + dir += 1 + if x0 > 0.75 { + if rflg == 1 { + rflg = 0 + a = aa + b = bb + y0 = yy0 + } else { + rflg = 1 + a = bb + b = aa + y0 = 1.0 - yy0 + } + x = 1.0 - x + y = Incbet(a, b, x) + x0 = 0.0 + yl = 0.0 + x1 = 1.0 + yh = 1.0 + goto ihalve + } + } else { + x1 = x + if rflg == 1 && x1 < machEp { + x = 0.0 + goto done + } + yh = y + if dir > 0 { + dir = 0 + di = 0.5 + } else if dir < -3 { + di = di * di + } else if dir < -1 { + di = 0.5 * di + } else { + di = (y - y0) / (yh - yl) + } + dir -= 1 + } + } + // mtherr("incbi", PLOSS) + if x0 >= 1.0 { + x = 1.0 - machEp + goto done + } + if x <= 0.0 { + // mtherr("incbi", UNDERFLOW) + x = 0.0 + goto done + } + +newt: + if nflg > 0 { + goto done + } + nflg = 1 + lgm = lgam(a+b) - lgam(a) - lgam(b) + + for i = 0; i < 8; i++ { + /* Compute the function at this point. */ + if i != 0 { + y = Incbet(a, b, x) + } + if y < yl { + x = x0 + y = yl + } else if y > yh { + x = x1 + y = yh + } else if y < y0 { + x0 = x + yl = y + } else { + x1 = x + yh = y + } + if x == 1.0 || x == 0.0 { + break + } + /* Compute the derivative of the function at this point. */ + d = (a-1.0)*math.Log(x) + (b-1.0)*math.Log(1.0-x) + lgm + if d < minLog { + goto done + } + if d > maxLog { + break + } + d = math.Exp(d) + /* Compute the step to the next approximation of x. */ + d = (y - y0) / d + xt = x - d + if xt <= x0 { + y = (x - x0) / (x1 - x0) + xt = x0 + 0.5*y*(x-x0) + if xt <= 0.0 { + break + } + } + if xt >= x1 { + y = (x1 - x) / (x1 - x0) + xt = x1 - 0.5*y*(x1-x) + if xt >= 1.0 { + break + } + } + x = xt + if math.Abs(d/x) < 128.0*machEp { + goto done + } + } + /* Did not converge. */ + dithresh = 256.0 * machEp + goto ihalve + +done: + + if rflg > 0 { + if x <= machEp { + x = 1.0 - machEp + } else { + x = 1.0 - x + } + } + return (x) +} + +func lgam(a float64) float64 { + lg, _ := math.Lgamma(a) + return lg +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/lanczos.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/lanczos.go new file mode 100644 index 0000000..ec29161 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/lanczos.go @@ -0,0 +1,153 @@ +// Derived from SciPy's special/cephes/lanczos.c +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/lanczos.c + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©2006 John Maddock +// Portions Copyright ©2003 Boost +// Portions Copyright ©2016 The Gonum Authors. All rights reserved. + +package cephes + +// Optimal values for G for each N are taken from +// http://web.mala.bc.ca/pughg/phdThesis/phdThesis.pdf, +// as are the theoretical error bounds. + +// Constants calculated using the method described by Godfrey +// http://my.fit.edu/~gabdo/gamma.txt and elaborated by Toth at +// http://www.rskey.org/gamma.htm using NTL::RR at 1000 bit precision. 
+ +var lanczosNum = [...]float64{ + 2.506628274631000270164908177133837338626, + 210.8242777515793458725097339207133627117, + 8071.672002365816210638002902272250613822, + 186056.2653952234950402949897160456992822, + 2876370.628935372441225409051620849613599, + 31426415.58540019438061423162831820536287, + 248874557.8620541565114603864132294232163, + 1439720407.311721673663223072794912393972, + 6039542586.35202800506429164430729792107, + 17921034426.03720969991975575445893111267, + 35711959237.35566804944018545154716670596, + 42919803642.64909876895789904700198885093, + 23531376880.41075968857200767445163675473, +} + +var lanczosDenom = [...]float64{ + 1, + 66, + 1925, + 32670, + 357423, + 2637558, + 13339535, + 45995730, + 105258076, + 150917976, + 120543840, + 39916800, + 0, +} + +var lanczosSumExpgScaledNum = [...]float64{ + 0.006061842346248906525783753964555936883222, + 0.5098416655656676188125178644804694509993, + 19.51992788247617482847860966235652136208, + 449.9445569063168119446858607650988409623, + 6955.999602515376140356310115515198987526, + 75999.29304014542649875303443598909137092, + 601859.6171681098786670226533699352302507, + 3481712.15498064590882071018964774556468, + 14605578.08768506808414169982791359218571, + 43338889.32467613834773723740590533316085, + 86363131.28813859145546927288977868422342, + 103794043.1163445451906271053616070238554, + 56906521.91347156388090791033559122686859, +} + +var lanczosSumExpgScaledDenom = [...]float64{ + 1, + 66, + 1925, + 32670, + 357423, + 2637558, + 13339535, + 45995730, + 105258076, + 150917976, + 120543840, + 39916800, + 0, +} + +var lanczosSumNear1D = [...]float64{ + 0.3394643171893132535170101292240837927725e-9, + -0.2499505151487868335680273909354071938387e-8, + 0.8690926181038057039526127422002498960172e-8, + -0.1933117898880828348692541394841204288047e-7, + 0.3075580174791348492737947340039992829546e-7, + -0.2752907702903126466004207345038327818713e-7, + -0.1515973019871092388943437623825208095123e-5, + 0.004785200610085071473880915854204301886437, + -0.1993758927614728757314233026257810172008, + 1.483082862367253753040442933770164111678, + -3.327150580651624233553677113928873034916, + 2.208709979316623790862569924861841433016, +} + +var lanczosSumNear2D = [...]float64{ + 0.1009141566987569892221439918230042368112e-8, + -0.7430396708998719707642735577238449585822e-8, + 0.2583592566524439230844378948704262291927e-7, + -0.5746670642147041587497159649318454348117e-7, + 0.9142922068165324132060550591210267992072e-7, + -0.8183698410724358930823737982119474130069e-7, + -0.4506604409707170077136555010018549819192e-5, + 0.01422519127192419234315002746252160965831, + -0.5926941084905061794445733628891024027949, + 4.408830289125943377923077727900630927902, + -9.8907772644920670589288081640128194231, + 6.565936202082889535528455955485877361223, +} + +const lanczosG = 6.024680040776729583740234375 + +func lanczosSum(x float64) float64 { + return ratevl(x, + lanczosNum[:], + len(lanczosNum)-1, + lanczosDenom[:], + len(lanczosDenom)-1) +} + +func lanczosSumExpgScaled(x float64) float64 { + return ratevl(x, + lanczosSumExpgScaledNum[:], + len(lanczosSumExpgScaledNum)-1, + lanczosSumExpgScaledDenom[:], + len(lanczosSumExpgScaledDenom)-1) +} + +func lanczosSumNear1(dx float64) float64 { + var result float64 + + for i, val := range lanczosSumNear1D { + k := float64(i + 1) + result += (-val * dx) / (k*dx + k*k) + } + + return result +} + +func lanczosSumNear2(dx float64) float64 { + var result float64 + x := dx + 2 + + for i, val := range 
lanczosSumNear2D { + k := float64(i + 1) + result += (-val * dx) / (x + k*x + k*k - 1) + } + + return result +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/ndtri.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/ndtri.go new file mode 100644 index 0000000..156a30d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/ndtri.go @@ -0,0 +1,150 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* + * Cephes Math Library Release 2.1: January, 1989 + * Copyright 1984, 1987, 1989 by Stephen L. Moshier + * Direct inquiries to 30 Frost Street, Cambridge, MA 02140 + */ + +package cephes + +import "math" + +// TODO(btracey): There is currently an implementation of this functionality +// in gonum/stat/distuv. Find out which implementation is better, and rectify +// by having distuv call this, or moving this implementation into +// gonum/mathext/internal/gonum. + +// math.Sqrt(2*pi) +const s2pi = 2.50662827463100050242e0 + +// approximation for 0 <= |y - 0.5| <= 3/8 +var P0 = [5]float64{ + -5.99633501014107895267e1, + 9.80010754185999661536e1, + -5.66762857469070293439e1, + 1.39312609387279679503e1, + -1.23916583867381258016e0, +} + +var Q0 = [8]float64{ + /* 1.00000000000000000000E0, */ + 1.95448858338141759834e0, + 4.67627912898881538453e0, + 8.63602421390890590575e1, + -2.25462687854119370527e2, + 2.00260212380060660359e2, + -8.20372256168333339912e1, + 1.59056225126211695515e1, + -1.18331621121330003142e0, +} + +// Approximation for interval z = math.Sqrt(-2 log y ) between 2 and 8 +// i.e., y between exp(-2) = .135 and exp(-32) = 1.27e-14. +var P1 = [9]float64{ + 4.05544892305962419923e0, + 3.15251094599893866154e1, + 5.71628192246421288162e1, + 4.40805073893200834700e1, + 1.46849561928858024014e1, + 2.18663306850790267539e0, + -1.40256079171354495875e-1, + -3.50424626827848203418e-2, + -8.57456785154685413611e-4, +} + +var Q1 = [8]float64{ + /* 1.00000000000000000000E0, */ + 1.57799883256466749731e1, + 4.53907635128879210584e1, + 4.13172038254672030440e1, + 1.50425385692907503408e1, + 2.50464946208309415979e0, + -1.42182922854787788574e-1, + -3.80806407691578277194e-2, + -9.33259480895457427372e-4, +} + +// Approximation for interval z = math.Sqrt(-2 log y ) between 8 and 64 +// i.e., y between exp(-32) = 1.27e-14 and exp(-2048) = 3.67e-890. +var P2 = [9]float64{ + 3.23774891776946035970e0, + 6.91522889068984211695e0, + 3.93881025292474443415e0, + 1.33303460815807542389e0, + 2.01485389549179081538e-1, + 1.23716634817820021358e-2, + 3.01581553508235416007e-4, + 2.65806974686737550832e-6, + 6.23974539184983293730e-9, +} + +var Q2 = [8]float64{ + /* 1.00000000000000000000E0, */ + 6.02427039364742014255e0, + 3.67983563856160859403e0, + 1.37702099489081330271e0, + 2.16236993594496635890e-1, + 1.34204006088543189037e-2, + 3.28014464682127739104e-4, + 2.89247864745380683936e-6, + 6.79019408009981274425e-9, +} + +// Ndtri returns the argument, x, for which the area under the +// Gaussian probability density function (integrated from +// minus infinity to x) is equal to y. +func Ndtri(y0 float64) float64 { + // For small arguments 0 < y < exp(-2), the program computes + // z = math.Sqrt( -2.0 * math.Log(y) ); then the approximation is + // x = z - math.Log(z)/z - (1/z) P(1/z) / Q(1/z). + // There are two rational functions P/Q, one for 0 < y < exp(-32) + // and the other for y up to exp(-2). 
For larger arguments, + // w = y - 0.5, and x/math.Sqrt(2pi) = w + w**3 R(w**2)/S(w**2)). + var x, y, z, y2, x0, x1 float64 + var code int + + if y0 <= 0.0 { + if y0 < 0 { + panic(badParamOutOfBounds) + } + return math.Inf(-1) + } + if y0 >= 1.0 { + if y0 > 1 { + panic(badParamOutOfBounds) + } + return math.Inf(1) + } + code = 1 + y = y0 + if y > (1.0 - 0.13533528323661269189) { /* 0.135... = exp(-2) */ + y = 1.0 - y + code = 0 + } + + if y > 0.13533528323661269189 { + y = y - 0.5 + y2 = y * y + x = y + y*(y2*polevl(y2, P0[:], 4)/p1evl(y2, Q0[:], 8)) + x = x * s2pi + return (x) + } + + x = math.Sqrt(-2.0 * math.Log(y)) + x0 = x - math.Log(x)/x + + z = 1.0 / x + if x < 8.0 { /* y > exp(-32) = 1.2664165549e-14 */ + x1 = z * polevl(z, P1[:], 8) / p1evl(z, Q1[:], 8) + } else { + x1 = z * polevl(z, P2[:], 8) / p1evl(z, Q2[:], 8) + } + x = x0 - x1 + if code != 0 { + x = -x + } + return (x) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/polevl.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/polevl.go new file mode 100644 index 0000000..4e438b2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/polevl.go @@ -0,0 +1,82 @@ +// Derived from SciPy's special/cephes/polevl.h +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/polevl.h +// Made freely available by Stephen L. Moshier without support or guarantee. + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©1984, ©1987, ©1988 by Stephen L. Moshier +// Portions Copyright ©2016 The Gonum Authors. All rights reserved. + +package cephes + +import "math" + +// polevl evaluates a polynomial of degree N +// y = c_0 + c_1 x_1 + c_2 x_2^2 ... +// where the coefficients are stored in reverse order, i.e. coef[0] = c_n and +// coef[n] = c_0. +func polevl(x float64, coef []float64, n int) float64 { + ans := coef[0] + for i := 1; i <= n; i++ { + ans = ans*x + coef[i] + } + return ans +} + +// p1evl is the same as polevl, except c_n is assumed to be 1 and is not included +// in the slice. +func p1evl(x float64, coef []float64, n int) float64 { + ans := x + coef[0] + for i := 1; i <= n-1; i++ { + ans = ans*x + coef[i] + } + return ans +} + +// ratevl evaluates a rational function +func ratevl(x float64, num []float64, m int, denom []float64, n int) float64 { + // Source: Holin et. 
al., "Polynomial and Rational Function Evaluation", + // http://www.boost.org/doc/libs/1_61_0/libs/math/doc/html/math_toolkit/roots/rational.html + absx := math.Abs(x) + + var dir, idx int + var y float64 + if absx > 1 { + // Evaluate as a polynomial in 1/x + dir = -1 + idx = m + y = 1 / x + } else { + dir = 1 + idx = 0 + y = x + } + + // Evaluate the numerator + numAns := num[idx] + idx += dir + for i := 0; i < m; i++ { + numAns = numAns*y + num[idx] + idx += dir + } + + // Evaluate the denominator + if absx > 1 { + idx = n + } else { + idx = 0 + } + + denomAns := denom[idx] + idx += dir + for i := 0; i < n; i++ { + denomAns = denomAns*y + denom[idx] + idx += dir + } + + if absx > 1 { + pow := float64(n - m) + return math.Pow(x, pow) * numAns / denomAns + } + return numAns / denomAns +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/unity.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/unity.go new file mode 100644 index 0000000..cb1695f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/unity.go @@ -0,0 +1,170 @@ +// Derived from SciPy's special/cephes/unity.c +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/unity.c +// Made freely available by Stephen L. Moshier without support or guarantee. + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©1984, ©1996 by Stephen L. Moshier +// Portions Copyright ©2016 The Gonum Authors. All rights reserved. + +package cephes + +import "math" + +// Relative error approximations for function arguments near unity. +// log1p(x) = log(1+x) +// expm1(x) = exp(x) - 1 +// cosm1(x) = cos(x) - 1 +// lgam1p(x) = lgam(1+x) + +const ( + invSqrt2 = 1 / math.Sqrt2 + pi4 = math.Pi / 4 + euler = 0.577215664901532860606512090082402431 // Euler constant +) + +// Coefficients for +// log(1+x) = x - \frac{x^2}{2} + \frac{x^3 lP(x)}{lQ(x)} +// for +// \frac{1}{\sqrt{2}} <= x < \sqrt{2} +// Theoretical peak relative error = 2.32e-20 +var lP = [...]float64{ + 4.5270000862445199635215e-5, + 4.9854102823193375972212e-1, + 6.5787325942061044846969e0, + 2.9911919328553073277375e1, + 6.0949667980987787057556e1, + 5.7112963590585538103336e1, + 2.0039553499201281259648e1, +} + +var lQ = [...]float64{ + 1.5062909083469192043167e1, + 8.3047565967967209469434e1, + 2.2176239823732856465394e2, + 3.0909872225312059774938e2, + 2.1642788614495947685003e2, + 6.0118660497603843919306e1, +} + +// log1p computes +// log(1 + x) +func log1p(x float64) float64 { + z := 1 + x + if z < invSqrt2 || z > math.Sqrt2 { + return math.Log(z) + } + z = x * x + z = -0.5*z + x*(z*polevl(x, lP[:], 6)/p1evl(x, lQ[:], 6)) + return x + z +} + +// log1pmx computes +// log(1 + x) - x +func log1pmx(x float64) float64 { + if math.Abs(x) < 0.5 { + xfac := x + res := 0.0 + + var term float64 + for n := 2; n < maxIter; n++ { + xfac *= -x + term = xfac / float64(n) + res += term + if math.Abs(term) < machEp*math.Abs(res) { + break + } + } + return res + } + return log1p(x) - x +} + +// Coefficients for +// e^x = 1 + \frac{2x eP(x^2)}{eQ(x^2) - eP(x^2)} +// for +// -0.5 <= x <= 0.5 +var eP = [...]float64{ + 1.2617719307481059087798e-4, + 3.0299440770744196129956e-2, + 9.9999999999999999991025e-1, +} + +var eQ = [...]float64{ + 3.0019850513866445504159e-6, + 2.5244834034968410419224e-3, + 2.2726554820815502876593e-1, + 2.0000000000000000000897e0, +} + +// expm1 computes +// expm1(x) = e^x - 1 +func expm1(x float64) float64 { + if math.IsInf(x, 0) { + if math.IsNaN(x) || x > 0 { + return x + 
} + return -1 + } + if x < -0.5 || x > 0.5 { + return math.Exp(x) - 1 + } + xx := x * x + r := x * polevl(xx, eP[:], 2) + r = r / (polevl(xx, eQ[:], 3) - r) + return r + r +} + +var coscof = [...]float64{ + 4.7377507964246204691685e-14, + -1.1470284843425359765671e-11, + 2.0876754287081521758361e-9, + -2.7557319214999787979814e-7, + 2.4801587301570552304991e-5, + -1.3888888888888872993737e-3, + 4.1666666666666666609054e-2, +} + +// cosm1 computes +// cosm1(x) = cos(x) - 1 +func cosm1(x float64) float64 { + if x < -pi4 || x > pi4 { + return math.Cos(x) - 1 + } + xx := x * x + xx = -0.5*xx + xx*xx*polevl(xx, coscof[:], 6) + return xx +} + +// lgam1pTayler computes +// lgam(x + 1) +//around x = 0 using its Taylor series. +func lgam1pTaylor(x float64) float64 { + if x == 0 { + return 0 + } + res := -euler * x + xfac := -x + for n := 2; n < 42; n++ { + nf := float64(n) + xfac *= -x + coeff := Zeta(nf, 1) * xfac / nf + res += coeff + if math.Abs(coeff) < machEp*math.Abs(res) { + break + } + } + + return res +} + +// lgam1p computes +// lgam(x + 1) +func lgam1p(x float64) float64 { + if math.Abs(x) <= 0.5 { + return lgam1pTaylor(x) + } else if math.Abs(x-1) < 0.5 { + return math.Log(x) + lgam1pTaylor(x-1) + } + return lgam(x + 1) +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/cephes/zeta.go b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/zeta.go new file mode 100644 index 0000000..746c0a5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/cephes/zeta.go @@ -0,0 +1,110 @@ +// Derived from SciPy's special/cephes/zeta.c +// https://github.com/scipy/scipy/blob/master/scipy/special/cephes/zeta.c +// Made freely available by Stephen L. Moshier without support or guarantee. + +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// Copyright ©1984, ©1987 by Stephen L. Moshier +// Portions Copyright ©2016 The Gonum Authors. All rights reserved. + +package cephes + +import "math" + +// zetaCoegs are the expansion coefficients for Euler-Maclaurin summation +// formula: +// \frac{(2k)!}{B_{2k}} +// where +// B_{2k} +// are Bernoulli numbers. +var zetaCoefs = [...]float64{ + 12.0, + -720.0, + 30240.0, + -1209600.0, + 47900160.0, + -1.307674368e12 / 691, + 7.47242496e10, + -1.067062284288e16 / 3617, + 5.109094217170944e18 / 43867, + -8.028576626982912e20 / 174611, + 1.5511210043330985984e23 / 854513, + -1.6938241367317436694528e27 / 236364091, +} + +// Zeta computes the Riemann zeta function of two arguments. +// Zeta(x,q) = \sum_{k=0}^{\infty} (k+q)^{-x} +// Note that Zeta returns +Inf if x is 1 and will panic if x is less than 1, +// q is either zero or a negative integer, or q is negative and x is not an +// integer. +// +// Note that: +// zeta(x,1) = zetac(x) + 1 +func Zeta(x, q float64) float64 { + // REFERENCE: Gradshteyn, I. S., and I. M. Ryzhik, Tables of Integrals, Series, + // and Products, p. 1073; Academic Press, 1980. + if x == 1 { + return math.Inf(1) + } + + if x < 1 { + panic(badParamOutOfBounds) + } + + if q <= 0 { + if q == math.Floor(q) { + panic(badParamFunctionSingularity) + } + if x != math.Floor(x) { + panic(badParamOutOfBounds) // Because q^-x not defined + } + } + + // Asymptotic expansion: http://dlmf.nist.gov/25.11#E43 + if q > 1e8 { + return (1/(x-1) + 1/(2*q)) * math.Pow(q, 1-x) + } + + // The Euler-Maclaurin summation formula is used to obtain the expansion: + // Zeta(x,q) = \sum_{k=1}^n (k+q)^{-x} + \frac{(n+q)^{1-x}}{x-1} - \frac{1}{2(n+q)^x} + \sum_{j=1}^{\infty} \frac{B_{2j}x(x+1)...(x+2j)}{(2j)! 
(n+q)^{x+2j+1}} + // where + // B_{2j} + // are Bernoulli numbers. + // Permit negative q but continue sum until n+q > 9. This case should be + // handled by a reflection formula. If q<0 and x is an integer, there is a + // relation to the polyGamma function. + s := math.Pow(q, -x) + a := q + i := 0 + b := 0.0 + for i < 9 || a <= 9 { + i++ + a += 1.0 + b = math.Pow(a, -x) + s += b + if math.Abs(b/s) < machEp { + return s + } + } + + w := a + s += b * w / (x - 1) + s -= 0.5 * b + a = 1.0 + k := 0.0 + for _, coef := range zetaCoefs { + a *= x + k + b /= w + t := a * b / coef + s = s + t + t = math.Abs(t / s) + if t < machEp { + return s + } + k += 1.0 + a *= x + k + b /= w + k += 1.0 + } + return s +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/gonum/beta.go b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/beta.go new file mode 100644 index 0000000..cec8acf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/beta.go @@ -0,0 +1,52 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gonum + +import ( + "math" +) + +// Beta returns the value of the complete beta function B(a, b). It is defined as +// Γ(a)Γ(b) / Γ(a+b) +// Special cases are: +// B(a,b) returns NaN if a or b is Inf +// B(a,b) returns NaN if a and b are 0 +// B(a,b) returns NaN if a or b is NaN +// B(a,b) returns NaN if a or b is < 0 +// B(a,b) returns +Inf if a xor b is 0. +// +// See http://mathworld.wolfram.com/BetaFunction.html for more detailed information. +func Beta(a, b float64) float64 { + return math.Exp(Lbeta(a, b)) +} + +// Lbeta returns the natural logarithm of the complete beta function B(a,b). +// Lbeta is defined as: +// Ln(Γ(a)Γ(b)/Γ(a+b)) +// Special cases are: +// Lbeta(a,b) returns NaN if a or b is Inf +// Lbeta(a,b) returns NaN if a and b are 0 +// Lbeta(a,b) returns NaN if a or b is NaN +// Lbeta(a,b) returns NaN if a or b is < 0 +// Lbeta(a,b) returns +Inf if a xor b is 0. +func Lbeta(a, b float64) float64 { + switch { + case math.IsInf(a, +1) || math.IsInf(b, +1): + return math.NaN() + case a == 0 && b == 0: + return math.NaN() + case a < 0 || b < 0: + return math.NaN() + case math.IsNaN(a) || math.IsNaN(b): + return math.NaN() + case a == 0 || b == 0: + return math.Inf(+1) + } + + la, _ := math.Lgamma(a) + lb, _ := math.Lgamma(b) + lab, _ := math.Lgamma(a + b) + return la + lb - lab +} diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/gonum/doc.go b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/doc.go new file mode 100644 index 0000000..cbe6aa2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gonum contains functions implemented by the gonum team. +// It is here to avoid circular imports and/or double coding of functions. +package gonum // import "gonum.org/v1/gonum/mathext/internal/gonum" diff --git a/vendor/gonum.org/v1/gonum/mathext/internal/gonum/gonum.go b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/gonum.go new file mode 100644 index 0000000..47e02ce --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/internal/gonum/gonum.go @@ -0,0 +1,5 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gonum diff --git a/vendor/gonum.org/v1/gonum/mathext/mvgamma.go b/vendor/gonum.org/v1/gonum/mathext/mvgamma.go new file mode 100644 index 0000000..99a9233 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/mvgamma.go @@ -0,0 +1,32 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "math" + +const ( + logPi = 1.14472988584940017414342735135305871164729481 // http://oeis.org/A053510 +) + +// MvLgamma returns the log of the multivariate Gamma function. Dim +// must be greater than zero, and MvLgamma will return NaN if v < (dim-1)/2. +// +// See https://en.wikipedia.org/wiki/Multivariate_gamma_function for more +// information. +func MvLgamma(v float64, dim int) float64 { + if dim < 1 { + panic("mathext: negative dimension") + } + df := float64(dim) + if v < (df-1)*0.5 { + return math.NaN() + } + ans := df * (df - 1) * 0.25 * logPi + for i := 1; i <= dim; i++ { + lg, _ := math.Lgamma(v + float64(1-i)*0.5) + ans += lg + } + return ans +} diff --git a/vendor/gonum.org/v1/gonum/mathext/roots.go b/vendor/gonum.org/v1/gonum/mathext/roots.go new file mode 100644 index 0000000..490ca8a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/roots.go @@ -0,0 +1,178 @@ +// Derived from SciPy's special/c_misc/fsolve.c and special/c_misc/misc.h +// https://github.com/scipy/scipy/blob/master/scipy/special/c_misc/fsolve.c +// https://github.com/scipy/scipy/blob/master/scipy/special/c_misc/misc.h + +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "math" + +type objectiveFunc func(float64, []float64) float64 + +type fSolveResult uint8 + +const ( + // An exact solution was found, in which case the first point on the + // interval is the value + fSolveExact fSolveResult = iota + 1 + // Interval width is less than the tolerance + fSolveConverged + // Root-finding didn't converge in a set number of iterations + fSolveMaxIterations +) + +const ( + machEp = 1.0 / (1 << 53) +) + +// falsePosition uses a combination of bisection and false position to find a +// root of a function within a given interval. This is guaranteed to converge, +// and always keeps a bounding interval, unlike Newton's method. Inputs are: +// x1, x2: initial bounding interval +// f1, f2: value of f() at x1 and x2 +// absErr, relErr: absolute and relative errors on the bounding interval +// bisectTil: if > 0.0, perform bisection until the width of the bounding +// interval is less than this +// f, fExtra: function to find root of is f(x, fExtra) +// Returns: +// result: whether an exact root was found, the process converged to a +// bounding interval small than the required error, or the max number +// of iterations was hit +// bestX: best root approximation +// bestF: function value at bestX +// errEst: error estimation +func falsePosition(x1, x2, f1, f2, absErr, relErr, bisectTil float64, f objectiveFunc, fExtra []float64) (fSolveResult, float64, float64, float64) { + // The false position steps are either unmodified, or modified with the + // Anderson-Bjorck method as appropriate. Theoretically, this has a "speed of + // convergence" of 1.7 (bisection is 1, Newton is 2). + // Note that this routine was designed initially to work with gammaincinv, so + // it may not be tuned right for other problems. Don't use it blindly. 
+ + if f1*f2 >= 0 { + panic("Initial interval is not a bounding interval") + } + + const ( + maxIterations = 100 + bisectIter = 4 + bisectWidth = 4.0 + ) + + const ( + bisect = iota + 1 + falseP + ) + + var state uint8 + if bisectTil > 0 { + state = bisect + } else { + state = falseP + } + + gamma := 1.0 + + w := math.Abs(x2 - x1) + lastBisectWidth := w + + var nFalseP int + var x3, f3, bestX, bestF float64 + for i := 0; i < maxIterations; i++ { + switch state { + case bisect: + x3 = 0.5 * (x1 + x2) + if x3 == x1 || x3 == x2 { + // i.e., x1 and x2 are successive floating-point numbers + bestX = x3 + if x3 == x1 { + bestF = f1 + } else { + bestF = f2 + } + return fSolveConverged, bestX, bestF, w + } + + f3 = f(x3, fExtra) + if f3 == 0 { + return fSolveExact, x3, f3, w + } + + if f3*f2 < 0 { + x1 = x2 + f1 = f2 + } + x2 = x3 + f2 = f3 + w = math.Abs(x2 - x1) + lastBisectWidth = w + if bisectTil > 0 { + if w < bisectTil { + bisectTil = -1.0 + gamma = 1.0 + nFalseP = 0 + state = falseP + } + } else { + gamma = 1.0 + nFalseP = 0 + state = falseP + } + case falseP: + s12 := (f2 - gamma*f1) / (x2 - x1) + x3 = x2 - f2/s12 + f3 = f(x3, fExtra) + if f3 == 0 { + return fSolveExact, x3, f3, w + } + + nFalseP++ + if f3*f2 < 0 { + gamma = 1.0 + x1 = x2 + f1 = f2 + } else { + // Anderson-Bjorck method + g := 1.0 - f3/f2 + if g <= 0 { + g = 0.5 + } + gamma *= g + } + x2 = x3 + f2 = f3 + w = math.Abs(x2 - x1) + + // Sanity check. For every 4 false position checks, see if we really are + // decreasing the interval by comparing to what bisection would have + // achieved (or, rather, a bit more lenient than that -- interval + // decreased by 4 instead of by 16, as the fp could be decreasing gamma + // for a bit). Note that this should guarantee convergence, as it makes + // sure that we always end up decreasing the interval width with a + // bisection. + if nFalseP > bisectIter { + if w*bisectWidth > lastBisectWidth { + state = bisect + } + nFalseP = 0 + lastBisectWidth = w + } + } + + tol := absErr + relErr*math.Max(math.Max(math.Abs(x1), math.Abs(x2)), 1.0) + if w <= tol { + if math.Abs(f1) < math.Abs(f2) { + bestX = x1 + bestF = f1 + } else { + bestX = x2 + bestF = f2 + } + return fSolveConverged, bestX, bestF, w + } + } + + return fSolveMaxIterations, x3, f3, w +} diff --git a/vendor/gonum.org/v1/gonum/mathext/zeta.go b/vendor/gonum.org/v1/gonum/mathext/zeta.go new file mode 100644 index 0000000..841b9b0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/mathext/zeta.go @@ -0,0 +1,20 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mathext + +import "gonum.org/v1/gonum/mathext/internal/cephes" + +// Zeta computes the Riemann zeta function of two arguments. +// Zeta(x,q) = \sum_{k=0}^{\infty} (k+q)^{-x} +// Note that Zeta returns +Inf if x is 1 and will panic if x is less than 1, +// q is either zero or a negative integer, or q is negative and x is not an +// integer. +// +// See http://mathworld.wolfram.com/HurwitzZetaFunction.html +// or https://en.wikipedia.org/wiki/Multiple_zeta_function#Two_parameters_case +// for more detailed information. +func Zeta(x, q float64) float64 { + return cephes.Zeta(x, q) +} diff --git a/vendor/gonum.org/v1/gonum/num/dual/doc.go b/vendor/gonum.org/v1/gonum/num/dual/doc.go new file mode 100644 index 0000000..d3852fe --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dual/doc.go @@ -0,0 +1,13 @@ +// Copyright ©2018 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dual provides the dual numeric type and functions. Dual numbers +// are an extension of the real numbers in the form a+bϵ where ϵ^2=0, but ϵ≠0. +// +// See https://en.wikipedia.org/wiki/Dual_number for details of their properties +// and uses. +package dual // imports "gonum.org/v1/gonum/num/dual" + +// TODO(kortschak): Handle special cases properly. +// - Pow diff --git a/vendor/gonum.org/v1/gonum/num/dual/dual.go b/vendor/gonum.org/v1/gonum/num/dual/dual.go new file mode 100644 index 0000000..a4a2348 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dual/dual.go @@ -0,0 +1,123 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dual + +import ( + "fmt" + "math" + "strings" +) + +// Number is a float64 precision dual number. +type Number struct { + Real, Emag float64 +} + +var zero = Number{} + +// Format implements fmt.Formatter. +func (d Number) Format(fs fmt.State, c rune) { + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + width, wOk := fs.Width() + if !wOk { + width = -1 + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T{Real:%#v, Emag:%#v}", d, d.Real, d.Emag) + return + } + if fs.Flag('+') { + fmt.Fprintf(fs, "{Real:%+v, Emag:%+v}", d.Real, d.Emag) + return + } + c = 'g' + prec = -1 + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + fre := fmtString(fs, c, prec, width, false) + fim := fmtString(fs, c, prec, width, true) + fmt.Fprintf(fs, fmt.Sprintf("(%s%[2]sϵ)", fre, fim), d.Real, d.Emag) + default: + fmt.Fprintf(fs, "%%!%c(%T=%[2]v)", c, d) + return + } +} + +// This is horrible, but it's what we have. +func fmtString(fs fmt.State, c rune, prec, width int, wantPlus bool) string { + var b strings.Builder + b.WriteByte('%') + for _, f := range "0+- " { + if fs.Flag(int(f)) || (f == '+' && wantPlus) { + b.WriteByte(byte(f)) + } + } + if width >= 0 { + fmt.Fprint(&b, width) + } + if prec >= 0 { + b.WriteByte('.') + if prec > 0 { + fmt.Fprint(&b, prec) + } + } + b.WriteRune(c) + return b.String() +} + +// Add returns the sum of x and y. +func Add(x, y Number) Number { + return Number{ + Real: x.Real + y.Real, + Emag: x.Emag + y.Emag, + } +} + +// Sub returns the difference of x and y, x-y. +func Sub(x, y Number) Number { + return Number{ + Real: x.Real - y.Real, + Emag: x.Emag - y.Emag, + } +} + +// Mul returns the dual product of x and y. +func Mul(x, y Number) Number { + return Number{ + Real: x.Real * y.Real, + Emag: x.Real*y.Emag + x.Emag*y.Real, + } +} + +// Inv returns the dual inverse of d. +// +// Special cases are: +// Inv(±Inf) = ±0-0ϵ +// Inv(±0) = ±Inf-Infϵ +func Inv(d Number) Number { + d2 := d.Real * d.Real + return Number{ + Real: 1 / d.Real, + Emag: -d.Emag / d2, + } +} + +// Scale returns d scaled by f. +func Scale(f float64, d Number) Number { + return Number{Real: f * d.Real, Emag: f * d.Emag} +} + +// Abs returns the absolute value of d. +func Abs(d Number) Number { + if !math.Signbit(d.Real) { + return d + } + return Scale(-1, d) +} diff --git a/vendor/gonum.org/v1/gonum/num/dual/dual_fike.go b/vendor/gonum.org/v1/gonum/num/dual/dual_fike.go new file mode 100644 index 0000000..9d395f4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dual/dual_fike.go @@ -0,0 +1,286 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Derived from code by Jeffrey A. Fike at http://adl.stanford.edu/hyperdual/ + +// The MIT License (MIT) +// +// Copyright (c) 2006 Jeffrey A. Fike +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package dual + +import "math" + +// PowReal returns x**p, the base-x exponential of p. +// +// Special cases are (in order): +// PowReal(NaN+xϵ, ±0) = 1+NaNϵ for any x +// PowReal(x, ±0) = 1 for any x +// PowReal(1+xϵ, y) = 1+xyϵ for any y +// PowReal(x, 1) = x for any x +// PowReal(NaN+xϵ, y) = NaN+NaNϵ +// PowReal(x, NaN) = NaN+NaNϵ +// PowReal(±0, y) = ±Inf for y an odd integer < 0 +// PowReal(±0, -Inf) = +Inf +// PowReal(±0, +Inf) = +0 +// PowReal(±0, y) = +Inf for finite y < 0 and not an odd integer +// PowReal(±0, y) = ±0 for y an odd integer > 0 +// PowReal(±0, y) = +0 for finite y > 0 and not an odd integer +// PowReal(-1, ±Inf) = 1 +// PowReal(x+0ϵ, +Inf) = +Inf+NaNϵ for |x| > 1 +// PowReal(x+yϵ, +Inf) = +Inf for |x| > 1 +// PowReal(x, -Inf) = +0+NaNϵ for |x| > 1 +// PowReal(x, +Inf) = +0+NaNϵ for |x| < 1 +// PowReal(x+0ϵ, -Inf) = +Inf+NaNϵ for |x| < 1 +// PowReal(x, -Inf) = +Inf-Infϵ for |x| < 1 +// PowReal(+Inf, y) = +Inf for y > 0 +// PowReal(+Inf, y) = +0 for y < 0 +// PowReal(-Inf, y) = Pow(-0, -y) +// PowReal(x, y) = NaN+NaNϵ for finite x < 0 and finite non-integer y +func PowReal(d Number, p float64) Number { + const tol = 1e-15 + + r := d.Real + if math.Abs(r) < tol { + if r >= 0 { + r = tol + } + if r < 0 { + r = -tol + } + } + deriv := p * math.Pow(r, p-1) + return Number{ + Real: math.Pow(d.Real, p), + Emag: d.Emag * deriv, + } +} + +// Pow returns d**r, the base-d exponential of r. +func Pow(d, p Number) Number { + return Exp(Mul(p, Log(d))) +} + +// Sqrt returns the square root of d. +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = (±0+Infϵ) +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(d Number) Number { + if d.Real <= 0 { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: math.Inf(1), + } + } + return Number{ + Real: math.NaN(), + Emag: math.NaN(), + } + } + return PowReal(d, 0.5) +} + +// Exp returns e**q, the base-e exponential of d. +// +// Special cases are: +// Exp(+Inf) = +Inf +// Exp(NaN) = NaN +// Very large values overflow to 0 or +Inf. +// Very small values underflow to 1. 
+func Exp(d Number) Number { + fnDeriv := math.Exp(d.Real) + return Number{ + Real: fnDeriv, + Emag: fnDeriv * d.Emag, + } +} + +// Log returns the natural logarithm of d. +// +// Special cases are: +// Log(+Inf) = (+Inf+0ϵ) +// Log(0) = (-Inf±Infϵ) +// Log(x < 0) = NaN +// Log(NaN) = NaN +func Log(d Number) Number { + switch d.Real { + case 0: + return Number{ + Real: math.Log(d.Real), + Emag: math.Copysign(math.Inf(1), d.Real), + } + case math.Inf(1): + return Number{ + Real: math.Log(d.Real), + Emag: 0, + } + } + if d.Real < 0 { + return Number{ + Real: math.NaN(), + Emag: math.NaN(), + } + } + return Number{ + Real: math.Log(d.Real), + Emag: d.Emag / d.Real, + } +} + +// Sin returns the sine of d. +// +// Special cases are: +// Sin(±0) = (±0+Nϵ) +// Sin(±Inf) = NaN +// Sin(NaN) = NaN +func Sin(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + fn := math.Sin(d.Real) + deriv := math.Cos(d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Cos returns the cosine of d. +// +// Special cases are: +// Cos(±Inf) = NaN +// Cos(NaN) = NaN +func Cos(d Number) Number { + fn := math.Cos(d.Real) + deriv := -math.Sin(d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Tan returns the tangent of d. +// +// Special cases are: +// Tan(±0) = (±0+Nϵ) +// Tan(±Inf) = NaN +// Tan(NaN) = NaN +func Tan(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + fn := math.Tan(d.Real) + deriv := 1 + fn*fn + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Asin returns the inverse sine of d. +// +// Special cases are: +// Asin(±0) = (±0+Nϵ) +// Asin(±1) = (±Inf+Infϵ) +// Asin(x) = NaN if x < -1 or x > 1 +func Asin(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } else if m := math.Abs(d.Real); m >= 1 { + if m == 1 { + return Number{ + Real: math.Asin(d.Real), + Emag: math.Inf(1), + } + } + return Number{ + Real: math.NaN(), + Emag: math.NaN(), + } + } + fn := math.Asin(d.Real) + deriv := 1 / math.Sqrt(1-d.Real*d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Acos returns the inverse cosine of d. +// +// Special cases are: +// Acos(-1) = (Pi-Infϵ) +// Acos(1) = (0-Infϵ) +// Acos(x) = NaN if x < -1 or x > 1 +func Acos(d Number) Number { + if m := math.Abs(d.Real); m >= 1 { + if m == 1 { + return Number{ + Real: math.Acos(d.Real), + Emag: math.Inf(-1), + } + } + return Number{ + Real: math.NaN(), + Emag: math.NaN(), + } + } + fn := math.Acos(d.Real) + deriv := -1 / math.Sqrt(1-d.Real*d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Atan returns the inverse tangent of d. +// +// Special cases are: +// Atan(±0) = (±0+Nϵ) +// Atan(±Inf) = (±Pi/2+0ϵ) +func Atan(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + fn := math.Atan(d.Real) + deriv := 1 / (1 + d.Real*d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} diff --git a/vendor/gonum.org/v1/gonum/num/dual/dual_hyperbolic.go b/vendor/gonum.org/v1/gonum/num/dual/dual_hyperbolic.go new file mode 100644 index 0000000..829017f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dual/dual_hyperbolic.go @@ -0,0 +1,165 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dual + +import "math" + +// Sinh returns the hyperbolic sine of d. 
+// +// Special cases are: +// Sinh(±0) = (±0+Nϵ) +// Sinh(±Inf) = ±Inf +// Sinh(NaN) = NaN +func Sinh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + if math.IsInf(d.Real, 0) { + return Number{ + Real: d.Real, + Emag: math.Inf(1), + } + } + fn := math.Sinh(d.Real) + deriv := math.Cosh(d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Cosh returns the hyperbolic cosine of d. +// +// Special cases are: +// Cosh(±0) = 1 +// Cosh(±Inf) = +Inf +// Cosh(NaN) = NaN +func Cosh(d Number) Number { + if math.IsInf(d.Real, 0) { + return Number{ + Real: math.Inf(1), + Emag: d.Real, + } + } + fn := math.Cosh(d.Real) + deriv := math.Sinh(d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Tanh returns the hyperbolic tangent of d. +// +// Special cases are: +// Tanh(±0) = (±0+Nϵ) +// Tanh(±Inf) = (±1+0ϵ) +// Tanh(NaN) = NaN +func Tanh(d Number) Number { + switch d.Real { + case 0: + return Number{ + Real: d.Real, + Emag: d.Emag, + } + case math.Inf(1): + return Number{ + Real: 1, + Emag: 0, + } + case math.Inf(-1): + return Number{ + Real: -1, + Emag: 0, + } + } + fn := math.Tanh(d.Real) + deriv := 1 - fn*fn + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Asinh returns the inverse hyperbolic sine of d. +// +// Special cases are: +// Asinh(±0) = (±0+Nϵ) +// Asinh(±Inf) = ±Inf +// Asinh(NaN) = NaN +func Asinh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + fn := math.Asinh(d.Real) + deriv := 1 / math.Sqrt(d.Real*d.Real+1) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Acosh returns the inverse hyperbolic cosine of d. +// +// Special cases are: +// Acosh(+Inf) = +Inf +// Acosh(1) = (0+Infϵ) +// Acosh(x) = NaN if x < 1 +// Acosh(NaN) = NaN +func Acosh(d Number) Number { + if d.Real <= 1 { + if d.Real == 1 { + return Number{ + Real: 0, + Emag: math.Inf(1), + } + } + return Number{ + Real: math.NaN(), + Emag: math.NaN(), + } + } + fn := math.Acosh(d.Real) + deriv := 1 / math.Sqrt(d.Real*d.Real-1) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} + +// Atanh returns the inverse hyperbolic tangent of d. +// +// Special cases are: +// Atanh(1) = +Inf +// Atanh(±0) = (±0+Nϵ) +// Atanh(-1) = -Inf +// Atanh(x) = NaN if x < -1 or x > 1 +// Atanh(NaN) = NaN +func Atanh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + Emag: d.Emag, + } + } + if math.Abs(d.Real) == 1 { + return Number{ + Real: math.Inf(int(d.Real)), + Emag: math.NaN(), + } + } + fn := math.Atanh(d.Real) + deriv := 1 / (1 - d.Real*d.Real) + return Number{ + Real: fn, + Emag: deriv * d.Emag, + } +} diff --git a/vendor/gonum.org/v1/gonum/num/dualcmplx/doc.go b/vendor/gonum.org/v1/gonum/num/dualcmplx/doc.go new file mode 100644 index 0000000..4df3293 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dualcmplx/doc.go @@ -0,0 +1,12 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dualcmplx provides the anti-commutative dual complex numeric +// type and functions. +// +// See https://arxiv.org/abs/1601.01754v1 for details. +package dualcmplx // imports "gonum.org/v1/gonum/num/dualcmplx" + +// TODO(kortschak): Handle special cases properly. 
+// - Pow diff --git a/vendor/gonum.org/v1/gonum/num/dualcmplx/dual.go b/vendor/gonum.org/v1/gonum/num/dualcmplx/dual.go new file mode 100644 index 0000000..27a15523 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dualcmplx/dual.go @@ -0,0 +1,267 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dualcmplx + +import ( + "fmt" + "math" + "math/cmplx" + "strings" +) + +// Number is a float64 precision anti-commutative dual complex number. +type Number struct { + Real, Dual complex128 +} + +var zero Number + +// Format implements fmt.Formatter. +func (d Number) Format(fs fmt.State, c rune) { + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + width, wOk := fs.Width() + if !wOk { + width = -1 + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T{Real:%#v, Dual:%#v}", d, d.Real, d.Dual) + return + } + if fs.Flag('+') { + fmt.Fprintf(fs, "{Real:%+v, Dual:%+v}", d.Real, d.Dual) + return + } + c = 'g' + prec = -1 + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + fre := fmtString(fs, c, prec, width, false) + fim := fmtString(fs, c, prec, width, true) + fmt.Fprintf(fs, fmt.Sprintf("(%s+%[2]sϵ)", fre, fim), d.Real, d.Dual) + default: + fmt.Fprintf(fs, "%%!%c(%T=%[2]v)", c, d) + return + } +} + +// This is horrible, but it's what we have. +func fmtString(fs fmt.State, c rune, prec, width int, wantPlus bool) string { + var b strings.Builder + b.WriteByte('%') + for _, f := range "0+- " { + if fs.Flag(int(f)) || (f == '+' && wantPlus) { + b.WriteByte(byte(f)) + } + } + if width >= 0 { + fmt.Fprint(&b, width) + } + if prec >= 0 { + b.WriteByte('.') + if prec > 0 { + fmt.Fprint(&b, prec) + } + } + b.WriteRune(c) + return b.String() +} + +// Add returns the sum of x and y. +func Add(x, y Number) Number { + return Number{ + Real: x.Real + y.Real, + Dual: x.Dual + y.Dual, + } +} + +// Sub returns the difference of x and y, x-y. +func Sub(x, y Number) Number { + return Number{ + Real: x.Real - y.Real, + Dual: x.Dual - y.Dual, + } +} + +// Mul returns the dual product of x and y, x×y. +func Mul(x, y Number) Number { + return Number{ + Real: x.Real * y.Real, + Dual: x.Real*y.Dual + x.Dual*cmplx.Conj(y.Real), + } +} + +// Inv returns the dual inverse of d. +func Inv(d Number) Number { + return Number{ + Real: 1 / d.Real, + Dual: -d.Dual / (d.Real * cmplx.Conj(d.Real)), + } +} + +// Conj returns the conjugate of d₁+d₂ϵ, d̅₁+d₂ϵ. +func Conj(d Number) Number { + return Number{ + Real: cmplx.Conj(d.Real), + Dual: d.Dual, + } +} + +// Scale returns d scaled by f. +func Scale(f float64, d Number) Number { + return Number{Real: complex(f, 0) * d.Real, Dual: complex(f, 0) * d.Dual} +} + +// Abs returns the absolute value of d. +func Abs(d Number) float64 { + return cmplx.Abs(d.Real) +} + +// PowReal returns d**p, the base-d exponential of p. +// +// Special cases are (in order): +// PowReal(NaN+xϵ, ±0) = 1+NaNϵ for any x +// Pow(0+xϵ, y) = 0+Infϵ for all y < 1. +// Pow(0+xϵ, y) = 0 for all y > 1. 
+// PowReal(x, ±0) = 1 for any x +// PowReal(1+xϵ, y) = 1+xyϵ for any y +// Pow(Inf, y) = +Inf+NaNϵ for y > 0 +// Pow(Inf, y) = +0+NaNϵ for y < 0 +// PowReal(x, 1) = x for any x +// PowReal(NaN+xϵ, y) = NaN+NaNϵ +// PowReal(x, NaN) = NaN+NaNϵ +// PowReal(-1, ±Inf) = 1 +// PowReal(x+0ϵ, +Inf) = +Inf+NaNϵ for |x| > 1 +// PowReal(x+yϵ, +Inf) = +Inf for |x| > 1 +// PowReal(x, -Inf) = +0+NaNϵ for |x| > 1 +// PowReal(x, +Inf) = +0+NaNϵ for |x| < 1 +// PowReal(x+0ϵ, -Inf) = +Inf+NaNϵ for |x| < 1 +// PowReal(x, -Inf) = +Inf-Infϵ for |x| < 1 +// PowReal(+Inf, y) = +Inf for y > 0 +// PowReal(+Inf, y) = +0 for y < 0 +// PowReal(-Inf, y) = Pow(-0, -y) +func PowReal(d Number, p float64) Number { + switch { + case p == 0: + switch { + case cmplx.IsNaN(d.Real): + return Number{Real: 1, Dual: cmplx.NaN()} + case d.Real == 0, cmplx.IsInf(d.Real): + return Number{Real: 1} + } + case p == 1: + if cmplx.IsInf(d.Real) { + d.Dual = cmplx.NaN() + } + return d + case math.IsInf(p, 1): + if d.Real == -1 { + return Number{Real: 1, Dual: cmplx.NaN()} + } + if Abs(d) > 1 { + if d.Dual == 0 { + return Number{Real: cmplx.Inf(), Dual: cmplx.NaN()} + } + return Number{Real: cmplx.Inf(), Dual: cmplx.Inf()} + } + return Number{Real: 0, Dual: cmplx.NaN()} + case math.IsInf(p, -1): + if d.Real == -1 { + return Number{Real: 1, Dual: cmplx.NaN()} + } + if Abs(d) > 1 { + return Number{Real: 0, Dual: cmplx.NaN()} + } + if d.Dual == 0 { + return Number{Real: cmplx.Inf(), Dual: cmplx.NaN()} + } + return Number{Real: cmplx.Inf(), Dual: cmplx.Inf()} + case math.IsNaN(p): + return Number{Real: cmplx.NaN(), Dual: cmplx.NaN()} + case d.Real == 0: + if p < 1 { + return Number{Real: d.Real, Dual: cmplx.Inf()} + } + return Number{Real: d.Real} + case cmplx.IsInf(d.Real): + if p < 0 { + return Number{Real: 0, Dual: cmplx.NaN()} + } + return Number{Real: cmplx.Inf(), Dual: cmplx.NaN()} + } + return Pow(d, Number{Real: complex(p, 0)}) +} + +// Pow returns d**p, the base-d exponential of p. +func Pow(d, p Number) Number { + return Exp(Mul(p, Log(d))) +} + +// Sqrt returns the square root of d. +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = (±0+Infϵ) +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(d Number) Number { + return PowReal(d, 0.5) +} + +// Exp returns e**q, the base-e exponential of d. +// +// Special cases are: +// Exp(+Inf) = +Inf +// Exp(NaN) = NaN +// Very large values overflow to 0 or +Inf. +// Very small values underflow to 1. +func Exp(d Number) Number { + fn := cmplx.Exp(d.Real) + if imag(d.Real) == 0 { + return Number{Real: fn, Dual: fn * d.Dual} + } + conj := cmplx.Conj(d.Real) + return Number{ + Real: fn, + Dual: ((fn - cmplx.Exp(conj)) / (d.Real - conj)) * d.Dual, + } +} + +// Log returns the natural logarithm of d. 
+// +// Special cases are: +// Log(+Inf) = (+Inf+0ϵ) +// Log(0) = (-Inf±Infϵ) +// Log(x < 0) = NaN +// Log(NaN) = NaN +func Log(d Number) Number { + fn := cmplx.Log(d.Real) + switch { + case d.Real == 0: + return Number{ + Real: fn, + Dual: complex(math.Copysign(math.Inf(1), real(d.Real)), math.NaN()), + } + case imag(d.Real) == 0: + return Number{ + Real: fn, + Dual: d.Dual / d.Real, + } + case cmplx.IsInf(d.Real): + return Number{ + Real: fn, + Dual: 0, + } + } + conj := cmplx.Conj(d.Real) + return Number{ + Real: fn, + Dual: ((fn - cmplx.Log(conj)) / (d.Real - conj)) * d.Dual, + } +} diff --git a/vendor/gonum.org/v1/gonum/num/dualquat/doc.go b/vendor/gonum.org/v1/gonum/num/dualquat/doc.go new file mode 100644 index 0000000..97a6809 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dualquat/doc.go @@ -0,0 +1,13 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package dualquat provides the dual quaternion numeric type and functions. +// +// Dual quaternions provide a system for rigid transformation with interpolation +// and blending in ℝ³. See https://www.cs.utah.edu/~ladislav/kavan06dual/kavan06dual.pdf and +// https://en.wikipedia.org/wiki/Dual_quaternion for more details. +package dualquat // imports "gonum.org/v1/gonum/num/dualquat" + +// TODO(kortschak): Handle special cases properly. +// - Pow diff --git a/vendor/gonum.org/v1/gonum/num/dualquat/dual.go b/vendor/gonum.org/v1/gonum/num/dualquat/dual.go new file mode 100644 index 0000000..1e895f4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dualquat/dual.go @@ -0,0 +1,169 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package dualquat + +import ( + "fmt" + "strings" + + "gonum.org/v1/gonum/num/dual" + "gonum.org/v1/gonum/num/quat" +) + +// Number is a float64 precision dual quaternion. A dual quaternion +// is a hypercomplex number composed of two quaternions, q₀+q₂ϵ, +// where ϵ²=0, but ϵ≠0. Here, q₀ is termed the real and q₂ the dual. +type Number struct { + Real, Dual quat.Number +} + +var ( + zero Number + zeroQuat quat.Number +) + +// Format implements fmt.Formatter. +func (d Number) Format(fs fmt.State, c rune) { + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + width, wOk := fs.Width() + if !wOk { + width = -1 + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T{Real:%#v, Dual:%#v}", d, d.Real, d.Dual) + return + } + if fs.Flag('+') { + fmt.Fprintf(fs, "{Real:%+v, Dual:%+v}", d.Real, d.Dual) + return + } + c = 'g' + prec = -1 + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + fre := fmtString(fs, c, prec, width, false) + fim := fmtString(fs, c, prec, width, true) + fmt.Fprintf(fs, fmt.Sprintf("(%s+%[2]sϵ)", fre, fim), d.Real, d.Dual) + default: + fmt.Fprintf(fs, "%%!%c(%T=%[2]v)", c, d) + return + } +} + +// This is horrible, but it's what we have. +func fmtString(fs fmt.State, c rune, prec, width int, wantPlus bool) string { + var b strings.Builder + b.WriteByte('%') + for _, f := range "0+- " { + if fs.Flag(int(f)) || (f == '+' && wantPlus) { + b.WriteByte(byte(f)) + } + } + if width >= 0 { + fmt.Fprint(&b, width) + } + if prec >= 0 { + b.WriteByte('.') + if prec > 0 { + fmt.Fprint(&b, prec) + } + } + b.WriteRune(c) + return b.String() +} + +// Add returns the sum of x and y. 
+func Add(x, y Number) Number { + return Number{ + Real: quat.Add(x.Real, y.Real), + Dual: quat.Add(x.Dual, y.Dual), + } +} + +// Sub returns the difference of x and y, x-y. +func Sub(x, y Number) Number { + return Number{ + Real: quat.Sub(x.Real, y.Real), + Dual: quat.Sub(x.Dual, y.Dual), + } +} + +// Mul returns the dual product of x and y. +func Mul(x, y Number) Number { + return Number{ + Real: quat.Mul(x.Real, y.Real), + Dual: quat.Add(quat.Mul(x.Real, y.Dual), quat.Mul(x.Dual, y.Real)), + } +} + +// Inv returns the dual inverse of d. +func Inv(d Number) Number { + return Number{ + Real: quat.Inv(d.Real), + Dual: quat.Scale(-1, quat.Mul(d.Dual, quat.Inv(quat.Mul(d.Real, d.Real)))), + } +} + +// Conj returns the dual quaternion conjugate of d₁+d₂ϵ, d̅₁-d̅₂ϵ. +func Conj(d Number) Number { + return Number{ + Real: quat.Conj(d.Real), + Dual: quat.Scale(-1, quat.Conj(d.Dual)), + } +} + +// ConjDual returns the dual conjugate of d₁+d₂ϵ, d₁-d₂ϵ. +func ConjDual(d Number) Number { + return Number{ + Real: d.Real, + Dual: quat.Scale(-1, d.Dual), + } +} + +// ConjQuat returns the quaternion conjugate of d₁+d₂ϵ, d̅₁+d̅₂ϵ. +func ConjQuat(d Number) Number { + return Number{ + Real: quat.Conj(d.Real), + Dual: quat.Conj(d.Dual), + } +} + +// Scale returns d scaled by f. +func Scale(f float64, d Number) Number { + return Number{Real: quat.Scale(f, d.Real), Dual: quat.Scale(f, d.Dual)} +} + +// Abs returns the absolute value of d. +func Abs(d Number) dual.Number { + return dual.Number{ + Real: quat.Abs(d.Real), + Emag: quat.Abs(d.Dual), + } +} + +func addRealQuat(r float64, q quat.Number) quat.Number { + q.Real += r + return q +} + +func addQuatReal(q quat.Number, r float64) quat.Number { + q.Real += r + return q +} + +func subRealQuat(r float64, q quat.Number) quat.Number { + q.Real = r - q.Real + return q +} + +func subQuatReal(q quat.Number, r float64) quat.Number { + q.Real -= r + return q +} diff --git a/vendor/gonum.org/v1/gonum/num/dualquat/dual_fike.go b/vendor/gonum.org/v1/gonum/num/dualquat/dual_fike.go new file mode 100644 index 0000000..1c8caa3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/dualquat/dual_fike.go @@ -0,0 +1,152 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Derived from code by Jeffrey A. Fike at http://adl.stanford.edu/hyperdual/ + +// The MIT License (MIT) +// +// Copyright (c) 2006 Jeffrey A. Fike +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package dualquat + +import ( + "math" + + "gonum.org/v1/gonum/num/quat" +) + +// PowReal returns d**p, the base-d exponential of p. +// +// Special cases are (in order): +// PowReal(NaN+xϵ, ±0) = 1+NaNϵ for any x +// PowReal(x, ±0) = 1 for any x +// PowReal(1+xϵ, y) = 1+xyϵ for any y +// PowReal(x, 1) = x for any x +// PowReal(NaN+xϵ, y) = NaN+NaNϵ +// PowReal(x, NaN) = NaN+NaNϵ +// PowReal(±0, y) = ±Inf for y an odd integer < 0 +// PowReal(±0, -Inf) = +Inf +// PowReal(±0, +Inf) = +0 +// PowReal(±0, y) = +Inf for finite y < 0 and not an odd integer +// PowReal(±0, y) = ±0 for y an odd integer > 0 +// PowReal(±0, y) = +0 for finite y > 0 and not an odd integer +// PowReal(-1, ±Inf) = 1 +// PowReal(x+0ϵ, +Inf) = +Inf+NaNϵ for |x| > 1 +// PowReal(x+yϵ, +Inf) = +Inf for |x| > 1 +// PowReal(x, -Inf) = +0+NaNϵ for |x| > 1 +// PowReal(x, +Inf) = +0+NaNϵ for |x| < 1 +// PowReal(x+0ϵ, -Inf) = +Inf+NaNϵ for |x| < 1 +// PowReal(x, -Inf) = +Inf-Infϵ for |x| < 1 +// PowReal(+Inf, y) = +Inf for y > 0 +// PowReal(+Inf, y) = +0 for y < 0 +// PowReal(-Inf, y) = Pow(-0, -y) +func PowReal(d Number, p float64) Number { + switch { + case p == 0: + switch { + case quat.IsNaN(d.Real): + return Number{Real: quat.Number{Real: 1}, Dual: quat.NaN()} + case d.Real == zeroQuat, quat.IsInf(d.Real): + return Number{Real: quat.Number{Real: 1}} + } + case p == 1: + return d + case math.IsInf(p, 1): + if Abs(d).Real > 1 { + if d.Dual == zeroQuat { + return Number{Real: quat.Inf(), Dual: quat.NaN()} + } + return Number{Real: quat.Inf(), Dual: quat.Inf()} + } + return Number{Real: zeroQuat, Dual: quat.NaN()} + case math.IsInf(p, -1): + if Abs(d).Real > 1 { + return Number{Real: zeroQuat, Dual: quat.NaN()} + } + if d.Dual == zeroQuat { + return Number{Real: quat.Inf(), Dual: quat.NaN()} + } + return Number{Real: quat.Inf(), Dual: quat.Inf()} + } + deriv := quat.Mul(quat.Number{Real: p}, quat.Pow(d.Real, quat.Number{Real: p - 1})) + return Number{ + Real: quat.Pow(d.Real, quat.Number{Real: p}), + Dual: quat.Mul(d.Dual, deriv), + } +} + +// Pow return d**p, the base-d exponential of p. +func Pow(d, p Number) Number { + return Exp(Mul(p, Log(d))) +} + +// Sqrt returns the square root of d +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = (±0+Infϵ) +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(d Number) Number { + return PowReal(d, 0.5) +} + +// Exp returns e**d, the base-e exponential of d. +// +// Special cases are: +// Exp(+Inf) = +Inf +// Exp(NaN) = NaN +// Very large values overflow to 0 or +Inf. +// Very small values underflow to 1. +func Exp(d Number) Number { + fnDeriv := quat.Exp(d.Real) + return Number{ + Real: fnDeriv, + Dual: quat.Mul(fnDeriv, d.Dual), + } +} + +// Log returns the natural logarithm of d. 
+// +// Special cases are: +// Log(+Inf) = (+Inf+0ϵ) +// Log(0) = (-Inf±Infϵ) +// Log(x < 0) = NaN +// Log(NaN) = NaN +func Log(d Number) Number { + switch { + case d.Real == zeroQuat: + return Number{ + Real: quat.Log(d.Real), + Dual: quat.Inf(), + } + case quat.IsInf(d.Real): + return Number{ + Real: quat.Log(d.Real), + Dual: zeroQuat, + } + } + return Number{ + Real: quat.Log(d.Real), + Dual: quat.Mul(d.Dual, quat.Inv(d.Real)), + } +} diff --git a/vendor/gonum.org/v1/gonum/num/hyperdual/doc.go b/vendor/gonum.org/v1/gonum/num/hyperdual/doc.go new file mode 100644 index 0000000..52b2d7d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/hyperdual/doc.go @@ -0,0 +1,14 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package hyperdual provides the hyperdual numeric type and functions. Hyperdual +// numbers are an extension of the real numbers in the form a+bϵ₁+bϵ₂+dϵ₁ϵ₂ where +// ϵ₁^2=0 and ϵ₂^2=0, but ϵ₁≠0, ϵ₂≠0 and ϵ₁ϵ₂≠0. +// +// See https://doi.org/10.2514/6.2011-886 and http://adl.stanford.edu/hyperdual/ for +// details of their properties and uses. +package hyperdual // imports "gonum.org/v1/gonum/num/hyperdual" + +// TODO(kortschak): Handle special cases properly. +// - Pow diff --git a/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual.go b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual.go new file mode 100644 index 0000000..2fb7a67 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual.go @@ -0,0 +1,142 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package hyperdual + +import ( + "fmt" + "math" + "strings" +) + +// Number is a float64 precision hyperdual number. +type Number struct { + Real, E1mag, E2mag, E1E2mag float64 +} + +var ( + zero = Number{} + negZero = math.Float64frombits(1 << 63) +) + +// Format implements fmt.Formatter. +func (d Number) Format(fs fmt.State, c rune) { + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + width, wOk := fs.Width() + if !wOk { + width = -1 + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T{Real:%#v, E1mag:%#v, E2mag:%#v, E1E2mag:%#v}", d, d.Real, d.E1mag, d.E2mag, d.E1E2mag) + return + } + if fs.Flag('+') { + fmt.Fprintf(fs, "{Real:%+v, E1mag:%+v, E2mag:%+v, E1E2mag:%+v}", d.Real, d.E1mag, d.E2mag, d.E1E2mag) + return + } + c = 'g' + prec = -1 + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + fre := fmtString(fs, c, prec, width, false) + fim := fmtString(fs, c, prec, width, true) + fmt.Fprintf(fs, fmt.Sprintf("(%s%[2]sϵ₁%[2]sϵ₂%[2]sϵ₁ϵ₂)", fre, fim), d.Real, d.E1mag, d.E2mag, d.E1E2mag) + default: + fmt.Fprintf(fs, "%%!%c(%T=%[2]v)", c, d) + return + } +} + +// This is horrible, but it's what we have. +func fmtString(fs fmt.State, c rune, prec, width int, wantPlus bool) string { + var b strings.Builder + b.WriteByte('%') + for _, f := range "0+- " { + if fs.Flag(int(f)) || (f == '+' && wantPlus) { + b.WriteByte(byte(f)) + } + } + if width >= 0 { + fmt.Fprint(&b, width) + } + if prec >= 0 { + b.WriteByte('.') + if prec > 0 { + fmt.Fprint(&b, prec) + } + } + b.WriteRune(c) + return b.String() +} + +// Add returns the sum of x and y. +func Add(x, y Number) Number { + return Number{ + Real: x.Real + y.Real, + E1mag: x.E1mag + y.E1mag, + E2mag: x.E2mag + y.E2mag, + E1E2mag: x.E1E2mag + y.E1E2mag, + } +} + +// Sub returns the difference of x and y, x-y. 
+func Sub(x, y Number) Number { + return Number{ + Real: x.Real - y.Real, + E1mag: x.E1mag - y.E1mag, + E2mag: x.E2mag - y.E2mag, + E1E2mag: x.E1E2mag - y.E1E2mag, + } +} + +// Mul returns the hyperdual product of x and y. +func Mul(x, y Number) Number { + return Number{ + Real: x.Real * y.Real, + E1mag: x.Real*y.E1mag + x.E1mag*y.Real, + E2mag: x.Real*y.E2mag + x.E2mag*y.Real, + E1E2mag: x.Real*y.E1E2mag + x.E1mag*y.E2mag + x.E2mag*y.E1mag + x.E1E2mag*y.Real, + } +} + +// Inv returns the hyperdual inverse of d. +// +// Special cases are: +// Inv(±Inf) = ±0-0ϵ₁-0ϵ₂±0ϵ₁ϵ₂ +// Inv(±0) = ±Inf-Infϵ₁-Infϵ₂±Infϵ₁ϵ₂ +func Inv(d Number) Number { + if d.Real == 0 { + return Number{ + Real: 1 / d.Real, + E1mag: math.Inf(-1), + E2mag: math.Inf(-1), + E1E2mag: 1 / d.Real, // Return a signed inf from a signed zero. + } + } + d2 := d.Real * d.Real + return Number{ + Real: 1 / d.Real, + E1mag: -d.E1mag / d2, + E2mag: -d.E2mag / d2, + E1E2mag: -d.E1E2mag/d2 + 2*d.E1mag*d.E2mag/(d2*d.Real), + } +} + +// Scale returns d scaled by f. +func Scale(f float64, d Number) Number { + return Number{Real: f * d.Real, E1mag: f * d.E1mag, E2mag: f * d.E2mag, E1E2mag: f * d.E1E2mag} +} + +// Abs returns the absolute value of d. +func Abs(d Number) Number { + if math.Float64bits(d.Real)&(1<<63) == 0 { + return d + } + return Scale(-1, d) +} diff --git a/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_fike.go b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_fike.go new file mode 100644 index 0000000..bf99131 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_fike.go @@ -0,0 +1,336 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Derived from code by Jeffrey A. Fike at http://adl.stanford.edu/hyperdual/ + +// The MIT License (MIT) +// +// Copyright (c) 2006 Jeffrey A. Fike +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +package hyperdual + +import "math" + +// PowReal returns x**p, the base-x exponential of p. 
+// +// Special cases are (in order): +// PowReal(NaN+xϵ₁+yϵ₂, ±0) = 1+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for any x and y +// PowReal(x, ±0) = 1 for any x +// PowReal(1+xϵ₁+yϵ₂, z) = 1+xzϵ₁+yzϵ₂+2xyzϵ₁ϵ₂ for any z +// PowReal(NaN+xϵ₁+yϵ₂, 1) = NaN+xϵ₁+yϵ₂+NaNϵ₁ϵ₂ for any x +// PowReal(x, 1) = x for any x +// PowReal(NaN+xϵ₁+xϵ₂, y) = NaN+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ +// PowReal(x, NaN) = NaN+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ +// PowReal(±0, y) = ±Inf for y an odd integer < 0 +// PowReal(±0, -Inf) = +Inf +// PowReal(±0, +Inf) = +0 +// PowReal(±0, y) = +Inf for finite y < 0 and not an odd integer +// PowReal(±0, y) = ±0 for y an odd integer > 0 +// PowReal(±0, y) = +0 for finite y > 0 and not an odd integer +// PowReal(-1, ±Inf) = 1 +// PowReal(x+0ϵ₁+0ϵ₂, +Inf) = +Inf+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for |x| > 1 +// PowReal(x+xϵ₁+yϵ₂, +Inf) = +Inf+Infϵ₁+Infϵ₂+NaNϵ₁ϵ₂ for |x| > 1 +// PowReal(x, -Inf) = +0+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for |x| > 1 +// PowReal(x+yϵ₁+zϵ₂, +Inf) = +0+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for |x| < 1 +// PowReal(x+0ϵ₁+0ϵ₂, -Inf) = +Inf+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for |x| < 1 +// PowReal(x, -Inf) = +Inf-Infϵ₁-Infϵ₂+NaNϵ₁ϵ₂ for |x| < 1 +// PowReal(+Inf, y) = +Inf for y > 0 +// PowReal(+Inf, y) = +0 for y < 0 +// PowReal(-Inf, y) = Pow(-0, -y) +// PowReal(x, y) = NaN+NaNϵ₁+NaNϵ₂+NaNϵ₁ϵ₂ for finite x < 0 and finite non-integer y +func PowReal(d Number, p float64) Number { + const tol = 1e-15 + + r := d.Real + if math.Abs(r) < tol { + if r >= 0 { + r = tol + } + if r < 0 { + r = -tol + } + } + deriv := p * math.Pow(r, p-1) + return Number{ + Real: math.Pow(d.Real, p), + E1mag: d.E1mag * deriv, + E2mag: d.E2mag * deriv, + E1E2mag: d.E1E2mag*deriv + p*(p-1)*d.E1mag*d.E2mag*math.Pow(r, (p-2)), + } +} + +// Pow returns x**p, the base-x exponential of p. +func Pow(d, p Number) Number { + return Exp(Mul(p, Log(d))) +} + +// Sqrt returns the square root of d. +// +// Special cases are: +// Sqrt(+Inf) = +Inf +// Sqrt(±0) = (±0+Infϵ₁+Infϵ₂-Infϵ₁ϵ₂) +// Sqrt(x < 0) = NaN +// Sqrt(NaN) = NaN +func Sqrt(d Number) Number { + if d.Real <= 0 { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: math.Inf(1), + E2mag: math.Inf(1), + E1E2mag: math.Inf(-1), + } + } + return Number{ + Real: math.NaN(), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.NaN(), + } + } + return PowReal(d, 0.5) +} + +// Exp returns e**q, the base-e exponential of d. +// +// Special cases are: +// Exp(+Inf) = +Inf +// Exp(NaN) = NaN +// Very large values overflow to 0 or +Inf. +// Very small values underflow to 1. +func Exp(d Number) Number { + exp := math.Exp(d.Real) // exp is also the derivative. + return Number{ + Real: exp, + E1mag: exp * d.E1mag, + E2mag: exp * d.E2mag, + E1E2mag: exp * (d.E1E2mag + d.E1mag*d.E2mag), + } +} + +// Log returns the natural logarithm of d. 
+// +// Special cases are: +// Log(+Inf) = (+Inf+0ϵ₁+0ϵ₂-0ϵ₁ϵ₂) +// Log(0) = (-Inf±Infϵ₁±Infϵ₂-Infϵ₁ϵ₂) +// Log(x < 0) = NaN +// Log(NaN) = NaN +func Log(d Number) Number { + switch d.Real { + case 0: + return Number{ + Real: math.Log(d.Real), + E1mag: math.Copysign(math.Inf(1), d.Real), + E2mag: math.Copysign(math.Inf(1), d.Real), + E1E2mag: math.Inf(-1), + } + case math.Inf(1): + return Number{ + Real: math.Log(d.Real), + E1mag: 0, + E2mag: 0, + E1E2mag: negZero, + } + } + if d.Real < 0 { + return Number{ + Real: math.NaN(), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.NaN(), + } + } + deriv1 := d.E1mag / d.Real + deriv2 := d.E2mag / d.Real + return Number{ + Real: math.Log(d.Real), + E1mag: deriv1, + E2mag: deriv2, + E1E2mag: d.E1E2mag/d.Real - (deriv1 * deriv2), + } +} + +// Sin returns the sine of d. +// +// Special cases are: +// Sin(±0) = (±0+Nϵ₁+Nϵ₂∓0ϵ₁ϵ₂) +// Sin(±Inf) = NaN +// Sin(NaN) = NaN +func Sin(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: -d.Real, + } + } + fn := math.Sin(d.Real) + deriv := math.Cos(d.Real) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag - fn*d.E1mag*d.E2mag, + } +} + +// Cos returns the cosine of d. +// +// Special cases are: +// Cos(±Inf) = NaN +// Cos(NaN) = NaN +func Cos(d Number) Number { + fn := math.Cos(d.Real) + deriv := -math.Sin(d.Real) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag - fn*d.E1mag*d.E2mag, + } +} + +// Tan returns the tangent of d. +// +// Special cases are: +// Tan(±0) = (±0+Nϵ₁+Nϵ₂±0ϵ₁ϵ₂) +// Tan(±Inf) = NaN +// Tan(NaN) = NaN +func Tan(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: d.Real, + } + } + fn := math.Tan(d.Real) + deriv := 1 + fn*fn + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(2*fn*deriv), + } +} + +// Asin returns the inverse sine of d. +// +// Special cases are: +// Asin(±0) = (±0+Nϵ₁+Nϵ₂±0ϵ₁ϵ₂) +// Asin(±1) = (±Inf+Infϵ₁+Infϵ₂±Infϵ₁ϵ₂) +// Asin(x) = NaN if x < -1 or x > 1 +func Asin(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: d.Real, + } + } else if m := math.Abs(d.Real); m >= 1 { + if m == 1 { + return Number{ + Real: math.Asin(d.Real), + E1mag: math.Inf(1), + E2mag: math.Inf(1), + E1E2mag: math.Copysign(math.Inf(1), d.Real), + } + } + return Number{ + Real: math.NaN(), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.NaN(), + } + } + fn := math.Asin(d.Real) + deriv1 := 1 - d.Real*d.Real + deriv := 1 / math.Sqrt(deriv1) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(d.Real*math.Pow(deriv1, -1.5)), + } +} + +// Acos returns the inverse cosine of d. 
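// Illustrative sketch (not part of the vendored gonum sources): the
// elementary functions above follow the same pattern as the arithmetic.
// With unit ϵ₁ and ϵ₂ seeds, Sin returns sin(x) in Real, the first
// derivative cos(x) in E1mag and E2mag, and the second derivative -sin(x)
// in E1E2mag.
package main

import (
	"fmt"
	"math"

	"gonum.org/v1/gonum/num/hyperdual"
)

func main() {
	x := hyperdual.Number{Real: math.Pi / 3, E1mag: 1, E2mag: 1}
	s := hyperdual.Sin(x)
	fmt.Println(s.Real)    // ≈ 0.866  = sin(π/3)
	fmt.Println(s.E1mag)   // ≈ 0.5    = cos(π/3)
	fmt.Println(s.E1E2mag) // ≈ -0.866 = -sin(π/3)
}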
+// +// Special cases are: +// Acos(-1) = (Pi-Infϵ₁-Infϵ₂+Infϵ₁ϵ₂) +// Acos(1) = (0-Infϵ₁-Infϵ₂-Infϵ₁ϵ₂) +// Acos(x) = NaN if x < -1 or x > 1 +func Acos(d Number) Number { + if m := math.Abs(d.Real); m >= 1 { + if m == 1 { + return Number{ + Real: math.Acos(d.Real), + E1mag: math.Inf(-1), + E2mag: math.Inf(-1), + E1E2mag: math.Copysign(math.Inf(1), -d.Real), + } + } + return Number{ + Real: math.NaN(), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.NaN(), + } + } + fn := math.Acos(d.Real) + deriv1 := 1 - d.Real*d.Real + deriv := -1 / math.Sqrt(deriv1) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(-d.Real*math.Pow(deriv1, -1.5)), + } +} + +// Atan returns the inverse tangent of d. +// +// Special cases are: +// Atan(±0) = (±0+Nϵ₁+Nϵ₂∓0ϵ₁ϵ₂) +// Atan(±Inf) = (±Pi/2+0ϵ₁+0ϵ₂∓0ϵ₁ϵ₂) +func Atan(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: -d.Real, + } + } + fn := math.Atan(d.Real) + deriv1 := 1 + d.Real*d.Real + deriv := 1 / deriv1 + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(-2*d.Real/(deriv1*deriv1)), + } +} diff --git a/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_hyperbolic.go b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_hyperbolic.go new file mode 100644 index 0000000..5128789 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/hyperdual/hyperdual_hyperbolic.go @@ -0,0 +1,202 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package hyperdual + +import "math" + +// Sinh returns the hyperbolic sine of d. +// +// Special cases are: +// Sinh(±0) = (±0+Nϵ₁+Nϵ₂±0ϵ₁ϵ₂) +// Sinh(±Inf) = ±Inf +// Sinh(NaN) = NaN +func Sinh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E1mag, + E1E2mag: d.Real, + } + } + if math.IsInf(d.Real, 0) { + return Number{ + Real: d.Real, + E1mag: math.Inf(1), + E2mag: math.Inf(1), + E1E2mag: d.Real, + } + } + fn := math.Sinh(d.Real) + deriv := math.Cosh(d.Real) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + fn*d.E1mag*d.E2mag, + } +} + +// Cosh returns the hyperbolic cosine of d. +// +// Special cases are: +// Cosh(±0) = 1 +// Cosh(±Inf) = +Inf +// Cosh(NaN) = NaN +func Cosh(d Number) Number { + if math.IsInf(d.Real, 0) { + return Number{ + Real: math.Inf(1), + E1mag: d.Real, + E2mag: d.Real, + E1E2mag: math.Inf(1), + } + } + fn := math.Cosh(d.Real) + deriv := math.Sinh(d.Real) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + fn*d.E1mag*d.E2mag, + } +} + +// Tanh returns the hyperbolic tangent of d. 
+// +// Special cases are: +// Tanh(±0) = (±0+Nϵ₁+Nϵ₂∓0ϵ₁ϵ₂) +// Tanh(±Inf) = (±1+0ϵ₁+0ϵ₂∓0ϵ₁ϵ₂) +// Tanh(NaN) = NaN +func Tanh(d Number) Number { + switch d.Real { + case 0: + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: -d.Real, + } + case math.Inf(1): + return Number{ + Real: 1, + E1mag: 0, + E2mag: 0, + E1E2mag: negZero, + } + case math.Inf(-1): + return Number{ + Real: -1, + E1mag: 0, + E2mag: 0, + E1E2mag: 0, + } + } + fn := math.Tanh(d.Real) + deriv := 1 - fn*fn + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag - d.E1mag*d.E2mag*(2*fn*deriv), + } +} + +// Asinh returns the inverse hyperbolic sine of d. +// +// Special cases are: +// Asinh(±0) = (±0+Nϵ₁+Nϵ₂∓0ϵ₁ϵ₂) +// Asinh(±Inf) = ±Inf +// Asinh(NaN) = NaN +func Asinh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: -d.Real, + } + } + fn := math.Asinh(d.Real) + deriv1 := d.Real*d.Real + 1 + deriv := 1 / math.Sqrt(deriv1) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(-d.Real*(deriv/deriv1)), + } +} + +// Acosh returns the inverse hyperbolic cosine of d. +// +// Special cases are: +// Acosh(+Inf) = +Inf +// Acosh(1) = (0+Infϵ₁+Infϵ₂-Infϵ₁ϵ₂) +// Acosh(x) = NaN if x < 1 +// Acosh(NaN) = NaN +func Acosh(d Number) Number { + if d.Real <= 1 { + if d.Real == 1 { + return Number{ + Real: 0, + E1mag: math.Inf(1), + E2mag: math.Inf(1), + E1E2mag: math.Inf(-1), + } + } + return Number{ + Real: math.NaN(), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.NaN(), + } + } + fn := math.Acosh(d.Real) + deriv1 := d.Real*d.Real - 1 + deriv := 1 / math.Sqrt(deriv1) + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(-d.Real*(deriv/deriv1)), + } +} + +// Atanh returns the inverse hyperbolic tangent of d. +// +// Special cases are: +// Atanh(1) = +Inf +// Atanh(±0) = (±0+Nϵ₁+Nϵ₂±0ϵ₁ϵ₂) +// Atanh(-1) = -Inf +// Atanh(x) = NaN if x < -1 or x > 1 +// Atanh(NaN) = NaN +func Atanh(d Number) Number { + if d.Real == 0 { + return Number{ + Real: d.Real, + E1mag: d.E1mag, + E2mag: d.E2mag, + E1E2mag: d.Real, + } + } + if math.Abs(d.Real) == 1 { + return Number{ + Real: math.Inf(int(d.Real)), + E1mag: math.NaN(), + E2mag: math.NaN(), + E1E2mag: math.Inf(int(d.Real)), + } + } + fn := math.Atanh(d.Real) + deriv1 := 1 - d.Real*d.Real + deriv := 1 / deriv1 + return Number{ + Real: fn, + E1mag: deriv * d.E1mag, + E2mag: deriv * d.E2mag, + E1E2mag: deriv*d.E1E2mag + d.E1mag*d.E2mag*(2*d.Real/(deriv1*deriv1)), + } +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/abs.go b/vendor/gonum.org/v1/gonum/num/quat/abs.go new file mode 100644 index 0000000..6f83a76 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/abs.go @@ -0,0 +1,52 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +import "math" + +// Abs returns the absolute value (also called the modulus) of q. +func Abs(q Number) float64 { + // Special cases. 
+ switch { + case IsInf(q): + return math.Inf(1) + case IsNaN(q): + return math.NaN() + } + + r, i, j, k := q.Real, q.Imag, q.Jmag, q.Kmag + if r < 0 { + r = -r + } + if i < 0 { + i = -i + } + if j < 0 { + j = -j + } + if k < 0 { + k = -k + } + if r < i { + r, i = i, r + } + if r < j { + r, j = j, r + } + if r < k { + r, k = k, r + } + if r == 0 { + return 0 + } + i /= r + j /= r + k /= r + return r * math.Sqrt(1+i*i+j*j+k*k) +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/conj.go b/vendor/gonum.org/v1/gonum/num/quat/conj.go new file mode 100644 index 0000000..bcf6d01 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/conj.go @@ -0,0 +1,23 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +// Conj returns the quaternion conjugate of q. +func Conj(q Number) Number { + return Number{Real: q.Real, Imag: -q.Imag, Jmag: -q.Jmag, Kmag: -q.Kmag} +} + +// Inv returns the quaternion inverse of q. +func Inv(q Number) Number { + if IsInf(q) { + return zero + } + a := Abs(q) + return Scale(1/(a*a), Conj(q)) +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/doc.go b/vendor/gonum.org/v1/gonum/num/quat/doc.go new file mode 100644 index 0000000..95af2f4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/doc.go @@ -0,0 +1,10 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package quat provides the quaternion numeric type and functions. +// +// For a good treatment of uses and behaviors of quaternions, see +// the interactive videos by Ben Eater and Grant Sanderson here +// https://eater.net/quaternions. +package quat // imports "gonum.org/v1/gonum/num/quat" diff --git a/vendor/gonum.org/v1/gonum/num/quat/exp.go b/vendor/gonum.org/v1/gonum/num/quat/exp.go new file mode 100644 index 0000000..7310360 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/exp.go @@ -0,0 +1,64 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +import "math" + +// Exp returns e**q, the base-e exponential of q. +func Exp(q Number) Number { + w, uv := split(q) + if uv == zero { + return lift(math.Exp(w)) + } + v := Abs(uv) + e := math.Exp(w) + s, c := math.Sincos(v) + return join(e*c, Scale(e*s/v, uv)) +} + +// Log returns the natural logarithm of q. +func Log(q Number) Number { + w, uv := split(q) + if uv == zero { + return lift(math.Log(w)) + } + v := Abs(uv) + return join(math.Log(Abs(q)), Scale(math.Atan2(v, w)/v, uv)) +} + +// Pow return q**r, the base-q exponential of r. +// For generalized compatibility with math.Pow: +// Pow(0, ±0) returns 1+0i+0j+0k +// Pow(0, c) for real(c)<0 returns Inf+0i+0j+0k if imag(c), jmag(c), kmag(c) are zero, +// otherwise Inf+Inf i+Inf j+Inf k. 
+func Pow(q, r Number) Number { + if q == zero { + w, uv := split(r) + switch { + case w == 0: + return Number{Real: 1} + case w < 0: + if uv == zero { + return Number{Real: math.Inf(1)} + } + return Inf() + case w > 0: + return zero + } + } + return Exp(Mul(Log(q), r)) +} + +// Sqrt returns the square root of q. +func Sqrt(q Number) Number { + if q == zero { + return zero + } + return Pow(q, Number{Real: 0.5}) +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/inf.go b/vendor/gonum.org/v1/gonum/num/quat/inf.go new file mode 100644 index 0000000..7b165c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/inf.go @@ -0,0 +1,22 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +import "math" + +// IsInf returns true if any of real(q), imag(q), jmag(q), or kmag(q) is an infinity. +func IsInf(q Number) bool { + return math.IsInf(q.Real, 0) || math.IsInf(q.Imag, 0) || math.IsInf(q.Jmag, 0) || math.IsInf(q.Kmag, 0) +} + +// Inf returns a quaternion infinity, quaternion(+Inf, +Inf, +Inf, +Inf). +func Inf() Number { + inf := math.Inf(1) + return Number{Real: inf, Imag: inf, Jmag: inf, Kmag: inf} +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/nan.go b/vendor/gonum.org/v1/gonum/num/quat/nan.go new file mode 100644 index 0000000..02afe48 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/nan.go @@ -0,0 +1,26 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +import "math" + +// IsNaN returns true if any of real(q), imag(q), jmag(q), or kmag(q) is NaN +// and none are an infinity. +func IsNaN(q Number) bool { + if math.IsInf(q.Real, 0) || math.IsInf(q.Imag, 0) || math.IsInf(q.Jmag, 0) || math.IsInf(q.Kmag, 0) { + return false + } + return math.IsNaN(q.Real) || math.IsNaN(q.Imag) || math.IsNaN(q.Jmag) || math.IsNaN(q.Kmag) +} + +// NaN returns a quaternion ``not-a-number'' value. +func NaN() Number { + nan := math.NaN() + return Number{Real: nan, Imag: nan, Jmag: nan, Kmag: nan} +} diff --git a/vendor/gonum.org/v1/gonum/num/quat/quat.go b/vendor/gonum.org/v1/gonum/num/quat/quat.go new file mode 100644 index 0000000..f78176d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/num/quat/quat.go @@ -0,0 +1,401 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package quat + +import ( + "fmt" + "strconv" + "strings" +) + +var zero Number + +// Number is a float64 precision quaternion. +type Number struct { + Real, Imag, Jmag, Kmag float64 +} + +// Format implements fmt.Formatter. 
+func (q Number) Format(fs fmt.State, c rune) { + prec, pOk := fs.Precision() + if !pOk { + prec = -1 + } + width, wOk := fs.Width() + if !wOk { + width = -1 + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T{Real:%#v, Imag:%#v, Jmag:%#v, Kmag:%#v}", q, q.Real, q.Imag, q.Jmag, q.Kmag) + return + } + if fs.Flag('+') { + fmt.Fprintf(fs, "{Real:%+v, Imag:%+v, Jmag:%+v, Kmag:%+v}", q.Real, q.Imag, q.Jmag, q.Kmag) + return + } + c = 'g' + prec = -1 + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + fre := fmtString(fs, c, prec, width, false) + fim := fmtString(fs, c, prec, width, true) + fmt.Fprintf(fs, fmt.Sprintf("(%s%[2]si%[2]sj%[2]sk)", fre, fim), q.Real, q.Imag, q.Jmag, q.Kmag) + default: + fmt.Fprintf(fs, "%%!%c(%T=%[2]v)", c, q) + return + } +} + +// This is horrible, but it's what we have. +func fmtString(fs fmt.State, c rune, prec, width int, wantPlus bool) string { + var b strings.Builder + b.WriteByte('%') + for _, f := range "0+- " { + if fs.Flag(int(f)) || (f == '+' && wantPlus) { + b.WriteByte(byte(f)) + } + } + if width >= 0 { + fmt.Fprint(&b, width) + } + if prec >= 0 { + b.WriteByte('.') + if prec > 0 { + fmt.Fprint(&b, prec) + } + } + b.WriteRune(c) + return b.String() +} + +// Add returns the sum of x and y. +func Add(x, y Number) Number { + return Number{ + Real: x.Real + y.Real, + Imag: x.Imag + y.Imag, + Jmag: x.Jmag + y.Jmag, + Kmag: x.Kmag + y.Kmag, + } +} + +// Sub returns the difference of x and y, x-y. +func Sub(x, y Number) Number { + return Number{ + Real: x.Real - y.Real, + Imag: x.Imag - y.Imag, + Jmag: x.Jmag - y.Jmag, + Kmag: x.Kmag - y.Kmag, + } +} + +// Mul returns the Hamiltonian product of x and y. +func Mul(x, y Number) Number { + return Number{ + Real: x.Real*y.Real - x.Imag*y.Imag - x.Jmag*y.Jmag - x.Kmag*y.Kmag, + Imag: x.Real*y.Imag + x.Imag*y.Real + x.Jmag*y.Kmag - x.Kmag*y.Jmag, + Jmag: x.Real*y.Jmag - x.Imag*y.Kmag + x.Jmag*y.Real + x.Kmag*y.Imag, + Kmag: x.Real*y.Kmag + x.Imag*y.Jmag - x.Jmag*y.Imag + x.Kmag*y.Real, + } +} + +// Scale returns q scaled by f. +func Scale(f float64, q Number) Number { + return Number{Real: f * q.Real, Imag: f * q.Imag, Jmag: f * q.Jmag, Kmag: f * q.Kmag} +} + +// Parse converts the string s to a Number. The string may be parenthesized and +// has the format [±]N±Ni±Nj±Nk. The order of the components is not strict. 
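// Illustrative sketch (not part of the vendored gonum sources): combining
// Sqrt, Mul and Conj from above to rotate a vector. Sqrt(k) is the unit
// quaternion of a 90° rotation about the z axis, and conjugation q·v·Conj(q)
// applies it to the pure quaternion v that represents the vector (1, 0, 0).
package main

import (
	"fmt"

	"gonum.org/v1/gonum/num/quat"
)

func main() {
	q := quat.Sqrt(quat.Number{Kmag: 1}) // ≈ 0.707 + 0.707k
	v := quat.Number{Imag: 1}            // the vector (1, 0, 0)
	r := quat.Mul(quat.Mul(q, v), quat.Conj(q))
	fmt.Println(r) // ≈ (0+0i+1j+0k), i.e. the vector (0, 1, 0)
}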
+func Parse(s string) (Number, error) { + if len(s) == 0 { + return Number{}, parseError{state: -1} + } + orig := s + + wantClose := s[0] == '(' + if wantClose { + if s[len(s)-1] != ')' { + return Number{}, parseError{string: orig, state: -1} + } + s = s[1 : len(s)-1] + } + if len(s) == 0 { + return Number{}, parseError{string: orig, state: -1} + } + switch s[0] { + case 'n', 'N': + if strings.ToLower(s) == "nan" { + return NaN(), nil + } + case 'i', 'I': + if strings.ToLower(s) == "inf" { + return Inf(), nil + } + } + + var q Number + var parts byte + for i := 0; i < 4; i++ { + beg, end, p, err := floatPart(s) + if err != nil { + return q, parseError{string: orig, state: -1} + } + if parts&(1<= 0 { + panic("backtracking: initial derivative is non-negative") + } + + if b.ContractionFactor == 0 { + b.ContractionFactor = defaultBacktrackingContraction + } + if b.DecreaseFactor == 0 { + b.DecreaseFactor = defaultBacktrackingDecrease + } + if b.ContractionFactor <= 0 || b.ContractionFactor >= 1 { + panic("backtracking: ContractionFactor must be between 0 and 1") + } + if b.DecreaseFactor <= 0 || b.DecreaseFactor >= 1 { + panic("backtracking: DecreaseFactor must be between 0 and 1") + } + + b.stepSize = step + b.initF = f + b.initG = g + + b.lastOp = FuncEvaluation + return b.lastOp +} + +func (b *Backtracking) Iterate(f, _ float64) (Operation, float64, error) { + if b.lastOp != FuncEvaluation { + panic("backtracking: Init has not been called") + } + + if ArmijoConditionMet(f, b.initF, b.initG, b.stepSize, b.DecreaseFactor) { + b.lastOp = MajorIteration + return b.lastOp, b.stepSize, nil + } + b.stepSize *= b.ContractionFactor + if b.stepSize < minimumBacktrackingStepSize { + b.lastOp = NoOperation + return b.lastOp, b.stepSize, ErrLinesearcherFailure + } + b.lastOp = FuncEvaluation + return b.lastOp, b.stepSize, nil +} diff --git a/vendor/gonum.org/v1/gonum/optimize/bfgs.go b/vendor/gonum.org/v1/gonum/optimize/bfgs.go new file mode 100644 index 0000000..ad8947a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/bfgs.go @@ -0,0 +1,192 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/mat" +) + +var ( + _ Method = (*BFGS)(nil) + _ localMethod = (*BFGS)(nil) +) + +// BFGS implements the Broyden–Fletcher–Goldfarb–Shanno optimization method. It +// is a quasi-Newton method that performs successive rank-one updates to an +// estimate of the inverse Hessian of the objective function. It exhibits +// super-linear convergence when in proximity to a local minimum. It has memory +// cost that is O(n^2) relative to the input dimension. +type BFGS struct { + // Linesearcher selects suitable steps along the descent direction. + // Accepted steps should satisfy the strong Wolfe conditions. + // If Linesearcher == nil, an appropriate default is chosen. + Linesearcher Linesearcher + // GradStopThreshold sets the threshold for stopping if the gradient norm + // gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and + // if it is NaN the setting is not used. + GradStopThreshold float64 + + ls *LinesearchMethod + + status Status + err error + + dim int + x mat.VecDense // Location of the last major iteration. + grad mat.VecDense // Gradient at the last major iteration. + s mat.VecDense // Difference between locations in this and the previous iteration. 
+ y mat.VecDense // Difference between gradients in this and the previous iteration. + tmp mat.VecDense + + invHess *mat.SymDense + + first bool // Indicator of the first iteration. +} + +func (b *BFGS) Status() (Status, error) { + return b.status, b.err +} + +func (*BFGS) Uses(has Available) (uses Available, err error) { + return has.gradient() +} + +func (b *BFGS) Init(dim, tasks int) int { + b.status = NotTerminated + b.err = nil + return 1 +} + +func (b *BFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + b.status, b.err = localOptimizer{}.run(b, b.GradStopThreshold, operation, result, tasks) + close(operation) + return +} + +func (b *BFGS) initLocal(loc *Location) (Operation, error) { + if b.Linesearcher == nil { + b.Linesearcher = &Bisection{} + } + if b.ls == nil { + b.ls = &LinesearchMethod{} + } + b.ls.Linesearcher = b.Linesearcher + b.ls.NextDirectioner = b + + return b.ls.Init(loc) +} + +func (b *BFGS) iterateLocal(loc *Location) (Operation, error) { + return b.ls.Iterate(loc) +} + +func (b *BFGS) InitDirection(loc *Location, dir []float64) (stepSize float64) { + dim := len(loc.X) + b.dim = dim + b.first = true + + x := mat.NewVecDense(dim, loc.X) + grad := mat.NewVecDense(dim, loc.Gradient) + b.x.CloneVec(x) + b.grad.CloneVec(grad) + + b.y.Reset() + b.s.Reset() + b.tmp.Reset() + + if b.invHess == nil || cap(b.invHess.RawSymmetric().Data) < dim*dim { + b.invHess = mat.NewSymDense(dim, nil) + } else { + b.invHess = mat.NewSymDense(dim, b.invHess.RawSymmetric().Data[:dim*dim]) + } + // The values of the inverse Hessian are initialized in the first call to + // NextDirection. + + // Initial direction is just negative of the gradient because the Hessian + // is an identity matrix. + d := mat.NewVecDense(dim, dir) + d.ScaleVec(-1, grad) + return 1 / mat.Norm(d, 2) +} + +func (b *BFGS) NextDirection(loc *Location, dir []float64) (stepSize float64) { + dim := b.dim + if len(loc.X) != dim { + panic("bfgs: unexpected size mismatch") + } + if len(loc.Gradient) != dim { + panic("bfgs: unexpected size mismatch") + } + if len(dir) != dim { + panic("bfgs: unexpected size mismatch") + } + + x := mat.NewVecDense(dim, loc.X) + grad := mat.NewVecDense(dim, loc.Gradient) + + // s = x_{k+1} - x_{k} + b.s.SubVec(x, &b.x) + // y = g_{k+1} - g_{k} + b.y.SubVec(grad, &b.grad) + + sDotY := mat.Dot(&b.s, &b.y) + + if b.first { + // Rescale the initial Hessian. + // From: Nocedal, J., Wright, S.: Numerical Optimization (2nd ed). + // Springer (2006), page 143, eq. 6.20. + yDotY := mat.Dot(&b.y, &b.y) + scale := sDotY / yDotY + for i := 0; i < dim; i++ { + for j := i; j < dim; j++ { + if i == j { + b.invHess.SetSym(i, i, scale) + } else { + b.invHess.SetSym(i, j, 0) + } + } + } + b.first = false + } + + if math.Abs(sDotY) != 0 { + // Update the inverse Hessian according to the formula + // + // B_{k+1}^-1 = B_k^-1 + // + (s_k^T y_k + y_k^T B_k^-1 y_k) / (s_k^T y_k)^2 * (s_k s_k^T) + // - (B_k^-1 y_k s_k^T + s_k y_k^T B_k^-1) / (s_k^T y_k). + // + // Note that y_k^T B_k^-1 y_k is a scalar, and that the third term is a + // rank-two update where B_k^-1 y_k is one vector and s_k is the other. + yBy := mat.Inner(&b.y, b.invHess, &b.y) + b.tmp.MulVec(b.invHess, &b.y) + scale := (1 + yBy/sDotY) / sDotY + b.invHess.SymRankOne(b.invHess, scale, &b.s) + b.invHess.RankTwo(b.invHess, -1/sDotY, &b.tmp, &b.s) + } + + // Update the stored BFGS data. + b.x.CopyVec(x) + b.grad.CopyVec(grad) + + // New direction is stored in dir. 
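// Illustrative sketch (not part of the vendored gonum sources): driving BFGS
// through the package's Minimize/Problem API, which is not shown in this
// patch, on the Rosenbrock function. Grad writes the gradient into its first
// argument; the default Bisection line search is used.
package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		Func: func(x []float64) float64 {
			a, b := 1-x[0], x[1]-x[0]*x[0]
			return a*a + 100*b*b
		},
		Grad: func(grad, x []float64) {
			grad[0] = -2*(1-x[0]) - 400*x[0]*(x[1]-x[0]*x[0])
			grad[1] = 200 * (x[1] - x[0]*x[0])
		},
	}
	res, err := optimize.Minimize(p, []float64{-1.2, 1}, nil, &optimize.BFGS{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.X, res.F) // ≈ [1 1] 0
}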
+ d := mat.NewVecDense(dim, dir) + d.MulVec(b.invHess, grad) + d.ScaleVec(-1, d) + + return 1 +} + +func (*BFGS) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{true, false} +} diff --git a/vendor/gonum.org/v1/gonum/optimize/bisection.go b/vendor/gonum.org/v1/gonum/optimize/bisection.go new file mode 100644 index 0000000..bce441f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/bisection.go @@ -0,0 +1,146 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import "math" + +const ( + defaultBisectionCurvature = 0.9 +) + +// Bisection is a Linesearcher that uses a bisection to find a point that +// satisfies the strong Wolfe conditions with the given curvature factor and +// a decrease factor of zero. +type Bisection struct { + // CurvatureFactor is the constant factor in the curvature condition. + // Smaller values result in a more exact line search. + // A set value must be in the interval (0, 1), otherwise Init will panic. + // If it is zero, it will be defaulted to 0.9. + CurvatureFactor float64 + + minStep float64 + maxStep float64 + currStep float64 + + initF float64 + minF float64 + maxF float64 + lastF float64 + + initGrad float64 + + lastOp Operation +} + +func (b *Bisection) Init(f, g float64, step float64) Operation { + if step <= 0 { + panic("bisection: bad step size") + } + if g >= 0 { + panic("bisection: initial derivative is non-negative") + } + + if b.CurvatureFactor == 0 { + b.CurvatureFactor = defaultBisectionCurvature + } + if b.CurvatureFactor <= 0 || b.CurvatureFactor >= 1 { + panic("bisection: CurvatureFactor not between 0 and 1") + } + + b.minStep = 0 + b.maxStep = math.Inf(1) + b.currStep = step + + b.initF = f + b.minF = f + b.maxF = math.NaN() + + b.initGrad = g + + // Only evaluate the gradient when necessary. + b.lastOp = FuncEvaluation + return b.lastOp +} + +func (b *Bisection) Iterate(f, g float64) (Operation, float64, error) { + if b.lastOp != FuncEvaluation && b.lastOp != GradEvaluation { + panic("bisection: Init has not been called") + } + minF := b.initF + if b.maxF < minF { + minF = b.maxF + } + if b.minF < minF { + minF = b.minF + } + if b.lastOp == FuncEvaluation { + // See if the function value is good enough to make progress. If it is, + // evaluate the gradient. If not, set it to the upper bound if the bound + // has not yet been found, otherwise iterate toward the minimum location. + if f <= minF { + b.lastF = f + b.lastOp = GradEvaluation + return b.lastOp, b.currStep, nil + } + if math.IsInf(b.maxStep, 1) { + b.maxStep = b.currStep + b.maxF = f + return b.nextStep((b.minStep + b.maxStep) / 2) + } + if b.minF <= b.maxF { + b.maxStep = b.currStep + b.maxF = f + } else { + b.minStep = b.currStep + b.minF = f + } + return b.nextStep((b.minStep + b.maxStep) / 2) + } + f = b.lastF + // The function value was lower. Check if this location is sufficient to + // converge the linesearch, otherwise iterate. + if StrongWolfeConditionsMet(f, g, minF, b.initGrad, b.currStep, 0, b.CurvatureFactor) { + b.lastOp = MajorIteration + return b.lastOp, b.currStep, nil + } + if math.IsInf(b.maxStep, 1) { + // The function value is lower. If the gradient is positive, an upper bound + // of the minimum been found. If the gradient is negative, search farther + // in that direction. 
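// Illustrative sketch (not part of the vendored gonum sources): the
// acceptance tests that the line searchers above drive toward, restated as
// plain float checks. f0 and g0 are the function value and the directional
// derivative at step 0; f and g are the values at the candidate step. The
// helper names are local to this sketch, not package API.
package main

import (
	"fmt"
	"math"
)

// armijo reports whether the sufficient-decrease condition holds.
func armijo(f, f0, g0, step, decrease float64) bool {
	return f <= f0+decrease*step*g0
}

// strongWolfe additionally requires the curvature condition |g| <= c·|g0|.
func strongWolfe(f, g, f0, g0, step, decrease, curvature float64) bool {
	return armijo(f, f0, g0, step, decrease) && math.Abs(g) <= curvature*math.Abs(g0)
}

func main() {
	f0, g0, step := 10.0, -2.0, 1.0
	fmt.Println(armijo(9, f0, g0, step, 1e-4))                 // true: 9 <= 10 - 0.0002
	fmt.Println(strongWolfe(9, -0.5, f0, g0, step, 1e-4, 0.9)) // true: 0.5 <= 1.8
	fmt.Println(strongWolfe(9, -1.9, f0, g0, step, 1e-4, 0.9)) // false: 1.9 > 1.8
}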
+ if g > 0 { + b.maxStep = b.currStep + b.maxF = f + return b.nextStep((b.minStep + b.maxStep) / 2) + } + b.minStep = b.currStep + b.minF = f + return b.nextStep(b.currStep * 2) + } + // The interval has been bounded, and we have found a new lowest value. Use + // the gradient to decide which direction. + if g < 0 { + b.minStep = b.currStep + b.minF = f + } else { + b.maxStep = b.currStep + b.maxF = f + } + return b.nextStep((b.minStep + b.maxStep) / 2) +} + +// nextStep checks if the new step is equal to the old step. +// This can happen if min and max are the same, or if the step size is infinity, +// both of which indicate the minimization must stop. If the steps are different, +// it sets the new step size and returns the evaluation type and the step. If the steps +// are the same, it returns an error. +func (b *Bisection) nextStep(step float64) (Operation, float64, error) { + if b.currStep == step { + b.lastOp = NoOperation + return b.lastOp, b.currStep, ErrLinesearcherFailure + } + b.currStep = step + b.lastOp = FuncEvaluation + return b.lastOp, b.currStep, nil +} diff --git a/vendor/gonum.org/v1/gonum/optimize/cg.go b/vendor/gonum.org/v1/gonum/optimize/cg.go new file mode 100644 index 0000000..506c921 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/cg.go @@ -0,0 +1,349 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/floats" +) + +const ( + iterationRestartFactor = 6 + angleRestartThreshold = -0.9 +) + +var ( + _ Method = (*CG)(nil) + _ localMethod = (*CG)(nil) +) + +// CGVariant calculates the scaling parameter, β, used for updating the +// conjugate direction in the nonlinear conjugate gradient (CG) method. +type CGVariant interface { + // Init is called at the first iteration and provides a way to initialize + // any internal state. + Init(loc *Location) + // Beta returns the value of the scaling parameter that is computed + // according to the particular variant of the CG method. + Beta(grad, gradPrev, dirPrev []float64) float64 +} + +// CG implements the nonlinear conjugate gradient method for solving nonlinear +// unconstrained optimization problems. It is a line search method that +// generates the search directions d_k according to the formula +// d_{k+1} = -∇f_{k+1} + β_k*d_k, d_0 = -∇f_0. +// Variants of the conjugate gradient method differ in the choice of the +// parameter β_k. The conjugate gradient method usually requires fewer function +// evaluations than the gradient descent method and no matrix storage, but +// L-BFGS is usually more efficient. +// +// CG implements a restart strategy that takes the steepest descent direction +// (i.e., d_{k+1} = -∇f_{k+1}) whenever any of the following conditions holds: +// +// - A certain number of iterations has elapsed without a restart. This number +// is controllable via IterationRestartFactor and if equal to 0, it is set to +// a reasonable default based on the problem dimension. +// - The angle between the gradients at two consecutive iterations ∇f_k and +// ∇f_{k+1} is too large. +// - The direction d_{k+1} is not a descent direction. +// - β_k returned from CGVariant.Beta is equal to zero. +// +// The line search for CG must yield step sizes that satisfy the strong Wolfe +// conditions at every iteration, otherwise the generated search direction +// might fail to be a descent direction. 
The line search should be more +// stringent compared with those for Newton-like methods, which can be achieved +// by setting the gradient constant in the strong Wolfe conditions to a small +// value. +// +// See also William Hager, Hongchao Zhang, A survey of nonlinear conjugate +// gradient methods. Pacific Journal of Optimization, 2 (2006), pp. 35-58, and +// references therein. +type CG struct { + // Linesearcher must satisfy the strong Wolfe conditions at every iteration. + // If Linesearcher == nil, an appropriate default is chosen. + Linesearcher Linesearcher + // Variant implements the particular CG formula for computing β_k. + // If Variant is nil, an appropriate default is chosen. + Variant CGVariant + // InitialStep estimates the initial line search step size, because the CG + // method does not generate well-scaled search directions. + // If InitialStep is nil, an appropriate default is chosen. + InitialStep StepSizer + + // IterationRestartFactor determines the frequency of restarts based on the + // problem dimension. The negative gradient direction is taken whenever + // ceil(IterationRestartFactor*(problem dimension)) iterations have elapsed + // without a restart. For medium- and large-scale problems + // IterationRestartFactor should be set to 1; for low-dimensional problems a + // larger value should be chosen. Note that if the ceil function returns 1, + // CG will be identical to gradient descent. + // If IterationRestartFactor is 0, it will be set to 6. + // CG will panic if IterationRestartFactor is negative. + IterationRestartFactor float64 + // AngleRestartThreshold sets the threshold angle for restart. The method + // is restarted if the cosine of the angle between two consecutive + // gradients is smaller than or equal to AngleRestartThreshold, that is, if + // ∇f_k·∇f_{k+1} / (|∇f_k| |∇f_{k+1}|) <= AngleRestartThreshold. + // A value of AngleRestartThreshold closer to -1 (successive gradients in + // exact opposite directions) will tend to reduce the number of restarts. + // If AngleRestartThreshold is 0, it will be set to -0.9. + // CG will panic if AngleRestartThreshold is not in the interval [-1, 0]. + AngleRestartThreshold float64 + // GradStopThreshold sets the threshold for stopping if the gradient norm + // gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and + // if it is NaN the setting is not used.
+ GradStopThreshold float64 + + ls *LinesearchMethod + + status Status + err error + + restartAfter int + iterFromRestart int + + dirPrev []float64 + gradPrev []float64 + gradPrevNorm float64 +} + +func (cg *CG) Status() (Status, error) { + return cg.status, cg.err +} + +func (*CG) Uses(has Available) (uses Available, err error) { + return has.gradient() +} + +func (cg *CG) Init(dim, tasks int) int { + cg.status = NotTerminated + cg.err = nil + return 1 +} + +func (cg *CG) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + cg.status, cg.err = localOptimizer{}.run(cg, cg.GradStopThreshold, operation, result, tasks) + close(operation) + return +} + +func (cg *CG) initLocal(loc *Location) (Operation, error) { + if cg.IterationRestartFactor < 0 { + panic("cg: IterationRestartFactor is negative") + } + if cg.AngleRestartThreshold < -1 || cg.AngleRestartThreshold > 0 { + panic("cg: AngleRestartThreshold not in [-1, 0]") + } + + if cg.Linesearcher == nil { + cg.Linesearcher = &MoreThuente{CurvatureFactor: 0.1} + } + if cg.Variant == nil { + cg.Variant = &HestenesStiefel{} + } + if cg.InitialStep == nil { + cg.InitialStep = &FirstOrderStepSize{} + } + + if cg.IterationRestartFactor == 0 { + cg.IterationRestartFactor = iterationRestartFactor + } + if cg.AngleRestartThreshold == 0 { + cg.AngleRestartThreshold = angleRestartThreshold + } + + if cg.ls == nil { + cg.ls = &LinesearchMethod{} + } + cg.ls.Linesearcher = cg.Linesearcher + cg.ls.NextDirectioner = cg + + return cg.ls.Init(loc) +} + +func (cg *CG) iterateLocal(loc *Location) (Operation, error) { + return cg.ls.Iterate(loc) +} + +func (cg *CG) InitDirection(loc *Location, dir []float64) (stepSize float64) { + dim := len(loc.X) + + cg.restartAfter = int(math.Ceil(cg.IterationRestartFactor * float64(dim))) + cg.iterFromRestart = 0 + + // The initial direction is always the negative gradient. + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + + cg.dirPrev = resize(cg.dirPrev, dim) + copy(cg.dirPrev, dir) + cg.gradPrev = resize(cg.gradPrev, dim) + copy(cg.gradPrev, loc.Gradient) + cg.gradPrevNorm = floats.Norm(loc.Gradient, 2) + + cg.Variant.Init(loc) + return cg.InitialStep.Init(loc, dir) +} + +func (cg *CG) NextDirection(loc *Location, dir []float64) (stepSize float64) { + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + + cg.iterFromRestart++ + var restart bool + if cg.iterFromRestart == cg.restartAfter { + // Restart because too many iterations have been taken without a restart. + restart = true + } + + gDot := floats.Dot(loc.Gradient, cg.gradPrev) + gNorm := floats.Norm(loc.Gradient, 2) + if gDot <= cg.AngleRestartThreshold*gNorm*cg.gradPrevNorm { + // Restart because the angle between the last two gradients is too large. + restart = true + } + + // Compute the scaling factor β_k even when restarting, because cg.Variant + // may be keeping an inner state that needs to be updated at every iteration. + beta := cg.Variant.Beta(loc.Gradient, cg.gradPrev, cg.dirPrev) + if beta == 0 { + // β_k == 0 means that the steepest descent direction will be taken, so + // indicate that the method is in fact being restarted. + restart = true + } + if !restart { + // The method is not being restarted, so update the descent direction. + floats.AddScaled(dir, beta, cg.dirPrev) + if floats.Dot(loc.Gradient, dir) >= 0 { + // Restart because the new direction is not a descent direction. 
+ restart = true + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + } + } + + // Get the initial line search step size from the StepSizer even if the + // method was restarted, because StepSizers need to see every iteration. + stepSize = cg.InitialStep.StepSize(loc, dir) + if restart { + // The method was restarted and since the steepest descent direction is + // not related to the previous direction, discard the estimated step + // size from cg.InitialStep and use step size of 1 instead. + stepSize = 1 + // Reset to 0 the counter of iterations taken since the last restart. + cg.iterFromRestart = 0 + } + + copy(cg.gradPrev, loc.Gradient) + copy(cg.dirPrev, dir) + cg.gradPrevNorm = gNorm + return stepSize +} + +func (*CG) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{true, false} +} + +// FletcherReeves implements the Fletcher-Reeves variant of the CG method that +// computes the scaling parameter β_k according to the formula +// β_k = |∇f_{k+1}|^2 / |∇f_k|^2. +type FletcherReeves struct { + prevNorm float64 +} + +func (fr *FletcherReeves) Init(loc *Location) { + fr.prevNorm = floats.Norm(loc.Gradient, 2) +} + +func (fr *FletcherReeves) Beta(grad, _, _ []float64) (beta float64) { + norm := floats.Norm(grad, 2) + beta = (norm / fr.prevNorm) * (norm / fr.prevNorm) + fr.prevNorm = norm + return beta +} + +// PolakRibierePolyak implements the Polak-Ribiere-Polyak variant of the CG +// method that computes the scaling parameter β_k according to the formula +// β_k = max(0, ∇f_{k+1}·y_k / |∇f_k|^2), +// where y_k = ∇f_{k+1} - ∇f_k. +type PolakRibierePolyak struct { + prevNorm float64 +} + +func (pr *PolakRibierePolyak) Init(loc *Location) { + pr.prevNorm = floats.Norm(loc.Gradient, 2) +} + +func (pr *PolakRibierePolyak) Beta(grad, gradPrev, _ []float64) (beta float64) { + norm := floats.Norm(grad, 2) + dot := floats.Dot(grad, gradPrev) + beta = (norm*norm - dot) / (pr.prevNorm * pr.prevNorm) + pr.prevNorm = norm + return math.Max(0, beta) +} + +// HestenesStiefel implements the Hestenes-Stiefel variant of the CG method +// that computes the scaling parameter β_k according to the formula +// β_k = max(0, ∇f_{k+1}·y_k / d_k·y_k), +// where y_k = ∇f_{k+1} - ∇f_k. +type HestenesStiefel struct { + y []float64 +} + +func (hs *HestenesStiefel) Init(loc *Location) { + hs.y = resize(hs.y, len(loc.Gradient)) +} + +func (hs *HestenesStiefel) Beta(grad, gradPrev, dirPrev []float64) (beta float64) { + floats.SubTo(hs.y, grad, gradPrev) + beta = floats.Dot(grad, hs.y) / floats.Dot(dirPrev, hs.y) + return math.Max(0, beta) +} + +// DaiYuan implements the Dai-Yuan variant of the CG method that computes the +// scaling parameter β_k according to the formula +// β_k = |∇f_{k+1}|^2 / d_k·y_k, +// where y_k = ∇f_{k+1} - ∇f_k. +type DaiYuan struct { + y []float64 +} + +func (dy *DaiYuan) Init(loc *Location) { + dy.y = resize(dy.y, len(loc.Gradient)) +} + +func (dy *DaiYuan) Beta(grad, gradPrev, dirPrev []float64) (beta float64) { + floats.SubTo(dy.y, grad, gradPrev) + norm := floats.Norm(grad, 2) + return norm * norm / floats.Dot(dirPrev, dy.y) +} + +// HagerZhang implements the Hager-Zhang variant of the CG method that computes the +// scaling parameter β_k according to the formula +// β_k = (y_k - 2 d_k |y_k|^2/(d_k·y_k))·∇f_{k+1} / (d_k·y_k), +// where y_k = ∇f_{k+1} - ∇f_k. 
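// Illustrative sketch (not part of the vendored gonum sources): a worked β
// computation for two of the variants above, assuming the package's exported
// Location type. With ∇f_k = (3, 4) and ∇f_{k+1} = (1, 2), Fletcher-Reeves
// gives |∇f_{k+1}|²/|∇f_k|² = 5/25 = 0.2, while Polak-Ribiere-Polyak gives
// max(0, (5-11)/25) = 0.
package main

import (
	"fmt"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	gradPrev := []float64{3, 4}
	grad := []float64{1, 2}

	fr := &optimize.FletcherReeves{}
	fr.Init(&optimize.Location{Gradient: gradPrev})
	fmt.Println(fr.Beta(grad, gradPrev, nil)) // ≈ 0.2

	pr := &optimize.PolakRibierePolyak{}
	pr.Init(&optimize.Location{Gradient: gradPrev})
	fmt.Println(pr.Beta(grad, gradPrev, nil)) // 0
}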
+type HagerZhang struct { + y []float64 +} + +func (hz *HagerZhang) Init(loc *Location) { + hz.y = resize(hz.y, len(loc.Gradient)) +} + +func (hz *HagerZhang) Beta(grad, gradPrev, dirPrev []float64) (beta float64) { + floats.SubTo(hz.y, grad, gradPrev) + dirDotY := floats.Dot(dirPrev, hz.y) + gDotY := floats.Dot(grad, hz.y) + gDotDir := floats.Dot(grad, dirPrev) + yNorm := floats.Norm(hz.y, 2) + return (gDotY - 2*gDotDir*yNorm*yNorm/dirDotY) / dirDotY +} diff --git a/vendor/gonum.org/v1/gonum/optimize/cmaes.go b/vendor/gonum.org/v1/gonum/optimize/cmaes.go new file mode 100644 index 0000000..ba755bf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/cmaes.go @@ -0,0 +1,464 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + "sort" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat/distmv" +) + +// TODO(btracey): If we ever implement the traditional CMA-ES algorithm, provide +// the base explanation there, and modify this description to just +// describe the differences. + +// CmaEsChol implements the covariance matrix adaptation evolution strategy (CMA-ES) +// based on the Cholesky decomposition. The full algorithm is described in +// Krause, Oswin, Dídac Rodríguez Arbonès, and Christian Igel. "CMA-ES with +// optimal covariance update and storage complexity." Advances in Neural +// Information Processing Systems. 2016. +// https://papers.nips.cc/paper/6457-cma-es-with-optimal-covariance-update-and-storage-complexity.pdf +// CMA-ES is a global optimization method that progressively adapts a population +// of samples. CMA-ES combines techniques from local optimization with global +// optimization. Specifically, the CMA-ES algorithm uses an initial multivariate +// normal distribution to generate a population of input locations. The input locations +// with the lowest function values are used to update the parameters of the normal +// distribution, a new set of input locations are generated, and this procedure +// is iterated until convergence. The initial sampling distribution will have +// a mean specified by the initial x location, and a covariance specified by +// the InitCholesky field. +// +// As the normal distribution is progressively updated according to the best samples, +// it can be that the mean of the distribution is updated in a gradient-descent +// like fashion, followed by a shrinking covariance. +// It is recommended that the algorithm be run multiple times (with different +// InitMean) to have a better chance of finding the global minimum. +// +// The CMA-ES-Chol algorithm differs from the standard CMA-ES algorithm in that +// it directly updates the Cholesky decomposition of the normal distribution. +// This changes the runtime from O(dimension^3) to O(dimension^2*population) +// The evolution of the multi-variate normal will be similar to the baseline +// CMA-ES algorithm, but the covariance update equation is not identical. +// +// For more information about the CMA-ES algorithm, see +// https://en.wikipedia.org/wiki/CMA-ES +// https://arxiv.org/pdf/1604.00772.pdf +type CmaEsChol struct { + // InitStepSize sets the initial size of the covariance matrix adaptation. + // If InitStepSize is 0, a default value of 0.5 is used. InitStepSize cannot + // be negative, or CmaEsChol will panic. 
+ InitStepSize float64 + // Population sets the population size for the algorithm. If Population is + // 0, a default value of 4 + math.Floor(3*math.Log(float64(dim))) is used. + // Population cannot be negative or CmaEsChol will panic. + Population int + // InitCholesky specifies the Cholesky decomposition of the covariance + // matrix for the initial sampling distribution. If InitCholesky is nil, + // a default value of I is used. If it is non-nil, then it must have + // InitCholesky.Size() be equal to the problem dimension. + InitCholesky *mat.Cholesky + // StopLogDet sets the threshold for stopping the optimization if the + // distribution becomes too peaked. The log determinant is a measure of the + // (log) "volume" of the normal distribution, and when it is too small + // the samples are almost the same. If the log determinant of the covariance + // matrix becomes less than StopLogDet, the optimization run is concluded. + // If StopLogDet is 0, a default value of dim*log(1e-16) is used. + // If StopLogDet is NaN, the stopping criterion is not used, though + // this can cause numeric instabilities in the algorithm. + StopLogDet float64 + // ForgetBest, when true, does not track the best overall function value found, + // instead returning the new best sample in each iteration. If ForgetBest + // is false, then the minimum value returned will be the lowest across all + // iterations, regardless of when that sample was generated. + ForgetBest bool + // Src allows a random number generator to be supplied for generating samples. + // If Src is nil the generator in golang.org/x/math/rand is used. + Src rand.Source + + // Fixed algorithm parameters. + dim int + pop int + weights []float64 + muEff float64 + cc, cs, c1, cmu, ds float64 + eChi float64 + + // Function data. + xs *mat.Dense + fs []float64 + + // Adaptive algorithm parameters. + invSigma float64 // inverse of the sigma parameter + pc, ps []float64 + mean []float64 + chol mat.Cholesky + + // Overall best. + bestX []float64 + bestF float64 + + // Synchronization. + sentIdx int + receivedIdx int + operation chan<- Task + updateErr error +} + +var ( + _ Statuser = (*CmaEsChol)(nil) + _ Method = (*CmaEsChol)(nil) +) + +func (cma *CmaEsChol) methodConverged() Status { + sd := cma.StopLogDet + switch { + case math.IsNaN(sd): + return NotTerminated + case sd == 0: + sd = float64(cma.dim) * -36.8413614879 // ln(1e-16) + } + if cma.chol.LogDet() < sd { + return MethodConverge + } + return NotTerminated +} + +// Status returns the status of the method. +func (cma *CmaEsChol) Status() (Status, error) { + if cma.updateErr != nil { + return Failure, cma.updateErr + } + return cma.methodConverged(), nil +} + +func (*CmaEsChol) Uses(has Available) (uses Available, err error) { + return has.function() +} + +func (cma *CmaEsChol) Init(dim, tasks int) int { + if dim <= 0 { + panic(nonpositiveDimension) + } + if tasks < 0 { + panic(negativeTasks) + } + + // Set fixed algorithm parameters. + // Parameter values are from https://arxiv.org/pdf/1604.00772.pdf . + cma.dim = dim + cma.pop = cma.Population + n := float64(dim) + if cma.pop == 0 { + cma.pop = 4 + int(3*math.Log(n)) // Note the implicit floor. 
+ } else if cma.pop < 0 { + panic("cma-es-chol: negative population size") + } + mu := cma.pop / 2 + cma.weights = resize(cma.weights, mu) + for i := range cma.weights { + v := math.Log(float64(mu)+0.5) - math.Log(float64(i)+1) + cma.weights[i] = v + } + floats.Scale(1/floats.Sum(cma.weights), cma.weights) + cma.muEff = 0 + for _, v := range cma.weights { + cma.muEff += v * v + } + cma.muEff = 1 / cma.muEff + + cma.cc = (4 + cma.muEff/n) / (n + 4 + 2*cma.muEff/n) + cma.cs = (cma.muEff + 2) / (n + cma.muEff + 5) + cma.c1 = 2 / ((n+1.3)*(n+1.3) + cma.muEff) + cma.cmu = math.Min(1-cma.c1, 2*(cma.muEff-2+1/cma.muEff)/((n+2)*(n+2)+cma.muEff)) + cma.ds = 1 + 2*math.Max(0, math.Sqrt((cma.muEff-1)/(n+1))-1) + cma.cs + // E[chi] is taken from https://en.wikipedia.org/wiki/CMA-ES (there + // listed as E[||N(0,1)||]). + cma.eChi = math.Sqrt(n) * (1 - 1.0/(4*n) + 1/(21*n*n)) + + // Allocate memory for function data. + cma.xs = mat.NewDense(cma.pop, dim, nil) + cma.fs = resize(cma.fs, cma.pop) + + // Allocate and initialize adaptive parameters. + cma.invSigma = 1 / cma.InitStepSize + if cma.InitStepSize == 0 { + cma.invSigma = 10.0 / 3 + } else if cma.InitStepSize < 0 { + panic("cma-es-chol: negative initial step size") + } + cma.pc = resize(cma.pc, dim) + for i := range cma.pc { + cma.pc[i] = 0 + } + cma.ps = resize(cma.ps, dim) + for i := range cma.ps { + cma.ps[i] = 0 + } + cma.mean = resize(cma.mean, dim) // mean location initialized at the start of Run + + if cma.InitCholesky != nil { + if cma.InitCholesky.Symmetric() != dim { + panic("cma-es-chol: incorrect InitCholesky size") + } + cma.chol.Clone(cma.InitCholesky) + } else { + // Set the initial Cholesky to I. + b := mat.NewDiagDense(dim, nil) + for i := 0; i < dim; i++ { + b.SetDiag(i, 1) + } + var chol mat.Cholesky + ok := chol.Factorize(b) + if !ok { + panic("cma-es-chol: bad cholesky. shouldn't happen") + } + cma.chol = chol + } + + cma.bestX = resize(cma.bestX, dim) + cma.bestF = math.Inf(1) + + cma.sentIdx = 0 + cma.receivedIdx = 0 + cma.operation = nil + cma.updateErr = nil + t := min(tasks, cma.pop) + return t +} + +func (cma *CmaEsChol) sendInitTasks(tasks []Task) { + for i, task := range tasks { + cma.sendTask(i, task) + } + cma.sentIdx = len(tasks) +} + +// sendTask generates a sample and sends the task. It does not update the cma index. +func (cma *CmaEsChol) sendTask(idx int, task Task) { + task.ID = idx + task.Op = FuncEvaluation + distmv.NormalRand(cma.xs.RawRowView(idx), cma.mean, &cma.chol, cma.Src) + copy(task.X, cma.xs.RawRowView(idx)) + cma.operation <- task +} + +// bestIdx returns the best index in the functions. Returns -1 if all values +// are NaN. +func (cma *CmaEsChol) bestIdx() int { + best := -1 + bestVal := math.Inf(1) + for i, v := range cma.fs { + if math.IsNaN(v) { + continue + } + // Use equality in case somewhere evaluates to +inf. + if v <= bestVal { + best = i + bestVal = v + } + } + return best +} + +// findBestAndUpdateTask finds the best task in the current list, updates the +// new best overall, and then stores the best location into task. +func (cma *CmaEsChol) findBestAndUpdateTask(task Task) Task { + // Find and update the best location. + // Don't use floats because there may be NaN values. 
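// Illustrative sketch (not part of the vendored gonum sources): CmaEsChol
// only needs function values, so it can be driven through the package's
// Minimize/Problem API (not shown in this patch) with a gradient-free
// Problem. Settings and the random source Src are left at their defaults.
package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		Func: func(x []float64) float64 {
			// A shifted bowl with its minimum at (1, -2).
			return (x[0]-1)*(x[0]-1) + (x[1]+2)*(x[1]+2)
		},
	}
	res, err := optimize.Minimize(p, []float64{5, 5}, nil, &optimize.CmaEsChol{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.X) // ≈ [1 -2]
}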
+ best := cma.bestIdx() + bestF := math.NaN() + bestX := cma.xs.RawRowView(0) + if best != -1 { + bestF = cma.fs[best] + bestX = cma.xs.RawRowView(best) + } + if cma.ForgetBest { + task.F = bestF + copy(task.X, bestX) + } else { + if bestF < cma.bestF { + cma.bestF = bestF + copy(cma.bestX, bestX) + } + task.F = cma.bestF + copy(task.X, cma.bestX) + } + return task +} + +func (cma *CmaEsChol) Run(operations chan<- Task, results <-chan Task, tasks []Task) { + copy(cma.mean, tasks[0].X) + cma.operation = operations + // Send the initial tasks. We know there are at most as many tasks as elements + // of the population. + cma.sendInitTasks(tasks) + +Loop: + for { + result := <-results + switch result.Op { + default: + panic("unknown operation") + case PostIteration: + break Loop + case MajorIteration: + // The last thing we did was update all of the tasks and send the + // major iteration. Now we can send a group of tasks again. + cma.sendInitTasks(tasks) + case FuncEvaluation: + cma.receivedIdx++ + cma.fs[result.ID] = result.F + switch { + case cma.sentIdx < cma.pop: + // There are still tasks to evaluate. Send the next. + cma.sendTask(cma.sentIdx, result) + cma.sentIdx++ + case cma.receivedIdx < cma.pop: + // All the tasks have been sent, but not all of them have been received. + // Need to wait until all are back. + continue Loop + default: + // All of the evaluations have been received. + if cma.receivedIdx != cma.pop { + panic("bad logic") + } + cma.receivedIdx = 0 + cma.sentIdx = 0 + + task := cma.findBestAndUpdateTask(result) + // Update the parameters and send a MajorIteration or a convergence. + err := cma.update() + // Kill the existing data. + for i := range cma.fs { + cma.fs[i] = math.NaN() + cma.xs.Set(i, 0, math.NaN()) + } + switch { + case err != nil: + cma.updateErr = err + task.Op = MethodDone + case cma.methodConverged() != NotTerminated: + task.Op = MethodDone + default: + task.Op = MajorIteration + task.ID = -1 + } + operations <- task + } + } + } + + // Been told to stop. Clean up. + // Need to see best of our evaluated tasks so far. Should instead just + // collect, then see. + for task := range results { + switch task.Op { + case MajorIteration: + case FuncEvaluation: + cma.fs[task.ID] = task.F + default: + panic("unknown operation") + } + } + // Send the new best value if the evaluation is better than any we've + // found so far. Keep this separate from findBestAndUpdateTask so that + // we only send an iteration if we find a better location. + if !cma.ForgetBest { + best := cma.bestIdx() + if best != -1 && cma.fs[best] < cma.bestF { + task := tasks[0] + task.F = cma.fs[best] + copy(task.X, cma.xs.RawRowView(best)) + task.Op = MajorIteration + task.ID = -1 + operations <- task + } + } + close(operations) +} + +// update computes the new parameters (mean, cholesky, etc.). Does not update +// any of the synchronization parameters (taskIdx). +func (cma *CmaEsChol) update() error { + // Sort the function values to find the elite samples. + ftmp := make([]float64, cma.pop) + copy(ftmp, cma.fs) + indexes := make([]int, cma.pop) + for i := range indexes { + indexes[i] = i + } + sort.Sort(bestSorter{F: ftmp, Idx: indexes}) + + meanOld := make([]float64, len(cma.mean)) + copy(meanOld, cma.mean) + + // m_{t+1} = \sum_{i=1}^mu w_i x_i + for i := range cma.mean { + cma.mean[i] = 0 + } + for i, w := range cma.weights { + idx := indexes[i] // index of teh 1337 sample. 
+ floats.AddScaled(cma.mean, w, cma.xs.RawRowView(idx)) + } + meanDiff := make([]float64, len(cma.mean)) + floats.SubTo(meanDiff, cma.mean, meanOld) + + // p_{c,t+1} = (1-c_c) p_{c,t} + \sqrt(c_c*(2-c_c)*mueff) (m_{t+1}-m_t)/sigma_t + floats.Scale(1-cma.cc, cma.pc) + scaleC := math.Sqrt(cma.cc*(2-cma.cc)*cma.muEff) * cma.invSigma + floats.AddScaled(cma.pc, scaleC, meanDiff) + + // p_{sigma, t+1} = (1-c_sigma) p_{sigma,t} + \sqrt(c_s*(2-c_s)*mueff) A_t^-1 (m_{t+1}-m_t)/sigma_t + floats.Scale(1-cma.cs, cma.ps) + // First compute A_t^-1 (m_{t+1}-m_t), then add the scaled vector. + tmp := make([]float64, cma.dim) + tmpVec := mat.NewVecDense(cma.dim, tmp) + diffVec := mat.NewVecDense(cma.dim, meanDiff) + err := tmpVec.SolveVec(cma.chol.RawU().T(), diffVec) + if err != nil { + return err + } + scaleS := math.Sqrt(cma.cs*(2-cma.cs)*cma.muEff) * cma.invSigma + floats.AddScaled(cma.ps, scaleS, tmp) + + // Compute the update to A. + scaleChol := 1 - cma.c1 - cma.cmu + if scaleChol == 0 { + scaleChol = math.SmallestNonzeroFloat64 // enough to kill the old data, but still non-zero. + } + cma.chol.Scale(scaleChol, &cma.chol) + cma.chol.SymRankOne(&cma.chol, cma.c1, mat.NewVecDense(cma.dim, cma.pc)) + for i, w := range cma.weights { + idx := indexes[i] + floats.SubTo(tmp, cma.xs.RawRowView(idx), meanOld) + cma.chol.SymRankOne(&cma.chol, cma.cmu*w*cma.invSigma, tmpVec) + } + + // sigma_{t+1} = sigma_t exp(c_sigma/d_sigma * norm(p_{sigma,t+1}/ E[chi] -1) + normPs := floats.Norm(cma.ps, 2) + cma.invSigma /= math.Exp(cma.cs / cma.ds * (normPs/cma.eChi - 1)) + return nil +} + +type bestSorter struct { + F []float64 + Idx []int +} + +func (b bestSorter) Len() int { + return len(b.F) +} +func (b bestSorter) Less(i, j int) bool { + return b.F[i] < b.F[j] +} +func (b bestSorter) Swap(i, j int) { + b.F[i], b.F[j] = b.F[j], b.F[i] + b.Idx[i], b.Idx[j] = b.Idx[j], b.Idx[i] +} diff --git a/vendor/gonum.org/v1/gonum/optimize/convex/lp/convert.go b/vendor/gonum.org/v1/gonum/optimize/convex/lp/convert.go new file mode 100644 index 0000000..72e8cba --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/convex/lp/convert.go @@ -0,0 +1,137 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lp + +import ( + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +// TODO(btracey): Have some sort of preprocessing step for helping to fix A to make it +// full rank? +// TODO(btracey): Reduce rows? Get rid of all zeros, places where only one variable +// is there, etc. Could be implemented with a Reduce function. +// TODO(btracey): Provide method of artificial variables for help when problem +// is infeasible? +// TODO(btracey): Add an lp.Solve that solves an LP in non-standard form. + +// Convert converts a General-form LP into a standard form LP. +// The general form of an LP is: +// minimize c^T * x +// s.t G * x <= h +// A * x = b +// And the standard form is: +// minimize cNew^T * x +// s.t aNew * x = bNew +// x >= 0 +// If there are no constraints of the given type, the inputs may be nil. +func Convert(c []float64, g mat.Matrix, h []float64, a mat.Matrix, b []float64) (cNew []float64, aNew *mat.Dense, bNew []float64) { + nVar := len(c) + nIneq := len(h) + + // Check input sizes. 
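+ // g and a may be nil when there are no inequality or equality constraints;
+ // the corresponding h or b must then have zero length.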
+ if g == nil { + if nIneq != 0 { + panic(badShape) + } + } else { + gr, gc := g.Dims() + if gr != nIneq { + panic(badShape) + } + if gc != nVar { + panic(badShape) + } + } + + nEq := len(b) + if a == nil { + if nEq != 0 { + panic(badShape) + } + } else { + ar, ac := a.Dims() + if ar != nEq { + panic(badShape) + } + if ac != nVar { + panic(badShape) + } + } + + // Convert the general form LP. + // Derivation: + // 0. Start with general form + // min. c^T * x + // s.t. G * x <= h + // A * x = b + // 1. Introduce slack variables for each constraint + // min. c^T * x + // s.t. G * x + s = h + // A * x = b + // s >= 0 + // 2. Add non-negativity constraints for x by splitting x + // into positive and negative components. + // x = xp - xn + // xp >= 0, xn >= 0 + // This makes the LP + // min. c^T * xp - c^T xn + // s.t. G * xp - G * xn + s = h + // A * xp - A * xn = b + // xp >= 0, xn >= 0, s >= 0 + // 3. Write the above in standard form: + // xt = [xp + // xn + // s ] + // min. [c^T, -c^T, 0] xt + // s.t. [G, -G, I] xt = h + // [A, -A, 0] xt = b + // x >= 0 + + // In summary: + // Original LP: + // min. c^T * x + // s.t. G * x <= h + // A * x = b + // Standard Form: + // xt = [xp; xn; s] + // min. [c^T, -c^T, 0] xt + // s.t. [G, -G, I] xt = h + // [A, -A, 0] xt = b + // x >= 0 + + // New size of x is [xp, xn, s] + nNewVar := nVar + nVar + nIneq + + // Construct cNew = [c; -c; 0] + cNew = make([]float64, nNewVar) + copy(cNew, c) + copy(cNew[nVar:], c) + floats.Scale(-1, cNew[nVar:2*nVar]) + + // New number of equality constraints is the number of total constraints. + nNewEq := nIneq + nEq + + // Construct bNew = [h, b]. + bNew = make([]float64, nNewEq) + copy(bNew, h) + copy(bNew[nIneq:], b) + + // Construct aNew = [G, -G, I; A, -A, 0]. + aNew = mat.NewDense(nNewEq, nNewVar, nil) + if nIneq != 0 { + aNew.Slice(0, nIneq, 0, nVar).(*mat.Dense).Copy(g) + aNew.Slice(0, nIneq, nVar, 2*nVar).(*mat.Dense).Scale(-1, g) + aView := aNew.Slice(0, nIneq, 2*nVar, 2*nVar+nIneq).(*mat.Dense) + for i := 0; i < nIneq; i++ { + aView.Set(i, i, 1) + } + } + if nEq != 0 { + aNew.Slice(nIneq, nIneq+nEq, 0, nVar).(*mat.Dense).Copy(a) + aNew.Slice(nIneq, nIneq+nEq, nVar, 2*nVar).(*mat.Dense).Scale(-1, a) + } + return cNew, aNew, bNew +} diff --git a/vendor/gonum.org/v1/gonum/optimize/convex/lp/doc.go b/vendor/gonum.org/v1/gonum/optimize/convex/lp/doc.go new file mode 100644 index 0000000..4c8323c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/convex/lp/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lp implements routines to solve linear programming problems. +package lp // import "gonum.org/v1/gonum/optimize/convex/lp" diff --git a/vendor/gonum.org/v1/gonum/optimize/convex/lp/simplex.go b/vendor/gonum.org/v1/gonum/optimize/convex/lp/simplex.go new file mode 100644 index 0000000..9ebbe48 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/convex/lp/simplex.go @@ -0,0 +1,640 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// package lp implements routines for solving linear programs. +package lp + +import ( + "errors" + "fmt" + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +// TODO(btracey): Could have a solver structure with an abstract factorizer. 
With +// this transformation the same high-level code could handle both Dense and Sparse. +// TODO(btracey): Need to improve error handling. Only want to panic if condition number inf. +// TODO(btracey): Performance enhancements. There are currently lots of linear +// solves that can be improved by doing rank-one updates. For example, the swap +// step is just a rank-one update. +// TODO(btracey): Better handling on the linear solve errors. If the condition +// number is not inf and the equation solved "well", should keep moving. + +var ( + ErrBland = errors.New("lp: bland: all replacements are negative or cause ill-conditioned ab") + ErrInfeasible = errors.New("lp: problem is infeasible") + ErrLinSolve = errors.New("lp: linear solve failure") + ErrUnbounded = errors.New("lp: problem is unbounded") + ErrSingular = errors.New("lp: A is singular") + ErrZeroColumn = errors.New("lp: A has a column of all zeros") + ErrZeroRow = errors.New("lp: A has a row of all zeros") +) + +var ( + badShape = "lp: size mismatch" +) + +// TODO(btracey): Should these tolerances be part of a settings struct? + +const ( + // initPosTol is the tolerance on the initial condition being feasible. Strictly, + // the x should be positive, but instead it must be greater than -initPosTol. + initPosTol = 1e-13 + // blandNegTol is the tolerance on the value being greater than 0 in the bland test. + blandNegTol = 1e-14 + // rRoundTol is the tolerance for rounding values to zero when testing if + // constraints are met. + rRoundTol = 1e-13 + // dRoundTol is the tolerance for testing if values are zero for the problem + // being unbounded. + dRoundTol = 1e-13 + // phaseIZeroTol tests if the Phase I problem returned a feasible solution. + phaseIZeroTol = 1e-12 + // blandZeroTol is the tolerance on testing if the bland solution can move. + blandZeroTol = 1e-12 +) + +// Simplex solves a linear program in standard form using Danzig's Simplex +// algorithm. The standard form of a linear program is: +// minimize c^T x +// s.t. A*x = b +// x >= 0 . +// The input tol sets how close to the optimal solution is found (specifically, +// when the maximal reduced cost is below tol). An error will be returned if the +// problem is infeasible or unbounded. In rare cases, numeric errors can cause +// the Simplex to fail. In this case, an error will be returned along with the +// most recently found feasible solution. +// +// The Convert function can be used to transform a general LP into standard form. +// +// The input matrix A must have at least as many columns as rows, len(c) must +// equal the number of columns of A, and len(b) must equal the number of rows of +// A or Simplex will panic. A must also have full row rank and may not contain any +// columns with all zeros, or Simplex will return an error. +// +// initialBasic can be used to set the initial set of indices for a feasible +// solution to the LP. If an initial feasible solution is not known, initialBasic +// may be nil. If initialBasic is non-nil, len(initialBasic) must equal the number +// of rows of A and must be an actual feasible solution to the LP, otherwise +// Simplex will panic. +// +// A description of the Simplex algorithm can be found in Ch. 8 of +// Strang, Gilbert. "Linear Algebra and Applications." Academic, New York (1976). +// For a detailed video introduction, see lectures 11-13 of UC Math 352 +// https://www.youtube.com/watch?v=ESzYPFkY3og&index=11&list=PLh464gFUoJWOmBYla3zbZbc4nv2AXez6X. 
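A minimal end-to-end sketch of the two entry points above, assuming only the vendored lp and mat packages (the objective and constraint data are illustrative): Convert rewrites a general-form LP into standard form, and Simplex solves the result.

    package main

    import (
    	"fmt"
    	"log"

    	"gonum.org/v1/gonum/mat"
    	"gonum.org/v1/gonum/optimize/convex/lp"
    )

    func main() {
    	// minimize -x0 - x1 subject to G*x <= h; the last two rows of G encode
    	// x0 >= 0 and x1 >= 0, since the general form carries no sign constraint.
    	c := []float64{-1, -1}
    	g := mat.NewDense(4, 2, []float64{
    		1, 2,
    		3, 1,
    		-1, 0,
    		0, -1,
    	})
    	h := []float64{4, 6, 0, 0}

    	// No equality constraints, so a and b are nil.
    	cNew, aNew, bNew := lp.Convert(c, g, h, nil, nil)

    	opt, xStd, err := lp.Simplex(cNew, aNew, bNew, 1e-10, nil)
    	if err != nil {
    		log.Fatal(err)
    	}
    	// xStd is in the converted coordinates [xp; xn; s]; the original
    	// variables are xp - xn.
    	x := make([]float64, len(c))
    	for i := range x {
    		x[i] = xStd[i] - xStd[i+len(c)]
    	}
    	fmt.Println(opt, x)
    }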
+func Simplex(c []float64, A mat.Matrix, b []float64, tol float64, initialBasic []int) (optF float64, optX []float64, err error) { + ans, x, _, err := simplex(initialBasic, c, A, b, tol) + return ans, x, err +} + +func simplex(initialBasic []int, c []float64, A mat.Matrix, b []float64, tol float64) (float64, []float64, []int, error) { + err := verifyInputs(initialBasic, c, A, b) + if err != nil { + if err == ErrUnbounded { + return math.Inf(-1), nil, nil, ErrUnbounded + } + return math.NaN(), nil, nil, err + } + m, n := A.Dims() + + if m == n { + // Problem is exactly constrained, perform a linear solve. + bVec := mat.NewVecDense(len(b), b) + x := make([]float64, n) + xVec := mat.NewVecDense(n, x) + err := xVec.SolveVec(A, bVec) + if err != nil { + return math.NaN(), nil, nil, ErrSingular + } + for _, v := range x { + if v < 0 { + return math.NaN(), nil, nil, ErrInfeasible + } + } + f := floats.Dot(x, c) + return f, x, nil, nil + } + + // There is at least one optimal solution to the LP which is at the intersection + // to a set of constraint boundaries. For a standard form LP with m variables + // and n equality constraints, at least m-n elements of x must equal zero + // at optimality. The Simplex algorithm solves the standard-form LP by starting + // at an initial constraint vertex and successively moving to adjacent constraint + // vertices. At every vertex, the set of non-zero x values is the "basic + // feasible solution". The list of non-zero x's are maintained in basicIdxs, + // the respective columns of A are in ab, and the actual non-zero values of + // x are in xb. + // + // The LP is equality constrained such that A * x = b. This can be expanded + // to + // ab * xb + an * xn = b + // where ab are the columns of a in the basic set, and an are all of the + // other columns. Since each element of xn is zero by definition, this means + // that for all feasible solutions xb = ab^-1 * b. + // + // Before the simplex algorithm can start, an initial feasible solution must + // be found. If initialBasic is non-nil a feasible solution has been supplied. + // Otherwise the "Phase I" problem must be solved to find an initial feasible + // solution. + + var basicIdxs []int // The indices of the non-zero x values. + var ab *mat.Dense // The subset of columns of A listed in basicIdxs. + var xb []float64 // The non-zero elements of x. xb = ab^-1 b + + if initialBasic != nil { + // InitialBasic supplied. Panic if incorrect length or infeasible. + if len(initialBasic) != m { + panic("lp: incorrect number of initial vectors") + } + ab = mat.NewDense(m, len(initialBasic), nil) + extractColumns(ab, A, initialBasic) + xb = make([]float64, m) + err = initializeFromBasic(xb, ab, b) + if err != nil { + panic(err) + } + basicIdxs = make([]int, len(initialBasic)) + copy(basicIdxs, initialBasic) + } else { + // No initial basis supplied. Solve the PhaseI problem. + basicIdxs, ab, xb, err = findInitialBasic(A, b) + if err != nil { + return math.NaN(), nil, nil, err + } + } + + // basicIdxs contains the indexes for an initial feasible solution, + // ab contains the extracted columns of A, and xb contains the feasible + // solution. All x not in the basic set are 0 by construction. + + // nonBasicIdx is the set of nonbasic variables. 
+ nonBasicIdx := make([]int, 0, n-m) + inBasic := make(map[int]struct{}) + for _, v := range basicIdxs { + inBasic[v] = struct{}{} + } + for i := 0; i < n; i++ { + _, ok := inBasic[i] + if !ok { + nonBasicIdx = append(nonBasicIdx, i) + } + } + + // cb is the subset of c for the basic variables. an and cn + // are the equivalents to ab and cb but for the nonbasic variables. + cb := make([]float64, len(basicIdxs)) + for i, idx := range basicIdxs { + cb[i] = c[idx] + } + cn := make([]float64, len(nonBasicIdx)) + for i, idx := range nonBasicIdx { + cn[i] = c[idx] + } + an := mat.NewDense(m, len(nonBasicIdx), nil) + extractColumns(an, A, nonBasicIdx) + + bVec := mat.NewVecDense(len(b), b) + cbVec := mat.NewVecDense(len(cb), cb) + + // Temporary data needed each iteration. (Described later) + r := make([]float64, n-m) + move := make([]float64, m) + + // Solve the linear program starting from the initial feasible set. This is + // the "Phase 2" problem. + // + // Algorithm: + // 1) Compute the "reduced costs" for the non-basic variables. The reduced + // costs are the lagrange multipliers of the constraints. + // r = cn - an^T * ab^-T * cb + // 2) If all of the reduced costs are positive, no improvement is possible, + // and the solution is optimal (xn can only increase because of + // non-negativity constraints). Otherwise, the solution can be improved and + // one element will be exchanged in the basic set. + // 3) Choose the x_n with the most negative value of r. Call this value xe. + // This variable will be swapped into the basic set. + // 4) Increase xe until the next constraint boundary is met. This will happen + // when the first element in xb becomes 0. The distance xe can increase before + // a given element in xb becomes negative can be found from + // xb = Ab^-1 b - Ab^-1 An xn + // = Ab^-1 b - Ab^-1 Ae xe + // = bhat + d x_e + // xe = bhat_i / - d_i + // where Ae is the column of A corresponding to xe. + // The constraining basic index is the first index for which this is true, + // so remove the element which is min_i (bhat_i / -d_i), assuming d_i is negative. + // If no d_i is less than 0, then the problem is unbounded. + // 5) If the new xe is 0 (that is, bhat_i == 0), then this location is at + // the intersection of several constraints. Use the Bland rule instead + // of the rule in step 4 to avoid cycling. + for { + // Compute reduced costs -- r = cn - an^T ab^-T cb + var tmp mat.VecDense + err = tmp.SolveVec(ab.T(), cbVec) + if err != nil { + break + } + data := make([]float64, n-m) + tmp2 := mat.NewVecDense(n-m, data) + tmp2.MulVec(an.T(), &tmp) + floats.SubTo(r, cn, data) + + // Replace the most negative element in the simplex. If there are no + // negative entries then the optimal solution has been found. + minIdx := floats.MinIdx(r) + if r[minIdx] >= -tol { + break + } + + for i, v := range r { + if math.Abs(v) < rRoundTol { + r[i] = 0 + } + } + + // Compute the moving distance. + err = computeMove(move, minIdx, A, ab, xb, nonBasicIdx) + if err != nil { + if err == ErrUnbounded { + return math.Inf(-1), nil, nil, ErrUnbounded + } + break + } + + // Replace the basic index along the tightest constraint. + replace := floats.MinIdx(move) + if move[replace] <= 0 { + replace, minIdx, err = replaceBland(A, ab, xb, basicIdxs, nonBasicIdx, r, move) + if err != nil { + if err == ErrUnbounded { + return math.Inf(-1), nil, nil, ErrUnbounded + } + break + } + } + + // Replace the constrained basicIdx with the newIdx. 
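+ // The entering variable nonBasicIdx[minIdx] and the leaving variable
+ // basicIdxs[replace] trade places in the index sets, in the cost vectors,
+ // and in the columns of ab and an.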
+ basicIdxs[replace], nonBasicIdx[minIdx] = nonBasicIdx[minIdx], basicIdxs[replace] + cb[replace], cn[minIdx] = cn[minIdx], cb[replace] + tmpCol1 := mat.Col(nil, replace, ab) + tmpCol2 := mat.Col(nil, minIdx, an) + ab.SetCol(replace, tmpCol2) + an.SetCol(minIdx, tmpCol1) + + // Compute the new xb. + xbVec := mat.NewVecDense(len(xb), xb) + err = xbVec.SolveVec(ab, bVec) + if err != nil { + break + } + } + // Found the optimum successfully or died trying. The basic variables get + // their values, and the non-basic variables are all zero. + opt := floats.Dot(cb, xb) + xopt := make([]float64, n) + for i, v := range basicIdxs { + xopt[v] = xb[i] + } + return opt, xopt, basicIdxs, err +} + +// computeMove computes how far can be moved replacing each index. The results +// are stored into move. +func computeMove(move []float64, minIdx int, A mat.Matrix, ab *mat.Dense, xb []float64, nonBasicIdx []int) error { + // Find ae. + col := mat.Col(nil, nonBasicIdx[minIdx], A) + aCol := mat.NewVecDense(len(col), col) + + // d = - Ab^-1 Ae + nb, _ := ab.Dims() + d := make([]float64, nb) + dVec := mat.NewVecDense(nb, d) + err := dVec.SolveVec(ab, aCol) + if err != nil { + return ErrLinSolve + } + floats.Scale(-1, d) + + for i, v := range d { + if math.Abs(v) < dRoundTol { + d[i] = 0 + } + } + + // If no di < 0, then problem is unbounded. + if floats.Min(d) >= 0 { + return ErrUnbounded + } + + // move = bhat_i / - d_i, assuming d is negative. + bHat := xb // ab^-1 b + for i, v := range d { + if v >= 0 { + move[i] = math.Inf(1) + } else { + move[i] = bHat[i] / math.Abs(v) + } + } + return nil +} + +// replaceBland uses the Bland rule to find the indices to swap if the minimum +// move is 0. The indices to be swapped are replace and minIdx (following the +// nomenclature in the main routine). +func replaceBland(A mat.Matrix, ab *mat.Dense, xb []float64, basicIdxs, nonBasicIdx []int, r, move []float64) (replace, minIdx int, err error) { + m, _ := A.Dims() + // Use the traditional bland rule, except don't replace a constraint which + // causes the new ab to be singular. + for i, v := range r { + if v > -blandNegTol { + continue + } + minIdx = i + err = computeMove(move, minIdx, A, ab, xb, nonBasicIdx) + if err != nil { + // Either unbounded or something went wrong. + return -1, -1, err + } + replace = floats.MinIdx(move) + if math.Abs(move[replace]) > blandZeroTol { + // Large enough that it shouldn't be a problem + return replace, minIdx, nil + } + // Find a zero index where replacement is non-singular. + biCopy := make([]int, len(basicIdxs)) + for replace, v := range move { + if v > blandZeroTol { + continue + } + copy(biCopy, basicIdxs) + biCopy[replace] = nonBasicIdx[minIdx] + abTmp := mat.NewDense(m, len(biCopy), nil) + extractColumns(abTmp, A, biCopy) + // If the condition number is reasonable, use this index. + if mat.Cond(abTmp, 1) < 1e16 { + return replace, minIdx, nil + } + } + } + return -1, -1, ErrBland +} + +func verifyInputs(initialBasic []int, c []float64, A mat.Matrix, b []float64) error { + m, n := A.Dims() + if m > n { + panic("lp: more equality constraints than variables") + } + if len(c) != n { + panic("lp: c vector incorrect length") + } + if len(b) != m { + panic("lp: b vector incorrect length") + } + if len(c) != n { + panic("lp: c vector incorrect length") + } + if len(initialBasic) != 0 && len(initialBasic) != m { + panic("lp: initialBasic incorrect length") + } + + // Do some sanity checks so that ab does not become singular during the + // simplex solution. 
If the ZeroRow checks are removed then the code for + // finding a set of linearly indepent columns must be improved. + + // Check that if a row of A only has zero elements that corresponding + // element in b is zero, otherwise the problem is infeasible. + // Otherwise return ErrZeroRow. + for i := 0; i < m; i++ { + isZero := true + for j := 0; j < n; j++ { + if A.At(i, j) != 0 { + isZero = false + break + } + } + if isZero && b[i] != 0 { + // Infeasible + return ErrInfeasible + } else if isZero { + return ErrZeroRow + } + } + // Check that if a column only has zero elements that the respective C vector + // is positive (otherwise unbounded). Otherwise return ErrZeroColumn. + for j := 0; j < n; j++ { + isZero := true + for i := 0; i < m; i++ { + if A.At(i, j) != 0 { + isZero = false + break + } + } + if isZero && c[j] < 0 { + return ErrUnbounded + } else if isZero { + return ErrZeroColumn + } + } + return nil +} + +// initializeFromBasic initializes the basic feasible solution given a set of +// basic indices. It extracts the columns of A specified by basicIdxs and finds +// the x values at that location. These are stored into xb. +// +// If the columns of A are not linearly independent or if the initial set is not +// feasible, an error is returned. +func initializeFromBasic(xb []float64, ab *mat.Dense, b []float64) error { + m, _ := ab.Dims() + if len(xb) != m { + panic("simplex: bad xb length") + } + xbMat := mat.NewVecDense(m, xb) + + err := xbMat.SolveVec(ab, mat.NewVecDense(m, b)) + if err != nil { + return errors.New("lp: subcolumns of A for supplied initial basic singular") + } + // The solve ensures that the equality constraints are met (ab * xb = b). + // Thus, the solution is feasible if and only if all of the x's are positive. + allPos := true + for _, v := range xb { + if v < -initPosTol { + allPos = false + break + } + } + if !allPos { + return errors.New("lp: supplied subcolumns not a feasible solution") + } + return nil +} + +// extractColumns copies the columns specified by cols into the columns of dst. +func extractColumns(dst *mat.Dense, A mat.Matrix, cols []int) { + r, c := dst.Dims() + ra, _ := A.Dims() + if ra != r { + panic("simplex: row mismatch") + } + if c != len(cols) { + panic("simplex: column mismatch") + } + col := make([]float64, r) + for j, idx := range cols { + mat.Col(col, idx, A) + dst.SetCol(j, col) + } +} + +// findInitialBasic finds an initial basic solution, and returns the basic +// indices, ab, and xb. +func findInitialBasic(A mat.Matrix, b []float64) ([]int, *mat.Dense, []float64, error) { + m, n := A.Dims() + basicIdxs := findLinearlyIndependent(A) + if len(basicIdxs) != m { + return nil, nil, nil, ErrSingular + } + + // It may be that this linearly independent basis is also a feasible set. If + // so, the Phase I problem can be avoided. + ab := mat.NewDense(m, len(basicIdxs), nil) + extractColumns(ab, A, basicIdxs) + xb := make([]float64, m) + err := initializeFromBasic(xb, ab, b) + if err == nil { + return basicIdxs, ab, xb, nil + } + + // This set was not feasible. Instead the "Phase I" problem must be solved + // to find an initial feasible set of basis. + // + // Method: Construct an LP whose optimal solution is a feasible solution + // to the original LP. + // 1) Introduce an artificial variable x_{n+1}. + // 2) Let x_j be the most negative element of x_b (largest constraint violation). + // 3) Add the artificial variable to A with: + // a_{n+1} = b - \sum_{i in basicIdxs} a_i + a_j + // swap j with n+1 in the basicIdxs. 
+ // 4) Define a new LP: + // minimize x_{n+1} + // subject to [A A_{n+1}][x_1 ... x_{n+1}] = b + // x, x_{n+1} >= 0 + // 5) Solve this LP. If x_{n+1} != 0, then the problem is infeasible, otherwise + // the found basis can be used as an initial basis for phase II. + // + // The extra column in Step 3 is defined such that the vector of 1s is an + // initial feasible solution. + + // Find the largest constraint violator. + // Compute a_{n+1} = b - \sum{i in basicIdxs}a_i + a_j. j is in basicIDx, so + // instead just subtract the basicIdx columns that are not minIDx. + minIdx := floats.MinIdx(xb) + aX1 := make([]float64, m) + copy(aX1, b) + col := make([]float64, m) + for i, v := range basicIdxs { + if i == minIdx { + continue + } + mat.Col(col, v, A) + floats.Sub(aX1, col) + } + + // Construct the new LP. + // aNew = [A, a_{n+1}] + // bNew = b + // cNew = 1 for x_{n+1} + aNew := mat.NewDense(m, n+1, nil) + aNew.Copy(A) + aNew.SetCol(n, aX1) + basicIdxs[minIdx] = n // swap minIdx with n in the basic set. + c := make([]float64, n+1) + c[n] = 1 + + // Solve the Phase I linear program. + _, xOpt, newBasic, err := simplex(basicIdxs, c, aNew, b, 1e-10) + if err != nil { + return nil, nil, nil, fmt.Errorf("lp: error finding feasible basis: %s", err) + } + + // The original LP is infeasible if the added variable has non-zero value + // in the optimal solution to the Phase I problem. + if math.Abs(xOpt[n]) > phaseIZeroTol { + return nil, nil, nil, ErrInfeasible + } + + // The basis found in Phase I is a feasible solution to the original LP if + // the added variable is not in the basis. + addedIdx := -1 + for i, v := range newBasic { + if v == n { + addedIdx = i + } + xb[i] = xOpt[v] + } + if addedIdx == -1 { + extractColumns(ab, A, newBasic) + return newBasic, ab, xb, nil + } + + // The value of the added variable is in the basis, but it has a zero value. + // See if exchanging another variable into the basic set finds a feasible + // solution. + basicMap := make(map[int]struct{}) + for _, v := range newBasic { + basicMap[v] = struct{}{} + } + var set bool + for i := range xOpt { + if _, inBasic := basicMap[i]; inBasic { + continue + } + newBasic[addedIdx] = i + if set { + mat.Col(col, i, A) + ab.SetCol(addedIdx, col) + } else { + extractColumns(ab, A, newBasic) + set = true + } + err := initializeFromBasic(xb, ab, b) + if err == nil { + return newBasic, ab, xb, nil + } + } + return nil, nil, nil, ErrInfeasible +} + +// findLinearlyIndependnt finds a set of linearly independent columns of A, and +// returns the column indexes of the linearly independent columns. +func findLinearlyIndependent(A mat.Matrix) []int { + m, n := A.Dims() + idxs := make([]int, 0, m) + columns := mat.NewDense(m, m, nil) + newCol := make([]float64, m) + // Walk in reverse order because slack variables are typically the last columns + // of A. + for i := n - 1; i >= 0; i-- { + if len(idxs) == m { + break + } + mat.Col(newCol, i, A) + columns.SetCol(len(idxs), newCol) + if len(idxs) == 0 { + // A column is linearly independent from the null set. + // If all-zero column of A are allowed, this code needs to be adjusted. + idxs = append(idxs, i) + continue + } + if mat.Cond(columns.Slice(0, m, 0, len(idxs)+1), 1) > 1e12 { + // Not linearly independent. 
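+ // Appending column i pushes the 1-norm condition number of the candidate
+ // basis above 1e12, so treat it as linearly dependent and skip it.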
+ continue + } + idxs = append(idxs, i) + } + return idxs +} diff --git a/vendor/gonum.org/v1/gonum/optimize/doc.go b/vendor/gonum.org/v1/gonum/optimize/doc.go new file mode 100644 index 0000000..667e8f9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package optimize implements algorithms for finding the optimum value of functions. +package optimize // import "gonum.org/v1/gonum/optimize" diff --git a/vendor/gonum.org/v1/gonum/optimize/errors.go b/vendor/gonum.org/v1/gonum/optimize/errors.go new file mode 100644 index 0000000..a06645a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/errors.go @@ -0,0 +1,80 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "errors" + "fmt" + "math" +) + +var ( + // ErrZeroDimensional signifies an optimization was called with an input of length 0. + ErrZeroDimensional = errors.New("optimize: zero dimensional input") + + // ErrLinesearcherFailure signifies that a Linesearcher has iterated too + // many times. This may occur if the gradient tolerance is set too low. + ErrLinesearcherFailure = errors.New("linesearch: failed to converge") + + // ErrNonDescentDirection signifies that LinesearchMethod has received a + // search direction from a NextDirectioner in which the function is not + // decreasing. + ErrNonDescentDirection = errors.New("linesearch: non-descent search direction") + + // ErrNoProgress signifies that LinesearchMethod cannot make further + // progress because there is no change in location after Linesearcher step + // due to floating-point arithmetic. + ErrNoProgress = errors.New("linesearch: no change in location after Linesearcher step") + + // ErrLinesearcherBound signifies that a Linesearcher reached a step that + // lies out of allowed bounds. + ErrLinesearcherBound = errors.New("linesearch: step out of bounds") + + // ErrMissingGrad signifies that a Method requires a Gradient function that + // is not supplied by Problem. + ErrMissingGrad = errors.New("optimize: problem does not provide needed Grad function") + + // ErrMissingHess signifies that a Method requires a Hessian function that + // is not supplied by Problem. + ErrMissingHess = errors.New("optimize: problem does not provide needed Hess function") +) + +// ErrFunc is returned when an initial function value is invalid. The error +// state may be either +Inf or NaN. ErrFunc satisfies the error interface. +type ErrFunc float64 + +func (err ErrFunc) Error() string { + switch { + case math.IsInf(float64(err), 1): + return "optimize: initial function value is infinite" + case math.IsNaN(float64(err)): + return "optimize: initial function value is NaN" + default: + panic("optimize: bad ErrFunc") + } +} + +// ErrGrad is returned when an initial gradient is invalid. The error gradient +// may be either ±Inf or NaN. ErrGrad satisfies the error interface. +type ErrGrad struct { + Grad float64 // Grad is the invalid gradient value. + Index int // Index is the position at which the invalid gradient was found. 
+} + +func (err ErrGrad) Error() string { + switch { + case math.IsInf(err.Grad, 0): + return fmt.Sprintf("optimize: initial gradient is infinite at position %d", err.Index) + case math.IsNaN(err.Grad): + return fmt.Sprintf("optimize: initial gradient is NaN at position %d", err.Index) + default: + panic("optimize: bad ErrGrad") + } +} + +// List of shared panic strings +var ( + badProblem = "optimize: objective function is undefined" +) diff --git a/vendor/gonum.org/v1/gonum/optimize/functionconvergence.go b/vendor/gonum.org/v1/gonum/optimize/functionconvergence.go new file mode 100644 index 0000000..1bde78f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functionconvergence.go @@ -0,0 +1,76 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" +) + +// Converger returns the convergence of the optimization based on +// locations found during optimization. Converger must not modify the value of +// the provided Location in any of the methods. +type Converger interface { + Init(dim int) + Converged(loc *Location) Status +} + +// NeverTerminate implements Converger, always reporting NotTerminated. +type NeverTerminate struct{} + +func (NeverTerminate) Init(dim int) {} + +func (NeverTerminate) Converged(loc *Location) Status { + return NotTerminated +} + +// FunctionConverge tests for insufficient improvement in the optimum value +// over the last iterations. A FunctionConvergence status is returned if +// there is no significant decrease for FunctionConverge.Iterations. A +// significant decrease is considered if +// f < f_best +// and +// f_best - f > FunctionConverge.Relative * maxabs(f, f_best) + FunctionConverge.Absolute +// If the decrease is significant, then the iteration counter is reset and +// f_best is updated. +// +// If FunctionConverge.Iterations == 0, it has no effect. +type FunctionConverge struct { + Absolute float64 + Relative float64 + Iterations int + + first bool + best float64 + iter int +} + +func (fc *FunctionConverge) Init(dim int) { + fc.first = true + fc.best = 0 + fc.iter = 0 +} + +func (fc *FunctionConverge) Converged(l *Location) Status { + f := l.F + if fc.first { + fc.best = f + fc.first = false + return NotTerminated + } + if fc.Iterations == 0 { + return NotTerminated + } + maxAbs := math.Max(math.Abs(f), math.Abs(fc.best)) + if f < fc.best && fc.best-f > fc.Relative*maxAbs+fc.Absolute { + fc.best = f + fc.iter = 0 + return NotTerminated + } + fc.iter++ + if fc.iter < fc.Iterations { + return NotTerminated + } + return FunctionConvergence +} diff --git a/vendor/gonum.org/v1/gonum/optimize/functions/doc.go b/vendor/gonum.org/v1/gonum/optimize/functions/doc.go new file mode 100644 index 0000000..3bdfe8b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functions/doc.go @@ -0,0 +1,15 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package functions provides objective functions for testing optimization +// algorithms. +// +// We encourage outside contributions of additional test functions that exhibit +// properties not already covered in the testing suite or that have +// significance due to prior use as benchmark cases. 
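A small sketch of exercising the FunctionConverge test above directly, assuming only the Converger, Location, and Status pieces that appear in this package (the tolerances are illustrative); in an actual run the Converger would normally be handed to the optimizer rather than called by hand.

    package main

    import (
    	"fmt"

    	"gonum.org/v1/gonum/optimize"
    )

    func main() {
    	// Require an absolute improvement of at least 1e-10 within 20 iterations.
    	fc := &optimize.FunctionConverge{Absolute: 1e-10, Iterations: 20}
    	fc.Init(2) // problem dimension

    	// The first call only records the incumbent best value.
    	fmt.Println(fc.Converged(&optimize.Location{F: 10}))

    	// Repeated calls with no sufficient decrease eventually report
    	// FunctionConvergence.
    	var status optimize.Status
    	for i := 0; i < 25; i++ {
    		status = fc.Converged(&optimize.Location{F: 10})
    	}
    	fmt.Println(status)
    }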
+package functions // import "gonum.org/v1/gonum/optimize/functions" + +const ( + badInputDim = "functions: wrong input dimension" +) diff --git a/vendor/gonum.org/v1/gonum/optimize/functions/functions.go b/vendor/gonum.org/v1/gonum/optimize/functions/functions.go new file mode 100644 index 0000000..e806939 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functions/functions.go @@ -0,0 +1,1819 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package functions + +import ( + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +// Beale implements the Beale's function. +// +// Standard starting points: +// Easy: [1, 1] +// Hard: [1, 4] +// +// References: +// - Beale, E.: On an Iterative Method for Finding a Local Minimum of a +// Function of More than One Variable. Technical Report 25, Statistical +// Techniques Research Group, Princeton University (1958) +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type Beale struct{} + +func (Beale) Func(x []float64) float64 { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + + f1 := 1.5 - x[0]*(1-x[1]) + f2 := 2.25 - x[0]*(1-x[1]*x[1]) + f3 := 2.625 - x[0]*(1-x[1]*x[1]*x[1]) + return f1*f1 + f2*f2 + f3*f3 +} + +func (Beale) Grad(grad, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + t1 := 1 - x[1] + t2 := 1 - x[1]*x[1] + t3 := 1 - x[1]*x[1]*x[1] + + f1 := 1.5 - x[0]*t1 + f2 := 2.25 - x[0]*t2 + f3 := 2.625 - x[0]*t3 + + grad[0] = -2 * (f1*t1 + f2*t2 + f3*t3) + grad[1] = 2 * x[0] * (f1 + 2*f2*x[1] + 3*f3*x[1]*x[1]) +} + +func (Beale) Hess(dst *mat.SymDense, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + t1 := 1 - x[1] + t2 := 1 - x[1]*x[1] + t3 := 1 - x[1]*x[1]*x[1] + f1 := 1.5 - x[1]*t1 + f2 := 2.25 - x[1]*t2 + f3 := 2.625 - x[1]*t3 + + h00 := 2 * (t1*t1 + t2*t2 + t3*t3) + h01 := 2 * (f1 + x[1]*(2*f2+3*x[1]*f3) - x[0]*(t1+x[1]*(2*t2+3*x[1]*t3))) + h11 := 2 * x[0] * (x[0] + 2*f2 + x[1]*(6*f3+x[0]*x[1]*(4+9*x[1]*x[1]))) + dst.SetSym(0, 0, h00) + dst.SetSym(0, 1, h01) + dst.SetSym(1, 1, h11) +} + +func (Beale) Minima() []Minimum { + return []Minimum{ + { + X: []float64{3, 0.5}, + F: 0, + Global: true, + }, + } +} + +// BiggsEXP2 implements the Biggs' EXP2 function. +// +// Standard starting point: +// [1, 2] +// +// Reference: +// Biggs, M.C.: Minimization algorithms making use of non-quadratic properties +// of the objective function. 
IMA J Appl Math 8 (1971), 315-327; doi:10.1093/imamat/8.3.315 +type BiggsEXP2 struct{} + +func (BiggsEXP2) Func(x []float64) (sum float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := math.Exp(-x[0]*z) - 5*math.Exp(-x[1]*z) - y + sum += f * f + } + return sum +} + +func (BiggsEXP2) Grad(grad, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := math.Exp(-x[0]*z) - 5*math.Exp(-x[1]*z) - y + + dfdx0 := -z * math.Exp(-x[0]*z) + dfdx1 := 5 * z * math.Exp(-x[1]*z) + + grad[0] += 2 * f * dfdx0 + grad[1] += 2 * f * dfdx1 + } +} + +func (BiggsEXP2) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10}, + F: 0, + Global: true, + }, + } +} + +// BiggsEXP3 implements the Biggs' EXP3 function. +// +// Standard starting point: +// [1, 2, 1] +// +// Reference: +// Biggs, M.C.: Minimization algorithms making use of non-quadratic properties +// of the objective function. IMA J Appl Math 8 (1971), 315-327; doi:10.1093/imamat/8.3.315 +type BiggsEXP3 struct{} + +func (BiggsEXP3) Func(x []float64) (sum float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := math.Exp(-x[0]*z) - x[2]*math.Exp(-x[1]*z) - y + sum += f * f + } + return sum +} + +func (BiggsEXP3) Grad(grad, x []float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := math.Exp(-x[0]*z) - x[2]*math.Exp(-x[1]*z) - y + + dfdx0 := -z * math.Exp(-x[0]*z) + dfdx1 := x[2] * z * math.Exp(-x[1]*z) + dfdx2 := -math.Exp(-x[1] * z) + + grad[0] += 2 * f * dfdx0 + grad[1] += 2 * f * dfdx1 + grad[2] += 2 * f * dfdx2 + } +} + +func (BiggsEXP3) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10, 5}, + F: 0, + Global: true, + }, + } +} + +// BiggsEXP4 implements the Biggs' EXP4 function. +// +// Standard starting point: +// [1, 2, 1, 1] +// +// Reference: +// Biggs, M.C.: Minimization algorithms making use of non-quadratic properties +// of the objective function. 
IMA J Appl Math 8 (1971), 315-327; doi:10.1093/imamat/8.3.315 +type BiggsEXP4 struct{} + +func (BiggsEXP4) Func(x []float64) (sum float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) - y + sum += f * f + } + return sum +} + +func (BiggsEXP4) Grad(grad, x []float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 10; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) - y + + dfdx0 := -z * x[2] * math.Exp(-x[0]*z) + dfdx1 := z * x[3] * math.Exp(-x[1]*z) + dfdx2 := math.Exp(-x[0] * z) + dfdx3 := -math.Exp(-x[1] * z) + + grad[0] += 2 * f * dfdx0 + grad[1] += 2 * f * dfdx1 + grad[2] += 2 * f * dfdx2 + grad[3] += 2 * f * dfdx3 + } +} + +func (BiggsEXP4) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10, 1, 5}, + F: 0, + Global: true, + }, + } +} + +// BiggsEXP5 implements the Biggs' EXP5 function. +// +// Standard starting point: +// [1, 2, 1, 1, 1] +// +// Reference: +// Biggs, M.C.: Minimization algorithms making use of non-quadratic properties +// of the objective function. IMA J Appl Math 8 (1971), 315-327; doi:10.1093/imamat/8.3.315 +type BiggsEXP5 struct{} + +func (BiggsEXP5) Func(x []float64) (sum float64) { + if len(x) != 5 { + panic("dimension of the problem must be 5") + } + + for i := 1; i <= 11; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + 3*math.Exp(-4*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) + 3*math.Exp(-x[4]*z) - y + sum += f * f + } + return sum +} + +func (BiggsEXP5) Grad(grad, x []float64) { + if len(x) != 5 { + panic("dimension of the problem must be 5") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 11; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + 3*math.Exp(-4*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) + 3*math.Exp(-x[4]*z) - y + + dfdx0 := -z * x[2] * math.Exp(-x[0]*z) + dfdx1 := z * x[3] * math.Exp(-x[1]*z) + dfdx2 := math.Exp(-x[0] * z) + dfdx3 := -math.Exp(-x[1] * z) + dfdx4 := -3 * z * math.Exp(-x[4]*z) + + grad[0] += 2 * f * dfdx0 + grad[1] += 2 * f * dfdx1 + grad[2] += 2 * f * dfdx2 + grad[3] += 2 * f * dfdx3 + grad[4] += 2 * f * dfdx4 + } +} + +func (BiggsEXP5) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10, 1, 5, 4}, + F: 0, + Global: true, + }, + } +} + +// BiggsEXP6 implements the Biggs' EXP6 function. +// +// Standard starting point: +// [1, 2, 1, 1, 1, 1] +// +// References: +// - Biggs, M.C.: Minimization algorithms making use of non-quadratic +// properties of the objective function. IMA J Appl Math 8 (1971), 315-327; +// doi:10.1093/imamat/8.3.315 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. 
ACM Trans Math Softw 7 (1981), 17-41 +type BiggsEXP6 struct{} + +func (BiggsEXP6) Func(x []float64) (sum float64) { + if len(x) != 6 { + panic("dimension of the problem must be 6") + } + + for i := 1; i <= 13; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + 3*math.Exp(-4*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) + x[5]*math.Exp(-x[4]*z) - y + sum += f * f + } + return sum +} + +func (BiggsEXP6) Grad(grad, x []float64) { + if len(x) != 6 { + panic("dimension of the problem must be 6") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 13; i++ { + z := float64(i) / 10 + y := math.Exp(-z) - 5*math.Exp(-10*z) + 3*math.Exp(-4*z) + f := x[2]*math.Exp(-x[0]*z) - x[3]*math.Exp(-x[1]*z) + x[5]*math.Exp(-x[4]*z) - y + + dfdx0 := -z * x[2] * math.Exp(-x[0]*z) + dfdx1 := z * x[3] * math.Exp(-x[1]*z) + dfdx2 := math.Exp(-x[0] * z) + dfdx3 := -math.Exp(-x[1] * z) + dfdx4 := -z * x[5] * math.Exp(-x[4]*z) + dfdx5 := math.Exp(-x[4] * z) + + grad[0] += 2 * f * dfdx0 + grad[1] += 2 * f * dfdx1 + grad[2] += 2 * f * dfdx2 + grad[3] += 2 * f * dfdx3 + grad[4] += 2 * f * dfdx4 + grad[5] += 2 * f * dfdx5 + } +} + +func (BiggsEXP6) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10, 1, 5, 4, 3}, + F: 0, + Global: true, + }, + { + X: []float64{1.7114159947956764, 17.68319817846745, 1.1631436609697268, + 5.1865615510738605, 1.7114159947949301, 1.1631436609697998}, + F: 0.005655649925499929, + Global: false, + }, + { + // X: []float64{1.22755594752403, X[1] >> 0, 0.83270306333466, X[3] << 0, X[4] = X[0], X[5] = X[2]}, + X: []float64{1.22755594752403, 1000, 0.83270306333466, -1000, 1.22755594752403, 0.83270306333466}, + F: 0.306366772624790, + Global: false, + }, + } +} + +// Box3D implements the Box' three-dimensional function. +// +// Standard starting point: +// [0, 10, 20] +// +// References: +// - Box, M.J.: A comparison of several current optimization methods, and the +// use of transformations in constrained problems. Comput J 9 (1966), 67-77 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type Box3D struct{} + +func (Box3D) Func(x []float64) (sum float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + + for i := 1; i <= 10; i++ { + c := -float64(i) / 10 + y := math.Exp(c) - math.Exp(10*c) + f := math.Exp(c*x[0]) - math.Exp(c*x[1]) - x[2]*y + sum += f * f + } + return sum +} + +func (Box3D) Grad(grad, x []float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + grad[0] = 0 + grad[1] = 0 + grad[2] = 0 + for i := 1; i <= 10; i++ { + c := -float64(i) / 10 + y := math.Exp(c) - math.Exp(10*c) + f := math.Exp(c*x[0]) - math.Exp(c*x[1]) - x[2]*y + grad[0] += 2 * f * c * math.Exp(c*x[0]) + grad[1] += -2 * f * c * math.Exp(c*x[1]) + grad[2] += -2 * f * y + } +} + +func (Box3D) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 10, 1}, + F: 0, + Global: true, + }, + { + X: []float64{10, 1, -1}, + F: 0, + Global: true, + }, + { + // Any point at the line {a, a, 0}. + X: []float64{1, 1, 0}, + F: 0, + Global: true, + }, + } +} + +// BraninHoo implements the Branin-Hoo function. BraninHoo is a 2-dimensional +// test function with three global minima. It is typically evaluated in the domain +// x_0 ∈ [-5, 10], x_1 ∈ [0, 15]. 
+// f(x) = (x_1 - (5.1/(4π^2))*x_0^2 + (5/π)*x_0 - 6)^2 + 10*(1-1/(8π))cos(x_0) + 10 +// It has a minimum value of 0.397887 at x^* = {(-π, 12.275), (π, 2.275), (9.424778, 2.475)} +// +// Reference: +// https://www.sfu.ca/~ssurjano/branin.html (obtained June 2017) +type BraninHoo struct{} + +func (BraninHoo) Func(x []float64) float64 { + if len(x) != 2 { + panic("functions: dimension of the problem must be 2") + } + a, b, c, r, s, t := 1.0, 5.1/(4*math.Pi*math.Pi), 5/math.Pi, 6.0, 10.0, 1/(8*math.Pi) + + term := x[1] - b*x[0]*x[0] + c*x[0] - r + return a*term*term + s*(1-t)*math.Cos(x[0]) + s +} + +func (BraninHoo) Minima() []Minimum { + return []Minimum{ + { + X: []float64{-math.Pi, 12.275}, + F: 0.397887, + Global: true, + }, + { + X: []float64{math.Pi, 2.275}, + F: 0.397887, + Global: true, + }, + { + X: []float64{9.424778, 2.475}, + F: 0.397887, + Global: true, + }, + } +} + +// BrownBadlyScaled implements the Brown's badly scaled function. +// +// Standard starting point: +// [1, 1] +// +// References: +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type BrownBadlyScaled struct{} + +func (BrownBadlyScaled) Func(x []float64) float64 { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + + f1 := x[0] - 1e6 + f2 := x[1] - 2e-6 + f3 := x[0]*x[1] - 2 + return f1*f1 + f2*f2 + f3*f3 +} + +func (BrownBadlyScaled) Grad(grad, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + f1 := x[0] - 1e6 + f2 := x[1] - 2e-6 + f3 := x[0]*x[1] - 2 + grad[0] = 2*f1 + 2*f3*x[1] + grad[1] = 2*f2 + 2*f3*x[0] +} + +func (BrownBadlyScaled) Hess(dst *mat.SymDense, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + h00 := 2 + 2*x[1]*x[1] + h01 := 4*x[0]*x[1] - 4 + h11 := 2 + 2*x[0]*x[0] + dst.SetSym(0, 0, h00) + dst.SetSym(0, 1, h01) + dst.SetSym(1, 1, h11) +} + +func (BrownBadlyScaled) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1e6, 2e-6}, + F: 0, + Global: true, + }, + } +} + +// BrownAndDennis implements the Brown and Dennis function. +// +// Standard starting point: +// [25, 5, -5, -1] +// +// References: +// - Brown, K.M., Dennis, J.E.: New computational algorithms for minimizing a +// sum of squares of nonlinear functions. Research Report Number 71-6, Yale +// University (1971) +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. 
ACM Trans Math Softw 7 (1981), 17-41 +type BrownAndDennis struct{} + +func (BrownAndDennis) Func(x []float64) (sum float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + + for i := 1; i <= 20; i++ { + c := float64(i) / 5 + f1 := x[0] + c*x[1] - math.Exp(c) + f2 := x[2] + x[3]*math.Sin(c) - math.Cos(c) + f := f1*f1 + f2*f2 + sum += f * f + } + return sum +} + +func (BrownAndDennis) Grad(grad, x []float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 20; i++ { + c := float64(i) / 5 + f1 := x[0] + c*x[1] - math.Exp(c) + f2 := x[2] + x[3]*math.Sin(c) - math.Cos(c) + f := f1*f1 + f2*f2 + grad[0] += 4 * f * f1 + grad[1] += 4 * f * f1 * c + grad[2] += 4 * f * f2 + grad[3] += 4 * f * f2 * math.Sin(c) + } +} + +func (BrownAndDennis) Hess(dst *mat.SymDense, x []float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + for i := 0; i < 4; i++ { + for j := i; j < 4; j++ { + dst.SetSym(i, j, 0) + } + } + for i := 1; i <= 20; i++ { + d1 := float64(i) / 5 + d2 := math.Sin(d1) + t1 := x[0] + d1*x[1] - math.Exp(d1) + t2 := x[2] + d2*x[3] - math.Cos(d1) + t := t1*t1 + t2*t2 + s3 := 2 * t1 * t2 + r1 := t + 2*t1*t1 + r2 := t + 2*t2*t2 + dst.SetSym(0, 0, dst.At(0, 0)+r1) + dst.SetSym(0, 1, dst.At(0, 1)+d1*r1) + dst.SetSym(1, 1, dst.At(1, 1)+d1*d1*r1) + dst.SetSym(0, 2, dst.At(0, 2)+s3) + dst.SetSym(1, 2, dst.At(1, 2)+d1*s3) + dst.SetSym(2, 2, dst.At(2, 2)+r2) + dst.SetSym(0, 3, dst.At(0, 3)+d2*s3) + dst.SetSym(1, 3, dst.At(1, 3)+d1*d2*s3) + dst.SetSym(2, 3, dst.At(2, 3)+d2*r2) + dst.SetSym(3, 3, dst.At(3, 3)+d2*d2*r2) + } + for i := 0; i < 4; i++ { + for j := i; j < 4; j++ { + dst.SetSym(i, j, 4*dst.At(i, j)) + } + } +} + +func (BrownAndDennis) Minima() []Minimum { + return []Minimum{ + { + X: []float64{-11.594439904762162, 13.203630051207202, -0.4034394881768612, 0.2367787744557347}, + F: 85822.20162635634, + Global: true, + }, + } +} + +// ExtendedPowellSingular implements the extended Powell's function. +// Its Hessian matrix is singular at the minimizer. +// +// Standard starting point: +// [3, -1, 0, 3, 3, -1, 0, 3, ..., 3, -1, 0, 3] +// +// References: +// - Spedicato E.: Computational experience with quasi-Newton algorithms for +// minimization problems of moderatly large size. Towards Global +// Optimization 2 (1978), 209-219 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. 
ACM Trans Math Softw 7 (1981), 17-41 +type ExtendedPowellSingular struct{} + +func (ExtendedPowellSingular) Func(x []float64) (sum float64) { + if len(x)%4 != 0 { + panic("dimension of the problem must be a multiple of 4") + } + + for i := 0; i < len(x); i += 4 { + f1 := x[i] + 10*x[i+1] + f2 := x[i+2] - x[i+3] + t := x[i+1] - 2*x[i+2] + f3 := t * t + t = x[i] - x[i+3] + f4 := t * t + sum += f1*f1 + 5*f2*f2 + f3*f3 + 10*f4*f4 + } + return sum +} + +func (ExtendedPowellSingular) Grad(grad, x []float64) { + if len(x)%4 != 0 { + panic("dimension of the problem must be a multiple of 4") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := 0; i < len(x); i += 4 { + f1 := x[i] + 10*x[i+1] + f2 := x[i+2] - x[i+3] + t1 := x[i+1] - 2*x[i+2] + f3 := t1 * t1 + t2 := x[i] - x[i+3] + f4 := t2 * t2 + grad[i] = 2*f1 + 40*f4*t2 + grad[i+1] = 20*f1 + 4*f3*t1 + grad[i+2] = 10*f2 - 8*f3*t1 + grad[i+3] = -10*f2 - 40*f4*t2 + } +} + +func (ExtendedPowellSingular) Minima() []Minimum { + return []Minimum{ + { + X: []float64{0, 0, 0, 0}, + F: 0, + Global: true, + }, + { + X: []float64{0, 0, 0, 0, 0, 0, 0, 0}, + F: 0, + Global: true, + }, + { + X: []float64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + F: 0, + Global: true, + }, + } +} + +// ExtendedRosenbrock implements the extended, multidimensional Rosenbrock +// function. +// +// Standard starting point: +// Easy: [-1.2, 1, -1.2, 1, ...] +// Hard: any point far from the minimum +// +// References: +// - Rosenbrock, H.H.: An Automatic Method for Finding the Greatest or Least +// Value of a Function. Computer J 3 (1960), 175-184 +// - http://en.wikipedia.org/wiki/Rosenbrock_function +type ExtendedRosenbrock struct{} + +func (ExtendedRosenbrock) Func(x []float64) (sum float64) { + for i := 0; i < len(x)-1; i++ { + a := 1 - x[i] + b := x[i+1] - x[i]*x[i] + sum += a*a + 100*b*b + } + return sum +} + +func (ExtendedRosenbrock) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + dim := len(x) + for i := range grad { + grad[i] = 0 + } + for i := 0; i < dim-1; i++ { + grad[i] -= 2 * (1 - x[i]) + grad[i] -= 400 * (x[i+1] - x[i]*x[i]) * x[i] + } + for i := 1; i < dim; i++ { + grad[i] += 200 * (x[i] - x[i-1]*x[i-1]) + } +} + +func (ExtendedRosenbrock) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{-0.7756592265653526, 0.6130933654850433, + 0.38206284633839305, 0.14597201855219452}, + F: 3.701428610430017, + Global: false, + }, + { + X: []float64{1, 1, 1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{-0.9620510206947502, 0.9357393959767103, + 0.8807136041943204, 0.7778776758544063, 0.6050936785926526}, + F: 3.930839434133027, + Global: false, + }, + { + X: []float64{-0.9865749795709938, 0.9833982288361819, 0.972106670053092, + 0.9474374368264362, 0.8986511848517299, 0.8075739520354182}, + F: 3.973940500930295, + Global: false, + }, + { + X: []float64{-0.9917225725614055, 0.9935553935033712, 0.992173321594692, + 0.9868987626903134, 0.975164756608872, 0.9514319827049906, 0.9052228177139495}, + F: 3.9836005364248543, + Global: false, + }, + { + X: []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, + F: 0, + Global: true, + }, + } +} + +// Gaussian implements the Gaussian function. +// The function has one global minimum and a number of false local minima +// caused by the finite floating point precision. 
+// +// Standard starting point: +// [0.4, 1, 0] +// +// Reference: +// More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained optimization +// software. ACM Trans Math Softw 7 (1981), 17-41 +type Gaussian struct{} + +func (Gaussian) y(i int) (yi float64) { + switch i { + case 1, 15: + yi = 0.0009 + case 2, 14: + yi = 0.0044 + case 3, 13: + yi = 0.0175 + case 4, 12: + yi = 0.0540 + case 5, 11: + yi = 0.1295 + case 6, 10: + yi = 0.2420 + case 7, 9: + yi = 0.3521 + case 8: + yi = 0.3989 + } + return yi +} + +func (g Gaussian) Func(x []float64) (sum float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + + for i := 1; i <= 15; i++ { + c := 0.5 * float64(8-i) + b := c - x[2] + d := b * b + e := math.Exp(-0.5 * x[1] * d) + f := x[0]*e - g.y(i) + sum += f * f + } + return sum +} + +func (g Gaussian) Grad(grad, x []float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + grad[0] = 0 + grad[1] = 0 + grad[2] = 0 + for i := 1; i <= 15; i++ { + c := 0.5 * float64(8-i) + b := c - x[2] + d := b * b + e := math.Exp(-0.5 * x[1] * d) + f := x[0]*e - g.y(i) + grad[0] += 2 * f * e + grad[1] -= f * e * d * x[0] + grad[2] += 2 * f * e * x[0] * x[1] * b + } +} + +func (Gaussian) Minima() []Minimum { + return []Minimum{ + { + X: []float64{0.398956137837997, 1.0000190844805048, 0}, + F: 1.12793276961912e-08, + Global: true, + }, + } +} + +// GulfResearchAndDevelopment implements the Gulf Research and Development function. +// +// Standard starting point: +// [5, 2.5, 0.15] +// +// References: +// - Cox, R.A.: Comparison of the performance of seven optimization algorithms +// on twelve unconstrained minimization problems. Ref. 1335CNO4, Gulf +// Research and Development Company, Pittsburg (1969) +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type GulfResearchAndDevelopment struct{} + +func (GulfResearchAndDevelopment) Func(x []float64) (sum float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + + for i := 1; i <= 99; i++ { + arg := float64(i) / 100 + r := math.Pow(-50*math.Log(arg), 2.0/3.0) + 25 - x[1] + t1 := math.Pow(math.Abs(r), x[2]) / x[0] + t2 := math.Exp(-t1) + t := t2 - arg + sum += t * t + } + return sum +} + +func (GulfResearchAndDevelopment) Grad(grad, x []float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 99; i++ { + arg := float64(i) / 100 + r := math.Pow(-50*math.Log(arg), 2.0/3.0) + 25 - x[1] + t1 := math.Pow(math.Abs(r), x[2]) / x[0] + t2 := math.Exp(-t1) + t := t2 - arg + s1 := t1 * t2 * t + grad[0] += s1 + grad[1] += s1 / r + grad[2] -= s1 * math.Log(math.Abs(r)) + } + grad[0] *= 2 / x[0] + grad[1] *= 2 * x[2] + grad[2] *= 2 +} + +func (GulfResearchAndDevelopment) Minima() []Minimum { + return []Minimum{ + { + X: []float64{50, 25, 1.5}, + F: 0, + Global: true, + }, + { + X: []float64{99.89529935174151, 60.61453902799833, 9.161242695144592}, + F: 32.8345, + Global: false, + }, + { + X: []float64{201.662589489426, 60.61633150468155, 10.224891158488965}, + F: 32.8345, + Global: false, + }, + } +} + +// HelicalValley implements the helical valley function of Fletcher and Powell. +// Function is not defined at x[0] = 0. 
+// +// Standard starting point: +// [-1, 0, 0] +// +// References: +// - Fletcher, R., Powell, M.J.D.: A rapidly convergent descent method for +// minimization. Comput J 6 (1963), 163-168 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type HelicalValley struct{} + +func (HelicalValley) Func(x []float64) float64 { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if x[0] == 0 { + panic("function not defined at x[0] = 0") + } + + theta := 0.5 * math.Atan(x[1]/x[0]) / math.Pi + if x[0] < 0 { + theta += 0.5 + } + f1 := 10 * (x[2] - 10*theta) + f2 := 10 * (math.Hypot(x[0], x[1]) - 1) + f3 := x[2] + return f1*f1 + f2*f2 + f3*f3 +} + +func (HelicalValley) Grad(grad, x []float64) { + if len(x) != 3 { + panic("dimension of the problem must be 3") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + if x[0] == 0 { + panic("function not defined at x[0] = 0") + } + + theta := 0.5 * math.Atan(x[1]/x[0]) / math.Pi + if x[0] < 0 { + theta += 0.5 + } + h := math.Hypot(x[0], x[1]) + r := 1 / h + q := r * r / math.Pi + s := x[2] - 10*theta + grad[0] = 200 * (5*s*q*x[1] + (h-1)*r*x[0]) + grad[1] = 200 * (-5*s*q*x[0] + (h-1)*r*x[1]) + grad[2] = 2 * (100*s + x[2]) +} + +func (HelicalValley) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 0, 0}, + F: 0, + Global: true, + }, + } +} + +// Linear implements a linear function. +type Linear struct{} + +func (Linear) Func(x []float64) float64 { + return floats.Sum(x) +} + +func (Linear) Grad(grad, x []float64) []float64 { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 1 + } + return grad +} + +// PenaltyI implements the first penalty function by Gill, Murray and Pitfield. +// +// Standard starting point: +// [1, ..., n] +// +// References: +// - Gill, P.E., Murray, W., Pitfield, R.A.: The implementation of two revised +// quasi-Newton algorithms for unconstrained optimization. Report NAC 11, +// National Phys Lab (1972), 82-83 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type PenaltyI struct{} + +func (PenaltyI) Func(x []float64) (sum float64) { + for _, v := range x { + sum += (v - 1) * (v - 1) + } + sum *= 1e-5 + + var s float64 + for _, v := range x { + s += v * v + } + sum += (s - 0.25) * (s - 0.25) + return sum +} + +func (PenaltyI) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + s := -0.25 + for _, v := range x { + s += v * v + } + for i, v := range x { + grad[i] = 2 * (2*s*v + 1e-5*(v-1)) + } +} + +func (PenaltyI) Minima() []Minimum { + return []Minimum{ + { + X: []float64{0.2500074995875379, 0.2500074995875379, 0.2500074995875379, 0.2500074995875379}, + F: 2.2499775008999372e-05, + Global: true, + }, + { + X: []float64{0.15812230111311634, 0.15812230111311634, 0.15812230111311634, + 0.15812230111311634, 0.15812230111311634, 0.15812230111311634, + 0.15812230111311634, 0.15812230111311634, 0.15812230111311634, 0.15812230111311634}, + F: 7.087651467090369e-05, + Global: true, + }, + } +} + +// PenaltyII implements the second penalty function by Gill, Murray and Pitfield. +// +// Standard starting point: +// [0.5, ..., 0.5] +// +// References: +// - Gill, P.E., Murray, W., Pitfield, R.A.: The implementation of two revised +// quasi-Newton algorithms for unconstrained optimization. 
Report NAC 11, +// National Phys Lab (1972), 82-83 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type PenaltyII struct{} + +func (PenaltyII) Func(x []float64) (sum float64) { + dim := len(x) + s := -1.0 + for i, v := range x { + s += float64(dim-i) * v * v + } + for i := 1; i < dim; i++ { + yi := math.Exp(float64(i+1)/10) + math.Exp(float64(i)/10) + f := math.Exp(x[i]/10) + math.Exp(x[i-1]/10) - yi + sum += f * f + } + for i := 1; i < dim; i++ { + f := math.Exp(x[i]/10) - math.Exp(-1.0/10) + sum += f * f + } + sum *= 1e-5 + sum += (x[0] - 0.2) * (x[0] - 0.2) + sum += s * s + return sum +} + +func (PenaltyII) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + dim := len(x) + s := -1.0 + for i, v := range x { + s += float64(dim-i) * v * v + } + for i, v := range x { + grad[i] = 4 * s * float64(dim-i) * v + } + for i := 1; i < dim; i++ { + yi := math.Exp(float64(i+1)/10) + math.Exp(float64(i)/10) + f := math.Exp(x[i]/10) + math.Exp(x[i-1]/10) - yi + grad[i] += 1e-5 * f * math.Exp(x[i]/10) / 5 + grad[i-1] += 1e-5 * f * math.Exp(x[i-1]/10) / 5 + } + for i := 1; i < dim; i++ { + f := math.Exp(x[i]/10) - math.Exp(-1.0/10) + grad[i] += 1e-5 * f * math.Exp(x[i]/10) / 5 + } + grad[0] += 2 * (x[0] - 0.2) +} + +func (PenaltyII) Minima() []Minimum { + return []Minimum{ + { + X: []float64{0.19999933335, 0.19131670128566283, 0.4801014860897, 0.5188454026659}, + F: 9.376293007355449e-06, + Global: true, + }, + { + X: []float64{0.19998360520892217, 0.010350644318663525, + 0.01960493546891094, 0.03208906550305253, 0.04993267593895693, + 0.07651399534454084, 0.11862407118600789, 0.1921448731780023, + 0.3473205862372022, 0.36916437893066273}, + F: 0.00029366053745674594, + Global: true, + }, + } +} + +// PowellBadlyScaled implements the Powell's badly scaled function. +// The function is very flat near the minimum. A satisfactory solution is one +// that gives f(x) ≅ 1e-13. +// +// Standard starting point: +// [0, 1] +// +// References: +// - Powell, M.J.D.: A Hybrid Method for Nonlinear Equations. Numerical +// Methods for Nonlinear Algebraic Equations, P. Rabinowitz (ed.), Gordon +// and Breach (1970) +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. 
ACM Trans Math Softw 7 (1981), 17-41 +type PowellBadlyScaled struct{} + +func (PowellBadlyScaled) Func(x []float64) float64 { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + + f1 := 1e4*x[0]*x[1] - 1 + f2 := math.Exp(-x[0]) + math.Exp(-x[1]) - 1.0001 + return f1*f1 + f2*f2 +} + +func (PowellBadlyScaled) Grad(grad, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + f1 := 1e4*x[0]*x[1] - 1 + f2 := math.Exp(-x[0]) + math.Exp(-x[1]) - 1.0001 + grad[0] = 2 * (1e4*f1*x[1] - f2*math.Exp(-x[0])) + grad[1] = 2 * (1e4*f1*x[0] - f2*math.Exp(-x[1])) +} + +func (PowellBadlyScaled) Hess(dst *mat.SymDense, x []float64) { + if len(x) != 2 { + panic("dimension of the problem must be 2") + } + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + t1 := 1e4*x[0]*x[1] - 1 + s1 := math.Exp(-x[0]) + s2 := math.Exp(-x[1]) + t2 := s1 + s2 - 1.0001 + + h00 := 2 * (1e8*x[1]*x[1] + s1*(s1+t2)) + h01 := 2 * (1e4*(1+2*t1) + s1*s2) + h11 := 2 * (1e8*x[0]*x[0] + s2*(s2+t2)) + dst.SetSym(0, 0, h00) + dst.SetSym(0, 1, h01) + dst.SetSym(1, 1, h11) +} + +func (PowellBadlyScaled) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1.0981593296997149e-05, 9.106146739867375}, + F: 0, + Global: true, + }, + } +} + +// Trigonometric implements the trigonometric function. +// +// Standard starting point: +// [1/dim, ..., 1/dim] +// +// References: +// - Spedicato E.: Computational experience with quasi-Newton algorithms for +// minimization problems of moderatly large size. Towards Global +// Optimization 2 (1978), 209-219 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type Trigonometric struct{} + +func (Trigonometric) Func(x []float64) (sum float64) { + var s1 float64 + for _, v := range x { + s1 += math.Cos(v) + } + for i, v := range x { + f := float64(len(x)+i+1) - float64(i+1)*math.Cos(v) - math.Sin(v) - s1 + sum += f * f + } + return sum +} + +func (Trigonometric) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + var s1 float64 + for _, v := range x { + s1 += math.Cos(v) + } + + var s2 float64 + for i, v := range x { + f := float64(len(x)+i+1) - float64(i+1)*math.Cos(v) - math.Sin(v) - s1 + s2 += f + grad[i] = 2 * f * (float64(i+1)*math.Sin(v) - math.Cos(v)) + } + + for i, v := range x { + grad[i] += 2 * s2 * math.Sin(v) + } +} + +func (Trigonometric) Minima() []Minimum { + return []Minimum{ + { + X: []float64{0.04296456438227447, 0.043976287478192246, + 0.045093397949095684, 0.04633891624617569, 0.047744381782831, + 0.04935473251330618, 0.05123734850076505, 0.19520946391410446, + 0.1649776652761741, 0.06014857783799575}, + F: 0, + Global: true, + }, + { + // TODO(vladimir-ch): If we knew the location of this minimum more + // accurately, we could decrease defaultGradTol. + X: []float64{0.05515090434047145, 0.05684061730812344, + 0.05876400231100774, 0.060990608903034337, 0.06362621381044778, + 0.06684318087364617, 0.2081615177172172, 0.16436309604419047, + 0.08500689695564931, 0.09143145386293675}, + F: 2.795056121876575e-05, + Global: false, + }, + } +} + +// VariablyDimensioned implements a variably dimensioned function. +// +// Standard starting point: +// [..., (dim-i)/dim, ...], i=1,...,dim +// +// References: +// More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained optimization +// software. 
ACM Trans Math Softw 7 (1981), 17-41 +type VariablyDimensioned struct{} + +func (VariablyDimensioned) Func(x []float64) (sum float64) { + for _, v := range x { + t := v - 1 + sum += t * t + } + + var s float64 + for i, v := range x { + s += float64(i+1) * (v - 1) + } + s *= s + sum += s + s *= s + sum += s + return sum +} + +func (VariablyDimensioned) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + var s float64 + for i, v := range x { + s += float64(i+1) * (v - 1) + } + for i, v := range x { + grad[i] = 2 * (v - 1 + s*float64(i+1)*(1+2*s*s)) + } +} + +func (VariablyDimensioned) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1, 1, 1}, + F: 0, + Global: true, + }, + { + X: []float64{1, 1, 1, 1, 1, 1, 1, 1, 1, 1}, + F: 0, + Global: true, + }, + } +} + +// Watson implements the Watson's function. +// Dimension of the problem should be 2 <= dim <= 31. For dim == 9, the problem +// of minimizing the function is very ill conditioned. +// +// Standard starting point: +// [0, ..., 0] +// +// References: +// - Kowalik, J.S., Osborne, M.R.: Methods for Unconstrained Optimization +// Problems. Elsevier North-Holland, New York, 1968 +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type Watson struct{} + +func (Watson) Func(x []float64) (sum float64) { + for i := 1; i <= 29; i++ { + d1 := float64(i) / 29 + + d2 := 1.0 + var s1 float64 + for j := 1; j < len(x); j++ { + s1 += float64(j) * d2 * x[j] + d2 *= d1 + } + + d2 = 1.0 + var s2 float64 + for _, v := range x { + s2 += d2 * v + d2 *= d1 + } + + t := s1 - s2*s2 - 1 + sum += t * t + } + t := x[1] - x[0]*x[0] - 1 + sum += x[0]*x[0] + t*t + return sum +} + +func (Watson) Grad(grad, x []float64) { + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + for i := range grad { + grad[i] = 0 + } + for i := 1; i <= 29; i++ { + d1 := float64(i) / 29 + + d2 := 1.0 + var s1 float64 + for j := 1; j < len(x); j++ { + s1 += float64(j) * d2 * x[j] + d2 *= d1 + } + + d2 = 1.0 + var s2 float64 + for _, v := range x { + s2 += d2 * v + d2 *= d1 + } + + t := s1 - s2*s2 - 1 + s3 := 2 * d1 * s2 + d2 = 2 / d1 + for j := range x { + grad[j] += d2 * (float64(j) - s3) * t + d2 *= d1 + } + } + t := x[1] - x[0]*x[0] - 1 + grad[0] += x[0] * (2 - 4*t) + grad[1] += 2 * t +} + +func (Watson) Hess(dst *mat.SymDense, x []float64) { + dim := len(x) + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + for j := 0; j < dim; j++ { + for k := j; k < dim; k++ { + dst.SetSym(j, k, 0) + } + } + for i := 1; i <= 29; i++ { + d1 := float64(i) / 29 + d2 := 1.0 + var s1 float64 + for j := 1; j < dim; j++ { + s1 += float64(j) * d2 * x[j] + d2 *= d1 + } + + d2 = 1.0 + var s2 float64 + for _, v := range x { + s2 += d2 * v + d2 *= d1 + } + + t := s1 - s2*s2 - 1 + s3 := 2 * d1 * s2 + d2 = 2 / d1 + th := 2 * d1 * d1 * t + for j := 0; j < dim; j++ { + v := float64(j) - s3 + d3 := 1 / d1 + for k := 0; k <= j; k++ { + dst.SetSym(k, j, dst.At(k, j)+d2*d3*(v*(float64(k)-s3)-th)) + d3 *= d1 + } + d2 *= d1 + } + } + t1 := x[1] - x[0]*x[0] - 1 + dst.SetSym(0, 0, dst.At(0, 0)+8*x[0]*x[0]+2-4*t1) + dst.SetSym(0, 1, dst.At(0, 1)-4*x[0]) + dst.SetSym(1, 1, dst.At(1, 1)+2) +} + +func (Watson) Minima() []Minimum { + return []Minimum{ + { + X: 
[]float64{-0.01572508644590686, 1.012434869244884, -0.23299162372002916, + 1.2604300800978554, -1.51372891341701, 0.9929964286340117}, + F: 0.0022876700535523838, + Global: true, + }, + { + X: []float64{-1.5307036521992127e-05, 0.9997897039319495, 0.01476396369355022, + 0.14634232829939883, 1.0008211030046426, -2.617731140519101, 4.104403164479245, + -3.1436122785568514, 1.0526264080103074}, + F: 1.399760138096796e-06, + Global: true, + }, + // TODO(vladimir-ch): More, Garbow, Hillstrom list just the value, but + // not the location. Our minimizers find a minimum, but the value is + // different. + // { + // // For dim == 12 + // F: 4.72238e-10, + // Global: true, + // }, + // TODO(vladimir-ch): netlib/uncon report a value of 2.48631d-20 for dim == 20. + } +} + +// Wood implements the Wood's function. +// +// Standard starting point: +// [-3, -1, -3, -1] +// +// References: +// - Colville, A.R.: A comparative study of nonlinear programming codes. +// Report 320-2949, IBM New York Scientific Center (1968) +// - More, J., Garbow, B.S., Hillstrom, K.E.: Testing unconstrained +// optimization software. ACM Trans Math Softw 7 (1981), 17-41 +type Wood struct{} + +func (Wood) Func(x []float64) (sum float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + + f1 := x[1] - x[0]*x[0] + f2 := 1 - x[0] + f3 := x[3] - x[2]*x[2] + f4 := 1 - x[2] + f5 := x[1] + x[3] - 2 + f6 := x[1] - x[3] + return 100*f1*f1 + f2*f2 + 90*f3*f3 + f4*f4 + 10*f5*f5 + 0.1*f6*f6 +} + +func (Wood) Grad(grad, x []float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + + f1 := x[1] - x[0]*x[0] + f2 := 1 - x[0] + f3 := x[3] - x[2]*x[2] + f4 := 1 - x[2] + f5 := x[1] + x[3] - 2 + f6 := x[1] - x[3] + grad[0] = -2 * (200*f1*x[0] + f2) + grad[1] = 2 * (100*f1 + 10*f5 + 0.1*f6) + grad[2] = -2 * (180*f3*x[2] + f4) + grad[3] = 2 * (90*f3 + 10*f5 - 0.1*f6) +} + +func (Wood) Hess(dst *mat.SymDense, x []float64) { + if len(x) != 4 { + panic("dimension of the problem must be 4") + } + if len(x) != dst.Symmetric() { + panic("incorrect size of the Hessian") + } + + dst.SetSym(0, 0, 400*(3*x[0]*x[0]-x[1])+2) + dst.SetSym(0, 1, -400*x[0]) + dst.SetSym(1, 1, 220.2) + dst.SetSym(0, 2, 0) + dst.SetSym(1, 2, 0) + dst.SetSym(2, 2, 360*(3*x[2]*x[2]-x[3])+2) + dst.SetSym(0, 3, 0) + dst.SetSym(1, 3, 19.8) + dst.SetSym(2, 3, -360*x[2]) + dst.SetSym(3, 3, 200.2) +} + +func (Wood) Minima() []Minimum { + return []Minimum{ + { + X: []float64{1, 1, 1, 1}, + F: 0, + Global: true, + }, + } +} + +// ConcaveRight implements an univariate function that is concave to the right +// of the minimizer which is located at x=sqrt(2). +// +// References: +// More, J.J., and Thuente, D.J.: Line Search Algorithms with Guaranteed Sufficient Decrease. +// ACM Transactions on Mathematical Software 20(3) (1994), 286–307, eq. (5.1) +type ConcaveRight struct{} + +func (ConcaveRight) Func(x []float64) float64 { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + return -x[0] / (x[0]*x[0] + 2) +} + +func (ConcaveRight) Grad(grad, x []float64) { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + xSqr := x[0] * x[0] + grad[0] = (xSqr - 2) / (xSqr + 2) / (xSqr + 2) +} + +// ConcaveLeft implements an univariate function that is concave to the left of +// the minimizer which is located at x=399/250=1.596. 
+// +// References: +// More, J.J., and Thuente, D.J.: Line Search Algorithms with Guaranteed Sufficient Decrease. +// ACM Transactions on Mathematical Software 20(3) (1994), 286–307, eq. (5.2) +type ConcaveLeft struct{} + +func (ConcaveLeft) Func(x []float64) float64 { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + return math.Pow(x[0]+0.004, 4) * (x[0] - 1.996) +} + +func (ConcaveLeft) Grad(grad, x []float64) { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + grad[0] = math.Pow(x[0]+0.004, 3) * (5*x[0] - 7.98) +} + +// Plassmann implements an univariate oscillatory function where the value of L +// controls the number of oscillations. The value of Beta controls the size of +// the derivative at zero and the size of the interval where the strong Wolfe +// conditions can hold. For small values of Beta this function represents a +// difficult test problem for linesearchers also because the information based +// on the derivative is unreliable due to the oscillations. +// +// References: +// More, J.J., and Thuente, D.J.: Line Search Algorithms with Guaranteed Sufficient Decrease. +// ACM Transactions on Mathematical Software 20(3) (1994), 286–307, eq. (5.3) +type Plassmann struct { + L float64 // Number of oscillations for |x-1| ≥ Beta. + Beta float64 // Size of the derivative at zero, f'(0) = -Beta. +} + +func (f Plassmann) Func(x []float64) float64 { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + a := x[0] + b := f.Beta + l := f.L + r := 2 * (1 - b) / l / math.Pi * math.Sin(l*math.Pi/2*a) + switch { + case a <= 1-b: + r += 1 - a + case 1-b < a && a <= 1+b: + r += 0.5 * ((a-1)*(a-1)/b + b) + default: // a > 1+b + r += a - 1 + } + return r +} + +func (f Plassmann) Grad(grad, x []float64) { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + a := x[0] + b := f.Beta + l := f.L + grad[0] = (1 - b) * math.Cos(l*math.Pi/2*a) + switch { + case a <= 1-b: + grad[0]-- + case 1-b < a && a <= 1+b: + grad[0] += (a - 1) / b + default: // a > 1+b + grad[0]++ + } +} + +// YanaiOzawaKaneko is an univariate convex function where the values of Beta1 +// and Beta2 control the curvature around the minimum. Far away from the +// minimum the function approximates an absolute value function. Near the +// minimum, the function can either be sharply curved or flat, controlled by +// the parameter values. +// +// References: +// - More, J.J., and Thuente, D.J.: Line Search Algorithms with Guaranteed Sufficient Decrease. +// ACM Transactions on Mathematical Software 20(3) (1994), 286–307, eq. (5.4) +// - Yanai, H., Ozawa, M., and Kaneko, S.: Interpolation methods in one dimensional +// optimization. 
Computing 27 (1981), 155–163 +type YanaiOzawaKaneko struct { + Beta1 float64 + Beta2 float64 +} + +func (f YanaiOzawaKaneko) Func(x []float64) float64 { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + a := x[0] + b1 := f.Beta1 + b2 := f.Beta2 + g1 := math.Sqrt(1+b1*b1) - b1 + g2 := math.Sqrt(1+b2*b2) - b2 + return g1*math.Sqrt((a-1)*(a-1)+b2*b2) + g2*math.Sqrt(a*a+b1*b1) +} + +func (f YanaiOzawaKaneko) Grad(grad, x []float64) { + if len(x) != 1 { + panic("dimension of the problem must be 1") + } + if len(x) != len(grad) { + panic("incorrect size of the gradient") + } + a := x[0] + b1 := f.Beta1 + b2 := f.Beta2 + g1 := math.Sqrt(1+b1*b1) - b1 + g2 := math.Sqrt(1+b2*b2) - b2 + grad[0] = g1*(a-1)/math.Sqrt(b2*b2+(a-1)*(a-1)) + g2*a/math.Sqrt(b1*b1+a*a) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/functions/minsurf.go b/vendor/gonum.org/v1/gonum/optimize/functions/minsurf.go new file mode 100644 index 0000000..140506f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functions/minsurf.go @@ -0,0 +1,256 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package functions + +import ( + "fmt" + "math" +) + +// MinimalSurface implements a finite element approximation to a minimal +// surface problem: determine the surface with minimal area and given boundary +// values in a unit square centered at the origin. +// +// References: +// Averick, M.B., Carter, R.G., Moré, J.J., Xue, G.-L.: The Minpack-2 Test +// Problem Collection. Preprint MCS-P153-0692, Argonne National Laboratory (1992) +type MinimalSurface struct { + bottom, top []float64 + left, right []float64 + origin, step [2]float64 +} + +// NewMinimalSurface creates a new discrete minimal surface problem and +// precomputes its boundary values. The problem is discretized on a rectilinear +// grid with nx×ny nodes which means that the problem dimension is (nx-2)(ny-2). +func NewMinimalSurface(nx, ny int) *MinimalSurface { + ms := &MinimalSurface{ + bottom: make([]float64, nx), + top: make([]float64, nx), + left: make([]float64, ny), + right: make([]float64, ny), + origin: [2]float64{-0.5, -0.5}, + step: [2]float64{1 / float64(nx-1), 1 / float64(ny-1)}, + } + + ms.initBoundary(ms.bottom, ms.origin[0], ms.origin[1], ms.step[0], 0) + startY := ms.origin[1] + float64(ny-1)*ms.step[1] + ms.initBoundary(ms.top, ms.origin[0], startY, ms.step[0], 0) + ms.initBoundary(ms.left, ms.origin[0], ms.origin[1], 0, ms.step[1]) + startX := ms.origin[0] + float64(nx-1)*ms.step[0] + ms.initBoundary(ms.right, startX, ms.origin[1], 0, ms.step[1]) + + return ms +} + +// Func returns the area of the surface represented by the vector x. +func (ms *MinimalSurface) Func(x []float64) (area float64) { + nx, ny := ms.Dims() + if len(x) != (nx-2)*(ny-2) { + panic("functions: problem size mismatch") + } + + hx, hy := ms.Steps() + for j := 0; j < ny-1; j++ { + for i := 0; i < nx-1; i++ { + vLL := ms.at(i, j, x) + vLR := ms.at(i+1, j, x) + vUL := ms.at(i, j+1, x) + vUR := ms.at(i+1, j+1, x) + + dvLdx := (vLR - vLL) / hx + dvLdy := (vUL - vLL) / hy + dvUdx := (vUR - vUL) / hx + dvUdy := (vUR - vLR) / hy + + fL := math.Sqrt(1 + dvLdx*dvLdx + dvLdy*dvLdy) + fU := math.Sqrt(1 + dvUdx*dvUdx + dvUdy*dvUdy) + area += fL + fU + } + } + area *= 0.5 * hx * hy + return area +} + +// Grad evaluates the area gradient of the surface represented by the vector. 
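+// If grad is nil, a slice of length len(x) is allocated and returned;
+// otherwise grad must have the same length as x and is reused.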
+func (ms *MinimalSurface) Grad(grad, x []float64) []float64 { + nx, ny := ms.Dims() + if len(x) != (nx-2)*(ny-2) { + panic("functions: problem size mismatch") + } + if grad == nil { + grad = make([]float64, len(x)) + } + if len(x) != len(grad) { + panic("functions: unexpected size mismatch") + } + + for i := range grad { + grad[i] = 0 + } + hx, hy := ms.Steps() + for j := 0; j < ny-1; j++ { + for i := 0; i < nx-1; i++ { + vLL := ms.at(i, j, x) + vLR := ms.at(i+1, j, x) + vUL := ms.at(i, j+1, x) + vUR := ms.at(i+1, j+1, x) + + dvLdx := (vLR - vLL) / hx + dvLdy := (vUL - vLL) / hy + dvUdx := (vUR - vUL) / hx + dvUdy := (vUR - vLR) / hy + + fL := math.Sqrt(1 + dvLdx*dvLdx + dvLdy*dvLdy) + fU := math.Sqrt(1 + dvUdx*dvUdx + dvUdy*dvUdy) + + if grad != nil { + if i > 0 { + if j > 0 { + grad[ms.index(i, j)] -= (dvLdx/hx + dvLdy/hy) / fL + } + if j < ny-2 { + grad[ms.index(i, j+1)] += (dvLdy/hy)/fL - (dvUdx/hx)/fU + } + } + if i < nx-2 { + if j > 0 { + grad[ms.index(i+1, j)] += (dvLdx/hx)/fL - (dvUdy/hy)/fU + } + if j < ny-2 { + grad[ms.index(i+1, j+1)] += (dvUdx/hx + dvUdy/hy) / fU + } + } + } + } + + } + cellSize := 0.5 * hx * hy + for i := range grad { + grad[i] *= cellSize + } + return grad +} + +// InitX returns a starting location for the minimization problem. Length of +// the returned slice is (nx-2)(ny-2). +func (ms *MinimalSurface) InitX() []float64 { + nx, ny := ms.Dims() + x := make([]float64, (nx-2)*(ny-2)) + for j := 1; j < ny-1; j++ { + for i := 1; i < nx-1; i++ { + x[ms.index(i, j)] = (ms.left[j] + ms.bottom[i]) / 2 + } + } + return x +} + +// ExactX returns the exact solution to the _continuous_ minimization problem +// projected on the interior nodes of the grid. Length of the returned slice is +// (nx-2)(ny-2). +func (ms *MinimalSurface) ExactX() []float64 { + nx, ny := ms.Dims() + v := make([]float64, (nx-2)*(ny-2)) + for j := 1; j < ny-1; j++ { + for i := 1; i < nx-1; i++ { + v[ms.index(i, j)] = ms.ExactSolution(ms.x(i), ms.y(j)) + } + } + return v +} + +// ExactSolution returns the value of the exact solution to the minimal surface +// problem at (x,y). The exact solution is +// F_exact(x,y) = U^2(x,y) - V^2(x,y), +// where U and V are the unique solutions to the equations +// x = u + uv^2 - u^3/3, +// y = -v - u^2v + v^3/3. +func (ms *MinimalSurface) ExactSolution(x, y float64) float64 { + var u = [2]float64{x, -y} + var f [2]float64 + var jac [2][2]float64 + for k := 0; k < 100; k++ { + f[0] = u[0] + u[0]*u[1]*u[1] - u[0]*u[0]*u[0]/3 - x + f[1] = -u[1] - u[0]*u[0]*u[1] + u[1]*u[1]*u[1]/3 - y + fNorm := math.Hypot(f[0], f[1]) + if fNorm < 1e-13 { + break + } + jac[0][0] = 1 + u[1]*u[1] - u[0]*u[0] + jac[0][1] = 2 * u[0] * u[1] + jac[1][0] = -2 * u[0] * u[1] + jac[1][1] = -1 - u[0]*u[0] + u[1]*u[1] + det := jac[0][0]*jac[1][1] - jac[0][1]*jac[1][0] + u[0] -= (jac[1][1]*f[0] - jac[0][1]*f[1]) / det + u[1] -= (jac[0][0]*f[1] - jac[1][0]*f[0]) / det + } + return u[0]*u[0] - u[1]*u[1] +} + +// Dims returns the size of the underlying rectilinear grid. +func (ms *MinimalSurface) Dims() (nx, ny int) { + return len(ms.bottom), len(ms.left) +} + +// Steps returns the spatial step sizes of the underlying rectilinear grid. 
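+// For the unit square discretized with nx×ny nodes these are
+// hx = 1/(nx-1) and hy = 1/(ny-1).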
+func (ms *MinimalSurface) Steps() (hx, hy float64) { + return ms.step[0], ms.step[1] +} + +func (ms *MinimalSurface) x(i int) float64 { + return ms.origin[0] + float64(i)*ms.step[0] +} + +func (ms *MinimalSurface) y(j int) float64 { + return ms.origin[1] + float64(j)*ms.step[1] +} + +func (ms *MinimalSurface) at(i, j int, x []float64) float64 { + nx, ny := ms.Dims() + if i < 0 || i >= nx { + panic(fmt.Sprintf("node [%v,%v] not on grid", i, j)) + } + if j < 0 || j >= ny { + panic(fmt.Sprintf("node [%v,%v] not on grid", i, j)) + } + + if i == 0 { + return ms.left[j] + } + if j == 0 { + return ms.bottom[i] + } + if i == nx-1 { + return ms.right[j] + } + if j == ny-1 { + return ms.top[i] + } + return x[ms.index(i, j)] +} + +// index maps an interior grid node (i, j) to a one-dimensional index and +// returns it. +func (ms *MinimalSurface) index(i, j int) int { + nx, ny := ms.Dims() + if i <= 0 || i >= nx-1 { + panic(fmt.Sprintf("[%v,%v] is not an interior node", i, j)) + } + if j <= 0 || j >= ny-1 { + panic(fmt.Sprintf("[%v,%v] is not an interior node", i, j)) + } + + return i - 1 + (j-1)*(nx-2) +} + +// initBoundary initializes with the exact solution the boundary b whose i-th +// element b[i] is located at [startX+i×hx, startY+i×hy]. +func (ms *MinimalSurface) initBoundary(b []float64, startX, startY, hx, hy float64) { + for i := range b { + x := startX + float64(i)*hx + y := startY + float64(i)*hy + b[i] = ms.ExactSolution(x, y) + } +} diff --git a/vendor/gonum.org/v1/gonum/optimize/functions/validate.go b/vendor/gonum.org/v1/gonum/optimize/functions/validate.go new file mode 100644 index 0000000..21dad86 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functions/validate.go @@ -0,0 +1,127 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package functions + +import ( + "math" + "testing" + + "gonum.org/v1/gonum/diff/fd" + "gonum.org/v1/gonum/floats" +) + +// function represents an objective function. +type function interface { + Func(x []float64) float64 +} + +type gradient interface { + Grad(grad, x []float64) []float64 +} + +// minimumer is an objective function that can also provide information about +// its minima. +type minimumer interface { + function + + // Minima returns _known_ minima of the function. + Minima() []Minimum +} + +// Minimum represents information about an optimal location of a function. +type Minimum struct { + // X is the location of the minimum. X may not be nil. + X []float64 + // F is the value of the objective function at X. + F float64 + // Global indicates if the location is a global minimum. + Global bool +} + +type funcTest struct { + X []float64 + + // F is the expected function value at X. + F float64 + // Gradient is the expected gradient at X. If nil, it is not evaluated. + Gradient []float64 +} + +// TODO(vladimir-ch): Decide and implement an exported testing function: +// func Test(f Function, ??? ) ??? { +// } + +const ( + defaultTol = 1e-12 + defaultGradTol = 1e-9 + defaultFDGradTol = 1e-5 +) + +// testFunction checks that the function can evaluate itself (and its gradient) +// correctly. +func testFunction(f function, ftests []funcTest, t *testing.T) { + // Make a copy of tests because we may append to the slice. + tests := make([]funcTest, len(ftests)) + copy(tests, ftests) + + // Get information about the function. 
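+	// Both interface assertions are optional: a test function may provide
+	// neither known minima nor an analytic gradient, in which case only the
+	// tabulated values in ftests are checked.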
+ fMinima, isMinimumer := f.(minimumer) + fGradient, isGradient := f.(gradient) + + // If the function is a Minimumer, append its minima to the tests. + if isMinimumer { + for _, minimum := range fMinima.Minima() { + // Allocate gradient only if the function can evaluate it. + var grad []float64 + if isGradient { + grad = make([]float64, len(minimum.X)) + } + tests = append(tests, funcTest{ + X: minimum.X, + F: minimum.F, + Gradient: grad, + }) + } + } + + for i, test := range tests { + F := f.Func(test.X) + + // Check that the function value is as expected. + if math.Abs(F-test.F) > defaultTol { + t.Errorf("Test #%d: function value given by Func is incorrect. Want: %v, Got: %v", + i, test.F, F) + } + + if test.Gradient == nil { + continue + } + + // Evaluate the finite difference gradient. + fdGrad := fd.Gradient(nil, f.Func, test.X, &fd.Settings{ + Formula: fd.Central, + Step: 1e-6, + }) + + // Check that the finite difference and expected gradients match. + if !floats.EqualApprox(fdGrad, test.Gradient, defaultFDGradTol) { + dist := floats.Distance(fdGrad, test.Gradient, math.Inf(1)) + t.Errorf("Test #%d: numerical and expected gradients do not match. |fdGrad - WantGrad|_∞ = %v", + i, dist) + } + + // If the function is a Gradient, check that it computes the gradient correctly. + if isGradient { + grad := make([]float64, len(test.Gradient)) + fGradient.Grad(grad, test.X) + + if !floats.EqualApprox(grad, test.Gradient, defaultGradTol) { + dist := floats.Distance(grad, test.Gradient, math.Inf(1)) + t.Errorf("Test #%d: gradient given by Grad is incorrect. |grad - WantGrad|_∞ = %v", + i, dist) + } + } + } +} diff --git a/vendor/gonum.org/v1/gonum/optimize/functions/vlse.go b/vendor/gonum.org/v1/gonum/optimize/functions/vlse.go new file mode 100644 index 0000000..16a1ab4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/functions/vlse.go @@ -0,0 +1,395 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package functions + +import "math" + +// This file implements functions from the Virtual Library of Simulation Experiments. +// https://www.sfu.ca/~ssurjano/optimization.html +// In many cases gradients and Hessians have been added. In some cases, these +// are not defined at certain points or manifolds. The gradient in these locations +// has been set to 0. + +// Ackley implements the Ackley function, a function of arbitrary dimension that +// has many local minima. It has a single global minimum of 0 at 0. Its typical +// domain is the hypercube of [-32.768, 32.768]^d. +// f(x) = -20 * exp(-0.2 sqrt(1/d sum_i x_i^2)) - exp(1/d sum_i cos(2π x_i)) + 20 + exp(1) +// where d is the input dimension. +// +// Reference: +// https://www.sfu.ca/~ssurjano/ackley.html (obtained June 2017) +type Ackley struct{} + +func (Ackley) Func(x []float64) float64 { + var ss, sc float64 + for _, v := range x { + ss += v * v + sc += math.Cos(2 * math.Pi * v) + } + id := 1 / float64(len(x)) + return -20*math.Exp(-0.2*math.Sqrt(id*ss)) - math.Exp(id*sc) + 20 + math.E +} + +// Bukin6 implements Bukin's 6th function. The function is two-dimensional, with +// the typical domain as x_0 ∈ [-15, -5], x_1 ∈ [-3, 3]. The function has a unique +// global minimum at [-10, 1], and many local minima. 
+// f(x) = 100 * sqrt(|x_1 - 0.01*x_0^2|) + 0.01*|x_0+10| +// Reference: +// https://www.sfu.ca/~ssurjano/bukin6.html (obtained June 2017) +type Bukin6 struct{} + +func (Bukin6) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + return 100*math.Sqrt(math.Abs(x[1]-0.01*x[0]*x[0])) + 0.01*math.Abs(x[0]+10) +} + +// CamelThree implements the three-hump camel function, a two-dimensional function +// with three local minima, one of which is global. +// The function is given by +// f(x) = 2*x_0^2 - 1.05*x_0^4 + x_0^6/6 + x_0*x_1 + x_1^2 +// with the global minimum at +// x^* = (0, 0) +// f(x^*) = 0 +// The typical domain is x_i ∈ [-5, 5] for all i. +// Reference: +// https://www.sfu.ca/~ssurjano/camel3.html (obtained December 2017) +type CamelThree struct{} + +func (c CamelThree) Func(x []float64) float64 { + if len(x) != 2 { + panic("camelthree: dimension must be 2") + } + x0 := x[0] + x1 := x[1] + x02 := x0 * x0 + x04 := x02 * x02 + return 2*x02 - 1.05*x04 + x04*x02/6 + x0*x1 + x1*x1 +} + +// CamelSix implements the six-hump camel function, a two-dimensional function. +// with six local minima, two of which are global. +// The function is given by +// f(x) = (4 - 2.1*x_0^2 + x_0^4/3)*x_0^2 + x_0*x_1 + (-4 + 4*x_1^2)*x_1^2 +// with the global minima at +// x^* = (0.0898, -0.7126), (-0.0898, 0.7126) +// f(x^*) = -1.0316 +// The typical domain is x_0 ∈ [-3, 3], x_1 ∈ [-2, 2]. +// Reference: +// https://www.sfu.ca/~ssurjano/camel6.html (obtained December 2017) +type CamelSix struct{} + +func (c CamelSix) Func(x []float64) float64 { + if len(x) != 2 { + panic("camelsix: dimension must be 2") + } + x0 := x[0] + x1 := x[1] + x02 := x0 * x0 + x12 := x1 * x1 + return (4-2.1*x02+x02*x02/3)*x02 + x0*x1 + (-4+4*x12)*x12 +} + +// CrossInTray implements the cross-in-tray function. The cross-in-tray function +// is a two-dimensional function with many local minima, and four global minima +// at (±1.3491, ±1.3491). The function is typically evaluated in the square +// [-10,10]^2. +// f(x) = -0.001(|sin(x_0)sin(x_1)exp(|100-sqrt((x_0^2+x_1^2)/π)|)|+1)^0.1 +// Reference: +// https://www.sfu.ca/~ssurjano/crossit.html (obtained June 2017) +type CrossInTray struct{} + +func (CrossInTray) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + exp := math.Abs(100 - math.Sqrt((x0*x0+x1*x1)/math.Pi)) + return -0.0001 * math.Pow(math.Abs(math.Sin(x0)*math.Sin(x1)*math.Exp(exp))+1, 0.1) +} + +// DixonPrice implements the DixonPrice function, a function of arbitrary dimension +// Its typical domain is the hypercube of [-10, 10]^d. +// The function is given by +// f(x) = (x_0-1)^2 + \sum_{i=1}^{d-1} (i+1) * (2*x_i^2-x_{i-1})^2 +// where d is the input dimension. There is a single global minimum, which has +// a location and value of +// x_i^* = 2^{-(2^{i+1}-2)/(2^{i+1})} for i = 0, ..., d-1. +// f(x^*) = 0 +// Reference: +// https://www.sfu.ca/~ssurjano/dixonpr.html (obtained June 2017) +type DixonPrice struct{} + +func (DixonPrice) Func(x []float64) float64 { + xp := x[0] + v := (xp - 1) * (xp - 1) + for i := 1; i < len(x); i++ { + xn := x[i] + tmp := (2*xn*xn - xp) + v += float64(i+1) * tmp * tmp + xp = xn + } + return v +} + +// DropWave implements the drop-wave function, a two-dimensional function with +// many local minima and one global minimum at 0. The function is typically evaluated +// in the square [-5.12, 5.12]^2. 
+// f(x) = - (1+cos(12*sqrt(x0^2+x1^2))) / (0.5*(x0^2+x1^2)+2) +// Reference: +// https://www.sfu.ca/~ssurjano/drop.html (obtained June 2017) +type DropWave struct{} + +func (DropWave) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + num := 1 + math.Cos(12*math.Sqrt(x0*x0+x1*x1)) + den := 0.5*(x0*x0+x1*x1) + 2 + return -num / den +} + +// Eggholder implements the Eggholder function, a two-dimensional function with +// many local minima and one global minimum at [512, 404.2319]. The function +// is typically evaluated in the square [-512, 512]^2. +// f(x) = -(x_1+47)*sin(sqrt(|x_1+x_0/2+47|))-x_1*sin(sqrt(|x_0-(x_1+47)|)) +// Reference: +// https://www.sfu.ca/~ssurjano/egg.html (obtained June 2017) +type Eggholder struct{} + +func (Eggholder) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + return -(x1+47)*math.Sin(math.Sqrt(math.Abs(x1+x0/2+47))) - + x0*math.Sin(math.Sqrt(math.Abs(x0-x1-47))) +} + +// GramacyLee implements the Gramacy-Lee function, a one-dimensional function +// with many local minima. The function is typically evaluated on the domain [0.5, 2.5]. +// f(x) = sin(10πx)/(2x) + (x-1)^4 +// Reference: +// https://www.sfu.ca/~ssurjano/grlee12.html (obtained June 2017) +type GramacyLee struct{} + +func (GramacyLee) Func(x []float64) float64 { + if len(x) != 1 { + panic(badInputDim) + } + x0 := x[0] + return math.Sin(10*math.Pi*x0)/(2*x0) + math.Pow(x0-1, 4) +} + +// Griewank implements the Griewank function, a function of arbitrary dimension that +// has many local minima. It has a single global minimum of 0 at 0. Its typical +// domain is the hypercube of [-600, 600]^d. +// f(x) = \sum_i x_i^2/4000 - \prod_i cos(x_i/sqrt(i)) + 1 +// where d is the input dimension. +// +// Reference: +// https://www.sfu.ca/~ssurjano/griewank.html (obtained June 2017) +type Griewank struct{} + +func (Griewank) Func(x []float64) float64 { + var ss float64 + pc := 1.0 + for i, v := range x { + ss += v * v + pc *= math.Cos(v / math.Sqrt(float64(i+1))) + } + return ss/4000 - pc + 1 +} + +// HolderTable implements the Holder table function. The Holder table function +// is a two-dimensional function with many local minima, and four global minima +// at (±8.05502, ±9.66459). The function is typically evaluated in the square [-10,10]^2. +// f(x) = -|sin(x_0)cos(x1)exp(|1-sqrt(x_0^2+x1^2)/π|)| +// Reference: +// https://www.sfu.ca/~ssurjano/holder.html (obtained June 2017) +type HolderTable struct{} + +func (HolderTable) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + return -math.Abs(math.Sin(x0) * math.Cos(x1) * math.Exp(math.Abs(1-math.Sqrt(x0*x0+x1*x1)/math.Pi))) +} + +// Langermann2 implements the two-dimensional version of the Langermann function. +// The Langermann function has many local minima. The function is typically +// evaluated in the square [0,10]^2. 
+// f(x) = \sum_1^5 c_i exp(-(1/π)\sum_{j=1}^2(x_j-A_{ij})^2) * cos(π\sum_{j=1}^2 (x_j - A_{ij})^2) +// c = [5]float64{1,2,5,2,3} +// A = [5][2]float64{{3,5},{5,2},{2,1},{1,4},{7,9}} +// Reference: +// https://www.sfu.ca/~ssurjano/langer.html (obtained June 2017) +type Langermann2 struct{} + +func (Langermann2) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + var ( + c = [5]float64{1, 2, 5, 2, 3} + A = [5][2]float64{{3, 5}, {5, 2}, {2, 1}, {1, 4}, {7, 9}} + ) + var f float64 + for i, cv := range c { + var ss float64 + for j, av := range A[i] { + xja := x[j] - av + ss += xja * xja + } + f += cv * math.Exp(-(1/math.Pi)*ss) * math.Cos(math.Pi*ss) + } + return f +} + +// Levy implements the Levy function, a function of arbitrary dimension that +// has many local minima. It has a single global minimum of 0 at 1. Its typical +// domain is the hypercube of [-10, 10]^d. +// f(x) = sin^2(π*w_0) + \sum_{i=0}^{d-2}(w_i-1)^2*[1+10sin^2(π*w_i+1)] + +// (w_{d-1}-1)^2*[1+sin^2(2π*w_{d-1})] +// w_i = 1 + (x_i-1)/4 +// where d is the input dimension. +// +// Reference: +// https://www.sfu.ca/~ssurjano/levy.html (obtained June 2017) +type Levy struct{} + +func (Levy) Func(x []float64) float64 { + w1 := 1 + (x[0]-1)/4 + s1 := math.Sin(math.Pi * w1) + sum := s1 * s1 + for i := 0; i < len(x)-1; i++ { + wi := 1 + (x[i]-1)/4 + s := math.Sin(math.Pi*wi + 1) + sum += (wi - 1) * (wi - 1) * (1 + 10*s*s) + } + wd := 1 + (x[len(x)-1]-1)/4 + sd := math.Sin(2 * math.Pi * wd) + return sum + (wd-1)*(wd-1)*(1+sd*sd) +} + +// Levy13 implements the Levy-13 function, a two-dimensional function +// with many local minima. It has a single global minimum of 0 at 1. Its typical +// domain is the square [-10, 10]^2. +// f(x) = sin^2(3π*x_0) + (x_0-1)^2*[1+sin^2(3π*x_1)] + (x_1-1)^2*[1+sin^2(2π*x_1)] +// Reference: +// https://www.sfu.ca/~ssurjano/levy13.html (obtained June 2017) +type Levy13 struct{} + +func (Levy13) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + s0 := math.Sin(3 * math.Pi * x0) + s1 := math.Sin(3 * math.Pi * x1) + s2 := math.Sin(2 * math.Pi * x1) + return s0*s0 + (x0-1)*(x0-1)*(1+s1*s1) + (x1-1)*(x1-1)*(1+s2*s2) +} + +// Rastrigin implements the Rastrigen function, a function of arbitrary dimension +// that has many local minima. It has a single global minimum of 0 at 0. Its typical +// domain is the hypercube of [-5.12, 5.12]^d. +// f(x) = 10d + \sum_i [x_i^2 - 10cos(2π*x_i)] +// where d is the input dimension. +// +// Reference: +// https://www.sfu.ca/~ssurjano/rastr.html (obtained June 2017) +type Rastrigin struct{} + +func (Rastrigin) Func(x []float64) float64 { + sum := 10 * float64(len(x)) + for _, v := range x { + sum += v*v - 10*math.Cos(2*math.Pi*v) + } + return sum +} + +// Schaffer2 implements the second Schaffer function, a two-dimensional function +// with many local minima. It has a single global minimum of 0 at 0. Its typical +// domain is the square [-100, 100]^2. +// f(x) = 0.5 + (sin^2(x_0^2-x_1^2)-0.5) / (1+0.001*(x_0^2+x_1^2))^2 +// Reference: +// https://www.sfu.ca/~ssurjano/schaffer2.html (obtained June 2017) +type Schaffer2 struct{} + +func (Schaffer2) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + s := math.Sin(x0*x0 - x1*x1) + den := 1 + 0.001*(x0*x0+x1*x1) + return 0.5 + (s*s-0.5)/(den*den) +} + +// Schaffer4 implements the fourth Schaffer function, a two-dimensional function +// with many local minima. Its typical domain is the square [-100, 100]^2. 
+// f(x) = 0.5 + (cos(sin(|x_0^2-x_1^2|))-0.5) / (1+0.001*(x_0^2+x_1^2))^2 +// Reference: +// https://www.sfu.ca/~ssurjano/schaffer4.html (obtained June 2017) +type Schaffer4 struct{} + +func (Schaffer4) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + den := 1 + 0.001*(x0*x0+x1*x1) + return 0.5 + (math.Cos(math.Sin(math.Abs(x0*x0-x1*x1)))-0.5)/(den*den) +} + +// Schwefel implements the Schwefel function, a function of arbitrary dimension +// that has many local minima. Its typical domain is the hypercube of [-500, 500]^d. +// f(x) = 418.9829*d - \sum_i x_i*sin(sqrt(|x_i|)) +// where d is the input dimension. +// +// Reference: +// https://www.sfu.ca/~ssurjano/schwef.html (obtained June 2017) +type Schwefel struct{} + +func (Schwefel) Func(x []float64) float64 { + var sum float64 + for _, v := range x { + sum += v * math.Sin(math.Sqrt(math.Abs(v))) + } + return 418.9829*float64(len(x)) - sum +} + +// Shubert implements the Shubert function, a two-dimensional function +// with many local minima and many global minima. Its typical domain is the +// square [-10, 10]^2. +// f(x) = (sum_{i=1}^5 i cos((i+1)*x_0+i)) * (\sum_{i=1}^5 i cos((i+1)*x_1+i)) +// Reference: +// https://www.sfu.ca/~ssurjano/shubert.html (obtained June 2017) +type Shubert struct{} + +func (Shubert) Func(x []float64) float64 { + if len(x) != 2 { + panic(badInputDim) + } + x0 := x[0] + x1 := x[1] + var s0, s1 float64 + for i := 1.0; i <= 5.0; i++ { + s0 += i * math.Cos((i+1)*x0+i) + s1 += i * math.Cos((i+1)*x1+i) + } + return s0 * s1 +} diff --git a/vendor/gonum.org/v1/gonum/optimize/gradientdescent.go b/vendor/gonum.org/v1/gonum/optimize/gradientdescent.go new file mode 100644 index 0000000..4f1371f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/gradientdescent.go @@ -0,0 +1,95 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import "gonum.org/v1/gonum/floats" + +var ( + _ Method = (*GradientDescent)(nil) + _ localMethod = (*GradientDescent)(nil) +) + +// GradientDescent implements the steepest descent optimization method that +// performs successive steps along the direction of the negative gradient. +type GradientDescent struct { + // Linesearcher selects suitable steps along the descent direction. + // If Linesearcher is nil, a reasonable default will be chosen. + Linesearcher Linesearcher + // StepSizer determines the initial step size along each direction. + // If StepSizer is nil, a reasonable default will be chosen. + StepSizer StepSizer + // GradStopThreshold sets the threshold for stopping if the gradient norm + // gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and + // if it is NaN the setting is not used. 
+ GradStopThreshold float64 + + ls *LinesearchMethod + + status Status + err error +} + +func (g *GradientDescent) Status() (Status, error) { + return g.status, g.err +} + +func (*GradientDescent) Uses(has Available) (uses Available, err error) { + return has.gradient() +} + +func (g *GradientDescent) Init(dim, tasks int) int { + g.status = NotTerminated + g.err = nil + return 1 +} + +func (g *GradientDescent) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + g.status, g.err = localOptimizer{}.run(g, g.GradStopThreshold, operation, result, tasks) + close(operation) + return +} + +func (g *GradientDescent) initLocal(loc *Location) (Operation, error) { + if g.Linesearcher == nil { + g.Linesearcher = &Backtracking{} + } + if g.StepSizer == nil { + g.StepSizer = &QuadraticStepSize{} + } + + if g.ls == nil { + g.ls = &LinesearchMethod{} + } + g.ls.Linesearcher = g.Linesearcher + g.ls.NextDirectioner = g + + return g.ls.Init(loc) +} + +func (g *GradientDescent) iterateLocal(loc *Location) (Operation, error) { + return g.ls.Iterate(loc) +} + +func (g *GradientDescent) InitDirection(loc *Location, dir []float64) (stepSize float64) { + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + return g.StepSizer.Init(loc, dir) +} + +func (g *GradientDescent) NextDirection(loc *Location, dir []float64) (stepSize float64) { + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + return g.StepSizer.StepSize(loc, dir) +} + +func (*GradientDescent) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{true, false} +} diff --git a/vendor/gonum.org/v1/gonum/optimize/guessandcheck.go b/vendor/gonum.org/v1/gonum/optimize/guessandcheck.go new file mode 100644 index 0000000..35d2792 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/guessandcheck.go @@ -0,0 +1,92 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/stat/distmv" +) + +var _ Method = (*GuessAndCheck)(nil) + +// GuessAndCheck is a global optimizer that evaluates the function at random +// locations. Not a good optimizer, but useful for comparison and debugging. +type GuessAndCheck struct { + Rander distmv.Rander + + bestF float64 + bestX []float64 +} + +func (*GuessAndCheck) Uses(has Available) (uses Available, err error) { + return has.function() +} + +func (g *GuessAndCheck) Init(dim, tasks int) int { + if dim <= 0 { + panic(nonpositiveDimension) + } + if tasks < 0 { + panic(negativeTasks) + } + g.bestF = math.Inf(1) + g.bestX = resize(g.bestX, dim) + return tasks +} + +func (g *GuessAndCheck) sendNewLoc(operation chan<- Task, task Task) { + g.Rander.Rand(task.X) + task.Op = FuncEvaluation + operation <- task +} + +func (g *GuessAndCheck) updateMajor(operation chan<- Task, task Task) { + // Update the best value seen so far, and send a MajorIteration. + if task.F < g.bestF { + g.bestF = task.F + copy(g.bestX, task.X) + } else { + task.F = g.bestF + copy(task.X, g.bestX) + } + task.Op = MajorIteration + operation <- task +} + +func (g *GuessAndCheck) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + // Send initial tasks to evaluate + for _, task := range tasks { + g.sendNewLoc(operation, task) + } + + // Read from the channel until PostIteration is sent. 
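+	// Every FuncEvaluation result is folded into the running best by
+	// updateMajor, which replies with a MajorIteration; each MajorIteration
+	// echoed back by the caller triggers a new random location.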
+Loop: + for { + task := <-result + switch task.Op { + default: + panic("unknown operation") + case PostIteration: + break Loop + case MajorIteration: + g.sendNewLoc(operation, task) + case FuncEvaluation: + g.updateMajor(operation, task) + } + } + + // PostIteration was sent. Update the best new values. + for task := range result { + switch task.Op { + default: + panic("unknown operation") + case MajorIteration: + case FuncEvaluation: + g.updateMajor(operation, task) + } + } + close(operation) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/interfaces.go b/vendor/gonum.org/v1/gonum/optimize/interfaces.go new file mode 100644 index 0000000..e28fa00 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/interfaces.go @@ -0,0 +1,130 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +// A localMethod can optimize an objective function. +// +// It uses a reverse-communication interface between the optimization method +// and the caller. Method acts as a client that asks the caller to perform +// needed operations via Operation returned from Init and Iterate methods. +// This provides independence of the optimization algorithm on user-supplied +// data and their representation, and enables automation of common operations +// like checking for (various types of) convergence and maintaining statistics. +// +// A Method can command an Evaluation, a MajorIteration or NoOperation operations. +// +// An evaluation operation is one or more of the Evaluation operations +// (FuncEvaluation, GradEvaluation, etc.) which can be combined with +// the bitwise or operator. In an evaluation operation, the requested fields of +// Problem will be evaluated at the point specified in Location.X. +// The corresponding fields of Location will be filled with the results that +// can be retrieved upon the next call to Iterate. The Method interface +// requires that entries of Location are not modified aside from the commanded +// evaluations. Thus, the type implementing Method may use multiple Operations +// to set the Location fields at a particular x value. +// +// Instead of an Evaluation, a Method may declare MajorIteration. In +// a MajorIteration, the values in the fields of Location are treated as +// a potential optimizer. The convergence of the optimization routine +// (GradientThreshold, etc.) is checked at this new best point. In +// a MajorIteration, the fields of Location must be valid and consistent. +// +// A Method must not return InitIteration and PostIteration operations. These are +// reserved for the clients to be passed to Recorders. A Method must also not +// combine the Evaluation operations with the Iteration operations. +type localMethod interface { + // Init initializes the method based on the initial data in loc, updates it + // and returns the first operation to be carried out by the caller. + // The initial location must be valid as specified by Needs. + initLocal(loc *Location) (Operation, error) + + // Iterate retrieves data from loc, performs one iteration of the method, + // updates loc and returns the next operation. + iterateLocal(loc *Location) (Operation, error) + + needser +} + +type needser interface { + // needs specifies information about the objective function needed by the + // optimizer beyond just the function value. 
The information is used + // internally for initialization and must match evaluation types returned + // by Init and Iterate during the optimization process. + needs() struct { + Gradient bool + Hessian bool + } +} + +// Statuser can report the status and any error. It is intended for methods as +// an additional error reporting mechanism apart from the errors returned from +// Init and Iterate. +type Statuser interface { + Status() (Status, error) +} + +// Linesearcher is a type that can perform a line search. It tries to find an +// (approximate) minimum of the objective function along the search direction +// dir_k starting at the most recent location x_k, i.e., it tries to minimize +// the function +// φ(step) := f(x_k + step * dir_k) where step > 0. +// Typically, a Linesearcher will be used in conjunction with LinesearchMethod +// for performing gradient-based optimization through sequential line searches. +type Linesearcher interface { + // Init initializes the Linesearcher and a new line search. Value and + // derivative contain φ(0) and φ'(0), respectively, and step contains the + // first trial step length. It returns an Operation that must be one of + // FuncEvaluation, GradEvaluation, FuncEvaluation|GradEvaluation. The + // caller must evaluate φ(step), φ'(step), or both, respectively, and pass + // the result to Linesearcher in value and derivative arguments to Iterate. + Init(value, derivative float64, step float64) Operation + + // Iterate takes in the values of φ and φ' evaluated at the previous step + // and returns the next operation. + // + // If op is one of FuncEvaluation, GradEvaluation, + // FuncEvaluation|GradEvaluation, the caller must evaluate φ(step), + // φ'(step), or both, respectively, and pass the result to Linesearcher in + // value and derivative arguments on the next call to Iterate. + // + // If op is MajorIteration, a sufficiently accurate minimum of φ has been + // found at the previous step and the line search has concluded. Init must + // be called again to initialize a new line search. + // + // If err is nil, op must not specify another operation. If err is not nil, + // the values of op and step are undefined. + Iterate(value, derivative float64) (op Operation, step float64, err error) +} + +// NextDirectioner implements a strategy for computing a new line search +// direction at each major iteration. Typically, a NextDirectioner will be +// used in conjunction with LinesearchMethod for performing gradient-based +// optimization through sequential line searches. +type NextDirectioner interface { + // InitDirection initializes the NextDirectioner at the given starting location, + // putting the initial direction in place into dir, and returning the initial + // step size. InitDirection must not modify Location. + InitDirection(loc *Location, dir []float64) (step float64) + + // NextDirection updates the search direction and step size. Location is + // the location seen at the conclusion of the most recent linesearch. The + // next search direction is put in place into dir, and the next step size + // is returned. NextDirection must not modify Location. + NextDirection(loc *Location, dir []float64) (step float64) +} + +// StepSizer can set the next step size of the optimization given the last Location. +// Returned step size must be positive. 
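+// As used by GradientDescent in this package, Init proposes the step size
+// along the initial descent direction and StepSize proposes the step size
+// along each subsequent direction.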
+type StepSizer interface { + Init(loc *Location, dir []float64) float64 + StepSize(loc *Location, dir []float64) float64 +} + +// A Recorder can record the progress of the optimization, for example to print +// the progress to StdOut or to a log file. A Recorder must not modify any data. +type Recorder interface { + Init() error + Record(*Location, Operation, *Stats) error +} diff --git a/vendor/gonum.org/v1/gonum/optimize/lbfgs.go b/vendor/gonum.org/v1/gonum/optimize/lbfgs.go new file mode 100644 index 0000000..02359da --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/lbfgs.go @@ -0,0 +1,199 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "gonum.org/v1/gonum/floats" +) + +var ( + _ Method = (*LBFGS)(nil) + _ localMethod = (*LBFGS)(nil) +) + +// LBFGS implements the limited-memory BFGS method for gradient-based +// unconstrained minimization. +// +// It stores a modified version of the inverse Hessian approximation H +// implicitly from the last Store iterations while the normal BFGS method +// stores and manipulates H directly as a dense matrix. Therefore LBFGS is more +// appropriate than BFGS for large problems as the cost of LBFGS scales as +// O(Store * dim) while BFGS scales as O(dim^2). The "forgetful" nature of +// LBFGS may also make it perform better than BFGS for functions with Hessians +// that vary rapidly spatially. +type LBFGS struct { + // Linesearcher selects suitable steps along the descent direction. + // Accepted steps should satisfy the strong Wolfe conditions. + // If Linesearcher is nil, a reasonable default will be chosen. + Linesearcher Linesearcher + // Store is the size of the limited-memory storage. + // If Store is 0, it will be defaulted to 15. + Store int + // GradStopThreshold sets the threshold for stopping if the gradient norm + // gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and + // if it is NaN the setting is not used. 
+ GradStopThreshold float64 + + status Status + err error + + ls *LinesearchMethod + + dim int // Dimension of the problem + x []float64 // Location at the last major iteration + grad []float64 // Gradient at the last major iteration + + // History + oldest int // Index of the oldest element of the history + y [][]float64 // Last Store values of y + s [][]float64 // Last Store values of s + rho []float64 // Last Store values of rho + a []float64 // Cache of Hessian updates +} + +func (l *LBFGS) Status() (Status, error) { + return l.status, l.err +} + +func (*LBFGS) Uses(has Available) (uses Available, err error) { + return has.gradient() +} + +func (l *LBFGS) Init(dim, tasks int) int { + l.status = NotTerminated + l.err = nil + return 1 +} + +func (l *LBFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + l.status, l.err = localOptimizer{}.run(l, l.GradStopThreshold, operation, result, tasks) + close(operation) + return +} + +func (l *LBFGS) initLocal(loc *Location) (Operation, error) { + if l.Linesearcher == nil { + l.Linesearcher = &Bisection{} + } + if l.Store == 0 { + l.Store = 15 + } + + if l.ls == nil { + l.ls = &LinesearchMethod{} + } + l.ls.Linesearcher = l.Linesearcher + l.ls.NextDirectioner = l + + return l.ls.Init(loc) +} + +func (l *LBFGS) iterateLocal(loc *Location) (Operation, error) { + return l.ls.Iterate(loc) +} + +func (l *LBFGS) InitDirection(loc *Location, dir []float64) (stepSize float64) { + dim := len(loc.X) + l.dim = dim + l.oldest = 0 + + l.a = resize(l.a, l.Store) + l.rho = resize(l.rho, l.Store) + l.y = l.initHistory(l.y) + l.s = l.initHistory(l.s) + + l.x = resize(l.x, dim) + copy(l.x, loc.X) + + l.grad = resize(l.grad, dim) + copy(l.grad, loc.Gradient) + + copy(dir, loc.Gradient) + floats.Scale(-1, dir) + return 1 / floats.Norm(dir, 2) +} + +func (l *LBFGS) initHistory(hist [][]float64) [][]float64 { + c := cap(hist) + if c < l.Store { + n := make([][]float64, l.Store-c) + hist = append(hist[:c], n...) + } + hist = hist[:l.Store] + for i := range hist { + hist[i] = resize(hist[i], l.dim) + for j := range hist[i] { + hist[i][j] = 0 + } + } + return hist +} + +func (l *LBFGS) NextDirection(loc *Location, dir []float64) (stepSize float64) { + // Uses two-loop correction as described in + // Nocedal, J., Wright, S.: Numerical Optimization (2nd ed). Springer (2006), chapter 7, page 178. + + if len(loc.X) != l.dim { + panic("lbfgs: unexpected size mismatch") + } + if len(loc.Gradient) != l.dim { + panic("lbfgs: unexpected size mismatch") + } + if len(dir) != l.dim { + panic("lbfgs: unexpected size mismatch") + } + + y := l.y[l.oldest] + floats.SubTo(y, loc.Gradient, l.grad) + s := l.s[l.oldest] + floats.SubTo(s, loc.X, l.x) + sDotY := floats.Dot(s, y) + l.rho[l.oldest] = 1 / sDotY + + l.oldest = (l.oldest + 1) % l.Store + + copy(l.x, loc.X) + copy(l.grad, loc.Gradient) + copy(dir, loc.Gradient) + + // Start with the most recent element and go backward, + for i := 0; i < l.Store; i++ { + idx := l.oldest - i - 1 + if idx < 0 { + idx += l.Store + } + l.a[idx] = l.rho[idx] * floats.Dot(l.s[idx], dir) + floats.AddScaled(dir, -l.a[idx], l.y[idx]) + } + + // Scale the initial Hessian. + gamma := sDotY / floats.Dot(y, y) + floats.Scale(gamma, dir) + + // Start with the oldest element and go forward. 
+ for i := 0; i < l.Store; i++ { + idx := i + l.oldest + if idx >= l.Store { + idx -= l.Store + } + beta := l.rho[idx] * floats.Dot(l.y[idx], dir) + floats.AddScaled(dir, l.a[idx]-beta, l.s[idx]) + } + + // dir contains H^{-1} * g, so flip the direction for minimization. + floats.Scale(-1, dir) + + return 1 +} + +func (*LBFGS) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{true, false} +} diff --git a/vendor/gonum.org/v1/gonum/optimize/linesearch.go b/vendor/gonum.org/v1/gonum/optimize/linesearch.go new file mode 100644 index 0000000..740fcf6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/linesearch.go @@ -0,0 +1,218 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/floats" +) + +// LinesearchMethod represents an abstract optimization method in which a +// function is optimized through successive line search optimizations. +type LinesearchMethod struct { + // NextDirectioner specifies the search direction of each linesearch. + NextDirectioner NextDirectioner + // Linesearcher performs a linesearch along the search direction. + Linesearcher Linesearcher + + x []float64 // Starting point for the current iteration. + dir []float64 // Search direction for the current iteration. + + first bool // Indicator of the first iteration. + nextMajor bool // Indicates that MajorIteration must be commanded at the next call to Iterate. + eval Operation // Indicator of valid fields in Location. + + lastStep float64 // Step taken from x in the previous call to Iterate. + lastOp Operation // Operation returned from the previous call to Iterate. +} + +func (ls *LinesearchMethod) Init(loc *Location) (Operation, error) { + if loc.Gradient == nil { + panic("linesearch: gradient is nil") + } + + dim := len(loc.X) + ls.x = resize(ls.x, dim) + ls.dir = resize(ls.dir, dim) + + ls.first = true + ls.nextMajor = false + + // Indicate that all fields of loc are valid. + ls.eval = FuncEvaluation | GradEvaluation + if loc.Hessian != nil { + ls.eval |= HessEvaluation + } + + ls.lastStep = math.NaN() + ls.lastOp = NoOperation + + return ls.initNextLinesearch(loc) +} + +func (ls *LinesearchMethod) Iterate(loc *Location) (Operation, error) { + switch ls.lastOp { + case NoOperation: + // TODO(vladimir-ch): Either Init has not been called, or the caller is + // trying to resume the optimization run after Iterate previously + // returned with an error. Decide what is the proper thing to do. See also #125. + + case MajorIteration: + // The previous updated location did not converge the full + // optimization. Initialize a new Linesearch. + return ls.initNextLinesearch(loc) + + default: + // Update the indicator of valid fields of loc. + ls.eval |= ls.lastOp + + if ls.nextMajor { + ls.nextMajor = false + + // Linesearcher previously finished, and the invalid fields of loc + // have now been validated. Announce MajorIteration. + ls.lastOp = MajorIteration + return ls.lastOp, nil + } + } + + // Continue the linesearch. + + f := math.NaN() + if ls.eval&FuncEvaluation != 0 { + f = loc.F + } + projGrad := math.NaN() + if ls.eval&GradEvaluation != 0 { + projGrad = floats.Dot(loc.Gradient, ls.dir) + } + op, step, err := ls.Linesearcher.Iterate(f, projGrad) + if err != nil { + return ls.error(err) + } + + switch op { + case MajorIteration: + // Linesearch has been finished. 
+ + ls.lastOp = complementEval(loc, ls.eval) + if ls.lastOp == NoOperation { + // loc is complete, MajorIteration can be declared directly. + ls.lastOp = MajorIteration + } else { + // Declare MajorIteration on the next call to Iterate. + ls.nextMajor = true + } + + case FuncEvaluation, GradEvaluation, FuncEvaluation | GradEvaluation: + if step != ls.lastStep { + // We are moving to a new location, and not, say, evaluating extra + // information at the current location. + + // Compute the next evaluation point and store it in loc.X. + floats.AddScaledTo(loc.X, ls.x, step, ls.dir) + if floats.Equal(ls.x, loc.X) { + // Step size has become so small that the next evaluation point is + // indistinguishable from the starting point for the current + // iteration due to rounding errors. + return ls.error(ErrNoProgress) + } + ls.lastStep = step + ls.eval = NoOperation // Indicate all invalid fields of loc. + } + ls.lastOp = op + + default: + panic("linesearch: Linesearcher returned invalid operation") + } + + return ls.lastOp, nil +} + +func (ls *LinesearchMethod) error(err error) (Operation, error) { + ls.lastOp = NoOperation + return ls.lastOp, err +} + +// initNextLinesearch initializes the next linesearch using the previous +// complete location stored in loc. It fills loc.X and returns an evaluation +// to be performed at loc.X. +func (ls *LinesearchMethod) initNextLinesearch(loc *Location) (Operation, error) { + copy(ls.x, loc.X) + + var step float64 + if ls.first { + ls.first = false + step = ls.NextDirectioner.InitDirection(loc, ls.dir) + } else { + step = ls.NextDirectioner.NextDirection(loc, ls.dir) + } + + projGrad := floats.Dot(loc.Gradient, ls.dir) + if projGrad >= 0 { + return ls.error(ErrNonDescentDirection) + } + + op := ls.Linesearcher.Init(loc.F, projGrad, step) + switch op { + case FuncEvaluation, GradEvaluation, FuncEvaluation | GradEvaluation: + default: + panic("linesearch: Linesearcher returned invalid operation") + } + + floats.AddScaledTo(loc.X, ls.x, step, ls.dir) + if floats.Equal(ls.x, loc.X) { + // Step size is so small that the next evaluation point is + // indistinguishable from the starting point for the current iteration + // due to rounding errors. + return ls.error(ErrNoProgress) + } + + ls.lastStep = step + ls.eval = NoOperation // Invalidate all fields of loc. + + ls.lastOp = op + return ls.lastOp, nil +} + +// ArmijoConditionMet returns true if the Armijo condition (aka sufficient +// decrease) has been met. Under normal conditions, the following should be +// true, though this is not enforced: +// - initGrad < 0 +// - step > 0 +// - 0 < decrease < 1 +func ArmijoConditionMet(currObj, initObj, initGrad, step, decrease float64) bool { + return currObj <= initObj+decrease*step*initGrad +} + +// StrongWolfeConditionsMet returns true if the strong Wolfe conditions have been met. +// The strong Wolfe conditions ensure sufficient decrease in the function +// value, and sufficient decrease in the magnitude of the projected gradient. +// Under normal conditions, the following should be true, though this is not +// enforced: +// - initGrad < 0 +// - step > 0 +// - 0 <= decrease < curvature < 1 +func StrongWolfeConditionsMet(currObj, currGrad, initObj, initGrad, step, decrease, curvature float64) bool { + if currObj > initObj+decrease*step*initGrad { + return false + } + return math.Abs(currGrad) < curvature*math.Abs(initGrad) +} + +// WeakWolfeConditionsMet returns true if the weak Wolfe conditions have been met. 
+// The weak Wolfe conditions ensure sufficient decrease in the function value, +// and sufficient decrease in the value of the projected gradient. Under normal +// conditions, the following should be true, though this is not enforced: +// - initGrad < 0 +// - step > 0 +// - 0 <= decrease < curvature< 1 +func WeakWolfeConditionsMet(currObj, currGrad, initObj, initGrad, step, decrease, curvature float64) bool { + if currObj > initObj+decrease*step*initGrad { + return false + } + return currGrad >= curvature*initGrad +} diff --git a/vendor/gonum.org/v1/gonum/optimize/listsearch.go b/vendor/gonum.org/v1/gonum/optimize/listsearch.go new file mode 100644 index 0000000..78330ec --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/listsearch.go @@ -0,0 +1,123 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/mat" +) + +var _ Method = (*ListSearch)(nil) + +// ListSearch finds the optimum location from a specified list of possible +// optimum locations. +type ListSearch struct { + // Locs is the list of locations to optimize. Each row of Locs is a location + // to optimize. The number of columns of Locs must match the dimensions + // passed to InitGlobal, and Locs must have at least one row. + Locs mat.Matrix + + eval int + rows int + bestF float64 + bestIdx int +} + +func (*ListSearch) Uses(has Available) (uses Available, err error) { + return has.function() +} + +// InitGlobal initializes the method for optimization. The input dimension +// must match the number of columns of Locs. +func (l *ListSearch) Init(dim, tasks int) int { + if dim <= 0 { + panic(nonpositiveDimension) + } + if tasks < 0 { + panic(negativeTasks) + } + r, c := l.Locs.Dims() + if r == 0 { + panic("listsearch: list matrix has no rows") + } + if c != dim { + panic("listsearch: supplied dimension does not match list columns") + } + l.eval = 0 + l.rows = r + l.bestF = math.Inf(1) + l.bestIdx = -1 + return min(r, tasks) +} + +func (l *ListSearch) sendNewLoc(operation chan<- Task, task Task) { + task.Op = FuncEvaluation + task.ID = l.eval + mat.Row(task.X, l.eval, l.Locs) + l.eval++ + operation <- task +} + +func (l *ListSearch) updateMajor(operation chan<- Task, task Task) { + // Update the best value seen so far, and send a MajorIteration. + if task.F < l.bestF { + l.bestF = task.F + l.bestIdx = task.ID + } else { + task.F = l.bestF + mat.Row(task.X, l.bestIdx, l.Locs) + } + task.Op = MajorIteration + operation <- task +} + +func (l *ListSearch) Status() (Status, error) { + if l.eval < l.rows { + return NotTerminated, nil + } + return MethodConverge, nil +} + +func (l *ListSearch) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + // Send initial tasks to evaluate + for _, task := range tasks { + l.sendNewLoc(operation, task) + } + // Read from the channel until PostIteration is sent or until the list of + // tasks is exhausted. +Loop: + for { + task := <-result + switch task.Op { + default: + panic("unknown operation") + case PostIteration: + break Loop + case MajorIteration: + if l.eval == l.rows { + task.Op = MethodDone + operation <- task + continue + } + l.sendNewLoc(operation, task) + case FuncEvaluation: + l.updateMajor(operation, task) + } + } + + // Post iteration was sent, or the list has been completed. Read in the final + // list of tasks. 
+ for task := range result { + switch task.Op { + default: + panic("unknown operation") + case MajorIteration: + case FuncEvaluation: + l.updateMajor(operation, task) + } + } + close(operation) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/local.go b/vendor/gonum.org/v1/gonum/optimize/local.go new file mode 100644 index 0000000..27177e7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/local.go @@ -0,0 +1,146 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/floats" +) + +// localOptimizer is a helper type for running an optimization using a LocalMethod. +type localOptimizer struct{} + +// run controls the optimization run for a localMethod. The calling method +// must close the operation channel at the conclusion of the optimization. This +// provides a happens before relationship between the return of status and the +// closure of operation, and thus a call to method.Status (if necessary). +func (l localOptimizer) run(method localMethod, gradThresh float64, operation chan<- Task, result <-chan Task, tasks []Task) (Status, error) { + // Local methods start with a fully-specified initial location. + task := tasks[0] + task = l.initialLocation(operation, result, task, method) + if task.Op == PostIteration { + l.finish(operation, result) + return NotTerminated, nil + } + status, err := l.checkStartingLocation(task, gradThresh) + if err != nil { + l.finishMethodDone(operation, result, task) + return status, err + } + + // Send a major iteration with the starting location. + task.Op = MajorIteration + operation <- task + task = <-result + if task.Op == PostIteration { + l.finish(operation, result) + return NotTerminated, nil + } + op, err := method.initLocal(task.Location) + if err != nil { + l.finishMethodDone(operation, result, task) + return Failure, err + } + task.Op = op + operation <- task +Loop: + for { + r := <-result + switch r.Op { + case PostIteration: + break Loop + case MajorIteration: + // The last operation was a MajorIteration. Check if the gradient + // is below the threshold. + if status := l.checkGradientConvergence(r.Gradient, gradThresh); status != NotTerminated { + l.finishMethodDone(operation, result, task) + return GradientThreshold, nil + } + fallthrough + default: + op, err := method.iterateLocal(r.Location) + if err != nil { + l.finishMethodDone(operation, result, r) + return Failure, err + } + r.Op = op + operation <- r + } + } + l.finish(operation, result) + return NotTerminated, nil +} + +// initialOperation returns the Operation needed to fill the initial location +// based on the needs of the method and the values already supplied. +func (localOptimizer) initialOperation(task Task, n needser) Operation { + var newOp Operation + op := task.Op + if op&FuncEvaluation == 0 { + newOp |= FuncEvaluation + } + needs := n.needs() + if needs.Gradient && op&GradEvaluation == 0 { + newOp |= GradEvaluation + } + if needs.Hessian && op&HessEvaluation == 0 { + newOp |= HessEvaluation + } + return newOp +} + +// initialLocation fills the initial location based on the needs of the method. +// The task passed to initialLocation should be the first task sent in RunGlobal. 
+func (l localOptimizer) initialLocation(operation chan<- Task, result <-chan Task, task Task, needs needser) Task { + task.Op = l.initialOperation(task, needs) + operation <- task + return <-result +} + +func (l localOptimizer) checkStartingLocation(task Task, gradThresh float64) (Status, error) { + if math.IsInf(task.F, 1) || math.IsNaN(task.F) { + return Failure, ErrFunc(task.F) + } + for i, v := range task.Gradient { + if math.IsInf(v, 0) || math.IsNaN(v) { + return Failure, ErrGrad{Grad: v, Index: i} + } + } + status := l.checkGradientConvergence(task.Gradient, gradThresh) + return status, nil +} + +func (localOptimizer) checkGradientConvergence(gradient []float64, gradThresh float64) Status { + if gradient == nil || math.IsNaN(gradThresh) { + return NotTerminated + } + if gradThresh == 0 { + gradThresh = defaultGradientAbsTol + } + if norm := floats.Norm(gradient, math.Inf(1)); norm < gradThresh { + return GradientThreshold + } + return NotTerminated +} + +// finish completes the channel operations to finish an optimization. +func (localOptimizer) finish(operation chan<- Task, result <-chan Task) { + // Guarantee that result is closed before operation is closed. + for range result { + } +} + +// finishMethodDone sends a MethodDone signal on operation, reads the result, +// and completes the channel operations to finish an optimization. +func (l localOptimizer) finishMethodDone(operation chan<- Task, result <-chan Task, task Task) { + task.Op = MethodDone + operation <- task + task = <-result + if task.Op != PostIteration { + panic("optimize: task should have returned post iteration") + } + l.finish(operation, result) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/minimize.go b/vendor/gonum.org/v1/gonum/optimize/minimize.go new file mode 100644 index 0000000..34e14c6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/minimize.go @@ -0,0 +1,586 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "fmt" + "math" + "time" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +const ( + nonpositiveDimension string = "optimize: non-positive input dimension" + negativeTasks string = "optimize: negative input number of tasks" +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// Task is a type to communicate between the Method and the outer +// calling script. +type Task struct { + ID int + Op Operation + *Location +} + +// Location represents a location in the optimization procedure. +type Location struct { + X []float64 + F float64 + Gradient []float64 + Hessian *mat.SymDense +} + +// Method is a type which can search for an optimum of an objective function. +type Method interface { + // Init initializes the method for optimization. The inputs are + // the problem dimension and number of available concurrent tasks. + // + // Init returns the number of concurrent processes to use, which must be + // less than or equal to tasks. + Init(dim, tasks int) (concurrent int) + // Run runs an optimization. The method sends Tasks on + // the operation channel (for performing function evaluations, major + // iterations, etc.). The result of the tasks will be returned on Result. + // See the documentation for Operation types for the possible operations. + // + // The caller of Run will signal the termination of the optimization + // (i.e. 
convergence from user settings) by sending a task with a PostIteration + // Op field on result. More tasks may still be sent on operation after this + // occurs, but only MajorIteration operations will still be conducted + // appropriately. Thus, it can not be guaranteed that all Evaluations sent + // on operation will be evaluated, however if an Evaluation is started, + // the results of that evaluation will be sent on results. + // + // The Method must read from the result channel until it is closed. + // During this, the Method may want to send new MajorIteration(s) on + // operation. Method then must close operation, and return from Run. + // These steps must establish a "happens-before" relationship between result + // being closed (externally) and Run closing operation, for example + // by using a range loop to read from result even if no results are expected. + // + // The last parameter to Run is a slice of tasks with length equal to + // the return from Init. Task has an ID field which may be + // set and modified by Method, and must not be modified by the caller. + // The first element of tasks contains information about the initial location. + // The Location.X field is always valid. The Operation field specifies which + // other values of Location are known. If Operation == NoOperation, none of + // the values should be used, otherwise the Evaluation operations will be + // composed to specify the valid fields. Methods are free to use or + // ignore these values. + // + // Successful execution of an Operation may require the Method to modify + // fields a Location. MajorIteration calls will not modify the values in + // the Location, but Evaluation operations will. Methods are encouraged to + // leave Location fields untouched to allow memory re-use. If data needs to + // be stored, the respective field should be set to nil -- Methods should + // not allocate Location memory themselves. + // + // Method may have its own specific convergence criteria, which can + // be communicated using a MethodDone operation. This will trigger a + // PostIteration to be sent on result, and the MethodDone task will not be + // returned on result. The Method must implement Statuser, and the + // call to Status must return a Status other than NotTerminated. + // + // The operation and result tasks are guaranteed to have a buffer length + // equal to the return from Init. + Run(operation chan<- Task, result <-chan Task, tasks []Task) + // Uses checks if the Method is suited to the optimization problem. The + // input is the available functions in Problem to call, and the returns are + // the functions which may be used and an error if there is a mismatch + // between the Problem and the Method's capabilities. + Uses(has Available) (uses Available, err error) +} + +// Minimize uses an optimizer to search for a minimum of a function. A +// maximization problem can be transformed into a minimization problem by +// multiplying the function by -1. +// +// The first argument represents the problem to be minimized. Its fields are +// routines that evaluate the objective function, gradient, and other +// quantities related to the problem. The objective function, p.Func, must not +// be nil. The optimization method used may require other fields to be non-nil +// as specified by method.Needs. Minimize will panic if these are not met. The +// method can be determined automatically from the supplied problem which is +// described below. +// +// If p.Status is not nil, it is called before every evaluation. 
If the +// returned Status is other than NotTerminated or if the error is not nil, the +// optimization run is terminated. +// +// The second argument specifies the initial location for the optimization. +// Some Methods do not require an initial location, but initX must still be +// specified for the dimension of the optimization problem. +// +// The third argument contains the settings for the minimization. If settings +// is nil, the zero value will be used, see the documentation of the Settings +// type for more information, and see the warning below. All settings will be +// honored for all Methods, even if that setting is counter-productive to the +// method. Minimize cannot guarantee strict adherence to the evaluation bounds +// specified when performing concurrent evaluations and updates. +// +// The final argument is the optimization method to use. If method == nil, then +// an appropriate default is chosen based on the properties of the other arguments +// (dimension, gradient-free or gradient-based, etc.). If method is not nil, +// Minimize panics if the Problem is not consistent with the Method (Uses +// returns an error). +// +// Minimize returns a Result struct and any error that occurred. See the +// documentation of Result for more information. +// +// See the documentation for Method for the details on implementing a method. +// +// Be aware that the default settings of Minimize are to accurately find the +// minimum. For certain functions and optimization methods, this can take many +// function evaluations. The Settings input struct can be used to limit this, +// for example by modifying the maximum function evaluations or gradient tolerance. +func Minimize(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) { + startTime := time.Now() + if method == nil { + method = getDefaultMethod(&p) + } + if settings == nil { + settings = &Settings{} + } + stats := &Stats{} + dim := len(initX) + err := checkOptimization(p, dim, settings.Recorder) + if err != nil { + return nil, err + } + + optLoc := newLocation(dim) // This must have an allocated X field. + optLoc.F = math.Inf(1) + + initOp, initLoc := getInitLocation(dim, initX, settings.InitValues) + + converger := settings.Converger + if converger == nil { + converger = defaultFunctionConverge() + } + converger.Init(dim) + + stats.Runtime = time.Since(startTime) + + // Send initial location to Recorder + if settings.Recorder != nil { + err = settings.Recorder.Record(optLoc, InitIteration, stats) + if err != nil { + return nil, err + } + } + + // Run optimization + var status Status + status, err = minimize(&p, method, settings, converger, stats, initOp, initLoc, optLoc, startTime) + + // Cleanup and collect results + if settings.Recorder != nil && err == nil { + err = settings.Recorder.Record(optLoc, PostIteration, stats) + } + stats.Runtime = time.Since(startTime) + return &Result{ + Location: *optLoc, + Stats: *stats, + Status: status, + }, err +} + +func getDefaultMethod(p *Problem) Method { + if p.Grad != nil { + return &LBFGS{} + } + return &NelderMead{} +} + +// minimize performs an optimization. minimize updates the settings and optLoc, +// and returns the final Status and error. 
+func minimize(prob *Problem, method Method, settings *Settings, converger Converger, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) { + dim := len(optLoc.X) + nTasks := settings.Concurrent + if nTasks == 0 { + nTasks = 1 + } + has := availFromProblem(*prob) + _, initErr := method.Uses(has) + if initErr != nil { + panic(fmt.Sprintf("optimize: specified method inconsistent with Problem: %v", initErr)) + } + newNTasks := method.Init(dim, nTasks) + if newNTasks > nTasks { + panic("optimize: too many tasks returned by Method") + } + nTasks = newNTasks + + // Launch the method. The method communicates tasks using the operations + // channel, and results is used to return the evaluated results. + operations := make(chan Task, nTasks) + results := make(chan Task, nTasks) + go func() { + tasks := make([]Task, nTasks) + tasks[0].Location = initLoc + tasks[0].Op = initOp + for i := 1; i < len(tasks); i++ { + tasks[i].Location = newLocation(dim) + } + method.Run(operations, results, tasks) + }() + + // Algorithmic Overview: + // There are three pieces to performing a concurrent optimization, + // the distributor, the workers, and the stats combiner. At a high level, + // the distributor reads in tasks sent by method, sending evaluations to the + // workers, and forwarding other operations to the statsCombiner. The workers + // read these forwarded evaluation tasks, evaluate the relevant parts of Problem + // and forward the results on to the stats combiner. The stats combiner reads + // in results from the workers, as well as tasks from the distributor, and + // uses them to update optimization statistics (function evaluations, etc.) + // and to check optimization convergence. + // + // The complicated part is correctly shutting down the optimization. The + // procedure is as follows. First, the stats combiner closes done and sends + // a PostIteration to the method. The distributor then reads that done has + // been closed, and closes the channel with the workers. At this point, no + // more evaluation operations will be executed. As the workers finish their + // evaluations, they forward the results onto the stats combiner, and then + // signal their shutdown to the stats combiner. When all workers have successfully + // finished, the stats combiner closes the results channel, signaling to the + // method that all results have been collected. At this point, the method + // may send MajorIteration(s) to update an optimum location based on these + // last returned results, and then the method will close the operations channel. + // The Method must ensure that the closing of results happens before the + // closing of operations in order to ensure proper shutdown order. + // Now that no more tasks will be commanded by the method, the distributor + // closes statsChan, and with no more statistics to update the optimization + // concludes. + + workerChan := make(chan Task) // Delegate tasks to the workers. + statsChan := make(chan Task) // Send evaluation updates. + done := make(chan struct{}) // Communicate the optimization is done. + + // Read tasks from the method and distribute as appropriate. 
+ distributor := func() { + for { + select { + case task := <-operations: + switch task.Op { + case InitIteration: + panic("optimize: Method returned InitIteration") + case PostIteration: + panic("optimize: Method returned PostIteration") + case NoOperation, MajorIteration, MethodDone: + statsChan <- task + default: + if !task.Op.isEvaluation() { + panic("optimize: expecting evaluation operation") + } + workerChan <- task + } + case <-done: + // No more evaluations will be sent, shut down the workers, and + // read the final tasks. + close(workerChan) + for task := range operations { + if task.Op == MajorIteration { + statsChan <- task + } + } + close(statsChan) + return + } + } + } + go distributor() + + // Evaluate the Problem concurrently. + worker := func() { + x := make([]float64, dim) + for task := range workerChan { + evaluate(prob, task.Location, task.Op, x) + statsChan <- task + } + // Signal successful worker completion. + statsChan <- Task{Op: signalDone} + } + for i := 0; i < nTasks; i++ { + go worker() + } + + var ( + workersDone int // effective wg for the workers + status Status + err error + finalStatus Status + finalError error + ) + + // Update optimization statistics and check convergence. + var methodDone bool + for task := range statsChan { + switch task.Op { + default: + if !task.Op.isEvaluation() { + panic("minimize: evaluation task expected") + } + updateEvaluationStats(stats, task.Op) + status, err = checkEvaluationLimits(prob, stats, settings) + case signalDone: + workersDone++ + if workersDone == nTasks { + close(results) + } + continue + case NoOperation: + // Just send the task back. + case MajorIteration: + status = performMajorIteration(optLoc, task.Location, stats, converger, startTime, settings) + case MethodDone: + methodDone = true + status = MethodConverge + } + if settings.Recorder != nil && status == NotTerminated && err == nil { + stats.Runtime = time.Since(startTime) + // Allow err to be overloaded if the Recorder fails. + err = settings.Recorder.Record(task.Location, task.Op, stats) + if err != nil { + status = Failure + } + } + // If this is the first termination status, trigger the conclusion of + // the optimization. + if status != NotTerminated || err != nil { + select { + case <-done: + default: + finalStatus = status + finalError = err + results <- Task{ + Op: PostIteration, + } + close(done) + } + } + + // Send the result back to the Problem if there are still active workers. + if workersDone != nTasks && task.Op != MethodDone { + results <- task + } + } + // This code block is here rather than above to ensure Status() is not called + // before Method.Run closes operations. + if methodDone { + statuser, ok := method.(Statuser) + if !ok { + panic("optimize: method returned MethodDone but is not a Statuser") + } + finalStatus, finalError = statuser.Status() + if finalStatus == NotTerminated { + panic("optimize: method returned MethodDone but a NotTerminated status") + } + } + return finalStatus, finalError +} + +func defaultFunctionConverge() *FunctionConverge { + return &FunctionConverge{ + Absolute: 1e-10, + Iterations: 100, + } +} + +// newLocation allocates a new locatian structure with an X field of the +// appropriate size. +func newLocation(dim int) *Location { + return &Location{ + X: make([]float64, dim), + } +} + +// getInitLocation checks the validity of initLocation and initOperation and +// returns the initial values as a *Location. 
+func getInitLocation(dim int, initX []float64, initValues *Location) (Operation, *Location) { + loc := newLocation(dim) + if initX == nil { + if initValues != nil { + panic("optimize: initValues is non-nil but no initial location specified") + } + return NoOperation, loc + } + copy(loc.X, initX) + if initValues == nil { + return NoOperation, loc + } else { + if initValues.X != nil { + panic("optimize: location specified in InitValues (only use InitX)") + } + } + loc.F = initValues.F + op := FuncEvaluation + if initValues.Gradient != nil { + if len(initValues.Gradient) != dim { + panic("optimize: initial gradient does not match problem dimension") + } + loc.Gradient = initValues.Gradient + op |= GradEvaluation + } + if initValues.Hessian != nil { + if initValues.Hessian.Symmetric() != dim { + panic("optimize: initial Hessian does not match problem dimension") + } + loc.Hessian = initValues.Hessian + op |= HessEvaluation + } + return op, loc +} + +func checkOptimization(p Problem, dim int, recorder Recorder) error { + if p.Func == nil { + panic(badProblem) + } + if dim <= 0 { + panic("optimize: impossible problem dimension") + } + if p.Status != nil { + _, err := p.Status() + if err != nil { + return err + } + } + if recorder != nil { + err := recorder.Init() + if err != nil { + return err + } + } + return nil +} + +// evaluate evaluates the routines specified by the Operation at loc.X, and stores +// the answer into loc. loc.X is copied into x before evaluating in order to +// prevent the routines from modifying it. +func evaluate(p *Problem, loc *Location, op Operation, x []float64) { + if !op.isEvaluation() { + panic(fmt.Sprintf("optimize: invalid evaluation %v", op)) + } + copy(x, loc.X) + if op&FuncEvaluation != 0 { + loc.F = p.Func(x) + } + if op&GradEvaluation != 0 { + // Make sure we have a destination in which to place the gradient. + // TODO(kortschak): Consider making this a check of len(loc.Gradient) != 0 + // to allow reuse of the slice. + if loc.Gradient == nil { + loc.Gradient = make([]float64, len(x)) + } + p.Grad(loc.Gradient, x) + } + if op&HessEvaluation != 0 { + // Make sure we have a destination in which to place the Hessian. + // TODO(kortschak): Consider making this a check of loc.Hessian.IsZero() + // to allow reuse of the matrix. + if loc.Hessian == nil { + loc.Hessian = mat.NewSymDense(len(x), nil) + } + p.Hess(loc.Hessian, x) + } +} + +// updateEvaluationStats updates the statistics based on the operation. +func updateEvaluationStats(stats *Stats, op Operation) { + if op&FuncEvaluation != 0 { + stats.FuncEvaluations++ + } + if op&GradEvaluation != 0 { + stats.GradEvaluations++ + } + if op&HessEvaluation != 0 { + stats.HessEvaluations++ + } +} + +// checkLocationConvergence checks if the current optimal location satisfies +// any of the convergence criteria based on the function location. +// +// checkLocationConvergence returns NotTerminated if the Location does not satisfy +// the convergence criteria given by settings. Otherwise a corresponding status is +// returned. +// Unlike checkLimits, checkConvergence is called only at MajorIterations. 
+func checkLocationConvergence(loc *Location, settings *Settings, converger Converger) Status { + if math.IsInf(loc.F, -1) { + return FunctionNegativeInfinity + } + if loc.Gradient != nil && settings.GradientThreshold > 0 { + norm := floats.Norm(loc.Gradient, math.Inf(1)) + if norm < settings.GradientThreshold { + return GradientThreshold + } + } + return converger.Converged(loc) +} + +// checkEvaluationLimits checks the optimization limits after an evaluation +// Operation. It checks the number of evaluations (of various kinds) and checks +// the status of the Problem, if applicable. +func checkEvaluationLimits(p *Problem, stats *Stats, settings *Settings) (Status, error) { + if p.Status != nil { + status, err := p.Status() + if err != nil || status != NotTerminated { + return status, err + } + } + if settings.FuncEvaluations > 0 && stats.FuncEvaluations >= settings.FuncEvaluations { + return FunctionEvaluationLimit, nil + } + if settings.GradEvaluations > 0 && stats.GradEvaluations >= settings.GradEvaluations { + return GradientEvaluationLimit, nil + } + if settings.HessEvaluations > 0 && stats.HessEvaluations >= settings.HessEvaluations { + return HessianEvaluationLimit, nil + } + return NotTerminated, nil +} + +// checkIterationLimits checks the limits on iterations affected by MajorIteration. +func checkIterationLimits(loc *Location, stats *Stats, settings *Settings) Status { + if settings.MajorIterations > 0 && stats.MajorIterations >= settings.MajorIterations { + return IterationLimit + } + if settings.Runtime > 0 && stats.Runtime >= settings.Runtime { + return RuntimeLimit + } + return NotTerminated +} + +// performMajorIteration does all of the steps needed to perform a MajorIteration. +// It increments the iteration count, updates the optimal location, and checks +// the necessary convergence criteria. +func performMajorIteration(optLoc, loc *Location, stats *Stats, converger Converger, startTime time.Time, settings *Settings) Status { + optLoc.F = loc.F + copy(optLoc.X, loc.X) + if loc.Gradient == nil { + optLoc.Gradient = nil + } else { + if optLoc.Gradient == nil { + optLoc.Gradient = make([]float64, len(loc.Gradient)) + } + copy(optLoc.Gradient, loc.Gradient) + } + stats.MajorIterations++ + stats.Runtime = time.Since(startTime) + status := checkLocationConvergence(optLoc, settings, converger) + if status != NotTerminated { + return status + } + return checkIterationLimits(optLoc, stats, settings) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/morethuente.go b/vendor/gonum.org/v1/gonum/optimize/morethuente.go new file mode 100644 index 0000000..a8cac5e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/morethuente.go @@ -0,0 +1,385 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import "math" + +// MoreThuente is a Linesearcher that finds steps that satisfy both the +// sufficient decrease and curvature conditions (the strong Wolfe conditions). +// +// References: +// - More, J.J. and D.J. Thuente: Line Search Algorithms with Guaranteed Sufficient +// Decrease. ACM Transactions on Mathematical Software 20(3) (1994), 286-307 +type MoreThuente struct { + // DecreaseFactor is the constant factor in the sufficient decrease + // (Armijo) condition. + // It must be in the interval [0, 1). The default value is 0. + DecreaseFactor float64 + // CurvatureFactor is the constant factor in the Wolfe conditions. 
Smaller + // values result in a more exact line search. + // A set value must be in the interval (0, 1). If it is zero, it will be + // defaulted to 0.9. + CurvatureFactor float64 + // StepTolerance sets the minimum acceptable width for the linesearch + // interval. If the relative interval length is less than this value, + // ErrLinesearcherFailure is returned. + // It must be non-negative. If it is zero, it will be defaulted to 1e-10. + StepTolerance float64 + + // MinimumStep is the minimum step that the linesearcher will take. + // It must be non-negative and less than MaximumStep. Defaults to no + // minimum (a value of 0). + MinimumStep float64 + // MaximumStep is the maximum step that the linesearcher will take. + // It must be greater than MinimumStep. If it is zero, it will be defaulted + // to 1e20. + MaximumStep float64 + + bracketed bool // Indicates if a minimum has been bracketed. + fInit float64 // Function value at step = 0. + gInit float64 // Derivative value at step = 0. + + // When stage is 1, the algorithm updates the interval given by x and y + // so that it contains a minimizer of the modified function + // psi(step) = f(step) - f(0) - DecreaseFactor * step * f'(0). + // When stage is 2, the interval is updated so that it contains a minimizer + // of f. + stage int + + step float64 // Current step. + lower, upper float64 // Lower and upper bounds on the next step. + x float64 // Endpoint of the interval with a lower function value. + fx, gx float64 // Data at x. + y float64 // The other endpoint. + fy, gy float64 // Data at y. + width [2]float64 // Width of the interval at two previous iterations. +} + +const ( + mtMinGrowthFactor float64 = 1.1 + mtMaxGrowthFactor float64 = 4 +) + +func (mt *MoreThuente) Init(f, g float64, step float64) Operation { + // Based on the original Fortran code that is available, for example, from + // http://ftp.mcs.anl.gov/pub/MINPACK-2/csrch/ + // as part of + // MINPACK-2 Project. November 1993. + // Argonne National Laboratory and University of Minnesota. + // Brett M. Averick, Richard G. Carter, and Jorge J. Moré. 
+ + if g >= 0 { + panic("morethuente: initial derivative is non-negative") + } + if step <= 0 { + panic("morethuente: invalid initial step") + } + + if mt.CurvatureFactor == 0 { + mt.CurvatureFactor = 0.9 + } + if mt.StepTolerance == 0 { + mt.StepTolerance = 1e-10 + } + if mt.MaximumStep == 0 { + mt.MaximumStep = 1e20 + } + + if mt.MinimumStep < 0 { + panic("morethuente: minimum step is negative") + } + if mt.MaximumStep <= mt.MinimumStep { + panic("morethuente: maximum step is not greater than minimum step") + } + if mt.DecreaseFactor < 0 || mt.DecreaseFactor >= 1 { + panic("morethuente: invalid decrease factor") + } + if mt.CurvatureFactor <= 0 || mt.CurvatureFactor >= 1 { + panic("morethuente: invalid curvature factor") + } + if mt.StepTolerance <= 0 { + panic("morethuente: step tolerance is not positive") + } + + if step < mt.MinimumStep { + step = mt.MinimumStep + } + if step > mt.MaximumStep { + step = mt.MaximumStep + } + + mt.bracketed = false + mt.stage = 1 + mt.fInit = f + mt.gInit = g + + mt.x, mt.fx, mt.gx = 0, f, g + mt.y, mt.fy, mt.gy = 0, f, g + + mt.lower = 0 + mt.upper = step + mtMaxGrowthFactor*step + + mt.width[0] = mt.MaximumStep - mt.MinimumStep + mt.width[1] = 2 * mt.width[0] + + mt.step = step + return FuncEvaluation | GradEvaluation +} + +func (mt *MoreThuente) Iterate(f, g float64) (Operation, float64, error) { + if mt.stage == 0 { + panic("morethuente: Init has not been called") + } + + gTest := mt.DecreaseFactor * mt.gInit + fTest := mt.fInit + mt.step*gTest + + if mt.bracketed { + if mt.step <= mt.lower || mt.step >= mt.upper || mt.upper-mt.lower <= mt.StepTolerance*mt.upper { + // step contains the best step found (see below). + return NoOperation, mt.step, ErrLinesearcherFailure + } + } + if mt.step == mt.MaximumStep && f <= fTest && g <= gTest { + return NoOperation, mt.step, ErrLinesearcherBound + } + if mt.step == mt.MinimumStep && (f > fTest || g >= gTest) { + return NoOperation, mt.step, ErrLinesearcherFailure + } + + // Test for convergence. + if f <= fTest && math.Abs(g) <= mt.CurvatureFactor*(-mt.gInit) { + mt.stage = 0 + return MajorIteration, mt.step, nil + } + + if mt.stage == 1 && f <= fTest && g >= 0 { + mt.stage = 2 + } + + if mt.stage == 1 && f <= mt.fx && f > fTest { + // Lower function value but the decrease is not sufficient . + + // Compute values and derivatives of the modified function at step, x, y. + fm := f - mt.step*gTest + fxm := mt.fx - mt.x*gTest + fym := mt.fy - mt.y*gTest + gm := g - gTest + gxm := mt.gx - gTest + gym := mt.gy - gTest + // Update x, y and step. + mt.nextStep(fxm, gxm, fym, gym, fm, gm) + // Recover values and derivates of the non-modified function at x and y. + mt.fx = fxm + mt.x*gTest + mt.fy = fym + mt.y*gTest + mt.gx = gxm + gTest + mt.gy = gym + gTest + } else { + // Update x, y and step. + mt.nextStep(mt.fx, mt.gx, mt.fy, mt.gy, f, g) + } + + if mt.bracketed { + // Monitor the length of the bracketing interval. If the interval has + // not been reduced sufficiently after two steps, use bisection to + // force its length to zero. + width := mt.y - mt.x + if math.Abs(width) >= 2.0/3*mt.width[1] { + mt.step = mt.x + 0.5*width + } + mt.width[0], mt.width[1] = math.Abs(width), mt.width[0] + } + + if mt.bracketed { + mt.lower = math.Min(mt.x, mt.y) + mt.upper = math.Max(mt.x, mt.y) + } else { + mt.lower = mt.step + mtMinGrowthFactor*(mt.step-mt.x) + mt.upper = mt.step + mtMaxGrowthFactor*(mt.step-mt.x) + } + + // Force the step to be in [MinimumStep, MaximumStep]. 
+ mt.step = math.Max(mt.MinimumStep, math.Min(mt.step, mt.MaximumStep)) + + if mt.bracketed { + if mt.step <= mt.lower || mt.step >= mt.upper || mt.upper-mt.lower <= mt.StepTolerance*mt.upper { + // If further progress is not possible, set step to the best step + // obtained during the search. + mt.step = mt.x + } + } + + return FuncEvaluation | GradEvaluation, mt.step, nil +} + +// nextStep computes the next safeguarded step and updates the interval that +// contains a step that satisfies the sufficient decrease and curvature +// conditions. +func (mt *MoreThuente) nextStep(fx, gx, fy, gy, f, g float64) { + x := mt.x + y := mt.y + step := mt.step + + gNeg := g < 0 + if gx < 0 { + gNeg = !gNeg + } + + var next float64 + var bracketed bool + switch { + case f > fx: + // A higher function value. The minimum is bracketed between x and step. + // We want the next step to be closer to x because the function value + // there is lower. + + theta := 3*(fx-f)/(step-x) + gx + g + s := math.Max(math.Abs(gx), math.Abs(g)) + s = math.Max(s, math.Abs(theta)) + gamma := s * math.Sqrt((theta/s)*(theta/s)-(gx/s)*(g/s)) + if step < x { + gamma *= -1 + } + p := gamma - gx + theta + q := gamma - gx + gamma + g + r := p / q + stpc := x + r*(step-x) + stpq := x + gx/((fx-f)/(step-x)+gx)/2*(step-x) + + if math.Abs(stpc-x) < math.Abs(stpq-x) { + // The cubic step is closer to x than the quadratic step. + // Take the cubic step. + next = stpc + } else { + // If f is much larger than fx, then the quadratic step may be too + // close to x. Therefore heuristically take the average of the + // cubic and quadratic steps. + next = stpc + (stpq-stpc)/2 + } + bracketed = true + + case gNeg: + // A lower function value and derivatives of opposite sign. The minimum + // is bracketed between x and step. If we choose a step that is far + // from step, the next iteration will also likely fall in this case. + + theta := 3*(fx-f)/(step-x) + gx + g + s := math.Max(math.Abs(gx), math.Abs(g)) + s = math.Max(s, math.Abs(theta)) + gamma := s * math.Sqrt((theta/s)*(theta/s)-(gx/s)*(g/s)) + if step > x { + gamma *= -1 + } + p := gamma - g + theta + q := gamma - g + gamma + gx + r := p / q + stpc := step + r*(x-step) + stpq := step + g/(g-gx)*(x-step) + + if math.Abs(stpc-step) > math.Abs(stpq-step) { + // The cubic step is farther from x than the quadratic step. + // Take the cubic step. + next = stpc + } else { + // Take the quadratic step. + next = stpq + } + bracketed = true + + case math.Abs(g) < math.Abs(gx): + // A lower function value, derivatives of the same sign, and the + // magnitude of the derivative decreases. Extrapolate function values + // at x and step so that the next step lies between step and y. + + theta := 3*(fx-f)/(step-x) + gx + g + s := math.Max(math.Abs(gx), math.Abs(g)) + s = math.Max(s, math.Abs(theta)) + gamma := s * math.Sqrt(math.Max(0, (theta/s)*(theta/s)-(gx/s)*(g/s))) + if step > x { + gamma *= -1 + } + p := gamma - g + theta + q := gamma + gx - g + gamma + r := p / q + var stpc float64 + switch { + case r < 0 && gamma != 0: + stpc = step + r*(x-step) + case step > x: + stpc = mt.upper + default: + stpc = mt.lower + } + stpq := step + g/(g-gx)*(x-step) + + if mt.bracketed { + // We are extrapolating so be cautious and take the step that + // is closer to step. + if math.Abs(stpc-step) < math.Abs(stpq-step) { + next = stpc + } else { + next = stpq + } + // Modify next if it is close to or beyond y. 
+ if step > x { + next = math.Min(step+2.0/3*(y-step), next) + } else { + next = math.Max(step+2.0/3*(y-step), next) + } + } else { + // Minimum has not been bracketed so take the larger step... + if math.Abs(stpc-step) > math.Abs(stpq-step) { + next = stpc + } else { + next = stpq + } + // ...but within reason. + next = math.Max(mt.lower, math.Min(next, mt.upper)) + } + + default: + // A lower function value, derivatives of the same sign, and the + // magnitude of the derivative does not decrease. The function seems to + // decrease rapidly in the direction of the step. + + switch { + case mt.bracketed: + theta := 3*(f-fy)/(y-step) + gy + g + s := math.Max(math.Abs(gy), math.Abs(g)) + s = math.Max(s, math.Abs(theta)) + gamma := s * math.Sqrt((theta/s)*(theta/s)-(gy/s)*(g/s)) + if step > y { + gamma *= -1 + } + p := gamma - g + theta + q := gamma - g + gamma + gy + r := p / q + next = step + r*(y-step) + case step > x: + next = mt.upper + default: + next = mt.lower + } + } + + if f > fx { + // x is still the best step. + mt.y = step + mt.fy = f + mt.gy = g + } else { + // step is the new best step. + if gNeg { + mt.y = x + mt.fy = fx + mt.gy = gx + } + mt.x = step + mt.fx = f + mt.gx = g + } + mt.bracketed = bracketed + mt.step = next +} diff --git a/vendor/gonum.org/v1/gonum/optimize/neldermead.go b/vendor/gonum.org/v1/gonum/optimize/neldermead.go new file mode 100644 index 0000000..4615bf4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/neldermead.go @@ -0,0 +1,349 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + "sort" + + "gonum.org/v1/gonum/floats" +) + +// nmIterType is a Nelder-Mead evaluation kind +type nmIterType int + +const ( + nmReflected = iota + nmExpanded + nmContractedInside + nmContractedOutside + nmInitialize + nmShrink + nmMajor +) + +type nmVertexSorter struct { + vertices [][]float64 + values []float64 +} + +func (n nmVertexSorter) Len() int { + return len(n.values) +} + +func (n nmVertexSorter) Less(i, j int) bool { + return n.values[i] < n.values[j] +} + +func (n nmVertexSorter) Swap(i, j int) { + n.values[i], n.values[j] = n.values[j], n.values[i] + n.vertices[i], n.vertices[j] = n.vertices[j], n.vertices[i] +} + +var _ Method = (*NelderMead)(nil) + +// NelderMead is an implementation of the Nelder-Mead simplex algorithm for +// gradient-free nonlinear optimization (not to be confused with Danzig's +// simplex algorithm for linear programming). The implementation follows the +// algorithm described in +// +// http://epubs.siam.org/doi/pdf/10.1137/S1052623496303470 +// +// If an initial simplex is provided, it is used and initLoc is ignored. If +// InitialVertices and InitialValues are both nil, an initial simplex will be +// generated automatically using the initial location as one vertex, and each +// additional vertex as SimplexSize away in one dimension. +// +// If the simplex update parameters (Reflection, etc.) 
+// are zero, they will be set automatically based on the dimension according to +// the recommendations in +// +// http://www.webpages.uidaho.edu/~fuchang/res/ANMS.pdf +type NelderMead struct { + InitialVertices [][]float64 + InitialValues []float64 + Reflection float64 // Reflection parameter (>0) + Expansion float64 // Expansion parameter (>1) + Contraction float64 // Contraction parameter (>0, <1) + Shrink float64 // Shrink parameter (>0, <1) + SimplexSize float64 // size of auto-constructed initial simplex + + status Status + err error + + reflection float64 + expansion float64 + contraction float64 + shrink float64 + + vertices [][]float64 // location of the vertices sorted in ascending f + values []float64 // function values at the vertices sorted in ascending f + centroid []float64 // centroid of all but the worst vertex + + fillIdx int // index for filling the simplex during initialization and shrinking + lastIter nmIterType // Last iteration + reflectedPoint []float64 // Storage of the reflected point location + reflectedValue float64 // Value at the last reflection point +} + +func (n *NelderMead) Status() (Status, error) { + return n.status, n.err +} + +func (*NelderMead) Uses(has Available) (uses Available, err error) { + return has.function() +} + +func (n *NelderMead) Init(dim, tasks int) int { + n.status = NotTerminated + n.err = nil + return 1 +} + +func (n *NelderMead) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + n.status, n.err = localOptimizer{}.run(n, math.NaN(), operation, result, tasks) + close(operation) + return +} + +func (n *NelderMead) initLocal(loc *Location) (Operation, error) { + dim := len(loc.X) + if cap(n.vertices) < dim+1 { + n.vertices = make([][]float64, dim+1) + } + n.vertices = n.vertices[:dim+1] + for i := range n.vertices { + n.vertices[i] = resize(n.vertices[i], dim) + } + n.values = resize(n.values, dim+1) + n.centroid = resize(n.centroid, dim) + n.reflectedPoint = resize(n.reflectedPoint, dim) + + if n.SimplexSize == 0 { + n.SimplexSize = 0.05 + } + + // Default parameter choices are chosen in a dimension-dependent way + // from http://www.webpages.uidaho.edu/~fuchang/res/ANMS.pdf + n.reflection = n.Reflection + if n.reflection == 0 { + n.reflection = 1 + } + n.expansion = n.Expansion + if n.expansion == 0 { + n.expansion = 1 + 2/float64(dim) + if dim == 1 { + n.expansion = 2 + } + } + n.contraction = n.Contraction + if n.contraction == 0 { + n.contraction = 0.75 - 1/(2*float64(dim)) + if dim == 1 { + n.contraction = 0.5 + } + } + n.shrink = n.Shrink + if n.shrink == 0 { + n.shrink = 1 - 1/float64(dim) + if dim == 1 { + n.shrink = 0.5 + } + } + + if n.InitialVertices != nil { + // Initial simplex provided. Copy the locations and values, and sort them. + if len(n.InitialVertices) != dim+1 { + panic("neldermead: incorrect number of vertices in initial simplex") + } + if len(n.InitialValues) != dim+1 { + panic("neldermead: incorrect number of values in initial simplex") + } + for i := range n.InitialVertices { + if len(n.InitialVertices[i]) != dim { + panic("neldermead: vertex size mismatch") + } + copy(n.vertices[i], n.InitialVertices[i]) + } + copy(n.values, n.InitialValues) + sort.Sort(nmVertexSorter{n.vertices, n.values}) + computeCentroid(n.vertices, n.centroid) + return n.returnNext(nmMajor, loc) + } + + // No simplex provided. Begin initializing initial simplex. First simplex + // entry is the initial location, then step 1 in every direction. 
+ copy(n.vertices[dim], loc.X) + n.values[dim] = loc.F + n.fillIdx = 0 + loc.X[n.fillIdx] += n.SimplexSize + n.lastIter = nmInitialize + return FuncEvaluation, nil +} + +// computeCentroid computes the centroid of all the simplex vertices except the +// final one +func computeCentroid(vertices [][]float64, centroid []float64) { + dim := len(centroid) + for i := range centroid { + centroid[i] = 0 + } + for i := 0; i < dim; i++ { + vertex := vertices[i] + for j, v := range vertex { + centroid[j] += v + } + } + for i := range centroid { + centroid[i] /= float64(dim) + } +} + +func (n *NelderMead) iterateLocal(loc *Location) (Operation, error) { + dim := len(loc.X) + switch n.lastIter { + case nmInitialize: + n.values[n.fillIdx] = loc.F + copy(n.vertices[n.fillIdx], loc.X) + n.fillIdx++ + if n.fillIdx == dim { + // Successfully finished building initial simplex. + sort.Sort(nmVertexSorter{n.vertices, n.values}) + computeCentroid(n.vertices, n.centroid) + return n.returnNext(nmMajor, loc) + } + copy(loc.X, n.vertices[dim]) + loc.X[n.fillIdx] += n.SimplexSize + return FuncEvaluation, nil + case nmMajor: + // Nelder Mead iterations start with Reflection step + return n.returnNext(nmReflected, loc) + case nmReflected: + n.reflectedValue = loc.F + switch { + case loc.F >= n.values[0] && loc.F < n.values[dim-1]: + n.replaceWorst(loc.X, loc.F) + return n.returnNext(nmMajor, loc) + case loc.F < n.values[0]: + return n.returnNext(nmExpanded, loc) + default: + if loc.F < n.values[dim] { + return n.returnNext(nmContractedOutside, loc) + } + return n.returnNext(nmContractedInside, loc) + } + case nmExpanded: + if loc.F < n.reflectedValue { + n.replaceWorst(loc.X, loc.F) + } else { + n.replaceWorst(n.reflectedPoint, n.reflectedValue) + } + return n.returnNext(nmMajor, loc) + case nmContractedOutside: + if loc.F <= n.reflectedValue { + n.replaceWorst(loc.X, loc.F) + return n.returnNext(nmMajor, loc) + } + n.fillIdx = 1 + return n.returnNext(nmShrink, loc) + case nmContractedInside: + if loc.F < n.values[dim] { + n.replaceWorst(loc.X, loc.F) + return n.returnNext(nmMajor, loc) + } + n.fillIdx = 1 + return n.returnNext(nmShrink, loc) + case nmShrink: + copy(n.vertices[n.fillIdx], loc.X) + n.values[n.fillIdx] = loc.F + n.fillIdx++ + if n.fillIdx != dim+1 { + return n.returnNext(nmShrink, loc) + } + sort.Sort(nmVertexSorter{n.vertices, n.values}) + computeCentroid(n.vertices, n.centroid) + return n.returnNext(nmMajor, loc) + default: + panic("unreachable") + } +} + +// returnNext updates the location based on the iteration type and the current +// simplex, and returns the next operation. +func (n *NelderMead) returnNext(iter nmIterType, loc *Location) (Operation, error) { + n.lastIter = iter + switch iter { + case nmMajor: + // Fill loc with the current best point and value, + // and command a convergence check. 
+ copy(loc.X, n.vertices[0]) + loc.F = n.values[0] + return MajorIteration, nil + case nmReflected, nmExpanded, nmContractedOutside, nmContractedInside: + // x_new = x_centroid + scale * (x_centroid - x_worst) + var scale float64 + switch iter { + case nmReflected: + scale = n.reflection + case nmExpanded: + scale = n.reflection * n.expansion + case nmContractedOutside: + scale = n.reflection * n.contraction + case nmContractedInside: + scale = -n.contraction + } + dim := len(loc.X) + floats.SubTo(loc.X, n.centroid, n.vertices[dim]) + floats.Scale(scale, loc.X) + floats.Add(loc.X, n.centroid) + if iter == nmReflected { + copy(n.reflectedPoint, loc.X) + } + return FuncEvaluation, nil + case nmShrink: + // x_shrink = x_best + delta * (x_i + x_best) + floats.SubTo(loc.X, n.vertices[n.fillIdx], n.vertices[0]) + floats.Scale(n.shrink, loc.X) + floats.Add(loc.X, n.vertices[0]) + return FuncEvaluation, nil + default: + panic("unreachable") + } +} + +// replaceWorst removes the worst location in the simplex and adds the new +// {x, f} pair maintaining sorting. +func (n *NelderMead) replaceWorst(x []float64, f float64) { + dim := len(x) + if f >= n.values[dim] { + panic("increase in simplex value") + } + copy(n.vertices[dim], x) + n.values[dim] = f + + // Sort the newly-added value. + for i := dim - 1; i >= 0; i-- { + if n.values[i] < f { + break + } + n.vertices[i], n.vertices[i+1] = n.vertices[i+1], n.vertices[i] + n.values[i], n.values[i+1] = n.values[i+1], n.values[i] + } + + // Update the location of the centroid. Only one point has been replaced, so + // subtract the worst point and add the new one. + floats.AddScaled(n.centroid, -1/float64(dim), n.vertices[dim]) + floats.AddScaled(n.centroid, 1/float64(dim), x) +} + +func (*NelderMead) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{false, false} +} diff --git a/vendor/gonum.org/v1/gonum/optimize/newton.go b/vendor/gonum.org/v1/gonum/optimize/newton.go new file mode 100644 index 0000000..9519baf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/newton.go @@ -0,0 +1,178 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/mat" +) + +const maxNewtonModifications = 20 + +var ( + _ Method = (*Newton)(nil) + _ localMethod = (*Newton)(nil) +) + +// Newton implements a modified Newton's method for Hessian-based unconstrained +// minimization. It applies regularization when the Hessian is not positive +// definite, and it can converge to a local minimum from any starting point. +// +// Newton iteratively forms a quadratic model to the objective function f and +// tries to minimize this approximate model. It generates a sequence of +// locations x_k by means of +// solve H_k d_k = -∇f_k for d_k, +// x_{k+1} = x_k + α_k d_k, +// where H_k is the Hessian matrix of f at x_k and α_k is a step size found by +// a line search. +// +// Away from a minimizer H_k may not be positive definite and d_k may not be a +// descent direction. Newton implements a Hessian modification strategy that +// adds successively larger multiples of identity to H_k until it becomes +// positive definite. Note that the repeated trial factorization of the +// modified Hessian involved in this process can be computationally expensive. 
+// +// If the Hessian matrix cannot be formed explicitly or if the computational +// cost of its factorization is prohibitive, BFGS or L-BFGS quasi-Newton method +// can be used instead. +type Newton struct { + // Linesearcher is used for selecting suitable steps along the descent + // direction d. Accepted steps should satisfy at least one of the Wolfe, + // Goldstein or Armijo conditions. + // If Linesearcher == nil, an appropriate default is chosen. + Linesearcher Linesearcher + // Increase is the factor by which a scalar tau is successively increased + // so that (H + tau*I) is positive definite. Larger values reduce the + // number of trial Hessian factorizations, but also reduce the second-order + // information in H. + // Increase must be greater than 1. If Increase is 0, it is defaulted to 5. + Increase float64 + // GradStopThreshold sets the threshold for stopping if the gradient norm + // gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and + // if it is NaN the setting is not used. + GradStopThreshold float64 + + status Status + err error + + ls *LinesearchMethod + + hess *mat.SymDense // Storage for a copy of the Hessian matrix. + chol mat.Cholesky // Storage for the Cholesky factorization. + tau float64 +} + +func (n *Newton) Status() (Status, error) { + return n.status, n.err +} + +func (*Newton) Uses(has Available) (uses Available, err error) { + return has.hessian() +} + +func (n *Newton) Init(dim, tasks int) int { + n.status = NotTerminated + n.err = nil + return 1 +} + +func (n *Newton) Run(operation chan<- Task, result <-chan Task, tasks []Task) { + n.status, n.err = localOptimizer{}.run(n, n.GradStopThreshold, operation, result, tasks) + close(operation) + return +} + +func (n *Newton) initLocal(loc *Location) (Operation, error) { + if n.Increase == 0 { + n.Increase = 5 + } + if n.Increase <= 1 { + panic("optimize: Newton.Increase must be greater than 1") + } + if n.Linesearcher == nil { + n.Linesearcher = &Bisection{} + } + if n.ls == nil { + n.ls = &LinesearchMethod{} + } + n.ls.Linesearcher = n.Linesearcher + n.ls.NextDirectioner = n + return n.ls.Init(loc) +} + +func (n *Newton) iterateLocal(loc *Location) (Operation, error) { + return n.ls.Iterate(loc) +} + +func (n *Newton) InitDirection(loc *Location, dir []float64) (stepSize float64) { + dim := len(loc.X) + n.hess = resizeSymDense(n.hess, dim) + n.tau = 0 + return n.NextDirection(loc, dir) +} + +func (n *Newton) NextDirection(loc *Location, dir []float64) (stepSize float64) { + // This method implements Algorithm 3.3 (Cholesky with Added Multiple of + // the Identity) from Nocedal, Wright (2006), 2nd edition. + + dim := len(loc.X) + d := mat.NewVecDense(dim, dir) + grad := mat.NewVecDense(dim, loc.Gradient) + n.hess.CopySym(loc.Hessian) + + // Find the smallest diagonal entry of the Hessian. + minA := n.hess.At(0, 0) + for i := 1; i < dim; i++ { + a := n.hess.At(i, i) + if a < minA { + minA = a + } + } + // If the smallest diagonal entry is positive, the Hessian may be positive + // definite, and so first attempt to apply the Cholesky factorization to + // the un-modified Hessian. If the smallest entry is negative, use the + // final tau from the last iteration if regularization was needed, + // otherwise guess an appropriate value for tau. + if minA > 0 { + n.tau = 0 + } else if n.tau == 0 { + n.tau = -minA + 0.001 + } + + for k := 0; k < maxNewtonModifications; k++ { + if n.tau != 0 { + // Add a multiple of identity to the Hessian. 
+ for i := 0; i < dim; i++ { + n.hess.SetSym(i, i, loc.Hessian.At(i, i)+n.tau) + } + } + // Try to apply the Cholesky factorization. + pd := n.chol.Factorize(n.hess) + if pd { + // Store the solution in d's backing array, dir. + n.chol.SolveVecTo(d, grad) + d.ScaleVec(-1, d) + return 1 + } + // Modified Hessian is not PD, so increase tau. + n.tau = math.Max(n.Increase*n.tau, 0.001) + } + + // Hessian modification failed to get a PD matrix. Return the negative + // gradient as the descent direction. + d.ScaleVec(-1, grad) + return 1 +} + +func (n *Newton) needs() struct { + Gradient bool + Hessian bool +} { + return struct { + Gradient bool + Hessian bool + }{true, true} +} diff --git a/vendor/gonum.org/v1/gonum/optimize/printer.go b/vendor/gonum.org/v1/gonum/optimize/printer.go new file mode 100644 index 0000000..f4f79b0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/printer.go @@ -0,0 +1,106 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "fmt" + "io" + "math" + "os" + "time" + + "gonum.org/v1/gonum/floats" +) + +var printerHeadings = [...]string{ + "Iter", + "Runtime", + "FuncEvals", + "Func", + "GradEvals", + "|Gradient|∞", + "HessEvals", +} + +const ( + printerBaseTmpl = "%9v %16v %9v %22v" // Base template for headings and values that are always printed. + printerGradTmpl = " %9v %22v" // Appended to base template when loc.Gradient != nil. + printerHessTmpl = " %9v" // Appended to base template when loc.Hessian != nil. +) + +// Printer writes column-format output to the specified writer as the optimization +// progresses. By default, it writes to os.Stdout. +type Printer struct { + Writer io.Writer + HeadingInterval int + ValueInterval time.Duration + + lastHeading int + lastValue time.Time +} + +func NewPrinter() *Printer { + return &Printer{ + Writer: os.Stdout, + HeadingInterval: 30, + ValueInterval: 500 * time.Millisecond, + } +} + +func (p *Printer) Init() error { + p.lastHeading = p.HeadingInterval // So the headings are printed the first time. + p.lastValue = time.Now().Add(-p.ValueInterval) // So the values are printed the first time. + return nil +} + +func (p *Printer) Record(loc *Location, op Operation, stats *Stats) error { + if op != MajorIteration && op != InitIteration && op != PostIteration { + return nil + } + + // Print values always on PostIteration or when ValueInterval has elapsed. + printValues := time.Since(p.lastValue) > p.ValueInterval || op == PostIteration + if !printValues { + // Return early if not printing anything. + return nil + } + + // Print heading when HeadingInterval lines have been printed, but never on PostIteration. 
+ printHeading := p.lastHeading >= p.HeadingInterval && op != PostIteration + if printHeading { + p.lastHeading = 1 + } else { + p.lastHeading++ + } + + if printHeading { + headings := "\n" + fmt.Sprintf(printerBaseTmpl, printerHeadings[0], printerHeadings[1], printerHeadings[2], printerHeadings[3]) + if loc.Gradient != nil { + headings += fmt.Sprintf(printerGradTmpl, printerHeadings[4], printerHeadings[5]) + } + if loc.Hessian != nil { + headings += fmt.Sprintf(printerHessTmpl, printerHeadings[6]) + } + _, err := fmt.Fprintln(p.Writer, headings) + if err != nil { + return err + } + } + + values := fmt.Sprintf(printerBaseTmpl, stats.MajorIterations, stats.Runtime, stats.FuncEvaluations, loc.F) + if loc.Gradient != nil { + values += fmt.Sprintf(printerGradTmpl, stats.GradEvaluations, floats.Norm(loc.Gradient, math.Inf(1))) + } + if loc.Hessian != nil { + values += fmt.Sprintf(printerHessTmpl, stats.HessEvaluations) + } + _, err := fmt.Fprintln(p.Writer, values) + if err != nil { + return err + } + + p.lastValue = time.Now() + return nil +} diff --git a/vendor/gonum.org/v1/gonum/optimize/stepsizers.go b/vendor/gonum.org/v1/gonum/optimize/stepsizers.go new file mode 100644 index 0000000..c6a58c0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/stepsizers.go @@ -0,0 +1,185 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "math" + + "gonum.org/v1/gonum/floats" +) + +const ( + initialStepFactor = 1 + + quadraticMinimumStepSize = 1e-3 + quadraticMaximumStepSize = 1 + quadraticThreshold = 1e-12 + + firstOrderMinimumStepSize = quadraticMinimumStepSize + firstOrderMaximumStepSize = quadraticMaximumStepSize +) + +// ConstantStepSize is a StepSizer that returns the same step size for +// every iteration. +type ConstantStepSize struct { + Size float64 +} + +func (c ConstantStepSize) Init(_ *Location, _ []float64) float64 { + return c.Size +} + +func (c ConstantStepSize) StepSize(_ *Location, _ []float64) float64 { + return c.Size +} + +// QuadraticStepSize estimates the initial line search step size as the minimum +// of a quadratic that interpolates f(x_{k-1}), f(x_k) and ∇f_k⋅p_k. +// This is useful for line search methods that do not produce well-scaled +// descent directions, such as gradient descent or conjugate gradient methods. +// The step size is bounded away from zero. +type QuadraticStepSize struct { + // Threshold determines that the initial step size should be estimated by + // quadratic interpolation when the relative change in the objective + // function is larger than Threshold. Otherwise the initial step size is + // set to 2*previous step size. + // If Threshold is zero, it will be set to 1e-12. + Threshold float64 + // InitialStepFactor sets the step size for the first iteration to be InitialStepFactor / |g|_∞. + // If InitialStepFactor is zero, it will be set to one. + InitialStepFactor float64 + // MinStepSize is the lower bound on the estimated step size. + // MinStepSize times GradientAbsTol should always be greater than machine epsilon. + // If MinStepSize is zero, it will be set to 1e-3. + MinStepSize float64 + // MaxStepSize is the upper bound on the estimated step size. + // If MaxStepSize is zero, it will be set to 1. 
+ MaxStepSize float64 + + fPrev float64 + dirPrevNorm float64 + projGradPrev float64 + xPrev []float64 +} + +func (q *QuadraticStepSize) Init(loc *Location, dir []float64) (stepSize float64) { + if q.Threshold == 0 { + q.Threshold = quadraticThreshold + } + if q.InitialStepFactor == 0 { + q.InitialStepFactor = initialStepFactor + } + if q.MinStepSize == 0 { + q.MinStepSize = quadraticMinimumStepSize + } + if q.MaxStepSize == 0 { + q.MaxStepSize = quadraticMaximumStepSize + } + if q.MaxStepSize <= q.MinStepSize { + panic("optimize: MinStepSize not smaller than MaxStepSize") + } + + gNorm := floats.Norm(loc.Gradient, math.Inf(1)) + stepSize = math.Max(q.MinStepSize, math.Min(q.InitialStepFactor/gNorm, q.MaxStepSize)) + + q.fPrev = loc.F + q.dirPrevNorm = floats.Norm(dir, 2) + q.projGradPrev = floats.Dot(loc.Gradient, dir) + q.xPrev = resize(q.xPrev, len(loc.X)) + copy(q.xPrev, loc.X) + return stepSize +} + +func (q *QuadraticStepSize) StepSize(loc *Location, dir []float64) (stepSize float64) { + stepSizePrev := floats.Distance(loc.X, q.xPrev, 2) / q.dirPrevNorm + projGrad := floats.Dot(loc.Gradient, dir) + + stepSize = 2 * stepSizePrev + if !floats.EqualWithinRel(q.fPrev, loc.F, q.Threshold) { + // Two consecutive function values are not relatively equal, so + // computing the minimum of a quadratic interpolant might make sense + + df := (loc.F - q.fPrev) / stepSizePrev + quadTest := df - q.projGradPrev + if quadTest > 0 { + // There is a chance of approximating the function well by a + // quadratic only if the finite difference (f_k-f_{k-1})/stepSizePrev + // is larger than ∇f_{k-1}⋅p_{k-1} + + // Set the step size to the minimizer of the quadratic function that + // interpolates f_{k-1}, ∇f_{k-1}⋅p_{k-1} and f_k + stepSize = -q.projGradPrev * stepSizePrev / quadTest / 2 + } + } + // Bound the step size to lie in [MinStepSize, MaxStepSize] + stepSize = math.Max(q.MinStepSize, math.Min(stepSize, q.MaxStepSize)) + + q.fPrev = loc.F + q.dirPrevNorm = floats.Norm(dir, 2) + q.projGradPrev = projGrad + copy(q.xPrev, loc.X) + return stepSize +} + +// FirstOrderStepSize estimates the initial line search step size based on the +// assumption that the first-order change in the function will be the same as +// that obtained at the previous iteration. That is, the initial step size s^0_k +// is chosen so that +// s^0_k ∇f_k⋅p_k = s_{k-1} ∇f_{k-1}⋅p_{k-1} +// This is useful for line search methods that do not produce well-scaled +// descent directions, such as gradient descent or conjugate gradient methods. +type FirstOrderStepSize struct { + // InitialStepFactor sets the step size for the first iteration to be InitialStepFactor / |g|_∞. + // If InitialStepFactor is zero, it will be set to one. + InitialStepFactor float64 + // MinStepSize is the lower bound on the estimated step size. + // MinStepSize times GradientAbsTol should always be greater than machine epsilon. + // If MinStepSize is zero, it will be set to 1e-3. + MinStepSize float64 + // MaxStepSize is the upper bound on the estimated step size. + // If MaxStepSize is zero, it will be set to 1. 
+ MaxStepSize float64 + + dirPrevNorm float64 + projGradPrev float64 + xPrev []float64 +} + +func (fo *FirstOrderStepSize) Init(loc *Location, dir []float64) (stepSize float64) { + if fo.InitialStepFactor == 0 { + fo.InitialStepFactor = initialStepFactor + } + if fo.MinStepSize == 0 { + fo.MinStepSize = firstOrderMinimumStepSize + } + if fo.MaxStepSize == 0 { + fo.MaxStepSize = firstOrderMaximumStepSize + } + if fo.MaxStepSize <= fo.MinStepSize { + panic("optimize: MinStepSize not smaller than MaxStepSize") + } + + gNorm := floats.Norm(loc.Gradient, math.Inf(1)) + stepSize = math.Max(fo.MinStepSize, math.Min(fo.InitialStepFactor/gNorm, fo.MaxStepSize)) + + fo.dirPrevNorm = floats.Norm(dir, 2) + fo.projGradPrev = floats.Dot(loc.Gradient, dir) + fo.xPrev = resize(fo.xPrev, len(loc.X)) + copy(fo.xPrev, loc.X) + return stepSize +} + +func (fo *FirstOrderStepSize) StepSize(loc *Location, dir []float64) (stepSize float64) { + stepSizePrev := floats.Distance(loc.X, fo.xPrev, 2) / fo.dirPrevNorm + projGrad := floats.Dot(loc.Gradient, dir) + + stepSize = stepSizePrev * fo.projGradPrev / projGrad + stepSize = math.Max(fo.MinStepSize, math.Min(stepSize, fo.MaxStepSize)) + + fo.dirPrevNorm = floats.Norm(dir, 2) + fo.projGradPrev = floats.Dot(loc.Gradient, dir) + copy(fo.xPrev, loc.X) + return stepSize +} diff --git a/vendor/gonum.org/v1/gonum/optimize/termination.go b/vendor/gonum.org/v1/gonum/optimize/termination.go new file mode 100644 index 0000000..df4bdb7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/termination.go @@ -0,0 +1,123 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import "errors" + +// Status represents the status of the optimization. Programs +// should not rely on the underlying numeric value of the Status being constant. +type Status int + +const ( + NotTerminated Status = iota + Success + FunctionThreshold + FunctionConvergence + GradientThreshold + StepConvergence + FunctionNegativeInfinity + MethodConverge + Failure + IterationLimit + RuntimeLimit + FunctionEvaluationLimit + GradientEvaluationLimit + HessianEvaluationLimit +) + +func (s Status) String() string { + return statuses[s].name +} + +// Early returns true if the status indicates the optimization ended before a +// minimum was found. As an example, if the maximum iterations was reached, a +// minimum was not found, but if the gradient norm was reached then a minimum +// was found. +func (s Status) Early() bool { + return statuses[s].early +} + +// Err returns the error associated with an early ending to the minimization. If +// Early returns false, Err will return nil. 
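As a usage sketch of the Status API above: Early and Err let a caller distinguish an early stop from convergence, and NewStatus registers a custom status during package initialization. The describe helper and the BudgetExceeded status below are invented for illustration and are not part of this package.

package main

import (
	"errors"
	"fmt"

	"gonum.org/v1/gonum/optimize"
)

// statusBudgetExceeded is a hypothetical custom status. NewStatus is meant to
// be called during package initialization, so a package-level var is used.
var statusBudgetExceeded = optimize.NewStatus("BudgetExceeded", true,
	errors.New("optimize: evaluation budget exceeded"))

// describe reports whether a run converged or stopped early.
func describe(s optimize.Status) string {
	if s.Early() {
		return fmt.Sprintf("stopped early: %v (%v)", s, s.Err())
	}
	return fmt.Sprintf("finished with status %v", s)
}

func main() {
	fmt.Println(describe(optimize.IterationLimit))
	fmt.Println(describe(statusBudgetExceeded))
}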
+func (s Status) Err() error { + return statuses[s].err +} + +var statuses = []struct { + name string + early bool + err error +}{ + { + name: "NotTerminated", + }, + { + name: "Success", + }, + { + name: "FunctionThreshold", + }, + { + name: "FunctionConvergence", + }, + { + name: "GradientThreshold", + }, + { + name: "StepConvergence", + }, + { + name: "FunctionNegativeInfinity", + }, + { + name: "MethodConverge", + }, + { + name: "Failure", + early: true, + err: errors.New("optimize: termination ended in failure"), + }, + { + name: "IterationLimit", + early: true, + err: errors.New("optimize: maximum number of major iterations reached"), + }, + { + name: "RuntimeLimit", + early: true, + err: errors.New("optimize: maximum runtime reached"), + }, + { + name: "FunctionEvaluationLimit", + early: true, + err: errors.New("optimize: maximum number of function evaluations reached"), + }, + { + name: "GradientEvaluationLimit", + early: true, + err: errors.New("optimize: maximum number of gradient evaluations reached"), + }, + { + name: "HessianEvaluationLimit", + early: true, + err: errors.New("optimize: maximum number of Hessian evaluations reached"), + }, +} + +// NewStatus returns a unique Status variable to represent a custom status. +// NewStatus is intended to be called only during package initialization, and +// calls to NewStatus are not thread safe. +// +// NewStatus takes in three arguments, the string that should be output from +// Status.String, a boolean if the status indicates early optimization conclusion, +// and the error to return from Err (if any). +func NewStatus(name string, early bool, err error) Status { + statuses = append(statuses, struct { + name string + early bool + err error + }{name, early, err}) + return Status(len(statuses) - 1) +} diff --git a/vendor/gonum.org/v1/gonum/optimize/types.go b/vendor/gonum.org/v1/gonum/optimize/types.go new file mode 100644 index 0000000..e3172c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/optimize/types.go @@ -0,0 +1,273 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package optimize + +import ( + "fmt" + "time" + + "gonum.org/v1/gonum/mat" +) + +const defaultGradientAbsTol = 1e-12 + +// Operation represents the set of operations commanded by Method at each +// iteration. It is a bitmap of various Iteration and Evaluation constants. +// Individual constants must NOT be combined together by the binary OR operator +// except for the Evaluation operations. +type Operation uint64 + +// Supported Operations. +const ( + // NoOperation specifies that no evaluation or convergence check should + // take place. + NoOperation Operation = 0 + // InitIteration is sent to Recorder to indicate the initial location. + // All fields of the location to record must be valid. + // Method must not return it. + InitIteration Operation = 1 << (iota - 1) + // PostIteration is sent to Recorder to indicate the final location + // reached during an optimization run. + // All fields of the location to record must be valid. + // Method must not return it. + PostIteration + // MajorIteration indicates that the next candidate location for + // an optimum has been found and convergence should be checked. + MajorIteration + // MethodDone declares that the method is done running. A method must + // be a Statuser in order to use this iteration, and after returning + // MethodDone, the Status must return other than NotTerminated. 
+ MethodDone + // FuncEvaluation specifies that the objective function + // should be evaluated. + FuncEvaluation + // GradEvaluation specifies that the gradient + // of the objective function should be evaluated. + GradEvaluation + // HessEvaluation specifies that the Hessian + // of the objective function should be evaluated. + HessEvaluation + // signalDone is used internally to signal completion. + signalDone + + // Mask for the evaluating operations. + evalMask = FuncEvaluation | GradEvaluation | HessEvaluation +) + +func (op Operation) isEvaluation() bool { + return op&evalMask != 0 && op&^evalMask == 0 +} + +func (op Operation) String() string { + if op&evalMask != 0 { + return fmt.Sprintf("Evaluation(Func: %t, Grad: %t, Hess: %t, Extra: 0b%b)", + op&FuncEvaluation != 0, + op&GradEvaluation != 0, + op&HessEvaluation != 0, + op&^(evalMask)) + } + s, ok := operationNames[op] + if ok { + return s + } + return fmt.Sprintf("Operation(%d)", op) +} + +var operationNames = map[Operation]string{ + NoOperation: "NoOperation", + InitIteration: "InitIteration", + MajorIteration: "MajorIteration", + PostIteration: "PostIteration", + MethodDone: "MethodDone", + signalDone: "signalDone", +} + +// Result represents the answer of an optimization run. It contains the optimum +// function value, X location, and gradient as well as the Status at convergence +// and Statistics taken during the run. +type Result struct { + Location + Stats + Status Status +} + +// Stats contains the statistics of the run. +type Stats struct { + MajorIterations int // Total number of major iterations + FuncEvaluations int // Number of evaluations of Func + GradEvaluations int // Number of evaluations of Grad + HessEvaluations int // Number of evaluations of Hess + Runtime time.Duration // Total runtime of the optimization +} + +// complementEval returns an evaluating operation that evaluates fields of loc +// not evaluated by eval. +func complementEval(loc *Location, eval Operation) (complEval Operation) { + if eval&FuncEvaluation == 0 { + complEval = FuncEvaluation + } + if loc.Gradient != nil && eval&GradEvaluation == 0 { + complEval |= GradEvaluation + } + if loc.Hessian != nil && eval&HessEvaluation == 0 { + complEval |= HessEvaluation + } + return complEval +} + +// Problem describes the optimization problem to be solved. +type Problem struct { + // Func evaluates the objective function at the given location. Func + // must not modify x. + Func func(x []float64) float64 + + // Grad evaluates the gradient at x and stores the result in grad which will + // be the same length as x. Grad must not modify x. + Grad func(grad, x []float64) + + // Hess evaluates the Hessian at x and stores the result in-place in hess which + // will have dimensions matching the length of x. Hess must not modify x. + Hess func(hess *mat.SymDense, x []float64) + + // Status reports the status of the objective function being optimized and any + // error. This can be used to terminate early, for example when the function is + // not able to evaluate itself. The user can use one of the pre-provided Status + // constants, or may call NewStatus to create a custom Status value. + Status func() (Status, error) +} + +// Available describes the functions available to call in Problem. 
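To make the Problem and Available machinery concrete, here is a hedged sketch in which the caller supplies Func, Grad and Hess for a simple quadratic and Newton, whose Uses asks for the Hessian, minimizes it. The objective is invented for illustration, and the sketch assumes the package's Minimize entry point referenced in the Settings documentation, called here with nil Settings so defaults apply.

package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/mat"
	"gonum.org/v1/gonum/optimize"
)

func main() {
	// f(x, y) = x² + 4y², minimized at the origin.
	p := optimize.Problem{
		Func: func(x []float64) float64 { return x[0]*x[0] + 4*x[1]*x[1] },
		Grad: func(grad, x []float64) {
			grad[0] = 2 * x[0]
			grad[1] = 8 * x[1]
		},
		Hess: func(h *mat.SymDense, x []float64) {
			h.SetSym(0, 0, 2)
			h.SetSym(0, 1, 0)
			h.SetSym(1, 1, 8)
		},
	}

	// Newton's Uses reports {Grad: true, Hess: true}, so all three callbacks
	// above will be evaluated during the run.
	res, err := optimize.Minimize(p, []float64{1, 1}, nil, &optimize.Newton{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.Status, res.X, res.F)
}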
+type Available struct { + Grad bool + Hess bool +} + +func availFromProblem(prob Problem) Available { + return Available{Grad: prob.Grad != nil, Hess: prob.Hess != nil} +} + +// function tests if the Problem described by the receiver is suitable for an +// unconstrained Method that only calls the function, and returns the result. +func (has Available) function() (uses Available, err error) { + // TODO(btracey): This needs to be modified when optimize supports + // constrained optimization. + return Available{}, nil +} + +// gradient tests if the Problem described by the receiver is suitable for an +// unconstrained gradient-based Method, and returns the result. +func (has Available) gradient() (uses Available, err error) { + // TODO(btracey): This needs to be modified when optimize supports + // constrained optimization. + if !has.Grad { + return Available{}, ErrMissingGrad + } + return Available{Grad: true}, nil +} + +// hessian tests if the Problem described by the receiver is suitable for an +// unconstrained Hessian-based Method, and returns the result. +func (has Available) hessian() (uses Available, err error) { + // TODO(btracey): This needs to be modified when optimize supports + // constrained optimization. + if !has.Grad { + return Available{}, ErrMissingGrad + } + if !has.Hess { + return Available{}, ErrMissingHess + } + return Available{Grad: true, Hess: true}, nil +} + +// Settings represents settings of the optimization run. It contains initial +// settings, convergence information, and Recorder information. Convergence +// settings are only checked at MajorIterations, while Evaluation thresholds +// are checked at every Operation. See the field comments for default values. +type Settings struct { + // InitValues specifies properties (function value, gradient, etc.) known + // at the initial location passed to Minimize. If InitValues is non-nil, then + // the function value F must be provided, the location X must not be specified + // and other fields may be specified. The values in Location may be modified + // during the call to Minimize. + InitValues *Location + + // GradientThreshold stops optimization with GradientThreshold status if the + // infinity norm of the gradient is less than this value. This defaults to + // a value of 0 (and so gradient convergence is not checked), however note + // that many Methods (LBFGS, CG, etc.) will converge with a small value of + // the gradient, and so to fully disable this setting the Method may need to + // be modified. + // This setting has no effect if the gradient is not used by the Method. + GradientThreshold float64 + + // Converger checks if the optimization has converged based on the (history + // of) locations found during the optimization. Minimize will pass the + // Location at every MajorIteration to the Converger. + // + // If the Converger is nil, a default value of + // FunctionConverge { + // Absolute: 1e-10, + // Iterations: 100, + // } + // will be used. NeverTerminated can be used to always return a + // NotTerminated status. + Converger Converger + + // MajorIterations is the maximum number of iterations allowed. + // IterationLimit status is returned if the number of major iterations + // equals or exceeds this value. + // If it equals zero, this setting has no effect. + // The default value is 0. + MajorIterations int + + // Runtime is the maximum runtime allowed. RuntimeLimit status is returned + // if the duration of the run is longer than this value. 
Runtime is only + // checked at MajorIterations of the Method. + // If it equals zero, this setting has no effect. + // The default value is 0. + Runtime time.Duration + + // FuncEvaluations is the maximum allowed number of function evaluations. + // FunctionEvaluationLimit status is returned if the total number of calls + // to Func equals or exceeds this number. + // If it equals zero, this setting has no effect. + // The default value is 0. + FuncEvaluations int + + // GradEvaluations is the maximum allowed number of gradient evaluations. + // GradientEvaluationLimit status is returned if the total number of calls + // to Grad equals or exceeds this number. + // If it equals zero, this setting has no effect. + // The default value is 0. + GradEvaluations int + + // HessEvaluations is the maximum allowed number of Hessian evaluations. + // HessianEvaluationLimit status is returned if the total number of calls + // to Hess equals or exceeds this number. + // If it equals zero, this setting has no effect. + // The default value is 0. + HessEvaluations int + + Recorder Recorder + + // Concurrent represents how many concurrent evaluations are possible. + Concurrent int +} + +// resize takes x and returns a slice of length dim. It returns a resliced x +// if cap(x) >= dim, and a new slice otherwise. +func resize(x []float64, dim int) []float64 { + if dim > cap(x) { + return make([]float64, dim) + } + return x[:dim] +} + +func resizeSymDense(m *mat.SymDense, dim int) *mat.SymDense { + if m == nil || cap(m.RawSymmetric().Data) < dim*dim { + return mat.NewSymDense(dim, nil) + } + return mat.NewSymDense(dim, m.RawSymmetric().Data[:dim*dim]) +} diff --git a/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut2.go b/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut2.go new file mode 100644 index 0000000..e229b65 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut2.go @@ -0,0 +1,249 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package barneshut + +import ( + "fmt" + "math" + + "gonum.org/v1/gonum/spatial/r2" +) + +// Particle2 is a particle in a plane. +type Particle2 interface { + Coord2() r2.Vec + Mass() float64 +} + +// Force2 is a force modeling function for interactions between p1 and p2, +// m1 is the mass of p1 and m2 of p2. The vector v is the vector from p1 to +// p2. The returned value is the force vector acting on p1. +// +// In models where the identity of particles must be known, p1 and p2 may be +// compared. Force2 may be passed nil for p2 when the Barnes-Hut approximation +// is being used. A nil p2 indicates that the second mass center is an +// aggregate. +type Force2 func(p1, p2 Particle2, m1, m2 float64, v r2.Vec) r2.Vec + +// Gravity2 returns a vector force on m1 by m2, equal to (m1⋅m2)/‖v‖² +// in the directions of v. Gravity2 ignores the identity of the interacting +// particles and returns a zero vector when the two particles are +// coincident, but performs no other sanity checks. +func Gravity2(_, _ Particle2, m1, m2 float64, v r2.Vec) r2.Vec { + d2 := v.X*v.X + v.Y*v.Y + if d2 == 0 { + return r2.Vec{} + } + return v.Scale((m1 * m2) / (d2 * math.Sqrt(d2))) +} + +// Plane implements Barnes-Hut force approximation calculations. +type Plane struct { + root tile + + Particles []Particle2 +} + +// NewPlane returns a new Plane. 
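A hedged sketch of the Plane API in use: the mass type below is an invented Particle2 implementation, and theta is an arbitrary accuracy/speed trade-off (theta = 0 would fall back to the exact pairwise calculation).

package main

import (
	"fmt"

	"gonum.org/v1/gonum/spatial/barneshut"
	"gonum.org/v1/gonum/spatial/r2"
)

// mass is a hypothetical Particle2 implementation used only for illustration.
type mass struct {
	pos r2.Vec
	m   float64
}

func (m mass) Coord2() r2.Vec { return m.pos }
func (m mass) Mass() float64  { return m.m }

func main() {
	particles := []barneshut.Particle2{
		mass{pos: r2.Vec{X: 0, Y: 0}, m: 1},
		mass{pos: r2.Vec{X: 1, Y: 0}, m: 2},
		mass{pos: r2.Vec{X: 0, Y: 2}, m: 3},
	}
	plane := barneshut.NewPlane(particles)

	const theta = 0.5 // Barnes-Hut approximation parameter.
	for _, p := range particles {
		f := plane.ForceOn(p, theta, barneshut.Gravity2)
		fmt.Printf("%+v -> force %+v\n", p, f)
	}
}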
+func NewPlane(p []Particle2) *Plane { + q := Plane{Particles: p} + q.Reset() + return &q +} + +// Reset reconstructs the Barnes-Hut tree. Reset must be called if the +// Particles field or elements of Particles have been altered, unless +// ForceOn is called with theta=0 or no data structures have been +// previously built. +func (q *Plane) Reset() { + if len(q.Particles) == 0 { + q.root = tile{} + return + } + + q.root = tile{ + particle: q.Particles[0], + center: q.Particles[0].Coord2(), + mass: q.Particles[0].Mass(), + } + q.root.bounds.Min = q.root.center + q.root.bounds.Max = q.root.center + for _, e := range q.Particles[1:] { + c := e.Coord2() + if c.X < q.root.bounds.Min.X { + q.root.bounds.Min.X = c.X + } + if c.X > q.root.bounds.Max.X { + q.root.bounds.Max.X = c.X + } + if c.Y < q.root.bounds.Min.Y { + q.root.bounds.Min.Y = c.Y + } + if c.Y > q.root.bounds.Max.Y { + q.root.bounds.Max.Y = c.Y + } + } + + // TODO(kortschak): Partially parallelise this by + // choosing the direction and using one of four + // goroutines to work on each root quadrant. + for _, e := range q.Particles[1:] { + q.root.insert(e) + } + q.root.summarize() +} + +// ForceOn returns a force vector on p given p's mass and the force function, f, +// using the Barnes-Hut theta approximation parameter. +// +// Calls to f will include p in the p1 position and a non-nil p2 if the force +// interaction is with a non-aggregate mass center, otherwise p2 will be nil. +// +// It is safe to call ForceOn concurrently. +func (q *Plane) ForceOn(p Particle2, theta float64, f Force2) (force r2.Vec) { + var empty tile + if theta > 0 && q.root != empty { + return q.root.forceOn(p, p.Coord2(), p.Mass(), theta, f) + } + + // For the degenerate case, just iterate over the + // slice of particles rather than walking the tree. + var v r2.Vec + m := p.Mass() + pv := p.Coord2() + for _, e := range q.Particles { + v = v.Add(f(p, e, m, e.Mass(), e.Coord2().Sub(pv))) + } + return v +} + +// tile is a quad tree quadrant with Barnes-Hut extensions. +type tile struct { + particle Particle2 + + bounds r2.Box + + nodes [4]*tile + + center r2.Vec + mass float64 +} + +// insert inserts p into the subtree rooted at t. +func (t *tile) insert(p Particle2) { + if t.particle == nil { + for _, q := range t.nodes { + if q != nil { + t.passDown(p) + return + } + } + t.particle = p + t.center = p.Coord2() + t.mass = p.Mass() + return + } + t.passDown(p) + t.passDown(t.particle) + t.particle = nil + t.center = r2.Vec{} + t.mass = 0 +} + +func (t *tile) passDown(p Particle2) { + dir := quadrantOf(t.bounds, p) + if t.nodes[dir] == nil { + t.nodes[dir] = &tile{bounds: splitPlane(t.bounds, dir)} + } + t.nodes[dir].insert(p) +} + +const ( + ne = iota + se + sw + nw +) + +// quadrantOf returns which quadrant of b that p should be placed in. +func quadrantOf(b r2.Box, p Particle2) int { + center := r2.Vec{ + X: (b.Min.X + b.Max.X) / 2, + Y: (b.Min.Y + b.Max.Y) / 2, + } + c := p.Coord2() + if checkBounds && (c.X < b.Min.X || b.Max.X < c.X || c.Y < b.Min.Y || b.Max.Y < c.Y) { + panic(fmt.Sprintf("p out of range %+v: %#v", b, p)) + } + if c.X < center.X { + if c.Y < center.Y { + return nw + } else { + return sw + } + } else { + if c.Y < center.Y { + return ne + } else { + return se + } + } +} + +// splitPlane returns a quadrant subdivision of b in the given direction. 
+func splitPlane(b r2.Box, dir int) r2.Box { + halfX := (b.Max.X - b.Min.X) / 2 + halfY := (b.Max.Y - b.Min.Y) / 2 + switch dir { + case ne: + b.Min.X += halfX + b.Max.Y -= halfY + case se: + b.Min.X += halfX + b.Min.Y += halfY + case sw: + b.Max.X -= halfX + b.Min.Y += halfY + case nw: + b.Max.X -= halfX + b.Max.Y -= halfY + } + return b +} + +// summarize updates node masses and centers of mass. +func (t *tile) summarize() (center r2.Vec, mass float64) { + for _, d := range &t.nodes { + if d == nil { + continue + } + c, m := d.summarize() + t.center.X += c.X * m + t.center.Y += c.Y * m + t.mass += m + } + t.center.X /= t.mass + t.center.Y /= t.mass + return t.center, t.mass +} + +// forceOn returns a force vector on p given p's mass m and the force +// calculation function, using the Barnes-Hut theta approximation parameter. +func (t *tile) forceOn(p Particle2, pt r2.Vec, m, theta float64, f Force2) (vector r2.Vec) { + s := ((t.bounds.Max.X - t.bounds.Min.X) + (t.bounds.Max.Y - t.bounds.Min.Y)) / 2 + d := math.Hypot(pt.X-t.center.X, pt.Y-t.center.Y) + if s/d < theta || t.particle != nil { + return f(p, t.particle, m, t.mass, t.center.Sub(pt)) + } + + var v r2.Vec + for _, d := range &t.nodes { + if d == nil { + continue + } + v = v.Add(d.forceOn(p, pt, m, theta, f)) + } + return v +} diff --git a/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut3.go b/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut3.go new file mode 100644 index 0000000..c19cf77 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/barneshut/barneshut3.go @@ -0,0 +1,300 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package barneshut + +import ( + "fmt" + "math" + + "gonum.org/v1/gonum/spatial/r3" +) + +// Particle3 is a particle in a volume. +type Particle3 interface { + Coord3() r3.Vec + Mass() float64 +} + +// Force3 is a force modeling function for interactions between p1 and p2, +// m1 is the mass of p1 and m2 of p2. The vector v is the vector from p1 to +// p2. The returned value is the force vector acting on p1. +// +// In models where the identity of particles must be known, p1 and p2 may be +// compared. Force3 may be passed nil for p2 when the Barnes-Hut approximation +// is being used. A nil p2 indicates that the second mass center is an +// aggregate. +type Force3 func(p1, p2 Particle3, m1, m2 float64, v r3.Vec) r3.Vec + +// Gravity3 returns a vector force on m1 by m2, equal to (m1⋅m2)/‖v‖² +// in the directions of v. Gravity3 ignores the identity of the interacting +// particles and returns a zero vector when the two particles are +// coincident, but performs no other sanity checks. +func Gravity3(_, _ Particle3, m1, m2 float64, v r3.Vec) r3.Vec { + d2 := v.X*v.X + v.Y*v.Y + v.Z*v.Z + if d2 == 0 { + return r3.Vec{} + } + return v.Scale((m1 * m2) / (d2 * math.Sqrt(d2))) +} + +// Volume implements Barnes-Hut force approximation calculations. +type Volume struct { + root bucket + + Particles []Particle3 +} + +// NewVolume returns a new Volume. +func NewVolume(p []Particle3) *Volume { + q := Volume{Particles: p} + q.Reset() + return &q +} + +// Reset reconstructs the Barnes-Hut tree. Reset must be called if the +// Particles field or elements of Particles have been altered, unless +// ForceOn is called with theta=0 or no data structures have been +// previously built. 
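The three-dimensional Volume is used the same way; since ForceOn is documented as safe for concurrent use, the per-particle force evaluations can run in parallel once the tree is built. The body type and the goroutine layout below are illustrative assumptions, not part of the package.

package main

import (
	"fmt"
	"sync"

	"gonum.org/v1/gonum/spatial/barneshut"
	"gonum.org/v1/gonum/spatial/r3"
)

// body is a hypothetical Particle3 implementation used only for illustration.
type body struct {
	pos r3.Vec
	m   float64
}

func (b body) Coord3() r3.Vec { return b.pos }
func (b body) Mass() float64  { return b.m }

func main() {
	particles := []barneshut.Particle3{
		body{pos: r3.Vec{X: 0, Y: 0, Z: 0}, m: 1},
		body{pos: r3.Vec{X: 1, Y: 0, Z: 0}, m: 2},
		body{pos: r3.Vec{X: 0, Y: 1, Z: 1}, m: 3},
	}
	vol := barneshut.NewVolume(particles)

	// Each goroutine writes to its own slice element, so no locking is needed.
	forces := make([]r3.Vec, len(particles))
	var wg sync.WaitGroup
	for i, p := range particles {
		wg.Add(1)
		go func(i int, p barneshut.Particle3) {
			defer wg.Done()
			forces[i] = vol.ForceOn(p, 0.5, barneshut.Gravity3)
		}(i, p)
	}
	wg.Wait()
	fmt.Println(forces)
}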
+func (q *Volume) Reset() { + if len(q.Particles) == 0 { + q.root = bucket{} + return + } + + q.root = bucket{ + particle: q.Particles[0], + center: q.Particles[0].Coord3(), + mass: q.Particles[0].Mass(), + } + q.root.bounds.Min = q.root.center + q.root.bounds.Max = q.root.center + for _, e := range q.Particles[1:] { + c := e.Coord3() + if c.X < q.root.bounds.Min.X { + q.root.bounds.Min.X = c.X + } + if c.X > q.root.bounds.Max.X { + q.root.bounds.Max.X = c.X + } + if c.Y < q.root.bounds.Min.Y { + q.root.bounds.Min.Y = c.Y + } + if c.Y > q.root.bounds.Max.Y { + q.root.bounds.Max.Y = c.Y + } + if c.Z < q.root.bounds.Min.Z { + q.root.bounds.Min.Z = c.Z + } + if c.Z > q.root.bounds.Max.Z { + q.root.bounds.Max.Z = c.Z + } + } + + // TODO(kortschak): Partially parallelise this by + // choosing the direction and using one of eight + // goroutines to work on each root octant. + for _, e := range q.Particles[1:] { + q.root.insert(e) + } + q.root.summarize() +} + +// ForceOn returns a force vector on p given p's mass and the force function, f, +// using the Barnes-Hut theta approximation parameter. +// +// Calls to f will include p in the p1 position and a non-nil p2 if the force +// interaction is with a non-aggregate mass center, otherwise p2 will be nil. +// +// It is safe to call ForceOn concurrently. +func (q *Volume) ForceOn(p Particle3, theta float64, f Force3) (force r3.Vec) { + var empty bucket + if theta > 0 && q.root != empty { + return q.root.forceOn(p, p.Coord3(), p.Mass(), theta, f) + } + + // For the degenerate case, just iterate over the + // slice of particles rather than walking the tree. + var v r3.Vec + m := p.Mass() + pv := p.Coord3() + for _, e := range q.Particles { + v = v.Add(f(p, e, m, e.Mass(), e.Coord3().Sub(pv))) + } + return v +} + +// bucket is an oct tree octant with Barnes-Hut extensions. +type bucket struct { + particle Particle3 + + bounds r3.Box + + nodes [8]*bucket + + center r3.Vec + mass float64 +} + +// insert inserts p into the subtree rooted at b. +func (b *bucket) insert(p Particle3) { + if b.particle == nil { + for _, q := range b.nodes { + if q != nil { + b.passDown(p) + return + } + } + b.particle = p + b.center = p.Coord3() + b.mass = p.Mass() + return + } + + b.passDown(p) + b.passDown(b.particle) + b.particle = nil + b.center = r3.Vec{} + b.mass = 0 +} + +func (b *bucket) passDown(p Particle3) { + dir := octantOf(b.bounds, p) + if b.nodes[dir] == nil { + b.nodes[dir] = &bucket{bounds: splitVolume(b.bounds, dir)} + } + b.nodes[dir].insert(p) +} + +const ( + lne = iota + lse + lsw + lnw + une + use + usw + unw +) + +// octantOf returns which octant of b that p should be placed in. +func octantOf(b r3.Box, p Particle3) int { + center := r3.Vec{ + X: (b.Min.X + b.Max.X) / 2, + Y: (b.Min.Y + b.Max.Y) / 2, + Z: (b.Min.Z + b.Max.Z) / 2, + } + c := p.Coord3() + if checkBounds && (c.X < b.Min.X || b.Max.X < c.X || c.Y < b.Min.Y || b.Max.Y < c.Y || c.Z < b.Min.Z || b.Max.Z < c.Z) { + panic(fmt.Sprintf("p out of range %+v: %#v", b, p)) + } + if c.X < center.X { + if c.Y < center.Y { + if c.Z < center.Z { + return lnw + } else { + return unw + } + } else { + if c.Z < center.Z { + return lsw + } else { + return usw + } + } + } else { + if c.Y < center.Y { + if c.Z < center.Z { + return lne + } else { + return une + } + } else { + if c.Z < center.Z { + return lse + } else { + return use + } + } + } +} + +// splitVolume returns an octant subdivision of b in the given direction. 
+func splitVolume(b r3.Box, dir int) r3.Box { + halfX := (b.Max.X - b.Min.X) / 2 + halfY := (b.Max.Y - b.Min.Y) / 2 + halfZ := (b.Max.Z - b.Min.Z) / 2 + switch dir { + case lne: + b.Min.X += halfX + b.Max.Y -= halfY + b.Max.Z -= halfZ + case lse: + b.Min.X += halfX + b.Min.Y += halfY + b.Max.Z -= halfZ + case lsw: + b.Max.X -= halfX + b.Min.Y += halfY + b.Max.Z -= halfZ + case lnw: + b.Max.X -= halfX + b.Max.Y -= halfY + b.Max.Z -= halfZ + case une: + b.Min.X += halfX + b.Max.Y -= halfY + b.Min.Z += halfZ + case use: + b.Min.X += halfX + b.Min.Y += halfY + b.Min.Z += halfZ + case usw: + b.Max.X -= halfX + b.Min.Y += halfY + b.Min.Z += halfZ + case unw: + b.Max.X -= halfX + b.Max.Y -= halfY + b.Min.Z += halfZ + } + return b +} + +// summarize updates node masses and centers of mass. +func (b *bucket) summarize() (center r3.Vec, mass float64) { + for _, d := range &b.nodes { + if d == nil { + continue + } + c, m := d.summarize() + b.center.X += c.X * m + b.center.Y += c.Y * m + b.center.Z += c.Z * m + b.mass += m + } + b.center.X /= b.mass + b.center.Y /= b.mass + b.center.Z /= b.mass + return b.center, b.mass +} + +// forceOn returns a force vector on p given p's mass m and the force +// calculation function, using the Barnes-Hut theta approximation parameter. +func (b *bucket) forceOn(p Particle3, pt r3.Vec, m, theta float64, f Force3) (vector r3.Vec) { + s := ((b.bounds.Max.X - b.bounds.Min.X) + (b.bounds.Max.Y - b.bounds.Min.Y) + (b.bounds.Max.Z - b.bounds.Min.Z)) / 3 + d := math.Hypot(math.Hypot(pt.X-b.center.X, pt.Y-b.center.Y), pt.Z-b.center.Z) + if s/d < theta || b.particle != nil { + return f(p, b.particle, m, b.mass, b.center.Sub(pt)) + } + + var v r3.Vec + for _, d := range &b.nodes { + if d == nil { + continue + } + v = v.Add(d.forceOn(p, pt, m, theta, f)) + } + return v +} diff --git a/vendor/gonum.org/v1/gonum/spatial/barneshut/bounds.go b/vendor/gonum.org/v1/gonum/spatial/barneshut/bounds.go new file mode 100644 index 0000000..841807d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/barneshut/bounds.go @@ -0,0 +1,9 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build bounds + +package barneshut + +const checkBounds = true diff --git a/vendor/gonum.org/v1/gonum/spatial/barneshut/doc.go b/vendor/gonum.org/v1/gonum/spatial/barneshut/doc.go new file mode 100644 index 0000000..14aab74 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/barneshut/doc.go @@ -0,0 +1,10 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package barneshut provides routines for calculating n-body force approximations +// using the Barnes-Hut algorithm. +// +// See https://en.wikipedia.org/wiki/Barnes–Hut_simulation, http://arborjs.org/docs/barnes-hut +// and https://jheer.github.io/barnes-hut/ for details. +package barneshut // import "gonum.org/v1/gonum/spatial/barneshut" diff --git a/vendor/gonum.org/v1/gonum/spatial/barneshut/no_bounds.go b/vendor/gonum.org/v1/gonum/spatial/barneshut/no_bounds.go new file mode 100644 index 0000000..03925df --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/barneshut/no_bounds.go @@ -0,0 +1,9 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !bounds + +package barneshut + +const checkBounds = false diff --git a/vendor/gonum.org/v1/gonum/spatial/kdtree/doc.go b/vendor/gonum.org/v1/gonum/spatial/kdtree/doc.go new file mode 100644 index 0000000..667522c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/kdtree/doc.go @@ -0,0 +1,8 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package kdtree implements a k-d tree. +// +// See https://en.wikipedia.org/wiki/K-d_tree for more details of k-d tree functionality. +package kdtree // import "gonum.org/v1/gonum/spatial/kdtree" diff --git a/vendor/gonum.org/v1/gonum/spatial/kdtree/kdtree.go b/vendor/gonum.org/v1/gonum/spatial/kdtree/kdtree.go new file mode 100644 index 0000000..1bcfe1a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/kdtree/kdtree.go @@ -0,0 +1,467 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package kdtree + +import ( + "container/heap" + "fmt" + "math" + "sort" +) + +// Interface is the set of methods required for construction of efficiently +// searchable k-d trees. A k-d tree may be constructed without using the +// Interface type, but it is likely to have reduced search performance. +type Interface interface { + // Index returns the ith element of the list of points. + Index(i int) Comparable + + // Len returns the length of the list. + Len() int + + // Pivot partitions the list based on the dimension specified. + Pivot(Dim) int + + // Slice returns a slice of the list using zero-based half + // open indexing equivalent to built-in slice indexing. + Slice(start, end int) Interface +} + +// Bounder returns a bounding volume containing the list of points. Bounds may return nil. +type Bounder interface { + Bounds() *Bounding +} + +type bounder interface { + Interface + Bounder +} + +// Dim is an index into a point's coordinates. +type Dim int + +// Comparable is the element interface for values stored in a k-d tree. +type Comparable interface { + // Compare returns the signed distance of a from the plane passing through + // b and perpendicular to the dimension d. + // + // Given c = a.Compare(b, d): + // c = a_d - b_d + // + Compare(Comparable, Dim) float64 + + // Dims returns the number of dimensions described in the Comparable. + Dims() int + + // Distance returns the squared Euclidean distance between the receiver and + // the parameter. + Distance(Comparable) float64 +} + +// Extender is a Comparable that can increase a bounding volume to include the +// point represented by the Comparable. +type Extender interface { + Comparable + + // Extend returns a bounding box that has been extended to include the + // receiver. Extend may return nil. + Extend(*Bounding) *Bounding +} + +// Bounding represents a volume bounding box. +type Bounding struct { + Min, Max Comparable +} + +// Contains returns whether c is within the volume of the Bounding. A nil Bounding +// returns true. +func (b *Bounding) Contains(c Comparable) bool { + if b == nil { + return true + } + for d := Dim(0); d < Dim(c.Dims()); d++ { + if c.Compare(b.Min, d) < 0 || 0 < c.Compare(b.Max, d) { + return false + } + } + return true +} + +// Node holds a single point value in a k-d tree. 
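As a sketch of the Comparable contract in use: any value can be stored in the tree by implementing Compare, Dims and Distance, and values can be added one at a time with Insert. The place type and its data below are invented for illustration.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/spatial/kdtree"
)

// place is a hypothetical Comparable carrying a label with its coordinates.
type place struct {
	name     string
	lat, lon float64
}

func (p place) Compare(c kdtree.Comparable, d kdtree.Dim) float64 {
	q := c.(place)
	if d == 0 {
		return p.lat - q.lat
	}
	return p.lon - q.lon
}

func (p place) Dims() int { return 2 }

func (p place) Distance(c kdtree.Comparable) float64 {
	q := c.(place)
	dLat, dLon := p.lat-q.lat, p.lon-q.lon
	return dLat*dLat + dLon*dLon
}

func main() {
	// Build the tree incrementally; no bounding volumes are kept because
	// place does not implement Extender.
	t := &kdtree.Tree{}
	for _, p := range []place{
		{name: "Uluru", lat: -25.35, lon: 131.04},
		{name: "Sydney", lat: -33.87, lon: 151.21},
		{name: "Perth", lat: -31.95, lon: 115.86},
	} {
		t.Insert(p, false)
	}

	got, dist := t.Nearest(place{lat: -30, lon: 120})
	fmt.Println(got.(place).name, dist)
}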
+type Node struct { + Point Comparable + Plane Dim + Left, Right *Node + *Bounding +} + +func (n *Node) String() string { + if n == nil { + return "" + } + return fmt.Sprintf("%.3f %d", n.Point, n.Plane) +} + +// Tree implements a k-d tree creation and nearest neighbor search. +type Tree struct { + Root *Node + Count int +} + +// New returns a k-d tree constructed from the values in p. If p is a Bounder and +// bounding is true, bounds are determined for each node. +// The ordering of elements in p may be altered after New returns. +func New(p Interface, bounding bool) *Tree { + if p, ok := p.(bounder); ok && bounding { + return &Tree{ + Root: buildBounded(p, 0, bounding), + Count: p.Len(), + } + } + return &Tree{ + Root: build(p, 0), + Count: p.Len(), + } +} + +func build(p Interface, plane Dim) *Node { + if p.Len() == 0 { + return nil + } + + piv := p.Pivot(plane) + d := p.Index(piv) + np := (plane + 1) % Dim(d.Dims()) + + return &Node{ + Point: d, + Plane: plane, + Left: build(p.Slice(0, piv), np), + Right: build(p.Slice(piv+1, p.Len()), np), + Bounding: nil, + } +} + +func buildBounded(p bounder, plane Dim, bounding bool) *Node { + if p.Len() == 0 { + return nil + } + + piv := p.Pivot(plane) + d := p.Index(piv) + np := (plane + 1) % Dim(d.Dims()) + + b := p.Bounds() + return &Node{ + Point: d, + Plane: plane, + Left: buildBounded(p.Slice(0, piv).(bounder), np, bounding), + Right: buildBounded(p.Slice(piv+1, p.Len()).(bounder), np, bounding), + Bounding: b, + } +} + +// Insert adds a point to the tree, updating the bounding volumes if bounding is +// true, and the tree is empty or the tree already has bounding volumes stored, +// and c is an Extender. No rebalancing of the tree is performed. +func (t *Tree) Insert(c Comparable, bounding bool) { + t.Count++ + if t.Root != nil { + bounding = t.Root.Bounding != nil + } + if c, ok := c.(Extender); ok && bounding { + t.Root = t.Root.insertBounded(c, 0, bounding) + return + } else if !ok && t.Root != nil { + // If we are not rebounding, mark the tree as non-bounded. + t.Root.Bounding = nil + } + t.Root = t.Root.insert(c, 0) +} + +func (n *Node) insert(c Comparable, d Dim) *Node { + if n == nil { + return &Node{ + Point: c, + Plane: d, + Bounding: nil, + } + } + + d = (n.Plane + 1) % Dim(c.Dims()) + if c.Compare(n.Point, n.Plane) <= 0 { + n.Left = n.Left.insert(c, d) + } else { + n.Right = n.Right.insert(c, d) + } + + return n +} + +func (n *Node) insertBounded(c Extender, d Dim, bounding bool) *Node { + if n == nil { + var b *Bounding + if bounding { + b = c.Extend(b) + } + return &Node{ + Point: c, + Plane: d, + Bounding: b, + } + } + + if bounding { + n.Bounding = c.Extend(n.Bounding) + } + d = (n.Plane + 1) % Dim(c.Dims()) + if c.Compare(n.Point, n.Plane) <= 0 { + n.Left = n.Left.insertBounded(c, d, bounding) + } else { + n.Right = n.Right.insertBounded(c, d, bounding) + } + + return n +} + +// Len returns the number of elements in the tree. +func (t *Tree) Len() int { return t.Count } + +// Contains returns whether a Comparable is in the bounds of the tree. If no bounding has +// been constructed Contains returns true. +func (t *Tree) Contains(c Comparable) bool { + if t.Root.Bounding == nil { + return true + } + return t.Root.Contains(c) +} + +var inf = math.Inf(1) + +// Nearest returns the nearest value to the query and the distance between them. 
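For plain coordinate data, the package's own Point and Points types (defined in points.go later in this patch) already satisfy Comparable and Interface, so batch construction and queries reduce to the following hedged sketch.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/spatial/kdtree"
)

func main() {
	pts := kdtree.Points{{0, 0}, {1, 1}, {2, 2}, {3, 3}}
	t := kdtree.New(pts, false)

	q := kdtree.Point{1.4, 1.1}
	nearest, dist := t.Nearest(q)
	fmt.Println(nearest, dist) // dist is the squared Euclidean distance.

	// Keep the two nearest neighbours of q.
	keep := kdtree.NewNKeeper(2)
	t.NearestSet(keep, q)
	for _, cd := range keep.Heap {
		fmt.Println(cd.Comparable, cd.Dist)
	}
}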
+func (t *Tree) Nearest(q Comparable) (Comparable, float64) { + if t.Root == nil { + return nil, inf + } + n, dist := t.Root.search(q, inf) + if n == nil { + return nil, inf + } + return n.Point, dist +} + +func (n *Node) search(q Comparable, dist float64) (*Node, float64) { + if n == nil { + return nil, inf + } + + c := q.Compare(n.Point, n.Plane) + dist = math.Min(dist, q.Distance(n.Point)) + + bn := n + if c <= 0 { + ln, ld := n.Left.search(q, dist) + if ld < dist { + bn, dist = ln, ld + } + if c*c < dist { + rn, rd := n.Right.search(q, dist) + if rd < dist { + bn, dist = rn, rd + } + } + return bn, dist + } + rn, rd := n.Right.search(q, dist) + if rd < dist { + bn, dist = rn, rd + } + if c*c < dist { + ln, ld := n.Left.search(q, dist) + if ld < dist { + bn, dist = ln, ld + } + } + return bn, dist +} + +// ComparableDist holds a Comparable and a distance to a specific query. A nil Comparable +// is used to mark the end of the heap, so clients should not store nil values except for +// this purpose. +type ComparableDist struct { + Comparable Comparable + Dist float64 +} + +// Heap is a max heap sorted on Dist. +type Heap []ComparableDist + +func (h *Heap) Max() ComparableDist { return (*h)[0] } +func (h *Heap) Len() int { return len(*h) } +func (h *Heap) Less(i, j int) bool { return (*h)[i].Comparable == nil || (*h)[i].Dist > (*h)[j].Dist } +func (h *Heap) Swap(i, j int) { (*h)[i], (*h)[j] = (*h)[j], (*h)[i] } +func (h *Heap) Push(x interface{}) { (*h) = append(*h, x.(ComparableDist)) } +func (h *Heap) Pop() (i interface{}) { i, *h = (*h)[len(*h)-1], (*h)[:len(*h)-1]; return i } + +// NKeeper is a Keeper that retains the n best ComparableDists that have been passed to Keep. +type NKeeper struct { + Heap +} + +// NewNKeeper returns an NKeeper with the max value of the heap set to infinite distance. The +// returned NKeeper is able to retain at most n values. +func NewNKeeper(n int) *NKeeper { + k := NKeeper{make(Heap, 1, n)} + k.Heap[0].Dist = inf + return &k +} + +// Keep adds c to the heap if its distance is less than the maximum value of the heap. If adding +// c would increase the size of the heap beyond the initial maximum length, the maximum value of +// the heap is dropped. +func (k *NKeeper) Keep(c ComparableDist) { + if c.Dist <= k.Heap[0].Dist { // Favour later finds to displace sentinel. + if len(k.Heap) == cap(k.Heap) { + heap.Pop(k) + } + heap.Push(k, c) + } +} + +// DistKeeper is a Keeper that retains the ComparableDists within the specified distance of the +// query that it is called to Keep. +type DistKeeper struct { + Heap +} + +// NewDistKeeper returns an DistKeeper with the maximum value of the heap set to d. +func NewDistKeeper(d float64) *DistKeeper { return &DistKeeper{Heap{{Dist: d}}} } + +// Keep adds c to the heap if its distance is less than or equal to the max value of the heap. +func (k *DistKeeper) Keep(c ComparableDist) { + if c.Dist <= k.Heap[0].Dist { + heap.Push(k, c) + } +} + +// Keeper implements a conditional max heap sorted on the Dist field of the ComparableDist type. +// kd search is guided by the distance stored in the max value of the heap. +type Keeper interface { + Keep(ComparableDist) // Keep conditionally pushes the provided ComparableDist onto the heap. + Max() ComparableDist // Max returns the maximum element of the Keeper. + heap.Interface +} + +// NearestSet finds the nearest values to the query accepted by the provided Keeper, k. 
+// k must be able to return a ComparableDist specifying the maximum acceptable distance +// when Max() is called, and retains the results of the search in min sorted order after +// the call to NearestSet returns. +// If a sentinel ComparableDist with a nil Comparable is used by the Keeper to mark the +// maximum distance, NearestSet will remove it before returning. +func (t *Tree) NearestSet(k Keeper, q Comparable) { + if t.Root == nil { + return + } + t.Root.searchSet(q, k) + + // Check whether we have retained a sentinel + // and flag removal if we have. + removeSentinel := k.Len() != 0 && k.Max().Comparable == nil + + sort.Sort(sort.Reverse(k)) + + // This abuses the interface to drop the max. + // It is reasonable to do this because we know + // that the maximum value will now be at element + // zero, which is removed by the Pop method. + if removeSentinel { + k.Pop() + } +} + +func (n *Node) searchSet(q Comparable, k Keeper) { + if n == nil { + return + } + + c := q.Compare(n.Point, n.Plane) + k.Keep(ComparableDist{Comparable: n.Point, Dist: q.Distance(n.Point)}) + if c <= 0 { + n.Left.searchSet(q, k) + if c*c <= k.Max().Dist { + n.Right.searchSet(q, k) + } + return + } + n.Right.searchSet(q, k) + if c*c <= k.Max().Dist { + n.Left.searchSet(q, k) + } +} + +// Operation is a function that operates on a Comparable. The bounding volume and tree depth +// of the point is also provided. If done is returned true, the Operation is indicating that no +// further work needs to be done and so the Do function should traverse no further. +type Operation func(Comparable, *Bounding, int) (done bool) + +// Do performs fn on all values stored in the tree. A boolean is returned indicating whether the +// Do traversal was interrupted by an Operation returning true. If fn alters stored values' sort +// relationships, future tree operation behaviors are undefined. +func (t *Tree) Do(fn Operation) bool { + if t.Root == nil { + return false + } + return t.Root.do(fn, 0) +} + +func (n *Node) do(fn Operation, depth int) (done bool) { + if n.Left != nil { + done = n.Left.do(fn, depth+1) + if done { + return + } + } + done = fn(n.Point, n.Bounding, depth) + if done { + return + } + if n.Right != nil { + done = n.Right.do(fn, depth+1) + } + return +} + +// DoBounded performs fn on all values stored in the tree that are within the specified bound. +// If b is nil, the result is the same as a Do. A boolean is returned indicating whether the +// DoBounded traversal was interrupted by an Operation returning true. If fn alters stored +// values' sort relationships future tree operation behaviors are undefined. +func (t *Tree) DoBounded(b *Bounding, fn Operation) bool { + if t.Root == nil { + return false + } + if b == nil { + return t.Root.do(fn, 0) + } + return t.Root.doBounded(fn, b, 0) +} + +func (n *Node) doBounded(fn Operation, b *Bounding, depth int) (done bool) { + if n.Left != nil && b.Min.Compare(n.Point, n.Plane) < 0 { + done = n.Left.doBounded(fn, b, depth+1) + if done { + return + } + } + if b.Contains(n.Point) { + done = fn(n.Point, n.Bounding, depth) + if done { + return + } + } + if n.Right != nil && 0 < b.Max.Compare(n.Point, n.Plane) { + done = n.Right.doBounded(fn, b, depth+1) + } + return +} diff --git a/vendor/gonum.org/v1/gonum/spatial/kdtree/medians.go b/vendor/gonum.org/v1/gonum/spatial/kdtree/medians.go new file mode 100644 index 0000000..c5f6d8f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/kdtree/medians.go @@ -0,0 +1,106 @@ +// Copyright ©2019 The Gonum Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package kdtree + +import ( + "sort" + + "golang.org/x/exp/rand" +) + +// Partition partitions list such that all elements less than the value at +// pivot prior to the call are placed before that element and all elements +// greater than that value are placed after it. The final location of the +// element at pivot prior to the call is returned. +func Partition(list sort.Interface, pivot int) int { + var index, last int + if last = list.Len() - 1; last < 0 { + return -1 + } + list.Swap(pivot, last) + for i := 0; i < last; i++ { + if !list.Less(last, i) { + list.Swap(index, i) + index++ + } + } + list.Swap(last, index) + return index +} + +// SortSlicer satisfies the sort.Interface and is able to slice itself. +type SortSlicer interface { + sort.Interface + Slice(start, end int) SortSlicer +} + +// Select partitions list such that all elements less than the kth element +// are placed before k in the resulting list and all elements greater than +// it are placed after the position k. +func Select(list SortSlicer, k int) int { + var ( + start int + end = list.Len() + ) + if k >= end { + if k == 0 { + return 0 + } + panic("kdtree: index out of range") + } + if start == end-1 { + return k + } + + for { + if start == end { + panic("kdtree: internal inconsistency") + } + sub := list.Slice(start, end) + pivot := Partition(sub, rand.Intn(sub.Len())) + switch { + case pivot == k: + return k + case k < pivot: + end = pivot + start + default: + k -= pivot + start += pivot + } + } +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// MedianOfMedians returns the index to the median value of the medians +// of groups of 5 consecutive elements. +func MedianOfMedians(list SortSlicer) int { + n := list.Len() / 5 + for i := 0; i < n; i++ { + left := i * 5 + sub := list.Slice(left, min(left+5, list.Len()-1)) + Select(sub, 2) + list.Swap(i, left+2) + } + Select(list.Slice(0, min(n, list.Len()-1)), min(list.Len(), n/2)) + return n / 2 +} + +// MedianOfRandoms returns the index to the median value of up to n randomly +// chosen elements in list. +func MedianOfRandoms(list SortSlicer, n int) int { + if l := list.Len(); l < n { + n = l + } else { + rand.Shuffle(n, func(i, j int) { list.Swap(i, j) }) + } + Select(list.Slice(0, n), n/2) + return n / 2 +} diff --git a/vendor/gonum.org/v1/gonum/spatial/kdtree/points.go b/vendor/gonum.org/v1/gonum/spatial/kdtree/points.go new file mode 100644 index 0000000..3785a4b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/kdtree/points.go @@ -0,0 +1,88 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package kdtree + +import "math" + +var ( + _ Interface = Points(nil) + _ Comparable = Point(nil) +) + +// Point represents a point in a k-d space that satisfies the Comparable interface. +type Point []float64 + +// Compare returns the signed distance of p from the plane passing through c and +// perpendicular to the dimension d. The concrete type of c must be Point. +func (p Point) Compare(c Comparable, d Dim) float64 { q := c.(Point); return p[d] - q[d] } + +// Dims returns the number of dimensions described by the receiver. +func (p Point) Dims() int { return len(p) } + +// Distance returns the squared Euclidean distance between c and the receiver. The +// concrete type of c must be Point. 
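+// For example, Point{0, 0}.Distance(Point{3, 4}) returns 25 (the squared distance),
+// not the Euclidean distance 5.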
+func (p Point) Distance(c Comparable) float64 { + q := c.(Point) + var sum float64 + for dim, c := range p { + d := c - q[dim] + sum += d * d + } + return sum +} + +// Extend returns a bounding box that has been extended to include the receiver. +func (p Point) Extend(b *Bounding) *Bounding { + if b == nil { + b = &Bounding{append(Point(nil), p...), append(Point(nil), p...)} + } + min := b.Min.(Point) + max := b.Max.(Point) + for d, v := range p { + min[d] = math.Min(min[d], v) + max[d] = math.Max(max[d], v) + } + *b = Bounding{Min: min, Max: max} + return b +} + +// Points is a collection of point values that satisfies the Interface. +type Points []Point + +func (p Points) Bounds() *Bounding { + if p.Len() == 0 { + return nil + } + min := append(Point(nil), p[0]...) + max := append(Point(nil), p[0]...) + for _, e := range p[1:] { + for d, v := range e { + min[d] = math.Min(min[d], v) + max[d] = math.Max(max[d], v) + } + } + return &Bounding{Min: min, Max: max} +} +func (p Points) Index(i int) Comparable { return p[i] } +func (p Points) Len() int { return len(p) } +func (p Points) Pivot(d Dim) int { return Plane{Points: p, Dim: d}.Pivot() } +func (p Points) Slice(start, end int) Interface { return p[start:end] } + +// Plane is a wrapping type that allows a Points type be pivoted on a dimension. +// The Pivot method of Plane uses MedianOfRandoms sampling at most 100 elements +// to find a pivot element. +type Plane struct { + Dim + Points +} + +// randoms is the maximum number of random values to sample for calculation of +// median of random elements. +const randoms = 100 + +func (p Plane) Less(i, j int) bool { return p.Points[i][p.Dim] < p.Points[j][p.Dim] } +func (p Plane) Pivot() int { return Partition(p, MedianOfRandoms(p, randoms)) } +func (p Plane) Slice(start, end int) SortSlicer { p.Points = p.Points[start:end]; return p } +func (p Plane) Swap(i, j int) { p.Points[i], p.Points[j] = p.Points[j], p.Points[i] } diff --git a/vendor/gonum.org/v1/gonum/spatial/r2/doc.go b/vendor/gonum.org/v1/gonum/spatial/r2/doc.go new file mode 100644 index 0000000..c21a529 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/r2/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package r2 provides 2D vectors and boxes and operations on them. +package r2 // import "gonum.org/v1/gonum/spatial/r2" diff --git a/vendor/gonum.org/v1/gonum/spatial/r2/vector.go b/vendor/gonum.org/v1/gonum/spatial/r2/vector.go new file mode 100644 index 0000000..b7b9cd6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/r2/vector.go @@ -0,0 +1,36 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package r2 + +// Vec is a 2D vector. +type Vec struct { + X, Y float64 +} + +// Add returns the vector sum of p and q. +func (p Vec) Add(q Vec) Vec { + p.X += q.X + p.Y += q.Y + return p +} + +// Sub returns the vector sum of p and -q. +func (p Vec) Sub(q Vec) Vec { + p.X -= q.X + p.Y -= q.Y + return p +} + +// Scale returns the vector p scaled by f. +func (p Vec) Scale(f float64) Vec { + p.X *= f + p.Y *= f + return p +} + +// Box is a 2D bounding box. 
+type Box struct { + Min, Max Vec +} diff --git a/vendor/gonum.org/v1/gonum/spatial/r3/doc.go b/vendor/gonum.org/v1/gonum/spatial/r3/doc.go new file mode 100644 index 0000000..e67b5bd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/r3/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package r3 provides 3D vectors and boxes and operations on them. +package r3 // import "gonum.org/v1/gonum/spatial/r3" diff --git a/vendor/gonum.org/v1/gonum/spatial/r3/vector.go b/vendor/gonum.org/v1/gonum/spatial/r3/vector.go new file mode 100644 index 0000000..39c14c1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/spatial/r3/vector.go @@ -0,0 +1,39 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package r3 + +// Vec is a 3D vector. +type Vec struct { + X, Y, Z float64 +} + +// Add returns the vector sum of p and q. +func (p Vec) Add(q Vec) Vec { + p.X += q.X + p.Y += q.Y + p.Z += q.Z + return p +} + +// Sub returns the vector sum of p and -q. +func (p Vec) Sub(q Vec) Vec { + p.X -= q.X + p.Y -= q.Y + p.Z -= q.Z + return p +} + +// Scale returns the vector p scaled by f. +func (p Vec) Scale(f float64) Vec { + p.X *= f + p.Y *= f + p.Z *= f + return p +} + +// Box is a 3D bounding box. +type Box struct { + Min, Max Vec +} diff --git a/vendor/gonum.org/v1/gonum/stat/combin/combin.go b/vendor/gonum.org/v1/gonum/stat/combin/combin.go new file mode 100644 index 0000000..744911c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/combin/combin.go @@ -0,0 +1,298 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package combin + +import ( + "math" + + "gonum.org/v1/gonum/mat" +) + +const ( + badNegInput = "combin: negative input" + badSetSize = "combin: n < k" + badInput = "combin: wrong input slice length" + nonpositiveDimension = "combin: non-positive dimension" +) + +// Binomial returns the binomial coefficient of (n,k), also commonly referred to +// as "n choose k". +// +// The binomial coefficient, C(n,k), is the number of unordered combinations of +// k elements in a set that is n elements big, and is defined as +// +// C(n,k) = n!/((n-k)!k!) +// +// n and k must be non-negative with n >= k, otherwise Binomial will panic. +// No check is made for overflow. +func Binomial(n, k int) int { + if n < 0 || k < 0 { + panic(badNegInput) + } + if n < k { + panic(badSetSize) + } + // (n,k) = (n, n-k) + if k > n/2 { + k = n - k + } + b := 1 + for i := 1; i <= k; i++ { + b = (n - k + i) * b / i + } + return b +} + +// GeneralizedBinomial returns the generalized binomial coefficient of (n, k), +// defined as +// Γ(n+1) / (Γ(k+1) Γ(n-k+1)) +// where Γ is the Gamma function. GeneralizedBinomial is useful for continuous +// relaxations of the binomial coefficient, or when the binomial coefficient value +// may overflow int. In the latter case, one may use math/big for an exact +// computation. +// +// n and k must be non-negative with n >= k, otherwise GeneralizedBinomial will panic. +func GeneralizedBinomial(n, k float64) float64 { + return math.Exp(LogGeneralizedBinomial(n, k)) +} + +// LogGeneralizedBinomial returns the log of the generalized binomial coefficient. +// See GeneralizedBinomial for more information. 
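+// For example, LogGeneralizedBinomial(5, 2) returns approximately ln(10) ≈ 2.3026,
+// since C(5,2) = 10.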
+func LogGeneralizedBinomial(n, k float64) float64 { + if n < 0 || k < 0 { + panic(badNegInput) + } + if n < k { + panic(badSetSize) + } + a, _ := math.Lgamma(n + 1) + b, _ := math.Lgamma(k + 1) + c, _ := math.Lgamma(n - k + 1) + return a - b - c +} + +// CombinationGenerator generates combinations iteratively. Combinations may be +// called to generate all combinations collectively. +type CombinationGenerator struct { + n int + k int + previous []int + remaining int +} + +// NewCombinationGenerator returns a CombinationGenerator for generating the +// combinations of k elements from a set of size n. +// +// n and k must be non-negative with n >= k, otherwise NewCombinationGenerator +// will panic. +func NewCombinationGenerator(n, k int) *CombinationGenerator { + return &CombinationGenerator{ + n: n, + k: k, + remaining: Binomial(n, k), + } +} + +// Next advances the iterator if there are combinations remaining to be generated, +// and returns false if all combinations have been generated. Next must be called +// to initialize the first value before calling Combination or Combination will +// panic. The value returned by Combination is only changed during calls to Next. +func (c *CombinationGenerator) Next() bool { + if c.remaining <= 0 { + // Next is called before combination, so c.remaining is set to zero before + // Combination is called. Thus, Combination cannot panic on zero, and a + // second sentinel value is needed. + c.remaining = -1 + return false + } + if c.previous == nil { + c.previous = make([]int, c.k) + for i := range c.previous { + c.previous[i] = i + } + } else { + nextCombination(c.previous, c.n, c.k) + } + c.remaining-- + return true +} + +// Combination generates the next combination. If next is non-nil, it must have +// length k and the result will be stored in-place into combination. If combination +// is nil a new slice will be allocated and returned. If all of the combinations +// have already been constructed (Next() returns false), Combination will panic. +// +// Next must be called to initialize the first value before calling Combination +// or Combination will panic. The value returned by Combination is only changed +// during calls to Next. +func (c *CombinationGenerator) Combination(combination []int) []int { + if c.remaining == -1 { + panic("combin: all combinations have been generated") + } + if c.previous == nil { + panic("combin: Combination called before Next") + } + if combination == nil { + combination = make([]int, c.k) + } + if len(combination) != c.k { + panic(badInput) + } + copy(combination, c.previous) + return combination +} + +// Combinations generates all of the combinations of k elements from a +// set of size n. The returned slice has length Binomial(n,k) and each inner slice +// has length k. +// +// n and k must be non-negative with n >= k, otherwise Combinations will panic. +// +// CombinationGenerator may alternatively be used to generate the combinations +// iteratively instead of collectively. +func Combinations(n, k int) [][]int { + combins := Binomial(n, k) + data := make([][]int, combins) + if len(data) == 0 { + return data + } + data[0] = make([]int, k) + for i := range data[0] { + data[0][i] = i + } + for i := 1; i < combins; i++ { + next := make([]int, k) + copy(next, data[i-1]) + nextCombination(next, n, k) + data[i] = next + } + return data +} + +// nextCombination generates the combination after s, overwriting the input value. 
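+// For example, with n = 3 and k = 2, starting from [0 1] successive calls
+// produce [0 2] and then [1 2], the final combination.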
+func nextCombination(s []int, n, k int) { + for j := k - 1; j >= 0; j-- { + if s[j] == n+j-k { + continue + } + s[j]++ + for l := j + 1; l < k; l++ { + s[l] = s[j] + l - j + } + break + } +} + +// Cartesian returns the cartesian product of the slices in data. The Cartesian +// product of two sets is the set of all combinations of the items. For example, +// given the input +// [][]float64{{1,2},{3,4},{5,6}} +// the returned matrix will be +// [ 1 3 5 ] +// [ 1 3 6 ] +// [ 1 4 5 ] +// [ 1 4 6 ] +// [ 2 3 5 ] +// [ 2 3 6 ] +// [ 2 4 5 ] +// [ 2 4 6 ] +// If dst is nil, a new matrix will be allocated and returned, otherwise the number +// of rows of dst must equal \prod_i len(data[i]), and the number of columns in +// dst must equal len(data). Cartesian also panics if len(data) = 0. +func Cartesian(dst *mat.Dense, data [][]float64) *mat.Dense { + if len(data) == 0 { + panic("combin: empty data input") + } + cols := len(data) + rows := 1 + lens := make([]int, cols) + for i, d := range data { + v := len(d) + lens[i] = v + rows *= v + } + if dst == nil { + dst = mat.NewDense(rows, cols, nil) + } + r, c := dst.Dims() + if r != rows || c != cols { + panic("combin: destination matrix size mismatch") + } + idxs := make([]int, cols) + for i := 0; i < rows; i++ { + SubFor(idxs, i, lens) + for j := 0; j < len(data); j++ { + dst.Set(i, j, data[j][idxs[j]]) + } + } + return dst +} + +// IdxFor converts a multi-dimensional index into a linear index for a +// multi-dimensional space. sub specifies the index for each dimension, and dims +// specifies the size of each dimension. IdxFor is the inverse of SubFor. +// IdxFor panics if any of the entries of sub are negative, any of the entries +// of dim are non-positive, or if sub[i] >= dims[i] for any i. +func IdxFor(sub, dims []int) int { + // The index returned is "row-major", that is the last index of sub is + // continuous. + var idx int + stride := 1 + for i := len(dims) - 1; i >= 0; i-- { + v := sub[i] + d := dims[i] + if d <= 0 { + panic(nonpositiveDimension) + } + if v < 0 || v >= d { + panic("combin: invalid subscript") + } + idx += v * stride + stride *= d + } + return idx +} + +// SubFor returns the multi-dimensional subscript for the input linear index to +// the multi-dimensional space. dims specifies the size of each dimension, and +// idx specifies the linear index. SubFor is the inverse of IdxFor. +// +// If sub is non-nil the result is stored in-place into sub, and SubFor will panic +// if len(sub) != len(dims). If sub is nil a new slice of the appropriate length +// is allocated. SubFor panics if idx < 0 or if idx is greater than or equal to +// the product of the dimensions. 
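+//
+// For example, with dims = []int{2, 3} the linear index 4 maps to the
+// subscript [1 1], which IdxFor maps back to 4.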
+func SubFor(sub []int, idx int, dims []int) []int { + if sub == nil { + sub = make([]int, len(dims)) + } + if len(sub) != len(dims) { + panic(badInput) + } + if idx < 0 { + panic(badNegInput) + } + stride := 1 + for i := len(dims) - 1; i >= 1; i-- { + stride *= dims[i] + } + for i := 0; i < len(dims)-1; i++ { + v := idx / stride + d := dims[i] + if d < 0 { + panic(nonpositiveDimension) + } + if v >= dims[i] { + panic("combin: index too large") + } + sub[i] = v + idx -= v * stride + stride /= dims[i+1] + } + if idx > dims[len(sub)-1] { + panic("combin: index too large") + } + sub[len(sub)-1] = idx + return sub +} diff --git a/vendor/gonum.org/v1/gonum/stat/combin/doc.go b/vendor/gonum.org/v1/gonum/stat/combin/doc.go new file mode 100644 index 0000000..496045c --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/combin/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package combin implements routines involving combinatorics (permutations, +// combinations, etc.). +package combin // import "gonum.org/v1/gonum/stat/combin" diff --git a/vendor/gonum.org/v1/gonum/stat/distmat/doc.go b/vendor/gonum.org/v1/gonum/stat/distmat/doc.go new file mode 100644 index 0000000..5503c3b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmat/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package distmat provides probability distributions over matrices. +package distmat // import "gonum.org/v1/gonum/stat/distmat" diff --git a/vendor/gonum.org/v1/gonum/stat/distmat/general.go b/vendor/gonum.org/v1/gonum/stat/distmat/general.go new file mode 100644 index 0000000..072ad77 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmat/general.go @@ -0,0 +1,7 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmat + +var badDim = "distmat: dimension mismatch" diff --git a/vendor/gonum.org/v1/gonum/stat/distmat/wishart.go b/vendor/gonum.org/v1/gonum/stat/distmat/wishart.go new file mode 100644 index 0000000..38f9d6d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmat/wishart.go @@ -0,0 +1,210 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmat + +import ( + "math" + "sync" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/mathext" + "gonum.org/v1/gonum/stat/distuv" +) + +// Wishart is a distribution over d×d positive symmetric definite matrices. It +// is parametrized by a scalar degrees of freedom parameter ν and a d×d positive +// definite matrix V. +// +// The Wishart PDF is given by +// p(X) = [|X|^((ν-d-1)/2) * exp(-tr(V^-1 * X)/2)] / [2^(ν*d/2) * |V|^(ν/2) * Γ_d(ν/2)] +// where X is a d×d PSD matrix, ν > d-1, |·| denotes the determinant, tr is the +// trace and Γ_d is the multivariate gamma function. +// +// See https://en.wikipedia.org/wiki/Wishart_distribution for more information. 
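+//
+// A minimal usage sketch (the rand.Source named src is assumed to be supplied
+// by the caller):
+//  v := mat.NewSymDense(2, []float64{1, 0, 0, 1})
+//  w, ok := NewWishart(v, 3, src)
+//  if ok {
+//      sample := w.RandSym(nil)
+//      _ = sample
+//  }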
+type Wishart struct { + nu float64 + src rand.Source + + dim int + cholv mat.Cholesky + logdetv float64 + upper mat.TriDense + + once sync.Once + v *mat.SymDense // only stored if needed +} + +// NewWishart returns a new Wishart distribution with the given shape matrix and +// degrees of freedom parameter. NewWishart returns whether the creation was +// successful. +// +// NewWishart panics if nu <= d - 1 where d is the order of v. +func NewWishart(v mat.Symmetric, nu float64, src rand.Source) (*Wishart, bool) { + dim := v.Symmetric() + if nu <= float64(dim-1) { + panic("wishart: nu must be greater than dim-1") + } + var chol mat.Cholesky + ok := chol.Factorize(v) + if !ok { + return nil, false + } + + var u mat.TriDense + chol.UTo(&u) + + w := &Wishart{ + nu: nu, + src: src, + + dim: dim, + cholv: chol, + logdetv: chol.LogDet(), + upper: u, + } + return w, true +} + +// MeanSym returns the mean matrix of the distribution as a symmetric matrix. +// If x is nil, a new matrix is allocated and returned. If x is not nil, the +// result is stored in-place into x and MeanSym will panic if the order of x +// is not equal to the order of the receiver. +func (w *Wishart) MeanSym(x *mat.SymDense) *mat.SymDense { + if x == nil { + x = mat.NewSymDense(w.dim, nil) + } + d := x.Symmetric() + if d != w.dim { + panic(badDim) + } + w.setV() + x.CopySym(w.v) + x.ScaleSym(w.nu, x) + return x +} + +// ProbSym returns the probability of the symmetric matrix x. If x is not positive +// definite (the Cholesky decomposition fails), it has 0 probability. +func (w *Wishart) ProbSym(x mat.Symmetric) float64 { + return math.Exp(w.LogProbSym(x)) +} + +// LogProbSym returns the log of the probability of the input symmetric matrix. +// +// LogProbSym returns -∞ if the input matrix is not positive definite (the Cholesky +// decomposition fails). +func (w *Wishart) LogProbSym(x mat.Symmetric) float64 { + dim := x.Symmetric() + if dim != w.dim { + panic(badDim) + } + var chol mat.Cholesky + ok := chol.Factorize(x) + if !ok { + return math.Inf(-1) + } + return w.logProbSymChol(&chol) +} + +// LogProbSymChol returns the log of the probability of the input symmetric matrix +// given its Cholesky decomposition. +func (w *Wishart) LogProbSymChol(cholX *mat.Cholesky) float64 { + dim := cholX.Symmetric() + if dim != w.dim { + panic(badDim) + } + return w.logProbSymChol(cholX) +} + +func (w *Wishart) logProbSymChol(cholX *mat.Cholesky) float64 { + // The PDF is + // p(X) = [|X|^((ν-d-1)/2) * exp(-tr(V^-1 * X)/2)] / [2^(ν*d/2) * |V|^(ν/2) * Γ_d(ν/2)] + // The LogPDF is thus + // (ν-d-1)/2 * log(|X|) - tr(V^-1 * X)/2 - (ν*d/2)*log(2) - ν/2 * log(|V|) - log(Γ_d(ν/2)) + logdetx := cholX.LogDet() + + // Compute tr(V^-1 * X), using the fact that X = U^T * U. + var u mat.TriDense + cholX.UTo(&u) + + var vinvx mat.Dense + err := w.cholv.SolveTo(&vinvx, u.T()) + if err != nil { + return math.Inf(-1) + } + vinvx.Mul(&vinvx, &u) + tr := mat.Trace(&vinvx) + + fnu := float64(w.nu) + fdim := float64(w.dim) + + return 0.5*((fnu-fdim-1)*logdetx-tr-fnu*fdim*math.Ln2-fnu*w.logdetv) - mathext.MvLgamma(0.5*fnu, w.dim) +} + +// RandSym generates a random symmetric matrix from the distribution. +func (w *Wishart) RandSym(x *mat.SymDense) *mat.SymDense { + if x == nil { + x = &mat.SymDense{} + } + var c mat.Cholesky + w.RandChol(&c) + c.ToSym(x) + return x +} + +// RandChol generates the Cholesky decomposition of a random matrix from the distribution. 
+func (w *Wishart) RandChol(c *mat.Cholesky) *mat.Cholesky { + // TODO(btracey): Modify the code if the underlying data from c is exposed + // to avoid the dim^2 allocation here. + + // Use the Bartlett Decomposition, which says that + // X ~ L A A^T L^T + // Where A is a lower triangular matrix in which the diagonal of A is + // generated from the square roots of χ^2 random variables, and the + // off-diagonals are generated from standard normal variables. + // The above gives the cholesky decomposition of X, where L_x = L A. + // + // mat64 works with the upper triagular decomposition, so we would like to do + // the same. We can instead say that + // U_x = L_x^T = (L * A)^T = A^T * L^T = A^T * U + // Instead, generate A^T, by using the procedure above, except as an upper + // triangular matrix. + norm := distuv.Normal{ + Mu: 0, + Sigma: 1, + Src: w.src, + } + + t := mat.NewTriDense(w.dim, mat.Upper, nil) + for i := 0; i < w.dim; i++ { + v := distuv.ChiSquared{ + K: w.nu - float64(i), + Src: w.src, + }.Rand() + t.SetTri(i, i, math.Sqrt(v)) + } + for i := 0; i < w.dim; i++ { + for j := i + 1; j < w.dim; j++ { + t.SetTri(i, j, norm.Rand()) + } + } + + t.MulTri(t, &w.upper) + if c == nil { + c = &mat.Cholesky{} + } + c.SetFromU(t) + return c +} + +// setV computes and stores the covariance matrix of the distribution. +func (w *Wishart) setV() { + w.once.Do(func() { + w.v = mat.NewSymDense(w.dim, nil) + w.cholv.ToSym(w.v) + }) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/dirichlet.go b/vendor/gonum.org/v1/gonum/stat/distmv/dirichlet.go new file mode 100644 index 0000000..7e68a9e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/dirichlet.go @@ -0,0 +1,143 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat/distuv" +) + +// Dirichlet implements the Dirichlet probability distribution. +// +// The Dirichlet distribution is a continuous probability distribution that +// generates elements over the probability simplex, i.e. ||x||_1 = 1. The Dirichlet +// distribution is the conjugate prior to the categorical distribution and the +// multivariate version of the beta distribution. The probability of a point x is +// 1/Beta(α) \prod_i x_i^(α_i - 1) +// where Beta(α) is the multivariate Beta function (see the mathext package). +// +// For more information see https://en.wikipedia.org/wiki/Dirichlet_distribution +type Dirichlet struct { + alpha []float64 + dim int + src rand.Source + + lbeta float64 + sumAlpha float64 +} + +// NewDirichlet creates a new dirichlet distribution with the given parameters alpha. +// NewDirichlet will panic if len(alpha) == 0, or if any alpha is <= 0. +func NewDirichlet(alpha []float64, src rand.Source) *Dirichlet { + dim := len(alpha) + if dim == 0 { + panic(badZeroDimension) + } + for _, v := range alpha { + if v <= 0 { + panic("dirichlet: non-positive alpha") + } + } + a := make([]float64, len(alpha)) + copy(a, alpha) + d := &Dirichlet{ + alpha: a, + dim: dim, + src: src, + } + d.lbeta, d.sumAlpha = d.genLBeta(a) + return d +} + +// CovarianceMatrix calculates the covariance matrix of the distribution, +// storing the result in dst. Upon return, the value at element {i, j} of the +// covariance matrix is equal to the covariance of the i^th and j^th variables. 
+// covariance(i, j) = E[(x_i - E[x_i])(x_j - E[x_j])] +// If the dst matrix is zero-sized it will be resized to the correct dimensions, +// otherwise dst must match the dimension of the receiver or CovarianceMatrix +// will panic. +func (d *Dirichlet) CovarianceMatrix(dst *mat.SymDense) { + if dst.IsZero() { + *dst = *(dst.GrowSym(d.dim).(*mat.SymDense)) + } else if dst.Symmetric() != d.dim { + panic("dirichelet: input matrix size mismatch") + } + scale := 1 / (d.sumAlpha * d.sumAlpha * (d.sumAlpha + 1)) + for i := 0; i < d.dim; i++ { + ai := d.alpha[i] + v := ai * (d.sumAlpha - ai) * scale + dst.SetSym(i, i, v) + for j := i + 1; j < d.dim; j++ { + aj := d.alpha[j] + v := -ai * aj * scale + dst.SetSym(i, j, v) + } + } +} + +// genLBeta computes the generalized LBeta function. +func (d *Dirichlet) genLBeta(alpha []float64) (lbeta, sumAlpha float64) { + for _, alpha := range d.alpha { + lg, _ := math.Lgamma(alpha) + lbeta += lg + sumAlpha += alpha + } + lg, _ := math.Lgamma(sumAlpha) + return lbeta - lg, sumAlpha +} + +// Dim returns the dimension of the distribution. +func (d *Dirichlet) Dim() int { + return d.dim +} + +// LogProb computes the log of the pdf of the point x. +// +// It does not check that ||x||_1 = 1. +func (d *Dirichlet) LogProb(x []float64) float64 { + dim := d.dim + if len(x) != dim { + panic(badSizeMismatch) + } + var lprob float64 + for i, x := range x { + lprob += (d.alpha[i] - 1) * math.Log(x) + } + lprob -= d.lbeta + return lprob +} + +// Mean returns the mean of the probability distribution at x. If the +// input argument is nil, a new slice will be allocated, otherwise the result +// will be put in-place into the receiver. +func (d *Dirichlet) Mean(x []float64) []float64 { + x = reuseAs(x, d.dim) + copy(x, d.alpha) + floats.Scale(1/d.sumAlpha, x) + return x +} + +// Prob computes the value of the probability density function at x. +func (d *Dirichlet) Prob(x []float64) float64 { + return math.Exp(d.LogProb(x)) +} + +// Rand generates a random number according to the distributon. +// If the input slice is nil, new memory is allocated, otherwise the result is stored +// in place. +func (d *Dirichlet) Rand(x []float64) []float64 { + x = reuseAs(x, d.dim) + for i := range x { + x[i] = distuv.Gamma{Alpha: d.alpha[i], Beta: 1, Src: d.src}.Rand() + } + sum := floats.Sum(x) + floats.Scale(1/sum, x) + return x +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/doc.go b/vendor/gonum.org/v1/gonum/stat/distmv/doc.go new file mode 100644 index 0000000..142e056 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package distmv provides multivariate random distribution types. +package distmv // import "gonum.org/v1/gonum/stat/distmv" diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/general.go b/vendor/gonum.org/v1/gonum/stat/distmv/general.go new file mode 100644 index 0000000..704e299 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/general.go @@ -0,0 +1,30 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package distmv + +var ( + badQuantile = "distmv: quantile not between 0 and 1" + badReceiver = "distmv: input slice is not nil or the correct length" + badSizeMismatch = "distmv: size mismatch" + badZeroDimension = "distmv: zero dimensional input" + nonPosDimension = "distmv: non-positive dimension input" +) + +const logTwoPi = 1.8378770664093454835606594728112352797227949472755668 + +// useAs gets a slice of size n. If len(x) == n, x is returned, if len(x) == 0 +// then a slice is returned of length n. +func reuseAs(x []float64, n int) []float64 { + if len(x) == n { + return x + } + if len(x) == 0 { + if cap(x) >= n { + return x[:n] + } + return make([]float64, n) + } + panic(badReceiver) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/interfaces.go b/vendor/gonum.org/v1/gonum/stat/distmv/interfaces.go new file mode 100644 index 0000000..35dd255 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/interfaces.go @@ -0,0 +1,33 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +// Quantiler returns the multi-dimensional inverse cumulative distribution function. +// len(x) must equal len(p), and if x is non-nil, len(x) must also equal len(p). +// If x is nil, a new slice will be allocated and returned, otherwise the quantile +// will be stored in-place into x. All of the values of p must be between 0 and 1, +// or Quantile will panic. +type Quantiler interface { + Quantile(x, p []float64) []float64 +} + +// LogProber computes the log of the probability of the point x. +type LogProber interface { + LogProb(x []float64) float64 +} + +// Rander generates a random number according to the distributon. +// If the input is non-nil, len(x) must equal len(p) and the dimension of the distribution, +// otherwise Quantile will panic. +// If the input is nil, a new slice will be allocated and returned. +type Rander interface { + Rand(x []float64) []float64 +} + +// RandLogProber is both a Rander and a LogProber. +type RandLogProber interface { + Rander + LogProber +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/normal.go b/vendor/gonum.org/v1/gonum/stat/distmv/normal.go new file mode 100644 index 0000000..410a629 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/normal.go @@ -0,0 +1,391 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat" + "gonum.org/v1/gonum/stat/distuv" +) + +var ( + badInputLength = "distmv: input slice length mismatch" +) + +// Normal is a multivariate normal distribution (also known as the multivariate +// Gaussian distribution). Its pdf in k dimensions is given by +// (2 π)^(-k/2) |Σ|^(-1/2) exp(-1/2 (x-μ)'Σ^-1(x-μ)) +// where μ is the mean vector and Σ the covariance matrix. Σ must be symmetric +// and positive definite. Use NewNormal to construct. +type Normal struct { + mu []float64 + + sigma mat.SymDense + + chol mat.Cholesky + logSqrtDet float64 + dim int + + // If src is altered, rnd must be updated. + src rand.Source + rnd *rand.Rand +} + +// NewNormal creates a new Normal with the given mean and covariance matrix. +// NewNormal panics if len(mu) == 0, or if len(mu) != sigma.N. 
If the covariance +// matrix is not positive-definite, the returned boolean is false. +func NewNormal(mu []float64, sigma mat.Symmetric, src rand.Source) (*Normal, bool) { + if len(mu) == 0 { + panic(badZeroDimension) + } + dim := sigma.Symmetric() + if dim != len(mu) { + panic(badSizeMismatch) + } + n := &Normal{ + src: src, + rnd: rand.New(src), + dim: dim, + mu: make([]float64, dim), + } + copy(n.mu, mu) + ok := n.chol.Factorize(sigma) + if !ok { + return nil, false + } + n.sigma = *mat.NewSymDense(dim, nil) + n.sigma.CopySym(sigma) + n.logSqrtDet = 0.5 * n.chol.LogDet() + return n, true +} + +// NewNormalChol creates a new Normal distribution with the given mean and +// covariance matrix represented by its Cholesky decomposition. NewNormalChol +// panics if len(mu) is not equal to chol.Size(). +func NewNormalChol(mu []float64, chol *mat.Cholesky, src rand.Source) *Normal { + dim := len(mu) + if dim != chol.Symmetric() { + panic(badSizeMismatch) + } + n := &Normal{ + src: src, + rnd: rand.New(src), + dim: dim, + mu: make([]float64, dim), + } + n.chol.Clone(chol) + copy(n.mu, mu) + n.logSqrtDet = 0.5 * n.chol.LogDet() + return n +} + +// NewNormalPrecision creates a new Normal distribution with the given mean and +// precision matrix (inverse of the covariance matrix). NewNormalPrecision +// panics if len(mu) is not equal to prec.Symmetric(). If the precision matrix +// is not positive-definite, NewNormalPrecision returns nil for norm and false +// for ok. +func NewNormalPrecision(mu []float64, prec *mat.SymDense, src rand.Source) (norm *Normal, ok bool) { + if len(mu) == 0 { + panic(badZeroDimension) + } + dim := prec.Symmetric() + if dim != len(mu) { + panic(badSizeMismatch) + } + // TODO(btracey): Computing a matrix inverse is generally numerically instable. + // This only has to compute the inverse of a positive definite matrix, which + // is much better, but this still loses precision. It is worth considering if + // instead the precision matrix should be stored explicitly and used instead + // of the Cholesky decomposition of the covariance matrix where appropriate. + var chol mat.Cholesky + ok = chol.Factorize(prec) + if !ok { + return nil, false + } + var sigma mat.SymDense + chol.InverseTo(&sigma) + return NewNormal(mu, &sigma, src) +} + +// ConditionNormal returns the Normal distribution that is the receiver conditioned +// on the input evidence. The returned multivariate normal has dimension +// n - len(observed), where n is the dimension of the original receiver. The updated +// mean and covariance are +// mu = mu_un + sigma_{ob,un}^T * sigma_{ob,ob}^-1 (v - mu_ob) +// sigma = sigma_{un,un} - sigma_{ob,un}^T * sigma_{ob,ob}^-1 * sigma_{ob,un} +// where mu_un and mu_ob are the original means of the unobserved and observed +// variables respectively, sigma_{un,un} is the unobserved subset of the covariance +// matrix, sigma_{ob,ob} is the observed subset of the covariance matrix, and +// sigma_{un,ob} are the cross terms. The elements of x_2 have been observed with +// values v. The dimension order is preserved during conditioning, so if the value +// of dimension 1 is observed, the returned normal represents dimensions {0, 2, ...} +// of the original Normal distribution. +// +// ConditionNormal returns {nil, false} if there is a failure during the update. +// Mathematically this is impossible, but can occur with finite precision arithmetic. 
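+//
+// For example, conditioning a 3-dimensional Normal on observed = []int{1} with
+// values = []float64{0.5} returns a 2-dimensional Normal over dimensions {0, 2}.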
+func (n *Normal) ConditionNormal(observed []int, values []float64, src rand.Source) (*Normal, bool) { + if len(observed) == 0 { + panic("normal: no observed value") + } + if len(observed) != len(values) { + panic(badInputLength) + } + for _, v := range observed { + if v < 0 || v >= n.Dim() { + panic("normal: observed value out of bounds") + } + } + + _, mu1, sigma11 := studentsTConditional(observed, values, math.Inf(1), n.mu, &n.sigma) + if mu1 == nil { + return nil, false + } + return NewNormal(mu1, sigma11, src) +} + +// CovarianceMatrix stores the covariance matrix of the distribution in dst. +// Upon return, the value at element {i, j} of the covariance matrix is equal +// to the covariance of the i^th and j^th variables. +// covariance(i, j) = E[(x_i - E[x_i])(x_j - E[x_j])] +// If the dst matrix is zero-sized it will be resized to the correct dimensions, +// otherwise dst must match the dimension of the receiver or CovarianceMatrix +// will panic. +func (n *Normal) CovarianceMatrix(dst *mat.SymDense) { + if dst.IsZero() { + *dst = *(dst.GrowSym(n.dim).(*mat.SymDense)) + } else if dst.Symmetric() != n.dim { + panic("normal: input matrix size mismatch") + } + dst.CopySym(&n.sigma) +} + +// Dim returns the dimension of the distribution. +func (n *Normal) Dim() int { + return n.dim +} + +// Entropy returns the differential entropy of the distribution. +func (n *Normal) Entropy() float64 { + return float64(n.dim)/2*(1+logTwoPi) + n.logSqrtDet +} + +// LogProb computes the log of the pdf of the point x. +func (n *Normal) LogProb(x []float64) float64 { + dim := n.dim + if len(x) != dim { + panic(badSizeMismatch) + } + return normalLogProb(x, n.mu, &n.chol, n.logSqrtDet) +} + +// NormalLogProb computes the log probability of the location x for a Normal +// distribution the given mean and Cholesky decomposition of the covariance matrix. +// NormalLogProb panics if len(x) is not equal to len(mu), or if len(mu) != chol.Size(). +// +// This function saves time and memory if the Cholesky decomposition is already +// available. Otherwise, the NewNormal function should be used. +func NormalLogProb(x, mu []float64, chol *mat.Cholesky) float64 { + dim := len(mu) + if len(x) != dim { + panic(badSizeMismatch) + } + if chol.Symmetric() != dim { + panic(badSizeMismatch) + } + logSqrtDet := 0.5 * chol.LogDet() + return normalLogProb(x, mu, chol, logSqrtDet) +} + +// normalLogProb is the same as NormalLogProb, but does not make size checks and +// additionally requires log(|Σ|^-0.5) +func normalLogProb(x, mu []float64, chol *mat.Cholesky, logSqrtDet float64) float64 { + dim := len(mu) + c := -0.5*float64(dim)*logTwoPi - logSqrtDet + dst := stat.Mahalanobis(mat.NewVecDense(dim, x), mat.NewVecDense(dim, mu), chol) + return c - 0.5*dst*dst +} + +// MarginalNormal returns the marginal distribution of the given input variables. +// That is, MarginalNormal returns +// p(x_i) = \int_{x_o} p(x_i | x_o) p(x_o) dx_o +// where x_i are the dimensions in the input, and x_o are the remaining dimensions. +// See https://en.wikipedia.org/wiki/Marginal_distribution for more information. +// +// The input src is passed to the call to NewNormal. +func (n *Normal) MarginalNormal(vars []int, src rand.Source) (*Normal, bool) { + newMean := make([]float64, len(vars)) + for i, v := range vars { + newMean[i] = n.mu[v] + } + var s mat.SymDense + s.SubsetSym(&n.sigma, vars) + return NewNormal(newMean, &s, src) +} + +// MarginalNormalSingle returns the marginal of the given input variable. 
+// That is, MarginalNormal returns +// p(x_i) = \int_{x_¬i} p(x_i | x_¬i) p(x_¬i) dx_¬i +// where i is the input index. +// See https://en.wikipedia.org/wiki/Marginal_distribution for more information. +// +// The input src is passed to the constructed distuv.Normal. +func (n *Normal) MarginalNormalSingle(i int, src rand.Source) distuv.Normal { + return distuv.Normal{ + Mu: n.mu[i], + Sigma: math.Sqrt(n.sigma.At(i, i)), + Src: src, + } +} + +// Mean returns the mean of the probability distribution at x. If the +// input argument is nil, a new slice will be allocated, otherwise the result +// will be put in-place into the receiver. +func (n *Normal) Mean(x []float64) []float64 { + x = reuseAs(x, n.dim) + copy(x, n.mu) + return x +} + +// Prob computes the value of the probability density function at x. +func (n *Normal) Prob(x []float64) float64 { + return math.Exp(n.LogProb(x)) +} + +// Quantile returns the multi-dimensional inverse cumulative distribution function. +// If x is nil, a new slice will be allocated and returned. If x is non-nil, +// len(x) must equal len(p) and the quantile will be stored in-place into x. +// All of the values of p must be between 0 and 1, inclusive, or Quantile will panic. +func (n *Normal) Quantile(x, p []float64) []float64 { + dim := n.Dim() + if len(p) != dim { + panic(badInputLength) + } + if x == nil { + x = make([]float64, dim) + } + if len(x) != len(p) { + panic(badInputLength) + } + + // Transform to a standard normal and then transform to a multivariate Gaussian. + tmp := make([]float64, len(x)) + for i, v := range p { + tmp[i] = distuv.UnitNormal.Quantile(v) + } + n.TransformNormal(x, tmp) + return x +} + +// Rand generates a random number according to the distributon. +// If the input slice is nil, new memory is allocated, otherwise the result is stored +// in place. +func (n *Normal) Rand(x []float64) []float64 { + return NormalRand(x, n.mu, &n.chol, n.src) +} + +// NormalRand generates a random number with the given mean and Cholesky +// decomposition of the covariance matrix. +// If x is nil, new memory is allocated and returned, otherwise the result is stored +// in place into x. NormalRand panics if x is non-nil and not equal to len(mu), +// or if len(mu) != chol.Size(). +// +// This function saves time and memory if the Cholesky decomposition is already +// available. Otherwise, the NewNormal function should be used. +func NormalRand(x, mean []float64, chol *mat.Cholesky, src rand.Source) []float64 { + x = reuseAs(x, len(mean)) + if len(mean) != chol.Symmetric() { + panic(badInputLength) + } + if src == nil { + for i := range x { + x[i] = rand.NormFloat64() + } + } else { + rnd := rand.New(src) + for i := range x { + x[i] = rnd.NormFloat64() + } + } + transformNormal(x, x, mean, chol) + return x +} + +// ScoreInput returns the gradient of the log-probability with respect to the +// input x. That is, ScoreInput computes +// ∇_x log(p(x)) +// If score is nil, a new slice will be allocated and returned. If score is of +// length the dimension of Normal, then the result will be put in-place into score. +// If neither of these is true, ScoreInput will panic. +func (n *Normal) ScoreInput(score, x []float64) []float64 { + // Normal log probability is + // c - 0.5*(x-μ)' Σ^-1 (x-μ). + // So the derivative is just + // -Σ^-1 (x-μ). 
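+	// The SolveVecTo call below applies Σ^-1 to (x-μ) using the stored Cholesky
+	// factorization, and the final Scale flips the sign.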
+ if len(x) != n.Dim() { + panic(badInputLength) + } + if score == nil { + score = make([]float64, len(x)) + } + if len(score) != len(x) { + panic(badSizeMismatch) + } + tmp := make([]float64, len(x)) + copy(tmp, x) + floats.Sub(tmp, n.mu) + + n.chol.SolveVecTo(mat.NewVecDense(len(score), score), mat.NewVecDense(len(tmp), tmp)) + floats.Scale(-1, score) + return score +} + +// SetMean changes the mean of the normal distribution. SetMean panics if len(mu) +// does not equal the dimension of the normal distribution. +func (n *Normal) SetMean(mu []float64) { + if len(mu) != n.Dim() { + panic(badSizeMismatch) + } + copy(n.mu, mu) +} + +// TransformNormal transforms the vector, normal, generated from a standard +// multidimensional normal into a vector that has been generated under the +// distribution of the receiver. +// +// If dst is non-nil, the result will be stored into dst, otherwise a new slice +// will be allocated. TransformNormal will panic if the length of normal is not +// the dimension of the receiver, or if dst is non-nil and len(dist) != len(normal). +func (n *Normal) TransformNormal(dst, normal []float64) []float64 { + if len(normal) != n.dim { + panic(badInputLength) + } + dst = reuseAs(dst, n.dim) + if len(dst) != len(normal) { + panic(badInputLength) + } + transformNormal(dst, normal, n.mu, &n.chol) + return dst +} + +// transformNormal performs the same operation as Normal.TransformNormal except +// no safety checks are performed and all memory must be provided. +func transformNormal(dst, normal, mu []float64, chol *mat.Cholesky) []float64 { + dim := len(mu) + dstVec := mat.NewVecDense(dim, dst) + srcVec := mat.NewVecDense(dim, normal) + // If dst and normal are the same slice, make them the same Vector otherwise + // mat complains about being tricky. + if &normal[0] == &dst[0] { + srcVec = dstVec + } + dstVec.MulVec(chol.RawU().T(), srcVec) + floats.Add(dst, mu) + return dst +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/statdist.go b/vendor/gonum.org/v1/gonum/stat/distmv/statdist.go new file mode 100644 index 0000000..0ee1ab3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/statdist.go @@ -0,0 +1,353 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +import ( + "math" + + "gonum.org/v1/gonum/bound" + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/mathext" + "gonum.org/v1/gonum/stat" +) + +// Bhattacharyya is a type for computing the Bhattacharyya distance between +// probability distributions. +// +// The Bhattacharyya distance is defined as +// D_B = -ln(BC(l,r)) +// BC = \int_-∞^∞ (p(x)q(x))^(1/2) dx +// Where BC is known as the Bhattacharyya coefficient. +// The Bhattacharyya distance is related to the Hellinger distance by +// H(l,r) = sqrt(1-BC(l,r)) +// For more information, see +// https://en.wikipedia.org/wiki/Bhattacharyya_distance +type Bhattacharyya struct{} + +// DistNormal computes the Bhattacharyya distance between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. 
+// +// For Normal distributions, the Bhattacharyya distance is +// Σ = (Σ_l + Σ_r)/2 +// D_B = (1/8)*(μ_l - μ_r)^T*Σ^-1*(μ_l - μ_r) + (1/2)*ln(det(Σ)/(det(Σ_l)*det(Σ_r))^(1/2)) +func (Bhattacharyya) DistNormal(l, r *Normal) float64 { + dim := l.Dim() + if dim != r.Dim() { + panic(badSizeMismatch) + } + + var sigma mat.SymDense + sigma.AddSym(&l.sigma, &r.sigma) + sigma.ScaleSym(0.5, &sigma) + + var chol mat.Cholesky + chol.Factorize(&sigma) + + mahalanobis := stat.Mahalanobis(mat.NewVecDense(dim, l.mu), mat.NewVecDense(dim, r.mu), &chol) + mahalanobisSq := mahalanobis * mahalanobis + + dl := l.chol.LogDet() + dr := r.chol.LogDet() + ds := chol.LogDet() + + return 0.125*mahalanobisSq + 0.5*ds - 0.25*dl - 0.25*dr +} + +// DistUniform computes the Bhattacharyya distance between uniform distributions l and r. +// The dimensions of the input distributions must match or DistUniform will panic. +func (Bhattacharyya) DistUniform(l, r *Uniform) float64 { + if len(l.bounds) != len(r.bounds) { + panic(badSizeMismatch) + } + // BC = \int \sqrt(p(x)q(x)), which for uniform distributions is a constant + // over the volume where both distributions have positive probability. + // Compute the overlap and the value of sqrt(p(x)q(x)). The entropy is the + // negative log probability of the distribution (use instead of LogProb so + // it is not necessary to construct an x value). + // + // BC = volume * sqrt(p(x)q(x)) + // logBC = log(volume) + 0.5*(logP + logQ) + // D_B = -logBC + return -unifLogVolOverlap(l.bounds, r.bounds) + 0.5*(l.Entropy()+r.Entropy()) +} + +// unifLogVolOverlap computes the log of the volume of the hyper-rectangle where +// both uniform distributions have positive probability. +func unifLogVolOverlap(b1, b2 []bound.Bound) float64 { + var logVolOverlap float64 + for dim, v1 := range b1 { + v2 := b2[dim] + // If the surfaces don't overlap, then the volume is 0 + if v1.Max <= v2.Min || v2.Max <= v1.Min { + return math.Inf(-1) + } + vol := math.Min(v1.Max, v2.Max) - math.Max(v1.Min, v2.Min) + logVolOverlap += math.Log(vol) + } + return logVolOverlap +} + +// CrossEntropy is a type for computing the cross-entropy between probability +// distributions. +// +// The cross-entropy is defined as +// - \int_x l(x) log(r(x)) dx = KL(l || r) + H(l) +// where KL is the Kullback-Leibler divergence and H is the entropy. +// For more information, see +// https://en.wikipedia.org/wiki/Cross_entropy +type CrossEntropy struct{} + +// DistNormal returns the cross-entropy between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. +func (CrossEntropy) DistNormal(l, r *Normal) float64 { + if l.Dim() != r.Dim() { + panic(badSizeMismatch) + } + kl := KullbackLeibler{}.DistNormal(l, r) + return kl + l.Entropy() +} + +// Hellinger is a type for computing the Hellinger distance between probability +// distributions. +// +// The Hellinger distance is defined as +// H^2(l,r) = 1/2 * int_x (\sqrt(l(x)) - \sqrt(r(x)))^2 dx +// and is bounded between 0 and 1. Note the above formula defines the squared +// Hellinger distance, while this returns the Hellinger distance itself. +// The Hellinger distance is related to the Bhattacharyya distance by +// H^2 = 1 - exp(-D_B) +// For more information, see +// https://en.wikipedia.org/wiki/Hellinger_distance +type Hellinger struct{} + +// DistNormal returns the Hellinger distance between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. 
+// +// See the documentation of Bhattacharyya.DistNormal for the formula for Normal +// distributions. +func (Hellinger) DistNormal(l, r *Normal) float64 { + if l.Dim() != r.Dim() { + panic(badSizeMismatch) + } + db := Bhattacharyya{}.DistNormal(l, r) + bc := math.Exp(-db) + return math.Sqrt(1 - bc) +} + +// KullbackLeibler is a type for computing the Kullback-Leibler divergence from l to r. +// +// The Kullback-Leibler divergence is defined as +// D_KL(l || r ) = \int_x p(x) log(p(x)/q(x)) dx +// Note that the Kullback-Leibler divergence is not symmetric with respect to +// the order of the input arguments. +type KullbackLeibler struct{} + +// DistDirichlet returns the Kullback-Leibler divergence between Dirichlet +// distributions l and r. The dimensions of the input distributions must match +// or DistDirichlet will panic. +// +// For two Dirichlet distributions, the KL divergence is computed as +// D_KL(l || r) = log Γ(α_0_l) - \sum_i log Γ(α_i_l) - log Γ(α_0_r) + \sum_i log Γ(α_i_r) +// + \sum_i (α_i_l - α_i_r)(ψ(α_i_l)- ψ(α_0_l)) +// Where Γ is the gamma function, ψ is the digamma function, and α_0 is the +// sum of the Dirichlet parameters. +func (KullbackLeibler) DistDirichlet(l, r *Dirichlet) float64 { + // http://bariskurt.com/kullback-leibler-divergence-between-two-dirichlet-and-beta-distributions/ + if l.Dim() != r.Dim() { + panic(badSizeMismatch) + } + l0, _ := math.Lgamma(l.sumAlpha) + r0, _ := math.Lgamma(r.sumAlpha) + dl := mathext.Digamma(l.sumAlpha) + + var l1, r1, c float64 + for i, al := range l.alpha { + ar := r.alpha[i] + vl, _ := math.Lgamma(al) + l1 += vl + vr, _ := math.Lgamma(ar) + r1 += vr + c += (al - ar) * (mathext.Digamma(al) - dl) + } + return l0 - l1 - r0 + r1 + c +} + +// DistNormal returns the KullbackLeibler divergence between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. +// +// For two normal distributions, the KL divergence is computed as +// D_KL(l || r) = 0.5*[ln(|Σ_r|) - ln(|Σ_l|) + (μ_l - μ_r)^T*Σ_r^-1*(μ_l - μ_r) + tr(Σ_r^-1*Σ_l)-d] +func (KullbackLeibler) DistNormal(l, r *Normal) float64 { + dim := l.Dim() + if dim != r.Dim() { + panic(badSizeMismatch) + } + + mahalanobis := stat.Mahalanobis(mat.NewVecDense(dim, l.mu), mat.NewVecDense(dim, r.mu), &r.chol) + mahalanobisSq := mahalanobis * mahalanobis + + // TODO(btracey): Optimize where there is a SolveCholeskySym + // TODO(btracey): There may be a more efficient way to just compute the trace + // Compute tr(Σ_r^-1*Σ_l) using the fact that Σ_l = U^T * U + var u mat.TriDense + l.chol.UTo(&u) + var m mat.Dense + err := r.chol.SolveTo(&m, u.T()) + if err != nil { + return math.NaN() + } + m.Mul(&m, &u) + tr := mat.Trace(&m) + + return r.logSqrtDet - l.logSqrtDet + 0.5*(mahalanobisSq+tr-float64(l.dim)) +} + +// DistUniform returns the KullbackLeibler divergence between uniform distributions +// l and r. The dimensions of the input distributions must match or DistUniform +// will panic. +func (KullbackLeibler) DistUniform(l, r *Uniform) float64 { + bl := l.Bounds(nil) + br := r.Bounds(nil) + if len(bl) != len(br) { + panic(badSizeMismatch) + } + + // The KL is ∞ if l is not completely contained within r, because then + // r(x) is zero when l(x) is non-zero for some x. + contained := true + for i, v := range bl { + if v.Min < br[i].Min || br[i].Max < v.Max { + contained = false + break + } + } + if !contained { + return math.Inf(1) + } + + // The KL divergence is finite. 
+ // + // KL defines 0*ln(0) = 0, so there is no contribution to KL where l(x) = 0. + // Inside the region, l(x) and r(x) are constant (uniform distribution), and + // this constant is integrated over l(x), which integrates out to one. + // The entropy is -log(p(x)). + logPx := -l.Entropy() + logQx := -r.Entropy() + return logPx - logQx +} + +// Renyi is a type for computing the Rényi divergence of order α from l to r. +// +// The Rényi divergence with α > 0, α ≠ 1 is defined as +// D_α(l || r) = 1/(α-1) log(\int_-∞^∞ l(x)^α r(x)^(1-α)dx) +// The Rényi divergence has special forms for α = 0 and α = 1. This type does +// not implement α = ∞. For α = 0, +// D_0(l || r) = -log \int_-∞^∞ r(x)1{p(x)>0} dx +// that is, the negative log probability under r(x) that l(x) > 0. +// When α = 1, the Rényi divergence is equal to the Kullback-Leibler divergence. +// The Rényi divergence is also equal to half the Bhattacharyya distance when α = 0.5. +// +// The parameter α must be in 0 ≤ α < ∞ or the distance functions will panic. +type Renyi struct { + Alpha float64 +} + +// DistNormal returns the Rényi divergence between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. +// +// For two normal distributions, the Rényi divergence is computed as +// Σ_α = (1-α) Σ_l + αΣ_r +// D_α(l||r) = α/2 * (μ_l - μ_r)'*Σ_α^-1*(μ_l - μ_r) + 1/(2(α-1))*ln(|Σ_λ|/(|Σ_l|^(1-α)*|Σ_r|^α)) +// +// For a more nicely formatted version of the formula, see Eq. 15 of +// Kolchinsky, Artemy, and Brendan D. Tracey. "Estimating Mixture Entropy +// with Pairwise Distances." arXiv preprint arXiv:1706.02419 (2017). +// Note that the this formula is for Chernoff divergence, which differs from +// Rényi divergence by a factor of 1-α. Also be aware that most sources in +// the literature report this formula incorrectly. +func (renyi Renyi) DistNormal(l, r *Normal) float64 { + if renyi.Alpha < 0 { + panic("renyi: alpha < 0") + } + dim := l.Dim() + if dim != r.Dim() { + panic(badSizeMismatch) + } + if renyi.Alpha == 0 { + return 0 + } + if renyi.Alpha == 1 { + return KullbackLeibler{}.DistNormal(l, r) + } + + logDetL := l.chol.LogDet() + logDetR := r.chol.LogDet() + + // Σ_α = (1-α)Σ_l + αΣ_r. + sigA := mat.NewSymDense(dim, nil) + for i := 0; i < dim; i++ { + for j := i; j < dim; j++ { + v := (1-renyi.Alpha)*l.sigma.At(i, j) + renyi.Alpha*r.sigma.At(i, j) + sigA.SetSym(i, j, v) + } + } + + var chol mat.Cholesky + ok := chol.Factorize(sigA) + if !ok { + return math.NaN() + } + logDetA := chol.LogDet() + + mahalanobis := stat.Mahalanobis(mat.NewVecDense(dim, l.mu), mat.NewVecDense(dim, r.mu), &chol) + mahalanobisSq := mahalanobis * mahalanobis + + return (renyi.Alpha/2)*mahalanobisSq + 1/(2*(1-renyi.Alpha))*(logDetA-(1-renyi.Alpha)*logDetL-renyi.Alpha*logDetR) +} + +// Wasserstein is a type for computing the Wasserstein distance between two +// probability distributions. +// +// The Wasserstein distance is defined as +// W(l,r) := inf 𝔼(||X-Y||_2^2)^1/2 +// For more information, see +// https://en.wikipedia.org/wiki/Wasserstein_metric +type Wasserstein struct{} + +// DistNormal returns the Wasserstein distance between normal distributions l and r. +// The dimensions of the input distributions must match or DistNormal will panic. 
+// +// The Wasserstein distance for Normal distributions is +// d^2 = ||m_l - m_r||_2^2 + Tr(Σ_l + Σ_r - 2(Σ_l^(1/2)*Σ_r*Σ_l^(1/2))^(1/2)) +// For more information, see +// http://djalil.chafai.net/blog/2010/04/30/wasserstein-distance-between-two-gaussians/ +func (Wasserstein) DistNormal(l, r *Normal) float64 { + dim := l.Dim() + if dim != r.Dim() { + panic(badSizeMismatch) + } + + d := floats.Distance(l.mu, r.mu, 2) + d = d * d + + // Compute Σ_l^(1/2) + var ssl mat.SymDense + ssl.PowPSD(&l.sigma, 0.5) + // Compute Σ_l^(1/2)*Σ_r*Σ_l^(1/2) + var mean mat.Dense + mean.Mul(&ssl, &r.sigma) + mean.Mul(&mean, &ssl) + + // Reinterpret as symdense, and take Σ^(1/2) + meanSym := mat.NewSymDense(dim, mean.RawMatrix().Data) + ssl.PowPSD(meanSym, 0.5) + + tr := mat.Trace(&r.sigma) + tl := mat.Trace(&l.sigma) + tm := mat.Trace(&ssl) + + return d + tl + tr - 2*tm +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/studentst.go b/vendor/gonum.org/v1/gonum/stat/distmv/studentst.go new file mode 100644 index 0000000..6b2f46f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/studentst.go @@ -0,0 +1,354 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +import ( + "math" + "sort" + + "golang.org/x/exp/rand" + "golang.org/x/tools/container/intsets" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat" + "gonum.org/v1/gonum/stat/distuv" +) + +// StudentsT is a multivariate Student's T distribution. It is a distribution over +// ℝ^n with the probability density +// p(y) = (Γ((ν+n)/2) / Γ(ν/2)) * (νπ)^(-n/2) * |Ʃ|^(-1/2) * +// (1 + 1/ν * (y-μ)^T * Ʃ^-1 * (y-μ))^(-(ν+n)/2) +// where ν is a scalar greater than 2, μ is a vector in ℝ^n, and Ʃ is an n×n +// symmetric positive definite matrix. +// +// In this distribution, ν sets the spread of the distribution, similar to +// the degrees of freedom in a univariate Student's T distribution. As ν → ∞, +// the distribution approaches a multi-variate normal distribution. +// μ is the mean of the distribution, and the covariance is ν/(ν-2)*Ʃ. +// +// See https://en.wikipedia.org/wiki/Student%27s_t-distribution and +// http://users.isy.liu.se/en/rt/roth/student.pdf for more information. +type StudentsT struct { + nu float64 + mu []float64 + // If src is altered, rnd must be updated. + src rand.Source + rnd *rand.Rand + + sigma mat.SymDense // only stored if needed + + chol mat.Cholesky + lower mat.TriDense + logSqrtDet float64 + dim int +} + +// NewStudentsT creates a new StudentsT with the given nu, mu, and sigma +// parameters. +// +// NewStudentsT panics if len(mu) == 0, or if len(mu) != sigma.Symmetric(). If +// the covariance matrix is not positive-definite, nil is returned and ok is false. 
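+//
+// A minimal construction sketch; the parameter values below are illustrative
+// assumptions only:
+//  sigma := mat.NewSymDense(2, []float64{1, 0.2, 0.2, 1})
+//  dist, ok := NewStudentsT([]float64{0, 0}, sigma, 5, nil)
+//  if !ok {
+//  	// sigma was not positive definite.
+//  }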
+func NewStudentsT(mu []float64, sigma mat.Symmetric, nu float64, src rand.Source) (dist *StudentsT, ok bool) { + if len(mu) == 0 { + panic(badZeroDimension) + } + dim := sigma.Symmetric() + if dim != len(mu) { + panic(badSizeMismatch) + } + + s := &StudentsT{ + nu: nu, + mu: make([]float64, dim), + dim: dim, + src: src, + } + if src != nil { + s.rnd = rand.New(src) + } + copy(s.mu, mu) + + ok = s.chol.Factorize(sigma) + if !ok { + return nil, false + } + s.sigma = *mat.NewSymDense(dim, nil) + s.sigma.CopySym(sigma) + s.chol.LTo(&s.lower) + s.logSqrtDet = 0.5 * s.chol.LogDet() + return s, true +} + +// ConditionStudentsT returns the Student's T distribution that is the receiver +// conditioned on the input evidence, and the success of the operation. +// The returned Student's T has dimension +// n - len(observed), where n is the dimension of the original receiver. +// The dimension order is preserved during conditioning, so if the value +// of dimension 1 is observed, the returned normal represents dimensions {0, 2, ...} +// of the original Student's T distribution. +// +// ok indicates whether there was a failure during the update. If ok is false +// the operation failed and dist is not usable. +// Mathematically this is impossible, but can occur with finite precision arithmetic. +func (s *StudentsT) ConditionStudentsT(observed []int, values []float64, src rand.Source) (dist *StudentsT, ok bool) { + if len(observed) == 0 { + panic("studentst: no observed value") + } + if len(observed) != len(values) { + panic(badInputLength) + } + + for _, v := range observed { + if v < 0 || v >= s.dim { + panic("studentst: observed value out of bounds") + } + } + + newNu, newMean, newSigma := studentsTConditional(observed, values, s.nu, s.mu, &s.sigma) + if newMean == nil { + return nil, false + } + + return NewStudentsT(newMean, newSigma, newNu, src) + +} + +// studentsTConditional updates a Student's T distribution based on the observed samples +// (see documentation for the public function). The Gaussian conditional update +// is treated as a special case when nu == math.Inf(1). +func studentsTConditional(observed []int, values []float64, nu float64, mu []float64, sigma mat.Symmetric) (newNu float64, newMean []float64, newSigma *mat.SymDense) { + dim := len(mu) + ob := len(observed) + + unobserved := findUnob(observed, dim) + + unob := len(unobserved) + if unob == 0 { + panic("stat: all dimensions observed") + } + + mu1 := make([]float64, unob) + for i, v := range unobserved { + mu1[i] = mu[v] + } + mu2 := make([]float64, ob) // really v - mu2 + for i, v := range observed { + mu2[i] = values[i] - mu[v] + } + + var sigma11, sigma22 mat.SymDense + sigma11.SubsetSym(sigma, unobserved) + sigma22.SubsetSym(sigma, observed) + + sigma21 := mat.NewDense(ob, unob, nil) + for i, r := range observed { + for j, c := range unobserved { + v := sigma.At(r, c) + sigma21.Set(i, j, v) + } + } + + var chol mat.Cholesky + ok := chol.Factorize(&sigma22) + if !ok { + return math.NaN(), nil, nil + } + + // Compute mu_1 + sigma_{2,1}^T * sigma_{2,2}^-1 (v - mu_2). + v := mat.NewVecDense(ob, mu2) + var tmp, tmp2 mat.VecDense + err := chol.SolveVecTo(&tmp, v) + if err != nil { + return math.NaN(), nil, nil + } + tmp2.MulVec(sigma21.T(), &tmp) + + for i := range mu1 { + mu1[i] += tmp2.At(i, 0) + } + + // Compute tmp4 = sigma_{2,1}^T * sigma_{2,2}^-1 * sigma_{2,1}. + // TODO(btracey): Should this be a method of SymDense? 
+ var tmp3, tmp4 mat.Dense + err = chol.SolveTo(&tmp3, sigma21) + if err != nil { + return math.NaN(), nil, nil + } + tmp4.Mul(sigma21.T(), &tmp3) + + // Compute sigma_{1,1} - tmp4 + // TODO(btracey): If tmp4 can constructed with a method, then this can be + // replaced with SubSym. + for i := 0; i < len(unobserved); i++ { + for j := i; j < len(unobserved); j++ { + v := sigma11.At(i, j) + sigma11.SetSym(i, j, v-tmp4.At(i, j)) + } + } + + // The computed variables are accurate for a Normal. + if math.IsInf(nu, 1) { + return nu, mu1, &sigma11 + } + + // Compute beta = (v - mu_2)^T * sigma_{2,2}^-1 * (v - mu_2)^T + beta := mat.Dot(v, &tmp) + + // Scale the covariance matrix + sigma11.ScaleSym((nu+beta)/(nu+float64(ob)), &sigma11) + + return nu + float64(ob), mu1, &sigma11 +} + +// findUnob returns the unobserved variables (the complementary set to observed). +// findUnob panics if any value repeated in observed. +func findUnob(observed []int, dim int) (unobserved []int) { + var setOb intsets.Sparse + for _, v := range observed { + setOb.Insert(v) + } + var setAll intsets.Sparse + for i := 0; i < dim; i++ { + setAll.Insert(i) + } + var setUnob intsets.Sparse + setUnob.Difference(&setAll, &setOb) + unobserved = setUnob.AppendTo(nil) + sort.Ints(unobserved) + return unobserved +} + +// CovarianceMatrix calculates the covariance matrix of the distribution, +// storing the result in dst. Upon return, the value at element {i, j} of the +// covariance matrix is equal to the covariance of the i^th and j^th variables. +// covariance(i, j) = E[(x_i - E[x_i])(x_j - E[x_j])] +// If the dst matrix is zero-sized it will be resized to the correct dimensions, +// otherwise dst must match the dimension of the receiver or CovarianceMatrix +// will panic. +func (st *StudentsT) CovarianceMatrix(dst *mat.SymDense) { + if dst.IsZero() { + *dst = *(dst.GrowSym(st.dim).(*mat.SymDense)) + } else if dst.Symmetric() != st.dim { + panic("studentst: input matrix size mismatch") + } + dst.CopySym(&st.sigma) + dst.ScaleSym(st.nu/(st.nu-2), dst) +} + +// Dim returns the dimension of the distribution. +func (s *StudentsT) Dim() int { + return s.dim +} + +// LogProb computes the log of the pdf of the point x. +func (s *StudentsT) LogProb(y []float64) float64 { + if len(y) != s.dim { + panic(badInputLength) + } + + nu := s.nu + n := float64(s.dim) + lg1, _ := math.Lgamma((nu + n) / 2) + lg2, _ := math.Lgamma(nu / 2) + + t1 := lg1 - lg2 - n/2*math.Log(nu*math.Pi) - s.logSqrtDet + + mahal := stat.Mahalanobis(mat.NewVecDense(len(y), y), mat.NewVecDense(len(s.mu), s.mu), &s.chol) + mahal *= mahal + return t1 - ((nu+n)/2)*math.Log(1+mahal/nu) +} + +// MarginalStudentsT returns the marginal distribution of the given input variables, +// and the success of the operation. +// That is, MarginalStudentsT returns +// p(x_i) = \int_{x_o} p(x_i | x_o) p(x_o) dx_o +// where x_i are the dimensions in the input, and x_o are the remaining dimensions. +// See https://en.wikipedia.org/wiki/Marginal_distribution for more information. +// +// The input src is passed to the created StudentsT. +// +// ok indicates whether there was a failure during the marginalization. If ok is false +// the operation failed and dist is not usable. +// Mathematically this is impossible, but can occur with finite precision arithmetic. 
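+//
+// A minimal sketch that keeps only the first and third dimensions of a
+// hypothetical distribution s (the indices here are illustrative assumptions):
+//  marg, ok := s.MarginalStudentsT([]int{0, 2}, nil)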
+func (s *StudentsT) MarginalStudentsT(vars []int, src rand.Source) (dist *StudentsT, ok bool) { + newMean := make([]float64, len(vars)) + for i, v := range vars { + newMean[i] = s.mu[v] + } + var newSigma mat.SymDense + newSigma.SubsetSym(&s.sigma, vars) + return NewStudentsT(newMean, &newSigma, s.nu, src) +} + +// MarginalStudentsTSingle returns the marginal distribution of the given input variable. +// That is, MarginalStudentsTSingle returns +// p(x_i) = \int_{x_o} p(x_i | x_o) p(x_o) dx_o +// where i is the input index, and x_o are the remaining dimensions. +// See https://en.wikipedia.org/wiki/Marginal_distribution for more information. +// +// The input src is passed to the call to NewStudentsT. +func (s *StudentsT) MarginalStudentsTSingle(i int, src rand.Source) distuv.StudentsT { + return distuv.StudentsT{ + Mu: s.mu[i], + Sigma: math.Sqrt(s.sigma.At(i, i)), + Nu: s.nu, + Src: src, + } +} + +// TODO(btracey): Implement marginal single. Need to modify univariate StudentsT +// to be three-parameter. + +// Mean returns the mean of the probability distribution at x. If the +// input argument is nil, a new slice will be allocated, otherwise the result +// will be put in-place into the receiver. +func (s *StudentsT) Mean(x []float64) []float64 { + x = reuseAs(x, s.dim) + copy(x, s.mu) + return x +} + +// Nu returns the degrees of freedom parameter of the distribution. +func (s *StudentsT) Nu() float64 { + return s.nu +} + +// Prob computes the value of the probability density function at x. +func (s *StudentsT) Prob(y []float64) float64 { + return math.Exp(s.LogProb(y)) +} + +// Rand generates a random number according to the distributon. +// If the input slice is nil, new memory is allocated, otherwise the result is stored +// in place. +func (s *StudentsT) Rand(x []float64) []float64 { + // If Y is distributed according to N(0,Sigma), and U is chi^2 with + // parameter ν, then + // X = mu + Y * sqrt(nu / U) + // X is distributed according to this distribution. + + // Generate Y. + x = reuseAs(x, s.dim) + tmp := make([]float64, s.dim) + if s.rnd == nil { + for i := range x { + tmp[i] = rand.NormFloat64() + } + } else { + for i := range x { + tmp[i] = s.rnd.NormFloat64() + } + } + xVec := mat.NewVecDense(s.dim, x) + tmpVec := mat.NewVecDense(s.dim, tmp) + xVec.MulVec(&s.lower, tmpVec) + + u := distuv.ChiSquared{K: s.nu, Src: s.src}.Rand() + floats.Scale(math.Sqrt(s.nu/u), x) + + floats.Add(x, s.mu) + return x +} diff --git a/vendor/gonum.org/v1/gonum/stat/distmv/uniform.go b/vendor/gonum.org/v1/gonum/stat/distmv/uniform.go new file mode 100644 index 0000000..7bf0d45 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distmv/uniform.go @@ -0,0 +1,198 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distmv + +import ( + "math" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/bound" +) + +// Uniform represents a multivariate uniform distribution. +type Uniform struct { + bounds []bound.Bound + dim int + rnd *rand.Rand +} + +// NewUniform creates a new uniform distribution with the given bounds. 
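+//
+// A minimal sketch of constructing a two-dimensional box; the bounds are
+// illustrative assumptions only:
+//  u := NewUniform([]bound.Bound{{Min: 0, Max: 2}, {Min: -1, Max: 1}}, nil)
+//  vol := math.Exp(u.Entropy()) // 4, the volume of the box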
+func NewUniform(bnds []bound.Bound, src rand.Source) *Uniform { + dim := len(bnds) + if dim == 0 { + panic(badZeroDimension) + } + for _, b := range bnds { + if b.Max < b.Min { + panic("uniform: maximum less than minimum") + } + } + u := &Uniform{ + bounds: make([]bound.Bound, dim), + dim: dim, + } + if src != nil { + u.rnd = rand.New(src) + } + for i, b := range bnds { + u.bounds[i].Min = b.Min + u.bounds[i].Max = b.Max + } + return u +} + +// NewUnitUniform creates a new Uniform distribution over the dim-dimensional +// unit hypercube. That is, a uniform distribution where each dimension has +// Min = 0 and Max = 1. +func NewUnitUniform(dim int, src rand.Source) *Uniform { + if dim <= 0 { + panic(nonPosDimension) + } + bounds := make([]bound.Bound, dim) + for i := range bounds { + bounds[i].Min = 0 + bounds[i].Max = 1 + } + u := Uniform{ + bounds: bounds, + dim: dim, + } + if src != nil { + u.rnd = rand.New(src) + } + return &u +} + +// Bounds returns the bounds on the variables of the distribution. If the input +// is nil, a new slice is allocated and returned. If the input is non-nil, then +// the bounds are stored in-place into the input argument, and Bounds will panic +// if len(bounds) != u.Dim(). +func (u *Uniform) Bounds(bounds []bound.Bound) []bound.Bound { + if bounds == nil { + bounds = make([]bound.Bound, u.Dim()) + } + if len(bounds) != u.Dim() { + panic(badInputLength) + } + copy(bounds, u.bounds) + return bounds +} + +// CDF returns the multidimensional cumulative distribution function of the +// probability distribution at the point x. If p is non-nil, the CDF is stored +// in-place into the first argument, otherwise a new slice is allocated and +// returned. +// +// CDF will panic if len(x) is not equal to the dimension of the distribution, +// or if p is non-nil and len(p) is not equal to the dimension of the distribution. +func (u *Uniform) CDF(p, x []float64) []float64 { + if len(x) != u.dim { + panic(badSizeMismatch) + } + if p == nil { + p = make([]float64, u.dim) + } + if len(p) != u.dim { + panic(badSizeMismatch) + } + for i, v := range x { + if v < u.bounds[i].Min { + p[i] = 0 + } else if v > u.bounds[i].Max { + p[i] = 1 + } else { + p[i] = (v - u.bounds[i].Min) / (u.bounds[i].Max - u.bounds[i].Min) + } + } + return p +} + +// Dim returns the dimension of the distribution. +func (u *Uniform) Dim() int { + return u.dim +} + +// Entropy returns the differential entropy of the distribution. +func (u *Uniform) Entropy() float64 { + // Entropy is log of the volume. + var logVol float64 + for _, b := range u.bounds { + logVol += math.Log(b.Max - b.Min) + } + return logVol +} + +// LogProb computes the log of the pdf of the point x. +func (u *Uniform) LogProb(x []float64) float64 { + dim := u.dim + if len(x) != dim { + panic(badSizeMismatch) + } + var logprob float64 + for i, b := range u.bounds { + if x[i] < b.Min || x[i] > b.Max { + return math.Inf(-1) + } + logprob -= math.Log(b.Max - b.Min) + } + return logprob +} + +// Mean returns the mean of the probability distribution at x. If the +// input argument is nil, a new slice will be allocated, otherwise the result +// will be put in-place into the receiver. +func (u *Uniform) Mean(x []float64) []float64 { + x = reuseAs(x, u.dim) + for i, b := range u.bounds { + x[i] = (b.Max + b.Min) / 2 + } + return x +} + +// Prob computes the value of the probability density function at x. 
+func (u *Uniform) Prob(x []float64) float64 {
+	return math.Exp(u.LogProb(x))
+}
+
+// Rand generates a random number according to the distribution.
+// If the input slice is nil, new memory is allocated, otherwise the result is stored
+// in place.
+func (u *Uniform) Rand(x []float64) []float64 {
+	x = reuseAs(x, u.dim)
+	if u.rnd == nil {
+		for i, b := range u.bounds {
+			x[i] = rand.Float64()*(b.Max-b.Min) + b.Min
+		}
+		return x
+	}
+	for i, b := range u.bounds {
+		x[i] = u.rnd.Float64()*(b.Max-b.Min) + b.Min
+	}
+	return x
+}
+
+// Quantile returns the multi-dimensional inverse cumulative distribution function.
+// len(p) must equal the dimension of the distribution. If x is nil, a new slice
+// will be allocated and returned, otherwise len(x) must equal len(p) and the
+// quantile will be stored in-place into x. All of the values of p must be
+// between 0 and 1, or Quantile will panic.
+func (u *Uniform) Quantile(x, p []float64) []float64 {
+	if len(p) != u.dim {
+		panic(badSizeMismatch)
+	}
+	if x == nil {
+		x = make([]float64, u.dim)
+	}
+	if len(x) != u.dim {
+		panic(badSizeMismatch)
+	}
+	for i, v := range p {
+		if v < 0 || v > 1 {
+			panic(badQuantile)
+		}
+		x[i] = v*(u.bounds[i].Max-u.bounds[i].Min) + u.bounds[i].Min
+	}
+	return x
+}
diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/bernoulli.go b/vendor/gonum.org/v1/gonum/stat/distuv/bernoulli.go
new file mode 100644
index 0000000..c0dd7ec
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/stat/distuv/bernoulli.go
@@ -0,0 +1,132 @@
+// Copyright ©2016 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package distuv
+
+import (
+	"math"
+
+	"golang.org/x/exp/rand"
+)
+
+// Bernoulli represents a random variable whose value is 1 with probability P and
+// value of zero with probability 1-P. The value of P must be between 0 and 1.
+// More information at https://en.wikipedia.org/wiki/Bernoulli_distribution.
+type Bernoulli struct {
+	P   float64
+	Src rand.Source
+}
+
+// CDF computes the value of the cumulative density function at x.
+func (b Bernoulli) CDF(x float64) float64 {
+	if x < 0 {
+		return 0
+	}
+	if x < 1 {
+		return 1 - b.P
+	}
+	return 1
+}
+
+// Entropy returns the entropy of the distribution.
+func (b Bernoulli) Entropy() float64 {
+	if b.P == 0 {
+		return 0
+	}
+	if b.P == 1 {
+		return 0
+	}
+	q := 1 - b.P
+	return -b.P*math.Log(b.P) - q*math.Log(q)
+}
+
+// ExKurtosis returns the excess kurtosis of the distribution.
+func (b Bernoulli) ExKurtosis() float64 {
+	pq := b.P * (1 - b.P)
+	return (1 - 6*pq) / pq
+}
+
+// LogProb computes the natural logarithm of the value of the probability density function at x.
+func (b Bernoulli) LogProb(x float64) float64 {
+	if x == 0 {
+		return math.Log(1 - b.P)
+	}
+	if x == 1 {
+		return math.Log(b.P)
+	}
+	return math.Inf(-1)
+}
+
+// Mean returns the mean of the probability distribution.
+func (b Bernoulli) Mean() float64 {
+	return b.P
+}
+
+// Median returns the median of the probability distribution.
+func (b Bernoulli) Median() float64 {
+	p := b.P
+	switch {
+	case p < 0.5:
+		return 0
+	case p > 0.5:
+		return 1
+	default:
+		return 0.5
+	}
+}
+
+// NumParameters returns the number of parameters in the distribution.
+func (Bernoulli) NumParameters() int {
+	return 1
+}
+
+// Prob computes the value of the probability distribution at x.
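+// For example, with the illustrative parameter P = 0.3, Prob(1) = 0.3,
+// Prob(0) = 0.7, and Prob is 0 at any other x.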
+func (b Bernoulli) Prob(x float64) float64 { + return math.Exp(b.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (b Bernoulli) Quantile(p float64) float64 { + if p < 0 || 1 < p { + panic(badPercentile) + } + if p < 1-b.P { + return 0 + } + return 1 +} + +// Rand returns a random sample drawn from the distribution. +func (b Bernoulli) Rand() float64 { + var rnd float64 + if b.Src == nil { + rnd = rand.Float64() + } else { + rnd = rand.New(b.Src).Float64() + } + if rnd < b.P { + return 1 + } + return 0 +} + +// Skewness returns the skewness of the distribution. +func (b Bernoulli) Skewness() float64 { + return (1 - 2*b.P) / math.Sqrt(b.P*(1-b.P)) +} + +// StdDev returns the standard deviation of the probability distribution. +func (b Bernoulli) StdDev() float64 { + return math.Sqrt(b.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (b Bernoulli) Survival(x float64) float64 { + return 1 - b.CDF(x) +} + +// Variance returns the variance of the probability distribution. +func (b Bernoulli) Variance() float64 { + return b.P * (1 - b.P) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/beta.go b/vendor/gonum.org/v1/gonum/stat/distuv/beta.go new file mode 100644 index 0000000..0de01bc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/beta.go @@ -0,0 +1,136 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// Beta implements the Beta distribution, a two-parameter continuous distribution +// with support between 0 and 1. +// +// The beta distribution has density function +// x^(α-1) * (1-x)^(β-1) * Γ(α+β) / (Γ(α)*Γ(β)) +// +// For more information, see https://en.wikipedia.org/wiki/Beta_distribution +type Beta struct { + // Alpha is the left shape parameter of the distribution. Alpha must be greater + // than 0. + Alpha float64 + // Beta is the right shape parameter of the distribution. Beta must be greater + // than 0. + Beta float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative distribution function at x. +func (b Beta) CDF(x float64) float64 { + if x <= 0 { + return 0 + } + if x >= 1 { + return 1 + } + return mathext.RegIncBeta(b.Alpha, b.Beta, x) +} + +// Entropy returns the differential entropy of the distribution. +func (b Beta) Entropy() float64 { + if b.Alpha <= 0 || b.Beta <= 0 { + panic("beta: negative parameters") + } + return mathext.Lbeta(b.Alpha, b.Beta) - (b.Alpha-1)*mathext.Digamma(b.Alpha) - + (b.Beta-1)*mathext.Digamma(b.Beta) + (b.Alpha+b.Beta-2)*mathext.Digamma(b.Alpha+b.Beta) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (b Beta) ExKurtosis() float64 { + num := 6 * ((b.Alpha-b.Beta)*(b.Alpha-b.Beta)*(b.Alpha+b.Beta+1) - b.Alpha*b.Beta*(b.Alpha+b.Beta+2)) + den := b.Alpha * b.Beta * (b.Alpha + b.Beta + 2) * (b.Alpha + b.Beta + 3) + return num / den +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. 
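+// For example, with the illustrative parameters Alpha = 2 and Beta = 2 the
+// density is 6x(1-x), so LogProb(0.5) = ln(1.5) ≈ 0.405.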
+func (b Beta) LogProb(x float64) float64 { + if x < 0 || x > 1 { + return math.Inf(-1) + } + + if b.Alpha <= 0 || b.Beta <= 0 { + panic("beta: negative parameters") + } + + lab, _ := math.Lgamma(b.Alpha + b.Beta) + la, _ := math.Lgamma(b.Alpha) + lb, _ := math.Lgamma(b.Beta) + return lab - la - lb + (b.Alpha-1)*math.Log(x) + (b.Beta-1)*math.Log(1-x) +} + +// Mean returns the mean of the probability distribution. +func (b Beta) Mean() float64 { + return b.Alpha / (b.Alpha + b.Beta) +} + +// Mode returns the mode of the distribution. +// +// Mode returns NaN if either parameter is less than or equal to 1 as a special case. +func (b Beta) Mode() float64 { + if b.Alpha <= 1 || b.Beta <= 1 { + return math.NaN() + } + return (b.Alpha - 1) / (b.Alpha + b.Beta - 2) +} + +// NumParameters returns the number of parameters in the distribution. +func (b Beta) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (b Beta) Prob(x float64) float64 { + return math.Exp(b.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative distribution function. +func (b Beta) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return mathext.InvRegIncBeta(b.Alpha, b.Beta, p) +} + +// Rand returns a random sample drawn from the distribution. +func (b Beta) Rand() float64 { + ga := Gamma{Alpha: b.Alpha, Beta: 1, Src: b.Src}.Rand() + gb := Gamma{Alpha: b.Beta, Beta: 1, Src: b.Src}.Rand() + return ga / (ga + gb) +} + +// StdDev returns the standard deviation of the probability distribution. +func (b Beta) StdDev() float64 { + return math.Sqrt(b.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (b Beta) Survival(x float64) float64 { + switch { + case x <= 0: + return 1 + case x >= 1: + return 0 + } + return mathext.RegIncBeta(b.Beta, b.Alpha, 1-x) +} + +// Variance returns the variance of the probability distribution. +func (b Beta) Variance() float64 { + return b.Alpha * b.Beta / ((b.Alpha + b.Beta) * (b.Alpha + b.Beta) * (b.Alpha + b.Beta + 1)) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/binomial.go b/vendor/gonum.org/v1/gonum/stat/distuv/binomial.go new file mode 100644 index 0000000..4fb7072 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/binomial.go @@ -0,0 +1,188 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" + "gonum.org/v1/gonum/stat/combin" +) + +// Binomial implements the binomial distribution, a discrete probability distribution +// that expresses the probability of a given number of successful Bernoulli trials +// out of a total of n, each with success probability p. +// The binomial distribution has the density function: +// f(k) = (n choose k) p^k (1-p)^(n-k) +// For more information, see https://en.wikipedia.org/wiki/Binomial_distribution. +type Binomial struct { + // N is the total number of Bernoulli trials. N must be greater than 0. + N float64 + // P is the probablity of success in any given trial. P must be in [0, 1]. + P float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative distribution function at x. 
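+// CDF uses the identity P(X <= x) = I_{1-p}(n-x, x+1), where I is the
+// regularized incomplete beta function. For example, with the illustrative
+// parameters N = 4 and P = 0.5, CDF(2) = (1+4+6)/16 = 0.6875.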
+func (b Binomial) CDF(x float64) float64 { + if x < 0 { + return 0 + } + if x >= b.N { + return 1 + } + x = math.Floor(x) + return mathext.RegIncBeta(b.N-x, x+1, 1-b.P) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (b Binomial) ExKurtosis() float64 { + v := b.P * (1 - b.P) + return (1 - 6*v) / (b.N * v) +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (b Binomial) LogProb(x float64) float64 { + if x < 0 || x > b.N || math.Floor(x) != x { + return math.Inf(-1) + } + lb := combin.LogGeneralizedBinomial(b.N, x) + return lb + x*math.Log(b.P) + (b.N-x)*math.Log(1-b.P) +} + +// Mean returns the mean of the probability distribution. +func (b Binomial) Mean() float64 { + return b.N * b.P +} + +// NumParameters returns the number of parameters in the distribution. +func (Binomial) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (b Binomial) Prob(x float64) float64 { + return math.Exp(b.LogProb(x)) +} + +// Rand returns a random sample drawn from the distribution. +func (b Binomial) Rand() float64 { + // NUMERICAL RECIPES IN C: THE ART OF SCIENTIFIC COMPUTING (ISBN 0-521-43108-5) + // p. 295-6 + // http://www.aip.de/groups/soe/local/numres/bookcpdf/c7-3.pdf + + runif := rand.Float64 + rexp := rand.ExpFloat64 + if b.Src != nil { + rnd := rand.New(b.Src) + runif = rnd.Float64 + rexp = rnd.ExpFloat64 + } + + p := b.P + if p > 0.5 { + p = 1 - p + } + am := b.N * p + + if b.N < 25 { + // Use direct method. + bnl := 0.0 + for i := 0; i < int(b.N); i++ { + if runif() < p { + bnl++ + } + } + if p != b.P { + return b.N - bnl + } + return bnl + } + + if am < 1 { + // Use rejection method with Poisson proposal. + const logM = 2.6e-2 // constant for rejection sampling (https://en.wikipedia.org/wiki/Rejection_sampling) + var bnl float64 + z := -p + pclog := (1 + 0.5*z) * z / (1 + (1+1.0/6*z)*z) // Padé approximant of log(1 + x) + for { + bnl = 0.0 + t := 0.0 + for i := 0; i < int(b.N); i++ { + t += rexp() + if t >= am { + break + } + bnl++ + } + bnlc := b.N - bnl + z = -bnl / b.N + log1p := (1 + 0.5*z) * z / (1 + (1+1.0/6*z)*z) + t = (bnlc+0.5)*log1p + bnl - bnlc*pclog + 1/(12*bnlc) - am + logM // Uses Stirling's expansion of log(n!) + if rexp() >= t { + break + } + } + if p != b.P { + return b.N - bnl + } + return bnl + } + // Original algorithm samples from a Poisson distribution with the + // appropriate expected value. However, the Poisson approximation is + // asymptotic such that the absolute deviation in probability is O(1/n). + // Rejection sampling produces exact variates with at worst less than 3% + // rejection with miminal additional computation. + + // Use rejection method with Cauchy proposal. + g, _ := math.Lgamma(b.N + 1) + plog := math.Log(p) + pclog := math.Log1p(-p) + sq := math.Sqrt(2 * am * (1 - p)) + for { + var em, y float64 + for { + y = math.Tan(math.Pi * runif()) + em = sq*y + am + if em >= 0 && em < b.N+1 { + break + } + } + em = math.Floor(em) + lg1, _ := math.Lgamma(em + 1) + lg2, _ := math.Lgamma(b.N - em + 1) + t := 1.2 * sq * (1 + y*y) * math.Exp(g-lg1-lg2+em*plog+(b.N-em)*pclog) + if runif() <= t { + if p != b.P { + return b.N - em + } + return em + } + } +} + +// Skewness returns the skewness of the distribution. +func (b Binomial) Skewness() float64 { + return (1 - 2*b.P) / b.StdDev() +} + +// StdDev returns the standard deviation of the probability distribution. 
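+// For a binomial distribution this is sqrt(N*P*(1-P)); with the illustrative
+// parameters N = 4 and P = 0.5 it equals 1.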
+func (b Binomial) StdDev() float64 { + return math.Sqrt(b.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (b Binomial) Survival(x float64) float64 { + return 1 - b.CDF(x) +} + +// Variance returns the variance of the probability distribution. +func (b Binomial) Variance() float64 { + return b.N * b.P * (1 - b.P) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/categorical.go b/vendor/gonum.org/v1/gonum/stat/distuv/categorical.go new file mode 100644 index 0000000..13a26b8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/categorical.go @@ -0,0 +1,185 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// Categorical is an extension of the Bernoulli distribution where x takes +// values {0, 1, ..., len(w)-1} where w is the weight vector. Categorical must +// be initialized with NewCategorical. +type Categorical struct { + weights []float64 + + // heap is a weight heap. + // + // It keeps a heap-organised sum of remaining + // index weights that are available to be taken + // from. + // + // Each element holds the sum of weights for + // the corresponding index, plus the sum of + // its children's weights; the children of + // an element i can be found at positions + // 2*(i+1)-1 and 2*(i+1). The root of the + // weight heap is at element 0. + // + // See comments in container/heap for an + // explanation of the layout of a heap. + heap []float64 + + src rand.Source +} + +// NewCategorical constructs a new categorical distribution where the probability +// that x equals i is proportional to w[i]. All of the weights must be +// nonnegative, and at least one of the weights must be positive. +func NewCategorical(w []float64, src rand.Source) Categorical { + c := Categorical{ + weights: make([]float64, len(w)), + heap: make([]float64, len(w)), + src: src, + } + c.ReweightAll(w) + return c +} + +// CDF computes the value of the cumulative density function at x. +func (c Categorical) CDF(x float64) float64 { + var cdf float64 + for i, w := range c.weights { + if x < float64(i) { + break + } + cdf += w + } + return cdf / c.heap[0] +} + +// Entropy returns the entropy of the distribution. +func (c Categorical) Entropy() float64 { + var ent float64 + for _, w := range c.weights { + if w == 0 { + continue + } + p := w / c.heap[0] + ent += p * math.Log(p) + } + return -ent +} + +// Len returns the number of values x could possibly take (the length of the +// initial supplied weight vector). +func (c Categorical) Len() int { + return len(c.weights) +} + +// Mean returns the mean of the probability distribution. +func (c Categorical) Mean() float64 { + var mean float64 + for i, v := range c.weights { + mean += float64(i) * v + } + return mean / c.heap[0] +} + +// Prob computes the value of the probability density function at x. +func (c Categorical) Prob(x float64) float64 { + xi := int(x) + if float64(xi) != x { + return 0 + } + if xi < 0 || xi > len(c.weights)-1 { + return 0 + } + return c.weights[xi] / c.heap[0] +} + +// LogProb computes the natural logarithm of the value of the probability density function at x. +func (c Categorical) LogProb(x float64) float64 { + return math.Log(c.Prob(x)) +} + +// Rand returns a random draw from the categorical distribution. 
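+//
+// A draw is made by sampling r uniformly from [0, sum of all weights) and
+// walking the weight heap described on the Categorical type: the weight of the
+// visited element is subtracted from r, and if r is then non-positive that
+// element's index is returned; otherwise the walk descends into the left
+// child's subtree, or into the right child's subtree after additionally
+// subtracting the left subtree's total weight when r exceeds it.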
+func (c Categorical) Rand() float64 { + var r float64 + if c.src == nil { + r = c.heap[0] * rand.Float64() + } else { + r = c.heap[0] * rand.New(c.src).Float64() + } + i := 1 + last := -1 + left := len(c.weights) + for { + if r -= c.weights[i-1]; r <= 0 { + break // Fall within item i-1. + } + i <<= 1 // Move to left child. + if d := c.heap[i-1]; r > d { + r -= d + // If enough r to pass left child, + // move to right child state will + // be caught at break above. + i++ + } + if i == last || left < 0 { + panic("categorical: bad sample") + } + last = i + left-- + } + return float64(i - 1) +} + +// Reweight sets the weight of item idx to w. The input weight must be +// non-negative, and after reweighting at least one of the weights must be +// positive. +func (c Categorical) Reweight(idx int, w float64) { + if w < 0 { + panic("categorical: negative weight") + } + w, c.weights[idx] = c.weights[idx]-w, w + idx++ + for idx > 0 { + c.heap[idx-1] -= w + idx >>= 1 + } + if c.heap[0] <= 0 { + panic("categorical: sum of the weights non-positive") + } +} + +// ReweightAll resets the weights of the distribution. ReweightAll panics if +// len(w) != c.Len. All of the weights must be nonnegative, and at least one of +// the weights must be positive. +func (c Categorical) ReweightAll(w []float64) { + if len(w) != c.Len() { + panic("categorical: length of the slices do not match") + } + for _, v := range w { + if v < 0 { + panic("categorical: negative weight") + } + } + copy(c.weights, w) + c.reset() +} + +func (c Categorical) reset() { + copy(c.heap, c.weights) + for i := len(c.heap) - 1; i > 0; i-- { + // Sometimes 1-based counting makes sense. + c.heap[((i+1)>>1)-1] += c.heap[i] + } + // TODO(btracey): Renormalization for weird weights? + if c.heap[0] <= 0 { + panic("categorical: sum of the weights non-positive") + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/chisquared.go b/vendor/gonum.org/v1/gonum/stat/distuv/chisquared.go new file mode 100644 index 0000000..868a5fb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/chisquared.go @@ -0,0 +1,100 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// ChiSquared implements the χ² distribution, a one parameter distribution +// with support on the positive numbers. +// +// The density function is given by +// 1/(2^{k/2} * Γ(k/2)) * x^{k/2 - 1} * e^{-x/2} +// It is a special case of the Gamma distribution, Γ(k/2, 1/2). +// +// For more information, see https://en.wikipedia.org/wiki/Chi-squared_distribution. +type ChiSquared struct { + // K is the shape parameter, corresponding to the degrees of freedom. Must + // be greater than 0. + K float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (c ChiSquared) CDF(x float64) float64 { + return mathext.GammaIncReg(c.K/2, x/2) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (c ChiSquared) ExKurtosis() float64 { + return 12 / c.K +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (c ChiSquared) LogProb(x float64) float64 { + if x < 0 { + return math.Inf(-1) + } + lg, _ := math.Lgamma(c.K / 2) + return (c.K/2-1)*math.Log(x) - x/2 - (c.K/2)*math.Ln2 - lg +} + +// Mean returns the mean of the probability distribution. 
+func (c ChiSquared) Mean() float64 { + return c.K +} + +// Mode returns the mode of the distribution. +func (c ChiSquared) Mode() float64 { + return math.Min(c.K-2, 0) +} + +// NumParameters returns the number of parameters in the distribution. +func (c ChiSquared) NumParameters() int { + return 1 +} + +// Prob computes the value of the probability density function at x. +func (c ChiSquared) Prob(x float64) float64 { + return math.Exp(c.LogProb(x)) +} + +// Rand returns a random sample drawn from the distribution. +func (c ChiSquared) Rand() float64 { + return Gamma{c.K / 2, 0.5, c.Src}.Rand() +} + +// Quantile returns the inverse of the cumulative distribution function. +func (c ChiSquared) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return mathext.GammaIncRegInv(0.5*c.K, p) * 2 +} + +// StdDev returns the standard deviation of the probability distribution. +func (c ChiSquared) StdDev() float64 { + return math.Sqrt(c.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (c ChiSquared) Survival(x float64) float64 { + if x < 0 { + return 1 + } + return mathext.GammaIncRegComp(0.5*c.K, 0.5*x) +} + +// Variance returns the variance of the probability distribution. +func (c ChiSquared) Variance() float64 { + return 2 * c.K +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/constants.go b/vendor/gonum.org/v1/gonum/stat/distuv/constants.go new file mode 100644 index 0000000..374d199 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/constants.go @@ -0,0 +1,24 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +const ( + // oneOverRoot2Pi is the value of 1/(2Pi)^(1/2) + // http://www.wolframalpha.com/input/?i=1%2F%282+*+pi%29%5E%281%2F2%29 + oneOverRoot2Pi = 0.39894228040143267793994605993438186847585863116493465766592582967065792589930183850125233390730693643030255886263518268 + + //LogRoot2Pi is the value of log(sqrt(2*Pi)) + logRoot2Pi = 0.91893853320467274178032973640561763986139747363778341281715154048276569592726039769474329863595419762200564662463433744 + negLogRoot2Pi = -logRoot2Pi + log2Pi = 1.8378770664093454835606594728112352797227949472755668 + ln2 = 0.69314718055994530941723212145817656807550013436025525412068000949339362196969471560586332699641868754200148102057068573368552023 + + // Euler–Mascheroni constant. + eulerGamma = 0.5772156649015328606065120900824024310421593359399235988057672348848677267776646709369470632917467495146314472498070824809605 +) + +const ( + panicNameMismatch = "parameter name mismatch" +) diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/doc.go b/vendor/gonum.org/v1/gonum/stat/distuv/doc.go new file mode 100644 index 0000000..68aba2d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package distuv provides univariate random distribution types. +package distuv // import "gonum.org/v1/gonum/stat/distuv" diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/exponential.go b/vendor/gonum.org/v1/gonum/stat/distuv/exponential.go new file mode 100644 index 0000000..2530e3b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/exponential.go @@ -0,0 +1,260 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/stat" +) + +// Exponential represents the exponential distribution (https://en.wikipedia.org/wiki/Exponential_distribution). +type Exponential struct { + Rate float64 + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (e Exponential) CDF(x float64) float64 { + if x < 0 { + return 0 + } + return 1 - math.Exp(-e.Rate*x) +} + +// ConjugateUpdate updates the parameters of the distribution from the sufficient +// statistics of a set of samples. The sufficient statistics, suffStat, have been +// observed with nSamples observations. The prior values of the distribution are those +// currently in the distribution, and have been observed with priorStrength samples. +// +// For the exponential distribution, the sufficient statistic is the inverse of +// the mean of the samples. +// The prior is having seen priorStrength[0] samples with inverse mean Exponential.Rate +// As a result of this function, Exponential.Rate is updated based on the weighted +// samples, and priorStrength is modified to include the new number of samples observed. +// +// This function panics if len(suffStat) != 1 or len(priorStrength) != 1. +func (e *Exponential) ConjugateUpdate(suffStat []float64, nSamples float64, priorStrength []float64) { + if len(suffStat) != 1 { + panic("exponential: incorrect suffStat length") + } + if len(priorStrength) != 1 { + panic("exponential: incorrect priorStrength length") + } + + totalSamples := nSamples + priorStrength[0] + + totalSum := nSamples / suffStat[0] + if !(priorStrength[0] == 0) { + totalSum += priorStrength[0] / e.Rate + } + e.Rate = totalSamples / totalSum + priorStrength[0] = totalSamples +} + +// Entropy returns the entropy of the distribution. +func (e Exponential) Entropy() float64 { + return 1 - math.Log(e.Rate) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (Exponential) ExKurtosis() float64 { + return 6 +} + +// Fit sets the parameters of the probability distribution from the +// data samples x with relative weights w. +// If weights is nil, then all the weights are 1. +// If weights is not nil, then the len(weights) must equal len(samples). +func (e *Exponential) Fit(samples, weights []float64) { + suffStat := make([]float64, e.NumSuffStat()) + nSamples := e.SuffStat(suffStat, samples, weights) + e.ConjugateUpdate(suffStat, nSamples, make([]float64, e.NumSuffStat())) +} + +// LogProb computes the natural logarithm of the value of the probability density function at x. +func (e Exponential) LogProb(x float64) float64 { + if x < 0 { + return math.Inf(-1) + } + return math.Log(e.Rate) - e.Rate*x +} + +// Mean returns the mean of the probability distribution. +func (e Exponential) Mean() float64 { + return 1 / e.Rate +} + +// Median returns the median of the probability distribution. +func (e Exponential) Median() float64 { + return math.Ln2 / e.Rate +} + +// Mode returns the mode of the probability distribution. +func (Exponential) Mode() float64 { + return 0 +} + +// NumParameters returns the number of parameters in the distribution. +func (Exponential) NumParameters() int { + return 1 +} + +// NumSuffStat returns the number of sufficient statistics for the distribution. 
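+// For the exponential distribution there is a single sufficient statistic, the
+// inverse of the sample mean, which is what SuffStat produces and what
+// ConjugateUpdate and Fit consume. A minimal fitting sketch; the sample values
+// are illustrative assumptions only:
+//  e := &Exponential{}
+//  e.Fit([]float64{0.5, 1.5, 1.0}, nil)
+//  // e.Rate is now 1, the inverse of the sample mean.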
+func (Exponential) NumSuffStat() int { + return 1 +} + +// Prob computes the value of the probability density function at x. +func (e Exponential) Prob(x float64) float64 { + return math.Exp(e.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (e Exponential) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return -math.Log(1-p) / e.Rate +} + +// Rand returns a random sample drawn from the distribution. +func (e Exponential) Rand() float64 { + var rnd float64 + if e.Src == nil { + rnd = rand.ExpFloat64() + } else { + rnd = rand.New(e.Src).ExpFloat64() + } + return rnd / e.Rate +} + +// Score returns the score function with respect to the parameters of the +// distribution at the input location x. The score function is the derivative +// of the log-likelihood at x with respect to the parameters +// (∂/∂θ) log(p(x;θ)) +// If deriv is non-nil, len(deriv) must equal the number of parameters otherwise +// Score will panic, and the derivative is stored in-place into deriv. If deriv +// is nil a new slice will be allocated and returned. +// +// The order is [∂LogProb / ∂Rate]. +// +// For more information, see https://en.wikipedia.org/wiki/Score_%28statistics%29. +// +// Special cases: +// Score(0) = [NaN] +func (e Exponential) Score(deriv []float64, x float64) []float64 { + if deriv == nil { + deriv = make([]float64, e.NumParameters()) + } + if len(deriv) != e.NumParameters() { + panic(badLength) + } + if x > 0 { + deriv[0] = 1/e.Rate - x + return deriv + } + if x < 0 { + deriv[0] = 0 + return deriv + } + deriv[0] = math.NaN() + return deriv +} + +// ScoreInput returns the score function with respect to the input of the +// distribution at the input location specified by x. The score function is the +// derivative of the log-likelihood +// (d/dx) log(p(x)) . +// Special cases: +// ScoreInput(0) = NaN +func (e Exponential) ScoreInput(x float64) float64 { + if x > 0 { + return -e.Rate + } + if x < 0 { + return 0 + } + return math.NaN() +} + +// Skewness returns the skewness of the distribution. +func (Exponential) Skewness() float64 { + return 2 +} + +// StdDev returns the standard deviation of the probability distribution. +func (e Exponential) StdDev() float64 { + return 1 / e.Rate +} + +// SuffStat computes the sufficient statistics of set of samples to update +// the distribution. The sufficient statistics are stored in place, and the +// effective number of samples are returned. +// +// The exponential distribution has one sufficient statistic, the average rate +// of the samples. +// +// If weights is nil, the weights are assumed to be 1, otherwise panics if +// len(samples) != len(weights). Panics if len(suffStat) != NumSuffStat(). +func (Exponential) SuffStat(suffStat, samples, weights []float64) (nSamples float64) { + if len(weights) != 0 && len(samples) != len(weights) { + panic(badLength) + } + + if len(suffStat) != (Exponential{}).NumSuffStat() { + panic(badSuffStat) + } + + if len(weights) == 0 { + nSamples = float64(len(samples)) + } else { + nSamples = floats.Sum(weights) + } + + mean := stat.Mean(samples, weights) + suffStat[0] = 1 / mean + return nSamples +} + +// Survival returns the survival function (complementary CDF) at x. +func (e Exponential) Survival(x float64) float64 { + if x < 0 { + return 1 + } + return math.Exp(-e.Rate * x) +} + +// setParameters modifies the parameters of the distribution. 
+func (e *Exponential) setParameters(p []Parameter) { + if len(p) != e.NumParameters() { + panic("exponential: incorrect number of parameters to set") + } + if p[0].Name != "Rate" { + panic("exponential: " + panicNameMismatch) + } + e.Rate = p[0].Value +} + +// Variance returns the variance of the probability distribution. +func (e Exponential) Variance() float64 { + return 1 / (e.Rate * e.Rate) +} + +// parameters returns the parameters of the distribution. +func (e Exponential) parameters(p []Parameter) []Parameter { + nParam := e.NumParameters() + if p == nil { + p = make([]Parameter, nParam) + } else if len(p) != nParam { + panic("exponential: improper parameter length") + } + p[0].Name = "Rate" + p[0].Value = e.Rate + return p +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/f.go b/vendor/gonum.org/v1/gonum/stat/distuv/f.go new file mode 100644 index 0000000..b9ae785 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/f.go @@ -0,0 +1,133 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// F implements the F-distribution, a two-parameter continuous distribution +// with support over the positive real numbers. +// +// The F-distribution has density function +// sqrt(((d1*x)^d1) * d2^d2 / ((d1*x+d2)^(d1+d2))) / (x * B(d1/2,d2/2)) +// where B is the beta function. +// +// For more information, see https://en.wikipedia.org/wiki/F-distribution +type F struct { + D1 float64 // Degrees of freedom for the numerator + D2 float64 // Degrees of freedom for the denominator + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (f F) CDF(x float64) float64 { + return mathext.RegIncBeta(f.D1/2, f.D2/2, f.D1*x/(f.D1*x+f.D2)) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +// +// ExKurtosis returns NaN if the D2 parameter is less or equal to 8. +func (f F) ExKurtosis() float64 { + if f.D2 <= 8 { + return math.NaN() + } + return (12 / (f.D2 - 6)) * ((5*f.D2-22)/(f.D2-8) + ((f.D2-4)/f.D1)*((f.D2-2)/(f.D2-8))*((f.D2-2)/(f.D1+f.D2-2))) +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (f F) LogProb(x float64) float64 { + return 0.5*(f.D1*math.Log(f.D1*x)+f.D2*math.Log(f.D2)-(f.D1+f.D2)*math.Log(f.D1*x+f.D2)) - math.Log(x) - mathext.Lbeta(f.D1/2, f.D2/2) +} + +// Mean returns the mean of the probability distribution. +// +// Mean returns NaN if the D2 parameter is less than or equal to 2. +func (f F) Mean() float64 { + if f.D2 <= 2 { + return math.NaN() + } + return f.D2 / (f.D2 - 2) +} + +// Mode returns the mode of the distribution. +// +// Mode returns NaN if the D1 parameter is less than or equal to 2. +func (f F) Mode() float64 { + if f.D1 <= 2 { + return math.NaN() + } + return ((f.D1 - 2) / f.D1) * (f.D2 / (f.D2 + 2)) +} + +// NumParameters returns the number of parameters in the distribution. +func (f F) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (f F) Prob(x float64) float64 { + return math.Exp(f.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative distribution function. 
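+// Since CDF(x) = I_y(D1/2, D2/2) with y = D1*x/(D1*x+D2), Quantile inverts the
+// regularized incomplete beta function and then solves y = D1*x/(D1*x+D2) for
+// x, giving x = D2*y/(D1*(1-y)).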
+func (f F) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + y := mathext.InvRegIncBeta(0.5*f.D1, 0.5*f.D2, p) + return f.D2 * y / (f.D1 * (1 - y)) +} + +// Rand returns a random sample drawn from the distribution. +func (f F) Rand() float64 { + u1 := ChiSquared{f.D1, f.Src}.Rand() + u2 := ChiSquared{f.D2, f.Src}.Rand() + return (u1 / f.D1) / (u2 / f.D2) +} + +// Skewness returns the skewness of the distribution. +// +// Skewness returns NaN if the D2 parameter is less than or equal to 6. +func (f F) Skewness() float64 { + if f.D2 <= 6 { + return math.NaN() + } + num := (2*f.D1 + f.D2 - 2) * math.Sqrt(8*(f.D2-4)) + den := (f.D2 - 6) * math.Sqrt(f.D1*(f.D1+f.D2-2)) + return num / den +} + +// StdDev returns the standard deviation of the probability distribution. +// +// StdDev returns NaN if the D2 parameter is less than or equal to 4. +func (f F) StdDev() float64 { + if f.D2 <= 4 { + return math.NaN() + } + return math.Sqrt(f.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (f F) Survival(x float64) float64 { + return 1 - f.CDF(x) +} + +// Variance returns the variance of the probability distribution. +// +// Variance returns NaN if the D2 parameter is less than or equal to 4. +func (f F) Variance() float64 { + if f.D2 <= 4 { + return math.NaN() + } + num := 2 * f.D2 * f.D2 * (f.D1 + f.D2 - 2) + den := f.D1 * (f.D2 - 2) * (f.D2 - 2) * (f.D2 - 4) + return num / den +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/gamma.go b/vendor/gonum.org/v1/gonum/stat/distuv/gamma.go new file mode 100644 index 0000000..8aadee7 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/gamma.go @@ -0,0 +1,246 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// Gamma implements the Gamma distribution, a two-parameter continuous distribution +// with support over the positive real numbers. +// +// The gamma distribution has density function +// β^α / Γ(α) x^(α-1)e^(-βx) +// +// For more information, see https://en.wikipedia.org/wiki/Gamma_distribution +type Gamma struct { + // Alpha is the shape parameter of the distribution. Alpha must be greater + // than 0. If Alpha == 1, this is equivalent to an exponential distribution. + Alpha float64 + // Beta is the rate parameter of the distribution. Beta must be greater than 0. + // If Beta == 2, this is equivalent to a Chi-Squared distribution. + Beta float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative distribution function at x. +func (g Gamma) CDF(x float64) float64 { + if x < 0 { + return 0 + } + return mathext.GammaIncReg(g.Alpha, g.Beta*x) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (g Gamma) ExKurtosis() float64 { + return 6 / g.Alpha +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (g Gamma) LogProb(x float64) float64 { + if x <= 0 { + return math.Inf(-1) + } + a := g.Alpha + b := g.Beta + lg, _ := math.Lgamma(a) + return a*math.Log(b) - lg + (a-1)*math.Log(x) - b*x +} + +// Mean returns the mean of the probability distribution. +func (g Gamma) Mean() float64 { + return g.Alpha / g.Beta +} + +// Mode returns the mode of the normal distribution. 
+// +// The mode is NaN in the special case where the Alpha (shape) parameter +// is less than 1. +func (g Gamma) Mode() float64 { + if g.Alpha < 1 { + return math.NaN() + } + return (g.Alpha - 1) / g.Beta +} + +// NumParameters returns the number of parameters in the distribution. +func (Gamma) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (g Gamma) Prob(x float64) float64 { + return math.Exp(g.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative distribution function. +func (g Gamma) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return mathext.GammaIncRegInv(g.Alpha, p) / g.Beta +} + +// Rand returns a random sample drawn from the distribution. +// +// Rand panics if either alpha or beta is <= 0. +func (g Gamma) Rand() float64 { + if g.Beta <= 0 { + panic("gamma: beta <= 0") + } + + unifrnd := rand.Float64 + exprnd := rand.ExpFloat64 + normrnd := rand.NormFloat64 + if g.Src != nil { + rnd := rand.New(g.Src) + unifrnd = rnd.Float64 + exprnd = rnd.ExpFloat64 + normrnd = rnd.NormFloat64 + } + + a := g.Alpha + b := g.Beta + switch { + case a <= 0: + panic("gamma: alpha < 0") + case a == 1: + // Generate from exponential + return exprnd() / b + case a < 0.3: + // Generate using + // Liu, Chuanhai, Martin, Ryan and Syring, Nick. "Simulating from a + // gamma distribution with small shape parameter" + // https://arxiv.org/abs/1302.1884 + // use this reference: http://link.springer.com/article/10.1007/s00180-016-0692-0 + + // Algorithm adjusted to work in log space as much as possible. + lambda := 1/a - 1 + lw := math.Log(a) - 1 - math.Log(1-a) + lr := -math.Log(1 + math.Exp(lw)) + lc, _ := math.Lgamma(a + 1) + for { + e := exprnd() + var z float64 + if e >= -lr { + z = e + lr + } else { + z = -exprnd() / lambda + } + lh := lc - z - math.Exp(-z/a) + var lEta float64 + if z >= 0 { + lEta = lc - z + } else { + lEta = lc + lw + math.Log(lambda) + lambda*z + } + if lh-lEta > -exprnd() { + return math.Exp(-z/a) / b + } + } + case a >= 0.3 && a < 1: + // Generate using: + // Kundu, Debasis, and Rameshwar D. Gupta. "A convenient way of generating + // gamma random variables using generalized exponential distribution." + // Computational Statistics & Data Analysis 51.6 (2007): 2796-2802. + + // TODO(btracey): Change to using Algorithm 3 if we can find the bug in + // the implementation below. + + // Algorithm 2. + alpha := g.Alpha + a := math.Pow(1-expNegOneHalf, alpha) / (math.Pow(1-expNegOneHalf, alpha) + alpha*math.Exp(-1)/math.Pow(2, alpha)) + b := math.Pow(1-expNegOneHalf, alpha) + alpha/math.E/math.Pow(2, alpha) + var x float64 + for { + u := unifrnd() + if u <= a { + x = -2 * math.Log(1-math.Pow(u*b, 1/alpha)) + } else { + x = -math.Log(math.Pow(2, alpha) / alpha * b * (1 - u)) + } + v := unifrnd() + if x <= 1 { + if v <= math.Pow(x, alpha-1)*math.Exp(-x/2)/(math.Pow(2, alpha-1)*math.Pow(1-math.Exp(-x/2), alpha-1)) { + break + } + } else { + if v <= math.Pow(x, alpha-1) { + break + } + } + } + return x / g.Beta + + /* + // Algorithm 3. 
+ d := 1.0334 - 0.0766*math.Exp(2.2942*alpha) + a := math.Pow(2, alpha) * math.Pow(1-math.Exp(-d/2), alpha) + b := alpha * math.Pow(d, alpha-1) * math.Exp(-d) + c := a + b + var x float64 + for { + u := unifrnd() + if u <= a/(a+b) { + x = -2 * math.Log(1-math.Pow(c*u, 1/a)/2) + } else { + x = -math.Log(c * (1 - u) / (alpha * math.Pow(d, alpha-1))) + } + v := unifrnd() + if x <= d { + if v <= (math.Pow(x, alpha-1)*math.Exp(-x/2))/(math.Pow(2, alpha-1)*math.Pow(1-math.Exp(-x/2), alpha-1)) { + break + } + } else { + if v <= math.Pow(d/x, 1-alpha) { + break + } + } + } + return x / g.Beta + */ + case a > 1: + // Generate using: + // Marsaglia, George, and Wai Wan Tsang. "A simple method for generating + // gamma variables." ACM Transactions on Mathematical Software (TOMS) + // 26.3 (2000): 363-372. + d := a - 1.0/3 + c := 1 / (3 * math.Sqrt(d)) + for { + u := -exprnd() + x := normrnd() + v := 1 + x*c + v = v * v * v + if u < 0.5*x*x+d*(1-v+math.Log(v)) { + return d * v / b + } + } + } + panic("unreachable") +} + +// Survival returns the survival function (complementary CDF) at x. +func (g Gamma) Survival(x float64) float64 { + if x < 0 { + return 1 + } + return mathext.GammaIncRegComp(g.Alpha, g.Beta*x) +} + +// StdDev returns the standard deviation of the probability distribution. +func (g Gamma) StdDev() float64 { + return math.Sqrt(g.Variance()) +} + +// Variance returns the variance of the probability distribution. +func (g Gamma) Variance() float64 { + return g.Alpha / g.Beta / g.Beta +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/general.go b/vendor/gonum.org/v1/gonum/stat/distuv/general.go new file mode 100644 index 0000000..5db299d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/general.go @@ -0,0 +1,24 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +// Parameter represents a parameter of a probability distribution +type Parameter struct { + Name string + Value float64 +} + +var ( + badPercentile = "distuv: percentile out of bounds" + badLength = "distuv: slice length mismatch" + badSuffStat = "distuv: wrong suffStat length" + badNoSamples = "distuv: must have at least one sample" +) + +const ( + expNegOneHalf = 0.6065306597126334236037995349911804534419 // https://oeis.org/A092605 + eulerMascheroni = 0.5772156649015328606065120900824024310421 // https://oeis.org/A001620 + apery = 1.2020569031595942853997381615114499907649 // https://oeis.org/A002117 +) diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/gumbel.go b/vendor/gonum.org/v1/gonum/stat/distuv/gumbel.go new file mode 100644 index 0000000..4f1cf27 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/gumbel.go @@ -0,0 +1,117 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// GumbelRight implements the right-skewed Gumbel distribution, a two-parameter +// continuous distribution with support over the real numbers. The right-skewed +// Gumbel distribution is also sometimes known as the Extreme Value distribution. +// +// The right-skewed Gumbel distribution has density function +// 1/beta * exp(-(z + exp(-z))) +// z = (x - mu)/beta +// Beta must be greater than 0. +// +// For more information, see https://en.wikipedia.org/wiki/Gumbel_distribution. 
+type GumbelRight struct {
+	Mu   float64
+	Beta float64
+	Src  rand.Source
+}
+
+func (g GumbelRight) z(x float64) float64 {
+	return (x - g.Mu) / g.Beta
+}
+
+// CDF computes the value of the cumulative density function at x.
+func (g GumbelRight) CDF(x float64) float64 {
+	z := g.z(x)
+	return math.Exp(-math.Exp(-z))
+}
+
+// Entropy returns the differential entropy of the distribution.
+func (g GumbelRight) Entropy() float64 {
+	return math.Log(g.Beta) + eulerMascheroni + 1
+}
+
+// ExKurtosis returns the excess kurtosis of the distribution.
+func (g GumbelRight) ExKurtosis() float64 {
+	return 12.0 / 5
+}
+
+// LogProb computes the natural logarithm of the value of the probability density function at x.
+func (g GumbelRight) LogProb(x float64) float64 {
+	z := g.z(x)
+	return -math.Log(g.Beta) - z - math.Exp(-z)
+}
+
+// Mean returns the mean of the probability distribution.
+func (g GumbelRight) Mean() float64 {
+	return g.Mu + g.Beta*eulerMascheroni
+}
+
+// Median returns the median of the Gumbel distribution.
+func (g GumbelRight) Median() float64 {
+	return g.Mu - g.Beta*math.Log(math.Ln2)
+}
+
+// Mode returns the mode of the Gumbel distribution.
+func (g GumbelRight) Mode() float64 {
+	return g.Mu
+}
+
+// NumParameters returns the number of parameters in the distribution.
+func (GumbelRight) NumParameters() int {
+	return 2
+}
+
+// Prob computes the value of the probability density function at x.
+func (g GumbelRight) Prob(x float64) float64 {
+	return math.Exp(g.LogProb(x))
+}
+
+// Quantile returns the inverse of the cumulative probability distribution.
+func (g GumbelRight) Quantile(p float64) float64 {
+	if p < 0 || 1 < p {
+		panic(badPercentile)
+	}
+	return g.Mu - g.Beta*math.Log(-math.Log(p))
+}
+
+// Rand returns a random sample drawn from the distribution.
+func (g GumbelRight) Rand() float64 {
+	var rnd float64
+	if g.Src == nil {
+		rnd = rand.ExpFloat64()
+	} else {
+		rnd = rand.New(g.Src).ExpFloat64()
+	}
+	return g.Mu - g.Beta*math.Log(rnd)
+}
+
+// Skewness returns the skewness of the distribution.
+func (GumbelRight) Skewness() float64 {
+	return 12 * math.Sqrt(6) * apery / (math.Pi * math.Pi * math.Pi)
+}
+
+// StdDev returns the standard deviation of the probability distribution.
+func (g GumbelRight) StdDev() float64 {
+	return (math.Pi / math.Sqrt(6)) * g.Beta
+}
+
+// Survival returns the survival function (complementary CDF) at x.
+func (g GumbelRight) Survival(x float64) float64 {
+	return 1 - g.CDF(x)
+}
+
+// Variance returns the variance of the probability distribution.
+func (g GumbelRight) Variance() float64 {
+	return math.Pi * math.Pi * g.Beta * g.Beta / 6
+}
diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/interfaces.go b/vendor/gonum.org/v1/gonum/stat/distuv/interfaces.go
new file mode 100644
index 0000000..28d1b69
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/stat/distuv/interfaces.go
@@ -0,0 +1,22 @@
+// Copyright ©2015 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+ +package distuv + +type LogProber interface { + LogProb(float64) float64 +} + +type Rander interface { + Rand() float64 +} + +type RandLogProber interface { + Rander + LogProber +} + +type Quantiler interface { + Quantile(p float64) float64 +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/inversegamma.go b/vendor/gonum.org/v1/gonum/stat/distuv/inversegamma.go new file mode 100644 index 0000000..1b91337 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/inversegamma.go @@ -0,0 +1,123 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// InverseGamma implements the inverse gamma distribution, a two-parameter +// continuous distribution with support over the positive real numbers. The +// inverse gamma distribution is the same as the distribution of the reciprocal +// of a gamma distributed random variable. +// +// The inverse gamma distribution has density function +// β^α / Γ(α) x^(-α-1)e^(-β/x) +// +// For more information, see https://en.wikipedia.org/wiki/Inverse-gamma_distribution +type InverseGamma struct { + // Alpha is the shape parameter of the distribution. Alpha must be greater than 0. + Alpha float64 + // Beta is the scale parameter of the distribution. Beta must be greater than 0. + Beta float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative distribution function at x. +func (g InverseGamma) CDF(x float64) float64 { + if x < 0 { + return 0 + } + // TODO(btracey): Replace this with a direct call to the upper regularized + // gamma function if mathext gets it. + //return 1 - mathext.GammaInc(g.Alpha, g.Beta/x) + return mathext.GammaIncRegComp(g.Alpha, g.Beta/x) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (g InverseGamma) ExKurtosis() float64 { + if g.Alpha <= 4 { + return math.Inf(1) + } + return (30*g.Alpha - 66) / (g.Alpha - 3) / (g.Alpha - 4) +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (g InverseGamma) LogProb(x float64) float64 { + if x <= 0 { + return math.Inf(-1) + } + a := g.Alpha + b := g.Beta + lg, _ := math.Lgamma(a) + return a*math.Log(b) - lg + (-a-1)*math.Log(x) - b/x +} + +// Mean returns the mean of the probability distribution. +func (g InverseGamma) Mean() float64 { + if g.Alpha <= 1 { + return math.Inf(1) + } + return g.Beta / (g.Alpha - 1) +} + +// Mode returns the mode of the distribution. +func (g InverseGamma) Mode() float64 { + return g.Beta / (g.Alpha + 1) +} + +// NumParameters returns the number of parameters in the distribution. +func (InverseGamma) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (g InverseGamma) Prob(x float64) float64 { + return math.Exp(g.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative distribution function. +func (g InverseGamma) Quantile(p float64) float64 { + if p < 0 || 1 < p { + panic(badPercentile) + } + return (1 / (mathext.GammaIncRegCompInv(g.Alpha, p))) * g.Beta +} + +// Rand returns a random sample drawn from the distribution. +// +// Rand panics if either alpha or beta is <= 0. +func (g InverseGamma) Rand() float64 { + // TODO(btracey): See if there is a more direct way to sample. 
+ return 1 / Gamma{Alpha: g.Alpha, Beta: g.Beta, Src: g.Src}.Rand() +} + +// Survival returns the survival function (complementary CDF) at x. +func (g InverseGamma) Survival(x float64) float64 { + if x < 0 { + return 1 + } + return mathext.GammaIncReg(g.Alpha, g.Beta/x) +} + +// StdDev returns the standard deviation of the probability distribution. +func (g InverseGamma) StdDev() float64 { + return math.Sqrt(g.Variance()) +} + +// Variance returns the variance of the probability distribution. +func (g InverseGamma) Variance() float64 { + if g.Alpha <= 2 { + return math.Inf(1) + } + v := g.Beta / (g.Alpha - 1) + return v * v / (g.Alpha - 2) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/laplace.go b/vendor/gonum.org/v1/gonum/stat/distuv/laplace.go new file mode 100644 index 0000000..93048fa --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/laplace.go @@ -0,0 +1,259 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + "sort" + + "golang.org/x/exp/rand" + "gonum.org/v1/gonum/stat" +) + +// Laplace represents the Laplace distribution (https://en.wikipedia.org/wiki/Laplace_distribution). +type Laplace struct { + Mu float64 // Mean of the Laplace distribution + Scale float64 // Scale of the Laplace distribution + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (l Laplace) CDF(x float64) float64 { + if x < l.Mu { + return 0.5 * math.Exp((x-l.Mu)/l.Scale) + } + return 1 - 0.5*math.Exp(-(x-l.Mu)/l.Scale) +} + +// Entropy returns the entropy of the distribution. +func (l Laplace) Entropy() float64 { + return 1 + math.Log(2*l.Scale) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (l Laplace) ExKurtosis() float64 { + return 3 +} + +// Fit sets the parameters of the probability distribution from the +// data samples x with relative weights w. +// If weights is nil, then all the weights are 1. +// If weights is not nil, then the len(weights) must equal len(samples). +// +// Note: Laplace distribution has no FitPrior because it has no sufficient +// statistics. 
+func (l *Laplace) Fit(samples, weights []float64) {
+	if weights != nil && len(samples) != len(weights) {
+		panic(badLength)
+	}
+
+	if len(samples) == 0 {
+		panic(badNoSamples)
+	}
+	if len(samples) == 1 {
+		l.Mu = samples[0]
+		l.Scale = 0
+		return
+	}
+
+	var (
+		sortedSamples []float64
+		sortedWeights []float64
+	)
+	if sort.Float64sAreSorted(samples) {
+		sortedSamples = samples
+		sortedWeights = weights
+	} else {
+		// Need to copy variables so the input variables aren't affected by the sorting
+		sortedSamples = make([]float64, len(samples))
+		copy(sortedSamples, samples)
+		if weights != nil {
+			sortedWeights = append([]float64(nil), weights...)
+		}
+		stat.SortWeighted(sortedSamples, sortedWeights)
+	}
+
+	// The (weighted) median of the samples is the maximum likelihood estimate
+	// of the mean parameter
+	// TODO: Rethink quantile type when stat has more options
+	l.Mu = stat.Quantile(0.5, stat.Empirical, sortedSamples, sortedWeights)
+
+	// The scale parameter is the average absolute distance
+	// between the sample and the mean
+	var absError float64
+	var sumWeights float64
+	if weights != nil {
+		for i, v := range samples {
+			absError += weights[i] * math.Abs(l.Mu-v)
+			sumWeights += weights[i]
+		}
+		l.Scale = absError / sumWeights
+	} else {
+		for _, v := range samples {
+			absError += math.Abs(l.Mu - v)
+		}
+		l.Scale = absError / float64(len(samples))
+	}
+}
+
+// LogProb computes the natural logarithm of the value of the probability density
+// function at x.
+func (l Laplace) LogProb(x float64) float64 {
+	return -math.Ln2 - math.Log(l.Scale) - math.Abs(x-l.Mu)/l.Scale
+}
+
+// MarshalParameters implements the ParameterMarshaler interface
+func (l Laplace) MarshalParameters(p []Parameter) {
+	if len(p) != l.NumParameters() {
+		panic(badLength)
+	}
+	p[0].Name = "Mu"
+	p[0].Value = l.Mu
+	p[1].Name = "Scale"
+	p[1].Value = l.Scale
+}
+
+// Mean returns the mean of the probability distribution.
+func (l Laplace) Mean() float64 {
+	return l.Mu
+}
+
+// Median returns the median of the Laplace distribution.
+func (l Laplace) Median() float64 {
+	return l.Mu
+}
+
+// Mode returns the mode of the Laplace distribution.
+func (l Laplace) Mode() float64 {
+	return l.Mu
+}
+
+// NumParameters returns the number of parameters in the distribution.
+func (l Laplace) NumParameters() int {
+	return 2
+}
+
+// Quantile returns the inverse of the cumulative probability distribution.
+func (l Laplace) Quantile(p float64) float64 {
+	if p < 0 || p > 1 {
+		panic(badPercentile)
+	}
+	if p < 0.5 {
+		return l.Mu + l.Scale*math.Log(1+2*(p-0.5))
+	}
+	return l.Mu - l.Scale*math.Log(1-2*(p-0.5))
+}
+
+// Prob computes the value of the probability density function at x.
+func (l Laplace) Prob(x float64) float64 {
+	return math.Exp(l.LogProb(x))
+}
+
+// Rand returns a random sample drawn from the distribution.
+func (l Laplace) Rand() float64 {
+	var rnd float64
+	if l.Src == nil {
+		rnd = rand.Float64()
+	} else {
+		rnd = rand.New(l.Src).Float64()
+	}
+	u := rnd - 0.5
+	if u < 0 {
+		return l.Mu + l.Scale*math.Log(1+2*u)
+	}
+	return l.Mu - l.Scale*math.Log(1-2*u)
+}
+
+// Score returns the score function with respect to the parameters of the
+// distribution at the input location x. The score function is the derivative
+// of the log-likelihood at x with respect to the parameters
+//  (∂/∂θ) log(p(x;θ))
+// If deriv is non-nil, len(deriv) must equal the number of parameters otherwise
+// Score will panic, and the derivative is stored in-place into deriv.
If deriv +// is nil a new slice will be allocated and returned. +// +// The order is [∂LogProb / ∂Mu, ∂LogProb / ∂Scale]. +// +// For more information, see https://en.wikipedia.org/wiki/Score_%28statistics%29. +// +// Special cases: +// Score(0) = [0, -0.5/l.Scale] +func (l Laplace) Score(deriv []float64, x float64) []float64 { + if deriv == nil { + deriv = make([]float64, l.NumParameters()) + } + if len(deriv) != l.NumParameters() { + panic(badLength) + } + diff := x - l.Mu + if diff > 0 { + deriv[0] = 1 / l.Scale + } else if diff < 0 { + deriv[0] = -1 / l.Scale + } else if diff == 0 { + deriv[0] = 0 + } else { + // must be NaN + deriv[0] = math.NaN() + } + + deriv[1] = math.Abs(diff)/(l.Scale*l.Scale) - 0.5/(l.Scale) + return deriv +} + +// ScoreInput returns the score function with respect to the input of the +// distribution at the input location specified by x. The score function is the +// derivative of the log-likelihood +// (d/dx) log(p(x)) . +// Special cases: +// ScoreInput(l.Mu) = 0 +func (l Laplace) ScoreInput(x float64) float64 { + diff := x - l.Mu + if diff == 0 { + return 0 + } + if diff > 0 { + return -1 / l.Scale + } + return 1 / l.Scale +} + +// Skewness returns the skewness of the distribution. +func (Laplace) Skewness() float64 { + return 0 +} + +// StdDev returns the standard deviation of the distribution. +func (l Laplace) StdDev() float64 { + return math.Sqrt2 * l.Scale +} + +// Survival returns the survival function (complementary CDF) at x. +func (l Laplace) Survival(x float64) float64 { + if x < l.Mu { + return 1 - 0.5*math.Exp((x-l.Mu)/l.Scale) + } + return 0.5 * math.Exp(-(x-l.Mu)/l.Scale) +} + +// UnmarshalParameters implements the ParameterMarshaler interface +func (l *Laplace) UnmarshalParameters(p []Parameter) { + if len(p) != l.NumParameters() { + panic(badLength) + } + if p[0].Name != "Mu" { + panic("laplace: " + panicNameMismatch) + } + if p[1].Name != "Scale" { + panic("laplace: " + panicNameMismatch) + } + l.Mu = p[0].Value + l.Scale = p[1].Value +} + +// Variance returns the variance of the probability distribution. +func (l Laplace) Variance() float64 { + return 2 * l.Scale * l.Scale +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/lognormal.go b/vendor/gonum.org/v1/gonum/stat/distuv/lognormal.go new file mode 100644 index 0000000..85933e5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/lognormal.go @@ -0,0 +1,113 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// LogNormal represents a random variable whose log is normally distributed. +// The probability density function is given by +// 1/(x σ √2π) exp(-(ln(x)-μ)^2)/(2σ^2)) +type LogNormal struct { + Mu float64 + Sigma float64 + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (l LogNormal) CDF(x float64) float64 { + return 0.5 * math.Erfc(-(math.Log(x)-l.Mu)/(math.Sqrt2*l.Sigma)) +} + +// Entropy returns the differential entropy of the distribution. +func (l LogNormal) Entropy() float64 { + return 0.5 + 0.5*math.Log(2*math.Pi*l.Sigma*l.Sigma) + l.Mu +} + +// ExKurtosis returns the excess kurtosis of the distribution. 
+func (l LogNormal) ExKurtosis() float64 {
+	s2 := l.Sigma * l.Sigma
+	return math.Exp(4*s2) + 2*math.Exp(3*s2) + 3*math.Exp(2*s2) - 6
+}
+
+// LogProb computes the natural logarithm of the value of the probability density function at x.
+func (l LogNormal) LogProb(x float64) float64 {
+	if x < 0 {
+		return math.Inf(-1)
+	}
+	logx := math.Log(x)
+	normdiff := (logx - l.Mu) / l.Sigma
+	return -0.5*normdiff*normdiff - logx - math.Log(l.Sigma) - logRoot2Pi
+}
+
+// Mean returns the mean of the probability distribution.
+func (l LogNormal) Mean() float64 {
+	return math.Exp(l.Mu + 0.5*l.Sigma*l.Sigma)
+}
+
+// Median returns the median of the probability distribution.
+func (l LogNormal) Median() float64 {
+	return math.Exp(l.Mu)
+}
+
+// Mode returns the mode of the probability distribution.
+func (l LogNormal) Mode() float64 {
+	return math.Exp(l.Mu - l.Sigma*l.Sigma)
+}
+
+// NumParameters returns the number of parameters in the distribution.
+func (LogNormal) NumParameters() int {
+	return 2
+}
+
+// Prob computes the value of the probability density function at x.
+func (l LogNormal) Prob(x float64) float64 {
+	return math.Exp(l.LogProb(x))
+}
+
+// Quantile returns the inverse of the cumulative probability distribution.
+func (l LogNormal) Quantile(p float64) float64 {
+	if p < 0 || p > 1 {
+		panic(badPercentile)
+	}
+	// Formula from http://www.math.uah.edu/stat/special/LogNormal.html.
+	return math.Exp(l.Mu + l.Sigma*UnitNormal.Quantile(p))
+}
+
+// Rand returns a random sample drawn from the distribution.
+func (l LogNormal) Rand() float64 {
+	var rnd float64
+	if l.Src == nil {
+		rnd = rand.NormFloat64()
+	} else {
+		rnd = rand.New(l.Src).NormFloat64()
+	}
+	return math.Exp(rnd*l.Sigma + l.Mu)
+}
+
+// Skewness returns the skewness of the distribution.
+func (l LogNormal) Skewness() float64 {
+	s2 := l.Sigma * l.Sigma
+	return (math.Exp(s2) + 2) * math.Sqrt(math.Exp(s2)-1)
+}
+
+// StdDev returns the standard deviation of the probability distribution.
+func (l LogNormal) StdDev() float64 {
+	return math.Sqrt(l.Variance())
+}
+
+// Survival returns the survival function (complementary CDF) at x.
+func (l LogNormal) Survival(x float64) float64 {
+	return 0.5 * (1 - math.Erf((math.Log(x)-l.Mu)/(math.Sqrt2*l.Sigma)))
+}
+
+// Variance returns the variance of the probability distribution.
+func (l LogNormal) Variance() float64 {
+	s2 := l.Sigma * l.Sigma
+	return (math.Exp(s2) - 1) * math.Exp(2*l.Mu+s2)
+}
diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/norm.go b/vendor/gonum.org/v1/gonum/stat/distuv/norm.go
new file mode 100644
index 0000000..a8afcee
--- /dev/null
+++ b/vendor/gonum.org/v1/gonum/stat/distuv/norm.go
@@ -0,0 +1,254 @@
+// Copyright ©2014 The Gonum Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package distuv
+
+import (
+	"math"
+
+	"golang.org/x/exp/rand"
+
+	"gonum.org/v1/gonum/floats"
+	"gonum.org/v1/gonum/mathext"
+	"gonum.org/v1/gonum/stat"
+)
+
+// UnitNormal is an instantiation of the normal distribution with Mu = 0 and Sigma = 1.
+var UnitNormal = Normal{Mu: 0, Sigma: 1}
+
+// Normal represents a normal (Gaussian) distribution (https://en.wikipedia.org/wiki/Normal_distribution).
+type Normal struct { + Mu float64 // Mean of the normal distribution + Sigma float64 // Standard deviation of the normal distribution + Src rand.Source + + // Needs to be Mu and Sigma and not Mean and StdDev because Normal has functions + // Mean and StdDev +} + +// CDF computes the value of the cumulative density function at x. +func (n Normal) CDF(x float64) float64 { + return 0.5 * math.Erfc(-(x-n.Mu)/(n.Sigma*math.Sqrt2)) +} + +// ConjugateUpdate updates the parameters of the distribution from the sufficient +// statistics of a set of samples. The sufficient statistics, suffStat, have been +// observed with nSamples observations. The prior values of the distribution are those +// currently in the distribution, and have been observed with priorStrength samples. +// +// For the normal distribution, the sufficient statistics are the mean and +// uncorrected standard deviation of the samples. +// The prior is having seen strength[0] samples with mean Normal.Mu +// and strength[1] samples with standard deviation Normal.Sigma. As a result of +// this function, Normal.Mu and Normal.Sigma are updated based on the weighted +// samples, and strength is modified to include the new number of samples observed. +// +// This function panics if len(suffStat) != 2 or len(priorStrength) != 2. +func (n *Normal) ConjugateUpdate(suffStat []float64, nSamples float64, priorStrength []float64) { + // TODO: Support prior strength with math.Inf(1) to allow updating with + // a known mean/standard deviation + + totalMeanSamples := nSamples + priorStrength[0] + totalSum := suffStat[0]*nSamples + n.Mu*priorStrength[0] + + totalVarianceSamples := nSamples + priorStrength[1] + // sample variance + totalVariance := nSamples * suffStat[1] * suffStat[1] + // add prior variance + totalVariance += priorStrength[1] * n.Sigma * n.Sigma + // add cross variance from the difference of the means + meanDiff := (suffStat[0] - n.Mu) + totalVariance += priorStrength[0] * nSamples * meanDiff * meanDiff / totalMeanSamples + + n.Mu = totalSum / totalMeanSamples + n.Sigma = math.Sqrt(totalVariance / totalVarianceSamples) + floats.AddConst(nSamples, priorStrength) +} + +// Entropy returns the differential entropy of the distribution. +func (n Normal) Entropy() float64 { + return 0.5 * (log2Pi + 1 + 2*math.Log(n.Sigma)) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (Normal) ExKurtosis() float64 { + return 0 +} + +// Fit sets the parameters of the probability distribution from the +// data samples x with relative weights w. If weights is nil, then all the weights +// are 1. If weights is not nil, then the len(weights) must equal len(samples). +func (n *Normal) Fit(samples, weights []float64) { + suffStat := make([]float64, n.NumSuffStat()) + nSamples := n.SuffStat(suffStat, samples, weights) + n.ConjugateUpdate(suffStat, nSamples, make([]float64, n.NumSuffStat())) +} + +// LogProb computes the natural logarithm of the value of the probability density function at x. +func (n Normal) LogProb(x float64) float64 { + return negLogRoot2Pi - math.Log(n.Sigma) - (x-n.Mu)*(x-n.Mu)/(2*n.Sigma*n.Sigma) +} + +// Mean returns the mean of the probability distribution. +func (n Normal) Mean() float64 { + return n.Mu +} + +// Median returns the median of the normal distribution. +func (n Normal) Median() float64 { + return n.Mu +} + +// Mode returns the mode of the normal distribution. +func (n Normal) Mode() float64 { + return n.Mu +} + +// NumParameters returns the number of parameters in the distribution. 
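For orientation, here is a minimal usage sketch of the Normal fitting and query methods defined above, as driven from application code once this package is vendored. It is illustrative only: the sample values are arbitrary and the snippet is not part of the vendored file; only the Fit, CDF, Mu, and Sigma members shown in this patch are relied on.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/stat/distuv"
)

func main() {
	// Fit a Normal to a small sample; nil weights means every sample
	// is weighted equally (see Fit above).
	samples := []float64{9.7, 10.1, 10.4, 9.9, 10.0}
	var n distuv.Normal
	n.Fit(samples, nil)
	fmt.Println("mu:", n.Mu, "sigma:", n.Sigma)
	// Cumulative probability under the fitted model.
	fmt.Println("P(X <= 11):", n.CDF(11))
}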
+func (Normal) NumParameters() int { + return 2 +} + +// NumSuffStat returns the number of sufficient statistics for the distribution. +func (Normal) NumSuffStat() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (n Normal) Prob(x float64) float64 { + return math.Exp(n.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (n Normal) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return n.Mu + n.Sigma*mathext.NormalQuantile(p) +} + +// Rand returns a random sample drawn from the distribution. +func (n Normal) Rand() float64 { + var rnd float64 + if n.Src == nil { + rnd = rand.NormFloat64() + } else { + rnd = rand.New(n.Src).NormFloat64() + } + return rnd*n.Sigma + n.Mu +} + +// Score returns the score function with respect to the parameters of the +// distribution at the input location x. The score function is the derivative +// of the log-likelihood at x with respect to the parameters +// (∂/∂θ) log(p(x;θ)) +// If deriv is non-nil, len(deriv) must equal the number of parameters otherwise +// Score will panic, and the derivative is stored in-place into deriv. If deriv +// is nil a new slice will be allocated and returned. +// +// The order is [∂LogProb / ∂Mu, ∂LogProb / ∂Sigma]. +// +// For more information, see https://en.wikipedia.org/wiki/Score_%28statistics%29. +func (n Normal) Score(deriv []float64, x float64) []float64 { + if deriv == nil { + deriv = make([]float64, n.NumParameters()) + } + if len(deriv) != n.NumParameters() { + panic(badLength) + } + deriv[0] = (x - n.Mu) / (n.Sigma * n.Sigma) + deriv[1] = 1 / n.Sigma * (-1 + ((x-n.Mu)/n.Sigma)*((x-n.Mu)/n.Sigma)) + return deriv +} + +// ScoreInput returns the score function with respect to the input of the +// distribution at the input location specified by x. The score function is the +// derivative of the log-likelihood +// (d/dx) log(p(x)) . +func (n Normal) ScoreInput(x float64) float64 { + return -(1 / (2 * n.Sigma * n.Sigma)) * 2 * (x - n.Mu) +} + +// Skewness returns the skewness of the distribution. +func (Normal) Skewness() float64 { + return 0 +} + +// StdDev returns the standard deviation of the probability distribution. +func (n Normal) StdDev() float64 { + return n.Sigma +} + +// SuffStat computes the sufficient statistics of a set of samples to update +// the distribution. The sufficient statistics are stored in place, and the +// effective number of samples are returned. +// +// The normal distribution has two sufficient statistics, the mean of the samples +// and the standard deviation of the samples. +// +// If weights is nil, the weights are assumed to be 1, otherwise panics if +// len(samples) != len(weights). Panics if len(suffStat) != NumSuffStat(). +func (Normal) SuffStat(suffStat, samples, weights []float64) (nSamples float64) { + lenSamp := len(samples) + if len(weights) != 0 && len(samples) != len(weights) { + panic(badLength) + } + if len(suffStat) != (Normal{}).NumSuffStat() { + panic(badSuffStat) + } + + if len(weights) == 0 { + nSamples = float64(lenSamp) + } else { + nSamples = floats.Sum(weights) + } + + mean := stat.Mean(samples, weights) + suffStat[0] = mean + + // Use Moment and not StdDev because we want it to be uncorrected + variance := stat.MomentAbout(2, samples, mean, weights) + suffStat[1] = math.Sqrt(variance) + return nSamples +} + +// Survival returns the survival function (complementary CDF) at x. 
+func (n Normal) Survival(x float64) float64 { + return 0.5 * (1 - math.Erf((x-n.Mu)/(n.Sigma*math.Sqrt2))) +} + +// setParameters modifies the parameters of the distribution. +func (n *Normal) setParameters(p []Parameter) { + if len(p) != n.NumParameters() { + panic("normal: incorrect number of parameters to set") + } + if p[0].Name != "Mu" { + panic("normal: " + panicNameMismatch) + } + if p[1].Name != "Sigma" { + panic("normal: " + panicNameMismatch) + } + n.Mu = p[0].Value + n.Sigma = p[1].Value +} + +// Variance returns the variance of the probability distribution. +func (n Normal) Variance() float64 { + return n.Sigma * n.Sigma +} + +// parameters returns the parameters of the distribution. +func (n Normal) parameters(p []Parameter) []Parameter { + nParam := n.NumParameters() + if p == nil { + p = make([]Parameter, nParam) + } else if len(p) != nParam { + panic("normal: improper parameter length") + } + p[0].Name = "Mu" + p[0].Value = n.Mu + p[1].Name = "Sigma" + p[1].Value = n.Sigma + return p +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/pareto.go b/vendor/gonum.org/v1/gonum/stat/distuv/pareto.go new file mode 100644 index 0000000..2c040fc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/pareto.go @@ -0,0 +1,122 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// Pareto implements the Pareto (Type I) distribution, a one parameter distribution +// with support above the scale parameter. +// +// The density function is given by +// (α x_m^{α})/(x^{α+1}) for x >= x_m. +// +// For more information, see https://en.wikipedia.org/wiki/Pareto_distribution. +type Pareto struct { + // Xm is the scale parameter. + // Xm must be greater than 0. + Xm float64 + + // Alpha is the shape parameter. + // Alpha must be greater than 0. + Alpha float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (p Pareto) CDF(x float64) float64 { + if x < p.Xm { + return 0 + } + return 1 - p.Survival(x) +} + +// Entropy returns the differential entropy of the distribution. +func (p Pareto) Entropy() float64 { + return math.Log(p.Xm) - math.Log(p.Alpha) + (1 + 1/p.Alpha) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (p Pareto) ExKurtosis() float64 { + if p.Alpha <= 4 { + return 0 + } + return 6 * (p.Alpha*p.Alpha*p.Alpha + p.Alpha*p.Alpha - 6*p.Alpha - 2) / (p.Alpha * (p.Alpha - 3) * (p.Alpha - 4)) + +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (p Pareto) LogProb(x float64) float64 { + if x < p.Xm { + return math.Inf(-1) + } + return math.Log(p.Alpha) + p.Alpha*math.Log(p.Xm) - (p.Alpha+1)*math.Log(x) +} + +// Mean returns the mean of the probability distribution. +func (p Pareto) Mean() float64 { + if p.Alpha <= 1 { + return math.Inf(1) + } + return p.Alpha * p.Xm / (p.Alpha - 1) +} + +// Median returns the median of the pareto distribution. +func (p Pareto) Median() float64 { + return p.Xm * math.Pow(2, 1/p.Alpha) +} + +// Mode returns the mode of the distribution. +func (p Pareto) Mode() float64 { + return p.Xm +} + +// NumParameters returns the number of parameters in the distribution. +func (p Pareto) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. 
+func (p Pareto) Prob(x float64) float64 { + return math.Exp(p.LogProb(x)) +} + +// Rand returns a random sample drawn from the distribution. +func (p Pareto) Rand() float64 { + var rnd float64 + if p.Src == nil { + rnd = rand.ExpFloat64() + } else { + rnd = rand.New(p.Src).ExpFloat64() + } + return math.Exp(math.Log(p.Xm) + 1/p.Alpha*rnd) +} + +// StdDev returns the standard deviation of the probability distribution. +func (p Pareto) StdDev() float64 { + return math.Sqrt(p.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (p Pareto) Survival(x float64) float64 { + if x < p.Xm { + return 1 + } + return math.Exp(p.Alpha * (math.Log(p.Xm) - math.Log(x))) +} + +// Variance returns the variance of the probability distribution. +func (p Pareto) Variance() float64 { + if p.Alpha <= 2 { + return math.Inf(1) + } + am1 := p.Alpha - 1 + return p.Xm * p.Xm * p.Alpha / (am1 * am1 * (p.Alpha - 2)) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/poisson.go b/vendor/gonum.org/v1/gonum/stat/distuv/poisson.go new file mode 100644 index 0000000..9a90b2e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/poisson.go @@ -0,0 +1,138 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +// Poisson implements the Poisson distribution, a discrete probability distribution +// that expresses the probability of a given number of events occurring in a fixed +// interval. +// The poisson distribution has density function: +// f(k) = λ^k / k! e^(-λ) +// For more information, see https://en.wikipedia.org/wiki/Poisson_distribution. +type Poisson struct { + // Lambda is the average number of events in an interval. + // Lambda must be greater than 0. + Lambda float64 + + Src rand.Source +} + +// CDF computes the value of the cumulative distribution function at x. +func (p Poisson) CDF(x float64) float64 { + if x < 0 { + return 0 + } + return mathext.GammaIncRegComp(math.Floor(x+1), p.Lambda) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (p Poisson) ExKurtosis() float64 { + return 1 / p.Lambda +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. +func (p Poisson) LogProb(x float64) float64 { + if x < 0 || math.Floor(x) != x { + return math.Inf(-1) + } + lg, _ := math.Lgamma(math.Floor(x) + 1) + return x*math.Log(p.Lambda) - p.Lambda - lg +} + +// Mean returns the mean of the probability distribution. +func (p Poisson) Mean() float64 { + return p.Lambda +} + +// NumParameters returns the number of parameters in the distribution. +func (Poisson) NumParameters() int { + return 1 +} + +// Prob computes the value of the probability density function at x. +func (p Poisson) Prob(x float64) float64 { + return math.Exp(p.LogProb(x)) +} + +// Rand returns a random sample drawn from the distribution. +func (p Poisson) Rand() float64 { + // NUMERICAL RECIPES IN C: THE ART OF SCIENTIFIC COMPUTING (ISBN 0-521-43108-5) + // p. 294 + // + + rnd := rand.ExpFloat64 + var rng *rand.Rand + if p.Src != nil { + rng = rand.New(p.Src) + rnd = rng.ExpFloat64 + } + + if p.Lambda < 10.0 { + // Use direct method. + var em float64 + t := 0.0 + for { + t += rnd() + if t >= p.Lambda { + break + } + em++ + } + return em + } + // Use rejection method. 
+ rnd = rand.Float64 + if rng != nil { + rnd = rng.Float64 + } + sq := math.Sqrt(2.0 * p.Lambda) + alxm := math.Log(p.Lambda) + lg, _ := math.Lgamma(p.Lambda + 1) + g := p.Lambda*alxm - lg + for { + var em, y float64 + for { + y = math.Tan(math.Pi * rnd()) + em = sq*y + p.Lambda + if em >= 0 { + break + } + } + em = math.Floor(em) + lg, _ = math.Lgamma(em + 1) + t := 0.9 * (1.0 + y*y) * math.Exp(em*alxm-lg-g) + if rnd() <= t { + return em + } + } +} + +// Skewness returns the skewness of the distribution. +func (p Poisson) Skewness() float64 { + return 1 / math.Sqrt(p.Lambda) +} + +// StdDev returns the standard deviation of the probability distribution. +func (p Poisson) StdDev() float64 { + return math.Sqrt(p.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (p Poisson) Survival(x float64) float64 { + return 1 - p.CDF(x) +} + +// Variance returns the variance of the probability distribution. +func (p Poisson) Variance() float64 { + return p.Lambda +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/statdist.go b/vendor/gonum.org/v1/gonum/stat/distuv/statdist.go new file mode 100644 index 0000000..c0f8a29 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/statdist.go @@ -0,0 +1,126 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "gonum.org/v1/gonum/mathext" +) + +// Bhattacharyya is a type for computing the Bhattacharyya distance between +// probability distributions. +// +// The Bhattacharyya distance is defined as +// D_B = -ln(BC(l,r)) +// BC = \int_-∞^∞ (p(x)q(x))^(1/2) dx +// Where BC is known as the Bhattacharyya coefficient. +// The Bhattacharyya distance is related to the Hellinger distance by +// H(l,r) = sqrt(1-BC(l,r)) +// For more information, see +// https://en.wikipedia.org/wiki/Bhattacharyya_distance +type Bhattacharyya struct{} + +// DistBeta returns the Bhattacharyya distance between Beta distributions l and r. +// For Beta distributions, the Bhattacharyya distance is given by +// -ln(B((α_l + α_r)/2, (β_l + β_r)/2) / (B(α_l,β_l), B(α_r,β_r))) +// Where B is the Beta function. +func (Bhattacharyya) DistBeta(l, r Beta) float64 { + // Reference: https://en.wikipedia.org/wiki/Hellinger_distance#Examples + return -mathext.Lbeta((l.Alpha+r.Alpha)/2, (l.Beta+r.Beta)/2) + + 0.5*mathext.Lbeta(l.Alpha, l.Beta) + 0.5*mathext.Lbeta(r.Alpha, r.Beta) +} + +// DistNormal returns the Bhattacharyya distance Normal distributions l and r. +// For Normal distributions, the Bhattacharyya distance is given by +// s = (σ_l^2 + σ_r^2)/2 +// BC = 1/8 (μ_l-μ_r)^2/s + 1/2 ln(s/(σ_l*σ_r)) +func (Bhattacharyya) DistNormal(l, r Normal) float64 { + // Reference: https://en.wikipedia.org/wiki/Bhattacharyya_distance + m := l.Mu - r.Mu + s := (l.Sigma*l.Sigma + r.Sigma*r.Sigma) / 2 + return 0.125*m*m/s + 0.5*math.Log(s) - 0.5*math.Log(l.Sigma) - 0.5*math.Log(r.Sigma) +} + +// Hellinger is a type for computing the Hellinger distance between probability +// distributions. +// +// The Hellinger distance is defined as +// H^2(l,r) = 1/2 * int_x (\sqrt(l(x)) - \sqrt(r(x)))^2 dx +// and is bounded between 0 and 1. Note the above formula defines the squared +// Hellinger distance, while this returns the Hellinger distance itself. 
+// The Hellinger distance is related to the Bhattacharyya distance by +// H^2 = 1 - exp(-D_B) +// For more information, see +// https://en.wikipedia.org/wiki/Hellinger_distance +type Hellinger struct{} + +// DistBeta computes the Hellinger distance between Beta distributions l and r. +// See the documentation of Bhattacharyya.DistBeta for the distance formula. +func (Hellinger) DistBeta(l, r Beta) float64 { + db := Bhattacharyya{}.DistBeta(l, r) + bc := math.Exp(-db) + return math.Sqrt(1 - bc) +} + +// DistNormal computes the Hellinger distance between Normal distributions l and r. +// See the documentation of Bhattacharyya.DistNormal for the distance formula. +func (Hellinger) DistNormal(l, r Normal) float64 { + db := Bhattacharyya{}.DistNormal(l, r) + bc := math.Exp(-db) + return math.Sqrt(1 - bc) +} + +// KullbackLeibler is a type for computing the Kullback-Leibler divergence from l to r. +// +// The Kullback-Leibler divergence is defined as +// D_KL(l || r ) = \int_x p(x) log(p(x)/q(x)) dx +// Note that the Kullback-Leibler divergence is not symmetric with respect to +// the order of the input arguments. +type KullbackLeibler struct{} + +// DistBeta returns the Kullback-Leibler divergence between Beta distributions +// l and r. +// +// For two Beta distributions, the KL divergence is computed as +// D_KL(l || r) = log Γ(α_l+β_l) - log Γ(α_l) - log Γ(β_l) +// - log Γ(α_r+β_r) + log Γ(α_r) + log Γ(β_r) +// + (α_l-α_r)(ψ(α_l)-ψ(α_l+β_l)) + (β_l-β_r)(ψ(β_l)-ψ(α_l+β_l)) +// Where Γ is the gamma function and ψ is the digamma function. +func (KullbackLeibler) DistBeta(l, r Beta) float64 { + // http://bariskurt.com/kullback-leibler-divergence-between-two-dirichlet-and-beta-distributions/ + if l.Alpha <= 0 || l.Beta <= 0 { + panic("distuv: bad parameters for left distribution") + } + if r.Alpha <= 0 || r.Beta <= 0 { + panic("distuv: bad parameters for right distribution") + } + lab := l.Alpha + l.Beta + l1, _ := math.Lgamma(lab) + l2, _ := math.Lgamma(l.Alpha) + l3, _ := math.Lgamma(l.Beta) + lt := l1 - l2 - l3 + + r1, _ := math.Lgamma(r.Alpha + r.Beta) + r2, _ := math.Lgamma(r.Alpha) + r3, _ := math.Lgamma(r.Beta) + rt := r1 - r2 - r3 + + d0 := mathext.Digamma(l.Alpha + l.Beta) + ct := (l.Alpha-r.Alpha)*(mathext.Digamma(l.Alpha)-d0) + (l.Beta-r.Beta)*(mathext.Digamma(l.Beta)-d0) + + return lt - rt + ct +} + +// DistNormal returns the Kullback-Leibler divergence between Normal distributions +// l and r. +// +// For two Normal distributions, the KL divergence is computed as +// D_KL(l || r) = log(σ_r / σ_l) + (σ_l^2 + (μ_l-μ_r)^2)/(2 * σ_r^2) - 0.5 +func (KullbackLeibler) DistNormal(l, r Normal) float64 { + d := l.Mu - r.Mu + v := (l.Sigma*l.Sigma + d*d) / (2 * r.Sigma * r.Sigma) + return math.Log(r.Sigma) - math.Log(l.Sigma) + v - 0.5 +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/studentst.go b/vendor/gonum.org/v1/gonum/stat/distuv/studentst.go new file mode 100644 index 0000000..7af48fc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/studentst.go @@ -0,0 +1,161 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mathext" +) + +const logPi = 1.1447298858494001741 // http://oeis.org/A053510 + +// StudentsT implements the three-parameter Student's T distribution, a distribution +// over the real numbers. 
+//
+// The Student's T distribution has density function
+//  Γ((ν+1)/2) / (sqrt(νπ) Γ(ν/2) σ) (1 + 1/ν * ((x-μ)/σ)^2)^(-(ν+1)/2)
+//
+// The Student's T distribution approaches the normal distribution as ν → ∞.
+//
+// For more information, see https://en.wikipedia.org/wiki/Student%27s_t-distribution,
+// specifically https://en.wikipedia.org/wiki/Student%27s_t-distribution#Non-standardized_Student.27s_t-distribution .
+//
+// The standard Student's T distribution is with Mu = 0, and Sigma = 1.
+type StudentsT struct {
+	// Mu is the location parameter of the distribution, and the mean of the
+	// distribution
+	Mu float64
+
+	// Sigma is the scale parameter of the distribution. It is related to the
+	// standard deviation by std = Sigma * sqrt(Nu/(Nu-2))
+	Sigma float64
+
+	// Nu is the shape parameter of the distribution, representing the number of
+	// degrees of freedom of the distribution, and one less than the number of
+	// observations from a Normal distribution.
+	Nu float64
+
+	Src rand.Source
+}
+
+// CDF computes the value of the cumulative distribution function at x.
+func (s StudentsT) CDF(x float64) float64 {
+	// transform to standard normal
+	y := (x - s.Mu) / s.Sigma
+	if y == 0 {
+		return 0.5
+	}
+	// For t > 0
+	//  F(y) = 1 - 0.5 * I_t(y)(nu/2, 1/2)
+	//  t(y) = nu/(y^2 + nu)
+	// and 1 - F(y) for t < 0
+	t := s.Nu / (y*y + s.Nu)
+	if y > 0 {
+		return 1 - 0.5*mathext.RegIncBeta(0.5*s.Nu, 0.5, t)
+	}
+	return 0.5 * mathext.RegIncBeta(s.Nu/2, 0.5, t)
+}
+
+// LogProb computes the natural logarithm of the value of the probability
+// density function at x.
+func (s StudentsT) LogProb(x float64) float64 {
+	g1, _ := math.Lgamma((s.Nu + 1) / 2)
+	g2, _ := math.Lgamma(s.Nu / 2)
+	z := (x - s.Mu) / s.Sigma
+	return g1 - g2 - 0.5*math.Log(s.Nu) - 0.5*logPi - math.Log(s.Sigma) - ((s.Nu+1)/2)*math.Log(1+z*z/s.Nu)
+}
+
+// Mean returns the mean of the probability distribution.
+func (s StudentsT) Mean() float64 {
+	return s.Mu
+}
+
+// Mode returns the mode of the distribution.
+func (s StudentsT) Mode() float64 {
+	return s.Mu
+}
+
+// NumParameters returns the number of parameters in the distribution.
+func (StudentsT) NumParameters() int {
+	return 3
+}
+
+// Prob computes the value of the probability density function at x.
+func (s StudentsT) Prob(x float64) float64 {
+	return math.Exp(s.LogProb(x))
+}
+
+// Quantile returns the inverse of the cumulative distribution function.
+func (s StudentsT) Quantile(p float64) float64 {
+	if p < 0 || p > 1 {
+		panic(badPercentile)
+	}
+	// F(x) = 1 - 0.5 * I_t(x)(nu/2, 1/2)
+	// t(x) = nu/(t^2 + nu)
+	if p == 0.5 {
+		return s.Mu
+	}
+	var y float64
+	if p > 0.5 {
+		// Know t > 0
+		t := mathext.InvRegIncBeta(s.Nu/2, 0.5, 2*(1-p))
+		y = math.Sqrt(s.Nu * (1 - t) / t)
+	} else {
+		t := mathext.InvRegIncBeta(s.Nu/2, 0.5, 2*p)
+		y = -math.Sqrt(s.Nu * (1 - t) / t)
+	}
+	// Convert out of standard normal
+	return y*s.Sigma + s.Mu
+}
+
+// Rand returns a random sample drawn from the distribution.
+func (s StudentsT) Rand() float64 {
+	// http://www.math.uah.edu/stat/special/Student.html
+	n := Normal{0, 1, s.Src}.Rand()
+	c := Gamma{s.Nu / 2, 0.5, s.Src}.Rand()
+	z := n / math.Sqrt(c/s.Nu)
+	return z*s.Sigma + s.Mu
+}
+
+// StdDev returns the standard deviation of the probability distribution.
+//
+// The standard deviation is undefined for ν <= 1, and this returns math.NaN().
+func (s StudentsT) StdDev() float64 {
+	return math.Sqrt(s.Variance())
+}
+
+// Survival returns the survival function (complementary CDF) at x.
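For reference, a minimal, illustrative sketch of using the Quantile/CDF pair above to obtain a two-sided Student's t critical value. The degrees-of-freedom value is arbitrary and the snippet is not part of the vendored file.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/stat/distuv"
)

func main() {
	// Standard Student's t: location 0, scale 1, 10 degrees of freedom.
	t := distuv.StudentsT{Mu: 0, Sigma: 1, Nu: 10}
	crit := t.Quantile(0.975) // upper 2.5% point, roughly 2.23 for Nu = 10
	fmt.Println("two-sided 95% critical value:", crit)
	fmt.Println("CDF at the critical value:", t.CDF(crit)) // recovers ~0.975
}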
+func (s StudentsT) Survival(x float64) float64 { + // transform to standard normal + y := (x - s.Mu) / s.Sigma + if y == 0 { + return 0.5 + } + // For t > 0 + // F(y) = 1 - 0.5 * I_t(y)(nu/2, 1/2) + // t(y) = nu/(y^2 + nu) + // and 1 - F(y) for t < 0 + t := s.Nu / (y*y + s.Nu) + if y > 0 { + return 0.5 * mathext.RegIncBeta(s.Nu/2, 0.5, t) + } + return 1 - 0.5*mathext.RegIncBeta(s.Nu/2, 0.5, t) +} + +// Variance returns the variance of the probability distribution. +// +// The variance is undefined for ν <= 1, and this returns math.NaN(). +func (s StudentsT) Variance() float64 { + if s.Nu < 1 { + return math.NaN() + } + if s.Nu <= 2 { + return math.Inf(1) + } + return s.Sigma * s.Sigma * s.Nu / (s.Nu - 2) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/triangle.go b/vendor/gonum.org/v1/gonum/stat/distuv/triangle.go new file mode 100644 index 0000000..65afd16 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/triangle.go @@ -0,0 +1,193 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// Triangle represents a triangle distribution (https://en.wikipedia.org/wiki/Triangular_distribution). +type Triangle struct { + a, b, c float64 + src rand.Source +} + +// NewTriangle constructs a new triangle distribution with lower limit a, upper limit b, and mode c. +// Constraints are a < b and a ≤ c ≤ b. +// This distribution is uncommon in nature, but may be useful for simulation. +func NewTriangle(a, b, c float64, src rand.Source) Triangle { + checkTriangleParameters(a, b, c) + return Triangle{a: a, b: b, c: c, src: src} +} + +func checkTriangleParameters(a, b, c float64) { + if a >= b { + panic("triangle: constraint of a < b violated") + } + if a > c { + panic("triangle: constraint of a <= c violated") + } + if c > b { + panic("triangle: constraint of c <= b violated") + } +} + +// CDF computes the value of the cumulative density function at x. +func (t Triangle) CDF(x float64) float64 { + switch { + case x <= t.a: + return 0 + case x <= t.c: + d := x - t.a + return (d * d) / ((t.b - t.a) * (t.c - t.a)) + case x < t.b: + d := t.b - x + return 1 - (d*d)/((t.b-t.a)*(t.b-t.c)) + default: + return 1 + } +} + +// Entropy returns the entropy of the distribution. +func (t Triangle) Entropy() float64 { + return 0.5 + math.Log(t.b-t.a) - math.Ln2 +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (Triangle) ExKurtosis() float64 { + return -3.0 / 5.0 +} + +// Fit is not appropriate for Triangle, because the distribution is generally used when there is little data. + +// LogProb computes the natural logarithm of the value of the probability density function at x. +func (t Triangle) LogProb(x float64) float64 { + return math.Log(t.Prob(x)) +} + +// Mean returns the mean of the probability distribution. +func (t Triangle) Mean() float64 { + return (t.a + t.b + t.c) / 3 +} + +// Median returns the median of the probability distribution. +func (t Triangle) Median() float64 { + if t.c >= (t.a+t.b)/2 { + return t.a + math.Sqrt((t.b-t.a)*(t.c-t.a)/2) + } + return t.b - math.Sqrt((t.b-t.a)*(t.b-t.c)/2) +} + +// Mode returns the mode of the probability distribution. +func (t Triangle) Mode() float64 { + return t.c +} + +// NumParameters returns the number of parameters in the distribution. 
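Unlike most distributions in this package, Triangle is built through a constructor that validates its parameters (NewTriangle above). A short, illustrative usage sketch follows; the numbers are arbitrary and the snippet is not part of the vendored file.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/stat/distuv"
)

func main() {
	// Three-point estimate: lower limit 2, upper limit 12, mode 5.
	// NewTriangle panics if the constraints a < b and a <= c <= b are violated.
	tri := distuv.NewTriangle(2, 12, 5, nil)
	fmt.Println("mean:", tri.Mean())
	fmt.Println("P(X <= 6):", tri.CDF(6))
}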
+func (Triangle) NumParameters() int { + return 3 +} + +// Prob computes the value of the probability density function at x. +func (t Triangle) Prob(x float64) float64 { + switch { + case x < t.a: + return 0 + case x < t.c: + return 2 * (x - t.a) / ((t.b - t.a) * (t.c - t.a)) + case x == t.c: + return 2 / (t.b - t.a) + case x <= t.b: + return 2 * (t.b - x) / ((t.b - t.a) * (t.b - t.c)) + default: + return 0 + } +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (t Triangle) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + + f := (t.c - t.a) / (t.b - t.a) + + if p < f { + return t.a + math.Sqrt(p*(t.b-t.a)*(t.c-t.a)) + } + return t.b - math.Sqrt((1-p)*(t.b-t.a)*(t.b-t.c)) +} + +// Rand returns a random sample drawn from the distribution. +func (t Triangle) Rand() float64 { + var rnd float64 + if t.src == nil { + rnd = rand.Float64() + } else { + rnd = rand.New(t.src).Float64() + } + + return t.Quantile(rnd) +} + +// Skewness returns the skewness of the distribution. +func (t Triangle) Skewness() float64 { + n := math.Sqrt2 * (t.a + t.b - 2*t.c) * (2*t.a - t.b - t.c) * (t.a - 2*t.b + t.c) + d := 5 * math.Pow(t.a*t.a+t.b*t.b+t.c*t.c-t.a*t.b-t.a*t.c-t.b*t.c, 3.0/2.0) + + return n / d +} + +// StdDev returns the standard deviation of the probability distribution. +func (t Triangle) StdDev() float64 { + return math.Sqrt(t.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (t Triangle) Survival(x float64) float64 { + return 1 - t.CDF(x) +} + +// MarshalParameters implements the ParameterMarshaler interface +func (t Triangle) MarshalParameters(p []Parameter) { + if len(p) != t.NumParameters() { + panic("triangle: improper parameter length") + } + p[0].Name = "A" + p[0].Value = t.a + p[1].Name = "B" + p[1].Value = t.b + p[2].Name = "C" + p[2].Value = t.c +} + +// UnmarshalParameters implements the ParameterMarshaler interface +func (t *Triangle) UnmarshalParameters(p []Parameter) { + if len(p) != t.NumParameters() { + panic("triangle: incorrect number of parameters to set") + } + if p[0].Name != "A" { + panic("triangle: " + panicNameMismatch) + } + if p[1].Name != "B" { + panic("triangle: " + panicNameMismatch) + } + if p[2].Name != "C" { + panic("triangle: " + panicNameMismatch) + } + + checkTriangleParameters(p[0].Value, p[1].Value, p[2].Value) + + t.a = p[0].Value + t.b = p[1].Value + t.c = p[2].Value +} + +// Variance returns the variance of the probability distribution. +func (t Triangle) Variance() float64 { + return (t.a*t.a + t.b*t.b + t.c*t.c - t.a*t.b - t.a*t.c - t.b*t.c) / 18 +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/uniform.go b/vendor/gonum.org/v1/gonum/stat/distuv/uniform.go new file mode 100644 index 0000000..cd6e253 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/uniform.go @@ -0,0 +1,159 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + + "golang.org/x/exp/rand" +) + +// UnitUniform is an instantiation of the uniform distribution with Min = 0 +// and Max = 1. +var UnitUniform = Uniform{Min: 0, Max: 1} + +// Uniform represents a continuous uniform distribution (https://en.wikipedia.org/wiki/Uniform_distribution_%28continuous%29). +type Uniform struct { + Min float64 + Max float64 + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. 
+func (u Uniform) CDF(x float64) float64 { + if x < u.Min { + return 0 + } + if x > u.Max { + return 1 + } + return (x - u.Min) / (u.Max - u.Min) +} + +// Uniform doesn't have any of the DLogProbD? because the derivative is 0 everywhere +// except where it's undefined + +// Entropy returns the entropy of the distribution. +func (u Uniform) Entropy() float64 { + return math.Log(u.Max - u.Min) +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (Uniform) ExKurtosis() float64 { + return -6.0 / 5.0 +} + +// Uniform doesn't have Fit because it's a bad idea to fit a uniform from data. + +// LogProb computes the natural logarithm of the value of the probability density function at x. +func (u Uniform) LogProb(x float64) float64 { + if x < u.Min { + return math.Inf(-1) + } + if x > u.Max { + return math.Inf(-1) + } + return -math.Log(u.Max - u.Min) +} + +// MarshalParameters implements the ParameterMarshaler interface +func (u Uniform) MarshalParameters(p []Parameter) { + if len(p) != u.NumParameters() { + panic("uniform: improper parameter length") + } + p[0].Name = "Min" + p[0].Value = u.Min + p[1].Name = "Max" + p[1].Value = u.Max +} + +// Mean returns the mean of the probability distribution. +func (u Uniform) Mean() float64 { + return (u.Max + u.Min) / 2 +} + +// Median returns the median of the probability distribution. +func (u Uniform) Median() float64 { + return (u.Max + u.Min) / 2 +} + +// Uniform doesn't have a mode because it's any value in the distribution + +// NumParameters returns the number of parameters in the distribution. +func (Uniform) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (u Uniform) Prob(x float64) float64 { + if x < u.Min { + return 0 + } + if x > u.Max { + return 0 + } + return 1 / (u.Max - u.Min) +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (u Uniform) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return p*(u.Max-u.Min) + u.Min +} + +// Rand returns a random sample drawn from the distribution. +func (u Uniform) Rand() float64 { + var rnd float64 + if u.Src == nil { + rnd = rand.Float64() + } else { + rnd = rand.New(u.Src).Float64() + } + return rnd*(u.Max-u.Min) + u.Min +} + +// Skewness returns the skewness of the distribution. +func (Uniform) Skewness() float64 { + return 0 +} + +// StdDev returns the standard deviation of the probability distribution. +func (u Uniform) StdDev() float64 { + return math.Sqrt(u.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (u Uniform) Survival(x float64) float64 { + if x < u.Min { + return 1 + } + if x > u.Max { + return 0 + } + return (u.Max - x) / (u.Max - u.Min) +} + +// UnmarshalParameters implements the ParameterMarshaler interface +func (u *Uniform) UnmarshalParameters(p []Parameter) { + if len(p) != u.NumParameters() { + panic("uniform: incorrect number of parameters to set") + } + if p[0].Name != "Min" { + panic("uniform: " + panicNameMismatch) + } + if p[1].Name != "Max" { + panic("uniform: " + panicNameMismatch) + } + + u.Min = p[0].Value + u.Max = p[1].Value +} + +// Variance returns the variance of the probability distribution. 
+func (u Uniform) Variance() float64 { + return 1.0 / 12.0 * (u.Max - u.Min) * (u.Max - u.Min) +} diff --git a/vendor/gonum.org/v1/gonum/stat/distuv/weibull.go b/vendor/gonum.org/v1/gonum/stat/distuv/weibull.go new file mode 100644 index 0000000..8368db1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/distuv/weibull.go @@ -0,0 +1,243 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package distuv + +import ( + "math" + "math/cmplx" + + "golang.org/x/exp/rand" +) + +// Weibull distribution. Valid range for x is [0,+∞). +type Weibull struct { + // Shape parameter of the distribution. A value of 1 represents + // the exponential distribution. A value of 2 represents the + // Rayleigh distribution. Valid range is (0,+∞). + K float64 + // Scale parameter of the distribution. Valid range is (0,+∞). + Lambda float64 + // Source of random numbers + Src rand.Source +} + +// CDF computes the value of the cumulative density function at x. +func (w Weibull) CDF(x float64) float64 { + if x < 0 { + return 0 + } + return 1 - cmplx.Abs(cmplx.Exp(w.LogCDF(x))) +} + +// Entropy returns the entropy of the distribution. +func (w Weibull) Entropy() float64 { + return eulerGamma*(1-1/w.K) + math.Log(w.Lambda/w.K) + 1 +} + +// ExKurtosis returns the excess kurtosis of the distribution. +func (w Weibull) ExKurtosis() float64 { + return (-6*w.gammaIPow(1, 4) + 12*w.gammaIPow(1, 2)*math.Gamma(1+2/w.K) - 3*w.gammaIPow(2, 2) - 4*math.Gamma(1+1/w.K)*math.Gamma(1+3/w.K) + math.Gamma(1+4/w.K)) / math.Pow(math.Gamma(1+2/w.K)-w.gammaIPow(1, 2), 2) +} + +// gammIPow is a shortcut for computing the gamma function to a power. +func (w Weibull) gammaIPow(i, pow float64) float64 { + return math.Pow(math.Gamma(1+i/w.K), pow) +} + +// LogCDF computes the value of the log of the cumulative density function at x. +func (w Weibull) LogCDF(x float64) complex128 { + if x < 0 { + return 0 + } + return cmplx.Log(-1) + complex(-math.Pow(x/w.Lambda, w.K), 0) +} + +// LogProb computes the natural logarithm of the value of the probability +// density function at x. Zero is returned if x is less than zero. +// +// Special cases occur when x == 0, and the result depends on the shape +// parameter as follows: +// If 0 < K < 1, LogProb returns +Inf. +// If K == 1, LogProb returns 0. +// If K > 1, LogProb returns -Inf. +func (w Weibull) LogProb(x float64) float64 { + if x < 0 { + return 0 + } + return math.Log(w.K) - math.Log(w.Lambda) + (w.K-1)*(math.Log(x)-math.Log(w.Lambda)) - math.Pow(x/w.Lambda, w.K) +} + +// LogSurvival returns the log of the survival function (complementary CDF) at x. +func (w Weibull) LogSurvival(x float64) float64 { + if x < 0 { + return 0 + } + return -math.Pow(x/w.Lambda, w.K) +} + +// Mean returns the mean of the probability distribution. +func (w Weibull) Mean() float64 { + return w.Lambda * math.Gamma(1+1/w.K) +} + +// Median returns the median of the normal distribution. +func (w Weibull) Median() float64 { + return w.Lambda * math.Pow(ln2, 1/w.K) +} + +// Mode returns the mode of the normal distribution. +// +// The mode is NaN in the special case where the K (shape) parameter +// is less than 1. +func (w Weibull) Mode() float64 { + if w.K > 1 { + return w.Lambda * math.Pow((w.K-1)/w.K, 1/w.K) + } else if w.K == 1 { + return 0 + } else { + return math.NaN() + } +} + +// NumParameters returns the number of parameters in the distribution. 
+func (Weibull) NumParameters() int { + return 2 +} + +// Prob computes the value of the probability density function at x. +func (w Weibull) Prob(x float64) float64 { + if x < 0 { + return 0 + } + return math.Exp(w.LogProb(x)) +} + +// Quantile returns the inverse of the cumulative probability distribution. +func (w Weibull) Quantile(p float64) float64 { + if p < 0 || p > 1 { + panic(badPercentile) + } + return w.Lambda * math.Pow(-math.Log(1-p), 1/w.K) +} + +// Rand returns a random sample drawn from the distribution. +func (w Weibull) Rand() float64 { + var rnd float64 + if w.Src == nil { + rnd = rand.Float64() + } else { + rnd = rand.New(w.Src).Float64() + } + return w.Quantile(rnd) +} + +// Score returns the score function with respect to the parameters of the +// distribution at the input location x. The score function is the derivative +// of the log-likelihood at x with respect to the parameters +// (∂/∂θ) log(p(x;θ)) +// If deriv is non-nil, len(deriv) must equal the number of parameters otherwise +// Score will panic, and the derivative is stored in-place into deriv. If deriv +// is nil a new slice will be allocated and returned. +// +// The order is [∂LogProb / ∂K, ∂LogProb / ∂λ]. +// +// For more information, see https://en.wikipedia.org/wiki/Score_%28statistics%29. +// +// Special cases: +// Score(0) = [NaN, NaN] +func (w Weibull) Score(deriv []float64, x float64) []float64 { + if deriv == nil { + deriv = make([]float64, w.NumParameters()) + } + if len(deriv) != w.NumParameters() { + panic(badLength) + } + if x > 0 { + deriv[0] = 1/w.K + math.Log(x) - math.Log(w.Lambda) - (math.Log(x)-math.Log(w.Lambda))*math.Pow(x/w.Lambda, w.K) + deriv[1] = (w.K * (math.Pow(x/w.Lambda, w.K) - 1)) / w.Lambda + return deriv + } + if x < 0 { + deriv[0] = 0 + deriv[1] = 0 + return deriv + } + deriv[0] = math.NaN() + deriv[1] = math.NaN() + return deriv +} + +// ScoreInput returns the score function with respect to the input of the +// distribution at the input location specified by x. The score function is the +// derivative of the log-likelihood +// (d/dx) log(p(x)) . +// +// Special cases: +// ScoreInput(0) = NaN +func (w Weibull) ScoreInput(x float64) float64 { + if x > 0 { + return (-w.K*math.Pow(x/w.Lambda, w.K) + w.K - 1) / x + } + if x < 0 { + return 0 + } + return math.NaN() +} + +// Skewness returns the skewness of the distribution. +func (w Weibull) Skewness() float64 { + stdDev := w.StdDev() + firstGamma, firstGammaSign := math.Lgamma(1 + 3/w.K) + logFirst := firstGamma + 3*(math.Log(w.Lambda)-math.Log(stdDev)) + logSecond := math.Log(3) + math.Log(w.Mean()) + 2*math.Log(stdDev) - 3*math.Log(stdDev) + logThird := 3 * (math.Log(w.Mean()) - math.Log(stdDev)) + return float64(firstGammaSign)*math.Exp(logFirst) - math.Exp(logSecond) - math.Exp(logThird) +} + +// StdDev returns the standard deviation of the probability distribution. +func (w Weibull) StdDev() float64 { + return math.Sqrt(w.Variance()) +} + +// Survival returns the survival function (complementary CDF) at x. +func (w Weibull) Survival(x float64) float64 { + return math.Exp(w.LogSurvival(x)) +} + +// setParameters modifies the parameters of the distribution. 
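// Illustrative sketch (not from the upstream gonum sources): minimal use of the
// Weibull distribution defined above. K = 2 is the Rayleigh special case noted
// in the type comment; the scale and seed values are arbitrary.

package main

import (
	"fmt"

	"golang.org/x/exp/rand"

	"gonum.org/v1/gonum/stat/distuv"
)

func main() {
	w := distuv.Weibull{K: 2, Lambda: 1.5, Src: rand.NewSource(1)}
	fmt.Println(w.Mean(), w.Median(), w.Mode())
	fmt.Println(w.Prob(1), w.CDF(1), w.Survival(1))
	fmt.Println(w.Quantile(0.9)) // the x for which CDF(x) = 0.9
	fmt.Println(w.Rand())        // a reproducible draw from the seeded source
}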
+func (w *Weibull) setParameters(p []Parameter) { + if len(p) != w.NumParameters() { + panic("weibull: incorrect number of parameters to set") + } + if p[0].Name != "K" { + panic("weibull: " + panicNameMismatch) + } + if p[1].Name != "λ" { + panic("weibull: " + panicNameMismatch) + } + w.K = p[0].Value + w.Lambda = p[1].Value +} + +// Variance returns the variance of the probability distribution. +func (w Weibull) Variance() float64 { + return math.Pow(w.Lambda, 2) * (math.Gamma(1+2/w.K) - w.gammaIPow(1, 2)) +} + +// parameters returns the parameters of the distribution. +func (w Weibull) parameters(p []Parameter) []Parameter { + nParam := w.NumParameters() + if p == nil { + p = make([]Parameter, nParam) + } else if len(p) != nParam { + panic("weibull: improper parameter length") + } + p[0].Name = "K" + p[0].Value = w.K + p[1].Name = "λ" + p[1].Value = w.Lambda + return p + +} diff --git a/vendor/gonum.org/v1/gonum/stat/doc.go b/vendor/gonum.org/v1/gonum/stat/doc.go new file mode 100644 index 0000000..d6916cb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stat provides generalized statistical functions. +package stat // import "gonum.org/v1/gonum/stat" diff --git a/vendor/gonum.org/v1/gonum/stat/mds/doc.go b/vendor/gonum.org/v1/gonum/stat/mds/doc.go new file mode 100644 index 0000000..38cb9b2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/mds/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package mds provides multidimensional scaling functions. +package mds // import "gonum.org/v1/gonum/stat/mds" diff --git a/vendor/gonum.org/v1/gonum/stat/mds/mds.go b/vendor/gonum.org/v1/gonum/stat/mds/mds.go new file mode 100644 index 0000000..5994a89 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/mds/mds.go @@ -0,0 +1,90 @@ +// Copyright ©2018 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package mds + +import ( + "math" + + "gonum.org/v1/gonum/blas/blas64" + "gonum.org/v1/gonum/mat" +) + +// TorgersonScaling converts a dissimilarity matrix to a matrix containing +// Euclidean coordinates. TorgersonScaling places the coordinates in dst and +// returns it and the number of positive Eigenvalues if successful. +// If the scaling is not successful, dst is returned, but will not be a valid scaling. +// When the scaling is successful, mds will be resized to k columns wide. +// Eigenvalues will be copied into eigdst and returned as eig if it is provided. +// +// If dst is nil, a new mat.Dense is allocated. If dst is not a zero matrix, +// the dimensions of dst and dis must match otherwise TorgersonScaling will panic. +// The dis matrix must be square or TorgersonScaling will panic. 
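// Illustrative sketch (not from the upstream gonum sources): minimal use of
// TorgersonScaling as defined below. The dissimilarity matrix holds pairwise
// distances between three points at 0, 1 and 3 on a line, so the recovered
// configuration is essentially one-dimensional.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
	"gonum.org/v1/gonum/stat/mds"
)

func main() {
	dis := mat.NewSymDense(3, []float64{
		0, 1, 3,
		1, 0, 2,
		3, 2, 0,
	})
	// nil dst and eigdst let TorgersonScaling allocate for us.
	k, coords, _ := mds.TorgersonScaling(nil, nil, dis)
	fmt.Println("retained dimensions:", k)
	fmt.Printf("coordinates:\n%v\n", mat.Formatted(coords))
}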
+func TorgersonScaling(dst *mat.Dense, eigdst []float64, dis mat.Symmetric) (k int, mds *mat.Dense, eig []float64) { + // https://doi.org/10.1007/0-387-28981-X_12 + + n := dis.Symmetric() + if dst == nil { + dst = mat.NewDense(n, n, nil) + } else if r, c := dst.Dims(); !dst.IsZero() && (r != n || c != n) { + panic(mat.ErrShape) + } + + b := mat.NewSymDense(n, nil) + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + v := dis.At(i, j) + v *= v + b.SetSym(i, j, v) + } + } + c := mat.NewSymDense(n, nil) + s := -1 / float64(n) + for i := 0; i < n; i++ { + c.SetSym(i, i, 1+s) + for j := i + 1; j < n; j++ { + c.SetSym(i, j, s) + } + } + dst.Product(c, b, c) + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + b.SetSym(i, j, -0.5*dst.At(i, j)) + } + } + + var ed mat.EigenSym + ok := ed.Factorize(b, true) + if !ok { + return 0, dst, eigdst + } + ed.VectorsTo(dst) + vals := ed.Values(nil) + reverse(vals, dst.RawMatrix()) + copy(eigdst, vals) + + for i, v := range vals { + if v < 0 { + vals[i] = 0 + continue + } + k = i + 1 + vals[i] = math.Sqrt(v) + } + + var tmp mat.Dense + tmp.Mul(dst, mat.NewDiagonalRect(n, k, vals[:k])) + dst = dst.Slice(0, n, 0, k).(*mat.Dense) + dst.Copy(&tmp) + + return k, dst, eigdst +} + +func reverse(values []float64, vectors blas64.General) { + for i, j := 0, len(values)-1; i < j; i, j = i+1, j-1 { + values[i], values[j] = values[j], values[i] + blas64.Swap(blas64.Vector{N: vectors.Rows, Inc: vectors.Stride, Data: vectors.Data[i:]}, + blas64.Vector{N: vectors.Rows, Inc: vectors.Stride, Data: vectors.Data[j:]}) + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/pca_cca.go b/vendor/gonum.org/v1/gonum/stat/pca_cca.go new file mode 100644 index 0000000..2613d7a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/pca_cca.go @@ -0,0 +1,316 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stat + +import ( + "errors" + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +// PC is a type for computing and extracting the principal components of a +// matrix. The results of the principal components analysis are only valid +// if the call to PrincipalComponents was successful. +type PC struct { + n, d int + weights []float64 + svd *mat.SVD + ok bool +} + +// PrincipalComponents performs a weighted principal components analysis on the +// matrix of the input data which is represented as an n×d matrix a where each +// row is an observation and each column is a variable. +// +// PrincipalComponents centers the variables but does not scale the variance. +// +// The weights slice is used to weight the observations. If weights is nil, each +// weight is considered to have a value of one, otherwise the length of weights +// must match the number of observations or PrincipalComponents will panic. +// +// PrincipalComponents returns whether the analysis was successful. +func (c *PC) PrincipalComponents(a mat.Matrix, weights []float64) (ok bool) { + c.n, c.d = a.Dims() + if weights != nil && len(weights) != c.n { + panic("stat: len(weights) != observations") + } + + c.svd, c.ok = svdFactorizeCentered(c.svd, a, weights) + if c.ok { + c.weights = append(c.weights[:0], weights...) + } + return c.ok +} + +// VectorsTo returns the component direction vectors of a principal components +// analysis. The vectors are returned in the columns of a d×min(n, d) matrix. +// If dst is not nil it must either be zero-sized or be a d×min(n, d) matrix. 
+// dst will be used as the destination for the direction vector data. If dst +// is nil, a new mat.Dense is allocated for the destination. +func (c *PC) VectorsTo(dst *mat.Dense) *mat.Dense { + if !c.ok { + panic("stat: use of unsuccessful principal components analysis") + } + + if dst != nil { + if d, n := dst.Dims(); !dst.IsZero() && (d != c.d || n != min(c.n, c.d)) { + panic(mat.ErrShape) + } + } + return c.svd.VTo(dst) +} + +// VarsTo returns the column variances of the principal component scores, +// b * vecs, where b is a matrix with centered columns. Variances are returned +// in descending order. +// If dst is not nil it is used to store the variances and returned. +// Vars will panic if the receiver has not successfully performed a principal +// components analysis or dst is not nil and the length of dst is not min(n, d). +func (c *PC) VarsTo(dst []float64) []float64 { + if !c.ok { + panic("stat: use of unsuccessful principal components analysis") + } + if dst != nil && len(dst) != min(c.n, c.d) { + panic("stat: length of slice does not match analysis") + } + + dst = c.svd.Values(dst) + var f float64 + if c.weights == nil { + f = 1 / float64(c.n-1) + } else { + f = 1 / (floats.Sum(c.weights) - 1) + } + for i, v := range dst { + dst[i] = f * v * v + } + return dst +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// CC is a type for computing the canonical correlations of a pair of matrices. +// The results of the canonical correlation analysis are only valid +// if the call to CanonicalCorrelations was successful. +type CC struct { + // n is the number of observations used to + // construct the canonical correlations. + n int + + // xd and yd are used for size checks. + xd, yd int + + x, y, c *mat.SVD + ok bool +} + +// CanonicalCorrelations performs a canonical correlation analysis of the +// input data x and y, columns of which should be interpretable as two sets +// of measurements on the same observations (rows). These observations are +// optionally weighted by weights. The result of the analysis is stored in +// the receiver if the analysis is successful. +// +// Canonical correlation analysis finds associations between two sets of +// variables on the same observations by finding linear combinations of the two +// sphered datasets that maximize the correlation between them. +// +// Some notation: let Xc and Yc denote the centered input data matrices x +// and y (column means subtracted from each column), let Sx and Sy denote the +// sample covariance matrices within x and y respectively, and let Sxy denote +// the covariance matrix between x and y. The sphered data can then be expressed +// as Xc * Sx^{-1/2} and Yc * Sy^{-1/2} respectively, and the correlation matrix +// between the sphered data is called the canonical correlation matrix, +// Sx^{-1/2} * Sxy * Sy^{-1/2}. In cases where S^{-1/2} is ambiguous for some +// covariance matrix S, S^{-1/2} is taken to be E * D^{-1/2} * E^T where S can +// be eigendecomposed as S = E * D * E^T. +// +// The canonical correlations are the correlations between the corresponding +// pairs of canonical variables and can be obtained with c.Corrs(). Canonical +// variables can be obtained by projecting the sphered data into the left and +// right eigenvectors of the canonical correlation matrix, and these +// eigenvectors can be obtained with c.Left(m, true) and c.Right(m, true) +// respectively. 
The canonical variables can also be obtained directly from the +// centered raw data by using the back-transformed eigenvectors which can be +// obtained with c.Left(m, false) and c.Right(m, false) respectively. +// +// The first pair of left and right eigenvectors of the canonical correlation +// matrix can be interpreted as directions into which the respective sphered +// data can be projected such that the correlation between the two projections +// is maximized. The second pair and onwards solve the same optimization but +// under the constraint that they are uncorrelated (orthogonal in sphered space) +// to previous projections. +// +// CanonicalCorrelations will panic if the inputs x and y do not have the same +// number of rows. +// +// The slice weights is used to weight the observations. If weights is nil, each +// weight is considered to have a value of one, otherwise the length of weights +// must match the number of observations (rows of both x and y) or +// CanonicalCorrelations will panic. +// +// More details can be found at +// https://en.wikipedia.org/wiki/Canonical_correlation +// or in Chapter 3 of +// Koch, Inge. Analysis of multivariate and high-dimensional data. +// Vol. 32. Cambridge University Press, 2013. ISBN: 9780521887939 +func (c *CC) CanonicalCorrelations(x, y mat.Matrix, weights []float64) error { + var yn int + c.n, c.xd = x.Dims() + yn, c.yd = y.Dims() + if c.n != yn { + panic("stat: unequal number of observations") + } + if weights != nil && len(weights) != c.n { + panic("stat: len(weights) != observations") + } + + // Center and factorize x and y. + c.x, c.ok = svdFactorizeCentered(c.x, x, weights) + if !c.ok { + return errors.New("stat: failed to factorize x") + } + c.y, c.ok = svdFactorizeCentered(c.y, y, weights) + if !c.ok { + return errors.New("stat: failed to factorize y") + } + xu := c.x.UTo(nil) + xv := c.x.VTo(nil) + yu := c.y.UTo(nil) + yv := c.y.VTo(nil) + + // Calculate and factorise the canonical correlation matrix. + var ccor mat.Dense + ccor.Product(xv, xu.T(), yu, yv.T()) + if c.c == nil { + c.c = &mat.SVD{} + } + c.ok = c.c.Factorize(&ccor, mat.SVDThin) + if !c.ok { + return errors.New("stat: failed to factorize ccor") + } + return nil +} + +// CorrsTo returns the canonical correlations, using dst if it is not nil. +// If dst is not nil and len(dst) does not match the number of columns in +// the y input matrix, Corrs will panic. +func (c *CC) CorrsTo(dst []float64) []float64 { + if !c.ok { + panic("stat: canonical correlations missing or invalid") + } + + if dst != nil && len(dst) != c.yd { + panic("stat: length of destination does not match input dimension") + } + return c.c.Values(dst) +} + +// LeftTo returns the left eigenvectors of the canonical correlation matrix if +// spheredSpace is true. If spheredSpace is false it returns these eigenvectors +// back-transformed to the original data space. +// If dst is not nil it must either be zero-sized or be an xd×yd matrix where xd +// and yd are the number of variables in the input x and y matrices. dst will +// be used as the destination for the vector data. If dst is nil, a new +// mat.Dense is allocated for the destination. 
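// Illustrative sketch (not from the upstream gonum sources): minimal use of the
// PC type defined earlier in this file. The 4×3 data matrix is arbitrary and
// nil weights give every observation equal weight.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
	"gonum.org/v1/gonum/stat"
)

func main() {
	data := mat.NewDense(4, 3, []float64{
		2.0, 4.1, 0.5,
		3.2, 3.9, 1.5,
		1.8, 5.0, 0.2,
		4.4, 4.2, 2.1,
	})
	var pc stat.PC
	if ok := pc.PrincipalComponents(data, nil); !ok {
		fmt.Println("principal components analysis failed")
		return
	}
	vecs := pc.VectorsTo(nil) // 3×3 direction vectors (d×min(n, d))
	vars := pc.VarsTo(nil)    // component variances in descending order
	fmt.Printf("directions:\n%v\n", mat.Formatted(vecs))
	fmt.Println("variances:", vars)
}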
+func (c *CC) LeftTo(dst *mat.Dense, spheredSpace bool) *mat.Dense { + if !c.ok || c.n < 2 { + panic("stat: canonical correlations missing or invalid") + } + + if dst != nil { + if d, n := dst.Dims(); !dst.IsZero() && (n != c.yd || d != c.xd) { + panic(mat.ErrShape) + } + } + dst = c.c.UTo(dst) + if spheredSpace { + return dst + } + + xs := c.x.Values(nil) + xv := c.x.VTo(nil) + + scaleColsReciSqrt(xv, xs) + + dst.Product(xv, xv.T(), dst) + dst.Scale(math.Sqrt(float64(c.n-1)), dst) + return dst +} + +// RightTo returns the right eigenvectors of the canonical correlation matrix if +// spheredSpace is true. If spheredSpace is false it returns these eigenvectors +// back-transformed to the original data space. +// If dst is not nil it must either be zero-sized or be an yd×yd matrix where yd +// is the number of variables in the input y matrix. dst will +// be used as the destination for the vector data. If dst is nil, a new +// mat.Dense is allocated for the destination. +func (c *CC) RightTo(dst *mat.Dense, spheredSpace bool) *mat.Dense { + if !c.ok || c.n < 2 { + panic("stat: canonical correlations missing or invalid") + } + + if dst != nil { + if d, n := dst.Dims(); (n != 0 || d != 0) && (n != c.yd || d != c.yd) { + panic(mat.ErrShape) + } + } + dst = c.c.VTo(dst) + if spheredSpace { + return dst + } + + ys := c.y.Values(nil) + yv := c.y.VTo(nil) + + scaleColsReciSqrt(yv, ys) + + dst.Product(yv, yv.T(), dst) + dst.Scale(math.Sqrt(float64(c.n-1)), dst) + return dst +} + +func svdFactorizeCentered(work *mat.SVD, m mat.Matrix, weights []float64) (svd *mat.SVD, ok bool) { + n, d := m.Dims() + centered := mat.NewDense(n, d, nil) + col := make([]float64, n) + for j := 0; j < d; j++ { + mat.Col(col, j, m) + floats.AddConst(-Mean(col, weights), col) + centered.SetCol(j, col) + } + for i, w := range weights { + floats.Scale(math.Sqrt(w), centered.RawRowView(i)) + } + if work == nil { + work = &mat.SVD{} + } + ok = work.Factorize(centered, mat.SVDThin) + return work, ok +} + +// scaleColsReciSqrt scales the columns of cols +// by the reciprocal square-root of vals. +func scaleColsReciSqrt(cols *mat.Dense, vals []float64) { + if cols == nil { + panic("stat: input nil") + } + n, d := cols.Dims() + if len(vals) != d { + panic("stat: input length mismatch") + } + col := make([]float64, n) + for j := 0; j < d; j++ { + mat.Col(col, j, cols) + floats.Scale(math.Sqrt(1/vals[j]), col) + cols.SetCol(j, col) + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/roc.go b/vendor/gonum.org/v1/gonum/stat/roc.go new file mode 100644 index 0000000..516a100 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/roc.go @@ -0,0 +1,125 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stat + +import ( + "math" + "sort" +) + +// ROC returns paired false positive rate (FPR) and true positive rate +// (TPR) values corresponding to cutoff points on the receiver operator +// characteristic (ROC) curve obtained when y is treated as a binary +// classifier for classes with weights. The cutoff thresholds used to +// calculate the ROC are returned in thresh such that tpr[i] and fpr[i] +// are the true and false positive rates for y >= thresh[i]. +// +// The input y and cutoffs must be sorted, and values in y must correspond +// to values in classes and weights. SortWeightedLabeled can be used to +// sort y together with classes and weights. 
+// +// For a given cutoff value, observations corresponding to entries in y +// greater than the cutoff value are classified as false, while those +// less than or equal to the cutoff value are classified as true. These +// assigned class labels are compared with the true values in the classes +// slice and used to calculate the FPR and TPR. +// +// If weights is nil, all weights are treated as 1. +// +// If cutoffs is nil or empty, all possible cutoffs are calculated, +// resulting in fpr and tpr having length one greater than the number of +// unique values in y. Otherwise fpr and tpr will be returned with the +// same length as cutoffs. floats.Span can be used to generate equally +// spaced cutoffs. +// +// More details about ROC curves are available at +// https://en.wikipedia.org/wiki/Receiver_operating_characteristic +func ROC(cutoffs, y []float64, classes []bool, weights []float64) (tpr, fpr, thresh []float64) { + if len(y) != len(classes) { + panic("stat: slice length mismatch") + } + if weights != nil && len(y) != len(weights) { + panic("stat: slice length mismatch") + } + if !sort.Float64sAreSorted(y) { + panic("stat: input must be sorted ascending") + } + if !sort.Float64sAreSorted(cutoffs) { + panic("stat: cutoff values must be sorted ascending") + } + if len(y) == 0 { + return nil, nil, nil + } + if len(cutoffs) == 0 { + if cutoffs == nil || cap(cutoffs) < len(y)+1 { + cutoffs = make([]float64, len(y)+1) + } else { + cutoffs = cutoffs[:len(y)+1] + } + cutoffs[0] = math.Inf(-1) + // Choose all possible cutoffs for unique values in y. + bin := 1 + cutoffs[bin] = y[0] + for i, u := range y[1:] { + if u == y[i] { + continue + } + bin++ + cutoffs[bin] = u + } + cutoffs = cutoffs[:bin+1] + } else { + // Don't mutate the provided cutoffs. + tmp := cutoffs + cutoffs = make([]float64, len(cutoffs)) + copy(cutoffs, tmp) + } + + tpr = make([]float64, len(cutoffs)) + fpr = make([]float64, len(cutoffs)) + var bin int + var nPos, nNeg float64 + for i, u := range classes { + // Update the bin until it matches the next y value + // skipping empty bins. + for bin < len(cutoffs)-1 && y[i] > cutoffs[bin] { + bin++ + tpr[bin] = tpr[bin-1] + fpr[bin] = fpr[bin-1] + } + posWeight, negWeight := 1.0, 0.0 + if weights != nil { + posWeight = weights[i] + } + if !u { + posWeight, negWeight = negWeight, posWeight + } + nPos += posWeight + nNeg += negWeight + if y[i] <= cutoffs[bin] { + tpr[bin] += posWeight + fpr[bin] += negWeight + } + } + + invNeg := 1 / nNeg + invPos := 1 / nPos + for i := range tpr { + tpr[i] *= invPos + tpr[i] = 1 - tpr[i] + fpr[i] *= invNeg + fpr[i] = 1 - fpr[i] + } + for i, j := 0, len(tpr)-1; i < j; i, j = i+1, j-1 { + tpr[i], tpr[j] = tpr[j], tpr[i] + fpr[i], fpr[j] = fpr[j], fpr[i] + } + for i, j := 1, len(cutoffs)-1; i < j; i, j = i+1, j-1 { + cutoffs[i], cutoffs[j] = cutoffs[j], cutoffs[i] + } + cutoffs[0] = math.Inf(1) + + return tpr, fpr, cutoffs +} diff --git a/vendor/gonum.org/v1/gonum/stat/samplemv/doc.go b/vendor/gonum.org/v1/gonum/stat/samplemv/doc.go new file mode 100644 index 0000000..e0330c8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/samplemv/doc.go @@ -0,0 +1,7 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package samplemv implements advanced sampling routines from explicit and implicit +// probability distributions. 
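// Illustrative sketch (not from the upstream gonum sources): minimal use of the
// ROC function defined above. The scores in y are already sorted ascending, as
// ROC requires, and nil cutoffs ask for every distinct threshold.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/stat"
)

func main() {
	y := []float64{0.1, 0.35, 0.4, 0.8}         // classifier scores, ascending
	classes := []bool{false, false, true, true} // true labels aligned with y
	tpr, fpr, thresh := stat.ROC(nil, y, classes, nil)
	for i := range tpr {
		fmt.Printf("thresh=%.2f tpr=%.2f fpr=%.2f\n", thresh[i], tpr[i], fpr[i])
	}
}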
+package samplemv // import "gonum.org/v1/gonum/stat/samplemv" diff --git a/vendor/gonum.org/v1/gonum/stat/samplemv/halton.go b/vendor/gonum.org/v1/gonum/stat/samplemv/halton.go new file mode 100644 index 0000000..c80222e --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/samplemv/halton.go @@ -0,0 +1,173 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package samplemv + +import ( + "fmt" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat/distmv" +) + +// Halton is a type for sampling using the Halton sequence from +// the given distribution. The specific method for scrambling (or lack thereof) +// is specified by the HaltonKind. If src is not nil, it will be used to generate +// the randomness needed to scramble the sequence (if necessary), otherwise +// the rand package will be used. Halton panics if the HaltonKind is unrecognized +// or if q is nil. +// +// Halton sequence random number generation is a quasi-Monte Carlo procedure +// where the samples are generated to be evenly spaced out across the distribution. +// Note that this means the sample locations are correlated with one another. +// The distmv.NewUnitUniform function can be used for easy sampling from the unit hypercube. +type Halton struct { + Kind HaltonKind + Q distmv.Quantiler + Src rand.Source +} + +// Sample generates rows(batch) samples using the Halton generation procedure. +func (h Halton) Sample(batch *mat.Dense) { + halton(batch, h.Kind, h.Q, h.Src) +} + +// HaltonKind specifies the type of algorithm used to generate Halton samples. +type HaltonKind int + +const ( + // Owen generates (scrambled) Halton samples using the Randomized van der Corput + // algorithm described in + // A randomized Halton algorithm + // Art Owen + // https://arxiv.org/pdf/1706.02808.pdf + // Currently limited to 1000 dimensional inputs. + Owen = iota + 1 +) + +func halton(batch *mat.Dense, kind HaltonKind, q distmv.Quantiler, src rand.Source) { + // Code based from https://arxiv.org/pdf/1706.02808.pdf . + perm := rand.Perm + if src != nil { + perm = rand.New(src).Perm + } + + n, d := batch.Dims() + // Each case should generate random numbers over the unit cube. + switch kind { + default: + panic("halton: unknown HaltonKind") + case Owen: + for j := 0; j < d; j++ { + b := nthPrime(j) + div := 1 + b2r := 1 / float64(b) + for 1-b2r < 1 { + p := perm(b) + for i := 0; i < n; i++ { + dig := (i / div) % b + pdig := float64(p[dig]) + v := batch.At(i, j) + v += pdig * b2r + batch.Set(i, j, v) + } + div *= b + b2r /= float64(b) + } + } + } + p := make([]float64, d) + for i := 0; i < n; i++ { + copy(p, batch.RawRowView(i)) + q.Quantile(batch.RawRowView(i), p) + } +} + +// nthPrime returns the nth prime number (0 indexed). 
+func nthPrime(n int) int { + if n > len(firstPrimes) { + panic(fmt.Sprintf("halton: dimension must be less than %d", len(firstPrimes))) + } + return firstPrimes[n] +} + +var firstPrimes = []int{2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, + 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, + 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211, + 223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283, 293, + 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379, 383, 389, + 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467, 479, + 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569, 571, 577, 587, + 593, 599, 601, 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, 661, 673, + 677, 683, 691, 701, 709, 719, 727, 733, 739, 743, 751, 757, 761, 769, 773, + 787, 797, 809, 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, 877, 881, + 883, 887, 907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, + 997, 1009, 1013, 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, + 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163, 1171, + 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229, 1231, 1237, 1249, 1259, 1277, + 1279, 1283, 1289, 1291, 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, + 1373, 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, 1453, 1459, + 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, 1523, 1531, 1543, 1549, 1553, + 1559, 1567, 1571, 1579, 1583, 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, + 1637, 1657, 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, 1741, + 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, 1823, 1831, 1847, 1861, + 1867, 1871, 1873, 1877, 1879, 1889, 1901, 1907, 1913, 1931, 1933, 1949, 1951, + 1973, 1979, 1987, 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, + 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, 2131, 2137, 2141, + 2143, 2153, 2161, 2179, 2203, 2207, 2213, 2221, 2237, 2239, 2243, 2251, 2267, + 2269, 2273, 2281, 2287, 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, + 2357, 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, 2437, 2441, + 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, 2539, 2543, 2549, 2551, 2557, + 2579, 2591, 2593, 2609, 2617, 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, + 2683, 2687, 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, 2749, + 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, 2833, 2837, 2843, 2851, + 2857, 2861, 2879, 2887, 2897, 2903, 2909, 2917, 2927, 2939, 2953, 2957, 2963, + 2969, 2971, 2999, 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, + 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, 3187, 3191, 3203, + 3209, 3217, 3221, 3229, 3251, 3253, 3257, 3259, 3271, 3299, 3301, 3307, 3313, + 3319, 3323, 3329, 3331, 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, + 3413, 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, 3517, 3527, + 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, 3581, 3583, 3593, 3607, 3613, + 3617, 3623, 3631, 3637, 3643, 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, + 3719, 3727, 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, 3823, + 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, 3911, 3917, 3919, 3923, + 3929, 3931, 3943, 3947, 3967, 3989, 4001, 4003, 4007, 4013, 4019, 4021, 4027, + 4049, 4051, 4057, 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, + 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 
4229, 4231, 4241, 4243, 4253, + 4259, 4261, 4271, 4273, 4283, 4289, 4297, 4327, 4337, 4339, 4349, 4357, 4363, + 4373, 4391, 4397, 4409, 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, + 4493, 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, 4591, 4597, + 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, 4663, 4673, 4679, 4691, 4703, + 4721, 4723, 4729, 4733, 4751, 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, + 4817, 4831, 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, 4943, + 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, 5009, 5011, 5021, 5023, + 5039, 5051, 5059, 5077, 5081, 5087, 5099, 5101, 5107, 5113, 5119, 5147, 5153, + 5167, 5171, 5179, 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, + 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, 5393, 5399, 5407, + 5413, 5417, 5419, 5431, 5437, 5441, 5443, 5449, 5471, 5477, 5479, 5483, 5501, + 5503, 5507, 5519, 5521, 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, + 5639, 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, 5701, 5711, + 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, 5801, 5807, 5813, 5821, 5827, + 5839, 5843, 5849, 5851, 5857, 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, + 5927, 5939, 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, 6067, + 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, 6143, 6151, 6163, 6173, + 6197, 6199, 6203, 6211, 6217, 6221, 6229, 6247, 6257, 6263, 6269, 6271, 6277, + 6287, 6299, 6301, 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, + 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, 6481, 6491, 6521, + 6529, 6547, 6551, 6553, 6563, 6569, 6571, 6577, 6581, 6599, 6607, 6619, 6637, + 6653, 6659, 6661, 6673, 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, + 6761, 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, 6841, 6857, + 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, 6947, 6949, 6959, 6961, 6967, + 6971, 6977, 6983, 6991, 6997, 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, + 7079, 7103, 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, 7211, + 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, 7307, 7309, 7321, 7331, + 7333, 7349, 7351, 7369, 7393, 7411, 7417, 7433, 7451, 7457, 7459, 7477, 7481, + 7487, 7489, 7499, 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, + 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, 7649, 7669, 7673, + 7681, 7687, 7691, 7699, 7703, 7717, 7723, 7727, 7741, 7753, 7757, 7759, 7789, + 7793, 7817, 7823, 7829, 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, + 7919, +} diff --git a/vendor/gonum.org/v1/gonum/stat/samplemv/metropolishastings.go b/vendor/gonum.org/v1/gonum/stat/samplemv/metropolishastings.go new file mode 100644 index 0000000..e3451fb --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/samplemv/metropolishastings.go @@ -0,0 +1,213 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package samplemv + +import ( + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat/distmv" +) + +var _ Sampler = MetropolisHastingser{} + +// MHProposal defines a proposal distribution for Metropolis Hastings. +type MHProposal interface { + // ConditionalLogProb returns the probability of the first argument + // conditioned on being at the second argument. + // p(x|y) + // ConditionalLogProb panics if the input slices are not the same length. 
+ ConditionalLogProb(x, y []float64) (prob float64) + + // ConditionalRand generates a new random location conditioned being at the + // location y. If the first argument is nil, a new slice is allocated and + // returned. Otherwise, the random location is stored in-place into the first + // argument, and ConditionalRand will panic if the input slice lengths differ. + ConditionalRand(x, y []float64) []float64 +} + +// MetropolisHastingser is a type for generating samples using the Metropolis Hastings +// algorithm (http://en.wikipedia.org/wiki/Metropolis%E2%80%93Hastings_algorithm), +// with the given target and proposal distributions, starting at the location +// specified by Initial. If src != nil, it will be used to generate random +// numbers, otherwise rand.Float64 will be used. +// +// Metropolis-Hastings is a Markov-chain Monte Carlo algorithm that generates +// samples according to the distribution specified by target using the Markov +// chain implicitly defined by the proposal distribution. At each +// iteration, a proposal point is generated randomly from the current location. +// This proposal point is accepted with probability +// p = min(1, (target(new) * proposal(current|new)) / (target(current) * proposal(new|current))) +// If the new location is accepted, it becomes the new current location. +// If it is rejected, the current location remains. This is the sample stored in +// batch, ignoring BurnIn and Rate (discussed below). +// +// The samples in Metropolis Hastings are correlated with one another through the +// Markov chain. As a result, the initial value can have a significant influence +// on the early samples, and so, typically, the first samples generated by the chain +// are ignored. This is known as "burn-in", and the number of samples ignored +// at the beginning is specified by BurnIn. The proper BurnIn value will depend +// on the mixing time of the Markov chain defined by the target and proposal +// distributions. +// +// Many choose to have a sampling "rate" where a number of samples +// are ignored in between each kept sample. This helps decorrelate +// the samples from one another, but also reduces the number of available samples. +// This value is specified by Rate. If Rate is 0 it is defaulted to 1 (keep +// every sample). +// +// The initial value is NOT changed during calls to Sample. +type MetropolisHastingser struct { + Initial []float64 + Target distmv.LogProber + Proposal MHProposal + Src rand.Source + + BurnIn int + Rate int +} + +// Sample generates rows(batch) samples using the Metropolis Hastings sample +// generation method. The initial location is NOT updated during the call to Sample. +// +// The number of columns in batch must equal len(m.Initial), otherwise Sample +// will panic. +func (m MetropolisHastingser) Sample(batch *mat.Dense) { + burnIn := m.BurnIn + rate := m.Rate + if rate == 0 { + rate = 1 + } + r, c := batch.Dims() + if len(m.Initial) != c { + panic("metropolishastings: length mismatch") + } + + // Use the optimal size for the temporary memory to allow the fewest calls + // to MetropolisHastings. The case where tmp shadows samples must be + // aligned with the logic after burn-in so that tmp does not shadow samples + // during the rate portion. + tmp := batch + if rate > r { + tmp = mat.NewDense(rate, c, nil) + } + rTmp, _ := tmp.Dims() + + // Perform burn-in. 
+ remaining := burnIn + initial := make([]float64, c) + copy(initial, m.Initial) + for remaining != 0 { + newSamp := min(rTmp, remaining) + metropolisHastings(tmp.Slice(0, newSamp, 0, c).(*mat.Dense), initial, m.Target, m.Proposal, m.Src) + copy(initial, tmp.RawRowView(newSamp-1)) + remaining -= newSamp + } + + if rate == 1 { + metropolisHastings(batch, initial, m.Target, m.Proposal, m.Src) + return + } + + if rTmp <= r { + tmp = mat.NewDense(rate, c, nil) + } + + // Take a single sample from the chain. + metropolisHastings(batch.Slice(0, 1, 0, c).(*mat.Dense), initial, m.Target, m.Proposal, m.Src) + + copy(initial, batch.RawRowView(0)) + // For all of the other samples, first generate Rate samples and then actually + // accept the last one. + for i := 1; i < r; i++ { + metropolisHastings(tmp, initial, m.Target, m.Proposal, m.Src) + v := tmp.RawRowView(rate - 1) + batch.SetRow(i, v) + copy(initial, v) + } +} + +func metropolisHastings(batch *mat.Dense, initial []float64, target distmv.LogProber, proposal MHProposal, src rand.Source) { + f64 := rand.Float64 + if src != nil { + f64 = rand.New(src).Float64 + } + if len(initial) == 0 { + panic("metropolishastings: zero length initial") + } + r, _ := batch.Dims() + current := make([]float64, len(initial)) + copy(current, initial) + proposed := make([]float64, len(initial)) + currentLogProb := target.LogProb(initial) + for i := 0; i < r; i++ { + proposal.ConditionalRand(proposed, current) + proposedLogProb := target.LogProb(proposed) + probTo := proposal.ConditionalLogProb(proposed, current) + probBack := proposal.ConditionalLogProb(current, proposed) + + accept := math.Exp(proposedLogProb + probBack - probTo - currentLogProb) + if accept > f64() { + copy(current, proposed) + currentLogProb = proposedLogProb + } + batch.SetRow(i, current) + } +} + +// ProposalNormal is a sampling distribution for Metropolis-Hastings. It has a +// fixed covariance matrix and changes the mean based on the current sampling +// location. +type ProposalNormal struct { + normal *distmv.Normal +} + +// NewProposalNormal constructs a new ProposalNormal for use as a proposal +// distribution for Metropolis-Hastings. ProposalNormal is a multivariate normal +// distribution (implemented by distmv.Normal) where the covariance matrix is fixed +// and the mean of the distribution changes. +// +// NewProposalNormal returns {nil, false} if the covariance matrix is not positive-definite. +func NewProposalNormal(sigma *mat.SymDense, src rand.Source) (*ProposalNormal, bool) { + mu := make([]float64, sigma.Symmetric()) + normal, ok := distmv.NewNormal(mu, sigma, src) + if !ok { + return nil, false + } + p := &ProposalNormal{ + normal: normal, + } + return p, true +} + +// ConditionalLogProb returns the probability of the first argument conditioned on +// being at the second argument. +// p(x|y) +// ConditionalLogProb panics if the input slices are not the same length or +// are not equal to the dimension of the covariance matrix. +func (p *ProposalNormal) ConditionalLogProb(x, y []float64) (prob float64) { + // Either SetMean or LogProb will panic if the slice lengths are innaccurate. + p.normal.SetMean(y) + return p.normal.LogProb(x) +} + +// ConditionalRand generates a new random location conditioned being at the +// location y. If the first argument is nil, a new slice is allocated and +// returned. 
Otherwise, the random location is stored in-place into the first +// argument, and ConditionalRand will panic if the input slice lengths differ or +// if they are not equal to the dimension of the covariance matrix. +func (p *ProposalNormal) ConditionalRand(x, y []float64) []float64 { + if x == nil { + x = make([]float64, p.normal.Dim()) + } + if len(x) != len(y) { + panic(badLengthMismatch) + } + p.normal.SetMean(y) + p.normal.Rand(x) + return x +} diff --git a/vendor/gonum.org/v1/gonum/stat/samplemv/samplemv.go b/vendor/gonum.org/v1/gonum/stat/samplemv/samplemv.go new file mode 100644 index 0000000..20ac114 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/samplemv/samplemv.go @@ -0,0 +1,263 @@ +// Copyright ©2016 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package samplemv + +import ( + "errors" + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat/distmv" +) + +var ( + badLengthMismatch = "samplemv: slice length mismatch" +) + +var ( + _ Sampler = LatinHypercube{} + _ Sampler = (*Rejection)(nil) + _ Sampler = IID{} + + _ WeightedSampler = SampleUniformWeighted{} + _ WeightedSampler = Importance{} +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// Sampler generates a batch of samples according to the rule specified by the +// implementing type. The number of samples generated is equal to rows(batch), +// and the samples are stored in-place into the input. +type Sampler interface { + Sample(batch *mat.Dense) +} + +// WeightedSampler generates a batch of samples and their relative weights +// according to the rule specified by the implementing type. The number of samples +// generated is equal to rows(batch), and the samples and weights +// are stored in-place into the inputs. The length of weights must equal +// rows(batch), otherwise SampleWeighted will panic. +type WeightedSampler interface { + SampleWeighted(batch *mat.Dense, weights []float64) +} + +// SampleUniformWeighted wraps a Sampler type to create a WeightedSampler where all +// weights are equal. +type SampleUniformWeighted struct { + Sampler +} + +// SampleWeighted generates rows(batch) samples from the embedded Sampler type +// and sets all of the weights equal to 1. If rows(batch) and len(weights) +// of weights are not equal, SampleWeighted will panic. +func (w SampleUniformWeighted) SampleWeighted(batch *mat.Dense, weights []float64) { + r, _ := batch.Dims() + if r != len(weights) { + panic(badLengthMismatch) + } + w.Sample(batch) + for i := range weights { + weights[i] = 1 + } +} + +// LatinHypercube is a type for sampling using Latin hypercube sampling +// from the given distribution. If src is not nil, it will be used to generate +// random numbers, otherwise rand.Float64 will be used. +// +// Latin hypercube sampling divides the cumulative distribution function into equally +// spaced bins and guarantees that one sample is generated per bin. Within each bin, +// the location is randomly sampled. The distmv.NewUnitUniform function can be used +// for easy sampling from the unit hypercube. +type LatinHypercube struct { + Q distmv.Quantiler + Src rand.Source +} + +// Sample generates rows(batch) samples using the LatinHypercube generation +// procedure. 
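// Illustrative sketch (not from the upstream gonum sources): implementing the
// Sampler interface defined above with a made-up constSampler type and wrapping
// it in SampleUniformWeighted so every sample carries a unit weight.

package main

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
	"gonum.org/v1/gonum/stat/samplemv"
)

// constSampler fills every row of the batch with a fixed value.
type constSampler struct{ v float64 }

func (s constSampler) Sample(batch *mat.Dense) {
	r, c := batch.Dims()
	for i := 0; i < r; i++ {
		for j := 0; j < c; j++ {
			batch.Set(i, j, s.v)
		}
	}
}

func main() {
	batch := mat.NewDense(4, 2, nil)
	weights := make([]float64, 4) // must match rows(batch)
	w := samplemv.SampleUniformWeighted{Sampler: constSampler{v: 1.5}}
	w.SampleWeighted(batch, weights)
	fmt.Println("weights:", weights)
	fmt.Printf("batch:\n%v\n", mat.Formatted(batch))
}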
+func (l LatinHypercube) Sample(batch *mat.Dense) { + latinHypercube(batch, l.Q, l.Src) +} + +func latinHypercube(batch *mat.Dense, q distmv.Quantiler, src rand.Source) { + r, c := batch.Dims() + var f64 func() float64 + var perm func(int) []int + if src != nil { + r := rand.New(src) + f64 = r.Float64 + perm = r.Perm + } else { + f64 = rand.Float64 + perm = rand.Perm + } + r64 := float64(r) + for i := 0; i < c; i++ { + p := perm(r) + for j := 0; j < r; j++ { + v := f64()/r64 + float64(j)/r64 + batch.Set(p[j], i, v) + } + } + p := make([]float64, c) + for i := 0; i < r; i++ { + copy(p, batch.RawRowView(i)) + q.Quantile(batch.RawRowView(i), p) + } +} + +// Importance is a type for performing importance sampling using the given +// Target and Proposal distributions. +// +// Importance sampling is a variance reduction technique where samples are +// generated from a proposal distribution, q(x), instead of the target distribution +// p(x). This allows relatively unlikely samples in p(x) to be generated more frequently. +// +// The importance sampling weight at x is given by p(x)/q(x). To reduce variance, +// a good proposal distribution will bound this sampling weight. This implies the +// support of q(x) should be at least as broad as p(x), and q(x) should be "fatter tailed" +// than p(x). +type Importance struct { + Target distmv.LogProber + Proposal distmv.RandLogProber +} + +// SampleWeighted generates rows(batch) samples using the Importance sampling +// generation procedure. +// +// The length of weights must equal the length of batch, otherwise Importance will panic. +func (l Importance) SampleWeighted(batch *mat.Dense, weights []float64) { + importance(batch, weights, l.Target, l.Proposal) +} + +func importance(batch *mat.Dense, weights []float64, target distmv.LogProber, proposal distmv.RandLogProber) { + r, _ := batch.Dims() + if r != len(weights) { + panic(badLengthMismatch) + } + for i := 0; i < r; i++ { + v := batch.RawRowView(i) + proposal.Rand(v) + weights[i] = math.Exp(target.LogProb(v) - proposal.LogProb(v)) + } +} + +// ErrRejection is returned when the constant in Rejection is not sufficiently high. +var ErrRejection = errors.New("rejection: acceptance ratio above 1") + +// Rejection is a type for sampling using the rejection sampling algorithm. +// +// Rejection sampling generates points from the target distribution by using +// the proposal distribution. At each step of the algorithm, the proposed point +// is accepted with probability +// p = target(x) / (proposal(x) * c) +// where target(x) is the probability of the point according to the target distribution +// and proposal(x) is the probability according to the proposal distribution. +// The constant c must be chosen such that target(x) < proposal(x) * c for all x. +// The expected number of proposed samples is len(samples) * c. +// +// The number of proposed locations during sampling can be found with a call to +// Proposed. If there was an error during sampling, all elements of samples are +// set to NaN and the error can be accesssed with the Err method. If src != nil, +// it will be used to generate random numbers, otherwise rand.Float64 will be used. +// +// Target may return the true (log of) the probablity of the location, or it may return +// a value that is proportional to the probability (logprob + constant). This is +// useful for cases where the probability distribution is only known up to a normalization +// constant. 
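// Illustrative sketch (not from the upstream gonum sources): minimal use of the
// Rejection sampler described above. Target and proposal are multivariate
// normals built with distmv.NewNormal; with a unit-covariance target and a
// proposal with covariance 4I the density ratio is bounded by 4, so C = 5
// satisfies target(x) < proposal(x)*C. All numeric values are arbitrary.

package main

import (
	"fmt"

	"golang.org/x/exp/rand"

	"gonum.org/v1/gonum/mat"
	"gonum.org/v1/gonum/stat/distmv"
	"gonum.org/v1/gonum/stat/samplemv"
)

func main() {
	src := rand.NewSource(1)
	// Diagonal covariances are positive-definite, so construction succeeds.
	target, _ := distmv.NewNormal([]float64{0, 0}, mat.NewSymDense(2, []float64{1, 0, 0, 1}), src)
	proposal, _ := distmv.NewNormal([]float64{0, 0}, mat.NewSymDense(2, []float64{4, 0, 0, 4}), src)

	rej := &samplemv.Rejection{
		C:        5,
		Target:   target,
		Proposal: proposal,
		Src:      src,
	}
	batch := mat.NewDense(500, 2, nil)
	rej.Sample(batch)
	if err := rej.Err(); err != nil {
		fmt.Println("sampling failed:", err)
		return
	}
	fmt.Println("proposals needed for 500 accepted samples:", rej.Proposed())
}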
+type Rejection struct { + C float64 + Target distmv.LogProber + Proposal distmv.RandLogProber + Src rand.Source + + err error + proposed int +} + +// Err returns nil if the most recent call to sample was successful, and returns +// ErrRejection if it was not. +func (r *Rejection) Err() error { + return r.err +} + +// Proposed returns the number of samples proposed during the most recent call to +// Sample. +func (r *Rejection) Proposed() int { + return r.proposed +} + +// Sample generates rows(batch) using the Rejection sampling generation procedure. +// Rejection sampling may fail if the constant is insufficiently high, as described +// in the type comment for Rejection. If the generation fails, the samples +// are set to math.NaN(), and a call to Err will return a non-nil value. +func (r *Rejection) Sample(batch *mat.Dense) { + r.err = nil + r.proposed = 0 + proposed, ok := rejection(batch, r.Target, r.Proposal, r.C, r.Src) + if !ok { + r.err = ErrRejection + } + r.proposed = proposed +} + +func rejection(batch *mat.Dense, target distmv.LogProber, proposal distmv.RandLogProber, c float64, src rand.Source) (nProposed int, ok bool) { + if c < 1 { + panic("rejection: acceptance constant must be greater than 1") + } + f64 := rand.Float64 + if src != nil { + f64 = rand.New(src).Float64 + } + r, dim := batch.Dims() + v := make([]float64, dim) + var idx int + for { + nProposed++ + proposal.Rand(v) + qx := proposal.LogProb(v) + px := target.LogProb(v) + accept := math.Exp(px-qx) / c + if accept > 1 { + // Invalidate the whole result and return a failure. + for i := 0; i < r; i++ { + for j := 0; j < dim; j++ { + batch.Set(i, j, math.NaN()) + } + } + return nProposed, false + } + if accept > f64() { + batch.SetRow(idx, v) + idx++ + if idx == r { + break + } + } + } + return nProposed, true +} + +// IID generates a set of independently and identically distributed samples from +// the input distribution. +type IID struct { + Dist distmv.Rander +} + +// Sample generates a set of identically and independently distributed samples. +func (iid IID) Sample(batch *mat.Dense) { + r, _ := batch.Dims() + for i := 0; i < r; i++ { + iid.Dist.Rand(batch.RawRowView(i)) + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/sampleuv/doc.go b/vendor/gonum.org/v1/gonum/stat/sampleuv/doc.go new file mode 100644 index 0000000..d989578 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/sampleuv/doc.go @@ -0,0 +1,11 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sampleuv implements advanced sampling routines from explicit and implicit +// probability distributions. +// +// Each sampling routine is implemented as a stateless function with a +// complementary wrapper type. The wrapper types allow the sampling routines +// to implement interfaces. +package sampleuv // import "gonum.org/v1/gonum/stat/sampleuv" diff --git a/vendor/gonum.org/v1/gonum/stat/sampleuv/sample.go b/vendor/gonum.org/v1/gonum/stat/sampleuv/sample.go new file mode 100644 index 0000000..8fd3c11 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/sampleuv/sample.go @@ -0,0 +1,373 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package sampleuv + +import ( + "errors" + "math" + + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/stat/distuv" +) + +var ( + badLengthMismatch = "sample: slice length mismatch" +) + +var ( + _ Sampler = LatinHypercube{} + _ Sampler = MetropolisHastings{} + _ Sampler = (*Rejection)(nil) + _ Sampler = IIDer{} + + _ WeightedSampler = SampleUniformWeighted{} + _ WeightedSampler = Importance{} +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +// Sampler generates a batch of samples according to the rule specified by the +// implementing type. The number of samples generated is equal to len(batch), +// and the samples are stored in-place into the input. +type Sampler interface { + Sample(batch []float64) +} + +// WeightedSampler generates a batch of samples and their relative weights +// according to the rule specified by the implementing type. The number of samples +// generated is equal to len(batch), and the samples and weights +// are stored in-place into the inputs. The length of weights must equal +// len(batch), otherwise SampleWeighted will panic. +type WeightedSampler interface { + SampleWeighted(batch, weights []float64) +} + +// SampleUniformWeighted wraps a Sampler type to create a WeightedSampler where all +// weights are equal. +type SampleUniformWeighted struct { + Sampler +} + +// SampleWeighted generates len(batch) samples from the embedded Sampler type +// and sets all of the weights equal to 1. If len(batch) and len(weights) +// are not equal, SampleWeighted will panic. +func (w SampleUniformWeighted) SampleWeighted(batch, weights []float64) { + if len(batch) != len(weights) { + panic(badLengthMismatch) + } + w.Sample(batch) + for i := range weights { + weights[i] = 1 + } +} + +// LatinHypercube is a type for sampling using Latin hypercube sampling +// from the given distribution. If src is not nil, it will be used to generate +// random numbers, otherwise rand.Float64 will be used. +// +// Latin hypercube sampling divides the cumulative distribution function into equally +// spaced bins and guarantees that one sample is generated per bin. Within each bin, +// the location is randomly sampled. The distuv.UnitUniform variable can be used +// for easy sampling from the unit hypercube. +type LatinHypercube struct { + Q distuv.Quantiler + Src rand.Source +} + +// Sample generates len(batch) samples using the LatinHypercube generation +// procedure. +func (l LatinHypercube) Sample(batch []float64) { + latinHypercube(batch, l.Q, l.Src) +} + +func latinHypercube(batch []float64, q distuv.Quantiler, src rand.Source) { + n := len(batch) + var perm []int + var f64 func() float64 + if src != nil { + r := rand.New(src) + f64 = r.Float64 + perm = r.Perm(n) + } else { + f64 = rand.Float64 + perm = rand.Perm(n) + } + for i := range batch { + v := f64()/float64(n) + float64(i)/float64(n) + batch[perm[i]] = q.Quantile(v) + } +} + +// Importance is a type for performing importance sampling using the given +// Target and Proposal distributions. +// +// Importance sampling is a variance reduction technique where samples are +// generated from a proposal distribution, q(x), instead of the target distribution +// p(x). This allows relatively unlikely samples in p(x) to be generated more frequently. +// +// The importance sampling weight at x is given by p(x)/q(x). To reduce variance, +// a good proposal distribution will bound this sampling weight. 
This implies the +// support of q(x) should be at least as broad as p(x), and q(x) should be "fatter tailed" +// than p(x). +type Importance struct { + Target distuv.LogProber + Proposal distuv.RandLogProber +} + +// SampleWeighted generates len(batch) samples using the Importance sampling +// generation procedure. +// +// The length of weights must equal the length of batch, otherwise Importance will panic. +func (l Importance) SampleWeighted(batch, weights []float64) { + importance(batch, weights, l.Target, l.Proposal) +} + +func importance(batch, weights []float64, target distuv.LogProber, proposal distuv.RandLogProber) { + if len(batch) != len(weights) { + panic(badLengthMismatch) + } + for i := range batch { + v := proposal.Rand() + batch[i] = v + weights[i] = math.Exp(target.LogProb(v) - proposal.LogProb(v)) + } +} + +// ErrRejection is returned when the constant in Rejection is not sufficiently high. +var ErrRejection = errors.New("rejection: acceptance ratio above 1") + +// Rejection is a type for sampling using the rejection sampling algorithm. +// +// Rejection sampling generates points from the target distribution by using +// the proposal distribution. At each step of the algorithm, the proposed point +// is accepted with probability +// p = target(x) / (proposal(x) * c) +// where target(x) is the probability of the point according to the target distribution +// and proposal(x) is the probability according to the proposal distribution. +// The constant c must be chosen such that target(x) < proposal(x) * c for all x. +// The expected number of proposed samples is len(samples) * c. +// +// The number of proposed locations during sampling can be found with a call to +// Proposed. If there was an error during sampling, all elements of samples are +// set to NaN and the error can be accesssed with the Err method. If src != nil, +// it will be used to generate random numbers, otherwise rand.Float64 will be used. +// +// Target may return the true (log of) the probablity of the location, or it may return +// a value that is proportional to the probability (logprob + constant). This is +// useful for cases where the probability distribution is only known up to a normalization +// constant. +type Rejection struct { + C float64 + Target distuv.LogProber + Proposal distuv.RandLogProber + Src rand.Source + + err error + proposed int +} + +// Err returns nil if the most recent call to sample was successful, and returns +// ErrRejection if it was not. +func (r *Rejection) Err() error { + return r.err +} + +// Proposed returns the number of samples proposed during the most recent call to +// Sample. +func (r *Rejection) Proposed() int { + return r.proposed +} + +// Sample generates len(batch) using the Rejection sampling generation procedure. +// Rejection sampling may fail if the constant is insufficiently high, as described +// in the type comment for Rejection. If the generation fails, the samples +// are set to math.NaN(), and a call to Err will return a non-nil value. 
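+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code), assuming distuv.Normal from gonum.org/v1/gonum/stat/distuv, which
+// satisfies both LogProber and RandLogProber. A N(0,2) proposal dominates a
+// N(0,1) target with C = 2, since their density ratio never exceeds 2:
+//
+//	target := distuv.Normal{Mu: 0, Sigma: 1}
+//	proposal := distuv.Normal{Mu: 0, Sigma: 2}
+//	rej := &sampleuv.Rejection{C: 2, Target: target, Proposal: proposal}
+//	batch := make([]float64, 100)
+//	rej.Sample(batch)
+//	if rej.Err() != nil {
+//		// C was too small; every element of batch is NaN.
+//	}
+//	_ = rej.Proposed() // proposals drawn to fill the batch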
+func (r *Rejection) Sample(batch []float64) { + r.err = nil + r.proposed = 0 + proposed, ok := rejection(batch, r.Target, r.Proposal, r.C, r.Src) + if !ok { + r.err = ErrRejection + } + r.proposed = proposed +} + +func rejection(batch []float64, target distuv.LogProber, proposal distuv.RandLogProber, c float64, src rand.Source) (nProposed int, ok bool) { + if c < 1 { + panic("rejection: acceptance constant must be greater than 1") + } + f64 := rand.Float64 + if src != nil { + f64 = rand.New(src).Float64 + } + var idx int + for { + nProposed++ + v := proposal.Rand() + qx := proposal.LogProb(v) + px := target.LogProb(v) + accept := math.Exp(px-qx) / c + if accept > 1 { + // Invalidate the whole result and return a failure. + for i := range batch { + batch[i] = math.NaN() + } + return nProposed, false + } + if accept > f64() { + batch[idx] = v + idx++ + if idx == len(batch) { + break + } + } + } + return nProposed, true +} + +// MHProposal defines a proposal distribution for Metropolis Hastings. +type MHProposal interface { + // ConditionalDist returns the probability of the first argument conditioned on + // being at the second argument + // p(x|y) + ConditionalLogProb(x, y float64) (prob float64) + + // ConditionalRand generates a new random location conditioned being at the + // location y. + ConditionalRand(y float64) (x float64) +} + +// MetropolisHastings is a type for generating samples using the Metropolis Hastings +// algorithm (http://en.wikipedia.org/wiki/Metropolis%E2%80%93Hastings_algorithm), +// with the given target and proposal distributions, starting at the location +// specified by Initial. If src != nil, it will be used to generate random +// numbers, otherwise rand.Float64 will be used. +// +// Metropolis-Hastings is a Markov-chain Monte Carlo algorithm that generates +// samples according to the distribution specified by target using the Markov +// chain implicitly defined by the proposal distribution. At each +// iteration, a proposal point is generated randomly from the current location. +// This proposal point is accepted with probability +// p = min(1, (target(new) * proposal(current|new)) / (target(current) * proposal(new|current))) +// If the new location is accepted, it becomes the new current location. +// If it is rejected, the current location remains. This is the sample stored in +// batch, ignoring BurnIn and Rate (discussed below). +// +// The samples in Metropolis Hastings are correlated with one another through the +// Markov chain. As a result, the initial value can have a significant influence +// on the early samples, and so, typically, the first samples generated by the chain +// are ignored. This is known as "burn-in", and the number of samples ignored +// at the beginning is specified by BurnIn. The proper BurnIn value will depend +// on the mixing time of the Markov chain defined by the target and proposal +// distributions. +// +// Many choose to have a sampling "rate" where a number of samples +// are ignored in between each kept sample. This helps decorrelate +// the samples from one another, but also reduces the number of available samples. +// This value is specified by Rate. If Rate is 0 it is defaulted to 1 (keep +// every sample). +// +// The initial value is NOT changed during calls to Sample. +type MetropolisHastings struct { + Initial float64 + Target distuv.LogProber + Proposal MHProposal + Src rand.Source + + BurnIn int + Rate int +} + +// Sample generates len(batch) samples using the Metropolis Hastings sample +// generation method. 
The initial location is NOT updated during the call to Sample. +func (m MetropolisHastings) Sample(batch []float64) { + burnIn := m.BurnIn + rate := m.Rate + if rate == 0 { + rate = 1 + } + + // Use the optimal size for the temporary memory to allow the fewest calls + // to MetropolisHastings. The case where tmp shadows samples must be + // aligned with the logic after burn-in so that tmp does not shadow samples + // during the rate portion. + tmp := batch + if rate > len(batch) { + tmp = make([]float64, rate) + } + + // Perform burn-in. + remaining := burnIn + initial := m.Initial + for remaining != 0 { + newSamp := min(len(tmp), remaining) + metropolisHastings(tmp[newSamp:], initial, m.Target, m.Proposal, m.Src) + initial = tmp[newSamp-1] + remaining -= newSamp + } + + if rate == 1 { + metropolisHastings(batch, initial, m.Target, m.Proposal, m.Src) + return + } + + if len(tmp) <= len(batch) { + tmp = make([]float64, rate) + } + + // Take a single sample from the chain + metropolisHastings(batch[0:1], initial, m.Target, m.Proposal, m.Src) + initial = batch[0] + + // For all of the other samples, first generate Rate samples and then actually + // accept the last one. + for i := 1; i < len(batch); i++ { + metropolisHastings(tmp, initial, m.Target, m.Proposal, m.Src) + v := tmp[rate-1] + batch[i] = v + initial = v + } +} + +func metropolisHastings(batch []float64, initial float64, target distuv.LogProber, proposal MHProposal, src rand.Source) { + f64 := rand.Float64 + if src != nil { + f64 = rand.New(src).Float64 + } + current := initial + currentLogProb := target.LogProb(initial) + for i := range batch { + proposed := proposal.ConditionalRand(current) + proposedLogProb := target.LogProb(proposed) + probTo := proposal.ConditionalLogProb(proposed, current) + probBack := proposal.ConditionalLogProb(current, proposed) + + accept := math.Exp(proposedLogProb + probBack - probTo - currentLogProb) + if accept > f64() { + current = proposed + currentLogProb = proposedLogProb + } + batch[i] = current + } +} + +// IIDer generates a set of independently and identically distributed samples from +// the input distribution. +type IIDer struct { + Dist distuv.Rander +} + +// Sample generates a set of identically and independently distributed samples. +func (iid IIDer) Sample(batch []float64) { + for i := range batch { + batch[i] = iid.Dist.Rand() + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/sampleuv/weighted.go b/vendor/gonum.org/v1/gonum/stat/sampleuv/weighted.go new file mode 100644 index 0000000..39124cd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/sampleuv/weighted.go @@ -0,0 +1,140 @@ +// Copyright ©2015 The Gonum Authors. All rights reserved. +// Use of this code is governed by a BSD-style +// license that can be found in the LICENSE file + +package sampleuv + +import ( + "golang.org/x/exp/rand" + + "gonum.org/v1/gonum/floats" +) + +// Weighted provides sampling without replacement from a collection of items with +// non-uniform probability. +type Weighted struct { + weights []float64 + // heap is a weight heap. + // + // It keeps a heap-organised sum of remaining + // index weights that are available to be taken + // from. + // + // Each element holds the sum of weights for + // the corresponding index, plus the sum of + // its children's weights; the children of + // an element i can be found at positions + // 2*(i+1)-1 and 2*(i+1). The root of the + // weight heap is at element 0. + // + // See comments in container/heap for an + // explanation of the layout of a heap. 
+ heap []float64 + rnd *rand.Rand +} + +// NewWeighted returns a Weighted for the weights w. If src is nil, rand.Rand is +// used as the random number generator. +// +// Note that sampling from weights with a high variance or overall low absolute +// value sum may result in problems with numerical stability. +func NewWeighted(w []float64, src rand.Source) Weighted { + s := Weighted{ + weights: make([]float64, len(w)), + heap: make([]float64, len(w)), + } + if src != nil { + s.rnd = rand.New(src) + } + s.ReweightAll(w) + return s +} + +// Len returns the number of items held by the Weighted, including items +// already taken. +func (s Weighted) Len() int { return len(s.weights) } + +// Take returns an index from the Weighted with probability proportional +// to the weight of the item. The weight of the item is then set to zero. +// Take returns false if there are no items remaining. +func (s Weighted) Take() (idx int, ok bool) { + const small = 1e-12 + if floats.EqualWithinAbsOrRel(s.heap[0], 0, small, small) { + return -1, false + } + + var r float64 + if s.rnd == nil { + r = s.heap[0] * rand.Float64() + } else { + r = s.heap[0] * s.rnd.Float64() + } + i := 1 + last := -1 + left := len(s.weights) + for { + if r -= s.weights[i-1]; r <= 0 { + break // Fall within item i-1. + } + i <<= 1 // Move to left child. + if d := s.heap[i-1]; r > d { + r -= d + // If enough r to pass left child + // move to right child state will + // be caught at break above. + i++ + } + if i == last || left < 0 { + // No progression. + return -1, false + } + last = i + left-- + } + + w, idx := s.weights[i-1], i-1 + + s.weights[i-1] = 0 + for i > 0 { + s.heap[i-1] -= w + // The following condition is necessary to + // handle floating point error. If we see + // a heap value below zero, we know we need + // to rebuild it. + if s.heap[i-1] < 0 { + s.reset() + return idx, true + } + i >>= 1 + } + + return idx, true +} + +// Reweight sets the weight of item idx to w. +func (s Weighted) Reweight(idx int, w float64) { + w, s.weights[idx] = s.weights[idx]-w, w + idx++ + for idx > 0 { + s.heap[idx-1] -= w + idx >>= 1 + } +} + +// ReweightAll sets the weight of all items in the Weighted. ReweightAll +// panics if len(w) != s.Len. +func (s Weighted) ReweightAll(w []float64) { + if len(w) != s.Len() { + panic("floats: length of the slices do not match") + } + copy(s.weights, w) + s.reset() +} + +func (s Weighted) reset() { + copy(s.heap, s.weights) + for i := len(s.heap) - 1; i > 0; i-- { + // Sometimes 1-based counting makes sense. + s.heap[((i+1)>>1)-1] += s.heap[i] + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/sampleuv/withoutreplacement.go b/vendor/gonum.org/v1/gonum/stat/sampleuv/withoutreplacement.go new file mode 100644 index 0000000..ec70da3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/sampleuv/withoutreplacement.go @@ -0,0 +1,61 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package sampleuv + +import ( + "sort" + + "golang.org/x/exp/rand" +) + +// WithoutReplacement samples len(idxs) integers from [0, n) without replacement. +// That is, upon return the elements of idxs will be unique integers. If source +// is non-nil it will be used to generate random numbers, otherwise the default +// source from the math/rand package will be used. +// +// WithoutReplacement will panic if len(idxs) > n. 
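+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): drawing five distinct indices from [0, 100) with the default source:
+//
+//	idxs := make([]int, 5)
+//	sampleuv.WithoutReplacement(idxs, 100, nil)
+//	// idxs now holds five unique integers in [0, 100).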
+func WithoutReplacement(idxs []int, n int, src rand.Source) { + if len(idxs) == 0 { + panic("withoutreplacement: zero length input") + } + if len(idxs) > n { + panic("withoutreplacement: impossible size inputs") + } + + // There are two algorithms. One is to generate a random permutation + // and take the first len(idxs) elements. The second is to generate + // individual random numbers for each element and check uniqueness. The first + // method scales as O(n), and the second scales as O(len(idxs)^2). Choose + // the algorithm accordingly. + if n < len(idxs)*len(idxs) { + var perm []int + if src != nil { + perm = rand.New(src).Perm(n) + } else { + perm = rand.Perm(n) + } + copy(idxs, perm) + return + } + + // Instead, generate the random numbers directly. + sorted := make([]int, 0, len(idxs)) + for i := range idxs { + var r int + if src != nil { + r = rand.New(src).Intn(n - i) + } else { + r = rand.Intn(n - i) + } + for _, v := range sorted { + if r >= v { + r++ + } + } + idxs[i] = r + sorted = append(sorted, r) + sort.Ints(sorted) + } +} diff --git a/vendor/gonum.org/v1/gonum/stat/spatial/doc.go b/vendor/gonum.org/v1/gonum/stat/spatial/doc.go new file mode 100644 index 0000000..36c997b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/spatial/doc.go @@ -0,0 +1,6 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package spatial provides spatial statistical functions. +package spatial // import "gonum.org/v1/gonum/stat/spatial" diff --git a/vendor/gonum.org/v1/gonum/stat/spatial/spatial.go b/vendor/gonum.org/v1/gonum/stat/spatial/spatial.go new file mode 100644 index 0000000..994c085 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/spatial/spatial.go @@ -0,0 +1,162 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package spatial + +import ( + "math" + + "gonum.org/v1/gonum/mat" + "gonum.org/v1/gonum/stat" +) + +// TODO(kortschak): Implement weighted routines. + +// GetisOrdGStar returns the Local Getis-Ord G*i statistic for element of the +// weighted data using the provided locality matrix. The returned value is a z-score. +// +// G^*_i = num_i / den_i +// +// num_i = \sum_j (w_{ij} x_j) - \bar X \sum_j w_{ij} +// den_i = S \sqrt(((n \sum_j w_{ij}^2 - (\sum_j w_{ij})^2))/(n - 1)) +// \bar X = (\sum_j x_j) / n +// S = \sqrt((\sum_j x_j^2)/n - (\bar X)^2) +// +// GetisOrdGStar will panic if locality is not a square matrix with dimensions the +// same as the length of data or if i is not a valid index into data. +// +// See doi.org/10.1111%2Fj.1538-4632.1995.tb00912.x. +// +// Weighted Getis-Ord G*i is not currently implemented and GetisOrdGStar will +// panic if weights is not nil. 
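+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): G*_i for the second site of a four-site chain. The rook-contiguity
+// locality matrix below is only one possible choice; any mat.Matrix works.
+//
+//	data := []float64{1, 2, 3, 8}
+//	locality := mat.NewDense(4, 4, []float64{
+//		0, 1, 0, 0,
+//		1, 0, 1, 0,
+//		0, 1, 0, 1,
+//		0, 0, 1, 0,
+//	})
+//	z := spatial.GetisOrdGStar(1, data, nil, locality)
+//	// z is the z-score for site 1 under this locality structure.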
+func GetisOrdGStar(i int, data, weights []float64, locality mat.Matrix) float64 { + if weights != nil { + panic("spatial: weighted data not yet implemented") + } + r, c := locality.Dims() + if r != len(data) || c != len(data) { + panic("spatial: data length mismatch") + } + + n := float64(len(data)) + mean, std := stat.MeanStdDev(data, weights) + var dwd, dww, sw float64 + if doer, ok := locality.(mat.RowNonZeroDoer); ok { + doer.DoRowNonZero(i, func(_, j int, w float64) { + sw += w + dwd += w * data[j] + dww += w * w + }) + } else { + for j, v := range data { + w := locality.At(i, j) + sw += w + dwd += w * v + dww += w * w + } + } + s := std * math.Sqrt((n-1)/n) + + return (dwd - mean*sw) / (s * math.Sqrt((n*dww-sw*sw)/(n-1))) +} + +// GlobalMoransI performs Global Moran's I calculation of spatial autocorrelation +// for the given data using the provided locality matrix. GlobalMoransI returns +// Moran's I, Var(I) and the z-score associated with those values. +// GlobalMoransI will panic if locality is not a square matrix with dimensions the +// same as the length of data. +// +// See https://doi.org/10.1111%2Fj.1538-4632.2007.00708.x. +// +// Weighted Global Moran's I is not currently implemented and GlobalMoransI will +// panic if weights is not nil. +func GlobalMoransI(data, weights []float64, locality mat.Matrix) (i, v, z float64) { + if weights != nil { + panic("spatial: weighted data not yet implemented") + } + if r, c := locality.Dims(); r != len(data) || c != len(data) { + panic("spatial: data length mismatch") + } + mean := stat.Mean(data, nil) + + doer, isDoer := locality.(mat.RowNonZeroDoer) + + // Calculate Moran's I for the data. + var num, den, sum float64 + for i, xi := range data { + zi := xi - mean + den += zi * zi + if isDoer { + doer.DoRowNonZero(i, func(_, j int, w float64) { + sum += w + zj := data[j] - mean + num += w * zi * zj + }) + } else { + for j, xj := range data { + w := locality.At(i, j) + sum += w + zj := xj - mean + num += w * zi * zj + } + } + } + i = (float64(len(data)) / sum) * (num / den) + + // Calculate Moran's E(I) for the data. + e := -1 / float64(len(data)-1) + + // Calculate Moran's Var(I) for the data. + // http://pro.arcgis.com/en/pro-app/tool-reference/spatial-statistics/h-how-spatial-autocorrelation-moran-s-i-spatial-st.htm + // http://pro.arcgis.com/en/pro-app/tool-reference/spatial-statistics/h-global-morans-i-additional-math.htm + var s0, s1, s2 float64 + var var2, var4 float64 + for i, v := range data { + v -= mean + v *= v + var2 += v + var4 += v * v + + var p2 float64 + if isDoer { + doer.DoRowNonZero(i, func(i, j int, wij float64) { + wji := locality.At(j, i) + + s0 += wij + + v := wij + wji + s1 += v * v + + p2 += v + }) + } else { + for j := range data { + wij := locality.At(i, j) + wji := locality.At(j, i) + + s0 += wij + + v := wij + wji + s1 += v * v + + p2 += v + } + } + s2 += p2 * p2 + } + s1 *= 0.5 + + n := float64(len(data)) + a := n * ((n*n-3*n+3)*s1 - n*s2 + 3*s0*s0) + c := (n - 1) * (n - 2) * (n - 3) * s0 * s0 + d := var4 / (var2 * var2) + b := d * ((n*n-n)*s1 - 2*n*s2 + 6*s0*s0) + + v = (a-b)/c - e*e + + // Calculate z-score associated with Moran's I for the data. + z = (i - e) / math.Sqrt(v) + + return i, v, z +} diff --git a/vendor/gonum.org/v1/gonum/stat/stat.go b/vendor/gonum.org/v1/gonum/stat/stat.go new file mode 100644 index 0000000..be17702 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/stat.go @@ -0,0 +1,1295 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stat + +import ( + "math" + "sort" + + "gonum.org/v1/gonum/floats" +) + +// CumulantKind specifies the behavior for calculating the empirical CDF or Quantile +type CumulantKind int + +// List of supported CumulantKind values for the Quantile function. +// Constant values should match the R nomenclature. See +// https://en.wikipedia.org/wiki/Quantile#Estimating_the_quantiles_of_a_population +const ( + // Empirical treats the distribution as the actual empirical distribution. + Empirical CumulantKind = 1 + // LinInterp linearly interpolates the empirical distribution between sample values, with a flat extrapolation. + LinInterp CumulantKind = 4 +) + +// bhattacharyyaCoeff computes the Bhattacharyya Coefficient for probability distributions given by: +// \sum_i \sqrt{p_i q_i} +// +// It is assumed that p and q have equal length. +func bhattacharyyaCoeff(p, q []float64) float64 { + var bc float64 + for i, a := range p { + bc += math.Sqrt(a * q[i]) + } + return bc +} + +// Bhattacharyya computes the distance between the probability distributions p and q given by: +// -\ln ( \sum_i \sqrt{p_i q_i} ) +// +// The lengths of p and q must be equal. It is assumed that p and q sum to 1. +func Bhattacharyya(p, q []float64) float64 { + if len(p) != len(q) { + panic("stat: slice length mismatch") + } + bc := bhattacharyyaCoeff(p, q) + return -math.Log(bc) +} + +// CDF returns the empirical cumulative distribution function value of x, that is +// the fraction of the samples less than or equal to q. The +// exact behavior is determined by the CumulantKind. CDF is theoretically +// the inverse of the Quantile function, though it may not be the actual inverse +// for all values q and CumulantKinds. +// +// The x data must be sorted in increasing order. If weights is nil then all +// of the weights are 1. If weights is not nil, then len(x) must equal len(weights). +// +// CumulantKind behaviors: +// - Empirical: Returns the lowest fraction for which q is greater than or equal +// to that fraction of samples +func CDF(q float64, c CumulantKind, x, weights []float64) float64 { + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + if floats.HasNaN(x) { + return math.NaN() + } + if !sort.Float64sAreSorted(x) { + panic("x data are not sorted") + } + + if q < x[0] { + return 0 + } + if q >= x[len(x)-1] { + return 1 + } + + var sumWeights float64 + if weights == nil { + sumWeights = float64(len(x)) + } else { + sumWeights = floats.Sum(weights) + } + + // Calculate the index + switch c { + case Empirical: + // Find the smallest value that is greater than that percent of the samples + var w float64 + for i, v := range x { + if v > q { + return w / sumWeights + } + if weights == nil { + w++ + } else { + w += weights[i] + } + } + panic("impossible") + default: + panic("stat: bad cumulant kind") + } +} + +// ChiSquare computes the chi-square distance between the observed frequences 'obs' and +// expected frequences 'exp' given by: +// \sum_i (obs_i-exp_i)^2 / exp_i +// +// The lengths of obs and exp must be equal. +func ChiSquare(obs, exp []float64) float64 { + if len(obs) != len(exp) { + panic("stat: slice length mismatch") + } + var result float64 + for i, a := range obs { + b := exp[i] + if a == 0 && b == 0 { + continue + } + result += (a - b) * (a - b) / b + } + return result +} + +// CircularMean returns the circular mean of the dataset. 
+// atan2(\sum_i w_i * sin(alpha_i), \sum_i w_i * cos(alpha_i)) +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func CircularMean(x, weights []float64) float64 { + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + + var aX, aY float64 + if weights != nil { + for i, v := range x { + aX += weights[i] * math.Cos(v) + aY += weights[i] * math.Sin(v) + } + } else { + for _, v := range x { + aX += math.Cos(v) + aY += math.Sin(v) + } + } + + return math.Atan2(aY, aX) +} + +// Correlation returns the weighted correlation between the samples of x and y +// with the given means. +// sum_i {w_i (x_i - meanX) * (y_i - meanY)} / (stdX * stdY) +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). +func Correlation(x, y, weights []float64) float64 { + // This is a two-pass corrected implementation. It is an adaptation of the + // algorithm used in the MeanVariance function, which applies a correction + // to the typical two pass approach. + + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + xu := Mean(x, weights) + yu := Mean(y, weights) + var ( + sxx float64 + syy float64 + sxy float64 + xcompensation float64 + ycompensation float64 + ) + if weights == nil { + for i, xv := range x { + yv := y[i] + xd := xv - xu + yd := yv - yu + sxx += xd * xd + syy += yd * yd + sxy += xd * yd + xcompensation += xd + ycompensation += yd + } + // xcompensation and ycompensation are from Chan, et. al. + // referenced in the MeanVariance function. They are analogous + // to the second term in (1.7) in that paper. + sxx -= xcompensation * xcompensation / float64(len(x)) + syy -= ycompensation * ycompensation / float64(len(x)) + + return (sxy - xcompensation*ycompensation/float64(len(x))) / math.Sqrt(sxx*syy) + + } + + var sumWeights float64 + for i, xv := range x { + w := weights[i] + yv := y[i] + xd := xv - xu + wxd := w * xd + yd := yv - yu + wyd := w * yd + sxx += wxd * xd + syy += wyd * yd + sxy += wxd * yd + xcompensation += wxd + ycompensation += wyd + sumWeights += w + } + // xcompensation and ycompensation are from Chan, et. al. + // referenced in the MeanVariance function. They are analogous + // to the second term in (1.7) in that paper, except they use + // the sumWeights instead of the sample count. + sxx -= xcompensation * xcompensation / sumWeights + syy -= ycompensation * ycompensation / sumWeights + + return (sxy - xcompensation*ycompensation/sumWeights) / math.Sqrt(sxx*syy) +} + +// Kendall returns the weighted Tau-a Kendall correlation between the +// samples of x and y. The Kendall correlation measures the quantity of +// concordant and discordant pairs of numbers. If weights are specified then +// each pair is weighted by weights[i] * weights[j] and the final sum is +// normalized to stay between -1 and 1. +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). 
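+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): an unweighted Tau-a over five paired observations. Eight of the ten
+// pairs are concordant and two are discordant, so the result is (8-2)/10:
+//
+//	x := []float64{1, 2, 3, 4, 5}
+//	y := []float64{2, 1, 4, 3, 5}
+//	tau := stat.Kendall(x, y, nil) // 0.6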
+func Kendall(x, y, weights []float64) float64 { + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + + var ( + cc float64 // number of concordant pairs + dc float64 // number of discordant pairs + n = len(x) + ) + + if weights == nil { + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + if i == j { + continue + } + if math.Signbit(x[j]-x[i]) == math.Signbit(y[j]-y[i]) { + cc++ + } else { + dc++ + } + } + } + return (cc - dc) / float64(n*(n-1)/2) + } + + var sumWeights float64 + + for i := 0; i < n; i++ { + for j := i; j < n; j++ { + if i == j { + continue + } + weight := weights[i] * weights[j] + if math.Signbit(x[j]-x[i]) == math.Signbit(y[j]-y[i]) { + cc += weight + } else { + dc += weight + } + sumWeights += weight + } + } + return float64(cc-dc) / sumWeights +} + +// Covariance returns the weighted covariance between the samples of x and y. +// sum_i {w_i (x_i - meanX) * (y_i - meanY)} / (sum_j {w_j} - 1) +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). +func Covariance(x, y, weights []float64) float64 { + // This is a two-pass corrected implementation. It is an adaptation of the + // algorithm used in the MeanVariance function, which applies a correction + // to the typical two pass approach. + + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + xu := Mean(x, weights) + yu := Mean(y, weights) + return covarianceMeans(x, y, weights, xu, yu) +} + +// covarianceMeans returns the weighted covariance between x and y with the mean +// of x and y already specified. See the documentation of Covariance for more +// information. +func covarianceMeans(x, y, weights []float64, xu, yu float64) float64 { + var ( + ss float64 + xcompensation float64 + ycompensation float64 + ) + if weights == nil { + for i, xv := range x { + yv := y[i] + xd := xv - xu + yd := yv - yu + ss += xd * yd + xcompensation += xd + ycompensation += yd + } + // xcompensation and ycompensation are from Chan, et. al. + // referenced in the MeanVariance function. They are analogous + // to the second term in (1.7) in that paper. + return (ss - xcompensation*ycompensation/float64(len(x))) / float64(len(x)-1) + } + + var sumWeights float64 + + for i, xv := range x { + w := weights[i] + yv := y[i] + wxd := w * (xv - xu) + yd := (yv - yu) + ss += wxd * yd + xcompensation += wxd + ycompensation += w * yd + sumWeights += w + } + // xcompensation and ycompensation are from Chan, et. al. + // referenced in the MeanVariance function. They are analogous + // to the second term in (1.7) in that paper, except they use + // the sumWeights instead of the sample count. + return (ss - xcompensation*ycompensation/sumWeights) / (sumWeights - 1) +} + +// CrossEntropy computes the cross-entropy between the two distributions specified +// in p and q. +func CrossEntropy(p, q []float64) float64 { + if len(p) != len(q) { + panic("stat: slice length mismatch") + } + var ce float64 + for i, v := range p { + if v != 0 { + ce -= v * math.Log(q[i]) + } + } + return ce +} + +// Entropy computes the Shannon entropy of a distribution or the distance between +// two distributions. The natural logarithm is used. +// - sum_i (p_i * log_e(p_i)) +func Entropy(p []float64) float64 { + var e float64 + for _, v := range p { + if v != 0 { // Entropy needs 0 * log(0) == 0 + e -= v * math.Log(v) + } + } + return e +} + +// ExKurtosis returns the population excess kurtosis of the sample. 
+// The kurtosis is defined by the 4th moment of the mean divided by the squared +// variance. The excess kurtosis subtracts 3.0 so that the excess kurtosis of +// the normal distribution is zero. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func ExKurtosis(x, weights []float64) float64 { + mean, std := MeanStdDev(x, weights) + if weights == nil { + var e float64 + for _, v := range x { + z := (v - mean) / std + e += z * z * z * z + } + mul, offset := kurtosisCorrection(float64(len(x))) + return e*mul - offset + } + + var ( + e float64 + sumWeights float64 + ) + for i, v := range x { + z := (v - mean) / std + e += weights[i] * z * z * z * z + sumWeights += weights[i] + } + mul, offset := kurtosisCorrection(sumWeights) + return e*mul - offset +} + +// n is the number of samples +// see https://en.wikipedia.org/wiki/Kurtosis +func kurtosisCorrection(n float64) (mul, offset float64) { + return ((n + 1) / (n - 1)) * (n / (n - 2)) * (1 / (n - 3)), 3 * ((n - 1) / (n - 2)) * ((n - 1) / (n - 3)) +} + +// GeometricMean returns the weighted geometric mean of the dataset +// \prod_i {x_i ^ w_i} +// This only applies with positive x and positive weights. If weights is nil +// then all of the weights are 1. If weights is not nil, then len(x) must equal +// len(weights). +func GeometricMean(x, weights []float64) float64 { + if weights == nil { + var s float64 + for _, v := range x { + s += math.Log(v) + } + s /= float64(len(x)) + return math.Exp(s) + } + if len(x) != len(weights) { + panic("stat: slice length mismatch") + } + var ( + s float64 + sumWeights float64 + ) + for i, v := range x { + s += weights[i] * math.Log(v) + sumWeights += weights[i] + } + s /= sumWeights + return math.Exp(s) +} + +// HarmonicMean returns the weighted harmonic mean of the dataset +// \sum_i {w_i} / ( sum_i {w_i / x_i} ) +// This only applies with positive x and positive weights. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func HarmonicMean(x, weights []float64) float64 { + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + // TODO: Fix this to make it more efficient and avoid allocation + + // This can be numerically unstable (for example if x is very small) + // W = \sum_i {w_i} + // hm = exp(log(W) - log(\sum_i w_i / x_i)) + + logs := make([]float64, len(x)) + var W float64 + for i := range x { + if weights == nil { + logs[i] = -math.Log(x[i]) + W++ + continue + } + logs[i] = math.Log(weights[i]) - math.Log(x[i]) + W += weights[i] + } + + // Sum all of the logs + v := floats.LogSumExp(logs) // this computes log(\sum_i { w_i / x_i}) + return math.Exp(math.Log(W) - v) +} + +// Hellinger computes the distance between the probability distributions p and q given by: +// \sqrt{ 1 - \sum_i \sqrt{p_i q_i} } +// +// The lengths of p and q must be equal. It is assumed that p and q sum to 1. +func Hellinger(p, q []float64) float64 { + if len(p) != len(q) { + panic("stat: slice length mismatch") + } + bc := bhattacharyyaCoeff(p, q) + return math.Sqrt(1 - bc) +} + +// Histogram sums up the weighted number of data points in each bin. +// The weight of data point x[i] will be placed into count[j] if +// dividers[j] <= x < dividers[j+1]. The "span" function in the floats package can assist +// with bin creation. +// +// The following conditions on the inputs apply: +// - The count variable must either be nil or have length of one less than dividers. 
+// - The values in dividers must be sorted (use the sort package). +// - The x values must be sorted. +// - If weights is nil then all of the weights are 1. +// - If weights is not nil, then len(x) must equal len(weights). +func Histogram(count, dividers, x, weights []float64) []float64 { + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + if count == nil { + count = make([]float64, len(dividers)-1) + } + if len(dividers) < 2 { + panic("histogram: fewer than two dividers") + } + if len(count) != len(dividers)-1 { + panic("histogram: bin count mismatch") + } + if !sort.Float64sAreSorted(dividers) { + panic("histogram: dividers are not sorted") + } + if !sort.Float64sAreSorted(x) { + panic("histogram: x data are not sorted") + } + for i := range count { + count[i] = 0 + } + if len(x) == 0 { + return count + } + if x[0] < dividers[0] { + panic("histogram: minimum x value is less than lowest divider") + } + if dividers[len(dividers)-1] <= x[len(x)-1] { + panic("histogram: maximum x value is greater than or equal to highest divider") + } + + idx := 0 + comp := dividers[idx+1] + if weights == nil { + for _, v := range x { + if v < comp { + // Still in the current bucket + count[idx]++ + continue + } + // Find the next divider where v is less than the divider + for j := idx + 1; j < len(dividers); j++ { + if v < dividers[j+1] { + idx = j + comp = dividers[j+1] + break + } + } + count[idx]++ + } + return count + } + + for i, v := range x { + if v < comp { + // Still in the current bucket + count[idx] += weights[i] + continue + } + // Need to find the next divider where v is less than the divider. + for j := idx + 1; j < len(count); j++ { + if v < dividers[j+1] { + idx = j + comp = dividers[j+1] + break + } + } + count[idx] += weights[i] + } + return count +} + +// JensenShannon computes the JensenShannon divergence between the distributions +// p and q. The Jensen-Shannon divergence is defined as +// m = 0.5 * (p + q) +// JS(p, q) = 0.5 ( KL(p, m) + KL(q, m) ) +// Unlike Kullback-Liebler, the Jensen-Shannon distance is symmetric. The value +// is between 0 and ln(2). +func JensenShannon(p, q []float64) float64 { + if len(p) != len(q) { + panic("stat: slice length mismatch") + } + var js float64 + for i, v := range p { + qi := q[i] + m := 0.5 * (v + qi) + if v != 0 { + // add kl from p to m + js += 0.5 * v * (math.Log(v) - math.Log(m)) + } + if qi != 0 { + // add kl from q to m + js += 0.5 * qi * (math.Log(qi) - math.Log(m)) + } + } + return js +} + +// KolmogorovSmirnov computes the largest distance between two empirical CDFs. +// Each dataset x and y consists of sample locations and counts, xWeights and +// yWeights, respectively. +// +// x and y may have different lengths, though len(x) must equal len(xWeights), and +// len(y) must equal len(yWeights). Both x and y must be sorted. 
+// +// Special cases are: +// = 0 if len(x) == len(y) == 0 +// = 1 if len(x) == 0, len(y) != 0 or len(x) != 0 and len(y) == 0 +func KolmogorovSmirnov(x, xWeights, y, yWeights []float64) float64 { + if xWeights != nil && len(x) != len(xWeights) { + panic("stat: slice length mismatch") + } + if yWeights != nil && len(y) != len(yWeights) { + panic("stat: slice length mismatch") + } + if len(x) == 0 || len(y) == 0 { + if len(x) == 0 && len(y) == 0 { + return 0 + } + return 1 + } + + if floats.HasNaN(x) { + return math.NaN() + } + if floats.HasNaN(y) { + return math.NaN() + } + + if !sort.Float64sAreSorted(x) { + panic("x data are not sorted") + } + if !sort.Float64sAreSorted(y) { + panic("y data are not sorted") + } + + xWeightsNil := xWeights == nil + yWeightsNil := yWeights == nil + + var ( + maxDist float64 + xSum, ySum float64 + xCdf, yCdf float64 + xIdx, yIdx int + ) + + if xWeightsNil { + xSum = float64(len(x)) + } else { + xSum = floats.Sum(xWeights) + } + + if yWeightsNil { + ySum = float64(len(y)) + } else { + ySum = floats.Sum(yWeights) + } + + xVal := x[0] + yVal := y[0] + + // Algorithm description: + // The goal is to find the maximum difference in the empirical CDFs for the + // two datasets. The CDFs are piecewise-constant, and thus the distance + // between the CDFs will only change at the values themselves. + // + // To find the maximum distance, step through the data in ascending order + // of value between the two datasets. At each step, compute the empirical CDF + // and compare the local distance with the maximum distance. + // Due to some corner cases, equal data entries must be tallied simultaneously. + for { + switch { + case xVal < yVal: + xVal, xCdf, xIdx = updateKS(xIdx, xCdf, xSum, x, xWeights, xWeightsNil) + case yVal < xVal: + yVal, yCdf, yIdx = updateKS(yIdx, yCdf, ySum, y, yWeights, yWeightsNil) + case xVal == yVal: + newX := x[xIdx] + newY := y[yIdx] + if newX < newY { + xVal, xCdf, xIdx = updateKS(xIdx, xCdf, xSum, x, xWeights, xWeightsNil) + } else if newY < newX { + yVal, yCdf, yIdx = updateKS(yIdx, yCdf, ySum, y, yWeights, yWeightsNil) + } else { + // Update them both, they'll be equal next time and the right + // thing will happen + xVal, xCdf, xIdx = updateKS(xIdx, xCdf, xSum, x, xWeights, xWeightsNil) + yVal, yCdf, yIdx = updateKS(yIdx, yCdf, ySum, y, yWeights, yWeightsNil) + } + default: + panic("unreachable") + } + + dist := math.Abs(xCdf - yCdf) + if dist > maxDist { + maxDist = dist + } + + // Both xCdf and yCdf will equal 1 at the end, so if we have reached the + // end of either sample list, the distance is as large as it can be. + if xIdx == len(x) || yIdx == len(y) { + return maxDist + } + } +} + +// updateKS gets the next data point from one of the set. In doing so, it combines +// the weight of all the data points of equal value. Upon return, val is the new +// value of the data set, newCdf is the total combined CDF up until this point, +// and newIdx is the index of the next location in that sample to examine. 
+func updateKS(idx int, cdf, sum float64, values, weights []float64, isNil bool) (val, newCdf float64, newIdx int) { + // Sum up all the weights of consecutive values that are equal + if isNil { + newCdf = cdf + 1/sum + } else { + newCdf = cdf + weights[idx]/sum + } + newIdx = idx + 1 + for { + if newIdx == len(values) { + return values[newIdx-1], newCdf, newIdx + } + if values[newIdx-1] != values[newIdx] { + return values[newIdx], newCdf, newIdx + } + if isNil { + newCdf += 1 / sum + } else { + newCdf += weights[newIdx] / sum + } + newIdx++ + } +} + +// KullbackLeibler computes the Kullback-Leibler distance between the +// distributions p and q. The natural logarithm is used. +// sum_i(p_i * log(p_i / q_i)) +// Note that the Kullback-Leibler distance is not symmetric; +// KullbackLeibler(p,q) != KullbackLeibler(q,p) +func KullbackLeibler(p, q []float64) float64 { + if len(p) != len(q) { + panic("stat: slice length mismatch") + } + var kl float64 + for i, v := range p { + if v != 0 { // Entropy needs 0 * log(0) == 0 + kl += v * (math.Log(v) - math.Log(q[i])) + } + } + return kl +} + +// LinearRegression computes the best-fit line +// y = alpha + beta*x +// to the data in x and y with the given weights. If origin is true, the +// regression is forced to pass through the origin. +// +// Specifically, LinearRegression computes the values of alpha and +// beta such that the total residual +// \sum_i w[i]*(y[i] - alpha - beta*x[i])^2 +// is minimized. If origin is true, then alpha is forced to be zero. +// +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). +func LinearRegression(x, y, weights []float64, origin bool) (alpha, beta float64) { + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + if weights != nil && len(weights) != len(x) { + panic("stat: slice length mismatch") + } + + w := 1.0 + if origin { + var x2Sum, xySum float64 + for i, xi := range x { + if weights != nil { + w = weights[i] + } + yi := y[i] + xySum += w * xi * yi + x2Sum += w * xi * xi + } + beta = xySum / x2Sum + + return 0, beta + } + + xu, xv := MeanVariance(x, weights) + yu := Mean(y, weights) + cov := covarianceMeans(x, y, weights, xu, yu) + beta = cov / xv + alpha = yu - beta*xu + return alpha, beta +} + +// RSquared returns the coefficient of determination defined as +// R^2 = 1 - \sum_i w[i]*(y[i] - alpha - beta*x[i])^2 / \sum_i w[i]*(y[i] - mean(y))^2 +// for the line +// y = alpha + beta*x +// and the data in x and y with the given weights. +// +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). +func RSquared(x, y, weights []float64, alpha, beta float64) float64 { + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + if weights != nil && len(weights) != len(x) { + panic("stat: slice length mismatch") + } + + w := 1.0 + yMean := Mean(y, weights) + var res, tot, d float64 + for i, xi := range x { + if weights != nil { + w = weights[i] + } + yi := y[i] + fi := alpha + beta*xi + d = yi - fi + res += w * d * d + d = yi - yMean + tot += w * d * d + } + return 1 - res/tot +} + +// RSquaredFrom returns the coefficient of determination defined as +// R^2 = 1 - \sum_i w[i]*(estimate[i] - value[i])^2 / \sum_i w[i]*(value[i] - mean(values))^2 +// and the data in estimates and values with the given weights. +// +// The lengths of estimates and values must be equal. 
If weights is nil then +// all of the weights are 1. If weights is not nil, then len(values) must +// equal len(weights). +func RSquaredFrom(estimates, values, weights []float64) float64 { + if len(estimates) != len(values) { + panic("stat: slice length mismatch") + } + if weights != nil && len(weights) != len(values) { + panic("stat: slice length mismatch") + } + + w := 1.0 + mean := Mean(values, weights) + var res, tot, d float64 + for i, val := range values { + if weights != nil { + w = weights[i] + } + d = val - estimates[i] + res += w * d * d + d = val - mean + tot += w * d * d + } + return 1 - res/tot +} + +// RNoughtSquared returns the coefficient of determination defined as +// R₀^2 = \sum_i w[i]*(beta*x[i])^2 / \sum_i w[i]*y[i]^2 +// for the line +// y = beta*x +// and the data in x and y with the given weights. RNoughtSquared should +// only be used for best-fit lines regressed through the origin. +// +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). +func RNoughtSquared(x, y, weights []float64, beta float64) float64 { + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + if weights != nil && len(weights) != len(x) { + panic("stat: slice length mismatch") + } + + w := 1.0 + var ssr, tot float64 + for i, xi := range x { + if weights != nil { + w = weights[i] + } + fi := beta * xi + ssr += w * fi * fi + yi := y[i] + tot += w * yi * yi + } + return ssr / tot +} + +// Mean computes the weighted mean of the data set. +// sum_i {w_i * x_i} / sum_i {w_i} +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func Mean(x, weights []float64) float64 { + if weights == nil { + return floats.Sum(x) / float64(len(x)) + } + if len(x) != len(weights) { + panic("stat: slice length mismatch") + } + var ( + sumValues float64 + sumWeights float64 + ) + for i, w := range weights { + sumValues += w * x[i] + sumWeights += w + } + return sumValues / sumWeights +} + +// Mode returns the most common value in the dataset specified by x and the +// given weights. Strict float64 equality is used when comparing values, so users +// should take caution. If several values are the mode, any of them may be returned. +func Mode(x, weights []float64) (val float64, count float64) { + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + if len(x) == 0 { + return 0, 0 + } + m := make(map[float64]float64) + if weights == nil { + for _, v := range x { + m[v]++ + } + } else { + for i, v := range x { + m[v] += weights[i] + } + } + var ( + maxCount float64 + max float64 + ) + for val, count := range m { + if count > maxCount { + maxCount = count + max = val + } + } + return max, maxCount +} + +// BivariateMoment computes the weighted mixed moment between the samples x and y. +// E[(x - μ_x)^r*(y - μ_y)^s] +// No degrees of freedom correction is done. +// The lengths of x and y must be equal. If weights is nil then all of the +// weights are 1. If weights is not nil, then len(x) must equal len(weights). 
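+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): with r = s = 1 the mixed moment is the population covariance:
+//
+//	x := []float64{1, 2, 3, 4}
+//	y := []float64{2, 4, 6, 8}
+//	m11 := stat.BivariateMoment(1, 1, x, y, nil) // 2.5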
+func BivariateMoment(r, s float64, x, y, weights []float64) float64 { + meanX := Mean(x, weights) + meanY := Mean(y, weights) + if len(x) != len(y) { + panic("stat: slice length mismatch") + } + if weights == nil { + var m float64 + for i, vx := range x { + vy := y[i] + m += math.Pow(vx-meanX, r) * math.Pow(vy-meanY, s) + } + return m / float64(len(x)) + } + if len(weights) != len(x) { + panic("stat: slice length mismatch") + } + var ( + m float64 + sumWeights float64 + ) + for i, vx := range x { + vy := y[i] + w := weights[i] + m += w * math.Pow(vx-meanX, r) * math.Pow(vy-meanY, s) + sumWeights += w + } + return m / sumWeights +} + +// Moment computes the weighted n^th moment of the samples, +// E[(x - μ)^N] +// No degrees of freedom correction is done. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func Moment(moment float64, x, weights []float64) float64 { + // This also checks that x and weights have the same length. + mean := Mean(x, weights) + if weights == nil { + var m float64 + for _, v := range x { + m += math.Pow(v-mean, moment) + } + return m / float64(len(x)) + } + var ( + m float64 + sumWeights float64 + ) + for i, v := range x { + w := weights[i] + m += w * math.Pow(v-mean, moment) + sumWeights += w + } + return m / sumWeights +} + +// MomentAbout computes the weighted n^th weighted moment of the samples about +// the given mean \mu, +// E[(x - μ)^N] +// No degrees of freedom correction is done. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +func MomentAbout(moment float64, x []float64, mean float64, weights []float64) float64 { + if weights == nil { + var m float64 + for _, v := range x { + m += math.Pow(v-mean, moment) + } + m /= float64(len(x)) + return m + } + if len(weights) != len(x) { + panic("stat: slice length mismatch") + } + var ( + m float64 + sumWeights float64 + ) + for i, v := range x { + m += weights[i] * math.Pow(v-mean, moment) + sumWeights += weights[i] + } + return m / sumWeights +} + +// Quantile returns the sample of x such that x is greater than or +// equal to the fraction p of samples. The exact behavior is determined by the +// CumulantKind, and p should be a number between 0 and 1. Quantile is theoretically +// the inverse of the CDF function, though it may not be the actual inverse +// for all values p and CumulantKinds. +// +// The x data must be sorted in increasing order. If weights is nil then all +// of the weights are 1. If weights is not nil, then len(x) must equal len(weights). 
+// +// CumulantKind behaviors: +// - Empirical: Returns the lowest value q for which q is greater than or equal +// to the fraction p of samples +// - LinInterp: Returns the linearly interpolated value +func Quantile(p float64, c CumulantKind, x, weights []float64) float64 { + if !(p >= 0 && p <= 1) { + panic("stat: percentile out of bounds") + } + + if weights != nil && len(x) != len(weights) { + panic("stat: slice length mismatch") + } + if floats.HasNaN(x) { + return math.NaN() // This is needed because the algorithm breaks otherwise + } + if !sort.Float64sAreSorted(x) { + panic("x data are not sorted") + } + + var sumWeights float64 + if weights == nil { + sumWeights = float64(len(x)) + } else { + sumWeights = floats.Sum(weights) + } + switch c { + case Empirical: + return empiricalQuantile(p, x, weights, sumWeights) + case LinInterp: + return linInterpQuantile(p, x, weights, sumWeights) + default: + panic("stat: bad cumulant kind") + } +} + +func empiricalQuantile(p float64, x, weights []float64, sumWeights float64) float64 { + var cumsum float64 + fidx := p * sumWeights + for i := range x { + if weights == nil { + cumsum++ + } else { + cumsum += weights[i] + } + if cumsum >= fidx { + return x[i] + } + } + panic("impossible") +} + +func linInterpQuantile(p float64, x, weights []float64, sumWeights float64) float64 { + var cumsum float64 + fidx := p * sumWeights + for i := range x { + if weights == nil { + cumsum++ + } else { + cumsum += weights[i] + } + if cumsum >= fidx { + if i == 0 { + return x[0] + } + t := cumsum - fidx + if weights != nil { + t /= weights[i] + } + return t*x[i-1] + (1-t)*x[i] + } + } + panic("impossible") +} + +// Skew computes the skewness of the sample data. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +// When weights sum to 1 or less, a biased variance estimator should be used. +func Skew(x, weights []float64) float64 { + + mean, std := MeanStdDev(x, weights) + if weights == nil { + var s float64 + for _, v := range x { + z := (v - mean) / std + s += z * z * z + } + return s * skewCorrection(float64(len(x))) + } + var ( + s float64 + sumWeights float64 + ) + for i, v := range x { + z := (v - mean) / std + s += weights[i] * z * z * z + sumWeights += weights[i] + } + return s * skewCorrection(sumWeights) +} + +// From: http://www.amstat.org/publications/jse/v19n2/doane.pdf page 7 +func skewCorrection(n float64) float64 { + return (n / (n - 1)) * (1 / (n - 2)) +} + +// SortWeighted rearranges the data in x along with their corresponding +// weights so that the x data are sorted. The data is sorted in place. +// Weights may be nil, but if weights is non-nil then it must have the same +// length as x. +func SortWeighted(x, weights []float64) { + if weights == nil { + sort.Float64s(x) + return + } + if len(x) != len(weights) { + panic("stat: slice length mismatch") + } + sort.Sort(weightSorter{ + x: x, + w: weights, + }) +} + +type weightSorter struct { + x []float64 + w []float64 +} + +func (w weightSorter) Len() int { return len(w.x) } +func (w weightSorter) Less(i, j int) bool { return w.x[i] < w.x[j] } +func (w weightSorter) Swap(i, j int) { + w.x[i], w.x[j] = w.x[j], w.x[i] + w.w[i], w.w[j] = w.w[j], w.w[i] +} + +// SortWeightedLabeled rearranges the data in x along with their +// corresponding weights and boolean labels so that the x data are sorted. +// The data is sorted in place. Weights and labels may be nil, if either +// is non-nil it must have the same length as x. 
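+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): sorting x keeps each label and weight attached to its value:
+//
+//	x := []float64{3, 1, 2}
+//	labels := []bool{true, false, true}
+//	weights := []float64{0.5, 0.2, 0.3}
+//	stat.SortWeightedLabeled(x, labels, weights)
+//	// x is now [1 2 3], labels [false true true], weights [0.2 0.3 0.5].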
+func SortWeightedLabeled(x []float64, labels []bool, weights []float64) { + if labels == nil { + SortWeighted(x, weights) + return + } + if weights == nil { + if len(x) != len(labels) { + panic("stat: slice length mismatch") + } + sort.Sort(labelSorter{ + x: x, + l: labels, + }) + return + } + if len(x) != len(labels) || len(x) != len(weights) { + panic("stat: slice length mismatch") + } + sort.Sort(weightLabelSorter{ + x: x, + l: labels, + w: weights, + }) +} + +type labelSorter struct { + x []float64 + l []bool +} + +func (a labelSorter) Len() int { return len(a.x) } +func (a labelSorter) Less(i, j int) bool { return a.x[i] < a.x[j] } +func (a labelSorter) Swap(i, j int) { + a.x[i], a.x[j] = a.x[j], a.x[i] + a.l[i], a.l[j] = a.l[j], a.l[i] +} + +type weightLabelSorter struct { + x []float64 + l []bool + w []float64 +} + +func (a weightLabelSorter) Len() int { return len(a.x) } +func (a weightLabelSorter) Less(i, j int) bool { return a.x[i] < a.x[j] } +func (a weightLabelSorter) Swap(i, j int) { + a.x[i], a.x[j] = a.x[j], a.x[i] + a.l[i], a.l[j] = a.l[j], a.l[i] + a.w[i], a.w[j] = a.w[j], a.w[i] +} + +// StdDev returns the sample standard deviation. +func StdDev(x, weights []float64) float64 { + _, std := MeanStdDev(x, weights) + return std +} + +// MeanStdDev returns the sample mean and unbiased standard deviation +// When weights sum to 1 or less, a biased variance estimator should be used. +func MeanStdDev(x, weights []float64) (mean, std float64) { + mean, variance := MeanVariance(x, weights) + return mean, math.Sqrt(variance) +} + +// StdErr returns the standard error in the mean with the given values. +func StdErr(std, sampleSize float64) float64 { + return std / math.Sqrt(sampleSize) +} + +// StdScore returns the standard score (a.k.a. z-score, z-value) for the value x +// with the givem mean and standard deviation, i.e. +// (x - mean) / std +func StdScore(x, mean, std float64) float64 { + return (x - mean) / std +} + +// Variance computes the unbiased weighted sample variance: +// \sum_i w_i (x_i - mean)^2 / (sum_i w_i - 1) +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +// When weights sum to 1 or less, a biased variance estimator should be used. +func Variance(x, weights []float64) float64 { + _, variance := MeanVariance(x, weights) + return variance +} + +// MeanVariance computes the sample mean and unbiased variance, where the mean and variance are +// \sum_i w_i * x_i / (sum_i w_i) +// \sum_i w_i (x_i - mean)^2 / (sum_i w_i - 1) +// respectively. +// If weights is nil then all of the weights are 1. If weights is not nil, then +// len(x) must equal len(weights). +// When weights sum to 1 or less, a biased variance estimator should be used. +func MeanVariance(x, weights []float64) (mean, variance float64) { + // This uses the corrected two-pass algorithm (1.7), from "Algorithms for computing + // the sample variance: Analysis and recommendations" by Chan, Tony F., Gene H. Golub, + // and Randall J. LeVeque. 
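+	// Editorial note (added comment only; the code below is unchanged): the
+	// loops accumulate ss = \sum_i w_i*d_i^2 and compensation = \sum_i w_i*d_i
+	// with d_i = x_i - mean (w_i = 1 when weights is nil). Subtracting
+	// compensation^2 / \sum_i w_i is the second term of (1.7) in Chan et al.
+	// and cancels the rounding error in the computed mean, giving
+	//	variance = (ss - compensation^2/\sum_i w_i) / (\sum_i w_i - 1)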
+ + // note that this will panic if the slice lengths do not match + mean = Mean(x, weights) + var ( + ss float64 + compensation float64 + ) + if weights == nil { + for _, v := range x { + d := v - mean + ss += d * d + compensation += d + } + variance = (ss - compensation*compensation/float64(len(x))) / float64(len(x)-1) + return + } + + var sumWeights float64 + for i, v := range x { + w := weights[i] + d := v - mean + wd := w * d + ss += wd * d + compensation += wd + sumWeights += w + } + variance = (ss - compensation*compensation/sumWeights) / (sumWeights - 1) + return +} diff --git a/vendor/gonum.org/v1/gonum/stat/statmat.go b/vendor/gonum.org/v1/gonum/stat/statmat.go new file mode 100644 index 0000000..d584b68 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/stat/statmat.go @@ -0,0 +1,140 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package stat + +import ( + "math" + + "gonum.org/v1/gonum/floats" + "gonum.org/v1/gonum/mat" +) + +// CovarianceMatrix calculates the covariance matrix (also known as the +// variance-covariance matrix) calculated from a matrix of data, x, using +// a two-pass algorithm. The result is stored in dst. +// +// If weights is not nil the weighted covariance of x is calculated. weights +// must have length equal to the number of rows in input data matrix and +// must not contain negative elements. +// The dst matrix must either be zero-sized or have the same number of +// columns as the input data matrix. +func CovarianceMatrix(dst *mat.SymDense, x mat.Matrix, weights []float64) { + // This is the matrix version of the two-pass algorithm. It doesn't use the + // additional floating point error correction that the Covariance function uses + // to reduce the impact of rounding during centering. + + r, c := x.Dims() + + if dst.IsZero() { + *dst = *(dst.GrowSym(c).(*mat.SymDense)) + } else if n := dst.Symmetric(); n != c { + panic(mat.ErrShape) + } + + var xt mat.Dense + xt.Clone(x.T()) + // Subtract the mean of each of the columns. + for i := 0; i < c; i++ { + v := xt.RawRowView(i) + // This will panic with ErrShape if len(weights) != len(v), so + // we don't have to check the size later. + mean := Mean(v, weights) + floats.AddConst(-mean, v) + } + + if weights == nil { + // Calculate the normalization factor + // scaled by the sample size. + dst.SymOuterK(1/(float64(r)-1), &xt) + return + } + + // Multiply by the sqrt of the weights, so that multiplication is symmetric. + sqrtwts := make([]float64, r) + for i, w := range weights { + if w < 0 { + panic("stat: negative covariance matrix weights") + } + sqrtwts[i] = math.Sqrt(w) + } + // Weight the rows. + for i := 0; i < c; i++ { + v := xt.RawRowView(i) + floats.Mul(v, sqrtwts) + } + + // Calculate the normalization factor + // scaled by the weighted sample size. + dst.SymOuterK(1/(floats.Sum(weights)-1), &xt) +} + +// CorrelationMatrix returns the correlation matrix calculated from a matrix +// of data, x, using a two-pass algorithm. The result is stored in dst. +// +// If weights is not nil the weighted correlation of x is calculated. weights +// must have length equal to the number of rows in input data matrix and +// must not contain negative elements. +// The dst matrix must either be zero-sized or have the same number of +// columns as the input data matrix. 
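+//
+// Illustrative caller-side sketch (editorial addition, not upstream gonum
+// code): three observations of two variables stored row-wise; a zero-value
+// SymDense is grown to the right size by the call:
+//
+//	x := mat.NewDense(3, 2, []float64{
+//		1, 10,
+//		2, 12,
+//		3, 11,
+//	})
+//	var corr mat.SymDense
+//	stat.CorrelationMatrix(&corr, x, nil)
+//	// corr is 2×2 with ones on its diagonal.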
+func CorrelationMatrix(dst *mat.SymDense, x mat.Matrix, weights []float64) { + // This will panic if the sizes don't match, or if weights is the wrong size. + CovarianceMatrix(dst, x, weights) + covToCorr(dst) +} + +// covToCorr converts a covariance matrix to a correlation matrix. +func covToCorr(c *mat.SymDense) { + r := c.Symmetric() + + s := make([]float64, r) + for i := 0; i < r; i++ { + s[i] = 1 / math.Sqrt(c.At(i, i)) + } + for i, sx := range s { + // Ensure that the diagonal has exactly ones. + c.SetSym(i, i, 1) + for j := i + 1; j < r; j++ { + v := c.At(i, j) + c.SetSym(i, j, v*sx*s[j]) + } + } +} + +// corrToCov converts a correlation matrix to a covariance matrix. +// The input sigma should be vector of standard deviations corresponding +// to the covariance. It will panic if len(sigma) is not equal to the +// number of rows in the correlation matrix. +func corrToCov(c *mat.SymDense, sigma []float64) { + r, _ := c.Dims() + + if r != len(sigma) { + panic(mat.ErrShape) + } + for i, sx := range sigma { + // Ensure that the diagonal has exactly sigma squared. + c.SetSym(i, i, sx*sx) + for j := i + 1; j < r; j++ { + v := c.At(i, j) + c.SetSym(i, j, v*sx*sigma[j]) + } + } +} + +// Mahalanobis computes the Mahalanobis distance +// D = sqrt((x-y)^T * Σ^-1 * (x-y)) +// between the column vectors x and y given the cholesky decomposition of Σ. +// Mahalanobis returns NaN if the linear solve fails. +// +// See https://en.wikipedia.org/wiki/Mahalanobis_distance for more information. +func Mahalanobis(x, y mat.Vector, chol *mat.Cholesky) float64 { + var diff mat.VecDense + diff.SubVec(x, y) + var tmp mat.VecDense + err := chol.SolveVecTo(&tmp, &diff) + if err != nil { + return math.NaN() + } + return math.Sqrt(mat.Dot(&tmp, &diff)) +} diff --git a/vendor/gonum.org/v1/gonum/unit/absorbedradioactivedose.go b/vendor/gonum.org/v1/gonum/unit/absorbedradioactivedose.go new file mode 100644 index 0000000..01ef40d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/absorbedradioactivedose.go @@ -0,0 +1,71 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// AbsorbedRadioactiveDose is a measure of absorbed dose of ionizing radiation in grays. +type AbsorbedRadioactiveDose float64 + +const Gray AbsorbedRadioactiveDose = 1 + +// Unit converts the AbsorbedRadioactiveDose to a *Unit +func (a AbsorbedRadioactiveDose) Unit() *Unit { + return New(float64(a), Dimensions{ + LengthDim: 2, + TimeDim: -2, + }) +} + +// AbsorbedRadioactiveDose allows AbsorbedRadioactiveDose to implement a AbsorbedRadioactiveDoseer interface +func (a AbsorbedRadioactiveDose) AbsorbedRadioactiveDose() AbsorbedRadioactiveDose { + return a +} + +// From converts the unit into the receiver. 
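
The stat and statmat code above is vendored verbatim from gonum. For reference, a minimal, illustrative sketch of how the weighted estimators and the matrix helpers are meant to be called; it assumes the rest of the vendored gonum tree (gonum/mat in particular) matches upstream, and the data values are made up:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/mat"
        "gonum.org/v1/gonum/stat"
    )

    func main() {
        x := []float64{8, 2, -9, 15, 4}
        w := []float64{2, 2, 6, 7, 1}

        // Weighted mean and unbiased variance (corrected two-pass algorithm).
        mean, variance := stat.MeanVariance(x, w)
        fmt.Printf("mean=%.4f variance=%.4f\n", mean, variance)

        // Sort x ascending while keeping labels and weights aligned.
        labels := []bool{true, false, false, true, true}
        stat.SortWeightedLabeled(x, labels, w)
        fmt.Println(x, labels, w)

        // Covariance matrix of a 4x2 data matrix; dst may start zero-sized.
        data := mat.NewDense(4, 2, []float64{
            1.0, 2.0,
            2.0, 3.5,
            3.0, 3.0,
            4.0, 5.5,
        })
        var cov mat.SymDense
        stat.CovarianceMatrix(&cov, data, nil)
        fmt.Printf("cov =\n%.4v\n", mat.Formatted(&cov))

        // Mahalanobis distance needs the Cholesky factor of the covariance.
        var chol mat.Cholesky
        if ok := chol.Factorize(&cov); !ok {
            fmt.Println("covariance is not positive definite")
            return
        }
        a := mat.NewVecDense(2, []float64{1, 2})
        b := mat.NewVecDense(2, []float64{3, 6})
        fmt.Printf("D = %.4f\n", stat.Mahalanobis(a, b, &chol))
    }
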
From returns an +// error if there is a mismatch in dimension +func (a *AbsorbedRadioactiveDose) From(u Uniter) error { + if !DimensionsMatch(u, Gray) { + *a = AbsorbedRadioactiveDose(math.NaN()) + return errors.New("Dimension mismatch") + } + *a = AbsorbedRadioactiveDose(u.Unit().Value()) + return nil +} + +func (a AbsorbedRadioactiveDose) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", a, float64(a)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Gy" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(a)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(a)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(a)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(a)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Gy)", c, a, float64(a)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/acceleration.go b/vendor/gonum.org/v1/gonum/unit/acceleration.go new file mode 100644 index 0000000..59f9d5b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/acceleration.go @@ -0,0 +1,69 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Acceleration represents an acceleration in metres per second squared. +type Acceleration float64 + +// Unit converts the Acceleration to a *Unit +func (a Acceleration) Unit() *Unit { + return New(float64(a), Dimensions{ + LengthDim: 1, + TimeDim: -2, + }) +} + +// Acceleration allows Acceleration to implement a Accelerationer interface +func (a Acceleration) Acceleration() Acceleration { + return a +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (a *Acceleration) From(u Uniter) error { + if !DimensionsMatch(u, Acceleration(0)) { + *a = Acceleration(math.NaN()) + return errors.New("Dimension mismatch") + } + *a = Acceleration(u.Unit().Value()) + return nil +} + +func (a Acceleration) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", a, float64(a)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " m s^-2" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(a)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(a)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(a)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(a)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g m s^-2)", c, a, float64(a)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/angle.go b/vendor/gonum.org/v1/gonum/unit/angle.go new file mode 100644 index 0000000..ece7b33 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/angle.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
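
Every generated unit type follows the pattern above: a float64-based type, a Unit() method that attaches its SI dimensions, and a From() method that only accepts dimensionally matching quantities (and otherwise sets NaN and returns an error). A small sketch of that round trip, assuming the vendored unit package behaves like upstream gonum:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/unit"
    )

    func main() {
        // A dynamically built quantity with dimensions m s^-2.
        g := unit.New(9.80665, unit.Dimensions{
            unit.LengthDim: 1,
            unit.TimeDim:   -2,
        })

        // The dimensions match Acceleration, so From succeeds.
        var acc unit.Acceleration
        if err := acc.From(g); err != nil {
            fmt.Println("unexpected:", err)
            return
        }
        fmt.Println(acc) // "9.80665 m s^-2"

        // AbsorbedRadioactiveDose has dimensions m^2 s^-2, so this fails
        // and dose is set to NaN.
        var dose unit.AbsorbedRadioactiveDose
        if err := dose.From(g); err != nil {
            fmt.Println("dimension mismatch as expected:", err)
        }
    }
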
+ +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Angle represents an angle in radians. +type Angle float64 + +const Rad Angle = 1 + +// Unit converts the Angle to a *Unit +func (a Angle) Unit() *Unit { + return New(float64(a), Dimensions{ + AngleDim: 1, + }) +} + +// Angle allows Angle to implement a Angleer interface +func (a Angle) Angle() Angle { + return a +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (a *Angle) From(u Uniter) error { + if !DimensionsMatch(u, Rad) { + *a = Angle(math.NaN()) + return errors.New("Dimension mismatch") + } + *a = Angle(u.Unit().Value()) + return nil +} + +func (a Angle) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", a, float64(a)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " rad" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(a)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(a)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(a)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(a)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g rad)", c, a, float64(a)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/area.go b/vendor/gonum.org/v1/gonum/unit/area.go new file mode 100644 index 0000000..d52bf1f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/area.go @@ -0,0 +1,68 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Area represents an area in square metres. +type Area float64 + +// Unit converts the Area to a *Unit +func (a Area) Unit() *Unit { + return New(float64(a), Dimensions{ + LengthDim: 2, + }) +} + +// Area allows Area to implement a Areaer interface +func (a Area) Area() Area { + return a +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (a *Area) From(u Uniter) error { + if !DimensionsMatch(u, Area(0)) { + *a = Area(math.NaN()) + return errors.New("Dimension mismatch") + } + *a = Area(u.Unit().Value()) + return nil +} + +func (a Area) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", a, float64(a)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " m^2" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(a)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(a)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(a)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(a)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g m^2)", c, a, float64(a)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/capacitance.go b/vendor/gonum.org/v1/gonum/unit/capacitance.go new file mode 100644 index 0000000..1ccca35 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/capacitance.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. 
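
The Format methods reserve room for the unit suffix when a width is supplied (the pos(w-utf8.RuneCount(...)) term above), so columns line up when printing tables of quantities. A short sketch of the resulting fmt behaviour; the outputs in the comments follow from the Format implementations and are illustrative:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/unit"
    )

    func main() {
        theta := 0.25 * unit.Rad
        area := unit.Area(12.5)

        fmt.Printf("%v\n", theta)     // "0.25 rad"
        fmt.Printf("%.1f\n", area)    // "12.5 m^2"
        fmt.Printf("%12.1f|\n", area) // "    12.5 m^2|": the width covers the suffix
        fmt.Printf("%#v\n", theta)    // "unit.Angle(0.25)"
        fmt.Printf("%s\n", area)      // unsupported verb: "%!s(unit.Area=12.5 m^2)"
    }
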
+ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Capacitance represents an electrical capacitance in Farads. +type Capacitance float64 + +const Farad Capacitance = 1 + +// Unit converts the Capacitance to a *Unit +func (cp Capacitance) Unit() *Unit { + return New(float64(cp), Dimensions{ + CurrentDim: 2, + LengthDim: -2, + MassDim: -1, + TimeDim: 4, + }) +} + +// Capacitance allows Capacitance to implement a Capacitancer interface +func (cp Capacitance) Capacitance() Capacitance { + return cp +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (cp *Capacitance) From(u Uniter) error { + if !DimensionsMatch(u, Farad) { + *cp = Capacitance(math.NaN()) + return errors.New("Dimension mismatch") + } + *cp = Capacitance(u.Unit().Value()) + return nil +} + +func (cp Capacitance) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cp, float64(cp)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " F" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(cp)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(cp)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(cp)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(cp)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g F)", c, cp, float64(cp)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/charge.go b/vendor/gonum.org/v1/gonum/unit/charge.go new file mode 100644 index 0000000..44eadb5 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/charge.go @@ -0,0 +1,71 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Charge represents an electric charge in Coulombs. +type Charge float64 + +const Coulomb Charge = 1 + +// Unit converts the Charge to a *Unit +func (ch Charge) Unit() *Unit { + return New(float64(ch), Dimensions{ + CurrentDim: 1, + TimeDim: 1, + }) +} + +// Charge allows Charge to implement a Charger interface +func (ch Charge) Charge() Charge { + return ch +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (ch *Charge) From(u Uniter) error { + if !DimensionsMatch(u, Coulomb) { + *ch = Charge(math.NaN()) + return errors.New("Dimension mismatch") + } + *ch = Charge(u.Unit().Value()) + return nil +} + +func (ch Charge) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", ch, float64(ch)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " C" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(ch)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(ch)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(ch)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(ch)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g C)", c, ch, float64(ch)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/conductance.go b/vendor/gonum.org/v1/gonum/unit/conductance.go new file mode 100644 index 0000000..ac1435b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/conductance.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Conductance represents an electrical conductance in Siemens. +type Conductance float64 + +const Siemens Conductance = 1 + +// Unit converts the Conductance to a *Unit +func (co Conductance) Unit() *Unit { + return New(float64(co), Dimensions{ + CurrentDim: 2, + LengthDim: -2, + MassDim: -1, + TimeDim: 3, + }) +} + +// Conductance allows Conductance to implement a Conductancer interface +func (co Conductance) Conductance() Conductance { + return co +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (co *Conductance) From(u Uniter) error { + if !DimensionsMatch(u, Siemens) { + *co = Conductance(math.NaN()) + return errors.New("Dimension mismatch") + } + *co = Conductance(u.Unit().Value()) + return nil +} + +func (co Conductance) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", co, float64(co)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " S" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(co)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(co)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(co)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(co)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g S)", c, co, float64(co)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/atomicmass.go b/vendor/gonum.org/v1/gonum/unit/constant/atomicmass.go new file mode 100644 index 0000000..8898d89 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/atomicmass.go @@ -0,0 +1,13 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
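
Because every typed unit can be lowered to its dynamic *Unit form, derived quantities can be built by multiplication and converted back with From. A sketch using the Charge and Conductance types above; Unit.Mul and unit.Time are assumed to be present in the vendored package, as in upstream gonum:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/unit"
    )

    func main() {
        i := 1.5 * unit.Ampere // A
        t := unit.Time(2)      // s

        // A times s has exactly the dimensions of the Coulomb,
        // so Charge.From accepts the product.
        q := i.Unit().Mul(t.Unit())

        var charge unit.Charge
        if err := charge.From(q); err != nil {
            fmt.Println("unexpected:", err)
            return
        }
        fmt.Println(charge) // "3 C"

        // Conductance has different dimensions, so this conversion fails.
        var cond unit.Conductance
        fmt.Println(cond.From(q)) // "Dimension mismatch"
    }
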
+ +package constant + +import "gonum.org/v1/gonum/unit" + +// AtomicMass is the atomic mass constant (mᵤ), one twelfth of the mass of an unbound atom of carbon-12 at rest and in its ground state. +// The dimension of AtomicMass is kg. The standard uncertainty of the constant is 2e-35 kg. +const AtomicMass = unit.Mass(1.66053904e-27) diff --git a/vendor/gonum.org/v1/gonum/unit/constant/avogadro.go b/vendor/gonum.org/v1/gonum/unit/constant/avogadro.go new file mode 100644 index 0000000..b10cd51 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/avogadro.go @@ -0,0 +1,52 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// Avogadro is the Avogadro constant (A), the number of constituent particles contained in one mole of a substance. +// The dimension of Avogadro is mol^-1. The standard uncertainty of the constant is 7.4e+15 mol^-1. +const Avogadro = avogadroUnits(6.022140857e+23) + +type avogadroUnits float64 + +// Unit converts the avogadroUnits to a *unit.Unit +func (cnst avogadroUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.MoleDim: -1, + }) +} + +func (cnst avogadroUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.avogadroUnits=%v mol^-1)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/boltzmann.go b/vendor/gonum.org/v1/gonum/unit/constant/boltzmann.go new file mode 100644 index 0000000..158005a --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/boltzmann.go @@ -0,0 +1,55 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// Boltzmann is the Boltzmann constant (k), it relates the average relative kinetic energy of particles in a gas with the temperature of the gas. +// The dimensions of Boltzmann are kg m^2 K^-1 s^-2. The standard uncertainty of the constant is 7.9e-30 kg m^2 K^-1 s^-2. 
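
Constants whose dimensions do not correspond to one of the predefined unit types (such as Avogadro and Boltzmann) get their own one-off float64 types with a Format method that prints the value together with its SI dimensions; constants that do match a predefined type (such as AtomicMass) are plain typed values. A brief sketch; the commented outputs are what the vendored Format methods should produce:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/unit/constant"
    )

    func main() {
        fmt.Printf("%v\n", constant.Avogadro)    // value plus dimensions, e.g. "6.022140857e+23 mol^-1"
        fmt.Printf("%.3e\n", constant.Boltzmann) // e.g. "1.381e-23 kg m^2 K^-1 s^-2"
        fmt.Printf("%#v\n", constant.AtomicMass) // "unit.Mass(1.66053904e-27)"
    }
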
+const Boltzmann = boltzmannUnits(1.38064852e-23) + +type boltzmannUnits float64 + +// Unit converts the boltzmannUnits to a *unit.Unit +func (cnst boltzmannUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.MassDim: 1, + unit.LengthDim: 2, + unit.TimeDim: -2, + unit.TemperatureDim: -1, + }) +} + +func (cnst boltzmannUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.boltzmannUnits=%v kg m^2 K^-1 s^-2)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/defined_types.go b/vendor/gonum.org/v1/gonum/unit/constant/defined_types.go new file mode 100644 index 0000000..cf23285 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/defined_types.go @@ -0,0 +1,56 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +package main + +import "gonum.org/v1/gonum/unit" + +var definedTypes = []struct { + unit unit.Uniter + name string +}{ + {unit: unit.AbsorbedRadioactiveDose(1), name: "unit.AbsorbedRadioactiveDose"}, + {unit: unit.Acceleration(1), name: "unit.Acceleration"}, + {unit: unit.Angle(1), name: "unit.Angle"}, + {unit: unit.Area(1), name: "unit.Area"}, + {unit: unit.Capacitance(1), name: "unit.Capacitance"}, + {unit: unit.Charge(1), name: "unit.Charge"}, + {unit: unit.Conductance(1), name: "unit.Conductance"}, + {unit: unit.Current(1), name: "unit.Current"}, + {unit: unit.Dimless(1), name: "unit.Dimless"}, + {unit: unit.Energy(1), name: "unit.Energy"}, + {unit: unit.EquivalentRadioactiveDose(1), name: "unit.EquivalentRadioactiveDose"}, + {unit: unit.Force(1), name: "unit.Force"}, + {unit: unit.Frequency(1), name: "unit.Frequency"}, + {unit: unit.Inductance(1), name: "unit.Inductance"}, + {unit: unit.Length(1), name: "unit.Length"}, + {unit: unit.LuminousIntensity(1), name: "unit.LuminousIntensity"}, + {unit: unit.MagneticFlux(1), name: "unit.MagneticFlux"}, + {unit: unit.MagneticFluxDensity(1), name: "unit.MagneticFluxDensity"}, + {unit: unit.Mass(1), name: "unit.Mass"}, + {unit: unit.Mole(1), name: "unit.Mole"}, + {unit: unit.Power(1), name: "unit.Power"}, + {unit: unit.Pressure(1), name: "unit.Pressure"}, + {unit: unit.Radioactivity(1), name: "unit.Radioactivity"}, + {unit: unit.Resistance(1), name: "unit.Resistance"}, + {unit: unit.Temperature(1), name: "unit.Temperature"}, + {unit: unit.Time(1), name: "unit.Time"}, + {unit: unit.Torque(1), name: "unit.Torque"}, + {unit: unit.Velocity(1), name: "unit.Velocity"}, + {unit: unit.Voltage(1), name: "unit.Voltage"}, + {unit: unit.Volume(1), name: "unit.Volume"}, +} + +func definedEquivalentOf(q unit.Uniter) string { + for _, u := range definedTypes { + if unit.DimensionsMatch(q, u.unit) { + return u.name + } + } + return "" +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/doc.go b/vendor/gonum.org/v1/gonum/unit/constant/doc.go new file mode 100644 
index 0000000..c688bd2 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/doc.go @@ -0,0 +1,12 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run generate_defined_types.go +//go:generate go run generate_constants.go defined_types.go + +// Package constant provides fundamental constants satisfying unit.Uniter. +// +// Constant values reflect the values published at https://physics.nist.gov/cuu/index.html +// and are subject to change when the published values are updated. +package constant diff --git a/vendor/gonum.org/v1/gonum/unit/constant/electricconstant.go b/vendor/gonum.org/v1/gonum/unit/constant/electricconstant.go new file mode 100644 index 0000000..891f069 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/electricconstant.go @@ -0,0 +1,55 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// ElectricConstant is the electric constant (ε₀), the value of the absolute dielectric permittivity of classical vacuum. +// The dimensions of ElectricConstant are A^2 s^4 kg^-1 m^-3. The constant is exact. +const ElectricConstant = electricConstantUnits(8.854187817620389e-12) + +type electricConstantUnits float64 + +// Unit converts the electricConstantUnits to a *unit.Unit +func (cnst electricConstantUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.CurrentDim: 2, + unit.TimeDim: 4, + unit.MassDim: -1, + unit.LengthDim: -3, + }) +} + +func (cnst electricConstantUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.electricConstantUnits=%v A^2 s^4 kg^-1 m^-3)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/elementarycharge.go b/vendor/gonum.org/v1/gonum/unit/constant/elementarycharge.go new file mode 100644 index 0000000..d09bbb8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/elementarycharge.go @@ -0,0 +1,13 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import "gonum.org/v1/gonum/unit" + +// ElementaryCharge, is the elementary charge constant (e), the magnitude of electric charge carried by a single proton or electron. +// The dimensions of ElementaryCharge are A s. The standard uncertainty of the constant is 9.8e-28 A s. 
+const ElementaryCharge = unit.Charge(1.6021766208e-19) diff --git a/vendor/gonum.org/v1/gonum/unit/constant/faraday.go b/vendor/gonum.org/v1/gonum/unit/constant/faraday.go new file mode 100644 index 0000000..dba8d72 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/faraday.go @@ -0,0 +1,54 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// Faraday is the Faraday constant, the magnitude of electric charge per mole of electrons. +// The dimensions of Faraday are A s mol^-1. The standard uncertainty of the constant is 0.00059 A s mol^-1. +const Faraday = faradayUnits(96485.33289) + +type faradayUnits float64 + +// Unit converts the faradayUnits to a *unit.Unit +func (cnst faradayUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.CurrentDim: 1, + unit.TimeDim: 1, + unit.MoleDim: -1, + }) +} + +func (cnst faradayUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.faradayUnits=%v A s mol^-1)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/finestructure.go b/vendor/gonum.org/v1/gonum/unit/constant/finestructure.go new file mode 100644 index 0000000..92cb175 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/finestructure.go @@ -0,0 +1,12 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import "gonum.org/v1/gonum/unit" + +// FineStructure is the fine structure constant (α), it describes the strength of the electromagnetic interaction between elementary charged particles. The standard uncertainty of the constant is 1.7e-12 . +const FineStructure = unit.Dimless(0.0072973525664) diff --git a/vendor/gonum.org/v1/gonum/unit/constant/generate_constants.go b/vendor/gonum.org/v1/gonum/unit/constant/generate_constants.go new file mode 100644 index 0000000..555a690 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/generate_constants.go @@ -0,0 +1,334 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
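
The constants interoperate with the unit package through their Unit() methods, so they can take part in dimension-checked arithmetic. An illustrative sketch computing the thermal energy scale kT; unit.Temperature, unit.Energy and Unit.Mul are assumed to match upstream gonum:

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/unit"
        "gonum.org/v1/gonum/unit/constant"
    )

    func main() {
        // Thermal energy at room temperature: E = k * T.
        kT := constant.Boltzmann.Unit().Mul(unit.Temperature(293.15).Unit())

        // kg m^2 s^-2 is the Joule, so the typed conversion succeeds.
        var e unit.Energy
        if err := e.From(kT); err != nil {
            fmt.Println("unexpected:", err)
            return
        }
        fmt.Printf("kT = %.3e\n", e) // "kT = 4.047e-21 J"
    }
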
+ +// +build ignore + +package main + +import ( + "bytes" + "go/format" + "log" + "math" + "os" + "strings" + "text/template" + + "gonum.org/v1/gonum/unit" +) + +const ( + elementaryCharge = 1.6021766208e-19 + fineStructure = 7.2973525664e-3 + lightSpeed = 2.99792458e8 + planck = 6.626070040e-34 +) + +var constants = []Constant{ + { + Name: "AtomicMass", Value: 1.660539040e-27, + Dimensions: []Dimension{{massName, 1}}, + Comment: "AtomicMass is the atomic mass constant (mᵤ), one twelfth of the mass of an unbound atom of carbon-12 at rest and in its ground state.", + Uncertainty: 0.000000020e-27, + }, + { + Name: "Avogadro", Value: 6.022140857e23, + Dimensions: []Dimension{{moleName, -1}}, + Comment: "Avogadro is the Avogadro constant (A), the number of constituent particles contained in one mole of a substance.", + Uncertainty: 0.000000074e23, + }, + { + Name: "Boltzmann", Value: 1.38064852e-23, + Dimensions: []Dimension{{massName, 1}, {lengthName, 2}, {timeName, -2}, {temperatureName, -1}}, + Comment: "Boltzmann is the Boltzmann constant (k), it relates the average relative kinetic energy of particles in a gas with the temperature of the gas.", + Uncertainty: 0.00000079e-23, + }, + { + Name: "ElectricConstant", Value: 1 / (4 * math.Pi * 1e-7 * lightSpeed * lightSpeed), + Dimensions: []Dimension{{currentName, 2}, {timeName, 4}, {massName, -1}, {lengthName, -3}}, + Comment: "ElectricConstant is the electric constant (ε₀), the value of the absolute dielectric permittivity of classical vacuum.", + }, + { + Name: "ElementaryCharge", Value: elementaryCharge, + Dimensions: []Dimension{{currentName, 1}, {timeName, 1}}, + Comment: "ElementaryCharge, is the elementary charge constant (e), the magnitude of electric charge carried by a single proton or electron.", + Uncertainty: 0.0000000098e-19, + }, + { + Name: "Faraday", Value: 96485.33289, + Dimensions: []Dimension{{currentName, 1}, {timeName, 1}, {moleName, -1}}, + Comment: "Faraday is the Faraday constant, the magnitude of electric charge per mole of electrons.", + Uncertainty: 0.00059, + }, + { + Name: "FineStructure", Value: fineStructure, + Comment: "FineStructure is the fine structure constant (α), it describes the strength of the electromagnetic interaction between elementary charged particles.", + Uncertainty: 0.0000000017e-3, + }, + { + Name: "Gravitational", Value: 6.67408e-11, + Dimensions: []Dimension{{massName, -1}, {lengthName, 3}, {timeName, -2}}, + Comment: "Gravitational is the universal gravitational constant (G), the proportionality constant connecting the gravitational force between two bodies.", + Uncertainty: 0.00031e-11, + }, + { + Name: "LightSpeedInVacuum", Value: lightSpeed, + Dimensions: []Dimension{{lengthName, 1}, {timeName, -1}}, + Comment: "LightSpeedInVacuum is the c constant, the speed of light in a vacuum.", + }, + { + Name: "MagneticConstant", Value: 2 * fineStructure * planck / (elementaryCharge * elementaryCharge * lightSpeed), + Dimensions: []Dimension{{currentName, 2}, {timeName, 4}, {massName, -1}, {lengthName, -3}}, + Comment: "MagneticConstant is the magnetic constant (μ₀), the magnetic permeability in a classical vacuum.", + }, + { + Name: "Planck", Value: planck, + Dimensions: []Dimension{{massName, 1}, {lengthName, 2}, {timeName, -1}}, + Comment: "Planck is the Planck constant (h), it relates the energy carried by a photon to its frequency.", + Uncertainty: 0.000000081e-34, + }, + { + Name: "StandardGravity", Value: 9.80665, + Dimensions: []Dimension{{lengthName, 1}, {timeName, -2}}, + Comment: 
"StandardGravity is the standard gravity constant (g₀), the nominal gravitational acceleration of an object in a vacuum near the surface of the Earth", + }, +} + +const ( + angleName = "AngleDim" + currentName = "CurrentDim" + lengthName = "LengthDim" + luminousIntensityName = "LuminousIntensityDim" + massName = "MassDim" + moleName = "MoleDim" + temperatureName = "TemperatureDim" + timeName = "TimeDim" +) + +var dimOf = map[string]unit.Dimension{ + "AngleDim": unit.AngleDim, + "CurrentDim": unit.CurrentDim, + "LengthDim": unit.LengthDim, + "LuminousIntensityDim": unit.LuminousIntensityDim, + "MassDim": unit.MassDim, + "MoleDim": unit.MoleDim, + "TemperatureDim": unit.TemperatureDim, + "TimeDim": unit.TimeDim, +} + +type Constant struct { + Name string + Value float64 + Dimensions []Dimension + Comment string + Uncertainty float64 +} + +type Dimension struct { + Name string + Power int +} + +func (c Constant) IsDefined() bool { + return definedEquivalentOf(unit.New(1, c.dimensions())) != "" +} + +func (c Constant) Type() string { + typ := definedEquivalentOf(unit.New(1, c.dimensions())) + if typ == "" { + return strings.ToLower(c.Name[:1]) + c.Name[1:] + "Units" + } + return typ +} + +func (c Constant) Units() string { + return c.dimensions().String() +} + +func (c Constant) dimensions() unit.Dimensions { + dims := make(unit.Dimensions) + for _, d := range c.Dimensions { + dims[dimOf[d.Name]] = d.Power + } + return dims +} + +// Generate a file for each of the constants. +func main() { + for _, c := range constants { + generate(c) + generateTest(c) + } +} + +const baseUnitTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import "gonum.org/v1/gonum/unit" + +// {{.Comment}}{{if .Dimensions}} +// {{$n := len .Dimensions}}The dimension{{if gt $n 1}}s{{end}} of {{.Name}} {{if eq $n 1}}is{{else}}are{{end}} {{.Units}}.{{end}} {{if not .Uncertainty}}The constant is exact.{{else}}The standard uncertainty of the constant is {{.Uncertainty}} {{.Units}}.{{end}} +const {{.Name}} = {{.Type}}({{.Value}}) +` + +var baseUnit = template.Must(template.New("base").Parse(baseUnitTemplate)) + +const methodTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// {{.Comment}} +// {{$n := len .Dimensions}}The dimension{{if gt $n 1}}s{{end}} of {{.Name}} {{if eq $n 1}}is{{else}}are{{end}} {{.Units}}. 
{{if not .Uncertainty}}The constant is exact.{{else}}The standard uncertainty of the constant is {{.Uncertainty}} {{.Units}}.{{end}} +const {{.Name}} = {{.Type}}({{.Value}}) + +type {{.Type}} float64 + +// Unit converts the {{.Type}} to a *unit.Unit +func (cnst {{.Type}}) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + {{range .Dimensions}} unit.{{.Name}}: {{.Power}}, + {{end}} + }) +} + +func (cnst {{.Type}}) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.{{.Type}}=%v {{.Units}})", float64(cnst)) + } +} +` + +var methods = template.Must(template.New("methods").Parse(methodTemplate)) + +func generate(c Constant) { + lowerName := strings.ToLower(c.Name) + filename := lowerName + ".go" + f, err := os.Create(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + + var buf bytes.Buffer + + if c.IsDefined() { + err = baseUnit.Execute(&buf, c) + if err != nil { + log.Fatal(err) + } + } else { + err = methods.Execute(&buf, c) + if err != nil { + log.Fatal(err) + } + } + + b, err := format.Source(buf.Bytes()) + if err != nil { + f.Write(buf.Bytes()) // This is here to debug bad format. + log.Fatalf("error formatting %q: %s", c.Name, err) + } + + f.Write(b) +} + +const testTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + "testing" +) + +func Test{{.Name}}Format(t *testing.T) { + for _, test := range []struct{ + format string + want string + }{ + {"%v", "{{.Value}} {{.Units}}"}, + {"%.1v", "{{printf "%.1v" .Value}} {{.Units}}"}, + {"%50.1v", "{{$s := printf "%.1v %s" .Value .Units}}{{printf "%50s" $s}}"}, + {"%50v", "{{$s := printf "%v %s" .Value .Units}}{{printf "%50s" $s}}"}, + {"%1v", "{{.Value}} {{.Units}}"}, + {"%#v", "constant.{{.Type}}({{.Value}})"}, + {"%s", "%!s(constant.{{.Type}}={{.Value}} {{.Units}})"}, + } { + got := fmt.Sprintf(test.format, {{.Name}}) + if got != test.want { + t.Errorf("Format %q: got: %q want: %q", test.format, got, test.want) + } + } +} +` + +var tests = template.Must(template.New("test").Parse(testTemplate)) + +func generateTest(c Constant) { + if c.IsDefined() { + return + } + + lowerName := strings.ToLower(c.Name) + filename := lowerName + "_test.go" + f, err := os.Create(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + + var buf bytes.Buffer + + err = tests.Execute(&buf, c) + if err != nil { + log.Fatal(err) + } + + b, err := format.Source(buf.Bytes()) + if err != nil { + f.Write(buf.Bytes()) // This is here to debug bad format. 
+ log.Fatalf("error formatting test for %q: %s", c.Name, err) + } + + f.Write(b) +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/generate_defined_types.go b/vendor/gonum.org/v1/gonum/unit/constant/generate_defined_types.go new file mode 100644 index 0000000..5e32891 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/generate_defined_types.go @@ -0,0 +1,110 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +package main + +import ( + "bytes" + "go/ast" + "go/format" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "text/template" +) + +const definedTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +package main + +import "gonum.org/v1/gonum/unit" + +var definedTypes = []struct{ + unit unit.Uniter + name string +}{ +{{range .}} {unit: unit.{{.}}(1), name: "unit.{{.}}"{{"}"}}, +{{end}}} + +func definedEquivalentOf(q unit.Uniter) string { + for _, u := range definedTypes { + if unit.DimensionsMatch(q, u.unit) { + return u.name + } + } + return "" +} +` + +var defined = template.Must(template.New("defined").Parse(definedTemplate)) + +func main() { + names, err := filepath.Glob("../*.go") + if err != nil { + log.Fatal(err) + } + + var units []string + fset := token.NewFileSet() + for _, fn := range names { + if strings.Contains(fn, "_test") { + continue + } + b, err := ioutil.ReadFile(fn) + if bytes.Contains(b, []byte("+build ignore")) { + continue + } + f, err := parser.ParseFile(fset, fn, nil, 0) + if err != nil { + log.Fatal("failed to parse %q: %v", fn, err) // parse error + } + if f.Name.Name != "unit" { + log.Fatalf("not parsing unit package: %q", f.Name.Name) + } + for _, d := range f.Decls { + if decl, ok := d.(*ast.GenDecl); ok { + for _, s := range decl.Specs { + if ts, ok := s.(*ast.TypeSpec); ok { + if id, ok := ts.Type.(*ast.Ident); !ok || id.Name != "float64" { + continue + } + units = append(units, ts.Name.Name) + } + } + } + } + } + + f, err := os.Create("defined_types.go") + if err != nil { + log.Fatal(err) + } + defer f.Close() + + var buf bytes.Buffer + err = defined.Execute(&buf, units) + if err != nil { + log.Fatal(err) + } + + b, err := format.Source(buf.Bytes()) + if err != nil { + f.Write(buf.Bytes()) // This is here to debug bad format. + log.Fatalf("error formatting %q: %s", f.Name(), err) + } + + f.Write(b) +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/gravitational.go b/vendor/gonum.org/v1/gonum/unit/constant/gravitational.go new file mode 100644 index 0000000..e62e27d --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/gravitational.go @@ -0,0 +1,54 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// Gravitational is the universal gravitational constant (G), the proportionality constant connecting the gravitational force between two bodies. +// The dimensions of Gravitational are m^3 kg^-1 s^-2. 
The standard uncertainty of the constant is 3.1e-15 m^3 kg^-1 s^-2. +const Gravitational = gravitationalUnits(6.67408e-11) + +type gravitationalUnits float64 + +// Unit converts the gravitationalUnits to a *unit.Unit +func (cnst gravitationalUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.MassDim: -1, + unit.LengthDim: 3, + unit.TimeDim: -2, + }) +} + +func (cnst gravitationalUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.gravitationalUnits=%v m^3 kg^-1 s^-2)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/lightspeedinvacuum.go b/vendor/gonum.org/v1/gonum/unit/constant/lightspeedinvacuum.go new file mode 100644 index 0000000..0e07b86 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/lightspeedinvacuum.go @@ -0,0 +1,13 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import "gonum.org/v1/gonum/unit" + +// LightSpeedInVacuum is the c constant, the speed of light in a vacuum. +// The dimensions of LightSpeedInVacuum are m s^-1. The constant is exact. +const LightSpeedInVacuum = unit.Velocity(2.99792458e+08) diff --git a/vendor/gonum.org/v1/gonum/unit/constant/magneticconstant.go b/vendor/gonum.org/v1/gonum/unit/constant/magneticconstant.go new file mode 100644 index 0000000..61469b3 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/magneticconstant.go @@ -0,0 +1,55 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// MagneticConstant is the magnetic constant (μ₀), the magnetic permeability in a classical vacuum. +// The dimensions of MagneticConstant are A^2 s^4 kg^-1 m^-3. The constant is exact. 
+const MagneticConstant = magneticConstantUnits(1.2566370614692394e-06) + +type magneticConstantUnits float64 + +// Unit converts the magneticConstantUnits to a *unit.Unit +func (cnst magneticConstantUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.CurrentDim: 2, + unit.TimeDim: 4, + unit.MassDim: -1, + unit.LengthDim: -3, + }) +} + +func (cnst magneticConstantUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.magneticConstantUnits=%v A^2 s^4 kg^-1 m^-3)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/planck.go b/vendor/gonum.org/v1/gonum/unit/constant/planck.go new file mode 100644 index 0000000..e4d0e82 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/planck.go @@ -0,0 +1,54 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import ( + "fmt" + + "gonum.org/v1/gonum/unit" +) + +// Planck is the Planck constant (h), it relates the energy carried by a photon to its frequency. +// The dimensions of Planck are kg m^2 s^-1. The standard uncertainty of the constant is 8.1e-42 kg m^2 s^-1. +const Planck = planckUnits(6.62607004e-34) + +type planckUnits float64 + +// Unit converts the planckUnits to a *unit.Unit +func (cnst planckUnits) Unit() *unit.Unit { + return unit.New(float64(cnst), unit.Dimensions{ + unit.MassDim: 1, + unit.LengthDim: 2, + unit.TimeDim: -1, + }) +} + +func (cnst planckUnits) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", cnst, float64(cnst)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, cnst.Unit()) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, cnst.Unit()) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, cnst.Unit()) + default: + fmt.Fprintf(fs, "%"+string(c), cnst.Unit()) + } + default: + fmt.Fprintf(fs, "%%!"+string(c)+"(constant.planckUnits=%v kg m^2 s^-1)", float64(cnst)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/constant/standardgravity.go b/vendor/gonum.org/v1/gonum/unit/constant/standardgravity.go new file mode 100644 index 0000000..efea6d6 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/constant/standardgravity.go @@ -0,0 +1,13 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit/constant”; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package constant + +import "gonum.org/v1/gonum/unit" + +// StandardGravity is the standard gravity constant (g₀), the nominal gravitational acceleration of an object in a vacuum near the surface of the Earth +// The dimensions of StandardGravity are m s^-2. 
The constant is exact. +const StandardGravity = unit.Acceleration(9.80665) diff --git a/vendor/gonum.org/v1/gonum/unit/current.go b/vendor/gonum.org/v1/gonum/unit/current.go new file mode 100644 index 0000000..139043b --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/current.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Current represents a current in Amperes. +type Current float64 + +const Ampere Current = 1 + +// Unit converts the Current to a *Unit +func (i Current) Unit() *Unit { + return New(float64(i), Dimensions{ + CurrentDim: 1, + }) +} + +// Current allows Current to implement a Currenter interface +func (i Current) Current() Current { + return i +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (i *Current) From(u Uniter) error { + if !DimensionsMatch(u, Ampere) { + *i = Current(math.NaN()) + return errors.New("Dimension mismatch") + } + *i = Current(u.Unit().Value()) + return nil +} + +func (i Current) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", i, float64(i)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " A" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(i)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(i)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(i)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(i)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g A)", c, i, float64(i)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/dimless.go b/vendor/gonum.org/v1/gonum/unit/dimless.go new file mode 100644 index 0000000..dc044a4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/dimless.go @@ -0,0 +1,65 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" +) + +// Dimless represents a dimensionless constant +type Dimless float64 + +const ( + One Dimless = 1.0 +) + +// Unit converts the Dimless to a unit +func (d Dimless) Unit() *Unit { + return New(float64(d), Dimensions{}) +} + +// Dimless allows Dimless to implement a Dimlesser interface +func (d Dimless) Dimless() Dimless { + return d +} + +// From converts the unit to a dimless. 
Returns an error if there +// is a mismatch in dimension +func (d *Dimless) From(u *Unit) error { + if !DimensionsMatch(u, One) { + (*d) = Dimless(math.NaN()) + return errors.New("Dimension mismatch") + } + (*d) = Dimless(u.Unit().Value()) + return nil +} + +func (d Dimless) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", d, float64(d)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), w, p, float64(d)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(d)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), w, float64(d)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(d)) + } + default: + fmt.Fprintf(fs, "%%!%c(%T=%g)", c, d, float64(d)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/doc.go b/vendor/gonum.org/v1/gonum/unit/doc.go new file mode 100644 index 0000000..631531f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/doc.go @@ -0,0 +1,118 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run generate_unit.go + +// Package unit provides a set of types and constants that facilitate +// the use of the International System of Units (SI). +// +// The unit package provides two main functionalities: compile-time type-safe +// base SI units and common derived units; and a system for dynamically +// extensible user-defined units. +// +// Static SI units +// +// This package provides a number of types representing either an SI base +// unit or a common combination of base units, named for the physical quantity +// it represents (Length, Mass, Pressure, etc.). Each type is defined from +// float64. The value of the float64 represents the quantity of that unit as +// expressed in SI base units (kilogram, metre, Pascal, etc.). For example, +// +// height := 1.6 * unit.Metre +// acc := unit.Acceleration(9.8) +// +// creates a variable named 'height' with a value of 1.6 metres, and +// a variable named 'acc' with a value of 9.8 metres per second squared. +// These types can be used to add compile-time safety to code. For +// example, +// +// func unitVolume(t unit.Temperature, p unit.Pressure) unit.Volume { +// ... +// } +// +// func main(){ +// t := 300 * unit.Kelvin +// p := 500 * unit.Kilo * unit.Pascal +// v := unitVolume(p, t) // compile-time error +// } +// +// gives a compile-time error (temperature type does not match pressure type) +// while the corresponding code using float64 runs without error. +// +// func float64Volume(temperature, pressure float64) float64 { +// ... +// } +// +// func main(){ +// t := 300.0 // Kelvin +// p := 500000.0 // Pascals +// v := float64Volume(p, t) // no error +// } +// +// Many types have constants defined representing named SI units (Metre, +// Kilogram, etc. ) or SI derived units (Pascal, Hz, etc.). The unit package +// additionally provides untyped constants for SI prefixes, so the following +// are all equivalent. +// +// l := 0.001 * unit.Metre +// k := 1 * unit.Milli * unit.Metre +// j := unit.Length(0.001) +// +// Additional SI-derived static units can also be defined by adding types that +// satisfy the Uniter interface described below. +// +// Dynamic user-extensible unit system +// +// The unit package also provides the Unit type, a representation of a general +// dimensional value. 
Unit can be used to help prevent errors of dimensionality +// when multiplying or dividing dimensional numbers defined a run time. New +// variables of type Unit can be created with the New function and the +// Dimensions map. For example, the code +// +// rate := unit.New(1 * unit.Milli, Dimensions{MoleDim: 1, TimeDim: -1}) +// +// creates a variable "rate" which has a value of 1e-3 mol/s. Methods of +// unit can be used to modify this value, for example: +// +// rate.Mul(1 * unit.Centi * unit.Metre).Div(1 * unit.Milli * unit.Volt) +// +// To convert the unit back into a typed float64 value, the From methods +// of the dimensional types should be used. From will return an error if the +// dimensions do not match. +// +// var energy unit.Energy +// err := energy.From(acc) +// +// Domain-specific problems may need custom dimensions, and for this purpose +// NewDimension should be used to help avoid accidental overlap between +// packages. For example, results from a blood test may be measured in +// "White blood cells per slide". In this case, NewDimension should be +// used to create a 'WhiteBloodCell' dimension. NewDimension takes in a +// string which will be used for printing that dimension, and will return +// a unique dimension number. +// +// wbc := unit.NewDimension("WhiteBloodCell") +// +// NewDimension should not be used, however, to create the unit of 'Slide', +// because in this case slide is just a measurement of liquid volume. Instead, +// a constant could be defined. +// +// const Slide unit.Volume = 0.1 * unit.Micro * unit.Litre +// +// Note that unit cannot catch all errors related to dimensionality. +// Different physical ideas are sometimes expressed with the same dimensions +// and unit is incapable of catching these mismatches. For example, energy and +// torque are both expressed as force times distance (Newton-metres in SI), +// but it is wrong to say that a torque of 10 N·m is the same as 10 J, even +// though the dimensions agree. Despite this, using the defined types to +// represent units can help to catch errors at compile-time. For example, +// using unit.Torque allows you to define a statically typed function like so +// +// func LeverLength(apply unit.Force, want unit.Torque) unit.Length { +// return unit.Length(float64(want)/float64(apply)) +// } +// +// This will prevent an energy value being provided to LeverLength in place +// of a torque value. +package unit // import "gonum.org/v1/gonum/unit" diff --git a/vendor/gonum.org/v1/gonum/unit/energy.go b/vendor/gonum.org/v1/gonum/unit/energy.go new file mode 100644 index 0000000..c98503f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/energy.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Energy represents a quantity of energy in Joules. +type Energy float64 + +const Joule Energy = 1 + +// Unit converts the Energy to a *Unit +func (e Energy) Unit() *Unit { + return New(float64(e), Dimensions{ + LengthDim: 2, + MassDim: 1, + TimeDim: -2, + }) +} + +// Energy allows Energy to implement a Energyer interface +func (e Energy) Energy() Energy { + return e +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (e *Energy) From(u Uniter) error { + if !DimensionsMatch(u, Joule) { + *e = Energy(math.NaN()) + return errors.New("Dimension mismatch") + } + *e = Energy(u.Unit().Value()) + return nil +} + +func (e Energy) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", e, float64(e)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " J" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(e)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(e)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(e)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(e)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g J)", c, e, float64(e)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/equivalentradioactivedose.go b/vendor/gonum.org/v1/gonum/unit/equivalentradioactivedose.go new file mode 100644 index 0000000..b8764be --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/equivalentradioactivedose.go @@ -0,0 +1,71 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// EquivalentRadioactiveDose is a measure of equivalent dose of ionizing radiation in sieverts. +type EquivalentRadioactiveDose float64 + +const Sievert EquivalentRadioactiveDose = 1 + +// Unit converts the EquivalentRadioactiveDose to a *Unit +func (a EquivalentRadioactiveDose) Unit() *Unit { + return New(float64(a), Dimensions{ + LengthDim: 2, + TimeDim: -2, + }) +} + +// EquivalentRadioactiveDose allows EquivalentRadioactiveDose to implement a EquivalentRadioactiveDoseer interface +func (a EquivalentRadioactiveDose) EquivalentRadioactiveDose() EquivalentRadioactiveDose { + return a +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (a *EquivalentRadioactiveDose) From(u Uniter) error { + if !DimensionsMatch(u, Sievert) { + *a = EquivalentRadioactiveDose(math.NaN()) + return errors.New("Dimension mismatch") + } + *a = EquivalentRadioactiveDose(u.Unit().Value()) + return nil +} + +func (a EquivalentRadioactiveDose) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", a, float64(a)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Sy" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(a)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(a)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(a)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(a)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Sy)", c, a, float64(a)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/force.go b/vendor/gonum.org/v1/gonum/unit/force.go new file mode 100644 index 0000000..91f7ddc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/force.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. 
+ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Force represents a force in Newtons. +type Force float64 + +const Newton Force = 1 + +// Unit converts the Force to a *Unit +func (f Force) Unit() *Unit { + return New(float64(f), Dimensions{ + LengthDim: 1, + MassDim: 1, + TimeDim: -2, + }) +} + +// Force allows Force to implement a Forcer interface +func (f Force) Force() Force { + return f +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (f *Force) From(u Uniter) error { + if !DimensionsMatch(u, Newton) { + *f = Force(math.NaN()) + return errors.New("Dimension mismatch") + } + *f = Force(u.Unit().Value()) + return nil +} + +func (f Force) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", f, float64(f)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " N" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(f)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(f)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(f)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(f)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g N)", c, f, float64(f)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/frequency.go b/vendor/gonum.org/v1/gonum/unit/frequency.go new file mode 100644 index 0000000..5fecc86 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/frequency.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Frequency represents a frequency in Hertz. +type Frequency float64 + +const Hertz Frequency = 1 + +// Unit converts the Frequency to a *Unit +func (f Frequency) Unit() *Unit { + return New(float64(f), Dimensions{ + TimeDim: -1, + }) +} + +// Frequency allows Frequency to implement a Frequencyer interface +func (f Frequency) Frequency() Frequency { + return f +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (f *Frequency) From(u Uniter) error { + if !DimensionsMatch(u, Hertz) { + *f = Frequency(math.NaN()) + return errors.New("Dimension mismatch") + } + *f = Frequency(u.Unit().Value()) + return nil +} + +func (f Frequency) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", f, float64(f)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Hz" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(f)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(f)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(f)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(f)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Hz)", c, f, float64(f)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/generate_unit.go b/vendor/gonum.org/v1/gonum/unit/generate_unit.go new file mode 100644 index 0000000..be204b0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/generate_unit.go @@ -0,0 +1,626 @@ +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +package main + +import ( + "bytes" + "go/format" + "log" + "os" + "strings" + "text/template" + + "gonum.org/v1/gonum/unit" +) + +type Unit struct { + DimensionName string + Receiver string + PowerOffset int // from normal (for example, mass base unit is kg, not g) + PrintString string // print string for the unit (kg for mass) + ExtraConstant []Constant + Name string + TypeComment string // text to comment the type + Dimensions []Dimension + ErForm string // for Xxxer interface +} + +type Dimension struct { + Name string + Power int +} + +func (u Unit) Units() string { + dims := make(unit.Dimensions) + for _, d := range u.Dimensions { + dims[dimOf[d.Name]] = d.Power + } + return dims.String() +} + +const ( + AngleName string = "AngleDim" + CurrentName string = "CurrentDim" + LengthName string = "LengthDim" + LuminousIntensityName string = "LuminousIntensityDim" + MassName string = "MassDim" + MoleName string = "MoleDim" + TemperatureName string = "TemperatureDim" + TimeName string = "TimeDim" +) + +var dimOf = map[string]unit.Dimension{ + "AngleDim": unit.AngleDim, + "CurrentDim": unit.CurrentDim, + "LengthDim": unit.LengthDim, + "LuminousIntensityDim": unit.LuminousIntensityDim, + "MassDim": unit.MassDim, + "MoleDim": unit.MoleDim, + "TemperatureDim": unit.TemperatureDim, + "TimeDim": unit.TimeDim, +} + +type Constant struct { + Name string + Value string +} + +type Prefix struct { + Name string + Power int +} + +var Units = []Unit{ + // Base units. 
+ { + DimensionName: "Angle", + Receiver: "a", + PrintString: "rad", + Name: "Rad", + TypeComment: "Angle represents an angle in radians", + Dimensions: []Dimension{ + {Name: AngleName, Power: 1}, + }, + }, + { + DimensionName: "Current", + Receiver: "i", + PrintString: "A", + Name: "Ampere", + TypeComment: "Current represents a current in Amperes", + Dimensions: []Dimension{ + {Name: CurrentName, Power: 1}, + }, + }, + { + DimensionName: "Length", + Receiver: "l", + PrintString: "m", + Name: "Metre", + TypeComment: "Length represents a length in metres", + Dimensions: []Dimension{ + {Name: LengthName, Power: 1}, + }, + }, + { + DimensionName: "LuminousIntensity", + Receiver: "j", + PrintString: "cd", + Name: "Candela", + TypeComment: "Candela represents a luminous intensity in candela", + Dimensions: []Dimension{ + {Name: LuminousIntensityName, Power: 1}, + }, + }, + { + DimensionName: "Mass", + Receiver: "m", + PowerOffset: -3, + PrintString: "kg", + Name: "Gram", + TypeComment: "Mass represents a mass in kilograms", + ExtraConstant: []Constant{ + {Name: "Kilogram", Value: "Kilo * Gram"}, + }, + Dimensions: []Dimension{ + {Name: MassName, Power: 1}, + }, + }, + { + DimensionName: "Mole", + Receiver: "n", + PrintString: "mol", + Name: "Mol", + TypeComment: "Mole represents an amount in moles", + Dimensions: []Dimension{ + {Name: MoleName, Power: 1}, + }, + }, + { + DimensionName: "Temperature", + Receiver: "t", + PrintString: "K", + Name: "Kelvin", + TypeComment: "Temperature represents a temperature in Kelvin", + Dimensions: []Dimension{ + {Name: TemperatureName, Power: 1}, + }, + ErForm: "Temperaturer", + }, + { + DimensionName: "Time", + Receiver: "t", + PrintString: "s", + Name: "Second", + TypeComment: "Time represents a duration in seconds", + ExtraConstant: []Constant{ + {Name: "Minute", Value: "60 * Second"}, + {Name: "Hour", Value: "60 * Minute"}, + }, + Dimensions: []Dimension{ + {Name: TimeName, Power: 1}, + }, + ErForm: "Timer", + }, + + // Derived units. 
+ { + DimensionName: "AbsorbedRadioactiveDose", + Receiver: "a", + PrintString: "Gy", + Name: "Gray", + TypeComment: "AbsorbedRadioactiveDose is a measure of absorbed dose of ionizing radiation in grays", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "Acceleration", + Receiver: "a", + PrintString: "m s^-2", + TypeComment: "Acceleration represents an acceleration in metres per second squared", + Dimensions: []Dimension{ + {Name: LengthName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "Area", + Receiver: "a", + PrintString: "m^2", + TypeComment: "Area represents an area in square metres", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + }, + }, + { + DimensionName: "Radioactivity", + Receiver: "r", + PrintString: "Bq", + Name: "Becquerel", + TypeComment: "Radioactivity represents a rate of radioactive decay in becquerels", + Dimensions: []Dimension{ + {Name: TimeName, Power: -1}, + }, + }, + { + DimensionName: "Capacitance", + Receiver: "cp", + PrintString: "F", + Name: "Farad", + TypeComment: "Capacitance represents an electrical capacitance in Farads", + Dimensions: []Dimension{ + {Name: CurrentName, Power: 2}, + {Name: LengthName, Power: -2}, + {Name: MassName, Power: -1}, + {Name: TimeName, Power: 4}, + }, + ErForm: "Capacitancer", + }, + { + DimensionName: "Charge", + Receiver: "ch", + PrintString: "C", + Name: "Coulomb", + TypeComment: "Charge represents an electric charge in Coulombs", + Dimensions: []Dimension{ + {Name: CurrentName, Power: 1}, + {Name: TimeName, Power: 1}, + }, + ErForm: "Charger", + }, + { + DimensionName: "Conductance", + Receiver: "co", + PrintString: "S", + Name: "Siemens", + TypeComment: "Conductance represents an electrical conductance in Siemens", + Dimensions: []Dimension{ + {Name: CurrentName, Power: 2}, + {Name: LengthName, Power: -2}, + {Name: MassName, Power: -1}, + {Name: TimeName, Power: 3}, + }, + ErForm: "Conductancer", + }, + { + DimensionName: "EquivalentRadioactiveDose", + Receiver: "a", + PrintString: "Sy", + Name: "Sievert", + TypeComment: "EquivalentRadioactiveDose is a measure of equivalent dose of ionizing radiation in sieverts", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "Energy", + Receiver: "e", + PrintString: "J", + Name: "Joule", + TypeComment: "Energy represents a quantity of energy in Joules", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "Frequency", + Receiver: "f", + PrintString: "Hz", + Name: "Hertz", + TypeComment: "Frequency represents a frequency in Hertz", + Dimensions: []Dimension{ + {Name: TimeName, Power: -1}, + }, + }, + { + DimensionName: "Force", + Receiver: "f", + PrintString: "N", + Name: "Newton", + TypeComment: "Force represents a force in Newtons", + Dimensions: []Dimension{ + {Name: LengthName, Power: 1}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + ErForm: "Forcer", + }, + { + DimensionName: "Inductance", + Receiver: "i", + PrintString: "H", + Name: "Henry", + TypeComment: "Inductance represents an electrical inductance in Henry", + Dimensions: []Dimension{ + {Name: CurrentName, Power: -2}, + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + ErForm: "Inductancer", + }, + { + DimensionName: "Power", + Receiver: "pw", + PrintString: "W", + Name: "Watt", + 
TypeComment: "Power represents a power in Watts", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -3}, + }, + }, + { + DimensionName: "Resistance", + Receiver: "r", + PrintString: "Ω", + Name: "Ohm", + TypeComment: "Resistance represents an electrical resistance, impedance or reactance in Ohms", + Dimensions: []Dimension{ + {Name: CurrentName, Power: -2}, + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -3}, + }, + ErForm: "Resistancer", + }, + { + DimensionName: "MagneticFlux", + Receiver: "m", + PrintString: "Wb", + Name: "Weber", + TypeComment: "MagneticFlux represents a magnetic flux in Weber", + Dimensions: []Dimension{ + {Name: CurrentName, Power: -1}, + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "MagneticFluxDensity", + Receiver: "m", + PrintString: "T", + Name: "Tesla", + TypeComment: "MagneticFluxDensity represents a magnetic flux density in Tesla", + Dimensions: []Dimension{ + {Name: CurrentName, Power: -1}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + }, + { + DimensionName: "Pressure", + Receiver: "pr", + PrintString: "Pa", + Name: "Pascal", + TypeComment: "Pressure represents a pressure in Pascals", + Dimensions: []Dimension{ + {Name: LengthName, Power: -1}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + ErForm: "Pressurer", + }, + { + DimensionName: "Torque", + Receiver: "t", + PrintString: "N m", + Name: "Newtonmetre", + TypeComment: "Torque represents a torque in Newton metres", + Dimensions: []Dimension{ + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -2}, + }, + ErForm: "Torquer", + }, + { + DimensionName: "Velocity", + Receiver: "v", + PrintString: "m s^-1", + TypeComment: "Velocity represents a velocity in metres per second", + Dimensions: []Dimension{ + {Name: LengthName, Power: 1}, + {Name: TimeName, Power: -1}, + }, + }, + { + DimensionName: "Voltage", + Receiver: "v", + PrintString: "V", + Name: "Volt", + TypeComment: "Voltage represents a voltage in Volts", + Dimensions: []Dimension{ + {Name: CurrentName, Power: -1}, + {Name: LengthName, Power: 2}, + {Name: MassName, Power: 1}, + {Name: TimeName, Power: -3}, + }, + ErForm: "Voltager", + }, + { + DimensionName: "Volume", + Receiver: "v", + PowerOffset: -3, + PrintString: "m^3", + Name: "Litre", + TypeComment: "Volume represents a volume in cubic metres", + Dimensions: []Dimension{ + {Name: LengthName, Power: 3}, + }, + }, +} + +// Generate generates a file for each of the units +func main() { + for _, unit := range Units { + generate(unit) + generateTest(unit) + } +} + +const headerTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// {{.TypeComment}}. 
+type {{.DimensionName}} float64 +` + +var header = template.Must(template.New("header").Parse(headerTemplate)) + +const constTemplate = ` +const {{if .ExtraConstant}}({{end}} + {{.Name}} {{.DimensionName}} = {{if .PowerOffset}} 1e{{.PowerOffset}} {{else}} 1 {{end}} + {{$name := .Name}} + {{range .ExtraConstant}} {{.Name}} = {{.Value}} + {{end}} +{{if .ExtraConstant}}){{end}} +` + +var prefix = template.Must(template.New("prefix").Parse(constTemplate)) + +const methodTemplate = ` +// Unit converts the {{.DimensionName}} to a *Unit +func ({{.Receiver}} {{.DimensionName}}) Unit() *Unit { + return New(float64({{.Receiver}}), Dimensions{ + {{range .Dimensions}} {{.Name}}: {{.Power}}, + {{end}} + }) +} + +// {{.DimensionName}} allows {{.DimensionName}} to implement a {{if .ErForm}}{{.ErForm}}{{else}}{{.DimensionName}}er{{end}} interface +func ({{.Receiver}} {{.DimensionName}}) {{.DimensionName}}() {{.DimensionName}} { + return {{.Receiver}} +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func ({{.Receiver}} *{{.DimensionName}}) From(u Uniter) error { + if !DimensionsMatch(u, {{if .Name}}{{.Name}}{{else}}{{.DimensionName}}(0){{end}}){ + *{{.Receiver}} = {{.DimensionName}}(math.NaN()) + return errors.New("Dimension mismatch") + } + *{{.Receiver}} = {{.DimensionName}}(u.Unit().Value()) + return nil +} +` + +var methods = template.Must(template.New("methods").Parse(methodTemplate)) + +const formatTemplate = ` +func ({{.Receiver}} {{.DimensionName}}) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", {{.Receiver}}, float64({{.Receiver}})) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " {{.PrintString}}" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64({{.Receiver}})) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64({{.Receiver}})) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64({{.Receiver}})) + default: + fmt.Fprintf(fs, "%"+string(c), float64({{.Receiver}})) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g {{.PrintString}})", c, {{.Receiver}}, float64({{.Receiver}})) + } +} +` + +var form = template.Must(template.New("format").Parse(formatTemplate)) + +func generate(unit Unit) { + lowerName := strings.ToLower(unit.DimensionName) + filename := lowerName + ".go" + f, err := os.Create(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + + var buf bytes.Buffer + + err = header.Execute(&buf, unit) + if err != nil { + log.Fatal(err) + } + + if unit.Name != "" { + err = prefix.Execute(&buf, unit) + if err != nil { + log.Fatal(err) + } + } + + err = methods.Execute(&buf, unit) + if err != nil { + log.Fatal(err) + } + + err = form.Execute(&buf, unit) + if err != nil { + log.Fatal(err) + } + + b, err := format.Source(buf.Bytes()) + if err != nil { + f.Write(buf.Bytes()) // This is here to debug bad format + log.Fatalf("error formatting %q: %s", unit.DimensionName, err) + } + + f.Write(b) +} + +const testTemplate = `// Code generated by "go generate gonum.org/v1/gonum/unit; DO NOT EDIT. + +// Copyright ©2019 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package unit + +import ( + "fmt" + "testing" +) + +func Test{{.DimensionName}}Format(t *testing.T) { + for _, test := range []struct{ + value {{.DimensionName}} + format string + want string + }{ + {1.23456789, "%v", "1.23456789 {{.PrintString}}"}, + {1.23456789, "%.1v", "1 {{.PrintString}}"}, + {1.23456789, "%20.1v", "{{$s := printf "1 %s" .PrintString}}{{printf "%20s" $s}}"}, + {1.23456789, "%20v", "{{$s := printf "1.23456789 %s" .PrintString}}{{printf "%20s" $s}}"}, + {1.23456789, "%1v", "1.23456789 {{.PrintString}}"}, + {1.23456789, "%#v", "unit.{{.DimensionName}}(1.23456789)"}, + {1.23456789, "%s", "%!s(unit.{{.DimensionName}}=1.23456789 {{.PrintString}})"}, + } { + got := fmt.Sprintf(test.format, test.value) + if got != test.want { + t.Errorf("Format %q %v: got: %q want: %q", test.format, float64(test.value), got, test.want) + } + } +} +` + +var tests = template.Must(template.New("test").Parse(testTemplate)) + +func generateTest(unit Unit) { + lowerName := strings.ToLower(unit.DimensionName) + filename := lowerName + "_test.go" + f, err := os.Create(filename) + if err != nil { + log.Fatal(err) + } + defer f.Close() + + var buf bytes.Buffer + err = tests.Execute(&buf, unit) + if err != nil { + log.Fatal(err) + } + + b, err := format.Source(buf.Bytes()) + if err != nil { + f.Write(buf.Bytes()) // This is here to debug bad format. + log.Fatalf("error formatting test for %q: %s", unit.DimensionName, err) + } + + f.Write(b) +} diff --git a/vendor/gonum.org/v1/gonum/unit/inductance.go b/vendor/gonum.org/v1/gonum/unit/inductance.go new file mode 100644 index 0000000..4af6008 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/inductance.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Inductance represents an electrical inductance in Henry. +type Inductance float64 + +const Henry Inductance = 1 + +// Unit converts the Inductance to a *Unit +func (i Inductance) Unit() *Unit { + return New(float64(i), Dimensions{ + CurrentDim: -2, + LengthDim: 2, + MassDim: 1, + TimeDim: -2, + }) +} + +// Inductance allows Inductance to implement a Inductancer interface +func (i Inductance) Inductance() Inductance { + return i +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (i *Inductance) From(u Uniter) error { + if !DimensionsMatch(u, Henry) { + *i = Inductance(math.NaN()) + return errors.New("Dimension mismatch") + } + *i = Inductance(u.Unit().Value()) + return nil +} + +func (i Inductance) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", i, float64(i)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " H" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(i)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(i)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(i)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(i)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g H)", c, i, float64(i)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/length.go b/vendor/gonum.org/v1/gonum/unit/length.go new file mode 100644 index 0000000..5933d7f --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/length.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Length represents a length in metres. +type Length float64 + +const Metre Length = 1 + +// Unit converts the Length to a *Unit +func (l Length) Unit() *Unit { + return New(float64(l), Dimensions{ + LengthDim: 1, + }) +} + +// Length allows Length to implement a Lengther interface +func (l Length) Length() Length { + return l +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (l *Length) From(u Uniter) error { + if !DimensionsMatch(u, Metre) { + *l = Length(math.NaN()) + return errors.New("Dimension mismatch") + } + *l = Length(u.Unit().Value()) + return nil +} + +func (l Length) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", l, float64(l)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " m" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(l)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(l)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(l)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(l)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g m)", c, l, float64(l)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/luminousintensity.go b/vendor/gonum.org/v1/gonum/unit/luminousintensity.go new file mode 100644 index 0000000..8e34cf4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/luminousintensity.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Candela represents a luminous intensity in candela. 
+type LuminousIntensity float64 + +const Candela LuminousIntensity = 1 + +// Unit converts the LuminousIntensity to a *Unit +func (j LuminousIntensity) Unit() *Unit { + return New(float64(j), Dimensions{ + LuminousIntensityDim: 1, + }) +} + +// LuminousIntensity allows LuminousIntensity to implement a LuminousIntensityer interface +func (j LuminousIntensity) LuminousIntensity() LuminousIntensity { + return j +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (j *LuminousIntensity) From(u Uniter) error { + if !DimensionsMatch(u, Candela) { + *j = LuminousIntensity(math.NaN()) + return errors.New("Dimension mismatch") + } + *j = LuminousIntensity(u.Unit().Value()) + return nil +} + +func (j LuminousIntensity) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", j, float64(j)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " cd" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(j)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(j)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(j)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(j)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g cd)", c, j, float64(j)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/magneticflux.go b/vendor/gonum.org/v1/gonum/unit/magneticflux.go new file mode 100644 index 0000000..8110abf --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/magneticflux.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// MagneticFlux represents a magnetic flux in Weber. +type MagneticFlux float64 + +const Weber MagneticFlux = 1 + +// Unit converts the MagneticFlux to a *Unit +func (m MagneticFlux) Unit() *Unit { + return New(float64(m), Dimensions{ + CurrentDim: -1, + LengthDim: 2, + MassDim: 1, + TimeDim: -2, + }) +} + +// MagneticFlux allows MagneticFlux to implement a MagneticFluxer interface +func (m MagneticFlux) MagneticFlux() MagneticFlux { + return m +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (m *MagneticFlux) From(u Uniter) error { + if !DimensionsMatch(u, Weber) { + *m = MagneticFlux(math.NaN()) + return errors.New("Dimension mismatch") + } + *m = MagneticFlux(u.Unit().Value()) + return nil +} + +func (m MagneticFlux) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", m, float64(m)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Wb" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(m)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(m)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(m)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(m)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Wb)", c, m, float64(m)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/magneticfluxdensity.go b/vendor/gonum.org/v1/gonum/unit/magneticfluxdensity.go new file mode 100644 index 0000000..2404012 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/magneticfluxdensity.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// MagneticFluxDensity represents a magnetic flux density in Tesla. +type MagneticFluxDensity float64 + +const Tesla MagneticFluxDensity = 1 + +// Unit converts the MagneticFluxDensity to a *Unit +func (m MagneticFluxDensity) Unit() *Unit { + return New(float64(m), Dimensions{ + CurrentDim: -1, + MassDim: 1, + TimeDim: -2, + }) +} + +// MagneticFluxDensity allows MagneticFluxDensity to implement a MagneticFluxDensityer interface +func (m MagneticFluxDensity) MagneticFluxDensity() MagneticFluxDensity { + return m +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (m *MagneticFluxDensity) From(u Uniter) error { + if !DimensionsMatch(u, Tesla) { + *m = MagneticFluxDensity(math.NaN()) + return errors.New("Dimension mismatch") + } + *m = MagneticFluxDensity(u.Unit().Value()) + return nil +} + +func (m MagneticFluxDensity) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", m, float64(m)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " T" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(m)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(m)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(m)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(m)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g T)", c, m, float64(m)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/mass.go b/vendor/gonum.org/v1/gonum/unit/mass.go new file mode 100644 index 0000000..479ec40 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/mass.go @@ -0,0 +1,74 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Mass represents a mass in kilograms. +type Mass float64 + +const ( + Gram Mass = 1e-3 + + Kilogram = Kilo * Gram +) + +// Unit converts the Mass to a *Unit +func (m Mass) Unit() *Unit { + return New(float64(m), Dimensions{ + MassDim: 1, + }) +} + +// Mass allows Mass to implement a Masser interface +func (m Mass) Mass() Mass { + return m +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (m *Mass) From(u Uniter) error { + if !DimensionsMatch(u, Gram) { + *m = Mass(math.NaN()) + return errors.New("Dimension mismatch") + } + *m = Mass(u.Unit().Value()) + return nil +} + +func (m Mass) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", m, float64(m)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " kg" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(m)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(m)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(m)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(m)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g kg)", c, m, float64(m)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/mole.go b/vendor/gonum.org/v1/gonum/unit/mole.go new file mode 100644 index 0000000..ed32a47 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/mole.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Mole represents an amount in moles. +type Mole float64 + +const Mol Mole = 1 + +// Unit converts the Mole to a *Unit +func (n Mole) Unit() *Unit { + return New(float64(n), Dimensions{ + MoleDim: 1, + }) +} + +// Mole allows Mole to implement a Moleer interface +func (n Mole) Mole() Mole { + return n +} + +// From converts the unit into the receiver. 
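Mass is the one base quantity whose constant (Gram) is not 1, because the SI base unit is the kilogram; the generator's PowerOffset of -3 yields Gram = 1e-3 while the print string stays "kg". A short illustrative check against the vendored API (not part of the patch itself):

package main

import (
	"fmt"

	"gonum.org/v1/gonum/unit"
)

func main() {
	fmt.Println(250 * unit.Gram)           // 0.25 kg
	fmt.Println(75 * unit.Kilogram)        // 75 kg
	fmt.Println(2 * unit.Kilo * unit.Gram) // 2 kg, the same value as 2 * unit.Kilogram
}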
From returns an +// error if there is a mismatch in dimension +func (n *Mole) From(u Uniter) error { + if !DimensionsMatch(u, Mol) { + *n = Mole(math.NaN()) + return errors.New("Dimension mismatch") + } + *n = Mole(u.Unit().Value()) + return nil +} + +func (n Mole) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", n, float64(n)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " mol" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(n)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(n)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(n)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(n)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g mol)", c, n, float64(n)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/power.go b/vendor/gonum.org/v1/gonum/unit/power.go new file mode 100644 index 0000000..afccdbe --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/power.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Power represents a power in Watts. +type Power float64 + +const Watt Power = 1 + +// Unit converts the Power to a *Unit +func (pw Power) Unit() *Unit { + return New(float64(pw), Dimensions{ + LengthDim: 2, + MassDim: 1, + TimeDim: -3, + }) +} + +// Power allows Power to implement a Powerer interface +func (pw Power) Power() Power { + return pw +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (pw *Power) From(u Uniter) error { + if !DimensionsMatch(u, Watt) { + *pw = Power(math.NaN()) + return errors.New("Dimension mismatch") + } + *pw = Power(u.Unit().Value()) + return nil +} + +func (pw Power) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", pw, float64(pw)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " W" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(pw)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(pw)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(pw)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(pw)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g W)", c, pw, float64(pw)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/prefixes.go b/vendor/gonum.org/v1/gonum/unit/prefixes.go new file mode 100644 index 0000000..372a6b8 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/prefixes.go @@ -0,0 +1,29 @@ +// Copyright ©2017 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package unit + +const ( + Yotta = 1e24 + Zetta = 1e21 + Exa = 1e18 + Peta = 1e15 + Tera = 1e12 + Giga = 1e9 + Mega = 1e6 + Kilo = 1e3 + Hecto = 1e2 + Deca = 1e1 + + Deci = 1e-1 + Centi = 1e-2 + Milli = 1e-3 + Micro = 1e-6 + Nano = 1e-9 + Pico = 1e-12 + Femto = 1e-15 + Atto = 1e-18 + Zepto = 1e-21 + Yocto = 1e-24 +) diff --git a/vendor/gonum.org/v1/gonum/unit/pressure.go b/vendor/gonum.org/v1/gonum/unit/pressure.go new file mode 100644 index 0000000..d5a4a29 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/pressure.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Pressure represents a pressure in Pascals. +type Pressure float64 + +const Pascal Pressure = 1 + +// Unit converts the Pressure to a *Unit +func (pr Pressure) Unit() *Unit { + return New(float64(pr), Dimensions{ + LengthDim: -1, + MassDim: 1, + TimeDim: -2, + }) +} + +// Pressure allows Pressure to implement a Pressurer interface +func (pr Pressure) Pressure() Pressure { + return pr +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (pr *Pressure) From(u Uniter) error { + if !DimensionsMatch(u, Pascal) { + *pr = Pressure(math.NaN()) + return errors.New("Dimension mismatch") + } + *pr = Pressure(u.Unit().Value()) + return nil +} + +func (pr Pressure) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", pr, float64(pr)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Pa" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(pr)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(pr)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(pr)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(pr)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Pa)", c, pr, float64(pr)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/radioactivity.go b/vendor/gonum.org/v1/gonum/unit/radioactivity.go new file mode 100644 index 0000000..db2c9c0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/radioactivity.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Radioactivity represents a rate of radioactive decay in becquerels. +type Radioactivity float64 + +const Becquerel Radioactivity = 1 + +// Unit converts the Radioactivity to a *Unit +func (r Radioactivity) Unit() *Unit { + return New(float64(r), Dimensions{ + TimeDim: -1, + }) +} + +// Radioactivity allows Radioactivity to implement a Radioactivityer interface +func (r Radioactivity) Radioactivity() Radioactivity { + return r +} + +// From converts the unit into the receiver. 
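The untyped prefix constants above compose with any typed quantity by ordinary multiplication, and each generated Format method appends its print string, subtracting its rune count from any requested width (which is why the generated code calls pos(w-utf8.RuneCount(...))). A small illustrative sketch against the vendored API, not part of the patch:

package main

import (
	"fmt"

	"gonum.org/v1/gonum/unit"
)

func main() {
	p := 101.325 * unit.Kilo * unit.Pascal // one standard atmosphere

	fmt.Println(p)             // 101325 Pa
	fmt.Printf("%.2e\n", p)    // 1.01e+05 Pa
	fmt.Printf("%#v\n", p)     // unit.Pressure(101325)
	fmt.Printf("%15.2f|\n", p) // prints "   101325.00 Pa|": the width accounts for the " Pa" suffix
}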
From returns an +// error if there is a mismatch in dimension +func (r *Radioactivity) From(u Uniter) error { + if !DimensionsMatch(u, Becquerel) { + *r = Radioactivity(math.NaN()) + return errors.New("Dimension mismatch") + } + *r = Radioactivity(u.Unit().Value()) + return nil +} + +func (r Radioactivity) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", r, float64(r)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Bq" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(r)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(r)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(r)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(r)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Bq)", c, r, float64(r)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/resistance.go b/vendor/gonum.org/v1/gonum/unit/resistance.go new file mode 100644 index 0000000..af706a4 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/resistance.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Resistance represents an electrical resistance, impedance or reactance in Ohms. +type Resistance float64 + +const Ohm Resistance = 1 + +// Unit converts the Resistance to a *Unit +func (r Resistance) Unit() *Unit { + return New(float64(r), Dimensions{ + CurrentDim: -2, + LengthDim: 2, + MassDim: 1, + TimeDim: -3, + }) +} + +// Resistance allows Resistance to implement a Resistancer interface +func (r Resistance) Resistance() Resistance { + return r +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (r *Resistance) From(u Uniter) error { + if !DimensionsMatch(u, Ohm) { + *r = Resistance(math.NaN()) + return errors.New("Dimension mismatch") + } + *r = Resistance(u.Unit().Value()) + return nil +} + +func (r Resistance) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", r, float64(r)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " Ω" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(r)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(r)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(r)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(r)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g Ω)", c, r, float64(r)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/temperature.go b/vendor/gonum.org/v1/gonum/unit/temperature.go new file mode 100644 index 0000000..869c4d0 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/temperature.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Temperature represents a temperature in Kelvin. +type Temperature float64 + +const Kelvin Temperature = 1 + +// Unit converts the Temperature to a *Unit +func (t Temperature) Unit() *Unit { + return New(float64(t), Dimensions{ + TemperatureDim: 1, + }) +} + +// Temperature allows Temperature to implement a Temperaturer interface +func (t Temperature) Temperature() Temperature { + return t +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (t *Temperature) From(u Uniter) error { + if !DimensionsMatch(u, Kelvin) { + *t = Temperature(math.NaN()) + return errors.New("Dimension mismatch") + } + *t = Temperature(u.Unit().Value()) + return nil +} + +func (t Temperature) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", t, float64(t)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " K" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(t)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(t)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(t)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(t)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g K)", c, t, float64(t)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/time.go b/vendor/gonum.org/v1/gonum/unit/time.go new file mode 100644 index 0000000..7c4eaf1 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/time.go @@ -0,0 +1,75 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Time represents a duration in seconds. +type Time float64 + +const ( + Second Time = 1 + + Minute = 60 * Second + Hour = 60 * Minute +) + +// Unit converts the Time to a *Unit +func (t Time) Unit() *Unit { + return New(float64(t), Dimensions{ + TimeDim: 1, + }) +} + +// Time allows Time to implement a Timer interface +func (t Time) Time() Time { + return t +} + +// From converts the unit into the receiver. 
From returns an +// error if there is a mismatch in dimension +func (t *Time) From(u Uniter) error { + if !DimensionsMatch(u, Second) { + *t = Time(math.NaN()) + return errors.New("Dimension mismatch") + } + *t = Time(u.Unit().Value()) + return nil +} + +func (t Time) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", t, float64(t)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " s" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(t)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(t)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(t)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(t)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g s)", c, t, float64(t)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/torque.go b/vendor/gonum.org/v1/gonum/unit/torque.go new file mode 100644 index 0000000..a5ef9fd --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/torque.go @@ -0,0 +1,72 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Torque represents a torque in Newton metres. +type Torque float64 + +const Newtonmetre Torque = 1 + +// Unit converts the Torque to a *Unit +func (t Torque) Unit() *Unit { + return New(float64(t), Dimensions{ + LengthDim: 2, + MassDim: 1, + TimeDim: -2, + }) +} + +// Torque allows Torque to implement a Torquer interface +func (t Torque) Torque() Torque { + return t +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (t *Torque) From(u Uniter) error { + if !DimensionsMatch(u, Newtonmetre) { + *t = Torque(math.NaN()) + return errors.New("Dimension mismatch") + } + *t = Torque(u.Unit().Value()) + return nil +} + +func (t Torque) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", t, float64(t)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " N m" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(t)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(t)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(t)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(t)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g N m)", c, t, float64(t)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/unittype.go b/vendor/gonum.org/v1/gonum/unit/unittype.go new file mode 100644 index 0000000..af8ed36 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/unittype.go @@ -0,0 +1,375 @@ +// Copyright ©2013 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "bytes" + "fmt" + "sort" + "sync" + "unicode/utf8" +) + +// Uniter is a type that can be converted to a Unit. 
+type Uniter interface { + Unit() *Unit +} + +// Dimension is a type representing an SI base dimension or a distinct +// orthogonal dimension. Non-SI dimensions can be created using the NewDimension +// function, typically within an init function. +type Dimension int + +// NewDimension creates a new orthogonal dimension with the given symbol, and +// returns the value of that dimension. The input symbol must not overlap with +// any of the any of the SI base units or other symbols of common use in SI ("kg", +// "J", etc.), and must not overlap with any other dimensions created by calls +// to NewDimension. The SymbolExists function can check if the symbol exists. +// NewDimension will panic if the input symbol matches an existing symbol. +// +// NewDimension should only be called for unit types that are actually orthogonal +// to the base dimensions defined in this package. See the package-level +// documentation for further explanation. +func NewDimension(symbol string) Dimension { + defer mu.Unlock() + mu.Lock() + _, ok := dimensions[symbol] + if ok { + panic("unit: dimension string \"" + symbol + "\" already used") + } + d := Dimension(len(symbols)) + symbols = append(symbols, symbol) + dimensions[symbol] = d + return d +} + +// String returns the string for the dimension. +func (d Dimension) String() string { + if d == reserved { + return "reserved" + } + defer mu.RUnlock() + mu.RLock() + if int(d) < len(symbols) { + return symbols[d] + } + panic("unit: illegal dimension") +} + +// SymbolExists returns whether the given symbol is already in use. +func SymbolExists(symbol string) bool { + mu.RLock() + _, ok := dimensions[symbol] + mu.RUnlock() + return ok +} + +const ( + // SI Base Units + reserved Dimension = iota + CurrentDim + LengthDim + LuminousIntensityDim + MassDim + MoleDim + TemperatureDim + TimeDim + // Other common SI Dimensions + AngleDim // e.g. radians +) + +var ( + // mu protects symbols and dimensions for concurrent use. 
+ mu sync.RWMutex + symbols = []string{ + CurrentDim: "A", + LengthDim: "m", + LuminousIntensityDim: "cd", + MassDim: "kg", + MoleDim: "mol", + TemperatureDim: "K", + TimeDim: "s", + AngleDim: "rad", + } + + // dimensions guarantees there aren't two identical symbols + // SI symbol list from http://lamar.colostate.edu/~hillger/basic.htm + dimensions = map[string]Dimension{ + "A": CurrentDim, + "m": LengthDim, + "cd": LuminousIntensityDim, + "kg": MassDim, + "mol": MoleDim, + "K": TemperatureDim, + "s": TimeDim, + "rad": AngleDim, + + // Reserve common SI symbols + // prefixes + "Y": reserved, + "Z": reserved, + "E": reserved, + "P": reserved, + "T": reserved, + "G": reserved, + "M": reserved, + "k": reserved, + "h": reserved, + "da": reserved, + "d": reserved, + "c": reserved, + "μ": reserved, + "n": reserved, + "p": reserved, + "f": reserved, + "a": reserved, + "z": reserved, + "y": reserved, + // SI Derived units with special symbols + "sr": reserved, + "F": reserved, + "C": reserved, + "S": reserved, + "H": reserved, + "V": reserved, + "Ω": reserved, + "J": reserved, + "N": reserved, + "Hz": reserved, + "lx": reserved, + "lm": reserved, + "Wb": reserved, + "W": reserved, + "Pa": reserved, + "Bq": reserved, + "Gy": reserved, + "Sv": reserved, + "kat": reserved, + // Units in use with SI + "ha": reserved, + "L": reserved, + "l": reserved, + // Units in Use Temporarily with SI + "bar": reserved, + "b": reserved, + "Ci": reserved, + "R": reserved, + "rd": reserved, + "rem": reserved, + } +) + +// Dimensions represent the dimensionality of the unit in powers +// of that dimension. If a key is not present, the power of that +// dimension is zero. Dimensions is used in conjunction with New. +type Dimensions map[Dimension]int + +func (d Dimensions) clone() Dimensions { + if len(d) == 0 { + return nil + } + c := make(Dimensions, len(d)) + for dim, pow := range d { + if pow != 0 { + c[dim] = pow + } + } + return c +} + +// matches reports whether the dimensions of d and o match. Zero power +// dimensions in d an o must be removed, otherwise matches may incorrectly +// report a mismatch. +func (d Dimensions) matches(o Dimensions) bool { + if len(d) != len(o) { + return false + } + for dim, pow := range d { + if o[dim] != pow { + return false + } + } + return true +} + +func (d Dimensions) String() string { + // Map iterates randomly, but print should be in a fixed order. Can't use + // dimension number, because for user-defined dimension that number may + // not be fixed from run to run. + atoms := make(unitPrinters, 0, len(d)) + for dimension, power := range d { + if power != 0 { + atoms = append(atoms, atom{dimension, power}) + } + } + sort.Sort(atoms) + var b bytes.Buffer + for i, a := range atoms { + if i > 0 { + b.WriteByte(' ') + } + fmt.Fprintf(&b, "%s", a.Dimension) + if a.pow != 1 { + fmt.Fprintf(&b, "^%d", a.pow) + } + } + + return b.String() +} + +type atom struct { + Dimension + pow int +} + +type unitPrinters []atom + +func (u unitPrinters) Len() int { + return len(u) +} + +func (u unitPrinters) Less(i, j int) bool { + // Order first by positive powers, then by name. + if u[i].pow*u[j].pow < 0 { + return u[i].pow > 0 + } + return u[i].String() < u[j].String() +} + +func (u unitPrinters) Swap(i, j int) { + u[i], u[j] = u[j], u[i] +} + +// Unit represents a dimensional value. The dimensions will typically be in SI +// units, but can also include dimensions created with NewDimension. 
The Unit type +// is most useful for ensuring dimensional consistency when manipulating types +// with different units, for example, by multiplying an acceleration with a +// mass to get a force. See the package documentation for further explanation. +type Unit struct { + dimensions Dimensions + value float64 +} + +// New creates a new variable of type Unit which has the value and dimensions +// specified by the inputs. The built-in dimensions are always in SI units +// (metres, kilograms, etc.). +func New(value float64, d Dimensions) *Unit { + return &Unit{ + dimensions: d.clone(), + value: value, + } +} + +// DimensionsMatch checks if the dimensions of two Uniters are the same. +func DimensionsMatch(a, b Uniter) bool { + return a.Unit().dimensions.matches(b.Unit().dimensions) +} + +// Dimensions returns a copy of the dimensions of the unit. +func (u *Unit) Dimensions() Dimensions { + return u.dimensions.clone() +} + +// Add adds the function argument to the receiver. Panics if the units of +// the receiver and the argument don't match. +func (u *Unit) Add(uniter Uniter) *Unit { + a := uniter.Unit() + if !DimensionsMatch(u, a) { + panic("unit: mismatched dimensions in addition") + } + u.value += a.value + return u +} + +// Unit implements the Uniter interface +func (u *Unit) Unit() *Unit { + return u +} + +// Mul multiplies the receiver by the input, changing the dimensions +// of the receiver as appropriate. The input is not changed. +func (u *Unit) Mul(uniter Uniter) *Unit { + a := uniter.Unit() + for key, val := range a.dimensions { + if d := u.dimensions[key]; d == -val { + delete(u.dimensions, key) + } else { + u.dimensions[key] = d + val + } + } + u.value *= a.value + return u +} + +// Div divides the receiver by the argument changing the +// dimensions of the receiver as appropriate. +func (u *Unit) Div(uniter Uniter) *Unit { + a := uniter.Unit() + u.value /= a.value + for key, val := range a.dimensions { + if d := u.dimensions[key]; d == val { + delete(u.dimensions, key) + } else { + u.dimensions[key] = d - val + } + } + return u +} + +// Value returns the raw value of the unit as a float64. Use of this +// method is, in general, not recommended, though it can be useful +// for printing. Instead, the From method of a specific dimension +// should be used to guarantee dimension consistency. +func (u *Unit) Value() float64 { + return u.value +} + +// SetValue sets the value of the unit. +func (u *Unit) SetValue(v float64) { + u.value = v +} + +// Format makes Unit satisfy the fmt.Formatter interface. The unit is formatted +// with dimensions appended. If the power of the dimension is not zero or one, +// symbol^power is appended, if the power is one, just the symbol is appended +// and if the power is zero, nothing is appended. Dimensions are appended +// in order by symbol name with positive powers ahead of negative powers.
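To make the API above concrete, a minimal usage sketch of the vendored unit package, assuming only the exported names shown in this file (New, Dimensions, the *Dim constants, Mul, Div, DimensionsMatch, NewDimension, SymbolExists); the numbers and printed output are illustrative, not taken from the patch:

package main

import (
    "fmt"

    "gonum.org/v1/gonum/unit"
)

func main() {
    // force = mass * acceleration, i.e. kg * m s^-2.
    mass := unit.New(2, unit.Dimensions{unit.MassDim: 1})
    accel := unit.New(9.81, unit.Dimensions{unit.LengthDim: 1, unit.TimeDim: -2})
    force := mass.Mul(accel) // Mul mutates the receiver and returns it.
    fmt.Println(force)       // prints roughly "19.62 kg m s^-2" via the Format method below

    // DimensionsMatch guards against combining incompatible quantities.
    length := unit.New(3, unit.Dimensions{unit.LengthDim: 1})
    fmt.Println(unit.DimensionsMatch(force, length)) // false

    // A custom orthogonal dimension; NewDimension panics if the symbol is
    // already taken, so SymbolExists can be consulted first.
    if !unit.SymbolExists("foo") {
        foo := unit.NewDimension("foo")
        fmt.Println(unit.New(1.5, unit.Dimensions{foo: 2})) // "1.5 foo^2"
    }
}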
+func (u *Unit) Format(fs fmt.State, c rune) { + if u == nil { + fmt.Fprint(fs, "") + } + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "&%#v", *u) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + units := u.dimensions.String() + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(units))-1), p, u.value) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, u.value) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(units))-1), u.value) + default: + fmt.Fprintf(fs, "%"+string(c), u.value) + } + fmt.Fprintf(fs, " %s", units) + default: + fmt.Fprintf(fs, "%%!%c(*Unit=%g)", c, u) + } +} + +func pos(a int) int { + if a < 0 { + return 0 + } + return a +} diff --git a/vendor/gonum.org/v1/gonum/unit/velocity.go b/vendor/gonum.org/v1/gonum/unit/velocity.go new file mode 100644 index 0000000..8425172 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/velocity.go @@ -0,0 +1,69 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Velocity represents a velocity in metres per second. +type Velocity float64 + +// Unit converts the Velocity to a *Unit +func (v Velocity) Unit() *Unit { + return New(float64(v), Dimensions{ + LengthDim: 1, + TimeDim: -1, + }) +} + +// Velocity allows Velocity to implement a Velocityer interface +func (v Velocity) Velocity() Velocity { + return v +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (v *Velocity) From(u Uniter) error { + if !DimensionsMatch(u, Velocity(0)) { + *v = Velocity(math.NaN()) + return errors.New("Dimension mismatch") + } + *v = Velocity(u.Unit().Value()) + return nil +} + +func (v Velocity) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", v, float64(v)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " m s^-1" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(v)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(v)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(v)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(v)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g m s^-1)", c, v, float64(v)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/voltage.go b/vendor/gonum.org/v1/gonum/unit/voltage.go new file mode 100644 index 0000000..320fbb9 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/voltage.go @@ -0,0 +1,73 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Voltage represents a voltage in Volts. 
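The typed units in this directory (Velocity above, and Voltage, Volume and the rest that follow) all share the same From pattern: the value is copied out of a *Unit only when the dimensions agree, otherwise the receiver is set to NaN and an error is returned. A small sketch of that pattern, with illustrative numbers:

package main

import (
    "fmt"

    "gonum.org/v1/gonum/unit"
)

func main() {
    // 100 m covered in 9.58 s, built up as a raw *Unit with dimensions m s^-1.
    distance := unit.New(100, unit.Dimensions{unit.LengthDim: 1})
    elapsed := unit.New(9.58, unit.Dimensions{unit.TimeDim: 1})
    raw := distance.Div(elapsed)

    var v unit.Velocity
    if err := v.From(raw); err != nil {
        fmt.Println("unexpected:", err)
        return
    }
    fmt.Printf("%.2f\n", v) // "10.44 m s^-1" via Velocity's Format method

    // A dimension mismatch is reported as an error and the receiver becomes NaN.
    mass := unit.New(80, unit.Dimensions{unit.MassDim: 1})
    fmt.Println(v.From(mass)) // "Dimension mismatch"
}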
+type Voltage float64 + +const Volt Voltage = 1 + +// Unit converts the Voltage to a *Unit +func (v Voltage) Unit() *Unit { + return New(float64(v), Dimensions{ + CurrentDim: -1, + LengthDim: 2, + MassDim: 1, + TimeDim: -3, + }) +} + +// Voltage allows Voltage to implement a Voltager interface +func (v Voltage) Voltage() Voltage { + return v +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (v *Voltage) From(u Uniter) error { + if !DimensionsMatch(u, Volt) { + *v = Voltage(math.NaN()) + return errors.New("Dimension mismatch") + } + *v = Voltage(u.Unit().Value()) + return nil +} + +func (v Voltage) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", v, float64(v)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " V" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(v)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(v)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(v)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(v)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g V)", c, v, float64(v)) + } +} diff --git a/vendor/gonum.org/v1/gonum/unit/volume.go b/vendor/gonum.org/v1/gonum/unit/volume.go new file mode 100644 index 0000000..6e1e2cc --- /dev/null +++ b/vendor/gonum.org/v1/gonum/unit/volume.go @@ -0,0 +1,70 @@ +// Code generated by "go generate gonum.org/v1/gonum/unit”; DO NOT EDIT. + +// Copyright ©2014 The Gonum Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package unit + +import ( + "errors" + "fmt" + "math" + "unicode/utf8" +) + +// Volume represents a volume in cubic metres. +type Volume float64 + +const Litre Volume = 1e-3 + +// Unit converts the Volume to a *Unit +func (v Volume) Unit() *Unit { + return New(float64(v), Dimensions{ + LengthDim: 3, + }) +} + +// Volume allows Volume to implement a Volumeer interface +func (v Volume) Volume() Volume { + return v +} + +// From converts the unit into the receiver. From returns an +// error if there is a mismatch in dimension +func (v *Volume) From(u Uniter) error { + if !DimensionsMatch(u, Litre) { + *v = Volume(math.NaN()) + return errors.New("Dimension mismatch") + } + *v = Volume(u.Unit().Value()) + return nil +} + +func (v Volume) Format(fs fmt.State, c rune) { + switch c { + case 'v': + if fs.Flag('#') { + fmt.Fprintf(fs, "%T(%v)", v, float64(v)) + return + } + fallthrough + case 'e', 'E', 'f', 'F', 'g', 'G': + p, pOk := fs.Precision() + w, wOk := fs.Width() + const unit = " m^3" + switch { + case pOk && wOk: + fmt.Fprintf(fs, "%*.*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), p, float64(v)) + case pOk: + fmt.Fprintf(fs, "%.*"+string(c), p, float64(v)) + case wOk: + fmt.Fprintf(fs, "%*"+string(c), pos(w-utf8.RuneCount([]byte(unit))), float64(v)) + default: + fmt.Fprintf(fs, "%"+string(c), float64(v)) + } + fmt.Fprint(fs, unit) + default: + fmt.Fprintf(fs, "%%!%c(%T=%g m^3)", c, v, float64(v)) + } +} diff --git a/vendor/gonum.org/v1/gonum/version.go b/vendor/gonum.org/v1/gonum/version.go new file mode 100644 index 0000000..cf07f04 --- /dev/null +++ b/vendor/gonum.org/v1/gonum/version.go @@ -0,0 +1,48 @@ +// Copyright ©2019 The Gonum Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.12 + +package gonum + +import ( + "fmt" + "runtime/debug" +) + +const root = "gonum.org/v1/gonum" + +// Version returns the version of Gonum and its checksum. The returned +// values are only valid in binaries built with module support. +// +// If a replace directive exists in the Gonum go.mod, the replace will +// be reported in the version in the following format: +// "version=>[replace-path] [replace-version]" +// and the replace sum will be returned in place of the original sum. +// +// The exact version format returned by Version may change in future. +func Version() (version, sum string) { + b, ok := debug.ReadBuildInfo() + if !ok { + return "", "" + } + for _, m := range b.Deps { + if m.Path == root { + if m.Replace != nil { + switch { + case m.Replace.Version != "" && m.Replace.Path != "": + return fmt.Sprintf("%s=>%s %s", m.Version, m.Replace.Path, m.Replace.Version), m.Replace.Sum + case m.Replace.Version != "": + return fmt.Sprintf("%s=>%s", m.Version, m.Replace.Version), m.Replace.Sum + case m.Replace.Path != "": + return fmt.Sprintf("%s=>%s", m.Version, m.Replace.Path), m.Replace.Sum + default: + return m.Version + "*", m.Sum + "*" + } + } + return m.Version, m.Sum + } + } + return "", "" +} diff --git a/vendor/google.golang.org/genproto/LICENSE b/vendor/google.golang.org/genproto/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/google.golang.org/genproto/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_asset.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_asset.pb.go new file mode 100644 index 0000000..4ed5cb8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_asset.pb.go @@ -0,0 +1,235 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/ad_asset.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A text asset used inside an ad. +type AdTextAsset struct { + // Asset text. + Text *wrappers.StringValue `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The pinned field of the asset. This restricts the asset to only serve + // within this field. Multiple assets can be pinned to the same field. An + // asset that is unpinned or pinned to a different field will not serve in a + // field where some other asset has been pinned. + PinnedField enums.ServedAssetFieldTypeEnum_ServedAssetFieldType `protobuf:"varint,2,opt,name=pinned_field,json=pinnedField,proto3,enum=google.ads.googleads.v1.enums.ServedAssetFieldTypeEnum_ServedAssetFieldType" json:"pinned_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdTextAsset) Reset() { *m = AdTextAsset{} } +func (m *AdTextAsset) String() string { return proto.CompactTextString(m) } +func (*AdTextAsset) ProtoMessage() {} +func (*AdTextAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_asset_07bce9e3ba61c559, []int{0} +} +func (m *AdTextAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdTextAsset.Unmarshal(m, b) +} +func (m *AdTextAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdTextAsset.Marshal(b, m, deterministic) +} +func (dst *AdTextAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdTextAsset.Merge(dst, src) +} +func (m *AdTextAsset) XXX_Size() int { + return xxx_messageInfo_AdTextAsset.Size(m) +} +func (m *AdTextAsset) XXX_DiscardUnknown() { + xxx_messageInfo_AdTextAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_AdTextAsset proto.InternalMessageInfo + +func (m *AdTextAsset) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *AdTextAsset) GetPinnedField() enums.ServedAssetFieldTypeEnum_ServedAssetFieldType { + if m != nil { + return m.PinnedField + } + return enums.ServedAssetFieldTypeEnum_UNSPECIFIED +} + +// An image asset used inside an ad. +type AdImageAsset struct { + // The Asset resource name of this image. 
+ Asset *wrappers.StringValue `protobuf:"bytes,1,opt,name=asset,proto3" json:"asset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdImageAsset) Reset() { *m = AdImageAsset{} } +func (m *AdImageAsset) String() string { return proto.CompactTextString(m) } +func (*AdImageAsset) ProtoMessage() {} +func (*AdImageAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_asset_07bce9e3ba61c559, []int{1} +} +func (m *AdImageAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdImageAsset.Unmarshal(m, b) +} +func (m *AdImageAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdImageAsset.Marshal(b, m, deterministic) +} +func (dst *AdImageAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdImageAsset.Merge(dst, src) +} +func (m *AdImageAsset) XXX_Size() int { + return xxx_messageInfo_AdImageAsset.Size(m) +} +func (m *AdImageAsset) XXX_DiscardUnknown() { + xxx_messageInfo_AdImageAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_AdImageAsset proto.InternalMessageInfo + +func (m *AdImageAsset) GetAsset() *wrappers.StringValue { + if m != nil { + return m.Asset + } + return nil +} + +// A video asset used inside an ad. +type AdVideoAsset struct { + // The Asset resource name of this video. + Asset *wrappers.StringValue `protobuf:"bytes,1,opt,name=asset,proto3" json:"asset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdVideoAsset) Reset() { *m = AdVideoAsset{} } +func (m *AdVideoAsset) String() string { return proto.CompactTextString(m) } +func (*AdVideoAsset) ProtoMessage() {} +func (*AdVideoAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_asset_07bce9e3ba61c559, []int{2} +} +func (m *AdVideoAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdVideoAsset.Unmarshal(m, b) +} +func (m *AdVideoAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdVideoAsset.Marshal(b, m, deterministic) +} +func (dst *AdVideoAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdVideoAsset.Merge(dst, src) +} +func (m *AdVideoAsset) XXX_Size() int { + return xxx_messageInfo_AdVideoAsset.Size(m) +} +func (m *AdVideoAsset) XXX_DiscardUnknown() { + xxx_messageInfo_AdVideoAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_AdVideoAsset proto.InternalMessageInfo + +func (m *AdVideoAsset) GetAsset() *wrappers.StringValue { + if m != nil { + return m.Asset + } + return nil +} + +// A media bundle asset used inside an ad. +type AdMediaBundleAsset struct { + // The Asset resource name of this media bundle. 
+ Asset *wrappers.StringValue `protobuf:"bytes,1,opt,name=asset,proto3" json:"asset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdMediaBundleAsset) Reset() { *m = AdMediaBundleAsset{} } +func (m *AdMediaBundleAsset) String() string { return proto.CompactTextString(m) } +func (*AdMediaBundleAsset) ProtoMessage() {} +func (*AdMediaBundleAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_asset_07bce9e3ba61c559, []int{3} +} +func (m *AdMediaBundleAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdMediaBundleAsset.Unmarshal(m, b) +} +func (m *AdMediaBundleAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdMediaBundleAsset.Marshal(b, m, deterministic) +} +func (dst *AdMediaBundleAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdMediaBundleAsset.Merge(dst, src) +} +func (m *AdMediaBundleAsset) XXX_Size() int { + return xxx_messageInfo_AdMediaBundleAsset.Size(m) +} +func (m *AdMediaBundleAsset) XXX_DiscardUnknown() { + xxx_messageInfo_AdMediaBundleAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_AdMediaBundleAsset proto.InternalMessageInfo + +func (m *AdMediaBundleAsset) GetAsset() *wrappers.StringValue { + if m != nil { + return m.Asset + } + return nil +} + +func init() { + proto.RegisterType((*AdTextAsset)(nil), "google.ads.googleads.v1.common.AdTextAsset") + proto.RegisterType((*AdImageAsset)(nil), "google.ads.googleads.v1.common.AdImageAsset") + proto.RegisterType((*AdVideoAsset)(nil), "google.ads.googleads.v1.common.AdVideoAsset") + proto.RegisterType((*AdMediaBundleAsset)(nil), "google.ads.googleads.v1.common.AdMediaBundleAsset") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/ad_asset.proto", fileDescriptor_ad_asset_07bce9e3ba61c559) +} + +var fileDescriptor_ad_asset_07bce9e3ba61c559 = []byte{ + // 401 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xcd, 0x6a, 0xdc, 0x30, + 0x10, 0xc7, 0xf1, 0xf6, 0xe3, 0xa0, 0x5d, 0x7a, 0xf0, 0x29, 0x84, 0x10, 0x82, 0x4f, 0xb9, 0x54, + 0xaa, 0xb7, 0x37, 0xe5, 0x24, 0xf7, 0x23, 0x2d, 0xb4, 0x10, 0x92, 0xe0, 0x43, 0x31, 0x2c, 0x4a, + 0x34, 0x11, 0x02, 0x5b, 0x12, 0x96, 0xbc, 0x4d, 0x5e, 0xa7, 0xc7, 0x9e, 0xfa, 0x1c, 0x7d, 0x91, + 0x42, 0x9f, 0xa2, 0x58, 0x63, 0xef, 0xa9, 0x5b, 0x4a, 0x4e, 0x3b, 0x2b, 0xfd, 0xfc, 0x9b, 0xff, + 0x88, 0x21, 0x2f, 0xb5, 0x73, 0xba, 0x05, 0x26, 0x55, 0x60, 0x58, 0x8e, 0xd5, 0xb6, 0x64, 0xb7, + 0xae, 0xeb, 0x9c, 0x65, 0x52, 0x6d, 0x64, 0x08, 0x10, 0xa9, 0xef, 0x5d, 0x74, 0xf9, 0x31, 0x32, + 0x54, 0xaa, 0x40, 0x77, 0x38, 0xdd, 0x96, 0x14, 0xf1, 0xc3, 0xb3, 0x7d, 0x3a, 0xb0, 0x43, 0x17, + 0x58, 0x80, 0x7e, 0x0b, 0x93, 0x71, 0x73, 0x67, 0xa0, 0x55, 0x9b, 0xf8, 0xe0, 0x01, 0xe5, 0x87, + 0x93, 0x9c, 0xa5, 0x7f, 0x37, 0xc3, 0x1d, 0xfb, 0xda, 0x4b, 0xef, 0xa1, 0x0f, 0xd3, 0xfd, 0xd1, + 0x2c, 0xf7, 0x86, 0x49, 0x6b, 0x5d, 0x94, 0xd1, 0x38, 0x3b, 0xdd, 0x16, 0x3f, 0x32, 0xb2, 0x14, + 0xea, 0x1a, 0xee, 0xa3, 0x18, 0xf5, 0xf9, 0x2b, 0xf2, 0x34, 0xc2, 0x7d, 0x3c, 0xc8, 0x4e, 0xb2, + 0xd3, 0xe5, 0xfa, 0x68, 0x8a, 0x4b, 0x67, 0x39, 0xbd, 0x8a, 0xbd, 0xb1, 0xba, 0x96, 0xed, 0x00, + 0x97, 0x89, 0xcc, 0x1d, 0x59, 0x79, 0x63, 0x2d, 0x28, 0x8c, 0x76, 0xb0, 0x38, 0xc9, 0x4e, 0x5f, + 0xac, 0x3f, 0xd1, 0x7d, 0x33, 0xa7, 0x99, 0xe8, 0x55, 0x9a, 0x29, 0xf5, 0x7c, 0x3f, 0x7e, 0x76, + 0xfd, 0xe0, 0xe1, 0x9d, 0x1d, 0xba, 0xbf, 0x5e, 0x5c, 0x2e, 0xb1, 0x43, 0x3a, 0x28, 0x2a, 0xb2, + 0x12, 0xea, 
0x63, 0x27, 0x35, 0x60, 0xe4, 0x35, 0x79, 0x96, 0x9e, 0xe6, 0xbf, 0x32, 0x23, 0x8a, + 0x8e, 0xda, 0x28, 0x70, 0x8f, 0x77, 0x7c, 0x20, 0xb9, 0x50, 0x9f, 0x41, 0x19, 0x59, 0x0d, 0x56, + 0xb5, 0x8f, 0x4f, 0x53, 0xfd, 0xca, 0x48, 0x71, 0xeb, 0x3a, 0xfa, 0xef, 0x35, 0xa9, 0x56, 0x02, + 0xdf, 0xe5, 0x62, 0x54, 0x5d, 0x64, 0x5f, 0xde, 0x4e, 0xbc, 0x76, 0xad, 0xb4, 0x9a, 0xba, 0x5e, + 0x33, 0x0d, 0x36, 0x35, 0x9a, 0xd7, 0xc8, 0x9b, 0xb0, 0x6f, 0x49, 0xcf, 0xf0, 0xe7, 0xdb, 0xe2, + 0xc9, 0xb9, 0x10, 0xdf, 0x17, 0xc7, 0xe7, 0x28, 0x13, 0x2a, 0x50, 0x2c, 0xc7, 0xaa, 0x2e, 0xe9, + 0x9b, 0x84, 0xfd, 0x9c, 0x81, 0x46, 0xa8, 0xd0, 0xec, 0x80, 0xa6, 0x2e, 0x1b, 0x04, 0x7e, 0x2f, + 0x0a, 0x3c, 0xe5, 0x5c, 0xa8, 0xc0, 0xf9, 0x0e, 0xe1, 0xbc, 0x2e, 0x39, 0x47, 0xe8, 0xe6, 0x79, + 0x4a, 0xf7, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbe, 0x2c, 0x5b, 0x01, 0x41, 0x03, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_type_infos.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_type_infos.pb.go new file mode 100644 index 0000000..a11c42f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/ad_type_infos.pb.go @@ -0,0 +1,2449 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/ad_type_infos.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A text ad. +type TextAdInfo struct { + // The headline of the ad. + Headline *wrappers.StringValue `protobuf:"bytes,1,opt,name=headline,proto3" json:"headline,omitempty"` + // The first line of the ad's description. + Description1 *wrappers.StringValue `protobuf:"bytes,2,opt,name=description1,proto3" json:"description1,omitempty"` + // The second line of the ad's description. 
+ Description2 *wrappers.StringValue `protobuf:"bytes,3,opt,name=description2,proto3" json:"description2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAdInfo) Reset() { *m = TextAdInfo{} } +func (m *TextAdInfo) String() string { return proto.CompactTextString(m) } +func (*TextAdInfo) ProtoMessage() {} +func (*TextAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{0} +} +func (m *TextAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAdInfo.Unmarshal(m, b) +} +func (m *TextAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAdInfo.Marshal(b, m, deterministic) +} +func (dst *TextAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAdInfo.Merge(dst, src) +} +func (m *TextAdInfo) XXX_Size() int { + return xxx_messageInfo_TextAdInfo.Size(m) +} +func (m *TextAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_TextAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAdInfo proto.InternalMessageInfo + +func (m *TextAdInfo) GetHeadline() *wrappers.StringValue { + if m != nil { + return m.Headline + } + return nil +} + +func (m *TextAdInfo) GetDescription1() *wrappers.StringValue { + if m != nil { + return m.Description1 + } + return nil +} + +func (m *TextAdInfo) GetDescription2() *wrappers.StringValue { + if m != nil { + return m.Description2 + } + return nil +} + +// An expanded text ad. +type ExpandedTextAdInfo struct { + // The first part of the ad's headline. + HeadlinePart1 *wrappers.StringValue `protobuf:"bytes,1,opt,name=headline_part1,json=headlinePart1,proto3" json:"headline_part1,omitempty"` + // The second part of the ad's headline. + HeadlinePart2 *wrappers.StringValue `protobuf:"bytes,2,opt,name=headline_part2,json=headlinePart2,proto3" json:"headline_part2,omitempty"` + // The third part of the ad's headline. + HeadlinePart3 *wrappers.StringValue `protobuf:"bytes,6,opt,name=headline_part3,json=headlinePart3,proto3" json:"headline_part3,omitempty"` + // The description of the ad. + Description *wrappers.StringValue `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The second description of the ad. + Description2 *wrappers.StringValue `protobuf:"bytes,7,opt,name=description2,proto3" json:"description2,omitempty"` + // The text that can appear alongside the ad's displayed URL. + Path1 *wrappers.StringValue `protobuf:"bytes,4,opt,name=path1,proto3" json:"path1,omitempty"` + // Additional text that can appear alongside the ad's displayed URL. 
+ Path2 *wrappers.StringValue `protobuf:"bytes,5,opt,name=path2,proto3" json:"path2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExpandedTextAdInfo) Reset() { *m = ExpandedTextAdInfo{} } +func (m *ExpandedTextAdInfo) String() string { return proto.CompactTextString(m) } +func (*ExpandedTextAdInfo) ProtoMessage() {} +func (*ExpandedTextAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{1} +} +func (m *ExpandedTextAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExpandedTextAdInfo.Unmarshal(m, b) +} +func (m *ExpandedTextAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExpandedTextAdInfo.Marshal(b, m, deterministic) +} +func (dst *ExpandedTextAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExpandedTextAdInfo.Merge(dst, src) +} +func (m *ExpandedTextAdInfo) XXX_Size() int { + return xxx_messageInfo_ExpandedTextAdInfo.Size(m) +} +func (m *ExpandedTextAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExpandedTextAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExpandedTextAdInfo proto.InternalMessageInfo + +func (m *ExpandedTextAdInfo) GetHeadlinePart1() *wrappers.StringValue { + if m != nil { + return m.HeadlinePart1 + } + return nil +} + +func (m *ExpandedTextAdInfo) GetHeadlinePart2() *wrappers.StringValue { + if m != nil { + return m.HeadlinePart2 + } + return nil +} + +func (m *ExpandedTextAdInfo) GetHeadlinePart3() *wrappers.StringValue { + if m != nil { + return m.HeadlinePart3 + } + return nil +} + +func (m *ExpandedTextAdInfo) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *ExpandedTextAdInfo) GetDescription2() *wrappers.StringValue { + if m != nil { + return m.Description2 + } + return nil +} + +func (m *ExpandedTextAdInfo) GetPath1() *wrappers.StringValue { + if m != nil { + return m.Path1 + } + return nil +} + +func (m *ExpandedTextAdInfo) GetPath2() *wrappers.StringValue { + if m != nil { + return m.Path2 + } + return nil +} + +// A call-only ad. +type CallOnlyAdInfo struct { + // The country code in the ad. + CountryCode *wrappers.StringValue `protobuf:"bytes,1,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // The phone number in the ad. + PhoneNumber *wrappers.StringValue `protobuf:"bytes,2,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + // The business name in the ad. + BusinessName *wrappers.StringValue `protobuf:"bytes,3,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // First headline in the ad. + Headline1 *wrappers.StringValue `protobuf:"bytes,11,opt,name=headline1,proto3" json:"headline1,omitempty"` + // Second headline in the ad. + Headline2 *wrappers.StringValue `protobuf:"bytes,12,opt,name=headline2,proto3" json:"headline2,omitempty"` + // The first line of the ad's description. + Description1 *wrappers.StringValue `protobuf:"bytes,4,opt,name=description1,proto3" json:"description1,omitempty"` + // The second line of the ad's description. + Description2 *wrappers.StringValue `protobuf:"bytes,5,opt,name=description2,proto3" json:"description2,omitempty"` + // Whether to enable call tracking for the creative. Enabling call + // tracking also enables call conversions. 
+ CallTracked *wrappers.BoolValue `protobuf:"bytes,6,opt,name=call_tracked,json=callTracked,proto3" json:"call_tracked,omitempty"` + // Whether to disable call conversion for the creative. + // If set to `true`, disables call conversions even when `call_tracked` is + // `true`. + // If `call_tracked` is `false`, this field is ignored. + DisableCallConversion *wrappers.BoolValue `protobuf:"bytes,7,opt,name=disable_call_conversion,json=disableCallConversion,proto3" json:"disable_call_conversion,omitempty"` + // The URL to be used for phone number verification. + PhoneNumberVerificationUrl *wrappers.StringValue `protobuf:"bytes,8,opt,name=phone_number_verification_url,json=phoneNumberVerificationUrl,proto3" json:"phone_number_verification_url,omitempty"` + // The conversion action to attribute a call conversion to. If not set a + // default conversion action is used. This field only has effect if + // call_tracked is set to true. Otherwise this field is ignored. + ConversionAction *wrappers.StringValue `protobuf:"bytes,9,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The call conversion behavior of this call only ad. It can use its own call + // conversion setting, inherit the account level setting, or be disabled. + ConversionReportingState enums.CallConversionReportingStateEnum_CallConversionReportingState `protobuf:"varint,10,opt,name=conversion_reporting_state,json=conversionReportingState,proto3,enum=google.ads.googleads.v1.enums.CallConversionReportingStateEnum_CallConversionReportingState" json:"conversion_reporting_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallOnlyAdInfo) Reset() { *m = CallOnlyAdInfo{} } +func (m *CallOnlyAdInfo) String() string { return proto.CompactTextString(m) } +func (*CallOnlyAdInfo) ProtoMessage() {} +func (*CallOnlyAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{2} +} +func (m *CallOnlyAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallOnlyAdInfo.Unmarshal(m, b) +} +func (m *CallOnlyAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallOnlyAdInfo.Marshal(b, m, deterministic) +} +func (dst *CallOnlyAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallOnlyAdInfo.Merge(dst, src) +} +func (m *CallOnlyAdInfo) XXX_Size() int { + return xxx_messageInfo_CallOnlyAdInfo.Size(m) +} +func (m *CallOnlyAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CallOnlyAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CallOnlyAdInfo proto.InternalMessageInfo + +func (m *CallOnlyAdInfo) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *CallOnlyAdInfo) GetPhoneNumber() *wrappers.StringValue { + if m != nil { + return m.PhoneNumber + } + return nil +} + +func (m *CallOnlyAdInfo) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *CallOnlyAdInfo) GetHeadline1() *wrappers.StringValue { + if m != nil { + return m.Headline1 + } + return nil +} + +func (m *CallOnlyAdInfo) GetHeadline2() *wrappers.StringValue { + if m != nil { + return m.Headline2 + } + return nil +} + +func (m *CallOnlyAdInfo) GetDescription1() *wrappers.StringValue { + if m != nil { + return m.Description1 + } + return nil +} + +func (m *CallOnlyAdInfo) GetDescription2() *wrappers.StringValue { + if m != nil { + return 
m.Description2 + } + return nil +} + +func (m *CallOnlyAdInfo) GetCallTracked() *wrappers.BoolValue { + if m != nil { + return m.CallTracked + } + return nil +} + +func (m *CallOnlyAdInfo) GetDisableCallConversion() *wrappers.BoolValue { + if m != nil { + return m.DisableCallConversion + } + return nil +} + +func (m *CallOnlyAdInfo) GetPhoneNumberVerificationUrl() *wrappers.StringValue { + if m != nil { + return m.PhoneNumberVerificationUrl + } + return nil +} + +func (m *CallOnlyAdInfo) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *CallOnlyAdInfo) GetConversionReportingState() enums.CallConversionReportingStateEnum_CallConversionReportingState { + if m != nil { + return m.ConversionReportingState + } + return enums.CallConversionReportingStateEnum_UNSPECIFIED +} + +// An expanded dynamic search ad. +type ExpandedDynamicSearchAdInfo struct { + // The description of the ad. + Description *wrappers.StringValue `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExpandedDynamicSearchAdInfo) Reset() { *m = ExpandedDynamicSearchAdInfo{} } +func (m *ExpandedDynamicSearchAdInfo) String() string { return proto.CompactTextString(m) } +func (*ExpandedDynamicSearchAdInfo) ProtoMessage() {} +func (*ExpandedDynamicSearchAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{3} +} +func (m *ExpandedDynamicSearchAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExpandedDynamicSearchAdInfo.Unmarshal(m, b) +} +func (m *ExpandedDynamicSearchAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExpandedDynamicSearchAdInfo.Marshal(b, m, deterministic) +} +func (dst *ExpandedDynamicSearchAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExpandedDynamicSearchAdInfo.Merge(dst, src) +} +func (m *ExpandedDynamicSearchAdInfo) XXX_Size() int { + return xxx_messageInfo_ExpandedDynamicSearchAdInfo.Size(m) +} +func (m *ExpandedDynamicSearchAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExpandedDynamicSearchAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExpandedDynamicSearchAdInfo proto.InternalMessageInfo + +func (m *ExpandedDynamicSearchAdInfo) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +// A hotel ad. 
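The ad-info messages above (AdTextAsset, TextAdInfo, CallOnlyAdInfo and the rest) expose optional scalars through the protobuf wrapper types rather than plain Go strings and bools, which is why every field is a pointer and every generated getter is nil-safe; the same applies to AdTextAsset's Text and its PinnedField enum. As a sketch of how a caller might populate one of them, here CallOnlyAdInfo with the call-tracking flags described in its field comments; all field values are illustrative:

package main

import (
    "fmt"

    wrappers "github.com/golang/protobuf/ptypes/wrappers"
    "google.golang.org/genproto/googleapis/ads/googleads/v1/common"
)

func main() {
    ad := &common.CallOnlyAdInfo{
        CountryCode:  &wrappers.StringValue{Value: "US"},
        PhoneNumber:  &wrappers.StringValue{Value: "8005550100"},
        BusinessName: &wrappers.StringValue{Value: "Example Plumbing"},
        Headline1:    &wrappers.StringValue{Value: "24/7 call-outs"},
        Description1: &wrappers.StringValue{Value: "Call now for a free quote"},
        // call_tracked enables call conversions; disable_call_conversion is
        // only honoured when call_tracked is true (see the field comments above).
        CallTracked:           &wrappers.BoolValue{Value: true},
        DisableCallConversion: &wrappers.BoolValue{Value: false},
    }
    // Generated getters tolerate nil receivers and nil fields, so chained access is safe.
    fmt.Println(ad.GetPhoneNumber().GetValue(), ad.GetCallTracked().GetValue())
}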
+type HotelAdInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelAdInfo) Reset() { *m = HotelAdInfo{} } +func (m *HotelAdInfo) String() string { return proto.CompactTextString(m) } +func (*HotelAdInfo) ProtoMessage() {} +func (*HotelAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{4} +} +func (m *HotelAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelAdInfo.Unmarshal(m, b) +} +func (m *HotelAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelAdInfo.Marshal(b, m, deterministic) +} +func (dst *HotelAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelAdInfo.Merge(dst, src) +} +func (m *HotelAdInfo) XXX_Size() int { + return xxx_messageInfo_HotelAdInfo.Size(m) +} +func (m *HotelAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelAdInfo proto.InternalMessageInfo + +// A Smart Shopping ad. +type ShoppingSmartAdInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShoppingSmartAdInfo) Reset() { *m = ShoppingSmartAdInfo{} } +func (m *ShoppingSmartAdInfo) String() string { return proto.CompactTextString(m) } +func (*ShoppingSmartAdInfo) ProtoMessage() {} +func (*ShoppingSmartAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{5} +} +func (m *ShoppingSmartAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShoppingSmartAdInfo.Unmarshal(m, b) +} +func (m *ShoppingSmartAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShoppingSmartAdInfo.Marshal(b, m, deterministic) +} +func (dst *ShoppingSmartAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShoppingSmartAdInfo.Merge(dst, src) +} +func (m *ShoppingSmartAdInfo) XXX_Size() int { + return xxx_messageInfo_ShoppingSmartAdInfo.Size(m) +} +func (m *ShoppingSmartAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ShoppingSmartAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ShoppingSmartAdInfo proto.InternalMessageInfo + +// A standard Shopping ad. +type ShoppingProductAdInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShoppingProductAdInfo) Reset() { *m = ShoppingProductAdInfo{} } +func (m *ShoppingProductAdInfo) String() string { return proto.CompactTextString(m) } +func (*ShoppingProductAdInfo) ProtoMessage() {} +func (*ShoppingProductAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{6} +} +func (m *ShoppingProductAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShoppingProductAdInfo.Unmarshal(m, b) +} +func (m *ShoppingProductAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShoppingProductAdInfo.Marshal(b, m, deterministic) +} +func (dst *ShoppingProductAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShoppingProductAdInfo.Merge(dst, src) +} +func (m *ShoppingProductAdInfo) XXX_Size() int { + return xxx_messageInfo_ShoppingProductAdInfo.Size(m) +} +func (m *ShoppingProductAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ShoppingProductAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ShoppingProductAdInfo proto.InternalMessageInfo + +// A Gmail ad. 
+type GmailAdInfo struct { + // The Gmail teaser. + Teaser *GmailTeaser `protobuf:"bytes,1,opt,name=teaser,proto3" json:"teaser,omitempty"` + // The MediaFile resource name of the header image. Valid image types are GIF, + // JPEG and PNG. The minimum size is 300x100 pixels and the aspect ratio must + // be between 3:1 and 5:1 (+-1%). + HeaderImage *wrappers.StringValue `protobuf:"bytes,2,opt,name=header_image,json=headerImage,proto3" json:"header_image,omitempty"` + // The MediaFile resource name of the marketing image. Valid image types are + // GIF, JPEG and PNG. The image must either be landscape with a minimum size + // of 600x314 pixels and aspect ratio of 600:314 (+-1%) or square with a + // minimum size of 300x300 pixels and aspect ratio of 1:1 (+-1%) + MarketingImage *wrappers.StringValue `protobuf:"bytes,3,opt,name=marketing_image,json=marketingImage,proto3" json:"marketing_image,omitempty"` + // Headline of the marketing image. + MarketingImageHeadline *wrappers.StringValue `protobuf:"bytes,4,opt,name=marketing_image_headline,json=marketingImageHeadline,proto3" json:"marketing_image_headline,omitempty"` + // Description of the marketing image. + MarketingImageDescription *wrappers.StringValue `protobuf:"bytes,5,opt,name=marketing_image_description,json=marketingImageDescription,proto3" json:"marketing_image_description,omitempty"` + // Display-call-to-action of the marketing image. + MarketingImageDisplayCallToAction *DisplayCallToAction `protobuf:"bytes,6,opt,name=marketing_image_display_call_to_action,json=marketingImageDisplayCallToAction,proto3" json:"marketing_image_display_call_to_action,omitempty"` + // Product images. Up to 15 images are supported. + ProductImages []*ProductImage `protobuf:"bytes,7,rep,name=product_images,json=productImages,proto3" json:"product_images,omitempty"` + // Product videos. Up to 7 videos are supported. At least one product video + // or a marketing image must be specified. 
+ ProductVideos []*ProductVideo `protobuf:"bytes,8,rep,name=product_videos,json=productVideos,proto3" json:"product_videos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GmailAdInfo) Reset() { *m = GmailAdInfo{} } +func (m *GmailAdInfo) String() string { return proto.CompactTextString(m) } +func (*GmailAdInfo) ProtoMessage() {} +func (*GmailAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{7} +} +func (m *GmailAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GmailAdInfo.Unmarshal(m, b) +} +func (m *GmailAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GmailAdInfo.Marshal(b, m, deterministic) +} +func (dst *GmailAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GmailAdInfo.Merge(dst, src) +} +func (m *GmailAdInfo) XXX_Size() int { + return xxx_messageInfo_GmailAdInfo.Size(m) +} +func (m *GmailAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GmailAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GmailAdInfo proto.InternalMessageInfo + +func (m *GmailAdInfo) GetTeaser() *GmailTeaser { + if m != nil { + return m.Teaser + } + return nil +} + +func (m *GmailAdInfo) GetHeaderImage() *wrappers.StringValue { + if m != nil { + return m.HeaderImage + } + return nil +} + +func (m *GmailAdInfo) GetMarketingImage() *wrappers.StringValue { + if m != nil { + return m.MarketingImage + } + return nil +} + +func (m *GmailAdInfo) GetMarketingImageHeadline() *wrappers.StringValue { + if m != nil { + return m.MarketingImageHeadline + } + return nil +} + +func (m *GmailAdInfo) GetMarketingImageDescription() *wrappers.StringValue { + if m != nil { + return m.MarketingImageDescription + } + return nil +} + +func (m *GmailAdInfo) GetMarketingImageDisplayCallToAction() *DisplayCallToAction { + if m != nil { + return m.MarketingImageDisplayCallToAction + } + return nil +} + +func (m *GmailAdInfo) GetProductImages() []*ProductImage { + if m != nil { + return m.ProductImages + } + return nil +} + +func (m *GmailAdInfo) GetProductVideos() []*ProductVideo { + if m != nil { + return m.ProductVideos + } + return nil +} + +// Gmail teaser data. The teaser is a small header that acts as an invitation +// to view the rest of the ad (the body). +type GmailTeaser struct { + // Headline of the teaser. + Headline *wrappers.StringValue `protobuf:"bytes,1,opt,name=headline,proto3" json:"headline,omitempty"` + // Description of the teaser. + Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Business name of the advertiser. + BusinessName *wrappers.StringValue `protobuf:"bytes,3,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // The MediaFile resource name of the logo image. Valid image types are GIF, + // JPEG and PNG. The minimum size is 144x144 pixels and the aspect ratio must + // be 1:1 (+-1%). 
+ LogoImage *wrappers.StringValue `protobuf:"bytes,4,opt,name=logo_image,json=logoImage,proto3" json:"logo_image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GmailTeaser) Reset() { *m = GmailTeaser{} } +func (m *GmailTeaser) String() string { return proto.CompactTextString(m) } +func (*GmailTeaser) ProtoMessage() {} +func (*GmailTeaser) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{8} +} +func (m *GmailTeaser) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GmailTeaser.Unmarshal(m, b) +} +func (m *GmailTeaser) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GmailTeaser.Marshal(b, m, deterministic) +} +func (dst *GmailTeaser) XXX_Merge(src proto.Message) { + xxx_messageInfo_GmailTeaser.Merge(dst, src) +} +func (m *GmailTeaser) XXX_Size() int { + return xxx_messageInfo_GmailTeaser.Size(m) +} +func (m *GmailTeaser) XXX_DiscardUnknown() { + xxx_messageInfo_GmailTeaser.DiscardUnknown(m) +} + +var xxx_messageInfo_GmailTeaser proto.InternalMessageInfo + +func (m *GmailTeaser) GetHeadline() *wrappers.StringValue { + if m != nil { + return m.Headline + } + return nil +} + +func (m *GmailTeaser) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *GmailTeaser) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *GmailTeaser) GetLogoImage() *wrappers.StringValue { + if m != nil { + return m.LogoImage + } + return nil +} + +// Data for display call to action. The call to action is a piece of the ad +// that prompts the user to do something. Like clicking a link or making a phone +// call. +type DisplayCallToAction struct { + // Text for the display-call-to-action. + Text *wrappers.StringValue `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Text color for the display-call-to-action in hexadecimal, e.g. #ffffff for + // white. + TextColor *wrappers.StringValue `protobuf:"bytes,2,opt,name=text_color,json=textColor,proto3" json:"text_color,omitempty"` + // Identifies the url collection in the ad.url_collections field. If not set + // the url defaults to final_url. 
+ UrlCollectionId *wrappers.StringValue `protobuf:"bytes,3,opt,name=url_collection_id,json=urlCollectionId,proto3" json:"url_collection_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisplayCallToAction) Reset() { *m = DisplayCallToAction{} } +func (m *DisplayCallToAction) String() string { return proto.CompactTextString(m) } +func (*DisplayCallToAction) ProtoMessage() {} +func (*DisplayCallToAction) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{9} +} +func (m *DisplayCallToAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisplayCallToAction.Unmarshal(m, b) +} +func (m *DisplayCallToAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisplayCallToAction.Marshal(b, m, deterministic) +} +func (dst *DisplayCallToAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisplayCallToAction.Merge(dst, src) +} +func (m *DisplayCallToAction) XXX_Size() int { + return xxx_messageInfo_DisplayCallToAction.Size(m) +} +func (m *DisplayCallToAction) XXX_DiscardUnknown() { + xxx_messageInfo_DisplayCallToAction.DiscardUnknown(m) +} + +var xxx_messageInfo_DisplayCallToAction proto.InternalMessageInfo + +func (m *DisplayCallToAction) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *DisplayCallToAction) GetTextColor() *wrappers.StringValue { + if m != nil { + return m.TextColor + } + return nil +} + +func (m *DisplayCallToAction) GetUrlCollectionId() *wrappers.StringValue { + if m != nil { + return m.UrlCollectionId + } + return nil +} + +// Product image specific data. +type ProductImage struct { + // The MediaFile resource name of the product image. Valid image types are + // GIF, JPEG and PNG. The minimum size is 300x300 pixels and the aspect ratio + // must be 1:1 (+-1%). + ProductImage *wrappers.StringValue `protobuf:"bytes,1,opt,name=product_image,json=productImage,proto3" json:"product_image,omitempty"` + // Description of the product. + Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Display-call-to-action of the product image. 
+ DisplayCallToAction *DisplayCallToAction `protobuf:"bytes,3,opt,name=display_call_to_action,json=displayCallToAction,proto3" json:"display_call_to_action,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductImage) Reset() { *m = ProductImage{} } +func (m *ProductImage) String() string { return proto.CompactTextString(m) } +func (*ProductImage) ProtoMessage() {} +func (*ProductImage) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{10} +} +func (m *ProductImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductImage.Unmarshal(m, b) +} +func (m *ProductImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductImage.Marshal(b, m, deterministic) +} +func (dst *ProductImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductImage.Merge(dst, src) +} +func (m *ProductImage) XXX_Size() int { + return xxx_messageInfo_ProductImage.Size(m) +} +func (m *ProductImage) XXX_DiscardUnknown() { + xxx_messageInfo_ProductImage.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductImage proto.InternalMessageInfo + +func (m *ProductImage) GetProductImage() *wrappers.StringValue { + if m != nil { + return m.ProductImage + } + return nil +} + +func (m *ProductImage) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *ProductImage) GetDisplayCallToAction() *DisplayCallToAction { + if m != nil { + return m.DisplayCallToAction + } + return nil +} + +// Product video specific data. +type ProductVideo struct { + // The MediaFile resource name of a video which must be hosted on YouTube. + ProductVideo *wrappers.StringValue `protobuf:"bytes,1,opt,name=product_video,json=productVideo,proto3" json:"product_video,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductVideo) Reset() { *m = ProductVideo{} } +func (m *ProductVideo) String() string { return proto.CompactTextString(m) } +func (*ProductVideo) ProtoMessage() {} +func (*ProductVideo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{11} +} +func (m *ProductVideo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductVideo.Unmarshal(m, b) +} +func (m *ProductVideo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductVideo.Marshal(b, m, deterministic) +} +func (dst *ProductVideo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductVideo.Merge(dst, src) +} +func (m *ProductVideo) XXX_Size() int { + return xxx_messageInfo_ProductVideo.Size(m) +} +func (m *ProductVideo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductVideo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductVideo proto.InternalMessageInfo + +func (m *ProductVideo) GetProductVideo() *wrappers.StringValue { + if m != nil { + return m.ProductVideo + } + return nil +} + +// An image ad. +type ImageAdInfo struct { + // Width in pixels of the full size image. + PixelWidth *wrappers.Int64Value `protobuf:"bytes,4,opt,name=pixel_width,json=pixelWidth,proto3" json:"pixel_width,omitempty"` + // Height in pixels of the full size image. + PixelHeight *wrappers.Int64Value `protobuf:"bytes,5,opt,name=pixel_height,json=pixelHeight,proto3" json:"pixel_height,omitempty"` + // URL of the full size image. 
+ ImageUrl *wrappers.StringValue `protobuf:"bytes,6,opt,name=image_url,json=imageUrl,proto3" json:"image_url,omitempty"` + // Width in pixels of the preview size image. + PreviewPixelWidth *wrappers.Int64Value `protobuf:"bytes,7,opt,name=preview_pixel_width,json=previewPixelWidth,proto3" json:"preview_pixel_width,omitempty"` + // Height in pixels of the preview size image. + PreviewPixelHeight *wrappers.Int64Value `protobuf:"bytes,8,opt,name=preview_pixel_height,json=previewPixelHeight,proto3" json:"preview_pixel_height,omitempty"` + // URL of the preview size image. + PreviewImageUrl *wrappers.StringValue `protobuf:"bytes,9,opt,name=preview_image_url,json=previewImageUrl,proto3" json:"preview_image_url,omitempty"` + // The mime type of the image. + MimeType enums.MimeTypeEnum_MimeType `protobuf:"varint,10,opt,name=mime_type,json=mimeType,proto3,enum=google.ads.googleads.v1.enums.MimeTypeEnum_MimeType" json:"mime_type,omitempty"` + // The name of the image. If the image was created from a MediaFile, this is + // the MediaFile's name. If the image was created from bytes, this is empty. + Name *wrappers.StringValue `protobuf:"bytes,11,opt,name=name,proto3" json:"name,omitempty"` + // The image to create the ImageAd from. This can be specified in one of + // two ways. + // 1. An existing MediaFile resource. + // 2. The raw image data as bytes. + // + // Types that are valid to be assigned to Image: + // *ImageAdInfo_MediaFile + // *ImageAdInfo_Data + // *ImageAdInfo_AdIdToCopyImageFrom + Image isImageAdInfo_Image `protobuf_oneof:"image"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAdInfo) Reset() { *m = ImageAdInfo{} } +func (m *ImageAdInfo) String() string { return proto.CompactTextString(m) } +func (*ImageAdInfo) ProtoMessage() {} +func (*ImageAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{12} +} +func (m *ImageAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAdInfo.Unmarshal(m, b) +} +func (m *ImageAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAdInfo.Marshal(b, m, deterministic) +} +func (dst *ImageAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAdInfo.Merge(dst, src) +} +func (m *ImageAdInfo) XXX_Size() int { + return xxx_messageInfo_ImageAdInfo.Size(m) +} +func (m *ImageAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAdInfo proto.InternalMessageInfo + +func (m *ImageAdInfo) GetPixelWidth() *wrappers.Int64Value { + if m != nil { + return m.PixelWidth + } + return nil +} + +func (m *ImageAdInfo) GetPixelHeight() *wrappers.Int64Value { + if m != nil { + return m.PixelHeight + } + return nil +} + +func (m *ImageAdInfo) GetImageUrl() *wrappers.StringValue { + if m != nil { + return m.ImageUrl + } + return nil +} + +func (m *ImageAdInfo) GetPreviewPixelWidth() *wrappers.Int64Value { + if m != nil { + return m.PreviewPixelWidth + } + return nil +} + +func (m *ImageAdInfo) GetPreviewPixelHeight() *wrappers.Int64Value { + if m != nil { + return m.PreviewPixelHeight + } + return nil +} + +func (m *ImageAdInfo) GetPreviewImageUrl() *wrappers.StringValue { + if m != nil { + return m.PreviewImageUrl + } + return nil +} + +func (m *ImageAdInfo) GetMimeType() enums.MimeTypeEnum_MimeType { + if m != nil { + return m.MimeType + } + return enums.MimeTypeEnum_UNSPECIFIED +} + +func (m *ImageAdInfo) GetName() 
*wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +type isImageAdInfo_Image interface { + isImageAdInfo_Image() +} + +type ImageAdInfo_MediaFile struct { + MediaFile *wrappers.StringValue `protobuf:"bytes,1,opt,name=media_file,json=mediaFile,proto3,oneof"` +} + +type ImageAdInfo_Data struct { + Data *wrappers.BytesValue `protobuf:"bytes,2,opt,name=data,proto3,oneof"` +} + +type ImageAdInfo_AdIdToCopyImageFrom struct { + AdIdToCopyImageFrom *wrappers.Int64Value `protobuf:"bytes,3,opt,name=ad_id_to_copy_image_from,json=adIdToCopyImageFrom,proto3,oneof"` +} + +func (*ImageAdInfo_MediaFile) isImageAdInfo_Image() {} + +func (*ImageAdInfo_Data) isImageAdInfo_Image() {} + +func (*ImageAdInfo_AdIdToCopyImageFrom) isImageAdInfo_Image() {} + +func (m *ImageAdInfo) GetImage() isImageAdInfo_Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *ImageAdInfo) GetMediaFile() *wrappers.StringValue { + if x, ok := m.GetImage().(*ImageAdInfo_MediaFile); ok { + return x.MediaFile + } + return nil +} + +func (m *ImageAdInfo) GetData() *wrappers.BytesValue { + if x, ok := m.GetImage().(*ImageAdInfo_Data); ok { + return x.Data + } + return nil +} + +func (m *ImageAdInfo) GetAdIdToCopyImageFrom() *wrappers.Int64Value { + if x, ok := m.GetImage().(*ImageAdInfo_AdIdToCopyImageFrom); ok { + return x.AdIdToCopyImageFrom + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ImageAdInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImageAdInfo_OneofMarshaler, _ImageAdInfo_OneofUnmarshaler, _ImageAdInfo_OneofSizer, []interface{}{ + (*ImageAdInfo_MediaFile)(nil), + (*ImageAdInfo_Data)(nil), + (*ImageAdInfo_AdIdToCopyImageFrom)(nil), + } +} + +func _ImageAdInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImageAdInfo) + // image + switch x := m.Image.(type) { + case *ImageAdInfo_MediaFile: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaFile); err != nil { + return err + } + case *ImageAdInfo_Data: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Data); err != nil { + return err + } + case *ImageAdInfo_AdIdToCopyImageFrom: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdIdToCopyImageFrom); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImageAdInfo.Image has unexpected type %T", x) + } + return nil +} + +func _ImageAdInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImageAdInfo) + switch tag { + case 1: // image.media_file + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.Image = &ImageAdInfo_MediaFile{msg} + return true, err + case 2: // image.data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.BytesValue) + err := b.DecodeMessage(msg) + m.Image = &ImageAdInfo_Data{msg} + return true, err + case 3: // image.ad_id_to_copy_image_from + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.Image = &ImageAdInfo_AdIdToCopyImageFrom{msg} + return true, err + default: + return false, nil + } +} + +func _ImageAdInfo_OneofSizer(msg proto.Message) (n int) { + m := 
msg.(*ImageAdInfo) + // image + switch x := m.Image.(type) { + case *ImageAdInfo_MediaFile: + s := proto.Size(x.MediaFile) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ImageAdInfo_Data: + s := proto.Size(x.Data) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ImageAdInfo_AdIdToCopyImageFrom: + s := proto.Size(x.AdIdToCopyImageFrom) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Representation of video bumper in-stream ad format (very short in-stream +// non-skippable video ad). +type VideoBumperInStreamAdInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoBumperInStreamAdInfo) Reset() { *m = VideoBumperInStreamAdInfo{} } +func (m *VideoBumperInStreamAdInfo) String() string { return proto.CompactTextString(m) } +func (*VideoBumperInStreamAdInfo) ProtoMessage() {} +func (*VideoBumperInStreamAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{13} +} +func (m *VideoBumperInStreamAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoBumperInStreamAdInfo.Unmarshal(m, b) +} +func (m *VideoBumperInStreamAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoBumperInStreamAdInfo.Marshal(b, m, deterministic) +} +func (dst *VideoBumperInStreamAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoBumperInStreamAdInfo.Merge(dst, src) +} +func (m *VideoBumperInStreamAdInfo) XXX_Size() int { + return xxx_messageInfo_VideoBumperInStreamAdInfo.Size(m) +} +func (m *VideoBumperInStreamAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_VideoBumperInStreamAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoBumperInStreamAdInfo proto.InternalMessageInfo + +// Representation of video non-skippable in-stream ad format (15 second +// in-stream non-skippable video ad). 
+type VideoNonSkippableInStreamAdInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoNonSkippableInStreamAdInfo) Reset() { *m = VideoNonSkippableInStreamAdInfo{} } +func (m *VideoNonSkippableInStreamAdInfo) String() string { return proto.CompactTextString(m) } +func (*VideoNonSkippableInStreamAdInfo) ProtoMessage() {} +func (*VideoNonSkippableInStreamAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{14} +} +func (m *VideoNonSkippableInStreamAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoNonSkippableInStreamAdInfo.Unmarshal(m, b) +} +func (m *VideoNonSkippableInStreamAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoNonSkippableInStreamAdInfo.Marshal(b, m, deterministic) +} +func (dst *VideoNonSkippableInStreamAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoNonSkippableInStreamAdInfo.Merge(dst, src) +} +func (m *VideoNonSkippableInStreamAdInfo) XXX_Size() int { + return xxx_messageInfo_VideoNonSkippableInStreamAdInfo.Size(m) +} +func (m *VideoNonSkippableInStreamAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_VideoNonSkippableInStreamAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoNonSkippableInStreamAdInfo proto.InternalMessageInfo + +// Representation of video TrueView in-stream ad format (ad shown during video +// playback, often at beginning, which displays a skip button a few seconds into +// the video). +type VideoTrueViewInStreamAdInfo struct { + // Label on the CTA (call-to-action) button taking the user to the video ad's + // final URL. + // Required for TrueView for action campaigns, optional otherwise. + ActionButtonLabel *wrappers.StringValue `protobuf:"bytes,1,opt,name=action_button_label,json=actionButtonLabel,proto3" json:"action_button_label,omitempty"` + // Additional text displayed with the CTA (call-to-action) button to give + // context and encourage clicking on the button. + ActionHeadline *wrappers.StringValue `protobuf:"bytes,2,opt,name=action_headline,json=actionHeadline,proto3" json:"action_headline,omitempty"` + // The MediaFile resource name of the companion banner used with the ad. 
+ CompanionBanner *wrappers.StringValue `protobuf:"bytes,3,opt,name=companion_banner,json=companionBanner,proto3" json:"companion_banner,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoTrueViewInStreamAdInfo) Reset() { *m = VideoTrueViewInStreamAdInfo{} } +func (m *VideoTrueViewInStreamAdInfo) String() string { return proto.CompactTextString(m) } +func (*VideoTrueViewInStreamAdInfo) ProtoMessage() {} +func (*VideoTrueViewInStreamAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{15} +} +func (m *VideoTrueViewInStreamAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoTrueViewInStreamAdInfo.Unmarshal(m, b) +} +func (m *VideoTrueViewInStreamAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoTrueViewInStreamAdInfo.Marshal(b, m, deterministic) +} +func (dst *VideoTrueViewInStreamAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoTrueViewInStreamAdInfo.Merge(dst, src) +} +func (m *VideoTrueViewInStreamAdInfo) XXX_Size() int { + return xxx_messageInfo_VideoTrueViewInStreamAdInfo.Size(m) +} +func (m *VideoTrueViewInStreamAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_VideoTrueViewInStreamAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoTrueViewInStreamAdInfo proto.InternalMessageInfo + +func (m *VideoTrueViewInStreamAdInfo) GetActionButtonLabel() *wrappers.StringValue { + if m != nil { + return m.ActionButtonLabel + } + return nil +} + +func (m *VideoTrueViewInStreamAdInfo) GetActionHeadline() *wrappers.StringValue { + if m != nil { + return m.ActionHeadline + } + return nil +} + +func (m *VideoTrueViewInStreamAdInfo) GetCompanionBanner() *wrappers.StringValue { + if m != nil { + return m.CompanionBanner + } + return nil +} + +// Representation of video out-stream ad format (ad shown alongside a feed +// with automatic playback, without sound). +type VideoOutstreamAdInfo struct { + // The headline of the ad. + Headline *wrappers.StringValue `protobuf:"bytes,1,opt,name=headline,proto3" json:"headline,omitempty"` + // The description line. 
+ Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoOutstreamAdInfo) Reset() { *m = VideoOutstreamAdInfo{} } +func (m *VideoOutstreamAdInfo) String() string { return proto.CompactTextString(m) } +func (*VideoOutstreamAdInfo) ProtoMessage() {} +func (*VideoOutstreamAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{16} +} +func (m *VideoOutstreamAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoOutstreamAdInfo.Unmarshal(m, b) +} +func (m *VideoOutstreamAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoOutstreamAdInfo.Marshal(b, m, deterministic) +} +func (dst *VideoOutstreamAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoOutstreamAdInfo.Merge(dst, src) +} +func (m *VideoOutstreamAdInfo) XXX_Size() int { + return xxx_messageInfo_VideoOutstreamAdInfo.Size(m) +} +func (m *VideoOutstreamAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_VideoOutstreamAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoOutstreamAdInfo proto.InternalMessageInfo + +func (m *VideoOutstreamAdInfo) GetHeadline() *wrappers.StringValue { + if m != nil { + return m.Headline + } + return nil +} + +func (m *VideoOutstreamAdInfo) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +// A video ad. +type VideoAdInfo struct { + // The MediaFile resource to use for the video. + MediaFile *wrappers.StringValue `protobuf:"bytes,1,opt,name=media_file,json=mediaFile,proto3" json:"media_file,omitempty"` + // Format-specific schema for the different video formats. 
+ // + // Types that are valid to be assigned to Format: + // *VideoAdInfo_InStream + // *VideoAdInfo_Bumper + // *VideoAdInfo_OutStream + // *VideoAdInfo_NonSkippable + Format isVideoAdInfo_Format `protobuf_oneof:"format"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAdInfo) Reset() { *m = VideoAdInfo{} } +func (m *VideoAdInfo) String() string { return proto.CompactTextString(m) } +func (*VideoAdInfo) ProtoMessage() {} +func (*VideoAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{17} +} +func (m *VideoAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAdInfo.Unmarshal(m, b) +} +func (m *VideoAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAdInfo.Marshal(b, m, deterministic) +} +func (dst *VideoAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAdInfo.Merge(dst, src) +} +func (m *VideoAdInfo) XXX_Size() int { + return xxx_messageInfo_VideoAdInfo.Size(m) +} +func (m *VideoAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAdInfo proto.InternalMessageInfo + +func (m *VideoAdInfo) GetMediaFile() *wrappers.StringValue { + if m != nil { + return m.MediaFile + } + return nil +} + +type isVideoAdInfo_Format interface { + isVideoAdInfo_Format() +} + +type VideoAdInfo_InStream struct { + InStream *VideoTrueViewInStreamAdInfo `protobuf:"bytes,2,opt,name=in_stream,json=inStream,proto3,oneof"` +} + +type VideoAdInfo_Bumper struct { + Bumper *VideoBumperInStreamAdInfo `protobuf:"bytes,3,opt,name=bumper,proto3,oneof"` +} + +type VideoAdInfo_OutStream struct { + OutStream *VideoOutstreamAdInfo `protobuf:"bytes,4,opt,name=out_stream,json=outStream,proto3,oneof"` +} + +type VideoAdInfo_NonSkippable struct { + NonSkippable *VideoNonSkippableInStreamAdInfo `protobuf:"bytes,5,opt,name=non_skippable,json=nonSkippable,proto3,oneof"` +} + +func (*VideoAdInfo_InStream) isVideoAdInfo_Format() {} + +func (*VideoAdInfo_Bumper) isVideoAdInfo_Format() {} + +func (*VideoAdInfo_OutStream) isVideoAdInfo_Format() {} + +func (*VideoAdInfo_NonSkippable) isVideoAdInfo_Format() {} + +func (m *VideoAdInfo) GetFormat() isVideoAdInfo_Format { + if m != nil { + return m.Format + } + return nil +} + +func (m *VideoAdInfo) GetInStream() *VideoTrueViewInStreamAdInfo { + if x, ok := m.GetFormat().(*VideoAdInfo_InStream); ok { + return x.InStream + } + return nil +} + +func (m *VideoAdInfo) GetBumper() *VideoBumperInStreamAdInfo { + if x, ok := m.GetFormat().(*VideoAdInfo_Bumper); ok { + return x.Bumper + } + return nil +} + +func (m *VideoAdInfo) GetOutStream() *VideoOutstreamAdInfo { + if x, ok := m.GetFormat().(*VideoAdInfo_OutStream); ok { + return x.OutStream + } + return nil +} + +func (m *VideoAdInfo) GetNonSkippable() *VideoNonSkippableInStreamAdInfo { + if x, ok := m.GetFormat().(*VideoAdInfo_NonSkippable); ok { + return x.NonSkippable + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*VideoAdInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _VideoAdInfo_OneofMarshaler, _VideoAdInfo_OneofUnmarshaler, _VideoAdInfo_OneofSizer, []interface{}{ + (*VideoAdInfo_InStream)(nil), + (*VideoAdInfo_Bumper)(nil), + (*VideoAdInfo_OutStream)(nil), + (*VideoAdInfo_NonSkippable)(nil), + } +} + +func _VideoAdInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*VideoAdInfo) + // format + switch x := m.Format.(type) { + case *VideoAdInfo_InStream: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InStream); err != nil { + return err + } + case *VideoAdInfo_Bumper: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Bumper); err != nil { + return err + } + case *VideoAdInfo_OutStream: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OutStream); err != nil { + return err + } + case *VideoAdInfo_NonSkippable: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NonSkippable); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("VideoAdInfo.Format has unexpected type %T", x) + } + return nil +} + +func _VideoAdInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*VideoAdInfo) + switch tag { + case 2: // format.in_stream + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoTrueViewInStreamAdInfo) + err := b.DecodeMessage(msg) + m.Format = &VideoAdInfo_InStream{msg} + return true, err + case 3: // format.bumper + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoBumperInStreamAdInfo) + err := b.DecodeMessage(msg) + m.Format = &VideoAdInfo_Bumper{msg} + return true, err + case 4: // format.out_stream + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoOutstreamAdInfo) + err := b.DecodeMessage(msg) + m.Format = &VideoAdInfo_OutStream{msg} + return true, err + case 5: // format.non_skippable + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoNonSkippableInStreamAdInfo) + err := b.DecodeMessage(msg) + m.Format = &VideoAdInfo_NonSkippable{msg} + return true, err + default: + return false, nil + } +} + +func _VideoAdInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*VideoAdInfo) + // format + switch x := m.Format.(type) { + case *VideoAdInfo_InStream: + s := proto.Size(x.InStream) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *VideoAdInfo_Bumper: + s := proto.Size(x.Bumper) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *VideoAdInfo_OutStream: + s := proto.Size(x.OutStream) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *VideoAdInfo_NonSkippable: + s := proto.Size(x.NonSkippable) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A responsive search ad. +// +// Responsive search ads let you create an ad that adapts to show more text, and +// more relevant messages, to your customers. 
Enter multiple headlines and +// descriptions when creating a responsive search ad, and over time, Google Ads +// will automatically test different combinations and learn which combinations +// perform best. By adapting your ad's content to more closely match potential +// customers' search terms, responsive search ads may improve your campaign's +// performance. +// +// More information at https://support.google.com/google-ads/answer/7684791 +type ResponsiveSearchAdInfo struct { + // List of text assets for headlines. When the ad serves the headlines will + // be selected from this list. + Headlines []*AdTextAsset `protobuf:"bytes,1,rep,name=headlines,proto3" json:"headlines,omitempty"` + // List of text assets for descriptions. When the ad serves the descriptions + // will be selected from this list. + Descriptions []*AdTextAsset `protobuf:"bytes,2,rep,name=descriptions,proto3" json:"descriptions,omitempty"` + // First part of text that may appear appended to the url displayed in the ad. + Path1 *wrappers.StringValue `protobuf:"bytes,3,opt,name=path1,proto3" json:"path1,omitempty"` + // Second part of text that may appear appended to the url displayed in the + // ad. This field can only be set when path1 is also set. + Path2 *wrappers.StringValue `protobuf:"bytes,4,opt,name=path2,proto3" json:"path2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponsiveSearchAdInfo) Reset() { *m = ResponsiveSearchAdInfo{} } +func (m *ResponsiveSearchAdInfo) String() string { return proto.CompactTextString(m) } +func (*ResponsiveSearchAdInfo) ProtoMessage() {} +func (*ResponsiveSearchAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{18} +} +func (m *ResponsiveSearchAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponsiveSearchAdInfo.Unmarshal(m, b) +} +func (m *ResponsiveSearchAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponsiveSearchAdInfo.Marshal(b, m, deterministic) +} +func (dst *ResponsiveSearchAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponsiveSearchAdInfo.Merge(dst, src) +} +func (m *ResponsiveSearchAdInfo) XXX_Size() int { + return xxx_messageInfo_ResponsiveSearchAdInfo.Size(m) +} +func (m *ResponsiveSearchAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ResponsiveSearchAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponsiveSearchAdInfo proto.InternalMessageInfo + +func (m *ResponsiveSearchAdInfo) GetHeadlines() []*AdTextAsset { + if m != nil { + return m.Headlines + } + return nil +} + +func (m *ResponsiveSearchAdInfo) GetDescriptions() []*AdTextAsset { + if m != nil { + return m.Descriptions + } + return nil +} + +func (m *ResponsiveSearchAdInfo) GetPath1() *wrappers.StringValue { + if m != nil { + return m.Path1 + } + return nil +} + +func (m *ResponsiveSearchAdInfo) GetPath2() *wrappers.StringValue { + if m != nil { + return m.Path2 + } + return nil +} + +// A legacy responsive display ad. Ads of this type are labeled 'Responsive ads' +// in the Google Ads UI. +type LegacyResponsiveDisplayAdInfo struct { + // The short version of the ad's headline. + ShortHeadline *wrappers.StringValue `protobuf:"bytes,1,opt,name=short_headline,json=shortHeadline,proto3" json:"short_headline,omitempty"` + // The long version of the ad's headline. 
+ LongHeadline *wrappers.StringValue `protobuf:"bytes,2,opt,name=long_headline,json=longHeadline,proto3" json:"long_headline,omitempty"` + // The description of the ad. + Description *wrappers.StringValue `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The business name in the ad. + BusinessName *wrappers.StringValue `protobuf:"bytes,4,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // Advertiser's consent to allow flexible color. When true, the ad may be + // served with different color if necessary. When false, the ad will be served + // with the specified colors or a neutral color. + // The default value is true. + // Must be true if main_color and accent_color are not set. + AllowFlexibleColor *wrappers.BoolValue `protobuf:"bytes,5,opt,name=allow_flexible_color,json=allowFlexibleColor,proto3" json:"allow_flexible_color,omitempty"` + // The accent color of the ad in hexadecimal, e.g. #ffffff for white. + // If one of main_color and accent_color is set, the other is required as + // well. + AccentColor *wrappers.StringValue `protobuf:"bytes,6,opt,name=accent_color,json=accentColor,proto3" json:"accent_color,omitempty"` + // The main color of the ad in hexadecimal, e.g. #ffffff for white. + // If one of main_color and accent_color is set, the other is required as + // well. + MainColor *wrappers.StringValue `protobuf:"bytes,7,opt,name=main_color,json=mainColor,proto3" json:"main_color,omitempty"` + // The call-to-action text for the ad. + CallToActionText *wrappers.StringValue `protobuf:"bytes,8,opt,name=call_to_action_text,json=callToActionText,proto3" json:"call_to_action_text,omitempty"` + // The MediaFile resource name of the logo image used in the ad. + LogoImage *wrappers.StringValue `protobuf:"bytes,9,opt,name=logo_image,json=logoImage,proto3" json:"logo_image,omitempty"` + // The MediaFile resource name of the square logo image used in the ad. + SquareLogoImage *wrappers.StringValue `protobuf:"bytes,10,opt,name=square_logo_image,json=squareLogoImage,proto3" json:"square_logo_image,omitempty"` + // The MediaFile resource name of the marketing image used in the ad. + MarketingImage *wrappers.StringValue `protobuf:"bytes,11,opt,name=marketing_image,json=marketingImage,proto3" json:"marketing_image,omitempty"` + // The MediaFile resource name of the square marketing image used in the ad. + SquareMarketingImage *wrappers.StringValue `protobuf:"bytes,12,opt,name=square_marketing_image,json=squareMarketingImage,proto3" json:"square_marketing_image,omitempty"` + // Specifies which format the ad will be served in. Default is ALL_FORMATS. + FormatSetting enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting `protobuf:"varint,13,opt,name=format_setting,json=formatSetting,proto3,enum=google.ads.googleads.v1.enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting" json:"format_setting,omitempty"` + // Prefix before price. E.g. 'as low as'. + PricePrefix *wrappers.StringValue `protobuf:"bytes,14,opt,name=price_prefix,json=pricePrefix,proto3" json:"price_prefix,omitempty"` + // Promotion text used for dynamic formats of responsive ads. For example + // 'Free two-day shipping'. 
+ PromoText *wrappers.StringValue `protobuf:"bytes,15,opt,name=promo_text,json=promoText,proto3" json:"promo_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LegacyResponsiveDisplayAdInfo) Reset() { *m = LegacyResponsiveDisplayAdInfo{} } +func (m *LegacyResponsiveDisplayAdInfo) String() string { return proto.CompactTextString(m) } +func (*LegacyResponsiveDisplayAdInfo) ProtoMessage() {} +func (*LegacyResponsiveDisplayAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{19} +} +func (m *LegacyResponsiveDisplayAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LegacyResponsiveDisplayAdInfo.Unmarshal(m, b) +} +func (m *LegacyResponsiveDisplayAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LegacyResponsiveDisplayAdInfo.Marshal(b, m, deterministic) +} +func (dst *LegacyResponsiveDisplayAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LegacyResponsiveDisplayAdInfo.Merge(dst, src) +} +func (m *LegacyResponsiveDisplayAdInfo) XXX_Size() int { + return xxx_messageInfo_LegacyResponsiveDisplayAdInfo.Size(m) +} +func (m *LegacyResponsiveDisplayAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LegacyResponsiveDisplayAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LegacyResponsiveDisplayAdInfo proto.InternalMessageInfo + +func (m *LegacyResponsiveDisplayAdInfo) GetShortHeadline() *wrappers.StringValue { + if m != nil { + return m.ShortHeadline + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetLongHeadline() *wrappers.StringValue { + if m != nil { + return m.LongHeadline + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetAllowFlexibleColor() *wrappers.BoolValue { + if m != nil { + return m.AllowFlexibleColor + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetAccentColor() *wrappers.StringValue { + if m != nil { + return m.AccentColor + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetMainColor() *wrappers.StringValue { + if m != nil { + return m.MainColor + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetCallToActionText() *wrappers.StringValue { + if m != nil { + return m.CallToActionText + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetLogoImage() *wrappers.StringValue { + if m != nil { + return m.LogoImage + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetSquareLogoImage() *wrappers.StringValue { + if m != nil { + return m.SquareLogoImage + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetMarketingImage() *wrappers.StringValue { + if m != nil { + return m.MarketingImage + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetSquareMarketingImage() *wrappers.StringValue { + if m != nil { + return m.SquareMarketingImage + } + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetFormatSetting() enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting { + if m != nil { + return m.FormatSetting + } + return enums.DisplayAdFormatSettingEnum_UNSPECIFIED +} + +func (m *LegacyResponsiveDisplayAdInfo) GetPricePrefix() *wrappers.StringValue { + if m != nil { + return m.PricePrefix + 
} + return nil +} + +func (m *LegacyResponsiveDisplayAdInfo) GetPromoText() *wrappers.StringValue { + if m != nil { + return m.PromoText + } + return nil +} + +// An app ad. +type AppAdInfo struct { + // An optional text asset that, if specified, must always be displayed when + // the ad is served. + MandatoryAdText *AdTextAsset `protobuf:"bytes,1,opt,name=mandatory_ad_text,json=mandatoryAdText,proto3" json:"mandatory_ad_text,omitempty"` + // List of text assets for headlines. When the ad serves the headlines will + // be selected from this list. + Headlines []*AdTextAsset `protobuf:"bytes,2,rep,name=headlines,proto3" json:"headlines,omitempty"` + // List of text assets for descriptions. When the ad serves the descriptions + // will be selected from this list. + Descriptions []*AdTextAsset `protobuf:"bytes,3,rep,name=descriptions,proto3" json:"descriptions,omitempty"` + // List of image assets that may be displayed with the ad. + Images []*AdImageAsset `protobuf:"bytes,4,rep,name=images,proto3" json:"images,omitempty"` + // List of YouTube video assets that may be displayed with the ad. + YoutubeVideos []*AdVideoAsset `protobuf:"bytes,5,rep,name=youtube_videos,json=youtubeVideos,proto3" json:"youtube_videos,omitempty"` + // List of media bundle assets that may be used with the ad. + Html5MediaBundles []*AdMediaBundleAsset `protobuf:"bytes,6,rep,name=html5_media_bundles,json=html5MediaBundles,proto3" json:"html5_media_bundles,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppAdInfo) Reset() { *m = AppAdInfo{} } +func (m *AppAdInfo) String() string { return proto.CompactTextString(m) } +func (*AppAdInfo) ProtoMessage() {} +func (*AppAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{20} +} +func (m *AppAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppAdInfo.Unmarshal(m, b) +} +func (m *AppAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppAdInfo.Marshal(b, m, deterministic) +} +func (dst *AppAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppAdInfo.Merge(dst, src) +} +func (m *AppAdInfo) XXX_Size() int { + return xxx_messageInfo_AppAdInfo.Size(m) +} +func (m *AppAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AppAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AppAdInfo proto.InternalMessageInfo + +func (m *AppAdInfo) GetMandatoryAdText() *AdTextAsset { + if m != nil { + return m.MandatoryAdText + } + return nil +} + +func (m *AppAdInfo) GetHeadlines() []*AdTextAsset { + if m != nil { + return m.Headlines + } + return nil +} + +func (m *AppAdInfo) GetDescriptions() []*AdTextAsset { + if m != nil { + return m.Descriptions + } + return nil +} + +func (m *AppAdInfo) GetImages() []*AdImageAsset { + if m != nil { + return m.Images + } + return nil +} + +func (m *AppAdInfo) GetYoutubeVideos() []*AdVideoAsset { + if m != nil { + return m.YoutubeVideos + } + return nil +} + +func (m *AppAdInfo) GetHtml5MediaBundles() []*AdMediaBundleAsset { + if m != nil { + return m.Html5MediaBundles + } + return nil +} + +// App engagement ads allow you to write text encouraging a specific action in +// the app, like checking in, making a purchase, or booking a flight. +// They allow you to send users to a specific part of your app where they can +// find what they're looking for easier and faster. +type AppEngagementAdInfo struct { + // List of text assets for headlines. 
When the ad serves the headlines will + // be selected from this list. + Headlines []*AdTextAsset `protobuf:"bytes,1,rep,name=headlines,proto3" json:"headlines,omitempty"` + // List of text assets for descriptions. When the ad serves the descriptions + // will be selected from this list. + Descriptions []*AdTextAsset `protobuf:"bytes,2,rep,name=descriptions,proto3" json:"descriptions,omitempty"` + // List of image assets that may be displayed with the ad. + Images []*AdImageAsset `protobuf:"bytes,3,rep,name=images,proto3" json:"images,omitempty"` + // List of video assets that may be displayed with the ad. + Videos []*AdVideoAsset `protobuf:"bytes,4,rep,name=videos,proto3" json:"videos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngagementAdInfo) Reset() { *m = AppEngagementAdInfo{} } +func (m *AppEngagementAdInfo) String() string { return proto.CompactTextString(m) } +func (*AppEngagementAdInfo) ProtoMessage() {} +func (*AppEngagementAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{21} +} +func (m *AppEngagementAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngagementAdInfo.Unmarshal(m, b) +} +func (m *AppEngagementAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngagementAdInfo.Marshal(b, m, deterministic) +} +func (dst *AppEngagementAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngagementAdInfo.Merge(dst, src) +} +func (m *AppEngagementAdInfo) XXX_Size() int { + return xxx_messageInfo_AppEngagementAdInfo.Size(m) +} +func (m *AppEngagementAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngagementAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngagementAdInfo proto.InternalMessageInfo + +func (m *AppEngagementAdInfo) GetHeadlines() []*AdTextAsset { + if m != nil { + return m.Headlines + } + return nil +} + +func (m *AppEngagementAdInfo) GetDescriptions() []*AdTextAsset { + if m != nil { + return m.Descriptions + } + return nil +} + +func (m *AppEngagementAdInfo) GetImages() []*AdImageAsset { + if m != nil { + return m.Images + } + return nil +} + +func (m *AppEngagementAdInfo) GetVideos() []*AdVideoAsset { + if m != nil { + return m.Videos + } + return nil +} + +// A legacy app install ad that only can be used by a few select customers. +type LegacyAppInstallAdInfo struct { + // The id of the mobile app. + AppId *wrappers.StringValue `protobuf:"bytes,1,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // The app store the mobile app is available in. + AppStore enums.LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore `protobuf:"varint,2,opt,name=app_store,json=appStore,proto3,enum=google.ads.googleads.v1.enums.LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore" json:"app_store,omitempty"` + // The headline of the ad. + Headline *wrappers.StringValue `protobuf:"bytes,3,opt,name=headline,proto3" json:"headline,omitempty"` + // The first description line of the ad. + Description1 *wrappers.StringValue `protobuf:"bytes,4,opt,name=description1,proto3" json:"description1,omitempty"` + // The second description line of the ad. 
+ Description2 *wrappers.StringValue `protobuf:"bytes,5,opt,name=description2,proto3" json:"description2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LegacyAppInstallAdInfo) Reset() { *m = LegacyAppInstallAdInfo{} } +func (m *LegacyAppInstallAdInfo) String() string { return proto.CompactTextString(m) } +func (*LegacyAppInstallAdInfo) ProtoMessage() {} +func (*LegacyAppInstallAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{22} +} +func (m *LegacyAppInstallAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LegacyAppInstallAdInfo.Unmarshal(m, b) +} +func (m *LegacyAppInstallAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LegacyAppInstallAdInfo.Marshal(b, m, deterministic) +} +func (dst *LegacyAppInstallAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LegacyAppInstallAdInfo.Merge(dst, src) +} +func (m *LegacyAppInstallAdInfo) XXX_Size() int { + return xxx_messageInfo_LegacyAppInstallAdInfo.Size(m) +} +func (m *LegacyAppInstallAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LegacyAppInstallAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LegacyAppInstallAdInfo proto.InternalMessageInfo + +func (m *LegacyAppInstallAdInfo) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +func (m *LegacyAppInstallAdInfo) GetAppStore() enums.LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore { + if m != nil { + return m.AppStore + } + return enums.LegacyAppInstallAdAppStoreEnum_UNSPECIFIED +} + +func (m *LegacyAppInstallAdInfo) GetHeadline() *wrappers.StringValue { + if m != nil { + return m.Headline + } + return nil +} + +func (m *LegacyAppInstallAdInfo) GetDescription1() *wrappers.StringValue { + if m != nil { + return m.Description1 + } + return nil +} + +func (m *LegacyAppInstallAdInfo) GetDescription2() *wrappers.StringValue { + if m != nil { + return m.Description2 + } + return nil +} + +// A responsive display ad. +type ResponsiveDisplayAdInfo struct { + // Marketing images to be used in the ad. Valid image types are GIF, + // JPEG, and PNG. The minimum size is 600x314 and the aspect ratio must + // be 1.91:1 (+-1%). At least one marketing_image is required. Combined with + // square_marketing_images the maximum is 15. + MarketingImages []*AdImageAsset `protobuf:"bytes,1,rep,name=marketing_images,json=marketingImages,proto3" json:"marketing_images,omitempty"` + // Square marketing images to be used in the ad. Valid image types are GIF, + // JPEG, and PNG. The minimum size is 300x300 and the aspect ratio must + // be 1:1 (+-1%). At least one square marketing_image is required. Combined + // with marketing_images the maximum is 15. + SquareMarketingImages []*AdImageAsset `protobuf:"bytes,2,rep,name=square_marketing_images,json=squareMarketingImages,proto3" json:"square_marketing_images,omitempty"` + // Logo images to be used in the ad. Valid image types are GIF, + // JPEG, and PNG. The minimum size is 512x128 and the aspect ratio must + // be 4:1 (+-1%). Combined with square_logo_images the maximum is 5. + LogoImages []*AdImageAsset `protobuf:"bytes,3,rep,name=logo_images,json=logoImages,proto3" json:"logo_images,omitempty"` + // Square logo images to be used in the ad. Valid image types are GIF, + // JPEG, and PNG. The minimum size is 128x128 and the aspect ratio must + // be 1:1 (+-1%). Combined with square_logo_images the maximum is 5. 
+ SquareLogoImages []*AdImageAsset `protobuf:"bytes,4,rep,name=square_logo_images,json=squareLogoImages,proto3" json:"square_logo_images,omitempty"` + // Short format headlines for the ad. The maximum length is 30 characters. + // At least 1 and max 5 headlines can be specified. + Headlines []*AdTextAsset `protobuf:"bytes,5,rep,name=headlines,proto3" json:"headlines,omitempty"` + // A required long format headline. The maximum length is 90 characters. + LongHeadline *AdTextAsset `protobuf:"bytes,6,opt,name=long_headline,json=longHeadline,proto3" json:"long_headline,omitempty"` + // Descriptive texts for the ad. The maximum length is 90 characters. At + // least 1 and max 5 headlines can be specified. + Descriptions []*AdTextAsset `protobuf:"bytes,7,rep,name=descriptions,proto3" json:"descriptions,omitempty"` + // Optional YouTube videos for the ad. A maximum of 5 videos can be specified. + YoutubeVideos []*AdVideoAsset `protobuf:"bytes,8,rep,name=youtube_videos,json=youtubeVideos,proto3" json:"youtube_videos,omitempty"` + // The advertiser/brand name. Maximum display width is 25. + BusinessName *wrappers.StringValue `protobuf:"bytes,9,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // The main color of the ad in hexadecimal, e.g. #ffffff for white. + // If one of main_color and accent_color is set, the other is required as + // well. + MainColor *wrappers.StringValue `protobuf:"bytes,10,opt,name=main_color,json=mainColor,proto3" json:"main_color,omitempty"` + // The accent color of the ad in hexadecimal, e.g. #ffffff for white. + // If one of main_color and accent_color is set, the other is required as + // well. + AccentColor *wrappers.StringValue `protobuf:"bytes,11,opt,name=accent_color,json=accentColor,proto3" json:"accent_color,omitempty"` + // Advertiser's consent to allow flexible color. When true, the ad may be + // served with different color if necessary. When false, the ad will be served + // with the specified colors or a neutral color. + // The default value is true. + // Must be true if main_color and accent_color are not set. + AllowFlexibleColor *wrappers.BoolValue `protobuf:"bytes,12,opt,name=allow_flexible_color,json=allowFlexibleColor,proto3" json:"allow_flexible_color,omitempty"` + // The call-to-action text for the ad. Maximum display width is 30. + CallToActionText *wrappers.StringValue `protobuf:"bytes,13,opt,name=call_to_action_text,json=callToActionText,proto3" json:"call_to_action_text,omitempty"` + // Prefix before price. E.g. 'as low as'. + PricePrefix *wrappers.StringValue `protobuf:"bytes,14,opt,name=price_prefix,json=pricePrefix,proto3" json:"price_prefix,omitempty"` + // Promotion text used for dynamic formats of responsive ads. For example + // 'Free two-day shipping'. + PromoText *wrappers.StringValue `protobuf:"bytes,15,opt,name=promo_text,json=promoText,proto3" json:"promo_text,omitempty"` + // Specifies which format the ad will be served in. Default is ALL_FORMATS. 
+ FormatSetting enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting `protobuf:"varint,16,opt,name=format_setting,json=formatSetting,proto3,enum=google.ads.googleads.v1.enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting" json:"format_setting,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponsiveDisplayAdInfo) Reset() { *m = ResponsiveDisplayAdInfo{} } +func (m *ResponsiveDisplayAdInfo) String() string { return proto.CompactTextString(m) } +func (*ResponsiveDisplayAdInfo) ProtoMessage() {} +func (*ResponsiveDisplayAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{23} +} +func (m *ResponsiveDisplayAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponsiveDisplayAdInfo.Unmarshal(m, b) +} +func (m *ResponsiveDisplayAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponsiveDisplayAdInfo.Marshal(b, m, deterministic) +} +func (dst *ResponsiveDisplayAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponsiveDisplayAdInfo.Merge(dst, src) +} +func (m *ResponsiveDisplayAdInfo) XXX_Size() int { + return xxx_messageInfo_ResponsiveDisplayAdInfo.Size(m) +} +func (m *ResponsiveDisplayAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ResponsiveDisplayAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponsiveDisplayAdInfo proto.InternalMessageInfo + +func (m *ResponsiveDisplayAdInfo) GetMarketingImages() []*AdImageAsset { + if m != nil { + return m.MarketingImages + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetSquareMarketingImages() []*AdImageAsset { + if m != nil { + return m.SquareMarketingImages + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetLogoImages() []*AdImageAsset { + if m != nil { + return m.LogoImages + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetSquareLogoImages() []*AdImageAsset { + if m != nil { + return m.SquareLogoImages + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetHeadlines() []*AdTextAsset { + if m != nil { + return m.Headlines + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetLongHeadline() *AdTextAsset { + if m != nil { + return m.LongHeadline + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetDescriptions() []*AdTextAsset { + if m != nil { + return m.Descriptions + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetYoutubeVideos() []*AdVideoAsset { + if m != nil { + return m.YoutubeVideos + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetMainColor() *wrappers.StringValue { + if m != nil { + return m.MainColor + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetAccentColor() *wrappers.StringValue { + if m != nil { + return m.AccentColor + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetAllowFlexibleColor() *wrappers.BoolValue { + if m != nil { + return m.AllowFlexibleColor + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetCallToActionText() *wrappers.StringValue { + if m != nil { + return m.CallToActionText + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetPricePrefix() *wrappers.StringValue { + if m != nil { + return m.PricePrefix + } + return nil +} + +func (m *ResponsiveDisplayAdInfo) GetPromoText() *wrappers.StringValue { + if m != nil { + return m.PromoText + } + return nil +} + +func (m 
*ResponsiveDisplayAdInfo) GetFormatSetting() enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting { + if m != nil { + return m.FormatSetting + } + return enums.DisplayAdFormatSettingEnum_UNSPECIFIED +} + +// A generic type of display ad. The exact ad format is controlled by the +// display_upload_product_type field, which determines what kinds of data +// need to be included with the ad. +type DisplayUploadAdInfo struct { + // The product type of this ad. See comments on the enum for details. + DisplayUploadProductType enums.DisplayUploadProductTypeEnum_DisplayUploadProductType `protobuf:"varint,1,opt,name=display_upload_product_type,json=displayUploadProductType,proto3,enum=google.ads.googleads.v1.enums.DisplayUploadProductTypeEnum_DisplayUploadProductType" json:"display_upload_product_type,omitempty"` + // The asset data that makes up the ad. + // + // Types that are valid to be assigned to MediaAsset: + // *DisplayUploadAdInfo_MediaBundle + MediaAsset isDisplayUploadAdInfo_MediaAsset `protobuf_oneof:"media_asset"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisplayUploadAdInfo) Reset() { *m = DisplayUploadAdInfo{} } +func (m *DisplayUploadAdInfo) String() string { return proto.CompactTextString(m) } +func (*DisplayUploadAdInfo) ProtoMessage() {} +func (*DisplayUploadAdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_infos_60e41938ab80fbca, []int{24} +} +func (m *DisplayUploadAdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisplayUploadAdInfo.Unmarshal(m, b) +} +func (m *DisplayUploadAdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisplayUploadAdInfo.Marshal(b, m, deterministic) +} +func (dst *DisplayUploadAdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisplayUploadAdInfo.Merge(dst, src) +} +func (m *DisplayUploadAdInfo) XXX_Size() int { + return xxx_messageInfo_DisplayUploadAdInfo.Size(m) +} +func (m *DisplayUploadAdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DisplayUploadAdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DisplayUploadAdInfo proto.InternalMessageInfo + +func (m *DisplayUploadAdInfo) GetDisplayUploadProductType() enums.DisplayUploadProductTypeEnum_DisplayUploadProductType { + if m != nil { + return m.DisplayUploadProductType + } + return enums.DisplayUploadProductTypeEnum_UNSPECIFIED +} + +type isDisplayUploadAdInfo_MediaAsset interface { + isDisplayUploadAdInfo_MediaAsset() +} + +type DisplayUploadAdInfo_MediaBundle struct { + MediaBundle *AdMediaBundleAsset `protobuf:"bytes,2,opt,name=media_bundle,json=mediaBundle,proto3,oneof"` +} + +func (*DisplayUploadAdInfo_MediaBundle) isDisplayUploadAdInfo_MediaAsset() {} + +func (m *DisplayUploadAdInfo) GetMediaAsset() isDisplayUploadAdInfo_MediaAsset { + if m != nil { + return m.MediaAsset + } + return nil +} + +func (m *DisplayUploadAdInfo) GetMediaBundle() *AdMediaBundleAsset { + if x, ok := m.GetMediaAsset().(*DisplayUploadAdInfo_MediaBundle); ok { + return x.MediaBundle + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DisplayUploadAdInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DisplayUploadAdInfo_OneofMarshaler, _DisplayUploadAdInfo_OneofUnmarshaler, _DisplayUploadAdInfo_OneofSizer, []interface{}{ + (*DisplayUploadAdInfo_MediaBundle)(nil), + } +} + +func _DisplayUploadAdInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DisplayUploadAdInfo) + // media_asset + switch x := m.MediaAsset.(type) { + case *DisplayUploadAdInfo_MediaBundle: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaBundle); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DisplayUploadAdInfo.MediaAsset has unexpected type %T", x) + } + return nil +} + +func _DisplayUploadAdInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DisplayUploadAdInfo) + switch tag { + case 2: // media_asset.media_bundle + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdMediaBundleAsset) + err := b.DecodeMessage(msg) + m.MediaAsset = &DisplayUploadAdInfo_MediaBundle{msg} + return true, err + default: + return false, nil + } +} + +func _DisplayUploadAdInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DisplayUploadAdInfo) + // media_asset + switch x := m.MediaAsset.(type) { + case *DisplayUploadAdInfo_MediaBundle: + s := proto.Size(x.MediaBundle) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*TextAdInfo)(nil), "google.ads.googleads.v1.common.TextAdInfo") + proto.RegisterType((*ExpandedTextAdInfo)(nil), "google.ads.googleads.v1.common.ExpandedTextAdInfo") + proto.RegisterType((*CallOnlyAdInfo)(nil), "google.ads.googleads.v1.common.CallOnlyAdInfo") + proto.RegisterType((*ExpandedDynamicSearchAdInfo)(nil), "google.ads.googleads.v1.common.ExpandedDynamicSearchAdInfo") + proto.RegisterType((*HotelAdInfo)(nil), "google.ads.googleads.v1.common.HotelAdInfo") + proto.RegisterType((*ShoppingSmartAdInfo)(nil), "google.ads.googleads.v1.common.ShoppingSmartAdInfo") + proto.RegisterType((*ShoppingProductAdInfo)(nil), "google.ads.googleads.v1.common.ShoppingProductAdInfo") + proto.RegisterType((*GmailAdInfo)(nil), "google.ads.googleads.v1.common.GmailAdInfo") + proto.RegisterType((*GmailTeaser)(nil), "google.ads.googleads.v1.common.GmailTeaser") + proto.RegisterType((*DisplayCallToAction)(nil), "google.ads.googleads.v1.common.DisplayCallToAction") + proto.RegisterType((*ProductImage)(nil), "google.ads.googleads.v1.common.ProductImage") + proto.RegisterType((*ProductVideo)(nil), "google.ads.googleads.v1.common.ProductVideo") + proto.RegisterType((*ImageAdInfo)(nil), "google.ads.googleads.v1.common.ImageAdInfo") + proto.RegisterType((*VideoBumperInStreamAdInfo)(nil), "google.ads.googleads.v1.common.VideoBumperInStreamAdInfo") + proto.RegisterType((*VideoNonSkippableInStreamAdInfo)(nil), "google.ads.googleads.v1.common.VideoNonSkippableInStreamAdInfo") + proto.RegisterType((*VideoTrueViewInStreamAdInfo)(nil), "google.ads.googleads.v1.common.VideoTrueViewInStreamAdInfo") + proto.RegisterType((*VideoOutstreamAdInfo)(nil), "google.ads.googleads.v1.common.VideoOutstreamAdInfo") + proto.RegisterType((*VideoAdInfo)(nil), "google.ads.googleads.v1.common.VideoAdInfo") + 
proto.RegisterType((*ResponsiveSearchAdInfo)(nil), "google.ads.googleads.v1.common.ResponsiveSearchAdInfo") + proto.RegisterType((*LegacyResponsiveDisplayAdInfo)(nil), "google.ads.googleads.v1.common.LegacyResponsiveDisplayAdInfo") + proto.RegisterType((*AppAdInfo)(nil), "google.ads.googleads.v1.common.AppAdInfo") + proto.RegisterType((*AppEngagementAdInfo)(nil), "google.ads.googleads.v1.common.AppEngagementAdInfo") + proto.RegisterType((*LegacyAppInstallAdInfo)(nil), "google.ads.googleads.v1.common.LegacyAppInstallAdInfo") + proto.RegisterType((*ResponsiveDisplayAdInfo)(nil), "google.ads.googleads.v1.common.ResponsiveDisplayAdInfo") + proto.RegisterType((*DisplayUploadAdInfo)(nil), "google.ads.googleads.v1.common.DisplayUploadAdInfo") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/ad_type_infos.proto", fileDescriptor_ad_type_infos_60e41938ab80fbca) +} + +var fileDescriptor_ad_type_infos_60e41938ab80fbca = []byte{ + // 2358 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x5a, 0x4b, 0x6f, 0xdb, 0xc8, + 0x1d, 0xb7, 0x2c, 0x3f, 0xa4, 0xbf, 0x24, 0x3f, 0xe8, 0x3c, 0xb4, 0xf6, 0xee, 0x36, 0xcb, 0x43, + 0x11, 0xa0, 0x5d, 0xb9, 0x56, 0xd2, 0xa2, 0xeb, 0x34, 0x9b, 0xca, 0x76, 0x12, 0x19, 0x9b, 0x87, + 0x57, 0x76, 0x9c, 0x22, 0x70, 0x41, 0x8c, 0xc5, 0xb1, 0x4c, 0x84, 0x9c, 0x99, 0x72, 0x48, 0xc7, + 0xba, 0xee, 0xa1, 0xbd, 0x16, 0xd8, 0x53, 0x8b, 0x9e, 0x7a, 0x2c, 0xd0, 0x8f, 0xd0, 0x53, 0x7b, + 0x5a, 0xa0, 0xfd, 0x04, 0xfd, 0x04, 0xed, 0x47, 0xd8, 0x43, 0x8b, 0x79, 0x90, 0xa2, 0x24, 0xcb, + 0x1a, 0xd9, 0x8b, 0x06, 0x3d, 0x49, 0x22, 0xff, 0xbf, 0x1f, 0x67, 0xe6, 0xff, 0xfa, 0xcd, 0x50, + 0x50, 0xef, 0x50, 0xda, 0xf1, 0xf1, 0x3a, 0x72, 0xf9, 0xba, 0xfa, 0x2a, 0xbe, 0x9d, 0x6d, 0xac, + 0xb7, 0x69, 0x10, 0x50, 0xb2, 0x8e, 0x5c, 0x27, 0xea, 0x32, 0xec, 0x78, 0xe4, 0x84, 0xf2, 0x1a, + 0x0b, 0x69, 0x44, 0xad, 0x8f, 0x95, 0x61, 0x0d, 0xb9, 0xbc, 0x96, 0x62, 0x6a, 0x67, 0x1b, 0x35, + 0x85, 0x59, 0xfd, 0x74, 0x3c, 0x27, 0xe2, 0x1c, 0x47, 0x8a, 0x6e, 0x75, 0x7b, 0x94, 0x39, 0x26, + 0x71, 0xc0, 0xd7, 0xdb, 0xc8, 0xf7, 0x9d, 0x36, 0x25, 0x67, 0x38, 0xe4, 0x1e, 0x25, 0x4e, 0x88, + 0x19, 0x0d, 0x23, 0x8f, 0x74, 0x1c, 0x1e, 0xa1, 0x08, 0x6b, 0x92, 0x87, 0x97, 0x93, 0xb8, 0x1e, + 0x67, 0x3e, 0xea, 0x3a, 0xc8, 0x75, 0x4e, 0x68, 0x18, 0xa0, 0xc8, 0xe1, 0x38, 0x12, 0x24, 0x1a, + 0xfe, 0xc8, 0x0c, 0x1e, 0x33, 0x9f, 0x22, 0xd7, 0x61, 0x21, 0x75, 0xe3, 0x76, 0x24, 0x57, 0xc6, + 0x6c, 0x12, 0x3e, 0xee, 0xa0, 0x76, 0xd7, 0x41, 0x8c, 0x39, 0x1e, 0xe1, 0x91, 0x98, 0x92, 0x58, + 0x05, 0xc6, 0x1c, 0x1e, 0xd1, 0x30, 0x21, 0xf9, 0xf4, 0x72, 0x92, 0xc0, 0x0b, 0x70, 0xf6, 0x99, + 0xda, 0x0f, 0xeb, 0xf2, 0xd7, 0x71, 0x7c, 0xb2, 0xfe, 0x2e, 0x44, 0x8c, 0xe1, 0x50, 0xfb, 0x69, + 0xf5, 0xc3, 0x84, 0x8e, 0x79, 0xeb, 0x88, 0x10, 0x1a, 0xa1, 0xc8, 0xa3, 0x44, 0xdf, 0xb5, 0xbf, + 0xc9, 0x01, 0x1c, 0xe0, 0xf3, 0xa8, 0xe1, 0xee, 0x92, 0x13, 0x6a, 0xfd, 0x14, 0x0a, 0xa7, 0x18, + 0xb9, 0xbe, 0x47, 0x70, 0x35, 0x77, 0x27, 0x77, 0xb7, 0x54, 0xff, 0x50, 0x3b, 0xb7, 0x96, 0xf0, + 0xd7, 0xf6, 0xa3, 0xd0, 0x23, 0x9d, 0x43, 0xe4, 0xc7, 0xb8, 0x95, 0x5a, 0x5b, 0x3f, 0x87, 0xb2, + 0x8b, 0x79, 0x3b, 0xf4, 0x98, 0xa0, 0xdf, 0xa8, 0x4e, 0x1b, 0xa0, 0xfb, 0x10, 0x03, 0x0c, 0xf5, + 0x6a, 0x7e, 0x42, 0x86, 0xba, 0xfd, 0xf7, 0x3c, 0x58, 0x8f, 0xcf, 0x19, 0x22, 0x2e, 0x76, 0x33, + 0x93, 0xda, 0x86, 0x85, 0x64, 0x98, 0x0e, 0x43, 0x61, 0xb4, 0x61, 0x34, 0xb5, 0x4a, 0x82, 0xd9, + 0x13, 0x90, 0x21, 0x92, 0xba, 0xd1, 0x0c, 0xfb, 0x48, 0xea, 0x43, 0x24, 0xf7, 0xaa, 0x73, 0x93, + 
0x92, 0xdc, 0xb3, 0x3e, 0x87, 0x52, 0x66, 0xd6, 0x46, 0xcb, 0x94, 0x05, 0x0c, 0xad, 0xf3, 0xfc, + 0xa4, 0xeb, 0x6c, 0xd5, 0x61, 0x96, 0xa1, 0xe8, 0x74, 0xa3, 0x3a, 0x63, 0x00, 0x55, 0xa6, 0x09, + 0xa6, 0x5e, 0x9d, 0x35, 0xc5, 0xd4, 0xed, 0x3f, 0xcf, 0xc3, 0xc2, 0x36, 0xf2, 0xfd, 0x97, 0xc4, + 0xef, 0x6a, 0x5f, 0x3e, 0x82, 0x72, 0x9b, 0xc6, 0x24, 0x0a, 0xbb, 0x4e, 0x9b, 0xba, 0x66, 0x41, + 0x5a, 0xd2, 0x88, 0x6d, 0xea, 0x62, 0x41, 0xc0, 0x4e, 0x29, 0xc1, 0x0e, 0x89, 0x83, 0x63, 0x1c, + 0x1a, 0x79, 0xb1, 0x24, 0x11, 0x2f, 0x24, 0xc0, 0x6a, 0x40, 0xe5, 0x38, 0xe6, 0x1e, 0xc1, 0x9c, + 0x3b, 0x04, 0x05, 0xd8, 0x2c, 0x4e, 0x13, 0xc8, 0x0b, 0x14, 0x60, 0x6b, 0x13, 0x8a, 0x89, 0x4b, + 0x37, 0xaa, 0x25, 0x03, 0x78, 0xcf, 0x3c, 0x8b, 0xad, 0x57, 0xcb, 0x93, 0x60, 0xeb, 0x43, 0x39, + 0x3a, 0x73, 0xed, 0x1c, 0x9d, 0x9d, 0x38, 0x76, 0x1e, 0x42, 0x59, 0xd6, 0xf2, 0x28, 0x44, 0xed, + 0xb7, 0xd8, 0xd5, 0x09, 0xb0, 0x3a, 0xc4, 0xb0, 0x45, 0xa9, 0x9f, 0xb8, 0x0f, 0xf9, 0xfe, 0x81, + 0x32, 0xb7, 0x5a, 0x70, 0xdb, 0xf5, 0x38, 0x3a, 0xf6, 0xb1, 0x33, 0xd0, 0x12, 0x74, 0x1c, 0x5f, + 0xc6, 0x74, 0x53, 0x43, 0x45, 0x4c, 0x6d, 0xa7, 0x40, 0xcb, 0x81, 0x8f, 0xb2, 0x21, 0xe1, 0x9c, + 0xe1, 0xd0, 0x3b, 0xf1, 0xda, 0xb2, 0x4e, 0x3a, 0x71, 0xe8, 0x57, 0x0b, 0x06, 0xb3, 0x5c, 0xcd, + 0xc4, 0xc8, 0x61, 0x86, 0xe0, 0x55, 0xe8, 0x5b, 0xbb, 0xb0, 0x9c, 0x69, 0x5d, 0xa8, 0x2d, 0xf3, + 0xb6, 0x68, 0x40, 0xba, 0xd4, 0x83, 0x35, 0x24, 0xca, 0xfa, 0x7d, 0x0e, 0x56, 0x47, 0xb7, 0xc1, + 0x2a, 0xdc, 0xc9, 0xdd, 0x5d, 0xa8, 0x1f, 0xd5, 0x46, 0xf5, 0x66, 0xd9, 0x42, 0x6a, 0xfd, 0xf3, + 0x6f, 0x25, 0x1c, 0xfb, 0x82, 0xe2, 0x31, 0x89, 0x83, 0x4b, 0x0d, 0x5a, 0xd5, 0xf6, 0x88, 0x3b, + 0xf6, 0x2f, 0x61, 0x2d, 0x29, 0xbf, 0x3b, 0x5d, 0x82, 0x02, 0xaf, 0xbd, 0x8f, 0x51, 0xd8, 0x3e, + 0xd5, 0xb9, 0x3b, 0x50, 0xb8, 0x72, 0x13, 0x16, 0x2e, 0xbb, 0x02, 0xa5, 0x26, 0x8d, 0xb0, 0xaf, + 0xe8, 0xec, 0x9b, 0xb0, 0xb2, 0x7f, 0x4a, 0x19, 0x13, 0x8f, 0x0f, 0x50, 0xa8, 0xab, 0xbd, 0x7d, + 0x1b, 0x6e, 0x26, 0x97, 0xf7, 0x54, 0x87, 0xd6, 0x37, 0xfe, 0x3a, 0x0b, 0xa5, 0xa7, 0x01, 0xf2, + 0xfc, 0xb4, 0x2d, 0xcc, 0x45, 0x18, 0x71, 0x1c, 0xea, 0x91, 0xfc, 0xa0, 0x76, 0xb9, 0xa2, 0xa9, + 0x49, 0xf0, 0x81, 0x84, 0xb4, 0x34, 0x54, 0x94, 0x13, 0x91, 0x5f, 0x38, 0x74, 0xbc, 0x00, 0x75, + 0xb0, 0x59, 0x39, 0x51, 0x88, 0x5d, 0x01, 0xb0, 0x1e, 0xc3, 0x62, 0x80, 0xc2, 0xb7, 0x58, 0x3a, + 0x51, 0x71, 0x98, 0x14, 0x94, 0x85, 0x14, 0xa4, 0x68, 0x0e, 0xa1, 0x3a, 0x40, 0xe3, 0xa4, 0x8d, + 0xdc, 0x24, 0xcd, 0x6f, 0xf5, 0xf3, 0x35, 0x93, 0xb6, 0x7e, 0x04, 0x6b, 0x83, 0xbc, 0x59, 0x1f, + 0x9a, 0xe4, 0xff, 0x07, 0xfd, 0xd4, 0x3b, 0x99, 0x56, 0xf4, 0xeb, 0x1c, 0x7c, 0x7f, 0x88, 0x5e, + 0xab, 0x2c, 0x55, 0x25, 0x68, 0x92, 0x2e, 0xaa, 0x4e, 0xdc, 0x1b, 0xe7, 0xa3, 0x1d, 0x85, 0x16, + 0x01, 0x7c, 0x40, 0x55, 0xce, 0xb4, 0x3e, 0x19, 0x18, 0xc0, 0xb0, 0x89, 0xb5, 0x0f, 0x0b, 0x89, + 0x9c, 0x93, 0xa3, 0xe0, 0xd5, 0xf9, 0x3b, 0xf9, 0xbb, 0xa5, 0xfa, 0x0f, 0xc7, 0x3d, 0x4f, 0x87, + 0x98, 0x24, 0x6e, 0x55, 0x58, 0xe6, 0x17, 0xcf, 0x92, 0x9e, 0x79, 0x2e, 0xa6, 0xbc, 0x5a, 0x98, + 0x88, 0xf4, 0x50, 0x80, 0x52, 0x52, 0xf9, 0x8b, 0xdb, 0xbf, 0x99, 0xd6, 0x51, 0xac, 0x02, 0xf1, + 0x1a, 0x8a, 0x6d, 0x20, 0x1d, 0xa7, 0x27, 0xd5, 0x11, 0xdf, 0x41, 0x23, 0x7c, 0x00, 0xe0, 0xd3, + 0x0e, 0xd5, 0x71, 0x6f, 0x12, 0xa7, 0x45, 0x61, 0x2f, 0xd7, 0xd7, 0xfe, 0x47, 0x0e, 0x56, 0x2e, + 0xf2, 0xe5, 0x8f, 0x60, 0x26, 0xc2, 0xe7, 0x91, 0xd1, 0x6a, 0x48, 0x4b, 0x31, 0x0c, 0xf1, 0xe9, + 0xb4, 0xa9, 0x4f, 0xcd, 0x14, 0x41, 0x51, 0xd8, 0x6f, 0x0b, 0x73, 0xab, 0x09, 0xcb, 0x71, 0x28, + 0x1a, 0x91, 0xef, 0x63, 
0x39, 0x00, 0xc7, 0x73, 0x8d, 0x96, 0x62, 0x31, 0x0e, 0xfd, 0xed, 0x14, + 0xb5, 0xeb, 0xda, 0xdf, 0xe6, 0xa0, 0x9c, 0x8d, 0x27, 0xb1, 0xc2, 0x7d, 0x51, 0x69, 0x34, 0xa5, + 0x72, 0x36, 0x08, 0xaf, 0xed, 0xe4, 0x53, 0xb8, 0x35, 0x22, 0x21, 0xf3, 0x57, 0x4f, 0xc8, 0x15, + 0x77, 0xf8, 0xa2, 0xfd, 0x65, 0x3a, 0x79, 0x19, 0xe9, 0xd9, 0xc9, 0xcb, 0xec, 0x99, 0x68, 0xf2, + 0x92, 0xc2, 0xfe, 0x6a, 0x0e, 0x4a, 0x72, 0x19, 0x74, 0xc5, 0xff, 0x19, 0x94, 0x98, 0x77, 0x8e, + 0x7d, 0xe7, 0x9d, 0xe7, 0x46, 0xa7, 0x3a, 0xde, 0xd6, 0x86, 0x08, 0x77, 0x49, 0xf4, 0x93, 0xfb, + 0x8a, 0x0f, 0xa4, 0xfd, 0x6b, 0x61, 0x6e, 0x7d, 0x0e, 0x65, 0x85, 0x3e, 0xc5, 0x5e, 0xe7, 0x34, + 0xd2, 0xb5, 0xef, 0x52, 0xb8, 0x7a, 0x5c, 0x53, 0xda, 0x5b, 0x9f, 0x41, 0x51, 0x55, 0x38, 0x21, + 0x29, 0x4c, 0x74, 0x7f, 0x41, 0x9a, 0x0b, 0x01, 0xf1, 0x05, 0xac, 0xb0, 0x10, 0x9f, 0x79, 0xf8, + 0x9d, 0x93, 0x9d, 0xc0, 0xfc, 0xf8, 0x11, 0x2c, 0x6b, 0xdc, 0x5e, 0x6f, 0x1e, 0xcf, 0xe1, 0x46, + 0x3f, 0x99, 0x9e, 0x4f, 0x61, 0x3c, 0x9b, 0x95, 0x65, 0xd3, 0xd3, 0x6a, 0x42, 0xf2, 0x0c, 0xa7, + 0x37, 0x3d, 0x13, 0x71, 0xb3, 0xa8, 0x61, 0xbb, 0xc9, 0x2c, 0xbf, 0x84, 0x62, 0xba, 0xb9, 0xd5, + 0x4a, 0xe6, 0xfe, 0x18, 0x25, 0xf3, 0xdc, 0x0b, 0xf0, 0x41, 0x97, 0x29, 0xd5, 0x92, 0xfc, 0x68, + 0x15, 0x02, 0xfd, 0x4d, 0xd4, 0x02, 0x59, 0x9a, 0x4c, 0x44, 0xb6, 0xb4, 0xb4, 0x1e, 0x02, 0x04, + 0xd8, 0xf5, 0x90, 0x73, 0xe2, 0xf9, 0x46, 0x09, 0xd7, 0x9c, 0x6a, 0x15, 0x25, 0xe2, 0x89, 0xe7, + 0x63, 0x6b, 0x03, 0x66, 0x5c, 0x14, 0x21, 0x9d, 0x68, 0xc3, 0x8b, 0xb9, 0xd5, 0x8d, 0x30, 0x4f, + 0x70, 0xd2, 0xd4, 0x3a, 0x80, 0x2a, 0x72, 0x1d, 0xcf, 0x15, 0xb9, 0xd5, 0xa6, 0xac, 0xab, 0x97, + 0xf1, 0x24, 0xa4, 0x81, 0x4e, 0xb2, 0xcb, 0x7c, 0xd2, 0x9c, 0x6a, 0xad, 0x20, 0x77, 0xd7, 0x3d, + 0xa0, 0xdb, 0x94, 0x75, 0xe5, 0x5a, 0x3e, 0x09, 0x69, 0xb0, 0x35, 0x0f, 0xb3, 0x92, 0xc7, 0x5e, + 0x83, 0x0f, 0x64, 0x36, 0x6c, 0xc5, 0x01, 0xc3, 0xe1, 0x2e, 0xd9, 0x8f, 0x42, 0x8c, 0x02, 0xad, + 0x89, 0x3e, 0x81, 0xef, 0xc9, 0x9b, 0x2f, 0x28, 0xd9, 0x7f, 0xeb, 0x31, 0x26, 0xd4, 0xf1, 0x80, + 0xc9, 0xb7, 0x39, 0x58, 0x93, 0x36, 0x07, 0x61, 0x8c, 0x0f, 0x85, 0xbf, 0xfa, 0xee, 0x5b, 0xcf, + 0x60, 0x45, 0x55, 0x04, 0xe7, 0x38, 0x8e, 0x22, 0x4a, 0x1c, 0x1f, 0x1d, 0x63, 0xdf, 0x28, 0x5b, + 0x97, 0x15, 0x70, 0x4b, 0xe2, 0x9e, 0x09, 0x98, 0x90, 0x43, 0x9a, 0x2d, 0xed, 0x6a, 0x26, 0x35, + 0x6b, 0x41, 0x81, 0x52, 0xd9, 0xf2, 0x14, 0x96, 0xda, 0x34, 0x60, 0x88, 0xc8, 0x71, 0x21, 0x42, + 0x70, 0x68, 0x56, 0x93, 0x53, 0xd4, 0x96, 0x04, 0xd9, 0xbf, 0xcd, 0xc1, 0x0d, 0x39, 0xfb, 0x97, + 0x71, 0xc4, 0xb3, 0xd3, 0x7e, 0x6f, 0x7d, 0xd7, 0xfe, 0x5b, 0x1e, 0x4a, 0x72, 0x48, 0x7a, 0x24, + 0x0f, 0x26, 0x8d, 0xd8, 0x6c, 0xbc, 0xbe, 0x81, 0xa2, 0x47, 0x1c, 0x35, 0x33, 0x3d, 0x94, 0x07, + 0xe3, 0x4a, 0xfa, 0x25, 0xd1, 0xd0, 0x9c, 0x6a, 0x15, 0x3c, 0x7d, 0xc5, 0xda, 0x87, 0xb9, 0x63, + 0x19, 0x74, 0x7a, 0xe9, 0x3f, 0x33, 0x22, 0xbe, 0x28, 0x4e, 0x9b, 0x53, 0x2d, 0x4d, 0x65, 0xbd, + 0x02, 0xa0, 0x71, 0x94, 0x8c, 0x58, 0x95, 0xf0, 0xfb, 0x46, 0xc4, 0x03, 0x1e, 0x14, 0x79, 0x4b, + 0xe3, 0x48, 0x8f, 0xf5, 0x04, 0x2a, 0x84, 0x12, 0x87, 0x27, 0x49, 0xa0, 0xab, 0xfb, 0x23, 0x23, + 0xe6, 0xd1, 0xd9, 0xd3, 0x9c, 0x6a, 0x95, 0x49, 0xe6, 0xee, 0x56, 0x01, 0xe6, 0xd4, 0xd1, 0xa3, + 0xfd, 0x87, 0x69, 0xb8, 0xd5, 0xc2, 0x9c, 0x51, 0xc2, 0xbd, 0x33, 0xdc, 0xb7, 0x51, 0xda, 0xed, + 0xed, 0xf1, 0x79, 0x35, 0x27, 0x35, 0xe3, 0xd8, 0xcd, 0x49, 0x43, 0x9d, 0x78, 0x71, 0x8e, 0xa3, + 0xde, 0x96, 0x9f, 0x5b, 0x2f, 0xfb, 0x36, 0xec, 0xbc, 0x3a, 0x3d, 0x39, 0x5b, 0x1f, 0x41, 0xef, + 0xec, 0x27, 0x7f, 0x85, 0xb3, 0x9f, 0x19, 0xf3, 
0xb3, 0x9f, 0x7f, 0x16, 0xe0, 0xa3, 0x67, 0xf2, + 0xbc, 0xb4, 0xb7, 0x48, 0x5a, 0x4b, 0xf4, 0x8e, 0xf5, 0xf8, 0x29, 0x0d, 0x23, 0x67, 0xa2, 0x3c, + 0xac, 0x48, 0x4c, 0x5a, 0x28, 0x1a, 0x50, 0xf1, 0x29, 0xe9, 0x4c, 0x56, 0x6d, 0xca, 0x02, 0xd2, + 0x1c, 0x91, 0xcf, 0xf9, 0x6b, 0xeb, 0xe8, 0x99, 0x89, 0x75, 0xf4, 0x33, 0xb8, 0x81, 0x7c, 0x9f, + 0xbe, 0x73, 0x4e, 0x7c, 0x7c, 0xee, 0xc9, 0xc3, 0x11, 0x29, 0x65, 0x67, 0xc7, 0x1e, 0x89, 0x58, + 0x12, 0xf7, 0x44, 0xc3, 0x94, 0xa2, 0x7d, 0x04, 0x65, 0xd4, 0x6e, 0x63, 0x92, 0x08, 0x62, 0x13, + 0xad, 0x52, 0x52, 0x08, 0x45, 0x20, 0x2a, 0x12, 0xf2, 0x88, 0x86, 0xcf, 0x1b, 0x55, 0x24, 0xe4, + 0x11, 0x05, 0xfe, 0x02, 0x56, 0xfa, 0x95, 0xa6, 0x23, 0xd5, 0x7c, 0xc1, 0xe8, 0xb8, 0x24, 0x23, + 0x28, 0x0f, 0xb4, 0xb2, 0xcf, 0x6c, 0x30, 0x8a, 0x13, 0x6d, 0x30, 0x84, 0xb2, 0xe1, 0xbf, 0x8a, + 0x51, 0x88, 0x9d, 0x0c, 0x07, 0x98, 0x74, 0x11, 0x05, 0x7b, 0x96, 0x32, 0x5d, 0xb0, 0xc9, 0x2f, + 0x5d, 0x61, 0x93, 0xdf, 0x82, 0x5b, 0x7a, 0x40, 0x83, 0x6c, 0x26, 0x07, 0x81, 0x37, 0x14, 0xf6, + 0x79, 0x3f, 0xe7, 0x3b, 0x58, 0xe8, 0x7f, 0x17, 0x52, 0xad, 0x48, 0xe5, 0xb5, 0x37, 0x46, 0x79, + 0xa5, 0xb9, 0xf8, 0x44, 0xa2, 0xf7, 0x15, 0x58, 0xea, 0xb0, 0x8b, 0x6f, 0xb5, 0x2a, 0x27, 0xd9, + 0x9f, 0xf2, 0x20, 0x36, 0xf4, 0xda, 0xd8, 0x61, 0x21, 0x3e, 0xf1, 0xce, 0xab, 0x0b, 0x46, 0x07, + 0xb1, 0x02, 0xb1, 0x27, 0x01, 0xc2, 0xb7, 0x2c, 0xa4, 0x01, 0x55, 0xf1, 0xb1, 0x68, 0xe2, 0x5b, + 0x69, 0x2f, 0x02, 0xc3, 0xfe, 0x4f, 0x1e, 0x8a, 0x0d, 0xc6, 0x74, 0x29, 0x79, 0x0d, 0xcb, 0x01, + 0x22, 0x2e, 0x8a, 0x68, 0x28, 0xdf, 0x0e, 0x65, 0xf6, 0x8f, 0x13, 0x95, 0xca, 0xc5, 0x94, 0x45, + 0x5d, 0xed, 0xaf, 0xe4, 0xd3, 0xdf, 0x69, 0x25, 0xcf, 0x5f, 0xb7, 0x92, 0xef, 0xc0, 0x9c, 0x3e, + 0xeb, 0x98, 0x31, 0x3b, 0x96, 0x68, 0xb8, 0x6a, 0x33, 0x25, 0xb9, 0x34, 0xd6, 0xda, 0x87, 0x85, + 0x2e, 0x8d, 0xa3, 0xf8, 0x18, 0x27, 0x87, 0x1c, 0xb3, 0xa6, 0x6c, 0x4a, 0xc4, 0x48, 0xb6, 0x8a, + 0xe6, 0x50, 0x87, 0x1c, 0xd6, 0x31, 0xac, 0x9c, 0x46, 0x81, 0xff, 0x63, 0x47, 0x09, 0x9b, 0xe3, + 0x98, 0xb8, 0x3e, 0xe6, 0xd5, 0x39, 0xc9, 0x5c, 0x1f, 0xcf, 0xfc, 0x5c, 0xc0, 0xb6, 0x24, 0x4a, + 0xf1, 0x2f, 0x4b, 0xba, 0xcc, 0x65, 0x6e, 0xff, 0x65, 0x1a, 0x56, 0x1a, 0x8c, 0x3d, 0x26, 0x1d, + 0xd4, 0xc1, 0x01, 0x26, 0xd1, 0xff, 0x41, 0xf3, 0xed, 0xb9, 0x2c, 0x7f, 0x0d, 0x97, 0xed, 0xc0, + 0x9c, 0x76, 0xd5, 0xcc, 0x15, 0x5c, 0xa5, 0xb1, 0xf6, 0x57, 0x79, 0xb8, 0xa5, 0x1a, 0x74, 0x83, + 0xb1, 0x5d, 0xf5, 0x3a, 0x53, 0x2f, 0xe1, 0x3d, 0x98, 0x93, 0xef, 0x38, 0x5d, 0xa3, 0x8e, 0x3c, + 0x8b, 0x18, 0xdb, 0x75, 0xad, 0x18, 0x8a, 0xe9, 0x9b, 0x50, 0xd9, 0x85, 0x17, 0xea, 0xbf, 0x18, + 0x53, 0x83, 0x86, 0x1f, 0xdf, 0x60, 0x6c, 0x5f, 0x10, 0xc8, 0x3a, 0x34, 0xfa, 0x76, 0xab, 0x80, + 0xf4, 0xb7, 0x3e, 0x1d, 0x9f, 0xbf, 0xd6, 0x1b, 0xcf, 0xf7, 0xf0, 0x36, 0xc5, 0xfe, 0x1d, 0xc0, + 0xed, 0x51, 0xfa, 0xe8, 0x35, 0x2c, 0x0d, 0xb4, 0x89, 0x24, 0x9e, 0x27, 0x0b, 0x9b, 0xc5, 0xfe, + 0x2e, 0xc4, 0x2d, 0x17, 0x6e, 0x5f, 0xdc, 0x86, 0x92, 0x08, 0x9f, 0x8c, 0xff, 0xe6, 0x45, 0x7d, + 0x89, 0x5b, 0xcf, 0xa1, 0xd4, 0x6b, 0xbb, 0x57, 0x0b, 0x78, 0x48, 0x7b, 0x39, 0xb7, 0xde, 0x80, + 0x35, 0xd4, 0xcc, 0xaf, 0x56, 0xf9, 0x96, 0x06, 0xba, 0x3b, 0xef, 0x2f, 0x19, 0xb3, 0xd7, 0x2a, + 0x19, 0x7b, 0x83, 0x7a, 0x74, 0x6e, 0xf2, 0x2e, 0xd4, 0x2f, 0x4f, 0x07, 0x8b, 0xd0, 0xfc, 0x75, + 0x8b, 0xd0, 0x70, 0xc5, 0x2f, 0x5c, 0xbf, 0xe2, 0x0f, 0x89, 0xe0, 0xe2, 0x55, 0x0e, 0x93, 0x33, + 0xaa, 0x13, 0x26, 0x53, 0x9d, 0x83, 0x9a, 0xb7, 0x34, 0xa9, 0xe6, 0x1d, 0x25, 0xc1, 0xcb, 0x57, + 0x92, 0xe0, 0x23, 0x44, 0x70, 0xe5, 0x4a, 0x22, 0xf8, 0xbd, 0x2a, 0xad, 
0x0b, 0x04, 0xe6, 0xd2, + 0xff, 0x44, 0x60, 0xda, 0x5f, 0x4f, 0xa7, 0xef, 0x07, 0x5e, 0xc9, 0x7f, 0xec, 0xe8, 0xba, 0xf8, + 0x75, 0x0e, 0xd6, 0x2e, 0xf9, 0x2b, 0x8f, 0xec, 0x59, 0x0b, 0xf5, 0x03, 0xb3, 0xe1, 0x29, 0x66, + 0x7d, 0x70, 0x9d, 0x9e, 0x44, 0x8e, 0xba, 0xd9, 0xaa, 0xba, 0x23, 0xee, 0x58, 0xaf, 0xa1, 0x9c, + 0x15, 0x3b, 0x7a, 0x1f, 0x7a, 0x05, 0xad, 0xd3, 0x9c, 0x6a, 0x95, 0x82, 0xde, 0xb5, 0xad, 0x0a, + 0xa8, 0x9f, 0xea, 0xdf, 0x56, 0x5b, 0xff, 0xce, 0x81, 0xdd, 0xa6, 0xc1, 0x18, 0xde, 0xad, 0xa5, + 0x86, 0x2b, 0x86, 0x25, 0x16, 0x8c, 0xef, 0x09, 0x0f, 0xef, 0xe5, 0xde, 0xec, 0x68, 0x4c, 0x87, + 0xfa, 0x88, 0x74, 0x6a, 0x34, 0xec, 0xac, 0x77, 0x30, 0x91, 0xfe, 0x4f, 0xfe, 0xa8, 0xc4, 0x3c, + 0x3e, 0xea, 0x0f, 0x5f, 0x0f, 0xd4, 0xc7, 0x1f, 0xa7, 0xf3, 0x4f, 0x1b, 0x8d, 0x3f, 0x4d, 0x7f, + 0xfc, 0x54, 0x91, 0x35, 0x5c, 0x5e, 0x53, 0x5f, 0xc5, 0xb7, 0xc3, 0x8d, 0xda, 0xb6, 0x34, 0xfb, + 0x26, 0x31, 0x38, 0x6a, 0xb8, 0xfc, 0x28, 0x35, 0x38, 0x3a, 0xdc, 0x38, 0x52, 0x06, 0xff, 0x9a, + 0xb6, 0xd5, 0xd5, 0xcd, 0xcd, 0x86, 0xcb, 0x37, 0x37, 0x53, 0x93, 0xcd, 0xcd, 0xc3, 0x8d, 0xcd, + 0x4d, 0x65, 0x74, 0x3c, 0x27, 0x47, 0x77, 0xef, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf3, 0x4f, + 0x9a, 0x27, 0xe1, 0x26, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/asset_types.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/asset_types.pb.go new file mode 100644 index 0000000..78b7244 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/asset_types.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/asset_types.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A YouTube asset. +type YoutubeVideoAsset struct { + // YouTube video id. This is the 11 character string value used in the + // YouTube video URL. 
+ YoutubeVideoId *wrappers.StringValue `protobuf:"bytes,1,opt,name=youtube_video_id,json=youtubeVideoId,proto3" json:"youtube_video_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YoutubeVideoAsset) Reset() { *m = YoutubeVideoAsset{} } +func (m *YoutubeVideoAsset) String() string { return proto.CompactTextString(m) } +func (*YoutubeVideoAsset) ProtoMessage() {} +func (*YoutubeVideoAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_types_8b202aa014f30014, []int{0} +} +func (m *YoutubeVideoAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YoutubeVideoAsset.Unmarshal(m, b) +} +func (m *YoutubeVideoAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YoutubeVideoAsset.Marshal(b, m, deterministic) +} +func (dst *YoutubeVideoAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_YoutubeVideoAsset.Merge(dst, src) +} +func (m *YoutubeVideoAsset) XXX_Size() int { + return xxx_messageInfo_YoutubeVideoAsset.Size(m) +} +func (m *YoutubeVideoAsset) XXX_DiscardUnknown() { + xxx_messageInfo_YoutubeVideoAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_YoutubeVideoAsset proto.InternalMessageInfo + +func (m *YoutubeVideoAsset) GetYoutubeVideoId() *wrappers.StringValue { + if m != nil { + return m.YoutubeVideoId + } + return nil +} + +// A MediaBundle asset. +type MediaBundleAsset struct { + // Media bundle (ZIP file) asset data. The format of the uploaded ZIP file + // depends on the ad field where it will be used. For more information on the + // format, see the documentation of the ad field where you plan on using the + // MediaBundleAsset. This field is mutate only. + Data *wrappers.BytesValue `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaBundleAsset) Reset() { *m = MediaBundleAsset{} } +func (m *MediaBundleAsset) String() string { return proto.CompactTextString(m) } +func (*MediaBundleAsset) ProtoMessage() {} +func (*MediaBundleAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_types_8b202aa014f30014, []int{1} +} +func (m *MediaBundleAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaBundleAsset.Unmarshal(m, b) +} +func (m *MediaBundleAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaBundleAsset.Marshal(b, m, deterministic) +} +func (dst *MediaBundleAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaBundleAsset.Merge(dst, src) +} +func (m *MediaBundleAsset) XXX_Size() int { + return xxx_messageInfo_MediaBundleAsset.Size(m) +} +func (m *MediaBundleAsset) XXX_DiscardUnknown() { + xxx_messageInfo_MediaBundleAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaBundleAsset proto.InternalMessageInfo + +func (m *MediaBundleAsset) GetData() *wrappers.BytesValue { + if m != nil { + return m.Data + } + return nil +} + +// An Image asset. +type ImageAsset struct { + // The raw bytes data of an image. This field is mutate only. + Data *wrappers.BytesValue `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + // File size of the image asset in bytes. + FileSize *wrappers.Int64Value `protobuf:"bytes,2,opt,name=file_size,json=fileSize,proto3" json:"file_size,omitempty"` + // MIME type of the image asset. 
+ MimeType enums.MimeTypeEnum_MimeType `protobuf:"varint,3,opt,name=mime_type,json=mimeType,proto3,enum=google.ads.googleads.v1.enums.MimeTypeEnum_MimeType" json:"mime_type,omitempty"` + // Metadata for this image at its original size. + FullSize *ImageDimension `protobuf:"bytes,4,opt,name=full_size,json=fullSize,proto3" json:"full_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAsset) Reset() { *m = ImageAsset{} } +func (m *ImageAsset) String() string { return proto.CompactTextString(m) } +func (*ImageAsset) ProtoMessage() {} +func (*ImageAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_types_8b202aa014f30014, []int{2} +} +func (m *ImageAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAsset.Unmarshal(m, b) +} +func (m *ImageAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAsset.Marshal(b, m, deterministic) +} +func (dst *ImageAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAsset.Merge(dst, src) +} +func (m *ImageAsset) XXX_Size() int { + return xxx_messageInfo_ImageAsset.Size(m) +} +func (m *ImageAsset) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAsset proto.InternalMessageInfo + +func (m *ImageAsset) GetData() *wrappers.BytesValue { + if m != nil { + return m.Data + } + return nil +} + +func (m *ImageAsset) GetFileSize() *wrappers.Int64Value { + if m != nil { + return m.FileSize + } + return nil +} + +func (m *ImageAsset) GetMimeType() enums.MimeTypeEnum_MimeType { + if m != nil { + return m.MimeType + } + return enums.MimeTypeEnum_UNSPECIFIED +} + +func (m *ImageAsset) GetFullSize() *ImageDimension { + if m != nil { + return m.FullSize + } + return nil +} + +// Metadata for an image at a certain size, either original or resized. +type ImageDimension struct { + // Height of the image. + HeightPixels *wrappers.Int64Value `protobuf:"bytes,1,opt,name=height_pixels,json=heightPixels,proto3" json:"height_pixels,omitempty"` + // Width of the image. + WidthPixels *wrappers.Int64Value `protobuf:"bytes,2,opt,name=width_pixels,json=widthPixels,proto3" json:"width_pixels,omitempty"` + // A URL that returns the image with this height and width. 
+ Url *wrappers.StringValue `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageDimension) Reset() { *m = ImageDimension{} } +func (m *ImageDimension) String() string { return proto.CompactTextString(m) } +func (*ImageDimension) ProtoMessage() {} +func (*ImageDimension) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_types_8b202aa014f30014, []int{3} +} +func (m *ImageDimension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageDimension.Unmarshal(m, b) +} +func (m *ImageDimension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageDimension.Marshal(b, m, deterministic) +} +func (dst *ImageDimension) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageDimension.Merge(dst, src) +} +func (m *ImageDimension) XXX_Size() int { + return xxx_messageInfo_ImageDimension.Size(m) +} +func (m *ImageDimension) XXX_DiscardUnknown() { + xxx_messageInfo_ImageDimension.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageDimension proto.InternalMessageInfo + +func (m *ImageDimension) GetHeightPixels() *wrappers.Int64Value { + if m != nil { + return m.HeightPixels + } + return nil +} + +func (m *ImageDimension) GetWidthPixels() *wrappers.Int64Value { + if m != nil { + return m.WidthPixels + } + return nil +} + +func (m *ImageDimension) GetUrl() *wrappers.StringValue { + if m != nil { + return m.Url + } + return nil +} + +// A Text asset. +type TextAsset struct { + // Text content of the text asset. + Text *wrappers.StringValue `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAsset) Reset() { *m = TextAsset{} } +func (m *TextAsset) String() string { return proto.CompactTextString(m) } +func (*TextAsset) ProtoMessage() {} +func (*TextAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_types_8b202aa014f30014, []int{4} +} +func (m *TextAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAsset.Unmarshal(m, b) +} +func (m *TextAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAsset.Marshal(b, m, deterministic) +} +func (dst *TextAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAsset.Merge(dst, src) +} +func (m *TextAsset) XXX_Size() int { + return xxx_messageInfo_TextAsset.Size(m) +} +func (m *TextAsset) XXX_DiscardUnknown() { + xxx_messageInfo_TextAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAsset proto.InternalMessageInfo + +func (m *TextAsset) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func init() { + proto.RegisterType((*YoutubeVideoAsset)(nil), "google.ads.googleads.v1.common.YoutubeVideoAsset") + proto.RegisterType((*MediaBundleAsset)(nil), "google.ads.googleads.v1.common.MediaBundleAsset") + proto.RegisterType((*ImageAsset)(nil), "google.ads.googleads.v1.common.ImageAsset") + proto.RegisterType((*ImageDimension)(nil), "google.ads.googleads.v1.common.ImageDimension") + proto.RegisterType((*TextAsset)(nil), "google.ads.googleads.v1.common.TextAsset") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/asset_types.proto", fileDescriptor_asset_types_8b202aa014f30014) +} + +var fileDescriptor_asset_types_8b202aa014f30014 = []byte{ + // 537 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x41, 0x6b, 0xdb, 0x30, + 0x14, 0xc7, 0x71, 0x12, 0x46, 0xa2, 0x76, 0x59, 0xe7, 0x53, 0xc8, 0x4a, 0x29, 0x3e, 0xf5, 0x32, + 0xb9, 0xe9, 0xca, 0x18, 0x1e, 0x1b, 0x73, 0xda, 0xad, 0x84, 0x51, 0xc8, 0xd2, 0x12, 0xd8, 0x16, + 0x08, 0x4a, 0xf5, 0xe2, 0x08, 0x2c, 0xc9, 0x58, 0x72, 0x9a, 0xf4, 0x8b, 0xec, 0xbe, 0xe3, 0x3e, + 0xc6, 0x8e, 0xfb, 0x28, 0xfd, 0x14, 0x43, 0x92, 0x13, 0x3a, 0x46, 0xd6, 0xb2, 0x93, 0x9f, 0xa5, + 0xff, 0xff, 0xa7, 0xa7, 0xf7, 0x9e, 0xd0, 0x61, 0x22, 0x65, 0x92, 0x42, 0x48, 0xa8, 0x0a, 0x5d, + 0x68, 0xa2, 0x79, 0x27, 0xbc, 0x92, 0x9c, 0x4b, 0x11, 0x12, 0xa5, 0x40, 0x8f, 0xf5, 0x32, 0x03, + 0x85, 0xb3, 0x5c, 0x6a, 0xe9, 0xef, 0x39, 0x19, 0x26, 0x54, 0xe1, 0xb5, 0x03, 0xcf, 0x3b, 0xd8, + 0x39, 0xda, 0xcf, 0x37, 0x11, 0x41, 0x14, 0x5c, 0x85, 0x9c, 0x71, 0xb0, 0x3c, 0x87, 0x6b, 0x97, + 0xb8, 0xd0, 0xfe, 0x4d, 0x8a, 0x69, 0x78, 0x9d, 0x93, 0x2c, 0x83, 0xbc, 0x3c, 0xae, 0xbd, 0xbb, + 0xc2, 0x65, 0x2c, 0x24, 0x42, 0x48, 0x4d, 0x34, 0x93, 0xa2, 0xdc, 0x0d, 0xbe, 0xa2, 0xa7, 0x9f, + 0x65, 0xa1, 0x8b, 0x09, 0x0c, 0x19, 0x05, 0x19, 0x9b, 0x6c, 0xfd, 0x0f, 0x68, 0x67, 0xe9, 0x16, + 0xc7, 0x73, 0xb3, 0x3a, 0x66, 0xb4, 0xe5, 0xed, 0x7b, 0x07, 0x5b, 0x47, 0xbb, 0x65, 0xc6, 0x78, + 0x75, 0x1a, 0xbe, 0xd0, 0x39, 0x13, 0xc9, 0x90, 0xa4, 0x05, 0x0c, 0x9a, 0xcb, 0x3b, 0xa8, 0x1e, + 0x0d, 0x4e, 0xd0, 0xce, 0x39, 0x50, 0x46, 0xba, 0x85, 0xa0, 0x29, 0x38, 0x76, 0x88, 0x6a, 0x94, + 0x68, 0x52, 0xf2, 0x9e, 0xfd, 0xc5, 0xeb, 0x2e, 0x35, 0x28, 0x87, 0xb3, 0xc2, 0xe0, 0x5b, 0x05, + 0xa1, 0x1e, 0x27, 0xc9, 0x7f, 0xfa, 0xfd, 0x57, 0xa8, 0x31, 0x65, 0x29, 0x8c, 0x15, 0xbb, 0x81, + 0x56, 0x65, 0x83, 0xab, 0x27, 0xf4, 0xcb, 0x63, 0xe7, 0xaa, 0x1b, 0xf5, 0x05, 0xbb, 0x01, 0xff, + 0x13, 0x6a, 0xac, 0x8b, 0xdd, 0xaa, 0xee, 0x7b, 0x07, 0xcd, 0xa3, 0x63, 0xbc, 0xa9, 0x79, 0xb6, + 0x39, 0xf8, 0x9c, 0x71, 0xb8, 0x5c, 0x66, 0xf0, 0x5e, 0x14, 0x7c, 0xfd, 0x33, 0xa8, 0xf3, 0x32, + 0xf2, 0x3f, 0xa2, 0xc6, 0xb4, 0x48, 0x53, 0x97, 0x4c, 0xcd, 0x26, 0x83, 0xf1, 0xbf, 0xe7, 0x01, + 0xdb, 0xcb, 0x9f, 0x32, 0x0e, 0x42, 0x31, 0x29, 0x06, 0x75, 0x03, 0x30, 0xf9, 0x05, 0x3f, 0x3d, + 0xd4, 0xfc, 0x73, 0xd3, 0x7f, 0x87, 0x1e, 0xcf, 0x80, 0x25, 0x33, 0x3d, 0xce, 0xd8, 0x02, 0x52, + 0xb5, 0xb1, 0x4c, 0x77, 0x2e, 0xbc, 0xed, 0x1c, 0x7d, 0x6b, 0xf0, 0xdf, 0xa2, 0xed, 0x6b, 0x46, + 0xf5, 0x6c, 0x05, 0x78, 0x40, 0xc5, 0xb6, 0xac, 0xa1, 0xf4, 0x63, 0x54, 0x2d, 0xf2, 0xd4, 0x96, + 0xeb, 0xbe, 0x71, 0x31, 0xc2, 0xe0, 0x0d, 0x6a, 0x5c, 0xc2, 0x42, 0xbb, 0xe6, 0x1e, 0xa2, 0x9a, + 0x86, 0x85, 0x7e, 0xd0, 0xb0, 0x59, 0x65, 0xf7, 0xd6, 0x43, 0xc1, 0x95, 0xe4, 0xf7, 0xd4, 0xb0, + 0xfb, 0xc4, 0xf2, 0x4d, 0x0b, 0x54, 0xdf, 0xc0, 0xfa, 0xde, 0x97, 0xd3, 0xd2, 0x92, 0xc8, 0x94, + 0x88, 0x04, 0xcb, 0x3c, 0x09, 0x13, 0x10, 0xf6, 0xa8, 0xd5, 0xb3, 0xcb, 0x98, 0xda, 0xf4, 0xae, + 0x5f, 0xbb, 0xcf, 0xf7, 0x4a, 0xf5, 0x2c, 0x8e, 0x7f, 0x54, 0xf6, 0xce, 0x1c, 0x2c, 0xa6, 0x0a, + 0xbb, 0xd0, 0x44, 0xc3, 0x0e, 0x3e, 0xb1, 0xb2, 0x5f, 0x2b, 0xc1, 0x28, 0xa6, 0x6a, 0xb4, 0x16, + 0x8c, 0x86, 0x9d, 0x91, 0x13, 0xdc, 0x56, 0x02, 0xb7, 0x1a, 0x45, 0x31, 0x55, 0x51, 0xb4, 0x96, + 0x44, 0xd1, 0xb0, 0x13, 0x45, 0x4e, 0x34, 0x79, 0x64, 0xb3, 0x7b, 0xf1, 0x3b, 0x00, 0x00, 0xff, + 0xff, 0xd8, 0x95, 0x59, 0x49, 0x74, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/bidding.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/bidding.pb.go new file mode 100644 index 0000000..f32b832 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/bidding.pb.go @@ -0,0 +1,878 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/bidding.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Commission is an automatic bidding strategy in which the advertiser pays a +// certain portion of the conversion value. +type Commission struct { + // Commission rate defines the portion of the conversion value that the + // advertiser will be billed. A commission rate of x should be passed into + // this field as (x * 1,000,000). For example, 106,000 represents a commission + // rate of 0.106 (10.6%). + CommissionRateMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=commission_rate_micros,json=commissionRateMicros,proto3" json:"commission_rate_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Commission) Reset() { *m = Commission{} } +func (m *Commission) String() string { return proto.CompactTextString(m) } +func (*Commission) ProtoMessage() {} +func (*Commission) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{0} +} +func (m *Commission) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Commission.Unmarshal(m, b) +} +func (m *Commission) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Commission.Marshal(b, m, deterministic) +} +func (dst *Commission) XXX_Merge(src proto.Message) { + xxx_messageInfo_Commission.Merge(dst, src) +} +func (m *Commission) XXX_Size() int { + return xxx_messageInfo_Commission.Size(m) +} +func (m *Commission) XXX_DiscardUnknown() { + xxx_messageInfo_Commission.DiscardUnknown(m) +} + +var xxx_messageInfo_Commission proto.InternalMessageInfo + +func (m *Commission) GetCommissionRateMicros() *wrappers.Int64Value { + if m != nil { + return m.CommissionRateMicros + } + return nil +} + +// An automated bidding strategy that raises bids for clicks +// that seem more likely to lead to a conversion and lowers +// them for clicks where they seem less likely. 
+type EnhancedCpc struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnhancedCpc) Reset() { *m = EnhancedCpc{} } +func (m *EnhancedCpc) String() string { return proto.CompactTextString(m) } +func (*EnhancedCpc) ProtoMessage() {} +func (*EnhancedCpc) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{1} +} +func (m *EnhancedCpc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnhancedCpc.Unmarshal(m, b) +} +func (m *EnhancedCpc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnhancedCpc.Marshal(b, m, deterministic) +} +func (dst *EnhancedCpc) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnhancedCpc.Merge(dst, src) +} +func (m *EnhancedCpc) XXX_Size() int { + return xxx_messageInfo_EnhancedCpc.Size(m) +} +func (m *EnhancedCpc) XXX_DiscardUnknown() { + xxx_messageInfo_EnhancedCpc.DiscardUnknown(m) +} + +var xxx_messageInfo_EnhancedCpc proto.InternalMessageInfo + +// Manual click-based bidding where user pays per click. +type ManualCpc struct { + // Whether bids are to be enhanced based on conversion optimizer data. + EnhancedCpcEnabled *wrappers.BoolValue `protobuf:"bytes,1,opt,name=enhanced_cpc_enabled,json=enhancedCpcEnabled,proto3" json:"enhanced_cpc_enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManualCpc) Reset() { *m = ManualCpc{} } +func (m *ManualCpc) String() string { return proto.CompactTextString(m) } +func (*ManualCpc) ProtoMessage() {} +func (*ManualCpc) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{2} +} +func (m *ManualCpc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManualCpc.Unmarshal(m, b) +} +func (m *ManualCpc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManualCpc.Marshal(b, m, deterministic) +} +func (dst *ManualCpc) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManualCpc.Merge(dst, src) +} +func (m *ManualCpc) XXX_Size() int { + return xxx_messageInfo_ManualCpc.Size(m) +} +func (m *ManualCpc) XXX_DiscardUnknown() { + xxx_messageInfo_ManualCpc.DiscardUnknown(m) +} + +var xxx_messageInfo_ManualCpc proto.InternalMessageInfo + +func (m *ManualCpc) GetEnhancedCpcEnabled() *wrappers.BoolValue { + if m != nil { + return m.EnhancedCpcEnabled + } + return nil +} + +// Manual impression-based bidding where user pays per thousand impressions. 
+type ManualCpm struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManualCpm) Reset() { *m = ManualCpm{} } +func (m *ManualCpm) String() string { return proto.CompactTextString(m) } +func (*ManualCpm) ProtoMessage() {} +func (*ManualCpm) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{3} +} +func (m *ManualCpm) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManualCpm.Unmarshal(m, b) +} +func (m *ManualCpm) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManualCpm.Marshal(b, m, deterministic) +} +func (dst *ManualCpm) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManualCpm.Merge(dst, src) +} +func (m *ManualCpm) XXX_Size() int { + return xxx_messageInfo_ManualCpm.Size(m) +} +func (m *ManualCpm) XXX_DiscardUnknown() { + xxx_messageInfo_ManualCpm.DiscardUnknown(m) +} + +var xxx_messageInfo_ManualCpm proto.InternalMessageInfo + +// View based bidding where user pays per video view. +type ManualCpv struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManualCpv) Reset() { *m = ManualCpv{} } +func (m *ManualCpv) String() string { return proto.CompactTextString(m) } +func (*ManualCpv) ProtoMessage() {} +func (*ManualCpv) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{4} +} +func (m *ManualCpv) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManualCpv.Unmarshal(m, b) +} +func (m *ManualCpv) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManualCpv.Marshal(b, m, deterministic) +} +func (dst *ManualCpv) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManualCpv.Merge(dst, src) +} +func (m *ManualCpv) XXX_Size() int { + return xxx_messageInfo_ManualCpv.Size(m) +} +func (m *ManualCpv) XXX_DiscardUnknown() { + xxx_messageInfo_ManualCpv.DiscardUnknown(m) +} + +var xxx_messageInfo_ManualCpv proto.InternalMessageInfo + +// An automated bidding strategy that sets bids to help get the most conversions +// for your campaign while spending your budget. +type MaximizeConversions struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaximizeConversions) Reset() { *m = MaximizeConversions{} } +func (m *MaximizeConversions) String() string { return proto.CompactTextString(m) } +func (*MaximizeConversions) ProtoMessage() {} +func (*MaximizeConversions) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{5} +} +func (m *MaximizeConversions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaximizeConversions.Unmarshal(m, b) +} +func (m *MaximizeConversions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaximizeConversions.Marshal(b, m, deterministic) +} +func (dst *MaximizeConversions) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaximizeConversions.Merge(dst, src) +} +func (m *MaximizeConversions) XXX_Size() int { + return xxx_messageInfo_MaximizeConversions.Size(m) +} +func (m *MaximizeConversions) XXX_DiscardUnknown() { + xxx_messageInfo_MaximizeConversions.DiscardUnknown(m) +} + +var xxx_messageInfo_MaximizeConversions proto.InternalMessageInfo + +// An automated bidding strategy which tries to maximize conversion value +// given a daily budget. 
+type MaximizeConversionValue struct { + // The target return on ad spend (ROAS) option. If set, the bid strategy will + // maximize revenue while averaging the target return on ad spend. If the + // target ROAS is high, the bid strategy may not be able to spend the full + // budget. If the target ROAS is not set, the bid strategy will aim to + // achieve the highest possible ROAS for the budget. + TargetRoas *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=target_roas,json=targetRoas,proto3" json:"target_roas,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaximizeConversionValue) Reset() { *m = MaximizeConversionValue{} } +func (m *MaximizeConversionValue) String() string { return proto.CompactTextString(m) } +func (*MaximizeConversionValue) ProtoMessage() {} +func (*MaximizeConversionValue) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{6} +} +func (m *MaximizeConversionValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaximizeConversionValue.Unmarshal(m, b) +} +func (m *MaximizeConversionValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaximizeConversionValue.Marshal(b, m, deterministic) +} +func (dst *MaximizeConversionValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaximizeConversionValue.Merge(dst, src) +} +func (m *MaximizeConversionValue) XXX_Size() int { + return xxx_messageInfo_MaximizeConversionValue.Size(m) +} +func (m *MaximizeConversionValue) XXX_DiscardUnknown() { + xxx_messageInfo_MaximizeConversionValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MaximizeConversionValue proto.InternalMessageInfo + +func (m *MaximizeConversionValue) GetTargetRoas() *wrappers.DoubleValue { + if m != nil { + return m.TargetRoas + } + return nil +} + +// An automated bidding strategy which sets CPC bids to target impressions on +// page one, or page one promoted slots on google.com. +type PageOnePromoted struct { + // The strategy goal of where impressions are desired to be shown on + // search result pages. + StrategyGoal enums.PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal `protobuf:"varint,1,opt,name=strategy_goal,json=strategyGoal,proto3,enum=google.ads.googleads.v1.enums.PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal" json:"strategy_goal,omitempty"` + // Maximum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + // Bid multiplier to be applied to the relevant bid estimate (depending on + // the `strategy_goal`) in determining a keyword's new CPC bid. + BidModifier *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // Whether the strategy should always follow bid estimate changes, or only + // increase. + // If false, always sets a keyword's new bid to the current bid estimate. + // If true, only updates a keyword's bid if the current bid estimate is + // greater than the current bid. + OnlyRaiseCpcBids *wrappers.BoolValue `protobuf:"bytes,4,opt,name=only_raise_cpc_bids,json=onlyRaiseCpcBids,proto3" json:"only_raise_cpc_bids,omitempty"` + // Whether the strategy is allowed to raise bids when the throttling + // rate of the budget it is serving out of rises above a threshold. 
+ RaiseCpcBidWhenBudgetConstrained *wrappers.BoolValue `protobuf:"bytes,5,opt,name=raise_cpc_bid_when_budget_constrained,json=raiseCpcBidWhenBudgetConstrained,proto3" json:"raise_cpc_bid_when_budget_constrained,omitempty"` + // Whether the strategy is allowed to raise bids on keywords with + // lower-range quality scores. + RaiseCpcBidWhenQualityScoreIsLow *wrappers.BoolValue `protobuf:"bytes,6,opt,name=raise_cpc_bid_when_quality_score_is_low,json=raiseCpcBidWhenQualityScoreIsLow,proto3" json:"raise_cpc_bid_when_quality_score_is_low,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PageOnePromoted) Reset() { *m = PageOnePromoted{} } +func (m *PageOnePromoted) String() string { return proto.CompactTextString(m) } +func (*PageOnePromoted) ProtoMessage() {} +func (*PageOnePromoted) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{7} +} +func (m *PageOnePromoted) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PageOnePromoted.Unmarshal(m, b) +} +func (m *PageOnePromoted) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PageOnePromoted.Marshal(b, m, deterministic) +} +func (dst *PageOnePromoted) XXX_Merge(src proto.Message) { + xxx_messageInfo_PageOnePromoted.Merge(dst, src) +} +func (m *PageOnePromoted) XXX_Size() int { + return xxx_messageInfo_PageOnePromoted.Size(m) +} +func (m *PageOnePromoted) XXX_DiscardUnknown() { + xxx_messageInfo_PageOnePromoted.DiscardUnknown(m) +} + +var xxx_messageInfo_PageOnePromoted proto.InternalMessageInfo + +func (m *PageOnePromoted) GetStrategyGoal() enums.PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal { + if m != nil { + return m.StrategyGoal + } + return enums.PageOnePromotedStrategyGoalEnum_UNSPECIFIED +} + +func (m *PageOnePromoted) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +func (m *PageOnePromoted) GetBidModifier() *wrappers.DoubleValue { + if m != nil { + return m.BidModifier + } + return nil +} + +func (m *PageOnePromoted) GetOnlyRaiseCpcBids() *wrappers.BoolValue { + if m != nil { + return m.OnlyRaiseCpcBids + } + return nil +} + +func (m *PageOnePromoted) GetRaiseCpcBidWhenBudgetConstrained() *wrappers.BoolValue { + if m != nil { + return m.RaiseCpcBidWhenBudgetConstrained + } + return nil +} + +func (m *PageOnePromoted) GetRaiseCpcBidWhenQualityScoreIsLow() *wrappers.BoolValue { + if m != nil { + return m.RaiseCpcBidWhenQualityScoreIsLow + } + return nil +} + +// An automated bid strategy that sets bids to help get as many conversions as +// possible at the target cost-per-acquisition (CPA) you set. +type TargetCpa struct { + // Average CPA target. + // This target should be greater than or equal to minimum billable unit based + // on the currency for the account. + TargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=target_cpa_micros,json=targetCpaMicros,proto3" json:"target_cpa_micros,omitempty"` + // Maximum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + // Minimum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. 
+ CpcBidFloorMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cpc_bid_floor_micros,json=cpcBidFloorMicros,proto3" json:"cpc_bid_floor_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetCpa) Reset() { *m = TargetCpa{} } +func (m *TargetCpa) String() string { return proto.CompactTextString(m) } +func (*TargetCpa) ProtoMessage() {} +func (*TargetCpa) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{8} +} +func (m *TargetCpa) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetCpa.Unmarshal(m, b) +} +func (m *TargetCpa) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetCpa.Marshal(b, m, deterministic) +} +func (dst *TargetCpa) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetCpa.Merge(dst, src) +} +func (m *TargetCpa) XXX_Size() int { + return xxx_messageInfo_TargetCpa.Size(m) +} +func (m *TargetCpa) XXX_DiscardUnknown() { + xxx_messageInfo_TargetCpa.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetCpa proto.InternalMessageInfo + +func (m *TargetCpa) GetTargetCpaMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetCpaMicros + } + return nil +} + +func (m *TargetCpa) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +func (m *TargetCpa) GetCpcBidFloorMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidFloorMicros + } + return nil +} + +// Target CPM (cost per thousand impressions) is an automated bidding strategy +// that sets bids to optimize performance given the target CPM you set. +type TargetCpm struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetCpm) Reset() { *m = TargetCpm{} } +func (m *TargetCpm) String() string { return proto.CompactTextString(m) } +func (*TargetCpm) ProtoMessage() {} +func (*TargetCpm) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{9} +} +func (m *TargetCpm) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetCpm.Unmarshal(m, b) +} +func (m *TargetCpm) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetCpm.Marshal(b, m, deterministic) +} +func (dst *TargetCpm) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetCpm.Merge(dst, src) +} +func (m *TargetCpm) XXX_Size() int { + return xxx_messageInfo_TargetCpm.Size(m) +} +func (m *TargetCpm) XXX_DiscardUnknown() { + xxx_messageInfo_TargetCpm.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetCpm proto.InternalMessageInfo + +// An automated bidding strategy that sets bids so that a certain percentage of +// search ads are shown at the top of the first page (or other targeted +// location). +// Next Id = 4 +type TargetImpressionShare struct { + // The targeted location on the search results page. + Location enums.TargetImpressionShareLocationEnum_TargetImpressionShareLocation `protobuf:"varint,1,opt,name=location,proto3,enum=google.ads.googleads.v1.enums.TargetImpressionShareLocationEnum_TargetImpressionShareLocation" json:"location,omitempty"` + // The desired fraction of ads to be shown in the targeted location in micros. + // E.g. 1% equals 10,000. 
+ LocationFractionMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=location_fraction_micros,json=locationFractionMicros,proto3" json:"location_fraction_micros,omitempty"` + // The highest CPC bid the automated bidding system is permitted to specify. + // This is a required field entered by the advertiser that sets the ceiling + // and specified in local micros. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetImpressionShare) Reset() { *m = TargetImpressionShare{} } +func (m *TargetImpressionShare) String() string { return proto.CompactTextString(m) } +func (*TargetImpressionShare) ProtoMessage() {} +func (*TargetImpressionShare) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{10} +} +func (m *TargetImpressionShare) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetImpressionShare.Unmarshal(m, b) +} +func (m *TargetImpressionShare) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetImpressionShare.Marshal(b, m, deterministic) +} +func (dst *TargetImpressionShare) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetImpressionShare.Merge(dst, src) +} +func (m *TargetImpressionShare) XXX_Size() int { + return xxx_messageInfo_TargetImpressionShare.Size(m) +} +func (m *TargetImpressionShare) XXX_DiscardUnknown() { + xxx_messageInfo_TargetImpressionShare.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetImpressionShare proto.InternalMessageInfo + +func (m *TargetImpressionShare) GetLocation() enums.TargetImpressionShareLocationEnum_TargetImpressionShareLocation { + if m != nil { + return m.Location + } + return enums.TargetImpressionShareLocationEnum_UNSPECIFIED +} + +func (m *TargetImpressionShare) GetLocationFractionMicros() *wrappers.Int64Value { + if m != nil { + return m.LocationFractionMicros + } + return nil +} + +func (m *TargetImpressionShare) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +// An automated bidding strategy that sets bids based on the target fraction of +// auctions where the advertiser should outrank a specific competitor. +type TargetOutrankShare struct { + // The target fraction of auctions where the advertiser should outrank the + // competitor. + // The advertiser outranks the competitor in an auction if either the + // advertiser appears above the competitor in the search results, or appears + // in the search results when the competitor does not. + // Value must be between 1 and 1000000, inclusive. + TargetOutrankShareMicros *wrappers.Int32Value `protobuf:"bytes,1,opt,name=target_outrank_share_micros,json=targetOutrankShareMicros,proto3" json:"target_outrank_share_micros,omitempty"` + // Competitor's visible domain URL. + CompetitorDomain *wrappers.StringValue `protobuf:"bytes,2,opt,name=competitor_domain,json=competitorDomain,proto3" json:"competitor_domain,omitempty"` + // Maximum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + // Whether the strategy should always follow bid estimate changes, + // or only increase. 
+ // If false, always set a keyword's new bid to the current bid estimate. + // If true, only updates a keyword's bid if the current bid estimate is + // greater than the current bid. + OnlyRaiseCpcBids *wrappers.BoolValue `protobuf:"bytes,4,opt,name=only_raise_cpc_bids,json=onlyRaiseCpcBids,proto3" json:"only_raise_cpc_bids,omitempty"` + // Whether the strategy is allowed to raise bids on keywords with + // lower-range quality scores. + RaiseCpcBidWhenQualityScoreIsLow *wrappers.BoolValue `protobuf:"bytes,5,opt,name=raise_cpc_bid_when_quality_score_is_low,json=raiseCpcBidWhenQualityScoreIsLow,proto3" json:"raise_cpc_bid_when_quality_score_is_low,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetOutrankShare) Reset() { *m = TargetOutrankShare{} } +func (m *TargetOutrankShare) String() string { return proto.CompactTextString(m) } +func (*TargetOutrankShare) ProtoMessage() {} +func (*TargetOutrankShare) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{11} +} +func (m *TargetOutrankShare) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetOutrankShare.Unmarshal(m, b) +} +func (m *TargetOutrankShare) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetOutrankShare.Marshal(b, m, deterministic) +} +func (dst *TargetOutrankShare) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetOutrankShare.Merge(dst, src) +} +func (m *TargetOutrankShare) XXX_Size() int { + return xxx_messageInfo_TargetOutrankShare.Size(m) +} +func (m *TargetOutrankShare) XXX_DiscardUnknown() { + xxx_messageInfo_TargetOutrankShare.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetOutrankShare proto.InternalMessageInfo + +func (m *TargetOutrankShare) GetTargetOutrankShareMicros() *wrappers.Int32Value { + if m != nil { + return m.TargetOutrankShareMicros + } + return nil +} + +func (m *TargetOutrankShare) GetCompetitorDomain() *wrappers.StringValue { + if m != nil { + return m.CompetitorDomain + } + return nil +} + +func (m *TargetOutrankShare) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +func (m *TargetOutrankShare) GetOnlyRaiseCpcBids() *wrappers.BoolValue { + if m != nil { + return m.OnlyRaiseCpcBids + } + return nil +} + +func (m *TargetOutrankShare) GetRaiseCpcBidWhenQualityScoreIsLow() *wrappers.BoolValue { + if m != nil { + return m.RaiseCpcBidWhenQualityScoreIsLow + } + return nil +} + +// An automated bidding strategy that helps you maximize revenue while +// averaging a specific target return on ad spend (ROAS). +type TargetRoas struct { + // Required. The desired revenue (based on conversion data) per unit of spend. + // Value must be between 0.01 and 1000.0, inclusive. + TargetRoas *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=target_roas,json=targetRoas,proto3" json:"target_roas,omitempty"` + // Maximum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + // Minimum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. 
+ CpcBidFloorMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cpc_bid_floor_micros,json=cpcBidFloorMicros,proto3" json:"cpc_bid_floor_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetRoas) Reset() { *m = TargetRoas{} } +func (m *TargetRoas) String() string { return proto.CompactTextString(m) } +func (*TargetRoas) ProtoMessage() {} +func (*TargetRoas) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{12} +} +func (m *TargetRoas) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetRoas.Unmarshal(m, b) +} +func (m *TargetRoas) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetRoas.Marshal(b, m, deterministic) +} +func (dst *TargetRoas) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetRoas.Merge(dst, src) +} +func (m *TargetRoas) XXX_Size() int { + return xxx_messageInfo_TargetRoas.Size(m) +} +func (m *TargetRoas) XXX_DiscardUnknown() { + xxx_messageInfo_TargetRoas.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetRoas proto.InternalMessageInfo + +func (m *TargetRoas) GetTargetRoas() *wrappers.DoubleValue { + if m != nil { + return m.TargetRoas + } + return nil +} + +func (m *TargetRoas) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +func (m *TargetRoas) GetCpcBidFloorMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidFloorMicros + } + return nil +} + +// An automated bid strategy that sets your bids to help get as many clicks +// as possible within your budget. +type TargetSpend struct { + // The spend target under which to maximize clicks. + // A TargetSpend bidder will attempt to spend the smaller of this value + // or the natural throttling spend amount. + // If not specified, the budget is used as the spend target. + TargetSpendMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=target_spend_micros,json=targetSpendMicros,proto3" json:"target_spend_micros,omitempty"` + // Maximum bid limit that can be set by the bid strategy. + // The limit applies to all keywords managed by the strategy. 
+ CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetSpend) Reset() { *m = TargetSpend{} } +func (m *TargetSpend) String() string { return proto.CompactTextString(m) } +func (*TargetSpend) ProtoMessage() {} +func (*TargetSpend) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{13} +} +func (m *TargetSpend) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetSpend.Unmarshal(m, b) +} +func (m *TargetSpend) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetSpend.Marshal(b, m, deterministic) +} +func (dst *TargetSpend) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetSpend.Merge(dst, src) +} +func (m *TargetSpend) XXX_Size() int { + return xxx_messageInfo_TargetSpend.Size(m) +} +func (m *TargetSpend) XXX_DiscardUnknown() { + xxx_messageInfo_TargetSpend.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetSpend proto.InternalMessageInfo + +func (m *TargetSpend) GetTargetSpendMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetSpendMicros + } + return nil +} + +func (m *TargetSpend) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +// A bidding strategy where bids are a fraction of the advertised price for +// some good or service. +type PercentCpc struct { + // Maximum bid limit that can be set by the bid strategy. This is + // an optional field entered by the advertiser and specified in local micros. + // Note: A zero value is interpreted in the same way as having bid_ceiling + // undefined. + CpcBidCeilingMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=cpc_bid_ceiling_micros,json=cpcBidCeilingMicros,proto3" json:"cpc_bid_ceiling_micros,omitempty"` + // Adjusts the bid for each auction upward or downward, depending on the + // likelihood of a conversion. Individual bids may exceed + // cpc_bid_ceiling_micros, but the average bid amount for a campaign should + // not. 
+ EnhancedCpcEnabled *wrappers.BoolValue `protobuf:"bytes,2,opt,name=enhanced_cpc_enabled,json=enhancedCpcEnabled,proto3" json:"enhanced_cpc_enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PercentCpc) Reset() { *m = PercentCpc{} } +func (m *PercentCpc) String() string { return proto.CompactTextString(m) } +func (*PercentCpc) ProtoMessage() {} +func (*PercentCpc) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_646e34114a6c9f78, []int{14} +} +func (m *PercentCpc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PercentCpc.Unmarshal(m, b) +} +func (m *PercentCpc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PercentCpc.Marshal(b, m, deterministic) +} +func (dst *PercentCpc) XXX_Merge(src proto.Message) { + xxx_messageInfo_PercentCpc.Merge(dst, src) +} +func (m *PercentCpc) XXX_Size() int { + return xxx_messageInfo_PercentCpc.Size(m) +} +func (m *PercentCpc) XXX_DiscardUnknown() { + xxx_messageInfo_PercentCpc.DiscardUnknown(m) +} + +var xxx_messageInfo_PercentCpc proto.InternalMessageInfo + +func (m *PercentCpc) GetCpcBidCeilingMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidCeilingMicros + } + return nil +} + +func (m *PercentCpc) GetEnhancedCpcEnabled() *wrappers.BoolValue { + if m != nil { + return m.EnhancedCpcEnabled + } + return nil +} + +func init() { + proto.RegisterType((*Commission)(nil), "google.ads.googleads.v1.common.Commission") + proto.RegisterType((*EnhancedCpc)(nil), "google.ads.googleads.v1.common.EnhancedCpc") + proto.RegisterType((*ManualCpc)(nil), "google.ads.googleads.v1.common.ManualCpc") + proto.RegisterType((*ManualCpm)(nil), "google.ads.googleads.v1.common.ManualCpm") + proto.RegisterType((*ManualCpv)(nil), "google.ads.googleads.v1.common.ManualCpv") + proto.RegisterType((*MaximizeConversions)(nil), "google.ads.googleads.v1.common.MaximizeConversions") + proto.RegisterType((*MaximizeConversionValue)(nil), "google.ads.googleads.v1.common.MaximizeConversionValue") + proto.RegisterType((*PageOnePromoted)(nil), "google.ads.googleads.v1.common.PageOnePromoted") + proto.RegisterType((*TargetCpa)(nil), "google.ads.googleads.v1.common.TargetCpa") + proto.RegisterType((*TargetCpm)(nil), "google.ads.googleads.v1.common.TargetCpm") + proto.RegisterType((*TargetImpressionShare)(nil), "google.ads.googleads.v1.common.TargetImpressionShare") + proto.RegisterType((*TargetOutrankShare)(nil), "google.ads.googleads.v1.common.TargetOutrankShare") + proto.RegisterType((*TargetRoas)(nil), "google.ads.googleads.v1.common.TargetRoas") + proto.RegisterType((*TargetSpend)(nil), "google.ads.googleads.v1.common.TargetSpend") + proto.RegisterType((*PercentCpc)(nil), "google.ads.googleads.v1.common.PercentCpc") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/bidding.proto", fileDescriptor_bidding_646e34114a6c9f78) +} + +var fileDescriptor_bidding_646e34114a6c9f78 = []byte{ + // 951 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x96, 0x51, 0x6f, 0x1b, 0x45, + 0x10, 0x80, 0x65, 0xbb, 0xad, 0xe8, 0x3a, 0xa5, 0xe4, 0x92, 0x06, 0x2b, 0xad, 0xaa, 0xea, 0x24, + 0x04, 0x0f, 0xe8, 0x2c, 0xa7, 0x88, 0x07, 0x23, 0x84, 0xec, 0x4b, 0x1a, 0x59, 0x24, 0xaa, 0x6b, + 0x97, 0x00, 0x51, 0xc4, 0x6a, 0xbd, 0xbb, 0xb9, 0xac, 0x7a, 0xb7, 0x73, 0xec, 0xee, 0x25, 0xa4, + 0x2f, 0xfc, 0x17, 0x1e, 0x11, 0x3f, 0x81, 0x77, 0x24, 0xc4, 0xff, 0x40, 0x82, 0x57, 0x7e, 
0x00, + 0xba, 0xdd, 0x3d, 0xbb, 0xc5, 0x49, 0xdc, 0x84, 0x20, 0xf1, 0xe4, 0x5d, 0xef, 0xcc, 0x37, 0xb3, + 0x33, 0xb3, 0x73, 0x83, 0x3e, 0x4c, 0x00, 0x92, 0x94, 0xb7, 0x09, 0xd3, 0x6d, 0xb7, 0x2c, 0x57, + 0xc7, 0x9d, 0x36, 0x85, 0x2c, 0x03, 0xd9, 0x9e, 0x08, 0xc6, 0x84, 0x4c, 0xa2, 0x5c, 0x81, 0x81, + 0xe0, 0xa1, 0x13, 0x89, 0x08, 0xd3, 0xd1, 0x54, 0x3a, 0x3a, 0xee, 0x44, 0x4e, 0x7a, 0x3d, 0x3e, + 0x8f, 0xc6, 0x65, 0x91, 0xe9, 0x76, 0x4e, 0x12, 0x8e, 0x41, 0x72, 0x9c, 0x2b, 0xc8, 0xc0, 0x70, + 0x86, 0xb5, 0x51, 0xc4, 0xf0, 0xe4, 0x14, 0x27, 0x40, 0x52, 0x67, 0x64, 0x7d, 0xf3, 0x62, 0x88, + 0x21, 0x2a, 0xe1, 0x06, 0x8b, 0x2c, 0x57, 0x5c, 0x6b, 0x01, 0x12, 0xeb, 0x23, 0xa2, 0x38, 0x4e, + 0x81, 0x12, 0x23, 0x40, 0x7a, 0x8a, 0x77, 0xb5, 0x6d, 0x77, 0x93, 0xe2, 0xb0, 0x7d, 0xa2, 0x48, + 0x9e, 0x73, 0xa5, 0xfd, 0xf9, 0x83, 0xca, 0x4a, 0x2e, 0xda, 0x44, 0x4a, 0x30, 0x56, 0xd9, 0x9f, + 0x86, 0x18, 0xa1, 0x18, 0xb2, 0x4c, 0x58, 0x03, 0xc1, 0x33, 0xb4, 0x46, 0xa7, 0x3b, 0x5c, 0x7a, + 0x8c, 0x33, 0x41, 0x15, 0xe8, 0x56, 0xed, 0x51, 0xed, 0x83, 0xe6, 0xc6, 0x7d, 0x1f, 0x8c, 0xa8, + 0x32, 0x16, 0x0d, 0xa4, 0xf9, 0xf8, 0xa3, 0x3d, 0x92, 0x16, 0x7c, 0xb4, 0x3a, 0x53, 0x1d, 0x11, + 0xc3, 0x77, 0xad, 0x62, 0x78, 0x07, 0x35, 0xb7, 0xe4, 0x11, 0x91, 0x94, 0xb3, 0x38, 0xa7, 0xe1, + 0xd7, 0xe8, 0xf6, 0x2e, 0x91, 0x05, 0x49, 0xe3, 0x9c, 0x06, 0x3b, 0x68, 0x95, 0xfb, 0x33, 0x4c, + 0x73, 0x8a, 0xb9, 0x24, 0x93, 0x94, 0x33, 0x6f, 0x6c, 0x7d, 0xce, 0x58, 0x1f, 0x20, 0x75, 0xb6, + 0x02, 0x3e, 0x63, 0x6e, 0x39, 0xad, 0xb0, 0x39, 0x43, 0x67, 0xaf, 0x6e, 0x8e, 0xc3, 0x7b, 0x68, + 0x65, 0x97, 0x7c, 0x27, 0x32, 0xf1, 0x92, 0xc7, 0x20, 0x8f, 0xb9, 0x2a, 0x7d, 0xd4, 0xe1, 0x57, + 0xe8, 0xdd, 0xf9, 0xbf, 0x2d, 0x3f, 0xf8, 0x14, 0x35, 0x7d, 0xf8, 0x15, 0x90, 0xea, 0xf6, 0x0f, + 0xe6, 0x1c, 0xda, 0x84, 0x62, 0x92, 0x72, 0xe7, 0x12, 0x72, 0x0a, 0x23, 0x20, 0x3a, 0xfc, 0xed, + 0x06, 0xba, 0x3b, 0x24, 0x09, 0x7f, 0x2a, 0xf9, 0xd0, 0x57, 0x40, 0xf0, 0x3d, 0xba, 0xf3, 0x5a, + 0x11, 0x58, 0xe8, 0xdb, 0x1b, 0xfb, 0xd1, 0x79, 0xa5, 0x66, 0xab, 0x20, 0xfa, 0x07, 0x66, 0xec, + 0x11, 0xdb, 0x40, 0xd2, 0x2d, 0x59, 0x64, 0x17, 0x9d, 0x8f, 0x96, 0xf4, 0x2b, 0xbb, 0x60, 0x88, + 0xd6, 0xca, 0x20, 0x4f, 0x04, 0xc3, 0x94, 0x8b, 0x54, 0xc8, 0xa4, 0x4a, 0x6e, 0x7d, 0x71, 0x72, + 0x57, 0x68, 0x4e, 0xfb, 0x82, 0xc5, 0x4e, 0xd1, 0xe5, 0x36, 0xf8, 0x0c, 0x2d, 0x95, 0xb4, 0x0c, + 0x98, 0x38, 0x14, 0x5c, 0xb5, 0x1a, 0x6f, 0x10, 0xa6, 0xe6, 0x44, 0xb0, 0x5d, 0xaf, 0x10, 0x0c, + 0xd0, 0x0a, 0xc8, 0xf4, 0x14, 0x2b, 0x22, 0x34, 0xc7, 0xde, 0x3b, 0xdd, 0xba, 0xb1, 0x30, 0xff, + 0xef, 0x94, 0x6a, 0xa3, 0x52, 0x2b, 0xb6, 0x7e, 0xe9, 0xe0, 0x05, 0x7a, 0xef, 0x35, 0x0a, 0x3e, + 0x39, 0xe2, 0x12, 0x4f, 0x0a, 0x56, 0x26, 0x91, 0x82, 0x2c, 0x43, 0x21, 0x24, 0x67, 0xad, 0x9b, + 0x0b, 0xe1, 0x8f, 0xd4, 0x0c, 0xfc, 0xe5, 0x11, 0x97, 0x7d, 0x0b, 0x89, 0x67, 0x8c, 0x20, 0x43, + 0xef, 0x9f, 0x61, 0xec, 0xdb, 0x82, 0xa4, 0xc2, 0x9c, 0x62, 0x4d, 0x41, 0x71, 0x2c, 0x34, 0x4e, + 0xe1, 0xa4, 0x75, 0xeb, 0xd2, 0xe6, 0x9e, 0x39, 0xcc, 0xb8, 0xa4, 0x0c, 0xf4, 0x0e, 0x9c, 0x84, + 0x7f, 0xd5, 0xd0, 0xed, 0xe7, 0xb6, 0xba, 0xe2, 0x9c, 0x04, 0xdb, 0x68, 0xd9, 0xd7, 0x26, 0xcd, + 0xc9, 0x25, 0xde, 0xe7, 0x5d, 0x53, 0x21, 0x7c, 0xfa, 0xae, 0xbf, 0x20, 0x76, 0xd0, 0x6a, 0x45, + 0x3c, 0x4c, 0x01, 0x54, 0xc5, 0x6b, 0x2c, 0xe6, 0x2d, 0x3b, 0xde, 0x93, 0x52, 0xcd, 0xb7, 0x8e, + 0xe6, 0xec, 0xd6, 0x59, 0xf8, 0x73, 0x1d, 0xdd, 0x73, 0xbb, 0xc1, 0xb4, 0x21, 0x8e, 0xcb, 0x7e, + 0x18, 0xbc, 0x44, 0x6f, 0x55, 0x2d, 0xd1, 0xbf, 0xa9, 0x6f, 0x16, 0xbc, 0xa9, 0x33, 0x39, 0x3b, + 0x9e, 0x61, 0x5f, 
0xd5, 0x85, 0x12, 0xa3, 0xa9, 0xbd, 0xe0, 0x0b, 0xd4, 0xaa, 0xd6, 0xf8, 0x50, + 0x11, 0x6a, 0x17, 0x6f, 0x1e, 0xc4, 0xb5, 0x4a, 0xf9, 0x89, 0xd7, 0x5d, 0x98, 0x99, 0xc6, 0xd5, + 0x32, 0x13, 0xfe, 0xd2, 0x40, 0x81, 0xbb, 0xd4, 0xd3, 0xc2, 0x28, 0x22, 0x5f, 0xb8, 0xd8, 0xed, + 0xa3, 0xfb, 0xbe, 0x96, 0xc0, 0xfd, 0xed, 0xbf, 0x31, 0x8b, 0xab, 0xea, 0xf1, 0x86, 0xb3, 0xd6, + 0x32, 0x73, 0x54, 0x7f, 0x89, 0x01, 0x5a, 0xa6, 0x90, 0xe5, 0xdc, 0x08, 0x03, 0x0a, 0x33, 0xc8, + 0x88, 0x90, 0x3e, 0x28, 0xf3, 0x2d, 0x62, 0x6c, 0x94, 0x90, 0x89, 0x7f, 0xdc, 0x33, 0xb5, 0x4d, + 0xab, 0x75, 0xfd, 0xf1, 0xb8, 0xce, 0xce, 0x73, 0x89, 0x66, 0x70, 0xf3, 0x1a, 0x9a, 0xc1, 0x9f, + 0x35, 0x84, 0x9e, 0x4f, 0x3f, 0x35, 0xff, 0xf2, 0x4b, 0xf5, 0xbf, 0xef, 0x01, 0x3f, 0xd5, 0x50, + 0xd3, 0xdd, 0x76, 0x9c, 0x73, 0xc9, 0x82, 0xcf, 0xd1, 0x8a, 0xbf, 0xae, 0x2e, 0xf7, 0x97, 0x68, + 0x7f, 0xbe, 0x69, 0x5a, 0xcc, 0x7f, 0xd5, 0x00, 0x4b, 0x77, 0xd1, 0x90, 0x2b, 0xca, 0xa5, 0x29, + 0x07, 0x9c, 0xf3, 0x0d, 0xd4, 0xae, 0x1e, 0xdd, 0x33, 0x47, 0xa6, 0xfa, 0x55, 0x46, 0xa6, 0xfe, + 0xef, 0x35, 0x14, 0x52, 0xc8, 0xa2, 0x8b, 0xa7, 0xdd, 0xfe, 0x52, 0xdf, 0x0d, 0xc7, 0xc3, 0x92, + 0x3a, 0xac, 0xed, 0xfb, 0xc1, 0x35, 0x4a, 0x20, 0x25, 0x32, 0x89, 0x40, 0x25, 0xed, 0x84, 0x4b, + 0x6b, 0xb3, 0x1a, 0x64, 0x73, 0xa1, 0xcf, 0x1b, 0xb5, 0x3f, 0x71, 0x3f, 0x3f, 0xd4, 0x1b, 0xdb, + 0xbd, 0xde, 0x8f, 0xf5, 0x87, 0xdb, 0x0e, 0xd6, 0x63, 0x3a, 0x72, 0xcb, 0x72, 0xb5, 0xd7, 0x89, + 0x62, 0x2b, 0xf6, 0x6b, 0x25, 0x70, 0xd0, 0x63, 0xfa, 0x60, 0x2a, 0x70, 0xb0, 0xd7, 0x39, 0x70, + 0x02, 0x7f, 0xd4, 0x43, 0xf7, 0x6f, 0xb7, 0xdb, 0x63, 0xba, 0xdb, 0x9d, 0x8a, 0x74, 0xbb, 0x7b, + 0x9d, 0x6e, 0xd7, 0x09, 0x4d, 0x6e, 0x59, 0xef, 0x1e, 0xff, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xa1, + 0x89, 0x7c, 0x93, 0x07, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/click_location.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/click_location.pb.go new file mode 100644 index 0000000..9fefc61 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/click_location.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/click_location.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Location criteria associated with a click. +type ClickLocation struct { + // The city location criterion associated with the impression. + City *wrappers.StringValue `protobuf:"bytes,1,opt,name=city,proto3" json:"city,omitempty"` + // The country location criterion associated with the impression. + Country *wrappers.StringValue `protobuf:"bytes,2,opt,name=country,proto3" json:"country,omitempty"` + // The metro location criterion associated with the impression. 
+ Metro *wrappers.StringValue `protobuf:"bytes,3,opt,name=metro,proto3" json:"metro,omitempty"` + // The most specific location criterion associated with the impression. + MostSpecific *wrappers.StringValue `protobuf:"bytes,4,opt,name=most_specific,json=mostSpecific,proto3" json:"most_specific,omitempty"` + // The region location criterion associated with the impression. + Region *wrappers.StringValue `protobuf:"bytes,5,opt,name=region,proto3" json:"region,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClickLocation) Reset() { *m = ClickLocation{} } +func (m *ClickLocation) String() string { return proto.CompactTextString(m) } +func (*ClickLocation) ProtoMessage() {} +func (*ClickLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_click_location_19655ed14d26d9e2, []int{0} +} +func (m *ClickLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClickLocation.Unmarshal(m, b) +} +func (m *ClickLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClickLocation.Marshal(b, m, deterministic) +} +func (dst *ClickLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClickLocation.Merge(dst, src) +} +func (m *ClickLocation) XXX_Size() int { + return xxx_messageInfo_ClickLocation.Size(m) +} +func (m *ClickLocation) XXX_DiscardUnknown() { + xxx_messageInfo_ClickLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_ClickLocation proto.InternalMessageInfo + +func (m *ClickLocation) GetCity() *wrappers.StringValue { + if m != nil { + return m.City + } + return nil +} + +func (m *ClickLocation) GetCountry() *wrappers.StringValue { + if m != nil { + return m.Country + } + return nil +} + +func (m *ClickLocation) GetMetro() *wrappers.StringValue { + if m != nil { + return m.Metro + } + return nil +} + +func (m *ClickLocation) GetMostSpecific() *wrappers.StringValue { + if m != nil { + return m.MostSpecific + } + return nil +} + +func (m *ClickLocation) GetRegion() *wrappers.StringValue { + if m != nil { + return m.Region + } + return nil +} + +func init() { + proto.RegisterType((*ClickLocation)(nil), "google.ads.googleads.v1.common.ClickLocation") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/click_location.proto", fileDescriptor_click_location_19655ed14d26d9e2) +} + +var fileDescriptor_click_location_19655ed14d26d9e2 = []byte{ + // 354 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xbf, 0x6a, 0xeb, 0x30, + 0x14, 0xc6, 0xb1, 0xf3, 0xe7, 0x82, 0xee, 0xcd, 0xe2, 0xc9, 0x84, 0x10, 0x2e, 0x99, 0xee, 0x24, + 0x5d, 0x27, 0xa5, 0x83, 0x3a, 0x39, 0x29, 0x64, 0xe9, 0x10, 0x1a, 0xf0, 0x50, 0x0c, 0xc1, 0x91, + 0x1d, 0x21, 0x6a, 0xeb, 0x18, 0x49, 0x49, 0xc9, 0xeb, 0x74, 0xe8, 0xd0, 0x47, 0xe9, 0xa3, 0x14, + 0xfa, 0x0e, 0xc5, 0x96, 0x1d, 0xe8, 0xd0, 0xe2, 0xc9, 0x07, 0xeb, 0xf7, 0xfb, 0x3e, 0x63, 0x1d, + 0xb4, 0xe0, 0x00, 0x3c, 0xcf, 0x48, 0x92, 0x6a, 0x62, 0xc7, 0x6a, 0x3a, 0x05, 0x84, 0x41, 0x51, + 0x80, 0x24, 0x2c, 0x17, 0xec, 0x71, 0x97, 0x03, 0x4b, 0x8c, 0x00, 0x89, 0x4b, 0x05, 0x06, 0xbc, + 0xa9, 0x25, 0x71, 0x92, 0x6a, 0x7c, 0x91, 0xf0, 0x29, 0xc0, 0x56, 0x1a, 0x37, 0xe7, 0xa4, 0xa6, + 0xf7, 0xc7, 0x03, 0x79, 0x52, 0x49, 0x59, 0x66, 0x4a, 0x5b, 0x7f, 0x3c, 0x69, 0x4b, 0x4b, 0x41, + 0x12, 0x29, 0xc1, 0xd4, 0xe1, 0xcd, 0xe9, 0xec, 0xc5, 0x45, 0xa3, 0x55, 0x55, 0x7b, 0xd7, 0xb4, + 0x7a, 0xff, 0x51, 0x9f, 0x09, 0x73, 0xf6, 0x9d, 0xbf, 0xce, 0xbf, 0xdf, 0xf3, 0x49, 
0xd3, 0x89, + 0xdb, 0x78, 0xbc, 0x35, 0x4a, 0x48, 0x1e, 0x25, 0xf9, 0x31, 0xbb, 0xaf, 0x49, 0xef, 0x1a, 0xfd, + 0x62, 0x70, 0x94, 0x46, 0x9d, 0x7d, 0xb7, 0x83, 0xd4, 0xc2, 0xde, 0x1c, 0x0d, 0x8a, 0xcc, 0x28, + 0xf0, 0x7b, 0x1d, 0x2c, 0x8b, 0x7a, 0x21, 0x1a, 0x15, 0xa0, 0xcd, 0x4e, 0x97, 0x19, 0x13, 0x07, + 0xc1, 0xfc, 0x7e, 0x07, 0xf7, 0x4f, 0xa5, 0x6c, 0x1b, 0xc3, 0xbb, 0x42, 0x43, 0x95, 0x71, 0x01, + 0xd2, 0x1f, 0x74, 0x70, 0x1b, 0x76, 0xf9, 0xe1, 0xa0, 0x19, 0x83, 0x02, 0xff, 0x7c, 0x1b, 0x4b, + 0xef, 0xcb, 0xcf, 0xdc, 0x54, 0x89, 0x1b, 0xe7, 0xe1, 0xb6, 0xb1, 0x38, 0xe4, 0x89, 0xe4, 0x18, + 0x14, 0x27, 0x3c, 0x93, 0x75, 0x5f, 0xbb, 0x08, 0xa5, 0xd0, 0xdf, 0xed, 0xc5, 0x8d, 0x7d, 0x3c, + 0xbb, 0xbd, 0x75, 0x18, 0xbe, 0xba, 0xd3, 0xb5, 0x0d, 0x0b, 0x53, 0x8d, 0xed, 0x58, 0x4d, 0x51, + 0x80, 0x57, 0x35, 0xf6, 0xd6, 0x02, 0x71, 0x98, 0xea, 0xf8, 0x02, 0xc4, 0x51, 0x10, 0x5b, 0xe0, + 0xdd, 0x9d, 0xd9, 0xb7, 0x94, 0x86, 0xa9, 0xa6, 0xf4, 0x82, 0x50, 0x1a, 0x05, 0x94, 0x5a, 0x68, + 0x3f, 0xac, 0xbf, 0x6e, 0xf1, 0x19, 0x00, 0x00, 0xff, 0xff, 0x40, 0xe0, 0xe9, 0xb4, 0xb4, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criteria.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criteria.pb.go new file mode 100644 index 0000000..1337a07 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criteria.pb.go @@ -0,0 +1,3275 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/criteria.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A keyword criterion. +type KeywordInfo struct { + // The text of the keyword (at most 80 characters and 10 words). + Text *wrappers.StringValue `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The match type of the keyword. 
+ MatchType enums.KeywordMatchTypeEnum_KeywordMatchType `protobuf:"varint,2,opt,name=match_type,json=matchType,proto3,enum=google.ads.googleads.v1.enums.KeywordMatchTypeEnum_KeywordMatchType" json:"match_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordInfo) Reset() { *m = KeywordInfo{} } +func (m *KeywordInfo) String() string { return proto.CompactTextString(m) } +func (*KeywordInfo) ProtoMessage() {} +func (*KeywordInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{0} +} +func (m *KeywordInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordInfo.Unmarshal(m, b) +} +func (m *KeywordInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordInfo.Marshal(b, m, deterministic) +} +func (dst *KeywordInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordInfo.Merge(dst, src) +} +func (m *KeywordInfo) XXX_Size() int { + return xxx_messageInfo_KeywordInfo.Size(m) +} +func (m *KeywordInfo) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordInfo proto.InternalMessageInfo + +func (m *KeywordInfo) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *KeywordInfo) GetMatchType() enums.KeywordMatchTypeEnum_KeywordMatchType { + if m != nil { + return m.MatchType + } + return enums.KeywordMatchTypeEnum_UNSPECIFIED +} + +// A placement criterion. This can be used to modify bids for sites when +// targeting the content network. +type PlacementInfo struct { + // URL of the placement. + // + // For example, "http://www.domain.com". + Url *wrappers.StringValue `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlacementInfo) Reset() { *m = PlacementInfo{} } +func (m *PlacementInfo) String() string { return proto.CompactTextString(m) } +func (*PlacementInfo) ProtoMessage() {} +func (*PlacementInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{1} +} +func (m *PlacementInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlacementInfo.Unmarshal(m, b) +} +func (m *PlacementInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlacementInfo.Marshal(b, m, deterministic) +} +func (dst *PlacementInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlacementInfo.Merge(dst, src) +} +func (m *PlacementInfo) XXX_Size() int { + return xxx_messageInfo_PlacementInfo.Size(m) +} +func (m *PlacementInfo) XXX_DiscardUnknown() { + xxx_messageInfo_PlacementInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_PlacementInfo proto.InternalMessageInfo + +func (m *PlacementInfo) GetUrl() *wrappers.StringValue { + if m != nil { + return m.Url + } + return nil +} + +// A mobile app category criterion. +type MobileAppCategoryInfo struct { + // The mobile app category constant resource name. 
+ MobileAppCategoryConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=mobile_app_category_constant,json=mobileAppCategoryConstant,proto3" json:"mobile_app_category_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileAppCategoryInfo) Reset() { *m = MobileAppCategoryInfo{} } +func (m *MobileAppCategoryInfo) String() string { return proto.CompactTextString(m) } +func (*MobileAppCategoryInfo) ProtoMessage() {} +func (*MobileAppCategoryInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{2} +} +func (m *MobileAppCategoryInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileAppCategoryInfo.Unmarshal(m, b) +} +func (m *MobileAppCategoryInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileAppCategoryInfo.Marshal(b, m, deterministic) +} +func (dst *MobileAppCategoryInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileAppCategoryInfo.Merge(dst, src) +} +func (m *MobileAppCategoryInfo) XXX_Size() int { + return xxx_messageInfo_MobileAppCategoryInfo.Size(m) +} +func (m *MobileAppCategoryInfo) XXX_DiscardUnknown() { + xxx_messageInfo_MobileAppCategoryInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileAppCategoryInfo proto.InternalMessageInfo + +func (m *MobileAppCategoryInfo) GetMobileAppCategoryConstant() *wrappers.StringValue { + if m != nil { + return m.MobileAppCategoryConstant + } + return nil +} + +// A mobile application criterion. +type MobileApplicationInfo struct { + // A string that uniquely identifies a mobile application to Google Ads API. + // The format of this string is "{platform}-{platform_native_id}", where + // platform is "1" for iOS apps and "2" for Android apps, and where + // platform_native_id is the mobile application identifier native to the + // corresponding platform. + // For iOS, this native identifier is the 9 digit string that appears at the + // end of an App Store URL (e.g., "476943146" for "Flood-It! 2" whose App + // Store link is http://itunes.apple.com/us/app/flood-it!-2/id476943146). + // For Android, this native identifier is the application's package name + // (e.g., "com.labpixies.colordrips" for "Color Drips" given Google Play link + // https://play.google.com/store/apps/details?id=com.labpixies.colordrips). + // A well formed app id for Google Ads API would thus be "1-476943146" for iOS + // and "2-com.labpixies.colordrips" for Android. + // This field is required and must be set in CREATE operations. + AppId *wrappers.StringValue `protobuf:"bytes,2,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // Name of this mobile application. 
+ Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileApplicationInfo) Reset() { *m = MobileApplicationInfo{} } +func (m *MobileApplicationInfo) String() string { return proto.CompactTextString(m) } +func (*MobileApplicationInfo) ProtoMessage() {} +func (*MobileApplicationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{3} +} +func (m *MobileApplicationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileApplicationInfo.Unmarshal(m, b) +} +func (m *MobileApplicationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileApplicationInfo.Marshal(b, m, deterministic) +} +func (dst *MobileApplicationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileApplicationInfo.Merge(dst, src) +} +func (m *MobileApplicationInfo) XXX_Size() int { + return xxx_messageInfo_MobileApplicationInfo.Size(m) +} +func (m *MobileApplicationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_MobileApplicationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileApplicationInfo proto.InternalMessageInfo + +func (m *MobileApplicationInfo) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +func (m *MobileApplicationInfo) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +// A location criterion. +type LocationInfo struct { + // The geo target constant resource name. + GeoTargetConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=geo_target_constant,json=geoTargetConstant,proto3" json:"geo_target_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetGeoTargetConstant() *wrappers.StringValue { + if m != nil { + return m.GeoTargetConstant + } + return nil +} + +// A device criterion. +type DeviceInfo struct { + // Type of the device. 
+ Type enums.DeviceEnum_Device `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.DeviceEnum_Device" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceInfo) Reset() { *m = DeviceInfo{} } +func (m *DeviceInfo) String() string { return proto.CompactTextString(m) } +func (*DeviceInfo) ProtoMessage() {} +func (*DeviceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{5} +} +func (m *DeviceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceInfo.Unmarshal(m, b) +} +func (m *DeviceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceInfo.Marshal(b, m, deterministic) +} +func (dst *DeviceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceInfo.Merge(dst, src) +} +func (m *DeviceInfo) XXX_Size() int { + return xxx_messageInfo_DeviceInfo.Size(m) +} +func (m *DeviceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceInfo proto.InternalMessageInfo + +func (m *DeviceInfo) GetType() enums.DeviceEnum_Device { + if m != nil { + return m.Type + } + return enums.DeviceEnum_UNSPECIFIED +} + +// A preferred content criterion. +type PreferredContentInfo struct { + // Type of the preferred content. + Type enums.PreferredContentTypeEnum_PreferredContentType `protobuf:"varint,2,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.PreferredContentTypeEnum_PreferredContentType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PreferredContentInfo) Reset() { *m = PreferredContentInfo{} } +func (m *PreferredContentInfo) String() string { return proto.CompactTextString(m) } +func (*PreferredContentInfo) ProtoMessage() {} +func (*PreferredContentInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{6} +} +func (m *PreferredContentInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PreferredContentInfo.Unmarshal(m, b) +} +func (m *PreferredContentInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PreferredContentInfo.Marshal(b, m, deterministic) +} +func (dst *PreferredContentInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_PreferredContentInfo.Merge(dst, src) +} +func (m *PreferredContentInfo) XXX_Size() int { + return xxx_messageInfo_PreferredContentInfo.Size(m) +} +func (m *PreferredContentInfo) XXX_DiscardUnknown() { + xxx_messageInfo_PreferredContentInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_PreferredContentInfo proto.InternalMessageInfo + +func (m *PreferredContentInfo) GetType() enums.PreferredContentTypeEnum_PreferredContentType { + if m != nil { + return m.Type + } + return enums.PreferredContentTypeEnum_UNSPECIFIED +} + +// A listing group criterion. +type ListingGroupInfo struct { + // Type of the listing group. + Type enums.ListingGroupTypeEnum_ListingGroupType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.ListingGroupTypeEnum_ListingGroupType" json:"type,omitempty"` + // Dimension value with which this listing group is refining its parent. + // Undefined for the root group. 
+ CaseValue *ListingDimensionInfo `protobuf:"bytes,2,opt,name=case_value,json=caseValue,proto3" json:"case_value,omitempty"` + // Resource name of ad group criterion which is the parent listing group + // subdivision. Null for the root group. + ParentAdGroupCriterion *wrappers.StringValue `protobuf:"bytes,3,opt,name=parent_ad_group_criterion,json=parentAdGroupCriterion,proto3" json:"parent_ad_group_criterion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingGroupInfo) Reset() { *m = ListingGroupInfo{} } +func (m *ListingGroupInfo) String() string { return proto.CompactTextString(m) } +func (*ListingGroupInfo) ProtoMessage() {} +func (*ListingGroupInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{7} +} +func (m *ListingGroupInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingGroupInfo.Unmarshal(m, b) +} +func (m *ListingGroupInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingGroupInfo.Marshal(b, m, deterministic) +} +func (dst *ListingGroupInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingGroupInfo.Merge(dst, src) +} +func (m *ListingGroupInfo) XXX_Size() int { + return xxx_messageInfo_ListingGroupInfo.Size(m) +} +func (m *ListingGroupInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ListingGroupInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingGroupInfo proto.InternalMessageInfo + +func (m *ListingGroupInfo) GetType() enums.ListingGroupTypeEnum_ListingGroupType { + if m != nil { + return m.Type + } + return enums.ListingGroupTypeEnum_UNSPECIFIED +} + +func (m *ListingGroupInfo) GetCaseValue() *ListingDimensionInfo { + if m != nil { + return m.CaseValue + } + return nil +} + +func (m *ListingGroupInfo) GetParentAdGroupCriterion() *wrappers.StringValue { + if m != nil { + return m.ParentAdGroupCriterion + } + return nil +} + +// A listing scope criterion. +type ListingScopeInfo struct { + // Scope of the campaign criterion. + Dimensions []*ListingDimensionInfo `protobuf:"bytes,2,rep,name=dimensions,proto3" json:"dimensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingScopeInfo) Reset() { *m = ListingScopeInfo{} } +func (m *ListingScopeInfo) String() string { return proto.CompactTextString(m) } +func (*ListingScopeInfo) ProtoMessage() {} +func (*ListingScopeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{8} +} +func (m *ListingScopeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingScopeInfo.Unmarshal(m, b) +} +func (m *ListingScopeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingScopeInfo.Marshal(b, m, deterministic) +} +func (dst *ListingScopeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingScopeInfo.Merge(dst, src) +} +func (m *ListingScopeInfo) XXX_Size() int { + return xxx_messageInfo_ListingScopeInfo.Size(m) +} +func (m *ListingScopeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ListingScopeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingScopeInfo proto.InternalMessageInfo + +func (m *ListingScopeInfo) GetDimensions() []*ListingDimensionInfo { + if m != nil { + return m.Dimensions + } + return nil +} + +// Listing dimensions for listing group criterion. 
+type ListingDimensionInfo struct { + // Dimension of one of the types below is always present. + // + // Types that are valid to be assigned to Dimension: + // *ListingDimensionInfo_ListingBrand + // *ListingDimensionInfo_HotelId + // *ListingDimensionInfo_HotelClass + // *ListingDimensionInfo_HotelCountryRegion + // *ListingDimensionInfo_HotelState + // *ListingDimensionInfo_HotelCity + // *ListingDimensionInfo_ListingCustomAttribute + // *ListingDimensionInfo_ProductBiddingCategory + // *ListingDimensionInfo_ProductChannel + // *ListingDimensionInfo_ProductChannelExclusivity + // *ListingDimensionInfo_ProductCondition + // *ListingDimensionInfo_ProductItemId + // *ListingDimensionInfo_ProductType + // *ListingDimensionInfo_UnknownListingDimension + Dimension isListingDimensionInfo_Dimension `protobuf_oneof:"dimension"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingDimensionInfo) Reset() { *m = ListingDimensionInfo{} } +func (m *ListingDimensionInfo) String() string { return proto.CompactTextString(m) } +func (*ListingDimensionInfo) ProtoMessage() {} +func (*ListingDimensionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{9} +} +func (m *ListingDimensionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingDimensionInfo.Unmarshal(m, b) +} +func (m *ListingDimensionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingDimensionInfo.Marshal(b, m, deterministic) +} +func (dst *ListingDimensionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingDimensionInfo.Merge(dst, src) +} +func (m *ListingDimensionInfo) XXX_Size() int { + return xxx_messageInfo_ListingDimensionInfo.Size(m) +} +func (m *ListingDimensionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ListingDimensionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingDimensionInfo proto.InternalMessageInfo + +type isListingDimensionInfo_Dimension interface { + isListingDimensionInfo_Dimension() +} + +type ListingDimensionInfo_ListingBrand struct { + ListingBrand *ListingBrandInfo `protobuf:"bytes,1,opt,name=listing_brand,json=listingBrand,proto3,oneof"` +} + +type ListingDimensionInfo_HotelId struct { + HotelId *HotelIdInfo `protobuf:"bytes,2,opt,name=hotel_id,json=hotelId,proto3,oneof"` +} + +type ListingDimensionInfo_HotelClass struct { + HotelClass *HotelClassInfo `protobuf:"bytes,3,opt,name=hotel_class,json=hotelClass,proto3,oneof"` +} + +type ListingDimensionInfo_HotelCountryRegion struct { + HotelCountryRegion *HotelCountryRegionInfo `protobuf:"bytes,4,opt,name=hotel_country_region,json=hotelCountryRegion,proto3,oneof"` +} + +type ListingDimensionInfo_HotelState struct { + HotelState *HotelStateInfo `protobuf:"bytes,5,opt,name=hotel_state,json=hotelState,proto3,oneof"` +} + +type ListingDimensionInfo_HotelCity struct { + HotelCity *HotelCityInfo `protobuf:"bytes,6,opt,name=hotel_city,json=hotelCity,proto3,oneof"` +} + +type ListingDimensionInfo_ListingCustomAttribute struct { + ListingCustomAttribute *ListingCustomAttributeInfo `protobuf:"bytes,7,opt,name=listing_custom_attribute,json=listingCustomAttribute,proto3,oneof"` +} + +type ListingDimensionInfo_ProductBiddingCategory struct { + ProductBiddingCategory *ProductBiddingCategoryInfo `protobuf:"bytes,13,opt,name=product_bidding_category,json=productBiddingCategory,proto3,oneof"` +} + +type ListingDimensionInfo_ProductChannel struct { + ProductChannel *ProductChannelInfo 
`protobuf:"bytes,8,opt,name=product_channel,json=productChannel,proto3,oneof"` +} + +type ListingDimensionInfo_ProductChannelExclusivity struct { + ProductChannelExclusivity *ProductChannelExclusivityInfo `protobuf:"bytes,9,opt,name=product_channel_exclusivity,json=productChannelExclusivity,proto3,oneof"` +} + +type ListingDimensionInfo_ProductCondition struct { + ProductCondition *ProductConditionInfo `protobuf:"bytes,10,opt,name=product_condition,json=productCondition,proto3,oneof"` +} + +type ListingDimensionInfo_ProductItemId struct { + ProductItemId *ProductItemIdInfo `protobuf:"bytes,11,opt,name=product_item_id,json=productItemId,proto3,oneof"` +} + +type ListingDimensionInfo_ProductType struct { + ProductType *ProductTypeInfo `protobuf:"bytes,12,opt,name=product_type,json=productType,proto3,oneof"` +} + +type ListingDimensionInfo_UnknownListingDimension struct { + UnknownListingDimension *UnknownListingDimensionInfo `protobuf:"bytes,14,opt,name=unknown_listing_dimension,json=unknownListingDimension,proto3,oneof"` +} + +func (*ListingDimensionInfo_ListingBrand) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_HotelId) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_HotelClass) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_HotelCountryRegion) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_HotelState) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_HotelCity) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ListingCustomAttribute) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductBiddingCategory) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductChannel) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductChannelExclusivity) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductCondition) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductItemId) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_ProductType) isListingDimensionInfo_Dimension() {} + +func (*ListingDimensionInfo_UnknownListingDimension) isListingDimensionInfo_Dimension() {} + +func (m *ListingDimensionInfo) GetDimension() isListingDimensionInfo_Dimension { + if m != nil { + return m.Dimension + } + return nil +} + +func (m *ListingDimensionInfo) GetListingBrand() *ListingBrandInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ListingBrand); ok { + return x.ListingBrand + } + return nil +} + +func (m *ListingDimensionInfo) GetHotelId() *HotelIdInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_HotelId); ok { + return x.HotelId + } + return nil +} + +func (m *ListingDimensionInfo) GetHotelClass() *HotelClassInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_HotelClass); ok { + return x.HotelClass + } + return nil +} + +func (m *ListingDimensionInfo) GetHotelCountryRegion() *HotelCountryRegionInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_HotelCountryRegion); ok { + return x.HotelCountryRegion + } + return nil +} + +func (m *ListingDimensionInfo) GetHotelState() *HotelStateInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_HotelState); ok { + return x.HotelState + } + return nil +} + +func (m *ListingDimensionInfo) GetHotelCity() *HotelCityInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_HotelCity); ok { + return x.HotelCity + } + return nil +} + +func (m *ListingDimensionInfo) 
GetListingCustomAttribute() *ListingCustomAttributeInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ListingCustomAttribute); ok { + return x.ListingCustomAttribute + } + return nil +} + +func (m *ListingDimensionInfo) GetProductBiddingCategory() *ProductBiddingCategoryInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductBiddingCategory); ok { + return x.ProductBiddingCategory + } + return nil +} + +func (m *ListingDimensionInfo) GetProductChannel() *ProductChannelInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductChannel); ok { + return x.ProductChannel + } + return nil +} + +func (m *ListingDimensionInfo) GetProductChannelExclusivity() *ProductChannelExclusivityInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductChannelExclusivity); ok { + return x.ProductChannelExclusivity + } + return nil +} + +func (m *ListingDimensionInfo) GetProductCondition() *ProductConditionInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductCondition); ok { + return x.ProductCondition + } + return nil +} + +func (m *ListingDimensionInfo) GetProductItemId() *ProductItemIdInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductItemId); ok { + return x.ProductItemId + } + return nil +} + +func (m *ListingDimensionInfo) GetProductType() *ProductTypeInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_ProductType); ok { + return x.ProductType + } + return nil +} + +func (m *ListingDimensionInfo) GetUnknownListingDimension() *UnknownListingDimensionInfo { + if x, ok := m.GetDimension().(*ListingDimensionInfo_UnknownListingDimension); ok { + return x.UnknownListingDimension + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ListingDimensionInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListingDimensionInfo_OneofMarshaler, _ListingDimensionInfo_OneofUnmarshaler, _ListingDimensionInfo_OneofSizer, []interface{}{ + (*ListingDimensionInfo_ListingBrand)(nil), + (*ListingDimensionInfo_HotelId)(nil), + (*ListingDimensionInfo_HotelClass)(nil), + (*ListingDimensionInfo_HotelCountryRegion)(nil), + (*ListingDimensionInfo_HotelState)(nil), + (*ListingDimensionInfo_HotelCity)(nil), + (*ListingDimensionInfo_ListingCustomAttribute)(nil), + (*ListingDimensionInfo_ProductBiddingCategory)(nil), + (*ListingDimensionInfo_ProductChannel)(nil), + (*ListingDimensionInfo_ProductChannelExclusivity)(nil), + (*ListingDimensionInfo_ProductCondition)(nil), + (*ListingDimensionInfo_ProductItemId)(nil), + (*ListingDimensionInfo_ProductType)(nil), + (*ListingDimensionInfo_UnknownListingDimension)(nil), + } +} + +func _ListingDimensionInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListingDimensionInfo) + // dimension + switch x := m.Dimension.(type) { + case *ListingDimensionInfo_ListingBrand: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListingBrand); err != nil { + return err + } + case *ListingDimensionInfo_HotelId: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelId); err != nil { + return err + } + case *ListingDimensionInfo_HotelClass: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelClass); err != nil { + return err + } + case *ListingDimensionInfo_HotelCountryRegion: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.HotelCountryRegion); err != nil { + return err + } + case *ListingDimensionInfo_HotelState: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelState); err != nil { + return err + } + case *ListingDimensionInfo_HotelCity: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelCity); err != nil { + return err + } + case *ListingDimensionInfo_ListingCustomAttribute: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListingCustomAttribute); err != nil { + return err + } + case *ListingDimensionInfo_ProductBiddingCategory: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductBiddingCategory); err != nil { + return err + } + case *ListingDimensionInfo_ProductChannel: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductChannel); err != nil { + return err + } + case *ListingDimensionInfo_ProductChannelExclusivity: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductChannelExclusivity); err != nil { + return err + } + case *ListingDimensionInfo_ProductCondition: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductCondition); err != nil { + return err + } + case *ListingDimensionInfo_ProductItemId: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductItemId); err != nil { + return err + } + case *ListingDimensionInfo_ProductType: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProductType); err != nil { + return err + } + case *ListingDimensionInfo_UnknownListingDimension: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UnknownListingDimension); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ListingDimensionInfo.Dimension has unexpected type %T", x) + } + return nil +} + +func _ListingDimensionInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListingDimensionInfo) + switch tag { + case 1: // dimension.listing_brand + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListingBrandInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ListingBrand{msg} + return true, err + case 2: // dimension.hotel_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HotelIdInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_HotelId{msg} + return true, err + case 3: // dimension.hotel_class + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HotelClassInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_HotelClass{msg} + return true, err + case 4: // dimension.hotel_country_region + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HotelCountryRegionInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_HotelCountryRegion{msg} + return true, err + case 5: // dimension.hotel_state + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HotelStateInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_HotelState{msg} + return true, err + case 6: // dimension.hotel_city + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HotelCityInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_HotelCity{msg} + return true, err + case 7: // 
dimension.listing_custom_attribute + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListingCustomAttributeInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ListingCustomAttribute{msg} + return true, err + case 13: // dimension.product_bidding_category + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductBiddingCategoryInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductBiddingCategory{msg} + return true, err + case 8: // dimension.product_channel + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductChannelInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductChannel{msg} + return true, err + case 9: // dimension.product_channel_exclusivity + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductChannelExclusivityInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductChannelExclusivity{msg} + return true, err + case 10: // dimension.product_condition + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductConditionInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductCondition{msg} + return true, err + case 11: // dimension.product_item_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductItemIdInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductItemId{msg} + return true, err + case 12: // dimension.product_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProductTypeInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_ProductType{msg} + return true, err + case 14: // dimension.unknown_listing_dimension + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UnknownListingDimensionInfo) + err := b.DecodeMessage(msg) + m.Dimension = &ListingDimensionInfo_UnknownListingDimension{msg} + return true, err + default: + return false, nil + } +} + +func _ListingDimensionInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListingDimensionInfo) + // dimension + switch x := m.Dimension.(type) { + case *ListingDimensionInfo_ListingBrand: + s := proto.Size(x.ListingBrand) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_HotelId: + s := proto.Size(x.HotelId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_HotelClass: + s := proto.Size(x.HotelClass) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_HotelCountryRegion: + s := proto.Size(x.HotelCountryRegion) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_HotelState: + s := proto.Size(x.HotelState) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_HotelCity: + s := proto.Size(x.HotelCity) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ListingCustomAttribute: + s := proto.Size(x.ListingCustomAttribute) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ProductBiddingCategory: + s := proto.Size(x.ProductBiddingCategory) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + 
case *ListingDimensionInfo_ProductChannel: + s := proto.Size(x.ProductChannel) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ProductChannelExclusivity: + s := proto.Size(x.ProductChannelExclusivity) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ProductCondition: + s := proto.Size(x.ProductCondition) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ProductItemId: + s := proto.Size(x.ProductItemId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_ProductType: + s := proto.Size(x.ProductType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListingDimensionInfo_UnknownListingDimension: + s := proto.Size(x.UnknownListingDimension) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Brand of the listing. +type ListingBrandInfo struct { + // String value of the listing brand. + Value *wrappers.StringValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingBrandInfo) Reset() { *m = ListingBrandInfo{} } +func (m *ListingBrandInfo) String() string { return proto.CompactTextString(m) } +func (*ListingBrandInfo) ProtoMessage() {} +func (*ListingBrandInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{10} +} +func (m *ListingBrandInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingBrandInfo.Unmarshal(m, b) +} +func (m *ListingBrandInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingBrandInfo.Marshal(b, m, deterministic) +} +func (dst *ListingBrandInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingBrandInfo.Merge(dst, src) +} +func (m *ListingBrandInfo) XXX_Size() int { + return xxx_messageInfo_ListingBrandInfo.Size(m) +} +func (m *ListingBrandInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ListingBrandInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingBrandInfo proto.InternalMessageInfo + +func (m *ListingBrandInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +// Advertiser-specific hotel ID. +type HotelIdInfo struct { + // String value of the hotel ID. 
+ Value *wrappers.StringValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelIdInfo) Reset() { *m = HotelIdInfo{} } +func (m *HotelIdInfo) String() string { return proto.CompactTextString(m) } +func (*HotelIdInfo) ProtoMessage() {} +func (*HotelIdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{11} +} +func (m *HotelIdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelIdInfo.Unmarshal(m, b) +} +func (m *HotelIdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelIdInfo.Marshal(b, m, deterministic) +} +func (dst *HotelIdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelIdInfo.Merge(dst, src) +} +func (m *HotelIdInfo) XXX_Size() int { + return xxx_messageInfo_HotelIdInfo.Size(m) +} +func (m *HotelIdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelIdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelIdInfo proto.InternalMessageInfo + +func (m *HotelIdInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +// Class of the hotel as a number of stars 1 to 5. +type HotelClassInfo struct { + // Long value of the hotel class. + Value *wrappers.Int64Value `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelClassInfo) Reset() { *m = HotelClassInfo{} } +func (m *HotelClassInfo) String() string { return proto.CompactTextString(m) } +func (*HotelClassInfo) ProtoMessage() {} +func (*HotelClassInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{12} +} +func (m *HotelClassInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelClassInfo.Unmarshal(m, b) +} +func (m *HotelClassInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelClassInfo.Marshal(b, m, deterministic) +} +func (dst *HotelClassInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelClassInfo.Merge(dst, src) +} +func (m *HotelClassInfo) XXX_Size() int { + return xxx_messageInfo_HotelClassInfo.Size(m) +} +func (m *HotelClassInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelClassInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelClassInfo proto.InternalMessageInfo + +func (m *HotelClassInfo) GetValue() *wrappers.Int64Value { + if m != nil { + return m.Value + } + return nil +} + +// Country or Region the hotel is located in. +type HotelCountryRegionInfo struct { + // The Geo Target Constant resource name. 
+ CountryRegionCriterion *wrappers.StringValue `protobuf:"bytes,1,opt,name=country_region_criterion,json=countryRegionCriterion,proto3" json:"country_region_criterion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelCountryRegionInfo) Reset() { *m = HotelCountryRegionInfo{} } +func (m *HotelCountryRegionInfo) String() string { return proto.CompactTextString(m) } +func (*HotelCountryRegionInfo) ProtoMessage() {} +func (*HotelCountryRegionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{13} +} +func (m *HotelCountryRegionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelCountryRegionInfo.Unmarshal(m, b) +} +func (m *HotelCountryRegionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelCountryRegionInfo.Marshal(b, m, deterministic) +} +func (dst *HotelCountryRegionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelCountryRegionInfo.Merge(dst, src) +} +func (m *HotelCountryRegionInfo) XXX_Size() int { + return xxx_messageInfo_HotelCountryRegionInfo.Size(m) +} +func (m *HotelCountryRegionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelCountryRegionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelCountryRegionInfo proto.InternalMessageInfo + +func (m *HotelCountryRegionInfo) GetCountryRegionCriterion() *wrappers.StringValue { + if m != nil { + return m.CountryRegionCriterion + } + return nil +} + +// State the hotel is located in. +type HotelStateInfo struct { + // The Geo Target Constant resource name. + StateCriterion *wrappers.StringValue `protobuf:"bytes,1,opt,name=state_criterion,json=stateCriterion,proto3" json:"state_criterion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelStateInfo) Reset() { *m = HotelStateInfo{} } +func (m *HotelStateInfo) String() string { return proto.CompactTextString(m) } +func (*HotelStateInfo) ProtoMessage() {} +func (*HotelStateInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{14} +} +func (m *HotelStateInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelStateInfo.Unmarshal(m, b) +} +func (m *HotelStateInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelStateInfo.Marshal(b, m, deterministic) +} +func (dst *HotelStateInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelStateInfo.Merge(dst, src) +} +func (m *HotelStateInfo) XXX_Size() int { + return xxx_messageInfo_HotelStateInfo.Size(m) +} +func (m *HotelStateInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelStateInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelStateInfo proto.InternalMessageInfo + +func (m *HotelStateInfo) GetStateCriterion() *wrappers.StringValue { + if m != nil { + return m.StateCriterion + } + return nil +} + +// City the hotel is located in. +type HotelCityInfo struct { + // The Geo Target Constant resource name. 
+ CityCriterion *wrappers.StringValue `protobuf:"bytes,1,opt,name=city_criterion,json=cityCriterion,proto3" json:"city_criterion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelCityInfo) Reset() { *m = HotelCityInfo{} } +func (m *HotelCityInfo) String() string { return proto.CompactTextString(m) } +func (*HotelCityInfo) ProtoMessage() {} +func (*HotelCityInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{15} +} +func (m *HotelCityInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelCityInfo.Unmarshal(m, b) +} +func (m *HotelCityInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelCityInfo.Marshal(b, m, deterministic) +} +func (dst *HotelCityInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelCityInfo.Merge(dst, src) +} +func (m *HotelCityInfo) XXX_Size() int { + return xxx_messageInfo_HotelCityInfo.Size(m) +} +func (m *HotelCityInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelCityInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelCityInfo proto.InternalMessageInfo + +func (m *HotelCityInfo) GetCityCriterion() *wrappers.StringValue { + if m != nil { + return m.CityCriterion + } + return nil +} + +// Listing custom attribute. +type ListingCustomAttributeInfo struct { + // String value of the listing custom attribute. + Value *wrappers.StringValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // Indicates the index of the custom attribute. + Index enums.ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex `protobuf:"varint,2,opt,name=index,proto3,enum=google.ads.googleads.v1.enums.ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingCustomAttributeInfo) Reset() { *m = ListingCustomAttributeInfo{} } +func (m *ListingCustomAttributeInfo) String() string { return proto.CompactTextString(m) } +func (*ListingCustomAttributeInfo) ProtoMessage() {} +func (*ListingCustomAttributeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{16} +} +func (m *ListingCustomAttributeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingCustomAttributeInfo.Unmarshal(m, b) +} +func (m *ListingCustomAttributeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingCustomAttributeInfo.Marshal(b, m, deterministic) +} +func (dst *ListingCustomAttributeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingCustomAttributeInfo.Merge(dst, src) +} +func (m *ListingCustomAttributeInfo) XXX_Size() int { + return xxx_messageInfo_ListingCustomAttributeInfo.Size(m) +} +func (m *ListingCustomAttributeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ListingCustomAttributeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingCustomAttributeInfo proto.InternalMessageInfo + +func (m *ListingCustomAttributeInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +func (m *ListingCustomAttributeInfo) GetIndex() enums.ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex { + if m != nil { + return m.Index + } + return enums.ListingCustomAttributeIndexEnum_UNSPECIFIED +} + +// Bidding category of a product offer. 
+type ProductBiddingCategoryInfo struct { + // ID of the product bidding category. + // + // This ID is equivalent to the google_product_category ID as described in + // this article: https://support.google.com/merchants/answer/6324436. + Id *wrappers.Int64Value `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Two-letter upper-case country code of the product bidding category. It must + // match the campaign.shopping_setting.sales_country field. + CountryCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Level of the product bidding category. + Level enums.ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel `protobuf:"varint,3,opt,name=level,proto3,enum=google.ads.googleads.v1.enums.ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel" json:"level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductBiddingCategoryInfo) Reset() { *m = ProductBiddingCategoryInfo{} } +func (m *ProductBiddingCategoryInfo) String() string { return proto.CompactTextString(m) } +func (*ProductBiddingCategoryInfo) ProtoMessage() {} +func (*ProductBiddingCategoryInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{17} +} +func (m *ProductBiddingCategoryInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductBiddingCategoryInfo.Unmarshal(m, b) +} +func (m *ProductBiddingCategoryInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductBiddingCategoryInfo.Marshal(b, m, deterministic) +} +func (dst *ProductBiddingCategoryInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductBiddingCategoryInfo.Merge(dst, src) +} +func (m *ProductBiddingCategoryInfo) XXX_Size() int { + return xxx_messageInfo_ProductBiddingCategoryInfo.Size(m) +} +func (m *ProductBiddingCategoryInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductBiddingCategoryInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductBiddingCategoryInfo proto.InternalMessageInfo + +func (m *ProductBiddingCategoryInfo) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *ProductBiddingCategoryInfo) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *ProductBiddingCategoryInfo) GetLevel() enums.ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel { + if m != nil { + return m.Level + } + return enums.ProductBiddingCategoryLevelEnum_UNSPECIFIED +} + +// Locality of a product offer. +type ProductChannelInfo struct { + // Value of the locality. 
+ Channel enums.ProductChannelEnum_ProductChannel `protobuf:"varint,1,opt,name=channel,proto3,enum=google.ads.googleads.v1.enums.ProductChannelEnum_ProductChannel" json:"channel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductChannelInfo) Reset() { *m = ProductChannelInfo{} } +func (m *ProductChannelInfo) String() string { return proto.CompactTextString(m) } +func (*ProductChannelInfo) ProtoMessage() {} +func (*ProductChannelInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{18} +} +func (m *ProductChannelInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductChannelInfo.Unmarshal(m, b) +} +func (m *ProductChannelInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductChannelInfo.Marshal(b, m, deterministic) +} +func (dst *ProductChannelInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductChannelInfo.Merge(dst, src) +} +func (m *ProductChannelInfo) XXX_Size() int { + return xxx_messageInfo_ProductChannelInfo.Size(m) +} +func (m *ProductChannelInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductChannelInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductChannelInfo proto.InternalMessageInfo + +func (m *ProductChannelInfo) GetChannel() enums.ProductChannelEnum_ProductChannel { + if m != nil { + return m.Channel + } + return enums.ProductChannelEnum_UNSPECIFIED +} + +// Availability of a product offer. +type ProductChannelExclusivityInfo struct { + // Value of the availability. + ChannelExclusivity enums.ProductChannelExclusivityEnum_ProductChannelExclusivity `protobuf:"varint,1,opt,name=channel_exclusivity,json=channelExclusivity,proto3,enum=google.ads.googleads.v1.enums.ProductChannelExclusivityEnum_ProductChannelExclusivity" json:"channel_exclusivity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductChannelExclusivityInfo) Reset() { *m = ProductChannelExclusivityInfo{} } +func (m *ProductChannelExclusivityInfo) String() string { return proto.CompactTextString(m) } +func (*ProductChannelExclusivityInfo) ProtoMessage() {} +func (*ProductChannelExclusivityInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{19} +} +func (m *ProductChannelExclusivityInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductChannelExclusivityInfo.Unmarshal(m, b) +} +func (m *ProductChannelExclusivityInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductChannelExclusivityInfo.Marshal(b, m, deterministic) +} +func (dst *ProductChannelExclusivityInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductChannelExclusivityInfo.Merge(dst, src) +} +func (m *ProductChannelExclusivityInfo) XXX_Size() int { + return xxx_messageInfo_ProductChannelExclusivityInfo.Size(m) +} +func (m *ProductChannelExclusivityInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductChannelExclusivityInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductChannelExclusivityInfo proto.InternalMessageInfo + +func (m *ProductChannelExclusivityInfo) GetChannelExclusivity() enums.ProductChannelExclusivityEnum_ProductChannelExclusivity { + if m != nil { + return m.ChannelExclusivity + } + return enums.ProductChannelExclusivityEnum_UNSPECIFIED +} + +// Condition of a product offer. +type ProductConditionInfo struct { + // Value of the condition. 
+ Condition enums.ProductConditionEnum_ProductCondition `protobuf:"varint,1,opt,name=condition,proto3,enum=google.ads.googleads.v1.enums.ProductConditionEnum_ProductCondition" json:"condition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductConditionInfo) Reset() { *m = ProductConditionInfo{} } +func (m *ProductConditionInfo) String() string { return proto.CompactTextString(m) } +func (*ProductConditionInfo) ProtoMessage() {} +func (*ProductConditionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{20} +} +func (m *ProductConditionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductConditionInfo.Unmarshal(m, b) +} +func (m *ProductConditionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductConditionInfo.Marshal(b, m, deterministic) +} +func (dst *ProductConditionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductConditionInfo.Merge(dst, src) +} +func (m *ProductConditionInfo) XXX_Size() int { + return xxx_messageInfo_ProductConditionInfo.Size(m) +} +func (m *ProductConditionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductConditionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductConditionInfo proto.InternalMessageInfo + +func (m *ProductConditionInfo) GetCondition() enums.ProductConditionEnum_ProductCondition { + if m != nil { + return m.Condition + } + return enums.ProductConditionEnum_UNSPECIFIED +} + +// Item id of a product offer. +type ProductItemIdInfo struct { + // Value of the id. + Value *wrappers.StringValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductItemIdInfo) Reset() { *m = ProductItemIdInfo{} } +func (m *ProductItemIdInfo) String() string { return proto.CompactTextString(m) } +func (*ProductItemIdInfo) ProtoMessage() {} +func (*ProductItemIdInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{21} +} +func (m *ProductItemIdInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductItemIdInfo.Unmarshal(m, b) +} +func (m *ProductItemIdInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductItemIdInfo.Marshal(b, m, deterministic) +} +func (dst *ProductItemIdInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductItemIdInfo.Merge(dst, src) +} +func (m *ProductItemIdInfo) XXX_Size() int { + return xxx_messageInfo_ProductItemIdInfo.Size(m) +} +func (m *ProductItemIdInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductItemIdInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductItemIdInfo proto.InternalMessageInfo + +func (m *ProductItemIdInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +// Type of a product offer. +type ProductTypeInfo struct { + // Value of the type. + Value *wrappers.StringValue `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // Level of the type. 
+ Level enums.ProductTypeLevelEnum_ProductTypeLevel `protobuf:"varint,2,opt,name=level,proto3,enum=google.ads.googleads.v1.enums.ProductTypeLevelEnum_ProductTypeLevel" json:"level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductTypeInfo) Reset() { *m = ProductTypeInfo{} } +func (m *ProductTypeInfo) String() string { return proto.CompactTextString(m) } +func (*ProductTypeInfo) ProtoMessage() {} +func (*ProductTypeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{22} +} +func (m *ProductTypeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductTypeInfo.Unmarshal(m, b) +} +func (m *ProductTypeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductTypeInfo.Marshal(b, m, deterministic) +} +func (dst *ProductTypeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductTypeInfo.Merge(dst, src) +} +func (m *ProductTypeInfo) XXX_Size() int { + return xxx_messageInfo_ProductTypeInfo.Size(m) +} +func (m *ProductTypeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductTypeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductTypeInfo proto.InternalMessageInfo + +func (m *ProductTypeInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +func (m *ProductTypeInfo) GetLevel() enums.ProductTypeLevelEnum_ProductTypeLevel { + if m != nil { + return m.Level + } + return enums.ProductTypeLevelEnum_UNSPECIFIED +} + +// Unknown listing dimension. +type UnknownListingDimensionInfo struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnknownListingDimensionInfo) Reset() { *m = UnknownListingDimensionInfo{} } +func (m *UnknownListingDimensionInfo) String() string { return proto.CompactTextString(m) } +func (*UnknownListingDimensionInfo) ProtoMessage() {} +func (*UnknownListingDimensionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{23} +} +func (m *UnknownListingDimensionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnknownListingDimensionInfo.Unmarshal(m, b) +} +func (m *UnknownListingDimensionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnknownListingDimensionInfo.Marshal(b, m, deterministic) +} +func (dst *UnknownListingDimensionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnknownListingDimensionInfo.Merge(dst, src) +} +func (m *UnknownListingDimensionInfo) XXX_Size() int { + return xxx_messageInfo_UnknownListingDimensionInfo.Size(m) +} +func (m *UnknownListingDimensionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UnknownListingDimensionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UnknownListingDimensionInfo proto.InternalMessageInfo + +// Criterion for hotel date selection (default dates vs. user selected). 
+type HotelDateSelectionTypeInfo struct { + // Type of the hotel date selection + Type enums.HotelDateSelectionTypeEnum_HotelDateSelectionType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum_HotelDateSelectionType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelDateSelectionTypeInfo) Reset() { *m = HotelDateSelectionTypeInfo{} } +func (m *HotelDateSelectionTypeInfo) String() string { return proto.CompactTextString(m) } +func (*HotelDateSelectionTypeInfo) ProtoMessage() {} +func (*HotelDateSelectionTypeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{24} +} +func (m *HotelDateSelectionTypeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelDateSelectionTypeInfo.Unmarshal(m, b) +} +func (m *HotelDateSelectionTypeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelDateSelectionTypeInfo.Marshal(b, m, deterministic) +} +func (dst *HotelDateSelectionTypeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelDateSelectionTypeInfo.Merge(dst, src) +} +func (m *HotelDateSelectionTypeInfo) XXX_Size() int { + return xxx_messageInfo_HotelDateSelectionTypeInfo.Size(m) +} +func (m *HotelDateSelectionTypeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelDateSelectionTypeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelDateSelectionTypeInfo proto.InternalMessageInfo + +func (m *HotelDateSelectionTypeInfo) GetType() enums.HotelDateSelectionTypeEnum_HotelDateSelectionType { + if m != nil { + return m.Type + } + return enums.HotelDateSelectionTypeEnum_UNSPECIFIED +} + +// Criterion for number of days prior to the stay the booking is being made. +type HotelAdvanceBookingWindowInfo struct { + // Low end of the number of days prior to the stay. + MinDays *wrappers.Int64Value `protobuf:"bytes,1,opt,name=min_days,json=minDays,proto3" json:"min_days,omitempty"` + // High end of the number of days prior to the stay. 
+ MaxDays *wrappers.Int64Value `protobuf:"bytes,2,opt,name=max_days,json=maxDays,proto3" json:"max_days,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelAdvanceBookingWindowInfo) Reset() { *m = HotelAdvanceBookingWindowInfo{} } +func (m *HotelAdvanceBookingWindowInfo) String() string { return proto.CompactTextString(m) } +func (*HotelAdvanceBookingWindowInfo) ProtoMessage() {} +func (*HotelAdvanceBookingWindowInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{25} +} +func (m *HotelAdvanceBookingWindowInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelAdvanceBookingWindowInfo.Unmarshal(m, b) +} +func (m *HotelAdvanceBookingWindowInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelAdvanceBookingWindowInfo.Marshal(b, m, deterministic) +} +func (dst *HotelAdvanceBookingWindowInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelAdvanceBookingWindowInfo.Merge(dst, src) +} +func (m *HotelAdvanceBookingWindowInfo) XXX_Size() int { + return xxx_messageInfo_HotelAdvanceBookingWindowInfo.Size(m) +} +func (m *HotelAdvanceBookingWindowInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelAdvanceBookingWindowInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelAdvanceBookingWindowInfo proto.InternalMessageInfo + +func (m *HotelAdvanceBookingWindowInfo) GetMinDays() *wrappers.Int64Value { + if m != nil { + return m.MinDays + } + return nil +} + +func (m *HotelAdvanceBookingWindowInfo) GetMaxDays() *wrappers.Int64Value { + if m != nil { + return m.MaxDays + } + return nil +} + +// Criterion for length of hotel stay in nights. +type HotelLengthOfStayInfo struct { + // Low end of the number of nights in the stay. + MinNights *wrappers.Int64Value `protobuf:"bytes,1,opt,name=min_nights,json=minNights,proto3" json:"min_nights,omitempty"` + // High end of the number of nights in the stay. 
+ MaxNights *wrappers.Int64Value `protobuf:"bytes,2,opt,name=max_nights,json=maxNights,proto3" json:"max_nights,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelLengthOfStayInfo) Reset() { *m = HotelLengthOfStayInfo{} } +func (m *HotelLengthOfStayInfo) String() string { return proto.CompactTextString(m) } +func (*HotelLengthOfStayInfo) ProtoMessage() {} +func (*HotelLengthOfStayInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{26} +} +func (m *HotelLengthOfStayInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelLengthOfStayInfo.Unmarshal(m, b) +} +func (m *HotelLengthOfStayInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelLengthOfStayInfo.Marshal(b, m, deterministic) +} +func (dst *HotelLengthOfStayInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelLengthOfStayInfo.Merge(dst, src) +} +func (m *HotelLengthOfStayInfo) XXX_Size() int { + return xxx_messageInfo_HotelLengthOfStayInfo.Size(m) +} +func (m *HotelLengthOfStayInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelLengthOfStayInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelLengthOfStayInfo proto.InternalMessageInfo + +func (m *HotelLengthOfStayInfo) GetMinNights() *wrappers.Int64Value { + if m != nil { + return m.MinNights + } + return nil +} + +func (m *HotelLengthOfStayInfo) GetMaxNights() *wrappers.Int64Value { + if m != nil { + return m.MaxNights + } + return nil +} + +// Criterion for day of the week the booking is for. +type HotelCheckInDayInfo struct { + // The day of the week. + DayOfWeek enums.DayOfWeekEnum_DayOfWeek `protobuf:"varint,1,opt,name=day_of_week,json=dayOfWeek,proto3,enum=google.ads.googleads.v1.enums.DayOfWeekEnum_DayOfWeek" json:"day_of_week,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelCheckInDayInfo) Reset() { *m = HotelCheckInDayInfo{} } +func (m *HotelCheckInDayInfo) String() string { return proto.CompactTextString(m) } +func (*HotelCheckInDayInfo) ProtoMessage() {} +func (*HotelCheckInDayInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{27} +} +func (m *HotelCheckInDayInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelCheckInDayInfo.Unmarshal(m, b) +} +func (m *HotelCheckInDayInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelCheckInDayInfo.Marshal(b, m, deterministic) +} +func (dst *HotelCheckInDayInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelCheckInDayInfo.Merge(dst, src) +} +func (m *HotelCheckInDayInfo) XXX_Size() int { + return xxx_messageInfo_HotelCheckInDayInfo.Size(m) +} +func (m *HotelCheckInDayInfo) XXX_DiscardUnknown() { + xxx_messageInfo_HotelCheckInDayInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelCheckInDayInfo proto.InternalMessageInfo + +func (m *HotelCheckInDayInfo) GetDayOfWeek() enums.DayOfWeekEnum_DayOfWeek { + if m != nil { + return m.DayOfWeek + } + return enums.DayOfWeekEnum_UNSPECIFIED +} + +// Criterion for Interaction Type. +type InteractionTypeInfo struct { + // The interaction type. 
+ Type enums.InteractionTypeEnum_InteractionType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.InteractionTypeEnum_InteractionType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InteractionTypeInfo) Reset() { *m = InteractionTypeInfo{} } +func (m *InteractionTypeInfo) String() string { return proto.CompactTextString(m) } +func (*InteractionTypeInfo) ProtoMessage() {} +func (*InteractionTypeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{28} +} +func (m *InteractionTypeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InteractionTypeInfo.Unmarshal(m, b) +} +func (m *InteractionTypeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InteractionTypeInfo.Marshal(b, m, deterministic) +} +func (dst *InteractionTypeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_InteractionTypeInfo.Merge(dst, src) +} +func (m *InteractionTypeInfo) XXX_Size() int { + return xxx_messageInfo_InteractionTypeInfo.Size(m) +} +func (m *InteractionTypeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_InteractionTypeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_InteractionTypeInfo proto.InternalMessageInfo + +func (m *InteractionTypeInfo) GetType() enums.InteractionTypeEnum_InteractionType { + if m != nil { + return m.Type + } + return enums.InteractionTypeEnum_UNSPECIFIED +} + +// Represents an AdSchedule criterion. +// +// AdSchedule is specified as the day of the week and a time interval +// within which ads will be shown. +// +// No more than six AdSchedules can be added for the same day. +type AdScheduleInfo struct { + // Minutes after the start hour at which this schedule starts. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + StartMinute enums.MinuteOfHourEnum_MinuteOfHour `protobuf:"varint,1,opt,name=start_minute,json=startMinute,proto3,enum=google.ads.googleads.v1.enums.MinuteOfHourEnum_MinuteOfHour" json:"start_minute,omitempty"` + // Minutes after the end hour at which this schedule ends. The schedule is + // exclusive of the end minute. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + EndMinute enums.MinuteOfHourEnum_MinuteOfHour `protobuf:"varint,2,opt,name=end_minute,json=endMinute,proto3,enum=google.ads.googleads.v1.enums.MinuteOfHourEnum_MinuteOfHour" json:"end_minute,omitempty"` + // Starting hour in 24 hour time. + // This field must be between 0 and 23, inclusive. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + StartHour *wrappers.Int32Value `protobuf:"bytes,3,opt,name=start_hour,json=startHour,proto3" json:"start_hour,omitempty"` + // Ending hour in 24 hour time; 24 signifies end of the day. + // This field must be between 0 and 24, inclusive. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + EndHour *wrappers.Int32Value `protobuf:"bytes,4,opt,name=end_hour,json=endHour,proto3" json:"end_hour,omitempty"` + // Day of the week the schedule applies to. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. 
+ DayOfWeek enums.DayOfWeekEnum_DayOfWeek `protobuf:"varint,5,opt,name=day_of_week,json=dayOfWeek,proto3,enum=google.ads.googleads.v1.enums.DayOfWeekEnum_DayOfWeek" json:"day_of_week,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdScheduleInfo) Reset() { *m = AdScheduleInfo{} } +func (m *AdScheduleInfo) String() string { return proto.CompactTextString(m) } +func (*AdScheduleInfo) ProtoMessage() {} +func (*AdScheduleInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{29} +} +func (m *AdScheduleInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdScheduleInfo.Unmarshal(m, b) +} +func (m *AdScheduleInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdScheduleInfo.Marshal(b, m, deterministic) +} +func (dst *AdScheduleInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdScheduleInfo.Merge(dst, src) +} +func (m *AdScheduleInfo) XXX_Size() int { + return xxx_messageInfo_AdScheduleInfo.Size(m) +} +func (m *AdScheduleInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AdScheduleInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AdScheduleInfo proto.InternalMessageInfo + +func (m *AdScheduleInfo) GetStartMinute() enums.MinuteOfHourEnum_MinuteOfHour { + if m != nil { + return m.StartMinute + } + return enums.MinuteOfHourEnum_UNSPECIFIED +} + +func (m *AdScheduleInfo) GetEndMinute() enums.MinuteOfHourEnum_MinuteOfHour { + if m != nil { + return m.EndMinute + } + return enums.MinuteOfHourEnum_UNSPECIFIED +} + +func (m *AdScheduleInfo) GetStartHour() *wrappers.Int32Value { + if m != nil { + return m.StartHour + } + return nil +} + +func (m *AdScheduleInfo) GetEndHour() *wrappers.Int32Value { + if m != nil { + return m.EndHour + } + return nil +} + +func (m *AdScheduleInfo) GetDayOfWeek() enums.DayOfWeekEnum_DayOfWeek { + if m != nil { + return m.DayOfWeek + } + return enums.DayOfWeekEnum_UNSPECIFIED +} + +// An age range criterion. +type AgeRangeInfo struct { + // Type of the age range. + Type enums.AgeRangeTypeEnum_AgeRangeType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.AgeRangeTypeEnum_AgeRangeType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AgeRangeInfo) Reset() { *m = AgeRangeInfo{} } +func (m *AgeRangeInfo) String() string { return proto.CompactTextString(m) } +func (*AgeRangeInfo) ProtoMessage() {} +func (*AgeRangeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{30} +} +func (m *AgeRangeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AgeRangeInfo.Unmarshal(m, b) +} +func (m *AgeRangeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AgeRangeInfo.Marshal(b, m, deterministic) +} +func (dst *AgeRangeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AgeRangeInfo.Merge(dst, src) +} +func (m *AgeRangeInfo) XXX_Size() int { + return xxx_messageInfo_AgeRangeInfo.Size(m) +} +func (m *AgeRangeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AgeRangeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AgeRangeInfo proto.InternalMessageInfo + +func (m *AgeRangeInfo) GetType() enums.AgeRangeTypeEnum_AgeRangeType { + if m != nil { + return m.Type + } + return enums.AgeRangeTypeEnum_UNSPECIFIED +} + +// A gender criterion. +type GenderInfo struct { + // Type of the gender. 
+ Type enums.GenderTypeEnum_GenderType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.GenderTypeEnum_GenderType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenderInfo) Reset() { *m = GenderInfo{} } +func (m *GenderInfo) String() string { return proto.CompactTextString(m) } +func (*GenderInfo) ProtoMessage() {} +func (*GenderInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{31} +} +func (m *GenderInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenderInfo.Unmarshal(m, b) +} +func (m *GenderInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenderInfo.Marshal(b, m, deterministic) +} +func (dst *GenderInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenderInfo.Merge(dst, src) +} +func (m *GenderInfo) XXX_Size() int { + return xxx_messageInfo_GenderInfo.Size(m) +} +func (m *GenderInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GenderInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GenderInfo proto.InternalMessageInfo + +func (m *GenderInfo) GetType() enums.GenderTypeEnum_GenderType { + if m != nil { + return m.Type + } + return enums.GenderTypeEnum_UNSPECIFIED +} + +// An income range criterion. +type IncomeRangeInfo struct { + // Type of the income range. + Type enums.IncomeRangeTypeEnum_IncomeRangeType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.IncomeRangeTypeEnum_IncomeRangeType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IncomeRangeInfo) Reset() { *m = IncomeRangeInfo{} } +func (m *IncomeRangeInfo) String() string { return proto.CompactTextString(m) } +func (*IncomeRangeInfo) ProtoMessage() {} +func (*IncomeRangeInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{32} +} +func (m *IncomeRangeInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IncomeRangeInfo.Unmarshal(m, b) +} +func (m *IncomeRangeInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IncomeRangeInfo.Marshal(b, m, deterministic) +} +func (dst *IncomeRangeInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_IncomeRangeInfo.Merge(dst, src) +} +func (m *IncomeRangeInfo) XXX_Size() int { + return xxx_messageInfo_IncomeRangeInfo.Size(m) +} +func (m *IncomeRangeInfo) XXX_DiscardUnknown() { + xxx_messageInfo_IncomeRangeInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_IncomeRangeInfo proto.InternalMessageInfo + +func (m *IncomeRangeInfo) GetType() enums.IncomeRangeTypeEnum_IncomeRangeType { + if m != nil { + return m.Type + } + return enums.IncomeRangeTypeEnum_UNSPECIFIED +} + +// A parental status criterion. +type ParentalStatusInfo struct { + // Type of the parental status. 
+ Type enums.ParentalStatusTypeEnum_ParentalStatusType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.ParentalStatusTypeEnum_ParentalStatusType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParentalStatusInfo) Reset() { *m = ParentalStatusInfo{} } +func (m *ParentalStatusInfo) String() string { return proto.CompactTextString(m) } +func (*ParentalStatusInfo) ProtoMessage() {} +func (*ParentalStatusInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{33} +} +func (m *ParentalStatusInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParentalStatusInfo.Unmarshal(m, b) +} +func (m *ParentalStatusInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParentalStatusInfo.Marshal(b, m, deterministic) +} +func (dst *ParentalStatusInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParentalStatusInfo.Merge(dst, src) +} +func (m *ParentalStatusInfo) XXX_Size() int { + return xxx_messageInfo_ParentalStatusInfo.Size(m) +} +func (m *ParentalStatusInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ParentalStatusInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ParentalStatusInfo proto.InternalMessageInfo + +func (m *ParentalStatusInfo) GetType() enums.ParentalStatusTypeEnum_ParentalStatusType { + if m != nil { + return m.Type + } + return enums.ParentalStatusTypeEnum_UNSPECIFIED +} + +// A YouTube Video criterion. +type YouTubeVideoInfo struct { + // YouTube video id as it appears on the YouTube watch page. + VideoId *wrappers.StringValue `protobuf:"bytes,1,opt,name=video_id,json=videoId,proto3" json:"video_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YouTubeVideoInfo) Reset() { *m = YouTubeVideoInfo{} } +func (m *YouTubeVideoInfo) String() string { return proto.CompactTextString(m) } +func (*YouTubeVideoInfo) ProtoMessage() {} +func (*YouTubeVideoInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{34} +} +func (m *YouTubeVideoInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YouTubeVideoInfo.Unmarshal(m, b) +} +func (m *YouTubeVideoInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YouTubeVideoInfo.Marshal(b, m, deterministic) +} +func (dst *YouTubeVideoInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_YouTubeVideoInfo.Merge(dst, src) +} +func (m *YouTubeVideoInfo) XXX_Size() int { + return xxx_messageInfo_YouTubeVideoInfo.Size(m) +} +func (m *YouTubeVideoInfo) XXX_DiscardUnknown() { + xxx_messageInfo_YouTubeVideoInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_YouTubeVideoInfo proto.InternalMessageInfo + +func (m *YouTubeVideoInfo) GetVideoId() *wrappers.StringValue { + if m != nil { + return m.VideoId + } + return nil +} + +// A YouTube Channel criterion. +type YouTubeChannelInfo struct { + // The YouTube uploader channel id or the channel code of a YouTube channel. 
+ ChannelId *wrappers.StringValue `protobuf:"bytes,1,opt,name=channel_id,json=channelId,proto3" json:"channel_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YouTubeChannelInfo) Reset() { *m = YouTubeChannelInfo{} } +func (m *YouTubeChannelInfo) String() string { return proto.CompactTextString(m) } +func (*YouTubeChannelInfo) ProtoMessage() {} +func (*YouTubeChannelInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{35} +} +func (m *YouTubeChannelInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YouTubeChannelInfo.Unmarshal(m, b) +} +func (m *YouTubeChannelInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YouTubeChannelInfo.Marshal(b, m, deterministic) +} +func (dst *YouTubeChannelInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_YouTubeChannelInfo.Merge(dst, src) +} +func (m *YouTubeChannelInfo) XXX_Size() int { + return xxx_messageInfo_YouTubeChannelInfo.Size(m) +} +func (m *YouTubeChannelInfo) XXX_DiscardUnknown() { + xxx_messageInfo_YouTubeChannelInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_YouTubeChannelInfo proto.InternalMessageInfo + +func (m *YouTubeChannelInfo) GetChannelId() *wrappers.StringValue { + if m != nil { + return m.ChannelId + } + return nil +} + +// A User List criterion. Represents a user list that is defined by the +// advertiser to be targeted. +type UserListInfo struct { + // The User List resource name. + UserList *wrappers.StringValue `protobuf:"bytes,1,opt,name=user_list,json=userList,proto3" json:"user_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListInfo) Reset() { *m = UserListInfo{} } +func (m *UserListInfo) String() string { return proto.CompactTextString(m) } +func (*UserListInfo) ProtoMessage() {} +func (*UserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{36} +} +func (m *UserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListInfo.Unmarshal(m, b) +} +func (m *UserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListInfo.Marshal(b, m, deterministic) +} +func (dst *UserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListInfo.Merge(dst, src) +} +func (m *UserListInfo) XXX_Size() int { + return xxx_messageInfo_UserListInfo.Size(m) +} +func (m *UserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListInfo proto.InternalMessageInfo + +func (m *UserListInfo) GetUserList() *wrappers.StringValue { + if m != nil { + return m.UserList + } + return nil +} + +// A Proximity criterion. The geo point and radius determine what geographical +// area is included. The address is a description of the geo point that does +// not affect ad serving. +// +// There are two ways to create a proximity. First, by setting an address +// and radius. The geo point will be automatically computed. Second, by +// setting a geo point and radius. The address is an optional label that won't +// be validated. +type ProximityInfo struct { + // Latitude and longitude. + GeoPoint *GeoPointInfo `protobuf:"bytes,1,opt,name=geo_point,json=geoPoint,proto3" json:"geo_point,omitempty"` + // The radius of the proximity. 
+ Radius *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=radius,proto3" json:"radius,omitempty"` + // The unit of measurement of the radius. Default is KILOMETERS. + RadiusUnits enums.ProximityRadiusUnitsEnum_ProximityRadiusUnits `protobuf:"varint,3,opt,name=radius_units,json=radiusUnits,proto3,enum=google.ads.googleads.v1.enums.ProximityRadiusUnitsEnum_ProximityRadiusUnits" json:"radius_units,omitempty"` + // Full address. + Address *AddressInfo `protobuf:"bytes,4,opt,name=address,proto3" json:"address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProximityInfo) Reset() { *m = ProximityInfo{} } +func (m *ProximityInfo) String() string { return proto.CompactTextString(m) } +func (*ProximityInfo) ProtoMessage() {} +func (*ProximityInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{37} +} +func (m *ProximityInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProximityInfo.Unmarshal(m, b) +} +func (m *ProximityInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProximityInfo.Marshal(b, m, deterministic) +} +func (dst *ProximityInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProximityInfo.Merge(dst, src) +} +func (m *ProximityInfo) XXX_Size() int { + return xxx_messageInfo_ProximityInfo.Size(m) +} +func (m *ProximityInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProximityInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProximityInfo proto.InternalMessageInfo + +func (m *ProximityInfo) GetGeoPoint() *GeoPointInfo { + if m != nil { + return m.GeoPoint + } + return nil +} + +func (m *ProximityInfo) GetRadius() *wrappers.DoubleValue { + if m != nil { + return m.Radius + } + return nil +} + +func (m *ProximityInfo) GetRadiusUnits() enums.ProximityRadiusUnitsEnum_ProximityRadiusUnits { + if m != nil { + return m.RadiusUnits + } + return enums.ProximityRadiusUnitsEnum_UNSPECIFIED +} + +func (m *ProximityInfo) GetAddress() *AddressInfo { + if m != nil { + return m.Address + } + return nil +} + +// Geo point for proximity criterion. +type GeoPointInfo struct { + // Micro degrees for the longitude. + LongitudeInMicroDegrees *wrappers.Int32Value `protobuf:"bytes,1,opt,name=longitude_in_micro_degrees,json=longitudeInMicroDegrees,proto3" json:"longitude_in_micro_degrees,omitempty"` + // Micro degrees for the latitude. 
+ LatitudeInMicroDegrees *wrappers.Int32Value `protobuf:"bytes,2,opt,name=latitude_in_micro_degrees,json=latitudeInMicroDegrees,proto3" json:"latitude_in_micro_degrees,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoPointInfo) Reset() { *m = GeoPointInfo{} } +func (m *GeoPointInfo) String() string { return proto.CompactTextString(m) } +func (*GeoPointInfo) ProtoMessage() {} +func (*GeoPointInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{38} +} +func (m *GeoPointInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoPointInfo.Unmarshal(m, b) +} +func (m *GeoPointInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoPointInfo.Marshal(b, m, deterministic) +} +func (dst *GeoPointInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoPointInfo.Merge(dst, src) +} +func (m *GeoPointInfo) XXX_Size() int { + return xxx_messageInfo_GeoPointInfo.Size(m) +} +func (m *GeoPointInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GeoPointInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoPointInfo proto.InternalMessageInfo + +func (m *GeoPointInfo) GetLongitudeInMicroDegrees() *wrappers.Int32Value { + if m != nil { + return m.LongitudeInMicroDegrees + } + return nil +} + +func (m *GeoPointInfo) GetLatitudeInMicroDegrees() *wrappers.Int32Value { + if m != nil { + return m.LatitudeInMicroDegrees + } + return nil +} + +// Address for proximity criterion. +type AddressInfo struct { + // Postal code. + PostalCode *wrappers.StringValue `protobuf:"bytes,1,opt,name=postal_code,json=postalCode,proto3" json:"postal_code,omitempty"` + // Province or state code. + ProvinceCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=province_code,json=provinceCode,proto3" json:"province_code,omitempty"` + // Country code. + CountryCode *wrappers.StringValue `protobuf:"bytes,3,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Province or state name. + ProvinceName *wrappers.StringValue `protobuf:"bytes,4,opt,name=province_name,json=provinceName,proto3" json:"province_name,omitempty"` + // Street address line 1. + StreetAddress *wrappers.StringValue `protobuf:"bytes,5,opt,name=street_address,json=streetAddress,proto3" json:"street_address,omitempty"` + // Street address line 2. This field is write-only. It is only used for + // calculating the longitude and latitude of an address when geo_point is + // empty. + StreetAddress2 *wrappers.StringValue `protobuf:"bytes,6,opt,name=street_address2,json=streetAddress2,proto3" json:"street_address2,omitempty"` + // Name of the city. 
+ CityName *wrappers.StringValue `protobuf:"bytes,7,opt,name=city_name,json=cityName,proto3" json:"city_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddressInfo) Reset() { *m = AddressInfo{} } +func (m *AddressInfo) String() string { return proto.CompactTextString(m) } +func (*AddressInfo) ProtoMessage() {} +func (*AddressInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{39} +} +func (m *AddressInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddressInfo.Unmarshal(m, b) +} +func (m *AddressInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddressInfo.Marshal(b, m, deterministic) +} +func (dst *AddressInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddressInfo.Merge(dst, src) +} +func (m *AddressInfo) XXX_Size() int { + return xxx_messageInfo_AddressInfo.Size(m) +} +func (m *AddressInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AddressInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AddressInfo proto.InternalMessageInfo + +func (m *AddressInfo) GetPostalCode() *wrappers.StringValue { + if m != nil { + return m.PostalCode + } + return nil +} + +func (m *AddressInfo) GetProvinceCode() *wrappers.StringValue { + if m != nil { + return m.ProvinceCode + } + return nil +} + +func (m *AddressInfo) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *AddressInfo) GetProvinceName() *wrappers.StringValue { + if m != nil { + return m.ProvinceName + } + return nil +} + +func (m *AddressInfo) GetStreetAddress() *wrappers.StringValue { + if m != nil { + return m.StreetAddress + } + return nil +} + +func (m *AddressInfo) GetStreetAddress2() *wrappers.StringValue { + if m != nil { + return m.StreetAddress2 + } + return nil +} + +func (m *AddressInfo) GetCityName() *wrappers.StringValue { + if m != nil { + return m.CityName + } + return nil +} + +// A topic criterion. Use topics to target or exclude placements in the +// Google Display Network based on the category into which the placement falls +// (for example, "Pets & Animals/Pets/Dogs"). +type TopicInfo struct { + // The Topic Constant resource name. + TopicConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=topic_constant,json=topicConstant,proto3" json:"topic_constant,omitempty"` + // The category to target or exclude. Each subsequent element in the array + // describes a more specific sub-category. For example, + // "Pets & Animals", "Pets", "Dogs" represents the "Pets & Animals/Pets/Dogs" + // category. 
+ Path []*wrappers.StringValue `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TopicInfo) Reset() { *m = TopicInfo{} } +func (m *TopicInfo) String() string { return proto.CompactTextString(m) } +func (*TopicInfo) ProtoMessage() {} +func (*TopicInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{40} +} +func (m *TopicInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TopicInfo.Unmarshal(m, b) +} +func (m *TopicInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TopicInfo.Marshal(b, m, deterministic) +} +func (dst *TopicInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_TopicInfo.Merge(dst, src) +} +func (m *TopicInfo) XXX_Size() int { + return xxx_messageInfo_TopicInfo.Size(m) +} +func (m *TopicInfo) XXX_DiscardUnknown() { + xxx_messageInfo_TopicInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_TopicInfo proto.InternalMessageInfo + +func (m *TopicInfo) GetTopicConstant() *wrappers.StringValue { + if m != nil { + return m.TopicConstant + } + return nil +} + +func (m *TopicInfo) GetPath() []*wrappers.StringValue { + if m != nil { + return m.Path + } + return nil +} + +// A language criterion. +type LanguageInfo struct { + // The language constant resource name. + LanguageConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=language_constant,json=languageConstant,proto3" json:"language_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LanguageInfo) Reset() { *m = LanguageInfo{} } +func (m *LanguageInfo) String() string { return proto.CompactTextString(m) } +func (*LanguageInfo) ProtoMessage() {} +func (*LanguageInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{41} +} +func (m *LanguageInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LanguageInfo.Unmarshal(m, b) +} +func (m *LanguageInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LanguageInfo.Marshal(b, m, deterministic) +} +func (dst *LanguageInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LanguageInfo.Merge(dst, src) +} +func (m *LanguageInfo) XXX_Size() int { + return xxx_messageInfo_LanguageInfo.Size(m) +} +func (m *LanguageInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LanguageInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LanguageInfo proto.InternalMessageInfo + +func (m *LanguageInfo) GetLanguageConstant() *wrappers.StringValue { + if m != nil { + return m.LanguageConstant + } + return nil +} + +// An IpBlock criterion used for IP exclusions. We allow: +// - IPv4 and IPv6 addresses +// - individual addresses (192.168.0.1) +// - masks for individual addresses (192.168.0.1/32) +// - masks for Class C networks (192.168.0.1/24) +type IpBlockInfo struct { + // The IP address of this IP block. 
+ IpAddress *wrappers.StringValue `protobuf:"bytes,1,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IpBlockInfo) Reset() { *m = IpBlockInfo{} } +func (m *IpBlockInfo) String() string { return proto.CompactTextString(m) } +func (*IpBlockInfo) ProtoMessage() {} +func (*IpBlockInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{42} +} +func (m *IpBlockInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IpBlockInfo.Unmarshal(m, b) +} +func (m *IpBlockInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IpBlockInfo.Marshal(b, m, deterministic) +} +func (dst *IpBlockInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_IpBlockInfo.Merge(dst, src) +} +func (m *IpBlockInfo) XXX_Size() int { + return xxx_messageInfo_IpBlockInfo.Size(m) +} +func (m *IpBlockInfo) XXX_DiscardUnknown() { + xxx_messageInfo_IpBlockInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_IpBlockInfo proto.InternalMessageInfo + +func (m *IpBlockInfo) GetIpAddress() *wrappers.StringValue { + if m != nil { + return m.IpAddress + } + return nil +} + +// Content Label for category exclusion. +type ContentLabelInfo struct { + // Content label type, required for CREATE operations. + Type enums.ContentLabelTypeEnum_ContentLabelType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.ContentLabelTypeEnum_ContentLabelType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContentLabelInfo) Reset() { *m = ContentLabelInfo{} } +func (m *ContentLabelInfo) String() string { return proto.CompactTextString(m) } +func (*ContentLabelInfo) ProtoMessage() {} +func (*ContentLabelInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{43} +} +func (m *ContentLabelInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContentLabelInfo.Unmarshal(m, b) +} +func (m *ContentLabelInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContentLabelInfo.Marshal(b, m, deterministic) +} +func (dst *ContentLabelInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContentLabelInfo.Merge(dst, src) +} +func (m *ContentLabelInfo) XXX_Size() int { + return xxx_messageInfo_ContentLabelInfo.Size(m) +} +func (m *ContentLabelInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ContentLabelInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ContentLabelInfo proto.InternalMessageInfo + +func (m *ContentLabelInfo) GetType() enums.ContentLabelTypeEnum_ContentLabelType { + if m != nil { + return m.Type + } + return enums.ContentLabelTypeEnum_UNSPECIFIED +} + +// Represents a Carrier Criterion. +type CarrierInfo struct { + // The Carrier constant resource name. 
+ CarrierConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=carrier_constant,json=carrierConstant,proto3" json:"carrier_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CarrierInfo) Reset() { *m = CarrierInfo{} } +func (m *CarrierInfo) String() string { return proto.CompactTextString(m) } +func (*CarrierInfo) ProtoMessage() {} +func (*CarrierInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{44} +} +func (m *CarrierInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CarrierInfo.Unmarshal(m, b) +} +func (m *CarrierInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CarrierInfo.Marshal(b, m, deterministic) +} +func (dst *CarrierInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CarrierInfo.Merge(dst, src) +} +func (m *CarrierInfo) XXX_Size() int { + return xxx_messageInfo_CarrierInfo.Size(m) +} +func (m *CarrierInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CarrierInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CarrierInfo proto.InternalMessageInfo + +func (m *CarrierInfo) GetCarrierConstant() *wrappers.StringValue { + if m != nil { + return m.CarrierConstant + } + return nil +} + +// Represents a particular interest-based topic to be targeted. +type UserInterestInfo struct { + // The UserInterest resource name. + UserInterestCategory *wrappers.StringValue `protobuf:"bytes,1,opt,name=user_interest_category,json=userInterestCategory,proto3" json:"user_interest_category,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserInterestInfo) Reset() { *m = UserInterestInfo{} } +func (m *UserInterestInfo) String() string { return proto.CompactTextString(m) } +func (*UserInterestInfo) ProtoMessage() {} +func (*UserInterestInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{45} +} +func (m *UserInterestInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserInterestInfo.Unmarshal(m, b) +} +func (m *UserInterestInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserInterestInfo.Marshal(b, m, deterministic) +} +func (dst *UserInterestInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserInterestInfo.Merge(dst, src) +} +func (m *UserInterestInfo) XXX_Size() int { + return xxx_messageInfo_UserInterestInfo.Size(m) +} +func (m *UserInterestInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserInterestInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserInterestInfo proto.InternalMessageInfo + +func (m *UserInterestInfo) GetUserInterestCategory() *wrappers.StringValue { + if m != nil { + return m.UserInterestCategory + } + return nil +} + +// Represents a criterion for targeting webpages of an advertiser's website. +type WebpageInfo struct { + // The name of the criterion that is defined by this parameter. The name value + // will be used for identifying, sorting and filtering criteria with this type + // of parameters. + // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + CriterionName *wrappers.StringValue `protobuf:"bytes,1,opt,name=criterion_name,json=criterionName,proto3" json:"criterion_name,omitempty"` + // Conditions, or logical expressions, for webpage targeting. The list of + // webpage targeting conditions are and-ed together when evaluated + // for targeting. 
+ // + // This field is required for CREATE operations and is prohibited on UPDATE + // operations. + Conditions []*WebpageConditionInfo `protobuf:"bytes,2,rep,name=conditions,proto3" json:"conditions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebpageInfo) Reset() { *m = WebpageInfo{} } +func (m *WebpageInfo) String() string { return proto.CompactTextString(m) } +func (*WebpageInfo) ProtoMessage() {} +func (*WebpageInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{46} +} +func (m *WebpageInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebpageInfo.Unmarshal(m, b) +} +func (m *WebpageInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebpageInfo.Marshal(b, m, deterministic) +} +func (dst *WebpageInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebpageInfo.Merge(dst, src) +} +func (m *WebpageInfo) XXX_Size() int { + return xxx_messageInfo_WebpageInfo.Size(m) +} +func (m *WebpageInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WebpageInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WebpageInfo proto.InternalMessageInfo + +func (m *WebpageInfo) GetCriterionName() *wrappers.StringValue { + if m != nil { + return m.CriterionName + } + return nil +} + +func (m *WebpageInfo) GetConditions() []*WebpageConditionInfo { + if m != nil { + return m.Conditions + } + return nil +} + +// Logical expression for targeting webpages of an advertiser's website. +type WebpageConditionInfo struct { + // Operand of webpage targeting condition. + Operand enums.WebpageConditionOperandEnum_WebpageConditionOperand `protobuf:"varint,1,opt,name=operand,proto3,enum=google.ads.googleads.v1.enums.WebpageConditionOperandEnum_WebpageConditionOperand" json:"operand,omitempty"` + // Operator of webpage targeting condition. + Operator enums.WebpageConditionOperatorEnum_WebpageConditionOperator `protobuf:"varint,2,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.WebpageConditionOperatorEnum_WebpageConditionOperator" json:"operator,omitempty"` + // Argument of webpage targeting condition. 
+ Argument *wrappers.StringValue `protobuf:"bytes,3,opt,name=argument,proto3" json:"argument,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebpageConditionInfo) Reset() { *m = WebpageConditionInfo{} } +func (m *WebpageConditionInfo) String() string { return proto.CompactTextString(m) } +func (*WebpageConditionInfo) ProtoMessage() {} +func (*WebpageConditionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{47} +} +func (m *WebpageConditionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebpageConditionInfo.Unmarshal(m, b) +} +func (m *WebpageConditionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebpageConditionInfo.Marshal(b, m, deterministic) +} +func (dst *WebpageConditionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebpageConditionInfo.Merge(dst, src) +} +func (m *WebpageConditionInfo) XXX_Size() int { + return xxx_messageInfo_WebpageConditionInfo.Size(m) +} +func (m *WebpageConditionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WebpageConditionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WebpageConditionInfo proto.InternalMessageInfo + +func (m *WebpageConditionInfo) GetOperand() enums.WebpageConditionOperandEnum_WebpageConditionOperand { + if m != nil { + return m.Operand + } + return enums.WebpageConditionOperandEnum_UNSPECIFIED +} + +func (m *WebpageConditionInfo) GetOperator() enums.WebpageConditionOperatorEnum_WebpageConditionOperator { + if m != nil { + return m.Operator + } + return enums.WebpageConditionOperatorEnum_UNSPECIFIED +} + +func (m *WebpageConditionInfo) GetArgument() *wrappers.StringValue { + if m != nil { + return m.Argument + } + return nil +} + +// Represents an operating system version to be targeted. +type OperatingSystemVersionInfo struct { + // The operating system version constant resource name. 
+ OperatingSystemVersionConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=operating_system_version_constant,json=operatingSystemVersionConstant,proto3" json:"operating_system_version_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperatingSystemVersionInfo) Reset() { *m = OperatingSystemVersionInfo{} } +func (m *OperatingSystemVersionInfo) String() string { return proto.CompactTextString(m) } +func (*OperatingSystemVersionInfo) ProtoMessage() {} +func (*OperatingSystemVersionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{48} +} +func (m *OperatingSystemVersionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperatingSystemVersionInfo.Unmarshal(m, b) +} +func (m *OperatingSystemVersionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperatingSystemVersionInfo.Marshal(b, m, deterministic) +} +func (dst *OperatingSystemVersionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperatingSystemVersionInfo.Merge(dst, src) +} +func (m *OperatingSystemVersionInfo) XXX_Size() int { + return xxx_messageInfo_OperatingSystemVersionInfo.Size(m) +} +func (m *OperatingSystemVersionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_OperatingSystemVersionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_OperatingSystemVersionInfo proto.InternalMessageInfo + +func (m *OperatingSystemVersionInfo) GetOperatingSystemVersionConstant() *wrappers.StringValue { + if m != nil { + return m.OperatingSystemVersionConstant + } + return nil +} + +// An app payment model criterion. +type AppPaymentModelInfo struct { + // Type of the app payment model. + Type enums.AppPaymentModelTypeEnum_AppPaymentModelType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.AppPaymentModelTypeEnum_AppPaymentModelType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppPaymentModelInfo) Reset() { *m = AppPaymentModelInfo{} } +func (m *AppPaymentModelInfo) String() string { return proto.CompactTextString(m) } +func (*AppPaymentModelInfo) ProtoMessage() {} +func (*AppPaymentModelInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{49} +} +func (m *AppPaymentModelInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppPaymentModelInfo.Unmarshal(m, b) +} +func (m *AppPaymentModelInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppPaymentModelInfo.Marshal(b, m, deterministic) +} +func (dst *AppPaymentModelInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppPaymentModelInfo.Merge(dst, src) +} +func (m *AppPaymentModelInfo) XXX_Size() int { + return xxx_messageInfo_AppPaymentModelInfo.Size(m) +} +func (m *AppPaymentModelInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AppPaymentModelInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AppPaymentModelInfo proto.InternalMessageInfo + +func (m *AppPaymentModelInfo) GetType() enums.AppPaymentModelTypeEnum_AppPaymentModelType { + if m != nil { + return m.Type + } + return enums.AppPaymentModelTypeEnum_UNSPECIFIED +} + +// A mobile device criterion. +type MobileDeviceInfo struct { + // The mobile device constant resource name. 
+ MobileDeviceConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=mobile_device_constant,json=mobileDeviceConstant,proto3" json:"mobile_device_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileDeviceInfo) Reset() { *m = MobileDeviceInfo{} } +func (m *MobileDeviceInfo) String() string { return proto.CompactTextString(m) } +func (*MobileDeviceInfo) ProtoMessage() {} +func (*MobileDeviceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{50} +} +func (m *MobileDeviceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileDeviceInfo.Unmarshal(m, b) +} +func (m *MobileDeviceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileDeviceInfo.Marshal(b, m, deterministic) +} +func (dst *MobileDeviceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileDeviceInfo.Merge(dst, src) +} +func (m *MobileDeviceInfo) XXX_Size() int { + return xxx_messageInfo_MobileDeviceInfo.Size(m) +} +func (m *MobileDeviceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_MobileDeviceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileDeviceInfo proto.InternalMessageInfo + +func (m *MobileDeviceInfo) GetMobileDeviceConstant() *wrappers.StringValue { + if m != nil { + return m.MobileDeviceConstant + } + return nil +} + +// A custom affinity criterion. +// A criterion of this type is only targetable. +type CustomAffinityInfo struct { + // The CustomInterest resource name. + CustomAffinity *wrappers.StringValue `protobuf:"bytes,1,opt,name=custom_affinity,json=customAffinity,proto3" json:"custom_affinity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomAffinityInfo) Reset() { *m = CustomAffinityInfo{} } +func (m *CustomAffinityInfo) String() string { return proto.CompactTextString(m) } +func (*CustomAffinityInfo) ProtoMessage() {} +func (*CustomAffinityInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{51} +} +func (m *CustomAffinityInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomAffinityInfo.Unmarshal(m, b) +} +func (m *CustomAffinityInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomAffinityInfo.Marshal(b, m, deterministic) +} +func (dst *CustomAffinityInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomAffinityInfo.Merge(dst, src) +} +func (m *CustomAffinityInfo) XXX_Size() int { + return xxx_messageInfo_CustomAffinityInfo.Size(m) +} +func (m *CustomAffinityInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CustomAffinityInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomAffinityInfo proto.InternalMessageInfo + +func (m *CustomAffinityInfo) GetCustomAffinity() *wrappers.StringValue { + if m != nil { + return m.CustomAffinity + } + return nil +} + +// A custom intent criterion. +// A criterion of this type is only targetable. +type CustomIntentInfo struct { + // The CustomInterest resource name. 
+ CustomIntent *wrappers.StringValue `protobuf:"bytes,1,opt,name=custom_intent,json=customIntent,proto3" json:"custom_intent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomIntentInfo) Reset() { *m = CustomIntentInfo{} } +func (m *CustomIntentInfo) String() string { return proto.CompactTextString(m) } +func (*CustomIntentInfo) ProtoMessage() {} +func (*CustomIntentInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{52} +} +func (m *CustomIntentInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomIntentInfo.Unmarshal(m, b) +} +func (m *CustomIntentInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomIntentInfo.Marshal(b, m, deterministic) +} +func (dst *CustomIntentInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomIntentInfo.Merge(dst, src) +} +func (m *CustomIntentInfo) XXX_Size() int { + return xxx_messageInfo_CustomIntentInfo.Size(m) +} +func (m *CustomIntentInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CustomIntentInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomIntentInfo proto.InternalMessageInfo + +func (m *CustomIntentInfo) GetCustomIntent() *wrappers.StringValue { + if m != nil { + return m.CustomIntent + } + return nil +} + +// A radius around a list of locations specified via a feed. +type LocationGroupInfo struct { + // Feed specifying locations for targeting. + // This is required and must be set in CREATE operations. + Feed *wrappers.StringValue `protobuf:"bytes,1,opt,name=feed,proto3" json:"feed,omitempty"` + // Geo target constant(s) restricting the scope of the geographic area within + // the feed. Currently only one geo target constant is allowed. + GeoTargetConstants []*wrappers.StringValue `protobuf:"bytes,2,rep,name=geo_target_constants,json=geoTargetConstants,proto3" json:"geo_target_constants,omitempty"` + // Distance in units specifying the radius around targeted locations. + // This is required and must be set in CREATE operations. + Radius *wrappers.Int64Value `protobuf:"bytes,3,opt,name=radius,proto3" json:"radius,omitempty"` + // Unit of the radius, miles and meters supported currently. + // This is required and must be set in CREATE operations. 
+ RadiusUnits enums.LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits `protobuf:"varint,4,opt,name=radius_units,json=radiusUnits,proto3,enum=google.ads.googleads.v1.enums.LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits" json:"radius_units,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationGroupInfo) Reset() { *m = LocationGroupInfo{} } +func (m *LocationGroupInfo) String() string { return proto.CompactTextString(m) } +func (*LocationGroupInfo) ProtoMessage() {} +func (*LocationGroupInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_criteria_e78ca570234fbeae, []int{53} +} +func (m *LocationGroupInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationGroupInfo.Unmarshal(m, b) +} +func (m *LocationGroupInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationGroupInfo.Marshal(b, m, deterministic) +} +func (dst *LocationGroupInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationGroupInfo.Merge(dst, src) +} +func (m *LocationGroupInfo) XXX_Size() int { + return xxx_messageInfo_LocationGroupInfo.Size(m) +} +func (m *LocationGroupInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationGroupInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationGroupInfo proto.InternalMessageInfo + +func (m *LocationGroupInfo) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *LocationGroupInfo) GetGeoTargetConstants() []*wrappers.StringValue { + if m != nil { + return m.GeoTargetConstants + } + return nil +} + +func (m *LocationGroupInfo) GetRadius() *wrappers.Int64Value { + if m != nil { + return m.Radius + } + return nil +} + +func (m *LocationGroupInfo) GetRadiusUnits() enums.LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits { + if m != nil { + return m.RadiusUnits + } + return enums.LocationGroupRadiusUnitsEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*KeywordInfo)(nil), "google.ads.googleads.v1.common.KeywordInfo") + proto.RegisterType((*PlacementInfo)(nil), "google.ads.googleads.v1.common.PlacementInfo") + proto.RegisterType((*MobileAppCategoryInfo)(nil), "google.ads.googleads.v1.common.MobileAppCategoryInfo") + proto.RegisterType((*MobileApplicationInfo)(nil), "google.ads.googleads.v1.common.MobileApplicationInfo") + proto.RegisterType((*LocationInfo)(nil), "google.ads.googleads.v1.common.LocationInfo") + proto.RegisterType((*DeviceInfo)(nil), "google.ads.googleads.v1.common.DeviceInfo") + proto.RegisterType((*PreferredContentInfo)(nil), "google.ads.googleads.v1.common.PreferredContentInfo") + proto.RegisterType((*ListingGroupInfo)(nil), "google.ads.googleads.v1.common.ListingGroupInfo") + proto.RegisterType((*ListingScopeInfo)(nil), "google.ads.googleads.v1.common.ListingScopeInfo") + proto.RegisterType((*ListingDimensionInfo)(nil), "google.ads.googleads.v1.common.ListingDimensionInfo") + proto.RegisterType((*ListingBrandInfo)(nil), "google.ads.googleads.v1.common.ListingBrandInfo") + proto.RegisterType((*HotelIdInfo)(nil), "google.ads.googleads.v1.common.HotelIdInfo") + proto.RegisterType((*HotelClassInfo)(nil), "google.ads.googleads.v1.common.HotelClassInfo") + proto.RegisterType((*HotelCountryRegionInfo)(nil), "google.ads.googleads.v1.common.HotelCountryRegionInfo") + proto.RegisterType((*HotelStateInfo)(nil), "google.ads.googleads.v1.common.HotelStateInfo") + proto.RegisterType((*HotelCityInfo)(nil), "google.ads.googleads.v1.common.HotelCityInfo") + 
proto.RegisterType((*ListingCustomAttributeInfo)(nil), "google.ads.googleads.v1.common.ListingCustomAttributeInfo") + proto.RegisterType((*ProductBiddingCategoryInfo)(nil), "google.ads.googleads.v1.common.ProductBiddingCategoryInfo") + proto.RegisterType((*ProductChannelInfo)(nil), "google.ads.googleads.v1.common.ProductChannelInfo") + proto.RegisterType((*ProductChannelExclusivityInfo)(nil), "google.ads.googleads.v1.common.ProductChannelExclusivityInfo") + proto.RegisterType((*ProductConditionInfo)(nil), "google.ads.googleads.v1.common.ProductConditionInfo") + proto.RegisterType((*ProductItemIdInfo)(nil), "google.ads.googleads.v1.common.ProductItemIdInfo") + proto.RegisterType((*ProductTypeInfo)(nil), "google.ads.googleads.v1.common.ProductTypeInfo") + proto.RegisterType((*UnknownListingDimensionInfo)(nil), "google.ads.googleads.v1.common.UnknownListingDimensionInfo") + proto.RegisterType((*HotelDateSelectionTypeInfo)(nil), "google.ads.googleads.v1.common.HotelDateSelectionTypeInfo") + proto.RegisterType((*HotelAdvanceBookingWindowInfo)(nil), "google.ads.googleads.v1.common.HotelAdvanceBookingWindowInfo") + proto.RegisterType((*HotelLengthOfStayInfo)(nil), "google.ads.googleads.v1.common.HotelLengthOfStayInfo") + proto.RegisterType((*HotelCheckInDayInfo)(nil), "google.ads.googleads.v1.common.HotelCheckInDayInfo") + proto.RegisterType((*InteractionTypeInfo)(nil), "google.ads.googleads.v1.common.InteractionTypeInfo") + proto.RegisterType((*AdScheduleInfo)(nil), "google.ads.googleads.v1.common.AdScheduleInfo") + proto.RegisterType((*AgeRangeInfo)(nil), "google.ads.googleads.v1.common.AgeRangeInfo") + proto.RegisterType((*GenderInfo)(nil), "google.ads.googleads.v1.common.GenderInfo") + proto.RegisterType((*IncomeRangeInfo)(nil), "google.ads.googleads.v1.common.IncomeRangeInfo") + proto.RegisterType((*ParentalStatusInfo)(nil), "google.ads.googleads.v1.common.ParentalStatusInfo") + proto.RegisterType((*YouTubeVideoInfo)(nil), "google.ads.googleads.v1.common.YouTubeVideoInfo") + proto.RegisterType((*YouTubeChannelInfo)(nil), "google.ads.googleads.v1.common.YouTubeChannelInfo") + proto.RegisterType((*UserListInfo)(nil), "google.ads.googleads.v1.common.UserListInfo") + proto.RegisterType((*ProximityInfo)(nil), "google.ads.googleads.v1.common.ProximityInfo") + proto.RegisterType((*GeoPointInfo)(nil), "google.ads.googleads.v1.common.GeoPointInfo") + proto.RegisterType((*AddressInfo)(nil), "google.ads.googleads.v1.common.AddressInfo") + proto.RegisterType((*TopicInfo)(nil), "google.ads.googleads.v1.common.TopicInfo") + proto.RegisterType((*LanguageInfo)(nil), "google.ads.googleads.v1.common.LanguageInfo") + proto.RegisterType((*IpBlockInfo)(nil), "google.ads.googleads.v1.common.IpBlockInfo") + proto.RegisterType((*ContentLabelInfo)(nil), "google.ads.googleads.v1.common.ContentLabelInfo") + proto.RegisterType((*CarrierInfo)(nil), "google.ads.googleads.v1.common.CarrierInfo") + proto.RegisterType((*UserInterestInfo)(nil), "google.ads.googleads.v1.common.UserInterestInfo") + proto.RegisterType((*WebpageInfo)(nil), "google.ads.googleads.v1.common.WebpageInfo") + proto.RegisterType((*WebpageConditionInfo)(nil), "google.ads.googleads.v1.common.WebpageConditionInfo") + proto.RegisterType((*OperatingSystemVersionInfo)(nil), "google.ads.googleads.v1.common.OperatingSystemVersionInfo") + proto.RegisterType((*AppPaymentModelInfo)(nil), "google.ads.googleads.v1.common.AppPaymentModelInfo") + proto.RegisterType((*MobileDeviceInfo)(nil), "google.ads.googleads.v1.common.MobileDeviceInfo") + 
proto.RegisterType((*CustomAffinityInfo)(nil), "google.ads.googleads.v1.common.CustomAffinityInfo") + proto.RegisterType((*CustomIntentInfo)(nil), "google.ads.googleads.v1.common.CustomIntentInfo") + proto.RegisterType((*LocationGroupInfo)(nil), "google.ads.googleads.v1.common.LocationGroupInfo") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/criteria.proto", fileDescriptor_criteria_e78ca570234fbeae) +} + +var fileDescriptor_criteria_e78ca570234fbeae = []byte{ + // 2774 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x5a, 0x4f, 0x73, 0x1c, 0x47, + 0x15, 0xcf, 0xae, 0x64, 0x4b, 0x7a, 0x2b, 0xc9, 0xf2, 0x58, 0x51, 0xd6, 0xca, 0x9f, 0x0a, 0x73, + 0x4a, 0x11, 0xb2, 0xb2, 0x64, 0x63, 0x82, 0x42, 0x30, 0x2b, 0xc9, 0xc8, 0x9b, 0xc8, 0xb6, 0xb2, + 0x92, 0xe5, 0xc4, 0x71, 0x98, 0xb4, 0x66, 0x7a, 0x67, 0x1b, 0xcf, 0x74, 0x4f, 0xcd, 0xf4, 0x48, + 0x5a, 0x0e, 0x50, 0x45, 0x15, 0xc5, 0x81, 0x03, 0x77, 0x6e, 0x50, 0x1c, 0x80, 0x13, 0x1f, 0x81, + 0x82, 0x0b, 0xc5, 0x47, 0xe0, 0x13, 0x50, 0x7c, 0x06, 0x0e, 0x54, 0xff, 0x9b, 0x9d, 0xfd, 0xa7, + 0x99, 0x55, 0xe5, 0xa4, 0x9d, 0xee, 0xf7, 0xfb, 0xbd, 0xd7, 0xdd, 0xaf, 0xdf, 0x7b, 0xdd, 0x2d, + 0xf8, 0xc0, 0x67, 0xcc, 0x0f, 0xf0, 0x06, 0xf2, 0x92, 0x0d, 0xf5, 0x53, 0xfc, 0x3a, 0xdb, 0xdc, + 0x70, 0x59, 0x18, 0x32, 0xba, 0xe1, 0xc6, 0x84, 0xe3, 0x98, 0xa0, 0x46, 0x14, 0x33, 0xce, 0xac, + 0x77, 0x94, 0x4c, 0x03, 0x79, 0x49, 0x23, 0x13, 0x6f, 0x9c, 0x6d, 0x36, 0x94, 0xf8, 0xfa, 0xd6, + 0x24, 0x3a, 0x4c, 0xd3, 0x30, 0xd9, 0x40, 0x3e, 0x76, 0x62, 0x44, 0x7d, 0xec, 0xf0, 0x5e, 0x84, + 0x15, 0xe7, 0xfa, 0x76, 0x01, 0x26, 0x8a, 0x9c, 0x08, 0xf5, 0x42, 0x4c, 0xb9, 0x13, 0x32, 0x0f, + 0x07, 0x79, 0xec, 0xfd, 0xcb, 0xb1, 0x2e, 0xa3, 0x5c, 0xe0, 0x02, 0x74, 0x3a, 0x88, 0xdb, 0xb8, + 0x1c, 0xe7, 0xa1, 0x9e, 0xc3, 0x3a, 0xce, 0x39, 0xc6, 0xaf, 0x34, 0xe0, 0xdb, 0x05, 0x00, 0x7c, + 0x46, 0xdc, 0x92, 0xe4, 0x3e, 0xa6, 0x1e, 0x8e, 0xf3, 0xd6, 0x7c, 0x7c, 0x39, 0xa0, 0xcb, 0x38, + 0x0e, 0x1c, 0x0f, 0x71, 0xec, 0x24, 0x38, 0xc0, 0x2e, 0x27, 0x8c, 0xe6, 0xe1, 0xdf, 0xbd, 0x1c, + 0x4e, 0xa8, 0xcb, 0xc2, 0x31, 0xf3, 0x7e, 0xaf, 0x08, 0xc6, 0x71, 0x8c, 0x46, 0x94, 0x15, 0xcc, + 0xf8, 0x2b, 0xdc, 0x3b, 0x67, 0xb1, 0xe7, 0x84, 0x88, 0xbb, 0xdd, 0x3c, 0x6e, 0xe7, 0x72, 0x5c, + 0x40, 0x12, 0x4e, 0xa8, 0xef, 0xb8, 0x69, 0xc2, 0x59, 0xe8, 0x20, 0xce, 0x63, 0x72, 0x9a, 0x72, + 0xec, 0x10, 0xea, 0xe1, 0x8b, 0x72, 0xba, 0x0d, 0x87, 0x1f, 0xb3, 0x34, 0xca, 0xeb, 0x7e, 0x50, + 0x80, 0x63, 0x2e, 0x92, 0xc3, 0x54, 0xc0, 0x18, 0x79, 0x24, 0x4d, 0x9c, 0x94, 0x12, 0x9e, 0x68, + 0x82, 0x02, 0xb7, 0x0e, 0x09, 0x15, 0xa6, 0xb2, 0x8e, 0xd3, 0x65, 0x69, 0xac, 0x31, 0x1f, 0x5e, + 0x8e, 0x89, 0x50, 0x8c, 0x29, 0x47, 0x81, 0x93, 0x70, 0xc4, 0xd3, 0x64, 0x8a, 0x0d, 0x11, 0xc5, + 0xb8, 0x83, 0xe3, 0x18, 0x7b, 0x8e, 0x71, 0xef, 0xf2, 0xd3, 0x1c, 0xc5, 0xcc, 0x4b, 0x5d, 0xee, + 0x9c, 0x12, 0xcf, 0x93, 0xd3, 0x8d, 0x38, 0xf6, 0x59, 0xdc, 0x73, 0x02, 0x7c, 0x86, 0x03, 0xcd, + 0x71, 0xb7, 0x1c, 0x87, 0xdb, 0x45, 0x94, 0x66, 0xa0, 0x07, 0x53, 0x81, 0x1c, 0x7c, 0xe1, 0x06, + 0x69, 0x42, 0xce, 0x08, 0xef, 0x95, 0xf3, 0xe2, 0x8c, 0x80, 0x51, 0x8f, 0x88, 0xd5, 0x2a, 0xe7, + 0x13, 0x06, 0x26, 0xa6, 0x68, 0x60, 0x90, 0x85, 0x93, 0xcc, 0x2e, 0x48, 0x48, 0x78, 0x6f, 0x9c, + 0x3b, 0x14, 0xec, 0xd7, 0x73, 0x7c, 0x1a, 0x89, 0x48, 0x97, 0x99, 0xea, 0xb0, 0x08, 0xc7, 0x88, + 0x7a, 0x1a, 0xfe, 0xc3, 0x2b, 0xc1, 0x39, 0x33, 0x9e, 0xa5, 0x83, 0xf0, 0x86, 0xfc, 0x3a, 0x4d, + 0x3b, 0x1b, 0xe7, 0x31, 0x8a, 0x22, 0x1c, 0x1b, 0xf3, 0xde, 0x32, 0xfc, 
0x11, 0xd9, 0x40, 0x94, + 0x32, 0x2e, 0xbd, 0x5b, 0xf7, 0xda, 0x7f, 0xac, 0x40, 0xed, 0x53, 0xb5, 0x4b, 0x5b, 0xb4, 0xc3, + 0xac, 0x3b, 0x30, 0xcb, 0xf1, 0x05, 0xaf, 0x57, 0xde, 0xad, 0xbc, 0x57, 0xdb, 0x7a, 0x4b, 0x87, + 0xf5, 0x86, 0x21, 0x6f, 0x1c, 0xf1, 0x98, 0x50, 0xff, 0x04, 0x05, 0x29, 0x6e, 0x4b, 0x49, 0xcb, + 0x05, 0xe8, 0x6f, 0xef, 0x7a, 0xf5, 0xdd, 0xca, 0x7b, 0xcb, 0x5b, 0x7b, 0x8d, 0x49, 0x99, 0x41, + 0x0e, 0xaa, 0xa1, 0x35, 0x3e, 0x16, 0xb8, 0xe3, 0x5e, 0x84, 0x1f, 0xd2, 0x34, 0x1c, 0x69, 0x6c, + 0x2f, 0x84, 0xe6, 0xa7, 0xfd, 0x00, 0x96, 0x0e, 0x03, 0xe4, 0x62, 0x11, 0xf7, 0xa5, 0x9d, 0x0d, + 0x98, 0x49, 0xe3, 0xa0, 0x94, 0x99, 0x42, 0xd0, 0x3e, 0x83, 0xd7, 0x1f, 0xb3, 0x53, 0x12, 0xe0, + 0x66, 0x14, 0xed, 0x6a, 0x37, 0x97, 0x44, 0x5f, 0xc1, 0x5b, 0xa1, 0xec, 0x70, 0x44, 0x6a, 0xc9, + 0x76, 0x80, 0xcb, 0x68, 0xc2, 0x11, 0x2d, 0x37, 0x11, 0xb7, 0xc3, 0x61, 0xea, 0x5d, 0x0d, 0xb7, + 0x7f, 0x9e, 0xd3, 0x1b, 0x10, 0x15, 0x59, 0xa4, 0xde, 0xbb, 0x70, 0x5d, 0x28, 0x24, 0x9e, 0x9c, + 0xb2, 0x22, 0x0d, 0xd7, 0x50, 0x14, 0xb5, 0x3c, 0xb1, 0x3a, 0x14, 0x85, 0xb8, 0x3e, 0x53, 0x66, + 0x75, 0x84, 0xa4, 0xfd, 0x12, 0x16, 0x0f, 0x58, 0x4e, 0xed, 0x01, 0xdc, 0xf2, 0x31, 0x73, 0x38, + 0x8a, 0x7d, 0xcc, 0xa7, 0x1b, 0xe5, 0x4d, 0x1f, 0xb3, 0x63, 0x89, 0xcb, 0x46, 0xd7, 0x06, 0xd8, + 0x93, 0xb9, 0x4e, 0x72, 0xef, 0xc1, 0xac, 0xf4, 0x81, 0x8a, 0xf4, 0x81, 0x3b, 0x05, 0x3e, 0xa0, + 0x80, 0x72, 0xe5, 0xd5, 0xcf, 0xb6, 0x44, 0xdb, 0x17, 0xb0, 0x7a, 0x68, 0x62, 0xda, 0xae, 0x0a, + 0x69, 0x92, 0xfd, 0x6b, 0xcd, 0xae, 0x3c, 0xec, 0xa0, 0x80, 0x7d, 0x98, 0x22, 0xf3, 0xb2, 0x71, + 0x1d, 0x5a, 0xf3, 0xef, 0xaa, 0xb0, 0x72, 0xa0, 0xb2, 0xc6, 0xbe, 0x88, 0xfd, 0x52, 0xed, 0xe7, + 0x03, 0x83, 0x2a, 0x72, 0xec, 0x3c, 0x3c, 0x53, 0x39, 0xdc, 0xa8, 0xd4, 0x59, 0x47, 0x00, 0x2e, + 0x4a, 0xb0, 0x73, 0x26, 0x66, 0x57, 0x7b, 0xc1, 0xbd, 0xc6, 0xe5, 0x25, 0x95, 0xe1, 0xda, 0x23, + 0x21, 0xa6, 0x89, 0x5e, 0xd4, 0xf6, 0x82, 0xe0, 0x91, 0x8b, 0x64, 0x3d, 0x87, 0xdb, 0x2a, 0x97, + 0x38, 0xc8, 0xd3, 0x19, 0x4c, 0x17, 0x6d, 0x8c, 0x96, 0x72, 0x9b, 0x35, 0x05, 0x6f, 0x7a, 0xd2, + 0xdc, 0x5d, 0x83, 0xb5, 0xbb, 0xd9, 0xdc, 0x1c, 0xb9, 0x2c, 0x52, 0x0b, 0x7e, 0x0c, 0xe0, 0x19, + 0x43, 0x92, 0x7a, 0xf5, 0xdd, 0x99, 0x2b, 0x8f, 0x20, 0xc7, 0x63, 0xff, 0xa6, 0x06, 0xab, 0xe3, + 0x84, 0xac, 0xe7, 0xb0, 0x64, 0x92, 0xfa, 0xa9, 0x08, 0xa0, 0xda, 0x6b, 0xef, 0x94, 0xd4, 0xb8, + 0x23, 0x30, 0x82, 0xe8, 0xd1, 0x6b, 0xed, 0xc5, 0x20, 0xd7, 0x66, 0x3d, 0x82, 0x79, 0x55, 0x55, + 0x65, 0xbb, 0xf1, 0xfd, 0x22, 0xce, 0x47, 0x42, 0xbe, 0x65, 0xe8, 0xe6, 0xba, 0xea, 0xd3, 0xfa, + 0x0c, 0x6a, 0x8a, 0xc9, 0x0d, 0x50, 0x92, 0xe8, 0x09, 0x6f, 0x94, 0x22, 0xdb, 0x15, 0x08, 0xcd, + 0x07, 0xdd, 0xac, 0xc5, 0xfa, 0x29, 0xac, 0x6a, 0x4a, 0x96, 0x52, 0x1e, 0xf7, 0x9c, 0x18, 0xfb, + 0x62, 0x31, 0x67, 0x25, 0xf7, 0xfd, 0x72, 0xdc, 0x0a, 0xda, 0x96, 0x48, 0xad, 0xc3, 0xea, 0x8e, + 0xf4, 0xf4, 0xcd, 0x17, 0x65, 0x08, 0xae, 0x5f, 0x9b, 0xc2, 0xfc, 0x23, 0x81, 0x18, 0x30, 0x5f, + 0xb6, 0x58, 0x4f, 0x00, 0xb4, 0xf9, 0x84, 0xf7, 0xea, 0xd7, 0x25, 0xe3, 0x07, 0xe5, 0x8c, 0x26, + 0xbc, 0xa7, 0x09, 0x17, 0xba, 0xa6, 0xc1, 0x3a, 0x83, 0xfa, 0xa4, 0xea, 0xb0, 0x3e, 0x27, 0xd9, + 0xb7, 0x4b, 0xfa, 0xc3, 0xae, 0x84, 0x37, 0x0d, 0x5a, 0xab, 0x5a, 0x0b, 0xc6, 0xf6, 0x0a, 0xbd, + 0x93, 0xca, 0xa5, 0xfa, 0x52, 0x39, 0xbd, 0x87, 0x0a, 0xbf, 0xa3, 0xe0, 0xf9, 0x2c, 0x24, 0xf4, + 0x46, 0x63, 0x7b, 0xad, 0xaf, 0xe0, 0xc6, 0x50, 0xb5, 0x54, 0x9f, 0x97, 0xea, 0xb6, 0x4a, 0xaa, + 0xdb, 0x55, 0x28, 0xad, 0x66, 0x39, 0x1a, 0x68, 0xb5, 0x7e, 0x01, 0x6f, 0x5e, 0x52, 0x8c, 0xd5, + 
0x17, 0xa4, 0xaa, 0x8f, 0xa7, 0x53, 0xf5, 0xb0, 0x4f, 0xa0, 0xb5, 0xde, 0x8e, 0x26, 0x09, 0x58, + 0x2e, 0xdc, 0x1c, 0x29, 0xe6, 0xea, 0x50, 0x2e, 0x18, 0x1a, 0xb5, 0x06, 0xa7, 0xb5, 0xad, 0x44, + 0x43, 0xed, 0xd6, 0x97, 0xfd, 0x49, 0x24, 0x1c, 0x87, 0x62, 0x9f, 0xd7, 0xa4, 0x8a, 0xcd, 0x92, + 0x2a, 0x5a, 0x1c, 0x87, 0xd9, 0x6e, 0x5f, 0x8a, 0xf2, 0x8d, 0xd6, 0x31, 0x2c, 0xe6, 0xeb, 0xca, + 0xfa, 0xa2, 0x64, 0xde, 0x28, 0xc9, 0x2c, 0x12, 0x82, 0xe6, 0xad, 0x45, 0xfd, 0x26, 0xab, 0x07, + 0xb7, 0x53, 0xfa, 0x8a, 0xb2, 0x73, 0xea, 0x18, 0x7f, 0xcf, 0x62, 0x64, 0x7d, 0x59, 0xaa, 0xf8, + 0xa8, 0x48, 0xc5, 0x33, 0x45, 0x30, 0x2e, 0x98, 0x3e, 0x7a, 0xad, 0xfd, 0x46, 0x3a, 0xbe, 0x7b, + 0xa7, 0x06, 0x0b, 0x99, 0x2a, 0xfb, 0xc7, 0x59, 0xdc, 0xcf, 0xe2, 0xa7, 0xb5, 0x05, 0xd7, 0x54, + 0xd2, 0x2a, 0x53, 0x36, 0x28, 0x51, 0xbb, 0x09, 0xb5, 0x5c, 0xcc, 0xbc, 0x12, 0xc5, 0x2e, 0x2c, + 0x0f, 0x46, 0x4a, 0x6b, 0x73, 0x90, 0xe5, 0xcd, 0x11, 0x96, 0x16, 0xe5, 0xf7, 0xef, 0x0d, 0x90, + 0x44, 0xb0, 0x36, 0x3e, 0x24, 0x5a, 0x27, 0x50, 0x1f, 0x0c, 0xb1, 0xb9, 0xcc, 0x59, 0xc6, 0xca, + 0x35, 0x37, 0x4f, 0xd9, 0xcf, 0x9c, 0xcf, 0xb5, 0xd9, 0x59, 0x84, 0xb4, 0x1e, 0xc2, 0x0d, 0x19, + 0x60, 0xa7, 0x54, 0xb0, 0x2c, 0x41, 0x7d, 0xe2, 0x63, 0x58, 0x1a, 0x08, 0x94, 0xd6, 0x2e, 0x2c, + 0x8b, 0x28, 0x3b, 0x25, 0xed, 0x92, 0xc0, 0xf4, 0x59, 0xff, 0x5e, 0x81, 0xf5, 0xc9, 0x11, 0xf2, + 0x2a, 0x0b, 0x67, 0x45, 0x70, 0x4d, 0x1e, 0xdc, 0x75, 0xed, 0xf6, 0xa2, 0x5c, 0x11, 0x35, 0xa2, + 0xdd, 0xc3, 0x17, 0xf9, 0x7a, 0x6a, 0x5c, 0x7f, 0x5b, 0x29, 0xb2, 0xff, 0x57, 0x81, 0xf5, 0xc9, + 0xe1, 0xd6, 0x7a, 0x1f, 0xaa, 0xc4, 0x2b, 0xe3, 0x34, 0x55, 0xe2, 0x59, 0x0f, 0x60, 0xd1, 0xf8, + 0x85, 0xcb, 0x3c, 0x5c, 0xaa, 0x5e, 0xaf, 0x69, 0xc4, 0x2e, 0xf3, 0xe4, 0xf0, 0xe5, 0x59, 0x53, + 0x96, 0x03, 0xc5, 0xc3, 0x1f, 0x6f, 0xf7, 0x81, 0x60, 0xd0, 0x15, 0xec, 0xc4, 0xfe, 0xb6, 0x52, + 0x64, 0x47, 0x60, 0x8d, 0x46, 0x7f, 0xeb, 0x05, 0xcc, 0x99, 0x14, 0xa2, 0xaa, 0xd9, 0x1f, 0x95, + 0xb3, 0xc4, 0x44, 0xed, 0x9c, 0x72, 0xdd, 0xd4, 0x36, 0x84, 0xf6, 0x9f, 0x2b, 0xf0, 0xf6, 0xa5, + 0x59, 0xc0, 0xfa, 0x75, 0x05, 0x6e, 0x8d, 0x4b, 0x31, 0xca, 0x94, 0x93, 0xe9, 0x4c, 0xe9, 0xe3, + 0xc7, 0x58, 0x95, 0xeb, 0x6d, 0x5b, 0xee, 0x48, 0x9b, 0xfd, 0x33, 0x71, 0xc2, 0x18, 0xcd, 0x1c, + 0xd6, 0x29, 0x2c, 0xf4, 0x53, 0x50, 0xb9, 0x7a, 0x7f, 0x98, 0x67, 0xc0, 0x1a, 0xd3, 0xd8, 0xee, + 0xd3, 0xda, 0xfb, 0x70, 0x73, 0x24, 0xa5, 0x5c, 0x29, 0x18, 0xfe, 0xbe, 0x02, 0x37, 0x86, 0x52, + 0xc8, 0x95, 0xf6, 0xe6, 0x0b, 0xe3, 0x9c, 0xd5, 0x69, 0x06, 0x2c, 0x54, 0x8e, 0x78, 0x64, 0xd6, + 0x68, 0xdc, 0xf0, 0x6d, 0x78, 0xf3, 0x92, 0x14, 0x64, 0xff, 0xb2, 0x02, 0xeb, 0x32, 0x80, 0xed, + 0x21, 0x8e, 0x8f, 0xcc, 0x5d, 0x66, 0x36, 0x1a, 0x6f, 0xe0, 0xe4, 0x75, 0x58, 0x60, 0xd8, 0x78, + 0x22, 0x69, 0xde, 0xf8, 0x2e, 0x7d, 0xe8, 0xfb, 0x6d, 0x05, 0xde, 0x96, 0x02, 0x4d, 0xef, 0x0c, + 0x51, 0x17, 0xef, 0x30, 0xf6, 0x8a, 0x50, 0xff, 0x39, 0xa1, 0x1e, 0x3b, 0x97, 0x76, 0xdc, 0x87, + 0xf9, 0x90, 0x50, 0xc7, 0x43, 0xbd, 0xa4, 0x4c, 0xc8, 0x98, 0x0b, 0x09, 0xdd, 0x43, 0xbd, 0x44, + 0xe2, 0xd0, 0x85, 0xc2, 0x55, 0xcb, 0xe0, 0xd0, 0x85, 0xc0, 0x09, 0x8b, 0x5e, 0x97, 0x16, 0x1d, + 0x60, 0xea, 0xf3, 0xee, 0xd3, 0xce, 0x11, 0x47, 0x6a, 0x0b, 0x6d, 0x03, 0x08, 0x4b, 0x28, 0xf1, + 0xbb, 0xbc, 0x94, 0x2d, 0x0b, 0x21, 0xa1, 0x4f, 0xa4, 0xb4, 0xc4, 0xa2, 0x0b, 0x83, 0xad, 0x96, + 0xc1, 0xa2, 0x0b, 0x85, 0xb5, 0x43, 0xb8, 0xa5, 0x12, 0x4d, 0x17, 0xbb, 0xaf, 0x5a, 0x62, 0x74, + 0x3a, 0x61, 0xd6, 0x72, 0x57, 0xe3, 0x7a, 0x9d, 0xee, 0x17, 0x1d, 0xfb, 0x51, 0xef, 0x69, 0xe7, + 0x39, 0xc6, 0xaf, 0xd4, 
0xc9, 0xdf, 0x7c, 0xb5, 0x17, 0x3c, 0xf3, 0x53, 0xa8, 0x6b, 0xf5, 0xaf, + 0x9b, 0x33, 0x7f, 0x38, 0x19, 0xf0, 0x87, 0x9d, 0x02, 0x3d, 0x43, 0x0c, 0x52, 0xdb, 0x50, 0x9b, + 0x39, 0xf6, 0xcf, 0xc0, 0x72, 0xd3, 0x3b, 0x72, 0xbb, 0xd8, 0x4b, 0x03, 0xa5, 0xca, 0x81, 0xc5, + 0x84, 0xa3, 0x98, 0x3b, 0xea, 0x2e, 0x57, 0xab, 0xfc, 0x41, 0x81, 0xca, 0xc7, 0x52, 0xf8, 0x69, + 0xe7, 0x11, 0x4b, 0x63, 0xa9, 0x2f, 0xdf, 0xd0, 0xae, 0x49, 0x46, 0xd5, 0x64, 0x7d, 0x09, 0x80, + 0xa9, 0x67, 0xe8, 0xab, 0xdf, 0x00, 0xfd, 0x02, 0xa6, 0x9e, 0x26, 0xdf, 0x06, 0x50, 0xd6, 0x77, + 0x59, 0x1a, 0xeb, 0x33, 0xe8, 0xd8, 0xa5, 0xbe, 0xbb, 0xa5, 0x97, 0x5a, 0x8a, 0x0b, 0x1a, 0xe1, + 0xb4, 0xc2, 0x30, 0x89, 0x9c, 0x2d, 0x46, 0xce, 0x61, 0xea, 0x49, 0xdc, 0x90, 0x2f, 0x5c, 0xfb, + 0xa6, 0x7c, 0xe1, 0x6b, 0x58, 0x6c, 0xfa, 0xb8, 0x8d, 0xa8, 0xaf, 0x56, 0xe6, 0x70, 0xc0, 0x09, + 0x8a, 0xa6, 0xcc, 0x40, 0x33, 0x0f, 0xc8, 0x37, 0xe8, 0xe5, 0x7f, 0x01, 0xb0, 0x2f, 0xdf, 0x60, + 0xf4, 0xfd, 0x58, 0x9e, 0xff, 0xc3, 0x02, 0x7e, 0x05, 0xcc, 0xd8, 0xfb, 0x9f, 0x9a, 0x9b, 0xc0, + 0x8d, 0x96, 0x7c, 0x6f, 0xe9, 0x0f, 0x60, 0x5a, 0x2f, 0xce, 0xd0, 0x39, 0x2f, 0x1e, 0x68, 0xd3, + 0xaa, 0x62, 0xb0, 0x0e, 0xf5, 0x23, 0xc2, 0x91, 0x7c, 0x43, 0x90, 0xda, 0x5e, 0x0e, 0x68, 0x7b, + 0x54, 0x14, 0xdc, 0x07, 0x08, 0xfa, 0x57, 0x66, 0x23, 0xcd, 0x5a, 0xe7, 0xa7, 0xb0, 0xf2, 0x05, + 0x4b, 0x8f, 0xd3, 0x53, 0x7c, 0x42, 0x3c, 0xcc, 0xa4, 0xc6, 0xef, 0xc1, 0xfc, 0x99, 0xf8, 0x70, + 0xb2, 0x02, 0xeb, 0xf2, 0x34, 0x34, 0x27, 0xa5, 0x5b, 0x9e, 0xfd, 0x19, 0x58, 0x9a, 0x2c, 0x5f, + 0xb3, 0x7c, 0x04, 0x60, 0x8a, 0x86, 0x92, 0x84, 0x0b, 0x5a, 0xbe, 0xe5, 0xd9, 0x2d, 0x58, 0x7c, + 0x96, 0xe0, 0x58, 0x24, 0x1f, 0x49, 0xf6, 0x7d, 0x58, 0x48, 0x13, 0x1c, 0xcb, 0x03, 0x55, 0x29, + 0xae, 0xf9, 0x54, 0xc3, 0xed, 0x7f, 0x56, 0x61, 0xe9, 0xd0, 0xbc, 0x02, 0x48, 0xb2, 0x16, 0x2c, + 0xf8, 0x98, 0x39, 0x11, 0x23, 0xd9, 0xfd, 0xe9, 0x77, 0x8a, 0x0e, 0x64, 0xfb, 0x98, 0x1d, 0x0a, + 0x79, 0x79, 0xe7, 0x35, 0xef, 0xeb, 0x2f, 0xeb, 0x1e, 0x5c, 0x57, 0xef, 0x0a, 0x13, 0x6b, 0xcb, + 0x3d, 0x96, 0x9e, 0x06, 0xea, 0x8a, 0xaf, 0xad, 0x65, 0x2d, 0x06, 0x8b, 0xf9, 0xd7, 0x08, 0x5d, + 0x5d, 0x16, 0x5f, 0x8c, 0xea, 0x41, 0xb4, 0x25, 0xf6, 0x99, 0x80, 0x9a, 0x24, 0x3e, 0xd2, 0xd1, + 0xae, 0xc5, 0xfd, 0x0f, 0xeb, 0x21, 0xcc, 0x21, 0xcf, 0x8b, 0x71, 0x92, 0xe8, 0xd0, 0x50, 0x78, + 0x4b, 0xd6, 0x54, 0xe2, 0x72, 0xb8, 0x06, 0x6b, 0xff, 0xad, 0x02, 0x8b, 0xf9, 0x89, 0xb0, 0x3e, + 0x87, 0xf5, 0x80, 0x51, 0x9f, 0xf0, 0xd4, 0xc3, 0x0e, 0xa1, 0x4e, 0x48, 0xdc, 0x98, 0x39, 0x1e, + 0xf6, 0x63, 0x8c, 0x2f, 0x4d, 0x73, 0x26, 0x0a, 0xbd, 0x91, 0xc1, 0x5b, 0xf4, 0xb1, 0x00, 0xef, + 0x29, 0xac, 0x75, 0x02, 0xb7, 0x03, 0xc4, 0x27, 0x10, 0x57, 0x8b, 0x89, 0xd7, 0x0c, 0x7a, 0x90, + 0xd7, 0xfe, 0xf7, 0x0c, 0xd4, 0x72, 0x63, 0xb3, 0x3e, 0x86, 0x5a, 0xc4, 0x12, 0x8e, 0x02, 0x75, + 0x42, 0x28, 0xe3, 0x5a, 0xa0, 0x00, 0xf2, 0x80, 0xd0, 0x84, 0xa5, 0x28, 0x66, 0x67, 0x84, 0xba, + 0xb8, 0xfc, 0x11, 0x63, 0xd1, 0x40, 0x24, 0xc5, 0xf0, 0x21, 0x65, 0x66, 0xda, 0x43, 0x4a, 0xde, + 0x06, 0xf9, 0xc6, 0x30, 0x3b, 0x8d, 0x0d, 0x4f, 0x50, 0x88, 0xc5, 0xf1, 0x33, 0xe1, 0x31, 0xc6, + 0xdc, 0x31, 0x6e, 0x72, 0xad, 0xcc, 0xf1, 0x53, 0x61, 0xf4, 0x74, 0xaa, 0xb3, 0x71, 0x9e, 0x64, + 0x4b, 0x5f, 0x1a, 0x16, 0x9e, 0x8d, 0x73, 0x2c, 0x5b, 0x62, 0xab, 0xcb, 0xa3, 0xb0, 0x1c, 0xca, + 0x5c, 0x99, 0xad, 0x2e, 0xc4, 0xc5, 0x30, 0x44, 0x59, 0xba, 0x70, 0xcc, 0x22, 0xe2, 0x9a, 0x33, + 0x35, 0x17, 0x1f, 0xd3, 0xbd, 0x95, 0x2c, 0x49, 0x8c, 0x79, 0x27, 0xb1, 0xee, 0xc0, 0x6c, 0x84, + 0x78, 0x57, 0x5f, 0x91, 0x17, 0xbc, 0xdb, 0x08, 
0x49, 0xfb, 0x0b, 0x58, 0x3c, 0x40, 0xd4, 0x4f, + 0x91, 0x4e, 0x1b, 0x2d, 0xb8, 0x19, 0xe8, 0xef, 0xe9, 0x2c, 0x59, 0x31, 0xb0, 0xec, 0xd1, 0xe6, + 0x13, 0xa8, 0xb5, 0xa2, 0x9d, 0x80, 0x89, 0x52, 0x4e, 0x45, 0x58, 0x12, 0x65, 0x2b, 0x56, 0x2a, + 0xc2, 0x92, 0x48, 0xcf, 0xb3, 0x1d, 0xc0, 0x8a, 0x7e, 0x47, 0x39, 0x40, 0xa7, 0x3a, 0x64, 0x4f, + 0xf7, 0x62, 0x92, 0x87, 0x67, 0x19, 0x67, 0xb8, 0x51, 0xe7, 0x9b, 0x13, 0xa8, 0xed, 0xa2, 0x38, + 0x26, 0x3a, 0x57, 0xef, 0xc3, 0x8a, 0xab, 0x3e, 0xa7, 0x9b, 0x92, 0x1b, 0x1a, 0x95, 0xcd, 0x48, + 0x07, 0x56, 0x44, 0x9e, 0x90, 0xe5, 0x21, 0xd6, 0xb9, 0xa2, 0x0d, 0x6b, 0x32, 0x57, 0x10, 0xdd, + 0xd8, 0xbf, 0xed, 0x2d, 0xa3, 0x62, 0x35, 0xcd, 0xf1, 0x99, 0x23, 0xba, 0xfd, 0xa7, 0x0a, 0xd4, + 0x9e, 0xab, 0xf7, 0xdc, 0xec, 0xbe, 0xc6, 0xdc, 0xbb, 0x28, 0x4f, 0x2d, 0x77, 0x5f, 0x63, 0x30, + 0x72, 0xd7, 0x1d, 0x03, 0x64, 0xc7, 0xcb, 0xd2, 0x8f, 0x30, 0xda, 0x8a, 0x81, 0xf3, 0x6f, 0x3b, + 0xc7, 0x63, 0xff, 0xb5, 0x0a, 0xab, 0xe3, 0x84, 0xac, 0x00, 0xe6, 0xf4, 0xfb, 0xb5, 0x5e, 0xe0, + 0x76, 0xc1, 0x02, 0x0f, 0xb3, 0x3c, 0x55, 0x68, 0xb9, 0xce, 0x13, 0xfa, 0xda, 0x46, 0x85, 0x15, + 0xc1, 0xbc, 0x79, 0xee, 0xd6, 0x55, 0xf2, 0xf1, 0x55, 0xd4, 0x71, 0x16, 0x4f, 0xd6, 0xc7, 0x59, + 0xdc, 0xce, 0xb4, 0x58, 0x1f, 0xc2, 0x3c, 0x8a, 0xfd, 0x34, 0xc4, 0x94, 0x97, 0x0a, 0xa2, 0x99, + 0xb4, 0xfd, 0xab, 0x0a, 0xac, 0x2b, 0x42, 0x42, 0xfd, 0xa3, 0x5e, 0xc2, 0x71, 0x78, 0x82, 0xe3, + 0xec, 0xf5, 0xca, 0x87, 0x6f, 0x31, 0xd3, 0xeb, 0x24, 0xb2, 0xdb, 0x39, 0x53, 0xfd, 0xd3, 0xb9, + 0xef, 0x3b, 0x6c, 0xac, 0x92, 0xcc, 0x9b, 0x53, 0xb8, 0xd5, 0x8c, 0xa2, 0x43, 0xf5, 0x4f, 0x52, + 0x8f, 0x99, 0xa7, 0xb7, 0xe5, 0x4f, 0x06, 0xb6, 0xe5, 0x27, 0x45, 0x95, 0xf3, 0x20, 0x43, 0xbf, + 0x80, 0x1e, 0x6d, 0xd7, 0x9b, 0xb3, 0x03, 0x2b, 0xea, 0xa5, 0x3b, 0xf7, 0x22, 0xdc, 0x86, 0x35, + 0xfd, 0xb8, 0xae, 0xfe, 0x25, 0x6a, 0xba, 0x81, 0xae, 0x86, 0x39, 0xbe, 0x6c, 0x78, 0x5f, 0x82, + 0xa5, 0x6f, 0xfe, 0x3a, 0x1d, 0x42, 0x4d, 0x35, 0xf6, 0x10, 0x6e, 0x98, 0xe7, 0x20, 0xdd, 0x5c, + 0xee, 0x4a, 0xd5, 0x1d, 0xa0, 0xb2, 0x9f, 0xc1, 0x8a, 0x22, 0x6f, 0xf5, 0x1f, 0x9e, 0x9b, 0xb0, + 0xa4, 0xa9, 0x89, 0x6c, 0x2c, 0x45, 0xbc, 0xe8, 0xe6, 0x68, 0xec, 0x7f, 0x54, 0xe1, 0xa6, 0x79, + 0x86, 0xef, 0x3f, 0x2d, 0xdf, 0x81, 0xd9, 0x0e, 0xc6, 0xe5, 0xaa, 0x5a, 0x29, 0x69, 0x3d, 0x81, + 0xd5, 0x31, 0xaf, 0xf7, 0x49, 0xa9, 0xbc, 0x62, 0x8d, 0x3c, 0xdf, 0x27, 0xd6, 0xdd, 0xac, 0xf0, + 0x9c, 0x29, 0xbe, 0x10, 0x30, 0x75, 0xe7, 0xf9, 0x50, 0xdd, 0x39, 0x5b, 0x6a, 0x5f, 0x0e, 0x0c, + 0x7f, 0xb8, 0xf6, 0x9c, 0xd4, 0x39, 0x50, 0x7f, 0xee, 0xfc, 0xa7, 0x02, 0xb6, 0xcb, 0xc2, 0x82, + 0xd8, 0xb6, 0xb3, 0xa4, 0xef, 0xb2, 0xd1, 0xa1, 0x18, 0xc4, 0x61, 0xe5, 0xc5, 0x9e, 0x06, 0xf8, + 0x4c, 0xe4, 0xc2, 0x06, 0x8b, 0xfd, 0x0d, 0x1f, 0x53, 0x39, 0x44, 0xf3, 0x1f, 0x37, 0x11, 0x49, + 0x26, 0xfd, 0xd3, 0xe3, 0x47, 0xea, 0xcf, 0x1f, 0xaa, 0x33, 0xfb, 0xcd, 0xe6, 0x5f, 0xaa, 0xef, + 0xec, 0x2b, 0xb2, 0xa6, 0x97, 0x34, 0xd4, 0x4f, 0xf1, 0xeb, 0x64, 0xb3, 0xb1, 0x2b, 0xc5, 0xfe, + 0x65, 0x04, 0x5e, 0x36, 0xbd, 0xe4, 0x65, 0x26, 0xf0, 0xf2, 0x64, 0xf3, 0xa5, 0x12, 0xf8, 0x6f, + 0xd5, 0x56, 0xad, 0xdb, 0xdb, 0x4d, 0x2f, 0xd9, 0xde, 0xce, 0x44, 0xb6, 0xb7, 0x4f, 0x36, 0xb7, + 0xb7, 0x95, 0xd0, 0xe9, 0x75, 0x69, 0xdd, 0xdd, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0x7e, 0x9f, + 0x0b, 0x30, 0x91, 0x29, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criterion_category_availability.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criterion_category_availability.pb.go 
new file mode 100644 index 0000000..6b82dbd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/criterion_category_availability.pb.go @@ -0,0 +1,261 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/criterion_category_availability.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Information of category availability, per advertising channel. +type CriterionCategoryAvailability struct { + // Channel types and subtypes that are available to the category. + Channel *CriterionCategoryChannelAvailability `protobuf:"bytes,1,opt,name=channel,proto3" json:"channel,omitempty"` + // Locales that are available to the category for the channel. + Locale []*CriterionCategoryLocaleAvailability `protobuf:"bytes,2,rep,name=locale,proto3" json:"locale,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionCategoryAvailability) Reset() { *m = CriterionCategoryAvailability{} } +func (m *CriterionCategoryAvailability) String() string { return proto.CompactTextString(m) } +func (*CriterionCategoryAvailability) ProtoMessage() {} +func (*CriterionCategoryAvailability) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_availability_1772a6f75bfda39e, []int{0} +} +func (m *CriterionCategoryAvailability) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionCategoryAvailability.Unmarshal(m, b) +} +func (m *CriterionCategoryAvailability) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionCategoryAvailability.Marshal(b, m, deterministic) +} +func (dst *CriterionCategoryAvailability) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionCategoryAvailability.Merge(dst, src) +} +func (m *CriterionCategoryAvailability) XXX_Size() int { + return xxx_messageInfo_CriterionCategoryAvailability.Size(m) +} +func (m *CriterionCategoryAvailability) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionCategoryAvailability.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionCategoryAvailability proto.InternalMessageInfo + +func (m *CriterionCategoryAvailability) GetChannel() *CriterionCategoryChannelAvailability { + if m != nil { + return m.Channel + } + return nil +} + +func (m *CriterionCategoryAvailability) GetLocale() []*CriterionCategoryLocaleAvailability { + if m != nil { + return m.Locale + } + return nil +} + +// Information of advertising channel type and subtypes a category is available +// in. +type CriterionCategoryChannelAvailability struct { + // Format of the channel availability. 
Can be ALL_CHANNELS (the rest of the + // fields will not be set), CHANNEL_TYPE (only advertising_channel_type type + // will be set, the category is available to all sub types under it) or + // CHANNEL_TYPE_AND_SUBTYPES (advertising_channel_type, + // advertising_channel_sub_type, and include_default_channel_sub_type will all + // be set). + AvailabilityMode enums.CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode `protobuf:"varint,1,opt,name=availability_mode,json=availabilityMode,proto3,enum=google.ads.googleads.v1.enums.CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode" json:"availability_mode,omitempty"` + // Channel type the category is available to. + AdvertisingChannelType enums.AdvertisingChannelTypeEnum_AdvertisingChannelType `protobuf:"varint,2,opt,name=advertising_channel_type,json=advertisingChannelType,proto3,enum=google.ads.googleads.v1.enums.AdvertisingChannelTypeEnum_AdvertisingChannelType" json:"advertising_channel_type,omitempty"` + // Channel subtypes under the channel type the category is available to. + AdvertisingChannelSubType []enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType `protobuf:"varint,3,rep,packed,name=advertising_channel_sub_type,json=advertisingChannelSubType,proto3,enum=google.ads.googleads.v1.enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType" json:"advertising_channel_sub_type,omitempty"` + // Whether default channel sub type is included. For example, + // advertising_channel_type being DISPLAY and include_default_channel_sub_type + // being false means that the default display campaign where channel sub type + // is not set is not included in this availability configuration. + IncludeDefaultChannelSubType *wrappers.BoolValue `protobuf:"bytes,4,opt,name=include_default_channel_sub_type,json=includeDefaultChannelSubType,proto3" json:"include_default_channel_sub_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionCategoryChannelAvailability) Reset() { *m = CriterionCategoryChannelAvailability{} } +func (m *CriterionCategoryChannelAvailability) String() string { return proto.CompactTextString(m) } +func (*CriterionCategoryChannelAvailability) ProtoMessage() {} +func (*CriterionCategoryChannelAvailability) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_availability_1772a6f75bfda39e, []int{1} +} +func (m *CriterionCategoryChannelAvailability) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionCategoryChannelAvailability.Unmarshal(m, b) +} +func (m *CriterionCategoryChannelAvailability) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionCategoryChannelAvailability.Marshal(b, m, deterministic) +} +func (dst *CriterionCategoryChannelAvailability) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionCategoryChannelAvailability.Merge(dst, src) +} +func (m *CriterionCategoryChannelAvailability) XXX_Size() int { + return xxx_messageInfo_CriterionCategoryChannelAvailability.Size(m) +} +func (m *CriterionCategoryChannelAvailability) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionCategoryChannelAvailability.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionCategoryChannelAvailability proto.InternalMessageInfo + +func (m *CriterionCategoryChannelAvailability) GetAvailabilityMode() enums.CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode { + if m 
!= nil { + return m.AvailabilityMode + } + return enums.CriterionCategoryChannelAvailabilityModeEnum_UNSPECIFIED +} + +func (m *CriterionCategoryChannelAvailability) GetAdvertisingChannelType() enums.AdvertisingChannelTypeEnum_AdvertisingChannelType { + if m != nil { + return m.AdvertisingChannelType + } + return enums.AdvertisingChannelTypeEnum_UNSPECIFIED +} + +func (m *CriterionCategoryChannelAvailability) GetAdvertisingChannelSubType() []enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType { + if m != nil { + return m.AdvertisingChannelSubType + } + return nil +} + +func (m *CriterionCategoryChannelAvailability) GetIncludeDefaultChannelSubType() *wrappers.BoolValue { + if m != nil { + return m.IncludeDefaultChannelSubType + } + return nil +} + +// Information about which locales a category is available in. +type CriterionCategoryLocaleAvailability struct { + // Format of the locale availability. Can be LAUNCHED_TO_ALL (both country and + // language will be empty), COUNTRY (only country will be set), LANGUAGE (only + // language wil be set), COUNTRY_AND_LANGUAGE (both country and language will + // be set). + AvailabilityMode enums.CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode `protobuf:"varint,1,opt,name=availability_mode,json=availabilityMode,proto3,enum=google.ads.googleads.v1.enums.CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode" json:"availability_mode,omitempty"` + // Code of the country. + CountryCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Code of the language. + LanguageCode *wrappers.StringValue `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionCategoryLocaleAvailability) Reset() { *m = CriterionCategoryLocaleAvailability{} } +func (m *CriterionCategoryLocaleAvailability) String() string { return proto.CompactTextString(m) } +func (*CriterionCategoryLocaleAvailability) ProtoMessage() {} +func (*CriterionCategoryLocaleAvailability) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_availability_1772a6f75bfda39e, []int{2} +} +func (m *CriterionCategoryLocaleAvailability) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionCategoryLocaleAvailability.Unmarshal(m, b) +} +func (m *CriterionCategoryLocaleAvailability) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionCategoryLocaleAvailability.Marshal(b, m, deterministic) +} +func (dst *CriterionCategoryLocaleAvailability) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionCategoryLocaleAvailability.Merge(dst, src) +} +func (m *CriterionCategoryLocaleAvailability) XXX_Size() int { + return xxx_messageInfo_CriterionCategoryLocaleAvailability.Size(m) +} +func (m *CriterionCategoryLocaleAvailability) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionCategoryLocaleAvailability.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionCategoryLocaleAvailability proto.InternalMessageInfo + +func (m *CriterionCategoryLocaleAvailability) GetAvailabilityMode() enums.CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode { + if m != nil { + return m.AvailabilityMode + } + return enums.CriterionCategoryLocaleAvailabilityModeEnum_UNSPECIFIED +} + +func (m 
*CriterionCategoryLocaleAvailability) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *CriterionCategoryLocaleAvailability) GetLanguageCode() *wrappers.StringValue { + if m != nil { + return m.LanguageCode + } + return nil +} + +func init() { + proto.RegisterType((*CriterionCategoryAvailability)(nil), "google.ads.googleads.v1.common.CriterionCategoryAvailability") + proto.RegisterType((*CriterionCategoryChannelAvailability)(nil), "google.ads.googleads.v1.common.CriterionCategoryChannelAvailability") + proto.RegisterType((*CriterionCategoryLocaleAvailability)(nil), "google.ads.googleads.v1.common.CriterionCategoryLocaleAvailability") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/criterion_category_availability.proto", fileDescriptor_criterion_category_availability_1772a6f75bfda39e) +} + +var fileDescriptor_criterion_category_availability_1772a6f75bfda39e = []byte{ + // 609 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x95, 0x41, 0x8e, 0xd3, 0x30, + 0x14, 0x86, 0x95, 0x74, 0x34, 0x48, 0xee, 0x30, 0x82, 0x2c, 0x50, 0xa9, 0xca, 0xa8, 0x0a, 0x2c, + 0xba, 0x72, 0xd4, 0xb2, 0x0b, 0x48, 0x90, 0xa6, 0x68, 0x36, 0x20, 0xaa, 0x0e, 0xea, 0x02, 0x2a, + 0x22, 0x27, 0xf1, 0x04, 0x4b, 0x8e, 0x1d, 0x25, 0x4e, 0x51, 0xaf, 0xc0, 0x05, 0x58, 0x22, 0x58, + 0x72, 0x14, 0x58, 0xb3, 0xe3, 0x04, 0xdc, 0x01, 0x09, 0x25, 0x76, 0x4a, 0x67, 0xda, 0x64, 0xda, + 0x55, 0xdd, 0xf8, 0xbd, 0xef, 0xff, 0xdf, 0xcb, 0xb3, 0x03, 0x26, 0x11, 0xe7, 0x11, 0xc5, 0x16, + 0x0a, 0x33, 0x4b, 0x2e, 0x8b, 0xd5, 0x72, 0x68, 0x05, 0x3c, 0x8e, 0x39, 0xb3, 0x82, 0x94, 0x08, + 0x9c, 0x12, 0xce, 0xbc, 0x00, 0x09, 0x1c, 0xf1, 0x74, 0xe5, 0xa1, 0x25, 0x22, 0x14, 0xf9, 0x84, + 0x12, 0xb1, 0x82, 0x49, 0xca, 0x05, 0x37, 0xce, 0x64, 0x2a, 0x44, 0x61, 0x06, 0xd7, 0x14, 0xb8, + 0x1c, 0x42, 0x49, 0xe9, 0x3e, 0xaf, 0x53, 0xc1, 0x2c, 0x8f, 0x33, 0x0b, 0x85, 0x4b, 0x9c, 0x0a, + 0x92, 0x11, 0x16, 0x79, 0xc1, 0x07, 0xc4, 0x18, 0xa6, 0x5e, 0x96, 0xfb, 0x9e, 0x58, 0x25, 0x58, + 0x2a, 0x74, 0x9f, 0x1e, 0x4e, 0xd8, 0xc8, 0x9e, 0x36, 0x67, 0xef, 0x28, 0xb2, 0x82, 0x6c, 0x16, + 0xeb, 0xc5, 0x3c, 0xac, 0x88, 0xaf, 0x0f, 0x26, 0x52, 0x1e, 0x20, 0x8a, 0x6b, 0x81, 0xaa, 0x85, + 0x56, 0xf9, 0xcf, 0xcf, 0x2f, 0xad, 0x8f, 0x29, 0x4a, 0x12, 0x9c, 0x66, 0x6a, 0xbf, 0x57, 0x09, + 0x26, 0xc4, 0x42, 0x8c, 0x71, 0x81, 0x04, 0xe1, 0x4c, 0xed, 0x9a, 0xbf, 0x35, 0xf0, 0xc0, 0xad, + 0x34, 0x5d, 0x25, 0xe9, 0x6c, 0x48, 0x19, 0xef, 0xc1, 0x2d, 0x55, 0x53, 0x47, 0xeb, 0x6b, 0x83, + 0xf6, 0x68, 0x02, 0x9b, 0x5f, 0x1a, 0xdc, 0xe2, 0xb9, 0x32, 0x7f, 0x13, 0x3b, 0xab, 0xa0, 0xc6, + 0x3b, 0x70, 0x2c, 0x2b, 0xec, 0xe8, 0xfd, 0xd6, 0xa0, 0x3d, 0x72, 0x0f, 0xc6, 0xbf, 0x2c, 0xd3, + 0xaf, 0xd0, 0x15, 0xd2, 0xfc, 0x75, 0x04, 0x1e, 0xed, 0x63, 0xc7, 0xf8, 0xaa, 0x81, 0xbb, 0x5b, + 0x1d, 0x2e, 0x0b, 0x3e, 0x1d, 0x89, 0x5a, 0x47, 0xe5, 0x3b, 0xdb, 0xab, 0xde, 0x57, 0x3c, 0xc4, + 0x2f, 0x58, 0x1e, 0xef, 0x1d, 0x3c, 0xbb, 0x83, 0xae, 0x3d, 0x31, 0x3e, 0x69, 0xa0, 0x53, 0x37, + 0xaf, 0x1d, 0xbd, 0xb4, 0x3a, 0xbd, 0xc1, 0xaa, 0xf3, 0x3f, 0x5d, 0xe9, 0xbe, 0x59, 0x25, 0xd2, + 0xd8, 0xee, 0xad, 0xd9, 0x3d, 0xb4, 0xf3, 0xb9, 0xf1, 0x59, 0x03, 0xbd, 0xa6, 0xe3, 0xd7, 0x69, + 0xf5, 0x5b, 0x83, 0xd3, 0xd1, 0xfc, 0x60, 0x43, 0x17, 0xb9, 0xdf, 0xe0, 0x49, 0xed, 0xce, 0xee, + 0xa3, 0xba, 0x2d, 0xc3, 0x07, 0x7d, 0xc2, 0x02, 0x9a, 0x87, 0xd8, 0x0b, 0xf1, 0x25, 0xca, 0xa9, + 0xd8, 0x36, 0x77, 0x54, 0x4e, 0x72, 0xb7, 0x32, 0x57, 0x9d, 0x1d, 0x38, 0xe6, 0x9c, 
0xce, 0x11, + 0xcd, 0xf1, 0xac, 0xa7, 0x18, 0x13, 0x89, 0xb8, 0xaa, 0x61, 0xfe, 0xd4, 0xc1, 0xc3, 0x3d, 0xe6, + 0xd0, 0xf8, 0xd2, 0x30, 0x56, 0xe9, 0xa1, 0x63, 0xb5, 0xcd, 0xaf, 0x9f, 0xaa, 0xdd, 0xb1, 0x3b, + 0x86, 0xea, 0x19, 0x38, 0x09, 0x78, 0xce, 0x44, 0x71, 0x75, 0x15, 0xde, 0xf4, 0xb2, 0x33, 0xbd, + 0xad, 0xce, 0x5c, 0x88, 0x94, 0xb0, 0x48, 0xf6, 0xa6, 0xad, 0x32, 0xdc, 0x02, 0xe0, 0x80, 0xdb, + 0x14, 0xb1, 0x28, 0x47, 0x11, 0x96, 0x84, 0xd6, 0x1e, 0x84, 0x93, 0x2a, 0xa5, 0x40, 0x8c, 0xff, + 0x6a, 0xc0, 0x0c, 0x78, 0x7c, 0xc3, 0xc1, 0x1f, 0x9b, 0x8d, 0x17, 0xd5, 0xb4, 0xd0, 0x99, 0x6a, + 0x6f, 0xd5, 0x87, 0x09, 0x46, 0xbc, 0xd0, 0x80, 0x3c, 0x8d, 0xac, 0x08, 0xb3, 0xd2, 0x45, 0x75, + 0xe1, 0x26, 0x24, 0xab, 0xfb, 0x6e, 0x3d, 0x91, 0x3f, 0xdf, 0xf4, 0xd6, 0xb9, 0xe3, 0x7c, 0xd7, + 0xcf, 0xce, 0x25, 0xcc, 0x09, 0x33, 0x28, 0x97, 0xc5, 0x6a, 0x3e, 0x84, 0x6e, 0x19, 0xf6, 0xa3, + 0x0a, 0x58, 0x38, 0x61, 0xb6, 0x58, 0x07, 0x2c, 0xe6, 0xc3, 0x85, 0x0c, 0xf8, 0xa3, 0x9b, 0xf2, + 0xa9, 0x6d, 0x3b, 0x61, 0x66, 0xdb, 0xeb, 0x10, 0xdb, 0x9e, 0x0f, 0x6d, 0x5b, 0x06, 0xf9, 0xc7, + 0xa5, 0xbb, 0xc7, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x59, 0x0b, 0xe4, 0x54, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/custom_parameter.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/custom_parameter.pb.go new file mode 100644 index 0000000..21d85b8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/custom_parameter.pb.go @@ -0,0 +1,102 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/custom_parameter.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A mapping that can be used by custom parameter tags in a +// `tracking_url_template`, `final_urls`, or `mobile_final_urls`. +type CustomParameter struct { + // The key matching the parameter tag name. + Key *wrappers.StringValue `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value to be substituted. 
+ Value *wrappers.StringValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomParameter) Reset() { *m = CustomParameter{} } +func (m *CustomParameter) String() string { return proto.CompactTextString(m) } +func (*CustomParameter) ProtoMessage() {} +func (*CustomParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_parameter_e9102b323b1b6917, []int{0} +} +func (m *CustomParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomParameter.Unmarshal(m, b) +} +func (m *CustomParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomParameter.Marshal(b, m, deterministic) +} +func (dst *CustomParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomParameter.Merge(dst, src) +} +func (m *CustomParameter) XXX_Size() int { + return xxx_messageInfo_CustomParameter.Size(m) +} +func (m *CustomParameter) XXX_DiscardUnknown() { + xxx_messageInfo_CustomParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomParameter proto.InternalMessageInfo + +func (m *CustomParameter) GetKey() *wrappers.StringValue { + if m != nil { + return m.Key + } + return nil +} + +func (m *CustomParameter) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*CustomParameter)(nil), "google.ads.googleads.v1.common.CustomParameter") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/custom_parameter.proto", fileDescriptor_custom_parameter_e9102b323b1b6917) +} + +var fileDescriptor_custom_parameter_e9102b323b1b6917 = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xc1, 0x4a, 0xfc, 0x30, + 0x10, 0xc6, 0x69, 0x97, 0xff, 0xff, 0x50, 0x0f, 0x42, 0xf1, 0xb0, 0x2c, 0xcb, 0x22, 0x3d, 0x79, + 0x9a, 0xd0, 0x15, 0x2f, 0xf1, 0xd4, 0x5d, 0x61, 0xaf, 0x45, 0xa1, 0x07, 0x29, 0x48, 0xb6, 0x8d, + 0xa1, 0xd8, 0x66, 0x42, 0x92, 0x56, 0x7c, 0x1d, 0x8f, 0x3e, 0x8a, 0x8f, 0xe2, 0xc5, 0x57, 0x90, + 0x36, 0x6d, 0x0f, 0x82, 0xe2, 0x29, 0x5f, 0x32, 0xbf, 0x6f, 0xbe, 0xc9, 0x04, 0x57, 0x02, 0x51, + 0xd4, 0x9c, 0xb0, 0xd2, 0x10, 0x27, 0x7b, 0xd5, 0xc5, 0xa4, 0xc0, 0xa6, 0x41, 0x49, 0x8a, 0xd6, + 0x58, 0x6c, 0x1e, 0x14, 0xd3, 0xac, 0xe1, 0x96, 0x6b, 0x50, 0x1a, 0x2d, 0x86, 0x1b, 0xc7, 0x02, + 0x2b, 0x0d, 0xcc, 0x36, 0xe8, 0x62, 0x70, 0xb6, 0xd5, 0x58, 0x27, 0x03, 0x7d, 0x6c, 0x1f, 0xc9, + 0xb3, 0x66, 0x4a, 0x71, 0x6d, 0x9c, 0x7f, 0xb5, 0x9e, 0x62, 0x55, 0x45, 0x98, 0x94, 0x68, 0x99, + 0xad, 0x50, 0x8e, 0xd5, 0xa8, 0x0d, 0x4e, 0xf7, 0x43, 0x6e, 0x3a, 0xc5, 0x86, 0x10, 0x2c, 0x9e, + 0xf8, 0xcb, 0xd2, 0x3b, 0xf7, 0x2e, 0x4e, 0xb6, 0xeb, 0x31, 0x13, 0xa6, 0xf6, 0x70, 0x67, 0x75, + 0x25, 0x45, 0xc6, 0xea, 0x96, 0xdf, 0xf6, 0x60, 0xb8, 0x0d, 0xfe, 0x75, 0xfd, 0x6d, 0xe9, 0xff, + 0xc1, 0xe1, 0xd0, 0xdd, 0xa7, 0x17, 0x44, 0x05, 0x36, 0xf0, 0xfb, 0xdf, 0x76, 0x67, 0xdf, 0x66, + 0x4b, 0xfb, 0x96, 0xa9, 0x77, 0x7f, 0x33, 0xfa, 0x04, 0xd6, 0x4c, 0x0a, 0x40, 0x2d, 0x88, 0xe0, + 0x72, 0x08, 0x9c, 0x56, 0xab, 0x2a, 0xf3, 0xd3, 0xa6, 0xaf, 0xdd, 0xf1, 0xea, 0x2f, 0x0e, 0x49, + 0xf2, 0xe6, 0x6f, 0x0e, 0xae, 0x59, 0x52, 0x1a, 0x70, 0xb2, 0x57, 0x59, 0x0c, 0xfb, 0x01, 0x7b, + 0x9f, 0x80, 0x3c, 0x29, 0x4d, 0x3e, 0x03, 0x79, 0x16, 0xe7, 0x0e, 0xf8, 0xf0, 0x23, 0xf7, 0x4a, + 0x69, 0x52, 0x1a, 0x4a, 0x67, 0x84, 0xd2, 0x2c, 0xa6, 0xd4, 0x41, 0xc7, 0xff, 0xc3, 0x74, 
0x97, + 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x39, 0x90, 0x40, 0x9c, 0x06, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/dates.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/dates.pb.go new file mode 100644 index 0000000..c89c43c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/dates.pb.go @@ -0,0 +1,101 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/dates.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A date range. +type DateRange struct { + // The start date, in yyyy-mm-dd format. + StartDate *wrappers.StringValue `protobuf:"bytes,1,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // The end date, in yyyy-mm-dd format. + EndDate *wrappers.StringValue `protobuf:"bytes,2,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateRange) Reset() { *m = DateRange{} } +func (m *DateRange) String() string { return proto.CompactTextString(m) } +func (*DateRange) ProtoMessage() {} +func (*DateRange) Descriptor() ([]byte, []int) { + return fileDescriptor_dates_66330ea013cf984e, []int{0} +} +func (m *DateRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateRange.Unmarshal(m, b) +} +func (m *DateRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateRange.Marshal(b, m, deterministic) +} +func (dst *DateRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateRange.Merge(dst, src) +} +func (m *DateRange) XXX_Size() int { + return xxx_messageInfo_DateRange.Size(m) +} +func (m *DateRange) XXX_DiscardUnknown() { + xxx_messageInfo_DateRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DateRange proto.InternalMessageInfo + +func (m *DateRange) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *DateRange) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +func init() { + proto.RegisterType((*DateRange)(nil), "google.ads.googleads.v1.common.DateRange") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/dates.proto", fileDescriptor_dates_66330ea013cf984e) +} + +var fileDescriptor_dates_66330ea013cf984e = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x41, 0x4b, 0xc3, 0x30, + 0x14, 0xc7, 0x69, 0x05, 0x75, 0xf1, 0xd6, 0x93, 0x8c, 0x31, 0xa4, 0x27, 0xf1, 0x90, 0x50, 0x3d, + 0x08, 0xd9, 0xa9, 0x73, 0xb0, 0xeb, 0x98, 0xd0, 0x83, 0x14, 0xe4, 0x6d, 0x89, 0xa1, 0xd0, 0xe6, + 0x95, 0x24, 0x9b, 0x67, 
0xbf, 0x8a, 0x47, 0x3f, 0x8a, 0xdf, 0xc3, 0x8b, 0x9f, 0x42, 0xd2, 0xb4, + 0xbd, 0x29, 0x9e, 0xfa, 0xa7, 0xef, 0xf7, 0xff, 0xff, 0x5f, 0x1e, 0xb9, 0x51, 0x88, 0xaa, 0x96, + 0x0c, 0x84, 0x65, 0x41, 0x7a, 0x75, 0xcc, 0xd8, 0x1e, 0x9b, 0x06, 0x35, 0x13, 0xe0, 0xa4, 0xa5, + 0xad, 0x41, 0x87, 0xc9, 0x3c, 0x00, 0x14, 0x84, 0xa5, 0x23, 0x4b, 0x8f, 0x19, 0x0d, 0xec, 0xb4, + 0x9f, 0xb3, 0x8e, 0xde, 0x1d, 0x5e, 0xd8, 0xab, 0x81, 0xb6, 0x95, 0xa6, 0xf7, 0x4f, 0x67, 0x43, + 0x57, 0x5b, 0x31, 0xd0, 0x1a, 0x1d, 0xb8, 0x0a, 0x75, 0x3f, 0x4d, 0xdf, 0x22, 0x32, 0x59, 0x81, + 0x93, 0x5b, 0xd0, 0x4a, 0x26, 0x0b, 0x42, 0xac, 0x03, 0xe3, 0x9e, 0xfd, 0x02, 0x97, 0xd1, 0x55, + 0x74, 0x7d, 0x71, 0x3b, 0xeb, 0x5b, 0xe9, 0x50, 0x40, 0x1f, 0x9d, 0xa9, 0xb4, 0x2a, 0xa0, 0x3e, + 0xc8, 0xed, 0xa4, 0xe3, 0x7d, 0x42, 0x72, 0x4f, 0xce, 0xa5, 0x16, 0xc1, 0x1a, 0xff, 0xc3, 0x7a, + 0x26, 0xb5, 0xf0, 0xc6, 0xe5, 0x57, 0x44, 0xd2, 0x3d, 0x36, 0xf4, 0xef, 0x87, 0x2e, 0x89, 0x87, + 0xed, 0xc6, 0x47, 0x6d, 0xa2, 0xa7, 0x55, 0x4f, 0x2b, 0xac, 0x41, 0x2b, 0x8a, 0x46, 0x31, 0x25, + 0x75, 0x57, 0x34, 0x9c, 0xb4, 0xad, 0xec, 0x6f, 0x17, 0x5e, 0x84, 0xcf, 0x7b, 0x7c, 0xb2, 0xce, + 0xf3, 0x8f, 0x78, 0xbe, 0x0e, 0x61, 0xb9, 0xb0, 0x34, 0x48, 0xaf, 0x8a, 0x8c, 0x3e, 0x74, 0xd8, + 0xe7, 0x00, 0x94, 0xb9, 0xb0, 0xe5, 0x08, 0x94, 0x45, 0x56, 0x06, 0xe0, 0x3b, 0x4e, 0xc3, 0x5f, + 0xce, 0x73, 0x61, 0x39, 0x1f, 0x11, 0xce, 0x8b, 0x8c, 0xf3, 0x00, 0xed, 0x4e, 0xbb, 0xed, 0xee, + 0x7e, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa7, 0x8e, 0xf7, 0xb8, 0xfe, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/explorer_auto_optimizer_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/explorer_auto_optimizer_setting.pb.go new file mode 100644 index 0000000..b8d7e29 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/explorer_auto_optimizer_setting.pb.go @@ -0,0 +1,95 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/explorer_auto_optimizer_setting.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Settings for the +// +// Display Campaign Optimizer, initially termed "Explorer". +type ExplorerAutoOptimizerSetting struct { + // Indicates whether the optimizer is turned on. 
+ OptIn *wrappers.BoolValue `protobuf:"bytes,1,opt,name=opt_in,json=optIn,proto3" json:"opt_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplorerAutoOptimizerSetting) Reset() { *m = ExplorerAutoOptimizerSetting{} } +func (m *ExplorerAutoOptimizerSetting) String() string { return proto.CompactTextString(m) } +func (*ExplorerAutoOptimizerSetting) ProtoMessage() {} +func (*ExplorerAutoOptimizerSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_explorer_auto_optimizer_setting_f77a88f13cc516e3, []int{0} +} +func (m *ExplorerAutoOptimizerSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplorerAutoOptimizerSetting.Unmarshal(m, b) +} +func (m *ExplorerAutoOptimizerSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplorerAutoOptimizerSetting.Marshal(b, m, deterministic) +} +func (dst *ExplorerAutoOptimizerSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplorerAutoOptimizerSetting.Merge(dst, src) +} +func (m *ExplorerAutoOptimizerSetting) XXX_Size() int { + return xxx_messageInfo_ExplorerAutoOptimizerSetting.Size(m) +} +func (m *ExplorerAutoOptimizerSetting) XXX_DiscardUnknown() { + xxx_messageInfo_ExplorerAutoOptimizerSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplorerAutoOptimizerSetting proto.InternalMessageInfo + +func (m *ExplorerAutoOptimizerSetting) GetOptIn() *wrappers.BoolValue { + if m != nil { + return m.OptIn + } + return nil +} + +func init() { + proto.RegisterType((*ExplorerAutoOptimizerSetting)(nil), "google.ads.googleads.v1.common.ExplorerAutoOptimizerSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/explorer_auto_optimizer_setting.proto", fileDescriptor_explorer_auto_optimizer_setting_f77a88f13cc516e3) +} + +var fileDescriptor_explorer_auto_optimizer_setting_f77a88f13cc516e3 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x86, 0x95, 0x22, 0x3a, 0x84, 0xad, 0x13, 0xaa, 0xaa, 0x0a, 0x3a, 0x31, 0x9d, 0x65, 0xd8, + 0xcc, 0xe4, 0x02, 0xaa, 0x98, 0x28, 0x20, 0x65, 0x40, 0x91, 0x22, 0xb7, 0x31, 0x96, 0xa5, 0xc4, + 0x67, 0xd9, 0x4e, 0x41, 0x3c, 0x0e, 0x23, 0x8f, 0xc2, 0xa3, 0xf0, 0x0c, 0x0c, 0xa8, 0x71, 0x92, + 0x8d, 0x4e, 0xf9, 0x15, 0x7f, 0xf7, 0xdf, 0xa7, 0x4b, 0x6f, 0x15, 0xa2, 0xaa, 0x24, 0x11, 0xa5, + 0x27, 0x31, 0xee, 0xd3, 0x8e, 0x92, 0x2d, 0xd6, 0x35, 0x1a, 0x22, 0xdf, 0x6d, 0x85, 0x4e, 0xba, + 0x42, 0x34, 0x01, 0x0b, 0xb4, 0x41, 0xd7, 0xfa, 0x43, 0xba, 0xc2, 0xcb, 0x10, 0xb4, 0x51, 0x60, + 0x1d, 0x06, 0x9c, 0xcc, 0xe3, 0x28, 0x88, 0xd2, 0xc3, 0xd0, 0x02, 0x3b, 0x0a, 0xb1, 0x65, 0xda, + 0xbd, 0x93, 0x96, 0xde, 0x34, 0xaf, 0xe4, 0xcd, 0x09, 0x6b, 0xa5, 0xf3, 0x71, 0x7e, 0x3a, 0xeb, + 0x2d, 0xac, 0x26, 0xc2, 0x18, 0x0c, 0x22, 0x68, 0x34, 0xdd, 0xeb, 0xe2, 0x31, 0x9d, 0xdd, 0x75, + 0x1a, 0xbc, 0x09, 0xf8, 0xd0, 0x4b, 0x3c, 0x47, 0x87, 0x09, 0x4d, 0xc7, 0x68, 0x43, 0xa1, 0xcd, + 0x69, 0x72, 0x96, 0x5c, 0x9c, 0x5c, 0x4e, 0x3b, 0x07, 0xe8, 0xd7, 0xc1, 0x12, 0xb1, 0xca, 0x44, + 0xd5, 0xc8, 0xa7, 0x63, 0xb4, 0xe1, 0xde, 0x2c, 0x7f, 0x93, 0x74, 0xb1, 0xc5, 0x1a, 0x0e, 0x7b, + 0x2f, 0xcf, 0x0f, 0xed, 0x5d, 0xef, 0xdb, 0xd7, 0xc9, 0x4b, 0x77, 0x42, 0x50, 0x58, 0x09, 0xa3, + 0x00, 0x9d, 0x22, 0x4a, 0x9a, 0x76, 0x77, 0x7f, 0x52, 0xab, 0xfd, 0x7f, 0x17, 0xbe, 0x8e, 0x9f, + 0xcf, 0xd1, 0xd1, 0x8a, 0xf3, 0xaf, 0xd1, 0x7c, 0x15, 0xcb, 0x78, 0xe9, 0x21, 0xc6, 
0x7d, 0xca, + 0x28, 0xdc, 0xb4, 0xd8, 0x77, 0x0f, 0xe4, 0xbc, 0xf4, 0xf9, 0x00, 0xe4, 0x19, 0xcd, 0x23, 0xf0, + 0x33, 0x5a, 0xc4, 0xbf, 0x8c, 0xf1, 0xd2, 0x33, 0x36, 0x20, 0x8c, 0x65, 0x94, 0xb1, 0x08, 0x6d, + 0xc6, 0xad, 0xdd, 0xd5, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x48, 0x10, 0x73, 0x40, 0xfe, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/extensions.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/extensions.pb.go new file mode 100644 index 0000000..07ee144 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/extensions.pb.go @@ -0,0 +1,1368 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/extensions.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents an App extension. +type AppFeedItem struct { + // The visible text displayed when the link is rendered in an ad. + // The length of this string should be between 1 and 25, inclusive. + LinkText *wrappers.StringValue `protobuf:"bytes,1,opt,name=link_text,json=linkText,proto3" json:"link_text,omitempty"` + // The store-specific ID for the target application. + // This string must not be empty. + AppId *wrappers.StringValue `protobuf:"bytes,2,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // The application store that the target application belongs to. + AppStore enums.AppStoreEnum_AppStore `protobuf:"varint,3,opt,name=app_store,json=appStore,proto3,enum=google.ads.googleads.v1.enums.AppStoreEnum_AppStore" json:"app_store,omitempty"` + // A list of possible final URLs after all cross domain redirects. + FinalUrls []*wrappers.StringValue `protobuf:"bytes,4,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of possible final mobile URLs after all cross domain redirects. + FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,5,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // URL template for constructing a tracking URL. Default value is "{lpurl}". + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,6,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // A list of mappings to be used for substituting URL custom parameter tags in + // the tracking_url_template, final_urls, and/or final_mobile_urls. + UrlCustomParameters []*CustomParameter `protobuf:"bytes,7,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // URL template for appending params to landing page URLs served with parallel + // tracking. 
+ FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,8,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppFeedItem) Reset() { *m = AppFeedItem{} } +func (m *AppFeedItem) String() string { return proto.CompactTextString(m) } +func (*AppFeedItem) ProtoMessage() {} +func (*AppFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{0} +} +func (m *AppFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppFeedItem.Unmarshal(m, b) +} +func (m *AppFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppFeedItem.Marshal(b, m, deterministic) +} +func (dst *AppFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppFeedItem.Merge(dst, src) +} +func (m *AppFeedItem) XXX_Size() int { + return xxx_messageInfo_AppFeedItem.Size(m) +} +func (m *AppFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_AppFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_AppFeedItem proto.InternalMessageInfo + +func (m *AppFeedItem) GetLinkText() *wrappers.StringValue { + if m != nil { + return m.LinkText + } + return nil +} + +func (m *AppFeedItem) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +func (m *AppFeedItem) GetAppStore() enums.AppStoreEnum_AppStore { + if m != nil { + return m.AppStore + } + return enums.AppStoreEnum_UNSPECIFIED +} + +func (m *AppFeedItem) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *AppFeedItem) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *AppFeedItem) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *AppFeedItem) GetUrlCustomParameters() []*CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *AppFeedItem) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +// Represents a Call extension. +type CallFeedItem struct { + // The advertiser's phone number to append to the ad. + // This string must not be empty. + PhoneNumber *wrappers.StringValue `protobuf:"bytes,1,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + // Uppercase two-letter country code of the advertiser's phone number. + // This string must not be empty. + CountryCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Indicates whether call tracking is enabled. By default, call tracking is + // not enabled. + CallTrackingEnabled *wrappers.BoolValue `protobuf:"bytes,3,opt,name=call_tracking_enabled,json=callTrackingEnabled,proto3" json:"call_tracking_enabled,omitempty"` + // The conversion action to attribute a call conversion to. If not set a + // default conversion action is used. This field only has effect if + // call_tracking_enabled is set to true. Otherwise this field is ignored. + CallConversionAction *wrappers.StringValue `protobuf:"bytes,4,opt,name=call_conversion_action,json=callConversionAction,proto3" json:"call_conversion_action,omitempty"` + // If true, disable call conversion tracking. call_conversion_action should + // not be set if this is true. Optional. 
+ CallConversionTrackingDisabled *wrappers.BoolValue `protobuf:"bytes,5,opt,name=call_conversion_tracking_disabled,json=callConversionTrackingDisabled,proto3" json:"call_conversion_tracking_disabled,omitempty"` + // Enum value that indicates whether this call extension uses its own call + // conversion setting (or just have call conversion disabled), or following + // the account level setting. + CallConversionReportingState enums.CallConversionReportingStateEnum_CallConversionReportingState `protobuf:"varint,6,opt,name=call_conversion_reporting_state,json=callConversionReportingState,proto3,enum=google.ads.googleads.v1.enums.CallConversionReportingStateEnum_CallConversionReportingState" json:"call_conversion_reporting_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallFeedItem) Reset() { *m = CallFeedItem{} } +func (m *CallFeedItem) String() string { return proto.CompactTextString(m) } +func (*CallFeedItem) ProtoMessage() {} +func (*CallFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{1} +} +func (m *CallFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallFeedItem.Unmarshal(m, b) +} +func (m *CallFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallFeedItem.Marshal(b, m, deterministic) +} +func (dst *CallFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallFeedItem.Merge(dst, src) +} +func (m *CallFeedItem) XXX_Size() int { + return xxx_messageInfo_CallFeedItem.Size(m) +} +func (m *CallFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_CallFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_CallFeedItem proto.InternalMessageInfo + +func (m *CallFeedItem) GetPhoneNumber() *wrappers.StringValue { + if m != nil { + return m.PhoneNumber + } + return nil +} + +func (m *CallFeedItem) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *CallFeedItem) GetCallTrackingEnabled() *wrappers.BoolValue { + if m != nil { + return m.CallTrackingEnabled + } + return nil +} + +func (m *CallFeedItem) GetCallConversionAction() *wrappers.StringValue { + if m != nil { + return m.CallConversionAction + } + return nil +} + +func (m *CallFeedItem) GetCallConversionTrackingDisabled() *wrappers.BoolValue { + if m != nil { + return m.CallConversionTrackingDisabled + } + return nil +} + +func (m *CallFeedItem) GetCallConversionReportingState() enums.CallConversionReportingStateEnum_CallConversionReportingState { + if m != nil { + return m.CallConversionReportingState + } + return enums.CallConversionReportingStateEnum_UNSPECIFIED +} + +// Represents a callout extension. +type CalloutFeedItem struct { + // The callout text. + // The length of this string should be between 1 and 25, inclusive. 
+ CalloutText *wrappers.StringValue `protobuf:"bytes,1,opt,name=callout_text,json=calloutText,proto3" json:"callout_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CalloutFeedItem) Reset() { *m = CalloutFeedItem{} } +func (m *CalloutFeedItem) String() string { return proto.CompactTextString(m) } +func (*CalloutFeedItem) ProtoMessage() {} +func (*CalloutFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{2} +} +func (m *CalloutFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CalloutFeedItem.Unmarshal(m, b) +} +func (m *CalloutFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CalloutFeedItem.Marshal(b, m, deterministic) +} +func (dst *CalloutFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_CalloutFeedItem.Merge(dst, src) +} +func (m *CalloutFeedItem) XXX_Size() int { + return xxx_messageInfo_CalloutFeedItem.Size(m) +} +func (m *CalloutFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_CalloutFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_CalloutFeedItem proto.InternalMessageInfo + +func (m *CalloutFeedItem) GetCalloutText() *wrappers.StringValue { + if m != nil { + return m.CalloutText + } + return nil +} + +// Represents a location extension. +type LocationFeedItem struct { + // The name of the business. + BusinessName *wrappers.StringValue `protobuf:"bytes,1,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // Line 1 of the business address. + AddressLine_1 *wrappers.StringValue `protobuf:"bytes,2,opt,name=address_line_1,json=addressLine1,proto3" json:"address_line_1,omitempty"` + // Line 2 of the business address. + AddressLine_2 *wrappers.StringValue `protobuf:"bytes,3,opt,name=address_line_2,json=addressLine2,proto3" json:"address_line_2,omitempty"` + // City of the business address. + City *wrappers.StringValue `protobuf:"bytes,4,opt,name=city,proto3" json:"city,omitempty"` + // Province of the business address. + Province *wrappers.StringValue `protobuf:"bytes,5,opt,name=province,proto3" json:"province,omitempty"` + // Postal code of the business address. + PostalCode *wrappers.StringValue `protobuf:"bytes,6,opt,name=postal_code,json=postalCode,proto3" json:"postal_code,omitempty"` + // Country code of the business address. + CountryCode *wrappers.StringValue `protobuf:"bytes,7,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Phone number of the business. 
+ PhoneNumber *wrappers.StringValue `protobuf:"bytes,8,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationFeedItem) Reset() { *m = LocationFeedItem{} } +func (m *LocationFeedItem) String() string { return proto.CompactTextString(m) } +func (*LocationFeedItem) ProtoMessage() {} +func (*LocationFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{3} +} +func (m *LocationFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationFeedItem.Unmarshal(m, b) +} +func (m *LocationFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationFeedItem.Marshal(b, m, deterministic) +} +func (dst *LocationFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationFeedItem.Merge(dst, src) +} +func (m *LocationFeedItem) XXX_Size() int { + return xxx_messageInfo_LocationFeedItem.Size(m) +} +func (m *LocationFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_LocationFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationFeedItem proto.InternalMessageInfo + +func (m *LocationFeedItem) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *LocationFeedItem) GetAddressLine_1() *wrappers.StringValue { + if m != nil { + return m.AddressLine_1 + } + return nil +} + +func (m *LocationFeedItem) GetAddressLine_2() *wrappers.StringValue { + if m != nil { + return m.AddressLine_2 + } + return nil +} + +func (m *LocationFeedItem) GetCity() *wrappers.StringValue { + if m != nil { + return m.City + } + return nil +} + +func (m *LocationFeedItem) GetProvince() *wrappers.StringValue { + if m != nil { + return m.Province + } + return nil +} + +func (m *LocationFeedItem) GetPostalCode() *wrappers.StringValue { + if m != nil { + return m.PostalCode + } + return nil +} + +func (m *LocationFeedItem) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *LocationFeedItem) GetPhoneNumber() *wrappers.StringValue { + if m != nil { + return m.PhoneNumber + } + return nil +} + +// Represents an affiliate location extension. +type AffiliateLocationFeedItem struct { + // The name of the business. + BusinessName *wrappers.StringValue `protobuf:"bytes,1,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // Line 1 of the business address. + AddressLine_1 *wrappers.StringValue `protobuf:"bytes,2,opt,name=address_line_1,json=addressLine1,proto3" json:"address_line_1,omitempty"` + // Line 2 of the business address. + AddressLine_2 *wrappers.StringValue `protobuf:"bytes,3,opt,name=address_line_2,json=addressLine2,proto3" json:"address_line_2,omitempty"` + // City of the business address. + City *wrappers.StringValue `protobuf:"bytes,4,opt,name=city,proto3" json:"city,omitempty"` + // Province of the business address. + Province *wrappers.StringValue `protobuf:"bytes,5,opt,name=province,proto3" json:"province,omitempty"` + // Postal code of the business address. + PostalCode *wrappers.StringValue `protobuf:"bytes,6,opt,name=postal_code,json=postalCode,proto3" json:"postal_code,omitempty"` + // Country code of the business address. + CountryCode *wrappers.StringValue `protobuf:"bytes,7,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Phone number of the business. 
+ PhoneNumber *wrappers.StringValue `protobuf:"bytes,8,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + // Id of the retail chain that is advertised as a seller of your product. + ChainId *wrappers.Int64Value `protobuf:"bytes,9,opt,name=chain_id,json=chainId,proto3" json:"chain_id,omitempty"` + // Name of chain. + ChainName *wrappers.StringValue `protobuf:"bytes,10,opt,name=chain_name,json=chainName,proto3" json:"chain_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AffiliateLocationFeedItem) Reset() { *m = AffiliateLocationFeedItem{} } +func (m *AffiliateLocationFeedItem) String() string { return proto.CompactTextString(m) } +func (*AffiliateLocationFeedItem) ProtoMessage() {} +func (*AffiliateLocationFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{4} +} +func (m *AffiliateLocationFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AffiliateLocationFeedItem.Unmarshal(m, b) +} +func (m *AffiliateLocationFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AffiliateLocationFeedItem.Marshal(b, m, deterministic) +} +func (dst *AffiliateLocationFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_AffiliateLocationFeedItem.Merge(dst, src) +} +func (m *AffiliateLocationFeedItem) XXX_Size() int { + return xxx_messageInfo_AffiliateLocationFeedItem.Size(m) +} +func (m *AffiliateLocationFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_AffiliateLocationFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_AffiliateLocationFeedItem proto.InternalMessageInfo + +func (m *AffiliateLocationFeedItem) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetAddressLine_1() *wrappers.StringValue { + if m != nil { + return m.AddressLine_1 + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetAddressLine_2() *wrappers.StringValue { + if m != nil { + return m.AddressLine_2 + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetCity() *wrappers.StringValue { + if m != nil { + return m.City + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetProvince() *wrappers.StringValue { + if m != nil { + return m.Province + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetPostalCode() *wrappers.StringValue { + if m != nil { + return m.PostalCode + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetPhoneNumber() *wrappers.StringValue { + if m != nil { + return m.PhoneNumber + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetChainId() *wrappers.Int64Value { + if m != nil { + return m.ChainId + } + return nil +} + +func (m *AffiliateLocationFeedItem) GetChainName() *wrappers.StringValue { + if m != nil { + return m.ChainName + } + return nil +} + +// An extension that users can click on to send a text message to the +// advertiser. +type TextMessageFeedItem struct { + // The business name to prepend to the message text. + // This field is required. + BusinessName *wrappers.StringValue `protobuf:"bytes,1,opt,name=business_name,json=businessName,proto3" json:"business_name,omitempty"` + // Uppercase two-letter country code of the advertiser's phone number. + // This field is required. 
+ CountryCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // The advertiser's phone number the message will be sent to. Required. + PhoneNumber *wrappers.StringValue `protobuf:"bytes,3,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + // The text to show in the ad. + // This field is required. + Text *wrappers.StringValue `protobuf:"bytes,4,opt,name=text,proto3" json:"text,omitempty"` + // The message text populated in the messaging app. + ExtensionText *wrappers.StringValue `protobuf:"bytes,5,opt,name=extension_text,json=extensionText,proto3" json:"extension_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextMessageFeedItem) Reset() { *m = TextMessageFeedItem{} } +func (m *TextMessageFeedItem) String() string { return proto.CompactTextString(m) } +func (*TextMessageFeedItem) ProtoMessage() {} +func (*TextMessageFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{5} +} +func (m *TextMessageFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextMessageFeedItem.Unmarshal(m, b) +} +func (m *TextMessageFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextMessageFeedItem.Marshal(b, m, deterministic) +} +func (dst *TextMessageFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextMessageFeedItem.Merge(dst, src) +} +func (m *TextMessageFeedItem) XXX_Size() int { + return xxx_messageInfo_TextMessageFeedItem.Size(m) +} +func (m *TextMessageFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_TextMessageFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_TextMessageFeedItem proto.InternalMessageInfo + +func (m *TextMessageFeedItem) GetBusinessName() *wrappers.StringValue { + if m != nil { + return m.BusinessName + } + return nil +} + +func (m *TextMessageFeedItem) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *TextMessageFeedItem) GetPhoneNumber() *wrappers.StringValue { + if m != nil { + return m.PhoneNumber + } + return nil +} + +func (m *TextMessageFeedItem) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *TextMessageFeedItem) GetExtensionText() *wrappers.StringValue { + if m != nil { + return m.ExtensionText + } + return nil +} + +// Represents a Price extension. +type PriceFeedItem struct { + // Price extension type of this extension. + Type enums.PriceExtensionTypeEnum_PriceExtensionType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.PriceExtensionTypeEnum_PriceExtensionType" json:"type,omitempty"` + // Price qualifier for all offers of this price extension. + PriceQualifier enums.PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier `protobuf:"varint,2,opt,name=price_qualifier,json=priceQualifier,proto3,enum=google.ads.googleads.v1.enums.PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier" json:"price_qualifier,omitempty"` + // Tracking URL template for all offers of this price extension. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,3,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The code of the language used for this price extension. 
+ LanguageCode *wrappers.StringValue `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The price offerings in this price extension. + PriceOfferings []*PriceOffer `protobuf:"bytes,5,rep,name=price_offerings,json=priceOfferings,proto3" json:"price_offerings,omitempty"` + // URL template for appending params to landing page URLs served with parallel + // tracking. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,6,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PriceFeedItem) Reset() { *m = PriceFeedItem{} } +func (m *PriceFeedItem) String() string { return proto.CompactTextString(m) } +func (*PriceFeedItem) ProtoMessage() {} +func (*PriceFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{6} +} +func (m *PriceFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PriceFeedItem.Unmarshal(m, b) +} +func (m *PriceFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PriceFeedItem.Marshal(b, m, deterministic) +} +func (dst *PriceFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_PriceFeedItem.Merge(dst, src) +} +func (m *PriceFeedItem) XXX_Size() int { + return xxx_messageInfo_PriceFeedItem.Size(m) +} +func (m *PriceFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_PriceFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_PriceFeedItem proto.InternalMessageInfo + +func (m *PriceFeedItem) GetType() enums.PriceExtensionTypeEnum_PriceExtensionType { + if m != nil { + return m.Type + } + return enums.PriceExtensionTypeEnum_UNSPECIFIED +} + +func (m *PriceFeedItem) GetPriceQualifier() enums.PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier { + if m != nil { + return m.PriceQualifier + } + return enums.PriceExtensionPriceQualifierEnum_UNSPECIFIED +} + +func (m *PriceFeedItem) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *PriceFeedItem) GetLanguageCode() *wrappers.StringValue { + if m != nil { + return m.LanguageCode + } + return nil +} + +func (m *PriceFeedItem) GetPriceOfferings() []*PriceOffer { + if m != nil { + return m.PriceOfferings + } + return nil +} + +func (m *PriceFeedItem) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +// Represents one price offer in a price extension. +type PriceOffer struct { + // Header text of this offer. + Header *wrappers.StringValue `protobuf:"bytes,1,opt,name=header,proto3" json:"header,omitempty"` + // Description text of this offer. + Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Price value of this offer. + Price *Money `protobuf:"bytes,3,opt,name=price,proto3" json:"price,omitempty"` + // Price unit for this offer. + Unit enums.PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit `protobuf:"varint,4,opt,name=unit,proto3,enum=google.ads.googleads.v1.enums.PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit" json:"unit,omitempty"` + // A list of possible final URLs after all cross domain redirects. + FinalUrls []*wrappers.StringValue `protobuf:"bytes,5,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of possible final mobile URLs after all cross domain redirects. 
+ FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,6,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PriceOffer) Reset() { *m = PriceOffer{} } +func (m *PriceOffer) String() string { return proto.CompactTextString(m) } +func (*PriceOffer) ProtoMessage() {} +func (*PriceOffer) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{7} +} +func (m *PriceOffer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PriceOffer.Unmarshal(m, b) +} +func (m *PriceOffer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PriceOffer.Marshal(b, m, deterministic) +} +func (dst *PriceOffer) XXX_Merge(src proto.Message) { + xxx_messageInfo_PriceOffer.Merge(dst, src) +} +func (m *PriceOffer) XXX_Size() int { + return xxx_messageInfo_PriceOffer.Size(m) +} +func (m *PriceOffer) XXX_DiscardUnknown() { + xxx_messageInfo_PriceOffer.DiscardUnknown(m) +} + +var xxx_messageInfo_PriceOffer proto.InternalMessageInfo + +func (m *PriceOffer) GetHeader() *wrappers.StringValue { + if m != nil { + return m.Header + } + return nil +} + +func (m *PriceOffer) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *PriceOffer) GetPrice() *Money { + if m != nil { + return m.Price + } + return nil +} + +func (m *PriceOffer) GetUnit() enums.PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit { + if m != nil { + return m.Unit + } + return enums.PriceExtensionPriceUnitEnum_UNSPECIFIED +} + +func (m *PriceOffer) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *PriceOffer) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +// Represents a Promotion extension. +type PromotionFeedItem struct { + // A freeform description of what the promotion is targeting. + // This field is required. + PromotionTarget *wrappers.StringValue `protobuf:"bytes,1,opt,name=promotion_target,json=promotionTarget,proto3" json:"promotion_target,omitempty"` + // Enum that modifies the qualification of the discount. + DiscountModifier enums.PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier `protobuf:"varint,2,opt,name=discount_modifier,json=discountModifier,proto3,enum=google.ads.googleads.v1.enums.PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier" json:"discount_modifier,omitempty"` + // Start date of when the promotion is eligible to be redeemed. + PromotionStartDate *wrappers.StringValue `protobuf:"bytes,7,opt,name=promotion_start_date,json=promotionStartDate,proto3" json:"promotion_start_date,omitempty"` + // End date of when the promotion is eligible to be redeemed. + PromotionEndDate *wrappers.StringValue `protobuf:"bytes,8,opt,name=promotion_end_date,json=promotionEndDate,proto3" json:"promotion_end_date,omitempty"` + // The occasion the promotion was intended for. + // If an occasion is set, the redemption window will need to fall within + // the date range associated with the occasion. 
+ Occasion enums.PromotionExtensionOccasionEnum_PromotionExtensionOccasion `protobuf:"varint,9,opt,name=occasion,proto3,enum=google.ads.googleads.v1.enums.PromotionExtensionOccasionEnum_PromotionExtensionOccasion" json:"occasion,omitempty"` + // A list of possible final URLs after all cross domain redirects. + // This field is required. + FinalUrls []*wrappers.StringValue `protobuf:"bytes,10,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of possible final mobile URLs after all cross domain redirects. + FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,11,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,12,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // A list of mappings to be used for substituting URL custom parameter tags in + // the tracking_url_template, final_urls, and/or final_mobile_urls. + UrlCustomParameters []*CustomParameter `protobuf:"bytes,13,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // URL template for appending params to landing page URLs served with parallel + // tracking. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,14,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + // The language of the promotion. + // Represented as BCP 47 language tag. + LanguageCode *wrappers.StringValue `protobuf:"bytes,15,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Discount type, can be percentage off or amount off. + // + // Types that are valid to be assigned to DiscountType: + // *PromotionFeedItem_PercentOff + // *PromotionFeedItem_MoneyAmountOff + DiscountType isPromotionFeedItem_DiscountType `protobuf_oneof:"discount_type"` + // Promotion trigger. Can be by promotion code or promo by eligible order + // amount. 
+ // + // Types that are valid to be assigned to PromotionTrigger: + // *PromotionFeedItem_PromotionCode + // *PromotionFeedItem_OrdersOverAmount + PromotionTrigger isPromotionFeedItem_PromotionTrigger `protobuf_oneof:"promotion_trigger"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PromotionFeedItem) Reset() { *m = PromotionFeedItem{} } +func (m *PromotionFeedItem) String() string { return proto.CompactTextString(m) } +func (*PromotionFeedItem) ProtoMessage() {} +func (*PromotionFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{8} +} +func (m *PromotionFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PromotionFeedItem.Unmarshal(m, b) +} +func (m *PromotionFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PromotionFeedItem.Marshal(b, m, deterministic) +} +func (dst *PromotionFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_PromotionFeedItem.Merge(dst, src) +} +func (m *PromotionFeedItem) XXX_Size() int { + return xxx_messageInfo_PromotionFeedItem.Size(m) +} +func (m *PromotionFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_PromotionFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_PromotionFeedItem proto.InternalMessageInfo + +func (m *PromotionFeedItem) GetPromotionTarget() *wrappers.StringValue { + if m != nil { + return m.PromotionTarget + } + return nil +} + +func (m *PromotionFeedItem) GetDiscountModifier() enums.PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier { + if m != nil { + return m.DiscountModifier + } + return enums.PromotionExtensionDiscountModifierEnum_UNSPECIFIED +} + +func (m *PromotionFeedItem) GetPromotionStartDate() *wrappers.StringValue { + if m != nil { + return m.PromotionStartDate + } + return nil +} + +func (m *PromotionFeedItem) GetPromotionEndDate() *wrappers.StringValue { + if m != nil { + return m.PromotionEndDate + } + return nil +} + +func (m *PromotionFeedItem) GetOccasion() enums.PromotionExtensionOccasionEnum_PromotionExtensionOccasion { + if m != nil { + return m.Occasion + } + return enums.PromotionExtensionOccasionEnum_UNSPECIFIED +} + +func (m *PromotionFeedItem) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *PromotionFeedItem) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *PromotionFeedItem) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *PromotionFeedItem) GetUrlCustomParameters() []*CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *PromotionFeedItem) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +func (m *PromotionFeedItem) GetLanguageCode() *wrappers.StringValue { + if m != nil { + return m.LanguageCode + } + return nil +} + +type isPromotionFeedItem_DiscountType interface { + isPromotionFeedItem_DiscountType() +} + +type PromotionFeedItem_PercentOff struct { + PercentOff *wrappers.Int64Value `protobuf:"bytes,3,opt,name=percent_off,json=percentOff,proto3,oneof"` +} + +type PromotionFeedItem_MoneyAmountOff struct { + MoneyAmountOff *Money `protobuf:"bytes,4,opt,name=money_amount_off,json=moneyAmountOff,proto3,oneof"` +} + +func (*PromotionFeedItem_PercentOff) isPromotionFeedItem_DiscountType() 
{} + +func (*PromotionFeedItem_MoneyAmountOff) isPromotionFeedItem_DiscountType() {} + +func (m *PromotionFeedItem) GetDiscountType() isPromotionFeedItem_DiscountType { + if m != nil { + return m.DiscountType + } + return nil +} + +func (m *PromotionFeedItem) GetPercentOff() *wrappers.Int64Value { + if x, ok := m.GetDiscountType().(*PromotionFeedItem_PercentOff); ok { + return x.PercentOff + } + return nil +} + +func (m *PromotionFeedItem) GetMoneyAmountOff() *Money { + if x, ok := m.GetDiscountType().(*PromotionFeedItem_MoneyAmountOff); ok { + return x.MoneyAmountOff + } + return nil +} + +type isPromotionFeedItem_PromotionTrigger interface { + isPromotionFeedItem_PromotionTrigger() +} + +type PromotionFeedItem_PromotionCode struct { + PromotionCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=promotion_code,json=promotionCode,proto3,oneof"` +} + +type PromotionFeedItem_OrdersOverAmount struct { + OrdersOverAmount *Money `protobuf:"bytes,6,opt,name=orders_over_amount,json=ordersOverAmount,proto3,oneof"` +} + +func (*PromotionFeedItem_PromotionCode) isPromotionFeedItem_PromotionTrigger() {} + +func (*PromotionFeedItem_OrdersOverAmount) isPromotionFeedItem_PromotionTrigger() {} + +func (m *PromotionFeedItem) GetPromotionTrigger() isPromotionFeedItem_PromotionTrigger { + if m != nil { + return m.PromotionTrigger + } + return nil +} + +func (m *PromotionFeedItem) GetPromotionCode() *wrappers.StringValue { + if x, ok := m.GetPromotionTrigger().(*PromotionFeedItem_PromotionCode); ok { + return x.PromotionCode + } + return nil +} + +func (m *PromotionFeedItem) GetOrdersOverAmount() *Money { + if x, ok := m.GetPromotionTrigger().(*PromotionFeedItem_OrdersOverAmount); ok { + return x.OrdersOverAmount + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PromotionFeedItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PromotionFeedItem_OneofMarshaler, _PromotionFeedItem_OneofUnmarshaler, _PromotionFeedItem_OneofSizer, []interface{}{ + (*PromotionFeedItem_PercentOff)(nil), + (*PromotionFeedItem_MoneyAmountOff)(nil), + (*PromotionFeedItem_PromotionCode)(nil), + (*PromotionFeedItem_OrdersOverAmount)(nil), + } +} + +func _PromotionFeedItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PromotionFeedItem) + // discount_type + switch x := m.DiscountType.(type) { + case *PromotionFeedItem_PercentOff: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PercentOff); err != nil { + return err + } + case *PromotionFeedItem_MoneyAmountOff: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MoneyAmountOff); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PromotionFeedItem.DiscountType has unexpected type %T", x) + } + // promotion_trigger + switch x := m.PromotionTrigger.(type) { + case *PromotionFeedItem_PromotionCode: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PromotionCode); err != nil { + return err + } + case *PromotionFeedItem_OrdersOverAmount: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OrdersOverAmount); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PromotionFeedItem.PromotionTrigger has unexpected type %T", x) + } + return nil +} + +func _PromotionFeedItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PromotionFeedItem) + switch tag { + case 3: // discount_type.percent_off + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.DiscountType = &PromotionFeedItem_PercentOff{msg} + return true, err + case 4: // discount_type.money_amount_off + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Money) + err := b.DecodeMessage(msg) + m.DiscountType = &PromotionFeedItem_MoneyAmountOff{msg} + return true, err + case 5: // promotion_trigger.promotion_code + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.PromotionTrigger = &PromotionFeedItem_PromotionCode{msg} + return true, err + case 6: // promotion_trigger.orders_over_amount + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Money) + err := b.DecodeMessage(msg) + m.PromotionTrigger = &PromotionFeedItem_OrdersOverAmount{msg} + return true, err + default: + return false, nil + } +} + +func _PromotionFeedItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PromotionFeedItem) + // discount_type + switch x := m.DiscountType.(type) { + case *PromotionFeedItem_PercentOff: + s := proto.Size(x.PercentOff) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PromotionFeedItem_MoneyAmountOff: + s := proto.Size(x.MoneyAmountOff) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // promotion_trigger + switch x := m.PromotionTrigger.(type) { + case *PromotionFeedItem_PromotionCode: + s := proto.Size(x.PromotionCode) + n += 1 // tag and 
wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PromotionFeedItem_OrdersOverAmount: + s := proto.Size(x.OrdersOverAmount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a structured snippet extension. +type StructuredSnippetFeedItem struct { + // The header of the snippet. + // This string must not be empty. + Header *wrappers.StringValue `protobuf:"bytes,1,opt,name=header,proto3" json:"header,omitempty"` + // The values in the snippet. + // The maximum size of this collection is 10. + Values []*wrappers.StringValue `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredSnippetFeedItem) Reset() { *m = StructuredSnippetFeedItem{} } +func (m *StructuredSnippetFeedItem) String() string { return proto.CompactTextString(m) } +func (*StructuredSnippetFeedItem) ProtoMessage() {} +func (*StructuredSnippetFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{9} +} +func (m *StructuredSnippetFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredSnippetFeedItem.Unmarshal(m, b) +} +func (m *StructuredSnippetFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredSnippetFeedItem.Marshal(b, m, deterministic) +} +func (dst *StructuredSnippetFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredSnippetFeedItem.Merge(dst, src) +} +func (m *StructuredSnippetFeedItem) XXX_Size() int { + return xxx_messageInfo_StructuredSnippetFeedItem.Size(m) +} +func (m *StructuredSnippetFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredSnippetFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredSnippetFeedItem proto.InternalMessageInfo + +func (m *StructuredSnippetFeedItem) GetHeader() *wrappers.StringValue { + if m != nil { + return m.Header + } + return nil +} + +func (m *StructuredSnippetFeedItem) GetValues() []*wrappers.StringValue { + if m != nil { + return m.Values + } + return nil +} + +// Represents a sitelink extension. +type SitelinkFeedItem struct { + // URL display text for the sitelink. + // The length of this string should be between 1 and 25, inclusive. + LinkText *wrappers.StringValue `protobuf:"bytes,1,opt,name=link_text,json=linkText,proto3" json:"link_text,omitempty"` + // First line of the description for the sitelink. + // If this value is set, line2 must also be set. + // The length of this string should be between 0 and 35, inclusive. + Line1 *wrappers.StringValue `protobuf:"bytes,2,opt,name=line1,proto3" json:"line1,omitempty"` + // Second line of the description for the sitelink. + // If this value is set, line1 must also be set. + // The length of this string should be between 0 and 35, inclusive. + Line2 *wrappers.StringValue `protobuf:"bytes,3,opt,name=line2,proto3" json:"line2,omitempty"` + // A list of possible final URLs after all cross domain redirects. + FinalUrls []*wrappers.StringValue `protobuf:"bytes,4,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of possible final mobile URLs after all cross domain redirects. 
+ FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,5,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,6,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // A list of mappings to be used for substituting URL custom parameter tags in + // the tracking_url_template, final_urls, and/or final_mobile_urls. + UrlCustomParameters []*CustomParameter `protobuf:"bytes,7,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // Final URL suffix to be appended to landing page URLs served with + // parallel tracking. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,8,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SitelinkFeedItem) Reset() { *m = SitelinkFeedItem{} } +func (m *SitelinkFeedItem) String() string { return proto.CompactTextString(m) } +func (*SitelinkFeedItem) ProtoMessage() {} +func (*SitelinkFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extensions_29e88682b0f69f9b, []int{10} +} +func (m *SitelinkFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SitelinkFeedItem.Unmarshal(m, b) +} +func (m *SitelinkFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SitelinkFeedItem.Marshal(b, m, deterministic) +} +func (dst *SitelinkFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_SitelinkFeedItem.Merge(dst, src) +} +func (m *SitelinkFeedItem) XXX_Size() int { + return xxx_messageInfo_SitelinkFeedItem.Size(m) +} +func (m *SitelinkFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_SitelinkFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_SitelinkFeedItem proto.InternalMessageInfo + +func (m *SitelinkFeedItem) GetLinkText() *wrappers.StringValue { + if m != nil { + return m.LinkText + } + return nil +} + +func (m *SitelinkFeedItem) GetLine1() *wrappers.StringValue { + if m != nil { + return m.Line1 + } + return nil +} + +func (m *SitelinkFeedItem) GetLine2() *wrappers.StringValue { + if m != nil { + return m.Line2 + } + return nil +} + +func (m *SitelinkFeedItem) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *SitelinkFeedItem) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *SitelinkFeedItem) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *SitelinkFeedItem) GetUrlCustomParameters() []*CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *SitelinkFeedItem) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +func init() { + proto.RegisterType((*AppFeedItem)(nil), "google.ads.googleads.v1.common.AppFeedItem") + proto.RegisterType((*CallFeedItem)(nil), "google.ads.googleads.v1.common.CallFeedItem") + proto.RegisterType((*CalloutFeedItem)(nil), "google.ads.googleads.v1.common.CalloutFeedItem") + proto.RegisterType((*LocationFeedItem)(nil), "google.ads.googleads.v1.common.LocationFeedItem") + proto.RegisterType((*AffiliateLocationFeedItem)(nil), 
"google.ads.googleads.v1.common.AffiliateLocationFeedItem") + proto.RegisterType((*TextMessageFeedItem)(nil), "google.ads.googleads.v1.common.TextMessageFeedItem") + proto.RegisterType((*PriceFeedItem)(nil), "google.ads.googleads.v1.common.PriceFeedItem") + proto.RegisterType((*PriceOffer)(nil), "google.ads.googleads.v1.common.PriceOffer") + proto.RegisterType((*PromotionFeedItem)(nil), "google.ads.googleads.v1.common.PromotionFeedItem") + proto.RegisterType((*StructuredSnippetFeedItem)(nil), "google.ads.googleads.v1.common.StructuredSnippetFeedItem") + proto.RegisterType((*SitelinkFeedItem)(nil), "google.ads.googleads.v1.common.SitelinkFeedItem") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/extensions.proto", fileDescriptor_extensions_29e88682b0f69f9b) +} + +var fileDescriptor_extensions_29e88682b0f69f9b = []byte{ + // 1575 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x59, 0xdf, 0x4e, 0x1b, 0xc7, + 0x1a, 0x8f, 0xc1, 0x10, 0xf8, 0x0c, 0x06, 0x96, 0xe4, 0xc8, 0xe1, 0x44, 0x39, 0x39, 0x96, 0x8e, + 0x14, 0x1d, 0xa9, 0x76, 0x70, 0x68, 0x94, 0x3a, 0x6a, 0x5a, 0x63, 0x48, 0xa0, 0x0a, 0x7f, 0xb2, + 0x06, 0x54, 0x55, 0x48, 0xab, 0x61, 0x77, 0x6c, 0x46, 0xd9, 0x9d, 0xd9, 0xce, 0xcc, 0x52, 0x78, + 0x80, 0xaa, 0x55, 0xef, 0xfb, 0x02, 0xad, 0xd4, 0x4a, 0xbd, 0xe8, 0x0b, 0xf4, 0xaa, 0xb7, 0xbd, + 0xeb, 0x4d, 0x1f, 0x22, 0x4f, 0x51, 0xcd, 0xec, 0xec, 0x1a, 0x4c, 0x60, 0xd7, 0x94, 0x4a, 0xbd, + 0xc8, 0x15, 0xbb, 0xeb, 0xef, 0xf7, 0x9b, 0x99, 0x6f, 0x7f, 0xdf, 0xbf, 0x05, 0xea, 0x3d, 0xc6, + 0x7a, 0x3e, 0xae, 0x23, 0x4f, 0x98, 0x4b, 0x75, 0x75, 0xb4, 0x58, 0x77, 0x59, 0x10, 0x30, 0x5a, + 0xc7, 0xc7, 0x12, 0x53, 0x41, 0x18, 0x15, 0xb5, 0x90, 0x33, 0xc9, 0xac, 0x7b, 0xb1, 0x55, 0x0d, + 0x79, 0xa2, 0x96, 0x02, 0x6a, 0x47, 0x8b, 0xb5, 0x18, 0xb0, 0xf0, 0x7e, 0x06, 0xa1, 0x1b, 0x09, + 0xc9, 0x02, 0x27, 0x44, 0x1c, 0x05, 0x58, 0x62, 0x1e, 0xd3, 0x2e, 0x3c, 0xcc, 0x80, 0x75, 0x31, + 0xf6, 0x9c, 0xf8, 0xda, 0x20, 0xde, 0xbb, 0x08, 0x81, 0x69, 0x14, 0x88, 0x3a, 0x0a, 0x43, 0x47, + 0x48, 0xc6, 0xb1, 0x31, 0x6f, 0x5f, 0x6e, 0xee, 0x22, 0xdf, 0x77, 0x5c, 0x46, 0x8f, 0x30, 0x57, + 0xa7, 0x75, 0x38, 0x0e, 0x19, 0x97, 0x84, 0xf6, 0x1c, 0x21, 0x91, 0xcc, 0x49, 0x12, 0x72, 0xe2, + 0x62, 0x27, 0x75, 0x99, 0x13, 0xdf, 0x7f, 0x1e, 0x21, 0x9f, 0x74, 0x49, 0x7a, 0xd4, 0x67, 0x57, + 0x21, 0x89, 0x28, 0x91, 0x06, 0xff, 0x64, 0x38, 0xbc, 0x3c, 0x09, 0x93, 0xed, 0xaf, 0x67, 0x21, + 0x59, 0xc0, 0xa4, 0xc2, 0xf4, 0xd1, 0x1e, 0x11, 0x2e, 0x8b, 0xa8, 0x74, 0x02, 0xe6, 0x9d, 0x3e, + 0xc4, 0xc7, 0xc3, 0x53, 0x31, 0xd7, 0x45, 0xea, 0xc2, 0x30, 0x18, 0x21, 0xd5, 0xf5, 0xdd, 0x41, + 0xd4, 0xad, 0x7f, 0xc1, 0x51, 0x18, 0x62, 0x6e, 0x84, 0xb6, 0x70, 0x37, 0x59, 0x21, 0x24, 0x75, + 0x44, 0x29, 0x93, 0x48, 0xf6, 0x65, 0x58, 0xfd, 0xa3, 0x08, 0xa5, 0x56, 0x18, 0x3e, 0xc7, 0xd8, + 0x5b, 0x97, 0x38, 0xb0, 0x3e, 0x80, 0x49, 0x9f, 0xd0, 0xd7, 0x8e, 0xc4, 0xc7, 0xb2, 0x52, 0xb8, + 0x5f, 0x78, 0x50, 0x6a, 0xdc, 0x35, 0xfa, 0xac, 0x25, 0x2b, 0xd4, 0x3a, 0x92, 0x13, 0xda, 0xdb, + 0x43, 0x7e, 0x84, 0xed, 0x09, 0x65, 0xbe, 0x83, 0x8f, 0xa5, 0xf5, 0x08, 0xc6, 0x95, 0x58, 0x88, + 0x57, 0x19, 0xc9, 0x81, 0x1b, 0x43, 0x61, 0xb8, 0xee, 0x59, 0xaf, 0x60, 0x32, 0x55, 0x58, 0x65, + 0xf4, 0x7e, 0xe1, 0x41, 0xb9, 0xb1, 0x54, 0xbb, 0x28, 0x34, 0xb4, 0x4f, 0x6a, 0xad, 0x30, 0xec, + 0x28, 0xf3, 0x55, 0x1a, 0x05, 0xe9, 0x8d, 0x3d, 0x81, 0xcc, 0x95, 0xf5, 0x14, 0xa0, 0x4b, 0x28, + 0xf2, 0x9d, 0x88, 0xfb, 0xa2, 0x52, 0xbc, 0x3f, 0x9a, 0xb9, 0x97, 0x49, 0x6d, 0xbf, 0xcb, 0x7d, + 
0x61, 0xad, 0xc1, 0x5c, 0x0c, 0x0e, 0xd8, 0x01, 0xf1, 0x71, 0xcc, 0x31, 0x96, 0x83, 0x63, 0x46, + 0xc3, 0x36, 0x34, 0x4a, 0x33, 0x6d, 0xc3, 0x6d, 0xc9, 0x91, 0xfb, 0x5a, 0x69, 0x3f, 0xe2, 0xbe, + 0x23, 0x71, 0x10, 0xfa, 0x48, 0xe2, 0xca, 0x78, 0x0e, 0xef, 0xcc, 0x27, 0xd0, 0x5d, 0xee, 0xef, + 0x18, 0xa0, 0xe5, 0xc2, 0x6d, 0x45, 0x34, 0x18, 0xf9, 0xa2, 0x72, 0x53, 0xef, 0xaf, 0x5e, 0xbb, + 0x3c, 0xa5, 0xd4, 0xda, 0x1a, 0xb8, 0x9d, 0xe0, 0xec, 0xf9, 0x88, 0xfb, 0x03, 0xcf, 0x84, 0xf5, + 0x1c, 0x66, 0x53, 0xef, 0x39, 0x22, 0xea, 0x76, 0xc9, 0x71, 0x65, 0x22, 0xc7, 0x8e, 0xcb, 0x89, + 0x0f, 0x3b, 0x1a, 0x53, 0xfd, 0xb5, 0x08, 0x53, 0x6d, 0xe4, 0xfb, 0xa9, 0xb2, 0x3e, 0x82, 0xa9, + 0xf0, 0x90, 0x51, 0xec, 0xd0, 0x28, 0x38, 0xc0, 0x3c, 0x97, 0xb8, 0x4a, 0x1a, 0xb1, 0xa9, 0x01, + 0x8a, 0x40, 0x87, 0x10, 0x3f, 0x71, 0x5c, 0xe6, 0xe1, 0x5c, 0x2a, 0x2b, 0x19, 0x44, 0x9b, 0x79, + 0xd8, 0xda, 0x84, 0xdb, 0x3a, 0x3d, 0xa5, 0xaf, 0x05, 0x53, 0x74, 0xe0, 0x63, 0x4f, 0xeb, 0xae, + 0xd4, 0x58, 0x38, 0xc7, 0xb4, 0xcc, 0x98, 0x6f, 0xde, 0x87, 0x02, 0xee, 0x18, 0xdc, 0x6a, 0x0c, + 0xb3, 0x6c, 0xf8, 0xd7, 0x60, 0xba, 0x43, 0xae, 0x0a, 0xae, 0x4a, 0x31, 0xc7, 0xd6, 0x6e, 0x29, + 0x6c, 0x3b, 0x85, 0xb6, 0x34, 0xd2, 0xc2, 0xf0, 0xdf, 0x41, 0xce, 0x74, 0xbb, 0x1e, 0x11, 0xf1, + 0x7e, 0xc7, 0x32, 0xf7, 0x7b, 0xef, 0x2c, 0x79, 0xb2, 0xf3, 0x15, 0xc3, 0x60, 0x7d, 0x5f, 0x80, + 0xff, 0x64, 0xa4, 0x6a, 0xad, 0xd3, 0x72, 0x63, 0x3f, 0x23, 0x1a, 0xdb, 0x67, 0x16, 0xb2, 0x13, + 0x8e, 0x8e, 0xa2, 0xd0, 0x11, 0x7a, 0x99, 0x81, 0x7d, 0xd7, 0xbd, 0xe4, 0xd7, 0xaa, 0x0d, 0x33, + 0x0a, 0xcd, 0x22, 0x79, 0x5a, 0x45, 0x6e, 0xfc, 0x28, 0x7f, 0x8a, 0x2a, 0x19, 0x84, 0xca, 0x52, + 0xd5, 0x6f, 0x8a, 0x30, 0xfb, 0x92, 0xb9, 0x3a, 0x09, 0xa6, 0xac, 0x2d, 0x98, 0x3e, 0x88, 0x04, + 0xa1, 0x58, 0x08, 0x87, 0xa2, 0x00, 0xe7, 0xa2, 0x9d, 0x4a, 0x20, 0x9b, 0x28, 0xc0, 0xd6, 0x32, + 0x94, 0x91, 0xe7, 0x71, 0xc5, 0xe0, 0x13, 0x8a, 0x9d, 0xc5, 0x5c, 0xfa, 0x9c, 0x32, 0x98, 0x97, + 0x84, 0xe2, 0xc5, 0x73, 0x1c, 0x0d, 0xa3, 0xcc, 0xfc, 0x1c, 0x0d, 0xeb, 0x21, 0x14, 0x5d, 0x22, + 0x4f, 0x72, 0x49, 0x50, 0x5b, 0x5a, 0x4f, 0x60, 0x22, 0xe4, 0xec, 0x88, 0x50, 0x17, 0x1b, 0x65, + 0x65, 0x64, 0xfc, 0xc4, 0xda, 0xfa, 0x10, 0x4a, 0x21, 0x13, 0x12, 0xf9, 0x71, 0x40, 0xe6, 0x49, + 0x6c, 0x10, 0x03, 0x74, 0x3c, 0x0e, 0x06, 0xf4, 0xcd, 0x61, 0x03, 0x7a, 0x30, 0xa5, 0x4c, 0x0c, + 0x99, 0x52, 0xaa, 0x5f, 0x8f, 0xc1, 0x9d, 0x56, 0xb7, 0x4b, 0x7c, 0x82, 0x24, 0x7e, 0xa7, 0x8a, + 0x77, 0xaa, 0xd0, 0x85, 0xe6, 0x31, 0x4c, 0xb8, 0x87, 0x88, 0x50, 0xd5, 0xca, 0x4c, 0x6a, 0xf0, + 0xbf, 0xcf, 0x81, 0xd7, 0xa9, 0x7c, 0xbc, 0x14, 0x63, 0x6f, 0x6a, 0xe3, 0x75, 0x4f, 0x35, 0x1e, + 0x31, 0x4e, 0x8b, 0x05, 0x72, 0x2c, 0x3b, 0xa9, 0xed, 0x95, 0x52, 0xaa, 0xbf, 0x8f, 0xc0, 0xbc, + 0x4a, 0x50, 0x1b, 0x58, 0x08, 0xd4, 0xc3, 0xd7, 0x29, 0xc2, 0xbf, 0x5c, 0x38, 0x07, 0x3d, 0x3a, + 0x3a, 0xac, 0x47, 0x1f, 0x42, 0x51, 0x67, 0xeb, 0x5c, 0xf2, 0x53, 0x96, 0x56, 0x1b, 0xca, 0xa7, + 0x5a, 0x6f, 0x85, 0xcd, 0x23, 0xc2, 0xe9, 0x14, 0xa3, 0x73, 0xfd, 0x8f, 0x45, 0x98, 0xde, 0x56, + 0x6d, 0x7c, 0xea, 0xcd, 0x7d, 0x28, 0xaa, 0x3e, 0x5e, 0x3b, 0xb1, 0xdc, 0x58, 0xcb, 0xa8, 0x6d, + 0x1a, 0xbb, 0x9a, 0x52, 0x9e, 0x84, 0x71, 0x45, 0x3b, 0xff, 0xd8, 0xd6, 0xac, 0xd6, 0x97, 0x05, + 0x98, 0x19, 0x98, 0x55, 0xb4, 0xb3, 0xb3, 0xab, 0xe8, 0x59, 0x4a, 0x7d, 0xf7, 0x2a, 0xa1, 0x78, + 0xcb, 0x9a, 0x67, 0x0d, 0xec, 0x72, 0x78, 0xe6, 0xfe, 0xe2, 0xd6, 0x73, 0xf4, 0xaa, 0xad, 0x67, + 0x0b, 0xa6, 0x7d, 0x44, 0x7b, 0x11, 0xea, 0xe1, 0x58, 0x43, 0x79, 0xde, 0xe4, 0x54, 0x02, 0xd1, + 0x22, 0xea, 0x24, 0xbe, 
0x61, 0xdd, 0x2e, 0x56, 0x46, 0x49, 0x5f, 0xfd, 0xff, 0xac, 0xbe, 0x55, + 0x9f, 0x76, 0x4b, 0xa1, 0xcc, 0x49, 0xb7, 0x12, 0x86, 0xb7, 0x76, 0xab, 0xe3, 0x57, 0xe8, 0x56, + 0x7f, 0x1e, 0x05, 0xe8, 0x2f, 0x63, 0x2d, 0xc1, 0xf8, 0x21, 0x46, 0x5e, 0xce, 0x2e, 0xd5, 0xd8, + 0x5a, 0xcf, 0xa0, 0xe4, 0x61, 0xe1, 0x72, 0x12, 0xea, 0x26, 0x30, 0x57, 0x98, 0x9d, 0x02, 0x58, + 0x4f, 0x61, 0x4c, 0x1f, 0xcf, 0xbc, 0xa6, 0xff, 0x65, 0xf9, 0x65, 0x83, 0x51, 0x7c, 0x62, 0xc7, + 0x18, 0xab, 0x0b, 0x45, 0x35, 0xdb, 0xea, 0x17, 0x53, 0x6e, 0xd8, 0xc3, 0xeb, 0x6d, 0x97, 0x12, + 0x79, 0x91, 0xd4, 0xd4, 0x6f, 0xb6, 0xe6, 0x1f, 0x98, 0xae, 0xc6, 0xae, 0x61, 0xba, 0x1a, 0xbf, + 0xc2, 0x74, 0x55, 0xfd, 0x01, 0x60, 0x6e, 0x3b, 0x19, 0x8e, 0xd3, 0xf0, 0x7e, 0x01, 0xb3, 0xfd, + 0x89, 0x59, 0x22, 0xde, 0xc3, 0xf9, 0x3a, 0xc4, 0x99, 0x14, 0xb5, 0xa3, 0x41, 0xd6, 0xb7, 0x05, + 0x98, 0x3b, 0x37, 0xb2, 0x9b, 0x58, 0x3e, 0xcc, 0xf4, 0xad, 0xe1, 0x4a, 0x7d, 0xb8, 0x62, 0x98, + 0x36, 0x0c, 0x91, 0x71, 0x73, 0x96, 0x99, 0x3d, 0xeb, 0x0d, 0x3c, 0xb1, 0x36, 0xe1, 0x56, 0xff, + 0x80, 0x42, 0x22, 0x2e, 0x1d, 0x4f, 0x05, 0x76, 0x9e, 0x22, 0x69, 0xa5, 0xc8, 0x8e, 0x02, 0xae, + 0xa8, 0xb8, 0xfe, 0x04, 0xac, 0x53, 0x9f, 0x18, 0xa8, 0x17, 0xb3, 0xe5, 0xa9, 0x98, 0x7d, 0x47, + 0xaf, 0x52, 0x4f, 0x73, 0x49, 0x98, 0x48, 0x3e, 0x4d, 0xe8, 0xb2, 0x59, 0x6e, 0x7c, 0x3a, 0xb4, + 0xa7, 0xb6, 0x0c, 0xc1, 0x05, 0x1e, 0x4a, 0x7e, 0xb6, 0xd3, 0x95, 0x06, 0xf4, 0x08, 0xd7, 0xa0, + 0xc7, 0xd2, 0xb5, 0x4e, 0xfb, 0x53, 0xd7, 0x3e, 0xed, 0x4f, 0xff, 0xcd, 0xd3, 0x7e, 0x79, 0xf8, + 0xfc, 0x79, 0xbe, 0x3e, 0xcc, 0x0c, 0x5d, 0x1f, 0x9e, 0x41, 0x29, 0xc4, 0xdc, 0xc5, 0x54, 0xaa, + 0x0a, 0x61, 0x72, 0xe0, 0x65, 0x8d, 0xd7, 0xda, 0x0d, 0x1b, 0x0c, 0x62, 0xab, 0xdb, 0xb5, 0x5e, + 0xc1, 0x6c, 0xa0, 0x12, 0xa2, 0x83, 0x02, 0x1d, 0xb5, 0x8a, 0xa4, 0x38, 0x44, 0x22, 0x5d, 0xbb, + 0x61, 0x97, 0x35, 0x41, 0x4b, 0xe3, 0x15, 0xe5, 0x2a, 0x94, 0xfb, 0xd1, 0xa1, 0x8f, 0x95, 0xa3, + 0x09, 0x59, 0x2b, 0xd8, 0xd3, 0x29, 0x4a, 0x9f, 0x6c, 0x17, 0x2c, 0xc6, 0x3d, 0xcc, 0x85, 0xc3, + 0x8e, 0x30, 0x37, 0xfb, 0x33, 0x65, 0x2a, 0xe7, 0xde, 0x0a, 0xf6, 0x6c, 0x4c, 0xb1, 0x75, 0x84, + 0x79, 0xbc, 0xc1, 0xe5, 0x19, 0x98, 0x4e, 0x53, 0x94, 0x6a, 0x3f, 0x96, 0xe7, 0x61, 0xee, 0x54, + 0xf6, 0xe3, 0xa4, 0xd7, 0xc3, 0xbc, 0xfa, 0x55, 0x01, 0xee, 0x74, 0x24, 0x8f, 0x5c, 0x19, 0x71, + 0xec, 0x75, 0x28, 0x09, 0x43, 0xdc, 0x1f, 0xa7, 0xaf, 0x56, 0xe8, 0x96, 0x60, 0xfc, 0x48, 0x3d, + 0x10, 0x95, 0x91, 0x1c, 0xb1, 0x62, 0x6c, 0xab, 0xbf, 0x14, 0x61, 0xb6, 0x43, 0x24, 0xf6, 0x09, + 0x7d, 0x7d, 0x1d, 0xdf, 0x1b, 0x1b, 0x30, 0xa6, 0xe6, 0xa1, 0x7c, 0x23, 0x55, 0x6c, 0x9a, 0x60, + 0xf2, 0x8d, 0x50, 0xb1, 0xe9, 0xbb, 0xef, 0x89, 0xff, 0x84, 0xef, 0x89, 0xcb, 0x6f, 0x0a, 0x50, + 0x75, 0x59, 0x90, 0xb1, 0xa7, 0xe5, 0x99, 0xb4, 0x56, 0x88, 0x6d, 0x45, 0xbb, 0x5d, 0xf8, 0x6c, + 0xc5, 0x40, 0x7a, 0x4c, 0xe5, 0x9b, 0x1a, 0xe3, 0xbd, 0x7a, 0x0f, 0x53, 0xbd, 0x68, 0xf2, 0xc9, + 0x3d, 0x24, 0xe2, 0xa2, 0xff, 0x98, 0x3c, 0x8d, 0xff, 0x7c, 0x37, 0x32, 0xfa, 0xa2, 0xd5, 0xfa, + 0x69, 0xe4, 0xde, 0x8b, 0x98, 0xac, 0xe5, 0x89, 0x5a, 0x7c, 0xa9, 0xae, 0xf6, 0x16, 0x6b, 0x6d, + 0x6d, 0xf6, 0x5b, 0x62, 0xb0, 0xdf, 0xf2, 0xc4, 0x7e, 0x6a, 0xb0, 0xbf, 0xb7, 0xb8, 0x1f, 0x1b, + 0xbc, 0x19, 0xa9, 0xc6, 0x4f, 0x9b, 0xcd, 0x96, 0x27, 0x9a, 0xcd, 0xd4, 0xa4, 0xd9, 0xdc, 0x5b, + 0x6c, 0x36, 0x63, 0xa3, 0x83, 0x71, 0xbd, 0xbb, 0x47, 0x7f, 0x06, 0x00, 0x00, 0xff, 0xff, 0xac, + 0x90, 0x60, 0xa0, 0x56, 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/feed_common.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/feed_common.pb.go new file mode 100644 index 0000000..aac75b7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/feed_common.pb.go @@ -0,0 +1,103 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/feed_common.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a price in a particular currency. +type Money struct { + // Three-character ISO 4217 currency code. + CurrencyCode *wrappers.StringValue `protobuf:"bytes,1,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + // Amount in micros. One million is equivalent to one unit. + AmountMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=amount_micros,json=amountMicros,proto3" json:"amount_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Money) Reset() { *m = Money{} } +func (m *Money) String() string { return proto.CompactTextString(m) } +func (*Money) ProtoMessage() {} +func (*Money) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_common_f503f359313a2ced, []int{0} +} +func (m *Money) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Money.Unmarshal(m, b) +} +func (m *Money) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Money.Marshal(b, m, deterministic) +} +func (dst *Money) XXX_Merge(src proto.Message) { + xxx_messageInfo_Money.Merge(dst, src) +} +func (m *Money) XXX_Size() int { + return xxx_messageInfo_Money.Size(m) +} +func (m *Money) XXX_DiscardUnknown() { + xxx_messageInfo_Money.DiscardUnknown(m) +} + +var xxx_messageInfo_Money proto.InternalMessageInfo + +func (m *Money) GetCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.CurrencyCode + } + return nil +} + +func (m *Money) GetAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.AmountMicros + } + return nil +} + +func init() { + proto.RegisterType((*Money)(nil), "google.ads.googleads.v1.common.Money") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/feed_common.proto", fileDescriptor_feed_common_f503f359313a2ced) +} + +var fileDescriptor_feed_common_f503f359313a2ced = []byte{ + // 328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xf3, 0x40, + 0x14, 0x85, 0x49, 0x7e, 0x7e, 0x17, 0xd1, 0x22, 0x74, 0x55, 0x6a, 0x29, 0xd2, 0x95, 0xab, 0x19, + 0xa3, 0xe2, 0x62, 0xdc, 0x98, 0x56, 0x2c, 0x2e, 0x0a, 0x45, 0x21, 0x0b, 0x09, 0x94, 0x69, 0xe6, + 0x76, 0x08, 0x34, 0x73, 0xc3, 0x4c, 0x52, 0xe9, 0x3b, 0xf8, 0x14, 0x2e, 0x7d, 0x14, 0x1f, 0xa5, + 0x4f, 0x21, 0xcd, 0x4d, 0xba, 0x11, 0x5d, 0xcd, 0x61, 0xee, 
0x77, 0xce, 0x99, 0xb9, 0xc1, 0xa5, + 0x46, 0xd4, 0x6b, 0xe0, 0x52, 0x39, 0x4e, 0x72, 0xaf, 0x36, 0x21, 0x4f, 0x31, 0xcf, 0xd1, 0xf0, + 0x15, 0x80, 0x5a, 0x90, 0x66, 0x85, 0xc5, 0x12, 0xbb, 0x43, 0xc2, 0x98, 0x54, 0x8e, 0x1d, 0x1c, + 0x6c, 0x13, 0x32, 0xa2, 0xfa, 0xcd, 0x9c, 0xd7, 0xf4, 0xb2, 0x5a, 0xf1, 0x37, 0x2b, 0x8b, 0x02, + 0xac, 0x23, 0x7f, 0x7f, 0xd0, 0x36, 0x16, 0x19, 0x97, 0xc6, 0x60, 0x29, 0xcb, 0x0c, 0x4d, 0x33, + 0x1d, 0xbd, 0x7b, 0xc1, 0xff, 0x19, 0x1a, 0xd8, 0x76, 0xa3, 0xa0, 0x93, 0x56, 0xd6, 0x82, 0x49, + 0xb7, 0x8b, 0x14, 0x15, 0xf4, 0xbc, 0x73, 0xef, 0xe2, 0xf8, 0x6a, 0xd0, 0x94, 0xb2, 0x36, 0x9f, + 0xbd, 0x94, 0x36, 0x33, 0x3a, 0x96, 0xeb, 0x0a, 0x9e, 0x4f, 0x5a, 0xcb, 0x04, 0x15, 0x74, 0xef, + 0x83, 0x8e, 0xcc, 0xb1, 0x32, 0xe5, 0x22, 0xcf, 0x52, 0x8b, 0xae, 0xe7, 0xd7, 0x11, 0x67, 0x3f, + 0x22, 0x9e, 0x4c, 0x79, 0x7b, 0xd3, 0x24, 0x90, 0x63, 0x56, 0x1b, 0xc6, 0x3b, 0x2f, 0x18, 0xa5, + 0x98, 0xb3, 0xbf, 0xff, 0x3c, 0x3e, 0x7d, 0x04, 0x50, 0x93, 0x5a, 0xcf, 0xf7, 0x99, 0x73, 0xef, + 0xf5, 0xa1, 0xb1, 0x68, 0x5c, 0x4b, 0xa3, 0x19, 0x5a, 0xcd, 0x35, 0x98, 0xba, 0xb1, 0x5d, 0x74, + 0x91, 0xb9, 0xdf, 0xf6, 0x7e, 0x47, 0xc7, 0x87, 0xff, 0x6f, 0x1a, 0x45, 0x9f, 0xfe, 0x70, 0x4a, + 0x61, 0x91, 0x72, 0x8c, 0xe4, 0x5e, 0xc5, 0x21, 0xa3, 0xce, 0xaf, 0x16, 0x48, 0x22, 0xe5, 0x92, + 0x03, 0x90, 0xc4, 0x61, 0x42, 0xc0, 0xce, 0x1f, 0xd1, 0xad, 0x10, 0x91, 0x72, 0x42, 0x1c, 0x10, + 0x21, 0xe2, 0x50, 0x08, 0x82, 0x96, 0x47, 0xf5, 0xeb, 0xae, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x4e, 0xc6, 0xd0, 0x04, 0x14, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/final_app_url.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/final_app_url.pb.go new file mode 100644 index 0000000..a34b7ee --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/final_app_url.pb.go @@ -0,0 +1,112 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/final_app_url.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A URL for deep linking into an app for the given operating system. +type FinalAppUrl struct { + // The operating system targeted by this URL. Required. + OsType enums.AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType `protobuf:"varint,1,opt,name=os_type,json=osType,proto3,enum=google.ads.googleads.v1.enums.AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType" json:"os_type,omitempty"` + // The app deep link URL. Deep links specify a location in an app that + // corresponds to the content you'd like to show, and should be of the form + // {scheme}://{host_path} + // The scheme identifies which app to open. 
For your app, you can use a custom + // scheme that starts with the app's name. The host and path specify the + // unique location in the app where your content exists. + // Example: "exampleapp://productid_1234". Required. + Url *wrappers.StringValue `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinalAppUrl) Reset() { *m = FinalAppUrl{} } +func (m *FinalAppUrl) String() string { return proto.CompactTextString(m) } +func (*FinalAppUrl) ProtoMessage() {} +func (*FinalAppUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_final_app_url_5bb78dc5e57caeaa, []int{0} +} +func (m *FinalAppUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinalAppUrl.Unmarshal(m, b) +} +func (m *FinalAppUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinalAppUrl.Marshal(b, m, deterministic) +} +func (dst *FinalAppUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinalAppUrl.Merge(dst, src) +} +func (m *FinalAppUrl) XXX_Size() int { + return xxx_messageInfo_FinalAppUrl.Size(m) +} +func (m *FinalAppUrl) XXX_DiscardUnknown() { + xxx_messageInfo_FinalAppUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_FinalAppUrl proto.InternalMessageInfo + +func (m *FinalAppUrl) GetOsType() enums.AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType { + if m != nil { + return m.OsType + } + return enums.AppUrlOperatingSystemTypeEnum_UNSPECIFIED +} + +func (m *FinalAppUrl) GetUrl() *wrappers.StringValue { + if m != nil { + return m.Url + } + return nil +} + +func init() { + proto.RegisterType((*FinalAppUrl)(nil), "google.ads.googleads.v1.common.FinalAppUrl") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/final_app_url.proto", fileDescriptor_final_app_url_5bb78dc5e57caeaa) +} + +var fileDescriptor_final_app_url_5bb78dc5e57caeaa = []byte{ + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xbf, 0x6a, 0xf3, 0x30, + 0x14, 0xc5, 0xb1, 0x03, 0xf9, 0xc0, 0x81, 0x8f, 0x92, 0x29, 0x84, 0x10, 0x42, 0xa6, 0x4c, 0x12, + 0x4e, 0x37, 0x75, 0x72, 0xfa, 0x27, 0x63, 0x43, 0xd2, 0x7a, 0x28, 0x06, 0xa3, 0xc4, 0x8a, 0x30, + 0xd8, 0xba, 0x42, 0x92, 0x53, 0xf2, 0x3a, 0x1d, 0x3b, 0xf4, 0x41, 0xfa, 0x28, 0xed, 0x4b, 0x14, + 0x4b, 0x76, 0xe8, 0xe2, 0x4e, 0xba, 0xf6, 0xfd, 0xdd, 0x73, 0xce, 0x95, 0x82, 0x25, 0x07, 0xe0, + 0x05, 0xc3, 0x34, 0xd3, 0xd8, 0x95, 0x75, 0x75, 0x0a, 0xf1, 0x01, 0xca, 0x12, 0x04, 0x3e, 0xe6, + 0x82, 0x16, 0x29, 0x95, 0x32, 0xad, 0x54, 0x81, 0xa4, 0x02, 0x03, 0xc3, 0xa9, 0x03, 0x11, 0xcd, + 0x34, 0xba, 0xcc, 0xa0, 0x53, 0x88, 0xdc, 0xcc, 0x38, 0xea, 0xd2, 0x64, 0xa2, 0x2a, 0x35, 0x6e, + 0xc4, 0x52, 0x90, 0x4c, 0x51, 0x93, 0x0b, 0x9e, 0xea, 0xb3, 0x36, 0xac, 0x4c, 0xcd, 0x59, 0x32, + 0x67, 0x31, 0x6e, 0x2c, 0xb0, 0xfd, 0xda, 0x57, 0x47, 0xfc, 0xaa, 0xa8, 0x94, 0x4c, 0xe9, 0xa6, + 0x3f, 0x69, 0x2d, 0x64, 0x8e, 0xa9, 0x10, 0x60, 0xa8, 0xc9, 0x41, 0x34, 0xdd, 0xf9, 0x87, 0x17, + 0x0c, 0x1e, 0xea, 0xe0, 0x91, 0x94, 0xcf, 0xaa, 0x18, 0x42, 0xf0, 0x0f, 0xb4, 0x95, 0x1f, 0x79, + 0x33, 0x6f, 0xf1, 0x7f, 0x19, 0xa3, 0xae, 0x15, 0x6c, 0x44, 0xe4, 0xe6, 0x1e, 0xdb, 0x80, 0x3b, + 0x9b, 0xef, 0xe9, 0x2c, 0xd9, 0xbd, 0xa8, 0xca, 0xee, 0xee, 0xb6, 0x0f, 0xba, 0x3e, 0x87, 0x28, + 0xe8, 0x55, 0xaa, 0x18, 0xf9, 0x33, 0x6f, 0x31, 0x58, 0x4e, 0x5a, 0xb3, 0x76, 0x19, 0xb4, 0x33, + 0x2a, 0x17, 0x3c, 0xa6, 0x45, 0xc5, 0xb6, 0x35, 0xb8, 0xfa, 0xf6, 0x82, 0xf9, 
0x01, 0x4a, 0xf4, + 0xf7, 0xc5, 0xae, 0xae, 0x7e, 0x2d, 0xb5, 0xa9, 0xc5, 0x36, 0xde, 0xcb, 0x5d, 0x33, 0xc3, 0xa1, + 0xa0, 0x82, 0x23, 0x50, 0x1c, 0x73, 0x26, 0xac, 0x55, 0x7b, 0xf9, 0x32, 0xd7, 0x5d, 0xef, 0x7b, + 0xe3, 0x8e, 0x37, 0xbf, 0xb7, 0x8e, 0xa2, 0x77, 0x7f, 0xba, 0x76, 0x62, 0x51, 0xa6, 0x91, 0x2b, + 0xeb, 0x2a, 0x0e, 0xd1, 0xad, 0xc5, 0x3e, 0x5b, 0x20, 0x89, 0x32, 0x9d, 0x5c, 0x80, 0x24, 0x0e, + 0x13, 0x07, 0x7c, 0xf9, 0x73, 0xf7, 0x97, 0x90, 0x28, 0xd3, 0x84, 0x5c, 0x10, 0x42, 0xe2, 0x90, + 0x10, 0x07, 0xed, 0xfb, 0x36, 0xdd, 0xf5, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xad, 0x08, 0x8b, + 0xc9, 0x7c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/frequency_cap.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/frequency_cap.pb.go new file mode 100644 index 0000000..5d301ee --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/frequency_cap.pb.go @@ -0,0 +1,185 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/frequency_cap.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A rule specifying the maximum number of times an ad (or some set of ads) can +// be shown to a user over a particular time period. +type FrequencyCapEntry struct { + // The key of a particular frequency cap. There can be no more + // than one frequency cap with the same key. + Key *FrequencyCapKey `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // Maximum number of events allowed during the time range by this cap. 
+ Cap *wrappers.Int32Value `protobuf:"bytes,2,opt,name=cap,proto3" json:"cap,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FrequencyCapEntry) Reset() { *m = FrequencyCapEntry{} } +func (m *FrequencyCapEntry) String() string { return proto.CompactTextString(m) } +func (*FrequencyCapEntry) ProtoMessage() {} +func (*FrequencyCapEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_77c2844bd792a9b5, []int{0} +} +func (m *FrequencyCapEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FrequencyCapEntry.Unmarshal(m, b) +} +func (m *FrequencyCapEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FrequencyCapEntry.Marshal(b, m, deterministic) +} +func (dst *FrequencyCapEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_FrequencyCapEntry.Merge(dst, src) +} +func (m *FrequencyCapEntry) XXX_Size() int { + return xxx_messageInfo_FrequencyCapEntry.Size(m) +} +func (m *FrequencyCapEntry) XXX_DiscardUnknown() { + xxx_messageInfo_FrequencyCapEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_FrequencyCapEntry proto.InternalMessageInfo + +func (m *FrequencyCapEntry) GetKey() *FrequencyCapKey { + if m != nil { + return m.Key + } + return nil +} + +func (m *FrequencyCapEntry) GetCap() *wrappers.Int32Value { + if m != nil { + return m.Cap + } + return nil +} + +// A group of fields used as keys for a frequency cap. +// There can be no more than one frequency cap with the same key. +type FrequencyCapKey struct { + // The level on which the cap is to be applied (e.g. ad group ad, ad group). + // The cap is applied to all the entities of this level. + Level enums.FrequencyCapLevelEnum_FrequencyCapLevel `protobuf:"varint,1,opt,name=level,proto3,enum=google.ads.googleads.v1.enums.FrequencyCapLevelEnum_FrequencyCapLevel" json:"level,omitempty"` + // The type of event that the cap applies to (e.g. impression). + EventType enums.FrequencyCapEventTypeEnum_FrequencyCapEventType `protobuf:"varint,3,opt,name=event_type,json=eventType,proto3,enum=google.ads.googleads.v1.enums.FrequencyCapEventTypeEnum_FrequencyCapEventType" json:"event_type,omitempty"` + // Unit of time the cap is defined at (e.g. day, week). + TimeUnit enums.FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit `protobuf:"varint,2,opt,name=time_unit,json=timeUnit,proto3,enum=google.ads.googleads.v1.enums.FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit" json:"time_unit,omitempty"` + // Number of time units the cap lasts. 
+ TimeLength *wrappers.Int32Value `protobuf:"bytes,4,opt,name=time_length,json=timeLength,proto3" json:"time_length,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FrequencyCapKey) Reset() { *m = FrequencyCapKey{} } +func (m *FrequencyCapKey) String() string { return proto.CompactTextString(m) } +func (*FrequencyCapKey) ProtoMessage() {} +func (*FrequencyCapKey) Descriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_77c2844bd792a9b5, []int{1} +} +func (m *FrequencyCapKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FrequencyCapKey.Unmarshal(m, b) +} +func (m *FrequencyCapKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FrequencyCapKey.Marshal(b, m, deterministic) +} +func (dst *FrequencyCapKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_FrequencyCapKey.Merge(dst, src) +} +func (m *FrequencyCapKey) XXX_Size() int { + return xxx_messageInfo_FrequencyCapKey.Size(m) +} +func (m *FrequencyCapKey) XXX_DiscardUnknown() { + xxx_messageInfo_FrequencyCapKey.DiscardUnknown(m) +} + +var xxx_messageInfo_FrequencyCapKey proto.InternalMessageInfo + +func (m *FrequencyCapKey) GetLevel() enums.FrequencyCapLevelEnum_FrequencyCapLevel { + if m != nil { + return m.Level + } + return enums.FrequencyCapLevelEnum_UNSPECIFIED +} + +func (m *FrequencyCapKey) GetEventType() enums.FrequencyCapEventTypeEnum_FrequencyCapEventType { + if m != nil { + return m.EventType + } + return enums.FrequencyCapEventTypeEnum_UNSPECIFIED +} + +func (m *FrequencyCapKey) GetTimeUnit() enums.FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit { + if m != nil { + return m.TimeUnit + } + return enums.FrequencyCapTimeUnitEnum_UNSPECIFIED +} + +func (m *FrequencyCapKey) GetTimeLength() *wrappers.Int32Value { + if m != nil { + return m.TimeLength + } + return nil +} + +func init() { + proto.RegisterType((*FrequencyCapEntry)(nil), "google.ads.googleads.v1.common.FrequencyCapEntry") + proto.RegisterType((*FrequencyCapKey)(nil), "google.ads.googleads.v1.common.FrequencyCapKey") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/frequency_cap.proto", fileDescriptor_frequency_cap_77c2844bd792a9b5) +} + +var fileDescriptor_frequency_cap_77c2844bd792a9b5 = []byte{ + // 466 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xcf, 0x6a, 0xd4, 0x40, + 0x18, 0x27, 0x1b, 0x15, 0x3b, 0x85, 0x8a, 0x39, 0x2d, 0x55, 0x8a, 0xec, 0xc9, 0x8b, 0x33, 0x24, + 0x3d, 0x08, 0x69, 0x2f, 0x69, 0xdd, 0x16, 0x71, 0x91, 0x12, 0xea, 0x1e, 0x24, 0xb0, 0x4c, 0x93, + 0xaf, 0x31, 0x98, 0xcc, 0x8c, 0xc9, 0x64, 0x25, 0x0f, 0x20, 0xbe, 0x87, 0x47, 0x1f, 0xc5, 0x47, + 0x11, 0x1f, 0x42, 0x66, 0x26, 0x13, 0x5d, 0x97, 0xb5, 0xe4, 0x94, 0x5f, 0xe6, 0xfb, 0xfd, 0xf9, + 0xf2, 0x7d, 0x13, 0x14, 0xe4, 0x9c, 0xe7, 0x25, 0x10, 0x9a, 0x35, 0xc4, 0x40, 0x85, 0xd6, 0x3e, + 0x49, 0x79, 0x55, 0x71, 0x46, 0x6e, 0x6b, 0xf8, 0xd4, 0x02, 0x4b, 0xbb, 0x55, 0x4a, 0x05, 0x16, + 0x35, 0x97, 0xdc, 0x3b, 0x32, 0x44, 0x4c, 0xb3, 0x06, 0x0f, 0x1a, 0xbc, 0xf6, 0xb1, 0xd1, 0x1c, + 0x9e, 0xee, 0xf2, 0x04, 0xd6, 0x56, 0xcd, 0xa6, 0xe5, 0x0a, 0xd6, 0xc0, 0xe4, 0x4a, 0x76, 0x02, + 0x8c, 0xfb, 0xe1, 0xcb, 0x31, 0xea, 0x12, 0xd6, 0x50, 0xf6, 0xc2, 0x93, 0x31, 0x42, 0x59, 0x54, + 0xb0, 0x6a, 0x59, 0x21, 0x7b, 0x71, 0xff, 0x4d, 0x44, 0xbf, 0xdd, 0xb4, 0xb7, 0xe4, 0x73, 0x4d, + 0x85, 0x80, 0xba, 0xe9, 0xeb, 0x4f, 0xad, 0xb9, 0x28, 0x08, 0x65, 0x8c, 0x4b, 0x2a, 0x0b, 0xce, + 
0xfa, 0xea, 0xec, 0x8b, 0x83, 0x1e, 0x5f, 0x58, 0xff, 0x73, 0x2a, 0xe6, 0x4c, 0xd6, 0x9d, 0x17, + 0x21, 0xf7, 0x23, 0x74, 0x53, 0xe7, 0x99, 0xf3, 0x7c, 0x3f, 0x20, 0xf8, 0xff, 0x53, 0xc3, 0x7f, + 0xeb, 0xdf, 0x40, 0x17, 0x2b, 0xad, 0xf7, 0x02, 0xb9, 0x29, 0x15, 0xd3, 0x89, 0xb6, 0x78, 0x62, + 0x2d, 0x6c, 0x93, 0xf8, 0x35, 0x93, 0xc7, 0xc1, 0x92, 0x96, 0x2d, 0xc4, 0x8a, 0x37, 0xfb, 0xea, + 0xa2, 0x47, 0xff, 0xf8, 0x78, 0x09, 0xba, 0xaf, 0xa7, 0xa4, 0xfb, 0x38, 0x08, 0x2e, 0x76, 0xf6, + 0xa1, 0xc7, 0xb4, 0xd1, 0xc6, 0x42, 0xe9, 0xe6, 0xac, 0xad, 0xb6, 0x4f, 0x63, 0x63, 0xea, 0x55, + 0x08, 0xfd, 0xd9, 0xe0, 0xd4, 0xd5, 0x11, 0x6f, 0x47, 0x44, 0xcc, 0x95, 0xf8, 0xba, 0x13, 0xb0, + 0x15, 0x33, 0x54, 0xe2, 0x3d, 0xb0, 0xd0, 0x2b, 0xd0, 0xde, 0xb0, 0x39, 0x3d, 0x95, 0x83, 0x60, + 0x31, 0x22, 0xed, 0xba, 0xa8, 0xe0, 0x1d, 0x2b, 0xe4, 0x56, 0x98, 0x2d, 0xc4, 0x0f, 0x65, 0x8f, + 0xbc, 0x53, 0xb4, 0xaf, 0xa3, 0x4a, 0x60, 0xb9, 0xfc, 0x30, 0xbd, 0x77, 0xf7, 0x0a, 0x90, 0xe2, + 0x2f, 0x34, 0xfd, 0xec, 0x97, 0x83, 0x66, 0x29, 0xaf, 0xee, 0x58, 0xfa, 0xd9, 0xc6, 0xad, 0xb9, + 0x52, 0x9e, 0x57, 0xce, 0xfb, 0x57, 0xbd, 0x28, 0xe7, 0x25, 0x65, 0x39, 0xe6, 0x75, 0x4e, 0x72, + 0x60, 0x3a, 0xd1, 0x5e, 0x6c, 0x51, 0x34, 0xbb, 0x7e, 0xd9, 0x13, 0xf3, 0xf8, 0x36, 0x71, 0x2f, + 0xa3, 0xe8, 0xfb, 0xe4, 0xe8, 0xd2, 0x98, 0x45, 0x59, 0x83, 0x0d, 0x54, 0x68, 0xe9, 0xe3, 0x73, + 0x4d, 0xfb, 0x61, 0x09, 0x49, 0x94, 0x35, 0xc9, 0x40, 0x48, 0x96, 0x7e, 0x62, 0x08, 0x3f, 0x27, + 0x33, 0x73, 0x1a, 0x86, 0x51, 0xd6, 0x84, 0xe1, 0x40, 0x09, 0xc3, 0xa5, 0x1f, 0x86, 0x86, 0x74, + 0xf3, 0x40, 0x77, 0x77, 0xfc, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xcd, 0xa5, 0x50, 0x57, 0x4f, 0x04, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/keyword_plan_common.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/keyword_plan_common.pb.go new file mode 100644 index 0000000..05de623 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/keyword_plan_common.pb.go @@ -0,0 +1,108 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/keyword_plan_common.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Historical metrics. +type KeywordPlanHistoricalMetrics struct { + // Average monthly searches for the past 12 months. + AvgMonthlySearches *wrappers.Int64Value `protobuf:"bytes,1,opt,name=avg_monthly_searches,json=avgMonthlySearches,proto3" json:"avg_monthly_searches,omitempty"` + // The competition level for the query. 
+ Competition enums.KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel `protobuf:"varint,2,opt,name=competition,proto3,enum=google.ads.googleads.v1.enums.KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel" json:"competition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanHistoricalMetrics) Reset() { *m = KeywordPlanHistoricalMetrics{} } +func (m *KeywordPlanHistoricalMetrics) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanHistoricalMetrics) ProtoMessage() {} +func (*KeywordPlanHistoricalMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_common_ccb20e2e02a00391, []int{0} +} +func (m *KeywordPlanHistoricalMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanHistoricalMetrics.Unmarshal(m, b) +} +func (m *KeywordPlanHistoricalMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanHistoricalMetrics.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanHistoricalMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanHistoricalMetrics.Merge(dst, src) +} +func (m *KeywordPlanHistoricalMetrics) XXX_Size() int { + return xxx_messageInfo_KeywordPlanHistoricalMetrics.Size(m) +} +func (m *KeywordPlanHistoricalMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanHistoricalMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanHistoricalMetrics proto.InternalMessageInfo + +func (m *KeywordPlanHistoricalMetrics) GetAvgMonthlySearches() *wrappers.Int64Value { + if m != nil { + return m.AvgMonthlySearches + } + return nil +} + +func (m *KeywordPlanHistoricalMetrics) GetCompetition() enums.KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel { + if m != nil { + return m.Competition + } + return enums.KeywordPlanCompetitionLevelEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*KeywordPlanHistoricalMetrics)(nil), "google.ads.googleads.v1.common.KeywordPlanHistoricalMetrics") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/keyword_plan_common.proto", fileDescriptor_keyword_plan_common_ccb20e2e02a00391) +} + +var fileDescriptor_keyword_plan_common_ccb20e2e02a00391 = []byte{ + // 388 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xcf, 0x6a, 0xdb, 0x30, + 0x18, 0xc7, 0x1e, 0xec, 0xe0, 0xc0, 0x0e, 0x66, 0x8c, 0x90, 0x85, 0x10, 0x72, 0xca, 0x49, 0xc2, + 0xd9, 0x18, 0x43, 0x3b, 0x39, 0x6b, 0x49, 0x4b, 0x1b, 0x08, 0x29, 0xf8, 0x10, 0x0c, 0x46, 0xb1, + 0x55, 0xc5, 0x54, 0x96, 0x8c, 0x24, 0x3b, 0x04, 0xfa, 0x34, 0x3d, 0xf6, 0x51, 0xfa, 0x28, 0xed, + 0xa9, 0x6f, 0x50, 0x6c, 0xd9, 0x69, 0x4a, 0x49, 0x4e, 0xfe, 0xac, 0xef, 0xf7, 0x47, 0xbf, 0xef, + 0x93, 0xf3, 0x97, 0x0a, 0x41, 0x19, 0x81, 0x38, 0x51, 0xd0, 0x94, 0x55, 0x55, 0x7a, 0x30, 0x16, + 0x59, 0x26, 0x38, 0xbc, 0x23, 0xbb, 0xad, 0x90, 0x49, 0x94, 0x33, 0xcc, 0x23, 0x73, 0x06, 0x72, + 0x29, 0xb4, 0x70, 0x07, 0x06, 0x0e, 0x70, 0xa2, 0xc0, 0x9e, 0x09, 0x4a, 0x0f, 0x18, 0x54, 0x6f, + 0x7a, 0x4c, 0x99, 0xf0, 0x22, 0x53, 0x9f, 0x84, 0x73, 0xa2, 0x53, 0x9d, 0x0a, 0x1e, 0x31, 0x52, + 0x12, 0x66, 0x3c, 0x7a, 0x8d, 0x07, 0xac, 0xff, 0xd6, 0xc5, 0x2d, 0xdc, 0x4a, 0x9c, 0xe7, 0x44, + 0xaa, 0xa6, 0xdf, 0x6f, 0x3d, 0xf2, 0x14, 0x62, 0xce, 0x85, 0xc6, 0x95, 0x44, 0xd3, 0x1d, 0xbd, + 0x58, 0x4e, 0xff, 0xca, 0xd8, 0x2c, 0x18, 0xe6, 0x17, 0xa9, 0xd2, 0x42, 0xa6, 0x31, 0x66, 0x73, + 0xa2, 0x65, 0x1a, 
0x2b, 0x77, 0xee, 0x7c, 0xc7, 0x25, 0x8d, 0x32, 0xc1, 0xf5, 0x86, 0xed, 0x22, + 0x45, 0xb0, 0x8c, 0x37, 0x44, 0x75, 0xad, 0xa1, 0x35, 0xee, 0x4c, 0x7e, 0x36, 0xb1, 0x40, 0xeb, + 0x0e, 0x2e, 0xb9, 0xfe, 0xf3, 0x3b, 0xc0, 0xac, 0x20, 0x4b, 0x17, 0x97, 0x74, 0x6e, 0x78, 0x37, + 0x0d, 0xcd, 0xbd, 0x77, 0x3a, 0x07, 0x41, 0xba, 0xf6, 0xd0, 0x1a, 0x7f, 0x9b, 0xac, 0xc0, 0xb1, + 0x39, 0xd5, 0x73, 0x00, 0x07, 0x17, 0xfc, 0xff, 0x4e, 0xbe, 0xae, 0x86, 0x70, 0xce, 0x8b, 0xec, + 0x54, 0x7f, 0x79, 0x68, 0x37, 0x7d, 0xb5, 0x9c, 0x51, 0x2c, 0x32, 0x70, 0x7a, 0x2d, 0xd3, 0x1f, + 0x1f, 0x05, 0x33, 0xc1, 0x17, 0x55, 0xbc, 0x85, 0xb5, 0x3a, 0x6b, 0x98, 0x54, 0x30, 0xcc, 0x29, + 0x10, 0x92, 0x42, 0x4a, 0x78, 0x1d, 0xbe, 0x5d, 0x60, 0x9e, 0xaa, 0x63, 0x2f, 0xe5, 0x9f, 0xf9, + 0x3c, 0xd8, 0x5f, 0x66, 0xbe, 0xff, 0x68, 0x0f, 0x66, 0x46, 0xcc, 0x4f, 0x14, 0x30, 0x65, 0x55, + 0x05, 0x1e, 0x30, 0x9e, 0x4f, 0x2d, 0x20, 0xf4, 0x13, 0x15, 0xee, 0x01, 0x61, 0xe0, 0x85, 0x06, + 0xf0, 0x6c, 0x8f, 0xcc, 0x29, 0x42, 0x7e, 0xa2, 0x10, 0xda, 0x43, 0x10, 0x0a, 0x3c, 0x84, 0x0c, + 0x68, 0xfd, 0xb5, 0xbe, 0xdd, 0xaf, 0xb7, 0x00, 0x00, 0x00, 0xff, 0xff, 0x22, 0x94, 0x59, 0xd1, + 0xc6, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/matching_function.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/matching_function.pb.go new file mode 100644 index 0000000..9bc71f2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/matching_function.pb.go @@ -0,0 +1,731 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/matching_function.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Matching function associated with a +// CustomerFeed, CampaignFeed, or AdGroupFeed. The matching function is used +// to filter the set of feed items selected. +type MatchingFunction struct { + // String representation of the Function. + // + // Examples: + // 1) IDENTITY(true) or IDENTITY(false). All or none feed items serve. + // 2) EQUALS(CONTEXT.DEVICE,"Mobile") + // 3) IN(FEED_ITEM_ID,{1000001,1000002,1000003}) + // 4) CONTAINS_ANY(FeedAttribute[12345678,0],{"Mars cruise","Venus cruise"}) + // 5) AND(IN(FEED_ITEM_ID,{10001,10002}),EQUALS(CONTEXT.DEVICE,"Mobile")) + // See + // + // https: + // //developers.google.com/adwords/api/docs/guides/feed-matching-functions + // + // Note that because multiple strings may represent the same underlying + // function (whitespace and single versus double quotation marks, for + // example), the value returned may not be identical to the string sent in a + // mutate request. 
+ FunctionString *wrappers.StringValue `protobuf:"bytes,1,opt,name=function_string,json=functionString,proto3" json:"function_string,omitempty"` + // Operator for a function. + Operator enums.MatchingFunctionOperatorEnum_MatchingFunctionOperator `protobuf:"varint,4,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.MatchingFunctionOperatorEnum_MatchingFunctionOperator" json:"operator,omitempty"` + // The operands on the left hand side of the equation. This is also the + // operand to be used for single operand expressions such as NOT. + LeftOperands []*Operand `protobuf:"bytes,2,rep,name=left_operands,json=leftOperands,proto3" json:"left_operands,omitempty"` + // The operands on the right hand side of the equation. + RightOperands []*Operand `protobuf:"bytes,3,rep,name=right_operands,json=rightOperands,proto3" json:"right_operands,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MatchingFunction) Reset() { *m = MatchingFunction{} } +func (m *MatchingFunction) String() string { return proto.CompactTextString(m) } +func (*MatchingFunction) ProtoMessage() {} +func (*MatchingFunction) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{0} +} +func (m *MatchingFunction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MatchingFunction.Unmarshal(m, b) +} +func (m *MatchingFunction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MatchingFunction.Marshal(b, m, deterministic) +} +func (dst *MatchingFunction) XXX_Merge(src proto.Message) { + xxx_messageInfo_MatchingFunction.Merge(dst, src) +} +func (m *MatchingFunction) XXX_Size() int { + return xxx_messageInfo_MatchingFunction.Size(m) +} +func (m *MatchingFunction) XXX_DiscardUnknown() { + xxx_messageInfo_MatchingFunction.DiscardUnknown(m) +} + +var xxx_messageInfo_MatchingFunction proto.InternalMessageInfo + +func (m *MatchingFunction) GetFunctionString() *wrappers.StringValue { + if m != nil { + return m.FunctionString + } + return nil +} + +func (m *MatchingFunction) GetOperator() enums.MatchingFunctionOperatorEnum_MatchingFunctionOperator { + if m != nil { + return m.Operator + } + return enums.MatchingFunctionOperatorEnum_UNSPECIFIED +} + +func (m *MatchingFunction) GetLeftOperands() []*Operand { + if m != nil { + return m.LeftOperands + } + return nil +} + +func (m *MatchingFunction) GetRightOperands() []*Operand { + if m != nil { + return m.RightOperands + } + return nil +} + +// An operand in a matching function. +type Operand struct { + // Different operands that can be used in a matching function. Required. 
+ // + // Types that are valid to be assigned to FunctionArgumentOperand: + // *Operand_ConstantOperand_ + // *Operand_FeedAttributeOperand_ + // *Operand_FunctionOperand_ + // *Operand_RequestContextOperand_ + FunctionArgumentOperand isOperand_FunctionArgumentOperand `protobuf_oneof:"function_argument_operand"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operand) Reset() { *m = Operand{} } +func (m *Operand) String() string { return proto.CompactTextString(m) } +func (*Operand) ProtoMessage() {} +func (*Operand) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{1} +} +func (m *Operand) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operand.Unmarshal(m, b) +} +func (m *Operand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operand.Marshal(b, m, deterministic) +} +func (dst *Operand) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operand.Merge(dst, src) +} +func (m *Operand) XXX_Size() int { + return xxx_messageInfo_Operand.Size(m) +} +func (m *Operand) XXX_DiscardUnknown() { + xxx_messageInfo_Operand.DiscardUnknown(m) +} + +var xxx_messageInfo_Operand proto.InternalMessageInfo + +type isOperand_FunctionArgumentOperand interface { + isOperand_FunctionArgumentOperand() +} + +type Operand_ConstantOperand_ struct { + ConstantOperand *Operand_ConstantOperand `protobuf:"bytes,1,opt,name=constant_operand,json=constantOperand,proto3,oneof"` +} + +type Operand_FeedAttributeOperand_ struct { + FeedAttributeOperand *Operand_FeedAttributeOperand `protobuf:"bytes,2,opt,name=feed_attribute_operand,json=feedAttributeOperand,proto3,oneof"` +} + +type Operand_FunctionOperand_ struct { + FunctionOperand *Operand_FunctionOperand `protobuf:"bytes,3,opt,name=function_operand,json=functionOperand,proto3,oneof"` +} + +type Operand_RequestContextOperand_ struct { + RequestContextOperand *Operand_RequestContextOperand `protobuf:"bytes,4,opt,name=request_context_operand,json=requestContextOperand,proto3,oneof"` +} + +func (*Operand_ConstantOperand_) isOperand_FunctionArgumentOperand() {} + +func (*Operand_FeedAttributeOperand_) isOperand_FunctionArgumentOperand() {} + +func (*Operand_FunctionOperand_) isOperand_FunctionArgumentOperand() {} + +func (*Operand_RequestContextOperand_) isOperand_FunctionArgumentOperand() {} + +func (m *Operand) GetFunctionArgumentOperand() isOperand_FunctionArgumentOperand { + if m != nil { + return m.FunctionArgumentOperand + } + return nil +} + +func (m *Operand) GetConstantOperand() *Operand_ConstantOperand { + if x, ok := m.GetFunctionArgumentOperand().(*Operand_ConstantOperand_); ok { + return x.ConstantOperand + } + return nil +} + +func (m *Operand) GetFeedAttributeOperand() *Operand_FeedAttributeOperand { + if x, ok := m.GetFunctionArgumentOperand().(*Operand_FeedAttributeOperand_); ok { + return x.FeedAttributeOperand + } + return nil +} + +func (m *Operand) GetFunctionOperand() *Operand_FunctionOperand { + if x, ok := m.GetFunctionArgumentOperand().(*Operand_FunctionOperand_); ok { + return x.FunctionOperand + } + return nil +} + +func (m *Operand) GetRequestContextOperand() *Operand_RequestContextOperand { + if x, ok := m.GetFunctionArgumentOperand().(*Operand_RequestContextOperand_); ok { + return x.RequestContextOperand + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Operand) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Operand_OneofMarshaler, _Operand_OneofUnmarshaler, _Operand_OneofSizer, []interface{}{ + (*Operand_ConstantOperand_)(nil), + (*Operand_FeedAttributeOperand_)(nil), + (*Operand_FunctionOperand_)(nil), + (*Operand_RequestContextOperand_)(nil), + } +} + +func _Operand_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Operand) + // function_argument_operand + switch x := m.FunctionArgumentOperand.(type) { + case *Operand_ConstantOperand_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConstantOperand); err != nil { + return err + } + case *Operand_FeedAttributeOperand_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedAttributeOperand); err != nil { + return err + } + case *Operand_FunctionOperand_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FunctionOperand); err != nil { + return err + } + case *Operand_RequestContextOperand_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RequestContextOperand); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Operand.FunctionArgumentOperand has unexpected type %T", x) + } + return nil +} + +func _Operand_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Operand) + switch tag { + case 1: // function_argument_operand.constant_operand + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Operand_ConstantOperand) + err := b.DecodeMessage(msg) + m.FunctionArgumentOperand = &Operand_ConstantOperand_{msg} + return true, err + case 2: // function_argument_operand.feed_attribute_operand + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Operand_FeedAttributeOperand) + err := b.DecodeMessage(msg) + m.FunctionArgumentOperand = &Operand_FeedAttributeOperand_{msg} + return true, err + case 3: // function_argument_operand.function_operand + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Operand_FunctionOperand) + err := b.DecodeMessage(msg) + m.FunctionArgumentOperand = &Operand_FunctionOperand_{msg} + return true, err + case 4: // function_argument_operand.request_context_operand + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Operand_RequestContextOperand) + err := b.DecodeMessage(msg) + m.FunctionArgumentOperand = &Operand_RequestContextOperand_{msg} + return true, err + default: + return false, nil + } +} + +func _Operand_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Operand) + // function_argument_operand + switch x := m.FunctionArgumentOperand.(type) { + case *Operand_ConstantOperand_: + s := proto.Size(x.ConstantOperand) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_FeedAttributeOperand_: + s := proto.Size(x.FeedAttributeOperand) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_FunctionOperand_: + s := proto.Size(x.FunctionOperand) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_RequestContextOperand_: + s := proto.Size(x.RequestContextOperand) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T 
in oneof", x)) + } + return n +} + +// A constant operand in a matching function. +type Operand_ConstantOperand struct { + // Constant operand values. Required. + // + // Types that are valid to be assigned to ConstantOperandValue: + // *Operand_ConstantOperand_StringValue + // *Operand_ConstantOperand_LongValue + // *Operand_ConstantOperand_BooleanValue + // *Operand_ConstantOperand_DoubleValue + ConstantOperandValue isOperand_ConstantOperand_ConstantOperandValue `protobuf_oneof:"constant_operand_value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operand_ConstantOperand) Reset() { *m = Operand_ConstantOperand{} } +func (m *Operand_ConstantOperand) String() string { return proto.CompactTextString(m) } +func (*Operand_ConstantOperand) ProtoMessage() {} +func (*Operand_ConstantOperand) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{1, 0} +} +func (m *Operand_ConstantOperand) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operand_ConstantOperand.Unmarshal(m, b) +} +func (m *Operand_ConstantOperand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operand_ConstantOperand.Marshal(b, m, deterministic) +} +func (dst *Operand_ConstantOperand) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operand_ConstantOperand.Merge(dst, src) +} +func (m *Operand_ConstantOperand) XXX_Size() int { + return xxx_messageInfo_Operand_ConstantOperand.Size(m) +} +func (m *Operand_ConstantOperand) XXX_DiscardUnknown() { + xxx_messageInfo_Operand_ConstantOperand.DiscardUnknown(m) +} + +var xxx_messageInfo_Operand_ConstantOperand proto.InternalMessageInfo + +type isOperand_ConstantOperand_ConstantOperandValue interface { + isOperand_ConstantOperand_ConstantOperandValue() +} + +type Operand_ConstantOperand_StringValue struct { + StringValue *wrappers.StringValue `protobuf:"bytes,1,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Operand_ConstantOperand_LongValue struct { + LongValue *wrappers.Int64Value `protobuf:"bytes,2,opt,name=long_value,json=longValue,proto3,oneof"` +} + +type Operand_ConstantOperand_BooleanValue struct { + BooleanValue *wrappers.BoolValue `protobuf:"bytes,3,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Operand_ConstantOperand_DoubleValue struct { + DoubleValue *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +func (*Operand_ConstantOperand_StringValue) isOperand_ConstantOperand_ConstantOperandValue() {} + +func (*Operand_ConstantOperand_LongValue) isOperand_ConstantOperand_ConstantOperandValue() {} + +func (*Operand_ConstantOperand_BooleanValue) isOperand_ConstantOperand_ConstantOperandValue() {} + +func (*Operand_ConstantOperand_DoubleValue) isOperand_ConstantOperand_ConstantOperandValue() {} + +func (m *Operand_ConstantOperand) GetConstantOperandValue() isOperand_ConstantOperand_ConstantOperandValue { + if m != nil { + return m.ConstantOperandValue + } + return nil +} + +func (m *Operand_ConstantOperand) GetStringValue() *wrappers.StringValue { + if x, ok := m.GetConstantOperandValue().(*Operand_ConstantOperand_StringValue); ok { + return x.StringValue + } + return nil +} + +func (m *Operand_ConstantOperand) GetLongValue() *wrappers.Int64Value { + if x, ok := m.GetConstantOperandValue().(*Operand_ConstantOperand_LongValue); ok { + return x.LongValue + } + return nil +} + +func (m *Operand_ConstantOperand) GetBooleanValue() 
*wrappers.BoolValue { + if x, ok := m.GetConstantOperandValue().(*Operand_ConstantOperand_BooleanValue); ok { + return x.BooleanValue + } + return nil +} + +func (m *Operand_ConstantOperand) GetDoubleValue() *wrappers.DoubleValue { + if x, ok := m.GetConstantOperandValue().(*Operand_ConstantOperand_DoubleValue); ok { + return x.DoubleValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Operand_ConstantOperand) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Operand_ConstantOperand_OneofMarshaler, _Operand_ConstantOperand_OneofUnmarshaler, _Operand_ConstantOperand_OneofSizer, []interface{}{ + (*Operand_ConstantOperand_StringValue)(nil), + (*Operand_ConstantOperand_LongValue)(nil), + (*Operand_ConstantOperand_BooleanValue)(nil), + (*Operand_ConstantOperand_DoubleValue)(nil), + } +} + +func _Operand_ConstantOperand_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Operand_ConstantOperand) + // constant_operand_value + switch x := m.ConstantOperandValue.(type) { + case *Operand_ConstantOperand_StringValue: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StringValue); err != nil { + return err + } + case *Operand_ConstantOperand_LongValue: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LongValue); err != nil { + return err + } + case *Operand_ConstantOperand_BooleanValue: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BooleanValue); err != nil { + return err + } + case *Operand_ConstantOperand_DoubleValue: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DoubleValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Operand_ConstantOperand.ConstantOperandValue has unexpected type %T", x) + } + return nil +} + +func _Operand_ConstantOperand_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Operand_ConstantOperand) + switch tag { + case 1: // constant_operand_value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ConstantOperandValue = &Operand_ConstantOperand_StringValue{msg} + return true, err + case 2: // constant_operand_value.long_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.ConstantOperandValue = &Operand_ConstantOperand_LongValue{msg} + return true, err + case 3: // constant_operand_value.boolean_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.BoolValue) + err := b.DecodeMessage(msg) + m.ConstantOperandValue = &Operand_ConstantOperand_BooleanValue{msg} + return true, err + case 4: // constant_operand_value.double_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.DoubleValue) + err := b.DecodeMessage(msg) + m.ConstantOperandValue = &Operand_ConstantOperand_DoubleValue{msg} + return true, err + default: + return false, nil + } +} + +func _Operand_ConstantOperand_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Operand_ConstantOperand) + // constant_operand_value + switch x := m.ConstantOperandValue.(type) { + case *Operand_ConstantOperand_StringValue: + s := 
proto.Size(x.StringValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_ConstantOperand_LongValue: + s := proto.Size(x.LongValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_ConstantOperand_BooleanValue: + s := proto.Size(x.BooleanValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operand_ConstantOperand_DoubleValue: + s := proto.Size(x.DoubleValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A feed attribute operand in a matching function. +// Used to represent a feed attribute in feed. +type Operand_FeedAttributeOperand struct { + // The associated feed. Required. + FeedId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=feed_id,json=feedId,proto3" json:"feed_id,omitempty"` + // Id of the referenced feed attribute. Required. + FeedAttributeId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=feed_attribute_id,json=feedAttributeId,proto3" json:"feed_attribute_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operand_FeedAttributeOperand) Reset() { *m = Operand_FeedAttributeOperand{} } +func (m *Operand_FeedAttributeOperand) String() string { return proto.CompactTextString(m) } +func (*Operand_FeedAttributeOperand) ProtoMessage() {} +func (*Operand_FeedAttributeOperand) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{1, 1} +} +func (m *Operand_FeedAttributeOperand) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operand_FeedAttributeOperand.Unmarshal(m, b) +} +func (m *Operand_FeedAttributeOperand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operand_FeedAttributeOperand.Marshal(b, m, deterministic) +} +func (dst *Operand_FeedAttributeOperand) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operand_FeedAttributeOperand.Merge(dst, src) +} +func (m *Operand_FeedAttributeOperand) XXX_Size() int { + return xxx_messageInfo_Operand_FeedAttributeOperand.Size(m) +} +func (m *Operand_FeedAttributeOperand) XXX_DiscardUnknown() { + xxx_messageInfo_Operand_FeedAttributeOperand.DiscardUnknown(m) +} + +var xxx_messageInfo_Operand_FeedAttributeOperand proto.InternalMessageInfo + +func (m *Operand_FeedAttributeOperand) GetFeedId() *wrappers.Int64Value { + if m != nil { + return m.FeedId + } + return nil +} + +func (m *Operand_FeedAttributeOperand) GetFeedAttributeId() *wrappers.Int64Value { + if m != nil { + return m.FeedAttributeId + } + return nil +} + +// A function operand in a matching function. +// Used to represent nested functions. +type Operand_FunctionOperand struct { + // The matching function held in this operand. 
+ MatchingFunction *MatchingFunction `protobuf:"bytes,1,opt,name=matching_function,json=matchingFunction,proto3" json:"matching_function,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operand_FunctionOperand) Reset() { *m = Operand_FunctionOperand{} } +func (m *Operand_FunctionOperand) String() string { return proto.CompactTextString(m) } +func (*Operand_FunctionOperand) ProtoMessage() {} +func (*Operand_FunctionOperand) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{1, 2} +} +func (m *Operand_FunctionOperand) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operand_FunctionOperand.Unmarshal(m, b) +} +func (m *Operand_FunctionOperand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operand_FunctionOperand.Marshal(b, m, deterministic) +} +func (dst *Operand_FunctionOperand) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operand_FunctionOperand.Merge(dst, src) +} +func (m *Operand_FunctionOperand) XXX_Size() int { + return xxx_messageInfo_Operand_FunctionOperand.Size(m) +} +func (m *Operand_FunctionOperand) XXX_DiscardUnknown() { + xxx_messageInfo_Operand_FunctionOperand.DiscardUnknown(m) +} + +var xxx_messageInfo_Operand_FunctionOperand proto.InternalMessageInfo + +func (m *Operand_FunctionOperand) GetMatchingFunction() *MatchingFunction { + if m != nil { + return m.MatchingFunction + } + return nil +} + +// An operand in a function referring to a value in the request context. +type Operand_RequestContextOperand struct { + // Type of value to be referred in the request context. + ContextType enums.MatchingFunctionContextTypeEnum_MatchingFunctionContextType `protobuf:"varint,1,opt,name=context_type,json=contextType,proto3,enum=google.ads.googleads.v1.enums.MatchingFunctionContextTypeEnum_MatchingFunctionContextType" json:"context_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operand_RequestContextOperand) Reset() { *m = Operand_RequestContextOperand{} } +func (m *Operand_RequestContextOperand) String() string { return proto.CompactTextString(m) } +func (*Operand_RequestContextOperand) ProtoMessage() {} +func (*Operand_RequestContextOperand) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_724626b702bec12d, []int{1, 3} +} +func (m *Operand_RequestContextOperand) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operand_RequestContextOperand.Unmarshal(m, b) +} +func (m *Operand_RequestContextOperand) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operand_RequestContextOperand.Marshal(b, m, deterministic) +} +func (dst *Operand_RequestContextOperand) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operand_RequestContextOperand.Merge(dst, src) +} +func (m *Operand_RequestContextOperand) XXX_Size() int { + return xxx_messageInfo_Operand_RequestContextOperand.Size(m) +} +func (m *Operand_RequestContextOperand) XXX_DiscardUnknown() { + xxx_messageInfo_Operand_RequestContextOperand.DiscardUnknown(m) +} + +var xxx_messageInfo_Operand_RequestContextOperand proto.InternalMessageInfo + +func (m *Operand_RequestContextOperand) GetContextType() enums.MatchingFunctionContextTypeEnum_MatchingFunctionContextType { + if m != nil { + return m.ContextType + } + return enums.MatchingFunctionContextTypeEnum_UNSPECIFIED +} + +func init() { + 
proto.RegisterType((*MatchingFunction)(nil), "google.ads.googleads.v1.common.MatchingFunction") + proto.RegisterType((*Operand)(nil), "google.ads.googleads.v1.common.Operand") + proto.RegisterType((*Operand_ConstantOperand)(nil), "google.ads.googleads.v1.common.Operand.ConstantOperand") + proto.RegisterType((*Operand_FeedAttributeOperand)(nil), "google.ads.googleads.v1.common.Operand.FeedAttributeOperand") + proto.RegisterType((*Operand_FunctionOperand)(nil), "google.ads.googleads.v1.common.Operand.FunctionOperand") + proto.RegisterType((*Operand_RequestContextOperand)(nil), "google.ads.googleads.v1.common.Operand.RequestContextOperand") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/matching_function.proto", fileDescriptor_matching_function_724626b702bec12d) +} + +var fileDescriptor_matching_function_724626b702bec12d = []byte{ + // 750 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x55, 0xcd, 0x4e, 0xdb, 0x4a, + 0x14, 0x26, 0x09, 0x82, 0xcb, 0x24, 0x24, 0x60, 0x01, 0x37, 0x37, 0x20, 0x84, 0xb2, 0xb9, 0xac, + 0xc6, 0x37, 0xb9, 0x88, 0x4a, 0x2e, 0xad, 0xe4, 0xf0, 0x2f, 0xb5, 0x05, 0xb9, 0x28, 0x0b, 0x94, + 0x2a, 0x72, 0x3c, 0x13, 0x63, 0xc9, 0x9e, 0x71, 0x3d, 0x63, 0x28, 0x8b, 0x3e, 0x46, 0xfb, 0x00, + 0x5d, 0x76, 0xd9, 0x5d, 0x5f, 0xa1, 0x6f, 0xd2, 0xae, 0xfa, 0x08, 0x95, 0x3d, 0x63, 0x87, 0x38, + 0x09, 0x24, 0x2b, 0xcf, 0x99, 0x73, 0xbe, 0xef, 0x9b, 0xf3, 0x33, 0x63, 0xb0, 0x6f, 0x53, 0x6a, + 0xbb, 0x58, 0x35, 0x11, 0x53, 0xc5, 0x32, 0x5a, 0xdd, 0x36, 0x54, 0x8b, 0x7a, 0x1e, 0x25, 0xaa, + 0x67, 0x72, 0xeb, 0xc6, 0x21, 0x76, 0xb7, 0x1f, 0x12, 0x8b, 0x3b, 0x94, 0x40, 0x3f, 0xa0, 0x9c, + 0x2a, 0xdb, 0x22, 0x18, 0x9a, 0x88, 0xc1, 0x14, 0x07, 0x6f, 0x1b, 0x50, 0xe0, 0x6a, 0xad, 0x49, + 0xbc, 0x98, 0x84, 0x1e, 0x1b, 0xa5, 0xed, 0x5a, 0x94, 0x70, 0xfc, 0x81, 0x77, 0xf9, 0xbd, 0x8f, + 0x85, 0x46, 0xed, 0xe5, 0xac, 0x1c, 0xd4, 0xc7, 0x81, 0xc9, 0x69, 0x20, 0xf1, 0xf2, 0x8c, 0x6a, + 0x6c, 0xf5, 0xc2, 0xbe, 0x7a, 0x17, 0x98, 0xbe, 0x8f, 0x03, 0x26, 0xfd, 0x5b, 0x09, 0xbf, 0xef, + 0xa8, 0x26, 0x21, 0x94, 0x9b, 0x11, 0x8b, 0xf4, 0xd6, 0x7f, 0xe6, 0xc1, 0xca, 0x6b, 0x29, 0x71, + 0x22, 0x15, 0x94, 0x63, 0x50, 0x49, 0xd5, 0x18, 0x0f, 0x1c, 0x62, 0x57, 0x73, 0x3b, 0xb9, 0xdd, + 0x62, 0x73, 0x4b, 0x56, 0x01, 0x26, 0x62, 0xf0, 0x6d, 0xec, 0x6e, 0x9b, 0x6e, 0x88, 0x8d, 0x72, + 0x02, 0x12, 0x9b, 0x8a, 0x0f, 0xfe, 0x4a, 0xce, 0x5a, 0x9d, 0xdf, 0xc9, 0xed, 0x96, 0x9b, 0x57, + 0x70, 0x52, 0x41, 0xe3, 0x64, 0x61, 0xf6, 0x24, 0x17, 0x12, 0x7e, 0x4c, 0x42, 0x6f, 0xa2, 0xd3, + 0x48, 0x55, 0x94, 0x57, 0x60, 0xd9, 0xc5, 0x7d, 0x2e, 0x4a, 0x44, 0x10, 0xab, 0xe6, 0x77, 0x0a, + 0xbb, 0xc5, 0xe6, 0xbf, 0xf0, 0xf1, 0x3e, 0xc2, 0x0b, 0x11, 0x6f, 0x94, 0x22, 0xb4, 0x34, 0x98, + 0xf2, 0x06, 0x94, 0x03, 0xc7, 0xbe, 0x79, 0x40, 0x57, 0x98, 0x8d, 0x6e, 0x39, 0x86, 0x27, 0x7c, + 0xf5, 0xef, 0x4b, 0x60, 0x51, 0x1a, 0x0a, 0x02, 0x2b, 0x16, 0x25, 0x8c, 0x9b, 0x24, 0xa5, 0x97, + 0x35, 0x7e, 0x36, 0x25, 0x3b, 0x3c, 0x94, 0x78, 0x69, 0x9f, 0xcd, 0x19, 0x15, 0x6b, 0x78, 0x4b, + 0xe1, 0x60, 0xa3, 0x8f, 0x31, 0xea, 0x9a, 0x9c, 0x07, 0x4e, 0x2f, 0xe4, 0x38, 0xd5, 0xca, 0xc7, + 0x5a, 0x07, 0xd3, 0x6a, 0x9d, 0x60, 0x8c, 0xf4, 0x84, 0x64, 0x20, 0xb8, 0xd6, 0x1f, 0xb3, 0x1f, + 0xe5, 0x36, 0x3c, 0xac, 0x04, 0x55, 0x0b, 0xb3, 0xe5, 0x36, 0xd4, 0x63, 0x91, 0x5b, 0x7f, 0x78, + 0x4b, 0xb9, 0x03, 0x7f, 0x07, 0xf8, 0x7d, 0x88, 0x19, 0x4f, 0x6f, 0x55, 0x22, 0x36, 0x1f, 0x8b, + 0xbd, 0x98, 0x56, 0xcc, 0x10, 0x34, 0x87, 0x82, 0x65, 0x20, 0xb9, 0x1e, 0x8c, 0x73, 0xd4, 
0xbe, + 0xe5, 0x41, 0x25, 0x53, 0x7b, 0x45, 0x07, 0x25, 0x71, 0x51, 0xba, 0xb7, 0xd1, 0x55, 0x98, 0xe6, + 0xba, 0x9c, 0xcd, 0x19, 0x45, 0x36, 0x30, 0x95, 0x03, 0x00, 0x5c, 0x9a, 0x12, 0x88, 0xfe, 0x6c, + 0x8e, 0x10, 0x9c, 0x13, 0xbe, 0xbf, 0x97, 0xe0, 0x97, 0x22, 0x80, 0x40, 0xeb, 0x60, 0xb9, 0x47, + 0xa9, 0x8b, 0x4d, 0x22, 0x09, 0x44, 0xc1, 0x6b, 0x23, 0x04, 0x2d, 0x4a, 0xdd, 0x04, 0x5f, 0x92, + 0x90, 0x84, 0xa2, 0x84, 0x68, 0xd8, 0x73, 0xb1, 0x64, 0x98, 0x9f, 0x90, 0xc3, 0x51, 0x1c, 0x94, + 0xe6, 0x80, 0x06, 0x66, 0xab, 0x0a, 0x36, 0xb2, 0x53, 0x2d, 0xc8, 0x6a, 0x9f, 0x72, 0x60, 0x6d, + 0xdc, 0x10, 0x29, 0x7b, 0x60, 0x31, 0x1e, 0x51, 0x27, 0x99, 0xff, 0xc7, 0x72, 0x36, 0x16, 0xa2, + 0xd8, 0x73, 0xa4, 0x9c, 0x82, 0xd5, 0xcc, 0x60, 0x3b, 0x68, 0x8a, 0x9a, 0x19, 0x95, 0xa1, 0x81, + 0x3d, 0x47, 0x35, 0x1f, 0x54, 0x32, 0xb3, 0xa6, 0xbc, 0x03, 0xab, 0x23, 0x8f, 0xae, 0x3c, 0xdb, + 0x7f, 0x4f, 0x8d, 0x54, 0xf6, 0x8d, 0x32, 0x56, 0xbc, 0xcc, 0x4e, 0xed, 0x73, 0x0e, 0xac, 0x8f, + 0x9d, 0x38, 0xe5, 0x23, 0x28, 0x3d, 0xfc, 0x3f, 0xc4, 0x9a, 0xe5, 0xe6, 0xf5, 0x8c, 0x6f, 0xa6, + 0x24, 0xbd, 0xba, 0xf7, 0xf1, 0xd8, 0x67, 0xf3, 0x81, 0xdf, 0x28, 0x5a, 0x03, 0xa3, 0xb5, 0x09, + 0xfe, 0x49, 0xaf, 0xad, 0x19, 0xd8, 0xa1, 0x87, 0x07, 0x5d, 0x6c, 0xfd, 0xce, 0x81, 0xba, 0x45, + 0xbd, 0x27, 0xf2, 0x6f, 0xad, 0x67, 0xd5, 0x2e, 0xa3, 0x26, 0x5c, 0xe6, 0xae, 0x8f, 0x24, 0xd0, + 0xa6, 0xae, 0x49, 0x6c, 0x48, 0x03, 0x5b, 0xb5, 0x31, 0x89, 0x5b, 0x94, 0xfc, 0xf5, 0x7c, 0x87, + 0x4d, 0xfa, 0x41, 0x3f, 0x17, 0x9f, 0x2f, 0xf9, 0xc2, 0xa9, 0xae, 0x7f, 0xcd, 0x6f, 0x9f, 0x0a, + 0x32, 0x1d, 0x31, 0x28, 0x96, 0xd1, 0xaa, 0xdd, 0x80, 0x87, 0x71, 0xd8, 0x8f, 0x24, 0xa0, 0xa3, + 0x23, 0xd6, 0x49, 0x03, 0x3a, 0xed, 0x46, 0x47, 0x04, 0xfc, 0xca, 0xd7, 0xc5, 0xae, 0xa6, 0xe9, + 0x88, 0x69, 0x5a, 0x1a, 0xa2, 0x69, 0xed, 0x86, 0xa6, 0x89, 0xa0, 0xde, 0x42, 0x7c, 0xba, 0xff, + 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0xaa, 0xcd, 0xa6, 0x98, 0x3d, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/metrics.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/metrics.pb.go new file mode 100644 index 0000000..b8672cc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/metrics.pb.go @@ -0,0 +1,1375 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/metrics.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metrics data. +type Metrics struct { + // The percent of your ad impressions that are shown as the very first ad + // above the organic search results. 
+ AbsoluteTopImpressionPercentage *wrappers.DoubleValue `protobuf:"bytes,95,opt,name=absolute_top_impression_percentage,json=absoluteTopImpressionPercentage,proto3" json:"absolute_top_impression_percentage,omitempty"` + // Average cost of viewable impressions (`active_view_impressions`). + ActiveViewCpm *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=active_view_cpm,json=activeViewCpm,proto3" json:"active_view_cpm,omitempty"` + // Active view measurable clicks divided by active view viewable impressions. + // This metric is reported only for display network. + ActiveViewCtr *wrappers.DoubleValue `protobuf:"bytes,79,opt,name=active_view_ctr,json=activeViewCtr,proto3" json:"active_view_ctr,omitempty"` + // A measurement of how often your ad has become viewable on a Display + // Network site. + ActiveViewImpressions *wrappers.Int64Value `protobuf:"bytes,2,opt,name=active_view_impressions,json=activeViewImpressions,proto3" json:"active_view_impressions,omitempty"` + // The ratio of impressions that could be measured by Active View over the + // number of served impressions. + ActiveViewMeasurability *wrappers.DoubleValue `protobuf:"bytes,96,opt,name=active_view_measurability,json=activeViewMeasurability,proto3" json:"active_view_measurability,omitempty"` + // The cost of the impressions you received that were measurable by Active + // View. + ActiveViewMeasurableCostMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=active_view_measurable_cost_micros,json=activeViewMeasurableCostMicros,proto3" json:"active_view_measurable_cost_micros,omitempty"` + // The number of times your ads are appearing on placements in positions + // where they can be seen. + ActiveViewMeasurableImpressions *wrappers.Int64Value `protobuf:"bytes,4,opt,name=active_view_measurable_impressions,json=activeViewMeasurableImpressions,proto3" json:"active_view_measurable_impressions,omitempty"` + // The percentage of time when your ad appeared on an Active View enabled site + // (measurable impressions) and was viewable (viewable impressions). + ActiveViewViewability *wrappers.DoubleValue `protobuf:"bytes,97,opt,name=active_view_viewability,json=activeViewViewability,proto3" json:"active_view_viewability,omitempty"` + // All conversions from interactions (as oppose to view through conversions) + // divided by the number of ad interactions. + AllConversionsFromInteractionsRate *wrappers.DoubleValue `protobuf:"bytes,65,opt,name=all_conversions_from_interactions_rate,json=allConversionsFromInteractionsRate,proto3" json:"all_conversions_from_interactions_rate,omitempty"` + // The total value of all conversions. + AllConversionsValue *wrappers.DoubleValue `protobuf:"bytes,66,opt,name=all_conversions_value,json=allConversionsValue,proto3" json:"all_conversions_value,omitempty"` + // The total number of conversions. This only includes conversion actions + // which include_in_conversions_metric attribute is set to true. + AllConversions *wrappers.DoubleValue `protobuf:"bytes,7,opt,name=all_conversions,json=allConversions,proto3" json:"all_conversions,omitempty"` + // The value of all conversions divided by the total cost of ad interactions + // (such as clicks for text ads or views for video ads). + AllConversionsValuePerCost *wrappers.DoubleValue `protobuf:"bytes,62,opt,name=all_conversions_value_per_cost,json=allConversionsValuePerCost,proto3" json:"all_conversions_value_per_cost,omitempty"` + // The number of times people clicked the "Call" button to call a store during + // or after clicking an ad. 
This number doesn’t include whether or not calls + // were connected, or the duration of any calls. + // This metric applies to feed items only. + AllConversionsFromClickToCall *wrappers.DoubleValue `protobuf:"bytes,118,opt,name=all_conversions_from_click_to_call,json=allConversionsFromClickToCall,proto3" json:"all_conversions_from_click_to_call,omitempty"` + // The number of times people clicked a "Get directions" button to navigate to + // a store after clicking an ad. + // This metric applies to feed items only. + AllConversionsFromDirections *wrappers.DoubleValue `protobuf:"bytes,119,opt,name=all_conversions_from_directions,json=allConversionsFromDirections,proto3" json:"all_conversions_from_directions,omitempty"` + // The value of all conversions from interactions divided by the total number + // of interactions. + AllConversionsFromInteractionsValuePerInteraction *wrappers.DoubleValue `protobuf:"bytes,67,opt,name=all_conversions_from_interactions_value_per_interaction,json=allConversionsFromInteractionsValuePerInteraction,proto3" json:"all_conversions_from_interactions_value_per_interaction,omitempty"` + // The number of times people clicked a link to view a store’s menu after + // clicking an ad. + // This metric applies to feed items only. + AllConversionsFromMenu *wrappers.DoubleValue `protobuf:"bytes,120,opt,name=all_conversions_from_menu,json=allConversionsFromMenu,proto3" json:"all_conversions_from_menu,omitempty"` + // The number of times people placed an order at a store after clicking an ad. + // This metric applies to feed items only. + AllConversionsFromOrder *wrappers.DoubleValue `protobuf:"bytes,121,opt,name=all_conversions_from_order,json=allConversionsFromOrder,proto3" json:"all_conversions_from_order,omitempty"` + // The number of other conversions (for example, posting a review or saving a + // location for a store) that occurred after people clicked an ad. + // This metric applies to feed items only. + AllConversionsFromOtherEngagement *wrappers.DoubleValue `protobuf:"bytes,122,opt,name=all_conversions_from_other_engagement,json=allConversionsFromOtherEngagement,proto3" json:"all_conversions_from_other_engagement,omitempty"` + // Estimated number of times people visited a store after clicking an ad. + // This metric applies to feed items only. + AllConversionsFromStoreVisit *wrappers.DoubleValue `protobuf:"bytes,123,opt,name=all_conversions_from_store_visit,json=allConversionsFromStoreVisit,proto3" json:"all_conversions_from_store_visit,omitempty"` + // The number of times that people were taken to a store's URL after clicking + // an ad. + // This metric applies to feed items only. + AllConversionsFromStoreWebsite *wrappers.DoubleValue `protobuf:"bytes,124,opt,name=all_conversions_from_store_website,json=allConversionsFromStoreWebsite,proto3" json:"all_conversions_from_store_website,omitempty"` + // The average amount you pay per interaction. This amount is the total cost + // of your ads divided by the total number of interactions. + AverageCost *wrappers.DoubleValue `protobuf:"bytes,8,opt,name=average_cost,json=averageCost,proto3" json:"average_cost,omitempty"` + // The total cost of all clicks divided by the total number of clicks + // received. + AverageCpc *wrappers.DoubleValue `protobuf:"bytes,9,opt,name=average_cpc,json=averageCpc,proto3" json:"average_cpc,omitempty"` + // The average amount that you've been charged for an ad engagement. This + // amount is the total cost of all ad engagements divided by the total number + // of ad engagements. 
+ AverageCpe *wrappers.DoubleValue `protobuf:"bytes,98,opt,name=average_cpe,json=averageCpe,proto3" json:"average_cpe,omitempty"` + // Average cost-per-thousand impressions (CPM). + AverageCpm *wrappers.DoubleValue `protobuf:"bytes,10,opt,name=average_cpm,json=averageCpm,proto3" json:"average_cpm,omitempty"` + // The average amount you pay each time someone views your ad. + // The average CPV is defined by the total cost of all ad views divided by + // the number of views. + AverageCpv *wrappers.DoubleValue `protobuf:"bytes,11,opt,name=average_cpv,json=averageCpv,proto3" json:"average_cpv,omitempty"` + // Average number of times a unique cookie was exposed to your ad + // over a given time period. Imported from Google Analytics. + AverageFrequency *wrappers.DoubleValue `protobuf:"bytes,12,opt,name=average_frequency,json=averageFrequency,proto3" json:"average_frequency,omitempty"` + // Average number of pages viewed per session. + AveragePageViews *wrappers.DoubleValue `protobuf:"bytes,99,opt,name=average_page_views,json=averagePageViews,proto3" json:"average_page_views,omitempty"` + // Your ad's position relative to those of other advertisers. + AveragePosition *wrappers.DoubleValue `protobuf:"bytes,13,opt,name=average_position,json=averagePosition,proto3" json:"average_position,omitempty"` + // Total duration of all sessions (in seconds) / number of sessions. Imported + // from Google Analytics. + AverageTimeOnSite *wrappers.DoubleValue `protobuf:"bytes,84,opt,name=average_time_on_site,json=averageTimeOnSite,proto3" json:"average_time_on_site,omitempty"` + // An indication of how other advertisers are bidding on similar products. + BenchmarkAverageMaxCpc *wrappers.DoubleValue `protobuf:"bytes,14,opt,name=benchmark_average_max_cpc,json=benchmarkAverageMaxCpc,proto3" json:"benchmark_average_max_cpc,omitempty"` + // An indication on how other advertisers' Shopping ads for similar products + // are performing based on how often people who see their ad click on it. + BenchmarkCtr *wrappers.DoubleValue `protobuf:"bytes,77,opt,name=benchmark_ctr,json=benchmarkCtr,proto3" json:"benchmark_ctr,omitempty"` + // Percentage of clicks where the user only visited a single page on your + // site. Imported from Google Analytics. + BounceRate *wrappers.DoubleValue `protobuf:"bytes,15,opt,name=bounce_rate,json=bounceRate,proto3" json:"bounce_rate,omitempty"` + // The number of clicks. + Clicks *wrappers.Int64Value `protobuf:"bytes,19,opt,name=clicks,proto3" json:"clicks,omitempty"` + // The number of times your ad or your site's listing in the unpaid + // results was clicked. See the help page at + // https://support.google.com/google-ads/answer/3097241 for details. + CombinedClicks *wrappers.Int64Value `protobuf:"bytes,115,opt,name=combined_clicks,json=combinedClicks,proto3" json:"combined_clicks,omitempty"` + // The number of times your ad or your site's listing in the unpaid + // results was clicked (combined_clicks) divided by combined_queries. See the + // help page at https://support.google.com/google-ads/answer/3097241 for + // details. + CombinedClicksPerQuery *wrappers.DoubleValue `protobuf:"bytes,116,opt,name=combined_clicks_per_query,json=combinedClicksPerQuery,proto3" json:"combined_clicks_per_query,omitempty"` + // The number of searches that returned pages from your site in the unpaid + // results or showed one of your text ads. See the help page at + // https://support.google.com/google-ads/answer/3097241 for details. 
+ CombinedQueries *wrappers.Int64Value `protobuf:"bytes,117,opt,name=combined_queries,json=combinedQueries,proto3" json:"combined_queries,omitempty"` + // The estimated percent of times that your ad was eligible to show + // on the Display Network but didn't because your budget was too low. + // Note: Content budget lost impression share is reported in the range of 0 + // to 0.9. Any value above 0.9 is reported as 0.9001. + ContentBudgetLostImpressionShare *wrappers.DoubleValue `protobuf:"bytes,20,opt,name=content_budget_lost_impression_share,json=contentBudgetLostImpressionShare,proto3" json:"content_budget_lost_impression_share,omitempty"` + // The impressions you've received on the Display Network divided + // by the estimated number of impressions you were eligible to receive. + // Note: Content impression share is reported in the range of 0.1 to 1. Any + // value below 0.1 is reported as 0.0999. + ContentImpressionShare *wrappers.DoubleValue `protobuf:"bytes,21,opt,name=content_impression_share,json=contentImpressionShare,proto3" json:"content_impression_share,omitempty"` + // The last date/time a conversion tag for this conversion action successfully + // fired and was seen by Google Ads. This firing event may not have been the + // result of an attributable conversion (e.g. because the tag was fired from a + // browser that did not previously click an ad from an appropriate + // advertiser). The date/time is in the customer's time zone. + ConversionLastReceivedRequestDateTime *wrappers.StringValue `protobuf:"bytes,73,opt,name=conversion_last_received_request_date_time,json=conversionLastReceivedRequestDateTime,proto3" json:"conversion_last_received_request_date_time,omitempty"` + // The date of the most recent conversion for this conversion action. The date + // is in the customer's time zone. + ConversionLastConversionDate *wrappers.StringValue `protobuf:"bytes,74,opt,name=conversion_last_conversion_date,json=conversionLastConversionDate,proto3" json:"conversion_last_conversion_date,omitempty"` + // The estimated percentage of impressions on the Display Network + // that your ads didn't receive due to poor Ad Rank. + // Note: Content rank lost impression share is reported in the range of 0 + // to 0.9. Any value above 0.9 is reported as 0.9001. + ContentRankLostImpressionShare *wrappers.DoubleValue `protobuf:"bytes,22,opt,name=content_rank_lost_impression_share,json=contentRankLostImpressionShare,proto3" json:"content_rank_lost_impression_share,omitempty"` + // Conversions from interactions divided by the number of ad interactions + // (such as clicks for text ads or views for video ads). This only includes + // conversion actions which include_in_conversions_metric attribute is set to + // true. + ConversionsFromInteractionsRate *wrappers.DoubleValue `protobuf:"bytes,69,opt,name=conversions_from_interactions_rate,json=conversionsFromInteractionsRate,proto3" json:"conversions_from_interactions_rate,omitempty"` + // The total value of conversions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + ConversionsValue *wrappers.DoubleValue `protobuf:"bytes,70,opt,name=conversions_value,json=conversionsValue,proto3" json:"conversions_value,omitempty"` + // The value of conversions divided by the cost of ad interactions. This only + // includes conversion actions which include_in_conversions_metric attribute + // is set to true. 
+ ConversionsValuePerCost *wrappers.DoubleValue `protobuf:"bytes,71,opt,name=conversions_value_per_cost,json=conversionsValuePerCost,proto3" json:"conversions_value_per_cost,omitempty"` + // The value of conversions from interactions divided by the number of ad + // interactions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + ConversionsFromInteractionsValuePerInteraction *wrappers.DoubleValue `protobuf:"bytes,72,opt,name=conversions_from_interactions_value_per_interaction,json=conversionsFromInteractionsValuePerInteraction,proto3" json:"conversions_from_interactions_value_per_interaction,omitempty"` + // The number of conversions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + Conversions *wrappers.DoubleValue `protobuf:"bytes,25,opt,name=conversions,proto3" json:"conversions,omitempty"` + // The sum of your cost-per-click (CPC) and cost-per-thousand impressions + // (CPM) costs during this period. + CostMicros *wrappers.Int64Value `protobuf:"bytes,26,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"` + // The cost of ad interactions divided by all conversions. + CostPerAllConversions *wrappers.DoubleValue `protobuf:"bytes,68,opt,name=cost_per_all_conversions,json=costPerAllConversions,proto3" json:"cost_per_all_conversions,omitempty"` + // The cost of ad interactions divided by conversions. This only includes + // conversion actions which include_in_conversions_metric attribute is set to + // true. + CostPerConversion *wrappers.DoubleValue `protobuf:"bytes,28,opt,name=cost_per_conversion,json=costPerConversion,proto3" json:"cost_per_conversion,omitempty"` + // The cost of ad interactions divided by current model attributed + // conversions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + CostPerCurrentModelAttributedConversion *wrappers.DoubleValue `protobuf:"bytes,106,opt,name=cost_per_current_model_attributed_conversion,json=costPerCurrentModelAttributedConversion,proto3" json:"cost_per_current_model_attributed_conversion,omitempty"` + // Conversions from when a customer clicks on a Google Ads ad on one device, + // then converts on a different device or browser. + // Cross-device conversions are already included in all_conversions. + CrossDeviceConversions *wrappers.DoubleValue `protobuf:"bytes,29,opt,name=cross_device_conversions,json=crossDeviceConversions,proto3" json:"cross_device_conversions,omitempty"` + // The number of clicks your ad receives (Clicks) divided by the number + // of times your ad is shown (Impressions). + Ctr *wrappers.DoubleValue `protobuf:"bytes,30,opt,name=ctr,proto3" json:"ctr,omitempty"` + // Shows how your historic conversions data would look under the attribution + // model you've currently selected. This only includes conversion actions + // which include_in_conversions_metric attribute is set to true. + CurrentModelAttributedConversions *wrappers.DoubleValue `protobuf:"bytes,101,opt,name=current_model_attributed_conversions,json=currentModelAttributedConversions,proto3" json:"current_model_attributed_conversions,omitempty"` + // Current model attributed conversions from interactions divided by the + // number of ad interactions (such as clicks for text ads or views for video + // ads). This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. 
+ CurrentModelAttributedConversionsFromInteractionsRate *wrappers.DoubleValue `protobuf:"bytes,102,opt,name=current_model_attributed_conversions_from_interactions_rate,json=currentModelAttributedConversionsFromInteractionsRate,proto3" json:"current_model_attributed_conversions_from_interactions_rate,omitempty"` + // The value of current model attributed conversions from interactions divided + // by the number of ad interactions. This only includes conversion actions + // which include_in_conversions_metric attribute is set to true. + CurrentModelAttributedConversionsFromInteractionsValuePerInteraction *wrappers.DoubleValue `protobuf:"bytes,103,opt,name=current_model_attributed_conversions_from_interactions_value_per_interaction,json=currentModelAttributedConversionsFromInteractionsValuePerInteraction,proto3" json:"current_model_attributed_conversions_from_interactions_value_per_interaction,omitempty"` + // The total value of current model attributed conversions. This only includes + // conversion actions which include_in_conversions_metric attribute is set to + // true. + CurrentModelAttributedConversionsValue *wrappers.DoubleValue `protobuf:"bytes,104,opt,name=current_model_attributed_conversions_value,json=currentModelAttributedConversionsValue,proto3" json:"current_model_attributed_conversions_value,omitempty"` + // The value of current model attributed conversions divided by the cost of ad + // interactions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + CurrentModelAttributedConversionsValuePerCost *wrappers.DoubleValue `protobuf:"bytes,105,opt,name=current_model_attributed_conversions_value_per_cost,json=currentModelAttributedConversionsValuePerCost,proto3" json:"current_model_attributed_conversions_value_per_cost,omitempty"` + // How often people engage with your ad after it's shown to them. This is the + // number of ad expansions divided by the number of times your ad is shown. + EngagementRate *wrappers.DoubleValue `protobuf:"bytes,31,opt,name=engagement_rate,json=engagementRate,proto3" json:"engagement_rate,omitempty"` + // The number of engagements. + // An engagement occurs when a viewer expands your Lightbox ad. Also, in the + // future, other ad types may support engagement metrics. + Engagements *wrappers.Int64Value `protobuf:"bytes,32,opt,name=engagements,proto3" json:"engagements,omitempty"` + // Average lead value of hotel. + HotelAverageLeadValueMicros *wrappers.DoubleValue `protobuf:"bytes,75,opt,name=hotel_average_lead_value_micros,json=hotelAverageLeadValueMicros,proto3" json:"hotel_average_lead_value_micros,omitempty"` + // The creative historical quality score. + HistoricalCreativeQualityScore enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,80,opt,name=historical_creative_quality_score,json=historicalCreativeQualityScore,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"historical_creative_quality_score,omitempty"` + // The quality of historical landing page experience. + HistoricalLandingPageQualityScore enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,81,opt,name=historical_landing_page_quality_score,json=historicalLandingPageQualityScore,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"historical_landing_page_quality_score,omitempty"` + // The historical quality score. 
+ HistoricalQualityScore *wrappers.Int64Value `protobuf:"bytes,82,opt,name=historical_quality_score,json=historicalQualityScore,proto3" json:"historical_quality_score,omitempty"` + // The historical search predicted click through rate (CTR). + HistoricalSearchPredictedCtr enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,83,opt,name=historical_search_predicted_ctr,json=historicalSearchPredictedCtr,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"historical_search_predicted_ctr,omitempty"` + // The number of times the ad was forwarded to someone else as a message. + GmailForwards *wrappers.Int64Value `protobuf:"bytes,85,opt,name=gmail_forwards,json=gmailForwards,proto3" json:"gmail_forwards,omitempty"` + // The number of times someone has saved your Gmail ad to their inbox as a + // message. + GmailSaves *wrappers.Int64Value `protobuf:"bytes,86,opt,name=gmail_saves,json=gmailSaves,proto3" json:"gmail_saves,omitempty"` + // The number of clicks to the landing page on the expanded state of Gmail + // ads. + GmailSecondaryClicks *wrappers.Int64Value `protobuf:"bytes,87,opt,name=gmail_secondary_clicks,json=gmailSecondaryClicks,proto3" json:"gmail_secondary_clicks,omitempty"` + // Number of unique cookies that were exposed to your ad over a given time + // period. + ImpressionReach *wrappers.Int64Value `protobuf:"bytes,36,opt,name=impression_reach,json=impressionReach,proto3" json:"impression_reach,omitempty"` + // The number of times a store's location-based ad was shown. + // This metric applies to feed items only. + ImpressionsFromStoreReach *wrappers.Int64Value `protobuf:"bytes,125,opt,name=impressions_from_store_reach,json=impressionsFromStoreReach,proto3" json:"impressions_from_store_reach,omitempty"` + // Count of how often your ad has appeared on a search results page or + // website on the Google Network. + Impressions *wrappers.Int64Value `protobuf:"bytes,37,opt,name=impressions,proto3" json:"impressions,omitempty"` + // How often people interact with your ad after it is shown to them. + // This is the number of interactions divided by the number of times your ad + // is shown. + InteractionRate *wrappers.DoubleValue `protobuf:"bytes,38,opt,name=interaction_rate,json=interactionRate,proto3" json:"interaction_rate,omitempty"` + // The number of interactions. + // An interaction is the main user action associated with an ad format-clicks + // for text and shopping ads, views for video ads, and so on. + Interactions *wrappers.Int64Value `protobuf:"bytes,39,opt,name=interactions,proto3" json:"interactions,omitempty"` + // The types of payable and free interactions. + InteractionEventTypes []enums.InteractionEventTypeEnum_InteractionEventType `protobuf:"varint,100,rep,packed,name=interaction_event_types,json=interactionEventTypes,proto3,enum=google.ads.googleads.v1.enums.InteractionEventTypeEnum_InteractionEventType" json:"interaction_event_types,omitempty"` + // The percentage of clicks filtered out of your total number of clicks + // (filtered + non-filtered clicks) during the reporting period. + InvalidClickRate *wrappers.DoubleValue `protobuf:"bytes,40,opt,name=invalid_click_rate,json=invalidClickRate,proto3" json:"invalid_click_rate,omitempty"` + // Number of clicks Google considers illegitimate and doesn't charge you for. 
+ InvalidClicks *wrappers.Int64Value `protobuf:"bytes,41,opt,name=invalid_clicks,json=invalidClicks,proto3" json:"invalid_clicks,omitempty"` + // The percentage of mobile clicks that go to a mobile-friendly page. + MobileFriendlyClicksPercentage *wrappers.DoubleValue `protobuf:"bytes,109,opt,name=mobile_friendly_clicks_percentage,json=mobileFriendlyClicksPercentage,proto3" json:"mobile_friendly_clicks_percentage,omitempty"` + // The number of times someone clicked your site's listing in the unpaid + // results for a particular query. See the help page at + // https://support.google.com/google-ads/answer/3097241 for details. + OrganicClicks *wrappers.Int64Value `protobuf:"bytes,110,opt,name=organic_clicks,json=organicClicks,proto3" json:"organic_clicks,omitempty"` + // The number of times someone clicked your site's listing in the unpaid + // results (organic_clicks) divided by the total number of searches that + // returned pages from your site (organic_queries). See the help page at + // https://support.google.com/google-ads/answer/3097241 for details. + OrganicClicksPerQuery *wrappers.DoubleValue `protobuf:"bytes,111,opt,name=organic_clicks_per_query,json=organicClicksPerQuery,proto3" json:"organic_clicks_per_query,omitempty"` + // The number of listings for your site in the unpaid search results. See the + // help page at https://support.google.com/google-ads/answer/3097241 for + // details. + OrganicImpressions *wrappers.Int64Value `protobuf:"bytes,112,opt,name=organic_impressions,json=organicImpressions,proto3" json:"organic_impressions,omitempty"` + // The number of times a page from your site was listed in the unpaid search + // results (organic_impressions) divided by the number of searches returning + // your site's listing in the unpaid results (organic_queries). See the help + // page at https://support.google.com/google-ads/answer/3097241 for details. + OrganicImpressionsPerQuery *wrappers.DoubleValue `protobuf:"bytes,113,opt,name=organic_impressions_per_query,json=organicImpressionsPerQuery,proto3" json:"organic_impressions_per_query,omitempty"` + // The total number of searches that returned your site's listing in the + // unpaid results. See the help page at + // https://support.google.com/google-ads/answer/3097241 for details. + OrganicQueries *wrappers.Int64Value `protobuf:"bytes,114,opt,name=organic_queries,json=organicQueries,proto3" json:"organic_queries,omitempty"` + // Percentage of first-time sessions (from people who had never visited your + // site before). Imported from Google Analytics. + PercentNewVisitors *wrappers.DoubleValue `protobuf:"bytes,42,opt,name=percent_new_visitors,json=percentNewVisitors,proto3" json:"percent_new_visitors,omitempty"` + // Number of offline phone calls. + PhoneCalls *wrappers.Int64Value `protobuf:"bytes,43,opt,name=phone_calls,json=phoneCalls,proto3" json:"phone_calls,omitempty"` + // Number of offline phone impressions. + PhoneImpressions *wrappers.Int64Value `protobuf:"bytes,44,opt,name=phone_impressions,json=phoneImpressions,proto3" json:"phone_impressions,omitempty"` + // Number of phone calls received (phone_calls) divided by the number of + // times your phone number is shown (phone_impressions). + PhoneThroughRate *wrappers.DoubleValue `protobuf:"bytes,45,opt,name=phone_through_rate,json=phoneThroughRate,proto3" json:"phone_through_rate,omitempty"` + // Your clickthrough rate (Ctr) divided by the average clickthrough rate of + // all advertisers on the websites that show your ads. 
Measures how your ads + // perform on Display Network sites compared to other ads on the same sites. + RelativeCtr *wrappers.DoubleValue `protobuf:"bytes,46,opt,name=relative_ctr,json=relativeCtr,proto3" json:"relative_ctr,omitempty"` + // The percentage of the customer's Shopping or Search ad impressions that are + // shown in the most prominent Shopping position. See + // this Merchant + // Center article for details. Any value below 0.1 is reported as 0.0999. + SearchAbsoluteTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,78,opt,name=search_absolute_top_impression_share,json=searchAbsoluteTopImpressionShare,proto3" json:"search_absolute_top_impression_share,omitempty"` + // The number estimating how often your ad wasn't the very first ad above the + // organic search results due to a low budget. Note: Search + // budget lost absolute top impression share is reported in the range of 0 to + // 0.9. Any value above 0.9 is reported as 0.9001. + SearchBudgetLostAbsoluteTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,88,opt,name=search_budget_lost_absolute_top_impression_share,json=searchBudgetLostAbsoluteTopImpressionShare,proto3" json:"search_budget_lost_absolute_top_impression_share,omitempty"` + // The estimated percent of times that your ad was eligible to show on the + // Search Network but didn't because your budget was too low. Note: Search + // budget lost impression share is reported in the range of 0 to 0.9. Any + // value above 0.9 is reported as 0.9001. + SearchBudgetLostImpressionShare *wrappers.DoubleValue `protobuf:"bytes,47,opt,name=search_budget_lost_impression_share,json=searchBudgetLostImpressionShare,proto3" json:"search_budget_lost_impression_share,omitempty"` + // The number estimating how often your ad didn't show anywhere above the + // organic search results due to a low budget. Note: Search + // budget lost top impression share is reported in the range of 0 to 0.9. Any + // value above 0.9 is reported as 0.9001. + SearchBudgetLostTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,89,opt,name=search_budget_lost_top_impression_share,json=searchBudgetLostTopImpressionShare,proto3" json:"search_budget_lost_top_impression_share,omitempty"` + // The number of clicks you've received on the Search Network + // divided by the estimated number of clicks you were eligible to receive. + // Note: Search click share is reported in the range of 0.1 to 1. Any value + // below 0.1 is reported as 0.0999. + SearchClickShare *wrappers.DoubleValue `protobuf:"bytes,48,opt,name=search_click_share,json=searchClickShare,proto3" json:"search_click_share,omitempty"` + // The impressions you've received divided by the estimated number of + // impressions you were eligible to receive on the Search Network for search + // terms that matched your keywords exactly (or were close variants of your + // keyword), regardless of your keyword match types. Note: Search exact match + // impression share is reported in the range of 0.1 to 1. Any value below 0.1 + // is reported as 0.0999. + SearchExactMatchImpressionShare *wrappers.DoubleValue `protobuf:"bytes,49,opt,name=search_exact_match_impression_share,json=searchExactMatchImpressionShare,proto3" json:"search_exact_match_impression_share,omitempty"` + // The impressions you've received on the Search Network divided + // by the estimated number of impressions you were eligible to receive. + // Note: Search impression share is reported in the range of 0.1 to 1. Any + // value below 0.1 is reported as 0.0999. 
+ SearchImpressionShare *wrappers.DoubleValue `protobuf:"bytes,50,opt,name=search_impression_share,json=searchImpressionShare,proto3" json:"search_impression_share,omitempty"` + // The number estimating how often your ad wasn't the very first ad above the + // organic search results due to poor Ad Rank. + // Note: Search rank lost absolute top impression share is reported in the + // range of 0 to 0.9. Any value above 0.9 is reported as 0.9001. + SearchRankLostAbsoluteTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,90,opt,name=search_rank_lost_absolute_top_impression_share,json=searchRankLostAbsoluteTopImpressionShare,proto3" json:"search_rank_lost_absolute_top_impression_share,omitempty"` + // The estimated percentage of impressions on the Search Network + // that your ads didn't receive due to poor Ad Rank. + // Note: Search rank lost impression share is reported in the range of 0 to + // 0.9. Any value above 0.9 is reported as 0.9001. + SearchRankLostImpressionShare *wrappers.DoubleValue `protobuf:"bytes,51,opt,name=search_rank_lost_impression_share,json=searchRankLostImpressionShare,proto3" json:"search_rank_lost_impression_share,omitempty"` + // The number estimating how often your ad didn't show anywhere above the + // organic search results due to poor Ad Rank. + // Note: Search rank lost top impression share is reported in the range of 0 + // to 0.9. Any value above 0.9 is reported as 0.9001. + SearchRankLostTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,91,opt,name=search_rank_lost_top_impression_share,json=searchRankLostTopImpressionShare,proto3" json:"search_rank_lost_top_impression_share,omitempty"` + // The impressions you've received in the top location (anywhere above the + // organic search results) compared to the estimated number of impressions you + // were eligible to receive in the top location. + // Note: Search top impression share is reported in the range of 0.1 to 1. Any + // value below 0.1 is reported as 0.0999. + SearchTopImpressionShare *wrappers.DoubleValue `protobuf:"bytes,92,opt,name=search_top_impression_share,json=searchTopImpressionShare,proto3" json:"search_top_impression_share,omitempty"` + // A measure of how quickly your page loads after clicks on your mobile ads. + // The score is a range from 1 to 10, 10 being the fastest. + SpeedScore *wrappers.Int64Value `protobuf:"bytes,107,opt,name=speed_score,json=speedScore,proto3" json:"speed_score,omitempty"` + // The percent of your ad impressions that are shown anywhere above the + // organic search results. + TopImpressionPercentage *wrappers.DoubleValue `protobuf:"bytes,93,opt,name=top_impression_percentage,json=topImpressionPercentage,proto3" json:"top_impression_percentage,omitempty"` + // The percentage of ad clicks to Accelerated Mobile Pages (AMP) landing pages + // that reach a valid AMP page. + ValidAcceleratedMobilePagesClicksPercentage *wrappers.DoubleValue `protobuf:"bytes,108,opt,name=valid_accelerated_mobile_pages_clicks_percentage,json=validAcceleratedMobilePagesClicksPercentage,proto3" json:"valid_accelerated_mobile_pages_clicks_percentage,omitempty"` + // The value of all conversions divided by the number of all conversions. + ValuePerAllConversions *wrappers.DoubleValue `protobuf:"bytes,52,opt,name=value_per_all_conversions,json=valuePerAllConversions,proto3" json:"value_per_all_conversions,omitempty"` + // The value of conversions divided by the number of conversions. 
This only + // includes conversion actions which include_in_conversions_metric attribute + // is set to true. + ValuePerConversion *wrappers.DoubleValue `protobuf:"bytes,53,opt,name=value_per_conversion,json=valuePerConversion,proto3" json:"value_per_conversion,omitempty"` + // The value of current model attributed conversions divided by the number of + // the conversions. This only includes conversion actions which + // include_in_conversions_metric attribute is set to true. + ValuePerCurrentModelAttributedConversion *wrappers.DoubleValue `protobuf:"bytes,94,opt,name=value_per_current_model_attributed_conversion,json=valuePerCurrentModelAttributedConversion,proto3" json:"value_per_current_model_attributed_conversion,omitempty"` + // Percentage of impressions where the viewer watched all of your video. + VideoQuartile_100Rate *wrappers.DoubleValue `protobuf:"bytes,54,opt,name=video_quartile_100_rate,json=videoQuartile100Rate,proto3" json:"video_quartile_100_rate,omitempty"` + // Percentage of impressions where the viewer watched 25% of your video. + VideoQuartile_25Rate *wrappers.DoubleValue `protobuf:"bytes,55,opt,name=video_quartile_25_rate,json=videoQuartile25Rate,proto3" json:"video_quartile_25_rate,omitempty"` + // Percentage of impressions where the viewer watched 50% of your video. + VideoQuartile_50Rate *wrappers.DoubleValue `protobuf:"bytes,56,opt,name=video_quartile_50_rate,json=videoQuartile50Rate,proto3" json:"video_quartile_50_rate,omitempty"` + // Percentage of impressions where the viewer watched 75% of your video. + VideoQuartile_75Rate *wrappers.DoubleValue `protobuf:"bytes,57,opt,name=video_quartile_75_rate,json=videoQuartile75Rate,proto3" json:"video_quartile_75_rate,omitempty"` + // The number of views your TrueView video ad receives divided by its number + // of impressions, including thumbnail impressions for TrueView in-display + // ads. + VideoViewRate *wrappers.DoubleValue `protobuf:"bytes,58,opt,name=video_view_rate,json=videoViewRate,proto3" json:"video_view_rate,omitempty"` + // The number of times your video ads were viewed. + VideoViews *wrappers.Int64Value `protobuf:"bytes,59,opt,name=video_views,json=videoViews,proto3" json:"video_views,omitempty"` + // The total number of view-through conversions. + // These happen when a customer sees an image or rich media ad, then later + // completes a conversion on your site without interacting with (e.g., + // clicking on) another ad. 
+ ViewThroughConversions *wrappers.Int64Value `protobuf:"bytes,60,opt,name=view_through_conversions,json=viewThroughConversions,proto3" json:"view_through_conversions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Metrics) Reset() { *m = Metrics{} } +func (m *Metrics) String() string { return proto.CompactTextString(m) } +func (*Metrics) ProtoMessage() {} +func (*Metrics) Descriptor() ([]byte, []int) { + return fileDescriptor_metrics_a6092c916b5f5d74, []int{0} +} +func (m *Metrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Metrics.Unmarshal(m, b) +} +func (m *Metrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Metrics.Marshal(b, m, deterministic) +} +func (dst *Metrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Metrics.Merge(dst, src) +} +func (m *Metrics) XXX_Size() int { + return xxx_messageInfo_Metrics.Size(m) +} +func (m *Metrics) XXX_DiscardUnknown() { + xxx_messageInfo_Metrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Metrics proto.InternalMessageInfo + +func (m *Metrics) GetAbsoluteTopImpressionPercentage() *wrappers.DoubleValue { + if m != nil { + return m.AbsoluteTopImpressionPercentage + } + return nil +} + +func (m *Metrics) GetActiveViewCpm() *wrappers.DoubleValue { + if m != nil { + return m.ActiveViewCpm + } + return nil +} + +func (m *Metrics) GetActiveViewCtr() *wrappers.DoubleValue { + if m != nil { + return m.ActiveViewCtr + } + return nil +} + +func (m *Metrics) GetActiveViewImpressions() *wrappers.Int64Value { + if m != nil { + return m.ActiveViewImpressions + } + return nil +} + +func (m *Metrics) GetActiveViewMeasurability() *wrappers.DoubleValue { + if m != nil { + return m.ActiveViewMeasurability + } + return nil +} + +func (m *Metrics) GetActiveViewMeasurableCostMicros() *wrappers.Int64Value { + if m != nil { + return m.ActiveViewMeasurableCostMicros + } + return nil +} + +func (m *Metrics) GetActiveViewMeasurableImpressions() *wrappers.Int64Value { + if m != nil { + return m.ActiveViewMeasurableImpressions + } + return nil +} + +func (m *Metrics) GetActiveViewViewability() *wrappers.DoubleValue { + if m != nil { + return m.ActiveViewViewability + } + return nil +} + +func (m *Metrics) GetAllConversionsFromInteractionsRate() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromInteractionsRate + } + return nil +} + +func (m *Metrics) GetAllConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsValue + } + return nil +} + +func (m *Metrics) GetAllConversions() *wrappers.DoubleValue { + if m != nil { + return m.AllConversions + } + return nil +} + +func (m *Metrics) GetAllConversionsValuePerCost() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsValuePerCost + } + return nil +} + +func (m *Metrics) GetAllConversionsFromClickToCall() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromClickToCall + } + return nil +} + +func (m *Metrics) GetAllConversionsFromDirections() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromDirections + } + return nil +} + +func (m *Metrics) GetAllConversionsFromInteractionsValuePerInteraction() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromInteractionsValuePerInteraction + } + return nil +} + +func (m *Metrics) GetAllConversionsFromMenu() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromMenu + } + return nil +} + +func (m *Metrics) 
GetAllConversionsFromOrder() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromOrder + } + return nil +} + +func (m *Metrics) GetAllConversionsFromOtherEngagement() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromOtherEngagement + } + return nil +} + +func (m *Metrics) GetAllConversionsFromStoreVisit() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromStoreVisit + } + return nil +} + +func (m *Metrics) GetAllConversionsFromStoreWebsite() *wrappers.DoubleValue { + if m != nil { + return m.AllConversionsFromStoreWebsite + } + return nil +} + +func (m *Metrics) GetAverageCost() *wrappers.DoubleValue { + if m != nil { + return m.AverageCost + } + return nil +} + +func (m *Metrics) GetAverageCpc() *wrappers.DoubleValue { + if m != nil { + return m.AverageCpc + } + return nil +} + +func (m *Metrics) GetAverageCpe() *wrappers.DoubleValue { + if m != nil { + return m.AverageCpe + } + return nil +} + +func (m *Metrics) GetAverageCpm() *wrappers.DoubleValue { + if m != nil { + return m.AverageCpm + } + return nil +} + +func (m *Metrics) GetAverageCpv() *wrappers.DoubleValue { + if m != nil { + return m.AverageCpv + } + return nil +} + +func (m *Metrics) GetAverageFrequency() *wrappers.DoubleValue { + if m != nil { + return m.AverageFrequency + } + return nil +} + +func (m *Metrics) GetAveragePageViews() *wrappers.DoubleValue { + if m != nil { + return m.AveragePageViews + } + return nil +} + +func (m *Metrics) GetAveragePosition() *wrappers.DoubleValue { + if m != nil { + return m.AveragePosition + } + return nil +} + +func (m *Metrics) GetAverageTimeOnSite() *wrappers.DoubleValue { + if m != nil { + return m.AverageTimeOnSite + } + return nil +} + +func (m *Metrics) GetBenchmarkAverageMaxCpc() *wrappers.DoubleValue { + if m != nil { + return m.BenchmarkAverageMaxCpc + } + return nil +} + +func (m *Metrics) GetBenchmarkCtr() *wrappers.DoubleValue { + if m != nil { + return m.BenchmarkCtr + } + return nil +} + +func (m *Metrics) GetBounceRate() *wrappers.DoubleValue { + if m != nil { + return m.BounceRate + } + return nil +} + +func (m *Metrics) GetClicks() *wrappers.Int64Value { + if m != nil { + return m.Clicks + } + return nil +} + +func (m *Metrics) GetCombinedClicks() *wrappers.Int64Value { + if m != nil { + return m.CombinedClicks + } + return nil +} + +func (m *Metrics) GetCombinedClicksPerQuery() *wrappers.DoubleValue { + if m != nil { + return m.CombinedClicksPerQuery + } + return nil +} + +func (m *Metrics) GetCombinedQueries() *wrappers.Int64Value { + if m != nil { + return m.CombinedQueries + } + return nil +} + +func (m *Metrics) GetContentBudgetLostImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.ContentBudgetLostImpressionShare + } + return nil +} + +func (m *Metrics) GetContentImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.ContentImpressionShare + } + return nil +} + +func (m *Metrics) GetConversionLastReceivedRequestDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionLastReceivedRequestDateTime + } + return nil +} + +func (m *Metrics) GetConversionLastConversionDate() *wrappers.StringValue { + if m != nil { + return m.ConversionLastConversionDate + } + return nil +} + +func (m *Metrics) GetContentRankLostImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.ContentRankLostImpressionShare + } + return nil +} + +func (m *Metrics) GetConversionsFromInteractionsRate() *wrappers.DoubleValue { + if m != nil { + return m.ConversionsFromInteractionsRate + } 
+ return nil +} + +func (m *Metrics) GetConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.ConversionsValue + } + return nil +} + +func (m *Metrics) GetConversionsValuePerCost() *wrappers.DoubleValue { + if m != nil { + return m.ConversionsValuePerCost + } + return nil +} + +func (m *Metrics) GetConversionsFromInteractionsValuePerInteraction() *wrappers.DoubleValue { + if m != nil { + return m.ConversionsFromInteractionsValuePerInteraction + } + return nil +} + +func (m *Metrics) GetConversions() *wrappers.DoubleValue { + if m != nil { + return m.Conversions + } + return nil +} + +func (m *Metrics) GetCostMicros() *wrappers.Int64Value { + if m != nil { + return m.CostMicros + } + return nil +} + +func (m *Metrics) GetCostPerAllConversions() *wrappers.DoubleValue { + if m != nil { + return m.CostPerAllConversions + } + return nil +} + +func (m *Metrics) GetCostPerConversion() *wrappers.DoubleValue { + if m != nil { + return m.CostPerConversion + } + return nil +} + +func (m *Metrics) GetCostPerCurrentModelAttributedConversion() *wrappers.DoubleValue { + if m != nil { + return m.CostPerCurrentModelAttributedConversion + } + return nil +} + +func (m *Metrics) GetCrossDeviceConversions() *wrappers.DoubleValue { + if m != nil { + return m.CrossDeviceConversions + } + return nil +} + +func (m *Metrics) GetCtr() *wrappers.DoubleValue { + if m != nil { + return m.Ctr + } + return nil +} + +func (m *Metrics) GetCurrentModelAttributedConversions() *wrappers.DoubleValue { + if m != nil { + return m.CurrentModelAttributedConversions + } + return nil +} + +func (m *Metrics) GetCurrentModelAttributedConversionsFromInteractionsRate() *wrappers.DoubleValue { + if m != nil { + return m.CurrentModelAttributedConversionsFromInteractionsRate + } + return nil +} + +func (m *Metrics) GetCurrentModelAttributedConversionsFromInteractionsValuePerInteraction() *wrappers.DoubleValue { + if m != nil { + return m.CurrentModelAttributedConversionsFromInteractionsValuePerInteraction + } + return nil +} + +func (m *Metrics) GetCurrentModelAttributedConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.CurrentModelAttributedConversionsValue + } + return nil +} + +func (m *Metrics) GetCurrentModelAttributedConversionsValuePerCost() *wrappers.DoubleValue { + if m != nil { + return m.CurrentModelAttributedConversionsValuePerCost + } + return nil +} + +func (m *Metrics) GetEngagementRate() *wrappers.DoubleValue { + if m != nil { + return m.EngagementRate + } + return nil +} + +func (m *Metrics) GetEngagements() *wrappers.Int64Value { + if m != nil { + return m.Engagements + } + return nil +} + +func (m *Metrics) GetHotelAverageLeadValueMicros() *wrappers.DoubleValue { + if m != nil { + return m.HotelAverageLeadValueMicros + } + return nil +} + +func (m *Metrics) GetHistoricalCreativeQualityScore() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.HistoricalCreativeQualityScore + } + return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +func (m *Metrics) GetHistoricalLandingPageQualityScore() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.HistoricalLandingPageQualityScore + } + return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +func (m *Metrics) GetHistoricalQualityScore() *wrappers.Int64Value { + if m != nil { + return m.HistoricalQualityScore + } + return nil +} + +func (m *Metrics) GetHistoricalSearchPredictedCtr() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.HistoricalSearchPredictedCtr + } + 
return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +func (m *Metrics) GetGmailForwards() *wrappers.Int64Value { + if m != nil { + return m.GmailForwards + } + return nil +} + +func (m *Metrics) GetGmailSaves() *wrappers.Int64Value { + if m != nil { + return m.GmailSaves + } + return nil +} + +func (m *Metrics) GetGmailSecondaryClicks() *wrappers.Int64Value { + if m != nil { + return m.GmailSecondaryClicks + } + return nil +} + +func (m *Metrics) GetImpressionReach() *wrappers.Int64Value { + if m != nil { + return m.ImpressionReach + } + return nil +} + +func (m *Metrics) GetImpressionsFromStoreReach() *wrappers.Int64Value { + if m != nil { + return m.ImpressionsFromStoreReach + } + return nil +} + +func (m *Metrics) GetImpressions() *wrappers.Int64Value { + if m != nil { + return m.Impressions + } + return nil +} + +func (m *Metrics) GetInteractionRate() *wrappers.DoubleValue { + if m != nil { + return m.InteractionRate + } + return nil +} + +func (m *Metrics) GetInteractions() *wrappers.Int64Value { + if m != nil { + return m.Interactions + } + return nil +} + +func (m *Metrics) GetInteractionEventTypes() []enums.InteractionEventTypeEnum_InteractionEventType { + if m != nil { + return m.InteractionEventTypes + } + return nil +} + +func (m *Metrics) GetInvalidClickRate() *wrappers.DoubleValue { + if m != nil { + return m.InvalidClickRate + } + return nil +} + +func (m *Metrics) GetInvalidClicks() *wrappers.Int64Value { + if m != nil { + return m.InvalidClicks + } + return nil +} + +func (m *Metrics) GetMobileFriendlyClicksPercentage() *wrappers.DoubleValue { + if m != nil { + return m.MobileFriendlyClicksPercentage + } + return nil +} + +func (m *Metrics) GetOrganicClicks() *wrappers.Int64Value { + if m != nil { + return m.OrganicClicks + } + return nil +} + +func (m *Metrics) GetOrganicClicksPerQuery() *wrappers.DoubleValue { + if m != nil { + return m.OrganicClicksPerQuery + } + return nil +} + +func (m *Metrics) GetOrganicImpressions() *wrappers.Int64Value { + if m != nil { + return m.OrganicImpressions + } + return nil +} + +func (m *Metrics) GetOrganicImpressionsPerQuery() *wrappers.DoubleValue { + if m != nil { + return m.OrganicImpressionsPerQuery + } + return nil +} + +func (m *Metrics) GetOrganicQueries() *wrappers.Int64Value { + if m != nil { + return m.OrganicQueries + } + return nil +} + +func (m *Metrics) GetPercentNewVisitors() *wrappers.DoubleValue { + if m != nil { + return m.PercentNewVisitors + } + return nil +} + +func (m *Metrics) GetPhoneCalls() *wrappers.Int64Value { + if m != nil { + return m.PhoneCalls + } + return nil +} + +func (m *Metrics) GetPhoneImpressions() *wrappers.Int64Value { + if m != nil { + return m.PhoneImpressions + } + return nil +} + +func (m *Metrics) GetPhoneThroughRate() *wrappers.DoubleValue { + if m != nil { + return m.PhoneThroughRate + } + return nil +} + +func (m *Metrics) GetRelativeCtr() *wrappers.DoubleValue { + if m != nil { + return m.RelativeCtr + } + return nil +} + +func (m *Metrics) GetSearchAbsoluteTopImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchAbsoluteTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchBudgetLostAbsoluteTopImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchBudgetLostAbsoluteTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchBudgetLostImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchBudgetLostImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchBudgetLostTopImpressionShare() 
*wrappers.DoubleValue { + if m != nil { + return m.SearchBudgetLostTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchClickShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchClickShare + } + return nil +} + +func (m *Metrics) GetSearchExactMatchImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchExactMatchImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchRankLostAbsoluteTopImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchRankLostAbsoluteTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchRankLostImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchRankLostImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchRankLostTopImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchRankLostTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSearchTopImpressionShare() *wrappers.DoubleValue { + if m != nil { + return m.SearchTopImpressionShare + } + return nil +} + +func (m *Metrics) GetSpeedScore() *wrappers.Int64Value { + if m != nil { + return m.SpeedScore + } + return nil +} + +func (m *Metrics) GetTopImpressionPercentage() *wrappers.DoubleValue { + if m != nil { + return m.TopImpressionPercentage + } + return nil +} + +func (m *Metrics) GetValidAcceleratedMobilePagesClicksPercentage() *wrappers.DoubleValue { + if m != nil { + return m.ValidAcceleratedMobilePagesClicksPercentage + } + return nil +} + +func (m *Metrics) GetValuePerAllConversions() *wrappers.DoubleValue { + if m != nil { + return m.ValuePerAllConversions + } + return nil +} + +func (m *Metrics) GetValuePerConversion() *wrappers.DoubleValue { + if m != nil { + return m.ValuePerConversion + } + return nil +} + +func (m *Metrics) GetValuePerCurrentModelAttributedConversion() *wrappers.DoubleValue { + if m != nil { + return m.ValuePerCurrentModelAttributedConversion + } + return nil +} + +func (m *Metrics) GetVideoQuartile_100Rate() *wrappers.DoubleValue { + if m != nil { + return m.VideoQuartile_100Rate + } + return nil +} + +func (m *Metrics) GetVideoQuartile_25Rate() *wrappers.DoubleValue { + if m != nil { + return m.VideoQuartile_25Rate + } + return nil +} + +func (m *Metrics) GetVideoQuartile_50Rate() *wrappers.DoubleValue { + if m != nil { + return m.VideoQuartile_50Rate + } + return nil +} + +func (m *Metrics) GetVideoQuartile_75Rate() *wrappers.DoubleValue { + if m != nil { + return m.VideoQuartile_75Rate + } + return nil +} + +func (m *Metrics) GetVideoViewRate() *wrappers.DoubleValue { + if m != nil { + return m.VideoViewRate + } + return nil +} + +func (m *Metrics) GetVideoViews() *wrappers.Int64Value { + if m != nil { + return m.VideoViews + } + return nil +} + +func (m *Metrics) GetViewThroughConversions() *wrappers.Int64Value { + if m != nil { + return m.ViewThroughConversions + } + return nil +} + +func init() { + proto.RegisterType((*Metrics)(nil), "google.ads.googleads.v1.common.Metrics") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/metrics.proto", fileDescriptor_metrics_a6092c916b5f5d74) +} + +var fileDescriptor_metrics_a6092c916b5f5d74 = []byte{ + // 2556 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x9a, 0xdb, 0x72, 0x14, 0xc7, + 0x19, 0xc7, 0x6b, 0x21, 0x65, 0x27, 0x9f, 0x40, 0xc2, 0x83, 0x80, 0x96, 0x2c, 0x4b, 
0x42, 0x01, + 0x43, 0x30, 0xac, 0x0e, 0x58, 0xc6, 0x11, 0x60, 0xd7, 0xea, 0x04, 0x22, 0x12, 0xe8, 0x84, 0x04, + 0xb6, 0x92, 0x71, 0xef, 0x4c, 0x6b, 0x77, 0xac, 0x39, 0x2c, 0xdd, 0xbd, 0x2b, 0x14, 0xc7, 0xb9, + 0x48, 0x72, 0x91, 0x5c, 0x25, 0x55, 0xb9, 0x49, 0xe5, 0x32, 0x97, 0x79, 0x82, 0x3c, 0x43, 0x9e, + 0x21, 0xf7, 0xa9, 0xca, 0x53, 0xa4, 0xfa, 0x30, 0x33, 0xbd, 0xbb, 0xb3, 0xda, 0x16, 0xe5, 0x1b, + 0x58, 0x86, 0xfe, 0xff, 0xfe, 0xdf, 0xf4, 0xf4, 0xe1, 0xeb, 0x6f, 0x06, 0xee, 0xd6, 0x92, 0xa4, + 0x16, 0x92, 0x69, 0xec, 0xb3, 0x69, 0xf5, 0x53, 0xfc, 0x6a, 0xcd, 0x4e, 0x7b, 0x49, 0x14, 0x25, + 0xf1, 0x74, 0x44, 0x38, 0x0d, 0x3c, 0x56, 0x6e, 0xd0, 0x84, 0x27, 0xce, 0xb8, 0x6a, 0x52, 0xc6, + 0x3e, 0x2b, 0x67, 0xad, 0xcb, 0xad, 0xd9, 0xb2, 0x6a, 0x3d, 0xba, 0xd0, 0x8b, 0x46, 0xe2, 0x66, + 0xc4, 0xa6, 0x83, 0x98, 0x13, 0x8a, 0x3d, 0x1e, 0x24, 0xb1, 0x4b, 0x5a, 0x24, 0xe6, 0x2e, 0x3f, + 0x69, 0x10, 0xc5, 0x1e, 0xfd, 0xfc, 0x74, 0xed, 0x9b, 0x26, 0x0e, 0x03, 0x7e, 0xe2, 0x32, 0x2f, + 0xa1, 0xc4, 0xad, 0x36, 0xbd, 0x23, 0xc2, 0xb5, 0x52, 0x47, 0x35, 0x2d, 0xff, 0x55, 0x6d, 0x1e, + 0x4e, 0x1f, 0x53, 0xdc, 0x68, 0x10, 0xaa, 0xa3, 0x1e, 0x1d, 0x4b, 0xc9, 0x8d, 0x60, 0x1a, 0xc7, + 0x71, 0xc2, 0xb1, 0x88, 0x40, 0xff, 0xef, 0xd4, 0x7f, 0x9e, 0xc3, 0xfb, 0x1b, 0xea, 0x2e, 0x9d, + 0x00, 0xa6, 0x70, 0x95, 0x25, 0x61, 0x93, 0x13, 0x97, 0x27, 0x0d, 0x37, 0x88, 0x1a, 0x94, 0x30, + 0x26, 0xe2, 0x6d, 0x10, 0xea, 0x91, 0x98, 0xe3, 0x1a, 0x41, 0xee, 0x64, 0xe9, 0xf6, 0xc0, 0xdc, + 0x98, 0xee, 0x81, 0x72, 0x6a, 0x5b, 0x5e, 0x4e, 0x9a, 0xd5, 0x90, 0xec, 0xe1, 0xb0, 0x49, 0xb6, + 0x27, 0x52, 0xce, 0x6e, 0xd2, 0x58, 0xcb, 0x28, 0x9b, 0x19, 0xc4, 0x59, 0x86, 0x21, 0xd1, 0x13, + 0x2d, 0xe2, 0xb6, 0x02, 0x72, 0xec, 0x7a, 0x8d, 0x08, 0x95, 0x2c, 0xb8, 0x17, 0x95, 0x68, 0x2f, + 0x20, 0xc7, 0x4b, 0x8d, 0xa8, 0x8b, 0xc2, 0x29, 0x7a, 0x71, 0x46, 0x0a, 0xa7, 0xce, 0x0e, 0x5c, + 0x33, 0x29, 0xf9, 0x5d, 0x33, 0x74, 0x4e, 0xd2, 0x3e, 0xec, 0xa2, 0xad, 0xc5, 0xfc, 0xb3, 0x4f, + 0x15, 0xec, 0x4a, 0x0e, 0xcb, 0xef, 0x94, 0x39, 0xaf, 0x60, 0xc4, 0x84, 0x46, 0x04, 0xb3, 0x26, + 0xc5, 0xd5, 0x40, 0x3c, 0x45, 0xf4, 0x8d, 0x45, 0x90, 0xd7, 0x72, 0xee, 0x86, 0x29, 0x76, 0x6a, + 0x30, 0x55, 0x44, 0x0e, 0x89, 0xeb, 0x25, 0x8c, 0xbb, 0x51, 0xe0, 0xd1, 0x84, 0xa1, 0xf3, 0xfd, + 0x23, 0x1f, 0xef, 0x76, 0x08, 0xc9, 0x52, 0xc2, 0xf8, 0x86, 0x44, 0x38, 0xf5, 0x9e, 0x46, 0x66, + 0x17, 0xfd, 0xa8, 0xbf, 0xd1, 0x44, 0x91, 0x91, 0xd9, 0x59, 0xbb, 0xed, 0x4f, 0x40, 0xfc, 0x91, + 0x76, 0x15, 0xb6, 0xe8, 0x2a, 0xe3, 0x11, 0xec, 0xe5, 0x52, 0xa7, 0x01, 0x1f, 0xe3, 0x30, 0x74, + 0xbd, 0x24, 0x6e, 0x11, 0x2a, 0x8d, 0xdc, 0x43, 0x9a, 0x44, 0xae, 0x31, 0x0f, 0x99, 0x4b, 0x31, + 0x27, 0xa8, 0x62, 0x61, 0x32, 0x85, 0xc3, 0x70, 0x29, 0x47, 0xad, 0xd2, 0x24, 0x5a, 0x33, 0x40, + 0xdb, 0x98, 0x13, 0x67, 0x13, 0xae, 0x74, 0x3a, 0xb6, 0x84, 0x18, 0x2d, 0x5a, 0x18, 0x5c, 0x6e, + 0x37, 0x90, 0x17, 0x9d, 0x15, 0x18, 0xea, 0x20, 0xa2, 0xf7, 0x2d, 0x58, 0x83, 0xed, 0x2c, 0xe7, + 0x1b, 0x18, 0x2f, 0x0c, 0x4c, 0xcc, 0x6b, 0x39, 0x6c, 0xd0, 0x17, 0x16, 0xd4, 0xd1, 0x82, 0x08, + 0x37, 0x09, 0x15, 0x43, 0x46, 0x8e, 0xca, 0xa2, 0xce, 0xf6, 0xc2, 0xc0, 0x3b, 0x72, 0x79, 0xe2, + 0x7a, 0x38, 0x0c, 0x51, 0xcb, 0xc2, 0xe5, 0xa3, 0xee, 0x8e, 0x5e, 0x12, 0x90, 0xdd, 0x64, 0x09, + 0x87, 0xa1, 0xe3, 0xc1, 0x44, 0xa1, 0x91, 0x1f, 0x50, 0xa2, 0x1e, 0x05, 0x3a, 0xb6, 0x70, 0x19, + 0xeb, 0x76, 0x59, 0xce, 0x08, 0xce, 0x5f, 0x4a, 0xf0, 0xa0, 0xff, 0xd8, 0xc9, 0xbb, 0xd0, 0xb8, + 0x8c, 0x96, 0x2c, 0xdc, 0x67, 0x4f, 0x1f, 0x4c, 0x69, 0xcf, 0x1a, 0xd7, 0x9c, 0x7d, 0x18, 0x29, + 0x8c, 0x28, 
0x22, 0x71, 0x13, 0xbd, 0xb5, 0xf0, 0xbc, 0xda, 0xed, 0xb9, 0x41, 0xe2, 0xa6, 0xf3, + 0x1a, 0x46, 0x0b, 0xc1, 0x09, 0xf5, 0x09, 0x45, 0x27, 0x56, 0x4b, 0x55, 0x17, 0xf9, 0x85, 0x10, + 0x3b, 0x31, 0xdc, 0x2c, 0x46, 0xf3, 0x3a, 0xa1, 0x2e, 0x89, 0x6b, 0xb8, 0x46, 0x22, 0x12, 0x73, + 0xf4, 0x6b, 0x0b, 0x97, 0xeb, 0x05, 0x2e, 0x82, 0xb3, 0x92, 0x61, 0x1c, 0x1f, 0x26, 0x0b, 0xfd, + 0x18, 0x17, 0xbb, 0x66, 0x2b, 0x60, 0x01, 0x47, 0xdf, 0xbd, 0xdb, 0xe0, 0xd8, 0x11, 0x88, 0x3d, + 0x41, 0x90, 0xeb, 0x62, 0x6f, 0x97, 0x63, 0x52, 0x65, 0x01, 0x27, 0xe8, 0x37, 0x16, 0x3e, 0xe3, + 0x3d, 0x7c, 0xf6, 0x15, 0xc3, 0xf9, 0x12, 0x2e, 0xe0, 0x16, 0xa1, 0xb8, 0xa6, 0xd6, 0x76, 0xf4, + 0x63, 0x0b, 0xe6, 0x80, 0x56, 0xc8, 0x59, 0xf9, 0x18, 0x06, 0x32, 0x40, 0xc3, 0x43, 0x3f, 0xb1, + 0xd0, 0x43, 0xaa, 0x6f, 0x78, 0xed, 0x72, 0x82, 0xaa, 0x67, 0x92, 0x93, 0x76, 0x79, 0x84, 0xe0, + 0x4c, 0xf2, 0xa8, 0x5d, 0xde, 0x42, 0x03, 0x67, 0x92, 0xb7, 0x9c, 0x35, 0xf8, 0x20, 0x95, 0x1f, + 0x52, 0xf2, 0xa6, 0x49, 0x62, 0xef, 0x04, 0x5d, 0xb0, 0x80, 0x5c, 0xd2, 0xb2, 0xd5, 0x54, 0xe5, + 0x3c, 0x03, 0x27, 0x45, 0x35, 0xc4, 0x1f, 0x62, 0x83, 0x62, 0xc8, 0x3b, 0x03, 0x6b, 0x13, 0xd7, + 0xe4, 0xfe, 0xc4, 0x9c, 0x27, 0x70, 0x29, 0x63, 0x25, 0x2c, 0x90, 0x4b, 0xc6, 0x45, 0x0b, 0xd2, + 0x50, 0x4a, 0xd2, 0x22, 0x67, 0x03, 0x86, 0x53, 0x10, 0x0f, 0x22, 0xe2, 0x26, 0xb1, 0x2b, 0x07, + 0xde, 0xae, 0x05, 0x2c, 0xed, 0x99, 0xdd, 0x20, 0x22, 0x2f, 0xe2, 0x1d, 0x31, 0xd6, 0xf6, 0x61, + 0xa4, 0x4a, 0x62, 0xaf, 0x1e, 0x61, 0x7a, 0xe4, 0xa6, 0xe0, 0x08, 0xbf, 0x95, 0x03, 0x67, 0xd0, + 0x66, 0x7d, 0xc9, 0xe4, 0x15, 0xa5, 0xde, 0xc0, 0x6f, 0xc5, 0x20, 0xaa, 0xc0, 0xc5, 0x1c, 0x2c, + 0x52, 0xb4, 0x0d, 0x0b, 0xd8, 0x85, 0x4c, 0x22, 0x32, 0xb4, 0xc7, 0x30, 0x50, 0x4d, 0x9a, 0xb1, + 0x47, 0xd4, 0x76, 0x3d, 0x64, 0x33, 0x12, 0x94, 0x40, 0x6e, 0xcb, 0xf7, 0xe1, 0x3d, 0xb9, 0x0d, + 0x31, 0x74, 0xb9, 0x7f, 0xb2, 0xa2, 0x9b, 0x8a, 0xdc, 0xd2, 0x4b, 0xa2, 0x6a, 0x10, 0x13, 0xdf, + 0xd5, 0x6a, 0xd6, 0x5f, 0x3d, 0x98, 0x6a, 0x96, 0x14, 0x65, 0x1f, 0x46, 0x3a, 0x28, 0x72, 0xbf, + 0x78, 0xd3, 0x24, 0xf4, 0x04, 0x71, 0x9b, 0x5e, 0x6d, 0x07, 0x6e, 0x12, 0xba, 0x25, 0xb4, 0xce, + 0x2a, 0x5c, 0xca, 0xc0, 0x82, 0x16, 0x10, 0x86, 0x9a, 0xfd, 0xe3, 0xcb, 0xee, 0x69, 0x4b, 0x69, + 0x9c, 0x10, 0x6e, 0x78, 0x49, 0xcc, 0xc5, 0x69, 0xa4, 0xda, 0xf4, 0x6b, 0x84, 0xbb, 0xa1, 0xc8, + 0x22, 0x8d, 0xd4, 0x9f, 0xd5, 0x31, 0x25, 0x68, 0xd8, 0x22, 0xd6, 0x49, 0x4d, 0x5a, 0x94, 0xa0, + 0xf5, 0x84, 0xf1, 0x3c, 0xc9, 0xdb, 0x11, 0x14, 0x67, 0x0f, 0x50, 0xea, 0xd6, 0xe5, 0x70, 0xc5, + 0xae, 0x37, 0xa4, 0xba, 0x93, 0x7b, 0x0c, 0x77, 0xf2, 0xe5, 0xd8, 0x0d, 0x31, 0xe3, 0x2e, 0x25, + 0x1e, 0x09, 0x5a, 0xc4, 0x77, 0xe5, 0x2c, 0x66, 0xdc, 0xf5, 0x31, 0x57, 0x33, 0x05, 0xad, 0xf5, + 0x70, 0xda, 0xe1, 0x34, 0x88, 0x6b, 0xca, 0xe9, 0x66, 0xce, 0x5b, 0xc7, 0x8c, 0x6f, 0x6b, 0xda, + 0xb6, 0x82, 0x2d, 0x63, 0x2e, 0xe7, 0x8e, 0xc8, 0x46, 0x3a, 0x8d, 0x8d, 0x7f, 0x0b, 0x4b, 0xf4, + 0xcc, 0xc2, 0x6d, 0xac, 0xdd, 0x2d, 0xdf, 0x13, 0x84, 0x91, 0xd8, 0x70, 0xd2, 0x5e, 0xa3, 0x38, + 0x3e, 0xea, 0xf1, 0x84, 0xae, 0xda, 0x6c, 0x38, 0x9a, 0xb3, 0x8d, 0xe3, 0xa3, 0xa2, 0xe7, 0x13, + 0x48, 0xa7, 0x7e, 0xe9, 0xf2, 0x8a, 0xcd, 0x09, 0xd0, 0xeb, 0x93, 0x2b, 0xaf, 0xc1, 0x07, 0xdd, + 0x79, 0xf2, 0xaa, 0xcd, 0x92, 0xea, 0x75, 0x26, 0xc9, 0xaf, 0x61, 0xf4, 0x94, 0xcc, 0xf6, 0x89, + 0x4d, 0x06, 0xe3, 0xf5, 0x48, 0x6b, 0xff, 0x54, 0x82, 0xfb, 0xef, 0x92, 0x04, 0x3e, 0xb5, 0x30, + 0x2d, 0x7b, 0x67, 0xcb, 0x00, 0xbf, 0x80, 0x01, 0xf3, 0x1c, 0x30, 0x62, 0x93, 0x0c, 0x18, 0x02, + 0xe7, 0x91, 0xd0, 0xe7, 0x27, 0xc4, 
0xd1, 0xfe, 0xab, 0x05, 0x78, 0xf9, 0x69, 0xf0, 0xa5, 0x98, + 0xba, 0x8c, 0xcb, 0x3b, 0xed, 0x3c, 0x92, 0x2c, 0xdb, 0x1c, 0xd2, 0x84, 0x7a, 0x93, 0xd0, 0x4a, + 0xfb, 0xc9, 0x64, 0x1d, 0x2e, 0x67, 0xd8, 0x1c, 0x89, 0xc6, 0x6c, 0x36, 0x31, 0x4d, 0xcc, 0x71, + 0xce, 0x77, 0x70, 0x37, 0xa7, 0x35, 0x29, 0x15, 0x53, 0x26, 0x4a, 0x7c, 0x12, 0xba, 0x98, 0x73, + 0x1a, 0x54, 0x9b, 0x5c, 0xac, 0xc3, 0xb9, 0xcd, 0xb7, 0x16, 0x36, 0xb7, 0x52, 0x1b, 0xc5, 0xdb, + 0x10, 0xb8, 0x4a, 0x46, 0x33, 0xcc, 0xc5, 0xe2, 0x46, 0x13, 0xc6, 0x5c, 0x9f, 0xb4, 0x02, 0x8f, + 0xb4, 0xf5, 0xd0, 0x47, 0x56, 0x8b, 0x9b, 0x50, 0x2f, 0x4b, 0xb1, 0xd9, 0x45, 0x65, 0x38, 0x2f, + 0xb6, 0xcd, 0x71, 0x0b, 0x84, 0x68, 0xe8, 0x44, 0x70, 0xc3, 0xe2, 0xde, 0x19, 0x22, 0x36, 0x49, + 0xb7, 0xd7, 0xe7, 0xae, 0x99, 0xf3, 0xb7, 0x12, 0x3c, 0xb4, 0xf1, 0xeb, 0xb5, 0x9a, 0x1c, 0x5a, + 0x84, 0x31, 0xdf, 0x37, 0x8c, 0xc2, 0x35, 0xe6, 0x5f, 0x25, 0x58, 0x7f, 0xc7, 0xd0, 0x8a, 0xa7, + 0x75, 0xcd, 0x22, 0xd6, 0xe5, 0x33, 0xc7, 0x5a, 0x34, 0xd9, 0xdf, 0xc2, 0x1d, 0xab, 0xc8, 0xd5, + 0xba, 0x59, 0xb7, 0x88, 0xeb, 0xe3, 0xbe, 0x71, 0xa9, 0xd5, 0xf4, 0x8f, 0x62, 0xc9, 0xb3, 0xb6, + 0xce, 0xd7, 0xd9, 0xc0, 0x22, 0x86, 0x7b, 0x76, 0x31, 0xa4, 0xab, 0xef, 0x0a, 0x0c, 0xe5, 0x87, + 0x44, 0x35, 0x5a, 0x26, 0x6c, 0xaa, 0x1f, 0xb9, 0x48, 0x0e, 0x83, 0xc7, 0x30, 0x90, 0x5f, 0x61, + 0x68, 0xb2, 0xff, 0xc2, 0x67, 0xb6, 0x77, 0xaa, 0x30, 0x51, 0x4f, 0xb8, 0xe8, 0x07, 0x9d, 0x15, + 0x87, 0x04, 0xfb, 0xfa, 0xee, 0xf5, 0x5a, 0xfa, 0x0b, 0x8b, 0xa8, 0x3e, 0x94, 0x10, 0x9d, 0x1b, + 0xaf, 0x13, 0xec, 0xcb, 0xcb, 0x7a, 0x75, 0xfd, 0x6b, 0x09, 0xae, 0xd7, 0x03, 0x71, 0x82, 0x0c, + 0x3c, 0x1c, 0xba, 0x1e, 0x25, 0x58, 0xd6, 0xc3, 0xda, 0xea, 0xbe, 0x68, 0x73, 0xb2, 0x74, 0x7b, + 0x70, 0xee, 0x69, 0xb9, 0x57, 0x1d, 0x5a, 0xd6, 0x8a, 0xcb, 0x5b, 0x4a, 0xb3, 0x23, 0x24, 0x8b, + 0xb2, 0x52, 0xbc, 0x12, 0x37, 0xa3, 0x82, 0xcb, 0xdb, 0xe3, 0xb9, 0xe5, 0x92, 0x76, 0x34, 0x5b, + 0x39, 0x7f, 0x2f, 0xc1, 0x4d, 0x23, 0xaa, 0x10, 0xc7, 0x7e, 0x10, 0xd7, 0xd4, 0x19, 0xa8, 0x3d, + 0xb2, 0xad, 0x1f, 0x38, 0x32, 0xa3, 0x33, 0xd6, 0x95, 0xab, 0x38, 0x41, 0xb5, 0x05, 0xf7, 0x12, + 0x90, 0x11, 0x5b, 0x7b, 0x38, 0xdb, 0xfd, 0x1f, 0xf1, 0xd5, 0x5c, 0xdc, 0x86, 0xfd, 0x73, 0x09, + 0x26, 0x0c, 0x2e, 0x23, 0x98, 0x7a, 0x75, 0xb7, 0x41, 0x89, 0x1f, 0x78, 0x72, 0x06, 0x70, 0x8a, + 0x76, 0x7e, 0xe0, 0xbb, 0x1d, 0xcb, 0x0d, 0x77, 0xa4, 0xdf, 0x66, 0x6a, 0x27, 0x4e, 0x3f, 0x8b, + 0x30, 0x58, 0x8b, 0x70, 0x10, 0xba, 0x87, 0x09, 0x3d, 0xc6, 0xd4, 0x67, 0xe8, 0x65, 0xff, 0xdb, + 0xbb, 0x28, 0x25, 0xab, 0x5a, 0x21, 0xf6, 0x7e, 0xc5, 0x60, 0xb8, 0x45, 0x18, 0xda, 0xb3, 0xd8, + 0xfb, 0x65, 0xfb, 0x1d, 0xd1, 0xdc, 0xd9, 0x82, 0xab, 0x5a, 0x4d, 0xbc, 0x24, 0xf6, 0x31, 0x3d, + 0x49, 0x8f, 0x44, 0xfb, 0xfd, 0x41, 0xc3, 0x0a, 0x94, 0x2a, 0xf5, 0xc1, 0x68, 0x15, 0x2e, 0x19, + 0x19, 0x2c, 0x25, 0xd8, 0xab, 0xa3, 0x1b, 0x16, 0xe7, 0x97, 0x5c, 0xb4, 0x2d, 0x34, 0xce, 0x01, + 0x8c, 0x19, 0xd5, 0x68, 0xb3, 0x10, 0xa3, 0x98, 0xdf, 0xf7, 0x67, 0x8e, 0x18, 0x80, 0xac, 0x04, + 0xa3, 0xe8, 0x8f, 0x61, 0xc0, 0xac, 0x75, 0xdf, 0xb4, 0x58, 0x39, 0x8c, 0xf6, 0xe2, 0xac, 0x6f, + 0xbe, 0xf4, 0x91, 0x0b, 0xd8, 0xc7, 0x36, 0x67, 0x7d, 0x43, 0x25, 0x57, 0xb0, 0x2f, 0xe1, 0x82, + 0xb9, 0x3b, 0xa1, 0x5b, 0xfd, 0x03, 0x69, 0x13, 0x38, 0x7f, 0x28, 0xc1, 0xb5, 0xe2, 0xf7, 0x4f, + 0x0c, 0xf9, 0x93, 0xe7, 0x6f, 0x0f, 0xce, 0xad, 0xf7, 0x19, 0xcd, 0xc6, 0xe6, 0xb4, 0x22, 0xc4, + 0xbb, 0x27, 0x0d, 0x22, 0xc7, 0x73, 0xd1, 0x7f, 0x6c, 0x5f, 0x09, 0x0a, 0xae, 0x32, 0xe7, 0x19, + 0x38, 0x41, 0xdc, 0xc2, 0x61, 0xa0, 0x4f, 0xc3, 0xaa, 0x4b, 
0x6e, 0xdb, 0x64, 0xfd, 0x5a, 0x27, + 0x87, 0x8f, 0xec, 0x93, 0x45, 0x18, 0x6c, 0x63, 0x31, 0xf4, 0x33, 0x8b, 0x69, 0x61, 0x62, 0x98, + 0x53, 0x83, 0xeb, 0x51, 0x52, 0x0d, 0x42, 0xe2, 0x1e, 0xd2, 0x80, 0xc4, 0x7e, 0x78, 0x62, 0x9c, + 0xd2, 0xd3, 0x17, 0x5e, 0x91, 0xcd, 0xc1, 0x4a, 0x61, 0x56, 0x35, 0x25, 0x3b, 0xac, 0xa7, 0xef, + 0xbb, 0x16, 0x61, 0x30, 0xa1, 0x35, 0x1c, 0x07, 0x5e, 0x1a, 0x6c, 0x6c, 0x11, 0xac, 0x96, 0xe8, + 0x60, 0x5f, 0x02, 0x6a, 0x67, 0x18, 0xa5, 0x84, 0xc4, 0x26, 0x03, 0x6f, 0xc3, 0x65, 0x95, 0x84, + 0x75, 0xb8, 0x9c, 0x62, 0xcd, 0xb1, 0xde, 0xe8, 0x1f, 0x9f, 0xa3, 0x75, 0xe6, 0xab, 0x1c, 0x17, + 0x3e, 0x2a, 0xa0, 0x19, 0x91, 0xbe, 0xb1, 0x79, 0xd1, 0xd0, 0x0d, 0xce, 0xc2, 0x5d, 0x86, 0xa1, + 0xd4, 0x20, 0xad, 0x7b, 0x50, 0x8b, 0xba, 0x8c, 0xd6, 0xa4, 0x65, 0x8f, 0xe7, 0x30, 0xac, 0x9f, + 0xb0, 0x1b, 0xcb, 0x37, 0x4e, 0x2c, 0xe0, 0x09, 0x65, 0xe8, 0x8e, 0x45, 0x74, 0x8e, 0x56, 0x3e, + 0x27, 0xc7, 0x7b, 0x5a, 0x27, 0xd6, 0xd7, 0x46, 0x3d, 0x89, 0x89, 0x7c, 0xcd, 0xc1, 0xd0, 0x27, + 0x16, 0xeb, 0xab, 0x6c, 0xbf, 0x24, 0x9a, 0x3b, 0x4f, 0xe1, 0x03, 0xa5, 0x36, 0x1f, 0xc0, 0xdd, + 0xfe, 0x8c, 0x4b, 0x52, 0x65, 0x76, 0xff, 0x33, 0x70, 0x14, 0x89, 0xd7, 0x69, 0xd2, 0xac, 0xd5, + 0xd5, 0x04, 0xbb, 0x67, 0x33, 0xc1, 0xa4, 0x6e, 0x57, 0xc9, 0xd2, 0x45, 0x87, 0x92, 0x50, 0xe5, + 0x21, 0x62, 0xd7, 0x2b, 0xdb, 0x1c, 0x38, 0x53, 0x85, 0xd8, 0xb8, 0x42, 0xb8, 0xa1, 0xb7, 0xcf, + 0x5e, 0xaf, 0x95, 0x55, 0xe5, 0xe2, 0xb9, 0x4d, 0x6d, 0x49, 0x91, 0x2a, 0x45, 0xef, 0x95, 0x55, + 0xed, 0xe2, 0x77, 0x25, 0x98, 0xd1, 0x76, 0x66, 0x25, 0xeb, 0x74, 0xeb, 0x57, 0x16, 0xd6, 0x77, + 0x14, 0x35, 0xaf, 0x6a, 0x9d, 0x12, 0xc4, 0xb7, 0xf0, 0xd3, 0x82, 0x18, 0xba, 0x6c, 0xa7, 0x6d, + 0x2a, 0x28, 0x9d, 0xb6, 0x9d, 0x5e, 0x6f, 0xe0, 0x56, 0x81, 0x57, 0xe1, 0x6d, 0xbe, 0xb6, 0x79, + 0xc1, 0xd9, 0xe9, 0x57, 0x70, 0x7b, 0xcf, 0xc0, 0xd1, 0x96, 0x6a, 0xf9, 0x56, 0xf4, 0x19, 0x9b, + 0xe1, 0xa5, 0x74, 0x72, 0xed, 0xe9, 0xec, 0x2a, 0xf2, 0x16, 0x7b, 0xdc, 0x8d, 0x30, 0xf7, 0xea, + 0xdd, 0xa1, 0xcf, 0xda, 0x77, 0xd5, 0x8a, 0xe0, 0x6c, 0x08, 0x4c, 0x67, 0xdc, 0xbb, 0x70, 0x4d, + 0x7b, 0x75, 0xf1, 0xe7, 0x6c, 0x56, 0x4e, 0x25, 0xee, 0xa4, 0xfe, 0x16, 0xca, 0x9a, 0x9a, 0x97, + 0xe5, 0x4e, 0x1f, 0x6e, 0x5f, 0x59, 0x98, 0xdd, 0x56, 0xcc, 0xb4, 0x44, 0x77, 0xca, 0x60, 0x3b, + 0x84, 0xeb, 0x5d, 0xfe, 0x5d, 0x96, 0xf7, 0x6d, 0x5e, 0xb9, 0xb6, 0x5b, 0x76, 0xfa, 0x44, 0x70, + 0xb3, 0xcb, 0xa7, 0xf0, 0xf6, 0xbe, 0xb6, 0x9f, 0xc8, 0xa9, 0x57, 0xc1, 0x6d, 0x7d, 0x0d, 0x1f, + 0x6a, 0xbb, 0x42, 0x93, 0x03, 0x0b, 0x13, 0xa4, 0x00, 0x05, 0xf0, 0x47, 0x30, 0xc0, 0x1a, 0x84, + 0xf8, 0xfa, 0xa0, 0x70, 0x64, 0xb1, 0x50, 0xcb, 0xf6, 0xea, 0x70, 0xf0, 0x0a, 0x46, 0x7a, 0x7f, + 0x18, 0xf3, 0x4b, 0x9b, 0x42, 0x23, 0xef, 0xf1, 0x41, 0xcc, 0xef, 0x4b, 0x30, 0xa3, 0x92, 0x19, + 0xec, 0x79, 0x24, 0x24, 0x62, 0xdd, 0xf6, 0x5d, 0x9d, 0x9c, 0x88, 0x03, 0x17, 0x2b, 0xc8, 0x4c, + 0x42, 0x0b, 0xc7, 0x4f, 0x24, 0xb5, 0x92, 0x43, 0x37, 0x24, 0x53, 0x1c, 0xa7, 0x58, 0x57, 0x9a, + 0xb2, 0x0f, 0x23, 0xf9, 0xa9, 0xbe, 0xb3, 0xca, 0xf7, 0xa9, 0x4d, 0x0d, 0xab, 0xa5, 0xcf, 0xef, + 0x1d, 0x65, 0xbe, 0xe7, 0x30, 0x6c, 0x96, 0x0b, 0xb2, 0x02, 0xdc, 0xbc, 0xcd, 0x7e, 0xdb, 0xca, + 0x6a, 0x02, 0x59, 0xad, 0xed, 0x7b, 0xb8, 0x67, 0xf0, 0x2c, 0x2a, 0x7d, 0xbf, 0xb2, 0x99, 0x79, + 0x99, 0x51, 0xbf, 0x52, 0xdf, 0x0e, 0x5c, 0x6b, 0x05, 0x3e, 0x49, 0xc4, 0xb1, 0x93, 0x72, 0xf1, + 0x88, 0x66, 0x67, 0x66, 0xd4, 0x5e, 0xfb, 0x99, 0x85, 0xd1, 0xb0, 0x14, 0x6f, 0x69, 0xed, 0xec, + 0xcc, 0x8c, 0xdc, 0x6f, 0xb7, 0xe0, 0x6a, 0x07, 0x74, 0x6e, 0x5e, 0x31, 0x1f, 0xd8, 
0x7c, 0x3e, + 0xd2, 0xc6, 0x9c, 0x9b, 0xef, 0x81, 0x9c, 0xd7, 0x61, 0x7e, 0x7e, 0x66, 0xe4, 0x7c, 0xaf, 0x28, + 0x1f, 0xe8, 0x28, 0x7f, 0x7e, 0x66, 0xe4, 0x03, 0x15, 0xe5, 0x32, 0x0c, 0x29, 0xa4, 0xfc, 0xfa, + 0x47, 0xb2, 0x16, 0x6c, 0x3e, 0xe3, 0x92, 0xa2, 0xbd, 0x80, 0x1c, 0x4b, 0xca, 0x23, 0x18, 0xc8, + 0x29, 0x0c, 0x3d, 0xb4, 0x98, 0xd9, 0x19, 0x40, 0x26, 0xd7, 0xd2, 0x3d, 0xcd, 0x9b, 0xcc, 0x81, + 0xff, 0xc8, 0xa2, 0x9a, 0x20, 0xc4, 0x3a, 0x79, 0x32, 0xc6, 0xfd, 0xe2, 0x7f, 0x4b, 0x30, 0xe5, + 0x25, 0x51, 0xf9, 0xf4, 0x2f, 0x07, 0x17, 0x2f, 0xe8, 0x4f, 0xf0, 0x36, 0x05, 0x7a, 0xb3, 0xf4, + 0xd5, 0xb2, 0x6e, 0x5f, 0x4b, 0x42, 0x1c, 0xd7, 0xca, 0x09, 0xad, 0x4d, 0xd7, 0x48, 0x2c, 0x8d, + 0xd3, 0xaf, 0x03, 0x1b, 0x01, 0xeb, 0xf5, 0xd9, 0xe2, 0x43, 0xf5, 0xd7, 0x3f, 0xce, 0x9d, 0x7f, + 0x52, 0xa9, 0xfc, 0xf3, 0xdc, 0xf8, 0x13, 0x05, 0xab, 0xf8, 0xac, 0xac, 0x7e, 0x8a, 0x5f, 0x7b, + 0xb3, 0xe5, 0x25, 0xd9, 0xec, 0xdf, 0x69, 0x83, 0x83, 0x8a, 0xcf, 0x0e, 0xb2, 0x06, 0x07, 0x7b, + 0xb3, 0x07, 0xaa, 0xc1, 0xff, 0xce, 0x4d, 0xa9, 0xab, 0x0b, 0x0b, 0x15, 0x9f, 0x2d, 0x2c, 0x64, + 0x4d, 0x16, 0x16, 0xf6, 0x66, 0x17, 0x16, 0x54, 0xa3, 0xea, 0x7b, 0x32, 0xba, 0xfb, 0xff, 0x0f, + 0x00, 0x00, 0xff, 0xff, 0x7b, 0xfc, 0xe8, 0xe6, 0x53, 0x29, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/policy.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/policy.pb.go new file mode 100644 index 0000000..c2656f1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/policy.pb.go @@ -0,0 +1,1207 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/policy.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Key of the violation. The key is used for referring to a violation +// when filing an exemption request. +type PolicyViolationKey struct { + // Unique ID of the violated policy. + PolicyName *wrappers.StringValue `protobuf:"bytes,1,opt,name=policy_name,json=policyName,proto3" json:"policy_name,omitempty"` + // The text that violates the policy if specified. + // Otherwise, refers to the policy in general + // (e.g., when requesting to be exempt from the whole policy). + // If not specified for criterion exemptions, the whole policy is implied. + // Must be specified for ad exemptions. 
+ ViolatingText *wrappers.StringValue `protobuf:"bytes,2,opt,name=violating_text,json=violatingText,proto3" json:"violating_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyViolationKey) Reset() { *m = PolicyViolationKey{} } +func (m *PolicyViolationKey) String() string { return proto.CompactTextString(m) } +func (*PolicyViolationKey) ProtoMessage() {} +func (*PolicyViolationKey) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{0} +} +func (m *PolicyViolationKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyViolationKey.Unmarshal(m, b) +} +func (m *PolicyViolationKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyViolationKey.Marshal(b, m, deterministic) +} +func (dst *PolicyViolationKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyViolationKey.Merge(dst, src) +} +func (m *PolicyViolationKey) XXX_Size() int { + return xxx_messageInfo_PolicyViolationKey.Size(m) +} +func (m *PolicyViolationKey) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyViolationKey.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyViolationKey proto.InternalMessageInfo + +func (m *PolicyViolationKey) GetPolicyName() *wrappers.StringValue { + if m != nil { + return m.PolicyName + } + return nil +} + +func (m *PolicyViolationKey) GetViolatingText() *wrappers.StringValue { + if m != nil { + return m.ViolatingText + } + return nil +} + +// Parameter for controlling how policy exemption is done. Ignorable policy +// topics are only usable with expanded text ads and responsive search ads. All +// other ad types must use policy violation keys. +type PolicyValidationParameter struct { + // The list of policy topics that should not cause a PolicyFindingError to + // be reported. This field is currently only compatible with Enhanced Text Ad. + // It corresponds to the PolicyTopicEntry.topic field. + // + // Resources violating these policies will be saved, but will not be eligible + // to serve. They may begin serving at a later time due to a change in + // policies, re-review of the resource, or a change in advertiser + // certificates. + IgnorablePolicyTopics []*wrappers.StringValue `protobuf:"bytes,1,rep,name=ignorable_policy_topics,json=ignorablePolicyTopics,proto3" json:"ignorable_policy_topics,omitempty"` + // The list of policy violation keys that should not cause a + // PolicyViolationError to be reported. Not all policy violations are + // exemptable, please refer to the is_exemptible field in the returned + // PolicyViolationError. + // + // Resources violating these polices will be saved, but will not be eligible + // to serve. They may begin serving at a later time due to a change in + // policies, re-review of the resource, or a change in advertiser + // certificates. 
+ ExemptPolicyViolationKeys []*PolicyViolationKey `protobuf:"bytes,2,rep,name=exempt_policy_violation_keys,json=exemptPolicyViolationKeys,proto3" json:"exempt_policy_violation_keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyValidationParameter) Reset() { *m = PolicyValidationParameter{} } +func (m *PolicyValidationParameter) String() string { return proto.CompactTextString(m) } +func (*PolicyValidationParameter) ProtoMessage() {} +func (*PolicyValidationParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{1} +} +func (m *PolicyValidationParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyValidationParameter.Unmarshal(m, b) +} +func (m *PolicyValidationParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyValidationParameter.Marshal(b, m, deterministic) +} +func (dst *PolicyValidationParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyValidationParameter.Merge(dst, src) +} +func (m *PolicyValidationParameter) XXX_Size() int { + return xxx_messageInfo_PolicyValidationParameter.Size(m) +} +func (m *PolicyValidationParameter) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyValidationParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyValidationParameter proto.InternalMessageInfo + +func (m *PolicyValidationParameter) GetIgnorablePolicyTopics() []*wrappers.StringValue { + if m != nil { + return m.IgnorablePolicyTopics + } + return nil +} + +func (m *PolicyValidationParameter) GetExemptPolicyViolationKeys() []*PolicyViolationKey { + if m != nil { + return m.ExemptPolicyViolationKeys + } + return nil +} + +// Policy finding attached to a resource (e.g. alcohol policy associated with +// a site that sells alcohol). +// +// Each PolicyTopicEntry has a topic that indicates the specific ads policy +// the entry is about and a type to indicate the effect that the entry will have +// on serving. It may optionally have one or more evidences that indicate the +// reason for the finding. It may also optionally have one or more constraints +// that provide details about how serving may be restricted. +type PolicyTopicEntry struct { + // Policy topic this finding refers to. For example, "ALCOHOL", + // "TRADEMARKS_IN_AD_TEXT", or "DESTINATION_NOT_WORKING". The set of possible + // policy topics is not fixed for a particular API version and may change + // at any time. + Topic *wrappers.StringValue `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // Describes the negative or positive effect this policy will have on serving. + Type enums.PolicyTopicEntryTypeEnum_PolicyTopicEntryType `protobuf:"varint,2,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.PolicyTopicEntryTypeEnum_PolicyTopicEntryType" json:"type,omitempty"` + // Additional information that explains policy finding + // (e.g. the brand name for a trademark finding). + Evidences []*PolicyTopicEvidence `protobuf:"bytes,3,rep,name=evidences,proto3" json:"evidences,omitempty"` + // Indicates how serving of this resource may be affected (e.g. not serving + // in a country). 
+ Constraints []*PolicyTopicConstraint `protobuf:"bytes,4,rep,name=constraints,proto3" json:"constraints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEntry) Reset() { *m = PolicyTopicEntry{} } +func (m *PolicyTopicEntry) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEntry) ProtoMessage() {} +func (*PolicyTopicEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{2} +} +func (m *PolicyTopicEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEntry.Unmarshal(m, b) +} +func (m *PolicyTopicEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEntry.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEntry.Merge(dst, src) +} +func (m *PolicyTopicEntry) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEntry.Size(m) +} +func (m *PolicyTopicEntry) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEntry proto.InternalMessageInfo + +func (m *PolicyTopicEntry) GetTopic() *wrappers.StringValue { + if m != nil { + return m.Topic + } + return nil +} + +func (m *PolicyTopicEntry) GetType() enums.PolicyTopicEntryTypeEnum_PolicyTopicEntryType { + if m != nil { + return m.Type + } + return enums.PolicyTopicEntryTypeEnum_UNSPECIFIED +} + +func (m *PolicyTopicEntry) GetEvidences() []*PolicyTopicEvidence { + if m != nil { + return m.Evidences + } + return nil +} + +func (m *PolicyTopicEntry) GetConstraints() []*PolicyTopicConstraint { + if m != nil { + return m.Constraints + } + return nil +} + +// Additional information that explains a policy finding. +type PolicyTopicEvidence struct { + // Specific evidence information depending on the evidence type. 
+ // + // Types that are valid to be assigned to Value: + // *PolicyTopicEvidence_HttpCode + // *PolicyTopicEvidence_WebsiteList_ + // *PolicyTopicEvidence_TextList_ + // *PolicyTopicEvidence_LanguageCode + // *PolicyTopicEvidence_DestinationTextList_ + // *PolicyTopicEvidence_DestinationMismatch_ + // *PolicyTopicEvidence_DestinationNotWorking_ + Value isPolicyTopicEvidence_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence) Reset() { *m = PolicyTopicEvidence{} } +func (m *PolicyTopicEvidence) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence) ProtoMessage() {} +func (*PolicyTopicEvidence) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3} +} +func (m *PolicyTopicEvidence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence.Merge(dst, src) +} +func (m *PolicyTopicEvidence) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence.Size(m) +} +func (m *PolicyTopicEvidence) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence proto.InternalMessageInfo + +type isPolicyTopicEvidence_Value interface { + isPolicyTopicEvidence_Value() +} + +type PolicyTopicEvidence_HttpCode struct { + HttpCode *wrappers.Int32Value `protobuf:"bytes,2,opt,name=http_code,json=httpCode,proto3,oneof"` +} + +type PolicyTopicEvidence_WebsiteList_ struct { + WebsiteList *PolicyTopicEvidence_WebsiteList `protobuf:"bytes,3,opt,name=website_list,json=websiteList,proto3,oneof"` +} + +type PolicyTopicEvidence_TextList_ struct { + TextList *PolicyTopicEvidence_TextList `protobuf:"bytes,4,opt,name=text_list,json=textList,proto3,oneof"` +} + +type PolicyTopicEvidence_LanguageCode struct { + LanguageCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=language_code,json=languageCode,proto3,oneof"` +} + +type PolicyTopicEvidence_DestinationTextList_ struct { + DestinationTextList *PolicyTopicEvidence_DestinationTextList `protobuf:"bytes,6,opt,name=destination_text_list,json=destinationTextList,proto3,oneof"` +} + +type PolicyTopicEvidence_DestinationMismatch_ struct { + DestinationMismatch *PolicyTopicEvidence_DestinationMismatch `protobuf:"bytes,7,opt,name=destination_mismatch,json=destinationMismatch,proto3,oneof"` +} + +type PolicyTopicEvidence_DestinationNotWorking_ struct { + DestinationNotWorking *PolicyTopicEvidence_DestinationNotWorking `protobuf:"bytes,8,opt,name=destination_not_working,json=destinationNotWorking,proto3,oneof"` +} + +func (*PolicyTopicEvidence_HttpCode) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_WebsiteList_) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_TextList_) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_LanguageCode) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_DestinationTextList_) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_DestinationMismatch_) isPolicyTopicEvidence_Value() {} + +func (*PolicyTopicEvidence_DestinationNotWorking_) isPolicyTopicEvidence_Value() {} + +func (m *PolicyTopicEvidence) GetValue() 
isPolicyTopicEvidence_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *PolicyTopicEvidence) GetHttpCode() *wrappers.Int32Value { + if x, ok := m.GetValue().(*PolicyTopicEvidence_HttpCode); ok { + return x.HttpCode + } + return nil +} + +func (m *PolicyTopicEvidence) GetWebsiteList() *PolicyTopicEvidence_WebsiteList { + if x, ok := m.GetValue().(*PolicyTopicEvidence_WebsiteList_); ok { + return x.WebsiteList + } + return nil +} + +func (m *PolicyTopicEvidence) GetTextList() *PolicyTopicEvidence_TextList { + if x, ok := m.GetValue().(*PolicyTopicEvidence_TextList_); ok { + return x.TextList + } + return nil +} + +func (m *PolicyTopicEvidence) GetLanguageCode() *wrappers.StringValue { + if x, ok := m.GetValue().(*PolicyTopicEvidence_LanguageCode); ok { + return x.LanguageCode + } + return nil +} + +func (m *PolicyTopicEvidence) GetDestinationTextList() *PolicyTopicEvidence_DestinationTextList { + if x, ok := m.GetValue().(*PolicyTopicEvidence_DestinationTextList_); ok { + return x.DestinationTextList + } + return nil +} + +func (m *PolicyTopicEvidence) GetDestinationMismatch() *PolicyTopicEvidence_DestinationMismatch { + if x, ok := m.GetValue().(*PolicyTopicEvidence_DestinationMismatch_); ok { + return x.DestinationMismatch + } + return nil +} + +func (m *PolicyTopicEvidence) GetDestinationNotWorking() *PolicyTopicEvidence_DestinationNotWorking { + if x, ok := m.GetValue().(*PolicyTopicEvidence_DestinationNotWorking_); ok { + return x.DestinationNotWorking + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PolicyTopicEvidence) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PolicyTopicEvidence_OneofMarshaler, _PolicyTopicEvidence_OneofUnmarshaler, _PolicyTopicEvidence_OneofSizer, []interface{}{ + (*PolicyTopicEvidence_HttpCode)(nil), + (*PolicyTopicEvidence_WebsiteList_)(nil), + (*PolicyTopicEvidence_TextList_)(nil), + (*PolicyTopicEvidence_LanguageCode)(nil), + (*PolicyTopicEvidence_DestinationTextList_)(nil), + (*PolicyTopicEvidence_DestinationMismatch_)(nil), + (*PolicyTopicEvidence_DestinationNotWorking_)(nil), + } +} + +func _PolicyTopicEvidence_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PolicyTopicEvidence) + // value + switch x := m.Value.(type) { + case *PolicyTopicEvidence_HttpCode: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpCode); err != nil { + return err + } + case *PolicyTopicEvidence_WebsiteList_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.WebsiteList); err != nil { + return err + } + case *PolicyTopicEvidence_TextList_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextList); err != nil { + return err + } + case *PolicyTopicEvidence_LanguageCode: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LanguageCode); err != nil { + return err + } + case *PolicyTopicEvidence_DestinationTextList_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DestinationTextList); err != nil { + return err + } + case *PolicyTopicEvidence_DestinationMismatch_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DestinationMismatch); err != nil { + return err + } + case *PolicyTopicEvidence_DestinationNotWorking_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DestinationNotWorking); err != 
nil { + return err + } + case nil: + default: + return fmt.Errorf("PolicyTopicEvidence.Value has unexpected type %T", x) + } + return nil +} + +func _PolicyTopicEvidence_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PolicyTopicEvidence) + switch tag { + case 2: // value.http_code + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int32Value) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_HttpCode{msg} + return true, err + case 3: // value.website_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicEvidence_WebsiteList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_WebsiteList_{msg} + return true, err + case 4: // value.text_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicEvidence_TextList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_TextList_{msg} + return true, err + case 5: // value.language_code + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_LanguageCode{msg} + return true, err + case 6: // value.destination_text_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicEvidence_DestinationTextList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_DestinationTextList_{msg} + return true, err + case 7: // value.destination_mismatch + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicEvidence_DestinationMismatch) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_DestinationMismatch_{msg} + return true, err + case 8: // value.destination_not_working + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicEvidence_DestinationNotWorking) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicEvidence_DestinationNotWorking_{msg} + return true, err + default: + return false, nil + } +} + +func _PolicyTopicEvidence_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PolicyTopicEvidence) + // value + switch x := m.Value.(type) { + case *PolicyTopicEvidence_HttpCode: + s := proto.Size(x.HttpCode) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_WebsiteList_: + s := proto.Size(x.WebsiteList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_TextList_: + s := proto.Size(x.TextList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_LanguageCode: + s := proto.Size(x.LanguageCode) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_DestinationTextList_: + s := proto.Size(x.DestinationTextList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_DestinationMismatch_: + s := proto.Size(x.DestinationMismatch) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicEvidence_DestinationNotWorking_: + s := proto.Size(x.DestinationNotWorking) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A list of fragments of text that violated a policy. 
+type PolicyTopicEvidence_TextList struct { + // The fragments of text from the resource that caused the policy finding. + Texts []*wrappers.StringValue `protobuf:"bytes,1,rep,name=texts,proto3" json:"texts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence_TextList) Reset() { *m = PolicyTopicEvidence_TextList{} } +func (m *PolicyTopicEvidence_TextList) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence_TextList) ProtoMessage() {} +func (*PolicyTopicEvidence_TextList) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3, 0} +} +func (m *PolicyTopicEvidence_TextList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence_TextList.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence_TextList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence_TextList.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence_TextList) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence_TextList.Merge(dst, src) +} +func (m *PolicyTopicEvidence_TextList) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence_TextList.Size(m) +} +func (m *PolicyTopicEvidence_TextList) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence_TextList.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence_TextList proto.InternalMessageInfo + +func (m *PolicyTopicEvidence_TextList) GetTexts() []*wrappers.StringValue { + if m != nil { + return m.Texts + } + return nil +} + +// A list of websites that caused a policy finding. Used for +// ONE_WEBSITE_PER_AD_GROUP policy topic, for example. In case there are more +// than five websites, only the top five (those that appear in resources the +// most) will be listed here. +type PolicyTopicEvidence_WebsiteList struct { + // Websites that caused the policy finding. 
+ Websites []*wrappers.StringValue `protobuf:"bytes,1,rep,name=websites,proto3" json:"websites,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence_WebsiteList) Reset() { *m = PolicyTopicEvidence_WebsiteList{} } +func (m *PolicyTopicEvidence_WebsiteList) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence_WebsiteList) ProtoMessage() {} +func (*PolicyTopicEvidence_WebsiteList) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3, 1} +} +func (m *PolicyTopicEvidence_WebsiteList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence_WebsiteList.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence_WebsiteList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence_WebsiteList.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence_WebsiteList) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence_WebsiteList.Merge(dst, src) +} +func (m *PolicyTopicEvidence_WebsiteList) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence_WebsiteList.Size(m) +} +func (m *PolicyTopicEvidence_WebsiteList) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence_WebsiteList.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence_WebsiteList proto.InternalMessageInfo + +func (m *PolicyTopicEvidence_WebsiteList) GetWebsites() []*wrappers.StringValue { + if m != nil { + return m.Websites + } + return nil +} + +// A list of strings found in a destination page that caused a policy +// finding. +type PolicyTopicEvidence_DestinationTextList struct { + // List of text found in the resource's destination page. 
+ DestinationTexts []*wrappers.StringValue `protobuf:"bytes,1,rep,name=destination_texts,json=destinationTexts,proto3" json:"destination_texts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence_DestinationTextList) Reset() { + *m = PolicyTopicEvidence_DestinationTextList{} +} +func (m *PolicyTopicEvidence_DestinationTextList) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence_DestinationTextList) ProtoMessage() {} +func (*PolicyTopicEvidence_DestinationTextList) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3, 2} +} +func (m *PolicyTopicEvidence_DestinationTextList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence_DestinationTextList.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence_DestinationTextList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence_DestinationTextList.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence_DestinationTextList) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence_DestinationTextList.Merge(dst, src) +} +func (m *PolicyTopicEvidence_DestinationTextList) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence_DestinationTextList.Size(m) +} +func (m *PolicyTopicEvidence_DestinationTextList) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence_DestinationTextList.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence_DestinationTextList proto.InternalMessageInfo + +func (m *PolicyTopicEvidence_DestinationTextList) GetDestinationTexts() []*wrappers.StringValue { + if m != nil { + return m.DestinationTexts + } + return nil +} + +// Evidence of mismatches between the URLs of a resource. +type PolicyTopicEvidence_DestinationMismatch struct { + // The set of URLs that did not match each other. 
+ UrlTypes []enums.PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType `protobuf:"varint,1,rep,packed,name=url_types,json=urlTypes,proto3,enum=google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType" json:"url_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence_DestinationMismatch) Reset() { + *m = PolicyTopicEvidence_DestinationMismatch{} +} +func (m *PolicyTopicEvidence_DestinationMismatch) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence_DestinationMismatch) ProtoMessage() {} +func (*PolicyTopicEvidence_DestinationMismatch) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3, 3} +} +func (m *PolicyTopicEvidence_DestinationMismatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence_DestinationMismatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence_DestinationMismatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch.Merge(dst, src) +} +func (m *PolicyTopicEvidence_DestinationMismatch) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch.Size(m) +} +func (m *PolicyTopicEvidence_DestinationMismatch) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence_DestinationMismatch proto.InternalMessageInfo + +func (m *PolicyTopicEvidence_DestinationMismatch) GetUrlTypes() []enums.PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType { + if m != nil { + return m.UrlTypes + } + return nil +} + +// Evidence details when the destination is returning an HTTP error +// code or isn't functional in all locations for commonly used devices. +type PolicyTopicEvidence_DestinationNotWorking struct { + // The full URL that didn't work. + ExpandedUrl *wrappers.StringValue `protobuf:"bytes,3,opt,name=expanded_url,json=expandedUrl,proto3" json:"expanded_url,omitempty"` + // The type of device that failed to load the URL. + Device enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice `protobuf:"varint,4,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice" json:"device,omitempty"` + // The time the URL was last checked. + // The format is "YYYY-MM-DD HH:MM:SS". 
+ // Examples: "2018-03-05 09:15:00" or "2018-02-01 14:34:30" + LastCheckedDateTime *wrappers.StringValue `protobuf:"bytes,5,opt,name=last_checked_date_time,json=lastCheckedDateTime,proto3" json:"last_checked_date_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidence_DestinationNotWorking) Reset() { + *m = PolicyTopicEvidence_DestinationNotWorking{} +} +func (m *PolicyTopicEvidence_DestinationNotWorking) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEvidence_DestinationNotWorking) ProtoMessage() {} +func (*PolicyTopicEvidence_DestinationNotWorking) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{3, 4} +} +func (m *PolicyTopicEvidence_DestinationNotWorking) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking.Unmarshal(m, b) +} +func (m *PolicyTopicEvidence_DestinationNotWorking) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidence_DestinationNotWorking) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking.Merge(dst, src) +} +func (m *PolicyTopicEvidence_DestinationNotWorking) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking.Size(m) +} +func (m *PolicyTopicEvidence_DestinationNotWorking) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidence_DestinationNotWorking proto.InternalMessageInfo + +func (m *PolicyTopicEvidence_DestinationNotWorking) GetExpandedUrl() *wrappers.StringValue { + if m != nil { + return m.ExpandedUrl + } + return nil +} + +func (m *PolicyTopicEvidence_DestinationNotWorking) GetDevice() enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice { + if m != nil { + return m.Device + } + return enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_UNSPECIFIED +} + +func (m *PolicyTopicEvidence_DestinationNotWorking) GetLastCheckedDateTime() *wrappers.StringValue { + if m != nil { + return m.LastCheckedDateTime + } + return nil +} + +// Describes the effect on serving that a policy topic entry will have. +type PolicyTopicConstraint struct { + // Specific information about the constraint. 
+ // + // Types that are valid to be assigned to Value: + // *PolicyTopicConstraint_CountryConstraintList_ + // *PolicyTopicConstraint_ResellerConstraint_ + // *PolicyTopicConstraint_CertificateMissingInCountryList + // *PolicyTopicConstraint_CertificateDomainMismatchInCountryList + Value isPolicyTopicConstraint_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicConstraint) Reset() { *m = PolicyTopicConstraint{} } +func (m *PolicyTopicConstraint) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicConstraint) ProtoMessage() {} +func (*PolicyTopicConstraint) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{4} +} +func (m *PolicyTopicConstraint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicConstraint.Unmarshal(m, b) +} +func (m *PolicyTopicConstraint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicConstraint.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicConstraint) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicConstraint.Merge(dst, src) +} +func (m *PolicyTopicConstraint) XXX_Size() int { + return xxx_messageInfo_PolicyTopicConstraint.Size(m) +} +func (m *PolicyTopicConstraint) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicConstraint.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicConstraint proto.InternalMessageInfo + +type isPolicyTopicConstraint_Value interface { + isPolicyTopicConstraint_Value() +} + +type PolicyTopicConstraint_CountryConstraintList_ struct { + CountryConstraintList *PolicyTopicConstraint_CountryConstraintList `protobuf:"bytes,1,opt,name=country_constraint_list,json=countryConstraintList,proto3,oneof"` +} + +type PolicyTopicConstraint_ResellerConstraint_ struct { + ResellerConstraint *PolicyTopicConstraint_ResellerConstraint `protobuf:"bytes,2,opt,name=reseller_constraint,json=resellerConstraint,proto3,oneof"` +} + +type PolicyTopicConstraint_CertificateMissingInCountryList struct { + CertificateMissingInCountryList *PolicyTopicConstraint_CountryConstraintList `protobuf:"bytes,3,opt,name=certificate_missing_in_country_list,json=certificateMissingInCountryList,proto3,oneof"` +} + +type PolicyTopicConstraint_CertificateDomainMismatchInCountryList struct { + CertificateDomainMismatchInCountryList *PolicyTopicConstraint_CountryConstraintList `protobuf:"bytes,4,opt,name=certificate_domain_mismatch_in_country_list,json=certificateDomainMismatchInCountryList,proto3,oneof"` +} + +func (*PolicyTopicConstraint_CountryConstraintList_) isPolicyTopicConstraint_Value() {} + +func (*PolicyTopicConstraint_ResellerConstraint_) isPolicyTopicConstraint_Value() {} + +func (*PolicyTopicConstraint_CertificateMissingInCountryList) isPolicyTopicConstraint_Value() {} + +func (*PolicyTopicConstraint_CertificateDomainMismatchInCountryList) isPolicyTopicConstraint_Value() {} + +func (m *PolicyTopicConstraint) GetValue() isPolicyTopicConstraint_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *PolicyTopicConstraint) GetCountryConstraintList() *PolicyTopicConstraint_CountryConstraintList { + if x, ok := m.GetValue().(*PolicyTopicConstraint_CountryConstraintList_); ok { + return x.CountryConstraintList + } + return nil +} + +func (m *PolicyTopicConstraint) GetResellerConstraint() *PolicyTopicConstraint_ResellerConstraint { + if x, ok := m.GetValue().(*PolicyTopicConstraint_ResellerConstraint_); ok { 
+ return x.ResellerConstraint + } + return nil +} + +func (m *PolicyTopicConstraint) GetCertificateMissingInCountryList() *PolicyTopicConstraint_CountryConstraintList { + if x, ok := m.GetValue().(*PolicyTopicConstraint_CertificateMissingInCountryList); ok { + return x.CertificateMissingInCountryList + } + return nil +} + +func (m *PolicyTopicConstraint) GetCertificateDomainMismatchInCountryList() *PolicyTopicConstraint_CountryConstraintList { + if x, ok := m.GetValue().(*PolicyTopicConstraint_CertificateDomainMismatchInCountryList); ok { + return x.CertificateDomainMismatchInCountryList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PolicyTopicConstraint) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PolicyTopicConstraint_OneofMarshaler, _PolicyTopicConstraint_OneofUnmarshaler, _PolicyTopicConstraint_OneofSizer, []interface{}{ + (*PolicyTopicConstraint_CountryConstraintList_)(nil), + (*PolicyTopicConstraint_ResellerConstraint_)(nil), + (*PolicyTopicConstraint_CertificateMissingInCountryList)(nil), + (*PolicyTopicConstraint_CertificateDomainMismatchInCountryList)(nil), + } +} + +func _PolicyTopicConstraint_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PolicyTopicConstraint) + // value + switch x := m.Value.(type) { + case *PolicyTopicConstraint_CountryConstraintList_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CountryConstraintList); err != nil { + return err + } + case *PolicyTopicConstraint_ResellerConstraint_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ResellerConstraint); err != nil { + return err + } + case *PolicyTopicConstraint_CertificateMissingInCountryList: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CertificateMissingInCountryList); err != nil { + return err + } + case *PolicyTopicConstraint_CertificateDomainMismatchInCountryList: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CertificateDomainMismatchInCountryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PolicyTopicConstraint.Value has unexpected type %T", x) + } + return nil +} + +func _PolicyTopicConstraint_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PolicyTopicConstraint) + switch tag { + case 1: // value.country_constraint_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicConstraint_CountryConstraintList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicConstraint_CountryConstraintList_{msg} + return true, err + case 2: // value.reseller_constraint + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicConstraint_ResellerConstraint) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicConstraint_ResellerConstraint_{msg} + return true, err + case 3: // value.certificate_missing_in_country_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolicyTopicConstraint_CountryConstraintList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicConstraint_CertificateMissingInCountryList{msg} + return true, err + case 4: // value.certificate_domain_mismatch_in_country_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(PolicyTopicConstraint_CountryConstraintList) + err := b.DecodeMessage(msg) + m.Value = &PolicyTopicConstraint_CertificateDomainMismatchInCountryList{msg} + return true, err + default: + return false, nil + } +} + +func _PolicyTopicConstraint_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PolicyTopicConstraint) + // value + switch x := m.Value.(type) { + case *PolicyTopicConstraint_CountryConstraintList_: + s := proto.Size(x.CountryConstraintList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicConstraint_ResellerConstraint_: + s := proto.Size(x.ResellerConstraint) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicConstraint_CertificateMissingInCountryList: + s := proto.Size(x.CertificateMissingInCountryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PolicyTopicConstraint_CertificateDomainMismatchInCountryList: + s := proto.Size(x.CertificateDomainMismatchInCountryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A list of countries where a resource's serving is constrained. +type PolicyTopicConstraint_CountryConstraintList struct { + // Total number of countries targeted by the resource. + TotalTargetedCountries *wrappers.Int32Value `protobuf:"bytes,1,opt,name=total_targeted_countries,json=totalTargetedCountries,proto3" json:"total_targeted_countries,omitempty"` + // Countries in which serving is restricted. + Countries []*PolicyTopicConstraint_CountryConstraint `protobuf:"bytes,2,rep,name=countries,proto3" json:"countries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicConstraint_CountryConstraintList) Reset() { + *m = PolicyTopicConstraint_CountryConstraintList{} +} +func (m *PolicyTopicConstraint_CountryConstraintList) String() string { + return proto.CompactTextString(m) +} +func (*PolicyTopicConstraint_CountryConstraintList) ProtoMessage() {} +func (*PolicyTopicConstraint_CountryConstraintList) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{4, 0} +} +func (m *PolicyTopicConstraint_CountryConstraintList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList.Unmarshal(m, b) +} +func (m *PolicyTopicConstraint_CountryConstraintList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicConstraint_CountryConstraintList) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList.Merge(dst, src) +} +func (m *PolicyTopicConstraint_CountryConstraintList) XXX_Size() int { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList.Size(m) +} +func (m *PolicyTopicConstraint_CountryConstraintList) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicConstraint_CountryConstraintList proto.InternalMessageInfo + +func (m *PolicyTopicConstraint_CountryConstraintList) GetTotalTargetedCountries() *wrappers.Int32Value { + if m != nil { + return m.TotalTargetedCountries + } + return nil +} + +func (m *PolicyTopicConstraint_CountryConstraintList) GetCountries() []*PolicyTopicConstraint_CountryConstraint { + if m 
!= nil { + return m.Countries + } + return nil +} + +// Indicates that a policy topic was constrained due to disapproval of the +// website for reseller purposes. +type PolicyTopicConstraint_ResellerConstraint struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicConstraint_ResellerConstraint) Reset() { + *m = PolicyTopicConstraint_ResellerConstraint{} +} +func (m *PolicyTopicConstraint_ResellerConstraint) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicConstraint_ResellerConstraint) ProtoMessage() {} +func (*PolicyTopicConstraint_ResellerConstraint) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{4, 1} +} +func (m *PolicyTopicConstraint_ResellerConstraint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint.Unmarshal(m, b) +} +func (m *PolicyTopicConstraint_ResellerConstraint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicConstraint_ResellerConstraint) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint.Merge(dst, src) +} +func (m *PolicyTopicConstraint_ResellerConstraint) XXX_Size() int { + return xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint.Size(m) +} +func (m *PolicyTopicConstraint_ResellerConstraint) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicConstraint_ResellerConstraint proto.InternalMessageInfo + +// Indicates that a resource's ability to serve in a particular country is +// constrained. +type PolicyTopicConstraint_CountryConstraint struct { + // Geo target constant resource name of the country in which serving is + // constrained. 
+ CountryCriterion *wrappers.StringValue `protobuf:"bytes,1,opt,name=country_criterion,json=countryCriterion,proto3" json:"country_criterion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicConstraint_CountryConstraint) Reset() { + *m = PolicyTopicConstraint_CountryConstraint{} +} +func (m *PolicyTopicConstraint_CountryConstraint) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicConstraint_CountryConstraint) ProtoMessage() {} +func (*PolicyTopicConstraint_CountryConstraint) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_0b37253ad551feaf, []int{4, 2} +} +func (m *PolicyTopicConstraint_CountryConstraint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraint.Unmarshal(m, b) +} +func (m *PolicyTopicConstraint_CountryConstraint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraint.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicConstraint_CountryConstraint) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicConstraint_CountryConstraint.Merge(dst, src) +} +func (m *PolicyTopicConstraint_CountryConstraint) XXX_Size() int { + return xxx_messageInfo_PolicyTopicConstraint_CountryConstraint.Size(m) +} +func (m *PolicyTopicConstraint_CountryConstraint) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicConstraint_CountryConstraint.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicConstraint_CountryConstraint proto.InternalMessageInfo + +func (m *PolicyTopicConstraint_CountryConstraint) GetCountryCriterion() *wrappers.StringValue { + if m != nil { + return m.CountryCriterion + } + return nil +} + +func init() { + proto.RegisterType((*PolicyViolationKey)(nil), "google.ads.googleads.v1.common.PolicyViolationKey") + proto.RegisterType((*PolicyValidationParameter)(nil), "google.ads.googleads.v1.common.PolicyValidationParameter") + proto.RegisterType((*PolicyTopicEntry)(nil), "google.ads.googleads.v1.common.PolicyTopicEntry") + proto.RegisterType((*PolicyTopicEvidence)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence") + proto.RegisterType((*PolicyTopicEvidence_TextList)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence.TextList") + proto.RegisterType((*PolicyTopicEvidence_WebsiteList)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence.WebsiteList") + proto.RegisterType((*PolicyTopicEvidence_DestinationTextList)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence.DestinationTextList") + proto.RegisterType((*PolicyTopicEvidence_DestinationMismatch)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence.DestinationMismatch") + proto.RegisterType((*PolicyTopicEvidence_DestinationNotWorking)(nil), "google.ads.googleads.v1.common.PolicyTopicEvidence.DestinationNotWorking") + proto.RegisterType((*PolicyTopicConstraint)(nil), "google.ads.googleads.v1.common.PolicyTopicConstraint") + proto.RegisterType((*PolicyTopicConstraint_CountryConstraintList)(nil), "google.ads.googleads.v1.common.PolicyTopicConstraint.CountryConstraintList") + proto.RegisterType((*PolicyTopicConstraint_ResellerConstraint)(nil), "google.ads.googleads.v1.common.PolicyTopicConstraint.ResellerConstraint") + proto.RegisterType((*PolicyTopicConstraint_CountryConstraint)(nil), "google.ads.googleads.v1.common.PolicyTopicConstraint.CountryConstraint") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/policy.proto", 
fileDescriptor_policy_0b37253ad551feaf) +} + +var fileDescriptor_policy_0b37253ad551feaf = []byte{ + // 1167 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xcb, 0x6e, 0x1b, 0x37, + 0x14, 0xf5, 0xc8, 0x2f, 0x99, 0x72, 0x0c, 0x87, 0x8e, 0x62, 0x45, 0x35, 0xd2, 0x40, 0x05, 0x8a, + 0x02, 0x01, 0x46, 0xb0, 0x8c, 0x02, 0x85, 0xd2, 0x36, 0xb0, 0xe5, 0xc0, 0x32, 0xf2, 0x80, 0x33, + 0x95, 0xed, 0xa2, 0x35, 0x3a, 0xa1, 0x67, 0x98, 0x31, 0xe1, 0x19, 0x72, 0x30, 0xa4, 0x64, 0x0b, + 0x7d, 0xa0, 0x40, 0xd1, 0x7f, 0x68, 0xd1, 0x5d, 0x97, 0xd9, 0x76, 0xdf, 0x6d, 0x81, 0xae, 0xba, + 0x68, 0x7f, 0x21, 0x8b, 0x7e, 0x45, 0x31, 0x24, 0xe7, 0x11, 0x49, 0x8e, 0xe5, 0x47, 0x57, 0xa6, + 0xc9, 0x7b, 0xce, 0x3d, 0x3c, 0x73, 0x79, 0x49, 0x81, 0xfb, 0x1e, 0x63, 0x9e, 0x8f, 0xeb, 0xc8, + 0xe5, 0x75, 0x35, 0x8c, 0x47, 0xbd, 0xd5, 0xba, 0xc3, 0x82, 0x80, 0xd1, 0x7a, 0xc8, 0x7c, 0xe2, + 0xf4, 0xcd, 0x30, 0x62, 0x82, 0xc1, 0xbb, 0x2a, 0xc2, 0x44, 0x2e, 0x37, 0xd3, 0x60, 0xb3, 0xb7, + 0x6a, 0xaa, 0xe0, 0xea, 0x83, 0xb3, 0xc8, 0x30, 0xed, 0x06, 0x5c, 0x73, 0xd9, 0x82, 0x85, 0xc4, + 0xb1, 0x31, 0x15, 0x51, 0xdf, 0x16, 0xfd, 0x10, 0x2b, 0xf2, 0xea, 0xfe, 0x45, 0xc0, 0x3d, 0xe2, + 0x62, 0xea, 0x60, 0xdb, 0xc5, 0x5c, 0x10, 0x8a, 0x04, 0x61, 0xd4, 0x0e, 0x08, 0x0f, 0x90, 0x70, + 0x8e, 0xec, 0x6e, 0xe4, 0xe7, 0x89, 0x3f, 0xbf, 0x2a, 0x31, 0x65, 0xc2, 0x3e, 0x61, 0xd1, 0x31, + 0xa1, 0x9e, 0xed, 0xe2, 0x1e, 0x71, 0x12, 0x66, 0xed, 0x47, 0x5d, 0xfe, 0x77, 0xd8, 0x7d, 0x59, + 0x3f, 0x89, 0x50, 0x18, 0xe2, 0x88, 0xeb, 0xf5, 0x95, 0x24, 0x73, 0x48, 0xea, 0x88, 0x52, 0x26, + 0x24, 0x9f, 0x5e, 0xad, 0xfd, 0x64, 0x00, 0xb8, 0x23, 0x93, 0xef, 0x11, 0xe6, 0xcb, 0xa5, 0xc7, + 0xb8, 0x0f, 0x3f, 0x01, 0x25, 0x2d, 0x89, 0xa2, 0x00, 0x57, 0x8c, 0x7b, 0xc6, 0x07, 0xa5, 0xc6, + 0x8a, 0xf6, 0xdb, 0x4c, 0x52, 0x99, 0x9f, 0x89, 0x88, 0x50, 0x6f, 0x0f, 0xf9, 0x5d, 0x6c, 0x01, + 0x05, 0x78, 0x86, 0x02, 0x0c, 0x5b, 0x60, 0xa1, 0xa7, 0xe8, 0xa8, 0x67, 0x0b, 0x7c, 0x2a, 0x2a, + 0x85, 0x31, 0x18, 0x6e, 0xa4, 0x98, 0x0e, 0x3e, 0x15, 0xb5, 0xd7, 0x06, 0xb8, 0xa3, 0xa5, 0x21, + 0x9f, 0xb8, 0x52, 0xdb, 0x0e, 0x8a, 0x50, 0x80, 0x05, 0x8e, 0x60, 0x07, 0x2c, 0x13, 0x8f, 0xb2, + 0x08, 0x1d, 0xfa, 0xd8, 0xce, 0xdb, 0xc7, 0x2b, 0xc6, 0xbd, 0xc9, 0x73, 0x73, 0x95, 0x53, 0xb0, + 0x4a, 0xd1, 0x91, 0x50, 0xc8, 0xc1, 0x0a, 0x3e, 0xc5, 0x41, 0x28, 0x12, 0xca, 0x5e, 0xe2, 0x8a, + 0x7d, 0x8c, 0xfb, 0xbc, 0x52, 0x90, 0xd4, 0x0d, 0xf3, 0xed, 0x35, 0x68, 0x0e, 0x3b, 0x6a, 0xdd, + 0x51, 0xbc, 0xc3, 0x2b, 0xbc, 0xf6, 0x77, 0x01, 0x2c, 0xe6, 0x54, 0x3c, 0x8a, 0x8b, 0x12, 0x36, + 0xc0, 0xb4, 0xdc, 0xce, 0x58, 0xde, 0xab, 0x50, 0xf8, 0x02, 0x4c, 0xc5, 0x25, 0x27, 0xcd, 0x5e, + 0x68, 0x3c, 0x39, 0x53, 0xa5, 0xac, 0x39, 0x73, 0x30, 0x65, 0xa7, 0x1f, 0xe2, 0x47, 0xb4, 0x1b, + 0x8c, 0x5c, 0xb0, 0x24, 0x33, 0x7c, 0x0e, 0xe6, 0x92, 0xea, 0xe4, 0x95, 0x49, 0x69, 0xc6, 0xda, + 0x78, 0x66, 0x28, 0x3a, 0x8d, 0xb5, 0x32, 0x16, 0xb8, 0x0f, 0x4a, 0x0e, 0xa3, 0x5c, 0x44, 0x88, + 0x50, 0xc1, 0x2b, 0x53, 0x92, 0xf4, 0xc3, 0x0b, 0x90, 0xb6, 0x52, 0xb4, 0x95, 0x67, 0xaa, 0xfd, + 0x55, 0x02, 0x4b, 0x23, 0x72, 0xc3, 0x26, 0x98, 0x3b, 0x12, 0x22, 0xb4, 0x1d, 0xe6, 0x62, 0x5d, + 0x97, 0xef, 0x0c, 0xb9, 0xbb, 0x4d, 0xc5, 0x5a, 0x43, 0x9a, 0xdb, 0x9e, 0xb0, 0x8a, 0x71, 0x7c, + 0x8b, 0xb9, 0x18, 0xba, 0x60, 0xfe, 0x04, 0x1f, 0x72, 0x22, 0xb0, 0xed, 0x13, 0x2e, 0x2a, 0x93, + 0x12, 0xfe, 0xf0, 0x12, 0x16, 0x98, 0xfb, 0x8a, 0xe7, 0x09, 0xe1, 0xa2, 0x3d, 0x61, 0x95, 0x4e, + 0xb2, 0x7f, 0xe1, 0x97, 0x60, 0x2e, 0x3e, 0x34, 0x2a, 0xc5, 0x94, 0x4c, 
0xf1, 0xf1, 0x65, 0x52, + 0xc4, 0xc7, 0x48, 0xf3, 0x17, 0x85, 0x1e, 0xc3, 0x16, 0xb8, 0xe1, 0x23, 0xea, 0x75, 0x91, 0x87, + 0x95, 0x05, 0xd3, 0xe7, 0x17, 0x58, 0x7b, 0xc2, 0x9a, 0x4f, 0x40, 0xd2, 0x87, 0x6f, 0x41, 0x39, + 0xdf, 0x9c, 0x32, 0xb5, 0x33, 0x92, 0x6c, 0xeb, 0x32, 0x6a, 0x37, 0x33, 0xc2, 0x9c, 0xf0, 0x25, + 0x77, 0x78, 0x1a, 0x7e, 0x03, 0x6e, 0x8d, 0x6a, 0xba, 0x95, 0xd9, 0x6b, 0xc9, 0xfe, 0x54, 0xd3, + 0x0d, 0x64, 0x4f, 0xa6, 0xe1, 0x0f, 0x06, 0x58, 0x3e, 0xa3, 0x35, 0x57, 0x8a, 0x52, 0xc1, 0xf6, + 0x15, 0x15, 0x3c, 0x63, 0x62, 0x5f, 0x11, 0xb6, 0x27, 0xac, 0xbc, 0xd3, 0xd9, 0x42, 0xf5, 0x53, + 0x50, 0x4c, 0xfd, 0x88, 0x9b, 0x05, 0x3e, 0x15, 0xe3, 0xb5, 0x3e, 0x15, 0x5a, 0xdd, 0x02, 0xa5, + 0x5c, 0x09, 0xc2, 0x8f, 0x40, 0x51, 0x97, 0xe0, 0x78, 0x2c, 0x69, 0x74, 0xf5, 0x05, 0x58, 0x1a, + 0xf1, 0xe9, 0xe0, 0x36, 0xb8, 0x39, 0x58, 0x22, 0xe3, 0x31, 0x2f, 0x0e, 0x7c, 0x71, 0x5e, 0xfd, + 0xcd, 0x78, 0x23, 0x45, 0xfa, 0x21, 0x7e, 0x31, 0xc0, 0x5c, 0x72, 0xcf, 0x2a, 0xee, 0x85, 0xc6, + 0x77, 0x17, 0xe8, 0x7a, 0xda, 0xf9, 0x11, 0xd4, 0xbb, 0x91, 0x3f, 0xb2, 0x1d, 0x9e, 0x8b, 0xb0, + 0x8a, 0x5d, 0x35, 0xe0, 0xd5, 0x3f, 0x0a, 0xa0, 0x3c, 0xf2, 0x9b, 0xc2, 0x87, 0x60, 0x1e, 0x9f, + 0x86, 0x88, 0xba, 0xd8, 0x8d, 0xdf, 0x09, 0xba, 0x8b, 0xbc, 0xdd, 0x95, 0x52, 0x82, 0xd8, 0x8d, + 0x7c, 0xf8, 0xb3, 0x01, 0x66, 0xd4, 0x23, 0x40, 0xb6, 0x87, 0x85, 0xc6, 0xf7, 0xc6, 0x95, 0xb6, + 0x9d, 0x69, 0xdb, 0x94, 0xac, 0x63, 0xec, 0x7b, 0x10, 0x62, 0x69, 0x41, 0xf0, 0x39, 0xb8, 0xed, + 0x23, 0x2e, 0x6c, 0xe7, 0x08, 0x3b, 0xc7, 0xd8, 0xb5, 0x5d, 0x24, 0xb0, 0x2d, 0x48, 0x30, 0x56, + 0xa3, 0xb1, 0x96, 0x62, 0x6c, 0x4b, 0x41, 0x37, 0x91, 0xc0, 0x1d, 0x12, 0xe0, 0x8d, 0x59, 0x30, + 0xdd, 0x8b, 0x57, 0x6b, 0xbf, 0xcf, 0x82, 0xf2, 0xc8, 0xce, 0x0f, 0x7f, 0x34, 0xc0, 0xb2, 0xc3, + 0xba, 0xf2, 0x3d, 0x97, 0x5d, 0x02, 0xaa, 0x27, 0xa9, 0x1b, 0xf4, 0xf1, 0xa5, 0xae, 0x14, 0xb3, + 0xa5, 0x48, 0xb3, 0x19, 0xdd, 0x97, 0xca, 0xce, 0xa8, 0x05, 0xf8, 0x35, 0x58, 0x8a, 0x30, 0xc7, + 0xbe, 0x8f, 0xa3, 0x9c, 0x0e, 0x7d, 0xcd, 0xb4, 0x2f, 0x27, 0xc1, 0xd2, 0x84, 0xd9, 0x54, 0x7b, + 0xc2, 0x82, 0xd1, 0xd0, 0x6c, 0x7c, 0x1e, 0xde, 0x73, 0x70, 0x24, 0xc8, 0x4b, 0xe2, 0xc4, 0xae, + 0x07, 0x84, 0xf3, 0xf8, 0x05, 0x46, 0xa8, 0x9d, 0x78, 0x93, 0xbb, 0xb5, 0xae, 0xd9, 0x90, 0x77, + 0x73, 0x99, 0x9f, 0xaa, 0xc4, 0xdb, 0x54, 0x07, 0x4b, 0x6b, 0x5e, 0x19, 0xe0, 0x7e, 0x5e, 0x9d, + 0xcb, 0x02, 0x44, 0x72, 0x2f, 0xe6, 0x41, 0x95, 0x53, 0xff, 0x87, 0xca, 0xf7, 0x73, 0x0a, 0x36, + 0xa5, 0x80, 0xe4, 0x00, 0xbf, 0x21, 0xb6, 0xfa, 0x8f, 0x01, 0xca, 0x23, 0x39, 0xe0, 0x2e, 0xa8, + 0x08, 0x26, 0x90, 0x6f, 0x0b, 0x14, 0x79, 0x58, 0x60, 0x57, 0xab, 0x26, 0xb2, 0x05, 0x9d, 0xf7, + 0x9a, 0xb0, 0x6e, 0x4b, 0x70, 0x47, 0x63, 0x5b, 0x09, 0x14, 0x62, 0x30, 0x97, 0xf1, 0xa8, 0x67, + 0xe6, 0xd6, 0x35, 0x6d, 0xdd, 0xca, 0x98, 0xab, 0xb7, 0x00, 0x1c, 0x2e, 0xa7, 0xea, 0x57, 0xe0, + 0xe6, 0x10, 0x2a, 0x6e, 0xe0, 0xe9, 0x89, 0x8a, 0x88, 0xc0, 0x11, 0x61, 0x74, 0xac, 0xd7, 0xe8, + 0x62, 0x72, 0x34, 0x12, 0x54, 0x7a, 0x80, 0x37, 0x5e, 0x1b, 0xa0, 0xe6, 0xb0, 0xe0, 0x9c, 0x8d, + 0x6d, 0x94, 0xd4, 0xce, 0x76, 0x62, 0xf6, 0x1d, 0xe3, 0x8b, 0x4d, 0x1d, 0xee, 0xb1, 0xf8, 0x11, + 0x62, 0xb2, 0xc8, 0xab, 0x7b, 0x98, 0xca, 0xdc, 0xc9, 0x8f, 0xa9, 0x90, 0xf0, 0xb3, 0x7e, 0x3e, + 0x3e, 0x50, 0x7f, 0x7e, 0x2d, 0x4c, 0x6e, 0xad, 0xaf, 0xbf, 0x2a, 0xdc, 0xdd, 0x52, 0x64, 0xeb, + 0x2e, 0x37, 0xd5, 0x30, 0x1e, 0xed, 0xad, 0x9a, 0x2d, 0x19, 0xf6, 0x67, 0x12, 0x70, 0xb0, 0xee, + 0xf2, 0x83, 0x34, 0xe0, 0x60, 0x6f, 0xf5, 0x40, 0x05, 0xfc, 0x5b, 0xa8, 0xa9, 0xd9, 0x66, 0x73, + 
0xdd, 0xe5, 0xcd, 0x66, 0x1a, 0xd2, 0x6c, 0xee, 0xad, 0x36, 0x9b, 0x2a, 0xe8, 0x70, 0x46, 0xaa, + 0x5b, 0xfb, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x66, 0xb9, 0x40, 0x8e, 0xdb, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/real_time_bidding_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/real_time_bidding_setting.pb.go new file mode 100644 index 0000000..11901c8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/real_time_bidding_setting.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/real_time_bidding_setting.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Settings for Real-Time Bidding, a feature only available for campaigns +// targeting the Ad Exchange network. +type RealTimeBiddingSetting struct { + // Whether the campaign is opted in to real-time bidding. + OptIn *wrappers.BoolValue `protobuf:"bytes,1,opt,name=opt_in,json=optIn,proto3" json:"opt_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RealTimeBiddingSetting) Reset() { *m = RealTimeBiddingSetting{} } +func (m *RealTimeBiddingSetting) String() string { return proto.CompactTextString(m) } +func (*RealTimeBiddingSetting) ProtoMessage() {} +func (*RealTimeBiddingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_real_time_bidding_setting_e97ac2ff6d978f8d, []int{0} +} +func (m *RealTimeBiddingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RealTimeBiddingSetting.Unmarshal(m, b) +} +func (m *RealTimeBiddingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RealTimeBiddingSetting.Marshal(b, m, deterministic) +} +func (dst *RealTimeBiddingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_RealTimeBiddingSetting.Merge(dst, src) +} +func (m *RealTimeBiddingSetting) XXX_Size() int { + return xxx_messageInfo_RealTimeBiddingSetting.Size(m) +} +func (m *RealTimeBiddingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_RealTimeBiddingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_RealTimeBiddingSetting proto.InternalMessageInfo + +func (m *RealTimeBiddingSetting) GetOptIn() *wrappers.BoolValue { + if m != nil { + return m.OptIn + } + return nil +} + +func init() { + proto.RegisterType((*RealTimeBiddingSetting)(nil), "google.ads.googleads.v1.common.RealTimeBiddingSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/real_time_bidding_setting.proto", fileDescriptor_real_time_bidding_setting_e97ac2ff6d978f8d) +} + +var fileDescriptor_real_time_bidding_setting_e97ac2ff6d978f8d = []byte{ + // 303 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xec, 0x30, + 0x14, 0x86, 0xe9, 0x5c, 0xee, 0x2c, 0xea, 0x6e, 0x16, 0x22, 0xa3, 0x0c, 0x32, 0x2b, 0x57, 0x09, + 0xd1, 0x5d, 0x04, 0xa1, 0x55, 0x18, 0xc4, 0xcd, 0x30, 0x4a, 0x17, 0x52, 0x28, 0x99, 0x49, 0x0c, + 0x81, 0x34, 0x27, 0x34, 0x99, 0xf1, 0x7d, 0x5c, 0xfa, 0x28, 0x3e, 0x8a, 0x0f, 0xe0, 0x5a, 0xda, + 0xd3, 0x76, 0xa5, 0xae, 0xfa, 0xd3, 0x7c, 0xff, 0xf9, 0x0e, 0x27, 0xbd, 0xd1, 0x00, 0xda, 0x2a, + 0x2a, 0x64, 0xa0, 0x18, 0xdb, 0x74, 0x60, 0x74, 0x07, 0x75, 0x0d, 0x8e, 0x36, 0x4a, 0xd8, 0x2a, + 0x9a, 0x5a, 0x55, 0x5b, 0x23, 0xa5, 0x71, 0xba, 0x0a, 0x2a, 0x46, 0xe3, 0x34, 0xf1, 0x0d, 0x44, + 0x98, 0x2d, 0xb0, 0x44, 0x84, 0x0c, 0x64, 0xec, 0x93, 0x03, 0x23, 0xd8, 0x9f, 0xf7, 0xef, 0xb4, + 0xa3, 0xb7, 0xfb, 0x17, 0xfa, 0xda, 0x08, 0xef, 0x55, 0x13, 0xb0, 0x3f, 0x3f, 0x1b, 0xfc, 0xde, + 0x50, 0xe1, 0x1c, 0x44, 0x11, 0x0d, 0xb8, 0xfe, 0x75, 0xf9, 0x90, 0x1e, 0x6f, 0x94, 0xb0, 0x4f, + 0xa6, 0x56, 0x39, 0xea, 0x1f, 0xd1, 0x3e, 0x63, 0xe9, 0x14, 0x7c, 0xac, 0x8c, 0x3b, 0x49, 0xce, + 0x93, 0x8b, 0xa3, 0xcb, 0x79, 0x6f, 0x27, 0x83, 0x88, 0xe4, 0x00, 0xb6, 0x10, 0x76, 0xaf, 0x36, + 0xff, 0xc1, 0xc7, 0x7b, 0x97, 0x7f, 0x25, 0xe9, 0x72, 0x07, 0x35, 0xf9, 0x7b, 0xe3, 0xfc, 0xf4, + 0x67, 0xe3, 0xba, 0x9d, 0xbb, 0x4e, 0x9e, 0xef, 0xfa, 0xba, 0x06, 0x2b, 0x9c, 0x26, 0xd0, 0x68, + 0xaa, 0x95, 0xeb, 0xac, 0xc3, 0x01, 0xbd, 0x09, 0xbf, 0xdd, 0xf3, 0x1a, 0x3f, 0x6f, 0x93, 0x7f, + 0xab, 0x2c, 0x7b, 0x9f, 0x2c, 0x56, 0x38, 0x2c, 0x93, 0x81, 0x60, 0x6c, 0x53, 0xc1, 0xc8, 0x6d, + 0x87, 0x7d, 0x0c, 0x40, 0x99, 0xc9, 0x50, 0x8e, 0x40, 0x59, 0xb0, 0x12, 0x81, 0xcf, 0xc9, 0x12, + 0xff, 0x72, 0x9e, 0xc9, 0xc0, 0xf9, 0x88, 0x70, 0x5e, 0x30, 0xce, 0x11, 0xda, 0x4e, 0xbb, 0xed, + 0xae, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0xce, 0x84, 0x14, 0x8e, 0xec, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/segments.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/segments.pb.go new file mode 100644 index 0000000..6afc569 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/segments.pb.go @@ -0,0 +1,905 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/segments.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Segment only fields. +type Segments struct { + // Ad network type. + AdNetworkType enums.AdNetworkTypeEnum_AdNetworkType `protobuf:"varint,3,opt,name=ad_network_type,json=adNetworkType,proto3,enum=google.ads.googleads.v1.enums.AdNetworkTypeEnum_AdNetworkType" json:"ad_network_type,omitempty"` + // Click type. 
+ ClickType enums.ClickTypeEnum_ClickType `protobuf:"varint,26,opt,name=click_type,json=clickType,proto3,enum=google.ads.googleads.v1.enums.ClickTypeEnum_ClickType" json:"click_type,omitempty"` + // Resource name of the conversion action. + ConversionAction *wrappers.StringValue `protobuf:"bytes,52,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // Conversion action category. + ConversionActionCategory enums.ConversionActionCategoryEnum_ConversionActionCategory `protobuf:"varint,53,opt,name=conversion_action_category,json=conversionActionCategory,proto3,enum=google.ads.googleads.v1.enums.ConversionActionCategoryEnum_ConversionActionCategory" json:"conversion_action_category,omitempty"` + // Conversion action name. + ConversionActionName *wrappers.StringValue `protobuf:"bytes,54,opt,name=conversion_action_name,json=conversionActionName,proto3" json:"conversion_action_name,omitempty"` + // This segments your conversion columns by the original conversion and + // conversion value vs. the delta if conversions were adjusted. False row has + // the data as originally stated; While true row has the delta between data + // now and the data as originally stated. Summing the two together results + // post-adjustment data. + ConversionAdjustment *wrappers.BoolValue `protobuf:"bytes,27,opt,name=conversion_adjustment,json=conversionAdjustment,proto3" json:"conversion_adjustment,omitempty"` + // Conversion attribution event type. + ConversionAttributionEventType enums.ConversionAttributionEventTypeEnum_ConversionAttributionEventType `protobuf:"varint,2,opt,name=conversion_attribution_event_type,json=conversionAttributionEventType,proto3,enum=google.ads.googleads.v1.enums.ConversionAttributionEventTypeEnum_ConversionAttributionEventType" json:"conversion_attribution_event_type,omitempty"` + // An enum value representing the number of days between the impression and + // the conversion. + ConversionLagBucket enums.ConversionLagBucketEnum_ConversionLagBucket `protobuf:"varint,50,opt,name=conversion_lag_bucket,json=conversionLagBucket,proto3,enum=google.ads.googleads.v1.enums.ConversionLagBucketEnum_ConversionLagBucket" json:"conversion_lag_bucket,omitempty"` + // An enum value representing the number of days between the impression and + // the conversion or between the impression and adjustments to the conversion. + ConversionOrAdjustmentLagBucket enums.ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket `protobuf:"varint,51,opt,name=conversion_or_adjustment_lag_bucket,json=conversionOrAdjustmentLagBucket,proto3,enum=google.ads.googleads.v1.enums.ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket" json:"conversion_or_adjustment_lag_bucket,omitempty"` + // Date to which metrics apply. + // yyyy-MM-dd format, e.g., 2018-04-17. + Date *wrappers.StringValue `protobuf:"bytes,4,opt,name=date,proto3" json:"date,omitempty"` + // Day of the week, e.g., MONDAY. + DayOfWeek enums.DayOfWeekEnum_DayOfWeek `protobuf:"varint,5,opt,name=day_of_week,json=dayOfWeek,proto3,enum=google.ads.googleads.v1.enums.DayOfWeekEnum_DayOfWeek" json:"day_of_week,omitempty"` + // Device to which metrics apply. + Device enums.DeviceEnum_Device `protobuf:"varint,1,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.DeviceEnum_Device" json:"device,omitempty"` + // External conversion source. 
+ ExternalConversionSource enums.ExternalConversionSourceEnum_ExternalConversionSource `protobuf:"varint,55,opt,name=external_conversion_source,json=externalConversionSource,proto3,enum=google.ads.googleads.v1.enums.ExternalConversionSourceEnum_ExternalConversionSource" json:"external_conversion_source,omitempty"` + // Resource name of the geo target constant that represents an airport. + GeoTargetAirport *wrappers.StringValue `protobuf:"bytes,65,opt,name=geo_target_airport,json=geoTargetAirport,proto3" json:"geo_target_airport,omitempty"` + // Resource name of the geo target constant that represents a city. + GeoTargetCity *wrappers.StringValue `protobuf:"bytes,62,opt,name=geo_target_city,json=geoTargetCity,proto3" json:"geo_target_city,omitempty"` + // Resource name of the geo target constant that represents a county. + GeoTargetCounty *wrappers.StringValue `protobuf:"bytes,68,opt,name=geo_target_county,json=geoTargetCounty,proto3" json:"geo_target_county,omitempty"` + // Resource name of the geo target constant that represents a district. + GeoTargetDistrict *wrappers.StringValue `protobuf:"bytes,69,opt,name=geo_target_district,json=geoTargetDistrict,proto3" json:"geo_target_district,omitempty"` + // Resource name of the geo target constant that represents a metro. + GeoTargetMetro *wrappers.StringValue `protobuf:"bytes,63,opt,name=geo_target_metro,json=geoTargetMetro,proto3" json:"geo_target_metro,omitempty"` + // Resource name of the geo target constant that represents the most + // specific location. + GeoTargetMostSpecificLocation *wrappers.StringValue `protobuf:"bytes,72,opt,name=geo_target_most_specific_location,json=geoTargetMostSpecificLocation,proto3" json:"geo_target_most_specific_location,omitempty"` + // Resource name of the geo target constant that represents a postal code. + GeoTargetPostalCode *wrappers.StringValue `protobuf:"bytes,71,opt,name=geo_target_postal_code,json=geoTargetPostalCode,proto3" json:"geo_target_postal_code,omitempty"` + // Resource name of the geo target constant that represents a region. + GeoTargetRegion *wrappers.StringValue `protobuf:"bytes,64,opt,name=geo_target_region,json=geoTargetRegion,proto3" json:"geo_target_region,omitempty"` + // Resource name of the geo target constant that represents a state. + GeoTargetState *wrappers.StringValue `protobuf:"bytes,67,opt,name=geo_target_state,json=geoTargetState,proto3" json:"geo_target_state,omitempty"` + // Hotel booking window in days. + HotelBookingWindowDays *wrappers.Int64Value `protobuf:"bytes,6,opt,name=hotel_booking_window_days,json=hotelBookingWindowDays,proto3" json:"hotel_booking_window_days,omitempty"` + // Hotel center ID. + HotelCenterId *wrappers.Int64Value `protobuf:"bytes,7,opt,name=hotel_center_id,json=hotelCenterId,proto3" json:"hotel_center_id,omitempty"` + // Hotel check-in date. Formatted as yyyy-MM-dd. + HotelCheckInDate *wrappers.StringValue `protobuf:"bytes,8,opt,name=hotel_check_in_date,json=hotelCheckInDate,proto3" json:"hotel_check_in_date,omitempty"` + // Hotel check-in day of week. + HotelCheckInDayOfWeek enums.DayOfWeekEnum_DayOfWeek `protobuf:"varint,9,opt,name=hotel_check_in_day_of_week,json=hotelCheckInDayOfWeek,proto3,enum=google.ads.googleads.v1.enums.DayOfWeekEnum_DayOfWeek" json:"hotel_check_in_day_of_week,omitempty"` + // Hotel city. + HotelCity *wrappers.StringValue `protobuf:"bytes,10,opt,name=hotel_city,json=hotelCity,proto3" json:"hotel_city,omitempty"` + // Hotel class. 
+ HotelClass *wrappers.Int32Value `protobuf:"bytes,11,opt,name=hotel_class,json=hotelClass,proto3" json:"hotel_class,omitempty"` + // Hotel country. + HotelCountry *wrappers.StringValue `protobuf:"bytes,12,opt,name=hotel_country,json=hotelCountry,proto3" json:"hotel_country,omitempty"` + // Hotel date selection type. + HotelDateSelectionType enums.HotelDateSelectionTypeEnum_HotelDateSelectionType `protobuf:"varint,13,opt,name=hotel_date_selection_type,json=hotelDateSelectionType,proto3,enum=google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum_HotelDateSelectionType" json:"hotel_date_selection_type,omitempty"` + // Hotel length of stay. + HotelLengthOfStay *wrappers.Int32Value `protobuf:"bytes,14,opt,name=hotel_length_of_stay,json=hotelLengthOfStay,proto3" json:"hotel_length_of_stay,omitempty"` + // Hotel state. + HotelState *wrappers.StringValue `protobuf:"bytes,15,opt,name=hotel_state,json=hotelState,proto3" json:"hotel_state,omitempty"` + // Hour of day as a number between 0 and 23, inclusive. + Hour *wrappers.Int32Value `protobuf:"bytes,16,opt,name=hour,proto3" json:"hour,omitempty"` + // Only used with feed item metrics. + // Indicates whether the interaction metrics occurred on the feed item itself + // or a different extension or ad unit. + InteractionOnThisExtension *wrappers.BoolValue `protobuf:"bytes,49,opt,name=interaction_on_this_extension,json=interactionOnThisExtension,proto3" json:"interaction_on_this_extension,omitempty"` + // Keyword criterion. + Keyword *Keyword `protobuf:"bytes,61,opt,name=keyword,proto3" json:"keyword,omitempty"` + // Month as represented by the date of the first day of a month. Formatted as + // yyyy-MM-dd. + Month *wrappers.StringValue `protobuf:"bytes,17,opt,name=month,proto3" json:"month,omitempty"` + // Month of the year, e.g., January. + MonthOfYear enums.MonthOfYearEnum_MonthOfYear `protobuf:"varint,18,opt,name=month_of_year,json=monthOfYear,proto3,enum=google.ads.googleads.v1.enums.MonthOfYearEnum_MonthOfYear" json:"month_of_year,omitempty"` + // Partner hotel ID. + PartnerHotelId *wrappers.StringValue `protobuf:"bytes,19,opt,name=partner_hotel_id,json=partnerHotelId,proto3" json:"partner_hotel_id,omitempty"` + // Placeholder type. This is only used with feed item metrics. + PlaceholderType enums.PlaceholderTypeEnum_PlaceholderType `protobuf:"varint,20,opt,name=placeholder_type,json=placeholderType,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType" json:"placeholder_type,omitempty"` + // Aggregator ID of the product. + ProductAggregatorId *wrappers.UInt64Value `protobuf:"bytes,28,opt,name=product_aggregator_id,json=productAggregatorId,proto3" json:"product_aggregator_id,omitempty"` + // Bidding category (level 1) of the product. + ProductBiddingCategoryLevel1 *wrappers.StringValue `protobuf:"bytes,56,opt,name=product_bidding_category_level1,json=productBiddingCategoryLevel1,proto3" json:"product_bidding_category_level1,omitempty"` + // Bidding category (level 2) of the product. + ProductBiddingCategoryLevel2 *wrappers.StringValue `protobuf:"bytes,57,opt,name=product_bidding_category_level2,json=productBiddingCategoryLevel2,proto3" json:"product_bidding_category_level2,omitempty"` + // Bidding category (level 3) of the product. + ProductBiddingCategoryLevel3 *wrappers.StringValue `protobuf:"bytes,58,opt,name=product_bidding_category_level3,json=productBiddingCategoryLevel3,proto3" json:"product_bidding_category_level3,omitempty"` + // Bidding category (level 4) of the product. 
+ ProductBiddingCategoryLevel4 *wrappers.StringValue `protobuf:"bytes,59,opt,name=product_bidding_category_level4,json=productBiddingCategoryLevel4,proto3" json:"product_bidding_category_level4,omitempty"` + // Bidding category (level 5) of the product. + ProductBiddingCategoryLevel5 *wrappers.StringValue `protobuf:"bytes,60,opt,name=product_bidding_category_level5,json=productBiddingCategoryLevel5,proto3" json:"product_bidding_category_level5,omitempty"` + // Brand of the product. + ProductBrand *wrappers.StringValue `protobuf:"bytes,29,opt,name=product_brand,json=productBrand,proto3" json:"product_brand,omitempty"` + // Channel of the product. + ProductChannel enums.ProductChannelEnum_ProductChannel `protobuf:"varint,30,opt,name=product_channel,json=productChannel,proto3,enum=google.ads.googleads.v1.enums.ProductChannelEnum_ProductChannel" json:"product_channel,omitempty"` + // Channel exclusivity of the product. + ProductChannelExclusivity enums.ProductChannelExclusivityEnum_ProductChannelExclusivity `protobuf:"varint,31,opt,name=product_channel_exclusivity,json=productChannelExclusivity,proto3,enum=google.ads.googleads.v1.enums.ProductChannelExclusivityEnum_ProductChannelExclusivity" json:"product_channel_exclusivity,omitempty"` + // Condition of the product. + ProductCondition enums.ProductConditionEnum_ProductCondition `protobuf:"varint,32,opt,name=product_condition,json=productCondition,proto3,enum=google.ads.googleads.v1.enums.ProductConditionEnum_ProductCondition" json:"product_condition,omitempty"` + // Resource name of the geo target constant for the country of sale of the + // product. + ProductCountry *wrappers.StringValue `protobuf:"bytes,33,opt,name=product_country,json=productCountry,proto3" json:"product_country,omitempty"` + // Custom attribute 0 of the product. + ProductCustomAttribute0 *wrappers.StringValue `protobuf:"bytes,34,opt,name=product_custom_attribute0,json=productCustomAttribute0,proto3" json:"product_custom_attribute0,omitempty"` + // Custom attribute 1 of the product. + ProductCustomAttribute1 *wrappers.StringValue `protobuf:"bytes,35,opt,name=product_custom_attribute1,json=productCustomAttribute1,proto3" json:"product_custom_attribute1,omitempty"` + // Custom attribute 2 of the product. + ProductCustomAttribute2 *wrappers.StringValue `protobuf:"bytes,36,opt,name=product_custom_attribute2,json=productCustomAttribute2,proto3" json:"product_custom_attribute2,omitempty"` + // Custom attribute 3 of the product. + ProductCustomAttribute3 *wrappers.StringValue `protobuf:"bytes,37,opt,name=product_custom_attribute3,json=productCustomAttribute3,proto3" json:"product_custom_attribute3,omitempty"` + // Custom attribute 4 of the product. + ProductCustomAttribute4 *wrappers.StringValue `protobuf:"bytes,38,opt,name=product_custom_attribute4,json=productCustomAttribute4,proto3" json:"product_custom_attribute4,omitempty"` + // Item ID of the product. + ProductItemId *wrappers.StringValue `protobuf:"bytes,39,opt,name=product_item_id,json=productItemId,proto3" json:"product_item_id,omitempty"` + // Resource name of the language constant for the language of the product. + ProductLanguage *wrappers.StringValue `protobuf:"bytes,40,opt,name=product_language,json=productLanguage,proto3" json:"product_language,omitempty"` + // Merchant ID of the product. + ProductMerchantId *wrappers.UInt64Value `protobuf:"bytes,41,opt,name=product_merchant_id,json=productMerchantId,proto3" json:"product_merchant_id,omitempty"` + // Store ID of the product. 
+ ProductStoreId *wrappers.StringValue `protobuf:"bytes,42,opt,name=product_store_id,json=productStoreId,proto3" json:"product_store_id,omitempty"` + // Title of the product. + ProductTitle *wrappers.StringValue `protobuf:"bytes,43,opt,name=product_title,json=productTitle,proto3" json:"product_title,omitempty"` + // Type (level 1) of the product. + ProductTypeL1 *wrappers.StringValue `protobuf:"bytes,44,opt,name=product_type_l1,json=productTypeL1,proto3" json:"product_type_l1,omitempty"` + // Type (level 2) of the product. + ProductTypeL2 *wrappers.StringValue `protobuf:"bytes,45,opt,name=product_type_l2,json=productTypeL2,proto3" json:"product_type_l2,omitempty"` + // Type (level 3) of the product. + ProductTypeL3 *wrappers.StringValue `protobuf:"bytes,46,opt,name=product_type_l3,json=productTypeL3,proto3" json:"product_type_l3,omitempty"` + // Type (level 4) of the product. + ProductTypeL4 *wrappers.StringValue `protobuf:"bytes,47,opt,name=product_type_l4,json=productTypeL4,proto3" json:"product_type_l4,omitempty"` + // Type (level 5) of the product. + ProductTypeL5 *wrappers.StringValue `protobuf:"bytes,48,opt,name=product_type_l5,json=productTypeL5,proto3" json:"product_type_l5,omitempty"` + // Quarter as represented by the date of the first day of a quarter. + // Uses the calendar year for quarters, e.g., the second quarter of 2018 + // starts on 2018-04-01. Formatted as yyyy-MM-dd. + Quarter *wrappers.StringValue `protobuf:"bytes,21,opt,name=quarter,proto3" json:"quarter,omitempty"` + // Type of the search engine results page. + SearchEngineResultsPageType enums.SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType `protobuf:"varint,70,opt,name=search_engine_results_page_type,json=searchEngineResultsPageType,proto3,enum=google.ads.googleads.v1.enums.SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType" json:"search_engine_results_page_type,omitempty"` + // Match type of the keyword that triggered the ad, including variants. + SearchTermMatchType enums.SearchTermMatchTypeEnum_SearchTermMatchType `protobuf:"varint,22,opt,name=search_term_match_type,json=searchTermMatchType,proto3,enum=google.ads.googleads.v1.enums.SearchTermMatchTypeEnum_SearchTermMatchType" json:"search_term_match_type,omitempty"` + // Position of the ad. + Slot enums.SlotEnum_Slot `protobuf:"varint,23,opt,name=slot,proto3,enum=google.ads.googleads.v1.enums.SlotEnum_Slot" json:"slot,omitempty"` + // Resource name of the ad group criterion that represents webpage criterion. + Webpage *wrappers.StringValue `protobuf:"bytes,66,opt,name=webpage,proto3" json:"webpage,omitempty"` + // Week as defined as Monday through Sunday, and represented by the date of + // Monday. Formatted as yyyy-MM-dd. + Week *wrappers.StringValue `protobuf:"bytes,24,opt,name=week,proto3" json:"week,omitempty"` + // Year, formatted as yyyy. 
+ Year *wrappers.Int32Value `protobuf:"bytes,25,opt,name=year,proto3" json:"year,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Segments) Reset() { *m = Segments{} } +func (m *Segments) String() string { return proto.CompactTextString(m) } +func (*Segments) ProtoMessage() {} +func (*Segments) Descriptor() ([]byte, []int) { + return fileDescriptor_segments_284023ab6b41c387, []int{0} +} +func (m *Segments) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Segments.Unmarshal(m, b) +} +func (m *Segments) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Segments.Marshal(b, m, deterministic) +} +func (dst *Segments) XXX_Merge(src proto.Message) { + xxx_messageInfo_Segments.Merge(dst, src) +} +func (m *Segments) XXX_Size() int { + return xxx_messageInfo_Segments.Size(m) +} +func (m *Segments) XXX_DiscardUnknown() { + xxx_messageInfo_Segments.DiscardUnknown(m) +} + +var xxx_messageInfo_Segments proto.InternalMessageInfo + +func (m *Segments) GetAdNetworkType() enums.AdNetworkTypeEnum_AdNetworkType { + if m != nil { + return m.AdNetworkType + } + return enums.AdNetworkTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetClickType() enums.ClickTypeEnum_ClickType { + if m != nil { + return m.ClickType + } + return enums.ClickTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *Segments) GetConversionActionCategory() enums.ConversionActionCategoryEnum_ConversionActionCategory { + if m != nil { + return m.ConversionActionCategory + } + return enums.ConversionActionCategoryEnum_UNSPECIFIED +} + +func (m *Segments) GetConversionActionName() *wrappers.StringValue { + if m != nil { + return m.ConversionActionName + } + return nil +} + +func (m *Segments) GetConversionAdjustment() *wrappers.BoolValue { + if m != nil { + return m.ConversionAdjustment + } + return nil +} + +func (m *Segments) GetConversionAttributionEventType() enums.ConversionAttributionEventTypeEnum_ConversionAttributionEventType { + if m != nil { + return m.ConversionAttributionEventType + } + return enums.ConversionAttributionEventTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetConversionLagBucket() enums.ConversionLagBucketEnum_ConversionLagBucket { + if m != nil { + return m.ConversionLagBucket + } + return enums.ConversionLagBucketEnum_UNSPECIFIED +} + +func (m *Segments) GetConversionOrAdjustmentLagBucket() enums.ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket { + if m != nil { + return m.ConversionOrAdjustmentLagBucket + } + return enums.ConversionOrAdjustmentLagBucketEnum_UNSPECIFIED +} + +func (m *Segments) GetDate() *wrappers.StringValue { + if m != nil { + return m.Date + } + return nil +} + +func (m *Segments) GetDayOfWeek() enums.DayOfWeekEnum_DayOfWeek { + if m != nil { + return m.DayOfWeek + } + return enums.DayOfWeekEnum_UNSPECIFIED +} + +func (m *Segments) GetDevice() enums.DeviceEnum_Device { + if m != nil { + return m.Device + } + return enums.DeviceEnum_UNSPECIFIED +} + +func (m *Segments) GetExternalConversionSource() enums.ExternalConversionSourceEnum_ExternalConversionSource { + if m != nil { + return m.ExternalConversionSource + } + return enums.ExternalConversionSourceEnum_UNSPECIFIED +} + +func (m *Segments) GetGeoTargetAirport() *wrappers.StringValue { + if m != nil { + return m.GeoTargetAirport + } + return nil +} + +func (m *Segments) 
GetGeoTargetCity() *wrappers.StringValue { + if m != nil { + return m.GeoTargetCity + } + return nil +} + +func (m *Segments) GetGeoTargetCounty() *wrappers.StringValue { + if m != nil { + return m.GeoTargetCounty + } + return nil +} + +func (m *Segments) GetGeoTargetDistrict() *wrappers.StringValue { + if m != nil { + return m.GeoTargetDistrict + } + return nil +} + +func (m *Segments) GetGeoTargetMetro() *wrappers.StringValue { + if m != nil { + return m.GeoTargetMetro + } + return nil +} + +func (m *Segments) GetGeoTargetMostSpecificLocation() *wrappers.StringValue { + if m != nil { + return m.GeoTargetMostSpecificLocation + } + return nil +} + +func (m *Segments) GetGeoTargetPostalCode() *wrappers.StringValue { + if m != nil { + return m.GeoTargetPostalCode + } + return nil +} + +func (m *Segments) GetGeoTargetRegion() *wrappers.StringValue { + if m != nil { + return m.GeoTargetRegion + } + return nil +} + +func (m *Segments) GetGeoTargetState() *wrappers.StringValue { + if m != nil { + return m.GeoTargetState + } + return nil +} + +func (m *Segments) GetHotelBookingWindowDays() *wrappers.Int64Value { + if m != nil { + return m.HotelBookingWindowDays + } + return nil +} + +func (m *Segments) GetHotelCenterId() *wrappers.Int64Value { + if m != nil { + return m.HotelCenterId + } + return nil +} + +func (m *Segments) GetHotelCheckInDate() *wrappers.StringValue { + if m != nil { + return m.HotelCheckInDate + } + return nil +} + +func (m *Segments) GetHotelCheckInDayOfWeek() enums.DayOfWeekEnum_DayOfWeek { + if m != nil { + return m.HotelCheckInDayOfWeek + } + return enums.DayOfWeekEnum_UNSPECIFIED +} + +func (m *Segments) GetHotelCity() *wrappers.StringValue { + if m != nil { + return m.HotelCity + } + return nil +} + +func (m *Segments) GetHotelClass() *wrappers.Int32Value { + if m != nil { + return m.HotelClass + } + return nil +} + +func (m *Segments) GetHotelCountry() *wrappers.StringValue { + if m != nil { + return m.HotelCountry + } + return nil +} + +func (m *Segments) GetHotelDateSelectionType() enums.HotelDateSelectionTypeEnum_HotelDateSelectionType { + if m != nil { + return m.HotelDateSelectionType + } + return enums.HotelDateSelectionTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetHotelLengthOfStay() *wrappers.Int32Value { + if m != nil { + return m.HotelLengthOfStay + } + return nil +} + +func (m *Segments) GetHotelState() *wrappers.StringValue { + if m != nil { + return m.HotelState + } + return nil +} + +func (m *Segments) GetHour() *wrappers.Int32Value { + if m != nil { + return m.Hour + } + return nil +} + +func (m *Segments) GetInteractionOnThisExtension() *wrappers.BoolValue { + if m != nil { + return m.InteractionOnThisExtension + } + return nil +} + +func (m *Segments) GetKeyword() *Keyword { + if m != nil { + return m.Keyword + } + return nil +} + +func (m *Segments) GetMonth() *wrappers.StringValue { + if m != nil { + return m.Month + } + return nil +} + +func (m *Segments) GetMonthOfYear() enums.MonthOfYearEnum_MonthOfYear { + if m != nil { + return m.MonthOfYear + } + return enums.MonthOfYearEnum_UNSPECIFIED +} + +func (m *Segments) GetPartnerHotelId() *wrappers.StringValue { + if m != nil { + return m.PartnerHotelId + } + return nil +} + +func (m *Segments) GetPlaceholderType() enums.PlaceholderTypeEnum_PlaceholderType { + if m != nil { + return m.PlaceholderType + } + return enums.PlaceholderTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetProductAggregatorId() *wrappers.UInt64Value { + if m != nil { + return m.ProductAggregatorId + } + return nil +} + +func (m 
*Segments) GetProductBiddingCategoryLevel1() *wrappers.StringValue { + if m != nil { + return m.ProductBiddingCategoryLevel1 + } + return nil +} + +func (m *Segments) GetProductBiddingCategoryLevel2() *wrappers.StringValue { + if m != nil { + return m.ProductBiddingCategoryLevel2 + } + return nil +} + +func (m *Segments) GetProductBiddingCategoryLevel3() *wrappers.StringValue { + if m != nil { + return m.ProductBiddingCategoryLevel3 + } + return nil +} + +func (m *Segments) GetProductBiddingCategoryLevel4() *wrappers.StringValue { + if m != nil { + return m.ProductBiddingCategoryLevel4 + } + return nil +} + +func (m *Segments) GetProductBiddingCategoryLevel5() *wrappers.StringValue { + if m != nil { + return m.ProductBiddingCategoryLevel5 + } + return nil +} + +func (m *Segments) GetProductBrand() *wrappers.StringValue { + if m != nil { + return m.ProductBrand + } + return nil +} + +func (m *Segments) GetProductChannel() enums.ProductChannelEnum_ProductChannel { + if m != nil { + return m.ProductChannel + } + return enums.ProductChannelEnum_UNSPECIFIED +} + +func (m *Segments) GetProductChannelExclusivity() enums.ProductChannelExclusivityEnum_ProductChannelExclusivity { + if m != nil { + return m.ProductChannelExclusivity + } + return enums.ProductChannelExclusivityEnum_UNSPECIFIED +} + +func (m *Segments) GetProductCondition() enums.ProductConditionEnum_ProductCondition { + if m != nil { + return m.ProductCondition + } + return enums.ProductConditionEnum_UNSPECIFIED +} + +func (m *Segments) GetProductCountry() *wrappers.StringValue { + if m != nil { + return m.ProductCountry + } + return nil +} + +func (m *Segments) GetProductCustomAttribute0() *wrappers.StringValue { + if m != nil { + return m.ProductCustomAttribute0 + } + return nil +} + +func (m *Segments) GetProductCustomAttribute1() *wrappers.StringValue { + if m != nil { + return m.ProductCustomAttribute1 + } + return nil +} + +func (m *Segments) GetProductCustomAttribute2() *wrappers.StringValue { + if m != nil { + return m.ProductCustomAttribute2 + } + return nil +} + +func (m *Segments) GetProductCustomAttribute3() *wrappers.StringValue { + if m != nil { + return m.ProductCustomAttribute3 + } + return nil +} + +func (m *Segments) GetProductCustomAttribute4() *wrappers.StringValue { + if m != nil { + return m.ProductCustomAttribute4 + } + return nil +} + +func (m *Segments) GetProductItemId() *wrappers.StringValue { + if m != nil { + return m.ProductItemId + } + return nil +} + +func (m *Segments) GetProductLanguage() *wrappers.StringValue { + if m != nil { + return m.ProductLanguage + } + return nil +} + +func (m *Segments) GetProductMerchantId() *wrappers.UInt64Value { + if m != nil { + return m.ProductMerchantId + } + return nil +} + +func (m *Segments) GetProductStoreId() *wrappers.StringValue { + if m != nil { + return m.ProductStoreId + } + return nil +} + +func (m *Segments) GetProductTitle() *wrappers.StringValue { + if m != nil { + return m.ProductTitle + } + return nil +} + +func (m *Segments) GetProductTypeL1() *wrappers.StringValue { + if m != nil { + return m.ProductTypeL1 + } + return nil +} + +func (m *Segments) GetProductTypeL2() *wrappers.StringValue { + if m != nil { + return m.ProductTypeL2 + } + return nil +} + +func (m *Segments) GetProductTypeL3() *wrappers.StringValue { + if m != nil { + return m.ProductTypeL3 + } + return nil +} + +func (m *Segments) GetProductTypeL4() *wrappers.StringValue { + if m != nil { + return m.ProductTypeL4 + } + return nil +} + +func (m *Segments) GetProductTypeL5() 
*wrappers.StringValue { + if m != nil { + return m.ProductTypeL5 + } + return nil +} + +func (m *Segments) GetQuarter() *wrappers.StringValue { + if m != nil { + return m.Quarter + } + return nil +} + +func (m *Segments) GetSearchEngineResultsPageType() enums.SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType { + if m != nil { + return m.SearchEngineResultsPageType + } + return enums.SearchEngineResultsPageTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetSearchTermMatchType() enums.SearchTermMatchTypeEnum_SearchTermMatchType { + if m != nil { + return m.SearchTermMatchType + } + return enums.SearchTermMatchTypeEnum_UNSPECIFIED +} + +func (m *Segments) GetSlot() enums.SlotEnum_Slot { + if m != nil { + return m.Slot + } + return enums.SlotEnum_UNSPECIFIED +} + +func (m *Segments) GetWebpage() *wrappers.StringValue { + if m != nil { + return m.Webpage + } + return nil +} + +func (m *Segments) GetWeek() *wrappers.StringValue { + if m != nil { + return m.Week + } + return nil +} + +func (m *Segments) GetYear() *wrappers.Int32Value { + if m != nil { + return m.Year + } + return nil +} + +// A Keyword criterion segment. +type Keyword struct { + // The AdGroupCriterion resource name. + AdGroupCriterion *wrappers.StringValue `protobuf:"bytes,1,opt,name=ad_group_criterion,json=adGroupCriterion,proto3" json:"ad_group_criterion,omitempty"` + // Keyword info. + Info *KeywordInfo `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Keyword) Reset() { *m = Keyword{} } +func (m *Keyword) String() string { return proto.CompactTextString(m) } +func (*Keyword) ProtoMessage() {} +func (*Keyword) Descriptor() ([]byte, []int) { + return fileDescriptor_segments_284023ab6b41c387, []int{1} +} +func (m *Keyword) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Keyword.Unmarshal(m, b) +} +func (m *Keyword) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Keyword.Marshal(b, m, deterministic) +} +func (dst *Keyword) XXX_Merge(src proto.Message) { + xxx_messageInfo_Keyword.Merge(dst, src) +} +func (m *Keyword) XXX_Size() int { + return xxx_messageInfo_Keyword.Size(m) +} +func (m *Keyword) XXX_DiscardUnknown() { + xxx_messageInfo_Keyword.DiscardUnknown(m) +} + +var xxx_messageInfo_Keyword proto.InternalMessageInfo + +func (m *Keyword) GetAdGroupCriterion() *wrappers.StringValue { + if m != nil { + return m.AdGroupCriterion + } + return nil +} + +func (m *Keyword) GetInfo() *KeywordInfo { + if m != nil { + return m.Info + } + return nil +} + +func init() { + proto.RegisterType((*Segments)(nil), "google.ads.googleads.v1.common.Segments") + proto.RegisterType((*Keyword)(nil), "google.ads.googleads.v1.common.Keyword") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/segments.proto", fileDescriptor_segments_284023ab6b41c387) +} + +var fileDescriptor_segments_284023ab6b41c387 = []byte{ + // 2031 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x99, 0x5d, 0x7b, 0xdb, 0xb6, + 0x15, 0xc7, 0x1f, 0xa5, 0x6e, 0xd2, 0xc0, 0x75, 0x6c, 0xc3, 0x89, 0x8b, 0x38, 0xef, 0xee, 0xb6, + 0x66, 0x7d, 0x91, 0xac, 0x17, 0x67, 0xab, 0xd2, 0xbc, 0xc8, 0xb2, 0xeb, 0x28, 0x75, 0x6a, 0x8f, + 0x72, 0xdd, 0xad, 0x4f, 0x36, 0x0e, 0x26, 0x21, 0x8a, 0x33, 0x49, 0x30, 0x20, 0x64, 0x57, 0x37, + 0xdb, 0x07, 0xd8, 0xdd, 0x2e, 0x72, 0xbd, 0x67, 0x97, 0xdb, 0xcd, 0x2e, 0xf7, 0x19, 0xf6, 0x49, 
+ 0xf6, 0xec, 0x53, 0xf4, 0x01, 0x40, 0x52, 0x94, 0x64, 0x89, 0x50, 0x7c, 0x25, 0x12, 0x38, 0xff, + 0xdf, 0x39, 0x3c, 0x04, 0x0e, 0x08, 0x08, 0x7c, 0xe1, 0x50, 0xea, 0x78, 0xa4, 0x84, 0xed, 0xa8, + 0xa4, 0x2e, 0xc5, 0xd5, 0x69, 0xb9, 0x64, 0x51, 0xdf, 0xa7, 0x41, 0x29, 0x22, 0x8e, 0x4f, 0x02, + 0x1e, 0x15, 0x43, 0x46, 0x39, 0x85, 0x77, 0x95, 0x4d, 0x11, 0xdb, 0x51, 0x31, 0x35, 0x2f, 0x9e, + 0x96, 0x8b, 0xca, 0x7c, 0x2d, 0x0f, 0x67, 0x31, 0x97, 0x13, 0xe6, 0x62, 0x85, 0x5b, 0xab, 0x4e, + 0x32, 0x27, 0x41, 0xcf, 0x8f, 0x4a, 0xd8, 0x36, 0x03, 0xc2, 0xcf, 0x28, 0x3b, 0x31, 0x79, 0x3f, + 0x24, 0xb1, 0xa8, 0x38, 0x5d, 0x64, 0x79, 0xae, 0x35, 0x64, 0xff, 0x34, 0xc7, 0x9e, 0x06, 0xa7, + 0x84, 0x45, 0x2e, 0x0d, 0x4c, 0x6c, 0x71, 0xf1, 0x63, 0x61, 0x4e, 0x1c, 0xca, 0xfa, 0xb1, 0x7e, + 0x47, 0x5f, 0xcf, 0x39, 0x73, 0x8f, 0x7b, 0x12, 0x42, 0x4e, 0x49, 0xc0, 0xb3, 0x61, 0x7c, 0xa9, + 0x8d, 0xf1, 0xb0, 0x63, 0x1e, 0xf7, 0xac, 0x13, 0xc2, 0x63, 0xe9, 0xae, 0xb6, 0x94, 0x32, 0x13, + 0xdb, 0x7f, 0xea, 0x45, 0x5c, 0xbc, 0xb4, 0x71, 0x50, 0x69, 0x3a, 0xc8, 0xc6, 0x7d, 0x93, 0x76, + 0xcc, 0x33, 0x42, 0x4e, 0x62, 0xc1, 0xa7, 0x39, 0x02, 0x72, 0xea, 0x5a, 0x9a, 0x79, 0x26, 0x3f, + 0x72, 0xc2, 0x02, 0xec, 0x99, 0x99, 0x70, 0x23, 0xda, 0x63, 0xa9, 0xfe, 0xc9, 0x74, 0x7d, 0x97, + 0x72, 0xe2, 0x99, 0x36, 0xe6, 0xc4, 0x8c, 0x88, 0x47, 0xd4, 0xab, 0xca, 0xe4, 0xb7, 0x3c, 0x5d, + 0xee, 0xd3, 0x80, 0x77, 0xc5, 0xd3, 0xf5, 0x09, 0x66, 0xb1, 0xa4, 0x36, 0x5d, 0x12, 0x7a, 0xd8, + 0x22, 0x5d, 0xea, 0xd9, 0x84, 0x65, 0x1d, 0xe5, 0x0c, 0xda, 0x90, 0x51, 0xbb, 0x67, 0x71, 0xd3, + 0xea, 0xe2, 0x20, 0x20, 0x5e, 0x2c, 0x7a, 0x36, 0x93, 0xc8, 0x24, 0x3f, 0x5a, 0x5e, 0x2f, 0x72, + 0x4f, 0x5d, 0x9e, 0x8c, 0xc2, 0x4d, 0x4d, 0x00, 0x0d, 0x6c, 0x57, 0xa4, 0x26, 0x96, 0x35, 0xa7, + 0xcb, 0x22, 0x82, 0x99, 0xd5, 0x35, 0x49, 0xe0, 0xb8, 0x01, 0x31, 0x19, 0x89, 0x7a, 0x1e, 0x8f, + 0xcc, 0x10, 0x3b, 0x24, 0xfb, 0xc4, 0x75, 0x2d, 0x08, 0x27, 0xcc, 0x37, 0x7d, 0xcc, 0xc5, 0xe5, + 0x40, 0xfb, 0x30, 0x47, 0xeb, 0xd1, 0x64, 0x70, 0xc6, 0xb5, 0xa5, 0x24, 0xef, 0x8e, 0x7b, 0x9d, + 0xd2, 0x19, 0xc3, 0x61, 0x48, 0x58, 0x5c, 0x7b, 0xd6, 0x6e, 0x27, 0xa4, 0xd0, 0x2d, 0xe1, 0x20, + 0xa0, 0x1c, 0x8b, 0xe7, 0x8c, 0x7b, 0xd7, 0xff, 0x53, 0x06, 0x1f, 0xb4, 0xe3, 0x62, 0x05, 0x3b, + 0x60, 0x71, 0xa4, 0x76, 0xa0, 0xf7, 0xee, 0x17, 0x1e, 0x5e, 0xab, 0x3c, 0x2d, 0x4e, 0x2a, 0x60, + 0x32, 0x9c, 0x62, 0xc3, 0xfe, 0x56, 0x89, 0x0e, 0xfb, 0x21, 0xd9, 0x09, 0x7a, 0xfe, 0x70, 0x8b, + 0xb1, 0x80, 0xb3, 0xb7, 0xf0, 0x3b, 0x00, 0x06, 0xe5, 0x06, 0xad, 0x49, 0x17, 0x8f, 0x72, 0x5c, + 0x34, 0x85, 0x20, 0xc5, 0xa7, 0x77, 0xc6, 0x55, 0x2b, 0xb9, 0x84, 0x2d, 0xb0, 0x3c, 0x56, 0x95, + 0x50, 0xed, 0x7e, 0xe1, 0xe1, 0x7c, 0xe5, 0x76, 0x42, 0x4f, 0xb2, 0x54, 0x6c, 0x73, 0xe6, 0x06, + 0xce, 0x11, 0xf6, 0x7a, 0xc4, 0x58, 0x1a, 0xc8, 0x1a, 0x52, 0x05, 0xff, 0x56, 0x00, 0x6b, 0x93, + 0x2b, 0x1c, 0xda, 0x94, 0x21, 0x1f, 0xe6, 0x85, 0x3c, 0x42, 0x6d, 0xc6, 0x72, 0xf5, 0x04, 0x13, + 0x3a, 0x0d, 0x64, 0x4d, 0xe8, 0x81, 0x06, 0x58, 0x1d, 0x8f, 0x29, 0xc0, 0x3e, 0x41, 0x8f, 0x34, + 0x1e, 0xf2, 0xfa, 0x28, 0xf7, 0x5b, 0xec, 0x13, 0xb8, 0x0f, 0x6e, 0x64, 0x99, 0x69, 0x11, 0x44, + 0xb7, 0x24, 0x72, 0x6d, 0x0c, 0xb9, 0x45, 0xa9, 0x37, 0x0e, 0x4c, 0x75, 0xf0, 0x5f, 0x05, 0xf0, + 0x20, 0xb7, 0xb6, 0xa3, 0x4b, 0x32, 0x81, 0x7f, 0xd4, 0x4f, 0xe0, 0x00, 0xb3, 0x23, 0x28, 0x83, + 0x81, 0x30, 0xd5, 0xc4, 0xb8, 0x6b, 0x4d, 0xed, 0x87, 0x7f, 0x1e, 0x7a, 0xfc, 0x41, 0xe1, 0x47, + 0x15, 0x19, 0xe0, 0x4b, 0xed, 0x00, 0xf7, 0xb0, 0xb3, 0x25, 0x95, 0x23, 0x51, 0xa5, 0xed, 0xc6, + 0x8a, 0x35, 0xde, 0x08, 
0xff, 0x5d, 0x00, 0x1f, 0x6b, 0xac, 0x43, 0xa8, 0x2a, 0xc3, 0x39, 0xd6, + 0x0e, 0x67, 0x9f, 0x0d, 0x5e, 0xc9, 0xa4, 0xd0, 0xce, 0xb5, 0x31, 0xee, 0x59, 0xd3, 0x0d, 0xe0, + 0x06, 0x98, 0x13, 0xab, 0x09, 0x9a, 0xd3, 0x18, 0x73, 0xd2, 0x12, 0x1e, 0x81, 0xf9, 0xcc, 0x12, + 0x89, 0xde, 0xd7, 0x9a, 0xef, 0xdb, 0xb8, 0xbf, 0xdf, 0xf9, 0x9e, 0x90, 0x13, 0x19, 0x75, 0x7a, + 0x67, 0x5c, 0xb5, 0x93, 0x4b, 0xf8, 0x02, 0x5c, 0x56, 0x2b, 0x29, 0x2a, 0x48, 0xe4, 0x46, 0x1e, + 0x52, 0x1a, 0x2b, 0x9e, 0xbc, 0x34, 0x62, 0xbd, 0x9c, 0xee, 0x93, 0x17, 0x5a, 0xf4, 0x2b, 0xad, + 0xe9, 0xbe, 0x13, 0x03, 0x06, 0x19, 0x6e, 0x4b, 0xb9, 0x74, 0x38, 0xa9, 0xd3, 0x40, 0x64, 0x42, + 0x0f, 0x7c, 0x09, 0xa0, 0x43, 0xa8, 0xc9, 0x31, 0x73, 0x08, 0x37, 0xb1, 0xcb, 0x42, 0xca, 0x38, + 0x6a, 0xe8, 0xd4, 0x33, 0x87, 0xd0, 0x43, 0x29, 0x6b, 0x28, 0x15, 0xdc, 0x06, 0x8b, 0x19, 0x96, + 0xe5, 0xf2, 0x3e, 0x7a, 0xaa, 0x01, 0x5a, 0x48, 0x41, 0x4d, 0x97, 0xf7, 0xe1, 0x0b, 0xb0, 0x9c, + 0xa5, 0xd0, 0x5e, 0xc0, 0xfb, 0x68, 0x5b, 0x83, 0xb3, 0x38, 0xe0, 0x48, 0x11, 0xdc, 0x03, 0x2b, + 0x19, 0x92, 0xed, 0x46, 0x9c, 0xb9, 0x16, 0x47, 0x3b, 0x1a, 0xac, 0xe5, 0x94, 0xb5, 0x1d, 0xcb, + 0xe0, 0xd7, 0x60, 0x29, 0x43, 0xf3, 0x09, 0x67, 0x14, 0x3d, 0xd3, 0x40, 0x5d, 0x4b, 0x51, 0xaf, + 0x84, 0x06, 0x76, 0xc0, 0x83, 0x2c, 0x87, 0x46, 0xdc, 0x8c, 0x42, 0x62, 0xb9, 0x1d, 0xd7, 0x32, + 0x3d, 0x6a, 0xc9, 0x85, 0x13, 0xbd, 0xd0, 0x00, 0xdf, 0x19, 0x80, 0x69, 0xc4, 0xdb, 0x31, 0x63, + 0x2f, 0x46, 0xc0, 0xdf, 0x80, 0xd5, 0x8c, 0x9f, 0x90, 0x46, 0x5c, 0x0e, 0x3b, 0x9b, 0xa0, 0x5d, + 0x0d, 0xf8, 0x4a, 0x0a, 0x3f, 0x90, 0xca, 0x26, 0xb5, 0xc9, 0xc8, 0xab, 0x61, 0xc4, 0x11, 0xa1, + 0x3e, 0x9f, 0xe9, 0xd5, 0x18, 0x52, 0x34, 0x92, 0xcc, 0x88, 0x8b, 0xb9, 0xde, 0x9c, 0x29, 0x99, + 0x6d, 0xae, 0x66, 0xfd, 0x4d, 0xf5, 0xed, 0x79, 0x4c, 0xe9, 0x89, 0x1b, 0x38, 0xe6, 0x99, 0x1b, + 0xd8, 0xf4, 0xcc, 0xb4, 0x71, 0x3f, 0x42, 0x97, 0x25, 0xf0, 0xd6, 0x18, 0xb0, 0x15, 0xf0, 0x47, + 0x35, 0xc5, 0x5b, 0x95, 0xea, 0x2d, 0x25, 0xfe, 0x5e, 0x6a, 0xb7, 0x71, 0x3f, 0x82, 0x4d, 0xb0, + 0xa8, 0xb8, 0x16, 0x09, 0x38, 0x61, 0xa6, 0x6b, 0xa3, 0x2b, 0xf9, 0xb4, 0x05, 0xa9, 0x69, 0x4a, + 0x49, 0xcb, 0x86, 0xdf, 0x80, 0x95, 0x18, 0xd2, 0x25, 0xd6, 0x89, 0xe9, 0x06, 0xf2, 0x0b, 0x19, + 0x7d, 0xa0, 0x33, 0xb9, 0x14, 0x49, 0xe8, 0x5a, 0xc1, 0xb6, 0x78, 0xd2, 0x37, 0x60, 0x6d, 0x0c, + 0x36, 0x28, 0x77, 0x57, 0x2f, 0x54, 0xee, 0x6e, 0x0c, 0x7b, 0x4b, 0x4a, 0xdf, 0x63, 0x00, 0x62, + 0x97, 0x62, 0x2a, 0x03, 0x8d, 0xb0, 0xaf, 0x2a, 0x90, 0x98, 0xc6, 0x5f, 0x81, 0xf9, 0x58, 0xec, + 0xe1, 0x28, 0x42, 0xf3, 0x93, 0xb3, 0x57, 0xad, 0x28, 0xb1, 0x72, 0xd6, 0x14, 0xe6, 0xb0, 0x01, + 0x16, 0x62, 0xb5, 0x98, 0xca, 0xac, 0x8f, 0x3e, 0xd4, 0xf0, 0xfe, 0xa1, 0x02, 0x28, 0x05, 0xfc, + 0x6b, 0x21, 0x19, 0x1b, 0xe7, 0xec, 0x4b, 0xd0, 0x82, 0x4c, 0xd8, 0x41, 0x4e, 0xc2, 0x5e, 0x08, + 0xbd, 0x48, 0x7f, 0x3b, 0x51, 0xa7, 0xdf, 0x04, 0xe7, 0x77, 0xc5, 0x03, 0x6a, 0xac, 0x1d, 0xee, + 0x81, 0xeb, 0x2a, 0x18, 0x8f, 0x04, 0x8e, 0xda, 0xec, 0x44, 0x1c, 0xf7, 0xd1, 0xb5, 0xfc, 0xbc, + 0x2c, 0x4b, 0xe1, 0x9e, 0xd4, 0xed, 0x77, 0xda, 0x1c, 0xf7, 0xe1, 0x93, 0x24, 0xb9, 0x6a, 0xe6, + 0x2c, 0x6a, 0x24, 0x47, 0x65, 0x57, 0xcd, 0x9a, 0x12, 0x98, 0xeb, 0xd2, 0x1e, 0x43, 0x4b, 0xf9, + 0xce, 0xa5, 0x21, 0xfc, 0x3d, 0xb8, 0xe3, 0x8a, 0x41, 0x1d, 0x7f, 0x0e, 0x8a, 0x24, 0x76, 0xdd, + 0xc8, 0x14, 0xab, 0x4a, 0x20, 0x16, 0x13, 0x54, 0xce, 0xfd, 0x90, 0x5b, 0xcb, 0x00, 0xf6, 0x83, + 0xc3, 0xae, 0x1b, 0xed, 0x24, 0x6a, 0xd8, 0x00, 0x57, 0x4e, 0x48, 0xff, 0x8c, 0x32, 0x1b, 0x3d, + 0x91, 0xa0, 0x4f, 0x8a, 0xd3, 0xcf, 0x32, 0x8a, 
0xdf, 0x28, 0x73, 0x23, 0xd1, 0xc1, 0x0a, 0x78, + 0x5f, 0xee, 0x22, 0xd1, 0xb2, 0x46, 0x2e, 0x94, 0x29, 0xfc, 0x03, 0x58, 0x18, 0xda, 0x79, 0x22, + 0x28, 0x07, 0x45, 0x3d, 0x67, 0x50, 0xbc, 0x12, 0x9a, 0xfd, 0xce, 0xef, 0x08, 0x66, 0x72, 0x24, + 0x64, 0xee, 0x8d, 0x79, 0x7f, 0x70, 0x23, 0x8a, 0x5c, 0x88, 0x19, 0x0f, 0x08, 0x33, 0xd5, 0xdb, + 0x72, 0x6d, 0xb4, 0xa2, 0x53, 0xe4, 0x62, 0x95, 0x1c, 0x62, 0x2d, 0x1b, 0xfa, 0x60, 0x69, 0x74, + 0xbb, 0x8b, 0xae, 0xcb, 0x50, 0xb7, 0x72, 0x42, 0x3d, 0x18, 0xc8, 0xd2, 0x81, 0x3b, 0xd2, 0x66, + 0x2c, 0x86, 0xc3, 0x0d, 0xf0, 0x00, 0xdc, 0x48, 0x76, 0xac, 0xd8, 0x71, 0x18, 0x71, 0x30, 0xa7, + 0xb2, 0x02, 0xde, 0x9e, 0x10, 0xfb, 0x77, 0x99, 0x12, 0xb8, 0x12, 0x4b, 0x1b, 0xa9, 0xb2, 0x65, + 0x43, 0x0b, 0xdc, 0x4b, 0x88, 0xc7, 0xae, 0x6d, 0x8b, 0x3a, 0x9d, 0xec, 0x72, 0x4c, 0x8f, 0x9c, + 0x12, 0xaf, 0x8c, 0x7e, 0xad, 0x91, 0x97, 0xdb, 0x31, 0x64, 0x4b, 0x31, 0x92, 0x2d, 0xcb, 0x9e, + 0x24, 0xe4, 0x3b, 0xa9, 0xa0, 0x2f, 0x2f, 0xe8, 0xa4, 0x92, 0xef, 0xa4, 0x8a, 0xea, 0x17, 0x74, + 0x52, 0xcd, 0x77, 0x52, 0x43, 0x8f, 0x2f, 0xe8, 0xa4, 0x96, 0xef, 0x64, 0x13, 0x7d, 0x75, 0x41, + 0x27, 0x9b, 0xa2, 0x8c, 0xa7, 0x4e, 0x18, 0x0e, 0x6c, 0x74, 0x47, 0xa7, 0x8c, 0x27, 0x48, 0xa1, + 0x80, 0x2e, 0x58, 0x1c, 0x39, 0x80, 0x41, 0x77, 0xe5, 0xd8, 0x7f, 0x9e, 0x37, 0xf6, 0x95, 0xaa, + 0xa9, 0x44, 0x6a, 0xe8, 0x0f, 0x35, 0x19, 0xd7, 0xc2, 0xa1, 0x7b, 0xf8, 0xb6, 0x00, 0x6e, 0x4d, + 0x39, 0xec, 0x41, 0xf7, 0xa4, 0xdf, 0xa3, 0xd9, 0xfc, 0x0e, 0xf4, 0xe7, 0x84, 0x90, 0xe9, 0x35, + 0x6e, 0x86, 0x93, 0xba, 0xe0, 0x1b, 0xb0, 0x3c, 0x76, 0x86, 0x84, 0xee, 0xcb, 0x68, 0xb6, 0x35, + 0xa3, 0x49, 0x64, 0x43, 0x41, 0x24, 0x8d, 0xc6, 0x52, 0x38, 0xd2, 0x02, 0x77, 0x32, 0x69, 0x8f, + 0x97, 0xe0, 0x07, 0x5a, 0xa5, 0x2b, 0x01, 0xa9, 0x45, 0xf8, 0xb7, 0xe0, 0x66, 0x8a, 0xe9, 0x45, + 0x9c, 0xfa, 0xe9, 0x5e, 0x9d, 0x6c, 0xa0, 0x75, 0x0d, 0xe0, 0x47, 0x09, 0x50, 0xaa, 0x1b, 0xa9, + 0x78, 0x1a, 0xb9, 0x8c, 0x3e, 0x7e, 0x77, 0x72, 0x79, 0x1a, 0xb9, 0x82, 0x7e, 0xf6, 0xee, 0xe4, + 0xca, 0x34, 0x72, 0x15, 0xfd, 0xfc, 0xdd, 0xc9, 0xd5, 0x69, 0xe4, 0x1a, 0xfa, 0xc5, 0xbb, 0x93, + 0x6b, 0x62, 0x53, 0x97, 0x90, 0x5d, 0x4e, 0x7c, 0xb1, 0x0e, 0x7c, 0xa2, 0xb3, 0xa9, 0x8b, 0x45, + 0x2d, 0x4e, 0xfc, 0x96, 0x0d, 0x77, 0x41, 0x32, 0xc4, 0x4c, 0x0f, 0x07, 0x4e, 0x0f, 0x3b, 0x04, + 0x3d, 0xd4, 0xd9, 0x38, 0xc4, 0xaa, 0xbd, 0x58, 0x24, 0xf6, 0x74, 0x09, 0xc8, 0x27, 0x4c, 0xcc, + 0x52, 0x2e, 0x42, 0xfa, 0xa5, 0xc6, 0xd2, 0x94, 0xcc, 0xa1, 0x57, 0xb1, 0xae, 0x65, 0xcb, 0x15, + 0x3a, 0xa6, 0x45, 0x9c, 0x32, 0x22, 0x50, 0x9f, 0xce, 0x30, 0xcc, 0xdb, 0x42, 0xd4, 0xb2, 0xb3, + 0x75, 0x8e, 0xbb, 0xdc, 0x23, 0xe8, 0xb3, 0x19, 0xea, 0xdc, 0xa1, 0x50, 0x64, 0xf3, 0x2c, 0x16, + 0x78, 0xd3, 0x2b, 0xa3, 0xcf, 0x67, 0xc8, 0xb3, 0x58, 0xb9, 0xf7, 0xca, 0xe3, 0x94, 0x0a, 0xfa, + 0x62, 0x56, 0x4a, 0x65, 0x9c, 0x52, 0x45, 0xc5, 0x59, 0x29, 0xd5, 0x71, 0x4a, 0x0d, 0x95, 0x66, + 0xa5, 0xd4, 0xc6, 0x29, 0x9b, 0x68, 0x63, 0x56, 0xca, 0x26, 0x7c, 0x04, 0xae, 0xbc, 0xe9, 0x61, + 0xc6, 0x09, 0x43, 0x37, 0x34, 0xd4, 0x89, 0x31, 0xfc, 0x7b, 0x01, 0xdc, 0xcb, 0x39, 0x87, 0x47, + 0x5f, 0xcb, 0x42, 0xfc, 0x43, 0x4e, 0x21, 0x6e, 0x4b, 0xca, 0x8e, 0x84, 0x18, 0x8a, 0x71, 0x80, + 0x1d, 0x92, 0x7e, 0x96, 0x4d, 0xe9, 0x37, 0x6e, 0x45, 0x93, 0x3b, 0xe1, 0x5f, 0xc0, 0xea, 0xf9, + 0x87, 0xfc, 0x68, 0x55, 0xeb, 0x78, 0x51, 0x39, 0x3e, 0x24, 0xcc, 0x7f, 0x25, 0xa4, 0x23, 0x01, + 0x0d, 0xb5, 0x1b, 0x2b, 0xd1, 0x78, 0x23, 0x7c, 0x0e, 0xe6, 0x22, 0x8f, 0x72, 0xf4, 0x91, 0x74, + 0xf7, 0x79, 0x9e, 0x3b, 0x8f, 0xaa, 0x33, 0x42, 0x71, 0x61, 0x48, 0xa5, 
0x78, 0x3b, 0x67, 0xe4, + 0x58, 0x64, 0x14, 0x6d, 0xe9, 0xbc, 0x9d, 0xd8, 0x18, 0x6e, 0x80, 0x39, 0xb9, 0xfb, 0x45, 0x3a, + 0xa7, 0x84, 0xc2, 0x52, 0xec, 0x7c, 0xe4, 0x97, 0xfe, 0x4d, 0x8d, 0x9d, 0x8f, 0x30, 0x5c, 0x7f, + 0x5b, 0x00, 0x57, 0xe2, 0xcd, 0x06, 0x7c, 0x09, 0x20, 0xb6, 0x4d, 0x87, 0xd1, 0x5e, 0x68, 0xc6, + 0x7f, 0x96, 0xd2, 0x40, 0x1e, 0x0b, 0xe6, 0x6e, 0xe7, 0xb1, 0xbd, 0x2b, 0x64, 0xcd, 0x44, 0x05, + 0x9f, 0x81, 0x39, 0x37, 0xe8, 0x50, 0x79, 0x46, 0x3d, 0x5f, 0xf9, 0x4c, 0x73, 0xbf, 0xd3, 0x0a, + 0x3a, 0xd4, 0x90, 0xc2, 0xad, 0xff, 0x15, 0xc0, 0xba, 0x45, 0xfd, 0x1c, 0xe1, 0xd6, 0x42, 0xf2, + 0xbf, 0xcb, 0x81, 0x88, 0xeb, 0xa0, 0xf0, 0xc3, 0x76, 0x2c, 0x70, 0xa8, 0x28, 0xc4, 0x45, 0xca, + 0x9c, 0x92, 0x43, 0x02, 0x19, 0x75, 0xf2, 0x1f, 0x50, 0xe8, 0x46, 0x93, 0xfe, 0x24, 0x7e, 0xac, + 0x7e, 0xfe, 0x71, 0xe9, 0xbd, 0xdd, 0x46, 0xe3, 0x9f, 0x97, 0xee, 0xee, 0x2a, 0x58, 0xc3, 0x8e, + 0x8a, 0xea, 0x52, 0x5c, 0x1d, 0x95, 0x8b, 0x4d, 0x69, 0xf6, 0xdf, 0xc4, 0xe0, 0x75, 0xc3, 0x8e, + 0x5e, 0xa7, 0x06, 0xaf, 0x8f, 0xca, 0xaf, 0x95, 0xc1, 0xff, 0x2f, 0xad, 0xab, 0xd6, 0x7a, 0xbd, + 0x61, 0x47, 0xf5, 0x7a, 0x6a, 0x52, 0xaf, 0x1f, 0x95, 0xeb, 0x75, 0x65, 0x74, 0x7c, 0x59, 0x46, + 0x57, 0xfd, 0x29, 0x00, 0x00, 0xff, 0xff, 0x15, 0xf6, 0x4f, 0x34, 0x10, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/simulation.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/simulation.pb.go new file mode 100644 index 0000000..c089b8a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/simulation.pb.go @@ -0,0 +1,478 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/simulation.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A container for simulation points for simulations of type BID_MODIFIER. +type BidModifierSimulationPointList struct { + // Projected metrics for a series of bid modifier amounts. 
+ Points []*BidModifierSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BidModifierSimulationPointList) Reset() { *m = BidModifierSimulationPointList{} } +func (m *BidModifierSimulationPointList) String() string { return proto.CompactTextString(m) } +func (*BidModifierSimulationPointList) ProtoMessage() {} +func (*BidModifierSimulationPointList) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{0} +} +func (m *BidModifierSimulationPointList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BidModifierSimulationPointList.Unmarshal(m, b) +} +func (m *BidModifierSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BidModifierSimulationPointList.Marshal(b, m, deterministic) +} +func (dst *BidModifierSimulationPointList) XXX_Merge(src proto.Message) { + xxx_messageInfo_BidModifierSimulationPointList.Merge(dst, src) +} +func (m *BidModifierSimulationPointList) XXX_Size() int { + return xxx_messageInfo_BidModifierSimulationPointList.Size(m) +} +func (m *BidModifierSimulationPointList) XXX_DiscardUnknown() { + xxx_messageInfo_BidModifierSimulationPointList.DiscardUnknown(m) +} + +var xxx_messageInfo_BidModifierSimulationPointList proto.InternalMessageInfo + +func (m *BidModifierSimulationPointList) GetPoints() []*BidModifierSimulationPoint { + if m != nil { + return m.Points + } + return nil +} + +// A container for simulation points for simulations of type CPC_BID. +type CpcBidSimulationPointList struct { + // Projected metrics for a series of cpc bid amounts. + Points []*CpcBidSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CpcBidSimulationPointList) Reset() { *m = CpcBidSimulationPointList{} } +func (m *CpcBidSimulationPointList) String() string { return proto.CompactTextString(m) } +func (*CpcBidSimulationPointList) ProtoMessage() {} +func (*CpcBidSimulationPointList) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{1} +} +func (m *CpcBidSimulationPointList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CpcBidSimulationPointList.Unmarshal(m, b) +} +func (m *CpcBidSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CpcBidSimulationPointList.Marshal(b, m, deterministic) +} +func (dst *CpcBidSimulationPointList) XXX_Merge(src proto.Message) { + xxx_messageInfo_CpcBidSimulationPointList.Merge(dst, src) +} +func (m *CpcBidSimulationPointList) XXX_Size() int { + return xxx_messageInfo_CpcBidSimulationPointList.Size(m) +} +func (m *CpcBidSimulationPointList) XXX_DiscardUnknown() { + xxx_messageInfo_CpcBidSimulationPointList.DiscardUnknown(m) +} + +var xxx_messageInfo_CpcBidSimulationPointList proto.InternalMessageInfo + +func (m *CpcBidSimulationPointList) GetPoints() []*CpcBidSimulationPoint { + if m != nil { + return m.Points + } + return nil +} + +// A container for simulation points for simulations of type TARGET_CPA. +type TargetCpaSimulationPointList struct { + // Projected metrics for a series of target cpa amounts. 
+ Points []*TargetCpaSimulationPoint `protobuf:"bytes,1,rep,name=points,proto3" json:"points,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetCpaSimulationPointList) Reset() { *m = TargetCpaSimulationPointList{} } +func (m *TargetCpaSimulationPointList) String() string { return proto.CompactTextString(m) } +func (*TargetCpaSimulationPointList) ProtoMessage() {} +func (*TargetCpaSimulationPointList) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{2} +} +func (m *TargetCpaSimulationPointList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetCpaSimulationPointList.Unmarshal(m, b) +} +func (m *TargetCpaSimulationPointList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetCpaSimulationPointList.Marshal(b, m, deterministic) +} +func (dst *TargetCpaSimulationPointList) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetCpaSimulationPointList.Merge(dst, src) +} +func (m *TargetCpaSimulationPointList) XXX_Size() int { + return xxx_messageInfo_TargetCpaSimulationPointList.Size(m) +} +func (m *TargetCpaSimulationPointList) XXX_DiscardUnknown() { + xxx_messageInfo_TargetCpaSimulationPointList.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetCpaSimulationPointList proto.InternalMessageInfo + +func (m *TargetCpaSimulationPointList) GetPoints() []*TargetCpaSimulationPoint { + if m != nil { + return m.Points + } + return nil +} + +// Projected metrics for a specific bid modifier amount. +type BidModifierSimulationPoint struct { + // The simulated bid modifier upon which projected metrics are based. + BidModifier *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // Projected number of biddable conversions. + BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"` + // Projected total value of biddable conversions. + BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"` + // Projected number of clicks. + Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"` + // Projected cost in micros. + CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"` + // Projected number of impressions. + Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"` + // Projected number of top slot impressions. 
+ TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BidModifierSimulationPoint) Reset() { *m = BidModifierSimulationPoint{} } +func (m *BidModifierSimulationPoint) String() string { return proto.CompactTextString(m) } +func (*BidModifierSimulationPoint) ProtoMessage() {} +func (*BidModifierSimulationPoint) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{3} +} +func (m *BidModifierSimulationPoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BidModifierSimulationPoint.Unmarshal(m, b) +} +func (m *BidModifierSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BidModifierSimulationPoint.Marshal(b, m, deterministic) +} +func (dst *BidModifierSimulationPoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_BidModifierSimulationPoint.Merge(dst, src) +} +func (m *BidModifierSimulationPoint) XXX_Size() int { + return xxx_messageInfo_BidModifierSimulationPoint.Size(m) +} +func (m *BidModifierSimulationPoint) XXX_DiscardUnknown() { + xxx_messageInfo_BidModifierSimulationPoint.DiscardUnknown(m) +} + +var xxx_messageInfo_BidModifierSimulationPoint proto.InternalMessageInfo + +func (m *BidModifierSimulationPoint) GetBidModifier() *wrappers.DoubleValue { + if m != nil { + return m.BidModifier + } + return nil +} + +func (m *BidModifierSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversions + } + return nil +} + +func (m *BidModifierSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversionsValue + } + return nil +} + +func (m *BidModifierSimulationPoint) GetClicks() *wrappers.Int64Value { + if m != nil { + return m.Clicks + } + return nil +} + +func (m *BidModifierSimulationPoint) GetCostMicros() *wrappers.Int64Value { + if m != nil { + return m.CostMicros + } + return nil +} + +func (m *BidModifierSimulationPoint) GetImpressions() *wrappers.Int64Value { + if m != nil { + return m.Impressions + } + return nil +} + +func (m *BidModifierSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value { + if m != nil { + return m.TopSlotImpressions + } + return nil +} + +// Projected metrics for a specific cpc bid amount. +type CpcBidSimulationPoint struct { + // The simulated cpc bid upon which projected metrics are based. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + // Projected number of biddable conversions. + BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"` + // Projected total value of biddable conversions. + BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"` + // Projected number of clicks. + Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"` + // Projected cost in micros. + CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"` + // Projected number of impressions. 
+ Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"` + // Projected number of top slot impressions. + // Display network does not support this field at the ad group level. + TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CpcBidSimulationPoint) Reset() { *m = CpcBidSimulationPoint{} } +func (m *CpcBidSimulationPoint) String() string { return proto.CompactTextString(m) } +func (*CpcBidSimulationPoint) ProtoMessage() {} +func (*CpcBidSimulationPoint) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{4} +} +func (m *CpcBidSimulationPoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CpcBidSimulationPoint.Unmarshal(m, b) +} +func (m *CpcBidSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CpcBidSimulationPoint.Marshal(b, m, deterministic) +} +func (dst *CpcBidSimulationPoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CpcBidSimulationPoint.Merge(dst, src) +} +func (m *CpcBidSimulationPoint) XXX_Size() int { + return xxx_messageInfo_CpcBidSimulationPoint.Size(m) +} +func (m *CpcBidSimulationPoint) XXX_DiscardUnknown() { + xxx_messageInfo_CpcBidSimulationPoint.DiscardUnknown(m) +} + +var xxx_messageInfo_CpcBidSimulationPoint proto.InternalMessageInfo + +func (m *CpcBidSimulationPoint) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func (m *CpcBidSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversions + } + return nil +} + +func (m *CpcBidSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversionsValue + } + return nil +} + +func (m *CpcBidSimulationPoint) GetClicks() *wrappers.Int64Value { + if m != nil { + return m.Clicks + } + return nil +} + +func (m *CpcBidSimulationPoint) GetCostMicros() *wrappers.Int64Value { + if m != nil { + return m.CostMicros + } + return nil +} + +func (m *CpcBidSimulationPoint) GetImpressions() *wrappers.Int64Value { + if m != nil { + return m.Impressions + } + return nil +} + +func (m *CpcBidSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value { + if m != nil { + return m.TopSlotImpressions + } + return nil +} + +// Projected metrics for a specific target cpa amount. +type TargetCpaSimulationPoint struct { + // The simulated target cpa upon which projected metrics are based. + TargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=target_cpa_micros,json=targetCpaMicros,proto3" json:"target_cpa_micros,omitempty"` + // Projected number of biddable conversions. + BiddableConversions *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=biddable_conversions,json=biddableConversions,proto3" json:"biddable_conversions,omitempty"` + // Projected total value of biddable conversions. + BiddableConversionsValue *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=biddable_conversions_value,json=biddableConversionsValue,proto3" json:"biddable_conversions_value,omitempty"` + // Projected number of clicks. + Clicks *wrappers.Int64Value `protobuf:"bytes,4,opt,name=clicks,proto3" json:"clicks,omitempty"` + // Projected cost in micros. 
+ CostMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"` + // Projected number of impressions. + Impressions *wrappers.Int64Value `protobuf:"bytes,6,opt,name=impressions,proto3" json:"impressions,omitempty"` + // Projected number of top slot impressions. + // Display network does not support this field at the ad group level. + TopSlotImpressions *wrappers.Int64Value `protobuf:"bytes,7,opt,name=top_slot_impressions,json=topSlotImpressions,proto3" json:"top_slot_impressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetCpaSimulationPoint) Reset() { *m = TargetCpaSimulationPoint{} } +func (m *TargetCpaSimulationPoint) String() string { return proto.CompactTextString(m) } +func (*TargetCpaSimulationPoint) ProtoMessage() {} +func (*TargetCpaSimulationPoint) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_236f10bcdfcd99b7, []int{5} +} +func (m *TargetCpaSimulationPoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetCpaSimulationPoint.Unmarshal(m, b) +} +func (m *TargetCpaSimulationPoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetCpaSimulationPoint.Marshal(b, m, deterministic) +} +func (dst *TargetCpaSimulationPoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetCpaSimulationPoint.Merge(dst, src) +} +func (m *TargetCpaSimulationPoint) XXX_Size() int { + return xxx_messageInfo_TargetCpaSimulationPoint.Size(m) +} +func (m *TargetCpaSimulationPoint) XXX_DiscardUnknown() { + xxx_messageInfo_TargetCpaSimulationPoint.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetCpaSimulationPoint proto.InternalMessageInfo + +func (m *TargetCpaSimulationPoint) GetTargetCpaMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetCpaMicros + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetBiddableConversions() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversions + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetBiddableConversionsValue() *wrappers.DoubleValue { + if m != nil { + return m.BiddableConversionsValue + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetClicks() *wrappers.Int64Value { + if m != nil { + return m.Clicks + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetCostMicros() *wrappers.Int64Value { + if m != nil { + return m.CostMicros + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetImpressions() *wrappers.Int64Value { + if m != nil { + return m.Impressions + } + return nil +} + +func (m *TargetCpaSimulationPoint) GetTopSlotImpressions() *wrappers.Int64Value { + if m != nil { + return m.TopSlotImpressions + } + return nil +} + +func init() { + proto.RegisterType((*BidModifierSimulationPointList)(nil), "google.ads.googleads.v1.common.BidModifierSimulationPointList") + proto.RegisterType((*CpcBidSimulationPointList)(nil), "google.ads.googleads.v1.common.CpcBidSimulationPointList") + proto.RegisterType((*TargetCpaSimulationPointList)(nil), "google.ads.googleads.v1.common.TargetCpaSimulationPointList") + proto.RegisterType((*BidModifierSimulationPoint)(nil), "google.ads.googleads.v1.common.BidModifierSimulationPoint") + proto.RegisterType((*CpcBidSimulationPoint)(nil), "google.ads.googleads.v1.common.CpcBidSimulationPoint") + proto.RegisterType((*TargetCpaSimulationPoint)(nil), "google.ads.googleads.v1.common.TargetCpaSimulationPoint") +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/common/simulation.proto", fileDescriptor_simulation_236f10bcdfcd99b7) +} + +var fileDescriptor_simulation_236f10bcdfcd99b7 = []byte{ + // 570 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x96, 0xdf, 0x6a, 0xdb, 0x30, + 0x18, 0xc5, 0x71, 0x9a, 0x65, 0xa0, 0x94, 0x95, 0x69, 0x1d, 0x78, 0x59, 0x29, 0x25, 0x57, 0xbd, + 0x92, 0x49, 0xbb, 0x8d, 0xe1, 0x6d, 0x0c, 0x27, 0x85, 0x50, 0x58, 0x58, 0x48, 0x47, 0x2e, 0x4a, + 0xc0, 0xc8, 0x92, 0x6b, 0xb4, 0xd9, 0x96, 0xb0, 0x94, 0xec, 0x15, 0x06, 0xbb, 0xda, 0x2b, 0xec, + 0x72, 0x8f, 0xb2, 0x47, 0xe9, 0x53, 0x0c, 0x4b, 0xb6, 0x13, 0x3a, 0x67, 0xee, 0x2e, 0x0b, 0xbd, + 0xca, 0x97, 0x7c, 0xe7, 0xfc, 0x8e, 0xff, 0x1c, 0x62, 0x03, 0x27, 0xe2, 0x3c, 0x8a, 0x43, 0x07, + 0x53, 0x59, 0x8c, 0xf9, 0xb4, 0x1a, 0x38, 0x84, 0x27, 0x09, 0x4f, 0x1d, 0xc9, 0x92, 0x65, 0x8c, + 0x15, 0xe3, 0x29, 0x12, 0x19, 0x57, 0x1c, 0x1e, 0x1a, 0x15, 0xc2, 0x54, 0xa2, 0xca, 0x80, 0x56, + 0x03, 0x64, 0x0c, 0xbd, 0x62, 0xef, 0x68, 0x75, 0xb0, 0xbc, 0x72, 0xbe, 0x66, 0x58, 0x88, 0x30, + 0x93, 0xc6, 0xdf, 0x3b, 0x28, 0x03, 0x05, 0x73, 0x70, 0x9a, 0x72, 0xa5, 0xe1, 0xc5, 0xb6, 0xaf, + 0xc0, 0xe1, 0x90, 0xd1, 0x09, 0xa7, 0xec, 0x8a, 0x85, 0xd9, 0x45, 0x15, 0x3e, 0xe5, 0x2c, 0x55, + 0x1f, 0x98, 0x54, 0x70, 0x06, 0x3a, 0x22, 0xff, 0x22, 0x6d, 0xeb, 0x68, 0xe7, 0xb8, 0x7b, 0xe2, + 0xa2, 0x7f, 0x1f, 0x10, 0xda, 0xce, 0x9b, 0x15, 0xa4, 0xfe, 0x67, 0xf0, 0x6c, 0x24, 0xc8, 0x90, + 0xd1, 0xba, 0xc0, 0xc9, 0x8d, 0xc0, 0x97, 0x4d, 0x81, 0xb5, 0xa8, 0x2a, 0x4b, 0x80, 0x83, 0x4f, + 0x38, 0x8b, 0x42, 0x35, 0x12, 0xb8, 0x2e, 0x6e, 0x7a, 0x23, 0xee, 0x75, 0x53, 0xdc, 0x36, 0x5a, + 0x95, 0xf8, 0xbd, 0x0d, 0x7a, 0xdb, 0x2f, 0x02, 0x7c, 0x0f, 0x76, 0x03, 0x46, 0xfd, 0xa4, 0x58, + 0xdb, 0xd6, 0x91, 0x75, 0xdc, 0x3d, 0x39, 0x28, 0x63, 0xcb, 0xfb, 0x88, 0xce, 0xf8, 0x32, 0x88, + 0xc3, 0x39, 0x8e, 0x97, 0xe1, 0xac, 0x1b, 0xac, 0x79, 0xf0, 0x23, 0xd8, 0x0f, 0x18, 0xa5, 0x38, + 0x88, 0x43, 0x9f, 0xf0, 0x74, 0x15, 0x66, 0x32, 0xbf, 0xa3, 0x76, 0xeb, 0x16, 0xa0, 0x27, 0xa5, + 0x73, 0xb4, 0x36, 0xc2, 0x4b, 0xd0, 0xab, 0x03, 0xfa, 0xab, 0xdc, 0x62, 0xef, 0xdc, 0x02, 0x6b, + 0xd7, 0x60, 0xf5, 0x06, 0x9e, 0x82, 0x0e, 0x89, 0x19, 0xf9, 0x22, 0xed, 0xb6, 0xe6, 0x3c, 0xff, + 0x8b, 0x73, 0x9e, 0xaa, 0x57, 0x2f, 0x0c, 0xa6, 0x90, 0xc2, 0xb7, 0xa0, 0x4b, 0xb8, 0x54, 0x7e, + 0xc2, 0x48, 0xc6, 0xa5, 0xfd, 0xa0, 0xd9, 0x09, 0x72, 0xfd, 0x44, 0xcb, 0xe1, 0x3b, 0xd0, 0x65, + 0x89, 0xc8, 0x42, 0x69, 0x2e, 0x4b, 0xa7, 0xd9, 0xbd, 0xa9, 0x87, 0x13, 0xb0, 0xaf, 0xb8, 0xf0, + 0x65, 0xcc, 0x95, 0xbf, 0xc9, 0x79, 0xd8, 0xcc, 0x81, 0x8a, 0x8b, 0x8b, 0x98, 0xab, 0xf3, 0xb5, + 0xad, 0xff, 0xad, 0x0d, 0x9e, 0xd6, 0x36, 0x14, 0x7a, 0xe0, 0x11, 0x11, 0xc4, 0xd7, 0x65, 0x30, + 0x27, 0x6a, 0x35, 0x47, 0xec, 0x12, 0xcd, 0x2a, 0x4e, 0xf5, 0xbe, 0x0a, 0x77, 0xa9, 0x0a, 0x3f, + 0xda, 0xc0, 0xde, 0xf6, 0xef, 0x01, 0xc7, 0xe0, 0xb1, 0xd2, 0x3b, 0x9f, 0x08, 0xfc, 0x1f, 0x85, + 0xd8, 0x53, 0x25, 0xf1, 0xbe, 0x13, 0x77, 0xaf, 0x13, 0xc3, 0x6b, 0x0b, 0xf4, 0x09, 0x4f, 0x1a, + 0x1e, 0x3a, 0xc3, 0xbd, 0x8d, 0xba, 0xe4, 0xe4, 0xa9, 0x75, 0x79, 0x56, 0x58, 0x22, 0x1e, 0xe3, + 0x34, 0x42, 0x3c, 0x8b, 0x9c, 0x28, 0x4c, 0x75, 0x6e, 0xf9, 0x66, 0x21, 0x98, 0xdc, 0xf6, 0xa2, + 0xf1, 0xc6, 0x7c, 0xfc, 0x6c, 0xed, 0x8c, 0x3d, 0xef, 0x57, 0xeb, 0x70, 0x6c, 0x60, 0x1e, 0x95, + 0xc8, 0x8c, 0xf9, 0x34, 0x1f, 0xa0, 0x91, 0x96, 0xfd, 0x2e, 0x05, 0x0b, 0x8f, 0xca, 0x45, 0x25, + 0x58, 0xcc, 0x07, 0x0b, 0x23, 0xb8, 0x6e, 0xf5, 0xcd, 0xaf, 0xae, 0xeb, 0x51, 0xe9, 0xba, 
0x95, + 0xc4, 0x75, 0xe7, 0x03, 0xd7, 0x35, 0xa2, 0xa0, 0xa3, 0x8f, 0xee, 0xf4, 0x4f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xf9, 0xd3, 0x30, 0x77, 0x05, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/tag_snippet.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/tag_snippet.pb.go new file mode 100644 index 0000000..5cd92f8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/tag_snippet.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/tag_snippet.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The site tag and event snippet pair for a TrackingCodeType. +type TagSnippet struct { + // The type of the generated tag snippets for tracking conversions. + Type enums.TrackingCodeTypeEnum_TrackingCodeType `protobuf:"varint,1,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.TrackingCodeTypeEnum_TrackingCodeType" json:"type,omitempty"` + // The format of the web page where the tracking tag and snippet will be + // installed, e.g. HTML. + PageFormat enums.TrackingCodePageFormatEnum_TrackingCodePageFormat `protobuf:"varint,2,opt,name=page_format,json=pageFormat,proto3,enum=google.ads.googleads.v1.enums.TrackingCodePageFormatEnum_TrackingCodePageFormat" json:"page_format,omitempty"` + // The site tag that adds visitors to your basic remarketing lists and sets + // new cookies on your domain. + GlobalSiteTag *wrappers.StringValue `protobuf:"bytes,3,opt,name=global_site_tag,json=globalSiteTag,proto3" json:"global_site_tag,omitempty"` + // The event snippet that works with the site tag to track actions that + // should be counted as conversions. 
+ EventSnippet *wrappers.StringValue `protobuf:"bytes,4,opt,name=event_snippet,json=eventSnippet,proto3" json:"event_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TagSnippet) Reset() { *m = TagSnippet{} } +func (m *TagSnippet) String() string { return proto.CompactTextString(m) } +func (*TagSnippet) ProtoMessage() {} +func (*TagSnippet) Descriptor() ([]byte, []int) { + return fileDescriptor_tag_snippet_8ac1c7c28c0f8743, []int{0} +} +func (m *TagSnippet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TagSnippet.Unmarshal(m, b) +} +func (m *TagSnippet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TagSnippet.Marshal(b, m, deterministic) +} +func (dst *TagSnippet) XXX_Merge(src proto.Message) { + xxx_messageInfo_TagSnippet.Merge(dst, src) +} +func (m *TagSnippet) XXX_Size() int { + return xxx_messageInfo_TagSnippet.Size(m) +} +func (m *TagSnippet) XXX_DiscardUnknown() { + xxx_messageInfo_TagSnippet.DiscardUnknown(m) +} + +var xxx_messageInfo_TagSnippet proto.InternalMessageInfo + +func (m *TagSnippet) GetType() enums.TrackingCodeTypeEnum_TrackingCodeType { + if m != nil { + return m.Type + } + return enums.TrackingCodeTypeEnum_UNSPECIFIED +} + +func (m *TagSnippet) GetPageFormat() enums.TrackingCodePageFormatEnum_TrackingCodePageFormat { + if m != nil { + return m.PageFormat + } + return enums.TrackingCodePageFormatEnum_UNSPECIFIED +} + +func (m *TagSnippet) GetGlobalSiteTag() *wrappers.StringValue { + if m != nil { + return m.GlobalSiteTag + } + return nil +} + +func (m *TagSnippet) GetEventSnippet() *wrappers.StringValue { + if m != nil { + return m.EventSnippet + } + return nil +} + +func init() { + proto.RegisterType((*TagSnippet)(nil), "google.ads.googleads.v1.common.TagSnippet") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/tag_snippet.proto", fileDescriptor_tag_snippet_8ac1c7c28c0f8743) +} + +var fileDescriptor_tag_snippet_8ac1c7c28c0f8743 = []byte{ + // 421 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4f, 0x6b, 0xdb, 0x30, + 0x14, 0xc7, 0x6e, 0xd9, 0x41, 0x5d, 0x57, 0xf0, 0x29, 0x94, 0x52, 0x4a, 0x4e, 0x3d, 0x49, 0x73, + 0x07, 0x3b, 0x68, 0xec, 0xe0, 0x36, 0x5b, 0xaf, 0x21, 0x09, 0x61, 0x8c, 0x80, 0x79, 0x89, 0x5f, + 0x85, 0x98, 0x2d, 0x69, 0x96, 0x92, 0xd1, 0xaf, 0xb3, 0xe3, 0x3e, 0xca, 0x3e, 0xc6, 0x8e, 0xfd, + 0x14, 0xc3, 0x92, 0xec, 0x0e, 0x4a, 0x46, 0x7b, 0xf2, 0xcf, 0x7a, 0xbf, 0x3f, 0xef, 0x49, 0x8f, + 0xbc, 0x15, 0x5a, 0x8b, 0x1a, 0x19, 0x54, 0x96, 0x05, 0xd8, 0xa1, 0x5d, 0xce, 0x36, 0xba, 0x69, + 0xb4, 0x62, 0x0e, 0x44, 0x69, 0x95, 0x34, 0x06, 0x1d, 0x35, 0xad, 0x76, 0x3a, 0x3b, 0x0f, 0x34, + 0x0a, 0x95, 0xa5, 0x83, 0x82, 0xee, 0x72, 0x1a, 0x14, 0xa7, 0x1f, 0xf7, 0x39, 0xa2, 0xda, 0x36, + 0x96, 0xb9, 0x16, 0x36, 0xdf, 0xa4, 0x12, 0xe5, 0x46, 0x57, 0x58, 0x1a, 0x10, 0x58, 0xde, 0xe9, + 0xb6, 0x81, 0x68, 0x7f, 0xfa, 0xfe, 0x25, 0x72, 0x77, 0x6f, 0x30, 0xea, 0x62, 0x5b, 0xcc, 0xff, + 0xad, 0xb7, 0x77, 0xec, 0x47, 0x0b, 0xc6, 0x60, 0x6b, 0x63, 0xfd, 0xac, 0xf7, 0x35, 0x92, 0x81, + 0x52, 0xda, 0x81, 0x93, 0x5a, 0xc5, 0xea, 0xf8, 0x4f, 0x4a, 0xc8, 0x02, 0xc4, 0x3c, 0x4c, 0x9a, + 0x7d, 0x21, 0x87, 0x9d, 0xf5, 0x28, 0xb9, 0x48, 0x2e, 0xdf, 0x5c, 0x4d, 0xe8, 0xbe, 0x91, 0x7d, + 0x4f, 0x74, 0x11, 0x7b, 0xba, 0xd1, 0x15, 0x2e, 0xee, 0x0d, 0x7e, 0x52, 0xdb, 0xe6, 0xc9, 0xe1, + 0xcc, 0x3b, 0x66, 0xdf, 0xc9, 0xd1, 0x3f, 0x33, 0x8f, 0x52, 0x1f, 
0x30, 0x7d, 0x41, 0xc0, 0x14, + 0x04, 0x7e, 0xf6, 0xe2, 0x27, 0x31, 0x8f, 0xa5, 0x19, 0x31, 0x03, 0xce, 0x26, 0xe4, 0x44, 0xd4, + 0x7a, 0x0d, 0x75, 0x69, 0xa5, 0xc3, 0xd2, 0x81, 0x18, 0x1d, 0x5c, 0x24, 0x97, 0x47, 0x57, 0x67, + 0x7d, 0x6c, 0x7f, 0x67, 0x74, 0xee, 0x5a, 0xa9, 0xc4, 0x12, 0xea, 0x2d, 0xce, 0x8e, 0x83, 0x68, + 0x2e, 0x1d, 0x2e, 0x40, 0x64, 0x05, 0x39, 0xc6, 0x1d, 0x2a, 0xd7, 0x6f, 0xc3, 0xe8, 0xf0, 0x19, + 0x1e, 0xaf, 0xbd, 0x24, 0xde, 0xea, 0xf5, 0x43, 0x42, 0xc6, 0x1b, 0xdd, 0xd0, 0xff, 0x2f, 0xd0, + 0xf5, 0xc9, 0xe3, 0x43, 0x4c, 0x3b, 0xd3, 0x69, 0xf2, 0x75, 0x12, 0x25, 0x42, 0xd7, 0xa0, 0x04, + 0xd5, 0xad, 0x60, 0x02, 0x95, 0x8f, 0xec, 0x97, 0xc4, 0x48, 0xbb, 0x6f, 0x89, 0x3f, 0x84, 0xcf, + 0xcf, 0xf4, 0xe0, 0xb6, 0x28, 0x7e, 0xa5, 0xe7, 0xb7, 0xc1, 0xac, 0xa8, 0x2c, 0x0d, 0xb0, 0x43, + 0xcb, 0x9c, 0xde, 0x78, 0xda, 0xef, 0x9e, 0xb0, 0x2a, 0x2a, 0xbb, 0x1a, 0x08, 0xab, 0x65, 0xbe, + 0x0a, 0x84, 0x87, 0x74, 0x1c, 0x4e, 0x39, 0x2f, 0x2a, 0xcb, 0xf9, 0x40, 0xe1, 0x7c, 0x99, 0x73, + 0x1e, 0x48, 0xeb, 0x57, 0xbe, 0xbb, 0x77, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x9c, 0x9e, 0x62, + 0x27, 0x61, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/targeting_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/targeting_setting.pb.go new file mode 100644 index 0000000..4fd2722 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/targeting_setting.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/targeting_setting.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Settings for the +// +// targeting related features, at Campaign and AdGroup level. +type TargetingSetting struct { + // The per-targeting-dimension setting to restrict the reach of your campaign + // or ad group. 
+ TargetRestrictions []*TargetRestriction `protobuf:"bytes,1,rep,name=target_restrictions,json=targetRestrictions,proto3" json:"target_restrictions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetingSetting) Reset() { *m = TargetingSetting{} } +func (m *TargetingSetting) String() string { return proto.CompactTextString(m) } +func (*TargetingSetting) ProtoMessage() {} +func (*TargetingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_targeting_setting_4ec905eba5714fb0, []int{0} +} +func (m *TargetingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetingSetting.Unmarshal(m, b) +} +func (m *TargetingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetingSetting.Marshal(b, m, deterministic) +} +func (dst *TargetingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetingSetting.Merge(dst, src) +} +func (m *TargetingSetting) XXX_Size() int { + return xxx_messageInfo_TargetingSetting.Size(m) +} +func (m *TargetingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_TargetingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetingSetting proto.InternalMessageInfo + +func (m *TargetingSetting) GetTargetRestrictions() []*TargetRestriction { + if m != nil { + return m.TargetRestrictions + } + return nil +} + +// The list of per-targeting-dimension targeting settings. +type TargetRestriction struct { + // The targeting dimension that these settings apply to. + TargetingDimension enums.TargetingDimensionEnum_TargetingDimension `protobuf:"varint,1,opt,name=targeting_dimension,json=targetingDimension,proto3,enum=google.ads.googleads.v1.enums.TargetingDimensionEnum_TargetingDimension" json:"targeting_dimension,omitempty"` + // Indicates whether to restrict your ads to show only for the criteria you + // have selected for this targeting_dimension, or to target all values for + // this targeting_dimension and show ads based on your targeting in other + // TargetingDimensions. A value of 'true' means that these criteria will only + // apply bid modifiers, and not affect targeting. A value of 'false' means + // that these criteria will restrict targeting as well as applying bid + // modifiers. 
+ BidOnly *wrappers.BoolValue `protobuf:"bytes,2,opt,name=bid_only,json=bidOnly,proto3" json:"bid_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetRestriction) Reset() { *m = TargetRestriction{} } +func (m *TargetRestriction) String() string { return proto.CompactTextString(m) } +func (*TargetRestriction) ProtoMessage() {} +func (*TargetRestriction) Descriptor() ([]byte, []int) { + return fileDescriptor_targeting_setting_4ec905eba5714fb0, []int{1} +} +func (m *TargetRestriction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetRestriction.Unmarshal(m, b) +} +func (m *TargetRestriction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetRestriction.Marshal(b, m, deterministic) +} +func (dst *TargetRestriction) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetRestriction.Merge(dst, src) +} +func (m *TargetRestriction) XXX_Size() int { + return xxx_messageInfo_TargetRestriction.Size(m) +} +func (m *TargetRestriction) XXX_DiscardUnknown() { + xxx_messageInfo_TargetRestriction.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetRestriction proto.InternalMessageInfo + +func (m *TargetRestriction) GetTargetingDimension() enums.TargetingDimensionEnum_TargetingDimension { + if m != nil { + return m.TargetingDimension + } + return enums.TargetingDimensionEnum_UNSPECIFIED +} + +func (m *TargetRestriction) GetBidOnly() *wrappers.BoolValue { + if m != nil { + return m.BidOnly + } + return nil +} + +func init() { + proto.RegisterType((*TargetingSetting)(nil), "google.ads.googleads.v1.common.TargetingSetting") + proto.RegisterType((*TargetRestriction)(nil), "google.ads.googleads.v1.common.TargetRestriction") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/targeting_setting.proto", fileDescriptor_targeting_setting_4ec905eba5714fb0) +} + +var fileDescriptor_targeting_setting_4ec905eba5714fb0 = []byte{ + // 392 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x4f, 0x6b, 0xdb, 0x30, + 0x18, 0xc6, 0xb1, 0x03, 0xdb, 0x70, 0x60, 0x6c, 0x1e, 0x83, 0x10, 0x46, 0x08, 0x3e, 0xe5, 0x24, + 0xe1, 0x8c, 0x6d, 0xa0, 0x9d, 0x9c, 0x65, 0xa4, 0xb7, 0x86, 0xb4, 0xf8, 0x50, 0x0c, 0x41, 0x8e, + 0x54, 0x21, 0xb0, 0x25, 0x63, 0xc9, 0x29, 0xf9, 0x3a, 0x3d, 0xf6, 0x7b, 0xf4, 0xd2, 0x8f, 0xd2, + 0x53, 0x3f, 0x42, 0xb1, 0x65, 0xb9, 0x69, 0xda, 0xb4, 0x27, 0xbd, 0x7a, 0xf5, 0x3c, 0xbf, 0xf7, + 0x0f, 0xf2, 0x7e, 0x33, 0x29, 0x59, 0x46, 0x21, 0x26, 0x0a, 0x9a, 0xb0, 0x8e, 0xb6, 0x21, 0xdc, + 0xc8, 0x3c, 0x97, 0x02, 0x6a, 0x5c, 0x32, 0xaa, 0xb9, 0x60, 0x6b, 0x45, 0x75, 0x7d, 0x82, 0xa2, + 0x94, 0x5a, 0xfa, 0x23, 0x23, 0x06, 0x98, 0x28, 0xd0, 0xf9, 0xc0, 0x36, 0x04, 0xc6, 0x37, 0xfc, + 0x73, 0x8c, 0x4b, 0x45, 0x95, 0xab, 0x3d, 0x2c, 0xe1, 0x39, 0x15, 0x8a, 0x4b, 0x61, 0xc0, 0xc3, + 0x16, 0x0c, 0x9b, 0x5b, 0x5a, 0x5d, 0xc2, 0xab, 0x12, 0x17, 0x05, 0x2d, 0x55, 0xfb, 0xfe, 0xc3, + 0x82, 0x0b, 0x0e, 0xb1, 0x10, 0x52, 0x63, 0xcd, 0xa5, 0x68, 0x5f, 0x83, 0xad, 0xf7, 0xe5, 0xdc, + 0xa2, 0xcf, 0x4c, 0xc3, 0x7e, 0xea, 0x7d, 0x33, 0xe5, 0xd6, 0x25, 0x55, 0xba, 0xe4, 0x9b, 0xc6, + 0x30, 0x70, 0xc6, 0xbd, 0x49, 0x7f, 0x1a, 0x82, 0xb7, 0x07, 0x01, 0x06, 0xb7, 0x7a, 0x72, 0xae, + 0x7c, 0x7d, 0x98, 0x52, 0xc1, 0xad, 0xe3, 0x7d, 0x7d, 0xa1, 0xf4, 0x77, 0xb6, 0xf2, 0xb3, 0x41, + 0x07, 0xce, 0xd8, 0x99, 0x7c, 0x9e, 0x9e, 0x1c, 0xad, 0xdc, 0xac, 0x08, 0x74, 0x73, 0xcc, 0xad, + 0xf1, 0xbf, 0xa8, 0xf2, 0x57, 
0xd2, 0xb6, 0xa1, 0xfd, 0x9c, 0xff, 0xcb, 0xfb, 0x94, 0x72, 0xb2, + 0x96, 0x22, 0xdb, 0x0d, 0xdc, 0xb1, 0x33, 0xe9, 0x4f, 0x87, 0xb6, 0x9e, 0xdd, 0x2c, 0x98, 0x49, + 0x99, 0xc5, 0x38, 0xab, 0xe8, 0xea, 0x63, 0xca, 0xc9, 0xa9, 0xc8, 0x76, 0xb3, 0x07, 0xc7, 0x0b, + 0x36, 0x32, 0x7f, 0x67, 0x29, 0xb3, 0xef, 0x87, 0x4b, 0x5e, 0xd6, 0xcc, 0xa5, 0x73, 0x31, 0x6f, + 0x8d, 0x4c, 0x66, 0x58, 0x30, 0x20, 0x4b, 0x06, 0x19, 0x15, 0x4d, 0x45, 0xfb, 0x0d, 0x0a, 0xae, + 0x8e, 0xfd, 0xb6, 0xbf, 0xe6, 0xb8, 0x76, 0x7b, 0x8b, 0x28, 0xba, 0x71, 0x47, 0x0b, 0x03, 0x8b, + 0x88, 0x02, 0x26, 0xac, 0xa3, 0x38, 0x04, 0xff, 0x1a, 0xd9, 0x9d, 0x15, 0x24, 0x11, 0x51, 0x49, + 0x27, 0x48, 0xe2, 0x30, 0x31, 0x82, 0x7b, 0x37, 0x30, 0x59, 0x84, 0x22, 0xa2, 0x10, 0xea, 0x24, + 0x08, 0xc5, 0x21, 0x42, 0x46, 0x94, 0x7e, 0x68, 0xba, 0xfb, 0xf9, 0x18, 0x00, 0x00, 0xff, 0xff, + 0x1a, 0xf2, 0x7a, 0x09, 0x0a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/text_label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/text_label.pb.go new file mode 100644 index 0000000..abc9857 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/text_label.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/text_label.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A type of label displaying text on a colored background. +type TextLabel struct { + // Background color of the label in RGB format. This string must match the + // regular expression '^\#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})$'. + // Note: The background color may not be visible for manager accounts. + BackgroundColor *wrappers.StringValue `protobuf:"bytes,1,opt,name=background_color,json=backgroundColor,proto3" json:"background_color,omitempty"` + // A short description of the label. The length must be no more than 200 + // characters. 
+ Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextLabel) Reset() { *m = TextLabel{} } +func (m *TextLabel) String() string { return proto.CompactTextString(m) } +func (*TextLabel) ProtoMessage() {} +func (*TextLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_text_label_0fdcf32aa0b0b273, []int{0} +} +func (m *TextLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextLabel.Unmarshal(m, b) +} +func (m *TextLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextLabel.Marshal(b, m, deterministic) +} +func (dst *TextLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextLabel.Merge(dst, src) +} +func (m *TextLabel) XXX_Size() int { + return xxx_messageInfo_TextLabel.Size(m) +} +func (m *TextLabel) XXX_DiscardUnknown() { + xxx_messageInfo_TextLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_TextLabel proto.InternalMessageInfo + +func (m *TextLabel) GetBackgroundColor() *wrappers.StringValue { + if m != nil { + return m.BackgroundColor + } + return nil +} + +func (m *TextLabel) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func init() { + proto.RegisterType((*TextLabel)(nil), "google.ads.googleads.v1.common.TextLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/text_label.proto", fileDescriptor_text_label_0fdcf32aa0b0b273) +} + +var fileDescriptor_text_label_0fdcf32aa0b0b273 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xc1, 0x4a, 0xfb, 0x30, + 0x1c, 0xc7, 0x69, 0xff, 0xf0, 0x07, 0x3b, 0x50, 0xd9, 0x69, 0x8c, 0x31, 0x64, 0x27, 0x4f, 0x09, + 0xd5, 0x5b, 0x04, 0xa1, 0x9b, 0xb0, 0x8b, 0x87, 0xa1, 0xd2, 0x83, 0x14, 0x46, 0xda, 0xc4, 0x50, + 0xcc, 0xf2, 0x0b, 0x49, 0x36, 0xf7, 0x20, 0x3e, 0x81, 0x47, 0x1f, 0xc5, 0x37, 0xd1, 0xa7, 0x90, + 0x34, 0x6d, 0xf5, 0xa2, 0x78, 0xea, 0x97, 0xe6, 0xf3, 0xf9, 0x7d, 0x93, 0x5f, 0x82, 0x05, 0x80, + 0x90, 0x1c, 0x53, 0x66, 0xdb, 0xe8, 0xd3, 0x2e, 0xc5, 0x15, 0x6c, 0x36, 0xa0, 0xb0, 0xe3, 0x7b, + 0xb7, 0x96, 0xb4, 0xe4, 0x12, 0x69, 0x03, 0x0e, 0x86, 0xd3, 0x40, 0x21, 0xca, 0x2c, 0xea, 0x05, + 0xb4, 0x4b, 0x51, 0x10, 0xc6, 0xed, 0x39, 0x6e, 0xe8, 0x72, 0xfb, 0x80, 0x9f, 0x0c, 0xd5, 0x9a, + 0x1b, 0x1b, 0xfc, 0xf1, 0xa4, 0x2b, 0xd4, 0x35, 0xa6, 0x4a, 0x81, 0xa3, 0xae, 0x06, 0xd5, 0x9e, + 0xce, 0x9e, 0xa3, 0xe4, 0xe0, 0x8e, 0xef, 0xdd, 0xb5, 0x6f, 0x1c, 0x2e, 0x93, 0xe3, 0x92, 0x56, + 0x8f, 0xc2, 0xc0, 0x56, 0xb1, 0x75, 0x05, 0x12, 0xcc, 0x28, 0x3a, 0x89, 0x4e, 0x07, 0x67, 0x93, + 0xb6, 0x1b, 0x75, 0x35, 0xe8, 0xd6, 0x99, 0x5a, 0x89, 0x9c, 0xca, 0x2d, 0xbf, 0x39, 0xfa, 0xb2, + 0x16, 0x5e, 0x1a, 0x5e, 0x26, 0x03, 0xc6, 0x6d, 0x65, 0x6a, 0xed, 0xcb, 0x46, 0xf1, 0x1f, 0x66, + 0x7c, 0x17, 0xe6, 0xef, 0x51, 0x32, 0xab, 0x60, 0x83, 0x7e, 0x7f, 0xfb, 0xfc, 0xb0, 0xbf, 0xfa, + 0xca, 0x8f, 0x5c, 0x45, 0xf7, 0x57, 0xad, 0x21, 0x40, 0x52, 0x25, 0x10, 0x18, 0x81, 0x05, 0x57, + 0x4d, 0x61, 0xb7, 0x6e, 0x5d, 0xdb, 0x9f, 0xb6, 0x7f, 0x11, 0x3e, 0x2f, 0xf1, 0xbf, 0x65, 0x96, + 0xbd, 0xc6, 0xd3, 0x65, 0x18, 0x96, 0x31, 0x8b, 0x42, 0xf4, 0x29, 0x4f, 0xd1, 0xa2, 0xc1, 0xde, + 0x3a, 0xa0, 0xc8, 0x98, 0x2d, 0x7a, 0xa0, 0xc8, 0xd3, 0x22, 0x00, 0x1f, 0xf1, 0x2c, 0xfc, 0x25, + 0x24, 0x63, 0x96, 0x90, 0x1e, 0x21, 0x24, 0x4f, 0x09, 0x09, 0x50, 0xf9, 0xbf, 0xb9, 
0xdd, 0xf9, + 0x67, 0x00, 0x00, 0x00, 0xff, 0xff, 0xae, 0x01, 0x9e, 0x77, 0x1a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/url_collection.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/url_collection.pb.go new file mode 100644 index 0000000..76bd7fe --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/url_collection.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/url_collection.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Collection of urls that is tagged with a unique identifier. +type UrlCollection struct { + // Unique identifier for this UrlCollection instance. + UrlCollectionId *wrappers.StringValue `protobuf:"bytes,1,opt,name=url_collection_id,json=urlCollectionId,proto3" json:"url_collection_id,omitempty"` + // A list of possible final URLs. + FinalUrls []*wrappers.StringValue `protobuf:"bytes,2,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of possible final mobile URLs. + FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,3,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // URL template for constructing a tracking URL. 
+ TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,4,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UrlCollection) Reset() { *m = UrlCollection{} } +func (m *UrlCollection) String() string { return proto.CompactTextString(m) } +func (*UrlCollection) ProtoMessage() {} +func (*UrlCollection) Descriptor() ([]byte, []int) { + return fileDescriptor_url_collection_4410b50aa3630e5e, []int{0} +} +func (m *UrlCollection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UrlCollection.Unmarshal(m, b) +} +func (m *UrlCollection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UrlCollection.Marshal(b, m, deterministic) +} +func (dst *UrlCollection) XXX_Merge(src proto.Message) { + xxx_messageInfo_UrlCollection.Merge(dst, src) +} +func (m *UrlCollection) XXX_Size() int { + return xxx_messageInfo_UrlCollection.Size(m) +} +func (m *UrlCollection) XXX_DiscardUnknown() { + xxx_messageInfo_UrlCollection.DiscardUnknown(m) +} + +var xxx_messageInfo_UrlCollection proto.InternalMessageInfo + +func (m *UrlCollection) GetUrlCollectionId() *wrappers.StringValue { + if m != nil { + return m.UrlCollectionId + } + return nil +} + +func (m *UrlCollection) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *UrlCollection) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *UrlCollection) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func init() { + proto.RegisterType((*UrlCollection)(nil), "google.ads.googleads.v1.common.UrlCollection") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/url_collection.proto", fileDescriptor_url_collection_4410b50aa3630e5e) +} + +var fileDescriptor_url_collection_4410b50aa3630e5e = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x6a, 0xeb, 0x30, + 0x14, 0x86, 0xb1, 0x73, 0xb9, 0x70, 0x7d, 0xb9, 0x84, 0xeb, 0x52, 0x08, 0x21, 0x84, 0x90, 0xa9, + 0x93, 0x84, 0x9b, 0x4d, 0x99, 0x9c, 0x14, 0xd2, 0x0e, 0x85, 0xd0, 0x36, 0x1e, 0x8a, 0xc1, 0x28, + 0xb6, 0x22, 0x44, 0x65, 0xc9, 0x48, 0x76, 0xfa, 0x3e, 0x1d, 0x3b, 0xf6, 0x31, 0xfa, 0x28, 0x85, + 0xbe, 0x43, 0xb1, 0x64, 0x9b, 0x66, 0x68, 0x9b, 0xc9, 0x07, 0xeb, 0xff, 0xbe, 0x73, 0x6c, 0x1d, + 0x6f, 0x46, 0xa5, 0xa4, 0x9c, 0x40, 0x9c, 0x69, 0x68, 0xcb, 0xba, 0xda, 0x07, 0x30, 0x95, 0x79, + 0x2e, 0x05, 0xac, 0x14, 0x4f, 0x52, 0xc9, 0x39, 0x49, 0x4b, 0x26, 0x05, 0x28, 0x94, 0x2c, 0xa5, + 0x3f, 0xb6, 0x49, 0x80, 0x33, 0x0d, 0x3a, 0x08, 0xec, 0x03, 0x60, 0xa1, 0x61, 0x73, 0x0e, 0x4d, + 0x7a, 0x5b, 0xed, 0xe0, 0xa3, 0xc2, 0x45, 0x41, 0x94, 0xb6, 0xfc, 0x70, 0xd4, 0x36, 0x2d, 0x18, + 0xc4, 0x42, 0xc8, 0x12, 0xd7, 0xf2, 0xe6, 0x74, 0xfa, 0xe2, 0x7a, 0xff, 0x36, 0x8a, 0x2f, 0xbb, + 0xae, 0xfe, 0xa5, 0xf7, 0xff, 0x70, 0x8e, 0x84, 0x65, 0x03, 0x67, 0xe2, 0x9c, 0xfd, 0x3d, 0x1f, + 0x35, 0x03, 0x80, 0xb6, 0x17, 0xb8, 0x2d, 0x15, 0x13, 0x34, 0xc2, 0xbc, 0x22, 0x37, 0xfd, 0xea, + 0xb3, 0xe7, 0x2a, 0xf3, 0xe7, 0x9e, 0xb7, 0x63, 0x02, 0xf3, 0xa4, 0x52, 0x5c, 0x0f, 0xdc, 0x49, + 0xef, 0x47, 0xc5, 0x1f, 0x93, 0xdf, 0x28, 0xae, 0xeb, 0x31, 0x2c, 0x9c, 0xcb, 0x2d, 0xe3, 0xc4, + 0x3a, 0x7a, 0x47, 0x38, 0xfa, 0x06, 0xbb, 0x36, 
0x94, 0x31, 0xad, 0xbd, 0xd3, 0x52, 0xe1, 0xf4, + 0x81, 0x09, 0x5a, 0x5b, 0x92, 0x92, 0xe4, 0x05, 0xc7, 0x25, 0x19, 0xfc, 0x3a, 0xe2, 0xa3, 0x4e, + 0x5a, 0x74, 0xa3, 0xf8, 0x5d, 0x03, 0x2e, 0xde, 0x1d, 0x6f, 0x9a, 0xca, 0x1c, 0x7c, 0x7f, 0x33, + 0x0b, 0xff, 0xe0, 0xc7, 0xae, 0x6b, 0xfd, 0xda, 0xb9, 0xbf, 0x68, 0x28, 0x2a, 0x39, 0x16, 0x14, + 0x48, 0x45, 0x21, 0x25, 0xc2, 0x34, 0x6f, 0x97, 0xa2, 0x60, 0xfa, 0xab, 0x1d, 0x99, 0xdb, 0xc7, + 0x93, 0xdb, 0x5b, 0x85, 0xe1, 0xb3, 0x3b, 0x5e, 0x59, 0x59, 0x98, 0x69, 0x60, 0xcb, 0xba, 0x8a, + 0x02, 0xb0, 0x34, 0xb1, 0xd7, 0x36, 0x10, 0x87, 0x99, 0x8e, 0xbb, 0x40, 0x1c, 0x05, 0xb1, 0x0d, + 0xbc, 0xb9, 0x53, 0xfb, 0x16, 0xa1, 0x30, 0xd3, 0x08, 0x75, 0x11, 0x84, 0xa2, 0x00, 0x21, 0x1b, + 0xda, 0xfe, 0x36, 0xd3, 0xcd, 0x3e, 0x02, 0x00, 0x00, 0xff, 0xff, 0x9f, 0xa1, 0x6a, 0x0e, 0xc0, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/user_lists.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/user_lists.pb.go new file mode 100644 index 0000000..342cdfb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/user_lists.pb.go @@ -0,0 +1,1405 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/user_lists.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// SimilarUserList is a list of users which are similar to users from another +// UserList. These lists are read-only and automatically created by Google. +type SimilarUserListInfo struct { + // Seed UserList from which this list is derived. 
+ SeedUserList *wrappers.StringValue `protobuf:"bytes,1,opt,name=seed_user_list,json=seedUserList,proto3" json:"seed_user_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimilarUserListInfo) Reset() { *m = SimilarUserListInfo{} } +func (m *SimilarUserListInfo) String() string { return proto.CompactTextString(m) } +func (*SimilarUserListInfo) ProtoMessage() {} +func (*SimilarUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{0} +} +func (m *SimilarUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimilarUserListInfo.Unmarshal(m, b) +} +func (m *SimilarUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimilarUserListInfo.Marshal(b, m, deterministic) +} +func (dst *SimilarUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimilarUserListInfo.Merge(dst, src) +} +func (m *SimilarUserListInfo) XXX_Size() int { + return xxx_messageInfo_SimilarUserListInfo.Size(m) +} +func (m *SimilarUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SimilarUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SimilarUserListInfo proto.InternalMessageInfo + +func (m *SimilarUserListInfo) GetSeedUserList() *wrappers.StringValue { + if m != nil { + return m.SeedUserList + } + return nil +} + +// UserList of CRM users provided by the advertiser. +type CrmBasedUserListInfo struct { + // A string that uniquely identifies a mobile application from which the data + // was collected to the Google Ads API. + // For iOS, the ID string is the 9 digit string that appears at the end of an + // App Store URL (e.g., "476943146" for "Flood-It! 2" whose App Store link is + // http://itunes.apple.com/us/app/flood-it!-2/id476943146). + // For Android, the ID string is the application's package name + // (e.g., "com.labpixies.colordrips" for "Color Drips" given Google Play link + // https://play.google.com/store/apps/details?id=com.labpixies.colordrips). + // Required when creating CrmBasedUserList for uploading mobile advertising + // IDs. + AppId *wrappers.StringValue `protobuf:"bytes,1,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // Matching key type of the list. + // Mixed data types are not allowed on the same list. + // This field is required for an ADD operation. + UploadKeyType enums.CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType `protobuf:"varint,2,opt,name=upload_key_type,json=uploadKeyType,proto3,enum=google.ads.googleads.v1.enums.CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType" json:"upload_key_type,omitempty"` + // Data source of the list. Default value is FIRST_PARTY. + // Only whitelisted customers can create third-party sourced CRM lists. 
+ DataSourceType enums.UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType `protobuf:"varint,3,opt,name=data_source_type,json=dataSourceType,proto3,enum=google.ads.googleads.v1.enums.UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType" json:"data_source_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CrmBasedUserListInfo) Reset() { *m = CrmBasedUserListInfo{} } +func (m *CrmBasedUserListInfo) String() string { return proto.CompactTextString(m) } +func (*CrmBasedUserListInfo) ProtoMessage() {} +func (*CrmBasedUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{1} +} +func (m *CrmBasedUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CrmBasedUserListInfo.Unmarshal(m, b) +} +func (m *CrmBasedUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CrmBasedUserListInfo.Marshal(b, m, deterministic) +} +func (dst *CrmBasedUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CrmBasedUserListInfo.Merge(dst, src) +} +func (m *CrmBasedUserListInfo) XXX_Size() int { + return xxx_messageInfo_CrmBasedUserListInfo.Size(m) +} +func (m *CrmBasedUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CrmBasedUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CrmBasedUserListInfo proto.InternalMessageInfo + +func (m *CrmBasedUserListInfo) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +func (m *CrmBasedUserListInfo) GetUploadKeyType() enums.CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType { + if m != nil { + return m.UploadKeyType + } + return enums.CustomerMatchUploadKeyTypeEnum_UNSPECIFIED +} + +func (m *CrmBasedUserListInfo) GetDataSourceType() enums.UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType { + if m != nil { + return m.DataSourceType + } + return enums.UserListCrmDataSourceTypeEnum_UNSPECIFIED +} + +// A client defined rule based on custom parameters sent by web sites or +// uploaded by the advertiser. +type UserListRuleInfo struct { + // Rule type is used to determine how to group rule items. + // + // The default is OR of ANDs (disjunctive normal form). + // That is, rule items will be ANDed together within rule item groups and the + // groups themselves will be ORed together. + // + // Currently AND of ORs (conjunctive normal form) is only supported for + // ExpressionRuleUserList. + RuleType enums.UserListRuleTypeEnum_UserListRuleType `protobuf:"varint,1,opt,name=rule_type,json=ruleType,proto3,enum=google.ads.googleads.v1.enums.UserListRuleTypeEnum_UserListRuleType" json:"rule_type,omitempty"` + // List of rule item groups that defines this rule. + // Rule item groups are grouped together based on rule_type. 
+ RuleItemGroups []*UserListRuleItemGroupInfo `protobuf:"bytes,2,rep,name=rule_item_groups,json=ruleItemGroups,proto3" json:"rule_item_groups,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListRuleInfo) Reset() { *m = UserListRuleInfo{} } +func (m *UserListRuleInfo) String() string { return proto.CompactTextString(m) } +func (*UserListRuleInfo) ProtoMessage() {} +func (*UserListRuleInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{2} +} +func (m *UserListRuleInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListRuleInfo.Unmarshal(m, b) +} +func (m *UserListRuleInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListRuleInfo.Marshal(b, m, deterministic) +} +func (dst *UserListRuleInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListRuleInfo.Merge(dst, src) +} +func (m *UserListRuleInfo) XXX_Size() int { + return xxx_messageInfo_UserListRuleInfo.Size(m) +} +func (m *UserListRuleInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListRuleInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListRuleInfo proto.InternalMessageInfo + +func (m *UserListRuleInfo) GetRuleType() enums.UserListRuleTypeEnum_UserListRuleType { + if m != nil { + return m.RuleType + } + return enums.UserListRuleTypeEnum_UNSPECIFIED +} + +func (m *UserListRuleInfo) GetRuleItemGroups() []*UserListRuleItemGroupInfo { + if m != nil { + return m.RuleItemGroups + } + return nil +} + +// A group of rule items. +type UserListRuleItemGroupInfo struct { + // Rule items that will be grouped together based on rule_type. + RuleItems []*UserListRuleItemInfo `protobuf:"bytes,1,rep,name=rule_items,json=ruleItems,proto3" json:"rule_items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListRuleItemGroupInfo) Reset() { *m = UserListRuleItemGroupInfo{} } +func (m *UserListRuleItemGroupInfo) String() string { return proto.CompactTextString(m) } +func (*UserListRuleItemGroupInfo) ProtoMessage() {} +func (*UserListRuleItemGroupInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{3} +} +func (m *UserListRuleItemGroupInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListRuleItemGroupInfo.Unmarshal(m, b) +} +func (m *UserListRuleItemGroupInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListRuleItemGroupInfo.Marshal(b, m, deterministic) +} +func (dst *UserListRuleItemGroupInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListRuleItemGroupInfo.Merge(dst, src) +} +func (m *UserListRuleItemGroupInfo) XXX_Size() int { + return xxx_messageInfo_UserListRuleItemGroupInfo.Size(m) +} +func (m *UserListRuleItemGroupInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListRuleItemGroupInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListRuleItemGroupInfo proto.InternalMessageInfo + +func (m *UserListRuleItemGroupInfo) GetRuleItems() []*UserListRuleItemInfo { + if m != nil { + return m.RuleItems + } + return nil +} + +// An atomic rule fragment. +type UserListRuleItemInfo struct { + // Rule variable name. It should match the corresponding key name fired + // by the pixel. 
+ // A name must begin with US-ascii letters or underscore or UTF8 code that is + // greater than 127 and consist of US-ascii letters or digits or underscore or + // UTF8 code that is greater than 127. + // For websites, there are two built-in variable URL (name = 'url__') and + // referrer URL (name = 'ref_url__'). + // This field must be populated when creating a new rule item. + Name *wrappers.StringValue `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An atomic rule fragment. + // + // Types that are valid to be assigned to RuleItem: + // *UserListRuleItemInfo_NumberRuleItem + // *UserListRuleItemInfo_StringRuleItem + // *UserListRuleItemInfo_DateRuleItem + RuleItem isUserListRuleItemInfo_RuleItem `protobuf_oneof:"rule_item"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListRuleItemInfo) Reset() { *m = UserListRuleItemInfo{} } +func (m *UserListRuleItemInfo) String() string { return proto.CompactTextString(m) } +func (*UserListRuleItemInfo) ProtoMessage() {} +func (*UserListRuleItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{4} +} +func (m *UserListRuleItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListRuleItemInfo.Unmarshal(m, b) +} +func (m *UserListRuleItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListRuleItemInfo.Marshal(b, m, deterministic) +} +func (dst *UserListRuleItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListRuleItemInfo.Merge(dst, src) +} +func (m *UserListRuleItemInfo) XXX_Size() int { + return xxx_messageInfo_UserListRuleItemInfo.Size(m) +} +func (m *UserListRuleItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListRuleItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListRuleItemInfo proto.InternalMessageInfo + +func (m *UserListRuleItemInfo) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +type isUserListRuleItemInfo_RuleItem interface { + isUserListRuleItemInfo_RuleItem() +} + +type UserListRuleItemInfo_NumberRuleItem struct { + NumberRuleItem *UserListNumberRuleItemInfo `protobuf:"bytes,2,opt,name=number_rule_item,json=numberRuleItem,proto3,oneof"` +} + +type UserListRuleItemInfo_StringRuleItem struct { + StringRuleItem *UserListStringRuleItemInfo `protobuf:"bytes,3,opt,name=string_rule_item,json=stringRuleItem,proto3,oneof"` +} + +type UserListRuleItemInfo_DateRuleItem struct { + DateRuleItem *UserListDateRuleItemInfo `protobuf:"bytes,4,opt,name=date_rule_item,json=dateRuleItem,proto3,oneof"` +} + +func (*UserListRuleItemInfo_NumberRuleItem) isUserListRuleItemInfo_RuleItem() {} + +func (*UserListRuleItemInfo_StringRuleItem) isUserListRuleItemInfo_RuleItem() {} + +func (*UserListRuleItemInfo_DateRuleItem) isUserListRuleItemInfo_RuleItem() {} + +func (m *UserListRuleItemInfo) GetRuleItem() isUserListRuleItemInfo_RuleItem { + if m != nil { + return m.RuleItem + } + return nil +} + +func (m *UserListRuleItemInfo) GetNumberRuleItem() *UserListNumberRuleItemInfo { + if x, ok := m.GetRuleItem().(*UserListRuleItemInfo_NumberRuleItem); ok { + return x.NumberRuleItem + } + return nil +} + +func (m *UserListRuleItemInfo) GetStringRuleItem() *UserListStringRuleItemInfo { + if x, ok := m.GetRuleItem().(*UserListRuleItemInfo_StringRuleItem); ok { + return x.StringRuleItem + } + return nil +} + +func (m *UserListRuleItemInfo) GetDateRuleItem() *UserListDateRuleItemInfo { + if x, 
ok := m.GetRuleItem().(*UserListRuleItemInfo_DateRuleItem); ok { + return x.DateRuleItem + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*UserListRuleItemInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UserListRuleItemInfo_OneofMarshaler, _UserListRuleItemInfo_OneofUnmarshaler, _UserListRuleItemInfo_OneofSizer, []interface{}{ + (*UserListRuleItemInfo_NumberRuleItem)(nil), + (*UserListRuleItemInfo_StringRuleItem)(nil), + (*UserListRuleItemInfo_DateRuleItem)(nil), + } +} + +func _UserListRuleItemInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UserListRuleItemInfo) + // rule_item + switch x := m.RuleItem.(type) { + case *UserListRuleItemInfo_NumberRuleItem: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NumberRuleItem); err != nil { + return err + } + case *UserListRuleItemInfo_StringRuleItem: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StringRuleItem); err != nil { + return err + } + case *UserListRuleItemInfo_DateRuleItem: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateRuleItem); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UserListRuleItemInfo.RuleItem has unexpected type %T", x) + } + return nil +} + +func _UserListRuleItemInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UserListRuleItemInfo) + switch tag { + case 2: // rule_item.number_rule_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UserListNumberRuleItemInfo) + err := b.DecodeMessage(msg) + m.RuleItem = &UserListRuleItemInfo_NumberRuleItem{msg} + return true, err + case 3: // rule_item.string_rule_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UserListStringRuleItemInfo) + err := b.DecodeMessage(msg) + m.RuleItem = &UserListRuleItemInfo_StringRuleItem{msg} + return true, err + case 4: // rule_item.date_rule_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UserListDateRuleItemInfo) + err := b.DecodeMessage(msg) + m.RuleItem = &UserListRuleItemInfo_DateRuleItem{msg} + return true, err + default: + return false, nil + } +} + +func _UserListRuleItemInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UserListRuleItemInfo) + // rule_item + switch x := m.RuleItem.(type) { + case *UserListRuleItemInfo_NumberRuleItem: + s := proto.Size(x.NumberRuleItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserListRuleItemInfo_StringRuleItem: + s := proto.Size(x.StringRuleItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserListRuleItemInfo_DateRuleItem: + s := proto.Size(x.DateRuleItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A rule item composed of date operation. +type UserListDateRuleItemInfo struct { + // Date comparison operator. + // This field is required and must be populated when creating new date + // rule item. 
+ Operator enums.UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator" json:"operator,omitempty"` + // String representing date value to be compared with the rule variable. + // Supported date format is YYYY-MM-DD. + // Times are reported in the customer's time zone. + Value *wrappers.StringValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // The relative date value of the right hand side denoted by number of days + // offset from now. The value field will override this field when both are + // present. + OffsetInDays *wrappers.Int64Value `protobuf:"bytes,3,opt,name=offset_in_days,json=offsetInDays,proto3" json:"offset_in_days,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListDateRuleItemInfo) Reset() { *m = UserListDateRuleItemInfo{} } +func (m *UserListDateRuleItemInfo) String() string { return proto.CompactTextString(m) } +func (*UserListDateRuleItemInfo) ProtoMessage() {} +func (*UserListDateRuleItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{5} +} +func (m *UserListDateRuleItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListDateRuleItemInfo.Unmarshal(m, b) +} +func (m *UserListDateRuleItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListDateRuleItemInfo.Marshal(b, m, deterministic) +} +func (dst *UserListDateRuleItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListDateRuleItemInfo.Merge(dst, src) +} +func (m *UserListDateRuleItemInfo) XXX_Size() int { + return xxx_messageInfo_UserListDateRuleItemInfo.Size(m) +} +func (m *UserListDateRuleItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListDateRuleItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListDateRuleItemInfo proto.InternalMessageInfo + +func (m *UserListDateRuleItemInfo) GetOperator() enums.UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator { + if m != nil { + return m.Operator + } + return enums.UserListDateRuleItemOperatorEnum_UNSPECIFIED +} + +func (m *UserListDateRuleItemInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +func (m *UserListDateRuleItemInfo) GetOffsetInDays() *wrappers.Int64Value { + if m != nil { + return m.OffsetInDays + } + return nil +} + +// A rule item composed of number operation. +type UserListNumberRuleItemInfo struct { + // Number comparison operator. + // This field is required and must be populated when creating a new number + // rule item. + Operator enums.UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator" json:"operator,omitempty"` + // Number value to be compared with the variable. + // This field is required and must be populated when creating a new number + // rule item. 
+ Value *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListNumberRuleItemInfo) Reset() { *m = UserListNumberRuleItemInfo{} } +func (m *UserListNumberRuleItemInfo) String() string { return proto.CompactTextString(m) } +func (*UserListNumberRuleItemInfo) ProtoMessage() {} +func (*UserListNumberRuleItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{6} +} +func (m *UserListNumberRuleItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListNumberRuleItemInfo.Unmarshal(m, b) +} +func (m *UserListNumberRuleItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListNumberRuleItemInfo.Marshal(b, m, deterministic) +} +func (dst *UserListNumberRuleItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListNumberRuleItemInfo.Merge(dst, src) +} +func (m *UserListNumberRuleItemInfo) XXX_Size() int { + return xxx_messageInfo_UserListNumberRuleItemInfo.Size(m) +} +func (m *UserListNumberRuleItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListNumberRuleItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListNumberRuleItemInfo proto.InternalMessageInfo + +func (m *UserListNumberRuleItemInfo) GetOperator() enums.UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator { + if m != nil { + return m.Operator + } + return enums.UserListNumberRuleItemOperatorEnum_UNSPECIFIED +} + +func (m *UserListNumberRuleItemInfo) GetValue() *wrappers.DoubleValue { + if m != nil { + return m.Value + } + return nil +} + +// A rule item fragment composed of date operation. +type UserListStringRuleItemInfo struct { + // String comparison operator. + // This field is required and must be populated when creating a new string + // rule item. + Operator enums.UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator" json:"operator,omitempty"` + // The right hand side of the string rule item. For URLs or referrer URLs, + // the value can not contain illegal URL chars such as newlines, quotes, + // tabs, or parentheses. This field is required and must be populated when + // creating a new string rule item. 
+ Value *wrappers.StringValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListStringRuleItemInfo) Reset() { *m = UserListStringRuleItemInfo{} } +func (m *UserListStringRuleItemInfo) String() string { return proto.CompactTextString(m) } +func (*UserListStringRuleItemInfo) ProtoMessage() {} +func (*UserListStringRuleItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{7} +} +func (m *UserListStringRuleItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListStringRuleItemInfo.Unmarshal(m, b) +} +func (m *UserListStringRuleItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListStringRuleItemInfo.Marshal(b, m, deterministic) +} +func (dst *UserListStringRuleItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListStringRuleItemInfo.Merge(dst, src) +} +func (m *UserListStringRuleItemInfo) XXX_Size() int { + return xxx_messageInfo_UserListStringRuleItemInfo.Size(m) +} +func (m *UserListStringRuleItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListStringRuleItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListStringRuleItemInfo proto.InternalMessageInfo + +func (m *UserListStringRuleItemInfo) GetOperator() enums.UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator { + if m != nil { + return m.Operator + } + return enums.UserListStringRuleItemOperatorEnum_UNSPECIFIED +} + +func (m *UserListStringRuleItemInfo) GetValue() *wrappers.StringValue { + if m != nil { + return m.Value + } + return nil +} + +// User lists defined by combining two rules, left operand and right operand. +// There are two operators: AND where left operand and right operand have to be +// true; AND_NOT where left operand is true but right operand is false. +type CombinedRuleUserListInfo struct { + // Left operand of the combined rule. + // This field is required and must be populated when creating new combined + // rule based user list. + LeftOperand *UserListRuleInfo `protobuf:"bytes,1,opt,name=left_operand,json=leftOperand,proto3" json:"left_operand,omitempty"` + // Right operand of the combined rule. + // This field is required and must be populated when creating new combined + // rule based user list. + RightOperand *UserListRuleInfo `protobuf:"bytes,2,opt,name=right_operand,json=rightOperand,proto3" json:"right_operand,omitempty"` + // Operator to connect the two operands. + // + // Required for creating a combined rule user list. 
+ RuleOperator enums.UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator `protobuf:"varint,3,opt,name=rule_operator,json=ruleOperator,proto3,enum=google.ads.googleads.v1.enums.UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator" json:"rule_operator,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CombinedRuleUserListInfo) Reset() { *m = CombinedRuleUserListInfo{} } +func (m *CombinedRuleUserListInfo) String() string { return proto.CompactTextString(m) } +func (*CombinedRuleUserListInfo) ProtoMessage() {} +func (*CombinedRuleUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{8} +} +func (m *CombinedRuleUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CombinedRuleUserListInfo.Unmarshal(m, b) +} +func (m *CombinedRuleUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CombinedRuleUserListInfo.Marshal(b, m, deterministic) +} +func (dst *CombinedRuleUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CombinedRuleUserListInfo.Merge(dst, src) +} +func (m *CombinedRuleUserListInfo) XXX_Size() int { + return xxx_messageInfo_CombinedRuleUserListInfo.Size(m) +} +func (m *CombinedRuleUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CombinedRuleUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CombinedRuleUserListInfo proto.InternalMessageInfo + +func (m *CombinedRuleUserListInfo) GetLeftOperand() *UserListRuleInfo { + if m != nil { + return m.LeftOperand + } + return nil +} + +func (m *CombinedRuleUserListInfo) GetRightOperand() *UserListRuleInfo { + if m != nil { + return m.RightOperand + } + return nil +} + +func (m *CombinedRuleUserListInfo) GetRuleOperator() enums.UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator { + if m != nil { + return m.RuleOperator + } + return enums.UserListCombinedRuleOperatorEnum_UNSPECIFIED +} + +// Visitors of a page during specific dates. +type DateSpecificRuleUserListInfo struct { + // Boolean rule that defines visitor of a page. + // + // Required for creating a date specific rule user list. + Rule *UserListRuleInfo `protobuf:"bytes,1,opt,name=rule,proto3" json:"rule,omitempty"` + // Start date of users visit. If set to 2000-01-01, then the list includes all + // users before end_date. The date's format should be YYYY-MM-DD. + // + // Required for creating a data specific rule user list. + StartDate *wrappers.StringValue `protobuf:"bytes,2,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // End date of users visit. If set to 2037-12-30, then the list includes all + // users after start_date. The date's format should be YYYY-MM-DD. + // + // Required for creating a data specific rule user list. 
+ EndDate *wrappers.StringValue `protobuf:"bytes,3,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateSpecificRuleUserListInfo) Reset() { *m = DateSpecificRuleUserListInfo{} } +func (m *DateSpecificRuleUserListInfo) String() string { return proto.CompactTextString(m) } +func (*DateSpecificRuleUserListInfo) ProtoMessage() {} +func (*DateSpecificRuleUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{9} +} +func (m *DateSpecificRuleUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateSpecificRuleUserListInfo.Unmarshal(m, b) +} +func (m *DateSpecificRuleUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateSpecificRuleUserListInfo.Marshal(b, m, deterministic) +} +func (dst *DateSpecificRuleUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateSpecificRuleUserListInfo.Merge(dst, src) +} +func (m *DateSpecificRuleUserListInfo) XXX_Size() int { + return xxx_messageInfo_DateSpecificRuleUserListInfo.Size(m) +} +func (m *DateSpecificRuleUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DateSpecificRuleUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DateSpecificRuleUserListInfo proto.InternalMessageInfo + +func (m *DateSpecificRuleUserListInfo) GetRule() *UserListRuleInfo { + if m != nil { + return m.Rule + } + return nil +} + +func (m *DateSpecificRuleUserListInfo) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *DateSpecificRuleUserListInfo) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +// Visitors of a page. The page visit is defined by one boolean rule expression. +type ExpressionRuleUserListInfo struct { + // Boolean rule that defines this user list. The rule consists of a list of + // rule item groups and each rule item group consists of a list of rule items. + // All the rule item groups are ORed or ANDed together for evaluation based on + // rule.rule_type. + // + // Required for creating an expression rule user list. 
+ Rule *UserListRuleInfo `protobuf:"bytes,1,opt,name=rule,proto3" json:"rule,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExpressionRuleUserListInfo) Reset() { *m = ExpressionRuleUserListInfo{} } +func (m *ExpressionRuleUserListInfo) String() string { return proto.CompactTextString(m) } +func (*ExpressionRuleUserListInfo) ProtoMessage() {} +func (*ExpressionRuleUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{10} +} +func (m *ExpressionRuleUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExpressionRuleUserListInfo.Unmarshal(m, b) +} +func (m *ExpressionRuleUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExpressionRuleUserListInfo.Marshal(b, m, deterministic) +} +func (dst *ExpressionRuleUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExpressionRuleUserListInfo.Merge(dst, src) +} +func (m *ExpressionRuleUserListInfo) XXX_Size() int { + return xxx_messageInfo_ExpressionRuleUserListInfo.Size(m) +} +func (m *ExpressionRuleUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExpressionRuleUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExpressionRuleUserListInfo proto.InternalMessageInfo + +func (m *ExpressionRuleUserListInfo) GetRule() *UserListRuleInfo { + if m != nil { + return m.Rule + } + return nil +} + +// Representation of a userlist that is generated by a rule. +type RuleBasedUserListInfo struct { + // The status of pre-population. The field is default to NONE if not set which + // means the previous users will not be considered. If set to REQUESTED, past + // site visitors or app users who match the list definition will be included + // in the list (works on the Display Network only). This will only + // add past users from within the last 30 days, depending on the + // list's membership duration and the date when the remarketing tag is added. + // The status will be updated to FINISHED once request is processed, or FAILED + // if the request fails. + PrepopulationStatus enums.UserListPrepopulationStatusEnum_UserListPrepopulationStatus `protobuf:"varint,1,opt,name=prepopulation_status,json=prepopulationStatus,proto3,enum=google.ads.googleads.v1.enums.UserListPrepopulationStatusEnum_UserListPrepopulationStatus" json:"prepopulation_status,omitempty"` + // Subtypes of rule based user lists. 
+ // + // Types that are valid to be assigned to RuleBasedUserList: + // *RuleBasedUserListInfo_CombinedRuleUserList + // *RuleBasedUserListInfo_DateSpecificRuleUserList + // *RuleBasedUserListInfo_ExpressionRuleUserList + RuleBasedUserList isRuleBasedUserListInfo_RuleBasedUserList `protobuf_oneof:"rule_based_user_list"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RuleBasedUserListInfo) Reset() { *m = RuleBasedUserListInfo{} } +func (m *RuleBasedUserListInfo) String() string { return proto.CompactTextString(m) } +func (*RuleBasedUserListInfo) ProtoMessage() {} +func (*RuleBasedUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{11} +} +func (m *RuleBasedUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RuleBasedUserListInfo.Unmarshal(m, b) +} +func (m *RuleBasedUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RuleBasedUserListInfo.Marshal(b, m, deterministic) +} +func (dst *RuleBasedUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_RuleBasedUserListInfo.Merge(dst, src) +} +func (m *RuleBasedUserListInfo) XXX_Size() int { + return xxx_messageInfo_RuleBasedUserListInfo.Size(m) +} +func (m *RuleBasedUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_RuleBasedUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_RuleBasedUserListInfo proto.InternalMessageInfo + +func (m *RuleBasedUserListInfo) GetPrepopulationStatus() enums.UserListPrepopulationStatusEnum_UserListPrepopulationStatus { + if m != nil { + return m.PrepopulationStatus + } + return enums.UserListPrepopulationStatusEnum_UNSPECIFIED +} + +type isRuleBasedUserListInfo_RuleBasedUserList interface { + isRuleBasedUserListInfo_RuleBasedUserList() +} + +type RuleBasedUserListInfo_CombinedRuleUserList struct { + CombinedRuleUserList *CombinedRuleUserListInfo `protobuf:"bytes,2,opt,name=combined_rule_user_list,json=combinedRuleUserList,proto3,oneof"` +} + +type RuleBasedUserListInfo_DateSpecificRuleUserList struct { + DateSpecificRuleUserList *DateSpecificRuleUserListInfo `protobuf:"bytes,3,opt,name=date_specific_rule_user_list,json=dateSpecificRuleUserList,proto3,oneof"` +} + +type RuleBasedUserListInfo_ExpressionRuleUserList struct { + ExpressionRuleUserList *ExpressionRuleUserListInfo `protobuf:"bytes,4,opt,name=expression_rule_user_list,json=expressionRuleUserList,proto3,oneof"` +} + +func (*RuleBasedUserListInfo_CombinedRuleUserList) isRuleBasedUserListInfo_RuleBasedUserList() {} + +func (*RuleBasedUserListInfo_DateSpecificRuleUserList) isRuleBasedUserListInfo_RuleBasedUserList() {} + +func (*RuleBasedUserListInfo_ExpressionRuleUserList) isRuleBasedUserListInfo_RuleBasedUserList() {} + +func (m *RuleBasedUserListInfo) GetRuleBasedUserList() isRuleBasedUserListInfo_RuleBasedUserList { + if m != nil { + return m.RuleBasedUserList + } + return nil +} + +func (m *RuleBasedUserListInfo) GetCombinedRuleUserList() *CombinedRuleUserListInfo { + if x, ok := m.GetRuleBasedUserList().(*RuleBasedUserListInfo_CombinedRuleUserList); ok { + return x.CombinedRuleUserList + } + return nil +} + +func (m *RuleBasedUserListInfo) GetDateSpecificRuleUserList() *DateSpecificRuleUserListInfo { + if x, ok := m.GetRuleBasedUserList().(*RuleBasedUserListInfo_DateSpecificRuleUserList); ok { + return x.DateSpecificRuleUserList + } + return nil +} + +func (m *RuleBasedUserListInfo) GetExpressionRuleUserList() *ExpressionRuleUserListInfo { + if 
x, ok := m.GetRuleBasedUserList().(*RuleBasedUserListInfo_ExpressionRuleUserList); ok { + return x.ExpressionRuleUserList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*RuleBasedUserListInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RuleBasedUserListInfo_OneofMarshaler, _RuleBasedUserListInfo_OneofUnmarshaler, _RuleBasedUserListInfo_OneofSizer, []interface{}{ + (*RuleBasedUserListInfo_CombinedRuleUserList)(nil), + (*RuleBasedUserListInfo_DateSpecificRuleUserList)(nil), + (*RuleBasedUserListInfo_ExpressionRuleUserList)(nil), + } +} + +func _RuleBasedUserListInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RuleBasedUserListInfo) + // rule_based_user_list + switch x := m.RuleBasedUserList.(type) { + case *RuleBasedUserListInfo_CombinedRuleUserList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CombinedRuleUserList); err != nil { + return err + } + case *RuleBasedUserListInfo_DateSpecificRuleUserList: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateSpecificRuleUserList); err != nil { + return err + } + case *RuleBasedUserListInfo_ExpressionRuleUserList: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExpressionRuleUserList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RuleBasedUserListInfo.RuleBasedUserList has unexpected type %T", x) + } + return nil +} + +func _RuleBasedUserListInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RuleBasedUserListInfo) + switch tag { + case 2: // rule_based_user_list.combined_rule_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CombinedRuleUserListInfo) + err := b.DecodeMessage(msg) + m.RuleBasedUserList = &RuleBasedUserListInfo_CombinedRuleUserList{msg} + return true, err + case 3: // rule_based_user_list.date_specific_rule_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DateSpecificRuleUserListInfo) + err := b.DecodeMessage(msg) + m.RuleBasedUserList = &RuleBasedUserListInfo_DateSpecificRuleUserList{msg} + return true, err + case 4: // rule_based_user_list.expression_rule_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExpressionRuleUserListInfo) + err := b.DecodeMessage(msg) + m.RuleBasedUserList = &RuleBasedUserListInfo_ExpressionRuleUserList{msg} + return true, err + default: + return false, nil + } +} + +func _RuleBasedUserListInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RuleBasedUserListInfo) + // rule_based_user_list + switch x := m.RuleBasedUserList.(type) { + case *RuleBasedUserListInfo_CombinedRuleUserList: + s := proto.Size(x.CombinedRuleUserList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RuleBasedUserListInfo_DateSpecificRuleUserList: + s := proto.Size(x.DateSpecificRuleUserList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RuleBasedUserListInfo_ExpressionRuleUserList: + s := proto.Size(x.ExpressionRuleUserList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a user list that is a custom 
combination of user lists. +type LogicalUserListInfo struct { + // Logical list rules that define this user list. The rules are defined as a + // logical operator (ALL/ANY/NONE) and a list of user lists. All the rules are + // ANDed when they are evaluated. + // + // Required for creating a logical user list. + Rules []*UserListLogicalRuleInfo `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogicalUserListInfo) Reset() { *m = LogicalUserListInfo{} } +func (m *LogicalUserListInfo) String() string { return proto.CompactTextString(m) } +func (*LogicalUserListInfo) ProtoMessage() {} +func (*LogicalUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{12} +} +func (m *LogicalUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogicalUserListInfo.Unmarshal(m, b) +} +func (m *LogicalUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogicalUserListInfo.Marshal(b, m, deterministic) +} +func (dst *LogicalUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogicalUserListInfo.Merge(dst, src) +} +func (m *LogicalUserListInfo) XXX_Size() int { + return xxx_messageInfo_LogicalUserListInfo.Size(m) +} +func (m *LogicalUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LogicalUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LogicalUserListInfo proto.InternalMessageInfo + +func (m *LogicalUserListInfo) GetRules() []*UserListLogicalRuleInfo { + if m != nil { + return m.Rules + } + return nil +} + +// A user list logical rule. A rule has a logical operator (and/or/not) and a +// list of user lists as operands. +type UserListLogicalRuleInfo struct { + // The logical operator of the rule. + Operator enums.UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator `protobuf:"varint,1,opt,name=operator,proto3,enum=google.ads.googleads.v1.enums.UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator" json:"operator,omitempty"` + // The list of operands of the rule. 
+ RuleOperands []*LogicalUserListOperandInfo `protobuf:"bytes,2,rep,name=rule_operands,json=ruleOperands,proto3" json:"rule_operands,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListLogicalRuleInfo) Reset() { *m = UserListLogicalRuleInfo{} } +func (m *UserListLogicalRuleInfo) String() string { return proto.CompactTextString(m) } +func (*UserListLogicalRuleInfo) ProtoMessage() {} +func (*UserListLogicalRuleInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{13} +} +func (m *UserListLogicalRuleInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListLogicalRuleInfo.Unmarshal(m, b) +} +func (m *UserListLogicalRuleInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListLogicalRuleInfo.Marshal(b, m, deterministic) +} +func (dst *UserListLogicalRuleInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListLogicalRuleInfo.Merge(dst, src) +} +func (m *UserListLogicalRuleInfo) XXX_Size() int { + return xxx_messageInfo_UserListLogicalRuleInfo.Size(m) +} +func (m *UserListLogicalRuleInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListLogicalRuleInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListLogicalRuleInfo proto.InternalMessageInfo + +func (m *UserListLogicalRuleInfo) GetOperator() enums.UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator { + if m != nil { + return m.Operator + } + return enums.UserListLogicalRuleOperatorEnum_UNSPECIFIED +} + +func (m *UserListLogicalRuleInfo) GetRuleOperands() []*LogicalUserListOperandInfo { + if m != nil { + return m.RuleOperands + } + return nil +} + +// Operand of logical user list that consists of a user list. +type LogicalUserListOperandInfo struct { + // Resource name of a user list as an operand. + UserList *wrappers.StringValue `protobuf:"bytes,1,opt,name=user_list,json=userList,proto3" json:"user_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogicalUserListOperandInfo) Reset() { *m = LogicalUserListOperandInfo{} } +func (m *LogicalUserListOperandInfo) String() string { return proto.CompactTextString(m) } +func (*LogicalUserListOperandInfo) ProtoMessage() {} +func (*LogicalUserListOperandInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{14} +} +func (m *LogicalUserListOperandInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogicalUserListOperandInfo.Unmarshal(m, b) +} +func (m *LogicalUserListOperandInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogicalUserListOperandInfo.Marshal(b, m, deterministic) +} +func (dst *LogicalUserListOperandInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogicalUserListOperandInfo.Merge(dst, src) +} +func (m *LogicalUserListOperandInfo) XXX_Size() int { + return xxx_messageInfo_LogicalUserListOperandInfo.Size(m) +} +func (m *LogicalUserListOperandInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LogicalUserListOperandInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LogicalUserListOperandInfo proto.InternalMessageInfo + +func (m *LogicalUserListOperandInfo) GetUserList() *wrappers.StringValue { + if m != nil { + return m.UserList + } + return nil +} + +// User list targeting as a collection of conversions or remarketing actions. +type BasicUserListInfo struct { + // Actions associated with this user list. 
+ Actions []*UserListActionInfo `protobuf:"bytes,1,rep,name=actions,proto3" json:"actions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BasicUserListInfo) Reset() { *m = BasicUserListInfo{} } +func (m *BasicUserListInfo) String() string { return proto.CompactTextString(m) } +func (*BasicUserListInfo) ProtoMessage() {} +func (*BasicUserListInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{15} +} +func (m *BasicUserListInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BasicUserListInfo.Unmarshal(m, b) +} +func (m *BasicUserListInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BasicUserListInfo.Marshal(b, m, deterministic) +} +func (dst *BasicUserListInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_BasicUserListInfo.Merge(dst, src) +} +func (m *BasicUserListInfo) XXX_Size() int { + return xxx_messageInfo_BasicUserListInfo.Size(m) +} +func (m *BasicUserListInfo) XXX_DiscardUnknown() { + xxx_messageInfo_BasicUserListInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_BasicUserListInfo proto.InternalMessageInfo + +func (m *BasicUserListInfo) GetActions() []*UserListActionInfo { + if m != nil { + return m.Actions + } + return nil +} + +// Represents an action type used for building remarketing user lists. +type UserListActionInfo struct { + // Subtypes of user list action. + // + // Types that are valid to be assigned to UserListAction: + // *UserListActionInfo_ConversionAction + // *UserListActionInfo_RemarketingAction + UserListAction isUserListActionInfo_UserListAction `protobuf_oneof:"user_list_action"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListActionInfo) Reset() { *m = UserListActionInfo{} } +func (m *UserListActionInfo) String() string { return proto.CompactTextString(m) } +func (*UserListActionInfo) ProtoMessage() {} +func (*UserListActionInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_user_lists_e2e607ee8d2b2ff7, []int{16} +} +func (m *UserListActionInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListActionInfo.Unmarshal(m, b) +} +func (m *UserListActionInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListActionInfo.Marshal(b, m, deterministic) +} +func (dst *UserListActionInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListActionInfo.Merge(dst, src) +} +func (m *UserListActionInfo) XXX_Size() int { + return xxx_messageInfo_UserListActionInfo.Size(m) +} +func (m *UserListActionInfo) XXX_DiscardUnknown() { + xxx_messageInfo_UserListActionInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListActionInfo proto.InternalMessageInfo + +type isUserListActionInfo_UserListAction interface { + isUserListActionInfo_UserListAction() +} + +type UserListActionInfo_ConversionAction struct { + ConversionAction *wrappers.StringValue `protobuf:"bytes,1,opt,name=conversion_action,json=conversionAction,proto3,oneof"` +} + +type UserListActionInfo_RemarketingAction struct { + RemarketingAction *wrappers.StringValue `protobuf:"bytes,2,opt,name=remarketing_action,json=remarketingAction,proto3,oneof"` +} + +func (*UserListActionInfo_ConversionAction) isUserListActionInfo_UserListAction() {} + +func (*UserListActionInfo_RemarketingAction) isUserListActionInfo_UserListAction() {} + +func (m *UserListActionInfo) GetUserListAction() 
isUserListActionInfo_UserListAction { + if m != nil { + return m.UserListAction + } + return nil +} + +func (m *UserListActionInfo) GetConversionAction() *wrappers.StringValue { + if x, ok := m.GetUserListAction().(*UserListActionInfo_ConversionAction); ok { + return x.ConversionAction + } + return nil +} + +func (m *UserListActionInfo) GetRemarketingAction() *wrappers.StringValue { + if x, ok := m.GetUserListAction().(*UserListActionInfo_RemarketingAction); ok { + return x.RemarketingAction + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*UserListActionInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UserListActionInfo_OneofMarshaler, _UserListActionInfo_OneofUnmarshaler, _UserListActionInfo_OneofSizer, []interface{}{ + (*UserListActionInfo_ConversionAction)(nil), + (*UserListActionInfo_RemarketingAction)(nil), + } +} + +func _UserListActionInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UserListActionInfo) + // user_list_action + switch x := m.UserListAction.(type) { + case *UserListActionInfo_ConversionAction: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConversionAction); err != nil { + return err + } + case *UserListActionInfo_RemarketingAction: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RemarketingAction); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UserListActionInfo.UserListAction has unexpected type %T", x) + } + return nil +} + +func _UserListActionInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UserListActionInfo) + switch tag { + case 1: // user_list_action.conversion_action + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.UserListAction = &UserListActionInfo_ConversionAction{msg} + return true, err + case 2: // user_list_action.remarketing_action + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.UserListAction = &UserListActionInfo_RemarketingAction{msg} + return true, err + default: + return false, nil + } +} + +func _UserListActionInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UserListActionInfo) + // user_list_action + switch x := m.UserListAction.(type) { + case *UserListActionInfo_ConversionAction: + s := proto.Size(x.ConversionAction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserListActionInfo_RemarketingAction: + s := proto.Size(x.RemarketingAction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*SimilarUserListInfo)(nil), "google.ads.googleads.v1.common.SimilarUserListInfo") + proto.RegisterType((*CrmBasedUserListInfo)(nil), "google.ads.googleads.v1.common.CrmBasedUserListInfo") + proto.RegisterType((*UserListRuleInfo)(nil), "google.ads.googleads.v1.common.UserListRuleInfo") + proto.RegisterType((*UserListRuleItemGroupInfo)(nil), "google.ads.googleads.v1.common.UserListRuleItemGroupInfo") + proto.RegisterType((*UserListRuleItemInfo)(nil), "google.ads.googleads.v1.common.UserListRuleItemInfo") + 
proto.RegisterType((*UserListDateRuleItemInfo)(nil), "google.ads.googleads.v1.common.UserListDateRuleItemInfo") + proto.RegisterType((*UserListNumberRuleItemInfo)(nil), "google.ads.googleads.v1.common.UserListNumberRuleItemInfo") + proto.RegisterType((*UserListStringRuleItemInfo)(nil), "google.ads.googleads.v1.common.UserListStringRuleItemInfo") + proto.RegisterType((*CombinedRuleUserListInfo)(nil), "google.ads.googleads.v1.common.CombinedRuleUserListInfo") + proto.RegisterType((*DateSpecificRuleUserListInfo)(nil), "google.ads.googleads.v1.common.DateSpecificRuleUserListInfo") + proto.RegisterType((*ExpressionRuleUserListInfo)(nil), "google.ads.googleads.v1.common.ExpressionRuleUserListInfo") + proto.RegisterType((*RuleBasedUserListInfo)(nil), "google.ads.googleads.v1.common.RuleBasedUserListInfo") + proto.RegisterType((*LogicalUserListInfo)(nil), "google.ads.googleads.v1.common.LogicalUserListInfo") + proto.RegisterType((*UserListLogicalRuleInfo)(nil), "google.ads.googleads.v1.common.UserListLogicalRuleInfo") + proto.RegisterType((*LogicalUserListOperandInfo)(nil), "google.ads.googleads.v1.common.LogicalUserListOperandInfo") + proto.RegisterType((*BasicUserListInfo)(nil), "google.ads.googleads.v1.common.BasicUserListInfo") + proto.RegisterType((*UserListActionInfo)(nil), "google.ads.googleads.v1.common.UserListActionInfo") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/user_lists.proto", fileDescriptor_user_lists_e2e607ee8d2b2ff7) +} + +var fileDescriptor_user_lists_e2e607ee8d2b2ff7 = []byte{ + // 1306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x41, 0x6f, 0xdb, 0xb6, + 0x17, 0x8f, 0x9c, 0xb4, 0x4d, 0x98, 0xd4, 0xff, 0x94, 0xcd, 0xbf, 0x75, 0xbd, 0xa0, 0x28, 0x74, + 0xea, 0x49, 0x6e, 0xdc, 0x62, 0x5d, 0xdd, 0x5d, 0xec, 0xb8, 0x48, 0x82, 0xb6, 0x6b, 0x21, 0x2f, + 0xd9, 0x56, 0x04, 0x50, 0x69, 0x89, 0x76, 0x85, 0x4a, 0xa4, 0x46, 0x4a, 0x69, 0x0d, 0x0c, 0xc3, + 0x76, 0xda, 0x61, 0xd8, 0x97, 0xd8, 0x71, 0xfb, 0x1a, 0x3b, 0xf5, 0x30, 0x60, 0xd7, 0x7d, 0x83, + 0xed, 0xb8, 0x4f, 0x30, 0x90, 0xa2, 0x6c, 0xc9, 0x96, 0x12, 0xa6, 0xc5, 0x4e, 0xa6, 0xc8, 0xf7, + 0x7e, 0xef, 0xc7, 0xc7, 0x1f, 0x1f, 0x49, 0x83, 0xd6, 0x98, 0xd2, 0x71, 0x80, 0x5b, 0xc8, 0xe3, + 0xaa, 0x29, 0x5a, 0x27, 0x3b, 0x2d, 0x97, 0x86, 0x21, 0x25, 0xad, 0x84, 0x63, 0xe6, 0x04, 0x3e, + 0x8f, 0xb9, 0x15, 0x31, 0x1a, 0x53, 0x78, 0x33, 0xb5, 0xb2, 0x90, 0xc7, 0xad, 0xa9, 0x83, 0x75, + 0xb2, 0x63, 0xa5, 0x0e, 0xcd, 0x5e, 0x15, 0x20, 0x26, 0x49, 0xc8, 0x5b, 0x6e, 0xc2, 0x63, 0x1a, + 0x62, 0xe6, 0x84, 0x28, 0x76, 0x5f, 0x39, 0x49, 0x14, 0x50, 0xe4, 0x39, 0xaf, 0xf1, 0xc4, 0x89, + 0x27, 0x11, 0x4e, 0x63, 0x34, 0xfb, 0xa7, 0x63, 0x4c, 0x39, 0x39, 0x2e, 0x0d, 0x87, 0x3e, 0xc1, + 0x9e, 0xc3, 0x92, 0x00, 0x3b, 0x34, 0xc2, 0x0c, 0xc5, 0x94, 0x29, 0x94, 0x9e, 0x36, 0x0a, 0x0b, + 0x1d, 0x0f, 0xc5, 0xc8, 0xe1, 0x34, 0x61, 0x2e, 0xce, 0x33, 0x79, 0xa4, 0x8b, 0xe1, 0xa1, 0x18, + 0xa7, 0x2c, 0xfc, 0x18, 0x87, 0xf3, 0x54, 0x76, 0x75, 0x61, 0x02, 0x3a, 0xf6, 0x5d, 0x14, 0x94, + 0xce, 0x67, 0x4f, 0x17, 0x84, 0x24, 0xe1, 0x10, 0xb3, 0x6a, 0x36, 0xda, 0x89, 0x89, 0x18, 0x8e, + 0x68, 0x94, 0x04, 0x28, 0xf6, 0x29, 0x71, 0x78, 0x8c, 0xe2, 0x44, 0xc9, 0xa0, 0x79, 0x5f, 0x17, + 0x43, 0xb2, 0xc8, 0x65, 0x54, 0x7b, 0x16, 0x3c, 0x66, 0x3e, 0x19, 0x57, 0xcf, 0x42, 0x09, 0xb1, + 0x25, 0xbf, 0x86, 0xc9, 0xa8, 0xf5, 0x86, 0xa1, 0x28, 0xc2, 0x2c, 0x63, 0xb8, 0x9d, 0x05, 0x8a, + 0xfc, 0x16, 0x22, 0x84, 0xc6, 0x72, 0x16, 0x6a, 0xd4, 0xfc, 0x0a, 0x5c, 0x1d, 0xf8, 0xa1, 0x1f, 
+ 0x20, 0x76, 0xc8, 0x31, 0x7b, 0xe2, 0xf3, 0xf8, 0x80, 0x8c, 0x28, 0xec, 0x81, 0x3a, 0xc7, 0xd8, + 0x73, 0xa6, 0x34, 0x1a, 0xc6, 0x2d, 0xe3, 0xf6, 0x7a, 0x7b, 0x5b, 0x69, 0xdd, 0xca, 0xa2, 0x59, + 0x03, 0x49, 0xef, 0x08, 0x05, 0x09, 0xb6, 0x37, 0x84, 0x4f, 0x86, 0x63, 0xfe, 0x51, 0x03, 0x5b, + 0xbb, 0x2c, 0xec, 0x21, 0x3e, 0xeb, 0x94, 0xe0, 0x77, 0xc1, 0x45, 0x14, 0x45, 0x8e, 0xef, 0x69, + 0x81, 0x5e, 0x40, 0x51, 0x74, 0xe0, 0xc1, 0xef, 0x0c, 0xf0, 0xbf, 0xb9, 0x5d, 0xd2, 0xa8, 0xdd, + 0x32, 0x6e, 0xd7, 0xdb, 0x5f, 0x5a, 0x55, 0x5b, 0x51, 0xa6, 0xd2, 0xda, 0x55, 0x5b, 0xed, 0xa9, + 0xd8, 0x69, 0x87, 0x12, 0xe2, 0x31, 0x9e, 0x7c, 0x3e, 0x89, 0xf0, 0x23, 0x92, 0x84, 0xa7, 0x0c, + 0xdb, 0x97, 0x93, 0xfc, 0xa7, 0xa0, 0xb0, 0x39, 0xbf, 0x3f, 0x1a, 0xcb, 0x92, 0xc3, 0xd1, 0x19, + 0x1c, 0xb2, 0xf9, 0xef, 0xb2, 0xb0, 0x8f, 0x62, 0x34, 0x90, 0x00, 0x53, 0x0a, 0x95, 0xa3, 0x76, + 0xdd, 0x2b, 0x7c, 0x9b, 0x7f, 0x1a, 0x60, 0x33, 0xb3, 0xb6, 0x93, 0x00, 0xcb, 0x7c, 0x22, 0xb0, + 0x36, 0x55, 0x97, 0x4c, 0x69, 0xbd, 0xdd, 0xd7, 0xe4, 0x23, 0x30, 0x16, 0x68, 0x64, 0x9d, 0xf6, + 0x2a, 0x53, 0x2d, 0xe8, 0x82, 0xcd, 0x99, 0x00, 0xc7, 0x8c, 0x26, 0x11, 0x6f, 0xd4, 0x6e, 0x2d, + 0xdf, 0x5e, 0x6f, 0x3f, 0xb0, 0x4e, 0x2f, 0x84, 0x05, 0xd4, 0x83, 0x18, 0x87, 0x7b, 0xc2, 0x5b, + 0xf0, 0xb6, 0xeb, 0x2c, 0xdf, 0xc5, 0xcd, 0x08, 0xdc, 0xa8, 0x34, 0x86, 0x03, 0x00, 0xa6, 0x0c, + 0x78, 0xc3, 0x90, 0xb1, 0xef, 0x9d, 0x37, 0xb6, 0x0c, 0xbb, 0x96, 0x85, 0xe5, 0xe6, 0x0f, 0xcb, + 0x60, 0xab, 0xcc, 0x06, 0xde, 0x01, 0x2b, 0x04, 0x85, 0x58, 0x4b, 0xa0, 0xd2, 0x12, 0x8e, 0xc0, + 0xe6, 0x7c, 0xbd, 0x91, 0xfa, 0x5c, 0x6f, 0x77, 0x74, 0x59, 0x7e, 0x26, 0xfd, 0xf3, 0x3c, 0xf6, + 0x97, 0xec, 0x3a, 0x29, 0xf4, 0x8a, 0x38, 0xf3, 0x15, 0x41, 0x6a, 0xf0, 0x1c, 0x71, 0x52, 0xf2, + 0xf3, 0x71, 0x78, 0xa1, 0x17, 0xbe, 0x04, 0xf5, 0x62, 0x2d, 0x6f, 0xac, 0xc8, 0x28, 0x9f, 0xe8, + 0x46, 0xe9, 0xa3, 0x18, 0xcf, 0xc5, 0xd8, 0xf0, 0x72, 0x7d, 0xbd, 0x75, 0x25, 0x5b, 0x01, 0x6e, + 0xfe, 0x58, 0x03, 0x8d, 0x2a, 0x4f, 0xf8, 0x16, 0xac, 0x66, 0x45, 0x4f, 0xe9, 0xfb, 0x58, 0x53, + 0xdf, 0x79, 0xa8, 0x67, 0x0a, 0xa2, 0xa0, 0xf5, 0x32, 0x03, 0x7b, 0x1a, 0x0d, 0xb6, 0xc1, 0x85, + 0x13, 0xb1, 0xc8, 0x6a, 0x29, 0xcf, 0xa8, 0x54, 0xd2, 0x14, 0x76, 0x41, 0x9d, 0x8e, 0x46, 0x1c, + 0xc7, 0x8e, 0x4f, 0x1c, 0x0f, 0x4d, 0xb8, 0x5a, 0x9f, 0x8f, 0x16, 0x9c, 0x0f, 0x48, 0xfc, 0xf1, + 0x3d, 0x55, 0x3a, 0x53, 0x97, 0x03, 0xd2, 0x47, 0x13, 0x6e, 0xfe, 0x6e, 0x80, 0x66, 0xb5, 0x2a, + 0xe0, 0x37, 0x0b, 0xf9, 0x78, 0xa9, 0x99, 0x8f, 0x22, 0x58, 0x69, 0x46, 0xca, 0x4d, 0xce, 0x93, + 0x93, 0x3e, 0x4d, 0x86, 0x01, 0xce, 0xe7, 0xa4, 0x30, 0xa1, 0x45, 0xf9, 0x7d, 0xc0, 0x84, 0x8a, + 0x60, 0xa5, 0x13, 0x2a, 0x37, 0xf9, 0xb0, 0x45, 0x36, 0xdf, 0xd5, 0x40, 0x63, 0x57, 0xdd, 0xba, + 0x04, 0x74, 0xe1, 0x80, 0x1b, 0x80, 0x8d, 0x00, 0x8f, 0xe2, 0xf4, 0xa4, 0x26, 0xd9, 0x31, 0x77, + 0xe7, 0x5c, 0xd5, 0x4a, 0x54, 0xaa, 0x75, 0x81, 0xf2, 0x2c, 0x05, 0x81, 0x87, 0xe0, 0x32, 0xf3, + 0xc7, 0xaf, 0x66, 0xa8, 0xb5, 0xf7, 0x44, 0xdd, 0x90, 0x30, 0x19, 0xec, 0xf7, 0x06, 0xb8, 0x5c, + 0xb8, 0x65, 0xa9, 0x13, 0x4d, 0x77, 0x87, 0xe5, 0x93, 0x50, 0x9a, 0xfe, 0x32, 0x03, 0x7b, 0x83, + 0xe5, 0xbe, 0xc4, 0xa9, 0xb6, 0x2d, 0x36, 0xe2, 0x20, 0xc2, 0xae, 0x3f, 0xf2, 0xdd, 0x85, 0x84, + 0xf6, 0xc1, 0x8a, 0x70, 0x78, 0xef, 0x44, 0x4a, 0x6f, 0xf8, 0x10, 0x00, 0x1e, 0x23, 0x96, 0x5e, + 0x52, 0xb5, 0x16, 0x7b, 0x4d, 0xda, 0x0b, 0x6a, 0xf0, 0x3e, 0x58, 0xc5, 0xc4, 0x4b, 0x5d, 0x97, + 0x35, 0x5c, 0x2f, 0x61, 0xe2, 0x09, 0x47, 0x73, 0x08, 0x9a, 0x8f, 0xde, 0x46, 0x0c, 0x73, 0xee, + 0x53, 0xf2, 0xdf, 0xcc, 
0xcc, 0xfc, 0x75, 0x05, 0xfc, 0x5f, 0x74, 0x2d, 0xde, 0xb5, 0x7e, 0x32, + 0xc0, 0x56, 0xd9, 0xf5, 0x55, 0x6d, 0xb3, 0x17, 0x9a, 0xab, 0xfc, 0x3c, 0x0f, 0x31, 0x90, 0x08, + 0x85, 0x45, 0x2e, 0x19, 0xb7, 0xaf, 0x46, 0x8b, 0x9d, 0xf0, 0x6b, 0x70, 0xbd, 0xf8, 0x58, 0x99, + 0xdd, 0x30, 0x6b, 0x7a, 0xe7, 0x4b, 0xd5, 0xae, 0xdb, 0x5f, 0xb2, 0xb7, 0xdc, 0x92, 0x31, 0xf8, + 0x2d, 0xd8, 0x96, 0x27, 0x19, 0x57, 0xea, 0x9a, 0x8f, 0x9b, 0xae, 0xe6, 0xa7, 0x67, 0xc5, 0x3d, + 0x4d, 0xa0, 0xfb, 0x4b, 0x76, 0xc3, 0xab, 0x18, 0x87, 0x6f, 0xc0, 0x0d, 0x3c, 0x15, 0xc0, 0x7c, + 0xf0, 0x15, 0xbd, 0xa3, 0xbb, 0x5a, 0x41, 0xfb, 0x4b, 0xf6, 0x35, 0x5c, 0x3a, 0xda, 0xbb, 0x06, + 0xb6, 0x64, 0xb4, 0xa1, 0x50, 0xc5, 0x2c, 0xa6, 0xe9, 0x81, 0xab, 0x4f, 0xd2, 0xf7, 0x55, 0x41, + 0x2a, 0x4f, 0xc1, 0x05, 0x61, 0x9e, 0x5d, 0xae, 0xee, 0xeb, 0x6a, 0x51, 0x61, 0x4d, 0x25, 0x99, + 0xa2, 0x98, 0xff, 0x18, 0xe0, 0x7a, 0x85, 0x09, 0x3c, 0x59, 0xa8, 0xf7, 0xba, 0x42, 0xcc, 0x21, + 0x95, 0x56, 0x9b, 0x92, 0xf1, 0x5c, 0xa5, 0x77, 0xf2, 0xb5, 0x8e, 0x78, 0xd9, 0x1d, 0xf6, 0xcc, + 0xf4, 0xcf, 0xa5, 0x4b, 0xd5, 0x4e, 0x55, 0x4d, 0xb3, 0x50, 0xc4, 0xe3, 0xe6, 0x17, 0xa0, 0x59, + 0x6d, 0x0b, 0x1f, 0x80, 0xb5, 0xf3, 0x3d, 0xa8, 0x56, 0x93, 0xec, 0x31, 0x85, 0xc0, 0x95, 0x1e, + 0xe2, 0xbe, 0x5b, 0x58, 0xb1, 0x27, 0xe0, 0x12, 0x72, 0xe5, 0x6b, 0x4e, 0xad, 0x59, 0x5b, 0x77, + 0xcd, 0xba, 0xd2, 0x4d, 0x4e, 0x20, 0x83, 0x30, 0x7f, 0x33, 0x00, 0x5c, 0x1c, 0x87, 0x8f, 0xc1, + 0x15, 0x97, 0x92, 0x13, 0xcc, 0xa4, 0x7c, 0x53, 0x63, 0x1d, 0xf2, 0xfb, 0x4b, 0xf6, 0xe6, 0xcc, + 0x31, 0x05, 0x84, 0x4f, 0x01, 0x64, 0x38, 0x44, 0xec, 0x35, 0x8e, 0xc5, 0x15, 0x56, 0xa1, 0xd5, + 0xb4, 0xd0, 0xae, 0xe4, 0x3c, 0x53, 0xb8, 0x1e, 0x04, 0x9b, 0xb3, 0x87, 0x72, 0x0a, 0xd6, 0xfb, + 0xcb, 0x00, 0xa6, 0x4b, 0xc3, 0x33, 0x32, 0xd1, 0xab, 0x67, 0x53, 0xe5, 0xcf, 0x45, 0xb8, 0xe7, + 0xc6, 0x0b, 0xf5, 0x6f, 0x8b, 0x35, 0xa6, 0x01, 0x22, 0x63, 0x8b, 0xb2, 0x71, 0x6b, 0x8c, 0x89, + 0x24, 0x93, 0xbd, 0xd0, 0x23, 0x9f, 0x57, 0xfd, 0x43, 0xf4, 0x30, 0xfd, 0xf9, 0xb9, 0xb6, 0xbc, + 0xd7, 0xed, 0xfe, 0x52, 0xbb, 0xb9, 0x97, 0x82, 0x75, 0x3d, 0x6e, 0xa5, 0x4d, 0xd1, 0x3a, 0xda, + 0x11, 0xe5, 0x2b, 0xa4, 0xe4, 0x5d, 0x66, 0x70, 0xdc, 0xf5, 0xf8, 0xf1, 0xd4, 0xe0, 0xf8, 0x68, + 0xe7, 0x38, 0x35, 0xf8, 0xbb, 0x66, 0xa6, 0xbd, 0x9d, 0x4e, 0xd7, 0xe3, 0x9d, 0xce, 0xd4, 0xa4, + 0xd3, 0x39, 0xda, 0xe9, 0x74, 0x52, 0xa3, 0xe1, 0x45, 0xc9, 0xee, 0xee, 0xbf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x6b, 0xb8, 0x50, 0x9f, 0xbe, 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/value.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/value.pb.go new file mode 100644 index 0000000..906e3c5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/common/value.pb.go @@ -0,0 +1,280 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/common/value.proto + +package common // import "google.golang.org/genproto/googleapis/ads/googleads/v1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A generic data container. +type Value struct { + // A value. + // + // Types that are valid to be assigned to Value: + // *Value_BooleanValue + // *Value_Int64Value + // *Value_FloatValue + // *Value_DoubleValue + // *Value_StringValue + Value isValue_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_value_b597d7af186472f5, []int{0} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_Value interface { + isValue_Value() +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,1,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_Int64Value struct { + Int64Value int64 `protobuf:"varint,2,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type Value_FloatValue struct { + FloatValue float32 `protobuf:"fixed32,3,opt,name=float_value,json=floatValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,4,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,5,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +func (*Value_BooleanValue) isValue_Value() {} + +func (*Value_Int64Value) isValue_Value() {} + +func (*Value_FloatValue) isValue_Value() {} + +func (*Value_DoubleValue) isValue_Value() {} + +func (*Value_StringValue) isValue_Value() {} + +func (m *Value) GetValue() isValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *Value) GetBooleanValue() bool { + if x, ok := m.GetValue().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetInt64Value() int64 { + if x, ok := m.GetValue().(*Value_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *Value) GetFloatValue() float32 { + if x, ok := m.GetValue().(*Value_FloatValue); ok { + return x.FloatValue + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetValue().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetValue().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_BooleanValue)(nil), + (*Value_Int64Value)(nil), + (*Value_FloatValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_StringValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // value + switch x := m.Value.(type) { + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_Int64Value: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *Value_FloatValue: + b.EncodeVarint(3<<3 | proto.WireFixed32) + b.EncodeFixed32(uint64(math.Float32bits(x.FloatValue))) + case *Value_DoubleValue: + b.EncodeVarint(4<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_StringValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case nil: + default: + return fmt.Errorf("Value.Value has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 1: // value.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &Value_BooleanValue{x != 0} + return true, err + case 2: // value.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &Value_Int64Value{int64(x)} + return true, err + case 3: // value.float_value + if wire != proto.WireFixed32 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed32() + m.Value = &Value_FloatValue{math.Float32frombits(uint32(x))} + return true, err + case 4: // value.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Value = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 5: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &Value_StringValue{x} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // value + switch x := m.Value.(type) { + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *Value_FloatValue: + n += 1 // tag and wire + n += 4 + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Value)(nil), "google.ads.googleads.v1.common.Value") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/common/value.proto", fileDescriptor_value_b597d7af186472f5) +} + +var fileDescriptor_value_b597d7af186472f5 = []byte{ + // 329 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xd1, 0x41, 0x4b, 0xf3, 0x30, + 
0x18, 0x07, 0xf0, 0xa5, 0x7b, 0xf7, 0xaa, 0xd9, 0xbc, 0xec, 0x24, 0x22, 0xa3, 0x4e, 0x84, 0xe2, + 0x21, 0xa5, 0x28, 0x1e, 0xe2, 0xa9, 0x53, 0xd8, 0x8e, 0x63, 0x87, 0x1e, 0xa4, 0x20, 0xd9, 0x5a, + 0x43, 0x21, 0xcb, 0x33, 0x96, 0x6c, 0x1f, 0xc8, 0xa3, 0x5f, 0x44, 0xf0, 0x7b, 0x78, 0xf1, 0x53, + 0x48, 0xf2, 0x64, 0xbd, 0xe9, 0xa9, 0x0f, 0xff, 0xfe, 0xf2, 0xa7, 0x7d, 0x42, 0x6f, 0x24, 0x80, + 0x54, 0x75, 0x2a, 0x2a, 0x93, 0xe2, 0xe8, 0xa6, 0x7d, 0x96, 0xae, 0x60, 0xbd, 0x06, 0x9d, 0xee, + 0x85, 0xda, 0xd5, 0x6c, 0xb3, 0x05, 0x0b, 0xc3, 0x11, 0x02, 0x26, 0x2a, 0xc3, 0x5a, 0xcb, 0xf6, + 0x19, 0x43, 0x7b, 0x7e, 0x71, 0xe8, 0xda, 0x34, 0xa9, 0xd0, 0x1a, 0xac, 0xb0, 0x0d, 0x68, 0x83, + 0xa7, 0xc7, 0x1f, 0x84, 0xf6, 0x0a, 0xd7, 0x36, 0xbc, 0xa6, 0xa7, 0x4b, 0x00, 0x55, 0x0b, 0xfd, + 0xe2, 0xeb, 0xcf, 0x48, 0x4c, 0x92, 0xe3, 0x59, 0x67, 0x31, 0x08, 0x31, 0xb2, 0x4b, 0xda, 0x6f, + 0xb4, 0xbd, 0xbf, 0x0b, 0x28, 0x8a, 0x49, 0xd2, 0x9d, 0x75, 0x16, 0xd4, 0x87, 0x2d, 0x79, 0x55, + 0x20, 0x6c, 0x20, 0xdd, 0x98, 0x24, 0x91, 0x23, 0x3e, 0x44, 0x72, 0x45, 0x07, 0x15, 0xec, 0x96, + 0xaa, 0x0e, 0xe6, 0x5f, 0x4c, 0x12, 0x32, 0xeb, 0x2c, 0xfa, 0x98, 0xb6, 0xc8, 0xd8, 0x6d, 0xa3, + 0x65, 0x40, 0xbd, 0x98, 0x24, 0x27, 0x0e, 0x61, 0xea, 0xd1, 0xe4, 0x88, 0xf6, 0xfc, 0xdb, 0xc9, + 0x17, 0xa1, 0xe3, 0x15, 0xac, 0xd9, 0xdf, 0xeb, 0x98, 0x50, 0x7f, 0x6c, 0xee, 0x7e, 0x7e, 0x4e, + 0x9e, 0x9f, 0x82, 0x96, 0xa0, 0x84, 0x96, 0x0c, 0xb6, 0x32, 0x95, 0xb5, 0xf6, 0xab, 0x39, 0x2c, + 0x7e, 0xd3, 0x98, 0xdf, 0xee, 0xe1, 0x01, 0x1f, 0x6f, 0x51, 0x77, 0x9a, 0xe7, 0xef, 0xd1, 0x68, + 0x8a, 0x65, 0x79, 0x65, 0x18, 0x8e, 0x6e, 0x2a, 0x32, 0xf6, 0xe8, 0xd9, 0xe7, 0x01, 0x94, 0x79, + 0x65, 0xca, 0x16, 0x94, 0x45, 0x56, 0x22, 0xf8, 0x8e, 0xc6, 0x98, 0x72, 0x9e, 0x57, 0x86, 0xf3, + 0x96, 0x70, 0x5e, 0x64, 0x9c, 0x23, 0x5a, 0xfe, 0xf7, 0x5f, 0x77, 0xfb, 0x13, 0x00, 0x00, 0xff, + 0xff, 0xce, 0x35, 0xdb, 0xd7, 0x24, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/access_reason.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/access_reason.pb.go new file mode 100644 index 0000000..f8ee3ed --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/access_reason.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/access_reason.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible access reasons. +type AccessReasonEnum_AccessReason int32 + +const ( + // Not specified. + AccessReasonEnum_UNSPECIFIED AccessReasonEnum_AccessReason = 0 + // Used for return value only. Represents value unknown in this version. + AccessReasonEnum_UNKNOWN AccessReasonEnum_AccessReason = 1 + // The resource is owned by the user. 
+ AccessReasonEnum_OWNED AccessReasonEnum_AccessReason = 2 + // The resource is shared to the user. + AccessReasonEnum_SHARED AccessReasonEnum_AccessReason = 3 + // The resource is licensed to the user. + AccessReasonEnum_LICENSED AccessReasonEnum_AccessReason = 4 + // The user subscribed to the resource. + AccessReasonEnum_SUBSCRIBED AccessReasonEnum_AccessReason = 5 + // The resource is accessible to the user. + AccessReasonEnum_AFFILIATED AccessReasonEnum_AccessReason = 6 +) + +var AccessReasonEnum_AccessReason_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OWNED", + 3: "SHARED", + 4: "LICENSED", + 5: "SUBSCRIBED", + 6: "AFFILIATED", +} +var AccessReasonEnum_AccessReason_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OWNED": 2, + "SHARED": 3, + "LICENSED": 4, + "SUBSCRIBED": 5, + "AFFILIATED": 6, +} + +func (x AccessReasonEnum_AccessReason) String() string { + return proto.EnumName(AccessReasonEnum_AccessReason_name, int32(x)) +} +func (AccessReasonEnum_AccessReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_access_reason_f540ca2e6dd316e1, []int{0, 0} +} + +// Indicates the way the resource such as user list is related to a user. +type AccessReasonEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccessReasonEnum) Reset() { *m = AccessReasonEnum{} } +func (m *AccessReasonEnum) String() string { return proto.CompactTextString(m) } +func (*AccessReasonEnum) ProtoMessage() {} +func (*AccessReasonEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_access_reason_f540ca2e6dd316e1, []int{0} +} +func (m *AccessReasonEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccessReasonEnum.Unmarshal(m, b) +} +func (m *AccessReasonEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccessReasonEnum.Marshal(b, m, deterministic) +} +func (dst *AccessReasonEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccessReasonEnum.Merge(dst, src) +} +func (m *AccessReasonEnum) XXX_Size() int { + return xxx_messageInfo_AccessReasonEnum.Size(m) +} +func (m *AccessReasonEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AccessReasonEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AccessReasonEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AccessReasonEnum)(nil), "google.ads.googleads.v1.enums.AccessReasonEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AccessReasonEnum_AccessReason", AccessReasonEnum_AccessReason_name, AccessReasonEnum_AccessReason_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/access_reason.proto", fileDescriptor_access_reason_f540ca2e6dd316e1) +} + +var fileDescriptor_access_reason_f540ca2e6dd316e1 = []byte{ + // 336 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4e, 0xf2, 0x30, + 0x18, 0xfd, 0x37, 0x7e, 0x50, 0x0b, 0xd1, 0xda, 0x4b, 0x23, 0x17, 0xf0, 0x00, 0x5d, 0x16, 0xef, + 0xea, 0x55, 0xc7, 0x0a, 0x2e, 0x92, 0x41, 0x98, 0x40, 0x62, 0x96, 0x98, 0xca, 0x96, 0x85, 0x04, + 0x5a, 0xe4, 0x03, 0xde, 0xc0, 0x17, 0xf1, 0xd2, 0x47, 0xf1, 0x41, 0xbc, 0xf0, 0x29, 0xcc, 0x5a, + 0x21, 0xdc, 0xe8, 0x4d, 0x73, 0xfa, 0x7d, 0xe7, 0x9c, 0x7c, 0xe7, 0x20, 0xbf, 0xd0, 0xba, 0x58, + 0xe4, 0x9e, 0xcc, 0xc0, 0xb3, 0xb0, 0x44, 0x3b, 0xdf, 0xcb, 0xd5, 0x76, 0x09, 0x9e, 0x9c, 0xcd, + 0x72, 0x80, 0xa7, 0x75, 0x2e, 0x41, 0x2b, 0xba, 0x5a, 0xeb, 0x8d, 0x26, 0x4d, 0xcb, 0xa3, 0x32, + 
0x03, 0x7a, 0x90, 0xd0, 0x9d, 0x4f, 0x8d, 0xe4, 0xea, 0x7a, 0xef, 0xb8, 0x9a, 0x7b, 0x52, 0x29, + 0xbd, 0x91, 0x9b, 0xb9, 0x56, 0x60, 0xc5, 0xed, 0x57, 0x07, 0x61, 0x6e, 0x4c, 0x47, 0xc6, 0x53, + 0xa8, 0xed, 0xb2, 0xfd, 0x82, 0x1a, 0xc7, 0x33, 0x72, 0x81, 0xea, 0xe3, 0x38, 0x19, 0x8a, 0x4e, + 0xd4, 0x8d, 0x44, 0x88, 0xff, 0x91, 0x3a, 0x3a, 0x19, 0xc7, 0xf7, 0xf1, 0x60, 0x1a, 0x63, 0x87, + 0x9c, 0xa1, 0xea, 0x60, 0x1a, 0x8b, 0x10, 0xbb, 0x04, 0xa1, 0x5a, 0x72, 0xc7, 0x47, 0x22, 0xc4, + 0x15, 0xd2, 0x40, 0xa7, 0xfd, 0xa8, 0x23, 0xe2, 0x44, 0x84, 0xf8, 0x3f, 0x39, 0x47, 0x28, 0x19, + 0x07, 0x49, 0x67, 0x14, 0x05, 0x22, 0xc4, 0xd5, 0xf2, 0xcf, 0xbb, 0xdd, 0xa8, 0x1f, 0xf1, 0x07, + 0x11, 0xe2, 0x5a, 0xf0, 0xe9, 0xa0, 0xd6, 0x4c, 0x2f, 0xe9, 0x9f, 0x59, 0x82, 0xcb, 0xe3, 0xb3, + 0x86, 0x65, 0x80, 0xa1, 0xf3, 0x18, 0xfc, 0x68, 0x0a, 0xbd, 0x90, 0xaa, 0xa0, 0x7a, 0x5d, 0x78, + 0x45, 0xae, 0x4c, 0xbc, 0x7d, 0x85, 0xab, 0x39, 0xfc, 0xd2, 0xe8, 0xad, 0x79, 0xdf, 0xdc, 0x4a, + 0x8f, 0xf3, 0x77, 0xb7, 0xd9, 0xb3, 0x56, 0x3c, 0x03, 0x6a, 0x61, 0x89, 0x26, 0x3e, 0x2d, 0x6b, + 0x81, 0x8f, 0xfd, 0x3e, 0xe5, 0x19, 0xa4, 0x87, 0x7d, 0x3a, 0xf1, 0x53, 0xb3, 0xff, 0x72, 0x5b, + 0x76, 0xc8, 0x18, 0xcf, 0x80, 0xb1, 0x03, 0x83, 0xb1, 0x89, 0xcf, 0x98, 0xe1, 0x3c, 0xd7, 0xcc, + 0x61, 0x37, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x89, 0x92, 0xba, 0x1a, 0xe9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_status.pb.go new file mode 100644 index 0000000..65f0e7b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_status.pb.go @@ -0,0 +1,137 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/account_budget_proposal_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of an AccountBudgetProposal. +type AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus int32 + +const ( + // Not specified. + AccountBudgetProposalStatusEnum_UNSPECIFIED AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 0 + // Used for return value only. Represents value unknown in this version. + AccountBudgetProposalStatusEnum_UNKNOWN AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 1 + // The proposal is pending approval. + AccountBudgetProposalStatusEnum_PENDING AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 2 + // The proposal has been approved but the corresponding billing setup + // has not. This can occur for proposals that set up the first budget + // when signing up for billing or when performing a change of bill-to + // operation. 
+ AccountBudgetProposalStatusEnum_APPROVED_HELD AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 3 + // The proposal has been approved. + AccountBudgetProposalStatusEnum_APPROVED AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 4 + // The proposal has been cancelled by the user. + AccountBudgetProposalStatusEnum_CANCELLED AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 5 + // The proposal has been rejected by the user, e.g. by rejecting an + // acceptance email. + AccountBudgetProposalStatusEnum_REJECTED AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus = 6 +) + +var AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "APPROVED_HELD", + 4: "APPROVED", + 5: "CANCELLED", + 6: "REJECTED", +} +var AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "APPROVED_HELD": 3, + "APPROVED": 4, + "CANCELLED": 5, + "REJECTED": 6, +} + +func (x AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus) String() string { + return proto.EnumName(AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus_name, int32(x)) +} +func (AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_status_26386523d21926fe, []int{0, 0} +} + +// Message describing AccountBudgetProposal statuses. +type AccountBudgetProposalStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetProposalStatusEnum) Reset() { *m = AccountBudgetProposalStatusEnum{} } +func (m *AccountBudgetProposalStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetProposalStatusEnum) ProtoMessage() {} +func (*AccountBudgetProposalStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_status_26386523d21926fe, []int{0} +} +func (m *AccountBudgetProposalStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetProposalStatusEnum.Unmarshal(m, b) +} +func (m *AccountBudgetProposalStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetProposalStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetProposalStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetProposalStatusEnum.Merge(dst, src) +} +func (m *AccountBudgetProposalStatusEnum) XXX_Size() int { + return xxx_messageInfo_AccountBudgetProposalStatusEnum.Size(m) +} +func (m *AccountBudgetProposalStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetProposalStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetProposalStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AccountBudgetProposalStatusEnum)(nil), "google.ads.googleads.v1.enums.AccountBudgetProposalStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus", AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus_name, AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/account_budget_proposal_status.proto", fileDescriptor_account_budget_proposal_status_26386523d21926fe) +} + +var fileDescriptor_account_budget_proposal_status_26386523d21926fe = []byte{ 
+ // 354 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x4e, 0xc2, 0x40, + 0x10, 0xb5, 0x45, 0x51, 0x17, 0x89, 0xb5, 0x47, 0x95, 0x28, 0x7c, 0xc0, 0x36, 0x8d, 0xb7, 0xf5, + 0xb4, 0x6d, 0x57, 0x44, 0x49, 0x69, 0x40, 0x6a, 0x62, 0x9a, 0x34, 0x0b, 0x6d, 0x1a, 0x12, 0xd8, + 0x6d, 0xd8, 0x2d, 0x9f, 0xe0, 0x87, 0x78, 0xe4, 0x53, 0xfc, 0x14, 0xef, 0xde, 0x4d, 0xb7, 0x94, + 0x9b, 0x5c, 0x36, 0x6f, 0xe7, 0xcd, 0xcc, 0x9b, 0x79, 0x03, 0x9c, 0x8c, 0xf3, 0x6c, 0x99, 0x5a, + 0x34, 0x11, 0x56, 0x05, 0x4b, 0xb4, 0xb1, 0xad, 0x94, 0x15, 0x2b, 0x61, 0xd1, 0xf9, 0x9c, 0x17, + 0x4c, 0xc6, 0xb3, 0x22, 0xc9, 0x52, 0x19, 0xe7, 0x6b, 0x9e, 0x73, 0x41, 0x97, 0xb1, 0x90, 0x54, + 0x16, 0x02, 0xe6, 0x6b, 0x2e, 0xb9, 0xd9, 0xa9, 0x0a, 0x21, 0x4d, 0x04, 0xdc, 0xf7, 0x80, 0x1b, + 0x1b, 0xaa, 0x1e, 0xd7, 0xb7, 0xb5, 0x44, 0xbe, 0xb0, 0x28, 0x63, 0x5c, 0x52, 0xb9, 0xe0, 0x6c, + 0x57, 0xdc, 0xdb, 0x6a, 0xe0, 0x0e, 0x57, 0x2a, 0x8e, 0x12, 0x09, 0x76, 0x1a, 0x13, 0x25, 0x41, + 0x58, 0xb1, 0xea, 0x7d, 0x6a, 0xe0, 0xe6, 0x40, 0x8e, 0x79, 0x09, 0x5a, 0x53, 0x7f, 0x12, 0x10, + 0x77, 0xf0, 0x34, 0x20, 0x9e, 0x71, 0x64, 0xb6, 0xc0, 0xe9, 0xd4, 0x7f, 0xf5, 0x47, 0xef, 0xbe, + 0xa1, 0x95, 0x9f, 0x80, 0xf8, 0xde, 0xc0, 0xef, 0x1b, 0xba, 0x79, 0x05, 0xda, 0x38, 0x08, 0xc6, + 0xa3, 0x90, 0x78, 0xf1, 0x33, 0x19, 0x7a, 0x46, 0xc3, 0xbc, 0x00, 0x67, 0x75, 0xc8, 0x38, 0x36, + 0xdb, 0xe0, 0xdc, 0xc5, 0xbe, 0x4b, 0x86, 0x43, 0xe2, 0x19, 0x27, 0x25, 0x39, 0x26, 0x2f, 0xc4, + 0x7d, 0x23, 0x9e, 0xd1, 0x74, 0x7e, 0x35, 0xd0, 0x9d, 0xf3, 0x15, 0x3c, 0xb8, 0xb0, 0x73, 0x7f, + 0x60, 0xd6, 0xa0, 0x5c, 0x3a, 0xd0, 0x3e, 0x76, 0xbe, 0xc3, 0x8c, 0x2f, 0x29, 0xcb, 0x20, 0x5f, + 0x67, 0x56, 0x96, 0x32, 0x65, 0x49, 0x7d, 0x87, 0x7c, 0x21, 0xfe, 0x39, 0xcb, 0xa3, 0x7a, 0xbf, + 0xf4, 0x46, 0x1f, 0xe3, 0xad, 0xde, 0xe9, 0x57, 0xad, 0x70, 0x22, 0x60, 0x05, 0x4b, 0x14, 0xda, + 0xb0, 0xf4, 0x4e, 0x7c, 0xd7, 0x7c, 0x84, 0x13, 0x11, 0xed, 0xf9, 0x28, 0xb4, 0x23, 0xc5, 0xff, + 0xe8, 0xdd, 0x2a, 0x88, 0x10, 0x4e, 0x04, 0x42, 0xfb, 0x0c, 0x84, 0x42, 0x1b, 0x21, 0x95, 0x33, + 0x6b, 0xaa, 0xc1, 0x1e, 0xfe, 0x02, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x9f, 0xc3, 0xa1, 0x2e, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_type.pb.go new file mode 100644 index 0000000..d9aac2c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_proposal_type.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/account_budget_proposal_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible types of an AccountBudgetProposal. 
+type AccountBudgetProposalTypeEnum_AccountBudgetProposalType int32 + +const ( + // Not specified. + AccountBudgetProposalTypeEnum_UNSPECIFIED AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 0 + // Used for return value only. Represents value unknown in this version. + AccountBudgetProposalTypeEnum_UNKNOWN AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 1 + // Identifies a request to create a new budget. + AccountBudgetProposalTypeEnum_CREATE AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 2 + // Identifies a request to edit an existing budget. + AccountBudgetProposalTypeEnum_UPDATE AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 3 + // Identifies a request to end a budget that has already started. + AccountBudgetProposalTypeEnum_END AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 4 + // Identifies a request to remove a budget that hasn't started yet. + AccountBudgetProposalTypeEnum_REMOVE AccountBudgetProposalTypeEnum_AccountBudgetProposalType = 5 +) + +var AccountBudgetProposalTypeEnum_AccountBudgetProposalType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CREATE", + 3: "UPDATE", + 4: "END", + 5: "REMOVE", +} +var AccountBudgetProposalTypeEnum_AccountBudgetProposalType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CREATE": 2, + "UPDATE": 3, + "END": 4, + "REMOVE": 5, +} + +func (x AccountBudgetProposalTypeEnum_AccountBudgetProposalType) String() string { + return proto.EnumName(AccountBudgetProposalTypeEnum_AccountBudgetProposalType_name, int32(x)) +} +func (AccountBudgetProposalTypeEnum_AccountBudgetProposalType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_type_a2ce3cf7765c59ed, []int{0, 0} +} + +// Message describing AccountBudgetProposal types. 
+type AccountBudgetProposalTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetProposalTypeEnum) Reset() { *m = AccountBudgetProposalTypeEnum{} } +func (m *AccountBudgetProposalTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetProposalTypeEnum) ProtoMessage() {} +func (*AccountBudgetProposalTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_type_a2ce3cf7765c59ed, []int{0} +} +func (m *AccountBudgetProposalTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetProposalTypeEnum.Unmarshal(m, b) +} +func (m *AccountBudgetProposalTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetProposalTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetProposalTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetProposalTypeEnum.Merge(dst, src) +} +func (m *AccountBudgetProposalTypeEnum) XXX_Size() int { + return xxx_messageInfo_AccountBudgetProposalTypeEnum.Size(m) +} +func (m *AccountBudgetProposalTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetProposalTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetProposalTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AccountBudgetProposalTypeEnum)(nil), "google.ads.googleads.v1.enums.AccountBudgetProposalTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType", AccountBudgetProposalTypeEnum_AccountBudgetProposalType_name, AccountBudgetProposalTypeEnum_AccountBudgetProposalType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/account_budget_proposal_type.proto", fileDescriptor_account_budget_proposal_type_a2ce3cf7765c59ed) +} + +var fileDescriptor_account_budget_proposal_type_a2ce3cf7765c59ed = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x6a, 0xc2, 0x30, + 0x14, 0x86, 0xd7, 0xba, 0x29, 0xc4, 0x8b, 0x95, 0xde, 0x6d, 0xcc, 0x81, 0x3e, 0x40, 0x4a, 0xd9, + 0x5d, 0x76, 0xb3, 0x54, 0x33, 0x91, 0xb1, 0x5a, 0x9c, 0x76, 0x30, 0x0a, 0x12, 0x6d, 0x17, 0x04, + 0x4d, 0x82, 0x69, 0x05, 0x9f, 0x60, 0xef, 0xb1, 0xcb, 0x3d, 0xca, 0x1e, 0x65, 0xb7, 0x7b, 0x81, + 0x91, 0x44, 0xbd, 0xeb, 0x6e, 0xc2, 0x47, 0xce, 0x39, 0xff, 0x7f, 0xce, 0x0f, 0x1e, 0x98, 0x10, + 0x6c, 0x5d, 0x04, 0x34, 0x57, 0x81, 0x45, 0x4d, 0xbb, 0x30, 0x28, 0x78, 0xb5, 0x51, 0x01, 0x5d, + 0x2e, 0x45, 0xc5, 0xcb, 0xf9, 0xa2, 0xca, 0x59, 0x51, 0xce, 0xe5, 0x56, 0x48, 0xa1, 0xe8, 0x7a, + 0x5e, 0xee, 0x65, 0x01, 0xe5, 0x56, 0x94, 0xc2, 0xef, 0xd8, 0x31, 0x48, 0x73, 0x05, 0x4f, 0x0a, + 0x70, 0x17, 0x42, 0xa3, 0x70, 0x7d, 0x73, 0x34, 0x90, 0xab, 0x80, 0x72, 0x2e, 0x4a, 0x5a, 0xae, + 0x04, 0x57, 0x76, 0xb8, 0xf7, 0xe1, 0x80, 0x0e, 0xb6, 0x1e, 0x91, 0xb1, 0x48, 0x0e, 0x0e, 0xd3, + 0xbd, 0x2c, 0x08, 0xaf, 0x36, 0xbd, 0x77, 0x70, 0x55, 0xdb, 0xe0, 0x5f, 0x82, 0xf6, 0x2c, 0x7e, + 0x49, 0x48, 0x7f, 0xf4, 0x38, 0x22, 0x03, 0xef, 0xcc, 0x6f, 0x83, 0xd6, 0x2c, 0x7e, 0x8a, 0xc7, + 0xaf, 0xb1, 0xe7, 0xf8, 0x00, 0x34, 0xfb, 0x13, 0x82, 0xa7, 0xc4, 0x73, 0x35, 0xcf, 0x92, 0x81, + 0xe6, 0x86, 0xdf, 0x02, 0x0d, 0x12, 0x0f, 0xbc, 0x73, 0xfd, 0x39, 0x21, 0xcf, 0xe3, 0x94, 0x78, + 0x17, 0xd1, 0xaf, 0x03, 0xba, 0x4b, 0xb1, 0x81, 0xff, 0x5e, 0x13, 0xdd, 0xd6, 0xee, 0x92, 0xe8, + 0x7b, 0x12, 0xe7, 0x2d, 0x3a, 0x08, 
0x30, 0xb1, 0xa6, 0x9c, 0x41, 0xb1, 0x65, 0x01, 0x2b, 0xb8, + 0xb9, 0xf6, 0x18, 0xb0, 0x5c, 0xa9, 0x9a, 0xbc, 0xef, 0xcd, 0xfb, 0xe9, 0x36, 0x86, 0x18, 0x7f, + 0xb9, 0x9d, 0xa1, 0x95, 0xc2, 0xb9, 0x82, 0x16, 0x35, 0xa5, 0x21, 0xd4, 0xc1, 0xa8, 0xef, 0x63, + 0x3d, 0xc3, 0xb9, 0xca, 0x4e, 0xf5, 0x2c, 0x0d, 0x33, 0x53, 0xff, 0x71, 0xbb, 0xf6, 0x13, 0x21, + 0x9c, 0x2b, 0x84, 0x4e, 0x1d, 0x08, 0xa5, 0x21, 0x42, 0xa6, 0x67, 0xd1, 0x34, 0x8b, 0xdd, 0xfd, + 0x05, 0x00, 0x00, 0xff, 0xff, 0xaf, 0xbb, 0xd0, 0xa0, 0x07, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_status.pb.go new file mode 100644 index 0000000..fdcbd3a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/account_budget_status.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/account_budget_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of an AccountBudget. +type AccountBudgetStatusEnum_AccountBudgetStatus int32 + +const ( + // Not specified. + AccountBudgetStatusEnum_UNSPECIFIED AccountBudgetStatusEnum_AccountBudgetStatus = 0 + // Used for return value only. Represents value unknown in this version. + AccountBudgetStatusEnum_UNKNOWN AccountBudgetStatusEnum_AccountBudgetStatus = 1 + // The account budget is pending approval. + AccountBudgetStatusEnum_PENDING AccountBudgetStatusEnum_AccountBudgetStatus = 2 + // The account budget has been approved. + AccountBudgetStatusEnum_APPROVED AccountBudgetStatusEnum_AccountBudgetStatus = 3 + // The account budget has been cancelled by the user. + AccountBudgetStatusEnum_CANCELLED AccountBudgetStatusEnum_AccountBudgetStatus = 4 +) + +var AccountBudgetStatusEnum_AccountBudgetStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "APPROVED", + 4: "CANCELLED", +} +var AccountBudgetStatusEnum_AccountBudgetStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "APPROVED": 3, + "CANCELLED": 4, +} + +func (x AccountBudgetStatusEnum_AccountBudgetStatus) String() string { + return proto.EnumName(AccountBudgetStatusEnum_AccountBudgetStatus_name, int32(x)) +} +func (AccountBudgetStatusEnum_AccountBudgetStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_account_budget_status_1e96112e98d86ccd, []int{0, 0} +} + +// Message describing AccountBudget statuses. 
+type AccountBudgetStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetStatusEnum) Reset() { *m = AccountBudgetStatusEnum{} } +func (m *AccountBudgetStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetStatusEnum) ProtoMessage() {} +func (*AccountBudgetStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_status_1e96112e98d86ccd, []int{0} +} +func (m *AccountBudgetStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetStatusEnum.Unmarshal(m, b) +} +func (m *AccountBudgetStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetStatusEnum.Merge(dst, src) +} +func (m *AccountBudgetStatusEnum) XXX_Size() int { + return xxx_messageInfo_AccountBudgetStatusEnum.Size(m) +} +func (m *AccountBudgetStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AccountBudgetStatusEnum)(nil), "google.ads.googleads.v1.enums.AccountBudgetStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AccountBudgetStatusEnum_AccountBudgetStatus", AccountBudgetStatusEnum_AccountBudgetStatus_name, AccountBudgetStatusEnum_AccountBudgetStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/account_budget_status.proto", fileDescriptor_account_budget_status_1e96112e98d86ccd) +} + +var fileDescriptor_account_budget_status_1e96112e98d86ccd = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xc3, 0x30, + 0x00, 0x76, 0x9d, 0xf8, 0x93, 0x29, 0x96, 0x7a, 0x50, 0xc4, 0x1d, 0xb6, 0x07, 0x48, 0x29, 0x9e, + 0x8c, 0xa7, 0x74, 0x8d, 0x63, 0x38, 0xb2, 0xe2, 0x58, 0x05, 0xa9, 0x8c, 0x6c, 0x29, 0x61, 0xb0, + 0x25, 0x63, 0x49, 0x87, 0xcf, 0xe3, 0xd1, 0x47, 0xf1, 0x51, 0x04, 0xdf, 0x41, 0x9a, 0x6c, 0x3b, + 0x4d, 0x2f, 0xe1, 0x4b, 0xbe, 0x1f, 0xbe, 0x7c, 0xe0, 0x5e, 0x28, 0x25, 0xe6, 0x45, 0xc8, 0xb8, + 0x0e, 0x1d, 0xac, 0xd0, 0x3a, 0x0a, 0x0b, 0x59, 0x2e, 0x74, 0xc8, 0xa6, 0x53, 0x55, 0x4a, 0x33, + 0x9e, 0x94, 0x5c, 0x14, 0x66, 0xac, 0x0d, 0x33, 0xa5, 0x86, 0xcb, 0x95, 0x32, 0x2a, 0x68, 0x3a, + 0x3d, 0x64, 0x5c, 0xc3, 0x9d, 0x15, 0xae, 0x23, 0x68, 0xad, 0x37, 0xb7, 0xdb, 0xe4, 0xe5, 0x2c, + 0x64, 0x52, 0x2a, 0xc3, 0xcc, 0x4c, 0xc9, 0x8d, 0xb9, 0xfd, 0x0e, 0xae, 0xb0, 0xcb, 0x8e, 0x6d, + 0xf4, 0xd0, 0x26, 0x13, 0x59, 0x2e, 0xda, 0x6f, 0xe0, 0x72, 0x0f, 0x15, 0x5c, 0x80, 0xc6, 0x88, + 0x0e, 0x53, 0xd2, 0xe9, 0x3d, 0xf6, 0x48, 0xe2, 0x1f, 0x04, 0x0d, 0x70, 0x3c, 0xa2, 0x4f, 0x74, + 0xf0, 0x42, 0xfd, 0x5a, 0x75, 0x49, 0x09, 0x4d, 0x7a, 0xb4, 0xeb, 0x7b, 0xc1, 0x19, 0x38, 0xc1, + 0x69, 0xfa, 0x3c, 0xc8, 0x48, 0xe2, 0xd7, 0x83, 0x73, 0x70, 0xda, 0xc1, 0xb4, 0x43, 0xfa, 0x7d, + 0x92, 0xf8, 0x87, 0xf1, 0x4f, 0x0d, 0xb4, 0xa6, 0x6a, 0x01, 0xff, 0x6d, 0x1f, 0x5f, 0xef, 0xa9, + 0x90, 0x56, 0xcd, 0xd3, 0xda, 0x6b, 0xbc, 0xb1, 0x0a, 0x35, 0x67, 0x52, 0x40, 0xb5, 0x12, 0xa1, + 0x28, 0xa4, 0xfd, 0xd7, 0x76, 0xc3, 0xe5, 0x4c, 0xff, 0x31, 0xe9, 0x83, 0x3d, 0x3f, 0xbc, 0x7a, + 0x17, 0xe3, 0x4f, 0xaf, 0xd9, 0x75, 0x51, 0x98, 0x6b, 0xe8, 0x60, 0x85, 0xb2, 0x08, 0x56, 0x4b, + 0xe8, 0xaf, 0x2d, 
0x9f, 0x63, 0xae, 0xf3, 0x1d, 0x9f, 0x67, 0x51, 0x6e, 0xf9, 0x6f, 0xaf, 0xe5, + 0x1e, 0x11, 0xc2, 0x5c, 0x23, 0xb4, 0x53, 0x20, 0x94, 0x45, 0x08, 0x59, 0xcd, 0xe4, 0xc8, 0x16, + 0xbb, 0xfb, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x90, 0xce, 0x17, 0xf2, 0xea, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_customizer_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_customizer_placeholder_field.pb.go new file mode 100644 index 0000000..9404ae9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_customizer_placeholder_field.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_customizer_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Ad Customizers placeholder fields. +type AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField int32 + +const ( + // Not specified. + AdCustomizerPlaceholderFieldEnum_UNSPECIFIED AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + AdCustomizerPlaceholderFieldEnum_UNKNOWN AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 1 + // Data Type: INT64. Integer value to be inserted. + AdCustomizerPlaceholderFieldEnum_INTEGER AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 2 + // Data Type: STRING. Price value to be inserted. + AdCustomizerPlaceholderFieldEnum_PRICE AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 3 + // Data Type: DATE_TIME. Date value to be inserted. + AdCustomizerPlaceholderFieldEnum_DATE AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 4 + // Data Type: STRING. String value to be inserted. + AdCustomizerPlaceholderFieldEnum_STRING AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField = 5 +) + +var AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INTEGER", + 3: "PRICE", + 4: "DATE", + 5: "STRING", +} +var AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INTEGER": 2, + "PRICE": 3, + "DATE": 4, + "STRING": 5, +} + +func (x AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField) String() string { + return proto.EnumName(AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField_name, int32(x)) +} +func (AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_customizer_placeholder_field_1a21cd682c8e0db6, []int{0, 0} +} + +// Values for Ad Customizer placeholder fields. 
+type AdCustomizerPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdCustomizerPlaceholderFieldEnum) Reset() { *m = AdCustomizerPlaceholderFieldEnum{} } +func (m *AdCustomizerPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*AdCustomizerPlaceholderFieldEnum) ProtoMessage() {} +func (*AdCustomizerPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_customizer_placeholder_field_1a21cd682c8e0db6, []int{0} +} +func (m *AdCustomizerPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdCustomizerPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *AdCustomizerPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdCustomizerPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *AdCustomizerPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdCustomizerPlaceholderFieldEnum.Merge(dst, src) +} +func (m *AdCustomizerPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_AdCustomizerPlaceholderFieldEnum.Size(m) +} +func (m *AdCustomizerPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdCustomizerPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdCustomizerPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdCustomizerPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.AdCustomizerPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField", AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField_name, AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_customizer_placeholder_field.proto", fileDescriptor_ad_customizer_placeholder_field_1a21cd682c8e0db6) +} + +var fileDescriptor_ad_customizer_placeholder_field_1a21cd682c8e0db6 = []byte{ + // 337 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4e, 0x83, 0x40, + 0x18, 0x15, 0xfa, 0xa3, 0x4e, 0x17, 0x12, 0x96, 0xa6, 0x4d, 0x6c, 0x0f, 0x30, 0x84, 0xb8, 0x1b, + 0x57, 0x53, 0x3a, 0x25, 0xc4, 0x04, 0x49, 0xff, 0x4c, 0x0c, 0x49, 0x33, 0x76, 0x46, 0xc4, 0xc0, + 0x0c, 0x61, 0x68, 0x17, 0x1e, 0xc2, 0x43, 0xb8, 0xf4, 0x28, 0x1e, 0xc5, 0x03, 0xb8, 0x36, 0x80, + 0xc5, 0x95, 0x6c, 0xc8, 0x63, 0xbe, 0xf7, 0xbd, 0xf7, 0xbd, 0x07, 0x9c, 0x48, 0xca, 0x28, 0xe1, + 0x16, 0x65, 0xca, 0xaa, 0x61, 0x89, 0x0e, 0xb6, 0xc5, 0xc5, 0x3e, 0x55, 0x16, 0x65, 0xdb, 0xdd, + 0x5e, 0x15, 0x32, 0x8d, 0x5f, 0x79, 0xbe, 0xcd, 0x12, 0xba, 0xe3, 0xcf, 0x32, 0x61, 0x3c, 0xdf, + 0x3e, 0xc5, 0x3c, 0x61, 0x30, 0xcb, 0x65, 0x21, 0xcd, 0x51, 0xbd, 0x09, 0x29, 0x53, 0xb0, 0x11, + 0x81, 0x07, 0x1b, 0x56, 0x22, 0x97, 0xc3, 0xa3, 0x47, 0x16, 0x5b, 0x54, 0x08, 0x59, 0xd0, 0x22, + 0x96, 0x42, 0xd5, 0xcb, 0x93, 0x37, 0x0d, 0x5c, 0x61, 0xe6, 0x34, 0x2e, 0xc1, 0x9f, 0xc9, 0xbc, + 0xf4, 0x20, 0x62, 0x9f, 0x4e, 0x5e, 0xc0, 0xb0, 0x8d, 0x63, 0x5e, 0x80, 0xc1, 0xda, 0x5f, 0x06, + 0xc4, 0xf1, 0xe6, 0x1e, 0x99, 0x19, 0x27, 0xe6, 0x00, 0x9c, 0xae, 0xfd, 0x5b, 0xff, 0xee, 0xde, + 0x37, 0xb4, 0xf2, 0xc7, 0xf3, 0x57, 0xc4, 0x25, 0x0b, 0x43, 0x37, 0xcf, 0x41, 0x2f, 0x58, 0x78, + 0x0e, 0x31, 0x3a, 0xe6, 0x19, 0xe8, 0xce, 0xf0, 0x8a, 0x18, 0x5d, 0x13, 0x80, 0xfe, 0x72, 0xb5, + 0xf0, 0x7c, 0xd7, 0xe8, 0x4d, 0xbf, 0x35, 0x30, 
0xde, 0xc9, 0x14, 0xb6, 0x86, 0x9a, 0x8e, 0xdb, + 0xee, 0x09, 0xca, 0x64, 0x81, 0xf6, 0x30, 0xfd, 0xd5, 0x88, 0x64, 0x42, 0x45, 0x04, 0x65, 0x1e, + 0x59, 0x11, 0x17, 0x55, 0xee, 0x63, 0xdb, 0x59, 0xac, 0xfe, 0x29, 0xff, 0xa6, 0xfa, 0xbe, 0xeb, + 0x1d, 0x17, 0xe3, 0x0f, 0x7d, 0xe4, 0xd6, 0x52, 0x98, 0x29, 0x58, 0xc3, 0x12, 0x6d, 0x6c, 0x58, + 0xf6, 0xa3, 0x3e, 0x8f, 0xf3, 0x10, 0x33, 0x15, 0x36, 0xf3, 0x70, 0x63, 0x87, 0xd5, 0xfc, 0x4b, + 0x1f, 0xd7, 0x8f, 0x08, 0x61, 0xa6, 0x10, 0x6a, 0x18, 0x08, 0x6d, 0x6c, 0x84, 0x2a, 0xce, 0x63, + 0xbf, 0x3a, 0xec, 0xfa, 0x27, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x5f, 0x60, 0x56, 0x14, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_rotation_mode.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_rotation_mode.pb.go new file mode 100644 index 0000000..f4d9df7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_rotation_mode.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_ad_rotation_mode.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible ad rotation modes of an ad group. +type AdGroupAdRotationModeEnum_AdGroupAdRotationMode int32 + +const ( + // The ad rotation mode has not been specified. + AdGroupAdRotationModeEnum_UNSPECIFIED AdGroupAdRotationModeEnum_AdGroupAdRotationMode = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdGroupAdRotationModeEnum_UNKNOWN AdGroupAdRotationModeEnum_AdGroupAdRotationMode = 1 + // Optimize ad group ads based on clicks or conversions. + AdGroupAdRotationModeEnum_OPTIMIZE AdGroupAdRotationModeEnum_AdGroupAdRotationMode = 2 + // Rotate evenly forever. + AdGroupAdRotationModeEnum_ROTATE_FOREVER AdGroupAdRotationModeEnum_AdGroupAdRotationMode = 3 +) + +var AdGroupAdRotationModeEnum_AdGroupAdRotationMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPTIMIZE", + 3: "ROTATE_FOREVER", +} +var AdGroupAdRotationModeEnum_AdGroupAdRotationMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPTIMIZE": 2, + "ROTATE_FOREVER": 3, +} + +func (x AdGroupAdRotationModeEnum_AdGroupAdRotationMode) String() string { + return proto.EnumName(AdGroupAdRotationModeEnum_AdGroupAdRotationMode_name, int32(x)) +} +func (AdGroupAdRotationModeEnum_AdGroupAdRotationMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_rotation_mode_1bfda688bd69645c, []int{0, 0} +} + +// Container for enum describing possible ad rotation modes of ads within an +// ad group. 
+type AdGroupAdRotationModeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdRotationModeEnum) Reset() { *m = AdGroupAdRotationModeEnum{} } +func (m *AdGroupAdRotationModeEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdRotationModeEnum) ProtoMessage() {} +func (*AdGroupAdRotationModeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_rotation_mode_1bfda688bd69645c, []int{0} +} +func (m *AdGroupAdRotationModeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdRotationModeEnum.Unmarshal(m, b) +} +func (m *AdGroupAdRotationModeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdRotationModeEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdRotationModeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdRotationModeEnum.Merge(dst, src) +} +func (m *AdGroupAdRotationModeEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupAdRotationModeEnum.Size(m) +} +func (m *AdGroupAdRotationModeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdRotationModeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdRotationModeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupAdRotationModeEnum)(nil), "google.ads.googleads.v1.enums.AdGroupAdRotationModeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupAdRotationModeEnum_AdGroupAdRotationMode", AdGroupAdRotationModeEnum_AdGroupAdRotationMode_name, AdGroupAdRotationModeEnum_AdGroupAdRotationMode_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_ad_rotation_mode.proto", fileDescriptor_ad_group_ad_rotation_mode_1bfda688bd69645c) +} + +var fileDescriptor_ad_group_ad_rotation_mode_1bfda688bd69645c = []byte{ + // 321 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xeb, 0xe0, 0xaf, 0x64, 0xa2, 0xa5, 0xe0, 0x85, 0xc3, 0x5d, 0x6c, 0x0f, 0x90, + 0x52, 0xbc, 0x8b, 0x78, 0x91, 0x69, 0x36, 0x8a, 0xac, 0x2d, 0x75, 0xeb, 0x60, 0x14, 0x4a, 0x34, + 0x25, 0x0c, 0xd6, 0x7c, 0xa5, 0xe9, 0xf6, 0x40, 0x5e, 0xfa, 0x28, 0x3e, 0x8a, 0x37, 0xbe, 0x82, + 0xb4, 0xd9, 0x76, 0x35, 0xbd, 0x09, 0x87, 0x9c, 0xef, 0x77, 0xf2, 0xe5, 0xa0, 0x07, 0x09, 0x20, + 0x37, 0xb9, 0xcb, 0x85, 0x76, 0x8d, 0x6c, 0xd4, 0xce, 0x73, 0x73, 0xb5, 0x2d, 0xb4, 0xcb, 0x45, + 0x26, 0x2b, 0xd8, 0x96, 0x19, 0x17, 0x59, 0x05, 0x35, 0xaf, 0xd7, 0xa0, 0xb2, 0x02, 0x44, 0x8e, + 0xcb, 0x0a, 0x6a, 0x70, 0x06, 0x86, 0xc1, 0x5c, 0x68, 0x7c, 0xc4, 0xf1, 0xce, 0xc3, 0x2d, 0xde, + 0xbf, 0x3d, 0xa4, 0x97, 0x6b, 0x97, 0x2b, 0xb5, 0x0f, 0xd0, 0x06, 0x1e, 0xd5, 0xe8, 0x86, 0x8a, + 0x69, 0x13, 0x4f, 0x45, 0xbc, 0xf7, 0x66, 0x20, 0x72, 0xa6, 0xb6, 0xc5, 0x68, 0x89, 0xae, 0x4f, + 0x9a, 0xce, 0x15, 0xea, 0x2d, 0x82, 0x97, 0x88, 0x3d, 0xfa, 0x13, 0x9f, 0x3d, 0xd9, 0xff, 0x9c, + 0x1e, 0x3a, 0x5b, 0x04, 0xcf, 0x41, 0xb8, 0x0c, 0xec, 0x8e, 0x73, 0x81, 0xce, 0xc3, 0x68, 0xee, + 0xcf, 0xfc, 0x15, 0xb3, 0x2d, 0xc7, 0x41, 0x97, 0x71, 0x38, 0xa7, 0x73, 0x96, 0x4d, 0xc2, 0x98, + 0x25, 0x2c, 0xb6, 0xbb, 0xe3, 0xef, 0x0e, 0x1a, 0xbe, 0x41, 0x81, 0xff, 0xdc, 0x7c, 0xdc, 0x3f, + 0xf9, 0x78, 0xd4, 0xec, 0x1d, 0x75, 0x56, 0xe3, 0x3d, 0x2c, 0x61, 0xc3, 0x95, 0xc4, 0x50, 0x49, + 0x57, 0xe6, 0xaa, 0xfd, 0xd5, 0xa1, 0xc5, 0x72, 0xad, 0x7f, 0x29, 0xf5, 0xbe, 0x3d, 0xdf, 0xad, + 0xee, 0x94, 0xd2, 0x0f, 0x6b, 0x30, 0x35, 0x51, 
0x54, 0x68, 0x6c, 0x64, 0xa3, 0x12, 0x0f, 0x37, + 0x2d, 0xe8, 0xcf, 0x83, 0x9f, 0x52, 0xa1, 0xd3, 0xa3, 0x9f, 0x26, 0x5e, 0xda, 0xfa, 0x5f, 0xd6, + 0xd0, 0x5c, 0x12, 0x42, 0x85, 0x26, 0xe4, 0x38, 0x41, 0x48, 0xe2, 0x11, 0xd2, 0xce, 0xbc, 0xfe, + 0x6f, 0x17, 0xbb, 0xfb, 0x09, 0x00, 0x00, 0xff, 0xff, 0x22, 0xa6, 0xd8, 0x5e, 0xec, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_status.pb.go new file mode 100644 index 0000000..1b067dc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_ad_status.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_ad_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of an AdGroupAd. +type AdGroupAdStatusEnum_AdGroupAdStatus int32 + +const ( + // No value has been specified. + AdGroupAdStatusEnum_UNSPECIFIED AdGroupAdStatusEnum_AdGroupAdStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdGroupAdStatusEnum_UNKNOWN AdGroupAdStatusEnum_AdGroupAdStatus = 1 + // The ad group ad is enabled. + AdGroupAdStatusEnum_ENABLED AdGroupAdStatusEnum_AdGroupAdStatus = 2 + // The ad group ad is paused. + AdGroupAdStatusEnum_PAUSED AdGroupAdStatusEnum_AdGroupAdStatus = 3 + // The ad group ad is removed. + AdGroupAdStatusEnum_REMOVED AdGroupAdStatusEnum_AdGroupAdStatus = 4 +) + +var AdGroupAdStatusEnum_AdGroupAdStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "PAUSED", + 4: "REMOVED", +} +var AdGroupAdStatusEnum_AdGroupAdStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "PAUSED": 3, + "REMOVED": 4, +} + +func (x AdGroupAdStatusEnum_AdGroupAdStatus) String() string { + return proto.EnumName(AdGroupAdStatusEnum_AdGroupAdStatus_name, int32(x)) +} +func (AdGroupAdStatusEnum_AdGroupAdStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_status_b660490e35bdde48, []int{0, 0} +} + +// Container for enum describing possible statuses of an AdGroupAd. 
+type AdGroupAdStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdStatusEnum) Reset() { *m = AdGroupAdStatusEnum{} } +func (m *AdGroupAdStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdStatusEnum) ProtoMessage() {} +func (*AdGroupAdStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_status_b660490e35bdde48, []int{0} +} +func (m *AdGroupAdStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdStatusEnum.Unmarshal(m, b) +} +func (m *AdGroupAdStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdStatusEnum.Merge(dst, src) +} +func (m *AdGroupAdStatusEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupAdStatusEnum.Size(m) +} +func (m *AdGroupAdStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupAdStatusEnum)(nil), "google.ads.googleads.v1.enums.AdGroupAdStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupAdStatusEnum_AdGroupAdStatus", AdGroupAdStatusEnum_AdGroupAdStatus_name, AdGroupAdStatusEnum_AdGroupAdStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_ad_status.proto", fileDescriptor_ad_group_ad_status_b660490e35bdde48) +} + +var fileDescriptor_ad_group_ad_status_b660490e35bdde48 = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x9d, 0x4c, 0xc8, 0x0e, 0x2b, 0xd5, 0x93, 0xb8, 0xc3, 0xf6, 0x00, 0x09, 0x45, 0xf0, + 0x10, 0x4f, 0xa9, 0x8d, 0x63, 0xa8, 0x5d, 0x71, 0xb4, 0x82, 0x14, 0x46, 0x34, 0x23, 0x0c, 0xda, + 0xa4, 0x34, 0xed, 0x1e, 0xc8, 0xa3, 0x8f, 0xe2, 0x9b, 0xe8, 0x53, 0x48, 0xd2, 0xb5, 0x87, 0x81, + 0x5e, 0xca, 0xaf, 0xdf, 0xef, 0x4f, 0x7e, 0xdf, 0x07, 0x6e, 0x84, 0x52, 0x22, 0xdf, 0x22, 0xc6, + 0x35, 0x6a, 0xa1, 0x41, 0x7b, 0x1f, 0x6d, 0x65, 0x53, 0x68, 0xc4, 0xf8, 0x46, 0x54, 0xaa, 0x29, + 0x37, 0x8c, 0x6f, 0x74, 0xcd, 0xea, 0x46, 0xc3, 0xb2, 0x52, 0xb5, 0xf2, 0xa6, 0xad, 0x18, 0x32, + 0xae, 0x61, 0xef, 0x83, 0x7b, 0x1f, 0x5a, 0xdf, 0xe5, 0x55, 0x17, 0x5b, 0xee, 0x10, 0x93, 0x52, + 0xd5, 0xac, 0xde, 0x29, 0x79, 0x30, 0xcf, 0x73, 0x70, 0x4e, 0xf8, 0xc2, 0xe4, 0x12, 0xbe, 0xb6, + 0xa9, 0x54, 0x36, 0xc5, 0x3c, 0x01, 0x93, 0xa3, 0xb1, 0x37, 0x01, 0xe3, 0x24, 0x5a, 0xc7, 0xf4, + 0x6e, 0x79, 0xbf, 0xa4, 0xa1, 0x7b, 0xe2, 0x8d, 0xc1, 0x59, 0x12, 0x3d, 0x44, 0xab, 0x97, 0xc8, + 0x1d, 0x98, 0x1f, 0x1a, 0x91, 0xe0, 0x91, 0x86, 0xae, 0xe3, 0x01, 0x30, 0x8a, 0x49, 0xb2, 0xa6, + 0xa1, 0x3b, 0x34, 0xc4, 0x33, 0x7d, 0x5a, 0xa5, 0x34, 0x74, 0x4f, 0x83, 0xef, 0x01, 0x98, 0xbd, + 0xab, 0x02, 0xfe, 0xdb, 0x38, 0xb8, 0x38, 0x7a, 0x3a, 0x36, 0x4d, 0xe3, 0xc1, 0x6b, 0x70, 0xb0, + 0x09, 0x95, 0x33, 0x29, 0xa0, 0xaa, 0x04, 0x12, 0x5b, 0x69, 0xf7, 0xe8, 0x0e, 0x56, 0xee, 0xf4, + 0x1f, 0xf7, 0xbb, 0xb5, 0xdf, 0x0f, 0x67, 0xb8, 0x20, 0xe4, 0xd3, 0x99, 0x2e, 0xda, 0x28, 0xc2, + 0x35, 0x6c, 0xa1, 0x41, 0xa9, 0x0f, 0xcd, 0xf6, 0xfa, 0xab, 0xe3, 0x33, 0xc2, 0x75, 0xd6, 0xf3, + 0x59, 0xea, 0x67, 0x96, 0xff, 0x71, 0x66, 0xed, 0x10, 0x63, 0xc2, 0x35, 0xc6, 0xbd, 0x02, 0xe3, + 0xd4, 0xc7, 0xd8, 0x6a, 0xde, 
0x46, 0xb6, 0xd8, 0xf5, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x54, + 0x46, 0xf3, 0x86, 0xd7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_approval_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_approval_status.pb.go new file mode 100644 index 0000000..ac7edae --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_approval_status.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_criterion_approval_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates AdGroupCriterion approval statuses. +type AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus int32 + +const ( + // Not specified. + AdGroupCriterionApprovalStatusEnum_UNSPECIFIED AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 0 + // The value is unknown in this version. + AdGroupCriterionApprovalStatusEnum_UNKNOWN AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 1 + // Approved. + AdGroupCriterionApprovalStatusEnum_APPROVED AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 2 + // Disapproved. + AdGroupCriterionApprovalStatusEnum_DISAPPROVED AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 3 + // Pending Review. + AdGroupCriterionApprovalStatusEnum_PENDING_REVIEW AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 4 + // Under review. + AdGroupCriterionApprovalStatusEnum_UNDER_REVIEW AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus = 5 +) + +var AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APPROVED", + 3: "DISAPPROVED", + 4: "PENDING_REVIEW", + 5: "UNDER_REVIEW", +} +var AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APPROVED": 2, + "DISAPPROVED": 3, + "PENDING_REVIEW": 4, + "UNDER_REVIEW": 5, +} + +func (x AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus) String() string { + return proto.EnumName(AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus_name, int32(x)) +} +func (AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_approval_status_57d27da9f8e80211, []int{0, 0} +} + +// Container for enum describing possible AdGroupCriterion approval statuses. 
+type AdGroupCriterionApprovalStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionApprovalStatusEnum) Reset() { *m = AdGroupCriterionApprovalStatusEnum{} } +func (m *AdGroupCriterionApprovalStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionApprovalStatusEnum) ProtoMessage() {} +func (*AdGroupCriterionApprovalStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_approval_status_57d27da9f8e80211, []int{0} +} +func (m *AdGroupCriterionApprovalStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionApprovalStatusEnum.Unmarshal(m, b) +} +func (m *AdGroupCriterionApprovalStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionApprovalStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionApprovalStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionApprovalStatusEnum.Merge(dst, src) +} +func (m *AdGroupCriterionApprovalStatusEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionApprovalStatusEnum.Size(m) +} +func (m *AdGroupCriterionApprovalStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionApprovalStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionApprovalStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupCriterionApprovalStatusEnum)(nil), "google.ads.googleads.v1.enums.AdGroupCriterionApprovalStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus", AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus_name, AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_criterion_approval_status.proto", fileDescriptor_ad_group_criterion_approval_status_57d27da9f8e80211) +} + +var fileDescriptor_ad_group_criterion_approval_status_57d27da9f8e80211 = []byte{ + // 349 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcd, 0x6a, 0xab, 0x40, + 0x18, 0xbd, 0x9a, 0x7b, 0x6f, 0xcb, 0x24, 0xb4, 0xe2, 0xb2, 0x34, 0x85, 0xa4, 0xfb, 0x11, 0xe9, + 0x6e, 0xba, 0x9a, 0xc4, 0x89, 0x48, 0xc1, 0x48, 0x42, 0x0c, 0x14, 0x41, 0xa6, 0x19, 0x19, 0x84, + 0x64, 0x46, 0x1c, 0xcd, 0x0b, 0xf4, 0x4d, 0xba, 0xcc, 0xa3, 0xf4, 0x51, 0xfa, 0x04, 0x5d, 0x16, + 0xc7, 0xe8, 0xae, 0xd9, 0x0c, 0x87, 0xef, 0x9c, 0x39, 0xe7, 0xfb, 0x01, 0x0b, 0x2e, 0x25, 0xdf, + 0x67, 0x0e, 0x65, 0xca, 0x69, 0x61, 0x83, 0x8e, 0xae, 0x93, 0x89, 0xfa, 0xa0, 0x1c, 0xca, 0x52, + 0x5e, 0xca, 0xba, 0x48, 0x77, 0x65, 0x5e, 0x65, 0x65, 0x2e, 0x45, 0x4a, 0x8b, 0xa2, 0x94, 0x47, + 0xba, 0x4f, 0x55, 0x45, 0xab, 0x5a, 0xc1, 0xa2, 0x94, 0x95, 0xb4, 0xc7, 0xed, 0x67, 0x48, 0x99, + 0x82, 0xbd, 0x0f, 0x3c, 0xba, 0x50, 0xfb, 0xdc, 0xdd, 0x77, 0x31, 0x45, 0xee, 0x50, 0x21, 0x64, + 0x45, 0xab, 0x5c, 0x8a, 0xf3, 0xe7, 0xe9, 0xc9, 0x00, 0x53, 0xcc, 0xfc, 0x26, 0x68, 0xde, 0xe5, + 0xe0, 0x73, 0xcc, 0x5a, 0xa7, 0x10, 0x51, 0x1f, 0xa6, 0xef, 0x06, 0x78, 0xb8, 0x2c, 0xb3, 0x6f, + 0xc1, 0x70, 0x13, 0xae, 0x23, 0x32, 0x0f, 0x16, 0x01, 0xf1, 0xac, 0x3f, 0xf6, 0x10, 0x5c, 0x6d, + 0xc2, 0x97, 0x70, 0xb9, 0x0d, 0x2d, 0xc3, 0x1e, 0x81, 0x6b, 0x1c, 0x45, 0xab, 0x65, 0x4c, 0x3c, + 0xcb, 0x6c, 0xb4, 0x5e, 0xb0, 0xee, 0x0b, 0x03, 0xdb, 0x06, 0x37, 0x11, 0x09, 0xbd, 
0x20, 0xf4, + 0xd3, 0x15, 0x89, 0x03, 0xb2, 0xb5, 0xfe, 0xda, 0x16, 0x18, 0x6d, 0x42, 0x8f, 0xac, 0xba, 0xca, + 0xbf, 0xd9, 0xb7, 0x01, 0x26, 0x3b, 0x79, 0x80, 0x17, 0x07, 0x9e, 0x3d, 0x5e, 0x6e, 0x34, 0x6a, + 0xe6, 0x8e, 0x8c, 0xd7, 0xd9, 0xd9, 0x85, 0xcb, 0x3d, 0x15, 0x1c, 0xca, 0x92, 0x3b, 0x3c, 0x13, + 0x7a, 0x2b, 0xdd, 0x39, 0x8a, 0x5c, 0xfd, 0x72, 0x9d, 0x67, 0xfd, 0x7e, 0x98, 0x03, 0x1f, 0xe3, + 0x93, 0x39, 0xf6, 0x5b, 0x2b, 0xcc, 0x14, 0x6c, 0x61, 0x83, 0x62, 0x17, 0x36, 0xbb, 0x53, 0x9f, + 0x1d, 0x9f, 0x60, 0xa6, 0x92, 0x9e, 0x4f, 0x62, 0x37, 0xd1, 0xfc, 0x97, 0x39, 0x69, 0x8b, 0x08, + 0x61, 0xa6, 0x10, 0xea, 0x15, 0x08, 0xc5, 0x2e, 0x42, 0x5a, 0xf3, 0xf6, 0x5f, 0x37, 0xf6, 0xf4, + 0x13, 0x00, 0x00, 0xff, 0xff, 0x9e, 0x57, 0xc9, 0x4e, 0x35, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_status.pb.go new file mode 100644 index 0000000..8cb3155 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_criterion_status.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_criterion_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of an AdGroupCriterion. +type AdGroupCriterionStatusEnum_AdGroupCriterionStatus int32 + +const ( + // No value has been specified. + AdGroupCriterionStatusEnum_UNSPECIFIED AdGroupCriterionStatusEnum_AdGroupCriterionStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdGroupCriterionStatusEnum_UNKNOWN AdGroupCriterionStatusEnum_AdGroupCriterionStatus = 1 + // The ad group criterion is enabled. + AdGroupCriterionStatusEnum_ENABLED AdGroupCriterionStatusEnum_AdGroupCriterionStatus = 2 + // The ad group criterion is paused. + AdGroupCriterionStatusEnum_PAUSED AdGroupCriterionStatusEnum_AdGroupCriterionStatus = 3 + // The ad group criterion is removed. 
+ AdGroupCriterionStatusEnum_REMOVED AdGroupCriterionStatusEnum_AdGroupCriterionStatus = 4 +) + +var AdGroupCriterionStatusEnum_AdGroupCriterionStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "PAUSED", + 4: "REMOVED", +} +var AdGroupCriterionStatusEnum_AdGroupCriterionStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "PAUSED": 3, + "REMOVED": 4, +} + +func (x AdGroupCriterionStatusEnum_AdGroupCriterionStatus) String() string { + return proto.EnumName(AdGroupCriterionStatusEnum_AdGroupCriterionStatus_name, int32(x)) +} +func (AdGroupCriterionStatusEnum_AdGroupCriterionStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_status_38c725642d0e60d1, []int{0, 0} +} + +// Message describing AdGroupCriterion statuses. +type AdGroupCriterionStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionStatusEnum) Reset() { *m = AdGroupCriterionStatusEnum{} } +func (m *AdGroupCriterionStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionStatusEnum) ProtoMessage() {} +func (*AdGroupCriterionStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_status_38c725642d0e60d1, []int{0} +} +func (m *AdGroupCriterionStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionStatusEnum.Unmarshal(m, b) +} +func (m *AdGroupCriterionStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionStatusEnum.Merge(dst, src) +} +func (m *AdGroupCriterionStatusEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionStatusEnum.Size(m) +} +func (m *AdGroupCriterionStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupCriterionStatusEnum)(nil), "google.ads.googleads.v1.enums.AdGroupCriterionStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupCriterionStatusEnum_AdGroupCriterionStatus", AdGroupCriterionStatusEnum_AdGroupCriterionStatus_name, AdGroupCriterionStatusEnum_AdGroupCriterionStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_criterion_status.proto", fileDescriptor_ad_group_criterion_status_38c725642d0e60d1) +} + +var fileDescriptor_ad_group_criterion_status_38c725642d0e60d1 = []byte{ + // 322 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4b, 0xf3, 0x30, + 0x1c, 0xc6, 0xdf, 0x75, 0x2f, 0x13, 0xb2, 0x83, 0xa5, 0x07, 0x0f, 0xd3, 0x1d, 0xb6, 0x0f, 0x90, + 0x52, 0xbc, 0x45, 0x3c, 0xa4, 0x6b, 0x1c, 0x43, 0xed, 0x8a, 0x63, 0x15, 0xa4, 0x30, 0xe2, 0x52, + 0x42, 0x61, 0x4b, 0x4a, 0x93, 0xee, 0xe0, 0xc7, 0xf1, 0xe8, 0x47, 0xf1, 0xa3, 0x78, 0xf2, 0x23, + 0x48, 0xd2, 0xb5, 0xa7, 0xe9, 0x25, 0x3c, 0xc9, 0xf3, 0x7f, 0x7e, 0x3c, 0xf9, 0x83, 0x5b, 0x2e, + 0x25, 0xdf, 0xe5, 0x3e, 0x65, 0xca, 0x6f, 0xa4, 0x51, 0x87, 0xc0, 0xcf, 0x45, 0xbd, 0x57, 0x3e, + 0x65, 0x1b, 0x5e, 0xc9, 0xba, 0xdc, 0x6c, 0xab, 0x42, 0xe7, 0x55, 0x21, 0xc5, 0x46, 0x69, 0xaa, + 0x6b, 0x05, 0xcb, 0x4a, 0x6a, 0xe9, 0x8d, 0x9b, 0x0c, 0xa4, 0x4c, 0xc1, 0x2e, 
0x0e, 0x0f, 0x01, + 0xb4, 0xf1, 0xd1, 0x55, 0x4b, 0x2f, 0x0b, 0x9f, 0x0a, 0x21, 0x35, 0xd5, 0x85, 0x14, 0xc7, 0xf0, + 0xf4, 0x0d, 0x8c, 0x30, 0x9b, 0x1b, 0xfc, 0xac, 0xa5, 0xaf, 0x2c, 0x9c, 0x88, 0x7a, 0x3f, 0xcd, + 0xc0, 0xc5, 0x69, 0xd7, 0x3b, 0x07, 0xc3, 0x75, 0xbc, 0x4a, 0xc8, 0x6c, 0x71, 0xb7, 0x20, 0x91, + 0xfb, 0xcf, 0x1b, 0x82, 0xb3, 0x75, 0x7c, 0x1f, 0x2f, 0x9f, 0x63, 0xb7, 0x67, 0x2e, 0x24, 0xc6, + 0xe1, 0x03, 0x89, 0x5c, 0xc7, 0x03, 0x60, 0x90, 0xe0, 0xf5, 0x8a, 0x44, 0x6e, 0xdf, 0x18, 0x4f, + 0xe4, 0x71, 0x99, 0x92, 0xc8, 0xfd, 0x1f, 0x7e, 0xf7, 0xc0, 0x64, 0x2b, 0xf7, 0xf0, 0xcf, 0xfe, + 0xe1, 0xe5, 0xe9, 0x06, 0x89, 0xa9, 0x9f, 0xf4, 0x5e, 0xc2, 0x63, 0x9a, 0xcb, 0x1d, 0x15, 0x1c, + 0xca, 0x8a, 0xfb, 0x3c, 0x17, 0xf6, 0x73, 0xed, 0x32, 0xcb, 0x42, 0xfd, 0xb2, 0xdb, 0x1b, 0x7b, + 0xbe, 0x3b, 0xfd, 0x39, 0xc6, 0x1f, 0xce, 0x78, 0xde, 0xa0, 0x30, 0x53, 0xb0, 0x91, 0x46, 0xa5, + 0x01, 0x34, 0xbb, 0x50, 0x9f, 0xad, 0x9f, 0x61, 0xa6, 0xb2, 0xce, 0xcf, 0xd2, 0x20, 0xb3, 0xfe, + 0x97, 0x33, 0x69, 0x1e, 0x11, 0xc2, 0x4c, 0x21, 0xd4, 0x4d, 0x20, 0x94, 0x06, 0x08, 0xd9, 0x99, + 0xd7, 0x81, 0x2d, 0x76, 0xfd, 0x13, 0x00, 0x00, 0xff, 0xff, 0xa2, 0xfe, 0x68, 0x64, 0xf3, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_status.pb.go new file mode 100644 index 0000000..6d668ab --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_status.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of an ad group. +type AdGroupStatusEnum_AdGroupStatus int32 + +const ( + // The status has not been specified. + AdGroupStatusEnum_UNSPECIFIED AdGroupStatusEnum_AdGroupStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdGroupStatusEnum_UNKNOWN AdGroupStatusEnum_AdGroupStatus = 1 + // The ad group is enabled. + AdGroupStatusEnum_ENABLED AdGroupStatusEnum_AdGroupStatus = 2 + // The ad group is paused. + AdGroupStatusEnum_PAUSED AdGroupStatusEnum_AdGroupStatus = 3 + // The ad group is removed. 
+ AdGroupStatusEnum_REMOVED AdGroupStatusEnum_AdGroupStatus = 4 +) + +var AdGroupStatusEnum_AdGroupStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "PAUSED", + 4: "REMOVED", +} +var AdGroupStatusEnum_AdGroupStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "PAUSED": 3, + "REMOVED": 4, +} + +func (x AdGroupStatusEnum_AdGroupStatus) String() string { + return proto.EnumName(AdGroupStatusEnum_AdGroupStatus_name, int32(x)) +} +func (AdGroupStatusEnum_AdGroupStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_status_de784cd4ed39993a, []int{0, 0} +} + +// Container for enum describing possible statuses of an ad group. +type AdGroupStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupStatusEnum) Reset() { *m = AdGroupStatusEnum{} } +func (m *AdGroupStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupStatusEnum) ProtoMessage() {} +func (*AdGroupStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_status_de784cd4ed39993a, []int{0} +} +func (m *AdGroupStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupStatusEnum.Unmarshal(m, b) +} +func (m *AdGroupStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupStatusEnum.Merge(dst, src) +} +func (m *AdGroupStatusEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupStatusEnum.Size(m) +} +func (m *AdGroupStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupStatusEnum)(nil), "google.ads.googleads.v1.enums.AdGroupStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupStatusEnum_AdGroupStatus", AdGroupStatusEnum_AdGroupStatus_name, AdGroupStatusEnum_AdGroupStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_status.proto", fileDescriptor_ad_group_status_de784cd4ed39993a) +} + +var fileDescriptor_ad_group_status_de784cd4ed39993a = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x9d, 0x4c, 0xc8, 0x10, 0x6b, 0x8f, 0xe2, 0x0e, 0xdb, 0x03, 0x24, 0x94, 0xdd, 0xe2, + 0x29, 0xb5, 0x71, 0x0c, 0xb5, 0x2b, 0x96, 0x55, 0x90, 0xc2, 0x88, 0x66, 0xc4, 0xc2, 0x9a, 0x94, + 0xa6, 0xdd, 0x03, 0x79, 0xf4, 0x51, 0x7c, 0x11, 0xc1, 0xa7, 0x90, 0xa4, 0x6b, 0x61, 0x07, 0xbd, + 0x94, 0x5f, 0xbf, 0xdf, 0x9f, 0xfc, 0xbe, 0x0f, 0xcc, 0x85, 0x52, 0x62, 0xb7, 0x45, 0x8c, 0x6b, + 0xd4, 0x42, 0x83, 0xf6, 0x3e, 0xda, 0xca, 0xa6, 0xd0, 0x88, 0xf1, 0x8d, 0xa8, 0x54, 0x53, 0x6e, + 0x74, 0xcd, 0xea, 0x46, 0xc3, 0xb2, 0x52, 0xb5, 0xf2, 0x26, 0xad, 0x12, 0x32, 0xae, 0x61, 0x6f, + 0x82, 0x7b, 0x1f, 0x5a, 0xd3, 0xd5, 0x75, 0x97, 0x59, 0xe6, 0x88, 0x49, 0xa9, 0x6a, 0x56, 0xe7, + 0x4a, 0x1e, 0xcc, 0xb3, 0x77, 0x70, 0x49, 0xf8, 0xc2, 0x84, 0x26, 0x36, 0x93, 0xca, 0xa6, 0x98, + 0x25, 0xe0, 0xfc, 0x68, 0xe8, 0x5d, 0x80, 0xf1, 0x3a, 0x4a, 0x62, 0x7a, 0xbb, 0xbc, 0x5b, 0xd2, + 0xd0, 0x3d, 0xf1, 0xc6, 0xe0, 0x6c, 0x1d, 0xdd, 0x47, 0xab, 0xe7, 0xc8, 0x1d, 0x98, 0x1f, 0x1a, + 0x91, 0xe0, 0x81, 0x86, 0xae, 0xe3, 0x01, 0x30, 0x8a, 0xc9, 
0x3a, 0xa1, 0xa1, 0x3b, 0x34, 0xc4, + 0x13, 0x7d, 0x5c, 0xa5, 0x34, 0x74, 0x4f, 0x83, 0xef, 0x01, 0x98, 0xbe, 0xa9, 0x02, 0xfe, 0xdb, + 0x36, 0xf0, 0x8e, 0x1e, 0x8e, 0x4d, 0xc7, 0x78, 0xf0, 0x12, 0x1c, 0x4c, 0x42, 0xed, 0x98, 0x14, + 0x50, 0x55, 0x02, 0x89, 0xad, 0xb4, 0x1b, 0x74, 0x77, 0x2a, 0x73, 0xfd, 0xc7, 0xd9, 0x6e, 0xec, + 0xf7, 0xc3, 0x19, 0x2e, 0x08, 0xf9, 0x74, 0x26, 0x8b, 0x36, 0x8a, 0x70, 0x0d, 0x5b, 0x68, 0x50, + 0xea, 0x43, 0xb3, 0xb9, 0xfe, 0xea, 0xf8, 0x8c, 0x70, 0x9d, 0xf5, 0x7c, 0x96, 0xfa, 0x99, 0xe5, + 0x7f, 0x9c, 0x69, 0x3b, 0xc4, 0x98, 0x70, 0x8d, 0x71, 0xaf, 0xc0, 0x38, 0xf5, 0x31, 0xb6, 0x9a, + 0xd7, 0x91, 0x2d, 0x36, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x23, 0x2a, 0x6d, 0x08, 0xce, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_type.pb.go new file mode 100644 index 0000000..d974e72 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_group_type.pb.go @@ -0,0 +1,166 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_group_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible types of an ad group. +type AdGroupTypeEnum_AdGroupType int32 + +const ( + // The type has not been specified. + AdGroupTypeEnum_UNSPECIFIED AdGroupTypeEnum_AdGroupType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdGroupTypeEnum_UNKNOWN AdGroupTypeEnum_AdGroupType = 1 + // The default ad group type for Search campaigns. + AdGroupTypeEnum_SEARCH_STANDARD AdGroupTypeEnum_AdGroupType = 2 + // The default ad group type for Display campaigns. + AdGroupTypeEnum_DISPLAY_STANDARD AdGroupTypeEnum_AdGroupType = 3 + // The ad group type for Shopping campaigns serving standard product ads. + AdGroupTypeEnum_SHOPPING_PRODUCT_ADS AdGroupTypeEnum_AdGroupType = 4 + // The default ad group type for Hotel campaigns. + AdGroupTypeEnum_HOTEL_ADS AdGroupTypeEnum_AdGroupType = 6 + // The type for ad groups in Smart Shopping campaigns. + AdGroupTypeEnum_SHOPPING_SMART_ADS AdGroupTypeEnum_AdGroupType = 7 + // Short unskippable in-stream video ads. + AdGroupTypeEnum_VIDEO_BUMPER AdGroupTypeEnum_AdGroupType = 8 + // TrueView (skippable) in-stream video ads. + AdGroupTypeEnum_VIDEO_TRUE_VIEW_IN_STREAM AdGroupTypeEnum_AdGroupType = 9 + // TrueView in-display video ads. + AdGroupTypeEnum_VIDEO_TRUE_VIEW_IN_DISPLAY AdGroupTypeEnum_AdGroupType = 10 + // Unskippable in-stream video ads. + AdGroupTypeEnum_VIDEO_NON_SKIPPABLE_IN_STREAM AdGroupTypeEnum_AdGroupType = 11 + // Outstream video ads. + AdGroupTypeEnum_VIDEO_OUTSTREAM AdGroupTypeEnum_AdGroupType = 12 + // Ad group type for Dynamic Search Ads ad groups. 
+ AdGroupTypeEnum_SEARCH_DYNAMIC_ADS AdGroupTypeEnum_AdGroupType = 13 +) + +var AdGroupTypeEnum_AdGroupType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEARCH_STANDARD", + 3: "DISPLAY_STANDARD", + 4: "SHOPPING_PRODUCT_ADS", + 6: "HOTEL_ADS", + 7: "SHOPPING_SMART_ADS", + 8: "VIDEO_BUMPER", + 9: "VIDEO_TRUE_VIEW_IN_STREAM", + 10: "VIDEO_TRUE_VIEW_IN_DISPLAY", + 11: "VIDEO_NON_SKIPPABLE_IN_STREAM", + 12: "VIDEO_OUTSTREAM", + 13: "SEARCH_DYNAMIC_ADS", +} +var AdGroupTypeEnum_AdGroupType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEARCH_STANDARD": 2, + "DISPLAY_STANDARD": 3, + "SHOPPING_PRODUCT_ADS": 4, + "HOTEL_ADS": 6, + "SHOPPING_SMART_ADS": 7, + "VIDEO_BUMPER": 8, + "VIDEO_TRUE_VIEW_IN_STREAM": 9, + "VIDEO_TRUE_VIEW_IN_DISPLAY": 10, + "VIDEO_NON_SKIPPABLE_IN_STREAM": 11, + "VIDEO_OUTSTREAM": 12, + "SEARCH_DYNAMIC_ADS": 13, +} + +func (x AdGroupTypeEnum_AdGroupType) String() string { + return proto.EnumName(AdGroupTypeEnum_AdGroupType_name, int32(x)) +} +func (AdGroupTypeEnum_AdGroupType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_type_8038a67ed0ccfa9a, []int{0, 0} +} + +// Defines types of an ad group, specific to a particular campaign channel +// type. This type drives validations that restrict which entities can be +// added to the ad group. +type AdGroupTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupTypeEnum) Reset() { *m = AdGroupTypeEnum{} } +func (m *AdGroupTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupTypeEnum) ProtoMessage() {} +func (*AdGroupTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_type_8038a67ed0ccfa9a, []int{0} +} +func (m *AdGroupTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupTypeEnum.Unmarshal(m, b) +} +func (m *AdGroupTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupTypeEnum.Merge(dst, src) +} +func (m *AdGroupTypeEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupTypeEnum.Size(m) +} +func (m *AdGroupTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupTypeEnum)(nil), "google.ads.googleads.v1.enums.AdGroupTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdGroupTypeEnum_AdGroupType", AdGroupTypeEnum_AdGroupType_name, AdGroupTypeEnum_AdGroupType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_group_type.proto", fileDescriptor_ad_group_type_8038a67ed0ccfa9a) +} + +var fileDescriptor_ad_group_type_8038a67ed0ccfa9a = []byte{ + // 448 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xd1, 0x6a, 0xdb, 0x3c, + 0x18, 0xfd, 0xe3, 0xfe, 0xb4, 0xab, 0xd2, 0x12, 0xa1, 0x95, 0xb1, 0x95, 0x65, 0x90, 0x3e, 0x80, + 0x8d, 0xd9, 0x9d, 0x77, 0x25, 0xc7, 0x5a, 0x22, 0x9a, 0xc8, 0xc2, 0xb2, 0x5d, 0x3a, 0x02, 0xc2, + 0x9b, 0x8d, 0x09, 0x34, 0x96, 0x89, 0x92, 0x42, 0x5f, 0x67, 0x97, 0x7b, 0x82, 0x3d, 0xc3, 0xd8, + 0x6b, 0xec, 0x66, 0x4f, 0x31, 0x2c, 0x25, 0x59, 0x2e, 0xb6, 0xdd, 0x98, 0xcf, 0xe7, 0x9c, 0xef, + 0x70, 0xa4, 0x23, 0xe0, 0xd7, 0x4a, 0xd5, 0x0f, 0x95, 0x57, 0x94, 0xda, 0xb3, 0x63, 0x37, 
0x3d, + 0xfa, 0x5e, 0xd5, 0x6c, 0x57, 0xda, 0x2b, 0x4a, 0x59, 0xaf, 0xd5, 0xb6, 0x95, 0x9b, 0xa7, 0xb6, + 0x72, 0xdb, 0xb5, 0xda, 0x28, 0x34, 0xb4, 0x3a, 0xb7, 0x28, 0xb5, 0x7b, 0x58, 0x71, 0x1f, 0x7d, + 0xd7, 0xac, 0x5c, 0xbf, 0xde, 0x3b, 0xb6, 0x4b, 0xaf, 0x68, 0x1a, 0xb5, 0x29, 0x36, 0x4b, 0xd5, + 0x68, 0xbb, 0x7c, 0xf3, 0xdd, 0x01, 0x03, 0x5c, 0x4e, 0x3a, 0xcf, 0xf4, 0xa9, 0xad, 0x48, 0xb3, + 0x5d, 0xdd, 0x7c, 0x75, 0x40, 0xff, 0x08, 0x43, 0x03, 0xd0, 0xcf, 0x98, 0xe0, 0x64, 0x4c, 0xdf, + 0x53, 0x12, 0xc1, 0xff, 0x50, 0x1f, 0x9c, 0x65, 0xec, 0x96, 0xc5, 0x77, 0x0c, 0xf6, 0xd0, 0x73, + 0x30, 0x10, 0x04, 0x27, 0xe3, 0xa9, 0x14, 0x29, 0x66, 0x11, 0x4e, 0x22, 0xe8, 0xa0, 0x2b, 0x00, + 0x23, 0x2a, 0xf8, 0x0c, 0xdf, 0xff, 0x46, 0x4f, 0xd0, 0x4b, 0x70, 0x25, 0xa6, 0x31, 0xe7, 0x94, + 0x4d, 0x24, 0x4f, 0xe2, 0x28, 0x1b, 0xa7, 0x12, 0x47, 0x02, 0xfe, 0x8f, 0x2e, 0xc1, 0xf9, 0x34, + 0x4e, 0xc9, 0xcc, 0xfc, 0x9e, 0xa2, 0x17, 0x00, 0x1d, 0x84, 0x62, 0x8e, 0x13, 0x2b, 0x3b, 0x43, + 0x10, 0x5c, 0xe4, 0x34, 0x22, 0xb1, 0x0c, 0xb3, 0x39, 0x27, 0x09, 0x7c, 0x86, 0x86, 0xe0, 0x95, + 0x45, 0xd2, 0x24, 0x23, 0x32, 0xa7, 0xe4, 0x4e, 0x52, 0x26, 0x45, 0x9a, 0x10, 0x3c, 0x87, 0xe7, + 0xe8, 0x0d, 0xb8, 0xfe, 0x03, 0xbd, 0x8b, 0x06, 0x01, 0x1a, 0x81, 0xa1, 0xe5, 0x59, 0xcc, 0xa4, + 0xb8, 0xa5, 0x9c, 0xe3, 0x70, 0x46, 0x8e, 0x2c, 0xfa, 0xdd, 0xf9, 0xac, 0x24, 0xce, 0xd2, 0x1d, + 0x78, 0x61, 0x02, 0xda, 0x43, 0x47, 0xf7, 0x0c, 0xcf, 0xe9, 0xd8, 0x04, 0xbc, 0x0c, 0x7f, 0xf4, + 0xc0, 0xe8, 0x93, 0x5a, 0xb9, 0xff, 0xac, 0x24, 0x84, 0x47, 0xb7, 0xcb, 0xbb, 0x1a, 0x78, 0xef, + 0x43, 0xb8, 0x5b, 0xa9, 0xd5, 0x43, 0xd1, 0xd4, 0xae, 0x5a, 0xd7, 0x5e, 0x5d, 0x35, 0xa6, 0xa4, + 0xfd, 0x43, 0x68, 0x97, 0xfa, 0x2f, 0xef, 0xe2, 0x9d, 0xf9, 0x7e, 0x76, 0x4e, 0x26, 0x18, 0x7f, + 0x71, 0x86, 0x13, 0x6b, 0x85, 0x4b, 0xed, 0xda, 0xb1, 0x9b, 0x72, 0xdf, 0xed, 0xda, 0xd5, 0xdf, + 0xf6, 0xfc, 0x02, 0x97, 0x7a, 0x71, 0xe0, 0x17, 0xb9, 0xbf, 0x30, 0xfc, 0x4f, 0x67, 0x64, 0xc1, + 0x20, 0xc0, 0xa5, 0x0e, 0x82, 0x83, 0x22, 0x08, 0x72, 0x3f, 0x08, 0x8c, 0xe6, 0xe3, 0xa9, 0x09, + 0xf6, 0xf6, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0x92, 0x7b, 0x50, 0x3f, 0xaf, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_network_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_network_type.pb.go new file mode 100644 index 0000000..f1432ba --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_network_type.pb.go @@ -0,0 +1,137 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_network_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates Google Ads network types. +type AdNetworkTypeEnum_AdNetworkType int32 + +const ( + // Not specified. 
+ AdNetworkTypeEnum_UNSPECIFIED AdNetworkTypeEnum_AdNetworkType = 0 + // The value is unknown in this version. + AdNetworkTypeEnum_UNKNOWN AdNetworkTypeEnum_AdNetworkType = 1 + // Google search. + AdNetworkTypeEnum_SEARCH AdNetworkTypeEnum_AdNetworkType = 2 + // Search partners. + AdNetworkTypeEnum_SEARCH_PARTNERS AdNetworkTypeEnum_AdNetworkType = 3 + // Display Network. + AdNetworkTypeEnum_CONTENT AdNetworkTypeEnum_AdNetworkType = 4 + // YouTube Search. + AdNetworkTypeEnum_YOUTUBE_SEARCH AdNetworkTypeEnum_AdNetworkType = 5 + // YouTube Videos + AdNetworkTypeEnum_YOUTUBE_WATCH AdNetworkTypeEnum_AdNetworkType = 6 + // Cross-network. + AdNetworkTypeEnum_MIXED AdNetworkTypeEnum_AdNetworkType = 7 +) + +var AdNetworkTypeEnum_AdNetworkType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEARCH", + 3: "SEARCH_PARTNERS", + 4: "CONTENT", + 5: "YOUTUBE_SEARCH", + 6: "YOUTUBE_WATCH", + 7: "MIXED", +} +var AdNetworkTypeEnum_AdNetworkType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEARCH": 2, + "SEARCH_PARTNERS": 3, + "CONTENT": 4, + "YOUTUBE_SEARCH": 5, + "YOUTUBE_WATCH": 6, + "MIXED": 7, +} + +func (x AdNetworkTypeEnum_AdNetworkType) String() string { + return proto.EnumName(AdNetworkTypeEnum_AdNetworkType_name, int32(x)) +} +func (AdNetworkTypeEnum_AdNetworkType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_network_type_1a1e7d9b181d8408, []int{0, 0} +} + +// Container for enumeration of Google Ads network types. +type AdNetworkTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdNetworkTypeEnum) Reset() { *m = AdNetworkTypeEnum{} } +func (m *AdNetworkTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AdNetworkTypeEnum) ProtoMessage() {} +func (*AdNetworkTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_network_type_1a1e7d9b181d8408, []int{0} +} +func (m *AdNetworkTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdNetworkTypeEnum.Unmarshal(m, b) +} +func (m *AdNetworkTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdNetworkTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AdNetworkTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdNetworkTypeEnum.Merge(dst, src) +} +func (m *AdNetworkTypeEnum) XXX_Size() int { + return xxx_messageInfo_AdNetworkTypeEnum.Size(m) +} +func (m *AdNetworkTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdNetworkTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdNetworkTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdNetworkTypeEnum)(nil), "google.ads.googleads.v1.enums.AdNetworkTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdNetworkTypeEnum_AdNetworkType", AdNetworkTypeEnum_AdNetworkType_name, AdNetworkTypeEnum_AdNetworkType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_network_type.proto", fileDescriptor_ad_network_type_1a1e7d9b181d8408) +} + +var fileDescriptor_ad_network_type_1a1e7d9b181d8408 = []byte{ + // 356 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x6a, 0xe2, 0x40, + 0x1c, 0xde, 0xc4, 0x55, 0xd9, 0x11, 0xd7, 0x38, 0x7b, 0x5b, 0xd6, 0x83, 0x3e, 0xc0, 0x84, 0xe0, + 0x6d, 0xf6, 0x34, 0x89, 0xb3, 0x2a, 0xcb, 0x8e, 0x41, 0x13, 0xdd, 0x96, 0x40, 0x48, 0x9b, 0x10, + 0xa4, 0x3a, 0x13, 0x9c, 0x68, 0xf1, 0x21, 0xfa, 0x12, 0xed, 0xad, 0x8f, 0xd2, 
0x17, 0x29, 0xf4, + 0x29, 0x4a, 0x32, 0x46, 0xf0, 0xd0, 0x5e, 0x86, 0x8f, 0xdf, 0xf7, 0x87, 0xf9, 0x3e, 0x30, 0x4c, + 0x85, 0x48, 0x37, 0x89, 0x19, 0xc5, 0xd2, 0x54, 0xb0, 0x40, 0x07, 0xcb, 0x4c, 0xf8, 0x7e, 0x2b, + 0xcd, 0x28, 0x0e, 0x79, 0x92, 0xdf, 0x8b, 0xdd, 0x5d, 0x98, 0x1f, 0xb3, 0x04, 0x65, 0x3b, 0x91, + 0x0b, 0xd8, 0x53, 0x4a, 0x14, 0xc5, 0x12, 0x9d, 0x4d, 0xe8, 0x60, 0xa1, 0xd2, 0xf4, 0xf3, 0x57, + 0x95, 0x99, 0xad, 0xcd, 0x88, 0x73, 0x91, 0x47, 0xf9, 0x5a, 0x70, 0xa9, 0xcc, 0x83, 0x27, 0x0d, + 0x74, 0x49, 0xcc, 0x54, 0xaa, 0x77, 0xcc, 0x12, 0xca, 0xf7, 0xdb, 0xc1, 0x83, 0x06, 0xda, 0x17, + 0x57, 0xd8, 0x01, 0x2d, 0x9f, 0x2d, 0x5c, 0xea, 0x4c, 0xff, 0x4c, 0xe9, 0xc8, 0xf8, 0x02, 0x5b, + 0xa0, 0xe9, 0xb3, 0xbf, 0x6c, 0xb6, 0x62, 0x86, 0x06, 0x01, 0x68, 0x2c, 0x28, 0x99, 0x3b, 0x13, + 0x43, 0x87, 0x3f, 0x40, 0x47, 0xe1, 0xd0, 0x25, 0x73, 0x8f, 0xd1, 0xf9, 0xc2, 0xa8, 0x15, 0x6a, + 0x67, 0xc6, 0x3c, 0xca, 0x3c, 0xe3, 0x2b, 0x84, 0xe0, 0xfb, 0xd5, 0xcc, 0xf7, 0x7c, 0x9b, 0x86, + 0x27, 0x57, 0x1d, 0x76, 0x41, 0xbb, 0xba, 0xad, 0x88, 0xe7, 0x4c, 0x8c, 0x06, 0xfc, 0x06, 0xea, + 0xff, 0xa6, 0xff, 0xe9, 0xc8, 0x68, 0xda, 0xaf, 0x1a, 0xe8, 0xdf, 0x8a, 0x2d, 0xfa, 0xb4, 0xa9, + 0x0d, 0x2f, 0xbe, 0xec, 0x16, 0xfd, 0x5c, 0xed, 0xda, 0x3e, 0x99, 0x52, 0xb1, 0x89, 0x78, 0x8a, + 0xc4, 0x2e, 0x35, 0xd3, 0x84, 0x97, 0xed, 0xab, 0x8d, 0xb3, 0xb5, 0xfc, 0x60, 0xf2, 0xdf, 0xe5, + 0xfb, 0xa8, 0xd7, 0xc6, 0x84, 0x3c, 0xeb, 0xbd, 0xb1, 0x8a, 0x22, 0xb1, 0x44, 0x0a, 0x16, 0x68, + 0x69, 0xa1, 0x62, 0x34, 0xf9, 0x52, 0xf1, 0x01, 0x89, 0x65, 0x70, 0xe6, 0x83, 0xa5, 0x15, 0x94, + 0xfc, 0x9b, 0xde, 0x57, 0x47, 0x8c, 0x49, 0x2c, 0x31, 0x3e, 0x2b, 0x30, 0x5e, 0x5a, 0x18, 0x97, + 0x9a, 0x9b, 0x46, 0xf9, 0xb1, 0xe1, 0x7b, 0x00, 0x00, 0x00, 0xff, 0xff, 0x16, 0xfc, 0x47, 0xc5, + 0x0a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_serving_optimization_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_serving_optimization_status.pb.go new file mode 100644 index 0000000..e6ee70c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_serving_optimization_status.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_serving_optimization_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible serving statuses. +type AdServingOptimizationStatusEnum_AdServingOptimizationStatus int32 + +const ( + // No value has been specified. + AdServingOptimizationStatusEnum_UNSPECIFIED AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. 
+ AdServingOptimizationStatusEnum_UNKNOWN AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 1 + // Ad serving is optimized based on CTR for the campaign. + AdServingOptimizationStatusEnum_OPTIMIZE AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 2 + // Ad serving is optimized based on CTR * Conversion for the campaign. If + // the campaign is not in the conversion optimizer bidding strategy, it will + // default to OPTIMIZED. + AdServingOptimizationStatusEnum_CONVERSION_OPTIMIZE AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 3 + // Ads are rotated evenly for 90 days, then optimized for clicks. + AdServingOptimizationStatusEnum_ROTATE AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 4 + // Show lower performing ads more evenly with higher performing ads, and do + // not optimize. + AdServingOptimizationStatusEnum_ROTATE_INDEFINITELY AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 5 + // Ad serving optimization status is not available. + AdServingOptimizationStatusEnum_UNAVAILABLE AdServingOptimizationStatusEnum_AdServingOptimizationStatus = 6 +) + +var AdServingOptimizationStatusEnum_AdServingOptimizationStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPTIMIZE", + 3: "CONVERSION_OPTIMIZE", + 4: "ROTATE", + 5: "ROTATE_INDEFINITELY", + 6: "UNAVAILABLE", +} +var AdServingOptimizationStatusEnum_AdServingOptimizationStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPTIMIZE": 2, + "CONVERSION_OPTIMIZE": 3, + "ROTATE": 4, + "ROTATE_INDEFINITELY": 5, + "UNAVAILABLE": 6, +} + +func (x AdServingOptimizationStatusEnum_AdServingOptimizationStatus) String() string { + return proto.EnumName(AdServingOptimizationStatusEnum_AdServingOptimizationStatus_name, int32(x)) +} +func (AdServingOptimizationStatusEnum_AdServingOptimizationStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_serving_optimization_status_41633d967f063778, []int{0, 0} +} + +// Possible ad serving statuses of a campaign. 
+type AdServingOptimizationStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdServingOptimizationStatusEnum) Reset() { *m = AdServingOptimizationStatusEnum{} } +func (m *AdServingOptimizationStatusEnum) String() string { return proto.CompactTextString(m) } +func (*AdServingOptimizationStatusEnum) ProtoMessage() {} +func (*AdServingOptimizationStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_serving_optimization_status_41633d967f063778, []int{0} +} +func (m *AdServingOptimizationStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdServingOptimizationStatusEnum.Unmarshal(m, b) +} +func (m *AdServingOptimizationStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdServingOptimizationStatusEnum.Marshal(b, m, deterministic) +} +func (dst *AdServingOptimizationStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdServingOptimizationStatusEnum.Merge(dst, src) +} +func (m *AdServingOptimizationStatusEnum) XXX_Size() int { + return xxx_messageInfo_AdServingOptimizationStatusEnum.Size(m) +} +func (m *AdServingOptimizationStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdServingOptimizationStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdServingOptimizationStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdServingOptimizationStatusEnum)(nil), "google.ads.googleads.v1.enums.AdServingOptimizationStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdServingOptimizationStatusEnum_AdServingOptimizationStatus", AdServingOptimizationStatusEnum_AdServingOptimizationStatus_name, AdServingOptimizationStatusEnum_AdServingOptimizationStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_serving_optimization_status.proto", fileDescriptor_ad_serving_optimization_status_41633d967f063778) +} + +var fileDescriptor_ad_serving_optimization_status_41633d967f063778 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x8a, 0xdb, 0x30, + 0x14, 0xac, 0x9d, 0x36, 0x2d, 0x4a, 0xa1, 0xc6, 0x3d, 0x14, 0xda, 0x86, 0x36, 0xf9, 0x00, 0x19, + 0xd3, 0x9b, 0x7a, 0x92, 0x13, 0x25, 0x88, 0xa6, 0xb2, 0x89, 0x1d, 0x97, 0x06, 0x83, 0x71, 0x6b, + 0x63, 0x0c, 0xb1, 0x64, 0x22, 0x27, 0x87, 0xfd, 0x92, 0x3d, 0xef, 0x71, 0xbf, 0x61, 0xbf, 0x60, + 0x3f, 0x65, 0xef, 0x7b, 0x5f, 0x2c, 0x25, 0x66, 0x2f, 0x9b, 0x8b, 0x18, 0x34, 0xf3, 0x66, 0x1e, + 0xf3, 0x80, 0x57, 0x0a, 0x51, 0xee, 0x0a, 0x27, 0xcb, 0xa5, 0xa3, 0x61, 0x87, 0x8e, 0xae, 0x53, + 0xf0, 0x43, 0x2d, 0x9d, 0x2c, 0x4f, 0x65, 0xb1, 0x3f, 0x56, 0xbc, 0x4c, 0x45, 0xd3, 0x56, 0x75, + 0x75, 0x95, 0xb5, 0x95, 0xe0, 0xa9, 0x6c, 0xb3, 0xf6, 0x20, 0x61, 0xb3, 0x17, 0xad, 0xb0, 0xc7, + 0x7a, 0x10, 0x66, 0xb9, 0x84, 0xbd, 0x07, 0x3c, 0xba, 0x50, 0x79, 0x7c, 0xfe, 0x7a, 0x8e, 0x68, + 0x2a, 0x27, 0xe3, 0x5c, 0xb4, 0xca, 0xe2, 0x34, 0x3c, 0xbd, 0x33, 0xc0, 0x37, 0x9c, 0x87, 0x3a, + 0xc4, 0x7f, 0x96, 0x11, 0xaa, 0x08, 0xc2, 0x0f, 0xf5, 0xf4, 0xda, 0x00, 0x5f, 0x2e, 0x68, 0xec, + 0x0f, 0x60, 0xb4, 0x61, 0x61, 0x40, 0x66, 0x74, 0x41, 0xc9, 0xdc, 0x7a, 0x65, 0x8f, 0xc0, 0xdb, + 0x0d, 0xfb, 0xc5, 0xfc, 0x3f, 0xcc, 0x32, 0xec, 0xf7, 0xe0, 0x9d, 0x1f, 0x44, 0xf4, 0x37, 0xdd, + 0x12, 0xcb, 0xb4, 0x3f, 0x81, 0x8f, 0x33, 0x9f, 0xc5, 0x64, 0x1d, 0x52, 0x9f, 0xa5, 0x3d, 0x31, + 0xb0, 0x01, 0x18, 0xae, 0xfd, 0x08, 0x47, 0xc4, 0x7a, 0xdd, 0x89, 0x34, 0x4e, 
0x29, 0x9b, 0x93, + 0x05, 0x65, 0x34, 0x22, 0xab, 0xbf, 0xd6, 0x1b, 0x9d, 0x84, 0x63, 0x4c, 0x57, 0xd8, 0x5b, 0x11, + 0x6b, 0xe8, 0x3d, 0x1a, 0x60, 0xf2, 0x5f, 0xd4, 0xf0, 0x62, 0x05, 0xde, 0xf7, 0x0b, 0xdb, 0x07, + 0x5d, 0x0d, 0x81, 0xb1, 0x3d, 0x5d, 0x02, 0x96, 0x62, 0x97, 0xf1, 0x12, 0x8a, 0x7d, 0xe9, 0x94, + 0x05, 0x57, 0x25, 0x9d, 0x2f, 0xd3, 0x54, 0xf2, 0x85, 0x43, 0xfd, 0x54, 0xef, 0x8d, 0x39, 0x58, + 0x62, 0x7c, 0x6b, 0x8e, 0x97, 0xda, 0x0a, 0xe7, 0x12, 0x6a, 0xd8, 0xa1, 0xd8, 0x85, 0x5d, 0x9b, + 0xf2, 0xfe, 0xcc, 0x27, 0x38, 0x97, 0x49, 0xcf, 0x27, 0xb1, 0x9b, 0x28, 0xfe, 0xc1, 0x9c, 0xe8, + 0x4f, 0x84, 0x70, 0x2e, 0x11, 0xea, 0x15, 0x08, 0xc5, 0x2e, 0x42, 0x4a, 0xf3, 0x6f, 0xa8, 0x16, + 0xfb, 0xf1, 0x14, 0x00, 0x00, 0xff, 0xff, 0xce, 0xde, 0x81, 0xb1, 0x40, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_strength.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_strength.pb.go new file mode 100644 index 0000000..9813475 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_strength.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_strength.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible ad strengths. +type AdStrengthEnum_AdStrength int32 + +const ( + // Not specified. + AdStrengthEnum_UNSPECIFIED AdStrengthEnum_AdStrength = 0 + // Used for return value only. Represents value unknown in this version. + AdStrengthEnum_UNKNOWN AdStrengthEnum_AdStrength = 1 + // The ad strength is currently pending. + AdStrengthEnum_PENDING AdStrengthEnum_AdStrength = 2 + // No ads could be generated. + AdStrengthEnum_NO_ADS AdStrengthEnum_AdStrength = 3 + // Poor strength. + AdStrengthEnum_POOR AdStrengthEnum_AdStrength = 4 + // Average strength. + AdStrengthEnum_AVERAGE AdStrengthEnum_AdStrength = 5 + // Good strength. + AdStrengthEnum_GOOD AdStrengthEnum_AdStrength = 6 + // Excellent strength. + AdStrengthEnum_EXCELLENT AdStrengthEnum_AdStrength = 7 +) + +var AdStrengthEnum_AdStrength_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "NO_ADS", + 4: "POOR", + 5: "AVERAGE", + 6: "GOOD", + 7: "EXCELLENT", +} +var AdStrengthEnum_AdStrength_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "NO_ADS": 3, + "POOR": 4, + "AVERAGE": 5, + "GOOD": 6, + "EXCELLENT": 7, +} + +func (x AdStrengthEnum_AdStrength) String() string { + return proto.EnumName(AdStrengthEnum_AdStrength_name, int32(x)) +} +func (AdStrengthEnum_AdStrength) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_strength_5cb008be111e595d, []int{0, 0} +} + +// Container for enum describing possible ad strengths. 
+type AdStrengthEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdStrengthEnum) Reset() { *m = AdStrengthEnum{} } +func (m *AdStrengthEnum) String() string { return proto.CompactTextString(m) } +func (*AdStrengthEnum) ProtoMessage() {} +func (*AdStrengthEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_strength_5cb008be111e595d, []int{0} +} +func (m *AdStrengthEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdStrengthEnum.Unmarshal(m, b) +} +func (m *AdStrengthEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdStrengthEnum.Marshal(b, m, deterministic) +} +func (dst *AdStrengthEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdStrengthEnum.Merge(dst, src) +} +func (m *AdStrengthEnum) XXX_Size() int { + return xxx_messageInfo_AdStrengthEnum.Size(m) +} +func (m *AdStrengthEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdStrengthEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdStrengthEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdStrengthEnum)(nil), "google.ads.googleads.v1.enums.AdStrengthEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdStrengthEnum_AdStrength", AdStrengthEnum_AdStrength_name, AdStrengthEnum_AdStrength_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_strength.proto", fileDescriptor_ad_strength_5cb008be111e595d) +} + +var fileDescriptor_ad_strength_5cb008be111e595d = []byte{ + // 331 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4e, 0xc2, 0x30, + 0x18, 0x75, 0x03, 0x41, 0x4b, 0x94, 0x66, 0x97, 0x46, 0x2e, 0xe0, 0x01, 0xba, 0x2c, 0xde, 0xd5, + 0xab, 0xc2, 0xea, 0x42, 0x24, 0xdd, 0x02, 0x32, 0x8d, 0x59, 0x42, 0xa6, 0x5d, 0x2a, 0x09, 0xb4, + 0x84, 0x0e, 0xde, 0xc0, 0x17, 0xf1, 0xd2, 0x47, 0xf1, 0x39, 0xbc, 0xf2, 0x29, 0x4c, 0x3b, 0x7e, + 0xae, 0xf4, 0xa6, 0x39, 0xfd, 0xbe, 0x73, 0x4e, 0xce, 0x77, 0x80, 0x2f, 0x94, 0x12, 0x8b, 0xc2, + 0xcf, 0xb9, 0xde, 0x41, 0x83, 0xb6, 0x81, 0x5f, 0xc8, 0xcd, 0x52, 0xfb, 0x39, 0x9f, 0xe9, 0x72, + 0x5d, 0x48, 0x51, 0xbe, 0xa1, 0xd5, 0x5a, 0x95, 0xca, 0xeb, 0x54, 0x2c, 0x94, 0x73, 0x8d, 0x0e, + 0x02, 0xb4, 0x0d, 0x90, 0x15, 0x5c, 0x5d, 0xef, 0xfd, 0x56, 0x73, 0x3f, 0x97, 0x52, 0x95, 0x79, + 0x39, 0x57, 0x52, 0x57, 0xe2, 0xde, 0xbb, 0x03, 0x2e, 0x09, 0x9f, 0xec, 0x1c, 0xa9, 0xdc, 0x2c, + 0x7b, 0x1a, 0x80, 0xe3, 0xc4, 0x6b, 0x83, 0xd6, 0x94, 0x4d, 0x12, 0x3a, 0x18, 0xde, 0x0d, 0x69, + 0x08, 0x4f, 0xbc, 0x16, 0x68, 0x4e, 0xd9, 0x3d, 0x8b, 0x1f, 0x19, 0x74, 0xcc, 0x27, 0xa1, 0x2c, + 0x1c, 0xb2, 0x08, 0xba, 0x1e, 0x00, 0x0d, 0x16, 0xcf, 0x48, 0x38, 0x81, 0x35, 0xef, 0x0c, 0xd4, + 0x93, 0x38, 0x1e, 0xc3, 0xba, 0xa1, 0x90, 0x94, 0x8e, 0x49, 0x44, 0xe1, 0xa9, 0x19, 0x47, 0x71, + 0x1c, 0xc2, 0x86, 0x77, 0x01, 0xce, 0xe9, 0xd3, 0x80, 0x8e, 0x46, 0x94, 0x3d, 0xc0, 0x66, 0xff, + 0xdb, 0x01, 0xdd, 0x57, 0xb5, 0x44, 0xff, 0xde, 0xd2, 0x6f, 0x1f, 0x83, 0x25, 0x26, 0x7e, 0xe2, + 0x3c, 0xf7, 0x77, 0x0a, 0xa1, 0x16, 0xb9, 0x14, 0x48, 0xad, 0x85, 0x2f, 0x0a, 0x69, 0x8f, 0xdb, + 0xd7, 0xb7, 0x9a, 0xeb, 0x3f, 0xda, 0xbc, 0xb5, 0xef, 0x87, 0x5b, 0x8b, 0x08, 0xf9, 0x74, 0x3b, + 0x51, 0x65, 0x45, 0xb8, 0x46, 0x15, 0x34, 0x28, 0x0d, 0x90, 0xa9, 0x45, 0x7f, 0xed, 0xf7, 0x19, + 0xe1, 0x3a, 0x3b, 0xec, 0xb3, 0x34, 0xc8, 0xec, 0xfe, 0xc7, 0xed, 0x56, 0x43, 0x8c, 0x09, 0xd7, + 0x18, 0x1f, 0x18, 0x18, 0xa7, 0x01, 0xc6, 0x96, 0xf3, 0xd2, 0xb0, 0xc1, 0x6e, 0x7e, 
0x03, 0x00, + 0x00, 0xff, 0xff, 0xc2, 0xde, 0xb6, 0xae, 0xe5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_type.pb.go new file mode 100644 index 0000000..c55cd83 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/ad_type.pb.go @@ -0,0 +1,196 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/ad_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible types of an ad. +type AdTypeEnum_AdType int32 + +const ( + // No value has been specified. + AdTypeEnum_UNSPECIFIED AdTypeEnum_AdType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + AdTypeEnum_UNKNOWN AdTypeEnum_AdType = 1 + // The ad is a text ad. + AdTypeEnum_TEXT_AD AdTypeEnum_AdType = 2 + // The ad is an expanded text ad. + AdTypeEnum_EXPANDED_TEXT_AD AdTypeEnum_AdType = 3 + // The ad is a call only ad. + AdTypeEnum_CALL_ONLY_AD AdTypeEnum_AdType = 6 + // The ad is an expanded dynamic search ad. + AdTypeEnum_EXPANDED_DYNAMIC_SEARCH_AD AdTypeEnum_AdType = 7 + // The ad is a hotel ad. + AdTypeEnum_HOTEL_AD AdTypeEnum_AdType = 8 + // The ad is a Smart Shopping ad. + AdTypeEnum_SHOPPING_SMART_AD AdTypeEnum_AdType = 9 + // The ad is a standard Shopping ad. + AdTypeEnum_SHOPPING_PRODUCT_AD AdTypeEnum_AdType = 10 + // The ad is a video ad. + AdTypeEnum_VIDEO_AD AdTypeEnum_AdType = 12 + // This ad is a Gmail ad. + AdTypeEnum_GMAIL_AD AdTypeEnum_AdType = 13 + // This ad is an Image ad. + AdTypeEnum_IMAGE_AD AdTypeEnum_AdType = 14 + // The ad is a responsive search ad. + AdTypeEnum_RESPONSIVE_SEARCH_AD AdTypeEnum_AdType = 15 + // The ad is a legacy responsive display ad. + AdTypeEnum_LEGACY_RESPONSIVE_DISPLAY_AD AdTypeEnum_AdType = 16 + // The ad is an app ad. + AdTypeEnum_APP_AD AdTypeEnum_AdType = 17 + // The ad is a legacy app install ad. + AdTypeEnum_LEGACY_APP_INSTALL_AD AdTypeEnum_AdType = 18 + // The ad is a responsive display ad. + AdTypeEnum_RESPONSIVE_DISPLAY_AD AdTypeEnum_AdType = 19 + // The ad is a display upload ad with the HTML5_UPLOAD_AD product type. + AdTypeEnum_HTML5_UPLOAD_AD AdTypeEnum_AdType = 21 + // The ad is a display upload ad with one of the DYNAMIC_HTML5_* product + // types. + AdTypeEnum_DYNAMIC_HTML5_AD AdTypeEnum_AdType = 22 + // The ad is an app engagement ad. 
+ AdTypeEnum_APP_ENGAGEMENT_AD AdTypeEnum_AdType = 23 +) + +var AdTypeEnum_AdType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TEXT_AD", + 3: "EXPANDED_TEXT_AD", + 6: "CALL_ONLY_AD", + 7: "EXPANDED_DYNAMIC_SEARCH_AD", + 8: "HOTEL_AD", + 9: "SHOPPING_SMART_AD", + 10: "SHOPPING_PRODUCT_AD", + 12: "VIDEO_AD", + 13: "GMAIL_AD", + 14: "IMAGE_AD", + 15: "RESPONSIVE_SEARCH_AD", + 16: "LEGACY_RESPONSIVE_DISPLAY_AD", + 17: "APP_AD", + 18: "LEGACY_APP_INSTALL_AD", + 19: "RESPONSIVE_DISPLAY_AD", + 21: "HTML5_UPLOAD_AD", + 22: "DYNAMIC_HTML5_AD", + 23: "APP_ENGAGEMENT_AD", +} +var AdTypeEnum_AdType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TEXT_AD": 2, + "EXPANDED_TEXT_AD": 3, + "CALL_ONLY_AD": 6, + "EXPANDED_DYNAMIC_SEARCH_AD": 7, + "HOTEL_AD": 8, + "SHOPPING_SMART_AD": 9, + "SHOPPING_PRODUCT_AD": 10, + "VIDEO_AD": 12, + "GMAIL_AD": 13, + "IMAGE_AD": 14, + "RESPONSIVE_SEARCH_AD": 15, + "LEGACY_RESPONSIVE_DISPLAY_AD": 16, + "APP_AD": 17, + "LEGACY_APP_INSTALL_AD": 18, + "RESPONSIVE_DISPLAY_AD": 19, + "HTML5_UPLOAD_AD": 21, + "DYNAMIC_HTML5_AD": 22, + "APP_ENGAGEMENT_AD": 23, +} + +func (x AdTypeEnum_AdType) String() string { + return proto.EnumName(AdTypeEnum_AdType_name, int32(x)) +} +func (AdTypeEnum_AdType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_type_8da0c2fb74489b69, []int{0, 0} +} + +// Container for enum describing possible types of an ad. +type AdTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdTypeEnum) Reset() { *m = AdTypeEnum{} } +func (m *AdTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AdTypeEnum) ProtoMessage() {} +func (*AdTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_type_8da0c2fb74489b69, []int{0} +} +func (m *AdTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdTypeEnum.Unmarshal(m, b) +} +func (m *AdTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AdTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdTypeEnum.Merge(dst, src) +} +func (m *AdTypeEnum) XXX_Size() int { + return xxx_messageInfo_AdTypeEnum.Size(m) +} +func (m *AdTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdTypeEnum)(nil), "google.ads.googleads.v1.enums.AdTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdTypeEnum_AdType", AdTypeEnum_AdType_name, AdTypeEnum_AdType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/ad_type.proto", fileDescriptor_ad_type_8da0c2fb74489b69) +} + +var fileDescriptor_ad_type_8da0c2fb74489b69 = []byte{ + // 495 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xa6, 0x89, 0x94, 0x96, 0x49, 0x20, 0xee, 0xa6, 0xa1, 0x50, 0xb5, 0x88, 0xf6, 0x8a, 0x64, + 0x2b, 0x42, 0x5c, 0xcc, 0x69, 0xe2, 0x5d, 0x1c, 0x0b, 0x7b, 0xbd, 0x8a, 0x9d, 0xd0, 0xa0, 0x48, + 0x96, 0xc1, 0x91, 0x15, 0xa9, 0xb1, 0xa3, 0x3a, 0xad, 0xd4, 0xd7, 0x81, 0x1b, 0x47, 0x1e, 0x83, + 0xa7, 0xe0, 0xcc, 0x53, 0xa0, 0x59, 0x37, 0x51, 0x0f, 0xc0, 0xc5, 0x9a, 0xef, 0xc7, 0x9f, 0x76, + 0x7e, 0xe0, 0x75, 0x5e, 0x96, 0xf9, 0xd5, 0xc2, 0x4a, 0xb3, 0xca, 0xaa, 0x4b, 0xaa, 0x6e, 0x07, + 0xd6, 0xa2, 0xb8, 0x59, 0x55, 0x56, 0x9a, 0x25, 0x9b, 
0xbb, 0xf5, 0xc2, 0x5c, 0x5f, 0x97, 0x9b, + 0x92, 0x9d, 0xd5, 0x0e, 0x33, 0xcd, 0x2a, 0x73, 0x67, 0x36, 0x6f, 0x07, 0xa6, 0x36, 0x9f, 0x9c, + 0x6e, 0xb3, 0xd6, 0x4b, 0x2b, 0x2d, 0x8a, 0x72, 0x93, 0x6e, 0x96, 0x65, 0x51, 0xd5, 0x3f, 0x5f, + 0xfc, 0x68, 0x02, 0x60, 0x16, 0xdf, 0xad, 0x17, 0xa2, 0xb8, 0x59, 0x5d, 0x7c, 0x6b, 0x42, 0xab, + 0x86, 0xac, 0x0b, 0xed, 0x89, 0x8c, 0x94, 0x70, 0xbc, 0xf7, 0x9e, 0xe0, 0xc6, 0x23, 0xd6, 0x86, + 0xfd, 0x89, 0xfc, 0x20, 0xc3, 0x8f, 0xd2, 0xd8, 0x23, 0x10, 0x8b, 0xcb, 0x38, 0x41, 0x6e, 0x34, + 0xd8, 0x11, 0x18, 0xe2, 0x52, 0xa1, 0xe4, 0x82, 0x27, 0x5b, 0xb6, 0xc9, 0x0c, 0xe8, 0x38, 0xe8, + 0xfb, 0x49, 0x28, 0xfd, 0x19, 0x31, 0x2d, 0xf6, 0x12, 0x4e, 0x76, 0x3e, 0x3e, 0x93, 0x18, 0x78, + 0x4e, 0x12, 0x09, 0x1c, 0x3b, 0x23, 0xd2, 0xf7, 0x59, 0x07, 0x0e, 0x46, 0x61, 0x2c, 0x7c, 0x42, + 0x07, 0xac, 0x0f, 0x87, 0xd1, 0x28, 0x54, 0xca, 0x93, 0x6e, 0x12, 0x05, 0x38, 0xd6, 0xb1, 0x8f, + 0xd9, 0x31, 0xf4, 0x76, 0xb4, 0x1a, 0x87, 0x7c, 0xe2, 0x68, 0x01, 0xe8, 0xef, 0xa9, 0xc7, 0x45, + 0x48, 0xa8, 0x43, 0xc8, 0x0d, 0xd0, 0xd3, 0x59, 0x4f, 0x08, 0x79, 0x01, 0xba, 0x82, 0xd0, 0x53, + 0xf6, 0x1c, 0x8e, 0xc6, 0x22, 0x52, 0xa1, 0x8c, 0xbc, 0xa9, 0x78, 0xf0, 0x82, 0x2e, 0x7b, 0x05, + 0xa7, 0xbe, 0x70, 0xd1, 0x99, 0x25, 0x0f, 0x0c, 0xdc, 0x8b, 0x94, 0x8f, 0xba, 0x07, 0x83, 0x01, + 0xb4, 0x50, 0x29, 0xaa, 0x0f, 0xd9, 0x0b, 0xe8, 0xdf, 0xbb, 0x89, 0xf2, 0x64, 0x14, 0x53, 0xbf, + 0xc8, 0x0d, 0x46, 0xd2, 0xdf, 0x13, 0x7a, 0xac, 0x07, 0xdd, 0x51, 0x1c, 0xf8, 0x6f, 0x93, 0x89, + 0xf2, 0x43, 0xe4, 0x44, 0xf6, 0x69, 0x84, 0xdb, 0x89, 0xd4, 0x22, 0x72, 0xe3, 0x19, 0x8d, 0x80, + 0x92, 0x85, 0x74, 0xd1, 0x15, 0x81, 0x90, 0xba, 0xd3, 0xe3, 0xe1, 0xaf, 0x3d, 0x38, 0xff, 0x52, + 0xae, 0xcc, 0xff, 0x2e, 0x7e, 0xd8, 0xae, 0x17, 0xa9, 0x68, 0xcf, 0x6a, 0xef, 0xd3, 0xf0, 0xde, + 0x9d, 0x97, 0x57, 0x69, 0x91, 0x9b, 0xe5, 0x75, 0x6e, 0xe5, 0x8b, 0x42, 0x5f, 0xc1, 0xf6, 0xc6, + 0xd6, 0xcb, 0xea, 0x1f, 0x27, 0xf7, 0x4e, 0x7f, 0xbf, 0x36, 0x9a, 0x2e, 0xe2, 0xf7, 0xc6, 0x99, + 0x5b, 0x47, 0x61, 0x56, 0x99, 0x75, 0x49, 0xd5, 0x74, 0x60, 0xd2, 0x0d, 0x55, 0x3f, 0xb7, 0xfa, + 0x1c, 0xb3, 0x6a, 0xbe, 0xd3, 0xe7, 0xd3, 0xc1, 0x5c, 0xeb, 0xbf, 0x1b, 0xe7, 0x35, 0x69, 0xdb, + 0x98, 0x55, 0xb6, 0xbd, 0x73, 0xd8, 0xf6, 0x74, 0x60, 0xdb, 0xda, 0xf3, 0xb9, 0xa5, 0x1f, 0xf6, + 0xe6, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x27, 0x88, 0xc1, 0xa2, 0x0a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_sub_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_sub_type.pb.go new file mode 100644 index 0000000..88ae3e6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_sub_type.pb.go @@ -0,0 +1,172 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/advertising_channel_sub_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the different channel subtypes. +type AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType int32 + +const ( + // Not specified. + AdvertisingChannelSubTypeEnum_UNSPECIFIED AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 0 + // Used as a return value only. Represents value unknown in this version. + AdvertisingChannelSubTypeEnum_UNKNOWN AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 1 + // Mobile app campaigns for Search. + AdvertisingChannelSubTypeEnum_SEARCH_MOBILE_APP AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 2 + // Mobile app campaigns for Display. + AdvertisingChannelSubTypeEnum_DISPLAY_MOBILE_APP AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 3 + // AdWords express campaigns for search. + AdvertisingChannelSubTypeEnum_SEARCH_EXPRESS AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 4 + // AdWords Express campaigns for display. + AdvertisingChannelSubTypeEnum_DISPLAY_EXPRESS AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 5 + // Smart Shopping campaigns. + AdvertisingChannelSubTypeEnum_SHOPPING_SMART_ADS AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 6 + // Gmail Ad campaigns. + AdvertisingChannelSubTypeEnum_DISPLAY_GMAIL_AD AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 7 + // Smart display campaigns. + AdvertisingChannelSubTypeEnum_DISPLAY_SMART_CAMPAIGN AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 8 + // Video Outstream campaigns. + AdvertisingChannelSubTypeEnum_VIDEO_OUTSTREAM AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 9 + // Video TrueView for Action campaigns. + AdvertisingChannelSubTypeEnum_VIDEO_ACTION AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 10 + // Video campaigns with non-skippable video ads. + AdvertisingChannelSubTypeEnum_VIDEO_NON_SKIPPABLE AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 11 + // App Campaign that allows you to easily promote your Android or iOS app + // across Google's top properties including Search, Play, YouTube, and the + // Google Display Network. + AdvertisingChannelSubTypeEnum_APP_CAMPAIGN AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 12 + // App Campaign for engagement, focused on driving re-engagement with the + // app across several of Google’s top properties including Search, YouTube, + // and the Google Display Network. 
+ AdvertisingChannelSubTypeEnum_APP_CAMPAIGN_FOR_ENGAGEMENT AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType = 13 +) + +var AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEARCH_MOBILE_APP", + 3: "DISPLAY_MOBILE_APP", + 4: "SEARCH_EXPRESS", + 5: "DISPLAY_EXPRESS", + 6: "SHOPPING_SMART_ADS", + 7: "DISPLAY_GMAIL_AD", + 8: "DISPLAY_SMART_CAMPAIGN", + 9: "VIDEO_OUTSTREAM", + 10: "VIDEO_ACTION", + 11: "VIDEO_NON_SKIPPABLE", + 12: "APP_CAMPAIGN", + 13: "APP_CAMPAIGN_FOR_ENGAGEMENT", +} +var AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEARCH_MOBILE_APP": 2, + "DISPLAY_MOBILE_APP": 3, + "SEARCH_EXPRESS": 4, + "DISPLAY_EXPRESS": 5, + "SHOPPING_SMART_ADS": 6, + "DISPLAY_GMAIL_AD": 7, + "DISPLAY_SMART_CAMPAIGN": 8, + "VIDEO_OUTSTREAM": 9, + "VIDEO_ACTION": 10, + "VIDEO_NON_SKIPPABLE": 11, + "APP_CAMPAIGN": 12, + "APP_CAMPAIGN_FOR_ENGAGEMENT": 13, +} + +func (x AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType) String() string { + return proto.EnumName(AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType_name, int32(x)) +} +func (AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_advertising_channel_sub_type_85066977ea55d72d, []int{0, 0} +} + +// An immutable specialization of an Advertising Channel. +type AdvertisingChannelSubTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdvertisingChannelSubTypeEnum) Reset() { *m = AdvertisingChannelSubTypeEnum{} } +func (m *AdvertisingChannelSubTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AdvertisingChannelSubTypeEnum) ProtoMessage() {} +func (*AdvertisingChannelSubTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_advertising_channel_sub_type_85066977ea55d72d, []int{0} +} +func (m *AdvertisingChannelSubTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdvertisingChannelSubTypeEnum.Unmarshal(m, b) +} +func (m *AdvertisingChannelSubTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdvertisingChannelSubTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AdvertisingChannelSubTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdvertisingChannelSubTypeEnum.Merge(dst, src) +} +func (m *AdvertisingChannelSubTypeEnum) XXX_Size() int { + return xxx_messageInfo_AdvertisingChannelSubTypeEnum.Size(m) +} +func (m *AdvertisingChannelSubTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdvertisingChannelSubTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdvertisingChannelSubTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdvertisingChannelSubTypeEnum)(nil), "google.ads.googleads.v1.enums.AdvertisingChannelSubTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType", AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType_name, AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/advertising_channel_sub_type.proto", fileDescriptor_advertising_channel_sub_type_85066977ea55d72d) +} + +var fileDescriptor_advertising_channel_sub_type_85066977ea55d72d = []byte{ + // 476 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 
0x92, 0xcf, 0x6e, 0xd3, 0x4c, + 0x14, 0xc5, 0xbf, 0xb8, 0x1f, 0x2d, 0x4c, 0x0a, 0x1d, 0xa6, 0x50, 0x44, 0x21, 0xa0, 0xf6, 0x01, + 0x6c, 0x45, 0xec, 0xcc, 0x86, 0x9b, 0x64, 0xea, 0x8e, 0x1a, 0x8f, 0x47, 0x19, 0x27, 0xfc, 0x51, + 0xa4, 0x91, 0x53, 0x5b, 0x26, 0x52, 0x32, 0x8e, 0x32, 0x49, 0xa4, 0xbe, 0x0e, 0x4b, 0x1e, 0x85, + 0x05, 0x0f, 0xc2, 0x0a, 0x89, 0x17, 0x40, 0xf6, 0xd4, 0x69, 0x37, 0x61, 0x63, 0x1d, 0x9d, 0xfb, + 0xbb, 0xe7, 0x5a, 0x73, 0x2f, 0xfa, 0x90, 0x17, 0x45, 0x3e, 0xcb, 0xbc, 0x24, 0x35, 0x9e, 0x95, + 0xa5, 0xda, 0xb4, 0xbd, 0x4c, 0xaf, 0xe7, 0xc6, 0x4b, 0xd2, 0x4d, 0xb6, 0x5c, 0x4d, 0xcd, 0x54, + 0xe7, 0xea, 0xfa, 0x6b, 0xa2, 0x75, 0x36, 0x53, 0x66, 0x3d, 0x51, 0xab, 0x9b, 0x45, 0xe6, 0x2e, + 0x96, 0xc5, 0xaa, 0x20, 0x2d, 0xdb, 0xe6, 0x26, 0xa9, 0x71, 0xb7, 0x09, 0xee, 0xa6, 0xed, 0x56, + 0x09, 0xa7, 0xaf, 0xeb, 0x01, 0x8b, 0xa9, 0x97, 0x68, 0x5d, 0xac, 0x92, 0xd5, 0xb4, 0xd0, 0xc6, + 0x36, 0x9f, 0xff, 0x76, 0x50, 0x0b, 0xee, 0x66, 0x74, 0xed, 0x08, 0xb9, 0x9e, 0xc4, 0x37, 0x8b, + 0x8c, 0xea, 0xf5, 0xfc, 0xfc, 0xa7, 0x83, 0x5e, 0xee, 0x24, 0xc8, 0x11, 0x6a, 0x0e, 0xb9, 0x14, + 0xb4, 0xcb, 0x2e, 0x18, 0xed, 0xe1, 0xff, 0x48, 0x13, 0x1d, 0x0c, 0xf9, 0x15, 0x8f, 0x3e, 0x72, + 0xdc, 0x20, 0xcf, 0xd1, 0x53, 0x49, 0x61, 0xd0, 0xbd, 0x54, 0x61, 0xd4, 0x61, 0x7d, 0xaa, 0x40, + 0x08, 0xec, 0x90, 0x13, 0x44, 0x7a, 0x4c, 0x8a, 0x3e, 0x7c, 0xbe, 0xef, 0xef, 0x11, 0x82, 0x9e, + 0xdc, 0xe2, 0xf4, 0x93, 0x18, 0x50, 0x29, 0xf1, 0xff, 0xe4, 0x18, 0x1d, 0xd5, 0x6c, 0x6d, 0x3e, + 0x28, 0x03, 0xe4, 0x65, 0x24, 0x04, 0xe3, 0x81, 0x92, 0x21, 0x0c, 0x62, 0x05, 0x3d, 0x89, 0xf7, + 0xc9, 0x33, 0x84, 0x6b, 0x38, 0x08, 0x81, 0xf5, 0x15, 0xf4, 0xf0, 0x01, 0x39, 0x45, 0x27, 0xb5, + 0x6b, 0xe1, 0x2e, 0x84, 0x02, 0x58, 0xc0, 0xf1, 0xc3, 0x32, 0x7e, 0xc4, 0x7a, 0x34, 0x52, 0xd1, + 0x30, 0x96, 0xf1, 0x80, 0x42, 0x88, 0x1f, 0x11, 0x8c, 0x0e, 0xad, 0x09, 0xdd, 0x98, 0x45, 0x1c, + 0x23, 0xf2, 0x02, 0x1d, 0x5b, 0x87, 0x47, 0x5c, 0xc9, 0x2b, 0x26, 0x04, 0x74, 0xfa, 0x14, 0x37, + 0x4b, 0x14, 0x84, 0xb8, 0x4b, 0x3c, 0x24, 0x6f, 0xd1, 0xab, 0xfb, 0x8e, 0xba, 0x88, 0x06, 0x8a, + 0xf2, 0x00, 0x02, 0x1a, 0x52, 0x1e, 0xe3, 0xc7, 0x9d, 0x3f, 0x0d, 0x74, 0x76, 0x5d, 0xcc, 0xdd, + 0x7f, 0xae, 0xad, 0xf3, 0x66, 0xe7, 0x9b, 0x8b, 0x72, 0x71, 0xa2, 0xf1, 0xa5, 0x73, 0x1b, 0x90, + 0x17, 0xb3, 0x44, 0xe7, 0x6e, 0xb1, 0xcc, 0xbd, 0x3c, 0xd3, 0xd5, 0x5a, 0xeb, 0x4b, 0x5a, 0x4c, + 0xcd, 0x8e, 0xc3, 0x7a, 0x5f, 0x7d, 0xbf, 0x39, 0x7b, 0x01, 0xc0, 0x77, 0xa7, 0x15, 0xd8, 0x28, + 0x48, 0x8d, 0x6b, 0x65, 0xa9, 0x46, 0x6d, 0xb7, 0xbc, 0x00, 0xf3, 0xa3, 0xae, 0x8f, 0x21, 0x35, + 0xe3, 0x6d, 0x7d, 0x3c, 0x6a, 0x8f, 0xab, 0xfa, 0x2f, 0xe7, 0xcc, 0x9a, 0xbe, 0x0f, 0xa9, 0xf1, + 0xfd, 0x2d, 0xe1, 0xfb, 0xa3, 0xb6, 0xef, 0x57, 0xcc, 0x64, 0xbf, 0xfa, 0xb1, 0x77, 0x7f, 0x03, + 0x00, 0x00, 0xff, 0xff, 0xc1, 0x16, 0x40, 0x78, 0xf0, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_type.pb.go new file mode 100644 index 0000000..5ab30b1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/advertising_channel_type.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/advertising_channel_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the various advertising channel types. +type AdvertisingChannelTypeEnum_AdvertisingChannelType int32 + +const ( + // Not specified. + AdvertisingChannelTypeEnum_UNSPECIFIED AdvertisingChannelTypeEnum_AdvertisingChannelType = 0 + // Used for return value only. Represents value unknown in this version. + AdvertisingChannelTypeEnum_UNKNOWN AdvertisingChannelTypeEnum_AdvertisingChannelType = 1 + // Search Network. Includes display bundled, and Search+ campaigns. + AdvertisingChannelTypeEnum_SEARCH AdvertisingChannelTypeEnum_AdvertisingChannelType = 2 + // Google Display Network only. + AdvertisingChannelTypeEnum_DISPLAY AdvertisingChannelTypeEnum_AdvertisingChannelType = 3 + // Shopping campaigns serve on the shopping property + // and on google.com search results. + AdvertisingChannelTypeEnum_SHOPPING AdvertisingChannelTypeEnum_AdvertisingChannelType = 4 + // Hotel Ads campaigns. + AdvertisingChannelTypeEnum_HOTEL AdvertisingChannelTypeEnum_AdvertisingChannelType = 5 + // Video campaigns. + AdvertisingChannelTypeEnum_VIDEO AdvertisingChannelTypeEnum_AdvertisingChannelType = 6 + // App Campaigns, and App Campaigns for Engagement, that run + // across multiple channels. + AdvertisingChannelTypeEnum_MULTI_CHANNEL AdvertisingChannelTypeEnum_AdvertisingChannelType = 7 +) + +var AdvertisingChannelTypeEnum_AdvertisingChannelType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEARCH", + 3: "DISPLAY", + 4: "SHOPPING", + 5: "HOTEL", + 6: "VIDEO", + 7: "MULTI_CHANNEL", +} +var AdvertisingChannelTypeEnum_AdvertisingChannelType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEARCH": 2, + "DISPLAY": 3, + "SHOPPING": 4, + "HOTEL": 5, + "VIDEO": 6, + "MULTI_CHANNEL": 7, +} + +func (x AdvertisingChannelTypeEnum_AdvertisingChannelType) String() string { + return proto.EnumName(AdvertisingChannelTypeEnum_AdvertisingChannelType_name, int32(x)) +} +func (AdvertisingChannelTypeEnum_AdvertisingChannelType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_advertising_channel_type_fa6d1db862c02c2d, []int{0, 0} +} + +// The channel type a campaign may target to serve on. 
+type AdvertisingChannelTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdvertisingChannelTypeEnum) Reset() { *m = AdvertisingChannelTypeEnum{} } +func (m *AdvertisingChannelTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AdvertisingChannelTypeEnum) ProtoMessage() {} +func (*AdvertisingChannelTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_advertising_channel_type_fa6d1db862c02c2d, []int{0} +} +func (m *AdvertisingChannelTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdvertisingChannelTypeEnum.Unmarshal(m, b) +} +func (m *AdvertisingChannelTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdvertisingChannelTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AdvertisingChannelTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdvertisingChannelTypeEnum.Merge(dst, src) +} +func (m *AdvertisingChannelTypeEnum) XXX_Size() int { + return xxx_messageInfo_AdvertisingChannelTypeEnum.Size(m) +} +func (m *AdvertisingChannelTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdvertisingChannelTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdvertisingChannelTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdvertisingChannelTypeEnum)(nil), "google.ads.googleads.v1.enums.AdvertisingChannelTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AdvertisingChannelTypeEnum_AdvertisingChannelType", AdvertisingChannelTypeEnum_AdvertisingChannelType_name, AdvertisingChannelTypeEnum_AdvertisingChannelType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/advertising_channel_type.proto", fileDescriptor_advertising_channel_type_fa6d1db862c02c2d) +} + +var fileDescriptor_advertising_channel_type_fa6d1db862c02c2d = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x4e, 0xbb, 0x40, + 0x18, 0xc5, 0xff, 0xd0, 0x7f, 0x5b, 0x9d, 0x6a, 0x1c, 0x59, 0xb8, 0xa8, 0x76, 0xd1, 0x1e, 0x60, + 0x08, 0x71, 0x37, 0xba, 0x99, 0x52, 0x2c, 0x44, 0x04, 0x22, 0x2d, 0x46, 0x43, 0xd2, 0x60, 0x21, + 0x48, 0xd2, 0xce, 0x90, 0x0e, 0x6d, 0xd2, 0x0b, 0x78, 0x10, 0x13, 0x37, 0x1e, 0xc5, 0xa3, 0xb8, + 0xf2, 0x08, 0x06, 0xc6, 0xd6, 0x4d, 0x75, 0x43, 0x5e, 0x78, 0xdf, 0xef, 0xe5, 0xfb, 0xde, 0x80, + 0xcb, 0x94, 0xb1, 0x74, 0x96, 0xa8, 0x51, 0xcc, 0x55, 0x21, 0x4b, 0xb5, 0xd2, 0xd4, 0x84, 0x2e, + 0xe7, 0x5c, 0x8d, 0xe2, 0x55, 0xb2, 0x28, 0x32, 0x9e, 0xd1, 0x74, 0x32, 0x7d, 0x8a, 0x28, 0x4d, + 0x66, 0x93, 0x62, 0x9d, 0x27, 0x28, 0x5f, 0xb0, 0x82, 0x29, 0x1d, 0x81, 0xa0, 0x28, 0xe6, 0x68, + 0x4b, 0xa3, 0x95, 0x86, 0x2a, 0xba, 0x7d, 0xb6, 0x09, 0xcf, 0x33, 0x35, 0xa2, 0x94, 0x15, 0x51, + 0x91, 0x31, 0xca, 0x05, 0xdc, 0x7b, 0x95, 0x40, 0x9b, 0xfc, 0xe4, 0xeb, 0x22, 0x7e, 0xb4, 0xce, + 0x13, 0x83, 0x2e, 0xe7, 0xbd, 0x67, 0x09, 0x9c, 0xec, 0xb6, 0x95, 0x23, 0xd0, 0x1a, 0x3b, 0xbe, + 0x67, 0xe8, 0xd6, 0x95, 0x65, 0x0c, 0xe0, 0x3f, 0xa5, 0x05, 0x9a, 0x63, 0xe7, 0xda, 0x71, 0xef, + 0x1c, 0x28, 0x29, 0x00, 0x34, 0x7c, 0x83, 0xdc, 0xea, 0x26, 0x94, 0x4b, 0x63, 0x60, 0xf9, 0x9e, + 0x4d, 0xee, 0x61, 0x4d, 0x39, 0x00, 0x7b, 0xbe, 0xe9, 0x7a, 0x9e, 0xe5, 0x0c, 0xe1, 0x7f, 0x65, + 0x1f, 0xd4, 0x4d, 0x77, 0x64, 0xd8, 0xb0, 0x5e, 0xca, 0xc0, 0x1a, 0x18, 0x2e, 0x6c, 0x28, 0xc7, + 0xe0, 0xf0, 0x66, 0x6c, 0x8f, 0xac, 0x89, 0x6e, 0x12, 0xc7, 0x31, 0x6c, 0xd8, 0xec, 0x7f, 0x4a, + 0xa0, 0x3b, 0x65, 0x73, 0xf4, 
0xe7, 0xad, 0xfd, 0xd3, 0xdd, 0xbb, 0x7a, 0xe5, 0xa9, 0x9e, 0xf4, + 0xd0, 0xff, 0xa6, 0x53, 0x36, 0x8b, 0x68, 0x8a, 0xd8, 0x22, 0x55, 0xd3, 0x84, 0x56, 0x45, 0x6c, + 0x7a, 0xcf, 0x33, 0xfe, 0xcb, 0x33, 0x5c, 0x54, 0xdf, 0x17, 0xb9, 0x36, 0x24, 0xe4, 0x4d, 0xee, + 0x0c, 0x45, 0x14, 0x89, 0x39, 0x12, 0xb2, 0x54, 0x81, 0x86, 0xca, 0xda, 0xf8, 0xfb, 0xc6, 0x0f, + 0x49, 0xcc, 0xc3, 0xad, 0x1f, 0x06, 0x5a, 0x58, 0xf9, 0x1f, 0x72, 0x57, 0xfc, 0xc4, 0x98, 0xc4, + 0x1c, 0xe3, 0xed, 0x04, 0xc6, 0x81, 0x86, 0x71, 0x35, 0xf3, 0xd8, 0xa8, 0x16, 0x3b, 0xff, 0x0a, + 0x00, 0x00, 0xff, 0xff, 0xc4, 0x32, 0x41, 0x6f, 0x1e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.pb.go new file mode 100644 index 0000000..8fff72d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for a relationship type for an affiliate location feed. +type AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType int32 + +const ( + // Not specified. + AffiliateLocationFeedRelationshipTypeEnum_UNSPECIFIED AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType = 0 + // Used for return value only. Represents value unknown in this version. + AffiliateLocationFeedRelationshipTypeEnum_UNKNOWN AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType = 1 + // General retailer relationship. 
+ AffiliateLocationFeedRelationshipTypeEnum_GENERAL_RETAILER AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType = 2 +) + +var AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "GENERAL_RETAILER", +} +var AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "GENERAL_RETAILER": 2, +} + +func (x AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType) String() string { + return proto.EnumName(AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType_name, int32(x)) +} +func (AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_affiliate_location_feed_relationship_type_3af3bb377476510a, []int{0, 0} +} + +// Container for enum describing possible values for a relationship type for +// an affiliate location feed. +type AffiliateLocationFeedRelationshipTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AffiliateLocationFeedRelationshipTypeEnum) Reset() { + *m = AffiliateLocationFeedRelationshipTypeEnum{} +} +func (m *AffiliateLocationFeedRelationshipTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AffiliateLocationFeedRelationshipTypeEnum) ProtoMessage() {} +func (*AffiliateLocationFeedRelationshipTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_affiliate_location_feed_relationship_type_3af3bb377476510a, []int{0} +} +func (m *AffiliateLocationFeedRelationshipTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum.Unmarshal(m, b) +} +func (m *AffiliateLocationFeedRelationshipTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AffiliateLocationFeedRelationshipTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum.Merge(dst, src) +} +func (m *AffiliateLocationFeedRelationshipTypeEnum) XXX_Size() int { + return xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum.Size(m) +} +func (m *AffiliateLocationFeedRelationshipTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AffiliateLocationFeedRelationshipTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AffiliateLocationFeedRelationshipTypeEnum)(nil), "google.ads.googleads.v1.enums.AffiliateLocationFeedRelationshipTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType", AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType_name, AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/affiliate_location_feed_relationship_type.proto", fileDescriptor_affiliate_location_feed_relationship_type_3af3bb377476510a) +} + +var fileDescriptor_affiliate_location_feed_relationship_type_3af3bb377476510a = []byte{ + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0x4a, 
0xc3, 0x30, + 0x18, 0xc7, 0x6d, 0x05, 0x85, 0xec, 0x60, 0x29, 0x9e, 0xc4, 0x1d, 0x36, 0xf0, 0xa0, 0x87, 0x94, + 0xe2, 0x2d, 0x9e, 0x32, 0xcd, 0xc6, 0x70, 0xd6, 0x51, 0xb7, 0x09, 0x5a, 0x28, 0x71, 0xc9, 0x6a, + 0xa1, 0x4b, 0xc2, 0xd2, 0x0d, 0xf6, 0x06, 0x3e, 0x87, 0x47, 0x1f, 0xc5, 0x47, 0xf1, 0x25, 0x94, + 0x26, 0xeb, 0xf0, 0xa2, 0xec, 0x52, 0xfe, 0xcd, 0xf7, 0xe5, 0xf7, 0xff, 0xfe, 0x5f, 0xc0, 0x5d, + 0x26, 0x65, 0x56, 0xf0, 0x80, 0x32, 0x1d, 0x58, 0x59, 0xa9, 0x55, 0x18, 0x70, 0xb1, 0x9c, 0xeb, + 0x80, 0xce, 0x66, 0x79, 0x91, 0xd3, 0x92, 0xa7, 0x85, 0x9c, 0xd2, 0x32, 0x97, 0x22, 0x9d, 0x71, + 0xce, 0xd2, 0x05, 0x2f, 0xcc, 0x9f, 0x7e, 0xcd, 0x55, 0x5a, 0xae, 0x15, 0x87, 0x6a, 0x21, 0x4b, + 0xe9, 0x37, 0x2d, 0x03, 0x52, 0xa6, 0xe1, 0x16, 0x07, 0x57, 0x21, 0x34, 0xb8, 0x93, 0xd3, 0xda, + 0x4d, 0xe5, 0x01, 0x15, 0x42, 0x96, 0x16, 0x62, 0x2f, 0xb7, 0xdf, 0x1c, 0x70, 0x8e, 0x6b, 0xc3, + 0xc1, 0xc6, 0xaf, 0xcb, 0x39, 0x8b, 0x7f, 0xb9, 0x8d, 0xd6, 0x8a, 0x13, 0xb1, 0x9c, 0xb7, 0x9f, + 0xc1, 0xd9, 0x4e, 0xcd, 0xfe, 0x11, 0x68, 0x8c, 0xa3, 0x87, 0x21, 0xb9, 0xee, 0x77, 0xfb, 0xe4, + 0xc6, 0xdb, 0xf3, 0x1b, 0xe0, 0x70, 0x1c, 0xdd, 0x46, 0xf7, 0x8f, 0x91, 0xe7, 0xf8, 0xc7, 0xc0, + 0xeb, 0x91, 0x88, 0xc4, 0x78, 0x90, 0xc6, 0x64, 0x84, 0xfb, 0x03, 0x12, 0x7b, 0x6e, 0xe7, 0xdb, + 0x01, 0xad, 0xa9, 0x9c, 0xc3, 0x7f, 0xe3, 0x74, 0x2e, 0x76, 0x1a, 0x60, 0x58, 0x85, 0x1b, 0x3a, + 0x4f, 0x9d, 0x0d, 0x2c, 0x93, 0x05, 0x15, 0x19, 0x94, 0x8b, 0x2c, 0xc8, 0xb8, 0x30, 0xd1, 0xeb, + 0xd5, 0xab, 0x5c, 0xff, 0xf1, 0x12, 0x57, 0xe6, 0xfb, 0xee, 0xee, 0xf7, 0x30, 0xfe, 0x70, 0x9b, + 0x3d, 0x8b, 0xc2, 0x4c, 0x43, 0x2b, 0x2b, 0x35, 0x09, 0x61, 0xb5, 0x19, 0xfd, 0x59, 0xd7, 0x13, + 0xcc, 0x74, 0xb2, 0xad, 0x27, 0x93, 0x30, 0x31, 0xf5, 0x2f, 0xb7, 0x65, 0x0f, 0x11, 0xc2, 0x4c, + 0x23, 0xb4, 0xed, 0x40, 0x68, 0x12, 0x22, 0x64, 0x7a, 0x5e, 0x0e, 0xcc, 0x60, 0x97, 0x3f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x67, 0xb4, 0x2f, 0x6b, 0x21, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_placeholder_field.pb.go new file mode 100644 index 0000000..9ab7c1b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/affiliate_location_placeholder_field.pb.go @@ -0,0 +1,161 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/affiliate_location_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Affiliate Location placeholder fields. +type AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField int32 + +const ( + // Not specified. 
+ AffiliateLocationPlaceholderFieldEnum_UNSPECIFIED AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + AffiliateLocationPlaceholderFieldEnum_UNKNOWN AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 1 + // Data Type: STRING. The name of the business. + AffiliateLocationPlaceholderFieldEnum_BUSINESS_NAME AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 2 + // Data Type: STRING. Line 1 of the business address. + AffiliateLocationPlaceholderFieldEnum_ADDRESS_LINE_1 AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 3 + // Data Type: STRING. Line 2 of the business address. + AffiliateLocationPlaceholderFieldEnum_ADDRESS_LINE_2 AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 4 + // Data Type: STRING. City of the business address. + AffiliateLocationPlaceholderFieldEnum_CITY AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 5 + // Data Type: STRING. Province of the business address. + AffiliateLocationPlaceholderFieldEnum_PROVINCE AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 6 + // Data Type: STRING. Postal code of the business address. + AffiliateLocationPlaceholderFieldEnum_POSTAL_CODE AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 7 + // Data Type: STRING. Country code of the business address. + AffiliateLocationPlaceholderFieldEnum_COUNTRY_CODE AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 8 + // Data Type: STRING. Phone number of the business. + AffiliateLocationPlaceholderFieldEnum_PHONE_NUMBER AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 9 + // Data Type: STRING. Language code of the business. + AffiliateLocationPlaceholderFieldEnum_LANGUAGE_CODE AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 10 + // Data Type: INT64. ID of the chain. + AffiliateLocationPlaceholderFieldEnum_CHAIN_ID AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 11 + // Data Type: STRING. Name of the chain. + AffiliateLocationPlaceholderFieldEnum_CHAIN_NAME AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField = 12 +) + +var AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BUSINESS_NAME", + 3: "ADDRESS_LINE_1", + 4: "ADDRESS_LINE_2", + 5: "CITY", + 6: "PROVINCE", + 7: "POSTAL_CODE", + 8: "COUNTRY_CODE", + 9: "PHONE_NUMBER", + 10: "LANGUAGE_CODE", + 11: "CHAIN_ID", + 12: "CHAIN_NAME", +} +var AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BUSINESS_NAME": 2, + "ADDRESS_LINE_1": 3, + "ADDRESS_LINE_2": 4, + "CITY": 5, + "PROVINCE": 6, + "POSTAL_CODE": 7, + "COUNTRY_CODE": 8, + "PHONE_NUMBER": 9, + "LANGUAGE_CODE": 10, + "CHAIN_ID": 11, + "CHAIN_NAME": 12, +} + +func (x AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField) String() string { + return proto.EnumName(AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField_name, int32(x)) +} +func (AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_affiliate_location_placeholder_field_9a684762b064fbe0, []int{0, 0} +} + +// Values for Affiliate Location placeholder fields. 
+type AffiliateLocationPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AffiliateLocationPlaceholderFieldEnum) Reset() { *m = AffiliateLocationPlaceholderFieldEnum{} } +func (m *AffiliateLocationPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*AffiliateLocationPlaceholderFieldEnum) ProtoMessage() {} +func (*AffiliateLocationPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_affiliate_location_placeholder_field_9a684762b064fbe0, []int{0} +} +func (m *AffiliateLocationPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *AffiliateLocationPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *AffiliateLocationPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum.Merge(dst, src) +} +func (m *AffiliateLocationPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum.Size(m) +} +func (m *AffiliateLocationPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AffiliateLocationPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AffiliateLocationPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.AffiliateLocationPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField", AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField_name, AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/affiliate_location_placeholder_field.proto", fileDescriptor_affiliate_location_placeholder_field_9a684762b064fbe0) +} + +var fileDescriptor_affiliate_location_placeholder_field_9a684762b064fbe0 = []byte{ + // 431 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xc1, 0x8e, 0xd3, 0x30, + 0x10, 0xa5, 0xd9, 0x65, 0xb7, 0xb8, 0x65, 0x31, 0x3e, 0x22, 0xf6, 0xd0, 0x95, 0xe0, 0xe8, 0x28, + 0x70, 0x0b, 0x27, 0x27, 0xf1, 0xb6, 0x11, 0x5d, 0x27, 0x6a, 0x9a, 0xa2, 0x45, 0x95, 0x2c, 0xd3, + 0xa4, 0x21, 0x52, 0x1a, 0x57, 0x75, 0x77, 0x3f, 0x08, 0x71, 0xe2, 0x53, 0xf8, 0x14, 0x3e, 0x01, + 0x2e, 0xc8, 0xf6, 0xa6, 0x1c, 0x10, 0xf4, 0x12, 0xbd, 0xbc, 0x79, 0x7e, 0x6f, 0x34, 0x33, 0x60, + 0x52, 0x49, 0x59, 0x35, 0xa5, 0x2b, 0x0a, 0xe5, 0x5a, 0xa8, 0xd1, 0xbd, 0xe7, 0x96, 0xed, 0xdd, + 0x46, 0xb9, 0x62, 0xbd, 0xae, 0x9b, 0x5a, 0xec, 0x4b, 0xde, 0xc8, 0x95, 0xd8, 0xd7, 0xb2, 0xe5, + 0xdb, 0x46, 0xac, 0xca, 0xcf, 0xb2, 0x29, 0xca, 0x1d, 0x5f, 0xd7, 0x65, 0x53, 0xe0, 0xed, 0x4e, + 0xee, 0x25, 0xba, 0xb4, 0xcf, 0xb1, 0x28, 0x14, 0x3e, 0x38, 0xe1, 0x7b, 0x0f, 0x1b, 0xa7, 0x17, + 0x2f, 0xbb, 0xa0, 0x6d, 0xed, 0x8a, 0xb6, 0x95, 0x7b, 0xe3, 0xa6, 0xec, 0xe3, 0xab, 0xaf, 0x0e, + 0x78, 0x45, 0xba, 0xac, 0xe9, 0x43, 0x54, 0xfa, 0x27, 0xe9, 0x5a, 0x07, 0xd1, 0xf6, 0x6e, 0x73, + 0xf5, 0xb3, 0x07, 0x46, 0x47, 0x95, 0xe8, 0x19, 0x18, 0xe4, 0x2c, 0x4b, 0x69, 0x18, 0x5f, 0xc7, + 0x34, 0x82, 0x8f, 0xd0, 0x00, 0x9c, 0xe7, 0xec, 0x3d, 0x4b, 0x3e, 0x30, 0xd8, 0x43, 0xcf, 0xc1, + 
0xd3, 0x20, 0xcf, 0x62, 0x46, 0xb3, 0x8c, 0x33, 0x72, 0x43, 0xa1, 0x83, 0x10, 0xb8, 0x20, 0x51, + 0x34, 0xd3, 0xcc, 0x34, 0x66, 0x94, 0x7b, 0xf0, 0xe4, 0x2f, 0xee, 0x0d, 0x3c, 0x45, 0x7d, 0x70, + 0x1a, 0xc6, 0xf3, 0x5b, 0xf8, 0x18, 0x0d, 0x41, 0x3f, 0x9d, 0x25, 0x8b, 0x98, 0x85, 0x14, 0x9e, + 0xe9, 0xc0, 0x34, 0xc9, 0xe6, 0x64, 0xca, 0xc3, 0x24, 0xa2, 0xf0, 0x1c, 0x41, 0x30, 0x0c, 0x93, + 0x9c, 0xcd, 0x67, 0xb7, 0x96, 0xe9, 0x6b, 0x26, 0x9d, 0x24, 0x8c, 0x72, 0x96, 0xdf, 0x04, 0x74, + 0x06, 0x9f, 0xe8, 0x3e, 0xa6, 0x84, 0x8d, 0x73, 0x32, 0xa6, 0x56, 0x04, 0xb4, 0x6b, 0x38, 0x21, + 0x31, 0xe3, 0x71, 0x04, 0x07, 0xe8, 0x02, 0x00, 0xfb, 0x67, 0xba, 0x1c, 0x06, 0xbf, 0x7a, 0x60, + 0xb4, 0x92, 0x1b, 0xfc, 0xdf, 0x51, 0x07, 0xaf, 0x8f, 0xce, 0x27, 0xd5, 0x43, 0x4f, 0x7b, 0x1f, + 0x83, 0x07, 0xa3, 0x4a, 0x36, 0xa2, 0xad, 0xb0, 0xdc, 0x55, 0x6e, 0x55, 0xb6, 0x66, 0x25, 0xdd, + 0x35, 0x6c, 0x6b, 0xf5, 0x8f, 0xe3, 0x78, 0x67, 0xbe, 0x5f, 0x9c, 0x93, 0x31, 0x21, 0xdf, 0x9c, + 0xcb, 0xb1, 0xb5, 0x22, 0x85, 0xc2, 0x16, 0x6a, 0xb4, 0xf0, 0xb0, 0xde, 0x9a, 0xfa, 0xde, 0xd5, + 0x97, 0xa4, 0x50, 0xcb, 0x43, 0x7d, 0xb9, 0xf0, 0x96, 0xa6, 0xfe, 0xc3, 0x19, 0x59, 0xd2, 0xf7, + 0x49, 0xa1, 0x7c, 0xff, 0xa0, 0xf0, 0xfd, 0x85, 0xe7, 0xfb, 0x46, 0xf3, 0xe9, 0xcc, 0x34, 0xf6, + 0xf6, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x18, 0x25, 0xe9, 0x48, 0xb4, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/age_range_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/age_range_type.pb.go new file mode 100644 index 0000000..7ef3e94 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/age_range_type.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/age_range_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of demographic age ranges (e.g. between 18 and 24 years old). +type AgeRangeTypeEnum_AgeRangeType int32 + +const ( + // Not specified. + AgeRangeTypeEnum_UNSPECIFIED AgeRangeTypeEnum_AgeRangeType = 0 + // Used for return value only. Represents value unknown in this version. + AgeRangeTypeEnum_UNKNOWN AgeRangeTypeEnum_AgeRangeType = 1 + // Between 18 and 24 years old. + AgeRangeTypeEnum_AGE_RANGE_18_24 AgeRangeTypeEnum_AgeRangeType = 503001 + // Between 25 and 34 years old. + AgeRangeTypeEnum_AGE_RANGE_25_34 AgeRangeTypeEnum_AgeRangeType = 503002 + // Between 35 and 44 years old. + AgeRangeTypeEnum_AGE_RANGE_35_44 AgeRangeTypeEnum_AgeRangeType = 503003 + // Between 45 and 54 years old. + AgeRangeTypeEnum_AGE_RANGE_45_54 AgeRangeTypeEnum_AgeRangeType = 503004 + // Between 55 and 64 years old. + AgeRangeTypeEnum_AGE_RANGE_55_64 AgeRangeTypeEnum_AgeRangeType = 503005 + // 65 years old and beyond. 
+ AgeRangeTypeEnum_AGE_RANGE_65_UP AgeRangeTypeEnum_AgeRangeType = 503006 + // Undetermined age range. + AgeRangeTypeEnum_AGE_RANGE_UNDETERMINED AgeRangeTypeEnum_AgeRangeType = 503999 +) + +var AgeRangeTypeEnum_AgeRangeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 503001: "AGE_RANGE_18_24", + 503002: "AGE_RANGE_25_34", + 503003: "AGE_RANGE_35_44", + 503004: "AGE_RANGE_45_54", + 503005: "AGE_RANGE_55_64", + 503006: "AGE_RANGE_65_UP", + 503999: "AGE_RANGE_UNDETERMINED", +} +var AgeRangeTypeEnum_AgeRangeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AGE_RANGE_18_24": 503001, + "AGE_RANGE_25_34": 503002, + "AGE_RANGE_35_44": 503003, + "AGE_RANGE_45_54": 503004, + "AGE_RANGE_55_64": 503005, + "AGE_RANGE_65_UP": 503006, + "AGE_RANGE_UNDETERMINED": 503999, +} + +func (x AgeRangeTypeEnum_AgeRangeType) String() string { + return proto.EnumName(AgeRangeTypeEnum_AgeRangeType_name, int32(x)) +} +func (AgeRangeTypeEnum_AgeRangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_age_range_type_6aa2521db2f95ab5, []int{0, 0} +} + +// Container for enum describing the type of demographic age ranges. +type AgeRangeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AgeRangeTypeEnum) Reset() { *m = AgeRangeTypeEnum{} } +func (m *AgeRangeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AgeRangeTypeEnum) ProtoMessage() {} +func (*AgeRangeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_age_range_type_6aa2521db2f95ab5, []int{0} +} +func (m *AgeRangeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AgeRangeTypeEnum.Unmarshal(m, b) +} +func (m *AgeRangeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AgeRangeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AgeRangeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AgeRangeTypeEnum.Merge(dst, src) +} +func (m *AgeRangeTypeEnum) XXX_Size() int { + return xxx_messageInfo_AgeRangeTypeEnum.Size(m) +} +func (m *AgeRangeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AgeRangeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AgeRangeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AgeRangeTypeEnum)(nil), "google.ads.googleads.v1.enums.AgeRangeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AgeRangeTypeEnum_AgeRangeType", AgeRangeTypeEnum_AgeRangeType_name, AgeRangeTypeEnum_AgeRangeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/age_range_type.proto", fileDescriptor_age_range_type_6aa2521db2f95ab5) +} + +var fileDescriptor_age_range_type_6aa2521db2f95ab5 = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xd4, 0xbc, + 0xd2, 0xdc, 0x62, 0xfd, 0xc4, 0xf4, 0xd4, 0xf8, 0xa2, 0xc4, 0xbc, 0xf4, 0xd4, 0xf8, 0x92, 0xca, + 0x82, 0x54, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x59, 0x88, 0x42, 0xbd, 0xc4, 0x94, 0x62, + 0x3d, 0xb8, 0x1e, 0xbd, 0x32, 0x43, 0x3d, 0xb0, 0x1e, 0x29, 0x19, 0x98, 0x91, 0x05, 0x99, 0xfa, + 0x89, 0x79, 0x79, 0xf9, 0x25, 0x89, 0x25, 0x99, 0xf9, 0x79, 0xc5, 0x10, 0xcd, 0x4a, 0x2f, 0x19, + 0xb9, 0x04, 0x1c, 0xd3, 0x53, 0x83, 0x40, 0x86, 0x86, 0x54, 0x16, 0xa4, 0xba, 0xe6, 0x95, 0xe6, + 0x2a, 0x5d, 0x61, 0xe4, 0xe2, 0x41, 0x16, 
0x14, 0xe2, 0xe7, 0xe2, 0x0e, 0xf5, 0x0b, 0x0e, 0x70, + 0x75, 0xf6, 0x74, 0xf3, 0x74, 0x75, 0x11, 0x60, 0x10, 0xe2, 0xe6, 0x62, 0x0f, 0xf5, 0xf3, 0xf6, + 0xf3, 0x0f, 0xf7, 0x13, 0x60, 0x14, 0x12, 0xe5, 0xe2, 0x77, 0x74, 0x77, 0x8d, 0x0f, 0x72, 0xf4, + 0x73, 0x77, 0x8d, 0x37, 0xb4, 0x88, 0x37, 0x32, 0x11, 0xb8, 0x79, 0x53, 0x0e, 0x55, 0xd8, 0xc8, + 0x34, 0xde, 0xd8, 0x44, 0xe0, 0x16, 0xba, 0xb0, 0xb1, 0x69, 0xbc, 0x89, 0x89, 0xc0, 0x6d, 0x74, + 0x61, 0x13, 0xd3, 0x78, 0x53, 0x13, 0x81, 0x3b, 0xe8, 0xc2, 0xa6, 0xa6, 0xf1, 0x66, 0x26, 0x02, + 0x77, 0xd1, 0x85, 0xcd, 0x4c, 0xe3, 0x43, 0x03, 0x04, 0xee, 0xdd, 0x94, 0x13, 0x92, 0xe1, 0x12, + 0x43, 0x08, 0x87, 0xfa, 0xb9, 0xb8, 0x86, 0xb8, 0x06, 0xf9, 0x7a, 0xfa, 0xb9, 0xba, 0x08, 0xec, + 0x7f, 0x28, 0xe7, 0xf4, 0x8c, 0x91, 0x4b, 0x31, 0x39, 0x3f, 0x57, 0x0f, 0x6f, 0x78, 0x39, 0x09, + 0x22, 0xfb, 0x3c, 0x00, 0x14, 0x48, 0x01, 0x8c, 0x51, 0x4e, 0x50, 0x3d, 0xe9, 0xf9, 0x39, 0x89, + 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa, 0xe9, 0xa9, 0x79, 0xe0, 0x20, 0x84, 0xc5, 0x53, 0x41, + 0x66, 0x31, 0x8e, 0x68, 0xb3, 0x06, 0x93, 0x8b, 0x98, 0x98, 0xdd, 0x1d, 0x1d, 0x57, 0x31, 0xc9, + 0xba, 0x43, 0x8c, 0x72, 0x4c, 0x29, 0xd6, 0x83, 0x30, 0x41, 0xac, 0x30, 0x43, 0x3d, 0x50, 0xd0, + 0x17, 0x9f, 0x82, 0xc9, 0xc7, 0x38, 0xa6, 0x14, 0xc7, 0xc0, 0xe5, 0x63, 0xc2, 0x0c, 0x63, 0xc0, + 0xf2, 0xaf, 0x98, 0x14, 0x21, 0x82, 0x56, 0x56, 0x8e, 0x29, 0xc5, 0x56, 0x56, 0x70, 0x15, 0x56, + 0x56, 0x61, 0x86, 0x56, 0x56, 0x60, 0x35, 0x49, 0x6c, 0x60, 0x87, 0x19, 0x03, 0x02, 0x00, 0x00, + 0xff, 0xff, 0x6d, 0x32, 0xb7, 0xd1, 0x4e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_app_store.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_app_store.pb.go new file mode 100644 index 0000000..67b750b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_app_store.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_campaign_app_store.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing app campaign app store. +type AppCampaignAppStoreEnum_AppCampaignAppStore int32 + +const ( + // Not specified. + AppCampaignAppStoreEnum_UNSPECIFIED AppCampaignAppStoreEnum_AppCampaignAppStore = 0 + // Used for return value only. Represents value unknown in this version. + AppCampaignAppStoreEnum_UNKNOWN AppCampaignAppStoreEnum_AppCampaignAppStore = 1 + // Apple app store. + AppCampaignAppStoreEnum_APPLE_APP_STORE AppCampaignAppStoreEnum_AppCampaignAppStore = 2 + // Google play. 
+ AppCampaignAppStoreEnum_GOOGLE_APP_STORE AppCampaignAppStoreEnum_AppCampaignAppStore = 3 +) + +var AppCampaignAppStoreEnum_AppCampaignAppStore_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APPLE_APP_STORE", + 3: "GOOGLE_APP_STORE", +} +var AppCampaignAppStoreEnum_AppCampaignAppStore_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APPLE_APP_STORE": 2, + "GOOGLE_APP_STORE": 3, +} + +func (x AppCampaignAppStoreEnum_AppCampaignAppStore) String() string { + return proto.EnumName(AppCampaignAppStoreEnum_AppCampaignAppStore_name, int32(x)) +} +func (AppCampaignAppStoreEnum_AppCampaignAppStore) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_campaign_app_store_a6bc8941816fd21e, []int{0, 0} +} + +// The application store that distributes mobile applications. +type AppCampaignAppStoreEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppCampaignAppStoreEnum) Reset() { *m = AppCampaignAppStoreEnum{} } +func (m *AppCampaignAppStoreEnum) String() string { return proto.CompactTextString(m) } +func (*AppCampaignAppStoreEnum) ProtoMessage() {} +func (*AppCampaignAppStoreEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_campaign_app_store_a6bc8941816fd21e, []int{0} +} +func (m *AppCampaignAppStoreEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppCampaignAppStoreEnum.Unmarshal(m, b) +} +func (m *AppCampaignAppStoreEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppCampaignAppStoreEnum.Marshal(b, m, deterministic) +} +func (dst *AppCampaignAppStoreEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppCampaignAppStoreEnum.Merge(dst, src) +} +func (m *AppCampaignAppStoreEnum) XXX_Size() int { + return xxx_messageInfo_AppCampaignAppStoreEnum.Size(m) +} +func (m *AppCampaignAppStoreEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppCampaignAppStoreEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppCampaignAppStoreEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppCampaignAppStoreEnum)(nil), "google.ads.googleads.v1.enums.AppCampaignAppStoreEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppCampaignAppStoreEnum_AppCampaignAppStore", AppCampaignAppStoreEnum_AppCampaignAppStore_name, AppCampaignAppStoreEnum_AppCampaignAppStore_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_campaign_app_store.proto", fileDescriptor_app_campaign_app_store_a6bc8941816fd21e) +} + +var fileDescriptor_app_campaign_app_store_a6bc8941816fd21e = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x75, 0x1d, 0x28, 0x64, 0x0f, 0x2b, 0x9d, 0xa0, 0x88, 0x7b, 0xd8, 0x3e, 0x20, 0xa1, 0xf8, + 0x16, 0x9f, 0xb2, 0x59, 0xcb, 0x50, 0xda, 0xe0, 0xdc, 0x04, 0x29, 0x96, 0xb8, 0x96, 0x50, 0x58, + 0x93, 0xb0, 0x74, 0x03, 0x7f, 0xc7, 0x47, 0x3f, 0xc5, 0x4f, 0x11, 0xfc, 0x07, 0x49, 0xb2, 0x0d, + 0x84, 0xe9, 0x4b, 0x38, 0xb9, 0xe7, 0x9e, 0xc3, 0xb9, 0x07, 0x60, 0x2e, 0x25, 0x5f, 0x96, 0x88, + 0x15, 0x1a, 0x39, 0x68, 0xd0, 0x26, 0x44, 0xa5, 0x58, 0xd7, 0x1a, 0x31, 0xa5, 0xf2, 0x05, 0xab, + 0x15, 0xab, 0xb8, 0xc8, 0xcd, 0x47, 0x37, 0x72, 0x55, 0x42, 0xb5, 0x92, 0x8d, 0x0c, 0xfa, 0x4e, + 0x00, 0x59, 0xa1, 0xe1, 0x5e, 0x0b, 0x37, 0x21, 0xb4, 0xda, 0x8b, 0xcb, 0x9d, 0xb5, 0xaa, 0x10, + 0x13, 0x42, 0x36, 0xac, 0xa9, 0xa4, 0xd0, 0x4e, 0x3c, 0x7c, 0x03, 0x67, 0x44, 
0xa9, 0xf1, 0xd6, + 0x9b, 0x28, 0x35, 0x35, 0xce, 0x91, 0x58, 0xd7, 0xc3, 0x17, 0xd0, 0x3b, 0x40, 0x05, 0x5d, 0xd0, + 0x99, 0x25, 0x53, 0x1a, 0x8d, 0x27, 0xb7, 0x93, 0xe8, 0xc6, 0x3f, 0x0a, 0x3a, 0xe0, 0x64, 0x96, + 0xdc, 0x25, 0xe9, 0x53, 0xe2, 0xb7, 0x82, 0x1e, 0xe8, 0x12, 0x4a, 0xef, 0xa3, 0x9c, 0x50, 0x9a, + 0x4f, 0x1f, 0xd3, 0x87, 0xc8, 0xf7, 0x82, 0x53, 0xe0, 0xc7, 0x69, 0x1a, 0xff, 0x9a, 0xb6, 0x47, + 0xdf, 0x2d, 0x30, 0x58, 0xc8, 0x1a, 0xfe, 0x1b, 0x7f, 0x74, 0x7e, 0x20, 0x03, 0x35, 0xd1, 0x69, + 0xeb, 0x79, 0xb4, 0x95, 0x72, 0xb9, 0x64, 0x82, 0x43, 0xb9, 0xe2, 0x88, 0x97, 0xc2, 0x1e, 0xb6, + 0x6b, 0x51, 0x55, 0xfa, 0x8f, 0x52, 0xaf, 0xed, 0xfb, 0xee, 0xb5, 0x63, 0x42, 0x3e, 0xbc, 0x7e, + 0xec, 0xac, 0x48, 0xa1, 0xa1, 0x83, 0x06, 0xcd, 0x43, 0x68, 0xaa, 0xd0, 0x9f, 0x3b, 0x3e, 0x23, + 0x85, 0xce, 0xf6, 0x7c, 0x36, 0x0f, 0x33, 0xcb, 0x7f, 0x79, 0x03, 0x37, 0xc4, 0x98, 0x14, 0x1a, + 0xe3, 0xfd, 0x06, 0xc6, 0xf3, 0x10, 0x63, 0xbb, 0xf3, 0x7a, 0x6c, 0x83, 0x5d, 0xfd, 0x04, 0x00, + 0x00, 0xff, 0xff, 0xe3, 0xcc, 0x06, 0x38, 0xec, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.pb.go new file mode 100644 index 0000000..2b4f9fb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Goal type of App campaign BiddingStrategy. +type AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType int32 + +const ( + // Not specified. + AppCampaignBiddingStrategyGoalTypeEnum_UNSPECIFIED AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 0 + // Used for return value only. Represents value unknown in this version. + AppCampaignBiddingStrategyGoalTypeEnum_UNKNOWN AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 1 + // Aim to maximize the number of app installs. The cpa bid is the + // target cost per install. + AppCampaignBiddingStrategyGoalTypeEnum_OPTIMIZE_INSTALLS_TARGET_INSTALL_COST AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 2 + // Aim to maximize the long term number of selected in-app conversions from + // app installs. The cpa bid is the target cost per install. + AppCampaignBiddingStrategyGoalTypeEnum_OPTIMIZE_IN_APP_CONVERSIONS_TARGET_INSTALL_COST AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 3 + // Aim to maximize the long term number of selected in-app conversions from + // app installs. 
The cpa bid is the target cost per in-app conversion. Note + // that the actual cpa may seem higher than the target cpa at first, since + // the long term conversions haven’t happened yet. + AppCampaignBiddingStrategyGoalTypeEnum_OPTIMIZE_IN_APP_CONVERSIONS_TARGET_CONVERSION_COST AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 4 + // Aim to maximize all conversions' value, i.e. install + selected in-app + // conversions while achieving or exceeding target return on advertising + // spend. + AppCampaignBiddingStrategyGoalTypeEnum_OPTIMIZE_RETURN_ON_ADVERTISING_SPEND AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType = 5 +) + +var AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPTIMIZE_INSTALLS_TARGET_INSTALL_COST", + 3: "OPTIMIZE_IN_APP_CONVERSIONS_TARGET_INSTALL_COST", + 4: "OPTIMIZE_IN_APP_CONVERSIONS_TARGET_CONVERSION_COST", + 5: "OPTIMIZE_RETURN_ON_ADVERTISING_SPEND", +} +var AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPTIMIZE_INSTALLS_TARGET_INSTALL_COST": 2, + "OPTIMIZE_IN_APP_CONVERSIONS_TARGET_INSTALL_COST": 3, + "OPTIMIZE_IN_APP_CONVERSIONS_TARGET_CONVERSION_COST": 4, + "OPTIMIZE_RETURN_ON_ADVERTISING_SPEND": 5, +} + +func (x AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType) String() string { + return proto.EnumName(AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType_name, int32(x)) +} +func (AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_campaign_bidding_strategy_goal_type_f6a7d1f1531eb5da, []int{0, 0} +} + +// Container for enum describing goal towards which the bidding strategy of an +// app campaign should optimize for. 
+type AppCampaignBiddingStrategyGoalTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppCampaignBiddingStrategyGoalTypeEnum) Reset() { + *m = AppCampaignBiddingStrategyGoalTypeEnum{} +} +func (m *AppCampaignBiddingStrategyGoalTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AppCampaignBiddingStrategyGoalTypeEnum) ProtoMessage() {} +func (*AppCampaignBiddingStrategyGoalTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_campaign_bidding_strategy_goal_type_f6a7d1f1531eb5da, []int{0} +} +func (m *AppCampaignBiddingStrategyGoalTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum.Unmarshal(m, b) +} +func (m *AppCampaignBiddingStrategyGoalTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AppCampaignBiddingStrategyGoalTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum.Merge(dst, src) +} +func (m *AppCampaignBiddingStrategyGoalTypeEnum) XXX_Size() int { + return xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum.Size(m) +} +func (m *AppCampaignBiddingStrategyGoalTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppCampaignBiddingStrategyGoalTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppCampaignBiddingStrategyGoalTypeEnum)(nil), "google.ads.googleads.v1.enums.AppCampaignBiddingStrategyGoalTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType", AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType_name, AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_campaign_bidding_strategy_goal_type.proto", fileDescriptor_app_campaign_bidding_strategy_goal_type_f6a7d1f1531eb5da) +} + +var fileDescriptor_app_campaign_bidding_strategy_goal_type_f6a7d1f1531eb5da = []byte{ + // 413 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xcd, 0x8a, 0xdb, 0x30, + 0x10, 0x6e, 0xbc, 0xfd, 0x01, 0xed, 0xa1, 0xc1, 0xc7, 0xd2, 0x3d, 0x6c, 0xe8, 0xef, 0x45, 0xc6, + 0x5d, 0xe8, 0x41, 0x3d, 0x29, 0x89, 0x6a, 0xc4, 0x6e, 0x65, 0x63, 0x29, 0x2e, 0x2c, 0x01, 0xa1, + 0x5d, 0x1b, 0x61, 0x48, 0x24, 0x11, 0x79, 0x17, 0x72, 0xef, 0x93, 0xf4, 0xd8, 0x4b, 0xdf, 0xa3, + 0x8f, 0xd2, 0x67, 0xe8, 0xa1, 0xc4, 0x4a, 0xdc, 0x1e, 0xda, 0x6e, 0x2f, 0xe6, 0xf3, 0xcc, 0xf7, + 0x23, 0x66, 0x06, 0x9c, 0x6b, 0x6b, 0xf5, 0xaa, 0x49, 0x54, 0xed, 0x93, 0x00, 0x77, 0xe8, 0x36, + 0x4d, 0x1a, 0x73, 0xb3, 0xf6, 0x89, 0x72, 0x4e, 0x5e, 0xab, 0xb5, 0x53, 0xad, 0x36, 0xf2, 0xaa, + 0xad, 0xeb, 0xd6, 0x68, 0xe9, 0xbb, 0x8d, 0xea, 0x1a, 0xbd, 0x95, 0xda, 0xaa, 0x95, 0xec, 0xb6, + 0xae, 0x81, 0x6e, 0x63, 0x3b, 0x1b, 0x9f, 0x04, 0x07, 0xa8, 0x6a, 0x0f, 0x07, 0x33, 0x78, 0x9b, + 0xc2, 0xde, 0xec, 0xc9, 0xd3, 0x43, 0x96, 0x6b, 0x13, 0x65, 0x8c, 0xed, 0x54, 0xd7, 0x5a, 0xe3, + 0x83, 0x78, 0xf2, 0x35, 0x02, 0x2f, 0xb0, 0x73, 0xb3, 0x7d, 0xda, 0x34, 0x84, 0xf1, 0x7d, 0x56, + 0x66, 0xd5, 0x4a, 0x6c, 0x5d, 0x43, 0xcc, 0xcd, 0x7a, 0xf2, 0x29, 0x02, 0x93, 0xbb, 0xa9, 0xf1, + 0x63, 0x70, 0xbc, 0x60, 0xbc, 0x20, 0x33, 0xfa, 0x9e, 0x92, 
0xf9, 0xf8, 0x5e, 0x7c, 0x0c, 0x1e, + 0x2d, 0xd8, 0x39, 0xcb, 0x3f, 0xb2, 0xf1, 0x28, 0x7e, 0x0d, 0x9e, 0xe7, 0x85, 0xa0, 0x1f, 0xe8, + 0x25, 0x91, 0x94, 0x71, 0x81, 0x2f, 0x2e, 0xb8, 0x14, 0xb8, 0xcc, 0x88, 0x38, 0xfc, 0xcb, 0x59, + 0xce, 0xc5, 0x38, 0x8a, 0xcf, 0x40, 0xf2, 0x1b, 0x55, 0xe2, 0xa2, 0x90, 0xb3, 0x9c, 0x55, 0xa4, + 0xe4, 0x34, 0x67, 0x7f, 0x16, 0x1d, 0xc5, 0x6f, 0xc1, 0x9b, 0xff, 0x10, 0xfd, 0x2a, 0x05, 0xdd, + 0xfd, 0xf8, 0x15, 0x78, 0x36, 0xe8, 0x4a, 0x22, 0x16, 0x25, 0x93, 0x39, 0x93, 0x78, 0x5e, 0x91, + 0x52, 0x50, 0x4e, 0x59, 0x26, 0x79, 0x41, 0xd8, 0x7c, 0xfc, 0x60, 0xfa, 0x63, 0x04, 0x4e, 0xaf, + 0xed, 0x1a, 0xfe, 0x73, 0xea, 0xd3, 0x97, 0x77, 0x4f, 0xaa, 0xd8, 0x2d, 0xa0, 0x18, 0x5d, 0x4e, + 0xf7, 0x4e, 0xda, 0xae, 0x94, 0xd1, 0xd0, 0x6e, 0x74, 0xa2, 0x1b, 0xd3, 0xaf, 0xe7, 0x70, 0x1c, + 0xae, 0xf5, 0x7f, 0xb9, 0x95, 0x77, 0xfd, 0xf7, 0x73, 0x74, 0x94, 0x61, 0xfc, 0x25, 0x3a, 0xc9, + 0x82, 0x15, 0xae, 0x3d, 0x0c, 0x70, 0x87, 0xaa, 0x14, 0xee, 0x16, 0xe8, 0xbf, 0x1d, 0xfa, 0x4b, + 0x5c, 0xfb, 0xe5, 0xd0, 0x5f, 0x56, 0xe9, 0xb2, 0xef, 0x7f, 0x8f, 0x4e, 0x43, 0x11, 0x21, 0x5c, + 0x7b, 0x84, 0x06, 0x06, 0x42, 0x55, 0x8a, 0x50, 0xcf, 0xb9, 0x7a, 0xd8, 0x3f, 0xec, 0xec, 0x67, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xa7, 0x7a, 0x3b, 0x7b, 0xc3, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_payment_model_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_payment_model_type.pb.go new file mode 100644 index 0000000..f2264e1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_payment_model_type.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_payment_model_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible app payment models. +type AppPaymentModelTypeEnum_AppPaymentModelType int32 + +const ( + // Not specified. + AppPaymentModelTypeEnum_UNSPECIFIED AppPaymentModelTypeEnum_AppPaymentModelType = 0 + // Used for return value only. Represents value unknown in this version. + AppPaymentModelTypeEnum_UNKNOWN AppPaymentModelTypeEnum_AppPaymentModelType = 1 + // Represents paid-for apps. 
+ AppPaymentModelTypeEnum_PAID AppPaymentModelTypeEnum_AppPaymentModelType = 30 +) + +var AppPaymentModelTypeEnum_AppPaymentModelType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 30: "PAID", +} +var AppPaymentModelTypeEnum_AppPaymentModelType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PAID": 30, +} + +func (x AppPaymentModelTypeEnum_AppPaymentModelType) String() string { + return proto.EnumName(AppPaymentModelTypeEnum_AppPaymentModelType_name, int32(x)) +} +func (AppPaymentModelTypeEnum_AppPaymentModelType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_payment_model_type_2d44013d63f90d11, []int{0, 0} +} + +// Represents a criterion for targeting paid apps. +type AppPaymentModelTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppPaymentModelTypeEnum) Reset() { *m = AppPaymentModelTypeEnum{} } +func (m *AppPaymentModelTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AppPaymentModelTypeEnum) ProtoMessage() {} +func (*AppPaymentModelTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_payment_model_type_2d44013d63f90d11, []int{0} +} +func (m *AppPaymentModelTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppPaymentModelTypeEnum.Unmarshal(m, b) +} +func (m *AppPaymentModelTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppPaymentModelTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AppPaymentModelTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppPaymentModelTypeEnum.Merge(dst, src) +} +func (m *AppPaymentModelTypeEnum) XXX_Size() int { + return xxx_messageInfo_AppPaymentModelTypeEnum.Size(m) +} +func (m *AppPaymentModelTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppPaymentModelTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppPaymentModelTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppPaymentModelTypeEnum)(nil), "google.ads.googleads.v1.enums.AppPaymentModelTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppPaymentModelTypeEnum_AppPaymentModelType", AppPaymentModelTypeEnum_AppPaymentModelType_name, AppPaymentModelTypeEnum_AppPaymentModelType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_payment_model_type.proto", fileDescriptor_app_payment_model_type_2d44013d63f90d11) +} + +var fileDescriptor_app_payment_model_type_2d44013d63f90d11 = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xf3, 0x30, + 0x1c, 0xc5, 0xbf, 0xed, 0x13, 0x95, 0xec, 0xc2, 0x51, 0x2f, 0x14, 0x71, 0xc2, 0xf6, 0x00, 0x09, + 0xc5, 0xbb, 0x88, 0x17, 0xa9, 0xab, 0xa3, 0x88, 0xb5, 0xa0, 0xab, 0x22, 0x85, 0x12, 0x4d, 0x08, + 0x85, 0x36, 0x09, 0x4b, 0x37, 0xe8, 0xeb, 0x78, 0xe9, 0xa3, 0xf8, 0x28, 0x82, 0xef, 0x20, 0x4d, + 0x6c, 0xaf, 0xa6, 0x37, 0xe1, 0x90, 0xf3, 0xff, 0x9d, 0x9c, 0xfc, 0x01, 0x16, 0x4a, 0x89, 0x92, + 0x23, 0xca, 0x0c, 0x72, 0xb2, 0x55, 0x1b, 0x1f, 0x71, 0xb9, 0xae, 0x0c, 0xa2, 0x5a, 0xe7, 0x9a, + 0x36, 0x15, 0x97, 0x75, 0x5e, 0x29, 0xc6, 0xcb, 0xbc, 0x6e, 0x34, 0x87, 0x7a, 0xa5, 0x6a, 0xe5, + 0x4d, 0x1c, 0x00, 0x29, 0x33, 0xb0, 0x67, 0xe1, 0xc6, 0x87, 0x96, 0x3d, 0x39, 0xed, 0xa2, 0x75, + 0x81, 0xa8, 0x94, 0xaa, 0xa6, 0x75, 0xa1, 0xa4, 0x71, 0xf0, 0xec, 0x09, 0x1c, 0x11, 0xad, 0x13, + 0x97, 0x7d, 0xdb, 0x46, 0x3f, 0x34, 0x9a, 0x87, 0x72, 0x5d, 0xcd, 0x2e, 0xc1, 
0xe1, 0x16, 0xcb, + 0x3b, 0x00, 0xa3, 0x65, 0x7c, 0x9f, 0x84, 0x57, 0xd1, 0x75, 0x14, 0xce, 0xc7, 0xff, 0xbc, 0x11, + 0xd8, 0x5b, 0xc6, 0x37, 0xf1, 0xdd, 0x63, 0x3c, 0x1e, 0x78, 0xfb, 0x60, 0x27, 0x21, 0xd1, 0x7c, + 0x7c, 0x16, 0x7c, 0x0d, 0xc0, 0xf4, 0x55, 0x55, 0xf0, 0xcf, 0x76, 0xc1, 0xf1, 0x96, 0x27, 0x92, + 0xb6, 0x59, 0x32, 0x78, 0x0e, 0x7e, 0x50, 0xa1, 0x4a, 0x2a, 0x05, 0x54, 0x2b, 0x81, 0x04, 0x97, + 0xb6, 0x77, 0xb7, 0x24, 0x5d, 0x98, 0x5f, 0x76, 0x76, 0x61, 0xcf, 0xb7, 0xe1, 0xff, 0x05, 0x21, + 0xef, 0xc3, 0xc9, 0xc2, 0x45, 0x11, 0x66, 0xa0, 0x93, 0xad, 0x4a, 0x7d, 0xd8, 0xfe, 0xd4, 0x7c, + 0x74, 0x7e, 0x46, 0x98, 0xc9, 0x7a, 0x3f, 0x4b, 0xfd, 0xcc, 0xfa, 0x9f, 0xc3, 0xa9, 0xbb, 0xc4, + 0x98, 0x30, 0x83, 0x71, 0x3f, 0x81, 0x71, 0xea, 0x63, 0x6c, 0x67, 0x5e, 0x76, 0x6d, 0xb1, 0xf3, + 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xf1, 0xd7, 0x51, 0xcb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_placeholder_field.pb.go new file mode 100644 index 0000000..5221f6a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_placeholder_field.pb.go @@ -0,0 +1,153 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for App placeholder fields. +type AppPlaceholderFieldEnum_AppPlaceholderField int32 + +const ( + // Not specified. + AppPlaceholderFieldEnum_UNSPECIFIED AppPlaceholderFieldEnum_AppPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + AppPlaceholderFieldEnum_UNKNOWN AppPlaceholderFieldEnum_AppPlaceholderField = 1 + // Data Type: INT64. The application store that the target application + // belongs to. Valid values are: 1 = Apple iTunes Store; 2 = Google Play + // Store. + AppPlaceholderFieldEnum_STORE AppPlaceholderFieldEnum_AppPlaceholderField = 2 + // Data Type: STRING. The store-specific ID for the target application. + AppPlaceholderFieldEnum_ID AppPlaceholderFieldEnum_AppPlaceholderField = 3 + // Data Type: STRING. The visible text displayed when the link is rendered + // in an ad. + AppPlaceholderFieldEnum_LINK_TEXT AppPlaceholderFieldEnum_AppPlaceholderField = 4 + // Data Type: STRING. The destination URL of the in-app link. + AppPlaceholderFieldEnum_URL AppPlaceholderFieldEnum_AppPlaceholderField = 5 + // Data Type: URL_LIST. Final URLs for the in-app link when using Upgraded + // URLs. + AppPlaceholderFieldEnum_FINAL_URLS AppPlaceholderFieldEnum_AppPlaceholderField = 6 + // Data Type: URL_LIST. Final Mobile URLs for the in-app link when using + // Upgraded URLs. 
+ AppPlaceholderFieldEnum_FINAL_MOBILE_URLS AppPlaceholderFieldEnum_AppPlaceholderField = 7 + // Data Type: URL. Tracking template for the in-app link when using Upgraded + // URLs. + AppPlaceholderFieldEnum_TRACKING_URL AppPlaceholderFieldEnum_AppPlaceholderField = 8 + // Data Type: STRING. Final URL suffix for the in-app link when using + // parallel tracking. + AppPlaceholderFieldEnum_FINAL_URL_SUFFIX AppPlaceholderFieldEnum_AppPlaceholderField = 9 +) + +var AppPlaceholderFieldEnum_AppPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "STORE", + 3: "ID", + 4: "LINK_TEXT", + 5: "URL", + 6: "FINAL_URLS", + 7: "FINAL_MOBILE_URLS", + 8: "TRACKING_URL", + 9: "FINAL_URL_SUFFIX", +} +var AppPlaceholderFieldEnum_AppPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "STORE": 2, + "ID": 3, + "LINK_TEXT": 4, + "URL": 5, + "FINAL_URLS": 6, + "FINAL_MOBILE_URLS": 7, + "TRACKING_URL": 8, + "FINAL_URL_SUFFIX": 9, +} + +func (x AppPlaceholderFieldEnum_AppPlaceholderField) String() string { + return proto.EnumName(AppPlaceholderFieldEnum_AppPlaceholderField_name, int32(x)) +} +func (AppPlaceholderFieldEnum_AppPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_placeholder_field_18f7848a1cee338f, []int{0, 0} +} + +// Values for App placeholder fields. +type AppPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppPlaceholderFieldEnum) Reset() { *m = AppPlaceholderFieldEnum{} } +func (m *AppPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*AppPlaceholderFieldEnum) ProtoMessage() {} +func (*AppPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_placeholder_field_18f7848a1cee338f, []int{0} +} +func (m *AppPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *AppPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *AppPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppPlaceholderFieldEnum.Merge(dst, src) +} +func (m *AppPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_AppPlaceholderFieldEnum.Size(m) +} +func (m *AppPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.AppPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppPlaceholderFieldEnum_AppPlaceholderField", AppPlaceholderFieldEnum_AppPlaceholderField_name, AppPlaceholderFieldEnum_AppPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_placeholder_field.proto", fileDescriptor_app_placeholder_field_18f7848a1cee338f) +} + +var fileDescriptor_app_placeholder_field_18f7848a1cee338f = []byte{ + // 382 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xdd, 0xce, 0x93, 0x30, + 0x18, 0x16, 0xe6, 0xb7, 0xb9, 0x7e, 0xfe, 0xd4, 0xaa, 0xd1, 0x18, 0x77, 0xb0, 0x5d, 0x40, 0x09, + 0xf1, 0xc8, 0x7a, 0x54, 0x36, 0x58, 0x9a, 0x21, 0x23, 0xfc, 0xcc, 0xc5, 0x90, 0x10, 0x1c, 0x88, + 0x24, 0x8c, 0x36, 0xeb, 0xb6, 0x0b, 0xf2, 
0xd0, 0xc4, 0x0b, 0xd1, 0x4b, 0x31, 0xf1, 0x1e, 0x0c, + 0xd4, 0xe1, 0xc9, 0xf4, 0xa4, 0x79, 0xfa, 0x3e, 0x3f, 0x69, 0x9f, 0x17, 0xbc, 0x29, 0x39, 0x2f, + 0xeb, 0xc2, 0xc8, 0x72, 0x69, 0x28, 0xd8, 0xa2, 0xb3, 0x69, 0x14, 0xcd, 0x69, 0x2f, 0x8d, 0x4c, + 0x88, 0x54, 0xd4, 0xd9, 0xae, 0xf8, 0xcc, 0xeb, 0xbc, 0x38, 0xa4, 0x9f, 0xaa, 0xa2, 0xce, 0xb1, + 0x38, 0xf0, 0x23, 0x47, 0x13, 0xa5, 0xc7, 0x59, 0x2e, 0x71, 0x6f, 0xc5, 0x67, 0x13, 0x77, 0xd6, + 0x97, 0xaf, 0x2e, 0xc9, 0xa2, 0x32, 0xb2, 0xa6, 0xe1, 0xc7, 0xec, 0x58, 0xf1, 0x46, 0x2a, 0xf3, + 0xec, 0xbb, 0x06, 0x9e, 0x53, 0x21, 0xfc, 0xbf, 0xd9, 0x4e, 0x1b, 0x6d, 0x37, 0xa7, 0xfd, 0xec, + 0x9b, 0x06, 0x9e, 0x5c, 0xe1, 0xd0, 0x23, 0x70, 0x1b, 0x7b, 0xa1, 0x6f, 0xcf, 0x99, 0xc3, 0xec, + 0x05, 0xbc, 0x83, 0x6e, 0xc1, 0x28, 0xf6, 0x56, 0xde, 0xfa, 0xbd, 0x07, 0x35, 0x34, 0x06, 0x37, + 0x61, 0xb4, 0x0e, 0x6c, 0xa8, 0xa3, 0x21, 0xd0, 0xd9, 0x02, 0x0e, 0xd0, 0x03, 0x30, 0x76, 0x99, + 0xb7, 0x4a, 0x23, 0x7b, 0x1b, 0xc1, 0xbb, 0x68, 0x04, 0x06, 0x71, 0xe0, 0xc2, 0x1b, 0xf4, 0x10, + 0x00, 0x87, 0x79, 0xd4, 0x4d, 0xe3, 0xc0, 0x0d, 0xe1, 0x10, 0x3d, 0x03, 0x8f, 0xd5, 0xfd, 0xdd, + 0xda, 0x62, 0xae, 0xad, 0xc6, 0x23, 0x04, 0xc1, 0xfd, 0x28, 0xa0, 0xf3, 0x15, 0xf3, 0x96, 0xed, + 0x08, 0xde, 0x43, 0x4f, 0x01, 0xec, 0x8d, 0x69, 0x18, 0x3b, 0x0e, 0xdb, 0xc2, 0xb1, 0xf5, 0x4b, + 0x03, 0xd3, 0x1d, 0xdf, 0xe3, 0xff, 0xf6, 0x61, 0xbd, 0xb8, 0xf2, 0x25, 0xbf, 0xed, 0xc2, 0xd7, + 0x3e, 0x58, 0x7f, 0xac, 0x25, 0xaf, 0xb3, 0xa6, 0xc4, 0xfc, 0x50, 0x1a, 0x65, 0xd1, 0x74, 0x4d, + 0x5d, 0xb6, 0x22, 0x2a, 0xf9, 0x8f, 0x25, 0xbd, 0xed, 0xce, 0x2f, 0xfa, 0x60, 0x49, 0xe9, 0x57, + 0x7d, 0xb2, 0x54, 0x51, 0x34, 0x97, 0x58, 0xc1, 0x16, 0x6d, 0x4c, 0xdc, 0x56, 0x2b, 0x7f, 0x5c, + 0xf8, 0x84, 0xe6, 0x32, 0xe9, 0xf9, 0x64, 0x63, 0x26, 0x1d, 0xff, 0x53, 0x9f, 0xaa, 0x21, 0x21, + 0x34, 0x97, 0x84, 0xf4, 0x0a, 0x42, 0x36, 0x26, 0x21, 0x9d, 0xe6, 0xe3, 0xb0, 0x7b, 0xd8, 0xeb, + 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xb9, 0xa4, 0x7c, 0x93, 0x3c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_store.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_store.pb.go new file mode 100644 index 0000000..f86baf7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_store.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_store.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// App store type in an app extension. +type AppStoreEnum_AppStore int32 + +const ( + // Not specified. + AppStoreEnum_UNSPECIFIED AppStoreEnum_AppStore = 0 + // Used for return value only. Represents value unknown in this version. + AppStoreEnum_UNKNOWN AppStoreEnum_AppStore = 1 + // Apple iTunes. + AppStoreEnum_APPLE_ITUNES AppStoreEnum_AppStore = 2 + // Google Play. 
+ AppStoreEnum_GOOGLE_PLAY AppStoreEnum_AppStore = 3 +) + +var AppStoreEnum_AppStore_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APPLE_ITUNES", + 3: "GOOGLE_PLAY", +} +var AppStoreEnum_AppStore_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APPLE_ITUNES": 2, + "GOOGLE_PLAY": 3, +} + +func (x AppStoreEnum_AppStore) String() string { + return proto.EnumName(AppStoreEnum_AppStore_name, int32(x)) +} +func (AppStoreEnum_AppStore) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_store_e122b0acc6870c18, []int{0, 0} +} + +// Container for enum describing app store type in an app extension. +type AppStoreEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppStoreEnum) Reset() { *m = AppStoreEnum{} } +func (m *AppStoreEnum) String() string { return proto.CompactTextString(m) } +func (*AppStoreEnum) ProtoMessage() {} +func (*AppStoreEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_store_e122b0acc6870c18, []int{0} +} +func (m *AppStoreEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppStoreEnum.Unmarshal(m, b) +} +func (m *AppStoreEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppStoreEnum.Marshal(b, m, deterministic) +} +func (dst *AppStoreEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppStoreEnum.Merge(dst, src) +} +func (m *AppStoreEnum) XXX_Size() int { + return xxx_messageInfo_AppStoreEnum.Size(m) +} +func (m *AppStoreEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppStoreEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppStoreEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppStoreEnum)(nil), "google.ads.googleads.v1.enums.AppStoreEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppStoreEnum_AppStore", AppStoreEnum_AppStore_name, AppStoreEnum_AppStore_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_store.proto", fileDescriptor_app_store_e122b0acc6870c18) +} + +var fileDescriptor_app_store_e122b0acc6870c18 = []byte{ + // 299 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x07, 0x2a, 0xd9, 0xc4, 0xd2, 0xa3, 0xb8, 0xc3, 0x76, 0x37, 0xa1, 0x78, 0x8b, + 0xa7, 0x4c, 0x63, 0x19, 0x1b, 0x5d, 0x60, 0x6e, 0xa2, 0x16, 0x46, 0xb4, 0x25, 0x14, 0xd6, 0x24, + 0x34, 0xdd, 0x1e, 0xc8, 0xa3, 0x8f, 0xe2, 0x63, 0x78, 0xf4, 0x29, 0x24, 0x89, 0xed, 0x4d, 0x2f, + 0xe1, 0x4b, 0xbe, 0x5f, 0xbe, 0x7c, 0xf9, 0x83, 0x2b, 0xa1, 0x94, 0xd8, 0x15, 0x88, 0xe7, 0x06, + 0x79, 0x69, 0xd5, 0x21, 0x46, 0x85, 0xdc, 0x57, 0x06, 0x71, 0xad, 0xb7, 0xa6, 0x51, 0x75, 0x01, + 0x75, 0xad, 0x1a, 0x15, 0x8d, 0x3c, 0x03, 0x79, 0x6e, 0x60, 0x87, 0xc3, 0x43, 0x0c, 0x1d, 0x7e, + 0x71, 0xd9, 0xa6, 0xe9, 0x12, 0x71, 0x29, 0x55, 0xc3, 0x9b, 0x52, 0x49, 0xe3, 0x2f, 0x4f, 0x5e, + 0xc0, 0x90, 0x68, 0xbd, 0xb2, 0x71, 0x54, 0xee, 0xab, 0xc9, 0x1c, 0x9c, 0xb6, 0xfb, 0xe8, 0x1c, + 0x0c, 0xd6, 0xe9, 0x8a, 0xd1, 0xdb, 0xd9, 0xfd, 0x8c, 0xde, 0x85, 0x47, 0xd1, 0x00, 0x9c, 0xac, + 0xd3, 0x79, 0xba, 0x7c, 0x4c, 0xc3, 0x5e, 0x14, 0x82, 0x21, 0x61, 0x6c, 0x41, 0xb7, 0xb3, 0x87, + 0x75, 0x4a, 0x57, 0x61, 0x60, 0xf9, 0x64, 0xb9, 0x4c, 0x16, 0x74, 0xcb, 0x16, 0xe4, 0x29, 0xec, + 0x4f, 0xbf, 0x7a, 0x60, 0xfc, 0xa6, 0x2a, 0xf8, 0x6f, 0xc1, 0xe9, 0x59, 0xfb, 0x20, 0xb3, 0x8d, + 0x58, 0xef, 0x79, 0xfa, 0xcb, 0x0b, 0xb5, 0xe3, 0x52, 0x40, 0x55, 0x0b, 0x24, 0x0a, 0xe9, 
0xfa, + 0xb6, 0xf3, 0xd0, 0xa5, 0xf9, 0x63, 0x3c, 0x37, 0x6e, 0x7d, 0x0f, 0xfa, 0x09, 0x21, 0x1f, 0xc1, + 0x28, 0xf1, 0x51, 0x24, 0x37, 0xd0, 0x4b, 0xab, 0x36, 0x31, 0xb4, 0x9f, 0x35, 0x9f, 0xad, 0x9f, + 0x91, 0xdc, 0x64, 0x9d, 0x9f, 0x6d, 0xe2, 0xcc, 0xf9, 0xdf, 0xc1, 0xd8, 0x1f, 0x62, 0x4c, 0x72, + 0x83, 0x71, 0x47, 0x60, 0xbc, 0x89, 0x31, 0x76, 0xcc, 0xeb, 0xb1, 0x2b, 0x76, 0xfd, 0x13, 0x00, + 0x00, 0xff, 0xff, 0xb1, 0x7b, 0x5e, 0x2a, 0xb6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_url_operating_system_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_url_operating_system_type.pb.go new file mode 100644 index 0000000..7000ec3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/app_url_operating_system_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/app_url_operating_system_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Operating System +type AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType int32 + +const ( + // Not specified. + AppUrlOperatingSystemTypeEnum_UNSPECIFIED AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType = 0 + // Used for return value only. Represents value unknown in this version. + AppUrlOperatingSystemTypeEnum_UNKNOWN AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType = 1 + // The Apple IOS operating system. + AppUrlOperatingSystemTypeEnum_IOS AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType = 2 + // The Android operating system. + AppUrlOperatingSystemTypeEnum_ANDROID AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType = 3 +) + +var AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IOS", + 3: "ANDROID", +} +var AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IOS": 2, + "ANDROID": 3, +} + +func (x AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType) String() string { + return proto.EnumName(AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType_name, int32(x)) +} +func (AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_url_operating_system_type_383fa34862fb8242, []int{0, 0} +} + +// The possible OS types for a deeplink AppUrl. 
+type AppUrlOperatingSystemTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppUrlOperatingSystemTypeEnum) Reset() { *m = AppUrlOperatingSystemTypeEnum{} } +func (m *AppUrlOperatingSystemTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AppUrlOperatingSystemTypeEnum) ProtoMessage() {} +func (*AppUrlOperatingSystemTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_app_url_operating_system_type_383fa34862fb8242, []int{0} +} +func (m *AppUrlOperatingSystemTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppUrlOperatingSystemTypeEnum.Unmarshal(m, b) +} +func (m *AppUrlOperatingSystemTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppUrlOperatingSystemTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AppUrlOperatingSystemTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppUrlOperatingSystemTypeEnum.Merge(dst, src) +} +func (m *AppUrlOperatingSystemTypeEnum) XXX_Size() int { + return xxx_messageInfo_AppUrlOperatingSystemTypeEnum.Size(m) +} +func (m *AppUrlOperatingSystemTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AppUrlOperatingSystemTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AppUrlOperatingSystemTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AppUrlOperatingSystemTypeEnum)(nil), "google.ads.googleads.v1.enums.AppUrlOperatingSystemTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType", AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType_name, AppUrlOperatingSystemTypeEnum_AppUrlOperatingSystemType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/app_url_operating_system_type.proto", fileDescriptor_app_url_operating_system_type_383fa34862fb8242) +} + +var fileDescriptor_app_url_operating_system_type_383fa34862fb8242 = []byte{ + // 314 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0x75, 0x1d, 0x38, 0xc8, 0x0e, 0x96, 0xde, 0x14, 0x27, 0x6c, 0x3f, 0x20, 0xa1, 0x78, 0x8b, + 0xa7, 0xcc, 0xcd, 0x51, 0x84, 0x76, 0x38, 0x37, 0x41, 0x0a, 0x25, 0xda, 0x10, 0x0a, 0x6d, 0x12, + 0x9a, 0x6c, 0xd0, 0xbf, 0xe3, 0xd1, 0x9f, 0xe2, 0x4f, 0xf1, 0xea, 0x1f, 0x90, 0x26, 0xb6, 0xb7, + 0x7a, 0x09, 0x8f, 0xef, 0x7b, 0xdf, 0x7b, 0x2f, 0x0f, 0x10, 0x2e, 0x25, 0x2f, 0x19, 0xa2, 0xb9, + 0x46, 0x0e, 0xb6, 0xe8, 0x14, 0x22, 0x26, 0x8e, 0x95, 0x46, 0x54, 0xa9, 0xec, 0x58, 0x97, 0x99, + 0x54, 0xac, 0xa6, 0xa6, 0x10, 0x3c, 0xd3, 0x8d, 0x36, 0xac, 0xca, 0x4c, 0xa3, 0x18, 0x54, 0xb5, + 0x34, 0x32, 0x98, 0xb9, 0x3b, 0x48, 0x73, 0x0d, 0x7b, 0x09, 0x78, 0x0a, 0xa1, 0x95, 0xb8, 0xba, + 0xee, 0x1c, 0x54, 0x81, 0xa8, 0x10, 0xd2, 0x50, 0x53, 0x48, 0xa1, 0xdd, 0xf1, 0x42, 0x81, 0x19, + 0x51, 0x6a, 0x5f, 0x97, 0x49, 0xe7, 0xb0, 0xb3, 0x06, 0xcf, 0x8d, 0x62, 0x6b, 0x71, 0xac, 0x16, + 0x09, 0xb8, 0x1c, 0x24, 0x04, 0x17, 0x60, 0xba, 0x8f, 0x77, 0xdb, 0xf5, 0x7d, 0xf4, 0x10, 0xad, + 0x57, 0xfe, 0x59, 0x30, 0x05, 0x93, 0x7d, 0xfc, 0x18, 0x27, 0x2f, 0xb1, 0x3f, 0x0a, 0x26, 0x60, + 0x1c, 0x25, 0x3b, 0xdf, 0x6b, 0xa7, 0x24, 0x5e, 0x3d, 0x25, 0xd1, 0xca, 0x1f, 0x2f, 0x7f, 0x46, + 0x60, 0xfe, 0x2e, 0x2b, 0xf8, 0x6f, 0xea, 0xe5, 0xcd, 0xa0, 0xe9, 0xb6, 0xcd, 0xbd, 0x1d, 0xbd, + 0x2e, 0xff, 0x04, 0xb8, 0x2c, 0xa9, 0xe0, 0x50, 0xd6, 0x1c, 0x71, 0x26, 0xec, 0xaf, 0xba, 0x26, + 0x55, 0xa1, 0x07, 0x8a, 0xbd, 
0xb3, 0xef, 0x87, 0x37, 0xde, 0x10, 0xf2, 0xe9, 0xcd, 0x36, 0x4e, + 0x8a, 0xe4, 0x1a, 0x3a, 0xd8, 0xa2, 0x43, 0x08, 0xdb, 0x06, 0xf4, 0x57, 0xb7, 0x4f, 0x49, 0xae, + 0xd3, 0x7e, 0x9f, 0x1e, 0xc2, 0xd4, 0xee, 0xbf, 0xbd, 0xb9, 0x1b, 0x62, 0x4c, 0x72, 0x8d, 0x71, + 0xcf, 0xc0, 0xf8, 0x10, 0x62, 0x6c, 0x39, 0x6f, 0xe7, 0x36, 0xd8, 0xed, 0x6f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x20, 0x39, 0x84, 0x74, 0xf0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/asset_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/asset_type.pb.go new file mode 100644 index 0000000..a200004 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/asset_type.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/asset_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible types of asset. +type AssetTypeEnum_AssetType int32 + +const ( + // Not specified. + AssetTypeEnum_UNSPECIFIED AssetTypeEnum_AssetType = 0 + // Used for return value only. Represents value unknown in this version. + AssetTypeEnum_UNKNOWN AssetTypeEnum_AssetType = 1 + // YouTube video asset. + AssetTypeEnum_YOUTUBE_VIDEO AssetTypeEnum_AssetType = 2 + // Media bundle asset. + AssetTypeEnum_MEDIA_BUNDLE AssetTypeEnum_AssetType = 3 + // Image asset. + AssetTypeEnum_IMAGE AssetTypeEnum_AssetType = 4 + // Text asset. + AssetTypeEnum_TEXT AssetTypeEnum_AssetType = 5 +) + +var AssetTypeEnum_AssetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "YOUTUBE_VIDEO", + 3: "MEDIA_BUNDLE", + 4: "IMAGE", + 5: "TEXT", +} +var AssetTypeEnum_AssetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "YOUTUBE_VIDEO": 2, + "MEDIA_BUNDLE": 3, + "IMAGE": 4, + "TEXT": 5, +} + +func (x AssetTypeEnum_AssetType) String() string { + return proto.EnumName(AssetTypeEnum_AssetType_name, int32(x)) +} +func (AssetTypeEnum_AssetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_asset_type_6ee0f36116fdf786, []int{0, 0} +} + +// Container for enum describing the types of asset. 
+type AssetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssetTypeEnum) Reset() { *m = AssetTypeEnum{} } +func (m *AssetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*AssetTypeEnum) ProtoMessage() {} +func (*AssetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_type_6ee0f36116fdf786, []int{0} +} +func (m *AssetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssetTypeEnum.Unmarshal(m, b) +} +func (m *AssetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *AssetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssetTypeEnum.Merge(dst, src) +} +func (m *AssetTypeEnum) XXX_Size() int { + return xxx_messageInfo_AssetTypeEnum.Size(m) +} +func (m *AssetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AssetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AssetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AssetTypeEnum)(nil), "google.ads.googleads.v1.enums.AssetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AssetTypeEnum_AssetType", AssetTypeEnum_AssetType_name, AssetTypeEnum_AssetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/asset_type.proto", fileDescriptor_asset_type_6ee0f36116fdf786) +} + +var fileDescriptor_asset_type_6ee0f36116fdf786 = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4f, 0x4e, 0xf3, 0x30, + 0x14, 0xc4, 0xbf, 0xa6, 0xed, 0x07, 0x75, 0x29, 0x18, 0x2f, 0x11, 0x5d, 0xb4, 0x07, 0x70, 0x14, + 0xb1, 0x33, 0x2b, 0x87, 0x98, 0x2a, 0x82, 0xa6, 0x95, 0x48, 0xc2, 0x1f, 0x45, 0xaa, 0x42, 0x13, + 0x45, 0x95, 0x5a, 0x3b, 0xaa, 0xdd, 0x4a, 0xbd, 0x0e, 0x4b, 0x8e, 0xc2, 0x35, 0xd8, 0x71, 0x0a, + 0x14, 0x9b, 0x64, 0x07, 0x1b, 0x6b, 0xf4, 0xde, 0xef, 0x8d, 0xc6, 0x03, 0x70, 0x21, 0x44, 0xb1, + 0xce, 0xed, 0x34, 0x93, 0xb6, 0x91, 0x95, 0xda, 0x3b, 0x76, 0xce, 0x77, 0x1b, 0x69, 0xa7, 0x52, + 0xe6, 0x6a, 0xa1, 0x0e, 0x65, 0x8e, 0xcb, 0xad, 0x50, 0x02, 0x0d, 0x0d, 0x84, 0xd3, 0x4c, 0xe2, + 0x86, 0xc7, 0x7b, 0x07, 0x6b, 0xfe, 0xe2, 0xb2, 0xb6, 0x2b, 0x57, 0x76, 0xca, 0xb9, 0x50, 0xa9, + 0x5a, 0x09, 0x2e, 0xcd, 0xf1, 0x58, 0x81, 0x01, 0xad, 0x0c, 0xc3, 0x43, 0x99, 0x33, 0xbe, 0xdb, + 0x8c, 0x97, 0xa0, 0xd7, 0x0c, 0xd0, 0x19, 0xe8, 0x47, 0xc1, 0xc3, 0x9c, 0xdd, 0xf8, 0xb7, 0x3e, + 0xf3, 0xe0, 0x3f, 0xd4, 0x07, 0x47, 0x51, 0x70, 0x17, 0xcc, 0x1e, 0x03, 0xd8, 0x42, 0xe7, 0x60, + 0xf0, 0x3c, 0x8b, 0xc2, 0xc8, 0x65, 0x8b, 0xd8, 0xf7, 0xd8, 0x0c, 0x5a, 0x08, 0x82, 0x93, 0x29, + 0xf3, 0x7c, 0xba, 0x70, 0xa3, 0xc0, 0xbb, 0x67, 0xb0, 0x8d, 0x7a, 0xa0, 0xeb, 0x4f, 0xe9, 0x84, + 0xc1, 0x0e, 0x3a, 0x06, 0x9d, 0x90, 0x3d, 0x85, 0xb0, 0xeb, 0x7e, 0xb6, 0xc0, 0x68, 0x29, 0x36, + 0xf8, 0xcf, 0xe4, 0xee, 0x69, 0x13, 0x64, 0x5e, 0x65, 0x9d, 0xb7, 0x5e, 0xdc, 0x9f, 0x83, 0x42, + 0xac, 0x53, 0x5e, 0x60, 0xb1, 0x2d, 0xec, 0x22, 0xe7, 0xfa, 0x27, 0x75, 0x55, 0xe5, 0x4a, 0xfe, + 0xd2, 0xdc, 0xb5, 0x7e, 0xdf, 0xac, 0xf6, 0x84, 0xd2, 0x77, 0x6b, 0x38, 0x31, 0x56, 0x34, 0x93, + 0xd8, 0xc8, 0x4a, 0xc5, 0x0e, 0xae, 0x5a, 0x90, 0x1f, 0xf5, 0x3e, 0xa1, 0x99, 0x4c, 0x9a, 0x7d, + 0x12, 0x3b, 0x89, 0xde, 0x7f, 0x59, 0x23, 0x33, 0x24, 0x84, 0x66, 0x92, 0x90, 0x86, 0x20, 0x24, + 0x76, 0x08, 0xd1, 0xcc, 0xeb, 0x7f, 0x1d, 0xec, 0xea, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x60, 0xe7, + 0xfb, 0x60, 0xd1, 
0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/attribution_model.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/attribution_model.pb.go new file mode 100644 index 0000000..a3fca63 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/attribution_model.pb.go @@ -0,0 +1,153 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/attribution_model.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The attribution model that describes how to distribute credit for a +// particular conversion across potentially many prior interactions. +type AttributionModelEnum_AttributionModel int32 + +const ( + // Not specified. + AttributionModelEnum_UNSPECIFIED AttributionModelEnum_AttributionModel = 0 + // Used for return value only. Represents value unknown in this version. + AttributionModelEnum_UNKNOWN AttributionModelEnum_AttributionModel = 1 + // Uses external attribution. + AttributionModelEnum_EXTERNAL AttributionModelEnum_AttributionModel = 100 + // Attributes all credit for a conversion to its last click. + AttributionModelEnum_GOOGLE_ADS_LAST_CLICK AttributionModelEnum_AttributionModel = 101 + // Attributes all credit for a conversion to its first click using Google + // Search attribution. + AttributionModelEnum_GOOGLE_SEARCH_ATTRIBUTION_FIRST_CLICK AttributionModelEnum_AttributionModel = 102 + // Attributes credit for a conversion equally across all of its clicks using + // Google Search attribution. + AttributionModelEnum_GOOGLE_SEARCH_ATTRIBUTION_LINEAR AttributionModelEnum_AttributionModel = 103 + // Attributes exponentially more credit for a conversion to its more recent + // clicks using Google Search attribution (half-life is 1 week). + AttributionModelEnum_GOOGLE_SEARCH_ATTRIBUTION_TIME_DECAY AttributionModelEnum_AttributionModel = 104 + // Attributes 40% of the credit for a conversion to its first and last + // clicks. Remaining 20% is evenly distributed across all other clicks. This + // uses Google Search attribution. + AttributionModelEnum_GOOGLE_SEARCH_ATTRIBUTION_POSITION_BASED AttributionModelEnum_AttributionModel = 105 + // Flexible model that uses machine learning to determine the appropriate + // distribution of credit among clicks using Google Search attribution. 
+ AttributionModelEnum_GOOGLE_SEARCH_ATTRIBUTION_DATA_DRIVEN AttributionModelEnum_AttributionModel = 106 +) + +var AttributionModelEnum_AttributionModel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 100: "EXTERNAL", + 101: "GOOGLE_ADS_LAST_CLICK", + 102: "GOOGLE_SEARCH_ATTRIBUTION_FIRST_CLICK", + 103: "GOOGLE_SEARCH_ATTRIBUTION_LINEAR", + 104: "GOOGLE_SEARCH_ATTRIBUTION_TIME_DECAY", + 105: "GOOGLE_SEARCH_ATTRIBUTION_POSITION_BASED", + 106: "GOOGLE_SEARCH_ATTRIBUTION_DATA_DRIVEN", +} +var AttributionModelEnum_AttributionModel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EXTERNAL": 100, + "GOOGLE_ADS_LAST_CLICK": 101, + "GOOGLE_SEARCH_ATTRIBUTION_FIRST_CLICK": 102, + "GOOGLE_SEARCH_ATTRIBUTION_LINEAR": 103, + "GOOGLE_SEARCH_ATTRIBUTION_TIME_DECAY": 104, + "GOOGLE_SEARCH_ATTRIBUTION_POSITION_BASED": 105, + "GOOGLE_SEARCH_ATTRIBUTION_DATA_DRIVEN": 106, +} + +func (x AttributionModelEnum_AttributionModel) String() string { + return proto.EnumName(AttributionModelEnum_AttributionModel_name, int32(x)) +} +func (AttributionModelEnum_AttributionModel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_attribution_model_6e5f9ecff5a128d2, []int{0, 0} +} + +// Container for enum representing the attribution model that describes how to +// distribute credit for a particular conversion across potentially many prior +// interactions. +type AttributionModelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttributionModelEnum) Reset() { *m = AttributionModelEnum{} } +func (m *AttributionModelEnum) String() string { return proto.CompactTextString(m) } +func (*AttributionModelEnum) ProtoMessage() {} +func (*AttributionModelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_attribution_model_6e5f9ecff5a128d2, []int{0} +} +func (m *AttributionModelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttributionModelEnum.Unmarshal(m, b) +} +func (m *AttributionModelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttributionModelEnum.Marshal(b, m, deterministic) +} +func (dst *AttributionModelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttributionModelEnum.Merge(dst, src) +} +func (m *AttributionModelEnum) XXX_Size() int { + return xxx_messageInfo_AttributionModelEnum.Size(m) +} +func (m *AttributionModelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AttributionModelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AttributionModelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AttributionModelEnum)(nil), "google.ads.googleads.v1.enums.AttributionModelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.AttributionModelEnum_AttributionModel", AttributionModelEnum_AttributionModel_name, AttributionModelEnum_AttributionModel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/attribution_model.proto", fileDescriptor_attribution_model_6e5f9ecff5a128d2) +} + +var fileDescriptor_attribution_model_6e5f9ecff5a128d2 = []byte{ + // 407 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xdd, 0x6a, 0xd4, 0x40, + 0x1c, 0xc5, 0xdd, 0x08, 0x2a, 0x53, 0xc1, 0x61, 0xb0, 0x17, 0x8a, 0x05, 0x5b, 0x2a, 0x54, 0x90, + 0x09, 0x41, 0xbc, 0x19, 0xaf, 0xfe, 0x49, 0xa6, 0xeb, 0xd0, 0x74, 0xb2, 0x24, 0xd9, 0xf5, 0x83, + 0x85, 0x21, 0x35, 0x31, 0x46, 0x36, 0x99, 0x65, 0x27, 0xdb, 0x07, 0xf2, 0x52, 0xf0, 0x35, 0xbc, 
+ 0xf0, 0x51, 0xfa, 0x14, 0x92, 0xa4, 0x1b, 0xa1, 0x90, 0xbd, 0x09, 0x87, 0x9c, 0xdf, 0x39, 0x0c, + 0x73, 0x06, 0xbd, 0x2b, 0xb4, 0x2e, 0x56, 0xb9, 0x9d, 0x66, 0xc6, 0xee, 0x65, 0xab, 0xae, 0x1d, + 0x3b, 0xaf, 0xb7, 0x95, 0xb1, 0xd3, 0xa6, 0xd9, 0x94, 0x57, 0xdb, 0xa6, 0xd4, 0xb5, 0xaa, 0x74, + 0x96, 0xaf, 0xe8, 0x7a, 0xa3, 0x1b, 0x4d, 0x8e, 0x7a, 0x96, 0xa6, 0x99, 0xa1, 0x43, 0x8c, 0x5e, + 0x3b, 0xb4, 0x8b, 0x3d, 0x7f, 0xb1, 0x6b, 0x5d, 0x97, 0x76, 0x5a, 0xd7, 0xba, 0x49, 0xdb, 0x06, + 0xd3, 0x87, 0x4f, 0xfe, 0x58, 0xe8, 0x29, 0xfc, 0x2f, 0xbe, 0x6c, 0x7b, 0x79, 0xbd, 0xad, 0x4e, + 0x7e, 0x5b, 0x08, 0xdf, 0x35, 0xc8, 0x13, 0x74, 0x30, 0x97, 0xf1, 0x8c, 0x7b, 0xe2, 0x5c, 0x70, + 0x1f, 0xdf, 0x23, 0x07, 0xe8, 0xe1, 0x5c, 0x5e, 0xc8, 0xf0, 0xa3, 0xc4, 0x13, 0xf2, 0x18, 0x3d, + 0xe2, 0x9f, 0x12, 0x1e, 0x49, 0x08, 0x70, 0x46, 0x9e, 0xa1, 0xc3, 0x69, 0x18, 0x4e, 0x03, 0xae, + 0xc0, 0x8f, 0x55, 0x00, 0x71, 0xa2, 0xbc, 0x40, 0x78, 0x17, 0x38, 0x27, 0xaf, 0xd1, 0xab, 0x5b, + 0x2b, 0xe6, 0x10, 0x79, 0x1f, 0x14, 0x24, 0x49, 0x24, 0xdc, 0x79, 0x22, 0x42, 0xa9, 0xce, 0x45, + 0x34, 0xa0, 0xdf, 0xc8, 0x29, 0x7a, 0x39, 0x8e, 0x06, 0x42, 0x72, 0x88, 0x70, 0x41, 0xce, 0xd0, + 0xe9, 0x38, 0x95, 0x88, 0x4b, 0xae, 0x7c, 0xee, 0xc1, 0x67, 0xfc, 0x9d, 0xbc, 0x41, 0x67, 0xe3, + 0xe4, 0x2c, 0x8c, 0x45, 0x27, 0x5c, 0x88, 0xb9, 0x8f, 0xcb, 0xfd, 0x07, 0xf5, 0x21, 0x01, 0xe5, + 0x47, 0x62, 0xc1, 0x25, 0xfe, 0xe1, 0xde, 0x4c, 0xd0, 0xf1, 0x57, 0x5d, 0xd1, 0xbd, 0x63, 0xb8, + 0x87, 0x77, 0xaf, 0x74, 0xd6, 0xae, 0x30, 0x9b, 0x7c, 0x71, 0x6f, 0x73, 0x85, 0x5e, 0xa5, 0x75, + 0x41, 0xf5, 0xa6, 0xb0, 0x8b, 0xbc, 0xee, 0x36, 0xda, 0xbd, 0x85, 0x75, 0x69, 0x46, 0x9e, 0xc6, + 0xfb, 0xee, 0xfb, 0xd3, 0xba, 0x3f, 0x05, 0xf8, 0x65, 0x1d, 0x4d, 0xfb, 0x2a, 0xc8, 0x0c, 0xed, + 0x65, 0xab, 0x16, 0x0e, 0x6d, 0x77, 0x35, 0x7f, 0x77, 0xfe, 0x12, 0x32, 0xb3, 0x1c, 0xfc, 0xe5, + 0xc2, 0x59, 0x76, 0xfe, 0x8d, 0x75, 0xdc, 0xff, 0x64, 0x0c, 0x32, 0xc3, 0xd8, 0x40, 0x30, 0xb6, + 0x70, 0x18, 0xeb, 0x98, 0xab, 0x07, 0xdd, 0xc1, 0xde, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xd9, + 0xf0, 0xcb, 0x05, 0xb2, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bid_modifier_source.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bid_modifier_source.pb.go new file mode 100644 index 0000000..9450a37 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bid_modifier_source.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/bid_modifier_source.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible bid modifier sources. +type BidModifierSourceEnum_BidModifierSource int32 + +const ( + // Not specified. 
+ BidModifierSourceEnum_UNSPECIFIED BidModifierSourceEnum_BidModifierSource = 0 + // Used for return value only. Represents value unknown in this version. + BidModifierSourceEnum_UNKNOWN BidModifierSourceEnum_BidModifierSource = 1 + // The bid modifier is specified at the campaign level, on the campaign + // level criterion. + BidModifierSourceEnum_CAMPAIGN BidModifierSourceEnum_BidModifierSource = 2 + // The bid modifier is specified (overridden) at the ad group level. + BidModifierSourceEnum_AD_GROUP BidModifierSourceEnum_BidModifierSource = 3 +) + +var BidModifierSourceEnum_BidModifierSource_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CAMPAIGN", + 3: "AD_GROUP", +} +var BidModifierSourceEnum_BidModifierSource_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CAMPAIGN": 2, + "AD_GROUP": 3, +} + +func (x BidModifierSourceEnum_BidModifierSource) String() string { + return proto.EnumName(BidModifierSourceEnum_BidModifierSource_name, int32(x)) +} +func (BidModifierSourceEnum_BidModifierSource) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bid_modifier_source_e3eae9c0e23dc5d0, []int{0, 0} +} + +// Container for enum describing possible bid modifier sources. +type BidModifierSourceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BidModifierSourceEnum) Reset() { *m = BidModifierSourceEnum{} } +func (m *BidModifierSourceEnum) String() string { return proto.CompactTextString(m) } +func (*BidModifierSourceEnum) ProtoMessage() {} +func (*BidModifierSourceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bid_modifier_source_e3eae9c0e23dc5d0, []int{0} +} +func (m *BidModifierSourceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BidModifierSourceEnum.Unmarshal(m, b) +} +func (m *BidModifierSourceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BidModifierSourceEnum.Marshal(b, m, deterministic) +} +func (dst *BidModifierSourceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BidModifierSourceEnum.Merge(dst, src) +} +func (m *BidModifierSourceEnum) XXX_Size() int { + return xxx_messageInfo_BidModifierSourceEnum.Size(m) +} +func (m *BidModifierSourceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BidModifierSourceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BidModifierSourceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BidModifierSourceEnum)(nil), "google.ads.googleads.v1.enums.BidModifierSourceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BidModifierSourceEnum_BidModifierSource", BidModifierSourceEnum_BidModifierSource_name, BidModifierSourceEnum_BidModifierSource_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/bid_modifier_source.proto", fileDescriptor_bid_modifier_source_e3eae9c0e23dc5d0) +} + +var fileDescriptor_bid_modifier_source_e3eae9c0e23dc5d0 = []byte{ + // 309 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xf3, 0x30, + 0x1c, 0xfd, 0xd6, 0xc1, 0xa7, 0x64, 0x82, 0xb5, 0xa0, 0x17, 0xe2, 0x2e, 0xb6, 0x07, 0x48, 0x28, + 0x5e, 0x08, 0xf1, 0x2a, 0xdd, 0x66, 0x19, 0xb2, 0xae, 0x38, 0x56, 0x41, 0x0a, 0xa5, 0x5b, 0xba, + 0x10, 0x58, 0x93, 0xd2, 0xb4, 0x7b, 0x20, 0x2f, 0x7d, 0x14, 0x1f, 0x45, 0x5f, 0x42, 0x9a, 0xac, + 0xbd, 0x19, 0x7a, 0x13, 0x4e, 0x72, 0xfe, 0xe4, 0xfc, 0x7e, 0xe0, 0x81, 0x49, 0xc9, 0xf6, 0x19, + 0x4a, 0xa9, 0x42, 0x06, 0x36, 
0xe8, 0xe0, 0xa2, 0x4c, 0xd4, 0xb9, 0x42, 0x1b, 0x4e, 0x93, 0x5c, + 0x52, 0xbe, 0xe3, 0x59, 0x99, 0x28, 0x59, 0x97, 0xdb, 0x0c, 0x16, 0xa5, 0xac, 0xa4, 0x33, 0x34, + 0x6a, 0x98, 0x52, 0x05, 0x3b, 0x23, 0x3c, 0xb8, 0x50, 0x1b, 0x6f, 0xef, 0xda, 0xdc, 0x82, 0xa3, + 0x54, 0x08, 0x59, 0xa5, 0x15, 0x97, 0x42, 0x19, 0xf3, 0x78, 0x07, 0xae, 0x3d, 0x4e, 0x17, 0xc7, + 0xe0, 0x95, 0xce, 0x9d, 0x89, 0x3a, 0x1f, 0x2f, 0xc0, 0xd5, 0x09, 0xe1, 0x5c, 0x82, 0xc1, 0x3a, + 0x58, 0x85, 0xb3, 0xc9, 0xfc, 0x69, 0x3e, 0x9b, 0xda, 0xff, 0x9c, 0x01, 0x38, 0x5b, 0x07, 0xcf, + 0xc1, 0xf2, 0x35, 0xb0, 0x7b, 0xce, 0x05, 0x38, 0x9f, 0x90, 0x45, 0x48, 0xe6, 0x7e, 0x60, 0x5b, + 0xcd, 0x8d, 0x4c, 0x13, 0xff, 0x65, 0xb9, 0x0e, 0xed, 0xbe, 0xf7, 0xdd, 0x03, 0xa3, 0xad, 0xcc, + 0xe1, 0x9f, 0x5d, 0xbd, 0x9b, 0x93, 0x2f, 0xc3, 0xa6, 0x65, 0xd8, 0x7b, 0xf3, 0x8e, 0x46, 0x26, + 0xf7, 0xa9, 0x60, 0x50, 0x96, 0x0c, 0xb1, 0x4c, 0xe8, 0x19, 0xda, 0x6d, 0x15, 0x5c, 0xfd, 0xb2, + 0xbc, 0x47, 0x7d, 0xbe, 0x5b, 0x7d, 0x9f, 0x90, 0x0f, 0x6b, 0xe8, 0x9b, 0x28, 0x42, 0x15, 0x34, + 0xb0, 0x41, 0x91, 0x0b, 0x9b, 0xb9, 0xd5, 0x67, 0xcb, 0xc7, 0x84, 0xaa, 0xb8, 0xe3, 0xe3, 0xc8, + 0x8d, 0x35, 0xff, 0x65, 0x8d, 0xcc, 0x23, 0xc6, 0x84, 0x2a, 0x8c, 0x3b, 0x05, 0xc6, 0x91, 0x8b, + 0xb1, 0xd6, 0x6c, 0xfe, 0xeb, 0x62, 0xf7, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x0b, 0x35, 0xc2, + 0xa5, 0xd4, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_source.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_source.pb.go new file mode 100644 index 0000000..53816c9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_source.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/bidding_source.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates where a bid or target is defined. For example, an ad group +// criterion may define a cpc bid directly, or it can inherit its cpc bid from +// the ad group. +type BiddingSourceEnum_BiddingSource int32 + +const ( + // Not specified. + BiddingSourceEnum_UNSPECIFIED BiddingSourceEnum_BiddingSource = 0 + // Used for return value only. Represents value unknown in this version. + BiddingSourceEnum_UNKNOWN BiddingSourceEnum_BiddingSource = 1 + // Effective bid or target is inherited from campaign bidding strategy. + BiddingSourceEnum_CAMPAIGN_BIDDING_STRATEGY BiddingSourceEnum_BiddingSource = 5 + // The bid or target is defined on the ad group. + BiddingSourceEnum_AD_GROUP BiddingSourceEnum_BiddingSource = 6 + // The bid or target is defined on the ad group criterion. 
+ BiddingSourceEnum_AD_GROUP_CRITERION BiddingSourceEnum_BiddingSource = 7 +) + +var BiddingSourceEnum_BiddingSource_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 5: "CAMPAIGN_BIDDING_STRATEGY", + 6: "AD_GROUP", + 7: "AD_GROUP_CRITERION", +} +var BiddingSourceEnum_BiddingSource_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CAMPAIGN_BIDDING_STRATEGY": 5, + "AD_GROUP": 6, + "AD_GROUP_CRITERION": 7, +} + +func (x BiddingSourceEnum_BiddingSource) String() string { + return proto.EnumName(BiddingSourceEnum_BiddingSource_name, int32(x)) +} +func (BiddingSourceEnum_BiddingSource) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bidding_source_c61d46f2f54ad011, []int{0, 0} +} + +// Container for enum describing possible bidding sources. +type BiddingSourceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingSourceEnum) Reset() { *m = BiddingSourceEnum{} } +func (m *BiddingSourceEnum) String() string { return proto.CompactTextString(m) } +func (*BiddingSourceEnum) ProtoMessage() {} +func (*BiddingSourceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_source_c61d46f2f54ad011, []int{0} +} +func (m *BiddingSourceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingSourceEnum.Unmarshal(m, b) +} +func (m *BiddingSourceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingSourceEnum.Marshal(b, m, deterministic) +} +func (dst *BiddingSourceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingSourceEnum.Merge(dst, src) +} +func (m *BiddingSourceEnum) XXX_Size() int { + return xxx_messageInfo_BiddingSourceEnum.Size(m) +} +func (m *BiddingSourceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingSourceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingSourceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BiddingSourceEnum)(nil), "google.ads.googleads.v1.enums.BiddingSourceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource", BiddingSourceEnum_BiddingSource_name, BiddingSourceEnum_BiddingSource_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/bidding_source.proto", fileDescriptor_bidding_source_c61d46f2f54ad011) +} + +var fileDescriptor_bidding_source_c61d46f2f54ad011 = []byte{ + // 336 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x18, 0xfd, 0x75, 0x3f, 0xdc, 0x24, 0x53, 0xac, 0xb9, 0x10, 0x14, 0x77, 0xb1, 0x3d, 0x40, 0x4a, + 0xf5, 0x2e, 0x5e, 0xa5, 0x6b, 0x2d, 0x45, 0xcc, 0x4a, 0xf7, 0x47, 0x94, 0x42, 0xe9, 0x96, 0x12, + 0x0a, 0x5b, 0x32, 0x9a, 0x6d, 0xaf, 0xe0, 0x7b, 0x78, 0xe9, 0xa3, 0xf8, 0x22, 0x82, 0x4f, 0x21, + 0x4d, 0xec, 0x60, 0x17, 0x7a, 0x13, 0x4e, 0xbe, 0xef, 0x9c, 0xc3, 0x77, 0x0e, 0xb8, 0xe1, 0x52, + 0xf2, 0x65, 0xe1, 0xe4, 0x4c, 0x39, 0x06, 0xd6, 0x68, 0xe7, 0x3a, 0x85, 0xd8, 0xae, 0x94, 0x33, + 0x2f, 0x19, 0x2b, 0x05, 0xcf, 0x94, 0xdc, 0x56, 0x8b, 0x02, 0xad, 0x2b, 0xb9, 0x91, 0xb0, 0x67, + 0x88, 0x28, 0x67, 0x0a, 0xed, 0x35, 0x68, 0xe7, 0x22, 0xad, 0xb9, 0xba, 0x6e, 0x2c, 0xd7, 0xa5, + 0x93, 0x0b, 0x21, 0x37, 0xf9, 0xa6, 0x94, 0x42, 0x19, 0xf1, 0xe0, 0xd5, 0x02, 0xe7, 0x9e, 0x71, + 0x1d, 0x6b, 0xd3, 0x40, 0x6c, 0x57, 0x83, 0x0a, 0x9c, 0x1e, 0x0c, 0xe1, 0x19, 0xe8, 0x4e, 0xe9, + 0x38, 0x0e, 0x86, 0xd1, 0x7d, 0x14, 0xf8, 0xf6, 0x3f, 0xd8, 0x05, 0x9d, 0x29, 0x7d, 0xa0, 0xa3, + 
0x27, 0x6a, 0x5b, 0xb0, 0x07, 0x2e, 0x87, 0xe4, 0x31, 0x26, 0x51, 0x48, 0x33, 0x2f, 0xf2, 0xfd, + 0x88, 0x86, 0xd9, 0x78, 0x92, 0x90, 0x49, 0x10, 0x3e, 0xdb, 0x47, 0xf0, 0x04, 0x1c, 0x13, 0x3f, + 0x0b, 0x93, 0xd1, 0x34, 0xb6, 0xdb, 0xf0, 0x02, 0xc0, 0xe6, 0x97, 0x0d, 0x93, 0x68, 0x12, 0x24, + 0xd1, 0x88, 0xda, 0x1d, 0xef, 0xd3, 0x02, 0xfd, 0x85, 0x5c, 0xa1, 0x3f, 0xd3, 0x78, 0xf0, 0xe0, + 0xae, 0xb8, 0xce, 0x10, 0x5b, 0x2f, 0xde, 0x8f, 0x88, 0xcb, 0x65, 0x2e, 0x38, 0x92, 0x15, 0x77, + 0x78, 0x21, 0x74, 0xc2, 0xa6, 0xc6, 0x75, 0xa9, 0x7e, 0x69, 0xf5, 0x4e, 0xbf, 0x6f, 0xad, 0xff, + 0x21, 0x21, 0xef, 0xad, 0x5e, 0x68, 0xac, 0x08, 0x53, 0xc8, 0xc0, 0x1a, 0xcd, 0x5c, 0x54, 0x17, + 0xa3, 0x3e, 0x9a, 0x7d, 0x4a, 0x98, 0x4a, 0xf7, 0xfb, 0x74, 0xe6, 0xa6, 0x7a, 0xff, 0xd5, 0xea, + 0x9b, 0x21, 0xc6, 0x84, 0x29, 0x8c, 0xf7, 0x0c, 0x8c, 0x67, 0x2e, 0xc6, 0x9a, 0x33, 0x6f, 0xeb, + 0xc3, 0x6e, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xa2, 0xc9, 0xa8, 0x91, 0xed, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_status.pb.go new file mode 100644 index 0000000..b7ef662 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_status.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/bidding_strategy_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of a BiddingStrategy. +type BiddingStrategyStatusEnum_BiddingStrategyStatus int32 + +const ( + // No value has been specified. + BiddingStrategyStatusEnum_UNSPECIFIED BiddingStrategyStatusEnum_BiddingStrategyStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + BiddingStrategyStatusEnum_UNKNOWN BiddingStrategyStatusEnum_BiddingStrategyStatus = 1 + // The bidding strategy is enabled. + BiddingStrategyStatusEnum_ENABLED BiddingStrategyStatusEnum_BiddingStrategyStatus = 2 + // The bidding strategy is removed. 
+ BiddingStrategyStatusEnum_REMOVED BiddingStrategyStatusEnum_BiddingStrategyStatus = 4 +) + +var BiddingStrategyStatusEnum_BiddingStrategyStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 4: "REMOVED", +} +var BiddingStrategyStatusEnum_BiddingStrategyStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 4, +} + +func (x BiddingStrategyStatusEnum_BiddingStrategyStatus) String() string { + return proto.EnumName(BiddingStrategyStatusEnum_BiddingStrategyStatus_name, int32(x)) +} +func (BiddingStrategyStatusEnum_BiddingStrategyStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_status_47be9e1502989548, []int{0, 0} +} + +// Message describing BiddingStrategy statuses. +type BiddingStrategyStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingStrategyStatusEnum) Reset() { *m = BiddingStrategyStatusEnum{} } +func (m *BiddingStrategyStatusEnum) String() string { return proto.CompactTextString(m) } +func (*BiddingStrategyStatusEnum) ProtoMessage() {} +func (*BiddingStrategyStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_status_47be9e1502989548, []int{0} +} +func (m *BiddingStrategyStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingStrategyStatusEnum.Unmarshal(m, b) +} +func (m *BiddingStrategyStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingStrategyStatusEnum.Marshal(b, m, deterministic) +} +func (dst *BiddingStrategyStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingStrategyStatusEnum.Merge(dst, src) +} +func (m *BiddingStrategyStatusEnum) XXX_Size() int { + return xxx_messageInfo_BiddingStrategyStatusEnum.Size(m) +} +func (m *BiddingStrategyStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingStrategyStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingStrategyStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BiddingStrategyStatusEnum)(nil), "google.ads.googleads.v1.enums.BiddingStrategyStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BiddingStrategyStatusEnum_BiddingStrategyStatus", BiddingStrategyStatusEnum_BiddingStrategyStatus_name, BiddingStrategyStatusEnum_BiddingStrategyStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/bidding_strategy_status.proto", fileDescriptor_bidding_strategy_status_47be9e1502989548) +} + +var fileDescriptor_bidding_strategy_status_47be9e1502989548 = []byte{ + // 307 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x75, 0x55, 0x14, 0xb2, 0x07, 0xcb, 0xc0, 0x07, 0x87, 0x7b, 0xd8, 0x3e, 0x20, 0xa1, 0xf8, + 0x96, 0x3d, 0xa5, 0x2e, 0x8e, 0xa1, 0x76, 0xc3, 0xb1, 0x0a, 0x52, 0x18, 0x99, 0x29, 0xa1, 0xb0, + 0x25, 0xa3, 0x49, 0x07, 0xfe, 0x8e, 0x8f, 0x7e, 0x8a, 0x9f, 0xe2, 0x8b, 0xbf, 0x20, 0x49, 0xda, + 0x3e, 0x4d, 0x5f, 0xc2, 0xb9, 0x39, 0xf7, 0x9c, 0x7b, 0xee, 0x05, 0x63, 0xa1, 0x94, 0xd8, 0xe6, + 0x88, 0x71, 0x8d, 0x3c, 0xb4, 0xe8, 0x10, 0xa1, 0x5c, 0x56, 0x3b, 0x8d, 0x36, 0x05, 0xe7, 0x85, + 0x14, 0x6b, 0x6d, 0x4a, 0x66, 0x72, 0xf1, 0xbe, 0xd6, 0x86, 0x99, 0x4a, 0xc3, 0x7d, 0xa9, 0x8c, + 0xea, 0x0d, 0xbc, 0x02, 0x32, 0xae, 0x61, 0x2b, 0x86, 0x87, 0x08, 0x3a, 0x71, 0xff, 0xa6, 0xf1, + 0xde, 0x17, 0x88, 0x49, 0xa9, 0x0c, 0x33, 0x85, 0x92, 0xb5, 
0x78, 0xb4, 0x05, 0xd7, 0xb1, 0x77, + 0x5f, 0xd6, 0xe6, 0x4b, 0xe7, 0x4d, 0x65, 0xb5, 0x1b, 0xcd, 0xc1, 0xd5, 0x51, 0xb2, 0x77, 0x09, + 0xba, 0xab, 0x64, 0xb9, 0xa0, 0x77, 0xb3, 0xfb, 0x19, 0x9d, 0x84, 0x27, 0xbd, 0x2e, 0xb8, 0x58, + 0x25, 0x0f, 0xc9, 0xfc, 0x25, 0x09, 0x3b, 0xb6, 0xa0, 0x09, 0x89, 0x1f, 0xe9, 0x24, 0x0c, 0x6c, + 0xf1, 0x4c, 0x9f, 0xe6, 0x29, 0x9d, 0x84, 0x67, 0xf1, 0x4f, 0x07, 0x0c, 0xdf, 0xd4, 0x0e, 0xfe, + 0x9b, 0x38, 0xee, 0x1f, 0x1d, 0xba, 0xb0, 0x79, 0x17, 0x9d, 0xd7, 0xb8, 0x16, 0x0b, 0xb5, 0x65, + 0x52, 0x40, 0x55, 0x0a, 0x24, 0x72, 0xe9, 0xb6, 0x69, 0x6e, 0xb7, 0x2f, 0xf4, 0x1f, 0xa7, 0x1c, + 0xbb, 0xf7, 0x23, 0x38, 0x9d, 0x12, 0xf2, 0x19, 0x0c, 0xa6, 0xde, 0x8a, 0x70, 0x0d, 0x3d, 0xb4, + 0x28, 0x8d, 0xa0, 0xdd, 0x5e, 0x7f, 0x35, 0x7c, 0x46, 0xb8, 0xce, 0x5a, 0x3e, 0x4b, 0xa3, 0xcc, + 0xf1, 0xdf, 0xc1, 0xd0, 0x7f, 0x62, 0x4c, 0xb8, 0xc6, 0xb8, 0xed, 0xc0, 0x38, 0x8d, 0x30, 0x76, + 0x3d, 0x9b, 0x73, 0x17, 0xec, 0xf6, 0x37, 0x00, 0x00, 0xff, 0xff, 0xe6, 0xb7, 0x88, 0x78, 0xe2, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_type.pb.go new file mode 100644 index 0000000..95fa2ed --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/bidding_strategy_type.pb.go @@ -0,0 +1,193 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/bidding_strategy_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible bidding strategy types. +type BiddingStrategyTypeEnum_BiddingStrategyType int32 + +const ( + // Not specified. + BiddingStrategyTypeEnum_UNSPECIFIED BiddingStrategyTypeEnum_BiddingStrategyType = 0 + // Used for return value only. Represents value unknown in this version. + BiddingStrategyTypeEnum_UNKNOWN BiddingStrategyTypeEnum_BiddingStrategyType = 1 + // Enhanced CPC is a bidding strategy that raises bids for clicks + // that seem more likely to lead to a conversion and lowers + // them for clicks where they seem less likely. + BiddingStrategyTypeEnum_ENHANCED_CPC BiddingStrategyTypeEnum_BiddingStrategyType = 2 + // Manual click based bidding where user pays per click. + BiddingStrategyTypeEnum_MANUAL_CPC BiddingStrategyTypeEnum_BiddingStrategyType = 3 + // Manual impression based bidding + // where user pays per thousand impressions. + BiddingStrategyTypeEnum_MANUAL_CPM BiddingStrategyTypeEnum_BiddingStrategyType = 4 + // A bidding strategy that pays a configurable amount per video view. + BiddingStrategyTypeEnum_MANUAL_CPV BiddingStrategyTypeEnum_BiddingStrategyType = 13 + // A bidding strategy that automatically maximizes number of conversions + // given a daily budget. 
+ BiddingStrategyTypeEnum_MAXIMIZE_CONVERSIONS BiddingStrategyTypeEnum_BiddingStrategyType = 10 + // An automated bidding strategy that automatically sets bids to maximize + // revenue while spending your budget. + BiddingStrategyTypeEnum_MAXIMIZE_CONVERSION_VALUE BiddingStrategyTypeEnum_BiddingStrategyType = 11 + // Page-One Promoted bidding scheme, which sets max cpc bids to + // target impressions on page one or page one promoted slots on google.com. + BiddingStrategyTypeEnum_PAGE_ONE_PROMOTED BiddingStrategyTypeEnum_BiddingStrategyType = 5 + // Percent Cpc is bidding strategy where bids are a fraction of the + // advertised price for some good or service. + BiddingStrategyTypeEnum_PERCENT_CPC BiddingStrategyTypeEnum_BiddingStrategyType = 12 + // Target CPA is an automated bid strategy that sets bids + // to help get as many conversions as possible + // at the target cost-per-acquisition (CPA) you set. + BiddingStrategyTypeEnum_TARGET_CPA BiddingStrategyTypeEnum_BiddingStrategyType = 6 + // Target CPM is an automated bid strategy that sets bids to help get + // as many impressions as possible at the target cost per one thousand + // impressions (CPM) you set. + BiddingStrategyTypeEnum_TARGET_CPM BiddingStrategyTypeEnum_BiddingStrategyType = 14 + // An automated bidding strategy that sets bids so that a certain percentage + // of search ads are shown at the top of the first page (or other targeted + // location). + BiddingStrategyTypeEnum_TARGET_IMPRESSION_SHARE BiddingStrategyTypeEnum_BiddingStrategyType = 15 + // Target Outrank Share is an automated bidding strategy that sets bids + // based on the target fraction of auctions where the advertiser + // should outrank a specific competitor. + BiddingStrategyTypeEnum_TARGET_OUTRANK_SHARE BiddingStrategyTypeEnum_BiddingStrategyType = 7 + // Target ROAS is an automated bidding strategy + // that helps you maximize revenue while averaging + // a specific target Return On Average Spend (ROAS). + BiddingStrategyTypeEnum_TARGET_ROAS BiddingStrategyTypeEnum_BiddingStrategyType = 8 + // Target Spend is an automated bid strategy that sets your bids + // to help get as many clicks as possible within your budget. 
+ BiddingStrategyTypeEnum_TARGET_SPEND BiddingStrategyTypeEnum_BiddingStrategyType = 9 +) + +var BiddingStrategyTypeEnum_BiddingStrategyType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENHANCED_CPC", + 3: "MANUAL_CPC", + 4: "MANUAL_CPM", + 13: "MANUAL_CPV", + 10: "MAXIMIZE_CONVERSIONS", + 11: "MAXIMIZE_CONVERSION_VALUE", + 5: "PAGE_ONE_PROMOTED", + 12: "PERCENT_CPC", + 6: "TARGET_CPA", + 14: "TARGET_CPM", + 15: "TARGET_IMPRESSION_SHARE", + 7: "TARGET_OUTRANK_SHARE", + 8: "TARGET_ROAS", + 9: "TARGET_SPEND", +} +var BiddingStrategyTypeEnum_BiddingStrategyType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENHANCED_CPC": 2, + "MANUAL_CPC": 3, + "MANUAL_CPM": 4, + "MANUAL_CPV": 13, + "MAXIMIZE_CONVERSIONS": 10, + "MAXIMIZE_CONVERSION_VALUE": 11, + "PAGE_ONE_PROMOTED": 5, + "PERCENT_CPC": 12, + "TARGET_CPA": 6, + "TARGET_CPM": 14, + "TARGET_IMPRESSION_SHARE": 15, + "TARGET_OUTRANK_SHARE": 7, + "TARGET_ROAS": 8, + "TARGET_SPEND": 9, +} + +func (x BiddingStrategyTypeEnum_BiddingStrategyType) String() string { + return proto.EnumName(BiddingStrategyTypeEnum_BiddingStrategyType_name, int32(x)) +} +func (BiddingStrategyTypeEnum_BiddingStrategyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_type_5a1ecac8ab3d1803, []int{0, 0} +} + +// Container for enum describing possible bidding strategy types. +type BiddingStrategyTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingStrategyTypeEnum) Reset() { *m = BiddingStrategyTypeEnum{} } +func (m *BiddingStrategyTypeEnum) String() string { return proto.CompactTextString(m) } +func (*BiddingStrategyTypeEnum) ProtoMessage() {} +func (*BiddingStrategyTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_type_5a1ecac8ab3d1803, []int{0} +} +func (m *BiddingStrategyTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingStrategyTypeEnum.Unmarshal(m, b) +} +func (m *BiddingStrategyTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingStrategyTypeEnum.Marshal(b, m, deterministic) +} +func (dst *BiddingStrategyTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingStrategyTypeEnum.Merge(dst, src) +} +func (m *BiddingStrategyTypeEnum) XXX_Size() int { + return xxx_messageInfo_BiddingStrategyTypeEnum.Size(m) +} +func (m *BiddingStrategyTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingStrategyTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingStrategyTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BiddingStrategyTypeEnum)(nil), "google.ads.googleads.v1.enums.BiddingStrategyTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BiddingStrategyTypeEnum_BiddingStrategyType", BiddingStrategyTypeEnum_BiddingStrategyType_name, BiddingStrategyTypeEnum_BiddingStrategyType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/bidding_strategy_type.proto", fileDescriptor_bidding_strategy_type_5a1ecac8ab3d1803) +} + +var fileDescriptor_bidding_strategy_type_5a1ecac8ab3d1803 = []byte{ + // 455 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xdb, 0x6e, 0xd3, 0x30, + 0x18, 0xc7, 0x69, 0x06, 0x1b, 0xb8, 0x63, 0x33, 0x06, 0xb4, 0x71, 0xe8, 0xc5, 0xf6, 0x00, 0x8e, + 0x22, 0xae, 0x08, 0x57, 0x6e, 0x6a, 0xba, 0x68, 0x8b, 0x63, 0xe5, 0x04, 0x9a, 0x22, 0x45, 0x19, + 0x89, 
0xa2, 0x48, 0x6b, 0x1c, 0xd5, 0xd9, 0xa4, 0xbe, 0x0e, 0x97, 0x3c, 0x0a, 0x57, 0x3c, 0x07, + 0x82, 0x77, 0x40, 0x76, 0xd2, 0x4a, 0x95, 0xca, 0x6e, 0xac, 0xef, 0xef, 0xdf, 0x77, 0x92, 0xff, + 0x06, 0x1f, 0x2b, 0x21, 0xaa, 0xdb, 0xd2, 0xcc, 0x0b, 0x69, 0xf6, 0xa1, 0x8a, 0xee, 0x2d, 0xb3, + 0x6c, 0xee, 0x16, 0xd2, 0xbc, 0xa9, 0x8b, 0xa2, 0x6e, 0xaa, 0x4c, 0x76, 0xcb, 0xbc, 0x2b, 0xab, + 0x55, 0xd6, 0xad, 0xda, 0x12, 0xb7, 0x4b, 0xd1, 0x09, 0x34, 0xe9, 0xf3, 0x71, 0x5e, 0x48, 0xbc, + 0x29, 0xc5, 0xf7, 0x16, 0xd6, 0xa5, 0x6f, 0xdf, 0xaf, 0x3b, 0xb7, 0xb5, 0x99, 0x37, 0x8d, 0xe8, + 0xf2, 0xae, 0x16, 0x8d, 0xec, 0x8b, 0xcf, 0xff, 0x18, 0xe0, 0x64, 0xda, 0x37, 0x0f, 0x87, 0xde, + 0xd1, 0xaa, 0x2d, 0x69, 0x73, 0xb7, 0x38, 0xff, 0x65, 0x80, 0x97, 0x3b, 0x18, 0x3a, 0x06, 0xe3, + 0x98, 0x85, 0x9c, 0x3a, 0xee, 0x67, 0x97, 0xce, 0xe0, 0x23, 0x34, 0x06, 0x07, 0x31, 0xbb, 0x64, + 0xfe, 0x17, 0x06, 0x47, 0x08, 0x82, 0x43, 0xca, 0x2e, 0x08, 0x73, 0xe8, 0x2c, 0x73, 0xb8, 0x03, + 0x0d, 0x74, 0x04, 0x80, 0x47, 0x58, 0x4c, 0xae, 0xb4, 0xde, 0xdb, 0xd2, 0x1e, 0x7c, 0xbc, 0xa5, + 0x13, 0xf8, 0x1c, 0x9d, 0x82, 0x57, 0x1e, 0xf9, 0xea, 0x7a, 0xee, 0x35, 0xcd, 0x1c, 0x9f, 0x25, + 0x34, 0x08, 0x5d, 0x9f, 0x85, 0x10, 0xa0, 0x09, 0x78, 0xb3, 0x83, 0x64, 0x09, 0xb9, 0x8a, 0x29, + 0x1c, 0xa3, 0xd7, 0xe0, 0x05, 0x27, 0x73, 0x9a, 0xf9, 0x8c, 0x66, 0x3c, 0xf0, 0x3d, 0x3f, 0xa2, + 0x33, 0xf8, 0x44, 0xed, 0xcb, 0x69, 0xe0, 0x50, 0x16, 0xe9, 0x05, 0x0e, 0xd5, 0xc0, 0x88, 0x04, + 0x73, 0xaa, 0x34, 0x81, 0xfb, 0x5b, 0xda, 0x83, 0x47, 0xe8, 0x1d, 0x38, 0x19, 0xb4, 0xeb, 0xf1, + 0x80, 0x86, 0x7a, 0x48, 0x78, 0x41, 0x02, 0x0a, 0x8f, 0xd5, 0x76, 0x03, 0xf4, 0xe3, 0x28, 0x20, + 0xec, 0x72, 0x20, 0x07, 0x6a, 0xce, 0x40, 0x02, 0x9f, 0x84, 0xf0, 0xa9, 0x7a, 0x8a, 0xe1, 0x22, + 0xe4, 0x94, 0xcd, 0xe0, 0xb3, 0xe9, 0xdf, 0x11, 0x38, 0xfb, 0x26, 0x16, 0xf8, 0x41, 0xcb, 0xa6, + 0xa7, 0x3b, 0x5e, 0x9d, 0x2b, 0xbb, 0xf8, 0xe8, 0x7a, 0x3a, 0x94, 0x56, 0xe2, 0x36, 0x6f, 0x2a, + 0x2c, 0x96, 0x95, 0x59, 0x95, 0x8d, 0x36, 0x73, 0xfd, 0x71, 0xda, 0x5a, 0xfe, 0xe7, 0x1f, 0x7d, + 0xd2, 0xe7, 0x77, 0x63, 0x6f, 0x4e, 0xc8, 0x0f, 0x63, 0x32, 0xef, 0x5b, 0x91, 0x42, 0xe2, 0x3e, + 0x54, 0x51, 0x62, 0x61, 0xe5, 0xbe, 0xfc, 0xb9, 0xe6, 0x29, 0x29, 0x64, 0xba, 0xe1, 0x69, 0x62, + 0xa5, 0x9a, 0xff, 0x36, 0xce, 0xfa, 0x4b, 0xdb, 0x26, 0x85, 0xb4, 0xed, 0x4d, 0x86, 0x6d, 0x27, + 0x96, 0x6d, 0xeb, 0x9c, 0x9b, 0x7d, 0xbd, 0xd8, 0x87, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdd, + 0x75, 0xc4, 0x3f, 0xdf, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/billing_setup_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/billing_setup_status.pb.go new file mode 100644 index 0000000..c696748 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/billing_setup_status.pb.go @@ -0,0 +1,129 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/billing_setup_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of a BillingSetup. +type BillingSetupStatusEnum_BillingSetupStatus int32 + +const ( + // Not specified. + BillingSetupStatusEnum_UNSPECIFIED BillingSetupStatusEnum_BillingSetupStatus = 0 + // Used for return value only. Represents value unknown in this version. + BillingSetupStatusEnum_UNKNOWN BillingSetupStatusEnum_BillingSetupStatus = 1 + // The billing setup is pending approval. + BillingSetupStatusEnum_PENDING BillingSetupStatusEnum_BillingSetupStatus = 2 + // The billing setup has been approved but the corresponding first budget + // has not. This can only occur for billing setups configured for monthly + // invoicing. + BillingSetupStatusEnum_APPROVED_HELD BillingSetupStatusEnum_BillingSetupStatus = 3 + // The billing setup has been approved. + BillingSetupStatusEnum_APPROVED BillingSetupStatusEnum_BillingSetupStatus = 4 + // The billing setup was cancelled by the user prior to approval. + BillingSetupStatusEnum_CANCELLED BillingSetupStatusEnum_BillingSetupStatus = 5 +) + +var BillingSetupStatusEnum_BillingSetupStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "APPROVED_HELD", + 4: "APPROVED", + 5: "CANCELLED", +} +var BillingSetupStatusEnum_BillingSetupStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "APPROVED_HELD": 3, + "APPROVED": 4, + "CANCELLED": 5, +} + +func (x BillingSetupStatusEnum_BillingSetupStatus) String() string { + return proto.EnumName(BillingSetupStatusEnum_BillingSetupStatus_name, int32(x)) +} +func (BillingSetupStatusEnum_BillingSetupStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_status_98112529998e699e, []int{0, 0} +} + +// Message describing BillingSetup statuses. 
+type BillingSetupStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingSetupStatusEnum) Reset() { *m = BillingSetupStatusEnum{} } +func (m *BillingSetupStatusEnum) String() string { return proto.CompactTextString(m) } +func (*BillingSetupStatusEnum) ProtoMessage() {} +func (*BillingSetupStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_status_98112529998e699e, []int{0} +} +func (m *BillingSetupStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingSetupStatusEnum.Unmarshal(m, b) +} +func (m *BillingSetupStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingSetupStatusEnum.Marshal(b, m, deterministic) +} +func (dst *BillingSetupStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingSetupStatusEnum.Merge(dst, src) +} +func (m *BillingSetupStatusEnum) XXX_Size() int { + return xxx_messageInfo_BillingSetupStatusEnum.Size(m) +} +func (m *BillingSetupStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BillingSetupStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingSetupStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BillingSetupStatusEnum)(nil), "google.ads.googleads.v1.enums.BillingSetupStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BillingSetupStatusEnum_BillingSetupStatus", BillingSetupStatusEnum_BillingSetupStatus_name, BillingSetupStatusEnum_BillingSetupStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/billing_setup_status.proto", fileDescriptor_billing_setup_status_98112529998e699e) +} + +var fileDescriptor_billing_setup_status_98112529998e699e = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x18, 0xfd, 0xb5, 0xfb, 0xf9, 0x2f, 0x73, 0x58, 0x73, 0xa1, 0x20, 0xee, 0x62, 0x7b, 0x80, 0x94, + 0xe2, 0x8d, 0xc4, 0xab, 0x74, 0x8d, 0x73, 0x38, 0xb2, 0xe2, 0x58, 0x05, 0x29, 0x8c, 0xce, 0x96, + 0x50, 0xe8, 0x92, 0xb2, 0xb4, 0x7b, 0x07, 0x5f, 0xc3, 0x4b, 0x1f, 0xc5, 0x47, 0x11, 0x1f, 0x42, + 0x9a, 0xae, 0xbd, 0x19, 0x7a, 0x13, 0x4e, 0xbe, 0xf3, 0x9d, 0xc3, 0x77, 0x0e, 0xb8, 0xe5, 0x52, + 0xf2, 0x2c, 0xb1, 0xa3, 0x58, 0xd9, 0x35, 0xac, 0xd0, 0xd6, 0xb1, 0x13, 0x51, 0xae, 0x95, 0xbd, + 0x4a, 0xb3, 0x2c, 0x15, 0x7c, 0xa9, 0x92, 0xa2, 0xcc, 0x97, 0xaa, 0x88, 0x8a, 0x52, 0xa1, 0x7c, + 0x23, 0x0b, 0x09, 0xfb, 0xf5, 0x3a, 0x8a, 0x62, 0x85, 0x5a, 0x25, 0xda, 0x3a, 0x48, 0x2b, 0xaf, + 0xae, 0x1b, 0xe3, 0x3c, 0xb5, 0x23, 0x21, 0x64, 0x11, 0x15, 0xa9, 0x14, 0x3b, 0xf1, 0xf0, 0xcd, + 0x00, 0x17, 0x6e, 0xed, 0x3d, 0xaf, 0xac, 0xe7, 0xda, 0x99, 0x8a, 0x72, 0x3d, 0x94, 0x00, 0xee, + 0x33, 0xf0, 0x0c, 0x74, 0x17, 0x6c, 0xee, 0xd3, 0xd1, 0xe4, 0x7e, 0x42, 0x3d, 0xeb, 0x1f, 0xec, + 0x82, 0xa3, 0x05, 0x7b, 0x64, 0xb3, 0x67, 0x66, 0x19, 0xd5, 0xc7, 0xa7, 0xcc, 0x9b, 0xb0, 0xb1, + 0x65, 0xc2, 0x73, 0xd0, 0x23, 0xbe, 0xff, 0x34, 0x0b, 0xa8, 0xb7, 0x7c, 0xa0, 0x53, 0xcf, 0xea, + 0xc0, 0x53, 0x70, 0xdc, 0x8c, 0xac, 0xff, 0xb0, 0x07, 0x4e, 0x46, 0x84, 0x8d, 0xe8, 0x74, 0x4a, + 0x3d, 0xeb, 0xc0, 0xfd, 0x36, 0xc0, 0xe0, 0x55, 0xae, 0xd1, 0x9f, 0x79, 0xdc, 0xcb, 0xfd, 0xa3, + 0xfc, 0x2a, 0x8a, 0x6f, 0xbc, 0xb8, 0x3b, 0x25, 0x97, 0x59, 0x24, 0x38, 0x92, 0x1b, 0x6e, 0xf3, + 0x44, 0xe8, 0xa0, 0x4d, 0xa7, 0x79, 0xaa, 0x7e, 0xa9, 0xf8, 0x4e, 0xbf, 0xef, 0x66, 0x67, 0x4c, + 0xc8, 0x87, 0xd9, 0x1f, 0xd7, 0x56, 0x24, 0x56, 
0xa8, 0x86, 0x15, 0x0a, 0x1c, 0x54, 0x55, 0xa3, + 0x3e, 0x1b, 0x3e, 0x24, 0xb1, 0x0a, 0x5b, 0x3e, 0x0c, 0x9c, 0x50, 0xf3, 0x5f, 0xe6, 0xa0, 0x1e, + 0x62, 0x4c, 0x62, 0x85, 0x71, 0xbb, 0x81, 0x71, 0xe0, 0x60, 0xac, 0x77, 0x56, 0x87, 0xfa, 0xb0, + 0x9b, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd4, 0x95, 0x7e, 0x6c, 0xfa, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/brand_safety_suitability.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/brand_safety_suitability.pb.go new file mode 100644 index 0000000..0197fb2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/brand_safety_suitability.pb.go @@ -0,0 +1,144 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/brand_safety_suitability.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// 3-Tier brand safety suitability control. +type BrandSafetySuitabilityEnum_BrandSafetySuitability int32 + +const ( + // Not specified. + BrandSafetySuitabilityEnum_UNSPECIFIED BrandSafetySuitabilityEnum_BrandSafetySuitability = 0 + // Used for return value only. Represents value unknown in this version. + BrandSafetySuitabilityEnum_UNKNOWN BrandSafetySuitabilityEnum_BrandSafetySuitability = 1 + // This option lets you show ads across all inventory on YouTube and video + // partners that meet our standards for monetization. This option may be an + // appropriate choice for brands that want maximum access to the full + // breadth of videos eligible for ads, including, for example, videos that + // have strong profanity in the context of comedy or a documentary, or + // excessive violence as featured in video games. + BrandSafetySuitabilityEnum_EXPANDED_INVENTORY BrandSafetySuitabilityEnum_BrandSafetySuitability = 2 + // This option lets you show ads across a wide range of content that's + // appropriate for most brands, such as popular music videos, documentaries, + // and movie trailers. The content you can show ads on is based on YouTube's + // advertiser-friendly content guidelines that take into account, for + // example, the strength or frequency of profanity, or the appropriateness + // of subject matter like sensitive events. Ads won't show, for example, on + // content with repeated strong profanity, strong sexual content, or graphic + // violence. + BrandSafetySuitabilityEnum_STANDARD_INVENTORY BrandSafetySuitabilityEnum_BrandSafetySuitability = 3 + // This option lets you show ads on a reduced range of content that's + // appropriate for brands with particularly strict guidelines around + // inappropriate language and sexual suggestiveness; above and beyond what + // YouTube's advertiser-friendly content guidelines address. 
The videos + // accessible in this sensitive category meet heightened requirements, + // especially for inappropriate language and sexual suggestiveness. For + // example, your ads will be excluded from showing on some of YouTube's most + // popular music videos and other pop culture content across YouTube and + // Google video partners. + BrandSafetySuitabilityEnum_LIMITED_INVENTORY BrandSafetySuitabilityEnum_BrandSafetySuitability = 4 +) + +var BrandSafetySuitabilityEnum_BrandSafetySuitability_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EXPANDED_INVENTORY", + 3: "STANDARD_INVENTORY", + 4: "LIMITED_INVENTORY", +} +var BrandSafetySuitabilityEnum_BrandSafetySuitability_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EXPANDED_INVENTORY": 2, + "STANDARD_INVENTORY": 3, + "LIMITED_INVENTORY": 4, +} + +func (x BrandSafetySuitabilityEnum_BrandSafetySuitability) String() string { + return proto.EnumName(BrandSafetySuitabilityEnum_BrandSafetySuitability_name, int32(x)) +} +func (BrandSafetySuitabilityEnum_BrandSafetySuitability) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_brand_safety_suitability_8b6fc888662e6ffb, []int{0, 0} +} + +// Container for enum with 3-Tier brand safety suitability control. +type BrandSafetySuitabilityEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BrandSafetySuitabilityEnum) Reset() { *m = BrandSafetySuitabilityEnum{} } +func (m *BrandSafetySuitabilityEnum) String() string { return proto.CompactTextString(m) } +func (*BrandSafetySuitabilityEnum) ProtoMessage() {} +func (*BrandSafetySuitabilityEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_brand_safety_suitability_8b6fc888662e6ffb, []int{0} +} +func (m *BrandSafetySuitabilityEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BrandSafetySuitabilityEnum.Unmarshal(m, b) +} +func (m *BrandSafetySuitabilityEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BrandSafetySuitabilityEnum.Marshal(b, m, deterministic) +} +func (dst *BrandSafetySuitabilityEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BrandSafetySuitabilityEnum.Merge(dst, src) +} +func (m *BrandSafetySuitabilityEnum) XXX_Size() int { + return xxx_messageInfo_BrandSafetySuitabilityEnum.Size(m) +} +func (m *BrandSafetySuitabilityEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BrandSafetySuitabilityEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BrandSafetySuitabilityEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BrandSafetySuitabilityEnum)(nil), "google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum_BrandSafetySuitability", BrandSafetySuitabilityEnum_BrandSafetySuitability_name, BrandSafetySuitabilityEnum_BrandSafetySuitability_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/brand_safety_suitability.proto", fileDescriptor_brand_safety_suitability_8b6fc888662e6ffb) +} + +var fileDescriptor_brand_safety_suitability_8b6fc888662e6ffb = []byte{ + // 339 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4b, 0xfb, 0x30, + 0x1c, 0xfe, 0xb7, 0xfb, 0xa3, 0x90, 0x1d, 0x9c, 0x05, 0x77, 0x98, 0xee, 0xb0, 0x7d, 0x80, 0x94, + 0xe2, 0x2d, 0x7a, 0x49, 0x6d, 0x1d, 0x45, 0xcd, 0xca, 0xba, 0xd5, 0x17, 0x0a, 0x23, 0xb3, 0xb5, + 0x14, 0xb6, 0x64, 0x2c, 0xdd, 0x60, 
0x07, 0x3f, 0x89, 0x37, 0x8f, 0x7e, 0x14, 0x3f, 0x8a, 0x27, + 0x3f, 0x82, 0x24, 0x71, 0x55, 0x61, 0x7a, 0x09, 0x0f, 0xbf, 0xe7, 0x85, 0x3c, 0x0f, 0x38, 0xcd, + 0x39, 0xcf, 0xa7, 0x99, 0x4d, 0x53, 0x61, 0x6b, 0x28, 0xd1, 0xca, 0xb1, 0x33, 0xb6, 0x9c, 0x09, + 0x7b, 0xb2, 0xa0, 0x2c, 0x1d, 0x0b, 0xfa, 0x90, 0x95, 0xeb, 0xb1, 0x58, 0x16, 0x25, 0x9d, 0x14, + 0xd3, 0xa2, 0x5c, 0xc3, 0xf9, 0x82, 0x97, 0xdc, 0x6a, 0x6b, 0x0b, 0xa4, 0xa9, 0x80, 0x95, 0x1b, + 0xae, 0x1c, 0xa8, 0xdc, 0xad, 0xa3, 0x4d, 0xf8, 0xbc, 0xb0, 0x29, 0x63, 0xbc, 0xa4, 0x65, 0xc1, + 0x99, 0xd0, 0xe6, 0xee, 0x93, 0x01, 0x5a, 0xae, 0xcc, 0x8f, 0x54, 0x7c, 0xf4, 0x95, 0xee, 0xb3, + 0xe5, 0xac, 0xfb, 0x08, 0x9a, 0xdb, 0x59, 0x6b, 0x0f, 0xd4, 0x47, 0x24, 0x0a, 0xfd, 0xb3, 0xe0, + 0x3c, 0xf0, 0xbd, 0xc6, 0x3f, 0xab, 0x0e, 0x76, 0x47, 0xe4, 0x82, 0xf4, 0xaf, 0x49, 0xc3, 0xb0, + 0x9a, 0xc0, 0xf2, 0x6f, 0x42, 0x4c, 0x3c, 0xdf, 0x1b, 0x07, 0x24, 0xf6, 0xc9, 0xb0, 0x3f, 0xb8, + 0x6d, 0x98, 0xf2, 0x1e, 0x0d, 0x31, 0xf1, 0xf0, 0xe0, 0xfb, 0xbd, 0x66, 0x1d, 0x80, 0xfd, 0xcb, + 0xe0, 0x2a, 0x18, 0xfe, 0x90, 0xff, 0x77, 0xdf, 0x0d, 0xd0, 0xb9, 0xe7, 0x33, 0xf8, 0x67, 0x43, + 0xf7, 0x70, 0xfb, 0x17, 0x43, 0x59, 0x30, 0x34, 0xee, 0xdc, 0x4f, 0x77, 0xce, 0xa7, 0x94, 0xe5, + 0x90, 0x2f, 0x72, 0x3b, 0xcf, 0x98, 0xaa, 0xbf, 0x59, 0x7b, 0x5e, 0x88, 0x5f, 0xc6, 0x3f, 0x51, + 0xef, 0xb3, 0x59, 0xeb, 0x61, 0xfc, 0x62, 0xb6, 0x7b, 0x3a, 0x0a, 0xa7, 0x02, 0x6a, 0x28, 0x51, + 0xec, 0x40, 0x39, 0x96, 0x78, 0xdd, 0xf0, 0x09, 0x4e, 0x45, 0x52, 0xf1, 0x49, 0xec, 0x24, 0x8a, + 0x7f, 0x33, 0x3b, 0xfa, 0x88, 0x10, 0x4e, 0x05, 0x42, 0x95, 0x02, 0xa1, 0xd8, 0x41, 0x48, 0x69, + 0x26, 0x3b, 0xea, 0x63, 0xc7, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x21, 0x5e, 0x5b, 0x6e, 0x14, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_delivery_method.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_delivery_method.pb.go new file mode 100644 index 0000000..9e708b1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_delivery_method.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/budget_delivery_method.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible delivery methods of a Budget. +type BudgetDeliveryMethodEnum_BudgetDeliveryMethod int32 + +const ( + // Not specified. + BudgetDeliveryMethodEnum_UNSPECIFIED BudgetDeliveryMethodEnum_BudgetDeliveryMethod = 0 + // Used for return value only. Represents value unknown in this version. + BudgetDeliveryMethodEnum_UNKNOWN BudgetDeliveryMethodEnum_BudgetDeliveryMethod = 1 + // The budget server will throttle serving evenly across + // the entire time period. 
+ BudgetDeliveryMethodEnum_STANDARD BudgetDeliveryMethodEnum_BudgetDeliveryMethod = 2 + // The budget server will not throttle serving, + // and ads will serve as fast as possible. + BudgetDeliveryMethodEnum_ACCELERATED BudgetDeliveryMethodEnum_BudgetDeliveryMethod = 3 +) + +var BudgetDeliveryMethodEnum_BudgetDeliveryMethod_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "STANDARD", + 3: "ACCELERATED", +} +var BudgetDeliveryMethodEnum_BudgetDeliveryMethod_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "STANDARD": 2, + "ACCELERATED": 3, +} + +func (x BudgetDeliveryMethodEnum_BudgetDeliveryMethod) String() string { + return proto.EnumName(BudgetDeliveryMethodEnum_BudgetDeliveryMethod_name, int32(x)) +} +func (BudgetDeliveryMethodEnum_BudgetDeliveryMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_budget_delivery_method_f2a3e0bbe704f256, []int{0, 0} +} + +// Message describing Budget delivery methods. A delivery method determines the +// rate at which the Budget is spent. +type BudgetDeliveryMethodEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BudgetDeliveryMethodEnum) Reset() { *m = BudgetDeliveryMethodEnum{} } +func (m *BudgetDeliveryMethodEnum) String() string { return proto.CompactTextString(m) } +func (*BudgetDeliveryMethodEnum) ProtoMessage() {} +func (*BudgetDeliveryMethodEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_budget_delivery_method_f2a3e0bbe704f256, []int{0} +} +func (m *BudgetDeliveryMethodEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BudgetDeliveryMethodEnum.Unmarshal(m, b) +} +func (m *BudgetDeliveryMethodEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BudgetDeliveryMethodEnum.Marshal(b, m, deterministic) +} +func (dst *BudgetDeliveryMethodEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BudgetDeliveryMethodEnum.Merge(dst, src) +} +func (m *BudgetDeliveryMethodEnum) XXX_Size() int { + return xxx_messageInfo_BudgetDeliveryMethodEnum.Size(m) +} +func (m *BudgetDeliveryMethodEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BudgetDeliveryMethodEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BudgetDeliveryMethodEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BudgetDeliveryMethodEnum)(nil), "google.ads.googleads.v1.enums.BudgetDeliveryMethodEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BudgetDeliveryMethodEnum_BudgetDeliveryMethod", BudgetDeliveryMethodEnum_BudgetDeliveryMethod_name, BudgetDeliveryMethodEnum_BudgetDeliveryMethod_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/budget_delivery_method.proto", fileDescriptor_budget_delivery_method_f2a3e0bbe704f256) +} + +var fileDescriptor_budget_delivery_method_f2a3e0bbe704f256 = []byte{ + // 313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xfb, 0x30, + 0x00, 0xfe, 0xad, 0x83, 0x9f, 0x92, 0x09, 0x96, 0xe2, 0x41, 0xc5, 0x1d, 0xb6, 0x07, 0x48, 0x28, + 0xde, 0xe2, 0x29, 0x5d, 0xe3, 0x18, 0x6a, 0x1d, 0xfb, 0x27, 0x48, 0x61, 0x74, 0x26, 0xc4, 0xc2, + 0x9a, 0x8c, 0xa6, 0x2d, 0xf8, 0x3a, 0x1e, 0x7d, 0x14, 0x1f, 0xc5, 0x83, 0xcf, 0x20, 0x4d, 0xd6, + 0x9e, 0xa6, 0x97, 0xf0, 0x91, 0xef, 0x4f, 0xbe, 0x7c, 0x00, 0x0b, 0xa5, 0xc4, 0x96, 0xa3, 0x84, + 0x69, 0x64, 0x61, 0x8d, 0x2a, 0x1f, 0x71, 0x59, 0x66, 0x1a, 0x6d, 0x4a, 0x26, 0x78, 0xb1, 0x66, + 0x7c, 0x9b, 0x56, 0x3c, 0x7f, 
0x5b, 0x67, 0xbc, 0x78, 0x55, 0x0c, 0xee, 0x72, 0x55, 0x28, 0xaf, + 0x6f, 0x0d, 0x30, 0x61, 0x1a, 0xb6, 0x5e, 0x58, 0xf9, 0xd0, 0x78, 0x2f, 0xaf, 0x9a, 0xe8, 0x5d, + 0x8a, 0x12, 0x29, 0x55, 0x91, 0x14, 0xa9, 0x92, 0xda, 0x9a, 0x87, 0x0a, 0x9c, 0x07, 0x26, 0x3c, + 0xdc, 0x67, 0x3f, 0x98, 0x68, 0x2a, 0xcb, 0x6c, 0x38, 0x07, 0x67, 0x87, 0x38, 0xef, 0x14, 0xf4, + 0x96, 0xd1, 0x7c, 0x4a, 0x47, 0x93, 0xdb, 0x09, 0x0d, 0xdd, 0x7f, 0x5e, 0x0f, 0x1c, 0x2d, 0xa3, + 0xbb, 0xe8, 0xf1, 0x29, 0x72, 0x3b, 0xde, 0x09, 0x38, 0x9e, 0x2f, 0x48, 0x14, 0x92, 0x59, 0xe8, + 0x3a, 0xb5, 0x96, 0x8c, 0x46, 0xf4, 0x9e, 0xce, 0xc8, 0x82, 0x86, 0x6e, 0x37, 0xf8, 0xee, 0x80, + 0xc1, 0x8b, 0xca, 0xe0, 0x9f, 0xa5, 0x83, 0x8b, 0x43, 0x0f, 0x4f, 0xeb, 0xc6, 0xd3, 0xce, 0x73, + 0xb0, 0xf7, 0x0a, 0xb5, 0x4d, 0xa4, 0x80, 0x2a, 0x17, 0x48, 0x70, 0x69, 0xfe, 0xd3, 0x8c, 0xb7, + 0x4b, 0xf5, 0x2f, 0x5b, 0xde, 0x98, 0xf3, 0xdd, 0xe9, 0x8e, 0x09, 0xf9, 0x70, 0xfa, 0x63, 0x1b, + 0x45, 0x98, 0x86, 0x16, 0xd6, 0x68, 0xe5, 0xc3, 0x7a, 0x00, 0xfd, 0xd9, 0xf0, 0x31, 0x61, 0x3a, + 0x6e, 0xf9, 0x78, 0xe5, 0xc7, 0x86, 0xff, 0x72, 0x06, 0xf6, 0x12, 0x63, 0xc2, 0x34, 0xc6, 0xad, + 0x02, 0xe3, 0x95, 0x8f, 0xb1, 0xd1, 0x6c, 0xfe, 0x9b, 0x62, 0xd7, 0x3f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x11, 0x07, 0xf6, 0x4b, 0xe3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_period.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_period.pb.go new file mode 100644 index 0000000..e403c48 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_period.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/budget_period.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible period of a Budget. +type BudgetPeriodEnum_BudgetPeriod int32 + +const ( + // Not specified. + BudgetPeriodEnum_UNSPECIFIED BudgetPeriodEnum_BudgetPeriod = 0 + // Used for return value only. Represents value unknown in this version. + BudgetPeriodEnum_UNKNOWN BudgetPeriodEnum_BudgetPeriod = 1 + // Daily budget. + BudgetPeriodEnum_DAILY BudgetPeriodEnum_BudgetPeriod = 2 + // Custom budget. + BudgetPeriodEnum_CUSTOM BudgetPeriodEnum_BudgetPeriod = 3 + // Fixed daily budget. 
+ BudgetPeriodEnum_FIXED_DAILY BudgetPeriodEnum_BudgetPeriod = 4 +) + +var BudgetPeriodEnum_BudgetPeriod_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DAILY", + 3: "CUSTOM", + 4: "FIXED_DAILY", +} +var BudgetPeriodEnum_BudgetPeriod_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DAILY": 2, + "CUSTOM": 3, + "FIXED_DAILY": 4, +} + +func (x BudgetPeriodEnum_BudgetPeriod) String() string { + return proto.EnumName(BudgetPeriodEnum_BudgetPeriod_name, int32(x)) +} +func (BudgetPeriodEnum_BudgetPeriod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_budget_period_32db56380851a2e1, []int{0, 0} +} + +// Message describing Budget period. +type BudgetPeriodEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BudgetPeriodEnum) Reset() { *m = BudgetPeriodEnum{} } +func (m *BudgetPeriodEnum) String() string { return proto.CompactTextString(m) } +func (*BudgetPeriodEnum) ProtoMessage() {} +func (*BudgetPeriodEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_budget_period_32db56380851a2e1, []int{0} +} +func (m *BudgetPeriodEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BudgetPeriodEnum.Unmarshal(m, b) +} +func (m *BudgetPeriodEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BudgetPeriodEnum.Marshal(b, m, deterministic) +} +func (dst *BudgetPeriodEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BudgetPeriodEnum.Merge(dst, src) +} +func (m *BudgetPeriodEnum) XXX_Size() int { + return xxx_messageInfo_BudgetPeriodEnum.Size(m) +} +func (m *BudgetPeriodEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BudgetPeriodEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BudgetPeriodEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BudgetPeriodEnum)(nil), "google.ads.googleads.v1.enums.BudgetPeriodEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BudgetPeriodEnum_BudgetPeriod", BudgetPeriodEnum_BudgetPeriod_name, BudgetPeriodEnum_BudgetPeriod_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/budget_period.proto", fileDescriptor_budget_period_32db56380851a2e1) +} + +var fileDescriptor_budget_period_32db56380851a2e1 = []byte{ + // 307 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4b, 0xc3, 0x30, + 0x18, 0x76, 0x9d, 0x4e, 0xcc, 0x04, 0x6b, 0x8f, 0xe2, 0x0e, 0xdb, 0x0f, 0x48, 0x08, 0xde, 0xe2, + 0x29, 0xdd, 0xba, 0x51, 0xd4, 0xae, 0xb0, 0x0f, 0x3f, 0x28, 0x8c, 0xce, 0x94, 0x58, 0x58, 0x93, + 0xd2, 0xb4, 0xfb, 0x41, 0x1e, 0xfd, 0x29, 0xfe, 0x10, 0x0f, 0xfe, 0x0a, 0x69, 0x62, 0xcb, 0x2e, + 0x7a, 0x09, 0x0f, 0xef, 0xf3, 0x91, 0xe7, 0x7d, 0x01, 0xe6, 0x52, 0xf2, 0x5d, 0x82, 0x62, 0xa6, + 0x90, 0x81, 0x35, 0xda, 0x63, 0x94, 0x88, 0x2a, 0x53, 0x68, 0x5b, 0x31, 0x9e, 0x94, 0x9b, 0x3c, + 0x29, 0x52, 0xc9, 0x60, 0x5e, 0xc8, 0x52, 0x3a, 0x03, 0xa3, 0x83, 0x31, 0x53, 0xb0, 0xb5, 0xc0, + 0x3d, 0x86, 0xda, 0x72, 0x75, 0xdd, 0x24, 0xe6, 0x29, 0x8a, 0x85, 0x90, 0x65, 0x5c, 0xa6, 0x52, + 0x28, 0x63, 0x1e, 0xbd, 0x01, 0xdb, 0xd5, 0x99, 0xa1, 0x8e, 0xf4, 0x44, 0x95, 0x8d, 0x96, 0xe0, + 0xfc, 0x70, 0xe6, 0x5c, 0x80, 0xfe, 0x2a, 0x58, 0x84, 0xde, 0xd8, 0x9f, 0xfa, 0xde, 0xc4, 0x3e, + 0x72, 0xfa, 0xe0, 0x74, 0x15, 0xdc, 0x05, 0xf3, 0xc7, 0xc0, 0xee, 0x38, 0x67, 0xe0, 0x64, 0x42, + 0xfd, 0xfb, 0x67, 0xdb, 0x72, 0x00, 0xe8, 0x8d, 0x57, 0x8b, 0xe5, 0xfc, 0xc1, 0xee, 0xd6, 0xa6, + 0xa9, 0xff, 0xe4, 0x4d, 0x36, 
0x86, 0x3c, 0x76, 0xbf, 0x3a, 0x60, 0xf8, 0x2a, 0x33, 0xf8, 0x6f, + 0x5b, 0xf7, 0xf2, 0xf0, 0xe7, 0xb0, 0xae, 0x18, 0x76, 0x5e, 0xdc, 0x5f, 0x0f, 0x97, 0xbb, 0x58, + 0x70, 0x28, 0x0b, 0x8e, 0x78, 0x22, 0xf4, 0x02, 0xcd, 0x91, 0xf2, 0x54, 0xfd, 0x71, 0xb3, 0x5b, + 0xfd, 0xbe, 0x5b, 0xdd, 0x19, 0xa5, 0x1f, 0xd6, 0x60, 0x66, 0xa2, 0x28, 0x53, 0xd0, 0xc0, 0x1a, + 0xad, 0x31, 0xac, 0x37, 0x57, 0x9f, 0x0d, 0x1f, 0x51, 0xa6, 0xa2, 0x96, 0x8f, 0xd6, 0x38, 0xd2, + 0xfc, 0xb7, 0x35, 0x34, 0x43, 0x42, 0x28, 0x53, 0x84, 0xb4, 0x0a, 0x42, 0xd6, 0x98, 0x10, 0xad, + 0xd9, 0xf6, 0x74, 0xb1, 0x9b, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf9, 0xa5, 0x03, 0x16, 0xcb, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_status.pb.go new file mode 100644 index 0000000..86214c8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/budget_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a Budget. +type BudgetStatusEnum_BudgetStatus int32 + +const ( + // Not specified. + BudgetStatusEnum_UNSPECIFIED BudgetStatusEnum_BudgetStatus = 0 + // Used for return value only. Represents value unknown in this version. + BudgetStatusEnum_UNKNOWN BudgetStatusEnum_BudgetStatus = 1 + // Budget is enabled. + BudgetStatusEnum_ENABLED BudgetStatusEnum_BudgetStatus = 2 + // Budget is removed. 
+ BudgetStatusEnum_REMOVED BudgetStatusEnum_BudgetStatus = 3 +) + +var BudgetStatusEnum_BudgetStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var BudgetStatusEnum_BudgetStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x BudgetStatusEnum_BudgetStatus) String() string { + return proto.EnumName(BudgetStatusEnum_BudgetStatus_name, int32(x)) +} +func (BudgetStatusEnum_BudgetStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_budget_status_c0cacb75be9fe794, []int{0, 0} +} + +// Message describing a Budget status +type BudgetStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BudgetStatusEnum) Reset() { *m = BudgetStatusEnum{} } +func (m *BudgetStatusEnum) String() string { return proto.CompactTextString(m) } +func (*BudgetStatusEnum) ProtoMessage() {} +func (*BudgetStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_budget_status_c0cacb75be9fe794, []int{0} +} +func (m *BudgetStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BudgetStatusEnum.Unmarshal(m, b) +} +func (m *BudgetStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BudgetStatusEnum.Marshal(b, m, deterministic) +} +func (dst *BudgetStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BudgetStatusEnum.Merge(dst, src) +} +func (m *BudgetStatusEnum) XXX_Size() int { + return xxx_messageInfo_BudgetStatusEnum.Size(m) +} +func (m *BudgetStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BudgetStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BudgetStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BudgetStatusEnum)(nil), "google.ads.googleads.v1.enums.BudgetStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BudgetStatusEnum_BudgetStatus", BudgetStatusEnum_BudgetStatus_name, BudgetStatusEnum_BudgetStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/budget_status.proto", fileDescriptor_budget_status_c0cacb75be9fe794) +} + +var fileDescriptor_budget_status_c0cacb75be9fe794 = []byte{ + // 294 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x1d, 0x28, 0x64, 0x82, 0x75, 0x47, 0x71, 0x87, 0xed, 0x01, 0x12, 0x82, 0xb7, 0x78, + 0x4a, 0x5d, 0x36, 0x86, 0xda, 0x15, 0xc7, 0x2a, 0x8c, 0x82, 0x64, 0xa6, 0x84, 0xc2, 0x9a, 0x94, + 0x25, 0xdd, 0x03, 0x79, 0xf4, 0x51, 0x7c, 0x10, 0x0f, 0x3e, 0x85, 0x24, 0x5d, 0xcb, 0x2e, 0x7a, + 0x29, 0xbf, 0xef, 0xfb, 0xfd, 0xe9, 0x2f, 0x1f, 0xc0, 0x52, 0x6b, 0xb9, 0xcb, 0x11, 0x17, 0x06, + 0x35, 0xd0, 0xa1, 0x03, 0x46, 0xb9, 0xaa, 0x4b, 0x83, 0xb6, 0xb5, 0x90, 0xb9, 0x7d, 0x33, 0x96, + 0xdb, 0xda, 0xc0, 0x6a, 0xaf, 0xad, 0x1e, 0x8e, 0x1a, 0x1d, 0xe4, 0xc2, 0xc0, 0xce, 0x02, 0x0f, + 0x18, 0x7a, 0xcb, 0xcd, 0x6d, 0x9b, 0x58, 0x15, 0x88, 0x2b, 0xa5, 0x2d, 0xb7, 0x85, 0x56, 0x47, + 0xf3, 0x64, 0x03, 0xc2, 0xc8, 0x67, 0xae, 0x7c, 0x24, 0x53, 0x75, 0x39, 0x99, 0x81, 0xcb, 0xd3, + 0xdd, 0xf0, 0x0a, 0x0c, 0xd6, 0xf1, 0x2a, 0x61, 0x0f, 0x8b, 0xd9, 0x82, 0x4d, 0xc3, 0xb3, 0xe1, + 0x00, 0x5c, 0xac, 0xe3, 0xc7, 0x78, 0xf9, 0x1a, 0x87, 0x3d, 0x37, 0xb0, 0x98, 0x46, 0x4f, 0x6c, + 0x1a, 0x06, 0x6e, 0x78, 0x61, 0xcf, 0xcb, 0x94, 0x4d, 0xc3, 0x7e, 0xf4, 0xdd, 0x03, 0xe3, 0x77, + 0x5d, 0xc2, 0x7f, 0xfb, 0x45, 0xd7, 0xa7, 0xff, 0x4a, 0x5c, 0xa9, 
0xa4, 0xb7, 0x89, 0x8e, 0x1e, + 0xa9, 0x77, 0x5c, 0x49, 0xa8, 0xf7, 0x12, 0xc9, 0x5c, 0xf9, 0xca, 0xed, 0x59, 0xaa, 0xc2, 0xfc, + 0x71, 0xa5, 0x7b, 0xff, 0xfd, 0x08, 0xfa, 0x73, 0x4a, 0x3f, 0x83, 0xd1, 0xbc, 0x89, 0xa2, 0xc2, + 0xc0, 0x06, 0x3a, 0x94, 0x62, 0xe8, 0xde, 0x6a, 0xbe, 0x5a, 0x3e, 0xa3, 0xc2, 0x64, 0x1d, 0x9f, + 0xa5, 0x38, 0xf3, 0xfc, 0x4f, 0x30, 0x6e, 0x96, 0x84, 0x50, 0x61, 0x08, 0xe9, 0x14, 0x84, 0xa4, + 0x98, 0x10, 0xaf, 0xd9, 0x9e, 0xfb, 0x62, 0x77, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xee, 0xbd, + 0x27, 0x0e, 0xbd, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_type.pb.go new file mode 100644 index 0000000..c003898 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/budget_type.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/budget_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible Budget types. +type BudgetTypeEnum_BudgetType int32 + +const ( + // Not specified. + BudgetTypeEnum_UNSPECIFIED BudgetTypeEnum_BudgetType = 0 + // Used for return value only. Represents value unknown in this version. + BudgetTypeEnum_UNKNOWN BudgetTypeEnum_BudgetType = 1 + // Budget type for standard Google Ads usage. + // Caps daily spend at two times the specified budget amount. + // Full details: https://support.google.com/google-ads/answer/6385083 + BudgetTypeEnum_STANDARD BudgetTypeEnum_BudgetType = 2 + // Budget type for Hotels Ads commission program. + // Full details: https://support.google.com/google-ads/answer/9243945 + // + // This type is only supported by campaigns with + // AdvertisingChannelType.HOTEL, BiddingStrategyType.COMMISSION and + // PaymentMode.CONVERSION_VALUE. + BudgetTypeEnum_HOTEL_ADS_COMMISSION BudgetTypeEnum_BudgetType = 3 + // Budget type with a fixed cost-per-acquisition (conversion). + // Full details: https://support.google.com/google-ads/answer/7528254 + // + // This type is only supported by campaigns with + // AdvertisingChannelType.DISPLAY (excluding + // AdvertisingChannelSubType.DISPLAY_GMAIL), + // BiddingStrategyType.TARGET_CPA and PaymentMode.CONVERSIONS. 
+ BudgetTypeEnum_FIXED_CPA BudgetTypeEnum_BudgetType = 4 +) + +var BudgetTypeEnum_BudgetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "STANDARD", + 3: "HOTEL_ADS_COMMISSION", + 4: "FIXED_CPA", +} +var BudgetTypeEnum_BudgetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "STANDARD": 2, + "HOTEL_ADS_COMMISSION": 3, + "FIXED_CPA": 4, +} + +func (x BudgetTypeEnum_BudgetType) String() string { + return proto.EnumName(BudgetTypeEnum_BudgetType_name, int32(x)) +} +func (BudgetTypeEnum_BudgetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_budget_type_ef871b5d9330c0c2, []int{0, 0} +} + +// Describes Budget types. +type BudgetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BudgetTypeEnum) Reset() { *m = BudgetTypeEnum{} } +func (m *BudgetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*BudgetTypeEnum) ProtoMessage() {} +func (*BudgetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_budget_type_ef871b5d9330c0c2, []int{0} +} +func (m *BudgetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BudgetTypeEnum.Unmarshal(m, b) +} +func (m *BudgetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BudgetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *BudgetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BudgetTypeEnum.Merge(dst, src) +} +func (m *BudgetTypeEnum) XXX_Size() int { + return xxx_messageInfo_BudgetTypeEnum.Size(m) +} +func (m *BudgetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BudgetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BudgetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BudgetTypeEnum)(nil), "google.ads.googleads.v1.enums.BudgetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.BudgetTypeEnum_BudgetType", BudgetTypeEnum_BudgetType_name, BudgetTypeEnum_BudgetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/budget_type.proto", fileDescriptor_budget_type_ef871b5d9330c0c2) +} + +var fileDescriptor_budget_type_ef871b5d9330c0c2 = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4e, 0xfa, 0x30, + 0x1c, 0xfd, 0x33, 0xfe, 0xf1, 0xa3, 0xa8, 0x2c, 0x8b, 0x17, 0xc6, 0xc8, 0x05, 0x3c, 0x40, 0x97, + 0xc5, 0xbb, 0x7a, 0xd5, 0xb1, 0x81, 0x8b, 0xb2, 0x2d, 0x0e, 0xd0, 0x98, 0x25, 0x4b, 0xb1, 0x4d, + 0x43, 0x02, 0xed, 0x42, 0x07, 0x09, 0xaf, 0xe3, 0xa5, 0x8f, 0xe2, 0x73, 0x78, 0xe5, 0x53, 0x98, + 0x75, 0x30, 0xae, 0xf4, 0xa6, 0x39, 0xf9, 0x9d, 0x8f, 0x9c, 0x1e, 0x60, 0x73, 0x29, 0xf9, 0x82, + 0xd9, 0x84, 0xaa, 0x1d, 0x2c, 0xd1, 0xc6, 0xb1, 0x99, 0x58, 0x2f, 0x95, 0x3d, 0x5b, 0x53, 0xce, + 0x8a, 0xac, 0xd8, 0xe6, 0x0c, 0xe6, 0x2b, 0x59, 0x48, 0xab, 0x53, 0xa9, 0x20, 0xa1, 0x0a, 0xd6, + 0x06, 0xb8, 0x71, 0xa0, 0x36, 0x5c, 0xdf, 0xec, 0xf3, 0xf2, 0xb9, 0x4d, 0x84, 0x90, 0x05, 0x29, + 0xe6, 0x52, 0xa8, 0xca, 0xdc, 0x53, 0xe0, 0xc2, 0xd5, 0x89, 0xe3, 0x6d, 0xce, 0x7c, 0xb1, 0x5e, + 0xf6, 0x08, 0x00, 0x87, 0x8b, 0xd5, 0x06, 0xad, 0x49, 0x98, 0xc4, 0x7e, 0x3f, 0x18, 0x04, 0xbe, + 0x67, 0xfe, 0xb3, 0x5a, 0xe0, 0x78, 0x12, 0x3e, 0x84, 0xd1, 0x73, 0x68, 0x36, 0xac, 0x33, 0x70, + 0x92, 0x8c, 0x71, 0xe8, 0xe1, 0x27, 0xcf, 0x34, 0xac, 0x2b, 0x70, 0x79, 0x1f, 0x8d, 0xfd, 0xc7, + 0x0c, 0x7b, 0x49, 0xd6, 0x8f, 0x46, 0xa3, 0x20, 0x49, 0x82, 0x28, 0x34, 0x9b, 0xd6, 0x39, 0x38, + 
0x1d, 0x04, 0x2f, 0xbe, 0x97, 0xf5, 0x63, 0x6c, 0xfe, 0x77, 0xbf, 0x1a, 0xa0, 0xfb, 0x26, 0x97, + 0xf0, 0xcf, 0xe2, 0x6e, 0xfb, 0x50, 0x23, 0x2e, 0xbb, 0xc6, 0x8d, 0x57, 0x77, 0xe7, 0xe0, 0x72, + 0x41, 0x04, 0x87, 0x72, 0xc5, 0x6d, 0xce, 0x84, 0xfe, 0xc9, 0x7e, 0xab, 0x7c, 0xae, 0x7e, 0x99, + 0xee, 0x4e, 0xbf, 0xef, 0x46, 0x73, 0x88, 0xf1, 0x87, 0xd1, 0x19, 0x56, 0x51, 0x98, 0x2a, 0x58, + 0xc1, 0x12, 0x4d, 0x1d, 0x58, 0x8e, 0xa0, 0x3e, 0xf7, 0x7c, 0x8a, 0xa9, 0x4a, 0x6b, 0x3e, 0x9d, + 0x3a, 0xa9, 0xe6, 0xbf, 0x8d, 0x6e, 0x75, 0x44, 0x08, 0x53, 0x85, 0x50, 0xad, 0x40, 0x68, 0xea, + 0x20, 0xa4, 0x35, 0xb3, 0x23, 0x5d, 0xec, 0xf6, 0x27, 0x00, 0x00, 0xff, 0xff, 0x9a, 0xec, 0x70, + 0xb2, 0xd2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_conversion_reporting_state.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_conversion_reporting_state.pb.go new file mode 100644 index 0000000..53934db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_conversion_reporting_state.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/call_conversion_reporting_state.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible data types for a call conversion action state. +type CallConversionReportingStateEnum_CallConversionReportingState int32 + +const ( + // Not specified. + CallConversionReportingStateEnum_UNSPECIFIED CallConversionReportingStateEnum_CallConversionReportingState = 0 + // Used for return value only. Represents value unknown in this version. + CallConversionReportingStateEnum_UNKNOWN CallConversionReportingStateEnum_CallConversionReportingState = 1 + // Call conversion action is disabled. + CallConversionReportingStateEnum_DISABLED CallConversionReportingStateEnum_CallConversionReportingState = 2 + // Call conversion action will use call conversion type set at the + // account level. + CallConversionReportingStateEnum_USE_ACCOUNT_LEVEL_CALL_CONVERSION_ACTION CallConversionReportingStateEnum_CallConversionReportingState = 3 + // Call conversion action will use call conversion type set at the resource + // (call only ads/call extensions) level. 
+ CallConversionReportingStateEnum_USE_RESOURCE_LEVEL_CALL_CONVERSION_ACTION CallConversionReportingStateEnum_CallConversionReportingState = 4 +) + +var CallConversionReportingStateEnum_CallConversionReportingState_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DISABLED", + 3: "USE_ACCOUNT_LEVEL_CALL_CONVERSION_ACTION", + 4: "USE_RESOURCE_LEVEL_CALL_CONVERSION_ACTION", +} +var CallConversionReportingStateEnum_CallConversionReportingState_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DISABLED": 2, + "USE_ACCOUNT_LEVEL_CALL_CONVERSION_ACTION": 3, + "USE_RESOURCE_LEVEL_CALL_CONVERSION_ACTION": 4, +} + +func (x CallConversionReportingStateEnum_CallConversionReportingState) String() string { + return proto.EnumName(CallConversionReportingStateEnum_CallConversionReportingState_name, int32(x)) +} +func (CallConversionReportingStateEnum_CallConversionReportingState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_call_conversion_reporting_state_c7c215557a8b7336, []int{0, 0} +} + +// Container for enum describing possible data types for call conversion +// reporting state. +type CallConversionReportingStateEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallConversionReportingStateEnum) Reset() { *m = CallConversionReportingStateEnum{} } +func (m *CallConversionReportingStateEnum) String() string { return proto.CompactTextString(m) } +func (*CallConversionReportingStateEnum) ProtoMessage() {} +func (*CallConversionReportingStateEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_call_conversion_reporting_state_c7c215557a8b7336, []int{0} +} +func (m *CallConversionReportingStateEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallConversionReportingStateEnum.Unmarshal(m, b) +} +func (m *CallConversionReportingStateEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallConversionReportingStateEnum.Marshal(b, m, deterministic) +} +func (dst *CallConversionReportingStateEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallConversionReportingStateEnum.Merge(dst, src) +} +func (m *CallConversionReportingStateEnum) XXX_Size() int { + return xxx_messageInfo_CallConversionReportingStateEnum.Size(m) +} +func (m *CallConversionReportingStateEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CallConversionReportingStateEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CallConversionReportingStateEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CallConversionReportingStateEnum)(nil), "google.ads.googleads.v1.enums.CallConversionReportingStateEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CallConversionReportingStateEnum_CallConversionReportingState", CallConversionReportingStateEnum_CallConversionReportingState_name, CallConversionReportingStateEnum_CallConversionReportingState_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/call_conversion_reporting_state.proto", fileDescriptor_call_conversion_reporting_state_c7c215557a8b7336) +} + +var fileDescriptor_call_conversion_reporting_state_c7c215557a8b7336 = []byte{ + // 365 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x41, 0xeb, 0x9b, 0x30, + 0x1c, 0x9d, 0x76, 0x6c, 0x23, 0x1d, 0x4c, 0x3c, 0x8e, 0x16, 0xd6, 0x9e, 0x36, 0xd8, 0x22, 0xb2, + 0x5b, 0x76, 0x8a, 0x69, 0x56, 0x64, 0x12, 0x8b, 0x56, 0x07, 0x43, 0x08, 0x59, 0x15, 0x11, 
0x6c, + 0x22, 0xc6, 0xf6, 0x03, 0xed, 0x32, 0xd8, 0x47, 0x19, 0xfb, 0x24, 0xfb, 0x00, 0x3b, 0x0f, 0xf5, + 0xaf, 0xb7, 0xbf, 0x97, 0xf0, 0xc8, 0x7b, 0xbf, 0xf7, 0x92, 0xf7, 0x03, 0xa4, 0x54, 0xaa, 0xac, + 0x0b, 0x47, 0xe4, 0xda, 0x19, 0x61, 0x8f, 0xee, 0xae, 0x53, 0xc8, 0xdb, 0x55, 0x3b, 0x17, 0x51, + 0xd7, 0xfc, 0xa2, 0xe4, 0xbd, 0x68, 0x75, 0xa5, 0x24, 0x6f, 0x8b, 0x46, 0xb5, 0x5d, 0x25, 0x4b, + 0xae, 0x3b, 0xd1, 0x15, 0xb0, 0x69, 0x55, 0xa7, 0xec, 0xed, 0x38, 0x09, 0x45, 0xae, 0xe1, 0x6c, + 0x02, 0xef, 0x2e, 0x1c, 0x4c, 0x5e, 0x6f, 0xa6, 0x8c, 0xa6, 0x72, 0x84, 0x94, 0xaa, 0x13, 0x5d, + 0xa5, 0xa4, 0x1e, 0x87, 0xf7, 0x7f, 0x0c, 0xf0, 0x86, 0x88, 0xba, 0x26, 0x73, 0x4a, 0x34, 0x85, + 0xc4, 0x7d, 0x06, 0x95, 0xb7, 0xeb, 0xfe, 0xa7, 0x01, 0x36, 0x4b, 0x22, 0xfb, 0x15, 0x58, 0x27, + 0x2c, 0x3e, 0x51, 0xe2, 0x7f, 0xf6, 0xe9, 0xc1, 0x7a, 0x62, 0xaf, 0xc1, 0xf3, 0x84, 0x7d, 0x61, + 0xe1, 0x57, 0x66, 0x19, 0xf6, 0x4b, 0xf0, 0xe2, 0xe0, 0xc7, 0xd8, 0x0b, 0xe8, 0xc1, 0x32, 0xed, + 0xf7, 0xe0, 0x6d, 0x12, 0x53, 0x8e, 0x09, 0x09, 0x13, 0x76, 0xe6, 0x01, 0x4d, 0x69, 0xc0, 0x09, + 0x0e, 0x02, 0x4e, 0x42, 0x96, 0xd2, 0x28, 0xf6, 0x43, 0xc6, 0x31, 0x39, 0xfb, 0x21, 0xb3, 0x56, + 0xf6, 0x07, 0xf0, 0xae, 0x57, 0x47, 0x34, 0x0e, 0x93, 0x88, 0xd0, 0x65, 0xf9, 0x53, 0xef, 0x9f, + 0x01, 0x76, 0x17, 0x75, 0x85, 0x8b, 0x95, 0x78, 0xbb, 0xa5, 0xcf, 0x9c, 0xfa, 0x5e, 0x4e, 0xc6, + 0x37, 0xef, 0xc1, 0xa3, 0x54, 0xb5, 0x90, 0x25, 0x54, 0x6d, 0xe9, 0x94, 0x85, 0x1c, 0x5a, 0x9b, + 0x76, 0xd5, 0x54, 0xfa, 0x91, 0xd5, 0x7d, 0x1a, 0xce, 0x1f, 0xe6, 0xea, 0x88, 0xf1, 0x2f, 0x73, + 0x7b, 0x1c, 0xad, 0x70, 0xae, 0xe1, 0x08, 0x7b, 0x94, 0xba, 0xb0, 0x6f, 0x57, 0xff, 0x9e, 0xf8, + 0x0c, 0xe7, 0x3a, 0x9b, 0xf9, 0x2c, 0x75, 0xb3, 0x81, 0xff, 0x6b, 0xee, 0xc6, 0x4b, 0x84, 0x70, + 0xae, 0x11, 0x9a, 0x15, 0x08, 0xa5, 0x2e, 0x42, 0x83, 0xe6, 0xfb, 0xb3, 0xe1, 0x61, 0x1f, 0xff, + 0x07, 0x00, 0x00, 0xff, 0xff, 0xe6, 0xd3, 0x6d, 0xe0, 0x52, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_placeholder_field.pb.go new file mode 100644 index 0000000..059d736 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/call_placeholder_field.pb.go @@ -0,0 +1,141 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/call_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Call placeholder fields. +type CallPlaceholderFieldEnum_CallPlaceholderField int32 + +const ( + // Not specified. + CallPlaceholderFieldEnum_UNSPECIFIED CallPlaceholderFieldEnum_CallPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. 
+ CallPlaceholderFieldEnum_UNKNOWN CallPlaceholderFieldEnum_CallPlaceholderField = 1 + // Data Type: STRING. The advertiser's phone number to append to the ad. + CallPlaceholderFieldEnum_PHONE_NUMBER CallPlaceholderFieldEnum_CallPlaceholderField = 2 + // Data Type: STRING. Uppercase two-letter country code of the advertiser's + // phone number. + CallPlaceholderFieldEnum_COUNTRY_CODE CallPlaceholderFieldEnum_CallPlaceholderField = 3 + // Data Type: BOOLEAN. Indicates whether call tracking is enabled. Default: + // true. + CallPlaceholderFieldEnum_TRACKED CallPlaceholderFieldEnum_CallPlaceholderField = 4 + // Data Type: INT64. The ID of an AdCallMetricsConversion object. This + // object contains the phoneCallDurationfield which is the minimum duration + // (in seconds) of a call to be considered a conversion. + CallPlaceholderFieldEnum_CONVERSION_TYPE_ID CallPlaceholderFieldEnum_CallPlaceholderField = 5 + // Data Type: STRING. Indicates whether this call extension uses its own + // call conversion setting or follows the account level setting. + // Valid values are: USE_ACCOUNT_LEVEL_CALL_CONVERSION_ACTION and + // USE_RESOURCE_LEVEL_CALL_CONVERSION_ACTION. + CallPlaceholderFieldEnum_CONVERSION_REPORTING_STATE CallPlaceholderFieldEnum_CallPlaceholderField = 6 +) + +var CallPlaceholderFieldEnum_CallPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PHONE_NUMBER", + 3: "COUNTRY_CODE", + 4: "TRACKED", + 5: "CONVERSION_TYPE_ID", + 6: "CONVERSION_REPORTING_STATE", +} +var CallPlaceholderFieldEnum_CallPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PHONE_NUMBER": 2, + "COUNTRY_CODE": 3, + "TRACKED": 4, + "CONVERSION_TYPE_ID": 5, + "CONVERSION_REPORTING_STATE": 6, +} + +func (x CallPlaceholderFieldEnum_CallPlaceholderField) String() string { + return proto.EnumName(CallPlaceholderFieldEnum_CallPlaceholderField_name, int32(x)) +} +func (CallPlaceholderFieldEnum_CallPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_call_placeholder_field_406dc30ab4e5d7bc, []int{0, 0} +} + +// Values for Call placeholder fields. 
+type CallPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallPlaceholderFieldEnum) Reset() { *m = CallPlaceholderFieldEnum{} } +func (m *CallPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*CallPlaceholderFieldEnum) ProtoMessage() {} +func (*CallPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_call_placeholder_field_406dc30ab4e5d7bc, []int{0} +} +func (m *CallPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *CallPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *CallPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallPlaceholderFieldEnum.Merge(dst, src) +} +func (m *CallPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_CallPlaceholderFieldEnum.Size(m) +} +func (m *CallPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CallPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CallPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CallPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.CallPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CallPlaceholderFieldEnum_CallPlaceholderField", CallPlaceholderFieldEnum_CallPlaceholderField_name, CallPlaceholderFieldEnum_CallPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/call_placeholder_field.proto", fileDescriptor_call_placeholder_field_406dc30ab4e5d7bc) +} + +var fileDescriptor_call_placeholder_field_406dc30ab4e5d7bc = []byte{ + // 377 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x6a, 0xdb, 0x40, + 0x14, 0xc6, 0x2b, 0xb9, 0x75, 0x61, 0x5c, 0xe8, 0x20, 0x4a, 0x69, 0x4d, 0x5d, 0xb0, 0x0f, 0x30, + 0x42, 0x74, 0x37, 0x5d, 0xe9, 0xcf, 0xd8, 0x15, 0xa6, 0x23, 0x21, 0x4b, 0x0a, 0x0e, 0x02, 0x31, + 0xb1, 0x14, 0x45, 0x30, 0xd6, 0x08, 0x8f, 0xed, 0xdb, 0x64, 0x93, 0x65, 0x8e, 0x90, 0x23, 0xe4, + 0x28, 0x59, 0xe4, 0x0c, 0x41, 0x52, 0xec, 0x64, 0xe1, 0x64, 0x33, 0x7c, 0xbc, 0xef, 0xfd, 0x3e, + 0xe6, 0xbd, 0x07, 0x70, 0x21, 0x44, 0xc1, 0x73, 0x9d, 0x65, 0x52, 0xef, 0x64, 0xa3, 0xf6, 0x86, + 0x9e, 0x57, 0xbb, 0xb5, 0xd4, 0x57, 0x8c, 0xf3, 0xb4, 0xe6, 0x6c, 0x95, 0x5f, 0x09, 0x9e, 0xe5, + 0x9b, 0xf4, 0xb2, 0xcc, 0x79, 0x86, 0xea, 0x8d, 0xd8, 0x0a, 0x6d, 0xd4, 0x01, 0x88, 0x65, 0x12, + 0x1d, 0x59, 0xb4, 0x37, 0x50, 0xcb, 0x0e, 0x7f, 0x1d, 0xa2, 0xeb, 0x52, 0x67, 0x55, 0x25, 0xb6, + 0x6c, 0x5b, 0x8a, 0x4a, 0x76, 0xf0, 0xe4, 0x4e, 0x01, 0x3f, 0x6c, 0xc6, 0xb9, 0xff, 0x12, 0x3e, + 0x6d, 0xb2, 0x49, 0xb5, 0x5b, 0x4f, 0xae, 0x15, 0xf0, 0xed, 0x94, 0xa9, 0x7d, 0x05, 0x83, 0x88, + 0x2e, 0x7c, 0x62, 0xbb, 0x53, 0x97, 0x38, 0xf0, 0x83, 0x36, 0x00, 0x9f, 0x23, 0x3a, 0xa7, 0xde, + 0x19, 0x85, 0x8a, 0x06, 0xc1, 0x17, 0xff, 0x9f, 0x47, 0x49, 0x4a, 0xa3, 0xff, 0x16, 0x09, 0xa0, + 0xda, 0x54, 0x6c, 0x2f, 0xa2, 0x61, 0xb0, 0x4c, 0x6d, 0xcf, 0x21, 0xb0, 0xd7, 0x00, 0x61, 0x60, + 0xda, 0x73, 0xe2, 0xc0, 0x8f, 0xda, 0x77, 0xa0, 0xd9, 0x1e, 0x8d, 0x49, 0xb0, 0x70, 0x3d, 0x9a, + 0x86, 0x4b, 0x9f, 0xa4, 0xae, 0x03, 0x3f, 0x69, 0xbf, 0xc1, 0xf0, 0x55, 0x3d, 0x20, 0xbe, 0x17, + 0x84, 0x2e, 0x9d, 0xa5, 0x8b, 0xd0, 0x0c, 0x09, 0xec, 0x5b, 0x8f, 0x0a, 0x18, 0xaf, 
0xc4, 0x1a, + 0xbd, 0xbb, 0x00, 0xeb, 0xe7, 0xa9, 0x11, 0xfc, 0x66, 0x7a, 0x5f, 0x39, 0xb7, 0x9e, 0xd9, 0x42, + 0x70, 0x56, 0x15, 0x48, 0x6c, 0x0a, 0xbd, 0xc8, 0xab, 0x76, 0x37, 0x87, 0x43, 0xd4, 0xa5, 0x7c, + 0xe3, 0x2e, 0x7f, 0xdb, 0xf7, 0x46, 0xed, 0xcd, 0x4c, 0xf3, 0x56, 0x1d, 0xcd, 0xba, 0x28, 0x33, + 0x93, 0xa8, 0x93, 0x8d, 0x8a, 0x0d, 0xd4, 0xec, 0x52, 0xde, 0x1f, 0xfc, 0xc4, 0xcc, 0x64, 0x72, + 0xf4, 0x93, 0xd8, 0x48, 0x5a, 0xff, 0x41, 0x1d, 0x77, 0x45, 0x8c, 0xcd, 0x4c, 0x62, 0x7c, 0xec, + 0xc0, 0x38, 0x36, 0x30, 0x6e, 0x7b, 0x2e, 0xfa, 0xed, 0xc7, 0xfe, 0x3c, 0x05, 0x00, 0x00, 0xff, + 0xff, 0x44, 0xc8, 0x4a, 0x76, 0x2f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/callout_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/callout_placeholder_field.pb.go new file mode 100644 index 0000000..dda3bd9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/callout_placeholder_field.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/callout_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Callout placeholder fields. +type CalloutPlaceholderFieldEnum_CalloutPlaceholderField int32 + +const ( + // Not specified. + CalloutPlaceholderFieldEnum_UNSPECIFIED CalloutPlaceholderFieldEnum_CalloutPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + CalloutPlaceholderFieldEnum_UNKNOWN CalloutPlaceholderFieldEnum_CalloutPlaceholderField = 1 + // Data Type: STRING. Callout text. + CalloutPlaceholderFieldEnum_CALLOUT_TEXT CalloutPlaceholderFieldEnum_CalloutPlaceholderField = 2 +) + +var CalloutPlaceholderFieldEnum_CalloutPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CALLOUT_TEXT", +} +var CalloutPlaceholderFieldEnum_CalloutPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CALLOUT_TEXT": 2, +} + +func (x CalloutPlaceholderFieldEnum_CalloutPlaceholderField) String() string { + return proto.EnumName(CalloutPlaceholderFieldEnum_CalloutPlaceholderField_name, int32(x)) +} +func (CalloutPlaceholderFieldEnum_CalloutPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_callout_placeholder_field_16aa589fff00a281, []int{0, 0} +} + +// Values for Callout placeholder fields. 
+type CalloutPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CalloutPlaceholderFieldEnum) Reset() { *m = CalloutPlaceholderFieldEnum{} } +func (m *CalloutPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*CalloutPlaceholderFieldEnum) ProtoMessage() {} +func (*CalloutPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_callout_placeholder_field_16aa589fff00a281, []int{0} +} +func (m *CalloutPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CalloutPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *CalloutPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CalloutPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *CalloutPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CalloutPlaceholderFieldEnum.Merge(dst, src) +} +func (m *CalloutPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_CalloutPlaceholderFieldEnum.Size(m) +} +func (m *CalloutPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CalloutPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CalloutPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CalloutPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.CalloutPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CalloutPlaceholderFieldEnum_CalloutPlaceholderField", CalloutPlaceholderFieldEnum_CalloutPlaceholderField_name, CalloutPlaceholderFieldEnum_CalloutPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/callout_placeholder_field.proto", fileDescriptor_callout_placeholder_field_16aa589fff00a281) +} + +var fileDescriptor_callout_placeholder_field_16aa589fff00a281 = []byte{ + // 310 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xeb, 0x1f, 0x14, 0x32, 0xc1, 0xd2, 0x1b, 0x41, 0xb7, 0x8b, 0xed, 0x01, 0x12, + 0x8a, 0x77, 0x11, 0x2f, 0xb2, 0xd9, 0x8d, 0xe1, 0xe8, 0x0a, 0x6e, 0x53, 0xa4, 0x30, 0xe2, 0x12, + 0xb3, 0x42, 0x96, 0x94, 0xa5, 0xdd, 0x03, 0x79, 0xe9, 0xa3, 0xf8, 0x28, 0xde, 0xf9, 0x06, 0xd2, + 0xc4, 0xd6, 0xab, 0x7a, 0x53, 0x0e, 0x3d, 0xdf, 0xf9, 0xe5, 0x7c, 0x1f, 0xb8, 0x15, 0x5a, 0x0b, + 0xc9, 0x11, 0x65, 0x06, 0x39, 0x59, 0xa9, 0x63, 0x88, 0xb8, 0x2a, 0xf7, 0x06, 0x6d, 0xa9, 0x94, + 0xba, 0x2c, 0x36, 0xb9, 0xa4, 0x5b, 0xbe, 0xd3, 0x92, 0xf1, 0xc3, 0xe6, 0x35, 0xe3, 0x92, 0xc1, + 0xfc, 0xa0, 0x0b, 0x1d, 0xf4, 0x5d, 0x06, 0x52, 0x66, 0x60, 0x13, 0x87, 0xc7, 0x10, 0xda, 0xf8, + 0x65, 0xaf, 0xa6, 0xe7, 0x19, 0xa2, 0x4a, 0xe9, 0x82, 0x16, 0x99, 0x56, 0xc6, 0x85, 0x87, 0x3b, + 0x70, 0x35, 0x76, 0xfc, 0xe4, 0x17, 0x3f, 0xa9, 0xe8, 0x91, 0x2a, 0xf7, 0xc3, 0x19, 0xb8, 0x68, + 0xb1, 0x83, 0x73, 0xd0, 0x5d, 0xc5, 0x0f, 0x49, 0x34, 0x9e, 0x4d, 0x66, 0xd1, 0x9d, 0xff, 0x2f, + 0xe8, 0x82, 0xd3, 0x55, 0x7c, 0x1f, 0x2f, 0x1e, 0x63, 0xbf, 0x13, 0xf8, 0xe0, 0x6c, 0x4c, 0xe6, + 0xf3, 0xc5, 0x6a, 0xb9, 0x59, 0x46, 0x4f, 0x4b, 0xdf, 0x1b, 0x7d, 0x75, 0xc0, 0x60, 0xab, 0xf7, + 0xf0, 0xcf, 0xb6, 0xa3, 0x5e, 0xcb, 0x73, 0x49, 0xd5, 0x36, 0xe9, 0x3c, 0x8f, 0x7e, 0xe2, 0x42, + 0x4b, 0xaa, 0x04, 0xd4, 0x07, 0x81, 0x04, 0x57, 0x76, 0x97, 0xfa, 0x76, 0x79, 0x66, 0x5a, 0x4e, + 0x79, 0x63, 0xbf, 0x6f, 0xde, 0xff, 0x29, 0x21, 0xef, 0x5e, 0x7f, 0xea, 0x50, 0x84, 0x19, 0xe8, + 
0x64, 0xa5, 0xd6, 0x21, 0xac, 0x36, 0x37, 0x1f, 0xb5, 0x9f, 0x12, 0x66, 0xd2, 0xc6, 0x4f, 0xd7, + 0x61, 0x6a, 0xfd, 0x4f, 0x6f, 0xe0, 0x7e, 0x62, 0x4c, 0x98, 0xc1, 0xb8, 0x99, 0xc0, 0x78, 0x1d, + 0x62, 0x6c, 0x67, 0x5e, 0x4e, 0x6c, 0xb1, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd7, 0x0a, + 0xbf, 0xd3, 0xe2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_experiment_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_experiment_type.pb.go new file mode 100644 index 0000000..2bf98e9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_experiment_type.pb.go @@ -0,0 +1,129 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/campaign_experiment_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates if this campaign is a normal campaign, +// a draft campaign, or an experiment campaign. +type CampaignExperimentTypeEnum_CampaignExperimentType int32 + +const ( + // Not specified. + CampaignExperimentTypeEnum_UNSPECIFIED CampaignExperimentTypeEnum_CampaignExperimentType = 0 + // Used for return value only. Represents value unknown in this version. + CampaignExperimentTypeEnum_UNKNOWN CampaignExperimentTypeEnum_CampaignExperimentType = 1 + // This is a regular campaign. + CampaignExperimentTypeEnum_BASE CampaignExperimentTypeEnum_CampaignExperimentType = 2 + // This is a draft version of a campaign. + // It has some modifications from a base campaign, + // but it does not serve or accrue metrics. + CampaignExperimentTypeEnum_DRAFT CampaignExperimentTypeEnum_CampaignExperimentType = 3 + // This is an experiment version of a campaign. + // It has some modifications from a base campaign, + // and a percentage of traffic is being diverted + // from the BASE campaign to this experiment campaign. + CampaignExperimentTypeEnum_EXPERIMENT CampaignExperimentTypeEnum_CampaignExperimentType = 4 +) + +var CampaignExperimentTypeEnum_CampaignExperimentType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BASE", + 3: "DRAFT", + 4: "EXPERIMENT", +} +var CampaignExperimentTypeEnum_CampaignExperimentType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BASE": 2, + "DRAFT": 3, + "EXPERIMENT": 4, +} + +func (x CampaignExperimentTypeEnum_CampaignExperimentType) String() string { + return proto.EnumName(CampaignExperimentTypeEnum_CampaignExperimentType_name, int32(x)) +} +func (CampaignExperimentTypeEnum_CampaignExperimentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_experiment_type_1f7d6a53994657ce, []int{0, 0} +} + +// Container for enum describing campaign experiment type. 
+type CampaignExperimentTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignExperimentTypeEnum) Reset() { *m = CampaignExperimentTypeEnum{} } +func (m *CampaignExperimentTypeEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignExperimentTypeEnum) ProtoMessage() {} +func (*CampaignExperimentTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_experiment_type_1f7d6a53994657ce, []int{0} +} +func (m *CampaignExperimentTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignExperimentTypeEnum.Unmarshal(m, b) +} +func (m *CampaignExperimentTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignExperimentTypeEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignExperimentTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignExperimentTypeEnum.Merge(dst, src) +} +func (m *CampaignExperimentTypeEnum) XXX_Size() int { + return xxx_messageInfo_CampaignExperimentTypeEnum.Size(m) +} +func (m *CampaignExperimentTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignExperimentTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignExperimentTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignExperimentTypeEnum)(nil), "google.ads.googleads.v1.enums.CampaignExperimentTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CampaignExperimentTypeEnum_CampaignExperimentType", CampaignExperimentTypeEnum_CampaignExperimentType_name, CampaignExperimentTypeEnum_CampaignExperimentType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/campaign_experiment_type.proto", fileDescriptor_campaign_experiment_type_1f7d6a53994657ce) +} + +var fileDescriptor_campaign_experiment_type_1f7d6a53994657ce = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4b, 0x4e, 0xf3, 0x30, + 0x1c, 0xc4, 0xbf, 0xa6, 0xfd, 0x78, 0xb8, 0x12, 0x44, 0x59, 0xb0, 0x28, 0x74, 0xd1, 0x1e, 0xc0, + 0x51, 0xc4, 0xce, 0xb0, 0x71, 0x5a, 0xb7, 0xaa, 0x10, 0x21, 0xea, 0x0b, 0x04, 0x91, 0x2a, 0xd3, + 0x58, 0x56, 0xa4, 0xc6, 0xb6, 0xea, 0xb4, 0xa2, 0xd7, 0x61, 0xc9, 0x51, 0x38, 0x0a, 0x2b, 0x8e, + 0x80, 0x12, 0x37, 0x59, 0x15, 0x36, 0xd1, 0x28, 0xf3, 0xff, 0x8d, 0xc6, 0x03, 0x6e, 0xb9, 0x94, + 0x7c, 0xc5, 0x5c, 0x1a, 0x6b, 0xd7, 0xc8, 0x5c, 0x6d, 0x3d, 0x97, 0x89, 0x4d, 0xaa, 0xdd, 0x25, + 0x4d, 0x15, 0x4d, 0xb8, 0x58, 0xb0, 0x37, 0xc5, 0xd6, 0x49, 0xca, 0x44, 0xb6, 0xc8, 0x76, 0x8a, + 0x41, 0xb5, 0x96, 0x99, 0x74, 0xda, 0x06, 0x81, 0x34, 0xd6, 0xb0, 0xa2, 0xe1, 0xd6, 0x83, 0x05, + 0xdd, 0xba, 0x2a, 0xc3, 0x55, 0xe2, 0x52, 0x21, 0x64, 0x46, 0xb3, 0x44, 0x0a, 0x6d, 0xe0, 0xee, + 0x0e, 0xb4, 0x7a, 0xfb, 0x78, 0x52, 0xa5, 0x4f, 0x77, 0x8a, 0x11, 0xb1, 0x49, 0xbb, 0x2f, 0xe0, + 0xe2, 0xb0, 0xeb, 0x9c, 0x83, 0xe6, 0x2c, 0x98, 0x84, 0xa4, 0x37, 0x1a, 0x8c, 0x48, 0xdf, 0xfe, + 0xe7, 0x34, 0xc1, 0xf1, 0x2c, 0xb8, 0x0b, 0x1e, 0x1e, 0x03, 0xbb, 0xe6, 0x9c, 0x80, 0x86, 0x8f, + 0x27, 0xc4, 0xb6, 0x9c, 0x53, 0xf0, 0xbf, 0x3f, 0xc6, 0x83, 0xa9, 0x5d, 0x77, 0xce, 0x00, 0x20, + 0x4f, 0x21, 0x19, 0x8f, 0xee, 0x49, 0x30, 0xb5, 0x1b, 0xfe, 0x77, 0x0d, 0x74, 0x96, 0x32, 0x85, + 0x7f, 0xd6, 0xf7, 0x2f, 0x0f, 0x17, 0x08, 0xf3, 0xf6, 0x61, 0xed, 0xd9, 0xdf, 0xd3, 0x5c, 0xae, + 0xa8, 0xe0, 0x50, 0xae, 0xb9, 0xcb, 0x99, 0x28, 0xde, 0x56, 0x4e, 0xa9, 0x12, 0xfd, 0xcb, 0xb2, + 0x37, 0xc5, 0xf7, 0xdd, 0xaa, 
0x0f, 0x31, 0xfe, 0xb0, 0xda, 0x43, 0x13, 0x85, 0x63, 0x0d, 0x8d, + 0xcc, 0xd5, 0xdc, 0x83, 0xf9, 0x14, 0xfa, 0xb3, 0xf4, 0x23, 0x1c, 0xeb, 0xa8, 0xf2, 0xa3, 0xb9, + 0x17, 0x15, 0xfe, 0x97, 0xd5, 0x31, 0x3f, 0x11, 0xc2, 0xb1, 0x46, 0xa8, 0xba, 0x40, 0x68, 0xee, + 0x21, 0x54, 0xdc, 0xbc, 0x1e, 0x15, 0xc5, 0xae, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x50, 0x48, + 0x21, 0xf1, 0xf1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_serving_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_serving_status.pb.go new file mode 100644 index 0000000..64e8c95 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_serving_status.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/campaign_serving_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible serving statuses of a campaign. +type CampaignServingStatusEnum_CampaignServingStatus int32 + +const ( + // No value has been specified. + CampaignServingStatusEnum_UNSPECIFIED CampaignServingStatusEnum_CampaignServingStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + CampaignServingStatusEnum_UNKNOWN CampaignServingStatusEnum_CampaignServingStatus = 1 + // Serving. + CampaignServingStatusEnum_SERVING CampaignServingStatusEnum_CampaignServingStatus = 2 + // None. + CampaignServingStatusEnum_NONE CampaignServingStatusEnum_CampaignServingStatus = 3 + // Ended. + CampaignServingStatusEnum_ENDED CampaignServingStatusEnum_CampaignServingStatus = 4 + // Pending. + CampaignServingStatusEnum_PENDING CampaignServingStatusEnum_CampaignServingStatus = 5 + // Suspended. + CampaignServingStatusEnum_SUSPENDED CampaignServingStatusEnum_CampaignServingStatus = 6 +) + +var CampaignServingStatusEnum_CampaignServingStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SERVING", + 3: "NONE", + 4: "ENDED", + 5: "PENDING", + 6: "SUSPENDED", +} +var CampaignServingStatusEnum_CampaignServingStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SERVING": 2, + "NONE": 3, + "ENDED": 4, + "PENDING": 5, + "SUSPENDED": 6, +} + +func (x CampaignServingStatusEnum_CampaignServingStatus) String() string { + return proto.EnumName(CampaignServingStatusEnum_CampaignServingStatus_name, int32(x)) +} +func (CampaignServingStatusEnum_CampaignServingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_serving_status_afe2dc785917db06, []int{0, 0} +} + +// Message describing Campaign serving statuses. 
+type CampaignServingStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignServingStatusEnum) Reset() { *m = CampaignServingStatusEnum{} } +func (m *CampaignServingStatusEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignServingStatusEnum) ProtoMessage() {} +func (*CampaignServingStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_serving_status_afe2dc785917db06, []int{0} +} +func (m *CampaignServingStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignServingStatusEnum.Unmarshal(m, b) +} +func (m *CampaignServingStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignServingStatusEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignServingStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignServingStatusEnum.Merge(dst, src) +} +func (m *CampaignServingStatusEnum) XXX_Size() int { + return xxx_messageInfo_CampaignServingStatusEnum.Size(m) +} +func (m *CampaignServingStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignServingStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignServingStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignServingStatusEnum)(nil), "google.ads.googleads.v1.enums.CampaignServingStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CampaignServingStatusEnum_CampaignServingStatus", CampaignServingStatusEnum_CampaignServingStatus_name, CampaignServingStatusEnum_CampaignServingStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/campaign_serving_status.proto", fileDescriptor_campaign_serving_status_afe2dc785917db06) +} + +var fileDescriptor_campaign_serving_status_afe2dc785917db06 = []byte{ + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0xb6, 0xdd, 0x1f, 0x5d, 0x86, 0x58, 0x0a, 0x1e, 0x1c, 0xee, 0xb0, 0x3d, 0x40, 0x4a, 0xf1, + 0x96, 0x9d, 0xba, 0x35, 0x8e, 0x21, 0x64, 0xc5, 0xb2, 0x0a, 0x52, 0x18, 0x71, 0x2d, 0xa1, 0xb0, + 0x26, 0x65, 0xe9, 0xf6, 0x1c, 0x3e, 0x83, 0x47, 0x1f, 0xc5, 0x47, 0xf1, 0xe2, 0x2b, 0x48, 0x92, + 0xad, 0xa7, 0xe9, 0xa5, 0x7c, 0xcd, 0xf7, 0x87, 0xdf, 0xf7, 0x81, 0x09, 0x13, 0x82, 0x6d, 0x73, + 0x8f, 0x66, 0xd2, 0x33, 0x50, 0xa1, 0x83, 0xef, 0xe5, 0x7c, 0x5f, 0x4a, 0x6f, 0x43, 0xcb, 0x8a, + 0x16, 0x8c, 0xaf, 0x65, 0xbe, 0x3b, 0x14, 0x9c, 0xad, 0x65, 0x4d, 0xeb, 0xbd, 0x84, 0xd5, 0x4e, + 0xd4, 0xc2, 0x1d, 0x1a, 0x07, 0xa4, 0x99, 0x84, 0x8d, 0x19, 0x1e, 0x7c, 0xa8, 0xcd, 0x83, 0xfb, + 0x53, 0x76, 0x55, 0x78, 0x94, 0x73, 0x51, 0xd3, 0xba, 0x10, 0xfc, 0x68, 0x1e, 0xbf, 0x5b, 0xe0, + 0x6e, 0x76, 0x8c, 0x8f, 0x4d, 0x7a, 0xac, 0xc3, 0x31, 0xdf, 0x97, 0x63, 0x09, 0x6e, 0xcf, 0x92, + 0xee, 0x0d, 0xe8, 0xaf, 0x48, 0x1c, 0xe1, 0xd9, 0xe2, 0x71, 0x81, 0x43, 0xe7, 0xc2, 0xed, 0x83, + 0xcb, 0x15, 0x79, 0x22, 0xcb, 0x17, 0xe2, 0x58, 0xea, 0x27, 0xc6, 0xcf, 0xc9, 0x82, 0xcc, 0x1d, + 0xdb, 0xbd, 0x02, 0x6d, 0xb2, 0x24, 0xd8, 0x69, 0xb9, 0x3d, 0xd0, 0xc1, 0x24, 0xc4, 0xa1, 0xd3, + 0x56, 0x8a, 0x08, 0x93, 0x50, 0x29, 0x3a, 0xee, 0x35, 0xe8, 0xc5, 0xab, 0x38, 0x32, 0x5c, 0x77, + 0xfa, 0x63, 0x81, 0xd1, 0x46, 0x94, 0xf0, 0xdf, 0x5a, 0xd3, 0xc1, 0xd9, 0xc3, 0x22, 0x55, 0x2a, + 0xb2, 0x5e, 0xa7, 0x47, 0x33, 0x13, 0x5b, 0xca, 0x19, 0x14, 0x3b, 0xe6, 0xb1, 0x9c, 0xeb, 0xca, + 0xa7, 0x81, 0xab, 0x42, 0xfe, 0xb1, 0xf7, 0x44, 0x7f, 0x3f, 
0xec, 0xd6, 0x3c, 0x08, 0x3e, 0xed, + 0xe1, 0xdc, 0x44, 0x05, 0x99, 0x84, 0x06, 0x2a, 0x94, 0xf8, 0x50, 0x2d, 0x24, 0xbf, 0x4e, 0x7c, + 0x1a, 0x64, 0x32, 0x6d, 0xf8, 0x34, 0xf1, 0x53, 0xcd, 0x7f, 0xdb, 0x23, 0xf3, 0x88, 0x50, 0x90, + 0x49, 0x84, 0x1a, 0x05, 0x42, 0x89, 0x8f, 0x90, 0xd6, 0xbc, 0x75, 0xf5, 0x61, 0x0f, 0xbf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x98, 0x3d, 0x25, 0x2d, 0x07, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_shared_set_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_shared_set_status.pb.go new file mode 100644 index 0000000..7fea400 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_shared_set_status.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/campaign_shared_set_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible campaign shared set statuses. +type CampaignSharedSetStatusEnum_CampaignSharedSetStatus int32 + +const ( + // Not specified. + CampaignSharedSetStatusEnum_UNSPECIFIED CampaignSharedSetStatusEnum_CampaignSharedSetStatus = 0 + // Used for return value only. Represents value unknown in this version. + CampaignSharedSetStatusEnum_UNKNOWN CampaignSharedSetStatusEnum_CampaignSharedSetStatus = 1 + // The campaign shared set is enabled. + CampaignSharedSetStatusEnum_ENABLED CampaignSharedSetStatusEnum_CampaignSharedSetStatus = 2 + // The campaign shared set is removed and can no longer be used. + CampaignSharedSetStatusEnum_REMOVED CampaignSharedSetStatusEnum_CampaignSharedSetStatus = 3 +) + +var CampaignSharedSetStatusEnum_CampaignSharedSetStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var CampaignSharedSetStatusEnum_CampaignSharedSetStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x CampaignSharedSetStatusEnum_CampaignSharedSetStatus) String() string { + return proto.EnumName(CampaignSharedSetStatusEnum_CampaignSharedSetStatus_name, int32(x)) +} +func (CampaignSharedSetStatusEnum_CampaignSharedSetStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_status_88d95ddd26d28aba, []int{0, 0} +} + +// Container for enum describing types of campaign shared set statuses. 
+type CampaignSharedSetStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignSharedSetStatusEnum) Reset() { *m = CampaignSharedSetStatusEnum{} } +func (m *CampaignSharedSetStatusEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignSharedSetStatusEnum) ProtoMessage() {} +func (*CampaignSharedSetStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_status_88d95ddd26d28aba, []int{0} +} +func (m *CampaignSharedSetStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignSharedSetStatusEnum.Unmarshal(m, b) +} +func (m *CampaignSharedSetStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignSharedSetStatusEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignSharedSetStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignSharedSetStatusEnum.Merge(dst, src) +} +func (m *CampaignSharedSetStatusEnum) XXX_Size() int { + return xxx_messageInfo_CampaignSharedSetStatusEnum.Size(m) +} +func (m *CampaignSharedSetStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignSharedSetStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignSharedSetStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignSharedSetStatusEnum)(nil), "google.ads.googleads.v1.enums.CampaignSharedSetStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CampaignSharedSetStatusEnum_CampaignSharedSetStatus", CampaignSharedSetStatusEnum_CampaignSharedSetStatus_name, CampaignSharedSetStatusEnum_CampaignSharedSetStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/campaign_shared_set_status.proto", fileDescriptor_campaign_shared_set_status_88d95ddd26d28aba) +} + +var fileDescriptor_campaign_shared_set_status_88d95ddd26d28aba = []byte{ + // 313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xf3, 0x30, + 0x18, 0xfd, 0xd7, 0xc1, 0x2f, 0x74, 0x17, 0x96, 0xdd, 0x08, 0xba, 0x5d, 0x6c, 0x0f, 0x90, 0x50, + 0xbc, 0x8b, 0x20, 0xa4, 0x5b, 0x1c, 0x43, 0xed, 0xa6, 0x65, 0x15, 0xa4, 0x30, 0xe2, 0x12, 0x62, + 0x61, 0x4d, 0xca, 0xbe, 0x6c, 0x0f, 0xe4, 0xa5, 0x8f, 0xe2, 0xa3, 0x78, 0xe7, 0x1b, 0x48, 0xd2, + 0x6d, 0x77, 0xf3, 0xa6, 0x9c, 0xaf, 0xe7, 0x3b, 0xe7, 0x3b, 0x39, 0xe1, 0xad, 0x32, 0x46, 0xad, + 0x25, 0xe6, 0x02, 0x70, 0x03, 0x1d, 0xda, 0xc5, 0x58, 0xea, 0x6d, 0x05, 0x78, 0xc5, 0xab, 0x9a, + 0x97, 0x4a, 0x2f, 0xe1, 0x9d, 0x6f, 0xa4, 0x58, 0x82, 0xb4, 0x4b, 0xb0, 0xdc, 0x6e, 0x01, 0xd5, + 0x1b, 0x63, 0x4d, 0xb7, 0xdf, 0x88, 0x10, 0x17, 0x80, 0x8e, 0x7a, 0xb4, 0x8b, 0x91, 0xd7, 0x5f, + 0xf6, 0x0e, 0xf6, 0x75, 0x89, 0xb9, 0xd6, 0xc6, 0x72, 0x5b, 0x1a, 0xbd, 0x17, 0x0f, 0xeb, 0xf0, + 0x6a, 0xb4, 0x3f, 0x90, 0x79, 0xff, 0x4c, 0xda, 0xcc, 0xbb, 0x33, 0xbd, 0xad, 0x86, 0x4f, 0xe1, + 0xc5, 0x09, 0xba, 0x7b, 0x1e, 0x76, 0x16, 0x69, 0x36, 0x67, 0xa3, 0xe9, 0xdd, 0x94, 0x8d, 0xa3, + 0x7f, 0xdd, 0x4e, 0x78, 0xb6, 0x48, 0xef, 0xd3, 0xd9, 0x4b, 0x1a, 0xb5, 0xdc, 0xc0, 0x52, 0x9a, + 0x3c, 0xb0, 0x71, 0x14, 0xb8, 0xe1, 0x99, 0x3d, 0xce, 0x72, 0x36, 0x8e, 0xda, 0xc9, 0x4f, 0x2b, + 0x1c, 0xac, 0x4c, 0x85, 0xfe, 0x4c, 0x9d, 0xf4, 0x4e, 0x9c, 0x9d, 0xbb, 0xd4, 0xf3, 0xd6, 0x6b, + 0xb2, 0x97, 0x2b, 0xb3, 0xe6, 0x5a, 0x21, 0xb3, 0x51, 0x58, 0x49, 0xed, 0xdf, 0x74, 0x28, 0xb1, + 0x2e, 0xe1, 0x44, 0xa7, 0x37, 0xfe, 0xfb, 0x11, 0xb4, 0x27, 0x94, 0x7e, 0x06, 0xfd, 0x49, 0x63, 
+ 0x45, 0x05, 0xa0, 0x06, 0x3a, 0x94, 0xc7, 0xc8, 0x35, 0x00, 0x5f, 0x07, 0xbe, 0xa0, 0x02, 0x8a, + 0x23, 0x5f, 0xe4, 0x71, 0xe1, 0xf9, 0xef, 0x60, 0xd0, 0xfc, 0x24, 0x84, 0x0a, 0x20, 0xe4, 0xb8, + 0x41, 0x48, 0x1e, 0x13, 0xe2, 0x77, 0xde, 0xfe, 0xfb, 0x60, 0xd7, 0xbf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xfa, 0x2b, 0xea, 0x0c, 0xeb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_status.pb.go new file mode 100644 index 0000000..816da9e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/campaign_status.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/campaign_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a campaign. +type CampaignStatusEnum_CampaignStatus int32 + +const ( + // Not specified. + CampaignStatusEnum_UNSPECIFIED CampaignStatusEnum_CampaignStatus = 0 + // Used for return value only. Represents value unknown in this version. + CampaignStatusEnum_UNKNOWN CampaignStatusEnum_CampaignStatus = 1 + // Campaign is currently serving ads depending on budget information. + CampaignStatusEnum_ENABLED CampaignStatusEnum_CampaignStatus = 2 + // Campaign has been paused by the user. + CampaignStatusEnum_PAUSED CampaignStatusEnum_CampaignStatus = 3 + // Campaign has been removed. + CampaignStatusEnum_REMOVED CampaignStatusEnum_CampaignStatus = 4 +) + +var CampaignStatusEnum_CampaignStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "PAUSED", + 4: "REMOVED", +} +var CampaignStatusEnum_CampaignStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "PAUSED": 3, + "REMOVED": 4, +} + +func (x CampaignStatusEnum_CampaignStatus) String() string { + return proto.EnumName(CampaignStatusEnum_CampaignStatus_name, int32(x)) +} +func (CampaignStatusEnum_CampaignStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_status_fffe5ae9c510246e, []int{0, 0} +} + +// Container for enum describing possible statuses of a campaign. 
+type CampaignStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignStatusEnum) Reset() { *m = CampaignStatusEnum{} } +func (m *CampaignStatusEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignStatusEnum) ProtoMessage() {} +func (*CampaignStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_status_fffe5ae9c510246e, []int{0} +} +func (m *CampaignStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignStatusEnum.Unmarshal(m, b) +} +func (m *CampaignStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignStatusEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignStatusEnum.Merge(dst, src) +} +func (m *CampaignStatusEnum) XXX_Size() int { + return xxx_messageInfo_CampaignStatusEnum.Size(m) +} +func (m *CampaignStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignStatusEnum)(nil), "google.ads.googleads.v1.enums.CampaignStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CampaignStatusEnum_CampaignStatus", CampaignStatusEnum_CampaignStatus_name, CampaignStatusEnum_CampaignStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/campaign_status.proto", fileDescriptor_campaign_status_fffe5ae9c510246e) +} + +var fileDescriptor_campaign_status_fffe5ae9c510246e = []byte{ + // 305 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4a, 0xc3, 0x30, + 0x14, 0x76, 0x9d, 0x4c, 0xc8, 0x40, 0x4b, 0xbd, 0x13, 0x77, 0xb1, 0x3d, 0x40, 0x42, 0xd9, 0x5d, + 0xbc, 0x4a, 0xd7, 0x38, 0x86, 0xda, 0x15, 0x67, 0x2b, 0x48, 0x41, 0xe2, 0x5a, 0x42, 0x65, 0x4d, + 0xca, 0xd2, 0xee, 0x81, 0xbc, 0xf4, 0x51, 0x7c, 0x12, 0xf1, 0x29, 0x24, 0xe9, 0x0f, 0xec, 0x42, + 0x6f, 0xca, 0xd7, 0xf3, 0xfd, 0xe4, 0x3b, 0x07, 0xcc, 0xb9, 0x94, 0x7c, 0x97, 0x21, 0x96, 0x2a, + 0xd4, 0x40, 0x8d, 0x0e, 0x2e, 0xca, 0x44, 0x5d, 0x28, 0xb4, 0x65, 0x45, 0xc9, 0x72, 0x2e, 0x5e, + 0x55, 0xc5, 0xaa, 0x5a, 0xc1, 0x72, 0x2f, 0x2b, 0xe9, 0x4c, 0x1a, 0x25, 0x64, 0xa9, 0x82, 0xbd, + 0x09, 0x1e, 0x5c, 0x68, 0x4c, 0x57, 0xd7, 0x5d, 0x66, 0x99, 0x23, 0x26, 0x84, 0xac, 0x58, 0x95, + 0x4b, 0xd1, 0x9a, 0x67, 0xef, 0xc0, 0x59, 0xb4, 0xa9, 0x1b, 0x13, 0x4a, 0x45, 0x5d, 0xcc, 0x9e, + 0xc0, 0xf9, 0xf1, 0xd4, 0xb9, 0x00, 0xe3, 0x28, 0xd8, 0x84, 0x74, 0xb1, 0xba, 0x5d, 0x51, 0xdf, + 0x3e, 0x71, 0xc6, 0xe0, 0x2c, 0x0a, 0xee, 0x82, 0xf5, 0x73, 0x60, 0x0f, 0xf4, 0x0f, 0x0d, 0x88, + 0x77, 0x4f, 0x7d, 0xdb, 0x72, 0x00, 0x18, 0x85, 0x24, 0xda, 0x50, 0xdf, 0x1e, 0x6a, 0xe2, 0x91, + 0x3e, 0xac, 0x63, 0xea, 0xdb, 0xa7, 0xde, 0xf7, 0x00, 0x4c, 0xb7, 0xb2, 0x80, 0xff, 0xf6, 0xf5, + 0x2e, 0x8f, 0x5f, 0x0e, 0x75, 0xcd, 0x70, 0xf0, 0xe2, 0xb5, 0x2e, 0x2e, 0x77, 0x4c, 0x70, 0x28, + 0xf7, 0x1c, 0xf1, 0x4c, 0x98, 0x25, 0xba, 0x53, 0x95, 0xb9, 0xfa, 0xe3, 0x72, 0x37, 0xe6, 0xfb, + 0x61, 0x0d, 0x97, 0x84, 0x7c, 0x5a, 0x93, 0x65, 0x13, 0x45, 0x52, 0x05, 0x1b, 0xa8, 0x51, 0xec, + 0x42, 0xbd, 0xbb, 0xfa, 0xea, 0xf8, 0x84, 0xa4, 0x2a, 0xe9, 0xf9, 0x24, 0x76, 0x13, 0xc3, 0xff, + 0x58, 0xd3, 0x66, 0x88, 0x31, 0x49, 0x15, 0xc6, 0xbd, 0x02, 0xe3, 0xd8, 0xc5, 0xd8, 0x68, 0xde, + 0x46, 0xa6, 0xd8, 0xfc, 0x37, 0x00, 0x00, 0xff, 0xff, 0xc1, 0x1f, 
0xe1, 0x6b, 0xd1, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_operation.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_operation.pb.go new file mode 100644 index 0000000..b54fcc3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_operation.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/change_status_operation.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Status of the changed resource +type ChangeStatusOperationEnum_ChangeStatusOperation int32 + +const ( + // No value has been specified. + ChangeStatusOperationEnum_UNSPECIFIED ChangeStatusOperationEnum_ChangeStatusOperation = 0 + // Used for return value only. Represents an unclassified resource unknown + // in this version. + ChangeStatusOperationEnum_UNKNOWN ChangeStatusOperationEnum_ChangeStatusOperation = 1 + // The resource was created. + ChangeStatusOperationEnum_ADDED ChangeStatusOperationEnum_ChangeStatusOperation = 2 + // The resource was modified. + ChangeStatusOperationEnum_CHANGED ChangeStatusOperationEnum_ChangeStatusOperation = 3 + // The resource was removed. + ChangeStatusOperationEnum_REMOVED ChangeStatusOperationEnum_ChangeStatusOperation = 4 +) + +var ChangeStatusOperationEnum_ChangeStatusOperation_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ADDED", + 3: "CHANGED", + 4: "REMOVED", +} +var ChangeStatusOperationEnum_ChangeStatusOperation_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ADDED": 2, + "CHANGED": 3, + "REMOVED": 4, +} + +func (x ChangeStatusOperationEnum_ChangeStatusOperation) String() string { + return proto.EnumName(ChangeStatusOperationEnum_ChangeStatusOperation_name, int32(x)) +} +func (ChangeStatusOperationEnum_ChangeStatusOperation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_change_status_operation_4fcc3a6e9d69541d, []int{0, 0} +} + +// Container for enum describing operations for the ChangeStatus resource. 
+type ChangeStatusOperationEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeStatusOperationEnum) Reset() { *m = ChangeStatusOperationEnum{} } +func (m *ChangeStatusOperationEnum) String() string { return proto.CompactTextString(m) } +func (*ChangeStatusOperationEnum) ProtoMessage() {} +func (*ChangeStatusOperationEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_change_status_operation_4fcc3a6e9d69541d, []int{0} +} +func (m *ChangeStatusOperationEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeStatusOperationEnum.Unmarshal(m, b) +} +func (m *ChangeStatusOperationEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeStatusOperationEnum.Marshal(b, m, deterministic) +} +func (dst *ChangeStatusOperationEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeStatusOperationEnum.Merge(dst, src) +} +func (m *ChangeStatusOperationEnum) XXX_Size() int { + return xxx_messageInfo_ChangeStatusOperationEnum.Size(m) +} +func (m *ChangeStatusOperationEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeStatusOperationEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeStatusOperationEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ChangeStatusOperationEnum)(nil), "google.ads.googleads.v1.enums.ChangeStatusOperationEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ChangeStatusOperationEnum_ChangeStatusOperation", ChangeStatusOperationEnum_ChangeStatusOperation_name, ChangeStatusOperationEnum_ChangeStatusOperation_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/change_status_operation.proto", fileDescriptor_change_status_operation_4fcc3a6e9d69541d) +} + +var fileDescriptor_change_status_operation_4fcc3a6e9d69541d = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4f, 0xfa, 0x30, + 0x18, 0xc6, 0xff, 0x8c, 0xbf, 0x1a, 0xcb, 0xc1, 0x65, 0x89, 0x07, 0x89, 0x1c, 0xe0, 0x03, 0xb4, + 0x59, 0xbc, 0x95, 0x53, 0xa1, 0x15, 0x89, 0x71, 0x10, 0x09, 0x33, 0x21, 0x4b, 0x48, 0x65, 0x4b, + 0x25, 0x81, 0x76, 0x59, 0x37, 0xfc, 0x3e, 0x1e, 0xfd, 0x28, 0x7e, 0x14, 0x2f, 0x7e, 0x05, 0xd3, + 0xd6, 0xed, 0x84, 0x5e, 0x9a, 0xa7, 0xef, 0xf3, 0xfe, 0x9e, 0xbe, 0x7d, 0xc1, 0x50, 0x28, 0x25, + 0x76, 0x19, 0xe2, 0xa9, 0x46, 0x4e, 0x1a, 0x75, 0x08, 0x51, 0x26, 0xab, 0xbd, 0x46, 0x9b, 0x17, + 0x2e, 0x45, 0xb6, 0xd6, 0x25, 0x2f, 0x2b, 0xbd, 0x56, 0x79, 0x56, 0xf0, 0x72, 0xab, 0x24, 0xcc, + 0x0b, 0x55, 0xaa, 0xa0, 0xe7, 0x08, 0xc8, 0x53, 0x0d, 0x1b, 0x18, 0x1e, 0x42, 0x68, 0xe1, 0xee, + 0x75, 0x9d, 0x9d, 0x6f, 0x11, 0x97, 0x52, 0x95, 0x96, 0xd5, 0x0e, 0x1e, 0xbc, 0x82, 0xab, 0xb1, + 0x4d, 0x5f, 0xd8, 0xf0, 0x59, 0x9d, 0xcd, 0x64, 0xb5, 0x1f, 0xac, 0xc0, 0xe5, 0x51, 0x33, 0xb8, + 0x00, 0x9d, 0x65, 0xb4, 0x98, 0xb3, 0xf1, 0xf4, 0x76, 0xca, 0xa8, 0xff, 0x2f, 0xe8, 0x80, 0xb3, + 0x65, 0x74, 0x1f, 0xcd, 0x9e, 0x22, 0xbf, 0x15, 0x9c, 0x83, 0x13, 0x42, 0x29, 0xa3, 0xbe, 0x67, + 0xea, 0xe3, 0x3b, 0x12, 0x4d, 0x18, 0xf5, 0xdb, 0xe6, 0xf2, 0xc8, 0x1e, 0x66, 0x31, 0xa3, 0xfe, + 0xff, 0xd1, 0x57, 0x0b, 0xf4, 0x37, 0x6a, 0x0f, 0xff, 0x1c, 0x7e, 0xd4, 0x3d, 0xfa, 0xfe, 0xdc, + 0x8c, 0x3e, 0x6f, 0xad, 0x46, 0x3f, 0xb0, 0x50, 0x3b, 0x2e, 0x05, 0x54, 0x85, 0x40, 0x22, 0x93, + 0xf6, 0x63, 0xf5, 0x1a, 0xf3, 0xad, 0xfe, 0x65, 0xab, 0x43, 0x7b, 0xbe, 0x79, 0xed, 0x09, 0x21, + 0xef, 0x5e, 0x6f, 0xe2, 0xa2, 0x48, 0xaa, 0xa1, 0x93, 0x46, 
0xc5, 0x21, 0x34, 0x8b, 0xd0, 0x1f, + 0xb5, 0x9f, 0x90, 0x54, 0x27, 0x8d, 0x9f, 0xc4, 0x61, 0x62, 0xfd, 0x4f, 0xaf, 0xef, 0x8a, 0x18, + 0x93, 0x54, 0x63, 0xdc, 0x74, 0x60, 0x1c, 0x87, 0x18, 0xdb, 0x9e, 0xe7, 0x53, 0x3b, 0xd8, 0xcd, + 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5a, 0x98, 0xf8, 0x7d, 0xed, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_resource_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_resource_type.pb.go new file mode 100644 index 0000000..77a84f9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/change_status_resource_type.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/change_status_resource_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the resource types support by the ChangeStatus resource. +type ChangeStatusResourceTypeEnum_ChangeStatusResourceType int32 + +const ( + // No value has been specified. + ChangeStatusResourceTypeEnum_UNSPECIFIED ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 0 + // Used for return value only. Represents an unclassified resource unknown + // in this version. + ChangeStatusResourceTypeEnum_UNKNOWN ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 1 + // An AdGroup resource change. + ChangeStatusResourceTypeEnum_AD_GROUP ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 3 + // An AdGroupAd resource change. + ChangeStatusResourceTypeEnum_AD_GROUP_AD ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 4 + // An AdGroupCriterion resource change. + ChangeStatusResourceTypeEnum_AD_GROUP_CRITERION ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 5 + // A Campaign resource change. + ChangeStatusResourceTypeEnum_CAMPAIGN ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 6 + // A CampaignCriterion resource change. + ChangeStatusResourceTypeEnum_CAMPAIGN_CRITERION ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 7 + // A Feed resource change. + ChangeStatusResourceTypeEnum_FEED ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 9 + // A FeedItem resource change. + ChangeStatusResourceTypeEnum_FEED_ITEM ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 10 + // An AdGroupFeed resource change. + ChangeStatusResourceTypeEnum_AD_GROUP_FEED ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 11 + // A CampaignFeed resource change. + ChangeStatusResourceTypeEnum_CAMPAIGN_FEED ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 12 + // An AdGroupBidModifier resource change. 
+ ChangeStatusResourceTypeEnum_AD_GROUP_BID_MODIFIER ChangeStatusResourceTypeEnum_ChangeStatusResourceType = 13 +) + +var ChangeStatusResourceTypeEnum_ChangeStatusResourceType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "AD_GROUP", + 4: "AD_GROUP_AD", + 5: "AD_GROUP_CRITERION", + 6: "CAMPAIGN", + 7: "CAMPAIGN_CRITERION", + 9: "FEED", + 10: "FEED_ITEM", + 11: "AD_GROUP_FEED", + 12: "CAMPAIGN_FEED", + 13: "AD_GROUP_BID_MODIFIER", +} +var ChangeStatusResourceTypeEnum_ChangeStatusResourceType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP": 3, + "AD_GROUP_AD": 4, + "AD_GROUP_CRITERION": 5, + "CAMPAIGN": 6, + "CAMPAIGN_CRITERION": 7, + "FEED": 9, + "FEED_ITEM": 10, + "AD_GROUP_FEED": 11, + "CAMPAIGN_FEED": 12, + "AD_GROUP_BID_MODIFIER": 13, +} + +func (x ChangeStatusResourceTypeEnum_ChangeStatusResourceType) String() string { + return proto.EnumName(ChangeStatusResourceTypeEnum_ChangeStatusResourceType_name, int32(x)) +} +func (ChangeStatusResourceTypeEnum_ChangeStatusResourceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_change_status_resource_type_6cf9a467f229f19c, []int{0, 0} +} + +// Container for enum describing supported resource types for the ChangeStatus +// resource. +type ChangeStatusResourceTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeStatusResourceTypeEnum) Reset() { *m = ChangeStatusResourceTypeEnum{} } +func (m *ChangeStatusResourceTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ChangeStatusResourceTypeEnum) ProtoMessage() {} +func (*ChangeStatusResourceTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_change_status_resource_type_6cf9a467f229f19c, []int{0} +} +func (m *ChangeStatusResourceTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeStatusResourceTypeEnum.Unmarshal(m, b) +} +func (m *ChangeStatusResourceTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeStatusResourceTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ChangeStatusResourceTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeStatusResourceTypeEnum.Merge(dst, src) +} +func (m *ChangeStatusResourceTypeEnum) XXX_Size() int { + return xxx_messageInfo_ChangeStatusResourceTypeEnum.Size(m) +} +func (m *ChangeStatusResourceTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeStatusResourceTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeStatusResourceTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ChangeStatusResourceTypeEnum)(nil), "google.ads.googleads.v1.enums.ChangeStatusResourceTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ChangeStatusResourceTypeEnum_ChangeStatusResourceType", ChangeStatusResourceTypeEnum_ChangeStatusResourceType_name, ChangeStatusResourceTypeEnum_ChangeStatusResourceType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/change_status_resource_type.proto", fileDescriptor_change_status_resource_type_6cf9a467f229f19c) +} + +var fileDescriptor_change_status_resource_type_6cf9a467f229f19c = []byte{ + // 407 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcd, 0x8a, 0xdb, 0x30, + 0x10, 0xc7, 0x6b, 0xef, 0x76, 0x3f, 0x94, 0x0d, 0x55, 0x05, 0x2d, 0x6d, 0xd9, 0x1c, 0x76, 0x1f, + 0x40, 0xc6, 0xf4, 0xa6, 0x1e, 0x8a, 0xfc, 0xb1, 0xc6, 0x94, 0xd8, 0xc6, 0x9b, 0xa4, 
0x50, 0x0c, + 0xc6, 0x8d, 0x85, 0x1b, 0x48, 0x24, 0x63, 0xd9, 0x81, 0xbc, 0x45, 0x9f, 0xa1, 0xc7, 0x3e, 0x4a, + 0x1f, 0xa5, 0x97, 0x42, 0x9f, 0xa0, 0x48, 0x8e, 0x4d, 0x2f, 0xd9, 0x8b, 0xf9, 0xcf, 0xcc, 0x6f, + 0xfe, 0x63, 0xcd, 0x80, 0x8f, 0x95, 0x10, 0xd5, 0x96, 0x59, 0x45, 0x29, 0xad, 0x5e, 0x2a, 0xb5, + 0xb7, 0x2d, 0xc6, 0xbb, 0x9d, 0xb4, 0xd6, 0xdf, 0x0a, 0x5e, 0xb1, 0x5c, 0xb6, 0x45, 0xdb, 0xc9, + 0xbc, 0x61, 0x52, 0x74, 0xcd, 0x9a, 0xe5, 0xed, 0xa1, 0x66, 0xb8, 0x6e, 0x44, 0x2b, 0xd0, 0xac, + 0xef, 0xc2, 0x45, 0x29, 0xf1, 0x68, 0x80, 0xf7, 0x36, 0xd6, 0x06, 0xef, 0x6e, 0x07, 0xff, 0x7a, + 0x63, 0x15, 0x9c, 0x8b, 0xb6, 0x68, 0x37, 0x82, 0xcb, 0xbe, 0xf9, 0xfe, 0xbb, 0x09, 0x6e, 0x5d, + 0x3d, 0xe2, 0x51, 0x4f, 0x48, 0x8f, 0x03, 0x16, 0x87, 0x9a, 0xf9, 0xbc, 0xdb, 0xdd, 0xff, 0x31, + 0xc0, 0x9b, 0x53, 0x00, 0x7a, 0x01, 0x26, 0xcb, 0xe8, 0x31, 0xf1, 0xdd, 0xf0, 0x21, 0xf4, 0x3d, + 0xf8, 0x0c, 0x4d, 0xc0, 0xe5, 0x32, 0xfa, 0x14, 0xc5, 0x9f, 0x23, 0x68, 0xa0, 0x1b, 0x70, 0x45, + 0xbd, 0x3c, 0x48, 0xe3, 0x65, 0x02, 0xcf, 0x14, 0x3b, 0x44, 0x39, 0xf5, 0xe0, 0x39, 0x7a, 0x0d, + 0xd0, 0x98, 0x70, 0xd3, 0x70, 0xe1, 0xa7, 0x61, 0x1c, 0xc1, 0xe7, 0xaa, 0xcd, 0xa5, 0xf3, 0x84, + 0x86, 0x41, 0x04, 0x2f, 0x14, 0x35, 0x44, 0xff, 0x51, 0x97, 0xe8, 0x0a, 0x9c, 0x3f, 0xf8, 0xbe, + 0x07, 0xaf, 0xd1, 0x14, 0x5c, 0x2b, 0x95, 0x87, 0x0b, 0x7f, 0x0e, 0x01, 0x7a, 0x09, 0xa6, 0xa3, + 0xad, 0x26, 0x26, 0x2a, 0x35, 0x7a, 0xe8, 0xd4, 0x0d, 0x7a, 0x0b, 0x5e, 0x8d, 0x94, 0x13, 0x7a, + 0xf9, 0x3c, 0xf6, 0xd4, 0x1b, 0x52, 0x38, 0x75, 0xfe, 0x1a, 0xe0, 0x6e, 0x2d, 0x76, 0xf8, 0xc9, + 0xb5, 0x3a, 0xb3, 0x53, 0x4b, 0x49, 0xd4, 0x5e, 0x13, 0xe3, 0x8b, 0x73, 0xec, 0xaf, 0xc4, 0xb6, + 0xe0, 0x15, 0x16, 0x4d, 0x65, 0x55, 0x8c, 0xeb, 0xad, 0x0f, 0x77, 0xae, 0x37, 0xf2, 0xc4, 0xd9, + 0x3f, 0xe8, 0xef, 0x0f, 0xf3, 0x2c, 0xa0, 0xf4, 0xa7, 0x39, 0x0b, 0x7a, 0x2b, 0x5a, 0x4a, 0xdc, + 0x4b, 0xa5, 0x56, 0x36, 0x56, 0x17, 0x92, 0xbf, 0x86, 0x7a, 0x46, 0x4b, 0x99, 0x8d, 0xf5, 0x6c, + 0x65, 0x67, 0xba, 0xfe, 0xdb, 0xbc, 0xeb, 0x93, 0x84, 0xd0, 0x52, 0x12, 0x32, 0x12, 0x84, 0xac, + 0x6c, 0x42, 0x34, 0xf3, 0xf5, 0x42, 0xff, 0xd8, 0xfb, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x45, + 0xcb, 0xd2, 0x17, 0x8e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/click_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/click_type.pb.go new file mode 100644 index 0000000..22b444a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/click_type.pb.go @@ -0,0 +1,357 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/click_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates Google Ads click types. +type ClickTypeEnum_ClickType int32 + +const ( + // Not specified. 
+ ClickTypeEnum_UNSPECIFIED ClickTypeEnum_ClickType = 0 + // The value is unknown in this version. + ClickTypeEnum_UNKNOWN ClickTypeEnum_ClickType = 1 + // App engagement ad deep link. + ClickTypeEnum_APP_DEEPLINK ClickTypeEnum_ClickType = 2 + // Breadcrumbs. + ClickTypeEnum_BREADCRUMBS ClickTypeEnum_ClickType = 3 + // Broadband Plan. + ClickTypeEnum_BROADBAND_PLAN ClickTypeEnum_ClickType = 4 + // Manually dialed phone calls. + ClickTypeEnum_CALL_TRACKING ClickTypeEnum_ClickType = 5 + // Phone calls. + ClickTypeEnum_CALLS ClickTypeEnum_ClickType = 6 + // Click on engagement ad. + ClickTypeEnum_CLICK_ON_ENGAGEMENT_AD ClickTypeEnum_ClickType = 7 + // Driving direction. + ClickTypeEnum_GET_DIRECTIONS ClickTypeEnum_ClickType = 8 + // Get location details. + ClickTypeEnum_LOCATION_EXPANSION ClickTypeEnum_ClickType = 9 + // Call. + ClickTypeEnum_LOCATION_FORMAT_CALL ClickTypeEnum_ClickType = 10 + // Directions. + ClickTypeEnum_LOCATION_FORMAT_DIRECTIONS ClickTypeEnum_ClickType = 11 + // Image(s). + ClickTypeEnum_LOCATION_FORMAT_IMAGE ClickTypeEnum_ClickType = 12 + // Go to landing page. + ClickTypeEnum_LOCATION_FORMAT_LANDING_PAGE ClickTypeEnum_ClickType = 13 + // Map. + ClickTypeEnum_LOCATION_FORMAT_MAP ClickTypeEnum_ClickType = 14 + // Go to store info. + ClickTypeEnum_LOCATION_FORMAT_STORE_INFO ClickTypeEnum_ClickType = 15 + // Text. + ClickTypeEnum_LOCATION_FORMAT_TEXT ClickTypeEnum_ClickType = 16 + // Mobile phone calls. + ClickTypeEnum_MOBILE_CALL_TRACKING ClickTypeEnum_ClickType = 17 + // Print offer. + ClickTypeEnum_OFFER_PRINTS ClickTypeEnum_ClickType = 18 + // Other. + ClickTypeEnum_OTHER ClickTypeEnum_ClickType = 19 + // Product plusbox offer. + ClickTypeEnum_PRODUCT_EXTENSION_CLICKS ClickTypeEnum_ClickType = 20 + // Shopping - Product - Online. + ClickTypeEnum_PRODUCT_LISTING_AD_CLICKS ClickTypeEnum_ClickType = 21 + // Sitelink. + ClickTypeEnum_SITELINKS ClickTypeEnum_ClickType = 22 + // Show nearby locations. + ClickTypeEnum_STORE_LOCATOR ClickTypeEnum_ClickType = 23 + // Headline. + ClickTypeEnum_URL_CLICKS ClickTypeEnum_ClickType = 25 + // App store. + ClickTypeEnum_VIDEO_APP_STORE_CLICKS ClickTypeEnum_ClickType = 26 + // Call-to-Action overlay. + ClickTypeEnum_VIDEO_CALL_TO_ACTION_CLICKS ClickTypeEnum_ClickType = 27 + // Cards. + ClickTypeEnum_VIDEO_CARD_ACTION_HEADLINE_CLICKS ClickTypeEnum_ClickType = 28 + // End cap. + ClickTypeEnum_VIDEO_END_CAP_CLICKS ClickTypeEnum_ClickType = 29 + // Website. + ClickTypeEnum_VIDEO_WEBSITE_CLICKS ClickTypeEnum_ClickType = 30 + // Visual Sitelinks. + ClickTypeEnum_VISUAL_SITELINKS ClickTypeEnum_ClickType = 31 + // Wireless Plan. + ClickTypeEnum_WIRELESS_PLAN ClickTypeEnum_ClickType = 32 + // Shopping - Product - Local. + ClickTypeEnum_PRODUCT_LISTING_AD_LOCAL ClickTypeEnum_ClickType = 33 + // Shopping - Product - MultiChannel Local. + ClickTypeEnum_PRODUCT_LISTING_AD_MULTICHANNEL_LOCAL ClickTypeEnum_ClickType = 34 + // Shopping - Product - MultiChannel Online. + ClickTypeEnum_PRODUCT_LISTING_AD_MULTICHANNEL_ONLINE ClickTypeEnum_ClickType = 35 + // Shopping - Product - Coupon. + ClickTypeEnum_PRODUCT_LISTING_ADS_COUPON ClickTypeEnum_ClickType = 36 + // Shopping - Product - Sell on Google. + ClickTypeEnum_PRODUCT_LISTING_AD_TRANSACTABLE ClickTypeEnum_ClickType = 37 + // Shopping - Product - App engagement ad deep link. + ClickTypeEnum_PRODUCT_AD_APP_DEEPLINK ClickTypeEnum_ClickType = 38 + // Shopping - Showcase - Category. + ClickTypeEnum_SHOWCASE_AD_CATEGORY_LINK ClickTypeEnum_ClickType = 39 + // Shopping - Showcase - Local storefront. 
+ ClickTypeEnum_SHOWCASE_AD_LOCAL_STOREFRONT_LINK ClickTypeEnum_ClickType = 40 + // Shopping - Showcase - Online product. + ClickTypeEnum_SHOWCASE_AD_ONLINE_PRODUCT_LINK ClickTypeEnum_ClickType = 42 + // Shopping - Showcase - Local product. + ClickTypeEnum_SHOWCASE_AD_LOCAL_PRODUCT_LINK ClickTypeEnum_ClickType = 43 + // Promotion Extension. + ClickTypeEnum_PROMOTION_EXTENSION ClickTypeEnum_ClickType = 44 + // Ad Headline. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_HEADLINE ClickTypeEnum_ClickType = 45 + // Swipes. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SWIPES ClickTypeEnum_ClickType = 46 + // See More. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SEE_MORE ClickTypeEnum_ClickType = 47 + // Sitelink 1. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SITELINK_ONE ClickTypeEnum_ClickType = 48 + // Sitelink 2. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SITELINK_TWO ClickTypeEnum_ClickType = 49 + // Sitelink 3. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SITELINK_THREE ClickTypeEnum_ClickType = 50 + // Sitelink 4. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SITELINK_FOUR ClickTypeEnum_ClickType = 51 + // Sitelink 5. + ClickTypeEnum_SWIPEABLE_GALLERY_AD_SITELINK_FIVE ClickTypeEnum_ClickType = 52 + // Hotel price. + ClickTypeEnum_HOTEL_PRICE ClickTypeEnum_ClickType = 53 + // Price Extension. + ClickTypeEnum_PRICE_EXTENSION ClickTypeEnum_ClickType = 54 + // Book on Google hotel room selection. + ClickTypeEnum_HOTEL_BOOK_ON_GOOGLE_ROOM_SELECTION ClickTypeEnum_ClickType = 55 +) + +var ClickTypeEnum_ClickType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APP_DEEPLINK", + 3: "BREADCRUMBS", + 4: "BROADBAND_PLAN", + 5: "CALL_TRACKING", + 6: "CALLS", + 7: "CLICK_ON_ENGAGEMENT_AD", + 8: "GET_DIRECTIONS", + 9: "LOCATION_EXPANSION", + 10: "LOCATION_FORMAT_CALL", + 11: "LOCATION_FORMAT_DIRECTIONS", + 12: "LOCATION_FORMAT_IMAGE", + 13: "LOCATION_FORMAT_LANDING_PAGE", + 14: "LOCATION_FORMAT_MAP", + 15: "LOCATION_FORMAT_STORE_INFO", + 16: "LOCATION_FORMAT_TEXT", + 17: "MOBILE_CALL_TRACKING", + 18: "OFFER_PRINTS", + 19: "OTHER", + 20: "PRODUCT_EXTENSION_CLICKS", + 21: "PRODUCT_LISTING_AD_CLICKS", + 22: "SITELINKS", + 23: "STORE_LOCATOR", + 25: "URL_CLICKS", + 26: "VIDEO_APP_STORE_CLICKS", + 27: "VIDEO_CALL_TO_ACTION_CLICKS", + 28: "VIDEO_CARD_ACTION_HEADLINE_CLICKS", + 29: "VIDEO_END_CAP_CLICKS", + 30: "VIDEO_WEBSITE_CLICKS", + 31: "VISUAL_SITELINKS", + 32: "WIRELESS_PLAN", + 33: "PRODUCT_LISTING_AD_LOCAL", + 34: "PRODUCT_LISTING_AD_MULTICHANNEL_LOCAL", + 35: "PRODUCT_LISTING_AD_MULTICHANNEL_ONLINE", + 36: "PRODUCT_LISTING_ADS_COUPON", + 37: "PRODUCT_LISTING_AD_TRANSACTABLE", + 38: "PRODUCT_AD_APP_DEEPLINK", + 39: "SHOWCASE_AD_CATEGORY_LINK", + 40: "SHOWCASE_AD_LOCAL_STOREFRONT_LINK", + 42: "SHOWCASE_AD_ONLINE_PRODUCT_LINK", + 43: "SHOWCASE_AD_LOCAL_PRODUCT_LINK", + 44: "PROMOTION_EXTENSION", + 45: "SWIPEABLE_GALLERY_AD_HEADLINE", + 46: "SWIPEABLE_GALLERY_AD_SWIPES", + 47: "SWIPEABLE_GALLERY_AD_SEE_MORE", + 48: "SWIPEABLE_GALLERY_AD_SITELINK_ONE", + 49: "SWIPEABLE_GALLERY_AD_SITELINK_TWO", + 50: "SWIPEABLE_GALLERY_AD_SITELINK_THREE", + 51: "SWIPEABLE_GALLERY_AD_SITELINK_FOUR", + 52: "SWIPEABLE_GALLERY_AD_SITELINK_FIVE", + 53: "HOTEL_PRICE", + 54: "PRICE_EXTENSION", + 55: "HOTEL_BOOK_ON_GOOGLE_ROOM_SELECTION", +} +var ClickTypeEnum_ClickType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APP_DEEPLINK": 2, + "BREADCRUMBS": 3, + "BROADBAND_PLAN": 4, + "CALL_TRACKING": 5, + "CALLS": 6, + "CLICK_ON_ENGAGEMENT_AD": 7, + "GET_DIRECTIONS": 8, + "LOCATION_EXPANSION": 9, + "LOCATION_FORMAT_CALL": 10, + 
"LOCATION_FORMAT_DIRECTIONS": 11, + "LOCATION_FORMAT_IMAGE": 12, + "LOCATION_FORMAT_LANDING_PAGE": 13, + "LOCATION_FORMAT_MAP": 14, + "LOCATION_FORMAT_STORE_INFO": 15, + "LOCATION_FORMAT_TEXT": 16, + "MOBILE_CALL_TRACKING": 17, + "OFFER_PRINTS": 18, + "OTHER": 19, + "PRODUCT_EXTENSION_CLICKS": 20, + "PRODUCT_LISTING_AD_CLICKS": 21, + "SITELINKS": 22, + "STORE_LOCATOR": 23, + "URL_CLICKS": 25, + "VIDEO_APP_STORE_CLICKS": 26, + "VIDEO_CALL_TO_ACTION_CLICKS": 27, + "VIDEO_CARD_ACTION_HEADLINE_CLICKS": 28, + "VIDEO_END_CAP_CLICKS": 29, + "VIDEO_WEBSITE_CLICKS": 30, + "VISUAL_SITELINKS": 31, + "WIRELESS_PLAN": 32, + "PRODUCT_LISTING_AD_LOCAL": 33, + "PRODUCT_LISTING_AD_MULTICHANNEL_LOCAL": 34, + "PRODUCT_LISTING_AD_MULTICHANNEL_ONLINE": 35, + "PRODUCT_LISTING_ADS_COUPON": 36, + "PRODUCT_LISTING_AD_TRANSACTABLE": 37, + "PRODUCT_AD_APP_DEEPLINK": 38, + "SHOWCASE_AD_CATEGORY_LINK": 39, + "SHOWCASE_AD_LOCAL_STOREFRONT_LINK": 40, + "SHOWCASE_AD_ONLINE_PRODUCT_LINK": 42, + "SHOWCASE_AD_LOCAL_PRODUCT_LINK": 43, + "PROMOTION_EXTENSION": 44, + "SWIPEABLE_GALLERY_AD_HEADLINE": 45, + "SWIPEABLE_GALLERY_AD_SWIPES": 46, + "SWIPEABLE_GALLERY_AD_SEE_MORE": 47, + "SWIPEABLE_GALLERY_AD_SITELINK_ONE": 48, + "SWIPEABLE_GALLERY_AD_SITELINK_TWO": 49, + "SWIPEABLE_GALLERY_AD_SITELINK_THREE": 50, + "SWIPEABLE_GALLERY_AD_SITELINK_FOUR": 51, + "SWIPEABLE_GALLERY_AD_SITELINK_FIVE": 52, + "HOTEL_PRICE": 53, + "PRICE_EXTENSION": 54, + "HOTEL_BOOK_ON_GOOGLE_ROOM_SELECTION": 55, +} + +func (x ClickTypeEnum_ClickType) String() string { + return proto.EnumName(ClickTypeEnum_ClickType_name, int32(x)) +} +func (ClickTypeEnum_ClickType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_click_type_094021787dbc0905, []int{0, 0} +} + +// Container for enumeration of Google Ads click types. 
+type ClickTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClickTypeEnum) Reset() { *m = ClickTypeEnum{} } +func (m *ClickTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ClickTypeEnum) ProtoMessage() {} +func (*ClickTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_click_type_094021787dbc0905, []int{0} +} +func (m *ClickTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClickTypeEnum.Unmarshal(m, b) +} +func (m *ClickTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClickTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ClickTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClickTypeEnum.Merge(dst, src) +} +func (m *ClickTypeEnum) XXX_Size() int { + return xxx_messageInfo_ClickTypeEnum.Size(m) +} +func (m *ClickTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ClickTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ClickTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ClickTypeEnum)(nil), "google.ads.googleads.v1.enums.ClickTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ClickTypeEnum_ClickType", ClickTypeEnum_ClickType_name, ClickTypeEnum_ClickType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/click_type.proto", fileDescriptor_click_type_094021787dbc0905) +} + +var fileDescriptor_click_type_094021787dbc0905 = []byte{ + // 930 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0xdb, 0x6e, 0xdb, 0x36, + 0x18, 0xc7, 0xd7, 0x74, 0x6d, 0x17, 0xe6, 0xc4, 0x32, 0x69, 0xd2, 0x1c, 0x9c, 0xd4, 0xce, 0x92, + 0x6e, 0xdd, 0x26, 0xcf, 0xeb, 0x0e, 0x80, 0x77, 0x45, 0x49, 0x9f, 0x65, 0xc2, 0x14, 0x29, 0x90, + 0xb4, 0x9d, 0x0e, 0x01, 0x08, 0x2f, 0x36, 0x8c, 0x60, 0x89, 0x6d, 0xd4, 0x69, 0x81, 0x3e, 0xc0, + 0x5e, 0x64, 0x97, 0x7b, 0x94, 0xbd, 0xc6, 0xee, 0x76, 0xbb, 0x17, 0x18, 0x28, 0x59, 0x8e, 0x93, + 0xba, 0xcd, 0x6e, 0x0c, 0x9a, 0xff, 0x1f, 0x3f, 0x7d, 0x27, 0xf2, 0x43, 0x5e, 0x7f, 0x38, 0xec, + 0x5f, 0xf4, 0xca, 0x9d, 0xee, 0xb8, 0x9c, 0x2d, 0xdd, 0xea, 0x6d, 0xa5, 0xdc, 0x1b, 0xbc, 0xb9, + 0x1c, 0x97, 0xcf, 0x2e, 0xce, 0xcf, 0x7e, 0xb3, 0x57, 0xef, 0x46, 0x3d, 0x6f, 0xf4, 0x7a, 0x78, + 0x35, 0x24, 0x85, 0x0c, 0xf2, 0x3a, 0xdd, 0xb1, 0x37, 0xe5, 0xbd, 0xb7, 0x15, 0x2f, 0xe5, 0x77, + 0xf6, 0x72, 0x73, 0xa3, 0xf3, 0x72, 0x67, 0x30, 0x18, 0x5e, 0x75, 0xae, 0xce, 0x87, 0x83, 0x71, + 0x76, 0xb8, 0xf4, 0xfb, 0x32, 0x5a, 0x09, 0x9c, 0x45, 0xf3, 0x6e, 0xd4, 0x83, 0xc1, 0x9b, 0xcb, + 0xd2, 0xbf, 0x4b, 0x68, 0x71, 0xba, 0x43, 0xd6, 0xd0, 0x52, 0x53, 0xe8, 0x04, 0x02, 0x56, 0x63, + 0x10, 0xe2, 0x4f, 0xc8, 0x12, 0x7a, 0xd4, 0x14, 0x0d, 0x21, 0xdb, 0x02, 0xdf, 0x23, 0x18, 0x2d, + 0xd3, 0x24, 0xb1, 0x21, 0x40, 0xc2, 0x99, 0x68, 0xe0, 0x05, 0xc7, 0xfb, 0x0a, 0x68, 0x18, 0xa8, + 0x66, 0xec, 0x6b, 0x7c, 0x9f, 0x10, 0xb4, 0xea, 0x2b, 0x49, 0x43, 0x9f, 0x8a, 0xd0, 0x26, 0x9c, + 0x0a, 0xfc, 0x29, 0x79, 0x8c, 0x56, 0x02, 0xca, 0xb9, 0x35, 0x8a, 0x06, 0x0d, 0x26, 0x22, 0xfc, + 0x80, 0x2c, 0xa2, 0x07, 0x6e, 0x4b, 0xe3, 0x87, 0x64, 0x07, 0x6d, 0x06, 0x9c, 0x05, 0x0d, 0x2b, + 0x85, 0x05, 0x11, 0xd1, 0x08, 0x62, 0x10, 0xc6, 0xd2, 0x10, 0x3f, 0x72, 0xd6, 0x22, 0x30, 0x36, + 0x64, 0x0a, 0x02, 0xc3, 0xa4, 0xd0, 0xf8, 0x33, 0xb2, 0x89, 0x08, 0x97, 0x01, 0x75, 0x7f, 0x2d, + 0x9c, 0x24, 0x54, 0x68, 0x26, 0x05, 0x5e, 0x24, 0x4f, 0xd1, 0xc6, 0x74, 0xbf, 0x26, 0x55, 0x4c, + 0x8d, 0x75, 0x9f, 
0xc0, 0x88, 0xec, 0xa3, 0x9d, 0xdb, 0xca, 0x8c, 0xc5, 0x25, 0xb2, 0x8d, 0x9e, + 0xdc, 0xd6, 0x59, 0x4c, 0x23, 0xc0, 0xcb, 0xe4, 0x19, 0xda, 0xbb, 0x2d, 0x71, 0x2a, 0x42, 0x26, + 0x22, 0x9b, 0x38, 0x62, 0x85, 0x6c, 0xa1, 0xf5, 0xdb, 0x44, 0x4c, 0x13, 0xbc, 0x3a, 0xef, 0xab, + 0xda, 0x48, 0x05, 0x96, 0x89, 0x9a, 0xc4, 0x6b, 0xf3, 0xfc, 0x35, 0x70, 0x62, 0x30, 0x76, 0x4a, + 0x2c, 0x7d, 0xc6, 0xc1, 0xde, 0x4c, 0xdb, 0x63, 0x57, 0x00, 0x59, 0xab, 0x81, 0xb2, 0x89, 0x62, + 0xc2, 0x68, 0x4c, 0x5c, 0x22, 0xa5, 0xa9, 0x83, 0xc2, 0xeb, 0x64, 0x0f, 0x3d, 0x4d, 0x94, 0x0c, + 0x9b, 0x81, 0xb1, 0x70, 0x62, 0x20, 0xcd, 0x8b, 0x4d, 0x53, 0xab, 0xf1, 0x06, 0x29, 0xa0, 0xed, + 0x5c, 0xe5, 0x4c, 0x1b, 0x17, 0x01, 0x0d, 0x73, 0xf9, 0x09, 0x59, 0x41, 0x8b, 0x9a, 0x19, 0x70, + 0x65, 0xd5, 0x78, 0xd3, 0x95, 0x2c, 0x73, 0x36, 0x75, 0x51, 0x2a, 0xbc, 0x45, 0x56, 0x11, 0x6a, + 0x2a, 0x9e, 0x9f, 0xd8, 0x76, 0x75, 0x6b, 0xb1, 0x10, 0xa4, 0x75, 0x2d, 0x91, 0xc1, 0x13, 0x6d, + 0x87, 0x1c, 0xa0, 0xdd, 0x4c, 0xcb, 0x02, 0x90, 0x96, 0xa6, 0xc9, 0xce, 0x81, 0x5d, 0x72, 0x84, + 0x8a, 0x39, 0xa0, 0xc2, 0x5c, 0xad, 0x03, 0x0d, 0x39, 0x13, 0x53, 0x3b, 0x7b, 0x2e, 0x13, 0x19, + 0x06, 0x22, 0xb4, 0x01, 0x4d, 0x72, 0xa5, 0x70, 0xad, 0xb4, 0xc1, 0x77, 0x8e, 0xe7, 0xca, 0x3e, + 0xd9, 0x40, 0xb8, 0xc5, 0x74, 0x93, 0x72, 0x7b, 0x1d, 0xd0, 0x81, 0x0b, 0xa8, 0xcd, 0x14, 0x70, + 0xd0, 0x3a, 0x6b, 0xcb, 0x67, 0xb3, 0xf9, 0x9a, 0xc9, 0x88, 0x0b, 0x98, 0xe3, 0x22, 0xf9, 0x12, + 0x1d, 0xcd, 0x51, 0xe3, 0x26, 0x37, 0x2c, 0xa8, 0x53, 0x21, 0x80, 0x4f, 0xd0, 0x12, 0x79, 0x81, + 0x8e, 0xef, 0x42, 0xa5, 0x70, 0x81, 0xe1, 0x43, 0xd7, 0x15, 0xef, 0xb3, 0xda, 0x06, 0xb2, 0x99, + 0x48, 0x81, 0x3f, 0x27, 0x87, 0xe8, 0x60, 0x8e, 0x2d, 0xa3, 0xa8, 0xd0, 0x34, 0x30, 0xd4, 0xe7, + 0x80, 0x8f, 0xc8, 0x2e, 0xda, 0xca, 0x21, 0x1a, 0xda, 0x1b, 0x57, 0xf2, 0xd8, 0x15, 0x5a, 0xd7, + 0x65, 0x3b, 0xa0, 0x1a, 0xd2, 0x0a, 0x53, 0x03, 0x91, 0x54, 0xaf, 0x6c, 0x2a, 0x3f, 0x77, 0x99, + 0x9f, 0x95, 0xd3, 0x18, 0xb2, 0xf2, 0xd5, 0x94, 0x14, 0x26, 0xc3, 0xbe, 0x70, 0x7e, 0xcc, 0x62, + 0x99, 0xff, 0xf6, 0xda, 0x35, 0xd1, 0xc0, 0x2f, 0x48, 0x09, 0xed, 0xbf, 0x6f, 0xeb, 0x06, 0xf3, + 0x95, 0xbb, 0x1f, 0x89, 0x92, 0xb1, 0x9c, 0xdc, 0xd7, 0x49, 0x5f, 0xe2, 0xaf, 0x49, 0x11, 0x15, + 0x74, 0x9b, 0x25, 0xe0, 0x62, 0xb2, 0x11, 0xe5, 0x1c, 0xd4, 0x2b, 0x67, 0x25, 0xef, 0x02, 0xfc, + 0x8d, 0x6b, 0xa3, 0xb9, 0x48, 0xba, 0xa9, 0xb1, 0xf7, 0x41, 0x1b, 0x1a, 0xc0, 0xc6, 0x52, 0x01, + 0x2e, 0xa7, 0xf1, 0xce, 0x45, 0x26, 0xcd, 0x61, 0xa5, 0x00, 0xfc, 0xed, 0xdd, 0x98, 0x69, 0x4b, + 0x5c, 0x21, 0xcf, 0xd1, 0xe1, 0x1d, 0x58, 0x5d, 0x01, 0xe0, 0xef, 0xc8, 0x31, 0x2a, 0x7d, 0x1c, + 0xac, 0xc9, 0xa6, 0xc2, 0x2f, 0xff, 0x07, 0xc7, 0x5a, 0x80, 0xbf, 0x77, 0x0f, 0x6d, 0x5d, 0x1a, + 0x70, 0xe9, 0x65, 0x01, 0xe0, 0x1f, 0xc8, 0x3a, 0x5a, 0x4b, 0x97, 0x33, 0x39, 0xfd, 0xd1, 0xb9, + 0x97, 0x51, 0xbe, 0x94, 0xe9, 0x83, 0x1a, 0x49, 0x19, 0x71, 0xb0, 0x4a, 0xca, 0xd8, 0x6a, 0xe0, + 0xd9, 0x9b, 0x87, 0x7f, 0xf2, 0xff, 0xbe, 0x87, 0x8a, 0x67, 0xc3, 0x4b, 0xef, 0xa3, 0xb3, 0xc4, + 0x5f, 0x9d, 0x0e, 0x86, 0xc4, 0x4d, 0x8f, 0xe4, 0xde, 0x2f, 0xfe, 0xe4, 0x40, 0x7f, 0x78, 0xd1, + 0x19, 0xf4, 0xbd, 0xe1, 0xeb, 0x7e, 0xb9, 0xdf, 0x1b, 0xa4, 0xb3, 0x25, 0x1f, 0x5e, 0xa3, 0xf3, + 0xf1, 0x07, 0x66, 0xd9, 0xcf, 0xe9, 0xef, 0x1f, 0x0b, 0xf7, 0x23, 0x4a, 0xff, 0x5c, 0x28, 0x44, + 0x99, 0x29, 0xda, 0x1d, 0x7b, 0xd9, 0xd2, 0xad, 0x5a, 0x15, 0xcf, 0x8d, 0xa5, 0xf1, 0x5f, 0xb9, + 0x7e, 0x4a, 0xbb, 0xe3, 0xd3, 0xa9, 0x7e, 0xda, 0xaa, 0x9c, 0xa6, 0xfa, 0x3f, 0x0b, 0xc5, 0x6c, + 0xb3, 0x5a, 0xa5, 0xdd, 0x71, 0xb5, 0x3a, 
0x25, 0xaa, 0xd5, 0x56, 0xa5, 0x5a, 0x4d, 0x99, 0x5f, + 0x1f, 0xa6, 0x8e, 0xbd, 0xfc, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x6a, 0x7d, 0x69, 0x8a, 0x63, 0x07, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/content_label_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/content_label_type.pb.go new file mode 100644 index 0000000..7d6c2c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/content_label_type.pb.go @@ -0,0 +1,180 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/content_label_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the content label types supported by ContentLabel criterion. +type ContentLabelTypeEnum_ContentLabelType int32 + +const ( + // Not specified. + ContentLabelTypeEnum_UNSPECIFIED ContentLabelTypeEnum_ContentLabelType = 0 + // Used for return value only. Represents value unknown in this version. + ContentLabelTypeEnum_UNKNOWN ContentLabelTypeEnum_ContentLabelType = 1 + // Sexually suggestive content. + ContentLabelTypeEnum_SEXUALLY_SUGGESTIVE ContentLabelTypeEnum_ContentLabelType = 2 + // Below the fold placement. + ContentLabelTypeEnum_BELOW_THE_FOLD ContentLabelTypeEnum_ContentLabelType = 3 + // Parked domain. + ContentLabelTypeEnum_PARKED_DOMAIN ContentLabelTypeEnum_ContentLabelType = 4 + // Game. + ContentLabelTypeEnum_GAME ContentLabelTypeEnum_ContentLabelType = 5 + // Juvenile, gross & bizarre content. + ContentLabelTypeEnum_JUVENILE ContentLabelTypeEnum_ContentLabelType = 6 + // Profanity & rough language. + ContentLabelTypeEnum_PROFANITY ContentLabelTypeEnum_ContentLabelType = 7 + // Death & tragedy. + ContentLabelTypeEnum_TRAGEDY ContentLabelTypeEnum_ContentLabelType = 8 + // Video. + ContentLabelTypeEnum_VIDEO ContentLabelTypeEnum_ContentLabelType = 9 + // Content rating: G. + ContentLabelTypeEnum_VIDEO_RATING_DV_G ContentLabelTypeEnum_ContentLabelType = 10 + // Content rating: PG. + ContentLabelTypeEnum_VIDEO_RATING_DV_PG ContentLabelTypeEnum_ContentLabelType = 11 + // Content rating: T. + ContentLabelTypeEnum_VIDEO_RATING_DV_T ContentLabelTypeEnum_ContentLabelType = 12 + // Content rating: MA. + ContentLabelTypeEnum_VIDEO_RATING_DV_MA ContentLabelTypeEnum_ContentLabelType = 13 + // Content rating: not yet rated. + ContentLabelTypeEnum_VIDEO_NOT_YET_RATED ContentLabelTypeEnum_ContentLabelType = 14 + // Embedded video. + ContentLabelTypeEnum_EMBEDDED_VIDEO ContentLabelTypeEnum_ContentLabelType = 15 + // Live streaming video. 
+ ContentLabelTypeEnum_LIVE_STREAMING_VIDEO ContentLabelTypeEnum_ContentLabelType = 16 +) + +var ContentLabelTypeEnum_ContentLabelType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEXUALLY_SUGGESTIVE", + 3: "BELOW_THE_FOLD", + 4: "PARKED_DOMAIN", + 5: "GAME", + 6: "JUVENILE", + 7: "PROFANITY", + 8: "TRAGEDY", + 9: "VIDEO", + 10: "VIDEO_RATING_DV_G", + 11: "VIDEO_RATING_DV_PG", + 12: "VIDEO_RATING_DV_T", + 13: "VIDEO_RATING_DV_MA", + 14: "VIDEO_NOT_YET_RATED", + 15: "EMBEDDED_VIDEO", + 16: "LIVE_STREAMING_VIDEO", +} +var ContentLabelTypeEnum_ContentLabelType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEXUALLY_SUGGESTIVE": 2, + "BELOW_THE_FOLD": 3, + "PARKED_DOMAIN": 4, + "GAME": 5, + "JUVENILE": 6, + "PROFANITY": 7, + "TRAGEDY": 8, + "VIDEO": 9, + "VIDEO_RATING_DV_G": 10, + "VIDEO_RATING_DV_PG": 11, + "VIDEO_RATING_DV_T": 12, + "VIDEO_RATING_DV_MA": 13, + "VIDEO_NOT_YET_RATED": 14, + "EMBEDDED_VIDEO": 15, + "LIVE_STREAMING_VIDEO": 16, +} + +func (x ContentLabelTypeEnum_ContentLabelType) String() string { + return proto.EnumName(ContentLabelTypeEnum_ContentLabelType_name, int32(x)) +} +func (ContentLabelTypeEnum_ContentLabelType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_content_label_type_fbdc3184d1a69374, []int{0, 0} +} + +// Container for enum describing content label types in ContentLabel. +type ContentLabelTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContentLabelTypeEnum) Reset() { *m = ContentLabelTypeEnum{} } +func (m *ContentLabelTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ContentLabelTypeEnum) ProtoMessage() {} +func (*ContentLabelTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_content_label_type_fbdc3184d1a69374, []int{0} +} +func (m *ContentLabelTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContentLabelTypeEnum.Unmarshal(m, b) +} +func (m *ContentLabelTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContentLabelTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ContentLabelTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContentLabelTypeEnum.Merge(dst, src) +} +func (m *ContentLabelTypeEnum) XXX_Size() int { + return xxx_messageInfo_ContentLabelTypeEnum.Size(m) +} +func (m *ContentLabelTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ContentLabelTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ContentLabelTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ContentLabelTypeEnum)(nil), "google.ads.googleads.v1.enums.ContentLabelTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ContentLabelTypeEnum_ContentLabelType", ContentLabelTypeEnum_ContentLabelType_name, ContentLabelTypeEnum_ContentLabelType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/content_label_type.proto", fileDescriptor_content_label_type_fbdc3184d1a69374) +} + +var fileDescriptor_content_label_type_fbdc3184d1a69374 = []byte{ + // 475 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x6e, 0x9b, 0x3c, + 0x18, 0xfe, 0x42, 0xff, 0x12, 0xa7, 0x69, 0x5d, 0x7f, 0xed, 0x36, 0x4d, 0xeb, 0x41, 0x7b, 0x01, + 0xa0, 0x68, 0xd2, 0x0e, 0xd8, 0x91, 0x53, 0xbf, 0x61, 0xac, 0x04, 0x50, 0x30, 0x74, 0x99, 0x22, + 0x59, 0xb4, 0x20, 0x14, 0x29, 0xc1, 0xa8, 0xd0, 0x4a, 0xbd, 0x9d, 0x1d, 0xee, 0x52, 0x76, 0xbc, + 0xab, 
0xa8, 0xb4, 0x7b, 0x98, 0x8c, 0x97, 0x1c, 0x44, 0xdd, 0x4e, 0xd0, 0xa3, 0xf7, 0xf9, 0xc1, + 0xf6, 0xf3, 0xa2, 0x0f, 0x85, 0x94, 0xc5, 0x32, 0xb7, 0xd2, 0xac, 0xb6, 0x34, 0x54, 0xe8, 0x71, + 0x68, 0xe5, 0xe5, 0xc3, 0xaa, 0xb6, 0xee, 0x64, 0xd9, 0xe4, 0x65, 0x23, 0x96, 0xe9, 0x6d, 0xbe, + 0x14, 0xcd, 0x53, 0x95, 0x9b, 0xd5, 0xbd, 0x6c, 0x24, 0x39, 0xd7, 0x62, 0x33, 0xcd, 0x6a, 0x73, + 0xe3, 0x33, 0x1f, 0x87, 0x66, 0xeb, 0x7b, 0xfb, 0x6e, 0x1d, 0x5b, 0x2d, 0xac, 0xb4, 0x2c, 0x65, + 0x93, 0x36, 0x0b, 0x59, 0xd6, 0xda, 0x7c, 0xf9, 0xcb, 0x40, 0xa7, 0x57, 0x3a, 0xd9, 0x53, 0xc1, + 0xfc, 0xa9, 0xca, 0xa1, 0x7c, 0x58, 0x5d, 0xfe, 0x34, 0x10, 0xde, 0x26, 0xc8, 0x31, 0xea, 0xc7, + 0x7e, 0x14, 0xc2, 0x95, 0x3b, 0x76, 0x81, 0xe1, 0xff, 0x48, 0x1f, 0x1d, 0xc4, 0xfe, 0xb5, 0x1f, + 0xdc, 0xf8, 0xb8, 0x43, 0x5e, 0xa3, 0xff, 0x23, 0xf8, 0x12, 0x53, 0xcf, 0x9b, 0x89, 0x28, 0x76, + 0x1c, 0x88, 0xb8, 0x9b, 0x00, 0x36, 0x08, 0x41, 0x47, 0x23, 0xf0, 0x82, 0x1b, 0xc1, 0x3f, 0x81, + 0x18, 0x07, 0x1e, 0xc3, 0x3b, 0xe4, 0x04, 0x0d, 0x42, 0x3a, 0xbd, 0x06, 0x26, 0x58, 0x30, 0xa1, + 0xae, 0x8f, 0x77, 0x49, 0x17, 0xed, 0x3a, 0x74, 0x02, 0x78, 0x8f, 0x1c, 0xa2, 0xee, 0xe7, 0x38, + 0x01, 0xdf, 0xf5, 0x00, 0xef, 0x93, 0x01, 0xea, 0x85, 0xd3, 0x60, 0x4c, 0x7d, 0x97, 0xcf, 0xf0, + 0x81, 0xfa, 0x27, 0x9f, 0x52, 0x07, 0xd8, 0x0c, 0x77, 0x49, 0x0f, 0xed, 0x25, 0x2e, 0x83, 0x00, + 0xf7, 0xc8, 0x19, 0x3a, 0x69, 0xa1, 0x98, 0x52, 0xee, 0xfa, 0x8e, 0x60, 0x89, 0x70, 0x30, 0x22, + 0xaf, 0x10, 0xd9, 0x1e, 0x87, 0x0e, 0xee, 0xbf, 0x24, 0xe7, 0xf8, 0xf0, 0x25, 0xf9, 0x84, 0xe2, + 0x81, 0xba, 0x9c, 0x9e, 0xfb, 0x01, 0x17, 0x33, 0xe0, 0x8a, 0x07, 0x86, 0x8f, 0xd4, 0xe5, 0x60, + 0x32, 0x02, 0xc6, 0x80, 0x09, 0x7d, 0x94, 0x63, 0xf2, 0x06, 0x9d, 0x7a, 0x6e, 0x02, 0x22, 0xe2, + 0x53, 0xa0, 0x13, 0x15, 0xa3, 0x19, 0x3c, 0x7a, 0xee, 0xa0, 0x8b, 0x3b, 0xb9, 0x32, 0xff, 0xd9, + 0xd9, 0xe8, 0x6c, 0xfb, 0xe5, 0x43, 0x55, 0x56, 0xd8, 0xf9, 0x3a, 0xfa, 0xe3, 0x2b, 0xe4, 0x32, + 0x2d, 0x0b, 0x53, 0xde, 0x17, 0x56, 0x91, 0x97, 0x6d, 0x95, 0xeb, 0x9d, 0xa9, 0x16, 0xf5, 0x5f, + 0x56, 0xe8, 0x63, 0xfb, 0xfd, 0x66, 0xec, 0x38, 0x94, 0x7e, 0x37, 0xce, 0x1d, 0x1d, 0x45, 0xb3, + 0xda, 0xd4, 0x50, 0xa1, 0x64, 0x68, 0xaa, 0xfa, 0xeb, 0x1f, 0x6b, 0x7e, 0x4e, 0xb3, 0x7a, 0xbe, + 0xe1, 0xe7, 0xc9, 0x70, 0xde, 0xf2, 0xcf, 0xc6, 0x85, 0x1e, 0xda, 0x36, 0xcd, 0x6a, 0xdb, 0xde, + 0x28, 0x6c, 0x3b, 0x19, 0xda, 0x76, 0xab, 0xb9, 0xdd, 0x6f, 0x0f, 0xf6, 0xfe, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x20, 0x89, 0xed, 0x7b, 0xda, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_category.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_category.pb.go new file mode 100644 index 0000000..f7ccf0b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_category.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_action_category.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The category of conversions that are associated with a ConversionAction. +type ConversionActionCategoryEnum_ConversionActionCategory int32 + +const ( + // Not specified. + ConversionActionCategoryEnum_UNSPECIFIED ConversionActionCategoryEnum_ConversionActionCategory = 0 + // Used for return value only. Represents value unknown in this version. + ConversionActionCategoryEnum_UNKNOWN ConversionActionCategoryEnum_ConversionActionCategory = 1 + // Default category. + ConversionActionCategoryEnum_DEFAULT ConversionActionCategoryEnum_ConversionActionCategory = 2 + // User visiting a page. + ConversionActionCategoryEnum_PAGE_VIEW ConversionActionCategoryEnum_ConversionActionCategory = 3 + // Purchase, sales, or "order placed" event. + ConversionActionCategoryEnum_PURCHASE ConversionActionCategoryEnum_ConversionActionCategory = 4 + // Signup user action. + ConversionActionCategoryEnum_SIGNUP ConversionActionCategoryEnum_ConversionActionCategory = 5 + // Lead-generating action. + ConversionActionCategoryEnum_LEAD ConversionActionCategoryEnum_ConversionActionCategory = 6 + // Software download action (as for an app). + ConversionActionCategoryEnum_DOWNLOAD ConversionActionCategoryEnum_ConversionActionCategory = 7 +) + +var ConversionActionCategoryEnum_ConversionActionCategory_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DEFAULT", + 3: "PAGE_VIEW", + 4: "PURCHASE", + 5: "SIGNUP", + 6: "LEAD", + 7: "DOWNLOAD", +} +var ConversionActionCategoryEnum_ConversionActionCategory_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DEFAULT": 2, + "PAGE_VIEW": 3, + "PURCHASE": 4, + "SIGNUP": 5, + "LEAD": 6, + "DOWNLOAD": 7, +} + +func (x ConversionActionCategoryEnum_ConversionActionCategory) String() string { + return proto.EnumName(ConversionActionCategoryEnum_ConversionActionCategory_name, int32(x)) +} +func (ConversionActionCategoryEnum_ConversionActionCategory) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_category_025dfe4d5b0cb5da, []int{0, 0} +} + +// Container for enum describing the category of conversions that are associated +// with a ConversionAction. 
+type ConversionActionCategoryEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionCategoryEnum) Reset() { *m = ConversionActionCategoryEnum{} } +func (m *ConversionActionCategoryEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionActionCategoryEnum) ProtoMessage() {} +func (*ConversionActionCategoryEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_category_025dfe4d5b0cb5da, []int{0} +} +func (m *ConversionActionCategoryEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionCategoryEnum.Unmarshal(m, b) +} +func (m *ConversionActionCategoryEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionCategoryEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionActionCategoryEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionCategoryEnum.Merge(dst, src) +} +func (m *ConversionActionCategoryEnum) XXX_Size() int { + return xxx_messageInfo_ConversionActionCategoryEnum.Size(m) +} +func (m *ConversionActionCategoryEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionCategoryEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionCategoryEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionActionCategoryEnum)(nil), "google.ads.googleads.v1.enums.ConversionActionCategoryEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionActionCategoryEnum_ConversionActionCategory", ConversionActionCategoryEnum_ConversionActionCategory_name, ConversionActionCategoryEnum_ConversionActionCategory_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_action_category.proto", fileDescriptor_conversion_action_category_025dfe4d5b0cb5da) +} + +var fileDescriptor_conversion_action_category_025dfe4d5b0cb5da = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x6a, 0xea, 0x40, + 0x14, 0x7d, 0x89, 0x3e, 0xf5, 0x8d, 0xef, 0xf1, 0x86, 0xac, 0x4a, 0xd1, 0x85, 0x7e, 0xc0, 0x84, + 0xd0, 0xdd, 0x14, 0x0a, 0x63, 0x32, 0xa6, 0xa1, 0x12, 0x43, 0x6d, 0x22, 0x94, 0x80, 0xa4, 0x49, + 0x08, 0x82, 0xce, 0x48, 0x26, 0x0a, 0xfd, 0x81, 0x7e, 0x48, 0x37, 0x85, 0x7e, 0x4a, 0x3f, 0xa5, + 0xcb, 0x7e, 0x41, 0x99, 0x89, 0x66, 0x67, 0x37, 0xc9, 0x99, 0xb9, 0xe7, 0x9c, 0x7b, 0xef, 0x19, + 0x70, 0x53, 0x70, 0x5e, 0x6c, 0x72, 0x33, 0xc9, 0x84, 0x59, 0x43, 0x89, 0x0e, 0x96, 0x99, 0xb3, + 0xfd, 0x56, 0x98, 0x29, 0x67, 0x87, 0xbc, 0x14, 0x6b, 0xce, 0x56, 0x49, 0x5a, 0xc9, 0x5f, 0x9a, + 0x54, 0x79, 0xc1, 0xcb, 0x67, 0xb4, 0x2b, 0x79, 0xc5, 0x8d, 0x61, 0x2d, 0x42, 0x49, 0x26, 0x50, + 0xa3, 0x47, 0x07, 0x0b, 0x29, 0xfd, 0xe5, 0xe0, 0x64, 0xbf, 0x5b, 0x9b, 0x09, 0x63, 0xbc, 0x4a, + 0xa4, 0x89, 0xa8, 0xc5, 0xe3, 0x37, 0x0d, 0x0c, 0xec, 0xa6, 0x03, 0x51, 0x0d, 0xec, 0xa3, 0x3f, + 0x65, 0xfb, 0xed, 0xf8, 0x45, 0x03, 0x17, 0xe7, 0x08, 0xc6, 0x7f, 0xd0, 0x0f, 0xfd, 0x45, 0x40, + 0x6d, 0x6f, 0xea, 0x51, 0x07, 0xfe, 0x32, 0xfa, 0xa0, 0x1b, 0xfa, 0x77, 0xfe, 0x7c, 0xe9, 0x43, + 0x4d, 0x1e, 0x1c, 0x3a, 0x25, 0xe1, 0xec, 0x01, 0xea, 0xc6, 0x3f, 0xf0, 0x27, 0x20, 0x2e, 0x5d, + 0x45, 0x1e, 0x5d, 0xc2, 0x96, 0xf1, 0x17, 0xf4, 0x82, 0xf0, 0xde, 0xbe, 0x25, 0x0b, 0x0a, 0xdb, + 0x06, 0x00, 0x9d, 0x85, 0xe7, 0xfa, 0x61, 0x00, 0x7f, 0x1b, 0x3d, 0xd0, 0x9e, 0x51, 0xe2, 0xc0, + 0x8e, 0xe4, 0x38, 0xf3, 0xa5, 0x3f, 0x9b, 0x13, 0x07, 0x76, 0x27, 
0x5f, 0x1a, 0x18, 0xa5, 0x7c, + 0x8b, 0x7e, 0xdc, 0x76, 0x32, 0x3c, 0x37, 0x6b, 0x20, 0xd7, 0x0d, 0xb4, 0xc7, 0xc9, 0x51, 0x5f, + 0xf0, 0x4d, 0xc2, 0x0a, 0xc4, 0xcb, 0xc2, 0x2c, 0x72, 0xa6, 0xc2, 0x38, 0xa5, 0xbf, 0x5b, 0x8b, + 0x33, 0x8f, 0x71, 0xad, 0xbe, 0xaf, 0x7a, 0xcb, 0x25, 0xe4, 0x5d, 0x1f, 0xba, 0xb5, 0x15, 0xc9, + 0x04, 0xaa, 0xa1, 0x44, 0x91, 0x85, 0x64, 0x70, 0xe2, 0xe3, 0x54, 0x8f, 0x49, 0x26, 0xe2, 0xa6, + 0x1e, 0x47, 0x56, 0xac, 0xea, 0x9f, 0xfa, 0xa8, 0xbe, 0xc4, 0x98, 0x64, 0x02, 0xe3, 0x86, 0x81, + 0x71, 0x64, 0x61, 0xac, 0x38, 0x4f, 0x1d, 0x35, 0xd8, 0xd5, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xf5, 0xae, 0x28, 0xe8, 0x24, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_counting_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_counting_type.pb.go new file mode 100644 index 0000000..5304a47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_counting_type.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_action_counting_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates how conversions for this action will be counted. For more +// information, see https://support.google.com/google-ads/answer/3438531. +type ConversionActionCountingTypeEnum_ConversionActionCountingType int32 + +const ( + // Not specified. + ConversionActionCountingTypeEnum_UNSPECIFIED ConversionActionCountingTypeEnum_ConversionActionCountingType = 0 + // Used for return value only. Represents value unknown in this version. + ConversionActionCountingTypeEnum_UNKNOWN ConversionActionCountingTypeEnum_ConversionActionCountingType = 1 + // Count only one conversion per click. + ConversionActionCountingTypeEnum_ONE_PER_CLICK ConversionActionCountingTypeEnum_ConversionActionCountingType = 2 + // Count all conversions per click. 
+ ConversionActionCountingTypeEnum_MANY_PER_CLICK ConversionActionCountingTypeEnum_ConversionActionCountingType = 3 +) + +var ConversionActionCountingTypeEnum_ConversionActionCountingType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ONE_PER_CLICK", + 3: "MANY_PER_CLICK", +} +var ConversionActionCountingTypeEnum_ConversionActionCountingType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ONE_PER_CLICK": 2, + "MANY_PER_CLICK": 3, +} + +func (x ConversionActionCountingTypeEnum_ConversionActionCountingType) String() string { + return proto.EnumName(ConversionActionCountingTypeEnum_ConversionActionCountingType_name, int32(x)) +} +func (ConversionActionCountingTypeEnum_ConversionActionCountingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_counting_type_8318613c00d341f1, []int{0, 0} +} + +// Container for enum describing the conversion deduplication mode for +// conversion optimizer. +type ConversionActionCountingTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionCountingTypeEnum) Reset() { *m = ConversionActionCountingTypeEnum{} } +func (m *ConversionActionCountingTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionActionCountingTypeEnum) ProtoMessage() {} +func (*ConversionActionCountingTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_counting_type_8318613c00d341f1, []int{0} +} +func (m *ConversionActionCountingTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionCountingTypeEnum.Unmarshal(m, b) +} +func (m *ConversionActionCountingTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionCountingTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionActionCountingTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionCountingTypeEnum.Merge(dst, src) +} +func (m *ConversionActionCountingTypeEnum) XXX_Size() int { + return xxx_messageInfo_ConversionActionCountingTypeEnum.Size(m) +} +func (m *ConversionActionCountingTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionCountingTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionCountingTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionActionCountingTypeEnum)(nil), "google.ads.googleads.v1.enums.ConversionActionCountingTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionActionCountingTypeEnum_ConversionActionCountingType", ConversionActionCountingTypeEnum_ConversionActionCountingType_name, ConversionActionCountingTypeEnum_ConversionActionCountingType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_action_counting_type.proto", fileDescriptor_conversion_action_counting_type_8318613c00d341f1) +} + +var fileDescriptor_conversion_action_counting_type_8318613c00d341f1 = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xf3, 0x30, + 0x1c, 0xfd, 0xda, 0xc1, 0x27, 0x64, 0xa8, 0xb3, 0x97, 0xb2, 0x81, 0xdb, 0x03, 0xa4, 0x14, 0xef, + 0xe2, 0x55, 0x16, 0xeb, 0x18, 0xd3, 0xac, 0xa8, 0x9b, 0x28, 0x85, 0x12, 0xdb, 0x10, 0x0a, 0x5b, + 0x52, 0x96, 0x6e, 0xb0, 0x27, 0xf0, 0x3d, 0xbc, 0xf4, 0x51, 0x7c, 0x14, 0x1f, 0xc0, 0x6b, 0x49, + 0xb2, 0x15, 0x6f, 0xdc, 0x4d, 0x72, 0xf8, 0xfd, 0x39, 0xe7, 
0x77, 0x0e, 0x20, 0x42, 0x29, 0xb1, + 0xe0, 0x21, 0x2b, 0x74, 0xe8, 0xa0, 0x41, 0x9b, 0x28, 0xe4, 0x72, 0xbd, 0xd4, 0x61, 0xae, 0xe4, + 0x86, 0xaf, 0x74, 0xa9, 0x64, 0xc6, 0xf2, 0xda, 0x7c, 0xb9, 0x5a, 0xcb, 0xba, 0x94, 0x22, 0xab, + 0xb7, 0x15, 0x87, 0xd5, 0x4a, 0xd5, 0x2a, 0xe8, 0xb9, 0x4d, 0xc8, 0x0a, 0x0d, 0x1b, 0x12, 0xb8, + 0x89, 0xa0, 0x25, 0x39, 0xef, 0xee, 0x35, 0xaa, 0x32, 0x64, 0x52, 0xaa, 0x9a, 0x19, 0x26, 0xed, + 0x96, 0x07, 0x6f, 0x1e, 0xb8, 0x20, 0x8d, 0x0c, 0xb6, 0x2a, 0x64, 0x27, 0xf2, 0xb8, 0xad, 0x78, + 0x2c, 0xd7, 0xcb, 0x41, 0x0e, 0xba, 0x87, 0x66, 0x82, 0x53, 0xd0, 0x9e, 0xd1, 0x87, 0x24, 0x26, + 0xe3, 0x9b, 0x71, 0x7c, 0xdd, 0xf9, 0x17, 0xb4, 0xc1, 0xd1, 0x8c, 0x4e, 0xe8, 0xf4, 0x89, 0x76, + 0xbc, 0xe0, 0x0c, 0x1c, 0x4f, 0x69, 0x9c, 0x25, 0xf1, 0x7d, 0x46, 0x6e, 0xc7, 0x64, 0xd2, 0xf1, + 0x83, 0x00, 0x9c, 0xdc, 0x61, 0xfa, 0xfc, 0xab, 0xd6, 0x1a, 0x7e, 0x7b, 0xa0, 0x9f, 0xab, 0x25, + 0x3c, 0xe8, 0x66, 0xd8, 0x3f, 0x74, 0x48, 0x62, 0x2c, 0x25, 0xde, 0xcb, 0x70, 0xc7, 0x21, 0xd4, + 0x82, 0x49, 0x01, 0xd5, 0x4a, 0x84, 0x82, 0x4b, 0x6b, 0x78, 0x1f, 0x73, 0x55, 0xea, 0x3f, 0x52, + 0xbf, 0xb2, 0xef, 0xbb, 0xdf, 0x1a, 0x61, 0xfc, 0xe1, 0xf7, 0x46, 0x8e, 0x0a, 0x17, 0x1a, 0x3a, + 0x68, 0xd0, 0x3c, 0x82, 0x26, 0x18, 0xfd, 0xb9, 0xef, 0xa7, 0xb8, 0xd0, 0x69, 0xd3, 0x4f, 0xe7, + 0x51, 0x6a, 0xfb, 0x5f, 0x7e, 0xdf, 0x15, 0x11, 0xc2, 0x85, 0x46, 0xa8, 0x99, 0x40, 0x68, 0x1e, + 0x21, 0x64, 0x67, 0x5e, 0xff, 0xdb, 0xc3, 0x2e, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x15, 0xba, + 0x71, 0x06, 0x0d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_status.pb.go new file mode 100644 index 0000000..7872440 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_status.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_action_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a conversion action. +type ConversionActionStatusEnum_ConversionActionStatus int32 + +const ( + // Not specified. + ConversionActionStatusEnum_UNSPECIFIED ConversionActionStatusEnum_ConversionActionStatus = 0 + // Used for return value only. Represents value unknown in this version. + ConversionActionStatusEnum_UNKNOWN ConversionActionStatusEnum_ConversionActionStatus = 1 + // Conversions will be recorded. + ConversionActionStatusEnum_ENABLED ConversionActionStatusEnum_ConversionActionStatus = 2 + // Conversions will not be recorded. + ConversionActionStatusEnum_REMOVED ConversionActionStatusEnum_ConversionActionStatus = 3 + // Conversions will not be recorded and the conversion action will not + // appear in the UI. 
+ ConversionActionStatusEnum_HIDDEN ConversionActionStatusEnum_ConversionActionStatus = 4 +) + +var ConversionActionStatusEnum_ConversionActionStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", + 4: "HIDDEN", +} +var ConversionActionStatusEnum_ConversionActionStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, + "HIDDEN": 4, +} + +func (x ConversionActionStatusEnum_ConversionActionStatus) String() string { + return proto.EnumName(ConversionActionStatusEnum_ConversionActionStatus_name, int32(x)) +} +func (ConversionActionStatusEnum_ConversionActionStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_status_9c04a392c42bd2a7, []int{0, 0} +} + +// Container for enum describing possible statuses of a conversion action. +type ConversionActionStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionStatusEnum) Reset() { *m = ConversionActionStatusEnum{} } +func (m *ConversionActionStatusEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionActionStatusEnum) ProtoMessage() {} +func (*ConversionActionStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_status_9c04a392c42bd2a7, []int{0} +} +func (m *ConversionActionStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionStatusEnum.Unmarshal(m, b) +} +func (m *ConversionActionStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionStatusEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionActionStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionStatusEnum.Merge(dst, src) +} +func (m *ConversionActionStatusEnum) XXX_Size() int { + return xxx_messageInfo_ConversionActionStatusEnum.Size(m) +} +func (m *ConversionActionStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionActionStatusEnum)(nil), "google.ads.googleads.v1.enums.ConversionActionStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionActionStatusEnum_ConversionActionStatus", ConversionActionStatusEnum_ConversionActionStatus_name, ConversionActionStatusEnum_ConversionActionStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_action_status.proto", fileDescriptor_conversion_action_status_9c04a392c42bd2a7) +} + +var fileDescriptor_conversion_action_status_9c04a392c42bd2a7 = []byte{ + // 316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xb1, 0x4e, 0xf3, 0x30, + 0x18, 0xfc, 0x9b, 0xfe, 0x2a, 0x92, 0x3b, 0x10, 0x65, 0x60, 0x28, 0x74, 0x68, 0x1f, 0xc0, 0x56, + 0xc4, 0x66, 0x58, 0x9c, 0xc6, 0x94, 0x0a, 0x70, 0x2b, 0xaa, 0x06, 0x09, 0x45, 0x42, 0xa6, 0x89, + 0xac, 0x4a, 0x8d, 0x1d, 0xc5, 0x49, 0x06, 0x1e, 0x87, 0x91, 0x47, 0xe1, 0x51, 0x98, 0x78, 0x04, + 0x64, 0xa7, 0xc9, 0x54, 0x58, 0xec, 0xfb, 0x7c, 0xdf, 0x9d, 0xce, 0x07, 0xae, 0x85, 0x52, 0x62, + 0x9f, 0x22, 0x9e, 0x68, 0xd4, 0x40, 0x83, 0x6a, 0x1f, 0xa5, 0xb2, 0xca, 0x34, 0xda, 0x2a, 0x59, + 0xa7, 0x85, 0xde, 0x29, 0xf9, 0xc2, 0xb7, 0xa5, 0xb9, 0x74, 0xc9, 0xcb, 0x4a, 0xc3, 0xbc, 0x50, + 0xa5, 0xf2, 0xc6, 0x8d, 0x04, 0xf2, 0x44, 0xc3, 0x4e, 
0x0d, 0x6b, 0x1f, 0x5a, 0xf5, 0xe8, 0xa2, + 0x35, 0xcf, 0x77, 0x88, 0x4b, 0xa9, 0x4a, 0x6e, 0x2c, 0x0e, 0xe2, 0xe9, 0x1b, 0x18, 0xcd, 0x3a, + 0x7b, 0x62, 0xdd, 0xd7, 0xd6, 0x9c, 0xca, 0x2a, 0x9b, 0xc6, 0xe0, 0xec, 0x38, 0xeb, 0x9d, 0x82, + 0xe1, 0x86, 0xad, 0x57, 0x74, 0xb6, 0xb8, 0x59, 0xd0, 0xd0, 0xfd, 0xe7, 0x0d, 0xc1, 0xc9, 0x86, + 0xdd, 0xb1, 0xe5, 0x13, 0x73, 0x7b, 0x66, 0xa0, 0x8c, 0x04, 0xf7, 0x34, 0x74, 0x1d, 0x33, 0x3c, + 0xd2, 0x87, 0x65, 0x44, 0x43, 0xb7, 0xef, 0x01, 0x30, 0xb8, 0x5d, 0x84, 0x21, 0x65, 0xee, 0xff, + 0xe0, 0xbb, 0x07, 0x26, 0x5b, 0x95, 0xc1, 0x3f, 0xf3, 0x07, 0xe7, 0xc7, 0x13, 0xac, 0x4c, 0xfc, + 0x55, 0xef, 0x39, 0x38, 0xa8, 0x85, 0xda, 0x73, 0x29, 0xa0, 0x2a, 0x04, 0x12, 0xa9, 0xb4, 0x9f, + 0x6b, 0xbb, 0xcc, 0x77, 0xfa, 0x97, 0x6a, 0xaf, 0xec, 0xf9, 0xee, 0xf4, 0xe7, 0x84, 0x7c, 0x38, + 0xe3, 0x79, 0x63, 0x45, 0x12, 0x0d, 0x1b, 0x68, 0x50, 0xe4, 0x43, 0xd3, 0x85, 0xfe, 0x6c, 0xf9, + 0x98, 0x24, 0x3a, 0xee, 0xf8, 0x38, 0xf2, 0x63, 0xcb, 0x7f, 0x39, 0x93, 0xe6, 0x11, 0x63, 0x92, + 0x68, 0x8c, 0xbb, 0x0d, 0x8c, 0x23, 0x1f, 0x63, 0xbb, 0xf3, 0x3a, 0xb0, 0xc1, 0x2e, 0x7f, 0x02, + 0x00, 0x00, 0xff, 0xff, 0xc5, 0x42, 0x97, 0x65, 0xf2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_type.pb.go new file mode 100644 index 0000000..2af8b5a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_action_type.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_action_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible types of a conversion action. +type ConversionActionTypeEnum_ConversionActionType int32 + +const ( + // Not specified. + ConversionActionTypeEnum_UNSPECIFIED ConversionActionTypeEnum_ConversionActionType = 0 + // Used for return value only. Represents value unknown in this version. + ConversionActionTypeEnum_UNKNOWN ConversionActionTypeEnum_ConversionActionType = 1 + // Conversions that occur when a user clicks on an ad's call extension. + ConversionActionTypeEnum_AD_CALL ConversionActionTypeEnum_ConversionActionType = 2 + // Conversions that occur when a user on a mobile device clicks a phone + // number. + ConversionActionTypeEnum_CLICK_TO_CALL ConversionActionTypeEnum_ConversionActionType = 3 + // Conversions that occur when a user downloads a mobile app from the Google + // Play Store. + ConversionActionTypeEnum_GOOGLE_PLAY_DOWNLOAD ConversionActionTypeEnum_ConversionActionType = 4 + // Conversions that occur when a user makes a purchase in an app through + // Android billing. 
+ ConversionActionTypeEnum_GOOGLE_PLAY_IN_APP_PURCHASE ConversionActionTypeEnum_ConversionActionType = 5 + // Call conversions that are tracked by the advertiser and uploaded. + ConversionActionTypeEnum_UPLOAD_CALLS ConversionActionTypeEnum_ConversionActionType = 6 + // Conversions that are tracked by the advertiser and uploaded with + // attributed clicks. + ConversionActionTypeEnum_UPLOAD_CLICKS ConversionActionTypeEnum_ConversionActionType = 7 + // Conversions that occur on a webpage. + ConversionActionTypeEnum_WEBPAGE ConversionActionTypeEnum_ConversionActionType = 8 + // Conversions that occur when a user calls a dynamically-generated phone + // number from an advertiser's website. + ConversionActionTypeEnum_WEBSITE_CALL ConversionActionTypeEnum_ConversionActionType = 9 +) + +var ConversionActionTypeEnum_ConversionActionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_CALL", + 3: "CLICK_TO_CALL", + 4: "GOOGLE_PLAY_DOWNLOAD", + 5: "GOOGLE_PLAY_IN_APP_PURCHASE", + 6: "UPLOAD_CALLS", + 7: "UPLOAD_CLICKS", + 8: "WEBPAGE", + 9: "WEBSITE_CALL", +} +var ConversionActionTypeEnum_ConversionActionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_CALL": 2, + "CLICK_TO_CALL": 3, + "GOOGLE_PLAY_DOWNLOAD": 4, + "GOOGLE_PLAY_IN_APP_PURCHASE": 5, + "UPLOAD_CALLS": 6, + "UPLOAD_CLICKS": 7, + "WEBPAGE": 8, + "WEBSITE_CALL": 9, +} + +func (x ConversionActionTypeEnum_ConversionActionType) String() string { + return proto.EnumName(ConversionActionTypeEnum_ConversionActionType_name, int32(x)) +} +func (ConversionActionTypeEnum_ConversionActionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_type_ef6e965e011b0243, []int{0, 0} +} + +// Container for enum describing possible types of a conversion action. 
+type ConversionActionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionTypeEnum) Reset() { *m = ConversionActionTypeEnum{} } +func (m *ConversionActionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionActionTypeEnum) ProtoMessage() {} +func (*ConversionActionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_type_ef6e965e011b0243, []int{0} +} +func (m *ConversionActionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionTypeEnum.Unmarshal(m, b) +} +func (m *ConversionActionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionActionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionTypeEnum.Merge(dst, src) +} +func (m *ConversionActionTypeEnum) XXX_Size() int { + return xxx_messageInfo_ConversionActionTypeEnum.Size(m) +} +func (m *ConversionActionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionActionTypeEnum)(nil), "google.ads.googleads.v1.enums.ConversionActionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionActionTypeEnum_ConversionActionType", ConversionActionTypeEnum_ConversionActionType_name, ConversionActionTypeEnum_ConversionActionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_action_type.proto", fileDescriptor_conversion_action_type_ef6e965e011b0243) +} + +var fileDescriptor_conversion_action_type_ef6e965e011b0243 = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xdf, 0x8e, 0x94, 0x30, + 0x14, 0xc6, 0x85, 0xd5, 0x5d, 0xed, 0x6a, 0xac, 0x64, 0x2f, 0xd6, 0x3f, 0x1b, 0xb3, 0xfb, 0x00, + 0x25, 0xc4, 0xbb, 0x7a, 0x55, 0xa0, 0x22, 0x59, 0x02, 0x8d, 0x0c, 0x43, 0x34, 0x24, 0x04, 0x07, + 0x42, 0x26, 0xd9, 0x69, 0xc9, 0x94, 0x99, 0x64, 0x5e, 0xc7, 0x4b, 0x1f, 0xc5, 0x5b, 0xdf, 0xc2, + 0x0b, 0xe3, 0x23, 0x98, 0xb6, 0x42, 0xbc, 0x18, 0xbd, 0x81, 0xd3, 0xf3, 0x9d, 0xef, 0x77, 0xda, + 0x73, 0x00, 0xee, 0x85, 0xe8, 0xef, 0x3a, 0xb7, 0x69, 0xa5, 0x6b, 0x42, 0x15, 0xed, 0x3d, 0xb7, + 0xe3, 0xbb, 0x8d, 0x74, 0x57, 0x82, 0xef, 0xbb, 0xad, 0x5c, 0x0b, 0x5e, 0x37, 0xab, 0x51, 0xfd, + 0xc6, 0xc3, 0xd0, 0xa1, 0x61, 0x2b, 0x46, 0xe1, 0x5c, 0x19, 0x03, 0x6a, 0x5a, 0x89, 0x66, 0x2f, + 0xda, 0x7b, 0x48, 0x7b, 0x5f, 0xbc, 0x9a, 0xd0, 0xc3, 0xda, 0x6d, 0x38, 0x17, 0x63, 0xa3, 0x00, + 0xd2, 0x98, 0x6f, 0x7e, 0x59, 0xe0, 0x32, 0x98, 0xe9, 0x44, 0xc3, 0x17, 0x87, 0xa1, 0xa3, 0x7c, + 0xb7, 0xb9, 0xf9, 0x6e, 0x81, 0x8b, 0x63, 0xa2, 0xf3, 0x14, 0x9c, 0x17, 0x69, 0xce, 0x68, 0x10, + 0xbf, 0x8b, 0x69, 0x08, 0xef, 0x39, 0xe7, 0xe0, 0xac, 0x48, 0x6f, 0xd3, 0xac, 0x4c, 0xa1, 0xa5, + 0x0e, 0x24, 0xac, 0x03, 0x92, 0x24, 0xd0, 0x76, 0x9e, 0x81, 0x27, 0x41, 0x12, 0x07, 0xb7, 0xf5, + 0x22, 0x33, 0xa9, 0x13, 0xe7, 0x12, 0x5c, 0x44, 0x59, 0x16, 0x25, 0xb4, 0x66, 0x09, 0xf9, 0x58, + 0x87, 0x59, 0x99, 0x26, 0x19, 0x09, 0xe1, 0x7d, 0xe7, 0x35, 0x78, 0xf9, 0xb7, 0x12, 0xa7, 0x35, + 0x61, 0xac, 0x66, 0xc5, 0x87, 0xe0, 0x3d, 0xc9, 0x29, 0x7c, 0xe0, 0x40, 0xf0, 0xb8, 0x60, 0xaa, + 0x58, 0xb3, 0x72, 0x78, 0xaa, 0xf8, 0x53, 0x46, 0xb5, 0xc9, 0xe1, 0x99, 0xea, 0x5f, 
0x52, 0x9f, + 0x91, 0x88, 0xc2, 0x87, 0xca, 0x51, 0x52, 0x3f, 0x8f, 0x17, 0xd4, 0xb4, 0x7f, 0xe4, 0xff, 0xb4, + 0xc0, 0xf5, 0x4a, 0x6c, 0xd0, 0x7f, 0xc7, 0xe6, 0x3f, 0x3f, 0xf6, 0x70, 0xa6, 0x66, 0xc6, 0xac, + 0x4f, 0xfe, 0x1f, 0x6f, 0x2f, 0xee, 0x1a, 0xde, 0x23, 0xb1, 0xed, 0xdd, 0xbe, 0xe3, 0x7a, 0xa2, + 0xd3, 0xfa, 0x86, 0xb5, 0xfc, 0xc7, 0x36, 0xdf, 0xea, 0xef, 0x17, 0xfb, 0x24, 0x22, 0xe4, 0xab, + 0x7d, 0x15, 0x19, 0x14, 0x69, 0x25, 0x32, 0xa1, 0x8a, 0x96, 0x1e, 0x52, 0x1b, 0x90, 0xdf, 0x26, + 0xbd, 0x22, 0xad, 0xac, 0x66, 0xbd, 0x5a, 0x7a, 0x95, 0xd6, 0x7f, 0xd8, 0xd7, 0x26, 0x89, 0x31, + 0x69, 0x25, 0xc6, 0x73, 0x05, 0xc6, 0x4b, 0x0f, 0x63, 0x5d, 0xf3, 0xf9, 0x54, 0x5f, 0xec, 0xcd, + 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x58, 0xf5, 0x21, 0xad, 0x65, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_adjustment_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_adjustment_type.pb.go new file mode 100644 index 0000000..9ed6359 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_adjustment_type.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_adjustment_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The different actions advertisers can take to adjust the conversions that +// they already reported. Retractions negate a conversion. Restatements change +// the value of a conversion. +type ConversionAdjustmentTypeEnum_ConversionAdjustmentType int32 + +const ( + // Not specified. + ConversionAdjustmentTypeEnum_UNSPECIFIED ConversionAdjustmentTypeEnum_ConversionAdjustmentType = 0 + // Represents value unknown in this version. + ConversionAdjustmentTypeEnum_UNKNOWN ConversionAdjustmentTypeEnum_ConversionAdjustmentType = 1 + // Negates a conversion so that its total value and count are both zero. + ConversionAdjustmentTypeEnum_RETRACTION ConversionAdjustmentTypeEnum_ConversionAdjustmentType = 2 + // Changes the value of a conversion. 
+ ConversionAdjustmentTypeEnum_RESTATEMENT ConversionAdjustmentTypeEnum_ConversionAdjustmentType = 3 +) + +var ConversionAdjustmentTypeEnum_ConversionAdjustmentType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "RETRACTION", + 3: "RESTATEMENT", +} +var ConversionAdjustmentTypeEnum_ConversionAdjustmentType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "RETRACTION": 2, + "RESTATEMENT": 3, +} + +func (x ConversionAdjustmentTypeEnum_ConversionAdjustmentType) String() string { + return proto.EnumName(ConversionAdjustmentTypeEnum_ConversionAdjustmentType_name, int32(x)) +} +func (ConversionAdjustmentTypeEnum_ConversionAdjustmentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_type_aa7254239d7954f6, []int{0, 0} +} + +// Container for enum describing conversion adjustment types. +type ConversionAdjustmentTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAdjustmentTypeEnum) Reset() { *m = ConversionAdjustmentTypeEnum{} } +func (m *ConversionAdjustmentTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionAdjustmentTypeEnum) ProtoMessage() {} +func (*ConversionAdjustmentTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_type_aa7254239d7954f6, []int{0} +} +func (m *ConversionAdjustmentTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAdjustmentTypeEnum.Unmarshal(m, b) +} +func (m *ConversionAdjustmentTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAdjustmentTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionAdjustmentTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAdjustmentTypeEnum.Merge(dst, src) +} +func (m *ConversionAdjustmentTypeEnum) XXX_Size() int { + return xxx_messageInfo_ConversionAdjustmentTypeEnum.Size(m) +} +func (m *ConversionAdjustmentTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAdjustmentTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAdjustmentTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionAdjustmentTypeEnum)(nil), "google.ads.googleads.v1.enums.ConversionAdjustmentTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType", ConversionAdjustmentTypeEnum_ConversionAdjustmentType_name, ConversionAdjustmentTypeEnum_ConversionAdjustmentType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_adjustment_type.proto", fileDescriptor_conversion_adjustment_type_aa7254239d7954f6) +} + +var fileDescriptor_conversion_adjustment_type_aa7254239d7954f6 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xf3, 0x30, + 0x1c, 0xfd, 0xd6, 0xc1, 0x27, 0x64, 0xa0, 0xa5, 0x57, 0x22, 0xdb, 0xc5, 0xf6, 0x00, 0x29, 0xc5, + 0xbb, 0x08, 0x42, 0x36, 0xe3, 0x18, 0x62, 0x37, 0xb6, 0x6c, 0xa2, 0x14, 0x46, 0x5c, 0x42, 0xa8, + 0xac, 0x49, 0x59, 0xb2, 0xc1, 0x5e, 0xc7, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0xd2, 0x27, 0x90, 0x26, + 0xae, 0x77, 0xf5, 0xa6, 0x1c, 0x7a, 0xfe, 0xfc, 0x4e, 0x0e, 0xb8, 0x95, 0x5a, 0xcb, 0xad, 0x88, + 0x19, 0x37, 0xb1, 0x87, 0x15, 0x3a, 0x24, 0xb1, 0x50, 0xfb, 0xc2, 0xc4, 0x1b, 0xad, 0x0e, 0x62, + 0x67, 0x72, 0xad, 0xd6, 0x8c, 0xbf, 0xed, 0x8d, 0x2d, 0x84, 0xb2, 0x6b, 0x7b, 0x2c, 0x05, 
0x2c, + 0x77, 0xda, 0xea, 0xa8, 0xe7, 0x4d, 0x90, 0x71, 0x03, 0x6b, 0x3f, 0x3c, 0x24, 0xd0, 0xf9, 0xaf, + 0xba, 0xa7, 0xf8, 0x32, 0x8f, 0x99, 0x52, 0xda, 0x32, 0x9b, 0x6b, 0x65, 0xbc, 0x79, 0x70, 0x04, + 0xdd, 0x51, 0x7d, 0x00, 0xd7, 0xf9, 0xf4, 0x58, 0x0a, 0xa2, 0xf6, 0xc5, 0xe0, 0x19, 0x5c, 0x36, + 0xf1, 0xd1, 0x05, 0xe8, 0x2c, 0xd3, 0xc5, 0x8c, 0x8c, 0x26, 0xf7, 0x13, 0x72, 0x17, 0xfe, 0x8b, + 0x3a, 0xe0, 0x6c, 0x99, 0x3e, 0xa4, 0xd3, 0xa7, 0x34, 0x6c, 0x45, 0xe7, 0x00, 0xcc, 0x09, 0x9d, + 0xe3, 0x11, 0x9d, 0x4c, 0xd3, 0x30, 0xa8, 0xd4, 0x73, 0xb2, 0xa0, 0x98, 0x92, 0x47, 0x92, 0xd2, + 0xb0, 0x3d, 0xfc, 0x6e, 0x81, 0xfe, 0x46, 0x17, 0xf0, 0xcf, 0xfa, 0xc3, 0x5e, 0xd3, 0xf9, 0x59, + 0xd5, 0x7f, 0xd6, 0x7a, 0x19, 0xfe, 0xfa, 0xa5, 0xde, 0x32, 0x25, 0xa1, 0xde, 0xc9, 0x58, 0x0a, + 0xe5, 0x5e, 0x77, 0x9a, 0xb3, 0xcc, 0x4d, 0xc3, 0xba, 0x37, 0xee, 0xfb, 0x1e, 0xb4, 0xc7, 0x18, + 0x7f, 0x04, 0xbd, 0xb1, 0x8f, 0xc2, 0xdc, 0x40, 0x0f, 0x2b, 0xb4, 0x4a, 0x60, 0x35, 0x85, 0xf9, + 0x3c, 0xf1, 0x19, 0xe6, 0x26, 0xab, 0xf9, 0x6c, 0x95, 0x64, 0x8e, 0xff, 0x0a, 0xfa, 0xfe, 0x27, + 0x42, 0x98, 0x1b, 0x84, 0x6a, 0x05, 0x42, 0xab, 0x04, 0x21, 0xa7, 0x79, 0xfd, 0xef, 0x8a, 0x5d, + 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x8c, 0xca, 0xf8, 0xcb, 0xf5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_attribution_event_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_attribution_event_type.pb.go new file mode 100644 index 0000000..239f6a3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_attribution_event_type.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_attribution_event_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The event type of conversions that are attributed to. +type ConversionAttributionEventTypeEnum_ConversionAttributionEventType int32 + +const ( + // Not specified. + ConversionAttributionEventTypeEnum_UNSPECIFIED ConversionAttributionEventTypeEnum_ConversionAttributionEventType = 0 + // Represents value unknown in this version. + ConversionAttributionEventTypeEnum_UNKNOWN ConversionAttributionEventTypeEnum_ConversionAttributionEventType = 1 + // The conversion is attributed to an impression. + ConversionAttributionEventTypeEnum_IMPRESSION ConversionAttributionEventTypeEnum_ConversionAttributionEventType = 2 + // The conversion is attributed to an interaction. 
+ ConversionAttributionEventTypeEnum_INTERACTION ConversionAttributionEventTypeEnum_ConversionAttributionEventType = 3 +) + +var ConversionAttributionEventTypeEnum_ConversionAttributionEventType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IMPRESSION", + 3: "INTERACTION", +} +var ConversionAttributionEventTypeEnum_ConversionAttributionEventType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IMPRESSION": 2, + "INTERACTION": 3, +} + +func (x ConversionAttributionEventTypeEnum_ConversionAttributionEventType) String() string { + return proto.EnumName(ConversionAttributionEventTypeEnum_ConversionAttributionEventType_name, int32(x)) +} +func (ConversionAttributionEventTypeEnum_ConversionAttributionEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_attribution_event_type_7490467adc98b209, []int{0, 0} +} + +// Container for enum indicating the event type the conversion is attributed to. +type ConversionAttributionEventTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAttributionEventTypeEnum) Reset() { *m = ConversionAttributionEventTypeEnum{} } +func (m *ConversionAttributionEventTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionAttributionEventTypeEnum) ProtoMessage() {} +func (*ConversionAttributionEventTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_attribution_event_type_7490467adc98b209, []int{0} +} +func (m *ConversionAttributionEventTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAttributionEventTypeEnum.Unmarshal(m, b) +} +func (m *ConversionAttributionEventTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAttributionEventTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionAttributionEventTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAttributionEventTypeEnum.Merge(dst, src) +} +func (m *ConversionAttributionEventTypeEnum) XXX_Size() int { + return xxx_messageInfo_ConversionAttributionEventTypeEnum.Size(m) +} +func (m *ConversionAttributionEventTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAttributionEventTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAttributionEventTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionAttributionEventTypeEnum)(nil), "google.ads.googleads.v1.enums.ConversionAttributionEventTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionAttributionEventTypeEnum_ConversionAttributionEventType", ConversionAttributionEventTypeEnum_ConversionAttributionEventType_name, ConversionAttributionEventTypeEnum_ConversionAttributionEventType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_attribution_event_type.proto", fileDescriptor_conversion_attribution_event_type_7490467adc98b209) +} + +var fileDescriptor_conversion_attribution_event_type_7490467adc98b209 = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x18, 0xfd, 0xb5, 0x83, 0x9f, 0x90, 0x81, 0x8e, 0x5e, 0x8a, 0x13, 0x36, 0xef, 0x53, 0x8a, 0x77, + 0xf1, 0x2a, 0x9b, 0x71, 0x14, 0xb1, 0x2b, 0xfb, 0x27, 0x48, 0x61, 0x64, 0x6b, 0x08, 0x85, 0x2d, + 0x29, 0x4d, 0x56, 0xd8, 0x03, 0xf8, 0x22, 0x5e, 0xfa, 0x28, 0x3e, 0x8a, 0x4f, 0xe0, 0xa5, 0x24, + 0x71, 
0xf5, 0xca, 0xde, 0x84, 0x43, 0xbe, 0xf3, 0x9d, 0xf3, 0x9d, 0x03, 0x08, 0x97, 0x92, 0xef, + 0x58, 0x48, 0x73, 0x15, 0x3a, 0x68, 0x50, 0x1d, 0x85, 0x4c, 0x1c, 0xf6, 0x2a, 0xdc, 0x4a, 0x51, + 0xb3, 0x4a, 0x15, 0x52, 0xac, 0xa9, 0xd6, 0x55, 0xb1, 0x39, 0x68, 0x83, 0x59, 0xcd, 0x84, 0x5e, + 0xeb, 0x63, 0xc9, 0x60, 0x59, 0x49, 0x2d, 0x83, 0xbe, 0xdb, 0x85, 0x34, 0x57, 0xb0, 0x91, 0x81, + 0x75, 0x04, 0xad, 0xcc, 0xe5, 0xd5, 0xc9, 0xa5, 0x2c, 0x42, 0x2a, 0x84, 0xd4, 0xd4, 0xc8, 0x28, + 0xb7, 0x3c, 0x7c, 0xf5, 0xc0, 0x70, 0xdc, 0x18, 0xe1, 0x5f, 0x1f, 0x62, 0x6c, 0x16, 0xc7, 0x92, + 0x11, 0x71, 0xd8, 0x0f, 0xd7, 0xe0, 0xba, 0x9d, 0x15, 0x5c, 0x80, 0xee, 0x32, 0x99, 0xa7, 0x64, + 0x1c, 0x3f, 0xc4, 0xe4, 0xbe, 0xf7, 0x2f, 0xe8, 0x82, 0xb3, 0x65, 0xf2, 0x98, 0x4c, 0x9f, 0x93, + 0x9e, 0x17, 0x9c, 0x03, 0x10, 0x3f, 0xa5, 0x33, 0x32, 0x9f, 0xc7, 0xd3, 0xa4, 0xe7, 0x1b, 0x76, + 0x9c, 0x2c, 0xc8, 0x0c, 0x8f, 0x17, 0xe6, 0xa3, 0x33, 0xfa, 0xf2, 0xc0, 0x60, 0x2b, 0xf7, 0xb0, + 0x35, 0xcb, 0xe8, 0xa6, 0xfd, 0x88, 0xd4, 0x44, 0x4a, 0xbd, 0x97, 0xd1, 0x8f, 0x0a, 0x97, 0x3b, + 0x2a, 0x38, 0x94, 0x15, 0x0f, 0x39, 0x13, 0x36, 0xf0, 0xa9, 0xe8, 0xb2, 0x50, 0x7f, 0xf4, 0x7e, + 0x67, 0xdf, 0x37, 0xbf, 0x33, 0xc1, 0xf8, 0xdd, 0xef, 0x4f, 0x9c, 0x14, 0xce, 0x15, 0x74, 0xd0, + 0xa0, 0x55, 0x04, 0x4d, 0x2d, 0xea, 0xe3, 0x34, 0xcf, 0x70, 0xae, 0xb2, 0x66, 0x9e, 0xad, 0xa2, + 0xcc, 0xce, 0x3f, 0xfd, 0x81, 0xfb, 0x44, 0x08, 0xe7, 0x0a, 0xa1, 0x86, 0x81, 0xd0, 0x2a, 0x42, + 0xc8, 0x72, 0x36, 0xff, 0xed, 0x61, 0xb7, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x2d, 0xc5, 0x1f, + 0x04, 0x0f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_lag_bucket.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_lag_bucket.pb.go new file mode 100644 index 0000000..6a05d54 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_lag_bucket.pb.go @@ -0,0 +1,216 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_lag_bucket.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum representing the number of days between impression and conversion. +type ConversionLagBucketEnum_ConversionLagBucket int32 + +const ( + // Not specified. + ConversionLagBucketEnum_UNSPECIFIED ConversionLagBucketEnum_ConversionLagBucket = 0 + // Used for return value only. Represents value unknown in this version. + ConversionLagBucketEnum_UNKNOWN ConversionLagBucketEnum_ConversionLagBucket = 1 + // Conversion lag bucket from 0 to 1 day. 0 day is included, 1 day is not. + ConversionLagBucketEnum_LESS_THAN_ONE_DAY ConversionLagBucketEnum_ConversionLagBucket = 2 + // Conversion lag bucket from 1 to 2 days. 1 day is included, 2 days is not. 
+ ConversionLagBucketEnum_ONE_TO_TWO_DAYS ConversionLagBucketEnum_ConversionLagBucket = 3 + // Conversion lag bucket from 2 to 3 days. 2 days is included, + // 3 days is not. + ConversionLagBucketEnum_TWO_TO_THREE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 4 + // Conversion lag bucket from 3 to 4 days. 3 days is included, + // 4 days is not. + ConversionLagBucketEnum_THREE_TO_FOUR_DAYS ConversionLagBucketEnum_ConversionLagBucket = 5 + // Conversion lag bucket from 4 to 5 days. 4 days is included, + // 5 days is not. + ConversionLagBucketEnum_FOUR_TO_FIVE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 6 + // Conversion lag bucket from 5 to 6 days. 5 days is included, + // 6 days is not. + ConversionLagBucketEnum_FIVE_TO_SIX_DAYS ConversionLagBucketEnum_ConversionLagBucket = 7 + // Conversion lag bucket from 6 to 7 days. 6 days is included, + // 7 days is not. + ConversionLagBucketEnum_SIX_TO_SEVEN_DAYS ConversionLagBucketEnum_ConversionLagBucket = 8 + // Conversion lag bucket from 7 to 8 days. 7 days is included, + // 8 days is not. + ConversionLagBucketEnum_SEVEN_TO_EIGHT_DAYS ConversionLagBucketEnum_ConversionLagBucket = 9 + // Conversion lag bucket from 8 to 9 days. 8 days is included, + // 9 days is not. + ConversionLagBucketEnum_EIGHT_TO_NINE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 10 + // Conversion lag bucket from 9 to 10 days. 9 days is included, + // 10 days is not. + ConversionLagBucketEnum_NINE_TO_TEN_DAYS ConversionLagBucketEnum_ConversionLagBucket = 11 + // Conversion lag bucket from 10 to 11 days. 10 days is included, + // 11 days is not. + ConversionLagBucketEnum_TEN_TO_ELEVEN_DAYS ConversionLagBucketEnum_ConversionLagBucket = 12 + // Conversion lag bucket from 11 to 12 days. 11 days is included, + // 12 days is not. + ConversionLagBucketEnum_ELEVEN_TO_TWELVE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 13 + // Conversion lag bucket from 12 to 13 days. 12 days is included, + // 13 days is not. + ConversionLagBucketEnum_TWELVE_TO_THIRTEEN_DAYS ConversionLagBucketEnum_ConversionLagBucket = 14 + // Conversion lag bucket from 13 to 14 days. 13 days is included, + // 14 days is not. + ConversionLagBucketEnum_THIRTEEN_TO_FOURTEEN_DAYS ConversionLagBucketEnum_ConversionLagBucket = 15 + // Conversion lag bucket from 14 to 21 days. 14 days is included, + // 21 days is not. + ConversionLagBucketEnum_FOURTEEN_TO_TWENTY_ONE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 16 + // Conversion lag bucket from 21 to 30 days. 21 days is included, + // 30 days is not. + ConversionLagBucketEnum_TWENTY_ONE_TO_THIRTY_DAYS ConversionLagBucketEnum_ConversionLagBucket = 17 + // Conversion lag bucket from 30 to 45 days. 30 days is included, + // 45 days is not. + ConversionLagBucketEnum_THIRTY_TO_FORTY_FIVE_DAYS ConversionLagBucketEnum_ConversionLagBucket = 18 + // Conversion lag bucket from 45 to 60 days. 45 days is included, + // 60 days is not. + ConversionLagBucketEnum_FORTY_FIVE_TO_SIXTY_DAYS ConversionLagBucketEnum_ConversionLagBucket = 19 + // Conversion lag bucket from 60 to 90 days. 60 days is included, + // 90 days is not. 
+ ConversionLagBucketEnum_SIXTY_TO_NINETY_DAYS ConversionLagBucketEnum_ConversionLagBucket = 20 +) + +var ConversionLagBucketEnum_ConversionLagBucket_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LESS_THAN_ONE_DAY", + 3: "ONE_TO_TWO_DAYS", + 4: "TWO_TO_THREE_DAYS", + 5: "THREE_TO_FOUR_DAYS", + 6: "FOUR_TO_FIVE_DAYS", + 7: "FIVE_TO_SIX_DAYS", + 8: "SIX_TO_SEVEN_DAYS", + 9: "SEVEN_TO_EIGHT_DAYS", + 10: "EIGHT_TO_NINE_DAYS", + 11: "NINE_TO_TEN_DAYS", + 12: "TEN_TO_ELEVEN_DAYS", + 13: "ELEVEN_TO_TWELVE_DAYS", + 14: "TWELVE_TO_THIRTEEN_DAYS", + 15: "THIRTEEN_TO_FOURTEEN_DAYS", + 16: "FOURTEEN_TO_TWENTY_ONE_DAYS", + 17: "TWENTY_ONE_TO_THIRTY_DAYS", + 18: "THIRTY_TO_FORTY_FIVE_DAYS", + 19: "FORTY_FIVE_TO_SIXTY_DAYS", + 20: "SIXTY_TO_NINETY_DAYS", +} +var ConversionLagBucketEnum_ConversionLagBucket_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LESS_THAN_ONE_DAY": 2, + "ONE_TO_TWO_DAYS": 3, + "TWO_TO_THREE_DAYS": 4, + "THREE_TO_FOUR_DAYS": 5, + "FOUR_TO_FIVE_DAYS": 6, + "FIVE_TO_SIX_DAYS": 7, + "SIX_TO_SEVEN_DAYS": 8, + "SEVEN_TO_EIGHT_DAYS": 9, + "EIGHT_TO_NINE_DAYS": 10, + "NINE_TO_TEN_DAYS": 11, + "TEN_TO_ELEVEN_DAYS": 12, + "ELEVEN_TO_TWELVE_DAYS": 13, + "TWELVE_TO_THIRTEEN_DAYS": 14, + "THIRTEEN_TO_FOURTEEN_DAYS": 15, + "FOURTEEN_TO_TWENTY_ONE_DAYS": 16, + "TWENTY_ONE_TO_THIRTY_DAYS": 17, + "THIRTY_TO_FORTY_FIVE_DAYS": 18, + "FORTY_FIVE_TO_SIXTY_DAYS": 19, + "SIXTY_TO_NINETY_DAYS": 20, +} + +func (x ConversionLagBucketEnum_ConversionLagBucket) String() string { + return proto.EnumName(ConversionLagBucketEnum_ConversionLagBucket_name, int32(x)) +} +func (ConversionLagBucketEnum_ConversionLagBucket) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_lag_bucket_f7860160ba5d8d24, []int{0, 0} +} + +// Container for enum representing the number of days between impression and +// conversion. 
+type ConversionLagBucketEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionLagBucketEnum) Reset() { *m = ConversionLagBucketEnum{} } +func (m *ConversionLagBucketEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionLagBucketEnum) ProtoMessage() {} +func (*ConversionLagBucketEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_lag_bucket_f7860160ba5d8d24, []int{0} +} +func (m *ConversionLagBucketEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionLagBucketEnum.Unmarshal(m, b) +} +func (m *ConversionLagBucketEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionLagBucketEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionLagBucketEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionLagBucketEnum.Merge(dst, src) +} +func (m *ConversionLagBucketEnum) XXX_Size() int { + return xxx_messageInfo_ConversionLagBucketEnum.Size(m) +} +func (m *ConversionLagBucketEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionLagBucketEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionLagBucketEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionLagBucketEnum)(nil), "google.ads.googleads.v1.enums.ConversionLagBucketEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionLagBucketEnum_ConversionLagBucket", ConversionLagBucketEnum_ConversionLagBucket_name, ConversionLagBucketEnum_ConversionLagBucket_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_lag_bucket.proto", fileDescriptor_conversion_lag_bucket_f7860160ba5d8d24) +} + +var fileDescriptor_conversion_lag_bucket_f7860160ba5d8d24 = []byte{ + // 512 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xd1, 0x6e, 0xda, 0x30, + 0x14, 0x86, 0x07, 0x65, 0xed, 0x66, 0xb6, 0x91, 0x1a, 0x3a, 0xe8, 0x5a, 0x34, 0xb5, 0x0f, 0x90, + 0x08, 0xed, 0x6a, 0xd9, 0x55, 0x68, 0x5d, 0x88, 0x86, 0x1c, 0x44, 0x0c, 0x1d, 0x13, 0x92, 0x95, + 0x92, 0x28, 0x42, 0x83, 0x18, 0x61, 0xe0, 0x75, 0x26, 0xed, 0x72, 0xaf, 0xb0, 0x37, 0xd8, 0xa3, + 0x4c, 0xda, 0x3b, 0x4c, 0xf6, 0x89, 0xc3, 0x2e, 0xba, 0xdd, 0x44, 0xe7, 0xff, 0xce, 0x39, 0x7f, + 0xec, 0xa3, 0x63, 0xf4, 0x3e, 0x15, 0x22, 0x5d, 0x26, 0x4e, 0x14, 0x4b, 0x07, 0x42, 0x15, 0xed, + 0x3b, 0x4e, 0x92, 0xed, 0x56, 0xd2, 0x99, 0x8b, 0x6c, 0x9f, 0x6c, 0xe4, 0x42, 0x64, 0x7c, 0x19, + 0xa5, 0xfc, 0x61, 0x37, 0xff, 0x92, 0x6c, 0xed, 0xf5, 0x46, 0x6c, 0x05, 0x6e, 0x43, 0xbd, 0x1d, + 0xc5, 0xd2, 0x2e, 0x5a, 0xed, 0x7d, 0xc7, 0xd6, 0xad, 0x6f, 0x2e, 0x8d, 0xf3, 0x7a, 0xe1, 0x44, + 0x59, 0x26, 0xb6, 0xd1, 0x76, 0x21, 0x32, 0x09, 0xcd, 0xd7, 0x3f, 0x2a, 0xa8, 0x79, 0x53, 0x98, + 0x0f, 0xa2, 0xb4, 0xab, 0xad, 0x49, 0xb6, 0x5b, 0x5d, 0x7f, 0xad, 0xa0, 0xfa, 0x23, 0x39, 0x5c, + 0x43, 0xd5, 0x31, 0x0d, 0x87, 0xe4, 0xc6, 0xbf, 0xf3, 0xc9, 0xad, 0xf5, 0x04, 0x57, 0xd1, 0xc9, + 0x98, 0x7e, 0xa4, 0xc1, 0x3d, 0xb5, 0x4a, 0xf8, 0x0c, 0x9d, 0x0e, 0x48, 0x18, 0x72, 0xd6, 0xf7, + 0x28, 0x0f, 0x28, 0xe1, 0xb7, 0xde, 0xd4, 0x2a, 0xe3, 0x3a, 0xaa, 0x29, 0xc1, 0x02, 0xce, 0xee, + 0x03, 0xc5, 0x42, 0xeb, 0x48, 0xd5, 0x2a, 0xa5, 0x60, 0x7f, 0x44, 0x08, 0xe0, 0x0a, 0x7e, 0x8d, + 0x30, 0x68, 0x16, 0xf0, 0xbb, 0x60, 0x3c, 0x02, 0xfe, 0x54, 0x95, 0x6b, 0xa9, 0xb0, 0x3f, 0xc9, + 0xcb, 0x8f, 0x71, 0x03, 0x59, 0x5a, 0xb2, 0x80, 0x87, 0xfe, 0x27, 0xa0, 0x27, 0xaa, 0x58, 0x29, + 0x05, 0xc9, 0x84, 
0x50, 0xc0, 0xcf, 0x70, 0x13, 0xd5, 0x41, 0xb3, 0x80, 0x13, 0xbf, 0xd7, 0x67, + 0x90, 0x78, 0xae, 0x7e, 0x0a, 0x9a, 0x05, 0x9c, 0xfa, 0x34, 0x77, 0x47, 0xca, 0x5d, 0x4b, 0x75, + 0x48, 0x63, 0x53, 0xd5, 0x47, 0xcc, 0x4d, 0x06, 0x07, 0xfb, 0x17, 0xf8, 0x1c, 0x9d, 0xe5, 0x40, + 0xdf, 0x94, 0x0c, 0xcc, 0x31, 0x5f, 0xe2, 0x0b, 0xd4, 0xcc, 0x81, 0xbe, 0xaf, 0x3f, 0x62, 0xc4, + 0xf4, 0xbd, 0xc2, 0x6d, 0x74, 0x5e, 0xa0, 0xfc, 0xd6, 0x87, 0x74, 0x0d, 0xbf, 0x45, 0x17, 0x05, + 0x02, 0x63, 0xca, 0xa6, 0x66, 0xba, 0xa1, 0x65, 0xe9, 0xfe, 0x03, 0x34, 0x3f, 0x98, 0x42, 0xfa, + 0xb4, 0xb0, 0x9f, 0x82, 0xb9, 0x0a, 0x0e, 0x13, 0xc4, 0xf8, 0x12, 0xb5, 0xfe, 0x82, 0x30, 0x47, + 0xd3, 0x5c, 0xc7, 0x2d, 0xd4, 0x00, 0x9d, 0x4f, 0xc6, 0x64, 0x1a, 0xdd, 0xdf, 0x25, 0x74, 0x35, + 0x17, 0x2b, 0xfb, 0xbf, 0x1b, 0xd8, 0x6d, 0x3d, 0xb2, 0x44, 0x43, 0xb5, 0x7d, 0xc3, 0xd2, 0xe7, + 0x6e, 0xde, 0x9a, 0x8a, 0x65, 0x94, 0xa5, 0xb6, 0xd8, 0xa4, 0x4e, 0x9a, 0x64, 0x7a, 0x37, 0xcd, + 0x3b, 0x58, 0x2f, 0xe4, 0x3f, 0x9e, 0xc5, 0x07, 0xfd, 0xfd, 0x56, 0x3e, 0xea, 0x79, 0xde, 0xf7, + 0x72, 0xbb, 0x07, 0x56, 0x5e, 0x2c, 0x6d, 0x08, 0x55, 0x34, 0xe9, 0xd8, 0x6a, 0x99, 0xe5, 0x4f, + 0x93, 0x9f, 0x79, 0xb1, 0x9c, 0x15, 0xf9, 0xd9, 0xa4, 0x33, 0xd3, 0xf9, 0x5f, 0xe5, 0x2b, 0x80, + 0xae, 0xeb, 0xc5, 0xd2, 0x75, 0x8b, 0x0a, 0xd7, 0x9d, 0x74, 0x5c, 0x57, 0xd7, 0x3c, 0x1c, 0xeb, + 0x83, 0xbd, 0xfb, 0x13, 0x00, 0x00, 0xff, 0xff, 0x29, 0xdd, 0xa7, 0xb6, 0xae, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.pb.go new file mode 100644 index 0000000..ac89e6e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.pb.go @@ -0,0 +1,342 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum representing the number of days between the impression and the +// conversion or between the impression and adjustments to the conversion. +type ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket int32 + +const ( + // Not specified. + ConversionOrAdjustmentLagBucketEnum_UNSPECIFIED ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 0 + // Used for return value only. Represents value unknown in this version. + ConversionOrAdjustmentLagBucketEnum_UNKNOWN ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 1 + // Conversion lag bucket from 0 to 1 day. 0 day is included, 1 day is not. 
+ ConversionOrAdjustmentLagBucketEnum_CONVERSION_LESS_THAN_ONE_DAY ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 2 + // Conversion lag bucket from 1 to 2 days. 1 day is included, 2 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_ONE_TO_TWO_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 3 + // Conversion lag bucket from 2 to 3 days. 2 days is included, + // 3 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_TWO_TO_THREE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 4 + // Conversion lag bucket from 3 to 4 days. 3 days is included, + // 4 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_THREE_TO_FOUR_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 5 + // Conversion lag bucket from 4 to 5 days. 4 days is included, + // 5 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_FOUR_TO_FIVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 6 + // Conversion lag bucket from 5 to 6 days. 5 days is included, + // 6 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_FIVE_TO_SIX_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 7 + // Conversion lag bucket from 6 to 7 days. 6 days is included, + // 7 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_SIX_TO_SEVEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 8 + // Conversion lag bucket from 7 to 8 days. 7 days is included, + // 8 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_SEVEN_TO_EIGHT_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 9 + // Conversion lag bucket from 8 to 9 days. 8 days is included, + // 9 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_EIGHT_TO_NINE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 10 + // Conversion lag bucket from 9 to 10 days. 9 days is included, + // 10 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_NINE_TO_TEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 11 + // Conversion lag bucket from 10 to 11 days. 10 days is included, + // 11 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_TEN_TO_ELEVEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 12 + // Conversion lag bucket from 11 to 12 days. 11 days is included, + // 12 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_ELEVEN_TO_TWELVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 13 + // Conversion lag bucket from 12 to 13 days. 12 days is included, + // 13 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_TWELVE_TO_THIRTEEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 14 + // Conversion lag bucket from 13 to 14 days. 13 days is included, + // 14 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_THIRTEEN_TO_FOURTEEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 15 + // Conversion lag bucket from 14 to 21 days. 14 days is included, + // 21 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_FOURTEEN_TO_TWENTY_ONE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 16 + // Conversion lag bucket from 21 to 30 days. 21 days is included, + // 30 days is not. 
+ ConversionOrAdjustmentLagBucketEnum_CONVERSION_TWENTY_ONE_TO_THIRTY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 17 + // Conversion lag bucket from 30 to 45 days. 30 days is included, + // 45 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_THIRTY_TO_FORTY_FIVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 18 + // Conversion lag bucket from 45 to 60 days. 45 days is included, + // 60 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_FORTY_FIVE_TO_SIXTY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 19 + // Conversion lag bucket from 60 to 90 days. 60 days is included, + // 90 days is not. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_SIXTY_TO_NINETY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 20 + // Conversion adjustment lag bucket from 0 to 1 day. 0 day is included, + // 1 day is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_LESS_THAN_ONE_DAY ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 21 + // Conversion adjustment lag bucket from 1 to 2 days. 1 day is included, + // 2 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_ONE_TO_TWO_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 22 + // Conversion adjustment lag bucket from 2 to 3 days. 2 days is included, + // 3 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_TWO_TO_THREE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 23 + // Conversion adjustment lag bucket from 3 to 4 days. 3 days is included, + // 4 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_THREE_TO_FOUR_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 24 + // Conversion adjustment lag bucket from 4 to 5 days. 4 days is included, + // 5 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_FOUR_TO_FIVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 25 + // Conversion adjustment lag bucket from 5 to 6 days. 5 days is included, + // 6 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_FIVE_TO_SIX_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 26 + // Conversion adjustment lag bucket from 6 to 7 days. 6 days is included, + // 7 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_SIX_TO_SEVEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 27 + // Conversion adjustment lag bucket from 7 to 8 days. 7 days is included, + // 8 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_SEVEN_TO_EIGHT_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 28 + // Conversion adjustment lag bucket from 8 to 9 days. 8 days is included, + // 9 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_EIGHT_TO_NINE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 29 + // Conversion adjustment lag bucket from 9 to 10 days. 9 days is included, + // 10 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_NINE_TO_TEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 30 + // Conversion adjustment lag bucket from 10 to 11 days. 10 days is included, + // 11 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_TEN_TO_ELEVEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 31 + // Conversion adjustment lag bucket from 11 to 12 days. 11 days is included, + // 12 days is not. 
+ ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_ELEVEN_TO_TWELVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 32 + // Conversion adjustment lag bucket from 12 to 13 days. 12 days is included, + // 13 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_TWELVE_TO_THIRTEEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 33 + // Conversion adjustment lag bucket from 13 to 14 days. 13 days is included, + // 14 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_THIRTEEN_TO_FOURTEEN_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 34 + // Conversion adjustment lag bucket from 14 to 21 days. 14 days is included, + // 21 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_FOURTEEN_TO_TWENTY_ONE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 35 + // Conversion adjustment lag bucket from 21 to 30 days. 21 days is included, + // 30 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_TWENTY_ONE_TO_THIRTY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 36 + // Conversion adjustment lag bucket from 30 to 45 days. 30 days is included, + // 45 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_THIRTY_TO_FORTY_FIVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 37 + // Conversion adjustment lag bucket from 45 to 60 days. 45 days is included, + // 60 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_FORTY_FIVE_TO_SIXTY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 38 + // Conversion adjustment lag bucket from 60 to 90 days. 60 days is included, + // 90 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_SIXTY_TO_NINETY_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 39 + // Conversion adjustment lag bucket from 90 to 145 days. 90 days is + // included, 145 days is not. + ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_NINETY_TO_ONE_HUNDRED_AND_FORTY_FIVE_DAYS ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 40 + // Conversion lag bucket UNKNOWN. This is for dates before conversion lag + // bucket was available in Google Ads. + ConversionOrAdjustmentLagBucketEnum_CONVERSION_UNKNOWN ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 41 + // Conversion adjustment lag bucket UNKNOWN. This is for dates before + // conversion adjustment lag bucket was available in Google Ads. 
+ ConversionOrAdjustmentLagBucketEnum_ADJUSTMENT_UNKNOWN ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket = 42 +) + +var ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONVERSION_LESS_THAN_ONE_DAY", + 3: "CONVERSION_ONE_TO_TWO_DAYS", + 4: "CONVERSION_TWO_TO_THREE_DAYS", + 5: "CONVERSION_THREE_TO_FOUR_DAYS", + 6: "CONVERSION_FOUR_TO_FIVE_DAYS", + 7: "CONVERSION_FIVE_TO_SIX_DAYS", + 8: "CONVERSION_SIX_TO_SEVEN_DAYS", + 9: "CONVERSION_SEVEN_TO_EIGHT_DAYS", + 10: "CONVERSION_EIGHT_TO_NINE_DAYS", + 11: "CONVERSION_NINE_TO_TEN_DAYS", + 12: "CONVERSION_TEN_TO_ELEVEN_DAYS", + 13: "CONVERSION_ELEVEN_TO_TWELVE_DAYS", + 14: "CONVERSION_TWELVE_TO_THIRTEEN_DAYS", + 15: "CONVERSION_THIRTEEN_TO_FOURTEEN_DAYS", + 16: "CONVERSION_FOURTEEN_TO_TWENTY_ONE_DAYS", + 17: "CONVERSION_TWENTY_ONE_TO_THIRTY_DAYS", + 18: "CONVERSION_THIRTY_TO_FORTY_FIVE_DAYS", + 19: "CONVERSION_FORTY_FIVE_TO_SIXTY_DAYS", + 20: "CONVERSION_SIXTY_TO_NINETY_DAYS", + 21: "ADJUSTMENT_LESS_THAN_ONE_DAY", + 22: "ADJUSTMENT_ONE_TO_TWO_DAYS", + 23: "ADJUSTMENT_TWO_TO_THREE_DAYS", + 24: "ADJUSTMENT_THREE_TO_FOUR_DAYS", + 25: "ADJUSTMENT_FOUR_TO_FIVE_DAYS", + 26: "ADJUSTMENT_FIVE_TO_SIX_DAYS", + 27: "ADJUSTMENT_SIX_TO_SEVEN_DAYS", + 28: "ADJUSTMENT_SEVEN_TO_EIGHT_DAYS", + 29: "ADJUSTMENT_EIGHT_TO_NINE_DAYS", + 30: "ADJUSTMENT_NINE_TO_TEN_DAYS", + 31: "ADJUSTMENT_TEN_TO_ELEVEN_DAYS", + 32: "ADJUSTMENT_ELEVEN_TO_TWELVE_DAYS", + 33: "ADJUSTMENT_TWELVE_TO_THIRTEEN_DAYS", + 34: "ADJUSTMENT_THIRTEEN_TO_FOURTEEN_DAYS", + 35: "ADJUSTMENT_FOURTEEN_TO_TWENTY_ONE_DAYS", + 36: "ADJUSTMENT_TWENTY_ONE_TO_THIRTY_DAYS", + 37: "ADJUSTMENT_THIRTY_TO_FORTY_FIVE_DAYS", + 38: "ADJUSTMENT_FORTY_FIVE_TO_SIXTY_DAYS", + 39: "ADJUSTMENT_SIXTY_TO_NINETY_DAYS", + 40: "ADJUSTMENT_NINETY_TO_ONE_HUNDRED_AND_FORTY_FIVE_DAYS", + 41: "CONVERSION_UNKNOWN", + 42: "ADJUSTMENT_UNKNOWN", +} +var ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONVERSION_LESS_THAN_ONE_DAY": 2, + "CONVERSION_ONE_TO_TWO_DAYS": 3, + "CONVERSION_TWO_TO_THREE_DAYS": 4, + "CONVERSION_THREE_TO_FOUR_DAYS": 5, + "CONVERSION_FOUR_TO_FIVE_DAYS": 6, + "CONVERSION_FIVE_TO_SIX_DAYS": 7, + "CONVERSION_SIX_TO_SEVEN_DAYS": 8, + "CONVERSION_SEVEN_TO_EIGHT_DAYS": 9, + "CONVERSION_EIGHT_TO_NINE_DAYS": 10, + "CONVERSION_NINE_TO_TEN_DAYS": 11, + "CONVERSION_TEN_TO_ELEVEN_DAYS": 12, + "CONVERSION_ELEVEN_TO_TWELVE_DAYS": 13, + "CONVERSION_TWELVE_TO_THIRTEEN_DAYS": 14, + "CONVERSION_THIRTEEN_TO_FOURTEEN_DAYS": 15, + "CONVERSION_FOURTEEN_TO_TWENTY_ONE_DAYS": 16, + "CONVERSION_TWENTY_ONE_TO_THIRTY_DAYS": 17, + "CONVERSION_THIRTY_TO_FORTY_FIVE_DAYS": 18, + "CONVERSION_FORTY_FIVE_TO_SIXTY_DAYS": 19, + "CONVERSION_SIXTY_TO_NINETY_DAYS": 20, + "ADJUSTMENT_LESS_THAN_ONE_DAY": 21, + "ADJUSTMENT_ONE_TO_TWO_DAYS": 22, + "ADJUSTMENT_TWO_TO_THREE_DAYS": 23, + "ADJUSTMENT_THREE_TO_FOUR_DAYS": 24, + "ADJUSTMENT_FOUR_TO_FIVE_DAYS": 25, + "ADJUSTMENT_FIVE_TO_SIX_DAYS": 26, + "ADJUSTMENT_SIX_TO_SEVEN_DAYS": 27, + "ADJUSTMENT_SEVEN_TO_EIGHT_DAYS": 28, + "ADJUSTMENT_EIGHT_TO_NINE_DAYS": 29, + "ADJUSTMENT_NINE_TO_TEN_DAYS": 30, + "ADJUSTMENT_TEN_TO_ELEVEN_DAYS": 31, + "ADJUSTMENT_ELEVEN_TO_TWELVE_DAYS": 32, + "ADJUSTMENT_TWELVE_TO_THIRTEEN_DAYS": 33, + "ADJUSTMENT_THIRTEEN_TO_FOURTEEN_DAYS": 34, + "ADJUSTMENT_FOURTEEN_TO_TWENTY_ONE_DAYS": 35, + "ADJUSTMENT_TWENTY_ONE_TO_THIRTY_DAYS": 36, + "ADJUSTMENT_THIRTY_TO_FORTY_FIVE_DAYS": 37, + 
"ADJUSTMENT_FORTY_FIVE_TO_SIXTY_DAYS": 38, + "ADJUSTMENT_SIXTY_TO_NINETY_DAYS": 39, + "ADJUSTMENT_NINETY_TO_ONE_HUNDRED_AND_FORTY_FIVE_DAYS": 40, + "CONVERSION_UNKNOWN": 41, + "ADJUSTMENT_UNKNOWN": 42, +} + +func (x ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket) String() string { + return proto.EnumName(ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket_name, int32(x)) +} +func (ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_or_adjustment_lag_bucket_ea9c79e631bbd90c, []int{0, 0} +} + +// Container for enum representing the number of days between the impression and +// the conversion or between the impression and adjustments to the conversion. +type ConversionOrAdjustmentLagBucketEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionOrAdjustmentLagBucketEnum) Reset() { *m = ConversionOrAdjustmentLagBucketEnum{} } +func (m *ConversionOrAdjustmentLagBucketEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionOrAdjustmentLagBucketEnum) ProtoMessage() {} +func (*ConversionOrAdjustmentLagBucketEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_or_adjustment_lag_bucket_ea9c79e631bbd90c, []int{0} +} +func (m *ConversionOrAdjustmentLagBucketEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum.Unmarshal(m, b) +} +func (m *ConversionOrAdjustmentLagBucketEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionOrAdjustmentLagBucketEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum.Merge(dst, src) +} +func (m *ConversionOrAdjustmentLagBucketEnum) XXX_Size() int { + return xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum.Size(m) +} +func (m *ConversionOrAdjustmentLagBucketEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionOrAdjustmentLagBucketEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionOrAdjustmentLagBucketEnum)(nil), "google.ads.googleads.v1.enums.ConversionOrAdjustmentLagBucketEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket", ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket_name, ConversionOrAdjustmentLagBucketEnum_ConversionOrAdjustmentLagBucket_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/conversion_or_adjustment_lag_bucket.proto", fileDescriptor_conversion_or_adjustment_lag_bucket_ea9c79e631bbd90c) +} + +var fileDescriptor_conversion_or_adjustment_lag_bucket_ea9c79e631bbd90c = []byte{ + // 741 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x95, 0xdf, 0x72, 0xd2, 0x40, + 0x14, 0xc6, 0x2d, 0xd5, 0x56, 0xb7, 0xd5, 0xe2, 0xaa, 0x55, 0xdb, 0x52, 0xca, 0x1f, 0xdb, 0xda, + 0x8b, 0x30, 0x8c, 0x5e, 0x38, 0xf1, 0x2a, 0x94, 0x2d, 0x44, 0x71, 0xd3, 0x21, 0x01, 0xc4, 0x61, + 0x66, 0x27, 0x2d, 0x4c, 0x06, 0x2d, 0x49, 0x87, 0x40, 0x1f, 0x48, 0xef, 0x7c, 0x14, 0x7d, 0x13, + 0xdf, 0xc0, 0x3b, 0xe7, 0x64, 0x59, 0x48, 0x36, 0xc1, 0xde, 0x74, 0x76, 0xf2, 0x7d, 0xf9, 0xce, + 0xc9, 0x39, 0xbf, 0xb2, 0xa8, 0xe6, 0x78, 0x9e, 0x73, 
0x35, 0x28, 0xd9, 0x7d, 0xbf, 0xc4, 0x8f, + 0x70, 0xba, 0x29, 0x97, 0x06, 0xee, 0x74, 0xe4, 0x97, 0x2e, 0x3d, 0xf7, 0x66, 0x30, 0xf6, 0x87, + 0x9e, 0xcb, 0xbc, 0x31, 0xb3, 0xfb, 0x5f, 0xa7, 0xfe, 0x64, 0x34, 0x70, 0x27, 0xec, 0xca, 0x76, + 0xd8, 0xc5, 0xf4, 0xf2, 0xdb, 0x60, 0xa2, 0x5c, 0x8f, 0xbd, 0x89, 0x87, 0x33, 0xfc, 0x6d, 0xc5, + 0xee, 0xfb, 0xca, 0x3c, 0x48, 0xb9, 0x29, 0x2b, 0x41, 0xd0, 0xce, 0x9e, 0xa8, 0x73, 0x3d, 0x2c, + 0xd9, 0xae, 0xeb, 0x4d, 0xec, 0xc9, 0xd0, 0x73, 0x7d, 0xfe, 0x72, 0xfe, 0xf7, 0x26, 0x2a, 0x9c, + 0xce, 0x4b, 0x19, 0x63, 0x6d, 0x5e, 0xa8, 0x61, 0x3b, 0x95, 0xa0, 0x0c, 0x71, 0xa7, 0xa3, 0xfc, + 0x8f, 0x4d, 0x94, 0xbd, 0xc5, 0x87, 0xb7, 0xd0, 0x46, 0x8b, 0x9a, 0xe7, 0xe4, 0x54, 0x3f, 0xd3, + 0x49, 0x35, 0x7d, 0x07, 0x6f, 0xa0, 0xf5, 0x16, 0xfd, 0x48, 0x8d, 0x0e, 0x4d, 0xaf, 0xe0, 0x03, + 0xb4, 0x77, 0x6a, 0xd0, 0x36, 0x69, 0x9a, 0xba, 0x41, 0x59, 0x83, 0x98, 0x26, 0xb3, 0xea, 0x1a, + 0x65, 0x06, 0x25, 0xac, 0xaa, 0x75, 0xd3, 0x29, 0xbc, 0x8f, 0x76, 0x42, 0x0e, 0x78, 0x6e, 0x19, + 0xcc, 0xea, 0x18, 0x20, 0x9b, 0xe9, 0x55, 0x29, 0x01, 0x04, 0xd0, 0xeb, 0x4d, 0x42, 0xb8, 0xe3, + 0x2e, 0xce, 0xa1, 0x4c, 0xd8, 0x11, 0x48, 0x96, 0xc1, 0xce, 0x8c, 0x56, 0x93, 0x5b, 0xee, 0x49, + 0x21, 0x81, 0x02, 0x0e, 0xbd, 0x3d, 0x0b, 0x59, 0xc3, 0x59, 0xb4, 0x1b, 0x76, 0x80, 0x62, 0x19, + 0xcc, 0xd4, 0x3f, 0x73, 0xc3, 0xba, 0x14, 0x01, 0x02, 0xe8, 0xa4, 0x4d, 0x28, 0x77, 0xdc, 0xc7, + 0x79, 0xb4, 0x1f, 0x76, 0x04, 0x92, 0x65, 0x30, 0xa2, 0xd7, 0xea, 0x16, 0xf7, 0x3c, 0x90, 0x7a, + 0xe5, 0x92, 0x65, 0x30, 0xaa, 0xd3, 0x59, 0x27, 0x48, 0xea, 0x24, 0x50, 0xe0, 0x8b, 0x45, 0x9d, + 0x0d, 0xf9, 0x7b, 0x67, 0x55, 0x1a, 0x8b, 0x56, 0x36, 0x71, 0x11, 0x1d, 0x84, 0xcb, 0x34, 0x44, + 0x2f, 0x56, 0x87, 0x34, 0xc4, 0x37, 0x3f, 0xc4, 0x87, 0x28, 0x1f, 0x19, 0x6d, 0xa0, 0x05, 0xd3, + 0xd5, 0x9b, 0x16, 0x11, 0x69, 0x8f, 0xf0, 0x31, 0x2a, 0x46, 0x06, 0x3c, 0x53, 0x67, 0x33, 0x5e, + 0x38, 0xb7, 0xf0, 0x09, 0x3a, 0x94, 0xe6, 0x2c, 0x9c, 0x56, 0x87, 0x50, 0xab, 0x2b, 0xf6, 0x6e, + 0xa6, 0xd3, 0x72, 0xea, 0x42, 0x17, 0x1d, 0x74, 0xb9, 0xf3, 0x71, 0x52, 0xfd, 0x2e, 0xaf, 0x0e, + 0x87, 0xc5, 0x16, 0x31, 0x3e, 0x42, 0x85, 0x48, 0xfd, 0xb9, 0xce, 0x77, 0x29, 0x22, 0x9f, 0xe0, + 0x02, 0xca, 0x46, 0xb7, 0xc9, 0x13, 0x61, 0xda, 0xc2, 0xf4, 0x14, 0x56, 0xae, 0x55, 0x3f, 0xb4, + 0x4c, 0xeb, 0x13, 0xa1, 0x56, 0x02, 0xbc, 0xcf, 0x00, 0xde, 0x90, 0x43, 0x86, 0x77, 0x5b, 0x4a, + 0x88, 0xc3, 0xfb, 0x1c, 0x96, 0x19, 0x76, 0xc4, 0xe1, 0x7d, 0x21, 0x85, 0xc4, 0xe1, 0x7d, 0x09, + 0xc8, 0x84, 0x1d, 0x32, 0xbc, 0x3b, 0x52, 0x44, 0x1c, 0xde, 0x5d, 0x80, 0x37, 0xec, 0x48, 0x80, + 0x77, 0x4f, 0xea, 0x35, 0x01, 0xde, 0x8c, 0xd4, 0x49, 0x0c, 0xde, 0x7d, 0xf9, 0x7b, 0xe3, 0xf0, + 0x66, 0x01, 0xde, 0x70, 0x99, 0x44, 0x78, 0x0f, 0x00, 0xde, 0xc8, 0x68, 0x93, 0xe1, 0xcd, 0x01, + 0x3c, 0x91, 0x01, 0x2f, 0x83, 0x37, 0x0f, 0xf0, 0x4a, 0x73, 0x5e, 0x06, 0x6f, 0x41, 0x4e, 0x5d, + 0x0a, 0x6f, 0x31, 0xa9, 0x7e, 0x22, 0xbc, 0xaf, 0x00, 0xde, 0x48, 0xfd, 0x25, 0xf0, 0x1e, 0x02, + 0xbc, 0xd1, 0x6d, 0xc6, 0xe1, 0x3d, 0xc2, 0xef, 0xd0, 0x5b, 0x69, 0x13, 0xdc, 0x05, 0x4d, 0xd6, + 0x5b, 0xb4, 0xda, 0x24, 0x55, 0xa6, 0xd1, 0x6a, 0xac, 0x8f, 0x63, 0xbc, 0x8d, 0x70, 0xe8, 0x7f, + 0x43, 0xfc, 0x96, 0xbf, 0x86, 0xe7, 0xa1, 0x44, 0xf1, 0xfc, 0xa4, 0xf2, 0x77, 0x05, 0xe5, 0x2e, + 0xbd, 0x91, 0xf2, 0xdf, 0x1b, 0xa9, 0x52, 0xbc, 0xe5, 0x22, 0x39, 0x87, 0x9b, 0xe9, 0x7c, 0xe5, + 0x4b, 0x65, 0x16, 0xe3, 0x78, 0x57, 0xb6, 0xeb, 0x28, 0xde, 0xd8, 0x29, 0x39, 0x03, 0x37, 0xb8, + 0xb7, 0xc4, 0x8d, 0x79, 0x3d, 0xf4, 0x97, 0x5c, 0xa0, 0xef, 0x83, 0xbf, 0xdf, 
0x53, 0xab, 0x35, + 0x4d, 0xfb, 0x99, 0xca, 0xd4, 0x78, 0x94, 0xd6, 0xf7, 0x15, 0x7e, 0x84, 0x53, 0xbb, 0xac, 0xc0, + 0xe5, 0xe6, 0xff, 0x12, 0x7a, 0x4f, 0xeb, 0xfb, 0xbd, 0xb9, 0xde, 0x6b, 0x97, 0x7b, 0x81, 0xfe, + 0x27, 0x95, 0xe3, 0x0f, 0x55, 0x55, 0xeb, 0xfb, 0xaa, 0x3a, 0x77, 0xa8, 0x6a, 0xbb, 0xac, 0xaa, + 0x81, 0xe7, 0x62, 0x2d, 0x68, 0xec, 0xcd, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x1b, 0xcd, 0x9e, + 0xae, 0xd8, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_channel_availability_mode.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_channel_availability_mode.pb.go new file mode 100644 index 0000000..fefbee9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_channel_availability_mode.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/criterion_category_channel_availability_mode.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing the possible CriterionCategoryChannelAvailabilityMode. +type CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode int32 + +const ( + // Not specified. + CriterionCategoryChannelAvailabilityModeEnum_UNSPECIFIED CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode = 0 + // Used for return value only. Represents value unknown in this version. + CriterionCategoryChannelAvailabilityModeEnum_UNKNOWN CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode = 1 + // The category is available to campaigns of all channel types and subtypes. + CriterionCategoryChannelAvailabilityModeEnum_ALL_CHANNELS CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode = 2 + // The category is available to campaigns of a specific channel type, + // including all subtypes under it. + CriterionCategoryChannelAvailabilityModeEnum_CHANNEL_TYPE_AND_ALL_SUBTYPES CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode = 3 + // The category is available to campaigns of a specific channel type and + // subtype(s). 
+ CriterionCategoryChannelAvailabilityModeEnum_CHANNEL_TYPE_AND_SUBSET_SUBTYPES CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode = 4 +) + +var CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ALL_CHANNELS", + 3: "CHANNEL_TYPE_AND_ALL_SUBTYPES", + 4: "CHANNEL_TYPE_AND_SUBSET_SUBTYPES", +} +var CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ALL_CHANNELS": 2, + "CHANNEL_TYPE_AND_ALL_SUBTYPES": 3, + "CHANNEL_TYPE_AND_SUBSET_SUBTYPES": 4, +} + +func (x CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode) String() string { + return proto.EnumName(CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode_name, int32(x)) +} +func (CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_channel_availability_mode_54bffde09aaf912b, []int{0, 0} +} + +// Describes channel availability mode for a criterion availability - whether +// the availability is meant to include all advertising channels, or a +// particular channel with all its channel subtypes, or a channel with a certain +// subset of channel subtypes. +type CriterionCategoryChannelAvailabilityModeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionCategoryChannelAvailabilityModeEnum) Reset() { + *m = CriterionCategoryChannelAvailabilityModeEnum{} +} +func (m *CriterionCategoryChannelAvailabilityModeEnum) String() string { + return proto.CompactTextString(m) +} +func (*CriterionCategoryChannelAvailabilityModeEnum) ProtoMessage() {} +func (*CriterionCategoryChannelAvailabilityModeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_channel_availability_mode_54bffde09aaf912b, []int{0} +} +func (m *CriterionCategoryChannelAvailabilityModeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum.Unmarshal(m, b) +} +func (m *CriterionCategoryChannelAvailabilityModeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum.Marshal(b, m, deterministic) +} +func (dst *CriterionCategoryChannelAvailabilityModeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum.Merge(dst, src) +} +func (m *CriterionCategoryChannelAvailabilityModeEnum) XXX_Size() int { + return xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum.Size(m) +} +func (m *CriterionCategoryChannelAvailabilityModeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionCategoryChannelAvailabilityModeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CriterionCategoryChannelAvailabilityModeEnum)(nil), "google.ads.googleads.v1.enums.CriterionCategoryChannelAvailabilityModeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode", CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode_name, 
CriterionCategoryChannelAvailabilityModeEnum_CriterionCategoryChannelAvailabilityMode_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/criterion_category_channel_availability_mode.proto", fileDescriptor_criterion_category_channel_availability_mode_54bffde09aaf912b) +} + +var fileDescriptor_criterion_category_channel_availability_mode_54bffde09aaf912b = []byte{ + // 374 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0x41, 0x6b, 0xdb, 0x30, + 0x18, 0x9d, 0x9d, 0xb1, 0x81, 0x32, 0x98, 0xf1, 0x71, 0x2c, 0xb0, 0x84, 0x1d, 0x76, 0xd8, 0x64, + 0xcc, 0x6e, 0xea, 0x49, 0x76, 0xdc, 0x34, 0x34, 0x75, 0x0d, 0x8e, 0x53, 0x5a, 0x0c, 0x42, 0xb1, + 0x85, 0x2b, 0xb0, 0xa5, 0x60, 0x39, 0x81, 0x5c, 0xfb, 0x53, 0xda, 0x5b, 0x7f, 0x4a, 0xcf, 0xfd, + 0x15, 0xfd, 0x15, 0xc5, 0x76, 0x9c, 0x16, 0x4a, 0x4b, 0x2e, 0xe2, 0x7d, 0xfa, 0xde, 0xf7, 0x9e, + 0xf4, 0x3e, 0x10, 0x64, 0x52, 0x66, 0x39, 0xb3, 0x68, 0xaa, 0xac, 0x16, 0xd6, 0x68, 0x63, 0x5b, + 0x4c, 0xac, 0x0b, 0x65, 0x25, 0x25, 0xaf, 0x58, 0xc9, 0xa5, 0x20, 0x09, 0xad, 0x58, 0x26, 0xcb, + 0x2d, 0x49, 0xae, 0xa9, 0x10, 0x2c, 0x27, 0x74, 0x43, 0x79, 0x4e, 0x97, 0x3c, 0xe7, 0xd5, 0x96, + 0x14, 0x32, 0x65, 0x70, 0x55, 0xca, 0x4a, 0x9a, 0x83, 0x56, 0x06, 0xd2, 0x54, 0xc1, 0xbd, 0x22, + 0xdc, 0xd8, 0xb0, 0x51, 0xfc, 0xf1, 0xb3, 0x33, 0x5c, 0x71, 0x8b, 0x0a, 0x21, 0x2b, 0x5a, 0x71, + 0x29, 0x54, 0x3b, 0x3c, 0x7a, 0xd4, 0xc0, 0x5f, 0xb7, 0xf3, 0x74, 0x77, 0x96, 0x6e, 0xeb, 0x88, + 0x5f, 0x19, 0x9e, 0xc9, 0x94, 0x79, 0x62, 0x5d, 0x8c, 0xee, 0x34, 0xf0, 0xe7, 0xd0, 0x01, 0xf3, + 0x3b, 0xe8, 0x47, 0x7e, 0x18, 0x78, 0xee, 0xf4, 0x78, 0xea, 0x8d, 0x8d, 0x4f, 0x66, 0x1f, 0x7c, + 0x8d, 0xfc, 0x53, 0xff, 0xfc, 0xc2, 0x37, 0x34, 0xd3, 0x00, 0xdf, 0xf0, 0x6c, 0x46, 0xdc, 0x13, + 0xec, 0xfb, 0xde, 0x2c, 0x34, 0x74, 0x73, 0x08, 0x06, 0xbb, 0x8a, 0xcc, 0x2f, 0x03, 0x8f, 0x60, + 0x7f, 0x4c, 0x6a, 0x4a, 0x18, 0x39, 0x75, 0x1d, 0x1a, 0x3d, 0xf3, 0x37, 0xf8, 0xf5, 0x86, 0x12, + 0x46, 0x4e, 0xe8, 0xcd, 0x5f, 0x58, 0x9f, 0x9d, 0x1b, 0x1d, 0x0c, 0x13, 0x59, 0xc0, 0x0f, 0xa3, + 0x71, 0xfe, 0x1d, 0xfa, 0x91, 0xa0, 0xce, 0x2a, 0xd0, 0xae, 0x9c, 0x9d, 0x5e, 0x26, 0x73, 0x2a, + 0x32, 0x28, 0xcb, 0xcc, 0xca, 0x98, 0x68, 0x92, 0xec, 0x96, 0xb9, 0xe2, 0xea, 0x9d, 0xdd, 0x1e, + 0x35, 0xe7, 0xad, 0xde, 0x9b, 0x60, 0x7c, 0xaf, 0x0f, 0x26, 0xad, 0x14, 0x4e, 0x15, 0x6c, 0x61, + 0x8d, 0x16, 0x36, 0xac, 0x53, 0x56, 0x0f, 0x5d, 0x3f, 0xc6, 0xa9, 0x8a, 0xf7, 0xfd, 0x78, 0x61, + 0xc7, 0x4d, 0xff, 0x49, 0x1f, 0xb6, 0x97, 0x08, 0xe1, 0x54, 0x21, 0xb4, 0x67, 0x20, 0xb4, 0xb0, + 0x11, 0x6a, 0x38, 0xcb, 0x2f, 0xcd, 0xc3, 0xfe, 0x3f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x69, 0x2b, + 0x2e, 0x1a, 0x73, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_locale_availability_mode.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_locale_availability_mode.pb.go new file mode 100644 index 0000000..b513569 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_category_locale_availability_mode.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/criterion_category_locale_availability_mode.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing the possible CriterionCategoryLocaleAvailabilityMode. +type CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode int32 + +const ( + // Not specified. + CriterionCategoryLocaleAvailabilityModeEnum_UNSPECIFIED CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 0 + // Used for return value only. Represents value unknown in this version. + CriterionCategoryLocaleAvailabilityModeEnum_UNKNOWN CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 1 + // The category is available to campaigns of all locales. + CriterionCategoryLocaleAvailabilityModeEnum_ALL_LOCALES CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 2 + // The category is available to campaigns within a list of countries, + // regardless of language. + CriterionCategoryLocaleAvailabilityModeEnum_COUNTRY_AND_ALL_LANGUAGES CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 3 + // The category is available to campaigns within a list of languages, + // regardless of country. + CriterionCategoryLocaleAvailabilityModeEnum_LANGUAGE_AND_ALL_COUNTRIES CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 4 + // The category is available to campaigns within a list of country, language + // pairs. 
+ CriterionCategoryLocaleAvailabilityModeEnum_COUNTRY_AND_LANGUAGE CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode = 5 +) + +var CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ALL_LOCALES", + 3: "COUNTRY_AND_ALL_LANGUAGES", + 4: "LANGUAGE_AND_ALL_COUNTRIES", + 5: "COUNTRY_AND_LANGUAGE", +} +var CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ALL_LOCALES": 2, + "COUNTRY_AND_ALL_LANGUAGES": 3, + "LANGUAGE_AND_ALL_COUNTRIES": 4, + "COUNTRY_AND_LANGUAGE": 5, +} + +func (x CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode) String() string { + return proto.EnumName(CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode_name, int32(x)) +} +func (CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_locale_availability_mode_3573c0447f09f6e1, []int{0, 0} +} + +// Describes locale availabilty mode for a criterion availability - whether +// it's available globally, or a particular country with all languages, or a +// particular language with all countries, or a country-language pair. +type CriterionCategoryLocaleAvailabilityModeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionCategoryLocaleAvailabilityModeEnum) Reset() { + *m = CriterionCategoryLocaleAvailabilityModeEnum{} +} +func (m *CriterionCategoryLocaleAvailabilityModeEnum) String() string { + return proto.CompactTextString(m) +} +func (*CriterionCategoryLocaleAvailabilityModeEnum) ProtoMessage() {} +func (*CriterionCategoryLocaleAvailabilityModeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_category_locale_availability_mode_3573c0447f09f6e1, []int{0} +} +func (m *CriterionCategoryLocaleAvailabilityModeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum.Unmarshal(m, b) +} +func (m *CriterionCategoryLocaleAvailabilityModeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum.Marshal(b, m, deterministic) +} +func (dst *CriterionCategoryLocaleAvailabilityModeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum.Merge(dst, src) +} +func (m *CriterionCategoryLocaleAvailabilityModeEnum) XXX_Size() int { + return xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum.Size(m) +} +func (m *CriterionCategoryLocaleAvailabilityModeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionCategoryLocaleAvailabilityModeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CriterionCategoryLocaleAvailabilityModeEnum)(nil), "google.ads.googleads.v1.enums.CriterionCategoryLocaleAvailabilityModeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode", CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode_name, CriterionCategoryLocaleAvailabilityModeEnum_CriterionCategoryLocaleAvailabilityMode_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/enums/criterion_category_locale_availability_mode.proto", fileDescriptor_criterion_category_locale_availability_mode_3573c0447f09f6e1) +} + +var fileDescriptor_criterion_category_locale_availability_mode_3573c0447f09f6e1 = []byte{ + // 381 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x51, 0x4f, 0x8b, 0x9b, 0x40, + 0x1c, 0xad, 0xa6, 0x7f, 0x60, 0x72, 0xa8, 0x48, 0x0f, 0x6d, 0x68, 0x0a, 0xc9, 0xa5, 0x87, 0x96, + 0x11, 0xe9, 0x6d, 0x7a, 0x9a, 0x18, 0x2b, 0xa1, 0x56, 0x43, 0xad, 0x29, 0x2d, 0x82, 0x4c, 0x74, + 0x90, 0x01, 0x9d, 0x09, 0x8e, 0x09, 0xe4, 0xb8, 0x5f, 0x65, 0x8f, 0x7b, 0xdb, 0xaf, 0xb1, 0xdf, + 0x62, 0xaf, 0xfb, 0x29, 0x16, 0xc7, 0x28, 0xb9, 0xec, 0x92, 0x8b, 0x3c, 0xe7, 0xbd, 0xdf, 0x7b, + 0x33, 0xef, 0x07, 0xc2, 0x42, 0x88, 0xa2, 0xa4, 0x16, 0xc9, 0xa5, 0xd5, 0xc1, 0x16, 0x1d, 0x6c, + 0x8b, 0xf2, 0x7d, 0x25, 0xad, 0xac, 0x66, 0x0d, 0xad, 0x99, 0xe0, 0x69, 0x46, 0x1a, 0x5a, 0x88, + 0xfa, 0x98, 0x96, 0x22, 0x23, 0x25, 0x4d, 0xc9, 0x81, 0xb0, 0x92, 0x6c, 0x59, 0xc9, 0x9a, 0x63, + 0x5a, 0x89, 0x9c, 0xc2, 0x5d, 0x2d, 0x1a, 0x61, 0x4e, 0x3b, 0x17, 0x48, 0x72, 0x09, 0x07, 0x43, + 0x78, 0xb0, 0xa1, 0x32, 0x9c, 0x7c, 0xec, 0xf3, 0x76, 0xcc, 0x22, 0x9c, 0x8b, 0x86, 0x34, 0x4c, + 0x70, 0xd9, 0x0d, 0xcf, 0xef, 0x35, 0xf0, 0xc5, 0xe9, 0x23, 0x9d, 0x53, 0xa2, 0xaf, 0x02, 0xf1, + 0x59, 0xde, 0x2f, 0x91, 0x53, 0x97, 0xef, 0xab, 0xf9, 0xad, 0x06, 0x3e, 0x5f, 0xa8, 0x37, 0xdf, + 0x82, 0x71, 0x1c, 0x44, 0x6b, 0xd7, 0x59, 0xfd, 0x58, 0xb9, 0x4b, 0xe3, 0x85, 0x39, 0x06, 0x6f, + 0xe2, 0xe0, 0x67, 0x10, 0xfe, 0x0d, 0x0c, 0xad, 0x65, 0xb1, 0xef, 0xa7, 0x7e, 0xe8, 0x60, 0xdf, + 0x8d, 0x0c, 0xdd, 0x9c, 0x82, 0x0f, 0x4e, 0x18, 0x07, 0x7f, 0x7e, 0xff, 0x4b, 0x71, 0xb0, 0x4c, + 0x15, 0x89, 0x03, 0x2f, 0xc6, 0x9e, 0x1b, 0x19, 0x23, 0xf3, 0x13, 0x98, 0xf4, 0xbf, 0x03, 0xdf, + 0xe9, 0x57, 0x6e, 0x64, 0xbc, 0x34, 0xdf, 0x83, 0x77, 0xe7, 0xe3, 0xbd, 0xd6, 0x78, 0xb5, 0xb8, + 0xd2, 0xc1, 0x2c, 0x13, 0x15, 0x7c, 0xb6, 0xa7, 0xc5, 0xd7, 0x0b, 0x9f, 0xb5, 0x6e, 0x7b, 0x5b, + 0x6b, 0xff, 0x17, 0x27, 0xbb, 0x42, 0x94, 0x84, 0x17, 0x50, 0xd4, 0x85, 0x55, 0x50, 0xae, 0x5a, + 0xed, 0xf7, 0xba, 0x63, 0xf2, 0x89, 0x35, 0x7f, 0x57, 0xdf, 0x6b, 0x7d, 0xe4, 0x61, 0x7c, 0xa3, + 0x4f, 0xbd, 0xce, 0x0a, 0xe7, 0x12, 0x76, 0xb0, 0x45, 0x1b, 0x1b, 0xb6, 0x95, 0xcb, 0xbb, 0x9e, + 0x4f, 0x70, 0x2e, 0x93, 0x81, 0x4f, 0x36, 0x76, 0xa2, 0xf8, 0x07, 0x7d, 0xd6, 0x1d, 0x22, 0x84, + 0x73, 0x89, 0xd0, 0xa0, 0x40, 0x68, 0x63, 0x23, 0xa4, 0x34, 0xdb, 0xd7, 0xea, 0x62, 0xdf, 0x1e, + 0x03, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xb7, 0x95, 0xe6, 0x7e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_system_serving_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_system_serving_status.pb.go new file mode 100644 index 0000000..0d3724e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_system_serving_status.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/criterion_system_serving_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates criterion system serving statuses. +type CriterionSystemServingStatusEnum_CriterionSystemServingStatus int32 + +const ( + // Not specified. + CriterionSystemServingStatusEnum_UNSPECIFIED CriterionSystemServingStatusEnum_CriterionSystemServingStatus = 0 + // The value is unknown in this version. + CriterionSystemServingStatusEnum_UNKNOWN CriterionSystemServingStatusEnum_CriterionSystemServingStatus = 1 + // Eligible. + CriterionSystemServingStatusEnum_ELIGIBLE CriterionSystemServingStatusEnum_CriterionSystemServingStatus = 2 + // Low search volume. + CriterionSystemServingStatusEnum_RARELY_SERVED CriterionSystemServingStatusEnum_CriterionSystemServingStatus = 3 +) + +var CriterionSystemServingStatusEnum_CriterionSystemServingStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ELIGIBLE", + 3: "RARELY_SERVED", +} +var CriterionSystemServingStatusEnum_CriterionSystemServingStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ELIGIBLE": 2, + "RARELY_SERVED": 3, +} + +func (x CriterionSystemServingStatusEnum_CriterionSystemServingStatus) String() string { + return proto.EnumName(CriterionSystemServingStatusEnum_CriterionSystemServingStatus_name, int32(x)) +} +func (CriterionSystemServingStatusEnum_CriterionSystemServingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_criterion_system_serving_status_8ef5a705312ec933, []int{0, 0} +} + +// Container for enum describing possible criterion system serving statuses. 
+type CriterionSystemServingStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionSystemServingStatusEnum) Reset() { *m = CriterionSystemServingStatusEnum{} } +func (m *CriterionSystemServingStatusEnum) String() string { return proto.CompactTextString(m) } +func (*CriterionSystemServingStatusEnum) ProtoMessage() {} +func (*CriterionSystemServingStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_system_serving_status_8ef5a705312ec933, []int{0} +} +func (m *CriterionSystemServingStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionSystemServingStatusEnum.Unmarshal(m, b) +} +func (m *CriterionSystemServingStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionSystemServingStatusEnum.Marshal(b, m, deterministic) +} +func (dst *CriterionSystemServingStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionSystemServingStatusEnum.Merge(dst, src) +} +func (m *CriterionSystemServingStatusEnum) XXX_Size() int { + return xxx_messageInfo_CriterionSystemServingStatusEnum.Size(m) +} +func (m *CriterionSystemServingStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionSystemServingStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionSystemServingStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CriterionSystemServingStatusEnum)(nil), "google.ads.googleads.v1.enums.CriterionSystemServingStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CriterionSystemServingStatusEnum_CriterionSystemServingStatus", CriterionSystemServingStatusEnum_CriterionSystemServingStatus_name, CriterionSystemServingStatusEnum_CriterionSystemServingStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/criterion_system_serving_status.proto", fileDescriptor_criterion_system_serving_status_8ef5a705312ec933) +} + +var fileDescriptor_criterion_system_serving_status_8ef5a705312ec933 = []byte{ + // 328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x4a, 0xf3, 0x40, + 0x18, 0xfc, 0x93, 0xc2, 0xaf, 0x6c, 0x15, 0x63, 0x8e, 0xd2, 0x82, 0xed, 0x03, 0x6c, 0x08, 0xde, + 0xd6, 0x53, 0xd2, 0xae, 0x25, 0x58, 0x62, 0x69, 0x68, 0x44, 0x89, 0x84, 0xd8, 0x84, 0x25, 0xd0, + 0xec, 0x96, 0x7c, 0xdb, 0x82, 0x47, 0x5f, 0xc5, 0xa3, 0x8f, 0xe2, 0xa3, 0xf8, 0x00, 0x9e, 0x25, + 0xbb, 0x4d, 0x6f, 0xe6, 0xb2, 0x0c, 0xfb, 0xcd, 0x37, 0xf3, 0xcd, 0xa0, 0x09, 0x13, 0x82, 0x6d, + 0x0a, 0x27, 0xcb, 0xc1, 0xd1, 0xb0, 0x41, 0x7b, 0xd7, 0x29, 0xf8, 0xae, 0x02, 0x67, 0x5d, 0x97, + 0xb2, 0xa8, 0x4b, 0xc1, 0x53, 0x78, 0x03, 0x59, 0x54, 0x29, 0x14, 0xf5, 0xbe, 0xe4, 0x2c, 0x05, + 0x99, 0xc9, 0x1d, 0xe0, 0x6d, 0x2d, 0xa4, 0xb0, 0x87, 0x7a, 0x13, 0x67, 0x39, 0xe0, 0xa3, 0x08, + 0xde, 0xbb, 0x58, 0x89, 0x5c, 0x0d, 0x5a, 0x8f, 0x6d, 0xe9, 0x64, 0x9c, 0x0b, 0x99, 0xc9, 0x52, + 0xf0, 0xc3, 0xf2, 0xf8, 0xdd, 0x40, 0xd7, 0x93, 0xd6, 0x26, 0x52, 0x2e, 0x91, 0x36, 0x89, 0x94, + 0x07, 0xe5, 0xbb, 0x6a, 0xfc, 0x82, 0x06, 0x5d, 0x1c, 0xfb, 0x02, 0xf5, 0x57, 0x61, 0xb4, 0xa0, + 0x93, 0xe0, 0x2e, 0xa0, 0x53, 0xeb, 0x9f, 0xdd, 0x47, 0x27, 0xab, 0xf0, 0x3e, 0x7c, 0x78, 0x0c, + 0x2d, 0xc3, 0x3e, 0x43, 0xa7, 0x74, 0x1e, 0xcc, 0x02, 0x7f, 0x4e, 0x2d, 0xd3, 0xbe, 0x44, 0xe7, + 0x4b, 0x6f, 0x49, 0xe7, 0x4f, 0x69, 0x44, 0x97, 0x31, 0x9d, 0x5a, 0x3d, 0xff, 0xc7, 0x40, 0xa3, + 0xb5, 0xa8, 0x70, 0x67, 0x0e, 0x7f, 0xd4, 0x75, 
0xc2, 0xa2, 0x09, 0xb3, 0x30, 0x9e, 0xfd, 0x83, + 0x06, 0x13, 0x9b, 0x8c, 0x33, 0x2c, 0x6a, 0xe6, 0xb0, 0x82, 0xab, 0xa8, 0x6d, 0xc1, 0xdb, 0x12, + 0xfe, 0xe8, 0xfb, 0x56, 0xbd, 0x1f, 0x66, 0x6f, 0xe6, 0x79, 0x9f, 0xe6, 0x70, 0xa6, 0xa5, 0xbc, + 0x1c, 0xb0, 0x86, 0x0d, 0x8a, 0x5d, 0xdc, 0x54, 0x02, 0x5f, 0xed, 0x3c, 0xf1, 0x72, 0x48, 0x8e, + 0xf3, 0x24, 0x76, 0x13, 0x35, 0xff, 0x36, 0x47, 0xfa, 0x93, 0x10, 0x2f, 0x07, 0x42, 0x8e, 0x0c, + 0x42, 0x62, 0x97, 0x10, 0xc5, 0x79, 0xfd, 0xaf, 0x0e, 0xbb, 0xf9, 0x0d, 0x00, 0x00, 0xff, 0xff, + 0x2c, 0x98, 0x27, 0x97, 0x07, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_type.pb.go new file mode 100644 index 0000000..b8b317a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/criterion_type.pb.go @@ -0,0 +1,249 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/criterion_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible criterion types. +type CriterionTypeEnum_CriterionType int32 + +const ( + // Not specified. + CriterionTypeEnum_UNSPECIFIED CriterionTypeEnum_CriterionType = 0 + // Used for return value only. Represents value unknown in this version. + CriterionTypeEnum_UNKNOWN CriterionTypeEnum_CriterionType = 1 + // Keyword. e.g. 'mars cruise'. + CriterionTypeEnum_KEYWORD CriterionTypeEnum_CriterionType = 2 + // Placement, aka Website. e.g. 'www.flowers4sale.com' + CriterionTypeEnum_PLACEMENT CriterionTypeEnum_CriterionType = 3 + // Mobile application categories to target. + CriterionTypeEnum_MOBILE_APP_CATEGORY CriterionTypeEnum_CriterionType = 4 + // Mobile applications to target. + CriterionTypeEnum_MOBILE_APPLICATION CriterionTypeEnum_CriterionType = 5 + // Devices to target. + CriterionTypeEnum_DEVICE CriterionTypeEnum_CriterionType = 6 + // Locations to target. + CriterionTypeEnum_LOCATION CriterionTypeEnum_CriterionType = 7 + // Listing groups to target. + CriterionTypeEnum_LISTING_GROUP CriterionTypeEnum_CriterionType = 8 + // Ad Schedule. + CriterionTypeEnum_AD_SCHEDULE CriterionTypeEnum_CriterionType = 9 + // Age range. + CriterionTypeEnum_AGE_RANGE CriterionTypeEnum_CriterionType = 10 + // Gender. + CriterionTypeEnum_GENDER CriterionTypeEnum_CriterionType = 11 + // Income Range. + CriterionTypeEnum_INCOME_RANGE CriterionTypeEnum_CriterionType = 12 + // Parental status. + CriterionTypeEnum_PARENTAL_STATUS CriterionTypeEnum_CriterionType = 13 + // YouTube Video. + CriterionTypeEnum_YOUTUBE_VIDEO CriterionTypeEnum_CriterionType = 14 + // YouTube Channel. + CriterionTypeEnum_YOUTUBE_CHANNEL CriterionTypeEnum_CriterionType = 15 + // User list. + CriterionTypeEnum_USER_LIST CriterionTypeEnum_CriterionType = 16 + // Proximity. 
+ CriterionTypeEnum_PROXIMITY CriterionTypeEnum_CriterionType = 17 + // A topic target on the display network (e.g. "Pets & Animals"). + CriterionTypeEnum_TOPIC CriterionTypeEnum_CriterionType = 18 + // Listing scope to target. + CriterionTypeEnum_LISTING_SCOPE CriterionTypeEnum_CriterionType = 19 + // Language. + CriterionTypeEnum_LANGUAGE CriterionTypeEnum_CriterionType = 20 + // IpBlock. + CriterionTypeEnum_IP_BLOCK CriterionTypeEnum_CriterionType = 21 + // Content Label for category exclusion. + CriterionTypeEnum_CONTENT_LABEL CriterionTypeEnum_CriterionType = 22 + // Carrier. + CriterionTypeEnum_CARRIER CriterionTypeEnum_CriterionType = 23 + // A category the user is interested in. + CriterionTypeEnum_USER_INTEREST CriterionTypeEnum_CriterionType = 24 + // Webpage criterion for dynamic search ads. + CriterionTypeEnum_WEBPAGE CriterionTypeEnum_CriterionType = 25 + // Operating system version. + CriterionTypeEnum_OPERATING_SYSTEM_VERSION CriterionTypeEnum_CriterionType = 26 + // App payment model. + CriterionTypeEnum_APP_PAYMENT_MODEL CriterionTypeEnum_CriterionType = 27 + // Mobile device. + CriterionTypeEnum_MOBILE_DEVICE CriterionTypeEnum_CriterionType = 28 + // Custom affinity. + CriterionTypeEnum_CUSTOM_AFFINITY CriterionTypeEnum_CriterionType = 29 + // Custom intent. + CriterionTypeEnum_CUSTOM_INTENT CriterionTypeEnum_CriterionType = 30 + // Location group. + CriterionTypeEnum_LOCATION_GROUP CriterionTypeEnum_CriterionType = 31 +) + +var CriterionTypeEnum_CriterionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "KEYWORD", + 3: "PLACEMENT", + 4: "MOBILE_APP_CATEGORY", + 5: "MOBILE_APPLICATION", + 6: "DEVICE", + 7: "LOCATION", + 8: "LISTING_GROUP", + 9: "AD_SCHEDULE", + 10: "AGE_RANGE", + 11: "GENDER", + 12: "INCOME_RANGE", + 13: "PARENTAL_STATUS", + 14: "YOUTUBE_VIDEO", + 15: "YOUTUBE_CHANNEL", + 16: "USER_LIST", + 17: "PROXIMITY", + 18: "TOPIC", + 19: "LISTING_SCOPE", + 20: "LANGUAGE", + 21: "IP_BLOCK", + 22: "CONTENT_LABEL", + 23: "CARRIER", + 24: "USER_INTEREST", + 25: "WEBPAGE", + 26: "OPERATING_SYSTEM_VERSION", + 27: "APP_PAYMENT_MODEL", + 28: "MOBILE_DEVICE", + 29: "CUSTOM_AFFINITY", + 30: "CUSTOM_INTENT", + 31: "LOCATION_GROUP", +} +var CriterionTypeEnum_CriterionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "KEYWORD": 2, + "PLACEMENT": 3, + "MOBILE_APP_CATEGORY": 4, + "MOBILE_APPLICATION": 5, + "DEVICE": 6, + "LOCATION": 7, + "LISTING_GROUP": 8, + "AD_SCHEDULE": 9, + "AGE_RANGE": 10, + "GENDER": 11, + "INCOME_RANGE": 12, + "PARENTAL_STATUS": 13, + "YOUTUBE_VIDEO": 14, + "YOUTUBE_CHANNEL": 15, + "USER_LIST": 16, + "PROXIMITY": 17, + "TOPIC": 18, + "LISTING_SCOPE": 19, + "LANGUAGE": 20, + "IP_BLOCK": 21, + "CONTENT_LABEL": 22, + "CARRIER": 23, + "USER_INTEREST": 24, + "WEBPAGE": 25, + "OPERATING_SYSTEM_VERSION": 26, + "APP_PAYMENT_MODEL": 27, + "MOBILE_DEVICE": 28, + "CUSTOM_AFFINITY": 29, + "CUSTOM_INTENT": 30, + "LOCATION_GROUP": 31, +} + +func (x CriterionTypeEnum_CriterionType) String() string { + return proto.EnumName(CriterionTypeEnum_CriterionType_name, int32(x)) +} +func (CriterionTypeEnum_CriterionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_criterion_type_6c3b4f09ae204a99, []int{0, 0} +} + +// The possible types of a criterion. 
+type CriterionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionTypeEnum) Reset() { *m = CriterionTypeEnum{} } +func (m *CriterionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*CriterionTypeEnum) ProtoMessage() {} +func (*CriterionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_type_6c3b4f09ae204a99, []int{0} +} +func (m *CriterionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionTypeEnum.Unmarshal(m, b) +} +func (m *CriterionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *CriterionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionTypeEnum.Merge(dst, src) +} +func (m *CriterionTypeEnum) XXX_Size() int { + return xxx_messageInfo_CriterionTypeEnum.Size(m) +} +func (m *CriterionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CriterionTypeEnum)(nil), "google.ads.googleads.v1.enums.CriterionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CriterionTypeEnum_CriterionType", CriterionTypeEnum_CriterionType_name, CriterionTypeEnum_CriterionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/criterion_type.proto", fileDescriptor_criterion_type_6c3b4f09ae204a99) +} + +var fileDescriptor_criterion_type_6c3b4f09ae204a99 = []byte{ + // 621 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xdd, 0x4e, 0xdb, 0x30, + 0x14, 0x1e, 0xe5, 0xdf, 0x50, 0x70, 0xcd, 0xf8, 0x19, 0x83, 0x4d, 0xf0, 0x00, 0xa9, 0xba, 0xdd, + 0x65, 0x57, 0x8e, 0x73, 0x08, 0x16, 0xa9, 0x6d, 0x39, 0x4e, 0x59, 0xa7, 0x4a, 0x56, 0x47, 0xab, + 0xaa, 0x12, 0x24, 0x55, 0x53, 0x90, 0x78, 0x9d, 0x5d, 0xee, 0x21, 0xf6, 0x00, 0xbb, 0xde, 0x3b, + 0x4c, 0xda, 0x53, 0x4c, 0x4e, 0x5a, 0x36, 0x2e, 0xb6, 0x9b, 0xe8, 0xe4, 0x7c, 0xe7, 0x7c, 0xe7, + 0x3b, 0x3f, 0x46, 0xef, 0x46, 0x79, 0x3e, 0xba, 0x1d, 0x36, 0xfb, 0x83, 0xa2, 0x59, 0x99, 0xce, + 0x7a, 0x68, 0x35, 0x87, 0xd9, 0xfd, 0x5d, 0xd1, 0xbc, 0x99, 0x8e, 0x67, 0xc3, 0xe9, 0x38, 0xcf, + 0xec, 0xec, 0x71, 0x32, 0xf4, 0x26, 0xd3, 0x7c, 0x96, 0x93, 0xd3, 0x2a, 0xd0, 0xeb, 0x0f, 0x0a, + 0xef, 0x29, 0xc7, 0x7b, 0x68, 0x79, 0x65, 0xce, 0xf1, 0xc9, 0x82, 0x72, 0x32, 0x6e, 0xf6, 0xb3, + 0x2c, 0x9f, 0xf5, 0x67, 0xe3, 0x3c, 0x2b, 0xaa, 0xe4, 0xf3, 0x1f, 0x2b, 0xa8, 0xc1, 0x16, 0xac, + 0xe6, 0x71, 0x32, 0x84, 0xec, 0xfe, 0xee, 0xfc, 0xdb, 0x0a, 0xaa, 0x3f, 0xf3, 0x92, 0x5d, 0xb4, + 0x95, 0x8a, 0x44, 0x01, 0xe3, 0x17, 0x1c, 0x42, 0xfc, 0x82, 0x6c, 0xa1, 0xf5, 0x54, 0x5c, 0x09, + 0x79, 0x2d, 0xf0, 0x92, 0xfb, 0xb9, 0x82, 0xee, 0xb5, 0xd4, 0x21, 0xae, 0x91, 0x3a, 0xda, 0x54, + 0x31, 0x65, 0xd0, 0x06, 0x61, 0xf0, 0x32, 0x39, 0x44, 0x7b, 0x6d, 0x19, 0xf0, 0x18, 0x2c, 0x55, + 0xca, 0x32, 0x6a, 0x20, 0x92, 0xba, 0x8b, 0x57, 0xc8, 0x01, 0x22, 0x7f, 0x80, 0x98, 0x33, 0x6a, + 0xb8, 0x14, 0x78, 0x95, 0x20, 0xb4, 0x16, 0x42, 0x87, 0x33, 0xc0, 0x6b, 0x64, 0x1b, 0x6d, 0xc4, + 0x72, 0x8e, 0xac, 0x93, 0x06, 0xaa, 0xc7, 0x3c, 0x31, 0x5c, 0x44, 0x36, 0xd2, 0x32, 0x55, 0x78, + 0xc3, 0xe9, 0xa2, 0xa1, 0x4d, 0xd8, 0x25, 0x84, 0x69, 0x0c, 0x78, 0xd3, 0x55, 0xa7, 0x11, 0x58, + 0x4d, 0x45, 0x04, 0x18, 0x39, 0xb2, 0x08, 0x44, 0x08, 0x1a, 0x6f, 0x11, 0x8c, 0xb6, 0xb9, 0x60, + 
0xb2, 0xbd, 0x40, 0xb7, 0xc9, 0x1e, 0xda, 0x55, 0x54, 0x83, 0x30, 0x34, 0xb6, 0x89, 0xa1, 0x26, + 0x4d, 0x70, 0xdd, 0x55, 0xe9, 0xca, 0xd4, 0xa4, 0x01, 0xd8, 0x0e, 0x0f, 0x41, 0xe2, 0x1d, 0x17, + 0xb7, 0x70, 0xb1, 0x4b, 0x2a, 0x04, 0xc4, 0x78, 0xd7, 0x55, 0x4a, 0x13, 0xd0, 0xd6, 0x49, 0xc2, + 0xb8, 0x6c, 0x5b, 0xcb, 0x8f, 0xbc, 0xcd, 0x4d, 0x17, 0x37, 0xc8, 0x26, 0x5a, 0x35, 0x52, 0x71, + 0x86, 0xc9, 0xdf, 0xb2, 0x13, 0x26, 0x15, 0xe0, 0xbd, 0xb2, 0x2f, 0x2a, 0xa2, 0x94, 0x46, 0x80, + 0x5f, 0xba, 0x3f, 0xae, 0x6c, 0x10, 0x4b, 0x76, 0x85, 0xf7, 0x5d, 0x38, 0x93, 0xc2, 0x80, 0x30, + 0x36, 0xa6, 0x01, 0xc4, 0xf8, 0xc0, 0xcd, 0x97, 0x51, 0xad, 0x39, 0x68, 0x7c, 0xe8, 0xf0, 0xb2, + 0x2e, 0x17, 0x06, 0x34, 0x24, 0x06, 0x1f, 0x39, 0xfc, 0x1a, 0x02, 0xe5, 0xd8, 0x5e, 0x91, 0x13, + 0x74, 0x24, 0x15, 0x68, 0x5a, 0x15, 0xec, 0x26, 0x06, 0xda, 0xb6, 0x03, 0x3a, 0x71, 0x33, 0x3c, + 0x26, 0xfb, 0xa8, 0xe1, 0xf6, 0xa0, 0x68, 0xd7, 0xed, 0xc7, 0xb6, 0x65, 0x08, 0x31, 0x7e, 0xed, + 0x48, 0xe7, 0xcb, 0x98, 0xcf, 0xfe, 0xc4, 0x35, 0xcd, 0xd2, 0xc4, 0xc8, 0xb6, 0xa5, 0x17, 0x17, + 0x5c, 0xb8, 0xb6, 0x4e, 0x4b, 0x71, 0x95, 0x93, 0x97, 0x12, 0xf1, 0x1b, 0x42, 0xd0, 0xce, 0x62, + 0x47, 0xf3, 0xb5, 0xbc, 0x0d, 0x7e, 0x2e, 0xa1, 0xb3, 0x9b, 0xfc, 0xce, 0xfb, 0xef, 0x69, 0x06, + 0xe4, 0xd9, 0x8d, 0x29, 0x77, 0x90, 0x6a, 0xe9, 0x53, 0x30, 0x4f, 0x1a, 0xe5, 0xb7, 0xfd, 0x6c, + 0xe4, 0xe5, 0xd3, 0x51, 0x73, 0x34, 0xcc, 0xca, 0x73, 0x5d, 0xbc, 0x89, 0xc9, 0xb8, 0xf8, 0xc7, + 0x13, 0xf9, 0x50, 0x7e, 0xbf, 0xd4, 0x96, 0x23, 0x4a, 0xbf, 0xd6, 0x4e, 0xa3, 0x8a, 0x8a, 0x0e, + 0x0a, 0xaf, 0x32, 0x9d, 0xd5, 0x69, 0x79, 0xee, 0xca, 0x8b, 0xef, 0x0b, 0xbc, 0x47, 0x07, 0x45, + 0xef, 0x09, 0xef, 0x75, 0x5a, 0xbd, 0x12, 0xff, 0x55, 0x3b, 0xab, 0x9c, 0xbe, 0x4f, 0x07, 0x85, + 0xef, 0x3f, 0x45, 0xf8, 0x7e, 0xa7, 0xe5, 0xfb, 0x65, 0xcc, 0xe7, 0xb5, 0x52, 0xd8, 0xfb, 0xdf, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x56, 0x3e, 0x57, 0xb9, 0xba, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_member_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_member_type.pb.go new file mode 100644 index 0000000..957633e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_member_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/custom_interest_member_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible custom interest member types. +type CustomInterestMemberTypeEnum_CustomInterestMemberType int32 + +const ( + // Not specified. + CustomInterestMemberTypeEnum_UNSPECIFIED CustomInterestMemberTypeEnum_CustomInterestMemberType = 0 + // Used for return value only. Represents value unknown in this version. 
+ CustomInterestMemberTypeEnum_UNKNOWN CustomInterestMemberTypeEnum_CustomInterestMemberType = 1 + // Custom interest member type KEYWORD. + CustomInterestMemberTypeEnum_KEYWORD CustomInterestMemberTypeEnum_CustomInterestMemberType = 2 + // Custom interest member type URL. + CustomInterestMemberTypeEnum_URL CustomInterestMemberTypeEnum_CustomInterestMemberType = 3 +) + +var CustomInterestMemberTypeEnum_CustomInterestMemberType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "KEYWORD", + 3: "URL", +} +var CustomInterestMemberTypeEnum_CustomInterestMemberType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "KEYWORD": 2, + "URL": 3, +} + +func (x CustomInterestMemberTypeEnum_CustomInterestMemberType) String() string { + return proto.EnumName(CustomInterestMemberTypeEnum_CustomInterestMemberType_name, int32(x)) +} +func (CustomInterestMemberTypeEnum_CustomInterestMemberType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_member_type_320b7c5ee93d2530, []int{0, 0} +} + +// The types of custom interest member, either KEYWORD or URL. +type CustomInterestMemberTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestMemberTypeEnum) Reset() { *m = CustomInterestMemberTypeEnum{} } +func (m *CustomInterestMemberTypeEnum) String() string { return proto.CompactTextString(m) } +func (*CustomInterestMemberTypeEnum) ProtoMessage() {} +func (*CustomInterestMemberTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_member_type_320b7c5ee93d2530, []int{0} +} +func (m *CustomInterestMemberTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestMemberTypeEnum.Unmarshal(m, b) +} +func (m *CustomInterestMemberTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestMemberTypeEnum.Marshal(b, m, deterministic) +} +func (dst *CustomInterestMemberTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestMemberTypeEnum.Merge(dst, src) +} +func (m *CustomInterestMemberTypeEnum) XXX_Size() int { + return xxx_messageInfo_CustomInterestMemberTypeEnum.Size(m) +} +func (m *CustomInterestMemberTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestMemberTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestMemberTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomInterestMemberTypeEnum)(nil), "google.ads.googleads.v1.enums.CustomInterestMemberTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomInterestMemberTypeEnum_CustomInterestMemberType", CustomInterestMemberTypeEnum_CustomInterestMemberType_name, CustomInterestMemberTypeEnum_CustomInterestMemberType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/custom_interest_member_type.proto", fileDescriptor_custom_interest_member_type_320b7c5ee93d2530) +} + +var fileDescriptor_custom_interest_member_type_320b7c5ee93d2530 = []byte{ + // 313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x6a, 0xc2, 0x30, + 0x18, 0x9f, 0x15, 0x26, 0xc4, 0xc3, 0x4a, 0x4f, 0x63, 0xe8, 0x41, 0x1f, 0x20, 0xa1, 0xec, 0x96, + 0x1d, 0x46, 0xd5, 0x4e, 0xc4, 0xad, 0x8a, 0x9b, 0x95, 0x8d, 0x42, 0xa9, 0x36, 0x84, 0x82, 0x49, + 0x4a, 0x93, 0x0a, 0xbe, 0xce, 0x8e, 0x7b, 0x94, 0x3d, 0xca, 0x8e, 0x7b, 0x82, 0xd1, 0xc4, 0xf6, + 0xd6, 0x5d, 0xca, 0xaf, 0xf9, 0x7d, 
0xbf, 0x3f, 0xdf, 0x07, 0x1e, 0xa9, 0x10, 0xf4, 0x48, 0x50, + 0x92, 0x4a, 0x64, 0x60, 0x85, 0x4e, 0x2e, 0x22, 0xbc, 0x64, 0x12, 0x1d, 0x4a, 0xa9, 0x04, 0x8b, + 0x33, 0xae, 0x48, 0x41, 0xa4, 0x8a, 0x19, 0x61, 0x7b, 0x52, 0xc4, 0xea, 0x9c, 0x13, 0x98, 0x17, + 0x42, 0x09, 0x67, 0x68, 0x54, 0x30, 0x49, 0x25, 0x6c, 0x0c, 0xe0, 0xc9, 0x85, 0xda, 0xe0, 0x6e, + 0x50, 0xfb, 0xe7, 0x19, 0x4a, 0x38, 0x17, 0x2a, 0x51, 0x99, 0xe0, 0xd2, 0x88, 0xc7, 0x1c, 0x0c, + 0xa6, 0x3a, 0x61, 0x71, 0x09, 0x78, 0xd1, 0xfe, 0x6f, 0xe7, 0x9c, 0xf8, 0xbc, 0x64, 0xe3, 0x00, + 0xdc, 0xb6, 0xf1, 0xce, 0x0d, 0xe8, 0x6f, 0x83, 0xd7, 0xb5, 0x3f, 0x5d, 0x3c, 0x2d, 0xfc, 0x99, + 0x7d, 0xe5, 0xf4, 0x41, 0x6f, 0x1b, 0x2c, 0x83, 0xd5, 0x2e, 0xb0, 0x3b, 0xd5, 0xcf, 0xd2, 0x7f, + 0xdf, 0xad, 0x36, 0x33, 0xdb, 0x72, 0x7a, 0xa0, 0xbb, 0xdd, 0x3c, 0xdb, 0xdd, 0xc9, 0x6f, 0x07, + 0x8c, 0x0e, 0x82, 0xc1, 0x7f, 0x3b, 0x4f, 0x86, 0x6d, 0x99, 0xeb, 0xaa, 0xf4, 0xba, 0xf3, 0x31, + 0xb9, 0xe8, 0xa9, 0x38, 0x26, 0x9c, 0x42, 0x51, 0x50, 0x44, 0x09, 0xd7, 0x2b, 0xd5, 0x47, 0xcc, + 0x33, 0xd9, 0x72, 0xd3, 0x07, 0xfd, 0xfd, 0xb4, 0xba, 0x73, 0xcf, 0xfb, 0xb2, 0x86, 0x73, 0x63, + 0xe5, 0xa5, 0x12, 0x1a, 0x58, 0xa1, 0xd0, 0x85, 0xd5, 0xfe, 0xf2, 0xbb, 0xe6, 0x23, 0x2f, 0x95, + 0x51, 0xc3, 0x47, 0xa1, 0x1b, 0x69, 0xfe, 0xc7, 0x1a, 0x99, 0x47, 0x8c, 0xbd, 0x54, 0x62, 0xdc, + 0x4c, 0x60, 0x1c, 0xba, 0x18, 0xeb, 0x99, 0xfd, 0xb5, 0x2e, 0x76, 0xff, 0x17, 0x00, 0x00, 0xff, + 0xff, 0x7b, 0xc1, 0x91, 0x36, 0xeb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_status.pb.go new file mode 100644 index 0000000..d248bb0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_status.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/custom_interest_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible custom interest types. +type CustomInterestStatusEnum_CustomInterestStatus int32 + +const ( + // Not specified. + CustomInterestStatusEnum_UNSPECIFIED CustomInterestStatusEnum_CustomInterestStatus = 0 + // Used for return value only. Represents value unknown in this version. + CustomInterestStatusEnum_UNKNOWN CustomInterestStatusEnum_CustomInterestStatus = 1 + // Enabled status - custom interest is enabled and can be targeted to. + CustomInterestStatusEnum_ENABLED CustomInterestStatusEnum_CustomInterestStatus = 2 + // Removed status - custom interest is removed and cannot be used for + // targeting. 
+ CustomInterestStatusEnum_REMOVED CustomInterestStatusEnum_CustomInterestStatus = 3 +) + +var CustomInterestStatusEnum_CustomInterestStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var CustomInterestStatusEnum_CustomInterestStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x CustomInterestStatusEnum_CustomInterestStatus) String() string { + return proto.EnumName(CustomInterestStatusEnum_CustomInterestStatus_name, int32(x)) +} +func (CustomInterestStatusEnum_CustomInterestStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_status_4f118426068e3218, []int{0, 0} +} + +// The status of custom interest. +type CustomInterestStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestStatusEnum) Reset() { *m = CustomInterestStatusEnum{} } +func (m *CustomInterestStatusEnum) String() string { return proto.CompactTextString(m) } +func (*CustomInterestStatusEnum) ProtoMessage() {} +func (*CustomInterestStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_status_4f118426068e3218, []int{0} +} +func (m *CustomInterestStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestStatusEnum.Unmarshal(m, b) +} +func (m *CustomInterestStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestStatusEnum.Marshal(b, m, deterministic) +} +func (dst *CustomInterestStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestStatusEnum.Merge(dst, src) +} +func (m *CustomInterestStatusEnum) XXX_Size() int { + return xxx_messageInfo_CustomInterestStatusEnum.Size(m) +} +func (m *CustomInterestStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomInterestStatusEnum)(nil), "google.ads.googleads.v1.enums.CustomInterestStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomInterestStatusEnum_CustomInterestStatus", CustomInterestStatusEnum_CustomInterestStatus_name, CustomInterestStatusEnum_CustomInterestStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/custom_interest_status.proto", fileDescriptor_custom_interest_status_4f118426068e3218) +} + +var fileDescriptor_custom_interest_status_4f118426068e3218 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0xb5, 0x5b, 0x50, 0x48, 0x0f, 0x2e, 0xc5, 0x83, 0x8a, 0x3d, 0xb4, 0x1f, 0x90, 0xb0, 0x78, + 0x8b, 0xa7, 0x6c, 0x1b, 0x4b, 0x51, 0xd3, 0x62, 0xe9, 0x0a, 0xb2, 0x50, 0x62, 0x77, 0x09, 0x2b, + 0xdd, 0xa4, 0x74, 0xb2, 0xfd, 0x20, 0x8f, 0x7e, 0x8a, 0x9f, 0xe2, 0xc1, 0x6f, 0x90, 0x64, 0xdb, + 0x3d, 0x55, 0x2f, 0xe1, 0x4d, 0xde, 0xbc, 0x37, 0x6f, 0x06, 0x51, 0x65, 0x8c, 0x5a, 0xe7, 0x44, + 0x66, 0x40, 0x6a, 0xe8, 0xd0, 0x2e, 0x22, 0xb9, 0xae, 0x4a, 0x20, 0xab, 0x0a, 0xac, 0x29, 0x97, + 0x85, 0xb6, 0xf9, 0x36, 0x07, 0xbb, 0x04, 0x2b, 0x6d, 0x05, 0x78, 0xb3, 0x35, 0xd6, 0x74, 0x7b, + 0xb5, 0x00, 0xcb, 0x0c, 0x70, 0xa3, 0xc5, 0xbb, 0x08, 0x7b, 0xed, 0xf5, 0xcd, 0xc1, 0x7a, 0x53, + 0x10, 0xa9, 0xb5, 0xb1, 0xd2, 0x16, 0x46, 0xef, 0xc5, 0x83, 0x77, 0x74, 0x39, 0xf4, 0xe6, 0x93, + 0xbd, 0xf7, 0xdc, 
0x5b, 0x73, 0x5d, 0x95, 0x03, 0x81, 0x2e, 0x8e, 0x71, 0xdd, 0x73, 0xd4, 0x59, + 0x88, 0xf9, 0x8c, 0x0f, 0x27, 0xf7, 0x13, 0x3e, 0x0a, 0x4f, 0xba, 0x1d, 0x74, 0xb6, 0x10, 0x0f, + 0x62, 0xfa, 0x22, 0xc2, 0x96, 0x2b, 0xb8, 0x60, 0xf1, 0x23, 0x1f, 0x85, 0x81, 0x2b, 0x9e, 0xf9, + 0xd3, 0x34, 0xe1, 0xa3, 0xb0, 0x1d, 0xff, 0xb4, 0x50, 0x7f, 0x65, 0x4a, 0xfc, 0x6f, 0xde, 0xf8, + 0xea, 0xd8, 0xcc, 0x99, 0x0b, 0x3b, 0x6b, 0xbd, 0xc6, 0x7b, 0xad, 0x32, 0x6b, 0xa9, 0x15, 0x36, + 0x5b, 0x45, 0x54, 0xae, 0xfd, 0x2a, 0x87, 0xbb, 0x6d, 0x0a, 0xf8, 0xe3, 0x8c, 0x77, 0xfe, 0xfd, + 0x08, 0xda, 0x63, 0xc6, 0x3e, 0x83, 0xde, 0xb8, 0xb6, 0x62, 0x19, 0xe0, 0x1a, 0x3a, 0x94, 0x44, + 0xd8, 0xed, 0x0e, 0x5f, 0x07, 0x3e, 0x65, 0x19, 0xa4, 0x0d, 0x9f, 0x26, 0x51, 0xea, 0xf9, 0xef, + 0xa0, 0x5f, 0x7f, 0x52, 0xca, 0x32, 0xa0, 0xb4, 0xe9, 0xa0, 0x34, 0x89, 0x28, 0xf5, 0x3d, 0x6f, + 0xa7, 0x3e, 0xd8, 0xed, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfa, 0xa2, 0x12, 0x76, 0xde, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_type.pb.go new file mode 100644 index 0000000..13f762a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_interest_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/custom_interest_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible custom interest types. +type CustomInterestTypeEnum_CustomInterestType int32 + +const ( + // Not specified. + CustomInterestTypeEnum_UNSPECIFIED CustomInterestTypeEnum_CustomInterestType = 0 + // Used for return value only. Represents value unknown in this version. + CustomInterestTypeEnum_UNKNOWN CustomInterestTypeEnum_CustomInterestType = 1 + // Allows brand advertisers to define custom affinity audience lists. + CustomInterestTypeEnum_CUSTOM_AFFINITY CustomInterestTypeEnum_CustomInterestType = 2 + // Allows advertisers to define custom intent audience lists. + CustomInterestTypeEnum_CUSTOM_INTENT CustomInterestTypeEnum_CustomInterestType = 3 +) + +var CustomInterestTypeEnum_CustomInterestType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CUSTOM_AFFINITY", + 3: "CUSTOM_INTENT", +} +var CustomInterestTypeEnum_CustomInterestType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CUSTOM_AFFINITY": 2, + "CUSTOM_INTENT": 3, +} + +func (x CustomInterestTypeEnum_CustomInterestType) String() string { + return proto.EnumName(CustomInterestTypeEnum_CustomInterestType_name, int32(x)) +} +func (CustomInterestTypeEnum_CustomInterestType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_type_50baa3a4060ff668, []int{0, 0} +} + +// The types of custom interest. 
+type CustomInterestTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestTypeEnum) Reset() { *m = CustomInterestTypeEnum{} } +func (m *CustomInterestTypeEnum) String() string { return proto.CompactTextString(m) } +func (*CustomInterestTypeEnum) ProtoMessage() {} +func (*CustomInterestTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_type_50baa3a4060ff668, []int{0} +} +func (m *CustomInterestTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestTypeEnum.Unmarshal(m, b) +} +func (m *CustomInterestTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestTypeEnum.Marshal(b, m, deterministic) +} +func (dst *CustomInterestTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestTypeEnum.Merge(dst, src) +} +func (m *CustomInterestTypeEnum) XXX_Size() int { + return xxx_messageInfo_CustomInterestTypeEnum.Size(m) +} +func (m *CustomInterestTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomInterestTypeEnum)(nil), "google.ads.googleads.v1.enums.CustomInterestTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomInterestTypeEnum_CustomInterestType", CustomInterestTypeEnum_CustomInterestType_name, CustomInterestTypeEnum_CustomInterestType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/custom_interest_type.proto", fileDescriptor_custom_interest_type_50baa3a4060ff668) +} + +var fileDescriptor_custom_interest_type_50baa3a4060ff668 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x6a, 0xf2, 0x30, + 0x1c, 0xfd, 0xac, 0xf0, 0x0d, 0x22, 0xc3, 0xae, 0x83, 0x0d, 0xc6, 0xbc, 0xd0, 0x07, 0x48, 0x28, + 0xbb, 0x19, 0xd9, 0x55, 0x74, 0x55, 0xc2, 0x58, 0x14, 0xac, 0x8e, 0x49, 0x41, 0x3a, 0x1b, 0x42, + 0xc1, 0x26, 0xc5, 0x44, 0xc1, 0xd7, 0xd9, 0xe5, 0x1e, 0x65, 0x8f, 0x32, 0xf6, 0x10, 0xa3, 0x89, + 0xf6, 0x46, 0xb6, 0x9b, 0x72, 0xf8, 0x9d, 0x3f, 0x3d, 0x39, 0xe0, 0x5e, 0x28, 0x25, 0xd6, 0x1c, + 0xa5, 0x99, 0x46, 0x0e, 0x56, 0x68, 0x17, 0x22, 0x2e, 0xb7, 0x85, 0x46, 0xab, 0xad, 0x36, 0xaa, + 0x58, 0xe6, 0xd2, 0xf0, 0x0d, 0xd7, 0x66, 0x69, 0xf6, 0x25, 0x87, 0xe5, 0x46, 0x19, 0x15, 0x74, + 0x9c, 0x1c, 0xa6, 0x99, 0x86, 0xb5, 0x13, 0xee, 0x42, 0x68, 0x9d, 0x37, 0xb7, 0xc7, 0xe0, 0x32, + 0x47, 0xa9, 0x94, 0xca, 0xa4, 0x26, 0x57, 0x52, 0x3b, 0x73, 0xcf, 0x80, 0xab, 0x81, 0x8d, 0xa6, + 0x87, 0xe4, 0x78, 0x5f, 0xf2, 0x48, 0x6e, 0x8b, 0xde, 0x02, 0x04, 0xa7, 0x4c, 0xd0, 0x06, 0xad, + 0x19, 0x9b, 0x4e, 0xa2, 0x01, 0x1d, 0xd2, 0xe8, 0xd1, 0xff, 0x17, 0xb4, 0xc0, 0xd9, 0x8c, 0x3d, + 0xb1, 0xf1, 0x0b, 0xf3, 0x1b, 0xc1, 0x25, 0x68, 0x0f, 0x66, 0xd3, 0x78, 0xfc, 0xbc, 0x24, 0xc3, + 0x21, 0x65, 0x34, 0x7e, 0xf5, 0xbd, 0xe0, 0x02, 0x9c, 0x1f, 0x8e, 0x94, 0xc5, 0x11, 0x8b, 0xfd, + 0x66, 0xff, 0xbb, 0x01, 0xba, 0x2b, 0x55, 0xc0, 0x3f, 0x9b, 0xf7, 0xaf, 0x4f, 0xff, 0x3f, 0xa9, + 0x4a, 0x4f, 0x1a, 0x8b, 0xfe, 0xc1, 0x29, 0xd4, 0x3a, 0x95, 0x02, 0xaa, 0x8d, 0x40, 0x82, 0x4b, + 0xfb, 0xa4, 0xe3, 0x7a, 0x65, 0xae, 0x7f, 0x19, 0xf3, 0xc1, 0x7e, 0xdf, 0xbd, 0xe6, 0x88, 0x90, + 0x0f, 0xaf, 0x33, 0x72, 0x51, 0x24, 0xd3, 0xd0, 0xc1, 0x0a, 0xcd, 0x43, 0x58, 0xad, 0xa0, 0x3f, + 0x8f, 0x7c, 0x42, 0x32, 0x9d, 0xd4, 0x7c, 0x32, 
0x0f, 0x13, 0xcb, 0x7f, 0x79, 0x5d, 0x77, 0xc4, + 0x98, 0x64, 0x1a, 0xe3, 0x5a, 0x81, 0xf1, 0x3c, 0xc4, 0xd8, 0x6a, 0xde, 0xfe, 0xdb, 0x62, 0x77, + 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x2a, 0x73, 0xe3, 0x5b, 0xe4, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_placeholder_field.pb.go new file mode 100644 index 0000000..62bd823 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/custom_placeholder_field.pb.go @@ -0,0 +1,228 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/custom_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Custom placeholder fields. +type CustomPlaceholderFieldEnum_CustomPlaceholderField int32 + +const ( + // Not specified. + CustomPlaceholderFieldEnum_UNSPECIFIED CustomPlaceholderFieldEnum_CustomPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + CustomPlaceholderFieldEnum_UNKNOWN CustomPlaceholderFieldEnum_CustomPlaceholderField = 1 + // Data Type: STRING. Required. Combination ID and ID2 must be unique per + // offer. + CustomPlaceholderFieldEnum_ID CustomPlaceholderFieldEnum_CustomPlaceholderField = 2 + // Data Type: STRING. Combination ID and ID2 must be unique per offer. + CustomPlaceholderFieldEnum_ID2 CustomPlaceholderFieldEnum_CustomPlaceholderField = 3 + // Data Type: STRING. Required. Main headline with product name to be shown + // in dynamic ad. + CustomPlaceholderFieldEnum_ITEM_TITLE CustomPlaceholderFieldEnum_CustomPlaceholderField = 4 + // Data Type: STRING. Optional text to be shown in the image ad. + CustomPlaceholderFieldEnum_ITEM_SUBTITLE CustomPlaceholderFieldEnum_CustomPlaceholderField = 5 + // Data Type: STRING. Optional description of the product to be shown in the + // ad. + CustomPlaceholderFieldEnum_ITEM_DESCRIPTION CustomPlaceholderFieldEnum_CustomPlaceholderField = 6 + // Data Type: STRING. Full address of your offer or service, including + // postal code. This will be used to identify the closest product to the + // user when there are multiple offers in the feed that are relevant to the + // user. + CustomPlaceholderFieldEnum_ITEM_ADDRESS CustomPlaceholderFieldEnum_CustomPlaceholderField = 7 + // Data Type: STRING. Price to be shown in the ad. + // Example: "100.00 USD" + CustomPlaceholderFieldEnum_PRICE CustomPlaceholderFieldEnum_CustomPlaceholderField = 8 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + CustomPlaceholderFieldEnum_FORMATTED_PRICE CustomPlaceholderFieldEnum_CustomPlaceholderField = 9 + // Data Type: STRING. Sale price to be shown in the ad. 
+ // Example: "80.00 USD" + CustomPlaceholderFieldEnum_SALE_PRICE CustomPlaceholderFieldEnum_CustomPlaceholderField = 10 + // Data Type: STRING. Formatted sale price to be shown in the ad. + // Example: "On sale for $80.00", "$60 - $80" + CustomPlaceholderFieldEnum_FORMATTED_SALE_PRICE CustomPlaceholderFieldEnum_CustomPlaceholderField = 11 + // Data Type: URL. Image to be displayed in the ad. Highly recommended for + // image ads. + CustomPlaceholderFieldEnum_IMAGE_URL CustomPlaceholderFieldEnum_CustomPlaceholderField = 12 + // Data Type: STRING. Used as a recommendation engine signal to serve items + // in the same category. + CustomPlaceholderFieldEnum_ITEM_CATEGORY CustomPlaceholderFieldEnum_CustomPlaceholderField = 13 + // Data Type: URL_LIST. Final URLs for the ad when using Upgraded + // URLs. User will be redirected to these URLs when they click on an ad, or + // when they click on a specific product for ads that have multiple + // products. + CustomPlaceholderFieldEnum_FINAL_URLS CustomPlaceholderFieldEnum_CustomPlaceholderField = 14 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + CustomPlaceholderFieldEnum_FINAL_MOBILE_URLS CustomPlaceholderFieldEnum_CustomPlaceholderField = 15 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + CustomPlaceholderFieldEnum_TRACKING_URL CustomPlaceholderFieldEnum_CustomPlaceholderField = 16 + // Data Type: STRING_LIST. Keywords used for product retrieval. + CustomPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS CustomPlaceholderFieldEnum_CustomPlaceholderField = 17 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + CustomPlaceholderFieldEnum_ANDROID_APP_LINK CustomPlaceholderFieldEnum_CustomPlaceholderField = 18 + // Data Type: STRING_LIST. List of recommended IDs to show together with + // this item. + CustomPlaceholderFieldEnum_SIMILAR_IDS CustomPlaceholderFieldEnum_CustomPlaceholderField = 19 + // Data Type: STRING. iOS app link. + CustomPlaceholderFieldEnum_IOS_APP_LINK CustomPlaceholderFieldEnum_CustomPlaceholderField = 20 + // Data Type: INT64. iOS app store ID. 
+ CustomPlaceholderFieldEnum_IOS_APP_STORE_ID CustomPlaceholderFieldEnum_CustomPlaceholderField = 21 +) + +var CustomPlaceholderFieldEnum_CustomPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ID", + 3: "ID2", + 4: "ITEM_TITLE", + 5: "ITEM_SUBTITLE", + 6: "ITEM_DESCRIPTION", + 7: "ITEM_ADDRESS", + 8: "PRICE", + 9: "FORMATTED_PRICE", + 10: "SALE_PRICE", + 11: "FORMATTED_SALE_PRICE", + 12: "IMAGE_URL", + 13: "ITEM_CATEGORY", + 14: "FINAL_URLS", + 15: "FINAL_MOBILE_URLS", + 16: "TRACKING_URL", + 17: "CONTEXTUAL_KEYWORDS", + 18: "ANDROID_APP_LINK", + 19: "SIMILAR_IDS", + 20: "IOS_APP_LINK", + 21: "IOS_APP_STORE_ID", +} +var CustomPlaceholderFieldEnum_CustomPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ID": 2, + "ID2": 3, + "ITEM_TITLE": 4, + "ITEM_SUBTITLE": 5, + "ITEM_DESCRIPTION": 6, + "ITEM_ADDRESS": 7, + "PRICE": 8, + "FORMATTED_PRICE": 9, + "SALE_PRICE": 10, + "FORMATTED_SALE_PRICE": 11, + "IMAGE_URL": 12, + "ITEM_CATEGORY": 13, + "FINAL_URLS": 14, + "FINAL_MOBILE_URLS": 15, + "TRACKING_URL": 16, + "CONTEXTUAL_KEYWORDS": 17, + "ANDROID_APP_LINK": 18, + "SIMILAR_IDS": 19, + "IOS_APP_LINK": 20, + "IOS_APP_STORE_ID": 21, +} + +func (x CustomPlaceholderFieldEnum_CustomPlaceholderField) String() string { + return proto.EnumName(CustomPlaceholderFieldEnum_CustomPlaceholderField_name, int32(x)) +} +func (CustomPlaceholderFieldEnum_CustomPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_custom_placeholder_field_60b4307cd1510ec1, []int{0, 0} +} + +// Values for Custom placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. +type CustomPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomPlaceholderFieldEnum) Reset() { *m = CustomPlaceholderFieldEnum{} } +func (m *CustomPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*CustomPlaceholderFieldEnum) ProtoMessage() {} +func (*CustomPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_placeholder_field_60b4307cd1510ec1, []int{0} +} +func (m *CustomPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *CustomPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *CustomPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomPlaceholderFieldEnum.Merge(dst, src) +} +func (m *CustomPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_CustomPlaceholderFieldEnum.Size(m) +} +func (m *CustomPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.CustomPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomPlaceholderFieldEnum_CustomPlaceholderField", CustomPlaceholderFieldEnum_CustomPlaceholderField_name, CustomPlaceholderFieldEnum_CustomPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/custom_placeholder_field.proto", fileDescriptor_custom_placeholder_field_60b4307cd1510ec1) +} 
+ +var fileDescriptor_custom_placeholder_field_60b4307cd1510ec1 = []byte{ + // 524 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x8e, 0xd2, 0x40, + 0x18, 0x15, 0x70, 0x41, 0x86, 0x65, 0x19, 0x06, 0x56, 0xcd, 0xea, 0x5e, 0xec, 0x3e, 0x40, 0x1b, + 0xf4, 0xae, 0x7a, 0x33, 0x74, 0x06, 0x32, 0xa1, 0xb4, 0x4d, 0x67, 0x60, 0x5d, 0x43, 0xd2, 0x54, + 0x5a, 0x2b, 0x09, 0x74, 0x08, 0x85, 0x7d, 0x1d, 0x13, 0x2f, 0x7d, 0x08, 0x1f, 0xc0, 0x47, 0xf1, + 0xca, 0x47, 0x30, 0x33, 0xe5, 0xc7, 0x8b, 0xd5, 0x9b, 0xe6, 0xfb, 0xce, 0x77, 0xce, 0xe9, 0x37, + 0x33, 0x07, 0xbc, 0x4f, 0xa5, 0x4c, 0x97, 0x89, 0x19, 0xc5, 0xb9, 0x59, 0x94, 0xaa, 0x7a, 0xe8, + 0x99, 0x49, 0xb6, 0x5b, 0xe5, 0xe6, 0x7c, 0x97, 0x6f, 0xe5, 0x2a, 0x5c, 0x2f, 0xa3, 0x79, 0xf2, + 0x45, 0x2e, 0xe3, 0x64, 0x13, 0x7e, 0x5e, 0x24, 0xcb, 0xd8, 0x58, 0x6f, 0xe4, 0x56, 0xa2, 0xeb, + 0x42, 0x62, 0x44, 0x71, 0x6e, 0x1c, 0xd5, 0xc6, 0x43, 0xcf, 0xd0, 0xea, 0xab, 0xd7, 0x07, 0xf3, + 0xf5, 0xc2, 0x8c, 0xb2, 0x4c, 0x6e, 0xa3, 0xed, 0x42, 0x66, 0x79, 0x21, 0xbe, 0xfd, 0x51, 0x01, + 0x57, 0xb6, 0xf6, 0xf7, 0x4f, 0xf6, 0x03, 0xe5, 0x4e, 0xb3, 0xdd, 0xea, 0xf6, 0x6b, 0x05, 0x3c, + 0x7f, 0x7c, 0x8c, 0x5a, 0xa0, 0x31, 0x71, 0xb9, 0x4f, 0x6d, 0x36, 0x60, 0x94, 0xc0, 0x27, 0xa8, + 0x01, 0x6a, 0x13, 0x77, 0xe4, 0x7a, 0x77, 0x2e, 0x2c, 0xa1, 0x2a, 0x28, 0x33, 0x02, 0xcb, 0xa8, + 0x06, 0x2a, 0x8c, 0xbc, 0x81, 0x15, 0x74, 0x01, 0x00, 0x13, 0x74, 0x1c, 0x0a, 0x26, 0x1c, 0x0a, + 0x9f, 0xa2, 0x36, 0x68, 0xea, 0x9e, 0x4f, 0xfa, 0x05, 0x74, 0x86, 0xba, 0x00, 0x6a, 0x88, 0x50, + 0x6e, 0x07, 0xcc, 0x17, 0xcc, 0x73, 0x61, 0x15, 0x41, 0x70, 0xae, 0x51, 0x4c, 0x48, 0x40, 0x39, + 0x87, 0x35, 0x54, 0x07, 0x67, 0x7e, 0xc0, 0x6c, 0x0a, 0x9f, 0xa1, 0x0e, 0x68, 0x0d, 0xbc, 0x60, + 0x8c, 0x85, 0xa0, 0x24, 0x2c, 0xc0, 0xba, 0xfa, 0x15, 0xc7, 0x0e, 0xdd, 0xf7, 0x00, 0xbd, 0x04, + 0xdd, 0x13, 0xe9, 0xaf, 0x49, 0x03, 0x35, 0x41, 0x9d, 0x8d, 0xf1, 0x90, 0x86, 0x93, 0xc0, 0x81, + 0xe7, 0xc7, 0x9d, 0x6c, 0x2c, 0xe8, 0xd0, 0x0b, 0xee, 0x61, 0x53, 0x79, 0x0d, 0x98, 0x8b, 0x1d, + 0xc5, 0xe0, 0xf0, 0x02, 0x5d, 0x82, 0x76, 0xd1, 0x8f, 0xbd, 0x3e, 0x73, 0x68, 0x01, 0xb7, 0xd4, + 0x92, 0x22, 0xc0, 0xf6, 0x88, 0xb9, 0x43, 0xed, 0x05, 0xd1, 0x0b, 0xd0, 0xb1, 0x3d, 0x57, 0xd0, + 0x0f, 0x62, 0x82, 0x9d, 0x70, 0x44, 0xef, 0xef, 0xbc, 0x80, 0x70, 0xd8, 0x56, 0xa7, 0xc4, 0x2e, + 0x09, 0x3c, 0x46, 0x42, 0xec, 0xfb, 0xa1, 0xc3, 0xdc, 0x11, 0x44, 0xea, 0x36, 0x39, 0x1b, 0x33, + 0x07, 0x07, 0x21, 0x23, 0x1c, 0x76, 0xf4, 0xb1, 0x3d, 0x7e, 0xa2, 0x74, 0xf5, 0xf5, 0xec, 0x11, + 0x2e, 0xbc, 0x80, 0x86, 0x8c, 0xc0, 0xcb, 0xfe, 0xef, 0x12, 0xb8, 0x99, 0xcb, 0x95, 0xf1, 0xdf, + 0x10, 0xf4, 0x5f, 0x3d, 0xfe, 0x88, 0xbe, 0xca, 0x80, 0x5f, 0xfa, 0xd8, 0xdf, 0xab, 0x53, 0xb9, + 0x8c, 0xb2, 0xd4, 0x90, 0x9b, 0xd4, 0x4c, 0x93, 0x4c, 0x27, 0xe4, 0x10, 0xc8, 0xf5, 0x22, 0xff, + 0x47, 0x3e, 0xdf, 0xe9, 0xef, 0xb7, 0x72, 0x65, 0x88, 0xf1, 0xf7, 0xf2, 0xf5, 0xb0, 0xb0, 0xc2, + 0x71, 0x6e, 0x14, 0xa5, 0xaa, 0xa6, 0x3d, 0x43, 0xe5, 0x29, 0xff, 0x79, 0x98, 0xcf, 0x70, 0x9c, + 0xcf, 0x8e, 0xf3, 0xd9, 0xb4, 0x37, 0xd3, 0xf3, 0x5f, 0xe5, 0x9b, 0x02, 0xb4, 0x2c, 0x1c, 0xe7, + 0x96, 0x75, 0x64, 0x58, 0xd6, 0xb4, 0x67, 0x59, 0x9a, 0xf3, 0xa9, 0xaa, 0x17, 0x7b, 0xfb, 0x27, + 0x00, 0x00, 0xff, 0xff, 0x74, 0x08, 0x91, 0xb6, 0x37, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_match_upload_key_type.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_match_upload_key_type.pb.go new file mode 100644 index 0000000..4af3e29 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_match_upload_key_type.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/customer_match_upload_key_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible customer match upload key types. +type CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType int32 + +const ( + // Not specified. + CustomerMatchUploadKeyTypeEnum_UNSPECIFIED CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType = 0 + // Used for return value only. Represents value unknown in this version. + CustomerMatchUploadKeyTypeEnum_UNKNOWN CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType = 1 + // Members are matched from customer info such as email address, phone + // number or physical address. + CustomerMatchUploadKeyTypeEnum_CONTACT_INFO CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType = 2 + // Members are matched from a user id generated and assigned by the + // advertiser. + CustomerMatchUploadKeyTypeEnum_CRM_ID CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType = 3 + // Members are matched from mobile advertising ids. + CustomerMatchUploadKeyTypeEnum_MOBILE_ADVERTISING_ID CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType = 4 +) + +var CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONTACT_INFO", + 3: "CRM_ID", + 4: "MOBILE_ADVERTISING_ID", +} +var CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONTACT_INFO": 2, + "CRM_ID": 3, + "MOBILE_ADVERTISING_ID": 4, +} + +func (x CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType) String() string { + return proto.EnumName(CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType_name, int32(x)) +} +func (CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_match_upload_key_type_baf8b72a7a5cd145, []int{0, 0} +} + +// Indicates what type of data are the user list's members matched from. 
+type CustomerMatchUploadKeyTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerMatchUploadKeyTypeEnum) Reset() { *m = CustomerMatchUploadKeyTypeEnum{} } +func (m *CustomerMatchUploadKeyTypeEnum) String() string { return proto.CompactTextString(m) } +func (*CustomerMatchUploadKeyTypeEnum) ProtoMessage() {} +func (*CustomerMatchUploadKeyTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_match_upload_key_type_baf8b72a7a5cd145, []int{0} +} +func (m *CustomerMatchUploadKeyTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerMatchUploadKeyTypeEnum.Unmarshal(m, b) +} +func (m *CustomerMatchUploadKeyTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerMatchUploadKeyTypeEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerMatchUploadKeyTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerMatchUploadKeyTypeEnum.Merge(dst, src) +} +func (m *CustomerMatchUploadKeyTypeEnum) XXX_Size() int { + return xxx_messageInfo_CustomerMatchUploadKeyTypeEnum.Size(m) +} +func (m *CustomerMatchUploadKeyTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerMatchUploadKeyTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerMatchUploadKeyTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerMatchUploadKeyTypeEnum)(nil), "google.ads.googleads.v1.enums.CustomerMatchUploadKeyTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType", CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType_name, CustomerMatchUploadKeyTypeEnum_CustomerMatchUploadKeyType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/customer_match_upload_key_type.proto", fileDescriptor_customer_match_upload_key_type_baf8b72a7a5cd145) +} + +var fileDescriptor_customer_match_upload_key_type_baf8b72a7a5cd145 = []byte{ + // 352 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x41, 0x6a, 0x83, 0x40, + 0x14, 0xad, 0xa6, 0xa4, 0x30, 0x29, 0x54, 0x84, 0x2e, 0x1a, 0x9a, 0x96, 0xe4, 0x00, 0x23, 0xd2, + 0xdd, 0x74, 0xa5, 0xc6, 0x04, 0x49, 0xa3, 0x21, 0x31, 0x16, 0x8a, 0x30, 0x4c, 0xe3, 0x60, 0x43, + 0xe3, 0x8c, 0x64, 0x34, 0xe0, 0x41, 0x7a, 0x81, 0x2e, 0x7b, 0x94, 0x1e, 0xa5, 0xeb, 0x1e, 0xa0, + 0x38, 0x26, 0xd9, 0xa5, 0x1b, 0x79, 0xf8, 0xfe, 0x7f, 0xef, 0xbf, 0x37, 0xc0, 0x4e, 0x39, 0x4f, + 0x37, 0xd4, 0x20, 0x89, 0x30, 0x1a, 0x58, 0xa3, 0x9d, 0x69, 0x50, 0x56, 0x66, 0xc2, 0x58, 0x95, + 0xa2, 0xe0, 0x19, 0xdd, 0xe2, 0x8c, 0x14, 0xab, 0x37, 0x5c, 0xe6, 0x1b, 0x4e, 0x12, 0xfc, 0x4e, + 0x2b, 0x5c, 0x54, 0x39, 0x85, 0xf9, 0x96, 0x17, 0x5c, 0xef, 0x35, 0x8b, 0x90, 0x24, 0x02, 0x1e, + 0x35, 0xe0, 0xce, 0x84, 0x52, 0xa3, 0x7b, 0x7b, 0xb0, 0xc8, 0xd7, 0x06, 0x61, 0x8c, 0x17, 0xa4, + 0x58, 0x73, 0x26, 0x9a, 0xe5, 0xc1, 0x87, 0x02, 0xee, 0x9c, 0xbd, 0xcb, 0xb4, 0x36, 0x59, 0x4a, + 0x8f, 0x09, 0xad, 0xc2, 0x2a, 0xa7, 0x2e, 0x2b, 0xb3, 0x81, 0x00, 0xdd, 0xd3, 0x13, 0xfa, 0x15, + 0xe8, 0x2c, 0xfd, 0xc5, 0xcc, 0x75, 0xbc, 0x91, 0xe7, 0x0e, 0xb5, 0x33, 0xbd, 0x03, 0x2e, 0x96, + 0xfe, 0xc4, 0x0f, 0x9e, 0x7d, 0x4d, 0xd1, 0x35, 0x70, 0xe9, 0x04, 0x7e, 0x68, 0x39, 0x21, 0xf6, + 0xfc, 0x51, 0xa0, 0xa9, 0x3a, 0x00, 0x6d, 0x67, 0x3e, 0xc5, 0xde, 0x50, 0x6b, 0xe9, 0x37, 0xe0, + 0x7a, 0x1a, 0xd8, 0xde, 0x93, 0x8b, 0xad, 0x61, 0xe4, 0xce, 0x43, 0x6f, 0xe1, 0xf9, 0xe3, 0x9a, + 
0x3a, 0xb7, 0x7f, 0x15, 0xd0, 0x5f, 0xf1, 0x0c, 0xfe, 0x9b, 0xcd, 0xbe, 0x3f, 0x7d, 0xd8, 0xac, + 0x8e, 0x37, 0x53, 0x5e, 0xf6, 0x0d, 0xc3, 0x94, 0x6f, 0x08, 0x4b, 0x21, 0xdf, 0xa6, 0x46, 0x4a, + 0x99, 0x0c, 0x7f, 0x68, 0x3c, 0x5f, 0x8b, 0x13, 0x0f, 0xf0, 0x28, 0xbf, 0x9f, 0x6a, 0x6b, 0x6c, + 0x59, 0x5f, 0x6a, 0x6f, 0xdc, 0x48, 0x59, 0x89, 0x80, 0x0d, 0xac, 0x51, 0x64, 0xc2, 0xba, 0x26, + 0xf1, 0x7d, 0xe0, 0x63, 0x2b, 0x11, 0xf1, 0x91, 0x8f, 0x23, 0x33, 0x96, 0xfc, 0x8f, 0xda, 0x6f, + 0x7e, 0x22, 0x64, 0x25, 0x02, 0xa1, 0xe3, 0x04, 0x42, 0x91, 0x89, 0x90, 0x9c, 0x79, 0x6d, 0xcb, + 0xc3, 0x1e, 0xfe, 0x02, 0x00, 0x00, 0xff, 0xff, 0xd4, 0xb5, 0x62, 0x20, 0x18, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.pb.go new file mode 100644 index 0000000..f394fd1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.pb.go @@ -0,0 +1,155 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible reasons a customer is not eligible to use +// PaymentMode.CONVERSIONS. +type CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason int32 + +const ( + // Not specified. + CustomerPayPerConversionEligibilityFailureReasonEnum_UNSPECIFIED CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 0 + // Used for return value only. Represents value unknown in this version. + CustomerPayPerConversionEligibilityFailureReasonEnum_UNKNOWN CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 1 + // Customer does not have enough conversions. + CustomerPayPerConversionEligibilityFailureReasonEnum_NOT_ENOUGH_CONVERSIONS CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 2 + // Customer's conversion lag is too high. + CustomerPayPerConversionEligibilityFailureReasonEnum_CONVERSION_LAG_TOO_HIGH CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 3 + // Customer uses shared budgets. + CustomerPayPerConversionEligibilityFailureReasonEnum_HAS_CAMPAIGN_WITH_SHARED_BUDGET CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 4 + // Customer has conversions with ConversionActionType.UPLOAD_CLICKS. 
+ CustomerPayPerConversionEligibilityFailureReasonEnum_HAS_UPLOAD_CLICKS_CONVERSION CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 5 + // Customer's average daily spend is too high. + CustomerPayPerConversionEligibilityFailureReasonEnum_AVERAGE_DAILY_SPEND_TOO_HIGH CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 6 + // Customer's eligibility has not yet been calculated by the Google Ads + // backend. Check back soon. + CustomerPayPerConversionEligibilityFailureReasonEnum_ANALYSIS_NOT_COMPLETE CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 7 + // Customer is not eligible due to other reasons. + CustomerPayPerConversionEligibilityFailureReasonEnum_OTHER CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason = 8 +) + +var CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NOT_ENOUGH_CONVERSIONS", + 3: "CONVERSION_LAG_TOO_HIGH", + 4: "HAS_CAMPAIGN_WITH_SHARED_BUDGET", + 5: "HAS_UPLOAD_CLICKS_CONVERSION", + 6: "AVERAGE_DAILY_SPEND_TOO_HIGH", + 7: "ANALYSIS_NOT_COMPLETE", + 8: "OTHER", +} +var CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NOT_ENOUGH_CONVERSIONS": 2, + "CONVERSION_LAG_TOO_HIGH": 3, + "HAS_CAMPAIGN_WITH_SHARED_BUDGET": 4, + "HAS_UPLOAD_CLICKS_CONVERSION": 5, + "AVERAGE_DAILY_SPEND_TOO_HIGH": 6, + "ANALYSIS_NOT_COMPLETE": 7, + "OTHER": 8, +} + +func (x CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason) String() string { + return proto.EnumName(CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason_name, int32(x)) +} +func (CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_pay_per_conversion_eligibility_failure_reason_fe69e3f1bce20ba1, []int{0, 0} +} + +// Container for enum describing reasons why a customer is not eligible to use +// PaymentMode.CONVERSIONS. 
+type CustomerPayPerConversionEligibilityFailureReasonEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) Reset() { + *m = CustomerPayPerConversionEligibilityFailureReasonEnum{} +} +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) String() string { + return proto.CompactTextString(m) +} +func (*CustomerPayPerConversionEligibilityFailureReasonEnum) ProtoMessage() {} +func (*CustomerPayPerConversionEligibilityFailureReasonEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_pay_per_conversion_eligibility_failure_reason_fe69e3f1bce20ba1, []int{0} +} +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum.Unmarshal(m, b) +} +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerPayPerConversionEligibilityFailureReasonEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum.Merge(dst, src) +} +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) XXX_Size() int { + return xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum.Size(m) +} +func (m *CustomerPayPerConversionEligibilityFailureReasonEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerPayPerConversionEligibilityFailureReasonEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerPayPerConversionEligibilityFailureReasonEnum)(nil), "google.ads.googleads.v1.enums.CustomerPayPerConversionEligibilityFailureReasonEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason", CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason_name, CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/customer_pay_per_conversion_eligibility_failure_reason.proto", fileDescriptor_customer_pay_per_conversion_eligibility_failure_reason_fe69e3f1bce20ba1) +} + +var fileDescriptor_customer_pay_per_conversion_eligibility_failure_reason_fe69e3f1bce20ba1 = []byte{ + // 476 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0x6f, 0x94, 0x40, + 0x18, 0xc6, 0x2d, 0xb5, 0xad, 0x4e, 0x0f, 0x12, 0x12, 0x35, 0xd6, 0x36, 0xda, 0x7a, 0x07, 0x89, + 0x7a, 0xc1, 0xd3, 0x2c, 0x4c, 0x81, 0x94, 0xce, 0x10, 0xfe, 0x35, 0x6d, 0x36, 0x99, 0xd0, 0x65, + 0x24, 0x24, 0xec, 0x0c, 0x61, 0xd8, 0x4d, 0xf6, 0x2b, 0xf8, 0x09, 0x3c, 0x7b, 0xf4, 0xa3, 0x78, + 0xf3, 0x6b, 0xf8, 0x29, 0x0c, 0xd0, 0xdd, 0xf5, 0xa2, 0xc9, 0x5e, 0xc8, 0x93, 0x79, 0x9f, 0x79, + 0x7e, 0x13, 0x9e, 0x17, 0xdc, 0x95, 0x42, 0x94, 0x35, 0x33, 0xf2, 0x42, 0x1a, 0xa3, 0xec, 0xd5, + 0xd2, 0x34, 0x18, 0x5f, 0xcc, 0xa5, 0x31, 0x5b, 0xc8, 0x4e, 0xcc, 0x59, 0x4b, 0x9b, 0x7c, 0x45, + 0x1b, 0xd6, 0xd2, 0x99, 0xe0, 0x4b, 0xd6, 0xca, 0x4a, 0x70, 0xca, 0xea, 0xaa, 0xac, 0xee, 0xab, + 0xba, 0xea, 0x56, 0xf4, 0x4b, 0x5e, 
0xd5, 0x8b, 0x96, 0xd1, 0x96, 0xe5, 0x52, 0x70, 0xbd, 0x69, + 0x45, 0x27, 0xb4, 0xb3, 0x31, 0x50, 0xcf, 0x0b, 0xa9, 0x6f, 0xb2, 0xf5, 0xa5, 0xa9, 0x0f, 0xd9, + 0x27, 0xa7, 0x6b, 0x74, 0x53, 0x19, 0x39, 0xe7, 0xa2, 0xcb, 0xbb, 0x4a, 0x70, 0x39, 0x5e, 0xbe, + 0xf8, 0xa5, 0x80, 0x8f, 0xf6, 0x03, 0x3d, 0xcc, 0x57, 0x21, 0x6b, 0xed, 0x0d, 0x1a, 0x6d, 0xc9, + 0x97, 0x23, 0x38, 0x1a, 0xb8, 0x88, 0x2f, 0xe6, 0x17, 0xdf, 0x14, 0xf0, 0x7e, 0xd7, 0x8b, 0xda, + 0x33, 0x70, 0x9c, 0xe2, 0x38, 0x44, 0xb6, 0x7f, 0xe9, 0x23, 0x47, 0x7d, 0xa4, 0x1d, 0x83, 0xa3, + 0x14, 0x5f, 0x61, 0x72, 0x83, 0xd5, 0x3d, 0xed, 0x04, 0xbc, 0xc0, 0x24, 0xa1, 0x08, 0x93, 0xd4, + 0xf5, 0xa8, 0x4d, 0x70, 0x86, 0xa2, 0xd8, 0x27, 0x38, 0x56, 0x15, 0xed, 0x35, 0x78, 0xb9, 0x3d, + 0xa0, 0x01, 0x74, 0x69, 0x42, 0x08, 0xf5, 0x7c, 0xd7, 0x53, 0xf7, 0xb5, 0x77, 0xe0, 0x8d, 0x07, + 0x63, 0x6a, 0xc3, 0xeb, 0x10, 0xfa, 0x2e, 0xa6, 0x37, 0x7e, 0xe2, 0xd1, 0xd8, 0x83, 0x11, 0x72, + 0xe8, 0x24, 0x75, 0x5c, 0x94, 0xa8, 0x8f, 0xb5, 0xb7, 0xe0, 0xb4, 0x37, 0xa5, 0x61, 0x40, 0xa0, + 0x43, 0xed, 0xc0, 0xb7, 0xaf, 0xe2, 0xbf, 0x20, 0xea, 0x41, 0xef, 0x80, 0x19, 0x8a, 0xa0, 0x8b, + 0xa8, 0x03, 0xfd, 0xe0, 0x96, 0xc6, 0x21, 0xc2, 0xce, 0x16, 0x74, 0xa8, 0xbd, 0x02, 0xcf, 0x21, + 0x86, 0xc1, 0x6d, 0xec, 0xc7, 0xb4, 0x7f, 0xaa, 0x4d, 0xae, 0xc3, 0x00, 0x25, 0x48, 0x3d, 0xd2, + 0x9e, 0x82, 0x03, 0x92, 0x78, 0x28, 0x52, 0x9f, 0x4c, 0xbe, 0x2a, 0xe0, 0x7c, 0x26, 0xe6, 0xfa, + 0x7f, 0x7b, 0x99, 0x7c, 0xda, 0xf5, 0xef, 0x85, 0x7d, 0x61, 0xe1, 0xde, 0xdd, 0xe4, 0x21, 0xb7, + 0x14, 0x75, 0xce, 0x4b, 0x5d, 0xb4, 0xa5, 0x51, 0x32, 0x3e, 0xd4, 0xb9, 0xde, 0xad, 0xa6, 0x92, + 0xff, 0x58, 0xb5, 0xcf, 0xc3, 0xf7, 0xbb, 0xb2, 0xef, 0x42, 0xf8, 0x43, 0x39, 0x73, 0xc7, 0x28, + 0x58, 0x48, 0x7d, 0x94, 0xbd, 0xca, 0x4c, 0xbd, 0xaf, 0x58, 0xfe, 0x5c, 0xcf, 0xa7, 0xb0, 0x90, + 0xd3, 0xcd, 0x7c, 0x9a, 0x99, 0xd3, 0x61, 0xfe, 0x5b, 0x39, 0x1f, 0x0f, 0x2d, 0x0b, 0x16, 0xd2, + 0xb2, 0x36, 0x0e, 0xcb, 0xca, 0x4c, 0xcb, 0x1a, 0x3c, 0xf7, 0x87, 0xc3, 0xc3, 0x3e, 0xfc, 0x09, + 0x00, 0x00, 0xff, 0xff, 0x04, 0xd5, 0x05, 0x30, 0x02, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/data_driven_model_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/data_driven_model_status.pb.go new file mode 100644 index 0000000..6b5fc8b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/data_driven_model_status.pb.go @@ -0,0 +1,134 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/data_driven_model_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates data driven model statuses. +type DataDrivenModelStatusEnum_DataDrivenModelStatus int32 + +const ( + // Not specified. 
+ DataDrivenModelStatusEnum_UNSPECIFIED DataDrivenModelStatusEnum_DataDrivenModelStatus = 0 + // Used for return value only. Represents value unknown in this version. + DataDrivenModelStatusEnum_UNKNOWN DataDrivenModelStatusEnum_DataDrivenModelStatus = 1 + // The data driven model is available. + DataDrivenModelStatusEnum_AVAILABLE DataDrivenModelStatusEnum_DataDrivenModelStatus = 2 + // The data driven model is stale. It hasn't been updated for at least 7 + // days. It is still being used, but will become expired if it does not get + // updated for 30 days. + DataDrivenModelStatusEnum_STALE DataDrivenModelStatusEnum_DataDrivenModelStatus = 3 + // The data driven model expired. It hasn't been updated for at least 30 + // days and cannot be used. Most commonly this is because there hasn't been + // the required number of events in a recent 30-day period. + DataDrivenModelStatusEnum_EXPIRED DataDrivenModelStatusEnum_DataDrivenModelStatus = 4 + // The data driven model has never been generated. Most commonly this is + // because there has never been the required number of events in any 30-day + // period. + DataDrivenModelStatusEnum_NEVER_GENERATED DataDrivenModelStatusEnum_DataDrivenModelStatus = 5 +) + +var DataDrivenModelStatusEnum_DataDrivenModelStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AVAILABLE", + 3: "STALE", + 4: "EXPIRED", + 5: "NEVER_GENERATED", +} +var DataDrivenModelStatusEnum_DataDrivenModelStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AVAILABLE": 2, + "STALE": 3, + "EXPIRED": 4, + "NEVER_GENERATED": 5, +} + +func (x DataDrivenModelStatusEnum_DataDrivenModelStatus) String() string { + return proto.EnumName(DataDrivenModelStatusEnum_DataDrivenModelStatus_name, int32(x)) +} +func (DataDrivenModelStatusEnum_DataDrivenModelStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_driven_model_status_cb3109dff421a3f4, []int{0, 0} +} + +// Container for enum indicating data driven model status. 
+type DataDrivenModelStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataDrivenModelStatusEnum) Reset() { *m = DataDrivenModelStatusEnum{} } +func (m *DataDrivenModelStatusEnum) String() string { return proto.CompactTextString(m) } +func (*DataDrivenModelStatusEnum) ProtoMessage() {} +func (*DataDrivenModelStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_data_driven_model_status_cb3109dff421a3f4, []int{0} +} +func (m *DataDrivenModelStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataDrivenModelStatusEnum.Unmarshal(m, b) +} +func (m *DataDrivenModelStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataDrivenModelStatusEnum.Marshal(b, m, deterministic) +} +func (dst *DataDrivenModelStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataDrivenModelStatusEnum.Merge(dst, src) +} +func (m *DataDrivenModelStatusEnum) XXX_Size() int { + return xxx_messageInfo_DataDrivenModelStatusEnum.Size(m) +} +func (m *DataDrivenModelStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DataDrivenModelStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DataDrivenModelStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DataDrivenModelStatusEnum)(nil), "google.ads.googleads.v1.enums.DataDrivenModelStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DataDrivenModelStatusEnum_DataDrivenModelStatus", DataDrivenModelStatusEnum_DataDrivenModelStatus_name, DataDrivenModelStatusEnum_DataDrivenModelStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/data_driven_model_status.proto", fileDescriptor_data_driven_model_status_cb3109dff421a3f4) +} + +var fileDescriptor_data_driven_model_status_cb3109dff421a3f4 = []byte{ + // 341 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0xb6, 0x9d, 0x53, 0x96, 0x21, 0x2b, 0x15, 0x0f, 0x0e, 0x77, 0xd8, 0x1e, 0x20, 0xa5, 0x78, + 0x8b, 0x5e, 0x52, 0x1b, 0x47, 0x71, 0xd6, 0xb2, 0x3f, 0x55, 0xa4, 0x50, 0xa2, 0x29, 0xa5, 0xb0, + 0x26, 0x73, 0xc9, 0xf6, 0x18, 0x3e, 0x84, 0x47, 0x1f, 0xc5, 0x47, 0xf1, 0xe2, 0x2b, 0x48, 0xd3, + 0x6d, 0xa7, 0xe9, 0x25, 0x7c, 0xe4, 0xfb, 0xc3, 0xef, 0xfb, 0xc0, 0x75, 0x2e, 0x44, 0x3e, 0xcf, + 0x1c, 0xca, 0xa4, 0x53, 0xc3, 0x0a, 0xad, 0x5d, 0x27, 0xe3, 0xab, 0x52, 0x3a, 0x8c, 0x2a, 0x9a, + 0xb2, 0x65, 0xb1, 0xce, 0x78, 0x5a, 0x0a, 0x96, 0xcd, 0x53, 0xa9, 0xa8, 0x5a, 0x49, 0xb8, 0x58, + 0x0a, 0x25, 0xec, 0x5e, 0x6d, 0x81, 0x94, 0x49, 0xb8, 0x73, 0xc3, 0xb5, 0x0b, 0xb5, 0xbb, 0x7b, + 0xb1, 0x0d, 0x5f, 0x14, 0x0e, 0xe5, 0x5c, 0x28, 0xaa, 0x0a, 0xc1, 0x37, 0xe6, 0xc1, 0xbb, 0x01, + 0xce, 0x7d, 0xaa, 0xa8, 0xaf, 0xe3, 0xef, 0xab, 0xf4, 0x89, 0x0e, 0x27, 0x7c, 0x55, 0x0e, 0xde, + 0xc0, 0xd9, 0x5e, 0xd2, 0xee, 0x80, 0xf6, 0x2c, 0x9c, 0x44, 0xe4, 0x26, 0xb8, 0x0d, 0x88, 0x6f, + 0x1d, 0xd8, 0x6d, 0x70, 0x3c, 0x0b, 0xef, 0xc2, 0x87, 0xc7, 0xd0, 0x32, 0xec, 0x13, 0xd0, 0xc2, + 0x31, 0x0e, 0x46, 0xd8, 0x1b, 0x11, 0xcb, 0xb4, 0x5b, 0xa0, 0x39, 0x99, 0xe2, 0x11, 0xb1, 0x1a, + 0x95, 0x8c, 0x3c, 0x45, 0xc1, 0x98, 0xf8, 0xd6, 0xa1, 0x7d, 0x0a, 0x3a, 0x21, 0x89, 0xc9, 0x38, + 0x1d, 0x92, 0x90, 0x8c, 0xf1, 0x94, 0xf8, 0x56, 0xd3, 0xfb, 0x31, 0x40, 0xff, 0x55, 0x94, 0xf0, + 0xdf, 0x52, 0x5e, 0x77, 0xef, 0x59, 0x51, 0x55, 0x29, 0x32, 0x9e, 0xbd, 0x8d, 0x39, 0x17, 0x73, + 0xca, 0x73, 0x28, 0x96, 0xb9, 0x93, 0x67, 0x5c, 0x17, 
0xde, 0xee, 0xbb, 0x28, 0xe4, 0x1f, 0x73, + 0x5f, 0xe9, 0xf7, 0xc3, 0x6c, 0x0c, 0x31, 0xfe, 0x34, 0x7b, 0xc3, 0x3a, 0x0a, 0x33, 0x09, 0x6b, + 0x58, 0xa1, 0xd8, 0x85, 0xd5, 0x3e, 0xf2, 0x6b, 0xcb, 0x27, 0x98, 0xc9, 0x64, 0xc7, 0x27, 0xb1, + 0x9b, 0x68, 0xfe, 0xdb, 0xec, 0xd7, 0x9f, 0x08, 0x61, 0x26, 0x11, 0xda, 0x29, 0x10, 0x8a, 0x5d, + 0x84, 0xb4, 0xe6, 0xe5, 0x48, 0x1f, 0x76, 0xf9, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x42, 0x91, 0xd9, + 0xbf, 0x06, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/day_of_week.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/day_of_week.pb.go new file mode 100644 index 0000000..1f42506 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/day_of_week.pb.go @@ -0,0 +1,140 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/day_of_week.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates days of the week, e.g., "Monday". +type DayOfWeekEnum_DayOfWeek int32 + +const ( + // Not specified. + DayOfWeekEnum_UNSPECIFIED DayOfWeekEnum_DayOfWeek = 0 + // The value is unknown in this version. + DayOfWeekEnum_UNKNOWN DayOfWeekEnum_DayOfWeek = 1 + // Monday. + DayOfWeekEnum_MONDAY DayOfWeekEnum_DayOfWeek = 2 + // Tuesday. + DayOfWeekEnum_TUESDAY DayOfWeekEnum_DayOfWeek = 3 + // Wednesday. + DayOfWeekEnum_WEDNESDAY DayOfWeekEnum_DayOfWeek = 4 + // Thursday. + DayOfWeekEnum_THURSDAY DayOfWeekEnum_DayOfWeek = 5 + // Friday. + DayOfWeekEnum_FRIDAY DayOfWeekEnum_DayOfWeek = 6 + // Saturday. + DayOfWeekEnum_SATURDAY DayOfWeekEnum_DayOfWeek = 7 + // Sunday. + DayOfWeekEnum_SUNDAY DayOfWeekEnum_DayOfWeek = 8 +) + +var DayOfWeekEnum_DayOfWeek_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MONDAY", + 3: "TUESDAY", + 4: "WEDNESDAY", + 5: "THURSDAY", + 6: "FRIDAY", + 7: "SATURDAY", + 8: "SUNDAY", +} +var DayOfWeekEnum_DayOfWeek_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MONDAY": 2, + "TUESDAY": 3, + "WEDNESDAY": 4, + "THURSDAY": 5, + "FRIDAY": 6, + "SATURDAY": 7, + "SUNDAY": 8, +} + +func (x DayOfWeekEnum_DayOfWeek) String() string { + return proto.EnumName(DayOfWeekEnum_DayOfWeek_name, int32(x)) +} +func (DayOfWeekEnum_DayOfWeek) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_day_of_week_f77dfafb40312473, []int{0, 0} +} + +// Container for enumeration of days of the week, e.g., "Monday". 
+type DayOfWeekEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DayOfWeekEnum) Reset() { *m = DayOfWeekEnum{} } +func (m *DayOfWeekEnum) String() string { return proto.CompactTextString(m) } +func (*DayOfWeekEnum) ProtoMessage() {} +func (*DayOfWeekEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_day_of_week_f77dfafb40312473, []int{0} +} +func (m *DayOfWeekEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DayOfWeekEnum.Unmarshal(m, b) +} +func (m *DayOfWeekEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DayOfWeekEnum.Marshal(b, m, deterministic) +} +func (dst *DayOfWeekEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DayOfWeekEnum.Merge(dst, src) +} +func (m *DayOfWeekEnum) XXX_Size() int { + return xxx_messageInfo_DayOfWeekEnum.Size(m) +} +func (m *DayOfWeekEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DayOfWeekEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DayOfWeekEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DayOfWeekEnum)(nil), "google.ads.googleads.v1.enums.DayOfWeekEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DayOfWeekEnum_DayOfWeek", DayOfWeekEnum_DayOfWeek_name, DayOfWeekEnum_DayOfWeek_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/day_of_week.proto", fileDescriptor_day_of_week_f77dfafb40312473) +} + +var fileDescriptor_day_of_week_f77dfafb40312473 = []byte{ + // 343 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4e, 0x32, 0x31, + 0x14, 0xc5, 0xbf, 0x19, 0x3e, 0xf9, 0x53, 0x44, 0xc9, 0x2c, 0x8d, 0x2c, 0xe0, 0x01, 0x3a, 0x99, + 0xb8, 0xab, 0xab, 0xe2, 0x0c, 0x48, 0x8c, 0x03, 0x61, 0x18, 0x88, 0x66, 0x12, 0x52, 0x9d, 0xd2, + 0x10, 0xa0, 0x25, 0x14, 0x30, 0x3c, 0x80, 0xcf, 0xe0, 0xde, 0xa5, 0x8f, 0xe2, 0x6b, 0xb8, 0xf3, + 0x29, 0x4c, 0x5b, 0x66, 0x76, 0xba, 0x69, 0xce, 0xe9, 0xef, 0x9e, 0x9b, 0x7b, 0x2f, 0x70, 0x99, + 0x10, 0x6c, 0x49, 0x5d, 0x92, 0xca, 0xa3, 0x54, 0x6a, 0xef, 0xb9, 0x94, 0xef, 0x56, 0xd2, 0x4d, + 0xc9, 0x61, 0x2a, 0x66, 0xd3, 0x17, 0x4a, 0x17, 0x70, 0xbd, 0x11, 0x5b, 0xe1, 0x34, 0x4c, 0x15, + 0x24, 0xa9, 0x84, 0x79, 0x00, 0xee, 0x3d, 0xa8, 0x03, 0x17, 0x97, 0x59, 0xbf, 0xf5, 0xdc, 0x25, + 0x9c, 0x8b, 0x2d, 0xd9, 0xce, 0x05, 0x97, 0x26, 0xdc, 0x7a, 0xb3, 0x40, 0xcd, 0x27, 0x87, 0xfe, + 0x6c, 0x42, 0xe9, 0x22, 0xe0, 0xbb, 0x55, 0xeb, 0xd5, 0x02, 0x95, 0xfc, 0xc7, 0x39, 0x07, 0xd5, + 0x38, 0x8c, 0x06, 0xc1, 0x4d, 0xaf, 0xd3, 0x0b, 0xfc, 0xfa, 0x3f, 0xa7, 0x0a, 0x4a, 0x71, 0x78, + 0x17, 0xf6, 0x27, 0x61, 0xdd, 0x72, 0x00, 0x28, 0xde, 0xf7, 0x43, 0x1f, 0x3f, 0xd4, 0x6d, 0x05, + 0x46, 0x71, 0x10, 0x29, 0x53, 0x70, 0x6a, 0xa0, 0x32, 0x09, 0xfc, 0xd0, 0xd8, 0xff, 0xce, 0x29, + 0x28, 0x8f, 0x6e, 0xe3, 0xa1, 0x76, 0x27, 0x2a, 0xd5, 0x19, 0xf6, 0x94, 0x2e, 0x2a, 0x12, 0xe1, + 0x51, 0x3c, 0x54, 0xae, 0xa4, 0x48, 0x14, 0xeb, 0x7e, 0xe5, 0xf6, 0x97, 0x05, 0x9a, 0xcf, 0x62, + 0x05, 0xff, 0xdc, 0xae, 0x7d, 0x96, 0x8f, 0x3a, 0x50, 0xfb, 0x0c, 0xac, 0xc7, 0xf6, 0x31, 0xc0, + 0xc4, 0x92, 0x70, 0x06, 0xc5, 0x86, 0xb9, 0x8c, 0x72, 0xbd, 0x6d, 0x76, 0xcf, 0xf5, 0x5c, 0xfe, + 0x72, 0xde, 0x6b, 0xfd, 0xbe, 0xdb, 0x85, 0x2e, 0xc6, 0x1f, 0x76, 0xa3, 0x6b, 0x5a, 0xe1, 0x54, + 0x42, 0x23, 0x95, 0x1a, 0x7b, 0x50, 0x1d, 0x4a, 0x7e, 0x66, 0x3c, 0xc1, 0xa9, 0x4c, 0x72, 0x9e, + 0x8c, 0xbd, 0x44, 0xf3, 0x6f, 0xbb, 0x69, 0x3e, 0x11, 0xc2, 0xa9, 0x44, 0x28, 0xaf, 0x40, 0x68, + 0xec, 0x21, 
0xa4, 0x6b, 0x9e, 0x8a, 0x7a, 0xb0, 0xab, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x14, + 0x7b, 0x20, 0x2d, 0xf6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/device.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/device.pb.go new file mode 100644 index 0000000..b49d9bb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/device.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/device.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates Google Ads devices available for targeting. +type DeviceEnum_Device int32 + +const ( + // Not specified. + DeviceEnum_UNSPECIFIED DeviceEnum_Device = 0 + // The value is unknown in this version. + DeviceEnum_UNKNOWN DeviceEnum_Device = 1 + // Mobile devices with full browsers. + DeviceEnum_MOBILE DeviceEnum_Device = 2 + // Tablets with full browsers. + DeviceEnum_TABLET DeviceEnum_Device = 3 + // Computers. + DeviceEnum_DESKTOP DeviceEnum_Device = 4 + // Other device types. + DeviceEnum_OTHER DeviceEnum_Device = 5 +) + +var DeviceEnum_Device_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MOBILE", + 3: "TABLET", + 4: "DESKTOP", + 5: "OTHER", +} +var DeviceEnum_Device_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MOBILE": 2, + "TABLET": 3, + "DESKTOP": 4, + "OTHER": 5, +} + +func (x DeviceEnum_Device) String() string { + return proto.EnumName(DeviceEnum_Device_name, int32(x)) +} +func (DeviceEnum_Device) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_device_af26abde288b5cfb, []int{0, 0} +} + +// Container for enumeration of Google Ads devices available for targeting. 
+type DeviceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceEnum) Reset() { *m = DeviceEnum{} } +func (m *DeviceEnum) String() string { return proto.CompactTextString(m) } +func (*DeviceEnum) ProtoMessage() {} +func (*DeviceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_device_af26abde288b5cfb, []int{0} +} +func (m *DeviceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceEnum.Unmarshal(m, b) +} +func (m *DeviceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceEnum.Marshal(b, m, deterministic) +} +func (dst *DeviceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceEnum.Merge(dst, src) +} +func (m *DeviceEnum) XXX_Size() int { + return xxx_messageInfo_DeviceEnum.Size(m) +} +func (m *DeviceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DeviceEnum)(nil), "google.ads.googleads.v1.enums.DeviceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DeviceEnum_Device", DeviceEnum_Device_name, DeviceEnum_Device_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/device.proto", fileDescriptor_device_af26abde288b5cfb) +} + +var fileDescriptor_device_af26abde288b5cfb = []byte{ + // 305 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x31, 0x4e, 0xf3, 0x30, + 0x1c, 0xc5, 0xbf, 0xa6, 0x5f, 0x8b, 0x70, 0x07, 0xac, 0x8c, 0x88, 0x0e, 0xed, 0xc8, 0x60, 0x2b, + 0x62, 0x33, 0x93, 0x43, 0x4d, 0xa9, 0x5a, 0x92, 0x88, 0xa6, 0x41, 0x42, 0x59, 0x42, 0x1d, 0x59, + 0x91, 0x5a, 0xbb, 0xaa, 0xd3, 0x1c, 0x88, 0x91, 0xa3, 0x70, 0x0a, 0x66, 0x4e, 0x81, 0x6c, 0x93, + 0x6c, 0xb0, 0x58, 0xcf, 0x7e, 0x3f, 0x3f, 0x3f, 0xff, 0xc1, 0xb5, 0x50, 0x4a, 0xec, 0x4a, 0x5c, + 0x70, 0x8d, 0x9d, 0x34, 0xaa, 0x09, 0x70, 0x29, 0x4f, 0x7b, 0x8d, 0x79, 0xd9, 0x54, 0xdb, 0x12, + 0x1d, 0x8e, 0xaa, 0x56, 0xfe, 0xd8, 0x01, 0xa8, 0xe0, 0x1a, 0x75, 0x2c, 0x6a, 0x02, 0x64, 0xd9, + 0xcb, 0xab, 0x36, 0xea, 0x50, 0xe1, 0x42, 0x4a, 0x55, 0x17, 0x75, 0xa5, 0xa4, 0x76, 0x97, 0xa7, + 0x1c, 0x80, 0x99, 0x0d, 0x63, 0xf2, 0xb4, 0x9f, 0x66, 0x60, 0xe8, 0x76, 0xfe, 0x05, 0x18, 0x6d, + 0xa2, 0x75, 0xc2, 0xee, 0x16, 0xf7, 0x0b, 0x36, 0x83, 0xff, 0xfc, 0x11, 0x38, 0xdb, 0x44, 0xcb, + 0x28, 0x7e, 0x8e, 0x60, 0xcf, 0x07, 0x60, 0xf8, 0x18, 0x87, 0x8b, 0x15, 0x83, 0x9e, 0xd1, 0x29, + 0x0d, 0x57, 0x2c, 0x85, 0x7d, 0x03, 0xcd, 0xd8, 0x7a, 0x99, 0xc6, 0x09, 0xfc, 0xef, 0x9f, 0x83, + 0x41, 0x9c, 0x3e, 0xb0, 0x27, 0x38, 0x08, 0x3f, 0x7b, 0x60, 0xb2, 0x55, 0x7b, 0xf4, 0x67, 0xd3, + 0x70, 0xe4, 0xde, 0x4e, 0x4c, 0xb1, 0xa4, 0xf7, 0x12, 0xfe, 0xd0, 0x42, 0xed, 0x0a, 0x29, 0x90, + 0x3a, 0x0a, 0x2c, 0x4a, 0x69, 0x6b, 0xb7, 0x33, 0x39, 0x54, 0xfa, 0x97, 0x11, 0xdd, 0xda, 0xf5, + 0xcd, 0xeb, 0xcf, 0x29, 0x7d, 0xf7, 0xc6, 0x73, 0x17, 0x45, 0xb9, 0x46, 0x4e, 0x1a, 0x95, 0x05, + 0xc8, 0xfc, 0x5a, 0x7f, 0xb4, 0x7e, 0x4e, 0xb9, 0xce, 0x3b, 0x3f, 0xcf, 0x82, 0xdc, 0xfa, 0x5f, + 0xde, 0xc4, 0x1d, 0x12, 0x42, 0xb9, 0x26, 0xa4, 0x23, 0x08, 0xc9, 0x02, 0x42, 0x2c, 0xf3, 0x3a, + 0xb4, 0xc5, 0x6e, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0x38, 0x5e, 0x04, 0x03, 0xba, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_ad_format_setting.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_ad_format_setting.pb.go new file mode 100644 index 0000000..b91edb8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_ad_format_setting.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/display_ad_format_setting.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates display ad format settings. +type DisplayAdFormatSettingEnum_DisplayAdFormatSetting int32 + +const ( + // Not specified. + DisplayAdFormatSettingEnum_UNSPECIFIED DisplayAdFormatSettingEnum_DisplayAdFormatSetting = 0 + // The value is unknown in this version. + DisplayAdFormatSettingEnum_UNKNOWN DisplayAdFormatSettingEnum_DisplayAdFormatSetting = 1 + // Text, image and native formats. + DisplayAdFormatSettingEnum_ALL_FORMATS DisplayAdFormatSettingEnum_DisplayAdFormatSetting = 2 + // Text and image formats. + DisplayAdFormatSettingEnum_NON_NATIVE DisplayAdFormatSettingEnum_DisplayAdFormatSetting = 3 + // Native format, i.e. the format rendering is controlled by the publisher + // and not by Google. + DisplayAdFormatSettingEnum_NATIVE DisplayAdFormatSettingEnum_DisplayAdFormatSetting = 4 +) + +var DisplayAdFormatSettingEnum_DisplayAdFormatSetting_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ALL_FORMATS", + 3: "NON_NATIVE", + 4: "NATIVE", +} +var DisplayAdFormatSettingEnum_DisplayAdFormatSetting_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ALL_FORMATS": 2, + "NON_NATIVE": 3, + "NATIVE": 4, +} + +func (x DisplayAdFormatSettingEnum_DisplayAdFormatSetting) String() string { + return proto.EnumName(DisplayAdFormatSettingEnum_DisplayAdFormatSetting_name, int32(x)) +} +func (DisplayAdFormatSettingEnum_DisplayAdFormatSetting) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_display_ad_format_setting_4bd791b4be3a0ccc, []int{0, 0} +} + +// Container for display ad format settings. 
+type DisplayAdFormatSettingEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisplayAdFormatSettingEnum) Reset() { *m = DisplayAdFormatSettingEnum{} } +func (m *DisplayAdFormatSettingEnum) String() string { return proto.CompactTextString(m) } +func (*DisplayAdFormatSettingEnum) ProtoMessage() {} +func (*DisplayAdFormatSettingEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_display_ad_format_setting_4bd791b4be3a0ccc, []int{0} +} +func (m *DisplayAdFormatSettingEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisplayAdFormatSettingEnum.Unmarshal(m, b) +} +func (m *DisplayAdFormatSettingEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisplayAdFormatSettingEnum.Marshal(b, m, deterministic) +} +func (dst *DisplayAdFormatSettingEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisplayAdFormatSettingEnum.Merge(dst, src) +} +func (m *DisplayAdFormatSettingEnum) XXX_Size() int { + return xxx_messageInfo_DisplayAdFormatSettingEnum.Size(m) +} +func (m *DisplayAdFormatSettingEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DisplayAdFormatSettingEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DisplayAdFormatSettingEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DisplayAdFormatSettingEnum)(nil), "google.ads.googleads.v1.enums.DisplayAdFormatSettingEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DisplayAdFormatSettingEnum_DisplayAdFormatSetting", DisplayAdFormatSettingEnum_DisplayAdFormatSetting_name, DisplayAdFormatSettingEnum_DisplayAdFormatSetting_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/display_ad_format_setting.proto", fileDescriptor_display_ad_format_setting_4bd791b4be3a0ccc) +} + +var fileDescriptor_display_ad_format_setting_4bd791b4be3a0ccc = []byte{ + // 329 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4b, 0xfb, 0x30, + 0x1c, 0xfd, 0xb7, 0xfb, 0x33, 0x21, 0x03, 0x2d, 0x3d, 0x78, 0x98, 0xee, 0xb0, 0x7d, 0x80, 0x94, + 0xe2, 0x2d, 0xe2, 0x21, 0x73, 0xdd, 0x18, 0xce, 0x6c, 0xb8, 0xad, 0x82, 0x14, 0x4a, 0x5c, 0x6a, + 0x28, 0xac, 0x49, 0x59, 0xb2, 0x81, 0x47, 0xbf, 0x8a, 0x47, 0x3f, 0x8a, 0x1f, 0xc5, 0x93, 0x1f, + 0x41, 0x9a, 0x6c, 0x3d, 0x4d, 0x2f, 0xe1, 0x91, 0xf7, 0x7b, 0xef, 0xf7, 0x7b, 0x0f, 0xdc, 0x70, + 0x29, 0xf9, 0x3a, 0x0b, 0x28, 0x53, 0x81, 0x85, 0x15, 0xda, 0x85, 0x41, 0x26, 0xb6, 0x85, 0x0a, + 0x58, 0xae, 0xca, 0x35, 0x7d, 0x4d, 0x29, 0x4b, 0x5f, 0xe4, 0xa6, 0xa0, 0x3a, 0x55, 0x99, 0xd6, + 0xb9, 0xe0, 0xb0, 0xdc, 0x48, 0x2d, 0xfd, 0x8e, 0xd5, 0x40, 0xca, 0x14, 0xac, 0xe5, 0x70, 0x17, + 0x42, 0x23, 0x6f, 0x5f, 0x1e, 0xdc, 0xcb, 0x3c, 0xa0, 0x42, 0x48, 0x4d, 0x75, 0x2e, 0x85, 0xb2, + 0xe2, 0xde, 0x9b, 0x03, 0xda, 0x03, 0xbb, 0x00, 0xb3, 0xa1, 0xb1, 0x9f, 0x5b, 0xf7, 0x48, 0x6c, + 0x8b, 0xde, 0x0a, 0x9c, 0x1f, 0x67, 0xfd, 0x33, 0xd0, 0x5a, 0x92, 0xf9, 0x2c, 0xba, 0x1d, 0x0f, + 0xc7, 0xd1, 0xc0, 0xfb, 0xe7, 0xb7, 0xc0, 0xc9, 0x92, 0xdc, 0x91, 0xe9, 0x23, 0xf1, 0x9c, 0x8a, + 0xc5, 0x93, 0x49, 0x3a, 0x9c, 0x3e, 0xdc, 0xe3, 0xc5, 0xdc, 0x73, 0xfd, 0x53, 0x00, 0xc8, 0x94, + 0xa4, 0x04, 0x2f, 0xc6, 0x71, 0xe4, 0x35, 0x7c, 0x00, 0x9a, 0x7b, 0xfc, 0xbf, 0xff, 0xed, 0x80, + 0xee, 0x4a, 0x16, 0xf0, 0xcf, 0x1c, 0xfd, 0x8b, 0xe3, 0x87, 0xcc, 0xaa, 0x18, 0x33, 0xe7, 0xa9, + 0xbf, 0x57, 0x73, 0xb9, 0xa6, 0x82, 0x43, 0xb9, 0xe1, 0x01, 0xcf, 0x84, 0x09, 0x79, 0x28, 0xb5, + 0xcc, 0xd5, 0x2f, 0x1d, 
0x5f, 0x9b, 0xf7, 0xdd, 0x6d, 0x8c, 0x30, 0xfe, 0x70, 0x3b, 0x23, 0x6b, + 0x85, 0x99, 0x82, 0x16, 0x56, 0x28, 0x0e, 0x61, 0x55, 0x89, 0xfa, 0x3c, 0xf0, 0x09, 0x66, 0x2a, + 0xa9, 0xf9, 0x24, 0x0e, 0x13, 0xc3, 0x7f, 0xb9, 0x5d, 0xfb, 0x89, 0x10, 0x66, 0x0a, 0xa1, 0x7a, + 0x02, 0xa1, 0x38, 0x44, 0xc8, 0xcc, 0x3c, 0x37, 0xcd, 0x61, 0x57, 0x3f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xb8, 0xeb, 0x40, 0x86, 0xfb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_upload_product_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_upload_product_type.pb.go new file mode 100644 index 0000000..582aef3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/display_upload_product_type.pb.go @@ -0,0 +1,181 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/display_upload_product_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates display upload product types. +type DisplayUploadProductTypeEnum_DisplayUploadProductType int32 + +const ( + // Not specified. + DisplayUploadProductTypeEnum_UNSPECIFIED DisplayUploadProductTypeEnum_DisplayUploadProductType = 0 + // The value is unknown in this version. + DisplayUploadProductTypeEnum_UNKNOWN DisplayUploadProductTypeEnum_DisplayUploadProductType = 1 + // HTML5 upload ad. This product type requires the upload_media_bundle + // field in DisplayUploadAdInfo to be set. + DisplayUploadProductTypeEnum_HTML5_UPLOAD_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 2 + // Dynamic HTML5 education ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in an education campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_EDUCATION_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 3 + // Dynamic HTML5 flight ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a flight campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_FLIGHT_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 4 + // Dynamic HTML5 hotel and rental ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a hotel campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_HOTEL_RENTAL_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 5 + // Dynamic HTML5 job ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a job campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_JOB_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 6 + // Dynamic HTML5 local ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. 
Can only be + // used in a local campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_LOCAL_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 7 + // Dynamic HTML5 real estate ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a real estate campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_REAL_ESTATE_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 8 + // Dynamic HTML5 custom ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a custom campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_CUSTOM_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 9 + // Dynamic HTML5 travel ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a travel campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_TRAVEL_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 10 + // Dynamic HTML5 hotel ad. This product type requires the + // upload_media_bundle field in DisplayUploadAdInfo to be set. Can only be + // used in a hotel campaign. + DisplayUploadProductTypeEnum_DYNAMIC_HTML5_HOTEL_AD DisplayUploadProductTypeEnum_DisplayUploadProductType = 11 +) + +var DisplayUploadProductTypeEnum_DisplayUploadProductType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "HTML5_UPLOAD_AD", + 3: "DYNAMIC_HTML5_EDUCATION_AD", + 4: "DYNAMIC_HTML5_FLIGHT_AD", + 5: "DYNAMIC_HTML5_HOTEL_RENTAL_AD", + 6: "DYNAMIC_HTML5_JOB_AD", + 7: "DYNAMIC_HTML5_LOCAL_AD", + 8: "DYNAMIC_HTML5_REAL_ESTATE_AD", + 9: "DYNAMIC_HTML5_CUSTOM_AD", + 10: "DYNAMIC_HTML5_TRAVEL_AD", + 11: "DYNAMIC_HTML5_HOTEL_AD", +} +var DisplayUploadProductTypeEnum_DisplayUploadProductType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "HTML5_UPLOAD_AD": 2, + "DYNAMIC_HTML5_EDUCATION_AD": 3, + "DYNAMIC_HTML5_FLIGHT_AD": 4, + "DYNAMIC_HTML5_HOTEL_RENTAL_AD": 5, + "DYNAMIC_HTML5_JOB_AD": 6, + "DYNAMIC_HTML5_LOCAL_AD": 7, + "DYNAMIC_HTML5_REAL_ESTATE_AD": 8, + "DYNAMIC_HTML5_CUSTOM_AD": 9, + "DYNAMIC_HTML5_TRAVEL_AD": 10, + "DYNAMIC_HTML5_HOTEL_AD": 11, +} + +func (x DisplayUploadProductTypeEnum_DisplayUploadProductType) String() string { + return proto.EnumName(DisplayUploadProductTypeEnum_DisplayUploadProductType_name, int32(x)) +} +func (DisplayUploadProductTypeEnum_DisplayUploadProductType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_display_upload_product_type_d7c5a6f338709612, []int{0, 0} +} + +// Container for display upload product types. Product types that have the word +// "DYNAMIC" in them must be associated with a campaign that has a dynamic +// remarketing feed. See https://support.google.com/google-ads/answer/6053288 +// for more info about dynamic remarketing. Other product types are regarded +// as "static" and do not have this requirement. 
+type DisplayUploadProductTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisplayUploadProductTypeEnum) Reset() { *m = DisplayUploadProductTypeEnum{} } +func (m *DisplayUploadProductTypeEnum) String() string { return proto.CompactTextString(m) } +func (*DisplayUploadProductTypeEnum) ProtoMessage() {} +func (*DisplayUploadProductTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_display_upload_product_type_d7c5a6f338709612, []int{0} +} +func (m *DisplayUploadProductTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisplayUploadProductTypeEnum.Unmarshal(m, b) +} +func (m *DisplayUploadProductTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisplayUploadProductTypeEnum.Marshal(b, m, deterministic) +} +func (dst *DisplayUploadProductTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisplayUploadProductTypeEnum.Merge(dst, src) +} +func (m *DisplayUploadProductTypeEnum) XXX_Size() int { + return xxx_messageInfo_DisplayUploadProductTypeEnum.Size(m) +} +func (m *DisplayUploadProductTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DisplayUploadProductTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DisplayUploadProductTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DisplayUploadProductTypeEnum)(nil), "google.ads.googleads.v1.enums.DisplayUploadProductTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DisplayUploadProductTypeEnum_DisplayUploadProductType", DisplayUploadProductTypeEnum_DisplayUploadProductType_name, DisplayUploadProductTypeEnum_DisplayUploadProductType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/display_upload_product_type.proto", fileDescriptor_display_upload_product_type_d7c5a6f338709612) +} + +var fileDescriptor_display_upload_product_type_d7c5a6f338709612 = []byte{ + // 441 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0xaa, 0xd3, 0x40, + 0x14, 0x86, 0x6d, 0xae, 0xde, 0xab, 0xd3, 0xc5, 0x2d, 0xa3, 0xe8, 0xa5, 0xb6, 0x62, 0xef, 0x03, + 0x4c, 0x08, 0xe2, 0x26, 0x2e, 0x64, 0x9a, 0xcc, 0x6d, 0xa3, 0x69, 0x12, 0xda, 0x49, 0x45, 0x29, + 0x84, 0x78, 0x27, 0x84, 0x42, 0x9b, 0x19, 0x3a, 0x69, 0xa1, 0xaf, 0xe3, 0xd2, 0x47, 0xf1, 0x15, + 0x7c, 0x03, 0x97, 0xae, 0x5d, 0xc8, 0xcc, 0xd8, 0x42, 0x28, 0x75, 0x33, 0x1c, 0xe6, 0xfb, 0xcf, + 0x7f, 0x0e, 0xfc, 0x07, 0xbc, 0x2f, 0x39, 0x2f, 0x57, 0x85, 0x9d, 0x33, 0x69, 0x9b, 0x52, 0x55, + 0x3b, 0xc7, 0x2e, 0xaa, 0xed, 0x5a, 0xda, 0x6c, 0x29, 0xc5, 0x2a, 0xdf, 0x67, 0x5b, 0xb1, 0xe2, + 0x39, 0xcb, 0xc4, 0x86, 0xb3, 0xed, 0x7d, 0x9d, 0xd5, 0x7b, 0x51, 0x20, 0xb1, 0xe1, 0x35, 0x87, + 0x7d, 0xd3, 0x85, 0x72, 0x26, 0xd1, 0xd1, 0x00, 0xed, 0x1c, 0xa4, 0x0d, 0xba, 0xbd, 0x83, 0xbf, + 0x58, 0xda, 0x79, 0x55, 0xf1, 0x3a, 0xaf, 0x97, 0xbc, 0x92, 0xa6, 0xf9, 0xf6, 0x8f, 0x05, 0x7a, + 0xbe, 0x19, 0x91, 0xea, 0x09, 0x89, 0x19, 0x40, 0xf7, 0xa2, 0x20, 0xd5, 0x76, 0x7d, 0xfb, 0xd3, + 0x02, 0x37, 0xe7, 0x04, 0xf0, 0x1a, 0xb4, 0xd3, 0x68, 0x96, 0x10, 0x2f, 0xb8, 0x0b, 0x88, 0xdf, + 0x79, 0x00, 0xdb, 0xe0, 0x2a, 0x8d, 0x3e, 0x46, 0xf1, 0xa7, 0xa8, 0xd3, 0x82, 0x4f, 0xc1, 0xf5, + 0x98, 0x4e, 0xc2, 0xb7, 0x59, 0x9a, 0x84, 0x31, 0xf6, 0x33, 0xec, 0x77, 0x2c, 0xf8, 0x0a, 0x74, + 0xfd, 0xcf, 0x11, 0x9e, 0x04, 0x5e, 0x66, 0x20, 0xf1, 0x53, 0x0f, 0xd3, 0x20, 0x8e, 0x14, 0xbf, + 0x80, 0x2f, 0xc1, 0x8b, 0x26, 0xbf, 0x0b, 0x83, 0xd1, 0x98, 0x2a, 
0xf8, 0x10, 0x0e, 0x40, 0xbf, + 0x09, 0xc7, 0x31, 0x25, 0x61, 0x36, 0x25, 0x11, 0xc5, 0xa1, 0x92, 0x3c, 0x82, 0x37, 0xe0, 0x59, + 0x53, 0xf2, 0x21, 0x1e, 0x2a, 0x72, 0x09, 0xbb, 0xe0, 0x79, 0x93, 0x84, 0xb1, 0x67, 0xba, 0xae, + 0xe0, 0x6b, 0xd0, 0x6b, 0xb2, 0x29, 0xc1, 0x61, 0x46, 0x66, 0x14, 0x53, 0xa2, 0x14, 0x8f, 0x4f, + 0xf7, 0xf2, 0xd2, 0x19, 0x8d, 0x27, 0x0a, 0x3e, 0x39, 0x85, 0x74, 0x8a, 0xe7, 0x44, 0x7b, 0x83, + 0xd3, 0xb9, 0x66, 0x69, 0xec, 0x77, 0xda, 0xc3, 0xdf, 0x2d, 0x30, 0xb8, 0xe7, 0x6b, 0xf4, 0xdf, + 0x08, 0x87, 0xfd, 0x73, 0x01, 0x24, 0x2a, 0xc3, 0xa4, 0xf5, 0x65, 0xf8, 0xaf, 0xbf, 0xe4, 0xab, + 0xbc, 0x2a, 0x11, 0xdf, 0x94, 0x76, 0x59, 0x54, 0x3a, 0xe1, 0xc3, 0x4d, 0x89, 0xa5, 0x3c, 0x73, + 0x62, 0xef, 0xf4, 0xfb, 0xcd, 0xba, 0x18, 0x61, 0xfc, 0xdd, 0xea, 0x8f, 0x8c, 0x15, 0x66, 0x12, + 0x99, 0x52, 0x55, 0x73, 0x07, 0xa9, 0x6b, 0x90, 0x3f, 0x0e, 0x7c, 0x81, 0x99, 0x5c, 0x1c, 0xf9, + 0x62, 0xee, 0x2c, 0x34, 0xff, 0x65, 0x0d, 0xcc, 0xa7, 0xeb, 0x62, 0x26, 0x5d, 0xf7, 0xa8, 0x70, + 0xdd, 0xb9, 0xe3, 0xba, 0x5a, 0xf3, 0xf5, 0x52, 0x2f, 0xf6, 0xe6, 0x6f, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xb9, 0x0d, 0x64, 0x38, 0xfa, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/dsa_page_feed_criterion_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/dsa_page_feed_criterion_field.pb.go new file mode 100644 index 0000000..93e5026 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/dsa_page_feed_criterion_field.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/dsa_page_feed_criterion_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Dynamic Search Ad Page Feed criterion fields. +type DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField int32 + +const ( + // Not specified. + DsaPageFeedCriterionFieldEnum_UNSPECIFIED DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField = 0 + // Used for return value only. Represents value unknown in this version. + DsaPageFeedCriterionFieldEnum_UNKNOWN DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField = 1 + // Data Type: URL or URL_LIST. URL of the web page you want to target. + DsaPageFeedCriterionFieldEnum_PAGE_URL DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField = 2 + // Data Type: STRING_LIST. The labels that will help you target ads within + // your page feed. 
+ DsaPageFeedCriterionFieldEnum_LABEL DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField = 3 +) + +var DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PAGE_URL", + 3: "LABEL", +} +var DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PAGE_URL": 2, + "LABEL": 3, +} + +func (x DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField) String() string { + return proto.EnumName(DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField_name, int32(x)) +} +func (DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dsa_page_feed_criterion_field_5246b93b382746b3, []int{0, 0} +} + +// Values for Dynamic Search Ad Page Feed criterion fields. +type DsaPageFeedCriterionFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DsaPageFeedCriterionFieldEnum) Reset() { *m = DsaPageFeedCriterionFieldEnum{} } +func (m *DsaPageFeedCriterionFieldEnum) String() string { return proto.CompactTextString(m) } +func (*DsaPageFeedCriterionFieldEnum) ProtoMessage() {} +func (*DsaPageFeedCriterionFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_dsa_page_feed_criterion_field_5246b93b382746b3, []int{0} +} +func (m *DsaPageFeedCriterionFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DsaPageFeedCriterionFieldEnum.Unmarshal(m, b) +} +func (m *DsaPageFeedCriterionFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DsaPageFeedCriterionFieldEnum.Marshal(b, m, deterministic) +} +func (dst *DsaPageFeedCriterionFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DsaPageFeedCriterionFieldEnum.Merge(dst, src) +} +func (m *DsaPageFeedCriterionFieldEnum) XXX_Size() int { + return xxx_messageInfo_DsaPageFeedCriterionFieldEnum.Size(m) +} +func (m *DsaPageFeedCriterionFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DsaPageFeedCriterionFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DsaPageFeedCriterionFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DsaPageFeedCriterionFieldEnum)(nil), "google.ads.googleads.v1.enums.DsaPageFeedCriterionFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField", DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField_name, DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/dsa_page_feed_criterion_field.proto", fileDescriptor_dsa_page_feed_criterion_field_5246b93b382746b3) +} + +var fileDescriptor_dsa_page_feed_criterion_field_5246b93b382746b3 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xc3, 0x40, + 0x18, 0xb4, 0x29, 0xfe, 0x6d, 0x05, 0x43, 0x6e, 0x8a, 0x15, 0xda, 0x07, 0xd8, 0x10, 0xbc, 0xad, + 0xa7, 0x4d, 0x9b, 0x96, 0x62, 0x89, 0xa1, 0xd2, 0x0a, 0x12, 0x08, 0x6b, 0x77, 0xbb, 0x04, 0xda, + 0xdd, 0x90, 0x2f, 0xed, 0x03, 0x79, 0xf4, 0x51, 0x7c, 0x14, 0xaf, 0xbe, 0x80, 0x64, 0xd7, 0xf4, + 0x16, 0x2f, 0x61, 0xc8, 0xcc, 0x37, 0x33, 0x3b, 0x88, 0x4a, 0xad, 0xe5, 0x56, 0xf8, 0x8c, 0x83, + 0x6f, 0x61, 0x8d, 0x0e, 0x81, 0x2f, 0xd4, 0x7e, 0x07, 0x3e, 0x07, 0x96, 0x15, 0x4c, 0x8a, 0x6c, + 0x23, 0x04, 0xcf, 0xd6, 0x65, 0x5e, 0x89, 0x32, 0xd7, 0x2a, 0xdb, 
0xe4, 0x62, 0xcb, 0x71, 0x51, + 0xea, 0x4a, 0x7b, 0x7d, 0x7b, 0x87, 0x19, 0x07, 0x7c, 0xb4, 0xc0, 0x87, 0x00, 0x1b, 0x8b, 0xdb, + 0xbb, 0x26, 0xa1, 0xc8, 0x7d, 0xa6, 0x94, 0xae, 0x58, 0x95, 0x6b, 0x05, 0xf6, 0x78, 0x08, 0xa8, + 0x3f, 0x06, 0x96, 0x30, 0x29, 0x26, 0x42, 0xf0, 0x51, 0x13, 0x30, 0xa9, 0xfd, 0x23, 0xb5, 0xdf, + 0x0d, 0x17, 0xe8, 0xa6, 0x55, 0xe0, 0x5d, 0xa3, 0xde, 0x32, 0x7e, 0x49, 0xa2, 0xd1, 0x6c, 0x32, + 0x8b, 0xc6, 0xee, 0x89, 0xd7, 0x43, 0xe7, 0xcb, 0xf8, 0x29, 0x7e, 0x7e, 0x8d, 0xdd, 0x8e, 0x77, + 0x85, 0x2e, 0x12, 0x3a, 0x8d, 0xb2, 0xe5, 0x62, 0xee, 0x3a, 0xde, 0x25, 0x3a, 0x9d, 0xd3, 0x30, + 0x9a, 0xbb, 0xdd, 0xf0, 0xa7, 0x83, 0x06, 0x6b, 0xbd, 0xc3, 0xff, 0x16, 0x0f, 0xef, 0x5b, 0x73, + 0x93, 0xba, 0x7a, 0xd2, 0x79, 0x0b, 0xff, 0x0c, 0xa4, 0xde, 0x32, 0x25, 0xb1, 0x2e, 0xa5, 0x2f, + 0x85, 0x32, 0x0f, 0x6b, 0xc6, 0x2c, 0x72, 0x68, 0xd9, 0xf6, 0xd1, 0x7c, 0x3f, 0x9c, 0xee, 0x94, + 0xd2, 0x4f, 0xa7, 0x3f, 0xb5, 0x56, 0x94, 0x03, 0xb6, 0xb0, 0x46, 0xab, 0x00, 0xd7, 0x23, 0xc0, + 0x57, 0xc3, 0xa7, 0x94, 0x43, 0x7a, 0xe4, 0xd3, 0x55, 0x90, 0x1a, 0xfe, 0xdb, 0x19, 0xd8, 0x9f, + 0x84, 0x50, 0x0e, 0x84, 0x1c, 0x15, 0x84, 0xac, 0x02, 0x42, 0x8c, 0xe6, 0xfd, 0xcc, 0x14, 0x7b, + 0xf8, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x3c, 0xf4, 0x41, 0xe5, 0xf3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/education_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/education_placeholder_field.pb.go new file mode 100644 index 0000000..ca4f2d4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/education_placeholder_field.pb.go @@ -0,0 +1,207 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/education_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Education placeholder fields. +type EducationPlaceholderFieldEnum_EducationPlaceholderField int32 + +const ( + // Not specified. + EducationPlaceholderFieldEnum_UNSPECIFIED EducationPlaceholderFieldEnum_EducationPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + EducationPlaceholderFieldEnum_UNKNOWN EducationPlaceholderFieldEnum_EducationPlaceholderField = 1 + // Data Type: STRING. Required. Combination of PROGRAM ID and LOCATION ID + // must be unique per offer. + EducationPlaceholderFieldEnum_PROGRAM_ID EducationPlaceholderFieldEnum_EducationPlaceholderField = 2 + // Data Type: STRING. Combination of PROGRAM ID and LOCATION ID must be + // unique per offer. + EducationPlaceholderFieldEnum_LOCATION_ID EducationPlaceholderFieldEnum_EducationPlaceholderField = 3 + // Data Type: STRING. Required. Main headline with program name to be shown + // in dynamic ad. 
+ EducationPlaceholderFieldEnum_PROGRAM_NAME EducationPlaceholderFieldEnum_EducationPlaceholderField = 4 + // Data Type: STRING. Area of study that can be shown in dynamic ad. + EducationPlaceholderFieldEnum_AREA_OF_STUDY EducationPlaceholderFieldEnum_EducationPlaceholderField = 5 + // Data Type: STRING. Description of program that can be shown in dynamic + // ad. + EducationPlaceholderFieldEnum_PROGRAM_DESCRIPTION EducationPlaceholderFieldEnum_EducationPlaceholderField = 6 + // Data Type: STRING. Name of school that can be shown in dynamic ad. + EducationPlaceholderFieldEnum_SCHOOL_NAME EducationPlaceholderFieldEnum_EducationPlaceholderField = 7 + // Data Type: STRING. Complete school address, including postal code. + EducationPlaceholderFieldEnum_ADDRESS EducationPlaceholderFieldEnum_EducationPlaceholderField = 8 + // Data Type: URL. Image to be displayed in ads. + EducationPlaceholderFieldEnum_THUMBNAIL_IMAGE_URL EducationPlaceholderFieldEnum_EducationPlaceholderField = 9 + // Data Type: URL. Alternative hosted file of image to be used in the ad. + EducationPlaceholderFieldEnum_ALTERNATIVE_THUMBNAIL_IMAGE_URL EducationPlaceholderFieldEnum_EducationPlaceholderField = 10 + // Data Type: URL_LIST. Required. Final URLs to be used in ad when using + // Upgraded URLs; the more specific the better (e.g. the individual URL of a + // specific program and its location). + EducationPlaceholderFieldEnum_FINAL_URLS EducationPlaceholderFieldEnum_EducationPlaceholderField = 11 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + EducationPlaceholderFieldEnum_FINAL_MOBILE_URLS EducationPlaceholderFieldEnum_EducationPlaceholderField = 12 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + EducationPlaceholderFieldEnum_TRACKING_URL EducationPlaceholderFieldEnum_EducationPlaceholderField = 13 + // Data Type: STRING_LIST. Keywords used for product retrieval. + EducationPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS EducationPlaceholderFieldEnum_EducationPlaceholderField = 14 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + EducationPlaceholderFieldEnum_ANDROID_APP_LINK EducationPlaceholderFieldEnum_EducationPlaceholderField = 15 + // Data Type: STRING_LIST. List of recommended program IDs to show together + // with this item. + EducationPlaceholderFieldEnum_SIMILAR_PROGRAM_IDS EducationPlaceholderFieldEnum_EducationPlaceholderField = 16 + // Data Type: STRING. iOS app link. + EducationPlaceholderFieldEnum_IOS_APP_LINK EducationPlaceholderFieldEnum_EducationPlaceholderField = 17 + // Data Type: INT64. iOS app store ID. 
+ EducationPlaceholderFieldEnum_IOS_APP_STORE_ID EducationPlaceholderFieldEnum_EducationPlaceholderField = 18 +) + +var EducationPlaceholderFieldEnum_EducationPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PROGRAM_ID", + 3: "LOCATION_ID", + 4: "PROGRAM_NAME", + 5: "AREA_OF_STUDY", + 6: "PROGRAM_DESCRIPTION", + 7: "SCHOOL_NAME", + 8: "ADDRESS", + 9: "THUMBNAIL_IMAGE_URL", + 10: "ALTERNATIVE_THUMBNAIL_IMAGE_URL", + 11: "FINAL_URLS", + 12: "FINAL_MOBILE_URLS", + 13: "TRACKING_URL", + 14: "CONTEXTUAL_KEYWORDS", + 15: "ANDROID_APP_LINK", + 16: "SIMILAR_PROGRAM_IDS", + 17: "IOS_APP_LINK", + 18: "IOS_APP_STORE_ID", +} +var EducationPlaceholderFieldEnum_EducationPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PROGRAM_ID": 2, + "LOCATION_ID": 3, + "PROGRAM_NAME": 4, + "AREA_OF_STUDY": 5, + "PROGRAM_DESCRIPTION": 6, + "SCHOOL_NAME": 7, + "ADDRESS": 8, + "THUMBNAIL_IMAGE_URL": 9, + "ALTERNATIVE_THUMBNAIL_IMAGE_URL": 10, + "FINAL_URLS": 11, + "FINAL_MOBILE_URLS": 12, + "TRACKING_URL": 13, + "CONTEXTUAL_KEYWORDS": 14, + "ANDROID_APP_LINK": 15, + "SIMILAR_PROGRAM_IDS": 16, + "IOS_APP_LINK": 17, + "IOS_APP_STORE_ID": 18, +} + +func (x EducationPlaceholderFieldEnum_EducationPlaceholderField) String() string { + return proto.EnumName(EducationPlaceholderFieldEnum_EducationPlaceholderField_name, int32(x)) +} +func (EducationPlaceholderFieldEnum_EducationPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_education_placeholder_field_7f235e522c33dfac, []int{0, 0} +} + +// Values for Education placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. +type EducationPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EducationPlaceholderFieldEnum) Reset() { *m = EducationPlaceholderFieldEnum{} } +func (m *EducationPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*EducationPlaceholderFieldEnum) ProtoMessage() {} +func (*EducationPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_education_placeholder_field_7f235e522c33dfac, []int{0} +} +func (m *EducationPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EducationPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *EducationPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EducationPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *EducationPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_EducationPlaceholderFieldEnum.Merge(dst, src) +} +func (m *EducationPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_EducationPlaceholderFieldEnum.Size(m) +} +func (m *EducationPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_EducationPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_EducationPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*EducationPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.EducationPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.EducationPlaceholderFieldEnum_EducationPlaceholderField", EducationPlaceholderFieldEnum_EducationPlaceholderField_name, EducationPlaceholderFieldEnum_EducationPlaceholderField_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/enums/education_placeholder_field.proto", fileDescriptor_education_placeholder_field_7f235e522c33dfac) +} + +var fileDescriptor_education_placeholder_field_7f235e522c33dfac = []byte{ + // 514 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcf, 0x8e, 0xd3, 0x30, + 0x10, 0xc6, 0xd9, 0x16, 0x76, 0xc1, 0xdd, 0x3f, 0x6e, 0x00, 0x21, 0x10, 0x05, 0xed, 0x72, 0x4f, + 0x54, 0x71, 0x0b, 0x07, 0x34, 0x4d, 0xdc, 0xae, 0xd5, 0xc4, 0x8e, 0xec, 0xa4, 0xcb, 0xa2, 0x4a, + 0x56, 0x68, 0x42, 0xa8, 0xd4, 0x26, 0x55, 0xd3, 0xee, 0xdb, 0x70, 0xe1, 0xc8, 0x4b, 0x70, 0xe7, + 0x51, 0xb8, 0xf2, 0x02, 0xc8, 0x09, 0x69, 0x2f, 0x94, 0x4b, 0x35, 0x9e, 0xf9, 0xe6, 0xf7, 0xb9, + 0xce, 0x87, 0xde, 0x67, 0x45, 0x91, 0x2d, 0x52, 0x2b, 0x4e, 0x4a, 0xab, 0x2e, 0x75, 0x75, 0xd7, + 0xb7, 0xd2, 0x7c, 0xbb, 0x2c, 0xad, 0x34, 0xd9, 0xce, 0xe2, 0xcd, 0xbc, 0xc8, 0xd5, 0x6a, 0x11, + 0xcf, 0xd2, 0x2f, 0xc5, 0x22, 0x49, 0xd7, 0xea, 0xf3, 0x3c, 0x5d, 0x24, 0xe6, 0x6a, 0x5d, 0x6c, + 0x0a, 0xa3, 0x57, 0x6f, 0x99, 0x71, 0x52, 0x9a, 0x3b, 0x80, 0x79, 0xd7, 0x37, 0x2b, 0xc0, 0x8b, + 0x97, 0x0d, 0x7f, 0x35, 0xb7, 0xe2, 0x3c, 0x2f, 0x36, 0x15, 0xad, 0xac, 0x97, 0xaf, 0x7e, 0xb4, + 0x51, 0x8f, 0x34, 0x16, 0xc1, 0xde, 0x61, 0xa8, 0x0d, 0x48, 0xbe, 0x5d, 0x5e, 0x7d, 0x6d, 0xa3, + 0xe7, 0x07, 0x15, 0xc6, 0x05, 0xea, 0x44, 0x4c, 0x06, 0xc4, 0xa1, 0x43, 0x4a, 0x5c, 0x7c, 0xcf, + 0xe8, 0xa0, 0x93, 0x88, 0x8d, 0x19, 0xbf, 0x61, 0xf8, 0xc8, 0x38, 0x47, 0x28, 0x10, 0x7c, 0x24, + 0xc0, 0x57, 0xd4, 0xc5, 0x2d, 0xad, 0xf6, 0xb8, 0x03, 0x21, 0xe5, 0x4c, 0x37, 0xda, 0x06, 0x46, + 0xa7, 0x8d, 0x80, 0x81, 0x4f, 0xf0, 0x7d, 0xa3, 0x8b, 0xce, 0x40, 0x10, 0x50, 0x7c, 0xa8, 0x64, + 0x18, 0xb9, 0xb7, 0xf8, 0x81, 0xf1, 0x0c, 0x3d, 0x6e, 0x44, 0x2e, 0x91, 0x8e, 0xa0, 0x81, 0x06, + 0xe0, 0x63, 0x8d, 0x93, 0xce, 0x35, 0xe7, 0x5e, 0xbd, 0x7c, 0xa2, 0xcd, 0xc1, 0x75, 0x05, 0x91, + 0x12, 0x3f, 0xd4, 0x6b, 0xe1, 0x75, 0xe4, 0x0f, 0x18, 0x50, 0x4f, 0x51, 0x1f, 0x46, 0x44, 0x45, + 0xc2, 0xc3, 0x8f, 0x8c, 0x37, 0xe8, 0x35, 0x78, 0x21, 0x11, 0x0c, 0x42, 0x3a, 0x21, 0xea, 0x5f, + 0x22, 0xa4, 0xaf, 0x3e, 0xa4, 0x0c, 0x3c, 0x7d, 0x94, 0xb8, 0x63, 0x3c, 0x45, 0xdd, 0xfa, 0xec, + 0xf3, 0x01, 0xf5, 0x48, 0xdd, 0x3e, 0xd5, 0x7f, 0x20, 0x14, 0xe0, 0x8c, 0x29, 0x1b, 0x55, 0x8b, + 0x67, 0xda, 0xd6, 0xe1, 0x2c, 0x24, 0x1f, 0xc2, 0x08, 0x3c, 0x35, 0x26, 0xb7, 0x37, 0x5c, 0xb8, + 0x12, 0x9f, 0x1b, 0x4f, 0x10, 0x06, 0xe6, 0x0a, 0x4e, 0x5d, 0x05, 0x41, 0xa0, 0x3c, 0xca, 0xc6, + 0xf8, 0x42, 0xcb, 0x25, 0xf5, 0xa9, 0x07, 0x42, 0xed, 0x9f, 0x4a, 0x62, 0xac, 0xc9, 0x94, 0xcb, + 0xbd, 0xb4, 0xab, 0x01, 0x4d, 0x47, 0x86, 0x5c, 0x10, 0xfd, 0x84, 0xc6, 0xe0, 0xf7, 0x11, 0xba, + 0x9c, 0x15, 0x4b, 0xf3, 0xbf, 0x29, 0x18, 0xbc, 0x3a, 0xf8, 0x09, 0x03, 0x9d, 0x83, 0xe0, 0xe8, + 0xe3, 0xe0, 0x2f, 0x20, 0x2b, 0x16, 0x71, 0x9e, 0x99, 0xc5, 0x3a, 0xb3, 0xb2, 0x34, 0xaf, 0x52, + 0xd2, 0xe4, 0x72, 0x35, 0x2f, 0x0f, 0xc4, 0xf4, 0x5d, 0xf5, 0xfb, 0xad, 0xd5, 0x1e, 0x01, 0x7c, + 0x6f, 0xf5, 0x46, 0x35, 0x0a, 0x92, 0xd2, 0xac, 0x4b, 0x5d, 0x4d, 0xfa, 0xa6, 0x0e, 0x54, 0xf9, + 0xb3, 0x99, 0x4f, 0x21, 0x29, 0xa7, 0xbb, 0xf9, 0x74, 0xd2, 0x9f, 0x56, 0xf3, 0x5f, 0xad, 0xcb, + 0xba, 0x69, 0xdb, 0x90, 0x94, 0xb6, 0xbd, 0x53, 0xd8, 0xf6, 0xa4, 0x6f, 0xdb, 0x95, 0xe6, 0xd3, + 0x71, 0x75, 0xb1, 0xb7, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfa, 0x24, 0xab, 0x32, 0x3e, 0x03, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_setting_device.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_setting_device.pb.go new file mode 100644 index 0000000..112475c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_setting_device.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/extension_setting_device.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possbile device types for an extension setting. +type ExtensionSettingDeviceEnum_ExtensionSettingDevice int32 + +const ( + // Not specified. + ExtensionSettingDeviceEnum_UNSPECIFIED ExtensionSettingDeviceEnum_ExtensionSettingDevice = 0 + // The value is unknown in this version. + ExtensionSettingDeviceEnum_UNKNOWN ExtensionSettingDeviceEnum_ExtensionSettingDevice = 1 + // Mobile. The extensions in the extension setting will only serve on + // mobile devices. + ExtensionSettingDeviceEnum_MOBILE ExtensionSettingDeviceEnum_ExtensionSettingDevice = 2 + // Desktop. The extensions in the extension setting will only serve on + // desktop devices. + ExtensionSettingDeviceEnum_DESKTOP ExtensionSettingDeviceEnum_ExtensionSettingDevice = 3 +) + +var ExtensionSettingDeviceEnum_ExtensionSettingDevice_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MOBILE", + 3: "DESKTOP", +} +var ExtensionSettingDeviceEnum_ExtensionSettingDevice_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MOBILE": 2, + "DESKTOP": 3, +} + +func (x ExtensionSettingDeviceEnum_ExtensionSettingDevice) String() string { + return proto.EnumName(ExtensionSettingDeviceEnum_ExtensionSettingDevice_name, int32(x)) +} +func (ExtensionSettingDeviceEnum_ExtensionSettingDevice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_extension_setting_device_c8f5db7acb01d08d, []int{0, 0} +} + +// Container for enum describing extension setting device types. 
+type ExtensionSettingDeviceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionSettingDeviceEnum) Reset() { *m = ExtensionSettingDeviceEnum{} } +func (m *ExtensionSettingDeviceEnum) String() string { return proto.CompactTextString(m) } +func (*ExtensionSettingDeviceEnum) ProtoMessage() {} +func (*ExtensionSettingDeviceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_setting_device_c8f5db7acb01d08d, []int{0} +} +func (m *ExtensionSettingDeviceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionSettingDeviceEnum.Unmarshal(m, b) +} +func (m *ExtensionSettingDeviceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionSettingDeviceEnum.Marshal(b, m, deterministic) +} +func (dst *ExtensionSettingDeviceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionSettingDeviceEnum.Merge(dst, src) +} +func (m *ExtensionSettingDeviceEnum) XXX_Size() int { + return xxx_messageInfo_ExtensionSettingDeviceEnum.Size(m) +} +func (m *ExtensionSettingDeviceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionSettingDeviceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionSettingDeviceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ExtensionSettingDeviceEnum)(nil), "google.ads.googleads.v1.enums.ExtensionSettingDeviceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice", ExtensionSettingDeviceEnum_ExtensionSettingDevice_name, ExtensionSettingDeviceEnum_ExtensionSettingDevice_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/extension_setting_device.proto", fileDescriptor_extension_setting_device_c8f5db7acb01d08d) +} + +var fileDescriptor_extension_setting_device_c8f5db7acb01d08d = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4b, 0xfb, 0x30, + 0x18, 0xc6, 0xff, 0xeb, 0x60, 0x7f, 0xc8, 0x0e, 0x96, 0x1e, 0x3c, 0x4c, 0x77, 0xd8, 0x3e, 0x40, + 0x42, 0xf1, 0x16, 0xbd, 0xb4, 0x2e, 0x8e, 0x31, 0x6d, 0x0b, 0x73, 0x13, 0xa4, 0x30, 0xea, 0x12, + 0x42, 0x60, 0x4d, 0xca, 0x92, 0x15, 0x3f, 0x8f, 0x47, 0x3f, 0x8a, 0x1f, 0xc5, 0x93, 0x1f, 0x41, + 0x9a, 0xd8, 0x9e, 0xa6, 0x97, 0xf2, 0xd0, 0xe7, 0x7d, 0x7e, 0x79, 0xde, 0x17, 0xdc, 0x70, 0xa5, + 0xf8, 0x9e, 0xa1, 0x82, 0x6a, 0xe4, 0x64, 0xa3, 0xea, 0x10, 0x31, 0x79, 0x2c, 0x35, 0x62, 0xaf, + 0x86, 0x49, 0x2d, 0x94, 0xdc, 0x6a, 0x66, 0x8c, 0x90, 0x7c, 0x4b, 0x59, 0x2d, 0x76, 0x0c, 0x56, + 0x07, 0x65, 0x54, 0x30, 0x76, 0x11, 0x58, 0x50, 0x0d, 0xbb, 0x34, 0xac, 0x43, 0x68, 0xd3, 0xa3, + 0xcb, 0x16, 0x5e, 0x09, 0x54, 0x48, 0xa9, 0x4c, 0x61, 0x84, 0x92, 0xda, 0x85, 0xa7, 0x25, 0x18, + 0x91, 0x16, 0xbf, 0x72, 0xf4, 0x99, 0x85, 0x13, 0x79, 0x2c, 0xa7, 0x29, 0x38, 0x3f, 0xed, 0x06, + 0x67, 0x60, 0xb8, 0x4e, 0x56, 0x19, 0xb9, 0x5d, 0xdc, 0x2d, 0xc8, 0xcc, 0xff, 0x17, 0x0c, 0xc1, + 0xff, 0x75, 0xb2, 0x4c, 0xd2, 0xa7, 0xc4, 0xef, 0x05, 0x00, 0x0c, 0x1e, 0xd2, 0x78, 0x71, 0x4f, + 0x7c, 0xaf, 0x31, 0x66, 0x64, 0xb5, 0x7c, 0x4c, 0x33, 0xbf, 0x1f, 0x7f, 0xf5, 0xc0, 0x64, 0xa7, + 0x4a, 0xf8, 0x67, 0xe5, 0xf8, 0xe2, 0xf4, 0xa3, 0x59, 0xd3, 0x38, 0xeb, 0x3d, 0xc7, 0x3f, 0x69, + 0xae, 0xf6, 0x85, 0xe4, 0x50, 0x1d, 0x38, 0xe2, 0x4c, 0xda, 0x7d, 0xda, 0xf3, 0x55, 0x42, 0xff, + 0x72, 0xcd, 0x6b, 0xfb, 0x7d, 0xf3, 0xfa, 0xf3, 0x28, 0x7a, 0xf7, 0xc6, 0x73, 0x87, 0x8a, 0xa8, + 0x86, 0x4e, 0x36, 0x6a, 0x13, 
0xc2, 0x66, 0x7d, 0xfd, 0xd1, 0xfa, 0x79, 0x44, 0x75, 0xde, 0xf9, + 0xf9, 0x26, 0xcc, 0xad, 0xff, 0xe9, 0x4d, 0xdc, 0x4f, 0x8c, 0x23, 0xaa, 0x31, 0xee, 0x26, 0x30, + 0xde, 0x84, 0x18, 0xdb, 0x99, 0x97, 0x81, 0x2d, 0x76, 0xf5, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x09, + 0x45, 0x82, 0xac, 0xe5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_type.pb.go new file mode 100644 index 0000000..57cd2f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/extension_type.pb.go @@ -0,0 +1,165 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/extension_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible data types for an extension in an extension setting. +type ExtensionTypeEnum_ExtensionType int32 + +const ( + // Not specified. + ExtensionTypeEnum_UNSPECIFIED ExtensionTypeEnum_ExtensionType = 0 + // Used for return value only. Represents value unknown in this version. + ExtensionTypeEnum_UNKNOWN ExtensionTypeEnum_ExtensionType = 1 + // None. + ExtensionTypeEnum_NONE ExtensionTypeEnum_ExtensionType = 2 + // App. + ExtensionTypeEnum_APP ExtensionTypeEnum_ExtensionType = 3 + // Call. + ExtensionTypeEnum_CALL ExtensionTypeEnum_ExtensionType = 4 + // Callout. + ExtensionTypeEnum_CALLOUT ExtensionTypeEnum_ExtensionType = 5 + // Message. + ExtensionTypeEnum_MESSAGE ExtensionTypeEnum_ExtensionType = 6 + // Price. + ExtensionTypeEnum_PRICE ExtensionTypeEnum_ExtensionType = 7 + // Promotion. + ExtensionTypeEnum_PROMOTION ExtensionTypeEnum_ExtensionType = 8 + // Review. + ExtensionTypeEnum_REVIEW ExtensionTypeEnum_ExtensionType = 9 + // Sitelink. + ExtensionTypeEnum_SITELINK ExtensionTypeEnum_ExtensionType = 10 + // Structured snippet. + ExtensionTypeEnum_STRUCTURED_SNIPPET ExtensionTypeEnum_ExtensionType = 11 + // Location. + ExtensionTypeEnum_LOCATION ExtensionTypeEnum_ExtensionType = 12 + // Affiliate location. 
+ ExtensionTypeEnum_AFFILIATE_LOCATION ExtensionTypeEnum_ExtensionType = 13 +) + +var ExtensionTypeEnum_ExtensionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NONE", + 3: "APP", + 4: "CALL", + 5: "CALLOUT", + 6: "MESSAGE", + 7: "PRICE", + 8: "PROMOTION", + 9: "REVIEW", + 10: "SITELINK", + 11: "STRUCTURED_SNIPPET", + 12: "LOCATION", + 13: "AFFILIATE_LOCATION", +} +var ExtensionTypeEnum_ExtensionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NONE": 2, + "APP": 3, + "CALL": 4, + "CALLOUT": 5, + "MESSAGE": 6, + "PRICE": 7, + "PROMOTION": 8, + "REVIEW": 9, + "SITELINK": 10, + "STRUCTURED_SNIPPET": 11, + "LOCATION": 12, + "AFFILIATE_LOCATION": 13, +} + +func (x ExtensionTypeEnum_ExtensionType) String() string { + return proto.EnumName(ExtensionTypeEnum_ExtensionType_name, int32(x)) +} +func (ExtensionTypeEnum_ExtensionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_extension_type_daed22fceb6f35da, []int{0, 0} +} + +// Container for enum describing possible data types for an extension in an +// extension setting. +type ExtensionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionTypeEnum) Reset() { *m = ExtensionTypeEnum{} } +func (m *ExtensionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ExtensionTypeEnum) ProtoMessage() {} +func (*ExtensionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_type_daed22fceb6f35da, []int{0} +} +func (m *ExtensionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionTypeEnum.Unmarshal(m, b) +} +func (m *ExtensionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ExtensionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionTypeEnum.Merge(dst, src) +} +func (m *ExtensionTypeEnum) XXX_Size() int { + return xxx_messageInfo_ExtensionTypeEnum.Size(m) +} +func (m *ExtensionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ExtensionTypeEnum)(nil), "google.ads.googleads.v1.enums.ExtensionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ExtensionTypeEnum_ExtensionType", ExtensionTypeEnum_ExtensionType_name, ExtensionTypeEnum_ExtensionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/extension_type.proto", fileDescriptor_extension_type_daed22fceb6f35da) +} + +var fileDescriptor_extension_type_daed22fceb6f35da = []byte{ + // 407 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4d, 0x6a, 0xdc, 0x30, + 0x18, 0xad, 0x67, 0x92, 0xf9, 0xd1, 0x64, 0xa8, 0xaa, 0x45, 0x17, 0xa5, 0x59, 0x24, 0x07, 0x90, + 0x71, 0xbb, 0x53, 0x57, 0x1a, 0x47, 0x33, 0x88, 0x38, 0xb2, 0xb0, 0xe5, 0x09, 0x94, 0x81, 0xc1, + 0xad, 0x8d, 0x19, 0xc8, 0x48, 0x26, 0x72, 0x42, 0x73, 0x9d, 0x2e, 0x7b, 0x94, 0x1e, 0xa0, 0x57, + 0x28, 0x94, 0x1e, 0xa2, 0xc8, 0xae, 0x0d, 0x59, 0xb4, 0x1b, 0xf1, 0xf4, 0xbe, 0xf7, 0x1e, 0xd2, + 0xfb, 0xc0, 0xbb, 0xca, 0x98, 0xea, 0xae, 0xf4, 0xf3, 0xc2, 0xfa, 0x1d, 0x74, 0xe8, 0x31, 0xf0, + 0x4b, 0xfd, 0x70, 0xb4, 0x7e, 0xf9, 0xa5, 0x29, 0xb5, 0x3d, 0x18, 0xbd, 0x6f, 0x9e, 0xea, 0x12, + 0xd7, 0xf7, 0xa6, 0x31, 0xe8, 0xbc, 0x13, 0xe2, 0xbc, 0xb0, 0x78, 0xf0, 0xe0, 0xc7, 0x00, 
0xb7, + 0x9e, 0x37, 0x6f, 0xfb, 0xc8, 0xfa, 0xe0, 0xe7, 0x5a, 0x9b, 0x26, 0x6f, 0x0e, 0x46, 0xdb, 0xce, + 0x7c, 0xf9, 0xdb, 0x03, 0xaf, 0x58, 0x9f, 0xaa, 0x9e, 0xea, 0x92, 0xe9, 0x87, 0xe3, 0xe5, 0x0f, + 0x0f, 0x2c, 0x9f, 0xb1, 0xe8, 0x25, 0x58, 0x64, 0x22, 0x95, 0x2c, 0xe4, 0x6b, 0xce, 0xae, 0xe0, + 0x0b, 0xb4, 0x00, 0xd3, 0x4c, 0x5c, 0x8b, 0xf8, 0x56, 0x40, 0x0f, 0xcd, 0xc0, 0x89, 0x88, 0x05, + 0x83, 0x23, 0x34, 0x05, 0x63, 0x2a, 0x25, 0x1c, 0x3b, 0x2a, 0xa4, 0x51, 0x04, 0x4f, 0x9c, 0xd2, + 0xa1, 0x38, 0x53, 0xf0, 0xd4, 0x5d, 0x6e, 0x58, 0x9a, 0xd2, 0x0d, 0x83, 0x13, 0x34, 0x07, 0xa7, + 0x32, 0xe1, 0x21, 0x83, 0x53, 0xb4, 0x04, 0x73, 0x99, 0xc4, 0x37, 0xb1, 0xe2, 0xb1, 0x80, 0x33, + 0x04, 0xc0, 0x24, 0x61, 0x5b, 0xce, 0x6e, 0xe1, 0x1c, 0x9d, 0x81, 0x59, 0xca, 0x15, 0x8b, 0xb8, + 0xb8, 0x86, 0x00, 0xbd, 0x06, 0x28, 0x55, 0x49, 0x16, 0xaa, 0x2c, 0x61, 0x57, 0xfb, 0x54, 0x70, + 0x29, 0x99, 0x82, 0x0b, 0xa7, 0x8a, 0xe2, 0x90, 0xb6, 0xfe, 0x33, 0xa7, 0xa2, 0xeb, 0x35, 0x8f, + 0x38, 0x55, 0x6c, 0x3f, 0xf0, 0xcb, 0xd5, 0x4f, 0x0f, 0x5c, 0x7c, 0x36, 0x47, 0xfc, 0xdf, 0xca, + 0x56, 0xe8, 0xd9, 0xdf, 0xa5, 0x2b, 0x4a, 0x7a, 0x1f, 0x57, 0x7f, 0x4d, 0x95, 0xb9, 0xcb, 0x75, + 0x85, 0xcd, 0x7d, 0xe5, 0x57, 0xa5, 0x6e, 0x6b, 0xec, 0x77, 0x55, 0x1f, 0xec, 0x3f, 0x56, 0xf7, + 0xa1, 0x3d, 0xbf, 0x8e, 0xc6, 0x1b, 0x4a, 0xbf, 0x8d, 0xce, 0x37, 0x5d, 0x14, 0x2d, 0x2c, 0xee, + 0xa0, 0x43, 0xdb, 0x00, 0xbb, 0xf6, 0xed, 0xf7, 0x7e, 0xbe, 0xa3, 0x85, 0xdd, 0x0d, 0xf3, 0xdd, + 0x36, 0xd8, 0xb5, 0xf3, 0x5f, 0xa3, 0x8b, 0x8e, 0x24, 0x84, 0x16, 0x96, 0x90, 0x41, 0x41, 0xc8, + 0x36, 0x20, 0xa4, 0xd5, 0x7c, 0x9a, 0xb4, 0x0f, 0x7b, 0xff, 0x27, 0x00, 0x00, 0xff, 0xff, 0xb6, + 0x23, 0x53, 0xc3, 0x52, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/external_conversion_source.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/external_conversion_source.pb.go new file mode 100644 index 0000000..e60db53 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/external_conversion_source.pb.go @@ -0,0 +1,234 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/external_conversion_source.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The external conversion source that is associated with a ConversionAction. +type ExternalConversionSourceEnum_ExternalConversionSource int32 + +const ( + // Not specified. + ExternalConversionSourceEnum_UNSPECIFIED ExternalConversionSourceEnum_ExternalConversionSource = 0 + // Represents value unknown in this version. + ExternalConversionSourceEnum_UNKNOWN ExternalConversionSourceEnum_ExternalConversionSource = 1 + // Conversion that occurs when a user navigates to a particular webpage + // after viewing an ad; Displayed in Google Ads UI as 'Website'. 
+ ExternalConversionSourceEnum_WEBPAGE ExternalConversionSourceEnum_ExternalConversionSource = 2 + // Conversion that comes from linked Google Analytics goal or transaction; + // Displayed in Google Ads UI as 'Analytics'. + ExternalConversionSourceEnum_ANALYTICS ExternalConversionSourceEnum_ExternalConversionSource = 3 + // Website conversion that is uploaded through ConversionUploadService; + // Displayed in Google Ads UI as 'Import from clicks'. + ExternalConversionSourceEnum_UPLOAD ExternalConversionSourceEnum_ExternalConversionSource = 4 + // Conversion that occurs when a user clicks on a call extension directly on + // an ad; Displayed in Google Ads UI as 'Calls from ads'. + ExternalConversionSourceEnum_AD_CALL_METRICS ExternalConversionSourceEnum_ExternalConversionSource = 5 + // Conversion that occurs when a user calls a dynamically-generated phone + // number (by installed javascript) from an advertiser's website after + // clicking on an ad; Displayed in Google Ads UI as 'Calls from website'. + ExternalConversionSourceEnum_WEBSITE_CALL_METRICS ExternalConversionSourceEnum_ExternalConversionSource = 6 + // Conversion that occurs when a user visits an advertiser's retail store + // after clicking on a Google ad; + // Displayed in Google Ads UI as 'Store visits'. + ExternalConversionSourceEnum_STORE_VISITS ExternalConversionSourceEnum_ExternalConversionSource = 7 + // Conversion that occurs when a user takes an in-app action such as a + // purchase in an Android app; + // Displayed in Google Ads UI as 'Android in-app action'. + ExternalConversionSourceEnum_ANDROID_IN_APP ExternalConversionSourceEnum_ExternalConversionSource = 8 + // Conversion that occurs when a user takes an in-app action such as a + // purchase in an iOS app; + // Displayed in Google Ads UI as 'iOS in-app action'. + ExternalConversionSourceEnum_IOS_IN_APP ExternalConversionSourceEnum_ExternalConversionSource = 9 + // Conversion that occurs when a user opens an iOS app for the first time; + // Displayed in Google Ads UI as 'iOS app install (first open)'. + ExternalConversionSourceEnum_IOS_FIRST_OPEN ExternalConversionSourceEnum_ExternalConversionSource = 10 + // Legacy app conversions that do not have an AppPlatform provided; + // Displayed in Google Ads UI as 'Mobile app'. + ExternalConversionSourceEnum_APP_UNSPECIFIED ExternalConversionSourceEnum_ExternalConversionSource = 11 + // Conversion that occurs when a user opens an Android app for the first + // time; Displayed in Google Ads UI as 'Android app install (first open)'. + ExternalConversionSourceEnum_ANDROID_FIRST_OPEN ExternalConversionSourceEnum_ExternalConversionSource = 12 + // Call conversion that is uploaded through ConversionUploadService; + // Displayed in Google Ads UI as 'Import from calls'. + ExternalConversionSourceEnum_UPLOAD_CALLS ExternalConversionSourceEnum_ExternalConversionSource = 13 + // Conversion that comes from a linked Firebase event; + // Displayed in Google Ads UI as 'Firebase'. + ExternalConversionSourceEnum_FIREBASE ExternalConversionSourceEnum_ExternalConversionSource = 14 + // Conversion that occurs when a user clicks on a mobile phone number; + // Displayed in Google Ads UI as 'Phone number clicks'. + ExternalConversionSourceEnum_CLICK_TO_CALL ExternalConversionSourceEnum_ExternalConversionSource = 15 + // Conversion that comes from Salesforce; + // Displayed in Google Ads UI as 'Salesforce.com'. 
+ ExternalConversionSourceEnum_SALESFORCE ExternalConversionSourceEnum_ExternalConversionSource = 16 + // Conversion that comes from in-store purchases recorded by CRM; + // Displayed in Google Ads UI as 'Store sales (data partner)'. + ExternalConversionSourceEnum_STORE_SALES_CRM ExternalConversionSourceEnum_ExternalConversionSource = 17 + // Conversion that comes from in-store purchases from payment network; + // Displayed in Google Ads UI as 'Store sales (payment network)'. + ExternalConversionSourceEnum_STORE_SALES_PAYMENT_NETWORK ExternalConversionSourceEnum_ExternalConversionSource = 18 + // Codeless Google Play conversion; + // Displayed in Google Ads UI as 'Google Play'. + ExternalConversionSourceEnum_GOOGLE_PLAY ExternalConversionSourceEnum_ExternalConversionSource = 19 + // Conversion that comes from a linked third-party app analytics event; + // Displayed in Google Ads UI as 'Third-party app analytics'. + ExternalConversionSourceEnum_THIRD_PARTY_APP_ANALYTICS ExternalConversionSourceEnum_ExternalConversionSource = 20 + // Conversion that is controlled by Google Attribution. + ExternalConversionSourceEnum_GOOGLE_ATTRIBUTION ExternalConversionSourceEnum_ExternalConversionSource = 21 + // Store Sales conversion based on first-party or third-party merchant data + // uploads. Displayed in Google Ads UI as 'Store sales (direct)'. + ExternalConversionSourceEnum_STORE_SALES_DIRECT ExternalConversionSourceEnum_ExternalConversionSource = 22 +) + +var ExternalConversionSourceEnum_ExternalConversionSource_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "WEBPAGE", + 3: "ANALYTICS", + 4: "UPLOAD", + 5: "AD_CALL_METRICS", + 6: "WEBSITE_CALL_METRICS", + 7: "STORE_VISITS", + 8: "ANDROID_IN_APP", + 9: "IOS_IN_APP", + 10: "IOS_FIRST_OPEN", + 11: "APP_UNSPECIFIED", + 12: "ANDROID_FIRST_OPEN", + 13: "UPLOAD_CALLS", + 14: "FIREBASE", + 15: "CLICK_TO_CALL", + 16: "SALESFORCE", + 17: "STORE_SALES_CRM", + 18: "STORE_SALES_PAYMENT_NETWORK", + 19: "GOOGLE_PLAY", + 20: "THIRD_PARTY_APP_ANALYTICS", + 21: "GOOGLE_ATTRIBUTION", + 22: "STORE_SALES_DIRECT", +} +var ExternalConversionSourceEnum_ExternalConversionSource_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "WEBPAGE": 2, + "ANALYTICS": 3, + "UPLOAD": 4, + "AD_CALL_METRICS": 5, + "WEBSITE_CALL_METRICS": 6, + "STORE_VISITS": 7, + "ANDROID_IN_APP": 8, + "IOS_IN_APP": 9, + "IOS_FIRST_OPEN": 10, + "APP_UNSPECIFIED": 11, + "ANDROID_FIRST_OPEN": 12, + "UPLOAD_CALLS": 13, + "FIREBASE": 14, + "CLICK_TO_CALL": 15, + "SALESFORCE": 16, + "STORE_SALES_CRM": 17, + "STORE_SALES_PAYMENT_NETWORK": 18, + "GOOGLE_PLAY": 19, + "THIRD_PARTY_APP_ANALYTICS": 20, + "GOOGLE_ATTRIBUTION": 21, + "STORE_SALES_DIRECT": 22, +} + +func (x ExternalConversionSourceEnum_ExternalConversionSource) String() string { + return proto.EnumName(ExternalConversionSourceEnum_ExternalConversionSource_name, int32(x)) +} +func (ExternalConversionSourceEnum_ExternalConversionSource) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_external_conversion_source_4493126392cc16df, []int{0, 0} +} + +// Container for enum describing the external conversion source that is +// associated with a ConversionAction. 
+type ExternalConversionSourceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExternalConversionSourceEnum) Reset() { *m = ExternalConversionSourceEnum{} } +func (m *ExternalConversionSourceEnum) String() string { return proto.CompactTextString(m) } +func (*ExternalConversionSourceEnum) ProtoMessage() {} +func (*ExternalConversionSourceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_external_conversion_source_4493126392cc16df, []int{0} +} +func (m *ExternalConversionSourceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExternalConversionSourceEnum.Unmarshal(m, b) +} +func (m *ExternalConversionSourceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExternalConversionSourceEnum.Marshal(b, m, deterministic) +} +func (dst *ExternalConversionSourceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExternalConversionSourceEnum.Merge(dst, src) +} +func (m *ExternalConversionSourceEnum) XXX_Size() int { + return xxx_messageInfo_ExternalConversionSourceEnum.Size(m) +} +func (m *ExternalConversionSourceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ExternalConversionSourceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ExternalConversionSourceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ExternalConversionSourceEnum)(nil), "google.ads.googleads.v1.enums.ExternalConversionSourceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ExternalConversionSourceEnum_ExternalConversionSource", ExternalConversionSourceEnum_ExternalConversionSource_name, ExternalConversionSourceEnum_ExternalConversionSource_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/external_conversion_source.proto", fileDescriptor_external_conversion_source_4493126392cc16df) +} + +var fileDescriptor_external_conversion_source_4493126392cc16df = []byte{ + // 553 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x6e, 0xda, 0x3c, + 0x18, 0xfe, 0x4a, 0xfb, 0xf5, 0xc7, 0xfd, 0x73, 0xdd, 0xae, 0xea, 0xb6, 0xa2, 0xa9, 0xbd, 0x80, + 0x44, 0x68, 0x67, 0x99, 0x34, 0xc9, 0x49, 0x0c, 0xb3, 0x9a, 0xc6, 0x96, 0x6d, 0x40, 0x4c, 0x48, + 0x56, 0x06, 0x51, 0x84, 0x04, 0x31, 0x22, 0x80, 0x76, 0xb8, 0x6b, 0xd9, 0xe1, 0x2e, 0x65, 0x37, + 0xb1, 0xf3, 0x1d, 0xee, 0x0a, 0x26, 0x27, 0xc0, 0xd8, 0x01, 0x3b, 0xb1, 0xde, 0x9f, 0xe7, 0x7d, + 0x9f, 0xe7, 0xb5, 0x1e, 0xf0, 0x3e, 0x33, 0x26, 0x1b, 0xa7, 0x6e, 0x32, 0x2c, 0xdc, 0x2a, 0xb4, + 0xd1, 0xb2, 0xe1, 0xa6, 0xf9, 0x62, 0x52, 0xb8, 0xe9, 0xe7, 0x79, 0x3a, 0xcb, 0x93, 0xb1, 0x1e, + 0x98, 0x7c, 0x99, 0xce, 0x8a, 0x91, 0xc9, 0x75, 0x61, 0x16, 0xb3, 0x41, 0xea, 0x4c, 0x67, 0x66, + 0x6e, 0x50, 0xbd, 0x1a, 0x72, 0x92, 0x61, 0xe1, 0x6c, 0xe6, 0x9d, 0x65, 0xc3, 0x29, 0xe7, 0x5f, + 0xdd, 0xaf, 0xd7, 0x4f, 0x47, 0x6e, 0x92, 0xe7, 0x66, 0x9e, 0xcc, 0x47, 0x26, 0x2f, 0xaa, 0xe1, + 0xc7, 0x2f, 0x07, 0xe0, 0x9e, 0xac, 0x18, 0x82, 0x0d, 0x81, 0x2c, 0xf7, 0x93, 0x7c, 0x31, 0x79, + 0xfc, 0xb1, 0x0f, 0xee, 0x76, 0x01, 0xd0, 0x25, 0x38, 0x6d, 0xc7, 0x92, 0x93, 0x80, 0x36, 0x29, + 0x09, 0xe1, 0x7f, 0xe8, 0x14, 0x1c, 0xb5, 0xe3, 0xa7, 0x98, 0x75, 0x63, 0xb8, 0x67, 0x93, 0x2e, + 0xf1, 0x39, 0x6e, 0x11, 0x58, 0x43, 0xe7, 0xe0, 0x04, 0xc7, 0x38, 0xea, 0x29, 0x1a, 0x48, 0xb8, + 0x8f, 0x00, 0x38, 0x6c, 0xf3, 0x88, 0xe1, 0x10, 0x1e, 0xa0, 0x6b, 0x70, 0x89, 0x43, 0x1d, 0xe0, + 0x28, 0xd2, 0xcf, 0x44, 0x09, 0x0b, 0xf8, 0x1f, 0xdd, 0x81, 0x9b, 
0x2e, 0xf1, 0x25, 0x55, 0xe4, + 0xef, 0xce, 0x21, 0x82, 0xe0, 0x4c, 0x2a, 0x26, 0x88, 0xee, 0x50, 0x49, 0x95, 0x84, 0x47, 0x08, + 0x81, 0x0b, 0x1c, 0x87, 0x82, 0xd1, 0x50, 0xd3, 0x58, 0x63, 0xce, 0xe1, 0x31, 0xba, 0x00, 0x80, + 0x32, 0xb9, 0xce, 0x4f, 0x2c, 0xc6, 0xe6, 0x4d, 0x2a, 0xa4, 0xd2, 0x8c, 0x93, 0x18, 0x82, 0x92, + 0x98, 0x73, 0xbd, 0x7d, 0xc2, 0x29, 0xba, 0x05, 0x68, 0xbd, 0x6c, 0x0b, 0x7c, 0x66, 0x69, 0x2b, + 0xc5, 0xa5, 0x1e, 0x09, 0xcf, 0xd1, 0x19, 0x38, 0x6e, 0x52, 0x41, 0x7c, 0x2c, 0x09, 0xbc, 0x40, + 0x57, 0xe0, 0x3c, 0x88, 0x68, 0xf0, 0xa4, 0x15, 0x2b, 0x11, 0xf0, 0xd2, 0x6a, 0x90, 0x38, 0x22, + 0xb2, 0xc9, 0x44, 0x40, 0x20, 0xb4, 0x7c, 0x95, 0xf2, 0xb2, 0xaa, 0x03, 0xf1, 0x0c, 0xaf, 0xd0, + 0x1b, 0xf0, 0x7a, 0xbb, 0xc8, 0x71, 0xef, 0x99, 0xc4, 0x4a, 0xc7, 0x44, 0x75, 0x99, 0x78, 0x82, + 0xc8, 0x7e, 0x72, 0x8b, 0xb1, 0x56, 0x44, 0x34, 0x8f, 0x70, 0x0f, 0x5e, 0xa3, 0x3a, 0x78, 0xa9, + 0x3e, 0x50, 0x11, 0x6a, 0x8e, 0x85, 0xea, 0xd9, 0xfb, 0xf4, 0x9f, 0xaf, 0xbd, 0xb1, 0x07, 0xac, + 0xf0, 0x58, 0x29, 0x41, 0xfd, 0xb6, 0xa2, 0x2c, 0x86, 0x2f, 0x6c, 0x7d, 0x9b, 0x28, 0xa4, 0x82, + 0x04, 0x0a, 0xde, 0xfa, 0xbf, 0xf6, 0xc0, 0xc3, 0xc0, 0x4c, 0x9c, 0x7f, 0xda, 0xc8, 0xaf, 0xef, + 0x32, 0x01, 0xb7, 0x3e, 0xe2, 0x7b, 0x1f, 0xfd, 0xd5, 0x7c, 0x66, 0xc6, 0x49, 0x9e, 0x39, 0x66, + 0x96, 0xb9, 0x59, 0x9a, 0x97, 0x2e, 0x5b, 0xdb, 0x7a, 0x3a, 0x2a, 0x76, 0xb8, 0xfc, 0x5d, 0xf9, + 0x7e, 0xad, 0xed, 0xb7, 0x30, 0xfe, 0x56, 0xab, 0xb7, 0xaa, 0x55, 0x78, 0x58, 0x38, 0x55, 0x68, + 0xa3, 0x4e, 0xc3, 0xb1, 0x8e, 0x2c, 0xbe, 0xaf, 0xfb, 0x7d, 0x3c, 0x2c, 0xfa, 0x9b, 0x7e, 0xbf, + 0xd3, 0xe8, 0x97, 0xfd, 0x9f, 0xb5, 0x87, 0xaa, 0xe8, 0x79, 0x78, 0x58, 0x78, 0xde, 0x06, 0xe1, + 0x79, 0x9d, 0x86, 0xe7, 0x95, 0x98, 0x4f, 0x87, 0xa5, 0xb0, 0xb7, 0xbf, 0x03, 0x00, 0x00, 0xff, + 0xff, 0xd2, 0x89, 0x5a, 0x3b, 0x7d, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_attribute_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_attribute_type.pb.go new file mode 100644 index 0000000..a4a41c1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_attribute_type.pb.go @@ -0,0 +1,168 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_attribute_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible data types for a feed attribute. +type FeedAttributeTypeEnum_FeedAttributeType int32 + +const ( + // Not specified. + FeedAttributeTypeEnum_UNSPECIFIED FeedAttributeTypeEnum_FeedAttributeType = 0 + // Used for return value only. Represents value unknown in this version. + FeedAttributeTypeEnum_UNKNOWN FeedAttributeTypeEnum_FeedAttributeType = 1 + // Int64. + FeedAttributeTypeEnum_INT64 FeedAttributeTypeEnum_FeedAttributeType = 2 + // Double. 
+ FeedAttributeTypeEnum_DOUBLE FeedAttributeTypeEnum_FeedAttributeType = 3 + // String. + FeedAttributeTypeEnum_STRING FeedAttributeTypeEnum_FeedAttributeType = 4 + // Boolean. + FeedAttributeTypeEnum_BOOLEAN FeedAttributeTypeEnum_FeedAttributeType = 5 + // Url. + FeedAttributeTypeEnum_URL FeedAttributeTypeEnum_FeedAttributeType = 6 + // Datetime. + FeedAttributeTypeEnum_DATE_TIME FeedAttributeTypeEnum_FeedAttributeType = 7 + // Int64 list. + FeedAttributeTypeEnum_INT64_LIST FeedAttributeTypeEnum_FeedAttributeType = 8 + // Double (8 bytes) list. + FeedAttributeTypeEnum_DOUBLE_LIST FeedAttributeTypeEnum_FeedAttributeType = 9 + // String list. + FeedAttributeTypeEnum_STRING_LIST FeedAttributeTypeEnum_FeedAttributeType = 10 + // Boolean list. + FeedAttributeTypeEnum_BOOLEAN_LIST FeedAttributeTypeEnum_FeedAttributeType = 11 + // Url list. + FeedAttributeTypeEnum_URL_LIST FeedAttributeTypeEnum_FeedAttributeType = 12 + // Datetime list. + FeedAttributeTypeEnum_DATE_TIME_LIST FeedAttributeTypeEnum_FeedAttributeType = 13 + // Price. + FeedAttributeTypeEnum_PRICE FeedAttributeTypeEnum_FeedAttributeType = 14 +) + +var FeedAttributeTypeEnum_FeedAttributeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INT64", + 3: "DOUBLE", + 4: "STRING", + 5: "BOOLEAN", + 6: "URL", + 7: "DATE_TIME", + 8: "INT64_LIST", + 9: "DOUBLE_LIST", + 10: "STRING_LIST", + 11: "BOOLEAN_LIST", + 12: "URL_LIST", + 13: "DATE_TIME_LIST", + 14: "PRICE", +} +var FeedAttributeTypeEnum_FeedAttributeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INT64": 2, + "DOUBLE": 3, + "STRING": 4, + "BOOLEAN": 5, + "URL": 6, + "DATE_TIME": 7, + "INT64_LIST": 8, + "DOUBLE_LIST": 9, + "STRING_LIST": 10, + "BOOLEAN_LIST": 11, + "URL_LIST": 12, + "DATE_TIME_LIST": 13, + "PRICE": 14, +} + +func (x FeedAttributeTypeEnum_FeedAttributeType) String() string { + return proto.EnumName(FeedAttributeTypeEnum_FeedAttributeType_name, int32(x)) +} +func (FeedAttributeTypeEnum_FeedAttributeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_attribute_type_282150463692a3a8, []int{0, 0} +} + +// Container for enum describing possible data types for a feed attribute. 
+type FeedAttributeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedAttributeTypeEnum) Reset() { *m = FeedAttributeTypeEnum{} } +func (m *FeedAttributeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*FeedAttributeTypeEnum) ProtoMessage() {} +func (*FeedAttributeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_attribute_type_282150463692a3a8, []int{0} +} +func (m *FeedAttributeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedAttributeTypeEnum.Unmarshal(m, b) +} +func (m *FeedAttributeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedAttributeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *FeedAttributeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedAttributeTypeEnum.Merge(dst, src) +} +func (m *FeedAttributeTypeEnum) XXX_Size() int { + return xxx_messageInfo_FeedAttributeTypeEnum.Size(m) +} +func (m *FeedAttributeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedAttributeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedAttributeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedAttributeTypeEnum)(nil), "google.ads.googleads.v1.enums.FeedAttributeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedAttributeTypeEnum_FeedAttributeType", FeedAttributeTypeEnum_FeedAttributeType_name, FeedAttributeTypeEnum_FeedAttributeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_attribute_type.proto", fileDescriptor_feed_attribute_type_282150463692a3a8) +} + +var fileDescriptor_feed_attribute_type_282150463692a3a8 = []byte{ + // 407 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x6a, 0xdb, 0x40, + 0x14, 0xac, 0xe5, 0xc6, 0x8e, 0x9f, 0x1d, 0x77, 0xbb, 0xd0, 0x1e, 0x4a, 0x73, 0x48, 0x3e, 0x60, + 0x85, 0x68, 0x69, 0x61, 0x7b, 0x5a, 0xc5, 0x1b, 0x23, 0xaa, 0xca, 0x46, 0x96, 0x5c, 0x28, 0x02, + 0xa1, 0x54, 0x5b, 0x21, 0x88, 0xb5, 0xc2, 0x2b, 0x07, 0x72, 0xef, 0x97, 0xf4, 0xd8, 0x4f, 0xe9, + 0xa7, 0xa4, 0x3f, 0x51, 0x56, 0x1b, 0xe9, 0x62, 0x9a, 0x8b, 0x18, 0xcd, 0x9b, 0x99, 0x7d, 0xcc, + 0x83, 0x8f, 0x85, 0x94, 0xc5, 0xad, 0xb0, 0xb3, 0x5c, 0xd9, 0x06, 0x6a, 0x74, 0xe7, 0xd8, 0xa2, + 0x3a, 0xec, 0x94, 0xfd, 0x43, 0x88, 0x3c, 0xcd, 0x9a, 0x66, 0x5f, 0xde, 0x1c, 0x1a, 0x91, 0x36, + 0xf7, 0xb5, 0x20, 0xf5, 0x5e, 0x36, 0x12, 0x9f, 0x1b, 0x35, 0xc9, 0x72, 0x45, 0x7a, 0x23, 0xb9, + 0x73, 0x48, 0x6b, 0x7c, 0xf3, 0xb6, 0xcb, 0xad, 0x4b, 0x3b, 0xab, 0x2a, 0xd9, 0x64, 0x4d, 0x29, + 0x2b, 0x65, 0xcc, 0x97, 0x3f, 0x2d, 0x78, 0x75, 0x2d, 0x44, 0xce, 0xba, 0xe4, 0xe8, 0xbe, 0x16, + 0xbc, 0x3a, 0xec, 0x2e, 0x1f, 0x06, 0xf0, 0xf2, 0x68, 0x82, 0x5f, 0xc0, 0x34, 0x0e, 0x36, 0x6b, + 0x7e, 0xe5, 0x5d, 0x7b, 0x7c, 0x81, 0x9e, 0xe1, 0x29, 0x8c, 0xe3, 0xe0, 0x73, 0xb0, 0xfa, 0x1a, + 0xa0, 0x01, 0x9e, 0xc0, 0x89, 0x17, 0x44, 0x1f, 0xde, 0x23, 0x0b, 0x03, 0x8c, 0x16, 0xab, 0xd8, + 0xf5, 0x39, 0x1a, 0x6a, 0xbc, 0x89, 0x42, 0x2f, 0x58, 0xa2, 0xe7, 0x5a, 0xef, 0xae, 0x56, 0x3e, + 0x67, 0x01, 0x3a, 0xc1, 0x63, 0x18, 0xc6, 0xa1, 0x8f, 0x46, 0xf8, 0x0c, 0x26, 0x0b, 0x16, 0xf1, + 0x34, 0xf2, 0xbe, 0x70, 0x34, 0xc6, 0x73, 0x80, 0x36, 0x27, 0xf5, 0xbd, 0x4d, 0x84, 0x4e, 0xf5, + 0xab, 0x26, 0xcc, 0x10, 0x13, 0x4d, 0x98, 0x44, 0x43, 0x00, 0x46, 0x30, 0x7b, 0x8c, 0x35, 0xcc, + 0x14, 0xcf, 0xe0, 0x34, 0x0e, 0x7d, 0xf3, 0x37, 0xc3, 0x18, 0xe6, 0xfd, 0x03, 
0x86, 0x3b, 0xd3, + 0xdb, 0xae, 0x43, 0xef, 0x8a, 0xa3, 0xb9, 0xfb, 0x77, 0x00, 0x17, 0xdf, 0xe5, 0x8e, 0x3c, 0x59, + 0xa5, 0xfb, 0xfa, 0xa8, 0x8f, 0xb5, 0x2e, 0x71, 0x3d, 0xf8, 0xe6, 0x3e, 0x1a, 0x0b, 0x79, 0x9b, + 0x55, 0x05, 0x91, 0xfb, 0xc2, 0x2e, 0x44, 0xd5, 0x56, 0xdc, 0x1d, 0xb3, 0x2e, 0xd5, 0x7f, 0x6e, + 0xfb, 0xa9, 0xfd, 0xfe, 0xb2, 0x86, 0x4b, 0xc6, 0x7e, 0x5b, 0xe7, 0x4b, 0x13, 0xc5, 0x72, 0x45, + 0x0c, 0xd4, 0x68, 0xeb, 0x10, 0x7d, 0x15, 0xf5, 0xa7, 0x9b, 0x27, 0x2c, 0x57, 0x49, 0x3f, 0x4f, + 0xb6, 0x4e, 0xd2, 0xce, 0x1f, 0xac, 0x0b, 0x43, 0x52, 0xca, 0x72, 0x45, 0x69, 0xaf, 0xa0, 0x74, + 0xeb, 0x50, 0xda, 0x6a, 0x6e, 0x46, 0xed, 0x62, 0xef, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xe7, + 0xfe, 0x62, 0xd5, 0x73, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_approval_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_approval_status.pb.go new file mode 100644 index 0000000..dbb04c5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_approval_status.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_quality_approval_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible quality evaluation approval statuses of a feed item. +type FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus int32 + +const ( + // No value has been specified. + FeedItemQualityApprovalStatusEnum_UNSPECIFIED FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemQualityApprovalStatusEnum_UNKNOWN FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus = 1 + // Meets all quality expectations. + FeedItemQualityApprovalStatusEnum_APPROVED FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus = 2 + // Does not meet some quality expectations. The specific reason is found in + // the quality_disapproval_reasons field. 
+ FeedItemQualityApprovalStatusEnum_DISAPPROVED FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus = 3 +) + +var FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APPROVED", + 3: "DISAPPROVED", +} +var FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APPROVED": 2, + "DISAPPROVED": 3, +} + +func (x FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus) String() string { + return proto.EnumName(FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus_name, int32(x)) +} +func (FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_quality_approval_status_6b620c95a3e51336, []int{0, 0} +} + +// Container for enum describing possible quality evaluation approval statuses +// of a feed item. +type FeedItemQualityApprovalStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemQualityApprovalStatusEnum) Reset() { *m = FeedItemQualityApprovalStatusEnum{} } +func (m *FeedItemQualityApprovalStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemQualityApprovalStatusEnum) ProtoMessage() {} +func (*FeedItemQualityApprovalStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_quality_approval_status_6b620c95a3e51336, []int{0} +} +func (m *FeedItemQualityApprovalStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemQualityApprovalStatusEnum.Unmarshal(m, b) +} +func (m *FeedItemQualityApprovalStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemQualityApprovalStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemQualityApprovalStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemQualityApprovalStatusEnum.Merge(dst, src) +} +func (m *FeedItemQualityApprovalStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemQualityApprovalStatusEnum.Size(m) +} +func (m *FeedItemQualityApprovalStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemQualityApprovalStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemQualityApprovalStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemQualityApprovalStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedItemQualityApprovalStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus", FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus_name, FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_quality_approval_status.proto", fileDescriptor_feed_item_quality_approval_status_6b620c95a3e51336) +} + +var fileDescriptor_feed_item_quality_approval_status_6b620c95a3e51336 = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xf3, 0x30, + 0x18, 0xfd, 0xdb, 0xc1, 0xaf, 0x64, 0x82, 0xa3, 0x97, 0xe2, 0x2e, 0xb6, 0x07, 0x48, 0x28, 0xde, + 0xc5, 0xab, 0xcc, 0x75, 0xa3, 0x08, 0x5d, 0x75, 0xac, 0x82, 0x14, 0x4a, 0x34, 0x31, 0x14, 0xda, + 0xa4, 0x2e, 0xe9, 0xc0, 0x4b, 0x5f, 0xc5, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0x05, 0xbc, 0x95, 0x26, + 0x5b, 0xef, 0xdc, 0x4d, 0x38, 
0xe4, 0x3b, 0xdf, 0x39, 0xdf, 0x39, 0x20, 0x12, 0x4a, 0x89, 0x8a, + 0x23, 0xca, 0x34, 0x72, 0xb0, 0x43, 0xbb, 0x10, 0x71, 0xd9, 0xd6, 0x1a, 0xbd, 0x70, 0xce, 0x8a, + 0xd2, 0xf0, 0xba, 0x78, 0x6d, 0x69, 0x55, 0x9a, 0xb7, 0x82, 0x36, 0xcd, 0x56, 0xed, 0x68, 0x55, + 0x68, 0x43, 0x4d, 0xab, 0x61, 0xb3, 0x55, 0x46, 0x05, 0x63, 0xb7, 0x0b, 0x29, 0xd3, 0xb0, 0x97, + 0x81, 0xbb, 0x10, 0x5a, 0x99, 0x8b, 0xcb, 0x83, 0x4b, 0x53, 0x22, 0x2a, 0xa5, 0x32, 0xd4, 0x94, + 0x4a, 0xee, 0x97, 0xa7, 0xef, 0x1e, 0x98, 0x2c, 0x38, 0x67, 0xb1, 0xe1, 0xf5, 0x9d, 0xb3, 0x21, + 0x7b, 0x97, 0xb5, 0x35, 0x89, 0x64, 0x5b, 0x4f, 0x73, 0x30, 0x3e, 0x4a, 0x0a, 0xce, 0xc1, 0x70, + 0x93, 0xac, 0xd3, 0xe8, 0x26, 0x5e, 0xc4, 0xd1, 0x7c, 0xf4, 0x2f, 0x18, 0x82, 0x93, 0x4d, 0x72, + 0x9b, 0xac, 0x1e, 0x92, 0x91, 0x17, 0x9c, 0x81, 0x53, 0x92, 0xa6, 0xf7, 0xab, 0x2c, 0x9a, 0x8f, + 0xfc, 0x8e, 0x3b, 0x8f, 0xd7, 0xfd, 0xc7, 0x60, 0xf6, 0xe3, 0x81, 0xc9, 0xb3, 0xaa, 0xe1, 0xd1, + 0x1c, 0xb3, 0xe9, 0xd1, 0x0b, 0xd2, 0x2e, 0x4d, 0xea, 0x3d, 0xce, 0xf6, 0x22, 0x42, 0x55, 0x54, + 0x0a, 0xa8, 0xb6, 0x02, 0x09, 0x2e, 0x6d, 0xd6, 0x43, 0xc7, 0x4d, 0xa9, 0xff, 0xa8, 0xfc, 0xda, + 0xbe, 0x1f, 0xfe, 0x60, 0x49, 0xc8, 0xa7, 0x3f, 0x5e, 0x3a, 0x29, 0xc2, 0x34, 0x74, 0xb0, 0x43, + 0x59, 0x08, 0xbb, 0x4a, 0xf4, 0xd7, 0x61, 0x9e, 0x13, 0xa6, 0xf3, 0x7e, 0x9e, 0x67, 0x61, 0x6e, + 0xe7, 0xdf, 0xfe, 0xc4, 0x7d, 0x62, 0x4c, 0x98, 0xc6, 0xb8, 0x67, 0x60, 0x9c, 0x85, 0x18, 0x5b, + 0xce, 0xd3, 0x7f, 0x7b, 0xd8, 0xd5, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x83, 0xd4, 0xbb, + 0x0a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.pb.go new file mode 100644 index 0000000..95ae264 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.pb.go @@ -0,0 +1,203 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible quality evaluation disapproval reasons of a feed item. +type FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason int32 + +const ( + // No value has been specified. + FeedItemQualityDisapprovalReasonEnum_UNSPECIFIED FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemQualityDisapprovalReasonEnum_UNKNOWN FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 1 + // Price contains repetitive headers. 
+ FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_REPETITIVE_HEADERS FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 2 + // Price contains repetitive description. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_REPETITIVE_DESCRIPTION FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 3 + // Price contains inconsistent items. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_INCONSISTENT_ROWS FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 4 + // Price contains qualifiers in description. + FeedItemQualityDisapprovalReasonEnum_PRICE_DESCRIPTION_HAS_PRICE_QUALIFIERS FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 5 + // Price contains an unsupported language. + FeedItemQualityDisapprovalReasonEnum_PRICE_UNSUPPORTED_LANGUAGE FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 6 + // Price item header is not relevant to the price type. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_HEADER_TABLE_TYPE_MISMATCH FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 7 + // Price item header has promotional text. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_HEADER_HAS_PROMOTIONAL_TEXT FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 8 + // Price item description is not relevant to the item header. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_DESCRIPTION_NOT_RELEVANT FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 9 + // Price item description contains promotional text. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_DESCRIPTION_HAS_PROMOTIONAL_TEXT FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 10 + // Price item header and description are repetitive. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_HEADER_DESCRIPTION_REPETITIVE FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 11 + // Price item is in a foreign language, nonsense, or can't be rated. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_UNRATEABLE FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 12 + // Price item price is invalid or inaccurate. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_PRICE_INVALID FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 13 + // Price item URL is invalid or irrelevant. + FeedItemQualityDisapprovalReasonEnum_PRICE_TABLE_ROW_URL_INVALID FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 14 + // Price item header or description has price. + FeedItemQualityDisapprovalReasonEnum_PRICE_HEADER_OR_DESCRIPTION_HAS_PRICE FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 15 + // Structured snippet values do not match the header. + FeedItemQualityDisapprovalReasonEnum_STRUCTURED_SNIPPETS_HEADER_POLICY_VIOLATED FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 16 + // Structured snippet values are repeated. + FeedItemQualityDisapprovalReasonEnum_STRUCTURED_SNIPPETS_REPEATED_VALUES FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 17 + // Structured snippet values violate editorial guidelines like punctuation. + FeedItemQualityDisapprovalReasonEnum_STRUCTURED_SNIPPETS_EDITORIAL_GUIDELINES FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 18 + // Structured snippet contain promotional text. 
+ FeedItemQualityDisapprovalReasonEnum_STRUCTURED_SNIPPETS_HAS_PROMOTIONAL_TEXT FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason = 19 +) + +var FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PRICE_TABLE_REPETITIVE_HEADERS", + 3: "PRICE_TABLE_REPETITIVE_DESCRIPTION", + 4: "PRICE_TABLE_INCONSISTENT_ROWS", + 5: "PRICE_DESCRIPTION_HAS_PRICE_QUALIFIERS", + 6: "PRICE_UNSUPPORTED_LANGUAGE", + 7: "PRICE_TABLE_ROW_HEADER_TABLE_TYPE_MISMATCH", + 8: "PRICE_TABLE_ROW_HEADER_HAS_PROMOTIONAL_TEXT", + 9: "PRICE_TABLE_ROW_DESCRIPTION_NOT_RELEVANT", + 10: "PRICE_TABLE_ROW_DESCRIPTION_HAS_PROMOTIONAL_TEXT", + 11: "PRICE_TABLE_ROW_HEADER_DESCRIPTION_REPETITIVE", + 12: "PRICE_TABLE_ROW_UNRATEABLE", + 13: "PRICE_TABLE_ROW_PRICE_INVALID", + 14: "PRICE_TABLE_ROW_URL_INVALID", + 15: "PRICE_HEADER_OR_DESCRIPTION_HAS_PRICE", + 16: "STRUCTURED_SNIPPETS_HEADER_POLICY_VIOLATED", + 17: "STRUCTURED_SNIPPETS_REPEATED_VALUES", + 18: "STRUCTURED_SNIPPETS_EDITORIAL_GUIDELINES", + 19: "STRUCTURED_SNIPPETS_HAS_PROMOTIONAL_TEXT", +} +var FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PRICE_TABLE_REPETITIVE_HEADERS": 2, + "PRICE_TABLE_REPETITIVE_DESCRIPTION": 3, + "PRICE_TABLE_INCONSISTENT_ROWS": 4, + "PRICE_DESCRIPTION_HAS_PRICE_QUALIFIERS": 5, + "PRICE_UNSUPPORTED_LANGUAGE": 6, + "PRICE_TABLE_ROW_HEADER_TABLE_TYPE_MISMATCH": 7, + "PRICE_TABLE_ROW_HEADER_HAS_PROMOTIONAL_TEXT": 8, + "PRICE_TABLE_ROW_DESCRIPTION_NOT_RELEVANT": 9, + "PRICE_TABLE_ROW_DESCRIPTION_HAS_PROMOTIONAL_TEXT": 10, + "PRICE_TABLE_ROW_HEADER_DESCRIPTION_REPETITIVE": 11, + "PRICE_TABLE_ROW_UNRATEABLE": 12, + "PRICE_TABLE_ROW_PRICE_INVALID": 13, + "PRICE_TABLE_ROW_URL_INVALID": 14, + "PRICE_HEADER_OR_DESCRIPTION_HAS_PRICE": 15, + "STRUCTURED_SNIPPETS_HEADER_POLICY_VIOLATED": 16, + "STRUCTURED_SNIPPETS_REPEATED_VALUES": 17, + "STRUCTURED_SNIPPETS_EDITORIAL_GUIDELINES": 18, + "STRUCTURED_SNIPPETS_HAS_PROMOTIONAL_TEXT": 19, +} + +func (x FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason) String() string { + return proto.EnumName(FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason_name, int32(x)) +} +func (FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_quality_disapproval_reason_592822322f4816f9, []int{0, 0} +} + +// Container for enum describing possible quality evaluation disapproval reasons +// of a feed item. 
+type FeedItemQualityDisapprovalReasonEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemQualityDisapprovalReasonEnum) Reset() { *m = FeedItemQualityDisapprovalReasonEnum{} } +func (m *FeedItemQualityDisapprovalReasonEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemQualityDisapprovalReasonEnum) ProtoMessage() {} +func (*FeedItemQualityDisapprovalReasonEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_quality_disapproval_reason_592822322f4816f9, []int{0} +} +func (m *FeedItemQualityDisapprovalReasonEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum.Unmarshal(m, b) +} +func (m *FeedItemQualityDisapprovalReasonEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemQualityDisapprovalReasonEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum.Merge(dst, src) +} +func (m *FeedItemQualityDisapprovalReasonEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum.Size(m) +} +func (m *FeedItemQualityDisapprovalReasonEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemQualityDisapprovalReasonEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemQualityDisapprovalReasonEnum)(nil), "google.ads.googleads.v1.enums.FeedItemQualityDisapprovalReasonEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason", FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason_name, FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_quality_disapproval_reason.proto", fileDescriptor_feed_item_quality_disapproval_reason_592822322f4816f9) +} + +var fileDescriptor_feed_item_quality_disapproval_reason_592822322f4816f9 = []byte{ + // 631 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0xcd, 0x72, 0xd3, 0x30, + 0x10, 0xc7, 0x69, 0x0a, 0x29, 0xa8, 0x40, 0x85, 0xb8, 0x15, 0x5a, 0x68, 0xa0, 0x05, 0x0a, 0xd8, + 0x64, 0xe0, 0x64, 0x4e, 0x8a, 0xad, 0x26, 0x1a, 0x5c, 0xd9, 0x95, 0x64, 0x97, 0x32, 0x99, 0xd1, + 0x18, 0x6c, 0x32, 0x99, 0x49, 0xec, 0x10, 0xa7, 0x9d, 0xe1, 0x75, 0x38, 0xf2, 0x02, 0xbc, 0x03, + 0x4f, 0xc1, 0x99, 0x47, 0xe0, 0xc4, 0xd8, 0xca, 0x17, 0x25, 0x69, 0x2f, 0x9e, 0xd5, 0xee, 0x6f, + 0x77, 0xff, 0x5e, 0xcd, 0x0a, 0xb4, 0x3a, 0x59, 0xd6, 0xe9, 0x25, 0x66, 0x14, 0xe7, 0xa6, 0x36, + 0x0b, 0xeb, 0xac, 0x6e, 0x26, 0xe9, 0x69, 0x3f, 0x37, 0x3f, 0x27, 0x49, 0xac, 0xba, 0xa3, 0xa4, + 0xaf, 0xbe, 0x9c, 0x46, 0xbd, 0xee, 0xe8, 0xab, 0x8a, 0xbb, 0x79, 0x34, 0x18, 0x0c, 0xb3, 0xb3, + 0xa8, 0xa7, 0x86, 0x49, 0x94, 0x67, 0xa9, 0x31, 0x18, 0x66, 0xa3, 0x0c, 0x6d, 0xe9, 0x74, 0x23, + 0x8a, 0x73, 0x63, 0x5a, 0xc9, 0x38, 0xab, 0x1b, 0x65, 0xa5, 0xcd, 0xfb, 0x93, 0x46, 0x83, 0xae, + 0x19, 0xa5, 0x69, 0x36, 0x8a, 0x46, 0xdd, 0x2c, 0xcd, 0x75, 0x72, 0xed, 0x57, 0x15, 0x3c, 0x3e, + 0x48, 0x92, 0x98, 0x8e, 0x92, 0xfe, 0x91, 0xee, 0xe4, 0xcc, 0x1a, 0xf1, 0xb2, 0x0f, 0x49, 0x4f, + 0xfb, 0xb5, 0x1f, 0x55, 0xf0, 0xf0, 0x32, 0x10, 0x6d, 0x80, 0xf5, 0x80, 0x09, 0x9f, 0xd8, 0xf4, + 0x80, 0x12, 0x07, 0x5e, 
0x41, 0xeb, 0x60, 0x2d, 0x60, 0xef, 0x98, 0x77, 0xcc, 0xe0, 0x0a, 0xaa, + 0x81, 0x6d, 0x9f, 0x53, 0x9b, 0x28, 0x89, 0x1b, 0x2e, 0x51, 0x9c, 0xf8, 0x44, 0x52, 0x49, 0x43, + 0xa2, 0x5a, 0x04, 0x3b, 0x84, 0x0b, 0x58, 0x41, 0x7b, 0xa0, 0xb6, 0x84, 0x71, 0x88, 0xb0, 0x39, + 0xf5, 0x25, 0xf5, 0x18, 0x5c, 0x45, 0x3b, 0x60, 0x6b, 0x9e, 0xa3, 0xcc, 0xf6, 0x98, 0xa0, 0x42, + 0x12, 0x26, 0x15, 0xf7, 0x8e, 0x05, 0xbc, 0x8a, 0xf6, 0xc1, 0x9e, 0x46, 0xe6, 0x32, 0x55, 0x0b, + 0x0b, 0xa5, 0xbd, 0x47, 0x01, 0x76, 0x0b, 0x99, 0x5c, 0xc0, 0x6b, 0x68, 0x1b, 0x6c, 0x6a, 0x6f, + 0xc0, 0x44, 0xe0, 0xfb, 0x1e, 0x97, 0xc4, 0x51, 0x2e, 0x66, 0xcd, 0x00, 0x37, 0x09, 0xac, 0x22, + 0x03, 0xec, 0xff, 0x23, 0xcb, 0x3b, 0x1e, 0x6b, 0x1e, 0x3b, 0xe4, 0x89, 0x4f, 0xd4, 0x21, 0x15, + 0x87, 0x58, 0xda, 0x2d, 0xb8, 0x86, 0x4c, 0xf0, 0x7c, 0x09, 0xaf, 0x05, 0x78, 0x87, 0x5e, 0x21, + 0x07, 0xbb, 0x4a, 0x92, 0xf7, 0x12, 0x5e, 0x47, 0x2f, 0xc0, 0xd3, 0xf3, 0x09, 0xf3, 0xb2, 0x99, + 0x27, 0x15, 0x27, 0x2e, 0x09, 0x31, 0x93, 0xf0, 0x06, 0x7a, 0x03, 0x5e, 0x5d, 0x44, 0x2f, 0xec, + 0x01, 0x50, 0x1d, 0xbc, 0x5c, 0x22, 0x6a, 0x3e, 0x79, 0x36, 0x72, 0xb8, 0x3e, 0x9b, 0xcb, 0x2c, + 0x25, 0x60, 0x1c, 0x4b, 0x52, 0x1c, 0xe1, 0xcd, 0xf3, 0xd7, 0x50, 0xc4, 0xf5, 0x99, 0xb2, 0x10, + 0xbb, 0xd4, 0x81, 0xb7, 0xd0, 0x03, 0x70, 0xef, 0xbf, 0x12, 0xdc, 0x9d, 0x02, 0xb7, 0xd1, 0x33, + 0xb0, 0xab, 0x81, 0xb1, 0x18, 0x8f, 0x2f, 0xbe, 0x31, 0xb8, 0x51, 0x5c, 0x83, 0x90, 0x3c, 0xb0, + 0x65, 0xc0, 0x89, 0xa3, 0x04, 0xa3, 0xbe, 0x4f, 0xa4, 0x98, 0x24, 0xfa, 0x9e, 0x4b, 0xed, 0x13, + 0x15, 0x52, 0xcf, 0xc5, 0x92, 0x38, 0x10, 0xa2, 0x27, 0xe0, 0xd1, 0x22, 0xbe, 0xf8, 0xc5, 0x82, + 0x50, 0x21, 0x76, 0x03, 0x22, 0xe0, 0x9d, 0x62, 0xfc, 0x8b, 0x40, 0xe2, 0x50, 0xe9, 0x71, 0x8a, + 0x5d, 0xd5, 0x0c, 0xa8, 0x43, 0x5c, 0xca, 0x88, 0x80, 0x68, 0x19, 0xbd, 0x70, 0xec, 0x77, 0x1b, + 0x7f, 0x56, 0xc0, 0xce, 0xa7, 0xac, 0x6f, 0x5c, 0xb8, 0xa6, 0x8d, 0xdd, 0xcb, 0x96, 0xcb, 0x2f, + 0xf6, 0xd5, 0x5f, 0xf9, 0xd0, 0x18, 0xd7, 0xe9, 0x64, 0xbd, 0x28, 0xed, 0x18, 0xd9, 0xb0, 0x63, + 0x76, 0x92, 0xb4, 0xdc, 0xe6, 0xc9, 0x43, 0x32, 0xe8, 0xe6, 0x4b, 0xde, 0x95, 0xb7, 0xe5, 0xf7, + 0x5b, 0x65, 0xb5, 0x89, 0xf1, 0xf7, 0xca, 0x56, 0x53, 0x97, 0xc2, 0x71, 0x6e, 0x68, 0xb3, 0xb0, + 0xc2, 0xba, 0x51, 0x6c, 0x7c, 0xfe, 0x73, 0x12, 0x6f, 0xe3, 0x38, 0x6f, 0x4f, 0xe3, 0xed, 0xb0, + 0xde, 0x2e, 0xe3, 0xbf, 0x2b, 0x3b, 0xda, 0x69, 0x59, 0x38, 0xce, 0x2d, 0x6b, 0x4a, 0x58, 0x56, + 0x58, 0xb7, 0xac, 0x92, 0xf9, 0x58, 0x2d, 0x85, 0xbd, 0xfe, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xdd, + 0xcf, 0x49, 0x08, 0xef, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_status.pb.go new file mode 100644 index 0000000..34c9486 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a feed item. +type FeedItemStatusEnum_FeedItemStatus int32 + +const ( + // Not specified. + FeedItemStatusEnum_UNSPECIFIED FeedItemStatusEnum_FeedItemStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemStatusEnum_UNKNOWN FeedItemStatusEnum_FeedItemStatus = 1 + // Feed item is enabled. + FeedItemStatusEnum_ENABLED FeedItemStatusEnum_FeedItemStatus = 2 + // Feed item has been removed. + FeedItemStatusEnum_REMOVED FeedItemStatusEnum_FeedItemStatus = 3 +) + +var FeedItemStatusEnum_FeedItemStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var FeedItemStatusEnum_FeedItemStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x FeedItemStatusEnum_FeedItemStatus) String() string { + return proto.EnumName(FeedItemStatusEnum_FeedItemStatus_name, int32(x)) +} +func (FeedItemStatusEnum_FeedItemStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_status_38675fbdac6e61e9, []int{0, 0} +} + +// Container for enum describing possible statuses of a feed item. +type FeedItemStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemStatusEnum) Reset() { *m = FeedItemStatusEnum{} } +func (m *FeedItemStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemStatusEnum) ProtoMessage() {} +func (*FeedItemStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_status_38675fbdac6e61e9, []int{0} +} +func (m *FeedItemStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemStatusEnum.Unmarshal(m, b) +} +func (m *FeedItemStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemStatusEnum.Merge(dst, src) +} +func (m *FeedItemStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemStatusEnum.Size(m) +} +func (m *FeedItemStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedItemStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemStatusEnum_FeedItemStatus", FeedItemStatusEnum_FeedItemStatus_name, FeedItemStatusEnum_FeedItemStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_status.proto", fileDescriptor_feed_item_status_38675fbdac6e61e9) +} + +var fileDescriptor_feed_item_status_38675fbdac6e61e9 = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x14, 0xfe, 0xad, 0x83, 0x9f, 0x90, 0x81, 0x96, 0x7a, 0x27, 0xee, 0x62, 0x7b, 0x80, 0x84, 0xa2, + 0x57, 0xf1, 0x2a, 0x75, 0xd9, 0x1c, 0x6a, 0x37, 0x1c, 0xab, 0x20, 0xc5, 0x11, 0x4d, 
0x0c, 0x85, + 0x35, 0x19, 0x4b, 0xba, 0x07, 0xf2, 0xd2, 0x47, 0xf1, 0x49, 0xc4, 0xa7, 0x90, 0xa4, 0x6b, 0x61, + 0x17, 0x7a, 0x53, 0xbe, 0x73, 0xbe, 0x3f, 0xfd, 0x72, 0xc0, 0xa5, 0xd4, 0x5a, 0xae, 0x05, 0x62, + 0xdc, 0xa0, 0x1a, 0x3a, 0xb4, 0x8b, 0x91, 0x50, 0x55, 0x69, 0xd0, 0x9b, 0x10, 0x7c, 0x55, 0x58, + 0x51, 0xae, 0x8c, 0x65, 0xb6, 0x32, 0x70, 0xb3, 0xd5, 0x56, 0x47, 0xfd, 0x5a, 0x0a, 0x19, 0x37, + 0xb0, 0x75, 0xc1, 0x5d, 0x0c, 0xbd, 0xeb, 0xec, 0xbc, 0x09, 0xdd, 0x14, 0x88, 0x29, 0xa5, 0x2d, + 0xb3, 0x85, 0x56, 0x7b, 0xf3, 0xf0, 0x19, 0x44, 0x63, 0x21, 0xf8, 0xd4, 0x8a, 0x72, 0xe1, 0x43, + 0xa9, 0xaa, 0xca, 0xe1, 0x0d, 0x38, 0x3e, 0xdc, 0x46, 0x27, 0xa0, 0xb7, 0x4c, 0x17, 0x73, 0x7a, + 0x3d, 0x1d, 0x4f, 0xe9, 0x28, 0xfc, 0x17, 0xf5, 0xc0, 0xd1, 0x32, 0xbd, 0x4d, 0x67, 0x8f, 0x69, + 0xd8, 0x71, 0x03, 0x4d, 0x49, 0x72, 0x47, 0x47, 0x61, 0xe0, 0x86, 0x07, 0x7a, 0x3f, 0xcb, 0xe8, + 0x28, 0xec, 0x26, 0x5f, 0x1d, 0x30, 0x78, 0xd5, 0x25, 0xfc, 0xb3, 0x63, 0x72, 0x7a, 0xf8, 0xb7, + 0xb9, 0xab, 0x36, 0xef, 0x3c, 0x25, 0x7b, 0x97, 0xd4, 0x6b, 0xa6, 0x24, 0xd4, 0x5b, 0x89, 0xa4, + 0x50, 0xbe, 0x78, 0x73, 0x9f, 0x4d, 0x61, 0x7e, 0x39, 0xd7, 0x95, 0xff, 0xbe, 0x07, 0xdd, 0x09, + 0x21, 0x1f, 0x41, 0x7f, 0x52, 0x47, 0x11, 0x6e, 0x60, 0x0d, 0x1d, 0xca, 0x62, 0xe8, 0xde, 0x6b, + 0x3e, 0x1b, 0x3e, 0x27, 0xdc, 0xe4, 0x2d, 0x9f, 0x67, 0x71, 0xee, 0xf9, 0xef, 0x60, 0x50, 0x2f, + 0x31, 0x26, 0xdc, 0x60, 0xdc, 0x2a, 0x30, 0xce, 0x62, 0x8c, 0xbd, 0xe6, 0xe5, 0xbf, 0x2f, 0x76, + 0xf1, 0x13, 0x00, 0x00, 0xff, 0xff, 0x42, 0x25, 0x8e, 0x70, 0xc6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_device.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_device.pb.go new file mode 100644 index 0000000..5b6e15f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_device.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_target_device.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible data types for a feed item target device. +type FeedItemTargetDeviceEnum_FeedItemTargetDevice int32 + +const ( + // Not specified. + FeedItemTargetDeviceEnum_UNSPECIFIED FeedItemTargetDeviceEnum_FeedItemTargetDevice = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemTargetDeviceEnum_UNKNOWN FeedItemTargetDeviceEnum_FeedItemTargetDevice = 1 + // Mobile. 
+ FeedItemTargetDeviceEnum_MOBILE FeedItemTargetDeviceEnum_FeedItemTargetDevice = 2 +) + +var FeedItemTargetDeviceEnum_FeedItemTargetDevice_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MOBILE", +} +var FeedItemTargetDeviceEnum_FeedItemTargetDevice_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MOBILE": 2, +} + +func (x FeedItemTargetDeviceEnum_FeedItemTargetDevice) String() string { + return proto.EnumName(FeedItemTargetDeviceEnum_FeedItemTargetDevice_name, int32(x)) +} +func (FeedItemTargetDeviceEnum_FeedItemTargetDevice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_device_343b6c944046560e, []int{0, 0} +} + +// Container for enum describing possible data types for a feed item target +// device. +type FeedItemTargetDeviceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemTargetDeviceEnum) Reset() { *m = FeedItemTargetDeviceEnum{} } +func (m *FeedItemTargetDeviceEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemTargetDeviceEnum) ProtoMessage() {} +func (*FeedItemTargetDeviceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_device_343b6c944046560e, []int{0} +} +func (m *FeedItemTargetDeviceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemTargetDeviceEnum.Unmarshal(m, b) +} +func (m *FeedItemTargetDeviceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemTargetDeviceEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemTargetDeviceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemTargetDeviceEnum.Merge(dst, src) +} +func (m *FeedItemTargetDeviceEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemTargetDeviceEnum.Size(m) +} +func (m *FeedItemTargetDeviceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemTargetDeviceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemTargetDeviceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemTargetDeviceEnum)(nil), "google.ads.googleads.v1.enums.FeedItemTargetDeviceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice", FeedItemTargetDeviceEnum_FeedItemTargetDevice_name, FeedItemTargetDeviceEnum_FeedItemTargetDevice_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_target_device.proto", fileDescriptor_feed_item_target_device_343b6c944046560e) +} + +var fileDescriptor_feed_item_target_device_343b6c944046560e = []byte{ + // 303 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x85, 0x09, 0xd9, 0xc1, 0x51, 0x3c, 0xa8, 0xb8, 0xc3, 0xf6, 0x00, 0x09, 0xc5, + 0x5b, 0x76, 0xb1, 0x75, 0xdd, 0x28, 0x6a, 0x37, 0xd0, 0x4d, 0x90, 0xc2, 0x88, 0xcb, 0xdf, 0x50, + 0x58, 0x93, 0xd1, 0x64, 0x7d, 0x20, 0x8f, 0x3e, 0x8a, 0x8f, 0xe2, 0xc1, 0x67, 0x90, 0x26, 0xb6, + 0xa7, 0xe9, 0x25, 0x7c, 0xe4, 0xfb, 0xff, 0xbe, 0x7c, 0xf9, 0xa3, 0xb1, 0x50, 0x4a, 0x6c, 0x81, + 0x30, 0xae, 0x89, 0x93, 0xb5, 0xaa, 0x02, 0x02, 0x72, 0x5f, 0x68, 0xf2, 0x06, 0xc0, 0xd7, 0xb9, + 0x81, 0x62, 0x6d, 0x58, 0x29, 0xc0, 0xac, 0x39, 0x54, 0xf9, 0x06, 0xf0, 0xae, 0x54, 0x46, 0xf9, + 0x03, 0x47, 0x60, 0xc6, 0x35, 0x6e, 0x61, 0x5c, 0x05, 0xd8, 0xc2, 0x97, 0x57, 0x4d, 0xf6, 0x2e, + 0x27, 0x4c, 0x4a, 0x65, 0x98, 0xc9, 0x95, 0xd4, 0x0e, 0x1e, 0x65, 0xe8, 0x7c, 0x0a, 0xc0, 
0x13, + 0x03, 0xc5, 0x93, 0xcd, 0x9e, 0xd8, 0xe8, 0x58, 0xee, 0x8b, 0xd1, 0x0d, 0x3a, 0x3b, 0xe4, 0xf9, + 0xa7, 0xa8, 0xb7, 0x4c, 0x1f, 0x17, 0xf1, 0x6d, 0x32, 0x4d, 0xe2, 0x49, 0xff, 0xc8, 0xef, 0xa1, + 0x93, 0x65, 0x7a, 0x97, 0xce, 0x9f, 0xd3, 0x7e, 0xc7, 0x47, 0xa8, 0xfb, 0x30, 0x8f, 0x92, 0xfb, + 0xb8, 0xef, 0x45, 0xdf, 0x1d, 0x34, 0xdc, 0xa8, 0x02, 0xff, 0xdb, 0x30, 0xba, 0x38, 0xf4, 0xca, + 0xa2, 0xae, 0xb7, 0xe8, 0xbc, 0x44, 0xbf, 0xac, 0x50, 0x5b, 0x26, 0x05, 0x56, 0xa5, 0x20, 0x02, + 0xa4, 0x2d, 0xdf, 0xac, 0x6a, 0x97, 0xeb, 0x3f, 0x36, 0x37, 0xb6, 0xe7, 0xbb, 0x77, 0x3c, 0x0b, + 0xc3, 0x0f, 0x6f, 0x30, 0x73, 0x51, 0x21, 0xd7, 0xd8, 0xc9, 0x5a, 0xad, 0x02, 0x5c, 0xff, 0x56, + 0x7f, 0x36, 0x7e, 0x16, 0x72, 0x9d, 0xb5, 0x7e, 0xb6, 0x0a, 0x32, 0xeb, 0x7f, 0x79, 0x43, 0x77, + 0x49, 0x69, 0xc8, 0x35, 0xa5, 0xed, 0x04, 0xa5, 0xab, 0x80, 0x52, 0x3b, 0xf3, 0xda, 0xb5, 0xc5, + 0xae, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x72, 0x4a, 0xe8, 0x09, 0xd1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_type.pb.go new file mode 100644 index 0000000..200f012 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_target_type.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_target_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible type of a feed item target. +type FeedItemTargetTypeEnum_FeedItemTargetType int32 + +const ( + // Not specified. + FeedItemTargetTypeEnum_UNSPECIFIED FeedItemTargetTypeEnum_FeedItemTargetType = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemTargetTypeEnum_UNKNOWN FeedItemTargetTypeEnum_FeedItemTargetType = 1 + // Feed item targets a campaign. + FeedItemTargetTypeEnum_CAMPAIGN FeedItemTargetTypeEnum_FeedItemTargetType = 2 + // Feed item targets an ad group. + FeedItemTargetTypeEnum_AD_GROUP FeedItemTargetTypeEnum_FeedItemTargetType = 3 + // Feed item targets a criterion. 
+ FeedItemTargetTypeEnum_CRITERION FeedItemTargetTypeEnum_FeedItemTargetType = 4 +) + +var FeedItemTargetTypeEnum_FeedItemTargetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CAMPAIGN", + 3: "AD_GROUP", + 4: "CRITERION", +} +var FeedItemTargetTypeEnum_FeedItemTargetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CAMPAIGN": 2, + "AD_GROUP": 3, + "CRITERION": 4, +} + +func (x FeedItemTargetTypeEnum_FeedItemTargetType) String() string { + return proto.EnumName(FeedItemTargetTypeEnum_FeedItemTargetType_name, int32(x)) +} +func (FeedItemTargetTypeEnum_FeedItemTargetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_type_a71e42a041a59521, []int{0, 0} +} + +// Container for enum describing possible types of a feed item target. +type FeedItemTargetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemTargetTypeEnum) Reset() { *m = FeedItemTargetTypeEnum{} } +func (m *FeedItemTargetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemTargetTypeEnum) ProtoMessage() {} +func (*FeedItemTargetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_type_a71e42a041a59521, []int{0} +} +func (m *FeedItemTargetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemTargetTypeEnum.Unmarshal(m, b) +} +func (m *FeedItemTargetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemTargetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemTargetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemTargetTypeEnum.Merge(dst, src) +} +func (m *FeedItemTargetTypeEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemTargetTypeEnum.Size(m) +} +func (m *FeedItemTargetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemTargetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemTargetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemTargetTypeEnum)(nil), "google.ads.googleads.v1.enums.FeedItemTargetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemTargetTypeEnum_FeedItemTargetType", FeedItemTargetTypeEnum_FeedItemTargetType_name, FeedItemTargetTypeEnum_FeedItemTargetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_target_type.proto", fileDescriptor_feed_item_target_type_a71e42a041a59521) +} + +var fileDescriptor_feed_item_target_type_a71e42a041a59521 = []byte{ + // 328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4a, 0xfb, 0x30, + 0x1c, 0xfd, 0xaf, 0xfb, 0xe3, 0x47, 0xa6, 0x58, 0x7a, 0xa1, 0x20, 0xee, 0x62, 0x7b, 0x80, 0x94, + 0xe2, 0x95, 0xf1, 0x2a, 0xdb, 0xba, 0x52, 0xc4, 0xae, 0xcc, 0x6d, 0x82, 0x54, 0x4a, 0x35, 0x3f, + 0x43, 0x61, 0x4d, 0xca, 0x92, 0x4d, 0xf6, 0x3a, 0x5e, 0xfa, 0x28, 0x3e, 0x8a, 0xf8, 0x10, 0xd2, + 0xd4, 0xed, 0x66, 0xe8, 0x4d, 0x38, 0xc9, 0xf9, 0xe0, 0xe4, 0xa0, 0x2b, 0x2e, 0x25, 0x9f, 0x83, + 0x9b, 0x31, 0xe5, 0xd6, 0xb0, 0x42, 0x2b, 0xcf, 0x05, 0xb1, 0x2c, 0x94, 0xfb, 0x02, 0xc0, 0xd2, + 0x5c, 0x43, 0x91, 0xea, 0x6c, 0xc1, 0x41, 0xa7, 0x7a, 0x5d, 0x02, 0x2e, 0x17, 0x52, 0x4b, 0xa7, + 0x5d, 0xeb, 0x71, 0xc6, 0x14, 0xde, 0x5a, 0xf1, 0xca, 0xc3, 0xc6, 0x7a, 0x7e, 0xb1, 0x49, 0x2e, + 0x73, 0x37, 0x13, 0x42, 0xea, 0x4c, 0xe7, 0x52, 0xa8, 0xda, 0xdc, 0x7d, 0x45, 0xa7, 0x43, 0x00, + 0x16, 0x6a, 0x28, 
0x26, 0x26, 0x79, 0xb2, 0x2e, 0xc1, 0x17, 0xcb, 0xa2, 0xfb, 0x88, 0x9c, 0x5d, + 0xc6, 0x39, 0x41, 0xad, 0x69, 0x74, 0x17, 0xfb, 0xfd, 0x70, 0x18, 0xfa, 0x03, 0xfb, 0x9f, 0xd3, + 0x42, 0xfb, 0xd3, 0xe8, 0x26, 0x1a, 0xdd, 0x47, 0x76, 0xc3, 0x39, 0x42, 0x07, 0x7d, 0x7a, 0x1b, + 0xd3, 0x30, 0x88, 0x6c, 0xab, 0xba, 0xd1, 0x41, 0x1a, 0x8c, 0x47, 0xd3, 0xd8, 0x6e, 0x3a, 0xc7, + 0xe8, 0xb0, 0x3f, 0x0e, 0x27, 0xfe, 0x38, 0x1c, 0x45, 0xf6, 0xff, 0xde, 0x57, 0x03, 0x75, 0x9e, + 0x65, 0x81, 0xff, 0x2c, 0xdf, 0x3b, 0xdb, 0xad, 0x10, 0x57, 0xbd, 0xe3, 0xc6, 0x43, 0xef, 0xc7, + 0xc9, 0xe5, 0x3c, 0x13, 0x1c, 0xcb, 0x05, 0x77, 0x39, 0x08, 0xf3, 0xab, 0xcd, 0x82, 0x65, 0xae, + 0x7e, 0x19, 0xf4, 0xda, 0x9c, 0x6f, 0x56, 0x33, 0xa0, 0xf4, 0xdd, 0x6a, 0x07, 0x75, 0x14, 0x65, + 0x0a, 0xd7, 0xb0, 0x42, 0x33, 0x0f, 0x57, 0x43, 0xa8, 0x8f, 0x0d, 0x9f, 0x50, 0xa6, 0x92, 0x2d, + 0x9f, 0xcc, 0xbc, 0xc4, 0xf0, 0x9f, 0x56, 0xa7, 0x7e, 0x24, 0x84, 0x32, 0x45, 0xc8, 0x56, 0x41, + 0xc8, 0xcc, 0x23, 0xc4, 0x68, 0x9e, 0xf6, 0x4c, 0xb1, 0xcb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xf8, 0x72, 0xf9, 0x30, 0xe8, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_validation_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_validation_status.pb.go new file mode 100644 index 0000000..0e35562 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_item_validation_status.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_item_validation_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible validation statuses of a feed item. +type FeedItemValidationStatusEnum_FeedItemValidationStatus int32 + +const ( + // No value has been specified. + FeedItemValidationStatusEnum_UNSPECIFIED FeedItemValidationStatusEnum_FeedItemValidationStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemValidationStatusEnum_UNKNOWN FeedItemValidationStatusEnum_FeedItemValidationStatus = 1 + // Validation pending. + FeedItemValidationStatusEnum_PENDING FeedItemValidationStatusEnum_FeedItemValidationStatus = 2 + // An error was found. + FeedItemValidationStatusEnum_INVALID FeedItemValidationStatusEnum_FeedItemValidationStatus = 3 + // Feed item is semantically well-formed. 
+ FeedItemValidationStatusEnum_VALID FeedItemValidationStatusEnum_FeedItemValidationStatus = 4 +) + +var FeedItemValidationStatusEnum_FeedItemValidationStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "INVALID", + 4: "VALID", +} +var FeedItemValidationStatusEnum_FeedItemValidationStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "INVALID": 3, + "VALID": 4, +} + +func (x FeedItemValidationStatusEnum_FeedItemValidationStatus) String() string { + return proto.EnumName(FeedItemValidationStatusEnum_FeedItemValidationStatus_name, int32(x)) +} +func (FeedItemValidationStatusEnum_FeedItemValidationStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_validation_status_2b319562ff85f2e3, []int{0, 0} +} + +// Container for enum describing possible validation statuses of a feed item. +type FeedItemValidationStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemValidationStatusEnum) Reset() { *m = FeedItemValidationStatusEnum{} } +func (m *FeedItemValidationStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemValidationStatusEnum) ProtoMessage() {} +func (*FeedItemValidationStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_validation_status_2b319562ff85f2e3, []int{0} +} +func (m *FeedItemValidationStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemValidationStatusEnum.Unmarshal(m, b) +} +func (m *FeedItemValidationStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemValidationStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemValidationStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemValidationStatusEnum.Merge(dst, src) +} +func (m *FeedItemValidationStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemValidationStatusEnum.Size(m) +} +func (m *FeedItemValidationStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemValidationStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemValidationStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemValidationStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedItemValidationStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedItemValidationStatusEnum_FeedItemValidationStatus", FeedItemValidationStatusEnum_FeedItemValidationStatus_name, FeedItemValidationStatusEnum_FeedItemValidationStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_item_validation_status.proto", fileDescriptor_feed_item_validation_status_2b319562ff85f2e3) +} + +var fileDescriptor_feed_item_validation_status_2b319562ff85f2e3 = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4f, 0x4a, 0x03, 0x31, + 0x14, 0xc6, 0xed, 0xd4, 0x3f, 0x98, 0x2e, 0x1c, 0x66, 0x25, 0xd2, 0x2e, 0xda, 0x03, 0x64, 0x18, + 0xdc, 0xc5, 0x85, 0x4c, 0xed, 0xb4, 0x04, 0x25, 0x0e, 0x94, 0x8e, 0x20, 0x23, 0x25, 0x9a, 0x18, + 0x06, 0x3a, 0x49, 0x69, 0xd2, 0xee, 0xbc, 0x8c, 0x4b, 0x8f, 0xe2, 0x51, 0x5c, 0x7a, 0x02, 0x49, + 0xd2, 0xce, 0x6e, 0xdc, 0x84, 0xef, 0xe5, 0x7b, 0xef, 0xc7, 0x7b, 0x1f, 0xb8, 0x15, 0x4a, 0x89, + 0x15, 0x8f, 0x29, 0xd3, 0xb1, 0x97, 0x56, 0xed, 0x92, 0x98, 0xcb, 0x6d, 0xad, 0xe3, 0x77, 0xce, + 0xd9, 0xb2, 0x32, 0xbc, 0x5e, 0xee, 0xe8, 0xaa, 0x62, 0xd4, 0x54, 
0x4a, 0x2e, 0xb5, 0xa1, 0x66, + 0xab, 0xe1, 0x7a, 0xa3, 0x8c, 0x8a, 0x06, 0x7e, 0x0a, 0x52, 0xa6, 0x61, 0x03, 0x80, 0xbb, 0x04, + 0x3a, 0xc0, 0x55, 0xff, 0xc0, 0x5f, 0x57, 0x31, 0x95, 0x52, 0x19, 0x87, 0xd8, 0x0f, 0x8f, 0x3e, + 0x40, 0x7f, 0xca, 0x39, 0xc3, 0x86, 0xd7, 0x45, 0xc3, 0x9f, 0x3b, 0x7c, 0x26, 0xb7, 0xf5, 0xe8, + 0x05, 0x5c, 0xb6, 0xf9, 0xd1, 0x05, 0xe8, 0x2d, 0xc8, 0x3c, 0xcf, 0xee, 0xf0, 0x14, 0x67, 0x93, + 0xf0, 0x28, 0xea, 0x81, 0xb3, 0x05, 0xb9, 0x27, 0x8f, 0x4f, 0x24, 0xec, 0xd8, 0x22, 0xcf, 0xc8, + 0x04, 0x93, 0x59, 0x18, 0xd8, 0x02, 0x93, 0x22, 0x7d, 0xc0, 0x93, 0xb0, 0x1b, 0x9d, 0x83, 0x13, + 0x2f, 0x8f, 0xc7, 0xbf, 0x1d, 0x30, 0x7c, 0x53, 0x35, 0xfc, 0xf7, 0x84, 0xf1, 0xa0, 0x6d, 0x85, + 0xdc, 0xde, 0x90, 0x77, 0x9e, 0xc7, 0xfb, 0x79, 0xa1, 0x56, 0x54, 0x0a, 0xa8, 0x36, 0x22, 0x16, + 0x5c, 0xba, 0x0b, 0x0f, 0x99, 0xae, 0x2b, 0xdd, 0x12, 0xf1, 0x8d, 0x7b, 0x3f, 0x83, 0xee, 0x2c, + 0x4d, 0xbf, 0x82, 0xc1, 0xcc, 0xa3, 0x52, 0xa6, 0xa1, 0x97, 0x56, 0x15, 0x09, 0xb4, 0x71, 0xe8, + 0xef, 0x83, 0x5f, 0xa6, 0x4c, 0x97, 0x8d, 0x5f, 0x16, 0x49, 0xe9, 0xfc, 0x9f, 0x60, 0xe8, 0x3f, + 0x11, 0x4a, 0x99, 0x46, 0xa8, 0xe9, 0x40, 0xa8, 0x48, 0x10, 0x72, 0x3d, 0xaf, 0xa7, 0x6e, 0xb1, + 0xeb, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0e, 0x79, 0x89, 0xf3, 0xfa, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_link_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_link_status.pb.go new file mode 100644 index 0000000..458093d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_link_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_link_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a feed link. +type FeedLinkStatusEnum_FeedLinkStatus int32 + +const ( + // Not specified. + FeedLinkStatusEnum_UNSPECIFIED FeedLinkStatusEnum_FeedLinkStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedLinkStatusEnum_UNKNOWN FeedLinkStatusEnum_FeedLinkStatus = 1 + // Feed link is enabled. + FeedLinkStatusEnum_ENABLED FeedLinkStatusEnum_FeedLinkStatus = 2 + // Feed link has been removed. 
+ FeedLinkStatusEnum_REMOVED FeedLinkStatusEnum_FeedLinkStatus = 3 +) + +var FeedLinkStatusEnum_FeedLinkStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var FeedLinkStatusEnum_FeedLinkStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x FeedLinkStatusEnum_FeedLinkStatus) String() string { + return proto.EnumName(FeedLinkStatusEnum_FeedLinkStatus_name, int32(x)) +} +func (FeedLinkStatusEnum_FeedLinkStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_link_status_8ca9533e883427cd, []int{0, 0} +} + +// Container for an enum describing possible statuses of a feed link. +type FeedLinkStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedLinkStatusEnum) Reset() { *m = FeedLinkStatusEnum{} } +func (m *FeedLinkStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedLinkStatusEnum) ProtoMessage() {} +func (*FeedLinkStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_link_status_8ca9533e883427cd, []int{0} +} +func (m *FeedLinkStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedLinkStatusEnum.Unmarshal(m, b) +} +func (m *FeedLinkStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedLinkStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedLinkStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedLinkStatusEnum.Merge(dst, src) +} +func (m *FeedLinkStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedLinkStatusEnum.Size(m) +} +func (m *FeedLinkStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedLinkStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedLinkStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedLinkStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedLinkStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedLinkStatusEnum_FeedLinkStatus", FeedLinkStatusEnum_FeedLinkStatus_name, FeedLinkStatusEnum_FeedLinkStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_link_status.proto", fileDescriptor_feed_link_status_8ca9533e883427cd) +} + +var fileDescriptor_feed_link_status_8ca9533e883427cd = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x1d, 0x28, 0x64, 0xa0, 0xa5, 0xde, 0xc4, 0x1d, 0xb6, 0x07, 0x48, 0x28, 0x7a, 0x8a, + 0xa7, 0xd4, 0x65, 0x73, 0x38, 0xbb, 0xe1, 0x58, 0x05, 0x29, 0x8e, 0x68, 0x62, 0x28, 0xdb, 0x92, + 0xb2, 0xb4, 0x7b, 0x20, 0x8f, 0x3e, 0x8a, 0x4f, 0x22, 0x3e, 0x85, 0x24, 0x5d, 0x0b, 0x3b, 0xe8, + 0xa5, 0xfc, 0xbe, 0xef, 0xf7, 0xa7, 0xbf, 0x7c, 0xe0, 0x5a, 0x6a, 0x2d, 0xd7, 0x02, 0x31, 0x6e, + 0x50, 0x05, 0x2d, 0xda, 0x85, 0x48, 0xa8, 0x72, 0x63, 0xd0, 0xbb, 0x10, 0x7c, 0xb9, 0xce, 0xd4, + 0x6a, 0x69, 0x0a, 0x56, 0x94, 0x06, 0xe6, 0x5b, 0x5d, 0xe8, 0xa0, 0x5b, 0x49, 0x21, 0xe3, 0x06, + 0x36, 0x2e, 0xb8, 0x0b, 0xa1, 0x73, 0x5d, 0x5c, 0xd6, 0xa1, 0x79, 0x86, 0x98, 0x52, 0xba, 0x60, + 0x45, 0xa6, 0xd5, 0xde, 0xdc, 0x7f, 0x01, 0xc1, 0x50, 0x08, 0x3e, 0xc9, 0xd4, 0x6a, 0xee, 0x42, + 0xa9, 0x2a, 0x37, 0xfd, 0x3b, 0x70, 0x7a, 0xb8, 0x0d, 0xce, 0x40, 0x67, 0x11, 0xcf, 0x67, 0xf4, + 0x76, 0x3c, 0x1c, 0xd3, 0x81, 0x7f, 0x14, 0x74, 0xc0, 0xc9, 0x22, 0xbe, 0x8f, 0xa7, 0x4f, 0xb1, + 0xdf, 0xb2, 0x03, 0x8d, 0x49, 0x34, 0xa1, 
0x03, 0xdf, 0xb3, 0xc3, 0x23, 0x7d, 0x98, 0x26, 0x74, + 0xe0, 0xb7, 0xa3, 0xef, 0x16, 0xe8, 0xbd, 0xe9, 0x0d, 0xfc, 0xb7, 0x63, 0x74, 0x7e, 0xf8, 0xb7, + 0x99, 0xad, 0x36, 0x6b, 0x3d, 0x47, 0x7b, 0x97, 0xd4, 0x6b, 0xa6, 0x24, 0xd4, 0x5b, 0x89, 0xa4, + 0x50, 0xae, 0x78, 0x7d, 0x9f, 0x3c, 0x33, 0x7f, 0x9c, 0xeb, 0xc6, 0x7d, 0x3f, 0xbc, 0xf6, 0x88, + 0x90, 0x4f, 0xaf, 0x3b, 0xaa, 0xa2, 0x08, 0x37, 0xb0, 0x82, 0x16, 0x25, 0x21, 0xb4, 0xef, 0x35, + 0x5f, 0x35, 0x9f, 0x12, 0x6e, 0xd2, 0x86, 0x4f, 0x93, 0x30, 0x75, 0xfc, 0x8f, 0xd7, 0xab, 0x96, + 0x18, 0x13, 0x6e, 0x30, 0x6e, 0x14, 0x18, 0x27, 0x21, 0xc6, 0x4e, 0xf3, 0x7a, 0xec, 0x8a, 0x5d, + 0xfd, 0x06, 0x00, 0x00, 0xff, 0xff, 0xe1, 0x0e, 0x39, 0x40, 0xc6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_criterion_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_criterion_type.pb.go new file mode 100644 index 0000000..1c28616 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_criterion_type.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_mapping_criterion_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible placeholder types for a feed mapping. +type FeedMappingCriterionTypeEnum_FeedMappingCriterionType int32 + +const ( + // Not specified. + FeedMappingCriterionTypeEnum_UNSPECIFIED FeedMappingCriterionTypeEnum_FeedMappingCriterionType = 0 + // Used for return value only. Represents value unknown in this version. + FeedMappingCriterionTypeEnum_UNKNOWN FeedMappingCriterionTypeEnum_FeedMappingCriterionType = 1 + // Allows campaign targeting at locations within a location feed. + FeedMappingCriterionTypeEnum_LOCATION_EXTENSION_TARGETING FeedMappingCriterionTypeEnum_FeedMappingCriterionType = 4 + // Allows url targeting for your dynamic search ads within a page feed. 
+ FeedMappingCriterionTypeEnum_DSA_PAGE_FEED FeedMappingCriterionTypeEnum_FeedMappingCriterionType = 3 +) + +var FeedMappingCriterionTypeEnum_FeedMappingCriterionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 4: "LOCATION_EXTENSION_TARGETING", + 3: "DSA_PAGE_FEED", +} +var FeedMappingCriterionTypeEnum_FeedMappingCriterionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LOCATION_EXTENSION_TARGETING": 4, + "DSA_PAGE_FEED": 3, +} + +func (x FeedMappingCriterionTypeEnum_FeedMappingCriterionType) String() string { + return proto.EnumName(FeedMappingCriterionTypeEnum_FeedMappingCriterionType_name, int32(x)) +} +func (FeedMappingCriterionTypeEnum_FeedMappingCriterionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_criterion_type_6f259ec09688fc9e, []int{0, 0} +} + +// Container for enum describing possible criterion types for a feed mapping. +type FeedMappingCriterionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedMappingCriterionTypeEnum) Reset() { *m = FeedMappingCriterionTypeEnum{} } +func (m *FeedMappingCriterionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*FeedMappingCriterionTypeEnum) ProtoMessage() {} +func (*FeedMappingCriterionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_criterion_type_6f259ec09688fc9e, []int{0} +} +func (m *FeedMappingCriterionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedMappingCriterionTypeEnum.Unmarshal(m, b) +} +func (m *FeedMappingCriterionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedMappingCriterionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *FeedMappingCriterionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedMappingCriterionTypeEnum.Merge(dst, src) +} +func (m *FeedMappingCriterionTypeEnum) XXX_Size() int { + return xxx_messageInfo_FeedMappingCriterionTypeEnum.Size(m) +} +func (m *FeedMappingCriterionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedMappingCriterionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedMappingCriterionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedMappingCriterionTypeEnum)(nil), "google.ads.googleads.v1.enums.FeedMappingCriterionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedMappingCriterionTypeEnum_FeedMappingCriterionType", FeedMappingCriterionTypeEnum_FeedMappingCriterionType_name, FeedMappingCriterionTypeEnum_FeedMappingCriterionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_mapping_criterion_type.proto", fileDescriptor_feed_mapping_criterion_type_6f259ec09688fc9e) +} + +var fileDescriptor_feed_mapping_criterion_type_6f259ec09688fc9e = []byte{ + // 344 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x6a, 0xe3, 0x30, + 0x14, 0x1d, 0x27, 0xc3, 0x0c, 0x28, 0x0c, 0xe3, 0x7a, 0x55, 0x4a, 0x02, 0x4d, 0x0e, 0x20, 0x63, + 0xba, 0x53, 0x17, 0x45, 0x49, 0x14, 0x63, 0xda, 0x3a, 0xa6, 0x71, 0xd2, 0x52, 0x0c, 0xc6, 0x8d, + 0x54, 0x61, 0x88, 0x25, 0x61, 0x39, 0x81, 0x1c, 0xa2, 0x97, 0xe8, 0xb2, 0x47, 0xe9, 0x51, 0xba, + 0xec, 0x09, 0x8a, 0xad, 0x3a, 0xbb, 0x74, 0x23, 0x1e, 0x7a, 0xff, 0xbd, 0xff, 0xdf, 0x03, 0x57, + 0x5c, 0x4a, 0xbe, 0x61, 0x6e, 0x46, 0xb5, 0x6b, 0x60, 0x8d, 0x76, 0x9e, 0xcb, 0xc4, 0xb6, 0xd0, + 0xee, 0x33, 0x63, 0x34, 0x2d, 
0x32, 0xa5, 0x72, 0xc1, 0xd3, 0x75, 0x99, 0x57, 0xac, 0xcc, 0xa5, + 0x48, 0xab, 0xbd, 0x62, 0x50, 0x95, 0xb2, 0x92, 0xce, 0xc0, 0xa8, 0x60, 0x46, 0x35, 0x3c, 0x18, + 0xc0, 0x9d, 0x07, 0x1b, 0x83, 0xb3, 0x7e, 0xeb, 0xaf, 0x72, 0x37, 0x13, 0x42, 0x56, 0x59, 0x95, + 0x4b, 0xa1, 0x8d, 0x78, 0xf4, 0x62, 0x81, 0xfe, 0x8c, 0x31, 0x7a, 0x6b, 0x36, 0x4c, 0xda, 0x05, + 0xf1, 0x5e, 0x31, 0x22, 0xb6, 0xc5, 0xa8, 0x00, 0xa7, 0xc7, 0x78, 0xe7, 0x3f, 0xe8, 0x2d, 0xc3, + 0x45, 0x44, 0x26, 0xc1, 0x2c, 0x20, 0x53, 0xfb, 0x97, 0xd3, 0x03, 0x7f, 0x97, 0xe1, 0x75, 0x38, + 0xbf, 0x0f, 0x6d, 0xcb, 0x39, 0x07, 0xfd, 0x9b, 0xf9, 0x04, 0xc7, 0xc1, 0x3c, 0x4c, 0xc9, 0x43, + 0x4c, 0xc2, 0x45, 0x8d, 0x62, 0x7c, 0xe7, 0x93, 0x38, 0x08, 0x7d, 0xfb, 0xb7, 0x73, 0x02, 0xfe, + 0x4d, 0x17, 0x38, 0x8d, 0xb0, 0x4f, 0xd2, 0x19, 0x21, 0x53, 0xbb, 0x3b, 0xfe, 0xb4, 0xc0, 0x70, + 0x2d, 0x0b, 0xf8, 0x63, 0xa6, 0xf1, 0xe0, 0xd8, 0x49, 0x51, 0x1d, 0x2a, 0xb2, 0x1e, 0xc7, 0xdf, + 0x7a, 0x2e, 0x37, 0x99, 0xe0, 0x50, 0x96, 0xdc, 0xe5, 0x4c, 0x34, 0x91, 0xdb, 0x92, 0x55, 0xae, + 0x8f, 0x74, 0x7e, 0xd9, 0xbc, 0xaf, 0x9d, 0xae, 0x8f, 0xf1, 0x5b, 0x67, 0xe0, 0x1b, 0x2b, 0x4c, + 0x35, 0x34, 0xb0, 0x46, 0x2b, 0x0f, 0xd6, 0xf5, 0xe8, 0xf7, 0x96, 0x4f, 0x30, 0xd5, 0xc9, 0x81, + 0x4f, 0x56, 0x5e, 0xd2, 0xf0, 0x1f, 0x9d, 0xa1, 0xf9, 0x44, 0x08, 0x53, 0x8d, 0xd0, 0x61, 0x02, + 0xa1, 0x95, 0x87, 0x50, 0x33, 0xf3, 0xf4, 0xa7, 0x39, 0xec, 0xe2, 0x2b, 0x00, 0x00, 0xff, 0xff, + 0xd1, 0x3e, 0xc1, 0xb5, 0x0b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_status.pb.go new file mode 100644 index 0000000..f37bc39 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_mapping_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_mapping_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a feed mapping. +type FeedMappingStatusEnum_FeedMappingStatus int32 + +const ( + // Not specified. + FeedMappingStatusEnum_UNSPECIFIED FeedMappingStatusEnum_FeedMappingStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedMappingStatusEnum_UNKNOWN FeedMappingStatusEnum_FeedMappingStatus = 1 + // Feed mapping is enabled. + FeedMappingStatusEnum_ENABLED FeedMappingStatusEnum_FeedMappingStatus = 2 + // Feed mapping has been removed. 
+ FeedMappingStatusEnum_REMOVED FeedMappingStatusEnum_FeedMappingStatus = 3 +) + +var FeedMappingStatusEnum_FeedMappingStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var FeedMappingStatusEnum_FeedMappingStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x FeedMappingStatusEnum_FeedMappingStatus) String() string { + return proto.EnumName(FeedMappingStatusEnum_FeedMappingStatus_name, int32(x)) +} +func (FeedMappingStatusEnum_FeedMappingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_status_4cbd153096d872ad, []int{0, 0} +} + +// Container for enum describing possible statuses of a feed mapping. +type FeedMappingStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedMappingStatusEnum) Reset() { *m = FeedMappingStatusEnum{} } +func (m *FeedMappingStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedMappingStatusEnum) ProtoMessage() {} +func (*FeedMappingStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_status_4cbd153096d872ad, []int{0} +} +func (m *FeedMappingStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedMappingStatusEnum.Unmarshal(m, b) +} +func (m *FeedMappingStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedMappingStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedMappingStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedMappingStatusEnum.Merge(dst, src) +} +func (m *FeedMappingStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedMappingStatusEnum.Size(m) +} +func (m *FeedMappingStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedMappingStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedMappingStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedMappingStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedMappingStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedMappingStatusEnum_FeedMappingStatus", FeedMappingStatusEnum_FeedMappingStatus_name, FeedMappingStatusEnum_FeedMappingStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_mapping_status.proto", fileDescriptor_feed_mapping_status_4cbd153096d872ad) +} + +var fileDescriptor_feed_mapping_status_4cbd153096d872ad = []byte{ + // 303 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x1d, 0x28, 0x64, 0x07, 0xeb, 0x40, 0x0f, 0xe2, 0x0e, 0xdb, 0x03, 0x24, 0x14, 0x0f, + 0x42, 0x3c, 0xa5, 0x2e, 0x1b, 0x63, 0xae, 0x1b, 0x8e, 0x55, 0x90, 0xc2, 0x88, 0x26, 0x86, 0xc2, + 0x9a, 0x94, 0xa5, 0xdb, 0x03, 0x79, 0xf4, 0x51, 0x7c, 0x14, 0x7d, 0x09, 0x49, 0xb2, 0xf6, 0x32, + 0xf4, 0x52, 0x7e, 0x5f, 0x7f, 0x7f, 0xf2, 0xfb, 0x3e, 0x70, 0x27, 0xb5, 0x96, 0x1b, 0x81, 0x18, + 0x37, 0xc8, 0x43, 0x8b, 0xf6, 0x11, 0x12, 0x6a, 0x57, 0x18, 0xf4, 0x2e, 0x04, 0x5f, 0x17, 0xac, + 0x2c, 0x73, 0x25, 0xd7, 0xa6, 0x62, 0xd5, 0xce, 0xc0, 0x72, 0xab, 0x2b, 0xdd, 0xed, 0x79, 0x35, + 0x64, 0xdc, 0xc0, 0xc6, 0x08, 0xf7, 0x11, 0x74, 0xc6, 0xeb, 0x9b, 0x3a, 0xb7, 0xcc, 0x11, 0x53, + 0x4a, 0x57, 0xac, 0xca, 0xb5, 0x3a, 0x98, 0x07, 0x1c, 0x5c, 0x8e, 0x84, 0xe0, 0x33, 0x1f, 0xbc, + 0x74, 0xb9, 0x54, 0xed, 0x8a, 0xc1, 0x14, 0x5c, 0x1c, 0x11, 0xdd, 0x73, 0xd0, 0x59, 0x25, 0xcb, + 0x05, 0x7d, 
0x98, 0x8c, 0x26, 0x74, 0x18, 0x9e, 0x74, 0x3b, 0xe0, 0x6c, 0x95, 0x4c, 0x93, 0xf9, + 0x73, 0x12, 0xb6, 0xec, 0x40, 0x13, 0x12, 0x3f, 0xd2, 0x61, 0x18, 0xd8, 0xe1, 0x89, 0xce, 0xe6, + 0x29, 0x1d, 0x86, 0xed, 0xf8, 0xa7, 0x05, 0xfa, 0x6f, 0xba, 0x80, 0xff, 0x36, 0x8d, 0xaf, 0x8e, + 0x1e, 0x5c, 0xd8, 0x8e, 0x8b, 0xd6, 0x4b, 0x7c, 0x30, 0x4a, 0xbd, 0x61, 0x4a, 0x42, 0xbd, 0x95, + 0x48, 0x0a, 0xe5, 0x36, 0xa8, 0x6f, 0x55, 0xe6, 0xe6, 0x8f, 0xd3, 0xdd, 0xbb, 0xef, 0x47, 0xd0, + 0x1e, 0x13, 0xf2, 0x19, 0xf4, 0xc6, 0x3e, 0x8a, 0x70, 0x03, 0x3d, 0xb4, 0x28, 0x8d, 0xa0, 0xdd, + 0xda, 0x7c, 0xd5, 0x7c, 0x46, 0xb8, 0xc9, 0x1a, 0x3e, 0x4b, 0xa3, 0xcc, 0xf1, 0xdf, 0x41, 0xdf, + 0xff, 0xc4, 0x98, 0x70, 0x83, 0x71, 0xa3, 0xc0, 0x38, 0x8d, 0x30, 0x76, 0x9a, 0xd7, 0x53, 0x57, + 0xec, 0xf6, 0x37, 0x00, 0x00, 0xff, 0xff, 0x04, 0x08, 0x15, 0x89, 0xd2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_origin.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_origin.pb.go new file mode 100644 index 0000000..1ee5298 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_origin.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_origin.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for a feed origin. +type FeedOriginEnum_FeedOrigin int32 + +const ( + // Not specified. + FeedOriginEnum_UNSPECIFIED FeedOriginEnum_FeedOrigin = 0 + // Used for return value only. Represents value unknown in this version. + FeedOriginEnum_UNKNOWN FeedOriginEnum_FeedOrigin = 1 + // The FeedAttributes for this Feed are managed by the + // user. Users can add FeedAttributes to this Feed. + FeedOriginEnum_USER FeedOriginEnum_FeedOrigin = 2 + // The FeedAttributes for an GOOGLE Feed are created by Google. A feed of + // this type is maintained by Google and will have the correct attributes + // for the placeholder type of the feed. + FeedOriginEnum_GOOGLE FeedOriginEnum_FeedOrigin = 3 +) + +var FeedOriginEnum_FeedOrigin_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "USER", + 3: "GOOGLE", +} +var FeedOriginEnum_FeedOrigin_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "USER": 2, + "GOOGLE": 3, +} + +func (x FeedOriginEnum_FeedOrigin) String() string { + return proto.EnumName(FeedOriginEnum_FeedOrigin_name, int32(x)) +} +func (FeedOriginEnum_FeedOrigin) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_origin_e721bc225ad36d8a, []int{0, 0} +} + +// Container for enum describing possible values for a feed origin. 
+type FeedOriginEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedOriginEnum) Reset() { *m = FeedOriginEnum{} } +func (m *FeedOriginEnum) String() string { return proto.CompactTextString(m) } +func (*FeedOriginEnum) ProtoMessage() {} +func (*FeedOriginEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_origin_e721bc225ad36d8a, []int{0} +} +func (m *FeedOriginEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedOriginEnum.Unmarshal(m, b) +} +func (m *FeedOriginEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedOriginEnum.Marshal(b, m, deterministic) +} +func (dst *FeedOriginEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedOriginEnum.Merge(dst, src) +} +func (m *FeedOriginEnum) XXX_Size() int { + return xxx_messageInfo_FeedOriginEnum.Size(m) +} +func (m *FeedOriginEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedOriginEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedOriginEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedOriginEnum)(nil), "google.ads.googleads.v1.enums.FeedOriginEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedOriginEnum_FeedOrigin", FeedOriginEnum_FeedOrigin_name, FeedOriginEnum_FeedOrigin_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_origin.proto", fileDescriptor_feed_origin_e721bc225ad36d8a) +} + +var fileDescriptor_feed_origin_e721bc225ad36d8a = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4a, 0xc3, 0x40, + 0x14, 0x36, 0xa9, 0x54, 0x99, 0x82, 0x0d, 0x59, 0x8a, 0x5d, 0xb4, 0x07, 0x98, 0x21, 0xb8, 0x1b, + 0x37, 0x26, 0x9a, 0x86, 0xa2, 0x24, 0xa1, 0x25, 0x11, 0x24, 0x20, 0xd1, 0x19, 0x87, 0x40, 0x33, + 0x2f, 0x64, 0xd2, 0x1e, 0xc8, 0xa5, 0x47, 0xf1, 0x1c, 0xae, 0x3c, 0x85, 0x64, 0xd2, 0xa4, 0x2b, + 0xdd, 0x0c, 0x1f, 0xef, 0xfb, 0x99, 0xef, 0x3d, 0x44, 0x04, 0x80, 0xd8, 0x72, 0x92, 0x33, 0x75, + 0x80, 0x2d, 0xda, 0x3b, 0x84, 0xcb, 0x5d, 0xa9, 0xc8, 0x3b, 0xe7, 0xec, 0x05, 0xea, 0x42, 0x14, + 0x12, 0x57, 0x35, 0x34, 0x60, 0xcf, 0x3a, 0x15, 0xce, 0x99, 0xc2, 0x83, 0x01, 0xef, 0x1d, 0xac, + 0x0d, 0x97, 0x57, 0x7d, 0x5e, 0x55, 0x90, 0x5c, 0x4a, 0x68, 0xf2, 0xa6, 0x00, 0xa9, 0x3a, 0xf3, + 0x62, 0x8d, 0x2e, 0x96, 0x9c, 0xb3, 0x48, 0x07, 0xfa, 0x72, 0x57, 0x2e, 0x6e, 0x11, 0x3a, 0x4e, + 0xec, 0x29, 0x9a, 0x24, 0xe1, 0x26, 0xf6, 0xef, 0x56, 0xcb, 0x95, 0x7f, 0x6f, 0x9d, 0xd8, 0x13, + 0x74, 0x96, 0x84, 0x0f, 0x61, 0xf4, 0x14, 0x5a, 0x86, 0x7d, 0x8e, 0x4e, 0x93, 0x8d, 0xbf, 0xb6, + 0x4c, 0x1b, 0xa1, 0x71, 0x10, 0x45, 0xc1, 0xa3, 0x6f, 0x8d, 0xbc, 0x6f, 0x03, 0xcd, 0xdf, 0xa0, + 0xc4, 0xff, 0xf6, 0xf2, 0xa6, 0xc7, 0x5f, 0xe2, 0xb6, 0x4a, 0x6c, 0x3c, 0x7b, 0x07, 0x87, 0x80, + 0x6d, 0x2e, 0x05, 0x86, 0x5a, 0x10, 0xc1, 0xa5, 0x2e, 0xda, 0x9f, 0xa2, 0x2a, 0xd4, 0x1f, 0x97, + 0xb9, 0xd1, 0xef, 0x87, 0x39, 0x0a, 0x5c, 0xf7, 0xd3, 0x9c, 0x05, 0x5d, 0x94, 0xcb, 0x14, 0xee, + 0x60, 0x8b, 0x52, 0x07, 0xb7, 0x3b, 0xaa, 0xaf, 0x9e, 0xcf, 0x5c, 0xa6, 0xb2, 0x81, 0xcf, 0x52, + 0x27, 0xd3, 0xfc, 0x8f, 0x39, 0xef, 0x86, 0x94, 0xba, 0x4c, 0x51, 0x3a, 0x28, 0x28, 0x4d, 0x1d, + 0x4a, 0xb5, 0xe6, 0x75, 0xac, 0x8b, 0x5d, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x13, 0x7f, 0x4d, + 0xec, 0xb1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_status.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_status.pb.go new file mode 100644 index 0000000..55b31ab --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/feed_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/feed_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a feed. +type FeedStatusEnum_FeedStatus int32 + +const ( + // Not specified. + FeedStatusEnum_UNSPECIFIED FeedStatusEnum_FeedStatus = 0 + // Used for return value only. Represents value unknown in this version. + FeedStatusEnum_UNKNOWN FeedStatusEnum_FeedStatus = 1 + // Feed is enabled. + FeedStatusEnum_ENABLED FeedStatusEnum_FeedStatus = 2 + // Feed has been removed. + FeedStatusEnum_REMOVED FeedStatusEnum_FeedStatus = 3 +) + +var FeedStatusEnum_FeedStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var FeedStatusEnum_FeedStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x FeedStatusEnum_FeedStatus) String() string { + return proto.EnumName(FeedStatusEnum_FeedStatus_name, int32(x)) +} +func (FeedStatusEnum_FeedStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_status_bcef086af951c148, []int{0, 0} +} + +// Container for enum describing possible statuses of a feed. 
+type FeedStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedStatusEnum) Reset() { *m = FeedStatusEnum{} } +func (m *FeedStatusEnum) String() string { return proto.CompactTextString(m) } +func (*FeedStatusEnum) ProtoMessage() {} +func (*FeedStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_status_bcef086af951c148, []int{0} +} +func (m *FeedStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedStatusEnum.Unmarshal(m, b) +} +func (m *FeedStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedStatusEnum.Marshal(b, m, deterministic) +} +func (dst *FeedStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedStatusEnum.Merge(dst, src) +} +func (m *FeedStatusEnum) XXX_Size() int { + return xxx_messageInfo_FeedStatusEnum.Size(m) +} +func (m *FeedStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedStatusEnum)(nil), "google.ads.googleads.v1.enums.FeedStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FeedStatusEnum_FeedStatus", FeedStatusEnum_FeedStatus_name, FeedStatusEnum_FeedStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/feed_status.proto", fileDescriptor_feed_status_bcef086af951c148) +} + +var fileDescriptor_feed_status_bcef086af951c148 = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4a, 0xf3, 0x30, + 0x18, 0xfe, 0xd6, 0xc1, 0x27, 0x64, 0xe0, 0x4a, 0x0f, 0xc5, 0x1d, 0x6c, 0x17, 0x90, 0x50, 0x3c, + 0x8b, 0x47, 0xa9, 0xcd, 0xc6, 0x50, 0xbb, 0xe2, 0x58, 0x05, 0x29, 0x48, 0x34, 0x31, 0x14, 0xd6, + 0xa4, 0x2c, 0xed, 0x2e, 0xc8, 0x43, 0x2f, 0xc5, 0xeb, 0xf0, 0xc8, 0xab, 0x90, 0xa4, 0x3f, 0x3b, + 0xd2, 0x93, 0xf2, 0xbc, 0xef, 0xf3, 0xd3, 0x27, 0x2f, 0x40, 0x52, 0x6b, 0xb9, 0x17, 0x88, 0x71, + 0xd3, 0x41, 0x8b, 0x8e, 0x21, 0x12, 0xaa, 0x29, 0x0d, 0x7a, 0x13, 0x82, 0x3f, 0x9b, 0x9a, 0xd5, + 0x8d, 0x81, 0xd5, 0x41, 0xd7, 0x3a, 0x98, 0xb5, 0x2a, 0xc8, 0xb8, 0x81, 0x83, 0x01, 0x1e, 0x43, + 0xe8, 0x0c, 0x17, 0x97, 0x7d, 0x5e, 0x55, 0x20, 0xa6, 0x94, 0xae, 0x59, 0x5d, 0x68, 0xd5, 0x99, + 0x17, 0x19, 0x38, 0x5f, 0x0a, 0xc1, 0xb7, 0x2e, 0x90, 0xaa, 0xa6, 0x5c, 0xc4, 0x00, 0x9c, 0x36, + 0xc1, 0x14, 0x4c, 0x76, 0xc9, 0x36, 0xa5, 0x37, 0xeb, 0xe5, 0x9a, 0xc6, 0xfe, 0xbf, 0x60, 0x02, + 0xce, 0x76, 0xc9, 0x6d, 0xb2, 0x79, 0x4c, 0xfc, 0x91, 0x1d, 0x68, 0x42, 0xa2, 0x3b, 0x1a, 0xfb, + 0x9e, 0x1d, 0x1e, 0xe8, 0xfd, 0x26, 0xa3, 0xb1, 0x3f, 0x8e, 0xbe, 0x46, 0x60, 0xfe, 0xaa, 0x4b, + 0xf8, 0x67, 0xb7, 0x68, 0x7a, 0xfa, 0x53, 0x6a, 0xeb, 0xa4, 0xa3, 0xa7, 0xa8, 0x73, 0x48, 0xbd, + 0x67, 0x4a, 0x42, 0x7d, 0x90, 0x48, 0x0a, 0xe5, 0xca, 0xf6, 0xe7, 0xa8, 0x0a, 0xf3, 0xcb, 0x75, + 0xae, 0xdd, 0xf7, 0xdd, 0x1b, 0xaf, 0x08, 0xf9, 0xf0, 0x66, 0xab, 0x36, 0x8a, 0x70, 0x03, 0x5b, + 0x68, 0x51, 0x16, 0x42, 0xfb, 0x4e, 0xf3, 0xd9, 0xf3, 0x39, 0xe1, 0x26, 0x1f, 0xf8, 0x3c, 0x0b, + 0x73, 0xc7, 0x7f, 0x7b, 0xf3, 0x76, 0x89, 0x31, 0xe1, 0x06, 0xe3, 0x41, 0x81, 0x71, 0x16, 0x62, + 0xec, 0x34, 0x2f, 0xff, 0x5d, 0xb1, 0xab, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xad, 0xd3, 0x44, + 0x1c, 0xb5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/flight_placeholder_field.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/flight_placeholder_field.pb.go new file mode 100644 index 0000000..31ddb41 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/flight_placeholder_field.pb.go @@ -0,0 +1,213 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/flight_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Flight placeholder fields. +type FlightPlaceholderFieldEnum_FlightPlaceholderField int32 + +const ( + // Not specified. + FlightPlaceholderFieldEnum_UNSPECIFIED FlightPlaceholderFieldEnum_FlightPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + FlightPlaceholderFieldEnum_UNKNOWN FlightPlaceholderFieldEnum_FlightPlaceholderField = 1 + // Data Type: STRING. Required. Destination id. Example: PAR, LON. + // For feed items that only have destination id, destination id must be a + // unique key. For feed items that have both destination id and origin id, + // then the combination must be a unique key. + FlightPlaceholderFieldEnum_DESTINATION_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 2 + // Data Type: STRING. Origin id. Example: PAR, LON. + // Optional. Combination of destination id and origin id must be unique per + // offer. + FlightPlaceholderFieldEnum_ORIGIN_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 3 + // Data Type: STRING. Required. Main headline with product name to be shown + // in dynamic ad. + FlightPlaceholderFieldEnum_FLIGHT_DESCRIPTION FlightPlaceholderFieldEnum_FlightPlaceholderField = 4 + // Data Type: STRING. Shorter names are recommended. + FlightPlaceholderFieldEnum_ORIGIN_NAME FlightPlaceholderFieldEnum_FlightPlaceholderField = 5 + // Data Type: STRING. Shorter names are recommended. + FlightPlaceholderFieldEnum_DESTINATION_NAME FlightPlaceholderFieldEnum_FlightPlaceholderField = 6 + // Data Type: STRING. Price to be shown in the ad. + // Example: "100.00 USD" + FlightPlaceholderFieldEnum_FLIGHT_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 7 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + FlightPlaceholderFieldEnum_FORMATTED_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 8 + // Data Type: STRING. Sale price to be shown in the ad. + // Example: "80.00 USD" + FlightPlaceholderFieldEnum_FLIGHT_SALE_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 9 + // Data Type: STRING. Formatted sale price to be shown in the ad. + // Example: "On sale for $80.00", "$60 - $80" + FlightPlaceholderFieldEnum_FORMATTED_SALE_PRICE FlightPlaceholderFieldEnum_FlightPlaceholderField = 10 + // Data Type: URL. Image to be displayed in the ad. 
+ FlightPlaceholderFieldEnum_IMAGE_URL FlightPlaceholderFieldEnum_FlightPlaceholderField = 11 + // Data Type: URL_LIST. Required. Final URLs for the ad when using Upgraded + // URLs. User will be redirected to these URLs when they click on an ad, or + // when they click on a specific flight for ads that show multiple + // flights. + FlightPlaceholderFieldEnum_FINAL_URLS FlightPlaceholderFieldEnum_FlightPlaceholderField = 12 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + FlightPlaceholderFieldEnum_FINAL_MOBILE_URLS FlightPlaceholderFieldEnum_FlightPlaceholderField = 13 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + FlightPlaceholderFieldEnum_TRACKING_URL FlightPlaceholderFieldEnum_FlightPlaceholderField = 14 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + FlightPlaceholderFieldEnum_ANDROID_APP_LINK FlightPlaceholderFieldEnum_FlightPlaceholderField = 15 + // Data Type: STRING_LIST. List of recommended destination IDs to show + // together with this item. + FlightPlaceholderFieldEnum_SIMILAR_DESTINATION_IDS FlightPlaceholderFieldEnum_FlightPlaceholderField = 16 + // Data Type: STRING. iOS app link. + FlightPlaceholderFieldEnum_IOS_APP_LINK FlightPlaceholderFieldEnum_FlightPlaceholderField = 17 + // Data Type: INT64. iOS app store ID. + FlightPlaceholderFieldEnum_IOS_APP_STORE_ID FlightPlaceholderFieldEnum_FlightPlaceholderField = 18 +) + +var FlightPlaceholderFieldEnum_FlightPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DESTINATION_ID", + 3: "ORIGIN_ID", + 4: "FLIGHT_DESCRIPTION", + 5: "ORIGIN_NAME", + 6: "DESTINATION_NAME", + 7: "FLIGHT_PRICE", + 8: "FORMATTED_PRICE", + 9: "FLIGHT_SALE_PRICE", + 10: "FORMATTED_SALE_PRICE", + 11: "IMAGE_URL", + 12: "FINAL_URLS", + 13: "FINAL_MOBILE_URLS", + 14: "TRACKING_URL", + 15: "ANDROID_APP_LINK", + 16: "SIMILAR_DESTINATION_IDS", + 17: "IOS_APP_LINK", + 18: "IOS_APP_STORE_ID", +} +var FlightPlaceholderFieldEnum_FlightPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DESTINATION_ID": 2, + "ORIGIN_ID": 3, + "FLIGHT_DESCRIPTION": 4, + "ORIGIN_NAME": 5, + "DESTINATION_NAME": 6, + "FLIGHT_PRICE": 7, + "FORMATTED_PRICE": 8, + "FLIGHT_SALE_PRICE": 9, + "FORMATTED_SALE_PRICE": 10, + "IMAGE_URL": 11, + "FINAL_URLS": 12, + "FINAL_MOBILE_URLS": 13, + "TRACKING_URL": 14, + "ANDROID_APP_LINK": 15, + "SIMILAR_DESTINATION_IDS": 16, + "IOS_APP_LINK": 17, + "IOS_APP_STORE_ID": 18, +} + +func (x FlightPlaceholderFieldEnum_FlightPlaceholderField) String() string { + return proto.EnumName(FlightPlaceholderFieldEnum_FlightPlaceholderField_name, int32(x)) +} +func (FlightPlaceholderFieldEnum_FlightPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_flight_placeholder_field_aa0dc850b5bf1d4d, []int{0, 0} +} + +// Values for Flight placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. 
+type FlightPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FlightPlaceholderFieldEnum) Reset() { *m = FlightPlaceholderFieldEnum{} } +func (m *FlightPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*FlightPlaceholderFieldEnum) ProtoMessage() {} +func (*FlightPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_flight_placeholder_field_aa0dc850b5bf1d4d, []int{0} +} +func (m *FlightPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FlightPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *FlightPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FlightPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *FlightPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FlightPlaceholderFieldEnum.Merge(dst, src) +} +func (m *FlightPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_FlightPlaceholderFieldEnum.Size(m) +} +func (m *FlightPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FlightPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FlightPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FlightPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.FlightPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FlightPlaceholderFieldEnum_FlightPlaceholderField", FlightPlaceholderFieldEnum_FlightPlaceholderField_name, FlightPlaceholderFieldEnum_FlightPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/flight_placeholder_field.proto", fileDescriptor_flight_placeholder_field_aa0dc850b5bf1d4d) +} + +var fileDescriptor_flight_placeholder_field_aa0dc850b5bf1d4d = []byte{ + // 497 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xcf, 0x6e, 0xd3, 0x30, + 0x18, 0xa7, 0x2d, 0x6c, 0xcc, 0xdd, 0x5a, 0xcf, 0x8c, 0x81, 0xc6, 0x76, 0xd8, 0x1e, 0x20, 0x51, + 0xc4, 0x2d, 0x70, 0x71, 0x1b, 0x27, 0x58, 0x4d, 0x9c, 0x28, 0x49, 0x8b, 0x84, 0x2a, 0x45, 0x61, + 0xc9, 0xb2, 0x48, 0x69, 0x5c, 0xd5, 0xdd, 0x5e, 0x84, 0x37, 0xe0, 0xc8, 0x91, 0xc7, 0xe0, 0x51, + 0xb8, 0xf1, 0x06, 0xc8, 0x4e, 0xda, 0x0e, 0x04, 0xbb, 0x58, 0xdf, 0xf7, 0xfb, 0xa7, 0x4f, 0xfe, + 0x3e, 0xf0, 0xbe, 0xe0, 0xbc, 0xa8, 0x72, 0x3d, 0xcd, 0x84, 0xde, 0x94, 0xb2, 0xba, 0x37, 0xf4, + 0xbc, 0xbe, 0x5b, 0x08, 0xfd, 0xa6, 0x2a, 0x8b, 0xdb, 0x75, 0xb2, 0xac, 0xd2, 0xeb, 0xfc, 0x96, + 0x57, 0x59, 0xbe, 0x4a, 0x6e, 0xca, 0xbc, 0xca, 0xb4, 0xe5, 0x8a, 0xaf, 0x39, 0xba, 0x68, 0x2c, + 0x5a, 0x9a, 0x09, 0x6d, 0xeb, 0xd6, 0xee, 0x0d, 0x4d, 0xb9, 0xcf, 0xce, 0x37, 0xe1, 0xcb, 0x52, + 0x4f, 0xeb, 0x9a, 0xaf, 0xd3, 0x75, 0xc9, 0x6b, 0xd1, 0x98, 0xaf, 0xbe, 0xf7, 0xc0, 0x99, 0xad, + 0xf2, 0x83, 0x5d, 0xbc, 0x2d, 0xd3, 0x49, 0x7d, 0xb7, 0xb8, 0xfa, 0xd2, 0x03, 0xa7, 0xff, 0xa6, + 0xd1, 0x10, 0xf4, 0xa7, 0x2c, 0x0a, 0xc8, 0x98, 0xda, 0x94, 0x58, 0xf0, 0x09, 0xea, 0x83, 0xfd, + 0x29, 0x9b, 0x30, 0xff, 0x23, 0x83, 0x1d, 0x84, 0xc0, 0xc0, 0x22, 0x51, 0x4c, 0x19, 0x8e, 0xa9, + 0xcf, 0x12, 0x6a, 0xc1, 0x2e, 0x3a, 0x02, 0x07, 0x7e, 0x48, 0x1d, 0xaa, 0xda, 0x1e, 0x3a, 0x05, + 0xc8, 0x76, 0xa9, 0xf3, 0x21, 0x4e, 0x2c, 0x12, 0x8d, 0x43, 0x1a, 0x48, 0x25, 0x7c, 0x2a, 0x83, + 0x5b, 0x19, 0xc3, 0x1e, 0x81, 0xcf, 0xd0, 0x09, 0x80, 0x0f, 0xb3, 0x14, 0xba, 0x87, 0x20, 0x38, + 0x6c, 0xed, 0x41, 0x48, 0xc7, 
0x04, 0xee, 0xa3, 0x17, 0x60, 0x68, 0xfb, 0xa1, 0x87, 0xe3, 0x98, + 0x58, 0x2d, 0xf8, 0x1c, 0xbd, 0x04, 0xc7, 0xad, 0x2c, 0xc2, 0x2e, 0x69, 0xe1, 0x03, 0xf4, 0x1a, + 0x9c, 0xec, 0xb4, 0x0f, 0x18, 0x20, 0xa7, 0xa4, 0x1e, 0x76, 0x48, 0x32, 0x0d, 0x5d, 0xd8, 0x47, + 0x03, 0x00, 0x6c, 0xca, 0xb0, 0x2b, 0xdb, 0x08, 0x1e, 0xaa, 0x3c, 0xd5, 0x7b, 0xfe, 0x88, 0xba, + 0xa4, 0x81, 0x8f, 0xe4, 0x34, 0x71, 0x88, 0xc7, 0x13, 0xca, 0x1c, 0x65, 0x1c, 0xc8, 0xa9, 0x31, + 0xb3, 0x42, 0x9f, 0x5a, 0x09, 0x0e, 0x82, 0xc4, 0xa5, 0x6c, 0x02, 0x87, 0xe8, 0x0d, 0x78, 0x15, + 0x51, 0x8f, 0xba, 0x38, 0x4c, 0xfe, 0xfc, 0x9f, 0x08, 0x42, 0x19, 0x42, 0xfd, 0x68, 0x27, 0x3f, + 0x96, 0x21, 0x1b, 0x24, 0x8a, 0xfd, 0x90, 0xc8, 0x9f, 0x43, 0xa3, 0x5f, 0x1d, 0x70, 0x79, 0xcd, + 0x17, 0xda, 0xa3, 0x8b, 0x1f, 0x9d, 0x37, 0x8b, 0x13, 0x7f, 0x6f, 0x2e, 0x90, 0x8b, 0x0f, 0x3a, + 0x9f, 0x46, 0xad, 0xbd, 0xe0, 0x55, 0x5a, 0x17, 0x1a, 0x5f, 0x15, 0x7a, 0x91, 0xd7, 0xea, 0x2c, + 0x36, 0x57, 0xb8, 0x2c, 0xc5, 0x7f, 0x8e, 0xf2, 0x9d, 0x7a, 0xbf, 0x76, 0x7b, 0x0e, 0xc6, 0xdf, + 0xba, 0x17, 0x4e, 0x13, 0x85, 0x33, 0xa1, 0x35, 0xa5, 0xac, 0x66, 0x86, 0x26, 0x8f, 0x48, 0xfc, + 0xd8, 0xf0, 0x73, 0x9c, 0x89, 0xf9, 0x96, 0x9f, 0xcf, 0x8c, 0xb9, 0xe2, 0x7f, 0x76, 0x2f, 0x1b, + 0xd0, 0x34, 0x71, 0x26, 0x4c, 0x73, 0xab, 0x30, 0xcd, 0x99, 0x61, 0x9a, 0x4a, 0xf3, 0x79, 0x4f, + 0x0d, 0xf6, 0xf6, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd3, 0x30, 0xe1, 0x83, 0x2c, 0x03, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_event_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_event_type.pb.go new file mode 100644 index 0000000..cc8b862 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_event_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/frequency_cap_event_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of event that the cap applies to (e.g. impression). +type FrequencyCapEventTypeEnum_FrequencyCapEventType int32 + +const ( + // Not specified. + FrequencyCapEventTypeEnum_UNSPECIFIED FrequencyCapEventTypeEnum_FrequencyCapEventType = 0 + // Used for return value only. Represents value unknown in this version. + FrequencyCapEventTypeEnum_UNKNOWN FrequencyCapEventTypeEnum_FrequencyCapEventType = 1 + // The cap applies on ad impressions. + FrequencyCapEventTypeEnum_IMPRESSION FrequencyCapEventTypeEnum_FrequencyCapEventType = 2 + // The cap applies on video ad views. 
+ FrequencyCapEventTypeEnum_VIDEO_VIEW FrequencyCapEventTypeEnum_FrequencyCapEventType = 3 +) + +var FrequencyCapEventTypeEnum_FrequencyCapEventType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IMPRESSION", + 3: "VIDEO_VIEW", +} +var FrequencyCapEventTypeEnum_FrequencyCapEventType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IMPRESSION": 2, + "VIDEO_VIEW": 3, +} + +func (x FrequencyCapEventTypeEnum_FrequencyCapEventType) String() string { + return proto.EnumName(FrequencyCapEventTypeEnum_FrequencyCapEventType_name, int32(x)) +} +func (FrequencyCapEventTypeEnum_FrequencyCapEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_event_type_0dbeda690db4d8fa, []int{0, 0} +} + +// Container for enum describing the type of event that the cap applies to. +type FrequencyCapEventTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FrequencyCapEventTypeEnum) Reset() { *m = FrequencyCapEventTypeEnum{} } +func (m *FrequencyCapEventTypeEnum) String() string { return proto.CompactTextString(m) } +func (*FrequencyCapEventTypeEnum) ProtoMessage() {} +func (*FrequencyCapEventTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_event_type_0dbeda690db4d8fa, []int{0} +} +func (m *FrequencyCapEventTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FrequencyCapEventTypeEnum.Unmarshal(m, b) +} +func (m *FrequencyCapEventTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FrequencyCapEventTypeEnum.Marshal(b, m, deterministic) +} +func (dst *FrequencyCapEventTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FrequencyCapEventTypeEnum.Merge(dst, src) +} +func (m *FrequencyCapEventTypeEnum) XXX_Size() int { + return xxx_messageInfo_FrequencyCapEventTypeEnum.Size(m) +} +func (m *FrequencyCapEventTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FrequencyCapEventTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FrequencyCapEventTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FrequencyCapEventTypeEnum)(nil), "google.ads.googleads.v1.enums.FrequencyCapEventTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FrequencyCapEventTypeEnum_FrequencyCapEventType", FrequencyCapEventTypeEnum_FrequencyCapEventType_name, FrequencyCapEventTypeEnum_FrequencyCapEventType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/frequency_cap_event_type.proto", fileDescriptor_frequency_cap_event_type_0dbeda690db4d8fa) +} + +var fileDescriptor_frequency_cap_event_type_0dbeda690db4d8fa = []byte{ + // 320 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0x75, 0x1d, 0x28, 0x64, 0xa0, 0xa5, 0xe0, 0xc1, 0xe1, 0x0e, 0xdb, 0x0f, 0x48, 0x29, 0xde, + 0xa2, 0x97, 0x6e, 0xcb, 0x46, 0x11, 0xbb, 0xe2, 0x5c, 0x07, 0x52, 0x28, 0x71, 0x8d, 0x61, 0xb0, + 0x25, 0xb1, 0xe9, 0x06, 0xfb, 0x3b, 0x1e, 0xfd, 0x29, 0xfe, 0x14, 0x2f, 0xfe, 0x05, 0x49, 0x62, + 0x7b, 0x9a, 0x5e, 0xc2, 0x4b, 0xde, 0xf7, 0x5e, 0xde, 0xf7, 0xc0, 0x1d, 0x13, 0x82, 0x6d, 0xa8, + 0x4f, 0x0a, 0xe5, 0x5b, 0xa8, 0xd1, 0x3e, 0xf0, 0x29, 0xdf, 0x6d, 0x95, 0xff, 0x5a, 0xd2, 0xb7, + 0x1d, 0xe5, 0xab, 0x43, 0xbe, 0x22, 0x32, 0xa7, 0x7b, 0xca, 0xab, 0xbc, 0x3a, 0x48, 0x0a, 0x65, + 0x29, 0x2a, 0xe1, 0xf5, 0xac, 0x04, 0x92, 0x42, 0xc1, 0x46, 0x0d, 0xf7, 0x01, 0x34, 0xea, 0xee, + 0x75, 0x6d, 
0x2e, 0xd7, 0x3e, 0xe1, 0x5c, 0x54, 0xa4, 0x5a, 0x0b, 0xae, 0xac, 0x78, 0x50, 0x82, + 0xab, 0x49, 0x6d, 0x3f, 0x22, 0x12, 0x6b, 0xf3, 0xa7, 0x83, 0xa4, 0x98, 0xef, 0xb6, 0x83, 0x05, + 0xb8, 0x3c, 0x4a, 0x7a, 0x17, 0xa0, 0xb3, 0x88, 0xe7, 0x09, 0x1e, 0x45, 0x93, 0x08, 0x8f, 0xdd, + 0x13, 0xaf, 0x03, 0xce, 0x16, 0xf1, 0x7d, 0x3c, 0x5b, 0xc6, 0x6e, 0xcb, 0x3b, 0x07, 0x20, 0x7a, + 0x48, 0x1e, 0xf1, 0x7c, 0x1e, 0xcd, 0x62, 0xd7, 0xd1, 0xf7, 0x34, 0x1a, 0xe3, 0x59, 0x9e, 0x46, + 0x78, 0xe9, 0xb6, 0x87, 0xdf, 0x2d, 0xd0, 0x5f, 0x89, 0x2d, 0xfc, 0x37, 0xf7, 0xb0, 0x7b, 0xf4, + 0xeb, 0x44, 0xa7, 0x4e, 0x5a, 0xcf, 0xc3, 0x5f, 0x31, 0x13, 0x1b, 0xc2, 0x19, 0x14, 0x25, 0xf3, + 0x19, 0xe5, 0x66, 0xa7, 0xba, 0x42, 0xb9, 0x56, 0x7f, 0x34, 0x7a, 0x6b, 0xce, 0x77, 0xa7, 0x3d, + 0x0d, 0xc3, 0x0f, 0xa7, 0x37, 0xb5, 0x56, 0x61, 0xa1, 0xa0, 0x85, 0x1a, 0xa5, 0x01, 0xd4, 0x1d, + 0xa8, 0xcf, 0x9a, 0xcf, 0xc2, 0x42, 0x65, 0x0d, 0x9f, 0xa5, 0x41, 0x66, 0xf8, 0x2f, 0xa7, 0x6f, + 0x1f, 0x11, 0x0a, 0x0b, 0x85, 0x50, 0x33, 0x81, 0x50, 0x1a, 0x20, 0x64, 0x66, 0x5e, 0x4e, 0x4d, + 0xb0, 0x9b, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x76, 0x40, 0x80, 0xe2, 0xe9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_level.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_level.pb.go new file mode 100644 index 0000000..58549bf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_level.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/frequency_cap_level.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The level on which the cap is to be applied (e.g ad group ad, ad group). +// Cap is applied to all the resources of this level. +type FrequencyCapLevelEnum_FrequencyCapLevel int32 + +const ( + // Not specified. + FrequencyCapLevelEnum_UNSPECIFIED FrequencyCapLevelEnum_FrequencyCapLevel = 0 + // Used for return value only. Represents value unknown in this version. + FrequencyCapLevelEnum_UNKNOWN FrequencyCapLevelEnum_FrequencyCapLevel = 1 + // The cap is applied at the ad group ad level. + FrequencyCapLevelEnum_AD_GROUP_AD FrequencyCapLevelEnum_FrequencyCapLevel = 2 + // The cap is applied at the ad group level. + FrequencyCapLevelEnum_AD_GROUP FrequencyCapLevelEnum_FrequencyCapLevel = 3 + // The cap is applied at the campaign level. 
+ FrequencyCapLevelEnum_CAMPAIGN FrequencyCapLevelEnum_FrequencyCapLevel = 4 +) + +var FrequencyCapLevelEnum_FrequencyCapLevel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_GROUP_AD", + 3: "AD_GROUP", + 4: "CAMPAIGN", +} +var FrequencyCapLevelEnum_FrequencyCapLevel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP_AD": 2, + "AD_GROUP": 3, + "CAMPAIGN": 4, +} + +func (x FrequencyCapLevelEnum_FrequencyCapLevel) String() string { + return proto.EnumName(FrequencyCapLevelEnum_FrequencyCapLevel_name, int32(x)) +} +func (FrequencyCapLevelEnum_FrequencyCapLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_level_44ed939bc5ce921b, []int{0, 0} +} + +// Container for enum describing the level on which the cap is to be applied. +type FrequencyCapLevelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FrequencyCapLevelEnum) Reset() { *m = FrequencyCapLevelEnum{} } +func (m *FrequencyCapLevelEnum) String() string { return proto.CompactTextString(m) } +func (*FrequencyCapLevelEnum) ProtoMessage() {} +func (*FrequencyCapLevelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_level_44ed939bc5ce921b, []int{0} +} +func (m *FrequencyCapLevelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FrequencyCapLevelEnum.Unmarshal(m, b) +} +func (m *FrequencyCapLevelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FrequencyCapLevelEnum.Marshal(b, m, deterministic) +} +func (dst *FrequencyCapLevelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FrequencyCapLevelEnum.Merge(dst, src) +} +func (m *FrequencyCapLevelEnum) XXX_Size() int { + return xxx_messageInfo_FrequencyCapLevelEnum.Size(m) +} +func (m *FrequencyCapLevelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FrequencyCapLevelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FrequencyCapLevelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FrequencyCapLevelEnum)(nil), "google.ads.googleads.v1.enums.FrequencyCapLevelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FrequencyCapLevelEnum_FrequencyCapLevel", FrequencyCapLevelEnum_FrequencyCapLevel_name, FrequencyCapLevelEnum_FrequencyCapLevel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/frequency_cap_level.proto", fileDescriptor_frequency_cap_level_44ed939bc5ce921b) +} + +var fileDescriptor_frequency_cap_level_44ed939bc5ce921b = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4a, 0xfb, 0x30, + 0x1c, 0xfd, 0xaf, 0xfb, 0xa3, 0x92, 0x09, 0xd6, 0x82, 0x5e, 0x88, 0xbb, 0xd8, 0x1e, 0x20, 0xa1, + 0x78, 0x21, 0xc4, 0xab, 0xec, 0xab, 0x0c, 0xb5, 0x2b, 0xca, 0x26, 0x48, 0xb1, 0xc4, 0x35, 0x86, + 0x41, 0x97, 0xd4, 0xa5, 0xad, 0xf8, 0x3a, 0x5e, 0xfa, 0x28, 0x3e, 0x8a, 0xbe, 0x84, 0x24, 0xb1, + 0xbd, 0x19, 0x7a, 0x53, 0x4e, 0x7f, 0xe7, 0x83, 0x93, 0x03, 0xce, 0xb9, 0x94, 0x3c, 0x63, 0x88, + 0xa6, 0x0a, 0x59, 0xa8, 0x51, 0xe5, 0x23, 0x26, 0xca, 0xb5, 0x42, 0x4f, 0x1b, 0xf6, 0x5c, 0x32, + 0xb1, 0x7c, 0x4d, 0x96, 0x34, 0x4f, 0x32, 0x56, 0xb1, 0x0c, 0xe6, 0x1b, 0x59, 0x48, 0xaf, 0x6b, + 0xd5, 0x90, 0xa6, 0x0a, 0x36, 0x46, 0x58, 0xf9, 0xd0, 0x18, 0x4f, 0x4e, 0xeb, 0xdc, 0x7c, 0x85, + 0xa8, 0x10, 0xb2, 0xa0, 0xc5, 0x4a, 0x0a, 0x65, 0xcd, 0xfd, 0x17, 0x70, 0x34, 0xa9, 0x93, 0x87, + 0x34, 0xbf, 0xd2, 0xb9, 0x63, 0x51, 0xae, 0xfb, 0x0f, 
0xe0, 0x70, 0x8b, 0xf0, 0x0e, 0x40, 0x67, + 0x1e, 0xde, 0x46, 0xe3, 0xe1, 0x74, 0x32, 0x1d, 0x8f, 0xdc, 0x7f, 0x5e, 0x07, 0xec, 0xce, 0xc3, + 0xcb, 0x70, 0x76, 0x17, 0xba, 0x2d, 0xcd, 0x92, 0x51, 0x12, 0xdc, 0xcc, 0xe6, 0x51, 0x42, 0x46, + 0xae, 0xe3, 0xed, 0x83, 0xbd, 0xfa, 0xe0, 0xb6, 0xf5, 0xdf, 0x90, 0x5c, 0x47, 0x64, 0x1a, 0x84, + 0xee, 0xff, 0xc1, 0x57, 0x0b, 0xf4, 0x96, 0x72, 0x0d, 0xff, 0x2c, 0x3f, 0x38, 0xde, 0xea, 0x10, + 0xe9, 0xda, 0x51, 0xeb, 0x7e, 0xf0, 0x63, 0xe4, 0x32, 0xa3, 0x82, 0x43, 0xb9, 0xe1, 0x88, 0x33, + 0x61, 0x1e, 0x55, 0xcf, 0x97, 0xaf, 0xd4, 0x2f, 0x6b, 0x5e, 0x98, 0xef, 0x9b, 0xd3, 0x0e, 0x08, + 0x79, 0x77, 0xba, 0x81, 0x8d, 0x22, 0xa9, 0x82, 0x16, 0x6a, 0xb4, 0xf0, 0xa1, 0x1e, 0x42, 0x7d, + 0xd4, 0x7c, 0x4c, 0x52, 0x15, 0x37, 0x7c, 0xbc, 0xf0, 0x63, 0xc3, 0x7f, 0x3a, 0x3d, 0x7b, 0xc4, + 0x98, 0xa4, 0x0a, 0xe3, 0x46, 0x81, 0xf1, 0xc2, 0xc7, 0xd8, 0x68, 0x1e, 0x77, 0x4c, 0xb1, 0xb3, + 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x30, 0xa7, 0xd0, 0xfb, 0xe5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_time_unit.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_time_unit.pb.go new file mode 100644 index 0000000..ae603cd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/frequency_cap_time_unit.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/frequency_cap_time_unit.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Unit of time the cap is defined at (e.g. day, week). +type FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit int32 + +const ( + // Not specified. + FrequencyCapTimeUnitEnum_UNSPECIFIED FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit = 0 + // Used for return value only. Represents value unknown in this version. + FrequencyCapTimeUnitEnum_UNKNOWN FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit = 1 + // The cap would define limit per one day. + FrequencyCapTimeUnitEnum_DAY FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit = 2 + // The cap would define limit per one week. + FrequencyCapTimeUnitEnum_WEEK FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit = 3 + // The cap would define limit per one month. 
+ FrequencyCapTimeUnitEnum_MONTH FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit = 4 +) + +var FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DAY", + 3: "WEEK", + 4: "MONTH", +} +var FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DAY": 2, + "WEEK": 3, + "MONTH": 4, +} + +func (x FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit) String() string { + return proto.EnumName(FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit_name, int32(x)) +} +func (FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_time_unit_8ca53ecbfb0650e3, []int{0, 0} +} + +// Container for enum describing the unit of time the cap is defined at. +type FrequencyCapTimeUnitEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FrequencyCapTimeUnitEnum) Reset() { *m = FrequencyCapTimeUnitEnum{} } +func (m *FrequencyCapTimeUnitEnum) String() string { return proto.CompactTextString(m) } +func (*FrequencyCapTimeUnitEnum) ProtoMessage() {} +func (*FrequencyCapTimeUnitEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_frequency_cap_time_unit_8ca53ecbfb0650e3, []int{0} +} +func (m *FrequencyCapTimeUnitEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FrequencyCapTimeUnitEnum.Unmarshal(m, b) +} +func (m *FrequencyCapTimeUnitEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FrequencyCapTimeUnitEnum.Marshal(b, m, deterministic) +} +func (dst *FrequencyCapTimeUnitEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FrequencyCapTimeUnitEnum.Merge(dst, src) +} +func (m *FrequencyCapTimeUnitEnum) XXX_Size() int { + return xxx_messageInfo_FrequencyCapTimeUnitEnum.Size(m) +} +func (m *FrequencyCapTimeUnitEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FrequencyCapTimeUnitEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FrequencyCapTimeUnitEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FrequencyCapTimeUnitEnum)(nil), "google.ads.googleads.v1.enums.FrequencyCapTimeUnitEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit", FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit_name, FrequencyCapTimeUnitEnum_FrequencyCapTimeUnit_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/frequency_cap_time_unit.proto", fileDescriptor_frequency_cap_time_unit_8ca53ecbfb0650e3) +} + +var fileDescriptor_frequency_cap_time_unit_8ca53ecbfb0650e3 = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4e, 0xf2, 0x40, + 0x14, 0xc5, 0x3f, 0x0a, 0x9f, 0xe8, 0xb0, 0xb0, 0x69, 0x5c, 0xa8, 0x91, 0x05, 0x3c, 0xc0, 0x34, + 0x8d, 0xbb, 0x61, 0x35, 0x40, 0x41, 0x42, 0x2c, 0x44, 0xf9, 0x13, 0x4d, 0x13, 0x32, 0xd2, 0x71, + 0x32, 0x09, 0x9d, 0xa9, 0xcc, 0x94, 0xc4, 0xd7, 0x71, 0xe9, 0xa3, 0xf8, 0x28, 0x2e, 0x7c, 0x06, + 0xd3, 0x19, 0xcb, 0x0a, 0xdd, 0x34, 0x27, 0x3d, 0xf7, 0x77, 0xe6, 0xdc, 0x0b, 0x3a, 0x4c, 0x4a, + 0xb6, 0xa1, 0x3e, 0x49, 0x94, 0x6f, 0x65, 0xa1, 0x76, 0x81, 0x4f, 0x45, 0x9e, 0x2a, 0xff, 0x79, + 0x4b, 0x5f, 0x72, 0x2a, 0xd6, 0xaf, 0xab, 0x35, 0xc9, 0x56, 0x9a, 0xa7, 0x74, 0x95, 0x0b, 0xae, + 0x61, 0xb6, 0x95, 0x5a, 0x7a, 0x4d, 0x4b, 0x40, 0x92, 0x28, 0xb8, 0x87, 0xe1, 0x2e, 0x80, 0x06, + 0xbe, 0xbc, 0x2a, 0xb3, 0x33, 0xee, 0x13, 0x21, 0xa4, 0x26, 
0x9a, 0x4b, 0xa1, 0x2c, 0xdc, 0x16, + 0xe0, 0x7c, 0x50, 0xa6, 0xf7, 0x48, 0x36, 0xe3, 0x29, 0x9d, 0x0b, 0xae, 0x43, 0x91, 0xa7, 0xed, + 0x3b, 0x70, 0x76, 0xc8, 0xf3, 0x4e, 0x41, 0x63, 0x1e, 0xdd, 0x4f, 0xc3, 0xde, 0x68, 0x30, 0x0a, + 0xfb, 0xee, 0x3f, 0xaf, 0x01, 0xea, 0xf3, 0x68, 0x1c, 0x4d, 0x96, 0x91, 0x5b, 0xf1, 0xea, 0xa0, + 0xda, 0xc7, 0x0f, 0xae, 0xe3, 0x1d, 0x83, 0xda, 0x32, 0x0c, 0xc7, 0x6e, 0xd5, 0x3b, 0x01, 0xff, + 0x6f, 0x27, 0xd1, 0xec, 0xc6, 0xad, 0x75, 0xbf, 0x2a, 0xa0, 0xb5, 0x96, 0x29, 0xfc, 0xb3, 0x73, + 0xf7, 0xe2, 0xd0, 0xbb, 0xd3, 0xa2, 0xf0, 0xb4, 0xf2, 0xd8, 0xfd, 0x61, 0x99, 0xdc, 0x10, 0xc1, + 0xa0, 0xdc, 0x32, 0x9f, 0x51, 0x61, 0xd6, 0x29, 0x8f, 0x97, 0x71, 0xf5, 0xcb, 0x2d, 0x3b, 0xe6, + 0xfb, 0xe6, 0x54, 0x87, 0x18, 0xbf, 0x3b, 0xcd, 0xa1, 0x8d, 0xc2, 0x89, 0x82, 0x56, 0x16, 0x6a, + 0x11, 0xc0, 0x62, 0x7f, 0xf5, 0x51, 0xfa, 0x31, 0x4e, 0x54, 0xbc, 0xf7, 0xe3, 0x45, 0x10, 0x1b, + 0xff, 0xd3, 0x69, 0xd9, 0x9f, 0x08, 0xe1, 0x44, 0x21, 0xb4, 0x9f, 0x40, 0x68, 0x11, 0x20, 0x64, + 0x66, 0x9e, 0x8e, 0x4c, 0xb1, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7c, 0x8d, 0xe0, 0xdf, + 0xe3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/gender_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/gender_type.pb.go new file mode 100644 index 0000000..fe3a3bc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/gender_type.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/gender_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of demographic genders (e.g. female). +type GenderTypeEnum_GenderType int32 + +const ( + // Not specified. + GenderTypeEnum_UNSPECIFIED GenderTypeEnum_GenderType = 0 + // Used for return value only. Represents value unknown in this version. + GenderTypeEnum_UNKNOWN GenderTypeEnum_GenderType = 1 + // Male. + GenderTypeEnum_MALE GenderTypeEnum_GenderType = 10 + // Female. + GenderTypeEnum_FEMALE GenderTypeEnum_GenderType = 11 + // Undetermined gender. + GenderTypeEnum_UNDETERMINED GenderTypeEnum_GenderType = 20 +) + +var GenderTypeEnum_GenderType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 10: "MALE", + 11: "FEMALE", + 20: "UNDETERMINED", +} +var GenderTypeEnum_GenderType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MALE": 10, + "FEMALE": 11, + "UNDETERMINED": 20, +} + +func (x GenderTypeEnum_GenderType) String() string { + return proto.EnumName(GenderTypeEnum_GenderType_name, int32(x)) +} +func (GenderTypeEnum_GenderType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_gender_type_4c2fe868c930bc6b, []int{0, 0} +} + +// Container for enum describing the type of demographic genders. 
+type GenderTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenderTypeEnum) Reset() { *m = GenderTypeEnum{} } +func (m *GenderTypeEnum) String() string { return proto.CompactTextString(m) } +func (*GenderTypeEnum) ProtoMessage() {} +func (*GenderTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_gender_type_4c2fe868c930bc6b, []int{0} +} +func (m *GenderTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenderTypeEnum.Unmarshal(m, b) +} +func (m *GenderTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenderTypeEnum.Marshal(b, m, deterministic) +} +func (dst *GenderTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenderTypeEnum.Merge(dst, src) +} +func (m *GenderTypeEnum) XXX_Size() int { + return xxx_messageInfo_GenderTypeEnum.Size(m) +} +func (m *GenderTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GenderTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GenderTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GenderTypeEnum)(nil), "google.ads.googleads.v1.enums.GenderTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GenderTypeEnum_GenderType", GenderTypeEnum_GenderType_name, GenderTypeEnum_GenderType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/gender_type.proto", fileDescriptor_gender_type_4c2fe868c930bc6b) +} + +var fileDescriptor_gender_type_4c2fe868c930bc6b = []byte{ + // 305 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0x76, 0x53, 0xa6, 0xfc, 0x26, 0xae, 0x04, 0x4f, 0xe2, 0x0e, 0xdb, 0x03, 0xa4, 0x14, 0x6f, + 0xf1, 0x94, 0xb9, 0x6c, 0x0c, 0x5d, 0x1c, 0x73, 0x9b, 0x20, 0x05, 0xa9, 0x26, 0x84, 0xc2, 0x9a, + 0x94, 0xa6, 0x1b, 0xec, 0x75, 0x3c, 0xfa, 0x28, 0x3e, 0x87, 0x27, 0x9f, 0x42, 0x9a, 0xae, 0xdd, + 0x49, 0x2f, 0xe5, 0xe3, 0xf7, 0xfd, 0xe9, 0x97, 0x0f, 0x7c, 0x65, 0x8c, 0x5a, 0x4b, 0x3f, 0x12, + 0x76, 0x0f, 0x0b, 0xb4, 0x0d, 0x7c, 0xa9, 0x37, 0x89, 0xf5, 0x95, 0xd4, 0x42, 0x66, 0xaf, 0xf9, + 0x2e, 0x95, 0x38, 0xcd, 0x4c, 0x6e, 0x50, 0xb7, 0x54, 0xe1, 0x48, 0x58, 0x5c, 0x1b, 0xf0, 0x36, + 0xc0, 0xce, 0x70, 0x75, 0x5d, 0xe5, 0xa5, 0xb1, 0x1f, 0x69, 0x6d, 0xf2, 0x28, 0x8f, 0x8d, 0xb6, + 0xa5, 0xb9, 0x2f, 0xe0, 0x62, 0xec, 0x12, 0x17, 0xbb, 0x54, 0x32, 0xbd, 0x49, 0xfa, 0x73, 0x80, + 0xc3, 0x05, 0x75, 0xa0, 0xbd, 0xe4, 0x4f, 0x33, 0x76, 0x37, 0x19, 0x4d, 0xd8, 0xd0, 0x3b, 0x42, + 0x6d, 0x38, 0x5d, 0xf2, 0x7b, 0xfe, 0xf8, 0xcc, 0xbd, 0x06, 0x3a, 0x83, 0x93, 0x29, 0x7d, 0x60, + 0x1e, 0x20, 0x80, 0xd6, 0x88, 0x39, 0xdc, 0x46, 0x1e, 0x9c, 0x2f, 0xf9, 0x90, 0x2d, 0xd8, 0x7c, + 0x3a, 0xe1, 0x6c, 0xe8, 0x5d, 0x0e, 0xbe, 0x1b, 0xd0, 0x7b, 0x37, 0x09, 0xfe, 0xb7, 0xe9, 0xa0, + 0x73, 0xf8, 0xef, 0xac, 0x28, 0x37, 0x6b, 0xbc, 0x0c, 0xf6, 0x0e, 0x65, 0xd6, 0x91, 0x56, 0xd8, + 0x64, 0xaa, 0x18, 0xc0, 0x55, 0xaf, 0xc6, 0x49, 0x63, 0xfb, 0xc7, 0x56, 0xb7, 0xee, 0xfb, 0xd1, + 0x3c, 0x1e, 0x53, 0xfa, 0xd9, 0xec, 0x8e, 0xcb, 0x28, 0x2a, 0x2c, 0x2e, 0x61, 0x81, 0x56, 0x01, + 0x2e, 0x5e, 0x6d, 0xbf, 0x2a, 0x3e, 0xa4, 0xc2, 0x86, 0x35, 0x1f, 0xae, 0x82, 0xd0, 0xf1, 0x3f, + 0xcd, 0x5e, 0x79, 0x24, 0x84, 0x0a, 0x4b, 0x48, 0xad, 0x20, 0x64, 0x15, 0x10, 0xe2, 0x34, 0x6f, + 0x2d, 0x57, 0xec, 0xe6, 0x37, 0x00, 0x00, 0xff, 0xff, 0xc1, 0xc9, 0xf8, 0x93, 0xc3, 0x01, 0x00, + 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_target_constant_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_target_constant_status.pb.go new file mode 100644 index 0000000..f338b9a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_target_constant_status.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/geo_target_constant_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible statuses of a geo target constant. +type GeoTargetConstantStatusEnum_GeoTargetConstantStatus int32 + +const ( + // No value has been specified. + GeoTargetConstantStatusEnum_UNSPECIFIED GeoTargetConstantStatusEnum_GeoTargetConstantStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + GeoTargetConstantStatusEnum_UNKNOWN GeoTargetConstantStatusEnum_GeoTargetConstantStatus = 1 + // The geo target constant is valid. + GeoTargetConstantStatusEnum_ENABLED GeoTargetConstantStatusEnum_GeoTargetConstantStatus = 2 + // The geo target constant is obsolete and will be removed. + GeoTargetConstantStatusEnum_REMOVAL_PLANNED GeoTargetConstantStatusEnum_GeoTargetConstantStatus = 3 +) + +var GeoTargetConstantStatusEnum_GeoTargetConstantStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVAL_PLANNED", +} +var GeoTargetConstantStatusEnum_GeoTargetConstantStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVAL_PLANNED": 3, +} + +func (x GeoTargetConstantStatusEnum_GeoTargetConstantStatus) String() string { + return proto.EnumName(GeoTargetConstantStatusEnum_GeoTargetConstantStatus_name, int32(x)) +} +func (GeoTargetConstantStatusEnum_GeoTargetConstantStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_status_d39400cdaf0b42fd, []int{0, 0} +} + +// Container for describing the status of a geo target constant. 
+type GeoTargetConstantStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetConstantStatusEnum) Reset() { *m = GeoTargetConstantStatusEnum{} } +func (m *GeoTargetConstantStatusEnum) String() string { return proto.CompactTextString(m) } +func (*GeoTargetConstantStatusEnum) ProtoMessage() {} +func (*GeoTargetConstantStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_status_d39400cdaf0b42fd, []int{0} +} +func (m *GeoTargetConstantStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetConstantStatusEnum.Unmarshal(m, b) +} +func (m *GeoTargetConstantStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetConstantStatusEnum.Marshal(b, m, deterministic) +} +func (dst *GeoTargetConstantStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetConstantStatusEnum.Merge(dst, src) +} +func (m *GeoTargetConstantStatusEnum) XXX_Size() int { + return xxx_messageInfo_GeoTargetConstantStatusEnum.Size(m) +} +func (m *GeoTargetConstantStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetConstantStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetConstantStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GeoTargetConstantStatusEnum)(nil), "google.ads.googleads.v1.enums.GeoTargetConstantStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GeoTargetConstantStatusEnum_GeoTargetConstantStatus", GeoTargetConstantStatusEnum_GeoTargetConstantStatus_name, GeoTargetConstantStatusEnum_GeoTargetConstantStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/geo_target_constant_status.proto", fileDescriptor_geo_target_constant_status_d39400cdaf0b42fd) +} + +var fileDescriptor_geo_target_constant_status_d39400cdaf0b42fd = []byte{ + // 321 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x6a, 0xf2, 0x30, + 0x18, 0xfd, 0xad, 0xf0, 0x0f, 0xe2, 0x85, 0xa5, 0xbb, 0x18, 0x6c, 0x7a, 0xa1, 0x0f, 0x90, 0x52, + 0x76, 0x97, 0xc1, 0x20, 0xd5, 0x4c, 0x64, 0x2e, 0x96, 0x39, 0x3b, 0x36, 0x0a, 0x25, 0xb3, 0x21, + 0x08, 0x9a, 0x4f, 0x4c, 0x94, 0x3d, 0xcf, 0x2e, 0xf7, 0x28, 0x7b, 0x94, 0xdd, 0xed, 0x0d, 0x46, + 0x53, 0xf5, 0xae, 0xbb, 0x09, 0x27, 0x39, 0xdf, 0x39, 0x39, 0xdf, 0x41, 0xb7, 0x0a, 0x40, 0xad, + 0x64, 0x28, 0x0a, 0x13, 0x56, 0xb0, 0x44, 0xfb, 0x28, 0x94, 0x7a, 0xb7, 0x36, 0xa1, 0x92, 0x90, + 0x5b, 0xb1, 0x55, 0xd2, 0xe6, 0x0b, 0xd0, 0xc6, 0x0a, 0x6d, 0x73, 0x63, 0x85, 0xdd, 0x19, 0xbc, + 0xd9, 0x82, 0x85, 0xa0, 0x5b, 0x89, 0xb0, 0x28, 0x0c, 0x3e, 0xe9, 0xf1, 0x3e, 0xc2, 0x4e, 0x7f, + 0xd9, 0x39, 0xda, 0x6f, 0x96, 0xa1, 0xd0, 0x1a, 0xac, 0xb0, 0x4b, 0xd0, 0x07, 0x71, 0xff, 0x1d, + 0x5d, 0x8d, 0x24, 0x3c, 0x39, 0xff, 0xc1, 0xc1, 0x7e, 0xe6, 0xdc, 0x99, 0xde, 0xad, 0xfb, 0x2f, + 0xe8, 0xa2, 0x86, 0x0e, 0xda, 0xa8, 0x35, 0xe7, 0xb3, 0x84, 0x0d, 0xc6, 0x77, 0x63, 0x36, 0xf4, + 0xff, 0x05, 0x2d, 0x74, 0x36, 0xe7, 0xf7, 0x7c, 0xfa, 0xcc, 0xfd, 0x46, 0x79, 0x61, 0x9c, 0xc6, + 0x13, 0x36, 0xf4, 0xbd, 0xe0, 0x1c, 0xb5, 0x1f, 0xd9, 0xc3, 0x34, 0xa5, 0x93, 0x3c, 0x99, 0x50, + 0xce, 0xd9, 0xd0, 0x6f, 0xc6, 0x3f, 0x0d, 0xd4, 0x5b, 0xc0, 0x1a, 0xff, 0x99, 0x3e, 0xee, 0xd4, + 0x7c, 0x9f, 0x94, 0xe9, 0x93, 0xc6, 0x6b, 0x7c, 0x90, 0x2b, 0x58, 0x09, 0xad, 0x30, 0x6c, 0x55, + 0xa8, 0xa4, 0x76, 0xbb, 0x1d, 0xcb, 0xdc, 0x2c, 0x4d, 0x4d, 0xb7, 0x37, 0xee, 0xfc, 0xf0, 0x9a, 
+ 0x23, 0x4a, 0x3f, 0xbd, 0xee, 0xa8, 0xb2, 0xa2, 0x85, 0xc1, 0x15, 0x2c, 0x51, 0x1a, 0xe1, 0xb2, + 0x09, 0xf3, 0x75, 0xe4, 0x33, 0x5a, 0x98, 0xec, 0xc4, 0x67, 0x69, 0x94, 0x39, 0xfe, 0xdb, 0xeb, + 0x55, 0x8f, 0x84, 0xd0, 0xc2, 0x10, 0x72, 0x9a, 0x20, 0x24, 0x8d, 0x08, 0x71, 0x33, 0x6f, 0xff, + 0x5d, 0xb0, 0xeb, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd1, 0x6d, 0x90, 0x4b, 0xf3, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_restriction.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_restriction.pb.go new file mode 100644 index 0000000..9106e9b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_restriction.pb.go @@ -0,0 +1,116 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/geo_targeting_restriction.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A restriction used to determine if the request context's +// geo should be matched. +type GeoTargetingRestrictionEnum_GeoTargetingRestriction int32 + +const ( + // Not specified. + GeoTargetingRestrictionEnum_UNSPECIFIED GeoTargetingRestrictionEnum_GeoTargetingRestriction = 0 + // Used for return value only. Represents value unknown in this version. + GeoTargetingRestrictionEnum_UNKNOWN GeoTargetingRestrictionEnum_GeoTargetingRestriction = 1 + // Indicates that request context should match the physical location of + // the user. + GeoTargetingRestrictionEnum_LOCATION_OF_PRESENCE GeoTargetingRestrictionEnum_GeoTargetingRestriction = 2 +) + +var GeoTargetingRestrictionEnum_GeoTargetingRestriction_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LOCATION_OF_PRESENCE", +} +var GeoTargetingRestrictionEnum_GeoTargetingRestriction_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LOCATION_OF_PRESENCE": 2, +} + +func (x GeoTargetingRestrictionEnum_GeoTargetingRestriction) String() string { + return proto.EnumName(GeoTargetingRestrictionEnum_GeoTargetingRestriction_name, int32(x)) +} +func (GeoTargetingRestrictionEnum_GeoTargetingRestriction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_geo_targeting_restriction_40a83d06ddfa34b9, []int{0, 0} +} + +// Message describing feed item geo targeting restriction. 
+type GeoTargetingRestrictionEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetingRestrictionEnum) Reset() { *m = GeoTargetingRestrictionEnum{} } +func (m *GeoTargetingRestrictionEnum) String() string { return proto.CompactTextString(m) } +func (*GeoTargetingRestrictionEnum) ProtoMessage() {} +func (*GeoTargetingRestrictionEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_targeting_restriction_40a83d06ddfa34b9, []int{0} +} +func (m *GeoTargetingRestrictionEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetingRestrictionEnum.Unmarshal(m, b) +} +func (m *GeoTargetingRestrictionEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetingRestrictionEnum.Marshal(b, m, deterministic) +} +func (dst *GeoTargetingRestrictionEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetingRestrictionEnum.Merge(dst, src) +} +func (m *GeoTargetingRestrictionEnum) XXX_Size() int { + return xxx_messageInfo_GeoTargetingRestrictionEnum.Size(m) +} +func (m *GeoTargetingRestrictionEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetingRestrictionEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetingRestrictionEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GeoTargetingRestrictionEnum)(nil), "google.ads.googleads.v1.enums.GeoTargetingRestrictionEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GeoTargetingRestrictionEnum_GeoTargetingRestriction", GeoTargetingRestrictionEnum_GeoTargetingRestriction_name, GeoTargetingRestrictionEnum_GeoTargetingRestriction_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/geo_targeting_restriction.proto", fileDescriptor_geo_targeting_restriction_40a83d06ddfa34b9) +} + +var fileDescriptor_geo_targeting_restriction_40a83d06ddfa34b9 = []byte{ + // 313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4a, 0x2b, 0x31, + 0x18, 0xc5, 0x6f, 0xe7, 0x82, 0x42, 0xba, 0xb0, 0x0c, 0x82, 0xa2, 0xed, 0xa2, 0x7d, 0x80, 0x84, + 0xc1, 0x5d, 0xc4, 0x45, 0x5a, 0xd3, 0x52, 0x94, 0xcc, 0xd8, 0x7f, 0x82, 0x0c, 0x0c, 0xb1, 0x13, + 0xc2, 0x40, 0x9b, 0x0c, 0x93, 0xb4, 0x0f, 0xe4, 0xd2, 0x47, 0xf1, 0x51, 0xdc, 0xf9, 0x06, 0x32, + 0x89, 0x53, 0x57, 0x75, 0x13, 0x0e, 0x39, 0xdf, 0xef, 0xe4, 0xcb, 0x01, 0x77, 0x52, 0x6b, 0xb9, + 0x11, 0x88, 0xe7, 0x06, 0x79, 0x59, 0xab, 0x7d, 0x84, 0x84, 0xda, 0x6d, 0x0d, 0x92, 0x42, 0x67, + 0x96, 0x57, 0x52, 0xd8, 0x42, 0xc9, 0xac, 0x12, 0xc6, 0x56, 0xc5, 0xda, 0x16, 0x5a, 0xc1, 0xb2, + 0xd2, 0x56, 0x87, 0x3d, 0xcf, 0x40, 0x9e, 0x1b, 0x78, 0xc0, 0xe1, 0x3e, 0x82, 0x0e, 0xbf, 0xea, + 0x36, 0xe9, 0x65, 0x81, 0xb8, 0x52, 0xda, 0xf2, 0x9a, 0x35, 0x1e, 0x1e, 0x94, 0xe0, 0x7a, 0x22, + 0xf4, 0xa2, 0x89, 0x9f, 0xfd, 0xa6, 0x53, 0xb5, 0xdb, 0x0e, 0x9e, 0xc0, 0xc5, 0x11, 0x3b, 0x3c, + 0x03, 0xed, 0x25, 0x9b, 0x27, 0x74, 0x34, 0x1d, 0x4f, 0xe9, 0x7d, 0xe7, 0x5f, 0xd8, 0x06, 0xa7, + 0x4b, 0xf6, 0xc0, 0xe2, 0x67, 0xd6, 0x69, 0x85, 0x97, 0xe0, 0xfc, 0x31, 0x1e, 0x91, 0xc5, 0x34, + 0x66, 0x59, 0x3c, 0xce, 0x92, 0x19, 0x9d, 0x53, 0x36, 0xa2, 0x9d, 0x60, 0xf8, 0xd5, 0x02, 0xfd, + 0xb5, 0xde, 0xc2, 0x3f, 0xb7, 0x1e, 0x76, 0x8f, 0x3c, 0x9b, 0xd4, 0x5b, 0x27, 0xad, 0x97, 0xe1, + 0x0f, 0x2e, 0xf5, 0x86, 0x2b, 0x09, 0x75, 0x25, 0x91, 0x14, 0xca, 0xfd, 0xa9, 0xe9, 0xb0, 0x2c, + 0xcc, 0x91, 0x4a, 0x6f, 0xdd, 0xf9, 0x16, 0xfc, 0x9f, 0x10, 0xf2, 0x1e, 0xf4, 0x26, 0x3e, 0x8a, + 
0xe4, 0x06, 0x7a, 0x59, 0xab, 0x55, 0x04, 0xeb, 0x06, 0xcc, 0x47, 0xe3, 0xa7, 0x24, 0x37, 0xe9, + 0xc1, 0x4f, 0x57, 0x51, 0xea, 0xfc, 0xcf, 0xa0, 0xef, 0x2f, 0x31, 0x26, 0xb9, 0xc1, 0xf8, 0x30, + 0x81, 0xf1, 0x2a, 0xc2, 0xd8, 0xcd, 0xbc, 0x9e, 0xb8, 0xc5, 0x6e, 0xbe, 0x03, 0x00, 0x00, 0xff, + 0xff, 0x50, 0xdf, 0x18, 0xa1, 0xea, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_type.pb.go new file mode 100644 index 0000000..7f9f902 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/geo_targeting_type.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/geo_targeting_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible geo targeting types. +type GeoTargetingTypeEnum_GeoTargetingType int32 + +const ( + // Not specified. + GeoTargetingTypeEnum_UNSPECIFIED GeoTargetingTypeEnum_GeoTargetingType = 0 + // The value is unknown in this version. + GeoTargetingTypeEnum_UNKNOWN GeoTargetingTypeEnum_GeoTargetingType = 1 + // Location the user is interested in while making the query. + GeoTargetingTypeEnum_AREA_OF_INTEREST GeoTargetingTypeEnum_GeoTargetingType = 2 + // Location of the user issuing the query. + GeoTargetingTypeEnum_LOCATION_OF_PRESENCE GeoTargetingTypeEnum_GeoTargetingType = 3 +) + +var GeoTargetingTypeEnum_GeoTargetingType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AREA_OF_INTEREST", + 3: "LOCATION_OF_PRESENCE", +} +var GeoTargetingTypeEnum_GeoTargetingType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AREA_OF_INTEREST": 2, + "LOCATION_OF_PRESENCE": 3, +} + +func (x GeoTargetingTypeEnum_GeoTargetingType) String() string { + return proto.EnumName(GeoTargetingTypeEnum_GeoTargetingType_name, int32(x)) +} +func (GeoTargetingTypeEnum_GeoTargetingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_geo_targeting_type_16c0620d49cd437f, []int{0, 0} +} + +// Container for enum describing possible geo targeting types. 
+type GeoTargetingTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetingTypeEnum) Reset() { *m = GeoTargetingTypeEnum{} } +func (m *GeoTargetingTypeEnum) String() string { return proto.CompactTextString(m) } +func (*GeoTargetingTypeEnum) ProtoMessage() {} +func (*GeoTargetingTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_targeting_type_16c0620d49cd437f, []int{0} +} +func (m *GeoTargetingTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetingTypeEnum.Unmarshal(m, b) +} +func (m *GeoTargetingTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetingTypeEnum.Marshal(b, m, deterministic) +} +func (dst *GeoTargetingTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetingTypeEnum.Merge(dst, src) +} +func (m *GeoTargetingTypeEnum) XXX_Size() int { + return xxx_messageInfo_GeoTargetingTypeEnum.Size(m) +} +func (m *GeoTargetingTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetingTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetingTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GeoTargetingTypeEnum)(nil), "google.ads.googleads.v1.enums.GeoTargetingTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GeoTargetingTypeEnum_GeoTargetingType", GeoTargetingTypeEnum_GeoTargetingType_name, GeoTargetingTypeEnum_GeoTargetingType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/geo_targeting_type.proto", fileDescriptor_geo_targeting_type_16c0620d49cd437f) +} + +var fileDescriptor_geo_targeting_type_16c0620d49cd437f = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xf3, 0x30, + 0x1c, 0xfd, 0xd6, 0xc1, 0x27, 0x64, 0x17, 0x96, 0x32, 0x41, 0xc4, 0x5d, 0x6c, 0x0f, 0x90, 0x52, + 0x04, 0x2f, 0xe2, 0x55, 0x36, 0xb3, 0x51, 0x94, 0xb4, 0x6c, 0xdd, 0x04, 0x29, 0xd4, 0x6a, 0x43, + 0x28, 0x6c, 0x49, 0x59, 0xb2, 0xe1, 0x5e, 0xc7, 0x4b, 0x1f, 0xc5, 0x47, 0xd9, 0x53, 0x48, 0x13, + 0xdb, 0x8b, 0x81, 0xde, 0x84, 0xc3, 0xef, 0xfc, 0xe1, 0xe4, 0x80, 0x5b, 0x2e, 0x25, 0x5f, 0x33, + 0x3f, 0x2f, 0x94, 0x6f, 0x61, 0x8d, 0xf6, 0x81, 0xcf, 0xc4, 0x6e, 0xa3, 0x7c, 0xce, 0x64, 0xa6, + 0xf3, 0x2d, 0x67, 0xba, 0x14, 0x3c, 0xd3, 0x87, 0x8a, 0xc1, 0x6a, 0x2b, 0xb5, 0xf4, 0x06, 0x56, + 0x0c, 0xf3, 0x42, 0xc1, 0xd6, 0x07, 0xf7, 0x01, 0x34, 0xbe, 0xab, 0xeb, 0x26, 0xb6, 0x2a, 0xfd, + 0x5c, 0x08, 0xa9, 0x73, 0x5d, 0x4a, 0xa1, 0xac, 0x79, 0xf4, 0x0e, 0xfa, 0x33, 0x26, 0x93, 0x26, + 0x37, 0x39, 0x54, 0x8c, 0x88, 0xdd, 0x66, 0xf4, 0x02, 0xdc, 0xd3, 0xbb, 0x77, 0x0e, 0x7a, 0x4b, + 0xba, 0x88, 0xc9, 0x24, 0x9c, 0x86, 0xe4, 0xde, 0xfd, 0xe7, 0xf5, 0xc0, 0xd9, 0x92, 0x3e, 0xd0, + 0xe8, 0x89, 0xba, 0x1d, 0xaf, 0x0f, 0x5c, 0x3c, 0x27, 0x38, 0x8b, 0xa6, 0x59, 0x48, 0x13, 0x32, + 0x27, 0x8b, 0xc4, 0x75, 0xbc, 0x4b, 0xd0, 0x7f, 0x8c, 0x26, 0x38, 0x09, 0x23, 0x5a, 0x33, 0xf1, + 0x9c, 0x2c, 0x08, 0x9d, 0x10, 0xb7, 0x3b, 0x3e, 0x76, 0xc0, 0xf0, 0x4d, 0x6e, 0xe0, 0x9f, 0xed, + 0xc7, 0x17, 0xa7, 0x2d, 0xe2, 0xba, 0x76, 0xdc, 0x79, 0x1e, 0xff, 0xf8, 0xb8, 0x5c, 0xe7, 0x82, + 0x43, 0xb9, 0xe5, 0x3e, 0x67, 0xc2, 0x7c, 0xaa, 0x59, 0xaf, 0x2a, 0xd5, 0x2f, 0x63, 0xde, 0x99, + 0xf7, 0xc3, 0xe9, 0xce, 0x30, 0xfe, 0x74, 0x06, 0x33, 0x1b, 0x85, 0x0b, 0x05, 0x2d, 0xac, 0xd1, + 0x2a, 0x80, 0xf5, 0x12, 0xea, 0xab, 0xe1, 0x53, 0x5c, 0xa8, 0xb4, 0xe5, 0xd3, 0x55, 0x90, 0x1a, + 0xfe, 
0xe8, 0x0c, 0xed, 0x11, 0x21, 0x5c, 0x28, 0x84, 0x5a, 0x05, 0x42, 0xab, 0x00, 0x21, 0xa3, + 0x79, 0xfd, 0x6f, 0x8a, 0xdd, 0x7c, 0x07, 0x00, 0x00, 0xff, 0xff, 0xe3, 0x2c, 0xcb, 0xe2, 0xe4, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_category.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_category.pb.go new file mode 100644 index 0000000..d6557fa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_category.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/google_ads_field_category.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The category of the artifact. +type GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory int32 + +const ( + // Unspecified + GoogleAdsFieldCategoryEnum_UNSPECIFIED GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 0 + // Unknown + GoogleAdsFieldCategoryEnum_UNKNOWN GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 1 + // The described artifact is a resource. + GoogleAdsFieldCategoryEnum_RESOURCE GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 2 + // The described artifact is a field and is an attribute of a resource. + // Including a resource attribute field in a query may segment the query if + // the resource to which it is attributed segments the resource found in + // the FROM clause. + GoogleAdsFieldCategoryEnum_ATTRIBUTE GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 3 + // The described artifact is a field and always segments search queries. + GoogleAdsFieldCategoryEnum_SEGMENT GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 5 + // The described artifact is a field and is a metric. It never segments + // search queries. + GoogleAdsFieldCategoryEnum_METRIC GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory = 6 +) + +var GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "RESOURCE", + 3: "ATTRIBUTE", + 5: "SEGMENT", + 6: "METRIC", +} +var GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "RESOURCE": 2, + "ATTRIBUTE": 3, + "SEGMENT": 5, + "METRIC": 6, +} + +func (x GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory) String() string { + return proto.EnumName(GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory_name, int32(x)) +} +func (GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_category_e668ad533e427a2f, []int{0, 0} +} + +// Container for enum that determines if the described artifact is a resource +// or a field, and if it is a field, when it segments search queries. 
+type GoogleAdsFieldCategoryEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsFieldCategoryEnum) Reset() { *m = GoogleAdsFieldCategoryEnum{} } +func (m *GoogleAdsFieldCategoryEnum) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsFieldCategoryEnum) ProtoMessage() {} +func (*GoogleAdsFieldCategoryEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_category_e668ad533e427a2f, []int{0} +} +func (m *GoogleAdsFieldCategoryEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsFieldCategoryEnum.Unmarshal(m, b) +} +func (m *GoogleAdsFieldCategoryEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsFieldCategoryEnum.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsFieldCategoryEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsFieldCategoryEnum.Merge(dst, src) +} +func (m *GoogleAdsFieldCategoryEnum) XXX_Size() int { + return xxx_messageInfo_GoogleAdsFieldCategoryEnum.Size(m) +} +func (m *GoogleAdsFieldCategoryEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsFieldCategoryEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsFieldCategoryEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GoogleAdsFieldCategoryEnum)(nil), "google.ads.googleads.v1.enums.GoogleAdsFieldCategoryEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory", GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory_name, GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/google_ads_field_category.proto", fileDescriptor_google_ads_field_category_e668ad533e427a2f) +} + +var fileDescriptor_google_ads_field_category_e668ad533e427a2f = []byte{ + // 336 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0xb7, 0x1d, 0x4e, 0xcd, 0x14, 0x4b, 0x0f, 0x1e, 0xa6, 0x3b, 0x6c, 0x0f, 0x90, 0x52, 0xbc, + 0x45, 0x3c, 0xb4, 0x35, 0x1b, 0x45, 0xd6, 0x8d, 0xae, 0x9d, 0x20, 0x85, 0x11, 0x97, 0x1a, 0x0a, + 0x5d, 0x32, 0x96, 0x6e, 0xe0, 0x2b, 0xf8, 0x18, 0x1e, 0x7d, 0x14, 0x1f, 0xc5, 0x93, 0x8f, 0x20, + 0x4d, 0xd7, 0x9e, 0xa6, 0x97, 0x10, 0xbe, 0xdf, 0x9f, 0xef, 0xfb, 0xfd, 0xc0, 0x3d, 0x13, 0x82, + 0xe5, 0xa9, 0x45, 0xa8, 0xb4, 0xaa, 0x6f, 0xf9, 0xdb, 0xd9, 0x56, 0xca, 0xb7, 0xab, 0x7a, 0xb4, + 0x20, 0x54, 0x2e, 0x5e, 0xb3, 0x34, 0xa7, 0x8b, 0x25, 0x29, 0x52, 0x26, 0x36, 0x6f, 0x70, 0xbd, + 0x11, 0x85, 0x30, 0x7b, 0x15, 0x01, 0x12, 0x2a, 0x61, 0x23, 0x87, 0x3b, 0x1b, 0x2a, 0x79, 0xf7, + 0xa6, 0x76, 0x5f, 0x67, 0x16, 0xe1, 0x5c, 0x14, 0xa4, 0xc8, 0x04, 0x97, 0x95, 0x78, 0xf0, 0xae, + 0x81, 0xee, 0x48, 0x11, 0x1c, 0x2a, 0x87, 0xa5, 0xbd, 0xb7, 0x77, 0xc7, 0x7c, 0xbb, 0x1a, 0xe4, + 0xe0, 0xea, 0x30, 0x6a, 0x5e, 0x82, 0x4e, 0x1c, 0xcc, 0xa6, 0xd8, 0xf3, 0x87, 0x3e, 0x7e, 0x30, + 0x8e, 0xcc, 0x0e, 0x38, 0x89, 0x83, 0xc7, 0x60, 0xf2, 0x14, 0x18, 0x9a, 0x79, 0x0e, 0x4e, 0x43, + 0x3c, 0x9b, 0xc4, 0xa1, 0x87, 0x0d, 0xdd, 0xbc, 0x00, 0x67, 0x4e, 0x14, 0x85, 0xbe, 0x1b, 0x47, + 0xd8, 0x68, 0x95, 0xcc, 0x19, 0x1e, 0x8d, 0x71, 0x10, 0x19, 0xc7, 0x26, 0x00, 0xed, 0x31, 0x8e, + 0x42, 0xdf, 0x33, 0xda, 0xee, 0x8f, 0x06, 0xfa, 0x4b, 0xb1, 0x82, 0xff, 0x06, 0x72, 0xaf, 0x0f, + 0x5f, 0x34, 0x2d, 0xf3, 0x4c, 0xb5, 0x67, 0x77, 0xaf, 0x66, 0x22, 0x27, 0x9c, 0x41, 0xb1, 0x61, + 0x16, 0x4b, 0xb9, 0x4a, 
0x5b, 0xb7, 0xbb, 0xce, 0xe4, 0x1f, 0x65, 0xdf, 0xa9, 0xf7, 0x43, 0x6f, + 0x8d, 0x1c, 0xe7, 0x53, 0xef, 0x55, 0x9b, 0xa0, 0x43, 0x25, 0x6c, 0x96, 0xc2, 0xb9, 0x0d, 0xcb, + 0x6e, 0xe4, 0x57, 0x8d, 0x27, 0x0e, 0x95, 0x49, 0x83, 0x27, 0x73, 0x3b, 0x51, 0xf8, 0xb7, 0xde, + 0xaf, 0x86, 0x08, 0x39, 0x54, 0x22, 0xd4, 0x30, 0x10, 0x9a, 0xdb, 0x08, 0x29, 0xce, 0x4b, 0x5b, + 0x1d, 0x76, 0xfb, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xba, 0xef, 0x9f, 0x9b, 0x04, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_data_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_data_type.pb.go new file mode 100644 index 0000000..db4acc2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/google_ads_field_data_type.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/google_ads_field_data_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// These are the various types a GoogleAdsService artifact may take on. +type GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType int32 + +const ( + // Unspecified + GoogleAdsFieldDataTypeEnum_UNSPECIFIED GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 0 + // Unknown + GoogleAdsFieldDataTypeEnum_UNKNOWN GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 1 + // Maps to google.protobuf.BoolValue + // + // Applicable operators: =, != + GoogleAdsFieldDataTypeEnum_BOOLEAN GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 2 + // Maps to google.protobuf.StringValue. It can be compared using the set of + // operators specific to dates however. + // + // Applicable operators: =, <, >, <=, >=, BETWEEN, DURING, and IN + GoogleAdsFieldDataTypeEnum_DATE GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 3 + // Maps to google.protobuf.DoubleValue + // + // Applicable operators: =, !=, <, >, IN, NOT IN + GoogleAdsFieldDataTypeEnum_DOUBLE GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 4 + // Maps to an enum. It's specific definition can be found at type_url. + // + // Applicable operators: =, !=, IN, NOT IN + GoogleAdsFieldDataTypeEnum_ENUM GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 5 + // Maps to google.protobuf.FloatValue + // + // Applicable operators: =, !=, <, >, IN, NOT IN + GoogleAdsFieldDataTypeEnum_FLOAT GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 6 + // Maps to google.protobuf.Int32Value + // + // Applicable operators: =, !=, <, >, <=, >=, BETWEEN, IN, NOT IN + GoogleAdsFieldDataTypeEnum_INT32 GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 7 + // Maps to google.protobuf.Int64Value + // + // Applicable operators: =, !=, <, >, <=, >=, BETWEEN, IN, NOT IN + GoogleAdsFieldDataTypeEnum_INT64 GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 8 + // Maps to a protocol buffer message type. 
The data type's details can be + // found in type_url. + // + // No operators work with MESSAGE fields. + GoogleAdsFieldDataTypeEnum_MESSAGE GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 9 + // Maps to google.protobuf.StringValue. Represents the resource name + // (unique id) of a resource or one of its foreign keys. + // + // No operators work with RESOURCE_NAME fields. + GoogleAdsFieldDataTypeEnum_RESOURCE_NAME GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 10 + // Maps to google.protobuf.StringValue. + // + // Applicable operators: =, !=, LIKE, NOT LIKE, IN, NOT IN + GoogleAdsFieldDataTypeEnum_STRING GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType = 11 +) + +var GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BOOLEAN", + 3: "DATE", + 4: "DOUBLE", + 5: "ENUM", + 6: "FLOAT", + 7: "INT32", + 8: "INT64", + 9: "MESSAGE", + 10: "RESOURCE_NAME", + 11: "STRING", +} +var GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BOOLEAN": 2, + "DATE": 3, + "DOUBLE": 4, + "ENUM": 5, + "FLOAT": 6, + "INT32": 7, + "INT64": 8, + "MESSAGE": 9, + "RESOURCE_NAME": 10, + "STRING": 11, +} + +func (x GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType) String() string { + return proto.EnumName(GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType_name, int32(x)) +} +func (GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_data_type_3292bb2809ac3276, []int{0, 0} +} + +// Container holding the various data types. +type GoogleAdsFieldDataTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsFieldDataTypeEnum) Reset() { *m = GoogleAdsFieldDataTypeEnum{} } +func (m *GoogleAdsFieldDataTypeEnum) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsFieldDataTypeEnum) ProtoMessage() {} +func (*GoogleAdsFieldDataTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_data_type_3292bb2809ac3276, []int{0} +} +func (m *GoogleAdsFieldDataTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsFieldDataTypeEnum.Unmarshal(m, b) +} +func (m *GoogleAdsFieldDataTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsFieldDataTypeEnum.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsFieldDataTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsFieldDataTypeEnum.Merge(dst, src) +} +func (m *GoogleAdsFieldDataTypeEnum) XXX_Size() int { + return xxx_messageInfo_GoogleAdsFieldDataTypeEnum.Size(m) +} +func (m *GoogleAdsFieldDataTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsFieldDataTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsFieldDataTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GoogleAdsFieldDataTypeEnum)(nil), "google.ads.googleads.v1.enums.GoogleAdsFieldDataTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType", GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType_name, GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/google_ads_field_data_type.proto", fileDescriptor_google_ads_field_data_type_3292bb2809ac3276) +} + +var fileDescriptor_google_ads_field_data_type_3292bb2809ac3276 = []byte{ + // 392 
bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcf, 0x8e, 0x93, 0x40, + 0x18, 0x17, 0x76, 0xb7, 0xbb, 0x3b, 0x8d, 0x71, 0xe4, 0xe0, 0x61, 0x75, 0x0f, 0xbb, 0x0f, 0x30, + 0x04, 0x6b, 0x3c, 0x8c, 0x89, 0xc9, 0x50, 0xa6, 0x84, 0xd8, 0x0e, 0x4d, 0x81, 0x9a, 0x18, 0x12, + 0x32, 0x3a, 0x48, 0x48, 0x5a, 0x86, 0x74, 0x68, 0x93, 0xbe, 0x8e, 0x47, 0x4f, 0x3e, 0x87, 0x17, + 0xdf, 0xc3, 0x93, 0x8f, 0x60, 0x06, 0x0a, 0xa7, 0xea, 0x85, 0xfc, 0xf8, 0x7e, 0x7f, 0x3e, 0xf8, + 0x7d, 0xe0, 0x7d, 0x21, 0x65, 0xb1, 0xc9, 0x6d, 0x2e, 0x94, 0xdd, 0x41, 0x8d, 0x0e, 0x8e, 0x9d, + 0x57, 0xfb, 0x6d, 0x3f, 0xca, 0xb8, 0x50, 0xd9, 0xd7, 0x32, 0xdf, 0x88, 0x4c, 0xf0, 0x86, 0x67, + 0xcd, 0xb1, 0xce, 0x51, 0xbd, 0x93, 0x8d, 0xb4, 0xee, 0x3b, 0x05, 0xe2, 0x42, 0xa1, 0xc1, 0x8f, + 0x0e, 0x0e, 0x6a, 0xfd, 0x77, 0xaf, 0xfa, 0xf8, 0xba, 0xb4, 0x79, 0x55, 0xc9, 0x86, 0x37, 0xa5, + 0xac, 0x54, 0x67, 0x7e, 0xfc, 0x65, 0x80, 0x3b, 0xbf, 0x15, 0x10, 0xa1, 0x66, 0x3a, 0xdf, 0xe3, + 0x0d, 0x8f, 0x8f, 0x75, 0x4e, 0xab, 0xfd, 0xf6, 0xf1, 0x87, 0x01, 0x5e, 0x9c, 0xa7, 0xad, 0x67, + 0x60, 0x9c, 0xb0, 0x68, 0x49, 0xa7, 0xc1, 0x2c, 0xa0, 0x1e, 0x7c, 0x62, 0x8d, 0xc1, 0x75, 0xc2, + 0x3e, 0xb0, 0xf0, 0x23, 0x83, 0x86, 0x7e, 0x71, 0xc3, 0x70, 0x4e, 0x09, 0x83, 0xa6, 0x75, 0x03, + 0x2e, 0x3d, 0x12, 0x53, 0x78, 0x61, 0x01, 0x30, 0xf2, 0xc2, 0xc4, 0x9d, 0x53, 0x78, 0xa9, 0xa7, + 0x94, 0x25, 0x0b, 0x78, 0x65, 0xdd, 0x82, 0xab, 0xd9, 0x3c, 0x24, 0x31, 0x1c, 0x69, 0x18, 0xb0, + 0x78, 0xf2, 0x1a, 0x5e, 0x9f, 0xe0, 0xdb, 0x37, 0xf0, 0x46, 0xa7, 0x2d, 0x68, 0x14, 0x11, 0x9f, + 0xc2, 0x5b, 0xeb, 0x39, 0x78, 0xba, 0xa2, 0x51, 0x98, 0xac, 0xa6, 0x34, 0x63, 0x64, 0x41, 0x21, + 0xd0, 0xb1, 0x51, 0xbc, 0x0a, 0x98, 0x0f, 0xc7, 0xee, 0x1f, 0x03, 0x3c, 0x7c, 0x91, 0x5b, 0xf4, + 0xdf, 0x56, 0xdc, 0x97, 0xe7, 0xff, 0x6a, 0xa9, 0x4b, 0x59, 0x1a, 0x9f, 0xdc, 0x93, 0xbb, 0x90, + 0x1b, 0x5e, 0x15, 0x48, 0xee, 0x0a, 0xbb, 0xc8, 0xab, 0xb6, 0xb2, 0xfe, 0x46, 0x75, 0xa9, 0xfe, + 0x71, 0xb2, 0x77, 0xed, 0xf3, 0x9b, 0x79, 0xe1, 0x13, 0xf2, 0xdd, 0xbc, 0xef, 0x36, 0x21, 0x22, + 0x14, 0x1a, 0x96, 0xa2, 0xb5, 0x83, 0x74, 0xc1, 0xea, 0x67, 0xcf, 0xa7, 0x44, 0xa8, 0x74, 0xe0, + 0xd3, 0xb5, 0x93, 0xb6, 0xfc, 0x6f, 0xf3, 0xa1, 0x1b, 0x62, 0x4c, 0x84, 0xc2, 0x78, 0x50, 0x60, + 0xbc, 0x76, 0x30, 0x6e, 0x35, 0x9f, 0x47, 0xed, 0x87, 0x4d, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, + 0x3a, 0xb3, 0x98, 0xee, 0x4a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_date_selection_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_date_selection_type.pb.go new file mode 100644 index 0000000..08ad04c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_date_selection_type.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/hotel_date_selection_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible hotel date selection types. +type HotelDateSelectionTypeEnum_HotelDateSelectionType int32 + +const ( + // Not specified. + HotelDateSelectionTypeEnum_UNSPECIFIED HotelDateSelectionTypeEnum_HotelDateSelectionType = 0 + // Used for return value only. Represents value unknown in this version. + HotelDateSelectionTypeEnum_UNKNOWN HotelDateSelectionTypeEnum_HotelDateSelectionType = 1 + // Dates selected by default. + HotelDateSelectionTypeEnum_DEFAULT_SELECTION HotelDateSelectionTypeEnum_HotelDateSelectionType = 50 + // Dates selected by the user. + HotelDateSelectionTypeEnum_USER_SELECTED HotelDateSelectionTypeEnum_HotelDateSelectionType = 51 +) + +var HotelDateSelectionTypeEnum_HotelDateSelectionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 50: "DEFAULT_SELECTION", + 51: "USER_SELECTED", +} +var HotelDateSelectionTypeEnum_HotelDateSelectionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DEFAULT_SELECTION": 50, + "USER_SELECTED": 51, +} + +func (x HotelDateSelectionTypeEnum_HotelDateSelectionType) String() string { + return proto.EnumName(HotelDateSelectionTypeEnum_HotelDateSelectionType_name, int32(x)) +} +func (HotelDateSelectionTypeEnum_HotelDateSelectionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_hotel_date_selection_type_19278eff2b88f900, []int{0, 0} +} + +// Container for enum describing possible hotel date selection types +type HotelDateSelectionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelDateSelectionTypeEnum) Reset() { *m = HotelDateSelectionTypeEnum{} } +func (m *HotelDateSelectionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*HotelDateSelectionTypeEnum) ProtoMessage() {} +func (*HotelDateSelectionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_date_selection_type_19278eff2b88f900, []int{0} +} +func (m *HotelDateSelectionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelDateSelectionTypeEnum.Unmarshal(m, b) +} +func (m *HotelDateSelectionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelDateSelectionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *HotelDateSelectionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelDateSelectionTypeEnum.Merge(dst, src) +} +func (m *HotelDateSelectionTypeEnum) XXX_Size() int { + return xxx_messageInfo_HotelDateSelectionTypeEnum.Size(m) +} +func (m *HotelDateSelectionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_HotelDateSelectionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelDateSelectionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*HotelDateSelectionTypeEnum)(nil), "google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.HotelDateSelectionTypeEnum_HotelDateSelectionType", HotelDateSelectionTypeEnum_HotelDateSelectionType_name, HotelDateSelectionTypeEnum_HotelDateSelectionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/hotel_date_selection_type.proto", fileDescriptor_hotel_date_selection_type_19278eff2b88f900) +} + +var fileDescriptor_hotel_date_selection_type_19278eff2b88f900 = []byte{ + // 329 bytes of a 
gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x4e, 0xc2, 0x40, + 0x14, 0x14, 0x4c, 0x34, 0x59, 0x62, 0x2c, 0x4d, 0xf4, 0x80, 0x72, 0x80, 0x0f, 0xd8, 0xa6, 0x72, + 0x5b, 0xe3, 0xa1, 0xd0, 0x05, 0x89, 0xa4, 0x10, 0x4b, 0x31, 0x31, 0x4d, 0xea, 0x4a, 0x37, 0x95, + 0xa4, 0xec, 0x36, 0xec, 0x42, 0xc2, 0xc5, 0x8f, 0xf1, 0xe8, 0xa7, 0xf8, 0x29, 0x9e, 0xfc, 0x04, + 0xb3, 0xbb, 0x94, 0x13, 0x7a, 0x79, 0x99, 0xbc, 0x79, 0x33, 0x79, 0x33, 0xe0, 0x2e, 0xe3, 0x3c, + 0xcb, 0xa9, 0x43, 0x52, 0xe1, 0x18, 0xa8, 0xd0, 0xc6, 0x75, 0x28, 0x5b, 0x2f, 0x85, 0xf3, 0xc6, + 0x25, 0xcd, 0x93, 0x94, 0x48, 0x9a, 0x08, 0x9a, 0xd3, 0xb9, 0x5c, 0x70, 0x96, 0xc8, 0x6d, 0x41, + 0x61, 0xb1, 0xe2, 0x92, 0xdb, 0x4d, 0xa3, 0x81, 0x24, 0x15, 0x70, 0x2f, 0x87, 0x1b, 0x17, 0x6a, + 0x79, 0xe3, 0xba, 0x74, 0x2f, 0x16, 0x0e, 0x61, 0x8c, 0x4b, 0xa2, 0x0c, 0x84, 0x11, 0xb7, 0xdf, + 0x41, 0xe3, 0x5e, 0xf9, 0xfb, 0x44, 0xd2, 0xb0, 0x74, 0x9f, 0x6e, 0x0b, 0x8a, 0xd9, 0x7a, 0xd9, + 0x7e, 0x01, 0x97, 0x87, 0x59, 0xfb, 0x1c, 0xd4, 0xa2, 0x20, 0x9c, 0xe0, 0xde, 0xb0, 0x3f, 0xc4, + 0xbe, 0x75, 0x64, 0xd7, 0xc0, 0x69, 0x14, 0x3c, 0x04, 0xe3, 0xa7, 0xc0, 0xaa, 0xd8, 0x17, 0xa0, + 0xee, 0xe3, 0xbe, 0x17, 0x8d, 0xa6, 0x49, 0x88, 0x47, 0xb8, 0x37, 0x1d, 0x8e, 0x03, 0xeb, 0xc6, + 0xae, 0x83, 0xb3, 0x28, 0xc4, 0x8f, 0xbb, 0x1d, 0xf6, 0xad, 0x4e, 0xf7, 0xa7, 0x02, 0x5a, 0x73, + 0xbe, 0x84, 0xff, 0x66, 0xe8, 0x5e, 0x1d, 0xfe, 0x62, 0xa2, 0x22, 0x4c, 0x2a, 0xcf, 0xdd, 0x9d, + 0x3a, 0xe3, 0x39, 0x61, 0x19, 0xe4, 0xab, 0xcc, 0xc9, 0x28, 0xd3, 0x01, 0xcb, 0x42, 0x8b, 0x85, + 0xf8, 0xa3, 0xdf, 0x5b, 0x3d, 0x3f, 0xaa, 0xc7, 0x03, 0xcf, 0xfb, 0xac, 0x36, 0x07, 0xc6, 0xca, + 0x4b, 0x05, 0x34, 0x50, 0xa1, 0x99, 0x0b, 0x55, 0x1f, 0xe2, 0xab, 0xe4, 0x63, 0x2f, 0x15, 0xf1, + 0x9e, 0x8f, 0x67, 0x6e, 0xac, 0xf9, 0xef, 0x6a, 0xcb, 0x2c, 0x11, 0xf2, 0x52, 0x81, 0xd0, 0xfe, + 0x02, 0xa1, 0x99, 0x8b, 0x90, 0xbe, 0x79, 0x3d, 0xd1, 0x8f, 0x75, 0x7e, 0x03, 0x00, 0x00, 0xff, + 0xff, 0x46, 0xaf, 0xfd, 0xab, 0xf7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_placeholder_field.pb.go new file mode 100644 index 0000000..b43af9b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/hotel_placeholder_field.pb.go @@ -0,0 +1,224 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/hotel_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Hotel placeholder fields. +type HotelPlaceholderFieldEnum_HotelPlaceholderField int32 + +const ( + // Not specified. + HotelPlaceholderFieldEnum_UNSPECIFIED HotelPlaceholderFieldEnum_HotelPlaceholderField = 0 + // Used for return value only. 
Represents value unknown in this version. + HotelPlaceholderFieldEnum_UNKNOWN HotelPlaceholderFieldEnum_HotelPlaceholderField = 1 + // Data Type: STRING. Required. Unique ID. + HotelPlaceholderFieldEnum_PROPERTY_ID HotelPlaceholderFieldEnum_HotelPlaceholderField = 2 + // Data Type: STRING. Required. Main headline with property name to be shown + // in dynamic ad. + HotelPlaceholderFieldEnum_PROPERTY_NAME HotelPlaceholderFieldEnum_HotelPlaceholderField = 3 + // Data Type: STRING. Name of destination to be shown in dynamic ad. + HotelPlaceholderFieldEnum_DESTINATION_NAME HotelPlaceholderFieldEnum_HotelPlaceholderField = 4 + // Data Type: STRING. Description of destination to be shown in dynamic ad. + HotelPlaceholderFieldEnum_DESCRIPTION HotelPlaceholderFieldEnum_HotelPlaceholderField = 5 + // Data Type: STRING. Complete property address, including postal code. + HotelPlaceholderFieldEnum_ADDRESS HotelPlaceholderFieldEnum_HotelPlaceholderField = 6 + // Data Type: STRING. Price to be shown in the ad. + // Example: "100.00 USD" + HotelPlaceholderFieldEnum_PRICE HotelPlaceholderFieldEnum_HotelPlaceholderField = 7 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + HotelPlaceholderFieldEnum_FORMATTED_PRICE HotelPlaceholderFieldEnum_HotelPlaceholderField = 8 + // Data Type: STRING. Sale price to be shown in the ad. + // Example: "80.00 USD" + HotelPlaceholderFieldEnum_SALE_PRICE HotelPlaceholderFieldEnum_HotelPlaceholderField = 9 + // Data Type: STRING. Formatted sale price to be shown in the ad. + // Example: "On sale for $80.00", "$60 - $80" + HotelPlaceholderFieldEnum_FORMATTED_SALE_PRICE HotelPlaceholderFieldEnum_HotelPlaceholderField = 10 + // Data Type: URL. Image to be displayed in the ad. + HotelPlaceholderFieldEnum_IMAGE_URL HotelPlaceholderFieldEnum_HotelPlaceholderField = 11 + // Data Type: STRING. Category of property used to group like items together + // for recommendation engine. + HotelPlaceholderFieldEnum_CATEGORY HotelPlaceholderFieldEnum_HotelPlaceholderField = 12 + // Data Type: INT64. Star rating (1 to 5) used to group like items + // together for recommendation engine. + HotelPlaceholderFieldEnum_STAR_RATING HotelPlaceholderFieldEnum_HotelPlaceholderField = 13 + // Data Type: STRING_LIST. Keywords used for product retrieval. + HotelPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS HotelPlaceholderFieldEnum_HotelPlaceholderField = 14 + // Data Type: URL_LIST. Required. Final URLs for the ad when using Upgraded + // URLs. User will be redirected to these URLs when they click on an ad, or + // when they click on a specific flight for ads that show multiple + // flights. + HotelPlaceholderFieldEnum_FINAL_URLS HotelPlaceholderFieldEnum_HotelPlaceholderField = 15 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + HotelPlaceholderFieldEnum_FINAL_MOBILE_URLS HotelPlaceholderFieldEnum_HotelPlaceholderField = 16 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + HotelPlaceholderFieldEnum_TRACKING_URL HotelPlaceholderFieldEnum_HotelPlaceholderField = 17 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. 
+ HotelPlaceholderFieldEnum_ANDROID_APP_LINK HotelPlaceholderFieldEnum_HotelPlaceholderField = 18 + // Data Type: STRING_LIST. List of recommended property IDs to show together + // with this item. + HotelPlaceholderFieldEnum_SIMILAR_PROPERTY_IDS HotelPlaceholderFieldEnum_HotelPlaceholderField = 19 + // Data Type: STRING. iOS app link. + HotelPlaceholderFieldEnum_IOS_APP_LINK HotelPlaceholderFieldEnum_HotelPlaceholderField = 20 + // Data Type: INT64. iOS app store ID. + HotelPlaceholderFieldEnum_IOS_APP_STORE_ID HotelPlaceholderFieldEnum_HotelPlaceholderField = 21 +) + +var HotelPlaceholderFieldEnum_HotelPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PROPERTY_ID", + 3: "PROPERTY_NAME", + 4: "DESTINATION_NAME", + 5: "DESCRIPTION", + 6: "ADDRESS", + 7: "PRICE", + 8: "FORMATTED_PRICE", + 9: "SALE_PRICE", + 10: "FORMATTED_SALE_PRICE", + 11: "IMAGE_URL", + 12: "CATEGORY", + 13: "STAR_RATING", + 14: "CONTEXTUAL_KEYWORDS", + 15: "FINAL_URLS", + 16: "FINAL_MOBILE_URLS", + 17: "TRACKING_URL", + 18: "ANDROID_APP_LINK", + 19: "SIMILAR_PROPERTY_IDS", + 20: "IOS_APP_LINK", + 21: "IOS_APP_STORE_ID", +} +var HotelPlaceholderFieldEnum_HotelPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PROPERTY_ID": 2, + "PROPERTY_NAME": 3, + "DESTINATION_NAME": 4, + "DESCRIPTION": 5, + "ADDRESS": 6, + "PRICE": 7, + "FORMATTED_PRICE": 8, + "SALE_PRICE": 9, + "FORMATTED_SALE_PRICE": 10, + "IMAGE_URL": 11, + "CATEGORY": 12, + "STAR_RATING": 13, + "CONTEXTUAL_KEYWORDS": 14, + "FINAL_URLS": 15, + "FINAL_MOBILE_URLS": 16, + "TRACKING_URL": 17, + "ANDROID_APP_LINK": 18, + "SIMILAR_PROPERTY_IDS": 19, + "IOS_APP_LINK": 20, + "IOS_APP_STORE_ID": 21, +} + +func (x HotelPlaceholderFieldEnum_HotelPlaceholderField) String() string { + return proto.EnumName(HotelPlaceholderFieldEnum_HotelPlaceholderField_name, int32(x)) +} +func (HotelPlaceholderFieldEnum_HotelPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_hotel_placeholder_field_f3cb215d888e27b9, []int{0, 0} +} + +// Values for Hotel placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. 
+type HotelPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelPlaceholderFieldEnum) Reset() { *m = HotelPlaceholderFieldEnum{} } +func (m *HotelPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*HotelPlaceholderFieldEnum) ProtoMessage() {} +func (*HotelPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_placeholder_field_f3cb215d888e27b9, []int{0} +} +func (m *HotelPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *HotelPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *HotelPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelPlaceholderFieldEnum.Merge(dst, src) +} +func (m *HotelPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_HotelPlaceholderFieldEnum.Size(m) +} +func (m *HotelPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_HotelPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*HotelPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.HotelPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.HotelPlaceholderFieldEnum_HotelPlaceholderField", HotelPlaceholderFieldEnum_HotelPlaceholderField_name, HotelPlaceholderFieldEnum_HotelPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/hotel_placeholder_field.proto", fileDescriptor_hotel_placeholder_field_f3cb215d888e27b9) +} + +var fileDescriptor_hotel_placeholder_field_f3cb215d888e27b9 = []byte{ + // 531 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x6e, 0xda, 0x30, + 0x18, 0x5d, 0x61, 0xfd, 0x73, 0xa1, 0x18, 0x03, 0xda, 0x6f, 0x2f, 0xda, 0x07, 0x48, 0x84, 0x76, + 0x97, 0x5e, 0x99, 0xc4, 0x64, 0x16, 0xc1, 0x89, 0x6c, 0x43, 0xc7, 0x84, 0x14, 0x65, 0x4d, 0x96, + 0x22, 0x85, 0x18, 0x11, 0xda, 0x07, 0xda, 0xe5, 0x6e, 0xf6, 0x1e, 0x93, 0xf6, 0x22, 0xbb, 0xda, + 0x23, 0x4c, 0x4e, 0x0a, 0x54, 0xd3, 0xb6, 0x9b, 0xe8, 0xf3, 0x39, 0xdf, 0x39, 0xfe, 0xe2, 0xef, + 0x80, 0xeb, 0x54, 0xa9, 0x34, 0x4b, 0xcc, 0x28, 0x2e, 0xcc, 0xaa, 0xd4, 0xd5, 0x43, 0xdf, 0x4c, + 0xf2, 0xfb, 0x65, 0x61, 0xde, 0xa9, 0x4d, 0x92, 0x85, 0xab, 0x2c, 0xba, 0x4d, 0xee, 0x54, 0x16, + 0x27, 0xeb, 0xf0, 0xf3, 0x22, 0xc9, 0x62, 0x63, 0xb5, 0x56, 0x1b, 0x85, 0x2e, 0x2a, 0x85, 0x11, + 0xc5, 0x85, 0xb1, 0x13, 0x1b, 0x0f, 0x7d, 0xa3, 0x14, 0xbf, 0x7e, 0xbb, 0xf5, 0x5e, 0x2d, 0xcc, + 0x28, 0xcf, 0xd5, 0x26, 0xda, 0x2c, 0x54, 0x5e, 0x54, 0xe2, 0xab, 0x1f, 0x75, 0xf0, 0xea, 0xbd, + 0xb6, 0x0f, 0xf6, 0xee, 0x43, 0x6d, 0x4e, 0xf2, 0xfb, 0xe5, 0xd5, 0xb7, 0x3a, 0xe8, 0xfd, 0x95, + 0x45, 0x2d, 0x70, 0x36, 0x61, 0x22, 0x20, 0x36, 0x1d, 0x52, 0xe2, 0xc0, 0x67, 0xe8, 0x0c, 0x1c, + 0x4f, 0xd8, 0x88, 0xf9, 0x37, 0x0c, 0x1e, 0x68, 0x36, 0xe0, 0x7e, 0x40, 0xb8, 0x9c, 0x85, 0xd4, + 0x81, 0x35, 0xd4, 0x06, 0xcd, 0x1d, 0xc0, 0xf0, 0x98, 0xc0, 0x3a, 0xea, 0x02, 0xe8, 0x10, 0x21, + 0x29, 0xc3, 0x92, 0xfa, 0xac, 0x42, 0x9f, 0x6b, 0xa5, 0x43, 0x84, 0xcd, 0x69, 0xa0, 0x51, 0x78, + 0xa8, 0x7d, 0xb1, 0xe3, 0x70, 0x22, 0x04, 0x3c, 0x42, 0xa7, 0xe0, 0x30, 0xe0, 0xd4, 0x26, 0xf0, + 0x18, 0x75, 0x40, 0x6b, 0xe8, 0xf3, 0x31, 0x96, 0x92, 0x38, 
0x61, 0x05, 0x9e, 0xa0, 0x73, 0x00, + 0x04, 0xf6, 0xc8, 0xe3, 0xf9, 0x14, 0xbd, 0x04, 0xdd, 0x7d, 0xd3, 0x13, 0x06, 0xa0, 0x26, 0x38, + 0xa5, 0x63, 0xec, 0x92, 0x70, 0xc2, 0x3d, 0x78, 0x86, 0x1a, 0xe0, 0xc4, 0xc6, 0x92, 0xb8, 0x3e, + 0x9f, 0xc1, 0x86, 0x1e, 0x42, 0x48, 0xcc, 0x43, 0x8e, 0x25, 0x65, 0x2e, 0x6c, 0xa2, 0x17, 0xa0, + 0x63, 0xfb, 0x4c, 0x92, 0x0f, 0x72, 0x82, 0xbd, 0x70, 0x44, 0x66, 0x37, 0x3e, 0x77, 0x04, 0x3c, + 0xd7, 0x17, 0x0e, 0x29, 0xc3, 0x9e, 0xb6, 0x11, 0xb0, 0x85, 0x7a, 0xa0, 0x5d, 0x9d, 0xc7, 0xfe, + 0x80, 0x7a, 0xa4, 0x82, 0x21, 0x82, 0xa0, 0x21, 0x39, 0xb6, 0x47, 0x94, 0xb9, 0xe5, 0x85, 0x6d, + 0xfd, 0xf7, 0x98, 0x39, 0xdc, 0xa7, 0x4e, 0x88, 0x83, 0x20, 0xf4, 0x28, 0x1b, 0x41, 0xa4, 0xe7, + 0x15, 0x74, 0x4c, 0x3d, 0xcc, 0xc3, 0x27, 0xef, 0x27, 0x60, 0x47, 0x3b, 0x50, 0x5f, 0xec, 0x7b, + 0xbb, 0xda, 0x61, 0x8b, 0x08, 0xe9, 0x73, 0xa2, 0x1f, 0xba, 0x37, 0xf8, 0x75, 0x00, 0x2e, 0x6f, + 0xd5, 0xd2, 0xf8, 0x6f, 0x26, 0x06, 0x6f, 0xca, 0xa5, 0x16, 0x7f, 0x6e, 0x35, 0xd0, 0x91, 0x08, + 0x0e, 0x3e, 0x0e, 0x1e, 0xd5, 0xa9, 0xca, 0xa2, 0x3c, 0x35, 0xd4, 0x3a, 0x35, 0xd3, 0x24, 0x2f, + 0x03, 0xb3, 0x8d, 0xe7, 0x6a, 0x51, 0xfc, 0x23, 0xad, 0xd7, 0xe5, 0xf7, 0x4b, 0xad, 0xee, 0x62, + 0xfc, 0xb5, 0x76, 0xe1, 0x56, 0x56, 0x38, 0x2e, 0x8c, 0xaa, 0xd4, 0xd5, 0xb4, 0x6f, 0xe8, 0x7c, + 0x15, 0xdf, 0xb7, 0xfc, 0x1c, 0xc7, 0xc5, 0x7c, 0xc7, 0xcf, 0xa7, 0xfd, 0x79, 0xc9, 0xff, 0xac, + 0x5d, 0x56, 0xa0, 0x65, 0xe1, 0xb8, 0xb0, 0xac, 0x5d, 0x87, 0x65, 0x4d, 0xfb, 0x96, 0x55, 0xf6, + 0x7c, 0x3a, 0x2a, 0x07, 0x7b, 0xf7, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xe6, 0xcb, 0x77, 0x6c, 0x45, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/income_range_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/income_range_type.pb.go new file mode 100644 index 0000000..f0c7b62 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/income_range_type.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/income_range_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of demographic income ranges (e.g. between 0% to 50%). +type IncomeRangeTypeEnum_IncomeRangeType int32 + +const ( + // Not specified. + IncomeRangeTypeEnum_UNSPECIFIED IncomeRangeTypeEnum_IncomeRangeType = 0 + // Used for return value only. Represents value unknown in this version. + IncomeRangeTypeEnum_UNKNOWN IncomeRangeTypeEnum_IncomeRangeType = 1 + // 0%-50%. + IncomeRangeTypeEnum_INCOME_RANGE_0_50 IncomeRangeTypeEnum_IncomeRangeType = 510001 + // 50% to 60%. + IncomeRangeTypeEnum_INCOME_RANGE_50_60 IncomeRangeTypeEnum_IncomeRangeType = 510002 + // 60% to 70%. + IncomeRangeTypeEnum_INCOME_RANGE_60_70 IncomeRangeTypeEnum_IncomeRangeType = 510003 + // 70% to 80%. 
+ IncomeRangeTypeEnum_INCOME_RANGE_70_80 IncomeRangeTypeEnum_IncomeRangeType = 510004 + // 80% to 90%. + IncomeRangeTypeEnum_INCOME_RANGE_80_90 IncomeRangeTypeEnum_IncomeRangeType = 510005 + // Greater than 90%. + IncomeRangeTypeEnum_INCOME_RANGE_90_UP IncomeRangeTypeEnum_IncomeRangeType = 510006 + // Undetermined income range. + IncomeRangeTypeEnum_INCOME_RANGE_UNDETERMINED IncomeRangeTypeEnum_IncomeRangeType = 510000 +) + +var IncomeRangeTypeEnum_IncomeRangeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 510001: "INCOME_RANGE_0_50", + 510002: "INCOME_RANGE_50_60", + 510003: "INCOME_RANGE_60_70", + 510004: "INCOME_RANGE_70_80", + 510005: "INCOME_RANGE_80_90", + 510006: "INCOME_RANGE_90_UP", + 510000: "INCOME_RANGE_UNDETERMINED", +} +var IncomeRangeTypeEnum_IncomeRangeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INCOME_RANGE_0_50": 510001, + "INCOME_RANGE_50_60": 510002, + "INCOME_RANGE_60_70": 510003, + "INCOME_RANGE_70_80": 510004, + "INCOME_RANGE_80_90": 510005, + "INCOME_RANGE_90_UP": 510006, + "INCOME_RANGE_UNDETERMINED": 510000, +} + +func (x IncomeRangeTypeEnum_IncomeRangeType) String() string { + return proto.EnumName(IncomeRangeTypeEnum_IncomeRangeType_name, int32(x)) +} +func (IncomeRangeTypeEnum_IncomeRangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_income_range_type_739e42fc80e579be, []int{0, 0} +} + +// Container for enum describing the type of demographic income ranges. +type IncomeRangeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IncomeRangeTypeEnum) Reset() { *m = IncomeRangeTypeEnum{} } +func (m *IncomeRangeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*IncomeRangeTypeEnum) ProtoMessage() {} +func (*IncomeRangeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_income_range_type_739e42fc80e579be, []int{0} +} +func (m *IncomeRangeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IncomeRangeTypeEnum.Unmarshal(m, b) +} +func (m *IncomeRangeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IncomeRangeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *IncomeRangeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_IncomeRangeTypeEnum.Merge(dst, src) +} +func (m *IncomeRangeTypeEnum) XXX_Size() int { + return xxx_messageInfo_IncomeRangeTypeEnum.Size(m) +} +func (m *IncomeRangeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_IncomeRangeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_IncomeRangeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*IncomeRangeTypeEnum)(nil), "google.ads.googleads.v1.enums.IncomeRangeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.IncomeRangeTypeEnum_IncomeRangeType", IncomeRangeTypeEnum_IncomeRangeType_name, IncomeRangeTypeEnum_IncomeRangeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/income_range_type.proto", fileDescriptor_income_range_type_739e42fc80e579be) +} + +var fileDescriptor_income_range_type_739e42fc80e579be = []byte{ + // 385 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x5d, 0xca, 0xd3, 0x40, + 0x18, 0x85, 0x4d, 0x04, 0x85, 0xf9, 0x2e, 0xbe, 0x31, 0x0a, 0xfe, 0xe0, 0x47, 0x68, 0x17, 0x30, + 0x99, 0x20, 0xfd, 0x1b, 0xaf, 0xd2, 0x76, 0x2c, 0x41, 0x3a, 0x0d, 0xb5, 0xa9, 0x20, 0x81, 0x21, + 0x36, 0x21, 0x04, 0x9a, 0x99, 
0xd0, 0x49, 0x0b, 0xbd, 0x76, 0x03, 0x5d, 0x83, 0x97, 0x5e, 0xf9, + 0xbf, 0x08, 0x77, 0xa2, 0x6e, 0x42, 0x92, 0xd8, 0x42, 0x4b, 0xf4, 0x66, 0x38, 0xbc, 0xcf, 0x7b, + 0x0e, 0xc3, 0x79, 0x41, 0x27, 0x91, 0x32, 0x59, 0xc7, 0x56, 0x18, 0x29, 0xab, 0x96, 0xa5, 0xda, + 0xd9, 0x56, 0x2c, 0xb6, 0x99, 0xb2, 0x52, 0xb1, 0x92, 0x59, 0xcc, 0x37, 0xa1, 0x48, 0x62, 0x5e, + 0xec, 0xf3, 0x18, 0xe5, 0x1b, 0x59, 0x48, 0xe3, 0xa6, 0xde, 0x45, 0x61, 0xa4, 0xd0, 0xc9, 0x86, + 0x76, 0x36, 0xaa, 0x6c, 0x4f, 0x9e, 0x1e, 0x53, 0xf3, 0xd4, 0x0a, 0x85, 0x90, 0x45, 0x58, 0xa4, + 0x52, 0xa8, 0xda, 0xdc, 0x7e, 0xa7, 0x83, 0xfb, 0x6e, 0x15, 0x3c, 0x2f, 0x73, 0x17, 0xfb, 0x3c, + 0xa6, 0x62, 0x9b, 0xb5, 0x7f, 0x6b, 0xe0, 0xfa, 0x62, 0x6e, 0x5c, 0x83, 0x2b, 0x9f, 0xbd, 0xf2, + 0xe8, 0xc8, 0x7d, 0xe1, 0xd2, 0x31, 0xbc, 0x65, 0x5c, 0x81, 0xbb, 0x3e, 0x7b, 0xc9, 0x66, 0xaf, + 0x19, 0xd4, 0x8c, 0x87, 0xe0, 0x9e, 0xcb, 0x46, 0xb3, 0x29, 0xe5, 0x73, 0x87, 0x4d, 0x28, 0xc7, + 0xbc, 0x83, 0xe1, 0xa7, 0x83, 0x69, 0x3c, 0x02, 0xc6, 0x19, 0xe8, 0x60, 0xde, 0xc5, 0xf0, 0x73, + 0x03, 0xe9, 0x62, 0xde, 0xc3, 0xf0, 0x4b, 0x03, 0xe9, 0x61, 0xde, 0xc7, 0xf0, 0x6b, 0x03, 0xe9, + 0x63, 0x3e, 0xc0, 0xf0, 0x5b, 0x03, 0x19, 0x60, 0xee, 0x7b, 0xf0, 0xfb, 0xc1, 0x34, 0x4c, 0xf0, + 0xf8, 0x8c, 0xf8, 0x6c, 0x4c, 0x17, 0x74, 0x3e, 0x75, 0x19, 0x1d, 0xc3, 0x8f, 0x07, 0x73, 0xf8, + 0x53, 0x03, 0xad, 0x95, 0xcc, 0xd0, 0x7f, 0x9b, 0x1c, 0x3e, 0xb8, 0x28, 0xc4, 0x2b, 0x1b, 0xf4, + 0xb4, 0x37, 0xc3, 0xbf, 0xb6, 0x44, 0xae, 0x43, 0x91, 0x20, 0xb9, 0x49, 0xac, 0x24, 0x16, 0x55, + 0xbf, 0xc7, 0x3b, 0xe6, 0xa9, 0xfa, 0xc7, 0x59, 0x9f, 0x57, 0xef, 0x7b, 0xfd, 0xf6, 0xc4, 0x71, + 0x3e, 0xe8, 0x37, 0x93, 0x3a, 0xca, 0x89, 0x14, 0xaa, 0x65, 0xa9, 0x96, 0x36, 0x2a, 0x8f, 0xa2, + 0x7e, 0x1c, 0x79, 0xe0, 0x44, 0x2a, 0x38, 0xf1, 0x60, 0x69, 0x07, 0x15, 0xff, 0xa5, 0xb7, 0xea, + 0x21, 0x21, 0x4e, 0xa4, 0x08, 0x39, 0x6d, 0x10, 0xb2, 0xb4, 0x09, 0xa9, 0x76, 0xde, 0xde, 0xa9, + 0x3e, 0xf6, 0xec, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6b, 0x2f, 0x0a, 0x92, 0x6e, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_event_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_event_type.pb.go new file mode 100644 index 0000000..6f8b1b5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_event_type.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/interaction_event_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible types of payable and free interactions. +type InteractionEventTypeEnum_InteractionEventType int32 + +const ( + // Not specified. + InteractionEventTypeEnum_UNSPECIFIED InteractionEventTypeEnum_InteractionEventType = 0 + // Used for return value only. 
Represents value unknown in this version. + InteractionEventTypeEnum_UNKNOWN InteractionEventTypeEnum_InteractionEventType = 1 + // Click to site. In most cases, this interaction navigates to an external + // location, usually the advertiser's landing page. This is also the default + // InteractionEventType for click events. + InteractionEventTypeEnum_CLICK InteractionEventTypeEnum_InteractionEventType = 2 + // The user's expressed intent to engage with the ad in-place. + InteractionEventTypeEnum_ENGAGEMENT InteractionEventTypeEnum_InteractionEventType = 3 + // User viewed a video ad. + InteractionEventTypeEnum_VIDEO_VIEW InteractionEventTypeEnum_InteractionEventType = 4 + // The default InteractionEventType for ad conversion events. + // This is used when an ad conversion row does NOT indicate + // that the free interactions (i.e., the ad conversions) + // should be 'promoted' and reported as part of the core metrics. + // These are simply other (ad) conversions. + InteractionEventTypeEnum_NONE InteractionEventTypeEnum_InteractionEventType = 5 +) + +var InteractionEventTypeEnum_InteractionEventType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CLICK", + 3: "ENGAGEMENT", + 4: "VIDEO_VIEW", + 5: "NONE", +} +var InteractionEventTypeEnum_InteractionEventType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CLICK": 2, + "ENGAGEMENT": 3, + "VIDEO_VIEW": 4, + "NONE": 5, +} + +func (x InteractionEventTypeEnum_InteractionEventType) String() string { + return proto.EnumName(InteractionEventTypeEnum_InteractionEventType_name, int32(x)) +} +func (InteractionEventTypeEnum_InteractionEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_interaction_event_type_18ad80b1ed99b3b2, []int{0, 0} +} + +// Container for enum describing types of payable and free interactions. 
+type InteractionEventTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InteractionEventTypeEnum) Reset() { *m = InteractionEventTypeEnum{} } +func (m *InteractionEventTypeEnum) String() string { return proto.CompactTextString(m) } +func (*InteractionEventTypeEnum) ProtoMessage() {} +func (*InteractionEventTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_interaction_event_type_18ad80b1ed99b3b2, []int{0} +} +func (m *InteractionEventTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InteractionEventTypeEnum.Unmarshal(m, b) +} +func (m *InteractionEventTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InteractionEventTypeEnum.Marshal(b, m, deterministic) +} +func (dst *InteractionEventTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_InteractionEventTypeEnum.Merge(dst, src) +} +func (m *InteractionEventTypeEnum) XXX_Size() int { + return xxx_messageInfo_InteractionEventTypeEnum.Size(m) +} +func (m *InteractionEventTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_InteractionEventTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_InteractionEventTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*InteractionEventTypeEnum)(nil), "google.ads.googleads.v1.enums.InteractionEventTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.InteractionEventTypeEnum_InteractionEventType", InteractionEventTypeEnum_InteractionEventType_name, InteractionEventTypeEnum_InteractionEventType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/interaction_event_type.proto", fileDescriptor_interaction_event_type_18ad80b1ed99b3b2) +} + +var fileDescriptor_interaction_event_type_18ad80b1ed99b3b2 = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4e, 0xf2, 0x30, + 0x18, 0xfd, 0x37, 0xe0, 0x57, 0x4b, 0xa2, 0xcb, 0xe2, 0x85, 0x1a, 0xb9, 0x80, 0x07, 0xe8, 0xb2, + 0x78, 0x57, 0xaf, 0x06, 0xd4, 0x65, 0x41, 0x0b, 0x89, 0x30, 0x12, 0xb3, 0x84, 0x4c, 0xd6, 0x34, + 0x4b, 0xa0, 0x5d, 0x68, 0x21, 0xe1, 0x01, 0x7c, 0x11, 0x2f, 0x7d, 0x14, 0x1f, 0xc5, 0x0b, 0x9f, + 0xc1, 0xb4, 0x15, 0xbc, 0x41, 0x6f, 0x9a, 0xd3, 0xef, 0x7c, 0xe7, 0xe4, 0x3b, 0x07, 0x20, 0x26, + 0x04, 0x5b, 0xd0, 0x20, 0x2f, 0x64, 0x60, 0xa1, 0x46, 0x9b, 0x30, 0xa0, 0x7c, 0xbd, 0x94, 0x41, + 0xc9, 0x15, 0x5d, 0xe5, 0x73, 0x55, 0x0a, 0x3e, 0xa3, 0x1b, 0xca, 0xd5, 0x4c, 0x6d, 0x2b, 0x0a, + 0xab, 0x95, 0x50, 0xc2, 0x6f, 0x59, 0x01, 0xcc, 0x0b, 0x09, 0xf7, 0x5a, 0xb8, 0x09, 0xa1, 0xd1, + 0x5e, 0x5d, 0xef, 0xac, 0xab, 0x32, 0xc8, 0x39, 0x17, 0x2a, 0xd7, 0x36, 0xd2, 0x8a, 0x3b, 0x2f, + 0x0e, 0xb8, 0x48, 0x7e, 0xdc, 0xb1, 0x36, 0x1f, 0x6f, 0x2b, 0x8a, 0xf9, 0x7a, 0xd9, 0x29, 0xc1, + 0xf9, 0x21, 0xce, 0x3f, 0x03, 0xcd, 0x09, 0x79, 0x1c, 0xe1, 0x5e, 0x72, 0x97, 0xe0, 0xbe, 0xf7, + 0xcf, 0x6f, 0x82, 0xa3, 0x09, 0x19, 0x90, 0xe1, 0x94, 0x78, 0x8e, 0x7f, 0x02, 0x1a, 0xbd, 0xfb, + 0xa4, 0x37, 0xf0, 0x5c, 0xff, 0x14, 0x00, 0x4c, 0xe2, 0x28, 0xc6, 0x0f, 0x98, 0x8c, 0xbd, 0x9a, + 0xfe, 0xa7, 0x49, 0x1f, 0x0f, 0x67, 0x69, 0x82, 0xa7, 0x5e, 0xdd, 0x3f, 0x06, 0x75, 0x32, 0x24, + 0xd8, 0x6b, 0x74, 0x3f, 0x1d, 0xd0, 0x9e, 0x8b, 0x25, 0xfc, 0x33, 0x4b, 0xf7, 0xf2, 0xd0, 0x39, + 0x23, 0x1d, 0x64, 0xe4, 0x3c, 0x75, 0xbf, 0xb5, 0x4c, 0x2c, 0x72, 0xce, 0xa0, 0x58, 0xb1, 0x80, + 0x51, 0x6e, 0x62, 0xee, 0x3a, 0xad, 0x4a, 0xf9, 0x4b, 0xc5, 0xb7, 0xe6, 0x7d, 0x75, 
0x6b, 0x71, + 0x14, 0xbd, 0xb9, 0xad, 0xd8, 0x5a, 0x45, 0x85, 0x84, 0x16, 0x6a, 0x94, 0x86, 0x50, 0xd7, 0x22, + 0xdf, 0x77, 0x7c, 0x16, 0x15, 0x32, 0xdb, 0xf3, 0x59, 0x1a, 0x66, 0x86, 0xff, 0x70, 0xdb, 0x76, + 0x88, 0x50, 0x54, 0x48, 0x84, 0xf6, 0x1b, 0x08, 0xa5, 0x21, 0x42, 0x66, 0xe7, 0xf9, 0xbf, 0x39, + 0xec, 0xe6, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x9c, 0x7b, 0x64, 0x4b, 0xfa, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_type.pb.go new file mode 100644 index 0000000..5ebe574 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/interaction_type.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/interaction_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible interaction types. +type InteractionTypeEnum_InteractionType int32 + +const ( + // Not specified. + InteractionTypeEnum_UNSPECIFIED InteractionTypeEnum_InteractionType = 0 + // Used for return value only. Represents value unknown in this version. + InteractionTypeEnum_UNKNOWN InteractionTypeEnum_InteractionType = 1 + // Calls. + InteractionTypeEnum_CALLS InteractionTypeEnum_InteractionType = 8000 +) + +var InteractionTypeEnum_InteractionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 8000: "CALLS", +} +var InteractionTypeEnum_InteractionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CALLS": 8000, +} + +func (x InteractionTypeEnum_InteractionType) String() string { + return proto.EnumName(InteractionTypeEnum_InteractionType_name, int32(x)) +} +func (InteractionTypeEnum_InteractionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_interaction_type_253dbad63b95615d, []int{0, 0} +} + +// Container for enum describing possible interaction types. 
+type InteractionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InteractionTypeEnum) Reset() { *m = InteractionTypeEnum{} } +func (m *InteractionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*InteractionTypeEnum) ProtoMessage() {} +func (*InteractionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_interaction_type_253dbad63b95615d, []int{0} +} +func (m *InteractionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InteractionTypeEnum.Unmarshal(m, b) +} +func (m *InteractionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InteractionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *InteractionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_InteractionTypeEnum.Merge(dst, src) +} +func (m *InteractionTypeEnum) XXX_Size() int { + return xxx_messageInfo_InteractionTypeEnum.Size(m) +} +func (m *InteractionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_InteractionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_InteractionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*InteractionTypeEnum)(nil), "google.ads.googleads.v1.enums.InteractionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.InteractionTypeEnum_InteractionType", InteractionTypeEnum_InteractionType_name, InteractionTypeEnum_InteractionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/interaction_type.proto", fileDescriptor_interaction_type_253dbad63b95615d) +} + +var fileDescriptor_interaction_type_253dbad63b95615d = []byte{ + // 291 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x49, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xd4, 0xbc, + 0xd2, 0xdc, 0x62, 0xfd, 0xcc, 0xbc, 0x92, 0xd4, 0xa2, 0xc4, 0xe4, 0x92, 0xcc, 0xfc, 0xbc, 0xf8, + 0x92, 0xca, 0x82, 0x54, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x59, 0x88, 0x52, 0xbd, 0xc4, + 0x94, 0x62, 0x3d, 0xb8, 0x2e, 0xbd, 0x32, 0x43, 0x3d, 0xb0, 0x2e, 0x29, 0x19, 0x98, 0xa1, 0x05, + 0x99, 0xfa, 0x89, 0x79, 0x79, 0xf9, 0x25, 0x89, 0x20, 0x03, 0x8a, 0x21, 0x9a, 0x95, 0x82, 0xb8, + 0x84, 0x3d, 0x11, 0xc6, 0x86, 0x54, 0x16, 0xa4, 0xba, 0xe6, 0x95, 0xe6, 0x2a, 0x59, 0x73, 0xf1, + 0xa3, 0x09, 0x0b, 0xf1, 0x73, 0x71, 0x87, 0xfa, 0x05, 0x07, 0xb8, 0x3a, 0x7b, 0xba, 0x79, 0xba, + 0xba, 0x08, 0x30, 0x08, 0x71, 0x73, 0xb1, 0x87, 0xfa, 0x79, 0xfb, 0xf9, 0x87, 0xfb, 0x09, 0x30, + 0x0a, 0x71, 0x71, 0xb1, 0x3a, 0x3b, 0xfa, 0xf8, 0x04, 0x0b, 0x1c, 0xb0, 0x73, 0x7a, 0xc9, 0xc8, + 0xa5, 0x98, 0x9c, 0x9f, 0xab, 0x87, 0xd7, 0x5d, 0x4e, 0x22, 0x68, 0x16, 0x04, 0x80, 0xdc, 0x13, + 0xc0, 0x18, 0xe5, 0x04, 0xd5, 0x96, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, + 0x9f, 0x9e, 0x9a, 0x07, 0x76, 0x2d, 0x2c, 0x50, 0x0a, 0x32, 0x8b, 0x71, 0x84, 0x91, 0x35, 0x98, + 0x5c, 0xc4, 0xc4, 0xec, 0xee, 0xe8, 0xb8, 0x8a, 0x49, 0xd6, 0x1d, 0x62, 0x94, 0x63, 0x4a, 0xb1, + 0x1e, 0x84, 0x09, 0x62, 0x85, 0x19, 0xea, 0x81, 0xfc, 0x58, 0x7c, 0x0a, 0x26, 0x1f, 0xe3, 0x98, + 0x52, 0x1c, 0x03, 0x97, 0x8f, 0x09, 0x33, 0x8c, 0x01, 0xcb, 0xbf, 0x62, 0x52, 0x84, 0x08, 0x5a, + 0x59, 0x39, 0xa6, 0x14, 0x5b, 0x59, 0xc1, 0x55, 0x58, 0x59, 0x85, 0x19, 0x5a, 0x59, 0x81, 0xd5, + 0x24, 0xb1, 0x81, 0x1d, 0x66, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0x71, 0x78, 0x48, 0x48, 0xbb, + 0x01, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/job_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/job_placeholder_field.pb.go new file mode 100644 index 0000000..1128981 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/job_placeholder_field.pb.go @@ -0,0 +1,209 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/job_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Job placeholder fields. +type JobPlaceholderFieldEnum_JobPlaceholderField int32 + +const ( + // Not specified. + JobPlaceholderFieldEnum_UNSPECIFIED JobPlaceholderFieldEnum_JobPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + JobPlaceholderFieldEnum_UNKNOWN JobPlaceholderFieldEnum_JobPlaceholderField = 1 + // Data Type: STRING. Required. If only JOB_ID is specified, then it must be + // unique. If both JOB_ID and LOCATION_ID are specified, then the + // pair must be unique. + // ID) pair must be unique. + JobPlaceholderFieldEnum_JOB_ID JobPlaceholderFieldEnum_JobPlaceholderField = 2 + // Data Type: STRING. Combination of JOB_ID and LOCATION_ID must be unique + // per offer. + JobPlaceholderFieldEnum_LOCATION_ID JobPlaceholderFieldEnum_JobPlaceholderField = 3 + // Data Type: STRING. Required. Main headline with job title to be shown in + // dynamic ad. + JobPlaceholderFieldEnum_TITLE JobPlaceholderFieldEnum_JobPlaceholderField = 4 + // Data Type: STRING. Job subtitle to be shown in dynamic ad. + JobPlaceholderFieldEnum_SUBTITLE JobPlaceholderFieldEnum_JobPlaceholderField = 5 + // Data Type: STRING. Description of job to be shown in dynamic ad. + JobPlaceholderFieldEnum_DESCRIPTION JobPlaceholderFieldEnum_JobPlaceholderField = 6 + // Data Type: URL. Image to be displayed in the ad. Highly recommended for + // image ads. + JobPlaceholderFieldEnum_IMAGE_URL JobPlaceholderFieldEnum_JobPlaceholderField = 7 + // Data Type: STRING. Category of property used to group like items together + // for recommendation engine. + JobPlaceholderFieldEnum_CATEGORY JobPlaceholderFieldEnum_JobPlaceholderField = 8 + // Data Type: STRING_LIST. Keywords used for product retrieval. + JobPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS JobPlaceholderFieldEnum_JobPlaceholderField = 9 + // Data Type: STRING. Complete property address, including postal code. + JobPlaceholderFieldEnum_ADDRESS JobPlaceholderFieldEnum_JobPlaceholderField = 10 + // Data Type: STRING. Salary or salary range of job to be shown in dynamic + // ad. + JobPlaceholderFieldEnum_SALARY JobPlaceholderFieldEnum_JobPlaceholderField = 11 + // Data Type: URL_LIST. Required. Final URLs to be used in ad when using + // Upgraded URLs; the more specific the better (e.g. 
the individual URL of a + // specific job and its location). + JobPlaceholderFieldEnum_FINAL_URLS JobPlaceholderFieldEnum_JobPlaceholderField = 12 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + JobPlaceholderFieldEnum_FINAL_MOBILE_URLS JobPlaceholderFieldEnum_JobPlaceholderField = 14 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + JobPlaceholderFieldEnum_TRACKING_URL JobPlaceholderFieldEnum_JobPlaceholderField = 15 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + JobPlaceholderFieldEnum_ANDROID_APP_LINK JobPlaceholderFieldEnum_JobPlaceholderField = 16 + // Data Type: STRING_LIST. List of recommended job IDs to show together with + // this item. + JobPlaceholderFieldEnum_SIMILAR_JOB_IDS JobPlaceholderFieldEnum_JobPlaceholderField = 17 + // Data Type: STRING. iOS app link. + JobPlaceholderFieldEnum_IOS_APP_LINK JobPlaceholderFieldEnum_JobPlaceholderField = 18 + // Data Type: INT64. iOS app store ID. + JobPlaceholderFieldEnum_IOS_APP_STORE_ID JobPlaceholderFieldEnum_JobPlaceholderField = 19 +) + +var JobPlaceholderFieldEnum_JobPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "JOB_ID", + 3: "LOCATION_ID", + 4: "TITLE", + 5: "SUBTITLE", + 6: "DESCRIPTION", + 7: "IMAGE_URL", + 8: "CATEGORY", + 9: "CONTEXTUAL_KEYWORDS", + 10: "ADDRESS", + 11: "SALARY", + 12: "FINAL_URLS", + 14: "FINAL_MOBILE_URLS", + 15: "TRACKING_URL", + 16: "ANDROID_APP_LINK", + 17: "SIMILAR_JOB_IDS", + 18: "IOS_APP_LINK", + 19: "IOS_APP_STORE_ID", +} +var JobPlaceholderFieldEnum_JobPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "JOB_ID": 2, + "LOCATION_ID": 3, + "TITLE": 4, + "SUBTITLE": 5, + "DESCRIPTION": 6, + "IMAGE_URL": 7, + "CATEGORY": 8, + "CONTEXTUAL_KEYWORDS": 9, + "ADDRESS": 10, + "SALARY": 11, + "FINAL_URLS": 12, + "FINAL_MOBILE_URLS": 14, + "TRACKING_URL": 15, + "ANDROID_APP_LINK": 16, + "SIMILAR_JOB_IDS": 17, + "IOS_APP_LINK": 18, + "IOS_APP_STORE_ID": 19, +} + +func (x JobPlaceholderFieldEnum_JobPlaceholderField) String() string { + return proto.EnumName(JobPlaceholderFieldEnum_JobPlaceholderField_name, int32(x)) +} +func (JobPlaceholderFieldEnum_JobPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_placeholder_field_cf233a1bf95849ac, []int{0, 0} +} + +// Values for Job placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. 
+type JobPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobPlaceholderFieldEnum) Reset() { *m = JobPlaceholderFieldEnum{} } +func (m *JobPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*JobPlaceholderFieldEnum) ProtoMessage() {} +func (*JobPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_job_placeholder_field_cf233a1bf95849ac, []int{0} +} +func (m *JobPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *JobPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *JobPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobPlaceholderFieldEnum.Merge(dst, src) +} +func (m *JobPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_JobPlaceholderFieldEnum.Size(m) +} +func (m *JobPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_JobPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_JobPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*JobPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.JobPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.JobPlaceholderFieldEnum_JobPlaceholderField", JobPlaceholderFieldEnum_JobPlaceholderField_name, JobPlaceholderFieldEnum_JobPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/job_placeholder_field.proto", fileDescriptor_job_placeholder_field_cf233a1bf95849ac) +} + +var fileDescriptor_job_placeholder_field_cf233a1bf95849ac = []byte{ + // 494 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6e, 0xd3, 0x30, + 0x14, 0x86, 0x69, 0xc6, 0xba, 0xd5, 0x2d, 0xab, 0xe7, 0x82, 0x26, 0x10, 0xbb, 0xd8, 0x1e, 0x20, + 0x51, 0xc5, 0x15, 0xe1, 0xca, 0x49, 0xdc, 0xca, 0x6d, 0x1a, 0x47, 0x71, 0xda, 0x51, 0x54, 0x29, + 0x4a, 0x97, 0x10, 0x8a, 0xd2, 0xb8, 0xaa, 0xbb, 0x3d, 0x10, 0x97, 0x3c, 0x0a, 0xf7, 0xbc, 0x04, + 0x17, 0x48, 0xbc, 0x01, 0x72, 0x4c, 0x3b, 0x09, 0xc1, 0x6e, 0xa2, 0xe3, 0x73, 0xbe, 0xff, 0xcf, + 0xb1, 0xcf, 0x01, 0x6f, 0x0b, 0x21, 0x8a, 0x32, 0xb7, 0xd2, 0x4c, 0x5a, 0x3a, 0x54, 0xd1, 0x7d, + 0xdf, 0xca, 0xab, 0xbb, 0xb5, 0xb4, 0x3e, 0x8b, 0x65, 0xb2, 0x29, 0xd3, 0xdb, 0xfc, 0x93, 0x28, + 0xb3, 0x7c, 0x9b, 0x7c, 0x5c, 0xe5, 0x65, 0x66, 0x6e, 0xb6, 0x62, 0x27, 0xd0, 0xa5, 0xe6, 0xcd, + 0x34, 0x93, 0xe6, 0x41, 0x6a, 0xde, 0xf7, 0xcd, 0x5a, 0xfa, 0xea, 0xf5, 0xde, 0x79, 0xb3, 0xb2, + 0xd2, 0xaa, 0x12, 0xbb, 0x74, 0xb7, 0x12, 0x95, 0xd4, 0xe2, 0xeb, 0x5f, 0x06, 0xb8, 0x18, 0x89, + 0x65, 0xf8, 0xe0, 0x3d, 0x50, 0xd6, 0xa4, 0xba, 0x5b, 0x5f, 0x7f, 0x37, 0x40, 0xef, 0x1f, 0x35, + 0xd4, 0x05, 0xed, 0x69, 0xc0, 0x43, 0xe2, 0xd2, 0x01, 0x25, 0x1e, 0x7c, 0x82, 0xda, 0xe0, 0x64, + 0x1a, 0x8c, 0x03, 0x76, 0x13, 0xc0, 0x06, 0x02, 0xa0, 0x39, 0x62, 0x4e, 0x42, 0x3d, 0x68, 0x28, + 0xd2, 0x67, 0x2e, 0x8e, 0x29, 0x0b, 0x54, 0xe2, 0x08, 0xb5, 0xc0, 0x71, 0x4c, 0x63, 0x9f, 0xc0, + 0xa7, 0xa8, 0x03, 0x4e, 0xf9, 0xd4, 0xd1, 0xa7, 0x63, 0x45, 0x7a, 0x84, 0xbb, 0x11, 0x0d, 0x15, + 0x0c, 0x9b, 0xe8, 0x19, 0x68, 0xd1, 0x09, 0x1e, 0x92, 0x64, 0x1a, 0xf9, 0xf0, 0x44, 0xd1, 0x2e, + 0x8e, 0xc9, 0x90, 0x45, 0x73, 0x78, 0x8a, 0x2e, 0x40, 0xcf, 0x65, 0x41, 0x4c, 0xde, 0xc7, 0x53, + 0xec, 0x27, 0x63, 
0x32, 0xbf, 0x61, 0x91, 0xc7, 0x61, 0x4b, 0x75, 0x82, 0x3d, 0x2f, 0x22, 0x9c, + 0x43, 0xa0, 0x3a, 0xe1, 0xd8, 0xc7, 0xd1, 0x1c, 0xb6, 0xd1, 0x19, 0x00, 0x03, 0x1a, 0x60, 0x5f, + 0xd9, 0x71, 0xd8, 0x41, 0x2f, 0xc0, 0xb9, 0x3e, 0x4f, 0x98, 0x43, 0x7d, 0xa2, 0xd3, 0x67, 0x08, + 0x82, 0x4e, 0x1c, 0x61, 0x77, 0x4c, 0x83, 0x61, 0xfd, 0xe3, 0x2e, 0x7a, 0x0e, 0x20, 0x0e, 0xbc, + 0x88, 0x51, 0x2f, 0xc1, 0x61, 0x98, 0xf8, 0x34, 0x18, 0x43, 0x88, 0x7a, 0xa0, 0xcb, 0xe9, 0x84, + 0xfa, 0x38, 0x4a, 0xf4, 0x65, 0x39, 0x3c, 0x57, 0x62, 0xca, 0xf8, 0x03, 0x86, 0x94, 0x78, 0x9f, + 0xe1, 0x31, 0x8b, 0x88, 0x7a, 0x84, 0x9e, 0xf3, 0xb3, 0x01, 0xae, 0x6e, 0xc5, 0xda, 0x7c, 0x74, + 0x6e, 0xce, 0xcb, 0x91, 0x58, 0xca, 0xbf, 0xdf, 0x3e, 0x54, 0x43, 0x0b, 0x1b, 0x1f, 0x9c, 0x3f, + 0xda, 0x42, 0x94, 0x69, 0x55, 0x98, 0x62, 0x5b, 0x58, 0x45, 0x5e, 0xd5, 0x23, 0xdd, 0xaf, 0xcf, + 0x66, 0x25, 0xff, 0xb3, 0x4d, 0xef, 0xea, 0xef, 0x17, 0xe3, 0x68, 0x88, 0xf1, 0x57, 0xe3, 0x72, + 0xa8, 0xad, 0x70, 0x26, 0x4d, 0x1d, 0xaa, 0x68, 0xd6, 0x37, 0xd5, 0x0e, 0xc8, 0x6f, 0xfb, 0xfa, + 0x02, 0x67, 0x72, 0x71, 0xa8, 0x2f, 0x66, 0xfd, 0x45, 0x5d, 0xff, 0x61, 0x5c, 0xe9, 0xa4, 0x6d, + 0xe3, 0x4c, 0xda, 0xf6, 0x81, 0xb0, 0xed, 0x59, 0xdf, 0xb6, 0x6b, 0x66, 0xd9, 0xac, 0x1b, 0x7b, + 0xf3, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x9c, 0xc4, 0x8b, 0x2f, 0xe5, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_match_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_match_type.pb.go new file mode 100644 index 0000000..5789397 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_match_type.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/keyword_match_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible Keyword match types. +type KeywordMatchTypeEnum_KeywordMatchType int32 + +const ( + // Not specified. + KeywordMatchTypeEnum_UNSPECIFIED KeywordMatchTypeEnum_KeywordMatchType = 0 + // Used for return value only. Represents value unknown in this version. + KeywordMatchTypeEnum_UNKNOWN KeywordMatchTypeEnum_KeywordMatchType = 1 + // Exact match. + KeywordMatchTypeEnum_EXACT KeywordMatchTypeEnum_KeywordMatchType = 2 + // Phrase match. + KeywordMatchTypeEnum_PHRASE KeywordMatchTypeEnum_KeywordMatchType = 3 + // Broad match. 
+ KeywordMatchTypeEnum_BROAD KeywordMatchTypeEnum_KeywordMatchType = 4 +) + +var KeywordMatchTypeEnum_KeywordMatchType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EXACT", + 3: "PHRASE", + 4: "BROAD", +} +var KeywordMatchTypeEnum_KeywordMatchType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EXACT": 2, + "PHRASE": 3, + "BROAD": 4, +} + +func (x KeywordMatchTypeEnum_KeywordMatchType) String() string { + return proto.EnumName(KeywordMatchTypeEnum_KeywordMatchType_name, int32(x)) +} +func (KeywordMatchTypeEnum_KeywordMatchType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_match_type_61366195944217b7, []int{0, 0} +} + +// Message describing Keyword match types. +type KeywordMatchTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordMatchTypeEnum) Reset() { *m = KeywordMatchTypeEnum{} } +func (m *KeywordMatchTypeEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordMatchTypeEnum) ProtoMessage() {} +func (*KeywordMatchTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_match_type_61366195944217b7, []int{0} +} +func (m *KeywordMatchTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordMatchTypeEnum.Unmarshal(m, b) +} +func (m *KeywordMatchTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordMatchTypeEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordMatchTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordMatchTypeEnum.Merge(dst, src) +} +func (m *KeywordMatchTypeEnum) XXX_Size() int { + return xxx_messageInfo_KeywordMatchTypeEnum.Size(m) +} +func (m *KeywordMatchTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordMatchTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordMatchTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordMatchTypeEnum)(nil), "google.ads.googleads.v1.enums.KeywordMatchTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.KeywordMatchTypeEnum_KeywordMatchType", KeywordMatchTypeEnum_KeywordMatchType_name, KeywordMatchTypeEnum_KeywordMatchType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/keyword_match_type.proto", fileDescriptor_keyword_match_type_61366195944217b7) +} + +var fileDescriptor_keyword_match_type_61366195944217b7 = []byte{ + // 317 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4e, 0xf2, 0x30, + 0x18, 0xfe, 0x18, 0x9f, 0x18, 0xcb, 0x81, 0xcb, 0xa2, 0x27, 0x46, 0x0e, 0xe0, 0x02, 0xba, 0x2c, + 0x26, 0x1e, 0xd4, 0xa3, 0x0e, 0x26, 0x12, 0xe2, 0x58, 0xf8, 0xd3, 0x98, 0x25, 0xa4, 0xd2, 0xa6, + 0xa2, 0xac, 0x5d, 0xe8, 0xc0, 0x70, 0x3b, 0x1e, 0x7a, 0x29, 0x5e, 0x0a, 0x57, 0x61, 0xda, 0xca, + 0x0e, 0x48, 0xf4, 0xa4, 0x79, 0xf2, 0x3e, 0x3f, 0x7d, 0xde, 0x17, 0x5c, 0x73, 0x29, 0xf9, 0x92, + 0xf9, 0x84, 0x2a, 0xdf, 0x42, 0x8d, 0x36, 0x81, 0xcf, 0xc4, 0x3a, 0x53, 0xfe, 0x1b, 0xdb, 0xbe, + 0xcb, 0x15, 0x9d, 0x65, 0xa4, 0x98, 0xbf, 0xcc, 0x8a, 0x6d, 0xce, 0x60, 0xbe, 0x92, 0x85, 0xf4, + 0x1a, 0x56, 0x0c, 0x09, 0x55, 0xb0, 0xf4, 0xc1, 0x4d, 0x00, 0x8d, 0xef, 0xe2, 0x72, 0x1f, 0x9b, + 0x2f, 0x7c, 0x22, 0x84, 0x2c, 0x48, 0xb1, 0x90, 0x42, 0x59, 0x73, 0xeb, 0x15, 0x9c, 0xf5, 0x6d, + 0xf0, 0xbd, 0xce, 0x1d, 0x6f, 0x73, 0x16, 0x89, 0x75, 0xd6, 0x1a, 0x02, 0xf7, 0x70, 0xee, 0x9d, + 0x82, 0xfa, 0x24, 0x1e, 0x25, 0x51, 0xbb, 0x77, 0xdb, 0x8b, 
0x3a, 0xee, 0x3f, 0xaf, 0x0e, 0x8e, + 0x27, 0x71, 0x3f, 0x1e, 0x3c, 0xc4, 0x6e, 0xc5, 0x3b, 0x01, 0x47, 0xd1, 0x23, 0x6e, 0x8f, 0x5d, + 0xc7, 0x03, 0xa0, 0x96, 0xdc, 0x0d, 0xf1, 0x28, 0x72, 0xab, 0x7a, 0x1c, 0x0e, 0x07, 0xb8, 0xe3, + 0xfe, 0x0f, 0x77, 0x15, 0xd0, 0x9c, 0xcb, 0x0c, 0xfe, 0xd9, 0x37, 0x3c, 0x3f, 0xfc, 0x37, 0xd1, + 0x45, 0x93, 0xca, 0x53, 0xf8, 0xe3, 0xe3, 0x72, 0x49, 0x04, 0x87, 0x72, 0xc5, 0x7d, 0xce, 0x84, + 0x59, 0x63, 0x7f, 0xaf, 0x7c, 0xa1, 0x7e, 0x39, 0xdf, 0x8d, 0x79, 0x3f, 0x9c, 0x6a, 0x17, 0xe3, + 0x4f, 0xa7, 0xd1, 0xb5, 0x51, 0x98, 0x2a, 0x68, 0xa1, 0x46, 0xd3, 0x00, 0xea, 0xdd, 0xd5, 0xd7, + 0x9e, 0x4f, 0x31, 0x55, 0x69, 0xc9, 0xa7, 0xd3, 0x20, 0x35, 0xfc, 0xce, 0x69, 0xda, 0x21, 0x42, + 0x98, 0x2a, 0x84, 0x4a, 0x05, 0x42, 0xd3, 0x00, 0x21, 0xa3, 0x79, 0xae, 0x99, 0x62, 0x57, 0xdf, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x57, 0x9e, 0x75, 0xf2, 0xd6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_competition_level.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_competition_level.pb.go new file mode 100644 index 0000000..e93741b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_competition_level.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/keyword_plan_competition_level.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Competition level of a keyword. +type KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel int32 + +const ( + // Not specified. + KeywordPlanCompetitionLevelEnum_UNSPECIFIED KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel = 0 + // The value is unknown in this version. + KeywordPlanCompetitionLevelEnum_UNKNOWN KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel = 1 + // Low competition. + KeywordPlanCompetitionLevelEnum_LOW KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel = 2 + // Medium competition. + KeywordPlanCompetitionLevelEnum_MEDIUM KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel = 3 + // High competition. 
+ KeywordPlanCompetitionLevelEnum_HIGH KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel = 4 +) + +var KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LOW", + 3: "MEDIUM", + 4: "HIGH", +} +var KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LOW": 2, + "MEDIUM": 3, + "HIGH": 4, +} + +func (x KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel) String() string { + return proto.EnumName(KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel_name, int32(x)) +} +func (KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_competition_level_be32af162d2cdabf, []int{0, 0} +} + +// Container for enumeration of keyword competition levels. The competition +// level indicates how competitive ad placement is for a keyword and +// is determined by the number of advertisers bidding on that keyword relative +// to all keywords across Google. The competition level can depend on the +// location and Search Network targeting options you've selected. +type KeywordPlanCompetitionLevelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanCompetitionLevelEnum) Reset() { *m = KeywordPlanCompetitionLevelEnum{} } +func (m *KeywordPlanCompetitionLevelEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanCompetitionLevelEnum) ProtoMessage() {} +func (*KeywordPlanCompetitionLevelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_competition_level_be32af162d2cdabf, []int{0} +} +func (m *KeywordPlanCompetitionLevelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanCompetitionLevelEnum.Unmarshal(m, b) +} +func (m *KeywordPlanCompetitionLevelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanCompetitionLevelEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanCompetitionLevelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanCompetitionLevelEnum.Merge(dst, src) +} +func (m *KeywordPlanCompetitionLevelEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanCompetitionLevelEnum.Size(m) +} +func (m *KeywordPlanCompetitionLevelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanCompetitionLevelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanCompetitionLevelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanCompetitionLevelEnum)(nil), "google.ads.googleads.v1.enums.KeywordPlanCompetitionLevelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel", KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel_name, KeywordPlanCompetitionLevelEnum_KeywordPlanCompetitionLevel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/keyword_plan_competition_level.proto", fileDescriptor_keyword_plan_competition_level_be32af162d2cdabf) +} + +var fileDescriptor_keyword_plan_competition_level_be32af162d2cdabf = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4f, 0xc2, 0x30, + 0x18, 0x95, 0x41, 0xc0, 0x94, 0x83, 0xcb, 0x8e, 0x2a, 0x51, 0xf8, 0x01, 0x5d, 0x16, 0x6f, 0xf5, + 0x34, 0x60, 0xc2, 0x02, 0x8c, 0x25, 0x06, 0x48, 
0xc8, 0x12, 0x52, 0x59, 0xd3, 0x2c, 0x76, 0xed, + 0x42, 0x07, 0xc6, 0x83, 0x7f, 0xc6, 0xa3, 0x3f, 0xc5, 0x9f, 0xe2, 0xdd, 0xbb, 0x69, 0x2b, 0x78, + 0x72, 0x97, 0xe5, 0x65, 0xef, 0x7d, 0xef, 0xf5, 0x3d, 0xd0, 0xa7, 0x42, 0x50, 0x46, 0x5c, 0x9c, + 0x4a, 0xd7, 0x40, 0x85, 0x0e, 0x9e, 0x4b, 0xf8, 0x3e, 0x97, 0xee, 0x33, 0x79, 0x7d, 0x11, 0xbb, + 0x74, 0x53, 0x30, 0xcc, 0x37, 0x5b, 0x91, 0x17, 0xa4, 0xcc, 0xca, 0x4c, 0xf0, 0x0d, 0x23, 0x07, + 0xc2, 0x60, 0xb1, 0x13, 0xa5, 0x70, 0x3a, 0xe6, 0x10, 0xe2, 0x54, 0xc2, 0x93, 0x07, 0x3c, 0x78, + 0x50, 0x7b, 0x5c, 0x5e, 0x1f, 0x23, 0x8a, 0xcc, 0xc5, 0x9c, 0x8b, 0x12, 0x2b, 0x07, 0x69, 0x8e, + 0x7b, 0x6f, 0xe0, 0x66, 0x62, 0x42, 0x62, 0x86, 0xf9, 0xe0, 0x2f, 0x62, 0xaa, 0x12, 0x02, 0xbe, + 0xcf, 0x7b, 0x6b, 0x70, 0x55, 0x21, 0x71, 0x2e, 0x40, 0x7b, 0x11, 0x3d, 0xc6, 0xc1, 0x20, 0x7c, + 0x08, 0x83, 0xa1, 0x7d, 0xe6, 0xb4, 0x41, 0x6b, 0x11, 0x4d, 0xa2, 0xf9, 0x2a, 0xb2, 0x6b, 0x4e, + 0x0b, 0xd4, 0xa7, 0xf3, 0x95, 0x6d, 0x39, 0x00, 0x34, 0x67, 0xc1, 0x30, 0x5c, 0xcc, 0xec, 0xba, + 0x73, 0x0e, 0x1a, 0xe3, 0x70, 0x34, 0xb6, 0x1b, 0xfd, 0xef, 0x1a, 0xe8, 0x6e, 0x45, 0x0e, 0x2b, + 0x2b, 0xf4, 0x6f, 0x2b, 0xf2, 0x63, 0x55, 0x23, 0xae, 0xad, 0x7f, 0x97, 0x84, 0x54, 0x30, 0xcc, + 0x29, 0x14, 0x3b, 0xea, 0x52, 0xc2, 0x75, 0xc9, 0xe3, 0xb2, 0x45, 0x26, 0xff, 0x19, 0xfa, 0x5e, + 0x7f, 0xdf, 0xad, 0xfa, 0xc8, 0xf7, 0x3f, 0xac, 0xce, 0xc8, 0x58, 0xf9, 0xa9, 0x84, 0x06, 0x2a, + 0xb4, 0xf4, 0xa0, 0x9a, 0x43, 0x7e, 0x1e, 0xf9, 0xc4, 0x4f, 0x65, 0x72, 0xe2, 0x93, 0xa5, 0x97, + 0x68, 0xfe, 0xcb, 0xea, 0x9a, 0x9f, 0x08, 0xf9, 0xa9, 0x44, 0xe8, 0xa4, 0x40, 0x68, 0xe9, 0x21, + 0xa4, 0x35, 0x4f, 0x4d, 0xfd, 0xb0, 0xbb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd4, 0x77, 0x8e, + 0xaa, 0x00, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_forecast_interval.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_forecast_interval.pb.go new file mode 100644 index 0000000..13b018d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_forecast_interval.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/keyword_plan_forecast_interval.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Forecast intervals. +type KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval int32 + +const ( + // Not specified. + KeywordPlanForecastIntervalEnum_UNSPECIFIED KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval = 0 + // The value is unknown in this version. + KeywordPlanForecastIntervalEnum_UNKNOWN KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval = 1 + // The next week date range for keyword plan. The next week is based + // on the default locale of the user's account and is mostly SUN-SAT or + // MON-SUN. 
+ // This can be different from next-7 days. + KeywordPlanForecastIntervalEnum_NEXT_WEEK KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval = 3 + // The next month date range for keyword plan. + KeywordPlanForecastIntervalEnum_NEXT_MONTH KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval = 4 + // The next quarter date range for keyword plan. + KeywordPlanForecastIntervalEnum_NEXT_QUARTER KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval = 5 +) + +var KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "NEXT_WEEK", + 4: "NEXT_MONTH", + 5: "NEXT_QUARTER", +} +var KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NEXT_WEEK": 3, + "NEXT_MONTH": 4, + "NEXT_QUARTER": 5, +} + +func (x KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval) String() string { + return proto.EnumName(KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval_name, int32(x)) +} +func (KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_forecast_interval_81df08cdb2ab986b, []int{0, 0} +} + +// Container for enumeration of forecast intervals. +type KeywordPlanForecastIntervalEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanForecastIntervalEnum) Reset() { *m = KeywordPlanForecastIntervalEnum{} } +func (m *KeywordPlanForecastIntervalEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanForecastIntervalEnum) ProtoMessage() {} +func (*KeywordPlanForecastIntervalEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_forecast_interval_81df08cdb2ab986b, []int{0} +} +func (m *KeywordPlanForecastIntervalEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanForecastIntervalEnum.Unmarshal(m, b) +} +func (m *KeywordPlanForecastIntervalEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanForecastIntervalEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanForecastIntervalEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanForecastIntervalEnum.Merge(dst, src) +} +func (m *KeywordPlanForecastIntervalEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanForecastIntervalEnum.Size(m) +} +func (m *KeywordPlanForecastIntervalEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanForecastIntervalEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanForecastIntervalEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanForecastIntervalEnum)(nil), "google.ads.googleads.v1.enums.KeywordPlanForecastIntervalEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval", KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval_name, KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/keyword_plan_forecast_interval.proto", fileDescriptor_keyword_plan_forecast_interval_81df08cdb2ab986b) +} + +var fileDescriptor_keyword_plan_forecast_interval_81df08cdb2ab986b = []byte{ + // 339 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4b, 0x4e, 0xc3, 0x30, + 0x10, 0x25, 0x2d, 0x1f, 0xe1, 0xf2, 0x89, 0xb2, 0x04, 0x2a, 
0x68, 0x0f, 0xe0, 0x28, 0x62, 0x67, + 0x56, 0x29, 0xb8, 0xa5, 0xaa, 0x48, 0x43, 0xe9, 0x07, 0xa1, 0x48, 0x91, 0x69, 0x8c, 0x15, 0xe1, + 0xda, 0x51, 0x9c, 0x16, 0x71, 0x0a, 0xee, 0xc0, 0x92, 0xa3, 0x70, 0x14, 0xf6, 0xec, 0x51, 0xec, + 0xb6, 0x3b, 0xba, 0xb1, 0xde, 0x78, 0x66, 0xde, 0x9b, 0xf7, 0x40, 0x8b, 0x49, 0xc9, 0x38, 0x75, + 0x49, 0xa2, 0x5c, 0x03, 0x4b, 0xb4, 0xf0, 0x5c, 0x2a, 0xe6, 0x33, 0xe5, 0xbe, 0xd2, 0xf7, 0x37, + 0x99, 0x27, 0x71, 0xc6, 0x89, 0x88, 0x5f, 0x64, 0x4e, 0xa7, 0x44, 0x15, 0x71, 0x2a, 0x0a, 0x9a, + 0x2f, 0x08, 0x87, 0x59, 0x2e, 0x0b, 0xe9, 0xd4, 0xcd, 0x22, 0x24, 0x89, 0x82, 0x6b, 0x0e, 0xb8, + 0xf0, 0xa0, 0xe6, 0x38, 0x39, 0x5b, 0x49, 0x64, 0xa9, 0x4b, 0x84, 0x90, 0x05, 0x29, 0x52, 0x29, + 0x94, 0x59, 0x6e, 0x7e, 0x58, 0xe0, 0xbc, 0x67, 0x54, 0x42, 0x4e, 0x44, 0x7b, 0xa9, 0xd1, 0x5d, + 0x4a, 0x60, 0x31, 0x9f, 0x35, 0x39, 0x38, 0xdd, 0x30, 0xe2, 0x1c, 0x83, 0xda, 0x28, 0x78, 0x08, + 0xf1, 0x75, 0xb7, 0xdd, 0xc5, 0x37, 0xf6, 0x96, 0x53, 0x03, 0x7b, 0xa3, 0xa0, 0x17, 0xf4, 0x27, + 0x81, 0x6d, 0x39, 0x87, 0x60, 0x3f, 0xc0, 0x8f, 0xc3, 0x78, 0x82, 0x71, 0xcf, 0xae, 0x3a, 0x47, + 0x00, 0xe8, 0xf2, 0xae, 0x1f, 0x0c, 0x6f, 0xed, 0x6d, 0xc7, 0x06, 0x07, 0xba, 0xbe, 0x1f, 0xf9, + 0x83, 0x21, 0x1e, 0xd8, 0x3b, 0xad, 0x5f, 0x0b, 0x34, 0xa6, 0x72, 0x06, 0x37, 0xba, 0x6a, 0x5d, + 0x6c, 0xb8, 0x28, 0x2c, 0x9d, 0x85, 0xd6, 0xd3, 0x32, 0x5c, 0xc8, 0x24, 0x27, 0x82, 0x41, 0x99, + 0x33, 0x97, 0x51, 0xa1, 0x7d, 0xaf, 0xc2, 0xce, 0x52, 0xf5, 0x4f, 0xf6, 0x57, 0xfa, 0xfd, 0xac, + 0x54, 0x3b, 0xbe, 0xff, 0x55, 0xa9, 0x77, 0x0c, 0x95, 0x9f, 0x28, 0x68, 0x60, 0x89, 0xc6, 0x1e, + 0x2c, 0x03, 0x52, 0xdf, 0xab, 0x7e, 0xe4, 0x27, 0x2a, 0x5a, 0xf7, 0xa3, 0xb1, 0x17, 0xe9, 0xfe, + 0x4f, 0xa5, 0x61, 0x3e, 0x11, 0xf2, 0x13, 0x85, 0xd0, 0x7a, 0x02, 0xa1, 0xb1, 0x87, 0x90, 0x9e, + 0x79, 0xde, 0xd5, 0x87, 0x5d, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x3f, 0x29, 0x0e, 0x13, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_network.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_network.pb.go new file mode 100644 index 0000000..31f39ab --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/keyword_plan_network.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/keyword_plan_network.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates keyword plan forecastable network types. +type KeywordPlanNetworkEnum_KeywordPlanNetwork int32 + +const ( + // Not specified. + KeywordPlanNetworkEnum_UNSPECIFIED KeywordPlanNetworkEnum_KeywordPlanNetwork = 0 + // The value is unknown in this version. + KeywordPlanNetworkEnum_UNKNOWN KeywordPlanNetworkEnum_KeywordPlanNetwork = 1 + // Google Search. 
+ KeywordPlanNetworkEnum_GOOGLE_SEARCH KeywordPlanNetworkEnum_KeywordPlanNetwork = 2 + // Google Search + Search partners. + KeywordPlanNetworkEnum_GOOGLE_SEARCH_AND_PARTNERS KeywordPlanNetworkEnum_KeywordPlanNetwork = 3 +) + +var KeywordPlanNetworkEnum_KeywordPlanNetwork_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "GOOGLE_SEARCH", + 3: "GOOGLE_SEARCH_AND_PARTNERS", +} +var KeywordPlanNetworkEnum_KeywordPlanNetwork_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "GOOGLE_SEARCH": 2, + "GOOGLE_SEARCH_AND_PARTNERS": 3, +} + +func (x KeywordPlanNetworkEnum_KeywordPlanNetwork) String() string { + return proto.EnumName(KeywordPlanNetworkEnum_KeywordPlanNetwork_name, int32(x)) +} +func (KeywordPlanNetworkEnum_KeywordPlanNetwork) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_network_dbfff2566f4fba42, []int{0, 0} +} + +// Container for enumeration of keyword plan forecastable network types. +type KeywordPlanNetworkEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanNetworkEnum) Reset() { *m = KeywordPlanNetworkEnum{} } +func (m *KeywordPlanNetworkEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanNetworkEnum) ProtoMessage() {} +func (*KeywordPlanNetworkEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_network_dbfff2566f4fba42, []int{0} +} +func (m *KeywordPlanNetworkEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanNetworkEnum.Unmarshal(m, b) +} +func (m *KeywordPlanNetworkEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanNetworkEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanNetworkEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanNetworkEnum.Merge(dst, src) +} +func (m *KeywordPlanNetworkEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanNetworkEnum.Size(m) +} +func (m *KeywordPlanNetworkEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanNetworkEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanNetworkEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanNetworkEnum)(nil), "google.ads.googleads.v1.enums.KeywordPlanNetworkEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.KeywordPlanNetworkEnum_KeywordPlanNetwork", KeywordPlanNetworkEnum_KeywordPlanNetwork_name, KeywordPlanNetworkEnum_KeywordPlanNetwork_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/keyword_plan_network.proto", fileDescriptor_keyword_plan_network_dbfff2566f4fba42) +} + +var fileDescriptor_keyword_plan_network_dbfff2566f4fba42 = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xf3, 0x30, + 0x1c, 0xfd, 0xd6, 0xc1, 0x27, 0x64, 0x88, 0xb5, 0x17, 0x0a, 0xc3, 0x09, 0xdb, 0x03, 0xa4, 0x14, + 0x6f, 0x24, 0x5e, 0x65, 0x5b, 0xad, 0x63, 0x92, 0x95, 0xcd, 0x4d, 0x90, 0x42, 0x89, 0x26, 0x84, + 0xb1, 0x2e, 0x29, 0x4d, 0xb7, 0xe1, 0x95, 0xef, 0xe2, 0xa5, 0x8f, 0xe2, 0xa3, 0x88, 0x0f, 0x21, + 0x4d, 0xdc, 0x40, 0x86, 0xde, 0x84, 0x43, 0xce, 0x1f, 0xce, 0xef, 0x80, 0x4b, 0xa1, 0x94, 0xc8, + 0xb8, 0x4f, 0x99, 0xf6, 0x2d, 0xac, 0xd0, 0x3a, 0xf0, 0xb9, 0x5c, 0x2d, 0xb5, 0xbf, 0xe0, 0xcf, + 0x1b, 0x55, 0xb0, 0x34, 0xcf, 0xa8, 0x4c, 0x25, 0x2f, 0x37, 0xaa, 0x58, 0xc0, 0xbc, 0x50, 0xa5, + 0xf2, 0x5a, 0x56, 0x0e, 0x29, 0xd3, 0x70, 0xe7, 0x84, 0xeb, 0x00, 
0x1a, 0x67, 0xf3, 0x6c, 0x1b, + 0x9c, 0xcf, 0x7d, 0x2a, 0xa5, 0x2a, 0x69, 0x39, 0x57, 0x52, 0x5b, 0x73, 0xe7, 0x05, 0x9c, 0x0c, + 0x6d, 0x74, 0x9c, 0x51, 0x49, 0x6c, 0x70, 0x28, 0x57, 0xcb, 0x0e, 0x07, 0xde, 0x3e, 0xe3, 0x1d, + 0x81, 0xc6, 0x94, 0x4c, 0xe2, 0xb0, 0x37, 0xb8, 0x1e, 0x84, 0x7d, 0xf7, 0x9f, 0xd7, 0x00, 0x07, + 0x53, 0x32, 0x24, 0xa3, 0x7b, 0xe2, 0xd6, 0xbc, 0x63, 0x70, 0x18, 0x8d, 0x46, 0xd1, 0x6d, 0x98, + 0x4e, 0x42, 0x3c, 0xee, 0xdd, 0xb8, 0x8e, 0x77, 0x0e, 0x9a, 0x3f, 0xbe, 0x52, 0x4c, 0xfa, 0x69, + 0x8c, 0xc7, 0x77, 0x24, 0x1c, 0x4f, 0xdc, 0x7a, 0xf7, 0xb3, 0x06, 0xda, 0x4f, 0x6a, 0x09, 0xff, + 0x3c, 0xa2, 0x7b, 0xba, 0x5f, 0x25, 0xae, 0xfa, 0xc7, 0xb5, 0x87, 0xee, 0xb7, 0x53, 0xa8, 0x8c, + 0x4a, 0x01, 0x55, 0x21, 0x7c, 0xc1, 0xa5, 0xb9, 0x6e, 0x3b, 0x64, 0x3e, 0xd7, 0xbf, 0xec, 0x7a, + 0x65, 0xde, 0x57, 0xa7, 0x1e, 0x61, 0xfc, 0xe6, 0xb4, 0x22, 0x1b, 0x85, 0x99, 0x86, 0x16, 0x56, + 0x68, 0x16, 0xc0, 0x6a, 0x10, 0xfd, 0xbe, 0xe5, 0x13, 0xcc, 0x74, 0xb2, 0xe3, 0x93, 0x59, 0x90, + 0x18, 0xfe, 0xc3, 0x69, 0xdb, 0x4f, 0x84, 0x30, 0xd3, 0x08, 0xed, 0x14, 0x08, 0xcd, 0x02, 0x84, + 0x8c, 0xe6, 0xf1, 0xbf, 0x29, 0x76, 0xf1, 0x15, 0x00, 0x00, 0xff, 0xff, 0x63, 0x52, 0x0e, 0xff, + 0xef, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/label_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/label_status.pb.go new file mode 100644 index 0000000..25d3c9a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/label_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/label_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a label. +type LabelStatusEnum_LabelStatus int32 + +const ( + // Not specified. + LabelStatusEnum_UNSPECIFIED LabelStatusEnum_LabelStatus = 0 + // Used for return value only. Represents value unknown in this version. + LabelStatusEnum_UNKNOWN LabelStatusEnum_LabelStatus = 1 + // Label is enabled. + LabelStatusEnum_ENABLED LabelStatusEnum_LabelStatus = 2 + // Label is removed. + LabelStatusEnum_REMOVED LabelStatusEnum_LabelStatus = 3 +) + +var LabelStatusEnum_LabelStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var LabelStatusEnum_LabelStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x LabelStatusEnum_LabelStatus) String() string { + return proto.EnumName(LabelStatusEnum_LabelStatus_name, int32(x)) +} +func (LabelStatusEnum_LabelStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_label_status_8f8aec166f117b1a, []int{0, 0} +} + +// Container for enum describing possible status of a label. 
+type LabelStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelStatusEnum) Reset() { *m = LabelStatusEnum{} } +func (m *LabelStatusEnum) String() string { return proto.CompactTextString(m) } +func (*LabelStatusEnum) ProtoMessage() {} +func (*LabelStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_label_status_8f8aec166f117b1a, []int{0} +} +func (m *LabelStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelStatusEnum.Unmarshal(m, b) +} +func (m *LabelStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelStatusEnum.Marshal(b, m, deterministic) +} +func (dst *LabelStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelStatusEnum.Merge(dst, src) +} +func (m *LabelStatusEnum) XXX_Size() int { + return xxx_messageInfo_LabelStatusEnum.Size(m) +} +func (m *LabelStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LabelStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LabelStatusEnum)(nil), "google.ads.googleads.v1.enums.LabelStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LabelStatusEnum_LabelStatus", LabelStatusEnum_LabelStatus_name, LabelStatusEnum_LabelStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/label_status.proto", fileDescriptor_label_status_8f8aec166f117b1a) +} + +var fileDescriptor_label_status_8f8aec166f117b1a = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x1d, 0x28, 0xa4, 0x87, 0x95, 0x1e, 0xc5, 0x1d, 0xb6, 0x07, 0x48, 0x2c, 0xde, 0xe2, + 0x29, 0xb5, 0x71, 0x0c, 0x67, 0x57, 0x1c, 0xab, 0x22, 0x05, 0xc9, 0x6c, 0x09, 0x85, 0x36, 0x29, + 0x4b, 0xbb, 0x07, 0xf2, 0xe8, 0xa3, 0xf8, 0x1e, 0x5e, 0x7c, 0x0a, 0x49, 0xb2, 0x96, 0x5d, 0xf4, + 0x52, 0x7e, 0xdf, 0xf7, 0xfb, 0xd3, 0x5f, 0x3e, 0x70, 0xcd, 0xa5, 0xe4, 0x55, 0x81, 0x58, 0xae, + 0x90, 0x85, 0x1a, 0x1d, 0x02, 0x54, 0x88, 0xae, 0x56, 0xa8, 0x62, 0xbb, 0xa2, 0x7a, 0x53, 0x2d, + 0x6b, 0x3b, 0x05, 0x9b, 0xbd, 0x6c, 0xa5, 0x3f, 0xb5, 0x32, 0xc8, 0x72, 0x05, 0x07, 0x07, 0x3c, + 0x04, 0xd0, 0x38, 0x2e, 0xaf, 0xfa, 0xc0, 0xa6, 0x44, 0x4c, 0x08, 0xd9, 0xb2, 0xb6, 0x94, 0xe2, + 0x68, 0x9e, 0xbf, 0x80, 0xc9, 0x4a, 0x47, 0x6e, 0x4c, 0x22, 0x15, 0x5d, 0x3d, 0xa7, 0xc0, 0x3d, + 0x59, 0xf9, 0x13, 0xe0, 0x6e, 0xe3, 0x4d, 0x42, 0xef, 0x96, 0xf7, 0x4b, 0x1a, 0x79, 0x67, 0xbe, + 0x0b, 0x2e, 0xb6, 0xf1, 0x43, 0xbc, 0x7e, 0x8e, 0xbd, 0x91, 0x1e, 0x68, 0x4c, 0xc2, 0x15, 0x8d, + 0x3c, 0x47, 0x0f, 0x4f, 0xf4, 0x71, 0x9d, 0xd2, 0xc8, 0x1b, 0x87, 0xdf, 0x23, 0x30, 0x7b, 0x97, + 0x35, 0xfc, 0xb7, 0x5d, 0xe8, 0x9d, 0xfc, 0x2a, 0xd1, 0x8d, 0x92, 0xd1, 0x6b, 0x78, 0xb4, 0x70, + 0x59, 0x31, 0xc1, 0xa1, 0xdc, 0x73, 0xc4, 0x0b, 0x61, 0xfa, 0xf6, 0x27, 0x69, 0x4a, 0xf5, 0xc7, + 0x85, 0x6e, 0xcd, 0xf7, 0xc3, 0x19, 0x2f, 0x08, 0xf9, 0x74, 0xa6, 0x0b, 0x1b, 0x45, 0x72, 0x05, + 0x2d, 0xd4, 0x28, 0x0d, 0xa0, 0x7e, 0xa9, 0xfa, 0xea, 0xf9, 0x8c, 0xe4, 0x2a, 0x1b, 0xf8, 0x2c, + 0x0d, 0x32, 0xc3, 0xff, 0x38, 0x33, 0xbb, 0xc4, 0x98, 0xe4, 0x0a, 0xe3, 0x41, 0x81, 0x71, 0x1a, + 0x60, 0x6c, 0x34, 0xbb, 0x73, 0x53, 0xec, 0xe6, 0x37, 0x00, 0x00, 0xff, 0xff, 0x40, 0xe2, 0xbe, + 0xb1, 0xb9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/legacy_app_install_ad_app_store.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/legacy_app_install_ad_app_store.pb.go new file mode 100644 index 0000000..b2519af --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/legacy_app_install_ad_app_store.pb.go @@ -0,0 +1,134 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/legacy_app_install_ad_app_store.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// App store type in a legacy app install ad. +type LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore int32 + +const ( + // Not specified. + LegacyAppInstallAdAppStoreEnum_UNSPECIFIED LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 0 + // Used for return value only. Represents value unknown in this version. + LegacyAppInstallAdAppStoreEnum_UNKNOWN LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 1 + // Apple iTunes. + LegacyAppInstallAdAppStoreEnum_APPLE_APP_STORE LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 2 + // Google Play. + LegacyAppInstallAdAppStoreEnum_GOOGLE_PLAY LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 3 + // Windows Store. + LegacyAppInstallAdAppStoreEnum_WINDOWS_STORE LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 4 + // Windows Phone Store. + LegacyAppInstallAdAppStoreEnum_WINDOWS_PHONE_STORE LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 5 + // The app is hosted in a Chinese app store. + LegacyAppInstallAdAppStoreEnum_CN_APP_STORE LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore = 6 +) + +var LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "APPLE_APP_STORE", + 3: "GOOGLE_PLAY", + 4: "WINDOWS_STORE", + 5: "WINDOWS_PHONE_STORE", + 6: "CN_APP_STORE", +} +var LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "APPLE_APP_STORE": 2, + "GOOGLE_PLAY": 3, + "WINDOWS_STORE": 4, + "WINDOWS_PHONE_STORE": 5, + "CN_APP_STORE": 6, +} + +func (x LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore) String() string { + return proto.EnumName(LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore_name, int32(x)) +} +func (LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_legacy_app_install_ad_app_store_985540f294b6708f, []int{0, 0} +} + +// Container for enum describing app store type in a legacy app install ad. 
+type LegacyAppInstallAdAppStoreEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LegacyAppInstallAdAppStoreEnum) Reset() { *m = LegacyAppInstallAdAppStoreEnum{} } +func (m *LegacyAppInstallAdAppStoreEnum) String() string { return proto.CompactTextString(m) } +func (*LegacyAppInstallAdAppStoreEnum) ProtoMessage() {} +func (*LegacyAppInstallAdAppStoreEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_legacy_app_install_ad_app_store_985540f294b6708f, []int{0} +} +func (m *LegacyAppInstallAdAppStoreEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LegacyAppInstallAdAppStoreEnum.Unmarshal(m, b) +} +func (m *LegacyAppInstallAdAppStoreEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LegacyAppInstallAdAppStoreEnum.Marshal(b, m, deterministic) +} +func (dst *LegacyAppInstallAdAppStoreEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LegacyAppInstallAdAppStoreEnum.Merge(dst, src) +} +func (m *LegacyAppInstallAdAppStoreEnum) XXX_Size() int { + return xxx_messageInfo_LegacyAppInstallAdAppStoreEnum.Size(m) +} +func (m *LegacyAppInstallAdAppStoreEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LegacyAppInstallAdAppStoreEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LegacyAppInstallAdAppStoreEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LegacyAppInstallAdAppStoreEnum)(nil), "google.ads.googleads.v1.enums.LegacyAppInstallAdAppStoreEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore", LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore_name, LegacyAppInstallAdAppStoreEnum_LegacyAppInstallAdAppStore_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/legacy_app_install_ad_app_store.proto", fileDescriptor_legacy_app_install_ad_app_store_985540f294b6708f) +} + +var fileDescriptor_legacy_app_install_ad_app_store_985540f294b6708f = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x41, 0x6b, 0xa3, 0x40, + 0x18, 0x5d, 0xcd, 0x6e, 0x16, 0x26, 0xbb, 0xc4, 0x35, 0x87, 0x85, 0xb0, 0xd9, 0x25, 0xf9, 0x01, + 0x23, 0xb2, 0xb7, 0xe9, 0x69, 0x92, 0x58, 0x2b, 0x15, 0x1d, 0x6a, 0x93, 0xd0, 0x22, 0xc8, 0x34, + 0x8a, 0x08, 0x66, 0x66, 0xc8, 0x98, 0x40, 0xff, 0x4c, 0x0f, 0x3d, 0xf6, 0x5f, 0xf4, 0xda, 0x9f, + 0xd2, 0x73, 0x7f, 0x40, 0x71, 0x8c, 0xa1, 0x97, 0xf4, 0x22, 0xcf, 0xef, 0xbd, 0xef, 0x3d, 0x7d, + 0x1f, 0x98, 0xe5, 0x9c, 0xe7, 0x65, 0x66, 0xd1, 0x54, 0x5a, 0x0d, 0xac, 0xd1, 0xde, 0xb6, 0x32, + 0xb6, 0xdb, 0x48, 0xab, 0xcc, 0x72, 0xba, 0xbe, 0x4f, 0xa8, 0x10, 0x49, 0xc1, 0x64, 0x45, 0xcb, + 0x32, 0xa1, 0xa9, 0x7a, 0x95, 0x15, 0xdf, 0x66, 0x50, 0x6c, 0x79, 0xc5, 0xcd, 0x51, 0xb3, 0x09, + 0x69, 0x2a, 0xe1, 0xd1, 0x04, 0xee, 0x6d, 0xa8, 0x4c, 0x86, 0x7f, 0xda, 0x0c, 0x51, 0x58, 0x94, + 0x31, 0x5e, 0xd1, 0xaa, 0xe0, 0x4c, 0x36, 0xcb, 0x93, 0x67, 0x0d, 0xfc, 0xf5, 0x55, 0x0c, 0x16, + 0xc2, 0x6b, 0x42, 0x70, 0x8a, 0x85, 0x88, 0xea, 0x04, 0x87, 0xed, 0x36, 0x93, 0x07, 0x0d, 0x0c, + 0x4f, 0x4b, 0xcc, 0x3e, 0xe8, 0x2d, 0x82, 0x88, 0x38, 0x33, 0xef, 0xdc, 0x73, 0xe6, 0xc6, 0x17, + 0xb3, 0x07, 0xbe, 0x2f, 0x82, 0xcb, 0x20, 0x5c, 0x05, 0x86, 0x66, 0x0e, 0x40, 0x1f, 0x13, 0xe2, + 0x3b, 0x09, 0x26, 0x24, 0x89, 0xae, 0xc3, 0x2b, 0xc7, 0xd0, 0xeb, 0x15, 0x37, 0x0c, 0x5d, 0xdf, + 0x49, 0x88, 0x8f, 0x6f, 0x8c, 0x8e, 0xf9, 0x0b, 0xfc, 0x5c, 0x79, 0xc1, 0x3c, 0x5c, 0x45, 0x07, + 
0xcd, 0x57, 0xf3, 0x37, 0x18, 0xb4, 0x23, 0x72, 0x11, 0x06, 0xce, 0x81, 0xf8, 0x66, 0x1a, 0xe0, + 0xc7, 0x2c, 0xf8, 0x60, 0xd7, 0x9d, 0xbe, 0x69, 0x60, 0xbc, 0xe6, 0x1b, 0xf8, 0x69, 0x0f, 0xd3, + 0x7f, 0xa7, 0xff, 0x81, 0xd4, 0x55, 0x10, 0xed, 0x76, 0x7a, 0x70, 0xc8, 0x79, 0x49, 0x59, 0x0e, + 0xf9, 0x36, 0xb7, 0xf2, 0x8c, 0xa9, 0xa2, 0xda, 0xf3, 0x88, 0x42, 0x9e, 0xb8, 0xd6, 0x99, 0x7a, + 0x3e, 0xea, 0x1d, 0x17, 0xe3, 0x27, 0x7d, 0xe4, 0x36, 0x56, 0x38, 0x95, 0xb0, 0x81, 0x35, 0x5a, + 0xda, 0xb0, 0xae, 0x54, 0xbe, 0xb4, 0x7c, 0x8c, 0x53, 0x19, 0x1f, 0xf9, 0x78, 0x69, 0xc7, 0x8a, + 0x7f, 0xd5, 0xc7, 0xcd, 0x10, 0x21, 0x9c, 0x4a, 0x84, 0x8e, 0x0a, 0x84, 0x96, 0x36, 0x42, 0x4a, + 0x73, 0xd7, 0x55, 0x1f, 0xf6, 0xff, 0x3d, 0x00, 0x00, 0xff, 0xff, 0x0c, 0x96, 0x83, 0x7a, 0x45, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_custom_attribute_index.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_custom_attribute_index.pb.go new file mode 100644 index 0000000..15690b5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_custom_attribute_index.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/listing_custom_attribute_index.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The index of the listing custom attribute. +type ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex int32 + +const ( + // Not specified. + ListingCustomAttributeIndexEnum_UNSPECIFIED ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 0 + // Used for return value only. Represents value unknown in this version. + ListingCustomAttributeIndexEnum_UNKNOWN ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 1 + // First listing custom attribute. + ListingCustomAttributeIndexEnum_INDEX0 ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 7 + // Second listing custom attribute. + ListingCustomAttributeIndexEnum_INDEX1 ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 8 + // Third listing custom attribute. + ListingCustomAttributeIndexEnum_INDEX2 ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 9 + // Fourth listing custom attribute. + ListingCustomAttributeIndexEnum_INDEX3 ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 10 + // Fifth listing custom attribute. 
+ ListingCustomAttributeIndexEnum_INDEX4 ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex = 11 +) + +var ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 7: "INDEX0", + 8: "INDEX1", + 9: "INDEX2", + 10: "INDEX3", + 11: "INDEX4", +} +var ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INDEX0": 7, + "INDEX1": 8, + "INDEX2": 9, + "INDEX3": 10, + "INDEX4": 11, +} + +func (x ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex) String() string { + return proto.EnumName(ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex_name, int32(x)) +} +func (ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_listing_custom_attribute_index_306d4bdbda07a18e, []int{0, 0} +} + +// Container for enum describing the index of the listing custom attribute. +type ListingCustomAttributeIndexEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingCustomAttributeIndexEnum) Reset() { *m = ListingCustomAttributeIndexEnum{} } +func (m *ListingCustomAttributeIndexEnum) String() string { return proto.CompactTextString(m) } +func (*ListingCustomAttributeIndexEnum) ProtoMessage() {} +func (*ListingCustomAttributeIndexEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_listing_custom_attribute_index_306d4bdbda07a18e, []int{0} +} +func (m *ListingCustomAttributeIndexEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingCustomAttributeIndexEnum.Unmarshal(m, b) +} +func (m *ListingCustomAttributeIndexEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingCustomAttributeIndexEnum.Marshal(b, m, deterministic) +} +func (dst *ListingCustomAttributeIndexEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingCustomAttributeIndexEnum.Merge(dst, src) +} +func (m *ListingCustomAttributeIndexEnum) XXX_Size() int { + return xxx_messageInfo_ListingCustomAttributeIndexEnum.Size(m) +} +func (m *ListingCustomAttributeIndexEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ListingCustomAttributeIndexEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingCustomAttributeIndexEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ListingCustomAttributeIndexEnum)(nil), "google.ads.googleads.v1.enums.ListingCustomAttributeIndexEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex", ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex_name, ListingCustomAttributeIndexEnum_ListingCustomAttributeIndex_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/listing_custom_attribute_index.proto", fileDescriptor_listing_custom_attribute_index_306d4bdbda07a18e) +} + +var fileDescriptor_listing_custom_attribute_index_306d4bdbda07a18e = []byte{ + // 328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xb1, 0x4e, 0xf3, 0x30, + 0x14, 0x85, 0xff, 0xf4, 0x97, 0x5a, 0x70, 0x07, 0xac, 0x8c, 0x40, 0x05, 0xed, 0x03, 0x38, 0x84, + 0x32, 0x99, 0x29, 0x6d, 0x43, 0x15, 0x81, 0x42, 0x25, 0xd4, 0x82, 0x50, 0xa4, 0xca, 0xad, 0x23, + 0xcb, 0x52, 0x63, 0x57, 0xb5, 0x53, 0x78, 0x16, 0x46, 0x46, 0x1e, 0x85, 0x47, 0x61, 0x67, 0x47, + 0xb1, 0x89, 0x37, 0xb2, 0x44, 0x9f, 0x72, 0xef, 0x3d, 0xe7, 0xde, 
0x63, 0x30, 0x62, 0x52, 0xb2, + 0x4d, 0x1e, 0x10, 0xaa, 0x02, 0x8b, 0x15, 0xed, 0xc3, 0x20, 0x17, 0x65, 0xa1, 0x82, 0x0d, 0x57, + 0x9a, 0x0b, 0xb6, 0x5c, 0x97, 0x4a, 0xcb, 0x62, 0x49, 0xb4, 0xde, 0xf1, 0x55, 0xa9, 0xf3, 0x25, + 0x17, 0x34, 0x7f, 0x45, 0xdb, 0x9d, 0xd4, 0xd2, 0xef, 0xd9, 0x41, 0x44, 0xa8, 0x42, 0x4e, 0x03, + 0xed, 0x43, 0x64, 0x34, 0x8e, 0x4f, 0x6b, 0x8b, 0x2d, 0x0f, 0x88, 0x10, 0x52, 0x13, 0xcd, 0xa5, + 0x50, 0x76, 0x78, 0xf0, 0xe6, 0x81, 0xb3, 0x3b, 0xeb, 0x32, 0x36, 0x26, 0x51, 0xed, 0x91, 0x54, + 0x16, 0xb1, 0x28, 0x8b, 0xc1, 0x0b, 0x38, 0x69, 0x68, 0xf1, 0x8f, 0x40, 0x77, 0x9e, 0x3e, 0xcc, + 0xe2, 0x71, 0x72, 0x93, 0xc4, 0x13, 0xf8, 0xcf, 0xef, 0x82, 0xce, 0x3c, 0xbd, 0x4d, 0xef, 0x1f, + 0x53, 0xe8, 0xf9, 0x00, 0xb4, 0x93, 0x74, 0x12, 0x3f, 0x5d, 0xc0, 0x8e, 0xe3, 0x10, 0x1e, 0x38, + 0xbe, 0x84, 0x87, 0x8e, 0x87, 0x10, 0x38, 0xbe, 0x82, 0xdd, 0xd1, 0xb7, 0x07, 0xfa, 0x6b, 0x59, + 0xa0, 0xc6, 0x03, 0x47, 0xe7, 0x0d, 0xcb, 0xcd, 0xaa, 0x23, 0x67, 0xde, 0xf3, 0x6f, 0xce, 0x88, + 0xc9, 0x0d, 0x11, 0x0c, 0xc9, 0x1d, 0x0b, 0x58, 0x2e, 0x4c, 0x04, 0x75, 0xee, 0x5b, 0xae, 0xfe, + 0x78, 0x86, 0x6b, 0xf3, 0x7d, 0x6f, 0xfd, 0x9f, 0x46, 0xd1, 0x47, 0xab, 0x37, 0xb5, 0x52, 0x11, + 0x55, 0xc8, 0x62, 0x45, 0x8b, 0x10, 0x55, 0x59, 0xa9, 0xcf, 0xba, 0x9e, 0x45, 0x54, 0x65, 0xae, + 0x9e, 0x2d, 0xc2, 0xcc, 0xd4, 0xbf, 0x5a, 0x7d, 0xfb, 0x13, 0xe3, 0x88, 0x2a, 0x8c, 0x5d, 0x07, + 0xc6, 0x8b, 0x10, 0x63, 0xd3, 0xb3, 0x6a, 0x9b, 0xc5, 0x86, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x2e, 0xac, 0x0a, 0x2e, 0x1e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_group_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_group_type.pb.go new file mode 100644 index 0000000..fb5d7c9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/listing_group_type.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/listing_group_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of the listing group. +type ListingGroupTypeEnum_ListingGroupType int32 + +const ( + // Not specified. + ListingGroupTypeEnum_UNSPECIFIED ListingGroupTypeEnum_ListingGroupType = 0 + // Used for return value only. Represents value unknown in this version. + ListingGroupTypeEnum_UNKNOWN ListingGroupTypeEnum_ListingGroupType = 1 + // Subdivision of products along some listing dimension. These nodes + // are not used by serving to target listing entries, but is purely + // to define the structure of the tree. + ListingGroupTypeEnum_SUBDIVISION ListingGroupTypeEnum_ListingGroupType = 2 + // Listing group unit that defines a bid. 
+ ListingGroupTypeEnum_UNIT ListingGroupTypeEnum_ListingGroupType = 3 +) + +var ListingGroupTypeEnum_ListingGroupType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SUBDIVISION", + 3: "UNIT", +} +var ListingGroupTypeEnum_ListingGroupType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SUBDIVISION": 2, + "UNIT": 3, +} + +func (x ListingGroupTypeEnum_ListingGroupType) String() string { + return proto.EnumName(ListingGroupTypeEnum_ListingGroupType_name, int32(x)) +} +func (ListingGroupTypeEnum_ListingGroupType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_listing_group_type_702778b765e2222d, []int{0, 0} +} + +// Container for enum describing the type of the listing group. +type ListingGroupTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListingGroupTypeEnum) Reset() { *m = ListingGroupTypeEnum{} } +func (m *ListingGroupTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ListingGroupTypeEnum) ProtoMessage() {} +func (*ListingGroupTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_listing_group_type_702778b765e2222d, []int{0} +} +func (m *ListingGroupTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListingGroupTypeEnum.Unmarshal(m, b) +} +func (m *ListingGroupTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListingGroupTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ListingGroupTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListingGroupTypeEnum.Merge(dst, src) +} +func (m *ListingGroupTypeEnum) XXX_Size() int { + return xxx_messageInfo_ListingGroupTypeEnum.Size(m) +} +func (m *ListingGroupTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ListingGroupTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ListingGroupTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ListingGroupTypeEnum)(nil), "google.ads.googleads.v1.enums.ListingGroupTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ListingGroupTypeEnum_ListingGroupType", ListingGroupTypeEnum_ListingGroupType_name, ListingGroupTypeEnum_ListingGroupType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/listing_group_type.proto", fileDescriptor_listing_group_type_702778b765e2222d) +} + +var fileDescriptor_listing_group_type_702778b765e2222d = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x9d, 0xa8, 0x64, 0x07, 0x4b, 0xd1, 0x8b, 0xb8, 0xc3, 0xf6, 0x00, 0x09, 0x45, 0xf0, + 0x10, 0x4f, 0xad, 0x9b, 0x23, 0x4c, 0xb2, 0xc1, 0xd6, 0x0a, 0x52, 0x18, 0x75, 0x2d, 0xa1, 0xb0, + 0x26, 0xa1, 0x69, 0x07, 0x7b, 0x1d, 0x8f, 0x3e, 0x8a, 0x8f, 0xb2, 0xa7, 0x90, 0x24, 0xb6, 0x87, + 0x81, 0x5e, 0xc2, 0x8f, 0xef, 0xf7, 0x27, 0xbf, 0xef, 0x03, 0x8f, 0x4c, 0x08, 0xb6, 0xcb, 0x51, + 0x9a, 0x29, 0x64, 0xa1, 0x46, 0x7b, 0x1f, 0xe5, 0xbc, 0x29, 0x15, 0xda, 0x15, 0xaa, 0x2e, 0x38, + 0xdb, 0xb0, 0x4a, 0x34, 0x72, 0x53, 0x1f, 0x64, 0x0e, 0x65, 0x25, 0x6a, 0xe1, 0x0d, 0xad, 0x18, + 0xa6, 0x99, 0x82, 0x9d, 0x0f, 0xee, 0x7d, 0x68, 0x7c, 0x77, 0xf7, 0x6d, 0xac, 0x2c, 0x50, 0xca, + 0xb9, 0xa8, 0xd3, 0xba, 0x10, 0x5c, 0x59, 0xf3, 0x78, 0x0b, 0x6e, 0x5e, 0x6d, 0xf0, 0x4c, 0xe7, + 0xae, 0x0f, 0x32, 0x9f, 0xf2, 0xa6, 0x1c, 0xcf, 0x81, 0x7b, 0x3a, 0xf7, 0xae, 0xc1, 0x20, 0xa2, + 0xab, 0xe5, 0xf4, 0x99, 0xbc, 0x90, 0xe9, 0xc4, 0x3d, 0xf3, 
0x06, 0xe0, 0x32, 0xa2, 0x73, 0xba, + 0x78, 0xa3, 0x6e, 0x4f, 0xb3, 0xab, 0x28, 0x9c, 0x90, 0x98, 0xac, 0xc8, 0x82, 0xba, 0x8e, 0x77, + 0x05, 0xce, 0x23, 0x4a, 0xd6, 0x6e, 0x3f, 0x3c, 0xf6, 0xc0, 0x68, 0x2b, 0x4a, 0xf8, 0x6f, 0xd1, + 0xf0, 0xf6, 0xf4, 0xc3, 0xa5, 0x6e, 0xb8, 0xec, 0xbd, 0x87, 0xbf, 0x3e, 0x26, 0x76, 0x29, 0x67, + 0x50, 0x54, 0x0c, 0xb1, 0x9c, 0x9b, 0xfe, 0xed, 0xa1, 0x64, 0xa1, 0xfe, 0xb8, 0xdb, 0x93, 0x79, + 0x3f, 0x9d, 0xfe, 0x2c, 0x08, 0xbe, 0x9c, 0xe1, 0xcc, 0x46, 0x05, 0x99, 0x82, 0x16, 0x6a, 0x14, + 0xfb, 0x50, 0x2f, 0xad, 0xbe, 0x5b, 0x3e, 0x09, 0x32, 0x95, 0x74, 0x7c, 0x12, 0xfb, 0x89, 0xe1, + 0x8f, 0xce, 0xc8, 0x0e, 0x31, 0x0e, 0x32, 0x85, 0x71, 0xa7, 0xc0, 0x38, 0xf6, 0x31, 0x36, 0x9a, + 0x8f, 0x0b, 0x53, 0xec, 0xe1, 0x27, 0x00, 0x00, 0xff, 0xff, 0x5c, 0xbf, 0x3f, 0x92, 0xcf, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/local_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/local_placeholder_field.pb.go new file mode 100644 index 0000000..4001018 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/local_placeholder_field.pb.go @@ -0,0 +1,216 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/local_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Local placeholder fields. +type LocalPlaceholderFieldEnum_LocalPlaceholderField int32 + +const ( + // Not specified. + LocalPlaceholderFieldEnum_UNSPECIFIED LocalPlaceholderFieldEnum_LocalPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + LocalPlaceholderFieldEnum_UNKNOWN LocalPlaceholderFieldEnum_LocalPlaceholderField = 1 + // Data Type: STRING. Required. Unique ID. + LocalPlaceholderFieldEnum_DEAL_ID LocalPlaceholderFieldEnum_LocalPlaceholderField = 2 + // Data Type: STRING. Required. Main headline with local deal title to be + // shown in dynamic ad. + LocalPlaceholderFieldEnum_DEAL_NAME LocalPlaceholderFieldEnum_LocalPlaceholderField = 3 + // Data Type: STRING. Local deal subtitle to be shown in dynamic ad. + LocalPlaceholderFieldEnum_SUBTITLE LocalPlaceholderFieldEnum_LocalPlaceholderField = 4 + // Data Type: STRING. Description of local deal to be shown in dynamic ad. + LocalPlaceholderFieldEnum_DESCRIPTION LocalPlaceholderFieldEnum_LocalPlaceholderField = 5 + // Data Type: STRING. Price to be shown in the ad. Highly recommended for + // dynamic ads. Example: "100.00 USD" + LocalPlaceholderFieldEnum_PRICE LocalPlaceholderFieldEnum_LocalPlaceholderField = 6 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + LocalPlaceholderFieldEnum_FORMATTED_PRICE LocalPlaceholderFieldEnum_LocalPlaceholderField = 7 + // Data Type: STRING. 
Sale price to be shown in the ad. + // Example: "80.00 USD" + LocalPlaceholderFieldEnum_SALE_PRICE LocalPlaceholderFieldEnum_LocalPlaceholderField = 8 + // Data Type: STRING. Formatted sale price to be shown in the ad. + // Example: "On sale for $80.00", "$60 - $80" + LocalPlaceholderFieldEnum_FORMATTED_SALE_PRICE LocalPlaceholderFieldEnum_LocalPlaceholderField = 9 + // Data Type: URL. Image to be displayed in the ad. + LocalPlaceholderFieldEnum_IMAGE_URL LocalPlaceholderFieldEnum_LocalPlaceholderField = 10 + // Data Type: STRING. Complete property address, including postal code. + LocalPlaceholderFieldEnum_ADDRESS LocalPlaceholderFieldEnum_LocalPlaceholderField = 11 + // Data Type: STRING. Category of local deal used to group like items + // together for recommendation engine. + LocalPlaceholderFieldEnum_CATEGORY LocalPlaceholderFieldEnum_LocalPlaceholderField = 12 + // Data Type: STRING_LIST. Keywords used for product retrieval. + LocalPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS LocalPlaceholderFieldEnum_LocalPlaceholderField = 13 + // Data Type: URL_LIST. Required. Final URLs to be used in ad when using + // Upgraded URLs; the more specific the better (e.g. the individual URL of a + // specific local deal and its location). + LocalPlaceholderFieldEnum_FINAL_URLS LocalPlaceholderFieldEnum_LocalPlaceholderField = 14 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + LocalPlaceholderFieldEnum_FINAL_MOBILE_URLS LocalPlaceholderFieldEnum_LocalPlaceholderField = 15 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + LocalPlaceholderFieldEnum_TRACKING_URL LocalPlaceholderFieldEnum_LocalPlaceholderField = 16 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + LocalPlaceholderFieldEnum_ANDROID_APP_LINK LocalPlaceholderFieldEnum_LocalPlaceholderField = 17 + // Data Type: STRING_LIST. List of recommended local deal IDs to show + // together with this item. + LocalPlaceholderFieldEnum_SIMILAR_DEAL_IDS LocalPlaceholderFieldEnum_LocalPlaceholderField = 18 + // Data Type: STRING. iOS app link. + LocalPlaceholderFieldEnum_IOS_APP_LINK LocalPlaceholderFieldEnum_LocalPlaceholderField = 19 + // Data Type: INT64. iOS app store ID. 
+ LocalPlaceholderFieldEnum_IOS_APP_STORE_ID LocalPlaceholderFieldEnum_LocalPlaceholderField = 20 +) + +var LocalPlaceholderFieldEnum_LocalPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DEAL_ID", + 3: "DEAL_NAME", + 4: "SUBTITLE", + 5: "DESCRIPTION", + 6: "PRICE", + 7: "FORMATTED_PRICE", + 8: "SALE_PRICE", + 9: "FORMATTED_SALE_PRICE", + 10: "IMAGE_URL", + 11: "ADDRESS", + 12: "CATEGORY", + 13: "CONTEXTUAL_KEYWORDS", + 14: "FINAL_URLS", + 15: "FINAL_MOBILE_URLS", + 16: "TRACKING_URL", + 17: "ANDROID_APP_LINK", + 18: "SIMILAR_DEAL_IDS", + 19: "IOS_APP_LINK", + 20: "IOS_APP_STORE_ID", +} +var LocalPlaceholderFieldEnum_LocalPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DEAL_ID": 2, + "DEAL_NAME": 3, + "SUBTITLE": 4, + "DESCRIPTION": 5, + "PRICE": 6, + "FORMATTED_PRICE": 7, + "SALE_PRICE": 8, + "FORMATTED_SALE_PRICE": 9, + "IMAGE_URL": 10, + "ADDRESS": 11, + "CATEGORY": 12, + "CONTEXTUAL_KEYWORDS": 13, + "FINAL_URLS": 14, + "FINAL_MOBILE_URLS": 15, + "TRACKING_URL": 16, + "ANDROID_APP_LINK": 17, + "SIMILAR_DEAL_IDS": 18, + "IOS_APP_LINK": 19, + "IOS_APP_STORE_ID": 20, +} + +func (x LocalPlaceholderFieldEnum_LocalPlaceholderField) String() string { + return proto.EnumName(LocalPlaceholderFieldEnum_LocalPlaceholderField_name, int32(x)) +} +func (LocalPlaceholderFieldEnum_LocalPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_local_placeholder_field_ded9d998e2bec814, []int{0, 0} +} + +// Values for Local placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. +type LocalPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalPlaceholderFieldEnum) Reset() { *m = LocalPlaceholderFieldEnum{} } +func (m *LocalPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*LocalPlaceholderFieldEnum) ProtoMessage() {} +func (*LocalPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_local_placeholder_field_ded9d998e2bec814, []int{0} +} +func (m *LocalPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *LocalPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *LocalPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalPlaceholderFieldEnum.Merge(dst, src) +} +func (m *LocalPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_LocalPlaceholderFieldEnum.Size(m) +} +func (m *LocalPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LocalPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocalPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.LocalPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LocalPlaceholderFieldEnum_LocalPlaceholderField", LocalPlaceholderFieldEnum_LocalPlaceholderField_name, LocalPlaceholderFieldEnum_LocalPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/local_placeholder_field.proto", fileDescriptor_local_placeholder_field_ded9d998e2bec814) +} + +var fileDescriptor_local_placeholder_field_ded9d998e2bec814 = []byte{ + // 511 bytes of 
a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x6e, 0xda, 0x30, + 0x14, 0x1e, 0xb0, 0xfe, 0x60, 0x68, 0x71, 0x0d, 0xd5, 0xb6, 0x6a, 0xbd, 0x68, 0x1f, 0x20, 0x11, + 0xda, 0x5d, 0x7a, 0x65, 0x12, 0x83, 0x2c, 0x82, 0x13, 0xd9, 0x0e, 0x5d, 0x27, 0xa4, 0x28, 0x23, + 0x59, 0x86, 0x14, 0x62, 0x44, 0x68, 0x1f, 0x62, 0x8f, 0xb1, 0xab, 0x69, 0x8f, 0xb2, 0x47, 0xd9, + 0xcd, 0x5e, 0x61, 0x72, 0xc2, 0xcf, 0x2e, 0xba, 0xde, 0x58, 0xe7, 0x9c, 0xef, 0xe7, 0x1c, 0xf9, + 0x1c, 0x70, 0x97, 0x2a, 0x95, 0x66, 0x89, 0x19, 0xc5, 0x85, 0x59, 0x85, 0x3a, 0x7a, 0xea, 0x9b, + 0x49, 0xfe, 0xb8, 0x2c, 0xcc, 0x4c, 0xcd, 0xa3, 0x2c, 0x5c, 0x65, 0xd1, 0x3c, 0xf9, 0xaa, 0xb2, + 0x38, 0x59, 0x87, 0x5f, 0x16, 0x49, 0x16, 0x1b, 0xab, 0xb5, 0xda, 0x28, 0x74, 0x5d, 0x29, 0x8c, + 0x28, 0x2e, 0x8c, 0xbd, 0xd8, 0x78, 0xea, 0x1b, 0xa5, 0xf8, 0xea, 0xfd, 0xce, 0x7b, 0xb5, 0x30, + 0xa3, 0x3c, 0x57, 0x9b, 0x68, 0xb3, 0x50, 0x79, 0x51, 0x89, 0x6f, 0x7f, 0x34, 0xc0, 0x3b, 0x57, + 0xdb, 0xfb, 0x07, 0xf7, 0xa1, 0x36, 0x27, 0xf9, 0xe3, 0xf2, 0xf6, 0x5b, 0x03, 0x5c, 0x3e, 0x8b, + 0xa2, 0x0e, 0x68, 0x05, 0x4c, 0xf8, 0xc4, 0xa6, 0x43, 0x4a, 0x1c, 0xf8, 0x0a, 0xb5, 0xc0, 0x49, + 0xc0, 0xc6, 0xcc, 0xbb, 0x67, 0xb0, 0xa6, 0x13, 0x87, 0x60, 0x37, 0xa4, 0x0e, 0xac, 0xa3, 0x33, + 0xd0, 0x2c, 0x13, 0x86, 0x27, 0x04, 0x36, 0x50, 0x1b, 0x9c, 0x8a, 0x60, 0x20, 0xa9, 0x74, 0x09, + 0x7c, 0xad, 0x7d, 0x1c, 0x22, 0x6c, 0x4e, 0x7d, 0x49, 0x3d, 0x06, 0x8f, 0x50, 0x13, 0x1c, 0xf9, + 0x9c, 0xda, 0x04, 0x1e, 0xa3, 0x2e, 0xe8, 0x0c, 0x3d, 0x3e, 0xc1, 0x52, 0x12, 0x27, 0xac, 0x8a, + 0x27, 0xe8, 0x1c, 0x00, 0x81, 0x5d, 0xb2, 0xcd, 0x4f, 0xd1, 0x5b, 0xd0, 0x3b, 0x90, 0xfe, 0x41, + 0x9a, 0xba, 0x2f, 0x9d, 0xe0, 0x11, 0x09, 0x03, 0xee, 0x42, 0xa0, 0x67, 0xc2, 0x8e, 0xc3, 0x89, + 0x10, 0xb0, 0xa5, 0x87, 0xb0, 0xb1, 0x24, 0x23, 0x8f, 0x3f, 0xc0, 0x36, 0x7a, 0x03, 0xba, 0xb6, + 0xc7, 0x24, 0xf9, 0x28, 0x03, 0xec, 0x86, 0x63, 0xf2, 0x70, 0xef, 0x71, 0x47, 0xc0, 0x33, 0xdd, + 0x6c, 0x48, 0x19, 0x76, 0xb5, 0x85, 0x80, 0xe7, 0xe8, 0x12, 0x5c, 0x54, 0xf9, 0xc4, 0x1b, 0x50, + 0x97, 0x54, 0xe5, 0x0e, 0x82, 0xa0, 0x2d, 0x39, 0xb6, 0xc7, 0x94, 0x8d, 0xca, 0x66, 0x10, 0xf5, + 0x00, 0xc4, 0xcc, 0xe1, 0x1e, 0x75, 0x42, 0xec, 0xfb, 0xa1, 0x4b, 0xd9, 0x18, 0x5e, 0xe8, 0xaa, + 0xa0, 0x13, 0xea, 0x62, 0x1e, 0x6e, 0xbf, 0x47, 0x40, 0xa4, 0xd5, 0xd4, 0x13, 0x07, 0x5e, 0x57, + 0xf3, 0x76, 0x15, 0x21, 0x3d, 0x4e, 0xf4, 0x3f, 0xf6, 0x06, 0x7f, 0x6a, 0xe0, 0x66, 0xae, 0x96, + 0xc6, 0x8b, 0xeb, 0x1e, 0x5c, 0x3d, 0xbb, 0x2f, 0x5f, 0x2f, 0xdb, 0xaf, 0x7d, 0x1a, 0x6c, 0xc5, + 0xa9, 0xca, 0xa2, 0x3c, 0x35, 0xd4, 0x3a, 0x35, 0xd3, 0x24, 0x2f, 0x4f, 0x61, 0x77, 0x78, 0xab, + 0x45, 0xf1, 0x9f, 0x3b, 0xbc, 0x2b, 0xdf, 0xef, 0xf5, 0xc6, 0x08, 0xe3, 0x9f, 0xf5, 0xeb, 0x51, + 0x65, 0x85, 0xe3, 0xc2, 0xa8, 0x42, 0x1d, 0x4d, 0xfb, 0x86, 0xbe, 0x9c, 0xe2, 0xd7, 0x0e, 0x9f, + 0xe1, 0xb8, 0x98, 0xed, 0xf1, 0xd9, 0xb4, 0x3f, 0x2b, 0xf1, 0xdf, 0xf5, 0x9b, 0xaa, 0x68, 0x59, + 0x38, 0x2e, 0x2c, 0x6b, 0xcf, 0xb0, 0xac, 0x69, 0xdf, 0xb2, 0x4a, 0xce, 0xe7, 0xe3, 0x72, 0xb0, + 0x0f, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x5e, 0x7d, 0xf3, 0x33, 0x1f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_extension_targeting_criterion_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_extension_targeting_criterion_field.pb.go new file mode 100644 index 0000000..4ea0c05 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_extension_targeting_criterion_field.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/location_extension_targeting_criterion_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Location Extension Targeting criterion fields. +type LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField int32 + +const ( + // Not specified. + LocationExtensionTargetingCriterionFieldEnum_UNSPECIFIED LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 0 + // Used for return value only. Represents value unknown in this version. + LocationExtensionTargetingCriterionFieldEnum_UNKNOWN LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 1 + // Data Type: STRING. Line 1 of the business address. + LocationExtensionTargetingCriterionFieldEnum_ADDRESS_LINE_1 LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 2 + // Data Type: STRING. Line 2 of the business address. + LocationExtensionTargetingCriterionFieldEnum_ADDRESS_LINE_2 LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 3 + // Data Type: STRING. City of the business address. + LocationExtensionTargetingCriterionFieldEnum_CITY LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 4 + // Data Type: STRING. Province of the business address. + LocationExtensionTargetingCriterionFieldEnum_PROVINCE LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 5 + // Data Type: STRING. Postal code of the business address. + LocationExtensionTargetingCriterionFieldEnum_POSTAL_CODE LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 6 + // Data Type: STRING. Country code of the business address. 
+ LocationExtensionTargetingCriterionFieldEnum_COUNTRY_CODE LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField = 7 +) + +var LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ADDRESS_LINE_1", + 3: "ADDRESS_LINE_2", + 4: "CITY", + 5: "PROVINCE", + 6: "POSTAL_CODE", + 7: "COUNTRY_CODE", +} +var LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ADDRESS_LINE_1": 2, + "ADDRESS_LINE_2": 3, + "CITY": 4, + "PROVINCE": 5, + "POSTAL_CODE": 6, + "COUNTRY_CODE": 7, +} + +func (x LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField) String() string { + return proto.EnumName(LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField_name, int32(x)) +} +func (LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_location_extension_targeting_criterion_field_ae2bb4571840bb1c, []int{0, 0} +} + +// Values for Location Extension Targeting criterion fields. +type LocationExtensionTargetingCriterionFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationExtensionTargetingCriterionFieldEnum) Reset() { + *m = LocationExtensionTargetingCriterionFieldEnum{} +} +func (m *LocationExtensionTargetingCriterionFieldEnum) String() string { + return proto.CompactTextString(m) +} +func (*LocationExtensionTargetingCriterionFieldEnum) ProtoMessage() {} +func (*LocationExtensionTargetingCriterionFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_location_extension_targeting_criterion_field_ae2bb4571840bb1c, []int{0} +} +func (m *LocationExtensionTargetingCriterionFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum.Unmarshal(m, b) +} +func (m *LocationExtensionTargetingCriterionFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum.Marshal(b, m, deterministic) +} +func (dst *LocationExtensionTargetingCriterionFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum.Merge(dst, src) +} +func (m *LocationExtensionTargetingCriterionFieldEnum) XXX_Size() int { + return xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum.Size(m) +} +func (m *LocationExtensionTargetingCriterionFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationExtensionTargetingCriterionFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocationExtensionTargetingCriterionFieldEnum)(nil), "google.ads.googleads.v1.enums.LocationExtensionTargetingCriterionFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField", LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField_name, LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/location_extension_targeting_criterion_field.proto", 
fileDescriptor_location_extension_targeting_criterion_field_ae2bb4571840bb1c) +} + +var fileDescriptor_location_extension_targeting_criterion_field_ae2bb4571840bb1c = []byte{ + // 383 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0xae, 0x93, 0x40, + 0x18, 0x85, 0x85, 0x7b, 0xbd, 0xf7, 0x66, 0xda, 0xe8, 0x64, 0x96, 0xc6, 0x2e, 0xda, 0x95, 0x0b, + 0x1d, 0x82, 0xee, 0xc6, 0x15, 0x85, 0x69, 0x43, 0x6c, 0x80, 0x14, 0x8a, 0xa9, 0x21, 0x21, 0x58, + 0xc6, 0x09, 0x09, 0x9d, 0x69, 0x18, 0xda, 0xb8, 0xf6, 0x51, 0x5c, 0xea, 0x9b, 0xf8, 0x0c, 0x3e, + 0x81, 0x4f, 0x61, 0x06, 0x4a, 0x37, 0xc6, 0x9b, 0x6e, 0xc8, 0xc9, 0xf9, 0x67, 0xbe, 0xc3, 0x9c, + 0x1f, 0x44, 0x5c, 0x4a, 0x5e, 0x33, 0xab, 0x28, 0x95, 0xd5, 0x4b, 0xad, 0x4e, 0xb6, 0xc5, 0xc4, + 0x71, 0xaf, 0xac, 0x5a, 0xee, 0x8a, 0xb6, 0x92, 0x22, 0x67, 0x5f, 0x5b, 0x26, 0x94, 0x56, 0x6d, + 0xd1, 0x70, 0xd6, 0x56, 0x82, 0xe7, 0xbb, 0xa6, 0x6a, 0x59, 0xa3, 0xbd, 0x2f, 0x15, 0xab, 0x4b, + 0x7c, 0x68, 0x64, 0x2b, 0xd1, 0xa4, 0xc7, 0xe0, 0xa2, 0x54, 0xf8, 0x42, 0xc4, 0x27, 0x1b, 0x77, + 0xc4, 0x17, 0x2f, 0x87, 0xc0, 0x43, 0x65, 0x15, 0x42, 0xc8, 0xb6, 0xe3, 0xab, 0xfe, 0xf2, 0xec, + 0xb7, 0x01, 0x5e, 0xaf, 0xce, 0x99, 0x74, 0x88, 0x4c, 0x86, 0x44, 0x77, 0x08, 0x5c, 0xe8, 0x3c, + 0x2a, 0x8e, 0xfb, 0xd9, 0x4f, 0x03, 0xbc, 0xba, 0xf6, 0x02, 0x7a, 0x0e, 0x46, 0x9b, 0x20, 0x8e, + 0xa8, 0xeb, 0x2f, 0x7c, 0xea, 0xc1, 0x27, 0x68, 0x04, 0xee, 0x37, 0xc1, 0x87, 0x20, 0xfc, 0x18, + 0x40, 0x03, 0x21, 0xf0, 0xcc, 0xf1, 0xbc, 0x35, 0x8d, 0xe3, 0x7c, 0xe5, 0x07, 0x34, 0xb7, 0xa1, + 0xf9, 0x8f, 0xf7, 0x16, 0xde, 0xa0, 0x07, 0x70, 0xeb, 0xfa, 0xc9, 0x16, 0xde, 0xa2, 0x31, 0x78, + 0x88, 0xd6, 0x61, 0xea, 0x07, 0x2e, 0x85, 0x4f, 0x35, 0x3d, 0x0a, 0xe3, 0xc4, 0x59, 0xe5, 0x6e, + 0xe8, 0x51, 0x78, 0x87, 0x20, 0x18, 0xbb, 0xe1, 0x26, 0x48, 0xd6, 0xdb, 0xde, 0xb9, 0x9f, 0x7f, + 0x33, 0xc1, 0x74, 0x27, 0xf7, 0xf8, 0xd1, 0x8a, 0xe6, 0x6f, 0xae, 0x7d, 0x50, 0xa4, 0x3b, 0x8b, + 0x8c, 0x4f, 0xf3, 0x33, 0x8f, 0xcb, 0xba, 0x10, 0x1c, 0xcb, 0x86, 0x5b, 0x9c, 0x89, 0xae, 0xd1, + 0x61, 0xa9, 0x87, 0x4a, 0xfd, 0x67, 0xc7, 0xef, 0xbb, 0xef, 0x77, 0xf3, 0x66, 0xe9, 0x38, 0x3f, + 0xcc, 0xc9, 0xb2, 0x47, 0x39, 0xa5, 0xc2, 0xbd, 0xd4, 0x2a, 0xb5, 0xb1, 0x6e, 0x5b, 0xfd, 0x1a, + 0xe6, 0x99, 0x53, 0xaa, 0xec, 0x32, 0xcf, 0x52, 0x3b, 0xeb, 0xe6, 0x7f, 0xcc, 0x69, 0x6f, 0x12, + 0xe2, 0x94, 0x8a, 0x90, 0xcb, 0x09, 0x42, 0x52, 0x9b, 0x90, 0xee, 0xcc, 0xe7, 0xbb, 0xee, 0xc7, + 0xde, 0xfd, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x13, 0x06, 0x99, 0xed, 0x7b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_group_radius_units.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_group_radius_units.pb.go new file mode 100644 index 0000000..f27938e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_group_radius_units.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/location_group_radius_units.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The unit of radius distance in location group (e.g. MILES) +type LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits int32 + +const ( + // Not specified. + LocationGroupRadiusUnitsEnum_UNSPECIFIED LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits = 0 + // Used for return value only. Represents value unknown in this version. + LocationGroupRadiusUnitsEnum_UNKNOWN LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits = 1 + // Meters + LocationGroupRadiusUnitsEnum_METERS LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits = 2 + // Miles + LocationGroupRadiusUnitsEnum_MILES LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits = 3 +) + +var LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "METERS", + 3: "MILES", +} +var LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "METERS": 2, + "MILES": 3, +} + +func (x LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits) String() string { + return proto.EnumName(LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits_name, int32(x)) +} +func (LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_location_group_radius_units_431f4b934ef27033, []int{0, 0} +} + +// Container for enum describing unit of radius in location group. 
+type LocationGroupRadiusUnitsEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationGroupRadiusUnitsEnum) Reset() { *m = LocationGroupRadiusUnitsEnum{} } +func (m *LocationGroupRadiusUnitsEnum) String() string { return proto.CompactTextString(m) } +func (*LocationGroupRadiusUnitsEnum) ProtoMessage() {} +func (*LocationGroupRadiusUnitsEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_location_group_radius_units_431f4b934ef27033, []int{0} +} +func (m *LocationGroupRadiusUnitsEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationGroupRadiusUnitsEnum.Unmarshal(m, b) +} +func (m *LocationGroupRadiusUnitsEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationGroupRadiusUnitsEnum.Marshal(b, m, deterministic) +} +func (dst *LocationGroupRadiusUnitsEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationGroupRadiusUnitsEnum.Merge(dst, src) +} +func (m *LocationGroupRadiusUnitsEnum) XXX_Size() int { + return xxx_messageInfo_LocationGroupRadiusUnitsEnum.Size(m) +} +func (m *LocationGroupRadiusUnitsEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LocationGroupRadiusUnitsEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationGroupRadiusUnitsEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocationGroupRadiusUnitsEnum)(nil), "google.ads.googleads.v1.enums.LocationGroupRadiusUnitsEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits", LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits_name, LocationGroupRadiusUnitsEnum_LocationGroupRadiusUnits_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/location_group_radius_units.proto", fileDescriptor_location_group_radius_units_431f4b934ef27033) +} + +var fileDescriptor_location_group_radius_units_431f4b934ef27033 = []byte{ + // 314 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x6a, 0xf2, 0x40, + 0x14, 0xc5, 0x3f, 0x23, 0x9f, 0xa5, 0xe3, 0xa2, 0x21, 0xab, 0x52, 0x74, 0xa1, 0x0f, 0x30, 0x43, + 0xe8, 0x6e, 0xba, 0x28, 0xb1, 0x9d, 0x8a, 0x54, 0xa3, 0x68, 0xb5, 0x50, 0x02, 0x32, 0x75, 0x64, + 0x08, 0x98, 0xb9, 0x21, 0x93, 0xf8, 0x40, 0x5d, 0xf6, 0x51, 0xfa, 0x28, 0x5d, 0xf6, 0x09, 0xca, + 0x4c, 0xfe, 0xec, 0xd2, 0x4d, 0x38, 0xe4, 0xdc, 0xfb, 0xbb, 0x67, 0x0e, 0xba, 0x97, 0x00, 0xf2, + 0x74, 0x24, 0x5c, 0x68, 0x52, 0x4a, 0xa3, 0xce, 0x3e, 0x39, 0xaa, 0x22, 0xd1, 0xe4, 0x04, 0x07, + 0x9e, 0xc7, 0xa0, 0xf6, 0x32, 0x83, 0x22, 0xdd, 0x67, 0x5c, 0xc4, 0x85, 0xde, 0x17, 0x2a, 0xce, + 0x35, 0x4e, 0x33, 0xc8, 0xc1, 0x1b, 0x96, 0x5b, 0x98, 0x0b, 0x8d, 0x1b, 0x00, 0x3e, 0xfb, 0xd8, + 0x02, 0x6e, 0x06, 0x35, 0x3f, 0x8d, 0x09, 0x57, 0x0a, 0x72, 0x8b, 0xab, 0x96, 0xc7, 0x80, 0x06, + 0xf3, 0xea, 0xc2, 0xd4, 0x1c, 0x58, 0x5b, 0xfe, 0xd6, 0xe0, 0x99, 0x2a, 0x92, 0xf1, 0x12, 0x5d, + 0xb7, 0xf9, 0xde, 0x15, 0xea, 0x6f, 0xc3, 0xcd, 0x8a, 0x3d, 0xcc, 0x9e, 0x66, 0xec, 0xd1, 0xfd, + 0xe7, 0xf5, 0xd1, 0xc5, 0x36, 0x7c, 0x0e, 0x97, 0xaf, 0xa1, 0xdb, 0xf1, 0x10, 0xea, 0x2d, 0xd8, + 0x0b, 0x5b, 0x6f, 0x5c, 0xc7, 0xbb, 0x44, 0xff, 0x17, 0xb3, 0x39, 0xdb, 0xb8, 0xdd, 0xc9, 0x4f, + 0x07, 0x8d, 0x0e, 0x90, 0xe0, 0x3f, 0x43, 0x4f, 0x86, 0x6d, 0x47, 0x57, 0x26, 0xf5, 0xaa, 0xf3, + 0x36, 0xa9, 0xf6, 0x25, 0x9c, 0xb8, 0x92, 0x18, 0x32, 0x49, 0xe4, 0x51, 0xd9, 0x37, 0xd5, 0x2d, + 0xa6, 0xb1, 0x6e, 0x29, 0xf5, 0xce, 0x7e, 0x3f, 0x9c, 0xee, 0x34, 
0x08, 0x3e, 0x9d, 0xe1, 0xb4, + 0x44, 0x05, 0x42, 0xe3, 0x52, 0x1a, 0xb5, 0xf3, 0xb1, 0x29, 0x40, 0x7f, 0xd5, 0x7e, 0x14, 0x08, + 0x1d, 0x35, 0x7e, 0xb4, 0xf3, 0x23, 0xeb, 0x7f, 0x3b, 0xa3, 0xf2, 0x27, 0xa5, 0x81, 0xd0, 0x94, + 0x36, 0x13, 0x94, 0xee, 0x7c, 0x4a, 0xed, 0xcc, 0x7b, 0xcf, 0x06, 0xbb, 0xfd, 0x0d, 0x00, 0x00, + 0xff, 0xff, 0x92, 0x60, 0x22, 0x8a, 0xec, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_placeholder_field.pb.go new file mode 100644 index 0000000..bef7188 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/location_placeholder_field.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/location_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Location placeholder fields. +type LocationPlaceholderFieldEnum_LocationPlaceholderField int32 + +const ( + // Not specified. + LocationPlaceholderFieldEnum_UNSPECIFIED LocationPlaceholderFieldEnum_LocationPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + LocationPlaceholderFieldEnum_UNKNOWN LocationPlaceholderFieldEnum_LocationPlaceholderField = 1 + // Data Type: STRING. The name of the business. + LocationPlaceholderFieldEnum_BUSINESS_NAME LocationPlaceholderFieldEnum_LocationPlaceholderField = 2 + // Data Type: STRING. Line 1 of the business address. + LocationPlaceholderFieldEnum_ADDRESS_LINE_1 LocationPlaceholderFieldEnum_LocationPlaceholderField = 3 + // Data Type: STRING. Line 2 of the business address. + LocationPlaceholderFieldEnum_ADDRESS_LINE_2 LocationPlaceholderFieldEnum_LocationPlaceholderField = 4 + // Data Type: STRING. City of the business address. + LocationPlaceholderFieldEnum_CITY LocationPlaceholderFieldEnum_LocationPlaceholderField = 5 + // Data Type: STRING. Province of the business address. + LocationPlaceholderFieldEnum_PROVINCE LocationPlaceholderFieldEnum_LocationPlaceholderField = 6 + // Data Type: STRING. Postal code of the business address. + LocationPlaceholderFieldEnum_POSTAL_CODE LocationPlaceholderFieldEnum_LocationPlaceholderField = 7 + // Data Type: STRING. Country code of the business address. + LocationPlaceholderFieldEnum_COUNTRY_CODE LocationPlaceholderFieldEnum_LocationPlaceholderField = 8 + // Data Type: STRING. Phone number of the business. 
+ LocationPlaceholderFieldEnum_PHONE_NUMBER LocationPlaceholderFieldEnum_LocationPlaceholderField = 9 +) + +var LocationPlaceholderFieldEnum_LocationPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BUSINESS_NAME", + 3: "ADDRESS_LINE_1", + 4: "ADDRESS_LINE_2", + 5: "CITY", + 6: "PROVINCE", + 7: "POSTAL_CODE", + 8: "COUNTRY_CODE", + 9: "PHONE_NUMBER", +} +var LocationPlaceholderFieldEnum_LocationPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BUSINESS_NAME": 2, + "ADDRESS_LINE_1": 3, + "ADDRESS_LINE_2": 4, + "CITY": 5, + "PROVINCE": 6, + "POSTAL_CODE": 7, + "COUNTRY_CODE": 8, + "PHONE_NUMBER": 9, +} + +func (x LocationPlaceholderFieldEnum_LocationPlaceholderField) String() string { + return proto.EnumName(LocationPlaceholderFieldEnum_LocationPlaceholderField_name, int32(x)) +} +func (LocationPlaceholderFieldEnum_LocationPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_location_placeholder_field_c804fc5510f17f55, []int{0, 0} +} + +// Values for Location placeholder fields. +type LocationPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationPlaceholderFieldEnum) Reset() { *m = LocationPlaceholderFieldEnum{} } +func (m *LocationPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*LocationPlaceholderFieldEnum) ProtoMessage() {} +func (*LocationPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_location_placeholder_field_c804fc5510f17f55, []int{0} +} +func (m *LocationPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *LocationPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *LocationPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationPlaceholderFieldEnum.Merge(dst, src) +} +func (m *LocationPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_LocationPlaceholderFieldEnum.Size(m) +} +func (m *LocationPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LocationPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocationPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.LocationPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.LocationPlaceholderFieldEnum_LocationPlaceholderField", LocationPlaceholderFieldEnum_LocationPlaceholderField_name, LocationPlaceholderFieldEnum_LocationPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/location_placeholder_field.proto", fileDescriptor_location_placeholder_field_c804fc5510f17f55) +} + +var fileDescriptor_location_placeholder_field_c804fc5510f17f55 = []byte{ + // 392 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0xae, 0x93, 0x40, + 0x14, 0x15, 0xde, 0xf3, 0xbd, 0x3a, 0xef, 0xa9, 0xe3, 0xac, 0x8c, 0x69, 0x17, 0xed, 0x07, 0x0c, + 0x41, 0x77, 0x63, 0x62, 0x32, 0xc0, 0xb4, 0x12, 0xdb, 0x81, 0x40, 0xc1, 0xd4, 0x90, 0x10, 0x2c, + 0x88, 0x24, 0x94, 0x21, 0x9d, 0xb6, 0x1f, 0xe4, 0xd2, 0xaf, 0x70, 0xed, 0x5f, 0xb8, 0x75, 0xe9, + 0x17, 0x98, 0x81, 0xb6, 0x2e, 0x4c, 0xdf, 0x86, 0x9c, 0x9c, 0x7b, 
0xcf, 0x39, 0xcc, 0xb9, 0xe0, + 0x5d, 0x29, 0x44, 0x59, 0x17, 0x46, 0x96, 0x4b, 0xa3, 0x87, 0x0a, 0x1d, 0x4c, 0xa3, 0x68, 0xf6, + 0x1b, 0x69, 0xd4, 0x62, 0x9d, 0xed, 0x2a, 0xd1, 0xa4, 0x6d, 0x9d, 0xad, 0x8b, 0xaf, 0xa2, 0xce, + 0x8b, 0x6d, 0xfa, 0xa5, 0x2a, 0xea, 0x1c, 0xb7, 0x5b, 0xb1, 0x13, 0x68, 0xd4, 0x8b, 0x70, 0x96, + 0x4b, 0x7c, 0xd6, 0xe3, 0x83, 0x89, 0x3b, 0xfd, 0xab, 0xe1, 0xc9, 0xbe, 0xad, 0x8c, 0xac, 0x69, + 0xc4, 0xae, 0x73, 0x93, 0xbd, 0x78, 0xf2, 0x4b, 0x03, 0xc3, 0xf9, 0x31, 0xc1, 0xff, 0x17, 0x30, + 0x55, 0xfe, 0xac, 0xd9, 0x6f, 0x26, 0x3f, 0x34, 0xf0, 0xf2, 0xd2, 0x02, 0x7a, 0x0e, 0xee, 0x22, + 0x1e, 0xfa, 0xcc, 0x76, 0xa7, 0x2e, 0x73, 0xe0, 0x23, 0x74, 0x07, 0x6e, 0x23, 0xfe, 0x81, 0x7b, + 0x1f, 0x39, 0xd4, 0xd0, 0x0b, 0xf0, 0xd4, 0x8a, 0x42, 0x97, 0xb3, 0x30, 0x4c, 0x39, 0x5d, 0x30, + 0xa8, 0x23, 0x04, 0x9e, 0x51, 0xc7, 0x09, 0x14, 0x33, 0x77, 0x39, 0x4b, 0x4d, 0x78, 0xf5, 0x1f, + 0xf7, 0x1a, 0x5e, 0xa3, 0x01, 0xb8, 0xb6, 0xdd, 0xe5, 0x0a, 0x3e, 0x46, 0xf7, 0x60, 0xe0, 0x07, + 0x5e, 0xec, 0x72, 0x9b, 0xc1, 0x1b, 0x15, 0xe8, 0x7b, 0xe1, 0x92, 0xce, 0x53, 0xdb, 0x73, 0x18, + 0xbc, 0x45, 0x10, 0xdc, 0xdb, 0x5e, 0xc4, 0x97, 0xc1, 0xaa, 0x67, 0x06, 0x8a, 0xf1, 0xdf, 0x7b, + 0x9c, 0xa5, 0x3c, 0x5a, 0x58, 0x2c, 0x80, 0x4f, 0xac, 0x3f, 0x1a, 0x18, 0xaf, 0xc5, 0x06, 0x3f, + 0xd8, 0x93, 0x35, 0xba, 0xf4, 0x4a, 0x5f, 0x15, 0xe5, 0x6b, 0x9f, 0xac, 0xa3, 0xbe, 0x14, 0x75, + 0xd6, 0x94, 0x58, 0x6c, 0x4b, 0xa3, 0x2c, 0x9a, 0xae, 0xc6, 0xd3, 0xdd, 0xda, 0x4a, 0x5e, 0x38, + 0xe3, 0xdb, 0xee, 0xfb, 0x4d, 0xbf, 0x9a, 0x51, 0xfa, 0x5d, 0x1f, 0xcd, 0x7a, 0x2b, 0x9a, 0x4b, + 0xdc, 0x43, 0x85, 0x62, 0x13, 0xab, 0xca, 0xe5, 0xcf, 0xd3, 0x3c, 0xa1, 0xb9, 0x4c, 0xce, 0xf3, + 0x24, 0x36, 0x93, 0x6e, 0xfe, 0x5b, 0x1f, 0xf7, 0x24, 0x21, 0x34, 0x97, 0x84, 0x9c, 0x37, 0x08, + 0x89, 0x4d, 0x42, 0xba, 0x9d, 0xcf, 0x37, 0xdd, 0x8f, 0xbd, 0xf9, 0x1b, 0x00, 0x00, 0xff, 0xff, + 0x63, 0x67, 0x22, 0x40, 0x5e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/manager_link_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/manager_link_status.pb.go new file mode 100644 index 0000000..3a5dde9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/manager_link_status.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/manager_link_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible statuses of a link. +type ManagerLinkStatusEnum_ManagerLinkStatus int32 + +const ( + // Not specified. + ManagerLinkStatusEnum_UNSPECIFIED ManagerLinkStatusEnum_ManagerLinkStatus = 0 + // Used for return value only. Represents value unknown in this version. 
+ ManagerLinkStatusEnum_UNKNOWN ManagerLinkStatusEnum_ManagerLinkStatus = 1 + // Indicates current in-effect relationship + ManagerLinkStatusEnum_ACTIVE ManagerLinkStatusEnum_ManagerLinkStatus = 2 + // Indicates terminated relationship + ManagerLinkStatusEnum_INACTIVE ManagerLinkStatusEnum_ManagerLinkStatus = 3 + // Indicates relationship has been requested by manager, but the client + // hasn't accepted yet. + ManagerLinkStatusEnum_PENDING ManagerLinkStatusEnum_ManagerLinkStatus = 4 + // Relationship was requested by the manager, but the client has refused. + ManagerLinkStatusEnum_REFUSED ManagerLinkStatusEnum_ManagerLinkStatus = 5 + // Indicates relationship has been requested by manager, but manager + // canceled it. + ManagerLinkStatusEnum_CANCELED ManagerLinkStatusEnum_ManagerLinkStatus = 6 +) + +var ManagerLinkStatusEnum_ManagerLinkStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ACTIVE", + 3: "INACTIVE", + 4: "PENDING", + 5: "REFUSED", + 6: "CANCELED", +} +var ManagerLinkStatusEnum_ManagerLinkStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ACTIVE": 2, + "INACTIVE": 3, + "PENDING": 4, + "REFUSED": 5, + "CANCELED": 6, +} + +func (x ManagerLinkStatusEnum_ManagerLinkStatus) String() string { + return proto.EnumName(ManagerLinkStatusEnum_ManagerLinkStatus_name, int32(x)) +} +func (ManagerLinkStatusEnum_ManagerLinkStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_manager_link_status_93d56526ebe80286, []int{0, 0} +} + +// Container for enum describing possible status of a manager and client link. +type ManagerLinkStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagerLinkStatusEnum) Reset() { *m = ManagerLinkStatusEnum{} } +func (m *ManagerLinkStatusEnum) String() string { return proto.CompactTextString(m) } +func (*ManagerLinkStatusEnum) ProtoMessage() {} +func (*ManagerLinkStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_manager_link_status_93d56526ebe80286, []int{0} +} +func (m *ManagerLinkStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagerLinkStatusEnum.Unmarshal(m, b) +} +func (m *ManagerLinkStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagerLinkStatusEnum.Marshal(b, m, deterministic) +} +func (dst *ManagerLinkStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagerLinkStatusEnum.Merge(dst, src) +} +func (m *ManagerLinkStatusEnum) XXX_Size() int { + return xxx_messageInfo_ManagerLinkStatusEnum.Size(m) +} +func (m *ManagerLinkStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ManagerLinkStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagerLinkStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ManagerLinkStatusEnum)(nil), "google.ads.googleads.v1.enums.ManagerLinkStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ManagerLinkStatusEnum_ManagerLinkStatus", ManagerLinkStatusEnum_ManagerLinkStatus_name, ManagerLinkStatusEnum_ManagerLinkStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/manager_link_status.proto", fileDescriptor_manager_link_status_93d56526ebe80286) +} + +var fileDescriptor_manager_link_status_93d56526ebe80286 = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x14, 0xfe, 0xb5, 0xfb, 0x39, 0x25, 0x13, 0xac, 0x05, 
0xbd, 0x10, 0x77, 0xb1, 0x3d, 0x40, 0x4a, + 0xf1, 0x42, 0x88, 0x57, 0x59, 0x9b, 0x8d, 0xe2, 0x8c, 0xc5, 0xb9, 0x0a, 0x52, 0x18, 0xd1, 0x96, + 0x50, 0xb6, 0x26, 0x63, 0xe9, 0xf6, 0x14, 0x3e, 0x85, 0x97, 0x3e, 0x8a, 0x8f, 0xa2, 0x2f, 0x21, + 0x49, 0xb7, 0xde, 0x0c, 0xbd, 0x09, 0xdf, 0xc9, 0xf7, 0x87, 0x73, 0x3e, 0x70, 0xcd, 0xa5, 0xe4, + 0x8b, 0xdc, 0x63, 0x99, 0xf2, 0x6a, 0xa8, 0xd1, 0xc6, 0xf7, 0x72, 0xb1, 0x2e, 0x95, 0x57, 0x32, + 0xc1, 0x78, 0xbe, 0x9a, 0x2d, 0x0a, 0x31, 0x9f, 0xa9, 0x8a, 0x55, 0x6b, 0x05, 0x97, 0x2b, 0x59, + 0x49, 0xb7, 0x5b, 0xab, 0x21, 0xcb, 0x14, 0x6c, 0x8c, 0x70, 0xe3, 0x43, 0x63, 0xbc, 0xb8, 0xdc, + 0xe5, 0x2e, 0x0b, 0x8f, 0x09, 0x21, 0x2b, 0x56, 0x15, 0x52, 0x6c, 0xcd, 0xfd, 0x37, 0x0b, 0x9c, + 0xdd, 0xd5, 0xd1, 0xe3, 0x42, 0xcc, 0x27, 0x26, 0x98, 0x88, 0x75, 0xd9, 0x57, 0xe0, 0x74, 0x8f, + 0x70, 0x4f, 0x40, 0x67, 0x4a, 0x27, 0x31, 0x09, 0xa2, 0x61, 0x44, 0x42, 0xe7, 0x9f, 0xdb, 0x01, + 0x87, 0x53, 0x7a, 0x4b, 0xef, 0x9f, 0xa8, 0x63, 0xb9, 0x00, 0xb4, 0x71, 0xf0, 0x18, 0x25, 0xc4, + 0xb1, 0xdd, 0x63, 0x70, 0x14, 0xd1, 0xed, 0xd4, 0xd2, 0xb2, 0x98, 0xd0, 0x30, 0xa2, 0x23, 0xe7, + 0xbf, 0x1e, 0x1e, 0xc8, 0x70, 0x3a, 0x21, 0xa1, 0x73, 0xa0, 0x75, 0x01, 0xa6, 0x01, 0x19, 0x93, + 0xd0, 0x69, 0x0f, 0xbe, 0x2d, 0xd0, 0x7b, 0x95, 0x25, 0xfc, 0xf3, 0xa4, 0xc1, 0xf9, 0xde, 0x62, + 0xb1, 0x3e, 0x26, 0xb6, 0x9e, 0x07, 0x5b, 0x23, 0x97, 0x0b, 0x26, 0x38, 0x94, 0x2b, 0xee, 0xf1, + 0x5c, 0x98, 0x53, 0x77, 0xa5, 0x2e, 0x0b, 0xf5, 0x4b, 0xc7, 0x37, 0xe6, 0x7d, 0xb7, 0x5b, 0x23, + 0x8c, 0x3f, 0xec, 0xee, 0xa8, 0x8e, 0xc2, 0x99, 0x82, 0x35, 0xd4, 0x28, 0xf1, 0xa1, 0x6e, 0x47, + 0x7d, 0xee, 0xf8, 0x14, 0x67, 0x2a, 0x6d, 0xf8, 0x34, 0xf1, 0x53, 0xc3, 0x7f, 0xd9, 0xbd, 0xfa, + 0x13, 0x21, 0x9c, 0x29, 0x84, 0x1a, 0x05, 0x42, 0x89, 0x8f, 0x90, 0xd1, 0xbc, 0xb4, 0xcd, 0x62, + 0x57, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x76, 0xf8, 0xde, 0xfb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_context_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_context_type.pb.go new file mode 100644 index 0000000..614fe67 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_context_type.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/matching_function_context_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible context types for an operand in a matching function. +type MatchingFunctionContextTypeEnum_MatchingFunctionContextType int32 + +const ( + // Not specified. + MatchingFunctionContextTypeEnum_UNSPECIFIED MatchingFunctionContextTypeEnum_MatchingFunctionContextType = 0 + // Used for return value only. Represents value unknown in this version. 
+ MatchingFunctionContextTypeEnum_UNKNOWN MatchingFunctionContextTypeEnum_MatchingFunctionContextType = 1 + // Feed item id in the request context. + MatchingFunctionContextTypeEnum_FEED_ITEM_ID MatchingFunctionContextTypeEnum_MatchingFunctionContextType = 2 + // The device being used (possible values are 'Desktop' or 'Mobile'). + MatchingFunctionContextTypeEnum_DEVICE_NAME MatchingFunctionContextTypeEnum_MatchingFunctionContextType = 3 +) + +var MatchingFunctionContextTypeEnum_MatchingFunctionContextType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FEED_ITEM_ID", + 3: "DEVICE_NAME", +} +var MatchingFunctionContextTypeEnum_MatchingFunctionContextType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FEED_ITEM_ID": 2, + "DEVICE_NAME": 3, +} + +func (x MatchingFunctionContextTypeEnum_MatchingFunctionContextType) String() string { + return proto.EnumName(MatchingFunctionContextTypeEnum_MatchingFunctionContextType_name, int32(x)) +} +func (MatchingFunctionContextTypeEnum_MatchingFunctionContextType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_matching_function_context_type_1f6d87ceb71b483c, []int{0, 0} +} + +// Container for context types for an operand in a matching function. +type MatchingFunctionContextTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MatchingFunctionContextTypeEnum) Reset() { *m = MatchingFunctionContextTypeEnum{} } +func (m *MatchingFunctionContextTypeEnum) String() string { return proto.CompactTextString(m) } +func (*MatchingFunctionContextTypeEnum) ProtoMessage() {} +func (*MatchingFunctionContextTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_context_type_1f6d87ceb71b483c, []int{0} +} +func (m *MatchingFunctionContextTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MatchingFunctionContextTypeEnum.Unmarshal(m, b) +} +func (m *MatchingFunctionContextTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MatchingFunctionContextTypeEnum.Marshal(b, m, deterministic) +} +func (dst *MatchingFunctionContextTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MatchingFunctionContextTypeEnum.Merge(dst, src) +} +func (m *MatchingFunctionContextTypeEnum) XXX_Size() int { + return xxx_messageInfo_MatchingFunctionContextTypeEnum.Size(m) +} +func (m *MatchingFunctionContextTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MatchingFunctionContextTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MatchingFunctionContextTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MatchingFunctionContextTypeEnum)(nil), "google.ads.googleads.v1.enums.MatchingFunctionContextTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MatchingFunctionContextTypeEnum_MatchingFunctionContextType", MatchingFunctionContextTypeEnum_MatchingFunctionContextType_name, MatchingFunctionContextTypeEnum_MatchingFunctionContextType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/matching_function_context_type.proto", fileDescriptor_matching_function_context_type_1f6d87ceb71b483c) +} + +var fileDescriptor_matching_function_context_type_1f6d87ceb71b483c = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0xb5, 0x1d, 0x28, 0x64, 0x82, 0xa5, 0x47, 0x75, 0xe8, 0xf6, 0x03, 0x52, 0x8a, 0xb7, 0x78, + 0xea, 
0xd6, 0x6c, 0x14, 0x59, 0x1d, 0xb8, 0x55, 0x90, 0x62, 0xa9, 0x6d, 0x8c, 0x85, 0x35, 0x29, + 0x4b, 0x36, 0xdc, 0xd1, 0xbf, 0xe2, 0xd1, 0x9f, 0xe2, 0x4f, 0xf1, 0xee, 0x5d, 0x92, 0x6c, 0xbb, + 0xd9, 0x4b, 0x78, 0xe4, 0x7b, 0xdf, 0x7b, 0xdf, 0x7b, 0x60, 0x48, 0x39, 0xa7, 0x4b, 0xe2, 0xe5, + 0xa5, 0xf0, 0x0c, 0x54, 0x68, 0xe3, 0x7b, 0x84, 0xad, 0x6b, 0xe1, 0xd5, 0xb9, 0x2c, 0xde, 0x2a, + 0x46, 0xb3, 0xd7, 0x35, 0x2b, 0x64, 0xc5, 0x59, 0x56, 0x70, 0x26, 0xc9, 0xbb, 0xcc, 0xe4, 0xb6, + 0x21, 0xb0, 0x59, 0x71, 0xc9, 0xdd, 0x9e, 0x59, 0x84, 0x79, 0x29, 0xe0, 0x41, 0x03, 0x6e, 0x7c, + 0xa8, 0x35, 0xce, 0x2f, 0xf7, 0x16, 0x4d, 0xe5, 0xe5, 0x8c, 0x71, 0x99, 0x2b, 0x21, 0x61, 0x96, + 0x07, 0x1f, 0x16, 0xb8, 0x9a, 0xee, 0x5c, 0xc6, 0x3b, 0x93, 0x91, 0xf1, 0x98, 0x6f, 0x1b, 0x82, + 0xd9, 0xba, 0x1e, 0x3c, 0x83, 0x8b, 0x16, 0x8a, 0x7b, 0x06, 0xba, 0x8b, 0xf8, 0x61, 0x86, 0x47, + 0xd1, 0x38, 0xc2, 0xa1, 0x73, 0xe4, 0x76, 0xc1, 0xc9, 0x22, 0xbe, 0x8b, 0xef, 0x1f, 0x63, 0xc7, + 0x72, 0x1d, 0x70, 0x3a, 0xc6, 0x38, 0xcc, 0xa2, 0x39, 0x9e, 0x66, 0x51, 0xe8, 0xd8, 0x8a, 0x1f, + 0xe2, 0x24, 0x1a, 0xe1, 0x2c, 0x0e, 0xa6, 0xd8, 0xe9, 0x0c, 0x7f, 0x2d, 0xd0, 0x2f, 0x78, 0x0d, + 0x5b, 0x73, 0x0c, 0xaf, 0x5b, 0x6e, 0x98, 0xa9, 0x2c, 0x33, 0xeb, 0x69, 0x57, 0x27, 0xa4, 0x7c, + 0x99, 0x33, 0x0a, 0xf9, 0x8a, 0x7a, 0x94, 0x30, 0x9d, 0x74, 0x5f, 0x6f, 0x53, 0x89, 0x7f, 0xda, + 0xbe, 0xd5, 0xef, 0xa7, 0xdd, 0x99, 0x04, 0xc1, 0x97, 0xdd, 0x9b, 0x18, 0xa9, 0xa0, 0x14, 0xd0, + 0x40, 0x85, 0x12, 0x1f, 0xaa, 0x4a, 0xc4, 0xf7, 0x7e, 0x9e, 0x06, 0xa5, 0x48, 0x0f, 0xf3, 0x34, + 0xf1, 0x53, 0x3d, 0xff, 0xb1, 0xfb, 0xe6, 0x13, 0xa1, 0xa0, 0x14, 0x08, 0x1d, 0x18, 0x08, 0x25, + 0x3e, 0x42, 0x9a, 0xf3, 0x72, 0xac, 0x0f, 0xbb, 0xf9, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xea, 0x26, + 0xb5, 0xd3, 0x05, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_operator.pb.go new file mode 100644 index 0000000..76fd8f1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/matching_function_operator.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/matching_function_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible operators in a matching function. +type MatchingFunctionOperatorEnum_MatchingFunctionOperator int32 + +const ( + // Not specified. + MatchingFunctionOperatorEnum_UNSPECIFIED MatchingFunctionOperatorEnum_MatchingFunctionOperator = 0 + // Used for return value only. Represents value unknown in this version. + MatchingFunctionOperatorEnum_UNKNOWN MatchingFunctionOperatorEnum_MatchingFunctionOperator = 1 + // The IN operator. 
+ MatchingFunctionOperatorEnum_IN MatchingFunctionOperatorEnum_MatchingFunctionOperator = 2 + // The IDENTITY operator. + MatchingFunctionOperatorEnum_IDENTITY MatchingFunctionOperatorEnum_MatchingFunctionOperator = 3 + // The EQUALS operator + MatchingFunctionOperatorEnum_EQUALS MatchingFunctionOperatorEnum_MatchingFunctionOperator = 4 + // Operator that takes two or more operands that are of type + // FunctionOperand and checks that all the operands evaluate to true. + // For functions related to ad formats, all the operands must be in + // left_operands. + MatchingFunctionOperatorEnum_AND MatchingFunctionOperatorEnum_MatchingFunctionOperator = 5 + // Operator that returns true if the elements in left_operands contain any + // of the elements in right_operands. Otherwise, return false. The + // right_operands must contain at least 1 and no more than 3 + // ConstantOperands. + MatchingFunctionOperatorEnum_CONTAINS_ANY MatchingFunctionOperatorEnum_MatchingFunctionOperator = 6 +) + +var MatchingFunctionOperatorEnum_MatchingFunctionOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IN", + 3: "IDENTITY", + 4: "EQUALS", + 5: "AND", + 6: "CONTAINS_ANY", +} +var MatchingFunctionOperatorEnum_MatchingFunctionOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IN": 2, + "IDENTITY": 3, + "EQUALS": 4, + "AND": 5, + "CONTAINS_ANY": 6, +} + +func (x MatchingFunctionOperatorEnum_MatchingFunctionOperator) String() string { + return proto.EnumName(MatchingFunctionOperatorEnum_MatchingFunctionOperator_name, int32(x)) +} +func (MatchingFunctionOperatorEnum_MatchingFunctionOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_matching_function_operator_2b1a251212d826f2, []int{0, 0} +} + +// Container for enum describing matching function operator. 
+type MatchingFunctionOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MatchingFunctionOperatorEnum) Reset() { *m = MatchingFunctionOperatorEnum{} } +func (m *MatchingFunctionOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*MatchingFunctionOperatorEnum) ProtoMessage() {} +func (*MatchingFunctionOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_matching_function_operator_2b1a251212d826f2, []int{0} +} +func (m *MatchingFunctionOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MatchingFunctionOperatorEnum.Unmarshal(m, b) +} +func (m *MatchingFunctionOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MatchingFunctionOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *MatchingFunctionOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MatchingFunctionOperatorEnum.Merge(dst, src) +} +func (m *MatchingFunctionOperatorEnum) XXX_Size() int { + return xxx_messageInfo_MatchingFunctionOperatorEnum.Size(m) +} +func (m *MatchingFunctionOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MatchingFunctionOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MatchingFunctionOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MatchingFunctionOperatorEnum)(nil), "google.ads.googleads.v1.enums.MatchingFunctionOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MatchingFunctionOperatorEnum_MatchingFunctionOperator", MatchingFunctionOperatorEnum_MatchingFunctionOperator_name, MatchingFunctionOperatorEnum_MatchingFunctionOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/matching_function_operator.proto", fileDescriptor_matching_function_operator_2b1a251212d826f2) +} + +var fileDescriptor_matching_function_operator_2b1a251212d826f2 = []byte{ + // 345 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x6a, 0xf3, 0x30, + 0x14, 0xfc, 0xec, 0x7c, 0x75, 0x8a, 0x12, 0xa8, 0xd0, 0xaa, 0x94, 0x64, 0x91, 0x1c, 0x40, 0xc6, + 0x74, 0xa7, 0x42, 0x41, 0x49, 0x9c, 0x60, 0xda, 0x2a, 0x29, 0xf9, 0x29, 0x29, 0x86, 0xa0, 0xc6, + 0xae, 0x6a, 0x88, 0x25, 0x63, 0xd9, 0xb9, 0x49, 0x2f, 0xd0, 0x65, 0x8f, 0xd2, 0xa3, 0x74, 0xd9, + 0x13, 0x14, 0xff, 0x24, 0xbb, 0x74, 0x23, 0x06, 0xcd, 0x9b, 0x79, 0x6f, 0x06, 0xdc, 0x0a, 0xa5, + 0xc4, 0x2e, 0xb4, 0x79, 0xa0, 0xed, 0x0a, 0x16, 0x68, 0xef, 0xd8, 0xa1, 0xcc, 0x63, 0x6d, 0xc7, + 0x3c, 0xdb, 0xbe, 0x45, 0x52, 0x6c, 0x5e, 0x73, 0xb9, 0xcd, 0x22, 0x25, 0x37, 0x2a, 0x09, 0x53, + 0x9e, 0xa9, 0x14, 0x27, 0xa9, 0xca, 0x14, 0xea, 0x56, 0x22, 0xcc, 0x03, 0x8d, 0x8f, 0x7a, 0xbc, + 0x77, 0x70, 0xa9, 0xbf, 0xea, 0x1c, 0xec, 0x93, 0xc8, 0xe6, 0x52, 0xaa, 0x8c, 0x17, 0x26, 0xba, + 0x12, 0xf7, 0xdf, 0x0d, 0xd0, 0x79, 0xa8, 0x37, 0x8c, 0xeb, 0x05, 0xd3, 0xda, 0xdf, 0x95, 0x79, + 0xdc, 0xcf, 0xc1, 0xe5, 0x29, 0x1e, 0x5d, 0x80, 0xd6, 0x92, 0xcd, 0x67, 0xee, 0xd0, 0x1b, 0x7b, + 0xee, 0x08, 0xfe, 0x43, 0x2d, 0xd0, 0x5c, 0xb2, 0x3b, 0x36, 0x7d, 0x62, 0xd0, 0x40, 0x16, 0x30, + 0x3d, 0x06, 0x4d, 0xd4, 0x06, 0xe7, 0xde, 0xc8, 0x65, 0x0b, 0x6f, 0xb1, 0x86, 0x0d, 0x04, 0x80, + 0xe5, 0x3e, 0x2e, 0xe9, 0xfd, 0x1c, 0xfe, 0x47, 0x4d, 0xd0, 0xa0, 0x6c, 0x04, 0xcf, 0x10, 0x04, + 0xed, 0xe1, 0x94, 0x2d, 0xa8, 0xc7, 0xe6, 0x1b, 0xca, 0xd6, 0xd0, 0x1a, 0xfc, 0x18, 0xa0, 0xb7, + 0x55, 0x31, 0xfe, 0x33, 0xdb, 0xa0, 0x7b, 0xea, 0xb4, 0x59, 0x11, 
0x6e, 0x66, 0x3c, 0x0f, 0x6a, + 0xbd, 0x50, 0x3b, 0x2e, 0x05, 0x56, 0xa9, 0xb0, 0x45, 0x28, 0xcb, 0xe8, 0x87, 0xae, 0x93, 0x48, + 0x9f, 0xa8, 0xfe, 0xa6, 0x7c, 0x3f, 0xcc, 0xc6, 0x84, 0xd2, 0x4f, 0xb3, 0x3b, 0xa9, 0xac, 0x68, + 0xa0, 0x71, 0x05, 0x0b, 0xb4, 0x72, 0x70, 0x51, 0x93, 0xfe, 0x3a, 0xf0, 0x3e, 0x0d, 0xb4, 0x7f, + 0xe4, 0xfd, 0x95, 0xe3, 0x97, 0xfc, 0xb7, 0xd9, 0xab, 0x3e, 0x09, 0xa1, 0x81, 0x26, 0xe4, 0x38, + 0x41, 0xc8, 0xca, 0x21, 0xa4, 0x9c, 0x79, 0xb1, 0xca, 0xc3, 0xae, 0x7f, 0x03, 0x00, 0x00, 0xff, + 0xff, 0x5b, 0x67, 0x87, 0x8d, 0x12, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/media_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/media_type.pb.go new file mode 100644 index 0000000..bab0033 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/media_type.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/media_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of media. +type MediaTypeEnum_MediaType int32 + +const ( + // The media type has not been specified. + MediaTypeEnum_UNSPECIFIED MediaTypeEnum_MediaType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + MediaTypeEnum_UNKNOWN MediaTypeEnum_MediaType = 1 + // Static image, used for image ad. + MediaTypeEnum_IMAGE MediaTypeEnum_MediaType = 2 + // Small image, used for map ad. + MediaTypeEnum_ICON MediaTypeEnum_MediaType = 3 + // ZIP file, used in fields of template ads. + MediaTypeEnum_MEDIA_BUNDLE MediaTypeEnum_MediaType = 4 + // Audio file. + MediaTypeEnum_AUDIO MediaTypeEnum_MediaType = 5 + // Video file. + MediaTypeEnum_VIDEO MediaTypeEnum_MediaType = 6 + // Animated image, such as animated GIF. + MediaTypeEnum_DYNAMIC_IMAGE MediaTypeEnum_MediaType = 7 +) + +var MediaTypeEnum_MediaType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IMAGE", + 3: "ICON", + 4: "MEDIA_BUNDLE", + 5: "AUDIO", + 6: "VIDEO", + 7: "DYNAMIC_IMAGE", +} +var MediaTypeEnum_MediaType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IMAGE": 2, + "ICON": 3, + "MEDIA_BUNDLE": 4, + "AUDIO": 5, + "VIDEO": 6, + "DYNAMIC_IMAGE": 7, +} + +func (x MediaTypeEnum_MediaType) String() string { + return proto.EnumName(MediaTypeEnum_MediaType_name, int32(x)) +} +func (MediaTypeEnum_MediaType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_media_type_6fdd7d6f52c48143, []int{0, 0} +} + +// Container for enum describing the types of media. 
+type MediaTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaTypeEnum) Reset() { *m = MediaTypeEnum{} } +func (m *MediaTypeEnum) String() string { return proto.CompactTextString(m) } +func (*MediaTypeEnum) ProtoMessage() {} +func (*MediaTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_media_type_6fdd7d6f52c48143, []int{0} +} +func (m *MediaTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaTypeEnum.Unmarshal(m, b) +} +func (m *MediaTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaTypeEnum.Marshal(b, m, deterministic) +} +func (dst *MediaTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaTypeEnum.Merge(dst, src) +} +func (m *MediaTypeEnum) XXX_Size() int { + return xxx_messageInfo_MediaTypeEnum.Size(m) +} +func (m *MediaTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MediaTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MediaTypeEnum)(nil), "google.ads.googleads.v1.enums.MediaTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MediaTypeEnum_MediaType", MediaTypeEnum_MediaType_name, MediaTypeEnum_MediaType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/media_type.proto", fileDescriptor_media_type_6fdd7d6f52c48143) +} + +var fileDescriptor_media_type_6fdd7d6f52c48143 = []byte{ + // 338 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4f, 0x4e, 0x83, 0x40, + 0x18, 0xc5, 0x85, 0xfe, 0xb3, 0x53, 0xab, 0x23, 0x4b, 0x63, 0x17, 0xed, 0x01, 0x86, 0x10, 0x77, + 0xe3, 0x6a, 0x28, 0xd8, 0x4c, 0x14, 0x68, 0xa2, 0x60, 0x34, 0x24, 0x0d, 0x0a, 0x21, 0x24, 0x65, + 0x86, 0x74, 0x68, 0x93, 0x5e, 0xc1, 0x63, 0xb8, 0xf4, 0x28, 0x5e, 0xc3, 0x9d, 0xa7, 0x30, 0xc3, + 0x58, 0x76, 0xba, 0x99, 0xbc, 0xcc, 0xf7, 0xfb, 0x5e, 0xbe, 0xf7, 0x00, 0xca, 0x39, 0xcf, 0xd7, + 0x99, 0x99, 0xa4, 0xc2, 0x54, 0x52, 0xaa, 0x9d, 0x65, 0x66, 0x6c, 0x5b, 0x0a, 0xb3, 0xcc, 0xd2, + 0x22, 0x59, 0xd5, 0xfb, 0x2a, 0x43, 0xd5, 0x86, 0xd7, 0xdc, 0x98, 0x28, 0x08, 0x25, 0xa9, 0x40, + 0x2d, 0x8f, 0x76, 0x16, 0x6a, 0xf8, 0x8b, 0xcb, 0x83, 0x5d, 0x55, 0x98, 0x09, 0x63, 0xbc, 0x4e, + 0xea, 0x82, 0x33, 0xa1, 0x96, 0x67, 0x6f, 0x1a, 0x18, 0x7b, 0xd2, 0xf1, 0x61, 0x5f, 0x65, 0x2e, + 0xdb, 0x96, 0xb3, 0x3d, 0x18, 0xb6, 0x1f, 0xc6, 0x19, 0x18, 0x85, 0xfe, 0xfd, 0xd2, 0x9d, 0xd3, + 0x1b, 0xea, 0x3a, 0xf0, 0xc8, 0x18, 0x81, 0x41, 0xe8, 0xdf, 0xfa, 0xc1, 0xa3, 0x0f, 0x35, 0x63, + 0x08, 0x7a, 0xd4, 0x23, 0x0b, 0x17, 0xea, 0xc6, 0x31, 0xe8, 0xd2, 0x79, 0xe0, 0xc3, 0x8e, 0x01, + 0xc1, 0x89, 0xe7, 0x3a, 0x94, 0xac, 0xec, 0xd0, 0x77, 0xee, 0x5c, 0xd8, 0x95, 0x18, 0x09, 0x1d, + 0x1a, 0xc0, 0x9e, 0x94, 0x11, 0x75, 0xdc, 0x00, 0xf6, 0x8d, 0x73, 0x30, 0x76, 0x9e, 0x7c, 0xe2, + 0xd1, 0xf9, 0x4a, 0x99, 0x0c, 0xec, 0x2f, 0x0d, 0x4c, 0x5f, 0x79, 0x89, 0xfe, 0x0d, 0x64, 0x9f, + 0xb6, 0xe7, 0x2d, 0x65, 0x84, 0xa5, 0xf6, 0x6c, 0xff, 0x2e, 0xe4, 0x7c, 0x9d, 0xb0, 0x1c, 0xf1, + 0x4d, 0x6e, 0xe6, 0x19, 0x6b, 0x02, 0x1e, 0x1a, 0xac, 0x0a, 0xf1, 0x47, 0xa1, 0xd7, 0xcd, 0xfb, + 0xae, 0x77, 0x16, 0x84, 0x7c, 0xe8, 0x93, 0x85, 0xb2, 0x22, 0xa9, 0x40, 0x4a, 0x4a, 0x15, 0x59, + 0x48, 0x76, 0x23, 0x3e, 0x0f, 0xf3, 0x98, 0xa4, 0x22, 0x6e, 0xe7, 0x71, 0x64, 0xc5, 0xcd, 0xfc, + 0x5b, 0x9f, 0xaa, 0x4f, 0x8c, 0x49, 0x2a, 0x30, 0x6e, 0x09, 0x8c, 0x23, 0x0b, 0xe3, 0x86, 0x79, + 0xe9, 0x37, 0x87, 
0x5d, 0xfd, 0x04, 0x00, 0x00, 0xff, 0xff, 0x4b, 0xd1, 0x03, 0x89, 0xe8, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/merchant_center_link_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/merchant_center_link_status.pb.go new file mode 100644 index 0000000..73fb4fc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/merchant_center_link_status.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/merchant_center_link_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the possible statuses for a link between a Google Ads customer +// and a Google Merchant Center account. +type MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus int32 + +const ( + // Not specified. + MerchantCenterLinkStatusEnum_UNSPECIFIED MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus = 0 + // Used for return value only. Represents value unknown in this version. + MerchantCenterLinkStatusEnum_UNKNOWN MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus = 1 + // The link is enabled. + MerchantCenterLinkStatusEnum_ENABLED MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus = 2 + // The link has no effect. It was proposed by the Merchant Center Account + // owner and hasn't been confirmed by the customer. + MerchantCenterLinkStatusEnum_PENDING MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus = 3 +) + +var MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "PENDING", +} +var MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "PENDING": 3, +} + +func (x MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus) String() string { + return proto.EnumName(MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus_name, int32(x)) +} +func (MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_status_ed4bf61ab73ed71c, []int{0, 0} +} + +// Container for enum describing possible statuses of a Google Merchant Center +// link. 
+type MerchantCenterLinkStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MerchantCenterLinkStatusEnum) Reset() { *m = MerchantCenterLinkStatusEnum{} } +func (m *MerchantCenterLinkStatusEnum) String() string { return proto.CompactTextString(m) } +func (*MerchantCenterLinkStatusEnum) ProtoMessage() {} +func (*MerchantCenterLinkStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_status_ed4bf61ab73ed71c, []int{0} +} +func (m *MerchantCenterLinkStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MerchantCenterLinkStatusEnum.Unmarshal(m, b) +} +func (m *MerchantCenterLinkStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MerchantCenterLinkStatusEnum.Marshal(b, m, deterministic) +} +func (dst *MerchantCenterLinkStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MerchantCenterLinkStatusEnum.Merge(dst, src) +} +func (m *MerchantCenterLinkStatusEnum) XXX_Size() int { + return xxx_messageInfo_MerchantCenterLinkStatusEnum.Size(m) +} +func (m *MerchantCenterLinkStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MerchantCenterLinkStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MerchantCenterLinkStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MerchantCenterLinkStatusEnum)(nil), "google.ads.googleads.v1.enums.MerchantCenterLinkStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus", MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus_name, MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/merchant_center_link_status.proto", fileDescriptor_merchant_center_link_status_ed4bf61ab73ed71c) +} + +var fileDescriptor_merchant_center_link_status_ed4bf61ab73ed71c = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x6a, 0xeb, 0x30, + 0x14, 0xfc, 0x71, 0xe0, 0x17, 0x94, 0x45, 0x4d, 0x56, 0xa5, 0x24, 0x8b, 0xe4, 0x00, 0x12, 0xa6, + 0x3b, 0x75, 0x51, 0xe4, 0x44, 0x0d, 0xa1, 0xa9, 0x6a, 0x1a, 0x92, 0x42, 0x31, 0x04, 0x35, 0x16, + 0xaa, 0x49, 0x2c, 0x05, 0x4b, 0xc9, 0x81, 0xba, 0xec, 0x51, 0x7a, 0x94, 0x2e, 0x7b, 0x82, 0x22, + 0x39, 0xf6, 0xce, 0xdd, 0x98, 0x79, 0x9e, 0xf7, 0x66, 0x46, 0x03, 0xee, 0xa4, 0xd6, 0x72, 0x2f, + 0x10, 0xcf, 0x0c, 0xaa, 0xa0, 0x43, 0xa7, 0x08, 0x09, 0x75, 0x2c, 0x0c, 0x2a, 0x44, 0xb9, 0x7d, + 0xe7, 0xca, 0x6e, 0xb6, 0x42, 0x59, 0x51, 0x6e, 0xf6, 0xb9, 0xda, 0x6d, 0x8c, 0xe5, 0xf6, 0x68, + 0xe0, 0xa1, 0xd4, 0x56, 0xf7, 0x87, 0xd5, 0x15, 0xe4, 0x99, 0x81, 0x8d, 0x00, 0x3c, 0x45, 0xd0, + 0x0b, 0x5c, 0x0f, 0x6a, 0xfd, 0x43, 0x8e, 0xb8, 0x52, 0xda, 0x72, 0x9b, 0x6b, 0x75, 0x3e, 0x1e, + 0x97, 0x60, 0xf0, 0x78, 0x76, 0x98, 0x78, 0x83, 0x45, 0xae, 0x76, 0x4b, 0x2f, 0x4f, 0xd5, 0xb1, + 0x18, 0x3f, 0x83, 0xab, 0x36, 0xbe, 0x7f, 0x09, 0x7a, 0x2b, 0xb6, 0x4c, 0xe8, 0x64, 0x7e, 0x3f, + 0xa7, 0xd3, 0xf0, 0x5f, 0xbf, 0x07, 0x2e, 0x56, 0xec, 0x81, 0x3d, 0xbd, 0xb0, 0xb0, 0xe3, 0x06, + 0xca, 0x48, 0xbc, 0xa0, 0xd3, 0x30, 0x70, 0x43, 0x42, 0xd9, 0x74, 0xce, 0x66, 0x61, 0x37, 0xfe, + 0xe9, 0x80, 0xd1, 0x56, 0x17, 0xf0, 0xcf, 0xdc, 0xf1, 0xb0, 0xcd, 0x37, 0x71, 0xc1, 0x93, 0xce, + 0x6b, 0x7c, 0xbe, 0x97, 0x7a, 0xcf, 0x95, 0x84, 0xba, 0x94, 0x48, 0x0a, 0xe5, 0x9f, 0x55, 0x17, + 0x79, 0xc8, 0x4d, 0x4b, 0xaf, 0xb7, 0xfe, 0xfb, 0x11, 0x74, 0x67, 
0x84, 0x7c, 0x06, 0xc3, 0x59, + 0x25, 0x45, 0x32, 0x03, 0x2b, 0xe8, 0xd0, 0x3a, 0x82, 0xae, 0x03, 0xf3, 0x55, 0xf3, 0x29, 0xc9, + 0x4c, 0xda, 0xf0, 0xe9, 0x3a, 0x4a, 0x3d, 0xff, 0x1d, 0x8c, 0xaa, 0x9f, 0x18, 0x93, 0xcc, 0x60, + 0xdc, 0x6c, 0x60, 0xbc, 0x8e, 0x30, 0xf6, 0x3b, 0x6f, 0xff, 0x7d, 0xb0, 0x9b, 0xdf, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x17, 0xd0, 0xaf, 0x66, 0xef, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/message_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/message_placeholder_field.pb.go new file mode 100644 index 0000000..6d79c81 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/message_placeholder_field.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/message_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Message placeholder fields. +type MessagePlaceholderFieldEnum_MessagePlaceholderField int32 + +const ( + // Not specified. + MessagePlaceholderFieldEnum_UNSPECIFIED MessagePlaceholderFieldEnum_MessagePlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + MessagePlaceholderFieldEnum_UNKNOWN MessagePlaceholderFieldEnum_MessagePlaceholderField = 1 + // Data Type: STRING. The name of your business. + MessagePlaceholderFieldEnum_BUSINESS_NAME MessagePlaceholderFieldEnum_MessagePlaceholderField = 2 + // Data Type: STRING. Country code of phone number. + MessagePlaceholderFieldEnum_COUNTRY_CODE MessagePlaceholderFieldEnum_MessagePlaceholderField = 3 + // Data Type: STRING. A phone number that's capable of sending and receiving + // text messages. + MessagePlaceholderFieldEnum_PHONE_NUMBER MessagePlaceholderFieldEnum_MessagePlaceholderField = 4 + // Data Type: STRING. The text that will go in your click-to-message ad. + MessagePlaceholderFieldEnum_MESSAGE_EXTENSION_TEXT MessagePlaceholderFieldEnum_MessagePlaceholderField = 5 + // Data Type: STRING. The message text automatically shows in people's + // messaging apps when they tap to send you a message. 
+ MessagePlaceholderFieldEnum_MESSAGE_TEXT MessagePlaceholderFieldEnum_MessagePlaceholderField = 6 +) + +var MessagePlaceholderFieldEnum_MessagePlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BUSINESS_NAME", + 3: "COUNTRY_CODE", + 4: "PHONE_NUMBER", + 5: "MESSAGE_EXTENSION_TEXT", + 6: "MESSAGE_TEXT", +} +var MessagePlaceholderFieldEnum_MessagePlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BUSINESS_NAME": 2, + "COUNTRY_CODE": 3, + "PHONE_NUMBER": 4, + "MESSAGE_EXTENSION_TEXT": 5, + "MESSAGE_TEXT": 6, +} + +func (x MessagePlaceholderFieldEnum_MessagePlaceholderField) String() string { + return proto.EnumName(MessagePlaceholderFieldEnum_MessagePlaceholderField_name, int32(x)) +} +func (MessagePlaceholderFieldEnum_MessagePlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_message_placeholder_field_4faf76ed816f88ca, []int{0, 0} +} + +// Values for Message placeholder fields. +type MessagePlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessagePlaceholderFieldEnum) Reset() { *m = MessagePlaceholderFieldEnum{} } +func (m *MessagePlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*MessagePlaceholderFieldEnum) ProtoMessage() {} +func (*MessagePlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_message_placeholder_field_4faf76ed816f88ca, []int{0} +} +func (m *MessagePlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessagePlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *MessagePlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessagePlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *MessagePlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessagePlaceholderFieldEnum.Merge(dst, src) +} +func (m *MessagePlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_MessagePlaceholderFieldEnum.Size(m) +} +func (m *MessagePlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MessagePlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MessagePlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MessagePlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.MessagePlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MessagePlaceholderFieldEnum_MessagePlaceholderField", MessagePlaceholderFieldEnum_MessagePlaceholderField_name, MessagePlaceholderFieldEnum_MessagePlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/message_placeholder_field.proto", fileDescriptor_message_placeholder_field_4faf76ed816f88ca) +} + +var fileDescriptor_message_placeholder_field_4faf76ed816f88ca = []byte{ + // 370 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x4f, 0x4e, 0xb3, 0x40, + 0x1c, 0xfd, 0xa0, 0x9f, 0x35, 0x99, 0x6a, 0x44, 0x16, 0x9a, 0xd4, 0x76, 0xd1, 0x1e, 0x60, 0x08, + 0x71, 0x37, 0xc6, 0x05, 0xb4, 0xd3, 0x4a, 0x0c, 0x03, 0x29, 0x50, 0xab, 0x21, 0x21, 0x58, 0x46, + 0x24, 0x01, 0x86, 0x74, 0xda, 0x9e, 0xc6, 0x95, 0x4b, 0xcf, 0xe0, 0x09, 0x3c, 0x8a, 0x3b, 0x6f, + 0x60, 0x00, 0xa9, 0xab, 0xba, 0x99, 0xbc, 0xfc, 0xde, 0x9f, 0x99, 0x79, 0x3f, 0x70, 0x1d, 0x33, + 0x16, 0xa7, 0x54, 0x09, 0x23, 0xae, 0xd4, 0xb0, 0x44, 0x5b, 0x55, 0xa1, 0xf9, 0x26, 0xe3, 0x4a, + 
0x46, 0x39, 0x0f, 0x63, 0x1a, 0x14, 0x69, 0xb8, 0xa4, 0xcf, 0x2c, 0x8d, 0xe8, 0x2a, 0x78, 0x4a, + 0x68, 0x1a, 0xc1, 0x62, 0xc5, 0xd6, 0x4c, 0xee, 0xd7, 0x1e, 0x18, 0x46, 0x1c, 0xee, 0xec, 0x70, + 0xab, 0xc2, 0xca, 0xde, 0xed, 0x35, 0xe9, 0x45, 0xa2, 0x84, 0x79, 0xce, 0xd6, 0xe1, 0x3a, 0x61, + 0x39, 0xaf, 0xcd, 0xc3, 0x77, 0x01, 0x5c, 0x98, 0xf5, 0x05, 0xf6, 0x6f, 0xfe, 0xa4, 0x8c, 0xc7, + 0xf9, 0x26, 0x1b, 0xbe, 0x08, 0xe0, 0x7c, 0x0f, 0x2f, 0x9f, 0x80, 0x8e, 0x47, 0x1c, 0x1b, 0x8f, + 0x8c, 0x89, 0x81, 0xc7, 0xd2, 0x3f, 0xb9, 0x03, 0x0e, 0x3d, 0x72, 0x4b, 0xac, 0x3b, 0x22, 0x09, + 0xf2, 0x29, 0x38, 0xd6, 0x3d, 0xc7, 0x20, 0xd8, 0x71, 0x02, 0xa2, 0x99, 0x58, 0x12, 0x65, 0x09, + 0x1c, 0x8d, 0x2c, 0x8f, 0xb8, 0xb3, 0xfb, 0x60, 0x64, 0x8d, 0xb1, 0xd4, 0x2a, 0x27, 0xf6, 0x8d, + 0x45, 0x70, 0x40, 0x3c, 0x53, 0xc7, 0x33, 0xe9, 0xbf, 0xdc, 0x05, 0x67, 0x26, 0x76, 0x1c, 0x6d, + 0x8a, 0x03, 0xbc, 0x70, 0x31, 0x71, 0x0c, 0x8b, 0x04, 0x2e, 0x5e, 0xb8, 0xd2, 0x41, 0xa9, 0x6e, + 0xb8, 0x6a, 0xd2, 0xd6, 0xbf, 0x04, 0x30, 0x58, 0xb2, 0x0c, 0xfe, 0x59, 0x81, 0xde, 0xdb, 0xf3, + 0x03, 0xbb, 0xac, 0xc0, 0x16, 0x1e, 0xf4, 0x1f, 0x7b, 0xcc, 0xd2, 0x30, 0x8f, 0x21, 0x5b, 0xc5, + 0x4a, 0x4c, 0xf3, 0xaa, 0xa0, 0x66, 0x21, 0x45, 0xc2, 0xf7, 0xec, 0xe7, 0xaa, 0x3a, 0x5f, 0xc5, + 0xd6, 0x54, 0xd3, 0xde, 0xc4, 0xfe, 0xb4, 0x8e, 0xd2, 0x22, 0x0e, 0x6b, 0x58, 0xa2, 0xb9, 0x0a, + 0xcb, 0x36, 0xf9, 0x47, 0xc3, 0xfb, 0x5a, 0xc4, 0xfd, 0x1d, 0xef, 0xcf, 0x55, 0xbf, 0xe2, 0x3f, + 0xc5, 0x41, 0x3d, 0x44, 0x48, 0x8b, 0x38, 0x42, 0x3b, 0x05, 0x42, 0x73, 0x15, 0xa1, 0x4a, 0xf3, + 0xd8, 0xae, 0x1e, 0x76, 0xf9, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xea, 0xa9, 0x43, 0xed, 0x37, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mime_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mime_type.pb.go new file mode 100644 index 0000000..4b3b0cd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mime_type.pb.go @@ -0,0 +1,165 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/mime_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The mime type +type MimeTypeEnum_MimeType int32 + +const ( + // The mime type has not been specified. + MimeTypeEnum_UNSPECIFIED MimeTypeEnum_MimeType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + MimeTypeEnum_UNKNOWN MimeTypeEnum_MimeType = 1 + // MIME type of image/jpeg. + MimeTypeEnum_IMAGE_JPEG MimeTypeEnum_MimeType = 2 + // MIME type of image/gif. + MimeTypeEnum_IMAGE_GIF MimeTypeEnum_MimeType = 3 + // MIME type of image/png. + MimeTypeEnum_IMAGE_PNG MimeTypeEnum_MimeType = 4 + // MIME type of application/x-shockwave-flash. + MimeTypeEnum_FLASH MimeTypeEnum_MimeType = 5 + // MIME type of text/html. 
+ MimeTypeEnum_TEXT_HTML MimeTypeEnum_MimeType = 6 + // MIME type of application/pdf. + MimeTypeEnum_PDF MimeTypeEnum_MimeType = 7 + // MIME type of application/msword. + MimeTypeEnum_MSWORD MimeTypeEnum_MimeType = 8 + // MIME type of application/vnd.ms-excel. + MimeTypeEnum_MSEXCEL MimeTypeEnum_MimeType = 9 + // MIME type of application/rtf. + MimeTypeEnum_RTF MimeTypeEnum_MimeType = 10 + // MIME type of audio/wav. + MimeTypeEnum_AUDIO_WAV MimeTypeEnum_MimeType = 11 + // MIME type of audio/mp3. + MimeTypeEnum_AUDIO_MP3 MimeTypeEnum_MimeType = 12 + // MIME type of application/x-html5-ad-zip. + MimeTypeEnum_HTML5_AD_ZIP MimeTypeEnum_MimeType = 13 +) + +var MimeTypeEnum_MimeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "IMAGE_JPEG", + 3: "IMAGE_GIF", + 4: "IMAGE_PNG", + 5: "FLASH", + 6: "TEXT_HTML", + 7: "PDF", + 8: "MSWORD", + 9: "MSEXCEL", + 10: "RTF", + 11: "AUDIO_WAV", + 12: "AUDIO_MP3", + 13: "HTML5_AD_ZIP", +} +var MimeTypeEnum_MimeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "IMAGE_JPEG": 2, + "IMAGE_GIF": 3, + "IMAGE_PNG": 4, + "FLASH": 5, + "TEXT_HTML": 6, + "PDF": 7, + "MSWORD": 8, + "MSEXCEL": 9, + "RTF": 10, + "AUDIO_WAV": 11, + "AUDIO_MP3": 12, + "HTML5_AD_ZIP": 13, +} + +func (x MimeTypeEnum_MimeType) String() string { + return proto.EnumName(MimeTypeEnum_MimeType_name, int32(x)) +} +func (MimeTypeEnum_MimeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_mime_type_e0cd9d9a97a10483, []int{0, 0} +} + +// Container for enum describing the mime types. +type MimeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MimeTypeEnum) Reset() { *m = MimeTypeEnum{} } +func (m *MimeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*MimeTypeEnum) ProtoMessage() {} +func (*MimeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_mime_type_e0cd9d9a97a10483, []int{0} +} +func (m *MimeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MimeTypeEnum.Unmarshal(m, b) +} +func (m *MimeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MimeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *MimeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MimeTypeEnum.Merge(dst, src) +} +func (m *MimeTypeEnum) XXX_Size() int { + return xxx_messageInfo_MimeTypeEnum.Size(m) +} +func (m *MimeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MimeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MimeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MimeTypeEnum)(nil), "google.ads.googleads.v1.enums.MimeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MimeTypeEnum_MimeType", MimeTypeEnum_MimeType_name, MimeTypeEnum_MimeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/mime_type.proto", fileDescriptor_mime_type_e0cd9d9a97a10483) +} + +var fileDescriptor_mime_type_e0cd9d9a97a10483 = []byte{ + // 396 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x6e, 0x9b, 0x40, + 0x10, 0x86, 0x0b, 0x6e, 0xec, 0x78, 0x6d, 0xb7, 0xab, 0x3d, 0x56, 0xcd, 0x21, 0xb9, 0x77, 0x11, + 0x8a, 0x7a, 0xd9, 0x9e, 0xd6, 0x61, 0x21, 0xb4, 0x06, 0xaf, 0x02, 0xc6, 0x51, 0x84, 0x84, 0x68, + 0x41, 0x08, 0x29, 0xb0, 0xc8, 0x8b, 0x2d, 0xf9, 0x75, 0x7a, 0xec, 0xa3, 0xb4, 0xaf, 0xd0, 0x53, + 0x8f, 0x7d, 0x8a, 0x6a, 0xa1, 0xd8, 0xa7, 0xf6, 0x82, 0xfe, 0x99, 
0xff, 0x9b, 0x01, 0xfe, 0x01, + 0xef, 0x0a, 0x21, 0x8a, 0xe7, 0xdc, 0x48, 0x33, 0x69, 0xf4, 0x52, 0xa9, 0x83, 0x69, 0xe4, 0xf5, + 0xbe, 0x92, 0x46, 0x55, 0x56, 0x79, 0xd2, 0x1e, 0x9b, 0x1c, 0x37, 0x3b, 0xd1, 0x0a, 0x74, 0xd5, + 0x33, 0x38, 0xcd, 0x24, 0x3e, 0xe1, 0xf8, 0x60, 0xe2, 0x0e, 0x7f, 0xf3, 0x76, 0xd8, 0xd6, 0x94, + 0x46, 0x5a, 0xd7, 0xa2, 0x4d, 0xdb, 0x52, 0xd4, 0xb2, 0x1f, 0xbe, 0xf9, 0xa9, 0x81, 0xb9, 0x57, + 0x56, 0x79, 0x78, 0x6c, 0x72, 0x56, 0xef, 0xab, 0x9b, 0x1f, 0x1a, 0xb8, 0x1c, 0x1a, 0xe8, 0x35, + 0x98, 0x6d, 0xfc, 0x80, 0xb3, 0x3b, 0xd7, 0x76, 0x99, 0x05, 0x5f, 0xa0, 0x19, 0x98, 0x6c, 0xfc, + 0x4f, 0xfe, 0x7a, 0xeb, 0x43, 0x0d, 0xbd, 0x02, 0xc0, 0xf5, 0xa8, 0xc3, 0x92, 0x8f, 0x9c, 0x39, + 0x50, 0x47, 0x0b, 0x30, 0xed, 0x6b, 0xc7, 0xb5, 0xe1, 0xe8, 0x5c, 0x72, 0xdf, 0x81, 0x2f, 0xd1, + 0x14, 0x5c, 0xd8, 0x2b, 0x1a, 0xdc, 0xc3, 0x0b, 0xe5, 0x84, 0xec, 0x31, 0x4c, 0xee, 0x43, 0x6f, + 0x05, 0xc7, 0x68, 0x02, 0x46, 0xdc, 0xb2, 0xe1, 0x04, 0x01, 0x30, 0xf6, 0x82, 0xed, 0xfa, 0xc1, + 0x82, 0x97, 0xea, 0x4d, 0x5e, 0xc0, 0x1e, 0xef, 0xd8, 0x0a, 0x4e, 0x15, 0xf1, 0x10, 0xda, 0x10, + 0xa8, 0x49, 0xba, 0xb1, 0xdc, 0x75, 0xb2, 0xa5, 0x11, 0x9c, 0x9d, 0x4b, 0x8f, 0xdf, 0xc2, 0x39, + 0x82, 0x60, 0xae, 0x56, 0xbe, 0x4f, 0xa8, 0x95, 0x3c, 0xb9, 0x1c, 0x2e, 0x96, 0xbf, 0x34, 0x70, + 0xfd, 0x45, 0x54, 0xf8, 0xbf, 0x11, 0x2d, 0x17, 0xc3, 0x0f, 0x73, 0x95, 0x09, 0xd7, 0x9e, 0x96, + 0x7f, 0xf9, 0x42, 0x3c, 0xa7, 0x75, 0x81, 0xc5, 0xae, 0x30, 0x8a, 0xbc, 0xee, 0x12, 0x1b, 0x2e, + 0xd2, 0x94, 0xf2, 0x1f, 0x07, 0xfa, 0xd0, 0x3d, 0xbf, 0xea, 0x23, 0x87, 0xd2, 0x6f, 0xfa, 0x95, + 0xd3, 0xaf, 0xa2, 0x99, 0xc4, 0xbd, 0x54, 0x2a, 0x32, 0xb1, 0x4a, 0x5b, 0x7e, 0x1f, 0xfc, 0x98, + 0x66, 0x32, 0x3e, 0xf9, 0x71, 0x64, 0xc6, 0x9d, 0xff, 0x5b, 0xbf, 0xee, 0x9b, 0x84, 0xd0, 0x4c, + 0x12, 0x72, 0x22, 0x08, 0x89, 0x4c, 0x42, 0x3a, 0xe6, 0xf3, 0xb8, 0xfb, 0xb0, 0xdb, 0x3f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0xc4, 0xe8, 0xbe, 0x08, 0x38, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/minute_of_hour.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/minute_of_hour.pb.go new file mode 100644 index 0000000..f0b5412 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/minute_of_hour.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/minute_of_hour.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates of quarter-hours. E.g. "FIFTEEN" +type MinuteOfHourEnum_MinuteOfHour int32 + +const ( + // Not specified. + MinuteOfHourEnum_UNSPECIFIED MinuteOfHourEnum_MinuteOfHour = 0 + // The value is unknown in this version. + MinuteOfHourEnum_UNKNOWN MinuteOfHourEnum_MinuteOfHour = 1 + // Zero minutes past the hour. 
+ MinuteOfHourEnum_ZERO MinuteOfHourEnum_MinuteOfHour = 2 + // Fifteen minutes past the hour. + MinuteOfHourEnum_FIFTEEN MinuteOfHourEnum_MinuteOfHour = 3 + // Thirty minutes past the hour. + MinuteOfHourEnum_THIRTY MinuteOfHourEnum_MinuteOfHour = 4 + // Forty-five minutes past the hour. + MinuteOfHourEnum_FORTY_FIVE MinuteOfHourEnum_MinuteOfHour = 5 +) + +var MinuteOfHourEnum_MinuteOfHour_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ZERO", + 3: "FIFTEEN", + 4: "THIRTY", + 5: "FORTY_FIVE", +} +var MinuteOfHourEnum_MinuteOfHour_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ZERO": 2, + "FIFTEEN": 3, + "THIRTY": 4, + "FORTY_FIVE": 5, +} + +func (x MinuteOfHourEnum_MinuteOfHour) String() string { + return proto.EnumName(MinuteOfHourEnum_MinuteOfHour_name, int32(x)) +} +func (MinuteOfHourEnum_MinuteOfHour) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_minute_of_hour_4dcf36a7c1549c65, []int{0, 0} +} + +// Container for enumeration of quarter-hours. +type MinuteOfHourEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MinuteOfHourEnum) Reset() { *m = MinuteOfHourEnum{} } +func (m *MinuteOfHourEnum) String() string { return proto.CompactTextString(m) } +func (*MinuteOfHourEnum) ProtoMessage() {} +func (*MinuteOfHourEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_minute_of_hour_4dcf36a7c1549c65, []int{0} +} +func (m *MinuteOfHourEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MinuteOfHourEnum.Unmarshal(m, b) +} +func (m *MinuteOfHourEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MinuteOfHourEnum.Marshal(b, m, deterministic) +} +func (dst *MinuteOfHourEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MinuteOfHourEnum.Merge(dst, src) +} +func (m *MinuteOfHourEnum) XXX_Size() int { + return xxx_messageInfo_MinuteOfHourEnum.Size(m) +} +func (m *MinuteOfHourEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MinuteOfHourEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MinuteOfHourEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MinuteOfHourEnum)(nil), "google.ads.googleads.v1.enums.MinuteOfHourEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MinuteOfHourEnum_MinuteOfHour", MinuteOfHourEnum_MinuteOfHour_name, MinuteOfHourEnum_MinuteOfHour_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/minute_of_hour.proto", fileDescriptor_minute_of_hour_4dcf36a7c1549c65) +} + +var fileDescriptor_minute_of_hour_4dcf36a7c1549c65 = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4f, 0x4e, 0xf2, 0x40, + 0x1c, 0xfd, 0x28, 0x7c, 0x68, 0x06, 0xa3, 0x63, 0x97, 0x46, 0x16, 0x70, 0x80, 0x69, 0xaa, 0xbb, + 0x71, 0x55, 0x74, 0x0a, 0x8d, 0xb1, 0x25, 0x58, 0x6a, 0x20, 0x4d, 0x9a, 0x6a, 0x4b, 0x6d, 0x42, + 0xe7, 0x47, 0x3a, 0x2d, 0x07, 0x72, 0xe9, 0x51, 0x3c, 0x88, 0x0b, 0x4f, 0x61, 0x3a, 0x23, 0x84, + 0x8d, 0x6e, 0x26, 0x2f, 0xbf, 0xf7, 0x27, 0x6f, 0x1e, 0xba, 0xca, 0x00, 0xb2, 0x75, 0x6a, 0xc4, + 0x89, 0x30, 0x14, 0x6c, 0xd0, 0xd6, 0x34, 0x52, 0x5e, 0x17, 0xc2, 0x28, 0x72, 0x5e, 0x57, 0x69, + 0x04, 0xab, 0xe8, 0x15, 0xea, 0x92, 0x6c, 0x4a, 0xa8, 0x40, 0xef, 0x2b, 0x21, 0x89, 0x13, 0x41, + 0xf6, 0x1e, 0xb2, 0x35, 0x89, 0xf4, 0x5c, 0x5c, 0xee, 0x22, 0x37, 0xb9, 0x11, 0x73, 0x0e, 0x55, + 0x5c, 0xe5, 0xc0, 0x85, 0x32, 0x0f, 0x05, 0xc2, 0x0f, 0x32, 0xd4, 0x5b, 0x4d, 0xa0, 0x2e, 
0x19, + 0xaf, 0x8b, 0x61, 0x84, 0x4e, 0x0e, 0x6f, 0xfa, 0x19, 0xea, 0xcd, 0xdd, 0xc7, 0x29, 0xbb, 0x75, + 0x6c, 0x87, 0xdd, 0xe1, 0x7f, 0x7a, 0x0f, 0x1d, 0xcd, 0xdd, 0x7b, 0xd7, 0x7b, 0x72, 0x71, 0x4b, + 0x3f, 0x46, 0x9d, 0x25, 0x9b, 0x79, 0x58, 0x6b, 0xce, 0xb6, 0x63, 0xfb, 0x8c, 0xb9, 0xb8, 0xad, + 0x23, 0xd4, 0xf5, 0x27, 0xce, 0xcc, 0x5f, 0xe0, 0x8e, 0x7e, 0x8a, 0x90, 0xed, 0xcd, 0xfc, 0x45, + 0x64, 0x3b, 0x01, 0xc3, 0xff, 0x47, 0x9f, 0x2d, 0x34, 0x78, 0x81, 0x82, 0xfc, 0x59, 0x7c, 0x74, + 0x7e, 0x58, 0x62, 0xda, 0xb4, 0x9d, 0xb6, 0x96, 0xa3, 0x1f, 0x4f, 0x06, 0xeb, 0x98, 0x67, 0x04, + 0xca, 0xcc, 0xc8, 0x52, 0x2e, 0xff, 0xb2, 0x1b, 0x6c, 0x93, 0x8b, 0x5f, 0xf6, 0xbb, 0x91, 0xef, + 0x9b, 0xd6, 0x1e, 0x5b, 0xd6, 0xbb, 0xd6, 0x1f, 0xab, 0x28, 0x2b, 0x11, 0x44, 0xc1, 0x06, 0x05, + 0x26, 0x69, 0x46, 0x10, 0x1f, 0x3b, 0x3e, 0xb4, 0x12, 0x11, 0xee, 0xf9, 0x30, 0x30, 0x43, 0xc9, + 0x7f, 0x69, 0x03, 0x75, 0xa4, 0xd4, 0x4a, 0x04, 0xa5, 0x7b, 0x05, 0xa5, 0x81, 0x49, 0xa9, 0xd4, + 0x3c, 0x77, 0x65, 0xb1, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x63, 0xe0, 0x63, 0x00, 0xd7, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mobile_device_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mobile_device_type.pb.go new file mode 100644 index 0000000..5d10cd8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mobile_device_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/mobile_device_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of mobile device. +type MobileDeviceTypeEnum_MobileDeviceType int32 + +const ( + // Not specified. + MobileDeviceTypeEnum_UNSPECIFIED MobileDeviceTypeEnum_MobileDeviceType = 0 + // Used for return value only. Represents value unknown in this version. + MobileDeviceTypeEnum_UNKNOWN MobileDeviceTypeEnum_MobileDeviceType = 1 + // Mobile phones. + MobileDeviceTypeEnum_MOBILE MobileDeviceTypeEnum_MobileDeviceType = 2 + // Tablets. + MobileDeviceTypeEnum_TABLET MobileDeviceTypeEnum_MobileDeviceType = 3 +) + +var MobileDeviceTypeEnum_MobileDeviceType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MOBILE", + 3: "TABLET", +} +var MobileDeviceTypeEnum_MobileDeviceType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MOBILE": 2, + "TABLET": 3, +} + +func (x MobileDeviceTypeEnum_MobileDeviceType) String() string { + return proto.EnumName(MobileDeviceTypeEnum_MobileDeviceType_name, int32(x)) +} +func (MobileDeviceTypeEnum_MobileDeviceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_mobile_device_type_36c69dbb38a4b590, []int{0, 0} +} + +// Container for enum describing the types of mobile device. 
+type MobileDeviceTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileDeviceTypeEnum) Reset() { *m = MobileDeviceTypeEnum{} } +func (m *MobileDeviceTypeEnum) String() string { return proto.CompactTextString(m) } +func (*MobileDeviceTypeEnum) ProtoMessage() {} +func (*MobileDeviceTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_mobile_device_type_36c69dbb38a4b590, []int{0} +} +func (m *MobileDeviceTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileDeviceTypeEnum.Unmarshal(m, b) +} +func (m *MobileDeviceTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileDeviceTypeEnum.Marshal(b, m, deterministic) +} +func (dst *MobileDeviceTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileDeviceTypeEnum.Merge(dst, src) +} +func (m *MobileDeviceTypeEnum) XXX_Size() int { + return xxx_messageInfo_MobileDeviceTypeEnum.Size(m) +} +func (m *MobileDeviceTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MobileDeviceTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileDeviceTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MobileDeviceTypeEnum)(nil), "google.ads.googleads.v1.enums.MobileDeviceTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MobileDeviceTypeEnum_MobileDeviceType", MobileDeviceTypeEnum_MobileDeviceType_name, MobileDeviceTypeEnum_MobileDeviceType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/mobile_device_type.proto", fileDescriptor_mobile_device_type_36c69dbb38a4b590) +} + +var fileDescriptor_mobile_device_type_36c69dbb38a4b590 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4f, 0x4b, 0xfb, 0x30, + 0x18, 0xfe, 0xad, 0x83, 0xfd, 0x20, 0x3b, 0x58, 0x8a, 0x5e, 0xc4, 0x1d, 0xb6, 0x0f, 0x90, 0x50, + 0x04, 0x0f, 0xf1, 0x94, 0xba, 0x3a, 0x87, 0x5b, 0x57, 0xb0, 0xab, 0x20, 0x85, 0xd9, 0x2d, 0x21, + 0x14, 0xda, 0xa4, 0x2c, 0x5d, 0x61, 0x5f, 0xc7, 0xa3, 0x1f, 0xc5, 0x8f, 0xb2, 0x4f, 0x21, 0x4d, + 0x6c, 0x0f, 0x03, 0xbd, 0x84, 0x87, 0xf7, 0xf9, 0x93, 0xe7, 0x7d, 0xc1, 0x1d, 0x97, 0x92, 0xe7, + 0x0c, 0xa5, 0x54, 0x21, 0x03, 0x1b, 0x54, 0xbb, 0x88, 0x89, 0x43, 0xa1, 0x50, 0x21, 0xb7, 0x59, + 0xce, 0x36, 0x94, 0xd5, 0xd9, 0x8e, 0x6d, 0xaa, 0x63, 0xc9, 0x60, 0xb9, 0x97, 0x95, 0x74, 0x46, + 0x46, 0x0c, 0x53, 0xaa, 0x60, 0xe7, 0x83, 0xb5, 0x0b, 0xb5, 0xef, 0xfa, 0xa6, 0x8d, 0x2d, 0x33, + 0x94, 0x0a, 0x21, 0xab, 0xb4, 0xca, 0xa4, 0x50, 0xc6, 0x3c, 0x79, 0x07, 0x97, 0x4b, 0x1d, 0x3c, + 0xd5, 0xb9, 0xd1, 0xb1, 0x64, 0xbe, 0x38, 0x14, 0x93, 0x27, 0x60, 0x9f, 0xcf, 0x9d, 0x0b, 0x30, + 0x5c, 0x07, 0x2f, 0xa1, 0xff, 0x30, 0x7f, 0x9c, 0xfb, 0x53, 0xfb, 0x9f, 0x33, 0x04, 0xff, 0xd7, + 0xc1, 0x73, 0xb0, 0x7a, 0x0d, 0xec, 0x9e, 0x03, 0xc0, 0x60, 0xb9, 0xf2, 0xe6, 0x0b, 0xdf, 0xb6, + 0x1a, 0x1c, 0x11, 0x6f, 0xe1, 0x47, 0x76, 0xdf, 0x3b, 0xf5, 0xc0, 0x78, 0x27, 0x0b, 0xf8, 0x67, + 0x4b, 0xef, 0xea, 0xfc, 0xb7, 0xb0, 0xa9, 0x17, 0xf6, 0xde, 0xbc, 0x1f, 0x1f, 0x97, 0x79, 0x2a, + 0x38, 0x94, 0x7b, 0x8e, 0x38, 0x13, 0xba, 0x7c, 0x7b, 0xa5, 0x32, 0x53, 0xbf, 0x1c, 0xed, 0x5e, + 0xbf, 0x1f, 0x56, 0x7f, 0x46, 0xc8, 0xa7, 0x35, 0x9a, 0x99, 0x28, 0x42, 0x15, 0x34, 0xb0, 0x41, + 0xb1, 0x0b, 0x9b, 0x8d, 0xd5, 0x57, 0xcb, 0x27, 0x84, 0xaa, 0xa4, 0xe3, 0x93, 0xd8, 0x4d, 0x34, + 0x7f, 0xb2, 0xc6, 0x66, 0x88, 0x31, 0xa1, 0x0a, 0xe3, 0x4e, 0x81, 0x71, 0xec, 0x62, 0xac, 0x35, + 0xdb, 
0x81, 0x2e, 0x76, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x12, 0x2c, 0x1d, 0x2c, 0xcc, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/month_of_year.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/month_of_year.pb.go new file mode 100644 index 0000000..34fdc0b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/month_of_year.pb.go @@ -0,0 +1,163 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/month_of_year.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates months of the year, e.g., "January". +type MonthOfYearEnum_MonthOfYear int32 + +const ( + // Not specified. + MonthOfYearEnum_UNSPECIFIED MonthOfYearEnum_MonthOfYear = 0 + // The value is unknown in this version. + MonthOfYearEnum_UNKNOWN MonthOfYearEnum_MonthOfYear = 1 + // January. + MonthOfYearEnum_JANUARY MonthOfYearEnum_MonthOfYear = 2 + // February. + MonthOfYearEnum_FEBRUARY MonthOfYearEnum_MonthOfYear = 3 + // March. + MonthOfYearEnum_MARCH MonthOfYearEnum_MonthOfYear = 4 + // April. + MonthOfYearEnum_APRIL MonthOfYearEnum_MonthOfYear = 5 + // May. + MonthOfYearEnum_MAY MonthOfYearEnum_MonthOfYear = 6 + // June. + MonthOfYearEnum_JUNE MonthOfYearEnum_MonthOfYear = 7 + // July. + MonthOfYearEnum_JULY MonthOfYearEnum_MonthOfYear = 8 + // August. + MonthOfYearEnum_AUGUST MonthOfYearEnum_MonthOfYear = 9 + // September. + MonthOfYearEnum_SEPTEMBER MonthOfYearEnum_MonthOfYear = 10 + // October. + MonthOfYearEnum_OCTOBER MonthOfYearEnum_MonthOfYear = 11 + // November. + MonthOfYearEnum_NOVEMBER MonthOfYearEnum_MonthOfYear = 12 + // December. + MonthOfYearEnum_DECEMBER MonthOfYearEnum_MonthOfYear = 13 +) + +var MonthOfYearEnum_MonthOfYear_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "JANUARY", + 3: "FEBRUARY", + 4: "MARCH", + 5: "APRIL", + 6: "MAY", + 7: "JUNE", + 8: "JULY", + 9: "AUGUST", + 10: "SEPTEMBER", + 11: "OCTOBER", + 12: "NOVEMBER", + 13: "DECEMBER", +} +var MonthOfYearEnum_MonthOfYear_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "JANUARY": 2, + "FEBRUARY": 3, + "MARCH": 4, + "APRIL": 5, + "MAY": 6, + "JUNE": 7, + "JULY": 8, + "AUGUST": 9, + "SEPTEMBER": 10, + "OCTOBER": 11, + "NOVEMBER": 12, + "DECEMBER": 13, +} + +func (x MonthOfYearEnum_MonthOfYear) String() string { + return proto.EnumName(MonthOfYearEnum_MonthOfYear_name, int32(x)) +} +func (MonthOfYearEnum_MonthOfYear) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_month_of_year_833b1270d63ddf92, []int{0, 0} +} + +// Container for enumeration of months of the year, e.g., "January". 
+type MonthOfYearEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MonthOfYearEnum) Reset() { *m = MonthOfYearEnum{} } +func (m *MonthOfYearEnum) String() string { return proto.CompactTextString(m) } +func (*MonthOfYearEnum) ProtoMessage() {} +func (*MonthOfYearEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_month_of_year_833b1270d63ddf92, []int{0} +} +func (m *MonthOfYearEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MonthOfYearEnum.Unmarshal(m, b) +} +func (m *MonthOfYearEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MonthOfYearEnum.Marshal(b, m, deterministic) +} +func (dst *MonthOfYearEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MonthOfYearEnum.Merge(dst, src) +} +func (m *MonthOfYearEnum) XXX_Size() int { + return xxx_messageInfo_MonthOfYearEnum.Size(m) +} +func (m *MonthOfYearEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MonthOfYearEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MonthOfYearEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MonthOfYearEnum)(nil), "google.ads.googleads.v1.enums.MonthOfYearEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MonthOfYearEnum_MonthOfYear", MonthOfYearEnum_MonthOfYear_name, MonthOfYearEnum_MonthOfYear_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/month_of_year.proto", fileDescriptor_month_of_year_833b1270d63ddf92) +} + +var fileDescriptor_month_of_year_833b1270d63ddf92 = []byte{ + // 389 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcb, 0x8e, 0xd3, 0x30, + 0x14, 0x25, 0xe9, 0x4c, 0x1f, 0xce, 0x8c, 0xc6, 0xf2, 0x12, 0x31, 0x8b, 0x99, 0x0f, 0x70, 0x14, + 0xb1, 0x33, 0x2b, 0x27, 0xe3, 0x29, 0x2d, 0xcd, 0x43, 0x69, 0x13, 0x14, 0x14, 0xa9, 0x0a, 0x24, + 0x0d, 0x95, 0x1a, 0xbb, 0x8a, 0xd3, 0x4a, 0xfc, 0x0e, 0x4b, 0xfe, 0x81, 0x1f, 0x60, 0xc7, 0x47, + 0xb0, 0xe1, 0x2b, 0x90, 0x63, 0x5a, 0x75, 0x03, 0x9b, 0xe8, 0x9c, 0x7b, 0xee, 0xb9, 0x37, 0x3e, + 0x17, 0x38, 0xb5, 0x10, 0xf5, 0xae, 0xb2, 0x8b, 0x52, 0xda, 0x1a, 0x2a, 0x74, 0x74, 0xec, 0x8a, + 0x1f, 0x1a, 0x69, 0x37, 0x82, 0x77, 0x9f, 0xd7, 0x62, 0xb3, 0xfe, 0x52, 0x15, 0x2d, 0xde, 0xb7, + 0xa2, 0x13, 0xe8, 0x5e, 0xf7, 0xe1, 0xa2, 0x94, 0xf8, 0x6c, 0xc1, 0x47, 0x07, 0xf7, 0x96, 0x97, + 0xaf, 0x4e, 0x13, 0xf7, 0x5b, 0xbb, 0xe0, 0x5c, 0x74, 0x45, 0xb7, 0x15, 0x5c, 0x6a, 0xf3, 0xe3, + 0x4f, 0x03, 0xdc, 0xf9, 0x6a, 0x68, 0xb8, 0xc9, 0xaa, 0xa2, 0x65, 0xfc, 0xd0, 0x3c, 0x7e, 0x37, + 0x80, 0x75, 0x51, 0x43, 0x77, 0xc0, 0x4a, 0x82, 0x65, 0xc4, 0xbc, 0xd9, 0xf3, 0x8c, 0x3d, 0xc1, + 0x17, 0xc8, 0x02, 0xa3, 0x24, 0x78, 0x17, 0x84, 0xef, 0x03, 0x68, 0x28, 0x32, 0xa7, 0x41, 0x42, + 0xe3, 0x0c, 0x9a, 0xe8, 0x06, 0x8c, 0x9f, 0x99, 0x1b, 0xf7, 0x6c, 0x80, 0x26, 0xe0, 0xda, 0xa7, + 0xb1, 0xf7, 0x16, 0x5e, 0x29, 0x48, 0xa3, 0x78, 0xb6, 0x80, 0xd7, 0x68, 0x04, 0x06, 0x3e, 0xcd, + 0xe0, 0x10, 0x8d, 0xc1, 0xd5, 0x3c, 0x09, 0x18, 0x1c, 0x69, 0xb4, 0xc8, 0xe0, 0x18, 0x01, 0x30, + 0xa4, 0xc9, 0x34, 0x59, 0xae, 0xe0, 0x04, 0xdd, 0x82, 0xc9, 0x92, 0x45, 0x2b, 0xe6, 0xbb, 0x2c, + 0x86, 0x40, 0x2d, 0x0a, 0xbd, 0x55, 0xa8, 0x88, 0xa5, 0x16, 0x05, 0x61, 0xaa, 0xa5, 0x1b, 0xc5, + 0x9e, 0x98, 0xa7, 0xd9, 0xad, 0xfb, 0xcb, 0x00, 0x0f, 0x9f, 0x44, 0x83, 0xff, 0x9b, 0x8b, 0x0b, + 0x2f, 0x9e, 0x18, 0xa9, 0x2c, 0x22, 0xe3, 0x83, 0xfb, 0xd7, 0x52, 0x8b, 0x5d, 0xc1, 0x6b, 0x2c, + 0xda, 0xda, 0xae, 0x2b, 0xde, 0x27, 0x75, 0xba, 0xc6, 
0x7e, 0x2b, 0xff, 0x71, 0x9c, 0x37, 0xfd, + 0xf7, 0xab, 0x39, 0x98, 0x52, 0xfa, 0xcd, 0xbc, 0x9f, 0xea, 0x51, 0xb4, 0x94, 0x58, 0x43, 0x85, + 0x52, 0x07, 0xab, 0x88, 0xe5, 0x8f, 0x93, 0x9e, 0xd3, 0x52, 0xe6, 0x67, 0x3d, 0x4f, 0x9d, 0xbc, + 0xd7, 0x7f, 0x9b, 0x0f, 0xba, 0x48, 0x08, 0x2d, 0x25, 0x21, 0xe7, 0x0e, 0x42, 0x52, 0x87, 0x90, + 0xbe, 0xe7, 0xe3, 0xb0, 0xff, 0xb1, 0xd7, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x2c, 0xa9, 0x63, + 0xff, 0x34, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mutate_job_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mutate_job_status.pb.go new file mode 100644 index 0000000..7349e83 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/mutate_job_status.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/mutate_job_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The mutate job statuses. +type MutateJobStatusEnum_MutateJobStatus int32 + +const ( + // Not specified. + MutateJobStatusEnum_UNSPECIFIED MutateJobStatusEnum_MutateJobStatus = 0 + // Used for return value only. Represents value unknown in this version. + MutateJobStatusEnum_UNKNOWN MutateJobStatusEnum_MutateJobStatus = 1 + // The job is not currently running. + MutateJobStatusEnum_PENDING MutateJobStatusEnum_MutateJobStatus = 2 + // The job is running. + MutateJobStatusEnum_RUNNING MutateJobStatusEnum_MutateJobStatus = 3 + // The job is done. + MutateJobStatusEnum_DONE MutateJobStatusEnum_MutateJobStatus = 4 +) + +var MutateJobStatusEnum_MutateJobStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PENDING", + 3: "RUNNING", + 4: "DONE", +} +var MutateJobStatusEnum_MutateJobStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PENDING": 2, + "RUNNING": 3, + "DONE": 4, +} + +func (x MutateJobStatusEnum_MutateJobStatus) String() string { + return proto.EnumName(MutateJobStatusEnum_MutateJobStatus_name, int32(x)) +} +func (MutateJobStatusEnum_MutateJobStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_status_c1054ab47bf7cadd, []int{0, 0} +} + +// Container for enum describing possible mutate job statuses. 
+type MutateJobStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateJobStatusEnum) Reset() { *m = MutateJobStatusEnum{} } +func (m *MutateJobStatusEnum) String() string { return proto.CompactTextString(m) } +func (*MutateJobStatusEnum) ProtoMessage() {} +func (*MutateJobStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_status_c1054ab47bf7cadd, []int{0} +} +func (m *MutateJobStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateJobStatusEnum.Unmarshal(m, b) +} +func (m *MutateJobStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateJobStatusEnum.Marshal(b, m, deterministic) +} +func (dst *MutateJobStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateJobStatusEnum.Merge(dst, src) +} +func (m *MutateJobStatusEnum) XXX_Size() int { + return xxx_messageInfo_MutateJobStatusEnum.Size(m) +} +func (m *MutateJobStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MutateJobStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateJobStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MutateJobStatusEnum)(nil), "google.ads.googleads.v1.enums.MutateJobStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.MutateJobStatusEnum_MutateJobStatus", MutateJobStatusEnum_MutateJobStatus_name, MutateJobStatusEnum_MutateJobStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/mutate_job_status.proto", fileDescriptor_mutate_job_status_c1054ab47bf7cadd) +} + +var fileDescriptor_mutate_job_status_c1054ab47bf7cadd = []byte{ + // 309 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x75, 0xdd, 0x50, 0xc9, 0x1e, 0x56, 0xaa, 0x4f, 0xe2, 0x1e, 0xb6, 0x0f, 0x48, 0x28, 0xe2, + 0x4b, 0x7c, 0xea, 0x5c, 0x1d, 0x53, 0xcc, 0x8a, 0xa5, 0x15, 0xa4, 0x30, 0x52, 0x5b, 0x42, 0xc7, + 0x9a, 0x94, 0x25, 0xdd, 0x07, 0xf9, 0xe8, 0xa7, 0xf8, 0x27, 0xfa, 0x15, 0x92, 0x74, 0xed, 0xc3, + 0x40, 0x5f, 0xc2, 0xb9, 0xf7, 0x9e, 0x73, 0x72, 0xee, 0x05, 0xb7, 0x4c, 0x08, 0xb6, 0xcd, 0x11, + 0xcd, 0x24, 0x6a, 0xa0, 0x46, 0x7b, 0x17, 0xe5, 0xbc, 0x2e, 0x25, 0x2a, 0x6b, 0x45, 0x55, 0xbe, + 0xde, 0x88, 0x74, 0x2d, 0x15, 0x55, 0xb5, 0x84, 0xd5, 0x4e, 0x28, 0xe1, 0x8c, 0x1b, 0x2e, 0xa4, + 0x99, 0x84, 0x9d, 0x0c, 0xee, 0x5d, 0x68, 0x64, 0x57, 0xd7, 0xad, 0x6b, 0x55, 0x20, 0xca, 0xb9, + 0x50, 0x54, 0x15, 0x82, 0x1f, 0xc4, 0xd3, 0x0d, 0xb8, 0x78, 0x36, 0xbe, 0x8f, 0x22, 0x0d, 0x8d, + 0xab, 0xcf, 0xeb, 0x72, 0x1a, 0x82, 0xd1, 0x51, 0xdb, 0x19, 0x81, 0x61, 0x44, 0xc2, 0xc0, 0xbf, + 0x5f, 0x3e, 0x2c, 0xfd, 0xb9, 0x7d, 0xe2, 0x0c, 0xc1, 0x59, 0x44, 0x9e, 0xc8, 0xea, 0x95, 0xd8, + 0x3d, 0x5d, 0x04, 0x3e, 0x99, 0x2f, 0xc9, 0xc2, 0xb6, 0x74, 0xf1, 0x12, 0x11, 0xa2, 0x8b, 0xbe, + 0x73, 0x0e, 0x06, 0xf3, 0x15, 0xf1, 0xed, 0xc1, 0xec, 0xbb, 0x07, 0x26, 0xef, 0xa2, 0x84, 0xff, + 0xe6, 0x9d, 0x5d, 0x1e, 0x7d, 0x1c, 0xe8, 0x9c, 0x41, 0xef, 0x6d, 0x76, 0x90, 0x31, 0xb1, 0xa5, + 0x9c, 0x41, 0xb1, 0x63, 0x88, 0xe5, 0xdc, 0x6c, 0xd1, 0x5e, 0xab, 0x2a, 0xe4, 0x1f, 0xc7, 0xbb, + 0x33, 0xef, 0x87, 0xd5, 0x5f, 0x78, 0xde, 0xa7, 0x35, 0x5e, 0x34, 0x56, 0x5e, 0x26, 0x61, 0x03, + 0x35, 0x8a, 0x5d, 0xa8, 0x77, 0x97, 0x5f, 0xed, 0x3c, 0xf1, 0x32, 0x99, 0x74, 0xf3, 0x24, 0x76, + 0x13, 0x33, 0xff, 0xb1, 0x26, 0x4d, 0x13, 0x63, 0x2f, 0x93, 0x18, 0x77, 0x0c, 0x8c, 0x63, 0x17, + 0x63, 0xc3, 0x49, 0x4f, 0x4d, 0xb0, 
0x9b, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x36, 0x88, + 0xfe, 0xd4, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/negative_geo_target_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/negative_geo_target_type.pb.go new file mode 100644 index 0000000..7e096f2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/negative_geo_target_type.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/negative_geo_target_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible negative geo target types. +type NegativeGeoTargetTypeEnum_NegativeGeoTargetType int32 + +const ( + // Not specified. + NegativeGeoTargetTypeEnum_UNSPECIFIED NegativeGeoTargetTypeEnum_NegativeGeoTargetType = 0 + // The value is unknown in this version. + NegativeGeoTargetTypeEnum_UNKNOWN NegativeGeoTargetTypeEnum_NegativeGeoTargetType = 1 + // Specifies that a user is excluded from seeing the ad if they + // are in, or show interest in, advertiser's excluded locations. + NegativeGeoTargetTypeEnum_DONT_CARE NegativeGeoTargetTypeEnum_NegativeGeoTargetType = 2 + // Specifies that a user is excluded from seeing the ad if they + // are in advertiser's excluded locations. + NegativeGeoTargetTypeEnum_LOCATION_OF_PRESENCE NegativeGeoTargetTypeEnum_NegativeGeoTargetType = 3 +) + +var NegativeGeoTargetTypeEnum_NegativeGeoTargetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DONT_CARE", + 3: "LOCATION_OF_PRESENCE", +} +var NegativeGeoTargetTypeEnum_NegativeGeoTargetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DONT_CARE": 2, + "LOCATION_OF_PRESENCE": 3, +} + +func (x NegativeGeoTargetTypeEnum_NegativeGeoTargetType) String() string { + return proto.EnumName(NegativeGeoTargetTypeEnum_NegativeGeoTargetType_name, int32(x)) +} +func (NegativeGeoTargetTypeEnum_NegativeGeoTargetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_negative_geo_target_type_835f17c79fca4c8e, []int{0, 0} +} + +// Container for enum describing possible negative geo target types. 
+type NegativeGeoTargetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NegativeGeoTargetTypeEnum) Reset() { *m = NegativeGeoTargetTypeEnum{} } +func (m *NegativeGeoTargetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*NegativeGeoTargetTypeEnum) ProtoMessage() {} +func (*NegativeGeoTargetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_negative_geo_target_type_835f17c79fca4c8e, []int{0} +} +func (m *NegativeGeoTargetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NegativeGeoTargetTypeEnum.Unmarshal(m, b) +} +func (m *NegativeGeoTargetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NegativeGeoTargetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *NegativeGeoTargetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_NegativeGeoTargetTypeEnum.Merge(dst, src) +} +func (m *NegativeGeoTargetTypeEnum) XXX_Size() int { + return xxx_messageInfo_NegativeGeoTargetTypeEnum.Size(m) +} +func (m *NegativeGeoTargetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_NegativeGeoTargetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_NegativeGeoTargetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*NegativeGeoTargetTypeEnum)(nil), "google.ads.googleads.v1.enums.NegativeGeoTargetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.NegativeGeoTargetTypeEnum_NegativeGeoTargetType", NegativeGeoTargetTypeEnum_NegativeGeoTargetType_name, NegativeGeoTargetTypeEnum_NegativeGeoTargetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/negative_geo_target_type.proto", fileDescriptor_negative_geo_target_type_835f17c79fca4c8e) +} + +var fileDescriptor_negative_geo_target_type_835f17c79fca4c8e = []byte{ + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xeb, 0x30, + 0x1c, 0xc6, 0xcf, 0x3a, 0x38, 0x87, 0x93, 0x21, 0x96, 0xa2, 0xa0, 0xc3, 0x5d, 0x6c, 0x0f, 0x90, + 0x52, 0xbc, 0x8b, 0xde, 0x64, 0x5d, 0x36, 0x86, 0x92, 0x96, 0xad, 0x9b, 0x20, 0xc5, 0x12, 0x6d, + 0x08, 0x85, 0x2d, 0x29, 0x4b, 0x36, 0x18, 0xbe, 0x8d, 0x97, 0x3e, 0x8a, 0x8f, 0xe2, 0x8d, 0xaf, + 0x20, 0x4d, 0xb7, 0x5d, 0x4d, 0x6f, 0xc2, 0x47, 0xbe, 0xff, 0xef, 0xe3, 0xff, 0xff, 0xc0, 0xad, + 0x50, 0x4a, 0x2c, 0xb8, 0xcf, 0x72, 0xed, 0xd7, 0xb2, 0x52, 0x9b, 0xc0, 0xe7, 0x72, 0xbd, 0xd4, + 0xbe, 0xe4, 0x82, 0x99, 0x62, 0xc3, 0x33, 0xc1, 0x55, 0x66, 0xd8, 0x4a, 0x70, 0x93, 0x99, 0x6d, + 0xc9, 0x61, 0xb9, 0x52, 0x46, 0x79, 0x9d, 0x1a, 0x81, 0x2c, 0xd7, 0xf0, 0x40, 0xc3, 0x4d, 0x00, + 0x2d, 0xdd, 0xbe, 0xda, 0x87, 0x97, 0x85, 0xcf, 0xa4, 0x54, 0x86, 0x99, 0x42, 0x49, 0x5d, 0xc3, + 0xbd, 0x57, 0x70, 0x49, 0x77, 0xf1, 0x23, 0xae, 0x12, 0x1b, 0x9e, 0x6c, 0x4b, 0x4e, 0xe4, 0x7a, + 0xd9, 0x7b, 0x02, 0xe7, 0x47, 0x4d, 0xef, 0x14, 0xb4, 0x66, 0x74, 0x1a, 0x93, 0x70, 0x3c, 0x1c, + 0x93, 0x81, 0xfb, 0xc7, 0x6b, 0x81, 0x7f, 0x33, 0x7a, 0x47, 0xa3, 0x07, 0xea, 0x36, 0xbc, 0x13, + 0xf0, 0x7f, 0x10, 0xd1, 0x24, 0x0b, 0xf1, 0x84, 0xb8, 0x8e, 0x77, 0x01, 0xce, 0xee, 0xa3, 0x10, + 0x27, 0xe3, 0x88, 0x66, 0xd1, 0x30, 0x8b, 0x27, 0x64, 0x4a, 0x68, 0x48, 0xdc, 0x66, 0xff, 0xab, + 0x01, 0xba, 0x2f, 0x6a, 0x09, 0x7f, 0x3d, 0xa0, 0xdf, 0x3e, 0xba, 0x43, 0x5c, 0xad, 0x1f, 0x37, + 0x1e, 0xfb, 0x3b, 0x58, 0xa8, 0x05, 0x93, 0x02, 0xaa, 0x95, 0xf0, 0x05, 0x97, 0xf6, 0xb8, 0x7d, + 0x97, 0x65, 0xa1, 0x7f, 0xa8, 0xf6, 0xc6, 0xbe, 0x6f, 
0x4e, 0x73, 0x84, 0xf1, 0xbb, 0xd3, 0x19, + 0xd5, 0x51, 0x38, 0xd7, 0xb0, 0x96, 0x95, 0x9a, 0x07, 0xb0, 0x2a, 0x43, 0x7f, 0xec, 0xfd, 0x14, + 0xe7, 0x3a, 0x3d, 0xf8, 0xe9, 0x3c, 0x48, 0xad, 0xff, 0xe9, 0x74, 0xeb, 0x4f, 0x84, 0x70, 0xae, + 0x11, 0x3a, 0x4c, 0x20, 0x34, 0x0f, 0x10, 0xb2, 0x33, 0xcf, 0x7f, 0xed, 0x62, 0xd7, 0xdf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0xdd, 0xf9, 0x7e, 0x8a, 0xf2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/operating_system_version_operator_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/operating_system_version_operator_type.pb.go new file mode 100644 index 0000000..8eb10aa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/operating_system_version_operator_type.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/operating_system_version_operator_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of operating system version. +type OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType int32 + +const ( + // Not specified. + OperatingSystemVersionOperatorTypeEnum_UNSPECIFIED OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType = 0 + // Used for return value only. Represents value unknown in this version. + OperatingSystemVersionOperatorTypeEnum_UNKNOWN OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType = 1 + // Equals to the specified version. + OperatingSystemVersionOperatorTypeEnum_EQUALS_TO OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType = 2 + // Greater than or equals to the specified version. + OperatingSystemVersionOperatorTypeEnum_GREATER_THAN_EQUALS_TO OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType = 4 +) + +var OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EQUALS_TO", + 4: "GREATER_THAN_EQUALS_TO", +} +var OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EQUALS_TO": 2, + "GREATER_THAN_EQUALS_TO": 4, +} + +func (x OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType) String() string { + return proto.EnumName(OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType_name, int32(x)) +} +func (OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operating_system_version_operator_type_03c667b04f322a09, []int{0, 0} +} + +// Container for enum describing the type of OS operators. 
+type OperatingSystemVersionOperatorTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperatingSystemVersionOperatorTypeEnum) Reset() { + *m = OperatingSystemVersionOperatorTypeEnum{} +} +func (m *OperatingSystemVersionOperatorTypeEnum) String() string { return proto.CompactTextString(m) } +func (*OperatingSystemVersionOperatorTypeEnum) ProtoMessage() {} +func (*OperatingSystemVersionOperatorTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_operating_system_version_operator_type_03c667b04f322a09, []int{0} +} +func (m *OperatingSystemVersionOperatorTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum.Unmarshal(m, b) +} +func (m *OperatingSystemVersionOperatorTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum.Marshal(b, m, deterministic) +} +func (dst *OperatingSystemVersionOperatorTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum.Merge(dst, src) +} +func (m *OperatingSystemVersionOperatorTypeEnum) XXX_Size() int { + return xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum.Size(m) +} +func (m *OperatingSystemVersionOperatorTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_OperatingSystemVersionOperatorTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*OperatingSystemVersionOperatorTypeEnum)(nil), "google.ads.googleads.v1.enums.OperatingSystemVersionOperatorTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType", OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType_name, OperatingSystemVersionOperatorTypeEnum_OperatingSystemVersionOperatorType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/operating_system_version_operator_type.proto", fileDescriptor_operating_system_version_operator_type_03c667b04f322a09) +} + +var fileDescriptor_operating_system_version_operator_type_03c667b04f322a09 = []byte{ + // 338 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x51, 0x4f, 0x4b, 0xc3, 0x30, + 0x1c, 0xb5, 0x55, 0x14, 0x33, 0xc4, 0xd1, 0x83, 0x87, 0xe1, 0x0e, 0xdb, 0x41, 0x6f, 0x29, 0xc5, + 0x5b, 0x3c, 0x65, 0x5a, 0xe7, 0x54, 0xda, 0xb9, 0x75, 0x15, 0xa4, 0x10, 0xaa, 0x0d, 0xa1, 0xb0, + 0x26, 0xa5, 0xc9, 0x06, 0xfb, 0x24, 0xde, 0x3d, 0xfa, 0x51, 0xfc, 0x28, 0x7e, 0x06, 0x0f, 0xd2, + 0x64, 0x9d, 0x27, 0xdd, 0x25, 0x3c, 0xf2, 0xde, 0xef, 0xbd, 0xdf, 0x1f, 0x70, 0xc7, 0x84, 0x60, + 0x73, 0xea, 0xa6, 0x99, 0x74, 0x0d, 0xac, 0xd1, 0xd2, 0x73, 0x29, 0x5f, 0x14, 0xd2, 0x15, 0x25, + 0xad, 0x52, 0x95, 0x73, 0x46, 0xe4, 0x4a, 0x2a, 0x5a, 0x90, 0x25, 0xad, 0x64, 0x2e, 0x38, 0x31, + 0x84, 0xa8, 0x88, 0x5a, 0x95, 0x14, 0x96, 0x95, 0x50, 0xc2, 0xe9, 0x1a, 0x03, 0x98, 0x66, 0x12, + 0x6e, 0xbc, 0xe0, 0xd2, 0x83, 0xda, 0xab, 0x73, 0xda, 0x44, 0x95, 0xb9, 0x9b, 0x72, 0x2e, 0x54, + 0xaa, 0x72, 0xc1, 0xa5, 0x29, 0xee, 0xbf, 0x59, 0xe0, 0x2c, 0x6c, 0xd2, 0xa6, 0x3a, 0x2c, 0x36, + 0x59, 0xe1, 0x3a, 0x2a, 0x5a, 0x95, 0xd4, 0xe7, 0x8b, 0xa2, 0x5f, 0x80, 0xfe, 0x76, 0xa5, 0x73, + 0x0c, 0x5a, 0xb3, 0x60, 0x3a, 0xf6, 0xaf, 0x46, 0x37, 0x23, 0xff, 0xba, 0xbd, 0xe3, 0xb4, 0xc0, + 0xc1, 0x2c, 0xb8, 0x0f, 0xc2, 0xa7, 0xa0, 0x6d, 0x39, 0x47, 
0xe0, 0xd0, 0x7f, 0x9c, 0xe1, 0x87, + 0x29, 0x89, 0xc2, 0xb6, 0xed, 0x74, 0xc0, 0xc9, 0x70, 0xe2, 0xe3, 0xc8, 0x9f, 0x90, 0xe8, 0x16, + 0x07, 0xe4, 0x97, 0xdb, 0x1b, 0x7c, 0x5b, 0xa0, 0xf7, 0x2a, 0x0a, 0xf8, 0xef, 0x74, 0x83, 0xf3, + 0xed, 0x2d, 0x8d, 0xeb, 0x41, 0xc7, 0xd6, 0xf3, 0x60, 0xed, 0xc4, 0xc4, 0x3c, 0xe5, 0x0c, 0x8a, + 0x8a, 0xb9, 0x8c, 0x72, 0xbd, 0x86, 0xe6, 0x06, 0x65, 0x2e, 0xff, 0x38, 0xc9, 0xa5, 0x7e, 0xdf, + 0xed, 0xdd, 0x21, 0xc6, 0x1f, 0x76, 0x77, 0x68, 0xac, 0x70, 0x26, 0xa1, 0x81, 0x35, 0x8a, 0x3d, + 0x58, 0x2f, 0x4a, 0x7e, 0x36, 0x7c, 0x82, 0x33, 0x99, 0x6c, 0xf8, 0x24, 0xf6, 0x12, 0xcd, 0x7f, + 0xd9, 0x3d, 0xf3, 0x89, 0x10, 0xce, 0x24, 0x42, 0x1b, 0x05, 0x42, 0xb1, 0x87, 0x90, 0xd6, 0xbc, + 0xec, 0xeb, 0xc6, 0x2e, 0x7e, 0x02, 0x00, 0x00, 0xff, 0xff, 0x40, 0xf6, 0xcb, 0x8a, 0x2a, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/page_one_promoted_strategy_goal.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/page_one_promoted_strategy_goal.pb.go new file mode 100644 index 0000000..dd031df --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/page_one_promoted_strategy_goal.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/page_one_promoted_strategy_goal.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible strategy goals. +type PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal int32 + +const ( + // Not specified. + PageOnePromotedStrategyGoalEnum_UNSPECIFIED PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal = 0 + // Used for return value only. Represents value unknown in this version. + PageOnePromotedStrategyGoalEnum_UNKNOWN PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal = 1 + // First page on google.com. + PageOnePromotedStrategyGoalEnum_FIRST_PAGE PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal = 2 + // Top slots of the first page on google.com. 
+ PageOnePromotedStrategyGoalEnum_FIRST_PAGE_PROMOTED PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal = 3 +) + +var PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FIRST_PAGE", + 3: "FIRST_PAGE_PROMOTED", +} +var PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FIRST_PAGE": 2, + "FIRST_PAGE_PROMOTED": 3, +} + +func (x PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal) String() string { + return proto.EnumName(PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal_name, int32(x)) +} +func (PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_page_one_promoted_strategy_goal_af27d4f7a36bbf23, []int{0, 0} +} + +// Container for enum describing possible strategy goals: where impressions are +// desired to be shown on search result pages. +type PageOnePromotedStrategyGoalEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PageOnePromotedStrategyGoalEnum) Reset() { *m = PageOnePromotedStrategyGoalEnum{} } +func (m *PageOnePromotedStrategyGoalEnum) String() string { return proto.CompactTextString(m) } +func (*PageOnePromotedStrategyGoalEnum) ProtoMessage() {} +func (*PageOnePromotedStrategyGoalEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_page_one_promoted_strategy_goal_af27d4f7a36bbf23, []int{0} +} +func (m *PageOnePromotedStrategyGoalEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PageOnePromotedStrategyGoalEnum.Unmarshal(m, b) +} +func (m *PageOnePromotedStrategyGoalEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PageOnePromotedStrategyGoalEnum.Marshal(b, m, deterministic) +} +func (dst *PageOnePromotedStrategyGoalEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PageOnePromotedStrategyGoalEnum.Merge(dst, src) +} +func (m *PageOnePromotedStrategyGoalEnum) XXX_Size() int { + return xxx_messageInfo_PageOnePromotedStrategyGoalEnum.Size(m) +} +func (m *PageOnePromotedStrategyGoalEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PageOnePromotedStrategyGoalEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PageOnePromotedStrategyGoalEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PageOnePromotedStrategyGoalEnum)(nil), "google.ads.googleads.v1.enums.PageOnePromotedStrategyGoalEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal", PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal_name, PageOnePromotedStrategyGoalEnum_PageOnePromotedStrategyGoal_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/page_one_promoted_strategy_goal.proto", fileDescriptor_page_one_promoted_strategy_goal_af27d4f7a36bbf23) +} + +var fileDescriptor_page_one_promoted_strategy_goal_af27d4f7a36bbf23 = []byte{ + // 331 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xc3, 0x40, + 0x18, 0x34, 0x29, 0x28, 0x6c, 0x41, 0x43, 0x3c, 0x08, 0x6a, 0xd1, 0xf6, 0x01, 0x36, 0x04, 0x6f, + 0xeb, 0x29, 0x6d, 0xd3, 0x50, 0xc4, 0x64, 0xe9, 0x9f, 0x20, 0x81, 0xb0, 0xba, 0xcb, 0x52, 0x48, + 0xf6, 0x0b, 0xd9, 0xb4, 0xe0, 0x13, 0xf8, 0x1e, 0x1e, 0x7d, 0x14, 0x1f, 0xc5, 0xbb, 0x77, 0x49, + 0xd2, 0xd6, 0x93, 0xb9, 0x84, 0xc9, 0x7e, 0xf3, 0xcd, 0x7c, 
0x33, 0x68, 0x24, 0x01, 0x64, 0x2a, + 0x1c, 0xc6, 0xb5, 0xd3, 0xc0, 0x0a, 0x6d, 0x5d, 0x47, 0xa8, 0x4d, 0xa6, 0x9d, 0x9c, 0x49, 0x91, + 0x80, 0x12, 0x49, 0x5e, 0x40, 0x06, 0xa5, 0xe0, 0x89, 0x2e, 0x0b, 0x56, 0x0a, 0xf9, 0x96, 0x48, + 0x60, 0x29, 0xce, 0x0b, 0x28, 0xc1, 0xee, 0x35, 0x9b, 0x98, 0x71, 0x8d, 0x0f, 0x22, 0x78, 0xeb, + 0xe2, 0x5a, 0xe4, 0xf2, 0x7a, 0xef, 0x91, 0xaf, 0x1d, 0xa6, 0x14, 0x94, 0xac, 0x5c, 0x83, 0xd2, + 0xcd, 0xf2, 0xe0, 0xdd, 0x40, 0x37, 0x94, 0x49, 0x11, 0x29, 0x41, 0x77, 0x26, 0xf3, 0x9d, 0x47, + 0x00, 0x2c, 0xf5, 0xd5, 0x26, 0x1b, 0x70, 0x74, 0xd5, 0x42, 0xb1, 0xcf, 0x50, 0x77, 0x19, 0xce, + 0xa9, 0x3f, 0x9a, 0x4e, 0xa6, 0xfe, 0xd8, 0x3a, 0xb2, 0xbb, 0xe8, 0x64, 0x19, 0x3e, 0x84, 0xd1, + 0x53, 0x68, 0x19, 0xf6, 0x29, 0x42, 0x93, 0xe9, 0x6c, 0xbe, 0x48, 0xa8, 0x17, 0xf8, 0x96, 0x69, + 0x5f, 0xa0, 0xf3, 0xbf, 0xff, 0x84, 0xce, 0xa2, 0xc7, 0x68, 0xe1, 0x8f, 0xad, 0xce, 0xf0, 0xc7, + 0x40, 0xfd, 0x57, 0xc8, 0x70, 0x6b, 0x9a, 0xe1, 0x6d, 0xcb, 0x25, 0xb4, 0x4a, 0x44, 0x8d, 0xe7, + 0xe1, 0x4e, 0x42, 0x42, 0xca, 0x94, 0xc4, 0x50, 0x48, 0x47, 0x0a, 0x55, 0xe7, 0xdd, 0xb7, 0x9c, + 0xaf, 0xf5, 0x3f, 0xa5, 0xdf, 0xd7, 0xdf, 0x0f, 0xb3, 0x13, 0x78, 0xde, 0xa7, 0xd9, 0x0b, 0x1a, + 0x29, 0x8f, 0x6b, 0xdc, 0xc0, 0x0a, 0xad, 0x5c, 0x5c, 0x15, 0xa3, 0xbf, 0xf6, 0xf3, 0xd8, 0xe3, + 0x3a, 0x3e, 0xcc, 0xe3, 0x95, 0x1b, 0xd7, 0xf3, 0x6f, 0xb3, 0xdf, 0x3c, 0x12, 0xe2, 0x71, 0x4d, + 0xc8, 0x81, 0x41, 0xc8, 0xca, 0x25, 0xa4, 0xe6, 0xbc, 0x1c, 0xd7, 0x87, 0xdd, 0xfd, 0x06, 0x00, + 0x00, 0xff, 0xff, 0x14, 0xde, 0x68, 0xf5, 0x0c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/parental_status_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/parental_status_type.pb.go new file mode 100644 index 0000000..24c9138 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/parental_status_type.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/parental_status_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of parental statuses (e.g. not a parent). +type ParentalStatusTypeEnum_ParentalStatusType int32 + +const ( + // Not specified. + ParentalStatusTypeEnum_UNSPECIFIED ParentalStatusTypeEnum_ParentalStatusType = 0 + // Used for return value only. Represents value unknown in this version. + ParentalStatusTypeEnum_UNKNOWN ParentalStatusTypeEnum_ParentalStatusType = 1 + // Parent. + ParentalStatusTypeEnum_PARENT ParentalStatusTypeEnum_ParentalStatusType = 300 + // Not a parent. + ParentalStatusTypeEnum_NOT_A_PARENT ParentalStatusTypeEnum_ParentalStatusType = 301 + // Undetermined parental status. 
+ ParentalStatusTypeEnum_UNDETERMINED ParentalStatusTypeEnum_ParentalStatusType = 302 +) + +var ParentalStatusTypeEnum_ParentalStatusType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 300: "PARENT", + 301: "NOT_A_PARENT", + 302: "UNDETERMINED", +} +var ParentalStatusTypeEnum_ParentalStatusType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PARENT": 300, + "NOT_A_PARENT": 301, + "UNDETERMINED": 302, +} + +func (x ParentalStatusTypeEnum_ParentalStatusType) String() string { + return proto.EnumName(ParentalStatusTypeEnum_ParentalStatusType_name, int32(x)) +} +func (ParentalStatusTypeEnum_ParentalStatusType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_parental_status_type_7dc2fa597ff4162a, []int{0, 0} +} + +// Container for enum describing the type of demographic parental statuses. +type ParentalStatusTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParentalStatusTypeEnum) Reset() { *m = ParentalStatusTypeEnum{} } +func (m *ParentalStatusTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ParentalStatusTypeEnum) ProtoMessage() {} +func (*ParentalStatusTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_parental_status_type_7dc2fa597ff4162a, []int{0} +} +func (m *ParentalStatusTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParentalStatusTypeEnum.Unmarshal(m, b) +} +func (m *ParentalStatusTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParentalStatusTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ParentalStatusTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParentalStatusTypeEnum.Merge(dst, src) +} +func (m *ParentalStatusTypeEnum) XXX_Size() int { + return xxx_messageInfo_ParentalStatusTypeEnum.Size(m) +} +func (m *ParentalStatusTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ParentalStatusTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ParentalStatusTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ParentalStatusTypeEnum)(nil), "google.ads.googleads.v1.enums.ParentalStatusTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ParentalStatusTypeEnum_ParentalStatusType", ParentalStatusTypeEnum_ParentalStatusType_name, ParentalStatusTypeEnum_ParentalStatusType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/parental_status_type.proto", fileDescriptor_parental_status_type_7dc2fa597ff4162a) +} + +var fileDescriptor_parental_status_type_7dc2fa597ff4162a = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4f, 0x4a, 0x33, 0x31, + 0x1c, 0xfd, 0x3a, 0x1f, 0x54, 0x48, 0x05, 0xc7, 0x59, 0x28, 0x88, 0x5d, 0xb4, 0x07, 0xc8, 0x30, + 0xb8, 0x91, 0xb8, 0x4a, 0x6d, 0x2c, 0x45, 0x4c, 0x87, 0x76, 0x5a, 0x41, 0x06, 0x86, 0xe8, 0x84, + 0x50, 0x68, 0x93, 0xd0, 0xa4, 0x85, 0xae, 0xbc, 0x8b, 0x4b, 0x51, 0x0f, 0xe2, 0x51, 0xc4, 0x43, + 0xc8, 0x24, 0x6d, 0x37, 0x45, 0x37, 0xe1, 0xf1, 0x7b, 0x7f, 0x78, 0x79, 0xe0, 0x52, 0x28, 0x25, + 0x66, 0x3c, 0x66, 0xa5, 0x89, 0x3d, 0xac, 0xd0, 0x2a, 0x89, 0xb9, 0x5c, 0xce, 0x4d, 0xac, 0xd9, + 0x82, 0x4b, 0xcb, 0x66, 0x85, 0xb1, 0xcc, 0x2e, 0x4d, 0x61, 0xd7, 0x9a, 0x43, 0xbd, 0x50, 0x56, + 0x45, 0x4d, 0x2f, 0x87, 0xac, 0x34, 0x70, 0xe7, 0x84, 0xab, 0x04, 0x3a, 0xe7, 0xd9, 0xf9, 0x36, + 0x58, 0x4f, 0x63, 0x26, 0xa5, 0xb2, 0xcc, 0x4e, 0x95, 0x34, 0xde, 0xdc, 0x7e, 0x06, 0x27, 
0xe9, + 0x26, 0x7a, 0xe4, 0x92, 0xb3, 0xb5, 0xe6, 0x44, 0x2e, 0xe7, 0x6d, 0x0e, 0xa2, 0x7d, 0x26, 0x3a, + 0x02, 0x8d, 0x31, 0x1d, 0xa5, 0xe4, 0xba, 0x7f, 0xd3, 0x27, 0xdd, 0xf0, 0x5f, 0xd4, 0x00, 0x07, + 0x63, 0x7a, 0x4b, 0x07, 0xf7, 0x34, 0xac, 0x45, 0x0d, 0x50, 0x4f, 0xf1, 0x90, 0xd0, 0x2c, 0x7c, + 0x0b, 0xa2, 0x63, 0x70, 0x48, 0x07, 0x59, 0x81, 0x8b, 0xcd, 0xe9, 0xdd, 0x9d, 0xc6, 0xb4, 0x4b, + 0x32, 0x32, 0xbc, 0xeb, 0x53, 0xd2, 0x0d, 0x3f, 0x82, 0xce, 0x77, 0x0d, 0xb4, 0x9e, 0xd4, 0x1c, + 0xfe, 0xf9, 0x89, 0xce, 0xe9, 0x7e, 0x95, 0xb4, 0xea, 0x9f, 0xd6, 0x1e, 0x3a, 0x1b, 0xa7, 0x50, + 0x33, 0x26, 0x05, 0x54, 0x0b, 0x11, 0x0b, 0x2e, 0xdd, 0xef, 0xb6, 0x43, 0xea, 0xa9, 0xf9, 0x65, + 0xd7, 0x2b, 0xf7, 0xbe, 0x04, 0xff, 0x7b, 0x18, 0xbf, 0x06, 0xcd, 0x9e, 0x8f, 0xc2, 0xa5, 0x81, + 0x1e, 0x56, 0x68, 0x92, 0xc0, 0x6a, 0x10, 0xf3, 0xb9, 0xe5, 0x73, 0x5c, 0x9a, 0x7c, 0xc7, 0xe7, + 0x93, 0x24, 0x77, 0xfc, 0x57, 0xd0, 0xf2, 0x47, 0x84, 0x70, 0x69, 0x10, 0xda, 0x29, 0x10, 0x9a, + 0x24, 0x08, 0x39, 0xcd, 0x63, 0xdd, 0x15, 0xbb, 0xf8, 0x09, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x49, + 0x96, 0x75, 0xef, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/payment_mode.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/payment_mode.pb.go new file mode 100644 index 0000000..82c4c54 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/payment_mode.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/payment_mode.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible payment modes. +type PaymentModeEnum_PaymentMode int32 + +const ( + // Not specified. + PaymentModeEnum_UNSPECIFIED PaymentModeEnum_PaymentMode = 0 + // Used for return value only. Represents value unknown in this version. + PaymentModeEnum_UNKNOWN PaymentModeEnum_PaymentMode = 1 + // Pay per click. + PaymentModeEnum_CLICKS PaymentModeEnum_PaymentMode = 4 + // Pay per conversion value. This mode is only supported by campaigns with + // AdvertisingChannelType.HOTEL, BiddingStrategyType.COMMISSION, and + // BudgetType.HOTEL_ADS_COMMISSION. + PaymentModeEnum_CONVERSION_VALUE PaymentModeEnum_PaymentMode = 5 + // Pay per conversion. This mode is only supported by campaigns with + // AdvertisingChannelType.DISPLAY (excluding + // AdvertisingChannelSubType.DISPLAY_GMAIL), BiddingStrategyType.TARGET_CPA, + // and BudgetType.FIXED_CPA. The customer must also be eligible for this + // mode. See Customer.eligibility_failure_reasons for details. 
+ PaymentModeEnum_CONVERSIONS PaymentModeEnum_PaymentMode = 6 +) + +var PaymentModeEnum_PaymentMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 4: "CLICKS", + 5: "CONVERSION_VALUE", + 6: "CONVERSIONS", +} +var PaymentModeEnum_PaymentMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CLICKS": 4, + "CONVERSION_VALUE": 5, + "CONVERSIONS": 6, +} + +func (x PaymentModeEnum_PaymentMode) String() string { + return proto.EnumName(PaymentModeEnum_PaymentMode_name, int32(x)) +} +func (PaymentModeEnum_PaymentMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_payment_mode_1e9387136819e800, []int{0, 0} +} + +// Container for enum describing possible payment modes. +type PaymentModeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PaymentModeEnum) Reset() { *m = PaymentModeEnum{} } +func (m *PaymentModeEnum) String() string { return proto.CompactTextString(m) } +func (*PaymentModeEnum) ProtoMessage() {} +func (*PaymentModeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_payment_mode_1e9387136819e800, []int{0} +} +func (m *PaymentModeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PaymentModeEnum.Unmarshal(m, b) +} +func (m *PaymentModeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PaymentModeEnum.Marshal(b, m, deterministic) +} +func (dst *PaymentModeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PaymentModeEnum.Merge(dst, src) +} +func (m *PaymentModeEnum) XXX_Size() int { + return xxx_messageInfo_PaymentModeEnum.Size(m) +} +func (m *PaymentModeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PaymentModeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PaymentModeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PaymentModeEnum)(nil), "google.ads.googleads.v1.enums.PaymentModeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PaymentModeEnum_PaymentMode", PaymentModeEnum_PaymentMode_name, PaymentModeEnum_PaymentMode_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/payment_mode.proto", fileDescriptor_payment_mode_1e9387136819e800) +} + +var fileDescriptor_payment_mode_1e9387136819e800 = []byte{ + // 312 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0x76, 0x53, 0x27, 0x64, 0x87, 0x85, 0xe2, 0x49, 0xdc, 0x61, 0x7b, 0x80, 0xd4, 0xe2, 0x2d, + 0x9e, 0xb2, 0x5a, 0x47, 0xd9, 0xcc, 0x8a, 0x65, 0x15, 0xa4, 0x38, 0xa2, 0x09, 0x61, 0xb0, 0x26, + 0x75, 0xe9, 0x06, 0xbe, 0x8e, 0x47, 0x1f, 0xc5, 0xf7, 0xf0, 0xe2, 0x53, 0x48, 0x13, 0x57, 0x77, + 0xd1, 0x4b, 0xf8, 0xf8, 0x7d, 0x7f, 0xf8, 0xf2, 0x81, 0x0b, 0xa9, 0xb5, 0x5c, 0x09, 0x9f, 0x71, + 0xe3, 0x3b, 0x58, 0xa3, 0x6d, 0xe0, 0x0b, 0xb5, 0x29, 0x8c, 0x5f, 0xb2, 0xd7, 0x42, 0xa8, 0x6a, + 0x51, 0x68, 0x2e, 0x50, 0xb9, 0xd6, 0x95, 0xf6, 0xfa, 0x4e, 0x86, 0x18, 0x37, 0xa8, 0x71, 0xa0, + 0x6d, 0x80, 0xac, 0xe3, 0xec, 0x7c, 0x17, 0x58, 0x2e, 0x7d, 0xa6, 0x94, 0xae, 0x58, 0xb5, 0xd4, + 0xca, 0x38, 0xf3, 0xf0, 0x05, 0xf4, 0x12, 0x17, 0x79, 0xab, 0xb9, 0x88, 0xd4, 0xa6, 0x18, 0x3e, + 0x82, 0xee, 0xde, 0xc9, 0xeb, 0x81, 0xee, 0x9c, 0xa6, 0x49, 0x14, 0xc6, 0x37, 0x71, 0x74, 0x0d, + 0x0f, 0xbc, 0x2e, 0x38, 0x99, 0xd3, 0x09, 0x9d, 0xdd, 0x53, 0xd8, 0xf2, 0x00, 0xe8, 0x84, 0xd3, + 0x38, 0x9c, 0xa4, 0xf0, 0xc8, 0x3b, 0x05, 0x30, 0x9c, 0xd1, 0x2c, 0xba, 0x4b, 0xe3, 0x19, 0x5d, + 0x64, 0x64, 0x3a, 0x8f, 0xe0, 
0x71, 0xed, 0xff, 0xbd, 0xa6, 0xb0, 0x33, 0xfa, 0x6c, 0x81, 0xc1, + 0xb3, 0x2e, 0xd0, 0xbf, 0xb5, 0x47, 0x70, 0xaf, 0x43, 0x52, 0x57, 0x4d, 0x5a, 0x0f, 0xa3, 0x1f, + 0x8b, 0xd4, 0x2b, 0xa6, 0x24, 0xd2, 0x6b, 0xe9, 0x4b, 0xa1, 0xec, 0x47, 0x76, 0x5b, 0x95, 0x4b, + 0xf3, 0xc7, 0x74, 0x57, 0xf6, 0x7d, 0x6b, 0x1f, 0x8e, 0x09, 0x79, 0x6f, 0xf7, 0xc7, 0x2e, 0x8a, + 0x70, 0x83, 0x1c, 0xac, 0x51, 0x16, 0xa0, 0x7a, 0x02, 0xf3, 0xb1, 0xe3, 0x73, 0xc2, 0x4d, 0xde, + 0xf0, 0x79, 0x16, 0xe4, 0x96, 0xff, 0x6a, 0x0f, 0xdc, 0x11, 0x63, 0xc2, 0x0d, 0xc6, 0x8d, 0x02, + 0xe3, 0x2c, 0xc0, 0xd8, 0x6a, 0x9e, 0x3a, 0xb6, 0xd8, 0xe5, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xf3, 0x82, 0x9d, 0xa0, 0xd2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placeholder_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placeholder_type.pb.go new file mode 100644 index 0000000..8f8ecb5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placeholder_type.pb.go @@ -0,0 +1,217 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/placeholder_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible placeholder types for a feed mapping. +type PlaceholderTypeEnum_PlaceholderType int32 + +const ( + // Not specified. + PlaceholderTypeEnum_UNSPECIFIED PlaceholderTypeEnum_PlaceholderType = 0 + // Used for return value only. Represents value unknown in this version. + PlaceholderTypeEnum_UNKNOWN PlaceholderTypeEnum_PlaceholderType = 1 + // Lets you show links in your ad to pages from your website, including the + // main landing page. + PlaceholderTypeEnum_SITELINK PlaceholderTypeEnum_PlaceholderType = 2 + // Lets you attach a phone number to an ad, allowing customers to call + // directly from the ad. + PlaceholderTypeEnum_CALL PlaceholderTypeEnum_PlaceholderType = 3 + // Lets you provide users with a link that points to a mobile app in + // addition to a website. + PlaceholderTypeEnum_APP PlaceholderTypeEnum_PlaceholderType = 4 + // Lets you show locations of businesses from your Google My Business + // account in your ad. This helps people find your locations by showing your + // ads with your address, a map to your location, or the distance to your + // business. This extension type is useful to draw customers to your + // brick-and-mortar location. + PlaceholderTypeEnum_LOCATION PlaceholderTypeEnum_PlaceholderType = 5 + // If you sell your product through retail chains, affiliate location + // extensions let you show nearby stores that carry your products. + PlaceholderTypeEnum_AFFILIATE_LOCATION PlaceholderTypeEnum_PlaceholderType = 6 + // Lets you include additional text with your search ads that provide + // detailed information about your business, including products and services + // you offer. 
Callouts appear in ads at the top and bottom of Google search + // results. + PlaceholderTypeEnum_CALLOUT PlaceholderTypeEnum_PlaceholderType = 7 + // Lets you add more info to your ad, specific to some predefined categories + // such as types, brands, styles, etc. A minimum of 3 text (SNIPPETS) values + // are required. + PlaceholderTypeEnum_STRUCTURED_SNIPPET PlaceholderTypeEnum_PlaceholderType = 8 + // Allows users to see your ad, click an icon, and contact you directly by + // text message. With one tap on your ad, people can contact you to book an + // appointment, get a quote, ask for information, or request a service. + PlaceholderTypeEnum_MESSAGE PlaceholderTypeEnum_PlaceholderType = 9 + // Lets you display prices for a list of items along with your ads. A price + // feed is composed of three to eight price table rows. + PlaceholderTypeEnum_PRICE PlaceholderTypeEnum_PlaceholderType = 10 + // Allows you to highlight sales and other promotions that let users see how + // they can save by buying now. + PlaceholderTypeEnum_PROMOTION PlaceholderTypeEnum_PlaceholderType = 11 + // Lets you dynamically inject custom data into the title and description + // of your ads. + PlaceholderTypeEnum_AD_CUSTOMIZER PlaceholderTypeEnum_PlaceholderType = 12 + // Indicates that this feed is for education dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_EDUCATION PlaceholderTypeEnum_PlaceholderType = 13 + // Indicates that this feed is for flight dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_FLIGHT PlaceholderTypeEnum_PlaceholderType = 14 + // Indicates that this feed is for a custom dynamic remarketing type. Use + // this only if the other business types don't apply to your products or + // services. + PlaceholderTypeEnum_DYNAMIC_CUSTOM PlaceholderTypeEnum_PlaceholderType = 15 + // Indicates that this feed is for hotels and rentals dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_HOTEL PlaceholderTypeEnum_PlaceholderType = 16 + // Indicates that this feed is for real estate dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_REAL_ESTATE PlaceholderTypeEnum_PlaceholderType = 17 + // Indicates that this feed is for travel dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_TRAVEL PlaceholderTypeEnum_PlaceholderType = 18 + // Indicates that this feed is for local deals dynamic remarketing. + PlaceholderTypeEnum_DYNAMIC_LOCAL PlaceholderTypeEnum_PlaceholderType = 19 + // Indicates that this feed is for job dynamic remarketing. 
+ PlaceholderTypeEnum_DYNAMIC_JOB PlaceholderTypeEnum_PlaceholderType = 20 +) + +var PlaceholderTypeEnum_PlaceholderType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SITELINK", + 3: "CALL", + 4: "APP", + 5: "LOCATION", + 6: "AFFILIATE_LOCATION", + 7: "CALLOUT", + 8: "STRUCTURED_SNIPPET", + 9: "MESSAGE", + 10: "PRICE", + 11: "PROMOTION", + 12: "AD_CUSTOMIZER", + 13: "DYNAMIC_EDUCATION", + 14: "DYNAMIC_FLIGHT", + 15: "DYNAMIC_CUSTOM", + 16: "DYNAMIC_HOTEL", + 17: "DYNAMIC_REAL_ESTATE", + 18: "DYNAMIC_TRAVEL", + 19: "DYNAMIC_LOCAL", + 20: "DYNAMIC_JOB", +} +var PlaceholderTypeEnum_PlaceholderType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SITELINK": 2, + "CALL": 3, + "APP": 4, + "LOCATION": 5, + "AFFILIATE_LOCATION": 6, + "CALLOUT": 7, + "STRUCTURED_SNIPPET": 8, + "MESSAGE": 9, + "PRICE": 10, + "PROMOTION": 11, + "AD_CUSTOMIZER": 12, + "DYNAMIC_EDUCATION": 13, + "DYNAMIC_FLIGHT": 14, + "DYNAMIC_CUSTOM": 15, + "DYNAMIC_HOTEL": 16, + "DYNAMIC_REAL_ESTATE": 17, + "DYNAMIC_TRAVEL": 18, + "DYNAMIC_LOCAL": 19, + "DYNAMIC_JOB": 20, +} + +func (x PlaceholderTypeEnum_PlaceholderType) String() string { + return proto.EnumName(PlaceholderTypeEnum_PlaceholderType_name, int32(x)) +} +func (PlaceholderTypeEnum_PlaceholderType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_placeholder_type_3e5fe1fcc2d8440e, []int{0, 0} +} + +// Container for enum describing possible placeholder types for a feed mapping. +type PlaceholderTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlaceholderTypeEnum) Reset() { *m = PlaceholderTypeEnum{} } +func (m *PlaceholderTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PlaceholderTypeEnum) ProtoMessage() {} +func (*PlaceholderTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_placeholder_type_3e5fe1fcc2d8440e, []int{0} +} +func (m *PlaceholderTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlaceholderTypeEnum.Unmarshal(m, b) +} +func (m *PlaceholderTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlaceholderTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PlaceholderTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlaceholderTypeEnum.Merge(dst, src) +} +func (m *PlaceholderTypeEnum) XXX_Size() int { + return xxx_messageInfo_PlaceholderTypeEnum.Size(m) +} +func (m *PlaceholderTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PlaceholderTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PlaceholderTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PlaceholderTypeEnum)(nil), "google.ads.googleads.v1.enums.PlaceholderTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType", PlaceholderTypeEnum_PlaceholderType_name, PlaceholderTypeEnum_PlaceholderType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/placeholder_type.proto", fileDescriptor_placeholder_type_3e5fe1fcc2d8440e) +} + +var fileDescriptor_placeholder_type_3e5fe1fcc2d8440e = []byte{ + // 492 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6e, 0x9b, 0x3e, + 0x14, 0xc6, 0xff, 0x21, 0x6d, 0x93, 0x38, 0x4d, 0xe3, 0x38, 0xfd, 0x6f, 0xd2, 0xb4, 0x5e, 0xb4, + 0x0f, 0x00, 0x42, 0xdb, 0x15, 0xbb, 0x72, 0xc0, 0x49, 0xbd, 0x3a, 0x60, 0x81, 0xc9, 0xb4, 0x2a, + 0x12, 0x62, 0x05, 0xb1, 0x48, 0x09, 0x46, 
0x21, 0xad, 0xd4, 0xb7, 0xd8, 0x33, 0xec, 0x72, 0x8f, + 0xb2, 0x37, 0xd9, 0x9e, 0x60, 0x97, 0x93, 0xa1, 0x64, 0x51, 0xa5, 0xed, 0x06, 0x1d, 0xfd, 0xce, + 0xf7, 0x7d, 0xd8, 0x3e, 0x07, 0xbc, 0xcd, 0xa4, 0xcc, 0xd6, 0xa9, 0x11, 0x27, 0xa5, 0x51, 0x97, + 0xaa, 0x7a, 0x30, 0x8d, 0x34, 0xbf, 0xdf, 0x94, 0x46, 0xb1, 0x8e, 0xef, 0xd2, 0xcf, 0x72, 0x9d, + 0xa4, 0xdb, 0x68, 0xf7, 0x58, 0xa4, 0x7a, 0xb1, 0x95, 0x3b, 0x89, 0x2e, 0x6a, 0xa9, 0x1e, 0x27, + 0xa5, 0xbe, 0x77, 0xe9, 0x0f, 0xa6, 0x5e, 0xb9, 0x5e, 0xbd, 0x6e, 0x42, 0x8b, 0x95, 0x11, 0xe7, + 0xb9, 0xdc, 0xc5, 0xbb, 0x95, 0xcc, 0xcb, 0xda, 0x7c, 0xf5, 0xa5, 0x0d, 0xc6, 0xfc, 0x4f, 0xae, + 0x78, 0x2c, 0x52, 0x92, 0xdf, 0x6f, 0xae, 0x7e, 0x69, 0x60, 0xf8, 0x8c, 0xa3, 0x21, 0xe8, 0x87, + 0x6e, 0xc0, 0x89, 0x4d, 0xa7, 0x94, 0x38, 0xf0, 0x3f, 0xd4, 0x07, 0x9d, 0xd0, 0xbd, 0x71, 0xbd, + 0x0f, 0x2e, 0x6c, 0xa1, 0x53, 0xd0, 0x0d, 0xa8, 0x20, 0x8c, 0xba, 0x37, 0x50, 0x43, 0x5d, 0x70, + 0x64, 0x63, 0xc6, 0x60, 0x1b, 0x75, 0x40, 0x1b, 0x73, 0x0e, 0x8f, 0x94, 0x80, 0x79, 0x36, 0x16, + 0xd4, 0x73, 0xe1, 0x31, 0x7a, 0x01, 0x10, 0x9e, 0x4e, 0x29, 0xa3, 0x58, 0x90, 0x68, 0xcf, 0x4f, + 0x54, 0xa6, 0x32, 0x7a, 0xa1, 0x80, 0x1d, 0x25, 0x0a, 0x84, 0x1f, 0xda, 0x22, 0xf4, 0x89, 0x13, + 0x05, 0x2e, 0xe5, 0x9c, 0x08, 0xd8, 0x55, 0xa2, 0x39, 0x09, 0x02, 0x3c, 0x23, 0xb0, 0x87, 0x7a, + 0xe0, 0x98, 0xfb, 0xd4, 0x26, 0x10, 0xa0, 0x01, 0xe8, 0x71, 0xdf, 0x9b, 0x7b, 0x55, 0x56, 0x1f, + 0x8d, 0xc0, 0x00, 0x3b, 0x91, 0x1d, 0x06, 0xc2, 0x9b, 0xd3, 0x5b, 0xe2, 0xc3, 0x53, 0xf4, 0x3f, + 0x18, 0x39, 0x1f, 0x5d, 0x3c, 0xa7, 0x76, 0x44, 0x9c, 0xf0, 0xe9, 0xaf, 0x03, 0x84, 0xc0, 0x59, + 0x83, 0xa7, 0x8c, 0xce, 0xae, 0x05, 0x3c, 0x3b, 0x64, 0x75, 0x04, 0x1c, 0xaa, 0xc4, 0x86, 0x5d, + 0x7b, 0x82, 0x30, 0x08, 0xd1, 0x4b, 0x30, 0x6e, 0x90, 0x4f, 0x30, 0x8b, 0x48, 0x20, 0xb0, 0x20, + 0x70, 0x74, 0xe8, 0x17, 0x3e, 0x5e, 0x10, 0x06, 0xd1, 0xa1, 0x5f, 0xdd, 0x99, 0xc1, 0xb1, 0x7a, + 0xd5, 0x06, 0xbd, 0xf7, 0x26, 0xf0, 0x7c, 0xf2, 0xa3, 0x05, 0x2e, 0xef, 0xe4, 0x46, 0xff, 0xe7, + 0x58, 0x27, 0xe7, 0xcf, 0xa6, 0xc3, 0xd5, 0x38, 0x79, 0xeb, 0x76, 0xf2, 0x64, 0xcb, 0xe4, 0x3a, + 0xce, 0x33, 0x5d, 0x6e, 0x33, 0x23, 0x4b, 0xf3, 0x6a, 0xd8, 0xcd, 0x4e, 0x15, 0xab, 0xf2, 0x2f, + 0x2b, 0xf6, 0xae, 0xfa, 0x7e, 0xd5, 0xda, 0x33, 0x8c, 0xbf, 0x69, 0x17, 0xb3, 0x3a, 0x0a, 0x27, + 0xa5, 0x5e, 0x97, 0xaa, 0x5a, 0x98, 0xba, 0xda, 0x90, 0xf2, 0x7b, 0xd3, 0x5f, 0xe2, 0xa4, 0x5c, + 0xee, 0xfb, 0xcb, 0x85, 0xb9, 0xac, 0xfa, 0x3f, 0xb5, 0xcb, 0x1a, 0x5a, 0x16, 0x4e, 0x4a, 0xcb, + 0xda, 0x2b, 0x2c, 0x6b, 0x61, 0x5a, 0x56, 0xa5, 0xf9, 0x74, 0x52, 0x1d, 0xec, 0xcd, 0xef, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xb1, 0x55, 0x4f, 0x8d, 0xfa, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placement_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placement_type.pb.go new file mode 100644 index 0000000..a7f08e3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/placement_type.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/placement_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible placement types for a feed mapping. +type PlacementTypeEnum_PlacementType int32 + +const ( + // Not specified. + PlacementTypeEnum_UNSPECIFIED PlacementTypeEnum_PlacementType = 0 + // Used for return value only. Represents value unknown in this version. + PlacementTypeEnum_UNKNOWN PlacementTypeEnum_PlacementType = 1 + // Websites(e.g. 'www.flowers4sale.com'). + PlacementTypeEnum_WEBSITE PlacementTypeEnum_PlacementType = 2 + // Mobile application categories(e.g. 'Games'). + PlacementTypeEnum_MOBILE_APP_CATEGORY PlacementTypeEnum_PlacementType = 3 + // mobile applications(e.g. 'mobileapp::2-com.whatsthewordanswers'). + PlacementTypeEnum_MOBILE_APPLICATION PlacementTypeEnum_PlacementType = 4 + // YouTube videos(e.g. 'youtube.com/video/wtLJPvx7-ys'). + PlacementTypeEnum_YOUTUBE_VIDEO PlacementTypeEnum_PlacementType = 5 + // YouTube channels(e.g. 'youtube.com::L8ZULXASCc1I_oaOT0NaOQ'). + PlacementTypeEnum_YOUTUBE_CHANNEL PlacementTypeEnum_PlacementType = 6 +) + +var PlacementTypeEnum_PlacementType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "WEBSITE", + 3: "MOBILE_APP_CATEGORY", + 4: "MOBILE_APPLICATION", + 5: "YOUTUBE_VIDEO", + 6: "YOUTUBE_CHANNEL", +} +var PlacementTypeEnum_PlacementType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "WEBSITE": 2, + "MOBILE_APP_CATEGORY": 3, + "MOBILE_APPLICATION": 4, + "YOUTUBE_VIDEO": 5, + "YOUTUBE_CHANNEL": 6, +} + +func (x PlacementTypeEnum_PlacementType) String() string { + return proto.EnumName(PlacementTypeEnum_PlacementType_name, int32(x)) +} +func (PlacementTypeEnum_PlacementType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_placement_type_dc725b3e437565ca, []int{0, 0} +} + +// Container for enum describing possible placement types. 
+type PlacementTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlacementTypeEnum) Reset() { *m = PlacementTypeEnum{} } +func (m *PlacementTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PlacementTypeEnum) ProtoMessage() {} +func (*PlacementTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_placement_type_dc725b3e437565ca, []int{0} +} +func (m *PlacementTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlacementTypeEnum.Unmarshal(m, b) +} +func (m *PlacementTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlacementTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PlacementTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlacementTypeEnum.Merge(dst, src) +} +func (m *PlacementTypeEnum) XXX_Size() int { + return xxx_messageInfo_PlacementTypeEnum.Size(m) +} +func (m *PlacementTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PlacementTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PlacementTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PlacementTypeEnum)(nil), "google.ads.googleads.v1.enums.PlacementTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PlacementTypeEnum_PlacementType", PlacementTypeEnum_PlacementType_name, PlacementTypeEnum_PlacementType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/placement_type.proto", fileDescriptor_placement_type_dc725b3e437565ca) +} + +var fileDescriptor_placement_type_dc725b3e437565ca = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x6a, 0xa3, 0x40, + 0x14, 0x5e, 0xcd, 0x6e, 0x16, 0x26, 0x84, 0x98, 0x09, 0xec, 0xc2, 0xb2, 0xb9, 0x48, 0x1e, 0x60, + 0xc4, 0xf6, 0x6e, 0x7a, 0x35, 0x9a, 0x69, 0x2a, 0x4d, 0x55, 0x1a, 0x35, 0xa4, 0x08, 0x62, 0xa3, + 0x48, 0x20, 0xce, 0x48, 0xc6, 0x04, 0xf2, 0x1c, 0x7d, 0x83, 0xde, 0xb5, 0x8f, 0xd2, 0x17, 0x29, + 0xf4, 0x29, 0x8a, 0x5a, 0x53, 0x72, 0xd1, 0xde, 0x0c, 0xdf, 0x9c, 0xef, 0x87, 0x73, 0x3e, 0x70, + 0x96, 0x72, 0x9e, 0x6e, 0x12, 0x35, 0x8a, 0x85, 0x5a, 0xc3, 0x12, 0xed, 0x35, 0x35, 0x61, 0xbb, + 0x4c, 0xa8, 0xf9, 0x26, 0x5a, 0x25, 0x59, 0xc2, 0x8a, 0xb0, 0x38, 0xe4, 0x09, 0xca, 0xb7, 0xbc, + 0xe0, 0x70, 0x58, 0x0b, 0x51, 0x14, 0x0b, 0x74, 0xf4, 0xa0, 0xbd, 0x86, 0x2a, 0xcf, 0xbf, 0xff, + 0x4d, 0x64, 0xbe, 0x56, 0x23, 0xc6, 0x78, 0x11, 0x15, 0x6b, 0xce, 0x44, 0x6d, 0x1e, 0x3f, 0x49, + 0xa0, 0xef, 0x34, 0xa9, 0xee, 0x21, 0x4f, 0x28, 0xdb, 0x65, 0xe3, 0x07, 0x09, 0x74, 0x4f, 0xa6, + 0xb0, 0x07, 0x3a, 0x9e, 0x35, 0x77, 0xa8, 0x61, 0x5e, 0x9a, 0x74, 0xa2, 0xfc, 0x80, 0x1d, 0xf0, + 0xdb, 0xb3, 0xae, 0x2d, 0x7b, 0x61, 0x29, 0x52, 0xf9, 0x59, 0x50, 0x7d, 0x6e, 0xba, 0x54, 0x91, + 0xe1, 0x5f, 0x30, 0xb8, 0xb1, 0x75, 0x73, 0x46, 0x43, 0xe2, 0x38, 0xa1, 0x41, 0x5c, 0x3a, 0xb5, + 0x6f, 0x97, 0x4a, 0x0b, 0xfe, 0x01, 0xf0, 0x93, 0x98, 0x99, 0x06, 0x71, 0x4d, 0xdb, 0x52, 0x7e, + 0xc2, 0x3e, 0xe8, 0x2e, 0x6d, 0xcf, 0xf5, 0x74, 0x1a, 0xfa, 0xe6, 0x84, 0xda, 0xca, 0x2f, 0x38, + 0x00, 0xbd, 0x66, 0x64, 0x5c, 0x11, 0xcb, 0xa2, 0x33, 0xa5, 0xad, 0xbf, 0x4a, 0x60, 0xb4, 0xe2, + 0x19, 0xfa, 0xf6, 0x5e, 0x1d, 0x9e, 0x2c, 0xee, 0x94, 0x57, 0x3a, 0xd2, 0x9d, 0xfe, 0x61, 0x4a, + 0xf9, 0x26, 0x62, 0x29, 0xe2, 0xdb, 0x54, 0x4d, 0x13, 0x56, 0x75, 0xd0, 0x14, 0x9d, 0xaf, 0xc5, + 0x17, 0xbd, 0x5f, 0x54, 0xef, 0xa3, 0xdc, 0x9a, 0x12, 0xf2, 0x2c, 0x0f, 0xa7, 0x75, 0x14, 0x89, + 
0x05, 0xaa, 0x61, 0x89, 0x7c, 0x0d, 0x95, 0xd5, 0x89, 0x97, 0x86, 0x0f, 0x48, 0x2c, 0x82, 0x23, + 0x1f, 0xf8, 0x5a, 0x50, 0xf1, 0x6f, 0xf2, 0xa8, 0x1e, 0x62, 0x4c, 0x62, 0x81, 0xf1, 0x51, 0x81, + 0xb1, 0xaf, 0x61, 0x5c, 0x69, 0xee, 0xdb, 0xd5, 0x62, 0xe7, 0xef, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x08, 0x71, 0xbc, 0x14, 0x0f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_approval_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_approval_status.pb.go new file mode 100644 index 0000000..5e53070 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_approval_status.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/policy_approval_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible policy approval statuses. When there are several approval +// statuses available the most severe one will be used. The order of severity +// is DISAPPROVED, AREA_OF_INTEREST_ONLY, APPROVED_LIMITED and APPROVED. +type PolicyApprovalStatusEnum_PolicyApprovalStatus int32 + +const ( + // No value has been specified. + PolicyApprovalStatusEnum_UNSPECIFIED PolicyApprovalStatusEnum_PolicyApprovalStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + PolicyApprovalStatusEnum_UNKNOWN PolicyApprovalStatusEnum_PolicyApprovalStatus = 1 + // Will not serve. + PolicyApprovalStatusEnum_DISAPPROVED PolicyApprovalStatusEnum_PolicyApprovalStatus = 2 + // Serves with restrictions. + PolicyApprovalStatusEnum_APPROVED_LIMITED PolicyApprovalStatusEnum_PolicyApprovalStatus = 3 + // Serves without restrictions. + PolicyApprovalStatusEnum_APPROVED PolicyApprovalStatusEnum_PolicyApprovalStatus = 4 + // Will not serve in targeted countries, but may serve for users who are + // searching for information about the targeted countries. 
+ PolicyApprovalStatusEnum_AREA_OF_INTEREST_ONLY PolicyApprovalStatusEnum_PolicyApprovalStatus = 5 +) + +var PolicyApprovalStatusEnum_PolicyApprovalStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DISAPPROVED", + 3: "APPROVED_LIMITED", + 4: "APPROVED", + 5: "AREA_OF_INTEREST_ONLY", +} +var PolicyApprovalStatusEnum_PolicyApprovalStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DISAPPROVED": 2, + "APPROVED_LIMITED": 3, + "APPROVED": 4, + "AREA_OF_INTEREST_ONLY": 5, +} + +func (x PolicyApprovalStatusEnum_PolicyApprovalStatus) String() string { + return proto.EnumName(PolicyApprovalStatusEnum_PolicyApprovalStatus_name, int32(x)) +} +func (PolicyApprovalStatusEnum_PolicyApprovalStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_approval_status_de38eaf2e45ee77d, []int{0, 0} +} + +// Container for enum describing possible policy approval statuses. +type PolicyApprovalStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyApprovalStatusEnum) Reset() { *m = PolicyApprovalStatusEnum{} } +func (m *PolicyApprovalStatusEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyApprovalStatusEnum) ProtoMessage() {} +func (*PolicyApprovalStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_approval_status_de38eaf2e45ee77d, []int{0} +} +func (m *PolicyApprovalStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyApprovalStatusEnum.Unmarshal(m, b) +} +func (m *PolicyApprovalStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyApprovalStatusEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyApprovalStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyApprovalStatusEnum.Merge(dst, src) +} +func (m *PolicyApprovalStatusEnum) XXX_Size() int { + return xxx_messageInfo_PolicyApprovalStatusEnum.Size(m) +} +func (m *PolicyApprovalStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyApprovalStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyApprovalStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyApprovalStatusEnum)(nil), "google.ads.googleads.v1.enums.PolicyApprovalStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PolicyApprovalStatusEnum_PolicyApprovalStatus", PolicyApprovalStatusEnum_PolicyApprovalStatus_name, PolicyApprovalStatusEnum_PolicyApprovalStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/policy_approval_status.proto", fileDescriptor_policy_approval_status_de38eaf2e45ee77d) +} + +var fileDescriptor_policy_approval_status_de38eaf2e45ee77d = []byte{ + // 346 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4a, 0xc3, 0x40, + 0x14, 0x36, 0xa9, 0x7f, 0x4c, 0x05, 0x43, 0xa8, 0x60, 0xc5, 0x2e, 0xda, 0x03, 0x4c, 0x08, 0xee, + 0xc6, 0xd5, 0xd4, 0x4c, 0x4b, 0xb0, 0x26, 0xa1, 0x69, 0x23, 0x4a, 0x20, 0x8c, 0x4d, 0x09, 0x81, + 0x74, 0x26, 0x74, 0xd2, 0x82, 0x7b, 0x2f, 0xa2, 0x4b, 0x8f, 0xe2, 0x51, 0x5c, 0x78, 0x06, 0xc9, + 0xa4, 0xc9, 0xaa, 0xba, 0x19, 0xbe, 0x79, 0xdf, 0x0f, 0xef, 0x7d, 0x00, 0x25, 0x9c, 0x27, 0xd9, + 0xd2, 0xa0, 0xb1, 0x30, 0x2a, 0x58, 0xa2, 0xad, 0x69, 0x2c, 0xd9, 0x66, 0x25, 0x8c, 0x9c, 0x67, + 0xe9, 0xe2, 0x35, 0xa2, 0x79, 0xbe, 0xe6, 0x5b, 0x9a, 0x45, 0xa2, 0xa0, 0xc5, 0x46, 0xc0, 0x7c, + 0xcd, 0x0b, 0xae, 0xf7, 0x2a, 0x03, 0xa4, 
0xb1, 0x80, 0x8d, 0x17, 0x6e, 0x4d, 0x28, 0xbd, 0x57, + 0xd7, 0x75, 0x74, 0x9e, 0x1a, 0x94, 0x31, 0x5e, 0xd0, 0x22, 0xe5, 0x6c, 0x67, 0x1e, 0xbc, 0x2b, + 0xe0, 0xd2, 0x93, 0xe9, 0x78, 0x17, 0xee, 0xcb, 0x6c, 0xc2, 0x36, 0xab, 0xc1, 0x9b, 0x02, 0x3a, + 0xfb, 0x48, 0xfd, 0x1c, 0xb4, 0xe7, 0x8e, 0xef, 0x91, 0x3b, 0x7b, 0x64, 0x13, 0x4b, 0x3b, 0xd0, + 0xdb, 0xe0, 0x64, 0xee, 0xdc, 0x3b, 0xee, 0xa3, 0xa3, 0x29, 0x25, 0x6b, 0xd9, 0x3e, 0xf6, 0xbc, + 0xa9, 0x1b, 0x10, 0x4b, 0x53, 0xf5, 0x0e, 0xd0, 0xea, 0x5f, 0x34, 0xb1, 0x1f, 0xec, 0x19, 0xb1, + 0xb4, 0x96, 0x7e, 0x06, 0x4e, 0x1b, 0xcd, 0xa1, 0xde, 0x05, 0x17, 0x78, 0x4a, 0x70, 0xe4, 0x8e, + 0x22, 0xdb, 0x99, 0x91, 0x29, 0xf1, 0x67, 0x91, 0xeb, 0x4c, 0x9e, 0xb4, 0xa3, 0xe1, 0x8f, 0x02, + 0xfa, 0x0b, 0xbe, 0x82, 0xff, 0xde, 0x39, 0xec, 0xee, 0xdb, 0xd4, 0x2b, 0x8f, 0xf4, 0x94, 0xe7, + 0xe1, 0xce, 0x9b, 0xf0, 0x8c, 0xb2, 0x04, 0xf2, 0x75, 0x62, 0x24, 0x4b, 0x26, 0x2b, 0xa8, 0xfb, + 0xce, 0x53, 0xf1, 0x47, 0xfd, 0xb7, 0xf2, 0xfd, 0x50, 0x5b, 0x63, 0x8c, 0x3f, 0xd5, 0xde, 0xb8, + 0x8a, 0xc2, 0xb1, 0x80, 0x15, 0x2c, 0x51, 0x60, 0xc2, 0xb2, 0x32, 0xf1, 0x55, 0xf3, 0x21, 0x8e, + 0x45, 0xd8, 0xf0, 0x61, 0x60, 0x86, 0x92, 0xff, 0x56, 0xfb, 0xd5, 0x10, 0x21, 0x1c, 0x0b, 0x84, + 0x1a, 0x05, 0x42, 0x81, 0x89, 0x90, 0xd4, 0xbc, 0x1c, 0xcb, 0xc5, 0x6e, 0x7e, 0x03, 0x00, 0x00, + 0xff, 0xff, 0x7a, 0x2c, 0x12, 0x8c, 0x16, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_review_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_review_status.pb.go new file mode 100644 index 0000000..ca52103 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_review_status.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/policy_review_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible policy review statuses. +type PolicyReviewStatusEnum_PolicyReviewStatus int32 + +const ( + // No value has been specified. + PolicyReviewStatusEnum_UNSPECIFIED PolicyReviewStatusEnum_PolicyReviewStatus = 0 + // The received value is not known in this version. + // + // This is a response-only value. + PolicyReviewStatusEnum_UNKNOWN PolicyReviewStatusEnum_PolicyReviewStatus = 1 + // Currently under review. + PolicyReviewStatusEnum_REVIEW_IN_PROGRESS PolicyReviewStatusEnum_PolicyReviewStatus = 2 + // Primary review complete. Other reviews may be continuing. + PolicyReviewStatusEnum_REVIEWED PolicyReviewStatusEnum_PolicyReviewStatus = 3 + // The resource has been resubmitted for approval or its policy decision has + // been appealed. 
+ PolicyReviewStatusEnum_UNDER_APPEAL PolicyReviewStatusEnum_PolicyReviewStatus = 4 +) + +var PolicyReviewStatusEnum_PolicyReviewStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "REVIEW_IN_PROGRESS", + 3: "REVIEWED", + 4: "UNDER_APPEAL", +} +var PolicyReviewStatusEnum_PolicyReviewStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REVIEW_IN_PROGRESS": 2, + "REVIEWED": 3, + "UNDER_APPEAL": 4, +} + +func (x PolicyReviewStatusEnum_PolicyReviewStatus) String() string { + return proto.EnumName(PolicyReviewStatusEnum_PolicyReviewStatus_name, int32(x)) +} +func (PolicyReviewStatusEnum_PolicyReviewStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_review_status_8e59777bd577c932, []int{0, 0} +} + +// Container for enum describing possible policy review statuses. +type PolicyReviewStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyReviewStatusEnum) Reset() { *m = PolicyReviewStatusEnum{} } +func (m *PolicyReviewStatusEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyReviewStatusEnum) ProtoMessage() {} +func (*PolicyReviewStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_review_status_8e59777bd577c932, []int{0} +} +func (m *PolicyReviewStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyReviewStatusEnum.Unmarshal(m, b) +} +func (m *PolicyReviewStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyReviewStatusEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyReviewStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyReviewStatusEnum.Merge(dst, src) +} +func (m *PolicyReviewStatusEnum) XXX_Size() int { + return xxx_messageInfo_PolicyReviewStatusEnum.Size(m) +} +func (m *PolicyReviewStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyReviewStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyReviewStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyReviewStatusEnum)(nil), "google.ads.googleads.v1.enums.PolicyReviewStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PolicyReviewStatusEnum_PolicyReviewStatus", PolicyReviewStatusEnum_PolicyReviewStatus_name, PolicyReviewStatusEnum_PolicyReviewStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/policy_review_status.proto", fileDescriptor_policy_review_status_8e59777bd577c932) +} + +var fileDescriptor_policy_review_status_8e59777bd577c932 = []byte{ + // 330 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0xb6, 0x9d, 0xa8, 0x64, 0x03, 0x4b, 0x0e, 0x13, 0xc4, 0x1d, 0xb6, 0x07, 0x48, 0x29, 0x5e, + 0x24, 0x9e, 0x32, 0x17, 0x47, 0x51, 0xba, 0xd0, 0xb2, 0x0e, 0xa4, 0x50, 0xea, 0x5a, 0x42, 0x65, + 0x4b, 0xca, 0xd2, 0x4d, 0xbc, 0xfb, 0x24, 0x1e, 0x7d, 0x14, 0x1f, 0x45, 0x7c, 0x08, 0x69, 0xb2, + 0xed, 0x32, 0xf4, 0x12, 0x3e, 0xf2, 0xfd, 0xe1, 0xf7, 0x7d, 0xe0, 0x86, 0x4b, 0xc9, 0x17, 0x85, + 0x9b, 0xe5, 0xca, 0x35, 0xb0, 0x41, 0x1b, 0xcf, 0x2d, 0xc4, 0x7a, 0xa9, 0xdc, 0x4a, 0x2e, 0xca, + 0xf9, 0x5b, 0xba, 0x2a, 0x36, 0x65, 0xf1, 0x9a, 0xaa, 0x3a, 0xab, 0xd7, 0x0a, 0x55, 0x2b, 0x59, + 0x4b, 0xd8, 0x33, 0x72, 0x94, 0xe5, 0x0a, 0xed, 0x9d, 0x68, 0xe3, 0x21, 0xed, 0xbc, 0xbc, 0xda, + 0x05, 0x57, 0xa5, 0x9b, 0x09, 0x21, 0xeb, 0xac, 0x2e, 0xa5, 0xd8, 0x9a, 0x07, 0xef, 0x16, 0xe8, + 
0x32, 0x9d, 0x1d, 0xea, 0xe8, 0x48, 0x27, 0x53, 0xb1, 0x5e, 0x0e, 0x5e, 0x00, 0x3c, 0x64, 0xe0, + 0x39, 0x68, 0x4f, 0x83, 0x88, 0xd1, 0x3b, 0xff, 0xde, 0xa7, 0x23, 0xe7, 0x08, 0xb6, 0xc1, 0xe9, + 0x34, 0x78, 0x08, 0x26, 0xb3, 0xc0, 0xb1, 0x60, 0x17, 0xc0, 0x90, 0xc6, 0x3e, 0x9d, 0xa5, 0x7e, + 0x90, 0xb2, 0x70, 0x32, 0x0e, 0x69, 0x14, 0x39, 0x36, 0xec, 0x80, 0x33, 0xf3, 0x4f, 0x47, 0x4e, + 0x0b, 0x3a, 0xa0, 0x33, 0x0d, 0x46, 0x34, 0x4c, 0x09, 0x63, 0x94, 0x3c, 0x3a, 0xc7, 0xc3, 0x1f, + 0x0b, 0xf4, 0xe7, 0x72, 0x89, 0xfe, 0xad, 0x32, 0xbc, 0x38, 0xbc, 0x87, 0x35, 0x2d, 0x98, 0xf5, + 0x34, 0xdc, 0x3a, 0xb9, 0x5c, 0x64, 0x82, 0x23, 0xb9, 0xe2, 0x2e, 0x2f, 0x84, 0xee, 0xb8, 0x9b, + 0xb3, 0x2a, 0xd5, 0x1f, 0xeb, 0xde, 0xea, 0xf7, 0xc3, 0x6e, 0x8d, 0x09, 0xf9, 0xb4, 0x7b, 0x63, + 0x13, 0x45, 0x72, 0x85, 0x0c, 0x6c, 0x50, 0xec, 0xa1, 0x66, 0x15, 0xf5, 0xb5, 0xe3, 0x13, 0x92, + 0xab, 0x64, 0xcf, 0x27, 0xb1, 0x97, 0x68, 0xfe, 0xdb, 0xee, 0x9b, 0x4f, 0x8c, 0x49, 0xae, 0x30, + 0xde, 0x2b, 0x30, 0x8e, 0x3d, 0x8c, 0xb5, 0xe6, 0xf9, 0x44, 0x1f, 0x76, 0xfd, 0x1b, 0x00, 0x00, + 0xff, 0xff, 0x85, 0xab, 0xcf, 0x9a, 0xf5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_entry_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_entry_type.pb.go new file mode 100644 index 0000000..65e31dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_entry_type.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/policy_topic_entry_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible policy topic entry types. +type PolicyTopicEntryTypeEnum_PolicyTopicEntryType int32 + +const ( + // No value has been specified. + PolicyTopicEntryTypeEnum_UNSPECIFIED PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + PolicyTopicEntryTypeEnum_UNKNOWN PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 1 + // The resource will not be served. + PolicyTopicEntryTypeEnum_PROHIBITED PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 2 + // The resource will not be served under some circumstances. + PolicyTopicEntryTypeEnum_LIMITED PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 4 + // The resource cannot serve at all because of the current targeting + // criteria. + PolicyTopicEntryTypeEnum_FULLY_LIMITED PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 8 + // May be of interest, but does not limit how the resource is served. + PolicyTopicEntryTypeEnum_DESCRIPTIVE PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 5 + // Could increase coverage beyond normal. 
+ PolicyTopicEntryTypeEnum_BROADENING PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 6 + // Constrained for all targeted countries, but may serve in other countries + // through area of interest. + PolicyTopicEntryTypeEnum_AREA_OF_INTEREST_ONLY PolicyTopicEntryTypeEnum_PolicyTopicEntryType = 7 +) + +var PolicyTopicEntryTypeEnum_PolicyTopicEntryType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PROHIBITED", + 4: "LIMITED", + 8: "FULLY_LIMITED", + 5: "DESCRIPTIVE", + 6: "BROADENING", + 7: "AREA_OF_INTEREST_ONLY", +} +var PolicyTopicEntryTypeEnum_PolicyTopicEntryType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PROHIBITED": 2, + "LIMITED": 4, + "FULLY_LIMITED": 8, + "DESCRIPTIVE": 5, + "BROADENING": 6, + "AREA_OF_INTEREST_ONLY": 7, +} + +func (x PolicyTopicEntryTypeEnum_PolicyTopicEntryType) String() string { + return proto.EnumName(PolicyTopicEntryTypeEnum_PolicyTopicEntryType_name, int32(x)) +} +func (PolicyTopicEntryTypeEnum_PolicyTopicEntryType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_entry_type_2d86a58e897e12eb, []int{0, 0} +} + +// Container for enum describing possible policy topic entry types. +type PolicyTopicEntryTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEntryTypeEnum) Reset() { *m = PolicyTopicEntryTypeEnum{} } +func (m *PolicyTopicEntryTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyTopicEntryTypeEnum) ProtoMessage() {} +func (*PolicyTopicEntryTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_entry_type_2d86a58e897e12eb, []int{0} +} +func (m *PolicyTopicEntryTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEntryTypeEnum.Unmarshal(m, b) +} +func (m *PolicyTopicEntryTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEntryTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEntryTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEntryTypeEnum.Merge(dst, src) +} +func (m *PolicyTopicEntryTypeEnum) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEntryTypeEnum.Size(m) +} +func (m *PolicyTopicEntryTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEntryTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEntryTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyTopicEntryTypeEnum)(nil), "google.ads.googleads.v1.enums.PolicyTopicEntryTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PolicyTopicEntryTypeEnum_PolicyTopicEntryType", PolicyTopicEntryTypeEnum_PolicyTopicEntryType_name, PolicyTopicEntryTypeEnum_PolicyTopicEntryType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/policy_topic_entry_type.proto", fileDescriptor_policy_topic_entry_type_2d86a58e897e12eb) +} + +var fileDescriptor_policy_topic_entry_type_2d86a58e897e12eb = []byte{ + // 379 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0xae, 0x93, 0x40, + 0x18, 0x85, 0x05, 0xf5, 0x5e, 0x33, 0x37, 0x2a, 0x12, 0x4d, 0xbc, 0xc6, 0x2e, 0xda, 0x07, 0x18, + 0x42, 0xdc, 0x4d, 0x57, 0x43, 0x99, 0xd6, 0x89, 0x38, 0x10, 0x4a, 0x31, 0x35, 0x24, 0x04, 0x0b, + 0x21, 0x24, 0xed, 0x0c, 0xe9, 0xd0, 0x26, 0xbc, 0x8d, 0x71, 0xe9, 0x3b, 0xf8, 0x02, 0x3e, 0x8a, + 0x0b, 0x9f, 0xc1, 0xcc, 0x20, 0x5d, 0x55, 0x37, 0xe4, 0xf0, 0x9f, 
0xff, 0x7c, 0x81, 0xf3, 0x83, + 0x79, 0x2d, 0x44, 0xbd, 0xaf, 0x9c, 0xa2, 0x94, 0xce, 0x20, 0x95, 0x3a, 0xbb, 0x4e, 0xc5, 0x4f, + 0x07, 0xe9, 0xb4, 0x62, 0xdf, 0xec, 0xfa, 0xbc, 0x13, 0x6d, 0xb3, 0xcb, 0x2b, 0xde, 0x1d, 0xfb, + 0xbc, 0xeb, 0xdb, 0x0a, 0xb6, 0x47, 0xd1, 0x09, 0x7b, 0x32, 0x24, 0x60, 0x51, 0x4a, 0x78, 0x09, + 0xc3, 0xb3, 0x0b, 0x75, 0xf8, 0xcd, 0xdb, 0x91, 0xdd, 0x36, 0x4e, 0xc1, 0xb9, 0xe8, 0x8a, 0xae, + 0x11, 0x5c, 0x0e, 0xe1, 0xd9, 0x0f, 0x03, 0xbc, 0x8e, 0x34, 0x3e, 0x51, 0x74, 0xa2, 0xe0, 0x49, + 0xdf, 0x56, 0x84, 0x9f, 0x0e, 0xb3, 0xaf, 0x06, 0x78, 0x79, 0xcd, 0xb4, 0x9f, 0x83, 0xbb, 0x0d, + 0x5b, 0x47, 0x64, 0x41, 0x97, 0x94, 0xf8, 0xd6, 0x03, 0xfb, 0x0e, 0xdc, 0x6e, 0xd8, 0x07, 0x16, + 0x7e, 0x62, 0x96, 0x61, 0x3f, 0x03, 0x20, 0x8a, 0xc3, 0xf7, 0xd4, 0xa3, 0x09, 0xf1, 0x2d, 0x53, + 0x99, 0x01, 0xfd, 0xa8, 0x5f, 0x1e, 0xd9, 0x2f, 0xc0, 0xd3, 0xe5, 0x26, 0x08, 0xb6, 0xf9, 0x38, + 0x7a, 0xa2, 0x68, 0x3e, 0x59, 0x2f, 0x62, 0x1a, 0x25, 0x34, 0x25, 0xd6, 0x63, 0x05, 0xf0, 0xe2, + 0x10, 0xfb, 0x84, 0x51, 0xb6, 0xb2, 0x6e, 0xec, 0x7b, 0xf0, 0x0a, 0xc7, 0x04, 0xe7, 0xe1, 0x32, + 0xa7, 0x2c, 0x21, 0x31, 0x59, 0x27, 0x79, 0xc8, 0x82, 0xad, 0x75, 0xeb, 0xfd, 0x36, 0xc0, 0x74, + 0x27, 0x0e, 0xf0, 0xbf, 0x1d, 0x78, 0xf7, 0xd7, 0xfe, 0x22, 0x52, 0x05, 0x44, 0xc6, 0x67, 0xef, + 0x6f, 0xb6, 0x16, 0xfb, 0x82, 0xd7, 0x50, 0x1c, 0x6b, 0xa7, 0xae, 0xb8, 0xae, 0x67, 0x3c, 0x46, + 0xdb, 0xc8, 0x7f, 0xdc, 0x66, 0xae, 0x9f, 0xdf, 0xcc, 0x87, 0x2b, 0x8c, 0xbf, 0x9b, 0x93, 0xd5, + 0x80, 0xc2, 0xa5, 0x84, 0x83, 0x54, 0x2a, 0x75, 0xa1, 0xaa, 0x53, 0xfe, 0x1c, 0xfd, 0x0c, 0x97, + 0x32, 0xbb, 0xf8, 0x59, 0xea, 0x66, 0xda, 0xff, 0x65, 0x4e, 0x87, 0x21, 0x42, 0xb8, 0x94, 0x08, + 0x5d, 0x36, 0x10, 0x4a, 0x5d, 0x84, 0xf4, 0xce, 0x97, 0x1b, 0xfd, 0x61, 0xef, 0xfe, 0x04, 0x00, + 0x00, 0xff, 0xff, 0x66, 0x35, 0x70, 0x8b, 0x33, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.pb.go new file mode 100644 index 0000000..851fea7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible policy topic evidence destination mismatch url types. +type PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType int32 + +const ( + // No value has been specified. 
+ PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_UNSPECIFIED PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_UNKNOWN PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 1 + // The display url. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_DISPLAY_URL PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 2 + // The final url. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_FINAL_URL PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 3 + // The final mobile url. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_FINAL_MOBILE_URL PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 4 + // The tracking url template, with substituted desktop url. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_TRACKING_URL PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 5 + // The tracking url template, with substituted mobile url. + PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_MOBILE_TRACKING_URL PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType = 6 +) + +var PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DISPLAY_URL", + 3: "FINAL_URL", + 4: "FINAL_MOBILE_URL", + 5: "TRACKING_URL", + 6: "MOBILE_TRACKING_URL", +} +var PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DISPLAY_URL": 2, + "FINAL_URL": 3, + "FINAL_MOBILE_URL": 4, + "TRACKING_URL": 5, + "MOBILE_TRACKING_URL": 6, +} + +func (x PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType) String() string { + return proto.EnumName(PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType_name, int32(x)) +} +func (PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_evidence_destination_mismatch_url_type_7351fdf0972bcdb9, []int{0, 0} +} + +// Container for enum describing possible policy topic evidence destination +// mismatch url types. 
+type PolicyTopicEvidenceDestinationMismatchUrlTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) Reset() { + *m = PolicyTopicEvidenceDestinationMismatchUrlTypeEnum{} +} +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) String() string { + return proto.CompactTextString(m) +} +func (*PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) ProtoMessage() {} +func (*PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_evidence_destination_mismatch_url_type_7351fdf0972bcdb9, []int{0} +} +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.Unmarshal(m, b) +} +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.Merge(dst, src) +} +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.Size(m) +} +func (m *PolicyTopicEvidenceDestinationMismatchUrlTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidenceDestinationMismatchUrlTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyTopicEvidenceDestinationMismatchUrlTypeEnum)(nil), "google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationMismatchUrlTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType", PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType_name, PolicyTopicEvidenceDestinationMismatchUrlTypeEnum_PolicyTopicEvidenceDestinationMismatchUrlType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/policy_topic_evidence_destination_mismatch_url_type.proto", fileDescriptor_policy_topic_evidence_destination_mismatch_url_type_7351fdf0972bcdb9) +} + +var fileDescriptor_policy_topic_evidence_destination_mismatch_url_type_7351fdf0972bcdb9 = []byte{ + // 390 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x51, 0xc1, 0xaa, 0x9b, 0x40, + 0x14, 0xad, 0xa6, 0x4d, 0xe9, 0xa4, 0xa5, 0x62, 0x0b, 0x85, 0xd2, 0x2c, 0x92, 0x7d, 0x47, 0x6c, + 0x77, 0xd3, 0xd5, 0x98, 0x98, 0x20, 0x31, 0x46, 0x92, 0x98, 0xd0, 0x22, 0x88, 0xd5, 0xc1, 0x0e, + 0xe8, 0x8c, 0x38, 0x26, 0x90, 0x2f, 0xe8, 0x7f, 0x74, 0xd9, 0x45, 0x3f, 0xa4, 0xdf, 0xf1, 0x56, + 0xef, 0x2b, 0x1e, 0x8e, 0x26, 0x8f, 0xb7, 0x78, 0x0f, 0xb2, 0x91, 0x73, 0xef, 0xb9, 0xf7, 0x1c, + 0xe7, 0x5c, 0xb0, 0xcf, 0x38, 0xcf, 0x72, 0x62, 0xc4, 0xa9, 0x30, 0x5a, 0xd8, 0xa0, 0xa3, 0x69, + 0x10, 0x76, 0x28, 0x84, 0x51, 0xf2, 0x9c, 0x26, 0xa7, 0xa8, 0xe6, 0x25, 0x4d, 0x22, 0x72, 0xa4, + 0x29, 0x61, 0x09, 0x89, 0x52, 0x22, 0x6a, 0xca, 0xe2, 0x9a, 0x72, 0x16, 0x15, 0x54, 0x14, 0x71, + 0x9d, 0xfc, 0x8a, 0x0e, 0x55, 0x1e, 0xd5, 0xa7, 0x92, 0xc0, 0xb2, 0xe2, 0x35, 0xd7, 0x87, 0xad, + 0x1a, 0x8c, 0x53, 0x01, 
0x2f, 0xc2, 0xf0, 0x68, 0x42, 0x29, 0xfc, 0xf1, 0xd3, 0xd9, 0xb7, 0xa4, + 0x46, 0xcc, 0x18, 0xaf, 0xa5, 0x9a, 0x68, 0x97, 0xc7, 0x37, 0x0a, 0x30, 0x7d, 0x69, 0xbd, 0x6d, + 0x9c, 0xed, 0xce, 0x78, 0x7a, 0xef, 0xbb, 0xec, 0x6c, 0x83, 0x2a, 0xdf, 0x9e, 0x4a, 0x62, 0xb3, + 0x43, 0x31, 0xfe, 0xa7, 0x80, 0xcf, 0x57, 0x6d, 0xe9, 0x6f, 0xc1, 0x20, 0xf0, 0x36, 0xbe, 0x3d, + 0x71, 0x66, 0x8e, 0x3d, 0xd5, 0x9e, 0xe9, 0x03, 0xf0, 0x32, 0xf0, 0x16, 0xde, 0x6a, 0xef, 0x69, + 0x4a, 0xc3, 0x4e, 0x9d, 0x8d, 0xef, 0xe2, 0xef, 0x51, 0xb0, 0x76, 0x35, 0x55, 0x7f, 0x03, 0x5e, + 0xcd, 0x1c, 0x0f, 0xbb, 0xb2, 0xec, 0xe9, 0xef, 0x81, 0xd6, 0x96, 0xcb, 0x95, 0xe5, 0xb8, 0xb6, + 0xec, 0x3e, 0xd7, 0x35, 0xf0, 0x7a, 0xbb, 0xc6, 0x93, 0x85, 0xe3, 0xcd, 0x65, 0xe7, 0x85, 0xfe, + 0x01, 0xbc, 0xeb, 0x26, 0x1e, 0x10, 0x7d, 0xeb, 0xb7, 0x0a, 0x46, 0x09, 0x2f, 0xe0, 0x93, 0x51, + 0x59, 0x5f, 0xae, 0x7a, 0x93, 0xdf, 0x04, 0xe8, 0x2b, 0x3f, 0xac, 0x4e, 0x34, 0xe3, 0x79, 0xcc, + 0x32, 0xc8, 0xab, 0xcc, 0xc8, 0x08, 0x93, 0xf1, 0x9e, 0x0f, 0x5d, 0x52, 0xf1, 0xc8, 0xdd, 0xbf, + 0xc9, 0xef, 0x1f, 0xb5, 0x37, 0xc7, 0xf8, 0xaf, 0x3a, 0x9c, 0xb7, 0x52, 0x38, 0x15, 0xb0, 0x85, + 0x0d, 0xda, 0x99, 0xb0, 0x49, 0x5d, 0xfc, 0x3f, 0xf3, 0x21, 0x4e, 0x45, 0x78, 0xe1, 0xc3, 0x9d, + 0x19, 0x4a, 0xfe, 0x56, 0x1d, 0xb5, 0x4d, 0x84, 0x70, 0x2a, 0x10, 0xba, 0x4c, 0x20, 0xb4, 0x33, + 0x11, 0x92, 0x33, 0x3f, 0xfb, 0xf2, 0xc7, 0xbe, 0xde, 0x05, 0x00, 0x00, 0xff, 0xff, 0xd8, 0x67, + 0xa7, 0xa2, 0x8f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.pb.go new file mode 100644 index 0000000..a9504cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible policy topic evidence destination not working devices. +type PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice int32 + +const ( + // No value has been specified. + PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_UNSPECIFIED PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice = 0 + // The received value is not known in this version. + // + // This is a response-only value. + PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_UNKNOWN PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice = 1 + // Landing page doesn't work on desktop device. 
+ PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_DESKTOP PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice = 2 + // Landing page doesn't work on Android device. + PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_ANDROID PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice = 3 + // Landing page doesn't work on iOS device. + PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_IOS PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice = 4 +) + +var PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DESKTOP", + 3: "ANDROID", + 4: "IOS", +} +var PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DESKTOP": 2, + "ANDROID": 3, + "IOS": 4, +} + +func (x PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice) String() string { + return proto.EnumName(PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice_name, int32(x)) +} +func (PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_evidence_destination_not_working_device_c2cbfcd4d0456964, []int{0, 0} +} + +// Container for enum describing possible policy topic evidence destination not +// working devices. +type PolicyTopicEvidenceDestinationNotWorkingDeviceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) Reset() { + *m = PolicyTopicEvidenceDestinationNotWorkingDeviceEnum{} +} +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) String() string { + return proto.CompactTextString(m) +} +func (*PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) ProtoMessage() {} +func (*PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_topic_evidence_destination_not_working_device_c2cbfcd4d0456964, []int{0} +} +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum.Unmarshal(m, b) +} +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum.Merge(dst, src) +} +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) XXX_Size() int { + return xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum.Size(m) +} +func (m *PolicyTopicEvidenceDestinationNotWorkingDeviceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyTopicEvidenceDestinationNotWorkingDeviceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyTopicEvidenceDestinationNotWorkingDeviceEnum)(nil), 
"google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice", PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice_name, PolicyTopicEvidenceDestinationNotWorkingDeviceEnum_PolicyTopicEvidenceDestinationNotWorkingDevice_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/policy_topic_evidence_destination_not_working_device.proto", fileDescriptor_policy_topic_evidence_destination_not_working_device_c2cbfcd4d0456964) +} + +var fileDescriptor_policy_topic_evidence_destination_not_working_device_c2cbfcd4d0456964 = []byte{ + // 350 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0xcf, 0x4a, 0xc3, 0x40, + 0x10, 0xc6, 0x6d, 0x2a, 0x16, 0xd2, 0x83, 0x21, 0x47, 0xb1, 0x87, 0xf6, 0x01, 0x36, 0xc4, 0xde, + 0xd6, 0x53, 0x6a, 0x62, 0x09, 0x85, 0x24, 0xd8, 0x7f, 0x22, 0x81, 0x10, 0xb3, 0x4b, 0x58, 0x6c, + 0x77, 0x62, 0x37, 0x8d, 0xf8, 0x06, 0x3e, 0x87, 0x17, 0xc1, 0x47, 0xf1, 0x51, 0x7c, 0x0a, 0xd9, + 0xdd, 0xb6, 0x9e, 0x14, 0x7a, 0x09, 0xdf, 0x64, 0x66, 0xbe, 0xdf, 0xec, 0x8c, 0x79, 0x5f, 0x02, + 0x94, 0x2b, 0xea, 0xe4, 0x44, 0x38, 0x5a, 0x4a, 0xd5, 0xb8, 0x0e, 0xe5, 0xdb, 0xb5, 0x70, 0x2a, + 0x58, 0xb1, 0xe2, 0x35, 0xab, 0xa1, 0x62, 0x45, 0x46, 0x1b, 0x46, 0x28, 0x2f, 0x68, 0x46, 0xa8, + 0xa8, 0x19, 0xcf, 0x6b, 0x06, 0x3c, 0xe3, 0x50, 0x67, 0x2f, 0xb0, 0x79, 0x62, 0xbc, 0xcc, 0x08, + 0x6d, 0x58, 0x41, 0x51, 0xb5, 0x81, 0x1a, 0xec, 0x9e, 0xb6, 0x43, 0x39, 0x11, 0xe8, 0xe0, 0x8c, + 0x1a, 0x17, 0x29, 0xe7, 0x8b, 0xcb, 0x3d, 0xb8, 0x62, 0x4e, 0xce, 0x39, 0xd4, 0xca, 0x4e, 0xe8, + 0xe6, 0xc1, 0x47, 0xcb, 0xbc, 0x4a, 0x14, 0x7b, 0x26, 0xd1, 0xc1, 0x8e, 0xec, 0xff, 0x82, 0x23, + 0xa8, 0x97, 0x1a, 0xeb, 0x2b, 0x6a, 0xc0, 0xb7, 0xeb, 0xc1, 0xb3, 0x89, 0x8e, 0xeb, 0xb2, 0xcf, + 0xcd, 0xee, 0x3c, 0x9a, 0x26, 0xc1, 0x4d, 0x78, 0x1b, 0x06, 0xbe, 0x75, 0x62, 0x77, 0xcd, 0xce, + 0x3c, 0x9a, 0x44, 0xf1, 0x32, 0xb2, 0x5a, 0x32, 0xf0, 0x83, 0xe9, 0x64, 0x16, 0x27, 0x96, 0x21, + 0x03, 0x2f, 0xf2, 0xef, 0xe2, 0xd0, 0xb7, 0xda, 0x76, 0xc7, 0x6c, 0x87, 0xf1, 0xd4, 0x3a, 0x1d, + 0xbd, 0x19, 0x66, 0xbf, 0x80, 0x35, 0xfa, 0xf7, 0xb5, 0xa3, 0xe1, 0x71, 0x63, 0x25, 0x72, 0x09, + 0x49, 0xeb, 0x61, 0xb4, 0x73, 0x2d, 0x61, 0x95, 0xf3, 0x12, 0xc1, 0xa6, 0x74, 0x4a, 0xca, 0xd5, + 0x8a, 0xf6, 0xd7, 0xaa, 0x98, 0xf8, 0xe3, 0x78, 0xd7, 0xea, 0xfb, 0x6e, 0xb4, 0xc7, 0x9e, 0xf7, + 0x69, 0xf4, 0xc6, 0xda, 0xca, 0x23, 0x02, 0x69, 0x29, 0xd5, 0xc2, 0x45, 0x72, 0x71, 0xe2, 0x6b, + 0x9f, 0x4f, 0x3d, 0x22, 0xd2, 0x43, 0x3e, 0x5d, 0xb8, 0xa9, 0xca, 0x7f, 0x1b, 0x7d, 0xfd, 0x13, + 0x63, 0x8f, 0x08, 0x8c, 0x0f, 0x15, 0x18, 0x2f, 0x5c, 0x8c, 0x55, 0xcd, 0xe3, 0x99, 0x1a, 0x6c, + 0xf8, 0x13, 0x00, 0x00, 0xff, 0xff, 0xbf, 0xfa, 0x15, 0x08, 0x54, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/positive_geo_target_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/positive_geo_target_type.pb.go new file mode 100644 index 0000000..d2547ad --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/positive_geo_target_type.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/positive_geo_target_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible positive geo target types. +type PositiveGeoTargetTypeEnum_PositiveGeoTargetType int32 + +const ( + // Not specified. + PositiveGeoTargetTypeEnum_UNSPECIFIED PositiveGeoTargetTypeEnum_PositiveGeoTargetType = 0 + // The value is unknown in this version. + PositiveGeoTargetTypeEnum_UNKNOWN PositiveGeoTargetTypeEnum_PositiveGeoTargetType = 1 + // Specifies that an ad is triggered if the user is in, + // or shows interest in, advertiser's targeted locations. + PositiveGeoTargetTypeEnum_DONT_CARE PositiveGeoTargetTypeEnum_PositiveGeoTargetType = 2 + // Specifies that an ad is triggered if the user + // searches for advertiser's targeted locations. + PositiveGeoTargetTypeEnum_AREA_OF_INTEREST PositiveGeoTargetTypeEnum_PositiveGeoTargetType = 3 + // Specifies that an ad is triggered if the user is in + // or regularly in advertiser's targeted locations. + PositiveGeoTargetTypeEnum_LOCATION_OF_PRESENCE PositiveGeoTargetTypeEnum_PositiveGeoTargetType = 4 +) + +var PositiveGeoTargetTypeEnum_PositiveGeoTargetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DONT_CARE", + 3: "AREA_OF_INTEREST", + 4: "LOCATION_OF_PRESENCE", +} +var PositiveGeoTargetTypeEnum_PositiveGeoTargetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DONT_CARE": 2, + "AREA_OF_INTEREST": 3, + "LOCATION_OF_PRESENCE": 4, +} + +func (x PositiveGeoTargetTypeEnum_PositiveGeoTargetType) String() string { + return proto.EnumName(PositiveGeoTargetTypeEnum_PositiveGeoTargetType_name, int32(x)) +} +func (PositiveGeoTargetTypeEnum_PositiveGeoTargetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_positive_geo_target_type_8a4eb110d34593ec, []int{0, 0} +} + +// Container for enum describing possible positive geo target types. 
+type PositiveGeoTargetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PositiveGeoTargetTypeEnum) Reset() { *m = PositiveGeoTargetTypeEnum{} } +func (m *PositiveGeoTargetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PositiveGeoTargetTypeEnum) ProtoMessage() {} +func (*PositiveGeoTargetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_positive_geo_target_type_8a4eb110d34593ec, []int{0} +} +func (m *PositiveGeoTargetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PositiveGeoTargetTypeEnum.Unmarshal(m, b) +} +func (m *PositiveGeoTargetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PositiveGeoTargetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PositiveGeoTargetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PositiveGeoTargetTypeEnum.Merge(dst, src) +} +func (m *PositiveGeoTargetTypeEnum) XXX_Size() int { + return xxx_messageInfo_PositiveGeoTargetTypeEnum.Size(m) +} +func (m *PositiveGeoTargetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PositiveGeoTargetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PositiveGeoTargetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PositiveGeoTargetTypeEnum)(nil), "google.ads.googleads.v1.enums.PositiveGeoTargetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PositiveGeoTargetTypeEnum_PositiveGeoTargetType", PositiveGeoTargetTypeEnum_PositiveGeoTargetType_name, PositiveGeoTargetTypeEnum_PositiveGeoTargetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/positive_geo_target_type.proto", fileDescriptor_positive_geo_target_type_8a4eb110d34593ec) +} + +var fileDescriptor_positive_geo_target_type_8a4eb110d34593ec = []byte{ + // 348 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xed, 0xfe, 0x28, 0x66, 0x88, 0xa5, 0x4c, 0xd0, 0xe1, 0x2e, 0xb6, 0x07, 0x48, + 0x29, 0xde, 0x45, 0x6f, 0xb2, 0x2e, 0x1b, 0x45, 0x49, 0x4b, 0xd7, 0x4d, 0x90, 0x42, 0xa9, 0x36, + 0x84, 0xc2, 0xd6, 0x94, 0x25, 0x1b, 0xec, 0x31, 0x7c, 0x05, 0x2f, 0x7d, 0x14, 0x1f, 0xc5, 0x1b, + 0x5f, 0x41, 0x9a, 0x6e, 0xbb, 0x9a, 0xde, 0x84, 0x43, 0xce, 0xf7, 0x3b, 0x7c, 0xdf, 0x01, 0xf7, + 0x5c, 0x08, 0xbe, 0x60, 0x4e, 0x96, 0x4b, 0xa7, 0x91, 0xb5, 0xda, 0xb8, 0x0e, 0x2b, 0xd7, 0x4b, + 0xe9, 0x54, 0x42, 0x16, 0xaa, 0xd8, 0xb0, 0x94, 0x33, 0x91, 0xaa, 0x6c, 0xc5, 0x99, 0x4a, 0xd5, + 0xb6, 0x62, 0xb0, 0x5a, 0x09, 0x25, 0xec, 0x5e, 0x83, 0xc0, 0x2c, 0x97, 0xf0, 0x40, 0xc3, 0x8d, + 0x0b, 0x35, 0xdd, 0xbd, 0xd9, 0x87, 0x57, 0x85, 0x93, 0x95, 0xa5, 0x50, 0x99, 0x2a, 0x44, 0x29, + 0x1b, 0x78, 0xf0, 0x66, 0x80, 0xeb, 0x70, 0x97, 0x3f, 0x61, 0x22, 0xd6, 0xe9, 0xf1, 0xb6, 0x62, + 0xa4, 0x5c, 0x2f, 0x07, 0x0a, 0x5c, 0x1e, 0x35, 0xed, 0x0b, 0xd0, 0x9e, 0xd1, 0x69, 0x48, 0x3c, + 0x7f, 0xec, 0x93, 0x91, 0xf5, 0xcf, 0x6e, 0x83, 0xd3, 0x19, 0x7d, 0xa0, 0xc1, 0x13, 0xb5, 0x0c, + 0xfb, 0x1c, 0x9c, 0x8d, 0x02, 0x1a, 0xa7, 0x1e, 0x8e, 0x88, 0x65, 0xda, 0x1d, 0x60, 0xe1, 0x88, + 0xe0, 0x34, 0x18, 0xa7, 0x3e, 0x8d, 0x49, 0x44, 0xa6, 0xb1, 0xd5, 0xb2, 0xaf, 0x40, 0xe7, 0x31, + 0xf0, 0x70, 0xec, 0x07, 0xb4, 0x76, 0xc2, 0x88, 0x4c, 0x09, 0xf5, 0x88, 0xf5, 0x7f, 0xf8, 0x6d, + 0x80, 0xfe, 0xab, 0x58, 0xc2, 0x3f, 0xef, 0x1a, 0x76, 0x8f, 0x6e, 0x16, 0xd6, 0x57, 0x85, 0xc6, + 0xf3, 0x70, 0x07, 0x73, 0xb1, 0xc8, 0x4a, 0x0e, 0xc5, 
0x8a, 0x3b, 0x9c, 0x95, 0xfa, 0xe6, 0x7d, + 0xc5, 0x55, 0x21, 0x7f, 0x69, 0xfc, 0x4e, 0xbf, 0xef, 0x66, 0x6b, 0x82, 0xf1, 0x87, 0xd9, 0x9b, + 0x34, 0x51, 0x38, 0x97, 0xb0, 0x91, 0xb5, 0x9a, 0xbb, 0xb0, 0xae, 0x48, 0x7e, 0xee, 0xfd, 0x04, + 0xe7, 0x32, 0x39, 0xf8, 0xc9, 0xdc, 0x4d, 0xb4, 0xff, 0x65, 0xf6, 0x9b, 0x4f, 0x84, 0x70, 0x2e, + 0x11, 0x3a, 0x4c, 0x20, 0x34, 0x77, 0x11, 0xd2, 0x33, 0x2f, 0x27, 0x7a, 0xb1, 0xdb, 0x9f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x95, 0x62, 0x2c, 0xa8, 0x09, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/preferred_content_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/preferred_content_type.pb.go new file mode 100644 index 0000000..a7596fc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/preferred_content_type.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/preferred_content_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates preferred content criterion type. +type PreferredContentTypeEnum_PreferredContentType int32 + +const ( + // Not specified. + PreferredContentTypeEnum_UNSPECIFIED PreferredContentTypeEnum_PreferredContentType = 0 + // The value is unknown in this version. + PreferredContentTypeEnum_UNKNOWN PreferredContentTypeEnum_PreferredContentType = 1 + // Represents top content on YouTube. + PreferredContentTypeEnum_YOUTUBE_TOP_CONTENT PreferredContentTypeEnum_PreferredContentType = 400 +) + +var PreferredContentTypeEnum_PreferredContentType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 400: "YOUTUBE_TOP_CONTENT", +} +var PreferredContentTypeEnum_PreferredContentType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "YOUTUBE_TOP_CONTENT": 400, +} + +func (x PreferredContentTypeEnum_PreferredContentType) String() string { + return proto.EnumName(PreferredContentTypeEnum_PreferredContentType_name, int32(x)) +} +func (PreferredContentTypeEnum_PreferredContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_preferred_content_type_89f4aa51b16a342c, []int{0, 0} +} + +// Container for enumeration of preferred content criterion type. 
+type PreferredContentTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PreferredContentTypeEnum) Reset() { *m = PreferredContentTypeEnum{} } +func (m *PreferredContentTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PreferredContentTypeEnum) ProtoMessage() {} +func (*PreferredContentTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_preferred_content_type_89f4aa51b16a342c, []int{0} +} +func (m *PreferredContentTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PreferredContentTypeEnum.Unmarshal(m, b) +} +func (m *PreferredContentTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PreferredContentTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PreferredContentTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PreferredContentTypeEnum.Merge(dst, src) +} +func (m *PreferredContentTypeEnum) XXX_Size() int { + return xxx_messageInfo_PreferredContentTypeEnum.Size(m) +} +func (m *PreferredContentTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PreferredContentTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PreferredContentTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PreferredContentTypeEnum)(nil), "google.ads.googleads.v1.enums.PreferredContentTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PreferredContentTypeEnum_PreferredContentType", PreferredContentTypeEnum_PreferredContentType_name, PreferredContentTypeEnum_PreferredContentType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/preferred_content_type.proto", fileDescriptor_preferred_content_type_89f4aa51b16a342c) +} + +var fileDescriptor_preferred_content_type_89f4aa51b16a342c = []byte{ + // 316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xdb, 0xe0, 0x2f, 0x64, 0x17, 0x8e, 0xea, 0xc5, 0x14, 0x77, 0xb1, 0x3d, 0x40, + 0x42, 0xf1, 0x2e, 0x5e, 0xb5, 0xb3, 0x8e, 0x21, 0xa4, 0x05, 0xdb, 0x89, 0x52, 0x28, 0x75, 0x89, + 0x61, 0xb2, 0x25, 0xa1, 0xc9, 0x06, 0x7b, 0x0b, 0x9f, 0xc1, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0xc2, + 0x67, 0x90, 0x26, 0xb6, 0x57, 0xd3, 0x9b, 0x72, 0xe8, 0xf9, 0x7e, 0x27, 0xe7, 0xfb, 0x00, 0xe6, + 0x52, 0xf2, 0x35, 0x43, 0x25, 0xd5, 0xc8, 0xc9, 0x5a, 0xed, 0x7c, 0xc4, 0xc4, 0x76, 0xa3, 0x91, + 0xaa, 0xd8, 0x33, 0xab, 0x2a, 0x46, 0x8b, 0xa5, 0x14, 0x86, 0x09, 0x53, 0x98, 0xbd, 0x62, 0x50, + 0x55, 0xd2, 0x48, 0x6f, 0xe4, 0x00, 0x58, 0x52, 0x0d, 0x5b, 0x16, 0xee, 0x7c, 0x68, 0xd9, 0xf3, + 0x8b, 0x26, 0x5a, 0xad, 0x50, 0x29, 0x84, 0x34, 0xa5, 0x59, 0x49, 0xa1, 0x1d, 0x3c, 0x79, 0x01, + 0xc3, 0xa4, 0x09, 0x9f, 0xba, 0xec, 0x74, 0xaf, 0x58, 0x24, 0xb6, 0x9b, 0x09, 0x01, 0xa7, 0x87, + 0x3c, 0xef, 0x18, 0xf4, 0x33, 0x72, 0x97, 0x44, 0xd3, 0xf9, 0xcd, 0x3c, 0xba, 0x1e, 0xfc, 0xf3, + 0xfa, 0xe0, 0x28, 0x23, 0xb7, 0x24, 0xbe, 0x27, 0x83, 0x8e, 0x37, 0x04, 0x27, 0x0f, 0x71, 0x96, + 0x66, 0x61, 0x54, 0xa4, 0x71, 0x52, 0x4c, 0x63, 0x92, 0x46, 0x24, 0x1d, 0xbc, 0xf6, 0xc2, 0xaf, + 0x0e, 0x18, 0x2f, 0xe5, 0x06, 0xfe, 0xd9, 0x37, 0x3c, 0x3b, 0xf4, 0x66, 0x52, 0x97, 0x4d, 0x3a, + 0x8f, 0xe1, 0x0f, 0xcb, 0xe5, 0xba, 0x14, 0x1c, 0xca, 0x8a, 0x23, 0xce, 0x84, 0x5d, 0xa5, 0xb9, + 0x9b, 0x5a, 0xe9, 0x5f, 0xce, 0x78, 0x65, 0xbf, 0x6f, 0xdd, 0xde, 0x2c, 0x08, 0xde, 0xbb, 0xa3, + 0x99, 0x8b, 0x0a, 0xa8, 0x86, 0x4e, 0xd6, 0x6a, 0xe1, 0xc3, 0x7a, 0x77, 0xfd, 0xd1, 
0xf8, 0x79, + 0x40, 0x75, 0xde, 0xfa, 0xf9, 0xc2, 0xcf, 0xad, 0xff, 0xd9, 0x1d, 0xbb, 0x9f, 0x18, 0x07, 0x54, + 0x63, 0xdc, 0x4e, 0x60, 0xbc, 0xf0, 0x31, 0xb6, 0x33, 0x4f, 0xff, 0x6d, 0xb1, 0xcb, 0xef, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xaf, 0xc9, 0x9a, 0x9e, 0xde, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_qualifier.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_qualifier.pb.go new file mode 100644 index 0000000..0eb4d28 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_qualifier.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/price_extension_price_qualifier.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enums of price extension price qualifier. +type PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier int32 + +const ( + // Not specified. + PriceExtensionPriceQualifierEnum_UNSPECIFIED PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier = 0 + // Used for return value only. Represents value unknown in this version. + PriceExtensionPriceQualifierEnum_UNKNOWN PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier = 1 + // 'From' qualifier for the price. + PriceExtensionPriceQualifierEnum_FROM PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier = 2 + // 'Up to' qualifier for the price. + PriceExtensionPriceQualifierEnum_UP_TO PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier = 3 + // 'Average' qualifier for the price. + PriceExtensionPriceQualifierEnum_AVERAGE PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier = 4 +) + +var PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FROM", + 3: "UP_TO", + 4: "AVERAGE", +} +var PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FROM": 2, + "UP_TO": 3, + "AVERAGE": 4, +} + +func (x PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier) String() string { + return proto.EnumName(PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier_name, int32(x)) +} +func (PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_price_extension_price_qualifier_960fd1cf90164248, []int{0, 0} +} + +// Container for enum describing a price extension price qualifier. 
+type PriceExtensionPriceQualifierEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PriceExtensionPriceQualifierEnum) Reset() { *m = PriceExtensionPriceQualifierEnum{} } +func (m *PriceExtensionPriceQualifierEnum) String() string { return proto.CompactTextString(m) } +func (*PriceExtensionPriceQualifierEnum) ProtoMessage() {} +func (*PriceExtensionPriceQualifierEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_price_extension_price_qualifier_960fd1cf90164248, []int{0} +} +func (m *PriceExtensionPriceQualifierEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PriceExtensionPriceQualifierEnum.Unmarshal(m, b) +} +func (m *PriceExtensionPriceQualifierEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PriceExtensionPriceQualifierEnum.Marshal(b, m, deterministic) +} +func (dst *PriceExtensionPriceQualifierEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PriceExtensionPriceQualifierEnum.Merge(dst, src) +} +func (m *PriceExtensionPriceQualifierEnum) XXX_Size() int { + return xxx_messageInfo_PriceExtensionPriceQualifierEnum.Size(m) +} +func (m *PriceExtensionPriceQualifierEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PriceExtensionPriceQualifierEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PriceExtensionPriceQualifierEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PriceExtensionPriceQualifierEnum)(nil), "google.ads.googleads.v1.enums.PriceExtensionPriceQualifierEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier", PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier_name, PriceExtensionPriceQualifierEnum_PriceExtensionPriceQualifier_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/price_extension_price_qualifier.proto", fileDescriptor_price_extension_price_qualifier_960fd1cf90164248) +} + +var fileDescriptor_price_extension_price_qualifier_960fd1cf90164248 = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4e, 0xf3, 0x30, + 0x10, 0xfd, 0x92, 0xf6, 0xe3, 0xc7, 0x5d, 0x10, 0x65, 0x89, 0x5a, 0x89, 0xf6, 0x00, 0x8e, 0x22, + 0x76, 0x66, 0xe5, 0x96, 0xb4, 0xaa, 0x10, 0x69, 0x28, 0x34, 0x48, 0x28, 0xa2, 0x32, 0x8d, 0xb1, + 0x2c, 0xb5, 0x76, 0x88, 0xd3, 0x8a, 0x35, 0x47, 0x61, 0xc9, 0x51, 0x38, 0x0a, 0x07, 0x60, 0x8d, + 0x6c, 0x93, 0xee, 0xc8, 0xc6, 0x7a, 0xe3, 0x79, 0xf3, 0x66, 0xde, 0x03, 0x23, 0x26, 0x25, 0x5b, + 0xd3, 0x80, 0xe4, 0x2a, 0xb0, 0x50, 0xa3, 0x5d, 0x18, 0x50, 0xb1, 0xdd, 0xa8, 0xa0, 0x28, 0xf9, + 0x8a, 0x2e, 0xe9, 0x6b, 0x45, 0x85, 0xe2, 0x52, 0x2c, 0x6d, 0xfd, 0xb2, 0x25, 0x6b, 0xfe, 0xcc, + 0x69, 0x09, 0x8b, 0x52, 0x56, 0xd2, 0xef, 0xd9, 0x49, 0x48, 0x72, 0x05, 0xf7, 0x22, 0x70, 0x17, + 0x42, 0x23, 0x72, 0xda, 0xad, 0x77, 0x14, 0x3c, 0x20, 0x42, 0xc8, 0x8a, 0x54, 0x5c, 0x0a, 0x65, + 0x87, 0x07, 0x6f, 0x0e, 0x38, 0x4b, 0xb4, 0x6c, 0x54, 0x6f, 0x31, 0xd5, 0x4d, 0xbd, 0x23, 0x12, + 0xdb, 0xcd, 0xe0, 0x11, 0x74, 0x9b, 0x38, 0xfe, 0x09, 0xe8, 0x2c, 0xe2, 0xdb, 0x24, 0x1a, 0x4d, + 0xc7, 0xd3, 0xe8, 0xd2, 0xfb, 0xe7, 0x77, 0xc0, 0xe1, 0x22, 0xbe, 0x8a, 0x67, 0xf7, 0xb1, 0xe7, + 0xf8, 0x47, 0xa0, 0x3d, 0x9e, 0xcf, 0xae, 0x3d, 0xd7, 0x3f, 0x06, 0xff, 0x17, 0xc9, 0xf2, 0x6e, + 0xe6, 0xb5, 0x34, 0x03, 0xa7, 0xd1, 0x1c, 0x4f, 0x22, 0xaf, 0x3d, 0xfc, 0x76, 0x40, 0x7f, 0x25, + 0x37, 0xb0, 0xd1, 0xc8, 0xb0, 0xdf, 0x74, 0x43, 
0xa2, 0xdd, 0x24, 0xce, 0xc3, 0xf0, 0x57, 0x83, + 0xc9, 0x35, 0x11, 0x0c, 0xca, 0x92, 0x05, 0x8c, 0x0a, 0xe3, 0xb5, 0x4e, 0xb8, 0xe0, 0xea, 0x8f, + 0xc0, 0x2f, 0xcc, 0xfb, 0xee, 0xb6, 0x26, 0x18, 0x7f, 0xb8, 0xbd, 0x89, 0x95, 0xc2, 0xb9, 0x82, + 0x16, 0x6a, 0x94, 0x86, 0x50, 0x67, 0xa2, 0x3e, 0xeb, 0x7e, 0x86, 0x73, 0x95, 0xed, 0xfb, 0x59, + 0x1a, 0x66, 0xa6, 0xff, 0xe5, 0xf6, 0xed, 0x27, 0x42, 0x38, 0x57, 0x08, 0xed, 0x19, 0x08, 0xa5, + 0x21, 0x42, 0x86, 0xf3, 0x74, 0x60, 0x0e, 0x3b, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x21, 0x8d, + 0x8b, 0x8d, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_unit.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_unit.pb.go new file mode 100644 index 0000000..ce259db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_price_unit.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/price_extension_price_unit.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Price extension price unit. +type PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit int32 + +const ( + // Not specified. + PriceExtensionPriceUnitEnum_UNSPECIFIED PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 0 + // Used for return value only. Represents value unknown in this version. + PriceExtensionPriceUnitEnum_UNKNOWN PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 1 + // Per hour. + PriceExtensionPriceUnitEnum_PER_HOUR PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 2 + // Per day. + PriceExtensionPriceUnitEnum_PER_DAY PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 3 + // Per week. + PriceExtensionPriceUnitEnum_PER_WEEK PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 4 + // Per month. + PriceExtensionPriceUnitEnum_PER_MONTH PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 5 + // Per year. + PriceExtensionPriceUnitEnum_PER_YEAR PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 6 + // Per night. 
+ PriceExtensionPriceUnitEnum_PER_NIGHT PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit = 7 +) + +var PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PER_HOUR", + 3: "PER_DAY", + 4: "PER_WEEK", + 5: "PER_MONTH", + 6: "PER_YEAR", + 7: "PER_NIGHT", +} +var PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PER_HOUR": 2, + "PER_DAY": 3, + "PER_WEEK": 4, + "PER_MONTH": 5, + "PER_YEAR": 6, + "PER_NIGHT": 7, +} + +func (x PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit) String() string { + return proto.EnumName(PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit_name, int32(x)) +} +func (PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_price_extension_price_unit_b2bf1d2dd17ee2c0, []int{0, 0} +} + +// Container for enum describing price extension price unit. +type PriceExtensionPriceUnitEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PriceExtensionPriceUnitEnum) Reset() { *m = PriceExtensionPriceUnitEnum{} } +func (m *PriceExtensionPriceUnitEnum) String() string { return proto.CompactTextString(m) } +func (*PriceExtensionPriceUnitEnum) ProtoMessage() {} +func (*PriceExtensionPriceUnitEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_price_extension_price_unit_b2bf1d2dd17ee2c0, []int{0} +} +func (m *PriceExtensionPriceUnitEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PriceExtensionPriceUnitEnum.Unmarshal(m, b) +} +func (m *PriceExtensionPriceUnitEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PriceExtensionPriceUnitEnum.Marshal(b, m, deterministic) +} +func (dst *PriceExtensionPriceUnitEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PriceExtensionPriceUnitEnum.Merge(dst, src) +} +func (m *PriceExtensionPriceUnitEnum) XXX_Size() int { + return xxx_messageInfo_PriceExtensionPriceUnitEnum.Size(m) +} +func (m *PriceExtensionPriceUnitEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PriceExtensionPriceUnitEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PriceExtensionPriceUnitEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PriceExtensionPriceUnitEnum)(nil), "google.ads.googleads.v1.enums.PriceExtensionPriceUnitEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit", PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit_name, PriceExtensionPriceUnitEnum_PriceExtensionPriceUnit_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/price_extension_price_unit.proto", fileDescriptor_price_extension_price_unit_b2bf1d2dd17ee2c0) +} + +var fileDescriptor_price_extension_price_unit_b2bf1d2dd17ee2c0 = []byte{ + // 350 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xd1, 0x4e, 0xfa, 0x30, + 0x14, 0xc6, 0xff, 0x1b, 0x7f, 0x41, 0x8b, 0xc6, 0x66, 0x37, 0x26, 0x0a, 0x17, 0xf0, 0x00, 0x5d, + 0x16, 0xef, 0x6a, 0x62, 0x32, 0xa4, 0x02, 0x21, 0x8e, 0x05, 0x19, 0x04, 0xb3, 0x84, 0x4c, 0xb6, + 0x2c, 0x4d, 0xa0, 0x5d, 0xe8, 0x20, 0x3e, 0x84, 0x4f, 0xe1, 0xa5, 0xf1, 0x49, 0x7c, 0x14, 0xef, + 0x7c, 0x03, 0xd3, 0xd6, 0x72, 0x87, 0x37, 0xcb, 0xf7, 0xed, 0xd7, 0xef, 0xb4, 0xe7, 0x1c, 0x70, + 0x9b, 0x73, 0x9e, 0xaf, 0x32, 0x37, 0x49, 0x85, 0xab, 0xa5, 0x54, 0x3b, 0xcf, 0xcd, 0xd8, 0x76, + 0x2d, 
0xdc, 0x62, 0x43, 0x97, 0xd9, 0x22, 0x7b, 0x29, 0x33, 0x26, 0x28, 0x67, 0x0b, 0xed, 0xb7, + 0x8c, 0x96, 0xa8, 0xd8, 0xf0, 0x92, 0x3b, 0x4d, 0x1d, 0x42, 0x49, 0x2a, 0xd0, 0x3e, 0x8f, 0x76, + 0x1e, 0x52, 0xf9, 0xcb, 0x86, 0x29, 0x5f, 0x50, 0x37, 0x61, 0x8c, 0x97, 0x49, 0x49, 0x39, 0x13, + 0x3a, 0xdc, 0xfe, 0xb0, 0xc0, 0x55, 0x28, 0x2b, 0x12, 0x73, 0x81, 0x72, 0x11, 0xa3, 0x25, 0x61, + 0xdb, 0x75, 0xfb, 0xd5, 0x02, 0x17, 0x07, 0xb8, 0x73, 0x0e, 0xea, 0x51, 0xf0, 0x18, 0x92, 0xbb, + 0xc1, 0xfd, 0x80, 0x74, 0xe1, 0x3f, 0xa7, 0x0e, 0x6a, 0x51, 0x30, 0x0c, 0x46, 0xb3, 0x00, 0x5a, + 0xce, 0x29, 0x38, 0x0e, 0xc9, 0x78, 0xd1, 0x1f, 0x45, 0x63, 0x68, 0x4b, 0x24, 0x5d, 0xd7, 0x9f, + 0xc3, 0x8a, 0x41, 0x33, 0x42, 0x86, 0xf0, 0xbf, 0x73, 0x06, 0x4e, 0xa4, 0x7b, 0x18, 0x05, 0x93, + 0x3e, 0x3c, 0x32, 0x70, 0x4e, 0xfc, 0x31, 0xac, 0x1a, 0x18, 0x0c, 0x7a, 0xfd, 0x09, 0xac, 0x75, + 0xbe, 0x2d, 0xd0, 0x5a, 0xf2, 0x35, 0xfa, 0xb3, 0xe5, 0x4e, 0xe3, 0xc0, 0x8b, 0x43, 0xd9, 0x72, + 0x68, 0x3d, 0x75, 0x7e, 0xe3, 0x39, 0x5f, 0x25, 0x2c, 0x47, 0x7c, 0x93, 0xbb, 0x79, 0xc6, 0xd4, + 0x40, 0xcc, 0x06, 0x0a, 0x2a, 0x0e, 0x2c, 0xe4, 0x46, 0x7d, 0xdf, 0xec, 0x4a, 0xcf, 0xf7, 0xdf, + 0xed, 0x66, 0x4f, 0x97, 0xf2, 0x53, 0x81, 0xb4, 0x94, 0x6a, 0xea, 0x21, 0x39, 0x3d, 0xf1, 0x69, + 0x78, 0xec, 0xa7, 0x22, 0xde, 0xf3, 0x78, 0xea, 0xc5, 0x8a, 0x7f, 0xd9, 0x2d, 0xfd, 0x13, 0x63, + 0x3f, 0x15, 0x18, 0xef, 0x4f, 0x60, 0x3c, 0xf5, 0x30, 0x56, 0x67, 0x9e, 0xab, 0xea, 0x61, 0xd7, + 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x32, 0x6a, 0x8d, 0xfb, 0x28, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_type.pb.go new file mode 100644 index 0000000..374416d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_extension_type.pb.go @@ -0,0 +1,151 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/price_extension_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Price extension type. +type PriceExtensionTypeEnum_PriceExtensionType int32 + +const ( + // Not specified. + PriceExtensionTypeEnum_UNSPECIFIED PriceExtensionTypeEnum_PriceExtensionType = 0 + // Used for return value only. Represents value unknown in this version. + PriceExtensionTypeEnum_UNKNOWN PriceExtensionTypeEnum_PriceExtensionType = 1 + // The type for showing a list of brands. + PriceExtensionTypeEnum_BRANDS PriceExtensionTypeEnum_PriceExtensionType = 2 + // The type for showing a list of events. + PriceExtensionTypeEnum_EVENTS PriceExtensionTypeEnum_PriceExtensionType = 3 + // The type for showing locations relevant to your business. 
+ PriceExtensionTypeEnum_LOCATIONS PriceExtensionTypeEnum_PriceExtensionType = 4 + // The type for showing sub-regions or districts within a city or region. + PriceExtensionTypeEnum_NEIGHBORHOODS PriceExtensionTypeEnum_PriceExtensionType = 5 + // The type for showing a collection of product categories. + PriceExtensionTypeEnum_PRODUCT_CATEGORIES PriceExtensionTypeEnum_PriceExtensionType = 6 + // The type for showing a collection of related product tiers. + PriceExtensionTypeEnum_PRODUCT_TIERS PriceExtensionTypeEnum_PriceExtensionType = 7 + // The type for showing a collection of services offered by your business. + PriceExtensionTypeEnum_SERVICES PriceExtensionTypeEnum_PriceExtensionType = 8 + // The type for showing a collection of service categories. + PriceExtensionTypeEnum_SERVICE_CATEGORIES PriceExtensionTypeEnum_PriceExtensionType = 9 + // The type for showing a collection of related service tiers. + PriceExtensionTypeEnum_SERVICE_TIERS PriceExtensionTypeEnum_PriceExtensionType = 10 +) + +var PriceExtensionTypeEnum_PriceExtensionType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BRANDS", + 3: "EVENTS", + 4: "LOCATIONS", + 5: "NEIGHBORHOODS", + 6: "PRODUCT_CATEGORIES", + 7: "PRODUCT_TIERS", + 8: "SERVICES", + 9: "SERVICE_CATEGORIES", + 10: "SERVICE_TIERS", +} +var PriceExtensionTypeEnum_PriceExtensionType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BRANDS": 2, + "EVENTS": 3, + "LOCATIONS": 4, + "NEIGHBORHOODS": 5, + "PRODUCT_CATEGORIES": 6, + "PRODUCT_TIERS": 7, + "SERVICES": 8, + "SERVICE_CATEGORIES": 9, + "SERVICE_TIERS": 10, +} + +func (x PriceExtensionTypeEnum_PriceExtensionType) String() string { + return proto.EnumName(PriceExtensionTypeEnum_PriceExtensionType_name, int32(x)) +} +func (PriceExtensionTypeEnum_PriceExtensionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_price_extension_type_b4046f5b4d5b8020, []int{0, 0} +} + +// Container for enum describing types for a price extension. 
+type PriceExtensionTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PriceExtensionTypeEnum) Reset() { *m = PriceExtensionTypeEnum{} } +func (m *PriceExtensionTypeEnum) String() string { return proto.CompactTextString(m) } +func (*PriceExtensionTypeEnum) ProtoMessage() {} +func (*PriceExtensionTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_price_extension_type_b4046f5b4d5b8020, []int{0} +} +func (m *PriceExtensionTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PriceExtensionTypeEnum.Unmarshal(m, b) +} +func (m *PriceExtensionTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PriceExtensionTypeEnum.Marshal(b, m, deterministic) +} +func (dst *PriceExtensionTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PriceExtensionTypeEnum.Merge(dst, src) +} +func (m *PriceExtensionTypeEnum) XXX_Size() int { + return xxx_messageInfo_PriceExtensionTypeEnum.Size(m) +} +func (m *PriceExtensionTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PriceExtensionTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PriceExtensionTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PriceExtensionTypeEnum)(nil), "google.ads.googleads.v1.enums.PriceExtensionTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PriceExtensionTypeEnum_PriceExtensionType", PriceExtensionTypeEnum_PriceExtensionType_name, PriceExtensionTypeEnum_PriceExtensionType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/price_extension_type.proto", fileDescriptor_price_extension_type_b4046f5b4d5b8020) +} + +var fileDescriptor_price_extension_type_b4046f5b4d5b8020 = []byte{ + // 394 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xd1, 0x6a, 0xd4, 0x40, + 0x18, 0x85, 0xcd, 0x56, 0xb7, 0xed, 0xd4, 0xe2, 0x38, 0x17, 0x15, 0xc4, 0x5e, 0xb4, 0x0f, 0x30, + 0x21, 0x78, 0x23, 0xe3, 0xd5, 0x24, 0x19, 0xd3, 0xa0, 0xcc, 0x84, 0x4c, 0x36, 0x82, 0x04, 0x96, + 0xd8, 0x0c, 0x21, 0xd0, 0x9d, 0x09, 0x3b, 0x69, 0xb1, 0xaf, 0xe3, 0xa5, 0x8f, 0xe2, 0x95, 0xcf, + 0xa1, 0x3e, 0x84, 0x4c, 0xd2, 0x2c, 0xc2, 0xa2, 0x37, 0xe1, 0xe4, 0xff, 0xcf, 0x77, 0x48, 0xce, + 0x0f, 0xde, 0xb4, 0xc6, 0xb4, 0x37, 0xca, 0xaf, 0x1b, 0xeb, 0x4f, 0xd2, 0xa9, 0xbb, 0xc0, 0x57, + 0xfa, 0x76, 0x63, 0xfd, 0x7e, 0xdb, 0x5d, 0xab, 0xb5, 0xfa, 0x32, 0x28, 0x6d, 0x3b, 0xa3, 0xd7, + 0xc3, 0x7d, 0xaf, 0x70, 0xbf, 0x35, 0x83, 0x41, 0xe7, 0x93, 0x1d, 0xd7, 0x8d, 0xc5, 0x3b, 0x12, + 0xdf, 0x05, 0x78, 0x24, 0x5f, 0xbe, 0x9a, 0x83, 0xfb, 0xce, 0xaf, 0xb5, 0x36, 0x43, 0x3d, 0x74, + 0x46, 0xdb, 0x09, 0xbe, 0xfc, 0xe5, 0x81, 0xb3, 0xcc, 0x65, 0xb3, 0x39, 0xba, 0xb8, 0xef, 0x15, + 0xd3, 0xb7, 0x9b, 0xcb, 0x1f, 0x1e, 0x40, 0xfb, 0x2b, 0xf4, 0x0c, 0x9c, 0xac, 0xb8, 0xcc, 0x58, + 0x94, 0xbe, 0x4b, 0x59, 0x0c, 0x1f, 0xa1, 0x13, 0x70, 0xb8, 0xe2, 0xef, 0xb9, 0xf8, 0xc8, 0xa1, + 0x87, 0x00, 0x58, 0x86, 0x39, 0xe5, 0xb1, 0x84, 0x0b, 0xa7, 0x59, 0xc9, 0x78, 0x21, 0xe1, 0x01, + 0x3a, 0x05, 0xc7, 0x1f, 0x44, 0x44, 0x8b, 0x54, 0x70, 0x09, 0x1f, 0xa3, 0xe7, 0xe0, 0x94, 0xb3, + 0x34, 0xb9, 0x0a, 0x45, 0x7e, 0x25, 0x44, 0x2c, 0xe1, 0x13, 0x74, 0x06, 0x50, 0x96, 0x8b, 0x78, + 0x15, 0x15, 0xeb, 0x88, 0x16, 0x2c, 0x11, 0x79, 0xca, 0x24, 0x5c, 0x3a, 0xeb, 0x3c, 0x2f, 0x52, + 0x96, 0x4b, 0x78, 0x88, 0x9e, 0x82, 0x23, 0xc9, 0xf2, 0x32, 0x8d, 0x98, 0x84, 0x47, 0x0e, 0x7c, + 0x78, 0xfb, 0x1b, 0x3c, 0x76, 0xe0, 0x3c, 0x9f, 
0x40, 0x10, 0xfe, 0xf6, 0xc0, 0xc5, 0xb5, 0xd9, + 0xe0, 0xff, 0x36, 0x16, 0xbe, 0xd8, 0xff, 0xeb, 0xcc, 0x95, 0x95, 0x79, 0x9f, 0xc2, 0x07, 0xb2, + 0x35, 0x37, 0xb5, 0x6e, 0xb1, 0xd9, 0xb6, 0x7e, 0xab, 0xf4, 0x58, 0xe5, 0x7c, 0xb5, 0xbe, 0xb3, + 0xff, 0x38, 0xe2, 0xdb, 0xf1, 0xf9, 0x75, 0x71, 0x90, 0x50, 0xfa, 0x6d, 0x71, 0x9e, 0x4c, 0x51, + 0xb4, 0xb1, 0x78, 0x92, 0x4e, 0x95, 0x01, 0x76, 0xe5, 0xdb, 0xef, 0xf3, 0xbe, 0xa2, 0x8d, 0xad, + 0x76, 0xfb, 0xaa, 0x0c, 0xaa, 0x71, 0xff, 0x73, 0x71, 0x31, 0x0d, 0x09, 0xa1, 0x8d, 0x25, 0x64, + 0xe7, 0x20, 0xa4, 0x0c, 0x08, 0x19, 0x3d, 0x9f, 0x97, 0xe3, 0x87, 0xbd, 0xfe, 0x13, 0x00, 0x00, + 0xff, 0xff, 0x6b, 0x1c, 0x5f, 0xce, 0x5c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_placeholder_field.pb.go new file mode 100644 index 0000000..0d948d9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/price_placeholder_field.pb.go @@ -0,0 +1,385 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/price_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Price placeholder fields. +type PricePlaceholderFieldEnum_PricePlaceholderField int32 + +const ( + // Not specified. + PricePlaceholderFieldEnum_UNSPECIFIED PricePlaceholderFieldEnum_PricePlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + PricePlaceholderFieldEnum_UNKNOWN PricePlaceholderFieldEnum_PricePlaceholderField = 1 + // Data Type: STRING. The type of your price feed. Must match one of the + // predefined price feed type exactly. + PricePlaceholderFieldEnum_TYPE PricePlaceholderFieldEnum_PricePlaceholderField = 2 + // Data Type: STRING. The qualifier of each price. Must match one of the + // predefined price qualifiers exactly. + PricePlaceholderFieldEnum_PRICE_QUALIFIER PricePlaceholderFieldEnum_PricePlaceholderField = 3 + // Data Type: URL. Tracking template for the price feed when using Upgraded + // URLs. + PricePlaceholderFieldEnum_TRACKING_TEMPLATE PricePlaceholderFieldEnum_PricePlaceholderField = 4 + // Data Type: STRING. Language of the price feed. Must match one of the + // available available locale codes exactly. + PricePlaceholderFieldEnum_LANGUAGE PricePlaceholderFieldEnum_PricePlaceholderField = 5 + // Data Type: STRING. Final URL suffix for the price feed when using + // parallel tracking. + PricePlaceholderFieldEnum_FINAL_URL_SUFFIX PricePlaceholderFieldEnum_PricePlaceholderField = 6 + // Data Type: STRING. The header of item 1 of the table. + PricePlaceholderFieldEnum_ITEM_1_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 100 + // Data Type: STRING. The description of item 1 of the table. 
+ PricePlaceholderFieldEnum_ITEM_1_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 101 + // Data Type: MONEY. The price (money with currency) of item 1 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_1_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 102 + // Data Type: STRING. The price unit of item 1 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_1_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 103 + // Data Type: URL_LIST. The final URLs of item 1 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_1_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 104 + // Data Type: URL_LIST. The final mobile URLs of item 1 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_1_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 105 + // Data Type: STRING. The header of item 2 of the table. + PricePlaceholderFieldEnum_ITEM_2_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 200 + // Data Type: STRING. The description of item 2 of the table. + PricePlaceholderFieldEnum_ITEM_2_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 201 + // Data Type: MONEY. The price (money with currency) of item 2 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_2_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 202 + // Data Type: STRING. The price unit of item 2 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_2_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 203 + // Data Type: URL_LIST. The final URLs of item 2 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_2_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 204 + // Data Type: URL_LIST. The final mobile URLs of item 2 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_2_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 205 + // Data Type: STRING. The header of item 3 of the table. + PricePlaceholderFieldEnum_ITEM_3_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 300 + // Data Type: STRING. The description of item 3 of the table. + PricePlaceholderFieldEnum_ITEM_3_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 301 + // Data Type: MONEY. The price (money with currency) of item 3 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_3_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 302 + // Data Type: STRING. The price unit of item 3 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_3_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 303 + // Data Type: URL_LIST. The final URLs of item 3 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_3_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 304 + // Data Type: URL_LIST. The final mobile URLs of item 3 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_3_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 305 + // Data Type: STRING. The header of item 4 of the table. + PricePlaceholderFieldEnum_ITEM_4_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 400 + // Data Type: STRING. The description of item 4 of the table. 
+ PricePlaceholderFieldEnum_ITEM_4_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 401 + // Data Type: MONEY. The price (money with currency) of item 4 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_4_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 402 + // Data Type: STRING. The price unit of item 4 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_4_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 403 + // Data Type: URL_LIST. The final URLs of item 4 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_4_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 404 + // Data Type: URL_LIST. The final mobile URLs of item 4 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_4_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 405 + // Data Type: STRING. The header of item 5 of the table. + PricePlaceholderFieldEnum_ITEM_5_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 500 + // Data Type: STRING. The description of item 5 of the table. + PricePlaceholderFieldEnum_ITEM_5_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 501 + // Data Type: MONEY. The price (money with currency) of item 5 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_5_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 502 + // Data Type: STRING. The price unit of item 5 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_5_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 503 + // Data Type: URL_LIST. The final URLs of item 5 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_5_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 504 + // Data Type: URL_LIST. The final mobile URLs of item 5 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_5_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 505 + // Data Type: STRING. The header of item 6 of the table. + PricePlaceholderFieldEnum_ITEM_6_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 600 + // Data Type: STRING. The description of item 6 of the table. + PricePlaceholderFieldEnum_ITEM_6_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 601 + // Data Type: MONEY. The price (money with currency) of item 6 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_6_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 602 + // Data Type: STRING. The price unit of item 6 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_6_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 603 + // Data Type: URL_LIST. The final URLs of item 6 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_6_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 604 + // Data Type: URL_LIST. The final mobile URLs of item 6 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_6_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 605 + // Data Type: STRING. The header of item 7 of the table. + PricePlaceholderFieldEnum_ITEM_7_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 700 + // Data Type: STRING. The description of item 7 of the table. 
+ PricePlaceholderFieldEnum_ITEM_7_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 701 + // Data Type: MONEY. The price (money with currency) of item 7 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_7_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 702 + // Data Type: STRING. The price unit of item 7 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_7_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 703 + // Data Type: URL_LIST. The final URLs of item 7 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_7_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 704 + // Data Type: URL_LIST. The final mobile URLs of item 7 of the table when + // using Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_7_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 705 + // Data Type: STRING. The header of item 8 of the table. + PricePlaceholderFieldEnum_ITEM_8_HEADER PricePlaceholderFieldEnum_PricePlaceholderField = 800 + // Data Type: STRING. The description of item 8 of the table. + PricePlaceholderFieldEnum_ITEM_8_DESCRIPTION PricePlaceholderFieldEnum_PricePlaceholderField = 801 + // Data Type: MONEY. The price (money with currency) of item 8 of the table, + // e.g., 30 USD. The currency must match one of the available currencies. + PricePlaceholderFieldEnum_ITEM_8_PRICE PricePlaceholderFieldEnum_PricePlaceholderField = 802 + // Data Type: STRING. The price unit of item 8 of the table. Must match one + // of the predefined price units. + PricePlaceholderFieldEnum_ITEM_8_UNIT PricePlaceholderFieldEnum_PricePlaceholderField = 803 + // Data Type: URL_LIST. The final URLs of item 8 of the table when using + // Upgraded URLs. + PricePlaceholderFieldEnum_ITEM_8_FINAL_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 804 + // Data Type: URL_LIST. The final mobile URLs of item 8 of the table when + // using Upgraded URLs. 
+ PricePlaceholderFieldEnum_ITEM_8_FINAL_MOBILE_URLS PricePlaceholderFieldEnum_PricePlaceholderField = 805 +) + +var PricePlaceholderFieldEnum_PricePlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TYPE", + 3: "PRICE_QUALIFIER", + 4: "TRACKING_TEMPLATE", + 5: "LANGUAGE", + 6: "FINAL_URL_SUFFIX", + 100: "ITEM_1_HEADER", + 101: "ITEM_1_DESCRIPTION", + 102: "ITEM_1_PRICE", + 103: "ITEM_1_UNIT", + 104: "ITEM_1_FINAL_URLS", + 105: "ITEM_1_FINAL_MOBILE_URLS", + 200: "ITEM_2_HEADER", + 201: "ITEM_2_DESCRIPTION", + 202: "ITEM_2_PRICE", + 203: "ITEM_2_UNIT", + 204: "ITEM_2_FINAL_URLS", + 205: "ITEM_2_FINAL_MOBILE_URLS", + 300: "ITEM_3_HEADER", + 301: "ITEM_3_DESCRIPTION", + 302: "ITEM_3_PRICE", + 303: "ITEM_3_UNIT", + 304: "ITEM_3_FINAL_URLS", + 305: "ITEM_3_FINAL_MOBILE_URLS", + 400: "ITEM_4_HEADER", + 401: "ITEM_4_DESCRIPTION", + 402: "ITEM_4_PRICE", + 403: "ITEM_4_UNIT", + 404: "ITEM_4_FINAL_URLS", + 405: "ITEM_4_FINAL_MOBILE_URLS", + 500: "ITEM_5_HEADER", + 501: "ITEM_5_DESCRIPTION", + 502: "ITEM_5_PRICE", + 503: "ITEM_5_UNIT", + 504: "ITEM_5_FINAL_URLS", + 505: "ITEM_5_FINAL_MOBILE_URLS", + 600: "ITEM_6_HEADER", + 601: "ITEM_6_DESCRIPTION", + 602: "ITEM_6_PRICE", + 603: "ITEM_6_UNIT", + 604: "ITEM_6_FINAL_URLS", + 605: "ITEM_6_FINAL_MOBILE_URLS", + 700: "ITEM_7_HEADER", + 701: "ITEM_7_DESCRIPTION", + 702: "ITEM_7_PRICE", + 703: "ITEM_7_UNIT", + 704: "ITEM_7_FINAL_URLS", + 705: "ITEM_7_FINAL_MOBILE_URLS", + 800: "ITEM_8_HEADER", + 801: "ITEM_8_DESCRIPTION", + 802: "ITEM_8_PRICE", + 803: "ITEM_8_UNIT", + 804: "ITEM_8_FINAL_URLS", + 805: "ITEM_8_FINAL_MOBILE_URLS", +} +var PricePlaceholderFieldEnum_PricePlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TYPE": 2, + "PRICE_QUALIFIER": 3, + "TRACKING_TEMPLATE": 4, + "LANGUAGE": 5, + "FINAL_URL_SUFFIX": 6, + "ITEM_1_HEADER": 100, + "ITEM_1_DESCRIPTION": 101, + "ITEM_1_PRICE": 102, + "ITEM_1_UNIT": 103, + "ITEM_1_FINAL_URLS": 104, + "ITEM_1_FINAL_MOBILE_URLS": 105, + "ITEM_2_HEADER": 200, + "ITEM_2_DESCRIPTION": 201, + "ITEM_2_PRICE": 202, + "ITEM_2_UNIT": 203, + "ITEM_2_FINAL_URLS": 204, + "ITEM_2_FINAL_MOBILE_URLS": 205, + "ITEM_3_HEADER": 300, + "ITEM_3_DESCRIPTION": 301, + "ITEM_3_PRICE": 302, + "ITEM_3_UNIT": 303, + "ITEM_3_FINAL_URLS": 304, + "ITEM_3_FINAL_MOBILE_URLS": 305, + "ITEM_4_HEADER": 400, + "ITEM_4_DESCRIPTION": 401, + "ITEM_4_PRICE": 402, + "ITEM_4_UNIT": 403, + "ITEM_4_FINAL_URLS": 404, + "ITEM_4_FINAL_MOBILE_URLS": 405, + "ITEM_5_HEADER": 500, + "ITEM_5_DESCRIPTION": 501, + "ITEM_5_PRICE": 502, + "ITEM_5_UNIT": 503, + "ITEM_5_FINAL_URLS": 504, + "ITEM_5_FINAL_MOBILE_URLS": 505, + "ITEM_6_HEADER": 600, + "ITEM_6_DESCRIPTION": 601, + "ITEM_6_PRICE": 602, + "ITEM_6_UNIT": 603, + "ITEM_6_FINAL_URLS": 604, + "ITEM_6_FINAL_MOBILE_URLS": 605, + "ITEM_7_HEADER": 700, + "ITEM_7_DESCRIPTION": 701, + "ITEM_7_PRICE": 702, + "ITEM_7_UNIT": 703, + "ITEM_7_FINAL_URLS": 704, + "ITEM_7_FINAL_MOBILE_URLS": 705, + "ITEM_8_HEADER": 800, + "ITEM_8_DESCRIPTION": 801, + "ITEM_8_PRICE": 802, + "ITEM_8_UNIT": 803, + "ITEM_8_FINAL_URLS": 804, + "ITEM_8_FINAL_MOBILE_URLS": 805, +} + +func (x PricePlaceholderFieldEnum_PricePlaceholderField) String() string { + return proto.EnumName(PricePlaceholderFieldEnum_PricePlaceholderField_name, int32(x)) +} +func (PricePlaceholderFieldEnum_PricePlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_price_placeholder_field_70027d62066d5e9c, []int{0, 0} +} + +// Values for Price placeholder fields. 
+type PricePlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PricePlaceholderFieldEnum) Reset() { *m = PricePlaceholderFieldEnum{} } +func (m *PricePlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*PricePlaceholderFieldEnum) ProtoMessage() {} +func (*PricePlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_price_placeholder_field_70027d62066d5e9c, []int{0} +} +func (m *PricePlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PricePlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *PricePlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PricePlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *PricePlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PricePlaceholderFieldEnum.Merge(dst, src) +} +func (m *PricePlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_PricePlaceholderFieldEnum.Size(m) +} +func (m *PricePlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PricePlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PricePlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PricePlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.PricePlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PricePlaceholderFieldEnum_PricePlaceholderField", PricePlaceholderFieldEnum_PricePlaceholderField_name, PricePlaceholderFieldEnum_PricePlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/price_placeholder_field.proto", fileDescriptor_price_placeholder_field_70027d62066d5e9c) +} + +var fileDescriptor_price_placeholder_field_70027d62066d5e9c = []byte{ + // 731 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xd4, 0x49, 0x4f, 0xdb, 0x5a, + 0x14, 0x07, 0xf0, 0xe7, 0x21, 0x7e, 0x70, 0xe1, 0x89, 0xcb, 0x7d, 0x0f, 0x5e, 0x8b, 0x60, 0x01, + 0x1f, 0xc0, 0x51, 0xe6, 0xc8, 0xac, 0x9c, 0xe0, 0xa4, 0x16, 0xc1, 0xb8, 0x19, 0xe8, 0xa0, 0x48, + 0x56, 0x8a, 0x4d, 0x88, 0x14, 0xe2, 0x28, 0x06, 0x3e, 0x47, 0xc7, 0x7d, 0x4b, 0xdb, 0x4d, 0xd5, + 0x71, 0xdf, 0x71, 0xd7, 0xd2, 0x76, 0xd1, 0x5d, 0xc7, 0x2f, 0xd0, 0x0d, 0x9b, 0x8e, 0xbb, 0xca, + 0xf1, 0xf5, 0x89, 0x2d, 0xb9, 0xdd, 0x58, 0x47, 0xe7, 0xdc, 0x73, 0xfc, 0x5b, 0xfd, 0xd1, 0x72, + 0xdb, 0xb6, 0xdb, 0x5d, 0x2b, 0xde, 0x32, 0x9d, 0xb8, 0x57, 0xba, 0xd5, 0x7e, 0x22, 0x6e, 0xf5, + 0xf6, 0x76, 0x9c, 0x78, 0x7f, 0xd0, 0xd9, 0xb4, 0x8c, 0x7e, 0xb7, 0xb5, 0x69, 0x6d, 0xdb, 0x5d, + 0xd3, 0x1a, 0x18, 0x5b, 0x1d, 0xab, 0x6b, 0x8a, 0xfd, 0x81, 0xbd, 0x6b, 0x93, 0x05, 0x6f, 0x43, + 0x6c, 0x99, 0x8e, 0x08, 0xcb, 0xe2, 0x7e, 0x42, 0x1c, 0x2e, 0xcf, 0xcd, 0xfb, 0xb7, 0xfb, 0x9d, + 0x78, 0xab, 0xd7, 0xb3, 0x77, 0x5b, 0xbb, 0x1d, 0xbb, 0xe7, 0x78, 0xcb, 0x4b, 0x47, 0xe3, 0xe8, + 0xb8, 0xee, 0x9e, 0xd7, 0x47, 0xd7, 0x4b, 0xee, 0x71, 0xa5, 0xb7, 0xb7, 0xb3, 0xf4, 0x66, 0x1c, + 0xcd, 0x44, 0x4e, 0xc9, 0x14, 0x9a, 0x68, 0x68, 0x35, 0x5d, 0x29, 0xaa, 0x25, 0x55, 0x59, 0xc1, + 0x7f, 0x91, 0x09, 0xf4, 0x77, 0x43, 0x5b, 0xd5, 0xd6, 0x4f, 0x69, 0x98, 0x21, 0x63, 0x88, 0xaf, + 0x9f, 0xd1, 0x15, 0xcc, 0x92, 0x7f, 0xd1, 0x94, 0x5e, 0x55, 0x8b, 0x8a, 0x71, 0xb2, 0x21, 0x57, + 0xdc, 0xb7, 0x55, 0xcc, 0x91, 0x19, 0x34, 0x5d, 0xaf, 0xca, 0xc5, 0x55, 0x55, 0x2b, 0x1b, 0x75, + 0x65, 0x4d, 0xaf, 0xc8, 0x75, 0x05, 0xf3, 0x64, 0x12, 0x8d, 
0x55, 0x64, 0xad, 0xdc, 0x90, 0xcb, + 0x0a, 0x8e, 0x91, 0xff, 0x10, 0x2e, 0xa9, 0x9a, 0x5c, 0x31, 0x1a, 0xd5, 0x8a, 0x51, 0x6b, 0x94, + 0x4a, 0xea, 0x69, 0x2c, 0x90, 0x69, 0xf4, 0x8f, 0x5a, 0x57, 0xd6, 0x8c, 0x84, 0x71, 0x42, 0x91, + 0x57, 0x94, 0x2a, 0x36, 0xc9, 0x2c, 0x22, 0xb4, 0xb5, 0xa2, 0xd4, 0x8a, 0x55, 0x55, 0xaf, 0xab, + 0xeb, 0x1a, 0xb6, 0x08, 0x46, 0x93, 0xb4, 0x3f, 0x14, 0xe0, 0x2d, 0x17, 0x4d, 0x3b, 0x0d, 0x4d, + 0xad, 0xe3, 0xb6, 0x0b, 0xa1, 0x0d, 0xf8, 0x55, 0x0d, 0x6f, 0x93, 0x79, 0x74, 0x2c, 0xd4, 0x5e, + 0x5b, 0x2f, 0xa8, 0x15, 0xc5, 0x9b, 0x76, 0x08, 0xa1, 0x84, 0xa4, 0x4f, 0x78, 0xce, 0x90, 0xff, + 0xa9, 0x21, 0x19, 0x32, 0xbc, 0x60, 0xc8, 0x34, 0x45, 0x24, 0x29, 0xe2, 0x90, 0x21, 0x98, 0x2a, + 0x92, 0x9e, 0xe2, 0x25, 0x43, 0x66, 0x29, 0x23, 0x19, 0x64, 0xbc, 0x62, 0xc8, 0x02, 0x75, 0x24, + 0x23, 0x1c, 0xaf, 0x19, 0x80, 0xa4, 0x7c, 0xc8, 0x6d, 0x16, 0x20, 0xa9, 0x10, 0xe4, 0x0e, 0x0b, + 0x90, 0x14, 0x85, 0xdc, 0x65, 0x01, 0x92, 0xf2, 0x20, 0xf7, 0x58, 0x80, 0xa4, 0x82, 0x90, 0xfb, + 0x2c, 0x40, 0x52, 0x11, 0x90, 0x07, 0x2c, 0x40, 0xd2, 0x3e, 0xe4, 0x3c, 0x07, 0x90, 0x74, 0x08, + 0x72, 0x81, 0x03, 0x48, 0x9a, 0x42, 0x2e, 0x72, 0x00, 0x49, 0x7b, 0x90, 0x4b, 0x1c, 0x40, 0xd2, + 0x41, 0xc8, 0x65, 0x0e, 0x20, 0xe9, 0x08, 0xc8, 0x15, 0x0e, 0x20, 0x19, 0x1f, 0xf2, 0x65, 0x04, + 0xc9, 0x84, 0x20, 0x5f, 0x47, 0x90, 0x0c, 0x85, 0x7c, 0x1b, 0x41, 0x32, 0x1e, 0xe4, 0xfb, 0x08, + 0x92, 0x09, 0x42, 0x7e, 0x8c, 0x20, 0x99, 0x08, 0xc8, 0xcf, 0x11, 0x24, 0xeb, 0x43, 0xde, 0xf2, + 0x00, 0xc9, 0x86, 0x20, 0xef, 0x78, 0x80, 0x64, 0x29, 0xe4, 0x3d, 0x0f, 0x90, 0xac, 0x07, 0xf9, + 0xc0, 0x03, 0x24, 0x1b, 0x84, 0x7c, 0xe4, 0x01, 0x92, 0x8d, 0x80, 0x7c, 0xe2, 0x01, 0x92, 0xf3, + 0x21, 0x0f, 0x63, 0x00, 0xc9, 0x85, 0x20, 0x8f, 0x62, 0x00, 0xc9, 0x51, 0xc8, 0xe3, 0x18, 0x40, + 0x72, 0x1e, 0xe4, 0x49, 0x0c, 0x20, 0xb9, 0x20, 0xe4, 0x69, 0x0c, 0x20, 0xb9, 0x08, 0xc8, 0xb3, + 0x18, 0x40, 0xf2, 0x3e, 0xe4, 0xaa, 0x00, 0x90, 0x7c, 0x08, 0x72, 0x4d, 0x00, 0x48, 0x9e, 0x42, + 0x0e, 0x04, 0x80, 0xe4, 0x3d, 0xc8, 0x75, 0x01, 0x20, 0xf9, 0x20, 0xe4, 0x86, 0x00, 0x90, 0x7c, + 0x04, 0xe4, 0xa6, 0x50, 0x38, 0x62, 0xd0, 0xe2, 0xa6, 0xbd, 0x23, 0xfe, 0x31, 0x35, 0x0b, 0x73, + 0x91, 0xb1, 0xa7, 0xbb, 0x99, 0xa9, 0x33, 0x67, 0x0b, 0x74, 0xb9, 0x6d, 0x77, 0x5b, 0xbd, 0xb6, + 0x68, 0x0f, 0xda, 0xf1, 0xb6, 0xd5, 0x1b, 0x26, 0xaa, 0x9f, 0xdf, 0xfd, 0x8e, 0xf3, 0x9b, 0x38, + 0x5f, 0x1e, 0x7e, 0x0f, 0x58, 0xae, 0x2c, 0xcb, 0xb7, 0xd8, 0x85, 0xb2, 0x77, 0x4a, 0x36, 0x1d, + 0xd1, 0x2b, 0xdd, 0x6a, 0x23, 0x21, 0xba, 0x01, 0xec, 0x1c, 0xfa, 0xf3, 0xa6, 0x6c, 0x3a, 0x4d, + 0x98, 0x37, 0x37, 0x12, 0xcd, 0xe1, 0xfc, 0x33, 0xbb, 0xe8, 0x35, 0x25, 0x49, 0x36, 0x1d, 0x49, + 0x82, 0x17, 0x92, 0xb4, 0x91, 0x90, 0xa4, 0xe1, 0x9b, 0x73, 0xc2, 0x10, 0x96, 0xfa, 0x15, 0x00, + 0x00, 0xff, 0xff, 0xce, 0x0b, 0xb8, 0x5a, 0x66, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_level.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_level.pb.go new file mode 100644 index 0000000..386bf8e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_level.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/product_bidding_category_level.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the level of the product bidding category. +type ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel int32 + +const ( + // Not specified. + ProductBiddingCategoryLevelEnum_UNSPECIFIED ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 0 + // Used for return value only. Represents value unknown in this version. + ProductBiddingCategoryLevelEnum_UNKNOWN ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 1 + // Level 1. + ProductBiddingCategoryLevelEnum_LEVEL1 ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 2 + // Level 2. + ProductBiddingCategoryLevelEnum_LEVEL2 ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 3 + // Level 3. + ProductBiddingCategoryLevelEnum_LEVEL3 ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 4 + // Level 4. + ProductBiddingCategoryLevelEnum_LEVEL4 ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 5 + // Level 5. + ProductBiddingCategoryLevelEnum_LEVEL5 ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel = 6 +) + +var ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LEVEL1", + 3: "LEVEL2", + 4: "LEVEL3", + 5: "LEVEL4", + 6: "LEVEL5", +} +var ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LEVEL1": 2, + "LEVEL2": 3, + "LEVEL3": 4, + "LEVEL4": 5, + "LEVEL5": 6, +} + +func (x ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel) String() string { + return proto.EnumName(ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel_name, int32(x)) +} +func (ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_bidding_category_level_26189038419dce97, []int{0, 0} +} + +// Level of a product bidding category. 
+type ProductBiddingCategoryLevelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductBiddingCategoryLevelEnum) Reset() { *m = ProductBiddingCategoryLevelEnum{} } +func (m *ProductBiddingCategoryLevelEnum) String() string { return proto.CompactTextString(m) } +func (*ProductBiddingCategoryLevelEnum) ProtoMessage() {} +func (*ProductBiddingCategoryLevelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_bidding_category_level_26189038419dce97, []int{0} +} +func (m *ProductBiddingCategoryLevelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductBiddingCategoryLevelEnum.Unmarshal(m, b) +} +func (m *ProductBiddingCategoryLevelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductBiddingCategoryLevelEnum.Marshal(b, m, deterministic) +} +func (dst *ProductBiddingCategoryLevelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductBiddingCategoryLevelEnum.Merge(dst, src) +} +func (m *ProductBiddingCategoryLevelEnum) XXX_Size() int { + return xxx_messageInfo_ProductBiddingCategoryLevelEnum.Size(m) +} +func (m *ProductBiddingCategoryLevelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductBiddingCategoryLevelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductBiddingCategoryLevelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductBiddingCategoryLevelEnum)(nil), "google.ads.googleads.v1.enums.ProductBiddingCategoryLevelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel", ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel_name, ProductBiddingCategoryLevelEnum_ProductBiddingCategoryLevel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_bidding_category_level.proto", fileDescriptor_product_bidding_category_level_26189038419dce97) +} + +var fileDescriptor_product_bidding_category_level_26189038419dce97 = []byte{ + // 329 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x4b, 0xfb, 0x30, + 0x18, 0xc6, 0xff, 0xed, 0xfe, 0x4e, 0xc8, 0x0e, 0x86, 0x1e, 0xd5, 0xa1, 0xdb, 0x07, 0x48, 0xa9, + 0xd3, 0x4b, 0x3c, 0xb5, 0xb3, 0x8e, 0xe1, 0xa8, 0x05, 0x59, 0x05, 0x29, 0x8c, 0x6c, 0x09, 0xa1, + 0xd0, 0x25, 0xa5, 0xe9, 0x26, 0x7e, 0x15, 0x8f, 0x1e, 0xfd, 0x28, 0x7e, 0x14, 0xef, 0xde, 0xa5, + 0xc9, 0x96, 0x9b, 0xbb, 0x94, 0x1f, 0x7d, 0xdf, 0xf7, 0x79, 0xde, 0xf7, 0x09, 0x88, 0xb8, 0x94, + 0xbc, 0x64, 0x3e, 0xa1, 0xca, 0x37, 0xd8, 0xd2, 0x36, 0xf0, 0x99, 0xd8, 0xac, 0x95, 0x5f, 0xd5, + 0x92, 0x6e, 0x56, 0xcd, 0x62, 0x59, 0x50, 0x5a, 0x08, 0xbe, 0x58, 0x91, 0x86, 0x71, 0x59, 0xbf, + 0x2d, 0x4a, 0xb6, 0x65, 0x25, 0xaa, 0x6a, 0xd9, 0x48, 0xaf, 0x6f, 0x06, 0x11, 0xa1, 0x0a, 0x59, + 0x0d, 0xb4, 0x0d, 0x90, 0xd6, 0x38, 0x3d, 0xdf, 0x5b, 0x54, 0x85, 0x4f, 0x84, 0x90, 0x0d, 0x69, + 0x0a, 0x29, 0x94, 0x19, 0x1e, 0xbe, 0x3b, 0xe0, 0x22, 0x35, 0x2e, 0x91, 0x31, 0x19, 0xef, 0x3c, + 0x66, 0xad, 0x45, 0x2c, 0x36, 0xeb, 0xe1, 0x2b, 0x38, 0x3b, 0xd0, 0xe2, 0x9d, 0x80, 0xde, 0x3c, + 0x79, 0x4a, 0xe3, 0xf1, 0xf4, 0x7e, 0x1a, 0xdf, 0xc1, 0x7f, 0x5e, 0x0f, 0x1c, 0xcf, 0x93, 0x87, + 0xe4, 0xf1, 0x39, 0x81, 0x8e, 0x07, 0x40, 0x77, 0x16, 0x67, 0xf1, 0x2c, 0x80, 0xae, 0xe5, 0x2b, + 0xd8, 0xb1, 0x3c, 0x82, 0xff, 0x2d, 0x5f, 0xc3, 0x23, 0xcb, 0x37, 0xb0, 0x1b, 0xfd, 0x38, 0x60, + 0xb0, 0x92, 0x6b, 0x74, 0xf0, 0xc0, 0xe8, 0xf2, 0xc0, 0x72, 0x69, 0x7b, 0x64, 
0xea, 0xbc, 0xec, + 0x72, 0x46, 0x5c, 0x96, 0x44, 0x70, 0x24, 0x6b, 0xee, 0x73, 0x26, 0x74, 0x04, 0xfb, 0xdc, 0xab, + 0x42, 0xfd, 0xf1, 0x0c, 0xb7, 0xfa, 0xfb, 0xe1, 0x76, 0x26, 0x61, 0xf8, 0xe9, 0xf6, 0x27, 0x46, + 0x2a, 0xa4, 0x0a, 0x19, 0x6c, 0x29, 0x0b, 0x50, 0x9b, 0x95, 0xfa, 0xda, 0xd7, 0xf3, 0x90, 0xaa, + 0xdc, 0xd6, 0xf3, 0x2c, 0xc8, 0x75, 0xfd, 0xdb, 0x1d, 0x98, 0x9f, 0x18, 0x87, 0x54, 0x61, 0x6c, + 0x3b, 0x30, 0xce, 0x02, 0x8c, 0x75, 0xcf, 0xb2, 0xab, 0x17, 0x1b, 0xfd, 0x06, 0x00, 0x00, 0xff, + 0xff, 0x0f, 0xc8, 0x40, 0xe4, 0x1e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_status.pb.go new file mode 100644 index 0000000..db1fe77 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_bidding_category_status.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/product_bidding_category_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the status of the product bidding category. +type ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus int32 + +const ( + // Not specified. + ProductBiddingCategoryStatusEnum_UNSPECIFIED ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus = 0 + // Used for return value only. Represents value unknown in this version. + ProductBiddingCategoryStatusEnum_UNKNOWN ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus = 1 + // The category is active and can be used for bidding. + ProductBiddingCategoryStatusEnum_ACTIVE ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus = 2 + // The category is obsolete. Used only for reporting purposes. + ProductBiddingCategoryStatusEnum_OBSOLETE ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus = 3 +) + +var ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ACTIVE", + 3: "OBSOLETE", +} +var ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ACTIVE": 2, + "OBSOLETE": 3, +} + +func (x ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus) String() string { + return proto.EnumName(ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus_name, int32(x)) +} +func (ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_bidding_category_status_e3bdc1049d216265, []int{0, 0} +} + +// Status of the product bidding category. 
+type ProductBiddingCategoryStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductBiddingCategoryStatusEnum) Reset() { *m = ProductBiddingCategoryStatusEnum{} } +func (m *ProductBiddingCategoryStatusEnum) String() string { return proto.CompactTextString(m) } +func (*ProductBiddingCategoryStatusEnum) ProtoMessage() {} +func (*ProductBiddingCategoryStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_bidding_category_status_e3bdc1049d216265, []int{0} +} +func (m *ProductBiddingCategoryStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductBiddingCategoryStatusEnum.Unmarshal(m, b) +} +func (m *ProductBiddingCategoryStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductBiddingCategoryStatusEnum.Marshal(b, m, deterministic) +} +func (dst *ProductBiddingCategoryStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductBiddingCategoryStatusEnum.Merge(dst, src) +} +func (m *ProductBiddingCategoryStatusEnum) XXX_Size() int { + return xxx_messageInfo_ProductBiddingCategoryStatusEnum.Size(m) +} +func (m *ProductBiddingCategoryStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductBiddingCategoryStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductBiddingCategoryStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductBiddingCategoryStatusEnum)(nil), "google.ads.googleads.v1.enums.ProductBiddingCategoryStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus", ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus_name, ProductBiddingCategoryStatusEnum_ProductBiddingCategoryStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_bidding_category_status.proto", fileDescriptor_product_bidding_category_status_e3bdc1049d216265) +} + +var fileDescriptor_product_bidding_category_status_e3bdc1049d216265 = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x86, 0x5d, 0x07, 0x53, 0x32, 0xc1, 0xd2, 0x4b, 0xd9, 0xc0, 0xed, 0x01, 0x12, 0x8a, 0x77, + 0xf1, 0x2a, 0xad, 0x71, 0x0c, 0xa5, 0x2b, 0x6c, 0xab, 0x20, 0x85, 0x91, 0x2d, 0x25, 0x14, 0xb6, + 0xa4, 0x34, 0xe9, 0x40, 0x1f, 0xc7, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0x01, 0xbc, 0x96, 0x26, 0xdb, + 0xee, 0xdc, 0x4d, 0xf8, 0xc9, 0x39, 0xe7, 0xff, 0xce, 0xf9, 0x41, 0x2c, 0x94, 0x12, 0xdb, 0x02, + 0x31, 0xae, 0x91, 0x93, 0xad, 0xda, 0x87, 0xa8, 0x90, 0xcd, 0x4e, 0xa3, 0xaa, 0x56, 0xbc, 0xd9, + 0x98, 0xd5, 0xba, 0xe4, 0xbc, 0x94, 0x62, 0xb5, 0x61, 0xa6, 0x10, 0xaa, 0x7e, 0x5f, 0x69, 0xc3, + 0x4c, 0xa3, 0x61, 0x55, 0x2b, 0xa3, 0x82, 0xa1, 0x9b, 0x84, 0x8c, 0x6b, 0x78, 0x32, 0x81, 0xfb, + 0x10, 0x5a, 0x93, 0xdb, 0xc1, 0x91, 0x51, 0x95, 0x88, 0x49, 0xa9, 0x0c, 0x33, 0xa5, 0x92, 0x87, + 0xe1, 0xf1, 0x07, 0xb8, 0x4b, 0x1d, 0x25, 0x72, 0x90, 0xf8, 0xc0, 0x98, 0x5b, 0x04, 0x95, 0xcd, + 0x6e, 0x9c, 0x81, 0xc1, 0xb9, 0x9e, 0xe0, 0x06, 0xf4, 0x97, 0xc9, 0x3c, 0xa5, 0xf1, 0xf4, 0x69, + 0x4a, 0x1f, 0xfd, 0x8b, 0xa0, 0x0f, 0x2e, 0x97, 0xc9, 0x73, 0x32, 0x7b, 0x4d, 0xfc, 0x4e, 0x00, + 0x40, 0x8f, 0xc4, 0x8b, 0x69, 0x46, 0x7d, 0x2f, 0xb8, 0x06, 0x57, 0xb3, 0x68, 0x3e, 0x7b, 0xa1, + 0x0b, 0xea, 0x77, 0xa3, 0xdf, 0x0e, 0x18, 0x6d, 0xd4, 0x0e, 0x9e, 0xdd, 0x3f, 0x1a, 0x9d, 0x63, + 0xa7, 0xed, 0x11, 0x69, 0xe7, 0x2d, 0x3a, 0x78, 
0x08, 0xb5, 0x65, 0x52, 0x40, 0x55, 0x0b, 0x24, + 0x0a, 0x69, 0x4f, 0x3c, 0x06, 0x5b, 0x95, 0xfa, 0x9f, 0x9c, 0x1f, 0xec, 0xfb, 0xe9, 0x75, 0x27, + 0x84, 0x7c, 0x79, 0xc3, 0x89, 0xb3, 0x22, 0x5c, 0x43, 0x27, 0x5b, 0x95, 0x85, 0xb0, 0xcd, 0x42, + 0x7f, 0x1f, 0xeb, 0x39, 0xe1, 0x3a, 0x3f, 0xd5, 0xf3, 0x2c, 0xcc, 0x6d, 0xfd, 0xc7, 0x1b, 0xb9, + 0x4f, 0x8c, 0x09, 0xd7, 0x18, 0x9f, 0x3a, 0x30, 0xce, 0x42, 0x8c, 0x6d, 0xcf, 0xba, 0x67, 0x17, + 0xbb, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xba, 0x44, 0x1c, 0x67, 0xff, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel.pb.go new file mode 100644 index 0000000..3d5cfcc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/product_channel.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the locality of a product offer. +type ProductChannelEnum_ProductChannel int32 + +const ( + // Not specified. + ProductChannelEnum_UNSPECIFIED ProductChannelEnum_ProductChannel = 0 + // Used for return value only. Represents value unknown in this version. + ProductChannelEnum_UNKNOWN ProductChannelEnum_ProductChannel = 1 + // The item is sold online. + ProductChannelEnum_ONLINE ProductChannelEnum_ProductChannel = 2 + // The item is sold in local stores. + ProductChannelEnum_LOCAL ProductChannelEnum_ProductChannel = 3 +) + +var ProductChannelEnum_ProductChannel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ONLINE", + 3: "LOCAL", +} +var ProductChannelEnum_ProductChannel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ONLINE": 2, + "LOCAL": 3, +} + +func (x ProductChannelEnum_ProductChannel) String() string { + return proto.EnumName(ProductChannelEnum_ProductChannel_name, int32(x)) +} +func (ProductChannelEnum_ProductChannel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_channel_ab1de09ee91c448a, []int{0, 0} +} + +// Locality of a product offer. 
+type ProductChannelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductChannelEnum) Reset() { *m = ProductChannelEnum{} } +func (m *ProductChannelEnum) String() string { return proto.CompactTextString(m) } +func (*ProductChannelEnum) ProtoMessage() {} +func (*ProductChannelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_channel_ab1de09ee91c448a, []int{0} +} +func (m *ProductChannelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductChannelEnum.Unmarshal(m, b) +} +func (m *ProductChannelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductChannelEnum.Marshal(b, m, deterministic) +} +func (dst *ProductChannelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductChannelEnum.Merge(dst, src) +} +func (m *ProductChannelEnum) XXX_Size() int { + return xxx_messageInfo_ProductChannelEnum.Size(m) +} +func (m *ProductChannelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductChannelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductChannelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductChannelEnum)(nil), "google.ads.googleads.v1.enums.ProductChannelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductChannelEnum_ProductChannel", ProductChannelEnum_ProductChannel_name, ProductChannelEnum_ProductChannel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_channel.proto", fileDescriptor_product_channel_ab1de09ee91c448a) +} + +var fileDescriptor_product_channel_ab1de09ee91c448a = []byte{ + // 299 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x14, 0xfe, 0xad, 0xe3, 0x37, 0x31, 0x03, 0x2d, 0xf1, 0x4e, 0xdc, 0xc5, 0xf6, 0x00, 0x09, 0x65, + 0x77, 0xf1, 0x2a, 0x9b, 0x75, 0x0c, 0x47, 0x56, 0x90, 0x4d, 0xd0, 0x82, 0xc4, 0xa6, 0xc4, 0x42, + 0x9b, 0x94, 0xa6, 0xdd, 0x03, 0x79, 0xe9, 0xa3, 0xf8, 0x24, 0xe2, 0x53, 0x48, 0x13, 0x5b, 0xd8, + 0x85, 0xde, 0x84, 0x8f, 0xf3, 0xfd, 0xc9, 0x77, 0x0e, 0x98, 0x4b, 0xad, 0x65, 0x9e, 0x62, 0x2e, + 0x0c, 0x76, 0xb0, 0x45, 0x87, 0x00, 0xa7, 0xaa, 0x29, 0x0c, 0x2e, 0x2b, 0x2d, 0x9a, 0xa4, 0x7e, + 0x4e, 0x5e, 0xb9, 0x52, 0x69, 0x8e, 0xca, 0x4a, 0xd7, 0x1a, 0x4e, 0x9c, 0x12, 0x71, 0x61, 0x50, + 0x6f, 0x42, 0x87, 0x00, 0x59, 0xd3, 0xe5, 0x55, 0x97, 0x59, 0x66, 0x98, 0x2b, 0xa5, 0x6b, 0x5e, + 0x67, 0x5a, 0x19, 0x67, 0x9e, 0x3d, 0x01, 0x18, 0xb9, 0xd4, 0xa5, 0x0b, 0x0d, 0x55, 0x53, 0xcc, + 0x42, 0x70, 0x76, 0x3c, 0x85, 0xe7, 0x60, 0xbc, 0x63, 0xf7, 0x51, 0xb8, 0x5c, 0xdf, 0xae, 0xc3, + 0x1b, 0xff, 0x1f, 0x1c, 0x83, 0x93, 0x1d, 0xbb, 0x63, 0xdb, 0x07, 0xe6, 0x0f, 0x20, 0x00, 0xa3, + 0x2d, 0xdb, 0xac, 0x59, 0xe8, 0x7b, 0xf0, 0x14, 0xfc, 0xdf, 0x6c, 0x97, 0x74, 0xe3, 0x0f, 0x17, + 0x9f, 0x03, 0x30, 0x4d, 0x74, 0x81, 0xfe, 0x2c, 0xb8, 0xb8, 0x38, 0xfe, 0x2a, 0x6a, 0x7b, 0x45, + 0x83, 0xc7, 0xc5, 0x8f, 0x4b, 0xea, 0x9c, 0x2b, 0x89, 0x74, 0x25, 0xb1, 0x4c, 0x95, 0x6d, 0xdd, + 0xdd, 0xa6, 0xcc, 0xcc, 0x2f, 0xa7, 0xba, 0xb6, 0xef, 0x9b, 0x37, 0x5c, 0x51, 0xfa, 0xee, 0x4d, + 0x56, 0x2e, 0x8a, 0x0a, 0x83, 0x1c, 0x6c, 0xd1, 0x3e, 0x40, 0xed, 0xb2, 0xe6, 0xa3, 0xe3, 0x63, + 0x2a, 0x4c, 0xdc, 0xf3, 0xf1, 0x3e, 0x88, 0x2d, 0xff, 0xe5, 0x4d, 0xdd, 0x90, 0x10, 0x2a, 0x0c, + 0x21, 0xbd, 0x82, 0x90, 0x7d, 0x40, 0x88, 0xd5, 0xbc, 0x8c, 0x6c, 0xb1, 0xf9, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x9a, 0xdc, 0x0c, 0x4c, 0xc2, 0x01, 0x00, 0x00, +} 
diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel_exclusivity.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel_exclusivity.pb.go new file mode 100644 index 0000000..a7817dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_channel_exclusivity.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/product_channel_exclusivity.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the availability of a product offer. +type ProductChannelExclusivityEnum_ProductChannelExclusivity int32 + +const ( + // Not specified. + ProductChannelExclusivityEnum_UNSPECIFIED ProductChannelExclusivityEnum_ProductChannelExclusivity = 0 + // Used for return value only. Represents value unknown in this version. + ProductChannelExclusivityEnum_UNKNOWN ProductChannelExclusivityEnum_ProductChannelExclusivity = 1 + // The item is sold through one channel only, either local stores or online + // as indicated by its ProductChannel. + ProductChannelExclusivityEnum_SINGLE_CHANNEL ProductChannelExclusivityEnum_ProductChannelExclusivity = 2 + // The item is matched to its online or local stores counterpart, indicating + // it is available for purchase in both ShoppingProductChannels. + ProductChannelExclusivityEnum_MULTI_CHANNEL ProductChannelExclusivityEnum_ProductChannelExclusivity = 3 +) + +var ProductChannelExclusivityEnum_ProductChannelExclusivity_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SINGLE_CHANNEL", + 3: "MULTI_CHANNEL", +} +var ProductChannelExclusivityEnum_ProductChannelExclusivity_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SINGLE_CHANNEL": 2, + "MULTI_CHANNEL": 3, +} + +func (x ProductChannelExclusivityEnum_ProductChannelExclusivity) String() string { + return proto.EnumName(ProductChannelExclusivityEnum_ProductChannelExclusivity_name, int32(x)) +} +func (ProductChannelExclusivityEnum_ProductChannelExclusivity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_channel_exclusivity_e83d82105cf52bc2, []int{0, 0} +} + +// Availability of a product offer. 
+type ProductChannelExclusivityEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductChannelExclusivityEnum) Reset() { *m = ProductChannelExclusivityEnum{} } +func (m *ProductChannelExclusivityEnum) String() string { return proto.CompactTextString(m) } +func (*ProductChannelExclusivityEnum) ProtoMessage() {} +func (*ProductChannelExclusivityEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_channel_exclusivity_e83d82105cf52bc2, []int{0} +} +func (m *ProductChannelExclusivityEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductChannelExclusivityEnum.Unmarshal(m, b) +} +func (m *ProductChannelExclusivityEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductChannelExclusivityEnum.Marshal(b, m, deterministic) +} +func (dst *ProductChannelExclusivityEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductChannelExclusivityEnum.Merge(dst, src) +} +func (m *ProductChannelExclusivityEnum) XXX_Size() int { + return xxx_messageInfo_ProductChannelExclusivityEnum.Size(m) +} +func (m *ProductChannelExclusivityEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductChannelExclusivityEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductChannelExclusivityEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductChannelExclusivityEnum)(nil), "google.ads.googleads.v1.enums.ProductChannelExclusivityEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductChannelExclusivityEnum_ProductChannelExclusivity", ProductChannelExclusivityEnum_ProductChannelExclusivity_name, ProductChannelExclusivityEnum_ProductChannelExclusivity_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_channel_exclusivity.proto", fileDescriptor_product_channel_exclusivity_e83d82105cf52bc2) +} + +var fileDescriptor_product_channel_exclusivity_e83d82105cf52bc2 = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0xb5, 0x1d, 0x28, 0x64, 0xa8, 0xb3, 0x37, 0xc5, 0x09, 0xdb, 0x0f, 0x48, 0x29, 0xde, 0xe2, + 0x41, 0xb2, 0x19, 0x67, 0x71, 0xc6, 0xc2, 0xdc, 0x04, 0x29, 0xcc, 0xd8, 0x94, 0x58, 0xe8, 0x92, + 0xb2, 0xb4, 0x43, 0x8f, 0xfe, 0x15, 0x8f, 0xfe, 0x14, 0x7f, 0x8a, 0x57, 0xff, 0x80, 0x34, 0x71, + 0xf5, 0x34, 0x2f, 0xe1, 0x91, 0xef, 0x7d, 0xef, 0x7d, 0xef, 0x81, 0x73, 0xa1, 0x94, 0xc8, 0x53, + 0x9f, 0x71, 0xed, 0x5b, 0x58, 0xa3, 0x55, 0xe0, 0xa7, 0xb2, 0x5a, 0x68, 0xbf, 0x58, 0x2a, 0x5e, + 0x25, 0xe5, 0x3c, 0x79, 0x66, 0x52, 0xa6, 0xf9, 0x3c, 0x7d, 0x49, 0xf2, 0x4a, 0x67, 0xab, 0xac, + 0x7c, 0x85, 0xc5, 0x52, 0x95, 0xca, 0xeb, 0xda, 0x2d, 0xc8, 0xb8, 0x86, 0x8d, 0x00, 0x5c, 0x05, + 0xd0, 0x08, 0x1c, 0x1d, 0xaf, 0xf5, 0x8b, 0xcc, 0x67, 0x52, 0xaa, 0x92, 0x95, 0x99, 0x92, 0xda, + 0x2e, 0xf7, 0xdf, 0x1c, 0xd0, 0x8d, 0xac, 0xc5, 0xd0, 0x3a, 0x90, 0x3f, 0x03, 0x22, 0xab, 0x45, + 0xff, 0x11, 0x1c, 0x6e, 0x24, 0x78, 0xfb, 0xa0, 0x3d, 0xa5, 0x93, 0x88, 0x0c, 0xc3, 0xcb, 0x90, + 0x5c, 0x74, 0xb6, 0xbc, 0x36, 0xd8, 0x99, 0xd2, 0x6b, 0x7a, 0x7b, 0x4f, 0x3b, 0x8e, 0xe7, 0x81, + 0xbd, 0x49, 0x48, 0x47, 0x63, 0x32, 0x1f, 0x5e, 0x61, 0x4a, 0xc9, 0xb8, 0xe3, 0x7a, 0x07, 0x60, + 0xf7, 0x66, 0x3a, 0xbe, 0x0b, 0x9b, 0xaf, 0xd6, 0xe0, 0xdb, 0x01, 0xbd, 0x44, 0x2d, 0xe0, 0xbf, + 0x39, 0x06, 0x27, 0x1b, 0xaf, 0x88, 0xea, 0x24, 0x91, 0xf3, 0x30, 0xf8, 0x15, 0x10, 0x2a, 0x67, + 0x52, 0x40, 0xb5, 0x14, 0xbe, 0x48, 0xa5, 
0xc9, 0xb9, 0x6e, 0xb6, 0xc8, 0xf4, 0x86, 0xa2, 0xcf, + 0xcc, 0xfb, 0xee, 0xb6, 0x46, 0x18, 0x7f, 0xb8, 0xdd, 0x91, 0x95, 0xc2, 0x5c, 0x43, 0x0b, 0x6b, + 0x34, 0x0b, 0x60, 0x5d, 0x89, 0xfe, 0x5c, 0xcf, 0x63, 0xcc, 0x75, 0xdc, 0xcc, 0xe3, 0x59, 0x10, + 0x9b, 0xf9, 0x97, 0xdb, 0xb3, 0x9f, 0x08, 0x61, 0xae, 0x11, 0x6a, 0x18, 0x08, 0xcd, 0x02, 0x84, + 0x0c, 0xe7, 0x69, 0xdb, 0x1c, 0x76, 0xfa, 0x13, 0x00, 0x00, 0xff, 0xff, 0xc9, 0x87, 0xe2, 0x2d, + 0x00, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_condition.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_condition.pb.go new file mode 100644 index 0000000..f8a7f8a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_condition.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/product_condition.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the condition of a product offer. +type ProductConditionEnum_ProductCondition int32 + +const ( + // Not specified. + ProductConditionEnum_UNSPECIFIED ProductConditionEnum_ProductCondition = 0 + // Used for return value only. Represents value unknown in this version. + ProductConditionEnum_UNKNOWN ProductConditionEnum_ProductCondition = 1 + // The product condition is new. + ProductConditionEnum_NEW ProductConditionEnum_ProductCondition = 3 + // The product condition is refurbished. + ProductConditionEnum_REFURBISHED ProductConditionEnum_ProductCondition = 4 + // The product condition is used. + ProductConditionEnum_USED ProductConditionEnum_ProductCondition = 5 +) + +var ProductConditionEnum_ProductCondition_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "NEW", + 4: "REFURBISHED", + 5: "USED", +} +var ProductConditionEnum_ProductCondition_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NEW": 3, + "REFURBISHED": 4, + "USED": 5, +} + +func (x ProductConditionEnum_ProductCondition) String() string { + return proto.EnumName(ProductConditionEnum_ProductCondition_name, int32(x)) +} +func (ProductConditionEnum_ProductCondition) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_condition_fad86b4083272fb9, []int{0, 0} +} + +// Condition of a product offer. 
+type ProductConditionEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductConditionEnum) Reset() { *m = ProductConditionEnum{} } +func (m *ProductConditionEnum) String() string { return proto.CompactTextString(m) } +func (*ProductConditionEnum) ProtoMessage() {} +func (*ProductConditionEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_condition_fad86b4083272fb9, []int{0} +} +func (m *ProductConditionEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductConditionEnum.Unmarshal(m, b) +} +func (m *ProductConditionEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductConditionEnum.Marshal(b, m, deterministic) +} +func (dst *ProductConditionEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductConditionEnum.Merge(dst, src) +} +func (m *ProductConditionEnum) XXX_Size() int { + return xxx_messageInfo_ProductConditionEnum.Size(m) +} +func (m *ProductConditionEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductConditionEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductConditionEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductConditionEnum)(nil), "google.ads.googleads.v1.enums.ProductConditionEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductConditionEnum_ProductCondition", ProductConditionEnum_ProductCondition_name, ProductConditionEnum_ProductCondition_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_condition.proto", fileDescriptor_product_condition_fad86b4083272fb9) +} + +var fileDescriptor_product_condition_fad86b4083272fb9 = []byte{ + // 308 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x7f, 0x74, 0x92, 0x1d, 0x0c, 0x45, 0x2f, 0xe2, 0x0e, 0xdb, 0x03, 0x24, 0x14, 0xf1, + 0x12, 0x4f, 0xed, 0x96, 0xcd, 0x21, 0xd4, 0xb2, 0xd9, 0x0d, 0xa4, 0x20, 0x75, 0x19, 0xa1, 0xd0, + 0x26, 0xa5, 0x69, 0xf7, 0x40, 0x1e, 0x7d, 0x14, 0x1f, 0x65, 0x4f, 0x21, 0x49, 0x6c, 0x0f, 0x03, + 0xbd, 0x84, 0x1f, 0xdf, 0xef, 0x4f, 0x7e, 0xdf, 0x07, 0x1e, 0xb8, 0x94, 0x3c, 0xdb, 0xe3, 0x84, + 0x29, 0x6c, 0xa1, 0x46, 0x07, 0x17, 0xef, 0x45, 0x9d, 0x2b, 0x5c, 0x94, 0x92, 0xd5, 0xbb, 0xea, + 0x7d, 0x27, 0x05, 0x4b, 0xab, 0x54, 0x0a, 0x54, 0x94, 0xb2, 0x92, 0xce, 0xc8, 0x6a, 0x51, 0xc2, + 0x14, 0x6a, 0x6d, 0xe8, 0xe0, 0x22, 0x63, 0xbb, 0xbd, 0x6b, 0x52, 0x8b, 0x14, 0x27, 0x42, 0xc8, + 0x2a, 0xd1, 0x5e, 0x65, 0xcd, 0x93, 0x0c, 0x5c, 0x87, 0x36, 0x77, 0xda, 0xc4, 0x52, 0x51, 0xe7, + 0x93, 0x57, 0x00, 0x4f, 0xe7, 0xce, 0x15, 0x18, 0x46, 0xc1, 0x3a, 0xa4, 0xd3, 0xe5, 0x7c, 0x49, + 0x67, 0xf0, 0xcc, 0x19, 0x82, 0x41, 0x14, 0x3c, 0x07, 0x2f, 0xdb, 0x00, 0x76, 0x9c, 0x01, 0xe8, + 0x05, 0x74, 0x0b, 0x7b, 0x5a, 0xb6, 0xa2, 0xf3, 0x68, 0xe5, 0x2f, 0xd7, 0x4f, 0x74, 0x06, 0xfb, + 0xce, 0x25, 0xe8, 0x47, 0x6b, 0x3a, 0x83, 0xe7, 0xfe, 0xb1, 0x03, 0xc6, 0x3b, 0x99, 0xa3, 0x7f, + 0x1b, 0xfb, 0x37, 0xa7, 0x3f, 0x87, 0xba, 0x6a, 0xd8, 0x79, 0xf3, 0x7f, 0x7d, 0x5c, 0x66, 0x89, + 0xe0, 0x48, 0x96, 0x1c, 0xf3, 0xbd, 0x30, 0x8b, 0x34, 0x07, 0x2b, 0x52, 0xf5, 0xc7, 0xfd, 0x1e, + 0xcd, 0xfb, 0xd9, 0xed, 0x2d, 0x3c, 0xef, 0xab, 0x3b, 0x5a, 0xd8, 0x28, 0x8f, 0x29, 0x64, 0xa1, + 0x46, 0x1b, 0x17, 0xe9, 0xed, 0xd5, 0x77, 0xc3, 0xc7, 0x1e, 0x53, 0x71, 0xcb, 0xc7, 0x1b, 0x37, + 0x36, 0xfc, 0xb1, 0x3b, 0xb6, 0x43, 0x42, 0x3c, 0xa6, 0x08, 0x69, 0x15, 0x84, 0x6c, 0x5c, 0x42, + 0x8c, 0xe6, 
0xe3, 0xc2, 0x14, 0xbb, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x12, 0x28, 0x89, 0xbc, + 0xd7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_type_level.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_type_level.pb.go new file mode 100644 index 0000000..cf4d490 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/product_type_level.pb.go @@ -0,0 +1,130 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/product_type_level.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the level of the type of a product offer. +type ProductTypeLevelEnum_ProductTypeLevel int32 + +const ( + // Not specified. + ProductTypeLevelEnum_UNSPECIFIED ProductTypeLevelEnum_ProductTypeLevel = 0 + // Used for return value only. Represents value unknown in this version. + ProductTypeLevelEnum_UNKNOWN ProductTypeLevelEnum_ProductTypeLevel = 1 + // Level 1. + ProductTypeLevelEnum_LEVEL1 ProductTypeLevelEnum_ProductTypeLevel = 7 + // Level 2. + ProductTypeLevelEnum_LEVEL2 ProductTypeLevelEnum_ProductTypeLevel = 8 + // Level 3. + ProductTypeLevelEnum_LEVEL3 ProductTypeLevelEnum_ProductTypeLevel = 9 + // Level 4. + ProductTypeLevelEnum_LEVEL4 ProductTypeLevelEnum_ProductTypeLevel = 10 + // Level 5. + ProductTypeLevelEnum_LEVEL5 ProductTypeLevelEnum_ProductTypeLevel = 11 +) + +var ProductTypeLevelEnum_ProductTypeLevel_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 7: "LEVEL1", + 8: "LEVEL2", + 9: "LEVEL3", + 10: "LEVEL4", + 11: "LEVEL5", +} +var ProductTypeLevelEnum_ProductTypeLevel_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LEVEL1": 7, + "LEVEL2": 8, + "LEVEL3": 9, + "LEVEL4": 10, + "LEVEL5": 11, +} + +func (x ProductTypeLevelEnum_ProductTypeLevel) String() string { + return proto.EnumName(ProductTypeLevelEnum_ProductTypeLevel_name, int32(x)) +} +func (ProductTypeLevelEnum_ProductTypeLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_type_level_f0ec3feab93e994a, []int{0, 0} +} + +// Level of the type of a product offer. 
+type ProductTypeLevelEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductTypeLevelEnum) Reset() { *m = ProductTypeLevelEnum{} } +func (m *ProductTypeLevelEnum) String() string { return proto.CompactTextString(m) } +func (*ProductTypeLevelEnum) ProtoMessage() {} +func (*ProductTypeLevelEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_product_type_level_f0ec3feab93e994a, []int{0} +} +func (m *ProductTypeLevelEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductTypeLevelEnum.Unmarshal(m, b) +} +func (m *ProductTypeLevelEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductTypeLevelEnum.Marshal(b, m, deterministic) +} +func (dst *ProductTypeLevelEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductTypeLevelEnum.Merge(dst, src) +} +func (m *ProductTypeLevelEnum) XXX_Size() int { + return xxx_messageInfo_ProductTypeLevelEnum.Size(m) +} +func (m *ProductTypeLevelEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProductTypeLevelEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductTypeLevelEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProductTypeLevelEnum)(nil), "google.ads.googleads.v1.enums.ProductTypeLevelEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProductTypeLevelEnum_ProductTypeLevel", ProductTypeLevelEnum_ProductTypeLevel_name, ProductTypeLevelEnum_ProductTypeLevel_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/product_type_level.proto", fileDescriptor_product_type_level_f0ec3feab93e994a) +} + +var fileDescriptor_product_type_level_f0ec3feab93e994a = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4a, 0x03, 0x31, + 0x18, 0x85, 0x9d, 0x0a, 0xad, 0xa6, 0x0b, 0xc3, 0xa0, 0x1b, 0xb1, 0x8b, 0xf6, 0x00, 0x09, 0xb1, + 0xea, 0x22, 0xae, 0xa6, 0x3a, 0x96, 0x62, 0x19, 0x07, 0xb4, 0x23, 0xc8, 0x40, 0x19, 0x9b, 0x10, + 0x0a, 0xd3, 0x24, 0x34, 0xd3, 0x42, 0xf7, 0x9e, 0xc4, 0xa5, 0x47, 0xf1, 0x28, 0x3d, 0x85, 0x4c, + 0x62, 0xb3, 0x28, 0xe8, 0x26, 0x7c, 0xe4, 0xff, 0xdf, 0xe3, 0x7f, 0x0f, 0xdc, 0x08, 0xa5, 0x44, + 0xc9, 0x71, 0xc1, 0x0c, 0x76, 0x58, 0xd3, 0x9a, 0x60, 0x2e, 0x57, 0x0b, 0x83, 0xf5, 0x52, 0xb1, + 0xd5, 0xac, 0x9a, 0x56, 0x1b, 0xcd, 0xa7, 0x25, 0x5f, 0xf3, 0x12, 0xe9, 0xa5, 0xaa, 0x54, 0xd8, + 0x71, 0xcb, 0xa8, 0x60, 0x06, 0x79, 0x1d, 0x5a, 0x13, 0x64, 0x75, 0xe7, 0x17, 0x3b, 0x5b, 0x3d, + 0xc7, 0x85, 0x94, 0xaa, 0x2a, 0xaa, 0xb9, 0x92, 0xc6, 0x89, 0x7b, 0x1f, 0x01, 0x38, 0x4d, 0x9d, + 0xf3, 0xcb, 0x46, 0xf3, 0x71, 0xed, 0x1b, 0xcb, 0xd5, 0xa2, 0x57, 0x02, 0xb8, 0xff, 0x1f, 0x9e, + 0x80, 0xf6, 0x24, 0x79, 0x4e, 0xe3, 0xbb, 0xd1, 0xc3, 0x28, 0xbe, 0x87, 0x07, 0x61, 0x1b, 0xb4, + 0x26, 0xc9, 0x63, 0xf2, 0xf4, 0x9a, 0xc0, 0x20, 0x04, 0xa0, 0x39, 0x8e, 0xb3, 0x78, 0x4c, 0x60, + 0xcb, 0xf3, 0x25, 0x3c, 0xf2, 0xdc, 0x87, 0xc7, 0x9e, 0xaf, 0x20, 0xf0, 0x7c, 0x0d, 0xdb, 0x83, + 0x6d, 0x00, 0xba, 0x33, 0xb5, 0x40, 0xff, 0x46, 0x19, 0x9c, 0xed, 0x5f, 0x94, 0xd6, 0x19, 0xd2, + 0xe0, 0x6d, 0xf0, 0xab, 0x13, 0xaa, 0x2c, 0xa4, 0x40, 0x6a, 0x29, 0xb0, 0xe0, 0xd2, 0x26, 0xdc, + 0x55, 0xa9, 0xe7, 0xe6, 0x8f, 0x66, 0x6f, 0xed, 0xfb, 0xd9, 0x38, 0x1c, 0x46, 0xd1, 0x57, 0xa3, + 0x33, 0x74, 0x56, 0x11, 0x33, 0xc8, 0x61, 0x4d, 0x19, 0x41, 0x75, 0x2b, 0xe6, 0x7b, 0x37, 0xcf, + 0x23, 0x66, 0x72, 0x3f, 0xcf, 0x33, 0x92, 0xdb, 0xf9, 0xb6, 0xd1, 0x75, 0x9f, 0x94, 0x46, 0xcc, + 0x50, 
0xea, 0x37, 0x28, 0xcd, 0x08, 0xa5, 0x76, 0xe7, 0xbd, 0x69, 0x0f, 0xeb, 0xff, 0x04, 0x00, + 0x00, 0xff, 0xff, 0xe0, 0xe7, 0xfa, 0x72, 0xf1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_discount_modifier.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_discount_modifier.pb.go new file mode 100644 index 0000000..c658f25 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_discount_modifier.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/promotion_extension_discount_modifier.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A promotion extension discount modifier. +type PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier int32 + +const ( + // Not specified. + PromotionExtensionDiscountModifierEnum_UNSPECIFIED PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier = 0 + // Used for return value only. Represents value unknown in this version. + PromotionExtensionDiscountModifierEnum_UNKNOWN PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier = 1 + // 'Up to'. + PromotionExtensionDiscountModifierEnum_UP_TO PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier = 2 +) + +var PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "UP_TO", +} +var PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "UP_TO": 2, +} + +func (x PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier) String() string { + return proto.EnumName(PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier_name, int32(x)) +} +func (PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_promotion_extension_discount_modifier_13658de280b60b6c, []int{0, 0} +} + +// Container for enum describing possible a promotion extension +// discount modifier. 
+type PromotionExtensionDiscountModifierEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PromotionExtensionDiscountModifierEnum) Reset() { + *m = PromotionExtensionDiscountModifierEnum{} +} +func (m *PromotionExtensionDiscountModifierEnum) String() string { return proto.CompactTextString(m) } +func (*PromotionExtensionDiscountModifierEnum) ProtoMessage() {} +func (*PromotionExtensionDiscountModifierEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_promotion_extension_discount_modifier_13658de280b60b6c, []int{0} +} +func (m *PromotionExtensionDiscountModifierEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PromotionExtensionDiscountModifierEnum.Unmarshal(m, b) +} +func (m *PromotionExtensionDiscountModifierEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PromotionExtensionDiscountModifierEnum.Marshal(b, m, deterministic) +} +func (dst *PromotionExtensionDiscountModifierEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PromotionExtensionDiscountModifierEnum.Merge(dst, src) +} +func (m *PromotionExtensionDiscountModifierEnum) XXX_Size() int { + return xxx_messageInfo_PromotionExtensionDiscountModifierEnum.Size(m) +} +func (m *PromotionExtensionDiscountModifierEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PromotionExtensionDiscountModifierEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PromotionExtensionDiscountModifierEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PromotionExtensionDiscountModifierEnum)(nil), "google.ads.googleads.v1.enums.PromotionExtensionDiscountModifierEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier", PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier_name, PromotionExtensionDiscountModifierEnum_PromotionExtensionDiscountModifier_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/promotion_extension_discount_modifier.proto", fileDescriptor_promotion_extension_discount_modifier_13658de280b60b6c) +} + +var fileDescriptor_promotion_extension_discount_modifier_13658de280b60b6c = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x86, 0x69, 0x10, 0x20, 0xdc, 0x81, 0x2a, 0x23, 0xa2, 0x43, 0x3b, 0xc0, 0x66, 0x2b, 0x62, + 0x33, 0x53, 0x4a, 0x43, 0x55, 0xa1, 0xa6, 0x91, 0xa0, 0x45, 0x42, 0x91, 0xaa, 0x50, 0x1b, 0xcb, + 0x52, 0xe3, 0x8b, 0xe2, 0xb4, 0xf0, 0x3c, 0x8c, 0x3c, 0x0a, 0x8f, 0xc2, 0x33, 0x30, 0x20, 0xdb, + 0x49, 0x36, 0xe8, 0x62, 0xfd, 0xf2, 0xdd, 0xfd, 0xdf, 0xdd, 0x8f, 0xa6, 0x02, 0x40, 0x6c, 0x38, + 0xc9, 0x98, 0x26, 0x4e, 0x1a, 0xb5, 0x0b, 0x08, 0x57, 0xdb, 0x5c, 0x93, 0xa2, 0x84, 0x1c, 0x2a, + 0x09, 0x6a, 0xc5, 0xdf, 0x2b, 0xae, 0xb4, 0x51, 0x4c, 0xea, 0x35, 0x6c, 0x55, 0xb5, 0xca, 0x81, + 0xc9, 0x57, 0xc9, 0x4b, 0x5c, 0x94, 0x50, 0x81, 0xdf, 0x77, 0xf3, 0x38, 0x63, 0x1a, 0xb7, 0x56, + 0x78, 0x17, 0x60, 0x6b, 0x75, 0x7e, 0xd1, 0x90, 0x0a, 0x49, 0x32, 0xa5, 0xa0, 0xca, 0x8c, 0xaf, + 0x76, 0xc3, 0xc3, 0x37, 0x74, 0x99, 0x34, 0xac, 0xa8, 0x41, 0x8d, 0x6b, 0xd2, 0xac, 0x06, 0x45, + 0x6a, 0x9b, 0x0f, 0x67, 0x68, 0xb8, 0xbf, 0xd3, 0x3f, 0x43, 0xdd, 0x45, 0xfc, 0x90, 0x44, 0xb7, + 0xd3, 0xbb, 0x69, 0x34, 0xee, 0x1d, 0xf8, 0x5d, 0x74, 0xb2, 0x88, 0xef, 0xe3, 0xf9, 0x53, 0xdc, + 0xeb, 0xf8, 0xa7, 0xe8, 0x68, 0x91, 0xac, 0x1e, 0xe7, 0x3d, 0x6f, 
0xf4, 0xd3, 0x41, 0x83, 0x35, + 0xe4, 0xf8, 0xdf, 0xe5, 0x47, 0x57, 0xfb, 0x91, 0x89, 0xb9, 0x23, 0xe9, 0x3c, 0x8f, 0x6a, 0x27, + 0x01, 0x9b, 0x4c, 0x09, 0x0c, 0xa5, 0x20, 0x82, 0x2b, 0x7b, 0x65, 0x93, 0x70, 0x21, 0xf5, 0x1f, + 0x81, 0xdf, 0xd8, 0xf7, 0xc3, 0x3b, 0x9c, 0x84, 0xe1, 0xa7, 0xd7, 0x9f, 0x38, 0xab, 0x90, 0x69, + 0xec, 0xa4, 0x51, 0xcb, 0x00, 0x9b, 0x20, 0xf4, 0x57, 0x53, 0x4f, 0x43, 0xa6, 0xd3, 0xb6, 0x9e, + 0x2e, 0x83, 0xd4, 0xd6, 0xbf, 0xbd, 0x81, 0xfb, 0xa4, 0x34, 0x64, 0x9a, 0xd2, 0xb6, 0x83, 0xd2, + 0x65, 0x40, 0xa9, 0xed, 0x79, 0x39, 0xb6, 0x8b, 0x5d, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xe4, + 0x06, 0xad, 0xad, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_occasion.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_occasion.pb.go new file mode 100644 index 0000000..f88b41b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_extension_occasion.pb.go @@ -0,0 +1,282 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/promotion_extension_occasion.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A promotion extension occasion. +type PromotionExtensionOccasionEnum_PromotionExtensionOccasion int32 + +const ( + // Not specified. + PromotionExtensionOccasionEnum_UNSPECIFIED PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 0 + // Used for return value only. Represents value unknown in this version. + PromotionExtensionOccasionEnum_UNKNOWN PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 1 + // New Year's. + PromotionExtensionOccasionEnum_NEW_YEARS PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 2 + // Chinese New Year. + PromotionExtensionOccasionEnum_CHINESE_NEW_YEAR PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 3 + // Valentine's Day. + PromotionExtensionOccasionEnum_VALENTINES_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 4 + // Easter. + PromotionExtensionOccasionEnum_EASTER PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 5 + // Mother's Day. + PromotionExtensionOccasionEnum_MOTHERS_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 6 + // Father's Day. + PromotionExtensionOccasionEnum_FATHERS_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 7 + // Labor Day. + PromotionExtensionOccasionEnum_LABOR_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 8 + // Back To School. + PromotionExtensionOccasionEnum_BACK_TO_SCHOOL PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 9 + // Halloween. + PromotionExtensionOccasionEnum_HALLOWEEN PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 10 + // Black Friday. 
+ PromotionExtensionOccasionEnum_BLACK_FRIDAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 11 + // Cyber Monday. + PromotionExtensionOccasionEnum_CYBER_MONDAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 12 + // Christmas. + PromotionExtensionOccasionEnum_CHRISTMAS PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 13 + // Boxing Day. + PromotionExtensionOccasionEnum_BOXING_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 14 + // Independence Day in any country. + PromotionExtensionOccasionEnum_INDEPENDENCE_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 15 + // National Day in any country. + PromotionExtensionOccasionEnum_NATIONAL_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 16 + // End of any season. + PromotionExtensionOccasionEnum_END_OF_SEASON PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 17 + // Winter Sale. + PromotionExtensionOccasionEnum_WINTER_SALE PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 18 + // Summer sale. + PromotionExtensionOccasionEnum_SUMMER_SALE PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 19 + // Fall Sale. + PromotionExtensionOccasionEnum_FALL_SALE PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 20 + // Spring Sale. + PromotionExtensionOccasionEnum_SPRING_SALE PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 21 + // Ramadan. + PromotionExtensionOccasionEnum_RAMADAN PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 22 + // Eid al-Fitr. + PromotionExtensionOccasionEnum_EID_AL_FITR PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 23 + // Eid al-Adha. + PromotionExtensionOccasionEnum_EID_AL_ADHA PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 24 + // Singles Day. + PromotionExtensionOccasionEnum_SINGLES_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 25 + // Women's Day. + PromotionExtensionOccasionEnum_WOMENS_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 26 + // Holi. + PromotionExtensionOccasionEnum_HOLI PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 27 + // Parent's Day. + PromotionExtensionOccasionEnum_PARENTS_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 28 + // St. Nicholas Day. + PromotionExtensionOccasionEnum_ST_NICHOLAS_DAY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 29 + // Carnival. + PromotionExtensionOccasionEnum_CARNIVAL PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 30 + // Epiphany, also known as Three Kings' Day. + PromotionExtensionOccasionEnum_EPIPHANY PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 31 + // Rosh Hashanah. + PromotionExtensionOccasionEnum_ROSH_HASHANAH PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 32 + // Passover. + PromotionExtensionOccasionEnum_PASSOVER PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 33 + // Hanukkah. + PromotionExtensionOccasionEnum_HANUKKAH PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 34 + // Diwali. + PromotionExtensionOccasionEnum_DIWALI PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 35 + // Navratri. + PromotionExtensionOccasionEnum_NAVRATRI PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 36 + // Available in Thai: Songkran. + PromotionExtensionOccasionEnum_SONGKRAN PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 37 + // Available in Japanese: Year-end Gift. 
+ PromotionExtensionOccasionEnum_YEAR_END_GIFT PromotionExtensionOccasionEnum_PromotionExtensionOccasion = 38 +) + +var PromotionExtensionOccasionEnum_PromotionExtensionOccasion_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NEW_YEARS", + 3: "CHINESE_NEW_YEAR", + 4: "VALENTINES_DAY", + 5: "EASTER", + 6: "MOTHERS_DAY", + 7: "FATHERS_DAY", + 8: "LABOR_DAY", + 9: "BACK_TO_SCHOOL", + 10: "HALLOWEEN", + 11: "BLACK_FRIDAY", + 12: "CYBER_MONDAY", + 13: "CHRISTMAS", + 14: "BOXING_DAY", + 15: "INDEPENDENCE_DAY", + 16: "NATIONAL_DAY", + 17: "END_OF_SEASON", + 18: "WINTER_SALE", + 19: "SUMMER_SALE", + 20: "FALL_SALE", + 21: "SPRING_SALE", + 22: "RAMADAN", + 23: "EID_AL_FITR", + 24: "EID_AL_ADHA", + 25: "SINGLES_DAY", + 26: "WOMENS_DAY", + 27: "HOLI", + 28: "PARENTS_DAY", + 29: "ST_NICHOLAS_DAY", + 30: "CARNIVAL", + 31: "EPIPHANY", + 32: "ROSH_HASHANAH", + 33: "PASSOVER", + 34: "HANUKKAH", + 35: "DIWALI", + 36: "NAVRATRI", + 37: "SONGKRAN", + 38: "YEAR_END_GIFT", +} +var PromotionExtensionOccasionEnum_PromotionExtensionOccasion_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NEW_YEARS": 2, + "CHINESE_NEW_YEAR": 3, + "VALENTINES_DAY": 4, + "EASTER": 5, + "MOTHERS_DAY": 6, + "FATHERS_DAY": 7, + "LABOR_DAY": 8, + "BACK_TO_SCHOOL": 9, + "HALLOWEEN": 10, + "BLACK_FRIDAY": 11, + "CYBER_MONDAY": 12, + "CHRISTMAS": 13, + "BOXING_DAY": 14, + "INDEPENDENCE_DAY": 15, + "NATIONAL_DAY": 16, + "END_OF_SEASON": 17, + "WINTER_SALE": 18, + "SUMMER_SALE": 19, + "FALL_SALE": 20, + "SPRING_SALE": 21, + "RAMADAN": 22, + "EID_AL_FITR": 23, + "EID_AL_ADHA": 24, + "SINGLES_DAY": 25, + "WOMENS_DAY": 26, + "HOLI": 27, + "PARENTS_DAY": 28, + "ST_NICHOLAS_DAY": 29, + "CARNIVAL": 30, + "EPIPHANY": 31, + "ROSH_HASHANAH": 32, + "PASSOVER": 33, + "HANUKKAH": 34, + "DIWALI": 35, + "NAVRATRI": 36, + "SONGKRAN": 37, + "YEAR_END_GIFT": 38, +} + +func (x PromotionExtensionOccasionEnum_PromotionExtensionOccasion) String() string { + return proto.EnumName(PromotionExtensionOccasionEnum_PromotionExtensionOccasion_name, int32(x)) +} +func (PromotionExtensionOccasionEnum_PromotionExtensionOccasion) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_promotion_extension_occasion_e0195af12e829776, []int{0, 0} +} + +// Container for enum describing a promotion extension occasion. 
+// For more information about the occasions please check: +// https://support.google.com/google-ads/answer/7367521 +type PromotionExtensionOccasionEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PromotionExtensionOccasionEnum) Reset() { *m = PromotionExtensionOccasionEnum{} } +func (m *PromotionExtensionOccasionEnum) String() string { return proto.CompactTextString(m) } +func (*PromotionExtensionOccasionEnum) ProtoMessage() {} +func (*PromotionExtensionOccasionEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_promotion_extension_occasion_e0195af12e829776, []int{0} +} +func (m *PromotionExtensionOccasionEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PromotionExtensionOccasionEnum.Unmarshal(m, b) +} +func (m *PromotionExtensionOccasionEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PromotionExtensionOccasionEnum.Marshal(b, m, deterministic) +} +func (dst *PromotionExtensionOccasionEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PromotionExtensionOccasionEnum.Merge(dst, src) +} +func (m *PromotionExtensionOccasionEnum) XXX_Size() int { + return xxx_messageInfo_PromotionExtensionOccasionEnum.Size(m) +} +func (m *PromotionExtensionOccasionEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PromotionExtensionOccasionEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PromotionExtensionOccasionEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PromotionExtensionOccasionEnum)(nil), "google.ads.googleads.v1.enums.PromotionExtensionOccasionEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PromotionExtensionOccasionEnum_PromotionExtensionOccasion", PromotionExtensionOccasionEnum_PromotionExtensionOccasion_name, PromotionExtensionOccasionEnum_PromotionExtensionOccasion_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/promotion_extension_occasion.proto", fileDescriptor_promotion_extension_occasion_e0195af12e829776) +} + +var fileDescriptor_promotion_extension_occasion_e0195af12e829776 = []byte{ + // 669 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xcb, 0x6e, 0xdb, 0x3a, + 0x10, 0xbd, 0x71, 0xde, 0xcc, 0x8b, 0x51, 0x72, 0x5f, 0xb9, 0x79, 0xdc, 0xe4, 0x3e, 0x96, 0x32, + 0x8c, 0xee, 0xd4, 0x4d, 0x47, 0x12, 0x6d, 0x11, 0xa6, 0x87, 0x02, 0x29, 0xdb, 0x75, 0x61, 0x40, + 0x70, 0x63, 0xc3, 0x08, 0x10, 0x4b, 0x46, 0xe4, 0x04, 0xfd, 0x95, 0x6e, 0xbb, 0xec, 0x37, 0xf4, + 0x0b, 0xfa, 0x29, 0x5d, 0xf7, 0x03, 0x8a, 0x91, 0xe2, 0xb4, 0x9b, 0x74, 0x63, 0xcc, 0x9c, 0x73, + 0xe6, 0x1c, 0x93, 0xe2, 0xb0, 0x57, 0xd3, 0x3c, 0x9f, 0xde, 0x4e, 0xea, 0xa3, 0x71, 0x51, 0xaf, + 0x4a, 0xaa, 0x1e, 0x1a, 0xf5, 0x49, 0x76, 0x3f, 0x2b, 0xea, 0xf3, 0xbb, 0x7c, 0x96, 0x2f, 0x6e, + 0xf2, 0x2c, 0x9d, 0xbc, 0x5b, 0x4c, 0xb2, 0x82, 0xaa, 0xfc, 0xfa, 0x7a, 0x44, 0x85, 0x3b, 0xbf, + 0xcb, 0x17, 0xb9, 0x73, 0x56, 0x8d, 0xb9, 0xa3, 0x71, 0xe1, 0x3e, 0x39, 0xb8, 0x0f, 0x0d, 0xb7, + 0x74, 0x38, 0x39, 0x5d, 0x06, 0xcc, 0x6f, 0xea, 0xa3, 0x2c, 0xcb, 0x17, 0x23, 0xb2, 0x2b, 0xaa, + 0xe1, 0xab, 0x4f, 0xeb, 0xec, 0x3c, 0x5e, 0x66, 0x88, 0x65, 0x84, 0x7e, 0x4c, 0x10, 0xd9, 0xfd, + 0xec, 0xea, 0xfd, 0x3a, 0x3b, 0x79, 0x5e, 0xe2, 0x1c, 0xb0, 0x9d, 0x2e, 0xda, 0x58, 0x04, 0xb2, + 0x29, 0x45, 0xc8, 0x7f, 0x71, 0x76, 0xd8, 0x66, 0x17, 0xdb, 0xa8, 0xfb, 0xc8, 0x57, 0x9c, 0x3d, + 0xb6, 0x8d, 0xa2, 0x9f, 0x0e, 0x04, 0x18, 0xcb, 0x6b, 0xce, 0x31, 0xe3, 0x41, 0x24, 0x51, 
0x58, + 0x91, 0x2e, 0x61, 0xbe, 0xea, 0x38, 0x6c, 0xbf, 0x07, 0x4a, 0x60, 0x42, 0x4c, 0x1a, 0xc2, 0x80, + 0xaf, 0x39, 0x8c, 0x6d, 0x08, 0xb0, 0x89, 0x30, 0x7c, 0x9d, 0x22, 0x3a, 0x3a, 0x89, 0x84, 0xa9, + 0xc8, 0x0d, 0x02, 0x9a, 0xf0, 0x1d, 0xd8, 0xa4, 0x18, 0x05, 0xbe, 0x36, 0x65, 0xbb, 0x45, 0x86, + 0x3e, 0x04, 0xed, 0x34, 0xd1, 0xa9, 0x0d, 0x22, 0xad, 0x15, 0xdf, 0x26, 0x49, 0x04, 0x4a, 0xe9, + 0xbe, 0x10, 0xc8, 0x99, 0xc3, 0xd9, 0xae, 0xaf, 0x48, 0xd3, 0x34, 0x92, 0x86, 0x76, 0x08, 0x09, + 0x06, 0xbe, 0x30, 0x69, 0x47, 0x23, 0x21, 0xbb, 0x34, 0x12, 0x44, 0x46, 0xda, 0xa4, 0x03, 0x96, + 0xef, 0x39, 0xfb, 0x8c, 0xf9, 0xfa, 0xb5, 0xc4, 0x56, 0x99, 0xb2, 0x4f, 0x87, 0x91, 0x18, 0x8a, + 0x58, 0x60, 0x28, 0x30, 0x10, 0x25, 0x7a, 0x40, 0x36, 0x08, 0x89, 0xd4, 0x08, 0xaa, 0x44, 0xb8, + 0x73, 0xc8, 0xf6, 0x04, 0x86, 0xa9, 0x6e, 0xa6, 0x56, 0x80, 0xd5, 0xc8, 0x0f, 0xe9, 0x00, 0x7d, + 0x89, 0x89, 0x30, 0xa9, 0x05, 0x25, 0xb8, 0x43, 0x80, 0xed, 0x76, 0x3a, 0x4b, 0xe0, 0x88, 0xb2, + 0x9b, 0xa0, 0x54, 0xd5, 0x1e, 0x97, 0x7c, 0x6c, 0x28, 0xbb, 0x04, 0x7e, 0xa5, 0x5b, 0x36, 0xd0, + 0x81, 0x10, 0x90, 0xff, 0x46, 0xac, 0x90, 0x61, 0x0a, 0x2a, 0x6d, 0xca, 0xc4, 0xf0, 0xdf, 0x7f, + 0x00, 0x20, 0x8c, 0x80, 0xff, 0x51, 0xce, 0x4b, 0x6c, 0xa9, 0xc7, 0xfb, 0xfd, 0x93, 0x0e, 0xd3, + 0xd7, 0x1d, 0x81, 0x55, 0x7f, 0xe2, 0x6c, 0xb1, 0xb5, 0x48, 0x2b, 0xc9, 0xff, 0x22, 0x69, 0x0c, + 0x46, 0x60, 0x52, 0x51, 0xa7, 0xce, 0x11, 0x3b, 0xb0, 0x49, 0x8a, 0x32, 0x88, 0xb4, 0x82, 0x0a, + 0x3c, 0x73, 0x76, 0xd9, 0x56, 0x00, 0x06, 0x65, 0x0f, 0x14, 0x3f, 0xa7, 0x4e, 0xc4, 0x32, 0x8e, + 0x00, 0x07, 0xfc, 0x82, 0x0e, 0x6c, 0xb4, 0x8d, 0xd2, 0x08, 0x6c, 0x04, 0x08, 0x11, 0xff, 0x9b, + 0x04, 0x31, 0x58, 0xab, 0x7b, 0xc2, 0xf0, 0x4b, 0xea, 0x22, 0xc0, 0x6e, 0xbb, 0x0d, 0x11, 0xbf, + 0xa2, 0x4f, 0x1d, 0xca, 0x3e, 0x28, 0xc9, 0xff, 0x21, 0x06, 0xa1, 0x67, 0x20, 0x31, 0x92, 0xff, + 0x4b, 0x9d, 0xd5, 0xd8, 0x6a, 0x1b, 0x40, 0xfe, 0x1f, 0xd9, 0xd2, 0x83, 0x49, 0xe9, 0x32, 0x5b, + 0xb2, 0x99, 0xf0, 0xff, 0xfd, 0xaf, 0x2b, 0xec, 0xf2, 0x3a, 0x9f, 0xb9, 0x3f, 0x5d, 0x01, 0xff, + 0xe2, 0xf9, 0xe7, 0x1b, 0xd3, 0x16, 0xc4, 0x2b, 0x6f, 0xfc, 0x47, 0x87, 0x69, 0x7e, 0x3b, 0xca, + 0xa6, 0x6e, 0x7e, 0x37, 0xad, 0x4f, 0x27, 0x59, 0xb9, 0x23, 0xcb, 0xb5, 0x9c, 0xdf, 0x14, 0xcf, + 0x6c, 0xe9, 0xcb, 0xf2, 0xf7, 0x43, 0x6d, 0xb5, 0x05, 0xf0, 0xb1, 0x76, 0xd6, 0xaa, 0xac, 0x60, + 0x5c, 0xb8, 0x55, 0x49, 0x55, 0xaf, 0xe1, 0xd2, 0x36, 0x15, 0x9f, 0x97, 0xfc, 0x10, 0xc6, 0xc5, + 0xf0, 0x89, 0x1f, 0xf6, 0x1a, 0xc3, 0x92, 0xff, 0x52, 0xbb, 0xac, 0x40, 0xcf, 0x83, 0x71, 0xe1, + 0x79, 0x4f, 0x0a, 0xcf, 0xeb, 0x35, 0x3c, 0xaf, 0xd4, 0xbc, 0xdd, 0x28, 0xff, 0xd8, 0x8b, 0x6f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x3b, 0xb8, 0x9b, 0x12, 0x3d, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_placeholder_field.pb.go new file mode 100644 index 0000000..c2041a8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/promotion_placeholder_field.pb.go @@ -0,0 +1,185 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/promotion_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Promotion placeholder fields. +type PromotionPlaceholderFieldEnum_PromotionPlaceholderField int32 + +const ( + // Not specified. + PromotionPlaceholderFieldEnum_UNSPECIFIED PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + PromotionPlaceholderFieldEnum_UNKNOWN PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 1 + // Data Type: STRING. The text that appears on the ad when the extension is + // shown. + PromotionPlaceholderFieldEnum_PROMOTION_TARGET PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 2 + // Data Type: STRING. Allows you to add "up to" phrase to the promotion, + // in case you have variable promotion rates. + PromotionPlaceholderFieldEnum_DISCOUNT_MODIFIER PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 3 + // Data Type: INT64. Takes a value in micros, where 1 million micros + // represents 1%, and is shown as a percentage when rendered. + PromotionPlaceholderFieldEnum_PERCENT_OFF PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 4 + // Data Type: MONEY. Requires a currency and an amount of money. + PromotionPlaceholderFieldEnum_MONEY_AMOUNT_OFF PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 5 + // Data Type: STRING. A string that the user enters to get the discount. + PromotionPlaceholderFieldEnum_PROMOTION_CODE PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 6 + // Data Type: MONEY. A minimum spend before the user qualifies for the + // promotion. + PromotionPlaceholderFieldEnum_ORDERS_OVER_AMOUNT PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 7 + // Data Type: DATE. The start date of the promotion. + PromotionPlaceholderFieldEnum_PROMOTION_START PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 8 + // Data Type: DATE. The end date of the promotion. + PromotionPlaceholderFieldEnum_PROMOTION_END PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 9 + // Data Type: STRING. Describes the associated event for the promotion using + // one of the PromotionExtensionOccasion enum values, for example NEW_YEARS. + PromotionPlaceholderFieldEnum_OCCASION PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 10 + // Data Type: URL_LIST. Final URLs to be used in the ad when using Upgraded + // URLs. + PromotionPlaceholderFieldEnum_FINAL_URLS PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 11 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + PromotionPlaceholderFieldEnum_FINAL_MOBILE_URLS PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 12 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. 
+ PromotionPlaceholderFieldEnum_TRACKING_URL PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 13 + // Data Type: STRING. A string represented by a language code for the + // promotion. + PromotionPlaceholderFieldEnum_LANGUAGE PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 14 + // Data Type: STRING. Final URL suffix for the ad when using parallel + // tracking. + PromotionPlaceholderFieldEnum_FINAL_URL_SUFFIX PromotionPlaceholderFieldEnum_PromotionPlaceholderField = 15 +) + +var PromotionPlaceholderFieldEnum_PromotionPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PROMOTION_TARGET", + 3: "DISCOUNT_MODIFIER", + 4: "PERCENT_OFF", + 5: "MONEY_AMOUNT_OFF", + 6: "PROMOTION_CODE", + 7: "ORDERS_OVER_AMOUNT", + 8: "PROMOTION_START", + 9: "PROMOTION_END", + 10: "OCCASION", + 11: "FINAL_URLS", + 12: "FINAL_MOBILE_URLS", + 13: "TRACKING_URL", + 14: "LANGUAGE", + 15: "FINAL_URL_SUFFIX", +} +var PromotionPlaceholderFieldEnum_PromotionPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PROMOTION_TARGET": 2, + "DISCOUNT_MODIFIER": 3, + "PERCENT_OFF": 4, + "MONEY_AMOUNT_OFF": 5, + "PROMOTION_CODE": 6, + "ORDERS_OVER_AMOUNT": 7, + "PROMOTION_START": 8, + "PROMOTION_END": 9, + "OCCASION": 10, + "FINAL_URLS": 11, + "FINAL_MOBILE_URLS": 12, + "TRACKING_URL": 13, + "LANGUAGE": 14, + "FINAL_URL_SUFFIX": 15, +} + +func (x PromotionPlaceholderFieldEnum_PromotionPlaceholderField) String() string { + return proto.EnumName(PromotionPlaceholderFieldEnum_PromotionPlaceholderField_name, int32(x)) +} +func (PromotionPlaceholderFieldEnum_PromotionPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_promotion_placeholder_field_494db46851f5ebaa, []int{0, 0} +} + +// Values for Promotion placeholder fields. 
+type PromotionPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PromotionPlaceholderFieldEnum) Reset() { *m = PromotionPlaceholderFieldEnum{} } +func (m *PromotionPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*PromotionPlaceholderFieldEnum) ProtoMessage() {} +func (*PromotionPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_promotion_placeholder_field_494db46851f5ebaa, []int{0} +} +func (m *PromotionPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PromotionPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *PromotionPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PromotionPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *PromotionPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PromotionPlaceholderFieldEnum.Merge(dst, src) +} +func (m *PromotionPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_PromotionPlaceholderFieldEnum.Size(m) +} +func (m *PromotionPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PromotionPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PromotionPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PromotionPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.PromotionPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.PromotionPlaceholderFieldEnum_PromotionPlaceholderField", PromotionPlaceholderFieldEnum_PromotionPlaceholderField_name, PromotionPlaceholderFieldEnum_PromotionPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/promotion_placeholder_field.proto", fileDescriptor_promotion_placeholder_field_494db46851f5ebaa) +} + +var fileDescriptor_promotion_placeholder_field_494db46851f5ebaa = []byte{ + // 476 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcf, 0x6e, 0x9b, 0x4e, + 0x10, 0xc7, 0x7f, 0x26, 0xbf, 0x26, 0xe9, 0xda, 0xb1, 0x37, 0xdb, 0x3f, 0x52, 0xab, 0xba, 0x52, + 0xf2, 0x00, 0x20, 0xd4, 0x1b, 0x3d, 0x54, 0x6b, 0x58, 0x10, 0x8a, 0xd9, 0x45, 0x0b, 0xb8, 0x7f, + 0x64, 0x69, 0x45, 0x03, 0xa5, 0x96, 0x30, 0x8b, 0xbc, 0x4e, 0x1e, 0xa8, 0xc7, 0x3e, 0x4a, 0xd5, + 0x27, 0xe9, 0xa1, 0x97, 0xbe, 0x40, 0xb5, 0x10, 0xdb, 0x27, 0xf7, 0x82, 0x86, 0xef, 0x7c, 0xe7, + 0x33, 0xa3, 0x9d, 0x01, 0xef, 0x2a, 0x29, 0xab, 0xba, 0xb4, 0xf2, 0x42, 0x59, 0x7d, 0xa8, 0xa3, + 0x7b, 0xdb, 0x2a, 0x9b, 0xbb, 0xb5, 0xb2, 0xda, 0x8d, 0x5c, 0xcb, 0xed, 0x4a, 0x36, 0xa2, 0xad, + 0xf3, 0xdb, 0xf2, 0xab, 0xac, 0x8b, 0x72, 0x23, 0xbe, 0xac, 0xca, 0xba, 0x30, 0xdb, 0x8d, 0xdc, + 0x4a, 0x34, 0xed, 0xab, 0xcc, 0xbc, 0x50, 0xe6, 0x1e, 0x60, 0xde, 0xdb, 0x66, 0x07, 0x78, 0xf9, + 0x6a, 0xc7, 0x6f, 0x57, 0x56, 0xde, 0x34, 0x72, 0x9b, 0x6b, 0x9a, 0xea, 0x8b, 0xaf, 0x7f, 0x1b, + 0x60, 0x1a, 0xef, 0x5a, 0xc4, 0x87, 0x0e, 0xbe, 0x6e, 0x40, 0x9a, 0xbb, 0xf5, 0xf5, 0x4f, 0x03, + 0xbc, 0x38, 0xea, 0x40, 0x13, 0x30, 0xcc, 0x68, 0x12, 0x13, 0x37, 0xf4, 0x43, 0xe2, 0xc1, 0xff, + 0xd0, 0x10, 0x9c, 0x65, 0xf4, 0x86, 0xb2, 0xf7, 0x14, 0x0e, 0xd0, 0x53, 0x00, 0x63, 0xce, 0x22, + 0x96, 0x86, 0x8c, 0x8a, 0x14, 0xf3, 0x80, 0xa4, 0xd0, 0x40, 0xcf, 0xc0, 0xa5, 0x17, 0x26, 0x2e, + 0xcb, 0x68, 0x2a, 0x22, 0xe6, 0xe9, 0x4a, 0x0e, 0x4f, 0x34, 0x2a, 0x26, 0xdc, 0x25, 0x34, 0x15, + 0xcc, 0xf7, 0xe1, 0xff, 0xba, 0x3a, 0x62, 
0x94, 0x7c, 0x14, 0x38, 0xea, 0xbc, 0x5a, 0x7d, 0x84, + 0x10, 0x18, 0x1f, 0x98, 0x2e, 0xf3, 0x08, 0x3c, 0x45, 0xcf, 0x01, 0x62, 0xdc, 0x23, 0x3c, 0x11, + 0x6c, 0x41, 0xf8, 0x83, 0x1f, 0x9e, 0xa1, 0x27, 0x60, 0x72, 0xf0, 0x26, 0x29, 0xe6, 0x29, 0x3c, + 0x47, 0x97, 0xe0, 0xe2, 0x20, 0x12, 0xea, 0xc1, 0xc7, 0x68, 0x04, 0xce, 0x99, 0xeb, 0xe2, 0x24, + 0x64, 0x14, 0x02, 0x34, 0x06, 0xc0, 0x0f, 0x29, 0x9e, 0x8b, 0x8c, 0xcf, 0x13, 0x38, 0xd4, 0xf3, + 0xf6, 0xff, 0x11, 0x9b, 0x85, 0x73, 0xd2, 0xcb, 0x23, 0x04, 0xc1, 0x28, 0xe5, 0xd8, 0xbd, 0x09, + 0x69, 0xa0, 0x25, 0x78, 0xa1, 0x31, 0x73, 0x4c, 0x83, 0x0c, 0x07, 0x04, 0x8e, 0xf5, 0xf8, 0x7b, + 0x8c, 0x48, 0x32, 0xdf, 0x0f, 0x3f, 0xc0, 0xc9, 0xec, 0xcf, 0x00, 0x5c, 0xdd, 0xca, 0xb5, 0xf9, + 0xcf, 0xa5, 0xcd, 0x5e, 0x1f, 0x7d, 0xf1, 0x58, 0xaf, 0x2d, 0x1e, 0x7c, 0x9a, 0x3d, 0x00, 0x2a, + 0x59, 0xe7, 0x4d, 0x65, 0xca, 0x4d, 0x65, 0x55, 0x65, 0xd3, 0x2d, 0x75, 0x77, 0x46, 0xed, 0x4a, + 0x1d, 0xb9, 0xaa, 0xb7, 0xdd, 0xf7, 0x9b, 0x71, 0x12, 0x60, 0xfc, 0xdd, 0x98, 0x06, 0x3d, 0x0a, + 0x17, 0xca, 0xec, 0x43, 0x1d, 0x2d, 0x6c, 0x53, 0xef, 0x5f, 0xfd, 0xd8, 0xe5, 0x97, 0xb8, 0x50, + 0xcb, 0x7d, 0x7e, 0xb9, 0xb0, 0x97, 0x5d, 0xfe, 0x97, 0x71, 0xd5, 0x8b, 0x8e, 0x83, 0x0b, 0xe5, + 0x38, 0x7b, 0x87, 0xe3, 0x2c, 0x6c, 0xc7, 0xe9, 0x3c, 0x9f, 0x4f, 0xbb, 0xc1, 0xde, 0xfc, 0x0d, + 0x00, 0x00, 0xff, 0xff, 0xf6, 0x79, 0x56, 0x03, 0xed, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/proximity_radius_units.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/proximity_radius_units.pb.go new file mode 100644 index 0000000..6ceac99 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/proximity_radius_units.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/proximity_radius_units.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The unit of radius distance in proximity (e.g. MILES) +type ProximityRadiusUnitsEnum_ProximityRadiusUnits int32 + +const ( + // Not specified. + ProximityRadiusUnitsEnum_UNSPECIFIED ProximityRadiusUnitsEnum_ProximityRadiusUnits = 0 + // Used for return value only. Represents value unknown in this version. 
+ ProximityRadiusUnitsEnum_UNKNOWN ProximityRadiusUnitsEnum_ProximityRadiusUnits = 1 + // Miles + ProximityRadiusUnitsEnum_MILES ProximityRadiusUnitsEnum_ProximityRadiusUnits = 2 + // Kilometers + ProximityRadiusUnitsEnum_KILOMETERS ProximityRadiusUnitsEnum_ProximityRadiusUnits = 3 +) + +var ProximityRadiusUnitsEnum_ProximityRadiusUnits_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MILES", + 3: "KILOMETERS", +} +var ProximityRadiusUnitsEnum_ProximityRadiusUnits_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MILES": 2, + "KILOMETERS": 3, +} + +func (x ProximityRadiusUnitsEnum_ProximityRadiusUnits) String() string { + return proto.EnumName(ProximityRadiusUnitsEnum_ProximityRadiusUnits_name, int32(x)) +} +func (ProximityRadiusUnitsEnum_ProximityRadiusUnits) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_proximity_radius_units_b6e6c9fc53d5bd7b, []int{0, 0} +} + +// Container for enum describing unit of radius in proximity. +type ProximityRadiusUnitsEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProximityRadiusUnitsEnum) Reset() { *m = ProximityRadiusUnitsEnum{} } +func (m *ProximityRadiusUnitsEnum) String() string { return proto.CompactTextString(m) } +func (*ProximityRadiusUnitsEnum) ProtoMessage() {} +func (*ProximityRadiusUnitsEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_proximity_radius_units_b6e6c9fc53d5bd7b, []int{0} +} +func (m *ProximityRadiusUnitsEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProximityRadiusUnitsEnum.Unmarshal(m, b) +} +func (m *ProximityRadiusUnitsEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProximityRadiusUnitsEnum.Marshal(b, m, deterministic) +} +func (dst *ProximityRadiusUnitsEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProximityRadiusUnitsEnum.Merge(dst, src) +} +func (m *ProximityRadiusUnitsEnum) XXX_Size() int { + return xxx_messageInfo_ProximityRadiusUnitsEnum.Size(m) +} +func (m *ProximityRadiusUnitsEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ProximityRadiusUnitsEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ProximityRadiusUnitsEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ProximityRadiusUnitsEnum)(nil), "google.ads.googleads.v1.enums.ProximityRadiusUnitsEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ProximityRadiusUnitsEnum_ProximityRadiusUnits", ProximityRadiusUnitsEnum_ProximityRadiusUnits_name, ProximityRadiusUnitsEnum_ProximityRadiusUnits_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/proximity_radius_units.proto", fileDescriptor_proximity_radius_units_b6e6c9fc53d5bd7b) +} + +var fileDescriptor_proximity_radius_units_b6e6c9fc53d5bd7b = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xeb, 0xf8, 0x2b, 0x66, 0xa0, 0xa5, 0x78, 0xa1, 0xe2, 0x2e, 0xb6, 0x07, 0x48, + 0x28, 0xde, 0xc5, 0xab, 0x4e, 0xe3, 0x28, 0xdb, 0xba, 0xb2, 0xb9, 0x0a, 0x52, 0x18, 0xd1, 0x94, + 0x10, 0x5c, 0x93, 0xd2, 0xb4, 0x43, 0x5f, 0xc7, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0xc2, 0x67, 0x90, + 0xa6, 0x6b, 0xaf, 0xa6, 0x37, 0xe5, 0xd0, 0xf3, 0x9d, 0x5f, 0xce, 0xf7, 0x01, 0xcc, 0x95, 0xe2, + 0x9b, 0x04, 0x51, 0xa6, 0x51, 0x2d, 0x2b, 0xb5, 0x75, 0x51, 0x22, 0xcb, 0x54, 0xa3, 0x2c, 0x57, + 0xaf, 0x22, 0x15, 0xc5, 0xdb, 0x3a, 0xa7, 0x4c, 0x94, 0x7a, 0x5d, 0x4a, 0x51, 0x68, 
0x98, 0xe5, + 0xaa, 0x50, 0x4e, 0xbf, 0x0e, 0x40, 0xca, 0x34, 0x6c, 0xb3, 0x70, 0xeb, 0x42, 0x93, 0xbd, 0xb8, + 0x6c, 0xd0, 0x99, 0x40, 0x54, 0x4a, 0x55, 0xd0, 0x42, 0x28, 0xb9, 0x0b, 0x0f, 0x5f, 0xc0, 0x59, + 0xd8, 0xc0, 0x17, 0x86, 0xbd, 0xaa, 0xd0, 0x44, 0x96, 0xe9, 0x70, 0x0e, 0x4e, 0xf7, 0x79, 0xce, + 0x09, 0xe8, 0xad, 0x82, 0x65, 0x48, 0x6e, 0xfc, 0x3b, 0x9f, 0xdc, 0xda, 0xff, 0x9c, 0x1e, 0x38, + 0x5c, 0x05, 0x93, 0x60, 0xfe, 0x10, 0xd8, 0x1d, 0xe7, 0x08, 0xfc, 0x9f, 0xf9, 0x53, 0xb2, 0xb4, + 0x2d, 0xe7, 0x18, 0x80, 0x89, 0x3f, 0x9d, 0xcf, 0xc8, 0x3d, 0x59, 0x2c, 0xed, 0xee, 0xe8, 0xbb, + 0x03, 0x06, 0xcf, 0x2a, 0x85, 0x7f, 0x16, 0x1e, 0x9d, 0xef, 0x7b, 0x34, 0xac, 0xda, 0x86, 0x9d, + 0xc7, 0xd1, 0x2e, 0xcb, 0xd5, 0x86, 0x4a, 0x0e, 0x55, 0xce, 0x11, 0x4f, 0xa4, 0xd9, 0xa5, 0x39, + 0x5c, 0x26, 0xf4, 0x2f, 0x77, 0xbc, 0x36, 0xdf, 0x77, 0xab, 0x3b, 0xf6, 0xbc, 0x0f, 0xab, 0x3f, + 0xae, 0x51, 0x1e, 0xd3, 0xb0, 0x96, 0x95, 0x8a, 0x5c, 0x58, 0x2d, 0xaf, 0x3f, 0x1b, 0x3f, 0xf6, + 0x98, 0x8e, 0x5b, 0x3f, 0x8e, 0xdc, 0xd8, 0xf8, 0x5f, 0xd6, 0xa0, 0xfe, 0x89, 0xb1, 0xc7, 0x34, + 0xc6, 0xed, 0x04, 0xc6, 0x91, 0x8b, 0xb1, 0x99, 0x79, 0x3a, 0x30, 0xc5, 0xae, 0x7e, 0x02, 0x00, + 0x00, 0xff, 0xff, 0x05, 0xc7, 0x92, 0xa6, 0xdf, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/quality_score_bucket.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/quality_score_bucket.pb.go new file mode 100644 index 0000000..4df3f3d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/quality_score_bucket.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/quality_score_bucket.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible quality score buckets. +type QualityScoreBucketEnum_QualityScoreBucket int32 + +const ( + // Not specified. + QualityScoreBucketEnum_UNSPECIFIED QualityScoreBucketEnum_QualityScoreBucket = 0 + // Used for return value only. Represents value unknown in this version. + QualityScoreBucketEnum_UNKNOWN QualityScoreBucketEnum_QualityScoreBucket = 1 + // Quality of the creative is below average. + QualityScoreBucketEnum_BELOW_AVERAGE QualityScoreBucketEnum_QualityScoreBucket = 2 + // Quality of the creative is average. + QualityScoreBucketEnum_AVERAGE QualityScoreBucketEnum_QualityScoreBucket = 3 + // Quality of the creative is above average. 
+ QualityScoreBucketEnum_ABOVE_AVERAGE QualityScoreBucketEnum_QualityScoreBucket = 4 +) + +var QualityScoreBucketEnum_QualityScoreBucket_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BELOW_AVERAGE", + 3: "AVERAGE", + 4: "ABOVE_AVERAGE", +} +var QualityScoreBucketEnum_QualityScoreBucket_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BELOW_AVERAGE": 2, + "AVERAGE": 3, + "ABOVE_AVERAGE": 4, +} + +func (x QualityScoreBucketEnum_QualityScoreBucket) String() string { + return proto.EnumName(QualityScoreBucketEnum_QualityScoreBucket_name, int32(x)) +} +func (QualityScoreBucketEnum_QualityScoreBucket) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_quality_score_bucket_d1c2022083792bdb, []int{0, 0} +} + +// The relative performance compared to other advertisers. +type QualityScoreBucketEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QualityScoreBucketEnum) Reset() { *m = QualityScoreBucketEnum{} } +func (m *QualityScoreBucketEnum) String() string { return proto.CompactTextString(m) } +func (*QualityScoreBucketEnum) ProtoMessage() {} +func (*QualityScoreBucketEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_quality_score_bucket_d1c2022083792bdb, []int{0} +} +func (m *QualityScoreBucketEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QualityScoreBucketEnum.Unmarshal(m, b) +} +func (m *QualityScoreBucketEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QualityScoreBucketEnum.Marshal(b, m, deterministic) +} +func (dst *QualityScoreBucketEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_QualityScoreBucketEnum.Merge(dst, src) +} +func (m *QualityScoreBucketEnum) XXX_Size() int { + return xxx_messageInfo_QualityScoreBucketEnum.Size(m) +} +func (m *QualityScoreBucketEnum) XXX_DiscardUnknown() { + xxx_messageInfo_QualityScoreBucketEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_QualityScoreBucketEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*QualityScoreBucketEnum)(nil), "google.ads.googleads.v1.enums.QualityScoreBucketEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket", QualityScoreBucketEnum_QualityScoreBucket_name, QualityScoreBucketEnum_QualityScoreBucket_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/quality_score_bucket.proto", fileDescriptor_quality_score_bucket_d1c2022083792bdb) +} + +var fileDescriptor_quality_score_bucket_d1c2022083792bdb = []byte{ + // 320 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x18, 0xc5, 0xff, 0xeb, 0xfe, 0x28, 0x64, 0x88, 0xb3, 0x17, 0x0a, 0xe2, 0x2e, 0xb6, 0x07, 0x48, + 0x28, 0xde, 0x48, 0xbc, 0x4a, 0x34, 0x8e, 0xa1, 0x74, 0xd3, 0xb1, 0x0e, 0xa4, 0x30, 0xb2, 0x35, + 0x84, 0xe2, 0x96, 0xcc, 0xa5, 0x1d, 0x78, 0xe5, 0xbb, 0x78, 0xe9, 0xa3, 0xf8, 0x28, 0xe2, 0x43, + 0x48, 0x12, 0xdb, 0x9b, 0xa1, 0x37, 0xe5, 0x34, 0xbf, 0xef, 0x1c, 0xbe, 0xef, 0x80, 0x0b, 0xa9, + 0xb5, 0x5c, 0x0a, 0xc4, 0x33, 0x83, 0xbc, 0xb4, 0x6a, 0x1b, 0x21, 0xa1, 0xca, 0x95, 0x41, 0xcf, + 0x25, 0x5f, 0xe6, 0xc5, 0xcb, 0xcc, 0x2c, 0xf4, 0x46, 0xcc, 0xe6, 0xe5, 0xe2, 0x49, 0x14, 0x70, + 0xbd, 0xd1, 0x85, 0x0e, 0x3b, 0x7e, 0x1c, 0xf2, 0xcc, 0xc0, 0xda, 0x09, 0xb7, 0x11, 0x74, 0xce, + 0xd3, 0xb3, 0x2a, 0x78, 0x9d, 0x23, 0xae, 0x94, 0x2e, 0x78, 0x91, 0x6b, 0x65, 0xbc, 0xb9, 0xf7, + 0x0a, 0x8e, 0xef, 
0x7d, 0xf4, 0xd8, 0x26, 0x53, 0x17, 0xcc, 0x54, 0xb9, 0xea, 0x09, 0x10, 0xee, + 0x92, 0xf0, 0x10, 0xb4, 0x26, 0xf1, 0x78, 0xc4, 0xae, 0x06, 0x37, 0x03, 0x76, 0xdd, 0xfe, 0x17, + 0xb6, 0xc0, 0xfe, 0x24, 0xbe, 0x8d, 0x87, 0xd3, 0xb8, 0xdd, 0x08, 0x8f, 0xc0, 0x01, 0x65, 0x77, + 0xc3, 0xe9, 0x8c, 0x24, 0xec, 0x81, 0xf4, 0x59, 0x3b, 0xb0, 0xbc, 0xfa, 0x69, 0x5a, 0x4e, 0xe8, + 0x30, 0x61, 0x35, 0xff, 0x4f, 0xbf, 0x1a, 0xa0, 0xbb, 0xd0, 0x2b, 0xf8, 0xe7, 0x11, 0xf4, 0x64, + 0x77, 0x95, 0x91, 0xdd, 0x7f, 0xd4, 0x78, 0xa4, 0x3f, 0x4e, 0xa9, 0x97, 0x5c, 0x49, 0xa8, 0x37, + 0x12, 0x49, 0xa1, 0xdc, 0x75, 0x55, 0x91, 0xeb, 0xdc, 0xfc, 0xd2, 0xeb, 0xa5, 0xfb, 0xbe, 0x05, + 0xcd, 0x3e, 0x21, 0xef, 0x41, 0xa7, 0xef, 0xa3, 0x48, 0x66, 0xa0, 0x97, 0x56, 0x25, 0x11, 0xb4, + 0x85, 0x98, 0x8f, 0x8a, 0xa7, 0x24, 0x33, 0x69, 0xcd, 0xd3, 0x24, 0x4a, 0x1d, 0xff, 0x0c, 0xba, + 0xfe, 0x11, 0x63, 0x92, 0x19, 0x8c, 0xeb, 0x09, 0x8c, 0x93, 0x08, 0x63, 0x37, 0x33, 0xdf, 0x73, + 0x8b, 0x9d, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0xb6, 0xfe, 0xaa, 0x9d, 0xef, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/real_estate_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/real_estate_placeholder_field.pb.go new file mode 100644 index 0000000..a9e5a51 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/real_estate_placeholder_field.pb.go @@ -0,0 +1,212 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/real_estate_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Real Estate placeholder fields. +type RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField int32 + +const ( + // Not specified. + RealEstatePlaceholderFieldEnum_UNSPECIFIED RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + RealEstatePlaceholderFieldEnum_UNKNOWN RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 1 + // Data Type: STRING. Unique ID. + RealEstatePlaceholderFieldEnum_LISTING_ID RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 2 + // Data Type: STRING. Main headline with listing name to be shown in dynamic + // ad. + RealEstatePlaceholderFieldEnum_LISTING_NAME RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 3 + // Data Type: STRING. City name to be shown in dynamic ad. + RealEstatePlaceholderFieldEnum_CITY_NAME RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 4 + // Data Type: STRING. Description of listing to be shown in dynamic ad. + RealEstatePlaceholderFieldEnum_DESCRIPTION RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 5 + // Data Type: STRING. Complete listing address, including postal code. 
+ RealEstatePlaceholderFieldEnum_ADDRESS RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 6 + // Data Type: STRING. Price to be shown in the ad. + // Example: "100.00 USD" + RealEstatePlaceholderFieldEnum_PRICE RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 7 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + RealEstatePlaceholderFieldEnum_FORMATTED_PRICE RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 8 + // Data Type: URL. Image to be displayed in the ad. + RealEstatePlaceholderFieldEnum_IMAGE_URL RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 9 + // Data Type: STRING. Type of property (house, condo, apartment, etc.) used + // to group like items together for recommendation engine. + RealEstatePlaceholderFieldEnum_PROPERTY_TYPE RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 10 + // Data Type: STRING. Type of listing (resale, rental, foreclosure, etc.) + // used to group like items together for recommendation engine. + RealEstatePlaceholderFieldEnum_LISTING_TYPE RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 11 + // Data Type: STRING_LIST. Keywords used for product retrieval. + RealEstatePlaceholderFieldEnum_CONTEXTUAL_KEYWORDS RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 12 + // Data Type: URL_LIST. Final URLs to be used in ad when using Upgraded + // URLs; the more specific the better (e.g. the individual URL of a specific + // listing and its location). + RealEstatePlaceholderFieldEnum_FINAL_URLS RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 13 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + RealEstatePlaceholderFieldEnum_FINAL_MOBILE_URLS RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 14 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + RealEstatePlaceholderFieldEnum_TRACKING_URL RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 15 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + RealEstatePlaceholderFieldEnum_ANDROID_APP_LINK RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 16 + // Data Type: STRING_LIST. List of recommended listing IDs to show together + // with this item. + RealEstatePlaceholderFieldEnum_SIMILAR_LISTING_IDS RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 17 + // Data Type: STRING. iOS app link. + RealEstatePlaceholderFieldEnum_IOS_APP_LINK RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 18 + // Data Type: INT64. iOS app store ID. 
+ RealEstatePlaceholderFieldEnum_IOS_APP_STORE_ID RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField = 19 +) + +var RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LISTING_ID", + 3: "LISTING_NAME", + 4: "CITY_NAME", + 5: "DESCRIPTION", + 6: "ADDRESS", + 7: "PRICE", + 8: "FORMATTED_PRICE", + 9: "IMAGE_URL", + 10: "PROPERTY_TYPE", + 11: "LISTING_TYPE", + 12: "CONTEXTUAL_KEYWORDS", + 13: "FINAL_URLS", + 14: "FINAL_MOBILE_URLS", + 15: "TRACKING_URL", + 16: "ANDROID_APP_LINK", + 17: "SIMILAR_LISTING_IDS", + 18: "IOS_APP_LINK", + 19: "IOS_APP_STORE_ID", +} +var RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LISTING_ID": 2, + "LISTING_NAME": 3, + "CITY_NAME": 4, + "DESCRIPTION": 5, + "ADDRESS": 6, + "PRICE": 7, + "FORMATTED_PRICE": 8, + "IMAGE_URL": 9, + "PROPERTY_TYPE": 10, + "LISTING_TYPE": 11, + "CONTEXTUAL_KEYWORDS": 12, + "FINAL_URLS": 13, + "FINAL_MOBILE_URLS": 14, + "TRACKING_URL": 15, + "ANDROID_APP_LINK": 16, + "SIMILAR_LISTING_IDS": 17, + "IOS_APP_LINK": 18, + "IOS_APP_STORE_ID": 19, +} + +func (x RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField) String() string { + return proto.EnumName(RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField_name, int32(x)) +} +func (RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_real_estate_placeholder_field_0cf045b5aa386b01, []int{0, 0} +} + +// Values for Real Estate placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. +type RealEstatePlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RealEstatePlaceholderFieldEnum) Reset() { *m = RealEstatePlaceholderFieldEnum{} } +func (m *RealEstatePlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*RealEstatePlaceholderFieldEnum) ProtoMessage() {} +func (*RealEstatePlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_real_estate_placeholder_field_0cf045b5aa386b01, []int{0} +} +func (m *RealEstatePlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RealEstatePlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *RealEstatePlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RealEstatePlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *RealEstatePlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RealEstatePlaceholderFieldEnum.Merge(dst, src) +} +func (m *RealEstatePlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_RealEstatePlaceholderFieldEnum.Size(m) +} +func (m *RealEstatePlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RealEstatePlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RealEstatePlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RealEstatePlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.RealEstatePlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField", RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField_name, RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/enums/real_estate_placeholder_field.proto", fileDescriptor_real_estate_placeholder_field_0cf045b5aa386b01) +} + +var fileDescriptor_real_estate_placeholder_field_0cf045b5aa386b01 = []byte{ + // 513 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6e, 0xda, 0x3c, + 0x14, 0xc7, 0x3f, 0xe0, 0x6b, 0x3b, 0x4c, 0x29, 0xc6, 0x6c, 0x9a, 0x54, 0xad, 0x9b, 0xda, 0x07, + 0x48, 0x84, 0x76, 0x97, 0x5d, 0x99, 0xc4, 0x20, 0x8b, 0xe0, 0x58, 0xb6, 0xa1, 0x63, 0x42, 0xb2, + 0xb2, 0x26, 0xcb, 0x90, 0x42, 0x82, 0x08, 0xed, 0x23, 0xec, 0x41, 0x76, 0xb7, 0x3d, 0xca, 0x1e, + 0x65, 0xd7, 0x7b, 0x80, 0xc9, 0xc9, 0x02, 0xbb, 0x61, 0x37, 0xd1, 0x39, 0xff, 0x73, 0xf2, 0xfb, + 0x1f, 0xf9, 0x1c, 0x80, 0x93, 0x3c, 0x4f, 0xd2, 0xd8, 0x0e, 0xa3, 0xc2, 0xae, 0x42, 0x13, 0x3d, + 0x0d, 0xed, 0x38, 0x7b, 0xdc, 0x14, 0xf6, 0x2e, 0x0e, 0x53, 0x1d, 0x17, 0xfb, 0x70, 0x1f, 0xeb, + 0x6d, 0x1a, 0x3e, 0xc4, 0x9f, 0xf3, 0x34, 0x8a, 0x77, 0xfa, 0xd3, 0x3a, 0x4e, 0x23, 0x6b, 0xbb, + 0xcb, 0xf7, 0x39, 0xba, 0xa9, 0xfe, 0xb3, 0xc2, 0xa8, 0xb0, 0x0e, 0x08, 0xeb, 0x69, 0x68, 0x95, + 0x88, 0xeb, 0x57, 0xb5, 0xc3, 0x76, 0x6d, 0x87, 0x59, 0x96, 0xef, 0xc3, 0xfd, 0x3a, 0xcf, 0x8a, + 0xea, 0xe7, 0xbb, 0x6f, 0x2d, 0xf0, 0x5a, 0xc4, 0x61, 0x4a, 0x4a, 0x0f, 0x7e, 0xb4, 0x18, 0x1b, + 0x07, 0x92, 0x3d, 0x6e, 0xee, 0xbe, 0xb4, 0xc0, 0xf5, 0xe9, 0x16, 0xd4, 0x03, 0x9d, 0x39, 0x93, + 0x9c, 0xb8, 0x74, 0x4c, 0x89, 0x07, 0xff, 0x43, 0x1d, 0x70, 0x31, 0x67, 0x53, 0x16, 0xdc, 0x33, + 0xd8, 0x40, 0x57, 0x00, 0xf8, 0x54, 0x2a, 0xca, 0x26, 0x9a, 0x7a, 0xb0, 0x89, 0x20, 0xb8, 0xac, + 0x73, 0x86, 0x67, 0x04, 0xb6, 0x50, 0x17, 0xb4, 0x5d, 0xaa, 0x96, 0x55, 0xfa, 0xbf, 0xc1, 0x79, + 0x44, 0xba, 0x82, 0x72, 0x45, 0x03, 0x06, 0xcf, 0x0c, 0x0e, 0x7b, 0x9e, 0x20, 0x52, 0xc2, 0x73, + 0xd4, 0x06, 0x67, 0x5c, 0x50, 0x97, 0xc0, 0x0b, 0x34, 0x00, 0xbd, 0x71, 0x20, 0x66, 0x58, 0x29, + 0xe2, 0xe9, 0x4a, 0x7c, 0x66, 0x60, 0x74, 0x86, 0x27, 0x44, 0xcf, 0x85, 0x0f, 0xdb, 0xa8, 0x0f, + 0xba, 0x5c, 0x04, 0x9c, 0x08, 0xb5, 0xd4, 0x6a, 0xc9, 0x09, 0x04, 0x7f, 0x0f, 0x50, 0x2a, 0x1d, + 0xf4, 0x12, 0x0c, 0xdc, 0x80, 0x29, 0xf2, 0x5e, 0xcd, 0xb1, 0xaf, 0xa7, 0x64, 0x79, 0x1f, 0x08, + 0x4f, 0xc2, 0x4b, 0x33, 0xfb, 0x98, 0x32, 0xec, 0x1b, 0x98, 0x84, 0x5d, 0xf4, 0x02, 0xf4, 0xab, + 0x7c, 0x16, 0x8c, 0xa8, 0x4f, 0x2a, 0xf9, 0xca, 0x10, 0x95, 0xc0, 0xee, 0xd4, 0x20, 0x8d, 0x6d, + 0x0f, 0x3d, 0x07, 0x10, 0x33, 0x4f, 0x04, 0xd4, 0xd3, 0x98, 0x73, 0xed, 0x53, 0x36, 0x85, 0xd0, + 0xf8, 0x48, 0x3a, 0xa3, 0x3e, 0x16, 0xfa, 0xf8, 0x24, 0x12, 0xf6, 0x0d, 0x80, 0x06, 0xf2, 0xd8, + 0x8a, 0x0c, 0xa0, 0x56, 0xa4, 0x0a, 0x04, 0x31, 0x6f, 0x37, 0x18, 0xfd, 0x6a, 0x80, 0xdb, 0x87, + 0x7c, 0x63, 0xfd, 0x73, 0xdf, 0xa3, 0x37, 0xa7, 0x77, 0xc5, 0xcd, 0xca, 0x79, 0xe3, 0xc3, 0xe8, + 0x0f, 0x21, 0xc9, 0xd3, 0x30, 0x4b, 0xac, 0x7c, 0x97, 0xd8, 0x49, 0x9c, 0x95, 0x07, 0x51, 0x1f, + 0xe1, 0x76, 0x5d, 0x9c, 0xb8, 0xc9, 0x77, 0xe5, 0xf7, 0x6b, 0xb3, 0x35, 0xc1, 0xf8, 0x7b, 0xf3, + 0x66, 0x52, 0xa1, 0x70, 0x54, 0x58, 0x55, 0x68, 0xa2, 0xc5, 0xd0, 0x32, 0xa7, 0x53, 0xfc, 0xa8, + 0xeb, 0x2b, 0x1c, 0x15, 0xab, 0x43, 0x7d, 0xb5, 0x18, 0xae, 0xca, 0xfa, 0xcf, 0xe6, 0x6d, 0x25, + 0x3a, 0x0e, 0x8e, 0x0a, 0xc7, 0x39, 0x74, 0x38, 0xce, 0x62, 0xe8, 0x38, 0x65, 0xcf, 0xc7, 0xf3, + 0x72, 0xb0, 0xb7, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xcb, 0xfe, 0xe3, 0x31, 0x2b, 0x03, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/recommendation_type.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/recommendation_type.pb.go new file mode 100644 index 0000000..d61f823 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/recommendation_type.pb.go @@ -0,0 +1,183 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/recommendation_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Types of recommendations. +type RecommendationTypeEnum_RecommendationType int32 + +const ( + // Not specified. + RecommendationTypeEnum_UNSPECIFIED RecommendationTypeEnum_RecommendationType = 0 + // Used for return value only. Represents value unknown in this version. + RecommendationTypeEnum_UNKNOWN RecommendationTypeEnum_RecommendationType = 1 + // Budget recommendation for budget constrained campaigns. + RecommendationTypeEnum_CAMPAIGN_BUDGET RecommendationTypeEnum_RecommendationType = 2 + // Keyword recommendation. + RecommendationTypeEnum_KEYWORD RecommendationTypeEnum_RecommendationType = 3 + // Recommendation to add a new text ad. + RecommendationTypeEnum_TEXT_AD RecommendationTypeEnum_RecommendationType = 4 + // Recommendation to update a campaign to use a Target CPA bidding strategy. + RecommendationTypeEnum_TARGET_CPA_OPT_IN RecommendationTypeEnum_RecommendationType = 5 + // Recommendation to update a campaign to use the Maximize Conversions + // bidding strategy. + RecommendationTypeEnum_MAXIMIZE_CONVERSIONS_OPT_IN RecommendationTypeEnum_RecommendationType = 6 + // Recommendation to enable Enhanced Cost Per Click for a campaign. + RecommendationTypeEnum_ENHANCED_CPC_OPT_IN RecommendationTypeEnum_RecommendationType = 7 + // Recommendation to start showing your campaign's ads on Google Search + // Partners Websites. + RecommendationTypeEnum_SEARCH_PARTNERS_OPT_IN RecommendationTypeEnum_RecommendationType = 8 + // Recommendation to update a campaign to use a Maximize Clicks bidding + // strategy. + RecommendationTypeEnum_MAXIMIZE_CLICKS_OPT_IN RecommendationTypeEnum_RecommendationType = 9 + // Recommendation to start using the "Optimize" ad rotation setting for the + // given ad group. + RecommendationTypeEnum_OPTIMIZE_AD_ROTATION RecommendationTypeEnum_RecommendationType = 10 + // Recommendation to add callout extensions to a campaign. + RecommendationTypeEnum_CALLOUT_EXTENSION RecommendationTypeEnum_RecommendationType = 11 + // Recommendation to add sitelink extensions to a campaign. + RecommendationTypeEnum_SITELINK_EXTENSION RecommendationTypeEnum_RecommendationType = 12 + // Recommendation to add call extensions to a campaign. + RecommendationTypeEnum_CALL_EXTENSION RecommendationTypeEnum_RecommendationType = 13 + // Recommendation to change an existing keyword from one match type to a + // broader match type. 
+ RecommendationTypeEnum_KEYWORD_MATCH_TYPE RecommendationTypeEnum_RecommendationType = 14 + // Recommendation to move unused budget from one budget to a constrained + // budget. + RecommendationTypeEnum_MOVE_UNUSED_BUDGET RecommendationTypeEnum_RecommendationType = 15 +) + +var RecommendationTypeEnum_RecommendationType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CAMPAIGN_BUDGET", + 3: "KEYWORD", + 4: "TEXT_AD", + 5: "TARGET_CPA_OPT_IN", + 6: "MAXIMIZE_CONVERSIONS_OPT_IN", + 7: "ENHANCED_CPC_OPT_IN", + 8: "SEARCH_PARTNERS_OPT_IN", + 9: "MAXIMIZE_CLICKS_OPT_IN", + 10: "OPTIMIZE_AD_ROTATION", + 11: "CALLOUT_EXTENSION", + 12: "SITELINK_EXTENSION", + 13: "CALL_EXTENSION", + 14: "KEYWORD_MATCH_TYPE", + 15: "MOVE_UNUSED_BUDGET", +} +var RecommendationTypeEnum_RecommendationType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CAMPAIGN_BUDGET": 2, + "KEYWORD": 3, + "TEXT_AD": 4, + "TARGET_CPA_OPT_IN": 5, + "MAXIMIZE_CONVERSIONS_OPT_IN": 6, + "ENHANCED_CPC_OPT_IN": 7, + "SEARCH_PARTNERS_OPT_IN": 8, + "MAXIMIZE_CLICKS_OPT_IN": 9, + "OPTIMIZE_AD_ROTATION": 10, + "CALLOUT_EXTENSION": 11, + "SITELINK_EXTENSION": 12, + "CALL_EXTENSION": 13, + "KEYWORD_MATCH_TYPE": 14, + "MOVE_UNUSED_BUDGET": 15, +} + +func (x RecommendationTypeEnum_RecommendationType) String() string { + return proto.EnumName(RecommendationTypeEnum_RecommendationType_name, int32(x)) +} +func (RecommendationTypeEnum_RecommendationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_recommendation_type_5880b2dc3b350db2, []int{0, 0} +} + +// Container for enum describing types of recommendations. +type RecommendationTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecommendationTypeEnum) Reset() { *m = RecommendationTypeEnum{} } +func (m *RecommendationTypeEnum) String() string { return proto.CompactTextString(m) } +func (*RecommendationTypeEnum) ProtoMessage() {} +func (*RecommendationTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_type_5880b2dc3b350db2, []int{0} +} +func (m *RecommendationTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecommendationTypeEnum.Unmarshal(m, b) +} +func (m *RecommendationTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecommendationTypeEnum.Marshal(b, m, deterministic) +} +func (dst *RecommendationTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecommendationTypeEnum.Merge(dst, src) +} +func (m *RecommendationTypeEnum) XXX_Size() int { + return xxx_messageInfo_RecommendationTypeEnum.Size(m) +} +func (m *RecommendationTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RecommendationTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RecommendationTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RecommendationTypeEnum)(nil), "google.ads.googleads.v1.enums.RecommendationTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.RecommendationTypeEnum_RecommendationType", RecommendationTypeEnum_RecommendationType_name, RecommendationTypeEnum_RecommendationType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/recommendation_type.proto", fileDescriptor_recommendation_type_5880b2dc3b350db2) +} + +var fileDescriptor_recommendation_type_5880b2dc3b350db2 = []byte{ + // 493 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6e, 0xd3, 
0x30, + 0x14, 0x86, 0x69, 0x0b, 0x1b, 0xb8, 0xb0, 0x06, 0x0f, 0x3a, 0x34, 0x98, 0xd0, 0xf6, 0x00, 0x89, + 0x2a, 0x2e, 0x90, 0xc2, 0x95, 0xeb, 0x98, 0xd6, 0x6a, 0x6b, 0x5b, 0x89, 0xd3, 0x75, 0x53, 0x25, + 0xab, 0x2c, 0x51, 0x54, 0x69, 0x89, 0xa3, 0xa6, 0x9b, 0xb4, 0xd7, 0xe0, 0x11, 0xb8, 0xe4, 0x51, + 0x78, 0x14, 0xc4, 0x3d, 0xb7, 0xc8, 0xc9, 0x12, 0x86, 0x26, 0x76, 0x13, 0x9d, 0x9c, 0xef, 0xff, + 0x7f, 0x9d, 0xe4, 0x1c, 0xf0, 0x31, 0xd1, 0x3a, 0xb9, 0x8c, 0x9d, 0x55, 0x54, 0x38, 0x55, 0x69, + 0xaa, 0xeb, 0x81, 0x13, 0x67, 0x57, 0x69, 0xe1, 0x6c, 0xe2, 0x0b, 0x9d, 0xa6, 0x71, 0x16, 0xad, + 0xb6, 0x6b, 0x9d, 0xa9, 0xed, 0x4d, 0x1e, 0xdb, 0xf9, 0x46, 0x6f, 0x35, 0x3c, 0xaa, 0xd4, 0xf6, + 0x2a, 0x2a, 0xec, 0xc6, 0x68, 0x5f, 0x0f, 0xec, 0xd2, 0x78, 0xf8, 0xae, 0xce, 0xcd, 0xd7, 0xce, + 0x2a, 0xcb, 0xf4, 0xb6, 0x0c, 0x28, 0x2a, 0xf3, 0xc9, 0xd7, 0x0e, 0xe8, 0xfb, 0xff, 0x44, 0xcb, + 0x9b, 0x3c, 0x26, 0xd9, 0x55, 0x7a, 0xf2, 0xbb, 0x0d, 0xe0, 0x7d, 0x04, 0x7b, 0xa0, 0x1b, 0xb2, + 0x40, 0x10, 0x4c, 0x3f, 0x53, 0xe2, 0x59, 0x8f, 0x60, 0x17, 0xec, 0x86, 0x6c, 0xc2, 0xf8, 0x29, + 0xb3, 0x5a, 0x70, 0x1f, 0xf4, 0x30, 0x9a, 0x09, 0x44, 0x47, 0x4c, 0x0d, 0x43, 0x6f, 0x44, 0xa4, + 0xd5, 0x36, 0x8a, 0x09, 0x39, 0x3b, 0xe5, 0xbe, 0x67, 0x75, 0xcc, 0x8b, 0x24, 0x0b, 0xa9, 0x90, + 0x67, 0x3d, 0x86, 0xaf, 0xc1, 0x4b, 0x89, 0xfc, 0x11, 0x91, 0x0a, 0x0b, 0xa4, 0xb8, 0x90, 0x8a, + 0x32, 0xeb, 0x09, 0x7c, 0x0f, 0xde, 0xce, 0xd0, 0x82, 0xce, 0xe8, 0x39, 0x51, 0x98, 0xb3, 0x39, + 0xf1, 0x03, 0xca, 0x59, 0x50, 0x0b, 0x76, 0xe0, 0x01, 0xd8, 0x27, 0x6c, 0x8c, 0x18, 0x26, 0x9e, + 0xc2, 0x02, 0xd7, 0x60, 0x17, 0x1e, 0x82, 0x7e, 0x40, 0x90, 0x8f, 0xc7, 0x4a, 0x20, 0x5f, 0x32, + 0xe2, 0x37, 0xa6, 0xa7, 0x86, 0xfd, 0x4d, 0x9d, 0x52, 0x3c, 0x69, 0xd8, 0x33, 0xf8, 0x06, 0xbc, + 0xe2, 0x42, 0x56, 0x0c, 0x79, 0xca, 0xe7, 0x12, 0x49, 0xca, 0x99, 0x05, 0xcc, 0x88, 0x18, 0x4d, + 0xa7, 0x3c, 0x94, 0x8a, 0x2c, 0x24, 0x61, 0x66, 0x12, 0xab, 0x0b, 0xfb, 0x00, 0x06, 0x54, 0x92, + 0x29, 0x65, 0x93, 0x3b, 0xfd, 0xe7, 0x10, 0x82, 0x3d, 0x23, 0xbf, 0xd3, 0x7b, 0x61, 0xb4, 0xb7, + 0xdf, 0xaf, 0x66, 0x48, 0xe2, 0xb1, 0x92, 0x67, 0x82, 0x58, 0x7b, 0xa6, 0x3f, 0xe3, 0x73, 0xa2, + 0x42, 0x16, 0x06, 0xc4, 0xab, 0xff, 0x57, 0x6f, 0xf8, 0xab, 0x05, 0x8e, 0x2f, 0x74, 0x6a, 0x3f, + 0xb8, 0xd8, 0xe1, 0xc1, 0xfd, 0xe5, 0x08, 0xb3, 0x53, 0xd1, 0x3a, 0x1f, 0xde, 0x3a, 0x13, 0x7d, + 0xb9, 0xca, 0x12, 0x5b, 0x6f, 0x12, 0x27, 0x89, 0xb3, 0x72, 0xe3, 0xf5, 0x6d, 0xe5, 0xeb, 0xe2, + 0x3f, 0xa7, 0xf6, 0xa9, 0x7c, 0x7e, 0x6b, 0x77, 0x46, 0x08, 0x7d, 0x6f, 0x1f, 0x8d, 0xaa, 0x28, + 0x14, 0x15, 0x76, 0x55, 0x9a, 0x6a, 0x3e, 0xb0, 0xcd, 0x8d, 0x14, 0x3f, 0x6a, 0xbe, 0x44, 0x51, + 0xb1, 0x6c, 0xf8, 0x72, 0x3e, 0x58, 0x96, 0xfc, 0x67, 0xfb, 0xb8, 0x6a, 0xba, 0x2e, 0x8a, 0x0a, + 0xd7, 0x6d, 0x14, 0xae, 0x3b, 0x1f, 0xb8, 0x6e, 0xa9, 0xf9, 0xb2, 0x53, 0x0e, 0xf6, 0xe1, 0x4f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xd2, 0xbd, 0xe8, 0x07, 0x02, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_engine_results_page_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_engine_results_page_type.pb.go new file mode 100644 index 0000000..4fbe553 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_engine_results_page_type.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/search_engine_results_page_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of the search engine results page. +type SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType int32 + +const ( + // Not specified. + SearchEngineResultsPageTypeEnum_UNSPECIFIED SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType = 0 + // Used for return value only. Represents value unknown in this version. + SearchEngineResultsPageTypeEnum_UNKNOWN SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType = 1 + // Only ads were contained in the search engine results page. + SearchEngineResultsPageTypeEnum_ADS_ONLY SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType = 2 + // Only organic results were contained in the search engine results page. + SearchEngineResultsPageTypeEnum_ORGANIC_ONLY SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType = 3 + // Both ads and organic results were contained in the search engine results + // page. + SearchEngineResultsPageTypeEnum_ADS_AND_ORGANIC SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType = 4 +) + +var SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ADS_ONLY", + 3: "ORGANIC_ONLY", + 4: "ADS_AND_ORGANIC", +} +var SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ADS_ONLY": 2, + "ORGANIC_ONLY": 3, + "ADS_AND_ORGANIC": 4, +} + +func (x SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType) String() string { + return proto.EnumName(SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType_name, int32(x)) +} +func (SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_search_engine_results_page_type_8eeb6594cd669db9, []int{0, 0} +} + +// The type of the search engine results page. 
+type SearchEngineResultsPageTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchEngineResultsPageTypeEnum) Reset() { *m = SearchEngineResultsPageTypeEnum{} } +func (m *SearchEngineResultsPageTypeEnum) String() string { return proto.CompactTextString(m) } +func (*SearchEngineResultsPageTypeEnum) ProtoMessage() {} +func (*SearchEngineResultsPageTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_search_engine_results_page_type_8eeb6594cd669db9, []int{0} +} +func (m *SearchEngineResultsPageTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchEngineResultsPageTypeEnum.Unmarshal(m, b) +} +func (m *SearchEngineResultsPageTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchEngineResultsPageTypeEnum.Marshal(b, m, deterministic) +} +func (dst *SearchEngineResultsPageTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchEngineResultsPageTypeEnum.Merge(dst, src) +} +func (m *SearchEngineResultsPageTypeEnum) XXX_Size() int { + return xxx_messageInfo_SearchEngineResultsPageTypeEnum.Size(m) +} +func (m *SearchEngineResultsPageTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SearchEngineResultsPageTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchEngineResultsPageTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SearchEngineResultsPageTypeEnum)(nil), "google.ads.googleads.v1.enums.SearchEngineResultsPageTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType", SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType_name, SearchEngineResultsPageTypeEnum_SearchEngineResultsPageType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/search_engine_results_page_type.proto", fileDescriptor_search_engine_results_page_type_8eeb6594cd669db9) +} + +var fileDescriptor_search_engine_results_page_type_8eeb6594cd669db9 = []byte{ + // 339 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xd1, 0x4e, 0xc2, 0x30, + 0x14, 0x75, 0xc3, 0xa8, 0x29, 0x24, 0x2c, 0xf3, 0x4d, 0x25, 0x0a, 0x1f, 0xd0, 0x65, 0xf1, 0xad, + 0x3e, 0x15, 0x98, 0x84, 0x68, 0xca, 0x02, 0x82, 0xd1, 0x2c, 0x59, 0x2a, 0x6b, 0xea, 0x12, 0x68, + 0x9b, 0x75, 0x90, 0xf0, 0x1d, 0xfe, 0x81, 0x8f, 0x7e, 0x8a, 0x9f, 0xe2, 0xbb, 0xef, 0x66, 0x2d, + 0xf0, 0x26, 0x2f, 0xcd, 0xc9, 0x3d, 0xf7, 0x9e, 0x73, 0xcf, 0x2d, 0xe8, 0x71, 0x29, 0xf9, 0x82, + 0x05, 0x34, 0xd3, 0x81, 0x85, 0x15, 0x5a, 0x87, 0x01, 0x13, 0xab, 0xa5, 0x0e, 0x34, 0xa3, 0xc5, + 0xfc, 0x3d, 0x65, 0x82, 0xe7, 0x82, 0xa5, 0x05, 0xd3, 0xab, 0x45, 0xa9, 0x53, 0x45, 0x39, 0x4b, + 0xcb, 0x8d, 0x62, 0x50, 0x15, 0xb2, 0x94, 0x7e, 0xcb, 0x4e, 0x42, 0x9a, 0x69, 0xb8, 0x17, 0x81, + 0xeb, 0x10, 0x1a, 0x91, 0x8b, 0xab, 0x9d, 0x87, 0xca, 0x03, 0x2a, 0x84, 0x2c, 0x69, 0x99, 0x4b, + 0xa1, 0xed, 0x70, 0xe7, 0xc3, 0x01, 0xd7, 0x13, 0x63, 0x13, 0x19, 0x97, 0xb1, 0x35, 0x89, 0x29, + 0x67, 0x4f, 0x1b, 0xc5, 0x22, 0xb1, 0x5a, 0x76, 0x14, 0xb8, 0x3c, 0xd0, 0xe2, 0x37, 0x41, 0x7d, + 0x4a, 0x26, 0x71, 0xd4, 0x1b, 0xde, 0x0f, 0xa3, 0xbe, 0x77, 0xe4, 0xd7, 0xc1, 0xe9, 0x94, 0x3c, + 0x90, 0xd1, 0x33, 0xf1, 0x1c, 0xbf, 0x01, 0xce, 0x70, 0x7f, 0x92, 0x8e, 0xc8, 0xe3, 0x8b, 0xe7, + 0xfa, 0x1e, 0x68, 0x8c, 0xc6, 0x03, 0x4c, 0x86, 0x3d, 0x5b, 0xa9, 0xf9, 0xe7, 0xa0, 0x59, 0xf1, + 0x98, 0xf4, 0xd3, 0x2d, 0xe3, 0x1d, 0x77, 0x7f, 0x1d, 0xd0, 0x9e, 0xcb, 
0x25, 0x3c, 0x98, 0xac, + 0x7b, 0x73, 0x60, 0xab, 0xb8, 0x4a, 0x17, 0x3b, 0xaf, 0xdd, 0xad, 0x04, 0x97, 0x0b, 0x2a, 0x38, + 0x94, 0x05, 0x0f, 0x38, 0x13, 0x26, 0xfb, 0xee, 0xe2, 0x2a, 0xd7, 0xff, 0x7c, 0xc0, 0x9d, 0x79, + 0x3f, 0xdd, 0xda, 0x00, 0xe3, 0x2f, 0xb7, 0x35, 0xb0, 0x52, 0x38, 0xd3, 0xd0, 0xc2, 0x0a, 0xcd, + 0x42, 0x58, 0x1d, 0x49, 0x7f, 0xef, 0xf8, 0x04, 0x67, 0x3a, 0xd9, 0xf3, 0xc9, 0x2c, 0x4c, 0x0c, + 0xff, 0xe3, 0xb6, 0x6d, 0x11, 0x21, 0x9c, 0x69, 0x84, 0xf6, 0x1d, 0x08, 0xcd, 0x42, 0x84, 0x4c, + 0xcf, 0xdb, 0x89, 0x59, 0xec, 0xf6, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x36, 0xa1, 0xbf, 0x51, 0x18, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_match_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_match_type.pb.go new file mode 100644 index 0000000..a054ffd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_match_type.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/search_term_match_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible match types for a keyword triggering an ad, including variants. +type SearchTermMatchTypeEnum_SearchTermMatchType int32 + +const ( + // Not specified. + SearchTermMatchTypeEnum_UNSPECIFIED SearchTermMatchTypeEnum_SearchTermMatchType = 0 + // Used for return value only. Represents value unknown in this version. + SearchTermMatchTypeEnum_UNKNOWN SearchTermMatchTypeEnum_SearchTermMatchType = 1 + // Broad match. + SearchTermMatchTypeEnum_BROAD SearchTermMatchTypeEnum_SearchTermMatchType = 2 + // Exact match. + SearchTermMatchTypeEnum_EXACT SearchTermMatchTypeEnum_SearchTermMatchType = 3 + // Phrase match. + SearchTermMatchTypeEnum_PHRASE SearchTermMatchTypeEnum_SearchTermMatchType = 4 + // Exact match (close variant). + SearchTermMatchTypeEnum_NEAR_EXACT SearchTermMatchTypeEnum_SearchTermMatchType = 5 + // Phrase match (close variant). 
+ SearchTermMatchTypeEnum_NEAR_PHRASE SearchTermMatchTypeEnum_SearchTermMatchType = 6 +) + +var SearchTermMatchTypeEnum_SearchTermMatchType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BROAD", + 3: "EXACT", + 4: "PHRASE", + 5: "NEAR_EXACT", + 6: "NEAR_PHRASE", +} +var SearchTermMatchTypeEnum_SearchTermMatchType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BROAD": 2, + "EXACT": 3, + "PHRASE": 4, + "NEAR_EXACT": 5, + "NEAR_PHRASE": 6, +} + +func (x SearchTermMatchTypeEnum_SearchTermMatchType) String() string { + return proto.EnumName(SearchTermMatchTypeEnum_SearchTermMatchType_name, int32(x)) +} +func (SearchTermMatchTypeEnum_SearchTermMatchType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_search_term_match_type_20ffb145f6a71e1b, []int{0, 0} +} + +// Container for enum describing match types for a keyword triggering an ad. +type SearchTermMatchTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchTermMatchTypeEnum) Reset() { *m = SearchTermMatchTypeEnum{} } +func (m *SearchTermMatchTypeEnum) String() string { return proto.CompactTextString(m) } +func (*SearchTermMatchTypeEnum) ProtoMessage() {} +func (*SearchTermMatchTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_search_term_match_type_20ffb145f6a71e1b, []int{0} +} +func (m *SearchTermMatchTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchTermMatchTypeEnum.Unmarshal(m, b) +} +func (m *SearchTermMatchTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchTermMatchTypeEnum.Marshal(b, m, deterministic) +} +func (dst *SearchTermMatchTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchTermMatchTypeEnum.Merge(dst, src) +} +func (m *SearchTermMatchTypeEnum) XXX_Size() int { + return xxx_messageInfo_SearchTermMatchTypeEnum.Size(m) +} +func (m *SearchTermMatchTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SearchTermMatchTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchTermMatchTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SearchTermMatchTypeEnum)(nil), "google.ads.googleads.v1.enums.SearchTermMatchTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SearchTermMatchTypeEnum_SearchTermMatchType", SearchTermMatchTypeEnum_SearchTermMatchType_name, SearchTermMatchTypeEnum_SearchTermMatchType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/search_term_match_type.proto", fileDescriptor_search_term_match_type_20ffb145f6a71e1b) +} + +var fileDescriptor_search_term_match_type_20ffb145f6a71e1b = []byte{ + // 339 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xdf, 0x4a, 0xf3, 0x30, + 0x18, 0xc6, 0xbf, 0x76, 0xdf, 0x26, 0x66, 0xa0, 0xa5, 0x1e, 0x28, 0xe2, 0x0e, 0xb6, 0x0b, 0x48, + 0x29, 0x9e, 0xc5, 0xa3, 0x74, 0xab, 0x73, 0x88, 0x5d, 0xd9, 0x3f, 0x45, 0x0a, 0x23, 0xae, 0xa1, + 0x0e, 0x96, 0xa4, 0x34, 0xdd, 0x60, 0x97, 0xe1, 0x2d, 0x78, 0xe8, 0xa5, 0x78, 0x29, 0x82, 0xf7, + 0x20, 0x49, 0xb6, 0x1e, 0x4d, 0x4f, 0xca, 0xaf, 0x79, 0xde, 0xe7, 0xe1, 0x7d, 0x1f, 0x80, 0x32, + 0x21, 0xb2, 0x15, 0xf5, 0x48, 0x2a, 0x3d, 0x83, 0x8a, 0x36, 0xbe, 0x47, 0xf9, 0x9a, 0x49, 0x4f, + 0x52, 0x52, 0x2c, 0x5e, 0xe7, 0x25, 0x2d, 0xd8, 0x9c, 0x91, 0x52, 0xe1, 0x36, 0xa7, 0x30, 0x2f, + 0x44, 0x29, 0xdc, 0x96, 0x31, 0x40, 0x92, 0x4a, 0x58, 0x79, 0xe1, 0xc6, 0x87, 0xda, 0x7b, 0x79, + 0xb5, 
0x8f, 0xce, 0x97, 0x1e, 0xe1, 0x5c, 0x94, 0xa4, 0x5c, 0x0a, 0x2e, 0x8d, 0xb9, 0xf3, 0x66, + 0x81, 0xf3, 0xb1, 0x4e, 0x9f, 0xd0, 0x82, 0x3d, 0xa8, 0xec, 0xc9, 0x36, 0xa7, 0x21, 0x5f, 0xb3, + 0xce, 0x06, 0x9c, 0x1d, 0x90, 0xdc, 0x53, 0xd0, 0x9c, 0x46, 0xe3, 0x38, 0xec, 0x0e, 0x6e, 0x07, + 0x61, 0xcf, 0xf9, 0xe7, 0x36, 0xc1, 0xd1, 0x34, 0xba, 0x8f, 0x86, 0x8f, 0x91, 0x63, 0xb9, 0xc7, + 0xa0, 0x1e, 0x8c, 0x86, 0xb8, 0xe7, 0xd8, 0x0a, 0xc3, 0x27, 0xdc, 0x9d, 0x38, 0x35, 0x17, 0x80, + 0x46, 0x7c, 0x37, 0xc2, 0xe3, 0xd0, 0xf9, 0xef, 0x9e, 0x00, 0x10, 0x85, 0x78, 0x34, 0x37, 0x5a, + 0x5d, 0xe5, 0xe9, 0xff, 0xdd, 0x40, 0x23, 0xf8, 0xb6, 0x40, 0x7b, 0x21, 0x18, 0xfc, 0xf3, 0xae, + 0xe0, 0xe2, 0xc0, 0x6e, 0xb1, 0xba, 0x29, 0xb6, 0x9e, 0x83, 0x9d, 0x35, 0x13, 0x2b, 0xc2, 0x33, + 0x28, 0x8a, 0xcc, 0xcb, 0x28, 0xd7, 0x17, 0xef, 0xeb, 0xcd, 0x97, 0xf2, 0x97, 0xb6, 0x6f, 0xf4, + 0xf7, 0xdd, 0xae, 0xf5, 0x31, 0xfe, 0xb0, 0x5b, 0x7d, 0x13, 0x85, 0x53, 0x09, 0x0d, 0x2a, 0x9a, + 0xf9, 0x50, 0x55, 0x24, 0x3f, 0xf7, 0x7a, 0x82, 0x53, 0x99, 0x54, 0x7a, 0x32, 0xf3, 0x13, 0xad, + 0x7f, 0xd9, 0x6d, 0xf3, 0x88, 0x10, 0x4e, 0x25, 0x42, 0xd5, 0x04, 0x42, 0x33, 0x1f, 0x21, 0x3d, + 0xf3, 0xd2, 0xd0, 0x8b, 0x5d, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x9a, 0x3f, 0x40, 0xe6, 0x05, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_targeting_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_targeting_status.pb.go new file mode 100644 index 0000000..397b847 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/search_term_targeting_status.pb.go @@ -0,0 +1,129 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/search_term_targeting_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates whether the search term is one of your targeted or excluded +// keywords. +type SearchTermTargetingStatusEnum_SearchTermTargetingStatus int32 + +const ( + // Not specified. + SearchTermTargetingStatusEnum_UNSPECIFIED SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 0 + // Used for return value only. Represents value unknown in this version. + SearchTermTargetingStatusEnum_UNKNOWN SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 1 + // Search term is added to targeted keywords. + SearchTermTargetingStatusEnum_ADDED SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 2 + // Search term matches a negative keyword. + SearchTermTargetingStatusEnum_EXCLUDED SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 3 + // Search term has been both added and excluded. + SearchTermTargetingStatusEnum_ADDED_EXCLUDED SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 4 + // Search term is neither targeted nor excluded. 
+ SearchTermTargetingStatusEnum_NONE SearchTermTargetingStatusEnum_SearchTermTargetingStatus = 5 +) + +var SearchTermTargetingStatusEnum_SearchTermTargetingStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ADDED", + 3: "EXCLUDED", + 4: "ADDED_EXCLUDED", + 5: "NONE", +} +var SearchTermTargetingStatusEnum_SearchTermTargetingStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ADDED": 2, + "EXCLUDED": 3, + "ADDED_EXCLUDED": 4, + "NONE": 5, +} + +func (x SearchTermTargetingStatusEnum_SearchTermTargetingStatus) String() string { + return proto.EnumName(SearchTermTargetingStatusEnum_SearchTermTargetingStatus_name, int32(x)) +} +func (SearchTermTargetingStatusEnum_SearchTermTargetingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_search_term_targeting_status_521500b33031bfcc, []int{0, 0} +} + +// Container for enum indicating whether a search term is one of your targeted +// or excluded keywords. +type SearchTermTargetingStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchTermTargetingStatusEnum) Reset() { *m = SearchTermTargetingStatusEnum{} } +func (m *SearchTermTargetingStatusEnum) String() string { return proto.CompactTextString(m) } +func (*SearchTermTargetingStatusEnum) ProtoMessage() {} +func (*SearchTermTargetingStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_search_term_targeting_status_521500b33031bfcc, []int{0} +} +func (m *SearchTermTargetingStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchTermTargetingStatusEnum.Unmarshal(m, b) +} +func (m *SearchTermTargetingStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchTermTargetingStatusEnum.Marshal(b, m, deterministic) +} +func (dst *SearchTermTargetingStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchTermTargetingStatusEnum.Merge(dst, src) +} +func (m *SearchTermTargetingStatusEnum) XXX_Size() int { + return xxx_messageInfo_SearchTermTargetingStatusEnum.Size(m) +} +func (m *SearchTermTargetingStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SearchTermTargetingStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchTermTargetingStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SearchTermTargetingStatusEnum)(nil), "google.ads.googleads.v1.enums.SearchTermTargetingStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SearchTermTargetingStatusEnum_SearchTermTargetingStatus", SearchTermTargetingStatusEnum_SearchTermTargetingStatus_name, SearchTermTargetingStatusEnum_SearchTermTargetingStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/search_term_targeting_status.proto", fileDescriptor_search_term_targeting_status_521500b33031bfcc) +} + +var fileDescriptor_search_term_targeting_status_521500b33031bfcc = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4b, 0x4e, 0xc3, 0x30, + 0x10, 0x25, 0x69, 0x0b, 0xc5, 0x45, 0x10, 0x79, 0x07, 0xa2, 0x48, 0xed, 0x01, 0x1c, 0x45, 0xec, + 0xcc, 0x86, 0xb4, 0x09, 0x55, 0x05, 0x4a, 0x2b, 0xf5, 0x03, 0x42, 0x91, 0x22, 0xd3, 0x58, 0x26, + 0x52, 0x63, 0x47, 0xb1, 0xdb, 0x7b, 0x70, 0x05, 0x96, 0x1c, 0x85, 0xa3, 0xb0, 0xe5, 0x02, 0x28, + 0x4e, 0x93, 0x5d, 0xd8, 0x58, 0xcf, 0xf3, 0x66, 0xde, 0x9b, 0x79, 0xe0, 0x9e, 0x09, 0xc1, 0xb6, + 0xd4, 0x26, 0xb1, 0xb4, 0x4b, 0x58, 0xa0, 0xbd, 
0x63, 0x53, 0xbe, 0x4b, 0xa5, 0x2d, 0x29, 0xc9, + 0x37, 0xef, 0x91, 0xa2, 0x79, 0x1a, 0x29, 0x92, 0x33, 0xaa, 0x12, 0xce, 0x22, 0xa9, 0x88, 0xda, + 0x49, 0x94, 0xe5, 0x42, 0x09, 0xd8, 0x2f, 0xc7, 0x10, 0x89, 0x25, 0xaa, 0x15, 0xd0, 0xde, 0x41, + 0x5a, 0xe1, 0xea, 0xba, 0x32, 0xc8, 0x12, 0x9b, 0x70, 0x2e, 0x14, 0x51, 0x89, 0xe0, 0x87, 0xe1, + 0xe1, 0x87, 0x01, 0xfa, 0x0b, 0xed, 0xb1, 0xa4, 0x79, 0xba, 0xac, 0x1c, 0x16, 0xda, 0xc0, 0xe7, + 0xbb, 0x74, 0x98, 0x81, 0xcb, 0xc6, 0x06, 0x78, 0x01, 0x7a, 0xab, 0x60, 0x31, 0xf7, 0xc7, 0xd3, + 0x87, 0xa9, 0xef, 0x59, 0x47, 0xb0, 0x07, 0x4e, 0x56, 0xc1, 0x63, 0x30, 0x7b, 0x0e, 0x2c, 0x03, + 0x9e, 0x82, 0x8e, 0xeb, 0x79, 0xbe, 0x67, 0x99, 0xf0, 0x0c, 0x74, 0xfd, 0x97, 0xf1, 0xd3, 0xaa, + 0xf8, 0xb5, 0x20, 0x04, 0xe7, 0x9a, 0x88, 0xea, 0x5a, 0x1b, 0x76, 0x41, 0x3b, 0x98, 0x05, 0xbe, + 0xd5, 0x19, 0xfd, 0x1a, 0x60, 0xb0, 0x11, 0x29, 0xfa, 0xf7, 0xae, 0xd1, 0x4d, 0xe3, 0x56, 0xf3, + 0xe2, 0xb2, 0xb9, 0xf1, 0x3a, 0x3a, 0x08, 0x30, 0xb1, 0x25, 0x9c, 0x21, 0x91, 0x33, 0x9b, 0x51, + 0xae, 0xef, 0xae, 0xa2, 0xce, 0x12, 0xd9, 0x90, 0xfc, 0x9d, 0x7e, 0x3f, 0xcd, 0xd6, 0xc4, 0x75, + 0xbf, 0xcc, 0xfe, 0xa4, 0x94, 0x72, 0x63, 0x89, 0x4a, 0x58, 0xa0, 0xb5, 0x83, 0x8a, 0x88, 0xe4, + 0x77, 0xc5, 0x87, 0x6e, 0x2c, 0xc3, 0x9a, 0x0f, 0xd7, 0x4e, 0xa8, 0xf9, 0x1f, 0x73, 0x50, 0x16, + 0x31, 0x76, 0x63, 0x89, 0x71, 0xdd, 0x81, 0xf1, 0xda, 0xc1, 0x58, 0xf7, 0xbc, 0x1d, 0xeb, 0xc5, + 0x6e, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x13, 0xc5, 0xfd, 0x2b, 0x11, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/served_asset_field_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/served_asset_field_type.pb.go new file mode 100644 index 0000000..d0af91d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/served_asset_field_type.pb.go @@ -0,0 +1,134 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/served_asset_field_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible asset field types. +type ServedAssetFieldTypeEnum_ServedAssetFieldType int32 + +const ( + // No value has been specified. + ServedAssetFieldTypeEnum_UNSPECIFIED ServedAssetFieldTypeEnum_ServedAssetFieldType = 0 + // The received value is not known in this version. + // + // This is a response-only value. + ServedAssetFieldTypeEnum_UNKNOWN ServedAssetFieldTypeEnum_ServedAssetFieldType = 1 + // The asset is used in headline 1. + ServedAssetFieldTypeEnum_HEADLINE_1 ServedAssetFieldTypeEnum_ServedAssetFieldType = 2 + // The asset is used in headline 2. + ServedAssetFieldTypeEnum_HEADLINE_2 ServedAssetFieldTypeEnum_ServedAssetFieldType = 3 + // The asset is used in headline 3. 
+ ServedAssetFieldTypeEnum_HEADLINE_3 ServedAssetFieldTypeEnum_ServedAssetFieldType = 4 + // The asset is used in description 1. + ServedAssetFieldTypeEnum_DESCRIPTION_1 ServedAssetFieldTypeEnum_ServedAssetFieldType = 5 + // The asset is used in description 2. + ServedAssetFieldTypeEnum_DESCRIPTION_2 ServedAssetFieldTypeEnum_ServedAssetFieldType = 6 +) + +var ServedAssetFieldTypeEnum_ServedAssetFieldType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "HEADLINE_1", + 3: "HEADLINE_2", + 4: "HEADLINE_3", + 5: "DESCRIPTION_1", + 6: "DESCRIPTION_2", +} +var ServedAssetFieldTypeEnum_ServedAssetFieldType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "HEADLINE_1": 2, + "HEADLINE_2": 3, + "HEADLINE_3": 4, + "DESCRIPTION_1": 5, + "DESCRIPTION_2": 6, +} + +func (x ServedAssetFieldTypeEnum_ServedAssetFieldType) String() string { + return proto.EnumName(ServedAssetFieldTypeEnum_ServedAssetFieldType_name, int32(x)) +} +func (ServedAssetFieldTypeEnum_ServedAssetFieldType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_served_asset_field_type_a333abd6d211e399, []int{0, 0} +} + +// Container for enum describing possible asset field types. +type ServedAssetFieldTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServedAssetFieldTypeEnum) Reset() { *m = ServedAssetFieldTypeEnum{} } +func (m *ServedAssetFieldTypeEnum) String() string { return proto.CompactTextString(m) } +func (*ServedAssetFieldTypeEnum) ProtoMessage() {} +func (*ServedAssetFieldTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_served_asset_field_type_a333abd6d211e399, []int{0} +} +func (m *ServedAssetFieldTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServedAssetFieldTypeEnum.Unmarshal(m, b) +} +func (m *ServedAssetFieldTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServedAssetFieldTypeEnum.Marshal(b, m, deterministic) +} +func (dst *ServedAssetFieldTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServedAssetFieldTypeEnum.Merge(dst, src) +} +func (m *ServedAssetFieldTypeEnum) XXX_Size() int { + return xxx_messageInfo_ServedAssetFieldTypeEnum.Size(m) +} +func (m *ServedAssetFieldTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ServedAssetFieldTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ServedAssetFieldTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ServedAssetFieldTypeEnum)(nil), "google.ads.googleads.v1.enums.ServedAssetFieldTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.ServedAssetFieldTypeEnum_ServedAssetFieldType", ServedAssetFieldTypeEnum_ServedAssetFieldType_name, ServedAssetFieldTypeEnum_ServedAssetFieldType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/served_asset_field_type.proto", fileDescriptor_served_asset_field_type_a333abd6d211e399) +} + +var fileDescriptor_served_asset_field_type_a333abd6d211e399 = []byte{ + // 344 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xdd, 0x4a, 0xc3, 0x30, + 0x14, 0xb6, 0x9d, 0x4e, 0xc8, 0x50, 0x63, 0xf1, 0x42, 0xc5, 0x5d, 0x6c, 0x0f, 0x90, 0x92, 0xed, + 0x2e, 0xbb, 0xca, 0xb6, 0x6e, 0x16, 0xa5, 0x2b, 0xee, 0x47, 0x90, 0x42, 0xa9, 0x26, 0x96, 0xc2, + 0x96, 0x94, 0xa5, 0x1b, 0xec, 0x15, 0x7c, 0x0c, 0x6f, 0x04, 0x1f, 0xc5, 0x47, 0xf1, 0xc2, 0x67, + 0x90, 0xa6, 0x6e, 0x30, 0x99, 0xde, 0x84, 0xef, 0x9c, 0xef, 0x7c, 0x1f, 
0xe7, 0x7c, 0x01, 0xad, + 0x58, 0xca, 0x78, 0xca, 0xed, 0x88, 0x29, 0xbb, 0x80, 0x39, 0x5a, 0x62, 0x9b, 0x8b, 0xc5, 0x4c, + 0xd9, 0x8a, 0xcf, 0x97, 0x9c, 0x85, 0x91, 0x52, 0x3c, 0x0b, 0x9f, 0x13, 0x3e, 0x65, 0x61, 0xb6, + 0x4a, 0x39, 0x4a, 0xe7, 0x32, 0x93, 0x56, 0xb5, 0x50, 0xa0, 0x88, 0x29, 0xb4, 0x11, 0xa3, 0x25, + 0x46, 0x5a, 0x7c, 0x79, 0xb5, 0xf6, 0x4e, 0x13, 0x3b, 0x12, 0x42, 0x66, 0x51, 0x96, 0x48, 0xa1, + 0x0a, 0x71, 0xfd, 0xcd, 0x00, 0xe7, 0x43, 0x6d, 0x4f, 0x73, 0xf7, 0x5e, 0x6e, 0x3e, 0x5a, 0xa5, + 0xdc, 0x11, 0x8b, 0x59, 0xfd, 0xc5, 0x00, 0x67, 0xbb, 0x48, 0xeb, 0x04, 0x54, 0xc6, 0xde, 0xd0, + 0x77, 0x3a, 0x6e, 0xcf, 0x75, 0xba, 0x70, 0xcf, 0xaa, 0x80, 0xc3, 0xb1, 0x77, 0xe3, 0x0d, 0xee, + 0x3d, 0x68, 0x58, 0xc7, 0x00, 0x5c, 0x3b, 0xb4, 0x7b, 0xeb, 0x7a, 0x4e, 0x88, 0xa1, 0xb9, 0x55, + 0x37, 0x60, 0x69, 0xab, 0x6e, 0xc2, 0x7d, 0xeb, 0x14, 0x1c, 0x75, 0x9d, 0x61, 0xe7, 0xce, 0xf5, + 0x47, 0xee, 0xc0, 0x0b, 0x31, 0x3c, 0xf8, 0xdd, 0x6a, 0xc0, 0x72, 0xfb, 0xcb, 0x00, 0xb5, 0x27, + 0x39, 0x43, 0xff, 0x5e, 0xdb, 0xbe, 0xd8, 0xb5, 0xaf, 0x9f, 0x9f, 0xea, 0x1b, 0x0f, 0xed, 0x1f, + 0x6d, 0x2c, 0xa7, 0x91, 0x88, 0x91, 0x9c, 0xc7, 0x76, 0xcc, 0x85, 0x0e, 0x62, 0x1d, 0x7b, 0x9a, + 0xa8, 0x3f, 0x7e, 0xa1, 0xa5, 0xdf, 0x57, 0xb3, 0xd4, 0xa7, 0xf4, 0xdd, 0xac, 0xf6, 0x0b, 0x2b, + 0xca, 0x14, 0x2a, 0x60, 0x8e, 0x26, 0x18, 0xe5, 0xc1, 0xa9, 0x8f, 0x35, 0x1f, 0x50, 0xa6, 0x82, + 0x0d, 0x1f, 0x4c, 0x70, 0xa0, 0xf9, 0x4f, 0xb3, 0x56, 0x34, 0x09, 0xa1, 0x4c, 0x11, 0xb2, 0x99, + 0x20, 0x64, 0x82, 0x09, 0xd1, 0x33, 0x8f, 0x65, 0xbd, 0x58, 0xf3, 0x3b, 0x00, 0x00, 0xff, 0xff, + 0xa3, 0x30, 0x0c, 0x9b, 0x1d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_status.pb.go new file mode 100644 index 0000000..d29fc65 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_status.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/shared_set_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible shared set statuses. +type SharedSetStatusEnum_SharedSetStatus int32 + +const ( + // Not specified. + SharedSetStatusEnum_UNSPECIFIED SharedSetStatusEnum_SharedSetStatus = 0 + // Used for return value only. Represents value unknown in this version. + SharedSetStatusEnum_UNKNOWN SharedSetStatusEnum_SharedSetStatus = 1 + // The shared set is enabled. + SharedSetStatusEnum_ENABLED SharedSetStatusEnum_SharedSetStatus = 2 + // The shared set is removed and can no longer be used. 
+ SharedSetStatusEnum_REMOVED SharedSetStatusEnum_SharedSetStatus = 3 +) + +var SharedSetStatusEnum_SharedSetStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "REMOVED", +} +var SharedSetStatusEnum_SharedSetStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "REMOVED": 3, +} + +func (x SharedSetStatusEnum_SharedSetStatus) String() string { + return proto.EnumName(SharedSetStatusEnum_SharedSetStatus_name, int32(x)) +} +func (SharedSetStatusEnum_SharedSetStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_shared_set_status_513c75a1a01c6520, []int{0, 0} +} + +// Container for enum describing types of shared set statuses. +type SharedSetStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedSetStatusEnum) Reset() { *m = SharedSetStatusEnum{} } +func (m *SharedSetStatusEnum) String() string { return proto.CompactTextString(m) } +func (*SharedSetStatusEnum) ProtoMessage() {} +func (*SharedSetStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_status_513c75a1a01c6520, []int{0} +} +func (m *SharedSetStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedSetStatusEnum.Unmarshal(m, b) +} +func (m *SharedSetStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedSetStatusEnum.Marshal(b, m, deterministic) +} +func (dst *SharedSetStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedSetStatusEnum.Merge(dst, src) +} +func (m *SharedSetStatusEnum) XXX_Size() int { + return xxx_messageInfo_SharedSetStatusEnum.Size(m) +} +func (m *SharedSetStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SharedSetStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedSetStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SharedSetStatusEnum)(nil), "google.ads.googleads.v1.enums.SharedSetStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SharedSetStatusEnum_SharedSetStatus", SharedSetStatusEnum_SharedSetStatus_name, SharedSetStatusEnum_SharedSetStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/shared_set_status.proto", fileDescriptor_shared_set_status_513c75a1a01c6520) +} + +var fileDescriptor_shared_set_status_513c75a1a01c6520 = []byte{ + // 301 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x4d, 0x4a, 0x03, 0x31, + 0x14, 0xb6, 0x53, 0x50, 0x48, 0x17, 0x1d, 0xaa, 0x2b, 0xb1, 0x8b, 0xf6, 0x00, 0x09, 0x83, 0xb8, + 0x89, 0xab, 0x8c, 0x8d, 0xa5, 0xa8, 0xd3, 0xe2, 0xd0, 0x11, 0x64, 0xa0, 0x46, 0x13, 0x62, 0xa1, + 0x4d, 0x4a, 0x5f, 0xda, 0x03, 0xb9, 0xf4, 0x28, 0xde, 0x44, 0x4f, 0x21, 0xc9, 0xb4, 0xb3, 0x28, + 0xe8, 0x66, 0xf8, 0xde, 0xfb, 0x7e, 0xe6, 0xcb, 0x43, 0x57, 0xda, 0x5a, 0xbd, 0x50, 0x44, 0x48, + 0x20, 0x15, 0xf4, 0x68, 0x9b, 0x10, 0x65, 0x36, 0x4b, 0x20, 0xf0, 0x2e, 0xd6, 0x4a, 0xce, 0x40, + 0xb9, 0x19, 0x38, 0xe1, 0x36, 0x80, 0x57, 0x6b, 0xeb, 0x6c, 0xa7, 0x5b, 0x69, 0xb1, 0x90, 0x80, + 0x6b, 0x1b, 0xde, 0x26, 0x38, 0xd8, 0xce, 0x2f, 0xf6, 0xa9, 0xab, 0x39, 0x11, 0xc6, 0x58, 0x27, + 0xdc, 0xdc, 0x9a, 0x9d, 0xb9, 0xff, 0x82, 0x4e, 0xf3, 0x90, 0x9b, 0x2b, 0x97, 0x87, 0x54, 0x6e, + 0x36, 0xcb, 0xfe, 0x08, 0xb5, 0x0f, 0xd6, 0x9d, 0x36, 0x6a, 0x4d, 0xb3, 0x7c, 0xc2, 0x6f, 0x46, + 0xb7, 0x23, 0x3e, 0x88, 0x8f, 0x3a, 0x2d, 0x74, 0x32, 0xcd, 0xee, 0xb2, 0xf1, 0x53, 0x16, 0x37, + 0xfc, 
0xc0, 0x33, 0x96, 0xde, 0xf3, 0x41, 0x1c, 0xf9, 0xe1, 0x91, 0x3f, 0x8c, 0x0b, 0x3e, 0x88, + 0x9b, 0xe9, 0x77, 0x03, 0xf5, 0xde, 0xec, 0x12, 0xff, 0xdb, 0x32, 0x3d, 0x3b, 0xf8, 0xdd, 0xc4, + 0xb7, 0x9b, 0x34, 0x9e, 0xd3, 0x9d, 0x4d, 0xdb, 0x85, 0x30, 0x1a, 0xdb, 0xb5, 0x26, 0x5a, 0x99, + 0xd0, 0x7d, 0x7f, 0xa3, 0xd5, 0x1c, 0xfe, 0x38, 0xd9, 0x75, 0xf8, 0x7e, 0x44, 0xcd, 0x21, 0x63, + 0x9f, 0x51, 0x77, 0x58, 0x45, 0x31, 0x09, 0xb8, 0x82, 0x1e, 0x15, 0x09, 0xf6, 0x2f, 0x86, 0xaf, + 0x3d, 0x5f, 0x32, 0x09, 0x65, 0xcd, 0x97, 0x45, 0x52, 0x06, 0xfe, 0x27, 0xea, 0x55, 0x4b, 0x4a, + 0x99, 0x04, 0x4a, 0x6b, 0x05, 0xa5, 0x45, 0x42, 0x69, 0xd0, 0xbc, 0x1e, 0x87, 0x62, 0x97, 0xbf, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x1e, 0x08, 0x0a, 0xef, 0xca, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_type.pb.go new file mode 100644 index 0000000..151b2e6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/shared_set_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/shared_set_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible shared set types. +type SharedSetTypeEnum_SharedSetType int32 + +const ( + // Not specified. + SharedSetTypeEnum_UNSPECIFIED SharedSetTypeEnum_SharedSetType = 0 + // Used for return value only. Represents value unknown in this version. + SharedSetTypeEnum_UNKNOWN SharedSetTypeEnum_SharedSetType = 1 + // A set of keywords that can be excluded from targeting. + SharedSetTypeEnum_NEGATIVE_KEYWORDS SharedSetTypeEnum_SharedSetType = 2 + // A set of placements that can be excluded from targeting. + SharedSetTypeEnum_NEGATIVE_PLACEMENTS SharedSetTypeEnum_SharedSetType = 3 +) + +var SharedSetTypeEnum_SharedSetType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NEGATIVE_KEYWORDS", + 3: "NEGATIVE_PLACEMENTS", +} +var SharedSetTypeEnum_SharedSetType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NEGATIVE_KEYWORDS": 2, + "NEGATIVE_PLACEMENTS": 3, +} + +func (x SharedSetTypeEnum_SharedSetType) String() string { + return proto.EnumName(SharedSetTypeEnum_SharedSetType_name, int32(x)) +} +func (SharedSetTypeEnum_SharedSetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_shared_set_type_6a9f5b8cceba110f, []int{0, 0} +} + +// Container for enum describing types of shared sets. 
+type SharedSetTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedSetTypeEnum) Reset() { *m = SharedSetTypeEnum{} } +func (m *SharedSetTypeEnum) String() string { return proto.CompactTextString(m) } +func (*SharedSetTypeEnum) ProtoMessage() {} +func (*SharedSetTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_type_6a9f5b8cceba110f, []int{0} +} +func (m *SharedSetTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedSetTypeEnum.Unmarshal(m, b) +} +func (m *SharedSetTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedSetTypeEnum.Marshal(b, m, deterministic) +} +func (dst *SharedSetTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedSetTypeEnum.Merge(dst, src) +} +func (m *SharedSetTypeEnum) XXX_Size() int { + return xxx_messageInfo_SharedSetTypeEnum.Size(m) +} +func (m *SharedSetTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SharedSetTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedSetTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SharedSetTypeEnum)(nil), "google.ads.googleads.v1.enums.SharedSetTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SharedSetTypeEnum_SharedSetType", SharedSetTypeEnum_SharedSetType_name, SharedSetTypeEnum_SharedSetType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/shared_set_type.proto", fileDescriptor_shared_set_type_6a9f5b8cceba110f) +} + +var fileDescriptor_shared_set_type_6a9f5b8cceba110f = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0x76, 0x1d, 0x28, 0x64, 0x88, 0x5d, 0x45, 0x04, 0x71, 0x87, 0xed, 0x01, 0x52, 0xca, 0x6e, + 0xf1, 0x94, 0x6d, 0x71, 0x8c, 0x69, 0x57, 0xec, 0xd6, 0xa1, 0x54, 0x46, 0x35, 0x21, 0x0e, 0xb6, + 0xa4, 0x34, 0xd9, 0x60, 0xaf, 0xe3, 0xd1, 0x47, 0xf1, 0x45, 0x04, 0x9f, 0x42, 0x9a, 0xb8, 0xc2, + 0x0e, 0x7a, 0x09, 0x1f, 0xbf, 0xef, 0x0f, 0x5f, 0x3e, 0xd0, 0xe5, 0x52, 0xf2, 0x15, 0xf3, 0x33, + 0xaa, 0x7c, 0x0b, 0x4b, 0xb4, 0x0d, 0x7c, 0x26, 0x36, 0x6b, 0xe5, 0xab, 0xb7, 0xac, 0x60, 0x74, + 0xa1, 0x98, 0x5e, 0xe8, 0x5d, 0xce, 0x60, 0x5e, 0x48, 0x2d, 0xbd, 0x96, 0x55, 0xc2, 0x8c, 0x2a, + 0x58, 0x99, 0xe0, 0x36, 0x80, 0xc6, 0x74, 0x75, 0xbd, 0xcf, 0xcc, 0x97, 0x7e, 0x26, 0x84, 0xd4, + 0x99, 0x5e, 0x4a, 0xa1, 0xac, 0xb9, 0x53, 0x80, 0x66, 0x6c, 0x52, 0x63, 0xa6, 0xa7, 0xbb, 0x9c, + 0x11, 0xb1, 0x59, 0x77, 0x9e, 0xc1, 0xe9, 0xc1, 0xd1, 0x3b, 0x03, 0x8d, 0x59, 0x18, 0x47, 0xa4, + 0x3f, 0xba, 0x1d, 0x91, 0x81, 0x7b, 0xe4, 0x35, 0xc0, 0xc9, 0x2c, 0x1c, 0x87, 0x93, 0x79, 0xe8, + 0xd6, 0xbc, 0x0b, 0xd0, 0x0c, 0xc9, 0x10, 0x4f, 0x47, 0x09, 0x59, 0x8c, 0xc9, 0xe3, 0x7c, 0xf2, + 0x30, 0x88, 0x5d, 0xc7, 0xbb, 0x04, 0xe7, 0xd5, 0x39, 0xba, 0xc3, 0x7d, 0x72, 0x4f, 0xc2, 0x69, + 0xec, 0xd6, 0x7b, 0x5f, 0x35, 0xd0, 0x7e, 0x95, 0x6b, 0xf8, 0x6f, 0xef, 0x9e, 0x77, 0x50, 0x21, + 0x2a, 0xdb, 0x46, 0xb5, 0xa7, 0xde, 0xaf, 0x89, 0xcb, 0x55, 0x26, 0x38, 0x94, 0x05, 0xf7, 0x39, + 0x13, 0xe6, 0x2f, 0xfb, 0xc5, 0xf2, 0xa5, 0xfa, 0x63, 0xc0, 0x1b, 0xf3, 0xbe, 0x3b, 0xf5, 0x21, + 0xc6, 0x1f, 0x4e, 0x6b, 0x68, 0xa3, 0x30, 0x55, 0xd0, 0xc2, 0x12, 0x25, 0x01, 0x2c, 0x37, 0x50, + 0x9f, 0x7b, 0x3e, 0xc5, 0x54, 0xa5, 0x15, 0x9f, 0x26, 0x41, 0x6a, 0xf8, 0x6f, 0xa7, 0x6d, 0x8f, + 0x08, 0x61, 0xaa, 0x10, 0xaa, 0x14, 0x08, 0x25, 0x01, 0x42, 0x46, 0xf3, 0x72, 0x6c, 0x8a, 0x75, 
+ 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x62, 0x76, 0xf7, 0xc4, 0xd8, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_modification_method.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_modification_method.pb.go new file mode 100644 index 0000000..134c0dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_modification_method.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/simulation_modification_method.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the method by which a simulation modifies a field. +type SimulationModificationMethodEnum_SimulationModificationMethod int32 + +const ( + // Not specified. + SimulationModificationMethodEnum_UNSPECIFIED SimulationModificationMethodEnum_SimulationModificationMethod = 0 + // Used for return value only. Represents value unknown in this version. + SimulationModificationMethodEnum_UNKNOWN SimulationModificationMethodEnum_SimulationModificationMethod = 1 + // The values in a simulation were applied to all children of a given + // resource uniformly. Overrides on child resources were not respected. + SimulationModificationMethodEnum_UNIFORM SimulationModificationMethodEnum_SimulationModificationMethod = 2 + // The values in a simulation were applied to the given resource. + // Overrides on child resources were respected, and traffic estimates + // do not include these resources. + SimulationModificationMethodEnum_DEFAULT SimulationModificationMethodEnum_SimulationModificationMethod = 3 +) + +var SimulationModificationMethodEnum_SimulationModificationMethod_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "UNIFORM", + 3: "DEFAULT", +} +var SimulationModificationMethodEnum_SimulationModificationMethod_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "UNIFORM": 2, + "DEFAULT": 3, +} + +func (x SimulationModificationMethodEnum_SimulationModificationMethod) String() string { + return proto.EnumName(SimulationModificationMethodEnum_SimulationModificationMethod_name, int32(x)) +} +func (SimulationModificationMethodEnum_SimulationModificationMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_simulation_modification_method_76c10895becc3924, []int{0, 0} +} + +// Container for enum describing the method by which a simulation modifies +// a field. 
+type SimulationModificationMethodEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimulationModificationMethodEnum) Reset() { *m = SimulationModificationMethodEnum{} } +func (m *SimulationModificationMethodEnum) String() string { return proto.CompactTextString(m) } +func (*SimulationModificationMethodEnum) ProtoMessage() {} +func (*SimulationModificationMethodEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_modification_method_76c10895becc3924, []int{0} +} +func (m *SimulationModificationMethodEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimulationModificationMethodEnum.Unmarshal(m, b) +} +func (m *SimulationModificationMethodEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimulationModificationMethodEnum.Marshal(b, m, deterministic) +} +func (dst *SimulationModificationMethodEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimulationModificationMethodEnum.Merge(dst, src) +} +func (m *SimulationModificationMethodEnum) XXX_Size() int { + return xxx_messageInfo_SimulationModificationMethodEnum.Size(m) +} +func (m *SimulationModificationMethodEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SimulationModificationMethodEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SimulationModificationMethodEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SimulationModificationMethodEnum)(nil), "google.ads.googleads.v1.enums.SimulationModificationMethodEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SimulationModificationMethodEnum_SimulationModificationMethod", SimulationModificationMethodEnum_SimulationModificationMethod_name, SimulationModificationMethodEnum_SimulationModificationMethod_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/simulation_modification_method.proto", fileDescriptor_simulation_modification_method_76c10895becc3924) +} + +var fileDescriptor_simulation_modification_method_76c10895becc3924 = []byte{ + // 308 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x75, 0x1d, 0x28, 0x64, 0x0f, 0x96, 0x3d, 0xca, 0x06, 0x6e, 0x1f, 0x90, 0x50, 0x7c, 0x8b, + 0x4f, 0x99, 0xeb, 0x46, 0xd1, 0x76, 0xc5, 0xd9, 0x0a, 0x52, 0x90, 0xba, 0xd4, 0x18, 0x68, 0x93, + 0xb2, 0xb4, 0x7b, 0xf0, 0x73, 0x7c, 0xf4, 0x53, 0xfc, 0x14, 0x3f, 0xc0, 0x67, 0x69, 0xb2, 0x16, + 0x5f, 0xec, 0x4b, 0x38, 0x37, 0xf7, 0xdc, 0x73, 0xee, 0x3d, 0x60, 0xc1, 0xa4, 0x64, 0x79, 0x86, + 0x52, 0xaa, 0x90, 0x81, 0x0d, 0x3a, 0x38, 0x28, 0x13, 0x75, 0xa1, 0x90, 0xe2, 0x45, 0x9d, 0xa7, + 0x15, 0x97, 0xe2, 0xb9, 0x90, 0x94, 0xbf, 0xf2, 0xdd, 0xb1, 0xc8, 0xaa, 0x37, 0x49, 0x61, 0xb9, + 0x97, 0x95, 0x1c, 0x4f, 0xcd, 0x20, 0x4c, 0xa9, 0x82, 0x9d, 0x06, 0x3c, 0x38, 0x50, 0x6b, 0x5c, + 0x4c, 0x5a, 0x8b, 0x92, 0xa3, 0x54, 0x08, 0x59, 0x69, 0x09, 0x65, 0x86, 0xe7, 0xef, 0xe0, 0x72, + 0xdb, 0x99, 0xf8, 0x7f, 0x3c, 0x7c, 0x6d, 0xe1, 0x8a, 0xba, 0x98, 0xc7, 0x60, 0xd2, 0xc7, 0x19, + 0x9f, 0x83, 0x51, 0x14, 0x6c, 0x43, 0xf7, 0xc6, 0x5b, 0x79, 0xee, 0xd2, 0x3e, 0x19, 0x8f, 0xc0, + 0x59, 0x14, 0xdc, 0x06, 0x9b, 0xc7, 0xc0, 0x1e, 0x98, 0xc2, 0x5b, 0x6d, 0xee, 0x7d, 0xdb, 0x6a, + 0x8a, 0xa5, 0xbb, 0x22, 0xd1, 0xdd, 0x83, 0x3d, 0x5c, 0xfc, 0x0c, 0xc0, 0x6c, 0x27, 0x0b, 0xd8, + 0xbb, 0xff, 0x62, 0xd6, 0xe7, 0x1d, 0x36, 0x47, 0x84, 0x83, 0xa7, 0x63, 0x8e, 0x90, 0xc9, 0x3c, + 0x15, 0x0c, 0xca, 0x3d, 0x43, 0x2c, 0x13, 0xfa, 
0xc4, 0x36, 0xd7, 0x92, 0xab, 0x7f, 0x62, 0xbe, + 0xd6, 0xef, 0x87, 0x35, 0x5c, 0x13, 0xf2, 0x69, 0x4d, 0xd7, 0x46, 0x8a, 0x50, 0x05, 0x0d, 0x6c, + 0x50, 0xec, 0xc0, 0x26, 0x0b, 0xf5, 0xd5, 0xf6, 0x13, 0x42, 0x55, 0xd2, 0xf5, 0x93, 0xd8, 0x49, + 0x74, 0xff, 0xdb, 0x9a, 0x99, 0x4f, 0x8c, 0x09, 0x55, 0x18, 0x77, 0x0c, 0x8c, 0x63, 0x07, 0x63, + 0xcd, 0x79, 0x39, 0xd5, 0x8b, 0x5d, 0xfd, 0x06, 0x00, 0x00, 0xff, 0xff, 0x27, 0x6c, 0xf2, 0x63, + 0xfe, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_type.pb.go new file mode 100644 index 0000000..8c6812c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/simulation_type.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/simulation_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing the field a simulation modifies. +type SimulationTypeEnum_SimulationType int32 + +const ( + // Not specified. + SimulationTypeEnum_UNSPECIFIED SimulationTypeEnum_SimulationType = 0 + // Used for return value only. Represents value unknown in this version. + SimulationTypeEnum_UNKNOWN SimulationTypeEnum_SimulationType = 1 + // The simulation is for a cpc bid. + SimulationTypeEnum_CPC_BID SimulationTypeEnum_SimulationType = 2 + // The simulation is for a cpv bid. + SimulationTypeEnum_CPV_BID SimulationTypeEnum_SimulationType = 3 + // The simulation is for a cpa target. + SimulationTypeEnum_TARGET_CPA SimulationTypeEnum_SimulationType = 4 + // The simulation is for a bid modifier. + SimulationTypeEnum_BID_MODIFIER SimulationTypeEnum_SimulationType = 5 +) + +var SimulationTypeEnum_SimulationType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CPC_BID", + 3: "CPV_BID", + 4: "TARGET_CPA", + 5: "BID_MODIFIER", +} +var SimulationTypeEnum_SimulationType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CPC_BID": 2, + "CPV_BID": 3, + "TARGET_CPA": 4, + "BID_MODIFIER": 5, +} + +func (x SimulationTypeEnum_SimulationType) String() string { + return proto.EnumName(SimulationTypeEnum_SimulationType_name, int32(x)) +} +func (SimulationTypeEnum_SimulationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_simulation_type_c2cbede8840bdffd, []int{0, 0} +} + +// Container for enum describing the field a simulation modifies. 
+type SimulationTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimulationTypeEnum) Reset() { *m = SimulationTypeEnum{} } +func (m *SimulationTypeEnum) String() string { return proto.CompactTextString(m) } +func (*SimulationTypeEnum) ProtoMessage() {} +func (*SimulationTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_simulation_type_c2cbede8840bdffd, []int{0} +} +func (m *SimulationTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimulationTypeEnum.Unmarshal(m, b) +} +func (m *SimulationTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimulationTypeEnum.Marshal(b, m, deterministic) +} +func (dst *SimulationTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimulationTypeEnum.Merge(dst, src) +} +func (m *SimulationTypeEnum) XXX_Size() int { + return xxx_messageInfo_SimulationTypeEnum.Size(m) +} +func (m *SimulationTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SimulationTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SimulationTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SimulationTypeEnum)(nil), "google.ads.googleads.v1.enums.SimulationTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SimulationTypeEnum_SimulationType", SimulationTypeEnum_SimulationType_name, SimulationTypeEnum_SimulationType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/simulation_type.proto", fileDescriptor_simulation_type_c2cbede8840bdffd) +} + +var fileDescriptor_simulation_type_c2cbede8840bdffd = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xf3, 0x40, + 0x14, 0xfd, 0x92, 0x7e, 0x2a, 0x4c, 0xa5, 0x86, 0xb8, 0x13, 0xbb, 0x68, 0x1f, 0x60, 0x86, 0xd0, + 0xdd, 0xb8, 0x9a, 0xfc, 0x58, 0x82, 0x98, 0x86, 0xfe, 0x44, 0x90, 0x40, 0x88, 0x26, 0x0c, 0x91, + 0x66, 0x26, 0x74, 0xd2, 0x42, 0x77, 0x3e, 0x8b, 0x4b, 0x1f, 0xc5, 0x27, 0x11, 0x9f, 0x42, 0x66, + 0xc6, 0x06, 0xba, 0xd0, 0xcd, 0x70, 0xee, 0xdc, 0x73, 0x0e, 0xe7, 0x1e, 0x30, 0xa1, 0x9c, 0xd3, + 0x75, 0x89, 0xf2, 0x42, 0x20, 0x0d, 0x25, 0xda, 0x39, 0xa8, 0x64, 0xdb, 0x5a, 0x20, 0x51, 0xd5, + 0xdb, 0x75, 0xde, 0x56, 0x9c, 0x65, 0xed, 0xbe, 0x29, 0x61, 0xb3, 0xe1, 0x2d, 0xb7, 0x87, 0x9a, + 0x09, 0xf3, 0x42, 0xc0, 0x4e, 0x04, 0x77, 0x0e, 0x54, 0xa2, 0xab, 0xeb, 0x83, 0x67, 0x53, 0xa1, + 0x9c, 0x31, 0xde, 0x2a, 0x03, 0xa1, 0xc5, 0xe3, 0x57, 0x03, 0xd8, 0x8b, 0xce, 0x76, 0xb9, 0x6f, + 0xca, 0x80, 0x6d, 0xeb, 0xf1, 0x0b, 0x18, 0x1c, 0xff, 0xda, 0x17, 0xa0, 0xbf, 0x8a, 0x16, 0x71, + 0xe0, 0x85, 0xb7, 0x61, 0xe0, 0x5b, 0xff, 0xec, 0x3e, 0x38, 0x5b, 0x45, 0x77, 0xd1, 0xec, 0x21, + 0xb2, 0x0c, 0x39, 0x78, 0xb1, 0x97, 0xb9, 0xa1, 0x6f, 0x99, 0x7a, 0x48, 0xd4, 0xd0, 0xb3, 0x07, + 0x00, 0x2c, 0xc9, 0x7c, 0x1a, 0x2c, 0x33, 0x2f, 0x26, 0xd6, 0x7f, 0xdb, 0x02, 0xe7, 0x6e, 0xe8, + 0x67, 0xf7, 0x33, 0x5f, 0x1a, 0xcd, 0xad, 0x13, 0xf7, 0xd3, 0x00, 0xa3, 0x67, 0x5e, 0xc3, 0x3f, + 0xcf, 0x70, 0x2f, 0x8f, 0xf3, 0xc4, 0x32, 0x7d, 0x6c, 0x3c, 0xba, 0x3f, 0x2a, 0xca, 0xd7, 0x39, + 0xa3, 0x90, 0x6f, 0x28, 0xa2, 0x25, 0x53, 0xb7, 0x1d, 0x1a, 0x6c, 0x2a, 0xf1, 0x4b, 0xa1, 0x37, + 0xea, 0x7d, 0x33, 0x7b, 0x53, 0x42, 0xde, 0xcd, 0xe1, 0x54, 0x5b, 0x91, 0x42, 0x40, 0x0d, 0x25, + 0x4a, 0x1c, 0x28, 0x1b, 0x11, 0x1f, 0x87, 0x7d, 0x4a, 0x0a, 0x91, 0x76, 0xfb, 0x34, 0x71, 0x52, + 0xb5, 0xff, 0x32, 0x47, 0xfa, 0x13, 0x63, 0x52, 0x08, 0x8c, 0x3b, 
0x06, 0xc6, 0x89, 0x83, 0xb1, + 0xe2, 0x3c, 0x9d, 0xaa, 0x60, 0x93, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7b, 0x3b, 0x1b, 0xfa, + 0xe8, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/sitelink_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/sitelink_placeholder_field.pb.go new file mode 100644 index 0000000..ee53fd6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/sitelink_placeholder_field.pb.go @@ -0,0 +1,146 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/sitelink_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Sitelink placeholder fields. +type SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField int32 + +const ( + // Not specified. + SitelinkPlaceholderFieldEnum_UNSPECIFIED SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + SitelinkPlaceholderFieldEnum_UNKNOWN SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 1 + // Data Type: STRING. The link text for your sitelink. + SitelinkPlaceholderFieldEnum_TEXT SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 2 + // Data Type: STRING. First line of the sitelink description. + SitelinkPlaceholderFieldEnum_LINE_1 SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 3 + // Data Type: STRING. Second line of the sitelink description. + SitelinkPlaceholderFieldEnum_LINE_2 SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 4 + // Data Type: URL_LIST. Final URLs for the sitelink when using Upgraded + // URLs. + SitelinkPlaceholderFieldEnum_FINAL_URLS SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 5 + // Data Type: URL_LIST. Final Mobile URLs for the sitelink when using + // Upgraded URLs. + SitelinkPlaceholderFieldEnum_FINAL_MOBILE_URLS SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 6 + // Data Type: URL. Tracking template for the sitelink when using Upgraded + // URLs. + SitelinkPlaceholderFieldEnum_TRACKING_URL SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 7 + // Data Type: STRING. Final URL suffix for sitelink when using parallel + // tracking. 
+ SitelinkPlaceholderFieldEnum_FINAL_URL_SUFFIX SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField = 8 +) + +var SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TEXT", + 3: "LINE_1", + 4: "LINE_2", + 5: "FINAL_URLS", + 6: "FINAL_MOBILE_URLS", + 7: "TRACKING_URL", + 8: "FINAL_URL_SUFFIX", +} +var SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TEXT": 2, + "LINE_1": 3, + "LINE_2": 4, + "FINAL_URLS": 5, + "FINAL_MOBILE_URLS": 6, + "TRACKING_URL": 7, + "FINAL_URL_SUFFIX": 8, +} + +func (x SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField) String() string { + return proto.EnumName(SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField_name, int32(x)) +} +func (SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_sitelink_placeholder_field_4eeb2517ba6aee0a, []int{0, 0} +} + +// Values for Sitelink placeholder fields. +type SitelinkPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SitelinkPlaceholderFieldEnum) Reset() { *m = SitelinkPlaceholderFieldEnum{} } +func (m *SitelinkPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*SitelinkPlaceholderFieldEnum) ProtoMessage() {} +func (*SitelinkPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_sitelink_placeholder_field_4eeb2517ba6aee0a, []int{0} +} +func (m *SitelinkPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SitelinkPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *SitelinkPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SitelinkPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *SitelinkPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SitelinkPlaceholderFieldEnum.Merge(dst, src) +} +func (m *SitelinkPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_SitelinkPlaceholderFieldEnum.Size(m) +} +func (m *SitelinkPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SitelinkPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SitelinkPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SitelinkPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.SitelinkPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField", SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField_name, SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/sitelink_placeholder_field.proto", fileDescriptor_sitelink_placeholder_field_4eeb2517ba6aee0a) +} + +var fileDescriptor_sitelink_placeholder_field_4eeb2517ba6aee0a = []byte{ + // 377 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xd1, 0x8e, 0x9a, 0x40, + 0x14, 0x86, 0x0b, 0x5a, 0x35, 0x63, 0xd3, 0x4e, 0x27, 0x6d, 0xd2, 0x34, 0x7a, 0xa1, 0x0f, 0x30, + 0x84, 0xf6, 0x6e, 0x9a, 0x34, 0x01, 0x0b, 0x86, 0x48, 0x91, 0x88, 0xb8, 0x66, 0x43, 0x42, 0x58, + 0x61, 0x59, 0xb2, 0x38, 0x43, 0x1c, 0xf4, 0x81, 0xf6, 0x6e, 0xf7, 0x51, 0x7c, 0x94, 0xbd, 0xdc, + 0x27, 0xd8, 0xc0, 0x28, 0x7b, 0xe5, 0xde, 0x4c, 0xfe, 0x39, 0xe7, 0x7c, 0xe7, 0xcc, 0xfc, 0x07, + 0xfc, 0x4d, 
0x19, 0x4b, 0xf3, 0x44, 0x89, 0x62, 0xae, 0x08, 0x59, 0xa9, 0x83, 0xaa, 0x24, 0x74, + 0xbf, 0xe5, 0x0a, 0xcf, 0xca, 0x24, 0xcf, 0xe8, 0x7d, 0x58, 0xe4, 0xd1, 0x26, 0xb9, 0x63, 0x79, + 0x9c, 0xec, 0xc2, 0xdb, 0x2c, 0xc9, 0x63, 0x5c, 0xec, 0x58, 0xc9, 0xd0, 0x50, 0x40, 0x38, 0x8a, + 0x39, 0x6e, 0x78, 0x7c, 0x50, 0x71, 0xcd, 0xff, 0x1c, 0x9c, 0xdb, 0x17, 0x99, 0x12, 0x51, 0xca, + 0xca, 0xa8, 0xcc, 0x18, 0xe5, 0x02, 0x1e, 0x1f, 0x25, 0x30, 0xf0, 0x4e, 0x13, 0xdc, 0xb7, 0x01, + 0x66, 0xd5, 0xdf, 0xa0, 0xfb, 0xed, 0xf8, 0x51, 0x02, 0x3f, 0x2e, 0x15, 0xa0, 0x2f, 0xa0, 0xef, + 0x3b, 0x9e, 0x6b, 0x4c, 0x2c, 0xd3, 0x32, 0xfe, 0xc1, 0x0f, 0xa8, 0x0f, 0xba, 0xbe, 0x33, 0x73, + 0xe6, 0x57, 0x0e, 0x94, 0x50, 0x0f, 0xb4, 0x97, 0xc6, 0x7a, 0x09, 0x65, 0x04, 0x40, 0xc7, 0xb6, + 0x1c, 0x23, 0x54, 0x61, 0xab, 0xd1, 0xbf, 0x60, 0x1b, 0x7d, 0x06, 0xc0, 0xb4, 0x1c, 0xcd, 0x0e, + 0xfd, 0x85, 0xed, 0xc1, 0x8f, 0xe8, 0x3b, 0xf8, 0x2a, 0xee, 0xff, 0xe7, 0xba, 0x65, 0x1b, 0x22, + 0xdc, 0x41, 0x10, 0x7c, 0x5a, 0x2e, 0xb4, 0xc9, 0xcc, 0x72, 0xa6, 0x55, 0x08, 0x76, 0xd1, 0x37, + 0x00, 0x1b, 0x30, 0xf4, 0x7c, 0xd3, 0xb4, 0xd6, 0xb0, 0xa7, 0xbf, 0x48, 0x60, 0xb4, 0x61, 0x5b, + 0xfc, 0xae, 0x21, 0xfa, 0xf0, 0xd2, 0x77, 0xdc, 0xca, 0x11, 0x57, 0xba, 0xd6, 0x4f, 0x7c, 0xca, + 0xf2, 0x88, 0xa6, 0x98, 0xed, 0x52, 0x25, 0x4d, 0x68, 0xed, 0xd7, 0x79, 0x41, 0x45, 0xc6, 0x2f, + 0xec, 0xeb, 0x4f, 0x7d, 0x3e, 0xc8, 0xad, 0xa9, 0xa6, 0x3d, 0xc9, 0xc3, 0xa9, 0x68, 0xa5, 0xc5, + 0x1c, 0x0b, 0x59, 0xa9, 0x95, 0x8a, 0x2b, 0x6f, 0xf9, 0xf1, 0x9c, 0x0f, 0xb4, 0x98, 0x07, 0x4d, + 0x3e, 0x58, 0xa9, 0x41, 0x9d, 0x7f, 0x96, 0x47, 0x22, 0x48, 0x88, 0x16, 0x73, 0x42, 0x9a, 0x0a, + 0x42, 0x56, 0x2a, 0x21, 0x75, 0xcd, 0x4d, 0xa7, 0x7e, 0xd8, 0xef, 0xd7, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xd0, 0xf6, 0xda, 0x64, 0x47, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/slot.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/slot.pb.go new file mode 100644 index 0000000..5e99381 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/slot.pb.go @@ -0,0 +1,140 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/slot.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates possible positions of the Ad. +type SlotEnum_Slot int32 + +const ( + // Not specified. + SlotEnum_UNSPECIFIED SlotEnum_Slot = 0 + // The value is unknown in this version. + SlotEnum_UNKNOWN SlotEnum_Slot = 1 + // Google search: Side. + SlotEnum_SEARCH_SIDE SlotEnum_Slot = 2 + // Google search: Top. + SlotEnum_SEARCH_TOP SlotEnum_Slot = 3 + // Google search: Other. + SlotEnum_SEARCH_OTHER SlotEnum_Slot = 4 + // Google Display Network. + SlotEnum_CONTENT SlotEnum_Slot = 5 + // Search partners: Top. + SlotEnum_SEARCH_PARTNER_TOP SlotEnum_Slot = 6 + // Search partners: Other. 
+ SlotEnum_SEARCH_PARTNER_OTHER SlotEnum_Slot = 7 + // Cross-network. + SlotEnum_MIXED SlotEnum_Slot = 8 +) + +var SlotEnum_Slot_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SEARCH_SIDE", + 3: "SEARCH_TOP", + 4: "SEARCH_OTHER", + 5: "CONTENT", + 6: "SEARCH_PARTNER_TOP", + 7: "SEARCH_PARTNER_OTHER", + 8: "MIXED", +} +var SlotEnum_Slot_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SEARCH_SIDE": 2, + "SEARCH_TOP": 3, + "SEARCH_OTHER": 4, + "CONTENT": 5, + "SEARCH_PARTNER_TOP": 6, + "SEARCH_PARTNER_OTHER": 7, + "MIXED": 8, +} + +func (x SlotEnum_Slot) String() string { + return proto.EnumName(SlotEnum_Slot_name, int32(x)) +} +func (SlotEnum_Slot) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_slot_050e1c028a893c5a, []int{0, 0} +} + +// Container for enumeration of possible positions of the Ad. +type SlotEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SlotEnum) Reset() { *m = SlotEnum{} } +func (m *SlotEnum) String() string { return proto.CompactTextString(m) } +func (*SlotEnum) ProtoMessage() {} +func (*SlotEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_slot_050e1c028a893c5a, []int{0} +} +func (m *SlotEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SlotEnum.Unmarshal(m, b) +} +func (m *SlotEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SlotEnum.Marshal(b, m, deterministic) +} +func (dst *SlotEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SlotEnum.Merge(dst, src) +} +func (m *SlotEnum) XXX_Size() int { + return xxx_messageInfo_SlotEnum.Size(m) +} +func (m *SlotEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SlotEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SlotEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SlotEnum)(nil), "google.ads.googleads.v1.enums.SlotEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SlotEnum_Slot", SlotEnum_Slot_name, SlotEnum_Slot_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/slot.proto", fileDescriptor_slot_050e1c028a893c5a) +} + +var fileDescriptor_slot_050e1c028a893c5a = []byte{ + // 347 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xeb, 0x40, + 0x18, 0xbd, 0x49, 0xff, 0xa7, 0x97, 0x7b, 0x87, 0x41, 0x44, 0xc4, 0x2e, 0xda, 0x95, 0xab, 0x09, + 0xc1, 0xdd, 0xb8, 0x4a, 0xdb, 0xb1, 0x0d, 0x62, 0x12, 0x92, 0xb4, 0x8a, 0x04, 0x24, 0x9a, 0x12, + 0x0a, 0xe9, 0x4c, 0xe9, 0xa4, 0x7d, 0x1f, 0x5d, 0xba, 0xf0, 0x41, 0x7c, 0x08, 0x1f, 0xc0, 0xa7, + 0x90, 0x99, 0x49, 0xbb, 0x10, 0x74, 0x33, 0x9c, 0xef, 0x3b, 0xe7, 0x7c, 0xcc, 0x39, 0xe0, 0x3c, + 0xe7, 0x3c, 0x2f, 0x16, 0x56, 0x9a, 0x09, 0x4b, 0x43, 0x89, 0x76, 0xb6, 0xb5, 0x60, 0xdb, 0x95, + 0xb0, 0x44, 0xc1, 0x4b, 0xbc, 0xde, 0xf0, 0x92, 0xa3, 0x9e, 0xa6, 0x71, 0x9a, 0x09, 0x7c, 0x50, + 0xe2, 0x9d, 0x8d, 0x95, 0xf2, 0xf4, 0x6c, 0x7f, 0x68, 0xbd, 0xb4, 0x52, 0xc6, 0x78, 0x99, 0x96, + 0x4b, 0xce, 0x84, 0x36, 0x0f, 0xde, 0x0c, 0xd0, 0x8e, 0x0a, 0x5e, 0x52, 0xb6, 0x5d, 0x0d, 0x9e, + 0x0d, 0x50, 0x97, 0x03, 0xfa, 0x0f, 0xba, 0x33, 0x2f, 0x0a, 0xe8, 0xc8, 0xbd, 0x72, 0xe9, 0x18, + 0xfe, 0x41, 0x5d, 0xd0, 0x9a, 0x79, 0xd7, 0x9e, 0x7f, 0xeb, 0x41, 0x43, 0xb2, 0x11, 0x75, 0xc2, + 0xd1, 0xf4, 0x21, 0x72, 0xc7, 0x14, 0x9a, 0xe8, 0x1f, 0x00, 0xd5, 0x22, 0xf6, 0x03, 0x58, 0x43, + 0x10, 0xfc, 0xad, 0x66, 0x3f, 0x9e, 0xd2, 0x10, 0xd6, 0xa5, 0x7f, 0xe4, 0x7b, 0x31, 0xf5, 0x62, + 
0xd8, 0x40, 0xc7, 0x00, 0x55, 0x74, 0xe0, 0x84, 0xb1, 0x47, 0x43, 0x65, 0x6b, 0xa2, 0x13, 0x70, + 0xf4, 0x6d, 0xaf, 0xed, 0x2d, 0xd4, 0x01, 0x8d, 0x1b, 0xf7, 0x8e, 0x8e, 0x61, 0x7b, 0xf8, 0x61, + 0x80, 0xfe, 0x13, 0x5f, 0xe1, 0x5f, 0x43, 0x0f, 0x3b, 0x32, 0x46, 0x20, 0x13, 0x06, 0xc6, 0xfd, + 0xb0, 0xd2, 0xe6, 0xbc, 0x48, 0x59, 0x8e, 0xf9, 0x26, 0xb7, 0xf2, 0x05, 0x53, 0xf9, 0xf7, 0xd5, + 0xae, 0x97, 0xe2, 0x87, 0xa6, 0x2f, 0xd5, 0xfb, 0x62, 0xd6, 0x26, 0x8e, 0xf3, 0x6a, 0xf6, 0x26, + 0xfa, 0x94, 0x93, 0x09, 0xac, 0xa1, 0x44, 0x73, 0x1b, 0xcb, 0xfe, 0xc4, 0xfb, 0x9e, 0x4f, 0x9c, + 0x4c, 0x24, 0x07, 0x3e, 0x99, 0xdb, 0x89, 0xe2, 0x3f, 0xcd, 0xbe, 0x5e, 0x12, 0xe2, 0x64, 0x82, + 0x90, 0x83, 0x82, 0x90, 0xb9, 0x4d, 0x88, 0xd2, 0x3c, 0x36, 0xd5, 0xc7, 0x2e, 0xbe, 0x02, 0x00, + 0x00, 0xff, 0xff, 0x94, 0x2e, 0x04, 0xd6, 0x01, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/spending_limit_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/spending_limit_type.pb.go new file mode 100644 index 0000000..793531b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/spending_limit_type.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/spending_limit_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible spending limit types used by certain resources as an +// alternative to absolute money values in micros. +type SpendingLimitTypeEnum_SpendingLimitType int32 + +const ( + // Not specified. + SpendingLimitTypeEnum_UNSPECIFIED SpendingLimitTypeEnum_SpendingLimitType = 0 + // Used for return value only. Represents value unknown in this version. + SpendingLimitTypeEnum_UNKNOWN SpendingLimitTypeEnum_SpendingLimitType = 1 + // Infinite, indicates unlimited spending power. + SpendingLimitTypeEnum_INFINITE SpendingLimitTypeEnum_SpendingLimitType = 2 +) + +var SpendingLimitTypeEnum_SpendingLimitType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INFINITE", +} +var SpendingLimitTypeEnum_SpendingLimitType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INFINITE": 2, +} + +func (x SpendingLimitTypeEnum_SpendingLimitType) String() string { + return proto.EnumName(SpendingLimitTypeEnum_SpendingLimitType_name, int32(x)) +} +func (SpendingLimitTypeEnum_SpendingLimitType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_spending_limit_type_709c8d5d89f1cb00, []int{0, 0} +} + +// Message describing spending limit types. 
+type SpendingLimitTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpendingLimitTypeEnum) Reset() { *m = SpendingLimitTypeEnum{} } +func (m *SpendingLimitTypeEnum) String() string { return proto.CompactTextString(m) } +func (*SpendingLimitTypeEnum) ProtoMessage() {} +func (*SpendingLimitTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_spending_limit_type_709c8d5d89f1cb00, []int{0} +} +func (m *SpendingLimitTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpendingLimitTypeEnum.Unmarshal(m, b) +} +func (m *SpendingLimitTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpendingLimitTypeEnum.Marshal(b, m, deterministic) +} +func (dst *SpendingLimitTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpendingLimitTypeEnum.Merge(dst, src) +} +func (m *SpendingLimitTypeEnum) XXX_Size() int { + return xxx_messageInfo_SpendingLimitTypeEnum.Size(m) +} +func (m *SpendingLimitTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SpendingLimitTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SpendingLimitTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SpendingLimitTypeEnum)(nil), "google.ads.googleads.v1.enums.SpendingLimitTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType", SpendingLimitTypeEnum_SpendingLimitType_name, SpendingLimitTypeEnum_SpendingLimitType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/spending_limit_type.proto", fileDescriptor_spending_limit_type_709c8d5d89f1cb00) +} + +var fileDescriptor_spending_limit_type_709c8d5d89f1cb00 = []byte{ + // 298 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcd, 0x4a, 0xc3, 0x30, + 0x00, 0x76, 0x15, 0x54, 0x32, 0xc1, 0x59, 0xd0, 0x83, 0xb8, 0xc3, 0xf6, 0x00, 0x09, 0xc5, 0x83, + 0x10, 0x0f, 0xd2, 0x69, 0x37, 0x8a, 0x12, 0x0b, 0xdb, 0xaa, 0x48, 0x61, 0x54, 0x13, 0x42, 0xa0, + 0x4d, 0xc2, 0xd2, 0x0d, 0xf6, 0x3a, 0x1e, 0x7d, 0x14, 0x1f, 0x45, 0x5f, 0x42, 0x9a, 0xac, 0xbd, + 0x0c, 0xbd, 0x84, 0x8f, 0x7c, 0x3f, 0xf9, 0xf2, 0x81, 0x6b, 0xae, 0x14, 0x2f, 0x18, 0xca, 0xa9, + 0x41, 0x0e, 0xd6, 0x68, 0x1d, 0x20, 0x26, 0x57, 0xa5, 0x41, 0x46, 0x33, 0x49, 0x85, 0xe4, 0x8b, + 0x42, 0x94, 0xa2, 0x5a, 0x54, 0x1b, 0xcd, 0xa0, 0x5e, 0xaa, 0x4a, 0xf9, 0x7d, 0xa7, 0x86, 0x39, + 0x35, 0xb0, 0x35, 0xc2, 0x75, 0x00, 0xad, 0xf1, 0xe2, 0xb2, 0xc9, 0xd5, 0x02, 0xe5, 0x52, 0xaa, + 0x2a, 0xaf, 0x84, 0x92, 0xc6, 0x99, 0x87, 0x2f, 0xe0, 0x6c, 0xba, 0x4d, 0x7e, 0xac, 0x83, 0x67, + 0x1b, 0xcd, 0x22, 0xb9, 0x2a, 0x87, 0xb7, 0xe0, 0x74, 0x87, 0xf0, 0x4f, 0x40, 0x77, 0x4e, 0xa6, + 0x49, 0x74, 0x17, 0x8f, 0xe3, 0xe8, 0xbe, 0xb7, 0xe7, 0x77, 0xc1, 0xe1, 0x9c, 0x3c, 0x90, 0xa7, + 0x67, 0xd2, 0xeb, 0xf8, 0xc7, 0xe0, 0x28, 0x26, 0xe3, 0x98, 0xc4, 0xb3, 0xa8, 0xe7, 0x8d, 0x7e, + 0x3a, 0x60, 0xf0, 0xae, 0x4a, 0xf8, 0x6f, 0xbb, 0xd1, 0xf9, 0xce, 0x23, 0x49, 0xdd, 0x2b, 0xe9, + 0xbc, 0x8e, 0xb6, 0x46, 0xae, 0x8a, 0x5c, 0x72, 0xa8, 0x96, 0x1c, 0x71, 0x26, 0x6d, 0xeb, 0x66, + 0x1f, 0x2d, 0xcc, 0x1f, 0x73, 0xdd, 0xd8, 0xf3, 0xc3, 0xdb, 0x9f, 0x84, 0xe1, 0xa7, 0xd7, 0x9f, + 0xb8, 0xa8, 0x90, 0x1a, 0xe8, 0x60, 0x8d, 0xd2, 0x00, 0xd6, 0x3f, 0x35, 0x5f, 0x0d, 0x9f, 0x85, + 0xd4, 0x64, 0x2d, 0x9f, 0xa5, 0x41, 0x66, 0xf9, 0x6f, 0x6f, 0xe0, 0x2e, 0x31, 0x0e, 0xa9, 0xc1, + 0xb8, 0x55, 0x60, 0x9c, 0x06, 0x18, 0x5b, 0xcd, 0xdb, 0x81, 0x2d, 0x76, 0xf5, 
0x1b, 0x00, 0x00, + 0xff, 0xff, 0x94, 0xae, 0xf2, 0x92, 0xc6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/structured_snippet_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/structured_snippet_placeholder_field.pb.go new file mode 100644 index 0000000..a48212f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/structured_snippet_placeholder_field.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/structured_snippet_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Structured Snippet placeholder fields. +type StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField int32 + +const ( + // Not specified. + StructuredSnippetPlaceholderFieldEnum_UNSPECIFIED StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + StructuredSnippetPlaceholderFieldEnum_UNKNOWN StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField = 1 + // Data Type: STRING. The category of snippet of your products/services. + // Must match one of the predefined structured snippets headers exactly. + // See + // https://developers.google.com/adwords/api + // /docs/appendix/structured-snippet-headers + StructuredSnippetPlaceholderFieldEnum_HEADER StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField = 2 + // Data Type: STRING_LIST. Text values that describe your products/services. + // All text must be family safe. Special or non-ASCII characters are not + // permitted. A snippet can be at most 25 characters. + StructuredSnippetPlaceholderFieldEnum_SNIPPETS StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField = 3 +) + +var StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "HEADER", + 3: "SNIPPETS", +} +var StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "HEADER": 2, + "SNIPPETS": 3, +} + +func (x StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField) String() string { + return proto.EnumName(StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField_name, int32(x)) +} +func (StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_structured_snippet_placeholder_field_77587c1495c9f888, []int{0, 0} +} + +// Values for Structured Snippet placeholder fields. 
+type StructuredSnippetPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredSnippetPlaceholderFieldEnum) Reset() { *m = StructuredSnippetPlaceholderFieldEnum{} } +func (m *StructuredSnippetPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*StructuredSnippetPlaceholderFieldEnum) ProtoMessage() {} +func (*StructuredSnippetPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_structured_snippet_placeholder_field_77587c1495c9f888, []int{0} +} +func (m *StructuredSnippetPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *StructuredSnippetPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *StructuredSnippetPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum.Merge(dst, src) +} +func (m *StructuredSnippetPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum.Size(m) +} +func (m *StructuredSnippetPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredSnippetPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*StructuredSnippetPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.StructuredSnippetPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField", StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField_name, StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/structured_snippet_placeholder_field.proto", fileDescriptor_structured_snippet_placeholder_field_77587c1495c9f888) +} + +var fileDescriptor_structured_snippet_placeholder_field_77587c1495c9f888 = []byte{ + // 329 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x18, 0xfd, 0xb5, 0x83, 0xfd, 0x24, 0x13, 0x2c, 0xbd, 0x14, 0x77, 0xb1, 0x81, 0x5e, 0xa6, 0x14, + 0xef, 0xe2, 0x55, 0xe7, 0xba, 0x3f, 0x08, 0xb5, 0x58, 0x37, 0x41, 0x0b, 0x23, 0x2e, 0x31, 0x16, + 0xba, 0x24, 0x34, 0xe9, 0x9e, 0xc0, 0x27, 0xf1, 0xd2, 0x47, 0xf1, 0x51, 0x7c, 0x05, 0x6f, 0xa4, + 0x89, 0xab, 0x57, 0xba, 0x9b, 0x70, 0xc8, 0x77, 0xbe, 0x73, 0xbe, 0x73, 0xc0, 0x8c, 0x09, 0xc1, + 0x4a, 0x1a, 0x60, 0xa2, 0x02, 0x0b, 0x1b, 0xb4, 0x0d, 0x03, 0xca, 0xeb, 0x8d, 0x0a, 0x94, 0xae, + 0xea, 0xb5, 0xae, 0x2b, 0x4a, 0x56, 0x8a, 0x17, 0x52, 0x52, 0xbd, 0x92, 0x25, 0x5e, 0xd3, 0x67, + 0x51, 0x12, 0x5a, 0xad, 0x9e, 0x0a, 0x5a, 0x12, 0x28, 0x2b, 0xa1, 0x85, 0xdf, 0xb7, 0xeb, 0x10, + 0x13, 0x05, 0x5b, 0x25, 0xb8, 0x0d, 0xa1, 0x51, 0x3a, 0x3e, 0xd9, 0x19, 0xc9, 0x22, 0xc0, 0x9c, + 0x0b, 0x8d, 0x75, 0x21, 0xb8, 0xb2, 0xcb, 0xc3, 0x17, 0x07, 0x9c, 0x66, 0xad, 0x57, 0x66, 0xad, + 0xd2, 0x1f, 0xa7, 0x49, 0x63, 0x14, 0xf3, 0x7a, 0x33, 0x7c, 0x00, 0x83, 0xbd, 0x44, 0xff, 0x08, + 0xf4, 0x16, 0x49, 0x96, 0xc6, 0x97, 0xf3, 0xc9, 0x3c, 0x1e, 0x7b, 0xff, 0xfc, 0x1e, 0xf8, 0xbf, + 0x48, 0xae, 0x92, 0xeb, 0xbb, 0xc4, 0x73, 0x7c, 0x00, 0xba, 0xb3, 0x38, 0x1a, 0xc7, 0x37, 0x9e, + 
0xeb, 0x1f, 0x82, 0x83, 0x2c, 0x99, 0xa7, 0x69, 0x7c, 0x9b, 0x79, 0x9d, 0xd1, 0xa7, 0x03, 0x06, + 0x6b, 0xb1, 0x81, 0x7f, 0x46, 0x19, 0x9d, 0xed, 0x3d, 0x20, 0x6d, 0x42, 0xa5, 0xce, 0xfd, 0xe8, + 0x5b, 0x88, 0x89, 0x12, 0x73, 0x06, 0x45, 0xc5, 0x02, 0x46, 0xb9, 0x89, 0xbc, 0x6b, 0x5b, 0x16, + 0xea, 0x97, 0xf2, 0x2f, 0xcc, 0xfb, 0xea, 0x76, 0xa6, 0x51, 0xf4, 0xe6, 0xf6, 0xa7, 0x56, 0x2a, + 0x22, 0x0a, 0x5a, 0xd8, 0xa0, 0x65, 0x08, 0x9b, 0x56, 0xd4, 0xfb, 0x6e, 0x9e, 0x47, 0x44, 0xe5, + 0xed, 0x3c, 0x5f, 0x86, 0xb9, 0x99, 0x7f, 0xb8, 0x03, 0xfb, 0x89, 0x50, 0x44, 0x14, 0x42, 0x2d, + 0x03, 0xa1, 0x65, 0x88, 0x90, 0xe1, 0x3c, 0x76, 0xcd, 0x61, 0xe7, 0x5f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xad, 0xa6, 0xff, 0xbe, 0x14, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/system_managed_entity_source.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/system_managed_entity_source.pb.go new file mode 100644 index 0000000..761ec05 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/system_managed_entity_source.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/system_managed_entity_source.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum listing the possible system managed entity sources. +type SystemManagedResourceSourceEnum_SystemManagedResourceSource int32 + +const ( + // Not specified. + SystemManagedResourceSourceEnum_UNSPECIFIED SystemManagedResourceSourceEnum_SystemManagedResourceSource = 0 + // Used for return value only. Represents value unknown in this version. + SystemManagedResourceSourceEnum_UNKNOWN SystemManagedResourceSourceEnum_SystemManagedResourceSource = 1 + // Generated ad variations experiment ad. + SystemManagedResourceSourceEnum_AD_VARIATIONS SystemManagedResourceSourceEnum_SystemManagedResourceSource = 2 +) + +var SystemManagedResourceSourceEnum_SystemManagedResourceSource_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_VARIATIONS", +} +var SystemManagedResourceSourceEnum_SystemManagedResourceSource_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_VARIATIONS": 2, +} + +func (x SystemManagedResourceSourceEnum_SystemManagedResourceSource) String() string { + return proto.EnumName(SystemManagedResourceSourceEnum_SystemManagedResourceSource_name, int32(x)) +} +func (SystemManagedResourceSourceEnum_SystemManagedResourceSource) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_system_managed_entity_source_2cb6e967d17e4e0c, []int{0, 0} +} + +// Container for enum describing possible system managed entity sources. 
+type SystemManagedResourceSourceEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SystemManagedResourceSourceEnum) Reset() { *m = SystemManagedResourceSourceEnum{} } +func (m *SystemManagedResourceSourceEnum) String() string { return proto.CompactTextString(m) } +func (*SystemManagedResourceSourceEnum) ProtoMessage() {} +func (*SystemManagedResourceSourceEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_system_managed_entity_source_2cb6e967d17e4e0c, []int{0} +} +func (m *SystemManagedResourceSourceEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SystemManagedResourceSourceEnum.Unmarshal(m, b) +} +func (m *SystemManagedResourceSourceEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SystemManagedResourceSourceEnum.Marshal(b, m, deterministic) +} +func (dst *SystemManagedResourceSourceEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SystemManagedResourceSourceEnum.Merge(dst, src) +} +func (m *SystemManagedResourceSourceEnum) XXX_Size() int { + return xxx_messageInfo_SystemManagedResourceSourceEnum.Size(m) +} +func (m *SystemManagedResourceSourceEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SystemManagedResourceSourceEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SystemManagedResourceSourceEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SystemManagedResourceSourceEnum)(nil), "google.ads.googleads.v1.enums.SystemManagedResourceSourceEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.SystemManagedResourceSourceEnum_SystemManagedResourceSource", SystemManagedResourceSourceEnum_SystemManagedResourceSource_name, SystemManagedResourceSourceEnum_SystemManagedResourceSource_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/system_managed_entity_source.proto", fileDescriptor_system_managed_entity_source_2cb6e967d17e4e0c) +} + +var fileDescriptor_system_managed_entity_source_2cb6e967d17e4e0c = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x4a, 0xf3, 0x30, + 0x18, 0xfd, 0xd7, 0x1f, 0x14, 0x32, 0xc4, 0xd9, 0x4b, 0x75, 0xca, 0xf6, 0x00, 0x29, 0xc5, 0xbb, + 0x78, 0x63, 0xe6, 0xea, 0x28, 0x62, 0x37, 0x56, 0x57, 0x41, 0x0a, 0x25, 0x2e, 0x21, 0x14, 0xd6, + 0x64, 0xee, 0xcb, 0x06, 0x7b, 0x1d, 0x2f, 0x7d, 0x14, 0x1f, 0xc5, 0x5b, 0x5f, 0x40, 0x9a, 0xb8, + 0x82, 0x17, 0xee, 0x26, 0x1c, 0xf2, 0x9d, 0xef, 0x9c, 0xf3, 0x1d, 0x74, 0x23, 0xb5, 0x96, 0x0b, + 0x11, 0x30, 0x0e, 0x81, 0x83, 0x35, 0xda, 0x84, 0x81, 0x50, 0xeb, 0x0a, 0x02, 0xd8, 0x82, 0x11, + 0x55, 0x51, 0x31, 0xc5, 0xa4, 0xe0, 0x85, 0x50, 0xa6, 0x34, 0xdb, 0x02, 0xf4, 0x7a, 0x35, 0x17, + 0x78, 0xb9, 0xd2, 0x46, 0xfb, 0x5d, 0xb7, 0x86, 0x19, 0x07, 0xdc, 0x28, 0xe0, 0x4d, 0x88, 0xad, + 0xc2, 0xe9, 0xf9, 0xce, 0x60, 0x59, 0x06, 0x4c, 0x29, 0x6d, 0x98, 0x29, 0xb5, 0x02, 0xb7, 0xdc, + 0x7f, 0x45, 0x97, 0xa9, 0xb5, 0x78, 0x70, 0x0e, 0x53, 0xe1, 0xb4, 0x53, 0xfb, 0x46, 0x6a, 0x5d, + 0xf5, 0x13, 0x74, 0xb6, 0x87, 0xe2, 0x1f, 0xa3, 0xf6, 0x2c, 0x49, 0x27, 0xd1, 0x6d, 0x7c, 0x17, + 0x47, 0xc3, 0xce, 0x3f, 0xbf, 0x8d, 0x0e, 0x67, 0xc9, 0x7d, 0x32, 0x7e, 0x4a, 0x3a, 0x2d, 0xff, + 0x04, 0x1d, 0xd1, 0x61, 0x91, 0xd1, 0x69, 0x4c, 0x1f, 0xe3, 0x71, 0x92, 0x76, 0xbc, 0xc1, 0x57, + 0x0b, 0xf5, 0xe6, 0xba, 0xc2, 0x7b, 0x63, 0x0f, 0x2e, 0x7e, 0x79, 0x46, 0xf6, 0x6e, 0xe7, 0x38, + 0xa9, 0x83, 0x4f, 0x5a, 0xcf, 0x83, 0x1f, 0x01, 0xa9, 0x17, 0x4c, 0x49, 0xac, 0x57, 
0x32, 0x90, + 0x42, 0xd9, 0xb3, 0x76, 0x4d, 0x2e, 0x4b, 0xf8, 0xa3, 0xd8, 0x6b, 0xfb, 0xbe, 0x79, 0xff, 0x47, + 0x94, 0xbe, 0x7b, 0xdd, 0x91, 0x93, 0xa2, 0x1c, 0xb0, 0x83, 0x35, 0xca, 0x42, 0x5c, 0x17, 0x00, + 0x1f, 0xbb, 0x79, 0x4e, 0x39, 0xe4, 0xcd, 0x3c, 0xcf, 0xc2, 0xdc, 0xce, 0x3f, 0xbd, 0x9e, 0xfb, + 0x24, 0x84, 0x72, 0x20, 0xa4, 0x61, 0x10, 0x92, 0x85, 0x84, 0x58, 0xce, 0xcb, 0x81, 0x0d, 0x76, + 0xf5, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x52, 0x59, 0xad, 0xca, 0xf0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.pb.go new file mode 100644 index 0000000..a73a676 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.pb.go @@ -0,0 +1,130 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Goal of TargetCpaOptIn recommendation. +type TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal int32 + +const ( + // Not specified. + TargetCpaOptInRecommendationGoalEnum_UNSPECIFIED TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 0 + // Used for return value only. Represents value unknown in this version. + TargetCpaOptInRecommendationGoalEnum_UNKNOWN TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 1 + // Recommendation to set Target CPA to maintain the same cost. + TargetCpaOptInRecommendationGoalEnum_SAME_COST TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 2 + // Recommendation to set Target CPA to maintain the same conversions. + TargetCpaOptInRecommendationGoalEnum_SAME_CONVERSIONS TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 3 + // Recommendation to set Target CPA to maintain the same CPA. + TargetCpaOptInRecommendationGoalEnum_SAME_CPA TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 4 + // Recommendation to set Target CPA to a value that is as close as possible + // to, yet lower than, the actual CPA (computed for past 28 days). 
+ TargetCpaOptInRecommendationGoalEnum_CLOSEST_CPA TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal = 5 +) + +var TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SAME_COST", + 3: "SAME_CONVERSIONS", + 4: "SAME_CPA", + 5: "CLOSEST_CPA", +} +var TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SAME_COST": 2, + "SAME_CONVERSIONS": 3, + "SAME_CPA": 4, + "CLOSEST_CPA": 5, +} + +func (x TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal) String() string { + return proto.EnumName(TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal_name, int32(x)) +} +func (TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_cpa_opt_in_recommendation_goal_a5829dcc7eee49d4, []int{0, 0} +} + +// Container for enum describing goals for TargetCpaOptIn recommendation. +type TargetCpaOptInRecommendationGoalEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetCpaOptInRecommendationGoalEnum) Reset() { *m = TargetCpaOptInRecommendationGoalEnum{} } +func (m *TargetCpaOptInRecommendationGoalEnum) String() string { return proto.CompactTextString(m) } +func (*TargetCpaOptInRecommendationGoalEnum) ProtoMessage() {} +func (*TargetCpaOptInRecommendationGoalEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_target_cpa_opt_in_recommendation_goal_a5829dcc7eee49d4, []int{0} +} +func (m *TargetCpaOptInRecommendationGoalEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum.Unmarshal(m, b) +} +func (m *TargetCpaOptInRecommendationGoalEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum.Marshal(b, m, deterministic) +} +func (dst *TargetCpaOptInRecommendationGoalEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum.Merge(dst, src) +} +func (m *TargetCpaOptInRecommendationGoalEnum) XXX_Size() int { + return xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum.Size(m) +} +func (m *TargetCpaOptInRecommendationGoalEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetCpaOptInRecommendationGoalEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TargetCpaOptInRecommendationGoalEnum)(nil), "google.ads.googleads.v1.enums.TargetCpaOptInRecommendationGoalEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal", TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal_name, TargetCpaOptInRecommendationGoalEnum_TargetCpaOptInRecommendationGoal_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/target_cpa_opt_in_recommendation_goal.proto", fileDescriptor_target_cpa_opt_in_recommendation_goal_a5829dcc7eee49d4) +} + +var fileDescriptor_target_cpa_opt_in_recommendation_goal_a5829dcc7eee49d4 = []byte{ + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xdd, 0x4a, 0xf3, 0x30, + 0x18, 0xc7, 0xdf, 0x76, 0xef, 0x97, 0x99, 0x62, 0x29, 0x1e, 0x89, 0x03, 0x37, 0xf4, 0x34, 0xa5, + 0x78, 0x16, 0x8f, 0xba, 0x5a, 
0x47, 0x51, 0xdb, 0xb2, 0x6e, 0x15, 0xa4, 0x50, 0xe2, 0x5a, 0x42, + 0xa1, 0x4d, 0x4a, 0x93, 0xed, 0x0a, 0xbc, 0x12, 0x0f, 0x05, 0x6f, 0xc4, 0x4b, 0xf1, 0x12, 0x3c, + 0x92, 0x26, 0xdb, 0xc0, 0x03, 0xdd, 0x49, 0xf8, 0xe7, 0xf9, 0xf8, 0x3d, 0x5f, 0xc0, 0x27, 0x8c, + 0x91, 0xaa, 0xb0, 0x70, 0xce, 0x2d, 0x25, 0x3b, 0xb5, 0xb2, 0xad, 0x82, 0x2e, 0x6b, 0x6e, 0x09, + 0xdc, 0x92, 0x42, 0x64, 0x8b, 0x06, 0x67, 0xac, 0x11, 0x59, 0x49, 0xb3, 0xb6, 0x58, 0xb0, 0xba, + 0x2e, 0x68, 0x8e, 0x45, 0xc9, 0x68, 0x46, 0x18, 0xae, 0x60, 0xd3, 0x32, 0xc1, 0xcc, 0x81, 0xca, + 0x87, 0x38, 0xe7, 0x70, 0x8b, 0x82, 0x2b, 0x1b, 0x4a, 0xd4, 0xf1, 0xc9, 0xa6, 0x52, 0x53, 0x5a, + 0x98, 0x52, 0x26, 0x24, 0x80, 0xab, 0xe4, 0xd1, 0xab, 0x06, 0xce, 0x66, 0xb2, 0x98, 0xdb, 0xe0, + 0xb0, 0x11, 0x3e, 0x9d, 0x7e, 0x29, 0x34, 0x61, 0xb8, 0xf2, 0xe8, 0xb2, 0x1e, 0x3d, 0x69, 0xe0, + 0x74, 0x57, 0xa0, 0x79, 0x08, 0xfa, 0xf3, 0x20, 0x8e, 0x3c, 0xd7, 0xbf, 0xf6, 0xbd, 0x2b, 0xe3, + 0x97, 0xd9, 0x07, 0xff, 0xe6, 0xc1, 0x4d, 0x10, 0xde, 0x07, 0x86, 0x66, 0x1e, 0x80, 0xbd, 0xd8, + 0xb9, 0xf3, 0x32, 0x37, 0x8c, 0x67, 0x86, 0x6e, 0x1e, 0x01, 0x63, 0xfd, 0x0d, 0x12, 0x6f, 0x1a, + 0xfb, 0x61, 0x10, 0x1b, 0x3d, 0x73, 0x1f, 0xfc, 0x57, 0xd6, 0xc8, 0x31, 0x7e, 0x77, 0x40, 0xf7, + 0x36, 0x8c, 0xbd, 0x78, 0x26, 0x0d, 0x7f, 0xc6, 0x1f, 0x1a, 0x18, 0x2e, 0x58, 0x0d, 0x7f, 0x9c, + 0x79, 0x7c, 0xbe, 0xab, 0xd3, 0xa8, 0x1b, 0x3e, 0xd2, 0x1e, 0xc6, 0x6b, 0x0e, 0x61, 0x15, 0xa6, + 0x04, 0xb2, 0x96, 0x58, 0xa4, 0xa0, 0x72, 0x35, 0x9b, 0xb3, 0x34, 0x25, 0xff, 0xe6, 0x4a, 0x97, + 0xf2, 0x7d, 0xd6, 0x7b, 0x13, 0xc7, 0x79, 0xd1, 0x07, 0x13, 0x85, 0x72, 0x72, 0x0e, 0x95, 0xec, + 0x54, 0x62, 0xc3, 0x6e, 0x7d, 0xfc, 0x6d, 0xe3, 0x4f, 0x9d, 0x9c, 0xa7, 0x5b, 0x7f, 0x9a, 0xd8, + 0xa9, 0xf4, 0xbf, 0xeb, 0x43, 0x65, 0x44, 0xc8, 0xc9, 0x39, 0x42, 0xdb, 0x08, 0x84, 0x12, 0x1b, + 0x21, 0x19, 0xf3, 0xf8, 0x57, 0x36, 0x76, 0xf1, 0x19, 0x00, 0x00, 0xff, 0xff, 0x89, 0xa8, 0xee, + 0xa2, 0x3d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_impression_share_location.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_impression_share_location.pb.go new file mode 100644 index 0000000..ee3abb5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/target_impression_share_location.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/target_impression_share_location.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible goals. +type TargetImpressionShareLocationEnum_TargetImpressionShareLocation int32 + +const ( + // Not specified. + TargetImpressionShareLocationEnum_UNSPECIFIED TargetImpressionShareLocationEnum_TargetImpressionShareLocation = 0 + // Used for return value only. 
Represents value unknown in this version. + TargetImpressionShareLocationEnum_UNKNOWN TargetImpressionShareLocationEnum_TargetImpressionShareLocation = 1 + // Any location on the web page. + TargetImpressionShareLocationEnum_ANYWHERE_ON_PAGE TargetImpressionShareLocationEnum_TargetImpressionShareLocation = 2 + // Top box of ads. + TargetImpressionShareLocationEnum_TOP_OF_PAGE TargetImpressionShareLocationEnum_TargetImpressionShareLocation = 3 + // Top slot in the top box of ads. + TargetImpressionShareLocationEnum_ABSOLUTE_TOP_OF_PAGE TargetImpressionShareLocationEnum_TargetImpressionShareLocation = 4 +) + +var TargetImpressionShareLocationEnum_TargetImpressionShareLocation_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ANYWHERE_ON_PAGE", + 3: "TOP_OF_PAGE", + 4: "ABSOLUTE_TOP_OF_PAGE", +} +var TargetImpressionShareLocationEnum_TargetImpressionShareLocation_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ANYWHERE_ON_PAGE": 2, + "TOP_OF_PAGE": 3, + "ABSOLUTE_TOP_OF_PAGE": 4, +} + +func (x TargetImpressionShareLocationEnum_TargetImpressionShareLocation) String() string { + return proto.EnumName(TargetImpressionShareLocationEnum_TargetImpressionShareLocation_name, int32(x)) +} +func (TargetImpressionShareLocationEnum_TargetImpressionShareLocation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_impression_share_location_ed72bb7348c0a7f5, []int{0, 0} +} + +// Container for enum describing where on the first search results page the +// automated bidding system should target impressions for the +// TargetImpressionShare bidding strategy. +type TargetImpressionShareLocationEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetImpressionShareLocationEnum) Reset() { *m = TargetImpressionShareLocationEnum{} } +func (m *TargetImpressionShareLocationEnum) String() string { return proto.CompactTextString(m) } +func (*TargetImpressionShareLocationEnum) ProtoMessage() {} +func (*TargetImpressionShareLocationEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_target_impression_share_location_ed72bb7348c0a7f5, []int{0} +} +func (m *TargetImpressionShareLocationEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetImpressionShareLocationEnum.Unmarshal(m, b) +} +func (m *TargetImpressionShareLocationEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetImpressionShareLocationEnum.Marshal(b, m, deterministic) +} +func (dst *TargetImpressionShareLocationEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetImpressionShareLocationEnum.Merge(dst, src) +} +func (m *TargetImpressionShareLocationEnum) XXX_Size() int { + return xxx_messageInfo_TargetImpressionShareLocationEnum.Size(m) +} +func (m *TargetImpressionShareLocationEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TargetImpressionShareLocationEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetImpressionShareLocationEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TargetImpressionShareLocationEnum)(nil), "google.ads.googleads.v1.enums.TargetImpressionShareLocationEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TargetImpressionShareLocationEnum_TargetImpressionShareLocation", TargetImpressionShareLocationEnum_TargetImpressionShareLocation_name, TargetImpressionShareLocationEnum_TargetImpressionShareLocation_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/enums/target_impression_share_location.proto", fileDescriptor_target_impression_share_location_ed72bb7348c0a7f5) +} + +var fileDescriptor_target_impression_share_location_ed72bb7348c0a7f5 = []byte{ + // 343 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xbd, 0x6a, 0xeb, 0x30, + 0x18, 0xbd, 0x76, 0x2e, 0xf7, 0x82, 0x32, 0xd4, 0x98, 0x0c, 0xa5, 0x34, 0x43, 0xf2, 0x00, 0x32, + 0xa6, 0x9b, 0x3a, 0xc9, 0x8d, 0x92, 0x86, 0x06, 0xdb, 0x34, 0x7f, 0xb4, 0x18, 0x84, 0x1a, 0x1b, + 0xd5, 0x10, 0x4b, 0xc6, 0x72, 0x32, 0xf6, 0x61, 0xda, 0xad, 0x8f, 0xd2, 0x47, 0xe9, 0x0b, 0x74, + 0x2d, 0x96, 0x12, 0xd3, 0xa5, 0x59, 0xc4, 0x41, 0xe7, 0x3b, 0xe7, 0x7c, 0x3f, 0x60, 0xc4, 0xa5, + 0xe4, 0xdb, 0xcc, 0x63, 0xa9, 0xf2, 0x0c, 0x6c, 0xd0, 0xde, 0xf7, 0x32, 0xb1, 0x2b, 0x94, 0x57, + 0xb3, 0x8a, 0x67, 0x35, 0xcd, 0x8b, 0xb2, 0xca, 0x94, 0xca, 0xa5, 0xa0, 0xea, 0x99, 0x55, 0x19, + 0xdd, 0xca, 0x0d, 0xab, 0x73, 0x29, 0x60, 0x59, 0xc9, 0x5a, 0xba, 0x7d, 0x23, 0x85, 0x2c, 0x55, + 0xb0, 0x75, 0x81, 0x7b, 0x1f, 0x6a, 0x97, 0x8b, 0xcb, 0x63, 0x48, 0x99, 0x7b, 0x4c, 0x08, 0x59, + 0x6b, 0xad, 0x32, 0xe2, 0xe1, 0x9b, 0x05, 0x06, 0x0b, 0x9d, 0x33, 0x6d, 0x63, 0xe6, 0x4d, 0xca, + 0xec, 0x10, 0x42, 0xc4, 0xae, 0x18, 0xbe, 0x80, 0xfe, 0xc9, 0x22, 0xf7, 0x0c, 0x74, 0x97, 0xe1, + 0x3c, 0x26, 0x37, 0xd3, 0xf1, 0x94, 0x8c, 0x9c, 0x3f, 0x6e, 0x17, 0xfc, 0x5f, 0x86, 0x77, 0x61, + 0xb4, 0x0e, 0x1d, 0xcb, 0xed, 0x01, 0x07, 0x87, 0x0f, 0xeb, 0x5b, 0x72, 0x4f, 0x68, 0x14, 0xd2, + 0x18, 0x4f, 0x88, 0x63, 0x37, 0x9a, 0x45, 0x14, 0xd3, 0x68, 0x6c, 0x3e, 0x3a, 0xee, 0x39, 0xe8, + 0xe1, 0x60, 0x1e, 0xcd, 0x96, 0x0b, 0x42, 0x7f, 0x32, 0x7f, 0x83, 0x2f, 0x0b, 0x0c, 0x36, 0xb2, + 0x80, 0x27, 0x27, 0x0d, 0x86, 0x27, 0x7b, 0x8c, 0x9b, 0x79, 0x63, 0xeb, 0x31, 0x38, 0x98, 0x70, + 0xb9, 0x65, 0x82, 0x43, 0x59, 0x71, 0x8f, 0x67, 0x42, 0x6f, 0xe3, 0x78, 0x84, 0x32, 0x57, 0xbf, + 0xdc, 0xe4, 0x5a, 0xbf, 0xaf, 0x76, 0x67, 0x82, 0xf1, 0xbb, 0xdd, 0x9f, 0x18, 0x2b, 0x9c, 0x2a, + 0x68, 0x60, 0x83, 0x56, 0x3e, 0x6c, 0x96, 0xa6, 0x3e, 0x8e, 0x7c, 0x82, 0x53, 0x95, 0xb4, 0x7c, + 0xb2, 0xf2, 0x13, 0xcd, 0x7f, 0xda, 0x03, 0xf3, 0x89, 0x10, 0x4e, 0x15, 0x42, 0x6d, 0x05, 0x42, + 0x2b, 0x1f, 0x21, 0x5d, 0xf3, 0xf4, 0x4f, 0x37, 0x76, 0xf5, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x5e, + 0x8c, 0x33, 0x17, 0x2b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/targeting_dimension.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/targeting_dimension.pb.go new file mode 100644 index 0000000..389862c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/targeting_dimension.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/targeting_dimension.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible targeting dimensions. +type TargetingDimensionEnum_TargetingDimension int32 + +const ( + // Not specified. + TargetingDimensionEnum_UNSPECIFIED TargetingDimensionEnum_TargetingDimension = 0 + // Used for return value only. Represents value unknown in this version. + TargetingDimensionEnum_UNKNOWN TargetingDimensionEnum_TargetingDimension = 1 + // Keyword criteria, e.g. 'mars cruise'. KEYWORD may be used as a custom bid + // dimension. Keywords are always a targeting dimension, so may not be set + // as a target "ALL" dimension with TargetRestriction. + TargetingDimensionEnum_KEYWORD TargetingDimensionEnum_TargetingDimension = 2 + // Audience criteria, which include user list, user interest, custom + // affinity, and custom in market. + TargetingDimensionEnum_AUDIENCE TargetingDimensionEnum_TargetingDimension = 3 + // Topic criteria for targeting categories of content, e.g. + // 'category::Animals>Pets' Used for Display and Video targeting. + TargetingDimensionEnum_TOPIC TargetingDimensionEnum_TargetingDimension = 4 + // Criteria for targeting gender. + TargetingDimensionEnum_GENDER TargetingDimensionEnum_TargetingDimension = 5 + // Criteria for targeting age ranges. + TargetingDimensionEnum_AGE_RANGE TargetingDimensionEnum_TargetingDimension = 6 + // Placement criteria, which include websites like 'www.flowers4sale.com', + // as well as mobile applications, mobile app categories, YouTube videos, + // and YouTube channels. + TargetingDimensionEnum_PLACEMENT TargetingDimensionEnum_TargetingDimension = 7 + // Criteria for parental status targeting. + TargetingDimensionEnum_PARENTAL_STATUS TargetingDimensionEnum_TargetingDimension = 8 + // Criteria for income range targeting. + TargetingDimensionEnum_INCOME_RANGE TargetingDimensionEnum_TargetingDimension = 9 +) + +var TargetingDimensionEnum_TargetingDimension_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "KEYWORD", + 3: "AUDIENCE", + 4: "TOPIC", + 5: "GENDER", + 6: "AGE_RANGE", + 7: "PLACEMENT", + 8: "PARENTAL_STATUS", + 9: "INCOME_RANGE", +} +var TargetingDimensionEnum_TargetingDimension_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "KEYWORD": 2, + "AUDIENCE": 3, + "TOPIC": 4, + "GENDER": 5, + "AGE_RANGE": 6, + "PLACEMENT": 7, + "PARENTAL_STATUS": 8, + "INCOME_RANGE": 9, +} + +func (x TargetingDimensionEnum_TargetingDimension) String() string { + return proto.EnumName(TargetingDimensionEnum_TargetingDimension_name, int32(x)) +} +func (TargetingDimensionEnum_TargetingDimension) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_targeting_dimension_0a228f606eaa5934, []int{0, 0} +} + +// The dimensions that can be targeted. 
+type TargetingDimensionEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetingDimensionEnum) Reset() { *m = TargetingDimensionEnum{} } +func (m *TargetingDimensionEnum) String() string { return proto.CompactTextString(m) } +func (*TargetingDimensionEnum) ProtoMessage() {} +func (*TargetingDimensionEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_targeting_dimension_0a228f606eaa5934, []int{0} +} +func (m *TargetingDimensionEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetingDimensionEnum.Unmarshal(m, b) +} +func (m *TargetingDimensionEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetingDimensionEnum.Marshal(b, m, deterministic) +} +func (dst *TargetingDimensionEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetingDimensionEnum.Merge(dst, src) +} +func (m *TargetingDimensionEnum) XXX_Size() int { + return xxx_messageInfo_TargetingDimensionEnum.Size(m) +} +func (m *TargetingDimensionEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TargetingDimensionEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetingDimensionEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TargetingDimensionEnum)(nil), "google.ads.googleads.v1.enums.TargetingDimensionEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TargetingDimensionEnum_TargetingDimension", TargetingDimensionEnum_TargetingDimension_name, TargetingDimensionEnum_TargetingDimension_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/targeting_dimension.proto", fileDescriptor_targeting_dimension_0a228f606eaa5934) +} + +var fileDescriptor_targeting_dimension_0a228f606eaa5934 = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xdf, 0xaa, 0xd3, 0x30, + 0x1c, 0xb6, 0x3d, 0x9e, 0x9d, 0xb3, 0x6c, 0xb2, 0x10, 0x41, 0x41, 0xdc, 0xc5, 0xf6, 0x00, 0x29, + 0xc5, 0x0b, 0x21, 0x5e, 0x65, 0x6d, 0x2c, 0x65, 0x5b, 0x5a, 0xba, 0x76, 0x43, 0x29, 0x8c, 0x6a, + 0x4b, 0x28, 0xac, 0xc9, 0x58, 0xba, 0x3d, 0x90, 0x77, 0xfa, 0x1c, 0x5e, 0xf9, 0x28, 0xe2, 0x43, + 0x48, 0x5b, 0xbb, 0x9b, 0xe1, 0xb9, 0x09, 0xdf, 0x2f, 0xdf, 0x1f, 0x92, 0xef, 0x07, 0xde, 0x0b, + 0xa5, 0xc4, 0xa1, 0xb0, 0xb2, 0x5c, 0x5b, 0x1d, 0x6c, 0xd0, 0xc5, 0xb6, 0x0a, 0x79, 0xae, 0xb4, + 0x55, 0x67, 0x27, 0x51, 0xd4, 0xa5, 0x14, 0xfb, 0xbc, 0xac, 0x0a, 0xa9, 0x4b, 0x25, 0xf1, 0xf1, + 0xa4, 0x6a, 0x85, 0xa6, 0x9d, 0x1a, 0x67, 0xb9, 0xc6, 0x57, 0x23, 0xbe, 0xd8, 0xb8, 0x35, 0xbe, + 0x79, 0xdb, 0xe7, 0x1e, 0x4b, 0x2b, 0x93, 0x52, 0xd5, 0x59, 0x5d, 0x2a, 0xa9, 0x3b, 0xf3, 0xfc, + 0xa7, 0x01, 0x5e, 0xc5, 0x7d, 0xb4, 0xdb, 0x27, 0x33, 0x79, 0xae, 0xe6, 0xdf, 0x0d, 0x80, 0x6e, + 0x29, 0x34, 0x01, 0xa3, 0x84, 0x6f, 0x42, 0xe6, 0xf8, 0x1f, 0x7d, 0xe6, 0xc2, 0x67, 0x68, 0x04, + 0x1e, 0x12, 0xbe, 0xe4, 0xc1, 0x8e, 0x43, 0xa3, 0x19, 0x96, 0xec, 0xd3, 0x2e, 0x88, 0x5c, 0x68, + 0xa2, 0x31, 0x78, 0xa4, 0x89, 0xeb, 0x33, 0xee, 0x30, 0x78, 0x87, 0x86, 0xe0, 0x3e, 0x0e, 0x42, + 0xdf, 0x81, 0xcf, 0x11, 0x00, 0x03, 0x8f, 0x71, 0x97, 0x45, 0xf0, 0x1e, 0xbd, 0x00, 0x43, 0xea, + 0xb1, 0x7d, 0x44, 0xb9, 0xc7, 0xe0, 0xa0, 0x19, 0xc3, 0x15, 0x75, 0xd8, 0x9a, 0xf1, 0x18, 0x3e, + 0xa0, 0x97, 0x60, 0x12, 0xd2, 0x88, 0xf1, 0x98, 0xae, 0xf6, 0x9b, 0x98, 0xc6, 0xc9, 0x06, 0x3e, + 0x22, 0x08, 0xc6, 0x3e, 0x77, 0x82, 0x75, 0xef, 0x1a, 0x2e, 0xfe, 0x18, 0x60, 0xf6, 0x55, 0x55, + 0xf8, 0xc9, 0x2a, 0x16, 0xaf, 0x6f, 0xbf, 0x13, 
0x36, 0x2d, 0x84, 0xc6, 0xe7, 0xc5, 0x3f, 0xa7, + 0x50, 0x87, 0x4c, 0x0a, 0xac, 0x4e, 0xc2, 0x12, 0x85, 0x6c, 0x3b, 0xea, 0xb7, 0x71, 0x2c, 0xf5, + 0x7f, 0x96, 0xf3, 0xa1, 0x3d, 0xbf, 0x99, 0x77, 0x1e, 0xa5, 0x3f, 0xcc, 0xa9, 0xd7, 0x45, 0xd1, + 0x5c, 0xe3, 0x0e, 0x36, 0x68, 0x6b, 0xe3, 0xa6, 0x55, 0xfd, 0xab, 0xe7, 0x53, 0x9a, 0xeb, 0xf4, + 0xca, 0xa7, 0x5b, 0x3b, 0x6d, 0xf9, 0xdf, 0xe6, 0xac, 0xbb, 0x24, 0x84, 0xe6, 0x9a, 0x90, 0xab, + 0x82, 0x90, 0xad, 0x4d, 0x48, 0xab, 0xf9, 0x32, 0x68, 0x1f, 0xf6, 0xee, 0x6f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x7a, 0x7d, 0x14, 0x76, 0x34, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/time_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/time_type.pb.go new file mode 100644 index 0000000..66adaab --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/time_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/time_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible time types used by certain resources as an alternative to +// absolute timestamps. +type TimeTypeEnum_TimeType int32 + +const ( + // Not specified. + TimeTypeEnum_UNSPECIFIED TimeTypeEnum_TimeType = 0 + // Used for return value only. Represents value unknown in this version. + TimeTypeEnum_UNKNOWN TimeTypeEnum_TimeType = 1 + // As soon as possible. + TimeTypeEnum_NOW TimeTypeEnum_TimeType = 2 + // An infinite point in the future. + TimeTypeEnum_FOREVER TimeTypeEnum_TimeType = 3 +) + +var TimeTypeEnum_TimeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NOW", + 3: "FOREVER", +} +var TimeTypeEnum_TimeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NOW": 2, + "FOREVER": 3, +} + +func (x TimeTypeEnum_TimeType) String() string { + return proto.EnumName(TimeTypeEnum_TimeType_name, int32(x)) +} +func (TimeTypeEnum_TimeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_time_type_cab7bcba6ce59172, []int{0, 0} +} + +// Message describing time types. 
+type TimeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeTypeEnum) Reset() { *m = TimeTypeEnum{} } +func (m *TimeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*TimeTypeEnum) ProtoMessage() {} +func (*TimeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_time_type_cab7bcba6ce59172, []int{0} +} +func (m *TimeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeTypeEnum.Unmarshal(m, b) +} +func (m *TimeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *TimeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeTypeEnum.Merge(dst, src) +} +func (m *TimeTypeEnum) XXX_Size() int { + return xxx_messageInfo_TimeTypeEnum.Size(m) +} +func (m *TimeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TimeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TimeTypeEnum)(nil), "google.ads.googleads.v1.enums.TimeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TimeTypeEnum_TimeType", TimeTypeEnum_TimeType_name, TimeTypeEnum_TimeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/time_type.proto", fileDescriptor_time_type_cab7bcba6ce59172) +} + +var fileDescriptor_time_type_cab7bcba6ce59172 = []byte{ + // 294 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x18, 0xc7, 0x6d, 0x0b, 0x4e, 0x32, 0xc5, 0xd2, 0xa3, 0xb8, 0xc3, 0x76, 0x37, 0xa1, 0x78, 0x8b, + 0x20, 0xb4, 0xda, 0x8d, 0x21, 0xa4, 0x65, 0x6e, 0x1d, 0x48, 0x41, 0xaa, 0x0d, 0xa1, 0xb0, 0x26, + 0x65, 0xe9, 0x06, 0x7b, 0x1d, 0x8f, 0x3e, 0x8a, 0x8f, 0xe1, 0xd1, 0xa7, 0x90, 0x24, 0xa6, 0x37, + 0x77, 0x09, 0xff, 0x7c, 0xff, 0x5f, 0xbe, 0xfc, 0xbf, 0x0f, 0xdc, 0x30, 0x21, 0xd8, 0x86, 0xa2, + 0xb2, 0x92, 0xc8, 0x48, 0xa5, 0xf6, 0x21, 0xa2, 0x7c, 0xd7, 0x48, 0xd4, 0xd5, 0x0d, 0x7d, 0xed, + 0x0e, 0x2d, 0x85, 0xed, 0x56, 0x74, 0x22, 0x18, 0x19, 0x06, 0x96, 0x95, 0x84, 0x3d, 0x0e, 0xf7, + 0x21, 0xd4, 0xf8, 0xd5, 0xb5, 0xed, 0xd6, 0xd6, 0xa8, 0xe4, 0x5c, 0x74, 0x65, 0x57, 0x0b, 0x2e, + 0xcd, 0xe3, 0x09, 0x01, 0xe7, 0xcb, 0xba, 0xa1, 0xcb, 0x43, 0x4b, 0x13, 0xbe, 0x6b, 0x26, 0xf7, + 0xe0, 0xcc, 0xde, 0x83, 0x4b, 0x30, 0x5c, 0x91, 0xe7, 0x2c, 0x79, 0x98, 0x4f, 0xe7, 0xc9, 0xa3, + 0x7f, 0x12, 0x0c, 0xc1, 0x60, 0x45, 0x9e, 0x48, 0xba, 0x26, 0xbe, 0x13, 0x0c, 0x80, 0x47, 0xd2, + 0xb5, 0xef, 0xaa, 0xea, 0x34, 0x5d, 0x24, 0x79, 0xb2, 0xf0, 0xbd, 0xf8, 0xdb, 0x01, 0xe3, 0x77, + 0xd1, 0xc0, 0xa3, 0x99, 0xe2, 0x0b, 0xfb, 0x47, 0xa6, 0x42, 0x64, 0xce, 0x4b, 0xfc, 0xc7, 0x33, + 0xb1, 0x29, 0x39, 0x83, 0x62, 0xcb, 0x10, 0xa3, 0x5c, 0x47, 0xb4, 0x2b, 0x68, 0x6b, 0xf9, 0xcf, + 0x46, 0xee, 0xf4, 0xf9, 0xe1, 0x7a, 0xb3, 0x28, 0xfa, 0x74, 0x47, 0x33, 0xd3, 0x2a, 0xaa, 0x24, + 0x34, 0x52, 0xa9, 0x3c, 0x84, 0x6a, 0x3e, 0xf9, 0x65, 0xfd, 0x22, 0xaa, 0x64, 0xd1, 0xfb, 0x45, + 0x1e, 0x16, 0xda, 0xff, 0x71, 0xc7, 0xa6, 0x88, 0x71, 0x54, 0x49, 0x8c, 0x7b, 0x02, 0xe3, 0x3c, + 0xc4, 0x58, 0x33, 0x6f, 0xa7, 0x3a, 0xd8, 0xed, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x38, 0xf4, + 0x42, 0x6f, 0xa9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_page_format.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_page_format.pb.go new file mode 100644 index 0000000..06c7a5d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_page_format.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/tracking_code_page_format.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The format of the web page where the tracking tag and snippet will be +// installed. +type TrackingCodePageFormatEnum_TrackingCodePageFormat int32 + +const ( + // Not specified. + TrackingCodePageFormatEnum_UNSPECIFIED TrackingCodePageFormatEnum_TrackingCodePageFormat = 0 + // Used for return value only. Represents value unknown in this version. + TrackingCodePageFormatEnum_UNKNOWN TrackingCodePageFormatEnum_TrackingCodePageFormat = 1 + // Standard HTML page format. + TrackingCodePageFormatEnum_HTML TrackingCodePageFormatEnum_TrackingCodePageFormat = 2 + // Google AMP page format. + TrackingCodePageFormatEnum_AMP TrackingCodePageFormatEnum_TrackingCodePageFormat = 3 +) + +var TrackingCodePageFormatEnum_TrackingCodePageFormat_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "HTML", + 3: "AMP", +} +var TrackingCodePageFormatEnum_TrackingCodePageFormat_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "HTML": 2, + "AMP": 3, +} + +func (x TrackingCodePageFormatEnum_TrackingCodePageFormat) String() string { + return proto.EnumName(TrackingCodePageFormatEnum_TrackingCodePageFormat_name, int32(x)) +} +func (TrackingCodePageFormatEnum_TrackingCodePageFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_tracking_code_page_format_109230f9eb069caa, []int{0, 0} +} + +// Container for enum describing the format of the web page where the tracking +// tag and snippet will be installed. 
+type TrackingCodePageFormatEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrackingCodePageFormatEnum) Reset() { *m = TrackingCodePageFormatEnum{} } +func (m *TrackingCodePageFormatEnum) String() string { return proto.CompactTextString(m) } +func (*TrackingCodePageFormatEnum) ProtoMessage() {} +func (*TrackingCodePageFormatEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_tracking_code_page_format_109230f9eb069caa, []int{0} +} +func (m *TrackingCodePageFormatEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrackingCodePageFormatEnum.Unmarshal(m, b) +} +func (m *TrackingCodePageFormatEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrackingCodePageFormatEnum.Marshal(b, m, deterministic) +} +func (dst *TrackingCodePageFormatEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrackingCodePageFormatEnum.Merge(dst, src) +} +func (m *TrackingCodePageFormatEnum) XXX_Size() int { + return xxx_messageInfo_TrackingCodePageFormatEnum.Size(m) +} +func (m *TrackingCodePageFormatEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TrackingCodePageFormatEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TrackingCodePageFormatEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TrackingCodePageFormatEnum)(nil), "google.ads.googleads.v1.enums.TrackingCodePageFormatEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TrackingCodePageFormatEnum_TrackingCodePageFormat", TrackingCodePageFormatEnum_TrackingCodePageFormat_name, TrackingCodePageFormatEnum_TrackingCodePageFormat_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/tracking_code_page_format.proto", fileDescriptor_tracking_code_page_format_109230f9eb069caa) +} + +var fileDescriptor_tracking_code_page_format_109230f9eb069caa = []byte{ + // 312 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4e, 0xc2, 0x30, + 0x18, 0xc7, 0x65, 0x18, 0x31, 0xe5, 0xe0, 0xb2, 0x83, 0x07, 0x94, 0x03, 0x3c, 0x40, 0x9b, 0xc5, + 0x5b, 0x8d, 0x87, 0x82, 0x80, 0x44, 0x99, 0x4b, 0x04, 0x4c, 0xcc, 0x12, 0x52, 0x69, 0x6d, 0x16, + 0x59, 0xbf, 0x65, 0x1d, 0x3c, 0x90, 0x47, 0x1f, 0xc5, 0x47, 0xf1, 0xe4, 0x23, 0x98, 0xb5, 0xc0, + 0x09, 0xbd, 0x2c, 0xff, 0xec, 0xff, 0xfd, 0xfe, 0xfd, 0x7f, 0x1f, 0xba, 0x51, 0x00, 0x6a, 0x25, + 0x09, 0x17, 0x86, 0x38, 0x59, 0xa9, 0x4d, 0x48, 0xa4, 0x5e, 0x67, 0x86, 0x94, 0x05, 0x5f, 0xbe, + 0xa7, 0x5a, 0x2d, 0x96, 0x20, 0xe4, 0x22, 0xe7, 0x4a, 0x2e, 0xde, 0xa0, 0xc8, 0x78, 0x89, 0xf3, + 0x02, 0x4a, 0x08, 0xda, 0x8e, 0xc1, 0x5c, 0x18, 0xbc, 0xc7, 0xf1, 0x26, 0xc4, 0x16, 0x6f, 0x5d, + 0xee, 0xd2, 0xf3, 0x94, 0x70, 0xad, 0xa1, 0xe4, 0x65, 0x0a, 0xda, 0x38, 0xb8, 0xab, 0x50, 0x6b, + 0xba, 0xcd, 0xef, 0x83, 0x90, 0x31, 0x57, 0x72, 0x68, 0xc3, 0x07, 0x7a, 0x9d, 0x75, 0xc7, 0xe8, + 0xfc, 0xb0, 0x1b, 0x9c, 0xa1, 0xe6, 0x2c, 0x7a, 0x8a, 0x07, 0xfd, 0xf1, 0x70, 0x3c, 0xb8, 0xf5, + 0x8f, 0x82, 0x26, 0x6a, 0xcc, 0xa2, 0xfb, 0xe8, 0xf1, 0x39, 0xf2, 0x6b, 0xc1, 0x29, 0x3a, 0xbe, + 0x9b, 0x4e, 0x1e, 0x7c, 0x2f, 0x68, 0xa0, 0x3a, 0x9b, 0xc4, 0x7e, 0xbd, 0xf7, 0x53, 0x43, 0x9d, + 0x25, 0x64, 0xf8, 0xdf, 0xb2, 0xbd, 0x8b, 0xc3, 0xcf, 0xc5, 0x55, 0xd7, 0xb8, 0xf6, 0xd2, 0xdb, + 0xd2, 0x0a, 0x56, 0x5c, 0x2b, 0x0c, 0x85, 0x22, 0x4a, 0x6a, 0xbb, 0xc9, 0xee, 0x72, 0x79, 0x6a, + 0xfe, 0x38, 0xe4, 0xb5, 0xfd, 0x7e, 0x78, 0xf5, 0x11, 0x63, 0x9f, 0x5e, 0x7b, 0xe4, 0xa2, 0x98, + 0x30, 0xd8, 0xc9, 0x4a, 
0xcd, 0x43, 0x5c, 0x2d, 0x6e, 0xbe, 0x76, 0x7e, 0xc2, 0x84, 0x49, 0xf6, + 0x7e, 0x32, 0x0f, 0x13, 0xeb, 0x7f, 0x7b, 0x1d, 0xf7, 0x93, 0x52, 0x26, 0x0c, 0xa5, 0xfb, 0x09, + 0x4a, 0xe7, 0x21, 0xa5, 0x76, 0xe6, 0xf5, 0xc4, 0x16, 0xbb, 0xfa, 0x0d, 0x00, 0x00, 0xff, 0xff, + 0x30, 0x63, 0x0b, 0x36, 0xe0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_type.pb.go new file mode 100644 index 0000000..bb24dd0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/tracking_code_type.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/tracking_code_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of the generated tag snippets for tracking conversions. +type TrackingCodeTypeEnum_TrackingCodeType int32 + +const ( + // Not specified. + TrackingCodeTypeEnum_UNSPECIFIED TrackingCodeTypeEnum_TrackingCodeType = 0 + // Used for return value only. Represents value unknown in this version. + TrackingCodeTypeEnum_UNKNOWN TrackingCodeTypeEnum_TrackingCodeType = 1 + // The snippet that is fired as a result of a website page loading. + TrackingCodeTypeEnum_WEBPAGE TrackingCodeTypeEnum_TrackingCodeType = 2 + // The snippet contains a JavaScript function which fires the tag. This + // function is typically called from an onClick handler added to a link or + // button element on the page. + TrackingCodeTypeEnum_WEBPAGE_ONCLICK TrackingCodeTypeEnum_TrackingCodeType = 3 + // For embedding on a mobile webpage. The snippet contains a JavaScript + // function which fires the tag. + TrackingCodeTypeEnum_CLICK_TO_CALL TrackingCodeTypeEnum_TrackingCodeType = 4 +) + +var TrackingCodeTypeEnum_TrackingCodeType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "WEBPAGE", + 3: "WEBPAGE_ONCLICK", + 4: "CLICK_TO_CALL", +} +var TrackingCodeTypeEnum_TrackingCodeType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "WEBPAGE": 2, + "WEBPAGE_ONCLICK": 3, + "CLICK_TO_CALL": 4, +} + +func (x TrackingCodeTypeEnum_TrackingCodeType) String() string { + return proto.EnumName(TrackingCodeTypeEnum_TrackingCodeType_name, int32(x)) +} +func (TrackingCodeTypeEnum_TrackingCodeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_tracking_code_type_1a45135fd39e29d4, []int{0, 0} +} + +// Container for enum describing the type of the generated tag snippets for +// tracking conversions. 
+type TrackingCodeTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrackingCodeTypeEnum) Reset() { *m = TrackingCodeTypeEnum{} } +func (m *TrackingCodeTypeEnum) String() string { return proto.CompactTextString(m) } +func (*TrackingCodeTypeEnum) ProtoMessage() {} +func (*TrackingCodeTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_tracking_code_type_1a45135fd39e29d4, []int{0} +} +func (m *TrackingCodeTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrackingCodeTypeEnum.Unmarshal(m, b) +} +func (m *TrackingCodeTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrackingCodeTypeEnum.Marshal(b, m, deterministic) +} +func (dst *TrackingCodeTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrackingCodeTypeEnum.Merge(dst, src) +} +func (m *TrackingCodeTypeEnum) XXX_Size() int { + return xxx_messageInfo_TrackingCodeTypeEnum.Size(m) +} +func (m *TrackingCodeTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TrackingCodeTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TrackingCodeTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TrackingCodeTypeEnum)(nil), "google.ads.googleads.v1.enums.TrackingCodeTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TrackingCodeTypeEnum_TrackingCodeType", TrackingCodeTypeEnum_TrackingCodeType_name, TrackingCodeTypeEnum_TrackingCodeType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/tracking_code_type.proto", fileDescriptor_tracking_code_type_1a45135fd39e29d4) +} + +var fileDescriptor_tracking_code_type_1a45135fd39e29d4 = []byte{ + // 327 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdf, 0x4a, 0xfb, 0x30, + 0x18, 0xfd, 0xad, 0xfb, 0xa1, 0x90, 0x21, 0xab, 0x55, 0x6f, 0xc4, 0x5d, 0x6c, 0x0f, 0x90, 0x50, + 0x04, 0x2f, 0xe2, 0x55, 0x5a, 0xeb, 0x18, 0x1b, 0x5d, 0xc1, 0xfd, 0x01, 0x29, 0x94, 0xb8, 0x84, + 0x50, 0xdc, 0x92, 0xb2, 0x74, 0x83, 0x5d, 0xf8, 0x32, 0x5e, 0xfa, 0x28, 0x3e, 0xca, 0x9e, 0x42, + 0x9a, 0x6c, 0xbd, 0x18, 0xe8, 0x4d, 0x38, 0xf9, 0xbe, 0x73, 0x0e, 0xe7, 0x3b, 0xe0, 0x41, 0x28, + 0x25, 0x96, 0x1c, 0x51, 0xa6, 0x91, 0x85, 0x15, 0xda, 0xfa, 0x88, 0xcb, 0xcd, 0x4a, 0xa3, 0x72, + 0x4d, 0x17, 0xef, 0xb9, 0x14, 0xd9, 0x42, 0x31, 0x9e, 0x95, 0xbb, 0x82, 0xc3, 0x62, 0xad, 0x4a, + 0xe5, 0x75, 0x2c, 0x19, 0x52, 0xa6, 0x61, 0xad, 0x83, 0x5b, 0x1f, 0x1a, 0xdd, 0xed, 0xdd, 0xd1, + 0xb6, 0xc8, 0x11, 0x95, 0x52, 0x95, 0xb4, 0xcc, 0x95, 0xd4, 0x56, 0xdc, 0xfb, 0x00, 0xd7, 0x93, + 0x83, 0x71, 0xa8, 0x18, 0x9f, 0xec, 0x0a, 0x1e, 0xc9, 0xcd, 0xaa, 0xc7, 0x81, 0x7b, 0x3a, 0xf7, + 0xda, 0xa0, 0x35, 0x8d, 0x5f, 0x92, 0x28, 0x1c, 0x3c, 0x0f, 0xa2, 0x27, 0xf7, 0x9f, 0xd7, 0x02, + 0xe7, 0xd3, 0x78, 0x18, 0x8f, 0xe7, 0xb1, 0xdb, 0xa8, 0x3e, 0xf3, 0x28, 0x48, 0x48, 0x3f, 0x72, + 0x1d, 0xef, 0x0a, 0xb4, 0x0f, 0x9f, 0x6c, 0x1c, 0x87, 0xa3, 0x41, 0x38, 0x74, 0x9b, 0xde, 0x25, + 0xb8, 0x30, 0x30, 0x9b, 0x8c, 0xb3, 0x90, 0x8c, 0x46, 0xee, 0xff, 0x60, 0xdf, 0x00, 0xdd, 0x85, + 0x5a, 0xc1, 0x3f, 0x4f, 0x08, 0x6e, 0x4e, 0xa3, 0x24, 0x55, 0xf6, 0xa4, 0xf1, 0x1a, 0x1c, 0x74, + 0x42, 0x2d, 0xa9, 0x14, 0x50, 0xad, 0x05, 0x12, 0x5c, 0x9a, 0xcb, 0x8e, 0x15, 0x16, 0xb9, 0xfe, + 0xa5, 0xd1, 0x47, 0xf3, 0x7e, 0x3a, 0xcd, 0x3e, 0x21, 0x5f, 0x4e, 0xa7, 0x6f, 0xad, 0x08, 0xd3, + 0xd0, 0xc2, 0x0a, 0xcd, 0x7c, 0x58, 0xd5, 0xa1, 0xbf, 0x8f, 0xfb, 0x94, 0x30, 0x9d, 0xd6, 0xfb, + 0x74, 
0xe6, 0xa7, 0x66, 0xbf, 0x77, 0xba, 0x76, 0x88, 0x31, 0x61, 0x1a, 0xe3, 0x9a, 0x81, 0xf1, + 0xcc, 0xc7, 0xd8, 0x70, 0xde, 0xce, 0x4c, 0xb0, 0xfb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3c, + 0xb2, 0xe6, 0x9c, 0xe9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/travel_placeholder_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/travel_placeholder_field.pb.go new file mode 100644 index 0000000..7a587fb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/travel_placeholder_field.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/travel_placeholder_field.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Travel placeholder fields. +type TravelPlaceholderFieldEnum_TravelPlaceholderField int32 + +const ( + // Not specified. + TravelPlaceholderFieldEnum_UNSPECIFIED TravelPlaceholderFieldEnum_TravelPlaceholderField = 0 + // Used for return value only. Represents value unknown in this version. + TravelPlaceholderFieldEnum_UNKNOWN TravelPlaceholderFieldEnum_TravelPlaceholderField = 1 + // Data Type: STRING. Required. Destination id. Example: PAR, LON. + // For feed items that only have destination id, destination id must be a + // unique key. For feed items that have both destination id and origin id, + // then the combination must be a unique key. + TravelPlaceholderFieldEnum_DESTINATION_ID TravelPlaceholderFieldEnum_TravelPlaceholderField = 2 + // Data Type: STRING. Origin id. Example: PAR, LON. + // Combination of DESTINATION_ID and ORIGIN_ID must be + // unique per offer. + TravelPlaceholderFieldEnum_ORIGIN_ID TravelPlaceholderFieldEnum_TravelPlaceholderField = 3 + // Data Type: STRING. Required. Main headline with name to be shown in + // dynamic ad. + TravelPlaceholderFieldEnum_TITLE TravelPlaceholderFieldEnum_TravelPlaceholderField = 4 + // Data Type: STRING. The destination name. Shorter names are recommended. + TravelPlaceholderFieldEnum_DESTINATION_NAME TravelPlaceholderFieldEnum_TravelPlaceholderField = 5 + // Data Type: STRING. Origin name. Shorter names are recommended. + TravelPlaceholderFieldEnum_ORIGIN_NAME TravelPlaceholderFieldEnum_TravelPlaceholderField = 6 + // Data Type: STRING. Price to be shown in the ad. Highly recommended for + // dynamic ads. + // Example: "100.00 USD" + TravelPlaceholderFieldEnum_PRICE TravelPlaceholderFieldEnum_TravelPlaceholderField = 7 + // Data Type: STRING. Formatted price to be shown in the ad. + // Example: "Starting at $100.00 USD", "$80 - $100" + TravelPlaceholderFieldEnum_FORMATTED_PRICE TravelPlaceholderFieldEnum_TravelPlaceholderField = 8 + // Data Type: STRING. Sale price to be shown in the ad. 
+ // Example: "80.00 USD" + TravelPlaceholderFieldEnum_SALE_PRICE TravelPlaceholderFieldEnum_TravelPlaceholderField = 9 + // Data Type: STRING. Formatted sale price to be shown in the ad. + // Example: "On sale for $80.00", "$60 - $80" + TravelPlaceholderFieldEnum_FORMATTED_SALE_PRICE TravelPlaceholderFieldEnum_TravelPlaceholderField = 10 + // Data Type: URL. Image to be displayed in the ad. + TravelPlaceholderFieldEnum_IMAGE_URL TravelPlaceholderFieldEnum_TravelPlaceholderField = 11 + // Data Type: STRING. Category of travel offer used to group like items + // together for recommendation engine. + TravelPlaceholderFieldEnum_CATEGORY TravelPlaceholderFieldEnum_TravelPlaceholderField = 12 + // Data Type: STRING_LIST. Keywords used for product retrieval. + TravelPlaceholderFieldEnum_CONTEXTUAL_KEYWORDS TravelPlaceholderFieldEnum_TravelPlaceholderField = 13 + // Data Type: STRING. Address of travel offer, including postal code. + TravelPlaceholderFieldEnum_DESTINATION_ADDRESS TravelPlaceholderFieldEnum_TravelPlaceholderField = 14 + // Data Type: URL_LIST. Required. Final URLs to be used in ad, when using + // Upgraded URLs; the more specific the better (e.g. the individual URL of a + // specific travel offer and its location). + TravelPlaceholderFieldEnum_FINAL_URL TravelPlaceholderFieldEnum_TravelPlaceholderField = 15 + // Data Type: URL_LIST. Final mobile URLs for the ad when using Upgraded + // URLs. + TravelPlaceholderFieldEnum_FINAL_MOBILE_URLS TravelPlaceholderFieldEnum_TravelPlaceholderField = 16 + // Data Type: URL. Tracking template for the ad when using Upgraded URLs. + TravelPlaceholderFieldEnum_TRACKING_URL TravelPlaceholderFieldEnum_TravelPlaceholderField = 17 + // Data Type: STRING. Android app link. Must be formatted as: + // android-app://{package_id}/{scheme}/{host_path}. + // The components are defined as follows: + // package_id: app ID as specified in Google Play. + // scheme: the scheme to pass to the application. Can be HTTP, or a custom + // scheme. + // host_path: identifies the specific content within your application. + TravelPlaceholderFieldEnum_ANDROID_APP_LINK TravelPlaceholderFieldEnum_TravelPlaceholderField = 18 + // Data Type: STRING_LIST. List of recommended destination IDs to show + // together with this item. + TravelPlaceholderFieldEnum_SIMILAR_DESTINATION_IDS TravelPlaceholderFieldEnum_TravelPlaceholderField = 19 + // Data Type: STRING. iOS app link. + TravelPlaceholderFieldEnum_IOS_APP_LINK TravelPlaceholderFieldEnum_TravelPlaceholderField = 20 + // Data Type: INT64. iOS app store ID. 
+ TravelPlaceholderFieldEnum_IOS_APP_STORE_ID TravelPlaceholderFieldEnum_TravelPlaceholderField = 21 +) + +var TravelPlaceholderFieldEnum_TravelPlaceholderField_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DESTINATION_ID", + 3: "ORIGIN_ID", + 4: "TITLE", + 5: "DESTINATION_NAME", + 6: "ORIGIN_NAME", + 7: "PRICE", + 8: "FORMATTED_PRICE", + 9: "SALE_PRICE", + 10: "FORMATTED_SALE_PRICE", + 11: "IMAGE_URL", + 12: "CATEGORY", + 13: "CONTEXTUAL_KEYWORDS", + 14: "DESTINATION_ADDRESS", + 15: "FINAL_URL", + 16: "FINAL_MOBILE_URLS", + 17: "TRACKING_URL", + 18: "ANDROID_APP_LINK", + 19: "SIMILAR_DESTINATION_IDS", + 20: "IOS_APP_LINK", + 21: "IOS_APP_STORE_ID", +} +var TravelPlaceholderFieldEnum_TravelPlaceholderField_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DESTINATION_ID": 2, + "ORIGIN_ID": 3, + "TITLE": 4, + "DESTINATION_NAME": 5, + "ORIGIN_NAME": 6, + "PRICE": 7, + "FORMATTED_PRICE": 8, + "SALE_PRICE": 9, + "FORMATTED_SALE_PRICE": 10, + "IMAGE_URL": 11, + "CATEGORY": 12, + "CONTEXTUAL_KEYWORDS": 13, + "DESTINATION_ADDRESS": 14, + "FINAL_URL": 15, + "FINAL_MOBILE_URLS": 16, + "TRACKING_URL": 17, + "ANDROID_APP_LINK": 18, + "SIMILAR_DESTINATION_IDS": 19, + "IOS_APP_LINK": 20, + "IOS_APP_STORE_ID": 21, +} + +func (x TravelPlaceholderFieldEnum_TravelPlaceholderField) String() string { + return proto.EnumName(TravelPlaceholderFieldEnum_TravelPlaceholderField_name, int32(x)) +} +func (TravelPlaceholderFieldEnum_TravelPlaceholderField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_travel_placeholder_field_c5fb0d1e99f0b387, []int{0, 0} +} + +// Values for Travel placeholder fields. +// For more information about dynamic remarketing feeds, see +// https://support.google.com/google-ads/answer/6053288. +type TravelPlaceholderFieldEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TravelPlaceholderFieldEnum) Reset() { *m = TravelPlaceholderFieldEnum{} } +func (m *TravelPlaceholderFieldEnum) String() string { return proto.CompactTextString(m) } +func (*TravelPlaceholderFieldEnum) ProtoMessage() {} +func (*TravelPlaceholderFieldEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_travel_placeholder_field_c5fb0d1e99f0b387, []int{0} +} +func (m *TravelPlaceholderFieldEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TravelPlaceholderFieldEnum.Unmarshal(m, b) +} +func (m *TravelPlaceholderFieldEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TravelPlaceholderFieldEnum.Marshal(b, m, deterministic) +} +func (dst *TravelPlaceholderFieldEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_TravelPlaceholderFieldEnum.Merge(dst, src) +} +func (m *TravelPlaceholderFieldEnum) XXX_Size() int { + return xxx_messageInfo_TravelPlaceholderFieldEnum.Size(m) +} +func (m *TravelPlaceholderFieldEnum) XXX_DiscardUnknown() { + xxx_messageInfo_TravelPlaceholderFieldEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_TravelPlaceholderFieldEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TravelPlaceholderFieldEnum)(nil), "google.ads.googleads.v1.enums.TravelPlaceholderFieldEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.TravelPlaceholderFieldEnum_TravelPlaceholderField", TravelPlaceholderFieldEnum_TravelPlaceholderField_name, TravelPlaceholderFieldEnum_TravelPlaceholderField_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/travel_placeholder_field.proto", 
fileDescriptor_travel_placeholder_field_c5fb0d1e99f0b387) +} + +var fileDescriptor_travel_placeholder_field_c5fb0d1e99f0b387 = []byte{ + // 524 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0x66, 0x2d, 0xfb, 0x73, 0xbb, 0xd6, 0x73, 0x3b, 0x86, 0x36, 0x76, 0xb1, 0x3d, 0x40, 0xa2, + 0x8a, 0xbb, 0xc0, 0x8d, 0xdb, 0xb8, 0x91, 0xd5, 0xd4, 0x89, 0x6c, 0xb7, 0x63, 0xa8, 0x52, 0x14, + 0x96, 0x10, 0x2a, 0xa5, 0x49, 0xd5, 0x74, 0x7d, 0x20, 0x2e, 0x79, 0x01, 0xde, 0x81, 0x07, 0xe0, + 0x21, 0xb8, 0xe2, 0x11, 0x90, 0x9d, 0xfe, 0x21, 0x0d, 0x6e, 0xa2, 0x73, 0xce, 0xf7, 0xe3, 0x13, + 0xfb, 0x03, 0xef, 0x93, 0x3c, 0x4f, 0xd2, 0xd8, 0x0c, 0xa3, 0xc2, 0x2c, 0x4b, 0x55, 0xad, 0x3a, + 0x66, 0x9c, 0x3d, 0xcd, 0x0a, 0x73, 0xb9, 0x08, 0x57, 0x71, 0x1a, 0xcc, 0xd3, 0xf0, 0x31, 0xfe, + 0x92, 0xa7, 0x51, 0xbc, 0x08, 0x3e, 0x4f, 0xe3, 0x34, 0x32, 0xe6, 0x8b, 0x7c, 0x99, 0xa3, 0x9b, + 0x52, 0x62, 0x84, 0x51, 0x61, 0x6c, 0xd5, 0xc6, 0xaa, 0x63, 0x68, 0xf5, 0xd5, 0x9b, 0x8d, 0xf9, + 0x7c, 0x6a, 0x86, 0x59, 0x96, 0x2f, 0xc3, 0xe5, 0x34, 0xcf, 0x8a, 0x52, 0x7c, 0xf7, 0xb3, 0x0a, + 0xae, 0xa4, 0xf6, 0xf7, 0x77, 0xf6, 0x7d, 0xe5, 0x4e, 0xb2, 0xa7, 0xd9, 0xdd, 0xf7, 0x2a, 0x78, + 0xf5, 0x3c, 0x8c, 0x9a, 0xa0, 0x36, 0x62, 0xc2, 0x27, 0x3d, 0xda, 0xa7, 0xc4, 0x86, 0x2f, 0x50, + 0x0d, 0x1c, 0x8f, 0xd8, 0x80, 0x79, 0xf7, 0x0c, 0x1e, 0x20, 0x04, 0x1a, 0x36, 0x11, 0x92, 0x32, + 0x2c, 0xa9, 0xc7, 0x02, 0x6a, 0xc3, 0x0a, 0x3a, 0x03, 0xa7, 0x1e, 0xa7, 0x0e, 0xd5, 0x6d, 0x15, + 0x9d, 0x82, 0x43, 0x49, 0xa5, 0x4b, 0xe0, 0x4b, 0xd4, 0x06, 0x70, 0x9f, 0xcd, 0xf0, 0x90, 0xc0, + 0x43, 0x75, 0xc2, 0x9a, 0xaf, 0x07, 0x47, 0x4a, 0xe1, 0x73, 0xda, 0x23, 0xf0, 0x18, 0xb5, 0x40, + 0xb3, 0xef, 0xf1, 0x21, 0x96, 0x92, 0xd8, 0x41, 0x39, 0x3c, 0x41, 0x0d, 0x00, 0x04, 0x76, 0xc9, + 0xba, 0x3f, 0x45, 0xaf, 0x41, 0x7b, 0x47, 0xda, 0x43, 0x80, 0x5a, 0x85, 0x0e, 0xb1, 0x43, 0x82, + 0x11, 0x77, 0x61, 0x0d, 0xd5, 0xc1, 0x49, 0x0f, 0x4b, 0xe2, 0x78, 0xfc, 0x01, 0xd6, 0xd1, 0x25, + 0x68, 0xf5, 0x3c, 0x26, 0xc9, 0x07, 0x39, 0xc2, 0x6e, 0x30, 0x20, 0x0f, 0xf7, 0x1e, 0xb7, 0x05, + 0x3c, 0x53, 0xc0, 0xfe, 0x9a, 0xd8, 0xb6, 0x39, 0x11, 0x02, 0x36, 0x94, 0x5d, 0x9f, 0x32, 0xec, + 0x6a, 0xbb, 0x26, 0xba, 0x00, 0xe7, 0x65, 0x3b, 0xf4, 0xba, 0xd4, 0xd5, 0x87, 0x08, 0x08, 0x11, + 0x04, 0x75, 0xc9, 0x71, 0x6f, 0x40, 0x99, 0xa3, 0x89, 0xe7, 0xea, 0xbf, 0x31, 0xb3, 0xb9, 0x47, + 0xed, 0x00, 0xfb, 0x7e, 0xe0, 0x52, 0x36, 0x80, 0x08, 0x5d, 0x83, 0x4b, 0x41, 0x87, 0xd4, 0xc5, + 0x3c, 0xf8, 0xfb, 0x0e, 0x05, 0x6c, 0x29, 0x13, 0xea, 0x89, 0x1d, 0xbd, 0xad, 0x4c, 0x36, 0x13, + 0x21, 0x3d, 0x4e, 0xd4, 0xed, 0x5e, 0x74, 0x7f, 0x1f, 0x80, 0xdb, 0xc7, 0x7c, 0x66, 0xfc, 0x37, + 0x1c, 0xdd, 0xeb, 0xe7, 0x1f, 0xd7, 0x57, 0xd9, 0xf0, 0x0f, 0x3e, 0x76, 0xd7, 0xea, 0x24, 0x4f, + 0xc3, 0x2c, 0x31, 0xf2, 0x45, 0x62, 0x26, 0x71, 0xa6, 0x93, 0xb3, 0x09, 0xea, 0x7c, 0x5a, 0xfc, + 0x23, 0xb7, 0xef, 0xf4, 0xf7, 0x6b, 0xa5, 0xea, 0x60, 0xfc, 0xad, 0x72, 0xe3, 0x94, 0x56, 0x38, + 0x2a, 0x8c, 0xb2, 0x54, 0xd5, 0xb8, 0x63, 0xa8, 0x9c, 0x15, 0x3f, 0x36, 0xf8, 0x04, 0x47, 0xc5, + 0x64, 0x8b, 0x4f, 0xc6, 0x9d, 0x89, 0xc6, 0x7f, 0x55, 0x6e, 0xcb, 0xa1, 0x65, 0xe1, 0xa8, 0xb0, + 0xac, 0x2d, 0xc3, 0xb2, 0xc6, 0x1d, 0xcb, 0xd2, 0x9c, 0x4f, 0x47, 0x7a, 0xb1, 0xb7, 0x7f, 0x02, + 0x00, 0x00, 0xff, 0xff, 0xb1, 0xed, 0x26, 0x00, 0x4f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_interest_taxonomy_type.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_interest_taxonomy_type.pb.go new file mode 100644 index 0000000..a1dc7f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_interest_taxonomy_type.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_interest_taxonomy_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing the possible UserInterestTaxonomyTypes. +type UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType int32 + +const ( + // Not specified. + UserInterestTaxonomyTypeEnum_UNSPECIFIED UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 0 + // Used for return value only. Represents value unknown in this version. + UserInterestTaxonomyTypeEnum_UNKNOWN UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 1 + // The affinity for this user interest. + UserInterestTaxonomyTypeEnum_AFFINITY UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 2 + // The market for this user interest. + UserInterestTaxonomyTypeEnum_IN_MARKET UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 3 + // Users known to have installed applications in the specified categories. + UserInterestTaxonomyTypeEnum_MOBILE_APP_INSTALL_USER UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 4 + // The geographical location of the interest-based vertical. + UserInterestTaxonomyTypeEnum_VERTICAL_GEO UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 5 + // User interest criteria for new smart phone users. + UserInterestTaxonomyTypeEnum_NEW_SMART_PHONE_USER UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType = 6 +) + +var UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AFFINITY", + 3: "IN_MARKET", + 4: "MOBILE_APP_INSTALL_USER", + 5: "VERTICAL_GEO", + 6: "NEW_SMART_PHONE_USER", +} +var UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AFFINITY": 2, + "IN_MARKET": 3, + "MOBILE_APP_INSTALL_USER": 4, + "VERTICAL_GEO": 5, + "NEW_SMART_PHONE_USER": 6, +} + +func (x UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType) String() string { + return proto.EnumName(UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType_name, int32(x)) +} +func (UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_interest_taxonomy_type_2ebc60b51ce4cdbd, []int{0, 0} +} + +// Message describing a UserInterestTaxonomyType. 
+type UserInterestTaxonomyTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserInterestTaxonomyTypeEnum) Reset() { *m = UserInterestTaxonomyTypeEnum{} } +func (m *UserInterestTaxonomyTypeEnum) String() string { return proto.CompactTextString(m) } +func (*UserInterestTaxonomyTypeEnum) ProtoMessage() {} +func (*UserInterestTaxonomyTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_interest_taxonomy_type_2ebc60b51ce4cdbd, []int{0} +} +func (m *UserInterestTaxonomyTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserInterestTaxonomyTypeEnum.Unmarshal(m, b) +} +func (m *UserInterestTaxonomyTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserInterestTaxonomyTypeEnum.Marshal(b, m, deterministic) +} +func (dst *UserInterestTaxonomyTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserInterestTaxonomyTypeEnum.Merge(dst, src) +} +func (m *UserInterestTaxonomyTypeEnum) XXX_Size() int { + return xxx_messageInfo_UserInterestTaxonomyTypeEnum.Size(m) +} +func (m *UserInterestTaxonomyTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserInterestTaxonomyTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserInterestTaxonomyTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserInterestTaxonomyTypeEnum)(nil), "google.ads.googleads.v1.enums.UserInterestTaxonomyTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType", UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType_name, UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_interest_taxonomy_type.proto", fileDescriptor_user_interest_taxonomy_type_2ebc60b51ce4cdbd) +} + +var fileDescriptor_user_interest_taxonomy_type_2ebc60b51ce4cdbd = []byte{ + // 387 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcf, 0x8a, 0xd4, 0x30, + 0x1c, 0xb6, 0xb3, 0xba, 0x6a, 0x76, 0xc5, 0x12, 0x04, 0x17, 0xdd, 0x39, 0xec, 0x3e, 0x40, 0x4a, + 0xf1, 0x16, 0x0f, 0x92, 0xae, 0x99, 0x31, 0x6c, 0x27, 0x2d, 0xfd, 0xb7, 0x28, 0x85, 0x50, 0x6d, + 0x28, 0x85, 0x6d, 0x52, 0x9a, 0xce, 0xe0, 0xbc, 0x8c, 0x07, 0x8f, 0xbe, 0x84, 0x77, 0x1f, 0xc5, + 0xa3, 0x4f, 0x20, 0x6d, 0x67, 0xe6, 0x36, 0x7b, 0x09, 0x1f, 0xf9, 0xfe, 0xfc, 0x92, 0xef, 0x07, + 0x3e, 0x54, 0x5a, 0x57, 0xf7, 0xd2, 0x29, 0x4a, 0xe3, 0x4c, 0x70, 0x40, 0x1b, 0xd7, 0x91, 0x6a, + 0xdd, 0x18, 0x67, 0x6d, 0x64, 0x27, 0x6a, 0xd5, 0xcb, 0x4e, 0x9a, 0x5e, 0xf4, 0xc5, 0x77, 0xad, + 0x74, 0xb3, 0x15, 0xfd, 0xb6, 0x95, 0xa8, 0xed, 0x74, 0xaf, 0xe1, 0x7c, 0x72, 0xa1, 0xa2, 0x34, + 0xe8, 0x10, 0x80, 0x36, 0x2e, 0x1a, 0x03, 0xde, 0x5c, 0xee, 0xf3, 0xdb, 0xda, 0x29, 0x94, 0xd2, + 0x7d, 0xd1, 0xd7, 0x5a, 0x99, 0xc9, 0x7c, 0xfd, 0xdb, 0x02, 0x97, 0xa9, 0x91, 0x1d, 0xdb, 0x4d, + 0x48, 0x76, 0x03, 0x92, 0x6d, 0x2b, 0xa9, 0x5a, 0x37, 0xd7, 0x3f, 0x2c, 0x70, 0x71, 0x4c, 0x00, + 0x5f, 0x82, 0xb3, 0x94, 0xc7, 0x21, 0xbd, 0x61, 0x0b, 0x46, 0x3f, 0xda, 0x8f, 0xe0, 0x19, 0x78, + 0x9a, 0xf2, 0x5b, 0x1e, 0xdc, 0x71, 0xdb, 0x82, 0xe7, 0xe0, 0x19, 0x59, 0x2c, 0x18, 0x67, 0xc9, + 0x67, 0x7b, 0x06, 0x5f, 0x80, 0xe7, 0x8c, 0x8b, 0x15, 0x89, 0x6e, 0x69, 0x62, 0x9f, 0xc0, 0xb7, + 0xe0, 0xf5, 0x2a, 0xf0, 0x98, 0x4f, 0x05, 0x09, 0x43, 0xc1, 0x78, 0x9c, 0x10, 0xdf, 0x17, 0x69, + 0x4c, 0x23, 0xfb, 0x31, 0xb4, 0xc1, 0x79, 0x46, 0xa3, 0x84, 0xdd, 
0x10, 0x5f, 0x2c, 0x69, 0x60, + 0x3f, 0x81, 0x17, 0xe0, 0x15, 0xa7, 0x77, 0x22, 0x5e, 0x91, 0x28, 0x11, 0xe1, 0xa7, 0x80, 0xd3, + 0x49, 0x7b, 0xea, 0xfd, 0xb3, 0xc0, 0xd5, 0x37, 0xdd, 0xa0, 0x07, 0x5b, 0xf0, 0xe6, 0xc7, 0xfe, + 0x10, 0x0e, 0x35, 0x84, 0xd6, 0x17, 0x6f, 0xe7, 0xaf, 0xf4, 0x7d, 0xa1, 0x2a, 0xa4, 0xbb, 0xca, + 0xa9, 0xa4, 0x1a, 0x4b, 0xda, 0xaf, 0xa5, 0xad, 0xcd, 0x91, 0x2d, 0xbd, 0x1f, 0xcf, 0x9f, 0xb3, + 0x93, 0x25, 0x21, 0xbf, 0x66, 0xf3, 0xe5, 0x14, 0x45, 0x4a, 0x83, 0x26, 0x38, 0xa0, 0xcc, 0x45, + 0x43, 0xa1, 0xe6, 0xcf, 0x9e, 0xcf, 0x49, 0x69, 0xf2, 0x03, 0x9f, 0x67, 0x6e, 0x3e, 0xf2, 0x7f, + 0x67, 0x57, 0xd3, 0x25, 0xc6, 0xa4, 0x34, 0x18, 0x1f, 0x14, 0x18, 0x67, 0x2e, 0xc6, 0xa3, 0xe6, + 0xeb, 0xe9, 0xf8, 0xb0, 0x77, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x2c, 0x3f, 0x4e, 0x3d, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_access_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_access_status.pb.go new file mode 100644 index 0000000..ac9054a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_access_status.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_access_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible user list access statuses. +type UserListAccessStatusEnum_UserListAccessStatus int32 + +const ( + // Not specified. + UserListAccessStatusEnum_UNSPECIFIED UserListAccessStatusEnum_UserListAccessStatus = 0 + // Used for return value only. Represents value unknown in this version. + UserListAccessStatusEnum_UNKNOWN UserListAccessStatusEnum_UserListAccessStatus = 1 + // The access is enabled. + UserListAccessStatusEnum_ENABLED UserListAccessStatusEnum_UserListAccessStatus = 2 + // The access is disabled. + UserListAccessStatusEnum_DISABLED UserListAccessStatusEnum_UserListAccessStatus = 3 +) + +var UserListAccessStatusEnum_UserListAccessStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ENABLED", + 3: "DISABLED", +} +var UserListAccessStatusEnum_UserListAccessStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENABLED": 2, + "DISABLED": 3, +} + +func (x UserListAccessStatusEnum_UserListAccessStatus) String() string { + return proto.EnumName(UserListAccessStatusEnum_UserListAccessStatus_name, int32(x)) +} +func (UserListAccessStatusEnum_UserListAccessStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_access_status_351bc453276f30da, []int{0, 0} +} + +// Indicates if this client still has access to the list. 
+type UserListAccessStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListAccessStatusEnum) Reset() { *m = UserListAccessStatusEnum{} } +func (m *UserListAccessStatusEnum) String() string { return proto.CompactTextString(m) } +func (*UserListAccessStatusEnum) ProtoMessage() {} +func (*UserListAccessStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_access_status_351bc453276f30da, []int{0} +} +func (m *UserListAccessStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListAccessStatusEnum.Unmarshal(m, b) +} +func (m *UserListAccessStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListAccessStatusEnum.Marshal(b, m, deterministic) +} +func (dst *UserListAccessStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListAccessStatusEnum.Merge(dst, src) +} +func (m *UserListAccessStatusEnum) XXX_Size() int { + return xxx_messageInfo_UserListAccessStatusEnum.Size(m) +} +func (m *UserListAccessStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListAccessStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListAccessStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListAccessStatusEnum)(nil), "google.ads.googleads.v1.enums.UserListAccessStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListAccessStatusEnum_UserListAccessStatus", UserListAccessStatusEnum_UserListAccessStatus_name, UserListAccessStatusEnum_UserListAccessStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_access_status.proto", fileDescriptor_user_list_access_status_351bc453276f30da) +} + +var fileDescriptor_user_list_access_status_351bc453276f30da = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x6a, 0xeb, 0x30, + 0x10, 0xfd, 0x71, 0xe0, 0xb7, 0x28, 0x85, 0x06, 0xd3, 0x45, 0x5b, 0x9a, 0x45, 0x72, 0x00, 0x09, + 0xd3, 0x9d, 0xb2, 0x92, 0x1b, 0x37, 0x84, 0x06, 0x27, 0x10, 0x92, 0x42, 0x31, 0x04, 0x35, 0x16, + 0xc2, 0x34, 0x96, 0x8c, 0x47, 0xce, 0x81, 0xba, 0xec, 0x51, 0x7a, 0x94, 0x2e, 0x7a, 0x86, 0x62, + 0x29, 0xf6, 0x2a, 0xed, 0x46, 0xbc, 0xd1, 0x9b, 0xf7, 0xe6, 0xcd, 0xa0, 0xb1, 0xd4, 0x5a, 0xee, + 0x05, 0xe1, 0x29, 0x10, 0x07, 0x6b, 0x74, 0x08, 0x88, 0x50, 0x55, 0x0e, 0xa4, 0x02, 0x51, 0x6e, + 0xf7, 0x19, 0x98, 0x2d, 0xdf, 0xed, 0x04, 0xc0, 0x16, 0x0c, 0x37, 0x15, 0xe0, 0xa2, 0xd4, 0x46, + 0xfb, 0x03, 0xa7, 0xc0, 0x3c, 0x05, 0xdc, 0x8a, 0xf1, 0x21, 0xc0, 0x56, 0x7c, 0x7b, 0xd7, 0x78, + 0x17, 0x19, 0xe1, 0x4a, 0x69, 0xc3, 0x4d, 0xa6, 0xd5, 0x51, 0x3c, 0x7a, 0x43, 0xd7, 0x6b, 0x10, + 0xe5, 0x3c, 0x03, 0xc3, 0xac, 0xf7, 0xca, 0x5a, 0x47, 0xaa, 0xca, 0x47, 0x0b, 0x74, 0x75, 0x8a, + 0xf3, 0x2f, 0x51, 0x6f, 0x1d, 0xaf, 0x96, 0xd1, 0xc3, 0xec, 0x71, 0x16, 0x4d, 0xfa, 0xff, 0xfc, + 0x1e, 0x3a, 0x5b, 0xc7, 0x4f, 0xf1, 0xe2, 0x39, 0xee, 0x77, 0xea, 0x22, 0x8a, 0x59, 0x38, 0x8f, + 0x26, 0x7d, 0xcf, 0xbf, 0x40, 0xe7, 0x93, 0xd9, 0xca, 0x55, 0xdd, 0xf0, 0xbb, 0x83, 0x86, 0x3b, + 0x9d, 0xe3, 0x3f, 0x03, 0x87, 0x37, 0xa7, 0x86, 0x2e, 0xeb, 0xb4, 0xcb, 0xce, 0x4b, 0x78, 0xd4, + 0x4a, 0xbd, 0xe7, 0x4a, 0x62, 0x5d, 0x4a, 0x22, 0x85, 0xb2, 0xbb, 0x34, 0x97, 0x2b, 0x32, 0xf8, + 0xe5, 0x90, 0x63, 0xfb, 0xbe, 0x7b, 0xdd, 0x29, 0x63, 0x1f, 0xde, 0x60, 0xea, 0xac, 0x58, 0x0a, + 0xd8, 0xc1, 0x1a, 0x6d, 0x02, 0x5c, 0x2f, 0x0f, 0x9f, 0x0d, 0x9f, 0xb0, 0x14, 0x92, 
0x96, 0x4f, + 0x36, 0x41, 0x62, 0xf9, 0x2f, 0x6f, 0xe8, 0x3e, 0x29, 0x65, 0x29, 0x50, 0xda, 0x76, 0x50, 0xba, + 0x09, 0x28, 0xb5, 0x3d, 0xaf, 0xff, 0x6d, 0xb0, 0xfb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x99, + 0x4b, 0x85, 0x3b, 0xe0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_closing_reason.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_closing_reason.pb.go new file mode 100644 index 0000000..f6d7afd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_closing_reason.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_closing_reason.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list closing reasons. +type UserListClosingReasonEnum_UserListClosingReason int32 + +const ( + // Not specified. + UserListClosingReasonEnum_UNSPECIFIED UserListClosingReasonEnum_UserListClosingReason = 0 + // Used for return value only. Represents value unknown in this version. + UserListClosingReasonEnum_UNKNOWN UserListClosingReasonEnum_UserListClosingReason = 1 + // The userlist was closed because of not being used for over one year. + UserListClosingReasonEnum_UNUSED UserListClosingReasonEnum_UserListClosingReason = 2 +) + +var UserListClosingReasonEnum_UserListClosingReason_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "UNUSED", +} +var UserListClosingReasonEnum_UserListClosingReason_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "UNUSED": 2, +} + +func (x UserListClosingReasonEnum_UserListClosingReason) String() string { + return proto.EnumName(UserListClosingReasonEnum_UserListClosingReason_name, int32(x)) +} +func (UserListClosingReasonEnum_UserListClosingReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_closing_reason_b58c1e2643567ca7, []int{0, 0} +} + +// Indicates the reason why the userlist was closed. +// This enum is only used when a list is auto-closed by the system. 
+type UserListClosingReasonEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListClosingReasonEnum) Reset() { *m = UserListClosingReasonEnum{} } +func (m *UserListClosingReasonEnum) String() string { return proto.CompactTextString(m) } +func (*UserListClosingReasonEnum) ProtoMessage() {} +func (*UserListClosingReasonEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_closing_reason_b58c1e2643567ca7, []int{0} +} +func (m *UserListClosingReasonEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListClosingReasonEnum.Unmarshal(m, b) +} +func (m *UserListClosingReasonEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListClosingReasonEnum.Marshal(b, m, deterministic) +} +func (dst *UserListClosingReasonEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListClosingReasonEnum.Merge(dst, src) +} +func (m *UserListClosingReasonEnum) XXX_Size() int { + return xxx_messageInfo_UserListClosingReasonEnum.Size(m) +} +func (m *UserListClosingReasonEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListClosingReasonEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListClosingReasonEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListClosingReasonEnum)(nil), "google.ads.googleads.v1.enums.UserListClosingReasonEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListClosingReasonEnum_UserListClosingReason", UserListClosingReasonEnum_UserListClosingReason_name, UserListClosingReasonEnum_UserListClosingReason_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_closing_reason.proto", fileDescriptor_user_list_closing_reason_b58c1e2643567ca7) +} + +var fileDescriptor_user_list_closing_reason_b58c1e2643567ca7 = []byte{ + // 302 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x85, 0x09, 0xd9, 0xc1, 0x51, 0xf0, 0xe0, 0x70, 0x87, 0xed, 0x01, 0x12, 0x8a, + 0xb7, 0xe8, 0x25, 0xdb, 0xea, 0x18, 0x4a, 0x1d, 0x8e, 0x4e, 0x90, 0xe2, 0x88, 0x6b, 0x08, 0x81, + 0x36, 0x29, 0xfd, 0xb7, 0x7b, 0x20, 0x8f, 0x3e, 0x8a, 0x8f, 0xe2, 0xc5, 0x57, 0x90, 0x26, 0xb6, + 0xa7, 0xe9, 0x25, 0x7c, 0xe4, 0xfb, 0xff, 0xbe, 0x7c, 0xf9, 0xa3, 0x5b, 0x69, 0x8c, 0xcc, 0x04, + 0xe1, 0x29, 0x10, 0x27, 0x1b, 0x75, 0x08, 0x88, 0xd0, 0x75, 0x0e, 0xa4, 0x06, 0x51, 0xee, 0x32, + 0x05, 0xd5, 0x6e, 0x9f, 0x19, 0x50, 0x5a, 0xee, 0x4a, 0xc1, 0xc1, 0x68, 0x5c, 0x94, 0xa6, 0x32, + 0xfe, 0xd8, 0x21, 0x98, 0xa7, 0x80, 0x3b, 0x1a, 0x1f, 0x02, 0x6c, 0xe9, 0xd1, 0x55, 0x1b, 0x5e, + 0x28, 0xc2, 0xb5, 0x36, 0x15, 0xaf, 0x94, 0xd1, 0xe0, 0xe0, 0xe9, 0x2b, 0xba, 0x8c, 0x41, 0x94, + 0x0f, 0x0a, 0xaa, 0xb9, 0x0b, 0x7f, 0xb2, 0xd9, 0xa1, 0xae, 0xf3, 0x29, 0x43, 0x17, 0x47, 0x4d, + 0xff, 0x1c, 0x0d, 0xe2, 0x68, 0xb3, 0x0e, 0xe7, 0xab, 0xbb, 0x55, 0xb8, 0x18, 0x9e, 0xf8, 0x03, + 0x74, 0x16, 0x47, 0xf7, 0xd1, 0xe3, 0x73, 0x34, 0xec, 0xf9, 0x08, 0xf5, 0xe3, 0x28, 0xde, 0x84, + 0x8b, 0xa1, 0x37, 0xfb, 0xee, 0xa1, 0xc9, 0xde, 0xe4, 0xf8, 0xdf, 0x8e, 0xb3, 0xd1, 0xd1, 0x67, + 0xd6, 0x4d, 0xc3, 0x75, 0xef, 0x65, 0xf6, 0x0b, 0x4b, 0x93, 0x71, 0x2d, 0xb1, 0x29, 0x25, 0x91, + 0x42, 0xdb, 0xfe, 0xed, 0xba, 0x0a, 0x05, 0x7f, 0x6c, 0xef, 0xc6, 0x9e, 0xef, 0xde, 0xe9, 0x92, + 0xb1, 0x0f, 0x6f, 0xbc, 0x74, 0x51, 0x2c, 0x05, 0xec, 0x64, 0xa3, 0xb6, 0x01, 0x6e, 0xfe, 0x0b, + 0x9f, 0xad, 0x9f, 0xb0, 0x14, 0x92, 0xce, 0x4f, 0xb6, 
0x41, 0x62, 0xfd, 0x2f, 0x6f, 0xe2, 0x2e, + 0x29, 0x65, 0x29, 0x50, 0xda, 0x4d, 0x50, 0xba, 0x0d, 0x28, 0xb5, 0x33, 0x6f, 0x7d, 0x5b, 0xec, + 0xfa, 0x27, 0x00, 0x00, 0xff, 0xff, 0xa6, 0x92, 0x92, 0x02, 0xd5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_combined_rule_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_combined_rule_operator.pb.go new file mode 100644 index 0000000..c0821c8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_combined_rule_operator.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_combined_rule_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list combined rule operators. +type UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator int32 + +const ( + // Not specified. + UserListCombinedRuleOperatorEnum_UNSPECIFIED UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator = 0 + // Used for return value only. Represents value unknown in this version. + UserListCombinedRuleOperatorEnum_UNKNOWN UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator = 1 + // A AND B. + UserListCombinedRuleOperatorEnum_AND UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator = 2 + // A AND NOT B. + UserListCombinedRuleOperatorEnum_AND_NOT UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator = 3 +) + +var UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AND", + 3: "AND_NOT", +} +var UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AND": 2, + "AND_NOT": 3, +} + +func (x UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator) String() string { + return proto.EnumName(UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator_name, int32(x)) +} +func (UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_combined_rule_operator_4ebfc904d7db5e97, []int{0, 0} +} + +// Logical operator connecting two rules. 
+type UserListCombinedRuleOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListCombinedRuleOperatorEnum) Reset() { *m = UserListCombinedRuleOperatorEnum{} } +func (m *UserListCombinedRuleOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListCombinedRuleOperatorEnum) ProtoMessage() {} +func (*UserListCombinedRuleOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_combined_rule_operator_4ebfc904d7db5e97, []int{0} +} +func (m *UserListCombinedRuleOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListCombinedRuleOperatorEnum.Unmarshal(m, b) +} +func (m *UserListCombinedRuleOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListCombinedRuleOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListCombinedRuleOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListCombinedRuleOperatorEnum.Merge(dst, src) +} +func (m *UserListCombinedRuleOperatorEnum) XXX_Size() int { + return xxx_messageInfo_UserListCombinedRuleOperatorEnum.Size(m) +} +func (m *UserListCombinedRuleOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListCombinedRuleOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListCombinedRuleOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListCombinedRuleOperatorEnum)(nil), "google.ads.googleads.v1.enums.UserListCombinedRuleOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator", UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator_name, UserListCombinedRuleOperatorEnum_UserListCombinedRuleOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_combined_rule_operator.proto", fileDescriptor_user_list_combined_rule_operator_4ebfc904d7db5e97) +} + +var fileDescriptor_user_list_combined_rule_operator_4ebfc904d7db5e97 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x6a, 0xea, 0x40, + 0x14, 0x7d, 0x46, 0x78, 0xc2, 0xb8, 0x68, 0xc8, 0xb2, 0x28, 0x54, 0x3f, 0x60, 0x42, 0xe8, 0x6e, + 0xba, 0x1a, 0x8d, 0x15, 0x69, 0x19, 0xc5, 0x56, 0x0b, 0x25, 0x10, 0xa2, 0x19, 0x42, 0x20, 0x99, + 0x1b, 0xe6, 0x26, 0x7e, 0x50, 0x97, 0xfd, 0x94, 0x7e, 0x4a, 0x3f, 0xa0, 0xeb, 0x92, 0x19, 0x75, + 0xd7, 0x6c, 0x86, 0xc3, 0x9c, 0x73, 0xcf, 0xb9, 0xf7, 0x90, 0x30, 0x03, 0xc8, 0x0a, 0xe9, 0x27, + 0x29, 0xfa, 0x16, 0xb6, 0xe8, 0x14, 0xf8, 0x52, 0x35, 0x25, 0xfa, 0x0d, 0x4a, 0x1d, 0x17, 0x39, + 0xd6, 0xf1, 0x11, 0xca, 0x43, 0xae, 0x64, 0x1a, 0xeb, 0xa6, 0x90, 0x31, 0x54, 0x52, 0x27, 0x35, + 0x68, 0x5a, 0x69, 0xa8, 0xc1, 0x1b, 0xdb, 0x51, 0x9a, 0xa4, 0x48, 0xaf, 0x2e, 0xf4, 0x14, 0x50, + 0xe3, 0x72, 0x3b, 0xba, 0x84, 0x54, 0xb9, 0x9f, 0x28, 0x05, 0x75, 0x52, 0xe7, 0xa0, 0xd0, 0x0e, + 0x4f, 0x4f, 0xe4, 0x6e, 0x87, 0x52, 0x3f, 0xe7, 0x58, 0xcf, 0xcf, 0x21, 0xdb, 0xa6, 0x90, 0xeb, + 0x73, 0xc4, 0x42, 0x35, 0xe5, 0x74, 0x4b, 0x46, 0x5d, 0x1a, 0xef, 0x86, 0x0c, 0x77, 0xe2, 0x65, + 0xb3, 0x98, 0xaf, 0x1e, 0x57, 0x8b, 0xd0, 0xfd, 0xe7, 0x0d, 0xc9, 0x60, 0x27, 0x9e, 0xc4, 0xfa, + 0x4d, 0xb8, 0x3d, 0x6f, 0x40, 0xfa, 0x5c, 0x84, 0xae, 0xd3, 0xfe, 0x72, 0x11, 0xc6, 0x62, 0xfd, + 0xea, 0xf6, 0x67, 0x3f, 0x3d, 0x32, 0x39, 0x42, 0x49, 0x3b, 0x77, 0x9f, 0x4d, 0xba, 0x72, 0x37, + 0xed, 0x01, 0x9b, 0xde, 0xfb, 0xec, 0xec, 
0x91, 0x41, 0x91, 0xa8, 0x8c, 0x82, 0xce, 0xfc, 0x4c, + 0x2a, 0x73, 0xde, 0xa5, 0xd5, 0x2a, 0xc7, 0x3f, 0x4a, 0x7e, 0x30, 0xef, 0x87, 0xd3, 0x5f, 0x72, + 0xfe, 0xe9, 0x8c, 0x97, 0xd6, 0x8a, 0xa7, 0x48, 0x2d, 0x6c, 0xd1, 0x3e, 0xa0, 0x6d, 0x0f, 0xf8, + 0x75, 0xe1, 0x23, 0x9e, 0x62, 0x74, 0xe5, 0xa3, 0x7d, 0x10, 0x19, 0xfe, 0xdb, 0x99, 0xd8, 0x4f, + 0xc6, 0x78, 0x8a, 0x8c, 0x5d, 0x15, 0x8c, 0xed, 0x03, 0xc6, 0x8c, 0xe6, 0xf0, 0xdf, 0x2c, 0x76, + 0xff, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xdd, 0x53, 0xc2, 0xa7, 0xfc, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_crm_data_source_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_crm_data_source_type.pb.go new file mode 100644 index 0000000..4408180 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_crm_data_source_type.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_crm_data_source_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list crm data source type. +type UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType int32 + +const ( + // Not specified. + UserListCrmDataSourceTypeEnum_UNSPECIFIED UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType = 0 + // Used for return value only. Represents value unknown in this version. + UserListCrmDataSourceTypeEnum_UNKNOWN UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType = 1 + // The uploaded data is first-party data. + UserListCrmDataSourceTypeEnum_FIRST_PARTY UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType = 2 + // The uploaded data is from a third-party credit bureau. + UserListCrmDataSourceTypeEnum_THIRD_PARTY_CREDIT_BUREAU UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType = 3 + // The uploaded data is from a third-party voter file. 
+ UserListCrmDataSourceTypeEnum_THIRD_PARTY_VOTER_FILE UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType = 4 +) + +var UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FIRST_PARTY", + 3: "THIRD_PARTY_CREDIT_BUREAU", + 4: "THIRD_PARTY_VOTER_FILE", +} +var UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FIRST_PARTY": 2, + "THIRD_PARTY_CREDIT_BUREAU": 3, + "THIRD_PARTY_VOTER_FILE": 4, +} + +func (x UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType) String() string { + return proto.EnumName(UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType_name, int32(x)) +} +func (UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_crm_data_source_type_b5173b7d81e81cef, []int{0, 0} +} + +// Indicates source of Crm upload data. +type UserListCrmDataSourceTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListCrmDataSourceTypeEnum) Reset() { *m = UserListCrmDataSourceTypeEnum{} } +func (m *UserListCrmDataSourceTypeEnum) String() string { return proto.CompactTextString(m) } +func (*UserListCrmDataSourceTypeEnum) ProtoMessage() {} +func (*UserListCrmDataSourceTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_crm_data_source_type_b5173b7d81e81cef, []int{0} +} +func (m *UserListCrmDataSourceTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListCrmDataSourceTypeEnum.Unmarshal(m, b) +} +func (m *UserListCrmDataSourceTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListCrmDataSourceTypeEnum.Marshal(b, m, deterministic) +} +func (dst *UserListCrmDataSourceTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListCrmDataSourceTypeEnum.Merge(dst, src) +} +func (m *UserListCrmDataSourceTypeEnum) XXX_Size() int { + return xxx_messageInfo_UserListCrmDataSourceTypeEnum.Size(m) +} +func (m *UserListCrmDataSourceTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListCrmDataSourceTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListCrmDataSourceTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListCrmDataSourceTypeEnum)(nil), "google.ads.googleads.v1.enums.UserListCrmDataSourceTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType", UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType_name, UserListCrmDataSourceTypeEnum_UserListCrmDataSourceType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_crm_data_source_type.proto", fileDescriptor_user_list_crm_data_source_type_b5173b7d81e81cef) +} + +var fileDescriptor_user_list_crm_data_source_type_b5173b7d81e81cef = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x6a, 0xab, 0x40, + 0x14, 0x7d, 0x9a, 0x47, 0x0b, 0x93, 0x45, 0xc5, 0x45, 0x21, 0xa1, 0x16, 0x92, 0x0f, 0x18, 0x91, + 0xee, 0xa6, 0x2b, 0x8d, 0x26, 0x95, 0x06, 0x23, 0x46, 0x2d, 0x2d, 0xc2, 0x30, 0x8d, 0x83, 0x08, + 0xd1, 0x11, 0x67, 0x0c, 0xe4, 0x03, 0xfa, 0x23, 0xdd, 0x14, 0xfa, 0x29, 0xfd, 0x94, 0x6e, 0xfb, + 0x03, 0x45, 0x6d, 0x42, 0x37, 0xe9, 0x66, 0x38, 0xcc, 0x39, 0xf7, 0xdc, 0x7b, 0xcf, 0x05, 0x56, + 0xc6, 0x58, 0xb6, 0xa5, 0x3a, 0x49, 0xb9, 0xde, 0xc3, 
0x16, 0xed, 0x0c, 0x9d, 0x96, 0x4d, 0xc1, + 0xf5, 0x86, 0xd3, 0x1a, 0x6f, 0x73, 0x2e, 0xf0, 0xa6, 0x2e, 0x70, 0x4a, 0x04, 0xc1, 0x9c, 0x35, + 0xf5, 0x86, 0x62, 0xb1, 0xaf, 0x28, 0xac, 0x6a, 0x26, 0x98, 0xaa, 0xf5, 0x85, 0x90, 0xa4, 0x1c, + 0x1e, 0x3d, 0xe0, 0xce, 0x80, 0x9d, 0xc7, 0xf8, 0xea, 0xd0, 0xa2, 0xca, 0x75, 0x52, 0x96, 0x4c, + 0x10, 0x91, 0xb3, 0x92, 0xf7, 0xc5, 0xd3, 0x37, 0x09, 0x68, 0x11, 0xa7, 0xf5, 0x32, 0xe7, 0x62, + 0x56, 0x17, 0x36, 0x11, 0x64, 0xdd, 0x75, 0x08, 0xf7, 0x15, 0x75, 0xca, 0xa6, 0x98, 0xbe, 0x48, + 0x60, 0x74, 0x52, 0xa1, 0x5e, 0x80, 0x61, 0xe4, 0xad, 0x7d, 0x67, 0xe6, 0xce, 0x5d, 0xc7, 0x56, + 0xfe, 0xa9, 0x43, 0x70, 0x1e, 0x79, 0xf7, 0xde, 0xea, 0xc1, 0x53, 0xa4, 0x96, 0x9d, 0xbb, 0xc1, + 0x3a, 0xc4, 0xbe, 0x19, 0x84, 0x8f, 0x8a, 0xac, 0x6a, 0x60, 0x14, 0xde, 0xb9, 0x81, 0xdd, 0x7f, + 0xe0, 0x59, 0xe0, 0xd8, 0x6e, 0x88, 0xad, 0x28, 0x70, 0xcc, 0x48, 0x19, 0xa8, 0x63, 0x70, 0xf9, + 0x9b, 0x8e, 0x57, 0xa1, 0x13, 0xe0, 0xb9, 0xbb, 0x74, 0x94, 0xff, 0xd6, 0x97, 0x04, 0x26, 0x1b, + 0x56, 0xc0, 0x3f, 0xb7, 0xb5, 0xae, 0x4f, 0x8e, 0xea, 0xb7, 0xfb, 0xfa, 0xd2, 0xd3, 0x4f, 0xe4, + 0x30, 0x63, 0x5b, 0x52, 0x66, 0x90, 0xd5, 0x99, 0x9e, 0xd1, 0xb2, 0x4b, 0xe3, 0x70, 0x82, 0x2a, + 0xe7, 0x27, 0x2e, 0x72, 0xdb, 0xbd, 0xaf, 0xf2, 0x60, 0x61, 0x9a, 0xef, 0xb2, 0xb6, 0xe8, 0xad, + 0xcc, 0x94, 0xc3, 0x1e, 0xb6, 0x28, 0x36, 0x60, 0x1b, 0x1c, 0xff, 0x38, 0xf0, 0x89, 0x99, 0xf2, + 0xe4, 0xc8, 0x27, 0xb1, 0x91, 0x74, 0xfc, 0xa7, 0x3c, 0xe9, 0x3f, 0x11, 0x32, 0x53, 0x8e, 0xd0, + 0x51, 0x81, 0x50, 0x6c, 0x20, 0xd4, 0x69, 0x9e, 0xcf, 0xba, 0xc1, 0x6e, 0xbe, 0x03, 0x00, 0x00, + 0xff, 0xff, 0x8c, 0x46, 0xec, 0xcc, 0x29, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_date_rule_item_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_date_rule_item_operator.pb.go new file mode 100644 index 0000000..8e36b2b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_date_rule_item_operator.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_date_rule_item_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list date rule item operators. +type UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator int32 + +const ( + // Not specified. + UserListDateRuleItemOperatorEnum_UNSPECIFIED UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 0 + // Used for return value only. Represents value unknown in this version. + UserListDateRuleItemOperatorEnum_UNKNOWN UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 1 + // Equals. + UserListDateRuleItemOperatorEnum_EQUALS UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 2 + // Not Equals. 
+ UserListDateRuleItemOperatorEnum_NOT_EQUALS UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 3 + // Before. + UserListDateRuleItemOperatorEnum_BEFORE UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 4 + // After. + UserListDateRuleItemOperatorEnum_AFTER UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator = 5 +) + +var UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EQUALS", + 3: "NOT_EQUALS", + 4: "BEFORE", + 5: "AFTER", +} +var UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EQUALS": 2, + "NOT_EQUALS": 3, + "BEFORE": 4, + "AFTER": 5, +} + +func (x UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator) String() string { + return proto.EnumName(UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator_name, int32(x)) +} +func (UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_date_rule_item_operator_c55e0b52762bba4b, []int{0, 0} +} + +// Supported rule operator for date type. +type UserListDateRuleItemOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListDateRuleItemOperatorEnum) Reset() { *m = UserListDateRuleItemOperatorEnum{} } +func (m *UserListDateRuleItemOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListDateRuleItemOperatorEnum) ProtoMessage() {} +func (*UserListDateRuleItemOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_date_rule_item_operator_c55e0b52762bba4b, []int{0} +} +func (m *UserListDateRuleItemOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListDateRuleItemOperatorEnum.Unmarshal(m, b) +} +func (m *UserListDateRuleItemOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListDateRuleItemOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListDateRuleItemOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListDateRuleItemOperatorEnum.Merge(dst, src) +} +func (m *UserListDateRuleItemOperatorEnum) XXX_Size() int { + return xxx_messageInfo_UserListDateRuleItemOperatorEnum.Size(m) +} +func (m *UserListDateRuleItemOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListDateRuleItemOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListDateRuleItemOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListDateRuleItemOperatorEnum)(nil), "google.ads.googleads.v1.enums.UserListDateRuleItemOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator", UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator_name, UserListDateRuleItemOperatorEnum_UserListDateRuleItemOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_date_rule_item_operator.proto", fileDescriptor_user_list_date_rule_item_operator_c55e0b52762bba4b) +} + +var fileDescriptor_user_list_date_rule_item_operator_c55e0b52762bba4b = []byte{ + // 346 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x41, 0x6a, 0xeb, 0x30, + 0x14, 0xfc, 0x76, 0x7e, 0x52, 0xaa, 0x40, 0x6b, 0xbc, 0x2c, 0x09, 0x34, 0x39, 0x80, 0x8c, 0xe9, + 0x4e, 0x5d, 0xc9, 0x8d, 0x12, 
0x42, 0x83, 0x9d, 0x26, 0x71, 0x0a, 0xc5, 0x60, 0xd4, 0x5a, 0x18, + 0x83, 0x2d, 0x19, 0x49, 0xce, 0x45, 0x7a, 0x83, 0x2e, 0x7b, 0x94, 0x1e, 0xa5, 0x07, 0xe8, 0xba, + 0xd8, 0x4e, 0xb2, 0xab, 0x37, 0x62, 0xa4, 0x99, 0x37, 0xef, 0xcd, 0x13, 0x20, 0xa9, 0x10, 0x69, + 0xce, 0x1c, 0x9a, 0x28, 0xa7, 0x85, 0x35, 0x3a, 0xb8, 0x0e, 0xe3, 0x55, 0xa1, 0x9c, 0x4a, 0x31, + 0x19, 0xe7, 0x99, 0xd2, 0x71, 0x42, 0x35, 0x8b, 0x65, 0x95, 0xb3, 0x38, 0xd3, 0xac, 0x88, 0x45, + 0xc9, 0x24, 0xd5, 0x42, 0xc2, 0x52, 0x0a, 0x2d, 0xec, 0x71, 0x5b, 0x0b, 0x69, 0xa2, 0xe0, 0xd9, + 0x06, 0x1e, 0x5c, 0xd8, 0xd8, 0xdc, 0x8c, 0x4e, 0x5d, 0xca, 0xcc, 0xa1, 0x9c, 0x0b, 0x4d, 0x75, + 0x26, 0xb8, 0x6a, 0x8b, 0xa7, 0xef, 0x06, 0xb8, 0x0d, 0x15, 0x93, 0xab, 0x4c, 0xe9, 0x19, 0xd5, + 0x6c, 0x53, 0xe5, 0x6c, 0xa9, 0x59, 0x11, 0x1c, 0x7b, 0x10, 0x5e, 0x15, 0x53, 0x01, 0x46, 0x5d, + 0x1a, 0xfb, 0x1a, 0x0c, 0x43, 0x7f, 0xbb, 0x26, 0x0f, 0xcb, 0xf9, 0x92, 0xcc, 0xac, 0x7f, 0xf6, + 0x10, 0x5c, 0x84, 0xfe, 0xa3, 0x1f, 0x3c, 0xfb, 0x96, 0x61, 0x03, 0x30, 0x20, 0x4f, 0x21, 0x5e, + 0x6d, 0x2d, 0xd3, 0xbe, 0x02, 0xc0, 0x0f, 0x76, 0xf1, 0xf1, 0xde, 0xab, 0x39, 0x8f, 0xcc, 0x83, + 0x0d, 0xb1, 0xfe, 0xdb, 0x97, 0xa0, 0x8f, 0xe7, 0x3b, 0xb2, 0xb1, 0xfa, 0xde, 0x8f, 0x01, 0x26, + 0x6f, 0xa2, 0x80, 0x9d, 0xc9, 0xbc, 0x49, 0xd7, 0x50, 0xeb, 0x3a, 0xde, 0xda, 0x78, 0xf1, 0x8e, + 0x1e, 0xa9, 0xc8, 0x29, 0x4f, 0xa1, 0x90, 0xa9, 0x93, 0x32, 0xde, 0x84, 0x3f, 0x2d, 0xbd, 0xcc, + 0xd4, 0x1f, 0x7f, 0x70, 0xdf, 0x9c, 0x1f, 0x66, 0x6f, 0x81, 0xf1, 0xa7, 0x39, 0x5e, 0xb4, 0x56, + 0x38, 0x51, 0xb0, 0x85, 0x35, 0xda, 0xbb, 0xb0, 0x5e, 0x92, 0xfa, 0x3a, 0xf1, 0x11, 0x4e, 0x54, + 0x74, 0xe6, 0xa3, 0xbd, 0x1b, 0x35, 0xfc, 0xb7, 0x39, 0x69, 0x1f, 0x11, 0xc2, 0x89, 0x42, 0xe8, + 0xac, 0x40, 0x68, 0xef, 0x22, 0xd4, 0x68, 0x5e, 0x07, 0xcd, 0x60, 0x77, 0xbf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x45, 0xbf, 0xc8, 0x51, 0x1b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_logical_rule_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_logical_rule_operator.pb.go new file mode 100644 index 0000000..5e91c3d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_logical_rule_operator.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_logical_rule_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list logical rule operators. +type UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator int32 + +const ( + // Not specified. + UserListLogicalRuleOperatorEnum_UNSPECIFIED UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator = 0 + // Used for return value only. Represents value unknown in this version. 
+ UserListLogicalRuleOperatorEnum_UNKNOWN UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator = 1 + // And - all of the operands. + UserListLogicalRuleOperatorEnum_ALL UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator = 2 + // Or - at least one of the operands. + UserListLogicalRuleOperatorEnum_ANY UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator = 3 + // Not - none of the operands. + UserListLogicalRuleOperatorEnum_NONE UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator = 4 +) + +var UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ALL", + 3: "ANY", + 4: "NONE", +} +var UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ALL": 2, + "ANY": 3, + "NONE": 4, +} + +func (x UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator) String() string { + return proto.EnumName(UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator_name, int32(x)) +} +func (UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_logical_rule_operator_70bc084eae3dce54, []int{0, 0} +} + +// The logical operator of the rule. +type UserListLogicalRuleOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListLogicalRuleOperatorEnum) Reset() { *m = UserListLogicalRuleOperatorEnum{} } +func (m *UserListLogicalRuleOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListLogicalRuleOperatorEnum) ProtoMessage() {} +func (*UserListLogicalRuleOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_logical_rule_operator_70bc084eae3dce54, []int{0} +} +func (m *UserListLogicalRuleOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListLogicalRuleOperatorEnum.Unmarshal(m, b) +} +func (m *UserListLogicalRuleOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListLogicalRuleOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListLogicalRuleOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListLogicalRuleOperatorEnum.Merge(dst, src) +} +func (m *UserListLogicalRuleOperatorEnum) XXX_Size() int { + return xxx_messageInfo_UserListLogicalRuleOperatorEnum.Size(m) +} +func (m *UserListLogicalRuleOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListLogicalRuleOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListLogicalRuleOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListLogicalRuleOperatorEnum)(nil), "google.ads.googleads.v1.enums.UserListLogicalRuleOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator", UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator_name, UserListLogicalRuleOperatorEnum_UserListLogicalRuleOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_logical_rule_operator.proto", fileDescriptor_user_list_logical_rule_operator_70bc084eae3dce54) +} + +var fileDescriptor_user_list_logical_rule_operator_70bc084eae3dce54 = []byte{ + // 324 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x6e, 0xe2, 0x30, + 0x14, 0x5c, 0x02, 0x5a, 0x56, 0xe6, 0xb0, 0x51, 0x8e, 0xbb, 0x45, 
0x2d, 0x7c, 0x80, 0xa3, 0xa8, + 0x37, 0xf7, 0x14, 0x68, 0x8a, 0x50, 0x23, 0x83, 0x5a, 0x01, 0x6a, 0x15, 0x29, 0x72, 0x89, 0x65, + 0x45, 0x32, 0x76, 0xe4, 0x97, 0x70, 0xe8, 0xe7, 0xf4, 0xd8, 0x4f, 0xe9, 0xa7, 0xf4, 0xde, 0x7b, + 0x15, 0x1b, 0xb8, 0x95, 0x8b, 0x35, 0xf2, 0xcc, 0x9b, 0x79, 0x6f, 0xd0, 0x54, 0x68, 0x2d, 0x24, + 0x0f, 0x59, 0x01, 0xa1, 0x83, 0x2d, 0xda, 0x47, 0x21, 0x57, 0xcd, 0x0e, 0xc2, 0x06, 0xb8, 0xc9, + 0x65, 0x09, 0x75, 0x2e, 0xb5, 0x28, 0xb7, 0x4c, 0xe6, 0xa6, 0x91, 0x3c, 0xd7, 0x15, 0x37, 0xac, + 0xd6, 0x06, 0x57, 0x46, 0xd7, 0x3a, 0x18, 0xba, 0x49, 0xcc, 0x0a, 0xc0, 0x27, 0x13, 0xbc, 0x8f, + 0xb0, 0x35, 0xf9, 0x77, 0x71, 0xcc, 0xa8, 0xca, 0x90, 0x29, 0xa5, 0x6b, 0x56, 0x97, 0x5a, 0x81, + 0x1b, 0x1e, 0xbf, 0xa2, 0xcb, 0x15, 0x70, 0x93, 0x96, 0x50, 0xa7, 0x2e, 0xe3, 0xa1, 0x91, 0x7c, + 0x71, 0x48, 0x48, 0x54, 0xb3, 0x1b, 0x6f, 0xd0, 0xff, 0x33, 0x92, 0xe0, 0x2f, 0x1a, 0xac, 0xe8, + 0xe3, 0x32, 0x99, 0xce, 0xef, 0xe6, 0xc9, 0xad, 0xff, 0x2b, 0x18, 0xa0, 0xfe, 0x8a, 0xde, 0xd3, + 0xc5, 0x86, 0xfa, 0x9d, 0xa0, 0x8f, 0xba, 0x71, 0x9a, 0xfa, 0x9e, 0x05, 0xf4, 0xc9, 0xef, 0x06, + 0x7f, 0x50, 0x8f, 0x2e, 0x68, 0xe2, 0xf7, 0x26, 0x5f, 0x1d, 0x34, 0xda, 0xea, 0x1d, 0x3e, 0xbb, + 0xff, 0xe4, 0xea, 0x4c, 0xf8, 0xb2, 0xbd, 0x61, 0xd9, 0x79, 0x9e, 0x1c, 0x2c, 0x84, 0x96, 0x4c, + 0x09, 0xac, 0x8d, 0x08, 0x05, 0x57, 0xf6, 0xc2, 0x63, 0xaf, 0x55, 0x09, 0x3f, 0xd4, 0x7c, 0x63, + 0xdf, 0x37, 0xaf, 0x3b, 0x8b, 0xe3, 0x77, 0x6f, 0x38, 0x73, 0x56, 0x71, 0x01, 0xd8, 0xc1, 0x16, + 0xad, 0x23, 0xdc, 0x76, 0x01, 0x1f, 0x47, 0x3e, 0x8b, 0x0b, 0xc8, 0x4e, 0x7c, 0xb6, 0x8e, 0x32, + 0xcb, 0x7f, 0x7a, 0x23, 0xf7, 0x49, 0x48, 0x5c, 0x00, 0x21, 0x27, 0x05, 0x21, 0xeb, 0x88, 0x10, + 0xab, 0x79, 0xf9, 0x6d, 0x17, 0xbb, 0xfe, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x91, 0xa1, 0x11, 0x29, + 0xfe, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_membership_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_membership_status.pb.go new file mode 100644 index 0000000..8d1e459 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_membership_status.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_membership_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible user list membership statuses. +type UserListMembershipStatusEnum_UserListMembershipStatus int32 + +const ( + // Not specified. + UserListMembershipStatusEnum_UNSPECIFIED UserListMembershipStatusEnum_UserListMembershipStatus = 0 + // Used for return value only. Represents value unknown in this version. 
+ UserListMembershipStatusEnum_UNKNOWN UserListMembershipStatusEnum_UserListMembershipStatus = 1 + // Open status - List is accruing members and can be targeted to. + UserListMembershipStatusEnum_OPEN UserListMembershipStatusEnum_UserListMembershipStatus = 2 + // Closed status - No new members being added. Cannot be used for targeting. + UserListMembershipStatusEnum_CLOSED UserListMembershipStatusEnum_UserListMembershipStatus = 3 +) + +var UserListMembershipStatusEnum_UserListMembershipStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPEN", + 3: "CLOSED", +} +var UserListMembershipStatusEnum_UserListMembershipStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPEN": 2, + "CLOSED": 3, +} + +func (x UserListMembershipStatusEnum_UserListMembershipStatus) String() string { + return proto.EnumName(UserListMembershipStatusEnum_UserListMembershipStatus_name, int32(x)) +} +func (UserListMembershipStatusEnum_UserListMembershipStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_membership_status_353af4c79a9ed804, []int{0, 0} +} + +// Membership status of this user list. Indicates whether a user list is open +// or active. Only open user lists can accumulate more users and can be used for +// targeting. +type UserListMembershipStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListMembershipStatusEnum) Reset() { *m = UserListMembershipStatusEnum{} } +func (m *UserListMembershipStatusEnum) String() string { return proto.CompactTextString(m) } +func (*UserListMembershipStatusEnum) ProtoMessage() {} +func (*UserListMembershipStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_membership_status_353af4c79a9ed804, []int{0} +} +func (m *UserListMembershipStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListMembershipStatusEnum.Unmarshal(m, b) +} +func (m *UserListMembershipStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListMembershipStatusEnum.Marshal(b, m, deterministic) +} +func (dst *UserListMembershipStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListMembershipStatusEnum.Merge(dst, src) +} +func (m *UserListMembershipStatusEnum) XXX_Size() int { + return xxx_messageInfo_UserListMembershipStatusEnum.Size(m) +} +func (m *UserListMembershipStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListMembershipStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListMembershipStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListMembershipStatusEnum)(nil), "google.ads.googleads.v1.enums.UserListMembershipStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListMembershipStatusEnum_UserListMembershipStatus", UserListMembershipStatusEnum_UserListMembershipStatus_name, UserListMembershipStatusEnum_UserListMembershipStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_membership_status.proto", fileDescriptor_user_list_membership_status_353af4c79a9ed804) +} + +var fileDescriptor_user_list_membership_status_353af4c79a9ed804 = []byte{ + // 314 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4a, 0xc3, 0x40, + 0x14, 0x86, 0x6d, 0x2a, 0x55, 0xa6, 0x0b, 0x43, 0x56, 0x22, 0xed, 0xa2, 0x3d, 0xc0, 0x84, 0xe0, + 0x6e, 0x5c, 0x48, 0xda, 0xc6, 0x52, 0xac, 0xd3, 0x40, 
0x69, 0x05, 0x09, 0x94, 0xa9, 0x19, 0xc6, + 0x81, 0x66, 0x26, 0xe4, 0x4d, 0x7a, 0x20, 0x97, 0x1e, 0xc5, 0xa3, 0xb8, 0xf4, 0x04, 0x92, 0x49, + 0x9b, 0x5d, 0xdc, 0x84, 0x9f, 0xfc, 0xef, 0xff, 0xe6, 0x7f, 0x0f, 0x3d, 0x0a, 0xad, 0xc5, 0x81, + 0xfb, 0x2c, 0x05, 0xbf, 0x96, 0x95, 0x3a, 0x06, 0x3e, 0x57, 0x65, 0x06, 0x7e, 0x09, 0xbc, 0xd8, + 0x1d, 0x24, 0x98, 0x5d, 0xc6, 0xb3, 0x3d, 0x2f, 0xe0, 0x43, 0xe6, 0x3b, 0x30, 0xcc, 0x94, 0x80, + 0xf3, 0x42, 0x1b, 0xed, 0x0d, 0xeb, 0x14, 0x66, 0x29, 0xe0, 0x06, 0x80, 0x8f, 0x01, 0xb6, 0x80, + 0xbb, 0xc1, 0x99, 0x9f, 0x4b, 0x9f, 0x29, 0xa5, 0x0d, 0x33, 0x52, 0xab, 0x53, 0x78, 0xac, 0xd0, + 0x60, 0x03, 0xbc, 0x58, 0x4a, 0x30, 0x2f, 0x0d, 0x7f, 0x6d, 0xf1, 0x91, 0x2a, 0xb3, 0x31, 0x45, + 0xb7, 0x6d, 0xbe, 0x77, 0x83, 0xfa, 0x1b, 0xba, 0x8e, 0xa3, 0xe9, 0xe2, 0x69, 0x11, 0xcd, 0xdc, + 0x0b, 0xaf, 0x8f, 0xae, 0x36, 0xf4, 0x99, 0xae, 0x5e, 0xa9, 0xdb, 0xf1, 0xae, 0xd1, 0xe5, 0x2a, + 0x8e, 0xa8, 0xeb, 0x78, 0x08, 0xf5, 0xa6, 0xcb, 0xd5, 0x3a, 0x9a, 0xb9, 0xdd, 0xc9, 0x6f, 0x07, + 0x8d, 0xde, 0x75, 0x86, 0xff, 0xed, 0x3c, 0x19, 0xb6, 0xbd, 0x19, 0x57, 0xa5, 0xe3, 0xce, 0xdb, + 0xe4, 0x94, 0x17, 0xfa, 0xc0, 0x94, 0xc0, 0xba, 0x10, 0xbe, 0xe0, 0xca, 0xae, 0x74, 0x3e, 0x62, + 0x2e, 0xa1, 0xe5, 0xa6, 0x0f, 0xf6, 0xfb, 0xe9, 0x74, 0xe7, 0x61, 0xf8, 0xe5, 0x0c, 0xe7, 0x35, + 0x2a, 0x4c, 0x01, 0xd7, 0xb2, 0x52, 0xdb, 0x00, 0x57, 0xfb, 0xc3, 0xf7, 0xd9, 0x4f, 0xc2, 0x14, + 0x92, 0xc6, 0x4f, 0xb6, 0x41, 0x62, 0xfd, 0x1f, 0x67, 0x54, 0xff, 0x24, 0x24, 0x4c, 0x81, 0x90, + 0x66, 0x82, 0x90, 0x6d, 0x40, 0x88, 0x9d, 0xd9, 0xf7, 0x6c, 0xb1, 0xfb, 0xbf, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x1b, 0xbf, 0x61, 0xd9, 0xeb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_number_rule_item_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_number_rule_item_operator.pb.go new file mode 100644 index 0000000..ce3146d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_number_rule_item_operator.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_number_rule_item_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list number rule item operators. +type UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator int32 + +const ( + // Not specified. + UserListNumberRuleItemOperatorEnum_UNSPECIFIED UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 0 + // Used for return value only. Represents value unknown in this version. + UserListNumberRuleItemOperatorEnum_UNKNOWN UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 1 + // Greater than. 
+ UserListNumberRuleItemOperatorEnum_GREATER_THAN UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 2 + // Greater than or equal. + UserListNumberRuleItemOperatorEnum_GREATER_THAN_OR_EQUAL UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 3 + // Equals. + UserListNumberRuleItemOperatorEnum_EQUALS UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 4 + // Not equals. + UserListNumberRuleItemOperatorEnum_NOT_EQUALS UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 5 + // Less than. + UserListNumberRuleItemOperatorEnum_LESS_THAN UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 6 + // Less than or equal. + UserListNumberRuleItemOperatorEnum_LESS_THAN_OR_EQUAL UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator = 7 +) + +var UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "GREATER_THAN", + 3: "GREATER_THAN_OR_EQUAL", + 4: "EQUALS", + 5: "NOT_EQUALS", + 6: "LESS_THAN", + 7: "LESS_THAN_OR_EQUAL", +} +var UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "GREATER_THAN": 2, + "GREATER_THAN_OR_EQUAL": 3, + "EQUALS": 4, + "NOT_EQUALS": 5, + "LESS_THAN": 6, + "LESS_THAN_OR_EQUAL": 7, +} + +func (x UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator) String() string { + return proto.EnumName(UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator_name, int32(x)) +} +func (UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_number_rule_item_operator_a792052f11149239, []int{0, 0} +} + +// Supported rule operator for number type. 
+type UserListNumberRuleItemOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListNumberRuleItemOperatorEnum) Reset() { *m = UserListNumberRuleItemOperatorEnum{} } +func (m *UserListNumberRuleItemOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListNumberRuleItemOperatorEnum) ProtoMessage() {} +func (*UserListNumberRuleItemOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_number_rule_item_operator_a792052f11149239, []int{0} +} +func (m *UserListNumberRuleItemOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListNumberRuleItemOperatorEnum.Unmarshal(m, b) +} +func (m *UserListNumberRuleItemOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListNumberRuleItemOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListNumberRuleItemOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListNumberRuleItemOperatorEnum.Merge(dst, src) +} +func (m *UserListNumberRuleItemOperatorEnum) XXX_Size() int { + return xxx_messageInfo_UserListNumberRuleItemOperatorEnum.Size(m) +} +func (m *UserListNumberRuleItemOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListNumberRuleItemOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListNumberRuleItemOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListNumberRuleItemOperatorEnum)(nil), "google.ads.googleads.v1.enums.UserListNumberRuleItemOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator", UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator_name, UserListNumberRuleItemOperatorEnum_UserListNumberRuleItemOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_number_rule_item_operator.proto", fileDescriptor_user_list_number_rule_item_operator_a792052f11149239) +} + +var fileDescriptor_user_list_number_rule_item_operator_a792052f11149239 = []byte{ + // 379 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcd, 0xae, 0x93, 0x40, + 0x14, 0xc7, 0x85, 0x6a, 0x1b, 0xa7, 0x7e, 0x90, 0x49, 0x34, 0xd1, 0x58, 0x93, 0xd6, 0xfd, 0x10, + 0xe2, 0x6e, 0x5c, 0x4d, 0x15, 0xb1, 0xb1, 0x81, 0x0a, 0xa5, 0x26, 0x86, 0x64, 0x42, 0x65, 0x42, + 0x48, 0x60, 0x86, 0xcc, 0x0c, 0x7d, 0x20, 0x97, 0x2e, 0x7c, 0x10, 0xf7, 0xbe, 0x84, 0x4f, 0xe0, + 0xd2, 0x30, 0xb4, 0xdc, 0xbb, 0xb9, 0xdd, 0x90, 0xff, 0xf9, 0xfa, 0x1d, 0xe6, 0x7f, 0x40, 0x50, + 0x0a, 0x51, 0xd6, 0xcc, 0xcd, 0x0b, 0xe5, 0x0e, 0xb2, 0x57, 0x27, 0xcf, 0x65, 0xbc, 0x6b, 0x94, + 0xdb, 0x29, 0x26, 0x69, 0x5d, 0x29, 0x4d, 0x79, 0xd7, 0x1c, 0x99, 0xa4, 0xb2, 0xab, 0x19, 0xad, + 0x34, 0x6b, 0xa8, 0x68, 0x99, 0xcc, 0xb5, 0x90, 0xa8, 0x95, 0x42, 0x0b, 0xb8, 0x18, 0xa6, 0x51, + 0x5e, 0x28, 0x34, 0x82, 0xd0, 0xc9, 0x43, 0x06, 0xf4, 0xf2, 0xd5, 0x65, 0x4f, 0x5b, 0xb9, 0x39, + 0xe7, 0x42, 0xe7, 0xba, 0x12, 0x5c, 0x0d, 0xc3, 0xab, 0x3f, 0x16, 0x58, 0xa5, 0x8a, 0xc9, 0x6d, + 0xa5, 0x74, 0x68, 0x16, 0xc5, 0x5d, 0xcd, 0x36, 0x9a, 0x35, 0xd1, 0x79, 0x8b, 0xcf, 0xbb, 0x66, + 0xf5, 0xcb, 0x02, 0xaf, 0xaf, 0xb7, 0xc1, 0xa7, 0x60, 0x9e, 0x86, 0xc9, 0xce, 0x7f, 0xbf, 0xf9, + 0xb8, 0xf1, 0x3f, 0x38, 0xf7, 0xe0, 0x1c, 0xcc, 0xd2, 0xf0, 0x73, 0x18, 0x7d, 0x0d, 0x1d, 0x0b, + 0x3a, 0xe0, 0x51, 0x10, 0xfb, 0x64, 0xef, 0xc7, 0x74, 0xff, 0x89, 0x84, 0x8e, 
0x0d, 0x5f, 0x80, + 0x67, 0xb7, 0x33, 0x34, 0x8a, 0xa9, 0xff, 0x25, 0x25, 0x5b, 0x67, 0x02, 0x01, 0x98, 0x1a, 0x99, + 0x38, 0xf7, 0xe1, 0x13, 0x00, 0xc2, 0x68, 0x4f, 0xcf, 0xf1, 0x03, 0xf8, 0x18, 0x3c, 0xdc, 0xfa, + 0x49, 0x32, 0x50, 0xa6, 0xf0, 0x39, 0x80, 0x63, 0x78, 0x83, 0x98, 0xad, 0xff, 0x59, 0x60, 0xf9, + 0x5d, 0x34, 0xe8, 0xaa, 0x37, 0xeb, 0x37, 0xd7, 0xdf, 0xb4, 0xeb, 0x2d, 0xda, 0x59, 0xdf, 0xd6, + 0x67, 0x4a, 0x29, 0xea, 0x9c, 0x97, 0x48, 0xc8, 0xd2, 0x2d, 0x19, 0x37, 0x06, 0x5e, 0x4e, 0xd7, + 0x56, 0xea, 0x8e, 0x4b, 0xbe, 0x33, 0xdf, 0x1f, 0xf6, 0x24, 0x20, 0xe4, 0xa7, 0xbd, 0x08, 0x06, + 0x14, 0x29, 0x14, 0x1a, 0x64, 0xaf, 0x0e, 0x1e, 0xea, 0x6d, 0x56, 0xbf, 0x2f, 0xf5, 0x8c, 0x14, + 0x2a, 0x1b, 0xeb, 0xd9, 0xc1, 0xcb, 0x4c, 0xfd, 0xaf, 0xbd, 0x1c, 0x92, 0x18, 0x93, 0x42, 0x61, + 0x3c, 0x76, 0x60, 0x7c, 0xf0, 0x30, 0x36, 0x3d, 0xc7, 0xa9, 0xf9, 0xb1, 0xb7, 0xff, 0x03, 0x00, + 0x00, 0xff, 0xff, 0x19, 0x0a, 0x8a, 0x67, 0x61, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_prepopulation_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_prepopulation_status.pb.go new file mode 100644 index 0000000..6adb8ee --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_prepopulation_status.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_prepopulation_status.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list prepopulation status. +type UserListPrepopulationStatusEnum_UserListPrepopulationStatus int32 + +const ( + // Not specified. + UserListPrepopulationStatusEnum_UNSPECIFIED UserListPrepopulationStatusEnum_UserListPrepopulationStatus = 0 + // Used for return value only. Represents value unknown in this version. + UserListPrepopulationStatusEnum_UNKNOWN UserListPrepopulationStatusEnum_UserListPrepopulationStatus = 1 + // Prepopoulation is being requested. + UserListPrepopulationStatusEnum_REQUESTED UserListPrepopulationStatusEnum_UserListPrepopulationStatus = 2 + // Prepopulation is finished. + UserListPrepopulationStatusEnum_FINISHED UserListPrepopulationStatusEnum_UserListPrepopulationStatus = 3 + // Prepopulation failed. 
+ UserListPrepopulationStatusEnum_FAILED UserListPrepopulationStatusEnum_UserListPrepopulationStatus = 4 +) + +var UserListPrepopulationStatusEnum_UserListPrepopulationStatus_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "REQUESTED", + 3: "FINISHED", + 4: "FAILED", +} +var UserListPrepopulationStatusEnum_UserListPrepopulationStatus_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REQUESTED": 2, + "FINISHED": 3, + "FAILED": 4, +} + +func (x UserListPrepopulationStatusEnum_UserListPrepopulationStatus) String() string { + return proto.EnumName(UserListPrepopulationStatusEnum_UserListPrepopulationStatus_name, int32(x)) +} +func (UserListPrepopulationStatusEnum_UserListPrepopulationStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_prepopulation_status_8930b9ea3020757e, []int{0, 0} +} + +// Indicates status of prepopulation based on the rule. +type UserListPrepopulationStatusEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListPrepopulationStatusEnum) Reset() { *m = UserListPrepopulationStatusEnum{} } +func (m *UserListPrepopulationStatusEnum) String() string { return proto.CompactTextString(m) } +func (*UserListPrepopulationStatusEnum) ProtoMessage() {} +func (*UserListPrepopulationStatusEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_prepopulation_status_8930b9ea3020757e, []int{0} +} +func (m *UserListPrepopulationStatusEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListPrepopulationStatusEnum.Unmarshal(m, b) +} +func (m *UserListPrepopulationStatusEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListPrepopulationStatusEnum.Marshal(b, m, deterministic) +} +func (dst *UserListPrepopulationStatusEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListPrepopulationStatusEnum.Merge(dst, src) +} +func (m *UserListPrepopulationStatusEnum) XXX_Size() int { + return xxx_messageInfo_UserListPrepopulationStatusEnum.Size(m) +} +func (m *UserListPrepopulationStatusEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListPrepopulationStatusEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListPrepopulationStatusEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListPrepopulationStatusEnum)(nil), "google.ads.googleads.v1.enums.UserListPrepopulationStatusEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListPrepopulationStatusEnum_UserListPrepopulationStatus", UserListPrepopulationStatusEnum_UserListPrepopulationStatus_name, UserListPrepopulationStatusEnum_UserListPrepopulationStatus_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_prepopulation_status.proto", fileDescriptor_user_list_prepopulation_status_8930b9ea3020757e) +} + +var fileDescriptor_user_list_prepopulation_status_8930b9ea3020757e = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xc1, 0x4e, 0xb3, 0x40, + 0x18, 0xfc, 0xa1, 0x7f, 0xaa, 0x6e, 0x35, 0x12, 0x8e, 0x6a, 0xa3, 0xed, 0x03, 0x2c, 0x21, 0xde, + 0xd6, 0x13, 0x95, 0x6d, 0x25, 0x36, 0x88, 0x22, 0x35, 0x31, 0x24, 0x0d, 0x0a, 0x21, 0x24, 0x74, + 0x77, 0xc3, 0xb7, 0xf4, 0x15, 0x7c, 0x0f, 0x8f, 0x3e, 0x8a, 0x8f, 0xe2, 0xdd, 0xbb, 0x61, 0x69, + 0x1b, 0x2f, 0x72, 0xd9, 0x4c, 0xf6, 0x9b, 0x6f, 0xe6, 0x9b, 0x41, 0x93, 0x9c, 0xf3, 0xbc, 0xcc, + 0xac, 0x24, 0x05, 0xab, 0x85, 0x0d, 0x5a, 0xdb, 
0x56, 0xc6, 0xea, 0x15, 0x58, 0x35, 0x64, 0xd5, + 0xb2, 0x2c, 0x40, 0x2e, 0x45, 0x95, 0x09, 0x2e, 0xea, 0x32, 0x91, 0x05, 0x67, 0x4b, 0x90, 0x89, + 0xac, 0x01, 0x8b, 0x8a, 0x4b, 0x6e, 0x0e, 0xdb, 0x45, 0x9c, 0xa4, 0x80, 0x77, 0x1a, 0x78, 0x6d, + 0x63, 0xa5, 0x71, 0x72, 0xb6, 0xb5, 0x10, 0x85, 0x95, 0x30, 0xc6, 0xa5, 0x92, 0xd8, 0x2c, 0x8f, + 0xdf, 0x34, 0x74, 0x1e, 0x41, 0x56, 0xcd, 0x0b, 0x90, 0xc1, 0x6f, 0x8f, 0x50, 0x59, 0x50, 0x56, + 0xaf, 0xc6, 0x29, 0x3a, 0xed, 0xa0, 0x98, 0xc7, 0x68, 0x10, 0xf9, 0x61, 0x40, 0xaf, 0xbd, 0xa9, + 0x47, 0x5d, 0xe3, 0x9f, 0x39, 0x40, 0x7b, 0x91, 0x7f, 0xeb, 0xdf, 0x3d, 0xf9, 0x86, 0x66, 0x1e, + 0xa1, 0x83, 0x07, 0x7a, 0x1f, 0xd1, 0xf0, 0x91, 0xba, 0x86, 0x6e, 0x1e, 0xa2, 0xfd, 0xa9, 0xe7, + 0x7b, 0xe1, 0x0d, 0x75, 0x8d, 0x9e, 0x89, 0x50, 0x7f, 0xea, 0x78, 0x73, 0xea, 0x1a, 0xff, 0x27, + 0xdf, 0x1a, 0x1a, 0xbd, 0xf2, 0x15, 0xee, 0x4c, 0x33, 0xb9, 0xe8, 0xb8, 0x24, 0x68, 0x12, 0x05, + 0xda, 0xf3, 0xa6, 0x54, 0x9c, 0xf3, 0x32, 0x61, 0x39, 0xe6, 0x55, 0x6e, 0xe5, 0x19, 0x53, 0x79, + 0xb7, 0x25, 0x8b, 0x02, 0xfe, 0xe8, 0xfc, 0x4a, 0xbd, 0xef, 0x7a, 0x6f, 0xe6, 0x38, 0x1f, 0xfa, + 0x70, 0xd6, 0x4a, 0x39, 0x29, 0xe0, 0x16, 0x36, 0x68, 0x61, 0xe3, 0xa6, 0x18, 0xf8, 0xdc, 0xce, + 0x63, 0x27, 0x85, 0x78, 0x37, 0x8f, 0x17, 0x76, 0xac, 0xe6, 0x5f, 0xfa, 0xa8, 0xfd, 0x24, 0xc4, + 0x49, 0x81, 0x90, 0x1d, 0x83, 0x90, 0x85, 0x4d, 0x88, 0xe2, 0xbc, 0xf4, 0xd5, 0x61, 0x97, 0x3f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0xff, 0x85, 0x4f, 0x9a, 0x0b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_rule_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_rule_type.pb.go new file mode 100644 index 0000000..2025e58 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_rule_type.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_rule_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list rule types. +type UserListRuleTypeEnum_UserListRuleType int32 + +const ( + // Not specified. + UserListRuleTypeEnum_UNSPECIFIED UserListRuleTypeEnum_UserListRuleType = 0 + // Used for return value only. Represents value unknown in this version. + UserListRuleTypeEnum_UNKNOWN UserListRuleTypeEnum_UserListRuleType = 1 + // Conjunctive normal form. + UserListRuleTypeEnum_AND_OF_ORS UserListRuleTypeEnum_UserListRuleType = 2 + // Disjunctive normal form. 
+ UserListRuleTypeEnum_OR_OF_ANDS UserListRuleTypeEnum_UserListRuleType = 3 +) + +var UserListRuleTypeEnum_UserListRuleType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AND_OF_ORS", + 3: "OR_OF_ANDS", +} +var UserListRuleTypeEnum_UserListRuleType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AND_OF_ORS": 2, + "OR_OF_ANDS": 3, +} + +func (x UserListRuleTypeEnum_UserListRuleType) String() string { + return proto.EnumName(UserListRuleTypeEnum_UserListRuleType_name, int32(x)) +} +func (UserListRuleTypeEnum_UserListRuleType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_rule_type_6ff13bf7262f7b61, []int{0, 0} +} + +// Rule based user list rule type. +type UserListRuleTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListRuleTypeEnum) Reset() { *m = UserListRuleTypeEnum{} } +func (m *UserListRuleTypeEnum) String() string { return proto.CompactTextString(m) } +func (*UserListRuleTypeEnum) ProtoMessage() {} +func (*UserListRuleTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_rule_type_6ff13bf7262f7b61, []int{0} +} +func (m *UserListRuleTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListRuleTypeEnum.Unmarshal(m, b) +} +func (m *UserListRuleTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListRuleTypeEnum.Marshal(b, m, deterministic) +} +func (dst *UserListRuleTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListRuleTypeEnum.Merge(dst, src) +} +func (m *UserListRuleTypeEnum) XXX_Size() int { + return xxx_messageInfo_UserListRuleTypeEnum.Size(m) +} +func (m *UserListRuleTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListRuleTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListRuleTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListRuleTypeEnum)(nil), "google.ads.googleads.v1.enums.UserListRuleTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListRuleTypeEnum_UserListRuleType", UserListRuleTypeEnum_UserListRuleType_name, UserListRuleTypeEnum_UserListRuleType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_rule_type.proto", fileDescriptor_user_list_rule_type_6ff13bf7262f7b61) +} + +var fileDescriptor_user_list_rule_type_6ff13bf7262f7b61 = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xdd, 0x4a, 0xc3, 0x30, + 0x14, 0x76, 0x1d, 0x28, 0x64, 0xa0, 0xa5, 0xe8, 0x8d, 0xb8, 0x8b, 0xed, 0x01, 0x52, 0x8a, 0x17, + 0x42, 0xbc, 0xca, 0xdc, 0x0f, 0x43, 0x69, 0xcb, 0xe6, 0x26, 0x48, 0xa1, 0x54, 0x1b, 0x62, 0xa1, + 0x4b, 0x4a, 0x4f, 0x3a, 0xd8, 0xeb, 0x78, 0xe9, 0xa3, 0xf8, 0x28, 0x7b, 0x0a, 0x49, 0xe2, 0x7a, + 0x31, 0xd0, 0x9b, 0xf0, 0x9d, 0xf3, 0x7d, 0xdf, 0xc9, 0x77, 0x0e, 0xba, 0xe3, 0x52, 0xf2, 0x92, + 0xf9, 0x59, 0x0e, 0xbe, 0x85, 0x1a, 0x6d, 0x03, 0x9f, 0x89, 0x66, 0x03, 0x7e, 0x03, 0xac, 0x4e, + 0xcb, 0x02, 0x54, 0x5a, 0x37, 0x25, 0x4b, 0xd5, 0xae, 0x62, 0xb8, 0xaa, 0xa5, 0x92, 0x5e, 0xdf, + 0xaa, 0x71, 0x96, 0x03, 0x6e, 0x8d, 0x78, 0x1b, 0x60, 0x63, 0xbc, 0xbe, 0x39, 0xcc, 0xad, 0x0a, + 0x3f, 0x13, 0x42, 0xaa, 0x4c, 0x15, 0x52, 0x80, 0x35, 0x0f, 0x3f, 0xd0, 0xe5, 0x0a, 0x58, 0xfd, + 0x54, 0x80, 0x5a, 0x34, 0x25, 0x7b, 0xde, 0x55, 0x6c, 0x22, 0x9a, 0xcd, 0x30, 0x46, 0xee, 0x71, + 0xdf, 0xbb, 0x40, 0xbd, 0x55, 0xb8, 0x8c, 0x27, 0x0f, 0xf3, 0xe9, 0x7c, 
0x32, 0x76, 0x4f, 0xbc, + 0x1e, 0x3a, 0x5b, 0x85, 0x8f, 0x61, 0xf4, 0x12, 0xba, 0x1d, 0xef, 0x1c, 0x21, 0x1a, 0x8e, 0xd3, + 0x68, 0x9a, 0x46, 0x8b, 0xa5, 0xeb, 0xe8, 0x3a, 0x5a, 0xe8, 0x92, 0x86, 0xe3, 0xa5, 0xdb, 0x1d, + 0xed, 0x3b, 0x68, 0xf0, 0x2e, 0x37, 0xf8, 0xdf, 0xb4, 0xa3, 0xab, 0xe3, 0x5f, 0x63, 0x1d, 0x33, + 0xee, 0xbc, 0x8e, 0x7e, 0x7d, 0x5c, 0x96, 0x99, 0xe0, 0x58, 0xd6, 0xdc, 0xe7, 0x4c, 0x98, 0x25, + 0x0e, 0xe7, 0xaa, 0x0a, 0xf8, 0xe3, 0x7a, 0xf7, 0xe6, 0xfd, 0x74, 0xba, 0x33, 0x4a, 0xbf, 0x9c, + 0xfe, 0xcc, 0x8e, 0xa2, 0x39, 0x60, 0x0b, 0x35, 0x5a, 0x07, 0x58, 0x6f, 0x0e, 0xdf, 0x07, 0x3e, + 0xa1, 0x39, 0x24, 0x2d, 0x9f, 0xac, 0x83, 0xc4, 0xf0, 0x7b, 0x67, 0x60, 0x9b, 0x84, 0xd0, 0x1c, + 0x08, 0x69, 0x15, 0x84, 0xac, 0x03, 0x42, 0x8c, 0xe6, 0xed, 0xd4, 0x04, 0xbb, 0xfd, 0x09, 0x00, + 0x00, 0xff, 0xff, 0xab, 0x46, 0xd8, 0xca, 0xd5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_size_range.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_size_range.pb.go new file mode 100644 index 0000000..16add88 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_size_range.pb.go @@ -0,0 +1,186 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_size_range.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible user list size ranges. +type UserListSizeRangeEnum_UserListSizeRange int32 + +const ( + // Not specified. + UserListSizeRangeEnum_UNSPECIFIED UserListSizeRangeEnum_UserListSizeRange = 0 + // Used for return value only. Represents value unknown in this version. + UserListSizeRangeEnum_UNKNOWN UserListSizeRangeEnum_UserListSizeRange = 1 + // User list has less than 500 users. + UserListSizeRangeEnum_LESS_THAN_FIVE_HUNDRED UserListSizeRangeEnum_UserListSizeRange = 2 + // User list has number of users in range of 500 to 1000. + UserListSizeRangeEnum_LESS_THAN_ONE_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 3 + // User list has number of users in range of 1000 to 10000. + UserListSizeRangeEnum_ONE_THOUSAND_TO_TEN_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 4 + // User list has number of users in range of 10000 to 50000. + UserListSizeRangeEnum_TEN_THOUSAND_TO_FIFTY_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 5 + // User list has number of users in range of 50000 to 100000. + UserListSizeRangeEnum_FIFTY_THOUSAND_TO_ONE_HUNDRED_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 6 + // User list has number of users in range of 100000 to 300000. + UserListSizeRangeEnum_ONE_HUNDRED_THOUSAND_TO_THREE_HUNDRED_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 7 + // User list has number of users in range of 300000 to 500000. 
+ UserListSizeRangeEnum_THREE_HUNDRED_THOUSAND_TO_FIVE_HUNDRED_THOUSAND UserListSizeRangeEnum_UserListSizeRange = 8 + // User list has number of users in range of 500000 to 1 million. + UserListSizeRangeEnum_FIVE_HUNDRED_THOUSAND_TO_ONE_MILLION UserListSizeRangeEnum_UserListSizeRange = 9 + // User list has number of users in range of 1 to 2 millions. + UserListSizeRangeEnum_ONE_MILLION_TO_TWO_MILLION UserListSizeRangeEnum_UserListSizeRange = 10 + // User list has number of users in range of 2 to 3 millions. + UserListSizeRangeEnum_TWO_MILLION_TO_THREE_MILLION UserListSizeRangeEnum_UserListSizeRange = 11 + // User list has number of users in range of 3 to 5 millions. + UserListSizeRangeEnum_THREE_MILLION_TO_FIVE_MILLION UserListSizeRangeEnum_UserListSizeRange = 12 + // User list has number of users in range of 5 to 10 millions. + UserListSizeRangeEnum_FIVE_MILLION_TO_TEN_MILLION UserListSizeRangeEnum_UserListSizeRange = 13 + // User list has number of users in range of 10 to 20 millions. + UserListSizeRangeEnum_TEN_MILLION_TO_TWENTY_MILLION UserListSizeRangeEnum_UserListSizeRange = 14 + // User list has number of users in range of 20 to 30 millions. + UserListSizeRangeEnum_TWENTY_MILLION_TO_THIRTY_MILLION UserListSizeRangeEnum_UserListSizeRange = 15 + // User list has number of users in range of 30 to 50 millions. + UserListSizeRangeEnum_THIRTY_MILLION_TO_FIFTY_MILLION UserListSizeRangeEnum_UserListSizeRange = 16 + // User list has over 50 million users. + UserListSizeRangeEnum_OVER_FIFTY_MILLION UserListSizeRangeEnum_UserListSizeRange = 17 +) + +var UserListSizeRangeEnum_UserListSizeRange_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LESS_THAN_FIVE_HUNDRED", + 3: "LESS_THAN_ONE_THOUSAND", + 4: "ONE_THOUSAND_TO_TEN_THOUSAND", + 5: "TEN_THOUSAND_TO_FIFTY_THOUSAND", + 6: "FIFTY_THOUSAND_TO_ONE_HUNDRED_THOUSAND", + 7: "ONE_HUNDRED_THOUSAND_TO_THREE_HUNDRED_THOUSAND", + 8: "THREE_HUNDRED_THOUSAND_TO_FIVE_HUNDRED_THOUSAND", + 9: "FIVE_HUNDRED_THOUSAND_TO_ONE_MILLION", + 10: "ONE_MILLION_TO_TWO_MILLION", + 11: "TWO_MILLION_TO_THREE_MILLION", + 12: "THREE_MILLION_TO_FIVE_MILLION", + 13: "FIVE_MILLION_TO_TEN_MILLION", + 14: "TEN_MILLION_TO_TWENTY_MILLION", + 15: "TWENTY_MILLION_TO_THIRTY_MILLION", + 16: "THIRTY_MILLION_TO_FIFTY_MILLION", + 17: "OVER_FIFTY_MILLION", +} +var UserListSizeRangeEnum_UserListSizeRange_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LESS_THAN_FIVE_HUNDRED": 2, + "LESS_THAN_ONE_THOUSAND": 3, + "ONE_THOUSAND_TO_TEN_THOUSAND": 4, + "TEN_THOUSAND_TO_FIFTY_THOUSAND": 5, + "FIFTY_THOUSAND_TO_ONE_HUNDRED_THOUSAND": 6, + "ONE_HUNDRED_THOUSAND_TO_THREE_HUNDRED_THOUSAND": 7, + "THREE_HUNDRED_THOUSAND_TO_FIVE_HUNDRED_THOUSAND": 8, + "FIVE_HUNDRED_THOUSAND_TO_ONE_MILLION": 9, + "ONE_MILLION_TO_TWO_MILLION": 10, + "TWO_MILLION_TO_THREE_MILLION": 11, + "THREE_MILLION_TO_FIVE_MILLION": 12, + "FIVE_MILLION_TO_TEN_MILLION": 13, + "TEN_MILLION_TO_TWENTY_MILLION": 14, + "TWENTY_MILLION_TO_THIRTY_MILLION": 15, + "THIRTY_MILLION_TO_FIFTY_MILLION": 16, + "OVER_FIFTY_MILLION": 17, +} + +func (x UserListSizeRangeEnum_UserListSizeRange) String() string { + return proto.EnumName(UserListSizeRangeEnum_UserListSizeRange_name, int32(x)) +} +func (UserListSizeRangeEnum_UserListSizeRange) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_size_range_4744de7763dbe702, []int{0, 0} +} + +// Size range in terms of number of users of a UserList. 
+type UserListSizeRangeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListSizeRangeEnum) Reset() { *m = UserListSizeRangeEnum{} } +func (m *UserListSizeRangeEnum) String() string { return proto.CompactTextString(m) } +func (*UserListSizeRangeEnum) ProtoMessage() {} +func (*UserListSizeRangeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_size_range_4744de7763dbe702, []int{0} +} +func (m *UserListSizeRangeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListSizeRangeEnum.Unmarshal(m, b) +} +func (m *UserListSizeRangeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListSizeRangeEnum.Marshal(b, m, deterministic) +} +func (dst *UserListSizeRangeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListSizeRangeEnum.Merge(dst, src) +} +func (m *UserListSizeRangeEnum) XXX_Size() int { + return xxx_messageInfo_UserListSizeRangeEnum.Size(m) +} +func (m *UserListSizeRangeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListSizeRangeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListSizeRangeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListSizeRangeEnum)(nil), "google.ads.googleads.v1.enums.UserListSizeRangeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListSizeRangeEnum_UserListSizeRange", UserListSizeRangeEnum_UserListSizeRange_name, UserListSizeRangeEnum_UserListSizeRange_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_size_range.proto", fileDescriptor_user_list_size_range_4744de7763dbe702) +} + +var fileDescriptor_user_list_size_range_4744de7763dbe702 = []byte{ + // 505 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xcb, 0x6e, 0xd3, 0x40, + 0x14, 0x86, 0x49, 0x7a, 0x83, 0x13, 0xa0, 0xee, 0x48, 0x64, 0x11, 0x9a, 0xb6, 0x09, 0x15, 0x42, + 0x2c, 0xc6, 0x0a, 0xdd, 0x20, 0xb3, 0x72, 0xc8, 0xa4, 0xb1, 0x08, 0xe3, 0xc8, 0xb7, 0xaa, 0x28, + 0x92, 0x65, 0xb0, 0x65, 0x59, 0x4a, 0xec, 0xc8, 0xe3, 0x74, 0xd1, 0xe7, 0xe0, 0x09, 0x58, 0xf2, + 0x28, 0x3c, 0x0a, 0xbc, 0x01, 0x2b, 0x64, 0xbb, 0xe3, 0x0b, 0x35, 0x6c, 0xa2, 0xa3, 0xff, 0xff, + 0xce, 0xc9, 0xaf, 0xf1, 0x39, 0xf0, 0xd6, 0x8f, 0x22, 0x7f, 0xe5, 0x89, 0x8e, 0xcb, 0xc4, 0xbc, + 0x4c, 0xab, 0x9b, 0x91, 0xe8, 0x85, 0xdb, 0x35, 0x13, 0xb7, 0xcc, 0x8b, 0xed, 0x55, 0xc0, 0x12, + 0x9b, 0x05, 0xb7, 0x9e, 0x1d, 0x3b, 0xa1, 0xef, 0xe1, 0x4d, 0x1c, 0x25, 0x11, 0xea, 0xe7, 0x38, + 0x76, 0x5c, 0x86, 0x8b, 0x4e, 0x7c, 0x33, 0xc2, 0x59, 0x67, 0xef, 0x98, 0x0f, 0xde, 0x04, 0xa2, + 0x13, 0x86, 0x51, 0xe2, 0x24, 0x41, 0x14, 0xb2, 0xbc, 0x79, 0xf8, 0x75, 0x0f, 0x9e, 0x99, 0xcc, + 0x8b, 0xe7, 0x01, 0x4b, 0xf4, 0xe0, 0xd6, 0xd3, 0xd2, 0xc1, 0x24, 0xdc, 0xae, 0x87, 0xbf, 0x77, + 0xe1, 0xe8, 0x9e, 0x83, 0x0e, 0xa1, 0x63, 0x52, 0x7d, 0x41, 0xde, 0x2b, 0x53, 0x85, 0x4c, 0x84, + 0x07, 0xa8, 0x03, 0x07, 0x26, 0xfd, 0x40, 0xd5, 0x2b, 0x2a, 0xb4, 0x50, 0x0f, 0xba, 0x73, 0xa2, + 0xeb, 0xb6, 0x31, 0x93, 0xa9, 0x3d, 0x55, 0x2c, 0x62, 0xcf, 0x4c, 0x3a, 0xd1, 0xc8, 0x44, 0x68, + 0xd7, 0x3d, 0x95, 0x12, 0xdb, 0x98, 0xa9, 0xa6, 0x2e, 0xd3, 0x89, 0xb0, 0x83, 0xce, 0xe0, 0xb8, + 0xaa, 0xd8, 0x86, 0x6a, 0x1b, 0x84, 0x96, 0xc4, 0x2e, 0x1a, 0xc2, 0x49, 0x55, 0x49, 0x89, 0xa9, + 0x32, 0x35, 0xae, 0x4b, 0x66, 0x0f, 0xbd, 0x86, 0x97, 0x75, 0x2d, 0xa5, 0xd2, 0xb9, 0x77, 0x21, + 0x4a, 0x76, 0x1f, 0xbd, 0x01, 0xdc, 0xe4, 0x64, 0xff, 0x3c, 0xd3, 0x48, 
0x43, 0xcf, 0x01, 0xba, + 0x00, 0xb1, 0xd9, 0xcb, 0xd3, 0x58, 0x0d, 0x4d, 0x0f, 0xd1, 0x2b, 0x38, 0x6f, 0xb4, 0x78, 0xb6, + 0x8f, 0xca, 0x7c, 0xae, 0xa8, 0x54, 0x78, 0x84, 0x4e, 0xa0, 0x57, 0x11, 0xb2, 0x24, 0x57, 0x6a, + 0xe1, 0x43, 0xfa, 0x48, 0x15, 0xa1, 0x4c, 0xca, 0x89, 0x0e, 0x1a, 0x40, 0xbf, 0x26, 0x15, 0xb9, + 0x38, 0xf2, 0x18, 0x9d, 0xc2, 0xf3, 0xaa, 0xc2, 0x5f, 0x9a, 0x03, 0x4f, 0xb2, 0x19, 0xa5, 0x90, + 0xa7, 0x20, 0xd4, 0xb8, 0x2e, 0x90, 0xa7, 0xe8, 0x1c, 0xce, 0xea, 0x5a, 0x9e, 0x45, 0xd1, 0x2a, + 0xd4, 0x21, 0x7a, 0x01, 0xa7, 0x75, 0xad, 0xfc, 0x66, 0x1c, 0x12, 0x50, 0x17, 0x90, 0x6a, 0x11, + 0xed, 0x2f, 0xfd, 0x68, 0xfc, 0xab, 0x05, 0x83, 0x2f, 0xd1, 0x1a, 0xff, 0x77, 0xb5, 0xc7, 0xdd, + 0x7b, 0xfb, 0xb9, 0x48, 0x97, 0x7a, 0xd1, 0xfa, 0x34, 0xbe, 0x6b, 0xf4, 0xa3, 0x95, 0x13, 0xfa, + 0x38, 0x8a, 0x7d, 0xd1, 0xf7, 0xc2, 0x6c, 0xe5, 0xf9, 0x75, 0x6d, 0x02, 0xf6, 0x8f, 0x63, 0x7b, + 0x97, 0xfd, 0x7e, 0x6b, 0xef, 0x5c, 0xca, 0xf2, 0xf7, 0x76, 0xff, 0x32, 0x1f, 0x25, 0xbb, 0x0c, + 0xe7, 0x65, 0x5a, 0x59, 0x23, 0x9c, 0x5e, 0x09, 0xfb, 0xc1, 0xfd, 0xa5, 0xec, 0xb2, 0x65, 0xe1, + 0x2f, 0xad, 0xd1, 0x32, 0xf3, 0x7f, 0xb6, 0x07, 0xb9, 0x28, 0x49, 0xb2, 0xcb, 0x24, 0xa9, 0x20, + 0x24, 0xc9, 0x1a, 0x49, 0x52, 0xc6, 0x7c, 0xde, 0xcf, 0x82, 0x5d, 0xfc, 0x09, 0x00, 0x00, 0xff, + 0xff, 0x64, 0x67, 0xa0, 0x98, 0x04, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_string_rule_item_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_string_rule_item_operator.pb.go new file mode 100644 index 0000000..b195a46 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_string_rule_item_operator.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/user_list_string_rule_item_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list string rule item operators. +type UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator int32 + +const ( + // Not specified. + UserListStringRuleItemOperatorEnum_UNSPECIFIED UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 0 + // Used for return value only. Represents value unknown in this version. + UserListStringRuleItemOperatorEnum_UNKNOWN UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 1 + // Contains. + UserListStringRuleItemOperatorEnum_CONTAINS UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 2 + // Equals. + UserListStringRuleItemOperatorEnum_EQUALS UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 3 + // Starts with. + UserListStringRuleItemOperatorEnum_STARTS_WITH UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 4 + // Ends with. 
+ UserListStringRuleItemOperatorEnum_ENDS_WITH UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 5 + // Not equals. + UserListStringRuleItemOperatorEnum_NOT_EQUALS UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 6 + // Not contains. + UserListStringRuleItemOperatorEnum_NOT_CONTAINS UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 7 + // Not starts with. + UserListStringRuleItemOperatorEnum_NOT_STARTS_WITH UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 8 + // Not ends with. + UserListStringRuleItemOperatorEnum_NOT_ENDS_WITH UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator = 9 +) + +var UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONTAINS", + 3: "EQUALS", + 4: "STARTS_WITH", + 5: "ENDS_WITH", + 6: "NOT_EQUALS", + 7: "NOT_CONTAINS", + 8: "NOT_STARTS_WITH", + 9: "NOT_ENDS_WITH", +} +var UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONTAINS": 2, + "EQUALS": 3, + "STARTS_WITH": 4, + "ENDS_WITH": 5, + "NOT_EQUALS": 6, + "NOT_CONTAINS": 7, + "NOT_STARTS_WITH": 8, + "NOT_ENDS_WITH": 9, +} + +func (x UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator) String() string { + return proto.EnumName(UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator_name, int32(x)) +} +func (UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_string_rule_item_operator_932eec4397d453ac, []int{0, 0} +} + +// Supported rule operator for string type. +type UserListStringRuleItemOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListStringRuleItemOperatorEnum) Reset() { *m = UserListStringRuleItemOperatorEnum{} } +func (m *UserListStringRuleItemOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListStringRuleItemOperatorEnum) ProtoMessage() {} +func (*UserListStringRuleItemOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_string_rule_item_operator_932eec4397d453ac, []int{0} +} +func (m *UserListStringRuleItemOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListStringRuleItemOperatorEnum.Unmarshal(m, b) +} +func (m *UserListStringRuleItemOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListStringRuleItemOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListStringRuleItemOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListStringRuleItemOperatorEnum.Merge(dst, src) +} +func (m *UserListStringRuleItemOperatorEnum) XXX_Size() int { + return xxx_messageInfo_UserListStringRuleItemOperatorEnum.Size(m) +} +func (m *UserListStringRuleItemOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListStringRuleItemOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListStringRuleItemOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListStringRuleItemOperatorEnum)(nil), "google.ads.googleads.v1.enums.UserListStringRuleItemOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator", UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator_name, 
UserListStringRuleItemOperatorEnum_UserListStringRuleItemOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_string_rule_item_operator.proto", fileDescriptor_user_list_string_rule_item_operator_932eec4397d453ac) +} + +var fileDescriptor_user_list_string_rule_item_operator_932eec4397d453ac = []byte{ + // 391 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0x41, 0x8f, 0x94, 0x30, + 0x14, 0x16, 0x56, 0x67, 0x77, 0xbb, 0xbb, 0x2e, 0xd6, 0x9b, 0x71, 0x4d, 0x76, 0xbc, 0x97, 0x10, + 0x6f, 0xf5, 0xd4, 0xd9, 0xc1, 0x91, 0x38, 0x29, 0xe3, 0x00, 0x33, 0x89, 0x21, 0x21, 0x28, 0x0d, + 0x21, 0x81, 0x96, 0xb4, 0x65, 0x7e, 0x90, 0x47, 0x7f, 0x86, 0x47, 0xff, 0x89, 0xfe, 0x02, 0x8f, + 0x86, 0xc2, 0x10, 0x2f, 0x3b, 0x17, 0xf2, 0xbd, 0xf7, 0xbe, 0xef, 0x7b, 0xf4, 0x7b, 0x60, 0x55, + 0x0a, 0x51, 0xd6, 0xcc, 0xcd, 0x0b, 0xe5, 0x0e, 0xb0, 0x47, 0x07, 0xcf, 0x65, 0xbc, 0x6b, 0x94, + 0xdb, 0x29, 0x26, 0xb3, 0xba, 0x52, 0x3a, 0x53, 0x5a, 0x56, 0xbc, 0xcc, 0x64, 0x57, 0xb3, 0xac, + 0xd2, 0xac, 0xc9, 0x44, 0xcb, 0x64, 0xae, 0x85, 0x44, 0xad, 0x14, 0x5a, 0xc0, 0xbb, 0x41, 0x8d, + 0xf2, 0x42, 0xa1, 0xc9, 0x08, 0x1d, 0x3c, 0x64, 0x8c, 0x5e, 0xbd, 0x3e, 0xee, 0x69, 0x2b, 0x37, + 0xe7, 0x5c, 0xe8, 0x5c, 0x57, 0x82, 0xab, 0x41, 0x3c, 0xff, 0x6d, 0x81, 0x79, 0xa2, 0x98, 0x5c, + 0x57, 0x4a, 0x47, 0x66, 0xd1, 0xb6, 0xab, 0x59, 0xa0, 0x59, 0x13, 0x8e, 0x5b, 0x7c, 0xde, 0x35, + 0xf3, 0x9f, 0x16, 0x78, 0x73, 0x9a, 0x06, 0x6f, 0xc1, 0x55, 0x42, 0xa3, 0x8d, 0xff, 0x10, 0x7c, + 0x08, 0xfc, 0xa5, 0xf3, 0x04, 0x5e, 0x81, 0xf3, 0x84, 0x7e, 0xa2, 0xe1, 0x9e, 0x3a, 0x16, 0xbc, + 0x06, 0x17, 0x0f, 0x21, 0x8d, 0x49, 0x40, 0x23, 0xc7, 0x86, 0x00, 0xcc, 0xfc, 0xcf, 0x09, 0x59, + 0x47, 0xce, 0x59, 0xaf, 0x8b, 0x62, 0xb2, 0x8d, 0xa3, 0x6c, 0x1f, 0xc4, 0x1f, 0x9d, 0xa7, 0xf0, + 0x06, 0x5c, 0xfa, 0x74, 0x39, 0x96, 0xcf, 0xe0, 0x73, 0x00, 0x68, 0x18, 0x67, 0x23, 0x7f, 0x06, + 0x1d, 0x70, 0xdd, 0xd7, 0x93, 0xdb, 0x39, 0x7c, 0x09, 0x6e, 0xfb, 0xce, 0xff, 0x2e, 0x17, 0xf0, + 0x05, 0xb8, 0x31, 0xb2, 0xc9, 0xe9, 0x72, 0xf1, 0xd7, 0x02, 0xf7, 0xdf, 0x44, 0x83, 0x4e, 0xe6, + 0xb5, 0x78, 0x7b, 0xfa, 0x9d, 0x9b, 0x3e, 0xb6, 0x8d, 0xf5, 0x65, 0x31, 0xba, 0x94, 0xa2, 0xce, + 0x79, 0x89, 0x84, 0x2c, 0xdd, 0x92, 0x71, 0x13, 0xea, 0xf1, 0x9c, 0x6d, 0xa5, 0x1e, 0xb9, 0xee, + 0x7b, 0xf3, 0xfd, 0x6e, 0x9f, 0xad, 0x08, 0xf9, 0x61, 0xdf, 0xad, 0x06, 0x2b, 0x52, 0x28, 0x34, + 0xc0, 0x1e, 0xed, 0x3c, 0xd4, 0x47, 0xaf, 0x7e, 0x1d, 0xe7, 0x29, 0x29, 0x54, 0x3a, 0xcd, 0xd3, + 0x9d, 0x97, 0x9a, 0xf9, 0x1f, 0xfb, 0x7e, 0x68, 0x62, 0x4c, 0x0a, 0x85, 0xf1, 0xc4, 0xc0, 0x78, + 0xe7, 0x61, 0x6c, 0x38, 0x5f, 0x67, 0xe6, 0xc7, 0xde, 0xfd, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xf8, + 0xb7, 0x29, 0xc0, 0x75, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_type.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_type.pb.go new file mode 100644 index 0000000..71b0e49 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/user_list_type.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/enums/user_list_type.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum containing possible user list types. +type UserListTypeEnum_UserListType int32 + +const ( + // Not specified. + UserListTypeEnum_UNSPECIFIED UserListTypeEnum_UserListType = 0 + // Used for return value only. Represents value unknown in this version. + UserListTypeEnum_UNKNOWN UserListTypeEnum_UserListType = 1 + // UserList represented as a collection of conversion types. + UserListTypeEnum_REMARKETING UserListTypeEnum_UserListType = 2 + // UserList represented as a combination of other user lists/interests. + UserListTypeEnum_LOGICAL UserListTypeEnum_UserListType = 3 + // UserList created in the Google Ad Manager platform. + UserListTypeEnum_EXTERNAL_REMARKETING UserListTypeEnum_UserListType = 4 + // UserList associated with a rule. + UserListTypeEnum_RULE_BASED UserListTypeEnum_UserListType = 5 + // UserList with users similar to users of another UserList. + UserListTypeEnum_SIMILAR UserListTypeEnum_UserListType = 6 + // UserList of first-party CRM data provided by advertiser in the form of + // emails or other formats. + UserListTypeEnum_CRM_BASED UserListTypeEnum_UserListType = 7 +) + +var UserListTypeEnum_UserListType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "REMARKETING", + 3: "LOGICAL", + 4: "EXTERNAL_REMARKETING", + 5: "RULE_BASED", + 6: "SIMILAR", + 7: "CRM_BASED", +} +var UserListTypeEnum_UserListType_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REMARKETING": 2, + "LOGICAL": 3, + "EXTERNAL_REMARKETING": 4, + "RULE_BASED": 5, + "SIMILAR": 6, + "CRM_BASED": 7, +} + +func (x UserListTypeEnum_UserListType) String() string { + return proto.EnumName(UserListTypeEnum_UserListType_name, int32(x)) +} +func (UserListTypeEnum_UserListType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_type_a2df5dfff3017dbe, []int{0, 0} +} + +// The user list types. 
+type UserListTypeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListTypeEnum) Reset() { *m = UserListTypeEnum{} } +func (m *UserListTypeEnum) String() string { return proto.CompactTextString(m) } +func (*UserListTypeEnum) ProtoMessage() {} +func (*UserListTypeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_type_a2df5dfff3017dbe, []int{0} +} +func (m *UserListTypeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListTypeEnum.Unmarshal(m, b) +} +func (m *UserListTypeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListTypeEnum.Marshal(b, m, deterministic) +} +func (dst *UserListTypeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListTypeEnum.Merge(dst, src) +} +func (m *UserListTypeEnum) XXX_Size() int { + return xxx_messageInfo_UserListTypeEnum.Size(m) +} +func (m *UserListTypeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListTypeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListTypeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListTypeEnum)(nil), "google.ads.googleads.v1.enums.UserListTypeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.UserListTypeEnum_UserListType", UserListTypeEnum_UserListType_name, UserListTypeEnum_UserListType_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/user_list_type.proto", fileDescriptor_user_list_type_a2df5dfff3017dbe) +} + +var fileDescriptor_user_list_type_a2df5dfff3017dbe = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4a, 0xeb, 0x40, + 0x18, 0xc5, 0x6f, 0xd2, 0x7b, 0x5b, 0xee, 0xd4, 0x3f, 0x31, 0xb8, 0x10, 0xb1, 0x8b, 0xf6, 0x01, + 0x26, 0x44, 0x77, 0xe3, 0x6a, 0xd2, 0x8e, 0x21, 0x34, 0x4d, 0x4b, 0xda, 0x54, 0x91, 0x40, 0x88, + 0x66, 0x08, 0x81, 0x76, 0x26, 0x64, 0xd2, 0x42, 0xdf, 0xc2, 0x67, 0x10, 0xdc, 0xf8, 0x28, 0x3e, + 0x88, 0x0b, 0x9f, 0x42, 0x26, 0x69, 0x4b, 0x37, 0xba, 0x19, 0x0e, 0xf3, 0x3b, 0xe7, 0xe3, 0xfb, + 0x0e, 0xb8, 0x4e, 0x39, 0x4f, 0x17, 0xd4, 0x88, 0x13, 0x61, 0xd4, 0x52, 0xaa, 0xb5, 0x69, 0x50, + 0xb6, 0x5a, 0x0a, 0x63, 0x25, 0x68, 0x11, 0x2d, 0x32, 0x51, 0x46, 0xe5, 0x26, 0xa7, 0x30, 0x2f, + 0x78, 0xc9, 0xf5, 0x4e, 0x6d, 0x84, 0x71, 0x22, 0xe0, 0x3e, 0x03, 0xd7, 0x26, 0xac, 0x32, 0x97, + 0x57, 0xbb, 0x91, 0x79, 0x66, 0xc4, 0x8c, 0xf1, 0x32, 0x2e, 0x33, 0xce, 0x44, 0x1d, 0xee, 0xbd, + 0x29, 0x40, 0x0b, 0x04, 0x2d, 0xdc, 0x4c, 0x94, 0xb3, 0x4d, 0x4e, 0x09, 0x5b, 0x2d, 0x7b, 0x2f, + 0x0a, 0x38, 0x3a, 0xfc, 0xd4, 0x4f, 0x41, 0x3b, 0xf0, 0xa6, 0x13, 0xd2, 0x77, 0xee, 0x1c, 0x32, + 0xd0, 0xfe, 0xe8, 0x6d, 0xd0, 0x0a, 0xbc, 0xa1, 0x37, 0xbe, 0xf7, 0x34, 0x45, 0x52, 0x9f, 0x8c, + 0xb0, 0x3f, 0x24, 0x33, 0xc7, 0xb3, 0x35, 0x55, 0x52, 0x77, 0x6c, 0x3b, 0x7d, 0xec, 0x6a, 0x0d, + 0xfd, 0x02, 0x9c, 0x93, 0x87, 0x19, 0xf1, 0x3d, 0xec, 0x46, 0x87, 0xb6, 0xbf, 0xfa, 0x09, 0x00, + 0x7e, 0xe0, 0x92, 0xc8, 0xc2, 0x53, 0x32, 0xd0, 0xfe, 0xc9, 0xd8, 0xd4, 0x19, 0x39, 0x2e, 0xf6, + 0xb5, 0xa6, 0x7e, 0x0c, 0xfe, 0xf7, 0xfd, 0xd1, 0x96, 0xb5, 0xac, 0x4f, 0x05, 0x74, 0x9f, 0xf9, + 0x12, 0xfe, 0x7a, 0xab, 0x75, 0x76, 0xb8, 0xf5, 0x44, 0x1e, 0x38, 0x51, 0x1e, 0xad, 0x6d, 0x26, + 0xe5, 0x8b, 0x98, 0xa5, 0x90, 0x17, 0xa9, 0x91, 0x52, 0x56, 0x9d, 0xbf, 0xeb, 0x38, 0xcf, 0xc4, + 0x0f, 0x95, 0xdf, 0x56, 0xef, 0xab, 0xda, 0xb0, 0x31, 0x7e, 0x57, 0x3b, 0x76, 0x3d, 0x0a, 0x27, + 0x02, 0xd6, 0x52, 0xaa, 
0xb9, 0x09, 0x65, 0x6d, 0xe2, 0x63, 0xc7, 0x43, 0x9c, 0x88, 0x70, 0xcf, + 0xc3, 0xb9, 0x19, 0x56, 0xfc, 0x4b, 0xed, 0xd6, 0x9f, 0x08, 0xe1, 0x44, 0x20, 0xb4, 0x77, 0x20, + 0x34, 0x37, 0x11, 0xaa, 0x3c, 0x4f, 0xcd, 0x6a, 0xb1, 0x9b, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x09, 0x1d, 0x7e, 0x46, 0x0a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_display_url_mode.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_display_url_mode.pb.go new file mode 100644 index 0000000..c60cdaa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_display_url_mode.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/vanity_pharma_display_url_mode.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible display modes for vanity pharma URLs. +type VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode int32 + +const ( + // Not specified. + VanityPharmaDisplayUrlModeEnum_UNSPECIFIED VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode = 0 + // Used for return value only. Represents value unknown in this version. + VanityPharmaDisplayUrlModeEnum_UNKNOWN VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode = 1 + // Replace vanity pharma URL with manufacturer website url. + VanityPharmaDisplayUrlModeEnum_MANUFACTURER_WEBSITE_URL VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode = 2 + // Replace vanity pharma URL with description of the website. + VanityPharmaDisplayUrlModeEnum_WEBSITE_DESCRIPTION VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode = 3 +) + +var VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MANUFACTURER_WEBSITE_URL", + 3: "WEBSITE_DESCRIPTION", +} +var VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MANUFACTURER_WEBSITE_URL": 2, + "WEBSITE_DESCRIPTION": 3, +} + +func (x VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode) String() string { + return proto.EnumName(VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode_name, int32(x)) +} +func (VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_vanity_pharma_display_url_mode_4cfd7515397e8a48, []int{0, 0} +} + +// The display mode for vanity pharma URLs. 
+type VanityPharmaDisplayUrlModeEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VanityPharmaDisplayUrlModeEnum) Reset() { *m = VanityPharmaDisplayUrlModeEnum{} } +func (m *VanityPharmaDisplayUrlModeEnum) String() string { return proto.CompactTextString(m) } +func (*VanityPharmaDisplayUrlModeEnum) ProtoMessage() {} +func (*VanityPharmaDisplayUrlModeEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_vanity_pharma_display_url_mode_4cfd7515397e8a48, []int{0} +} +func (m *VanityPharmaDisplayUrlModeEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VanityPharmaDisplayUrlModeEnum.Unmarshal(m, b) +} +func (m *VanityPharmaDisplayUrlModeEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VanityPharmaDisplayUrlModeEnum.Marshal(b, m, deterministic) +} +func (dst *VanityPharmaDisplayUrlModeEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_VanityPharmaDisplayUrlModeEnum.Merge(dst, src) +} +func (m *VanityPharmaDisplayUrlModeEnum) XXX_Size() int { + return xxx_messageInfo_VanityPharmaDisplayUrlModeEnum.Size(m) +} +func (m *VanityPharmaDisplayUrlModeEnum) XXX_DiscardUnknown() { + xxx_messageInfo_VanityPharmaDisplayUrlModeEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_VanityPharmaDisplayUrlModeEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*VanityPharmaDisplayUrlModeEnum)(nil), "google.ads.googleads.v1.enums.VanityPharmaDisplayUrlModeEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode", VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode_name, VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/vanity_pharma_display_url_mode.proto", fileDescriptor_vanity_pharma_display_url_mode_4cfd7515397e8a48) +} + +var fileDescriptor_vanity_pharma_display_url_mode_4cfd7515397e8a48 = []byte{ + // 347 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0x41, 0x6a, 0xb3, 0x40, + 0x18, 0xfd, 0x35, 0xf0, 0x17, 0x26, 0x8b, 0x8a, 0x5d, 0xb4, 0x84, 0xa4, 0x25, 0x39, 0xc0, 0x88, + 0x74, 0x37, 0x5d, 0x69, 0x62, 0x82, 0xb4, 0x31, 0x62, 0xa2, 0x81, 0x22, 0xc8, 0xb4, 0x23, 0x56, + 0xd0, 0x19, 0xeb, 0x98, 0x40, 0xce, 0xd1, 0x1b, 0x74, 0xd9, 0xa3, 0xf4, 0x28, 0x5d, 0xf7, 0x00, + 0xc5, 0x99, 0x9a, 0x5d, 0xba, 0x19, 0x1e, 0xf3, 0xbd, 0xef, 0xbd, 0xef, 0x3d, 0x60, 0x67, 0x8c, + 0x65, 0x45, 0x6a, 0x60, 0xc2, 0x0d, 0x09, 0x5b, 0xb4, 0x37, 0x8d, 0x94, 0xee, 0x4a, 0x6e, 0xec, + 0x31, 0xcd, 0x9b, 0x43, 0x52, 0xbd, 0xe0, 0xba, 0xc4, 0x09, 0xc9, 0x79, 0x55, 0xe0, 0x43, 0xb2, + 0xab, 0x8b, 0xa4, 0x64, 0x24, 0x85, 0x55, 0xcd, 0x1a, 0xa6, 0x8f, 0xe4, 0x22, 0xc4, 0x84, 0xc3, + 0xa3, 0x06, 0xdc, 0x9b, 0x50, 0x68, 0x0c, 0x86, 0x9d, 0x45, 0x95, 0x1b, 0x98, 0x52, 0xd6, 0xe0, + 0x26, 0x67, 0x94, 0xcb, 0xe5, 0xc9, 0x9b, 0x02, 0xae, 0x23, 0xe1, 0xe2, 0x0b, 0x93, 0x99, 0xf4, + 0x08, 0xeb, 0x62, 0xc9, 0x48, 0xea, 0xd0, 0x5d, 0x39, 0x79, 0x05, 0x83, 0xd3, 0x0c, 0xfd, 0x1c, + 0xf4, 0x43, 0x6f, 0xed, 0x3b, 0x53, 0x77, 0xee, 0x3a, 0x33, 0xed, 0x9f, 0xde, 0x07, 0x67, 0xa1, + 0x77, 0xef, 0xad, 0xb6, 0x9e, 0xa6, 0xe8, 0x43, 0x70, 0xb5, 0xb4, 0xbc, 0x70, 0x6e, 0x4d, 0x37, + 0x61, 0xe0, 0x04, 0xc9, 0xd6, 0xb1, 0xd7, 0xee, 0xc6, 0x49, 0xc2, 0xe0, 0x41, 0x53, 0xf5, 0x4b, + 0x70, 0xd1, 0x7d, 0xcc, 0x9c, 0xf5, 0x34, 0x70, 0xfd, 0x8d, 0xbb, 0xf2, 0xb4, 0x9e, 0xfd, 0xad, + 
0x80, 0xf1, 0x33, 0x2b, 0xe1, 0x9f, 0xc9, 0xec, 0x9b, 0xd3, 0x67, 0xf9, 0x6d, 0x38, 0x5f, 0x79, + 0xfc, 0xed, 0x17, 0x66, 0xac, 0xc0, 0x34, 0x83, 0xac, 0xce, 0x8c, 0x2c, 0xa5, 0x22, 0x7a, 0xd7, + 0x77, 0x95, 0xf3, 0x13, 0xf5, 0xdf, 0x89, 0xf7, 0x5d, 0xed, 0x2d, 0x2c, 0xeb, 0x43, 0x1d, 0x2d, + 0xa4, 0x94, 0x45, 0x38, 0x94, 0xb0, 0x45, 0x91, 0x09, 0xdb, 0x92, 0xf8, 0x67, 0x37, 0x8f, 0x2d, + 0xc2, 0xe3, 0xe3, 0x3c, 0x8e, 0xcc, 0x58, 0xcc, 0xbf, 0xd4, 0xb1, 0xfc, 0x44, 0xc8, 0x22, 0x1c, + 0xa1, 0x23, 0x03, 0xa1, 0xc8, 0x44, 0x48, 0x70, 0x9e, 0xfe, 0x8b, 0xc3, 0x6e, 0x7f, 0x02, 0x00, + 0x00, 0xff, 0xff, 0xbf, 0xa9, 0x73, 0xe1, 0x16, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_text.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_text.pb.go new file mode 100644 index 0000000..a294164 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/vanity_pharma_text.pb.go @@ -0,0 +1,172 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/vanity_pharma_text.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible text. +type VanityPharmaTextEnum_VanityPharmaText int32 + +const ( + // Not specified. + VanityPharmaTextEnum_UNSPECIFIED VanityPharmaTextEnum_VanityPharmaText = 0 + // Used for return value only. Represents value unknown in this version. + VanityPharmaTextEnum_UNKNOWN VanityPharmaTextEnum_VanityPharmaText = 1 + // Prescription treatment website with website content in English. + VanityPharmaTextEnum_PRESCRIPTION_TREATMENT_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 2 + // Prescription treatment website with website content in Spanish + // (Sitio de tratamientos con receta). + VanityPharmaTextEnum_PRESCRIPTION_TREATMENT_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 3 + // Prescription device website with website content in English. + VanityPharmaTextEnum_PRESCRIPTION_DEVICE_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 4 + // Prescription device website with website content in Spanish (Sitio de + // dispositivos con receta). + VanityPharmaTextEnum_PRESCRIPTION_DEVICE_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 5 + // Medical device website with website content in English. + VanityPharmaTextEnum_MEDICAL_DEVICE_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 6 + // Medical device website with website content in Spanish (Sitio de + // dispositivos médicos). + VanityPharmaTextEnum_MEDICAL_DEVICE_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 7 + // Preventative treatment website with website content in English. 
+ VanityPharmaTextEnum_PREVENTATIVE_TREATMENT_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 8 + // Preventative treatment website with website content in Spanish (Sitio de + // tratamientos preventivos). + VanityPharmaTextEnum_PREVENTATIVE_TREATMENT_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 9 + // Prescription contraception website with website content in English. + VanityPharmaTextEnum_PRESCRIPTION_CONTRACEPTION_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 10 + // Prescription contraception website with website content in Spanish (Sitio + // de anticonceptivos con receta). + VanityPharmaTextEnum_PRESCRIPTION_CONTRACEPTION_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 11 + // Prescription vaccine website with website content in English. + VanityPharmaTextEnum_PRESCRIPTION_VACCINE_WEBSITE_EN VanityPharmaTextEnum_VanityPharmaText = 12 + // Prescription vaccine website with website content in Spanish (Sitio de + // vacunas con receta). + VanityPharmaTextEnum_PRESCRIPTION_VACCINE_WEBSITE_ES VanityPharmaTextEnum_VanityPharmaText = 13 +) + +var VanityPharmaTextEnum_VanityPharmaText_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PRESCRIPTION_TREATMENT_WEBSITE_EN", + 3: "PRESCRIPTION_TREATMENT_WEBSITE_ES", + 4: "PRESCRIPTION_DEVICE_WEBSITE_EN", + 5: "PRESCRIPTION_DEVICE_WEBSITE_ES", + 6: "MEDICAL_DEVICE_WEBSITE_EN", + 7: "MEDICAL_DEVICE_WEBSITE_ES", + 8: "PREVENTATIVE_TREATMENT_WEBSITE_EN", + 9: "PREVENTATIVE_TREATMENT_WEBSITE_ES", + 10: "PRESCRIPTION_CONTRACEPTION_WEBSITE_EN", + 11: "PRESCRIPTION_CONTRACEPTION_WEBSITE_ES", + 12: "PRESCRIPTION_VACCINE_WEBSITE_EN", + 13: "PRESCRIPTION_VACCINE_WEBSITE_ES", +} +var VanityPharmaTextEnum_VanityPharmaText_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PRESCRIPTION_TREATMENT_WEBSITE_EN": 2, + "PRESCRIPTION_TREATMENT_WEBSITE_ES": 3, + "PRESCRIPTION_DEVICE_WEBSITE_EN": 4, + "PRESCRIPTION_DEVICE_WEBSITE_ES": 5, + "MEDICAL_DEVICE_WEBSITE_EN": 6, + "MEDICAL_DEVICE_WEBSITE_ES": 7, + "PREVENTATIVE_TREATMENT_WEBSITE_EN": 8, + "PREVENTATIVE_TREATMENT_WEBSITE_ES": 9, + "PRESCRIPTION_CONTRACEPTION_WEBSITE_EN": 10, + "PRESCRIPTION_CONTRACEPTION_WEBSITE_ES": 11, + "PRESCRIPTION_VACCINE_WEBSITE_EN": 12, + "PRESCRIPTION_VACCINE_WEBSITE_ES": 13, +} + +func (x VanityPharmaTextEnum_VanityPharmaText) String() string { + return proto.EnumName(VanityPharmaTextEnum_VanityPharmaText_name, int32(x)) +} +func (VanityPharmaTextEnum_VanityPharmaText) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_vanity_pharma_text_61cebb42c8c693a8, []int{0, 0} +} + +// The text that will be displayed in display URL of the text ad when website +// description is the selected display mode for vanity pharma URLs. 
+type VanityPharmaTextEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VanityPharmaTextEnum) Reset() { *m = VanityPharmaTextEnum{} } +func (m *VanityPharmaTextEnum) String() string { return proto.CompactTextString(m) } +func (*VanityPharmaTextEnum) ProtoMessage() {} +func (*VanityPharmaTextEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_vanity_pharma_text_61cebb42c8c693a8, []int{0} +} +func (m *VanityPharmaTextEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VanityPharmaTextEnum.Unmarshal(m, b) +} +func (m *VanityPharmaTextEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VanityPharmaTextEnum.Marshal(b, m, deterministic) +} +func (dst *VanityPharmaTextEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_VanityPharmaTextEnum.Merge(dst, src) +} +func (m *VanityPharmaTextEnum) XXX_Size() int { + return xxx_messageInfo_VanityPharmaTextEnum.Size(m) +} +func (m *VanityPharmaTextEnum) XXX_DiscardUnknown() { + xxx_messageInfo_VanityPharmaTextEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_VanityPharmaTextEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*VanityPharmaTextEnum)(nil), "google.ads.googleads.v1.enums.VanityPharmaTextEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.VanityPharmaTextEnum_VanityPharmaText", VanityPharmaTextEnum_VanityPharmaText_name, VanityPharmaTextEnum_VanityPharmaText_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/vanity_pharma_text.proto", fileDescriptor_vanity_pharma_text_61cebb42c8c693a8) +} + +var fileDescriptor_vanity_pharma_text_61cebb42c8c693a8 = []byte{ + // 429 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x8a, 0x13, 0x31, + 0x1c, 0xc6, 0x6d, 0xbb, 0xee, 0x6a, 0xaa, 0x18, 0x06, 0x3d, 0x28, 0x56, 0x69, 0x65, 0x0f, 0x5e, + 0x32, 0x0c, 0x82, 0x87, 0x78, 0x4a, 0xd3, 0xb8, 0x04, 0xdd, 0x74, 0x68, 0xb2, 0x59, 0x90, 0xc2, + 0x10, 0x9d, 0x61, 0x2c, 0x6c, 0x93, 0xd2, 0xcc, 0x96, 0xf5, 0x19, 0x7c, 0x0b, 0x8f, 0x3e, 0x8a, + 0x8f, 0xb2, 0x47, 0x9f, 0x40, 0x9a, 0xd9, 0x96, 0xed, 0xb2, 0x6e, 0x7b, 0x19, 0xfe, 0x93, 0xef, + 0xf7, 0x7d, 0xfc, 0xff, 0xf0, 0x81, 0xf7, 0xa5, 0x73, 0xe5, 0x59, 0x11, 0x9b, 0xdc, 0xc7, 0xf5, + 0xb8, 0x9c, 0x16, 0x49, 0x5c, 0xd8, 0xf3, 0xa9, 0x8f, 0x17, 0xc6, 0x4e, 0xaa, 0x1f, 0xd9, 0xec, + 0xbb, 0x99, 0x4f, 0x4d, 0x56, 0x15, 0x17, 0x15, 0x9a, 0xcd, 0x5d, 0xe5, 0xa2, 0x4e, 0x0d, 0x23, + 0x93, 0x7b, 0xb4, 0xf6, 0xa1, 0x45, 0x82, 0x82, 0xef, 0xc5, 0xcb, 0x55, 0xec, 0x6c, 0x12, 0x1b, + 0x6b, 0x5d, 0x65, 0xaa, 0x89, 0xb3, 0xbe, 0x36, 0xf7, 0x7e, 0xee, 0x81, 0xa7, 0x3a, 0x24, 0xa7, + 0x21, 0x58, 0x15, 0x17, 0x15, 0xb3, 0xe7, 0xd3, 0xde, 0xdf, 0x16, 0x80, 0x37, 0x85, 0xe8, 0x09, + 0x68, 0x9f, 0x08, 0x99, 0x32, 0xca, 0x3f, 0x72, 0x36, 0x80, 0xf7, 0xa2, 0x36, 0x38, 0x38, 0x11, + 0x9f, 0xc4, 0xf0, 0x54, 0xc0, 0x46, 0x74, 0x08, 0xba, 0xe9, 0x88, 0x49, 0x3a, 0xe2, 0xa9, 0xe2, + 0x43, 0x91, 0xa9, 0x11, 0x23, 0xea, 0x98, 0x09, 0x95, 0x9d, 0xb2, 0xbe, 0xe4, 0x8a, 0x65, 0x4c, + 0xc0, 0xe6, 0x2e, 0x98, 0x84, 0xad, 0xa8, 0x07, 0x5e, 0x6d, 0x60, 0x03, 0xa6, 0x39, 0x65, 0xd7, + 0xa3, 0xf6, 0xb6, 0x32, 0x12, 0xde, 0x8f, 0x3a, 0xe0, 0xf9, 0x31, 0x1b, 0x70, 0x4a, 0x3e, 0xdf, + 0x12, 0xb1, 0x7f, 0x97, 0x2c, 0xe1, 0xc1, 0xd5, 0xb2, 0x9a, 0x09, 0x45, 0x14, 0xd7, 0xec, 0xf6, + 0x9b, 0x1e, 0xec, 0x82, 0x49, 0xf8, 0x30, 0x7a, 0x0b, 0x0e, 0x37, 0xf6, 0xa5, 0x43, 0xa1, 0x46, + 0x84, 
0xb2, 0xfa, 0xef, 0x5a, 0x22, 0xd8, 0x15, 0x95, 0xb0, 0x1d, 0xbd, 0x01, 0xaf, 0x37, 0x50, + 0x4d, 0x28, 0xe5, 0x62, 0xe3, 0xce, 0x47, 0xdb, 0x21, 0x09, 0x1f, 0xf7, 0x2f, 0x1b, 0xa0, 0xfb, + 0xcd, 0x4d, 0xd1, 0x9d, 0x8d, 0xea, 0x3f, 0xbb, 0xd9, 0x8b, 0x74, 0x59, 0xa5, 0xb4, 0xf1, 0xa5, + 0x7f, 0xe5, 0x2b, 0xdd, 0x99, 0xb1, 0x25, 0x72, 0xf3, 0x32, 0x2e, 0x0b, 0x1b, 0x8a, 0xb6, 0x6a, + 0xf4, 0x6c, 0xe2, 0xff, 0x53, 0xf0, 0x0f, 0xe1, 0xfb, 0xab, 0xd9, 0x3a, 0x22, 0xe4, 0x77, 0xb3, + 0x73, 0x54, 0x47, 0x91, 0xdc, 0xa3, 0x7a, 0x5c, 0x4e, 0x3a, 0x41, 0xcb, 0x72, 0xfa, 0x3f, 0x2b, + 0x7d, 0x4c, 0x72, 0x3f, 0x5e, 0xeb, 0x63, 0x9d, 0x8c, 0x83, 0x7e, 0xd9, 0xec, 0xd6, 0x8f, 0x18, + 0x93, 0xdc, 0x63, 0xbc, 0x26, 0x30, 0xd6, 0x09, 0xc6, 0x81, 0xf9, 0xba, 0x1f, 0x16, 0x7b, 0xf7, + 0x2f, 0x00, 0x00, 0xff, 0xff, 0x16, 0x7d, 0x51, 0x33, 0x78, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operand.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operand.pb.go new file mode 100644 index 0000000..5783d56 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operand.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/webpage_condition_operand.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The webpage condition operand in webpage criterion. +type WebpageConditionOperandEnum_WebpageConditionOperand int32 + +const ( + // Not specified. + WebpageConditionOperandEnum_UNSPECIFIED WebpageConditionOperandEnum_WebpageConditionOperand = 0 + // Used for return value only. Represents value unknown in this version. + WebpageConditionOperandEnum_UNKNOWN WebpageConditionOperandEnum_WebpageConditionOperand = 1 + // Operand denoting a webpage URL targeting condition. + WebpageConditionOperandEnum_URL WebpageConditionOperandEnum_WebpageConditionOperand = 2 + // Operand denoting a webpage category targeting condition. + WebpageConditionOperandEnum_CATEGORY WebpageConditionOperandEnum_WebpageConditionOperand = 3 + // Operand denoting a webpage title targeting condition. + WebpageConditionOperandEnum_PAGE_TITLE WebpageConditionOperandEnum_WebpageConditionOperand = 4 + // Operand denoting a webpage content targeting condition. + WebpageConditionOperandEnum_PAGE_CONTENT WebpageConditionOperandEnum_WebpageConditionOperand = 5 + // Operand denoting a webpage custom label targeting condition. 
+ WebpageConditionOperandEnum_CUSTOM_LABEL WebpageConditionOperandEnum_WebpageConditionOperand = 6 +) + +var WebpageConditionOperandEnum_WebpageConditionOperand_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "URL", + 3: "CATEGORY", + 4: "PAGE_TITLE", + 5: "PAGE_CONTENT", + 6: "CUSTOM_LABEL", +} +var WebpageConditionOperandEnum_WebpageConditionOperand_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "URL": 2, + "CATEGORY": 3, + "PAGE_TITLE": 4, + "PAGE_CONTENT": 5, + "CUSTOM_LABEL": 6, +} + +func (x WebpageConditionOperandEnum_WebpageConditionOperand) String() string { + return proto.EnumName(WebpageConditionOperandEnum_WebpageConditionOperand_name, int32(x)) +} +func (WebpageConditionOperandEnum_WebpageConditionOperand) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_webpage_condition_operand_ce1dd41c06601f19, []int{0, 0} +} + +// Container for enum describing webpage condition operand in webpage criterion. +type WebpageConditionOperandEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebpageConditionOperandEnum) Reset() { *m = WebpageConditionOperandEnum{} } +func (m *WebpageConditionOperandEnum) String() string { return proto.CompactTextString(m) } +func (*WebpageConditionOperandEnum) ProtoMessage() {} +func (*WebpageConditionOperandEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_webpage_condition_operand_ce1dd41c06601f19, []int{0} +} +func (m *WebpageConditionOperandEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebpageConditionOperandEnum.Unmarshal(m, b) +} +func (m *WebpageConditionOperandEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebpageConditionOperandEnum.Marshal(b, m, deterministic) +} +func (dst *WebpageConditionOperandEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebpageConditionOperandEnum.Merge(dst, src) +} +func (m *WebpageConditionOperandEnum) XXX_Size() int { + return xxx_messageInfo_WebpageConditionOperandEnum.Size(m) +} +func (m *WebpageConditionOperandEnum) XXX_DiscardUnknown() { + xxx_messageInfo_WebpageConditionOperandEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_WebpageConditionOperandEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*WebpageConditionOperandEnum)(nil), "google.ads.googleads.v1.enums.WebpageConditionOperandEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.WebpageConditionOperandEnum_WebpageConditionOperand", WebpageConditionOperandEnum_WebpageConditionOperand_name, WebpageConditionOperandEnum_WebpageConditionOperand_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/webpage_condition_operand.proto", fileDescriptor_webpage_condition_operand_ce1dd41c06601f19) +} + +var fileDescriptor_webpage_condition_operand_ce1dd41c06601f19 = []byte{ + // 354 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xc1, 0x4e, 0xea, 0x40, + 0x14, 0x7d, 0x2d, 0x4f, 0x30, 0x03, 0xd1, 0x49, 0x37, 0x26, 0x0a, 0x0b, 0xf8, 0x80, 0x69, 0x1a, + 0x77, 0x63, 0x5c, 0x4c, 0xeb, 0xd8, 0x10, 0x6b, 0xdb, 0x40, 0x0b, 0xd1, 0x34, 0x69, 0x0a, 0x6d, + 0x9a, 0x26, 0x30, 0xd3, 0x30, 0x80, 0x7b, 0x3f, 0x85, 0xa5, 0x9f, 0xe2, 0xa7, 0xb8, 0xf3, 0x0f, + 0x4c, 0x3b, 0x96, 0x1d, 0x6e, 0x26, 0x67, 0xee, 0xb9, 0xe7, 0xcc, 0xbd, 0x67, 0xc0, 0x7d, 0xce, + 0x79, 0xbe, 0xca, 0xf4, 0x24, 0x15, 0xba, 0x84, 0x15, 0xda, 0x1b, 0x7a, 0xc6, 0x76, 0x6b, 0xa1, + 0xbf, 0x65, 
0x8b, 0x32, 0xc9, 0xb3, 0x78, 0xc9, 0x59, 0x5a, 0x6c, 0x0b, 0xce, 0x62, 0x5e, 0x66, + 0x9b, 0x84, 0xa5, 0xa8, 0xdc, 0xf0, 0x2d, 0xd7, 0x06, 0x52, 0x83, 0x92, 0x54, 0xa0, 0xa3, 0x1c, + 0xed, 0x0d, 0x54, 0xcb, 0xaf, 0xfb, 0x8d, 0x7b, 0x59, 0xe8, 0x09, 0x63, 0x7c, 0x9b, 0x54, 0x1e, + 0x42, 0x8a, 0x47, 0x07, 0x05, 0xdc, 0xcc, 0xe5, 0x03, 0x56, 0xe3, 0xef, 0x49, 0x7b, 0xca, 0x76, + 0xeb, 0xd1, 0xbb, 0x02, 0xae, 0x4e, 0xf0, 0xda, 0x25, 0xe8, 0x86, 0xee, 0xd4, 0xa7, 0xd6, 0xf8, + 0x71, 0x4c, 0x1f, 0xe0, 0x3f, 0xad, 0x0b, 0x3a, 0xa1, 0xfb, 0xe4, 0x7a, 0x73, 0x17, 0x2a, 0x5a, + 0x07, 0xb4, 0xc2, 0x89, 0x03, 0x55, 0xad, 0x07, 0xce, 0x2d, 0x12, 0x50, 0xdb, 0x9b, 0xbc, 0xc0, + 0x96, 0x76, 0x01, 0x80, 0x4f, 0x6c, 0x1a, 0x07, 0xe3, 0xc0, 0xa1, 0xf0, 0xbf, 0x06, 0x41, 0xaf, + 0xbe, 0x5b, 0x9e, 0x1b, 0x50, 0x37, 0x80, 0x67, 0x55, 0xc5, 0x0a, 0xa7, 0x81, 0xf7, 0x1c, 0x3b, + 0xc4, 0xa4, 0x0e, 0x6c, 0x9b, 0xdf, 0x0a, 0x18, 0x2e, 0xf9, 0x1a, 0xfd, 0xb9, 0xa8, 0xd9, 0x3f, + 0x31, 0xa7, 0x5f, 0x2d, 0xea, 0x2b, 0xaf, 0xe6, 0xaf, 0x3c, 0xe7, 0xab, 0x84, 0xe5, 0x88, 0x6f, + 0x72, 0x3d, 0xcf, 0x58, 0x1d, 0x43, 0x13, 0x7b, 0x59, 0x88, 0x13, 0xbf, 0x70, 0x57, 0x9f, 0x07, + 0xb5, 0x65, 0x13, 0xf2, 0xa1, 0x0e, 0x6c, 0x69, 0x45, 0x52, 0x81, 0x24, 0xac, 0xd0, 0xcc, 0x40, + 0x55, 0x66, 0xe2, 0xb3, 0xe1, 0x23, 0x92, 0x8a, 0xe8, 0xc8, 0x47, 0x33, 0x23, 0xaa, 0xf9, 0x2f, + 0x75, 0x28, 0x8b, 0x18, 0x93, 0x54, 0x60, 0x7c, 0xec, 0xc0, 0x78, 0x66, 0x60, 0x5c, 0xf7, 0x2c, + 0xda, 0xf5, 0x60, 0xb7, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x34, 0x47, 0x65, 0x8f, 0x1d, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operator.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operator.pb.go new file mode 100644 index 0000000..59a77fe --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/enums/webpage_condition_operator.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/enums/webpage_condition_operator.proto + +package enums // import "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The webpage condition operator in webpage criterion. +type WebpageConditionOperatorEnum_WebpageConditionOperator int32 + +const ( + // Not specified. + WebpageConditionOperatorEnum_UNSPECIFIED WebpageConditionOperatorEnum_WebpageConditionOperator = 0 + // Used for return value only. Represents value unknown in this version. + WebpageConditionOperatorEnum_UNKNOWN WebpageConditionOperatorEnum_WebpageConditionOperator = 1 + // The argument web condition is equal to the compared web condition. + WebpageConditionOperatorEnum_EQUALS WebpageConditionOperatorEnum_WebpageConditionOperator = 2 + // The argument web condition is part of the compared web condition. 
+ WebpageConditionOperatorEnum_CONTAINS WebpageConditionOperatorEnum_WebpageConditionOperator = 3 +) + +var WebpageConditionOperatorEnum_WebpageConditionOperator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EQUALS", + 3: "CONTAINS", +} +var WebpageConditionOperatorEnum_WebpageConditionOperator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EQUALS": 2, + "CONTAINS": 3, +} + +func (x WebpageConditionOperatorEnum_WebpageConditionOperator) String() string { + return proto.EnumName(WebpageConditionOperatorEnum_WebpageConditionOperator_name, int32(x)) +} +func (WebpageConditionOperatorEnum_WebpageConditionOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_webpage_condition_operator_d37a704d21612ea5, []int{0, 0} +} + +// Container for enum describing webpage condition operator in webpage +// criterion. +type WebpageConditionOperatorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebpageConditionOperatorEnum) Reset() { *m = WebpageConditionOperatorEnum{} } +func (m *WebpageConditionOperatorEnum) String() string { return proto.CompactTextString(m) } +func (*WebpageConditionOperatorEnum) ProtoMessage() {} +func (*WebpageConditionOperatorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_webpage_condition_operator_d37a704d21612ea5, []int{0} +} +func (m *WebpageConditionOperatorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebpageConditionOperatorEnum.Unmarshal(m, b) +} +func (m *WebpageConditionOperatorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebpageConditionOperatorEnum.Marshal(b, m, deterministic) +} +func (dst *WebpageConditionOperatorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebpageConditionOperatorEnum.Merge(dst, src) +} +func (m *WebpageConditionOperatorEnum) XXX_Size() int { + return xxx_messageInfo_WebpageConditionOperatorEnum.Size(m) +} +func (m *WebpageConditionOperatorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_WebpageConditionOperatorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_WebpageConditionOperatorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*WebpageConditionOperatorEnum)(nil), "google.ads.googleads.v1.enums.WebpageConditionOperatorEnum") + proto.RegisterEnum("google.ads.googleads.v1.enums.WebpageConditionOperatorEnum_WebpageConditionOperator", WebpageConditionOperatorEnum_WebpageConditionOperator_name, WebpageConditionOperatorEnum_WebpageConditionOperator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/enums/webpage_condition_operator.proto", fileDescriptor_webpage_condition_operator_d37a704d21612ea5) +} + +var fileDescriptor_webpage_condition_operator_d37a704d21612ea5 = []byte{ + // 312 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xd1, 0x6a, 0xc2, 0x30, + 0x14, 0x9d, 0x15, 0xdc, 0x88, 0x83, 0x95, 0x3e, 0x8d, 0xa1, 0x0f, 0xfa, 0x01, 0x29, 0x65, 0x6f, + 0x19, 0x0c, 0xa2, 0xeb, 0x44, 0x36, 0xa2, 0x9b, 0x53, 0x61, 0x14, 0x24, 0x9a, 0x10, 0x0a, 0x9a, + 0x5b, 0x9a, 0xea, 0xfe, 0x67, 0x8f, 0xfb, 0x94, 0x7d, 0xca, 0x1e, 0xf7, 0x05, 0xa3, 0x89, 0xf5, + 0xad, 0x7b, 0x09, 0x87, 0x9c, 0x7b, 0xce, 0x3d, 0xf7, 0xa0, 0x7b, 0x05, 0xa0, 0xb6, 0x32, 0xe4, + 0xc2, 0x84, 0x0e, 0x96, 0xe8, 0x10, 0x85, 0x52, 0xef, 0x77, 0x26, 0xfc, 0x90, 0xeb, 0x8c, 0x2b, + 0xb9, 0xda, 0x80, 0x16, 0x69, 0x91, 0x82, 0x5e, 0x41, 0x26, 0x73, 0x5e, 0x40, 0x8e, 
0xb3, 0x1c, + 0x0a, 0x08, 0xba, 0x4e, 0x84, 0xb9, 0x30, 0xf8, 0xa4, 0xc7, 0x87, 0x08, 0x5b, 0xfd, 0x4d, 0xa7, + 0xb2, 0xcf, 0xd2, 0x90, 0x6b, 0x0d, 0x05, 0x2f, 0x4d, 0x8c, 0x13, 0xf7, 0x73, 0xd4, 0x59, 0xba, + 0x05, 0xc3, 0xca, 0x7f, 0x72, 0xb4, 0x8f, 0xf5, 0x7e, 0xd7, 0x7f, 0x45, 0xd7, 0x75, 0x7c, 0x70, + 0x85, 0xda, 0x73, 0x36, 0x9b, 0xc6, 0xc3, 0xf1, 0xe3, 0x38, 0x7e, 0xf0, 0xcf, 0x82, 0x36, 0x3a, + 0x9f, 0xb3, 0x27, 0x36, 0x59, 0x32, 0xbf, 0x11, 0x20, 0xd4, 0x8a, 0x5f, 0xe6, 0xf4, 0x79, 0xe6, + 0x7b, 0xc1, 0x25, 0xba, 0x18, 0x4e, 0xd8, 0x1b, 0x1d, 0xb3, 0x99, 0xdf, 0x1c, 0xfc, 0x36, 0x50, + 0x6f, 0x03, 0x3b, 0xfc, 0x6f, 0xee, 0x41, 0xb7, 0x6e, 0xef, 0xb4, 0x0c, 0x3e, 0x6d, 0xbc, 0x0f, + 0x8e, 0x7a, 0x05, 0x5b, 0xae, 0x15, 0x86, 0x5c, 0x85, 0x4a, 0x6a, 0x7b, 0x56, 0xd5, 0x63, 0x96, + 0x9a, 0x9a, 0x5a, 0xef, 0xec, 0xfb, 0xe9, 0x35, 0x47, 0x94, 0x7e, 0x79, 0xdd, 0x91, 0xb3, 0xa2, + 0xc2, 0x60, 0x07, 0x4b, 0xb4, 0x88, 0x70, 0xd9, 0x81, 0xf9, 0xae, 0xf8, 0x84, 0x0a, 0x93, 0x9c, + 0xf8, 0x64, 0x11, 0x25, 0x96, 0xff, 0xf1, 0x7a, 0xee, 0x93, 0x10, 0x2a, 0x0c, 0x21, 0xa7, 0x09, + 0x42, 0x16, 0x11, 0x21, 0x76, 0x66, 0xdd, 0xb2, 0xc1, 0x6e, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, + 0xc7, 0x74, 0xb9, 0x52, 0xee, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/account_budget_proposal_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/account_budget_proposal_error.pb.go new file mode 100644 index 0000000..a2c0b36 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/account_budget_proposal_error.pb.go @@ -0,0 +1,225 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/account_budget_proposal_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible account budget proposal errors. +type AccountBudgetProposalErrorEnum_AccountBudgetProposalError int32 + +const ( + // Enum unspecified. + AccountBudgetProposalErrorEnum_UNSPECIFIED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 0 + // The received error code is not known in this version. + AccountBudgetProposalErrorEnum_UNKNOWN AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 1 + // The field mask must be empty for create/end/remove proposals. + AccountBudgetProposalErrorEnum_FIELD_MASK_NOT_ALLOWED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 2 + // The field cannot be set because of the proposal type. + AccountBudgetProposalErrorEnum_IMMUTABLE_FIELD AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 3 + // The field is required because of the proposal type. + AccountBudgetProposalErrorEnum_REQUIRED_FIELD_MISSING AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 4 + // Proposals that have been approved cannot be cancelled. 
+ AccountBudgetProposalErrorEnum_CANNOT_CANCEL_APPROVED_PROPOSAL AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 5 + // Budgets that haven't been approved cannot be removed. + AccountBudgetProposalErrorEnum_CANNOT_REMOVE_UNAPPROVED_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 6 + // Budgets that are currently running cannot be removed. + AccountBudgetProposalErrorEnum_CANNOT_REMOVE_RUNNING_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 7 + // Budgets that haven't been approved cannot be truncated. + AccountBudgetProposalErrorEnum_CANNOT_END_UNAPPROVED_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 8 + // Only budgets that are currently running can be truncated. + AccountBudgetProposalErrorEnum_CANNOT_END_INACTIVE_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 9 + // All budgets must have names. + AccountBudgetProposalErrorEnum_BUDGET_NAME_REQUIRED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 10 + // Expired budgets cannot be edited after a sufficient amount of time has + // passed. + AccountBudgetProposalErrorEnum_CANNOT_UPDATE_OLD_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 11 + // It is not permissible to propose a new budget that ends in the past. + AccountBudgetProposalErrorEnum_CANNOT_END_IN_PAST AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 12 + // An expired budget cannot be extended to overlap with the running budget. + AccountBudgetProposalErrorEnum_CANNOT_EXTEND_END_TIME AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 13 + // A purchase order number is required. + AccountBudgetProposalErrorEnum_PURCHASE_ORDER_NUMBER_REQUIRED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 14 + // Budgets that have a pending update cannot be updated. + AccountBudgetProposalErrorEnum_PENDING_UPDATE_PROPOSAL_EXISTS AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 15 + // Cannot propose more than one budget when the corresponding billing setup + // hasn't been approved. + AccountBudgetProposalErrorEnum_MULTIPLE_BUDGETS_NOT_ALLOWED_FOR_UNAPPROVED_BILLING_SETUP AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 16 + // Cannot update the start time of a budget that has already started. + AccountBudgetProposalErrorEnum_CANNOT_UPDATE_START_TIME_FOR_STARTED_BUDGET AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 17 + // Cannot update the spending limit of a budget with an amount lower than + // what has already been spent. + AccountBudgetProposalErrorEnum_SPENDING_LIMIT_LOWER_THAN_ACCRUED_COST_NOT_ALLOWED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 18 + // Cannot propose a budget update without actually changing any fields. + AccountBudgetProposalErrorEnum_UPDATE_IS_NO_OP AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 19 + // The end time must come after the start time. + AccountBudgetProposalErrorEnum_END_TIME_MUST_FOLLOW_START_TIME AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 20 + // The budget's date range must fall within the date range of its billing + // setup. + AccountBudgetProposalErrorEnum_BUDGET_DATE_RANGE_INCOMPATIBLE_WITH_BILLING_SETUP AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 21 + // The user is not authorized to mutate budgets for the given billing setup. + AccountBudgetProposalErrorEnum_NOT_AUTHORIZED AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 22 + // Mutates are not allowed for the given billing setup.
+ AccountBudgetProposalErrorEnum_INVALID_BILLING_SETUP AccountBudgetProposalErrorEnum_AccountBudgetProposalError = 23 +) + +var AccountBudgetProposalErrorEnum_AccountBudgetProposalError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FIELD_MASK_NOT_ALLOWED", + 3: "IMMUTABLE_FIELD", + 4: "REQUIRED_FIELD_MISSING", + 5: "CANNOT_CANCEL_APPROVED_PROPOSAL", + 6: "CANNOT_REMOVE_UNAPPROVED_BUDGET", + 7: "CANNOT_REMOVE_RUNNING_BUDGET", + 8: "CANNOT_END_UNAPPROVED_BUDGET", + 9: "CANNOT_END_INACTIVE_BUDGET", + 10: "BUDGET_NAME_REQUIRED", + 11: "CANNOT_UPDATE_OLD_BUDGET", + 12: "CANNOT_END_IN_PAST", + 13: "CANNOT_EXTEND_END_TIME", + 14: "PURCHASE_ORDER_NUMBER_REQUIRED", + 15: "PENDING_UPDATE_PROPOSAL_EXISTS", + 16: "MULTIPLE_BUDGETS_NOT_ALLOWED_FOR_UNAPPROVED_BILLING_SETUP", + 17: "CANNOT_UPDATE_START_TIME_FOR_STARTED_BUDGET", + 18: "SPENDING_LIMIT_LOWER_THAN_ACCRUED_COST_NOT_ALLOWED", + 19: "UPDATE_IS_NO_OP", + 20: "END_TIME_MUST_FOLLOW_START_TIME", + 21: "BUDGET_DATE_RANGE_INCOMPATIBLE_WITH_BILLING_SETUP", + 22: "NOT_AUTHORIZED", + 23: "INVALID_BILLING_SETUP", +} +var AccountBudgetProposalErrorEnum_AccountBudgetProposalError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FIELD_MASK_NOT_ALLOWED": 2, + "IMMUTABLE_FIELD": 3, + "REQUIRED_FIELD_MISSING": 4, + "CANNOT_CANCEL_APPROVED_PROPOSAL": 5, + "CANNOT_REMOVE_UNAPPROVED_BUDGET": 6, + "CANNOT_REMOVE_RUNNING_BUDGET": 7, + "CANNOT_END_UNAPPROVED_BUDGET": 8, + "CANNOT_END_INACTIVE_BUDGET": 9, + "BUDGET_NAME_REQUIRED": 10, + "CANNOT_UPDATE_OLD_BUDGET": 11, + "CANNOT_END_IN_PAST": 12, + "CANNOT_EXTEND_END_TIME": 13, + "PURCHASE_ORDER_NUMBER_REQUIRED": 14, + "PENDING_UPDATE_PROPOSAL_EXISTS": 15, + "MULTIPLE_BUDGETS_NOT_ALLOWED_FOR_UNAPPROVED_BILLING_SETUP": 16, + "CANNOT_UPDATE_START_TIME_FOR_STARTED_BUDGET": 17, + "SPENDING_LIMIT_LOWER_THAN_ACCRUED_COST_NOT_ALLOWED": 18, + "UPDATE_IS_NO_OP": 19, + "END_TIME_MUST_FOLLOW_START_TIME": 20, + "BUDGET_DATE_RANGE_INCOMPATIBLE_WITH_BILLING_SETUP": 21, + "NOT_AUTHORIZED": 22, + "INVALID_BILLING_SETUP": 23, +} + +func (x AccountBudgetProposalErrorEnum_AccountBudgetProposalError) String() string { + return proto.EnumName(AccountBudgetProposalErrorEnum_AccountBudgetProposalError_name, int32(x)) +} +func (AccountBudgetProposalErrorEnum_AccountBudgetProposalError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_error_6eb4a0167888d036, []int{0, 0} +} + +// Container for enum describing possible account budget proposal errors. 
+type AccountBudgetProposalErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetProposalErrorEnum) Reset() { *m = AccountBudgetProposalErrorEnum{} } +func (m *AccountBudgetProposalErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetProposalErrorEnum) ProtoMessage() {} +func (*AccountBudgetProposalErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_error_6eb4a0167888d036, []int{0} +} +func (m *AccountBudgetProposalErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetProposalErrorEnum.Unmarshal(m, b) +} +func (m *AccountBudgetProposalErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetProposalErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetProposalErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetProposalErrorEnum.Merge(dst, src) +} +func (m *AccountBudgetProposalErrorEnum) XXX_Size() int { + return xxx_messageInfo_AccountBudgetProposalErrorEnum.Size(m) +} +func (m *AccountBudgetProposalErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetProposalErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetProposalErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AccountBudgetProposalErrorEnum)(nil), "google.ads.googleads.v1.errors.AccountBudgetProposalErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AccountBudgetProposalErrorEnum_AccountBudgetProposalError", AccountBudgetProposalErrorEnum_AccountBudgetProposalError_name, AccountBudgetProposalErrorEnum_AccountBudgetProposalError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/account_budget_proposal_error.proto", fileDescriptor_account_budget_proposal_error_6eb4a0167888d036) +} + +var fileDescriptor_account_budget_proposal_error_6eb4a0167888d036 = []byte{ + // 686 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xcb, 0x6e, 0x13, 0x3d, + 0x14, 0xfe, 0x9b, 0xfe, 0xa4, 0xe0, 0x42, 0x6b, 0xdc, 0x0b, 0x25, 0xaa, 0x52, 0x14, 0x96, 0x48, + 0x13, 0x05, 0x04, 0x12, 0x83, 0x58, 0x38, 0x33, 0x4e, 0x62, 0x75, 0xc6, 0x36, 0xbe, 0xa4, 0x55, + 0x15, 0xc9, 0x4a, 0x9b, 0x28, 0xaa, 0xd4, 0x66, 0xa2, 0x4c, 0xda, 0x07, 0x62, 0xc9, 0x8e, 0xd7, + 0x60, 0xcd, 0x53, 0xf0, 0x08, 0xac, 0x90, 0x67, 0x32, 0x69, 0x52, 0x51, 0x16, 0x51, 0xce, 0xd8, + 0xdf, 0xe5, 0x9c, 0x63, 0x1f, 0x83, 0xe6, 0x28, 0x49, 0x46, 0x57, 0xc3, 0x7a, 0x7f, 0x90, 0xd6, + 0xf3, 0xd0, 0x45, 0xb7, 0x8d, 0xfa, 0x70, 0x3a, 0x4d, 0xa6, 0x69, 0xbd, 0x7f, 0x71, 0x91, 0xdc, + 0x8c, 0x67, 0xf6, 0xfc, 0x66, 0x30, 0x1a, 0xce, 0xec, 0x64, 0x9a, 0x4c, 0x92, 0xb4, 0x7f, 0x65, + 0xb3, 0x6d, 0x6f, 0x32, 0x4d, 0x66, 0x09, 0xaa, 0xe6, 0x44, 0xaf, 0x3f, 0x48, 0xbd, 0x85, 0x86, + 0x77, 0xdb, 0xf0, 0x72, 0x8d, 0xca, 0x61, 0xe1, 0x31, 0xb9, 0xac, 0xf7, 0xc7, 0xe3, 0x64, 0xd6, + 0x9f, 0x5d, 0x26, 0xe3, 0x34, 0x67, 0xd7, 0x7e, 0x96, 0x41, 0x15, 0xe7, 0x2e, 0xcd, 0xcc, 0x44, + 0xcc, 0x3d, 0x88, 0x63, 0x93, 0xf1, 0xcd, 0x75, 0xed, 0x7b, 0x19, 0x54, 0x1e, 0x86, 0xa0, 0x6d, + 0xb0, 0x69, 0x98, 0x12, 0x24, 0xa0, 0x2d, 0x4a, 0x42, 0xf8, 0x1f, 0xda, 0x04, 0x1b, 0x86, 0x1d, + 0x33, 0x7e, 0xc2, 0xe0, 0x1a, 0xaa, 0x80, 0xfd, 0x16, 0x25, 0x51, 0x68, 0x63, 0xac, 0x8e, 0x2d, + 0xe3, 0xda, 0xe2, 0x28, 0xe2, 0x27, 0x24, 0x84, 0x25, 0xb4, 0x03, 0xb6, 0x69, 0x1c, 0x1b, 0x8d, + 0x9b, 
0x11, 0xb1, 0x19, 0x0a, 0xae, 0x3b, 0x82, 0x24, 0x5f, 0x0c, 0x95, 0x24, 0xb4, 0x73, 0x26, + 0x55, 0x8a, 0xb2, 0x36, 0xfc, 0x1f, 0xbd, 0x06, 0x47, 0x01, 0x66, 0x4e, 0x24, 0xc0, 0x2c, 0x20, + 0x91, 0xc5, 0x42, 0x48, 0xde, 0x25, 0xa1, 0x15, 0x92, 0x0b, 0xae, 0x70, 0x04, 0x1f, 0x2d, 0x81, + 0x24, 0x89, 0x79, 0x97, 0x58, 0xc3, 0x16, 0xb0, 0xa6, 0x09, 0xdb, 0x44, 0xc3, 0x32, 0x7a, 0x05, + 0x0e, 0x57, 0x41, 0xd2, 0x30, 0x46, 0x59, 0xbb, 0x40, 0x6c, 0x2c, 0x21, 0x08, 0x0b, 0xff, 0xa2, + 0xf1, 0x18, 0x55, 0x41, 0x65, 0x09, 0x41, 0x19, 0x0e, 0x34, 0xed, 0x92, 0x62, 0xff, 0x09, 0x3a, + 0x00, 0xbb, 0x79, 0x6c, 0x19, 0x8e, 0x89, 0x2d, 0xaa, 0x82, 0x00, 0x1d, 0x82, 0x83, 0x39, 0xd3, + 0x88, 0x10, 0x6b, 0x62, 0x79, 0xb4, 0xd0, 0xdd, 0x44, 0xfb, 0x00, 0xad, 0xe8, 0x5a, 0x81, 0x95, + 0x86, 0x4f, 0x5d, 0x67, 0x8a, 0xf5, 0x53, 0xed, 0xb6, 0xdc, 0x4f, 0xd3, 0x98, 0xc0, 0x67, 0xa8, + 0x06, 0xaa, 0xc2, 0xc8, 0xa0, 0x83, 0x15, 0xb1, 0x5c, 0x86, 0x44, 0x5a, 0x66, 0xe2, 0x26, 0x91, + 0x77, 0xae, 0x5b, 0x19, 0x86, 0xb0, 0xd0, 0x55, 0x39, 0xb7, 0x2d, 0xba, 0x66, 0xc9, 0x29, 0x55, + 0x5a, 0xc1, 0x6d, 0xf4, 0x19, 0x7c, 0x8c, 0x4d, 0xa4, 0xa9, 0x88, 0x8a, 0x42, 0xd4, 0xf2, 0xa1, + 0xd9, 0x16, 0x97, 0x2b, 0xbd, 0xa0, 0x51, 0xe4, 0xf4, 0x14, 0xd1, 0x46, 0x40, 0x88, 0xea, 0xe0, + 0xcd, 0x6a, 0x61, 0x4a, 0x63, 0xa9, 0xb3, 0x24, 0x33, 0x6a, 0xf6, 0x79, 0xd7, 0xc3, 0xe7, 0xe8, + 0x03, 0x78, 0xab, 0x8a, 0xa4, 0x22, 0x1a, 0x53, 0x6d, 0x9d, 0x8f, 0xb4, 0xba, 0x83, 0x99, 0xc5, + 0x41, 0x20, 0x0d, 0x09, 0x6d, 0xc0, 0x95, 0x5e, 0xb9, 0x3a, 0xc8, 0x5d, 0x9d, 0xb9, 0x03, 0x75, + 0x09, 0x5a, 0x2e, 0xe0, 0x8e, 0x3b, 0xf9, 0xa2, 0x25, 0x36, 0x36, 0x4a, 0xdb, 0x16, 0x77, 0x84, + 0xa5, 0x24, 0xe0, 0x2e, 0x7a, 0x0f, 0x1a, 0xf3, 0x53, 0xc9, 0xe8, 0x12, 0xb3, 0x36, 0xb1, 0x94, + 0x05, 0x3c, 0x16, 0x58, 0x53, 0x77, 0x13, 0x4f, 0xa8, 0xee, 0xdc, 0xab, 0x6c, 0x0f, 0x21, 0xb0, + 0x95, 0x65, 0x60, 0x74, 0x87, 0x4b, 0x7a, 0x46, 0x42, 0xb8, 0x8f, 0x5e, 0x82, 0x3d, 0xca, 0xba, + 0x38, 0xa2, 0xf7, 0x1b, 0xf1, 0xa2, 0xf9, 0x7b, 0x0d, 0xd4, 0x2e, 0x92, 0x6b, 0xef, 0xdf, 0xb3, + 0xd9, 0x3c, 0x7a, 0x78, 0xae, 0x84, 0x1b, 0x4f, 0xb1, 0x76, 0x16, 0xce, 0x25, 0x46, 0xc9, 0x55, + 0x7f, 0x3c, 0xf2, 0x92, 0xe9, 0xa8, 0x3e, 0x1a, 0x8e, 0xb3, 0xe1, 0x2d, 0x9e, 0x8c, 0xc9, 0x65, + 0xfa, 0xd0, 0x0b, 0xf2, 0x29, 0xff, 0xfb, 0x5a, 0x5a, 0x6f, 0x63, 0xfc, 0xad, 0x54, 0x6d, 0xe7, + 0x62, 0x78, 0x90, 0x7a, 0x79, 0xe8, 0xa2, 0x6e, 0xc3, 0xcb, 0x2c, 0xd3, 0x1f, 0x05, 0xa0, 0x87, + 0x07, 0x69, 0x6f, 0x01, 0xe8, 0x75, 0x1b, 0xbd, 0x1c, 0xf0, 0xab, 0x54, 0xcb, 0x57, 0x7d, 0x1f, + 0x0f, 0x52, 0xdf, 0x5f, 0x40, 0x7c, 0xbf, 0xdb, 0xf0, 0xfd, 0x1c, 0x74, 0x5e, 0xce, 0xb2, 0x7b, + 0xf7, 0x27, 0x00, 0x00, 0xff, 0xff, 0xaf, 0x81, 0x26, 0x78, 0xde, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_customizer_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_customizer_error.pb.go new file mode 100644 index 0000000..edfb4c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_customizer_error.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_customizer_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad customizer errors. +type AdCustomizerErrorEnum_AdCustomizerError int32 + +const ( + // Enum unspecified. + AdCustomizerErrorEnum_UNSPECIFIED AdCustomizerErrorEnum_AdCustomizerError = 0 + // The received error code is not known in this version. + AdCustomizerErrorEnum_UNKNOWN AdCustomizerErrorEnum_AdCustomizerError = 1 + // Invalid date argument in countdown function. + AdCustomizerErrorEnum_COUNTDOWN_INVALID_DATE_FORMAT AdCustomizerErrorEnum_AdCustomizerError = 2 + // Countdown end date is in the past. + AdCustomizerErrorEnum_COUNTDOWN_DATE_IN_PAST AdCustomizerErrorEnum_AdCustomizerError = 3 + // Invalid locale string in countdown function. + AdCustomizerErrorEnum_COUNTDOWN_INVALID_LOCALE AdCustomizerErrorEnum_AdCustomizerError = 4 + // Days-before argument to countdown function is not positive. + AdCustomizerErrorEnum_COUNTDOWN_INVALID_START_DAYS_BEFORE AdCustomizerErrorEnum_AdCustomizerError = 5 + // A user list referenced in an IF function does not exist. + AdCustomizerErrorEnum_UNKNOWN_USER_LIST AdCustomizerErrorEnum_AdCustomizerError = 6 +) + +var AdCustomizerErrorEnum_AdCustomizerError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "COUNTDOWN_INVALID_DATE_FORMAT", + 3: "COUNTDOWN_DATE_IN_PAST", + 4: "COUNTDOWN_INVALID_LOCALE", + 5: "COUNTDOWN_INVALID_START_DAYS_BEFORE", + 6: "UNKNOWN_USER_LIST", +} +var AdCustomizerErrorEnum_AdCustomizerError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "COUNTDOWN_INVALID_DATE_FORMAT": 2, + "COUNTDOWN_DATE_IN_PAST": 3, + "COUNTDOWN_INVALID_LOCALE": 4, + "COUNTDOWN_INVALID_START_DAYS_BEFORE": 5, + "UNKNOWN_USER_LIST": 6, +} + +func (x AdCustomizerErrorEnum_AdCustomizerError) String() string { + return proto.EnumName(AdCustomizerErrorEnum_AdCustomizerError_name, int32(x)) +} +func (AdCustomizerErrorEnum_AdCustomizerError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_customizer_error_7520f04803e89b43, []int{0, 0} +} + +// Container for enum describing possible ad customizer errors. 
+type AdCustomizerErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdCustomizerErrorEnum) Reset() { *m = AdCustomizerErrorEnum{} } +func (m *AdCustomizerErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdCustomizerErrorEnum) ProtoMessage() {} +func (*AdCustomizerErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_customizer_error_7520f04803e89b43, []int{0} +} +func (m *AdCustomizerErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdCustomizerErrorEnum.Unmarshal(m, b) +} +func (m *AdCustomizerErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdCustomizerErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdCustomizerErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdCustomizerErrorEnum.Merge(dst, src) +} +func (m *AdCustomizerErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdCustomizerErrorEnum.Size(m) +} +func (m *AdCustomizerErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdCustomizerErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdCustomizerErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdCustomizerErrorEnum)(nil), "google.ads.googleads.v1.errors.AdCustomizerErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdCustomizerErrorEnum_AdCustomizerError", AdCustomizerErrorEnum_AdCustomizerError_name, AdCustomizerErrorEnum_AdCustomizerError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_customizer_error.proto", fileDescriptor_ad_customizer_error_7520f04803e89b43) +} + +var fileDescriptor_ad_customizer_error_7520f04803e89b43 = []byte{ + // 386 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcf, 0x8a, 0xd4, 0x40, + 0x10, 0xc6, 0x4d, 0x56, 0x57, 0xe8, 0x3d, 0x98, 0x6d, 0xd8, 0x45, 0x96, 0x75, 0xc1, 0x78, 0xf0, + 0xd6, 0x21, 0x78, 0x91, 0xf6, 0xd4, 0x93, 0xf4, 0x0c, 0xc1, 0xd8, 0x09, 0xf9, 0x37, 0x28, 0x81, + 0x26, 0x4e, 0x42, 0x08, 0xcc, 0xa4, 0x87, 0x74, 0x66, 0x0e, 0x3e, 0x8e, 0x47, 0x1f, 0xc5, 0x83, + 0xcf, 0x21, 0xde, 0x7c, 0x03, 0x49, 0x7a, 0x26, 0x1e, 0x82, 0x9e, 0xf2, 0x51, 0xf5, 0xfb, 0x2a, + 0xd5, 0x5f, 0x81, 0xb7, 0xb5, 0x10, 0xf5, 0xb6, 0xb2, 0x8a, 0x52, 0x5a, 0x4a, 0x0e, 0xea, 0x68, + 0x5b, 0x55, 0xd7, 0x89, 0x4e, 0x5a, 0x45, 0xc9, 0x37, 0x07, 0xd9, 0x8b, 0x5d, 0xf3, 0xa5, 0xea, + 0xf8, 0x58, 0x44, 0xfb, 0x4e, 0xf4, 0x02, 0x3e, 0x28, 0x1c, 0x15, 0xa5, 0x44, 0x93, 0x13, 0x1d, + 0x6d, 0xa4, 0x9c, 0x77, 0xf7, 0xe7, 0xc9, 0xfb, 0xc6, 0x2a, 0xda, 0x56, 0xf4, 0x45, 0xdf, 0x88, + 0x56, 0x2a, 0xb7, 0xf9, 0x53, 0x03, 0x37, 0xa4, 0x74, 0xa6, 0xd1, 0x74, 0x30, 0xd1, 0xf6, 0xb0, + 0x33, 0x7f, 0x68, 0xe0, 0x7a, 0xd6, 0x81, 0xcf, 0xc0, 0x55, 0xca, 0xe2, 0x90, 0x3a, 0xde, 0xd2, + 0xa3, 0xae, 0xf1, 0x08, 0x5e, 0x81, 0xa7, 0x29, 0x7b, 0xcf, 0x82, 0x35, 0x33, 0x34, 0xf8, 0x12, + 0xbc, 0x70, 0x82, 0x94, 0x25, 0x6e, 0xb0, 0x66, 0xdc, 0x63, 0x19, 0xf1, 0x3d, 0x97, 0xbb, 0x24, + 0xa1, 0x7c, 0x19, 0x44, 0x1f, 0x48, 0x62, 0xe8, 0xf0, 0x0e, 0xdc, 0xfe, 0x45, 0xc6, 0x96, 0xc7, + 0x78, 0x48, 0xe2, 0xc4, 0xb8, 0x80, 0xf7, 0xe0, 0xf9, 0xdc, 0xee, 0x07, 0x0e, 0xf1, 0xa9, 0xf1, + 0x18, 0xbe, 0x06, 0xaf, 0xe6, 0xdd, 0x38, 0x21, 0x51, 0xc2, 0x5d, 0xf2, 0x31, 0xe6, 0x0b, 0xba, + 0x0c, 0x22, 0x6a, 0x3c, 0x81, 0x37, 0xe0, 0xfa, 0xb4, 0x12, 0x4f, 0x63, 0x1a, 0x71, 0xdf, 0x8b, + 0x13, 0xe3, 0x72, 0xf1, 0x5b, 0x03, 0xe6, 0x46, 0xec, 0xd0, 0xff, 0xf3, 
0x5a, 0xdc, 0xce, 0x1e, + 0x1d, 0x0e, 0x49, 0x85, 0xda, 0x27, 0xf7, 0xe4, 0xac, 0xc5, 0xb6, 0x68, 0x6b, 0x24, 0xba, 0xda, + 0xaa, 0xab, 0x76, 0xcc, 0xf1, 0x7c, 0xb3, 0x7d, 0x23, 0xff, 0x75, 0xc2, 0x77, 0xea, 0xf3, 0x55, + 0xbf, 0x58, 0x11, 0xf2, 0x4d, 0x7f, 0x58, 0xa9, 0x61, 0xa4, 0x94, 0x48, 0xc9, 0x41, 0x65, 0x36, + 0x1a, 0x7f, 0x29, 0xbf, 0x9f, 0x81, 0x9c, 0x94, 0x32, 0x9f, 0x80, 0x3c, 0xb3, 0x73, 0x05, 0xfc, + 0xd2, 0x4d, 0x55, 0xc5, 0x98, 0x94, 0x12, 0xe3, 0x09, 0xc1, 0x38, 0xb3, 0x31, 0x56, 0xd0, 0xe7, + 0xcb, 0x71, 0xbb, 0x37, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x68, 0x50, 0x94, 0xb3, 0x5f, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_error.pb.go new file mode 100644 index 0000000..10b84f1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_error.pb.go @@ -0,0 +1,758 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad errors. +type AdErrorEnum_AdError int32 + +const ( + // Enum unspecified. + AdErrorEnum_UNSPECIFIED AdErrorEnum_AdError = 0 + // The received error code is not known in this version. + AdErrorEnum_UNKNOWN AdErrorEnum_AdError = 1 + // Ad customizers are not supported for ad type. + AdErrorEnum_AD_CUSTOMIZERS_NOT_SUPPORTED_FOR_AD_TYPE AdErrorEnum_AdError = 2 + // Estimating character sizes the string is too long. + AdErrorEnum_APPROXIMATELY_TOO_LONG AdErrorEnum_AdError = 3 + // Estimating character sizes the string is too short. + AdErrorEnum_APPROXIMATELY_TOO_SHORT AdErrorEnum_AdError = 4 + // There is a problem with the snippet. + AdErrorEnum_BAD_SNIPPET AdErrorEnum_AdError = 5 + // Cannot modify an ad. + AdErrorEnum_CANNOT_MODIFY_AD AdErrorEnum_AdError = 6 + // business name and url cannot be set at the same time + AdErrorEnum_CANNOT_SET_BUSINESS_NAME_IF_URL_SET AdErrorEnum_AdError = 7 + // The specified field is incompatible with this ad's type or settings. + AdErrorEnum_CANNOT_SET_FIELD AdErrorEnum_AdError = 8 + // Cannot set field when originAdId is set. + AdErrorEnum_CANNOT_SET_FIELD_WITH_ORIGIN_AD_ID_SET AdErrorEnum_AdError = 9 + // Cannot set field when an existing ad id is set for sharing. + AdErrorEnum_CANNOT_SET_FIELD_WITH_AD_ID_SET_FOR_SHARING AdErrorEnum_AdError = 10 + // Cannot set allowFlexibleColor false if no color is provided by user. + AdErrorEnum_CANNOT_SET_ALLOW_FLEXIBLE_COLOR_FALSE AdErrorEnum_AdError = 11 + // When user select native, no color control is allowed because we will + // always respect publisher color for native format serving. 
+ AdErrorEnum_CANNOT_SET_COLOR_CONTROL_WHEN_NATIVE_FORMAT_SETTING AdErrorEnum_AdError = 12 + // Cannot specify a url for the ad type + AdErrorEnum_CANNOT_SET_URL AdErrorEnum_AdError = 13 + // Cannot specify a tracking or mobile url without also setting final urls + AdErrorEnum_CANNOT_SET_WITHOUT_FINAL_URLS AdErrorEnum_AdError = 14 + // Cannot specify a legacy url and a final url simultaneously + AdErrorEnum_CANNOT_SET_WITH_FINAL_URLS AdErrorEnum_AdError = 15 + // Cannot specify urls in UrlData and in template fields simultaneously. + AdErrorEnum_CANNOT_SET_WITH_URL_DATA AdErrorEnum_AdError = 17 + // This operator cannot be used with a subclass of Ad. + AdErrorEnum_CANNOT_USE_AD_SUBCLASS_FOR_OPERATOR AdErrorEnum_AdError = 18 + // Customer is not approved for mobile ads. + AdErrorEnum_CUSTOMER_NOT_APPROVED_MOBILEADS AdErrorEnum_AdError = 19 + // Customer is not approved for 3PAS richmedia ads. + AdErrorEnum_CUSTOMER_NOT_APPROVED_THIRDPARTY_ADS AdErrorEnum_AdError = 20 + // Customer is not approved for 3PAS redirect richmedia (Ad Exchange) ads. + AdErrorEnum_CUSTOMER_NOT_APPROVED_THIRDPARTY_REDIRECT_ADS AdErrorEnum_AdError = 21 + // Not an eligible customer + AdErrorEnum_CUSTOMER_NOT_ELIGIBLE AdErrorEnum_AdError = 22 + // Customer is not eligible for updating beacon url + AdErrorEnum_CUSTOMER_NOT_ELIGIBLE_FOR_UPDATING_BEACON_URL AdErrorEnum_AdError = 23 + // There already exists an ad with the same dimensions in the union. + AdErrorEnum_DIMENSION_ALREADY_IN_UNION AdErrorEnum_AdError = 24 + // Ad's dimension must be set before setting union dimension. + AdErrorEnum_DIMENSION_MUST_BE_SET AdErrorEnum_AdError = 25 + // Ad's dimension must be included in the union dimensions. + AdErrorEnum_DIMENSION_NOT_IN_UNION AdErrorEnum_AdError = 26 + // Display Url cannot be specified (applies to Ad Exchange Ads) + AdErrorEnum_DISPLAY_URL_CANNOT_BE_SPECIFIED AdErrorEnum_AdError = 27 + // Telephone number contains invalid characters or invalid format. Please + // re-enter your number using digits (0-9), dashes (-), and parentheses + // only. + AdErrorEnum_DOMESTIC_PHONE_NUMBER_FORMAT AdErrorEnum_AdError = 28 + // Emergency telephone numbers are not allowed. Please enter a valid + // domestic phone number to connect customers to your business. + AdErrorEnum_EMERGENCY_PHONE_NUMBER AdErrorEnum_AdError = 29 + // A required field was not specified or is an empty string. + AdErrorEnum_EMPTY_FIELD AdErrorEnum_AdError = 30 + // A feed attribute referenced in an ad customizer tag is not in the ad + // customizer mapping for the feed. + AdErrorEnum_FEED_ATTRIBUTE_MUST_HAVE_MAPPING_FOR_TYPE_ID AdErrorEnum_AdError = 31 + // The ad customizer field mapping for the feed attribute does not match the + // expected field type. + AdErrorEnum_FEED_ATTRIBUTE_MAPPING_TYPE_MISMATCH AdErrorEnum_AdError = 32 + // The use of ad customizer tags in the ad text is disallowed. Details in + // trigger. + AdErrorEnum_ILLEGAL_AD_CUSTOMIZER_TAG_USE AdErrorEnum_AdError = 33 + // Tags of the form {PH_x}, where x is a number, are disallowed in ad text. + AdErrorEnum_ILLEGAL_TAG_USE AdErrorEnum_AdError = 34 + // The dimensions of the ad are specified or derived in multiple ways and + // are not consistent. + AdErrorEnum_INCONSISTENT_DIMENSIONS AdErrorEnum_AdError = 35 + // The status cannot differ among template ads of the same union. + AdErrorEnum_INCONSISTENT_STATUS_IN_TEMPLATE_UNION AdErrorEnum_AdError = 36 + // The length of the string is not valid.
+ AdErrorEnum_INCORRECT_LENGTH AdErrorEnum_AdError = 37 + // The ad is ineligible for upgrade. + AdErrorEnum_INELIGIBLE_FOR_UPGRADE AdErrorEnum_AdError = 38 + // User cannot create mobile ad for countries targeted in specified + // campaign. + AdErrorEnum_INVALID_AD_ADDRESS_CAMPAIGN_TARGET AdErrorEnum_AdError = 39 + // Invalid Ad type. A specific type of Ad is required. + AdErrorEnum_INVALID_AD_TYPE AdErrorEnum_AdError = 40 + // Headline, description or phone cannot be present when creating mobile + // image ad. + AdErrorEnum_INVALID_ATTRIBUTES_FOR_MOBILE_IMAGE AdErrorEnum_AdError = 41 + // Image cannot be present when creating mobile text ad. + AdErrorEnum_INVALID_ATTRIBUTES_FOR_MOBILE_TEXT AdErrorEnum_AdError = 42 + // Invalid call to action text. + AdErrorEnum_INVALID_CALL_TO_ACTION_TEXT AdErrorEnum_AdError = 43 + // Invalid character in URL. + AdErrorEnum_INVALID_CHARACTER_FOR_URL AdErrorEnum_AdError = 44 + // Creative's country code is not valid. + AdErrorEnum_INVALID_COUNTRY_CODE AdErrorEnum_AdError = 45 + // Invalid use of Expanded Dynamic Search Ads tags ({lpurl} etc.) + AdErrorEnum_INVALID_EXPANDED_DYNAMIC_SEARCH_AD_TAG AdErrorEnum_AdError = 47 + // An input error whose real reason was not properly mapped (should not + // happen). + AdErrorEnum_INVALID_INPUT AdErrorEnum_AdError = 48 + // An invalid markup language was entered. + AdErrorEnum_INVALID_MARKUP_LANGUAGE AdErrorEnum_AdError = 49 + // An invalid mobile carrier was entered. + AdErrorEnum_INVALID_MOBILE_CARRIER AdErrorEnum_AdError = 50 + // Specified mobile carriers target a country not targeted by the campaign. + AdErrorEnum_INVALID_MOBILE_CARRIER_TARGET AdErrorEnum_AdError = 51 + // Wrong number of elements for given element type + AdErrorEnum_INVALID_NUMBER_OF_ELEMENTS AdErrorEnum_AdError = 52 + // The format of the telephone number is incorrect. Please re-enter the + // number using the correct format. + AdErrorEnum_INVALID_PHONE_NUMBER_FORMAT AdErrorEnum_AdError = 53 + // The certified vendor format id is incorrect. + AdErrorEnum_INVALID_RICH_MEDIA_CERTIFIED_VENDOR_FORMAT_ID AdErrorEnum_AdError = 54 + // The template ad data contains validation errors. + AdErrorEnum_INVALID_TEMPLATE_DATA AdErrorEnum_AdError = 55 + // The template field doesn't have the correct type. + AdErrorEnum_INVALID_TEMPLATE_ELEMENT_FIELD_TYPE AdErrorEnum_AdError = 56 + // Invalid template id. + AdErrorEnum_INVALID_TEMPLATE_ID AdErrorEnum_AdError = 57 + // After substituting replacement strings, the line is too wide. + AdErrorEnum_LINE_TOO_WIDE AdErrorEnum_AdError = 58 + // The feed referenced must have ad customizer mapping to be used in a + // customizer tag. + AdErrorEnum_MISSING_AD_CUSTOMIZER_MAPPING AdErrorEnum_AdError = 59 + // Missing address component in template element address field. + AdErrorEnum_MISSING_ADDRESS_COMPONENT AdErrorEnum_AdError = 60 + // An ad name must be entered. + AdErrorEnum_MISSING_ADVERTISEMENT_NAME AdErrorEnum_AdError = 61 + // Business name must be entered. + AdErrorEnum_MISSING_BUSINESS_NAME AdErrorEnum_AdError = 62 + // Description (line 2) must be entered. + AdErrorEnum_MISSING_DESCRIPTION1 AdErrorEnum_AdError = 63 + // Description (line 3) must be entered. + AdErrorEnum_MISSING_DESCRIPTION2 AdErrorEnum_AdError = 64 + // The destination url must contain at least one tag (e.g. {lpurl}) + AdErrorEnum_MISSING_DESTINATION_URL_TAG AdErrorEnum_AdError = 65 + // The tracking url template of ExpandedDynamicSearchAd must contain at + // least one tag. (e.g.
{lpurl}) + AdErrorEnum_MISSING_LANDING_PAGE_URL_TAG AdErrorEnum_AdError = 66 + // A valid dimension must be specified for this ad. + AdErrorEnum_MISSING_DIMENSION AdErrorEnum_AdError = 67 + // A display URL must be entered. + AdErrorEnum_MISSING_DISPLAY_URL AdErrorEnum_AdError = 68 + // Headline must be entered. + AdErrorEnum_MISSING_HEADLINE AdErrorEnum_AdError = 69 + // A height must be entered. + AdErrorEnum_MISSING_HEIGHT AdErrorEnum_AdError = 70 + // An image must be entered. + AdErrorEnum_MISSING_IMAGE AdErrorEnum_AdError = 71 + // Marketing image or product videos are required. + AdErrorEnum_MISSING_MARKETING_IMAGE_OR_PRODUCT_VIDEOS AdErrorEnum_AdError = 72 + // The markup language in which your site is written must be entered. + AdErrorEnum_MISSING_MARKUP_LANGUAGES AdErrorEnum_AdError = 73 + // A mobile carrier must be entered. + AdErrorEnum_MISSING_MOBILE_CARRIER AdErrorEnum_AdError = 74 + // Phone number must be entered. + AdErrorEnum_MISSING_PHONE AdErrorEnum_AdError = 75 + // Missing required template fields + AdErrorEnum_MISSING_REQUIRED_TEMPLATE_FIELDS AdErrorEnum_AdError = 76 + // Missing a required field value + AdErrorEnum_MISSING_TEMPLATE_FIELD_VALUE AdErrorEnum_AdError = 77 + // The ad must have text. + AdErrorEnum_MISSING_TEXT AdErrorEnum_AdError = 78 + // A visible URL must be entered. + AdErrorEnum_MISSING_VISIBLE_URL AdErrorEnum_AdError = 79 + // A width must be entered. + AdErrorEnum_MISSING_WIDTH AdErrorEnum_AdError = 80 + // Only 1 feed can be used as the source of ad customizer substitutions in a + // single ad. + AdErrorEnum_MULTIPLE_DISTINCT_FEEDS_UNSUPPORTED AdErrorEnum_AdError = 81 + // TempAdUnionId must be used when adding template ads. + AdErrorEnum_MUST_USE_TEMP_AD_UNION_ID_ON_ADD AdErrorEnum_AdError = 82 + // The string has too many characters. + AdErrorEnum_TOO_LONG AdErrorEnum_AdError = 83 + // The string has too few characters. + AdErrorEnum_TOO_SHORT AdErrorEnum_AdError = 84 + // Ad union dimensions cannot change for saved ads. + AdErrorEnum_UNION_DIMENSIONS_CANNOT_CHANGE AdErrorEnum_AdError = 85 + // Address component is not {country, lat, lng}. + AdErrorEnum_UNKNOWN_ADDRESS_COMPONENT AdErrorEnum_AdError = 86 + // Unknown unique field name + AdErrorEnum_UNKNOWN_FIELD_NAME AdErrorEnum_AdError = 87 + // Unknown unique name (template element type specifier) + AdErrorEnum_UNKNOWN_UNIQUE_NAME AdErrorEnum_AdError = 88 + // Unsupported ad dimension + AdErrorEnum_UNSUPPORTED_DIMENSIONS AdErrorEnum_AdError = 89 + // URL starts with an invalid scheme. + AdErrorEnum_URL_INVALID_SCHEME AdErrorEnum_AdError = 90 + // URL ends with an invalid top-level domain name. + AdErrorEnum_URL_INVALID_TOP_LEVEL_DOMAIN AdErrorEnum_AdError = 91 + // URL contains illegal characters. + AdErrorEnum_URL_MALFORMED AdErrorEnum_AdError = 92 + // URL must contain a host name. + AdErrorEnum_URL_NO_HOST AdErrorEnum_AdError = 93 + // URL not equivalent during upgrade. + AdErrorEnum_URL_NOT_EQUIVALENT AdErrorEnum_AdError = 94 + // URL host name too long to be stored as visible URL (applies to Ad + // Exchange ads) + AdErrorEnum_URL_HOST_NAME_TOO_LONG AdErrorEnum_AdError = 95 + // URL must start with a scheme. + AdErrorEnum_URL_NO_SCHEME AdErrorEnum_AdError = 96 + // URL should end in a valid domain extension, such as .com or .net. + AdErrorEnum_URL_NO_TOP_LEVEL_DOMAIN AdErrorEnum_AdError = 97 + // URL must not end with a path. + AdErrorEnum_URL_PATH_NOT_ALLOWED AdErrorEnum_AdError = 98 + // URL must not specify a port.
+ AdErrorEnum_URL_PORT_NOT_ALLOWED AdErrorEnum_AdError = 99 + // URL must not contain a query. + AdErrorEnum_URL_QUERY_NOT_ALLOWED AdErrorEnum_AdError = 100 + // A url scheme is not allowed in front of tag in tracking url template + // (e.g. http://{lpurl}) + AdErrorEnum_URL_SCHEME_BEFORE_EXPANDED_DYNAMIC_SEARCH_AD_TAG AdErrorEnum_AdError = 102 + // The user does not have permissions to create a template ad for the given + // template. + AdErrorEnum_USER_DOES_NOT_HAVE_ACCESS_TO_TEMPLATE AdErrorEnum_AdError = 103 + // Expandable setting is inconsistent/wrong. For example, an AdX ad is + // invalid if it has a expandable vendor format but no expanding directions + // specified, or expanding directions is specified, but the vendor format is + // not expandable. + AdErrorEnum_INCONSISTENT_EXPANDABLE_SETTINGS AdErrorEnum_AdError = 104 + // Format is invalid + AdErrorEnum_INVALID_FORMAT AdErrorEnum_AdError = 105 + // The text of this field did not match a pattern of allowed values. + AdErrorEnum_INVALID_FIELD_TEXT AdErrorEnum_AdError = 106 + // Template element is missing + AdErrorEnum_ELEMENT_NOT_PRESENT AdErrorEnum_AdError = 107 + // Error occurred during image processing + AdErrorEnum_IMAGE_ERROR AdErrorEnum_AdError = 108 + // The value is not within the valid range + AdErrorEnum_VALUE_NOT_IN_RANGE AdErrorEnum_AdError = 109 + // Template element field is not present + AdErrorEnum_FIELD_NOT_PRESENT AdErrorEnum_AdError = 110 + // Address is incomplete + AdErrorEnum_ADDRESS_NOT_COMPLETE AdErrorEnum_AdError = 111 + // Invalid address + AdErrorEnum_ADDRESS_INVALID AdErrorEnum_AdError = 112 + // Error retrieving specified video + AdErrorEnum_VIDEO_RETRIEVAL_ERROR AdErrorEnum_AdError = 113 + // Error processing audio + AdErrorEnum_AUDIO_ERROR AdErrorEnum_AdError = 114 + // Display URL is incorrect for YouTube PYV ads + AdErrorEnum_INVALID_YOUTUBE_DISPLAY_URL AdErrorEnum_AdError = 115 + // Too many product Images in GmailAd + AdErrorEnum_TOO_MANY_PRODUCT_IMAGES AdErrorEnum_AdError = 116 + // Too many product Videos in GmailAd + AdErrorEnum_TOO_MANY_PRODUCT_VIDEOS AdErrorEnum_AdError = 117 + // The device preference is not compatible with the ad type + AdErrorEnum_INCOMPATIBLE_AD_TYPE_AND_DEVICE_PREFERENCE AdErrorEnum_AdError = 118 + // Call tracking is not supported for specified country. + AdErrorEnum_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY AdErrorEnum_AdError = 119 + // Carrier specific short number is not allowed. + AdErrorEnum_CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED AdErrorEnum_AdError = 120 + // Specified phone number type is disallowed. + AdErrorEnum_DISALLOWED_NUMBER_TYPE AdErrorEnum_AdError = 121 + // Phone number not supported for country. + AdErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY AdErrorEnum_AdError = 122 + // Phone number not supported with call tracking enabled for country. + AdErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY AdErrorEnum_AdError = 123 + // Premium rate phone number is not allowed. + AdErrorEnum_PREMIUM_RATE_NUMBER_NOT_ALLOWED AdErrorEnum_AdError = 124 + // Vanity phone number is not allowed. + AdErrorEnum_VANITY_PHONE_NUMBER_NOT_ALLOWED AdErrorEnum_AdError = 125 + // Invalid call conversion type id. + AdErrorEnum_INVALID_CALL_CONVERSION_TYPE_ID AdErrorEnum_AdError = 126 + // Cannot disable call conversion and set conversion type id. + AdErrorEnum_CANNOT_DISABLE_CALL_CONVERSION_AND_SET_CONVERSION_TYPE_ID AdErrorEnum_AdError = 127 + // Cannot set path2 without path1.
+ AdErrorEnum_CANNOT_SET_PATH2_WITHOUT_PATH1 AdErrorEnum_AdError = 128 + // Missing domain name in campaign setting when adding expanded dynamic + // search ad. + AdErrorEnum_MISSING_DYNAMIC_SEARCH_ADS_SETTING_DOMAIN_NAME AdErrorEnum_AdError = 129 + // The associated ad is not compatible with restriction type. + AdErrorEnum_INCOMPATIBLE_WITH_RESTRICTION_TYPE AdErrorEnum_AdError = 130 + // Consent for call recording is required for creating/updating call only + // ads. Please see https://support.google.com/google-ads/answer/7412639. + AdErrorEnum_CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED AdErrorEnum_AdError = 131 + // Either an image or a media bundle is required in a display upload ad. + AdErrorEnum_MISSING_IMAGE_OR_MEDIA_BUNDLE AdErrorEnum_AdError = 132 + // The display upload product type is not supported in this campaign. + AdErrorEnum_PRODUCT_TYPE_NOT_SUPPORTED_IN_THIS_CAMPAIGN AdErrorEnum_AdError = 133 +) + +var AdErrorEnum_AdError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_CUSTOMIZERS_NOT_SUPPORTED_FOR_AD_TYPE", + 3: "APPROXIMATELY_TOO_LONG", + 4: "APPROXIMATELY_TOO_SHORT", + 5: "BAD_SNIPPET", + 6: "CANNOT_MODIFY_AD", + 7: "CANNOT_SET_BUSINESS_NAME_IF_URL_SET", + 8: "CANNOT_SET_FIELD", + 9: "CANNOT_SET_FIELD_WITH_ORIGIN_AD_ID_SET", + 10: "CANNOT_SET_FIELD_WITH_AD_ID_SET_FOR_SHARING", + 11: "CANNOT_SET_ALLOW_FLEXIBLE_COLOR_FALSE", + 12: "CANNOT_SET_COLOR_CONTROL_WHEN_NATIVE_FORMAT_SETTING", + 13: "CANNOT_SET_URL", + 14: "CANNOT_SET_WITHOUT_FINAL_URLS", + 15: "CANNOT_SET_WITH_FINAL_URLS", + 17: "CANNOT_SET_WITH_URL_DATA", + 18: "CANNOT_USE_AD_SUBCLASS_FOR_OPERATOR", + 19: "CUSTOMER_NOT_APPROVED_MOBILEADS", + 20: "CUSTOMER_NOT_APPROVED_THIRDPARTY_ADS", + 21: "CUSTOMER_NOT_APPROVED_THIRDPARTY_REDIRECT_ADS", + 22: "CUSTOMER_NOT_ELIGIBLE", + 23: "CUSTOMER_NOT_ELIGIBLE_FOR_UPDATING_BEACON_URL", + 24: "DIMENSION_ALREADY_IN_UNION", + 25: "DIMENSION_MUST_BE_SET", + 26: "DIMENSION_NOT_IN_UNION", + 27: "DISPLAY_URL_CANNOT_BE_SPECIFIED", + 28: "DOMESTIC_PHONE_NUMBER_FORMAT", + 29: "EMERGENCY_PHONE_NUMBER", + 30: "EMPTY_FIELD", + 31: "FEED_ATTRIBUTE_MUST_HAVE_MAPPING_FOR_TYPE_ID", + 32: "FEED_ATTRIBUTE_MAPPING_TYPE_MISMATCH", + 33: "ILLEGAL_AD_CUSTOMIZER_TAG_USE", + 34: "ILLEGAL_TAG_USE", + 35: "INCONSISTENT_DIMENSIONS", + 36: "INCONSISTENT_STATUS_IN_TEMPLATE_UNION", + 37: "INCORRECT_LENGTH", + 38: "INELIGIBLE_FOR_UPGRADE", + 39: "INVALID_AD_ADDRESS_CAMPAIGN_TARGET", + 40: "INVALID_AD_TYPE", + 41: "INVALID_ATTRIBUTES_FOR_MOBILE_IMAGE", + 42: "INVALID_ATTRIBUTES_FOR_MOBILE_TEXT", + 43: "INVALID_CALL_TO_ACTION_TEXT", + 44: "INVALID_CHARACTER_FOR_URL", + 45: "INVALID_COUNTRY_CODE", + 47: "INVALID_EXPANDED_DYNAMIC_SEARCH_AD_TAG", + 48: "INVALID_INPUT", + 49: "INVALID_MARKUP_LANGUAGE", + 50: "INVALID_MOBILE_CARRIER", + 51: "INVALID_MOBILE_CARRIER_TARGET", + 52: "INVALID_NUMBER_OF_ELEMENTS", + 53: "INVALID_PHONE_NUMBER_FORMAT", + 54: "INVALID_RICH_MEDIA_CERTIFIED_VENDOR_FORMAT_ID", + 55: "INVALID_TEMPLATE_DATA", + 56: "INVALID_TEMPLATE_ELEMENT_FIELD_TYPE", + 57: "INVALID_TEMPLATE_ID", + 58: "LINE_TOO_WIDE", + 59: "MISSING_AD_CUSTOMIZER_MAPPING", + 60: "MISSING_ADDRESS_COMPONENT", + 61: "MISSING_ADVERTISEMENT_NAME", + 62: "MISSING_BUSINESS_NAME", + 63: "MISSING_DESCRIPTION1", + 64: "MISSING_DESCRIPTION2", + 65: "MISSING_DESTINATION_URL_TAG", + 66: "MISSING_LANDING_PAGE_URL_TAG", + 67: "MISSING_DIMENSION", + 68: "MISSING_DISPLAY_URL", + 69: "MISSING_HEADLINE", + 70: "MISSING_HEIGHT", + 71: "MISSING_IMAGE", + 72: "MISSING_MARKETING_IMAGE_OR_PRODUCT_VIDEOS", + 73: 
"MISSING_MARKUP_LANGUAGES", + 74: "MISSING_MOBILE_CARRIER", + 75: "MISSING_PHONE", + 76: "MISSING_REQUIRED_TEMPLATE_FIELDS", + 77: "MISSING_TEMPLATE_FIELD_VALUE", + 78: "MISSING_TEXT", + 79: "MISSING_VISIBLE_URL", + 80: "MISSING_WIDTH", + 81: "MULTIPLE_DISTINCT_FEEDS_UNSUPPORTED", + 82: "MUST_USE_TEMP_AD_UNION_ID_ON_ADD", + 83: "TOO_LONG", + 84: "TOO_SHORT", + 85: "UNION_DIMENSIONS_CANNOT_CHANGE", + 86: "UNKNOWN_ADDRESS_COMPONENT", + 87: "UNKNOWN_FIELD_NAME", + 88: "UNKNOWN_UNIQUE_NAME", + 89: "UNSUPPORTED_DIMENSIONS", + 90: "URL_INVALID_SCHEME", + 91: "URL_INVALID_TOP_LEVEL_DOMAIN", + 92: "URL_MALFORMED", + 93: "URL_NO_HOST", + 94: "URL_NOT_EQUIVALENT", + 95: "URL_HOST_NAME_TOO_LONG", + 96: "URL_NO_SCHEME", + 97: "URL_NO_TOP_LEVEL_DOMAIN", + 98: "URL_PATH_NOT_ALLOWED", + 99: "URL_PORT_NOT_ALLOWED", + 100: "URL_QUERY_NOT_ALLOWED", + 102: "URL_SCHEME_BEFORE_EXPANDED_DYNAMIC_SEARCH_AD_TAG", + 103: "USER_DOES_NOT_HAVE_ACCESS_TO_TEMPLATE", + 104: "INCONSISTENT_EXPANDABLE_SETTINGS", + 105: "INVALID_FORMAT", + 106: "INVALID_FIELD_TEXT", + 107: "ELEMENT_NOT_PRESENT", + 108: "IMAGE_ERROR", + 109: "VALUE_NOT_IN_RANGE", + 110: "FIELD_NOT_PRESENT", + 111: "ADDRESS_NOT_COMPLETE", + 112: "ADDRESS_INVALID", + 113: "VIDEO_RETRIEVAL_ERROR", + 114: "AUDIO_ERROR", + 115: "INVALID_YOUTUBE_DISPLAY_URL", + 116: "TOO_MANY_PRODUCT_IMAGES", + 117: "TOO_MANY_PRODUCT_VIDEOS", + 118: "INCOMPATIBLE_AD_TYPE_AND_DEVICE_PREFERENCE", + 119: "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY", + 120: "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED", + 121: "DISALLOWED_NUMBER_TYPE", + 122: "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY", + 123: "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY", + 124: "PREMIUM_RATE_NUMBER_NOT_ALLOWED", + 125: "VANITY_PHONE_NUMBER_NOT_ALLOWED", + 126: "INVALID_CALL_CONVERSION_TYPE_ID", + 127: "CANNOT_DISABLE_CALL_CONVERSION_AND_SET_CONVERSION_TYPE_ID", + 128: "CANNOT_SET_PATH2_WITHOUT_PATH1", + 129: "MISSING_DYNAMIC_SEARCH_ADS_SETTING_DOMAIN_NAME", + 130: "INCOMPATIBLE_WITH_RESTRICTION_TYPE", + 131: "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED", + 132: "MISSING_IMAGE_OR_MEDIA_BUNDLE", + 133: "PRODUCT_TYPE_NOT_SUPPORTED_IN_THIS_CAMPAIGN", +} +var AdErrorEnum_AdError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_CUSTOMIZERS_NOT_SUPPORTED_FOR_AD_TYPE": 2, + "APPROXIMATELY_TOO_LONG": 3, + "APPROXIMATELY_TOO_SHORT": 4, + "BAD_SNIPPET": 5, + "CANNOT_MODIFY_AD": 6, + "CANNOT_SET_BUSINESS_NAME_IF_URL_SET": 7, + "CANNOT_SET_FIELD": 8, + "CANNOT_SET_FIELD_WITH_ORIGIN_AD_ID_SET": 9, + "CANNOT_SET_FIELD_WITH_AD_ID_SET_FOR_SHARING": 10, + "CANNOT_SET_ALLOW_FLEXIBLE_COLOR_FALSE": 11, + "CANNOT_SET_COLOR_CONTROL_WHEN_NATIVE_FORMAT_SETTING": 12, + "CANNOT_SET_URL": 13, + "CANNOT_SET_WITHOUT_FINAL_URLS": 14, + "CANNOT_SET_WITH_FINAL_URLS": 15, + "CANNOT_SET_WITH_URL_DATA": 17, + "CANNOT_USE_AD_SUBCLASS_FOR_OPERATOR": 18, + "CUSTOMER_NOT_APPROVED_MOBILEADS": 19, + "CUSTOMER_NOT_APPROVED_THIRDPARTY_ADS": 20, + "CUSTOMER_NOT_APPROVED_THIRDPARTY_REDIRECT_ADS": 21, + "CUSTOMER_NOT_ELIGIBLE": 22, + "CUSTOMER_NOT_ELIGIBLE_FOR_UPDATING_BEACON_URL": 23, + "DIMENSION_ALREADY_IN_UNION": 24, + "DIMENSION_MUST_BE_SET": 25, + "DIMENSION_NOT_IN_UNION": 26, + "DISPLAY_URL_CANNOT_BE_SPECIFIED": 27, + "DOMESTIC_PHONE_NUMBER_FORMAT": 28, + "EMERGENCY_PHONE_NUMBER": 29, + "EMPTY_FIELD": 30, + "FEED_ATTRIBUTE_MUST_HAVE_MAPPING_FOR_TYPE_ID": 31, + "FEED_ATTRIBUTE_MAPPING_TYPE_MISMATCH": 32, + "ILLEGAL_AD_CUSTOMIZER_TAG_USE": 33, + "ILLEGAL_TAG_USE": 34, + "INCONSISTENT_DIMENSIONS": 35, + 
"INCONSISTENT_STATUS_IN_TEMPLATE_UNION": 36, + "INCORRECT_LENGTH": 37, + "INELIGIBLE_FOR_UPGRADE": 38, + "INVALID_AD_ADDRESS_CAMPAIGN_TARGET": 39, + "INVALID_AD_TYPE": 40, + "INVALID_ATTRIBUTES_FOR_MOBILE_IMAGE": 41, + "INVALID_ATTRIBUTES_FOR_MOBILE_TEXT": 42, + "INVALID_CALL_TO_ACTION_TEXT": 43, + "INVALID_CHARACTER_FOR_URL": 44, + "INVALID_COUNTRY_CODE": 45, + "INVALID_EXPANDED_DYNAMIC_SEARCH_AD_TAG": 47, + "INVALID_INPUT": 48, + "INVALID_MARKUP_LANGUAGE": 49, + "INVALID_MOBILE_CARRIER": 50, + "INVALID_MOBILE_CARRIER_TARGET": 51, + "INVALID_NUMBER_OF_ELEMENTS": 52, + "INVALID_PHONE_NUMBER_FORMAT": 53, + "INVALID_RICH_MEDIA_CERTIFIED_VENDOR_FORMAT_ID": 54, + "INVALID_TEMPLATE_DATA": 55, + "INVALID_TEMPLATE_ELEMENT_FIELD_TYPE": 56, + "INVALID_TEMPLATE_ID": 57, + "LINE_TOO_WIDE": 58, + "MISSING_AD_CUSTOMIZER_MAPPING": 59, + "MISSING_ADDRESS_COMPONENT": 60, + "MISSING_ADVERTISEMENT_NAME": 61, + "MISSING_BUSINESS_NAME": 62, + "MISSING_DESCRIPTION1": 63, + "MISSING_DESCRIPTION2": 64, + "MISSING_DESTINATION_URL_TAG": 65, + "MISSING_LANDING_PAGE_URL_TAG": 66, + "MISSING_DIMENSION": 67, + "MISSING_DISPLAY_URL": 68, + "MISSING_HEADLINE": 69, + "MISSING_HEIGHT": 70, + "MISSING_IMAGE": 71, + "MISSING_MARKETING_IMAGE_OR_PRODUCT_VIDEOS": 72, + "MISSING_MARKUP_LANGUAGES": 73, + "MISSING_MOBILE_CARRIER": 74, + "MISSING_PHONE": 75, + "MISSING_REQUIRED_TEMPLATE_FIELDS": 76, + "MISSING_TEMPLATE_FIELD_VALUE": 77, + "MISSING_TEXT": 78, + "MISSING_VISIBLE_URL": 79, + "MISSING_WIDTH": 80, + "MULTIPLE_DISTINCT_FEEDS_UNSUPPORTED": 81, + "MUST_USE_TEMP_AD_UNION_ID_ON_ADD": 82, + "TOO_LONG": 83, + "TOO_SHORT": 84, + "UNION_DIMENSIONS_CANNOT_CHANGE": 85, + "UNKNOWN_ADDRESS_COMPONENT": 86, + "UNKNOWN_FIELD_NAME": 87, + "UNKNOWN_UNIQUE_NAME": 88, + "UNSUPPORTED_DIMENSIONS": 89, + "URL_INVALID_SCHEME": 90, + "URL_INVALID_TOP_LEVEL_DOMAIN": 91, + "URL_MALFORMED": 92, + "URL_NO_HOST": 93, + "URL_NOT_EQUIVALENT": 94, + "URL_HOST_NAME_TOO_LONG": 95, + "URL_NO_SCHEME": 96, + "URL_NO_TOP_LEVEL_DOMAIN": 97, + "URL_PATH_NOT_ALLOWED": 98, + "URL_PORT_NOT_ALLOWED": 99, + "URL_QUERY_NOT_ALLOWED": 100, + "URL_SCHEME_BEFORE_EXPANDED_DYNAMIC_SEARCH_AD_TAG": 102, + "USER_DOES_NOT_HAVE_ACCESS_TO_TEMPLATE": 103, + "INCONSISTENT_EXPANDABLE_SETTINGS": 104, + "INVALID_FORMAT": 105, + "INVALID_FIELD_TEXT": 106, + "ELEMENT_NOT_PRESENT": 107, + "IMAGE_ERROR": 108, + "VALUE_NOT_IN_RANGE": 109, + "FIELD_NOT_PRESENT": 110, + "ADDRESS_NOT_COMPLETE": 111, + "ADDRESS_INVALID": 112, + "VIDEO_RETRIEVAL_ERROR": 113, + "AUDIO_ERROR": 114, + "INVALID_YOUTUBE_DISPLAY_URL": 115, + "TOO_MANY_PRODUCT_IMAGES": 116, + "TOO_MANY_PRODUCT_VIDEOS": 117, + "INCOMPATIBLE_AD_TYPE_AND_DEVICE_PREFERENCE": 118, + "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 119, + "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED": 120, + "DISALLOWED_NUMBER_TYPE": 121, + "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY": 122, + "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY": 123, + "PREMIUM_RATE_NUMBER_NOT_ALLOWED": 124, + "VANITY_PHONE_NUMBER_NOT_ALLOWED": 125, + "INVALID_CALL_CONVERSION_TYPE_ID": 126, + "CANNOT_DISABLE_CALL_CONVERSION_AND_SET_CONVERSION_TYPE_ID": 127, + "CANNOT_SET_PATH2_WITHOUT_PATH1": 128, + "MISSING_DYNAMIC_SEARCH_ADS_SETTING_DOMAIN_NAME": 129, + "INCOMPATIBLE_WITH_RESTRICTION_TYPE": 130, + "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED": 131, + "MISSING_IMAGE_OR_MEDIA_BUNDLE": 132, + "PRODUCT_TYPE_NOT_SUPPORTED_IN_THIS_CAMPAIGN": 133, +} + +func (x AdErrorEnum_AdError) String() string { + return proto.EnumName(AdErrorEnum_AdError_name, int32(x)) +} +func 
(AdErrorEnum_AdError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_error_e9bb2204ffbdff9f, []int{0, 0} +} + +// Container for enum describing possible ad errors. +type AdErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdErrorEnum) Reset() { *m = AdErrorEnum{} } +func (m *AdErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdErrorEnum) ProtoMessage() {} +func (*AdErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_error_e9bb2204ffbdff9f, []int{0} +} +func (m *AdErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdErrorEnum.Unmarshal(m, b) +} +func (m *AdErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdErrorEnum.Merge(dst, src) +} +func (m *AdErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdErrorEnum.Size(m) +} +func (m *AdErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdErrorEnum)(nil), "google.ads.googleads.v1.errors.AdErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdErrorEnum_AdError", AdErrorEnum_AdError_name, AdErrorEnum_AdError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_error.proto", fileDescriptor_ad_error_e9bb2204ffbdff9f) +} + +var fileDescriptor_ad_error_e9bb2204ffbdff9f = []byte{ + // 2059 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x58, 0x6b, 0x7b, 0x1c, 0x37, + 0x15, 0xc6, 0x29, 0x34, 0xad, 0x92, 0x34, 0x8a, 0x72, 0xbf, 0x39, 0xa9, 0x93, 0xe6, 0x9e, 0x75, + 0x9c, 0x14, 0xda, 0xba, 0x2d, 0xa0, 0x1d, 0x9d, 0xdd, 0x11, 0xd1, 0x48, 0x13, 0x49, 0xb3, 0xf6, + 0x86, 0x80, 0x70, 0xeb, 0x60, 0x02, 0x89, 0x37, 0xd8, 0x6e, 0xb8, 0xdf, 0xe1, 0x8f, 0xf0, 0x91, + 0x9f, 0xc2, 0xaf, 0xe0, 0x1b, 0xcf, 0xc3, 0x27, 0x7e, 0x42, 0x9f, 0xa3, 0x19, 0xcd, 0xce, 0xda, + 0x4e, 0xfb, 0x69, 0x67, 0x75, 0x5e, 0x5d, 0xce, 0xab, 0xf7, 0x5c, 0x66, 0xc8, 0xdd, 0x8d, 0xc9, + 0x64, 0xe3, 0xf9, 0xd3, 0xc5, 0xb5, 0xf5, 0xed, 0xc5, 0xfa, 0x11, 0x9f, 0x5e, 0x2d, 0x2d, 0x3e, + 0xdd, 0xda, 0x9a, 0x6c, 0x6d, 0x2f, 0xae, 0xad, 0x87, 0xf8, 0xd4, 0x7b, 0xb9, 0x35, 0xd9, 0x99, + 0xb0, 0xf9, 0x1a, 0xd3, 0x5b, 0x5b, 0xdf, 0xee, 0xb5, 0xf0, 0xde, 0xab, 0xa5, 0x5e, 0x0d, 0x3f, + 0x77, 0x21, 0x2d, 0xf7, 0xf2, 0xd9, 0xe2, 0xda, 0xe6, 0xe6, 0x64, 0x67, 0x6d, 0xe7, 0xd9, 0x64, + 0x73, 0xbb, 0x9e, 0xbd, 0xf0, 0xff, 0xcb, 0xe4, 0x10, 0x5f, 0x07, 0x84, 0xc2, 0xe6, 0x17, 0x2f, + 0x16, 0xfe, 0x73, 0x99, 0x1c, 0x6c, 0xfe, 0xb3, 0xa3, 0xe4, 0x50, 0xa5, 0x5d, 0x09, 0x99, 0x1c, + 0x48, 0x10, 0xf4, 0x1b, 0xec, 0x10, 0x39, 0x58, 0xe9, 0x87, 0xda, 0xac, 0x68, 0x3a, 0xc7, 0xee, + 0x90, 0x1b, 0x5c, 0x84, 0xac, 0x72, 0xde, 0x14, 0xf2, 0x31, 0x58, 0x17, 0xb4, 0xf1, 0xc1, 0x55, + 0x65, 0x69, 0xac, 0x07, 0x11, 0x06, 0xc6, 0x06, 0x2e, 0x82, 0x1f, 0x97, 0x40, 0x0f, 0xb0, 0x73, + 0xe4, 0x14, 0x2f, 0x4b, 0x6b, 0x56, 0x65, 0xc1, 0x3d, 0xa8, 0x71, 0xf0, 0xc6, 0x04, 0x65, 0xf4, + 0x90, 0xbe, 0xc1, 0xce, 0x93, 0xd3, 0x7b, 0x6d, 0x2e, 0x37, 0xd6, 0xd3, 0x6f, 0xe2, 0x21, 0xfa, + 0x5c, 0x04, 0xa7, 0x65, 0x59, 0x82, 0xa7, 0xdf, 0x62, 0x27, 0x08, 0xcd, 0xb8, 0xc6, 0xbd, 0x0a, + 0x23, 0xe4, 0x60, 0x1c, 0xb8, 0xa0, 0x6f, 0xb2, 0xeb, 0xe4, 0x4a, 0x33, 0xea, 0xc0, 0x87, 
0x7e, + 0xe5, 0xa4, 0x06, 0xe7, 0x82, 0xe6, 0x05, 0x04, 0x39, 0x08, 0x95, 0x55, 0x68, 0xa0, 0x07, 0x3b, + 0xd3, 0x11, 0x38, 0x90, 0xa0, 0x04, 0x7d, 0x8b, 0xdd, 0x22, 0xd7, 0x76, 0x8f, 0x86, 0x15, 0xe9, + 0xf3, 0x60, 0xac, 0x1c, 0x4a, 0x8d, 0x8e, 0x48, 0x11, 0x57, 0x78, 0x9b, 0x2d, 0x92, 0xdb, 0xfb, + 0x63, 0x5b, 0x50, 0xf4, 0xdd, 0xe5, 0xdc, 0x4a, 0x3d, 0xa4, 0x84, 0xdd, 0x24, 0xef, 0x75, 0x26, + 0x70, 0xa5, 0xcc, 0x4a, 0x18, 0x28, 0x58, 0x95, 0x7d, 0x05, 0x21, 0x33, 0xca, 0xd8, 0x30, 0xe0, + 0xca, 0x01, 0x3d, 0xc4, 0x3e, 0x20, 0x0f, 0x3a, 0xd0, 0xda, 0x96, 0x19, 0xed, 0xad, 0x51, 0x61, + 0x25, 0x07, 0x1d, 0x34, 0xf7, 0x72, 0x04, 0xb8, 0x41, 0xc1, 0x23, 0xc8, 0xe3, 0x1e, 0x87, 0x19, + 0x23, 0xef, 0x74, 0x26, 0x56, 0x56, 0xd1, 0x23, 0xec, 0x5d, 0x72, 0xb1, 0x33, 0x86, 0x47, 0x34, + 0x15, 0x1e, 0x58, 0x73, 0x85, 0x08, 0x47, 0xdf, 0x61, 0xf3, 0xe4, 0xdc, 0x2e, 0x48, 0xd7, 0x7e, + 0x94, 0x5d, 0x20, 0x67, 0x76, 0xdb, 0x91, 0x4a, 0xc1, 0x3d, 0xa7, 0xc7, 0x3a, 0xa4, 0x57, 0x0e, + 0xd0, 0x7d, 0x57, 0xf5, 0x33, 0xc5, 0x9d, 0x8b, 0x04, 0x98, 0x12, 0x2c, 0xf7, 0xc6, 0x52, 0xc6, + 0xae, 0x90, 0x4b, 0xb5, 0x50, 0xc0, 0x46, 0x95, 0xc4, 0xeb, 0x1e, 0x81, 0x08, 0x85, 0xe9, 0x4b, + 0x05, 0x5c, 0x38, 0x7a, 0x9c, 0xdd, 0x20, 0x57, 0xf7, 0x07, 0xf9, 0x5c, 0x5a, 0x51, 0x72, 0xeb, + 0xf1, 0xae, 0x1d, 0x3d, 0xc1, 0x96, 0xc8, 0xdd, 0xaf, 0x45, 0x5a, 0x10, 0xd2, 0x42, 0xe6, 0xe3, + 0x94, 0x93, 0xec, 0x2c, 0x39, 0x39, 0x33, 0x05, 0x94, 0x1c, 0x22, 0xff, 0xf4, 0xd4, 0x9e, 0xd5, + 0x92, 0x29, 0x3a, 0x51, 0x95, 0x82, 0x23, 0xc5, 0xa1, 0x0f, 0x3c, 0x33, 0x3a, 0x32, 0x7b, 0x1a, + 0x69, 0x13, 0xb2, 0x00, 0xed, 0xa4, 0xd1, 0x81, 0x2b, 0x0b, 0x5c, 0x8c, 0x83, 0xd4, 0xa1, 0xd2, + 0xd2, 0x68, 0x7a, 0x06, 0x77, 0x9b, 0xda, 0x8b, 0xca, 0xf9, 0xd0, 0x87, 0xa8, 0x9e, 0xb3, 0x18, + 0x08, 0x53, 0x13, 0x6e, 0xd7, 0x4e, 0x3b, 0x87, 0x34, 0x09, 0xe9, 0x4a, 0xc5, 0xc7, 0x91, 0xe5, + 0x86, 0x5b, 0x9c, 0xda, 0x06, 0xe1, 0x79, 0x76, 0x99, 0x5c, 0x10, 0xa6, 0x00, 0xe7, 0x65, 0x16, + 0xca, 0xdc, 0x68, 0x08, 0xba, 0x2a, 0xfa, 0x60, 0x1b, 0x4d, 0xd0, 0x0b, 0xb8, 0x05, 0x14, 0x60, + 0x87, 0xa0, 0xb3, 0xf1, 0x0c, 0x84, 0x5e, 0xc4, 0x70, 0x82, 0xa2, 0xf4, 0xe3, 0x46, 0xf9, 0xf3, + 0xec, 0x1e, 0xb9, 0x33, 0x00, 0x10, 0x81, 0x7b, 0x6f, 0x65, 0xbf, 0xf2, 0x50, 0x9f, 0x37, 0xe7, + 0x23, 0x08, 0x05, 0x2f, 0x4b, 0xf4, 0x1d, 0x89, 0xc0, 0x38, 0x0e, 0x52, 0xd0, 0x4b, 0x78, 0x4f, + 0xbb, 0x67, 0x34, 0xb8, 0x88, 0x29, 0xa4, 0x2b, 0xb8, 0xcf, 0x72, 0x7a, 0x19, 0x05, 0x28, 0x95, + 0x82, 0x21, 0x57, 0x61, 0x26, 0x55, 0x04, 0xcf, 0x87, 0x28, 0x19, 0xfa, 0x2e, 0x3b, 0x4e, 0x8e, + 0x26, 0x48, 0x1a, 0x5c, 0xc0, 0x84, 0x20, 0x75, 0x66, 0xb4, 0x93, 0xce, 0x83, 0xf6, 0xa1, 0x25, + 0xcc, 0xd1, 0x2b, 0x18, 0x4d, 0x33, 0x46, 0xe7, 0xb9, 0xaf, 0x1c, 0xd2, 0xe8, 0xa1, 0x28, 0x15, + 0xf7, 0xd0, 0xf0, 0x79, 0x15, 0x63, 0x1d, 0xa1, 0x36, 0xea, 0x40, 0x81, 0x1e, 0xfa, 0x9c, 0xbe, + 0x87, 0xf4, 0x48, 0xbd, 0xeb, 0x92, 0x87, 0x96, 0x0b, 0xa0, 0xd7, 0xd8, 0x35, 0xb2, 0x20, 0xf5, + 0x88, 0x2b, 0x29, 0xf0, 0xc4, 0x5c, 0x08, 0x8b, 0x59, 0x24, 0xe3, 0x45, 0xc9, 0xe5, 0x50, 0x07, + 0xcf, 0xed, 0x10, 0x3c, 0xbd, 0x1e, 0x8f, 0x3d, 0xc5, 0xc5, 0x1c, 0x77, 0x03, 0xc3, 0xa1, 0x1d, + 0x4c, 0xdc, 0xd4, 0xa1, 0x50, 0xcb, 0x3c, 0xc8, 0x82, 0x0f, 0x81, 0xde, 0x9c, 0xd9, 0x65, 0x5f, + 0xa0, 0x87, 0x55, 0x4f, 0x6f, 0xb1, 0x4b, 0xe4, 0x7c, 0xc2, 0x65, 0x5c, 0xa9, 0xe0, 0x4d, 0xe0, + 0x99, 0x47, 0xe1, 0x44, 0xc0, 0x6d, 0x76, 0x91, 0x9c, 0x6d, 0x01, 0x39, 0xb7, 0x3c, 0xf3, 0xb5, + 0x0e, 0xa2, 0x4c, 0xef, 0xb0, 0x33, 0xe4, 0x44, 0x6b, 0x36, 0x95, 0xf6, 0x76, 0x1c, 0x32, 0x23, + 0x80, 0xde, 0xc5, 
0x7c, 0x97, 0x2c, 0xb0, 0x5a, 0x72, 0x2d, 0x40, 0x04, 0x31, 0xd6, 0xbc, 0x90, + 0x59, 0x70, 0xc0, 0x6d, 0x16, 0xb3, 0x99, 0xe7, 0x43, 0xba, 0xc8, 0x8e, 0x91, 0x23, 0x09, 0x2b, + 0x75, 0x59, 0x79, 0x7a, 0xaf, 0xbe, 0xa0, 0x7a, 0xa8, 0xe0, 0xf6, 0x61, 0x55, 0x06, 0xc5, 0xf5, + 0xb0, 0x42, 0xef, 0x96, 0x6a, 0x7e, 0x1b, 0x63, 0xed, 0x4e, 0xc6, 0xad, 0x95, 0x60, 0xe9, 0xfd, + 0xa8, 0x88, 0x7d, 0x6d, 0x89, 0xda, 0x07, 0x18, 0x5b, 0x09, 0xd2, 0x08, 0xdb, 0x0c, 0x02, 0x28, + 0x28, 0x40, 0x7b, 0x47, 0xdf, 0xef, 0x92, 0xb2, 0x9f, 0xfc, 0xbf, 0x8d, 0xf1, 0x9c, 0x00, 0x56, + 0x66, 0x79, 0x28, 0x40, 0x48, 0x1e, 0x32, 0xb0, 0x3e, 0x86, 0x50, 0x18, 0x81, 0x16, 0x26, 0xe1, + 0x51, 0xd2, 0xdf, 0xc1, 0x78, 0x4d, 0x53, 0x5a, 0x11, 0xc5, 0x1c, 0xf7, 0x41, 0xf7, 0x52, 0x5b, + 0x53, 0x73, 0x9a, 0x26, 0xf7, 0xc7, 0xdb, 0xff, 0x90, 0x9d, 0x26, 0xc7, 0xf7, 0x00, 0xa5, 0xa0, + 0x1f, 0x21, 0x7f, 0x4a, 0x6a, 0x88, 0x55, 0x6d, 0x45, 0x0a, 0xa0, 0xcb, 0x48, 0x43, 0x21, 0x9d, + 0xc3, 0x98, 0x99, 0x0d, 0x8c, 0x26, 0x92, 0xe8, 0xc7, 0x78, 0xb5, 0x53, 0x48, 0x23, 0x43, 0x53, + 0x94, 0x46, 0x83, 0xf6, 0xf4, 0x13, 0x64, 0x69, 0x6a, 0x1e, 0xa1, 0x6f, 0xae, 0x3e, 0x13, 0x56, + 0x3c, 0xfa, 0x29, 0x7a, 0x94, 0xec, 0x33, 0xc5, 0x90, 0x7e, 0x17, 0x55, 0x91, 0x4c, 0x02, 0x5c, + 0x66, 0x65, 0x89, 0x92, 0x5a, 0xa2, 0xdf, 0x7b, 0x8d, 0xe5, 0x3e, 0xfd, 0x3e, 0x92, 0xde, 0xb1, + 0x78, 0x89, 0x65, 0xa8, 0xce, 0x86, 0x51, 0x24, 0x1c, 0xb3, 0x52, 0x02, 0x28, 0xae, 0x05, 0xfe, + 0x96, 0x7c, 0x08, 0x2d, 0xa2, 0xcf, 0x4e, 0x92, 0x63, 0xed, 0x12, 0x29, 0x9e, 0x69, 0x86, 0xb4, + 0x4d, 0x87, 0xdb, 0xdc, 0x47, 0x05, 0x06, 0x6f, 0x32, 0xe4, 0xc0, 0x05, 0x52, 0x48, 0x01, 0xeb, + 0xdc, 0x74, 0x54, 0x0e, 0x73, 0x4f, 0x07, 0x48, 0x70, 0x1a, 0xab, 0x23, 0x6c, 0xc8, 0xee, 0x92, + 0x9b, 0x69, 0x08, 0x05, 0x0a, 0xbe, 0x35, 0x06, 0x63, 0x43, 0x69, 0x8d, 0xa8, 0x32, 0x1f, 0x46, + 0x52, 0x80, 0x71, 0x34, 0xc7, 0x32, 0xd7, 0x85, 0x77, 0xf4, 0xec, 0xa8, 0x44, 0x41, 0xb7, 0xd6, + 0x59, 0x41, 0xff, 0xa0, 0xbb, 0x77, 0x54, 0x23, 0x7d, 0xc8, 0xae, 0x92, 0xcb, 0x69, 0xc8, 0xc2, + 0xa3, 0x4a, 0x5a, 0xe8, 0x28, 0x22, 0x4a, 0xc6, 0x51, 0xd5, 0x25, 0x6c, 0xd6, 0x18, 0x46, 0x5c, + 0x55, 0x40, 0x0b, 0x46, 0xc9, 0xe1, 0x29, 0x62, 0xd5, 0x53, 0xdd, 0xe5, 0x6a, 0x24, 0x5d, 0x4c, + 0x5f, 0xc8, 0x95, 0xe9, 0x9e, 0x62, 0x45, 0x0a, 0x9f, 0xd3, 0x12, 0x75, 0x5b, 0x54, 0xca, 0xcb, + 0x52, 0x01, 0x12, 0xeb, 0xa5, 0xce, 0x7c, 0xc0, 0xbc, 0xed, 0x42, 0xa5, 0xdb, 0x2e, 0x8d, 0x3e, + 0x8a, 0xc7, 0xc5, 0x8c, 0x8f, 0x25, 0x1c, 0x4f, 0x82, 0x8a, 0x8c, 0x09, 0x14, 0x9b, 0x19, 0x2c, + 0x6f, 0x42, 0x50, 0xcb, 0x0e, 0x93, 0xb7, 0xda, 0x8e, 0xcd, 0xb1, 0x23, 0xe4, 0xed, 0x69, 0x8f, + 0xe6, 0xd9, 0x02, 0x99, 0xaf, 0x67, 0x4c, 0x13, 0x75, 0x2a, 0x5e, 0x59, 0xce, 0xf5, 0x10, 0x68, + 0x85, 0x7a, 0x6e, 0x7a, 0xc7, 0x7d, 0xf4, 0x3c, 0x62, 0xa7, 0x08, 0x4b, 0xe6, 0x9a, 0x85, 0x28, + 0xd6, 0x15, 0x74, 0x39, 0x8d, 0x57, 0x5a, 0x3e, 0xaa, 0xa0, 0x36, 0xac, 0xe2, 0xa5, 0x74, 0xfc, + 0xe8, 0x96, 0x88, 0x71, 0x5c, 0xcc, 0xaa, 0x90, 0xc2, 0xd1, 0x65, 0x39, 0x14, 0x40, 0x1f, 0x23, + 0xe7, 0xdd, 0x71, 0x6f, 0xca, 0xa0, 0x60, 0x04, 0x2a, 0x08, 0x53, 0x70, 0xa9, 0xe9, 0x0f, 0x91, + 0x48, 0x44, 0x14, 0x5c, 0x61, 0x7a, 0x00, 0x41, 0x9f, 0xc4, 0x2e, 0xd8, 0xaa, 0xa0, 0x4d, 0xc8, + 0x8d, 0xf3, 0xf4, 0x47, 0x69, 0xf5, 0xd8, 0x2a, 0x3c, 0xaa, 0xe4, 0x88, 0x2b, 0x74, 0xe1, 0xc7, + 0xf1, 0x44, 0x56, 0x45, 0x54, 0xdd, 0x77, 0xb6, 0x84, 0x85, 0xb4, 0xae, 0x36, 0xe9, 0x30, 0x3f, + 0xc1, 0x1c, 0xda, 0x0c, 0xed, 0x39, 0xc7, 0x1a, 0x46, 0x22, 0x1a, 0x4b, 0xee, 0xf3, 0xba, 0xc3, + 0xc1, 0xa6, 0x11, 0x04, 0xfd, 0xac, 0xb5, 
0x18, 0xeb, 0x67, 0x2c, 0x9f, 0x63, 0xc8, 0xa3, 0xe5, + 0x51, 0x05, 0x76, 0x3c, 0x63, 0x5a, 0x67, 0xef, 0x93, 0x7b, 0xb1, 0x03, 0x8e, 0x7b, 0x87, 0x3e, + 0x0c, 0x8c, 0x85, 0xaf, 0x4b, 0xfc, 0x3f, 0xc5, 0x4a, 0x5b, 0x39, 0xb0, 0x41, 0x18, 0xa8, 0x9b, + 0xfb, 0xd8, 0x14, 0xf0, 0x2c, 0xc3, 0xcb, 0xf3, 0xa6, 0xd5, 0x2d, 0xdd, 0x40, 0x11, 0xcd, 0x14, + 0xe5, 0x7a, 0x6d, 0x8e, 0x0a, 0x6d, 0x7a, 0x54, 0x47, 0x7f, 0x86, 0xc1, 0x9b, 0xb8, 0x6f, 0xb2, + 0xf5, 0x33, 0x64, 0xb3, 0x1d, 0xab, 0xd3, 0x29, 0x6a, 0xfd, 0xe7, 0x78, 0xf1, 0x29, 0xcd, 0xe2, + 0xd6, 0xa5, 0x05, 0x87, 0x34, 0xff, 0x02, 0xef, 0xa3, 0x0e, 0x64, 0xb0, 0xd6, 0x58, 0xfa, 0x1c, + 0x57, 0x88, 0x21, 0x93, 0xba, 0x29, 0x1b, 0x15, 0xf7, 0x02, 0x13, 0x4e, 0x23, 0xa5, 0xce, 0xfc, + 0x4d, 0x24, 0x30, 0x09, 0x30, 0x0a, 0xd4, 0x14, 0xa5, 0x02, 0x0f, 0x74, 0x82, 0x45, 0x3d, 0x59, + 0x9a, 0x23, 0xd1, 0x97, 0xc8, 0x6a, 0x4c, 0x13, 0xc1, 0x82, 0xb7, 0x12, 0x46, 0x5c, 0x35, 0x1b, + 0xff, 0x12, 0x4f, 0xc2, 0x2b, 0x21, 0x4d, 0x33, 0xb0, 0xd5, 0x2d, 0x4d, 0x63, 0x53, 0xf9, 0xaa, + 0x0f, 0x33, 0x39, 0x6d, 0x1b, 0xef, 0x1c, 0x45, 0x51, 0x70, 0x3d, 0x6e, 0x93, 0x50, 0x74, 0xc6, + 0xd1, 0x9d, 0x7d, 0x8d, 0x4d, 0x86, 0xfa, 0x82, 0xf5, 0xc8, 0x2d, 0x24, 0xb8, 0x28, 0xb9, 0x8f, + 0x71, 0xdf, 0x74, 0x1d, 0x81, 0x6b, 0x11, 0x04, 0x8c, 0x64, 0x06, 0xe8, 0xe2, 0x00, 0x2c, 0xe8, + 0x0c, 0xe8, 0xab, 0xfa, 0x85, 0x46, 0x29, 0x6f, 0x79, 0xf6, 0x10, 0xd3, 0xc2, 0xde, 0x77, 0xb3, + 0xa6, 0x23, 0xa0, 0xbf, 0xc2, 0x64, 0x99, 0xaa, 0x70, 0xd3, 0x68, 0x66, 0x75, 0x68, 0xa7, 0xd2, + 0xda, 0x15, 0xd3, 0xaf, 0xeb, 0x0e, 0xd6, 0x35, 0xff, 0x13, 0x24, 0x16, 0xc1, 0xdf, 0xe0, 0xb6, + 0x33, 0x45, 0xf9, 0xf5, 0xdb, 0xfe, 0x96, 0x7d, 0x42, 0x3e, 0xfc, 0x0a, 0x6c, 0x7c, 0xd7, 0x98, + 0x71, 0xa1, 0x3b, 0xfb, 0x77, 0xd8, 0x2b, 0x97, 0x16, 0x0a, 0x59, 0x15, 0xc1, 0x62, 0xee, 0xdc, + 0xe7, 0xa8, 0xbf, 0x47, 0xd0, 0x88, 0x6b, 0xe9, 0x67, 0xdb, 0xe0, 0x19, 0xd0, 0x1f, 0x10, 0x34, + 0xd3, 0x65, 0x65, 0x46, 0x8f, 0xc0, 0xc6, 0xfe, 0x3c, 0x35, 0xbd, 0x7f, 0x64, 0x9f, 0x92, 0x8f, + 0x9a, 0x8c, 0x86, 0xbe, 0xc7, 0x17, 0xb7, 0x5d, 0x58, 0xbc, 0x89, 0xfa, 0xa5, 0x6d, 0xcf, 0xf4, + 0x3f, 0xb1, 0x2b, 0x64, 0xbe, 0xf3, 0x1e, 0x85, 0x61, 0x7d, 0xbf, 0x7d, 0x21, 0xc3, 0x7f, 0x4b, + 0xf4, 0xcf, 0x73, 0xec, 0x01, 0xe9, 0xb5, 0xa5, 0x70, 0x77, 0x48, 0xba, 0x14, 0x47, 0x4d, 0x8e, + 0xa8, 0xd3, 0xe0, 0x5f, 0xe6, 0xd8, 0x75, 0xec, 0x25, 0x3b, 0xc2, 0x88, 0xbc, 0x59, 0x70, 0xde, + 0xca, 0xa6, 0x55, 0xc4, 0x9b, 0xf9, 0xeb, 0x1c, 0x5b, 0x22, 0x77, 0xda, 0xd7, 0x1c, 0x8c, 0xd4, + 0xd8, 0xc0, 0x20, 0xa5, 0xe8, 0x87, 0x85, 0xcc, 0x58, 0xd1, 0x2d, 0x59, 0xf4, 0x6f, 0x73, 0x6c, + 0x61, 0xda, 0xa6, 0xb4, 0xb5, 0xb3, 0xee, 0xa6, 0xfa, 0x95, 0x16, 0x0a, 0xe8, 0xdf, 0xe7, 0xd8, + 0x3d, 0x72, 0x3b, 0x89, 0x35, 0xba, 0x3b, 0x7b, 0x8b, 0xd8, 0x95, 0xe7, 0x72, 0xda, 0x41, 0xd3, + 0x7f, 0xcc, 0xf5, 0xff, 0x3b, 0x47, 0x16, 0x3e, 0x9f, 0xbc, 0xe8, 0x7d, 0xf5, 0x77, 0x8b, 0xfe, + 0xe1, 0xe6, 0x33, 0x44, 0xb9, 0x35, 0xd9, 0x99, 0x94, 0x73, 0x8f, 0x45, 0x83, 0xdf, 0x98, 0x3c, + 0x5f, 0xdb, 0xdc, 0xe8, 0x4d, 0xb6, 0x36, 0x16, 0x37, 0x9e, 0x6e, 0xc6, 0xaf, 0x18, 0xe9, 0x33, + 0xc9, 0xcb, 0x67, 0xdb, 0xaf, 0xfb, 0x6a, 0xf2, 0x71, 0xfd, 0xf3, 0xcf, 0x03, 0x6f, 0x0c, 0x39, + 0xff, 0xd7, 0x81, 0xf9, 0x61, 0xbd, 0x18, 0x5f, 0xdf, 0xee, 0xd5, 0x8f, 0xf8, 0x34, 0x5a, 0xea, + 0xc5, 0x2d, 0xb7, 0xff, 0x9d, 0x00, 0x4f, 0xf8, 0xfa, 0xf6, 0x93, 0x16, 0xf0, 0x64, 0xb4, 0xf4, + 0xa4, 0x06, 0xfc, 0xef, 0xc0, 0x42, 0x3d, 0xba, 0xbc, 0xcc, 0xd7, 0xb7, 0x97, 0x97, 0x5b, 0xc8, + 0xf2, 0xf2, 0x68, 0x69, 0x79, 0xb9, 0x06, 0x7d, 0xf6, 0x66, 0x3c, 
0xdd, 0x83, 0x2f, 0x03, 0x00, + 0x00, 0xff, 0xff, 0xff, 0x2c, 0x0c, 0x2e, 0xd2, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_ad_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_ad_error.pb.go new file mode 100644 index 0000000..04248a7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_ad_error.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_group_ad_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad group ad errors. +type AdGroupAdErrorEnum_AdGroupAdError int32 + +const ( + // Enum unspecified. + AdGroupAdErrorEnum_UNSPECIFIED AdGroupAdErrorEnum_AdGroupAdError = 0 + // The received error code is not known in this version. + AdGroupAdErrorEnum_UNKNOWN AdGroupAdErrorEnum_AdGroupAdError = 1 + // No link found between the adgroup ad and the label. + AdGroupAdErrorEnum_AD_GROUP_AD_LABEL_DOES_NOT_EXIST AdGroupAdErrorEnum_AdGroupAdError = 2 + // The label has already been attached to the adgroup ad. + AdGroupAdErrorEnum_AD_GROUP_AD_LABEL_ALREADY_EXISTS AdGroupAdErrorEnum_AdGroupAdError = 3 + // The specified ad was not found in the adgroup + AdGroupAdErrorEnum_AD_NOT_UNDER_ADGROUP AdGroupAdErrorEnum_AdGroupAdError = 4 + // Removed ads may not be modified + AdGroupAdErrorEnum_CANNOT_OPERATE_ON_REMOVED_ADGROUPAD AdGroupAdErrorEnum_AdGroupAdError = 5 + // An ad of this type is deprecated and cannot be created. Only deletions + // are permitted. + AdGroupAdErrorEnum_CANNOT_CREATE_DEPRECATED_ADS AdGroupAdErrorEnum_AdGroupAdError = 6 + // Text ads are deprecated and cannot be created. Use expanded text ads + // instead. + AdGroupAdErrorEnum_CANNOT_CREATE_TEXT_ADS AdGroupAdErrorEnum_AdGroupAdError = 7 + // A required field was not specified or is an empty string. 
+ AdGroupAdErrorEnum_EMPTY_FIELD AdGroupAdErrorEnum_AdGroupAdError = 8 + // An ad may only be modified once per call + AdGroupAdErrorEnum_RESOURCE_REFERENCED_IN_MULTIPLE_OPS AdGroupAdErrorEnum_AdGroupAdError = 9 +) + +var AdGroupAdErrorEnum_AdGroupAdError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_GROUP_AD_LABEL_DOES_NOT_EXIST", + 3: "AD_GROUP_AD_LABEL_ALREADY_EXISTS", + 4: "AD_NOT_UNDER_ADGROUP", + 5: "CANNOT_OPERATE_ON_REMOVED_ADGROUPAD", + 6: "CANNOT_CREATE_DEPRECATED_ADS", + 7: "CANNOT_CREATE_TEXT_ADS", + 8: "EMPTY_FIELD", + 9: "RESOURCE_REFERENCED_IN_MULTIPLE_OPS", +} +var AdGroupAdErrorEnum_AdGroupAdError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP_AD_LABEL_DOES_NOT_EXIST": 2, + "AD_GROUP_AD_LABEL_ALREADY_EXISTS": 3, + "AD_NOT_UNDER_ADGROUP": 4, + "CANNOT_OPERATE_ON_REMOVED_ADGROUPAD": 5, + "CANNOT_CREATE_DEPRECATED_ADS": 6, + "CANNOT_CREATE_TEXT_ADS": 7, + "EMPTY_FIELD": 8, + "RESOURCE_REFERENCED_IN_MULTIPLE_OPS": 9, +} + +func (x AdGroupAdErrorEnum_AdGroupAdError) String() string { + return proto.EnumName(AdGroupAdErrorEnum_AdGroupAdError_name, int32(x)) +} +func (AdGroupAdErrorEnum_AdGroupAdError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_error_bb843f1771a94842, []int{0, 0} +} + +// Container for enum describing possible ad group ad errors. +type AdGroupAdErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdErrorEnum) Reset() { *m = AdGroupAdErrorEnum{} } +func (m *AdGroupAdErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdErrorEnum) ProtoMessage() {} +func (*AdGroupAdErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_error_bb843f1771a94842, []int{0} +} +func (m *AdGroupAdErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdErrorEnum.Unmarshal(m, b) +} +func (m *AdGroupAdErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdErrorEnum.Merge(dst, src) +} +func (m *AdGroupAdErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupAdErrorEnum.Size(m) +} +func (m *AdGroupAdErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupAdErrorEnum)(nil), "google.ads.googleads.v1.errors.AdGroupAdErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdGroupAdErrorEnum_AdGroupAdError", AdGroupAdErrorEnum_AdGroupAdError_name, AdGroupAdErrorEnum_AdGroupAdError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_group_ad_error.proto", fileDescriptor_ad_group_ad_error_bb843f1771a94842) +} + +var fileDescriptor_ad_group_ad_error_bb843f1771a94842 = []byte{ + // 448 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x51, 0x6b, 0xd4, 0x40, + 0x10, 0xc7, 0xbd, 0x54, 0x5b, 0xdd, 0x82, 0x86, 0x28, 0x22, 0xa5, 0x94, 0x72, 0x0a, 0xbe, 0x25, + 0x1c, 0x82, 0x0f, 0xf1, 0x69, 0x2e, 0x3b, 0x77, 0x04, 0x73, 0x9b, 0x65, 0x93, 0x9c, 0xad, 0x1c, + 0x2c, 0xd1, 0x3d, 0xc2, 0x41, 0x9b, 0x3d, 0xb2, 0xd7, 0x7e, 0x20, 0x1f, 0x7d, 0xf7, 0x4b, 0x88, + 0x9f, 0xc4, 0x07, 0x3f, 0x83, 0x6c, 0xb6, 0x77, 0x70, 0xa0, 0x7d, 0xda, 
0x3f, 0x33, 0xbf, 0xff, + 0xcc, 0xee, 0xce, 0x90, 0xf7, 0x8d, 0xd6, 0xcd, 0xd5, 0x32, 0xaa, 0x95, 0x89, 0x9c, 0xb4, 0xea, + 0x76, 0x14, 0x2d, 0xbb, 0x4e, 0x77, 0x26, 0xaa, 0x95, 0x6c, 0x3a, 0x7d, 0xb3, 0x96, 0xb5, 0x92, + 0x7d, 0x28, 0x5c, 0x77, 0x7a, 0xa3, 0x83, 0x33, 0x07, 0x87, 0xb5, 0x32, 0xe1, 0xce, 0x17, 0xde, + 0x8e, 0x42, 0xe7, 0x3b, 0x39, 0xdd, 0xd6, 0x5d, 0xaf, 0xa2, 0xba, 0x6d, 0xf5, 0xa6, 0xde, 0xac, + 0x74, 0x6b, 0x9c, 0x7b, 0xf8, 0xcb, 0x23, 0x01, 0xa8, 0xa9, 0x2d, 0x0c, 0x0a, 0xad, 0x03, 0xdb, + 0x9b, 0xeb, 0xe1, 0x0f, 0x8f, 0x3c, 0xdd, 0x0f, 0x07, 0xcf, 0xc8, 0x71, 0xc5, 0x0a, 0x8e, 0x49, + 0x3a, 0x49, 0x91, 0xfa, 0x0f, 0x82, 0x63, 0x72, 0x54, 0xb1, 0x8f, 0x2c, 0xff, 0xc4, 0xfc, 0x41, + 0xf0, 0x86, 0x9c, 0x03, 0x95, 0x53, 0x91, 0x57, 0x5c, 0x02, 0x95, 0x19, 0x8c, 0x31, 0x93, 0x34, + 0xc7, 0x42, 0xb2, 0xbc, 0x94, 0x78, 0x91, 0x16, 0xa5, 0xef, 0xfd, 0x9b, 0x82, 0x4c, 0x20, 0xd0, + 0x4b, 0x07, 0x15, 0xfe, 0x41, 0xf0, 0x8a, 0xbc, 0x00, 0xda, 0xfb, 0x2a, 0x46, 0x51, 0x48, 0xa0, + 0xbd, 0xc3, 0x7f, 0x18, 0xbc, 0x25, 0xaf, 0x13, 0x60, 0x36, 0x93, 0x73, 0x14, 0x50, 0xa2, 0xcc, + 0x99, 0x14, 0x38, 0xcb, 0xe7, 0x48, 0xb7, 0x18, 0x50, 0xff, 0x51, 0x70, 0x4e, 0x4e, 0xef, 0xc0, + 0x44, 0xa0, 0xe5, 0x28, 0x72, 0x81, 0x09, 0x94, 0x3d, 0x57, 0xf8, 0x87, 0xc1, 0x09, 0x79, 0xb9, + 0x4f, 0x94, 0x78, 0x51, 0xf6, 0xb9, 0x23, 0xfb, 0x54, 0x9c, 0xf1, 0xf2, 0x52, 0x4e, 0x52, 0xcc, + 0xa8, 0xff, 0xd8, 0xf6, 0x15, 0x58, 0xe4, 0x95, 0x48, 0x50, 0x0a, 0x9c, 0xa0, 0x40, 0x96, 0x20, + 0x95, 0x29, 0x93, 0xb3, 0x2a, 0x2b, 0x53, 0x9e, 0xa1, 0xcc, 0x79, 0xe1, 0x3f, 0x19, 0xff, 0x19, + 0x90, 0xe1, 0x57, 0x7d, 0x1d, 0xde, 0x3f, 0x93, 0xf1, 0xf3, 0xfd, 0xbf, 0xe5, 0x76, 0x14, 0x7c, + 0xf0, 0x99, 0xde, 0xd9, 0x1a, 0x7d, 0x55, 0xb7, 0x4d, 0xa8, 0xbb, 0x26, 0x6a, 0x96, 0x6d, 0x3f, + 0xa8, 0xed, 0x4a, 0xac, 0x57, 0xe6, 0x7f, 0x1b, 0xf2, 0xc1, 0x1d, 0xdf, 0xbc, 0x83, 0x29, 0xc0, + 0x77, 0xef, 0x6c, 0xea, 0x8a, 0x81, 0x32, 0xa1, 0x93, 0x56, 0xcd, 0x47, 0x61, 0xdf, 0xd2, 0xfc, + 0xdc, 0x02, 0x0b, 0x50, 0x66, 0xb1, 0x03, 0x16, 0xf3, 0xd1, 0xc2, 0x01, 0xbf, 0xbd, 0xa1, 0x8b, + 0xc6, 0x31, 0x28, 0x13, 0xc7, 0x3b, 0x24, 0x8e, 0xe7, 0xa3, 0x38, 0x76, 0xd0, 0x97, 0xc3, 0xfe, + 0x76, 0xef, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0x5f, 0xb4, 0xd4, 0xe6, 0xbe, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_bid_modifier_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_bid_modifier_error.pb.go new file mode 100644 index 0000000..e9f1ad9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_bid_modifier_error.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_group_bid_modifier_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad group bid modifier errors. +type AdGroupBidModifierErrorEnum_AdGroupBidModifierError int32 + +const ( + // Enum unspecified. + AdGroupBidModifierErrorEnum_UNSPECIFIED AdGroupBidModifierErrorEnum_AdGroupBidModifierError = 0 + // The received error code is not known in this version. + AdGroupBidModifierErrorEnum_UNKNOWN AdGroupBidModifierErrorEnum_AdGroupBidModifierError = 1 + // The criterion ID does not support bid modification. + AdGroupBidModifierErrorEnum_CRITERION_ID_NOT_SUPPORTED AdGroupBidModifierErrorEnum_AdGroupBidModifierError = 2 + // Cannot override the bid modifier for the given criterion ID if the parent + // campaign is opted out of the same criterion. + AdGroupBidModifierErrorEnum_CANNOT_OVERRIDE_OPTED_OUT_CAMPAIGN_CRITERION_BID_MODIFIER AdGroupBidModifierErrorEnum_AdGroupBidModifierError = 3 +) + +var AdGroupBidModifierErrorEnum_AdGroupBidModifierError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CRITERION_ID_NOT_SUPPORTED", + 3: "CANNOT_OVERRIDE_OPTED_OUT_CAMPAIGN_CRITERION_BID_MODIFIER", +} +var AdGroupBidModifierErrorEnum_AdGroupBidModifierError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CRITERION_ID_NOT_SUPPORTED": 2, + "CANNOT_OVERRIDE_OPTED_OUT_CAMPAIGN_CRITERION_BID_MODIFIER": 3, +} + +func (x AdGroupBidModifierErrorEnum_AdGroupBidModifierError) String() string { + return proto.EnumName(AdGroupBidModifierErrorEnum_AdGroupBidModifierError_name, int32(x)) +} +func (AdGroupBidModifierErrorEnum_AdGroupBidModifierError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_error_b3d4b89f8531df2e, []int{0, 0} +} + +// Container for enum describing possible ad group bid modifier errors. 
+type AdGroupBidModifierErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupBidModifierErrorEnum) Reset() { *m = AdGroupBidModifierErrorEnum{} } +func (m *AdGroupBidModifierErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupBidModifierErrorEnum) ProtoMessage() {} +func (*AdGroupBidModifierErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_error_b3d4b89f8531df2e, []int{0} +} +func (m *AdGroupBidModifierErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupBidModifierErrorEnum.Unmarshal(m, b) +} +func (m *AdGroupBidModifierErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupBidModifierErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupBidModifierErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupBidModifierErrorEnum.Merge(dst, src) +} +func (m *AdGroupBidModifierErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupBidModifierErrorEnum.Size(m) +} +func (m *AdGroupBidModifierErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupBidModifierErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupBidModifierErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupBidModifierErrorEnum)(nil), "google.ads.googleads.v1.errors.AdGroupBidModifierErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdGroupBidModifierErrorEnum_AdGroupBidModifierError", AdGroupBidModifierErrorEnum_AdGroupBidModifierError_name, AdGroupBidModifierErrorEnum_AdGroupBidModifierError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_group_bid_modifier_error.proto", fileDescriptor_ad_group_bid_modifier_error_b3d4b89f8531df2e) +} + +var fileDescriptor_ad_group_bid_modifier_error_b3d4b89f8531df2e = []byte{ + // 368 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcd, 0xaa, 0xd4, 0x30, + 0x14, 0xb6, 0xbd, 0xa0, 0x90, 0xbb, 0xb0, 0x74, 0x23, 0x5c, 0x2f, 0xb3, 0xe8, 0x03, 0xa4, 0x14, + 0x57, 0x46, 0x04, 0xd3, 0x26, 0x96, 0x20, 0x93, 0x94, 0x4e, 0x5b, 0x41, 0x0a, 0xa1, 0x63, 0x6a, + 0x29, 0xcc, 0x34, 0xa5, 0x99, 0x99, 0x47, 0xf1, 0x01, 0x5c, 0xfa, 0x00, 0x3e, 0x84, 0x8f, 0xe2, + 0x0b, 0xb8, 0x95, 0x36, 0x33, 0xe3, 0x6a, 0xee, 0x2a, 0x1f, 0xe7, 0x7c, 0x3f, 0xe7, 0xe4, 0x80, + 0x0f, 0x9d, 0xd6, 0xdd, 0xae, 0x0d, 0x1b, 0x65, 0x42, 0x0b, 0x67, 0x74, 0x8a, 0xc2, 0x76, 0x9a, + 0xf4, 0x64, 0xc2, 0x46, 0xc9, 0x6e, 0xd2, 0xc7, 0x51, 0x6e, 0x7b, 0x25, 0xf7, 0x5a, 0xf5, 0xdf, + 0xfa, 0x76, 0x92, 0x4b, 0x13, 0x8e, 0x93, 0x3e, 0x68, 0x7f, 0x65, 0x65, 0xb0, 0x51, 0x06, 0x5e, + 0x1d, 0xe0, 0x29, 0x82, 0xd6, 0xe1, 0xe1, 0xf1, 0x92, 0x30, 0xf6, 0x61, 0x33, 0x0c, 0xfa, 0xd0, + 0x1c, 0x7a, 0x3d, 0x18, 0xab, 0x0e, 0x7e, 0x39, 0xe0, 0x35, 0x56, 0xe9, 0x1c, 0x11, 0xf7, 0x6a, + 0x7d, 0x0e, 0xa0, 0xb3, 0x94, 0x0e, 0xc7, 0x7d, 0xf0, 0xdd, 0x01, 0xaf, 0x6e, 0xf4, 0xfd, 0x97, + 0xe0, 0xbe, 0xe4, 0x9b, 0x8c, 0x26, 0xec, 0x23, 0xa3, 0xc4, 0x7b, 0xe6, 0xdf, 0x83, 0x17, 0x25, + 0xff, 0xc4, 0xc5, 0x67, 0xee, 0x39, 0xfe, 0x0a, 0x3c, 0x24, 0x39, 0x2b, 0x68, 0xce, 0x04, 0x97, + 0x8c, 0x48, 0x2e, 0x0a, 0xb9, 0x29, 0xb3, 0x4c, 0xe4, 0x05, 0x25, 0x9e, 0xeb, 0xbf, 0x07, 0x6f, + 0x13, 0xcc, 0xe7, 0xaa, 0xa8, 0x68, 0x9e, 0x33, 0x42, 0xa5, 0xc8, 0x0a, 0x4a, 0xa4, 0x28, 0x0b, + 0x99, 0xe0, 0x75, 0x86, 0x59, 0xca, 0xe5, 0x7f, 0x8b, 0x98, 0x11, 0xb9, 0x16, 0x64, 
0xce, 0xca, + 0xbd, 0xbb, 0xf8, 0xaf, 0x03, 0x82, 0xaf, 0x7a, 0x0f, 0x9f, 0xde, 0x3e, 0x7e, 0xbc, 0x31, 0x7c, + 0x36, 0x6f, 0x9f, 0x39, 0x5f, 0xc8, 0x59, 0xdf, 0xe9, 0x5d, 0x33, 0x74, 0x50, 0x4f, 0x5d, 0xd8, + 0xb5, 0xc3, 0xf2, 0x37, 0x97, 0x7b, 0x8c, 0xbd, 0xb9, 0x75, 0x9e, 0x77, 0xf6, 0xf9, 0xe1, 0xde, + 0xa5, 0x18, 0xff, 0x74, 0x57, 0xa9, 0x35, 0xc3, 0xca, 0x40, 0x0b, 0x67, 0x54, 0x45, 0x70, 0x89, + 0x34, 0xbf, 0x2f, 0x84, 0x1a, 0x2b, 0x53, 0x5f, 0x09, 0x75, 0x15, 0xd5, 0x96, 0xf0, 0xc7, 0x0d, + 0x6c, 0x15, 0x21, 0xac, 0x0c, 0x42, 0x57, 0x0a, 0x42, 0x55, 0x84, 0x90, 0x25, 0x6d, 0x9f, 0x2f, + 0xd3, 0xbd, 0xf9, 0x17, 0x00, 0x00, 0xff, 0xff, 0x43, 0x72, 0x94, 0x76, 0x3b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_criterion_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_criterion_error.pb.go new file mode 100644 index 0000000..62dacb2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_criterion_error.pb.go @@ -0,0 +1,309 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_group_criterion_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad group criterion errors. +type AdGroupCriterionErrorEnum_AdGroupCriterionError int32 + +const ( + // Enum unspecified. + AdGroupCriterionErrorEnum_UNSPECIFIED AdGroupCriterionErrorEnum_AdGroupCriterionError = 0 + // The received error code is not known in this version. + AdGroupCriterionErrorEnum_UNKNOWN AdGroupCriterionErrorEnum_AdGroupCriterionError = 1 + // No link found between the AdGroupCriterion and the label. + AdGroupCriterionErrorEnum_AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST AdGroupCriterionErrorEnum_AdGroupCriterionError = 2 + // The label has already been attached to the AdGroupCriterion. + AdGroupCriterionErrorEnum_AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS AdGroupCriterionErrorEnum_AdGroupCriterionError = 3 + // Negative AdGroupCriterion cannot have labels. + AdGroupCriterionErrorEnum_CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION AdGroupCriterionErrorEnum_AdGroupCriterionError = 4 + // Too many operations for a single call. + AdGroupCriterionErrorEnum_TOO_MANY_OPERATIONS AdGroupCriterionErrorEnum_AdGroupCriterionError = 5 + // Negative ad group criteria are not updateable. + AdGroupCriterionErrorEnum_CANT_UPDATE_NEGATIVE AdGroupCriterionErrorEnum_AdGroupCriterionError = 6 + // Concrete type of criterion (keyword v.s. placement) is required for ADD + // and SET operations. + AdGroupCriterionErrorEnum_CONCRETE_TYPE_REQUIRED AdGroupCriterionErrorEnum_AdGroupCriterionError = 7 + // Bid is incompatible with ad group's bidding settings. 
+ AdGroupCriterionErrorEnum_BID_INCOMPATIBLE_WITH_ADGROUP AdGroupCriterionErrorEnum_AdGroupCriterionError = 8 + // Cannot target and exclude the same criterion at once. + AdGroupCriterionErrorEnum_CANNOT_TARGET_AND_EXCLUDE AdGroupCriterionErrorEnum_AdGroupCriterionError = 9 + // The URL of a placement is invalid. + AdGroupCriterionErrorEnum_ILLEGAL_URL AdGroupCriterionErrorEnum_AdGroupCriterionError = 10 + // Keyword text was invalid. + AdGroupCriterionErrorEnum_INVALID_KEYWORD_TEXT AdGroupCriterionErrorEnum_AdGroupCriterionError = 11 + // Destination URL was invalid. + AdGroupCriterionErrorEnum_INVALID_DESTINATION_URL AdGroupCriterionErrorEnum_AdGroupCriterionError = 12 + // The destination url must contain at least one tag (e.g. {lpurl}) + AdGroupCriterionErrorEnum_MISSING_DESTINATION_URL_TAG AdGroupCriterionErrorEnum_AdGroupCriterionError = 13 + // Keyword-level cpm bid is not supported + AdGroupCriterionErrorEnum_KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM AdGroupCriterionErrorEnum_AdGroupCriterionError = 14 + // For example, cannot add a biddable ad group criterion that had been + // removed. + AdGroupCriterionErrorEnum_INVALID_USER_STATUS AdGroupCriterionErrorEnum_AdGroupCriterionError = 15 + // Criteria type cannot be targeted for the ad group. Either the account is + // restricted to keywords only, the criteria type is incompatible with the + // campaign's bidding strategy, or the criteria type can only be applied to + // campaigns. + AdGroupCriterionErrorEnum_CANNOT_ADD_CRITERIA_TYPE AdGroupCriterionErrorEnum_AdGroupCriterionError = 16 + // Criteria type cannot be excluded for the ad group. Refer to the + // documentation for a specific criterion to check if it is excludable. + AdGroupCriterionErrorEnum_CANNOT_EXCLUDE_CRITERIA_TYPE AdGroupCriterionErrorEnum_AdGroupCriterionError = 17 + // Partial failure is not supported for shopping campaign mutate operations. + AdGroupCriterionErrorEnum_CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE AdGroupCriterionErrorEnum_AdGroupCriterionError = 27 + // Operations in the mutate request changes too many shopping ad groups. + // Please split requests for multiple shopping ad groups across multiple + // requests. + AdGroupCriterionErrorEnum_OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS AdGroupCriterionErrorEnum_AdGroupCriterionError = 28 + // Not allowed to modify url fields of an ad group criterion if there are + // duplicate elements for that ad group criterion in the request. + AdGroupCriterionErrorEnum_CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS AdGroupCriterionErrorEnum_AdGroupCriterionError = 29 + // Cannot set url fields without also setting final urls. + AdGroupCriterionErrorEnum_CANNOT_SET_WITHOUT_FINAL_URLS AdGroupCriterionErrorEnum_AdGroupCriterionError = 30 + // Cannot clear final urls if final mobile urls exist. + AdGroupCriterionErrorEnum_CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST AdGroupCriterionErrorEnum_AdGroupCriterionError = 31 + // Cannot clear final urls if final app urls exist. + AdGroupCriterionErrorEnum_CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST AdGroupCriterionErrorEnum_AdGroupCriterionError = 32 + // Cannot clear final urls if tracking url template exists. + AdGroupCriterionErrorEnum_CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS AdGroupCriterionErrorEnum_AdGroupCriterionError = 33 + // Cannot clear final urls if url custom parameters exist. 
+ AdGroupCriterionErrorEnum_CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST AdGroupCriterionErrorEnum_AdGroupCriterionError = 34 + // Cannot set both destination url and final urls. + AdGroupCriterionErrorEnum_CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS AdGroupCriterionErrorEnum_AdGroupCriterionError = 35 + // Cannot set both destination url and tracking url template. + AdGroupCriterionErrorEnum_CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE AdGroupCriterionErrorEnum_AdGroupCriterionError = 36 + // Final urls are not supported for this criterion type. + AdGroupCriterionErrorEnum_FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE AdGroupCriterionErrorEnum_AdGroupCriterionError = 37 + // Final mobile urls are not supported for this criterion type. + AdGroupCriterionErrorEnum_FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE AdGroupCriterionErrorEnum_AdGroupCriterionError = 38 + // Ad group is invalid due to the listing groups it contains. + AdGroupCriterionErrorEnum_INVALID_LISTING_GROUP_HIERARCHY AdGroupCriterionErrorEnum_AdGroupCriterionError = 39 + // Listing group unit cannot have children. + AdGroupCriterionErrorEnum_LISTING_GROUP_UNIT_CANNOT_HAVE_CHILDREN AdGroupCriterionErrorEnum_AdGroupCriterionError = 40 + // Subdivided listing groups must have an "others" case. + AdGroupCriterionErrorEnum_LISTING_GROUP_SUBDIVISION_REQUIRES_OTHERS_CASE AdGroupCriterionErrorEnum_AdGroupCriterionError = 41 + // Dimension type of listing group must be the same as that of its siblings. + AdGroupCriterionErrorEnum_LISTING_GROUP_REQUIRES_SAME_DIMENSION_TYPE_AS_SIBLINGS AdGroupCriterionErrorEnum_AdGroupCriterionError = 42 + // Listing group cannot be added to the ad group because it already exists. + AdGroupCriterionErrorEnum_LISTING_GROUP_ALREADY_EXISTS AdGroupCriterionErrorEnum_AdGroupCriterionError = 43 + // Listing group referenced in the operation was not found in the ad group. + AdGroupCriterionErrorEnum_LISTING_GROUP_DOES_NOT_EXIST AdGroupCriterionErrorEnum_AdGroupCriterionError = 44 + // Recursive removal failed because listing group subdivision is being + // created or modified in this request. + AdGroupCriterionErrorEnum_LISTING_GROUP_CANNOT_BE_REMOVED AdGroupCriterionErrorEnum_AdGroupCriterionError = 45 + // Listing group type is not allowed for specified ad group criterion type. + AdGroupCriterionErrorEnum_INVALID_LISTING_GROUP_TYPE AdGroupCriterionErrorEnum_AdGroupCriterionError = 46 + // Listing group in an ADD operation specifies a non temporary criterion id. 
+ AdGroupCriterionErrorEnum_LISTING_GROUP_ADD_MAY_ONLY_USE_TEMP_ID AdGroupCriterionErrorEnum_AdGroupCriterionError = 47 +) + +var AdGroupCriterionErrorEnum_AdGroupCriterionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST", + 3: "AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS", + 4: "CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION", + 5: "TOO_MANY_OPERATIONS", + 6: "CANT_UPDATE_NEGATIVE", + 7: "CONCRETE_TYPE_REQUIRED", + 8: "BID_INCOMPATIBLE_WITH_ADGROUP", + 9: "CANNOT_TARGET_AND_EXCLUDE", + 10: "ILLEGAL_URL", + 11: "INVALID_KEYWORD_TEXT", + 12: "INVALID_DESTINATION_URL", + 13: "MISSING_DESTINATION_URL_TAG", + 14: "KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM", + 15: "INVALID_USER_STATUS", + 16: "CANNOT_ADD_CRITERIA_TYPE", + 17: "CANNOT_EXCLUDE_CRITERIA_TYPE", + 27: "CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE", + 28: "OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS", + 29: "CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS", + 30: "CANNOT_SET_WITHOUT_FINAL_URLS", + 31: "CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST", + 32: "CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST", + 33: "CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS", + 34: "CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST", + 35: "CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS", + 36: "CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE", + 37: "FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE", + 38: "FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE", + 39: "INVALID_LISTING_GROUP_HIERARCHY", + 40: "LISTING_GROUP_UNIT_CANNOT_HAVE_CHILDREN", + 41: "LISTING_GROUP_SUBDIVISION_REQUIRES_OTHERS_CASE", + 42: "LISTING_GROUP_REQUIRES_SAME_DIMENSION_TYPE_AS_SIBLINGS", + 43: "LISTING_GROUP_ALREADY_EXISTS", + 44: "LISTING_GROUP_DOES_NOT_EXIST", + 45: "LISTING_GROUP_CANNOT_BE_REMOVED", + 46: "INVALID_LISTING_GROUP_TYPE", + 47: "LISTING_GROUP_ADD_MAY_ONLY_USE_TEMP_ID", +} +var AdGroupCriterionErrorEnum_AdGroupCriterionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP_CRITERION_LABEL_DOES_NOT_EXIST": 2, + "AD_GROUP_CRITERION_LABEL_ALREADY_EXISTS": 3, + "CANNOT_ADD_LABEL_TO_NEGATIVE_CRITERION": 4, + "TOO_MANY_OPERATIONS": 5, + "CANT_UPDATE_NEGATIVE": 6, + "CONCRETE_TYPE_REQUIRED": 7, + "BID_INCOMPATIBLE_WITH_ADGROUP": 8, + "CANNOT_TARGET_AND_EXCLUDE": 9, + "ILLEGAL_URL": 10, + "INVALID_KEYWORD_TEXT": 11, + "INVALID_DESTINATION_URL": 12, + "MISSING_DESTINATION_URL_TAG": 13, + "KEYWORD_LEVEL_BID_NOT_SUPPORTED_FOR_MANUALCPM": 14, + "INVALID_USER_STATUS": 15, + "CANNOT_ADD_CRITERIA_TYPE": 16, + "CANNOT_EXCLUDE_CRITERIA_TYPE": 17, + "CAMPAIGN_TYPE_NOT_COMPATIBLE_WITH_PARTIAL_FAILURE": 27, + "OPERATIONS_FOR_TOO_MANY_SHOPPING_ADGROUPS": 28, + "CANNOT_MODIFY_URL_FIELDS_WITH_DUPLICATE_ELEMENTS": 29, + "CANNOT_SET_WITHOUT_FINAL_URLS": 30, + "CANNOT_CLEAR_FINAL_URLS_IF_FINAL_MOBILE_URLS_EXIST": 31, + "CANNOT_CLEAR_FINAL_URLS_IF_FINAL_APP_URLS_EXIST": 32, + "CANNOT_CLEAR_FINAL_URLS_IF_TRACKING_URL_TEMPLATE_EXISTS": 33, + "CANNOT_CLEAR_FINAL_URLS_IF_URL_CUSTOM_PARAMETERS_EXIST": 34, + "CANNOT_SET_BOTH_DESTINATION_URL_AND_FINAL_URLS": 35, + "CANNOT_SET_BOTH_DESTINATION_URL_AND_TRACKING_URL_TEMPLATE": 36, + "FINAL_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE": 37, + "FINAL_MOBILE_URLS_NOT_SUPPORTED_FOR_CRITERION_TYPE": 38, + "INVALID_LISTING_GROUP_HIERARCHY": 39, + "LISTING_GROUP_UNIT_CANNOT_HAVE_CHILDREN": 40, + "LISTING_GROUP_SUBDIVISION_REQUIRES_OTHERS_CASE": 41, + "LISTING_GROUP_REQUIRES_SAME_DIMENSION_TYPE_AS_SIBLINGS": 42, + "LISTING_GROUP_ALREADY_EXISTS": 43, 
+ "LISTING_GROUP_DOES_NOT_EXIST": 44, + "LISTING_GROUP_CANNOT_BE_REMOVED": 45, + "INVALID_LISTING_GROUP_TYPE": 46, + "LISTING_GROUP_ADD_MAY_ONLY_USE_TEMP_ID": 47, +} + +func (x AdGroupCriterionErrorEnum_AdGroupCriterionError) String() string { + return proto.EnumName(AdGroupCriterionErrorEnum_AdGroupCriterionError_name, int32(x)) +} +func (AdGroupCriterionErrorEnum_AdGroupCriterionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_error_22d32d77f0f0a104, []int{0, 0} +} + +// Container for enum describing possible ad group criterion errors. +type AdGroupCriterionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionErrorEnum) Reset() { *m = AdGroupCriterionErrorEnum{} } +func (m *AdGroupCriterionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionErrorEnum) ProtoMessage() {} +func (*AdGroupCriterionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_error_22d32d77f0f0a104, []int{0} +} +func (m *AdGroupCriterionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionErrorEnum.Unmarshal(m, b) +} +func (m *AdGroupCriterionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionErrorEnum.Merge(dst, src) +} +func (m *AdGroupCriterionErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionErrorEnum.Size(m) +} +func (m *AdGroupCriterionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupCriterionErrorEnum)(nil), "google.ads.googleads.v1.errors.AdGroupCriterionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdGroupCriterionErrorEnum_AdGroupCriterionError", AdGroupCriterionErrorEnum_AdGroupCriterionError_name, AdGroupCriterionErrorEnum_AdGroupCriterionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_group_criterion_error.proto", fileDescriptor_ad_group_criterion_error_22d32d77f0f0a104) +} + +var fileDescriptor_ad_group_criterion_error_22d32d77f0f0a104 = []byte{ + // 976 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xdd, 0x72, 0x1b, 0x35, + 0x14, 0xa6, 0x29, 0xb4, 0xa0, 0x06, 0x2a, 0xc4, 0x4f, 0xdb, 0xfc, 0xb6, 0x29, 0xb4, 0xb4, 0x21, + 0x36, 0x6e, 0xa1, 0x0c, 0xee, 0xf4, 0xe2, 0x78, 0x75, 0xbc, 0xd6, 0x44, 0x2b, 0x2d, 0x92, 0xd6, + 0x89, 0x99, 0xcc, 0x68, 0x4c, 0x9d, 0xf1, 0x64, 0xa6, 0xf5, 0x66, 0xec, 0xb4, 0x0f, 0xc4, 0x25, + 0x6f, 0xc0, 0x2b, 0xf0, 0x02, 0xbc, 0x03, 0xf7, 0xdc, 0x33, 0xda, 0x5d, 0x3b, 0x8e, 0x13, 0x52, + 0xae, 0xa2, 0xf8, 0x7c, 0x9f, 0xf4, 0x9d, 0xef, 0x9c, 0x3d, 0x87, 0xbc, 0x18, 0xe6, 0xf9, 0xf0, + 0xd5, 0x61, 0xbd, 0x3f, 0x98, 0xd4, 0xcb, 0x63, 0x38, 0xbd, 0x6d, 0xd4, 0x0f, 0xc7, 0xe3, 0x7c, + 0x3c, 0xa9, 0xf7, 0x07, 0x7e, 0x38, 0xce, 0xdf, 0x1c, 0xfb, 0x97, 0xe3, 0xa3, 0x93, 0xc3, 0xf1, + 0x51, 0x3e, 0xf2, 0x45, 0xa4, 0x76, 0x3c, 0xce, 0x4f, 0x72, 0xb6, 0x51, 0x72, 0x6a, 0xfd, 0xc1, + 0xa4, 0x36, 0xa3, 0xd7, 0xde, 0x36, 0x6a, 0x25, 0x7d, 0x65, 0x6d, 0x7a, 0xfd, 0xf1, 0x51, 0xbd, + 0x3f, 0x1a, 0xe5, 0x27, 0xfd, 0x93, 0xa3, 0x7c, 0x34, 0x29, 
0xd9, 0x5b, 0x7f, 0x2d, 0x93, 0x3b, + 0x30, 0x88, 0xc3, 0xfd, 0xd1, 0xf4, 0x7a, 0x0c, 0x44, 0x1c, 0xbd, 0x79, 0xbd, 0xf5, 0xc7, 0x32, + 0xf9, 0xe2, 0xc2, 0x28, 0xbb, 0x49, 0x6e, 0x64, 0xca, 0xa6, 0x18, 0x89, 0xb6, 0x40, 0x4e, 0xdf, + 0x63, 0x37, 0xc8, 0xf5, 0x4c, 0xed, 0x2a, 0xbd, 0xa7, 0xe8, 0x15, 0xb6, 0x4d, 0x1e, 0x02, 0xf7, + 0xb1, 0xd1, 0x59, 0xea, 0x23, 0x23, 0x1c, 0x1a, 0xa1, 0x95, 0x97, 0xd0, 0x42, 0xe9, 0xb9, 0x46, + 0xeb, 0x95, 0x76, 0x1e, 0xf7, 0x85, 0x75, 0x74, 0xe9, 0x52, 0x30, 0x48, 0x83, 0xc0, 0x7b, 0x25, + 0xd6, 0xd2, 0xab, 0xec, 0x31, 0x79, 0x10, 0x81, 0x0a, 0x74, 0xe0, 0xbc, 0x02, 0x39, 0xed, 0x15, + 0xc6, 0xe0, 0x44, 0x17, 0x4f, 0x2f, 0xa0, 0xef, 0xb3, 0x5b, 0xe4, 0x33, 0xa7, 0xb5, 0x4f, 0x40, + 0xf5, 0xbc, 0x4e, 0xd1, 0x80, 0x13, 0x5a, 0x59, 0xfa, 0x01, 0xbb, 0x4d, 0x3e, 0x8f, 0x40, 0x39, + 0x9f, 0xa5, 0x1c, 0x1c, 0xce, 0xc8, 0xf4, 0x1a, 0x5b, 0x21, 0x5f, 0x46, 0x5a, 0x45, 0x06, 0x1d, + 0x7a, 0xd7, 0x4b, 0xd1, 0x1b, 0xfc, 0x39, 0x13, 0x06, 0x39, 0xbd, 0xce, 0xee, 0x91, 0xf5, 0x96, + 0xe0, 0x5e, 0xa8, 0x48, 0x27, 0x29, 0x38, 0xd1, 0x92, 0xe8, 0xf7, 0x84, 0xeb, 0x78, 0xe0, 0x85, + 0x78, 0xfa, 0x21, 0x5b, 0x27, 0x77, 0x2a, 0x75, 0x0e, 0x4c, 0x8c, 0xce, 0x83, 0xe2, 0x1e, 0xf7, + 0x23, 0x99, 0x71, 0xa4, 0x1f, 0x05, 0xd3, 0x84, 0x94, 0x18, 0x83, 0xf4, 0x99, 0x91, 0x94, 0x04, + 0x21, 0x42, 0x75, 0x41, 0x0a, 0xee, 0x77, 0xb1, 0xb7, 0xa7, 0x0d, 0xf7, 0x0e, 0xf7, 0x1d, 0xbd, + 0xc1, 0x56, 0xc9, 0xad, 0x69, 0x84, 0xa3, 0x75, 0x42, 0x15, 0xe2, 0x0b, 0xda, 0x32, 0xdb, 0x24, + 0xab, 0x89, 0xb0, 0x56, 0xa8, 0x78, 0x31, 0xe8, 0x1d, 0xc4, 0xf4, 0x63, 0xd6, 0x20, 0x3b, 0xd3, + 0xfb, 0x24, 0x76, 0x51, 0xfa, 0x20, 0x3c, 0xc8, 0xb2, 0x59, 0x9a, 0x6a, 0xe3, 0x90, 0xfb, 0xb6, + 0x36, 0xc1, 0x99, 0x0c, 0x64, 0x94, 0x26, 0xf4, 0x93, 0x60, 0xd6, 0xf4, 0xc1, 0xcc, 0xa2, 0xf1, + 0xd6, 0x81, 0xcb, 0x2c, 0xbd, 0xc9, 0xd6, 0xc8, 0xed, 0x39, 0xc7, 0x2b, 0x7f, 0xa1, 0x70, 0x87, + 0x52, 0x76, 0x97, 0xac, 0x55, 0xd1, 0x2a, 0xcd, 0x05, 0xc4, 0xa7, 0xec, 0x07, 0xd2, 0x88, 0x20, + 0x49, 0x41, 0xc4, 0xaa, 0xb4, 0x34, 0x80, 0x17, 0x2d, 0x4c, 0xc1, 0x38, 0x01, 0xd2, 0xb7, 0x41, + 0xc8, 0xcc, 0x20, 0x5d, 0x65, 0x3b, 0xe4, 0xd1, 0x69, 0xcd, 0x0a, 0xb5, 0xb3, 0x5a, 0xda, 0x8e, + 0x4e, 0xd3, 0x90, 0x7c, 0x65, 0xbc, 0xa5, 0x6b, 0xec, 0x7b, 0xf2, 0x5d, 0xa5, 0x23, 0xd1, 0x5c, + 0xb4, 0x7b, 0x85, 0x19, 0x6d, 0x81, 0x92, 0xdb, 0xf2, 0x05, 0x9e, 0xa5, 0x52, 0x44, 0xa1, 0xdc, + 0x28, 0x31, 0x41, 0xe5, 0x2c, 0x5d, 0x0f, 0x25, 0xad, 0x58, 0x16, 0x5d, 0x81, 0xd3, 0x99, 0xf3, + 0x6d, 0xa1, 0xca, 0x0a, 0x59, 0xba, 0xc1, 0x9e, 0x91, 0x27, 0x15, 0x24, 0x92, 0x08, 0x66, 0x2e, + 0xe8, 0x45, 0xbb, 0xfa, 0x2f, 0xd1, 0x2d, 0x21, 0xb1, 0xfc, 0xb1, 0xec, 0xea, 0x4d, 0xf6, 0x94, + 0xd4, 0xdf, 0xc9, 0x83, 0x34, 0x9d, 0x27, 0xdd, 0x65, 0xcf, 0xc9, 0x8f, 0x97, 0x90, 0x9c, 0x81, + 0x68, 0x37, 0xe4, 0x5d, 0x14, 0x1a, 0x93, 0x54, 0x16, 0xe9, 0x94, 0x9f, 0xc6, 0x3d, 0xd6, 0x24, + 0xcf, 0x2e, 0x21, 0x07, 0x4e, 0x94, 0x59, 0xa7, 0x93, 0x60, 0x36, 0x24, 0xe8, 0xd0, 0x4c, 0x1f, + 0xde, 0x62, 0x4f, 0x48, 0x6d, 0xce, 0x88, 0x96, 0x0e, 0x86, 0x2d, 0x74, 0x56, 0x68, 0xe5, 0x39, + 0x67, 0xee, 0xb3, 0x17, 0xe4, 0xa7, 0xff, 0xc3, 0xb9, 0x50, 0x35, 0xfd, 0x8a, 0xd5, 0xc9, 0xf6, + 0x9c, 0xc2, 0xf3, 0xcd, 0x79, 0x3a, 0x0a, 0x8a, 0x46, 0xfa, 0x3a, 0x54, 0xe2, 0xbc, 0xdd, 0xef, + 0xe4, 0x3d, 0x60, 0xf7, 0xc9, 0xe6, 0xb4, 0xb3, 0xa5, 0x08, 0xfa, 0xe2, 0x6a, 0xd8, 0x74, 0x04, + 0x1a, 0x30, 0x51, 0xa7, 0x47, 0x1f, 0x86, 0x21, 0x74, 0x36, 0x98, 0x29, 0xe1, 0x7c, 0x95, 0x5f, + 0x07, 0xc2, 0x64, 0xe9, 0x08, 0xc9, 0x0d, 0x2a, 0xfa, 0x4d, 0x70, 0xeb, 0x2c, 0xd8, 
0x66, 0x2d, + 0x2e, 0xba, 0xc2, 0x86, 0x57, 0xab, 0x89, 0x61, 0xbd, 0x76, 0x9d, 0x60, 0x71, 0x04, 0x16, 0xe9, + 0xa3, 0x50, 0x9d, 0xb3, 0x9c, 0x19, 0xce, 0x42, 0x82, 0x9e, 0x8b, 0x04, 0x95, 0x9d, 0xaa, 0xf6, + 0x60, 0xbd, 0x15, 0x2d, 0x29, 0x54, 0x6c, 0xe9, 0xe3, 0xf0, 0x91, 0x9d, 0xe5, 0x2e, 0x8c, 0xc5, + 0xed, 0xf3, 0x88, 0x85, 0x29, 0xfb, 0x6d, 0x70, 0xe1, 0x2c, 0xa2, 0xca, 0xad, 0x15, 0x66, 0x5c, + 0xa2, 0xbb, 0xc8, 0xe9, 0x0e, 0xdb, 0x20, 0x2b, 0x17, 0x5b, 0x55, 0x58, 0x59, 0x0b, 0xd3, 0x77, + 0x41, 0x08, 0xe7, 0x3e, 0x81, 0x9e, 0xd7, 0x4a, 0xf6, 0xc2, 0xdc, 0x28, 0x0a, 0xec, 0x05, 0xa7, + 0xf5, 0xd6, 0x3f, 0x57, 0xc8, 0xd6, 0xcb, 0xfc, 0x75, 0xed, 0xf2, 0xf5, 0xd4, 0x5a, 0xb9, 0x70, + 0xbf, 0xa4, 0x61, 0x39, 0xa5, 0x57, 0x7e, 0xe1, 0x15, 0x7b, 0x98, 0xbf, 0xea, 0x8f, 0x86, 0xb5, + 0x7c, 0x3c, 0xac, 0x0f, 0x0f, 0x47, 0xc5, 0xea, 0x9a, 0xee, 0xca, 0xe3, 0xa3, 0xc9, 0x7f, 0xad, + 0xce, 0xe7, 0xe5, 0x9f, 0xdf, 0x96, 0xae, 0xc6, 0x00, 0xbf, 0x2f, 0x6d, 0xc4, 0xe5, 0x65, 0x30, + 0x98, 0xd4, 0xca, 0x63, 0x38, 0x75, 0x1b, 0xb5, 0xe2, 0xc9, 0xc9, 0x9f, 0x53, 0xc0, 0x01, 0x0c, + 0x26, 0x07, 0x33, 0xc0, 0x41, 0xb7, 0x71, 0x50, 0x02, 0xfe, 0x5e, 0xda, 0x2a, 0x7f, 0x6d, 0x36, + 0x61, 0x30, 0x69, 0x36, 0x67, 0x90, 0x66, 0xb3, 0xdb, 0x68, 0x36, 0x4b, 0xd0, 0xaf, 0xd7, 0x0a, + 0x75, 0x4f, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x4f, 0xf6, 0x6e, 0xef, 0xd7, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_error.pb.go new file mode 100644 index 0000000..b9e2ce2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_error.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_group_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad group errors. +type AdGroupErrorEnum_AdGroupError int32 + +const ( + // Enum unspecified. + AdGroupErrorEnum_UNSPECIFIED AdGroupErrorEnum_AdGroupError = 0 + // The received error code is not known in this version. + AdGroupErrorEnum_UNKNOWN AdGroupErrorEnum_AdGroupError = 1 + // AdGroup with the same name already exists for the campaign. + AdGroupErrorEnum_DUPLICATE_ADGROUP_NAME AdGroupErrorEnum_AdGroupError = 2 + // AdGroup name is not valid. + AdGroupErrorEnum_INVALID_ADGROUP_NAME AdGroupErrorEnum_AdGroupError = 3 + // Advertiser is not allowed to target sites or set site bids that are not + // on the Google Search Network. + AdGroupErrorEnum_ADVERTISER_NOT_ON_CONTENT_NETWORK AdGroupErrorEnum_AdGroupError = 5 + // Bid amount is too big. + AdGroupErrorEnum_BID_TOO_BIG AdGroupErrorEnum_AdGroupError = 6 + // AdGroup bid does not match the campaign's bidding strategy. 
+ AdGroupErrorEnum_BID_TYPE_AND_BIDDING_STRATEGY_MISMATCH AdGroupErrorEnum_AdGroupError = 7 + // AdGroup name is required for Add. + AdGroupErrorEnum_MISSING_ADGROUP_NAME AdGroupErrorEnum_AdGroupError = 8 + // No link found between the ad group and the label. + AdGroupErrorEnum_ADGROUP_LABEL_DOES_NOT_EXIST AdGroupErrorEnum_AdGroupError = 9 + // The label has already been attached to the ad group. + AdGroupErrorEnum_ADGROUP_LABEL_ALREADY_EXISTS AdGroupErrorEnum_AdGroupError = 10 + // The CriterionTypeGroup is not supported for the content bid dimension. + AdGroupErrorEnum_INVALID_CONTENT_BID_CRITERION_TYPE_GROUP AdGroupErrorEnum_AdGroupError = 11 + // The ad group type is not compatible with the campaign channel type. + AdGroupErrorEnum_AD_GROUP_TYPE_NOT_VALID_FOR_ADVERTISING_CHANNEL_TYPE AdGroupErrorEnum_AdGroupError = 12 + // The ad group type is not supported in the country of sale of the + // campaign. + AdGroupErrorEnum_ADGROUP_TYPE_NOT_SUPPORTED_FOR_CAMPAIGN_SALES_COUNTRY AdGroupErrorEnum_AdGroupError = 13 + // Ad groups of AdGroupType.SEARCH_DYNAMIC_ADS can only be added to + // campaigns that have DynamicSearchAdsSetting attached. + AdGroupErrorEnum_CANNOT_ADD_ADGROUP_OF_TYPE_DSA_TO_CAMPAIGN_WITHOUT_DSA_SETTING AdGroupErrorEnum_AdGroupError = 14 +) + +var AdGroupErrorEnum_AdGroupError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DUPLICATE_ADGROUP_NAME", + 3: "INVALID_ADGROUP_NAME", + 5: "ADVERTISER_NOT_ON_CONTENT_NETWORK", + 6: "BID_TOO_BIG", + 7: "BID_TYPE_AND_BIDDING_STRATEGY_MISMATCH", + 8: "MISSING_ADGROUP_NAME", + 9: "ADGROUP_LABEL_DOES_NOT_EXIST", + 10: "ADGROUP_LABEL_ALREADY_EXISTS", + 11: "INVALID_CONTENT_BID_CRITERION_TYPE_GROUP", + 12: "AD_GROUP_TYPE_NOT_VALID_FOR_ADVERTISING_CHANNEL_TYPE", + 13: "ADGROUP_TYPE_NOT_SUPPORTED_FOR_CAMPAIGN_SALES_COUNTRY", + 14: "CANNOT_ADD_ADGROUP_OF_TYPE_DSA_TO_CAMPAIGN_WITHOUT_DSA_SETTING", +} +var AdGroupErrorEnum_AdGroupError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DUPLICATE_ADGROUP_NAME": 2, + "INVALID_ADGROUP_NAME": 3, + "ADVERTISER_NOT_ON_CONTENT_NETWORK": 5, + "BID_TOO_BIG": 6, + "BID_TYPE_AND_BIDDING_STRATEGY_MISMATCH": 7, + "MISSING_ADGROUP_NAME": 8, + "ADGROUP_LABEL_DOES_NOT_EXIST": 9, + "ADGROUP_LABEL_ALREADY_EXISTS": 10, + "INVALID_CONTENT_BID_CRITERION_TYPE_GROUP": 11, + "AD_GROUP_TYPE_NOT_VALID_FOR_ADVERTISING_CHANNEL_TYPE": 12, + "ADGROUP_TYPE_NOT_SUPPORTED_FOR_CAMPAIGN_SALES_COUNTRY": 13, + "CANNOT_ADD_ADGROUP_OF_TYPE_DSA_TO_CAMPAIGN_WITHOUT_DSA_SETTING": 14, +} + +func (x AdGroupErrorEnum_AdGroupError) String() string { + return proto.EnumName(AdGroupErrorEnum_AdGroupError_name, int32(x)) +} +func (AdGroupErrorEnum_AdGroupError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_error_de21abf890f19b77, []int{0, 0} +} + +// Container for enum describing possible ad group errors. 
+type AdGroupErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupErrorEnum) Reset() { *m = AdGroupErrorEnum{} } +func (m *AdGroupErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupErrorEnum) ProtoMessage() {} +func (*AdGroupErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_error_de21abf890f19b77, []int{0} +} +func (m *AdGroupErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupErrorEnum.Unmarshal(m, b) +} +func (m *AdGroupErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupErrorEnum.Merge(dst, src) +} +func (m *AdGroupErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupErrorEnum.Size(m) +} +func (m *AdGroupErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupErrorEnum)(nil), "google.ads.googleads.v1.errors.AdGroupErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdGroupErrorEnum_AdGroupError", AdGroupErrorEnum_AdGroupError_name, AdGroupErrorEnum_AdGroupError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_group_error.proto", fileDescriptor_ad_group_error_de21abf890f19b77) +} + +var fileDescriptor_ad_group_error_de21abf890f19b77 = []byte{ + // 554 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xe1, 0x6a, 0xd4, 0x40, + 0x14, 0x85, 0xed, 0xb6, 0xb6, 0x3a, 0xad, 0x3a, 0x0e, 0x22, 0x52, 0x4a, 0xd1, 0x05, 0x45, 0x44, + 0xb2, 0x2c, 0x55, 0xd0, 0x08, 0xc2, 0x4d, 0x66, 0x9a, 0x0e, 0xcd, 0xce, 0x84, 0xcc, 0x64, 0xeb, + 0xca, 0xc2, 0xb0, 0x9a, 0x12, 0x0a, 0x6d, 0xb2, 0x24, 0xdb, 0xbe, 0x84, 0x2f, 0x21, 0xfe, 0xf4, + 0x51, 0x7c, 0x14, 0xf1, 0x21, 0x64, 0x32, 0x66, 0xdb, 0x22, 0xfa, 0x2b, 0x77, 0xee, 0x7c, 0xe7, + 0xde, 0x93, 0xe4, 0xa0, 0xbd, 0xa2, 0xaa, 0x8a, 0xd3, 0xe3, 0xc1, 0x2c, 0x6f, 0x06, 0xae, 0xb4, + 0xd5, 0xc5, 0x70, 0x70, 0x5c, 0xd7, 0x55, 0xdd, 0x0c, 0x66, 0xb9, 0x29, 0xea, 0xea, 0x7c, 0x6e, + 0xda, 0xb3, 0x37, 0xaf, 0xab, 0x45, 0x45, 0x76, 0x1d, 0xe9, 0xcd, 0xf2, 0xc6, 0x5b, 0x8a, 0xbc, + 0x8b, 0xa1, 0xe7, 0x44, 0xdb, 0x3b, 0xdd, 0xd0, 0xf9, 0xc9, 0x60, 0x56, 0x96, 0xd5, 0x62, 0xb6, + 0x38, 0xa9, 0xca, 0xc6, 0xa9, 0xfb, 0x5f, 0xd7, 0x10, 0x86, 0x3c, 0xb2, 0x53, 0x99, 0xe5, 0x59, + 0x79, 0x7e, 0xd6, 0xff, 0xb2, 0x86, 0xb6, 0xae, 0x36, 0xc9, 0x3d, 0xb4, 0x99, 0x09, 0x95, 0xb0, + 0x90, 0xef, 0x73, 0x46, 0xf1, 0x0d, 0xb2, 0x89, 0x36, 0x32, 0x71, 0x28, 0xe4, 0x91, 0xc0, 0x2b, + 0x64, 0x1b, 0x3d, 0xa4, 0x59, 0x12, 0xf3, 0x10, 0x34, 0x33, 0x40, 0xa3, 0x54, 0x66, 0x89, 0x11, + 0x30, 0x62, 0xb8, 0x47, 0x1e, 0xa1, 0x07, 0x5c, 0x8c, 0x21, 0xe6, 0xf4, 0xfa, 0xcd, 0x2a, 0x79, + 0x8a, 0x9e, 0x00, 0x1d, 0xb3, 0x54, 0x73, 0xc5, 0x52, 0x23, 0xa4, 0x36, 0x52, 0x98, 0x50, 0x0a, + 0xcd, 0x84, 0x36, 0x82, 0xe9, 0x23, 0x99, 0x1e, 0xe2, 0x9b, 0x76, 0x75, 0xc0, 0xa9, 0xd1, 0x52, + 0x9a, 0x80, 0x47, 0x78, 0x9d, 0xbc, 0x40, 0xcf, 0xda, 0xc6, 0x24, 0x61, 0x06, 0x04, 0x35, 0x01, + 0xa7, 0x94, 0x8b, 0xc8, 0x28, 0x9d, 0x82, 0x66, 0xd1, 0xc4, 0x8c, 0xb8, 0x1a, 0x81, 0x0e, 0x0f, + 0xf0, 0x86, 0xdd, 0x3e, 0xe2, 0x4a, 0xd9, 0xeb, 0x6b, 0xdb, 0x6f, 0x91, 0xc7, 0x68, 0xa7, 0xeb, + 0xc4, 0x10, 0xb0, 0xd8, 
0x50, 0xc9, 0x54, 0xeb, 0x82, 0x7d, 0xe0, 0x4a, 0xe3, 0xdb, 0x7f, 0x13, + 0x10, 0xa7, 0x0c, 0xe8, 0xc4, 0x01, 0x0a, 0x23, 0xf2, 0x12, 0x3d, 0xef, 0xde, 0xad, 0xf3, 0x6d, + 0x9d, 0x85, 0x29, 0xd7, 0x2c, 0xe5, 0x52, 0x38, 0x8f, 0xed, 0x08, 0xbc, 0x49, 0xde, 0xa0, 0x57, + 0x40, 0xdd, 0xc9, 0x5d, 0xd8, 0x65, 0x4e, 0xbd, 0x2f, 0x53, 0xd3, 0x7d, 0x0b, 0xeb, 0x35, 0x3c, + 0x00, 0x21, 0x58, 0xdc, 0x62, 0x78, 0x8b, 0xbc, 0x45, 0xaf, 0x3b, 0x27, 0x4b, 0xa1, 0xca, 0x92, + 0x44, 0xa6, 0x9a, 0x39, 0x71, 0x08, 0xa3, 0x04, 0x78, 0x24, 0x8c, 0x82, 0x98, 0x29, 0x13, 0xca, + 0x4c, 0xe8, 0x74, 0x82, 0xef, 0x90, 0x00, 0xbd, 0x0f, 0x41, 0x58, 0x01, 0xd0, 0xcb, 0x3f, 0x20, + 0xf7, 0xdd, 0x20, 0xaa, 0xc0, 0x68, 0x79, 0xa9, 0x3e, 0xe2, 0xfa, 0x40, 0x66, 0xba, 0xed, 0x2b, + 0xa6, 0x35, 0x17, 0x11, 0xbe, 0x1b, 0xfc, 0x5a, 0x41, 0xfd, 0xcf, 0xd5, 0x99, 0xf7, 0xff, 0x9c, + 0x05, 0xf7, 0xaf, 0x26, 0x26, 0xb1, 0xe1, 0x4a, 0x56, 0x3e, 0xd2, 0x3f, 0xa2, 0xa2, 0x3a, 0x9d, + 0x95, 0x85, 0x57, 0xd5, 0xc5, 0xa0, 0x38, 0x2e, 0xdb, 0xe8, 0x75, 0x09, 0x9f, 0x9f, 0x34, 0xff, + 0x0a, 0xfc, 0x3b, 0xf7, 0xf8, 0xd6, 0x5b, 0x8d, 0x00, 0xbe, 0xf7, 0x76, 0x23, 0x37, 0x0c, 0xf2, + 0xc6, 0x73, 0xa5, 0xad, 0xc6, 0x43, 0xaf, 0x5d, 0xd9, 0xfc, 0xe8, 0x80, 0x29, 0xe4, 0xcd, 0x74, + 0x09, 0x4c, 0xc7, 0xc3, 0xa9, 0x03, 0x7e, 0xf6, 0xfa, 0xae, 0xeb, 0xfb, 0x90, 0x37, 0xbe, 0xbf, + 0x44, 0x7c, 0x7f, 0x3c, 0xf4, 0x7d, 0x07, 0x7d, 0x5a, 0x6f, 0xdd, 0xed, 0xfd, 0x0e, 0x00, 0x00, + 0xff, 0xff, 0x3e, 0x57, 0xba, 0x3d, 0x8d, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_feed_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_feed_error.pb.go new file mode 100644 index 0000000..35bc5c0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_group_feed_error.pb.go @@ -0,0 +1,148 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_group_feed_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad group feed errors. +type AdGroupFeedErrorEnum_AdGroupFeedError int32 + +const ( + // Enum unspecified. + AdGroupFeedErrorEnum_UNSPECIFIED AdGroupFeedErrorEnum_AdGroupFeedError = 0 + // The received error code is not known in this version. + AdGroupFeedErrorEnum_UNKNOWN AdGroupFeedErrorEnum_AdGroupFeedError = 1 + // An active feed already exists for this ad group and place holder type. + AdGroupFeedErrorEnum_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE AdGroupFeedErrorEnum_AdGroupFeedError = 2 + // The specified feed is removed. + AdGroupFeedErrorEnum_CANNOT_CREATE_FOR_REMOVED_FEED AdGroupFeedErrorEnum_AdGroupFeedError = 3 + // The AdGroupFeed already exists. UPDATE operation should be used to modify + // the existing AdGroupFeed. 
+ AdGroupFeedErrorEnum_ADGROUP_FEED_ALREADY_EXISTS AdGroupFeedErrorEnum_AdGroupFeedError = 4 + // Cannot operate on removed AdGroupFeed. + AdGroupFeedErrorEnum_CANNOT_OPERATE_ON_REMOVED_ADGROUP_FEED AdGroupFeedErrorEnum_AdGroupFeedError = 5 + // Invalid placeholder type. + AdGroupFeedErrorEnum_INVALID_PLACEHOLDER_TYPE AdGroupFeedErrorEnum_AdGroupFeedError = 6 + // Feed mapping for this placeholder type does not exist. + AdGroupFeedErrorEnum_MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE AdGroupFeedErrorEnum_AdGroupFeedError = 7 + // Location AdGroupFeeds cannot be created unless there is a location + // CustomerFeed for the specified feed. + AdGroupFeedErrorEnum_NO_EXISTING_LOCATION_CUSTOMER_FEED AdGroupFeedErrorEnum_AdGroupFeedError = 8 +) + +var AdGroupFeedErrorEnum_AdGroupFeedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 3: "CANNOT_CREATE_FOR_REMOVED_FEED", + 4: "ADGROUP_FEED_ALREADY_EXISTS", + 5: "CANNOT_OPERATE_ON_REMOVED_ADGROUP_FEED", + 6: "INVALID_PLACEHOLDER_TYPE", + 7: "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE", + 8: "NO_EXISTING_LOCATION_CUSTOMER_FEED", +} +var AdGroupFeedErrorEnum_AdGroupFeedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 2, + "CANNOT_CREATE_FOR_REMOVED_FEED": 3, + "ADGROUP_FEED_ALREADY_EXISTS": 4, + "CANNOT_OPERATE_ON_REMOVED_ADGROUP_FEED": 5, + "INVALID_PLACEHOLDER_TYPE": 6, + "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE": 7, + "NO_EXISTING_LOCATION_CUSTOMER_FEED": 8, +} + +func (x AdGroupFeedErrorEnum_AdGroupFeedError) String() string { + return proto.EnumName(AdGroupFeedErrorEnum_AdGroupFeedError_name, int32(x)) +} +func (AdGroupFeedErrorEnum_AdGroupFeedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_error_cfd5a46a58afca74, []int{0, 0} +} + +// Container for enum describing possible ad group feed errors. 
+type AdGroupFeedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupFeedErrorEnum) Reset() { *m = AdGroupFeedErrorEnum{} } +func (m *AdGroupFeedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdGroupFeedErrorEnum) ProtoMessage() {} +func (*AdGroupFeedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_error_cfd5a46a58afca74, []int{0} +} +func (m *AdGroupFeedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupFeedErrorEnum.Unmarshal(m, b) +} +func (m *AdGroupFeedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupFeedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdGroupFeedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupFeedErrorEnum.Merge(dst, src) +} +func (m *AdGroupFeedErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdGroupFeedErrorEnum.Size(m) +} +func (m *AdGroupFeedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupFeedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupFeedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdGroupFeedErrorEnum)(nil), "google.ads.googleads.v1.errors.AdGroupFeedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdGroupFeedErrorEnum_AdGroupFeedError", AdGroupFeedErrorEnum_AdGroupFeedError_name, AdGroupFeedErrorEnum_AdGroupFeedError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_group_feed_error.proto", fileDescriptor_ad_group_feed_error_cfd5a46a58afca74) +} + +var fileDescriptor_ad_group_feed_error_cfd5a46a58afca74 = []byte{ + // 445 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x6a, 0xd4, 0x40, + 0x1c, 0xc6, 0xdd, 0x54, 0x5b, 0x99, 0x1e, 0x0c, 0x41, 0x41, 0xb4, 0xac, 0x90, 0x43, 0x11, 0x91, + 0x09, 0xc1, 0x8b, 0xc4, 0xd3, 0x34, 0xf9, 0x6f, 0x0c, 0x66, 0x67, 0x86, 0x24, 0x1b, 0xad, 0x2c, + 0x0c, 0xd1, 0x89, 0x61, 0xa1, 0xcd, 0x2c, 0x99, 0x6d, 0x1f, 0xc8, 0xa3, 0xaf, 0xe1, 0xcd, 0x67, + 0xf0, 0x09, 0x3c, 0xf9, 0x08, 0x32, 0x99, 0xee, 0x22, 0xb5, 0xf6, 0x94, 0x8f, 0xc9, 0xef, 0xfb, + 0xe6, 0x63, 0xfe, 0x7f, 0xf4, 0xba, 0x53, 0xaa, 0x3b, 0x6b, 0x83, 0x46, 0xea, 0xc0, 0x4a, 0xa3, + 0x2e, 0xc3, 0xa0, 0x1d, 0x06, 0x35, 0xe8, 0xa0, 0x91, 0xa2, 0x1b, 0xd4, 0xc5, 0x5a, 0x7c, 0x69, + 0x5b, 0x29, 0xc6, 0x43, 0xbc, 0x1e, 0xd4, 0x46, 0x79, 0x53, 0x8b, 0xe3, 0x46, 0x6a, 0xbc, 0x73, + 0xe2, 0xcb, 0x10, 0x5b, 0xe7, 0x93, 0xa3, 0x6d, 0xf2, 0x7a, 0x15, 0x34, 0x7d, 0xaf, 0x36, 0xcd, + 0x66, 0xa5, 0x7a, 0x6d, 0xdd, 0xfe, 0x4f, 0x07, 0x3d, 0x24, 0x32, 0x35, 0xd1, 0xb3, 0xb6, 0x95, + 0x60, 0x3c, 0xd0, 0x5f, 0x9c, 0xfb, 0xdf, 0x1d, 0xe4, 0x5e, 0xff, 0xe1, 0x3d, 0x40, 0x87, 0x0b, + 0x5a, 0x72, 0x88, 0xb3, 0x59, 0x06, 0x89, 0x7b, 0xc7, 0x3b, 0x44, 0x07, 0x0b, 0xfa, 0x8e, 0xb2, + 0xf7, 0xd4, 0x9d, 0x78, 0x2f, 0xd1, 0xf3, 0x19, 0x40, 0x22, 0x48, 0x5e, 0x00, 0x49, 0x4e, 0x05, + 0x7c, 0xc8, 0xca, 0xaa, 0x14, 0x33, 0x56, 0x08, 0x9e, 0x93, 0x18, 0xde, 0xb2, 0x3c, 0x81, 0x42, + 0x54, 0xa7, 0x1c, 0x5c, 0xc7, 0xf3, 0xd1, 0x34, 0x26, 0x94, 0xb2, 0x4a, 0xc4, 0x05, 0x90, 0x0a, + 0x46, 0xae, 0x80, 0x39, 0xab, 0x21, 0x11, 0x26, 0xc7, 0xdd, 0xf3, 0x9e, 0xa1, 0xa7, 0x24, 0x49, + 0x0b, 0xb6, 0xe0, 0xe2, 0x86, 0x64, 0xf7, 0xae, 0xf7, 0x02, 0x1d, 0x5f, 0x85, 0x30, 0x0e, 0x85, + 0x49, 0x61, 0x74, 0x17, 0xf2, 0xb7, 0xd5, 0xbd, 0xe7, 0x1d, 0xa1, 0xc7, 0x19, 0xad, 0x49, 0x9e, + 
0x25, 0xff, 0xd6, 0xd9, 0x37, 0xe5, 0xe7, 0x59, 0x59, 0x66, 0x34, 0x1d, 0xf9, 0x39, 0xe1, 0x7c, + 0xd4, 0x37, 0x95, 0x3f, 0xf0, 0x8e, 0x91, 0x4f, 0x99, 0xad, 0x61, 0xa8, 0x9c, 0xc5, 0xa4, 0xca, + 0x18, 0x15, 0xf1, 0xa2, 0xac, 0xd8, 0x1c, 0x0a, 0x7b, 0xe7, 0xfd, 0x93, 0xdf, 0x13, 0xe4, 0x7f, + 0x56, 0xe7, 0xf8, 0xf6, 0x19, 0x9d, 0x3c, 0xba, 0xfe, 0xd2, 0xdc, 0x0c, 0x87, 0x4f, 0x3e, 0x26, + 0x57, 0xc6, 0x4e, 0x9d, 0x35, 0x7d, 0x87, 0xd5, 0xd0, 0x05, 0x5d, 0xdb, 0x8f, 0xa3, 0xdb, 0xae, + 0xc9, 0x7a, 0xa5, 0xff, 0xb7, 0x35, 0x6f, 0xec, 0xe7, 0xab, 0xb3, 0x97, 0x12, 0xf2, 0xcd, 0x99, + 0xa6, 0x36, 0x8c, 0x48, 0x8d, 0xad, 0x34, 0xaa, 0x0e, 0xf1, 0x78, 0xa5, 0xfe, 0xb1, 0x05, 0x96, + 0x44, 0xea, 0xe5, 0x0e, 0x58, 0xd6, 0xe1, 0xd2, 0x02, 0xbf, 0x1c, 0xdf, 0x9e, 0x46, 0x11, 0x91, + 0x3a, 0x8a, 0x76, 0x48, 0x14, 0xd5, 0x61, 0x14, 0x59, 0xe8, 0xd3, 0xfe, 0xd8, 0xee, 0xd5, 0x9f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xdb, 0xc0, 0xf1, 0xaf, 0xd2, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_parameter_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_parameter_error.pb.go new file mode 100644 index 0000000..7e1e8de --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_parameter_error.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_parameter_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad parameter errors. +type AdParameterErrorEnum_AdParameterError int32 + +const ( + // Enum unspecified. + AdParameterErrorEnum_UNSPECIFIED AdParameterErrorEnum_AdParameterError = 0 + // The received error code is not known in this version. + AdParameterErrorEnum_UNKNOWN AdParameterErrorEnum_AdParameterError = 1 + // The ad group criterion must be a keyword criterion. + AdParameterErrorEnum_AD_GROUP_CRITERION_MUST_BE_KEYWORD AdParameterErrorEnum_AdParameterError = 2 + // The insertion text is invalid. 
+ AdParameterErrorEnum_INVALID_INSERTION_TEXT_FORMAT AdParameterErrorEnum_AdParameterError = 3 +) + +var AdParameterErrorEnum_AdParameterError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_GROUP_CRITERION_MUST_BE_KEYWORD", + 3: "INVALID_INSERTION_TEXT_FORMAT", +} +var AdParameterErrorEnum_AdParameterError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP_CRITERION_MUST_BE_KEYWORD": 2, + "INVALID_INSERTION_TEXT_FORMAT": 3, +} + +func (x AdParameterErrorEnum_AdParameterError) String() string { + return proto.EnumName(AdParameterErrorEnum_AdParameterError_name, int32(x)) +} +func (AdParameterErrorEnum_AdParameterError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_error_4740fd19374c0b5b, []int{0, 0} +} + +// Container for enum describing possible ad parameter errors. +type AdParameterErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdParameterErrorEnum) Reset() { *m = AdParameterErrorEnum{} } +func (m *AdParameterErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdParameterErrorEnum) ProtoMessage() {} +func (*AdParameterErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_error_4740fd19374c0b5b, []int{0} +} +func (m *AdParameterErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdParameterErrorEnum.Unmarshal(m, b) +} +func (m *AdParameterErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdParameterErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdParameterErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdParameterErrorEnum.Merge(dst, src) +} +func (m *AdParameterErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdParameterErrorEnum.Size(m) +} +func (m *AdParameterErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdParameterErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdParameterErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdParameterErrorEnum)(nil), "google.ads.googleads.v1.errors.AdParameterErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdParameterErrorEnum_AdParameterError", AdParameterErrorEnum_AdParameterError_name, AdParameterErrorEnum_AdParameterError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_parameter_error.proto", fileDescriptor_ad_parameter_error_4740fd19374c0b5b) +} + +var fileDescriptor_ad_parameter_error_4740fd19374c0b5b = []byte{ + // 349 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xeb, 0x40, + 0x18, 0x85, 0x6f, 0x52, 0xb8, 0x17, 0xa6, 0x8b, 0x1b, 0x82, 0x6e, 0x44, 0x0b, 0x66, 0xe1, 0x72, + 0x42, 0x70, 0x21, 0x8c, 0xab, 0x69, 0x33, 0x2d, 0xa1, 0x36, 0x09, 0x69, 0x92, 0xaa, 0x04, 0x86, + 0xd1, 0x84, 0x50, 0x68, 0x33, 0x61, 0x26, 0x76, 0xe3, 0x63, 0xf8, 0x06, 0x2e, 0x7d, 0x14, 0x1f, + 0xc5, 0x95, 0x8f, 0x20, 0xc9, 0x98, 0x2e, 0x0a, 0xba, 0x9a, 0xc3, 0xcf, 0x77, 0xce, 0xfc, 0xe7, + 0x07, 0x57, 0x25, 0xe7, 0xe5, 0xa6, 0xb0, 0x59, 0x2e, 0x6d, 0x25, 0x5b, 0xb5, 0x73, 0xec, 0x42, + 0x08, 0x2e, 0xa4, 0xcd, 0x72, 0x5a, 0x33, 0xc1, 0xb6, 0x45, 0x53, 0x08, 0xda, 0xcd, 0x60, 0x2d, + 0x78, 0xc3, 0xcd, 0x91, 0xa2, 0x21, 0xcb, 0x25, 0xdc, 0x1b, 0xe1, 0xce, 0x81, 0xca, 0x78, 0x72, + 0xda, 0x07, 0xd7, 0x6b, 0x9b, 0x55, 0x15, 0x6f, 0x58, 0xb3, 0xe6, 0x95, 0x54, 0x6e, 0xeb, 0x45, + 0x03, 0x47, 0x38, 0x0f, 0xfb, 0x64, 
0xd2, 0x7a, 0x48, 0xf5, 0xb4, 0xb5, 0x9e, 0x81, 0x71, 0x38, + 0x37, 0xff, 0x83, 0x61, 0xe2, 0x2f, 0x43, 0x32, 0xf1, 0xa6, 0x1e, 0x71, 0x8d, 0x3f, 0xe6, 0x10, + 0xfc, 0x4b, 0xfc, 0xb9, 0x1f, 0xac, 0x7c, 0x43, 0x33, 0x2f, 0x80, 0x85, 0x5d, 0x3a, 0x8b, 0x82, + 0x24, 0xa4, 0x93, 0xc8, 0x8b, 0x49, 0xe4, 0x05, 0x3e, 0x5d, 0x24, 0xcb, 0x98, 0x8e, 0x09, 0x9d, + 0x93, 0xbb, 0x55, 0x10, 0xb9, 0x86, 0x6e, 0x9e, 0x83, 0x33, 0xcf, 0x4f, 0xf1, 0x8d, 0xe7, 0x52, + 0xcf, 0x5f, 0x92, 0x28, 0x6e, 0xb1, 0x98, 0xdc, 0xc6, 0x74, 0x1a, 0x44, 0x0b, 0x1c, 0x1b, 0x83, + 0xf1, 0xa7, 0x06, 0xac, 0x47, 0xbe, 0x85, 0xbf, 0x57, 0x1b, 0x1f, 0x1f, 0x6e, 0x18, 0xb6, 0x9d, + 0x42, 0xed, 0xde, 0xfd, 0x36, 0x96, 0x7c, 0xc3, 0xaa, 0x12, 0x72, 0x51, 0xda, 0x65, 0x51, 0x75, + 0x8d, 0xfb, 0xe3, 0xd6, 0x6b, 0xf9, 0xd3, 0xad, 0xaf, 0xd5, 0xf3, 0xaa, 0x0f, 0x66, 0x18, 0xbf, + 0xe9, 0xa3, 0x99, 0x0a, 0xc3, 0xb9, 0x84, 0x4a, 0xb6, 0x2a, 0x75, 0x60, 0xf7, 0xa5, 0x7c, 0xef, + 0x81, 0x0c, 0xe7, 0x32, 0xdb, 0x03, 0x59, 0xea, 0x64, 0x0a, 0xf8, 0xd0, 0x2d, 0x35, 0x45, 0x08, + 0xe7, 0x12, 0xa1, 0x3d, 0x82, 0x50, 0xea, 0x20, 0xa4, 0xa0, 0x87, 0xbf, 0xdd, 0x76, 0x97, 0x5f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x79, 0xd7, 0xc6, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_sharing_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_sharing_error.pb.go new file mode 100644 index 0000000..90576b7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/ad_sharing_error.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/ad_sharing_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible ad sharing errors. +type AdSharingErrorEnum_AdSharingError int32 + +const ( + // Enum unspecified. + AdSharingErrorEnum_UNSPECIFIED AdSharingErrorEnum_AdSharingError = 0 + // The received error code is not known in this version. + AdSharingErrorEnum_UNKNOWN AdSharingErrorEnum_AdSharingError = 1 + // Error resulting in attempting to add an Ad to an AdGroup that already + // contains the Ad. + AdSharingErrorEnum_AD_GROUP_ALREADY_CONTAINS_AD AdSharingErrorEnum_AdSharingError = 2 + // Ad is not compatible with the AdGroup it is being shared with. + AdSharingErrorEnum_INCOMPATIBLE_AD_UNDER_AD_GROUP AdSharingErrorEnum_AdSharingError = 3 + // Cannot add AdGroupAd on inactive Ad. 
+ AdSharingErrorEnum_CANNOT_SHARE_INACTIVE_AD AdSharingErrorEnum_AdSharingError = 4 +) + +var AdSharingErrorEnum_AdSharingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AD_GROUP_ALREADY_CONTAINS_AD", + 3: "INCOMPATIBLE_AD_UNDER_AD_GROUP", + 4: "CANNOT_SHARE_INACTIVE_AD", +} +var AdSharingErrorEnum_AdSharingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AD_GROUP_ALREADY_CONTAINS_AD": 2, + "INCOMPATIBLE_AD_UNDER_AD_GROUP": 3, + "CANNOT_SHARE_INACTIVE_AD": 4, +} + +func (x AdSharingErrorEnum_AdSharingError) String() string { + return proto.EnumName(AdSharingErrorEnum_AdSharingError_name, int32(x)) +} +func (AdSharingErrorEnum_AdSharingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_ad_sharing_error_ecb9463c83b86137, []int{0, 0} +} + +// Container for enum describing possible ad sharing errors. +type AdSharingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdSharingErrorEnum) Reset() { *m = AdSharingErrorEnum{} } +func (m *AdSharingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdSharingErrorEnum) ProtoMessage() {} +func (*AdSharingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_sharing_error_ecb9463c83b86137, []int{0} +} +func (m *AdSharingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdSharingErrorEnum.Unmarshal(m, b) +} +func (m *AdSharingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdSharingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdSharingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdSharingErrorEnum.Merge(dst, src) +} +func (m *AdSharingErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdSharingErrorEnum.Size(m) +} +func (m *AdSharingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdSharingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdSharingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdSharingErrorEnum)(nil), "google.ads.googleads.v1.errors.AdSharingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdSharingErrorEnum_AdSharingError", AdSharingErrorEnum_AdSharingError_name, AdSharingErrorEnum_AdSharingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/ad_sharing_error.proto", fileDescriptor_ad_sharing_error_ecb9463c83b86137) +} + +var fileDescriptor_ad_sharing_error_ecb9463c83b86137 = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xdf, 0x4a, 0xe3, 0x40, + 0x14, 0x87, 0x37, 0xe9, 0xb2, 0x0b, 0x53, 0xd8, 0x0d, 0xd9, 0x9b, 0x65, 0x29, 0x65, 0xc9, 0x03, + 0x4c, 0x08, 0xe2, 0xcd, 0x78, 0x75, 0x9a, 0x8c, 0x35, 0x58, 0x27, 0x21, 0xff, 0x44, 0x09, 0x0c, + 0xd1, 0x94, 0x18, 0x68, 0x33, 0x25, 0x53, 0xfb, 0x20, 0x3e, 0x82, 0x77, 0xfa, 0x28, 0x3e, 0x8a, + 0x17, 0x3e, 0x83, 0x24, 0x63, 0x0a, 0xbd, 0xd0, 0xab, 0xf9, 0x71, 0xf8, 0xbe, 0x33, 0xe7, 0x1c, + 0x74, 0x5c, 0x09, 0x51, 0xad, 0x96, 0x76, 0x51, 0x4a, 0x5b, 0xc5, 0x2e, 0xed, 0x1c, 0x7b, 0xd9, + 0xb6, 0xa2, 0x95, 0x76, 0x51, 0x72, 0x79, 0x57, 0xb4, 0x75, 0x53, 0xf1, 0xbe, 0x82, 0x37, 0xad, + 0xd8, 0x0a, 0x73, 0xaa, 0x58, 0x5c, 0x94, 0x12, 0xef, 0x35, 0xbc, 0x73, 0xb0, 0xd2, 0xfe, 0x4d, + 0x86, 0xb6, 0x9b, 0xda, 0x2e, 0x9a, 0x46, 0x6c, 0x8b, 0x6d, 0x2d, 0x1a, 0xa9, 0x6c, 0xeb, 0x49, + 0x43, 0x26, 0x94, 0xb1, 0xea, 0x4b, 0x3b, 0x83, 0x36, 0xf7, 0x6b, 0xeb, 
0x41, 0x43, 0xbf, 0x0e, + 0xcb, 0xe6, 0x6f, 0x34, 0x4e, 0x59, 0x1c, 0x52, 0xd7, 0x3f, 0xf5, 0xa9, 0x67, 0x7c, 0x33, 0xc7, + 0xe8, 0x67, 0xca, 0xce, 0x59, 0x70, 0xc9, 0x0c, 0xcd, 0xfc, 0x8f, 0x26, 0xe0, 0xf1, 0x79, 0x14, + 0xa4, 0x21, 0x87, 0x45, 0x44, 0xc1, 0xbb, 0xe2, 0x6e, 0xc0, 0x12, 0xf0, 0x59, 0xcc, 0xc1, 0x33, + 0x74, 0xd3, 0x42, 0x53, 0x9f, 0xb9, 0xc1, 0x45, 0x08, 0x89, 0x3f, 0x5b, 0x50, 0x0e, 0x1e, 0x4f, + 0x99, 0x47, 0x23, 0x3e, 0x78, 0xc6, 0xc8, 0x9c, 0xa0, 0xbf, 0x2e, 0x30, 0x16, 0x24, 0x3c, 0x3e, + 0x83, 0x88, 0x72, 0x9f, 0x81, 0x9b, 0xf8, 0x59, 0x07, 0x1b, 0xdf, 0x67, 0x6f, 0x1a, 0xb2, 0x6e, + 0xc5, 0x1a, 0x7f, 0xbd, 0xf0, 0xec, 0xcf, 0xe1, 0xe0, 0x61, 0xb7, 0x67, 0xa8, 0x5d, 0x7b, 0x1f, + 0x5a, 0x25, 0x56, 0x45, 0x53, 0x61, 0xd1, 0x56, 0x76, 0xb5, 0x6c, 0xfa, 0x2b, 0x0c, 0xe7, 0xde, + 0xd4, 0xf2, 0xb3, 0xeb, 0x9f, 0xa8, 0xe7, 0x51, 0x1f, 0xcd, 0x01, 0x9e, 0xf5, 0xe9, 0x5c, 0x35, + 0x83, 0x52, 0x62, 0x15, 0xbb, 0x94, 0x39, 0xb8, 0xff, 0x52, 0xbe, 0x0c, 0x40, 0x0e, 0xa5, 0xcc, + 0xf7, 0x40, 0x9e, 0x39, 0xb9, 0x02, 0x5e, 0x75, 0x4b, 0x55, 0x09, 0x81, 0x52, 0x12, 0xb2, 0x47, + 0x08, 0xc9, 0x1c, 0x42, 0x14, 0x74, 0xf3, 0xa3, 0x9f, 0xee, 0xe8, 0x3d, 0x00, 0x00, 0xff, 0xff, + 0xe1, 0x7a, 0xe6, 0xc7, 0x1a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/adx_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/adx_error.pb.go new file mode 100644 index 0000000..82dc870 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/adx_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/adx_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible adx errors. +type AdxErrorEnum_AdxError int32 + +const ( + // Enum unspecified. + AdxErrorEnum_UNSPECIFIED AdxErrorEnum_AdxError = 0 + // The received error code is not known in this version. + AdxErrorEnum_UNKNOWN AdxErrorEnum_AdxError = 1 + // Attempt to use non-AdX feature by AdX customer. + AdxErrorEnum_UNSUPPORTED_FEATURE AdxErrorEnum_AdxError = 2 +) + +var AdxErrorEnum_AdxError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "UNSUPPORTED_FEATURE", +} +var AdxErrorEnum_AdxError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "UNSUPPORTED_FEATURE": 2, +} + +func (x AdxErrorEnum_AdxError) String() string { + return proto.EnumName(AdxErrorEnum_AdxError_name, int32(x)) +} +func (AdxErrorEnum_AdxError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_adx_error_3c0c7a571d9648a3, []int{0, 0} +} + +// Container for enum describing possible adx errors. 
+type AdxErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdxErrorEnum) Reset() { *m = AdxErrorEnum{} } +func (m *AdxErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AdxErrorEnum) ProtoMessage() {} +func (*AdxErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_adx_error_3c0c7a571d9648a3, []int{0} +} +func (m *AdxErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdxErrorEnum.Unmarshal(m, b) +} +func (m *AdxErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdxErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AdxErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdxErrorEnum.Merge(dst, src) +} +func (m *AdxErrorEnum) XXX_Size() int { + return xxx_messageInfo_AdxErrorEnum.Size(m) +} +func (m *AdxErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AdxErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AdxErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdxErrorEnum)(nil), "google.ads.googleads.v1.errors.AdxErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AdxErrorEnum_AdxError", AdxErrorEnum_AdxError_name, AdxErrorEnum_AdxError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/adx_error.proto", fileDescriptor_adx_error_3c0c7a571d9648a3) +} + +var fileDescriptor_adx_error_3c0c7a571d9648a3 = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xf3, 0x30, + 0x14, 0xc7, 0xbf, 0xf6, 0x03, 0x95, 0x4c, 0xb1, 0xd4, 0x0b, 0x41, 0x64, 0x17, 0x7d, 0x80, 0x84, + 0xe2, 0x5d, 0xbc, 0xca, 0x6c, 0x36, 0x86, 0xd0, 0xd5, 0x6d, 0xad, 0x20, 0x85, 0x11, 0x4d, 0x09, + 0x85, 0x2d, 0x29, 0x4d, 0x1d, 0x7b, 0x1e, 0x2f, 0x7d, 0x14, 0x9f, 0x44, 0x7c, 0x0a, 0x69, 0x8f, + 0xed, 0x9d, 0x5e, 0xe5, 0x97, 0xc3, 0xef, 0x9c, 0xf3, 0xe7, 0x20, 0xac, 0x8c, 0x51, 0xdb, 0x82, + 0x08, 0x69, 0x09, 0x60, 0x4b, 0xfb, 0x90, 0x14, 0x75, 0x6d, 0x6a, 0x4b, 0x84, 0x3c, 0x6c, 0x3a, + 0xc4, 0x55, 0x6d, 0x1a, 0xe3, 0x8f, 0x41, 0xc2, 0x42, 0x5a, 0x3c, 0xf8, 0x78, 0x1f, 0x62, 0xf0, + 0xaf, 0xae, 0xfb, 0x79, 0x55, 0x49, 0x84, 0xd6, 0xa6, 0x11, 0x4d, 0x69, 0xb4, 0x85, 0xee, 0xe0, + 0x01, 0x9d, 0x32, 0x79, 0xe0, 0xad, 0xca, 0xf5, 0xeb, 0x2e, 0x60, 0xe8, 0xa4, 0xff, 0xfb, 0xe7, + 0x68, 0x94, 0xc6, 0xab, 0x84, 0xdf, 0xcd, 0xa7, 0x73, 0x1e, 0x79, 0xff, 0xfc, 0x11, 0x3a, 0x4e, + 0xe3, 0xfb, 0x78, 0xf1, 0x18, 0x7b, 0x8e, 0x7f, 0x89, 0x2e, 0xd2, 0x78, 0x95, 0x26, 0xc9, 0x62, + 0xb9, 0xe6, 0xd1, 0x66, 0xca, 0xd9, 0x3a, 0x5d, 0x72, 0xcf, 0x9d, 0x7c, 0x3a, 0x28, 0x78, 0x31, + 0x3b, 0xfc, 0x77, 0xae, 0xc9, 0x59, 0xbf, 0x27, 0x69, 0x83, 0x24, 0xce, 0x53, 0xf4, 0xd3, 0xa0, + 0xcc, 0x56, 0x68, 0x85, 0x4d, 0xad, 0x88, 0x2a, 0x74, 0x17, 0xb3, 0x3f, 0x44, 0x55, 0xda, 0xdf, + 0xee, 0x72, 0x0b, 0xcf, 0x9b, 0xfb, 0x7f, 0xc6, 0xd8, 0xbb, 0x3b, 0x9e, 0xc1, 0x30, 0x26, 0x2d, + 0x06, 0x6c, 0x29, 0x0b, 0x71, 0xb7, 0xd2, 0x7e, 0xf4, 0x42, 0xce, 0xa4, 0xcd, 0x07, 0x21, 0xcf, + 0xc2, 0x1c, 0x84, 0x2f, 0x37, 0x80, 0x2a, 0xa5, 0x4c, 0x5a, 0x4a, 0x07, 0x85, 0xd2, 0x2c, 0xa4, + 0x14, 0xa4, 0xe7, 0xa3, 0x2e, 0xdd, 0xcd, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x9d, 0x58, 0x1f, + 0x59, 0xb4, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/asset_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/asset_error.pb.go new file mode 
100644 index 0000000..075c440 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/asset_error.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/asset_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible asset errors. +type AssetErrorEnum_AssetError int32 + +const ( + // Enum unspecified. + AssetErrorEnum_UNSPECIFIED AssetErrorEnum_AssetError = 0 + // The received error code is not known in this version. + AssetErrorEnum_UNKNOWN AssetErrorEnum_AssetError = 1 + // The customer is not whitelisted for this asset type. + AssetErrorEnum_CUSTOMER_NOT_WHITELISTED_FOR_ASSET_TYPE AssetErrorEnum_AssetError = 2 + // Assets are duplicated across operations. + AssetErrorEnum_DUPLICATE_ASSET AssetErrorEnum_AssetError = 3 + // The asset name is duplicated, either across operations or with an + // existing asset. + AssetErrorEnum_DUPLICATE_ASSET_NAME AssetErrorEnum_AssetError = 4 + // The Asset.asset_data oneof is empty. + AssetErrorEnum_ASSET_DATA_IS_MISSING AssetErrorEnum_AssetError = 5 + // The asset has a name which is different from an existing duplicate that + // represents the same content. + AssetErrorEnum_CANNOT_MODIFY_ASSET_NAME AssetErrorEnum_AssetError = 6 +) + +var AssetErrorEnum_AssetError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CUSTOMER_NOT_WHITELISTED_FOR_ASSET_TYPE", + 3: "DUPLICATE_ASSET", + 4: "DUPLICATE_ASSET_NAME", + 5: "ASSET_DATA_IS_MISSING", + 6: "CANNOT_MODIFY_ASSET_NAME", +} +var AssetErrorEnum_AssetError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CUSTOMER_NOT_WHITELISTED_FOR_ASSET_TYPE": 2, + "DUPLICATE_ASSET": 3, + "DUPLICATE_ASSET_NAME": 4, + "ASSET_DATA_IS_MISSING": 5, + "CANNOT_MODIFY_ASSET_NAME": 6, +} + +func (x AssetErrorEnum_AssetError) String() string { + return proto.EnumName(AssetErrorEnum_AssetError_name, int32(x)) +} +func (AssetErrorEnum_AssetError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_asset_error_d59e2d33636ee424, []int{0, 0} +} + +// Container for enum describing possible asset errors. 
+type AssetErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssetErrorEnum) Reset() { *m = AssetErrorEnum{} } +func (m *AssetErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AssetErrorEnum) ProtoMessage() {} +func (*AssetErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_error_d59e2d33636ee424, []int{0} +} +func (m *AssetErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssetErrorEnum.Unmarshal(m, b) +} +func (m *AssetErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssetErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AssetErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssetErrorEnum.Merge(dst, src) +} +func (m *AssetErrorEnum) XXX_Size() int { + return xxx_messageInfo_AssetErrorEnum.Size(m) +} +func (m *AssetErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AssetErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AssetErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AssetErrorEnum)(nil), "google.ads.googleads.v1.errors.AssetErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AssetErrorEnum_AssetError", AssetErrorEnum_AssetError_name, AssetErrorEnum_AssetError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/asset_error.proto", fileDescriptor_asset_error_d59e2d33636ee424) +} + +var fileDescriptor_asset_error_d59e2d33636ee424 = []byte{ + // 380 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x8e, 0xd3, 0x30, + 0x10, 0x86, 0x49, 0x16, 0x16, 0xc9, 0x2b, 0xd1, 0xc8, 0x80, 0xb4, 0xa0, 0xd5, 0x1e, 0x72, 0xe1, + 0x80, 0xe4, 0x10, 0x71, 0x33, 0x27, 0x6f, 0xe2, 0x16, 0x8b, 0x8d, 0x13, 0xd5, 0x4e, 0xab, 0xa2, + 0x48, 0x56, 0x20, 0x51, 0x54, 0xa9, 0x8d, 0xab, 0x38, 0xf4, 0x81, 0x38, 0xf2, 0x12, 0xdc, 0x39, + 0xf2, 0x18, 0x7d, 0x0a, 0x94, 0x98, 0xb6, 0x08, 0x09, 0x4e, 0xfe, 0x67, 0xfc, 0xfd, 0x33, 0xf6, + 0x0c, 0x78, 0xd3, 0x68, 0xdd, 0x6c, 0xea, 0xa0, 0xac, 0x4c, 0x60, 0xe5, 0xa0, 0xf6, 0x61, 0x50, + 0x77, 0x9d, 0xee, 0x4c, 0x50, 0x1a, 0x53, 0xf7, 0x6a, 0x0c, 0xd0, 0xae, 0xd3, 0xbd, 0x86, 0xb7, + 0x16, 0x43, 0x65, 0x65, 0xd0, 0xc9, 0x81, 0xf6, 0x21, 0xb2, 0x8e, 0x97, 0x37, 0xc7, 0x8a, 0xbb, + 0x75, 0x50, 0xb6, 0xad, 0xee, 0xcb, 0x7e, 0xad, 0x5b, 0x63, 0xdd, 0xfe, 0x4f, 0x07, 0x3c, 0x21, + 0x43, 0x4d, 0x3a, 0xd0, 0xb4, 0xfd, 0xb2, 0xf5, 0xbf, 0x3b, 0x00, 0x9c, 0x53, 0x70, 0x02, 0xae, + 0x72, 0x2e, 0x32, 0x1a, 0xb1, 0x29, 0xa3, 0xb1, 0xf7, 0x00, 0x5e, 0x81, 0xc7, 0x39, 0xff, 0xc0, + 0xd3, 0x25, 0xf7, 0x1c, 0xf8, 0x1a, 0xbc, 0x8a, 0x72, 0x21, 0xd3, 0x84, 0xce, 0x15, 0x4f, 0xa5, + 0x5a, 0xbe, 0x67, 0x92, 0xde, 0x33, 0x21, 0x69, 0xac, 0xa6, 0xe9, 0x5c, 0x11, 0x21, 0xa8, 0x54, + 0x72, 0x95, 0x51, 0xcf, 0x85, 0x4f, 0xc1, 0x24, 0xce, 0xb3, 0x7b, 0x16, 0x11, 0x49, 0xed, 0x8d, + 0x77, 0x01, 0xaf, 0xc1, 0xb3, 0xbf, 0x92, 0x8a, 0x93, 0x84, 0x7a, 0x0f, 0xe1, 0x0b, 0xf0, 0xdc, + 0xc6, 0x31, 0x91, 0x44, 0x31, 0xa1, 0x12, 0x26, 0x04, 0xe3, 0x33, 0xef, 0x11, 0xbc, 0x01, 0xd7, + 0x11, 0xe1, 0x43, 0xc3, 0x24, 0x8d, 0xd9, 0x74, 0xf5, 0xa7, 0xf1, 0xf2, 0xee, 0xe0, 0x00, 0xff, + 0xb3, 0xde, 0xa2, 0xff, 0x4f, 0xe6, 0x6e, 0x72, 0xfe, 0x65, 0x36, 0x0c, 0x23, 0x73, 0x3e, 0xc6, + 0xbf, 0x2d, 0x8d, 0xde, 0x94, 0x6d, 0x83, 0x74, 0xd7, 0x04, 0x4d, 0xdd, 0x8e, 0xa3, 0x3a, 0xae, + 0x63, 0xb7, 0x36, 0xff, 0xda, 0xce, 0x3b, 0x7b, 0x7c, 0x75, 0x2f, 0x66, 0x84, 0x7c, 
0x73, 0x6f, + 0x67, 0xb6, 0x18, 0xa9, 0x0c, 0xb2, 0x72, 0x50, 0x8b, 0x10, 0x8d, 0x2d, 0xcd, 0x8f, 0x23, 0x50, + 0x90, 0xca, 0x14, 0x27, 0xa0, 0x58, 0x84, 0x85, 0x05, 0x0e, 0xae, 0x6f, 0xb3, 0x18, 0x93, 0xca, + 0x60, 0x7c, 0x42, 0x30, 0x5e, 0x84, 0x18, 0x5b, 0xe8, 0xd3, 0xe5, 0xf8, 0xba, 0xb7, 0xbf, 0x02, + 0x00, 0x00, 0xff, 0xff, 0xcc, 0x3d, 0xa0, 0x27, 0x3a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authentication_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authentication_error.pb.go new file mode 100644 index 0000000..6926924 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authentication_error.pb.go @@ -0,0 +1,202 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/authentication_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible authentication errors. +type AuthenticationErrorEnum_AuthenticationError int32 + +const ( + // Enum unspecified. + AuthenticationErrorEnum_UNSPECIFIED AuthenticationErrorEnum_AuthenticationError = 0 + // The received error code is not known in this version. + AuthenticationErrorEnum_UNKNOWN AuthenticationErrorEnum_AuthenticationError = 1 + // Authentication of the request failed. + AuthenticationErrorEnum_AUTHENTICATION_ERROR AuthenticationErrorEnum_AuthenticationError = 2 + // Client Customer Id is not a number. + AuthenticationErrorEnum_CLIENT_CUSTOMER_ID_INVALID AuthenticationErrorEnum_AuthenticationError = 5 + // No customer found for the provided customer id. + AuthenticationErrorEnum_CUSTOMER_NOT_FOUND AuthenticationErrorEnum_AuthenticationError = 8 + // Client's Google Account is deleted. + AuthenticationErrorEnum_GOOGLE_ACCOUNT_DELETED AuthenticationErrorEnum_AuthenticationError = 9 + // Google account login token in the cookie is invalid. + AuthenticationErrorEnum_GOOGLE_ACCOUNT_COOKIE_INVALID AuthenticationErrorEnum_AuthenticationError = 10 + // A problem occurred during Google account authentication. + AuthenticationErrorEnum_GOOGLE_ACCOUNT_AUTHENTICATION_FAILED AuthenticationErrorEnum_AuthenticationError = 25 + // The user in the google account login token does not match the UserId in + // the cookie. + AuthenticationErrorEnum_GOOGLE_ACCOUNT_USER_AND_ADS_USER_MISMATCH AuthenticationErrorEnum_AuthenticationError = 12 + // Login cookie is required for authentication. + AuthenticationErrorEnum_LOGIN_COOKIE_REQUIRED AuthenticationErrorEnum_AuthenticationError = 13 + // User in the cookie is not a valid Ads user. + AuthenticationErrorEnum_NOT_ADS_USER AuthenticationErrorEnum_AuthenticationError = 14 + // Oauth token in the header is not valid. + AuthenticationErrorEnum_OAUTH_TOKEN_INVALID AuthenticationErrorEnum_AuthenticationError = 15 + // Oauth token in the header has expired. 
+ AuthenticationErrorEnum_OAUTH_TOKEN_EXPIRED AuthenticationErrorEnum_AuthenticationError = 16 + // Oauth token in the header has been disabled. + AuthenticationErrorEnum_OAUTH_TOKEN_DISABLED AuthenticationErrorEnum_AuthenticationError = 17 + // Oauth token in the header has been revoked. + AuthenticationErrorEnum_OAUTH_TOKEN_REVOKED AuthenticationErrorEnum_AuthenticationError = 18 + // Oauth token HTTP header is malformed. + AuthenticationErrorEnum_OAUTH_TOKEN_HEADER_INVALID AuthenticationErrorEnum_AuthenticationError = 19 + // Login cookie is not valid. + AuthenticationErrorEnum_LOGIN_COOKIE_INVALID AuthenticationErrorEnum_AuthenticationError = 20 + // User Id in the header is not a valid id. + AuthenticationErrorEnum_USER_ID_INVALID AuthenticationErrorEnum_AuthenticationError = 22 + // An account administrator changed this account's authentication settings. + // To access this Google Ads account, enable 2-Step Verification in your + // Google account at https://www.google.com/landing/2step. + AuthenticationErrorEnum_TWO_STEP_VERIFICATION_NOT_ENROLLED AuthenticationErrorEnum_AuthenticationError = 23 + // An account administrator changed this account's authentication settings. + // To access this Google Ads account, enable Advanced Protection in your + // Google account at https://landing.google.com/advancedprotection. + AuthenticationErrorEnum_ADVANCED_PROTECTION_NOT_ENROLLED AuthenticationErrorEnum_AuthenticationError = 24 +) + +var AuthenticationErrorEnum_AuthenticationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "AUTHENTICATION_ERROR", + 5: "CLIENT_CUSTOMER_ID_INVALID", + 8: "CUSTOMER_NOT_FOUND", + 9: "GOOGLE_ACCOUNT_DELETED", + 10: "GOOGLE_ACCOUNT_COOKIE_INVALID", + 25: "GOOGLE_ACCOUNT_AUTHENTICATION_FAILED", + 12: "GOOGLE_ACCOUNT_USER_AND_ADS_USER_MISMATCH", + 13: "LOGIN_COOKIE_REQUIRED", + 14: "NOT_ADS_USER", + 15: "OAUTH_TOKEN_INVALID", + 16: "OAUTH_TOKEN_EXPIRED", + 17: "OAUTH_TOKEN_DISABLED", + 18: "OAUTH_TOKEN_REVOKED", + 19: "OAUTH_TOKEN_HEADER_INVALID", + 20: "LOGIN_COOKIE_INVALID", + 22: "USER_ID_INVALID", + 23: "TWO_STEP_VERIFICATION_NOT_ENROLLED", + 24: "ADVANCED_PROTECTION_NOT_ENROLLED", +} +var AuthenticationErrorEnum_AuthenticationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "AUTHENTICATION_ERROR": 2, + "CLIENT_CUSTOMER_ID_INVALID": 5, + "CUSTOMER_NOT_FOUND": 8, + "GOOGLE_ACCOUNT_DELETED": 9, + "GOOGLE_ACCOUNT_COOKIE_INVALID": 10, + "GOOGLE_ACCOUNT_AUTHENTICATION_FAILED": 25, + "GOOGLE_ACCOUNT_USER_AND_ADS_USER_MISMATCH": 12, + "LOGIN_COOKIE_REQUIRED": 13, + "NOT_ADS_USER": 14, + "OAUTH_TOKEN_INVALID": 15, + "OAUTH_TOKEN_EXPIRED": 16, + "OAUTH_TOKEN_DISABLED": 17, + "OAUTH_TOKEN_REVOKED": 18, + "OAUTH_TOKEN_HEADER_INVALID": 19, + "LOGIN_COOKIE_INVALID": 20, + "USER_ID_INVALID": 22, + "TWO_STEP_VERIFICATION_NOT_ENROLLED": 23, + "ADVANCED_PROTECTION_NOT_ENROLLED": 24, +} + +func (x AuthenticationErrorEnum_AuthenticationError) String() string { + return proto.EnumName(AuthenticationErrorEnum_AuthenticationError_name, int32(x)) +} +func (AuthenticationErrorEnum_AuthenticationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_authentication_error_2d6a04c4485c2fa4, []int{0, 0} +} + +// Container for enum describing possible authentication errors. 
+type AuthenticationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthenticationErrorEnum) Reset() { *m = AuthenticationErrorEnum{} } +func (m *AuthenticationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AuthenticationErrorEnum) ProtoMessage() {} +func (*AuthenticationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_authentication_error_2d6a04c4485c2fa4, []int{0} +} +func (m *AuthenticationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthenticationErrorEnum.Unmarshal(m, b) +} +func (m *AuthenticationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthenticationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AuthenticationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthenticationErrorEnum.Merge(dst, src) +} +func (m *AuthenticationErrorEnum) XXX_Size() int { + return xxx_messageInfo_AuthenticationErrorEnum.Size(m) +} +func (m *AuthenticationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AuthenticationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthenticationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AuthenticationErrorEnum)(nil), "google.ads.googleads.v1.errors.AuthenticationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AuthenticationErrorEnum_AuthenticationError", AuthenticationErrorEnum_AuthenticationError_name, AuthenticationErrorEnum_AuthenticationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/authentication_error.proto", fileDescriptor_authentication_error_2d6a04c4485c2fa4) +} + +var fileDescriptor_authentication_error_2d6a04c4485c2fa4 = []byte{ + // 558 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0x66, 0x65, 0xfc, 0x79, 0x83, 0x19, 0x77, 0xec, 0x4f, 0x30, 0x41, 0x35, 0x21, 0xb8, 0x20, + 0x51, 0xc5, 0x15, 0xe1, 0xca, 0x8b, 0x4f, 0x3b, 0xab, 0x99, 0x1d, 0x12, 0x27, 0x43, 0xa8, 0x92, + 0x15, 0xd6, 0x2a, 0x54, 0xda, 0x92, 0xa9, 0xe9, 0xf6, 0x40, 0x5c, 0xf2, 0x28, 0x88, 0x07, 0x41, + 0xdc, 0xf2, 0x02, 0xc8, 0xc9, 0x12, 0x6d, 0x61, 0x70, 0xd5, 0x53, 0x7f, 0x3f, 0xe7, 0xf3, 0xc9, + 0x31, 0x7a, 0x97, 0xe6, 0x79, 0x7a, 0x32, 0xb5, 0x93, 0x49, 0x61, 0x57, 0xa5, 0xa9, 0x2e, 0xfa, + 0xf6, 0x74, 0x3e, 0xcf, 0xe7, 0x85, 0x9d, 0x9c, 0x2f, 0xbe, 0x4c, 0xb3, 0xc5, 0xec, 0x38, 0x59, + 0xcc, 0xf2, 0x4c, 0x97, 0xa7, 0xd6, 0xd9, 0x3c, 0x5f, 0xe4, 0x64, 0xb7, 0xe2, 0x5b, 0xc9, 0xa4, + 0xb0, 0x1a, 0xa9, 0x75, 0xd1, 0xb7, 0x2a, 0xe9, 0xce, 0xd3, 0xda, 0xfa, 0x6c, 0x66, 0x27, 0x59, + 0x96, 0x2f, 0x4a, 0x8b, 0xa2, 0x52, 0xf7, 0x7e, 0x2e, 0xa3, 0x4d, 0x7a, 0xcd, 0x1c, 0x8c, 0x0c, + 0xb2, 0xf3, 0xd3, 0xde, 0x8f, 0x65, 0xd4, 0xbd, 0x01, 0x23, 0x6b, 0x68, 0x25, 0x12, 0xa1, 0x0f, + 0x2e, 0x1f, 0x70, 0x60, 0xf8, 0x16, 0x59, 0x41, 0xf7, 0x22, 0x31, 0x12, 0xf2, 0x48, 0xe0, 0x25, + 0xb2, 0x85, 0xd6, 0x69, 0xa4, 0x0e, 0x40, 0x28, 0xee, 0x52, 0xc5, 0xa5, 0xd0, 0x10, 0x04, 0x32, + 0xc0, 0x1d, 0xb2, 0x8b, 0x76, 0x5c, 0x8f, 0x83, 0x50, 0xda, 0x8d, 0x42, 0x25, 0x0f, 0x21, 0xd0, + 0x9c, 0x69, 0x2e, 0x62, 0xea, 0x71, 0x86, 0xef, 0x90, 0x0d, 0x44, 0x1a, 0x40, 0x48, 0xa5, 0x07, + 0x32, 0x12, 0x0c, 0xdf, 0x27, 0x3b, 0x68, 0x63, 0x28, 0xe5, 0xd0, 0x03, 0x4d, 0x5d, 0x57, 0x46, + 0x42, 0x69, 0x06, 0x1e, 0x28, 0x60, 0xf8, 0x01, 0x79, 0x81, 0x9e, 0xb5, 0x30, 0x57, 0xca, 0x11, + 0x87, 0xc6, 0x16, 
0x91, 0x57, 0x68, 0xaf, 0x45, 0x69, 0xe5, 0x1b, 0x50, 0xee, 0x01, 0xc3, 0xdb, + 0xe4, 0x0d, 0x7a, 0xdd, 0x62, 0x46, 0x21, 0x04, 0x9a, 0x0a, 0xa6, 0x29, 0x0b, 0xab, 0x3f, 0x87, + 0x3c, 0x3c, 0xa4, 0xca, 0x3d, 0xc0, 0xab, 0x64, 0x1b, 0x3d, 0xf1, 0xe4, 0x90, 0x8b, 0xba, 0x65, + 0x00, 0x1f, 0x22, 0x1e, 0x00, 0xc3, 0x0f, 0x09, 0x46, 0xab, 0xe6, 0x06, 0xb5, 0x0a, 0x3f, 0x22, + 0x9b, 0xa8, 0x2b, 0x4d, 0x5f, 0xad, 0xe4, 0x08, 0x44, 0x13, 0x6f, 0xad, 0x0d, 0xc0, 0x47, 0xbf, + 0xf4, 0xc0, 0x66, 0x90, 0x57, 0x01, 0xc6, 0x43, 0xba, 0x6f, 0x72, 0x3e, 0x6e, 0x4b, 0x02, 0x88, + 0xe5, 0x08, 0x18, 0x26, 0x66, 0xc2, 0x57, 0x81, 0x03, 0xa0, 0xcc, 0x0c, 0xf9, 0xb2, 0x57, 0xd7, + 0x58, 0x5e, 0x4b, 0x5c, 0x23, 0xeb, 0xa4, 0x8b, 0xd6, 0xca, 0xeb, 0x5d, 0xf9, 0x20, 0x1b, 0xe4, + 0x25, 0xea, 0xa9, 0x23, 0xa9, 0x43, 0x05, 0xbe, 0x8e, 0x21, 0xe0, 0x83, 0x7a, 0x62, 0xe6, 0x6e, + 0x20, 0x02, 0xe9, 0x99, 0x3c, 0x9b, 0x64, 0x0f, 0x3d, 0xa7, 0x2c, 0xa6, 0xc2, 0x05, 0xa6, 0xfd, + 0x40, 0x2a, 0x70, 0xff, 0x66, 0x6d, 0xed, 0xff, 0x5e, 0x42, 0xbd, 0xe3, 0xfc, 0xd4, 0xfa, 0xff, + 0xbe, 0xee, 0x6f, 0xdd, 0xb0, 0x72, 0xbe, 0xd9, 0x55, 0x7f, 0xe9, 0x13, 0xbb, 0xd4, 0xa6, 0xf9, + 0x49, 0x92, 0xa5, 0x56, 0x3e, 0x4f, 0xed, 0x74, 0x9a, 0x95, 0x9b, 0x5c, 0x3f, 0x9b, 0xb3, 0x59, + 0xf1, 0xaf, 0x57, 0xf4, 0xbe, 0xfa, 0xf9, 0xda, 0xb9, 0x3d, 0xa4, 0xf4, 0x5b, 0x67, 0x77, 0x58, + 0x99, 0xd1, 0x49, 0x61, 0x55, 0xa5, 0xa9, 0xe2, 0xbe, 0x55, 0xb6, 0x2c, 0xbe, 0xd7, 0x84, 0x31, + 0x9d, 0x14, 0xe3, 0x86, 0x30, 0x8e, 0xfb, 0xe3, 0x8a, 0xf0, 0xab, 0xd3, 0xab, 0x4e, 0x1d, 0x87, + 0x4e, 0x0a, 0xc7, 0x69, 0x28, 0x8e, 0x13, 0xf7, 0x1d, 0xa7, 0x22, 0x7d, 0xbe, 0x5b, 0xa6, 0x7b, + 0xfb, 0x27, 0x00, 0x00, 0xff, 0xff, 0xc1, 0xb7, 0xd1, 0x26, 0xe2, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authorization_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authorization_error.pb.go new file mode 100644 index 0000000..71ed081 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/authorization_error.pb.go @@ -0,0 +1,167 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/authorization_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible authorization errors. +type AuthorizationErrorEnum_AuthorizationError int32 + +const ( + // Enum unspecified. + AuthorizationErrorEnum_UNSPECIFIED AuthorizationErrorEnum_AuthorizationError = 0 + // The received error code is not known in this version. + AuthorizationErrorEnum_UNKNOWN AuthorizationErrorEnum_AuthorizationError = 1 + // User doesn't have permission to access customer. Note: If you're + // accessing a client customer, the manager's customer id must be set in the + // 'login-customer-id' header. 
See + // https://developers.google.com/google-ads/api/docs/concepts/ + // call-structure#login-customer-id + AuthorizationErrorEnum_USER_PERMISSION_DENIED AuthorizationErrorEnum_AuthorizationError = 2 + // The developer token is not whitelisted. + AuthorizationErrorEnum_DEVELOPER_TOKEN_NOT_WHITELISTED AuthorizationErrorEnum_AuthorizationError = 3 + // The developer token is not allowed with the project sent in the request. + AuthorizationErrorEnum_DEVELOPER_TOKEN_PROHIBITED AuthorizationErrorEnum_AuthorizationError = 4 + // The Google Cloud project sent in the request does not have permission to + // access the api. + AuthorizationErrorEnum_PROJECT_DISABLED AuthorizationErrorEnum_AuthorizationError = 5 + // Authorization of the client failed. + AuthorizationErrorEnum_AUTHORIZATION_ERROR AuthorizationErrorEnum_AuthorizationError = 6 + // The user does not have permission to perform this action + // (e.g., ADD, UPDATE, REMOVE) on the resource or call a method. + AuthorizationErrorEnum_ACTION_NOT_PERMITTED AuthorizationErrorEnum_AuthorizationError = 7 + // Signup not complete. + AuthorizationErrorEnum_INCOMPLETE_SIGNUP AuthorizationErrorEnum_AuthorizationError = 8 + // The customer can't be used because it isn't enabled. + AuthorizationErrorEnum_CUSTOMER_NOT_ENABLED AuthorizationErrorEnum_AuthorizationError = 24 + // The developer must sign the terms of service. They can be found here: + // ads.google.com/aw/apicenter + AuthorizationErrorEnum_MISSING_TOS AuthorizationErrorEnum_AuthorizationError = 9 + // The developer token is not approved. Non-approved developer tokens can + // only be used with test accounts. + AuthorizationErrorEnum_DEVELOPER_TOKEN_NOT_APPROVED AuthorizationErrorEnum_AuthorizationError = 10 +) + +var AuthorizationErrorEnum_AuthorizationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "USER_PERMISSION_DENIED", + 3: "DEVELOPER_TOKEN_NOT_WHITELISTED", + 4: "DEVELOPER_TOKEN_PROHIBITED", + 5: "PROJECT_DISABLED", + 6: "AUTHORIZATION_ERROR", + 7: "ACTION_NOT_PERMITTED", + 8: "INCOMPLETE_SIGNUP", + 24: "CUSTOMER_NOT_ENABLED", + 9: "MISSING_TOS", + 10: "DEVELOPER_TOKEN_NOT_APPROVED", +} +var AuthorizationErrorEnum_AuthorizationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "USER_PERMISSION_DENIED": 2, + "DEVELOPER_TOKEN_NOT_WHITELISTED": 3, + "DEVELOPER_TOKEN_PROHIBITED": 4, + "PROJECT_DISABLED": 5, + "AUTHORIZATION_ERROR": 6, + "ACTION_NOT_PERMITTED": 7, + "INCOMPLETE_SIGNUP": 8, + "CUSTOMER_NOT_ENABLED": 24, + "MISSING_TOS": 9, + "DEVELOPER_TOKEN_NOT_APPROVED": 10, +} + +func (x AuthorizationErrorEnum_AuthorizationError) String() string { + return proto.EnumName(AuthorizationErrorEnum_AuthorizationError_name, int32(x)) +} +func (AuthorizationErrorEnum_AuthorizationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_authorization_error_456d20243b5f6327, []int{0, 0} +} + +// Container for enum describing possible authorization errors. 
+type AuthorizationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthorizationErrorEnum) Reset() { *m = AuthorizationErrorEnum{} } +func (m *AuthorizationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*AuthorizationErrorEnum) ProtoMessage() {} +func (*AuthorizationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_authorization_error_456d20243b5f6327, []int{0} +} +func (m *AuthorizationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthorizationErrorEnum.Unmarshal(m, b) +} +func (m *AuthorizationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthorizationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *AuthorizationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthorizationErrorEnum.Merge(dst, src) +} +func (m *AuthorizationErrorEnum) XXX_Size() int { + return xxx_messageInfo_AuthorizationErrorEnum.Size(m) +} +func (m *AuthorizationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_AuthorizationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthorizationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AuthorizationErrorEnum)(nil), "google.ads.googleads.v1.errors.AuthorizationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.AuthorizationErrorEnum_AuthorizationError", AuthorizationErrorEnum_AuthorizationError_name, AuthorizationErrorEnum_AuthorizationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/authorization_error.proto", fileDescriptor_authorization_error_456d20243b5f6327) +} + +var fileDescriptor_authorization_error_456d20243b5f6327 = []byte{ + // 462 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x6e, 0xd3, 0x30, + 0x18, 0xc7, 0x69, 0x06, 0x1b, 0x78, 0x07, 0x82, 0x19, 0xdb, 0x54, 0x4d, 0x05, 0x95, 0x7b, 0xa2, + 0x88, 0x0b, 0x0a, 0x27, 0x37, 0xf9, 0x68, 0xcd, 0x5a, 0xdb, 0x72, 0x9c, 0x4c, 0x9a, 0x2a, 0x45, + 0x81, 0x54, 0xa1, 0xd2, 0x16, 0x57, 0x49, 0xb7, 0x03, 0x8f, 0xc3, 0x91, 0xa7, 0xe0, 0xcc, 0x2b, + 0xf0, 0x06, 0x1c, 0x79, 0x02, 0xe4, 0x98, 0x56, 0x88, 0xc2, 0x4e, 0xf9, 0xf4, 0xf9, 0xf7, 0xff, + 0xff, 0xfd, 0xe5, 0x33, 0x7a, 0x5d, 0x69, 0x5d, 0x5d, 0x2d, 0xfc, 0xa2, 0x6c, 0x7d, 0x5b, 0x9a, + 0xea, 0x36, 0xf0, 0x17, 0x4d, 0xa3, 0x9b, 0xd6, 0x2f, 0x6e, 0xd6, 0x1f, 0x75, 0xb3, 0xfc, 0x54, + 0xac, 0x97, 0xba, 0xce, 0xbb, 0xa6, 0xb7, 0x6a, 0xf4, 0x5a, 0xe3, 0x81, 0xc5, 0xbd, 0xa2, 0x6c, + 0xbd, 0xad, 0xd2, 0xbb, 0x0d, 0x3c, 0xab, 0xec, 0x9f, 0x6d, 0x9c, 0x57, 0x4b, 0xbf, 0xa8, 0x6b, + 0xbd, 0xee, 0x2c, 0x5a, 0xab, 0x1e, 0x7e, 0x77, 0xd0, 0x31, 0xf9, 0xd3, 0x1b, 0x8c, 0x0a, 0xea, + 0x9b, 0xeb, 0xe1, 0x57, 0x07, 0xe1, 0xdd, 0x23, 0xfc, 0x18, 0x1d, 0xa6, 0x2c, 0x11, 0x10, 0xd1, + 0xb7, 0x14, 0x62, 0xf7, 0x1e, 0x3e, 0x44, 0x07, 0x29, 0x3b, 0x67, 0xfc, 0x82, 0xb9, 0x3d, 0xdc, + 0x47, 0xc7, 0x69, 0x02, 0x32, 0x17, 0x20, 0x67, 0x34, 0x49, 0x28, 0x67, 0x79, 0x0c, 0xcc, 0x80, + 0x0e, 0x7e, 0x89, 0x9e, 0xc7, 0x90, 0xc1, 0x94, 0x0b, 0x90, 0xb9, 0xe2, 0xe7, 0xc0, 0x72, 0xc6, + 0x55, 0x7e, 0x31, 0xa1, 0x0a, 0xa6, 0x34, 0x51, 0x10, 0xbb, 0x7b, 0x78, 0x80, 0xfa, 0x7f, 0x43, + 0x42, 0xf2, 0x09, 0x1d, 0x51, 0x73, 0x7e, 0x1f, 0x1f, 0x21, 0x57, 0x48, 0xfe, 0x0e, 0x22, 0x95, + 0xc7, 0x34, 0x21, 0xa3, 0x29, 0xc4, 0xee, 0x03, 0x7c, 0x82, 0x9e, 0x92, 0x54, 0x4d, 0xb8, 0xa4, + 0x97, 0x44, 0x99, 0x50, 0x90, 0x92, 0x4b, 0x77, 
0x1f, 0x9f, 0xa2, 0x23, 0x12, 0x75, 0x1d, 0x13, + 0xd5, 0xdd, 0x4a, 0x19, 0xa3, 0x03, 0xfc, 0x0c, 0x3d, 0xa1, 0x2c, 0xe2, 0x33, 0x31, 0x05, 0x05, + 0x79, 0x42, 0xc7, 0x2c, 0x15, 0xee, 0x43, 0x23, 0x88, 0xd2, 0x44, 0xf1, 0x19, 0xc8, 0x4e, 0x02, + 0xcc, 0x66, 0x9c, 0x9a, 0xc1, 0xbb, 0x91, 0xd8, 0x38, 0x57, 0x3c, 0x71, 0x1f, 0xe1, 0x17, 0xe8, + 0xec, 0x5f, 0xf3, 0x10, 0x21, 0x24, 0xcf, 0x20, 0x76, 0xd1, 0xe8, 0x67, 0x0f, 0x0d, 0x3f, 0xe8, + 0x6b, 0xef, 0xee, 0x15, 0x8d, 0x4e, 0x76, 0x7f, 0xb3, 0x30, 0xdb, 0x11, 0xbd, 0xcb, 0xf8, 0xb7, + 0xb4, 0xd2, 0x57, 0x45, 0x5d, 0x79, 0xba, 0xa9, 0xfc, 0x6a, 0x51, 0x77, 0xbb, 0xdb, 0xbc, 0x93, + 0xd5, 0xb2, 0xfd, 0xdf, 0xb3, 0x79, 0x63, 0x3f, 0x9f, 0x9d, 0xbd, 0x31, 0x21, 0x5f, 0x9c, 0xc1, + 0xd8, 0x9a, 0x91, 0xb2, 0xf5, 0x6c, 0x69, 0xaa, 0x2c, 0xf0, 0xba, 0xc8, 0xf6, 0xdb, 0x06, 0x98, + 0x93, 0xb2, 0x9d, 0x6f, 0x81, 0x79, 0x16, 0xcc, 0x2d, 0xf0, 0xc3, 0x19, 0xda, 0x6e, 0x18, 0x92, + 0xb2, 0x0d, 0xc3, 0x2d, 0x12, 0x86, 0x59, 0x10, 0x86, 0x16, 0x7a, 0xbf, 0xdf, 0xdd, 0xee, 0xd5, + 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x68, 0x0f, 0xf8, 0xb2, 0xd3, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_error.pb.go new file mode 100644 index 0000000..947053d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_error.pb.go @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/bidding_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible bidding errors. +type BiddingErrorEnum_BiddingError int32 + +const ( + // Enum unspecified. + BiddingErrorEnum_UNSPECIFIED BiddingErrorEnum_BiddingError = 0 + // The received error code is not known in this version. + BiddingErrorEnum_UNKNOWN BiddingErrorEnum_BiddingError = 1 + // Cannot transition to new bidding strategy. + BiddingErrorEnum_BIDDING_STRATEGY_TRANSITION_NOT_ALLOWED BiddingErrorEnum_BiddingError = 2 + // Cannot attach bidding strategy to campaign. + BiddingErrorEnum_CANNOT_ATTACH_BIDDING_STRATEGY_TO_CAMPAIGN BiddingErrorEnum_BiddingError = 7 + // Bidding strategy is not supported or cannot be used as anonymous. + BiddingErrorEnum_INVALID_ANONYMOUS_BIDDING_STRATEGY_TYPE BiddingErrorEnum_BiddingError = 10 + // The type does not match the named strategy's type. + BiddingErrorEnum_INVALID_BIDDING_STRATEGY_TYPE BiddingErrorEnum_BiddingError = 14 + // The bid is invalid. + BiddingErrorEnum_INVALID_BID BiddingErrorEnum_BiddingError = 17 + // Bidding strategy is not available for the account type. + BiddingErrorEnum_BIDDING_STRATEGY_NOT_AVAILABLE_FOR_ACCOUNT_TYPE BiddingErrorEnum_BiddingError = 18 + // Conversion tracking is not enabled for the campaign for VBB transition. 
+ BiddingErrorEnum_CONVERSION_TRACKING_NOT_ENABLED BiddingErrorEnum_BiddingError = 19 + // Not enough conversions tracked for VBB transitions. + BiddingErrorEnum_NOT_ENOUGH_CONVERSIONS BiddingErrorEnum_BiddingError = 20 + // Campaign can not be created with given bidding strategy. It can be + // transitioned to the strategy, once eligible. + BiddingErrorEnum_CANNOT_CREATE_CAMPAIGN_WITH_BIDDING_STRATEGY BiddingErrorEnum_BiddingError = 21 + // Cannot target content network only as campaign uses Page One Promoted + // bidding strategy. + BiddingErrorEnum_CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CAMPAIGN_LEVEL_POP_BIDDING_STRATEGY BiddingErrorEnum_BiddingError = 23 + // Budget Optimizer and Target Spend bidding strategies are not supported + // for campaigns with AdSchedule targeting. + BiddingErrorEnum_BIDDING_STRATEGY_NOT_SUPPORTED_WITH_AD_SCHEDULE BiddingErrorEnum_BiddingError = 24 + // Pay per conversion is not available to all the customer, only few + // whitelisted customers can use this. + BiddingErrorEnum_PAY_PER_CONVERSION_NOT_AVAILABLE_FOR_CUSTOMER BiddingErrorEnum_BiddingError = 25 + // Pay per conversion is not allowed with Target CPA. + BiddingErrorEnum_PAY_PER_CONVERSION_NOT_ALLOWED_WITH_TARGET_CPA BiddingErrorEnum_BiddingError = 26 + // Cannot set bidding strategy to Manual CPM for search network only + // campaigns. + BiddingErrorEnum_BIDDING_STRATEGY_NOT_ALLOWED_FOR_SEARCH_ONLY_CAMPAIGNS BiddingErrorEnum_BiddingError = 27 + // The bidding strategy is not supported for use in drafts or experiments. + BiddingErrorEnum_BIDDING_STRATEGY_NOT_SUPPORTED_IN_DRAFTS_OR_EXPERIMENTS BiddingErrorEnum_BiddingError = 28 + // Bidding strategy type does not support product type ad group criterion. + BiddingErrorEnum_BIDDING_STRATEGY_TYPE_DOES_NOT_SUPPORT_PRODUCT_TYPE_ADGROUP_CRITERION BiddingErrorEnum_BiddingError = 29 + // Bid amount is too small. + BiddingErrorEnum_BID_TOO_SMALL BiddingErrorEnum_BiddingError = 30 + // Bid amount is too big. + BiddingErrorEnum_BID_TOO_BIG BiddingErrorEnum_BiddingError = 31 + // Bid has too many fractional digit precision. + BiddingErrorEnum_BID_TOO_MANY_FRACTIONAL_DIGITS BiddingErrorEnum_BiddingError = 32 + // Invalid domain name specified. + BiddingErrorEnum_INVALID_DOMAIN_NAME BiddingErrorEnum_BiddingError = 33 + // The field is not compatible with the payment mode. + BiddingErrorEnum_NOT_COMPATIBLE_WITH_PAYMENT_MODE BiddingErrorEnum_BiddingError = 34 + // The field is not compatible with the budget type. 
+ BiddingErrorEnum_NOT_COMPATIBLE_WITH_BUDGET_TYPE BiddingErrorEnum_BiddingError = 35 +) + +var BiddingErrorEnum_BiddingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BIDDING_STRATEGY_TRANSITION_NOT_ALLOWED", + 7: "CANNOT_ATTACH_BIDDING_STRATEGY_TO_CAMPAIGN", + 10: "INVALID_ANONYMOUS_BIDDING_STRATEGY_TYPE", + 14: "INVALID_BIDDING_STRATEGY_TYPE", + 17: "INVALID_BID", + 18: "BIDDING_STRATEGY_NOT_AVAILABLE_FOR_ACCOUNT_TYPE", + 19: "CONVERSION_TRACKING_NOT_ENABLED", + 20: "NOT_ENOUGH_CONVERSIONS", + 21: "CANNOT_CREATE_CAMPAIGN_WITH_BIDDING_STRATEGY", + 23: "CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CAMPAIGN_LEVEL_POP_BIDDING_STRATEGY", + 24: "BIDDING_STRATEGY_NOT_SUPPORTED_WITH_AD_SCHEDULE", + 25: "PAY_PER_CONVERSION_NOT_AVAILABLE_FOR_CUSTOMER", + 26: "PAY_PER_CONVERSION_NOT_ALLOWED_WITH_TARGET_CPA", + 27: "BIDDING_STRATEGY_NOT_ALLOWED_FOR_SEARCH_ONLY_CAMPAIGNS", + 28: "BIDDING_STRATEGY_NOT_SUPPORTED_IN_DRAFTS_OR_EXPERIMENTS", + 29: "BIDDING_STRATEGY_TYPE_DOES_NOT_SUPPORT_PRODUCT_TYPE_ADGROUP_CRITERION", + 30: "BID_TOO_SMALL", + 31: "BID_TOO_BIG", + 32: "BID_TOO_MANY_FRACTIONAL_DIGITS", + 33: "INVALID_DOMAIN_NAME", + 34: "NOT_COMPATIBLE_WITH_PAYMENT_MODE", + 35: "NOT_COMPATIBLE_WITH_BUDGET_TYPE", +} +var BiddingErrorEnum_BiddingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BIDDING_STRATEGY_TRANSITION_NOT_ALLOWED": 2, + "CANNOT_ATTACH_BIDDING_STRATEGY_TO_CAMPAIGN": 7, + "INVALID_ANONYMOUS_BIDDING_STRATEGY_TYPE": 10, + "INVALID_BIDDING_STRATEGY_TYPE": 14, + "INVALID_BID": 17, + "BIDDING_STRATEGY_NOT_AVAILABLE_FOR_ACCOUNT_TYPE": 18, + "CONVERSION_TRACKING_NOT_ENABLED": 19, + "NOT_ENOUGH_CONVERSIONS": 20, + "CANNOT_CREATE_CAMPAIGN_WITH_BIDDING_STRATEGY": 21, + "CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CAMPAIGN_LEVEL_POP_BIDDING_STRATEGY": 23, + "BIDDING_STRATEGY_NOT_SUPPORTED_WITH_AD_SCHEDULE": 24, + "PAY_PER_CONVERSION_NOT_AVAILABLE_FOR_CUSTOMER": 25, + "PAY_PER_CONVERSION_NOT_ALLOWED_WITH_TARGET_CPA": 26, + "BIDDING_STRATEGY_NOT_ALLOWED_FOR_SEARCH_ONLY_CAMPAIGNS": 27, + "BIDDING_STRATEGY_NOT_SUPPORTED_IN_DRAFTS_OR_EXPERIMENTS": 28, + "BIDDING_STRATEGY_TYPE_DOES_NOT_SUPPORT_PRODUCT_TYPE_ADGROUP_CRITERION": 29, + "BID_TOO_SMALL": 30, + "BID_TOO_BIG": 31, + "BID_TOO_MANY_FRACTIONAL_DIGITS": 32, + "INVALID_DOMAIN_NAME": 33, + "NOT_COMPATIBLE_WITH_PAYMENT_MODE": 34, + "NOT_COMPATIBLE_WITH_BUDGET_TYPE": 35, +} + +func (x BiddingErrorEnum_BiddingError) String() string { + return proto.EnumName(BiddingErrorEnum_BiddingError_name, int32(x)) +} +func (BiddingErrorEnum_BiddingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bidding_error_0777b280f66560dd, []int{0, 0} +} + +// Container for enum describing possible bidding errors. 
+type BiddingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingErrorEnum) Reset() { *m = BiddingErrorEnum{} } +func (m *BiddingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*BiddingErrorEnum) ProtoMessage() {} +func (*BiddingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_error_0777b280f66560dd, []int{0} +} +func (m *BiddingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingErrorEnum.Unmarshal(m, b) +} +func (m *BiddingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *BiddingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingErrorEnum.Merge(dst, src) +} +func (m *BiddingErrorEnum) XXX_Size() int { + return xxx_messageInfo_BiddingErrorEnum.Size(m) +} +func (m *BiddingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BiddingErrorEnum)(nil), "google.ads.googleads.v1.errors.BiddingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.BiddingErrorEnum_BiddingError", BiddingErrorEnum_BiddingError_name, BiddingErrorEnum_BiddingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/bidding_error.proto", fileDescriptor_bidding_error_0777b280f66560dd) +} + +var fileDescriptor_bidding_error_0777b280f66560dd = []byte{ + // 729 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xed, 0x8e, 0xe3, 0x34, + 0x14, 0x65, 0x8a, 0xc4, 0x20, 0x2f, 0x1f, 0x1e, 0x2f, 0xb0, 0x30, 0xec, 0x76, 0x77, 0xbb, 0x48, + 0x48, 0x7c, 0xa4, 0x74, 0x57, 0x02, 0x29, 0xfb, 0xeb, 0x26, 0x76, 0x53, 0xab, 0x89, 0x6d, 0xd9, + 0x4e, 0x4b, 0x51, 0x25, 0xab, 0x4b, 0x47, 0x55, 0xa5, 0xdd, 0x66, 0xd4, 0x0c, 0xf3, 0x04, 0x3c, + 0x09, 0x3f, 0x79, 0x14, 0x1e, 0x05, 0x21, 0xf1, 0x0a, 0xc8, 0x71, 0x53, 0x2a, 0xb5, 0x03, 0xbf, + 0x72, 0x73, 0x75, 0xce, 0xb1, 0xcf, 0xcd, 0xc9, 0x45, 0xcf, 0x57, 0x55, 0xb5, 0x7a, 0x7d, 0xd5, + 0x5f, 0x2c, 0xeb, 0x7e, 0x28, 0x7d, 0x75, 0x3b, 0xe8, 0x5f, 0x6d, 0xb7, 0xd5, 0xb6, 0xee, 0xbf, + 0x5a, 0x2f, 0x97, 0xeb, 0xcd, 0xca, 0x35, 0xaf, 0xd1, 0xf5, 0xb6, 0xba, 0xa9, 0x48, 0x37, 0x00, + 0xa3, 0xc5, 0xb2, 0x8e, 0xf6, 0x9c, 0xe8, 0x76, 0x10, 0x05, 0xce, 0xe5, 0xc3, 0x56, 0xf3, 0x7a, + 0xdd, 0x5f, 0x6c, 0x36, 0xd5, 0xcd, 0xe2, 0x66, 0x5d, 0x6d, 0xea, 0xc0, 0xee, 0xfd, 0xfa, 0x2e, + 0xc2, 0x49, 0x50, 0x65, 0x1e, 0xcf, 0x36, 0xbf, 0xbc, 0xe9, 0xfd, 0x7d, 0x8e, 0xde, 0x3b, 0x6c, + 0x92, 0x0f, 0xd1, 0xbd, 0x52, 0x18, 0xc5, 0x52, 0x3e, 0xe4, 0x8c, 0xe2, 0xb7, 0xc8, 0x3d, 0x74, + 0x5e, 0x8a, 0xb1, 0x90, 0x53, 0x81, 0xcf, 0xc8, 0xd7, 0xe8, 0xcb, 0x84, 0x53, 0xca, 0x45, 0xe6, + 0x8c, 0xd5, 0x60, 0x59, 0x36, 0x73, 0x56, 0x83, 0x30, 0xdc, 0x72, 0x29, 0x9c, 0x90, 0xd6, 0x41, + 0x9e, 0xcb, 0x29, 0xa3, 0xb8, 0x43, 0x22, 0xf4, 0x55, 0x0a, 0xa2, 0xe9, 0x59, 0x0b, 0xe9, 0xc8, + 0x1d, 0x53, 0xa5, 0x4b, 0xa1, 0x50, 0xc0, 0x33, 0x81, 0xcf, 0xbd, 0x38, 0x17, 0x13, 0xc8, 0x39, + 0x75, 0x20, 0xa4, 0x98, 0x15, 0xb2, 0x34, 0x27, 0x38, 0x33, 0xc5, 0x30, 0x22, 0x4f, 0xd1, 0xa3, + 0x16, 0x7c, 0x1a, 0xf2, 0x81, 0xb7, 0x72, 0x00, 0xc1, 0x17, 0xe4, 0x05, 0xea, 0x1f, 0x61, 0x9b, + 0xeb, 0x4d, 0x80, 0xe7, 0x90, 0xe4, 0xcc, 0x0d, 0xa5, 0x76, 0x90, 0xa6, 0xb2, 0x14, 0x36, 0xa8, + 0x10, 0xf2, 0x0c, 0x3d, 
0x4e, 0xa5, 0x98, 0x30, 0x6d, 0xbc, 0x43, 0xab, 0x21, 0x1d, 0x7b, 0x01, + 0xcf, 0x63, 0xc2, 0x93, 0x28, 0xbe, 0x4f, 0x2e, 0xd1, 0x27, 0xa1, 0x21, 0xcb, 0x6c, 0xe4, 0xfe, + 0xc5, 0x1b, 0xfc, 0x11, 0xf9, 0x0e, 0x7d, 0xb3, 0x1b, 0x43, 0xaa, 0x19, 0x58, 0xb6, 0xb7, 0xec, + 0xa6, 0xdc, 0x1e, 0x0f, 0x05, 0x7f, 0x4c, 0x24, 0x1a, 0xef, 0x18, 0x16, 0x74, 0xc6, 0xac, 0x17, + 0xb4, 0x4c, 0x58, 0x27, 0x98, 0x9d, 0x4a, 0x3d, 0x76, 0x52, 0xe4, 0xb3, 0xc0, 0xde, 0x6b, 0xe5, + 0x6c, 0xc2, 0x72, 0xa7, 0xa4, 0x3a, 0x16, 0x7c, 0x70, 0xa7, 0x71, 0x53, 0x2a, 0x25, 0xb5, 0x65, + 0x34, 0x88, 0x01, 0x75, 0x26, 0x1d, 0x31, 0x5a, 0xe6, 0x0c, 0x7f, 0x4a, 0x06, 0xe8, 0x5b, 0x05, + 0x33, 0xa7, 0x98, 0x3e, 0x30, 0x74, 0x62, 0x5e, 0x69, 0x69, 0xac, 0x2c, 0x98, 0xc6, 0x9f, 0x91, + 0xe7, 0x28, 0xba, 0x8b, 0x12, 0x52, 0x11, 0xce, 0x69, 0x5d, 0x29, 0xc0, 0x97, 0x24, 0x46, 0xdf, + 0x9f, 0xfe, 0x28, 0x3b, 0x86, 0x3f, 0xc2, 0x30, 0xd0, 0xe9, 0x28, 0xd8, 0x6e, 0x1d, 0x1b, 0xfc, + 0x39, 0x79, 0x89, 0x7e, 0xf8, 0x1f, 0x5f, 0x5c, 0x38, 0xaa, 0x61, 0x68, 0x8d, 0x93, 0xda, 0xb1, + 0x1f, 0x15, 0xd3, 0xbc, 0x60, 0xc2, 0x1a, 0xfc, 0x90, 0x70, 0xc4, 0x4e, 0x26, 0xc7, 0x51, 0xc9, + 0xcc, 0xa1, 0x8c, 0x53, 0x5a, 0xd2, 0x32, 0x0d, 0x89, 0x70, 0x40, 0x33, 0x2d, 0x4b, 0xe5, 0x52, + 0xcd, 0x2d, 0xd3, 0x5c, 0x0a, 0xfc, 0x88, 0x5c, 0xa0, 0xf7, 0x13, 0x4e, 0x9d, 0x95, 0xd2, 0x99, + 0x02, 0xf2, 0x1c, 0x77, 0x7d, 0xf8, 0xda, 0x56, 0xc2, 0x33, 0xfc, 0x98, 0xf4, 0x50, 0xb7, 0x6d, + 0x14, 0x20, 0x66, 0x6e, 0xa8, 0x21, 0xf5, 0x3f, 0x0d, 0xe4, 0x8e, 0xf2, 0x8c, 0x5b, 0x83, 0x9f, + 0x90, 0x07, 0xe8, 0x7e, 0x9b, 0x58, 0x2a, 0x0b, 0xe0, 0xc2, 0x09, 0x28, 0x18, 0x7e, 0x4a, 0xbe, + 0x40, 0x4f, 0x9a, 0x00, 0xc9, 0x42, 0x81, 0xe5, 0x7e, 0xf2, 0xcd, 0x20, 0x15, 0xcc, 0xbc, 0x1b, + 0x57, 0x48, 0xca, 0x70, 0xcf, 0x47, 0xf5, 0x14, 0x2a, 0x29, 0xa9, 0x1f, 0x77, 0x93, 0xe7, 0x67, + 0xc9, 0x5f, 0x67, 0xa8, 0xf7, 0x73, 0xf5, 0x26, 0xfa, 0xef, 0x5d, 0x92, 0x5c, 0x1c, 0x6e, 0x05, + 0xe5, 0x17, 0x88, 0x3a, 0xfb, 0x89, 0xee, 0x48, 0xab, 0xea, 0xf5, 0x62, 0xb3, 0x8a, 0xaa, 0xed, + 0xaa, 0xbf, 0xba, 0xda, 0x34, 0xeb, 0xa5, 0x5d, 0x62, 0xd7, 0xeb, 0xfa, 0xae, 0x9d, 0xf6, 0x32, + 0x3c, 0x7e, 0xeb, 0xbc, 0x9d, 0x01, 0xfc, 0xde, 0xe9, 0x66, 0x41, 0x0c, 0x96, 0x75, 0x14, 0x4a, + 0x5f, 0x4d, 0x06, 0x51, 0x73, 0x64, 0xfd, 0x47, 0x0b, 0x98, 0xc3, 0xb2, 0x9e, 0xef, 0x01, 0xf3, + 0xc9, 0x60, 0x1e, 0x00, 0x7f, 0x76, 0x7a, 0xa1, 0x1b, 0xc7, 0xb0, 0xac, 0xe3, 0x78, 0x0f, 0x89, + 0xe3, 0xc9, 0x20, 0x8e, 0x03, 0xe8, 0xd5, 0x3b, 0xcd, 0xed, 0x5e, 0xfc, 0x13, 0x00, 0x00, 0xff, + 0xff, 0xd8, 0x74, 0xa6, 0xcc, 0x70, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_strategy_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_strategy_error.pb.go new file mode 100644 index 0000000..69cc129 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/bidding_strategy_error.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/bidding_strategy_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible bidding strategy errors. +type BiddingStrategyErrorEnum_BiddingStrategyError int32 + +const ( + // Enum unspecified. + BiddingStrategyErrorEnum_UNSPECIFIED BiddingStrategyErrorEnum_BiddingStrategyError = 0 + // The received error code is not known in this version. + BiddingStrategyErrorEnum_UNKNOWN BiddingStrategyErrorEnum_BiddingStrategyError = 1 + // Each bidding strategy must have a unique name. + BiddingStrategyErrorEnum_DUPLICATE_NAME BiddingStrategyErrorEnum_BiddingStrategyError = 2 + // Bidding strategy type is immutable. + BiddingStrategyErrorEnum_CANNOT_CHANGE_BIDDING_STRATEGY_TYPE BiddingStrategyErrorEnum_BiddingStrategyError = 3 + // Only bidding strategies not linked to campaigns, adgroups or adgroup + // criteria can be removed. + BiddingStrategyErrorEnum_CANNOT_REMOVE_ASSOCIATED_STRATEGY BiddingStrategyErrorEnum_BiddingStrategyError = 4 + // The specified bidding strategy is not supported. + BiddingStrategyErrorEnum_BIDDING_STRATEGY_NOT_SUPPORTED BiddingStrategyErrorEnum_BiddingStrategyError = 5 + // The bidding strategy is incompatible with the campaign's bidding + // strategy goal type. + BiddingStrategyErrorEnum_INCOMPATIBLE_BIDDING_STRATEGY_AND_BIDDING_STRATEGY_GOAL_TYPE BiddingStrategyErrorEnum_BiddingStrategyError = 6 +) + +var BiddingStrategyErrorEnum_BiddingStrategyError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DUPLICATE_NAME", + 3: "CANNOT_CHANGE_BIDDING_STRATEGY_TYPE", + 4: "CANNOT_REMOVE_ASSOCIATED_STRATEGY", + 5: "BIDDING_STRATEGY_NOT_SUPPORTED", + 6: "INCOMPATIBLE_BIDDING_STRATEGY_AND_BIDDING_STRATEGY_GOAL_TYPE", +} +var BiddingStrategyErrorEnum_BiddingStrategyError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DUPLICATE_NAME": 2, + "CANNOT_CHANGE_BIDDING_STRATEGY_TYPE": 3, + "CANNOT_REMOVE_ASSOCIATED_STRATEGY": 4, + "BIDDING_STRATEGY_NOT_SUPPORTED": 5, + "INCOMPATIBLE_BIDDING_STRATEGY_AND_BIDDING_STRATEGY_GOAL_TYPE": 6, +} + +func (x BiddingStrategyErrorEnum_BiddingStrategyError) String() string { + return proto.EnumName(BiddingStrategyErrorEnum_BiddingStrategyError_name, int32(x)) +} +func (BiddingStrategyErrorEnum_BiddingStrategyError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_error_3abccb3fafd9c38d, []int{0, 0} +} + +// Container for enum describing possible bidding strategy errors. 
+type BiddingStrategyErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingStrategyErrorEnum) Reset() { *m = BiddingStrategyErrorEnum{} } +func (m *BiddingStrategyErrorEnum) String() string { return proto.CompactTextString(m) } +func (*BiddingStrategyErrorEnum) ProtoMessage() {} +func (*BiddingStrategyErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_error_3abccb3fafd9c38d, []int{0} +} +func (m *BiddingStrategyErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingStrategyErrorEnum.Unmarshal(m, b) +} +func (m *BiddingStrategyErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingStrategyErrorEnum.Marshal(b, m, deterministic) +} +func (dst *BiddingStrategyErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingStrategyErrorEnum.Merge(dst, src) +} +func (m *BiddingStrategyErrorEnum) XXX_Size() int { + return xxx_messageInfo_BiddingStrategyErrorEnum.Size(m) +} +func (m *BiddingStrategyErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingStrategyErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingStrategyErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BiddingStrategyErrorEnum)(nil), "google.ads.googleads.v1.errors.BiddingStrategyErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.BiddingStrategyErrorEnum_BiddingStrategyError", BiddingStrategyErrorEnum_BiddingStrategyError_name, BiddingStrategyErrorEnum_BiddingStrategyError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/bidding_strategy_error.proto", fileDescriptor_bidding_strategy_error_3abccb3fafd9c38d) +} + +var fileDescriptor_bidding_strategy_error_3abccb3fafd9c38d = []byte{ + // 408 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x8b, 0xd4, 0x30, + 0x1c, 0xc5, 0x9d, 0xae, 0xae, 0x90, 0x05, 0x2d, 0xc1, 0x83, 0x8a, 0x0c, 0x58, 0x11, 0x6f, 0x29, + 0xc5, 0x5b, 0xd6, 0x83, 0x69, 0x13, 0x6b, 0x70, 0x26, 0x0d, 0xd3, 0xcc, 0xc8, 0xca, 0x40, 0xe8, + 0xda, 0x21, 0x14, 0x76, 0x9b, 0xa1, 0xa9, 0x0b, 0x7e, 0x16, 0x6f, 0x1e, 0xfd, 0x28, 0x7e, 0x14, + 0xcf, 0x9e, 0x45, 0x3a, 0x99, 0xe9, 0x65, 0x47, 0x4f, 0x7d, 0xfc, 0xfb, 0x7e, 0x2f, 0x7f, 0xde, + 0x1f, 0x9c, 0x1b, 0x6b, 0xcd, 0xd5, 0x26, 0xae, 0x6a, 0x17, 0x7b, 0x39, 0xa8, 0x9b, 0x24, 0xde, + 0x74, 0x9d, 0xed, 0x5c, 0x7c, 0xd9, 0xd4, 0x75, 0xd3, 0x1a, 0xed, 0xfa, 0xae, 0xea, 0x37, 0xe6, + 0xab, 0xde, 0xcd, 0xd1, 0xb6, 0xb3, 0xbd, 0x85, 0x53, 0x4f, 0xa0, 0xaa, 0x76, 0x68, 0x84, 0xd1, + 0x4d, 0x82, 0x3c, 0xfc, 0xf4, 0xd9, 0x21, 0x7c, 0xdb, 0xc4, 0x55, 0xdb, 0xda, 0xbe, 0xea, 0x1b, + 0xdb, 0x3a, 0x4f, 0x47, 0xdf, 0x02, 0xf0, 0x38, 0xf5, 0xf1, 0xe5, 0x3e, 0x9d, 0x0d, 0x1c, 0x6b, + 0xbf, 0x5c, 0x47, 0x7f, 0x26, 0xe0, 0xd1, 0xb1, 0x9f, 0xf0, 0x21, 0x38, 0x5b, 0x8a, 0x52, 0xb2, + 0x8c, 0xbf, 0xe3, 0x8c, 0x86, 0x77, 0xe0, 0x19, 0xb8, 0xbf, 0x14, 0x1f, 0x44, 0xf1, 0x51, 0x84, + 0x13, 0x08, 0xc1, 0x03, 0xba, 0x94, 0x33, 0x9e, 0x11, 0xc5, 0xb4, 0x20, 0x73, 0x16, 0x06, 0xf0, + 0x15, 0x78, 0x91, 0x11, 0x21, 0x0a, 0xa5, 0xb3, 0xf7, 0x44, 0xe4, 0x4c, 0xa7, 0x9c, 0x52, 0x2e, + 0x72, 0x5d, 0xaa, 0x05, 0x51, 0x2c, 0xbf, 0xd0, 0xea, 0x42, 0xb2, 0xf0, 0x04, 0xbe, 0x04, 0xcf, + 0xf7, 0xc6, 0x05, 0x9b, 0x17, 0x2b, 0xa6, 0x49, 0x59, 0x16, 0x19, 0x27, 0x8a, 0xd1, 0xd1, 0x1b, + 0xde, 0x85, 0x11, 0x98, 0xde, 0x4a, 0x18, 0xa0, 0x72, 0x29, 0x65, 0xb1, 0x50, 0x8c, 
0x86, 0xf7, + 0xe0, 0x5b, 0xf0, 0x86, 0x8b, 0xac, 0x98, 0x4b, 0xa2, 0x78, 0x3a, 0x3b, 0xf2, 0x24, 0x11, 0xf4, + 0xf6, 0x30, 0x2f, 0xc8, 0xcc, 0x2f, 0x73, 0x9a, 0xfe, 0x9e, 0x80, 0xe8, 0xb3, 0xbd, 0x46, 0xff, + 0xaf, 0x38, 0x7d, 0x72, 0xac, 0x24, 0x39, 0xf4, 0x2b, 0x27, 0x9f, 0xe8, 0x1e, 0x36, 0xf6, 0xaa, + 0x6a, 0x0d, 0xb2, 0x9d, 0x89, 0xcd, 0xa6, 0xdd, 0xb5, 0x7f, 0x38, 0xf6, 0xb6, 0x71, 0xff, 0xba, + 0xfd, 0xb9, 0xff, 0x7c, 0x0f, 0x4e, 0x72, 0x42, 0x7e, 0x04, 0xd3, 0xdc, 0x87, 0x91, 0xda, 0x21, + 0x2f, 0x07, 0xb5, 0x4a, 0xd0, 0xee, 0x49, 0xf7, 0xf3, 0x60, 0x58, 0x93, 0xda, 0xad, 0x47, 0xc3, + 0x7a, 0x95, 0xac, 0xbd, 0xe1, 0x57, 0x10, 0xf9, 0x29, 0xc6, 0xa4, 0x76, 0x18, 0x8f, 0x16, 0x8c, + 0x57, 0x09, 0xc6, 0xde, 0x74, 0x79, 0xba, 0xdb, 0xee, 0xf5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x22, 0xb7, 0x07, 0xda, 0x98, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/billing_setup_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/billing_setup_error.pb.go new file mode 100644 index 0000000..4b3eb50 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/billing_setup_error.pb.go @@ -0,0 +1,198 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/billing_setup_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible billing setup errors. +type BillingSetupErrorEnum_BillingSetupError int32 + +const ( + // Enum unspecified. + BillingSetupErrorEnum_UNSPECIFIED BillingSetupErrorEnum_BillingSetupError = 0 + // The received error code is not known in this version. + BillingSetupErrorEnum_UNKNOWN BillingSetupErrorEnum_BillingSetupError = 1 + // Cannot use both an existing Payments account and a new Payments account + // when setting up billing. + BillingSetupErrorEnum_CANNOT_USE_EXISTING_AND_NEW_ACCOUNT BillingSetupErrorEnum_BillingSetupError = 2 + // Cannot cancel an APPROVED billing setup whose start time has passed. + BillingSetupErrorEnum_CANNOT_REMOVE_STARTED_BILLING_SETUP BillingSetupErrorEnum_BillingSetupError = 3 + // Cannot perform a Change of Bill-To (CBT) to the same Payments account. + BillingSetupErrorEnum_CANNOT_CHANGE_BILLING_TO_SAME_PAYMENTS_ACCOUNT BillingSetupErrorEnum_BillingSetupError = 4 + // Billing Setups can only be used by customers with ENABLED or DRAFT + // status. + BillingSetupErrorEnum_BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_STATUS BillingSetupErrorEnum_BillingSetupError = 5 + // Billing Setups must either include a correctly formatted existing + // Payments account id, or a non-empty new Payments account name. + BillingSetupErrorEnum_INVALID_PAYMENTS_ACCOUNT BillingSetupErrorEnum_BillingSetupError = 6 + // Only billable and third-party customers can create billing setups. 
+ BillingSetupErrorEnum_BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_CATEGORY BillingSetupErrorEnum_BillingSetupError = 7 + // Billing Setup creations can only use NOW for start time type. + BillingSetupErrorEnum_INVALID_START_TIME_TYPE BillingSetupErrorEnum_BillingSetupError = 8 + // Billing Setups can only be created for a third-party customer if they do + // not already have a setup. + BillingSetupErrorEnum_THIRD_PARTY_ALREADY_HAS_BILLING BillingSetupErrorEnum_BillingSetupError = 9 + // Billing Setups cannot be created if there is already a pending billing in + // progress, ie. a billing known to Payments. + BillingSetupErrorEnum_BILLING_SETUP_IN_PROGRESS BillingSetupErrorEnum_BillingSetupError = 10 + // Billing Setups can only be created by customers who have permission to + // setup billings. Users can contact a representative for help setting up + // permissions. + BillingSetupErrorEnum_NO_SIGNUP_PERMISSION BillingSetupErrorEnum_BillingSetupError = 11 + // Billing Setups cannot be created if there is already a future-approved + // billing. + BillingSetupErrorEnum_CHANGE_OF_BILL_TO_IN_PROGRESS BillingSetupErrorEnum_BillingSetupError = 12 + // Billing Setup creation failed because Payments could not find the + // requested Payments profile. + BillingSetupErrorEnum_PAYMENTS_PROFILE_NOT_FOUND BillingSetupErrorEnum_BillingSetupError = 13 + // Billing Setup creation failed because Payments could not find the + // requested Payments account. + BillingSetupErrorEnum_PAYMENTS_ACCOUNT_NOT_FOUND BillingSetupErrorEnum_BillingSetupError = 14 + // Billing Setup creation failed because Payments considers requested + // Payments profile ineligible. + BillingSetupErrorEnum_PAYMENTS_PROFILE_INELIGIBLE BillingSetupErrorEnum_BillingSetupError = 15 + // Billing Setup creation failed because Payments considers requested + // Payments account ineligible. 
+ BillingSetupErrorEnum_PAYMENTS_ACCOUNT_INELIGIBLE BillingSetupErrorEnum_BillingSetupError = 16 +) + +var BillingSetupErrorEnum_BillingSetupError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_USE_EXISTING_AND_NEW_ACCOUNT", + 3: "CANNOT_REMOVE_STARTED_BILLING_SETUP", + 4: "CANNOT_CHANGE_BILLING_TO_SAME_PAYMENTS_ACCOUNT", + 5: "BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_STATUS", + 6: "INVALID_PAYMENTS_ACCOUNT", + 7: "BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_CATEGORY", + 8: "INVALID_START_TIME_TYPE", + 9: "THIRD_PARTY_ALREADY_HAS_BILLING", + 10: "BILLING_SETUP_IN_PROGRESS", + 11: "NO_SIGNUP_PERMISSION", + 12: "CHANGE_OF_BILL_TO_IN_PROGRESS", + 13: "PAYMENTS_PROFILE_NOT_FOUND", + 14: "PAYMENTS_ACCOUNT_NOT_FOUND", + 15: "PAYMENTS_PROFILE_INELIGIBLE", + 16: "PAYMENTS_ACCOUNT_INELIGIBLE", +} +var BillingSetupErrorEnum_BillingSetupError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_USE_EXISTING_AND_NEW_ACCOUNT": 2, + "CANNOT_REMOVE_STARTED_BILLING_SETUP": 3, + "CANNOT_CHANGE_BILLING_TO_SAME_PAYMENTS_ACCOUNT": 4, + "BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_STATUS": 5, + "INVALID_PAYMENTS_ACCOUNT": 6, + "BILLING_SETUP_NOT_PERMITTED_FOR_CUSTOMER_CATEGORY": 7, + "INVALID_START_TIME_TYPE": 8, + "THIRD_PARTY_ALREADY_HAS_BILLING": 9, + "BILLING_SETUP_IN_PROGRESS": 10, + "NO_SIGNUP_PERMISSION": 11, + "CHANGE_OF_BILL_TO_IN_PROGRESS": 12, + "PAYMENTS_PROFILE_NOT_FOUND": 13, + "PAYMENTS_ACCOUNT_NOT_FOUND": 14, + "PAYMENTS_PROFILE_INELIGIBLE": 15, + "PAYMENTS_ACCOUNT_INELIGIBLE": 16, +} + +func (x BillingSetupErrorEnum_BillingSetupError) String() string { + return proto.EnumName(BillingSetupErrorEnum_BillingSetupError_name, int32(x)) +} +func (BillingSetupErrorEnum_BillingSetupError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_error_67fc3dcc859d5bc8, []int{0, 0} +} + +// Container for enum describing possible billing setup errors. 
+type BillingSetupErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingSetupErrorEnum) Reset() { *m = BillingSetupErrorEnum{} } +func (m *BillingSetupErrorEnum) String() string { return proto.CompactTextString(m) } +func (*BillingSetupErrorEnum) ProtoMessage() {} +func (*BillingSetupErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_error_67fc3dcc859d5bc8, []int{0} +} +func (m *BillingSetupErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingSetupErrorEnum.Unmarshal(m, b) +} +func (m *BillingSetupErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingSetupErrorEnum.Marshal(b, m, deterministic) +} +func (dst *BillingSetupErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingSetupErrorEnum.Merge(dst, src) +} +func (m *BillingSetupErrorEnum) XXX_Size() int { + return xxx_messageInfo_BillingSetupErrorEnum.Size(m) +} +func (m *BillingSetupErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_BillingSetupErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingSetupErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*BillingSetupErrorEnum)(nil), "google.ads.googleads.v1.errors.BillingSetupErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.BillingSetupErrorEnum_BillingSetupError", BillingSetupErrorEnum_BillingSetupError_name, BillingSetupErrorEnum_BillingSetupError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/billing_setup_error.proto", fileDescriptor_billing_setup_error_67fc3dcc859d5bc8) +} + +var fileDescriptor_billing_setup_error_67fc3dcc859d5bc8 = []byte{ + // 568 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0x59, 0x37, 0x36, 0xf0, 0x80, 0x19, 0x8b, 0x1f, 0x63, 0x3f, 0x45, 0x77, 0xe0, 0x96, + 0xa8, 0x4c, 0x48, 0x28, 0x9c, 0xdc, 0xe4, 0x35, 0xb5, 0x48, 0xed, 0x28, 0x76, 0x3a, 0x8a, 0x2a, + 0x59, 0x1d, 0xad, 0xa2, 0x4a, 0x5d, 0x52, 0x35, 0xdd, 0xfe, 0x20, 0x8e, 0xfc, 0x29, 0xfc, 0x17, + 0x5c, 0xb9, 0x71, 0xe6, 0x82, 0x12, 0xb7, 0x51, 0xab, 0x0a, 0xc4, 0xc9, 0x4f, 0x7e, 0xdf, 0xef, + 0xe7, 0xf9, 0x3d, 0x3d, 0xa3, 0xf7, 0x49, 0x96, 0x25, 0x93, 0x91, 0x3d, 0x18, 0xe6, 0xb6, 0x09, + 0x8b, 0xe8, 0xae, 0x61, 0x8f, 0x66, 0xb3, 0x6c, 0x96, 0xdb, 0xd7, 0xe3, 0xc9, 0x64, 0x9c, 0x26, + 0x3a, 0x1f, 0xcd, 0x6f, 0xa7, 0xba, 0xbc, 0xb4, 0xa6, 0xb3, 0x6c, 0x9e, 0x91, 0x33, 0x23, 0xb7, + 0x06, 0xc3, 0xdc, 0xaa, 0x9c, 0xd6, 0x5d, 0xc3, 0x32, 0xce, 0xa3, 0x93, 0x25, 0x79, 0x3a, 0xb6, + 0x07, 0x69, 0x9a, 0xcd, 0x07, 0xf3, 0x71, 0x96, 0xe6, 0xc6, 0x5d, 0xff, 0xbd, 0x83, 0x9e, 0x37, + 0x0d, 0x5b, 0x16, 0x68, 0x28, 0x4c, 0x90, 0xde, 0xde, 0xd4, 0x7f, 0xec, 0xa0, 0xa7, 0x1b, 0x19, + 0x72, 0x80, 0xf6, 0x63, 0x2e, 0x43, 0x70, 0x59, 0x8b, 0x81, 0x87, 0xef, 0x91, 0x7d, 0xb4, 0x17, + 0xf3, 0x8f, 0x5c, 0x5c, 0x71, 0xbc, 0x45, 0xde, 0xa0, 0x0b, 0x97, 0x72, 0x2e, 0x94, 0x8e, 0x25, + 0x68, 0xf8, 0xc4, 0xa4, 0x62, 0xdc, 0xd7, 0x94, 0x7b, 0x9a, 0xc3, 0x95, 0xa6, 0xae, 0x2b, 0x62, + 0xae, 0x70, 0x6d, 0x45, 0x18, 0x41, 0x47, 0x74, 0x41, 0x4b, 0x45, 0x23, 0x05, 0x9e, 0x6e, 0xb2, + 0x20, 0x28, 0x2c, 0x12, 0x54, 0x1c, 0xe2, 0x6d, 0xf2, 0x16, 0x59, 0x0b, 0xa1, 0xdb, 0xa6, 0xdc, + 0x87, 0x4a, 0xa0, 0x84, 0x96, 0xb4, 0x03, 0x3a, 0xa4, 0xbd, 0x0e, 0x70, 0x25, 0x2b, 0xf8, 0x0e, + 0xb9, 0x44, 0xf6, 0x1a, 0x46, 0x17, 0xf6, 0x10, 0xa2, 0x0e, 0x53, 0x45, 
0x89, 0x96, 0x88, 0xb4, + 0x1b, 0x4b, 0x25, 0x3a, 0x10, 0x15, 0x75, 0x55, 0x2c, 0xf1, 0x7d, 0x72, 0x82, 0x0e, 0x19, 0xef, + 0xd2, 0x80, 0x79, 0x9b, 0xc8, 0x5d, 0xf2, 0x0e, 0x35, 0xfe, 0x1b, 0xe9, 0x52, 0x05, 0xbe, 0x88, + 0x7a, 0x78, 0x8f, 0x1c, 0xa3, 0x97, 0x4b, 0x68, 0xd9, 0xa0, 0x56, 0xac, 0x03, 0x5a, 0xf5, 0x42, + 0xc0, 0x0f, 0xc8, 0x05, 0x3a, 0x57, 0x6d, 0x16, 0x15, 0xf5, 0x22, 0xd5, 0xd3, 0x34, 0x88, 0x80, + 0x7a, 0x3d, 0xdd, 0xa6, 0x72, 0xd9, 0x24, 0x7e, 0x48, 0x4e, 0xd1, 0xab, 0xf5, 0xc2, 0x8c, 0xeb, + 0x30, 0x12, 0x7e, 0x04, 0x52, 0x62, 0x44, 0x0e, 0xd1, 0x33, 0x2e, 0xb4, 0x64, 0x3e, 0x8f, 0x43, + 0xf3, 0x1e, 0x29, 0x99, 0xe0, 0x78, 0x9f, 0xbc, 0x46, 0xa7, 0x8b, 0x89, 0x89, 0x56, 0xc9, 0x2b, + 0x26, 0xb6, 0x6a, 0x7e, 0x44, 0xce, 0xd0, 0x51, 0xd5, 0x6a, 0x18, 0x89, 0x16, 0x0b, 0xa0, 0xec, + 0xab, 0x25, 0x62, 0xee, 0xe1, 0xc7, 0x6b, 0xf9, 0xc5, 0x28, 0x56, 0xf2, 0x4f, 0xc8, 0x39, 0x3a, + 0xde, 0xf0, 0x33, 0x0e, 0x01, 0xf3, 0x59, 0x33, 0x00, 0x7c, 0xb0, 0x26, 0x58, 0x02, 0x56, 0x04, + 0xb8, 0xf9, 0x6b, 0x0b, 0xd5, 0xbf, 0x64, 0x37, 0xd6, 0xbf, 0x57, 0xb8, 0xf9, 0x62, 0x63, 0x0f, + 0xc3, 0x62, 0x79, 0xc3, 0xad, 0xcf, 0xde, 0xc2, 0x99, 0x64, 0x93, 0x41, 0x9a, 0x58, 0xd9, 0x2c, + 0xb1, 0x93, 0x51, 0x5a, 0xae, 0xf6, 0xf2, 0x1b, 0x4d, 0xc7, 0xf9, 0xdf, 0x7e, 0xd5, 0x07, 0x73, + 0x7c, 0xad, 0x6d, 0xfb, 0x94, 0x7e, 0xab, 0x9d, 0xf9, 0x06, 0x46, 0x87, 0xb9, 0x65, 0xc2, 0x22, + 0xea, 0x36, 0xac, 0xb2, 0x64, 0xfe, 0x7d, 0x29, 0xe8, 0xd3, 0x61, 0xde, 0xaf, 0x04, 0xfd, 0x6e, + 0xa3, 0x6f, 0x04, 0x3f, 0x6b, 0x75, 0x73, 0xeb, 0x38, 0x74, 0x98, 0x3b, 0x4e, 0x25, 0x71, 0x9c, + 0x6e, 0xc3, 0x71, 0x8c, 0xe8, 0x7a, 0xb7, 0x7c, 0xdd, 0xe5, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x29, 0x15, 0x51, 0x97, 0xf2, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_budget_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_budget_error.pb.go new file mode 100644 index 0000000..b07b2bf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_budget_error.pb.go @@ -0,0 +1,192 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/campaign_budget_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible campaign budget errors. +type CampaignBudgetErrorEnum_CampaignBudgetError int32 + +const ( + // Enum unspecified. + CampaignBudgetErrorEnum_UNSPECIFIED CampaignBudgetErrorEnum_CampaignBudgetError = 0 + // The received error code is not known in this version. + CampaignBudgetErrorEnum_UNKNOWN CampaignBudgetErrorEnum_CampaignBudgetError = 1 + // The campaign budget cannot be shared. + CampaignBudgetErrorEnum_CAMPAIGN_BUDGET_CANNOT_BE_SHARED CampaignBudgetErrorEnum_CampaignBudgetError = 17 + // The requested campaign budget no longer exists. 
+ CampaignBudgetErrorEnum_CAMPAIGN_BUDGET_REMOVED CampaignBudgetErrorEnum_CampaignBudgetError = 2 + // The campaign budget is associated with at least one campaign, and so the + // campaign budget cannot be removed. + CampaignBudgetErrorEnum_CAMPAIGN_BUDGET_IN_USE CampaignBudgetErrorEnum_CampaignBudgetError = 3 + // Customer is not whitelisted for this campaign budget period. + CampaignBudgetErrorEnum_CAMPAIGN_BUDGET_PERIOD_NOT_AVAILABLE CampaignBudgetErrorEnum_CampaignBudgetError = 4 + // This field is not mutable on implicitly shared campaign budgets + CampaignBudgetErrorEnum_CANNOT_MODIFY_FIELD_OF_IMPLICITLY_SHARED_CAMPAIGN_BUDGET CampaignBudgetErrorEnum_CampaignBudgetError = 6 + // Cannot change explicitly shared campaign budgets back to implicitly + // shared ones. + CampaignBudgetErrorEnum_CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_IMPLICITLY_SHARED CampaignBudgetErrorEnum_CampaignBudgetError = 7 + // An implicit campaign budget without a name cannot be changed to + // explicitly shared campaign budget. + CampaignBudgetErrorEnum_CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED_WITHOUT_NAME CampaignBudgetErrorEnum_CampaignBudgetError = 8 + // Cannot change an implicitly shared campaign budget to an explicitly + // shared one. + CampaignBudgetErrorEnum_CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED CampaignBudgetErrorEnum_CampaignBudgetError = 9 + // Only explicitly shared campaign budgets can be used with multiple + // campaigns. + CampaignBudgetErrorEnum_CANNOT_USE_IMPLICITLY_SHARED_CAMPAIGN_BUDGET_WITH_MULTIPLE_CAMPAIGNS CampaignBudgetErrorEnum_CampaignBudgetError = 10 + // A campaign budget with this name already exists. + CampaignBudgetErrorEnum_DUPLICATE_NAME CampaignBudgetErrorEnum_CampaignBudgetError = 11 + // A money amount was not in the expected currency. + CampaignBudgetErrorEnum_MONEY_AMOUNT_IN_WRONG_CURRENCY CampaignBudgetErrorEnum_CampaignBudgetError = 12 + // A money amount was less than the minimum CPC for currency. + CampaignBudgetErrorEnum_MONEY_AMOUNT_LESS_THAN_CURRENCY_MINIMUM_CPC CampaignBudgetErrorEnum_CampaignBudgetError = 13 + // A money amount was greater than the maximum allowed. + CampaignBudgetErrorEnum_MONEY_AMOUNT_TOO_LARGE CampaignBudgetErrorEnum_CampaignBudgetError = 14 + // A money amount was negative. + CampaignBudgetErrorEnum_NEGATIVE_MONEY_AMOUNT CampaignBudgetErrorEnum_CampaignBudgetError = 15 + // A money amount was not a multiple of a minimum unit. 
+ CampaignBudgetErrorEnum_NON_MULTIPLE_OF_MINIMUM_CURRENCY_UNIT CampaignBudgetErrorEnum_CampaignBudgetError = 16 +) + +var CampaignBudgetErrorEnum_CampaignBudgetError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 17: "CAMPAIGN_BUDGET_CANNOT_BE_SHARED", + 2: "CAMPAIGN_BUDGET_REMOVED", + 3: "CAMPAIGN_BUDGET_IN_USE", + 4: "CAMPAIGN_BUDGET_PERIOD_NOT_AVAILABLE", + 6: "CANNOT_MODIFY_FIELD_OF_IMPLICITLY_SHARED_CAMPAIGN_BUDGET", + 7: "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_IMPLICITLY_SHARED", + 8: "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED_WITHOUT_NAME", + 9: "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED", + 10: "CANNOT_USE_IMPLICITLY_SHARED_CAMPAIGN_BUDGET_WITH_MULTIPLE_CAMPAIGNS", + 11: "DUPLICATE_NAME", + 12: "MONEY_AMOUNT_IN_WRONG_CURRENCY", + 13: "MONEY_AMOUNT_LESS_THAN_CURRENCY_MINIMUM_CPC", + 14: "MONEY_AMOUNT_TOO_LARGE", + 15: "NEGATIVE_MONEY_AMOUNT", + 16: "NON_MULTIPLE_OF_MINIMUM_CURRENCY_UNIT", +} +var CampaignBudgetErrorEnum_CampaignBudgetError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CAMPAIGN_BUDGET_CANNOT_BE_SHARED": 17, + "CAMPAIGN_BUDGET_REMOVED": 2, + "CAMPAIGN_BUDGET_IN_USE": 3, + "CAMPAIGN_BUDGET_PERIOD_NOT_AVAILABLE": 4, + "CANNOT_MODIFY_FIELD_OF_IMPLICITLY_SHARED_CAMPAIGN_BUDGET": 6, + "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_IMPLICITLY_SHARED": 7, + "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED_WITHOUT_NAME": 8, + "CANNOT_UPDATE_CAMPAIGN_BUDGET_TO_EXPLICITLY_SHARED": 9, + "CANNOT_USE_IMPLICITLY_SHARED_CAMPAIGN_BUDGET_WITH_MULTIPLE_CAMPAIGNS": 10, + "DUPLICATE_NAME": 11, + "MONEY_AMOUNT_IN_WRONG_CURRENCY": 12, + "MONEY_AMOUNT_LESS_THAN_CURRENCY_MINIMUM_CPC": 13, + "MONEY_AMOUNT_TOO_LARGE": 14, + "NEGATIVE_MONEY_AMOUNT": 15, + "NON_MULTIPLE_OF_MINIMUM_CURRENCY_UNIT": 16, +} + +func (x CampaignBudgetErrorEnum_CampaignBudgetError) String() string { + return proto.EnumName(CampaignBudgetErrorEnum_CampaignBudgetError_name, int32(x)) +} +func (CampaignBudgetErrorEnum_CampaignBudgetError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_error_18e5e7124b214724, []int{0, 0} +} + +// Container for enum describing possible campaign budget errors. 
+type CampaignBudgetErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignBudgetErrorEnum) Reset() { *m = CampaignBudgetErrorEnum{} } +func (m *CampaignBudgetErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignBudgetErrorEnum) ProtoMessage() {} +func (*CampaignBudgetErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_error_18e5e7124b214724, []int{0} +} +func (m *CampaignBudgetErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignBudgetErrorEnum.Unmarshal(m, b) +} +func (m *CampaignBudgetErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignBudgetErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignBudgetErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignBudgetErrorEnum.Merge(dst, src) +} +func (m *CampaignBudgetErrorEnum) XXX_Size() int { + return xxx_messageInfo_CampaignBudgetErrorEnum.Size(m) +} +func (m *CampaignBudgetErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignBudgetErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignBudgetErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignBudgetErrorEnum)(nil), "google.ads.googleads.v1.errors.CampaignBudgetErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CampaignBudgetErrorEnum_CampaignBudgetError", CampaignBudgetErrorEnum_CampaignBudgetError_name, CampaignBudgetErrorEnum_CampaignBudgetError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/campaign_budget_error.proto", fileDescriptor_campaign_budget_error_18e5e7124b214724) +} + +var fileDescriptor_campaign_budget_error_18e5e7124b214724 = []byte{ + // 582 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xdf, 0x6a, 0xd4, 0x4c, + 0x18, 0xc6, 0xbf, 0x6e, 0xbf, 0xb6, 0x3a, 0xd5, 0x76, 0x1c, 0xd1, 0x6a, 0x95, 0x22, 0x4b, 0x05, + 0x45, 0x48, 0x58, 0x05, 0x91, 0x28, 0xc8, 0x24, 0x99, 0xcd, 0x0e, 0x26, 0x33, 0x21, 0x99, 0x49, + 0x5d, 0x59, 0x18, 0xd2, 0x66, 0x09, 0x0b, 0xdd, 0x64, 0xd9, 0x6c, 0x7b, 0x41, 0x1e, 0x7a, 0x1d, + 0x1e, 0x79, 0x15, 0x1e, 0x7b, 0xea, 0x0d, 0x48, 0x92, 0xfd, 0x63, 0xd3, 0x6a, 0x3d, 0xca, 0xcb, + 0xbc, 0xcf, 0xef, 0x79, 0xde, 0x30, 0xf3, 0x02, 0x23, 0xcd, 0xf3, 0xf4, 0x74, 0xa8, 0xc7, 0x49, + 0xa1, 0xd7, 0x65, 0x59, 0x9d, 0x77, 0xf4, 0xe1, 0x74, 0x9a, 0x4f, 0x0b, 0xfd, 0x24, 0x1e, 0x4f, + 0xe2, 0x51, 0x9a, 0xa9, 0xe3, 0xb3, 0x24, 0x1d, 0xce, 0x54, 0x75, 0xac, 0x4d, 0xa6, 0xf9, 0x2c, + 0x47, 0x07, 0x35, 0xa0, 0xc5, 0x49, 0xa1, 0x2d, 0x59, 0xed, 0xbc, 0xa3, 0xd5, 0xec, 0xfe, 0xe3, + 0x85, 0xf7, 0x64, 0xa4, 0xc7, 0x59, 0x96, 0xcf, 0xe2, 0xd9, 0x28, 0xcf, 0x8a, 0x9a, 0x6e, 0x7f, + 0xdf, 0x00, 0x7b, 0xd6, 0xdc, 0xdd, 0xac, 0xcc, 0x49, 0x89, 0x91, 0xec, 0x6c, 0xdc, 0xfe, 0xba, + 0x01, 0xee, 0x5e, 0xd1, 0x43, 0xbb, 0x60, 0x5b, 0xb2, 0xd0, 0x27, 0x16, 0xed, 0x52, 0x62, 0xc3, + 0xff, 0xd0, 0x36, 0xd8, 0x92, 0xec, 0x03, 0xe3, 0x47, 0x0c, 0xae, 0xa1, 0x43, 0xf0, 0xc4, 0xc2, + 0x9e, 0x8f, 0xa9, 0xc3, 0x94, 0x29, 0x6d, 0x87, 0x08, 0x65, 0x61, 0xc6, 0xb8, 0x50, 0x26, 0x51, + 0x61, 0x0f, 0x07, 0xc4, 0x86, 0x77, 0xd0, 0x23, 0xb0, 0xd7, 0x54, 0x05, 0xc4, 0xe3, 0x11, 0xb1, + 0x61, 0x0b, 0xed, 0x83, 0xfb, 0xcd, 0x26, 0x65, 0x4a, 0x86, 0x04, 0xae, 0xa3, 0x67, 0xe0, 0xb0, + 0xd9, 0xf3, 0x49, 0x40, 0xb9, 0xad, 0xca, 0x08, 0x1c, 0x61, 0xea, 0x62, 0xd3, 0x25, 0xf0, 0x7f, + 0xf4, 0x0e, 
0xbc, 0x99, 0x07, 0x7b, 0xdc, 0xa6, 0xdd, 0xbe, 0xea, 0x52, 0xe2, 0xda, 0x8a, 0x77, + 0x15, 0xf5, 0x7c, 0x97, 0x5a, 0x54, 0xb8, 0xfd, 0xf9, 0x40, 0xaa, 0x61, 0x09, 0x37, 0xd1, 0x6b, + 0xf0, 0x72, 0x4e, 0x4b, 0xdf, 0xc6, 0x82, 0x34, 0x25, 0x4a, 0xf0, 0xcb, 0x3e, 0x70, 0x0b, 0x59, + 0xe0, 0xfd, 0xb5, 0x1c, 0xf9, 0xd8, 0xcc, 0x3f, 0xa2, 0xa2, 0xc7, 0xa5, 0x50, 0x0c, 0x7b, 0x04, + 0xde, 0xf8, 0xa7, 0xf0, 0x4b, 0x26, 0xf0, 0x26, 0xea, 0x01, 0x7b, 0xc1, 0x85, 0xe4, 0xfa, 0xdf, + 0xac, 0x62, 0x95, 0x27, 0x5d, 0x41, 0x7d, 0x77, 0x15, 0x12, 0x42, 0x80, 0x10, 0xd8, 0xb1, 0x65, + 0x89, 0x97, 0xe9, 0xd5, 0x54, 0xdb, 0xa8, 0x0d, 0x0e, 0x3c, 0xce, 0x48, 0x5f, 0x61, 0x8f, 0x4b, + 0x56, 0xdd, 0xc9, 0x51, 0xc0, 0x99, 0xa3, 0x2c, 0x19, 0x04, 0x84, 0x59, 0x7d, 0x78, 0x0b, 0xe9, + 0xe0, 0xc5, 0x05, 0x8d, 0x4b, 0xc2, 0x50, 0x89, 0x1e, 0x66, 0x4b, 0x91, 0xf2, 0x28, 0xa3, 0x9e, + 0xf4, 0x94, 0xe5, 0x5b, 0xf0, 0x76, 0x79, 0xd7, 0x17, 0x00, 0xc1, 0xb9, 0x72, 0x71, 0xe0, 0x10, + 0xb8, 0x83, 0x1e, 0x82, 0x7b, 0x8c, 0x38, 0x58, 0xd0, 0x88, 0xa8, 0xdf, 0x45, 0x70, 0x17, 0x3d, + 0x07, 0x4f, 0x19, 0x67, 0xab, 0xd9, 0x79, 0x77, 0xe5, 0xbb, 0x08, 0x92, 0x8c, 0x0a, 0x08, 0xcd, + 0x9f, 0x6b, 0xa0, 0x7d, 0x92, 0x8f, 0xb5, 0xbf, 0xef, 0x89, 0xf9, 0xe0, 0x8a, 0xa7, 0xee, 0x97, + 0x3b, 0xe2, 0xaf, 0x7d, 0xb2, 0xe7, 0x6c, 0x9a, 0x9f, 0xc6, 0x59, 0xaa, 0xe5, 0xd3, 0x54, 0x4f, + 0x87, 0x59, 0xb5, 0x41, 0x8b, 0x7d, 0x9d, 0x8c, 0x8a, 0x3f, 0xad, 0xef, 0xdb, 0xfa, 0xf3, 0xb9, + 0xb5, 0xee, 0x60, 0xfc, 0xa5, 0x75, 0xe0, 0xd4, 0x66, 0x38, 0x29, 0xb4, 0xba, 0x2c, 0xab, 0xa8, + 0xa3, 0x55, 0x91, 0xc5, 0xb7, 0x85, 0x60, 0x80, 0x93, 0x62, 0xb0, 0x14, 0x0c, 0xa2, 0xce, 0xa0, + 0x16, 0xfc, 0x68, 0xb5, 0xeb, 0x53, 0xc3, 0xc0, 0x49, 0x61, 0x18, 0x4b, 0x89, 0x61, 0x44, 0x1d, + 0xc3, 0xa8, 0x45, 0xc7, 0x9b, 0xd5, 0x74, 0xaf, 0x7e, 0x05, 0x00, 0x00, 0xff, 0xff, 0xff, 0xf7, + 0xa5, 0x0c, 0x5b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_criterion_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_criterion_error.pb.go new file mode 100644 index 0000000..f9cca63 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_criterion_error.pb.go @@ -0,0 +1,171 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/campaign_criterion_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible campaign criterion errors. +type CampaignCriterionErrorEnum_CampaignCriterionError int32 + +const ( + // Enum unspecified. + CampaignCriterionErrorEnum_UNSPECIFIED CampaignCriterionErrorEnum_CampaignCriterionError = 0 + // The received error code is not known in this version. 
+ CampaignCriterionErrorEnum_UNKNOWN CampaignCriterionErrorEnum_CampaignCriterionError = 1 + // Concrete type of criterion (keyword v.s. placement) is required for + // CREATE and UPDATE operations. + CampaignCriterionErrorEnum_CONCRETE_TYPE_REQUIRED CampaignCriterionErrorEnum_CampaignCriterionError = 2 + // Invalid placement URL. + CampaignCriterionErrorEnum_INVALID_PLACEMENT_URL CampaignCriterionErrorEnum_CampaignCriterionError = 3 + // Criteria type can not be excluded for the campaign by the customer. like + // AOL account type cannot target site type criteria + CampaignCriterionErrorEnum_CANNOT_EXCLUDE_CRITERIA_TYPE CampaignCriterionErrorEnum_CampaignCriterionError = 4 + // Cannot set the campaign criterion status for this criteria type. + CampaignCriterionErrorEnum_CANNOT_SET_STATUS_FOR_CRITERIA_TYPE CampaignCriterionErrorEnum_CampaignCriterionError = 5 + // Cannot set the campaign criterion status for an excluded criteria. + CampaignCriterionErrorEnum_CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA CampaignCriterionErrorEnum_CampaignCriterionError = 6 + // Cannot target and exclude the same criterion. + CampaignCriterionErrorEnum_CANNOT_TARGET_AND_EXCLUDE CampaignCriterionErrorEnum_CampaignCriterionError = 7 + // The mutate contained too many operations. + CampaignCriterionErrorEnum_TOO_MANY_OPERATIONS CampaignCriterionErrorEnum_CampaignCriterionError = 8 + // This operator cannot be applied to a criterion of this type. + CampaignCriterionErrorEnum_OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE CampaignCriterionErrorEnum_CampaignCriterionError = 9 + // The Shopping campaign sales country is not supported for + // ProductSalesChannel targeting. + CampaignCriterionErrorEnum_SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL CampaignCriterionErrorEnum_CampaignCriterionError = 10 + // The existing field can't be updated with CREATE operation. It can be + // updated with UPDATE operation only. + CampaignCriterionErrorEnum_CANNOT_ADD_EXISTING_FIELD CampaignCriterionErrorEnum_CampaignCriterionError = 11 + // Negative criteria are immutable, so updates are not allowed. 
+ CampaignCriterionErrorEnum_CANNOT_UPDATE_NEGATIVE_CRITERION CampaignCriterionErrorEnum_CampaignCriterionError = 12 +) + +var CampaignCriterionErrorEnum_CampaignCriterionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONCRETE_TYPE_REQUIRED", + 3: "INVALID_PLACEMENT_URL", + 4: "CANNOT_EXCLUDE_CRITERIA_TYPE", + 5: "CANNOT_SET_STATUS_FOR_CRITERIA_TYPE", + 6: "CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA", + 7: "CANNOT_TARGET_AND_EXCLUDE", + 8: "TOO_MANY_OPERATIONS", + 9: "OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE", + 10: "SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL", + 11: "CANNOT_ADD_EXISTING_FIELD", + 12: "CANNOT_UPDATE_NEGATIVE_CRITERION", +} +var CampaignCriterionErrorEnum_CampaignCriterionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONCRETE_TYPE_REQUIRED": 2, + "INVALID_PLACEMENT_URL": 3, + "CANNOT_EXCLUDE_CRITERIA_TYPE": 4, + "CANNOT_SET_STATUS_FOR_CRITERIA_TYPE": 5, + "CANNOT_SET_STATUS_FOR_EXCLUDED_CRITERIA": 6, + "CANNOT_TARGET_AND_EXCLUDE": 7, + "TOO_MANY_OPERATIONS": 8, + "OPERATOR_NOT_SUPPORTED_FOR_CRITERION_TYPE": 9, + "SHOPPING_CAMPAIGN_SALES_COUNTRY_NOT_SUPPORTED_FOR_SALES_CHANNEL": 10, + "CANNOT_ADD_EXISTING_FIELD": 11, + "CANNOT_UPDATE_NEGATIVE_CRITERION": 12, +} + +func (x CampaignCriterionErrorEnum_CampaignCriterionError) String() string { + return proto.EnumName(CampaignCriterionErrorEnum_CampaignCriterionError_name, int32(x)) +} +func (CampaignCriterionErrorEnum_CampaignCriterionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_error_d8b84e8d646e37a2, []int{0, 0} +} + +// Container for enum describing possible campaign criterion errors. +type CampaignCriterionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignCriterionErrorEnum) Reset() { *m = CampaignCriterionErrorEnum{} } +func (m *CampaignCriterionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignCriterionErrorEnum) ProtoMessage() {} +func (*CampaignCriterionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_error_d8b84e8d646e37a2, []int{0} +} +func (m *CampaignCriterionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignCriterionErrorEnum.Unmarshal(m, b) +} +func (m *CampaignCriterionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignCriterionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignCriterionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignCriterionErrorEnum.Merge(dst, src) +} +func (m *CampaignCriterionErrorEnum) XXX_Size() int { + return xxx_messageInfo_CampaignCriterionErrorEnum.Size(m) +} +func (m *CampaignCriterionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignCriterionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignCriterionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignCriterionErrorEnum)(nil), "google.ads.googleads.v1.errors.CampaignCriterionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CampaignCriterionErrorEnum_CampaignCriterionError", CampaignCriterionErrorEnum_CampaignCriterionError_name, CampaignCriterionErrorEnum_CampaignCriterionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/campaign_criterion_error.proto", fileDescriptor_campaign_criterion_error_d8b84e8d646e37a2) +} + +var 
fileDescriptor_campaign_criterion_error_d8b84e8d646e37a2 = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6a, 0xd4, 0x40, + 0x14, 0x86, 0xed, 0xae, 0xb6, 0x3a, 0x15, 0x0c, 0x23, 0x56, 0x5a, 0x6b, 0x29, 0xab, 0x50, 0x44, + 0x4c, 0x58, 0xbc, 0x8b, 0x88, 0x4c, 0x27, 0xa7, 0xe9, 0x60, 0x3a, 0x33, 0x4e, 0x26, 0xab, 0x95, + 0x85, 0x21, 0x76, 0x97, 0x10, 0xe8, 0x26, 0x4b, 0xb2, 0xf6, 0x81, 0xbc, 0xf4, 0x51, 0x7c, 0x00, + 0x1f, 0xc2, 0x7b, 0xbd, 0x96, 0x64, 0xb2, 0x6b, 0x91, 0xd5, 0xab, 0x9c, 0xe4, 0x7c, 0xff, 0x7f, + 0x7e, 0xc8, 0x8f, 0x5e, 0x67, 0x65, 0x99, 0x5d, 0x4e, 0xbd, 0x74, 0x52, 0x7b, 0x76, 0x6c, 0xa6, + 0xab, 0xa1, 0x37, 0xad, 0xaa, 0xb2, 0xaa, 0xbd, 0x8b, 0x74, 0x36, 0x4f, 0xf3, 0xac, 0x30, 0x17, + 0x55, 0xbe, 0x98, 0x56, 0x79, 0x59, 0x98, 0x76, 0xe3, 0xce, 0xab, 0x72, 0x51, 0xe2, 0x03, 0xab, + 0x71, 0xd3, 0x49, 0xed, 0xae, 0xe4, 0xee, 0xd5, 0xd0, 0xb5, 0xf2, 0xbd, 0xfd, 0xa5, 0xfd, 0x3c, + 0xf7, 0xd2, 0xa2, 0x28, 0x17, 0xe9, 0x22, 0x2f, 0x8b, 0xda, 0xaa, 0x07, 0x3f, 0xfb, 0x68, 0x8f, + 0x76, 0x07, 0xe8, 0xd2, 0x1f, 0x1a, 0x25, 0x14, 0x9f, 0x67, 0x83, 0xef, 0x7d, 0xb4, 0xb3, 0x7e, + 0x8d, 0xef, 0xa1, 0xed, 0x84, 0xc7, 0x12, 0x28, 0x3b, 0x61, 0x10, 0x38, 0x37, 0xf0, 0x36, 0xda, + 0x4a, 0xf8, 0x5b, 0x2e, 0xde, 0x73, 0x67, 0x03, 0xef, 0xa1, 0x1d, 0x2a, 0x38, 0x55, 0xa0, 0xc1, + 0xe8, 0x73, 0x09, 0x46, 0xc1, 0xbb, 0x84, 0x29, 0x08, 0x9c, 0x1e, 0xde, 0x45, 0x0f, 0x18, 0x1f, + 0x91, 0x88, 0x05, 0x46, 0x46, 0x84, 0xc2, 0x19, 0x70, 0x6d, 0x12, 0x15, 0x39, 0x7d, 0x7c, 0x88, + 0xf6, 0x29, 0xe1, 0x5c, 0x68, 0x03, 0x1f, 0x68, 0x94, 0x04, 0x60, 0xa8, 0x62, 0x1a, 0x14, 0x23, + 0xad, 0x8b, 0x73, 0x13, 0x1f, 0xa1, 0x27, 0x1d, 0x11, 0x83, 0x36, 0xb1, 0x26, 0x3a, 0x89, 0xcd, + 0x89, 0x50, 0x7f, 0x81, 0xb7, 0xf0, 0x73, 0x74, 0xb4, 0x1e, 0xec, 0x9c, 0x83, 0x95, 0xc2, 0xd9, + 0xc4, 0x8f, 0xd1, 0x6e, 0x07, 0x6b, 0xa2, 0x42, 0xd0, 0x86, 0xf0, 0x60, 0x09, 0x3a, 0x5b, 0xf8, + 0x21, 0xba, 0xaf, 0x85, 0x30, 0x67, 0x84, 0x9f, 0x1b, 0x21, 0x41, 0x11, 0xcd, 0x04, 0x8f, 0x9d, + 0xdb, 0xf8, 0x05, 0x7a, 0x66, 0xdf, 0x85, 0x32, 0xed, 0xa9, 0x44, 0x4a, 0xa1, 0x34, 0x04, 0xd7, + 0x23, 0x09, 0x6e, 0x33, 0xdd, 0xc1, 0x14, 0xbd, 0x89, 0x4f, 0x85, 0x94, 0x8c, 0x87, 0x86, 0x92, + 0x33, 0x49, 0x58, 0xc8, 0x4d, 0x4c, 0x22, 0x88, 0x0d, 0x15, 0x09, 0xd7, 0xea, 0x7c, 0x8d, 0x4b, + 0xb7, 0x3f, 0x25, 0x9c, 0x43, 0xe4, 0xa0, 0x6b, 0x59, 0x49, 0xd0, 0x84, 0x64, 0xb1, 0x6e, 0xfc, + 0x4e, 0x18, 0x44, 0x81, 0xb3, 0x8d, 0x9f, 0xa2, 0xc3, 0x6e, 0x9d, 0xc8, 0x80, 0x68, 0x30, 0x1c, + 0x42, 0xa2, 0xd9, 0x08, 0xfe, 0xc4, 0x71, 0xee, 0x1e, 0xff, 0xda, 0x40, 0x83, 0x8b, 0x72, 0xe6, + 0xfe, 0xbf, 0x3c, 0xc7, 0x8f, 0xd6, 0xff, 0x7c, 0xd9, 0x74, 0x47, 0x6e, 0x7c, 0x0c, 0x3a, 0x79, + 0x56, 0x5e, 0xa6, 0x45, 0xe6, 0x96, 0x55, 0xe6, 0x65, 0xd3, 0xa2, 0x6d, 0xd6, 0xb2, 0xca, 0xf3, + 0xbc, 0xfe, 0x57, 0xb3, 0x5f, 0xd9, 0xc7, 0x97, 0x5e, 0x3f, 0x24, 0xe4, 0x6b, 0xef, 0x20, 0xb4, + 0x66, 0x64, 0x52, 0xbb, 0x76, 0x6c, 0xa6, 0xd1, 0xd0, 0x6d, 0x4f, 0xd6, 0xdf, 0x96, 0xc0, 0x98, + 0x4c, 0xea, 0xf1, 0x0a, 0x18, 0x8f, 0x86, 0x63, 0x0b, 0xfc, 0xe8, 0x0d, 0xec, 0x57, 0xdf, 0x27, + 0x93, 0xda, 0xf7, 0x57, 0x88, 0xef, 0x8f, 0x86, 0xbe, 0x6f, 0xa1, 0x4f, 0x9b, 0x6d, 0xba, 0x97, + 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xdd, 0x6b, 0xc7, 0xa8, 0x76, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_error.pb.go new file mode 
100644 index 0000000..074179f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_error.pb.go @@ -0,0 +1,312 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/campaign_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible campaign errors. +type CampaignErrorEnum_CampaignError int32 + +const ( + // Enum unspecified. + CampaignErrorEnum_UNSPECIFIED CampaignErrorEnum_CampaignError = 0 + // The received error code is not known in this version. + CampaignErrorEnum_UNKNOWN CampaignErrorEnum_CampaignError = 1 + // Cannot target content network. + CampaignErrorEnum_CANNOT_TARGET_CONTENT_NETWORK CampaignErrorEnum_CampaignError = 3 + // Cannot target search network. + CampaignErrorEnum_CANNOT_TARGET_SEARCH_NETWORK CampaignErrorEnum_CampaignError = 4 + // Cannot cover search network without google search network. + CampaignErrorEnum_CANNOT_TARGET_SEARCH_NETWORK_WITHOUT_GOOGLE_SEARCH CampaignErrorEnum_CampaignError = 5 + // Cannot target Google Search network for a CPM campaign. + CampaignErrorEnum_CANNOT_TARGET_GOOGLE_SEARCH_FOR_CPM_CAMPAIGN CampaignErrorEnum_CampaignError = 6 + // Must target at least one network. + CampaignErrorEnum_CAMPAIGN_MUST_TARGET_AT_LEAST_ONE_NETWORK CampaignErrorEnum_CampaignError = 7 + // Only some Google partners are allowed to target partner search network. + CampaignErrorEnum_CANNOT_TARGET_PARTNER_SEARCH_NETWORK CampaignErrorEnum_CampaignError = 8 + // Cannot target content network only as campaign has criteria-level bidding + // strategy. + CampaignErrorEnum_CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CRITERIA_LEVEL_BIDDING_STRATEGY CampaignErrorEnum_CampaignError = 9 + // Cannot modify the start or end date such that the campaign duration would + // not contain the durations of all runnable trials. + CampaignErrorEnum_CAMPAIGN_DURATION_MUST_CONTAIN_ALL_RUNNABLE_TRIALS CampaignErrorEnum_CampaignError = 10 + // Cannot modify dates, budget or campaign name of a trial campaign. + CampaignErrorEnum_CANNOT_MODIFY_FOR_TRIAL_CAMPAIGN CampaignErrorEnum_CampaignError = 11 + // Trying to modify the name of an active or paused campaign, where the name + // is already assigned to another active or paused campaign. + CampaignErrorEnum_DUPLICATE_CAMPAIGN_NAME CampaignErrorEnum_CampaignError = 12 + // Two fields are in conflicting modes. + CampaignErrorEnum_INCOMPATIBLE_CAMPAIGN_FIELD CampaignErrorEnum_CampaignError = 13 + // Campaign name cannot be used. + CampaignErrorEnum_INVALID_CAMPAIGN_NAME CampaignErrorEnum_CampaignError = 14 + // Given status is invalid. + CampaignErrorEnum_INVALID_AD_SERVING_OPTIMIZATION_STATUS CampaignErrorEnum_CampaignError = 15 + // Error in the campaign level tracking url. 
+ CampaignErrorEnum_INVALID_TRACKING_URL CampaignErrorEnum_CampaignError = 16 + // Cannot set both tracking url template and tracking setting. An user has + // to clear legacy tracking setting in order to add tracking url template. + CampaignErrorEnum_CANNOT_SET_BOTH_TRACKING_URL_TEMPLATE_AND_TRACKING_SETTING CampaignErrorEnum_CampaignError = 17 + // The maximum number of impressions for Frequency Cap should be an integer + // greater than 0. + CampaignErrorEnum_MAX_IMPRESSIONS_NOT_IN_RANGE CampaignErrorEnum_CampaignError = 18 + // Only the Day, Week and Month time units are supported. + CampaignErrorEnum_TIME_UNIT_NOT_SUPPORTED CampaignErrorEnum_CampaignError = 19 + // Operation not allowed on a campaign whose serving status has ended + CampaignErrorEnum_INVALID_OPERATION_IF_SERVING_STATUS_HAS_ENDED CampaignErrorEnum_CampaignError = 20 + // This budget is exclusively linked to a Campaign that is using experiments + // so it cannot be shared. + CampaignErrorEnum_BUDGET_CANNOT_BE_SHARED CampaignErrorEnum_CampaignError = 21 + // Campaigns using experiments cannot use a shared budget. + CampaignErrorEnum_CAMPAIGN_CANNOT_USE_SHARED_BUDGET CampaignErrorEnum_CampaignError = 22 + // A different budget cannot be assigned to a campaign when there are + // running or scheduled trials. + CampaignErrorEnum_CANNOT_CHANGE_BUDGET_ON_CAMPAIGN_WITH_TRIALS CampaignErrorEnum_CampaignError = 23 + // No link found between the campaign and the label. + CampaignErrorEnum_CAMPAIGN_LABEL_DOES_NOT_EXIST CampaignErrorEnum_CampaignError = 24 + // The label has already been attached to the campaign. + CampaignErrorEnum_CAMPAIGN_LABEL_ALREADY_EXISTS CampaignErrorEnum_CampaignError = 25 + // A ShoppingSetting was not found when creating a shopping campaign. + CampaignErrorEnum_MISSING_SHOPPING_SETTING CampaignErrorEnum_CampaignError = 26 + // The country in shopping setting is not an allowed country. + CampaignErrorEnum_INVALID_SHOPPING_SALES_COUNTRY CampaignErrorEnum_CampaignError = 27 + // A Campaign with channel sub type UNIVERSAL_APP_CAMPAIGN must have a + // UniversalAppCampaignSetting specified. + CampaignErrorEnum_MISSING_UNIVERSAL_APP_CAMPAIGN_SETTING CampaignErrorEnum_CampaignError = 30 + // The requested channel type is not available according to the customer's + // account setting. + CampaignErrorEnum_ADVERTISING_CHANNEL_TYPE_NOT_AVAILABLE_FOR_ACCOUNT_TYPE CampaignErrorEnum_CampaignError = 31 + // The AdvertisingChannelSubType is not a valid subtype of the primary + // channel type. + CampaignErrorEnum_INVALID_ADVERTISING_CHANNEL_SUB_TYPE CampaignErrorEnum_CampaignError = 32 + // At least one conversion must be selected. + CampaignErrorEnum_AT_LEAST_ONE_CONVERSION_MUST_BE_SELECTED CampaignErrorEnum_CampaignError = 33 + // Setting ad rotation mode for a campaign is not allowed. Ad rotation mode + // at campaign is deprecated. + CampaignErrorEnum_CANNOT_SET_AD_ROTATION_MODE CampaignErrorEnum_CampaignError = 34 + // Trying to change start date on a campaign that has started. + CampaignErrorEnum_CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED CampaignErrorEnum_CampaignError = 35 + // Trying to modify a date into the past. + CampaignErrorEnum_CANNOT_SET_DATE_TO_PAST CampaignErrorEnum_CampaignError = 36 + // Hotel center id in the hotel setting does not match any customer links. + CampaignErrorEnum_MISSING_HOTEL_CUSTOMER_LINK CampaignErrorEnum_CampaignError = 37 + // Hotel center id in the hotel setting must match an active customer link. 
+ CampaignErrorEnum_INVALID_HOTEL_CUSTOMER_LINK CampaignErrorEnum_CampaignError = 38 + // Hotel setting was not found when creating a hotel ads campaign. + CampaignErrorEnum_MISSING_HOTEL_SETTING CampaignErrorEnum_CampaignError = 39 + // A Campaign cannot use shared campaign budgets and be part of a campaign + // group. + CampaignErrorEnum_CANNOT_USE_SHARED_CAMPAIGN_BUDGET_WHILE_PART_OF_CAMPAIGN_GROUP CampaignErrorEnum_CampaignError = 40 + // The app ID was not found. + CampaignErrorEnum_APP_NOT_FOUND CampaignErrorEnum_CampaignError = 41 +) + +var CampaignErrorEnum_CampaignError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "CANNOT_TARGET_CONTENT_NETWORK", + 4: "CANNOT_TARGET_SEARCH_NETWORK", + 5: "CANNOT_TARGET_SEARCH_NETWORK_WITHOUT_GOOGLE_SEARCH", + 6: "CANNOT_TARGET_GOOGLE_SEARCH_FOR_CPM_CAMPAIGN", + 7: "CAMPAIGN_MUST_TARGET_AT_LEAST_ONE_NETWORK", + 8: "CANNOT_TARGET_PARTNER_SEARCH_NETWORK", + 9: "CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CRITERIA_LEVEL_BIDDING_STRATEGY", + 10: "CAMPAIGN_DURATION_MUST_CONTAIN_ALL_RUNNABLE_TRIALS", + 11: "CANNOT_MODIFY_FOR_TRIAL_CAMPAIGN", + 12: "DUPLICATE_CAMPAIGN_NAME", + 13: "INCOMPATIBLE_CAMPAIGN_FIELD", + 14: "INVALID_CAMPAIGN_NAME", + 15: "INVALID_AD_SERVING_OPTIMIZATION_STATUS", + 16: "INVALID_TRACKING_URL", + 17: "CANNOT_SET_BOTH_TRACKING_URL_TEMPLATE_AND_TRACKING_SETTING", + 18: "MAX_IMPRESSIONS_NOT_IN_RANGE", + 19: "TIME_UNIT_NOT_SUPPORTED", + 20: "INVALID_OPERATION_IF_SERVING_STATUS_HAS_ENDED", + 21: "BUDGET_CANNOT_BE_SHARED", + 22: "CAMPAIGN_CANNOT_USE_SHARED_BUDGET", + 23: "CANNOT_CHANGE_BUDGET_ON_CAMPAIGN_WITH_TRIALS", + 24: "CAMPAIGN_LABEL_DOES_NOT_EXIST", + 25: "CAMPAIGN_LABEL_ALREADY_EXISTS", + 26: "MISSING_SHOPPING_SETTING", + 27: "INVALID_SHOPPING_SALES_COUNTRY", + 30: "MISSING_UNIVERSAL_APP_CAMPAIGN_SETTING", + 31: "ADVERTISING_CHANNEL_TYPE_NOT_AVAILABLE_FOR_ACCOUNT_TYPE", + 32: "INVALID_ADVERTISING_CHANNEL_SUB_TYPE", + 33: "AT_LEAST_ONE_CONVERSION_MUST_BE_SELECTED", + 34: "CANNOT_SET_AD_ROTATION_MODE", + 35: "CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED", + 36: "CANNOT_SET_DATE_TO_PAST", + 37: "MISSING_HOTEL_CUSTOMER_LINK", + 38: "INVALID_HOTEL_CUSTOMER_LINK", + 39: "MISSING_HOTEL_SETTING", + 40: "CANNOT_USE_SHARED_CAMPAIGN_BUDGET_WHILE_PART_OF_CAMPAIGN_GROUP", + 41: "APP_NOT_FOUND", +} +var CampaignErrorEnum_CampaignError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_TARGET_CONTENT_NETWORK": 3, + "CANNOT_TARGET_SEARCH_NETWORK": 4, + "CANNOT_TARGET_SEARCH_NETWORK_WITHOUT_GOOGLE_SEARCH": 5, + "CANNOT_TARGET_GOOGLE_SEARCH_FOR_CPM_CAMPAIGN": 6, + "CAMPAIGN_MUST_TARGET_AT_LEAST_ONE_NETWORK": 7, + "CANNOT_TARGET_PARTNER_SEARCH_NETWORK": 8, + "CANNOT_TARGET_CONTENT_NETWORK_ONLY_WITH_CRITERIA_LEVEL_BIDDING_STRATEGY": 9, + "CAMPAIGN_DURATION_MUST_CONTAIN_ALL_RUNNABLE_TRIALS": 10, + "CANNOT_MODIFY_FOR_TRIAL_CAMPAIGN": 11, + "DUPLICATE_CAMPAIGN_NAME": 12, + "INCOMPATIBLE_CAMPAIGN_FIELD": 13, + "INVALID_CAMPAIGN_NAME": 14, + "INVALID_AD_SERVING_OPTIMIZATION_STATUS": 15, + "INVALID_TRACKING_URL": 16, + "CANNOT_SET_BOTH_TRACKING_URL_TEMPLATE_AND_TRACKING_SETTING": 17, + "MAX_IMPRESSIONS_NOT_IN_RANGE": 18, + "TIME_UNIT_NOT_SUPPORTED": 19, + "INVALID_OPERATION_IF_SERVING_STATUS_HAS_ENDED": 20, + "BUDGET_CANNOT_BE_SHARED": 21, + "CAMPAIGN_CANNOT_USE_SHARED_BUDGET": 22, + "CANNOT_CHANGE_BUDGET_ON_CAMPAIGN_WITH_TRIALS": 23, + "CAMPAIGN_LABEL_DOES_NOT_EXIST": 24, + "CAMPAIGN_LABEL_ALREADY_EXISTS": 25, + "MISSING_SHOPPING_SETTING": 26, + "INVALID_SHOPPING_SALES_COUNTRY": 27, + 
"MISSING_UNIVERSAL_APP_CAMPAIGN_SETTING": 30, + "ADVERTISING_CHANNEL_TYPE_NOT_AVAILABLE_FOR_ACCOUNT_TYPE": 31, + "INVALID_ADVERTISING_CHANNEL_SUB_TYPE": 32, + "AT_LEAST_ONE_CONVERSION_MUST_BE_SELECTED": 33, + "CANNOT_SET_AD_ROTATION_MODE": 34, + "CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED": 35, + "CANNOT_SET_DATE_TO_PAST": 36, + "MISSING_HOTEL_CUSTOMER_LINK": 37, + "INVALID_HOTEL_CUSTOMER_LINK": 38, + "MISSING_HOTEL_SETTING": 39, + "CANNOT_USE_SHARED_CAMPAIGN_BUDGET_WHILE_PART_OF_CAMPAIGN_GROUP": 40, + "APP_NOT_FOUND": 41, +} + +func (x CampaignErrorEnum_CampaignError) String() string { + return proto.EnumName(CampaignErrorEnum_CampaignError_name, int32(x)) +} +func (CampaignErrorEnum_CampaignError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_error_1caad62f8629dd50, []int{0, 0} +} + +// Container for enum describing possible campaign errors. +type CampaignErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignErrorEnum) Reset() { *m = CampaignErrorEnum{} } +func (m *CampaignErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignErrorEnum) ProtoMessage() {} +func (*CampaignErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_error_1caad62f8629dd50, []int{0} +} +func (m *CampaignErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignErrorEnum.Unmarshal(m, b) +} +func (m *CampaignErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignErrorEnum.Merge(dst, src) +} +func (m *CampaignErrorEnum) XXX_Size() int { + return xxx_messageInfo_CampaignErrorEnum.Size(m) +} +func (m *CampaignErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignErrorEnum)(nil), "google.ads.googleads.v1.errors.CampaignErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CampaignErrorEnum_CampaignError", CampaignErrorEnum_CampaignError_name, CampaignErrorEnum_CampaignError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/campaign_error.proto", fileDescriptor_campaign_error_1caad62f8629dd50) +} + +var fileDescriptor_campaign_error_1caad62f8629dd50 = []byte{ + // 990 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x55, 0xff, 0x6e, 0x1b, 0x45, + 0x10, 0x26, 0x29, 0xb4, 0xb0, 0x49, 0xe8, 0x66, 0x69, 0x69, 0xda, 0x84, 0x34, 0x31, 0x69, 0x49, + 0xa1, 0xb5, 0x31, 0x95, 0x40, 0x72, 0xa5, 0x4a, 0xe3, 0xdb, 0xf1, 0x79, 0x95, 0xbd, 0xdd, 0xd3, + 0xee, 0x9e, 0x53, 0x57, 0x91, 0x56, 0xa6, 0x89, 0xac, 0x48, 0x8d, 0x1d, 0xc5, 0xa1, 0x2f, 0xc2, + 0x1b, 0xf0, 0x1f, 0x3c, 0x0a, 0x8f, 0x82, 0xc4, 0x3b, 0xa0, 0xbd, 0x5f, 0x8e, 0xdb, 0x92, 0xbf, + 0xbc, 0xb7, 0xf3, 0xcd, 0xb7, 0x33, 0xdf, 0xcc, 0x78, 0xc8, 0xf3, 0xf1, 0x74, 0x3a, 0x7e, 0x7b, + 0xd2, 0x1a, 0x1d, 0xcf, 0x5a, 0xc5, 0x31, 0x9c, 0xde, 0xb5, 0x5b, 0x27, 0x17, 0x17, 0xd3, 0x8b, + 0x59, 0xeb, 0xcd, 0xe8, 0xec, 0x7c, 0x74, 0x3a, 0x9e, 0xf8, 0xfc, 0xbb, 0x79, 0x7e, 0x31, 0xbd, + 0x9c, 0xb2, 0xed, 0x02, 0xd9, 0x1c, 0x1d, 0xcf, 0x9a, 0xb5, 0x53, 0xf3, 0x5d, 0xbb, 0x59, 0x38, + 0x3d, 0xd8, 0xaa, 0x48, 0xcf, 0x4f, 0x5b, 0xa3, 0xc9, 0x64, 0x7a, 0x39, 0xba, 0x3c, 0x9d, 0x4e, + 
0x66, 0x85, 0x77, 0xe3, 0xcf, 0x55, 0xb2, 0x1e, 0x95, 0xb4, 0x18, 0x1c, 0x70, 0xf2, 0xdb, 0x59, + 0xe3, 0xf7, 0x55, 0xb2, 0xb6, 0x70, 0xcb, 0x6e, 0x93, 0x95, 0x4c, 0xd9, 0x14, 0x23, 0xd1, 0x13, + 0xc8, 0xe9, 0x27, 0x6c, 0x85, 0xdc, 0xca, 0xd4, 0x81, 0xd2, 0x87, 0x8a, 0x2e, 0xb1, 0x5d, 0xf2, + 0x4d, 0x04, 0x4a, 0x69, 0xe7, 0x1d, 0x98, 0x18, 0x9d, 0x8f, 0xb4, 0x72, 0xa8, 0x9c, 0x57, 0xe8, + 0x0e, 0xb5, 0x39, 0xa0, 0x37, 0xd8, 0x0e, 0xd9, 0x5a, 0x84, 0x58, 0x04, 0x13, 0xf5, 0x6b, 0xc4, + 0xa7, 0xec, 0x67, 0xf2, 0xd3, 0x75, 0x08, 0x7f, 0x28, 0x5c, 0x5f, 0x67, 0xce, 0xc7, 0x5a, 0xc7, + 0x12, 0x4b, 0x2b, 0xfd, 0x8c, 0xfd, 0x48, 0x9e, 0x2e, 0xfa, 0x2d, 0x00, 0x7c, 0x4f, 0x1b, 0x1f, + 0xa5, 0x89, 0x8f, 0x20, 0x49, 0x41, 0xc4, 0x8a, 0xde, 0x64, 0xcf, 0xc8, 0x93, 0xea, 0xcb, 0x27, + 0x99, 0xad, 0x1d, 0xc1, 0x79, 0x89, 0x60, 0x9d, 0xd7, 0x0a, 0xeb, 0xc0, 0x6e, 0xb1, 0x7d, 0xb2, + 0xb7, 0xf8, 0x40, 0x0a, 0xc6, 0x29, 0x34, 0xef, 0xa7, 0xf0, 0x39, 0x3b, 0x20, 0xf1, 0xb5, 0x3a, + 0x78, 0xad, 0xe4, 0x30, 0x4f, 0xc4, 0x47, 0x46, 0x38, 0x34, 0x02, 0xbc, 0xc4, 0x01, 0x4a, 0xdf, + 0x15, 0x9c, 0x0b, 0x15, 0x7b, 0xeb, 0x0c, 0x38, 0x8c, 0x87, 0xf4, 0x8b, 0x42, 0x8f, 0x32, 0x4a, + 0x9e, 0x19, 0x70, 0x42, 0x97, 0xe1, 0x06, 0x56, 0x10, 0xca, 0x83, 0x94, 0xde, 0x64, 0x4a, 0x41, + 0x57, 0xa2, 0x77, 0x46, 0x80, 0xb4, 0x94, 0xb0, 0x3d, 0xb2, 0x53, 0x06, 0x91, 0x68, 0x2e, 0x7a, + 0xc3, 0x5c, 0x81, 0xdc, 0x3a, 0xd7, 0x60, 0x85, 0x6d, 0x92, 0x7b, 0x3c, 0x4b, 0xa5, 0x88, 0xc0, + 0x61, 0x7d, 0xef, 0x15, 0x24, 0x48, 0x57, 0xd9, 0x43, 0xb2, 0x29, 0x54, 0xa4, 0x93, 0x14, 0x9c, + 0x08, 0xdc, 0xb5, 0xbd, 0x27, 0x50, 0x72, 0xba, 0xc6, 0xee, 0x93, 0xbb, 0x42, 0x0d, 0x40, 0x0a, + 0xfe, 0x9e, 0xef, 0x97, 0xec, 0x7b, 0xf2, 0xb8, 0x32, 0x01, 0xf7, 0x16, 0xcd, 0x20, 0xe4, 0xa5, + 0x53, 0x27, 0x12, 0xf1, 0xba, 0x48, 0xc2, 0x3a, 0x70, 0x99, 0xa5, 0xb7, 0xd9, 0x06, 0xb9, 0x53, + 0x61, 0x9d, 0x81, 0xe8, 0x20, 0x20, 0x33, 0x23, 0x29, 0x65, 0x2f, 0x49, 0xa7, 0x4c, 0xc2, 0xa2, + 0xf3, 0x5d, 0xed, 0xfa, 0x0b, 0x08, 0xef, 0x30, 0x49, 0x65, 0x08, 0x1d, 0xd4, 0x15, 0x5f, 0x8b, + 0xce, 0x09, 0x15, 0xd3, 0xf5, 0xd0, 0x6e, 0x09, 0xbc, 0xf2, 0x22, 0x49, 0x0d, 0x5a, 0x2b, 0xb4, + 0xb2, 0x3e, 0x90, 0x09, 0xe5, 0x0d, 0xa8, 0x18, 0x29, 0x0b, 0x02, 0x38, 0x91, 0xa0, 0xcf, 0x94, + 0x70, 0xb9, 0xcd, 0x66, 0x69, 0xaa, 0x8d, 0x43, 0x4e, 0xbf, 0x62, 0x6d, 0xf2, 0xac, 0x0a, 0x4c, + 0xa7, 0x58, 0x6a, 0x2f, 0x7a, 0x75, 0x3a, 0x45, 0x06, 0xbe, 0x0f, 0xd6, 0xa3, 0xe2, 0xc8, 0xe9, + 0x9d, 0xc0, 0xd7, 0xcd, 0x78, 0x5e, 0xf4, 0x22, 0xf0, 0x2e, 0x7a, 0xdb, 0x07, 0x83, 0x9c, 0xde, + 0x65, 0x8f, 0xc8, 0x6e, 0xad, 0x53, 0x69, 0xce, 0x6c, 0x65, 0xf7, 0x85, 0x1f, 0xfd, 0xfa, 0x4a, + 0x2b, 0x47, 0xfd, 0x10, 0x66, 0x69, 0xf1, 0x5a, 0xcd, 0x65, 0xce, 0xbb, 0xa7, 0x2c, 0xf6, 0xbd, + 0x62, 0xf2, 0x4a, 0x8b, 0x84, 0x2e, 0x4a, 0xcf, 0x35, 0x16, 0xb9, 0xe2, 0x2b, 0x61, 0x1d, 0xdd, + 0xf8, 0x08, 0x04, 0xa4, 0x41, 0xe0, 0xc3, 0x02, 0x61, 0xe9, 0x7d, 0xb6, 0x45, 0x36, 0x12, 0x61, + 0x6d, 0x9e, 0x59, 0x5f, 0xa7, 0xe9, 0x55, 0x2d, 0x1f, 0xb0, 0x06, 0xd9, 0xae, 0xc4, 0x98, 0x5b, + 0x41, 0xa2, 0xf5, 0x91, 0xce, 0x94, 0x33, 0x43, 0xba, 0x19, 0xaa, 0x5e, 0x31, 0x64, 0x4a, 0x0c, + 0xd0, 0x58, 0x90, 0x1e, 0xd2, 0x74, 0x1e, 0x77, 0xc5, 0xb7, 0xcd, 0x5e, 0x90, 0x5f, 0x80, 0x0f, + 0xd0, 0x38, 0x91, 0xe3, 0x43, 0xaa, 0x0a, 0xa5, 0x77, 0xc3, 0x14, 0xf3, 0xc0, 0x61, 0x00, 0x42, + 0xe6, 0x3d, 0x1d, 0x3a, 0x17, 0xa2, 0xfc, 0x95, 0xdc, 0x4c, 0x1f, 0x86, 0x61, 0x9c, 0xb7, 0xd7, + 0x87, 0x24, 0x36, 0xeb, 0x16, 0xc8, 0x1d, 0xf6, 0x94, 0xec, 0x2f, 0x0c, 0x74, 0xa4, 0x55, 0x88, + 0xab, 0x9e, 0xa2, 0x50, 
0x1f, 0x94, 0x18, 0x85, 0x8a, 0xef, 0x86, 0x96, 0xbf, 0xd2, 0x70, 0xc0, + 0xbd, 0xd1, 0xae, 0x1c, 0x39, 0xcd, 0x91, 0x36, 0x58, 0x8b, 0xfc, 0xb0, 0x38, 0x56, 0xd6, 0x81, + 0x71, 0x9e, 0x87, 0x26, 0x14, 0xbd, 0x5a, 0xd3, 0xfc, 0x16, 0x39, 0xfd, 0x36, 0x34, 0xc4, 0x15, + 0xc6, 0x1c, 0xe7, 0xb4, 0x4f, 0xc1, 0x3a, 0xba, 0x17, 0x9e, 0xab, 0xf4, 0xea, 0x6b, 0x87, 0xd2, + 0x47, 0x99, 0x75, 0x3a, 0x41, 0xe3, 0xa5, 0x50, 0x07, 0xf4, 0x51, 0x31, 0x82, 0x45, 0x9e, 0x1f, + 0x03, 0x3c, 0x0e, 0x23, 0xb8, 0xc8, 0x50, 0x09, 0xfc, 0x1d, 0xeb, 0x92, 0x97, 0x1f, 0x36, 0x59, + 0x5d, 0x88, 0xb2, 0xa7, 0x0e, 0xfb, 0x42, 0x62, 0xfe, 0x57, 0xe6, 0x75, 0x6f, 0x6e, 0x8d, 0x8d, + 0xce, 0x52, 0xba, 0xcf, 0xd6, 0xc9, 0x5a, 0x28, 0x5f, 0x20, 0xe9, 0xe9, 0x4c, 0x71, 0xfa, 0xa4, + 0xfb, 0xef, 0x12, 0x69, 0xbc, 0x99, 0x9e, 0x35, 0xaf, 0x5f, 0x38, 0x5d, 0xb6, 0xb0, 0x39, 0xd2, + 0xb0, 0x66, 0xd2, 0xa5, 0xd7, 0xbc, 0xf4, 0x1a, 0x4f, 0xdf, 0x8e, 0x26, 0xe3, 0xe6, 0xf4, 0x62, + 0xdc, 0x1a, 0x9f, 0x4c, 0xf2, 0x25, 0x54, 0xed, 0xba, 0xf3, 0xd3, 0xd9, 0xff, 0xad, 0xbe, 0x17, + 0xc5, 0xcf, 0x1f, 0xcb, 0x37, 0x62, 0x80, 0xbf, 0x96, 0xb7, 0xe3, 0x82, 0x0c, 0x8e, 0x67, 0xcd, + 0xe2, 0x18, 0x4e, 0x83, 0x76, 0x33, 0x7f, 0x72, 0xf6, 0x77, 0x05, 0x38, 0x82, 0xe3, 0xd9, 0x51, + 0x0d, 0x38, 0x1a, 0xb4, 0x8f, 0x0a, 0xc0, 0x3f, 0xcb, 0x8d, 0xe2, 0xb6, 0xd3, 0x81, 0xe3, 0x59, + 0xa7, 0x53, 0x43, 0x3a, 0x9d, 0x41, 0xbb, 0xd3, 0x29, 0x40, 0xbf, 0xde, 0xcc, 0xa3, 0x7b, 0xfe, + 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfb, 0x15, 0xbb, 0x12, 0x97, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_feed_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_feed_error.pb.go new file mode 100644 index 0000000..75cf34e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_feed_error.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/campaign_feed_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible campaign feed errors. +type CampaignFeedErrorEnum_CampaignFeedError int32 + +const ( + // Enum unspecified. + CampaignFeedErrorEnum_UNSPECIFIED CampaignFeedErrorEnum_CampaignFeedError = 0 + // The received error code is not known in this version. + CampaignFeedErrorEnum_UNKNOWN CampaignFeedErrorEnum_CampaignFeedError = 1 + // An active feed already exists for this campaign and placeholder type. + CampaignFeedErrorEnum_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE CampaignFeedErrorEnum_CampaignFeedError = 2 + // The specified feed is removed. + CampaignFeedErrorEnum_CANNOT_CREATE_FOR_REMOVED_FEED CampaignFeedErrorEnum_CampaignFeedError = 4 + // The CampaignFeed already exists. UPDATE should be used to modify the + // existing CampaignFeed. 
+ CampaignFeedErrorEnum_CANNOT_CREATE_ALREADY_EXISTING_CAMPAIGN_FEED CampaignFeedErrorEnum_CampaignFeedError = 5 + // Cannot update removed campaign feed. + CampaignFeedErrorEnum_CANNOT_MODIFY_REMOVED_CAMPAIGN_FEED CampaignFeedErrorEnum_CampaignFeedError = 6 + // Invalid placeholder type. + CampaignFeedErrorEnum_INVALID_PLACEHOLDER_TYPE CampaignFeedErrorEnum_CampaignFeedError = 7 + // Feed mapping for this placeholder type does not exist. + CampaignFeedErrorEnum_MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE CampaignFeedErrorEnum_CampaignFeedError = 8 +) + +var CampaignFeedErrorEnum_CampaignFeedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 4: "CANNOT_CREATE_FOR_REMOVED_FEED", + 5: "CANNOT_CREATE_ALREADY_EXISTING_CAMPAIGN_FEED", + 6: "CANNOT_MODIFY_REMOVED_CAMPAIGN_FEED", + 7: "INVALID_PLACEHOLDER_TYPE", + 8: "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE", +} +var CampaignFeedErrorEnum_CampaignFeedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 2, + "CANNOT_CREATE_FOR_REMOVED_FEED": 4, + "CANNOT_CREATE_ALREADY_EXISTING_CAMPAIGN_FEED": 5, + "CANNOT_MODIFY_REMOVED_CAMPAIGN_FEED": 6, + "INVALID_PLACEHOLDER_TYPE": 7, + "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE": 8, +} + +func (x CampaignFeedErrorEnum_CampaignFeedError) String() string { + return proto.EnumName(CampaignFeedErrorEnum_CampaignFeedError_name, int32(x)) +} +func (CampaignFeedErrorEnum_CampaignFeedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_error_0aa2b696cc3972eb, []int{0, 0} +} + +// Container for enum describing possible campaign feed errors. +type CampaignFeedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignFeedErrorEnum) Reset() { *m = CampaignFeedErrorEnum{} } +func (m *CampaignFeedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignFeedErrorEnum) ProtoMessage() {} +func (*CampaignFeedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_error_0aa2b696cc3972eb, []int{0} +} +func (m *CampaignFeedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignFeedErrorEnum.Unmarshal(m, b) +} +func (m *CampaignFeedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignFeedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignFeedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignFeedErrorEnum.Merge(dst, src) +} +func (m *CampaignFeedErrorEnum) XXX_Size() int { + return xxx_messageInfo_CampaignFeedErrorEnum.Size(m) +} +func (m *CampaignFeedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignFeedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignFeedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignFeedErrorEnum)(nil), "google.ads.googleads.v1.errors.CampaignFeedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CampaignFeedErrorEnum_CampaignFeedError", CampaignFeedErrorEnum_CampaignFeedError_name, CampaignFeedErrorEnum_CampaignFeedError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/campaign_feed_error.proto", fileDescriptor_campaign_feed_error_0aa2b696cc3972eb) +} + +var fileDescriptor_campaign_feed_error_0aa2b696cc3972eb = []byte{ + // 423 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0x7c, 0x92, 0x41, 0x8e, 0xd3, 0x30, + 0x14, 0x86, 0x69, 0x80, 0x19, 0xe4, 0x59, 0x50, 0x2c, 0x81, 0x10, 0x1a, 0x75, 0x11, 0x16, 0xb0, + 0x18, 0x39, 0x44, 0x6c, 0x90, 0x59, 0x79, 0x62, 0xa7, 0x58, 0x34, 0x8e, 0x95, 0x74, 0x02, 0x45, + 0x95, 0xac, 0x30, 0x09, 0x51, 0xa5, 0x69, 0x5c, 0xc5, 0x65, 0x0e, 0xc4, 0xb2, 0xe7, 0x60, 0xc5, + 0x51, 0xd8, 0x71, 0x03, 0xe4, 0xb8, 0xad, 0x54, 0x15, 0x66, 0x95, 0x5f, 0xce, 0xf7, 0xff, 0x7e, + 0xcf, 0xef, 0x81, 0x77, 0x8d, 0xd6, 0xcd, 0x4d, 0x1d, 0x94, 0x95, 0x09, 0x9c, 0xb4, 0xea, 0x36, + 0x0c, 0xea, 0xae, 0xd3, 0x9d, 0x09, 0xae, 0xcb, 0xe5, 0xaa, 0x5c, 0x34, 0xad, 0xfa, 0x56, 0xd7, + 0x95, 0xea, 0x0f, 0xd1, 0xaa, 0xd3, 0x6b, 0x0d, 0x47, 0x0e, 0x47, 0x65, 0x65, 0xd0, 0xde, 0x89, + 0x6e, 0x43, 0xe4, 0x9c, 0x2f, 0xce, 0x77, 0xc9, 0xab, 0x45, 0x50, 0xb6, 0xad, 0x5e, 0x97, 0xeb, + 0x85, 0x6e, 0x8d, 0x73, 0xfb, 0x3f, 0x3d, 0xf0, 0x34, 0xda, 0x66, 0xc7, 0x75, 0x5d, 0x31, 0x6b, + 0x62, 0xed, 0xf7, 0xa5, 0xbf, 0xf1, 0xc0, 0x93, 0xa3, 0x3f, 0xf0, 0x31, 0x38, 0xbb, 0x12, 0xb9, + 0x64, 0x11, 0x8f, 0x39, 0xa3, 0xc3, 0x7b, 0xf0, 0x0c, 0x9c, 0x5e, 0x89, 0x8f, 0x22, 0xfd, 0x24, + 0x86, 0x03, 0x78, 0x01, 0x5e, 0xc7, 0x8c, 0x51, 0x45, 0x26, 0x19, 0x23, 0x74, 0xa6, 0xd8, 0x67, + 0x9e, 0x4f, 0x73, 0x15, 0xa7, 0x99, 0x92, 0x13, 0x12, 0xb1, 0x0f, 0xe9, 0x84, 0xb2, 0x4c, 0x4d, + 0x67, 0x92, 0x0d, 0x3d, 0xe8, 0x83, 0x51, 0x44, 0x84, 0x48, 0xa7, 0x2a, 0xca, 0x18, 0x99, 0xb2, + 0x9e, 0xcb, 0x58, 0x92, 0x16, 0x8c, 0x2a, 0x9b, 0x33, 0x7c, 0x00, 0xdf, 0x80, 0x8b, 0x43, 0xe6, + 0x20, 0x9a, 0x8b, 0xb1, 0x8a, 0x48, 0x22, 0x09, 0x1f, 0x0b, 0xe7, 0x78, 0x08, 0x5f, 0x81, 0x97, + 0x5b, 0x47, 0x92, 0x52, 0x1e, 0xcf, 0xf6, 0x89, 0x87, 0xe0, 0x09, 0x3c, 0x07, 0xcf, 0xb9, 0x28, + 0xc8, 0x84, 0xd3, 0xe3, 0xe2, 0x4e, 0x6d, 0x2b, 0x09, 0xcf, 0x73, 0x7b, 0x83, 0xe5, 0x13, 0x22, + 0x65, 0xaf, 0xff, 0xd5, 0xca, 0xa3, 0xcb, 0x3f, 0x03, 0xe0, 0x5f, 0xeb, 0x25, 0xba, 0x7b, 0x16, + 0x97, 0xcf, 0x8e, 0x1e, 0x54, 0xda, 0x29, 0xc8, 0xc1, 0x17, 0xba, 0x75, 0x36, 0xfa, 0xa6, 0x6c, + 0x1b, 0xa4, 0xbb, 0x26, 0x68, 0xea, 0xb6, 0x9f, 0xd1, 0x6e, 0x1f, 0x56, 0x0b, 0xf3, 0xbf, 0xf5, + 0x78, 0xef, 0x3e, 0x3f, 0xbc, 0xfb, 0x63, 0x42, 0x36, 0xde, 0x68, 0xec, 0xc2, 0x48, 0x65, 0x90, + 0x93, 0x56, 0x15, 0x21, 0xea, 0xaf, 0x34, 0xbf, 0x76, 0xc0, 0x9c, 0x54, 0x66, 0xbe, 0x07, 0xe6, + 0x45, 0x38, 0x77, 0xc0, 0x6f, 0xcf, 0x77, 0xa7, 0x18, 0x93, 0xca, 0x60, 0xbc, 0x47, 0x30, 0x2e, + 0x42, 0x8c, 0x1d, 0xf4, 0xf5, 0xa4, 0xaf, 0xee, 0xed, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe2, + 0x70, 0xf2, 0xc1, 0xbb, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_shared_set_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_shared_set_error.pb.go new file mode 100644 index 0000000..4cb3f7b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/campaign_shared_set_error.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/campaign_shared_set_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible campaign shared set errors. +type CampaignSharedSetErrorEnum_CampaignSharedSetError int32 + +const ( + // Enum unspecified. + CampaignSharedSetErrorEnum_UNSPECIFIED CampaignSharedSetErrorEnum_CampaignSharedSetError = 0 + // The received error code is not known in this version. + CampaignSharedSetErrorEnum_UNKNOWN CampaignSharedSetErrorEnum_CampaignSharedSetError = 1 + // The shared set belongs to another customer and permission isn't granted. + CampaignSharedSetErrorEnum_SHARED_SET_ACCESS_DENIED CampaignSharedSetErrorEnum_CampaignSharedSetError = 2 +) + +var CampaignSharedSetErrorEnum_CampaignSharedSetError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "SHARED_SET_ACCESS_DENIED", +} +var CampaignSharedSetErrorEnum_CampaignSharedSetError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SHARED_SET_ACCESS_DENIED": 2, +} + +func (x CampaignSharedSetErrorEnum_CampaignSharedSetError) String() string { + return proto.EnumName(CampaignSharedSetErrorEnum_CampaignSharedSetError_name, int32(x)) +} +func (CampaignSharedSetErrorEnum_CampaignSharedSetError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_error_cec87191eef93255, []int{0, 0} +} + +// Container for enum describing possible campaign shared set errors. +type CampaignSharedSetErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignSharedSetErrorEnum) Reset() { *m = CampaignSharedSetErrorEnum{} } +func (m *CampaignSharedSetErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CampaignSharedSetErrorEnum) ProtoMessage() {} +func (*CampaignSharedSetErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_error_cec87191eef93255, []int{0} +} +func (m *CampaignSharedSetErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignSharedSetErrorEnum.Unmarshal(m, b) +} +func (m *CampaignSharedSetErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignSharedSetErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CampaignSharedSetErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignSharedSetErrorEnum.Merge(dst, src) +} +func (m *CampaignSharedSetErrorEnum) XXX_Size() int { + return xxx_messageInfo_CampaignSharedSetErrorEnum.Size(m) +} +func (m *CampaignSharedSetErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignSharedSetErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignSharedSetErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CampaignSharedSetErrorEnum)(nil), "google.ads.googleads.v1.errors.CampaignSharedSetErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CampaignSharedSetErrorEnum_CampaignSharedSetError", CampaignSharedSetErrorEnum_CampaignSharedSetError_name, CampaignSharedSetErrorEnum_CampaignSharedSetError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/campaign_shared_set_error.proto", fileDescriptor_campaign_shared_set_error_cec87191eef93255) +} + +var 
fileDescriptor_campaign_shared_set_error_cec87191eef93255 = []byte{ + // 315 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xf3, 0x30, + 0x14, 0xc7, 0xbf, 0xf5, 0x03, 0x85, 0xec, 0xc2, 0xd1, 0x0b, 0x91, 0x39, 0x76, 0xd1, 0x07, 0x48, + 0x29, 0xde, 0x45, 0x10, 0xb2, 0x36, 0xce, 0x21, 0xd4, 0x61, 0xb6, 0x09, 0x52, 0x08, 0x71, 0x09, + 0xb1, 0xb0, 0x25, 0x25, 0xa9, 0x7b, 0x20, 0x2f, 0x7d, 0x14, 0x1f, 0xc5, 0x07, 0xf0, 0x5a, 0xda, + 0x6c, 0xbd, 0x9a, 0x5e, 0xf5, 0x4f, 0xcf, 0xef, 0x77, 0xce, 0xc9, 0x01, 0x37, 0xca, 0x18, 0xb5, + 0x91, 0x31, 0x17, 0x2e, 0xf6, 0xb1, 0x49, 0xbb, 0x24, 0x96, 0xd6, 0x1a, 0xeb, 0xe2, 0x35, 0xdf, + 0x56, 0xbc, 0x54, 0x9a, 0xb9, 0x57, 0x6e, 0xa5, 0x60, 0x4e, 0xd6, 0xac, 0x2d, 0xc1, 0xca, 0x9a, + 0xda, 0x84, 0x63, 0x2f, 0x41, 0x2e, 0x1c, 0xec, 0x7c, 0xb8, 0x4b, 0xa0, 0xf7, 0x87, 0xa3, 0x43, + 0xff, 0xaa, 0x8c, 0xb9, 0xd6, 0xa6, 0xe6, 0x75, 0x69, 0xb4, 0xf3, 0x76, 0x64, 0xc1, 0x30, 0xdd, + 0x0f, 0xa0, 0x6d, 0x7f, 0x2a, 0x6b, 0xd2, 0x88, 0x44, 0xbf, 0x6d, 0xa3, 0x05, 0x38, 0x3f, 0x5e, + 0x0d, 0xcf, 0x40, 0x7f, 0x99, 0xd3, 0x39, 0x49, 0x67, 0xb7, 0x33, 0x92, 0x0d, 0xfe, 0x85, 0x7d, + 0x70, 0xba, 0xcc, 0xef, 0xf3, 0x87, 0xa7, 0x7c, 0xd0, 0x0b, 0x47, 0xe0, 0x82, 0xde, 0xe1, 0x47, + 0x92, 0x31, 0x4a, 0x16, 0x0c, 0xa7, 0x29, 0xa1, 0x94, 0x65, 0x24, 0x6f, 0xd0, 0x60, 0xf2, 0xdd, + 0x03, 0xd1, 0xda, 0x6c, 0xe1, 0xdf, 0x8b, 0x4f, 0x2e, 0x8f, 0x8f, 0x9e, 0x37, 0x7b, 0xcf, 0x7b, + 0xcf, 0xd9, 0x5e, 0x57, 0x66, 0xc3, 0xb5, 0x82, 0xc6, 0xaa, 0x58, 0x49, 0xdd, 0xbe, 0xea, 0x70, + 0xc7, 0xaa, 0x74, 0xbf, 0x9d, 0xf5, 0xda, 0x7f, 0xde, 0x83, 0xff, 0x53, 0x8c, 0x3f, 0x82, 0xf1, + 0xd4, 0x37, 0xc3, 0xc2, 0x41, 0x1f, 0x9b, 0xb4, 0x4a, 0x60, 0x3b, 0xd2, 0x7d, 0x1e, 0x80, 0x02, + 0x0b, 0x57, 0x74, 0x40, 0xb1, 0x4a, 0x0a, 0x0f, 0x7c, 0x05, 0x91, 0xff, 0x8b, 0x10, 0x16, 0x0e, + 0xa1, 0x0e, 0x41, 0x68, 0x95, 0x20, 0xe4, 0xa1, 0x97, 0x93, 0x76, 0xbb, 0xab, 0x9f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xe2, 0x6d, 0x0c, 0xff, 0xf3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/change_status_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/change_status_error.pb.go new file mode 100644 index 0000000..e2b1b0a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/change_status_error.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/change_status_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible change status errors. +type ChangeStatusErrorEnum_ChangeStatusError int32 + +const ( + // Enum unspecified. + ChangeStatusErrorEnum_UNSPECIFIED ChangeStatusErrorEnum_ChangeStatusError = 0 + // The received error code is not known in this version. 
+ ChangeStatusErrorEnum_UNKNOWN ChangeStatusErrorEnum_ChangeStatusError = 1 + // The requested start date is too old. + ChangeStatusErrorEnum_START_DATE_TOO_OLD ChangeStatusErrorEnum_ChangeStatusError = 3 +) + +var ChangeStatusErrorEnum_ChangeStatusError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "START_DATE_TOO_OLD", +} +var ChangeStatusErrorEnum_ChangeStatusError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "START_DATE_TOO_OLD": 3, +} + +func (x ChangeStatusErrorEnum_ChangeStatusError) String() string { + return proto.EnumName(ChangeStatusErrorEnum_ChangeStatusError_name, int32(x)) +} +func (ChangeStatusErrorEnum_ChangeStatusError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_change_status_error_dcadad2fbec236b5, []int{0, 0} +} + +// Container for enum describing possible change status errors. +type ChangeStatusErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeStatusErrorEnum) Reset() { *m = ChangeStatusErrorEnum{} } +func (m *ChangeStatusErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ChangeStatusErrorEnum) ProtoMessage() {} +func (*ChangeStatusErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_change_status_error_dcadad2fbec236b5, []int{0} +} +func (m *ChangeStatusErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeStatusErrorEnum.Unmarshal(m, b) +} +func (m *ChangeStatusErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeStatusErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ChangeStatusErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeStatusErrorEnum.Merge(dst, src) +} +func (m *ChangeStatusErrorEnum) XXX_Size() int { + return xxx_messageInfo_ChangeStatusErrorEnum.Size(m) +} +func (m *ChangeStatusErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeStatusErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeStatusErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ChangeStatusErrorEnum)(nil), "google.ads.googleads.v1.errors.ChangeStatusErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ChangeStatusErrorEnum_ChangeStatusError", ChangeStatusErrorEnum_ChangeStatusError_name, ChangeStatusErrorEnum_ChangeStatusError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/change_status_error.proto", fileDescriptor_change_status_error_dcadad2fbec236b5) +} + +var fileDescriptor_change_status_error_dcadad2fbec236b5 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x6a, 0xb3, 0x40, + 0x14, 0x85, 0x7f, 0x0d, 0xfc, 0x85, 0xc9, 0xa2, 0x56, 0x68, 0x16, 0xa5, 0x64, 0xe1, 0x03, 0xcc, + 0x20, 0xdd, 0x94, 0xe9, 0x6a, 0x12, 0x6d, 0x90, 0x16, 0x95, 0x6a, 0x2c, 0x14, 0x41, 0x26, 0x51, + 0xa6, 0x42, 0x32, 0x23, 0x8e, 0xc9, 0x03, 0x75, 0xd9, 0x47, 0xe9, 0xa3, 0x74, 0xd7, 0x37, 0x28, + 0xce, 0x54, 0x37, 0xa1, 0x5d, 0x79, 0xb8, 0x7e, 0xe7, 0xdc, 0x33, 0x17, 0xdc, 0x32, 0x21, 0xd8, + 0xae, 0x42, 0xb4, 0x94, 0x48, 0xcb, 0x5e, 0x1d, 0x5d, 0x54, 0xb5, 0xad, 0x68, 0x25, 0xda, 0xbe, + 0x52, 0xce, 0xaa, 0x42, 0x76, 0xb4, 0x3b, 0xc8, 0x42, 0x0d, 0x61, 0xd3, 0x8a, 0x4e, 0xd8, 0x73, + 0x8d, 0x43, 0x5a, 0x4a, 0x38, 0x3a, 0xe1, 0xd1, 0x85, 0xda, 0x79, 0x75, 0x3d, 0x24, 0x37, 0x35, + 0xa2, 0x9c, 0x8b, 0x8e, 0x76, 0xb5, 0xe0, 0x52, 0xbb, 0x9d, 0x0d, 0xb8, 0x5c, 0xaa, 0xe8, 
0x44, + 0x25, 0xfb, 0xbd, 0xc7, 0xe7, 0x87, 0xbd, 0x13, 0x80, 0x8b, 0x93, 0x1f, 0xf6, 0x39, 0x98, 0xae, + 0xc3, 0x24, 0xf6, 0x97, 0xc1, 0x7d, 0xe0, 0x7b, 0xd6, 0x3f, 0x7b, 0x0a, 0xce, 0xd6, 0xe1, 0x43, + 0x18, 0x3d, 0x87, 0x96, 0x61, 0xcf, 0x80, 0x9d, 0xa4, 0xe4, 0x29, 0x2d, 0x3c, 0x92, 0xfa, 0x45, + 0x1a, 0x45, 0x45, 0xf4, 0xe8, 0x59, 0x93, 0xc5, 0x97, 0x01, 0x9c, 0xad, 0xd8, 0xc3, 0xbf, 0x8b, + 0x2e, 0x66, 0x27, 0xfb, 0xe2, 0xbe, 0x62, 0x6c, 0xbc, 0x78, 0x3f, 0x4e, 0x26, 0x76, 0x94, 0x33, + 0x28, 0x5a, 0x86, 0x58, 0xc5, 0xd5, 0x03, 0x86, 0x63, 0x35, 0xb5, 0xfc, 0xed, 0x76, 0x77, 0xfa, + 0xf3, 0x66, 0x4e, 0x56, 0x84, 0xbc, 0x9b, 0xf3, 0x95, 0x0e, 0x23, 0xa5, 0x84, 0x5a, 0xf6, 0x2a, + 0x73, 0xa1, 0x5a, 0x29, 0x3f, 0x06, 0x20, 0x27, 0xa5, 0xcc, 0x47, 0x20, 0xcf, 0xdc, 0x5c, 0x03, + 0x9f, 0xa6, 0xa3, 0xa7, 0x18, 0x93, 0x52, 0x62, 0x3c, 0x22, 0x18, 0x67, 0x2e, 0xc6, 0x1a, 0xda, + 0xfc, 0x57, 0xed, 0x6e, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0xd5, 0xa9, 0xfd, 0xb6, 0xd8, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/collection_size_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/collection_size_error.pb.go new file mode 100644 index 0000000..87529b1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/collection_size_error.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/collection_size_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible collection size errors. +type CollectionSizeErrorEnum_CollectionSizeError int32 + +const ( + // Enum unspecified. + CollectionSizeErrorEnum_UNSPECIFIED CollectionSizeErrorEnum_CollectionSizeError = 0 + // The received error code is not known in this version. + CollectionSizeErrorEnum_UNKNOWN CollectionSizeErrorEnum_CollectionSizeError = 1 + // Too few. + CollectionSizeErrorEnum_TOO_FEW CollectionSizeErrorEnum_CollectionSizeError = 2 + // Too many. + CollectionSizeErrorEnum_TOO_MANY CollectionSizeErrorEnum_CollectionSizeError = 3 +) + +var CollectionSizeErrorEnum_CollectionSizeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TOO_FEW", + 3: "TOO_MANY", +} +var CollectionSizeErrorEnum_CollectionSizeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TOO_FEW": 2, + "TOO_MANY": 3, +} + +func (x CollectionSizeErrorEnum_CollectionSizeError) String() string { + return proto.EnumName(CollectionSizeErrorEnum_CollectionSizeError_name, int32(x)) +} +func (CollectionSizeErrorEnum_CollectionSizeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_collection_size_error_e5383bdfcaf040c2, []int{0, 0} +} + +// Container for enum describing possible collection size errors. 
+type CollectionSizeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CollectionSizeErrorEnum) Reset() { *m = CollectionSizeErrorEnum{} } +func (m *CollectionSizeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CollectionSizeErrorEnum) ProtoMessage() {} +func (*CollectionSizeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_collection_size_error_e5383bdfcaf040c2, []int{0} +} +func (m *CollectionSizeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CollectionSizeErrorEnum.Unmarshal(m, b) +} +func (m *CollectionSizeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CollectionSizeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CollectionSizeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CollectionSizeErrorEnum.Merge(dst, src) +} +func (m *CollectionSizeErrorEnum) XXX_Size() int { + return xxx_messageInfo_CollectionSizeErrorEnum.Size(m) +} +func (m *CollectionSizeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CollectionSizeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CollectionSizeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CollectionSizeErrorEnum)(nil), "google.ads.googleads.v1.errors.CollectionSizeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CollectionSizeErrorEnum_CollectionSizeError", CollectionSizeErrorEnum_CollectionSizeError_name, CollectionSizeErrorEnum_CollectionSizeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/collection_size_error.proto", fileDescriptor_collection_size_error_e5383bdfcaf040c2) +} + +var fileDescriptor_collection_size_error_e5383bdfcaf040c2 = []byte{ + // 308 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xd4, 0xa2, + 0xa2, 0xfc, 0xa2, 0x62, 0xfd, 0xe4, 0xfc, 0x9c, 0x9c, 0xd4, 0xe4, 0x92, 0xcc, 0xfc, 0xbc, 0xf8, + 0xe2, 0xcc, 0xaa, 0xd4, 0x78, 0xb0, 0xb0, 0x5e, 0x41, 0x51, 0x7e, 0x49, 0xbe, 0x90, 0x1c, 0x44, + 0x83, 0x5e, 0x62, 0x4a, 0xb1, 0x1e, 0x5c, 0xaf, 0x5e, 0x99, 0xa1, 0x1e, 0x44, 0xaf, 0x94, 0x0c, + 0xcc, 0xec, 0x82, 0x4c, 0xfd, 0xc4, 0xbc, 0xbc, 0xfc, 0x92, 0x44, 0x90, 0x39, 0xc5, 0x10, 0xdd, + 0x4a, 0x99, 0x5c, 0xe2, 0xce, 0x70, 0xc3, 0x83, 0x33, 0xab, 0x52, 0x5d, 0x41, 0xba, 0x5c, 0xf3, + 0x4a, 0x73, 0x95, 0xfc, 0xb8, 0x84, 0xb1, 0x48, 0x09, 0xf1, 0x73, 0x71, 0x87, 0xfa, 0x05, 0x07, + 0xb8, 0x3a, 0x7b, 0xba, 0x79, 0xba, 0xba, 0x08, 0x30, 0x08, 0x71, 0x73, 0xb1, 0x87, 0xfa, 0x79, + 0xfb, 0xf9, 0x87, 0xfb, 0x09, 0x30, 0x82, 0x38, 0x21, 0xfe, 0xfe, 0xf1, 0x6e, 0xae, 0xe1, 0x02, + 0x4c, 0x42, 0x3c, 0x5c, 0x1c, 0x20, 0x8e, 0xaf, 0xa3, 0x5f, 0xa4, 0x00, 0xb3, 0xd3, 0x67, 0x46, + 0x2e, 0xa5, 0xe4, 0xfc, 0x5c, 0x3d, 0xfc, 0xee, 0x75, 0x92, 0xc0, 0x62, 0x69, 0x00, 0xc8, 0xad, + 0x01, 0x8c, 0x51, 0x2e, 0x50, 0xbd, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, + 0xfa, 0xe9, 0xa9, 0x79, 0x60, 0x9f, 0xc0, 0xc2, 0xad, 0x20, 0xb3, 0x18, 0x57, 0x30, 0x5a, 0x43, + 0xa8, 0x45, 0x4c, 0xcc, 0xee, 0x8e, 0x8e, 0xab, 0x98, 0xe4, 0xdc, 0x21, 0x86, 0x39, 0xa6, 0x14, + 0xeb, 0x41, 0x98, 0x20, 0x56, 0x98, 0xa1, 0x1e, 0xd8, 0xca, 0xe2, 0x53, 0x30, 0x05, 0x31, 0x8e, + 0x29, 0xc5, 0x31, 0x70, 0x05, 0x31, 0x61, 0x86, 0x31, 0x10, 0x05, 0xaf, 0x98, 0x94, 0x20, 0xa2, + 0x56, 0x56, 
0x8e, 0x29, 0xc5, 0x56, 0x56, 0x70, 0x25, 0x56, 0x56, 0x61, 0x86, 0x56, 0x56, 0x10, + 0x45, 0x49, 0x6c, 0x60, 0xd7, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x89, 0x03, 0xbb, + 0xe3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/context_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/context_error.pb.go new file mode 100644 index 0000000..caabf2d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/context_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/context_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible context errors. +type ContextErrorEnum_ContextError int32 + +const ( + // Enum unspecified. + ContextErrorEnum_UNSPECIFIED ContextErrorEnum_ContextError = 0 + // The received error code is not known in this version. + ContextErrorEnum_UNKNOWN ContextErrorEnum_ContextError = 1 + // The operation is not allowed for the given context. + ContextErrorEnum_OPERATION_NOT_PERMITTED_FOR_CONTEXT ContextErrorEnum_ContextError = 2 + // The operation is not allowed for removed resources. + ContextErrorEnum_OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE ContextErrorEnum_ContextError = 3 +) + +var ContextErrorEnum_ContextError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPERATION_NOT_PERMITTED_FOR_CONTEXT", + 3: "OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE", +} +var ContextErrorEnum_ContextError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPERATION_NOT_PERMITTED_FOR_CONTEXT": 2, + "OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE": 3, +} + +func (x ContextErrorEnum_ContextError) String() string { + return proto.EnumName(ContextErrorEnum_ContextError_name, int32(x)) +} +func (ContextErrorEnum_ContextError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_context_error_889c9ab3b358b3b5, []int{0, 0} +} + +// Container for enum describing possible context errors. 
+type ContextErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContextErrorEnum) Reset() { *m = ContextErrorEnum{} } +func (m *ContextErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ContextErrorEnum) ProtoMessage() {} +func (*ContextErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_context_error_889c9ab3b358b3b5, []int{0} +} +func (m *ContextErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContextErrorEnum.Unmarshal(m, b) +} +func (m *ContextErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContextErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ContextErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContextErrorEnum.Merge(dst, src) +} +func (m *ContextErrorEnum) XXX_Size() int { + return xxx_messageInfo_ContextErrorEnum.Size(m) +} +func (m *ContextErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ContextErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ContextErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ContextErrorEnum)(nil), "google.ads.googleads.v1.errors.ContextErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ContextErrorEnum_ContextError", ContextErrorEnum_ContextError_name, ContextErrorEnum_ContextError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/context_error.proto", fileDescriptor_context_error_889c9ab3b358b3b5) +} + +var fileDescriptor_context_error_889c9ab3b358b3b5 = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4a, 0xc3, 0x40, + 0x18, 0x85, 0x4d, 0x0a, 0x0a, 0x53, 0xc1, 0x98, 0xa5, 0x48, 0x17, 0x71, 0xe1, 0x46, 0x26, 0x46, + 0x77, 0xe3, 0x2a, 0x4d, 0xa6, 0x25, 0x48, 0x67, 0x42, 0x9a, 0x46, 0x91, 0x40, 0x88, 0x4d, 0x18, + 0x0a, 0xed, 0x4c, 0xc9, 0xc4, 0xe2, 0x0d, 0xbc, 0x84, 0x2b, 0x97, 0x1e, 0xc5, 0xa3, 0x88, 0x87, + 0x90, 0x64, 0x6c, 0xe8, 0xc6, 0xae, 0xf2, 0xf2, 0xf3, 0xbd, 0x37, 0xff, 0xfb, 0xc1, 0x0d, 0x13, + 0x82, 0x2d, 0x4b, 0x3b, 0x2f, 0xa4, 0xad, 0x64, 0xa3, 0x36, 0x8e, 0x5d, 0x56, 0x95, 0xa8, 0xa4, + 0x3d, 0x17, 0xbc, 0x2e, 0x5f, 0xeb, 0xac, 0xfd, 0x85, 0xeb, 0x4a, 0xd4, 0xc2, 0x1c, 0x28, 0x10, + 0xe6, 0x85, 0x84, 0x9d, 0x07, 0x6e, 0x1c, 0xa8, 0x3c, 0x67, 0xe7, 0xdb, 0xcc, 0xf5, 0xc2, 0xce, + 0x39, 0x17, 0x75, 0x5e, 0x2f, 0x04, 0x97, 0xca, 0x6d, 0xbd, 0x6b, 0xc0, 0xf0, 0x54, 0x2a, 0x6e, + 0x78, 0xcc, 0x5f, 0x56, 0xd6, 0x9b, 0x06, 0x8e, 0x77, 0x87, 0xe6, 0x09, 0xe8, 0xcf, 0xc8, 0x34, + 0xc4, 0x5e, 0x30, 0x0a, 0xb0, 0x6f, 0x1c, 0x98, 0x7d, 0x70, 0x34, 0x23, 0xf7, 0x84, 0x3e, 0x10, + 0x43, 0x33, 0x2f, 0xc1, 0x05, 0x0d, 0x71, 0xe4, 0xc6, 0x01, 0x25, 0x19, 0xa1, 0x71, 0x16, 0xe2, + 0x68, 0x12, 0xc4, 0x31, 0xf6, 0xb3, 0x11, 0x8d, 0x32, 0x8f, 0x92, 0x18, 0x3f, 0xc6, 0x86, 0x6e, + 0x5e, 0x83, 0xab, 0x7d, 0x60, 0x84, 0x27, 0x34, 0xc1, 0x7e, 0x16, 0xe1, 0x29, 0x9d, 0x45, 0x1e, + 0x36, 0x7a, 0xc3, 0x1f, 0x0d, 0x58, 0x73, 0xb1, 0x82, 0xfb, 0x3b, 0x0e, 0x4f, 0x77, 0xb7, 0x0d, + 0x9b, 0x62, 0xa1, 0xf6, 0xe4, 0xff, 0x99, 0x98, 0x58, 0xe6, 0x9c, 0x41, 0x51, 0x31, 0x9b, 0x95, + 0xbc, 0xad, 0xbd, 0x3d, 0xee, 0x7a, 0x21, 0xff, 0xbb, 0xf5, 0x9d, 0xfa, 0x7c, 0xe8, 0xbd, 0xb1, + 0xeb, 0x7e, 0xea, 0x83, 0xb1, 0x0a, 0x73, 0x0b, 0x09, 0x95, 0x6c, 0x54, 0xe2, 0xc0, 0xf6, 0x49, + 0xf9, 0xb5, 0x05, 0x52, 0xb7, 0x90, 0x69, 0x07, 0xa4, 0x89, 0x93, 0x2a, 0xe0, 0x5b, 0xb7, 0xd4, + 0x14, 0x21, 0xb7, 0x90, 
0x08, 0x75, 0x08, 0x42, 0x89, 0x83, 0x90, 0x82, 0x9e, 0x0f, 0xdb, 0xed, + 0x6e, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x24, 0xa0, 0xf3, 0x92, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_action_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_action_error.pb.go new file mode 100644 index 0000000..d35a7fc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_action_error.pb.go @@ -0,0 +1,157 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/conversion_action_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible conversion action errors. +type ConversionActionErrorEnum_ConversionActionError int32 + +const ( + // Enum unspecified. + ConversionActionErrorEnum_UNSPECIFIED ConversionActionErrorEnum_ConversionActionError = 0 + // The received error code is not known in this version. + ConversionActionErrorEnum_UNKNOWN ConversionActionErrorEnum_ConversionActionError = 1 + // The specified conversion action name already exists. + ConversionActionErrorEnum_DUPLICATE_NAME ConversionActionErrorEnum_ConversionActionError = 2 + // Another conversion action with the specified app id already exists. + ConversionActionErrorEnum_DUPLICATE_APP_ID ConversionActionErrorEnum_ConversionActionError = 3 + // Android first open action conflicts with Google play codeless download + // action tracking the same app. + ConversionActionErrorEnum_TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD ConversionActionErrorEnum_ConversionActionError = 4 + // Android first open action conflicts with Google play codeless download + // action tracking the same app. + ConversionActionErrorEnum_BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION ConversionActionErrorEnum_ConversionActionError = 5 + // The attribution model cannot be set to DATA_DRIVEN because a data-driven + // model has never been generated. + ConversionActionErrorEnum_DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED ConversionActionErrorEnum_ConversionActionError = 6 + // The attribution model cannot be set to DATA_DRIVEN because the + // data-driven model is expired. + ConversionActionErrorEnum_DATA_DRIVEN_MODEL_EXPIRED ConversionActionErrorEnum_ConversionActionError = 7 + // The attribution model cannot be set to DATA_DRIVEN because the + // data-driven model is stale. + ConversionActionErrorEnum_DATA_DRIVEN_MODEL_STALE ConversionActionErrorEnum_ConversionActionError = 8 + // The attribution model cannot be set to DATA_DRIVEN because the + // data-driven model is unavailable or the conversion action was newly + // added. 
+ ConversionActionErrorEnum_DATA_DRIVEN_MODEL_UNKNOWN ConversionActionErrorEnum_ConversionActionError = 9 +) + +var ConversionActionErrorEnum_ConversionActionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DUPLICATE_NAME", + 3: "DUPLICATE_APP_ID", + 4: "TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD", + 5: "BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION", + 6: "DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED", + 7: "DATA_DRIVEN_MODEL_EXPIRED", + 8: "DATA_DRIVEN_MODEL_STALE", + 9: "DATA_DRIVEN_MODEL_UNKNOWN", +} +var ConversionActionErrorEnum_ConversionActionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DUPLICATE_NAME": 2, + "DUPLICATE_APP_ID": 3, + "TWO_CONVERSION_ACTIONS_BIDDING_ON_SAME_APP_DOWNLOAD": 4, + "BIDDING_ON_SAME_APP_DOWNLOAD_AS_GLOBAL_ACTION": 5, + "DATA_DRIVEN_MODEL_WAS_NEVER_GENERATED": 6, + "DATA_DRIVEN_MODEL_EXPIRED": 7, + "DATA_DRIVEN_MODEL_STALE": 8, + "DATA_DRIVEN_MODEL_UNKNOWN": 9, +} + +func (x ConversionActionErrorEnum_ConversionActionError) String() string { + return proto.EnumName(ConversionActionErrorEnum_ConversionActionError_name, int32(x)) +} +func (ConversionActionErrorEnum_ConversionActionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_error_ba5f46d45d84a23e, []int{0, 0} +} + +// Container for enum describing possible conversion action errors. +type ConversionActionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionErrorEnum) Reset() { *m = ConversionActionErrorEnum{} } +func (m *ConversionActionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionActionErrorEnum) ProtoMessage() {} +func (*ConversionActionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_error_ba5f46d45d84a23e, []int{0} +} +func (m *ConversionActionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionErrorEnum.Unmarshal(m, b) +} +func (m *ConversionActionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionActionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionErrorEnum.Merge(dst, src) +} +func (m *ConversionActionErrorEnum) XXX_Size() int { + return xxx_messageInfo_ConversionActionErrorEnum.Size(m) +} +func (m *ConversionActionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionActionErrorEnum)(nil), "google.ads.googleads.v1.errors.ConversionActionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ConversionActionErrorEnum_ConversionActionError", ConversionActionErrorEnum_ConversionActionError_name, ConversionActionErrorEnum_ConversionActionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/conversion_action_error.proto", fileDescriptor_conversion_action_error_ba5f46d45d84a23e) +} + +var fileDescriptor_conversion_action_error_ba5f46d45d84a23e = []byte{ + // 443 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x6d, 0x56, 0x77, 0x75, 0x16, 0x74, 0x18, 0x14, 0xd9, 0x55, 0xf7, 0x50, 0xf0, 0xe0, + 0xc1, 0x84, 0xb0, 0x07, 0x21, 0x7a, 0x79, 0xcd, 0x8c, 
0x61, 0x30, 0x9d, 0x09, 0x49, 0x9a, 0x8a, + 0x14, 0x86, 0xd8, 0x94, 0x50, 0xd8, 0xcd, 0x94, 0x4c, 0xed, 0x07, 0xf2, 0xe8, 0x47, 0xf1, 0xe6, + 0xd7, 0xf0, 0xe2, 0xc9, 0xbb, 0x24, 0xb3, 0xad, 0x87, 0xed, 0xf6, 0x34, 0x7f, 0xde, 0xfb, 0xff, + 0xfe, 0x09, 0xef, 0x3d, 0xf4, 0xa1, 0xd6, 0xba, 0xbe, 0x5a, 0x78, 0x65, 0x65, 0x3c, 0x2b, 0x3b, + 0xb5, 0xf1, 0xbd, 0x45, 0xdb, 0xea, 0xd6, 0x78, 0x73, 0xdd, 0x6c, 0x16, 0xad, 0x59, 0xea, 0x46, + 0x95, 0xf3, 0x75, 0xf7, 0xf4, 0x0d, 0x77, 0xd5, 0xea, 0xb5, 0x26, 0x17, 0x16, 0x71, 0xcb, 0xca, + 0xb8, 0x3b, 0xda, 0xdd, 0xf8, 0xae, 0xa5, 0xcf, 0x5f, 0x6e, 0xd3, 0x57, 0x4b, 0xaf, 0x6c, 0x1a, + 0xbd, 0x2e, 0xbb, 0x08, 0x63, 0xe9, 0xe1, 0x1f, 0x07, 0x9d, 0x85, 0xbb, 0x7c, 0xe8, 0xe3, 0x59, + 0x07, 0xb2, 0xe6, 0xdb, 0xf5, 0xf0, 0x97, 0x83, 0x9e, 0xed, 0xed, 0x92, 0x27, 0xe8, 0x74, 0x22, + 0xb2, 0x84, 0x85, 0xfc, 0x23, 0x67, 0x14, 0xdf, 0x23, 0xa7, 0xe8, 0x64, 0x22, 0x3e, 0x09, 0x39, + 0x15, 0x78, 0x40, 0x08, 0x7a, 0x4c, 0x27, 0x49, 0xcc, 0x43, 0xc8, 0x99, 0x12, 0x30, 0x66, 0xd8, + 0x21, 0x4f, 0x11, 0xfe, 0x5f, 0x83, 0x24, 0x51, 0x9c, 0xe2, 0x23, 0xf2, 0x0e, 0x5d, 0xe6, 0x53, + 0xa9, 0x42, 0x29, 0x0a, 0x96, 0x66, 0x5c, 0x0a, 0x05, 0x61, 0xce, 0xa5, 0xc8, 0xd4, 0x88, 0x53, + 0xca, 0x45, 0xa4, 0xa4, 0x50, 0x19, 0x8c, 0x2d, 0x42, 0xe5, 0x54, 0xc4, 0x12, 0x28, 0xbe, 0x4f, + 0x7c, 0xf4, 0xf6, 0x90, 0x43, 0x41, 0xa6, 0xa2, 0x58, 0x8e, 0x20, 0xbe, 0x09, 0xc4, 0x0f, 0xc8, + 0x1b, 0xf4, 0x9a, 0x42, 0x0e, 0x8a, 0xa6, 0xbc, 0x60, 0x42, 0x8d, 0x25, 0x65, 0xb1, 0x9a, 0x42, + 0xa6, 0x04, 0x2b, 0x58, 0xaa, 0x22, 0x26, 0x58, 0x0a, 0x39, 0xa3, 0xf8, 0x98, 0xbc, 0x42, 0x67, + 0xb7, 0xad, 0xec, 0x73, 0xc2, 0x53, 0x46, 0xf1, 0x09, 0x79, 0x81, 0x9e, 0xdf, 0x6e, 0x67, 0x39, + 0xc4, 0x0c, 0x3f, 0xdc, 0xcf, 0x6e, 0x67, 0xf3, 0x68, 0xf4, 0x77, 0x80, 0x86, 0x73, 0x7d, 0xed, + 0x1e, 0x5e, 0xdb, 0xe8, 0x7c, 0xef, 0xdc, 0x93, 0x6e, 0x69, 0xc9, 0xe0, 0x0b, 0xbd, 0xa1, 0x6b, + 0x7d, 0x55, 0x36, 0xb5, 0xab, 0xdb, 0xda, 0xab, 0x17, 0x4d, 0xbf, 0xd2, 0xed, 0x09, 0xad, 0x96, + 0xe6, 0xae, 0x8b, 0x7a, 0x6f, 0x9f, 0xef, 0xce, 0x51, 0x04, 0xf0, 0xc3, 0xb9, 0x88, 0x6c, 0x18, + 0x54, 0xc6, 0xb5, 0xb2, 0x53, 0x85, 0xef, 0xf6, 0x9f, 0x34, 0x3f, 0xb7, 0x86, 0x19, 0x54, 0x66, + 0xb6, 0x33, 0xcc, 0x0a, 0x7f, 0x66, 0x0d, 0xbf, 0x9d, 0xa1, 0xad, 0x06, 0x01, 0x54, 0x26, 0x08, + 0x76, 0x96, 0x20, 0x28, 0xfc, 0x20, 0xb0, 0xa6, 0xaf, 0xc7, 0xfd, 0xdf, 0x5d, 0xfe, 0x0b, 0x00, + 0x00, 0xff, 0xff, 0x0c, 0xe0, 0x92, 0x88, 0xee, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_adjustment_upload_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_adjustment_upload_error.pb.go new file mode 100644 index 0000000..c637507 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_adjustment_upload_error.pb.go @@ -0,0 +1,164 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/conversion_adjustment_upload_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible conversion adjustment upload errors. +type ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError int32 + +const ( + // Not specified. + ConversionAdjustmentUploadErrorEnum_UNSPECIFIED ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 0 + // The received error code is not known in this version. + ConversionAdjustmentUploadErrorEnum_UNKNOWN ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 1 + // The specified conversion action was created too recently. + // Please try the upload again after 4-6 hours have passed since the + // conversion action was created. + ConversionAdjustmentUploadErrorEnum_TOO_RECENT_CONVERSION_ACTION ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 2 + // No conversion action of a supported ConversionActionType that matches the + // provided information can be found for the customer. + ConversionAdjustmentUploadErrorEnum_INVALID_CONVERSION_ACTION ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 3 + // A retraction was already reported for this conversion. + ConversionAdjustmentUploadErrorEnum_CONVERSION_ALREADY_RETRACTED ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 4 + // A conversion for the supplied combination of conversion + // action and conversion identifier could not be found. + ConversionAdjustmentUploadErrorEnum_CONVERSION_NOT_FOUND ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 5 + // The specified conversion has already expired. Conversions expire after 55 + // days, after which adjustments cannot be reported against them. + ConversionAdjustmentUploadErrorEnum_CONVERSION_EXPIRED ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 6 + // The supplied adjustment date time precedes that of the original + // conversion. + ConversionAdjustmentUploadErrorEnum_ADJUSTMENT_PRECEDES_CONVERSION ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 7 + // A restatement with a more recent adjustment date time was already + // reported for this conversion. + ConversionAdjustmentUploadErrorEnum_MORE_RECENT_RESTATEMENT_FOUND ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 8 + // The conversion was created too recently. + ConversionAdjustmentUploadErrorEnum_TOO_RECENT_CONVERSION ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 9 + // Restatements cannot be reported for a conversion action that always uses + // the default value. 
+ ConversionAdjustmentUploadErrorEnum_CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError = 10 +) + +var ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TOO_RECENT_CONVERSION_ACTION", + 3: "INVALID_CONVERSION_ACTION", + 4: "CONVERSION_ALREADY_RETRACTED", + 5: "CONVERSION_NOT_FOUND", + 6: "CONVERSION_EXPIRED", + 7: "ADJUSTMENT_PRECEDES_CONVERSION", + 8: "MORE_RECENT_RESTATEMENT_FOUND", + 9: "TOO_RECENT_CONVERSION", + 10: "CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE", +} +var ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TOO_RECENT_CONVERSION_ACTION": 2, + "INVALID_CONVERSION_ACTION": 3, + "CONVERSION_ALREADY_RETRACTED": 4, + "CONVERSION_NOT_FOUND": 5, + "CONVERSION_EXPIRED": 6, + "ADJUSTMENT_PRECEDES_CONVERSION": 7, + "MORE_RECENT_RESTATEMENT_FOUND": 8, + "TOO_RECENT_CONVERSION": 9, + "CANNOT_RESTATE_CONVERSION_ACTION_THAT_ALWAYS_USES_DEFAULT_CONVERSION_VALUE": 10, +} + +func (x ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError) String() string { + return proto.EnumName(ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError_name, int32(x)) +} +func (ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_error_27a6081dae4d6cab, []int{0, 0} +} + +// Container for enum describing possible conversion adjustment upload errors. +type ConversionAdjustmentUploadErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAdjustmentUploadErrorEnum) Reset() { *m = ConversionAdjustmentUploadErrorEnum{} } +func (m *ConversionAdjustmentUploadErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionAdjustmentUploadErrorEnum) ProtoMessage() {} +func (*ConversionAdjustmentUploadErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_error_27a6081dae4d6cab, []int{0} +} +func (m *ConversionAdjustmentUploadErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAdjustmentUploadErrorEnum.Unmarshal(m, b) +} +func (m *ConversionAdjustmentUploadErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAdjustmentUploadErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionAdjustmentUploadErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAdjustmentUploadErrorEnum.Merge(dst, src) +} +func (m *ConversionAdjustmentUploadErrorEnum) XXX_Size() int { + return xxx_messageInfo_ConversionAdjustmentUploadErrorEnum.Size(m) +} +func (m *ConversionAdjustmentUploadErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAdjustmentUploadErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAdjustmentUploadErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionAdjustmentUploadErrorEnum)(nil), "google.ads.googleads.v1.errors.ConversionAdjustmentUploadErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError", ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError_name, 
ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/conversion_adjustment_upload_error.proto", fileDescriptor_conversion_adjustment_upload_error_27a6081dae4d6cab) +} + +var fileDescriptor_conversion_adjustment_upload_error_27a6081dae4d6cab = []byte{ + // 478 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xd1, 0x6e, 0xd3, 0x30, + 0x14, 0x86, 0x59, 0x0b, 0x1b, 0x78, 0x17, 0x44, 0x16, 0x20, 0x86, 0xb6, 0x02, 0x85, 0xeb, 0x44, + 0x11, 0x77, 0xe1, 0xca, 0x8b, 0x4f, 0x4b, 0x46, 0xe6, 0x44, 0x89, 0x93, 0x31, 0x54, 0xc9, 0x0a, + 0x4b, 0x14, 0x15, 0xb5, 0x71, 0x15, 0xa7, 0x7d, 0x04, 0x1e, 0x84, 0x4b, 0x2e, 0x78, 0x10, 0x1e, + 0x85, 0x97, 0x00, 0x25, 0x5e, 0xa3, 0x22, 0x06, 0xbb, 0xca, 0x91, 0xfd, 0x9d, 0xff, 0xff, 0x9d, + 0x73, 0xd0, 0xb4, 0x94, 0xb2, 0x5c, 0x14, 0x56, 0x96, 0x2b, 0x4b, 0x97, 0x6d, 0xb5, 0xb1, 0xad, + 0xa2, 0xae, 0x65, 0xad, 0xac, 0x2b, 0x59, 0x6d, 0x8a, 0x5a, 0xcd, 0x65, 0x25, 0xb2, 0xfc, 0xf3, + 0x5a, 0x35, 0xcb, 0xa2, 0x6a, 0xc4, 0x7a, 0xb5, 0x90, 0x59, 0x2e, 0x3a, 0xc6, 0x5c, 0xd5, 0xb2, + 0x91, 0x78, 0xa4, 0xbb, 0xcd, 0x2c, 0x57, 0x66, 0x2f, 0x64, 0x6e, 0x6c, 0x53, 0x0b, 0x3d, 0x3b, + 0xde, 0x1a, 0xad, 0xe6, 0x56, 0x56, 0x55, 0xb2, 0xc9, 0x9a, 0xb9, 0xac, 0x94, 0xee, 0x1e, 0x7f, + 0x1f, 0xa2, 0x57, 0x6e, 0x6f, 0x45, 0x7a, 0xa7, 0xa4, 0x33, 0x82, 0x56, 0x02, 0xaa, 0xf5, 0x72, + 0xfc, 0x65, 0x88, 0x9e, 0xdf, 0xc2, 0xe1, 0x87, 0xe8, 0x30, 0x61, 0x71, 0x08, 0xae, 0x37, 0xf1, + 0x80, 0x1a, 0x77, 0xf0, 0x21, 0x3a, 0x48, 0xd8, 0x7b, 0x16, 0x5c, 0x30, 0x63, 0x0f, 0xbf, 0x40, + 0xc7, 0x3c, 0x08, 0x44, 0x04, 0x2e, 0x30, 0x2e, 0xdc, 0x80, 0xa5, 0x10, 0xc5, 0x5e, 0xc0, 0x04, + 0x71, 0xb9, 0x17, 0x30, 0x63, 0x80, 0x4f, 0xd0, 0x91, 0xc7, 0x52, 0xe2, 0x7b, 0xf4, 0x86, 0xeb, + 0x61, 0x2b, 0xb0, 0x7b, 0xec, 0x47, 0x40, 0xe8, 0xa5, 0x88, 0x80, 0x47, 0xc4, 0xe5, 0x40, 0x8d, + 0xbb, 0xf8, 0x29, 0x7a, 0xb4, 0x43, 0xb0, 0x80, 0x8b, 0x49, 0x90, 0x30, 0x6a, 0xdc, 0xc3, 0x4f, + 0x10, 0xde, 0xb9, 0x81, 0x0f, 0xa1, 0x17, 0x01, 0x35, 0xf6, 0xf1, 0x18, 0x8d, 0x08, 0x3d, 0x4b, + 0x62, 0x7e, 0xde, 0x86, 0x0a, 0xdb, 0x70, 0x14, 0xe2, 0x1d, 0x7b, 0xe3, 0x00, 0xbf, 0x44, 0x27, + 0xe7, 0x41, 0x04, 0xdb, 0xe4, 0x11, 0xc4, 0x9c, 0x70, 0xe8, 0x1a, 0xb4, 0xfc, 0x7d, 0x7c, 0x84, + 0x1e, 0xdf, 0xf8, 0x36, 0xe3, 0x01, 0x66, 0xe8, 0xcc, 0x25, 0xac, 0xcd, 0x72, 0xdd, 0xf8, 0xf7, + 0xdb, 0x04, 0x7f, 0x47, 0xb8, 0x20, 0xfe, 0x05, 0xb9, 0x8c, 0x45, 0x12, 0x43, 0x2c, 0x28, 0x4c, + 0x48, 0xe2, 0xff, 0xf1, 0x97, 0x52, 0xe2, 0x27, 0x60, 0xa0, 0xd3, 0x5f, 0x7b, 0x68, 0x7c, 0x25, + 0x97, 0xe6, 0xff, 0xa7, 0x7e, 0xfa, 0xfa, 0x96, 0x61, 0x85, 0xed, 0xf4, 0xc3, 0xbd, 0x8f, 0xf4, + 0x5a, 0xa7, 0x94, 0x8b, 0xac, 0x2a, 0x4d, 0x59, 0x97, 0x56, 0x59, 0x54, 0xdd, 0x6e, 0x6c, 0xd7, + 0x72, 0x35, 0x57, 0xff, 0xda, 0xd2, 0xb7, 0xfa, 0xf3, 0x75, 0x30, 0x9c, 0x12, 0xf2, 0x6d, 0x30, + 0x9a, 0x6a, 0x31, 0x92, 0x2b, 0x53, 0x97, 0x6d, 0x95, 0xda, 0x66, 0x67, 0xa9, 0x7e, 0x6c, 0x81, + 0x19, 0xc9, 0xd5, 0xac, 0x07, 0x66, 0xa9, 0x3d, 0xd3, 0xc0, 0xcf, 0xc1, 0x58, 0x9f, 0x3a, 0x0e, + 0xc9, 0x95, 0xe3, 0xf4, 0x88, 0xe3, 0xa4, 0xb6, 0xe3, 0x68, 0xe8, 0xd3, 0x7e, 0x97, 0xee, 0xcd, + 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x23, 0x00, 0x6b, 0x77, 0x42, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_upload_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_upload_error.pb.go new file mode 
100644 index 0000000..c695a62 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/conversion_upload_error.pb.go @@ -0,0 +1,237 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/conversion_upload_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible conversion upload errors. +type ConversionUploadErrorEnum_ConversionUploadError int32 + +const ( + // Enum unspecified. + ConversionUploadErrorEnum_UNSPECIFIED ConversionUploadErrorEnum_ConversionUploadError = 0 + // The received error code is not known in this version. + ConversionUploadErrorEnum_UNKNOWN ConversionUploadErrorEnum_ConversionUploadError = 1 + // The request contained more than 2000 conversions. + ConversionUploadErrorEnum_TOO_MANY_CONVERSIONS_IN_REQUEST ConversionUploadErrorEnum_ConversionUploadError = 2 + // The specified gclid could not be decoded. + ConversionUploadErrorEnum_UNPARSEABLE_GCLID ConversionUploadErrorEnum_ConversionUploadError = 3 + // The specified conversion_date_time is before the event time + // associated with the given gclid. + ConversionUploadErrorEnum_CONVERSION_PRECEDES_GCLID ConversionUploadErrorEnum_ConversionUploadError = 4 + // The click associated with the given gclid is either too old to be + // imported or occurred outside of the click through lookback window for the + // specified conversion action. + ConversionUploadErrorEnum_EXPIRED_GCLID ConversionUploadErrorEnum_ConversionUploadError = 5 + // The click associated with the given gclid occurred too recently. Please + // try uploading again after 24 hours have passed since the click occurred. + ConversionUploadErrorEnum_TOO_RECENT_GCLID ConversionUploadErrorEnum_ConversionUploadError = 6 + // The click associated with the given gclid could not be found in the + // system. This can happen if Google Click IDs are collected for non Google + // Ads clicks. + ConversionUploadErrorEnum_GCLID_NOT_FOUND ConversionUploadErrorEnum_ConversionUploadError = 7 + // The click associated with the given gclid is owned by a customer + // account that the uploading customer does not manage. + ConversionUploadErrorEnum_UNAUTHORIZED_CUSTOMER ConversionUploadErrorEnum_ConversionUploadError = 8 + // No upload eligible conversion action that matches the provided + // information can be found for the customer. + ConversionUploadErrorEnum_INVALID_CONVERSION_ACTION ConversionUploadErrorEnum_ConversionUploadError = 9 + // The specified conversion action was created too recently. + // Please try the upload again after 4-6 hours have passed since the + // conversion action was created. + ConversionUploadErrorEnum_TOO_RECENT_CONVERSION_ACTION ConversionUploadErrorEnum_ConversionUploadError = 10 + // The click associated with the given gclid does not contain conversion + // tracking information. 
+ ConversionUploadErrorEnum_CONVERSION_TRACKING_NOT_ENABLED_AT_IMPRESSION_TIME ConversionUploadErrorEnum_ConversionUploadError = 11 + // The specified conversion action does not use an external attribution + // model, but external_attribution_data was set. + ConversionUploadErrorEnum_EXTERNAL_ATTRIBUTION_DATA_SET_FOR_NON_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION ConversionUploadErrorEnum_ConversionUploadError = 12 + // The specified conversion action uses an external attribution model, but + // external_attribution_data or one of its contained fields was not set. + // Both external_attribution_credit and external_attribution_model must be + // set for externally attributed conversion actions. + ConversionUploadErrorEnum_EXTERNAL_ATTRIBUTION_DATA_NOT_SET_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION ConversionUploadErrorEnum_ConversionUploadError = 13 + // Order IDs are not supported for conversion actions which use an external + // attribution model. + ConversionUploadErrorEnum_ORDER_ID_NOT_PERMITTED_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION ConversionUploadErrorEnum_ConversionUploadError = 14 + // A conversion with the same order id and conversion action combination + // already exists in our system. + ConversionUploadErrorEnum_ORDER_ID_ALREADY_IN_USE ConversionUploadErrorEnum_ConversionUploadError = 15 + // The request contained two or more conversions with the same order id and + // conversion action combination. + ConversionUploadErrorEnum_DUPLICATE_ORDER_ID ConversionUploadErrorEnum_ConversionUploadError = 16 + // The call occurred too recently. Please try uploading again after 24 hours + // have passed since the call occurred. + ConversionUploadErrorEnum_TOO_RECENT_CALL ConversionUploadErrorEnum_ConversionUploadError = 17 + // The click that initiated the call is too old for this conversion to be + // imported. + ConversionUploadErrorEnum_EXPIRED_CALL ConversionUploadErrorEnum_ConversionUploadError = 18 + // The call or the click leading to the call was not found. + ConversionUploadErrorEnum_CALL_NOT_FOUND ConversionUploadErrorEnum_ConversionUploadError = 19 + // The specified conversion_date_time is before the call_start_date_time. + ConversionUploadErrorEnum_CONVERSION_PRECEDES_CALL ConversionUploadErrorEnum_ConversionUploadError = 20 + // The click associated with the call does not contain conversion tracking + // information. + ConversionUploadErrorEnum_CONVERSION_TRACKING_NOT_ENABLED_AT_CALL_TIME ConversionUploadErrorEnum_ConversionUploadError = 21 + // The caller’s phone number cannot be parsed. It should be formatted either + // as E.164 "+16502531234", International "+64 3-331 6005" or US national + // number "6502531234". 
+ ConversionUploadErrorEnum_UNPARSEABLE_CALLERS_PHONE_NUMBER ConversionUploadErrorEnum_ConversionUploadError = 22 +) + +var ConversionUploadErrorEnum_ConversionUploadError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TOO_MANY_CONVERSIONS_IN_REQUEST", + 3: "UNPARSEABLE_GCLID", + 4: "CONVERSION_PRECEDES_GCLID", + 5: "EXPIRED_GCLID", + 6: "TOO_RECENT_GCLID", + 7: "GCLID_NOT_FOUND", + 8: "UNAUTHORIZED_CUSTOMER", + 9: "INVALID_CONVERSION_ACTION", + 10: "TOO_RECENT_CONVERSION_ACTION", + 11: "CONVERSION_TRACKING_NOT_ENABLED_AT_IMPRESSION_TIME", + 12: "EXTERNAL_ATTRIBUTION_DATA_SET_FOR_NON_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION", + 13: "EXTERNAL_ATTRIBUTION_DATA_NOT_SET_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION", + 14: "ORDER_ID_NOT_PERMITTED_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION", + 15: "ORDER_ID_ALREADY_IN_USE", + 16: "DUPLICATE_ORDER_ID", + 17: "TOO_RECENT_CALL", + 18: "EXPIRED_CALL", + 19: "CALL_NOT_FOUND", + 20: "CONVERSION_PRECEDES_CALL", + 21: "CONVERSION_TRACKING_NOT_ENABLED_AT_CALL_TIME", + 22: "UNPARSEABLE_CALLERS_PHONE_NUMBER", +} +var ConversionUploadErrorEnum_ConversionUploadError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TOO_MANY_CONVERSIONS_IN_REQUEST": 2, + "UNPARSEABLE_GCLID": 3, + "CONVERSION_PRECEDES_GCLID": 4, + "EXPIRED_GCLID": 5, + "TOO_RECENT_GCLID": 6, + "GCLID_NOT_FOUND": 7, + "UNAUTHORIZED_CUSTOMER": 8, + "INVALID_CONVERSION_ACTION": 9, + "TOO_RECENT_CONVERSION_ACTION": 10, + "CONVERSION_TRACKING_NOT_ENABLED_AT_IMPRESSION_TIME": 11, + "EXTERNAL_ATTRIBUTION_DATA_SET_FOR_NON_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION": 12, + "EXTERNAL_ATTRIBUTION_DATA_NOT_SET_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION": 13, + "ORDER_ID_NOT_PERMITTED_FOR_EXTERNALLY_ATTRIBUTED_CONVERSION_ACTION": 14, + "ORDER_ID_ALREADY_IN_USE": 15, + "DUPLICATE_ORDER_ID": 16, + "TOO_RECENT_CALL": 17, + "EXPIRED_CALL": 18, + "CALL_NOT_FOUND": 19, + "CONVERSION_PRECEDES_CALL": 20, + "CONVERSION_TRACKING_NOT_ENABLED_AT_CALL_TIME": 21, + "UNPARSEABLE_CALLERS_PHONE_NUMBER": 22, +} + +func (x ConversionUploadErrorEnum_ConversionUploadError) String() string { + return proto.EnumName(ConversionUploadErrorEnum_ConversionUploadError_name, int32(x)) +} +func (ConversionUploadErrorEnum_ConversionUploadError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_error_c36471db71103be7, []int{0, 0} +} + +// Container for enum describing possible conversion upload errors. 
+type ConversionUploadErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionUploadErrorEnum) Reset() { *m = ConversionUploadErrorEnum{} } +func (m *ConversionUploadErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ConversionUploadErrorEnum) ProtoMessage() {} +func (*ConversionUploadErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_error_c36471db71103be7, []int{0} +} +func (m *ConversionUploadErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionUploadErrorEnum.Unmarshal(m, b) +} +func (m *ConversionUploadErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionUploadErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ConversionUploadErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionUploadErrorEnum.Merge(dst, src) +} +func (m *ConversionUploadErrorEnum) XXX_Size() int { + return xxx_messageInfo_ConversionUploadErrorEnum.Size(m) +} +func (m *ConversionUploadErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionUploadErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionUploadErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ConversionUploadErrorEnum)(nil), "google.ads.googleads.v1.errors.ConversionUploadErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ConversionUploadErrorEnum_ConversionUploadError", ConversionUploadErrorEnum_ConversionUploadError_name, ConversionUploadErrorEnum_ConversionUploadError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/conversion_upload_error.proto", fileDescriptor_conversion_upload_error_c36471db71103be7) +} + +var fileDescriptor_conversion_upload_error_c36471db71103be7 = []byte{ + // 638 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcd, 0x6e, 0xd3, 0x4e, + 0x10, 0xff, 0x37, 0xfd, 0x93, 0xc2, 0xf6, 0x6b, 0xbb, 0x6d, 0x0a, 0x2d, 0xa5, 0x54, 0x81, 0x23, + 0x72, 0x08, 0x48, 0x1c, 0x0c, 0x97, 0x8d, 0x3d, 0x4d, 0x57, 0x75, 0xd6, 0xee, 0x7a, 0x37, 0xb4, + 0x55, 0xa4, 0x55, 0x68, 0xa2, 0x28, 0x52, 0xeb, 0x8d, 0xe2, 0xb6, 0x0f, 0xc4, 0x91, 0x07, 0xe0, + 0x21, 0x38, 0xf3, 0x14, 0xdc, 0xb9, 0xa3, 0xb5, 0xe3, 0x10, 0x89, 0x50, 0x95, 0x53, 0x46, 0x33, + 0xbf, 0x8f, 0x89, 0x67, 0x66, 0xd1, 0x87, 0x81, 0x31, 0x83, 0xcb, 0x7e, 0xad, 0xdb, 0x4b, 0x6b, + 0x79, 0x68, 0xa3, 0xdb, 0x7a, 0xad, 0x3f, 0x1e, 0x9b, 0x71, 0x5a, 0xbb, 0x30, 0xc9, 0x6d, 0x7f, + 0x9c, 0x0e, 0x4d, 0xa2, 0x6f, 0x46, 0x97, 0xa6, 0xdb, 0xd3, 0x59, 0xc1, 0x19, 0x8d, 0xcd, 0xb5, + 0x21, 0xfb, 0x39, 0xc5, 0xe9, 0xf6, 0x52, 0x67, 0xca, 0x76, 0x6e, 0xeb, 0x4e, 0xce, 0xde, 0xdd, + 0x2b, 0xd4, 0x47, 0xc3, 0x5a, 0x37, 0x49, 0xcc, 0x75, 0xf7, 0x7a, 0x68, 0x92, 0x34, 0x67, 0x57, + 0xbf, 0x97, 0xd1, 0x8e, 0x37, 0xd5, 0x57, 0x99, 0x3c, 0x58, 0x22, 0x24, 0x37, 0x57, 0xd5, 0xaf, + 0x65, 0x54, 0x99, 0x5b, 0x25, 0xeb, 0x68, 0x59, 0xf1, 0x38, 0x02, 0x8f, 0x1d, 0x32, 0xf0, 0xf1, + 0x7f, 0x64, 0x19, 0x2d, 0x29, 0x7e, 0xcc, 0xc3, 0x8f, 0x1c, 0x2f, 0x90, 0x17, 0xe8, 0xb9, 0x0c, + 0x43, 0xdd, 0xa2, 0xfc, 0x4c, 0x7b, 0x21, 0x6f, 0x83, 0x88, 0x59, 0xc8, 0x63, 0xcd, 0xb8, 0x16, + 0x70, 0xa2, 0x20, 0x96, 0xb8, 0x44, 0x2a, 0x68, 0x43, 0xf1, 0x88, 0x8a, 0x18, 0x68, 0x23, 0x00, + 0xdd, 0xf4, 0x02, 0xe6, 0xe3, 0x45, 0xf2, 0x0c, 0xed, 0xfc, 0xa6, 0xe8, 0x48, 0x80, 0x07, 0x3e, + 0xc4, 0x93, 0xf2, 0xff, 0x64, 0x03, 0xad, 0xc2, 0x69, 
0xc4, 0x04, 0xf8, 0x93, 0xd4, 0x03, 0xb2, + 0x85, 0xb0, 0x75, 0xb3, 0x48, 0x2e, 0x27, 0xd9, 0x32, 0xd9, 0x44, 0xeb, 0x59, 0xa8, 0x79, 0x28, + 0xf5, 0x61, 0xa8, 0xb8, 0x8f, 0x97, 0xc8, 0x0e, 0xaa, 0x28, 0x4e, 0x95, 0x3c, 0x0a, 0x05, 0x3b, + 0x07, 0x5f, 0x7b, 0x2a, 0x96, 0x61, 0x0b, 0x04, 0x7e, 0x68, 0x7d, 0x19, 0x6f, 0x53, 0xcb, 0x98, + 0xf1, 0xa7, 0x9e, 0x64, 0x21, 0xc7, 0x8f, 0xc8, 0x01, 0xda, 0x9b, 0x31, 0xf9, 0x13, 0x81, 0xc8, + 0x3b, 0xf4, 0x66, 0x26, 0x2d, 0x05, 0xf5, 0x8e, 0x19, 0x6f, 0x66, 0xf6, 0xc0, 0xed, 0x5f, 0xf4, + 0x35, 0x95, 0x9a, 0xb5, 0x22, 0x01, 0x71, 0x0e, 0x61, 0x2d, 0xc0, 0xcb, 0xe4, 0x04, 0xb5, 0xe0, + 0x54, 0x82, 0xe0, 0x34, 0xd0, 0x54, 0x4a, 0xc1, 0x1a, 0xca, 0x2a, 0x6a, 0x9f, 0x4a, 0xaa, 0x63, + 0xb0, 0xcd, 0x0b, 0xcd, 0x43, 0xae, 0x0b, 0x54, 0x70, 0x36, 0xc5, 0xc1, 0xbc, 0x66, 0x57, 0xee, + 0x96, 0xb4, 0x0d, 0x15, 0xb2, 0xf7, 0x95, 0x5c, 0x25, 0x87, 0xa8, 0x11, 0x0a, 0x1f, 0x84, 0x9e, + 0x7c, 0xd1, 0x08, 0x44, 0x8b, 0x49, 0x8b, 0xfe, 0x17, 0x9d, 0x35, 0xf2, 0x14, 0x3d, 0x9e, 0xea, + 0xd0, 0x40, 0x00, 0xf5, 0xcf, 0xec, 0x5a, 0xa8, 0x18, 0xf0, 0x3a, 0xd9, 0x46, 0xc4, 0x57, 0x51, + 0xc0, 0x3c, 0x2a, 0x41, 0x17, 0x30, 0x8c, 0xed, 0x2c, 0x67, 0x3f, 0x3e, 0x0d, 0x02, 0xbc, 0x41, + 0x30, 0x5a, 0x29, 0x36, 0x21, 0xcb, 0x10, 0x42, 0xd0, 0x9a, 0x8d, 0x66, 0x26, 0xbe, 0x49, 0xf6, + 0xd0, 0x93, 0x79, 0xeb, 0x94, 0x31, 0xb6, 0xc8, 0x6b, 0xf4, 0xea, 0x1e, 0x33, 0xcb, 0x44, 0xb3, + 0x69, 0x55, 0xc8, 0x4b, 0x74, 0x30, 0xbb, 0xb5, 0xb6, 0x04, 0x22, 0xd6, 0xd1, 0x51, 0xc8, 0x41, + 0x73, 0xd5, 0x6a, 0x80, 0xc0, 0xdb, 0x8d, 0x9f, 0x0b, 0xa8, 0x7a, 0x61, 0xae, 0x9c, 0xbb, 0x6f, + 0xb3, 0xb1, 0x3b, 0xf7, 0xb8, 0x22, 0x7b, 0x99, 0xd1, 0xc2, 0xb9, 0x3f, 0x61, 0x0f, 0xcc, 0x65, + 0x37, 0x19, 0x38, 0x66, 0x3c, 0xa8, 0x0d, 0xfa, 0x49, 0x76, 0xb7, 0xc5, 0x3b, 0x31, 0x1a, 0xa6, + 0x7f, 0x7b, 0x36, 0xde, 0xe7, 0x3f, 0x9f, 0x4b, 0x8b, 0x4d, 0x4a, 0xbf, 0x94, 0xf6, 0x9b, 0xb9, + 0x18, 0xed, 0xa5, 0x4e, 0x1e, 0xda, 0xa8, 0x5d, 0x77, 0x32, 0xcb, 0xf4, 0x5b, 0x01, 0xe8, 0xd0, + 0x5e, 0xda, 0x99, 0x02, 0x3a, 0xed, 0x7a, 0x27, 0x07, 0xfc, 0x28, 0x55, 0xf3, 0xac, 0xeb, 0xd2, + 0x5e, 0xea, 0xba, 0x53, 0x88, 0xeb, 0xb6, 0xeb, 0xae, 0x9b, 0x83, 0x3e, 0x95, 0xb3, 0xee, 0xde, + 0xfe, 0x0a, 0x00, 0x00, 0xff, 0xff, 0x09, 0x85, 0xd8, 0x22, 0xd3, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/country_code_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/country_code_error.pb.go new file mode 100644 index 0000000..8132c86 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/country_code_error.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/country_code_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing country code errors. 
+type CountryCodeErrorEnum_CountryCodeError int32 + +const ( + // Enum unspecified. + CountryCodeErrorEnum_UNSPECIFIED CountryCodeErrorEnum_CountryCodeError = 0 + // The received error code is not known in this version. + CountryCodeErrorEnum_UNKNOWN CountryCodeErrorEnum_CountryCodeError = 1 + // The country code is invalid. + CountryCodeErrorEnum_INVALID_COUNTRY_CODE CountryCodeErrorEnum_CountryCodeError = 2 +) + +var CountryCodeErrorEnum_CountryCodeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_COUNTRY_CODE", +} +var CountryCodeErrorEnum_CountryCodeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_COUNTRY_CODE": 2, +} + +func (x CountryCodeErrorEnum_CountryCodeError) String() string { + return proto.EnumName(CountryCodeErrorEnum_CountryCodeError_name, int32(x)) +} +func (CountryCodeErrorEnum_CountryCodeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_country_code_error_f5f418d6eb875d13, []int{0, 0} +} + +// Container for enum describing country code errors. +type CountryCodeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CountryCodeErrorEnum) Reset() { *m = CountryCodeErrorEnum{} } +func (m *CountryCodeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CountryCodeErrorEnum) ProtoMessage() {} +func (*CountryCodeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_country_code_error_f5f418d6eb875d13, []int{0} +} +func (m *CountryCodeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CountryCodeErrorEnum.Unmarshal(m, b) +} +func (m *CountryCodeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CountryCodeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CountryCodeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CountryCodeErrorEnum.Merge(dst, src) +} +func (m *CountryCodeErrorEnum) XXX_Size() int { + return xxx_messageInfo_CountryCodeErrorEnum.Size(m) +} +func (m *CountryCodeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CountryCodeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CountryCodeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CountryCodeErrorEnum)(nil), "google.ads.googleads.v1.errors.CountryCodeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CountryCodeErrorEnum_CountryCodeError", CountryCodeErrorEnum_CountryCodeError_name, CountryCodeErrorEnum_CountryCodeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/country_code_error.proto", fileDescriptor_country_code_error_f5f418d6eb875d13) +} + +var fileDescriptor_country_code_error_f5f418d6eb875d13 = []byte{ + // 306 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x05, 0x85, 0xec, 0x60, 0x29, 0x13, 0x44, 0x64, 0x87, 0x3e, 0x40, 0x42, 0xf1, + 0x20, 0xc4, 0x53, 0xd6, 0xd6, 0x51, 0x95, 0x6c, 0xa8, 0xab, 0x28, 0x85, 0xd2, 0x2d, 0x25, 0x0c, + 0xb6, 0xfc, 0x47, 0xd2, 0x0d, 0x7c, 0x1d, 0x8f, 0x3e, 0x8a, 0x8f, 0xe2, 0xc9, 0x47, 0x90, 0x36, + 0xb6, 0x87, 0x81, 0x9e, 0xf2, 0xf1, 0xf1, 0xfb, 0xbe, 0x7c, 0xfc, 0xd1, 0x95, 0x04, 0x90, 0xab, + 0x92, 0x14, 0xc2, 0x10, 0x2b, 0x6b, 0xb5, 0x0b, 0x48, 0xa9, 0x35, 0x68, 0x43, 0x16, 0xb0, 0x55, + 0x95, 0x7e, 0xcb, 0x17, 0x20, 0xca, 0xbc, 0xf1, 0xf0, 0x46, 0x43, 0x05, 0xde, 0xd0, 0xd2, 0xb8, + 0x10, 0x06, 0x77, 0x41, 0xbc, 
0x0b, 0xb0, 0x0d, 0x9e, 0x5f, 0xb4, 0xc5, 0x9b, 0x25, 0x29, 0x94, + 0x82, 0xaa, 0xa8, 0x96, 0xa0, 0x8c, 0x4d, 0xfb, 0x73, 0x34, 0x08, 0x6d, 0x73, 0x08, 0xa2, 0x8c, + 0xeb, 0x48, 0xac, 0xb6, 0x6b, 0xff, 0x16, 0xb9, 0xfb, 0xbe, 0x77, 0x82, 0xfa, 0x33, 0xfe, 0x38, + 0x8d, 0xc3, 0xe4, 0x26, 0x89, 0x23, 0xf7, 0xc0, 0xeb, 0xa3, 0xe3, 0x19, 0xbf, 0xe3, 0x93, 0x67, + 0xee, 0xf6, 0xbc, 0x33, 0x34, 0x48, 0x78, 0xca, 0xee, 0x93, 0x28, 0x0f, 0x27, 0x33, 0xfe, 0xf4, + 0xf0, 0x92, 0x87, 0x93, 0x28, 0x76, 0x9d, 0xd1, 0x77, 0x0f, 0xf9, 0x0b, 0x58, 0xe3, 0xff, 0x87, + 0x8e, 0x4e, 0xf7, 0x3f, 0x9c, 0xd6, 0x0b, 0xa7, 0xbd, 0xd7, 0xe8, 0x37, 0x28, 0x61, 0x55, 0x28, + 0x89, 0x41, 0x4b, 0x22, 0x4b, 0xd5, 0xec, 0x6f, 0x4f, 0xb5, 0x59, 0x9a, 0xbf, 0x2e, 0x77, 0x6d, + 0x9f, 0x77, 0xe7, 0x70, 0xcc, 0xd8, 0x87, 0x33, 0x1c, 0xdb, 0x32, 0x26, 0x0c, 0xb6, 0xb2, 0x56, + 0x69, 0x80, 0x9b, 0x2f, 0xcd, 0x67, 0x0b, 0x64, 0x4c, 0x98, 0xac, 0x03, 0xb2, 0x34, 0xc8, 0x2c, + 0xf0, 0xe5, 0xf8, 0xd6, 0xa5, 0x94, 0x09, 0x43, 0x69, 0x87, 0x50, 0x9a, 0x06, 0x94, 0x5a, 0x68, + 0x7e, 0xd4, 0xac, 0xbb, 0xfc, 0x09, 0x00, 0x00, 0xff, 0xff, 0xb8, 0x26, 0x72, 0x76, 0xd6, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/criterion_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/criterion_error.pb.go new file mode 100644 index 0000000..c4ab94b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/criterion_error.pb.go @@ -0,0 +1,618 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/criterion_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible criterion errors. +type CriterionErrorEnum_CriterionError int32 + +const ( + // Enum unspecified. + CriterionErrorEnum_UNSPECIFIED CriterionErrorEnum_CriterionError = 0 + // The received error code is not known in this version. + CriterionErrorEnum_UNKNOWN CriterionErrorEnum_CriterionError = 1 + // Concrete type of criterion is required for CREATE and UPDATE operations. + CriterionErrorEnum_CONCRETE_TYPE_REQUIRED CriterionErrorEnum_CriterionError = 2 + // The category requested for exclusion is invalid. + CriterionErrorEnum_INVALID_EXCLUDED_CATEGORY CriterionErrorEnum_CriterionError = 3 + // Invalid keyword criteria text. + CriterionErrorEnum_INVALID_KEYWORD_TEXT CriterionErrorEnum_CriterionError = 4 + // Keyword text should be less than 80 chars. + CriterionErrorEnum_KEYWORD_TEXT_TOO_LONG CriterionErrorEnum_CriterionError = 5 + // Keyword text has too many words. + CriterionErrorEnum_KEYWORD_HAS_TOO_MANY_WORDS CriterionErrorEnum_CriterionError = 6 + // Keyword text has invalid characters or symbols. + CriterionErrorEnum_KEYWORD_HAS_INVALID_CHARS CriterionErrorEnum_CriterionError = 7 + // Invalid placement URL. 
+ CriterionErrorEnum_INVALID_PLACEMENT_URL CriterionErrorEnum_CriterionError = 8 + // Invalid user list criterion. + CriterionErrorEnum_INVALID_USER_LIST CriterionErrorEnum_CriterionError = 9 + // Invalid user interest criterion. + CriterionErrorEnum_INVALID_USER_INTEREST CriterionErrorEnum_CriterionError = 10 + // Placement URL has wrong format. + CriterionErrorEnum_INVALID_FORMAT_FOR_PLACEMENT_URL CriterionErrorEnum_CriterionError = 11 + // Placement URL is too long. + CriterionErrorEnum_PLACEMENT_URL_IS_TOO_LONG CriterionErrorEnum_CriterionError = 12 + // Indicates the URL contains an illegal character. + CriterionErrorEnum_PLACEMENT_URL_HAS_ILLEGAL_CHAR CriterionErrorEnum_CriterionError = 13 + // Indicates the URL contains multiple comma separated URLs. + CriterionErrorEnum_PLACEMENT_URL_HAS_MULTIPLE_SITES_IN_LINE CriterionErrorEnum_CriterionError = 14 + // Indicates the domain is blacklisted. + CriterionErrorEnum_PLACEMENT_IS_NOT_AVAILABLE_FOR_TARGETING_OR_EXCLUSION CriterionErrorEnum_CriterionError = 15 + // Invalid topic path. + CriterionErrorEnum_INVALID_TOPIC_PATH CriterionErrorEnum_CriterionError = 16 + // The YouTube Channel Id is invalid. + CriterionErrorEnum_INVALID_YOUTUBE_CHANNEL_ID CriterionErrorEnum_CriterionError = 17 + // The YouTube Video Id is invalid. + CriterionErrorEnum_INVALID_YOUTUBE_VIDEO_ID CriterionErrorEnum_CriterionError = 18 + // Indicates the placement is a YouTube vertical channel, which is no longer + // supported. + CriterionErrorEnum_YOUTUBE_VERTICAL_CHANNEL_DEPRECATED CriterionErrorEnum_CriterionError = 19 + // Indicates the placement is a YouTube demographic channel, which is no + // longer supported. + CriterionErrorEnum_YOUTUBE_DEMOGRAPHIC_CHANNEL_DEPRECATED CriterionErrorEnum_CriterionError = 20 + // YouTube urls are not supported in Placement criterion. Use YouTubeChannel + // and YouTubeVideo criterion instead. + CriterionErrorEnum_YOUTUBE_URL_UNSUPPORTED CriterionErrorEnum_CriterionError = 21 + // Criteria type can not be excluded by the customer, like AOL account type + // cannot target site type criteria. + CriterionErrorEnum_CANNOT_EXCLUDE_CRITERIA_TYPE CriterionErrorEnum_CriterionError = 22 + // Criteria type can not be targeted. + CriterionErrorEnum_CANNOT_ADD_CRITERIA_TYPE CriterionErrorEnum_CriterionError = 23 + // Product filter in the product criteria has invalid characters. Operand + // and the argument in the filter can not have "==" or "&+". + CriterionErrorEnum_INVALID_PRODUCT_FILTER CriterionErrorEnum_CriterionError = 24 + // Product filter in the product criteria is translated to a string as + // operand1==argument1&+operand2==argument2, maximum allowed length for the + // string is 255 chars. + CriterionErrorEnum_PRODUCT_FILTER_TOO_LONG CriterionErrorEnum_CriterionError = 25 + // Not allowed to exclude similar user list. + CriterionErrorEnum_CANNOT_EXCLUDE_SIMILAR_USER_LIST CriterionErrorEnum_CriterionError = 26 + // Not allowed to target a closed user list. + CriterionErrorEnum_CANNOT_ADD_CLOSED_USER_LIST CriterionErrorEnum_CriterionError = 27 + // Not allowed to add display only UserLists to search only campaigns. + CriterionErrorEnum_CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_ONLY_CAMPAIGNS CriterionErrorEnum_CriterionError = 28 + // Not allowed to add display only UserLists to search plus campaigns. + CriterionErrorEnum_CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_CAMPAIGNS CriterionErrorEnum_CriterionError = 29 + // Not allowed to add display only UserLists to shopping campaigns. 
+ CriterionErrorEnum_CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SHOPPING_CAMPAIGNS CriterionErrorEnum_CriterionError = 30 + // Not allowed to add User interests to search only campaigns. + CriterionErrorEnum_CANNOT_ADD_USER_INTERESTS_TO_SEARCH_CAMPAIGNS CriterionErrorEnum_CriterionError = 31 + // Not allowed to set bids for this criterion type in search campaigns + CriterionErrorEnum_CANNOT_SET_BIDS_ON_CRITERION_TYPE_IN_SEARCH_CAMPAIGNS CriterionErrorEnum_CriterionError = 32 + // Final URLs, URL Templates and CustomParameters cannot be set for the + // criterion types of Gender, AgeRange, UserList, Placement, MobileApp, and + // MobileAppCategory in search campaigns and shopping campaigns. + CriterionErrorEnum_CANNOT_ADD_URLS_TO_CRITERION_TYPE_FOR_CAMPAIGN_TYPE CriterionErrorEnum_CriterionError = 33 + // Invalid custom affinity criterion. + CriterionErrorEnum_INVALID_CUSTOM_AFFINITY CriterionErrorEnum_CriterionError = 96 + // Invalid custom intent criterion. + CriterionErrorEnum_INVALID_CUSTOM_INTENT CriterionErrorEnum_CriterionError = 97 + // IP address is not valid. + CriterionErrorEnum_INVALID_IP_ADDRESS CriterionErrorEnum_CriterionError = 34 + // IP format is not valid. + CriterionErrorEnum_INVALID_IP_FORMAT CriterionErrorEnum_CriterionError = 35 + // Mobile application is not valid. + CriterionErrorEnum_INVALID_MOBILE_APP CriterionErrorEnum_CriterionError = 36 + // Mobile application category is not valid. + CriterionErrorEnum_INVALID_MOBILE_APP_CATEGORY CriterionErrorEnum_CriterionError = 37 + // The CriterionId does not exist or is of the incorrect type. + CriterionErrorEnum_INVALID_CRITERION_ID CriterionErrorEnum_CriterionError = 38 + // The Criterion is not allowed to be targeted. + CriterionErrorEnum_CANNOT_TARGET_CRITERION CriterionErrorEnum_CriterionError = 39 + // The criterion is not allowed to be targeted as it is deprecated. + CriterionErrorEnum_CANNOT_TARGET_OBSOLETE_CRITERION CriterionErrorEnum_CriterionError = 40 + // The CriterionId is not valid for the type. + CriterionErrorEnum_CRITERION_ID_AND_TYPE_MISMATCH CriterionErrorEnum_CriterionError = 41 + // Distance for the radius for the proximity criterion is invalid. + CriterionErrorEnum_INVALID_PROXIMITY_RADIUS CriterionErrorEnum_CriterionError = 42 + // Units for the distance for the radius for the proximity criterion is + // invalid. + CriterionErrorEnum_INVALID_PROXIMITY_RADIUS_UNITS CriterionErrorEnum_CriterionError = 43 + // Street address in the address is not valid. + CriterionErrorEnum_INVALID_STREETADDRESS_LENGTH CriterionErrorEnum_CriterionError = 44 + // City name in the address is not valid. + CriterionErrorEnum_INVALID_CITYNAME_LENGTH CriterionErrorEnum_CriterionError = 45 + // Region code in the address is not valid. + CriterionErrorEnum_INVALID_REGIONCODE_LENGTH CriterionErrorEnum_CriterionError = 46 + // Region name in the address is not valid. + CriterionErrorEnum_INVALID_REGIONNAME_LENGTH CriterionErrorEnum_CriterionError = 47 + // Postal code in the address is not valid. + CriterionErrorEnum_INVALID_POSTALCODE_LENGTH CriterionErrorEnum_CriterionError = 48 + // Country code in the address is not valid. + CriterionErrorEnum_INVALID_COUNTRY_CODE CriterionErrorEnum_CriterionError = 49 + // Latitude for the GeoPoint is not valid. + CriterionErrorEnum_INVALID_LATITUDE CriterionErrorEnum_CriterionError = 50 + // Longitude for the GeoPoint is not valid. + CriterionErrorEnum_INVALID_LONGITUDE CriterionErrorEnum_CriterionError = 51 + // The Proximity input is not valid. Both address and geoPoint cannot be + // null. 
+ CriterionErrorEnum_PROXIMITY_GEOPOINT_AND_ADDRESS_BOTH_CANNOT_BE_NULL CriterionErrorEnum_CriterionError = 52 + // The Proximity address cannot be geocoded to a valid lat/long. + CriterionErrorEnum_INVALID_PROXIMITY_ADDRESS CriterionErrorEnum_CriterionError = 53 + // User domain name is not valid. + CriterionErrorEnum_INVALID_USER_DOMAIN_NAME CriterionErrorEnum_CriterionError = 54 + // Length of serialized criterion parameter exceeded size limit. + CriterionErrorEnum_CRITERION_PARAMETER_TOO_LONG CriterionErrorEnum_CriterionError = 55 + // Time interval in the AdSchedule overlaps with another AdSchedule. + CriterionErrorEnum_AD_SCHEDULE_TIME_INTERVALS_OVERLAP CriterionErrorEnum_CriterionError = 56 + // AdSchedule time interval cannot span multiple days. + CriterionErrorEnum_AD_SCHEDULE_INTERVAL_CANNOT_SPAN_MULTIPLE_DAYS CriterionErrorEnum_CriterionError = 57 + // AdSchedule time interval specified is invalid, endTime cannot be earlier + // than startTime. + CriterionErrorEnum_AD_SCHEDULE_INVALID_TIME_INTERVAL CriterionErrorEnum_CriterionError = 58 + // The number of AdSchedule entries in a day exceeds the limit. + CriterionErrorEnum_AD_SCHEDULE_EXCEEDED_INTERVALS_PER_DAY_LIMIT CriterionErrorEnum_CriterionError = 59 + // CriteriaId does not match the interval of the AdSchedule specified. + CriterionErrorEnum_AD_SCHEDULE_CRITERION_ID_MISMATCHING_FIELDS CriterionErrorEnum_CriterionError = 60 + // Cannot set bid modifier for this criterion type. + CriterionErrorEnum_CANNOT_BID_MODIFY_CRITERION_TYPE CriterionErrorEnum_CriterionError = 61 + // Cannot bid modify criterion, since it is opted out of the campaign. + CriterionErrorEnum_CANNOT_BID_MODIFY_CRITERION_CAMPAIGN_OPTED_OUT CriterionErrorEnum_CriterionError = 62 + // Cannot set bid modifier for a negative criterion. + CriterionErrorEnum_CANNOT_BID_MODIFY_NEGATIVE_CRITERION CriterionErrorEnum_CriterionError = 63 + // Bid Modifier already exists. Use SET operation to update. + CriterionErrorEnum_BID_MODIFIER_ALREADY_EXISTS CriterionErrorEnum_CriterionError = 64 + // Feed Id is not allowed in these Location Groups. + CriterionErrorEnum_FEED_ID_NOT_ALLOWED CriterionErrorEnum_CriterionError = 65 + // The account may not use the requested criteria type. For example, some + // accounts are restricted to keywords only. + CriterionErrorEnum_ACCOUNT_INELIGIBLE_FOR_CRITERIA_TYPE CriterionErrorEnum_CriterionError = 66 + // The requested criteria type cannot be used with campaign or ad group + // bidding strategy. + CriterionErrorEnum_CRITERIA_TYPE_INVALID_FOR_BIDDING_STRATEGY CriterionErrorEnum_CriterionError = 67 + // The Criterion is not allowed to be excluded. + CriterionErrorEnum_CANNOT_EXCLUDE_CRITERION CriterionErrorEnum_CriterionError = 68 + // The criterion is not allowed to be removed. For example, we cannot remove + // any of the device criterion. + CriterionErrorEnum_CANNOT_REMOVE_CRITERION CriterionErrorEnum_CriterionError = 69 + // The combined length of product dimension values of the product scope + // criterion is too long. + CriterionErrorEnum_PRODUCT_SCOPE_TOO_LONG CriterionErrorEnum_CriterionError = 70 + // Product scope contains too many dimensions. + CriterionErrorEnum_PRODUCT_SCOPE_TOO_MANY_DIMENSIONS CriterionErrorEnum_CriterionError = 71 + // The combined length of product dimension values of the product partition + // criterion is too long. + CriterionErrorEnum_PRODUCT_PARTITION_TOO_LONG CriterionErrorEnum_CriterionError = 72 + // Product partition contains too many dimensions. 
+ CriterionErrorEnum_PRODUCT_PARTITION_TOO_MANY_DIMENSIONS CriterionErrorEnum_CriterionError = 73 + // The product dimension is invalid (e.g. dimension contains illegal value, + // dimension type is represented with wrong class, etc). Product dimension + // value can not contain "==" or "&+". + CriterionErrorEnum_INVALID_PRODUCT_DIMENSION CriterionErrorEnum_CriterionError = 74 + // Product dimension type is either invalid for campaigns of this type or + // cannot be used in the current context. BIDDING_CATEGORY_Lx and + // PRODUCT_TYPE_Lx product dimensions must be used in ascending order of + // their levels: L1, L2, L3, L4, L5... The levels must be specified + // sequentially and start from L1. Furthermore, an "others" product + // partition cannot be subdivided with a dimension of the same type but of a + // higher level ("others" BIDDING_CATEGORY_L3 can be subdivided with BRAND + // but not with BIDDING_CATEGORY_L4). + CriterionErrorEnum_INVALID_PRODUCT_DIMENSION_TYPE CriterionErrorEnum_CriterionError = 75 + // Bidding categories do not form a valid path in the Shopping bidding + // category taxonomy. + CriterionErrorEnum_INVALID_PRODUCT_BIDDING_CATEGORY CriterionErrorEnum_CriterionError = 76 + // ShoppingSetting must be added to the campaign before ProductScope + // criteria can be added. + CriterionErrorEnum_MISSING_SHOPPING_SETTING CriterionErrorEnum_CriterionError = 77 + // Matching function is invalid. + CriterionErrorEnum_INVALID_MATCHING_FUNCTION CriterionErrorEnum_CriterionError = 78 + // Filter parameters not allowed for location groups targeting. + CriterionErrorEnum_LOCATION_FILTER_NOT_ALLOWED CriterionErrorEnum_CriterionError = 79 + // Feed not found, or the feed is not an enabled location feed. + CriterionErrorEnum_INVALID_FEED_FOR_LOCATION_FILTER CriterionErrorEnum_CriterionError = 98 + // Given location filter parameter is invalid for location groups targeting. + CriterionErrorEnum_LOCATION_FILTER_INVALID CriterionErrorEnum_CriterionError = 80 + // Criteria type cannot be associated with a campaign and its ad group(s) + // simultaneously. + CriterionErrorEnum_CANNOT_ATTACH_CRITERIA_AT_CAMPAIGN_AND_ADGROUP CriterionErrorEnum_CriterionError = 81 + // Range represented by hotel length of stay's min nights and max nights + // overlaps with an existing criterion. + CriterionErrorEnum_HOTEL_LENGTH_OF_STAY_OVERLAPS_WITH_EXISTING_CRITERION CriterionErrorEnum_CriterionError = 82 + // Range represented by hotel advance booking window's min days and max days + // overlaps with an existing criterion. + CriterionErrorEnum_HOTEL_ADVANCE_BOOKING_WINDOW_OVERLAPS_WITH_EXISTING_CRITERION CriterionErrorEnum_CriterionError = 83 + // The field is not allowed to be set when the negative field is set to + // true, e.g. we don't allow bids in negative ad group or campaign criteria. + CriterionErrorEnum_FIELD_INCOMPATIBLE_WITH_NEGATIVE_TARGETING CriterionErrorEnum_CriterionError = 84 + // The combination of operand and operator in webpage condition is invalid. + CriterionErrorEnum_INVALID_WEBPAGE_CONDITION CriterionErrorEnum_CriterionError = 85 + // The URL of webpage condition is invalid. + CriterionErrorEnum_INVALID_WEBPAGE_CONDITION_URL CriterionErrorEnum_CriterionError = 86 + // The URL of webpage condition cannot be empty or contain white space. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_CANNOT_BE_EMPTY CriterionErrorEnum_CriterionError = 87 + // The URL of webpage condition contains an unsupported protocol. 
+ CriterionErrorEnum_WEBPAGE_CONDITION_URL_UNSUPPORTED_PROTOCOL CriterionErrorEnum_CriterionError = 88 + // The URL of webpage condition cannot be an IP address. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_CANNOT_BE_IP_ADDRESS CriterionErrorEnum_CriterionError = 89 + // The domain of the URL is not consistent with the domain in campaign + // setting. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_DOMAIN_NOT_CONSISTENT_WITH_CAMPAIGN_SETTING CriterionErrorEnum_CriterionError = 90 + // The URL of webpage condition cannot be a public suffix itself. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_CANNOT_BE_PUBLIC_SUFFIX CriterionErrorEnum_CriterionError = 91 + // The URL of webpage condition has an invalid public suffix. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_INVALID_PUBLIC_SUFFIX CriterionErrorEnum_CriterionError = 92 + // Value track parameter is not supported in webpage condition URL. + CriterionErrorEnum_WEBPAGE_CONDITION_URL_VALUE_TRACK_VALUE_NOT_SUPPORTED CriterionErrorEnum_CriterionError = 93 + // Only one URL-EQUALS webpage condition is allowed in a webpage + // criterion and it cannot be combined with other conditions. + CriterionErrorEnum_WEBPAGE_CRITERION_URL_EQUALS_CAN_HAVE_ONLY_ONE_CONDITION CriterionErrorEnum_CriterionError = 94 + // A webpage criterion cannot be added to a non-DSA ad group. + CriterionErrorEnum_WEBPAGE_CRITERION_NOT_SUPPORTED_ON_NON_DSA_AD_GROUP CriterionErrorEnum_CriterionError = 95 +) + +var CriterionErrorEnum_CriterionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONCRETE_TYPE_REQUIRED", + 3: "INVALID_EXCLUDED_CATEGORY", + 4: "INVALID_KEYWORD_TEXT", + 5: "KEYWORD_TEXT_TOO_LONG", + 6: "KEYWORD_HAS_TOO_MANY_WORDS", + 7: "KEYWORD_HAS_INVALID_CHARS", + 8: "INVALID_PLACEMENT_URL", + 9: "INVALID_USER_LIST", + 10: "INVALID_USER_INTEREST", + 11: "INVALID_FORMAT_FOR_PLACEMENT_URL", + 12: "PLACEMENT_URL_IS_TOO_LONG", + 13: "PLACEMENT_URL_HAS_ILLEGAL_CHAR", + 14: "PLACEMENT_URL_HAS_MULTIPLE_SITES_IN_LINE", + 15: "PLACEMENT_IS_NOT_AVAILABLE_FOR_TARGETING_OR_EXCLUSION", + 16: "INVALID_TOPIC_PATH", + 17: "INVALID_YOUTUBE_CHANNEL_ID", + 18: "INVALID_YOUTUBE_VIDEO_ID", + 19: "YOUTUBE_VERTICAL_CHANNEL_DEPRECATED", + 20: "YOUTUBE_DEMOGRAPHIC_CHANNEL_DEPRECATED", + 21: "YOUTUBE_URL_UNSUPPORTED", + 22: "CANNOT_EXCLUDE_CRITERIA_TYPE", + 23: "CANNOT_ADD_CRITERIA_TYPE", + 24: "INVALID_PRODUCT_FILTER", + 25: "PRODUCT_FILTER_TOO_LONG", + 26: "CANNOT_EXCLUDE_SIMILAR_USER_LIST", + 27: "CANNOT_ADD_CLOSED_USER_LIST", + 28: "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_ONLY_CAMPAIGNS", + 29: "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_CAMPAIGNS", + 30: "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SHOPPING_CAMPAIGNS", + 31: "CANNOT_ADD_USER_INTERESTS_TO_SEARCH_CAMPAIGNS", + 32: "CANNOT_SET_BIDS_ON_CRITERION_TYPE_IN_SEARCH_CAMPAIGNS", + 33: "CANNOT_ADD_URLS_TO_CRITERION_TYPE_FOR_CAMPAIGN_TYPE", + 96: "INVALID_CUSTOM_AFFINITY", + 97: "INVALID_CUSTOM_INTENT", + 34: "INVALID_IP_ADDRESS", + 35: "INVALID_IP_FORMAT", + 36: "INVALID_MOBILE_APP", + 37: "INVALID_MOBILE_APP_CATEGORY", + 38: "INVALID_CRITERION_ID", + 39: "CANNOT_TARGET_CRITERION", + 40: "CANNOT_TARGET_OBSOLETE_CRITERION", + 41: "CRITERION_ID_AND_TYPE_MISMATCH", + 42: "INVALID_PROXIMITY_RADIUS", + 43: "INVALID_PROXIMITY_RADIUS_UNITS", + 44: "INVALID_STREETADDRESS_LENGTH", + 45: "INVALID_CITYNAME_LENGTH", + 46: "INVALID_REGIONCODE_LENGTH", + 47: "INVALID_REGIONNAME_LENGTH", + 48: "INVALID_POSTALCODE_LENGTH", + 49: "INVALID_COUNTRY_CODE", + 50: "INVALID_LATITUDE", + 51: "INVALID_LONGITUDE", + 52: 
"PROXIMITY_GEOPOINT_AND_ADDRESS_BOTH_CANNOT_BE_NULL", + 53: "INVALID_PROXIMITY_ADDRESS", + 54: "INVALID_USER_DOMAIN_NAME", + 55: "CRITERION_PARAMETER_TOO_LONG", + 56: "AD_SCHEDULE_TIME_INTERVALS_OVERLAP", + 57: "AD_SCHEDULE_INTERVAL_CANNOT_SPAN_MULTIPLE_DAYS", + 58: "AD_SCHEDULE_INVALID_TIME_INTERVAL", + 59: "AD_SCHEDULE_EXCEEDED_INTERVALS_PER_DAY_LIMIT", + 60: "AD_SCHEDULE_CRITERION_ID_MISMATCHING_FIELDS", + 61: "CANNOT_BID_MODIFY_CRITERION_TYPE", + 62: "CANNOT_BID_MODIFY_CRITERION_CAMPAIGN_OPTED_OUT", + 63: "CANNOT_BID_MODIFY_NEGATIVE_CRITERION", + 64: "BID_MODIFIER_ALREADY_EXISTS", + 65: "FEED_ID_NOT_ALLOWED", + 66: "ACCOUNT_INELIGIBLE_FOR_CRITERIA_TYPE", + 67: "CRITERIA_TYPE_INVALID_FOR_BIDDING_STRATEGY", + 68: "CANNOT_EXCLUDE_CRITERION", + 69: "CANNOT_REMOVE_CRITERION", + 70: "PRODUCT_SCOPE_TOO_LONG", + 71: "PRODUCT_SCOPE_TOO_MANY_DIMENSIONS", + 72: "PRODUCT_PARTITION_TOO_LONG", + 73: "PRODUCT_PARTITION_TOO_MANY_DIMENSIONS", + 74: "INVALID_PRODUCT_DIMENSION", + 75: "INVALID_PRODUCT_DIMENSION_TYPE", + 76: "INVALID_PRODUCT_BIDDING_CATEGORY", + 77: "MISSING_SHOPPING_SETTING", + 78: "INVALID_MATCHING_FUNCTION", + 79: "LOCATION_FILTER_NOT_ALLOWED", + 98: "INVALID_FEED_FOR_LOCATION_FILTER", + 80: "LOCATION_FILTER_INVALID", + 81: "CANNOT_ATTACH_CRITERIA_AT_CAMPAIGN_AND_ADGROUP", + 82: "HOTEL_LENGTH_OF_STAY_OVERLAPS_WITH_EXISTING_CRITERION", + 83: "HOTEL_ADVANCE_BOOKING_WINDOW_OVERLAPS_WITH_EXISTING_CRITERION", + 84: "FIELD_INCOMPATIBLE_WITH_NEGATIVE_TARGETING", + 85: "INVALID_WEBPAGE_CONDITION", + 86: "INVALID_WEBPAGE_CONDITION_URL", + 87: "WEBPAGE_CONDITION_URL_CANNOT_BE_EMPTY", + 88: "WEBPAGE_CONDITION_URL_UNSUPPORTED_PROTOCOL", + 89: "WEBPAGE_CONDITION_URL_CANNOT_BE_IP_ADDRESS", + 90: "WEBPAGE_CONDITION_URL_DOMAIN_NOT_CONSISTENT_WITH_CAMPAIGN_SETTING", + 91: "WEBPAGE_CONDITION_URL_CANNOT_BE_PUBLIC_SUFFIX", + 92: "WEBPAGE_CONDITION_URL_INVALID_PUBLIC_SUFFIX", + 93: "WEBPAGE_CONDITION_URL_VALUE_TRACK_VALUE_NOT_SUPPORTED", + 94: "WEBPAGE_CRITERION_URL_EQUALS_CAN_HAVE_ONLY_ONE_CONDITION", + 95: "WEBPAGE_CRITERION_NOT_SUPPORTED_ON_NON_DSA_AD_GROUP", +} +var CriterionErrorEnum_CriterionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONCRETE_TYPE_REQUIRED": 2, + "INVALID_EXCLUDED_CATEGORY": 3, + "INVALID_KEYWORD_TEXT": 4, + "KEYWORD_TEXT_TOO_LONG": 5, + "KEYWORD_HAS_TOO_MANY_WORDS": 6, + "KEYWORD_HAS_INVALID_CHARS": 7, + "INVALID_PLACEMENT_URL": 8, + "INVALID_USER_LIST": 9, + "INVALID_USER_INTEREST": 10, + "INVALID_FORMAT_FOR_PLACEMENT_URL": 11, + "PLACEMENT_URL_IS_TOO_LONG": 12, + "PLACEMENT_URL_HAS_ILLEGAL_CHAR": 13, + "PLACEMENT_URL_HAS_MULTIPLE_SITES_IN_LINE": 14, + "PLACEMENT_IS_NOT_AVAILABLE_FOR_TARGETING_OR_EXCLUSION": 15, + "INVALID_TOPIC_PATH": 16, + "INVALID_YOUTUBE_CHANNEL_ID": 17, + "INVALID_YOUTUBE_VIDEO_ID": 18, + "YOUTUBE_VERTICAL_CHANNEL_DEPRECATED": 19, + "YOUTUBE_DEMOGRAPHIC_CHANNEL_DEPRECATED": 20, + "YOUTUBE_URL_UNSUPPORTED": 21, + "CANNOT_EXCLUDE_CRITERIA_TYPE": 22, + "CANNOT_ADD_CRITERIA_TYPE": 23, + "INVALID_PRODUCT_FILTER": 24, + "PRODUCT_FILTER_TOO_LONG": 25, + "CANNOT_EXCLUDE_SIMILAR_USER_LIST": 26, + "CANNOT_ADD_CLOSED_USER_LIST": 27, + "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_ONLY_CAMPAIGNS": 28, + "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SEARCH_CAMPAIGNS": 29, + "CANNOT_ADD_DISPLAY_ONLY_LISTS_TO_SHOPPING_CAMPAIGNS": 30, + "CANNOT_ADD_USER_INTERESTS_TO_SEARCH_CAMPAIGNS": 31, + "CANNOT_SET_BIDS_ON_CRITERION_TYPE_IN_SEARCH_CAMPAIGNS": 32, + "CANNOT_ADD_URLS_TO_CRITERION_TYPE_FOR_CAMPAIGN_TYPE": 33, + "INVALID_CUSTOM_AFFINITY": 96, + 
"INVALID_CUSTOM_INTENT": 97, + "INVALID_IP_ADDRESS": 34, + "INVALID_IP_FORMAT": 35, + "INVALID_MOBILE_APP": 36, + "INVALID_MOBILE_APP_CATEGORY": 37, + "INVALID_CRITERION_ID": 38, + "CANNOT_TARGET_CRITERION": 39, + "CANNOT_TARGET_OBSOLETE_CRITERION": 40, + "CRITERION_ID_AND_TYPE_MISMATCH": 41, + "INVALID_PROXIMITY_RADIUS": 42, + "INVALID_PROXIMITY_RADIUS_UNITS": 43, + "INVALID_STREETADDRESS_LENGTH": 44, + "INVALID_CITYNAME_LENGTH": 45, + "INVALID_REGIONCODE_LENGTH": 46, + "INVALID_REGIONNAME_LENGTH": 47, + "INVALID_POSTALCODE_LENGTH": 48, + "INVALID_COUNTRY_CODE": 49, + "INVALID_LATITUDE": 50, + "INVALID_LONGITUDE": 51, + "PROXIMITY_GEOPOINT_AND_ADDRESS_BOTH_CANNOT_BE_NULL": 52, + "INVALID_PROXIMITY_ADDRESS": 53, + "INVALID_USER_DOMAIN_NAME": 54, + "CRITERION_PARAMETER_TOO_LONG": 55, + "AD_SCHEDULE_TIME_INTERVALS_OVERLAP": 56, + "AD_SCHEDULE_INTERVAL_CANNOT_SPAN_MULTIPLE_DAYS": 57, + "AD_SCHEDULE_INVALID_TIME_INTERVAL": 58, + "AD_SCHEDULE_EXCEEDED_INTERVALS_PER_DAY_LIMIT": 59, + "AD_SCHEDULE_CRITERION_ID_MISMATCHING_FIELDS": 60, + "CANNOT_BID_MODIFY_CRITERION_TYPE": 61, + "CANNOT_BID_MODIFY_CRITERION_CAMPAIGN_OPTED_OUT": 62, + "CANNOT_BID_MODIFY_NEGATIVE_CRITERION": 63, + "BID_MODIFIER_ALREADY_EXISTS": 64, + "FEED_ID_NOT_ALLOWED": 65, + "ACCOUNT_INELIGIBLE_FOR_CRITERIA_TYPE": 66, + "CRITERIA_TYPE_INVALID_FOR_BIDDING_STRATEGY": 67, + "CANNOT_EXCLUDE_CRITERION": 68, + "CANNOT_REMOVE_CRITERION": 69, + "PRODUCT_SCOPE_TOO_LONG": 70, + "PRODUCT_SCOPE_TOO_MANY_DIMENSIONS": 71, + "PRODUCT_PARTITION_TOO_LONG": 72, + "PRODUCT_PARTITION_TOO_MANY_DIMENSIONS": 73, + "INVALID_PRODUCT_DIMENSION": 74, + "INVALID_PRODUCT_DIMENSION_TYPE": 75, + "INVALID_PRODUCT_BIDDING_CATEGORY": 76, + "MISSING_SHOPPING_SETTING": 77, + "INVALID_MATCHING_FUNCTION": 78, + "LOCATION_FILTER_NOT_ALLOWED": 79, + "INVALID_FEED_FOR_LOCATION_FILTER": 98, + "LOCATION_FILTER_INVALID": 80, + "CANNOT_ATTACH_CRITERIA_AT_CAMPAIGN_AND_ADGROUP": 81, + "HOTEL_LENGTH_OF_STAY_OVERLAPS_WITH_EXISTING_CRITERION": 82, + "HOTEL_ADVANCE_BOOKING_WINDOW_OVERLAPS_WITH_EXISTING_CRITERION": 83, + "FIELD_INCOMPATIBLE_WITH_NEGATIVE_TARGETING": 84, + "INVALID_WEBPAGE_CONDITION": 85, + "INVALID_WEBPAGE_CONDITION_URL": 86, + "WEBPAGE_CONDITION_URL_CANNOT_BE_EMPTY": 87, + "WEBPAGE_CONDITION_URL_UNSUPPORTED_PROTOCOL": 88, + "WEBPAGE_CONDITION_URL_CANNOT_BE_IP_ADDRESS": 89, + "WEBPAGE_CONDITION_URL_DOMAIN_NOT_CONSISTENT_WITH_CAMPAIGN_SETTING": 90, + "WEBPAGE_CONDITION_URL_CANNOT_BE_PUBLIC_SUFFIX": 91, + "WEBPAGE_CONDITION_URL_INVALID_PUBLIC_SUFFIX": 92, + "WEBPAGE_CONDITION_URL_VALUE_TRACK_VALUE_NOT_SUPPORTED": 93, + "WEBPAGE_CRITERION_URL_EQUALS_CAN_HAVE_ONLY_ONE_CONDITION": 94, + "WEBPAGE_CRITERION_NOT_SUPPORTED_ON_NON_DSA_AD_GROUP": 95, +} + +func (x CriterionErrorEnum_CriterionError) String() string { + return proto.EnumName(CriterionErrorEnum_CriterionError_name, int32(x)) +} +func (CriterionErrorEnum_CriterionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_criterion_error_640614236324c79e, []int{0, 0} +} + +// Container for enum describing possible criterion errors. 
+type CriterionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CriterionErrorEnum) Reset() { *m = CriterionErrorEnum{} } +func (m *CriterionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CriterionErrorEnum) ProtoMessage() {} +func (*CriterionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_criterion_error_640614236324c79e, []int{0} +} +func (m *CriterionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CriterionErrorEnum.Unmarshal(m, b) +} +func (m *CriterionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CriterionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CriterionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CriterionErrorEnum.Merge(dst, src) +} +func (m *CriterionErrorEnum) XXX_Size() int { + return xxx_messageInfo_CriterionErrorEnum.Size(m) +} +func (m *CriterionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CriterionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CriterionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CriterionErrorEnum)(nil), "google.ads.googleads.v1.errors.CriterionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CriterionErrorEnum_CriterionError", CriterionErrorEnum_CriterionError_name, CriterionErrorEnum_CriterionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/criterion_error.proto", fileDescriptor_criterion_error_640614236324c79e) +} + +var fileDescriptor_criterion_error_640614236324c79e = []byte{ + // 1695 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xef, 0x72, 0x1b, 0xb7, + 0x11, 0xaf, 0x9d, 0x36, 0x69, 0xe1, 0x36, 0x41, 0xe0, 0xff, 0xff, 0x14, 0x5b, 0xb1, 0x1d, 0xc7, + 0x71, 0xa8, 0xc8, 0x8e, 0x9d, 0x84, 0x49, 0xda, 0x82, 0x87, 0x25, 0x89, 0x0a, 0x07, 0xc0, 0x00, + 0x8e, 0x12, 0x53, 0xb5, 0xa8, 0x12, 0x79, 0x34, 0x9a, 0x49, 0x44, 0x8f, 0xe8, 0xe6, 0x3d, 0xfa, + 0x0a, 0xfd, 0xd4, 0xe9, 0xa3, 0xf4, 0x51, 0xfa, 0xa1, 0xcf, 0xd0, 0xd9, 0x3b, 0xe2, 0x88, 0xa3, + 0xe4, 0x38, 0x9f, 0xc8, 0xd9, 0xdd, 0x1f, 0xb0, 0x8b, 0xfd, 0xed, 0x9f, 0x23, 0x9f, 0x1e, 0xcc, + 0x66, 0x07, 0xdf, 0x3f, 0xdf, 0xd8, 0xdb, 0x9f, 0x6f, 0x34, 0x7f, 0xf1, 0xdf, 0x8f, 0x9b, 0x1b, + 0xcf, 0x8f, 0x8f, 0x67, 0xc7, 0xf3, 0x8d, 0xef, 0x8e, 0x0f, 0x5f, 0x3e, 0x3f, 0x3e, 0x9c, 0x1d, + 0xc5, 0x5a, 0xd0, 0x7b, 0x71, 0x3c, 0x7b, 0x39, 0x63, 0x6b, 0x8d, 0x69, 0x6f, 0x6f, 0x7f, 0xde, + 0x6b, 0x51, 0xbd, 0x1f, 0x37, 0x7b, 0x0d, 0xea, 0xda, 0x8d, 0x74, 0xea, 0x8b, 0xc3, 0x8d, 0xbd, + 0xa3, 0xa3, 0xd9, 0xcb, 0xbd, 0x97, 0x87, 0xb3, 0xa3, 0x79, 0x83, 0x5e, 0xff, 0xd7, 0x4d, 0xc2, + 0x8a, 0x74, 0x2e, 0x20, 0x02, 0x8e, 0xfe, 0xfe, 0xc3, 0xfa, 0x3f, 0x6e, 0x92, 0xb7, 0xbb, 0x62, + 0xf6, 0x0e, 0x39, 0x57, 0x69, 0x6f, 0xa1, 0x90, 0x43, 0x09, 0x82, 0xfe, 0x82, 0x9d, 0x23, 0x6f, + 0x55, 0x7a, 0x4b, 0x9b, 0x6d, 0x4d, 0xcf, 0xb0, 0x6b, 0xe4, 0x52, 0x61, 0x74, 0xe1, 0x20, 0x40, + 0x0c, 0x53, 0x0b, 0xd1, 0xc1, 0xb3, 0x4a, 0x3a, 0x10, 0xf4, 0x2c, 0xbb, 0x49, 0xae, 0x4a, 0x3d, + 0xe1, 0x4a, 0x8a, 0x08, 0x3b, 0x85, 0xaa, 0x04, 0x88, 0x58, 0xf0, 0x00, 0x23, 0xe3, 0xa6, 0xf4, + 0x0d, 0x76, 0x85, 0x5c, 0x48, 0xea, 0x2d, 0x98, 0x6e, 0x1b, 0x27, 0x62, 0x80, 0x9d, 0x40, 0x7f, + 0xc9, 0xae, 0x92, 0x8b, 0xb9, 0x24, 0x06, 0x63, 0xa2, 0x32, 0x7a, 0x44, 0x7f, 0xc5, 0xd6, 0xc8, + 0xb5, 0xa4, 0x1a, 0x73, 0x5f, 0x6b, 0x4a, 0xae, 0xa7, 0x11, 0x25, 
0x9e, 0xbe, 0x89, 0x77, 0xe6, + 0xfa, 0x74, 0x41, 0x31, 0xe6, 0xce, 0xd3, 0xb7, 0xf0, 0xe4, 0x24, 0xb2, 0x8a, 0x17, 0x50, 0x82, + 0x0e, 0xb1, 0x72, 0x8a, 0xfe, 0x9a, 0x5d, 0x24, 0xef, 0x26, 0x55, 0xe5, 0xc1, 0x45, 0x25, 0x7d, + 0xa0, 0xbf, 0xc9, 0x11, 0xb5, 0x58, 0xea, 0x00, 0x0e, 0x7c, 0xa0, 0x84, 0xdd, 0x21, 0xb7, 0x92, + 0x6a, 0x68, 0x5c, 0xc9, 0x03, 0xfe, 0xac, 0x9c, 0x7b, 0x0e, 0x3d, 0xea, 0x88, 0xa2, 0xf4, 0xcb, + 0x80, 0x7e, 0xcb, 0xd6, 0xc9, 0x5a, 0x57, 0x5d, 0xbb, 0xad, 0x14, 0x8c, 0xb8, 0xaa, 0xdd, 0xa6, + 0xbf, 0x63, 0x0f, 0xc9, 0xfd, 0x93, 0x36, 0x65, 0xa5, 0x82, 0xb4, 0x0a, 0xa2, 0x97, 0x01, 0x30, + 0xd2, 0xa8, 0xa4, 0x06, 0xfa, 0x36, 0xfb, 0x82, 0x3c, 0x59, 0x5a, 0x4b, 0x1f, 0xb5, 0x09, 0x91, + 0x4f, 0xb8, 0x54, 0x7c, 0xa0, 0xa0, 0x76, 0x31, 0x70, 0x37, 0x82, 0x20, 0xf5, 0x28, 0x1a, 0xd7, + 0xa4, 0xc6, 0x4b, 0xa3, 0xe9, 0x3b, 0xec, 0x12, 0x61, 0x29, 0xa2, 0x60, 0xac, 0x2c, 0xa2, 0xe5, + 0x61, 0x4c, 0x29, 0xbe, 0x7a, 0x92, 0x4f, 0x4d, 0x15, 0xaa, 0x01, 0xa0, 0x6b, 0x5a, 0x83, 0x8a, + 0x52, 0xd0, 0x77, 0xd9, 0x0d, 0x72, 0x65, 0x55, 0x3f, 0x91, 0x02, 0x0c, 0x6a, 0x19, 0xfb, 0x80, + 0xbc, 0xdf, 0x4a, 0xc1, 0x05, 0x59, 0x34, 0x91, 0xd5, 0x70, 0x01, 0xd6, 0x01, 0x92, 0x42, 0xd0, + 0xf3, 0xec, 0x01, 0xb9, 0x97, 0x0c, 0x05, 0x94, 0x66, 0xe4, 0xb8, 0x1d, 0xcb, 0xe2, 0x34, 0xdb, + 0x0b, 0xec, 0x3a, 0xb9, 0x9c, 0x6c, 0xf1, 0x45, 0x2a, 0xed, 0x2b, 0x6b, 0x8d, 0x43, 0xe5, 0x45, + 0x76, 0x8b, 0xdc, 0x28, 0xb8, 0xc6, 0xc0, 0x17, 0xc4, 0x8b, 0x85, 0x93, 0x01, 0x9c, 0xe4, 0x35, + 0x49, 0xe9, 0x25, 0xf4, 0x78, 0x61, 0xc1, 0x85, 0x58, 0xd1, 0x5e, 0x46, 0x56, 0xb7, 0x34, 0x71, + 0x46, 0x54, 0x45, 0x88, 0x43, 0xa9, 0x02, 0x38, 0x7a, 0x05, 0x2f, 0xee, 0xca, 0x96, 0xd9, 0xbc, + 0x8a, 0x94, 0x58, 0xb9, 0xd8, 0xcb, 0x52, 0x2a, 0xee, 0x32, 0x4e, 0x5d, 0x63, 0xef, 0x91, 0xeb, + 0xf9, 0xe5, 0xca, 0x78, 0xc8, 0x49, 0x77, 0x9d, 0xf5, 0xc9, 0xd3, 0xcc, 0x40, 0x48, 0x6f, 0x15, + 0x9f, 0x46, 0xa3, 0xd5, 0xb4, 0xb6, 0x40, 0x02, 0x45, 0x0f, 0xdc, 0x15, 0xe3, 0x46, 0x58, 0xf0, + 0xd2, 0x72, 0x39, 0xd2, 0x9e, 0xde, 0x60, 0x4f, 0xc8, 0xe6, 0xcf, 0xc5, 0x2e, 0x61, 0x37, 0xd9, + 0x67, 0xe4, 0xf1, 0xeb, 0x61, 0x63, 0x63, 0x2d, 0xd2, 0x66, 0x09, 0x5c, 0x63, 0x9b, 0xe4, 0xe3, + 0x0c, 0xd8, 0xa9, 0x91, 0xd3, 0xef, 0x7a, 0x0f, 0x19, 0xba, 0x80, 0x78, 0x08, 0x71, 0x20, 0x85, + 0x8f, 0x46, 0xa7, 0x24, 0x18, 0xdd, 0x34, 0x12, 0xa9, 0x4f, 0x42, 0x6f, 0xad, 0xb8, 0x59, 0x39, + 0x55, 0xdf, 0xb1, 0x02, 0x45, 0x8e, 0x27, 0x50, 0x93, 0xd2, 0xdb, 0x98, 0xb6, 0xb6, 0x19, 0x54, + 0x3e, 0x98, 0x32, 0xf2, 0xe1, 0x50, 0x6a, 0x19, 0xa6, 0xf4, 0x6f, 0x79, 0x91, 0x2f, 0x94, 0x18, + 0x82, 0x0e, 0x74, 0x2f, 0x2f, 0x09, 0x69, 0xf1, 0x52, 0x07, 0xde, 0xd3, 0xf5, 0xbc, 0x5d, 0x48, + 0xbb, 0xa8, 0x7f, 0xfa, 0x7e, 0x6e, 0x5e, 0x9a, 0x81, 0x54, 0x10, 0xb9, 0xb5, 0xf4, 0x0e, 0xa6, + 0xfc, 0xa4, 0x7c, 0xd9, 0x0d, 0xef, 0xe6, 0xdd, 0x70, 0x19, 0x8d, 0x14, 0xf4, 0x1e, 0x7a, 0xbe, + 0x08, 0xb9, 0xa9, 0xdb, 0xa5, 0x9e, 0x7e, 0x90, 0x11, 0x6e, 0xa1, 0x34, 0x03, 0x6f, 0x14, 0x76, + 0xe3, 0xa5, 0xd5, 0x7d, 0x6c, 0x32, 0xf9, 0xa1, 0x91, 0x6b, 0xd1, 0xbc, 0x54, 0x29, 0x7d, 0xc9, + 0x43, 0x31, 0xa6, 0x1f, 0xe6, 0x35, 0x6c, 0x9d, 0xd9, 0x91, 0xa5, 0x0c, 0xd3, 0xe8, 0xb8, 0x90, + 0x95, 0xa7, 0x0f, 0xf0, 0x84, 0x57, 0x69, 0x63, 0xa5, 0x65, 0xf0, 0xf4, 0x23, 0xac, 0xba, 0x64, + 0xe3, 0x83, 0x03, 0x08, 0x8b, 0xd7, 0x8a, 0x0a, 0xf4, 0x28, 0x8c, 0xe9, 0xc3, 0x4e, 0x12, 0x64, + 0x98, 0x6a, 0x5e, 0x42, 0x52, 0x7e, 0x9c, 0x8f, 0x0b, 0x07, 0x23, 0x69, 0x74, 0x61, 0x44, 0xab, + 0xee, 0x9d, 0x54, 0xe7, 0xe8, 0x8d, 0x5c, 0x6d, 0x8d, 0x0f, 0x5c, 0xe5, 0xe8, 0x4f, 0x3a, 
0xcf, + 0x6b, 0x2a, 0x1d, 0xdc, 0x34, 0xa2, 0x01, 0xdd, 0x64, 0x17, 0x08, 0x4d, 0x1a, 0xc5, 0x83, 0x0c, + 0x95, 0x00, 0xfa, 0x28, 0x4f, 0x2f, 0x96, 0x76, 0x23, 0x7e, 0xcc, 0x9e, 0x92, 0x47, 0xcb, 0xf0, + 0x47, 0x60, 0xac, 0x91, 0x3a, 0xd4, 0xcf, 0x99, 0x62, 0x1d, 0x98, 0x80, 0x8c, 0xad, 0x53, 0x32, + 0x80, 0xa8, 0x2b, 0xa5, 0xe8, 0xa7, 0x1d, 0xef, 0x5a, 0x7c, 0x22, 0xd3, 0x93, 0xfc, 0xed, 0xeb, + 0x02, 0x12, 0xa6, 0xe4, 0x52, 0x47, 0x8c, 0x90, 0x3e, 0xad, 0xbb, 0x59, 0x9b, 0x3d, 0xcb, 0x1d, + 0x2f, 0xa1, 0xd3, 0x76, 0x3e, 0x63, 0xf7, 0xc8, 0x3a, 0x17, 0xd1, 0x17, 0x63, 0x10, 0x95, 0x82, + 0x18, 0x64, 0x09, 0x4d, 0x11, 0x4e, 0xb8, 0xf2, 0xd1, 0x4c, 0xc0, 0x29, 0x6e, 0xe9, 0xe7, 0xec, + 0x11, 0xe9, 0xe5, 0x76, 0xc9, 0x24, 0xf9, 0xeb, 0x2d, 0xd7, 0xcb, 0xb9, 0x22, 0xf8, 0xd4, 0xd3, + 0x2f, 0xd8, 0x5d, 0x72, 0xbb, 0x8b, 0x59, 0xcc, 0x87, 0xfc, 0x0e, 0xda, 0x67, 0x9f, 0x90, 0x87, + 0xb9, 0x19, 0xec, 0x14, 0x00, 0x38, 0xf0, 0x97, 0x6e, 0x58, 0x0c, 0x8c, 0x63, 0x3b, 0x29, 0x65, + 0xa0, 0x5f, 0xb2, 0x0d, 0xf2, 0x51, 0x8e, 0xe8, 0x10, 0x34, 0xf1, 0x12, 0x9b, 0xcd, 0x50, 0x82, + 0x12, 0x9e, 0x7e, 0x95, 0x71, 0x7d, 0x50, 0x97, 0x91, 0x90, 0xc3, 0xe9, 0x4a, 0xe9, 0xd3, 0xaf, + 0x31, 0xc6, 0x9f, 0xb2, 0x6a, 0xfb, 0x82, 0xb1, 0x01, 0x44, 0x34, 0x55, 0xa0, 0xbf, 0x67, 0xf7, + 0xc9, 0x9d, 0x93, 0x18, 0x0d, 0x23, 0x1e, 0xe4, 0x24, 0xaf, 0xa4, 0x3f, 0x60, 0x1d, 0xb7, 0x26, + 0x12, 0x5c, 0xe4, 0xca, 0x01, 0x17, 0xd3, 0x08, 0x3b, 0xd8, 0x24, 0xe9, 0x1f, 0xd9, 0x65, 0x72, + 0x7e, 0x08, 0x18, 0xb7, 0x68, 0x06, 0xaf, 0x52, 0x66, 0x1b, 0x04, 0xe5, 0x78, 0x07, 0x2f, 0x6a, + 0xee, 0x45, 0xa9, 0x41, 0xc9, 0x91, 0x4c, 0xe3, 0xb8, 0x3b, 0x7d, 0x06, 0xac, 0x47, 0x1e, 0x74, + 0x44, 0x31, 0xdb, 0x32, 0xd0, 0x41, 0x81, 0xaf, 0xe2, 0x83, 0xc3, 0xe6, 0x31, 0xa5, 0x45, 0x36, + 0xcb, 0x56, 0xa6, 0x9d, 0xd1, 0x54, 0x64, 0xed, 0xc3, 0x41, 0x69, 0x3a, 0xe1, 0x00, 0x0e, 0xba, + 0x34, 0xcc, 0x7c, 0x61, 0x2c, 0x2c, 0x49, 0x35, 0xc4, 0xc4, 0x9f, 0xd4, 0xd5, 0xcb, 0x96, 0x90, + 0x25, 0x68, 0x5c, 0x19, 0x3c, 0x1d, 0xe1, 0x6e, 0x90, 0xcc, 0x2c, 0x77, 0x41, 0x86, 0x3a, 0x17, + 0xe9, 0x98, 0x31, 0xfb, 0x90, 0xdc, 0x3d, 0x5d, 0xbf, 0x7a, 0x94, 0x5c, 0xa9, 0x92, 0x1a, 0xd2, + 0xea, 0xe9, 0x9f, 0x56, 0x7a, 0x50, 0x57, 0xdd, 0xbc, 0xdd, 0x56, 0xbe, 0x93, 0x25, 0x9b, 0xf4, + 0x62, 0x6d, 0xb3, 0x55, 0xf8, 0x62, 0xa5, 0xf4, 0xbe, 0x7e, 0xc7, 0x34, 0xd3, 0x3c, 0x04, 0x5c, + 0x89, 0x68, 0x99, 0xbb, 0xb1, 0x24, 0x61, 0xa5, 0x0b, 0xf4, 0x9c, 0x6a, 0xa4, 0x80, 0x32, 0x05, + 0xaf, 0xe3, 0x58, 0x6c, 0x00, 0x79, 0xa6, 0x4d, 0x67, 0x2f, 0x44, 0x2a, 0x60, 0xda, 0x56, 0x10, + 0xf4, 0x5b, 0xcc, 0xcb, 0xea, 0x31, 0x0b, 0x14, 0xb5, 0x19, 0x89, 0x79, 0x08, 0x1c, 0x67, 0x60, + 0x22, 0x04, 0x0f, 0x4b, 0x0a, 0x37, 0x7d, 0x67, 0xe4, 0x4c, 0x65, 0xe9, 0x33, 0x9c, 0xaa, 0x63, + 0x13, 0x40, 0x2d, 0x9a, 0x5e, 0x34, 0xc3, 0xe8, 0x03, 0xce, 0xf0, 0xa6, 0xfc, 0x7d, 0xdc, 0x96, + 0x61, 0xdc, 0x10, 0xb5, 0x7e, 0x8e, 0x96, 0x06, 0x8e, 0x71, 0xf2, 0x75, 0x03, 0xe5, 0x62, 0xc2, + 0x75, 0x01, 0x71, 0x60, 0xcc, 0x16, 0x1a, 0x6d, 0x4b, 0x2d, 0xcc, 0xf6, 0xeb, 0x8f, 0xf0, 0x48, + 0xda, 0xba, 0x50, 0xa3, 0xd4, 0x85, 0x29, 0x2d, 0x0f, 0x35, 0xbd, 0x6b, 0xfb, 0xb6, 0x90, 0xda, + 0xbd, 0x93, 0x86, 0xfc, 0x91, 0xb7, 0x61, 0x60, 0xf9, 0x08, 0x62, 0x61, 0xb4, 0xa8, 0xe9, 0x41, + 0x2b, 0x76, 0x9b, 0xdc, 0x7c, 0xa5, 0xba, 0x5e, 0xac, 0x27, 0x48, 0xac, 0x53, 0x55, 0x59, 0xf7, + 0x85, 0xd2, 0x86, 0x29, 0xdd, 0x46, 0xe7, 0x4e, 0x37, 0xcd, 0xd6, 0x46, 0xe4, 0x4b, 0x30, 0x85, + 0x51, 0x74, 0xe7, 0xd5, 0xf6, 0xcb, 0xa3, 0xb3, 0x65, 0x60, 0xca, 0x80, 0xf0, 0xd3, 0xed, 0x53, + 0x23, 0x37, 0x01, 
0x15, 0x5e, 0x7a, 0xdc, 0x2a, 0x9a, 0x57, 0x69, 0xb3, 0x98, 0x88, 0xf7, 0x0d, + 0xae, 0x52, 0xaf, 0xbb, 0xd6, 0x56, 0x03, 0x25, 0x8b, 0xe8, 0xab, 0xe1, 0x50, 0xee, 0xd0, 0x3f, + 0x63, 0x13, 0x3d, 0x1d, 0xd2, 0x56, 0x41, 0x07, 0xb0, 0x8b, 0x2c, 0x39, 0x1d, 0x30, 0xe1, 0xaa, + 0x82, 0x18, 0x1c, 0x2f, 0xb6, 0x16, 0xff, 0xeb, 0x91, 0xd0, 0x6e, 0xd5, 0x7f, 0x61, 0x5f, 0x91, + 0xcf, 0x5b, 0x68, 0xdb, 0x4f, 0x11, 0x0a, 0xcf, 0x2a, 0x6c, 0xf0, 0x05, 0xd7, 0x71, 0xcc, 0x27, + 0xd0, 0x6c, 0x8d, 0x46, 0xe7, 0x19, 0xfd, 0x2b, 0x6e, 0x6e, 0x27, 0xd1, 0x9d, 0x2b, 0x62, 0x2d, + 0xd0, 0x51, 0x78, 0x1e, 0xb9, 0x88, 0x0d, 0xaf, 0xe3, 0xe0, 0x7f, 0x67, 0xc8, 0xfa, 0x77, 0xb3, + 0x1f, 0x7a, 0x3f, 0xfd, 0xbd, 0x3b, 0x38, 0xdf, 0xfd, 0x6e, 0xb5, 0xf8, 0x99, 0x6b, 0xcf, 0x7c, + 0x23, 0x16, 0xb0, 0x83, 0xd9, 0xf7, 0x7b, 0x47, 0x07, 0xbd, 0xd9, 0xf1, 0xc1, 0xc6, 0xc1, 0xf3, + 0xa3, 0xfa, 0x23, 0x38, 0x7d, 0x6c, 0xbf, 0x38, 0x9c, 0xbf, 0xea, 0xdb, 0xfb, 0xcb, 0xe6, 0xe7, + 0x9f, 0x67, 0xdf, 0x18, 0x71, 0xfe, 0xef, 0xb3, 0x6b, 0xa3, 0xe6, 0x30, 0xbe, 0x3f, 0xef, 0x35, + 0x7f, 0xf1, 0xdf, 0x64, 0xb3, 0x57, 0x5f, 0x39, 0xff, 0x4f, 0x32, 0xd8, 0xe5, 0xfb, 0xf3, 0xdd, + 0xd6, 0x60, 0x77, 0xb2, 0xb9, 0xdb, 0x18, 0xfc, 0xf7, 0xec, 0x7a, 0x23, 0xed, 0xf7, 0xf9, 0xfe, + 0xbc, 0xdf, 0x6f, 0x4d, 0xfa, 0xfd, 0xc9, 0x66, 0xbf, 0xdf, 0x18, 0x7d, 0xfb, 0x66, 0xed, 0xdd, + 0xe3, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0xe2, 0x41, 0x47, 0xff, 0x18, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/custom_interest_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/custom_interest_error.pb.go new file mode 100644 index 0000000..8fb3084 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/custom_interest_error.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/custom_interest_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible custom interest errors. +type CustomInterestErrorEnum_CustomInterestError int32 + +const ( + // Enum unspecified. + CustomInterestErrorEnum_UNSPECIFIED CustomInterestErrorEnum_CustomInterestError = 0 + // The received error code is not known in this version. + CustomInterestErrorEnum_UNKNOWN CustomInterestErrorEnum_CustomInterestError = 1 + // Duplicate custom interest name ignoring case. + CustomInterestErrorEnum_NAME_ALREADY_USED CustomInterestErrorEnum_CustomInterestError = 2 + // In the remove custom interest member operation, both member ID and + // pair [type, parameter] are not present. + CustomInterestErrorEnum_CUSTOM_INTEREST_MEMBER_ID_AND_TYPE_PARAMETER_NOT_PRESENT_IN_REMOVE CustomInterestErrorEnum_CustomInterestError = 3 + // The pair of [type, parameter] does not exist. 
+ CustomInterestErrorEnum_TYPE_AND_PARAMETER_NOT_FOUND CustomInterestErrorEnum_CustomInterestError = 4 + // The pair of [type, parameter] already exists. + CustomInterestErrorEnum_TYPE_AND_PARAMETER_ALREADY_EXISTED CustomInterestErrorEnum_CustomInterestError = 5 + // Unsupported custom interest member type. + CustomInterestErrorEnum_INVALID_CUSTOM_INTEREST_MEMBER_TYPE CustomInterestErrorEnum_CustomInterestError = 6 + // Cannot remove a custom interest while it's still being targeted. + CustomInterestErrorEnum_CANNOT_REMOVE_WHILE_IN_USE CustomInterestErrorEnum_CustomInterestError = 7 + // Cannot mutate custom interest type. + CustomInterestErrorEnum_CANNOT_CHANGE_TYPE CustomInterestErrorEnum_CustomInterestError = 8 +) + +var CustomInterestErrorEnum_CustomInterestError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NAME_ALREADY_USED", + 3: "CUSTOM_INTEREST_MEMBER_ID_AND_TYPE_PARAMETER_NOT_PRESENT_IN_REMOVE", + 4: "TYPE_AND_PARAMETER_NOT_FOUND", + 5: "TYPE_AND_PARAMETER_ALREADY_EXISTED", + 6: "INVALID_CUSTOM_INTEREST_MEMBER_TYPE", + 7: "CANNOT_REMOVE_WHILE_IN_USE", + 8: "CANNOT_CHANGE_TYPE", +} +var CustomInterestErrorEnum_CustomInterestError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NAME_ALREADY_USED": 2, + "CUSTOM_INTEREST_MEMBER_ID_AND_TYPE_PARAMETER_NOT_PRESENT_IN_REMOVE": 3, + "TYPE_AND_PARAMETER_NOT_FOUND": 4, + "TYPE_AND_PARAMETER_ALREADY_EXISTED": 5, + "INVALID_CUSTOM_INTEREST_MEMBER_TYPE": 6, + "CANNOT_REMOVE_WHILE_IN_USE": 7, + "CANNOT_CHANGE_TYPE": 8, +} + +func (x CustomInterestErrorEnum_CustomInterestError) String() string { + return proto.EnumName(CustomInterestErrorEnum_CustomInterestError_name, int32(x)) +} +func (CustomInterestErrorEnum_CustomInterestError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_error_c7ecd8e8f5b7a014, []int{0, 0} +} + +// Container for enum describing possible custom interest errors. 
+type CustomInterestErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestErrorEnum) Reset() { *m = CustomInterestErrorEnum{} } +func (m *CustomInterestErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CustomInterestErrorEnum) ProtoMessage() {} +func (*CustomInterestErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_error_c7ecd8e8f5b7a014, []int{0} +} +func (m *CustomInterestErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestErrorEnum.Unmarshal(m, b) +} +func (m *CustomInterestErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CustomInterestErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestErrorEnum.Merge(dst, src) +} +func (m *CustomInterestErrorEnum) XXX_Size() int { + return xxx_messageInfo_CustomInterestErrorEnum.Size(m) +} +func (m *CustomInterestErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomInterestErrorEnum)(nil), "google.ads.googleads.v1.errors.CustomInterestErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CustomInterestErrorEnum_CustomInterestError", CustomInterestErrorEnum_CustomInterestError_name, CustomInterestErrorEnum_CustomInterestError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/custom_interest_error.proto", fileDescriptor_custom_interest_error_c7ecd8e8f5b7a014) +} + +var fileDescriptor_custom_interest_error_c7ecd8e8f5b7a014 = []byte{ + // 448 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xdf, 0x6a, 0xdb, 0x30, + 0x18, 0xc5, 0x17, 0x77, 0x6b, 0x87, 0x7a, 0x31, 0x4f, 0x63, 0x7f, 0x28, 0x25, 0x8c, 0x0c, 0xb6, + 0x3b, 0x1b, 0xb3, 0x3b, 0xed, 0x4a, 0xb1, 0xbe, 0xa4, 0x62, 0xb1, 0x6c, 0xfc, 0xaf, 0xeb, 0x08, + 0x08, 0xaf, 0x36, 0x26, 0xd0, 0x58, 0xc1, 0x72, 0xfb, 0x40, 0xbb, 0xdc, 0x3b, 0xec, 0x05, 0xf6, + 0x06, 0x7b, 0x85, 0xdd, 0xee, 0x05, 0x86, 0xad, 0x24, 0x30, 0x48, 0x7b, 0xe5, 0x83, 0x74, 0x7e, + 0x47, 0x07, 0x7f, 0x1f, 0x22, 0xb5, 0x52, 0xf5, 0x4d, 0xe5, 0x16, 0xa5, 0x76, 0x8d, 0xec, 0xd5, + 0x9d, 0xe7, 0x56, 0x6d, 0xab, 0x5a, 0xed, 0x5e, 0xdf, 0xea, 0x4e, 0xad, 0xe5, 0xaa, 0xe9, 0xaa, + 0xb6, 0xd2, 0x9d, 0x1c, 0x8e, 0x9d, 0x4d, 0xab, 0x3a, 0x85, 0xc7, 0x06, 0x70, 0x8a, 0x52, 0x3b, + 0x7b, 0xd6, 0xb9, 0xf3, 0x1c, 0xc3, 0x9e, 0x9d, 0xef, 0xb2, 0x37, 0x2b, 0xb7, 0x68, 0x1a, 0xd5, + 0x15, 0xdd, 0x4a, 0x35, 0xda, 0xd0, 0x93, 0xdf, 0x16, 0x7a, 0xed, 0x0f, 0xe9, 0x7c, 0x1b, 0x0e, + 0x3d, 0x06, 0xcd, 0xed, 0x7a, 0xf2, 0xd3, 0x42, 0x2f, 0x0e, 0xdc, 0xe1, 0x67, 0xe8, 0x34, 0x13, + 0x49, 0x04, 0x3e, 0x9f, 0x71, 0x60, 0xf6, 0x23, 0x7c, 0x8a, 0x4e, 0x32, 0xf1, 0x59, 0x84, 0x97, + 0xc2, 0x1e, 0xe1, 0x97, 0xe8, 0xb9, 0xa0, 0x01, 0x48, 0xba, 0x88, 0x81, 0xb2, 0x2b, 0x99, 0x25, + 0xc0, 0x6c, 0x0b, 0xcf, 0xd0, 0xd4, 0xcf, 0x92, 0x34, 0x0c, 0x24, 0x17, 0x29, 0xc4, 0x90, 0xa4, + 0x32, 0x80, 0x60, 0x0a, 0xb1, 0xe4, 0x4c, 0x52, 0xc1, 0x64, 0x7a, 0x15, 0x81, 0x8c, 0x68, 0x4c, + 0x03, 0x48, 0x21, 0x96, 0x22, 0x4c, 0x65, 0x14, 0x43, 0x02, 0x22, 0x95, 0x5c, 0xc8, 0x18, 0x82, + 0x30, 0x07, 0xfb, 0x08, 0xbf, 0x45, 0xe7, 0x83, 0xb3, 0x47, 0xfe, 0x77, 0xcf, 0xc2, 0x4c, 0x30, + 0xfb, 0x31, 
0x7e, 0x8f, 0x26, 0x07, 0x1c, 0xbb, 0x3a, 0xf0, 0x85, 0x27, 0x29, 0x30, 0xfb, 0x09, + 0xfe, 0x80, 0xde, 0x71, 0x91, 0xd3, 0x05, 0x67, 0xf2, 0x9e, 0x66, 0x7d, 0x8c, 0x7d, 0x8c, 0xc7, + 0xe8, 0xcc, 0xa7, 0xa2, 0x7f, 0xc2, 0xb4, 0x90, 0x97, 0x17, 0x7c, 0x01, 0x7d, 0xab, 0x2c, 0x01, + 0xfb, 0x04, 0xbf, 0x42, 0x78, 0x7b, 0xef, 0x5f, 0x50, 0x31, 0x07, 0xc3, 0x3d, 0x9d, 0xfe, 0x1d, + 0xa1, 0xc9, 0xb5, 0x5a, 0x3b, 0x0f, 0x0f, 0x68, 0xfa, 0xe6, 0xc0, 0x3f, 0x8e, 0xfa, 0xe1, 0x44, + 0xa3, 0xaf, 0x6c, 0xcb, 0xd6, 0xea, 0xa6, 0x68, 0x6a, 0x47, 0xb5, 0xb5, 0x5b, 0x57, 0xcd, 0x30, + 0xba, 0xdd, 0xa2, 0x6c, 0x56, 0xfa, 0xbe, 0xbd, 0xf9, 0x64, 0x3e, 0xdf, 0xad, 0xa3, 0x39, 0xa5, + 0x3f, 0xac, 0xf1, 0xdc, 0x84, 0xd1, 0x52, 0x3b, 0x46, 0xf6, 0x2a, 0xf7, 0x9c, 0xe1, 0x49, 0xfd, + 0x6b, 0x67, 0x58, 0xd2, 0x52, 0x2f, 0xf7, 0x86, 0x65, 0xee, 0x2d, 0x8d, 0xe1, 0x8f, 0x35, 0x31, + 0xa7, 0x84, 0xd0, 0x52, 0x13, 0xb2, 0xb7, 0x10, 0x92, 0x7b, 0x84, 0x18, 0xd3, 0xb7, 0xe3, 0xa1, + 0xdd, 0xc7, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xac, 0x31, 0xb2, 0xd6, 0xd4, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_client_link_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_client_link_error.pb.go new file mode 100644 index 0000000..c83428a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_client_link_error.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/customer_client_link_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible CustomerClientLink errors. +type CustomerClientLinkErrorEnum_CustomerClientLinkError int32 + +const ( + // Enum unspecified. + CustomerClientLinkErrorEnum_UNSPECIFIED CustomerClientLinkErrorEnum_CustomerClientLinkError = 0 + // The received error code is not known in this version. + CustomerClientLinkErrorEnum_UNKNOWN CustomerClientLinkErrorEnum_CustomerClientLinkError = 1 + // Trying to manage a client that already in being managed by customer. + CustomerClientLinkErrorEnum_CLIENT_ALREADY_INVITED_BY_THIS_MANAGER CustomerClientLinkErrorEnum_CustomerClientLinkError = 2 + // Already managed by some other manager in the hierarchy. + CustomerClientLinkErrorEnum_CLIENT_ALREADY_MANAGED_IN_HIERARCHY CustomerClientLinkErrorEnum_CustomerClientLinkError = 3 + // Attempt to create a cycle in the hierarchy. + CustomerClientLinkErrorEnum_CYCLIC_LINK_NOT_ALLOWED CustomerClientLinkErrorEnum_CustomerClientLinkError = 4 + // Managed accounts has the maximum number of linked accounts. + CustomerClientLinkErrorEnum_CUSTOMER_HAS_TOO_MANY_ACCOUNTS CustomerClientLinkErrorEnum_CustomerClientLinkError = 5 + // Invitor has the maximum pending invitations. 
+ CustomerClientLinkErrorEnum_CLIENT_HAS_TOO_MANY_INVITATIONS CustomerClientLinkErrorEnum_CustomerClientLinkError = 6 + // Attempt to change hidden status of a link that is not active. + CustomerClientLinkErrorEnum_CANNOT_HIDE_OR_UNHIDE_MANAGER_ACCOUNTS CustomerClientLinkErrorEnum_CustomerClientLinkError = 7 + // Parent manager account has the maximum number of linked accounts. + CustomerClientLinkErrorEnum_CUSTOMER_HAS_TOO_MANY_ACCOUNTS_AT_MANAGER CustomerClientLinkErrorEnum_CustomerClientLinkError = 8 +) + +var CustomerClientLinkErrorEnum_CustomerClientLinkError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CLIENT_ALREADY_INVITED_BY_THIS_MANAGER", + 3: "CLIENT_ALREADY_MANAGED_IN_HIERARCHY", + 4: "CYCLIC_LINK_NOT_ALLOWED", + 5: "CUSTOMER_HAS_TOO_MANY_ACCOUNTS", + 6: "CLIENT_HAS_TOO_MANY_INVITATIONS", + 7: "CANNOT_HIDE_OR_UNHIDE_MANAGER_ACCOUNTS", + 8: "CUSTOMER_HAS_TOO_MANY_ACCOUNTS_AT_MANAGER", +} +var CustomerClientLinkErrorEnum_CustomerClientLinkError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CLIENT_ALREADY_INVITED_BY_THIS_MANAGER": 2, + "CLIENT_ALREADY_MANAGED_IN_HIERARCHY": 3, + "CYCLIC_LINK_NOT_ALLOWED": 4, + "CUSTOMER_HAS_TOO_MANY_ACCOUNTS": 5, + "CLIENT_HAS_TOO_MANY_INVITATIONS": 6, + "CANNOT_HIDE_OR_UNHIDE_MANAGER_ACCOUNTS": 7, + "CUSTOMER_HAS_TOO_MANY_ACCOUNTS_AT_MANAGER": 8, +} + +func (x CustomerClientLinkErrorEnum_CustomerClientLinkError) String() string { + return proto.EnumName(CustomerClientLinkErrorEnum_CustomerClientLinkError_name, int32(x)) +} +func (CustomerClientLinkErrorEnum_CustomerClientLinkError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_error_c03bfa9f5c874866, []int{0, 0} +} + +// Container for enum describing possible CustomeClientLink errors. 
+type CustomerClientLinkErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerClientLinkErrorEnum) Reset() { *m = CustomerClientLinkErrorEnum{} } +func (m *CustomerClientLinkErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CustomerClientLinkErrorEnum) ProtoMessage() {} +func (*CustomerClientLinkErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_error_c03bfa9f5c874866, []int{0} +} +func (m *CustomerClientLinkErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerClientLinkErrorEnum.Unmarshal(m, b) +} +func (m *CustomerClientLinkErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerClientLinkErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerClientLinkErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerClientLinkErrorEnum.Merge(dst, src) +} +func (m *CustomerClientLinkErrorEnum) XXX_Size() int { + return xxx_messageInfo_CustomerClientLinkErrorEnum.Size(m) +} +func (m *CustomerClientLinkErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerClientLinkErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerClientLinkErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerClientLinkErrorEnum)(nil), "google.ads.googleads.v1.errors.CustomerClientLinkErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CustomerClientLinkErrorEnum_CustomerClientLinkError", CustomerClientLinkErrorEnum_CustomerClientLinkError_name, CustomerClientLinkErrorEnum_CustomerClientLinkError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/customer_client_link_error.proto", fileDescriptor_customer_client_link_error_c03bfa9f5c874866) +} + +var fileDescriptor_customer_client_link_error_c03bfa9f5c874866 = []byte{ + // 453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0x4f, 0x6b, 0xd4, 0x40, + 0x14, 0x77, 0x53, 0x6d, 0x65, 0x7a, 0x30, 0xe4, 0x52, 0xb0, 0x65, 0x85, 0x14, 0x14, 0x05, 0x13, + 0x82, 0xb7, 0x78, 0x90, 0xd9, 0xc9, 0xb8, 0x19, 0x9a, 0x4e, 0x96, 0xfc, 0xd9, 0x12, 0x59, 0x78, + 0xc4, 0x4d, 0x08, 0xa1, 0xbb, 0x99, 0x25, 0x93, 0xf6, 0x03, 0x79, 0xf4, 0xa3, 0x08, 0x7e, 0x11, + 0xc1, 0xb3, 0x57, 0x49, 0x66, 0x77, 0xc5, 0xc2, 0xf6, 0x94, 0x1f, 0x79, 0xbf, 0x7f, 0xc9, 0x7b, + 0xe8, 0x53, 0x25, 0x44, 0xb5, 0x2a, 0xed, 0xbc, 0x90, 0xb6, 0x82, 0x3d, 0xba, 0x77, 0xec, 0xb2, + 0x6d, 0x45, 0x2b, 0xed, 0xe5, 0x9d, 0xec, 0xc4, 0xba, 0x6c, 0x61, 0xb9, 0xaa, 0xcb, 0xa6, 0x83, + 0x55, 0xdd, 0xdc, 0xc2, 0x30, 0xb3, 0x36, 0xad, 0xe8, 0x84, 0x31, 0x56, 0x2a, 0x2b, 0x2f, 0xa4, + 0xb5, 0x37, 0xb0, 0xee, 0x1d, 0x4b, 0x19, 0xbc, 0xbc, 0xd8, 0x05, 0x6c, 0x6a, 0x3b, 0x6f, 0x1a, + 0xd1, 0xe5, 0x5d, 0x2d, 0x1a, 0xa9, 0xd4, 0xe6, 0x6f, 0x0d, 0x9d, 0x93, 0x6d, 0x04, 0x19, 0x12, + 0x82, 0xba, 0xb9, 0xa5, 0xbd, 0x94, 0x36, 0x77, 0x6b, 0xf3, 0xa7, 0x86, 0xce, 0x0e, 0xcc, 0x8d, + 0x17, 0xe8, 0x34, 0xe5, 0xf1, 0x8c, 0x12, 0xf6, 0x99, 0x51, 0x4f, 0x7f, 0x62, 0x9c, 0xa2, 0x93, + 0x94, 0x5f, 0xf1, 0xf0, 0x86, 0xeb, 0x23, 0xe3, 0x1d, 0x7a, 0x4d, 0x02, 0x46, 0x79, 0x02, 0x38, + 0x88, 0x28, 0xf6, 0x32, 0x60, 0x7c, 0xce, 0x12, 0xea, 0xc1, 0x24, 0x83, 0xc4, 0x67, 0x31, 0x5c, + 0x63, 0x8e, 0xa7, 0x34, 0xd2, 0x35, 0xe3, 0x0d, 0xba, 0x7c, 0xc0, 0x55, 0x33, 0x0f, 0x18, 0x07, + 0x9f, 0xd1, 0x08, 0x47, 0xc4, 0xcf, 0xf4, 0x23, 0xe3, 0x1c, 0x9d, 0x91, 0x8c, 0x04, 0x8c, 
0x40, + 0xc0, 0xf8, 0x15, 0xf0, 0xb0, 0x57, 0x04, 0xe1, 0x0d, 0xf5, 0xf4, 0xa7, 0x86, 0x89, 0xc6, 0x24, + 0x8d, 0x93, 0xf0, 0x9a, 0x46, 0xe0, 0xe3, 0x18, 0x92, 0x30, 0xec, 0x7d, 0x32, 0xc0, 0x84, 0x84, + 0x29, 0x4f, 0x62, 0xfd, 0x99, 0x71, 0x89, 0x5e, 0x6d, 0x93, 0xfe, 0x63, 0x0c, 0xd5, 0x70, 0xc2, + 0x42, 0x1e, 0xeb, 0xc7, 0x43, 0x75, 0xcc, 0x7b, 0x73, 0x9f, 0x79, 0x14, 0xc2, 0x08, 0x52, 0x3e, + 0xa0, 0x6d, 0xe3, 0x7f, 0x86, 0x27, 0xc6, 0x7b, 0xf4, 0xf6, 0xf1, 0x50, 0xc0, 0xc9, 0xfe, 0x4b, + 0x9f, 0x4f, 0xfe, 0x8c, 0x90, 0xb9, 0x14, 0x6b, 0xeb, 0xf1, 0xa5, 0x4d, 0x2e, 0x0e, 0xfc, 0xf3, + 0x59, 0xbf, 0xb4, 0xd9, 0xe8, 0x8b, 0xb7, 0xd5, 0x57, 0x62, 0x95, 0x37, 0x95, 0x25, 0xda, 0xca, + 0xae, 0xca, 0x66, 0x58, 0xe9, 0xee, 0x8a, 0x36, 0xb5, 0x3c, 0x74, 0x54, 0x1f, 0xd5, 0xe3, 0x9b, + 0x76, 0x34, 0xc5, 0xf8, 0xbb, 0x36, 0x9e, 0x2a, 0x33, 0x5c, 0x48, 0x4b, 0xc1, 0x1e, 0xcd, 0x1d, + 0x6b, 0x88, 0x94, 0x3f, 0x76, 0x84, 0x05, 0x2e, 0xe4, 0x62, 0x4f, 0x58, 0xcc, 0x9d, 0x85, 0x22, + 0xfc, 0xd2, 0x4c, 0xf5, 0xd6, 0x75, 0x71, 0x21, 0x5d, 0x77, 0x4f, 0x71, 0xdd, 0xb9, 0xe3, 0xba, + 0x8a, 0xf4, 0xf5, 0x78, 0x68, 0xf7, 0xe1, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe9, 0x0f, 0x8b, + 0x6a, 0xf1, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_error.pb.go new file mode 100644 index 0000000..23693cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_error.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/customer_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Set of errors that are related to requests dealing with Customer. +// Next id: 26 +type CustomerErrorEnum_CustomerError int32 + +const ( + // Enum unspecified. + CustomerErrorEnum_UNSPECIFIED CustomerErrorEnum_CustomerError = 0 + // The received error code is not known in this version. + CustomerErrorEnum_UNKNOWN CustomerErrorEnum_CustomerError = 1 + // Customer status is not allowed to be changed from DRAFT and CLOSED. + // Currency code and at least one of country code and time zone needs to be + // set when status is changed to ENABLED. + CustomerErrorEnum_STATUS_CHANGE_DISALLOWED CustomerErrorEnum_CustomerError = 2 + // CustomerService cannot get a customer that has not been fully set up. 
+ CustomerErrorEnum_ACCOUNT_NOT_SET_UP CustomerErrorEnum_CustomerError = 3 +) + +var CustomerErrorEnum_CustomerError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "STATUS_CHANGE_DISALLOWED", + 3: "ACCOUNT_NOT_SET_UP", +} +var CustomerErrorEnum_CustomerError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "STATUS_CHANGE_DISALLOWED": 2, + "ACCOUNT_NOT_SET_UP": 3, +} + +func (x CustomerErrorEnum_CustomerError) String() string { + return proto.EnumName(CustomerErrorEnum_CustomerError_name, int32(x)) +} +func (CustomerErrorEnum_CustomerError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_error_6cd97071f19e5e21, []int{0, 0} +} + +// Container for enum describing possible customer errors. +type CustomerErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerErrorEnum) Reset() { *m = CustomerErrorEnum{} } +func (m *CustomerErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CustomerErrorEnum) ProtoMessage() {} +func (*CustomerErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_error_6cd97071f19e5e21, []int{0} +} +func (m *CustomerErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerErrorEnum.Unmarshal(m, b) +} +func (m *CustomerErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerErrorEnum.Merge(dst, src) +} +func (m *CustomerErrorEnum) XXX_Size() int { + return xxx_messageInfo_CustomerErrorEnum.Size(m) +} +func (m *CustomerErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerErrorEnum)(nil), "google.ads.googleads.v1.errors.CustomerErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CustomerErrorEnum_CustomerError", CustomerErrorEnum_CustomerError_name, CustomerErrorEnum_CustomerError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/customer_error.proto", fileDescriptor_customer_error_6cd97071f19e5e21) +} + +var fileDescriptor_customer_error_6cd97071f19e5e21 = []byte{ + // 321 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x07, 0x0a, 0x19, 0x62, 0xcd, 0x41, 0x44, 0xc6, 0x0e, 0x7d, 0x80, 0x94, 0xb2, + 0x5b, 0x3c, 0x65, 0x6d, 0x9c, 0xc3, 0x91, 0x16, 0xda, 0x6e, 0x20, 0x85, 0x52, 0xd7, 0x12, 0x06, + 0x5b, 0x32, 0x92, 0x6e, 0xf8, 0x3c, 0x1e, 0x7d, 0x14, 0x1f, 0x45, 0xf0, 0x1d, 0xa4, 0xcd, 0x56, + 0xd8, 0x41, 0x4f, 0xf9, 0xf2, 0xe7, 0xf7, 0x7d, 0xf9, 0xf2, 0x07, 0x63, 0x2e, 0x25, 0xdf, 0x54, + 0x6e, 0x51, 0x6a, 0xd7, 0xc8, 0x46, 0x1d, 0x3c, 0xb7, 0x52, 0x4a, 0x2a, 0xed, 0xae, 0xf6, 0xba, + 0x96, 0xdb, 0x4a, 0xe5, 0xed, 0x1d, 0xed, 0x94, 0xac, 0x25, 0x1c, 0x19, 0x12, 0x15, 0xa5, 0x46, + 0x9d, 0x09, 0x1d, 0x3c, 0x64, 0x4c, 0x0f, 0xc3, 0x53, 0xe8, 0x6e, 0xed, 0x16, 0x42, 0xc8, 0xba, + 0xa8, 0xd7, 0x52, 0x68, 0xe3, 0x76, 0xde, 0xc1, 0xad, 0x7f, 0x4c, 0xa5, 0x0d, 0x4f, 0xc5, 0x7e, + 0xeb, 0xac, 0xc0, 0xf5, 0xd9, 0x10, 0xde, 0x80, 0x41, 0xca, 0xe2, 0x88, 0xfa, 0xb3, 0xa7, 0x19, + 0x0d, 0xec, 0x0b, 0x38, 0x00, 0x57, 0x29, 0x7b, 0x61, 0xe1, 0x92, 0xd9, 0x3d, 0x38, 0x04, 0xf7, + 0x71, 0x42, 0x92, 0x34, 0xce, 
0xfd, 0x67, 0xc2, 0xa6, 0x34, 0x0f, 0x66, 0x31, 0x99, 0xcf, 0xc3, + 0x25, 0x0d, 0x6c, 0x0b, 0xde, 0x01, 0x48, 0x7c, 0x3f, 0x4c, 0x59, 0x92, 0xb3, 0x30, 0xc9, 0x63, + 0x9a, 0xe4, 0x69, 0x64, 0xf7, 0x27, 0x3f, 0x3d, 0xe0, 0xac, 0xe4, 0x16, 0xfd, 0x5f, 0x7f, 0x02, + 0xcf, 0x9a, 0x44, 0x4d, 0xe9, 0xa8, 0xf7, 0x1a, 0x1c, 0x5d, 0x5c, 0x6e, 0x0a, 0xc1, 0x91, 0x54, + 0xdc, 0xe5, 0x95, 0x68, 0xbf, 0x74, 0xda, 0xdc, 0x6e, 0xad, 0xff, 0x5a, 0xe4, 0xa3, 0x39, 0x3e, + 0xac, 0xfe, 0x94, 0x90, 0x4f, 0x6b, 0x34, 0x35, 0x61, 0xa4, 0xd4, 0xc8, 0xc8, 0x46, 0x2d, 0x3c, + 0xd4, 0x3e, 0xa9, 0xbf, 0x4e, 0x40, 0x46, 0x4a, 0x9d, 0x75, 0x40, 0xb6, 0xf0, 0x32, 0x03, 0x7c, + 0x5b, 0x8e, 0x99, 0x62, 0x4c, 0x4a, 0x8d, 0x71, 0x87, 0x60, 0xbc, 0xf0, 0x30, 0x36, 0xd0, 0xdb, + 0x65, 0xdb, 0x6e, 0xfc, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x03, 0x84, 0xf7, 0x6d, 0xe5, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_feed_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_feed_error.pb.go new file mode 100644 index 0000000..34cdef1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_feed_error.pb.go @@ -0,0 +1,147 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/customer_feed_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible customer feed errors. +type CustomerFeedErrorEnum_CustomerFeedError int32 + +const ( + // Enum unspecified. + CustomerFeedErrorEnum_UNSPECIFIED CustomerFeedErrorEnum_CustomerFeedError = 0 + // The received error code is not known in this version. + CustomerFeedErrorEnum_UNKNOWN CustomerFeedErrorEnum_CustomerFeedError = 1 + // An active feed already exists for this customer and place holder type. + CustomerFeedErrorEnum_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE CustomerFeedErrorEnum_CustomerFeedError = 2 + // The specified feed is removed. + CustomerFeedErrorEnum_CANNOT_CREATE_FOR_REMOVED_FEED CustomerFeedErrorEnum_CustomerFeedError = 3 + // The CustomerFeed already exists. Update should be used to modify the + // existing CustomerFeed. + CustomerFeedErrorEnum_CANNOT_CREATE_ALREADY_EXISTING_CUSTOMER_FEED CustomerFeedErrorEnum_CustomerFeedError = 4 + // Cannot update removed customer feed. + CustomerFeedErrorEnum_CANNOT_MODIFY_REMOVED_CUSTOMER_FEED CustomerFeedErrorEnum_CustomerFeedError = 5 + // Invalid placeholder type. + CustomerFeedErrorEnum_INVALID_PLACEHOLDER_TYPE CustomerFeedErrorEnum_CustomerFeedError = 6 + // Feed mapping for this placeholder type does not exist. + CustomerFeedErrorEnum_MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE CustomerFeedErrorEnum_CustomerFeedError = 7 + // Placeholder not allowed at the account level. 
+ CustomerFeedErrorEnum_PLACEHOLDER_TYPE_NOT_ALLOWED_ON_CUSTOMER_FEED CustomerFeedErrorEnum_CustomerFeedError = 8 +) + +var CustomerFeedErrorEnum_CustomerFeedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 3: "CANNOT_CREATE_FOR_REMOVED_FEED", + 4: "CANNOT_CREATE_ALREADY_EXISTING_CUSTOMER_FEED", + 5: "CANNOT_MODIFY_REMOVED_CUSTOMER_FEED", + 6: "INVALID_PLACEHOLDER_TYPE", + 7: "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE", + 8: "PLACEHOLDER_TYPE_NOT_ALLOWED_ON_CUSTOMER_FEED", +} +var CustomerFeedErrorEnum_CustomerFeedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 2, + "CANNOT_CREATE_FOR_REMOVED_FEED": 3, + "CANNOT_CREATE_ALREADY_EXISTING_CUSTOMER_FEED": 4, + "CANNOT_MODIFY_REMOVED_CUSTOMER_FEED": 5, + "INVALID_PLACEHOLDER_TYPE": 6, + "MISSING_FEEDMAPPING_FOR_PLACEHOLDER_TYPE": 7, + "PLACEHOLDER_TYPE_NOT_ALLOWED_ON_CUSTOMER_FEED": 8, +} + +func (x CustomerFeedErrorEnum_CustomerFeedError) String() string { + return proto.EnumName(CustomerFeedErrorEnum_CustomerFeedError_name, int32(x)) +} +func (CustomerFeedErrorEnum_CustomerFeedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_error_b47213809494701d, []int{0, 0} +} + +// Container for enum describing possible customer feed errors. +type CustomerFeedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerFeedErrorEnum) Reset() { *m = CustomerFeedErrorEnum{} } +func (m *CustomerFeedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CustomerFeedErrorEnum) ProtoMessage() {} +func (*CustomerFeedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_error_b47213809494701d, []int{0} +} +func (m *CustomerFeedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerFeedErrorEnum.Unmarshal(m, b) +} +func (m *CustomerFeedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerFeedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerFeedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerFeedErrorEnum.Merge(dst, src) +} +func (m *CustomerFeedErrorEnum) XXX_Size() int { + return xxx_messageInfo_CustomerFeedErrorEnum.Size(m) +} +func (m *CustomerFeedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerFeedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerFeedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerFeedErrorEnum)(nil), "google.ads.googleads.v1.errors.CustomerFeedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CustomerFeedErrorEnum_CustomerFeedError", CustomerFeedErrorEnum_CustomerFeedError_name, CustomerFeedErrorEnum_CustomerFeedError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/customer_feed_error.proto", fileDescriptor_customer_feed_error_b47213809494701d) +} + +var fileDescriptor_customer_feed_error_b47213809494701d = []byte{ + // 443 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0x69, 0x06, 0x1b, 0xf2, 0x0e, 0x14, 0x4b, 0x20, 0x84, 0xa6, 0x1e, 0xc2, 0x01, 0x0e, + 0xc3, 0x21, 0xe2, 0x82, 0xc2, 0xc9, 0x8b, 0x9d, 0x12, 0x91, 0xd8, 0x51, 0x92, 0x66, 0x14, 0x55, + 0xb2, 0xc2, 0x12, 0xa2, 0x4a, 0x6b, 0x5c, 0xc5, 0xdd, 0x3e, 0x10, 0x47, 0x3e, 0x0a, 
0xdf, 0x81, + 0x2f, 0xc0, 0x8d, 0x13, 0x57, 0xe4, 0xb8, 0xad, 0xd4, 0x16, 0x76, 0xca, 0x5f, 0x2f, 0xbf, 0xff, + 0xff, 0x3d, 0xeb, 0x3d, 0xf0, 0xae, 0x91, 0xb2, 0xb9, 0xae, 0x9d, 0xb2, 0x52, 0x8e, 0x91, 0x5a, + 0xdd, 0xba, 0x4e, 0xdd, 0x75, 0xb2, 0x53, 0xce, 0xd5, 0x8d, 0x5a, 0xc9, 0x45, 0xdd, 0x89, 0xaf, + 0x75, 0x5d, 0x89, 0xbe, 0x88, 0x96, 0x9d, 0x5c, 0x49, 0x38, 0x32, 0x38, 0x2a, 0x2b, 0x85, 0xb6, + 0x4e, 0x74, 0xeb, 0x22, 0xe3, 0x7c, 0x7e, 0xb6, 0x49, 0x5e, 0xce, 0x9d, 0xb2, 0x6d, 0xe5, 0xaa, + 0x5c, 0xcd, 0x65, 0xab, 0x8c, 0xdb, 0xfe, 0x63, 0x81, 0x27, 0xfe, 0x3a, 0x3b, 0xa8, 0xeb, 0x8a, + 0x6a, 0x13, 0x6d, 0x6f, 0x16, 0xf6, 0x4f, 0x0b, 0x3c, 0x3e, 0xf8, 0x03, 0x1f, 0x81, 0xd3, 0x09, + 0xcb, 0x12, 0xea, 0x87, 0x41, 0x48, 0xc9, 0xf0, 0x1e, 0x3c, 0x05, 0x27, 0x13, 0xf6, 0x91, 0xf1, + 0x4b, 0x36, 0x1c, 0xc0, 0x73, 0xf0, 0x2a, 0xa0, 0x94, 0x08, 0x1c, 0xa5, 0x14, 0x93, 0xa9, 0xa0, + 0x9f, 0xc2, 0x2c, 0xcf, 0x44, 0xc0, 0x53, 0x91, 0x44, 0xd8, 0xa7, 0x1f, 0x78, 0x44, 0x68, 0x2a, + 0xf2, 0x69, 0x42, 0x87, 0x16, 0xb4, 0xc1, 0xc8, 0xc7, 0x8c, 0xf1, 0x5c, 0xf8, 0x29, 0xc5, 0x39, + 0xed, 0xb9, 0x94, 0xc6, 0xbc, 0xa0, 0x44, 0xe8, 0x9c, 0xe1, 0x11, 0x7c, 0x03, 0xce, 0x77, 0x99, + 0x9d, 0xe8, 0x90, 0x8d, 0x85, 0x3f, 0xc9, 0x72, 0x1e, 0xd3, 0xd4, 0x38, 0xee, 0xc3, 0x97, 0xe0, + 0xc5, 0xda, 0x11, 0x73, 0x12, 0x06, 0xd3, 0x6d, 0xe2, 0x2e, 0xf8, 0x00, 0x9e, 0x81, 0x67, 0x21, + 0x2b, 0x70, 0x14, 0x92, 0xc3, 0xe1, 0x8e, 0xf5, 0x53, 0xe2, 0x30, 0xcb, 0x74, 0x07, 0xcd, 0xc7, + 0x38, 0x49, 0x7a, 0xfd, 0xaf, 0xa7, 0x9c, 0x40, 0x17, 0xbc, 0xde, 0xaf, 0x0a, 0x3d, 0x02, 0x8e, + 0x22, 0x7e, 0x49, 0x89, 0xe0, 0x6c, 0xaf, 0xfd, 0xc3, 0x8b, 0xdf, 0x03, 0x60, 0x5f, 0xc9, 0x05, + 0xba, 0x7b, 0x7d, 0x17, 0x4f, 0x0f, 0x76, 0x90, 0xe8, 0xc5, 0x25, 0x83, 0xcf, 0x64, 0xed, 0x6c, + 0xe4, 0x75, 0xd9, 0x36, 0x48, 0x76, 0x8d, 0xd3, 0xd4, 0x6d, 0xbf, 0xd6, 0xcd, 0x09, 0x2d, 0xe7, + 0xea, 0x7f, 0x17, 0xf5, 0xde, 0x7c, 0xbe, 0x59, 0x47, 0x63, 0x8c, 0xbf, 0x5b, 0xa3, 0xb1, 0x09, + 0xc3, 0x95, 0x42, 0x46, 0x6a, 0x55, 0xb8, 0xa8, 0x6f, 0xa9, 0x7e, 0x6c, 0x80, 0x19, 0xae, 0xd4, + 0x6c, 0x0b, 0xcc, 0x0a, 0x77, 0x66, 0x80, 0x5f, 0x96, 0x6d, 0xaa, 0x9e, 0x87, 0x2b, 0xe5, 0x79, + 0x5b, 0xc4, 0xf3, 0x0a, 0xd7, 0xf3, 0x0c, 0xf4, 0xe5, 0xb8, 0x9f, 0xee, 0xed, 0xdf, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x64, 0xe1, 0x58, 0x11, 0xee, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_manager_link_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_manager_link_error.pb.go new file mode 100644 index 0000000..de46978 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/customer_manager_link_error.pb.go @@ -0,0 +1,156 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/customer_manager_link_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible CustomerManagerLink errors. +type CustomerManagerLinkErrorEnum_CustomerManagerLinkError int32 + +const ( + // Enum unspecified. + CustomerManagerLinkErrorEnum_UNSPECIFIED CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 0 + // The received error code is not known in this version. + CustomerManagerLinkErrorEnum_UNKNOWN CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 1 + // No pending invitation. + CustomerManagerLinkErrorEnum_NO_PENDING_INVITE CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 2 + // Attempt to operate on the same client more than once in the same call. + CustomerManagerLinkErrorEnum_SAME_CLIENT_MORE_THAN_ONCE_PER_CALL CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 3 + // Manager account has the maximum number of linked accounts. + CustomerManagerLinkErrorEnum_MANAGER_HAS_MAX_NUMBER_OF_LINKED_ACCOUNTS CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 4 + // If no active user on account it cannot be unlinked from its manager. + CustomerManagerLinkErrorEnum_CANNOT_UNLINK_ACCOUNT_WITHOUT_ACTIVE_USER CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 5 + // Account should have at least one active owner on it before being + // unlinked. + CustomerManagerLinkErrorEnum_CANNOT_REMOVE_LAST_CLIENT_ACCOUNT_OWNER CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 6 + // Only account owners may change their permission role. + CustomerManagerLinkErrorEnum_CANNOT_CHANGE_ROLE_BY_NON_ACCOUNT_OWNER CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 7 + // When a client's link to its manager is not active, the link role cannot + // be changed. + CustomerManagerLinkErrorEnum_CANNOT_CHANGE_ROLE_FOR_NON_ACTIVE_LINK_ACCOUNT CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 8 + // Attempt to link a child to a parent that contains or will contain + // duplicate children. + CustomerManagerLinkErrorEnum_DUPLICATE_CHILD_FOUND CustomerManagerLinkErrorEnum_CustomerManagerLinkError = 9 +) + +var CustomerManagerLinkErrorEnum_CustomerManagerLinkError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NO_PENDING_INVITE", + 3: "SAME_CLIENT_MORE_THAN_ONCE_PER_CALL", + 4: "MANAGER_HAS_MAX_NUMBER_OF_LINKED_ACCOUNTS", + 5: "CANNOT_UNLINK_ACCOUNT_WITHOUT_ACTIVE_USER", + 6: "CANNOT_REMOVE_LAST_CLIENT_ACCOUNT_OWNER", + 7: "CANNOT_CHANGE_ROLE_BY_NON_ACCOUNT_OWNER", + 8: "CANNOT_CHANGE_ROLE_FOR_NON_ACTIVE_LINK_ACCOUNT", + 9: "DUPLICATE_CHILD_FOUND", +} +var CustomerManagerLinkErrorEnum_CustomerManagerLinkError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NO_PENDING_INVITE": 2, + "SAME_CLIENT_MORE_THAN_ONCE_PER_CALL": 3, + "MANAGER_HAS_MAX_NUMBER_OF_LINKED_ACCOUNTS": 4, + "CANNOT_UNLINK_ACCOUNT_WITHOUT_ACTIVE_USER": 5, + "CANNOT_REMOVE_LAST_CLIENT_ACCOUNT_OWNER": 6, + "CANNOT_CHANGE_ROLE_BY_NON_ACCOUNT_OWNER": 7, + "CANNOT_CHANGE_ROLE_FOR_NON_ACTIVE_LINK_ACCOUNT": 8, + "DUPLICATE_CHILD_FOUND": 9, +} + +func (x CustomerManagerLinkErrorEnum_CustomerManagerLinkError) String() string { + return proto.EnumName(CustomerManagerLinkErrorEnum_CustomerManagerLinkError_name, int32(x)) +} +func (CustomerManagerLinkErrorEnum_CustomerManagerLinkError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_error_154e0a6b1d419238, []int{0, 0} +} + +// Container for enum describing possible CustomerManagerLink errors. 
+type CustomerManagerLinkErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerManagerLinkErrorEnum) Reset() { *m = CustomerManagerLinkErrorEnum{} } +func (m *CustomerManagerLinkErrorEnum) String() string { return proto.CompactTextString(m) } +func (*CustomerManagerLinkErrorEnum) ProtoMessage() {} +func (*CustomerManagerLinkErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_error_154e0a6b1d419238, []int{0} +} +func (m *CustomerManagerLinkErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerManagerLinkErrorEnum.Unmarshal(m, b) +} +func (m *CustomerManagerLinkErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerManagerLinkErrorEnum.Marshal(b, m, deterministic) +} +func (dst *CustomerManagerLinkErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerManagerLinkErrorEnum.Merge(dst, src) +} +func (m *CustomerManagerLinkErrorEnum) XXX_Size() int { + return xxx_messageInfo_CustomerManagerLinkErrorEnum.Size(m) +} +func (m *CustomerManagerLinkErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerManagerLinkErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerManagerLinkErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CustomerManagerLinkErrorEnum)(nil), "google.ads.googleads.v1.errors.CustomerManagerLinkErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.CustomerManagerLinkErrorEnum_CustomerManagerLinkError", CustomerManagerLinkErrorEnum_CustomerManagerLinkError_name, CustomerManagerLinkErrorEnum_CustomerManagerLinkError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/customer_manager_link_error.proto", fileDescriptor_customer_manager_link_error_154e0a6b1d419238) +} + +var fileDescriptor_customer_manager_link_error_154e0a6b1d419238 = []byte{ + // 496 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcf, 0x6e, 0xd3, 0x40, + 0x10, 0xc6, 0x69, 0x0a, 0x2d, 0x6c, 0x0f, 0x18, 0x4b, 0x95, 0x00, 0x95, 0x1e, 0xc2, 0xa1, 0x42, + 0x08, 0x5b, 0x81, 0x9b, 0xb9, 0xb0, 0x59, 0x4f, 0x12, 0xab, 0xf6, 0xac, 0xe5, 0x7f, 0x01, 0x14, + 0x69, 0x64, 0x9a, 0xc8, 0x8a, 0x9a, 0x78, 0x23, 0x3b, 0xed, 0xf3, 0x20, 0x8e, 0x3c, 0x0a, 0x8f, + 0xc2, 0x13, 0x70, 0x03, 0x39, 0x1b, 0x47, 0x15, 0x22, 0x9c, 0x76, 0xb4, 0xf3, 0x9b, 0xef, 0xfb, + 0xa4, 0x19, 0xf6, 0xa1, 0x50, 0xaa, 0x58, 0xcc, 0xec, 0x7c, 0x5a, 0xdb, 0xba, 0x6c, 0xaa, 0xdb, + 0x9e, 0x3d, 0xab, 0x2a, 0x55, 0xd5, 0xf6, 0xd5, 0x4d, 0xbd, 0x56, 0xcb, 0x59, 0x45, 0xcb, 0xbc, + 0xcc, 0x8b, 0x59, 0x45, 0x8b, 0x79, 0x79, 0x4d, 0x9b, 0xa6, 0xb5, 0xaa, 0xd4, 0x5a, 0x99, 0xe7, + 0x7a, 0xcc, 0xca, 0xa7, 0xb5, 0xb5, 0x53, 0xb0, 0x6e, 0x7b, 0x96, 0x56, 0x78, 0x7e, 0xd6, 0x3a, + 0xac, 0xe6, 0x76, 0x5e, 0x96, 0x6a, 0x9d, 0xaf, 0xe7, 0xaa, 0xac, 0xf5, 0x74, 0xf7, 0xeb, 0x21, + 0x3b, 0x13, 0x5b, 0x8f, 0x40, 0x5b, 0xf8, 0xf3, 0xf2, 0x1a, 0x9a, 0x59, 0x28, 0x6f, 0x96, 0xdd, + 0xdf, 0x1d, 0xf6, 0x74, 0x1f, 0x60, 0x3e, 0x66, 0x27, 0x29, 0xc6, 0x21, 0x08, 0x6f, 0xe0, 0x81, + 0x6b, 0xdc, 0x33, 0x4f, 0xd8, 0x71, 0x8a, 0x97, 0x28, 0xc7, 0x68, 0x1c, 0x98, 0xa7, 0xec, 0x09, + 0x4a, 0x0a, 0x01, 0x5d, 0x0f, 0x87, 0xe4, 0x61, 0xe6, 0x25, 0x60, 0x74, 0xcc, 0x0b, 0xf6, 0x32, + 0xe6, 0x01, 0x90, 0xf0, 0x3d, 0xc0, 0x84, 0x02, 0x19, 0x01, 0x25, 0x23, 0x8e, 0x24, 0x51, 0x00, + 0x85, 0x10, 0x91, 0xe0, 0xbe, 0x6f, 0x1c, 0x9a, 0x6f, 0xd8, 
0xab, 0x80, 0x23, 0x1f, 0x42, 0x44, + 0x23, 0x1e, 0x53, 0xc0, 0x3f, 0x12, 0xa6, 0x41, 0x1f, 0x22, 0x92, 0x03, 0xf2, 0x3d, 0xbc, 0x04, + 0x97, 0xb8, 0x10, 0x32, 0xc5, 0x24, 0x36, 0xee, 0x37, 0xb8, 0xe0, 0x88, 0x32, 0xa1, 0x14, 0x9b, + 0x6e, 0xdb, 0xa3, 0xb1, 0x97, 0x8c, 0x64, 0x9a, 0x10, 0x17, 0x89, 0x97, 0x01, 0xa5, 0x31, 0x44, + 0xc6, 0x03, 0xf3, 0x35, 0xbb, 0xd8, 0xe2, 0x11, 0x04, 0x32, 0x03, 0xf2, 0x79, 0x9c, 0xb4, 0xa1, + 0xda, 0x51, 0x39, 0x46, 0x88, 0x8c, 0xa3, 0x3b, 0xb0, 0x18, 0x71, 0x1c, 0x02, 0x45, 0xd2, 0x07, + 0xea, 0x7f, 0x22, 0x94, 0xf8, 0x17, 0x7c, 0x6c, 0xbe, 0x65, 0xd6, 0x3f, 0xe0, 0x81, 0x8c, 0xb6, + 0xf4, 0x26, 0xc5, 0xdd, 0x84, 0xc6, 0x43, 0xf3, 0x19, 0x3b, 0x75, 0xd3, 0xd0, 0xf7, 0x04, 0x4f, + 0x80, 0xc4, 0xc8, 0xf3, 0x5d, 0x1a, 0xc8, 0x14, 0x5d, 0xe3, 0x51, 0xff, 0xd7, 0x01, 0xeb, 0x5e, + 0xa9, 0xa5, 0xf5, 0xff, 0x3d, 0xf7, 0x5f, 0xec, 0xdb, 0x52, 0xd8, 0x2c, 0x3a, 0x3c, 0xf8, 0xec, + 0x6e, 0x05, 0x0a, 0xb5, 0xc8, 0xcb, 0xc2, 0x52, 0x55, 0x61, 0x17, 0xb3, 0x72, 0x73, 0x06, 0xed, + 0xe9, 0xad, 0xe6, 0xf5, 0xbe, 0x4b, 0x7c, 0xaf, 0x9f, 0x6f, 0x9d, 0xc3, 0x21, 0xe7, 0xdf, 0x3b, + 0xe7, 0x43, 0x2d, 0xc6, 0xa7, 0xb5, 0xa5, 0xcb, 0xa6, 0xca, 0x7a, 0xd6, 0xc6, 0xb2, 0xfe, 0xd1, + 0x02, 0x13, 0x3e, 0xad, 0x27, 0x3b, 0x60, 0x92, 0xf5, 0x26, 0x1a, 0xf8, 0xd9, 0xe9, 0xea, 0x5f, + 0xc7, 0xe1, 0xd3, 0xda, 0x71, 0x76, 0x88, 0xe3, 0x64, 0x3d, 0xc7, 0xd1, 0xd0, 0x97, 0xa3, 0x4d, + 0xba, 0x77, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xcf, 0xcb, 0x93, 0x3c, 0x26, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/database_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/database_error.pb.go new file mode 100644 index 0000000..db62aa9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/database_error.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/database_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible database errors. +type DatabaseErrorEnum_DatabaseError int32 + +const ( + // Enum unspecified. + DatabaseErrorEnum_UNSPECIFIED DatabaseErrorEnum_DatabaseError = 0 + // The received error code is not known in this version. + DatabaseErrorEnum_UNKNOWN DatabaseErrorEnum_DatabaseError = 1 + // Multiple requests were attempting to modify the same resource at once. + // Please retry the request. 
+ DatabaseErrorEnum_CONCURRENT_MODIFICATION DatabaseErrorEnum_DatabaseError = 2 +) + +var DatabaseErrorEnum_DatabaseError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CONCURRENT_MODIFICATION", +} +var DatabaseErrorEnum_DatabaseError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CONCURRENT_MODIFICATION": 2, +} + +func (x DatabaseErrorEnum_DatabaseError) String() string { + return proto.EnumName(DatabaseErrorEnum_DatabaseError_name, int32(x)) +} +func (DatabaseErrorEnum_DatabaseError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_database_error_4830c69527554d32, []int{0, 0} +} + +// Container for enum describing possible database errors. +type DatabaseErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatabaseErrorEnum) Reset() { *m = DatabaseErrorEnum{} } +func (m *DatabaseErrorEnum) String() string { return proto.CompactTextString(m) } +func (*DatabaseErrorEnum) ProtoMessage() {} +func (*DatabaseErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_database_error_4830c69527554d32, []int{0} +} +func (m *DatabaseErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatabaseErrorEnum.Unmarshal(m, b) +} +func (m *DatabaseErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatabaseErrorEnum.Marshal(b, m, deterministic) +} +func (dst *DatabaseErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatabaseErrorEnum.Merge(dst, src) +} +func (m *DatabaseErrorEnum) XXX_Size() int { + return xxx_messageInfo_DatabaseErrorEnum.Size(m) +} +func (m *DatabaseErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DatabaseErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DatabaseErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DatabaseErrorEnum)(nil), "google.ads.googleads.v1.errors.DatabaseErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.DatabaseErrorEnum_DatabaseError", DatabaseErrorEnum_DatabaseError_name, DatabaseErrorEnum_DatabaseError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/database_error.proto", fileDescriptor_database_error_4830c69527554d32) +} + +var fileDescriptor_database_error_4830c69527554d32 = []byte{ + // 300 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xc3, 0x30, + 0x18, 0x85, 0x5d, 0x05, 0x85, 0x0c, 0x71, 0xf6, 0x46, 0x50, 0xd9, 0x45, 0x1f, 0x20, 0xa1, 0xec, + 0x2e, 0x5e, 0x65, 0x6d, 0x37, 0xaa, 0x98, 0x96, 0xb9, 0x56, 0x90, 0x42, 0xc9, 0x4c, 0x09, 0x85, + 0x2d, 0x29, 0x4d, 0xdd, 0x03, 0x79, 0xe9, 0xa3, 0xf8, 0x28, 0x82, 0xef, 0x20, 0x6d, 0xd6, 0xc2, + 0x2e, 0xf4, 0x2a, 0x27, 0x3f, 0xdf, 0x39, 0xff, 0xe1, 0x07, 0x33, 0xa1, 0x94, 0xd8, 0x16, 0x88, + 0x71, 0x8d, 0x8c, 0x6c, 0xd5, 0xde, 0x45, 0x45, 0x5d, 0xab, 0x5a, 0x23, 0xce, 0x1a, 0xb6, 0x61, + 0xba, 0xc8, 0xbb, 0x3f, 0xac, 0x6a, 0xd5, 0x28, 0x7b, 0x6a, 0x48, 0xc8, 0xb8, 0x86, 0x83, 0x09, + 0xee, 0x5d, 0x68, 0x4c, 0x37, 0x77, 0x7d, 0x68, 0x55, 0x22, 0x26, 0xa5, 0x6a, 0x58, 0x53, 0x2a, + 0xa9, 0x8d, 0xdb, 0xc9, 0xc1, 0x95, 0x7f, 0x48, 0x0d, 0x5a, 0x3e, 0x90, 0xef, 0x3b, 0xe7, 0x01, + 0x5c, 0x1c, 0x0d, 0xed, 0x4b, 0x30, 0x4e, 0xe8, 0x73, 0x1c, 0x78, 0xe1, 0x22, 0x0c, 0xfc, 0xc9, + 0x89, 0x3d, 0x06, 0xe7, 0x09, 0x7d, 0xa4, 0xd1, 0x0b, 0x9d, 0x8c, 0xec, 0x5b, 0x70, 0xed, 0x45, + 0xd4, 0x4b, 0x56, 0xab, 0x80, 0xae, 0xf3, 0xa7, 0xc8, 0x0f, 0x17, 0xa1, 0x47, 0xd6, 
0x61, 0x44, + 0x27, 0xd6, 0xfc, 0x67, 0x04, 0x9c, 0x37, 0xb5, 0x83, 0xff, 0xb7, 0x9c, 0xdb, 0x47, 0x0b, 0xe3, + 0xb6, 0x5b, 0x3c, 0x7a, 0xf5, 0x0f, 0x2e, 0xa1, 0xb6, 0x4c, 0x0a, 0xa8, 0x6a, 0x81, 0x44, 0x21, + 0xbb, 0xe6, 0xfd, 0x81, 0xaa, 0x52, 0xff, 0x75, 0xaf, 0x7b, 0xf3, 0x7c, 0x58, 0xa7, 0x4b, 0x42, + 0x3e, 0xad, 0xe9, 0xd2, 0x84, 0x11, 0xae, 0xa1, 0x91, 0xad, 0x4a, 0x5d, 0xd8, 0xad, 0xd4, 0x5f, + 0x3d, 0x90, 0x11, 0xae, 0xb3, 0x01, 0xc8, 0x52, 0x37, 0x33, 0xc0, 0xb7, 0xe5, 0x98, 0x29, 0xc6, + 0x84, 0x6b, 0x8c, 0x07, 0x04, 0xe3, 0xd4, 0xc5, 0xd8, 0x40, 0x9b, 0xb3, 0xae, 0xdd, 0xec, 0x37, + 0x00, 0x00, 0xff, 0xff, 0x34, 0x89, 0xca, 0xdb, 0xcc, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_error.pb.go new file mode 100644 index 0000000..6d1095a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_error.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/date_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible date errors. +type DateErrorEnum_DateError int32 + +const ( + // Enum unspecified. + DateErrorEnum_UNSPECIFIED DateErrorEnum_DateError = 0 + // The received error code is not known in this version. + DateErrorEnum_UNKNOWN DateErrorEnum_DateError = 1 + // Given field values do not correspond to a valid date. + DateErrorEnum_INVALID_FIELD_VALUES_IN_DATE DateErrorEnum_DateError = 2 + // Given field values do not correspond to a valid date time. + DateErrorEnum_INVALID_FIELD_VALUES_IN_DATE_TIME DateErrorEnum_DateError = 3 + // The string date's format should be yyyy-mm-dd. + DateErrorEnum_INVALID_STRING_DATE DateErrorEnum_DateError = 4 + // The string date time's format should be yyyy-mm-dd hh:mm:ss.ssssss. + DateErrorEnum_INVALID_STRING_DATE_TIME_MICROS DateErrorEnum_DateError = 6 + // The string date time's format should be yyyy-mm-dd hh:mm:ss. + DateErrorEnum_INVALID_STRING_DATE_TIME_SECONDS DateErrorEnum_DateError = 11 + // The string date time's format should be yyyy-mm-dd hh:mm:ss+|-hh:mm. + DateErrorEnum_INVALID_STRING_DATE_TIME_SECONDS_WITH_OFFSET DateErrorEnum_DateError = 12 + // Date is before allowed minimum. + DateErrorEnum_EARLIER_THAN_MINIMUM_DATE DateErrorEnum_DateError = 7 + // Date is after allowed maximum. + DateErrorEnum_LATER_THAN_MAXIMUM_DATE DateErrorEnum_DateError = 8 + // Date range bounds are not in order. + DateErrorEnum_DATE_RANGE_MINIMUM_DATE_LATER_THAN_MAXIMUM_DATE DateErrorEnum_DateError = 9 + // Both dates in range are null. 
+ DateErrorEnum_DATE_RANGE_MINIMUM_AND_MAXIMUM_DATES_BOTH_NULL DateErrorEnum_DateError = 10 +) + +var DateErrorEnum_DateError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_FIELD_VALUES_IN_DATE", + 3: "INVALID_FIELD_VALUES_IN_DATE_TIME", + 4: "INVALID_STRING_DATE", + 6: "INVALID_STRING_DATE_TIME_MICROS", + 11: "INVALID_STRING_DATE_TIME_SECONDS", + 12: "INVALID_STRING_DATE_TIME_SECONDS_WITH_OFFSET", + 7: "EARLIER_THAN_MINIMUM_DATE", + 8: "LATER_THAN_MAXIMUM_DATE", + 9: "DATE_RANGE_MINIMUM_DATE_LATER_THAN_MAXIMUM_DATE", + 10: "DATE_RANGE_MINIMUM_AND_MAXIMUM_DATES_BOTH_NULL", +} +var DateErrorEnum_DateError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_FIELD_VALUES_IN_DATE": 2, + "INVALID_FIELD_VALUES_IN_DATE_TIME": 3, + "INVALID_STRING_DATE": 4, + "INVALID_STRING_DATE_TIME_MICROS": 6, + "INVALID_STRING_DATE_TIME_SECONDS": 11, + "INVALID_STRING_DATE_TIME_SECONDS_WITH_OFFSET": 12, + "EARLIER_THAN_MINIMUM_DATE": 7, + "LATER_THAN_MAXIMUM_DATE": 8, + "DATE_RANGE_MINIMUM_DATE_LATER_THAN_MAXIMUM_DATE": 9, + "DATE_RANGE_MINIMUM_AND_MAXIMUM_DATES_BOTH_NULL": 10, +} + +func (x DateErrorEnum_DateError) String() string { + return proto.EnumName(DateErrorEnum_DateError_name, int32(x)) +} +func (DateErrorEnum_DateError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_date_error_46c20899519a126a, []int{0, 0} +} + +// Container for enum describing possible date errors. +type DateErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateErrorEnum) Reset() { *m = DateErrorEnum{} } +func (m *DateErrorEnum) String() string { return proto.CompactTextString(m) } +func (*DateErrorEnum) ProtoMessage() {} +func (*DateErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_date_error_46c20899519a126a, []int{0} +} +func (m *DateErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateErrorEnum.Unmarshal(m, b) +} +func (m *DateErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateErrorEnum.Marshal(b, m, deterministic) +} +func (dst *DateErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateErrorEnum.Merge(dst, src) +} +func (m *DateErrorEnum) XXX_Size() int { + return xxx_messageInfo_DateErrorEnum.Size(m) +} +func (m *DateErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DateErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DateErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DateErrorEnum)(nil), "google.ads.googleads.v1.errors.DateErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.DateErrorEnum_DateError", DateErrorEnum_DateError_name, DateErrorEnum_DateError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/date_error.proto", fileDescriptor_date_error_46c20899519a126a) +} + +var fileDescriptor_date_error_46c20899519a126a = []byte{ + // 446 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xdd, 0x6e, 0xd3, 0x30, + 0x18, 0x86, 0x69, 0x8b, 0x36, 0xf6, 0x95, 0x9f, 0xc8, 0x1c, 0x4c, 0xc0, 0x18, 0xa3, 0xc0, 0x19, + 0x72, 0x08, 0x3b, 0x0b, 0x47, 0x6e, 0xe3, 0xb6, 0x16, 0x89, 0x53, 0xe5, 0x6f, 0x08, 0x55, 0xb2, + 0x02, 0x89, 0xa2, 0x4a, 0x5b, 0x5c, 0xc5, 0x61, 0x17, 0xc4, 0x21, 0x12, 0xd7, 0xc0, 0x39, 0x77, + 0x02, 0x57, 0x81, 0x1a, 0x2f, 0x41, 0x93, 0xd6, 0xee, 0x28, 0xaf, 0xfc, 0x3d, 0xcf, 0x17, 0x4b, + 0x7e, 0xc1, 0x2c, 0xa4, 0x2c, 0xce, 0x73, 
0x33, 0xcd, 0xd4, 0x55, 0xdc, 0xa4, 0x4b, 0xcb, 0xcc, + 0xab, 0x4a, 0x56, 0xca, 0xcc, 0xd2, 0x3a, 0x17, 0x4d, 0xc6, 0xeb, 0x4a, 0xd6, 0x12, 0x1d, 0x6b, + 0x0a, 0xa7, 0x99, 0xc2, 0x9d, 0x80, 0x2f, 0x2d, 0xac, 0x85, 0xa7, 0x47, 0xed, 0xc2, 0xf5, 0xca, + 0x4c, 0xcb, 0x52, 0xd6, 0x69, 0xbd, 0x92, 0xa5, 0xd2, 0xf6, 0xe8, 0xd7, 0x00, 0x1e, 0x38, 0x69, + 0x9d, 0xd3, 0x0d, 0x4c, 0xcb, 0x6f, 0x17, 0xa3, 0x9f, 0x03, 0x38, 0xe8, 0x4e, 0xd0, 0x23, 0x18, + 0xc6, 0x3c, 0x5c, 0xd0, 0x09, 0x9b, 0x32, 0xea, 0x18, 0x77, 0xd0, 0x10, 0xf6, 0x63, 0xfe, 0x91, + 0xfb, 0x67, 0xdc, 0xe8, 0xa1, 0x13, 0x38, 0x62, 0x3c, 0x21, 0x2e, 0x73, 0xc4, 0x94, 0x51, 0xd7, + 0x11, 0x09, 0x71, 0x63, 0x1a, 0x0a, 0xc6, 0x85, 0x43, 0x22, 0x6a, 0xf4, 0xd1, 0x1b, 0x78, 0xb9, + 0x8b, 0x10, 0x11, 0xf3, 0xa8, 0x31, 0x40, 0x87, 0xf0, 0xb8, 0xc5, 0xc2, 0x28, 0x60, 0x7c, 0xa6, + 0xfd, 0xbb, 0xe8, 0x15, 0xbc, 0xb8, 0x61, 0xd0, 0x68, 0xc2, 0x63, 0x93, 0xc0, 0x0f, 0x8d, 0x3d, + 0xf4, 0x1a, 0x4e, 0xb6, 0x42, 0x21, 0x9d, 0xf8, 0xdc, 0x09, 0x8d, 0x21, 0x7a, 0x07, 0x6f, 0x6f, + 0xa3, 0xc4, 0x19, 0x8b, 0xe6, 0xc2, 0x9f, 0x4e, 0x43, 0x1a, 0x19, 0xf7, 0xd1, 0x73, 0x78, 0x42, + 0x49, 0xe0, 0x32, 0x1a, 0x88, 0x68, 0x4e, 0xb8, 0xf0, 0x18, 0x67, 0x5e, 0xec, 0xe9, 0xbb, 0xed, + 0xa3, 0x67, 0x70, 0xe8, 0x92, 0xa8, 0x1b, 0x92, 0x4f, 0xff, 0x87, 0xf7, 0xd0, 0x29, 0x98, 0xcd, + 0xfa, 0x80, 0xf0, 0x19, 0xbd, 0x66, 0x8a, 0x6d, 0xd2, 0x01, 0x7a, 0x0f, 0xf8, 0x06, 0x89, 0x70, + 0xe7, 0x1a, 0x18, 0x8a, 0xb1, 0x1f, 0xcd, 0x05, 0x8f, 0x5d, 0xd7, 0x80, 0xf1, 0x9f, 0x1e, 0x8c, + 0xbe, 0xca, 0x0b, 0xbc, 0xbb, 0x06, 0xe3, 0x87, 0xdd, 0x9b, 0x2e, 0x36, 0x0f, 0xbf, 0xe8, 0x7d, + 0x76, 0xae, 0x8c, 0x42, 0x9e, 0xa7, 0x65, 0x81, 0x65, 0x55, 0x98, 0x45, 0x5e, 0x36, 0xb5, 0x68, + 0x9b, 0xb7, 0x5e, 0xa9, 0x6d, 0x45, 0xfc, 0xa0, 0x3f, 0xdf, 0xfb, 0x83, 0x19, 0x21, 0x3f, 0xfa, + 0xc7, 0x33, 0xbd, 0x8c, 0x64, 0x0a, 0xeb, 0xb8, 0x49, 0x89, 0x85, 0x9b, 0x5f, 0xaa, 0xdf, 0x2d, + 0xb0, 0x24, 0x99, 0x5a, 0x76, 0xc0, 0x32, 0xb1, 0x96, 0x1a, 0xf8, 0xdb, 0x1f, 0xe9, 0x53, 0xdb, + 0x26, 0x99, 0xb2, 0xed, 0x0e, 0xb1, 0xed, 0xc4, 0xb2, 0x6d, 0x0d, 0x7d, 0xd9, 0x6b, 0x6e, 0x77, + 0xfa, 0x2f, 0x00, 0x00, 0xff, 0xff, 0xa8, 0x90, 0x80, 0x3b, 0x25, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_range_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_range_error.pb.go new file mode 100644 index 0000000..c7d0f9a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/date_range_error.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/date_range_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible date range errors. +type DateRangeErrorEnum_DateRangeError int32 + +const ( + // Enum unspecified. 
+ DateRangeErrorEnum_UNSPECIFIED DateRangeErrorEnum_DateRangeError = 0 + // The received error code is not known in this version. + DateRangeErrorEnum_UNKNOWN DateRangeErrorEnum_DateRangeError = 1 + // Invalid date. + DateRangeErrorEnum_INVALID_DATE DateRangeErrorEnum_DateRangeError = 2 + // The start date was after the end date. + DateRangeErrorEnum_START_DATE_AFTER_END_DATE DateRangeErrorEnum_DateRangeError = 3 + // Cannot set date to past time + DateRangeErrorEnum_CANNOT_SET_DATE_TO_PAST DateRangeErrorEnum_DateRangeError = 4 + // A date was used that is past the system "last" date. + DateRangeErrorEnum_AFTER_MAXIMUM_ALLOWABLE_DATE DateRangeErrorEnum_DateRangeError = 5 + // Trying to change start date on a resource that has started. + DateRangeErrorEnum_CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED DateRangeErrorEnum_DateRangeError = 6 +) + +var DateRangeErrorEnum_DateRangeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_DATE", + 3: "START_DATE_AFTER_END_DATE", + 4: "CANNOT_SET_DATE_TO_PAST", + 5: "AFTER_MAXIMUM_ALLOWABLE_DATE", + 6: "CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED", +} +var DateRangeErrorEnum_DateRangeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_DATE": 2, + "START_DATE_AFTER_END_DATE": 3, + "CANNOT_SET_DATE_TO_PAST": 4, + "AFTER_MAXIMUM_ALLOWABLE_DATE": 5, + "CANNOT_MODIFY_START_DATE_IF_ALREADY_STARTED": 6, +} + +func (x DateRangeErrorEnum_DateRangeError) String() string { + return proto.EnumName(DateRangeErrorEnum_DateRangeError_name, int32(x)) +} +func (DateRangeErrorEnum_DateRangeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_date_range_error_4de96ce6b4f7f7c6, []int{0, 0} +} + +// Container for enum describing possible date range errors. +type DateRangeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateRangeErrorEnum) Reset() { *m = DateRangeErrorEnum{} } +func (m *DateRangeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*DateRangeErrorEnum) ProtoMessage() {} +func (*DateRangeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_date_range_error_4de96ce6b4f7f7c6, []int{0} +} +func (m *DateRangeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateRangeErrorEnum.Unmarshal(m, b) +} +func (m *DateRangeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateRangeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *DateRangeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateRangeErrorEnum.Merge(dst, src) +} +func (m *DateRangeErrorEnum) XXX_Size() int { + return xxx_messageInfo_DateRangeErrorEnum.Size(m) +} +func (m *DateRangeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DateRangeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DateRangeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DateRangeErrorEnum)(nil), "google.ads.googleads.v1.errors.DateRangeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.DateRangeErrorEnum_DateRangeError", DateRangeErrorEnum_DateRangeError_name, DateRangeErrorEnum_DateRangeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/date_range_error.proto", fileDescriptor_date_range_error_4de96ce6b4f7f7c6) +} + +var fileDescriptor_date_range_error_4de96ce6b4f7f7c6 = []byte{ + // 391 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 
0x7c, 0x91, 0xc1, 0x8e, 0x95, 0x30, + 0x14, 0x86, 0xe5, 0x8e, 0x8e, 0x49, 0xc7, 0x28, 0xa9, 0x0b, 0xa3, 0x8e, 0x13, 0xc3, 0xd6, 0xa4, + 0x84, 0x18, 0x37, 0x75, 0x75, 0xee, 0xb4, 0x4c, 0x88, 0x50, 0x08, 0x70, 0x19, 0x35, 0x24, 0x4d, + 0x15, 0x42, 0x6e, 0x32, 0x43, 0x6f, 0x28, 0xce, 0x03, 0xb9, 0xf4, 0x51, 0xdc, 0xf8, 0x16, 0x2e, + 0x5c, 0xf8, 0x0c, 0x06, 0xca, 0xbd, 0x71, 0x16, 0xba, 0xe2, 0xe7, 0xef, 0xff, 0x9d, 0xd3, 0x9e, + 0x83, 0xde, 0x74, 0x5a, 0x77, 0x57, 0xad, 0xaf, 0x1a, 0xe3, 0x5b, 0x39, 0xa9, 0x9b, 0xc0, 0x6f, + 0x87, 0x41, 0x0f, 0xc6, 0x6f, 0xd4, 0xd8, 0xca, 0x41, 0xf5, 0x5d, 0x2b, 0x67, 0x87, 0xec, 0x06, + 0x3d, 0x6a, 0x7c, 0x66, 0xb3, 0x44, 0x35, 0x86, 0x1c, 0x30, 0x72, 0x13, 0x10, 0x8b, 0x3d, 0x3b, + 0xdd, 0x97, 0xdd, 0x6d, 0x7d, 0xd5, 0xf7, 0x7a, 0x54, 0xe3, 0x56, 0xf7, 0xc6, 0xd2, 0xde, 0x4f, + 0x07, 0x61, 0xa6, 0xc6, 0x36, 0x9f, 0xea, 0xf2, 0x89, 0xe0, 0xfd, 0x97, 0x6b, 0xef, 0x87, 0x83, + 0x1e, 0xde, 0xb6, 0xf1, 0x23, 0x74, 0xb2, 0x11, 0x45, 0xc6, 0xcf, 0xa3, 0x30, 0xe2, 0xcc, 0xbd, + 0x83, 0x4f, 0xd0, 0xfd, 0x8d, 0x78, 0x27, 0xd2, 0x4b, 0xe1, 0x3a, 0xd8, 0x45, 0x0f, 0x22, 0x51, + 0x41, 0x1c, 0x31, 0xc9, 0xa0, 0xe4, 0xee, 0x0a, 0xbf, 0x40, 0x4f, 0x8b, 0x12, 0xf2, 0x72, 0xfe, + 0x97, 0x10, 0x96, 0x3c, 0x97, 0x5c, 0x2c, 0xc7, 0x47, 0xf8, 0x39, 0x7a, 0x72, 0x0e, 0x42, 0xa4, + 0xa5, 0x2c, 0xf8, 0x92, 0x29, 0x53, 0x99, 0x41, 0x51, 0xba, 0x77, 0xf1, 0x4b, 0x74, 0x6a, 0x81, + 0x04, 0xde, 0x47, 0xc9, 0x26, 0x91, 0x10, 0xc7, 0xe9, 0x25, 0xac, 0x63, 0x6e, 0xf1, 0x7b, 0xd8, + 0x47, 0xaf, 0x16, 0x3c, 0x49, 0x59, 0x14, 0x7e, 0x90, 0x7f, 0xf5, 0x8a, 0x42, 0x09, 0x71, 0xce, + 0x81, 0x2d, 0x2e, 0x67, 0xee, 0xf1, 0xfa, 0xb7, 0x83, 0xbc, 0xcf, 0xfa, 0x9a, 0xfc, 0x7f, 0x5a, + 0xeb, 0xc7, 0xb7, 0x5f, 0x9d, 0x4d, 0x43, 0xca, 0x9c, 0x8f, 0x6c, 0xc1, 0x3a, 0x7d, 0xa5, 0xfa, + 0x8e, 0xe8, 0xa1, 0xf3, 0xbb, 0xb6, 0x9f, 0x47, 0xb8, 0xdf, 0xd5, 0x6e, 0x6b, 0xfe, 0xb5, 0xba, + 0xb7, 0xf6, 0xf3, 0x75, 0x75, 0x74, 0x01, 0xf0, 0x6d, 0x75, 0x76, 0x61, 0x8b, 0x41, 0x63, 0x88, + 0x95, 0x93, 0xaa, 0x02, 0x32, 0xb7, 0x34, 0xdf, 0xf7, 0x81, 0x1a, 0x1a, 0x53, 0x1f, 0x02, 0x75, + 0x15, 0xd4, 0x36, 0xf0, 0x6b, 0xe5, 0x59, 0x97, 0x52, 0x68, 0x0c, 0xa5, 0x87, 0x08, 0xa5, 0x55, + 0x40, 0xa9, 0x0d, 0x7d, 0x3a, 0x9e, 0x6f, 0xf7, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd0, + 0x3e, 0xbd, 0x2d, 0x57, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/distinct_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/distinct_error.pb.go new file mode 100644 index 0000000..627a8b0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/distinct_error.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/distinct_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible distinct errors. +type DistinctErrorEnum_DistinctError int32 + +const ( + // Enum unspecified. + DistinctErrorEnum_UNSPECIFIED DistinctErrorEnum_DistinctError = 0 + // The received error code is not known in this version. + DistinctErrorEnum_UNKNOWN DistinctErrorEnum_DistinctError = 1 + // Duplicate element. + DistinctErrorEnum_DUPLICATE_ELEMENT DistinctErrorEnum_DistinctError = 2 + // Duplicate type. + DistinctErrorEnum_DUPLICATE_TYPE DistinctErrorEnum_DistinctError = 3 +) + +var DistinctErrorEnum_DistinctError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "DUPLICATE_ELEMENT", + 3: "DUPLICATE_TYPE", +} +var DistinctErrorEnum_DistinctError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "DUPLICATE_ELEMENT": 2, + "DUPLICATE_TYPE": 3, +} + +func (x DistinctErrorEnum_DistinctError) String() string { + return proto.EnumName(DistinctErrorEnum_DistinctError_name, int32(x)) +} +func (DistinctErrorEnum_DistinctError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_distinct_error_e8a8de7568fed594, []int{0, 0} +} + +// Container for enum describing possible distinct errors. +type DistinctErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DistinctErrorEnum) Reset() { *m = DistinctErrorEnum{} } +func (m *DistinctErrorEnum) String() string { return proto.CompactTextString(m) } +func (*DistinctErrorEnum) ProtoMessage() {} +func (*DistinctErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_distinct_error_e8a8de7568fed594, []int{0} +} +func (m *DistinctErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DistinctErrorEnum.Unmarshal(m, b) +} +func (m *DistinctErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DistinctErrorEnum.Marshal(b, m, deterministic) +} +func (dst *DistinctErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_DistinctErrorEnum.Merge(dst, src) +} +func (m *DistinctErrorEnum) XXX_Size() int { + return xxx_messageInfo_DistinctErrorEnum.Size(m) +} +func (m *DistinctErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_DistinctErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_DistinctErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*DistinctErrorEnum)(nil), "google.ads.googleads.v1.errors.DistinctErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.DistinctErrorEnum_DistinctError", DistinctErrorEnum_DistinctError_name, DistinctErrorEnum_DistinctError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/distinct_error.proto", fileDescriptor_distinct_error_e8a8de7568fed594) +} + +var fileDescriptor_distinct_error_e8a8de7568fed594 = []byte{ + // 307 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xdf, 0x4a, 0xc3, 0x30, + 0x18, 0xc5, 0x5d, 0x07, 0x0a, 0x19, 0x6a, 0x17, 0xf0, 0x46, 0x64, 0x17, 0x7d, 0x80, 0x84, 0xb2, + 0xbb, 0x78, 0x95, 0xad, 0x71, 0x0c, 0x67, 0x2d, 0xd8, 0xd6, 0x3f, 0x14, 0x46, 0x5d, 0x4a, 0x28, + 0xac, 0x49, 0x69, 0xea, 0x1e, 0xc8, 0x4b, 0x1f, 0xc5, 0x47, 0x11, 0x7c, 0x07, 0x69, 0xb3, 0x56, + 0x76, 0xa1, 0x57, 0x3d, 0xfd, 0xf8, 0x9d, 0xf3, 0x9d, 0x7c, 0x60, 0x2a, 0x94, 0x12, 0xdb, 0x0c, + 0xa7, 0x5c, 0x63, 0x23, 0x1b, 0xb5, 0x73, 0x71, 0x56, 0x55, 0xaa, 0xd2, 0x98, 0xe7, 0xba, 0xce, + 0xe5, 0xa6, 0x5e, 0xb7, 0xff, 0xa8, 
0xac, 0x54, 0xad, 0xe0, 0xc4, 0x90, 0x28, 0xe5, 0x1a, 0xf5, + 0x26, 0xb4, 0x73, 0x91, 0x31, 0x5d, 0x5e, 0x75, 0xa1, 0x65, 0x8e, 0x53, 0x29, 0x55, 0x9d, 0xd6, + 0xb9, 0x92, 0xda, 0xb8, 0x9d, 0x02, 0x8c, 0xbd, 0x7d, 0x2a, 0x6b, 0x78, 0x26, 0xdf, 0x0a, 0xe7, + 0x09, 0x9c, 0x1e, 0x0c, 0xe1, 0x39, 0x18, 0x45, 0xfe, 0x43, 0xc0, 0xe6, 0xcb, 0x9b, 0x25, 0xf3, + 0xec, 0x23, 0x38, 0x02, 0x27, 0x91, 0x7f, 0xeb, 0xdf, 0x3f, 0xfa, 0xf6, 0x00, 0x5e, 0x80, 0xb1, + 0x17, 0x05, 0xab, 0xe5, 0x9c, 0x86, 0x6c, 0xcd, 0x56, 0xec, 0x8e, 0xf9, 0xa1, 0x6d, 0x41, 0x08, + 0xce, 0x7e, 0xc7, 0xe1, 0x73, 0xc0, 0xec, 0xe1, 0xec, 0x7b, 0x00, 0x9c, 0x8d, 0x2a, 0xd0, 0xff, + 0x9d, 0x67, 0xf0, 0x60, 0x7d, 0xd0, 0x34, 0x0d, 0x06, 0x2f, 0xde, 0xde, 0x25, 0xd4, 0x36, 0x95, + 0x02, 0xa9, 0x4a, 0x60, 0x91, 0xc9, 0xf6, 0x1d, 0xdd, 0xb9, 0xca, 0x5c, 0xff, 0x75, 0xbd, 0x6b, + 0xf3, 0x79, 0xb7, 0x86, 0x0b, 0x4a, 0x3f, 0xac, 0xc9, 0xc2, 0x84, 0x51, 0xae, 0x91, 0x91, 0x8d, + 0x8a, 0x5d, 0xd4, 0xae, 0xd4, 0x9f, 0x1d, 0x90, 0x50, 0xae, 0x93, 0x1e, 0x48, 0x62, 0x37, 0x31, + 0xc0, 0x97, 0xe5, 0x98, 0x29, 0x21, 0x94, 0x6b, 0x42, 0x7a, 0x84, 0x90, 0xd8, 0x25, 0xc4, 0x40, + 0xaf, 0xc7, 0x6d, 0xbb, 0xe9, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5f, 0xfb, 0x24, 0x56, 0xda, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/enum_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/enum_error.pb.go new file mode 100644 index 0000000..05ce677 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/enum_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/enum_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible enum errors. +type EnumErrorEnum_EnumError int32 + +const ( + // Enum unspecified. + EnumErrorEnum_UNSPECIFIED EnumErrorEnum_EnumError = 0 + // The received error code is not known in this version. + EnumErrorEnum_UNKNOWN EnumErrorEnum_EnumError = 1 + // The enum value is not permitted. + EnumErrorEnum_ENUM_VALUE_NOT_PERMITTED EnumErrorEnum_EnumError = 3 +) + +var EnumErrorEnum_EnumError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "ENUM_VALUE_NOT_PERMITTED", +} +var EnumErrorEnum_EnumError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ENUM_VALUE_NOT_PERMITTED": 3, +} + +func (x EnumErrorEnum_EnumError) String() string { + return proto.EnumName(EnumErrorEnum_EnumError_name, int32(x)) +} +func (EnumErrorEnum_EnumError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_enum_error_9874f7159f8ae424, []int{0, 0} +} + +// Container for enum describing possible enum errors. 
+type EnumErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumErrorEnum) Reset() { *m = EnumErrorEnum{} } +func (m *EnumErrorEnum) String() string { return proto.CompactTextString(m) } +func (*EnumErrorEnum) ProtoMessage() {} +func (*EnumErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_enum_error_9874f7159f8ae424, []int{0} +} +func (m *EnumErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumErrorEnum.Unmarshal(m, b) +} +func (m *EnumErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumErrorEnum.Marshal(b, m, deterministic) +} +func (dst *EnumErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumErrorEnum.Merge(dst, src) +} +func (m *EnumErrorEnum) XXX_Size() int { + return xxx_messageInfo_EnumErrorEnum.Size(m) +} +func (m *EnumErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_EnumErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*EnumErrorEnum)(nil), "google.ads.googleads.v1.errors.EnumErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.EnumErrorEnum_EnumError", EnumErrorEnum_EnumError_name, EnumErrorEnum_EnumError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/enum_error.proto", fileDescriptor_enum_error_9874f7159f8ae424) +} + +var fileDescriptor_enum_error_9874f7159f8ae424 = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0x84, 0x40, + 0x18, 0xc7, 0xd3, 0x85, 0xa2, 0x59, 0x2a, 0xf1, 0x14, 0xb1, 0xec, 0xc1, 0x07, 0x98, 0x41, 0xba, + 0x4d, 0xa7, 0xd9, 0x9c, 0x44, 0x6a, 0x67, 0xa5, 0xd4, 0x22, 0x04, 0xb1, 0x94, 0x41, 0x58, 0x67, + 0xc4, 0x71, 0xf7, 0x81, 0x3a, 0xf6, 0x28, 0xbd, 0x49, 0x3d, 0x45, 0xe8, 0xac, 0xde, 0xea, 0x34, + 0x3f, 0x86, 0xdf, 0xff, 0xfb, 0xfe, 0x7c, 0x00, 0x71, 0x29, 0xf9, 0xb6, 0x44, 0x79, 0xa1, 0x0e, + 0xd8, 0xd3, 0xde, 0x45, 0x65, 0xdb, 0xca, 0x56, 0xa1, 0x52, 0xec, 0xea, 0x6c, 0x60, 0xd8, 0xb4, + 0xb2, 0x93, 0xf6, 0x52, 0x5b, 0x30, 0x2f, 0x14, 0x9c, 0x02, 0x70, 0xef, 0x42, 0x1d, 0xb8, 0x5a, + 0x8c, 0x03, 0x9b, 0x0a, 0xe5, 0x42, 0xc8, 0x2e, 0xef, 0x2a, 0x29, 0x94, 0x4e, 0x3b, 0x2f, 0xe0, + 0x8c, 0x8a, 0x5d, 0x4d, 0x7b, 0xb7, 0x07, 0xc7, 0x07, 0xa7, 0xd3, 0x87, 0x7d, 0x01, 0xe6, 0x31, + 0x7b, 0x0a, 0xe9, 0x6d, 0x70, 0x17, 0x50, 0xcf, 0x3a, 0xb2, 0xe7, 0xe0, 0x24, 0x66, 0xf7, 0x6c, + 0xf3, 0xcc, 0x2c, 0xc3, 0x5e, 0x80, 0x4b, 0xca, 0xe2, 0x75, 0x96, 0x90, 0x87, 0x98, 0x66, 0x6c, + 0x13, 0x65, 0x21, 0x7d, 0x5c, 0x07, 0x51, 0x44, 0x3d, 0x6b, 0xb6, 0xfa, 0x36, 0x80, 0xf3, 0x2e, + 0x6b, 0xf8, 0x7f, 0xbd, 0xd5, 0xf9, 0xb4, 0x2d, 0xec, 0x0b, 0x85, 0xc6, 0xab, 0x77, 0x48, 0x70, + 0xb9, 0xcd, 0x05, 0x87, 0xb2, 0xe5, 0x88, 0x97, 0x62, 0xa8, 0x3b, 0x5e, 0xa4, 0xa9, 0xd4, 0x5f, + 0x07, 0xba, 0xd1, 0xcf, 0x87, 0x39, 0xf3, 0x09, 0xf9, 0x34, 0x97, 0xbe, 0x1e, 0x46, 0x0a, 0x05, + 0x35, 0xf6, 0x94, 0xb8, 0x70, 0x58, 0xa9, 0xbe, 0x46, 0x21, 0x25, 0x85, 0x4a, 0x27, 0x21, 0x4d, + 0xdc, 0x54, 0x0b, 0x3f, 0xa6, 0xa3, 0x7f, 0x31, 0x26, 0x85, 0xc2, 0x78, 0x52, 0x30, 0x4e, 0x5c, + 0x8c, 0xb5, 0xf4, 0x76, 0x3c, 0xb4, 0xbb, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x29, 0xb5, 0xb1, + 0xb9, 0xbd, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/errors.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/errors.pb.go 
new file mode 100644 index 0000000..1718797 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/errors.pb.go @@ -0,0 +1,3496 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/errors.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes how a GoogleAds API call failed. It's returned inside +// google.rpc.Status.details when a call fails. +type GoogleAdsFailure struct { + // The list of errors that occurred. + Errors []*GoogleAdsError `protobuf:"bytes,1,rep,name=errors,proto3" json:"errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsFailure) Reset() { *m = GoogleAdsFailure{} } +func (m *GoogleAdsFailure) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsFailure) ProtoMessage() {} +func (*GoogleAdsFailure) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{0} +} +func (m *GoogleAdsFailure) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsFailure.Unmarshal(m, b) +} +func (m *GoogleAdsFailure) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsFailure.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsFailure) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsFailure.Merge(dst, src) +} +func (m *GoogleAdsFailure) XXX_Size() int { + return xxx_messageInfo_GoogleAdsFailure.Size(m) +} +func (m *GoogleAdsFailure) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsFailure.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsFailure proto.InternalMessageInfo + +func (m *GoogleAdsFailure) GetErrors() []*GoogleAdsError { + if m != nil { + return m.Errors + } + return nil +} + +// GoogleAds-specific error. +type GoogleAdsError struct { + // An enum value that indicates which error occurred. + ErrorCode *ErrorCode `protobuf:"bytes,1,opt,name=error_code,json=errorCode,proto3" json:"error_code,omitempty"` + // A human-readable description of the error. + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + // The value that triggered the error. + Trigger *common.Value `protobuf:"bytes,3,opt,name=trigger,proto3" json:"trigger,omitempty"` + // Describes the part of the request proto that caused the error. + Location *ErrorLocation `protobuf:"bytes,4,opt,name=location,proto3" json:"location,omitempty"` + // Additional error details, which are returned by certain error codes. Most + // error codes do not include details. 
+ Details *ErrorDetails `protobuf:"bytes,5,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsError) Reset() { *m = GoogleAdsError{} } +func (m *GoogleAdsError) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsError) ProtoMessage() {} +func (*GoogleAdsError) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{1} +} +func (m *GoogleAdsError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsError.Unmarshal(m, b) +} +func (m *GoogleAdsError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsError.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsError) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsError.Merge(dst, src) +} +func (m *GoogleAdsError) XXX_Size() int { + return xxx_messageInfo_GoogleAdsError.Size(m) +} +func (m *GoogleAdsError) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsError.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsError proto.InternalMessageInfo + +func (m *GoogleAdsError) GetErrorCode() *ErrorCode { + if m != nil { + return m.ErrorCode + } + return nil +} + +func (m *GoogleAdsError) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func (m *GoogleAdsError) GetTrigger() *common.Value { + if m != nil { + return m.Trigger + } + return nil +} + +func (m *GoogleAdsError) GetLocation() *ErrorLocation { + if m != nil { + return m.Location + } + return nil +} + +func (m *GoogleAdsError) GetDetails() *ErrorDetails { + if m != nil { + return m.Details + } + return nil +} + +// The error reason represented by type and enum. +type ErrorCode struct { + // The list of error enums + // + // Types that are valid to be assigned to ErrorCode: + // *ErrorCode_RequestError + // *ErrorCode_BiddingStrategyError + // *ErrorCode_UrlFieldError + // *ErrorCode_ListOperationError + // *ErrorCode_QueryError + // *ErrorCode_MutateError + // *ErrorCode_FieldMaskError + // *ErrorCode_AuthorizationError + // *ErrorCode_InternalError + // *ErrorCode_QuotaError + // *ErrorCode_AdError + // *ErrorCode_AdGroupError + // *ErrorCode_CampaignBudgetError + // *ErrorCode_CampaignError + // *ErrorCode_AuthenticationError + // *ErrorCode_AdGroupCriterionError + // *ErrorCode_AdCustomizerError + // *ErrorCode_AdGroupAdError + // *ErrorCode_AdSharingError + // *ErrorCode_AdxError + // *ErrorCode_AssetError + // *ErrorCode_BiddingError + // *ErrorCode_CampaignCriterionError + // *ErrorCode_CollectionSizeError + // *ErrorCode_CountryCodeError + // *ErrorCode_CriterionError + // *ErrorCode_CustomerError + // *ErrorCode_DateError + // *ErrorCode_DateRangeError + // *ErrorCode_DistinctError + // *ErrorCode_FeedAttributeReferenceError + // *ErrorCode_FunctionError + // *ErrorCode_FunctionParsingError + // *ErrorCode_IdError + // *ErrorCode_ImageError + // *ErrorCode_LanguageCodeError + // *ErrorCode_MediaBundleError + // *ErrorCode_MediaUploadError + // *ErrorCode_MediaFileError + // *ErrorCode_MultiplierError + // *ErrorCode_NewResourceCreationError + // *ErrorCode_NotEmptyError + // *ErrorCode_NullError + // *ErrorCode_OperatorError + // *ErrorCode_RangeError + // *ErrorCode_RecommendationError + // *ErrorCode_RegionCodeError + // *ErrorCode_SettingError + // *ErrorCode_StringFormatError + // *ErrorCode_StringLengthError + // *ErrorCode_OperationAccessDeniedError + // *ErrorCode_ResourceAccessDeniedError + // 
*ErrorCode_ResourceCountLimitExceededError + // *ErrorCode_YoutubeVideoRegistrationError + // *ErrorCode_AdGroupBidModifierError + // *ErrorCode_ContextError + // *ErrorCode_FieldError + // *ErrorCode_SharedSetError + // *ErrorCode_SharedCriterionError + // *ErrorCode_CampaignSharedSetError + // *ErrorCode_ConversionActionError + // *ErrorCode_ConversionAdjustmentUploadError + // *ErrorCode_ConversionUploadError + // *ErrorCode_HeaderError + // *ErrorCode_DatabaseError + // *ErrorCode_PolicyFindingError + // *ErrorCode_EnumError + // *ErrorCode_KeywordPlanError + // *ErrorCode_KeywordPlanCampaignError + // *ErrorCode_KeywordPlanNegativeKeywordError + // *ErrorCode_KeywordPlanAdGroupError + // *ErrorCode_KeywordPlanKeywordError + // *ErrorCode_KeywordPlanIdeaError + // *ErrorCode_AccountBudgetProposalError + // *ErrorCode_UserListError + // *ErrorCode_ChangeStatusError + // *ErrorCode_FeedError + // *ErrorCode_GeoTargetConstantSuggestionError + // *ErrorCode_FeedItemError + // *ErrorCode_LabelError + // *ErrorCode_BillingSetupError + // *ErrorCode_CustomerClientLinkError + // *ErrorCode_CustomerManagerLinkError + // *ErrorCode_FeedMappingError + // *ErrorCode_CustomerFeedError + // *ErrorCode_AdGroupFeedError + // *ErrorCode_CampaignFeedError + // *ErrorCode_CustomInterestError + // *ErrorCode_ExtensionFeedItemError + // *ErrorCode_AdParameterError + // *ErrorCode_FeedItemValidationError + // *ErrorCode_ExtensionSettingError + // *ErrorCode_FeedItemTargetError + // *ErrorCode_PolicyViolationError + // *ErrorCode_MutateJobError + // *ErrorCode_PartialFailureError + // *ErrorCode_PolicyValidationParameterError + // *ErrorCode_SizeLimitError + ErrorCode isErrorCode_ErrorCode `protobuf_oneof:"error_code"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorCode) Reset() { *m = ErrorCode{} } +func (m *ErrorCode) String() string { return proto.CompactTextString(m) } +func (*ErrorCode) ProtoMessage() {} +func (*ErrorCode) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{2} +} +func (m *ErrorCode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorCode.Unmarshal(m, b) +} +func (m *ErrorCode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorCode.Marshal(b, m, deterministic) +} +func (dst *ErrorCode) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorCode.Merge(dst, src) +} +func (m *ErrorCode) XXX_Size() int { + return xxx_messageInfo_ErrorCode.Size(m) +} +func (m *ErrorCode) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorCode.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorCode proto.InternalMessageInfo + +type isErrorCode_ErrorCode interface { + isErrorCode_ErrorCode() +} + +type ErrorCode_RequestError struct { + RequestError RequestErrorEnum_RequestError `protobuf:"varint,1,opt,name=request_error,json=requestError,proto3,enum=google.ads.googleads.v1.errors.RequestErrorEnum_RequestError,oneof"` +} + +type ErrorCode_BiddingStrategyError struct { + BiddingStrategyError BiddingStrategyErrorEnum_BiddingStrategyError `protobuf:"varint,2,opt,name=bidding_strategy_error,json=biddingStrategyError,proto3,enum=google.ads.googleads.v1.errors.BiddingStrategyErrorEnum_BiddingStrategyError,oneof"` +} + +type ErrorCode_UrlFieldError struct { + UrlFieldError UrlFieldErrorEnum_UrlFieldError `protobuf:"varint,3,opt,name=url_field_error,json=urlFieldError,proto3,enum=google.ads.googleads.v1.errors.UrlFieldErrorEnum_UrlFieldError,oneof"` +} + 
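[Editorial aside, not part of the vendored patch: a minimal sketch of how the generated oneof wrappers in errors.pb.go are consumed, assuming the import path declared by the file itself ("google.golang.org/genproto/googleapis/ads/googleads/v1/errors"); the package alias "gaerrors" and the choice of DatabaseError as the sample case are illustrative only.]

package main

import (
	"fmt"

	gaerrors "google.golang.org/genproto/googleapis/ads/googleads/v1/errors"
)

func main() {
	// ErrorCode carries exactly one concrete error enum through its oneof
	// field; each possible case is one of the ErrorCode_* wrapper structs
	// defined in this generated file.
	code := &gaerrors.ErrorCode{
		ErrorCode: &gaerrors.ErrorCode_DatabaseError{
			DatabaseError: gaerrors.DatabaseErrorEnum_CONCURRENT_MODIFICATION,
		},
	}

	// Callers inspect the oneof with a type switch over those wrapper structs.
	switch c := code.ErrorCode.(type) {
	case *gaerrors.ErrorCode_DatabaseError:
		// String() resolves the value through the generated enum name map.
		fmt.Println("database error:", c.DatabaseError.String())
	default:
		fmt.Println("other error category")
	}
}

[The same type-switch pattern applies to every other ErrorCode_* wrapper added below; only the wrapper struct and enum type change.]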
+type ErrorCode_ListOperationError struct { + ListOperationError ListOperationErrorEnum_ListOperationError `protobuf:"varint,4,opt,name=list_operation_error,json=listOperationError,proto3,enum=google.ads.googleads.v1.errors.ListOperationErrorEnum_ListOperationError,oneof"` +} + +type ErrorCode_QueryError struct { + QueryError QueryErrorEnum_QueryError `protobuf:"varint,5,opt,name=query_error,json=queryError,proto3,enum=google.ads.googleads.v1.errors.QueryErrorEnum_QueryError,oneof"` +} + +type ErrorCode_MutateError struct { + MutateError MutateErrorEnum_MutateError `protobuf:"varint,7,opt,name=mutate_error,json=mutateError,proto3,enum=google.ads.googleads.v1.errors.MutateErrorEnum_MutateError,oneof"` +} + +type ErrorCode_FieldMaskError struct { + FieldMaskError FieldMaskErrorEnum_FieldMaskError `protobuf:"varint,8,opt,name=field_mask_error,json=fieldMaskError,proto3,enum=google.ads.googleads.v1.errors.FieldMaskErrorEnum_FieldMaskError,oneof"` +} + +type ErrorCode_AuthorizationError struct { + AuthorizationError AuthorizationErrorEnum_AuthorizationError `protobuf:"varint,9,opt,name=authorization_error,json=authorizationError,proto3,enum=google.ads.googleads.v1.errors.AuthorizationErrorEnum_AuthorizationError,oneof"` +} + +type ErrorCode_InternalError struct { + InternalError InternalErrorEnum_InternalError `protobuf:"varint,10,opt,name=internal_error,json=internalError,proto3,enum=google.ads.googleads.v1.errors.InternalErrorEnum_InternalError,oneof"` +} + +type ErrorCode_QuotaError struct { + QuotaError QuotaErrorEnum_QuotaError `protobuf:"varint,11,opt,name=quota_error,json=quotaError,proto3,enum=google.ads.googleads.v1.errors.QuotaErrorEnum_QuotaError,oneof"` +} + +type ErrorCode_AdError struct { + AdError AdErrorEnum_AdError `protobuf:"varint,12,opt,name=ad_error,json=adError,proto3,enum=google.ads.googleads.v1.errors.AdErrorEnum_AdError,oneof"` +} + +type ErrorCode_AdGroupError struct { + AdGroupError AdGroupErrorEnum_AdGroupError `protobuf:"varint,13,opt,name=ad_group_error,json=adGroupError,proto3,enum=google.ads.googleads.v1.errors.AdGroupErrorEnum_AdGroupError,oneof"` +} + +type ErrorCode_CampaignBudgetError struct { + CampaignBudgetError CampaignBudgetErrorEnum_CampaignBudgetError `protobuf:"varint,14,opt,name=campaign_budget_error,json=campaignBudgetError,proto3,enum=google.ads.googleads.v1.errors.CampaignBudgetErrorEnum_CampaignBudgetError,oneof"` +} + +type ErrorCode_CampaignError struct { + CampaignError CampaignErrorEnum_CampaignError `protobuf:"varint,15,opt,name=campaign_error,json=campaignError,proto3,enum=google.ads.googleads.v1.errors.CampaignErrorEnum_CampaignError,oneof"` +} + +type ErrorCode_AuthenticationError struct { + AuthenticationError AuthenticationErrorEnum_AuthenticationError `protobuf:"varint,17,opt,name=authentication_error,json=authenticationError,proto3,enum=google.ads.googleads.v1.errors.AuthenticationErrorEnum_AuthenticationError,oneof"` +} + +type ErrorCode_AdGroupCriterionError struct { + AdGroupCriterionError AdGroupCriterionErrorEnum_AdGroupCriterionError `protobuf:"varint,18,opt,name=ad_group_criterion_error,json=adGroupCriterionError,proto3,enum=google.ads.googleads.v1.errors.AdGroupCriterionErrorEnum_AdGroupCriterionError,oneof"` +} + +type ErrorCode_AdCustomizerError struct { + AdCustomizerError AdCustomizerErrorEnum_AdCustomizerError `protobuf:"varint,19,opt,name=ad_customizer_error,json=adCustomizerError,proto3,enum=google.ads.googleads.v1.errors.AdCustomizerErrorEnum_AdCustomizerError,oneof"` +} + +type ErrorCode_AdGroupAdError struct { + 
AdGroupAdError AdGroupAdErrorEnum_AdGroupAdError `protobuf:"varint,21,opt,name=ad_group_ad_error,json=adGroupAdError,proto3,enum=google.ads.googleads.v1.errors.AdGroupAdErrorEnum_AdGroupAdError,oneof"` +} + +type ErrorCode_AdSharingError struct { + AdSharingError AdSharingErrorEnum_AdSharingError `protobuf:"varint,24,opt,name=ad_sharing_error,json=adSharingError,proto3,enum=google.ads.googleads.v1.errors.AdSharingErrorEnum_AdSharingError,oneof"` +} + +type ErrorCode_AdxError struct { + AdxError AdxErrorEnum_AdxError `protobuf:"varint,25,opt,name=adx_error,json=adxError,proto3,enum=google.ads.googleads.v1.errors.AdxErrorEnum_AdxError,oneof"` +} + +type ErrorCode_AssetError struct { + AssetError AssetErrorEnum_AssetError `protobuf:"varint,107,opt,name=asset_error,json=assetError,proto3,enum=google.ads.googleads.v1.errors.AssetErrorEnum_AssetError,oneof"` +} + +type ErrorCode_BiddingError struct { + BiddingError BiddingErrorEnum_BiddingError `protobuf:"varint,26,opt,name=bidding_error,json=biddingError,proto3,enum=google.ads.googleads.v1.errors.BiddingErrorEnum_BiddingError,oneof"` +} + +type ErrorCode_CampaignCriterionError struct { + CampaignCriterionError CampaignCriterionErrorEnum_CampaignCriterionError `protobuf:"varint,29,opt,name=campaign_criterion_error,json=campaignCriterionError,proto3,enum=google.ads.googleads.v1.errors.CampaignCriterionErrorEnum_CampaignCriterionError,oneof"` +} + +type ErrorCode_CollectionSizeError struct { + CollectionSizeError CollectionSizeErrorEnum_CollectionSizeError `protobuf:"varint,31,opt,name=collection_size_error,json=collectionSizeError,proto3,enum=google.ads.googleads.v1.errors.CollectionSizeErrorEnum_CollectionSizeError,oneof"` +} + +type ErrorCode_CountryCodeError struct { + CountryCodeError CountryCodeErrorEnum_CountryCodeError `protobuf:"varint,109,opt,name=country_code_error,json=countryCodeError,proto3,enum=google.ads.googleads.v1.errors.CountryCodeErrorEnum_CountryCodeError,oneof"` +} + +type ErrorCode_CriterionError struct { + CriterionError CriterionErrorEnum_CriterionError `protobuf:"varint,32,opt,name=criterion_error,json=criterionError,proto3,enum=google.ads.googleads.v1.errors.CriterionErrorEnum_CriterionError,oneof"` +} + +type ErrorCode_CustomerError struct { + CustomerError CustomerErrorEnum_CustomerError `protobuf:"varint,90,opt,name=customer_error,json=customerError,proto3,enum=google.ads.googleads.v1.errors.CustomerErrorEnum_CustomerError,oneof"` +} + +type ErrorCode_DateError struct { + DateError DateErrorEnum_DateError `protobuf:"varint,33,opt,name=date_error,json=dateError,proto3,enum=google.ads.googleads.v1.errors.DateErrorEnum_DateError,oneof"` +} + +type ErrorCode_DateRangeError struct { + DateRangeError DateRangeErrorEnum_DateRangeError `protobuf:"varint,34,opt,name=date_range_error,json=dateRangeError,proto3,enum=google.ads.googleads.v1.errors.DateRangeErrorEnum_DateRangeError,oneof"` +} + +type ErrorCode_DistinctError struct { + DistinctError DistinctErrorEnum_DistinctError `protobuf:"varint,35,opt,name=distinct_error,json=distinctError,proto3,enum=google.ads.googleads.v1.errors.DistinctErrorEnum_DistinctError,oneof"` +} + +type ErrorCode_FeedAttributeReferenceError struct { + FeedAttributeReferenceError FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError `protobuf:"varint,36,opt,name=feed_attribute_reference_error,json=feedAttributeReferenceError,proto3,enum=google.ads.googleads.v1.errors.FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError,oneof"` +} + +type ErrorCode_FunctionError struct { + FunctionError 
FunctionErrorEnum_FunctionError `protobuf:"varint,37,opt,name=function_error,json=functionError,proto3,enum=google.ads.googleads.v1.errors.FunctionErrorEnum_FunctionError,oneof"` +} + +type ErrorCode_FunctionParsingError struct { + FunctionParsingError FunctionParsingErrorEnum_FunctionParsingError `protobuf:"varint,38,opt,name=function_parsing_error,json=functionParsingError,proto3,enum=google.ads.googleads.v1.errors.FunctionParsingErrorEnum_FunctionParsingError,oneof"` +} + +type ErrorCode_IdError struct { + IdError IdErrorEnum_IdError `protobuf:"varint,39,opt,name=id_error,json=idError,proto3,enum=google.ads.googleads.v1.errors.IdErrorEnum_IdError,oneof"` +} + +type ErrorCode_ImageError struct { + ImageError ImageErrorEnum_ImageError `protobuf:"varint,40,opt,name=image_error,json=imageError,proto3,enum=google.ads.googleads.v1.errors.ImageErrorEnum_ImageError,oneof"` +} + +type ErrorCode_LanguageCodeError struct { + LanguageCodeError LanguageCodeErrorEnum_LanguageCodeError `protobuf:"varint,110,opt,name=language_code_error,json=languageCodeError,proto3,enum=google.ads.googleads.v1.errors.LanguageCodeErrorEnum_LanguageCodeError,oneof"` +} + +type ErrorCode_MediaBundleError struct { + MediaBundleError MediaBundleErrorEnum_MediaBundleError `protobuf:"varint,42,opt,name=media_bundle_error,json=mediaBundleError,proto3,enum=google.ads.googleads.v1.errors.MediaBundleErrorEnum_MediaBundleError,oneof"` +} + +type ErrorCode_MediaUploadError struct { + MediaUploadError MediaUploadErrorEnum_MediaUploadError `protobuf:"varint,116,opt,name=media_upload_error,json=mediaUploadError,proto3,enum=google.ads.googleads.v1.errors.MediaUploadErrorEnum_MediaUploadError,oneof"` +} + +type ErrorCode_MediaFileError struct { + MediaFileError MediaFileErrorEnum_MediaFileError `protobuf:"varint,86,opt,name=media_file_error,json=mediaFileError,proto3,enum=google.ads.googleads.v1.errors.MediaFileErrorEnum_MediaFileError,oneof"` +} + +type ErrorCode_MultiplierError struct { + MultiplierError MultiplierErrorEnum_MultiplierError `protobuf:"varint,44,opt,name=multiplier_error,json=multiplierError,proto3,enum=google.ads.googleads.v1.errors.MultiplierErrorEnum_MultiplierError,oneof"` +} + +type ErrorCode_NewResourceCreationError struct { + NewResourceCreationError NewResourceCreationErrorEnum_NewResourceCreationError `protobuf:"varint,45,opt,name=new_resource_creation_error,json=newResourceCreationError,proto3,enum=google.ads.googleads.v1.errors.NewResourceCreationErrorEnum_NewResourceCreationError,oneof"` +} + +type ErrorCode_NotEmptyError struct { + NotEmptyError NotEmptyErrorEnum_NotEmptyError `protobuf:"varint,46,opt,name=not_empty_error,json=notEmptyError,proto3,enum=google.ads.googleads.v1.errors.NotEmptyErrorEnum_NotEmptyError,oneof"` +} + +type ErrorCode_NullError struct { + NullError NullErrorEnum_NullError `protobuf:"varint,47,opt,name=null_error,json=nullError,proto3,enum=google.ads.googleads.v1.errors.NullErrorEnum_NullError,oneof"` +} + +type ErrorCode_OperatorError struct { + OperatorError OperatorErrorEnum_OperatorError `protobuf:"varint,48,opt,name=operator_error,json=operatorError,proto3,enum=google.ads.googleads.v1.errors.OperatorErrorEnum_OperatorError,oneof"` +} + +type ErrorCode_RangeError struct { + RangeError RangeErrorEnum_RangeError `protobuf:"varint,49,opt,name=range_error,json=rangeError,proto3,enum=google.ads.googleads.v1.errors.RangeErrorEnum_RangeError,oneof"` +} + +type ErrorCode_RecommendationError struct { + RecommendationError RecommendationErrorEnum_RecommendationError 
`protobuf:"varint,58,opt,name=recommendation_error,json=recommendationError,proto3,enum=google.ads.googleads.v1.errors.RecommendationErrorEnum_RecommendationError,oneof"` +} + +type ErrorCode_RegionCodeError struct { + RegionCodeError RegionCodeErrorEnum_RegionCodeError `protobuf:"varint,51,opt,name=region_code_error,json=regionCodeError,proto3,enum=google.ads.googleads.v1.errors.RegionCodeErrorEnum_RegionCodeError,oneof"` +} + +type ErrorCode_SettingError struct { + SettingError SettingErrorEnum_SettingError `protobuf:"varint,52,opt,name=setting_error,json=settingError,proto3,enum=google.ads.googleads.v1.errors.SettingErrorEnum_SettingError,oneof"` +} + +type ErrorCode_StringFormatError struct { + StringFormatError StringFormatErrorEnum_StringFormatError `protobuf:"varint,53,opt,name=string_format_error,json=stringFormatError,proto3,enum=google.ads.googleads.v1.errors.StringFormatErrorEnum_StringFormatError,oneof"` +} + +type ErrorCode_StringLengthError struct { + StringLengthError StringLengthErrorEnum_StringLengthError `protobuf:"varint,54,opt,name=string_length_error,json=stringLengthError,proto3,enum=google.ads.googleads.v1.errors.StringLengthErrorEnum_StringLengthError,oneof"` +} + +type ErrorCode_OperationAccessDeniedError struct { + OperationAccessDeniedError OperationAccessDeniedErrorEnum_OperationAccessDeniedError `protobuf:"varint,55,opt,name=operation_access_denied_error,json=operationAccessDeniedError,proto3,enum=google.ads.googleads.v1.errors.OperationAccessDeniedErrorEnum_OperationAccessDeniedError,oneof"` +} + +type ErrorCode_ResourceAccessDeniedError struct { + ResourceAccessDeniedError ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError `protobuf:"varint,56,opt,name=resource_access_denied_error,json=resourceAccessDeniedError,proto3,enum=google.ads.googleads.v1.errors.ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError,oneof"` +} + +type ErrorCode_ResourceCountLimitExceededError struct { + ResourceCountLimitExceededError ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError `protobuf:"varint,57,opt,name=resource_count_limit_exceeded_error,json=resourceCountLimitExceededError,proto3,enum=google.ads.googleads.v1.errors.ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError,oneof"` +} + +type ErrorCode_YoutubeVideoRegistrationError struct { + YoutubeVideoRegistrationError YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError `protobuf:"varint,117,opt,name=youtube_video_registration_error,json=youtubeVideoRegistrationError,proto3,enum=google.ads.googleads.v1.errors.YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError,oneof"` +} + +type ErrorCode_AdGroupBidModifierError struct { + AdGroupBidModifierError AdGroupBidModifierErrorEnum_AdGroupBidModifierError `protobuf:"varint,59,opt,name=ad_group_bid_modifier_error,json=adGroupBidModifierError,proto3,enum=google.ads.googleads.v1.errors.AdGroupBidModifierErrorEnum_AdGroupBidModifierError,oneof"` +} + +type ErrorCode_ContextError struct { + ContextError ContextErrorEnum_ContextError `protobuf:"varint,60,opt,name=context_error,json=contextError,proto3,enum=google.ads.googleads.v1.errors.ContextErrorEnum_ContextError,oneof"` +} + +type ErrorCode_FieldError struct { + FieldError FieldErrorEnum_FieldError `protobuf:"varint,61,opt,name=field_error,json=fieldError,proto3,enum=google.ads.googleads.v1.errors.FieldErrorEnum_FieldError,oneof"` +} + +type ErrorCode_SharedSetError struct { + SharedSetError SharedSetErrorEnum_SharedSetError 
`protobuf:"varint,62,opt,name=shared_set_error,json=sharedSetError,proto3,enum=google.ads.googleads.v1.errors.SharedSetErrorEnum_SharedSetError,oneof"` +} + +type ErrorCode_SharedCriterionError struct { + SharedCriterionError SharedCriterionErrorEnum_SharedCriterionError `protobuf:"varint,63,opt,name=shared_criterion_error,json=sharedCriterionError,proto3,enum=google.ads.googleads.v1.errors.SharedCriterionErrorEnum_SharedCriterionError,oneof"` +} + +type ErrorCode_CampaignSharedSetError struct { + CampaignSharedSetError CampaignSharedSetErrorEnum_CampaignSharedSetError `protobuf:"varint,64,opt,name=campaign_shared_set_error,json=campaignSharedSetError,proto3,enum=google.ads.googleads.v1.errors.CampaignSharedSetErrorEnum_CampaignSharedSetError,oneof"` +} + +type ErrorCode_ConversionActionError struct { + ConversionActionError ConversionActionErrorEnum_ConversionActionError `protobuf:"varint,65,opt,name=conversion_action_error,json=conversionActionError,proto3,enum=google.ads.googleads.v1.errors.ConversionActionErrorEnum_ConversionActionError,oneof"` +} + +type ErrorCode_ConversionAdjustmentUploadError struct { + ConversionAdjustmentUploadError ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError `protobuf:"varint,115,opt,name=conversion_adjustment_upload_error,json=conversionAdjustmentUploadError,proto3,enum=google.ads.googleads.v1.errors.ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError,oneof"` +} + +type ErrorCode_ConversionUploadError struct { + ConversionUploadError ConversionUploadErrorEnum_ConversionUploadError `protobuf:"varint,111,opt,name=conversion_upload_error,json=conversionUploadError,proto3,enum=google.ads.googleads.v1.errors.ConversionUploadErrorEnum_ConversionUploadError,oneof"` +} + +type ErrorCode_HeaderError struct { + HeaderError HeaderErrorEnum_HeaderError `protobuf:"varint,66,opt,name=header_error,json=headerError,proto3,enum=google.ads.googleads.v1.errors.HeaderErrorEnum_HeaderError,oneof"` +} + +type ErrorCode_DatabaseError struct { + DatabaseError DatabaseErrorEnum_DatabaseError `protobuf:"varint,67,opt,name=database_error,json=databaseError,proto3,enum=google.ads.googleads.v1.errors.DatabaseErrorEnum_DatabaseError,oneof"` +} + +type ErrorCode_PolicyFindingError struct { + PolicyFindingError PolicyFindingErrorEnum_PolicyFindingError `protobuf:"varint,68,opt,name=policy_finding_error,json=policyFindingError,proto3,enum=google.ads.googleads.v1.errors.PolicyFindingErrorEnum_PolicyFindingError,oneof"` +} + +type ErrorCode_EnumError struct { + EnumError EnumErrorEnum_EnumError `protobuf:"varint,70,opt,name=enum_error,json=enumError,proto3,enum=google.ads.googleads.v1.errors.EnumErrorEnum_EnumError,oneof"` +} + +type ErrorCode_KeywordPlanError struct { + KeywordPlanError KeywordPlanErrorEnum_KeywordPlanError `protobuf:"varint,71,opt,name=keyword_plan_error,json=keywordPlanError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanErrorEnum_KeywordPlanError,oneof"` +} + +type ErrorCode_KeywordPlanCampaignError struct { + KeywordPlanCampaignError KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError `protobuf:"varint,72,opt,name=keyword_plan_campaign_error,json=keywordPlanCampaignError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError,oneof"` +} + +type ErrorCode_KeywordPlanNegativeKeywordError struct { + KeywordPlanNegativeKeywordError KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError 
`protobuf:"varint,73,opt,name=keyword_plan_negative_keyword_error,json=keywordPlanNegativeKeywordError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError,oneof"` +} + +type ErrorCode_KeywordPlanAdGroupError struct { + KeywordPlanAdGroupError KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError `protobuf:"varint,74,opt,name=keyword_plan_ad_group_error,json=keywordPlanAdGroupError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError,oneof"` +} + +type ErrorCode_KeywordPlanKeywordError struct { + KeywordPlanKeywordError KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError `protobuf:"varint,75,opt,name=keyword_plan_keyword_error,json=keywordPlanKeywordError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError,oneof"` +} + +type ErrorCode_KeywordPlanIdeaError struct { + KeywordPlanIdeaError KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError `protobuf:"varint,76,opt,name=keyword_plan_idea_error,json=keywordPlanIdeaError,proto3,enum=google.ads.googleads.v1.errors.KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError,oneof"` +} + +type ErrorCode_AccountBudgetProposalError struct { + AccountBudgetProposalError AccountBudgetProposalErrorEnum_AccountBudgetProposalError `protobuf:"varint,77,opt,name=account_budget_proposal_error,json=accountBudgetProposalError,proto3,enum=google.ads.googleads.v1.errors.AccountBudgetProposalErrorEnum_AccountBudgetProposalError,oneof"` +} + +type ErrorCode_UserListError struct { + UserListError UserListErrorEnum_UserListError `protobuf:"varint,78,opt,name=user_list_error,json=userListError,proto3,enum=google.ads.googleads.v1.errors.UserListErrorEnum_UserListError,oneof"` +} + +type ErrorCode_ChangeStatusError struct { + ChangeStatusError ChangeStatusErrorEnum_ChangeStatusError `protobuf:"varint,79,opt,name=change_status_error,json=changeStatusError,proto3,enum=google.ads.googleads.v1.errors.ChangeStatusErrorEnum_ChangeStatusError,oneof"` +} + +type ErrorCode_FeedError struct { + FeedError FeedErrorEnum_FeedError `protobuf:"varint,80,opt,name=feed_error,json=feedError,proto3,enum=google.ads.googleads.v1.errors.FeedErrorEnum_FeedError,oneof"` +} + +type ErrorCode_GeoTargetConstantSuggestionError struct { + GeoTargetConstantSuggestionError GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError `protobuf:"varint,81,opt,name=geo_target_constant_suggestion_error,json=geoTargetConstantSuggestionError,proto3,enum=google.ads.googleads.v1.errors.GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError,oneof"` +} + +type ErrorCode_FeedItemError struct { + FeedItemError FeedItemErrorEnum_FeedItemError `protobuf:"varint,83,opt,name=feed_item_error,json=feedItemError,proto3,enum=google.ads.googleads.v1.errors.FeedItemErrorEnum_FeedItemError,oneof"` +} + +type ErrorCode_LabelError struct { + LabelError LabelErrorEnum_LabelError `protobuf:"varint,84,opt,name=label_error,json=labelError,proto3,enum=google.ads.googleads.v1.errors.LabelErrorEnum_LabelError,oneof"` +} + +type ErrorCode_BillingSetupError struct { + BillingSetupError BillingSetupErrorEnum_BillingSetupError `protobuf:"varint,87,opt,name=billing_setup_error,json=billingSetupError,proto3,enum=google.ads.googleads.v1.errors.BillingSetupErrorEnum_BillingSetupError,oneof"` +} + +type ErrorCode_CustomerClientLinkError struct { + CustomerClientLinkError CustomerClientLinkErrorEnum_CustomerClientLinkError 
`protobuf:"varint,88,opt,name=customer_client_link_error,json=customerClientLinkError,proto3,enum=google.ads.googleads.v1.errors.CustomerClientLinkErrorEnum_CustomerClientLinkError,oneof"` +} + +type ErrorCode_CustomerManagerLinkError struct { + CustomerManagerLinkError CustomerManagerLinkErrorEnum_CustomerManagerLinkError `protobuf:"varint,91,opt,name=customer_manager_link_error,json=customerManagerLinkError,proto3,enum=google.ads.googleads.v1.errors.CustomerManagerLinkErrorEnum_CustomerManagerLinkError,oneof"` +} + +type ErrorCode_FeedMappingError struct { + FeedMappingError FeedMappingErrorEnum_FeedMappingError `protobuf:"varint,92,opt,name=feed_mapping_error,json=feedMappingError,proto3,enum=google.ads.googleads.v1.errors.FeedMappingErrorEnum_FeedMappingError,oneof"` +} + +type ErrorCode_CustomerFeedError struct { + CustomerFeedError CustomerFeedErrorEnum_CustomerFeedError `protobuf:"varint,93,opt,name=customer_feed_error,json=customerFeedError,proto3,enum=google.ads.googleads.v1.errors.CustomerFeedErrorEnum_CustomerFeedError,oneof"` +} + +type ErrorCode_AdGroupFeedError struct { + AdGroupFeedError AdGroupFeedErrorEnum_AdGroupFeedError `protobuf:"varint,94,opt,name=ad_group_feed_error,json=adGroupFeedError,proto3,enum=google.ads.googleads.v1.errors.AdGroupFeedErrorEnum_AdGroupFeedError,oneof"` +} + +type ErrorCode_CampaignFeedError struct { + CampaignFeedError CampaignFeedErrorEnum_CampaignFeedError `protobuf:"varint,96,opt,name=campaign_feed_error,json=campaignFeedError,proto3,enum=google.ads.googleads.v1.errors.CampaignFeedErrorEnum_CampaignFeedError,oneof"` +} + +type ErrorCode_CustomInterestError struct { + CustomInterestError CustomInterestErrorEnum_CustomInterestError `protobuf:"varint,97,opt,name=custom_interest_error,json=customInterestError,proto3,enum=google.ads.googleads.v1.errors.CustomInterestErrorEnum_CustomInterestError,oneof"` +} + +type ErrorCode_ExtensionFeedItemError struct { + ExtensionFeedItemError ExtensionFeedItemErrorEnum_ExtensionFeedItemError `protobuf:"varint,100,opt,name=extension_feed_item_error,json=extensionFeedItemError,proto3,enum=google.ads.googleads.v1.errors.ExtensionFeedItemErrorEnum_ExtensionFeedItemError,oneof"` +} + +type ErrorCode_AdParameterError struct { + AdParameterError AdParameterErrorEnum_AdParameterError `protobuf:"varint,101,opt,name=ad_parameter_error,json=adParameterError,proto3,enum=google.ads.googleads.v1.errors.AdParameterErrorEnum_AdParameterError,oneof"` +} + +type ErrorCode_FeedItemValidationError struct { + FeedItemValidationError FeedItemValidationErrorEnum_FeedItemValidationError `protobuf:"varint,102,opt,name=feed_item_validation_error,json=feedItemValidationError,proto3,enum=google.ads.googleads.v1.errors.FeedItemValidationErrorEnum_FeedItemValidationError,oneof"` +} + +type ErrorCode_ExtensionSettingError struct { + ExtensionSettingError ExtensionSettingErrorEnum_ExtensionSettingError `protobuf:"varint,103,opt,name=extension_setting_error,json=extensionSettingError,proto3,enum=google.ads.googleads.v1.errors.ExtensionSettingErrorEnum_ExtensionSettingError,oneof"` +} + +type ErrorCode_FeedItemTargetError struct { + FeedItemTargetError FeedItemTargetErrorEnum_FeedItemTargetError `protobuf:"varint,104,opt,name=feed_item_target_error,json=feedItemTargetError,proto3,enum=google.ads.googleads.v1.errors.FeedItemTargetErrorEnum_FeedItemTargetError,oneof"` +} + +type ErrorCode_PolicyViolationError struct { + PolicyViolationError PolicyViolationErrorEnum_PolicyViolationError 
`protobuf:"varint,105,opt,name=policy_violation_error,json=policyViolationError,proto3,enum=google.ads.googleads.v1.errors.PolicyViolationErrorEnum_PolicyViolationError,oneof"` +} + +type ErrorCode_MutateJobError struct { + MutateJobError MutateJobErrorEnum_MutateJobError `protobuf:"varint,108,opt,name=mutate_job_error,json=mutateJobError,proto3,enum=google.ads.googleads.v1.errors.MutateJobErrorEnum_MutateJobError,oneof"` +} + +type ErrorCode_PartialFailureError struct { + PartialFailureError PartialFailureErrorEnum_PartialFailureError `protobuf:"varint,112,opt,name=partial_failure_error,json=partialFailureError,proto3,enum=google.ads.googleads.v1.errors.PartialFailureErrorEnum_PartialFailureError,oneof"` +} + +type ErrorCode_PolicyValidationParameterError struct { + PolicyValidationParameterError PolicyValidationParameterErrorEnum_PolicyValidationParameterError `protobuf:"varint,114,opt,name=policy_validation_parameter_error,json=policyValidationParameterError,proto3,enum=google.ads.googleads.v1.errors.PolicyValidationParameterErrorEnum_PolicyValidationParameterError,oneof"` +} + +type ErrorCode_SizeLimitError struct { + SizeLimitError SizeLimitErrorEnum_SizeLimitError `protobuf:"varint,118,opt,name=size_limit_error,json=sizeLimitError,proto3,enum=google.ads.googleads.v1.errors.SizeLimitErrorEnum_SizeLimitError,oneof"` +} + +func (*ErrorCode_RequestError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_BiddingStrategyError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_UrlFieldError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ListOperationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_QueryError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MutateError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FieldMaskError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AuthorizationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_InternalError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_QuotaError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdGroupError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CampaignBudgetError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CampaignError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AuthenticationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdGroupCriterionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdCustomizerError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdGroupAdError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdSharingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdxError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AssetError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_BiddingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CampaignCriterionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CollectionSizeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CountryCodeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CriterionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CustomerError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_DateError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_DateRangeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_DistinctError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedAttributeReferenceError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FunctionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FunctionParsingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_IdError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ImageError) 
isErrorCode_ErrorCode() {} + +func (*ErrorCode_LanguageCodeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MediaBundleError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MediaUploadError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MediaFileError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MultiplierError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_NewResourceCreationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_NotEmptyError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_NullError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_OperatorError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_RangeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_RecommendationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_RegionCodeError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_SettingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_StringFormatError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_StringLengthError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_OperationAccessDeniedError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ResourceAccessDeniedError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ResourceCountLimitExceededError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_YoutubeVideoRegistrationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdGroupBidModifierError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ContextError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FieldError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_SharedSetError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_SharedCriterionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CampaignSharedSetError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ConversionActionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ConversionAdjustmentUploadError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ConversionUploadError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_HeaderError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_DatabaseError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_PolicyFindingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_EnumError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanCampaignError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanNegativeKeywordError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanAdGroupError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanKeywordError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_KeywordPlanIdeaError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AccountBudgetProposalError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_UserListError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ChangeStatusError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_GeoTargetConstantSuggestionError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedItemError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_LabelError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_BillingSetupError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CustomerClientLinkError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CustomerManagerLinkError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedMappingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CustomerFeedError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdGroupFeedError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_CampaignFeedError) isErrorCode_ErrorCode() {} + +func 
(*ErrorCode_CustomInterestError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ExtensionFeedItemError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_AdParameterError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedItemValidationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_ExtensionSettingError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_FeedItemTargetError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_PolicyViolationError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_MutateJobError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_PartialFailureError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_PolicyValidationParameterError) isErrorCode_ErrorCode() {} + +func (*ErrorCode_SizeLimitError) isErrorCode_ErrorCode() {} + +func (m *ErrorCode) GetErrorCode() isErrorCode_ErrorCode { + if m != nil { + return m.ErrorCode + } + return nil +} + +func (m *ErrorCode) GetRequestError() RequestErrorEnum_RequestError { + if x, ok := m.GetErrorCode().(*ErrorCode_RequestError); ok { + return x.RequestError + } + return RequestErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetBiddingStrategyError() BiddingStrategyErrorEnum_BiddingStrategyError { + if x, ok := m.GetErrorCode().(*ErrorCode_BiddingStrategyError); ok { + return x.BiddingStrategyError + } + return BiddingStrategyErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetUrlFieldError() UrlFieldErrorEnum_UrlFieldError { + if x, ok := m.GetErrorCode().(*ErrorCode_UrlFieldError); ok { + return x.UrlFieldError + } + return UrlFieldErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetListOperationError() ListOperationErrorEnum_ListOperationError { + if x, ok := m.GetErrorCode().(*ErrorCode_ListOperationError); ok { + return x.ListOperationError + } + return ListOperationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetQueryError() QueryErrorEnum_QueryError { + if x, ok := m.GetErrorCode().(*ErrorCode_QueryError); ok { + return x.QueryError + } + return QueryErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMutateError() MutateErrorEnum_MutateError { + if x, ok := m.GetErrorCode().(*ErrorCode_MutateError); ok { + return x.MutateError + } + return MutateErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFieldMaskError() FieldMaskErrorEnum_FieldMaskError { + if x, ok := m.GetErrorCode().(*ErrorCode_FieldMaskError); ok { + return x.FieldMaskError + } + return FieldMaskErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAuthorizationError() AuthorizationErrorEnum_AuthorizationError { + if x, ok := m.GetErrorCode().(*ErrorCode_AuthorizationError); ok { + return x.AuthorizationError + } + return AuthorizationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetInternalError() InternalErrorEnum_InternalError { + if x, ok := m.GetErrorCode().(*ErrorCode_InternalError); ok { + return x.InternalError + } + return InternalErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetQuotaError() QuotaErrorEnum_QuotaError { + if x, ok := m.GetErrorCode().(*ErrorCode_QuotaError); ok { + return x.QuotaError + } + return QuotaErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdError() AdErrorEnum_AdError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdError); ok { + return x.AdError + } + return AdErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdGroupError() AdGroupErrorEnum_AdGroupError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdGroupError); ok { + return x.AdGroupError + } + return AdGroupErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCampaignBudgetError() CampaignBudgetErrorEnum_CampaignBudgetError { + if x, ok := 
m.GetErrorCode().(*ErrorCode_CampaignBudgetError); ok { + return x.CampaignBudgetError + } + return CampaignBudgetErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCampaignError() CampaignErrorEnum_CampaignError { + if x, ok := m.GetErrorCode().(*ErrorCode_CampaignError); ok { + return x.CampaignError + } + return CampaignErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAuthenticationError() AuthenticationErrorEnum_AuthenticationError { + if x, ok := m.GetErrorCode().(*ErrorCode_AuthenticationError); ok { + return x.AuthenticationError + } + return AuthenticationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdGroupCriterionError() AdGroupCriterionErrorEnum_AdGroupCriterionError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdGroupCriterionError); ok { + return x.AdGroupCriterionError + } + return AdGroupCriterionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdCustomizerError() AdCustomizerErrorEnum_AdCustomizerError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdCustomizerError); ok { + return x.AdCustomizerError + } + return AdCustomizerErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdGroupAdError() AdGroupAdErrorEnum_AdGroupAdError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdGroupAdError); ok { + return x.AdGroupAdError + } + return AdGroupAdErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdSharingError() AdSharingErrorEnum_AdSharingError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdSharingError); ok { + return x.AdSharingError + } + return AdSharingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdxError() AdxErrorEnum_AdxError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdxError); ok { + return x.AdxError + } + return AdxErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAssetError() AssetErrorEnum_AssetError { + if x, ok := m.GetErrorCode().(*ErrorCode_AssetError); ok { + return x.AssetError + } + return AssetErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetBiddingError() BiddingErrorEnum_BiddingError { + if x, ok := m.GetErrorCode().(*ErrorCode_BiddingError); ok { + return x.BiddingError + } + return BiddingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCampaignCriterionError() CampaignCriterionErrorEnum_CampaignCriterionError { + if x, ok := m.GetErrorCode().(*ErrorCode_CampaignCriterionError); ok { + return x.CampaignCriterionError + } + return CampaignCriterionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCollectionSizeError() CollectionSizeErrorEnum_CollectionSizeError { + if x, ok := m.GetErrorCode().(*ErrorCode_CollectionSizeError); ok { + return x.CollectionSizeError + } + return CollectionSizeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCountryCodeError() CountryCodeErrorEnum_CountryCodeError { + if x, ok := m.GetErrorCode().(*ErrorCode_CountryCodeError); ok { + return x.CountryCodeError + } + return CountryCodeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCriterionError() CriterionErrorEnum_CriterionError { + if x, ok := m.GetErrorCode().(*ErrorCode_CriterionError); ok { + return x.CriterionError + } + return CriterionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCustomerError() CustomerErrorEnum_CustomerError { + if x, ok := m.GetErrorCode().(*ErrorCode_CustomerError); ok { + return x.CustomerError + } + return CustomerErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetDateError() DateErrorEnum_DateError { + if x, ok := m.GetErrorCode().(*ErrorCode_DateError); ok { + return x.DateError + } + return DateErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetDateRangeError() DateRangeErrorEnum_DateRangeError { + if x, ok := 
m.GetErrorCode().(*ErrorCode_DateRangeError); ok { + return x.DateRangeError + } + return DateRangeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetDistinctError() DistinctErrorEnum_DistinctError { + if x, ok := m.GetErrorCode().(*ErrorCode_DistinctError); ok { + return x.DistinctError + } + return DistinctErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedAttributeReferenceError() FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedAttributeReferenceError); ok { + return x.FeedAttributeReferenceError + } + return FeedAttributeReferenceErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFunctionError() FunctionErrorEnum_FunctionError { + if x, ok := m.GetErrorCode().(*ErrorCode_FunctionError); ok { + return x.FunctionError + } + return FunctionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFunctionParsingError() FunctionParsingErrorEnum_FunctionParsingError { + if x, ok := m.GetErrorCode().(*ErrorCode_FunctionParsingError); ok { + return x.FunctionParsingError + } + return FunctionParsingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetIdError() IdErrorEnum_IdError { + if x, ok := m.GetErrorCode().(*ErrorCode_IdError); ok { + return x.IdError + } + return IdErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetImageError() ImageErrorEnum_ImageError { + if x, ok := m.GetErrorCode().(*ErrorCode_ImageError); ok { + return x.ImageError + } + return ImageErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetLanguageCodeError() LanguageCodeErrorEnum_LanguageCodeError { + if x, ok := m.GetErrorCode().(*ErrorCode_LanguageCodeError); ok { + return x.LanguageCodeError + } + return LanguageCodeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMediaBundleError() MediaBundleErrorEnum_MediaBundleError { + if x, ok := m.GetErrorCode().(*ErrorCode_MediaBundleError); ok { + return x.MediaBundleError + } + return MediaBundleErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMediaUploadError() MediaUploadErrorEnum_MediaUploadError { + if x, ok := m.GetErrorCode().(*ErrorCode_MediaUploadError); ok { + return x.MediaUploadError + } + return MediaUploadErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMediaFileError() MediaFileErrorEnum_MediaFileError { + if x, ok := m.GetErrorCode().(*ErrorCode_MediaFileError); ok { + return x.MediaFileError + } + return MediaFileErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMultiplierError() MultiplierErrorEnum_MultiplierError { + if x, ok := m.GetErrorCode().(*ErrorCode_MultiplierError); ok { + return x.MultiplierError + } + return MultiplierErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetNewResourceCreationError() NewResourceCreationErrorEnum_NewResourceCreationError { + if x, ok := m.GetErrorCode().(*ErrorCode_NewResourceCreationError); ok { + return x.NewResourceCreationError + } + return NewResourceCreationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetNotEmptyError() NotEmptyErrorEnum_NotEmptyError { + if x, ok := m.GetErrorCode().(*ErrorCode_NotEmptyError); ok { + return x.NotEmptyError + } + return NotEmptyErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetNullError() NullErrorEnum_NullError { + if x, ok := m.GetErrorCode().(*ErrorCode_NullError); ok { + return x.NullError + } + return NullErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetOperatorError() OperatorErrorEnum_OperatorError { + if x, ok := m.GetErrorCode().(*ErrorCode_OperatorError); ok { + return x.OperatorError + } + return OperatorErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetRangeError() RangeErrorEnum_RangeError { + if 
x, ok := m.GetErrorCode().(*ErrorCode_RangeError); ok { + return x.RangeError + } + return RangeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetRecommendationError() RecommendationErrorEnum_RecommendationError { + if x, ok := m.GetErrorCode().(*ErrorCode_RecommendationError); ok { + return x.RecommendationError + } + return RecommendationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetRegionCodeError() RegionCodeErrorEnum_RegionCodeError { + if x, ok := m.GetErrorCode().(*ErrorCode_RegionCodeError); ok { + return x.RegionCodeError + } + return RegionCodeErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetSettingError() SettingErrorEnum_SettingError { + if x, ok := m.GetErrorCode().(*ErrorCode_SettingError); ok { + return x.SettingError + } + return SettingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetStringFormatError() StringFormatErrorEnum_StringFormatError { + if x, ok := m.GetErrorCode().(*ErrorCode_StringFormatError); ok { + return x.StringFormatError + } + return StringFormatErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetStringLengthError() StringLengthErrorEnum_StringLengthError { + if x, ok := m.GetErrorCode().(*ErrorCode_StringLengthError); ok { + return x.StringLengthError + } + return StringLengthErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetOperationAccessDeniedError() OperationAccessDeniedErrorEnum_OperationAccessDeniedError { + if x, ok := m.GetErrorCode().(*ErrorCode_OperationAccessDeniedError); ok { + return x.OperationAccessDeniedError + } + return OperationAccessDeniedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetResourceAccessDeniedError() ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError { + if x, ok := m.GetErrorCode().(*ErrorCode_ResourceAccessDeniedError); ok { + return x.ResourceAccessDeniedError + } + return ResourceAccessDeniedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetResourceCountLimitExceededError() ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError { + if x, ok := m.GetErrorCode().(*ErrorCode_ResourceCountLimitExceededError); ok { + return x.ResourceCountLimitExceededError + } + return ResourceCountLimitExceededErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetYoutubeVideoRegistrationError() YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError { + if x, ok := m.GetErrorCode().(*ErrorCode_YoutubeVideoRegistrationError); ok { + return x.YoutubeVideoRegistrationError + } + return YoutubeVideoRegistrationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdGroupBidModifierError() AdGroupBidModifierErrorEnum_AdGroupBidModifierError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdGroupBidModifierError); ok { + return x.AdGroupBidModifierError + } + return AdGroupBidModifierErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetContextError() ContextErrorEnum_ContextError { + if x, ok := m.GetErrorCode().(*ErrorCode_ContextError); ok { + return x.ContextError + } + return ContextErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFieldError() FieldErrorEnum_FieldError { + if x, ok := m.GetErrorCode().(*ErrorCode_FieldError); ok { + return x.FieldError + } + return FieldErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetSharedSetError() SharedSetErrorEnum_SharedSetError { + if x, ok := m.GetErrorCode().(*ErrorCode_SharedSetError); ok { + return x.SharedSetError + } + return SharedSetErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetSharedCriterionError() SharedCriterionErrorEnum_SharedCriterionError { + if x, ok := m.GetErrorCode().(*ErrorCode_SharedCriterionError); ok { + return x.SharedCriterionError + } 
+ return SharedCriterionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCampaignSharedSetError() CampaignSharedSetErrorEnum_CampaignSharedSetError { + if x, ok := m.GetErrorCode().(*ErrorCode_CampaignSharedSetError); ok { + return x.CampaignSharedSetError + } + return CampaignSharedSetErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetConversionActionError() ConversionActionErrorEnum_ConversionActionError { + if x, ok := m.GetErrorCode().(*ErrorCode_ConversionActionError); ok { + return x.ConversionActionError + } + return ConversionActionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetConversionAdjustmentUploadError() ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError { + if x, ok := m.GetErrorCode().(*ErrorCode_ConversionAdjustmentUploadError); ok { + return x.ConversionAdjustmentUploadError + } + return ConversionAdjustmentUploadErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetConversionUploadError() ConversionUploadErrorEnum_ConversionUploadError { + if x, ok := m.GetErrorCode().(*ErrorCode_ConversionUploadError); ok { + return x.ConversionUploadError + } + return ConversionUploadErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetHeaderError() HeaderErrorEnum_HeaderError { + if x, ok := m.GetErrorCode().(*ErrorCode_HeaderError); ok { + return x.HeaderError + } + return HeaderErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetDatabaseError() DatabaseErrorEnum_DatabaseError { + if x, ok := m.GetErrorCode().(*ErrorCode_DatabaseError); ok { + return x.DatabaseError + } + return DatabaseErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetPolicyFindingError() PolicyFindingErrorEnum_PolicyFindingError { + if x, ok := m.GetErrorCode().(*ErrorCode_PolicyFindingError); ok { + return x.PolicyFindingError + } + return PolicyFindingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetEnumError() EnumErrorEnum_EnumError { + if x, ok := m.GetErrorCode().(*ErrorCode_EnumError); ok { + return x.EnumError + } + return EnumErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanError() KeywordPlanErrorEnum_KeywordPlanError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanError); ok { + return x.KeywordPlanError + } + return KeywordPlanErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanCampaignError() KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanCampaignError); ok { + return x.KeywordPlanCampaignError + } + return KeywordPlanCampaignErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanNegativeKeywordError() KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanNegativeKeywordError); ok { + return x.KeywordPlanNegativeKeywordError + } + return KeywordPlanNegativeKeywordErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanAdGroupError() KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanAdGroupError); ok { + return x.KeywordPlanAdGroupError + } + return KeywordPlanAdGroupErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanKeywordError() KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanKeywordError); ok { + return x.KeywordPlanKeywordError + } + return KeywordPlanKeywordErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetKeywordPlanIdeaError() KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError { + if x, ok := m.GetErrorCode().(*ErrorCode_KeywordPlanIdeaError); ok { + return 
x.KeywordPlanIdeaError + } + return KeywordPlanIdeaErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAccountBudgetProposalError() AccountBudgetProposalErrorEnum_AccountBudgetProposalError { + if x, ok := m.GetErrorCode().(*ErrorCode_AccountBudgetProposalError); ok { + return x.AccountBudgetProposalError + } + return AccountBudgetProposalErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetUserListError() UserListErrorEnum_UserListError { + if x, ok := m.GetErrorCode().(*ErrorCode_UserListError); ok { + return x.UserListError + } + return UserListErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetChangeStatusError() ChangeStatusErrorEnum_ChangeStatusError { + if x, ok := m.GetErrorCode().(*ErrorCode_ChangeStatusError); ok { + return x.ChangeStatusError + } + return ChangeStatusErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedError() FeedErrorEnum_FeedError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedError); ok { + return x.FeedError + } + return FeedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetGeoTargetConstantSuggestionError() GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError { + if x, ok := m.GetErrorCode().(*ErrorCode_GeoTargetConstantSuggestionError); ok { + return x.GeoTargetConstantSuggestionError + } + return GeoTargetConstantSuggestionErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedItemError() FeedItemErrorEnum_FeedItemError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedItemError); ok { + return x.FeedItemError + } + return FeedItemErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetLabelError() LabelErrorEnum_LabelError { + if x, ok := m.GetErrorCode().(*ErrorCode_LabelError); ok { + return x.LabelError + } + return LabelErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetBillingSetupError() BillingSetupErrorEnum_BillingSetupError { + if x, ok := m.GetErrorCode().(*ErrorCode_BillingSetupError); ok { + return x.BillingSetupError + } + return BillingSetupErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCustomerClientLinkError() CustomerClientLinkErrorEnum_CustomerClientLinkError { + if x, ok := m.GetErrorCode().(*ErrorCode_CustomerClientLinkError); ok { + return x.CustomerClientLinkError + } + return CustomerClientLinkErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCustomerManagerLinkError() CustomerManagerLinkErrorEnum_CustomerManagerLinkError { + if x, ok := m.GetErrorCode().(*ErrorCode_CustomerManagerLinkError); ok { + return x.CustomerManagerLinkError + } + return CustomerManagerLinkErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedMappingError() FeedMappingErrorEnum_FeedMappingError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedMappingError); ok { + return x.FeedMappingError + } + return FeedMappingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCustomerFeedError() CustomerFeedErrorEnum_CustomerFeedError { + if x, ok := m.GetErrorCode().(*ErrorCode_CustomerFeedError); ok { + return x.CustomerFeedError + } + return CustomerFeedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdGroupFeedError() AdGroupFeedErrorEnum_AdGroupFeedError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdGroupFeedError); ok { + return x.AdGroupFeedError + } + return AdGroupFeedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCampaignFeedError() CampaignFeedErrorEnum_CampaignFeedError { + if x, ok := m.GetErrorCode().(*ErrorCode_CampaignFeedError); ok { + return x.CampaignFeedError + } + return CampaignFeedErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetCustomInterestError() CustomInterestErrorEnum_CustomInterestError { + if x, ok := 
m.GetErrorCode().(*ErrorCode_CustomInterestError); ok { + return x.CustomInterestError + } + return CustomInterestErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetExtensionFeedItemError() ExtensionFeedItemErrorEnum_ExtensionFeedItemError { + if x, ok := m.GetErrorCode().(*ErrorCode_ExtensionFeedItemError); ok { + return x.ExtensionFeedItemError + } + return ExtensionFeedItemErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetAdParameterError() AdParameterErrorEnum_AdParameterError { + if x, ok := m.GetErrorCode().(*ErrorCode_AdParameterError); ok { + return x.AdParameterError + } + return AdParameterErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedItemValidationError() FeedItemValidationErrorEnum_FeedItemValidationError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedItemValidationError); ok { + return x.FeedItemValidationError + } + return FeedItemValidationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetExtensionSettingError() ExtensionSettingErrorEnum_ExtensionSettingError { + if x, ok := m.GetErrorCode().(*ErrorCode_ExtensionSettingError); ok { + return x.ExtensionSettingError + } + return ExtensionSettingErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetFeedItemTargetError() FeedItemTargetErrorEnum_FeedItemTargetError { + if x, ok := m.GetErrorCode().(*ErrorCode_FeedItemTargetError); ok { + return x.FeedItemTargetError + } + return FeedItemTargetErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetPolicyViolationError() PolicyViolationErrorEnum_PolicyViolationError { + if x, ok := m.GetErrorCode().(*ErrorCode_PolicyViolationError); ok { + return x.PolicyViolationError + } + return PolicyViolationErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetMutateJobError() MutateJobErrorEnum_MutateJobError { + if x, ok := m.GetErrorCode().(*ErrorCode_MutateJobError); ok { + return x.MutateJobError + } + return MutateJobErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetPartialFailureError() PartialFailureErrorEnum_PartialFailureError { + if x, ok := m.GetErrorCode().(*ErrorCode_PartialFailureError); ok { + return x.PartialFailureError + } + return PartialFailureErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetPolicyValidationParameterError() PolicyValidationParameterErrorEnum_PolicyValidationParameterError { + if x, ok := m.GetErrorCode().(*ErrorCode_PolicyValidationParameterError); ok { + return x.PolicyValidationParameterError + } + return PolicyValidationParameterErrorEnum_UNSPECIFIED +} + +func (m *ErrorCode) GetSizeLimitError() SizeLimitErrorEnum_SizeLimitError { + if x, ok := m.GetErrorCode().(*ErrorCode_SizeLimitError); ok { + return x.SizeLimitError + } + return SizeLimitErrorEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ErrorCode) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ErrorCode_OneofMarshaler, _ErrorCode_OneofUnmarshaler, _ErrorCode_OneofSizer, []interface{}{ + (*ErrorCode_RequestError)(nil), + (*ErrorCode_BiddingStrategyError)(nil), + (*ErrorCode_UrlFieldError)(nil), + (*ErrorCode_ListOperationError)(nil), + (*ErrorCode_QueryError)(nil), + (*ErrorCode_MutateError)(nil), + (*ErrorCode_FieldMaskError)(nil), + (*ErrorCode_AuthorizationError)(nil), + (*ErrorCode_InternalError)(nil), + (*ErrorCode_QuotaError)(nil), + (*ErrorCode_AdError)(nil), + (*ErrorCode_AdGroupError)(nil), + (*ErrorCode_CampaignBudgetError)(nil), + (*ErrorCode_CampaignError)(nil), + (*ErrorCode_AuthenticationError)(nil), + (*ErrorCode_AdGroupCriterionError)(nil), + (*ErrorCode_AdCustomizerError)(nil), + (*ErrorCode_AdGroupAdError)(nil), + (*ErrorCode_AdSharingError)(nil), + (*ErrorCode_AdxError)(nil), + (*ErrorCode_AssetError)(nil), + (*ErrorCode_BiddingError)(nil), + (*ErrorCode_CampaignCriterionError)(nil), + (*ErrorCode_CollectionSizeError)(nil), + (*ErrorCode_CountryCodeError)(nil), + (*ErrorCode_CriterionError)(nil), + (*ErrorCode_CustomerError)(nil), + (*ErrorCode_DateError)(nil), + (*ErrorCode_DateRangeError)(nil), + (*ErrorCode_DistinctError)(nil), + (*ErrorCode_FeedAttributeReferenceError)(nil), + (*ErrorCode_FunctionError)(nil), + (*ErrorCode_FunctionParsingError)(nil), + (*ErrorCode_IdError)(nil), + (*ErrorCode_ImageError)(nil), + (*ErrorCode_LanguageCodeError)(nil), + (*ErrorCode_MediaBundleError)(nil), + (*ErrorCode_MediaUploadError)(nil), + (*ErrorCode_MediaFileError)(nil), + (*ErrorCode_MultiplierError)(nil), + (*ErrorCode_NewResourceCreationError)(nil), + (*ErrorCode_NotEmptyError)(nil), + (*ErrorCode_NullError)(nil), + (*ErrorCode_OperatorError)(nil), + (*ErrorCode_RangeError)(nil), + (*ErrorCode_RecommendationError)(nil), + (*ErrorCode_RegionCodeError)(nil), + (*ErrorCode_SettingError)(nil), + (*ErrorCode_StringFormatError)(nil), + (*ErrorCode_StringLengthError)(nil), + (*ErrorCode_OperationAccessDeniedError)(nil), + (*ErrorCode_ResourceAccessDeniedError)(nil), + (*ErrorCode_ResourceCountLimitExceededError)(nil), + (*ErrorCode_YoutubeVideoRegistrationError)(nil), + (*ErrorCode_AdGroupBidModifierError)(nil), + (*ErrorCode_ContextError)(nil), + (*ErrorCode_FieldError)(nil), + (*ErrorCode_SharedSetError)(nil), + (*ErrorCode_SharedCriterionError)(nil), + (*ErrorCode_CampaignSharedSetError)(nil), + (*ErrorCode_ConversionActionError)(nil), + (*ErrorCode_ConversionAdjustmentUploadError)(nil), + (*ErrorCode_ConversionUploadError)(nil), + (*ErrorCode_HeaderError)(nil), + (*ErrorCode_DatabaseError)(nil), + (*ErrorCode_PolicyFindingError)(nil), + (*ErrorCode_EnumError)(nil), + (*ErrorCode_KeywordPlanError)(nil), + (*ErrorCode_KeywordPlanCampaignError)(nil), + (*ErrorCode_KeywordPlanNegativeKeywordError)(nil), + (*ErrorCode_KeywordPlanAdGroupError)(nil), + (*ErrorCode_KeywordPlanKeywordError)(nil), + (*ErrorCode_KeywordPlanIdeaError)(nil), + (*ErrorCode_AccountBudgetProposalError)(nil), + (*ErrorCode_UserListError)(nil), + (*ErrorCode_ChangeStatusError)(nil), + (*ErrorCode_FeedError)(nil), + (*ErrorCode_GeoTargetConstantSuggestionError)(nil), + (*ErrorCode_FeedItemError)(nil), + (*ErrorCode_LabelError)(nil), + (*ErrorCode_BillingSetupError)(nil), + (*ErrorCode_CustomerClientLinkError)(nil), + (*ErrorCode_CustomerManagerLinkError)(nil), + 
(*ErrorCode_FeedMappingError)(nil), + (*ErrorCode_CustomerFeedError)(nil), + (*ErrorCode_AdGroupFeedError)(nil), + (*ErrorCode_CampaignFeedError)(nil), + (*ErrorCode_CustomInterestError)(nil), + (*ErrorCode_ExtensionFeedItemError)(nil), + (*ErrorCode_AdParameterError)(nil), + (*ErrorCode_FeedItemValidationError)(nil), + (*ErrorCode_ExtensionSettingError)(nil), + (*ErrorCode_FeedItemTargetError)(nil), + (*ErrorCode_PolicyViolationError)(nil), + (*ErrorCode_MutateJobError)(nil), + (*ErrorCode_PartialFailureError)(nil), + (*ErrorCode_PolicyValidationParameterError)(nil), + (*ErrorCode_SizeLimitError)(nil), + } +} + +func _ErrorCode_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ErrorCode) + // error_code + switch x := m.ErrorCode.(type) { + case *ErrorCode_RequestError: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RequestError)) + case *ErrorCode_BiddingStrategyError: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.BiddingStrategyError)) + case *ErrorCode_UrlFieldError: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.UrlFieldError)) + case *ErrorCode_ListOperationError: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ListOperationError)) + case *ErrorCode_QueryError: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.QueryError)) + case *ErrorCode_MutateError: + b.EncodeVarint(7<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MutateError)) + case *ErrorCode_FieldMaskError: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FieldMaskError)) + case *ErrorCode_AuthorizationError: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AuthorizationError)) + case *ErrorCode_InternalError: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.InternalError)) + case *ErrorCode_QuotaError: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.QuotaError)) + case *ErrorCode_AdError: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdError)) + case *ErrorCode_AdGroupError: + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdGroupError)) + case *ErrorCode_CampaignBudgetError: + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CampaignBudgetError)) + case *ErrorCode_CampaignError: + b.EncodeVarint(15<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CampaignError)) + case *ErrorCode_AuthenticationError: + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AuthenticationError)) + case *ErrorCode_AdGroupCriterionError: + b.EncodeVarint(18<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdGroupCriterionError)) + case *ErrorCode_AdCustomizerError: + b.EncodeVarint(19<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdCustomizerError)) + case *ErrorCode_AdGroupAdError: + b.EncodeVarint(21<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdGroupAdError)) + case *ErrorCode_AdSharingError: + b.EncodeVarint(24<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdSharingError)) + case *ErrorCode_AdxError: + b.EncodeVarint(25<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdxError)) + case *ErrorCode_AssetError: + b.EncodeVarint(107<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AssetError)) + case *ErrorCode_BiddingError: + b.EncodeVarint(26<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.BiddingError)) + case *ErrorCode_CampaignCriterionError: + b.EncodeVarint(29<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CampaignCriterionError)) + case 
*ErrorCode_CollectionSizeError: + b.EncodeVarint(31<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CollectionSizeError)) + case *ErrorCode_CountryCodeError: + b.EncodeVarint(109<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CountryCodeError)) + case *ErrorCode_CriterionError: + b.EncodeVarint(32<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CriterionError)) + case *ErrorCode_CustomerError: + b.EncodeVarint(90<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomerError)) + case *ErrorCode_DateError: + b.EncodeVarint(33<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DateError)) + case *ErrorCode_DateRangeError: + b.EncodeVarint(34<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DateRangeError)) + case *ErrorCode_DistinctError: + b.EncodeVarint(35<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DistinctError)) + case *ErrorCode_FeedAttributeReferenceError: + b.EncodeVarint(36<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FeedAttributeReferenceError)) + case *ErrorCode_FunctionError: + b.EncodeVarint(37<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FunctionError)) + case *ErrorCode_FunctionParsingError: + b.EncodeVarint(38<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FunctionParsingError)) + case *ErrorCode_IdError: + b.EncodeVarint(39<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IdError)) + case *ErrorCode_ImageError: + b.EncodeVarint(40<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ImageError)) + case *ErrorCode_LanguageCodeError: + b.EncodeVarint(110<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LanguageCodeError)) + case *ErrorCode_MediaBundleError: + b.EncodeVarint(42<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MediaBundleError)) + case *ErrorCode_MediaUploadError: + b.EncodeVarint(116<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MediaUploadError)) + case *ErrorCode_MediaFileError: + b.EncodeVarint(86<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MediaFileError)) + case *ErrorCode_MultiplierError: + b.EncodeVarint(44<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MultiplierError)) + case *ErrorCode_NewResourceCreationError: + b.EncodeVarint(45<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NewResourceCreationError)) + case *ErrorCode_NotEmptyError: + b.EncodeVarint(46<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NotEmptyError)) + case *ErrorCode_NullError: + b.EncodeVarint(47<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullError)) + case *ErrorCode_OperatorError: + b.EncodeVarint(48<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.OperatorError)) + case *ErrorCode_RangeError: + b.EncodeVarint(49<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RangeError)) + case *ErrorCode_RecommendationError: + b.EncodeVarint(58<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RecommendationError)) + case *ErrorCode_RegionCodeError: + b.EncodeVarint(51<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RegionCodeError)) + case *ErrorCode_SettingError: + b.EncodeVarint(52<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SettingError)) + case *ErrorCode_StringFormatError: + b.EncodeVarint(53<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.StringFormatError)) + case *ErrorCode_StringLengthError: + b.EncodeVarint(54<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.StringLengthError)) + case *ErrorCode_OperationAccessDeniedError: + b.EncodeVarint(55<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.OperationAccessDeniedError)) + case *ErrorCode_ResourceAccessDeniedError: + b.EncodeVarint(56<<3 | proto.WireVarint) + 
b.EncodeVarint(uint64(x.ResourceAccessDeniedError)) + case *ErrorCode_ResourceCountLimitExceededError: + b.EncodeVarint(57<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ResourceCountLimitExceededError)) + case *ErrorCode_YoutubeVideoRegistrationError: + b.EncodeVarint(117<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.YoutubeVideoRegistrationError)) + case *ErrorCode_AdGroupBidModifierError: + b.EncodeVarint(59<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdGroupBidModifierError)) + case *ErrorCode_ContextError: + b.EncodeVarint(60<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ContextError)) + case *ErrorCode_FieldError: + b.EncodeVarint(61<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FieldError)) + case *ErrorCode_SharedSetError: + b.EncodeVarint(62<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SharedSetError)) + case *ErrorCode_SharedCriterionError: + b.EncodeVarint(63<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SharedCriterionError)) + case *ErrorCode_CampaignSharedSetError: + b.EncodeVarint(64<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CampaignSharedSetError)) + case *ErrorCode_ConversionActionError: + b.EncodeVarint(65<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ConversionActionError)) + case *ErrorCode_ConversionAdjustmentUploadError: + b.EncodeVarint(115<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ConversionAdjustmentUploadError)) + case *ErrorCode_ConversionUploadError: + b.EncodeVarint(111<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ConversionUploadError)) + case *ErrorCode_HeaderError: + b.EncodeVarint(66<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.HeaderError)) + case *ErrorCode_DatabaseError: + b.EncodeVarint(67<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DatabaseError)) + case *ErrorCode_PolicyFindingError: + b.EncodeVarint(68<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PolicyFindingError)) + case *ErrorCode_EnumError: + b.EncodeVarint(70<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.EnumError)) + case *ErrorCode_KeywordPlanError: + b.EncodeVarint(71<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanError)) + case *ErrorCode_KeywordPlanCampaignError: + b.EncodeVarint(72<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanCampaignError)) + case *ErrorCode_KeywordPlanNegativeKeywordError: + b.EncodeVarint(73<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanNegativeKeywordError)) + case *ErrorCode_KeywordPlanAdGroupError: + b.EncodeVarint(74<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanAdGroupError)) + case *ErrorCode_KeywordPlanKeywordError: + b.EncodeVarint(75<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanKeywordError)) + case *ErrorCode_KeywordPlanIdeaError: + b.EncodeVarint(76<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.KeywordPlanIdeaError)) + case *ErrorCode_AccountBudgetProposalError: + b.EncodeVarint(77<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AccountBudgetProposalError)) + case *ErrorCode_UserListError: + b.EncodeVarint(78<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.UserListError)) + case *ErrorCode_ChangeStatusError: + b.EncodeVarint(79<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ChangeStatusError)) + case *ErrorCode_FeedError: + b.EncodeVarint(80<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FeedError)) + case *ErrorCode_GeoTargetConstantSuggestionError: + b.EncodeVarint(81<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.GeoTargetConstantSuggestionError)) + case *ErrorCode_FeedItemError: + b.EncodeVarint(83<<3 | 
proto.WireVarint) + b.EncodeVarint(uint64(x.FeedItemError)) + case *ErrorCode_LabelError: + b.EncodeVarint(84<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LabelError)) + case *ErrorCode_BillingSetupError: + b.EncodeVarint(87<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.BillingSetupError)) + case *ErrorCode_CustomerClientLinkError: + b.EncodeVarint(88<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomerClientLinkError)) + case *ErrorCode_CustomerManagerLinkError: + b.EncodeVarint(91<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomerManagerLinkError)) + case *ErrorCode_FeedMappingError: + b.EncodeVarint(92<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FeedMappingError)) + case *ErrorCode_CustomerFeedError: + b.EncodeVarint(93<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomerFeedError)) + case *ErrorCode_AdGroupFeedError: + b.EncodeVarint(94<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdGroupFeedError)) + case *ErrorCode_CampaignFeedError: + b.EncodeVarint(96<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CampaignFeedError)) + case *ErrorCode_CustomInterestError: + b.EncodeVarint(97<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomInterestError)) + case *ErrorCode_ExtensionFeedItemError: + b.EncodeVarint(100<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ExtensionFeedItemError)) + case *ErrorCode_AdParameterError: + b.EncodeVarint(101<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdParameterError)) + case *ErrorCode_FeedItemValidationError: + b.EncodeVarint(102<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FeedItemValidationError)) + case *ErrorCode_ExtensionSettingError: + b.EncodeVarint(103<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ExtensionSettingError)) + case *ErrorCode_FeedItemTargetError: + b.EncodeVarint(104<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FeedItemTargetError)) + case *ErrorCode_PolicyViolationError: + b.EncodeVarint(105<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PolicyViolationError)) + case *ErrorCode_MutateJobError: + b.EncodeVarint(108<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MutateJobError)) + case *ErrorCode_PartialFailureError: + b.EncodeVarint(112<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PartialFailureError)) + case *ErrorCode_PolicyValidationParameterError: + b.EncodeVarint(114<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PolicyValidationParameterError)) + case *ErrorCode_SizeLimitError: + b.EncodeVarint(118<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SizeLimitError)) + case nil: + default: + return fmt.Errorf("ErrorCode.ErrorCode has unexpected type %T", x) + } + return nil +} + +func _ErrorCode_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ErrorCode) + switch tag { + case 1: // error_code.request_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_RequestError{RequestErrorEnum_RequestError(x)} + return true, err + case 2: // error_code.bidding_strategy_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_BiddingStrategyError{BiddingStrategyErrorEnum_BiddingStrategyError(x)} + return true, err + case 3: // error_code.url_field_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_UrlFieldError{UrlFieldErrorEnum_UrlFieldError(x)} + return true, err + case 4: 
// error_code.list_operation_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ListOperationError{ListOperationErrorEnum_ListOperationError(x)} + return true, err + case 5: // error_code.query_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_QueryError{QueryErrorEnum_QueryError(x)} + return true, err + case 7: // error_code.mutate_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MutateError{MutateErrorEnum_MutateError(x)} + return true, err + case 8: // error_code.field_mask_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FieldMaskError{FieldMaskErrorEnum_FieldMaskError(x)} + return true, err + case 9: // error_code.authorization_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AuthorizationError{AuthorizationErrorEnum_AuthorizationError(x)} + return true, err + case 10: // error_code.internal_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_InternalError{InternalErrorEnum_InternalError(x)} + return true, err + case 11: // error_code.quota_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_QuotaError{QuotaErrorEnum_QuotaError(x)} + return true, err + case 12: // error_code.ad_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdError{AdErrorEnum_AdError(x)} + return true, err + case 13: // error_code.ad_group_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdGroupError{AdGroupErrorEnum_AdGroupError(x)} + return true, err + case 14: // error_code.campaign_budget_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CampaignBudgetError{CampaignBudgetErrorEnum_CampaignBudgetError(x)} + return true, err + case 15: // error_code.campaign_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CampaignError{CampaignErrorEnum_CampaignError(x)} + return true, err + case 17: // error_code.authentication_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AuthenticationError{AuthenticationErrorEnum_AuthenticationError(x)} + return true, err + case 18: // error_code.ad_group_criterion_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdGroupCriterionError{AdGroupCriterionErrorEnum_AdGroupCriterionError(x)} + return true, err + case 19: // error_code.ad_customizer_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdCustomizerError{AdCustomizerErrorEnum_AdCustomizerError(x)} + return true, err + case 21: // error_code.ad_group_ad_error + if wire 
!= proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdGroupAdError{AdGroupAdErrorEnum_AdGroupAdError(x)} + return true, err + case 24: // error_code.ad_sharing_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdSharingError{AdSharingErrorEnum_AdSharingError(x)} + return true, err + case 25: // error_code.adx_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdxError{AdxErrorEnum_AdxError(x)} + return true, err + case 107: // error_code.asset_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AssetError{AssetErrorEnum_AssetError(x)} + return true, err + case 26: // error_code.bidding_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_BiddingError{BiddingErrorEnum_BiddingError(x)} + return true, err + case 29: // error_code.campaign_criterion_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CampaignCriterionError{CampaignCriterionErrorEnum_CampaignCriterionError(x)} + return true, err + case 31: // error_code.collection_size_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CollectionSizeError{CollectionSizeErrorEnum_CollectionSizeError(x)} + return true, err + case 109: // error_code.country_code_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CountryCodeError{CountryCodeErrorEnum_CountryCodeError(x)} + return true, err + case 32: // error_code.criterion_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CriterionError{CriterionErrorEnum_CriterionError(x)} + return true, err + case 90: // error_code.customer_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CustomerError{CustomerErrorEnum_CustomerError(x)} + return true, err + case 33: // error_code.date_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_DateError{DateErrorEnum_DateError(x)} + return true, err + case 34: // error_code.date_range_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_DateRangeError{DateRangeErrorEnum_DateRangeError(x)} + return true, err + case 35: // error_code.distinct_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_DistinctError{DistinctErrorEnum_DistinctError(x)} + return true, err + case 36: // error_code.feed_attribute_reference_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedAttributeReferenceError{FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError(x)} + return true, err + case 37: // error_code.function_error + if wire != proto.WireVarint { + 
return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FunctionError{FunctionErrorEnum_FunctionError(x)} + return true, err + case 38: // error_code.function_parsing_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FunctionParsingError{FunctionParsingErrorEnum_FunctionParsingError(x)} + return true, err + case 39: // error_code.id_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_IdError{IdErrorEnum_IdError(x)} + return true, err + case 40: // error_code.image_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ImageError{ImageErrorEnum_ImageError(x)} + return true, err + case 110: // error_code.language_code_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_LanguageCodeError{LanguageCodeErrorEnum_LanguageCodeError(x)} + return true, err + case 42: // error_code.media_bundle_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MediaBundleError{MediaBundleErrorEnum_MediaBundleError(x)} + return true, err + case 116: // error_code.media_upload_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MediaUploadError{MediaUploadErrorEnum_MediaUploadError(x)} + return true, err + case 86: // error_code.media_file_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MediaFileError{MediaFileErrorEnum_MediaFileError(x)} + return true, err + case 44: // error_code.multiplier_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MultiplierError{MultiplierErrorEnum_MultiplierError(x)} + return true, err + case 45: // error_code.new_resource_creation_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_NewResourceCreationError{NewResourceCreationErrorEnum_NewResourceCreationError(x)} + return true, err + case 46: // error_code.not_empty_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_NotEmptyError{NotEmptyErrorEnum_NotEmptyError(x)} + return true, err + case 47: // error_code.null_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_NullError{NullErrorEnum_NullError(x)} + return true, err + case 48: // error_code.operator_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_OperatorError{OperatorErrorEnum_OperatorError(x)} + return true, err + case 49: // error_code.range_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_RangeError{RangeErrorEnum_RangeError(x)} + return true, err + case 58: // error_code.recommendation_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + 
x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_RecommendationError{RecommendationErrorEnum_RecommendationError(x)} + return true, err + case 51: // error_code.region_code_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_RegionCodeError{RegionCodeErrorEnum_RegionCodeError(x)} + return true, err + case 52: // error_code.setting_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_SettingError{SettingErrorEnum_SettingError(x)} + return true, err + case 53: // error_code.string_format_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_StringFormatError{StringFormatErrorEnum_StringFormatError(x)} + return true, err + case 54: // error_code.string_length_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_StringLengthError{StringLengthErrorEnum_StringLengthError(x)} + return true, err + case 55: // error_code.operation_access_denied_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_OperationAccessDeniedError{OperationAccessDeniedErrorEnum_OperationAccessDeniedError(x)} + return true, err + case 56: // error_code.resource_access_denied_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ResourceAccessDeniedError{ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError(x)} + return true, err + case 57: // error_code.resource_count_limit_exceeded_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ResourceCountLimitExceededError{ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError(x)} + return true, err + case 117: // error_code.youtube_video_registration_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_YoutubeVideoRegistrationError{YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError(x)} + return true, err + case 59: // error_code.ad_group_bid_modifier_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdGroupBidModifierError{AdGroupBidModifierErrorEnum_AdGroupBidModifierError(x)} + return true, err + case 60: // error_code.context_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ContextError{ContextErrorEnum_ContextError(x)} + return true, err + case 61: // error_code.field_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FieldError{FieldErrorEnum_FieldError(x)} + return true, err + case 62: // error_code.shared_set_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_SharedSetError{SharedSetErrorEnum_SharedSetError(x)} + return true, err + case 63: // error_code.shared_criterion_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() 
+ m.ErrorCode = &ErrorCode_SharedCriterionError{SharedCriterionErrorEnum_SharedCriterionError(x)} + return true, err + case 64: // error_code.campaign_shared_set_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CampaignSharedSetError{CampaignSharedSetErrorEnum_CampaignSharedSetError(x)} + return true, err + case 65: // error_code.conversion_action_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ConversionActionError{ConversionActionErrorEnum_ConversionActionError(x)} + return true, err + case 115: // error_code.conversion_adjustment_upload_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ConversionAdjustmentUploadError{ConversionAdjustmentUploadErrorEnum_ConversionAdjustmentUploadError(x)} + return true, err + case 111: // error_code.conversion_upload_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ConversionUploadError{ConversionUploadErrorEnum_ConversionUploadError(x)} + return true, err + case 66: // error_code.header_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_HeaderError{HeaderErrorEnum_HeaderError(x)} + return true, err + case 67: // error_code.database_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_DatabaseError{DatabaseErrorEnum_DatabaseError(x)} + return true, err + case 68: // error_code.policy_finding_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_PolicyFindingError{PolicyFindingErrorEnum_PolicyFindingError(x)} + return true, err + case 70: // error_code.enum_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_EnumError{EnumErrorEnum_EnumError(x)} + return true, err + case 71: // error_code.keyword_plan_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanError{KeywordPlanErrorEnum_KeywordPlanError(x)} + return true, err + case 72: // error_code.keyword_plan_campaign_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanCampaignError{KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError(x)} + return true, err + case 73: // error_code.keyword_plan_negative_keyword_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanNegativeKeywordError{KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError(x)} + return true, err + case 74: // error_code.keyword_plan_ad_group_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanAdGroupError{KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError(x)} + return true, err + case 75: // error_code.keyword_plan_keyword_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := 
b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanKeywordError{KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError(x)} + return true, err + case 76: // error_code.keyword_plan_idea_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_KeywordPlanIdeaError{KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError(x)} + return true, err + case 77: // error_code.account_budget_proposal_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AccountBudgetProposalError{AccountBudgetProposalErrorEnum_AccountBudgetProposalError(x)} + return true, err + case 78: // error_code.user_list_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_UserListError{UserListErrorEnum_UserListError(x)} + return true, err + case 79: // error_code.change_status_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ChangeStatusError{ChangeStatusErrorEnum_ChangeStatusError(x)} + return true, err + case 80: // error_code.feed_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedError{FeedErrorEnum_FeedError(x)} + return true, err + case 81: // error_code.geo_target_constant_suggestion_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_GeoTargetConstantSuggestionError{GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError(x)} + return true, err + case 83: // error_code.feed_item_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedItemError{FeedItemErrorEnum_FeedItemError(x)} + return true, err + case 84: // error_code.label_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_LabelError{LabelErrorEnum_LabelError(x)} + return true, err + case 87: // error_code.billing_setup_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_BillingSetupError{BillingSetupErrorEnum_BillingSetupError(x)} + return true, err + case 88: // error_code.customer_client_link_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CustomerClientLinkError{CustomerClientLinkErrorEnum_CustomerClientLinkError(x)} + return true, err + case 91: // error_code.customer_manager_link_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CustomerManagerLinkError{CustomerManagerLinkErrorEnum_CustomerManagerLinkError(x)} + return true, err + case 92: // error_code.feed_mapping_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedMappingError{FeedMappingErrorEnum_FeedMappingError(x)} + return true, err + case 93: // error_code.customer_feed_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = 
&ErrorCode_CustomerFeedError{CustomerFeedErrorEnum_CustomerFeedError(x)} + return true, err + case 94: // error_code.ad_group_feed_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdGroupFeedError{AdGroupFeedErrorEnum_AdGroupFeedError(x)} + return true, err + case 96: // error_code.campaign_feed_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CampaignFeedError{CampaignFeedErrorEnum_CampaignFeedError(x)} + return true, err + case 97: // error_code.custom_interest_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_CustomInterestError{CustomInterestErrorEnum_CustomInterestError(x)} + return true, err + case 100: // error_code.extension_feed_item_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ExtensionFeedItemError{ExtensionFeedItemErrorEnum_ExtensionFeedItemError(x)} + return true, err + case 101: // error_code.ad_parameter_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_AdParameterError{AdParameterErrorEnum_AdParameterError(x)} + return true, err + case 102: // error_code.feed_item_validation_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedItemValidationError{FeedItemValidationErrorEnum_FeedItemValidationError(x)} + return true, err + case 103: // error_code.extension_setting_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_ExtensionSettingError{ExtensionSettingErrorEnum_ExtensionSettingError(x)} + return true, err + case 104: // error_code.feed_item_target_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_FeedItemTargetError{FeedItemTargetErrorEnum_FeedItemTargetError(x)} + return true, err + case 105: // error_code.policy_violation_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_PolicyViolationError{PolicyViolationErrorEnum_PolicyViolationError(x)} + return true, err + case 108: // error_code.mutate_job_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_MutateJobError{MutateJobErrorEnum_MutateJobError(x)} + return true, err + case 112: // error_code.partial_failure_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_PartialFailureError{PartialFailureErrorEnum_PartialFailureError(x)} + return true, err + case 114: // error_code.policy_validation_parameter_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = &ErrorCode_PolicyValidationParameterError{PolicyValidationParameterErrorEnum_PolicyValidationParameterError(x)} + return true, err + case 118: // error_code.size_limit_error + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ErrorCode = 
&ErrorCode_SizeLimitError{SizeLimitErrorEnum_SizeLimitError(x)} + return true, err + default: + return false, nil + } +} + +func _ErrorCode_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ErrorCode) + // error_code + switch x := m.ErrorCode.(type) { + case *ErrorCode_RequestError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RequestError)) + case *ErrorCode_BiddingStrategyError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.BiddingStrategyError)) + case *ErrorCode_UrlFieldError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.UrlFieldError)) + case *ErrorCode_ListOperationError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ListOperationError)) + case *ErrorCode_QueryError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.QueryError)) + case *ErrorCode_MutateError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.MutateError)) + case *ErrorCode_FieldMaskError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.FieldMaskError)) + case *ErrorCode_AuthorizationError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AuthorizationError)) + case *ErrorCode_InternalError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.InternalError)) + case *ErrorCode_QuotaError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.QuotaError)) + case *ErrorCode_AdError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AdError)) + case *ErrorCode_AdGroupError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AdGroupError)) + case *ErrorCode_CampaignBudgetError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CampaignBudgetError)) + case *ErrorCode_CampaignError: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CampaignError)) + case *ErrorCode_AuthenticationError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AuthenticationError)) + case *ErrorCode_AdGroupCriterionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdGroupCriterionError)) + case *ErrorCode_AdCustomizerError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdCustomizerError)) + case *ErrorCode_AdGroupAdError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdGroupAdError)) + case *ErrorCode_AdSharingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdSharingError)) + case *ErrorCode_AdxError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdxError)) + case *ErrorCode_AssetError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AssetError)) + case *ErrorCode_BiddingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.BiddingError)) + case *ErrorCode_CampaignCriterionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CampaignCriterionError)) + case *ErrorCode_CollectionSizeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CollectionSizeError)) + case *ErrorCode_CountryCodeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CountryCodeError)) + case *ErrorCode_CriterionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CriterionError)) + case *ErrorCode_CustomerError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomerError)) + case *ErrorCode_DateError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.DateError)) + case *ErrorCode_DateRangeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.DateRangeError)) + case *ErrorCode_DistinctError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.DistinctError)) + case 
*ErrorCode_FeedAttributeReferenceError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FeedAttributeReferenceError)) + case *ErrorCode_FunctionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FunctionError)) + case *ErrorCode_FunctionParsingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FunctionParsingError)) + case *ErrorCode_IdError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.IdError)) + case *ErrorCode_ImageError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ImageError)) + case *ErrorCode_LanguageCodeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.LanguageCodeError)) + case *ErrorCode_MediaBundleError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.MediaBundleError)) + case *ErrorCode_MediaUploadError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.MediaUploadError)) + case *ErrorCode_MediaFileError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.MediaFileError)) + case *ErrorCode_MultiplierError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.MultiplierError)) + case *ErrorCode_NewResourceCreationError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.NewResourceCreationError)) + case *ErrorCode_NotEmptyError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.NotEmptyError)) + case *ErrorCode_NullError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.NullError)) + case *ErrorCode_OperatorError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.OperatorError)) + case *ErrorCode_RangeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.RangeError)) + case *ErrorCode_RecommendationError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.RecommendationError)) + case *ErrorCode_RegionCodeError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.RegionCodeError)) + case *ErrorCode_SettingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.SettingError)) + case *ErrorCode_StringFormatError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.StringFormatError)) + case *ErrorCode_StringLengthError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.StringLengthError)) + case *ErrorCode_OperationAccessDeniedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.OperationAccessDeniedError)) + case *ErrorCode_ResourceAccessDeniedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ResourceAccessDeniedError)) + case *ErrorCode_ResourceCountLimitExceededError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ResourceCountLimitExceededError)) + case *ErrorCode_YoutubeVideoRegistrationError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.YoutubeVideoRegistrationError)) + case *ErrorCode_AdGroupBidModifierError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdGroupBidModifierError)) + case *ErrorCode_ContextError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ContextError)) + case *ErrorCode_FieldError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FieldError)) + case *ErrorCode_SharedSetError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.SharedSetError)) + case *ErrorCode_SharedCriterionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.SharedCriterionError)) + case *ErrorCode_CampaignSharedSetError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CampaignSharedSetError)) + case *ErrorCode_ConversionActionError: + n += 2 // tag and wire + n += 
proto.SizeVarint(uint64(x.ConversionActionError)) + case *ErrorCode_ConversionAdjustmentUploadError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ConversionAdjustmentUploadError)) + case *ErrorCode_ConversionUploadError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ConversionUploadError)) + case *ErrorCode_HeaderError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.HeaderError)) + case *ErrorCode_DatabaseError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.DatabaseError)) + case *ErrorCode_PolicyFindingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.PolicyFindingError)) + case *ErrorCode_EnumError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.EnumError)) + case *ErrorCode_KeywordPlanError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanError)) + case *ErrorCode_KeywordPlanCampaignError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanCampaignError)) + case *ErrorCode_KeywordPlanNegativeKeywordError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanNegativeKeywordError)) + case *ErrorCode_KeywordPlanAdGroupError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanAdGroupError)) + case *ErrorCode_KeywordPlanKeywordError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanKeywordError)) + case *ErrorCode_KeywordPlanIdeaError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.KeywordPlanIdeaError)) + case *ErrorCode_AccountBudgetProposalError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AccountBudgetProposalError)) + case *ErrorCode_UserListError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.UserListError)) + case *ErrorCode_ChangeStatusError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ChangeStatusError)) + case *ErrorCode_FeedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FeedError)) + case *ErrorCode_GeoTargetConstantSuggestionError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.GeoTargetConstantSuggestionError)) + case *ErrorCode_FeedItemError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FeedItemError)) + case *ErrorCode_LabelError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.LabelError)) + case *ErrorCode_BillingSetupError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.BillingSetupError)) + case *ErrorCode_CustomerClientLinkError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomerClientLinkError)) + case *ErrorCode_CustomerManagerLinkError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomerManagerLinkError)) + case *ErrorCode_FeedMappingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FeedMappingError)) + case *ErrorCode_CustomerFeedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomerFeedError)) + case *ErrorCode_AdGroupFeedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdGroupFeedError)) + case *ErrorCode_CampaignFeedError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CampaignFeedError)) + case *ErrorCode_CustomInterestError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomInterestError)) + case *ErrorCode_ExtensionFeedItemError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ExtensionFeedItemError)) + case *ErrorCode_AdParameterError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdParameterError)) + case *ErrorCode_FeedItemValidationError: + n += 2 // tag 
and wire + n += proto.SizeVarint(uint64(x.FeedItemValidationError)) + case *ErrorCode_ExtensionSettingError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ExtensionSettingError)) + case *ErrorCode_FeedItemTargetError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FeedItemTargetError)) + case *ErrorCode_PolicyViolationError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.PolicyViolationError)) + case *ErrorCode_MutateJobError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.MutateJobError)) + case *ErrorCode_PartialFailureError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.PartialFailureError)) + case *ErrorCode_PolicyValidationParameterError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.PolicyValidationParameterError)) + case *ErrorCode_SizeLimitError: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.SizeLimitError)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes the part of the request proto that caused the error. +type ErrorLocation struct { + // A field path that indicates which field was invalid in the request. + FieldPathElements []*ErrorLocation_FieldPathElement `protobuf:"bytes,2,rep,name=field_path_elements,json=fieldPathElements,proto3" json:"field_path_elements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorLocation) Reset() { *m = ErrorLocation{} } +func (m *ErrorLocation) String() string { return proto.CompactTextString(m) } +func (*ErrorLocation) ProtoMessage() {} +func (*ErrorLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{3} +} +func (m *ErrorLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorLocation.Unmarshal(m, b) +} +func (m *ErrorLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorLocation.Marshal(b, m, deterministic) +} +func (dst *ErrorLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorLocation.Merge(dst, src) +} +func (m *ErrorLocation) XXX_Size() int { + return xxx_messageInfo_ErrorLocation.Size(m) +} +func (m *ErrorLocation) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorLocation proto.InternalMessageInfo + +func (m *ErrorLocation) GetFieldPathElements() []*ErrorLocation_FieldPathElement { + if m != nil { + return m.FieldPathElements + } + return nil +} + +// A part of a field path. 
+type ErrorLocation_FieldPathElement struct { + // The name of a field or a oneof + FieldName string `protobuf:"bytes,1,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"` + // If field_name is a repeated field, this is the element that failed + Index *wrappers.Int64Value `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorLocation_FieldPathElement) Reset() { *m = ErrorLocation_FieldPathElement{} } +func (m *ErrorLocation_FieldPathElement) String() string { return proto.CompactTextString(m) } +func (*ErrorLocation_FieldPathElement) ProtoMessage() {} +func (*ErrorLocation_FieldPathElement) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{3, 0} +} +func (m *ErrorLocation_FieldPathElement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorLocation_FieldPathElement.Unmarshal(m, b) +} +func (m *ErrorLocation_FieldPathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorLocation_FieldPathElement.Marshal(b, m, deterministic) +} +func (dst *ErrorLocation_FieldPathElement) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorLocation_FieldPathElement.Merge(dst, src) +} +func (m *ErrorLocation_FieldPathElement) XXX_Size() int { + return xxx_messageInfo_ErrorLocation_FieldPathElement.Size(m) +} +func (m *ErrorLocation_FieldPathElement) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorLocation_FieldPathElement.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorLocation_FieldPathElement proto.InternalMessageInfo + +func (m *ErrorLocation_FieldPathElement) GetFieldName() string { + if m != nil { + return m.FieldName + } + return "" +} + +func (m *ErrorLocation_FieldPathElement) GetIndex() *wrappers.Int64Value { + if m != nil { + return m.Index + } + return nil +} + +// Additional error details. +type ErrorDetails struct { + // The error code that should have been returned, but wasn't. This is used + // when the error code is InternalError.ERROR_CODE_NOT_PUBLISHED. + UnpublishedErrorCode string `protobuf:"bytes,1,opt,name=unpublished_error_code,json=unpublishedErrorCode,proto3" json:"unpublished_error_code,omitempty"` + // Describes an ad policy violation. + PolicyViolationDetails *PolicyViolationDetails `protobuf:"bytes,2,opt,name=policy_violation_details,json=policyViolationDetails,proto3" json:"policy_violation_details,omitempty"` + // Describes policy violation findings. 
+ PolicyFindingDetails *PolicyFindingDetails `protobuf:"bytes,3,opt,name=policy_finding_details,json=policyFindingDetails,proto3" json:"policy_finding_details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorDetails) Reset() { *m = ErrorDetails{} } +func (m *ErrorDetails) String() string { return proto.CompactTextString(m) } +func (*ErrorDetails) ProtoMessage() {} +func (*ErrorDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{4} +} +func (m *ErrorDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorDetails.Unmarshal(m, b) +} +func (m *ErrorDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorDetails.Marshal(b, m, deterministic) +} +func (dst *ErrorDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorDetails.Merge(dst, src) +} +func (m *ErrorDetails) XXX_Size() int { + return xxx_messageInfo_ErrorDetails.Size(m) +} +func (m *ErrorDetails) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorDetails proto.InternalMessageInfo + +func (m *ErrorDetails) GetUnpublishedErrorCode() string { + if m != nil { + return m.UnpublishedErrorCode + } + return "" +} + +func (m *ErrorDetails) GetPolicyViolationDetails() *PolicyViolationDetails { + if m != nil { + return m.PolicyViolationDetails + } + return nil +} + +func (m *ErrorDetails) GetPolicyFindingDetails() *PolicyFindingDetails { + if m != nil { + return m.PolicyFindingDetails + } + return nil +} + +// Error returned as part of a mutate response. +// This error indicates single policy violation by some text +// in one of the fields. +type PolicyViolationDetails struct { + // Human readable description of policy violation. + ExternalPolicyDescription string `protobuf:"bytes,2,opt,name=external_policy_description,json=externalPolicyDescription,proto3" json:"external_policy_description,omitempty"` + // Unique identifier for this violation. + // If policy is exemptible, this key may be used to request exemption. + Key *common.PolicyViolationKey `protobuf:"bytes,4,opt,name=key,proto3" json:"key,omitempty"` + // Human readable name of the policy. + ExternalPolicyName string `protobuf:"bytes,5,opt,name=external_policy_name,json=externalPolicyName,proto3" json:"external_policy_name,omitempty"` + // Whether user can file an exemption request for this violation. 
+ IsExemptible bool `protobuf:"varint,6,opt,name=is_exemptible,json=isExemptible,proto3" json:"is_exemptible,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyViolationDetails) Reset() { *m = PolicyViolationDetails{} } +func (m *PolicyViolationDetails) String() string { return proto.CompactTextString(m) } +func (*PolicyViolationDetails) ProtoMessage() {} +func (*PolicyViolationDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{5} +} +func (m *PolicyViolationDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyViolationDetails.Unmarshal(m, b) +} +func (m *PolicyViolationDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyViolationDetails.Marshal(b, m, deterministic) +} +func (dst *PolicyViolationDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyViolationDetails.Merge(dst, src) +} +func (m *PolicyViolationDetails) XXX_Size() int { + return xxx_messageInfo_PolicyViolationDetails.Size(m) +} +func (m *PolicyViolationDetails) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyViolationDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyViolationDetails proto.InternalMessageInfo + +func (m *PolicyViolationDetails) GetExternalPolicyDescription() string { + if m != nil { + return m.ExternalPolicyDescription + } + return "" +} + +func (m *PolicyViolationDetails) GetKey() *common.PolicyViolationKey { + if m != nil { + return m.Key + } + return nil +} + +func (m *PolicyViolationDetails) GetExternalPolicyName() string { + if m != nil { + return m.ExternalPolicyName + } + return "" +} + +func (m *PolicyViolationDetails) GetIsExemptible() bool { + if m != nil { + return m.IsExemptible + } + return false +} + +// Error returned as part of a mutate response. +// This error indicates one or more policy findings in the fields of a +// resource. +type PolicyFindingDetails struct { + // The list of policy topics for the resource. Contains the PROHIBITED or + // FULLY_LIMITED policy topic entries that prevented the resource from being + // saved (among any other entries the resource may also have). 
+ PolicyTopicEntries []*common.PolicyTopicEntry `protobuf:"bytes,1,rep,name=policy_topic_entries,json=policyTopicEntries,proto3" json:"policy_topic_entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyFindingDetails) Reset() { *m = PolicyFindingDetails{} } +func (m *PolicyFindingDetails) String() string { return proto.CompactTextString(m) } +func (*PolicyFindingDetails) ProtoMessage() {} +func (*PolicyFindingDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_errors_70395b57fa918bda, []int{6} +} +func (m *PolicyFindingDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyFindingDetails.Unmarshal(m, b) +} +func (m *PolicyFindingDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyFindingDetails.Marshal(b, m, deterministic) +} +func (dst *PolicyFindingDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyFindingDetails.Merge(dst, src) +} +func (m *PolicyFindingDetails) XXX_Size() int { + return xxx_messageInfo_PolicyFindingDetails.Size(m) +} +func (m *PolicyFindingDetails) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyFindingDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyFindingDetails proto.InternalMessageInfo + +func (m *PolicyFindingDetails) GetPolicyTopicEntries() []*common.PolicyTopicEntry { + if m != nil { + return m.PolicyTopicEntries + } + return nil +} + +func init() { + proto.RegisterType((*GoogleAdsFailure)(nil), "google.ads.googleads.v1.errors.GoogleAdsFailure") + proto.RegisterType((*GoogleAdsError)(nil), "google.ads.googleads.v1.errors.GoogleAdsError") + proto.RegisterType((*ErrorCode)(nil), "google.ads.googleads.v1.errors.ErrorCode") + proto.RegisterType((*ErrorLocation)(nil), "google.ads.googleads.v1.errors.ErrorLocation") + proto.RegisterType((*ErrorLocation_FieldPathElement)(nil), "google.ads.googleads.v1.errors.ErrorLocation.FieldPathElement") + proto.RegisterType((*ErrorDetails)(nil), "google.ads.googleads.v1.errors.ErrorDetails") + proto.RegisterType((*PolicyViolationDetails)(nil), "google.ads.googleads.v1.errors.PolicyViolationDetails") + proto.RegisterType((*PolicyFindingDetails)(nil), "google.ads.googleads.v1.errors.PolicyFindingDetails") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/errors.proto", fileDescriptor_errors_70395b57fa918bda) +} + +var fileDescriptor_errors_70395b57fa918bda = []byte{ + // 3983 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x5c, 0xcd, 0x73, 0x1c, 0xb9, + 0x75, 0x9f, 0xd1, 0x66, 0x57, 0x22, 0x28, 0x51, 0x52, 0x8b, 0xab, 0x85, 0x24, 0x4b, 0x96, 0xb9, + 0x76, 0xa2, 0x38, 0x5e, 0x52, 0xa4, 0xbe, 0x76, 0x29, 0xaf, 0xb4, 0xc3, 0x2f, 0x0d, 0x2d, 0x52, + 0xe2, 0x36, 0xb5, 0xb4, 0xbd, 0x56, 0x3c, 0x8b, 0xe9, 0xc6, 0x0c, 0xb1, 0xec, 0x2f, 0xf5, 0x07, + 0x45, 0xaa, 0xe2, 0xaa, 0x24, 0x55, 0x49, 0x55, 0x52, 0x95, 0x1c, 0x92, 0xaa, 0x1c, 0x72, 0xc8, + 0x61, 0x8f, 0x3e, 0xe4, 0x90, 0x1c, 0xf2, 0x2f, 0xa4, 0xf2, 0x47, 0xe4, 0x96, 0x1c, 0x72, 0xcf, + 0xdd, 0x05, 0xf4, 0x17, 0x80, 0xc6, 0x0c, 0xd0, 0x3e, 0x89, 0xf8, 0x01, 0xbf, 0xf7, 0xd0, 0x98, + 0x87, 0x07, 0x3c, 0x3c, 0x40, 0xe0, 0x4f, 0xc6, 0x61, 0x38, 0xf6, 0xf0, 0x12, 0x72, 0x93, 0xa5, + 0xfc, 0x4f, 0xfa, 0xd7, 0xf1, 0xf2, 0x12, 0x8e, 0xe3, 0x30, 0x4e, 0x8a, 0x7f, 0x16, 0xa3, 0x38, + 0x4c, 0x43, 0xeb, 0x56, 0xde, 0x62, 0x11, 0xb9, 0xc9, 0x62, 0xd5, 0x78, 0xf1, 0x78, 0x79, 0x31, + 0x6f, 0x75, 0x7d, 0xa2, 0x30, 0x27, 
0xf4, 0xfd, 0x30, 0x58, 0x8a, 0x42, 0x8f, 0x38, 0xa7, 0xb9, + 0xb0, 0xeb, 0x3f, 0xd6, 0x34, 0x3e, 0x46, 0x5e, 0x86, 0x8b, 0xb6, 0x6b, 0x9a, 0x5e, 0x22, 0xc7, + 0x09, 0xb3, 0x20, 0x1d, 0x0c, 0x33, 0x77, 0x8c, 0xd3, 0x41, 0x14, 0x87, 0x51, 0x98, 0x20, 0x6f, + 0xc0, 0xaa, 0x0b, 0x19, 0x9f, 0xea, 0x64, 0xb8, 0x03, 0x27, 0x4b, 0xd2, 0xd0, 0x27, 0xef, 0x70, + 0x2c, 0x30, 0x3f, 0xd1, 0x33, 0xf9, 0xe6, 0x0f, 0xf5, 0xcd, 0xc7, 0x71, 0x98, 0x45, 0x03, 0x89, + 0xf7, 0x85, 0x29, 0x6f, 0x48, 0xdc, 0x81, 0x1f, 0xba, 0x64, 0x44, 0xa4, 0x8e, 0x7e, 0x6e, 0x2a, + 0xc1, 0x89, 0x49, 0x8a, 0x63, 0x12, 0x06, 0x02, 0xfd, 0x9e, 0x29, 0xbd, 0xe5, 0xb0, 0xe6, 0xa4, + 0x11, 0xc6, 0xe2, 0xf7, 0x3e, 0xd2, 0x33, 0x23, 0x14, 0x23, 0x1f, 0xa7, 0xd2, 0x67, 0x3e, 0xd0, + 0x13, 0x93, 0x43, 0x14, 0x93, 0x60, 0x2c, 0xd0, 0x16, 0xb5, 0xb4, 0x13, 0xa1, 0xfd, 0x5d, 0x5d, + 0xfb, 0x24, 0xc1, 0xa9, 0xc0, 0xf8, 0x4c, 0xc7, 0xc8, 0xd2, 0x43, 0x1c, 0xa4, 0xc4, 0x41, 0xa9, + 0x3c, 0xf6, 0x9f, 0x1a, 0x50, 0xc3, 0x98, 0xbc, 0x6b, 0x32, 0x57, 0x34, 0xcc, 0x21, 0x71, 0x5d, + 0x79, 0x28, 0x1e, 0x1b, 0x72, 0x92, 0x34, 0x46, 0x29, 0x1e, 0x9f, 0xb6, 0xea, 0xea, 0x90, 0x78, + 0x1e, 0x23, 0xe3, 0x54, 0xb2, 0x95, 0x55, 0x0d, 0xd3, 0x41, 0x7e, 0x84, 0xc8, 0x38, 0x28, 0xe7, + 0x71, 0x1b, 0xdb, 0xae, 0xb8, 0xbf, 0x9f, 0x6d, 0x57, 0xf4, 0x36, 0x5f, 0x5a, 0x91, 0x1a, 0xb6, + 0xfd, 0xc4, 0x94, 0x49, 0x0d, 0x15, 0xbb, 0x03, 0xd9, 0x92, 0xb4, 0x9a, 0x0f, 0x51, 0x30, 0xc6, + 0x83, 0x24, 0x45, 0x69, 0x96, 0xb4, 0x1b, 0xe3, 0xd0, 0xf3, 0xb0, 0xc3, 0xac, 0x28, 0x21, 0xef, + 0x70, 0x2b, 0x53, 0x72, 0xc2, 0x20, 0xc5, 0x27, 0x62, 0x4f, 0x7f, 0xaa, 0xe7, 0x1c, 0xe3, 0x38, + 0xa1, 0xfa, 0x90, 0xd3, 0x30, 0xde, 0x67, 0x2d, 0xd8, 0xee, 0xb7, 0x59, 0x92, 0xfa, 0x38, 0x48, + 0x07, 0x59, 0xe4, 0x85, 0x92, 0xf3, 0x6c, 0xd1, 0x0d, 0x05, 0xfb, 0x91, 0x96, 0x9d, 0x05, 0x69, + 0x7c, 0x3a, 0x70, 0x42, 0x57, 0x1c, 0xb1, 0xfb, 0x3a, 0xa2, 0xd2, 0x18, 0xb5, 0xbf, 0x11, 0x5b, + 0x87, 0x06, 0x24, 0x48, 0x71, 0x8c, 0x13, 0x71, 0xbc, 0x9f, 0x1a, 0x71, 0x71, 0x3c, 0x70, 0x3c, + 0x42, 0xc7, 0xca, 0x23, 0xc1, 0x51, 0xbb, 0x99, 0x50, 0x0a, 0x68, 0x65, 0x8f, 0x25, 0xa9, 0x31, + 0x13, 0xbe, 0x30, 0x65, 0xfa, 0x28, 0x40, 0x63, 0x1c, 0xb7, 0xef, 0xb0, 0x8b, 0x52, 0x34, 0x44, + 0x89, 0xf8, 0xc3, 0x2c, 0xe9, 0x49, 0xb8, 0xd5, 0xa2, 0xc2, 0x08, 0x31, 0x9b, 0x75, 0xad, 0x3a, + 0x47, 0x92, 0x94, 0x04, 0x4e, 0xda, 0xaa, 0x73, 0x38, 0xc8, 0xfc, 0x56, 0xee, 0x04, 0x9f, 0xa4, + 0x38, 0x60, 0xc6, 0xcd, 0xc6, 0x9f, 0xa4, 0xd8, 0x6f, 0x35, 0x3b, 0x6a, 0x7e, 0x82, 0xd3, 0x54, + 0x5e, 0x2d, 0xd6, 0x35, 0x6c, 0xa6, 0x13, 0xa5, 0x69, 0x4c, 0x86, 0x19, 0x1d, 0x24, 0x3c, 0xc2, + 0x31, 0x0e, 0x9c, 0x76, 0x3f, 0x48, 0xc3, 0x70, 0xee, 0x9b, 0x10, 0x1a, 0x5f, 0xfa, 0xd8, 0x98, + 0x95, 0xa2, 0x58, 0x5e, 0x63, 0x9e, 0x1a, 0x93, 0x8f, 0x91, 0x47, 0xdc, 0xe6, 0x5a, 0xfc, 0xc8, + 0x44, 0x80, 0x8f, 0xa2, 0x48, 0x1e, 0x62, 0xdd, 0x5e, 0x63, 0x44, 0xb0, 0xe7, 0xb6, 0xb2, 0xd7, + 0x9c, 0xe1, 0xa3, 0xa4, 0xdd, 0x64, 0x1a, 0x65, 0x41, 0xd3, 0x4b, 0x3f, 0x36, 0x25, 0x45, 0x28, + 0x4e, 0xe4, 0x4f, 0xdb, 0xd6, 0x90, 0xc7, 0x38, 0x2c, 0x7f, 0x0b, 0x27, 0x0c, 0x92, 0x14, 0x05, + 0xe9, 0x20, 0xc9, 0xc6, 0x63, 0x9c, 0x34, 0xfa, 0xb1, 0xac, 0x11, 0x75, 0x88, 0x91, 0xdb, 0x72, + 0xef, 0x4e, 0xdc, 0x56, 0xbf, 0x03, 0xf1, 0x51, 0x4b, 0x07, 0xc0, 0x9c, 0x78, 0x20, 0xc5, 0x22, + 0x3a, 0xa7, 0x78, 0x84, 0x4f, 0xdf, 0x86, 0xb1, 0x3b, 0x88, 0x3c, 0x44, 0x17, 0x3e, 0xc5, 0xb6, + 0xbb, 0x95, 0x04, 0xe5, 0xe6, 0xe6, 0x51, 0x1b, 0x09, 0x6d, 0x9c, 0x89, 0x40, 0x24, 0x2e, 0x46, + 0xad, 0xe6, 0x98, 0xc0, 0x2e, 0x0b, 0xbc, 0x80, 0x7e, 0x1b, 
0x01, 0x01, 0x1e, 0xa3, 0x94, 0x1c, + 0x63, 0xa5, 0x24, 0xdd, 0x8f, 0xed, 0xa1, 0x21, 0x6e, 0x17, 0x43, 0x7a, 0x28, 0x18, 0x67, 0xd4, + 0x42, 0x1a, 0x1b, 0x05, 0x5d, 0x68, 0xe0, 0x91, 0x24, 0x1d, 0x84, 0x11, 0x8e, 0xdb, 0x3b, 0x15, + 0x1f, 0xbb, 0x04, 0x0d, 0x86, 0x59, 0xe0, 0x7a, 0xed, 0x96, 0xb4, 0x9c, 0x38, 0x22, 0x12, 0xcd, + 0x4c, 0x9f, 0x62, 0x17, 0xa5, 0xd5, 0x97, 0x79, 0x29, 0x89, 0x3c, 0x39, 0x6a, 0x5d, 0xd6, 0xd2, + 0xd2, 0xb6, 0x8b, 0x75, 0x41, 0xf9, 0x36, 0x1c, 0xb6, 0x9a, 0x34, 0x01, 0x7e, 0x3b, 0x88, 0x71, + 0x12, 0x66, 0xb1, 0x83, 0x07, 0x4e, 0x8c, 0x9b, 0xbf, 0x85, 0x6e, 0x51, 0x0a, 0xc2, 0x74, 0x80, + 0xfd, 0x28, 0x3d, 0x6d, 0xb5, 0xf6, 0x05, 0x99, 0x27, 0xda, 0x99, 0xee, 0xbc, 0xa3, 0x36, 0x14, + 0xe4, 0x38, 0x38, 0x49, 0x06, 0x2e, 0x0e, 0x88, 0xb4, 0x7e, 0xde, 0x33, 0x92, 0x11, 0xc6, 0xad, + 0x76, 0xa6, 0x11, 0x8a, 0x53, 0x82, 0xbc, 0xc1, 0x08, 0x11, 0x2f, 0x8b, 0xdb, 0x99, 0x78, 0x7e, + 0xfa, 0x33, 0x18, 0x91, 0xa0, 0x11, 0x8f, 0x6e, 0x99, 0x51, 0xb9, 0x55, 0x57, 0x7d, 0x32, 0xf0, + 0xd8, 0x50, 0x0e, 0x09, 0xbd, 0xe6, 0x6f, 0xab, 0x73, 0x07, 0x6f, 0x32, 0x1c, 0x9f, 0xb6, 0x64, + 0x84, 0x29, 0x6a, 0xc5, 0x68, 0x6e, 0x30, 0x75, 0xa3, 0x1a, 0x63, 0x27, 0xf4, 0x7d, 0x1c, 0x28, + 0x76, 0x23, 0x0f, 0xb5, 0xd4, 0x31, 0xa5, 0x34, 0x7c, 0xd5, 0x8a, 0x96, 0xf7, 0x26, 0x93, 0xc3, + 0x92, 0x9e, 0x96, 0x53, 0x4c, 0xab, 0xc9, 0x06, 0xdb, 0x37, 0x15, 0x91, 0x9f, 0xf5, 0x79, 0xc4, + 0x27, 0xe9, 0x00, 0x9f, 0x38, 0x18, 0xbb, 0x92, 0x24, 0xdd, 0x07, 0xa8, 0x36, 0xb9, 0x3a, 0xd3, + 0x29, 0x02, 0x75, 0x75, 0x40, 0xf7, 0xc0, 0x8c, 0x2c, 0x47, 0xf9, 0x5a, 0x1a, 0x0d, 0xd0, 0x8b, + 0xef, 0x6c, 0xb1, 0x0a, 0x25, 0x29, 0x3b, 0xfb, 0x1a, 0x85, 0xb1, 0x8f, 0x7e, 0x2f, 0xa6, 0x87, + 0x83, 0x71, 0x7a, 0xd8, 0xca, 0xf1, 0x65, 0xb1, 0x37, 0x68, 0x6e, 0x52, 0xb5, 0xac, 0x84, 0x05, + 0x7c, 0x92, 0x2d, 0x6d, 0x6a, 0x58, 0xa7, 0x61, 0x96, 0x66, 0x43, 0x3c, 0x38, 0x26, 0x2e, 0x0e, + 0x07, 0xd4, 0x8a, 0xd9, 0x39, 0x95, 0xfc, 0xa3, 0x14, 0xa7, 0xd5, 0x4b, 0xac, 0x34, 0xcc, 0x46, + 0x4b, 0x6f, 0x63, 0x14, 0x45, 0xb8, 0x3c, 0xcd, 0xbe, 0xfe, 0xbd, 0x52, 0x4d, 0x44, 0x96, 0x50, + 0x10, 0x84, 0x29, 0x13, 0x51, 0xd4, 0x2e, 0x7c, 0x0d, 0x2e, 0x3d, 0x63, 0xf5, 0x3d, 0x37, 0xd9, + 0xca, 0xbd, 0x9d, 0xb5, 0x05, 0x3e, 0xc8, 0xbb, 0x00, 0xbb, 0xb7, 0xdf, 0xbb, 0x33, 0xbb, 0xb2, + 0xb8, 0x38, 0xfd, 0x40, 0x7c, 0xb1, 0x92, 0xb0, 0x49, 0xcb, 0x76, 0xc1, 0x5e, 0xf8, 0xcf, 0x33, + 0x60, 0x4e, 0xac, 0xb2, 0xfa, 0x00, 0xb0, 0x4a, 0x36, 0x1b, 0x61, 0xf7, 0x76, 0xf7, 0xce, 0xec, + 0xca, 0x1f, 0xeb, 0xc4, 0x33, 0xea, 0x7a, 0xe8, 0x62, 0x7b, 0x06, 0x97, 0x7f, 0x5a, 0x10, 0x9c, + 0xf5, 0x71, 0x92, 0xa0, 0x31, 0x86, 0x67, 0x6e, 0x77, 0xef, 0xcc, 0xd8, 0x65, 0xd1, 0x7a, 0x0a, + 0xce, 0xa6, 0x31, 0x19, 0x8f, 0x71, 0x0c, 0xdf, 0x63, 0x0a, 0x7e, 0x34, 0x51, 0x41, 0x7e, 0x06, + 0xbf, 0x78, 0x80, 0xbc, 0x0c, 0xdb, 0x25, 0xcb, 0xda, 0x06, 0xe7, 0xbc, 0x30, 0x3f, 0xbc, 0x84, + 0x7f, 0xc0, 0x24, 0x7c, 0x62, 0xd4, 0xc5, 0x9d, 0x82, 0x64, 0x57, 0x74, 0x6b, 0x0b, 0x9c, 0x75, + 0x71, 0x8a, 0x88, 0x97, 0xc0, 0xf7, 0x99, 0xa4, 0x9f, 0x18, 0x49, 0xda, 0xc8, 0x39, 0x76, 0x49, + 0x5e, 0xf8, 0xef, 0x5f, 0x83, 0x99, 0x6a, 0x18, 0x2c, 0x17, 0x5c, 0x10, 0x9c, 0x13, 0x1b, 0xc8, + 0xb9, 0x95, 0xcf, 0x75, 0xb2, 0xed, 0x9c, 0xc4, 0x04, 0x6d, 0x06, 0x99, 0x2f, 0x00, 0xfd, 0x8e, + 0x7d, 0x3e, 0xe6, 0xca, 0xd6, 0x5f, 0x75, 0xc1, 0x55, 0xf5, 0x09, 0x29, 0x1b, 0xf1, 0xb9, 0x95, + 0x5d, 0x9d, 0xbe, 0xb5, 0x9c, 0xbd, 0x5f, 0x90, 0x6b, 0xbd, 0xaa, 0x8a, 0x7e, 0xc7, 0x9e, 0x1f, + 0x2a, 0x70, 0x8b, 0x80, 0x8b, 0xd2, 0xb4, 0x63, 0xbf, 0xeb, 0xdc, 0xca, 0x53, 0x9d, 
0xfe, 0xaf, + 0x62, 0x6f, 0x8b, 0xb2, 0x6a, 0xc5, 0x02, 0xd2, 0xef, 0xd8, 0x17, 0x32, 0x1e, 0xb0, 0x7e, 0x03, + 0xe6, 0x55, 0x3b, 0x54, 0x66, 0x05, 0x73, 0x2b, 0xdb, 0x3a, 0x7d, 0x3b, 0x24, 0x49, 0x5f, 0x96, + 0xd4, 0x5a, 0x69, 0x13, 0xee, 0x77, 0x6c, 0xcb, 0x6b, 0xa0, 0xd6, 0x6b, 0x30, 0xcb, 0xad, 0xbe, + 0xcc, 0x62, 0xe6, 0x56, 0x3e, 0xd3, 0x69, 0xfd, 0x92, 0x52, 0x6a, 0x6d, 0x75, 0xb1, 0xdf, 0xb1, + 0xc1, 0x9b, 0xaa, 0x64, 0x7d, 0x03, 0xce, 0xf3, 0x7b, 0x4c, 0x78, 0x96, 0x89, 0x7f, 0xac, 0x13, + 0xbf, 0xcb, 0x38, 0xb5, 0x7c, 0xae, 0xdc, 0xef, 0xd8, 0xb3, 0x7e, 0x5d, 0xb4, 0x7c, 0x70, 0x49, + 0x8e, 0xc7, 0xe1, 0x39, 0xa6, 0xa5, 0xa7, 0xd3, 0xc2, 0x7e, 0x84, 0x5d, 0x94, 0x1c, 0xd5, 0x8a, + 0x44, 0xa8, 0xdf, 0xb1, 0xe7, 0x46, 0x02, 0x62, 0xfd, 0x19, 0xb8, 0xa2, 0xc8, 0x17, 0xc0, 0x19, + 0xb3, 0x1f, 0xab, 0xc7, 0x53, 0x6b, 0xad, 0x4d, 0x98, 0xfe, 0x58, 0xa8, 0x81, 0x5a, 0x87, 0x60, + 0x4e, 0x0c, 0x7a, 0x21, 0x30, 0xb3, 0xca, 0xed, 0x82, 0x55, 0xeb, 0x14, 0x10, 0x6a, 0x95, 0x84, + 0x07, 0x72, 0xb3, 0xa8, 0xb6, 0x58, 0x70, 0xd6, 0xd4, 0x2c, 0xc2, 0x14, 0xf1, 0x66, 0x51, 0x16, + 0x73, 0xb3, 0x28, 0x4b, 0xd6, 0x1e, 0x38, 0x57, 0x06, 0x38, 0xf0, 0x3c, 0x13, 0x7d, 0x4f, 0x3b, + 0x74, 0xdc, 0x8c, 0xea, 0x55, 0x73, 0xe9, 0x2c, 0x2a, 0x66, 0x11, 0x06, 0x73, 0x62, 0x30, 0x0f, + 0x2f, 0x98, 0xf9, 0xa7, 0x9e, 0xfb, 0x8c, 0x92, 0x78, 0xe1, 0x35, 0x40, 0xfd, 0x13, 0xe2, 0xca, + 0xd6, 0x5f, 0x74, 0xc1, 0x87, 0xca, 0x54, 0x0a, 0x9c, 0x63, 0xea, 0x9e, 0xeb, 0xd4, 0xad, 0x17, + 0xe4, 0x35, 0xc6, 0xad, 0xb5, 0x2a, 0xf0, 0x7e, 0xc7, 0xbe, 0xe2, 0x34, 0x61, 0x6a, 0x04, 0xe2, + 0xa9, 0x03, 0xbc, 0x68, 0x66, 0x04, 0xa5, 0x8e, 0xa6, 0xd6, 0xca, 0x08, 0x1c, 0x1e, 0xb0, 0xfe, + 0xbc, 0x0b, 0xe6, 0x55, 0x89, 0x35, 0x78, 0xd9, 0xec, 0x63, 0x7b, 0x02, 0x57, 0xb4, 0x77, 0x09, + 0xa7, 0x1f, 0x8b, 0x9a, 0xb0, 0xf5, 0xb7, 0x5d, 0x00, 0x27, 0xe5, 0x56, 0xa1, 0xc5, 0xba, 0xf1, + 0xd2, 0xf0, 0x27, 0x5e, 0x2f, 0xd9, 0x8d, 0xdf, 0x5a, 0xac, 0xe9, 0x77, 0xec, 0x0f, 0x91, 0xaa, + 0xc2, 0x3a, 0x05, 0x57, 0x14, 0xa9, 0x6c, 0x78, 0x85, 0x75, 0xe3, 0x99, 0xbe, 0x1b, 0xeb, 0x15, + 0x93, 0xef, 0x82, 0x84, 0xf6, 0x3b, 0xf6, 0x65, 0x24, 0x83, 0x56, 0x00, 0x2e, 0x37, 0x92, 0xdb, + 0xf0, 0x43, 0x33, 0x3f, 0x57, 0x7c, 0xa5, 0x38, 0x83, 0x78, 0x88, 0xfa, 0x39, 0x24, 0x20, 0xd4, + 0xad, 0xca, 0xb9, 0x5e, 0x08, 0x4d, 0xd5, 0xed, 0xe7, 0x34, 0x5e, 0x1d, 0x0f, 0xe5, 0xea, 0x78, + 0xc4, 0x7a, 0x05, 0x66, 0xaa, 0x1c, 0x31, 0xbc, 0xc6, 0xf4, 0x3c, 0xd0, 0xeb, 0x39, 0xe1, 0x35, + 0x9c, 0x94, 0xb2, 0xcf, 0xa1, 0xe2, 0x6f, 0xea, 0xc4, 0xb8, 0x4c, 0x32, 0x3c, 0x32, 0x73, 0x62, + 0x3d, 0x4a, 0xe1, 0x24, 0x57, 0x45, 0xea, 0xc4, 0x50, 0x55, 0xa2, 0x3b, 0x22, 0x21, 0x01, 0x0c, + 0xaf, 0x9b, 0x79, 0x9c, 0x62, 0x23, 0xd2, 0xd8, 0x99, 0x54, 0x1e, 0x67, 0xc8, 0x95, 0xad, 0xbf, + 0xeb, 0x02, 0x38, 0x29, 0x01, 0x0b, 0x6f, 0x32, 0x8d, 0x5f, 0x9a, 0x4e, 0x7c, 0xc5, 0x0c, 0x50, + 0x57, 0xf5, 0x3b, 0xf6, 0x55, 0x47, 0x59, 0x93, 0x7b, 0x40, 0x55, 0xa2, 0x13, 0x7e, 0xdf, 0xd0, + 0x03, 0x56, 0xe4, 0x7d, 0xf2, 0x8e, 0x5b, 0xe3, 0x15, 0x38, 0xf3, 0x80, 0x4d, 0xd8, 0xca, 0x80, + 0xd5, 0x4c, 0x1b, 0x42, 0x9f, 0xe9, 0xdf, 0xd4, 0xeb, 0x67, 0x4c, 0xba, 0xa9, 0xe5, 0x95, 0x8b, + 0x60, 0xbf, 0x63, 0x5f, 0x72, 0x24, 0xcc, 0xf2, 0xc0, 0x45, 0xf9, 0x07, 0xb8, 0x6d, 0x36, 0x25, + 0x54, 0x03, 0x2f, 0x0f, 0xf8, 0x9c, 0x23, 0x0e, 0x34, 0x75, 0xf3, 0x42, 0xbe, 0x10, 0x7e, 0x6d, + 0xe8, 0xe6, 0x0b, 0x16, 0xa7, 0x8b, 0x47, 0x98, 0x9b, 0xe7, 0x01, 0xeb, 0x17, 0x00, 0xd4, 0x39, + 0x3b, 0xf8, 0x03, 0xa6, 0xe5, 0x91, 0x4e, 0xcb, 0x86, 0xb0, 0x41, 0xdb, 0xe0, 0xb6, 0x67, 0x33, + 0x2e, 0xbf, 
0x39, 0x93, 0x93, 0x7b, 0x70, 0xc1, 0x6c, 0xc8, 0xa8, 0x44, 0x9b, 0xd2, 0x44, 0x25, + 0x35, 0x44, 0x87, 0xcc, 0x15, 0x10, 0x3a, 0x64, 0x62, 0x52, 0x10, 0x7e, 0x6c, 0x36, 0x64, 0x1b, + 0x05, 0x8b, 0xd3, 0xc5, 0x23, 0x74, 0xc8, 0x5c, 0x1e, 0xb0, 0xbe, 0xeb, 0x82, 0x5b, 0xd3, 0x73, + 0x73, 0xf0, 0x87, 0x4c, 0xf5, 0xaf, 0xb4, 0x9b, 0x50, 0x8c, 0xdd, 0x5e, 0x29, 0xc4, 0x2e, 0x65, + 0x70, 0x3b, 0xd2, 0xc9, 0xf5, 0xfd, 0x8e, 0x7d, 0x63, 0x34, 0xb9, 0x9a, 0x0e, 0x87, 0x98, 0x73, + 0x82, 0x3f, 0x32, 0x1b, 0x8e, 0xad, 0x82, 0xc5, 0xf5, 0x82, 0x47, 0xe8, 0x70, 0x8c, 0x78, 0x80, + 0x85, 0x6d, 0xea, 0x4c, 0x15, 0xfc, 0x43, 0xb3, 0xb0, 0xad, 0x54, 0xb0, 0x97, 0x93, 0x9b, 0x9a, + 0xf9, 0x0a, 0x1a, 0xb6, 0x8d, 0x14, 0x38, 0xdd, 0x57, 0x96, 0x59, 0x27, 0xf8, 0x47, 0x66, 0xfb, + 0xca, 0x6d, 0x6e, 0x55, 0xdc, 0xae, 0xf7, 0x95, 0xc4, 0xad, 0x96, 0x10, 0x2e, 0x31, 0x05, 0xef, + 0x98, 0x2d, 0x21, 0xdb, 0x94, 0xc2, 0xc9, 0xad, 0x8a, 0x74, 0x09, 0x21, 0x55, 0x89, 0x6e, 0x28, + 0x14, 0x79, 0x0d, 0x18, 0x98, 0x6d, 0x28, 0x76, 0x0a, 0xaa, 0xe8, 0xca, 0x1a, 0x28, 0xdd, 0x50, + 0x78, 0x32, 0x48, 0x7d, 0x68, 0x33, 0xbb, 0x01, 0x7f, 0x6c, 0xe6, 0x43, 0x77, 0x29, 0x73, 0x8d, + 0x11, 0xb9, 0x20, 0x4d, 0x02, 0xa9, 0x0f, 0xf5, 0x25, 0xac, 0x56, 0xcb, 0x27, 0x39, 0x60, 0xda, + 0x42, 0xed, 0x57, 0x8c, 0x28, 0xa9, 0xe5, 0xc0, 0x4a, 0x2d, 0x87, 0x51, 0x47, 0x24, 0xa7, 0x64, + 0xe0, 0x81, 0x99, 0x23, 0x62, 0xf2, 0xb7, 0x48, 0xe3, 0x4b, 0x2b, 0x88, 0x3a, 0x22, 0x5f, 0x40, + 0xac, 0x08, 0x5c, 0x92, 0x33, 0x32, 0xf0, 0x27, 0x4c, 0xdd, 0xba, 0x3e, 0xf4, 0x2d, 0x79, 0x7c, + 0xf8, 0x2b, 0x60, 0xfd, 0x8e, 0x7d, 0xd1, 0x17, 0x21, 0xeb, 0x9f, 0xba, 0xe0, 0xc6, 0x94, 0x1c, + 0x0b, 0xfc, 0x84, 0x69, 0xff, 0x4a, 0xa7, 0xfd, 0x05, 0x7e, 0x6b, 0x17, 0x12, 0xd6, 0x0b, 0x01, + 0x75, 0x37, 0x26, 0x55, 0xf6, 0x3b, 0x36, 0x0c, 0x26, 0xd4, 0x59, 0x04, 0x5c, 0x94, 0x32, 0x37, + 0x70, 0xd1, 0xcc, 0x0b, 0xbd, 0x08, 0xd3, 0x4d, 0xca, 0xe2, 0x3a, 0xc0, 0x23, 0xd4, 0x0b, 0x05, + 0x3c, 0x40, 0xd7, 0xb1, 0x3a, 0xdd, 0x03, 0x97, 0xcc, 0xd6, 0xb1, 0x17, 0x99, 0xc7, 0x45, 0xc5, + 0x55, 0x89, 0xae, 0x63, 0x41, 0x59, 0xa0, 0x9e, 0x54, 0xcc, 0xe9, 0xc0, 0xbb, 0x66, 0xdf, 0xf0, + 0xb2, 0x60, 0xd5, 0x1a, 0x04, 0x84, 0x7e, 0x43, 0xc8, 0x03, 0xd4, 0xdf, 0xf0, 0x8b, 0xe5, 0xb2, + 0x99, 0xbf, 0x91, 0x16, 0x4a, 0x61, 0x91, 0x04, 0x71, 0xbd, 0x40, 0xd2, 0x80, 0x4e, 0x95, 0xd5, + 0x80, 0xab, 0x66, 0x7b, 0x37, 0x5b, 0xe0, 0xf2, 0x67, 0x7a, 0x0d, 0x9c, 0xee, 0xdd, 0xe2, 0x26, + 0x6c, 0xbd, 0x01, 0x97, 0x1b, 0xc9, 0x11, 0x78, 0xcf, 0x6c, 0x6e, 0xd8, 0x8c, 0x28, 0xba, 0x3b, + 0x09, 0xa3, 0x73, 0x23, 0x16, 0x21, 0xba, 0x51, 0x17, 0xd2, 0x12, 0xf0, 0xbe, 0xd9, 0x46, 0x7d, + 0x3f, 0x27, 0xd5, 0xba, 0x78, 0x80, 0x6e, 0xd4, 0x13, 0xae, 0x4c, 0x7d, 0xb9, 0x22, 0x3b, 0x00, + 0x1f, 0x98, 0xf9, 0xf2, 0x7d, 0x46, 0xdd, 0x62, 0x4c, 0x4e, 0xa1, 0x8c, 0x52, 0x5f, 0x9e, 0xc8, + 0x20, 0xa7, 0x9a, 0x4f, 0x2f, 0xc0, 0x87, 0x6d, 0x54, 0xef, 0x30, 0xa6, 0xac, 0x9a, 0x43, 0x6b, + 0xd5, 0x1c, 0x68, 0xfd, 0x4b, 0x17, 0xdc, 0x9c, 0x9a, 0x32, 0x85, 0x8f, 0x58, 0x2f, 0x7e, 0x69, + 0x36, 0x53, 0x48, 0x18, 0xf4, 0x98, 0x8c, 0x0d, 0x26, 0x42, 0x9e, 0x36, 0xaa, 0xea, 0x7e, 0xc7, + 0xbe, 0x1e, 0x4e, 0xac, 0xb5, 0xfe, 0xb9, 0x0b, 0xbe, 0x37, 0x2d, 0x43, 0x06, 0x3f, 0x65, 0xfd, + 0xfb, 0xb9, 0xde, 0xf6, 0x72, 0x19, 0xea, 0xee, 0x4d, 0xac, 0xed, 0x77, 0xec, 0x6b, 0xf1, 0xa4, + 0x4a, 0xeb, 0xdf, 0xbb, 0xe0, 0x63, 0x83, 0xdc, 0x1b, 0xfc, 0x8c, 0xf5, 0xd1, 0x31, 0xed, 0x23, + 0x8b, 0x66, 0x76, 0xa8, 0xa0, 0xcd, 0x42, 0x4e, 0xb3, 0xa7, 0x13, 0xda, 0xf4, 0x3b, 0xf6, 0xf7, + 0xe3, 0xe9, 0x4d, 0xac, 0xdf, 0x76, 
0xc1, 0x6d, 0x5d, 0xa2, 0x08, 0x66, 0xac, 0xcb, 0x03, 0x5d, + 0x97, 0x7f, 0x99, 0xcb, 0x39, 0xa0, 0x62, 0x6c, 0x4e, 0x4a, 0xdd, 0xe1, 0xa9, 0x2d, 0xfa, 0x1d, + 0xfb, 0xe6, 0xe9, 0xb4, 0x06, 0xd6, 0x3f, 0x76, 0xc1, 0x8d, 0x29, 0xd7, 0xfb, 0xe1, 0x63, 0xd6, + 0xcf, 0x7d, 0xc3, 0x33, 0x94, 0x35, 0xe2, 0xee, 0x16, 0xfc, 0xc6, 0x61, 0x8a, 0x5c, 0xd7, 0xef, + 0xd8, 0x1f, 0x21, 0x75, 0x15, 0x75, 0x49, 0xc2, 0x8d, 0x5f, 0xf8, 0x53, 0x33, 0x97, 0xb4, 0x9e, + 0x93, 0xf8, 0xc0, 0xb5, 0x06, 0xa8, 0x4b, 0x72, 0xb8, 0x32, 0x5d, 0x4c, 0xf8, 0x0c, 0xc6, 0xe7, + 0x66, 0x8b, 0x89, 0x94, 0xbe, 0x10, 0x72, 0x17, 0x60, 0x54, 0x27, 0x2e, 0x7c, 0x70, 0x49, 0x4e, + 0xbe, 0xc2, 0x27, 0x66, 0x7b, 0xaa, 0x7d, 0xc6, 0xdb, 0xe7, 0x8f, 0x59, 0x44, 0x88, 0xee, 0xa9, + 0x12, 0x01, 0x61, 0x31, 0x86, 0x3a, 0x53, 0x0c, 0x9f, 0x9a, 0xc5, 0x18, 0xb9, 0x0a, 0x45, 0x2c, + 0xae, 0xaa, 0xa0, 0x31, 0x46, 0xa2, 0xc0, 0xad, 0xbf, 0xef, 0x82, 0x6b, 0x13, 0xef, 0x98, 0xc3, + 0x2f, 0xda, 0x9d, 0xc8, 0x28, 0x06, 0x42, 0x5d, 0xc5, 0x9f, 0xc8, 0x88, 0x35, 0xd6, 0xdf, 0x74, + 0xc1, 0x47, 0x13, 0xae, 0x82, 0xc3, 0x9e, 0xd9, 0x09, 0xe9, 0x7a, 0x45, 0xef, 0x49, 0x81, 0x9f, + 0xb2, 0xa6, 0xdf, 0xb1, 0x3f, 0x74, 0x54, 0x15, 0xd6, 0xbf, 0x75, 0xc1, 0x82, 0xfe, 0x62, 0x39, + 0x4c, 0xcc, 0xfc, 0x19, 0xa7, 0xbc, 0x12, 0x24, 0x6f, 0xff, 0x35, 0x6d, 0xa8, 0x3f, 0x73, 0xa6, + 0x37, 0x91, 0x07, 0x50, 0xe8, 0x69, 0xd8, 0x76, 0x00, 0x27, 0xf7, 0x4f, 0xec, 0x15, 0x37, 0x80, + 0x7c, 0x5f, 0xbe, 0x01, 0xe7, 0xf9, 0xab, 0x96, 0x70, 0xcd, 0x2c, 0x61, 0xd6, 0x67, 0x9c, 0x5a, + 0x2b, 0x57, 0xee, 0x77, 0xec, 0xd9, 0xc3, 0xba, 0xc8, 0x0e, 0x49, 0x84, 0x6b, 0xdd, 0x70, 0xdd, + 0xf0, 0x90, 0xa4, 0x60, 0x09, 0x07, 0x32, 0x35, 0xc2, 0x0e, 0x49, 0x78, 0xc0, 0xfa, 0x0d, 0x98, + 0x57, 0x5d, 0x4c, 0x82, 0x1b, 0x66, 0xc9, 0xb2, 0x3d, 0xc6, 0xdd, 0xca, 0xa9, 0xb5, 0xd2, 0x26, + 0xdc, 0xef, 0xd8, 0x56, 0xd4, 0x40, 0x69, 0x38, 0x50, 0xdf, 0xf6, 0x86, 0x5b, 0x66, 0xe1, 0x00, + 0x55, 0x51, 0xeb, 0xaa, 0x4a, 0x34, 0x1c, 0xc0, 0x65, 0x81, 0x06, 0xb1, 0xcd, 0x2b, 0x9c, 0xf0, + 0x99, 0x59, 0x10, 0xfb, 0x3c, 0x67, 0xee, 0x79, 0x88, 0x9b, 0x66, 0x32, 0x48, 0x83, 0xd8, 0x23, + 0x09, 0x63, 0x31, 0xde, 0x94, 0xcb, 0xa7, 0xb0, 0x6f, 0x16, 0xe3, 0x71, 0xba, 0x9a, 0x19, 0xa1, + 0x49, 0x95, 0x34, 0xc6, 0x3b, 0x9a, 0x50, 0xc7, 0xb6, 0x31, 0x06, 0x77, 0x43, 0xe1, 0xb6, 0xd9, + 0xb4, 0xe7, 0xfa, 0xf0, 0xa2, 0x10, 0x54, 0x40, 0xca, 0x7e, 0xaa, 0xda, 0xd0, 0x69, 0x7f, 0x34, + 0xbd, 0x09, 0xdb, 0x19, 0x4c, 0xb9, 0x0d, 0x0c, 0x7f, 0x66, 0xb6, 0x33, 0xe0, 0x7a, 0xd2, 0xc8, + 0x25, 0x4e, 0xa8, 0xa3, 0x3b, 0x83, 0x23, 0x75, 0x95, 0xf5, 0x0f, 0x5d, 0x70, 0x7d, 0xf2, 0x45, + 0x5d, 0xf8, 0xbc, 0x75, 0xa7, 0xa6, 0x0d, 0x9d, 0x34, 0x64, 0x7c, 0xa7, 0x84, 0xa1, 0xfa, 0xeb, + 0x2e, 0xf8, 0x68, 0xc2, 0xdd, 0x63, 0xb8, 0x63, 0xb6, 0xf8, 0x72, 0x5a, 0xb7, 0x5d, 0x8c, 0x94, + 0xdd, 0xa9, 0x2a, 0xe8, 0xe2, 0x7b, 0xa4, 0xc0, 0x59, 0xb8, 0x31, 0xf5, 0x45, 0x2a, 0xdc, 0x35, + 0x0b, 0x37, 0x7a, 0xb9, 0x90, 0x3c, 0xaf, 0xba, 0x57, 0x88, 0xe0, 0xb6, 0x74, 0x13, 0xab, 0x69, + 0xb8, 0x81, 0x26, 0xd6, 0xb2, 0x8b, 0x23, 0xe2, 0xcd, 0x2b, 0xf8, 0xc2, 0xf0, 0xe2, 0x48, 0x82, + 0xe3, 0x1d, 0xc2, 0xdf, 0x94, 0x11, 0x10, 0x76, 0x71, 0x84, 0x07, 0x68, 0xd4, 0xa7, 0x78, 0xab, + 0x06, 0x5f, 0x9a, 0x45, 0x7d, 0xeb, 0x8c, 0xba, 0xcf, 0x98, 0xdc, 0x6a, 0x25, 0xa3, 0x34, 0xea, + 0x73, 0x64, 0x90, 0xba, 0xd6, 0xfa, 0x51, 0x09, 0xdc, 0x33, 0x73, 0xad, 0x5b, 0x98, 0x8f, 0x43, + 0xaa, 0x12, 0x75, 0xad, 0xa3, 0xb2, 0x60, 0xfd, 0x47, 0x17, 0xfc, 0xd0, 0xe4, 0xd9, 0x02, 0xfc, + 0x92, 0x29, 0xc5, 0xda, 0x6b, 0x62, 0x38, 0x7c, 0xc5, 0x44, 
0xad, 0x17, 0x92, 0xf6, 0x2b, 0x41, + 0x75, 0x5f, 0x74, 0x8d, 0xfa, 0x1d, 0xfb, 0xf6, 0x58, 0xd3, 0x86, 0xfe, 0xf2, 0xd2, 0xbb, 0x19, + 0xb8, 0x6f, 0x78, 0xdc, 0x8e, 0xb1, 0xbb, 0x9d, 0x62, 0x5f, 0x1c, 0x9c, 0x0a, 0x61, 0xc7, 0xed, + 0x3c, 0x40, 0xf7, 0xf5, 0xdc, 0x05, 0x7a, 0xf8, 0xca, 0x6c, 0x5f, 0xbf, 0x43, 0x29, 0xfc, 0x39, + 0x71, 0x59, 0xa4, 0xfb, 0x7a, 0xaf, 0x2a, 0x51, 0xbb, 0x52, 0xbc, 0x33, 0x85, 0x3f, 0x37, 0xb3, + 0xab, 0xb5, 0x9c, 0xba, 0x4f, 0x99, 0x7c, 0x8a, 0x53, 0x42, 0xa9, 0x5d, 0x0d, 0x65, 0x90, 0x39, + 0xbf, 0xc9, 0xaf, 0xec, 0xe0, 0x2f, 0xcc, 0x9c, 0x5f, 0x99, 0xee, 0x5a, 0x67, 0x02, 0x76, 0x48, + 0x70, 0xd4, 0x4c, 0x85, 0x49, 0x75, 0xd4, 0xf9, 0x39, 0xea, 0x2a, 0xb6, 0xec, 0x4e, 0x79, 0x4a, + 0x07, 0x7f, 0x65, 0xb6, 0xec, 0x96, 0x9a, 0x77, 0x73, 0x09, 0xea, 0x6e, 0xc9, 0x95, 0x74, 0xd9, + 0x75, 0x26, 0xd4, 0xd1, 0x6d, 0x48, 0xf3, 0xd5, 0x13, 0x7c, 0x6d, 0xb6, 0x0d, 0xa1, 0x26, 0xb6, + 0x9b, 0x13, 0x45, 0xbb, 0xe3, 0x41, 0xba, 0x0d, 0x19, 0x49, 0x18, 0xf3, 0x3b, 0xcd, 0x37, 0x89, + 0xf0, 0x4f, 0x0d, 0xfd, 0x4e, 0x41, 0x15, 0xbd, 0x41, 0x03, 0x65, 0x7e, 0x47, 0x06, 0xad, 0x63, + 0x76, 0x01, 0x43, 0x7e, 0xf4, 0x0e, 0x7f, 0x6d, 0xf6, 0xc9, 0xc5, 0x3a, 0x2b, 0x6a, 0x96, 0x41, + 0xfa, 0xc9, 0x48, 0xc2, 0xd8, 0x27, 0x37, 0x1f, 0x24, 0xc3, 0x6f, 0x0c, 0x3f, 0xb9, 0xa0, 0x4a, + 0x9f, 0x2c, 0xa3, 0xec, 0x93, 0x65, 0x30, 0xcf, 0xb7, 0xab, 0x1e, 0xad, 0x42, 0x64, 0x98, 0x6f, + 0x67, 0xe4, 0xed, 0x82, 0x2b, 0x0f, 0xb9, 0x80, 0xb3, 0x7c, 0x7b, 0x13, 0x66, 0x21, 0xef, 0xc4, + 0x77, 0x90, 0xd0, 0x35, 0x0b, 0x79, 0x37, 0x4b, 0x01, 0x4d, 0x77, 0xa7, 0xae, 0xa2, 0x21, 0x2f, + 0x56, 0xd6, 0x50, 0xcb, 0x6f, 0xfe, 0x17, 0x06, 0x10, 0x9b, 0x9a, 0xc1, 0x5e, 0x49, 0xe4, 0xcd, + 0x40, 0x04, 0x73, 0x33, 0x10, 0x31, 0xe6, 0x9e, 0x26, 0x3f, 0x54, 0x84, 0x23, 0x33, 0xf7, 0x54, + 0x7e, 0xca, 0x41, 0xc5, 0x6f, 0x3a, 0x7e, 0xa9, 0x8e, 0xba, 0xa7, 0x91, 0xba, 0x8a, 0x45, 0xaf, + 0x13, 0x1e, 0x99, 0xc2, 0xb1, 0x59, 0xf4, 0x5a, 0x8d, 0x7f, 0xe3, 0xc4, 0x5b, 0x59, 0x43, 0xa3, + 0x57, 0xac, 0xaa, 0xb0, 0xfe, 0xb2, 0x0b, 0xae, 0xaa, 0x9f, 0x81, 0xc2, 0x43, 0x33, 0x6b, 0x2d, + 0x07, 0x20, 0x5f, 0x6b, 0x9b, 0x03, 0xc3, 0xe1, 0xd4, 0x5a, 0x47, 0x4d, 0x98, 0x1d, 0x14, 0xa9, + 0x5f, 0xa3, 0x40, 0x62, 0xb6, 0x57, 0xcd, 0x43, 0xcc, 0x83, 0x92, 0x2c, 0xc7, 0x9e, 0x62, 0x05, + 0xdd, 0xab, 0x46, 0x0a, 0x9c, 0xe5, 0x1c, 0xa5, 0xc7, 0x52, 0xd0, 0x33, 0xcc, 0x39, 0x32, 0xde, + 0xcf, 0xc2, 0xa1, 0x7c, 0x05, 0xb6, 0x84, 0x58, 0xce, 0x51, 0x40, 0x98, 0xa3, 0x50, 0xbe, 0x21, + 0x82, 0x91, 0xd9, 0xd0, 0xef, 0xe5, 0xe4, 0xe2, 0x46, 0x3e, 0xf7, 0xd1, 0x4d, 0x9c, 0x0e, 0x7d, + 0xd4, 0x84, 0xad, 0x7f, 0xed, 0x82, 0x1f, 0x68, 0x1f, 0x14, 0xc1, 0x98, 0xf5, 0x07, 0x19, 0xfe, + 0x0a, 0x95, 0x1c, 0xc5, 0xac, 0x9d, 0xde, 0xa4, 0xdf, 0xb1, 0x6f, 0x45, 0x53, 0x5b, 0xb0, 0x33, + 0x4c, 0xe9, 0x25, 0x08, 0x3c, 0x36, 0x3c, 0xc3, 0x24, 0xef, 0x70, 0x7e, 0x38, 0x5e, 0x9f, 0x23, + 0x0a, 0x10, 0x3b, 0xc3, 0x14, 0x90, 0xb5, 0xf3, 0xfc, 0x53, 0x84, 0x85, 0xff, 0xe9, 0x82, 0x0b, + 0xc2, 0x25, 0x7e, 0x2b, 0x00, 0x57, 0xf2, 0x03, 0xdb, 0x08, 0xa5, 0x87, 0x03, 0xec, 0x61, 0x1f, + 0x07, 0x69, 0x02, 0xcf, 0xb0, 0x27, 0x11, 0x4f, 0x5a, 0x3d, 0x08, 0xc8, 0xcf, 0x6d, 0xf7, 0x50, + 0x7a, 0xb8, 0x99, 0x8b, 0xb1, 0x2f, 0x8f, 0x24, 0x24, 0xb9, 0xee, 0x82, 0x4b, 0x72, 0x33, 0xeb, + 0x26, 0xc8, 0x0f, 0x79, 0x07, 0x01, 0xf2, 0xf3, 0xe7, 0x12, 0x33, 0xf6, 0x0c, 0x43, 0x5e, 0x20, + 0x1f, 0x5b, 0xcb, 0xe0, 0x7d, 0x12, 0xb8, 0xf8, 0x84, 0xdd, 0xc7, 0x9f, 0x5d, 0xb9, 0x51, 0x76, + 0xaa, 0x7c, 0x0a, 0xb2, 0xb8, 0x1d, 0xa4, 0x0f, 0xef, 0xe7, 0xaf, 0x1b, 0xf2, 0x96, 
0x0b, 0xdf, + 0x9d, 0x01, 0xe7, 0xf9, 0x27, 0x06, 0xd6, 0x7d, 0x70, 0x35, 0x0b, 0xa2, 0x6c, 0xe8, 0x91, 0xe4, + 0xb0, 0x5c, 0x4c, 0xeb, 0xd7, 0x19, 0x33, 0xf6, 0x3c, 0x57, 0x5b, 0xbf, 0x40, 0x88, 0x00, 0x6c, + 0x4c, 0xeb, 0xf2, 0xa1, 0x43, 0xde, 0x99, 0x87, 0x2d, 0x27, 0x76, 0xf9, 0xe4, 0xe1, 0x6a, 0xa4, + 0xc4, 0xad, 0x6f, 0x2b, 0x47, 0x52, 0x1e, 0x60, 0x95, 0xfa, 0xf2, 0x47, 0x1e, 0xf7, 0x5b, 0x1d, + 0x61, 0x95, 0xda, 0xe6, 0x23, 0x05, 0xba, 0xf0, 0xff, 0x5d, 0x70, 0x55, 0xdd, 0x3d, 0xeb, 0x09, + 0xb8, 0x41, 0xdd, 0x2d, 0xbb, 0xf5, 0x5d, 0xf4, 0xc7, 0xc5, 0x89, 0x13, 0x93, 0x88, 0x3d, 0x17, + 0xc9, 0x9f, 0xa2, 0x5c, 0x2b, 0x9b, 0xe4, 0x42, 0x36, 0xea, 0x06, 0xd6, 0x06, 0x78, 0xef, 0x08, + 0x9f, 0x16, 0xcf, 0x4a, 0x56, 0x74, 0x0f, 0x53, 0xa4, 0x4e, 0x3c, 0xc7, 0xa7, 0x36, 0xa5, 0x5b, + 0x77, 0xc1, 0xbc, 0xdc, 0x0b, 0x66, 0x21, 0xef, 0x33, 0xf5, 0x96, 0xa8, 0x9e, 0x99, 0xca, 0xc7, + 0xe0, 0x02, 0x49, 0x06, 0xf8, 0x04, 0xfb, 0x51, 0x4a, 0x86, 0x1e, 0x86, 0x1f, 0xdc, 0xee, 0xde, + 0x39, 0x67, 0x9f, 0x27, 0xc9, 0x66, 0x85, 0x2d, 0xbc, 0x03, 0xf3, 0xaa, 0x51, 0xb2, 0x86, 0xd5, + 0xe1, 0x61, 0x1a, 0x46, 0xc4, 0x19, 0xe0, 0x20, 0x8d, 0x09, 0x2e, 0x9f, 0x07, 0xdd, 0x35, 0xfb, + 0x8a, 0x57, 0x94, 0xba, 0x19, 0xa4, 0xf1, 0x69, 0x79, 0x42, 0x58, 0x21, 0x04, 0x27, 0x6b, 0xff, + 0xcb, 0x4e, 0xab, 0x7d, 0xcd, 0xaf, 0xb8, 0x36, 0xcb, 0x6c, 0x30, 0xd9, 0xa3, 0x16, 0xbe, 0xd7, + 0xfd, 0x7a, 0xa3, 0x68, 0x3e, 0x0e, 0x3d, 0x14, 0x8c, 0x17, 0xc3, 0x78, 0xbc, 0x34, 0xc6, 0x01, + 0xb3, 0xff, 0xf2, 0x4d, 0x55, 0x44, 0x92, 0x49, 0x4f, 0xac, 0x1e, 0xe7, 0xff, 0x7c, 0x77, 0xe6, + 0xbd, 0x67, 0xbd, 0xde, 0x6f, 0xcf, 0xdc, 0xca, 0x1f, 0x2b, 0x2d, 0xf6, 0x5c, 0xee, 0x49, 0xd3, + 0xe2, 0xc1, 0x72, 0x3e, 0x99, 0x93, 0xff, 0x2a, 0x1b, 0xbc, 0xee, 0xb9, 0xc9, 0xeb, 0xaa, 0xc1, + 0xeb, 0x83, 0xe5, 0xd7, 0x79, 0x83, 0xff, 0x3b, 0xb3, 0x90, 0xa3, 0xab, 0xab, 0x3d, 0x37, 0x59, + 0x5d, 0xad, 0x9a, 0xac, 0xae, 0x1e, 0x2c, 0xaf, 0xae, 0xe6, 0x8d, 0x86, 0x1f, 0xb0, 0xde, 0xdd, + 0xfb, 0x5d, 0x00, 0x00, 0x00, 0xff, 0xff, 0x80, 0x11, 0x25, 0x08, 0x93, 0x4c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_feed_item_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_feed_item_error.pb.go new file mode 100644 index 0000000..f1cb8e7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_feed_item_error.pb.go @@ -0,0 +1,343 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/extension_feed_item_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible extension feed item errors. +type ExtensionFeedItemErrorEnum_ExtensionFeedItemError int32 + +const ( + // Enum unspecified. 
+ ExtensionFeedItemErrorEnum_UNSPECIFIED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 0 + // The received error code is not known in this version. + ExtensionFeedItemErrorEnum_UNKNOWN ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 1 + // Value is not within the accepted range. + ExtensionFeedItemErrorEnum_VALUE_OUT_OF_RANGE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 2 + // Url list is too long. + ExtensionFeedItemErrorEnum_URL_LIST_TOO_LONG ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 3 + // Cannot have a geo targeting restriction without having geo targeting. + ExtensionFeedItemErrorEnum_CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 4 + // Cannot simultaneously set sitelink field with final urls. + ExtensionFeedItemErrorEnum_CANNOT_SET_WITH_FINAL_URLS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 5 + // Must set field with final urls. + ExtensionFeedItemErrorEnum_CANNOT_SET_WITHOUT_FINAL_URLS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 6 + // Phone number for a call extension is invalid. + ExtensionFeedItemErrorEnum_INVALID_PHONE_NUMBER ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 7 + // Phone number for a call extension is not supported for the given country + // code. + ExtensionFeedItemErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 8 + // A carrier specific number in short format is not allowed for call + // extensions. + ExtensionFeedItemErrorEnum_CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 9 + // Premium rate numbers are not allowed for call extensions. + ExtensionFeedItemErrorEnum_PREMIUM_RATE_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 10 + // Phone number type for a call extension is not allowed. + // For example, personal number is not allowed for a call extension in + // most regions. + ExtensionFeedItemErrorEnum_DISALLOWED_NUMBER_TYPE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 11 + // Phone number for a call extension does not meet domestic format + // requirements. + ExtensionFeedItemErrorEnum_INVALID_DOMESTIC_PHONE_NUMBER_FORMAT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 12 + // Vanity phone numbers (i.e. those including letters) are not allowed for + // call extensions. + ExtensionFeedItemErrorEnum_VANITY_PHONE_NUMBER_NOT_ALLOWED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 13 + // Call conversion action provided for a call extension is invalid. + ExtensionFeedItemErrorEnum_INVALID_CALL_CONVERSION_ACTION ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 14 + // For a call extension, the customer is not whitelisted for call tracking. + ExtensionFeedItemErrorEnum_CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 15 + // Call tracking is not supported for the given country for a call + // extension. + ExtensionFeedItemErrorEnum_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 16 + // Customer hasn't consented for call recording, which is required for + // creating/updating call feed items. Please see + // https://support.google.com/google-ads/answer/7412639. + ExtensionFeedItemErrorEnum_CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 17 + // App id provided for an app extension is invalid. 
+ ExtensionFeedItemErrorEnum_INVALID_APP_ID ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 18 + // Quotation marks present in the review text for a review extension. + ExtensionFeedItemErrorEnum_QUOTES_IN_REVIEW_EXTENSION_SNIPPET ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 19 + // Hyphen character present in the review text for a review extension. + ExtensionFeedItemErrorEnum_HYPHENS_IN_REVIEW_EXTENSION_SNIPPET ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 20 + // A blacklisted review source name or url was provided for a review + // extension. + ExtensionFeedItemErrorEnum_REVIEW_EXTENSION_SOURCE_INELIGIBLE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 21 + // Review source name should not be found in the review text. + ExtensionFeedItemErrorEnum_SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 22 + // Inconsistent currency codes. + ExtensionFeedItemErrorEnum_INCONSISTENT_CURRENCY_CODES ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 23 + // Price extension cannot have duplicated headers. + ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_DUPLICATED_HEADERS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 24 + // Price item cannot have duplicated header and description. + ExtensionFeedItemErrorEnum_PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 25 + // Price extension has too few items. + ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_TOO_FEW_ITEMS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 26 + // Price extension has too many items. + ExtensionFeedItemErrorEnum_PRICE_EXTENSION_HAS_TOO_MANY_ITEMS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 27 + // The input value is not currently supported. + ExtensionFeedItemErrorEnum_UNSUPPORTED_VALUE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 28 + // The input value is not currently supported in the selected language of an + // extension. + ExtensionFeedItemErrorEnum_UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 29 + // Unknown or unsupported device preference. + ExtensionFeedItemErrorEnum_INVALID_DEVICE_PREFERENCE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 30 + // Invalid feed item schedule end time (i.e., endHour = 24 and endMinute != + // 0). + ExtensionFeedItemErrorEnum_INVALID_SCHEDULE_END ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 31 + // Date time zone does not match the account's time zone. + ExtensionFeedItemErrorEnum_DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 32 + // Invalid structured snippet header. + ExtensionFeedItemErrorEnum_INVALID_SNIPPETS_HEADER ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 33 + // Cannot operate on removed feed item. + ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_REMOVED_FEED_ITEM ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 34 + // Phone number not supported when call tracking enabled for country. + ExtensionFeedItemErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 35 + // Cannot set call_conversion_action while call_conversion_tracking_enabled + // is set to true. + ExtensionFeedItemErrorEnum_CONFLICTING_CALL_CONVERSION_SETTINGS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 36 + // The type of the input extension feed item doesn't match the existing + // extension feed item. 
+ ExtensionFeedItemErrorEnum_EXTENSION_TYPE_MISMATCH ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 37 + // The oneof field extension i.e. subtype of extension feed item is + // required. + ExtensionFeedItemErrorEnum_EXTENSION_SUBTYPE_REQUIRED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 38 + // The referenced feed item is not mapped to a supported extension type. + ExtensionFeedItemErrorEnum_EXTENSION_TYPE_UNSUPPORTED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 39 + // Cannot operate on a Feed with more than one active FeedMapping. + ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 40 + // Cannot operate on a Feed that has key attributes. + ExtensionFeedItemErrorEnum_CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 41 + // Input price is not in a valid format. + ExtensionFeedItemErrorEnum_INVALID_PRICE_FORMAT ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 42 + // The promotion time is invalid. + ExtensionFeedItemErrorEnum_PROMOTION_INVALID_TIME ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 43 + // This field has too many decimal places specified. + ExtensionFeedItemErrorEnum_TOO_MANY_DECIMAL_PLACES_SPECIFIED ExtensionFeedItemErrorEnum_ExtensionFeedItemError = 44 +) + +var ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "VALUE_OUT_OF_RANGE", + 3: "URL_LIST_TOO_LONG", + 4: "CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING", + 5: "CANNOT_SET_WITH_FINAL_URLS", + 6: "CANNOT_SET_WITHOUT_FINAL_URLS", + 7: "INVALID_PHONE_NUMBER", + 8: "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY", + 9: "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED", + 10: "PREMIUM_RATE_NUMBER_NOT_ALLOWED", + 11: "DISALLOWED_NUMBER_TYPE", + 12: "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT", + 13: "VANITY_PHONE_NUMBER_NOT_ALLOWED", + 14: "INVALID_CALL_CONVERSION_ACTION", + 15: "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING", + 16: "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY", + 17: "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED", + 18: "INVALID_APP_ID", + 19: "QUOTES_IN_REVIEW_EXTENSION_SNIPPET", + 20: "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET", + 21: "REVIEW_EXTENSION_SOURCE_INELIGIBLE", + 22: "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT", + 23: "INCONSISTENT_CURRENCY_CODES", + 24: "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS", + 25: "PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION", + 26: "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS", + 27: "PRICE_EXTENSION_HAS_TOO_MANY_ITEMS", + 28: "UNSUPPORTED_VALUE", + 29: "UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE", + 30: "INVALID_DEVICE_PREFERENCE", + 31: "INVALID_SCHEDULE_END", + 32: "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE", + 33: "INVALID_SNIPPETS_HEADER", + 34: "CANNOT_OPERATE_ON_REMOVED_FEED_ITEM", + 35: "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY", + 36: "CONFLICTING_CALL_CONVERSION_SETTINGS", + 37: "EXTENSION_TYPE_MISMATCH", + 38: "EXTENSION_SUBTYPE_REQUIRED", + 39: "EXTENSION_TYPE_UNSUPPORTED", + 40: "CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS", + 41: "CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES", + 42: "INVALID_PRICE_FORMAT", + 43: "PROMOTION_INVALID_TIME", + 44: "TOO_MANY_DECIMAL_PLACES_SPECIFIED", +} +var ExtensionFeedItemErrorEnum_ExtensionFeedItemError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "VALUE_OUT_OF_RANGE": 2, + "URL_LIST_TOO_LONG": 3, + "CANNOT_HAVE_RESTRICTION_ON_EMPTY_GEO_TARGETING": 4, + "CANNOT_SET_WITH_FINAL_URLS": 5, + "CANNOT_SET_WITHOUT_FINAL_URLS": 6, + 
"INVALID_PHONE_NUMBER": 7, + "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY": 8, + "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED": 9, + "PREMIUM_RATE_NUMBER_NOT_ALLOWED": 10, + "DISALLOWED_NUMBER_TYPE": 11, + "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT": 12, + "VANITY_PHONE_NUMBER_NOT_ALLOWED": 13, + "INVALID_CALL_CONVERSION_ACTION": 14, + "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING": 15, + "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 16, + "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED": 17, + "INVALID_APP_ID": 18, + "QUOTES_IN_REVIEW_EXTENSION_SNIPPET": 19, + "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET": 20, + "REVIEW_EXTENSION_SOURCE_INELIGIBLE": 21, + "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT": 22, + "INCONSISTENT_CURRENCY_CODES": 23, + "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS": 24, + "PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION": 25, + "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS": 26, + "PRICE_EXTENSION_HAS_TOO_MANY_ITEMS": 27, + "UNSUPPORTED_VALUE": 28, + "UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE": 29, + "INVALID_DEVICE_PREFERENCE": 30, + "INVALID_SCHEDULE_END": 31, + "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE": 32, + "INVALID_SNIPPETS_HEADER": 33, + "CANNOT_OPERATE_ON_REMOVED_FEED_ITEM": 34, + "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY": 35, + "CONFLICTING_CALL_CONVERSION_SETTINGS": 36, + "EXTENSION_TYPE_MISMATCH": 37, + "EXTENSION_SUBTYPE_REQUIRED": 38, + "EXTENSION_TYPE_UNSUPPORTED": 39, + "CANNOT_OPERATE_ON_FEED_WITH_MULTIPLE_MAPPINGS": 40, + "CANNOT_OPERATE_ON_FEED_WITH_KEY_ATTRIBUTES": 41, + "INVALID_PRICE_FORMAT": 42, + "PROMOTION_INVALID_TIME": 43, + "TOO_MANY_DECIMAL_PLACES_SPECIFIED": 44, +} + +func (x ExtensionFeedItemErrorEnum_ExtensionFeedItemError) String() string { + return proto.EnumName(ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name, int32(x)) +} +func (ExtensionFeedItemErrorEnum_ExtensionFeedItemError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_error_4484ec81d93b3330, []int{0, 0} +} + +// Container for enum describing possible extension feed item error. 
+type ExtensionFeedItemErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionFeedItemErrorEnum) Reset() { *m = ExtensionFeedItemErrorEnum{} } +func (m *ExtensionFeedItemErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ExtensionFeedItemErrorEnum) ProtoMessage() {} +func (*ExtensionFeedItemErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_error_4484ec81d93b3330, []int{0} +} +func (m *ExtensionFeedItemErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionFeedItemErrorEnum.Unmarshal(m, b) +} +func (m *ExtensionFeedItemErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionFeedItemErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ExtensionFeedItemErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionFeedItemErrorEnum.Merge(dst, src) +} +func (m *ExtensionFeedItemErrorEnum) XXX_Size() int { + return xxx_messageInfo_ExtensionFeedItemErrorEnum.Size(m) +} +func (m *ExtensionFeedItemErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionFeedItemErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionFeedItemErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ExtensionFeedItemErrorEnum)(nil), "google.ads.googleads.v1.errors.ExtensionFeedItemErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ExtensionFeedItemErrorEnum_ExtensionFeedItemError", ExtensionFeedItemErrorEnum_ExtensionFeedItemError_name, ExtensionFeedItemErrorEnum_ExtensionFeedItemError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/extension_feed_item_error.proto", fileDescriptor_extension_feed_item_error_4484ec81d93b3330) +} + +var fileDescriptor_extension_feed_item_error_4484ec81d93b3330 = []byte{ + // 1056 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x55, 0xdd, 0x8e, 0x53, 0x37, + 0x10, 0x2e, 0x4b, 0x0b, 0xad, 0x29, 0x60, 0x5c, 0x58, 0x60, 0x17, 0x96, 0x12, 0xfe, 0x29, 0x24, + 0xa4, 0xed, 0x45, 0x95, 0x56, 0x95, 0x1c, 0x9f, 0x49, 0x62, 0xe1, 0x63, 0x1b, 0xdb, 0x27, 0x21, + 0x68, 0xa5, 0xd1, 0xb6, 0x49, 0xa3, 0x95, 0xd8, 0x64, 0xb5, 0x49, 0x51, 0x9f, 0xa2, 0x0f, 0xd1, + 0xcb, 0x4a, 0x7d, 0x91, 0x3e, 0x4a, 0x1f, 0xa0, 0xd7, 0x95, 0x7d, 0x92, 0x90, 0xed, 0x86, 0xbd, + 0xca, 0xc9, 0xf8, 0x9b, 0xff, 0x6f, 0x66, 0xc8, 0x8f, 0xa3, 0xc9, 0x64, 0xf4, 0x76, 0x58, 0xdb, + 0x1b, 0x4c, 0x6b, 0xe5, 0x67, 0xfc, 0x7a, 0x57, 0xaf, 0x0d, 0x8f, 0x8e, 0x26, 0x47, 0xd3, 0xda, + 0xf0, 0xb7, 0xd9, 0x70, 0x3c, 0xdd, 0x9f, 0x8c, 0xf1, 0x97, 0xe1, 0x70, 0x80, 0xfb, 0xb3, 0xe1, + 0x01, 0xa6, 0xa7, 0xea, 0xe1, 0xd1, 0x64, 0x36, 0x61, 0x3b, 0xa5, 0x52, 0x75, 0x6f, 0x30, 0xad, + 0x2e, 0xf5, 0xab, 0xef, 0xea, 0xd5, 0x52, 0x7f, 0xeb, 0xd6, 0xc2, 0xfe, 0xe1, 0x7e, 0x6d, 0x6f, + 0x3c, 0x9e, 0xcc, 0xf6, 0x66, 0xfb, 0x93, 0xf1, 0xb4, 0xd4, 0xae, 0xfc, 0x75, 0x91, 0x6c, 0xc1, + 0xc2, 0x43, 0x6b, 0x38, 0x1c, 0xc8, 0xd9, 0xf0, 0x00, 0xa2, 0x26, 0x8c, 0x7f, 0x3d, 0xa8, 0xfc, + 0x7e, 0x91, 0x6c, 0xae, 0x7f, 0x66, 0x97, 0xc9, 0x85, 0x42, 0x7b, 0x0b, 0x42, 0xb6, 0x24, 0x64, + 0xf4, 0x23, 0x76, 0x81, 0x9c, 0x2f, 0xf4, 0x4b, 0x6d, 0x7a, 0x9a, 0x9e, 0x61, 0x9b, 0x84, 0x75, + 0xb9, 0x2a, 0x00, 0x4d, 0x11, 0xd0, 0xb4, 0xd0, 0x71, 0xdd, 0x06, 0xba, 0xc1, 0xae, 0x91, 0x2b, + 0x85, 0x53, 0xa8, 0xa4, 0x0f, 0x18, 0x8c, 0x41, 0x65, 0x74, 0x9b, 0x9e, 0x65, 0x5f, 0x93, 0xaa, + 0xe0, 0x5a, 0x9b, 
0x80, 0x1d, 0xde, 0x05, 0x74, 0xe0, 0x83, 0x93, 0x22, 0x48, 0xa3, 0xd1, 0x68, + 0x84, 0xdc, 0x86, 0x3e, 0xb6, 0xc1, 0x60, 0xe0, 0xae, 0x0d, 0x41, 0xea, 0x36, 0xfd, 0x98, 0xed, + 0x90, 0xad, 0xb9, 0x8e, 0x87, 0x80, 0x3d, 0x19, 0x3a, 0xd8, 0x92, 0x9a, 0x2b, 0x2c, 0x9c, 0xf2, + 0xf4, 0x13, 0x76, 0x97, 0xdc, 0xfe, 0xdf, 0x7b, 0x8c, 0x65, 0x05, 0x72, 0x8e, 0xdd, 0x20, 0x57, + 0xa5, 0xee, 0x72, 0x25, 0x33, 0xb4, 0x1d, 0xa3, 0x01, 0x75, 0x91, 0x37, 0xc1, 0xd1, 0xf3, 0xec, + 0x29, 0x79, 0xb8, 0x2a, 0xc1, 0x64, 0xa6, 0xb0, 0xd6, 0xb8, 0x00, 0x19, 0xb6, 0x8c, 0x43, 0x61, + 0x0a, 0x1d, 0x5c, 0x9f, 0x7e, 0xca, 0x9e, 0x93, 0x27, 0x82, 0x3b, 0x27, 0xc1, 0xe1, 0xbc, 0x1e, + 0x02, 0x7d, 0xc7, 0xb8, 0xb0, 0xaa, 0xcc, 0x95, 0x32, 0x3d, 0xc8, 0xe8, 0x67, 0xec, 0x1e, 0xb9, + 0x63, 0x1d, 0xe4, 0xb2, 0xc8, 0xd1, 0xf1, 0x00, 0xeb, 0x40, 0x84, 0x6d, 0x91, 0xcd, 0x4c, 0xfa, + 0xf9, 0xff, 0x05, 0x24, 0xf4, 0x2d, 0xd0, 0x0b, 0xec, 0x31, 0xb9, 0xbf, 0x88, 0x3a, 0x33, 0x39, + 0xf8, 0x20, 0xc5, 0xb1, 0xf0, 0x63, 0x78, 0x39, 0x0f, 0xf4, 0xf3, 0xe8, 0xaa, 0xcb, 0xb5, 0x0c, + 0x7d, 0x3c, 0x91, 0xcc, 0xc2, 0xd5, 0x45, 0x56, 0x21, 0x3b, 0x0b, 0x73, 0x82, 0x2b, 0x85, 0xc2, + 0xe8, 0x2e, 0x38, 0x1f, 0x6b, 0xcf, 0x53, 0x0b, 0xe8, 0xa5, 0x94, 0x62, 0xe1, 0x83, 0xc9, 0xe7, + 0xda, 0xbd, 0x8e, 0x0c, 0x10, 0x9b, 0xb8, 0x28, 0x06, 0x57, 0x2a, 0x38, 0x2e, 0x5e, 0xc6, 0xd6, + 0x5c, 0x8e, 0xd5, 0x5b, 0x95, 0x9c, 0x52, 0x3d, 0xca, 0x5e, 0x90, 0x67, 0x4b, 0xd3, 0xc2, 0x68, + 0x0f, 0x3a, 0x2c, 0x4d, 0xa2, 0x03, 0x61, 0x5c, 0x16, 0x4d, 0x38, 0x78, 0x55, 0x48, 0x07, 0x19, + 0xbd, 0xc2, 0x18, 0xb9, 0xb4, 0x08, 0x98, 0x5b, 0x8b, 0x32, 0xa3, 0x8c, 0x3d, 0x24, 0x95, 0x57, + 0x85, 0x09, 0xe0, 0x51, 0x6a, 0x74, 0xd0, 0x95, 0xd0, 0x43, 0x78, 0x1d, 0x40, 0xa7, 0x3c, 0xbc, + 0x96, 0xd6, 0x42, 0xa0, 0x5f, 0xb0, 0x47, 0xe4, 0x5e, 0xa7, 0x6f, 0x3b, 0xa0, 0x4f, 0x07, 0x5e, + 0x8d, 0x06, 0x4f, 0xbe, 0x9a, 0xc2, 0x09, 0x40, 0xa9, 0x41, 0xc9, 0xb6, 0x6c, 0x2a, 0xa0, 0xd7, + 0x62, 0x33, 0xe6, 0x62, 0xcd, 0x73, 0x58, 0x6b, 0x34, 0xc0, 0xeb, 0x40, 0x37, 0xd9, 0x1d, 0xb2, + 0x2d, 0x75, 0xcc, 0x30, 0x96, 0x4d, 0x07, 0x14, 0x85, 0x73, 0xa0, 0x45, 0x1f, 0x85, 0xc9, 0xc0, + 0xd3, 0xeb, 0x89, 0x73, 0x4e, 0x0a, 0x58, 0x51, 0xed, 0x70, 0x8f, 0x59, 0x61, 0x95, 0x14, 0x3c, + 0x56, 0xae, 0x03, 0x3c, 0x03, 0xe7, 0xe9, 0x0d, 0xf6, 0x2d, 0x79, 0x51, 0x62, 0x65, 0x80, 0x7c, + 0x3d, 0x0c, 0xb9, 0xce, 0x30, 0x03, 0x2f, 0x9c, 0xb4, 0xa9, 0x8d, 0x37, 0xd9, 0x03, 0x72, 0x77, + 0x9d, 0x87, 0x38, 0x88, 0x2d, 0xe8, 0x25, 0x5b, 0x9e, 0x6e, 0xc5, 0xdc, 0x3f, 0x04, 0xcb, 0xb9, + 0xee, 0xcf, 0x71, 0xdb, 0x69, 0x98, 0xf5, 0xfb, 0xbe, 0xa6, 0x81, 0xa7, 0xb7, 0x62, 0x1e, 0x27, + 0xc4, 0xb1, 0x30, 0x1e, 0x14, 0x88, 0x28, 0x51, 0x5c, 0xb7, 0x0b, 0xde, 0x06, 0x7a, 0x9b, 0xdd, + 0x26, 0x37, 0x97, 0x5c, 0x86, 0x6e, 0xf4, 0x69, 0x1d, 0xb4, 0x20, 0x16, 0x06, 0xe8, 0xce, 0xea, + 0x80, 0x7a, 0xd1, 0x81, 0xac, 0x50, 0x80, 0xa0, 0x33, 0x7a, 0x27, 0x3a, 0xc9, 0xe2, 0xf4, 0x04, + 0x99, 0x03, 0xe6, 0x85, 0x0f, 0xd8, 0x4c, 0x4e, 0xb8, 0x48, 0xd4, 0x2a, 0xe5, 0x6f, 0x8c, 0x06, + 0xfa, 0x25, 0xdb, 0x26, 0xd7, 0x97, 0x56, 0xca, 0x06, 0xfb, 0x79, 0x8d, 0xe8, 0xdd, 0xc8, 0x88, + 0xf9, 0x9a, 0x30, 0x16, 0xd2, 0x40, 0x9a, 0xd8, 0xc3, 0xdc, 0x74, 0x23, 0x55, 0x01, 0xb2, 0x94, + 0x2e, 0xad, 0xb0, 0x1f, 0xc8, 0x77, 0xa7, 0xac, 0x84, 0xb4, 0x7f, 0x8e, 0x91, 0x7e, 0x95, 0xe6, + 0xf7, 0x22, 0x4f, 0x84, 0xd1, 0x2d, 0x15, 0xb7, 0x9a, 0x6e, 0x9f, 0x98, 0x34, 0x0f, 0x21, 0xca, + 0x3d, 0xbd, 0x1f, 0xa3, 0x5d, 0xe1, 0x4e, 0xdf, 0x02, 0xe6, 0xd2, 0xe7, 0x3c, 0x88, 0x0e, 0x7d, + 0x10, 0x97, 0xde, 0x0a, 0x1f, 0x8b, 0x66, 
0x7a, 0x5f, 0xce, 0xc6, 0xc3, 0xe3, 0xef, 0xe9, 0x71, + 0xa5, 0x15, 0xf4, 0x11, 0xab, 0x93, 0xe7, 0x27, 0xb3, 0x4d, 0x59, 0xa6, 0x04, 0xf2, 0x42, 0x05, + 0x69, 0x15, 0x60, 0xce, 0xad, 0x4d, 0xf1, 0x3c, 0x66, 0x55, 0xf2, 0xf4, 0x34, 0x95, 0x97, 0xd0, + 0x47, 0x1e, 0x82, 0x93, 0xcd, 0x22, 0x80, 0xa7, 0x4f, 0x8e, 0x2d, 0xd5, 0xc4, 0xa2, 0xf9, 0x3a, + 0x7a, 0x1a, 0x97, 0x9a, 0x75, 0x26, 0x37, 0x69, 0xaf, 0x2f, 0x30, 0xb1, 0x51, 0xf4, 0xab, 0x48, + 0xcd, 0x25, 0xbf, 0x32, 0x10, 0x32, 0xe7, 0x0a, 0xad, 0xe2, 0x02, 0x3c, 0xbe, 0x3f, 0x32, 0xcf, + 0x9a, 0xff, 0x9e, 0x21, 0x95, 0x9f, 0x27, 0x07, 0xd5, 0xd3, 0x8f, 0x5e, 0x73, 0x7b, 0xfd, 0xd1, + 0xb2, 0xf1, 0xe6, 0xd9, 0x33, 0x6f, 0xb2, 0xb9, 0xfa, 0x68, 0xf2, 0x76, 0x6f, 0x3c, 0xaa, 0x4e, + 0x8e, 0x46, 0xb5, 0xd1, 0x70, 0x9c, 0x2e, 0xe2, 0xe2, 0x06, 0x1f, 0xee, 0x4f, 0x3f, 0x74, 0x92, + 0xbf, 0x2f, 0x7f, 0xfe, 0xd8, 0x38, 0xdb, 0xe6, 0xfc, 0xcf, 0x8d, 0x9d, 0x76, 0x69, 0x8c, 0x0f, + 0xa6, 0xd5, 0xf2, 0x33, 0x7e, 0x75, 0xeb, 0xd5, 0xe4, 0x72, 0xfa, 0xf7, 0x02, 0xb0, 0xcb, 0x07, + 0xd3, 0xdd, 0x25, 0x60, 0xb7, 0x5b, 0xdf, 0x2d, 0x01, 0xff, 0x6c, 0x54, 0x4a, 0x69, 0xa3, 0xc1, + 0x07, 0xd3, 0x46, 0x63, 0x09, 0x69, 0x34, 0xba, 0xf5, 0x46, 0xa3, 0x04, 0xfd, 0x74, 0x2e, 0x45, + 0xf7, 0xcd, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x95, 0x61, 0x9f, 0x2f, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_setting_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_setting_error.pb.go new file mode 100644 index 0000000..b197725 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/extension_setting_error.pb.go @@ -0,0 +1,450 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/extension_setting_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible extension setting errors. +type ExtensionSettingErrorEnum_ExtensionSettingError int32 + +const ( + // Enum unspecified. + ExtensionSettingErrorEnum_UNSPECIFIED ExtensionSettingErrorEnum_ExtensionSettingError = 0 + // The received error code is not known in this version. + ExtensionSettingErrorEnum_UNKNOWN ExtensionSettingErrorEnum_ExtensionSettingError = 1 + // A platform restriction was provided without input extensions or existing + // extensions. + ExtensionSettingErrorEnum_EXTENSIONS_REQUIRED ExtensionSettingErrorEnum_ExtensionSettingError = 2 + // The provided feed type does not correspond to the provided extensions. + ExtensionSettingErrorEnum_FEED_TYPE_EXTENSION_TYPE_MISMATCH ExtensionSettingErrorEnum_ExtensionSettingError = 3 + // The provided feed type cannot be used. + ExtensionSettingErrorEnum_INVALID_FEED_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 4 + // The provided feed type cannot be used at the customer level. 
+ ExtensionSettingErrorEnum_INVALID_FEED_TYPE_FOR_CUSTOMER_EXTENSION_SETTING ExtensionSettingErrorEnum_ExtensionSettingError = 5 + // Cannot change a feed item field on a CREATE operation. + ExtensionSettingErrorEnum_CANNOT_CHANGE_FEED_ITEM_ON_CREATE ExtensionSettingErrorEnum_ExtensionSettingError = 6 + // Cannot update an extension that is not already in this setting. + ExtensionSettingErrorEnum_CANNOT_UPDATE_NEWLY_CREATED_EXTENSION ExtensionSettingErrorEnum_ExtensionSettingError = 7 + // There is no existing AdGroupExtensionSetting for this type. + ExtensionSettingErrorEnum_NO_EXISTING_AD_GROUP_EXTENSION_SETTING_FOR_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 8 + // There is no existing CampaignExtensionSetting for this type. + ExtensionSettingErrorEnum_NO_EXISTING_CAMPAIGN_EXTENSION_SETTING_FOR_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 9 + // There is no existing CustomerExtensionSetting for this type. + ExtensionSettingErrorEnum_NO_EXISTING_CUSTOMER_EXTENSION_SETTING_FOR_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 10 + // The AdGroupExtensionSetting already exists. UPDATE should be used to + // modify the existing AdGroupExtensionSetting. + ExtensionSettingErrorEnum_AD_GROUP_EXTENSION_SETTING_ALREADY_EXISTS ExtensionSettingErrorEnum_ExtensionSettingError = 11 + // The CampaignExtensionSetting already exists. UPDATE should be used to + // modify the existing CampaignExtensionSetting. + ExtensionSettingErrorEnum_CAMPAIGN_EXTENSION_SETTING_ALREADY_EXISTS ExtensionSettingErrorEnum_ExtensionSettingError = 12 + // The CustomerExtensionSetting already exists. UPDATE should be used to + // modify the existing CustomerExtensionSetting. + ExtensionSettingErrorEnum_CUSTOMER_EXTENSION_SETTING_ALREADY_EXISTS ExtensionSettingErrorEnum_ExtensionSettingError = 13 + // An active ad group feed already exists for this place holder type. + ExtensionSettingErrorEnum_AD_GROUP_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 14 + // An active campaign feed already exists for this place holder type. + ExtensionSettingErrorEnum_CAMPAIGN_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 15 + // An active customer feed already exists for this place holder type. + ExtensionSettingErrorEnum_CUSTOMER_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 16 + // Value is not within the accepted range. + ExtensionSettingErrorEnum_VALUE_OUT_OF_RANGE ExtensionSettingErrorEnum_ExtensionSettingError = 17 + // Cannot simultaneously set specified field with final urls. + ExtensionSettingErrorEnum_CANNOT_SET_FIELD_WITH_FINAL_URLS ExtensionSettingErrorEnum_ExtensionSettingError = 18 + // Must set field with final urls. + ExtensionSettingErrorEnum_FINAL_URLS_NOT_SET ExtensionSettingErrorEnum_ExtensionSettingError = 19 + // Phone number for a call extension is invalid. + ExtensionSettingErrorEnum_INVALID_PHONE_NUMBER ExtensionSettingErrorEnum_ExtensionSettingError = 20 + // Phone number for a call extension is not supported for the given country + // code. + ExtensionSettingErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY ExtensionSettingErrorEnum_ExtensionSettingError = 21 + // A carrier specific number in short format is not allowed for call + // extensions. + ExtensionSettingErrorEnum_CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED ExtensionSettingErrorEnum_ExtensionSettingError = 22 + // Premium rate numbers are not allowed for call extensions. 
+ ExtensionSettingErrorEnum_PREMIUM_RATE_NUMBER_NOT_ALLOWED ExtensionSettingErrorEnum_ExtensionSettingError = 23 + // Phone number type for a call extension is not allowed. + ExtensionSettingErrorEnum_DISALLOWED_NUMBER_TYPE ExtensionSettingErrorEnum_ExtensionSettingError = 24 + // Phone number for a call extension does not meet domestic format + // requirements. + ExtensionSettingErrorEnum_INVALID_DOMESTIC_PHONE_NUMBER_FORMAT ExtensionSettingErrorEnum_ExtensionSettingError = 25 + // Vanity phone numbers (i.e. those including letters) are not allowed for + // call extensions. + ExtensionSettingErrorEnum_VANITY_PHONE_NUMBER_NOT_ALLOWED ExtensionSettingErrorEnum_ExtensionSettingError = 26 + // Country code provided for a call extension is invalid. + ExtensionSettingErrorEnum_INVALID_COUNTRY_CODE ExtensionSettingErrorEnum_ExtensionSettingError = 27 + // Call conversion type id provided for a call extension is invalid. + ExtensionSettingErrorEnum_INVALID_CALL_CONVERSION_TYPE_ID ExtensionSettingErrorEnum_ExtensionSettingError = 28 + // For a call extension, the customer is not whitelisted for call tracking. + ExtensionSettingErrorEnum_CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING ExtensionSettingErrorEnum_ExtensionSettingError = 29 + // Call tracking is not supported for the given country for a call + // extension. + ExtensionSettingErrorEnum_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY ExtensionSettingErrorEnum_ExtensionSettingError = 30 + // App id provided for an app extension is invalid. + ExtensionSettingErrorEnum_INVALID_APP_ID ExtensionSettingErrorEnum_ExtensionSettingError = 31 + // Quotation marks present in the review text for a review extension. + ExtensionSettingErrorEnum_QUOTES_IN_REVIEW_EXTENSION_SNIPPET ExtensionSettingErrorEnum_ExtensionSettingError = 32 + // Hyphen character present in the review text for a review extension. + ExtensionSettingErrorEnum_HYPHENS_IN_REVIEW_EXTENSION_SNIPPET ExtensionSettingErrorEnum_ExtensionSettingError = 33 + // A blacklisted review source name or url was provided for a review + // extension. + ExtensionSettingErrorEnum_REVIEW_EXTENSION_SOURCE_NOT_ELIGIBLE ExtensionSettingErrorEnum_ExtensionSettingError = 34 + // Review source name should not be found in the review text. + ExtensionSettingErrorEnum_SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT ExtensionSettingErrorEnum_ExtensionSettingError = 35 + // Field must be set. + ExtensionSettingErrorEnum_MISSING_FIELD ExtensionSettingErrorEnum_ExtensionSettingError = 36 + // Inconsistent currency codes. + ExtensionSettingErrorEnum_INCONSISTENT_CURRENCY_CODES ExtensionSettingErrorEnum_ExtensionSettingError = 37 + // Price extension cannot have duplicated headers. + ExtensionSettingErrorEnum_PRICE_EXTENSION_HAS_DUPLICATED_HEADERS ExtensionSettingErrorEnum_ExtensionSettingError = 38 + // Price item cannot have duplicated header and description. + ExtensionSettingErrorEnum_PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION ExtensionSettingErrorEnum_ExtensionSettingError = 39 + // Price extension has too few items + ExtensionSettingErrorEnum_PRICE_EXTENSION_HAS_TOO_FEW_ITEMS ExtensionSettingErrorEnum_ExtensionSettingError = 40 + // Price extension has too many items + ExtensionSettingErrorEnum_PRICE_EXTENSION_HAS_TOO_MANY_ITEMS ExtensionSettingErrorEnum_ExtensionSettingError = 41 + // The input value is not currently supported. + ExtensionSettingErrorEnum_UNSUPPORTED_VALUE ExtensionSettingErrorEnum_ExtensionSettingError = 42 + // Unknown or unsupported device preference. 
+ ExtensionSettingErrorEnum_INVALID_DEVICE_PREFERENCE ExtensionSettingErrorEnum_ExtensionSettingError = 43 + // Invalid feed item schedule end time (i.e., endHour = 24 and + // endMinute != 0). + ExtensionSettingErrorEnum_INVALID_SCHEDULE_END ExtensionSettingErrorEnum_ExtensionSettingError = 45 + // Date time zone does not match the account's time zone. + ExtensionSettingErrorEnum_DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE ExtensionSettingErrorEnum_ExtensionSettingError = 47 + // Overlapping feed item schedule times (e.g., 7-10AM and 8-11AM) are not + // allowed. + ExtensionSettingErrorEnum_OVERLAPPING_SCHEDULES_NOT_ALLOWED ExtensionSettingErrorEnum_ExtensionSettingError = 48 + // Feed item schedule end time must be after start time. + ExtensionSettingErrorEnum_SCHEDULE_END_NOT_AFTER_START ExtensionSettingErrorEnum_ExtensionSettingError = 49 + // There are too many feed item schedules per day. + ExtensionSettingErrorEnum_TOO_MANY_SCHEDULES_PER_DAY ExtensionSettingErrorEnum_ExtensionSettingError = 50 + // Cannot edit the same extension feed item more than once in the same + // request. + ExtensionSettingErrorEnum_DUPLICATE_EXTENSION_FEED_ITEM_EDIT ExtensionSettingErrorEnum_ExtensionSettingError = 51 + // Invalid structured snippet header. + ExtensionSettingErrorEnum_INVALID_SNIPPETS_HEADER ExtensionSettingErrorEnum_ExtensionSettingError = 52 + // Phone number with call tracking enabled is not supported for the + // specified country. + ExtensionSettingErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY ExtensionSettingErrorEnum_ExtensionSettingError = 53 + // The targeted adgroup must belong to the targeted campaign. + ExtensionSettingErrorEnum_CAMPAIGN_TARGETING_MISMATCH ExtensionSettingErrorEnum_ExtensionSettingError = 54 + // The feed used by the ExtensionSetting is removed and cannot be operated + // on. Remove the ExtensionSetting to allow a new one to be created using + // an active feed. + ExtensionSettingErrorEnum_CANNOT_OPERATE_ON_REMOVED_FEED ExtensionSettingErrorEnum_ExtensionSettingError = 55 + // The ExtensionFeedItem type is required for this operation. + ExtensionSettingErrorEnum_EXTENSION_TYPE_REQUIRED ExtensionSettingErrorEnum_ExtensionSettingError = 56 + // The matching function that links the extension feed to the customer, + // campaign, or ad group is not compatible with the ExtensionSetting + // services. + ExtensionSettingErrorEnum_INCOMPATIBLE_UNDERLYING_MATCHING_FUNCTION ExtensionSettingErrorEnum_ExtensionSettingError = 57 + // Start date must be before end date. + ExtensionSettingErrorEnum_START_DATE_AFTER_END_DATE ExtensionSettingErrorEnum_ExtensionSettingError = 58 + // Input price is not in a valid format. + ExtensionSettingErrorEnum_INVALID_PRICE_FORMAT ExtensionSettingErrorEnum_ExtensionSettingError = 59 + // The promotion time is invalid. + ExtensionSettingErrorEnum_PROMOTION_INVALID_TIME ExtensionSettingErrorEnum_ExtensionSettingError = 60 + // Cannot set both percent discount and money discount fields. + ExtensionSettingErrorEnum_PROMOTION_CANNOT_SET_PERCENT_DISCOUNT_AND_MONEY_DISCOUNT ExtensionSettingErrorEnum_ExtensionSettingError = 61 + // Cannot set both promotion code and orders over amount fields. + ExtensionSettingErrorEnum_PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT ExtensionSettingErrorEnum_ExtensionSettingError = 62 + // This field has too many decimal places specified. + ExtensionSettingErrorEnum_TOO_MANY_DECIMAL_PLACES_SPECIFIED ExtensionSettingErrorEnum_ExtensionSettingError = 63 + // The language code is not valid. 
+ ExtensionSettingErrorEnum_INVALID_LANGUAGE_CODE ExtensionSettingErrorEnum_ExtensionSettingError = 64 + // The language is not supported. + ExtensionSettingErrorEnum_UNSUPPORTED_LANGUAGE ExtensionSettingErrorEnum_ExtensionSettingError = 65 + // Customer hasn't consented for call recording, which is required for + // adding/updating call extensions. Please see + // https://support.google.com/google-ads/answer/7412639. + ExtensionSettingErrorEnum_CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED ExtensionSettingErrorEnum_ExtensionSettingError = 66 + // The UPDATE operation does not specify any fields other than the resource + // name in the update mask. + ExtensionSettingErrorEnum_EXTENSION_SETTING_UPDATE_IS_A_NOOP ExtensionSettingErrorEnum_ExtensionSettingError = 67 +) + +var ExtensionSettingErrorEnum_ExtensionSettingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EXTENSIONS_REQUIRED", + 3: "FEED_TYPE_EXTENSION_TYPE_MISMATCH", + 4: "INVALID_FEED_TYPE", + 5: "INVALID_FEED_TYPE_FOR_CUSTOMER_EXTENSION_SETTING", + 6: "CANNOT_CHANGE_FEED_ITEM_ON_CREATE", + 7: "CANNOT_UPDATE_NEWLY_CREATED_EXTENSION", + 8: "NO_EXISTING_AD_GROUP_EXTENSION_SETTING_FOR_TYPE", + 9: "NO_EXISTING_CAMPAIGN_EXTENSION_SETTING_FOR_TYPE", + 10: "NO_EXISTING_CUSTOMER_EXTENSION_SETTING_FOR_TYPE", + 11: "AD_GROUP_EXTENSION_SETTING_ALREADY_EXISTS", + 12: "CAMPAIGN_EXTENSION_SETTING_ALREADY_EXISTS", + 13: "CUSTOMER_EXTENSION_SETTING_ALREADY_EXISTS", + 14: "AD_GROUP_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 15: "CAMPAIGN_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 16: "CUSTOMER_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE", + 17: "VALUE_OUT_OF_RANGE", + 18: "CANNOT_SET_FIELD_WITH_FINAL_URLS", + 19: "FINAL_URLS_NOT_SET", + 20: "INVALID_PHONE_NUMBER", + 21: "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY", + 22: "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED", + 23: "PREMIUM_RATE_NUMBER_NOT_ALLOWED", + 24: "DISALLOWED_NUMBER_TYPE", + 25: "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT", + 26: "VANITY_PHONE_NUMBER_NOT_ALLOWED", + 27: "INVALID_COUNTRY_CODE", + 28: "INVALID_CALL_CONVERSION_TYPE_ID", + 29: "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING", + 30: "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY", + 31: "INVALID_APP_ID", + 32: "QUOTES_IN_REVIEW_EXTENSION_SNIPPET", + 33: "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET", + 34: "REVIEW_EXTENSION_SOURCE_NOT_ELIGIBLE", + 35: "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT", + 36: "MISSING_FIELD", + 37: "INCONSISTENT_CURRENCY_CODES", + 38: "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS", + 39: "PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION", + 40: "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS", + 41: "PRICE_EXTENSION_HAS_TOO_MANY_ITEMS", + 42: "UNSUPPORTED_VALUE", + 43: "INVALID_DEVICE_PREFERENCE", + 45: "INVALID_SCHEDULE_END", + 47: "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE", + 48: "OVERLAPPING_SCHEDULES_NOT_ALLOWED", + 49: "SCHEDULE_END_NOT_AFTER_START", + 50: "TOO_MANY_SCHEDULES_PER_DAY", + 51: "DUPLICATE_EXTENSION_FEED_ITEM_EDIT", + 52: "INVALID_SNIPPETS_HEADER", + 53: "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY", + 54: "CAMPAIGN_TARGETING_MISMATCH", + 55: "CANNOT_OPERATE_ON_REMOVED_FEED", + 56: "EXTENSION_TYPE_REQUIRED", + 57: "INCOMPATIBLE_UNDERLYING_MATCHING_FUNCTION", + 58: "START_DATE_AFTER_END_DATE", + 59: "INVALID_PRICE_FORMAT", + 60: "PROMOTION_INVALID_TIME", + 61: "PROMOTION_CANNOT_SET_PERCENT_DISCOUNT_AND_MONEY_DISCOUNT", + 62: "PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT", + 63: "TOO_MANY_DECIMAL_PLACES_SPECIFIED", + 64: "INVALID_LANGUAGE_CODE", + 65: "UNSUPPORTED_LANGUAGE", + 66: 
"CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED", + 67: "EXTENSION_SETTING_UPDATE_IS_A_NOOP", +} +var ExtensionSettingErrorEnum_ExtensionSettingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EXTENSIONS_REQUIRED": 2, + "FEED_TYPE_EXTENSION_TYPE_MISMATCH": 3, + "INVALID_FEED_TYPE": 4, + "INVALID_FEED_TYPE_FOR_CUSTOMER_EXTENSION_SETTING": 5, + "CANNOT_CHANGE_FEED_ITEM_ON_CREATE": 6, + "CANNOT_UPDATE_NEWLY_CREATED_EXTENSION": 7, + "NO_EXISTING_AD_GROUP_EXTENSION_SETTING_FOR_TYPE": 8, + "NO_EXISTING_CAMPAIGN_EXTENSION_SETTING_FOR_TYPE": 9, + "NO_EXISTING_CUSTOMER_EXTENSION_SETTING_FOR_TYPE": 10, + "AD_GROUP_EXTENSION_SETTING_ALREADY_EXISTS": 11, + "CAMPAIGN_EXTENSION_SETTING_ALREADY_EXISTS": 12, + "CUSTOMER_EXTENSION_SETTING_ALREADY_EXISTS": 13, + "AD_GROUP_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 14, + "CAMPAIGN_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 15, + "CUSTOMER_FEED_ALREADY_EXISTS_FOR_PLACEHOLDER_TYPE": 16, + "VALUE_OUT_OF_RANGE": 17, + "CANNOT_SET_FIELD_WITH_FINAL_URLS": 18, + "FINAL_URLS_NOT_SET": 19, + "INVALID_PHONE_NUMBER": 20, + "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY": 21, + "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED": 22, + "PREMIUM_RATE_NUMBER_NOT_ALLOWED": 23, + "DISALLOWED_NUMBER_TYPE": 24, + "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT": 25, + "VANITY_PHONE_NUMBER_NOT_ALLOWED": 26, + "INVALID_COUNTRY_CODE": 27, + "INVALID_CALL_CONVERSION_TYPE_ID": 28, + "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING": 29, + "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 30, + "INVALID_APP_ID": 31, + "QUOTES_IN_REVIEW_EXTENSION_SNIPPET": 32, + "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET": 33, + "REVIEW_EXTENSION_SOURCE_NOT_ELIGIBLE": 34, + "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT": 35, + "MISSING_FIELD": 36, + "INCONSISTENT_CURRENCY_CODES": 37, + "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS": 38, + "PRICE_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION": 39, + "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS": 40, + "PRICE_EXTENSION_HAS_TOO_MANY_ITEMS": 41, + "UNSUPPORTED_VALUE": 42, + "INVALID_DEVICE_PREFERENCE": 43, + "INVALID_SCHEDULE_END": 45, + "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE": 47, + "OVERLAPPING_SCHEDULES_NOT_ALLOWED": 48, + "SCHEDULE_END_NOT_AFTER_START": 49, + "TOO_MANY_SCHEDULES_PER_DAY": 50, + "DUPLICATE_EXTENSION_FEED_ITEM_EDIT": 51, + "INVALID_SNIPPETS_HEADER": 52, + "PHONE_NUMBER_NOT_SUPPORTED_WITH_CALLTRACKING_FOR_COUNTRY": 53, + "CAMPAIGN_TARGETING_MISMATCH": 54, + "CANNOT_OPERATE_ON_REMOVED_FEED": 55, + "EXTENSION_TYPE_REQUIRED": 56, + "INCOMPATIBLE_UNDERLYING_MATCHING_FUNCTION": 57, + "START_DATE_AFTER_END_DATE": 58, + "INVALID_PRICE_FORMAT": 59, + "PROMOTION_INVALID_TIME": 60, + "PROMOTION_CANNOT_SET_PERCENT_DISCOUNT_AND_MONEY_DISCOUNT": 61, + "PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT": 62, + "TOO_MANY_DECIMAL_PLACES_SPECIFIED": 63, + "INVALID_LANGUAGE_CODE": 64, + "UNSUPPORTED_LANGUAGE": 65, + "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED": 66, + "EXTENSION_SETTING_UPDATE_IS_A_NOOP": 67, +} + +func (x ExtensionSettingErrorEnum_ExtensionSettingError) String() string { + return proto.EnumName(ExtensionSettingErrorEnum_ExtensionSettingError_name, int32(x)) +} +func (ExtensionSettingErrorEnum_ExtensionSettingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_extension_setting_error_36d74918ab80bb6d, []int{0, 0} +} + +// Container for enum describing validation errors of extension settings. 
+type ExtensionSettingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionSettingErrorEnum) Reset() { *m = ExtensionSettingErrorEnum{} } +func (m *ExtensionSettingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ExtensionSettingErrorEnum) ProtoMessage() {} +func (*ExtensionSettingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_setting_error_36d74918ab80bb6d, []int{0} +} +func (m *ExtensionSettingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionSettingErrorEnum.Unmarshal(m, b) +} +func (m *ExtensionSettingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionSettingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ExtensionSettingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionSettingErrorEnum.Merge(dst, src) +} +func (m *ExtensionSettingErrorEnum) XXX_Size() int { + return xxx_messageInfo_ExtensionSettingErrorEnum.Size(m) +} +func (m *ExtensionSettingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionSettingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionSettingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ExtensionSettingErrorEnum)(nil), "google.ads.googleads.v1.errors.ExtensionSettingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ExtensionSettingErrorEnum_ExtensionSettingError", ExtensionSettingErrorEnum_ExtensionSettingError_name, ExtensionSettingErrorEnum_ExtensionSettingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/extension_setting_error.proto", fileDescriptor_extension_setting_error_36d74918ab80bb6d) +} + +var fileDescriptor_extension_setting_error_36d74918ab80bb6d = []byte{ + // 1313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0x5d, 0x73, 0x53, 0xb7, + 0x16, 0xbd, 0x84, 0x7b, 0xe1, 0x5e, 0x71, 0x01, 0x21, 0x08, 0x90, 0x00, 0x01, 0xc2, 0x37, 0x2d, + 0x36, 0x29, 0xd0, 0x52, 0x43, 0x69, 0x95, 0xa3, 0x6d, 0x5b, 0xc3, 0x39, 0x92, 0x90, 0x74, 0x1c, + 0xcc, 0x64, 0x46, 0x93, 0x36, 0x99, 0x4c, 0x66, 0xc0, 0x66, 0xe2, 0x94, 0xe9, 0x4f, 0xe9, 0x73, + 0x1f, 0xfb, 0x53, 0xfa, 0x53, 0xfa, 0xde, 0x97, 0x3e, 0x75, 0xb6, 0xce, 0x87, 0x9d, 0x38, 0xf1, + 0xf0, 0x64, 0x1f, 0x69, 0xad, 0xbd, 0xb7, 0xb6, 0x96, 0x96, 0x44, 0x5e, 0x6e, 0x0f, 0x87, 0xdb, + 0xef, 0xb7, 0x9a, 0x1b, 0x9b, 0xa3, 0x66, 0xf1, 0x17, 0xff, 0x7d, 0x5a, 0x69, 0x6e, 0xed, 0xee, + 0x0e, 0x77, 0x47, 0xcd, 0xad, 0x5f, 0xf6, 0xb6, 0x06, 0xa3, 0x9d, 0xe1, 0x20, 0x8c, 0xb6, 0xf6, + 0xf6, 0x76, 0x06, 0xdb, 0x21, 0x4e, 0x34, 0x3e, 0xee, 0x0e, 0xf7, 0x86, 0x6c, 0xa9, 0xa0, 0x34, + 0x36, 0x36, 0x47, 0x8d, 0x9a, 0xdd, 0xf8, 0xb4, 0xd2, 0x28, 0xd8, 0x8b, 0x57, 0xab, 0xe8, 0x1f, + 0x77, 0x9a, 0x1b, 0x83, 0xc1, 0x70, 0x6f, 0x63, 0x6f, 0x67, 0x38, 0x18, 0x15, 0xec, 0xe5, 0x5f, + 0x2f, 0x90, 0x05, 0xa8, 0xe2, 0xbb, 0x22, 0x3c, 0x20, 0x11, 0x06, 0x3f, 0x7f, 0x58, 0xfe, 0xfb, + 0x3c, 0x99, 0x3f, 0x74, 0x96, 0x9d, 0x25, 0xa7, 0x72, 0xe5, 0x0c, 0x24, 0xb2, 0x2d, 0x41, 0xd0, + 0x7f, 0xb1, 0x53, 0xe4, 0x64, 0xae, 0x5e, 0x2b, 0xbd, 0xa6, 0xe8, 0x31, 0x76, 0x89, 0x9c, 0x87, + 0xb7, 0x1e, 0x94, 0x93, 0x5a, 0xb9, 0x60, 0xe1, 0x4d, 0x2e, 0x2d, 0x08, 0x3a, 0xc7, 0xee, 0x90, + 0x9b, 0x6d, 0x00, 0x11, 0x7c, 0xdf, 0x40, 0xa8, 0x21, 0xc5, 0x67, 0x26, 0x5d, 0xc6, 0x7d, 0xd2, + 0xa5, 0xc7, 0xd9, 0x3c, 0x39, 0x27, 0x55, 0x8f, 0xa7, 
0x52, 0x84, 0x1a, 0x4e, 0xff, 0xcd, 0x9e, + 0x92, 0xc7, 0x53, 0xc3, 0xa1, 0xad, 0x6d, 0x48, 0x72, 0xe7, 0x75, 0x06, 0x76, 0x22, 0xa4, 0x03, + 0xef, 0xa5, 0xea, 0xd0, 0xff, 0x60, 0xce, 0x84, 0x2b, 0xa5, 0x7d, 0x48, 0xba, 0x5c, 0x75, 0xa0, + 0xe0, 0x4a, 0x0f, 0x59, 0xd0, 0x2a, 0x24, 0x16, 0xb8, 0x07, 0x7a, 0x82, 0x3d, 0x20, 0x77, 0x4a, + 0x58, 0x6e, 0x04, 0xf7, 0x10, 0x14, 0xac, 0xa5, 0xfd, 0x72, 0x5e, 0x8c, 0x23, 0xd3, 0x93, 0xec, + 0x09, 0x69, 0x2a, 0x1d, 0xe0, 0xad, 0x74, 0x98, 0x22, 0x70, 0x11, 0x3a, 0x56, 0xe7, 0x66, 0x3a, + 0x7b, 0x2c, 0x2e, 0x16, 0xff, 0xdf, 0x83, 0xa4, 0x84, 0x67, 0x86, 0xcb, 0x8e, 0x9a, 0x45, 0xfa, + 0xdf, 0x14, 0xe9, 0xc8, 0x75, 0x8e, 0x49, 0x84, 0x3d, 0x22, 0x0f, 0x66, 0x94, 0xc4, 0x53, 0x0b, + 0x5c, 0xf4, 0x8b, 0xa0, 0x8e, 0x9e, 0x42, 0xf8, 0x8c, 0x62, 0x0e, 0xc0, 0xff, 0x1f, 0xe1, 0x47, + 0x97, 0x71, 0x00, 0x7e, 0x9a, 0x3d, 0x23, 0x2b, 0x75, 0x31, 0xb1, 0xf1, 0xfb, 0x11, 0xb1, 0x6e, + 0x93, 0xf2, 0x04, 0xba, 0x3a, 0x15, 0x50, 0xae, 0xe1, 0x0c, 0xd2, 0xea, 0xa2, 0x3e, 0x9b, 0x76, + 0x36, 0xd2, 0xaa, 0xe2, 0x3e, 0x9b, 0x46, 0xd9, 0x45, 0xc2, 0x7a, 0x3c, 0xcd, 0x21, 0xe8, 0xdc, + 0x07, 0xdd, 0x0e, 0x16, 0x75, 0x42, 0xcf, 0xb1, 0xdb, 0xe4, 0x46, 0xa9, 0x09, 0x07, 0x3e, 0xb4, + 0x25, 0xa4, 0x22, 0xac, 0x49, 0xdf, 0x0d, 0x6d, 0xa9, 0x78, 0x1a, 0x72, 0x9b, 0x3a, 0xca, 0x90, + 0x3d, 0xfe, 0x0e, 0x25, 0x9a, 0x9e, 0x67, 0x97, 0xc9, 0x85, 0x4a, 0xae, 0xa6, 0xab, 0x15, 0x04, + 0x95, 0x67, 0xab, 0x60, 0xe9, 0x05, 0xf6, 0x90, 0xdc, 0x9d, 0x1c, 0x29, 0x38, 0xb9, 0x31, 0xda, + 0xa2, 0xd4, 0xa2, 0xa2, 0x75, 0xae, 0xbc, 0xed, 0xd3, 0xf9, 0x62, 0x7b, 0xac, 0x95, 0x60, 0x43, + 0x79, 0xde, 0x92, 0xe0, 0xba, 0xda, 0xfa, 0x49, 0x32, 0x4f, 0x53, 0xbd, 0x06, 0x82, 0x5e, 0x64, + 0xb7, 0xc8, 0x75, 0x63, 0x21, 0x93, 0x79, 0x16, 0x6c, 0x54, 0xf1, 0x34, 0xe8, 0x12, 0x5b, 0x24, + 0x17, 0x85, 0x74, 0xe5, 0x77, 0x05, 0x89, 0xbd, 0xb8, 0xcc, 0xee, 0x93, 0xdb, 0x55, 0xd5, 0x42, + 0x67, 0xe0, 0xbc, 0x4c, 0xf6, 0x95, 0x8f, 0xe5, 0x65, 0xdc, 0xd3, 0x05, 0x4c, 0xd5, 0xe3, 0x4a, + 0xfa, 0x7e, 0x98, 0x5a, 0x4c, 0x95, 0x6a, 0x71, 0xb2, 0x09, 0xe5, 0x9a, 0x42, 0xa2, 0x05, 0xd0, + 0x2b, 0x48, 0xaf, 0x67, 0x78, 0x9a, 0x86, 0x44, 0xab, 0x1e, 0xd8, 0xb1, 0x1f, 0x48, 0x41, 0xaf, + 0xee, 0x53, 0x1b, 0x06, 0x5e, 0xeb, 0x4a, 0x0f, 0xa9, 0x74, 0x75, 0x9f, 0x78, 0x9a, 0x7a, 0xcb, + 0x93, 0xd7, 0x78, 0xd6, 0xaf, 0x61, 0x63, 0x27, 0x47, 0x66, 0x34, 0x76, 0x89, 0x31, 0x72, 0xa6, + 0xca, 0xcf, 0x8d, 0xc1, 0x74, 0xd7, 0xd9, 0x5d, 0xb2, 0xfc, 0x26, 0xd7, 0x1e, 0x5c, 0x90, 0x2a, + 0x58, 0xe8, 0x49, 0x58, 0x9b, 0x14, 0xb9, 0x92, 0xc6, 0x80, 0xa7, 0x37, 0xd8, 0x3d, 0x72, 0xab, + 0xdb, 0x37, 0x5d, 0x50, 0xb3, 0x81, 0x37, 0xb1, 0x9b, 0xd3, 0xb3, 0x3a, 0xb7, 0x09, 0xc4, 0xda, + 0x20, 0x95, 0x1d, 0xb9, 0x9a, 0x02, 0x5d, 0x46, 0x64, 0x35, 0xc1, 0x33, 0x38, 0x34, 0xac, 0x87, + 0xb7, 0x9e, 0xde, 0x62, 0xe7, 0xc8, 0xe9, 0x4c, 0x3a, 0x17, 0x4f, 0x3d, 0x4a, 0x92, 0xde, 0x66, + 0xd7, 0xc9, 0x15, 0xa9, 0x12, 0xad, 0x1c, 0x76, 0x46, 0xf9, 0x90, 0xe4, 0xd6, 0x82, 0x4a, 0x8a, + 0x5e, 0x3b, 0x7a, 0x27, 0x2a, 0xce, 0xca, 0x64, 0xd2, 0x74, 0xbb, 0xdc, 0x05, 0x91, 0x9b, 0x54, + 0x26, 0xd1, 0xe0, 0xba, 0xc0, 0x05, 0x58, 0x47, 0xef, 0xa2, 0xcd, 0x16, 0xd8, 0xe8, 0x91, 0x87, + 0xc2, 0x02, 0x57, 0x22, 0x08, 0x70, 0x89, 0x95, 0xc6, 0xa3, 0x29, 0xde, 0x43, 0x9b, 0x3d, 0x2c, + 0x83, 0xd7, 0x3a, 0xb4, 0x61, 0x2d, 0xc6, 0x72, 0xf4, 0x3e, 0x76, 0xf8, 0x28, 0x58, 0xc6, 0x55, + 0xbf, 0xc4, 0x3d, 0xc0, 0x2b, 0x20, 0x57, 0xe3, 0xad, 0x8b, 0xc7, 0x93, 0x3e, 0x64, 0xd7, 0xc8, + 0x42, 0xad, 0x4e, 0xe8, 0x61, 0x1c, 0x63, 0xa1, 0x0d, 0xb8, 0x58, 0xa0, 0x5f, 
0x4c, 0xaa, 0xcd, + 0x25, 0x5d, 0x10, 0x79, 0x0a, 0x01, 0x94, 0xa0, 0x8f, 0xb0, 0x01, 0xd1, 0xd5, 0xbd, 0xcc, 0x20, + 0x64, 0xb9, 0xf3, 0x61, 0x35, 0x36, 0x99, 0x27, 0x51, 0x11, 0xc5, 0xf8, 0x3b, 0xad, 0x80, 0x36, + 0x71, 0x29, 0xba, 0x07, 0x36, 0xe5, 0xc6, 0x60, 0x93, 0xab, 0x48, 0x6e, 0x9f, 0xb4, 0x1f, 0xb3, + 0x1b, 0xe4, 0xea, 0x64, 0x92, 0x62, 0xb6, 0xed, 0xf1, 0xa0, 0x7a, 0x6e, 0x3d, 0x5d, 0x61, 0x4b, + 0x64, 0xb1, 0x5e, 0xd8, 0x38, 0x8a, 0x01, 0x1b, 0x04, 0xef, 0xd3, 0xaf, 0xb0, 0x19, 0x75, 0x6b, + 0x27, 0x1a, 0x32, 0xbe, 0xa0, 0x40, 0x48, 0x4f, 0x9f, 0xb0, 0x2b, 0xe4, 0x52, 0xbd, 0xac, 0x42, + 0x5a, 0xae, 0xdc, 0x08, 0xfa, 0x94, 0xbd, 0x24, 0xcf, 0x67, 0x98, 0x49, 0xb4, 0xab, 0x7d, 0x67, + 0x62, 0xf2, 0x14, 0x3c, 0x43, 0xe5, 0xd4, 0x46, 0xeb, 0xb9, 0xed, 0x40, 0xb4, 0xf1, 0xfa, 0x2e, + 0xfe, 0x9a, 0x2d, 0x93, 0xa5, 0xd2, 0x03, 0xb5, 0x81, 0x68, 0x29, 0x1a, 0xa5, 0x99, 0xe9, 0x1e, + 0x14, 0xd7, 0x30, 0xfd, 0x06, 0xeb, 0x3b, 0x70, 0x99, 0xd7, 0x77, 0xfe, 0x73, 0x3c, 0xc2, 0xa8, + 0xcd, 0xcc, 0x70, 0x8f, 0x52, 0x0f, 0xb9, 0x12, 0x60, 0xd3, 0x7e, 0x4c, 0x83, 0x39, 0x62, 0x4d, + 0xb9, 0x4a, 0xa2, 0x8e, 0xbe, 0xc5, 0x1d, 0x8e, 0xed, 0x0b, 0x71, 0xbb, 0x8a, 0x7e, 0x62, 0x77, + 0xf1, 0x93, 0xb6, 0xf6, 0x99, 0x6a, 0xd4, 0x51, 0x69, 0x47, 0x2f, 0xd0, 0xd4, 0x8c, 0xd5, 0x99, + 0xc6, 0x38, 0xa1, 0xc2, 0xe0, 0xb6, 0xd2, 0x97, 0xb1, 0x47, 0xf5, 0xdc, 0x84, 0xa5, 0x1b, 0xb0, + 0x09, 0x9e, 0x17, 0x21, 0x5d, 0x21, 0x03, 0xd4, 0x75, 0xa6, 0x15, 0xf4, 0xeb, 0x21, 0xfa, 0x1d, + 0x7b, 0x45, 0x5a, 0x87, 0xb3, 0xc7, 0x83, 0x5a, 0x40, 0xe4, 0x6a, 0x8b, 0x87, 0x28, 0xa0, 0x7c, + 0x02, 0xcf, 0x22, 0xff, 0x15, 0xea, 0xa9, 0x96, 0x81, 0x80, 0x44, 0x66, 0x3c, 0x2d, 0x6e, 0x21, + 0x17, 0xc6, 0x4f, 0xa8, 0xef, 0xd9, 0x02, 0x99, 0xaf, 0xca, 0x4e, 0xb9, 0xea, 0xe4, 0xbc, 0x03, + 0x85, 0x57, 0xfe, 0x80, 0xab, 0x9e, 0x3c, 0x0d, 0xd5, 0x34, 0xe5, 0xec, 0x31, 0xf9, 0xb2, 0x36, + 0x48, 0x34, 0x00, 0x5c, 0x4d, 0x65, 0x8c, 0xc1, 0x42, 0xa2, 0xad, 0xc0, 0x06, 0xd7, 0xfb, 0xb1, + 0x8a, 0xa2, 0x9b, 0xbe, 0xb7, 0xcb, 0x37, 0x8f, 0x74, 0x81, 0x07, 0xa5, 0xb5, 0xa1, 0xc9, 0xea, + 0x5f, 0xc7, 0xc8, 0xf2, 0x4f, 0xc3, 0x0f, 0x8d, 0xd9, 0xef, 0xcb, 0xd5, 0xc5, 0x43, 0x1f, 0x88, + 0x06, 0x5f, 0x97, 0xe6, 0xd8, 0x3b, 0x51, 0xb2, 0xb7, 0x87, 0xef, 0x37, 0x06, 0xdb, 0x8d, 0xe1, + 0xee, 0x76, 0x73, 0x7b, 0x6b, 0x10, 0xdf, 0x9e, 0xd5, 0x5b, 0xf7, 0xe3, 0xce, 0xe8, 0xa8, 0xa7, + 0xef, 0x8b, 0xe2, 0xe7, 0xb7, 0xb9, 0xe3, 0x1d, 0xce, 0x7f, 0x9f, 0x5b, 0xea, 0x14, 0xc1, 0xf8, + 0xe6, 0xa8, 0x51, 0xfc, 0xc5, 0x7f, 0xbd, 0x95, 0x46, 0x4c, 0x39, 0xfa, 0xa3, 0x02, 0xac, 0xf3, + 0xcd, 0xd1, 0x7a, 0x0d, 0x58, 0xef, 0xad, 0xac, 0x17, 0x80, 0x3f, 0xe7, 0x96, 0x8b, 0xd1, 0x56, + 0x8b, 0x6f, 0x8e, 0x5a, 0xad, 0x1a, 0xd2, 0x6a, 0xf5, 0x56, 0x5a, 0xad, 0x02, 0xf4, 0xe3, 0x89, + 0x58, 0xdd, 0x93, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa6, 0x85, 0x42, 0xdd, 0x97, 0x0b, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_attribute_reference_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_attribute_reference_error.pb.go new file mode 100644 index 0000000..c23e3aa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_attribute_reference_error.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/errors/feed_attribute_reference_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible feed attribute reference errors. +type FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError int32 + +const ( + // Enum unspecified. + FeedAttributeReferenceErrorEnum_UNSPECIFIED FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError = 0 + // The received error code is not known in this version. + FeedAttributeReferenceErrorEnum_UNKNOWN FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError = 1 + // A feed referenced by ID has been removed. + FeedAttributeReferenceErrorEnum_CANNOT_REFERENCE_REMOVED_FEED FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError = 2 + // There is no enabled feed with the given name. + FeedAttributeReferenceErrorEnum_INVALID_FEED_NAME FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError = 3 + // There is no feed attribute in an enabled feed with the given name. + FeedAttributeReferenceErrorEnum_INVALID_FEED_ATTRIBUTE_NAME FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError = 4 +) + +var FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_REFERENCE_REMOVED_FEED", + 3: "INVALID_FEED_NAME", + 4: "INVALID_FEED_ATTRIBUTE_NAME", +} +var FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_REFERENCE_REMOVED_FEED": 2, + "INVALID_FEED_NAME": 3, + "INVALID_FEED_ATTRIBUTE_NAME": 4, +} + +func (x FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError) String() string { + return proto.EnumName(FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError_name, int32(x)) +} +func (FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_attribute_reference_error_dc3a3bf8c254dc8e, []int{0, 0} +} + +// Container for enum describing possible feed attribute reference errors. 
+type FeedAttributeReferenceErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedAttributeReferenceErrorEnum) Reset() { *m = FeedAttributeReferenceErrorEnum{} } +func (m *FeedAttributeReferenceErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedAttributeReferenceErrorEnum) ProtoMessage() {} +func (*FeedAttributeReferenceErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_attribute_reference_error_dc3a3bf8c254dc8e, []int{0} +} +func (m *FeedAttributeReferenceErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedAttributeReferenceErrorEnum.Unmarshal(m, b) +} +func (m *FeedAttributeReferenceErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedAttributeReferenceErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedAttributeReferenceErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedAttributeReferenceErrorEnum.Merge(dst, src) +} +func (m *FeedAttributeReferenceErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedAttributeReferenceErrorEnum.Size(m) +} +func (m *FeedAttributeReferenceErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedAttributeReferenceErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedAttributeReferenceErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedAttributeReferenceErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedAttributeReferenceErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError", FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError_name, FeedAttributeReferenceErrorEnum_FeedAttributeReferenceError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_attribute_reference_error.proto", fileDescriptor_feed_attribute_reference_error_dc3a3bf8c254dc8e) +} + +var fileDescriptor_feed_attribute_reference_error_dc3a3bf8c254dc8e = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x6e, 0xa3, 0x30, + 0x14, 0x86, 0x07, 0x32, 0x9a, 0x91, 0x9c, 0xc5, 0x30, 0x48, 0xb3, 0x99, 0xb4, 0x49, 0xcb, 0x01, + 0x8c, 0x50, 0x77, 0xee, 0xca, 0x01, 0x13, 0xa1, 0x36, 0x4e, 0x44, 0x09, 0x95, 0x2a, 0x24, 0x44, + 0x82, 0x83, 0x90, 0x12, 0x1c, 0xd9, 0x24, 0x47, 0xe9, 0x01, 0xba, 0xec, 0x11, 0x7a, 0x84, 0x1e, + 0xa5, 0x57, 0xe8, 0xa6, 0x02, 0x97, 0x48, 0x5d, 0x34, 0x2b, 0xff, 0x7a, 0xfe, 0xde, 0xff, 0x3f, + 0xbd, 0x07, 0xdc, 0x82, 0xf3, 0x62, 0xc3, 0xec, 0x2c, 0x97, 0xb6, 0x92, 0x8d, 0x3a, 0x38, 0x36, + 0x13, 0x82, 0x0b, 0x69, 0xaf, 0x19, 0xcb, 0xd3, 0xac, 0xae, 0x45, 0xb9, 0xdc, 0xd7, 0x2c, 0x15, + 0x6c, 0xcd, 0x04, 0xab, 0x56, 0x2c, 0x6d, 0xff, 0xe1, 0x4e, 0xf0, 0x9a, 0x9b, 0x43, 0xd5, 0x09, + 0xb3, 0x5c, 0xc2, 0xa3, 0x09, 0x3c, 0x38, 0x50, 0x99, 0xfc, 0x3f, 0xeb, 0x42, 0x76, 0xa5, 0x9d, + 0x55, 0x15, 0xaf, 0xb3, 0xba, 0xe4, 0x95, 0x54, 0xdd, 0xd6, 0x8b, 0x06, 0x46, 0x3e, 0x63, 0x39, + 0xee, 0x52, 0xc2, 0x2e, 0x84, 0x34, 0xed, 0xa4, 0xda, 0x6f, 0xad, 0x47, 0x0d, 0x0c, 0x4e, 0x30, + 0xe6, 0x1f, 0xd0, 0x5f, 0xd0, 0xbb, 0x39, 0x71, 0x03, 0x3f, 0x20, 0x9e, 0xf1, 0xc3, 0xec, 0x83, + 0xdf, 0x0b, 0x7a, 0x43, 0x67, 0xf7, 0xd4, 0xd0, 0xcc, 0x4b, 0x70, 0xee, 0x62, 0x4a, 0x67, 0x51, + 0x1a, 0x12, 0x9f, 0x84, 0x84, 0xba, 0x24, 0x0d, 0xc9, 0x74, 0x16, 0x13, 0x2f, 0xf5, 0x09, 0xf1, + 0x0c, 0xdd, 0xfc, 0x07, 0xfe, 0x06, 0x34, 0xc6, 0xb7, 0x81, 0xaa, 0xa4, 
0x14, 0x4f, 0x89, 0xd1, + 0x33, 0x47, 0x60, 0xf0, 0xa5, 0x8c, 0xa3, 0x28, 0x0c, 0xc6, 0x8b, 0x88, 0x28, 0xe0, 0xe7, 0xf8, + 0x5d, 0x03, 0xd6, 0x8a, 0x6f, 0xe1, 0xe9, 0x0d, 0x8c, 0x2f, 0x4e, 0x0c, 0x3f, 0x6f, 0xb6, 0x30, + 0xd7, 0x1e, 0xbc, 0x4f, 0x8f, 0x82, 0x6f, 0xb2, 0xaa, 0x80, 0x5c, 0x14, 0x76, 0xc1, 0xaa, 0x76, + 0x47, 0xdd, 0x69, 0x76, 0xa5, 0xfc, 0xee, 0x52, 0xd7, 0xea, 0x79, 0xd2, 0x7b, 0x13, 0x8c, 0x9f, + 0xf5, 0xe1, 0x44, 0x99, 0xe1, 0x5c, 0x42, 0x25, 0x1b, 0x15, 0x3b, 0xb0, 0x8d, 0x94, 0xaf, 0x1d, + 0x90, 0xe0, 0x5c, 0x26, 0x47, 0x20, 0x89, 0x9d, 0x44, 0x01, 0x6f, 0xba, 0xa5, 0xaa, 0x08, 0xe1, + 0x5c, 0x22, 0x74, 0x44, 0x10, 0x8a, 0x1d, 0x84, 0x14, 0xb4, 0xfc, 0xd5, 0x4e, 0x77, 0xf5, 0x11, + 0x00, 0x00, 0xff, 0xff, 0xd6, 0xd2, 0x2a, 0xb7, 0x46, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_error.pb.go new file mode 100644 index 0000000..3d5bfa0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_error.pb.go @@ -0,0 +1,206 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/feed_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible feed errors. +type FeedErrorEnum_FeedError int32 + +const ( + // Enum unspecified. + FeedErrorEnum_UNSPECIFIED FeedErrorEnum_FeedError = 0 + // The received error code is not known in this version. + FeedErrorEnum_UNKNOWN FeedErrorEnum_FeedError = 1 + // The names of the FeedAttributes must be unique. + FeedErrorEnum_ATTRIBUTE_NAMES_NOT_UNIQUE FeedErrorEnum_FeedError = 2 + // The attribute list must be an exact copy of the existing list if the + // attribute ID's are present. + FeedErrorEnum_ATTRIBUTES_DO_NOT_MATCH_EXISTING_ATTRIBUTES FeedErrorEnum_FeedError = 3 + // Cannot specify USER origin for a system generated feed. + FeedErrorEnum_CANNOT_SPECIFY_USER_ORIGIN_FOR_SYSTEM_FEED FeedErrorEnum_FeedError = 4 + // Cannot specify GOOGLE origin for a non-system generated feed. + FeedErrorEnum_CANNOT_SPECIFY_GOOGLE_ORIGIN_FOR_NON_SYSTEM_FEED FeedErrorEnum_FeedError = 5 + // Cannot specify feed attributes for system feed. + FeedErrorEnum_CANNOT_SPECIFY_FEED_ATTRIBUTES_FOR_SYSTEM_FEED FeedErrorEnum_FeedError = 6 + // Cannot update FeedAttributes on feed with origin GOOGLE. + FeedErrorEnum_CANNOT_UPDATE_FEED_ATTRIBUTES_WITH_ORIGIN_GOOGLE FeedErrorEnum_FeedError = 7 + // The given ID refers to a removed Feed. Removed Feeds are immutable. + FeedErrorEnum_FEED_REMOVED FeedErrorEnum_FeedError = 8 + // The origin of the feed is not valid for the client. + FeedErrorEnum_INVALID_ORIGIN_VALUE FeedErrorEnum_FeedError = 9 + // A user can only create and modify feeds with USER origin. 
+ FeedErrorEnum_FEED_ORIGIN_IS_NOT_USER FeedErrorEnum_FeedError = 10 + // Invalid auth token for the given email. + FeedErrorEnum_INVALID_AUTH_TOKEN_FOR_EMAIL FeedErrorEnum_FeedError = 11 + // Invalid email specified. + FeedErrorEnum_INVALID_EMAIL FeedErrorEnum_FeedError = 12 + // Feed name matches that of another active Feed. + FeedErrorEnum_DUPLICATE_FEED_NAME FeedErrorEnum_FeedError = 13 + // Name of feed is not allowed. + FeedErrorEnum_INVALID_FEED_NAME FeedErrorEnum_FeedError = 14 + // Missing OAuthInfo. + FeedErrorEnum_MISSING_OAUTH_INFO FeedErrorEnum_FeedError = 15 + // New FeedAttributes must not affect the unique key. + FeedErrorEnum_NEW_ATTRIBUTE_CANNOT_BE_PART_OF_UNIQUE_KEY FeedErrorEnum_FeedError = 16 + // Too many FeedAttributes for a Feed. + FeedErrorEnum_TOO_MANY_ATTRIBUTES FeedErrorEnum_FeedError = 17 + // The business account is not valid. + FeedErrorEnum_INVALID_BUSINESS_ACCOUNT FeedErrorEnum_FeedError = 18 + // Business account cannot access Google My Business account. + FeedErrorEnum_BUSINESS_ACCOUNT_CANNOT_ACCESS_LOCATION_ACCOUNT FeedErrorEnum_FeedError = 19 + // Invalid chain ID provided for affiliate location feed. + FeedErrorEnum_INVALID_AFFILIATE_CHAIN_ID FeedErrorEnum_FeedError = 20 +) + +var FeedErrorEnum_FeedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ATTRIBUTE_NAMES_NOT_UNIQUE", + 3: "ATTRIBUTES_DO_NOT_MATCH_EXISTING_ATTRIBUTES", + 4: "CANNOT_SPECIFY_USER_ORIGIN_FOR_SYSTEM_FEED", + 5: "CANNOT_SPECIFY_GOOGLE_ORIGIN_FOR_NON_SYSTEM_FEED", + 6: "CANNOT_SPECIFY_FEED_ATTRIBUTES_FOR_SYSTEM_FEED", + 7: "CANNOT_UPDATE_FEED_ATTRIBUTES_WITH_ORIGIN_GOOGLE", + 8: "FEED_REMOVED", + 9: "INVALID_ORIGIN_VALUE", + 10: "FEED_ORIGIN_IS_NOT_USER", + 11: "INVALID_AUTH_TOKEN_FOR_EMAIL", + 12: "INVALID_EMAIL", + 13: "DUPLICATE_FEED_NAME", + 14: "INVALID_FEED_NAME", + 15: "MISSING_OAUTH_INFO", + 16: "NEW_ATTRIBUTE_CANNOT_BE_PART_OF_UNIQUE_KEY", + 17: "TOO_MANY_ATTRIBUTES", + 18: "INVALID_BUSINESS_ACCOUNT", + 19: "BUSINESS_ACCOUNT_CANNOT_ACCESS_LOCATION_ACCOUNT", + 20: "INVALID_AFFILIATE_CHAIN_ID", +} +var FeedErrorEnum_FeedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ATTRIBUTE_NAMES_NOT_UNIQUE": 2, + "ATTRIBUTES_DO_NOT_MATCH_EXISTING_ATTRIBUTES": 3, + "CANNOT_SPECIFY_USER_ORIGIN_FOR_SYSTEM_FEED": 4, + "CANNOT_SPECIFY_GOOGLE_ORIGIN_FOR_NON_SYSTEM_FEED": 5, + "CANNOT_SPECIFY_FEED_ATTRIBUTES_FOR_SYSTEM_FEED": 6, + "CANNOT_UPDATE_FEED_ATTRIBUTES_WITH_ORIGIN_GOOGLE": 7, + "FEED_REMOVED": 8, + "INVALID_ORIGIN_VALUE": 9, + "FEED_ORIGIN_IS_NOT_USER": 10, + "INVALID_AUTH_TOKEN_FOR_EMAIL": 11, + "INVALID_EMAIL": 12, + "DUPLICATE_FEED_NAME": 13, + "INVALID_FEED_NAME": 14, + "MISSING_OAUTH_INFO": 15, + "NEW_ATTRIBUTE_CANNOT_BE_PART_OF_UNIQUE_KEY": 16, + "TOO_MANY_ATTRIBUTES": 17, + "INVALID_BUSINESS_ACCOUNT": 18, + "BUSINESS_ACCOUNT_CANNOT_ACCESS_LOCATION_ACCOUNT": 19, + "INVALID_AFFILIATE_CHAIN_ID": 20, +} + +func (x FeedErrorEnum_FeedError) String() string { + return proto.EnumName(FeedErrorEnum_FeedError_name, int32(x)) +} +func (FeedErrorEnum_FeedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_error_55545305caaf267c, []int{0, 0} +} + +// Container for enum describing possible feed errors. 
+type FeedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedErrorEnum) Reset() { *m = FeedErrorEnum{} } +func (m *FeedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedErrorEnum) ProtoMessage() {} +func (*FeedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_error_55545305caaf267c, []int{0} +} +func (m *FeedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedErrorEnum.Unmarshal(m, b) +} +func (m *FeedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedErrorEnum.Merge(dst, src) +} +func (m *FeedErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedErrorEnum.Size(m) +} +func (m *FeedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedErrorEnum_FeedError", FeedErrorEnum_FeedError_name, FeedErrorEnum_FeedError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_error.proto", fileDescriptor_feed_error_55545305caaf267c) +} + +var fileDescriptor_feed_error_55545305caaf267c = []byte{ + // 611 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xdf, 0x6e, 0xd3, 0x3e, + 0x14, 0xc7, 0x7f, 0xeb, 0x7e, 0xdb, 0x98, 0xf7, 0xcf, 0xf3, 0x06, 0x9b, 0xc6, 0x34, 0xa1, 0x5e, + 0x82, 0x94, 0x50, 0xc6, 0x55, 0xb8, 0x72, 0x13, 0xa7, 0xb5, 0x96, 0xd8, 0x25, 0xb6, 0x3b, 0x8a, + 0x2a, 0x59, 0x85, 0x84, 0x6a, 0xd2, 0xd6, 0x4c, 0xcd, 0xd8, 0x03, 0x71, 0xc9, 0xa3, 0x20, 0x9e, + 0x81, 0x7b, 0x78, 0x0a, 0xe4, 0x38, 0xc9, 0xca, 0x26, 0xb8, 0xca, 0xd1, 0xf1, 0xe7, 0x9b, 0xef, + 0xf1, 0x39, 0x3e, 0xc0, 0x9d, 0xe6, 0xf9, 0xf4, 0x32, 0x73, 0x27, 0x69, 0x51, 0x85, 0x26, 0xba, + 0xed, 0xb8, 0xd9, 0x7c, 0x9e, 0xcf, 0x0b, 0xf7, 0x53, 0x96, 0xa5, 0xba, 0x8c, 0x9d, 0xeb, 0x79, + 0x7e, 0x93, 0xa3, 0x13, 0x4b, 0x39, 0x93, 0xb4, 0x70, 0x1a, 0x81, 0x73, 0xdb, 0x71, 0xac, 0xe0, + 0xe8, 0xb8, 0xfe, 0xe1, 0xf5, 0x85, 0x3b, 0x99, 0xcd, 0xf2, 0x9b, 0xc9, 0xcd, 0x45, 0x3e, 0x2b, + 0xac, 0xba, 0xfd, 0x63, 0x05, 0x6c, 0x85, 0x59, 0x96, 0x12, 0x03, 0x93, 0xd9, 0xe7, 0xab, 0xf6, + 0xf7, 0x15, 0xb0, 0xde, 0x64, 0xd0, 0x0e, 0xd8, 0x50, 0x4c, 0x0c, 0x88, 0x4f, 0x43, 0x4a, 0x02, + 0xf8, 0x1f, 0xda, 0x00, 0x6b, 0x8a, 0x9d, 0x31, 0x7e, 0xce, 0xe0, 0x12, 0x3a, 0x01, 0x47, 0x58, + 0xca, 0x84, 0x76, 0x95, 0x24, 0x9a, 0xe1, 0x98, 0x08, 0xcd, 0xb8, 0xd4, 0x8a, 0xd1, 0xb7, 0x8a, + 0xc0, 0x16, 0x72, 0xc1, 0x8b, 0xe6, 0x5c, 0xe8, 0x80, 0x97, 0xa7, 0x31, 0x96, 0x7e, 0x5f, 0x93, + 0x77, 0x54, 0x48, 0xca, 0x7a, 0xfa, 0x0e, 0x80, 0xcb, 0xc8, 0x01, 0xcf, 0x7d, 0xcc, 0x0c, 0x65, + 0x3d, 0x47, 0x5a, 0x09, 0x92, 0x68, 0x9e, 0xd0, 0x1e, 0x65, 0x3a, 0xe4, 0x89, 0x16, 0x23, 0x21, + 0x49, 0xac, 0x43, 0x42, 0x02, 0xf8, 0x3f, 0x7a, 0x0d, 0x5e, 0xde, 0xe3, 0x7b, 0x9c, 0xf7, 0x22, + 0xb2, 0xa8, 0x60, 0x9c, 0xfd, 0xa1, 0x5a, 0x41, 0xaf, 0x80, 0x73, 0x4f, 0x65, 0x0e, 0x16, 0x2a, + 0x79, 0xe0, 0xb4, 0xba, 0xe0, 0xa4, 0x06, 0x01, 0x96, 0xe4, 0x81, 0xe4, 0x9c, 0xca, 0x7e, 0x6d, + 0x6b, 0x8b, 0x80, 0x6b, 0x08, 0x82, 0xcd, 0x92, 0x4b, 0x48, 0xcc, 0x87, 0x24, 0x80, 0x8f, 0xd0, + 0x21, 0xd8, 
0xa7, 0x6c, 0x88, 0x23, 0x1a, 0xd4, 0xf0, 0x10, 0x47, 0x8a, 0xc0, 0x75, 0xf4, 0x14, + 0x1c, 0x94, 0x6c, 0x95, 0xa6, 0x55, 0x2f, 0x05, 0x49, 0x20, 0x40, 0xcf, 0xc0, 0x71, 0x2d, 0xc3, + 0x4a, 0xf6, 0xb5, 0xe4, 0x67, 0xc4, 0x5e, 0x8f, 0xc4, 0x98, 0x46, 0x70, 0x03, 0xed, 0x82, 0xad, + 0x9a, 0xb0, 0xa9, 0x4d, 0x74, 0x00, 0xf6, 0x02, 0x35, 0x88, 0xa8, 0xdf, 0xd4, 0x6b, 0x66, 0x04, + 0xb7, 0xd0, 0x63, 0xb0, 0x5b, 0xb3, 0x77, 0xe9, 0x6d, 0xf4, 0x04, 0xa0, 0x98, 0x0a, 0x61, 0xa6, + 0xc2, 0x4b, 0x17, 0xca, 0x42, 0x0e, 0x77, 0xcc, 0x54, 0x18, 0x39, 0xbf, 0xbb, 0xac, 0xae, 0x3a, + 0xd1, 0x25, 0x7a, 0x80, 0x13, 0xa9, 0x79, 0x58, 0x8d, 0x5c, 0x9f, 0x91, 0x11, 0x84, 0xc6, 0x57, + 0x72, 0xae, 0x63, 0xcc, 0x46, 0x8b, 0xe3, 0xdd, 0x45, 0xc7, 0xe0, 0xb0, 0xf6, 0xed, 0x2a, 0x41, + 0x19, 0x11, 0x42, 0x63, 0xdf, 0xe7, 0x8a, 0x49, 0x88, 0xd0, 0x29, 0x70, 0xef, 0x67, 0x6b, 0x27, + 0xec, 0xfb, 0x26, 0x1b, 0x71, 0x1f, 0x4b, 0xca, 0x59, 0x23, 0xda, 0x33, 0x4f, 0xb0, 0x69, 0x4c, + 0x18, 0xd2, 0x88, 0x9a, 0xbb, 0xfa, 0x7d, 0x6c, 0x3a, 0x18, 0xc0, 0xfd, 0xee, 0xcf, 0x25, 0xd0, + 0xfe, 0x98, 0x5f, 0x39, 0xff, 0xde, 0x92, 0xee, 0x76, 0xf3, 0xe4, 0x07, 0x66, 0x2f, 0x06, 0x4b, + 0xef, 0x83, 0x4a, 0x31, 0xcd, 0x2f, 0x27, 0xb3, 0xa9, 0x93, 0xcf, 0xa7, 0xee, 0x34, 0x9b, 0x95, + 0x5b, 0x53, 0x2f, 0xe6, 0xf5, 0x45, 0xf1, 0xb7, 0x3d, 0x7d, 0x63, 0x3f, 0x5f, 0x5a, 0xcb, 0x3d, + 0x8c, 0xbf, 0xb6, 0x4e, 0x7a, 0xf6, 0x67, 0x38, 0x2d, 0x1c, 0x1b, 0x9a, 0x68, 0xd8, 0x71, 0x4a, + 0xcb, 0xe2, 0x5b, 0x0d, 0x8c, 0x71, 0x5a, 0x8c, 0x1b, 0x60, 0x3c, 0xec, 0x8c, 0x2d, 0xf0, 0xab, + 0xd5, 0xb6, 0x59, 0xcf, 0xc3, 0x69, 0xe1, 0x79, 0x0d, 0xe2, 0x79, 0xc3, 0x8e, 0xe7, 0x59, 0xe8, + 0xc3, 0x6a, 0x59, 0xdd, 0xe9, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xef, 0x35, 0xec, 0x3e, 0x44, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_error.pb.go new file mode 100644 index 0000000..2fe978b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_error.pb.go @@ -0,0 +1,157 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/feed_item_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible feed item errors. +type FeedItemErrorEnum_FeedItemError int32 + +const ( + // Enum unspecified. + FeedItemErrorEnum_UNSPECIFIED FeedItemErrorEnum_FeedItemError = 0 + // The received error code is not known in this version. + FeedItemErrorEnum_UNKNOWN FeedItemErrorEnum_FeedItemError = 1 + // Cannot convert the feed attribute value from string to its real type. + FeedItemErrorEnum_CANNOT_CONVERT_ATTRIBUTE_VALUE_FROM_STRING FeedItemErrorEnum_FeedItemError = 2 + // Cannot operate on removed feed item. 
+ FeedItemErrorEnum_CANNOT_OPERATE_ON_REMOVED_FEED_ITEM FeedItemErrorEnum_FeedItemError = 3 + // Date time zone does not match the account's time zone. + FeedItemErrorEnum_DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE FeedItemErrorEnum_FeedItemError = 4 + // Feed item with the key attributes could not be found. + FeedItemErrorEnum_KEY_ATTRIBUTES_NOT_FOUND FeedItemErrorEnum_FeedItemError = 5 + // Url feed attribute value is not valid. + FeedItemErrorEnum_INVALID_URL FeedItemErrorEnum_FeedItemError = 6 + // Some key attributes are missing. + FeedItemErrorEnum_MISSING_KEY_ATTRIBUTES FeedItemErrorEnum_FeedItemError = 7 + // Feed item has same key attributes as another feed item. + FeedItemErrorEnum_KEY_ATTRIBUTES_NOT_UNIQUE FeedItemErrorEnum_FeedItemError = 8 + // Cannot modify key attributes on an existing feed item. + FeedItemErrorEnum_CANNOT_MODIFY_KEY_ATTRIBUTE_VALUE FeedItemErrorEnum_FeedItemError = 9 + // The feed attribute value is too large. + FeedItemErrorEnum_SIZE_TOO_LARGE_FOR_MULTI_VALUE_ATTRIBUTE FeedItemErrorEnum_FeedItemError = 10 +) + +var FeedItemErrorEnum_FeedItemError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_CONVERT_ATTRIBUTE_VALUE_FROM_STRING", + 3: "CANNOT_OPERATE_ON_REMOVED_FEED_ITEM", + 4: "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE", + 5: "KEY_ATTRIBUTES_NOT_FOUND", + 6: "INVALID_URL", + 7: "MISSING_KEY_ATTRIBUTES", + 8: "KEY_ATTRIBUTES_NOT_UNIQUE", + 9: "CANNOT_MODIFY_KEY_ATTRIBUTE_VALUE", + 10: "SIZE_TOO_LARGE_FOR_MULTI_VALUE_ATTRIBUTE", +} +var FeedItemErrorEnum_FeedItemError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_CONVERT_ATTRIBUTE_VALUE_FROM_STRING": 2, + "CANNOT_OPERATE_ON_REMOVED_FEED_ITEM": 3, + "DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE": 4, + "KEY_ATTRIBUTES_NOT_FOUND": 5, + "INVALID_URL": 6, + "MISSING_KEY_ATTRIBUTES": 7, + "KEY_ATTRIBUTES_NOT_UNIQUE": 8, + "CANNOT_MODIFY_KEY_ATTRIBUTE_VALUE": 9, + "SIZE_TOO_LARGE_FOR_MULTI_VALUE_ATTRIBUTE": 10, +} + +func (x FeedItemErrorEnum_FeedItemError) String() string { + return proto.EnumName(FeedItemErrorEnum_FeedItemError_name, int32(x)) +} +func (FeedItemErrorEnum_FeedItemError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_error_be75ed22e97f1137, []int{0, 0} +} + +// Container for enum describing possible feed item errors. 
+type FeedItemErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemErrorEnum) Reset() { *m = FeedItemErrorEnum{} } +func (m *FeedItemErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemErrorEnum) ProtoMessage() {} +func (*FeedItemErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_error_be75ed22e97f1137, []int{0} +} +func (m *FeedItemErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemErrorEnum.Unmarshal(m, b) +} +func (m *FeedItemErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemErrorEnum.Merge(dst, src) +} +func (m *FeedItemErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemErrorEnum.Size(m) +} +func (m *FeedItemErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedItemErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedItemErrorEnum_FeedItemError", FeedItemErrorEnum_FeedItemError_name, FeedItemErrorEnum_FeedItemError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_item_error.proto", fileDescriptor_feed_item_error_be75ed22e97f1137) +} + +var fileDescriptor_feed_item_error_be75ed22e97f1137 = []byte{ + // 485 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0xdd, 0x8a, 0xd3, 0x40, + 0x18, 0x75, 0x5b, 0xdd, 0xd5, 0x59, 0xc4, 0x71, 0x2e, 0x44, 0x97, 0x75, 0xc1, 0x8a, 0x3f, 0x2c, + 0x92, 0x50, 0xf4, 0x2a, 0x5e, 0x4d, 0x9b, 0x2f, 0x65, 0xd8, 0x66, 0xa6, 0x26, 0x33, 0x91, 0x2d, + 0x85, 0x8f, 0x6a, 0x62, 0x28, 0x6c, 0x33, 0xa5, 0xa9, 0xfb, 0x0a, 0xbe, 0x87, 0x97, 0x3e, 0x8a, + 0x8f, 0x22, 0x78, 0xe1, 0x1b, 0x48, 0x3a, 0x6d, 0xa5, 0xa0, 0x5e, 0xe5, 0x70, 0x72, 0xce, 0xf9, + 0x66, 0xe6, 0x7c, 0xe4, 0x4d, 0x69, 0x6d, 0x79, 0x55, 0xf8, 0xd3, 0xbc, 0xf6, 0x1d, 0x6c, 0xd0, + 0x75, 0xd7, 0x2f, 0x96, 0x4b, 0xbb, 0xac, 0xfd, 0x4f, 0x45, 0x91, 0xe3, 0x6c, 0x55, 0xcc, 0x71, + 0x4d, 0x78, 0x8b, 0xa5, 0x5d, 0x59, 0x76, 0xe6, 0xa4, 0xde, 0x34, 0xaf, 0xbd, 0x9d, 0xcb, 0xbb, + 0xee, 0x7a, 0xce, 0x75, 0x72, 0xba, 0x4d, 0x5d, 0xcc, 0xfc, 0x69, 0x55, 0xd9, 0xd5, 0x74, 0x35, + 0xb3, 0x55, 0xed, 0xdc, 0x9d, 0x2f, 0x6d, 0x72, 0x3f, 0x2a, 0x8a, 0x5c, 0xac, 0x8a, 0x39, 0x34, + 0x06, 0xa8, 0x3e, 0xcf, 0x3b, 0xbf, 0x5a, 0xe4, 0xee, 0x1e, 0xcb, 0xee, 0x91, 0x63, 0x23, 0xd3, + 0x11, 0xf4, 0x45, 0x24, 0x20, 0xa4, 0x37, 0xd8, 0x31, 0x39, 0x32, 0xf2, 0x42, 0xaa, 0xf7, 0x92, + 0x1e, 0x30, 0x8f, 0x9c, 0xf7, 0xb9, 0x94, 0x4a, 0x63, 0x5f, 0xc9, 0x0c, 0x12, 0x8d, 0x5c, 0xeb, + 0x44, 0xf4, 0x8c, 0x06, 0xcc, 0xf8, 0xd0, 0x00, 0x46, 0x89, 0x8a, 0x31, 0xd5, 0x89, 0x90, 0x03, + 0xda, 0x62, 0x2f, 0xc8, 0xd3, 0x8d, 0x5e, 0x8d, 0x20, 0xe1, 0x1a, 0x50, 0x49, 0x4c, 0x20, 0x56, + 0x19, 0x84, 0x18, 0x01, 0x84, 0x28, 0x34, 0xc4, 0xb4, 0xcd, 0xce, 0xc9, 0xf3, 0xb0, 0xf9, 0xad, + 0x45, 0x0c, 0x18, 0x9b, 0x54, 0x63, 0x0f, 0x50, 0x48, 0xe4, 0xfd, 0xbe, 0x32, 0x52, 0x3b, 0x7e, + 0xac, 0x24, 0xd0, 0x9b, 0xec, 0x94, 0x3c, 0xbc, 0x80, 0xcb, 0x3f, 0x93, 0x53, 0x6c, 0x06, 0x44, + 0xca, 0xc8, 0x90, 0xde, 0x6a, 0x2e, 0x20, 0x64, 0xc6, 0x87, 0x22, 0x44, 0x93, 0x0c, 0xe9, 
0x21, + 0x3b, 0x21, 0x0f, 0x62, 0x91, 0xa6, 0x42, 0x0e, 0x70, 0xdf, 0x46, 0x8f, 0xd8, 0x63, 0xf2, 0xe8, + 0x2f, 0x51, 0x46, 0x8a, 0x77, 0x06, 0xe8, 0x6d, 0xf6, 0x8c, 0x3c, 0xd9, 0x1c, 0x3f, 0x56, 0xa1, + 0x88, 0x2e, 0xf7, 0x03, 0xdc, 0x8d, 0xe9, 0x1d, 0xf6, 0x8a, 0xbc, 0x4c, 0xc5, 0x18, 0x50, 0x2b, + 0x85, 0x43, 0x9e, 0x0c, 0x00, 0x23, 0x95, 0x60, 0x6c, 0x86, 0x5a, 0x6c, 0x5e, 0x65, 0xe7, 0xa1, + 0xa4, 0xf7, 0xf3, 0x80, 0x74, 0x3e, 0xda, 0xb9, 0xf7, 0xff, 0x3a, 0x7b, 0x6c, 0xaf, 0x97, 0x51, + 0x53, 0xe2, 0xe8, 0x60, 0x1c, 0x6e, 0x5c, 0xa5, 0xbd, 0x9a, 0x56, 0xa5, 0x67, 0x97, 0xa5, 0x5f, + 0x16, 0xd5, 0xba, 0xe2, 0xed, 0x2a, 0x2d, 0x66, 0xf5, 0xbf, 0x36, 0xeb, 0xad, 0xfb, 0x7c, 0x6d, + 0xb5, 0x07, 0x9c, 0x7f, 0x6b, 0x9d, 0x0d, 0x5c, 0x18, 0xcf, 0x6b, 0xcf, 0xc1, 0x06, 0x65, 0x5d, + 0x6f, 0x3d, 0xb2, 0xfe, 0xbe, 0x15, 0x4c, 0x78, 0x5e, 0x4f, 0x76, 0x82, 0x49, 0xd6, 0x9d, 0x38, + 0xc1, 0x8f, 0x56, 0xc7, 0xb1, 0x41, 0xc0, 0xf3, 0x3a, 0x08, 0x76, 0x92, 0x20, 0xc8, 0xba, 0x41, + 0xe0, 0x44, 0x1f, 0x0e, 0xd7, 0xa7, 0x7b, 0xfd, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x59, 0x09, 0xd2, + 0xb2, 0xf6, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_target_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_target_error.pb.go new file mode 100644 index 0000000..3e2e3e9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_target_error.pb.go @@ -0,0 +1,145 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/feed_item_target_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible feed item target errors. +type FeedItemTargetErrorEnum_FeedItemTargetError int32 + +const ( + // Enum unspecified. + FeedItemTargetErrorEnum_UNSPECIFIED FeedItemTargetErrorEnum_FeedItemTargetError = 0 + // The received error code is not known in this version. + FeedItemTargetErrorEnum_UNKNOWN FeedItemTargetErrorEnum_FeedItemTargetError = 1 + // On CREATE, the FeedItemTarget must have a populated field in the oneof + // target. + FeedItemTargetErrorEnum_MUST_SET_TARGET_ONEOF_ON_CREATE FeedItemTargetErrorEnum_FeedItemTargetError = 2 + // The specified feed item target already exists, so it cannot be added. + FeedItemTargetErrorEnum_FEED_ITEM_TARGET_ALREADY_EXISTS FeedItemTargetErrorEnum_FeedItemTargetError = 3 + // The schedules for a given feed item cannot overlap. + FeedItemTargetErrorEnum_FEED_ITEM_SCHEDULES_CANNOT_OVERLAP FeedItemTargetErrorEnum_FeedItemTargetError = 4 + // Too many targets of a given type were added for a single feed item. + FeedItemTargetErrorEnum_TARGET_LIMIT_EXCEEDED_FOR_GIVEN_TYPE FeedItemTargetErrorEnum_FeedItemTargetError = 5 + // Too many AdSchedules are enabled for the feed item for the given day. 
+ FeedItemTargetErrorEnum_TOO_MANY_SCHEDULES_PER_DAY FeedItemTargetErrorEnum_FeedItemTargetError = 6 + // A feed item may either have an enabled campaign target or an enabled ad + // group target. + FeedItemTargetErrorEnum_CANNOT_HAVE_ENABLED_CAMPAIGN_AND_ENABLED_AD_GROUP_TARGETS FeedItemTargetErrorEnum_FeedItemTargetError = 7 +) + +var FeedItemTargetErrorEnum_FeedItemTargetError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MUST_SET_TARGET_ONEOF_ON_CREATE", + 3: "FEED_ITEM_TARGET_ALREADY_EXISTS", + 4: "FEED_ITEM_SCHEDULES_CANNOT_OVERLAP", + 5: "TARGET_LIMIT_EXCEEDED_FOR_GIVEN_TYPE", + 6: "TOO_MANY_SCHEDULES_PER_DAY", + 7: "CANNOT_HAVE_ENABLED_CAMPAIGN_AND_ENABLED_AD_GROUP_TARGETS", +} +var FeedItemTargetErrorEnum_FeedItemTargetError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MUST_SET_TARGET_ONEOF_ON_CREATE": 2, + "FEED_ITEM_TARGET_ALREADY_EXISTS": 3, + "FEED_ITEM_SCHEDULES_CANNOT_OVERLAP": 4, + "TARGET_LIMIT_EXCEEDED_FOR_GIVEN_TYPE": 5, + "TOO_MANY_SCHEDULES_PER_DAY": 6, + "CANNOT_HAVE_ENABLED_CAMPAIGN_AND_ENABLED_AD_GROUP_TARGETS": 7, +} + +func (x FeedItemTargetErrorEnum_FeedItemTargetError) String() string { + return proto.EnumName(FeedItemTargetErrorEnum_FeedItemTargetError_name, int32(x)) +} +func (FeedItemTargetErrorEnum_FeedItemTargetError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_error_c657952a4e3022c2, []int{0, 0} +} + +// Container for enum describing possible feed item target errors. +type FeedItemTargetErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemTargetErrorEnum) Reset() { *m = FeedItemTargetErrorEnum{} } +func (m *FeedItemTargetErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemTargetErrorEnum) ProtoMessage() {} +func (*FeedItemTargetErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_error_c657952a4e3022c2, []int{0} +} +func (m *FeedItemTargetErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemTargetErrorEnum.Unmarshal(m, b) +} +func (m *FeedItemTargetErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemTargetErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemTargetErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemTargetErrorEnum.Merge(dst, src) +} +func (m *FeedItemTargetErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemTargetErrorEnum.Size(m) +} +func (m *FeedItemTargetErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemTargetErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemTargetErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemTargetErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedItemTargetErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedItemTargetErrorEnum_FeedItemTargetError", FeedItemTargetErrorEnum_FeedItemTargetError_name, FeedItemTargetErrorEnum_FeedItemTargetError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_item_target_error.proto", fileDescriptor_feed_item_target_error_c657952a4e3022c2) +} + +var fileDescriptor_feed_item_target_error_c657952a4e3022c2 = []byte{ + // 462 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x8e, 0xd3, 0x3e, + 0x10, 0xc6, 0xff, 0xcd, 0xfe, 0xd9, 0x95, 0xbc, 0x07, 0xa2, 0x70, 0x00, 0xad, 0x50, 
0x91, 0x0a, + 0x42, 0x9c, 0x12, 0x55, 0x9c, 0xc8, 0x8a, 0x83, 0x1b, 0x4f, 0xb3, 0x11, 0xad, 0x1d, 0x25, 0x4e, + 0xd8, 0xa2, 0x4a, 0xa3, 0x40, 0x4c, 0x54, 0x69, 0x1b, 0x57, 0x49, 0xd8, 0x07, 0xe2, 0xc8, 0x81, + 0x07, 0xd9, 0x47, 0xe1, 0xca, 0x0b, 0xa0, 0xc4, 0x6d, 0xe1, 0xb0, 0x70, 0xf2, 0xa7, 0xf1, 0xef, + 0xfb, 0x3c, 0xf2, 0x0c, 0xb9, 0xac, 0xb4, 0xae, 0x6e, 0x94, 0x57, 0x94, 0xad, 0x67, 0x64, 0xaf, + 0x6e, 0xa7, 0x9e, 0x6a, 0x1a, 0xdd, 0xb4, 0xde, 0x67, 0xa5, 0x4a, 0xdc, 0x74, 0x6a, 0x8b, 0x5d, + 0xd1, 0x54, 0xaa, 0xc3, 0xa1, 0xee, 0xee, 0x1a, 0xdd, 0x69, 0x67, 0x6c, 0x1c, 0x6e, 0x51, 0xb6, + 0xee, 0xd1, 0xec, 0xde, 0x4e, 0x5d, 0x63, 0xbe, 0x78, 0x7a, 0x08, 0xdf, 0x6d, 0xbc, 0xa2, 0xae, + 0x75, 0x57, 0x74, 0x1b, 0x5d, 0xb7, 0xc6, 0x3d, 0xb9, 0xb3, 0xc8, 0xe3, 0xb9, 0x52, 0x65, 0xd4, + 0xa9, 0xad, 0x1c, 0xc2, 0xa1, 0xb7, 0x41, 0xfd, 0x65, 0x3b, 0xf9, 0x6e, 0x91, 0x47, 0xf7, 0xdc, + 0x39, 0x0f, 0xc9, 0x79, 0xc6, 0xd3, 0x18, 0x82, 0x68, 0x1e, 0x01, 0xb3, 0xff, 0x73, 0xce, 0xc9, + 0x59, 0xc6, 0xdf, 0x71, 0xf1, 0x9e, 0xdb, 0x23, 0xe7, 0x39, 0x79, 0xb6, 0xcc, 0x52, 0x89, 0x29, + 0x48, 0x94, 0x34, 0x09, 0x41, 0xa2, 0xe0, 0x20, 0xe6, 0x28, 0x38, 0x06, 0x09, 0x50, 0x09, 0xb6, + 0xd5, 0x43, 0x73, 0x00, 0x86, 0x91, 0x84, 0xe5, 0x81, 0xa2, 0x8b, 0x04, 0x28, 0x5b, 0x21, 0x5c, + 0x47, 0xa9, 0x4c, 0xed, 0x13, 0xe7, 0x25, 0x99, 0xfc, 0x86, 0xd2, 0xe0, 0x0a, 0x58, 0xb6, 0x80, + 0x14, 0x03, 0xca, 0xb9, 0x90, 0x28, 0x72, 0x48, 0x16, 0x34, 0xb6, 0xff, 0x77, 0x5e, 0x91, 0x17, + 0xfb, 0x88, 0x45, 0xb4, 0x8c, 0x24, 0xc2, 0x75, 0x00, 0xc0, 0x80, 0xe1, 0x5c, 0x24, 0x18, 0x46, + 0x39, 0x70, 0x94, 0xab, 0x18, 0xec, 0x07, 0xce, 0x98, 0x5c, 0x48, 0x21, 0x70, 0x49, 0xf9, 0xea, + 0x8f, 0xc0, 0x18, 0x12, 0x64, 0x74, 0x65, 0x9f, 0x3a, 0x6f, 0xc9, 0x9b, 0x7d, 0xfa, 0x15, 0xcd, + 0x01, 0x81, 0xd3, 0xd9, 0x02, 0x18, 0x06, 0x74, 0x19, 0xd3, 0x28, 0xe4, 0x48, 0x39, 0x3b, 0x16, + 0x29, 0xc3, 0x30, 0x11, 0x59, 0xbc, 0x6f, 0x3f, 0xb5, 0xcf, 0x66, 0x3f, 0x47, 0x64, 0xf2, 0x49, + 0x6f, 0xdd, 0x7f, 0x4f, 0x64, 0xf6, 0xe4, 0x9e, 0x4f, 0x8d, 0xfb, 0x69, 0xc4, 0xa3, 0x0f, 0x6c, + 0xef, 0xad, 0xf4, 0x4d, 0x51, 0x57, 0xae, 0x6e, 0x2a, 0xaf, 0x52, 0xf5, 0x30, 0xab, 0xc3, 0x6a, + 0xec, 0x36, 0xed, 0xdf, 0x36, 0xe5, 0xd2, 0x1c, 0x5f, 0xad, 0x93, 0x90, 0xd2, 0x6f, 0xd6, 0x38, + 0x34, 0x61, 0xb4, 0x6c, 0x5d, 0x23, 0x7b, 0x95, 0x4f, 0xdd, 0xe1, 0xc9, 0xf6, 0xee, 0x00, 0xac, + 0x69, 0xd9, 0xae, 0x8f, 0xc0, 0x3a, 0x9f, 0xae, 0x0d, 0xf0, 0xc3, 0x9a, 0x98, 0xaa, 0xef, 0xd3, + 0xb2, 0xf5, 0xfd, 0x23, 0xe2, 0xfb, 0xf9, 0xd4, 0xf7, 0x0d, 0xf4, 0xf1, 0x74, 0xe8, 0xee, 0xf5, + 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf2, 0x09, 0xce, 0x26, 0xc6, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_validation_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_validation_error.pb.go new file mode 100644 index 0000000..e4f39bd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_item_validation_error.pb.go @@ -0,0 +1,591 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/feed_item_validation_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The possible validation errors of a feed item. +type FeedItemValidationErrorEnum_FeedItemValidationError int32 + +const ( + // No value has been specified. + FeedItemValidationErrorEnum_UNSPECIFIED FeedItemValidationErrorEnum_FeedItemValidationError = 0 + // Used for return value only. Represents value unknown in this version. + FeedItemValidationErrorEnum_UNKNOWN FeedItemValidationErrorEnum_FeedItemValidationError = 1 + // String is too short. + FeedItemValidationErrorEnum_STRING_TOO_SHORT FeedItemValidationErrorEnum_FeedItemValidationError = 2 + // String is too long. + FeedItemValidationErrorEnum_STRING_TOO_LONG FeedItemValidationErrorEnum_FeedItemValidationError = 3 + // Value is not provided. + FeedItemValidationErrorEnum_VALUE_NOT_SPECIFIED FeedItemValidationErrorEnum_FeedItemValidationError = 4 + // Phone number format is invalid for region. + FeedItemValidationErrorEnum_INVALID_DOMESTIC_PHONE_NUMBER_FORMAT FeedItemValidationErrorEnum_FeedItemValidationError = 5 + // String does not represent a phone number. + FeedItemValidationErrorEnum_INVALID_PHONE_NUMBER FeedItemValidationErrorEnum_FeedItemValidationError = 6 + // Phone number format is not compatible with country code. + FeedItemValidationErrorEnum_PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY FeedItemValidationErrorEnum_FeedItemValidationError = 7 + // Premium rate number is not allowed. + FeedItemValidationErrorEnum_PREMIUM_RATE_NUMBER_NOT_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 8 + // Phone number type is not allowed. + FeedItemValidationErrorEnum_DISALLOWED_NUMBER_TYPE FeedItemValidationErrorEnum_FeedItemValidationError = 9 + // Specified value is outside of the valid range. + FeedItemValidationErrorEnum_VALUE_OUT_OF_RANGE FeedItemValidationErrorEnum_FeedItemValidationError = 10 + // Call tracking is not supported in the selected country. + FeedItemValidationErrorEnum_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY FeedItemValidationErrorEnum_FeedItemValidationError = 11 + // Customer is not whitelisted for call tracking. + FeedItemValidationErrorEnum_CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING FeedItemValidationErrorEnum_FeedItemValidationError = 12 + // Country code is invalid. + FeedItemValidationErrorEnum_INVALID_COUNTRY_CODE FeedItemValidationErrorEnum_FeedItemValidationError = 13 + // The specified mobile app id is invalid. + FeedItemValidationErrorEnum_INVALID_APP_ID FeedItemValidationErrorEnum_FeedItemValidationError = 14 + // Some required field attributes are missing. + FeedItemValidationErrorEnum_MISSING_ATTRIBUTES_FOR_FIELDS FeedItemValidationErrorEnum_FeedItemValidationError = 15 + // Invalid email button type for email extension. + FeedItemValidationErrorEnum_INVALID_TYPE_ID FeedItemValidationErrorEnum_FeedItemValidationError = 16 + // Email address is invalid. + FeedItemValidationErrorEnum_INVALID_EMAIL_ADDRESS FeedItemValidationErrorEnum_FeedItemValidationError = 17 + // The HTTPS URL in email extension is invalid. + FeedItemValidationErrorEnum_INVALID_HTTPS_URL FeedItemValidationErrorEnum_FeedItemValidationError = 18 + // Delivery address is missing from email extension. 
+ FeedItemValidationErrorEnum_MISSING_DELIVERY_ADDRESS FeedItemValidationErrorEnum_FeedItemValidationError = 19 + // FeedItem scheduling start date comes after end date. + FeedItemValidationErrorEnum_START_DATE_AFTER_END_DATE FeedItemValidationErrorEnum_FeedItemValidationError = 20 + // FeedItem scheduling start time is missing. + FeedItemValidationErrorEnum_MISSING_FEED_ITEM_START_TIME FeedItemValidationErrorEnum_FeedItemValidationError = 21 + // FeedItem scheduling end time is missing. + FeedItemValidationErrorEnum_MISSING_FEED_ITEM_END_TIME FeedItemValidationErrorEnum_FeedItemValidationError = 22 + // Cannot compute system attributes on a FeedItem that has no FeedItemId. + FeedItemValidationErrorEnum_MISSING_FEED_ITEM_ID FeedItemValidationErrorEnum_FeedItemValidationError = 23 + // Call extension vanity phone numbers are not supported. + FeedItemValidationErrorEnum_VANITY_PHONE_NUMBER_NOT_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 24 + // Invalid review text. + FeedItemValidationErrorEnum_INVALID_REVIEW_EXTENSION_SNIPPET FeedItemValidationErrorEnum_FeedItemValidationError = 25 + // Invalid format for numeric value in ad parameter. + FeedItemValidationErrorEnum_INVALID_NUMBER_FORMAT FeedItemValidationErrorEnum_FeedItemValidationError = 26 + // Invalid format for date value in ad parameter. + FeedItemValidationErrorEnum_INVALID_DATE_FORMAT FeedItemValidationErrorEnum_FeedItemValidationError = 27 + // Invalid format for price value in ad parameter. + FeedItemValidationErrorEnum_INVALID_PRICE_FORMAT FeedItemValidationErrorEnum_FeedItemValidationError = 28 + // Unrecognized type given for value in ad parameter. + FeedItemValidationErrorEnum_UNKNOWN_PLACEHOLDER_FIELD FeedItemValidationErrorEnum_FeedItemValidationError = 29 + // Enhanced sitelinks must have both description lines specified. + FeedItemValidationErrorEnum_MISSING_ENHANCED_SITELINK_DESCRIPTION_LINE FeedItemValidationErrorEnum_FeedItemValidationError = 30 + // Review source is ineligible. + FeedItemValidationErrorEnum_REVIEW_EXTENSION_SOURCE_INELIGIBLE FeedItemValidationErrorEnum_FeedItemValidationError = 31 + // Review text cannot contain hyphens or dashes. + FeedItemValidationErrorEnum_HYPHENS_IN_REVIEW_EXTENSION_SNIPPET FeedItemValidationErrorEnum_FeedItemValidationError = 32 + // Review text cannot contain double quote characters. + FeedItemValidationErrorEnum_DOUBLE_QUOTES_IN_REVIEW_EXTENSION_SNIPPET FeedItemValidationErrorEnum_FeedItemValidationError = 33 + // Review text cannot contain quote characters. + FeedItemValidationErrorEnum_QUOTES_IN_REVIEW_EXTENSION_SNIPPET FeedItemValidationErrorEnum_FeedItemValidationError = 34 + // Parameters are encoded in the wrong format. + FeedItemValidationErrorEnum_INVALID_FORM_ENCODED_PARAMS FeedItemValidationErrorEnum_FeedItemValidationError = 35 + // URL parameter name must contain only letters, numbers, underscores, and + // dashes. + FeedItemValidationErrorEnum_INVALID_URL_PARAMETER_NAME FeedItemValidationErrorEnum_FeedItemValidationError = 36 + // Cannot find address location. + FeedItemValidationErrorEnum_NO_GEOCODING_RESULT FeedItemValidationErrorEnum_FeedItemValidationError = 37 + // Review extension text has source name. + FeedItemValidationErrorEnum_SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT FeedItemValidationErrorEnum_FeedItemValidationError = 38 + // Some phone numbers can be shorter than usual. Some of these short numbers + // are carrier-specific, and we disallow those in ad extensions because they + // will not be available to all users. 
+ FeedItemValidationErrorEnum_CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 39 + // Triggered when a request references a placeholder field id that does not + // exist. + FeedItemValidationErrorEnum_INVALID_PLACEHOLDER_FIELD_ID FeedItemValidationErrorEnum_FeedItemValidationError = 40 + // URL contains invalid ValueTrack tags or format. + FeedItemValidationErrorEnum_INVALID_URL_TAG FeedItemValidationErrorEnum_FeedItemValidationError = 41 + // Provided list exceeds acceptable size. + FeedItemValidationErrorEnum_LIST_TOO_LONG FeedItemValidationErrorEnum_FeedItemValidationError = 42 + // Certain combinations of attributes aren't allowed to be specified in the + // same feed item. + FeedItemValidationErrorEnum_INVALID_ATTRIBUTES_COMBINATION FeedItemValidationErrorEnum_FeedItemValidationError = 43 + // An attribute has the same value repeatedly. + FeedItemValidationErrorEnum_DUPLICATE_VALUES FeedItemValidationErrorEnum_FeedItemValidationError = 44 + // Advertisers can link a conversion action with a phone number to indicate + // that sufficiently long calls forwarded to that phone number should be + // counted as conversions of the specified type. This is an error message + // indicating that the conversion action specified is invalid (e.g., the + // conversion action does not exist within the appropriate Google Ads + // account, or it is a type of conversion not appropriate to phone call + // conversions). + FeedItemValidationErrorEnum_INVALID_CALL_CONVERSION_ACTION_ID FeedItemValidationErrorEnum_FeedItemValidationError = 45 + // Tracking template requires final url to be set. + FeedItemValidationErrorEnum_CANNOT_SET_WITHOUT_FINAL_URLS FeedItemValidationErrorEnum_FeedItemValidationError = 46 + // An app id was provided that doesn't exist in the given app store. + FeedItemValidationErrorEnum_APP_ID_DOESNT_EXIST_IN_APP_STORE FeedItemValidationErrorEnum_FeedItemValidationError = 47 + // Invalid U2 final url. + FeedItemValidationErrorEnum_INVALID_FINAL_URL FeedItemValidationErrorEnum_FeedItemValidationError = 48 + // Invalid U2 tracking url. + FeedItemValidationErrorEnum_INVALID_TRACKING_URL FeedItemValidationErrorEnum_FeedItemValidationError = 49 + // Final URL should start from App download URL. + FeedItemValidationErrorEnum_INVALID_FINAL_URL_FOR_APP_DOWNLOAD_URL FeedItemValidationErrorEnum_FeedItemValidationError = 50 + // List provided is too short. + FeedItemValidationErrorEnum_LIST_TOO_SHORT FeedItemValidationErrorEnum_FeedItemValidationError = 51 + // User Action field has invalid value. + FeedItemValidationErrorEnum_INVALID_USER_ACTION FeedItemValidationErrorEnum_FeedItemValidationError = 52 + // Type field has invalid value. + FeedItemValidationErrorEnum_INVALID_TYPE_NAME FeedItemValidationErrorEnum_FeedItemValidationError = 53 + // Change status for event is invalid. + FeedItemValidationErrorEnum_INVALID_EVENT_CHANGE_STATUS FeedItemValidationErrorEnum_FeedItemValidationError = 54 + // The header of a structured snippets extension is not one of the valid + // headers. + FeedItemValidationErrorEnum_INVALID_SNIPPETS_HEADER FeedItemValidationErrorEnum_FeedItemValidationError = 55 + // Android app link is not formatted correctly + FeedItemValidationErrorEnum_INVALID_ANDROID_APP_LINK FeedItemValidationErrorEnum_FeedItemValidationError = 56 + // Phone number incompatible with call tracking for country. 
+ FeedItemValidationErrorEnum_NUMBER_TYPE_WITH_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY FeedItemValidationErrorEnum_FeedItemValidationError = 57 + // The input is identical to a reserved keyword + FeedItemValidationErrorEnum_RESERVED_KEYWORD_OTHER FeedItemValidationErrorEnum_FeedItemValidationError = 58 + // Each option label in the message extension must be unique. + FeedItemValidationErrorEnum_DUPLICATE_OPTION_LABELS FeedItemValidationErrorEnum_FeedItemValidationError = 59 + // Each option prefill in the message extension must be unique. + FeedItemValidationErrorEnum_DUPLICATE_OPTION_PREFILLS FeedItemValidationErrorEnum_FeedItemValidationError = 60 + // In message extensions, the number of optional labels and optional + // prefills must be the same. + FeedItemValidationErrorEnum_UNEQUAL_LIST_LENGTHS FeedItemValidationErrorEnum_FeedItemValidationError = 61 + // All currency codes in an ad extension must be the same. + FeedItemValidationErrorEnum_INCONSISTENT_CURRENCY_CODES FeedItemValidationErrorEnum_FeedItemValidationError = 62 + // Headers in price extension are not unique. + FeedItemValidationErrorEnum_PRICE_EXTENSION_HAS_DUPLICATED_HEADERS FeedItemValidationErrorEnum_FeedItemValidationError = 63 + // Header and description in an item are the same. + FeedItemValidationErrorEnum_ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION FeedItemValidationErrorEnum_FeedItemValidationError = 64 + // Price extension has too few items. + FeedItemValidationErrorEnum_PRICE_EXTENSION_HAS_TOO_FEW_ITEMS FeedItemValidationErrorEnum_FeedItemValidationError = 65 + // The given value is not supported. + FeedItemValidationErrorEnum_UNSUPPORTED_VALUE FeedItemValidationErrorEnum_FeedItemValidationError = 66 + // Invalid final mobile url. + FeedItemValidationErrorEnum_INVALID_FINAL_MOBILE_URL FeedItemValidationErrorEnum_FeedItemValidationError = 67 + // The given string value of Label contains invalid characters + FeedItemValidationErrorEnum_INVALID_KEYWORDLESS_AD_RULE_LABEL FeedItemValidationErrorEnum_FeedItemValidationError = 68 + // The given URL contains value track parameters. + FeedItemValidationErrorEnum_VALUE_TRACK_PARAMETER_NOT_SUPPORTED FeedItemValidationErrorEnum_FeedItemValidationError = 69 + // The given value is not supported in the selected language of an + // extension. + FeedItemValidationErrorEnum_UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE FeedItemValidationErrorEnum_FeedItemValidationError = 70 + // The iOS app link is not formatted correctly. + FeedItemValidationErrorEnum_INVALID_IOS_APP_LINK FeedItemValidationErrorEnum_FeedItemValidationError = 71 + // iOS app link or iOS app store id is missing. + FeedItemValidationErrorEnum_MISSING_IOS_APP_LINK_OR_IOS_APP_STORE_ID FeedItemValidationErrorEnum_FeedItemValidationError = 72 + // Promotion time is invalid. + FeedItemValidationErrorEnum_PROMOTION_INVALID_TIME FeedItemValidationErrorEnum_FeedItemValidationError = 73 + // Both the percent off and money amount off fields are set. + FeedItemValidationErrorEnum_PROMOTION_CANNOT_SET_PERCENT_OFF_AND_MONEY_AMOUNT_OFF FeedItemValidationErrorEnum_FeedItemValidationError = 74 + // Both the promotion code and orders over amount fields are set. + FeedItemValidationErrorEnum_PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT FeedItemValidationErrorEnum_FeedItemValidationError = 75 + // Too many decimal places are specified. + FeedItemValidationErrorEnum_TOO_MANY_DECIMAL_PLACES_SPECIFIED FeedItemValidationErrorEnum_FeedItemValidationError = 76 + // Ad Customizers are present and not allowed. 
+ FeedItemValidationErrorEnum_AD_CUSTOMIZERS_NOT_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 77 + // Language code is not valid. + FeedItemValidationErrorEnum_INVALID_LANGUAGE_CODE FeedItemValidationErrorEnum_FeedItemValidationError = 78 + // Language is not supported. + FeedItemValidationErrorEnum_UNSUPPORTED_LANGUAGE FeedItemValidationErrorEnum_FeedItemValidationError = 79 + // IF Function is present and not allowed. + FeedItemValidationErrorEnum_IF_FUNCTION_NOT_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 80 + // Final url suffix is not valid. + FeedItemValidationErrorEnum_INVALID_FINAL_URL_SUFFIX FeedItemValidationErrorEnum_FeedItemValidationError = 81 + // Final url suffix contains an invalid tag. + FeedItemValidationErrorEnum_INVALID_TAG_IN_FINAL_URL_SUFFIX FeedItemValidationErrorEnum_FeedItemValidationError = 82 + // Final url suffix is formatted incorrectly. + FeedItemValidationErrorEnum_INVALID_FINAL_URL_SUFFIX_FORMAT FeedItemValidationErrorEnum_FeedItemValidationError = 83 + // Consent for call recording, which is required for the use of call + // extensions, was not provided by the advertiser. Please see + // https://support.google.com/google-ads/answer/7412639. + FeedItemValidationErrorEnum_CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED FeedItemValidationErrorEnum_FeedItemValidationError = 84 + // Multiple message delivery options are set. + FeedItemValidationErrorEnum_ONLY_ONE_DELIVERY_OPTION_IS_ALLOWED FeedItemValidationErrorEnum_FeedItemValidationError = 85 + // No message delivery option is set. + FeedItemValidationErrorEnum_NO_DELIVERY_OPTION_IS_SET FeedItemValidationErrorEnum_FeedItemValidationError = 86 + // String value of conversion reporting state field is not valid. + FeedItemValidationErrorEnum_INVALID_CONVERSION_REPORTING_STATE FeedItemValidationErrorEnum_FeedItemValidationError = 87 + // Image size is not right. + FeedItemValidationErrorEnum_IMAGE_SIZE_WRONG FeedItemValidationErrorEnum_FeedItemValidationError = 88 + // Email delivery is not supported in the country specified in the country + // code field. + FeedItemValidationErrorEnum_EMAIL_DELIVERY_NOT_AVAILABLE_IN_COUNTRY FeedItemValidationErrorEnum_FeedItemValidationError = 89 + // Auto reply is not supported in the country specified in the country code + // field. + FeedItemValidationErrorEnum_AUTO_REPLY_NOT_AVAILABLE_IN_COUNTRY FeedItemValidationErrorEnum_FeedItemValidationError = 90 + // Invalid value specified for latitude. + FeedItemValidationErrorEnum_INVALID_LATITUDE_VALUE FeedItemValidationErrorEnum_FeedItemValidationError = 91 + // Invalid value specified for longitude. + FeedItemValidationErrorEnum_INVALID_LONGITUDE_VALUE FeedItemValidationErrorEnum_FeedItemValidationError = 92 + // Too many label fields provided. + FeedItemValidationErrorEnum_TOO_MANY_LABELS FeedItemValidationErrorEnum_FeedItemValidationError = 93 + // Invalid image url. + FeedItemValidationErrorEnum_INVALID_IMAGE_URL FeedItemValidationErrorEnum_FeedItemValidationError = 94 + // Latitude value is missing. + FeedItemValidationErrorEnum_MISSING_LATITUDE_VALUE FeedItemValidationErrorEnum_FeedItemValidationError = 95 + // Longitude value is missing. 
+ FeedItemValidationErrorEnum_MISSING_LONGITUDE_VALUE FeedItemValidationErrorEnum_FeedItemValidationError = 96 +) + +var FeedItemValidationErrorEnum_FeedItemValidationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "STRING_TOO_SHORT", + 3: "STRING_TOO_LONG", + 4: "VALUE_NOT_SPECIFIED", + 5: "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT", + 6: "INVALID_PHONE_NUMBER", + 7: "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY", + 8: "PREMIUM_RATE_NUMBER_NOT_ALLOWED", + 9: "DISALLOWED_NUMBER_TYPE", + 10: "VALUE_OUT_OF_RANGE", + 11: "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY", + 12: "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING", + 13: "INVALID_COUNTRY_CODE", + 14: "INVALID_APP_ID", + 15: "MISSING_ATTRIBUTES_FOR_FIELDS", + 16: "INVALID_TYPE_ID", + 17: "INVALID_EMAIL_ADDRESS", + 18: "INVALID_HTTPS_URL", + 19: "MISSING_DELIVERY_ADDRESS", + 20: "START_DATE_AFTER_END_DATE", + 21: "MISSING_FEED_ITEM_START_TIME", + 22: "MISSING_FEED_ITEM_END_TIME", + 23: "MISSING_FEED_ITEM_ID", + 24: "VANITY_PHONE_NUMBER_NOT_ALLOWED", + 25: "INVALID_REVIEW_EXTENSION_SNIPPET", + 26: "INVALID_NUMBER_FORMAT", + 27: "INVALID_DATE_FORMAT", + 28: "INVALID_PRICE_FORMAT", + 29: "UNKNOWN_PLACEHOLDER_FIELD", + 30: "MISSING_ENHANCED_SITELINK_DESCRIPTION_LINE", + 31: "REVIEW_EXTENSION_SOURCE_INELIGIBLE", + 32: "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET", + 33: "DOUBLE_QUOTES_IN_REVIEW_EXTENSION_SNIPPET", + 34: "QUOTES_IN_REVIEW_EXTENSION_SNIPPET", + 35: "INVALID_FORM_ENCODED_PARAMS", + 36: "INVALID_URL_PARAMETER_NAME", + 37: "NO_GEOCODING_RESULT", + 38: "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT", + 39: "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED", + 40: "INVALID_PLACEHOLDER_FIELD_ID", + 41: "INVALID_URL_TAG", + 42: "LIST_TOO_LONG", + 43: "INVALID_ATTRIBUTES_COMBINATION", + 44: "DUPLICATE_VALUES", + 45: "INVALID_CALL_CONVERSION_ACTION_ID", + 46: "CANNOT_SET_WITHOUT_FINAL_URLS", + 47: "APP_ID_DOESNT_EXIST_IN_APP_STORE", + 48: "INVALID_FINAL_URL", + 49: "INVALID_TRACKING_URL", + 50: "INVALID_FINAL_URL_FOR_APP_DOWNLOAD_URL", + 51: "LIST_TOO_SHORT", + 52: "INVALID_USER_ACTION", + 53: "INVALID_TYPE_NAME", + 54: "INVALID_EVENT_CHANGE_STATUS", + 55: "INVALID_SNIPPETS_HEADER", + 56: "INVALID_ANDROID_APP_LINK", + 57: "NUMBER_TYPE_WITH_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY", + 58: "RESERVED_KEYWORD_OTHER", + 59: "DUPLICATE_OPTION_LABELS", + 60: "DUPLICATE_OPTION_PREFILLS", + 61: "UNEQUAL_LIST_LENGTHS", + 62: "INCONSISTENT_CURRENCY_CODES", + 63: "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS", + 64: "ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION", + 65: "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS", + 66: "UNSUPPORTED_VALUE", + 67: "INVALID_FINAL_MOBILE_URL", + 68: "INVALID_KEYWORDLESS_AD_RULE_LABEL", + 69: "VALUE_TRACK_PARAMETER_NOT_SUPPORTED", + 70: "UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE", + 71: "INVALID_IOS_APP_LINK", + 72: "MISSING_IOS_APP_LINK_OR_IOS_APP_STORE_ID", + 73: "PROMOTION_INVALID_TIME", + 74: "PROMOTION_CANNOT_SET_PERCENT_OFF_AND_MONEY_AMOUNT_OFF", + 75: "PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT", + 76: "TOO_MANY_DECIMAL_PLACES_SPECIFIED", + 77: "AD_CUSTOMIZERS_NOT_ALLOWED", + 78: "INVALID_LANGUAGE_CODE", + 79: "UNSUPPORTED_LANGUAGE", + 80: "IF_FUNCTION_NOT_ALLOWED", + 81: "INVALID_FINAL_URL_SUFFIX", + 82: "INVALID_TAG_IN_FINAL_URL_SUFFIX", + 83: "INVALID_FINAL_URL_SUFFIX_FORMAT", + 84: "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED", + 85: "ONLY_ONE_DELIVERY_OPTION_IS_ALLOWED", + 86: "NO_DELIVERY_OPTION_IS_SET", + 87: "INVALID_CONVERSION_REPORTING_STATE", + 88: "IMAGE_SIZE_WRONG", + 89: "EMAIL_DELIVERY_NOT_AVAILABLE_IN_COUNTRY", + 90: 
"AUTO_REPLY_NOT_AVAILABLE_IN_COUNTRY", + 91: "INVALID_LATITUDE_VALUE", + 92: "INVALID_LONGITUDE_VALUE", + 93: "TOO_MANY_LABELS", + 94: "INVALID_IMAGE_URL", + 95: "MISSING_LATITUDE_VALUE", + 96: "MISSING_LONGITUDE_VALUE", +} +var FeedItemValidationErrorEnum_FeedItemValidationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "STRING_TOO_SHORT": 2, + "STRING_TOO_LONG": 3, + "VALUE_NOT_SPECIFIED": 4, + "INVALID_DOMESTIC_PHONE_NUMBER_FORMAT": 5, + "INVALID_PHONE_NUMBER": 6, + "PHONE_NUMBER_NOT_SUPPORTED_FOR_COUNTRY": 7, + "PREMIUM_RATE_NUMBER_NOT_ALLOWED": 8, + "DISALLOWED_NUMBER_TYPE": 9, + "VALUE_OUT_OF_RANGE": 10, + "CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 11, + "CUSTOMER_NOT_WHITELISTED_FOR_CALLTRACKING": 12, + "INVALID_COUNTRY_CODE": 13, + "INVALID_APP_ID": 14, + "MISSING_ATTRIBUTES_FOR_FIELDS": 15, + "INVALID_TYPE_ID": 16, + "INVALID_EMAIL_ADDRESS": 17, + "INVALID_HTTPS_URL": 18, + "MISSING_DELIVERY_ADDRESS": 19, + "START_DATE_AFTER_END_DATE": 20, + "MISSING_FEED_ITEM_START_TIME": 21, + "MISSING_FEED_ITEM_END_TIME": 22, + "MISSING_FEED_ITEM_ID": 23, + "VANITY_PHONE_NUMBER_NOT_ALLOWED": 24, + "INVALID_REVIEW_EXTENSION_SNIPPET": 25, + "INVALID_NUMBER_FORMAT": 26, + "INVALID_DATE_FORMAT": 27, + "INVALID_PRICE_FORMAT": 28, + "UNKNOWN_PLACEHOLDER_FIELD": 29, + "MISSING_ENHANCED_SITELINK_DESCRIPTION_LINE": 30, + "REVIEW_EXTENSION_SOURCE_INELIGIBLE": 31, + "HYPHENS_IN_REVIEW_EXTENSION_SNIPPET": 32, + "DOUBLE_QUOTES_IN_REVIEW_EXTENSION_SNIPPET": 33, + "QUOTES_IN_REVIEW_EXTENSION_SNIPPET": 34, + "INVALID_FORM_ENCODED_PARAMS": 35, + "INVALID_URL_PARAMETER_NAME": 36, + "NO_GEOCODING_RESULT": 37, + "SOURCE_NAME_IN_REVIEW_EXTENSION_TEXT": 38, + "CARRIER_SPECIFIC_SHORT_NUMBER_NOT_ALLOWED": 39, + "INVALID_PLACEHOLDER_FIELD_ID": 40, + "INVALID_URL_TAG": 41, + "LIST_TOO_LONG": 42, + "INVALID_ATTRIBUTES_COMBINATION": 43, + "DUPLICATE_VALUES": 44, + "INVALID_CALL_CONVERSION_ACTION_ID": 45, + "CANNOT_SET_WITHOUT_FINAL_URLS": 46, + "APP_ID_DOESNT_EXIST_IN_APP_STORE": 47, + "INVALID_FINAL_URL": 48, + "INVALID_TRACKING_URL": 49, + "INVALID_FINAL_URL_FOR_APP_DOWNLOAD_URL": 50, + "LIST_TOO_SHORT": 51, + "INVALID_USER_ACTION": 52, + "INVALID_TYPE_NAME": 53, + "INVALID_EVENT_CHANGE_STATUS": 54, + "INVALID_SNIPPETS_HEADER": 55, + "INVALID_ANDROID_APP_LINK": 56, + "NUMBER_TYPE_WITH_CALLTRACKING_NOT_SUPPORTED_FOR_COUNTRY": 57, + "RESERVED_KEYWORD_OTHER": 58, + "DUPLICATE_OPTION_LABELS": 59, + "DUPLICATE_OPTION_PREFILLS": 60, + "UNEQUAL_LIST_LENGTHS": 61, + "INCONSISTENT_CURRENCY_CODES": 62, + "PRICE_EXTENSION_HAS_DUPLICATED_HEADERS": 63, + "ITEM_HAS_DUPLICATED_HEADER_AND_DESCRIPTION": 64, + "PRICE_EXTENSION_HAS_TOO_FEW_ITEMS": 65, + "UNSUPPORTED_VALUE": 66, + "INVALID_FINAL_MOBILE_URL": 67, + "INVALID_KEYWORDLESS_AD_RULE_LABEL": 68, + "VALUE_TRACK_PARAMETER_NOT_SUPPORTED": 69, + "UNSUPPORTED_VALUE_IN_SELECTED_LANGUAGE": 70, + "INVALID_IOS_APP_LINK": 71, + "MISSING_IOS_APP_LINK_OR_IOS_APP_STORE_ID": 72, + "PROMOTION_INVALID_TIME": 73, + "PROMOTION_CANNOT_SET_PERCENT_OFF_AND_MONEY_AMOUNT_OFF": 74, + "PROMOTION_CANNOT_SET_PROMOTION_CODE_AND_ORDERS_OVER_AMOUNT": 75, + "TOO_MANY_DECIMAL_PLACES_SPECIFIED": 76, + "AD_CUSTOMIZERS_NOT_ALLOWED": 77, + "INVALID_LANGUAGE_CODE": 78, + "UNSUPPORTED_LANGUAGE": 79, + "IF_FUNCTION_NOT_ALLOWED": 80, + "INVALID_FINAL_URL_SUFFIX": 81, + "INVALID_TAG_IN_FINAL_URL_SUFFIX": 82, + "INVALID_FINAL_URL_SUFFIX_FORMAT": 83, + "CUSTOMER_CONSENT_FOR_CALL_RECORDING_REQUIRED": 84, + "ONLY_ONE_DELIVERY_OPTION_IS_ALLOWED": 85, + "NO_DELIVERY_OPTION_IS_SET": 86, + 
"INVALID_CONVERSION_REPORTING_STATE": 87, + "IMAGE_SIZE_WRONG": 88, + "EMAIL_DELIVERY_NOT_AVAILABLE_IN_COUNTRY": 89, + "AUTO_REPLY_NOT_AVAILABLE_IN_COUNTRY": 90, + "INVALID_LATITUDE_VALUE": 91, + "INVALID_LONGITUDE_VALUE": 92, + "TOO_MANY_LABELS": 93, + "INVALID_IMAGE_URL": 94, + "MISSING_LATITUDE_VALUE": 95, + "MISSING_LONGITUDE_VALUE": 96, +} + +func (x FeedItemValidationErrorEnum_FeedItemValidationError) String() string { + return proto.EnumName(FeedItemValidationErrorEnum_FeedItemValidationError_name, int32(x)) +} +func (FeedItemValidationErrorEnum_FeedItemValidationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_item_validation_error_1e58e32e34431998, []int{0, 0} +} + +// Container for enum describing possible validation errors of a feed item. +type FeedItemValidationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemValidationErrorEnum) Reset() { *m = FeedItemValidationErrorEnum{} } +func (m *FeedItemValidationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedItemValidationErrorEnum) ProtoMessage() {} +func (*FeedItemValidationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_validation_error_1e58e32e34431998, []int{0} +} +func (m *FeedItemValidationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemValidationErrorEnum.Unmarshal(m, b) +} +func (m *FeedItemValidationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemValidationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedItemValidationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemValidationErrorEnum.Merge(dst, src) +} +func (m *FeedItemValidationErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedItemValidationErrorEnum.Size(m) +} +func (m *FeedItemValidationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemValidationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemValidationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedItemValidationErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedItemValidationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedItemValidationErrorEnum_FeedItemValidationError", FeedItemValidationErrorEnum_FeedItemValidationError_name, FeedItemValidationErrorEnum_FeedItemValidationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_item_validation_error.proto", fileDescriptor_feed_item_validation_error_1e58e32e34431998) +} + +var fileDescriptor_feed_item_validation_error_1e58e32e34431998 = []byte{ + // 1643 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x57, 0xdd, 0x76, 0x1b, 0xb7, + 0x11, 0xae, 0x9d, 0x36, 0x69, 0xe1, 0x26, 0x86, 0xe1, 0x5f, 0xd9, 0xb2, 0x6c, 0xcb, 0x8e, 0xed, + 0x38, 0x0e, 0x15, 0x35, 0x4d, 0xd3, 0xd0, 0x6d, 0x52, 0x70, 0x31, 0x4b, 0xa2, 0xc2, 0x02, 0x6b, + 0x00, 0x4b, 0x8a, 0xaa, 0x5a, 0x54, 0x2d, 0x59, 0x1d, 0x9d, 0x63, 0x89, 0x3e, 0xa2, 0xea, 0x27, + 0xe9, 0x13, 0xf4, 0xb2, 0xd7, 0x7d, 0x8a, 0x3e, 0x4a, 0x5f, 0xa0, 0xb7, 0x3d, 0x83, 0xfd, 0xe1, + 0x4a, 0x94, 0x7c, 0x72, 0xc5, 0xe5, 0xce, 0x0f, 0x80, 0x6f, 0xbe, 0xf9, 0x06, 0x4b, 0xbe, 0xdf, + 0x9f, 0xcd, 0xf6, 0xdf, 0x4c, 0x37, 0xf6, 0x26, 0xf3, 0x8d, 0xf2, 0x11, 0x9f, 0xde, 0x6d, 0x6e, + 0x4c, 0x8f, 0x8f, 0x67, 0xc7, 0xf3, 0x8d, 0xbf, 0x4d, 0xa7, 0x93, 0x70, 0x70, 0x32, 0x3d, 
0x0c, + 0xef, 0xf6, 0xde, 0x1c, 0x4c, 0xf6, 0x4e, 0x0e, 0x66, 0x47, 0x21, 0xda, 0x3a, 0x6f, 0x8f, 0x67, + 0x27, 0x33, 0xb6, 0x56, 0x46, 0x75, 0xf6, 0x26, 0xf3, 0x4e, 0x93, 0xa0, 0xf3, 0x6e, 0xb3, 0x53, + 0x26, 0xb8, 0xbb, 0x5a, 0x2f, 0xf0, 0xf6, 0x60, 0x63, 0xef, 0xe8, 0x68, 0x76, 0x12, 0x53, 0xcc, + 0xcb, 0xe8, 0xf5, 0x7f, 0xaf, 0x90, 0x7b, 0xe9, 0x74, 0x3a, 0x91, 0x27, 0xd3, 0xc3, 0x61, 0xb3, + 0x00, 0x60, 0x28, 0x1c, 0xfd, 0xfd, 0x70, 0xfd, 0x1f, 0x2b, 0xe4, 0xf6, 0x05, 0x76, 0x76, 0x95, + 0x5c, 0x29, 0xb4, 0xcb, 0x21, 0x91, 0xa9, 0x04, 0x41, 0x7f, 0xc4, 0xae, 0x90, 0x8f, 0x0a, 0xbd, + 0xa5, 0xcd, 0x48, 0xd3, 0x4b, 0xec, 0x06, 0xa1, 0xce, 0x5b, 0xa9, 0xfb, 0xc1, 0x1b, 0x13, 0xdc, + 0xc0, 0x58, 0x4f, 0x2f, 0xb3, 0xeb, 0xe4, 0x6a, 0xeb, 0xad, 0x32, 0xba, 0x4f, 0x3f, 0x60, 0xb7, + 0xc9, 0xf5, 0x21, 0x57, 0x05, 0x04, 0x6d, 0x7c, 0x58, 0x24, 0xfc, 0x31, 0x7b, 0x4e, 0x9e, 0x48, + 0x3d, 0xe4, 0x4a, 0x8a, 0x20, 0x4c, 0x06, 0xce, 0xcb, 0x24, 0xe4, 0x03, 0xa3, 0x21, 0xe8, 0x22, + 0xeb, 0x81, 0x0d, 0xa9, 0xb1, 0x19, 0xf7, 0xf4, 0x27, 0xec, 0x0e, 0xb9, 0x51, 0x7b, 0xb6, 0x1d, + 0xe8, 0x87, 0xec, 0x05, 0x79, 0x7a, 0x2a, 0x24, 0xae, 0x51, 0xe4, 0xb9, 0xb1, 0x1e, 0x04, 0x26, + 0x08, 0x89, 0x29, 0xb4, 0xb7, 0x63, 0xfa, 0x11, 0x7b, 0x4c, 0x1e, 0xe4, 0x16, 0x32, 0x59, 0x64, + 0xc1, 0x72, 0x7f, 0x2a, 0x84, 0x2b, 0x65, 0x46, 0x20, 0xe8, 0x4f, 0xd9, 0x5d, 0x72, 0x4b, 0x48, + 0x57, 0xfd, 0xaf, 0x5d, 0xfc, 0x38, 0x07, 0xfa, 0x33, 0x76, 0x8b, 0xb0, 0xf2, 0x24, 0xa6, 0xf0, + 0xc1, 0xa4, 0xc1, 0x72, 0xdd, 0x07, 0x4a, 0x70, 0x13, 0x09, 0x57, 0xca, 0x5b, 0x9e, 0x6c, 0xe1, + 0xe1, 0x2f, 0xde, 0xc4, 0x15, 0xf6, 0x05, 0xf9, 0x2c, 0x29, 0x9c, 0x37, 0x59, 0xb5, 0xf2, 0x68, + 0x20, 0x3d, 0x28, 0xe9, 0x1a, 0xcf, 0x56, 0x22, 0xfa, 0xf3, 0xf6, 0xc9, 0xab, 0x1c, 0x21, 0x31, + 0x02, 0xe8, 0xc7, 0x8c, 0x91, 0x4f, 0x6a, 0x0b, 0xcf, 0xf3, 0x20, 0x05, 0xfd, 0x84, 0x3d, 0x22, + 0xf7, 0x33, 0xe9, 0x1c, 0xee, 0x81, 0x7b, 0x6f, 0x65, 0xaf, 0xf0, 0xe0, 0x62, 0xda, 0x54, 0x82, + 0x12, 0x8e, 0x5e, 0xc5, 0x12, 0xd5, 0x61, 0x78, 0x2a, 0x8c, 0xa3, 0x6c, 0x85, 0xdc, 0xac, 0x5f, + 0x42, 0xc6, 0xa5, 0x0a, 0x5c, 0x08, 0x0b, 0xce, 0xd1, 0x6b, 0xec, 0x26, 0xb9, 0x56, 0x9b, 0x06, + 0xde, 0xe7, 0x2e, 0x14, 0x56, 0x51, 0xc6, 0x56, 0xc9, 0x9d, 0x7a, 0x25, 0x01, 0x4a, 0x0e, 0xc1, + 0x8e, 0x9b, 0xa0, 0xeb, 0xec, 0x3e, 0x59, 0x71, 0x9e, 0x5b, 0x1f, 0x04, 0xe2, 0xcc, 0x53, 0x0f, + 0x36, 0x80, 0x16, 0xf1, 0x2f, 0xbd, 0xc1, 0x1e, 0x92, 0xd5, 0x3a, 0x38, 0x05, 0x10, 0x41, 0x7a, + 0xc8, 0x42, 0x19, 0xe0, 0x65, 0x06, 0xf4, 0x26, 0x5b, 0x23, 0x77, 0x97, 0x3d, 0x30, 0x43, 0xb4, + 0xdf, 0x42, 0x58, 0x96, 0xed, 0x52, 0xd0, 0xdb, 0x58, 0xe4, 0x21, 0xd7, 0xd2, 0x8f, 0xc3, 0x12, + 0x2f, 0xea, 0x22, 0xdf, 0x61, 0x4f, 0xc8, 0xc3, 0xfa, 0x50, 0x16, 0x86, 0x12, 0x46, 0x01, 0xb6, + 0x3d, 0x68, 0x27, 0x8d, 0x0e, 0x4e, 0xcb, 0x3c, 0x07, 0x4f, 0x57, 0xda, 0xa8, 0x9c, 0x26, 0xe4, + 0x5d, 0xe4, 0x74, 0x43, 0x5d, 0x3c, 0x62, 0x65, 0xb8, 0x77, 0x8a, 0xa9, 0x56, 0x26, 0x8d, 0x65, + 0x15, 0x31, 0xa9, 0xda, 0x27, 0xe4, 0x8a, 0x27, 0x30, 0x30, 0x4a, 0x40, 0x55, 0x18, 0x7a, 0x9f, + 0x75, 0xc8, 0x8b, 0xfa, 0x44, 0xa0, 0x07, 0x5c, 0x27, 0x20, 0x82, 0x8b, 0xd4, 0xd0, 0x5b, 0x41, + 0x80, 0x4b, 0xac, 0xcc, 0x3d, 0xee, 0x4f, 0x49, 0x0d, 0x74, 0x8d, 0x3d, 0x25, 0xeb, 0xcb, 0x5b, + 0x37, 0x85, 0x4d, 0x20, 0x48, 0x0d, 0x4a, 0xf6, 0x65, 0x4f, 0x01, 0x7d, 0xc0, 0x9e, 0x91, 0xc7, + 0x83, 0x71, 0x3e, 0x00, 0xed, 0x82, 0xd4, 0x17, 0x9f, 0xf6, 0x21, 0x12, 0x53, 0x98, 0xa2, 0xa7, + 0x20, 0xbc, 0x2e, 0x0c, 0xd2, 0xe6, 0x7d, 0xee, 0x8f, 0x70, 0xfd, 0x1f, 0xe0, 0xb7, 0xce, 0x1e, + 0x90, 0x7b, 0x35, 
0x20, 0x08, 0x45, 0x00, 0x8d, 0xfc, 0x15, 0x21, 0xe7, 0x96, 0x67, 0x8e, 0x3e, + 0xc6, 0x52, 0xd7, 0x0e, 0x85, 0x55, 0xe5, 0x7b, 0x40, 0xc2, 0x68, 0x9e, 0x01, 0x7d, 0x82, 0x50, + 0x6b, 0x13, 0xfa, 0x60, 0x12, 0x23, 0x10, 0x1d, 0x0b, 0xae, 0x50, 0x9e, 0x7e, 0x8a, 0xf2, 0x51, + 0x1d, 0x18, 0x3d, 0xcf, 0xdd, 0x86, 0x87, 0x6d, 0x4f, 0x9f, 0xc6, 0x9e, 0xe3, 0xd6, 0x4a, 0xb0, + 0xb5, 0xfe, 0x24, 0xa5, 0x64, 0x9d, 0xc7, 0x8e, 0x67, 0x48, 0xcf, 0xa6, 0x86, 0x67, 0x2b, 0x85, + 0x24, 0x7b, 0xde, 0x6e, 0x22, 0xdc, 0xb3, 0xe7, 0x7d, 0xfa, 0x19, 0xbb, 0x46, 0x3e, 0xc6, 0x3e, + 0x5e, 0x48, 0xdf, 0x0b, 0xb6, 0x4e, 0xd6, 0x9a, 0x1e, 0x5d, 0xf4, 0x63, 0x62, 0xb2, 0x9e, 0xd4, + 0x1c, 0xab, 0x49, 0x3f, 0x47, 0x25, 0x15, 0x45, 0xae, 0x64, 0x82, 0x3c, 0x8a, 0xf2, 0xe2, 0xe8, + 0x4b, 0xf6, 0x29, 0x79, 0xd4, 0xf4, 0x3d, 0x57, 0x2a, 0x24, 0x46, 0x0f, 0xc1, 0xc6, 0x43, 0xf1, + 0x24, 0xf2, 0x40, 0x0a, 0xfa, 0x05, 0x36, 0x7c, 0xc2, 0x75, 0xd4, 0x1b, 0xf0, 0x61, 0x24, 0xfd, + 0x00, 0xa5, 0x29, 0x95, 0x9a, 0x2b, 0xdc, 0x99, 0xa3, 0x1d, 0xe4, 0x7a, 0xa9, 0x0f, 0x41, 0x18, + 0x70, 0xda, 0x07, 0xd8, 0xc6, 0x5d, 0x4a, 0x1d, 0x65, 0xc3, 0x79, 0x63, 0x81, 0x6e, 0xb4, 0xdb, + 0xbc, 0x89, 0xa6, 0x5f, 0xb6, 0xe9, 0xdc, 0xa8, 0x1b, 0x5a, 0x36, 0x51, 0xf3, 0x96, 0x02, 0xa2, + 0xd2, 0x60, 0x56, 0x61, 0x46, 0x5a, 0x19, 0x1e, 0xd1, 0xa1, 0xbf, 0x40, 0xa9, 0x6a, 0x90, 0x29, + 0x47, 0xc5, 0x57, 0xed, 0x0e, 0x2a, 0x1c, 0xd8, 0xea, 0x54, 0xf4, 0x97, 0xed, 0x9d, 0x44, 0x81, + 0x8a, 0x34, 0xf8, 0xba, 0xcd, 0x23, 0x18, 0x82, 0xf6, 0x21, 0x19, 0xa0, 0xfa, 0xa2, 0x6c, 0xf8, + 0xc2, 0xd1, 0x5f, 0xb1, 0x7b, 0xe4, 0x76, 0xed, 0x50, 0xb1, 0xcf, 0x85, 0x01, 0x70, 0x01, 0x96, + 0x7e, 0x83, 0x72, 0xd5, 0x14, 0x42, 0x0b, 0x6b, 0x2a, 0xd1, 0xc4, 0xe6, 0xa2, 0xbf, 0x66, 0xaf, + 0xc8, 0x37, 0x2d, 0xa1, 0x8f, 0x30, 0x86, 0x1f, 0x28, 0xe8, 0xdf, 0xe2, 0xc0, 0xb0, 0xe0, 0xc0, + 0x0e, 0x41, 0x84, 0x2d, 0x18, 0x8f, 0x8c, 0x15, 0xc1, 0xf8, 0x01, 0x58, 0xda, 0xc5, 0x3d, 0x2d, + 0x6a, 0x6b, 0xaa, 0xfe, 0xe5, 0x3d, 0x50, 0x8e, 0xbe, 0x42, 0x41, 0x58, 0x32, 0xe6, 0x16, 0x52, + 0xa9, 0x94, 0xa3, 0xbf, 0x41, 0xe8, 0x0b, 0x0d, 0xaf, 0x0b, 0xae, 0x42, 0x04, 0x4f, 0x81, 0xee, + 0xfb, 0x81, 0xa3, 0xbf, 0x2d, 0xa1, 0x48, 0x8c, 0x76, 0x38, 0x35, 0x10, 0x89, 0xc2, 0x5a, 0xd0, + 0x49, 0x39, 0x19, 0x1c, 0xfd, 0x2e, 0x0e, 0xc5, 0x28, 0x3e, 0x8b, 0x4e, 0x18, 0x70, 0x17, 0x9a, + 0xd5, 0x44, 0x05, 0x8c, 0xa3, 0xdf, 0xa3, 0xee, 0x44, 0xf1, 0x3c, 0xd7, 0x01, 0xc1, 0x6a, 0x8b, + 0x0f, 0xfd, 0x1d, 0x12, 0xf3, 0xbc, 0xdc, 0x58, 0xda, 0x14, 0x46, 0x51, 0x88, 0x1d, 0xe5, 0x58, + 0xc5, 0x42, 0x2f, 0x20, 0x8b, 0xbc, 0xa6, 0xbd, 0x76, 0x1d, 0x4a, 0xd6, 0x64, 0xa6, 0x27, 0x15, + 0x44, 0x9e, 0x24, 0x6d, 0xd2, 0x57, 0x48, 0x2a, 0x70, 0x2e, 0x70, 0x11, 0x6c, 0xa1, 0xa0, 0x44, + 0x8e, 0x0a, 0x94, 0xb4, 0x72, 0x0c, 0xc7, 0xfa, 0xb4, 0x15, 0xa3, 0x5d, 0x26, 0x0a, 0x88, 0xc3, + 0xd2, 0x26, 0x90, 0xf8, 0x0e, 0x14, 0x24, 0xf8, 0x46, 0x71, 0xdd, 0x2f, 0x78, 0x1f, 0x68, 0xda, + 0x66, 0xba, 0x34, 0x6e, 0xc1, 0x8e, 0x3e, 0x7b, 0x49, 0x9e, 0xd7, 0xca, 0xdc, 0xb6, 0x04, 0x63, + 0x9b, 0xff, 0xb1, 0x8b, 0xb0, 0x23, 0x07, 0x48, 0x87, 0xdc, 0x9a, 0xcc, 0x94, 0x3d, 0x5a, 0x13, + 0x19, 0xa7, 0x96, 0x64, 0xdf, 0x92, 0xaf, 0x17, 0xb6, 0x56, 0xdf, 0xe6, 0x60, 0x13, 0x2c, 0xa4, + 0x49, 0xd3, 0x88, 0x78, 0x66, 0x34, 0x8c, 0x03, 0xcf, 0x90, 0x63, 0xf8, 0x92, 0xfe, 0x9e, 0x7d, + 0x47, 0xba, 0xe7, 0x87, 0x2e, 0x5e, 0x1a, 0x01, 0x31, 0xda, 0x58, 0x2c, 0x6d, 0x30, 0x43, 0xac, + 0x5f, 0xcc, 0x41, 0xb7, 0x10, 0x5a, 0x2c, 0x51, 0xc6, 0xf5, 0x38, 0x08, 0x48, 0x64, 0xc6, 0x55, + 0x29, 0x6e, 0xae, 0x75, 0x25, 0x53, 0x28, 
0xc6, 0x5c, 0x84, 0xf2, 0x82, 0x22, 0x77, 0x30, 0x4d, + 0x5b, 0x1a, 0xb3, 0xf6, 0x48, 0xac, 0xb1, 0x2b, 0xef, 0x23, 0xba, 0xe4, 0xeb, 0x02, 0xec, 0x06, + 0x5a, 0x13, 0x3b, 0x33, 0x0d, 0x69, 0xa1, 0x4b, 0xe1, 0x6a, 0x67, 0xcc, 0x97, 0x19, 0x81, 0x3a, + 0xe2, 0x8a, 0x34, 0x95, 0xdb, 0xf4, 0x35, 0x4e, 0xf3, 0x06, 0x43, 0xde, 0xc7, 0xda, 0x2d, 0x39, + 0xd9, 0xb6, 0xd3, 0x59, 0x6b, 0x3d, 0x7e, 0x1d, 0xfb, 0x92, 0xbc, 0x6c, 0xee, 0x5d, 0xd8, 0x3b, + 0x88, 0x77, 0x7d, 0xdf, 0x0a, 0x16, 0x12, 0x63, 0xab, 0xe9, 0xf2, 0xba, 0x90, 0x16, 0x04, 0xf5, + 0x48, 0x33, 0xa3, 0xd5, 0x38, 0xe0, 0x2d, 0xa2, 0xb9, 0xe3, 0x54, 0x7d, 0x2a, 0x5d, 0x73, 0x84, + 0x02, 0x1b, 0x59, 0x9b, 0xf3, 0x5c, 0x1c, 0x78, 0x3a, 0xc4, 0x49, 0xb9, 0xb8, 0xc2, 0x35, 0x2a, + 0x6e, 0x01, 0x81, 0xc2, 0x25, 0x51, 0xc1, 0x80, 0x8e, 0x70, 0x10, 0xc8, 0x0c, 0x01, 0x75, 0x72, + 0x07, 0xc2, 0xc8, 0xe2, 0x08, 0xd9, 0x66, 0x9f, 0x93, 0x67, 0xe5, 0x95, 0xac, 0xc9, 0x1f, 0xf1, + 0x1b, 0x72, 0xa9, 0x38, 0x0e, 0x6b, 0xa9, 0x1b, 0x2d, 0x1a, 0xe3, 0x96, 0x79, 0xe1, 0x0d, 0x26, + 0x57, 0xef, 0x71, 0xdc, 0x41, 0x96, 0x2e, 0xea, 0xe8, 0xa5, 0x2f, 0x44, 0x35, 0x7b, 0xe8, 0x1f, + 0xda, 0x42, 0x8a, 0x63, 0xac, 0x6d, 0xdc, 0xc5, 0xc9, 0xd7, 0xf0, 0xa8, 0x52, 0xb2, 0x3f, 0xb6, + 0x25, 0xbb, 0x3c, 0x01, 0xb6, 0xf3, 0x9f, 0x70, 0x91, 0xba, 0x71, 0xce, 0x2c, 0x12, 0x70, 0x91, + 0xc6, 0x76, 0x66, 0x91, 0x3f, 0xf7, 0xfe, 0x77, 0x89, 0xac, 0xff, 0x75, 0x76, 0xd8, 0x79, 0xff, + 0xb7, 0x4f, 0x6f, 0xf5, 0x82, 0x4f, 0x97, 0x1c, 0xbf, 0x7d, 0xf2, 0x4b, 0x3b, 0xa2, 0x8a, 0xdf, + 0x9f, 0xbd, 0xd9, 0x3b, 0xda, 0xef, 0xcc, 0x8e, 0xf7, 0x37, 0xf6, 0xa7, 0x47, 0xf1, 0xcb, 0xa8, + 0xfe, 0x18, 0x7b, 0x7b, 0x30, 0xbf, 0xe8, 0xdb, 0xec, 0x55, 0xf9, 0xf3, 0xcf, 0xcb, 0x1f, 0xf4, + 0x39, 0xff, 0xd7, 0xe5, 0xb5, 0x7e, 0x99, 0x8c, 0x4f, 0xe6, 0x9d, 0xf2, 0x11, 0x9f, 0x86, 0x9b, + 0x9d, 0xb8, 0xe4, 0xfc, 0x3f, 0xb5, 0xc3, 0x2e, 0x9f, 0xcc, 0x77, 0x1b, 0x87, 0xdd, 0xe1, 0xe6, + 0x6e, 0xe9, 0xf0, 0xdf, 0xcb, 0xeb, 0xe5, 0xdb, 0x6e, 0x97, 0x4f, 0xe6, 0xdd, 0x6e, 0xe3, 0xd2, + 0xed, 0x0e, 0x37, 0xbb, 0xdd, 0xd2, 0xe9, 0x2f, 0x1f, 0xc6, 0xdd, 0x7d, 0xf5, 0xff, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xb9, 0x80, 0xf8, 0x96, 0x38, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_mapping_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_mapping_error.pb.go new file mode 100644 index 0000000..18c7bf0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/feed_mapping_error.pb.go @@ -0,0 +1,194 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/feed_mapping_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible feed item errors. +type FeedMappingErrorEnum_FeedMappingError int32 + +const ( + // Enum unspecified. 
+ FeedMappingErrorEnum_UNSPECIFIED FeedMappingErrorEnum_FeedMappingError = 0 + // The received error code is not known in this version. + FeedMappingErrorEnum_UNKNOWN FeedMappingErrorEnum_FeedMappingError = 1 + // The given placeholder field does not exist. + FeedMappingErrorEnum_INVALID_PLACEHOLDER_FIELD FeedMappingErrorEnum_FeedMappingError = 2 + // The given criterion field does not exist. + FeedMappingErrorEnum_INVALID_CRITERION_FIELD FeedMappingErrorEnum_FeedMappingError = 3 + // The given placeholder type does not exist. + FeedMappingErrorEnum_INVALID_PLACEHOLDER_TYPE FeedMappingErrorEnum_FeedMappingError = 4 + // The given criterion type does not exist. + FeedMappingErrorEnum_INVALID_CRITERION_TYPE FeedMappingErrorEnum_FeedMappingError = 5 + // A feed mapping must contain at least one attribute field mapping. + FeedMappingErrorEnum_NO_ATTRIBUTE_FIELD_MAPPINGS FeedMappingErrorEnum_FeedMappingError = 7 + // The type of the feed attribute referenced in the attribute field mapping + // must match the type of the placeholder field. + FeedMappingErrorEnum_FEED_ATTRIBUTE_TYPE_MISMATCH FeedMappingErrorEnum_FeedMappingError = 8 + // A feed mapping for a system generated feed cannot be operated on. + FeedMappingErrorEnum_CANNOT_OPERATE_ON_MAPPINGS_FOR_SYSTEM_GENERATED_FEED FeedMappingErrorEnum_FeedMappingError = 9 + // Only one feed mapping for a placeholder type is allowed per feed or + // customer (depending on the placeholder type). + FeedMappingErrorEnum_MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_TYPE FeedMappingErrorEnum_FeedMappingError = 10 + // Only one feed mapping for a criterion type is allowed per customer. + FeedMappingErrorEnum_MULTIPLE_MAPPINGS_FOR_CRITERION_TYPE FeedMappingErrorEnum_FeedMappingError = 11 + // Only one feed attribute mapping for a placeholder field is allowed + // (depending on the placeholder type). + FeedMappingErrorEnum_MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_FIELD FeedMappingErrorEnum_FeedMappingError = 12 + // Only one feed attribute mapping for a criterion field is allowed + // (depending on the criterion type). + FeedMappingErrorEnum_MULTIPLE_MAPPINGS_FOR_CRITERION_FIELD FeedMappingErrorEnum_FeedMappingError = 13 + // This feed mapping may not contain any explicit attribute field mappings. + FeedMappingErrorEnum_UNEXPECTED_ATTRIBUTE_FIELD_MAPPINGS FeedMappingErrorEnum_FeedMappingError = 14 + // Location placeholder feed mappings can only be created for Places feeds. + FeedMappingErrorEnum_LOCATION_PLACEHOLDER_ONLY_FOR_PLACES_FEEDS FeedMappingErrorEnum_FeedMappingError = 15 + // Mappings for typed feeds cannot be modified. + FeedMappingErrorEnum_CANNOT_MODIFY_MAPPINGS_FOR_TYPED_FEED FeedMappingErrorEnum_FeedMappingError = 16 + // The given placeholder type can only be mapped to system generated feeds. + FeedMappingErrorEnum_INVALID_PLACEHOLDER_TYPE_FOR_NON_SYSTEM_GENERATED_FEED FeedMappingErrorEnum_FeedMappingError = 17 + // The given placeholder type cannot be mapped to a system generated feed + // with the given type. 
+ FeedMappingErrorEnum_INVALID_PLACEHOLDER_TYPE_FOR_SYSTEM_GENERATED_FEED_TYPE FeedMappingErrorEnum_FeedMappingError = 18 +) + +var FeedMappingErrorEnum_FeedMappingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_PLACEHOLDER_FIELD", + 3: "INVALID_CRITERION_FIELD", + 4: "INVALID_PLACEHOLDER_TYPE", + 5: "INVALID_CRITERION_TYPE", + 7: "NO_ATTRIBUTE_FIELD_MAPPINGS", + 8: "FEED_ATTRIBUTE_TYPE_MISMATCH", + 9: "CANNOT_OPERATE_ON_MAPPINGS_FOR_SYSTEM_GENERATED_FEED", + 10: "MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_TYPE", + 11: "MULTIPLE_MAPPINGS_FOR_CRITERION_TYPE", + 12: "MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_FIELD", + 13: "MULTIPLE_MAPPINGS_FOR_CRITERION_FIELD", + 14: "UNEXPECTED_ATTRIBUTE_FIELD_MAPPINGS", + 15: "LOCATION_PLACEHOLDER_ONLY_FOR_PLACES_FEEDS", + 16: "CANNOT_MODIFY_MAPPINGS_FOR_TYPED_FEED", + 17: "INVALID_PLACEHOLDER_TYPE_FOR_NON_SYSTEM_GENERATED_FEED", + 18: "INVALID_PLACEHOLDER_TYPE_FOR_SYSTEM_GENERATED_FEED_TYPE", +} +var FeedMappingErrorEnum_FeedMappingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_PLACEHOLDER_FIELD": 2, + "INVALID_CRITERION_FIELD": 3, + "INVALID_PLACEHOLDER_TYPE": 4, + "INVALID_CRITERION_TYPE": 5, + "NO_ATTRIBUTE_FIELD_MAPPINGS": 7, + "FEED_ATTRIBUTE_TYPE_MISMATCH": 8, + "CANNOT_OPERATE_ON_MAPPINGS_FOR_SYSTEM_GENERATED_FEED": 9, + "MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_TYPE": 10, + "MULTIPLE_MAPPINGS_FOR_CRITERION_TYPE": 11, + "MULTIPLE_MAPPINGS_FOR_PLACEHOLDER_FIELD": 12, + "MULTIPLE_MAPPINGS_FOR_CRITERION_FIELD": 13, + "UNEXPECTED_ATTRIBUTE_FIELD_MAPPINGS": 14, + "LOCATION_PLACEHOLDER_ONLY_FOR_PLACES_FEEDS": 15, + "CANNOT_MODIFY_MAPPINGS_FOR_TYPED_FEED": 16, + "INVALID_PLACEHOLDER_TYPE_FOR_NON_SYSTEM_GENERATED_FEED": 17, + "INVALID_PLACEHOLDER_TYPE_FOR_SYSTEM_GENERATED_FEED_TYPE": 18, +} + +func (x FeedMappingErrorEnum_FeedMappingError) String() string { + return proto.EnumName(FeedMappingErrorEnum_FeedMappingError_name, int32(x)) +} +func (FeedMappingErrorEnum_FeedMappingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_error_abb8d6f5674d8589, []int{0, 0} +} + +// Container for enum describing possible feed item errors. 
+type FeedMappingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedMappingErrorEnum) Reset() { *m = FeedMappingErrorEnum{} } +func (m *FeedMappingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FeedMappingErrorEnum) ProtoMessage() {} +func (*FeedMappingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_error_abb8d6f5674d8589, []int{0} +} +func (m *FeedMappingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedMappingErrorEnum.Unmarshal(m, b) +} +func (m *FeedMappingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedMappingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FeedMappingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedMappingErrorEnum.Merge(dst, src) +} +func (m *FeedMappingErrorEnum) XXX_Size() int { + return xxx_messageInfo_FeedMappingErrorEnum.Size(m) +} +func (m *FeedMappingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FeedMappingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedMappingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FeedMappingErrorEnum)(nil), "google.ads.googleads.v1.errors.FeedMappingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FeedMappingErrorEnum_FeedMappingError", FeedMappingErrorEnum_FeedMappingError_name, FeedMappingErrorEnum_FeedMappingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/feed_mapping_error.proto", fileDescriptor_feed_mapping_error_abb8d6f5674d8589) +} + +var fileDescriptor_feed_mapping_error_abb8d6f5674d8589 = []byte{ + // 556 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdf, 0x6e, 0xd3, 0x30, + 0x14, 0xc6, 0x69, 0x47, 0x19, 0xb8, 0xc0, 0x82, 0xc5, 0xdf, 0xad, 0x0c, 0x54, 0xfe, 0x0c, 0x86, + 0x94, 0xa8, 0x02, 0x31, 0x94, 0x5d, 0xb9, 0x89, 0xd3, 0x59, 0x24, 0x76, 0x94, 0xb8, 0x85, 0xa2, + 0x4a, 0x56, 0x20, 0x21, 0xaa, 0xb4, 0x26, 0x55, 0x52, 0xf6, 0x40, 0x5c, 0xf2, 0x28, 0x5c, 0xf0, + 0x1a, 0x48, 0x5c, 0xf1, 0x08, 0x28, 0x71, 0x5b, 0xd6, 0xae, 0xdd, 0xae, 0x7a, 0xe4, 0xf3, 0xfd, + 0x3e, 0x1f, 0x7f, 0xcd, 0x01, 0x07, 0x71, 0x9a, 0xc6, 0xc7, 0x91, 0x16, 0x84, 0xb9, 0x26, 0xcb, + 0xa2, 0x3a, 0x69, 0x69, 0x51, 0x96, 0xa5, 0x59, 0xae, 0x7d, 0x8d, 0xa2, 0x50, 0x8c, 0x82, 0xf1, + 0x78, 0x98, 0xc4, 0xa2, 0x3c, 0x53, 0xc7, 0x59, 0x3a, 0x49, 0xe1, 0xae, 0x54, 0xab, 0x41, 0x98, + 0xab, 0x73, 0x50, 0x3d, 0x69, 0xa9, 0x12, 0xdc, 0x6e, 0xcc, 0x8c, 0xc7, 0x43, 0x2d, 0x48, 0x92, + 0x74, 0x12, 0x4c, 0x86, 0x69, 0x92, 0x4b, 0xba, 0xf9, 0xbb, 0x06, 0x6e, 0x5b, 0x51, 0x14, 0x3a, + 0xd2, 0x19, 0x17, 0x0c, 0x4e, 0xbe, 0x8d, 0x9a, 0xbf, 0x6a, 0x40, 0x59, 0x6e, 0xc0, 0x2d, 0x50, + 0xef, 0x52, 0xdf, 0xc5, 0x06, 0xb1, 0x08, 0x36, 0x95, 0x4b, 0xb0, 0x0e, 0x36, 0xbb, 0xf4, 0x3d, + 0x65, 0x1f, 0xa8, 0x52, 0x81, 0x0f, 0xc1, 0x03, 0x42, 0x7b, 0xc8, 0x26, 0xa6, 0x70, 0x6d, 0x64, + 0xe0, 0x23, 0x66, 0x9b, 0xd8, 0x13, 0x16, 0xc1, 0xb6, 0xa9, 0x54, 0xe1, 0x0e, 0xb8, 0x37, 0x6b, + 0x1b, 0x1e, 0xe1, 0xd8, 0x23, 0x8c, 0x4e, 0x9b, 0x1b, 0xb0, 0x01, 0xee, 0xaf, 0x62, 0x79, 0xdf, + 0xc5, 0xca, 0x65, 0xb8, 0x0d, 0xee, 0x9e, 0x45, 0xcb, 0x5e, 0x0d, 0x3e, 0x02, 0x3b, 0x94, 0x09, + 0xc4, 0xb9, 0x47, 0xda, 0x5d, 0x8e, 0xa5, 0xa3, 0x70, 0x90, 0xeb, 0x12, 0xda, 0xf1, 0x95, 0x4d, + 0xf8, 0x18, 0x34, 0x2c, 0x8c, 0xcd, 0x53, 0x92, 0x82, 0x14, 0x0e, 0xf1, 0x1d, 0xc4, 0x8d, 0x23, + 0xe5, 
0x2a, 0x7c, 0x07, 0xde, 0x18, 0x88, 0x52, 0xc6, 0x05, 0x73, 0xb1, 0x87, 0x38, 0x16, 0x8c, + 0xce, 0x1d, 0x84, 0xc5, 0x3c, 0xe1, 0xf7, 0x7d, 0x8e, 0x1d, 0xd1, 0xc1, 0xb4, 0xec, 0x9b, 0xa2, + 0x70, 0x54, 0xae, 0xc1, 0x7d, 0xf0, 0xdc, 0xe9, 0xda, 0x9c, 0xb8, 0x36, 0x5e, 0x04, 0xce, 0x3c, + 0x02, 0xc0, 0x17, 0xe0, 0xe9, 0x6a, 0xed, 0xd2, 0x93, 0xea, 0xf0, 0x15, 0xd8, 0xbb, 0xd8, 0x55, + 0x26, 0x77, 0x1d, 0xbe, 0x04, 0xcf, 0x2e, 0xb2, 0x95, 0xd2, 0x1b, 0x70, 0x0f, 0x3c, 0xe9, 0x52, + 0xfc, 0xd1, 0xc5, 0x06, 0x5f, 0xc8, 0x63, 0x29, 0xb2, 0x9b, 0x50, 0x05, 0xfb, 0x36, 0x33, 0x10, + 0x2f, 0xe0, 0xd3, 0x77, 0x32, 0x6a, 0xf7, 0xff, 0x0f, 0xe2, 0x97, 0x29, 0xf8, 0xca, 0x56, 0x31, + 0xc3, 0x34, 0x40, 0x87, 0x99, 0xc4, 0xea, 0x2f, 0x0e, 0x52, 0xbc, 0x6a, 0x9a, 0x98, 0x02, 0x75, + 0xf0, 0x76, 0xdd, 0x1f, 0x5d, 0x8a, 0x29, 0xa3, 0x6b, 0xd2, 0xbe, 0x05, 0x0f, 0xc1, 0xc1, 0xb9, + 0xec, 0x4a, 0x4e, 0x86, 0x0a, 0xdb, 0x7f, 0x2b, 0xa0, 0xf9, 0x25, 0x1d, 0xa9, 0xe7, 0xaf, 0x4b, + 0xfb, 0xce, 0xf2, 0x47, 0xef, 0x16, 0x7b, 0xe2, 0x56, 0x3e, 0x99, 0x53, 0x30, 0x4e, 0x8f, 0x83, + 0x24, 0x56, 0xd3, 0x2c, 0xd6, 0xe2, 0x28, 0x29, 0xb7, 0x68, 0xb6, 0xb0, 0xe3, 0x61, 0xbe, 0x6e, + 0x7f, 0x0f, 0xe5, 0xcf, 0xf7, 0xea, 0x46, 0x07, 0xa1, 0x1f, 0xd5, 0xdd, 0x8e, 0x34, 0x43, 0x61, + 0xae, 0xca, 0xb2, 0xa8, 0x7a, 0x2d, 0xb5, 0xbc, 0x32, 0xff, 0x39, 0x13, 0x0c, 0x50, 0x98, 0x0f, + 0xe6, 0x82, 0x41, 0xaf, 0x35, 0x90, 0x82, 0x3f, 0xd5, 0xa6, 0x3c, 0xd5, 0x75, 0x14, 0xe6, 0xba, + 0x3e, 0x97, 0xe8, 0x7a, 0xaf, 0xa5, 0xeb, 0x52, 0xf4, 0xf9, 0x4a, 0x39, 0xdd, 0xeb, 0x7f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x32, 0x72, 0x8f, 0x98, 0x5c, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_error.pb.go new file mode 100644 index 0000000..4520877 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_error.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/field_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible field errors. +type FieldErrorEnum_FieldError int32 + +const ( + // Enum unspecified. + FieldErrorEnum_UNSPECIFIED FieldErrorEnum_FieldError = 0 + // The received error code is not known in this version. + FieldErrorEnum_UNKNOWN FieldErrorEnum_FieldError = 1 + // The required field was not present. + FieldErrorEnum_REQUIRED FieldErrorEnum_FieldError = 2 + // The field attempted to be mutated is immutable. + FieldErrorEnum_IMMUTABLE_FIELD FieldErrorEnum_FieldError = 3 + // The field's value is invalid. + FieldErrorEnum_INVALID_VALUE FieldErrorEnum_FieldError = 4 + // The field cannot be set. 
+ FieldErrorEnum_VALUE_MUST_BE_UNSET FieldErrorEnum_FieldError = 5 + // The required repeated field was empty. + FieldErrorEnum_REQUIRED_NONEMPTY_LIST FieldErrorEnum_FieldError = 6 + // The field cannot be cleared. + FieldErrorEnum_FIELD_CANNOT_BE_CLEARED FieldErrorEnum_FieldError = 7 + // The field's value is on a blacklist for this field. + FieldErrorEnum_BLACKLISTED_VALUE FieldErrorEnum_FieldError = 8 +) + +var FieldErrorEnum_FieldError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "REQUIRED", + 3: "IMMUTABLE_FIELD", + 4: "INVALID_VALUE", + 5: "VALUE_MUST_BE_UNSET", + 6: "REQUIRED_NONEMPTY_LIST", + 7: "FIELD_CANNOT_BE_CLEARED", + 8: "BLACKLISTED_VALUE", +} +var FieldErrorEnum_FieldError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REQUIRED": 2, + "IMMUTABLE_FIELD": 3, + "INVALID_VALUE": 4, + "VALUE_MUST_BE_UNSET": 5, + "REQUIRED_NONEMPTY_LIST": 6, + "FIELD_CANNOT_BE_CLEARED": 7, + "BLACKLISTED_VALUE": 8, +} + +func (x FieldErrorEnum_FieldError) String() string { + return proto.EnumName(FieldErrorEnum_FieldError_name, int32(x)) +} +func (FieldErrorEnum_FieldError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_field_error_63b25d27a87432fe, []int{0, 0} +} + +// Container for enum describing possible field errors. +type FieldErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldErrorEnum) Reset() { *m = FieldErrorEnum{} } +func (m *FieldErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FieldErrorEnum) ProtoMessage() {} +func (*FieldErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_field_error_63b25d27a87432fe, []int{0} +} +func (m *FieldErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldErrorEnum.Unmarshal(m, b) +} +func (m *FieldErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FieldErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldErrorEnum.Merge(dst, src) +} +func (m *FieldErrorEnum) XXX_Size() int { + return xxx_messageInfo_FieldErrorEnum.Size(m) +} +func (m *FieldErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FieldErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FieldErrorEnum)(nil), "google.ads.googleads.v1.errors.FieldErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FieldErrorEnum_FieldError", FieldErrorEnum_FieldError_name, FieldErrorEnum_FieldError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/field_error.proto", fileDescriptor_field_error_63b25d27a87432fe) +} + +var fileDescriptor_field_error_63b25d27a87432fe = []byte{ + // 392 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8e, 0xd3, 0x30, + 0x14, 0xc6, 0x49, 0x06, 0x66, 0x46, 0x1e, 0xa0, 0x19, 0x8f, 0x60, 0xa4, 0x01, 0xcd, 0x22, 0x07, + 0x70, 0x88, 0xd8, 0x99, 0x95, 0x93, 0xb8, 0x23, 0xab, 0x89, 0x1b, 0x9a, 0x3f, 0x08, 0x14, 0xc9, + 0x0a, 0x24, 0x44, 0x91, 0xda, 0xb8, 0x8a, 0x4b, 0x0f, 0xc4, 0x92, 0x9b, 0xc0, 0x19, 0x38, 0x41, + 0x4f, 0x81, 0x12, 0x93, 0x76, 0x05, 0x2b, 0x7f, 0x7e, 0xfa, 0x7d, 0xdf, 0xb3, 0xdf, 0x03, 0x6f, + 0x1a, 0x29, 0x9b, 0x75, 0xed, 0x94, 0x95, 0x72, 0xb4, 0x1c, 0xd4, 0xde, 0x75, 0xea, 0xbe, 0x97, + 0xbd, 0x72, 0xbe, 0xb6, 0xf5, 0xba, 0x12, 0xe3, 0x05, 0x6d, 0x7b, 0xb9, 
0x93, 0xf0, 0x5e, 0x63, + 0xa8, 0xac, 0x14, 0x3a, 0x3a, 0xd0, 0xde, 0x45, 0xda, 0x71, 0xf7, 0x7a, 0x4a, 0xdc, 0xb6, 0x4e, + 0xd9, 0x75, 0x72, 0x57, 0xee, 0x5a, 0xd9, 0x29, 0xed, 0xb6, 0x7f, 0x1b, 0xe0, 0xf9, 0x7c, 0xc8, + 0xa4, 0x03, 0x4d, 0xbb, 0x6f, 0x1b, 0xfb, 0xa7, 0x01, 0xc0, 0xa9, 0x04, 0x67, 0xe0, 0x2a, 0xe3, + 0x49, 0x4c, 0x7d, 0x36, 0x67, 0x34, 0xb0, 0x1e, 0xc1, 0x2b, 0x70, 0x91, 0xf1, 0x05, 0x5f, 0x7e, + 0xe0, 0x96, 0x01, 0x9f, 0x82, 0xcb, 0x15, 0x7d, 0x9f, 0xb1, 0x15, 0x0d, 0x2c, 0x13, 0xde, 0x80, + 0x19, 0x8b, 0xa2, 0x2c, 0x25, 0x5e, 0x48, 0xc5, 0x9c, 0xd1, 0x30, 0xb0, 0xce, 0xe0, 0x35, 0x78, + 0xc6, 0x78, 0x4e, 0x42, 0x16, 0x88, 0x9c, 0x84, 0x19, 0xb5, 0x1e, 0xc3, 0x5b, 0x70, 0x33, 0x4a, + 0x11, 0x65, 0x49, 0x2a, 0x3c, 0x2a, 0x32, 0x9e, 0xd0, 0xd4, 0x7a, 0x02, 0xef, 0xc0, 0xcb, 0x29, + 0x4e, 0xf0, 0x25, 0xa7, 0x51, 0x9c, 0x7e, 0x14, 0x21, 0x4b, 0x52, 0xeb, 0x1c, 0xbe, 0x02, 0xb7, + 0x63, 0xa4, 0xf0, 0x09, 0xe7, 0xcb, 0xd1, 0xe6, 0x87, 0x94, 0x0c, 0x9d, 0x2f, 0xe0, 0x0b, 0x70, + 0xed, 0x85, 0xc4, 0x5f, 0x0c, 0x2c, 0x9d, 0x1a, 0x5d, 0x7a, 0x07, 0x03, 0xd8, 0x5f, 0xe4, 0x06, + 0xfd, 0x7f, 0x46, 0xde, 0xec, 0xf4, 0xdf, 0x78, 0x18, 0x4b, 0x6c, 0x7c, 0x0a, 0xfe, 0x5a, 0x1a, + 0xb9, 0x2e, 0xbb, 0x06, 0xc9, 0xbe, 0x71, 0x9a, 0xba, 0x1b, 0x87, 0x36, 0x2d, 0x66, 0xdb, 0xaa, + 0x7f, 0xed, 0xe9, 0x9d, 0x3e, 0xbe, 0x9b, 0x67, 0x0f, 0x84, 0xfc, 0x30, 0xef, 0x1f, 0x74, 0x18, + 0xa9, 0x14, 0xd2, 0x72, 0x50, 0xb9, 0x8b, 0xc6, 0x96, 0xea, 0xd7, 0x04, 0x14, 0xa4, 0x52, 0xc5, + 0x11, 0x28, 0x72, 0xb7, 0xd0, 0xc0, 0xc1, 0xb4, 0x75, 0x15, 0x63, 0x52, 0x29, 0x8c, 0x8f, 0x08, + 0xc6, 0xb9, 0x8b, 0xb1, 0x86, 0x3e, 0x9f, 0x8f, 0xaf, 0x7b, 0xfb, 0x27, 0x00, 0x00, 0xff, 0xff, + 0x9c, 0xd0, 0xb3, 0x96, 0x44, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_mask_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_mask_error.pb.go new file mode 100644 index 0000000..bf93842 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/field_mask_error.pb.go @@ -0,0 +1,131 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/field_mask_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible field mask errors. +type FieldMaskErrorEnum_FieldMaskError int32 + +const ( + // Enum unspecified. + FieldMaskErrorEnum_UNSPECIFIED FieldMaskErrorEnum_FieldMaskError = 0 + // The received error code is not known in this version. + FieldMaskErrorEnum_UNKNOWN FieldMaskErrorEnum_FieldMaskError = 1 + // The field mask must be provided for update operations. + FieldMaskErrorEnum_FIELD_MASK_MISSING FieldMaskErrorEnum_FieldMaskError = 5 + // The field mask must be empty for create and remove operations. 
+ FieldMaskErrorEnum_FIELD_MASK_NOT_ALLOWED FieldMaskErrorEnum_FieldMaskError = 4 + // The field mask contained an invalid field. + FieldMaskErrorEnum_FIELD_NOT_FOUND FieldMaskErrorEnum_FieldMaskError = 2 + // The field mask updated a field with subfields. Fields with subfields may + // be cleared, but not updated. To fix this, the field mask should select + // all the subfields of the invalid field. + FieldMaskErrorEnum_FIELD_HAS_SUBFIELDS FieldMaskErrorEnum_FieldMaskError = 3 +) + +var FieldMaskErrorEnum_FieldMaskError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 5: "FIELD_MASK_MISSING", + 4: "FIELD_MASK_NOT_ALLOWED", + 2: "FIELD_NOT_FOUND", + 3: "FIELD_HAS_SUBFIELDS", +} +var FieldMaskErrorEnum_FieldMaskError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FIELD_MASK_MISSING": 5, + "FIELD_MASK_NOT_ALLOWED": 4, + "FIELD_NOT_FOUND": 2, + "FIELD_HAS_SUBFIELDS": 3, +} + +func (x FieldMaskErrorEnum_FieldMaskError) String() string { + return proto.EnumName(FieldMaskErrorEnum_FieldMaskError_name, int32(x)) +} +func (FieldMaskErrorEnum_FieldMaskError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_field_mask_error_ed6cfea039bdd6d3, []int{0, 0} +} + +// Container for enum describing possible field mask errors. +type FieldMaskErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldMaskErrorEnum) Reset() { *m = FieldMaskErrorEnum{} } +func (m *FieldMaskErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FieldMaskErrorEnum) ProtoMessage() {} +func (*FieldMaskErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_field_mask_error_ed6cfea039bdd6d3, []int{0} +} +func (m *FieldMaskErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldMaskErrorEnum.Unmarshal(m, b) +} +func (m *FieldMaskErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldMaskErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FieldMaskErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldMaskErrorEnum.Merge(dst, src) +} +func (m *FieldMaskErrorEnum) XXX_Size() int { + return xxx_messageInfo_FieldMaskErrorEnum.Size(m) +} +func (m *FieldMaskErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FieldMaskErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldMaskErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FieldMaskErrorEnum)(nil), "google.ads.googleads.v1.errors.FieldMaskErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FieldMaskErrorEnum_FieldMaskError", FieldMaskErrorEnum_FieldMaskError_name, FieldMaskErrorEnum_FieldMaskError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/field_mask_error.proto", fileDescriptor_field_mask_error_ed6cfea039bdd6d3) +} + +var fileDescriptor_field_mask_error_ed6cfea039bdd6d3 = []byte{ + // 353 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xfb, 0x30, + 0x14, 0xc6, 0xff, 0xed, 0xfe, 0x2a, 0x64, 0xe0, 0x4a, 0x06, 0x13, 0x86, 0xec, 0xa2, 0x0f, 0x90, + 0x52, 0xc4, 0x9b, 0x78, 0x95, 0xd9, 0x76, 0x96, 0x6d, 0xe9, 0x20, 0x76, 0x03, 0x29, 0x94, 0x68, + 0x6b, 0x29, 0xdb, 0x9a, 0xd1, 0xcc, 0x3d, 0x87, 0xcf, 0xe0, 0x8d, 0xe0, 0xa3, 0xf8, 0x28, 0x5e, + 0xf8, 0x0c, 0xd2, 0xc6, 0x15, 0x77, 0xa1, 0x57, 0xf9, 0xf2, 0xe5, 0xf7, 0x9d, 0x9c, 0x73, 0xc0, + 0x65, 0x26, 0x44, 0xb6, 0x4a, 0x2d, 0x9e, 0x48, 0x4b, 0xc9, 0x4a, 
0xed, 0x6c, 0x2b, 0x2d, 0x4b, + 0x51, 0x4a, 0xeb, 0x31, 0x4f, 0x57, 0x49, 0xbc, 0xe6, 0x72, 0x19, 0xd7, 0x0e, 0xda, 0x94, 0x62, + 0x2b, 0xe0, 0x40, 0xb1, 0x88, 0x27, 0x12, 0x35, 0x31, 0xb4, 0xb3, 0x91, 0x8a, 0xf5, 0xcf, 0xf7, + 0x65, 0x37, 0xb9, 0xc5, 0x8b, 0x42, 0x6c, 0xf9, 0x36, 0x17, 0x85, 0x54, 0x69, 0xf3, 0x55, 0x03, + 0xd0, 0xab, 0x0a, 0x4f, 0xb9, 0x5c, 0xba, 0x55, 0xc2, 0x2d, 0x9e, 0xd6, 0xe6, 0xb3, 0x06, 0x4e, + 0x0f, 0x6d, 0xd8, 0x01, 0xed, 0x90, 0xb2, 0x99, 0x7b, 0xed, 0x7b, 0xbe, 0xeb, 0x18, 0xff, 0x60, + 0x1b, 0x9c, 0x84, 0x74, 0x4c, 0x83, 0x05, 0x35, 0x34, 0xd8, 0x03, 0xd0, 0xf3, 0xdd, 0x89, 0x13, + 0x4f, 0x09, 0x1b, 0xc7, 0x53, 0x9f, 0x31, 0x9f, 0x8e, 0x8c, 0x23, 0xd8, 0x07, 0xbd, 0x1f, 0x3e, + 0x0d, 0x6e, 0x63, 0x32, 0x99, 0x04, 0x0b, 0xd7, 0x31, 0xfe, 0xc3, 0x2e, 0xe8, 0xa8, 0xb7, 0xca, + 0xf6, 0x82, 0x90, 0x3a, 0x86, 0x0e, 0xcf, 0x40, 0x57, 0x99, 0x37, 0x84, 0xc5, 0x2c, 0x1c, 0xd6, + 0x17, 0x66, 0xb4, 0x86, 0x9f, 0x1a, 0x30, 0x1f, 0xc4, 0x1a, 0xfd, 0x3d, 0xee, 0xb0, 0x7b, 0xd8, + 0xf6, 0xac, 0x9a, 0x72, 0xa6, 0xdd, 0x39, 0xdf, 0xb1, 0x4c, 0xac, 0x78, 0x91, 0x21, 0x51, 0x66, + 0x56, 0x96, 0x16, 0xf5, 0x0e, 0xf6, 0xcb, 0xde, 0xe4, 0xf2, 0xb7, 0xdd, 0x5f, 0xa9, 0xe3, 0x45, + 0x6f, 0x8d, 0x08, 0x79, 0xd3, 0x07, 0x23, 0x55, 0x8c, 0x24, 0x12, 0x29, 0x59, 0xa9, 0xb9, 0x8d, + 0xea, 0x2f, 0xe5, 0xfb, 0x1e, 0x88, 0x48, 0x22, 0xa3, 0x06, 0x88, 0xe6, 0x76, 0xa4, 0x80, 0x0f, + 0xdd, 0x54, 0x2e, 0xc6, 0x24, 0x91, 0x18, 0x37, 0x08, 0xc6, 0x73, 0x1b, 0x63, 0x05, 0xdd, 0x1f, + 0xd7, 0xdd, 0x5d, 0x7c, 0x05, 0x00, 0x00, 0xff, 0xff, 0x56, 0x39, 0xab, 0xbc, 0x18, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_error.pb.go new file mode 100644 index 0000000..ee612c1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_error.pb.go @@ -0,0 +1,189 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/function_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible function errors. +type FunctionErrorEnum_FunctionError int32 + +const ( + // Enum unspecified. + FunctionErrorEnum_UNSPECIFIED FunctionErrorEnum_FunctionError = 0 + // The received error code is not known in this version. + FunctionErrorEnum_UNKNOWN FunctionErrorEnum_FunctionError = 1 + // The format of the function is not recognized as a supported function + // format. + FunctionErrorEnum_INVALID_FUNCTION_FORMAT FunctionErrorEnum_FunctionError = 2 + // Operand data types do not match. + FunctionErrorEnum_DATA_TYPE_MISMATCH FunctionErrorEnum_FunctionError = 3 + // The operands cannot be used together in a conjunction. 
+ FunctionErrorEnum_INVALID_CONJUNCTION_OPERANDS FunctionErrorEnum_FunctionError = 4 + // Invalid numer of Operands. + FunctionErrorEnum_INVALID_NUMBER_OF_OPERANDS FunctionErrorEnum_FunctionError = 5 + // Operand Type not supported. + FunctionErrorEnum_INVALID_OPERAND_TYPE FunctionErrorEnum_FunctionError = 6 + // Operator not supported. + FunctionErrorEnum_INVALID_OPERATOR FunctionErrorEnum_FunctionError = 7 + // Request context type not supported. + FunctionErrorEnum_INVALID_REQUEST_CONTEXT_TYPE FunctionErrorEnum_FunctionError = 8 + // The matching function is not allowed for call placeholders + FunctionErrorEnum_INVALID_FUNCTION_FOR_CALL_PLACEHOLDER FunctionErrorEnum_FunctionError = 9 + // The matching function is not allowed for the specified placeholder + FunctionErrorEnum_INVALID_FUNCTION_FOR_PLACEHOLDER FunctionErrorEnum_FunctionError = 10 + // Invalid operand. + FunctionErrorEnum_INVALID_OPERAND FunctionErrorEnum_FunctionError = 11 + // Missing value for the constant operand. + FunctionErrorEnum_MISSING_CONSTANT_OPERAND_VALUE FunctionErrorEnum_FunctionError = 12 + // The value of the constant operand is invalid. + FunctionErrorEnum_INVALID_CONSTANT_OPERAND_VALUE FunctionErrorEnum_FunctionError = 13 + // Invalid function nesting. + FunctionErrorEnum_INVALID_NESTING FunctionErrorEnum_FunctionError = 14 + // The Feed ID was different from another Feed ID in the same function. + FunctionErrorEnum_MULTIPLE_FEED_IDS_NOT_SUPPORTED FunctionErrorEnum_FunctionError = 15 + // The matching function is invalid for use with a feed with a fixed schema. + FunctionErrorEnum_INVALID_FUNCTION_FOR_FEED_WITH_FIXED_SCHEMA FunctionErrorEnum_FunctionError = 16 + // Invalid attribute name. + FunctionErrorEnum_INVALID_ATTRIBUTE_NAME FunctionErrorEnum_FunctionError = 17 +) + +var FunctionErrorEnum_FunctionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_FUNCTION_FORMAT", + 3: "DATA_TYPE_MISMATCH", + 4: "INVALID_CONJUNCTION_OPERANDS", + 5: "INVALID_NUMBER_OF_OPERANDS", + 6: "INVALID_OPERAND_TYPE", + 7: "INVALID_OPERATOR", + 8: "INVALID_REQUEST_CONTEXT_TYPE", + 9: "INVALID_FUNCTION_FOR_CALL_PLACEHOLDER", + 10: "INVALID_FUNCTION_FOR_PLACEHOLDER", + 11: "INVALID_OPERAND", + 12: "MISSING_CONSTANT_OPERAND_VALUE", + 13: "INVALID_CONSTANT_OPERAND_VALUE", + 14: "INVALID_NESTING", + 15: "MULTIPLE_FEED_IDS_NOT_SUPPORTED", + 16: "INVALID_FUNCTION_FOR_FEED_WITH_FIXED_SCHEMA", + 17: "INVALID_ATTRIBUTE_NAME", +} +var FunctionErrorEnum_FunctionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_FUNCTION_FORMAT": 2, + "DATA_TYPE_MISMATCH": 3, + "INVALID_CONJUNCTION_OPERANDS": 4, + "INVALID_NUMBER_OF_OPERANDS": 5, + "INVALID_OPERAND_TYPE": 6, + "INVALID_OPERATOR": 7, + "INVALID_REQUEST_CONTEXT_TYPE": 8, + "INVALID_FUNCTION_FOR_CALL_PLACEHOLDER": 9, + "INVALID_FUNCTION_FOR_PLACEHOLDER": 10, + "INVALID_OPERAND": 11, + "MISSING_CONSTANT_OPERAND_VALUE": 12, + "INVALID_CONSTANT_OPERAND_VALUE": 13, + "INVALID_NESTING": 14, + "MULTIPLE_FEED_IDS_NOT_SUPPORTED": 15, + "INVALID_FUNCTION_FOR_FEED_WITH_FIXED_SCHEMA": 16, + "INVALID_ATTRIBUTE_NAME": 17, +} + +func (x FunctionErrorEnum_FunctionError) String() string { + return proto.EnumName(FunctionErrorEnum_FunctionError_name, int32(x)) +} +func (FunctionErrorEnum_FunctionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_function_error_646271f62f11fab0, []int{0, 0} +} + +// Container for enum describing possible function errors. 
+type FunctionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FunctionErrorEnum) Reset() { *m = FunctionErrorEnum{} } +func (m *FunctionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FunctionErrorEnum) ProtoMessage() {} +func (*FunctionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_function_error_646271f62f11fab0, []int{0} +} +func (m *FunctionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FunctionErrorEnum.Unmarshal(m, b) +} +func (m *FunctionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FunctionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FunctionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FunctionErrorEnum.Merge(dst, src) +} +func (m *FunctionErrorEnum) XXX_Size() int { + return xxx_messageInfo_FunctionErrorEnum.Size(m) +} +func (m *FunctionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FunctionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FunctionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FunctionErrorEnum)(nil), "google.ads.googleads.v1.errors.FunctionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FunctionErrorEnum_FunctionError", FunctionErrorEnum_FunctionError_name, FunctionErrorEnum_FunctionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/function_error.proto", fileDescriptor_function_error_646271f62f11fab0) +} + +var fileDescriptor_function_error_646271f62f11fab0 = []byte{ + // 541 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0x51, 0x6f, 0xd3, 0x30, + 0x10, 0xc7, 0x59, 0x37, 0x36, 0xf0, 0x18, 0xf5, 0xcc, 0x34, 0x50, 0x99, 0xca, 0x54, 0xe0, 0x01, + 0x21, 0x25, 0xaa, 0xf6, 0x16, 0x9e, 0xdc, 0xe4, 0xd2, 0x1a, 0x12, 0x27, 0xc4, 0x4e, 0x37, 0x50, + 0x25, 0xab, 0xac, 0x25, 0xaa, 0xb4, 0x25, 0x55, 0xd3, 0xed, 0x03, 0xf1, 0x08, 0x9f, 0x82, 0x57, + 0x3e, 0x0a, 0x12, 0xdf, 0x01, 0x25, 0x6e, 0x4a, 0x8b, 0x0a, 0x4f, 0xb9, 0xdc, 0xfd, 0xfe, 0xf7, + 0x3f, 0x5b, 0x67, 0x74, 0x96, 0x64, 0x59, 0x72, 0x35, 0x36, 0x87, 0xa3, 0xdc, 0xd4, 0x61, 0x11, + 0xdd, 0xb6, 0xcd, 0xf1, 0x6c, 0x96, 0xcd, 0x72, 0xf3, 0xf3, 0x4d, 0x7a, 0x39, 0x9f, 0x64, 0xa9, + 0x2a, 0xff, 0x8d, 0xe9, 0x2c, 0x9b, 0x67, 0xa4, 0xa9, 0x49, 0x63, 0x38, 0xca, 0x8d, 0xa5, 0xc8, + 0xb8, 0x6d, 0x1b, 0x5a, 0xd4, 0x38, 0xa9, 0x9a, 0x4e, 0x27, 0xe6, 0x30, 0x4d, 0xb3, 0xf9, 0xb0, + 0x68, 0x91, 0x6b, 0x75, 0xeb, 0xfb, 0x0e, 0x3a, 0x74, 0x17, 0x6d, 0xa1, 0x10, 0x40, 0x7a, 0x73, + 0xdd, 0xfa, 0xb6, 0x83, 0x0e, 0xd6, 0xb2, 0xa4, 0x8e, 0xf6, 0x63, 0x2e, 0x42, 0xb0, 0x99, 0xcb, + 0xc0, 0xc1, 0x77, 0xc8, 0x3e, 0xda, 0x8b, 0xf9, 0x3b, 0x1e, 0x9c, 0x73, 0xbc, 0x45, 0x9e, 0xa2, + 0xc7, 0x8c, 0xf7, 0xa9, 0xc7, 0x1c, 0xe5, 0xc6, 0xdc, 0x96, 0x2c, 0xe0, 0xca, 0x0d, 0x22, 0x9f, + 0x4a, 0x5c, 0x23, 0xc7, 0x88, 0x38, 0x54, 0x52, 0x25, 0x3f, 0x84, 0xa0, 0x7c, 0x26, 0x7c, 0x2a, + 0xed, 0x1e, 0xde, 0x26, 0xa7, 0xe8, 0xa4, 0x12, 0xd9, 0x01, 0x7f, 0x5b, 0xe9, 0x82, 0x10, 0x22, + 0xca, 0x1d, 0x81, 0x77, 0x48, 0x13, 0x35, 0x2a, 0x82, 0xc7, 0x7e, 0x07, 0x22, 0x15, 0xb8, 0x7f, + 0xea, 0x77, 0xc9, 0x13, 0x74, 0x54, 0xd5, 0x17, 0xd9, 0xd2, 0x04, 0xef, 0x92, 0x23, 0x84, 0xd7, + 0x2a, 0x32, 0x88, 0xf0, 0xde, 0xaa, 0x63, 0x04, 0xef, 0x63, 0x10, 0xb2, 0x70, 0x96, 0x70, 0x21, + 0xb5, 0xee, 0x1e, 0x79, 0x85, 0x5e, 0x6e, 0x3a, 0x88, 0xb2, 0xa9, 0xe7, 0xa9, 0xd0, 0xa3, 0x36, 
+ 0xf4, 0x02, 0xcf, 0x81, 0x08, 0xdf, 0x27, 0x2f, 0xd0, 0xe9, 0x46, 0x74, 0x95, 0x42, 0xe4, 0x11, + 0xaa, 0xff, 0x35, 0x22, 0xde, 0x27, 0x2d, 0xd4, 0xf4, 0x99, 0x10, 0x8c, 0x77, 0x0b, 0x7f, 0x21, + 0x29, 0x97, 0xcb, 0x03, 0xf4, 0xa9, 0x17, 0x03, 0x7e, 0x50, 0x30, 0x2b, 0xb7, 0xb3, 0x89, 0x39, + 0x58, 0x6d, 0xce, 0x41, 0x48, 0xc6, 0xbb, 0xf8, 0x21, 0x79, 0x8e, 0x9e, 0xf9, 0xb1, 0x27, 0x59, + 0xe8, 0x81, 0x72, 0x01, 0x1c, 0xc5, 0x1c, 0xa1, 0x78, 0x20, 0x95, 0x88, 0xc3, 0x30, 0x88, 0x24, + 0x38, 0xb8, 0x4e, 0x4c, 0xf4, 0x7a, 0xe3, 0xf0, 0xa5, 0xe0, 0x9c, 0xc9, 0x9e, 0x72, 0xd9, 0x05, + 0x38, 0x4a, 0xd8, 0x3d, 0xf0, 0x29, 0xc6, 0xa4, 0x81, 0x8e, 0x2b, 0x01, 0x95, 0x32, 0x62, 0x9d, + 0x58, 0x82, 0xe2, 0xd4, 0x07, 0x7c, 0xd8, 0xf9, 0xb5, 0x85, 0x5a, 0x97, 0xd9, 0xb5, 0xf1, 0xff, + 0x45, 0xec, 0x90, 0xb5, 0x8d, 0x0a, 0x8b, 0xf5, 0x0b, 0xb7, 0x3e, 0x3a, 0x0b, 0x55, 0x92, 0x5d, + 0x0d, 0xd3, 0xc4, 0xc8, 0x66, 0x89, 0x99, 0x8c, 0xd3, 0x72, 0x39, 0xab, 0x37, 0x30, 0x9d, 0xe4, + 0xff, 0x7a, 0x12, 0x6f, 0xf4, 0xe7, 0x4b, 0x6d, 0xbb, 0x4b, 0xe9, 0xd7, 0x5a, 0xb3, 0xab, 0x9b, + 0xd1, 0x51, 0x6e, 0xe8, 0xb0, 0x88, 0xfa, 0x6d, 0xa3, 0xb4, 0xcc, 0x7f, 0x54, 0xc0, 0x80, 0x8e, + 0xf2, 0xc1, 0x12, 0x18, 0xf4, 0xdb, 0x03, 0x0d, 0xfc, 0xac, 0xb5, 0x74, 0xd6, 0xb2, 0xe8, 0x28, + 0xb7, 0xac, 0x25, 0x62, 0x59, 0xfd, 0xb6, 0x65, 0x69, 0xe8, 0xd3, 0x6e, 0x39, 0xdd, 0xd9, 0xef, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x03, 0xc2, 0xf5, 0x98, 0xaf, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_parsing_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_parsing_error.pb.go new file mode 100644 index 0000000..2f1c6b3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/function_parsing_error.pb.go @@ -0,0 +1,165 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/function_parsing_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible function parsing errors. +type FunctionParsingErrorEnum_FunctionParsingError int32 + +const ( + // Enum unspecified. + FunctionParsingErrorEnum_UNSPECIFIED FunctionParsingErrorEnum_FunctionParsingError = 0 + // The received error code is not known in this version. + FunctionParsingErrorEnum_UNKNOWN FunctionParsingErrorEnum_FunctionParsingError = 1 + // Unexpected end of function string. + FunctionParsingErrorEnum_NO_MORE_INPUT FunctionParsingErrorEnum_FunctionParsingError = 2 + // Could not find an expected character. + FunctionParsingErrorEnum_EXPECTED_CHARACTER FunctionParsingErrorEnum_FunctionParsingError = 3 + // Unexpected separator character. + FunctionParsingErrorEnum_UNEXPECTED_SEPARATOR FunctionParsingErrorEnum_FunctionParsingError = 4 + // Unmatched left bracket or parenthesis. 
+ FunctionParsingErrorEnum_UNMATCHED_LEFT_BRACKET FunctionParsingErrorEnum_FunctionParsingError = 5 + // Unmatched right bracket or parenthesis. + FunctionParsingErrorEnum_UNMATCHED_RIGHT_BRACKET FunctionParsingErrorEnum_FunctionParsingError = 6 + // Functions are nested too deeply. + FunctionParsingErrorEnum_TOO_MANY_NESTED_FUNCTIONS FunctionParsingErrorEnum_FunctionParsingError = 7 + // Missing right-hand-side operand. + FunctionParsingErrorEnum_MISSING_RIGHT_HAND_OPERAND FunctionParsingErrorEnum_FunctionParsingError = 8 + // Invalid operator/function name. + FunctionParsingErrorEnum_INVALID_OPERATOR_NAME FunctionParsingErrorEnum_FunctionParsingError = 9 + // Feed attribute operand's argument is not an integer. + FunctionParsingErrorEnum_FEED_ATTRIBUTE_OPERAND_ARGUMENT_NOT_INTEGER FunctionParsingErrorEnum_FunctionParsingError = 10 + // Missing function operands. + FunctionParsingErrorEnum_NO_OPERANDS FunctionParsingErrorEnum_FunctionParsingError = 11 + // Function had too many operands. + FunctionParsingErrorEnum_TOO_MANY_OPERANDS FunctionParsingErrorEnum_FunctionParsingError = 12 +) + +var FunctionParsingErrorEnum_FunctionParsingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NO_MORE_INPUT", + 3: "EXPECTED_CHARACTER", + 4: "UNEXPECTED_SEPARATOR", + 5: "UNMATCHED_LEFT_BRACKET", + 6: "UNMATCHED_RIGHT_BRACKET", + 7: "TOO_MANY_NESTED_FUNCTIONS", + 8: "MISSING_RIGHT_HAND_OPERAND", + 9: "INVALID_OPERATOR_NAME", + 10: "FEED_ATTRIBUTE_OPERAND_ARGUMENT_NOT_INTEGER", + 11: "NO_OPERANDS", + 12: "TOO_MANY_OPERANDS", +} +var FunctionParsingErrorEnum_FunctionParsingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NO_MORE_INPUT": 2, + "EXPECTED_CHARACTER": 3, + "UNEXPECTED_SEPARATOR": 4, + "UNMATCHED_LEFT_BRACKET": 5, + "UNMATCHED_RIGHT_BRACKET": 6, + "TOO_MANY_NESTED_FUNCTIONS": 7, + "MISSING_RIGHT_HAND_OPERAND": 8, + "INVALID_OPERATOR_NAME": 9, + "FEED_ATTRIBUTE_OPERAND_ARGUMENT_NOT_INTEGER": 10, + "NO_OPERANDS": 11, + "TOO_MANY_OPERANDS": 12, +} + +func (x FunctionParsingErrorEnum_FunctionParsingError) String() string { + return proto.EnumName(FunctionParsingErrorEnum_FunctionParsingError_name, int32(x)) +} +func (FunctionParsingErrorEnum_FunctionParsingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_function_parsing_error_0b827c0c284191cb, []int{0, 0} +} + +// Container for enum describing possible function parsing errors. 
+type FunctionParsingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FunctionParsingErrorEnum) Reset() { *m = FunctionParsingErrorEnum{} } +func (m *FunctionParsingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*FunctionParsingErrorEnum) ProtoMessage() {} +func (*FunctionParsingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_function_parsing_error_0b827c0c284191cb, []int{0} +} +func (m *FunctionParsingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FunctionParsingErrorEnum.Unmarshal(m, b) +} +func (m *FunctionParsingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FunctionParsingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *FunctionParsingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_FunctionParsingErrorEnum.Merge(dst, src) +} +func (m *FunctionParsingErrorEnum) XXX_Size() int { + return xxx_messageInfo_FunctionParsingErrorEnum.Size(m) +} +func (m *FunctionParsingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_FunctionParsingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_FunctionParsingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*FunctionParsingErrorEnum)(nil), "google.ads.googleads.v1.errors.FunctionParsingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.FunctionParsingErrorEnum_FunctionParsingError", FunctionParsingErrorEnum_FunctionParsingError_name, FunctionParsingErrorEnum_FunctionParsingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/function_parsing_error.proto", fileDescriptor_function_parsing_error_0b827c0c284191cb) +} + +var fileDescriptor_function_parsing_error_0b827c0c284191cb = []byte{ + // 488 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcd, 0x6e, 0xd3, 0x40, + 0x14, 0x85, 0x49, 0x02, 0x2d, 0x4c, 0x40, 0xb8, 0xa3, 0xb6, 0x34, 0x01, 0xb2, 0xc8, 0x16, 0xc9, + 0x56, 0xc4, 0xce, 0x5d, 0x4d, 0xec, 0x1b, 0x67, 0xd4, 0xfa, 0x8e, 0x35, 0x1e, 0x87, 0x1f, 0x45, + 0x1a, 0x99, 0x26, 0x58, 0x91, 0x5a, 0x3b, 0xb2, 0xd3, 0x3e, 0x00, 0x8f, 0xc2, 0x92, 0x47, 0xe1, + 0x3d, 0xd8, 0xb0, 0xe6, 0x01, 0x90, 0x33, 0x89, 0xbb, 0x29, 0xac, 0x7c, 0x75, 0xcf, 0xf9, 0x8e, + 0x47, 0x73, 0x86, 0x9c, 0x67, 0x45, 0x91, 0x5d, 0x2f, 0x9d, 0x74, 0x51, 0x39, 0x66, 0xac, 0xa7, + 0xbb, 0x91, 0xb3, 0x2c, 0xcb, 0xa2, 0xac, 0x9c, 0xaf, 0xb7, 0xf9, 0xd5, 0x66, 0x55, 0xe4, 0x7a, + 0x9d, 0x96, 0xd5, 0x2a, 0xcf, 0xf4, 0x76, 0x6f, 0xaf, 0xcb, 0x62, 0x53, 0xd0, 0x81, 0x21, 0xec, + 0x74, 0x51, 0xd9, 0x0d, 0x6c, 0xdf, 0x8d, 0x6c, 0x03, 0xf7, 0xdf, 0xec, 0xc3, 0xd7, 0x2b, 0x27, + 0xcd, 0xf3, 0x62, 0x93, 0xd6, 0x51, 0x95, 0xa1, 0x87, 0xdf, 0x3a, 0xe4, 0x6c, 0xb2, 0x8b, 0x8f, + 0x4c, 0x3a, 0xd4, 0x1c, 0xe4, 0xb7, 0x37, 0xc3, 0x5f, 0x6d, 0x72, 0xfc, 0x90, 0x48, 0x5f, 0x92, + 0x6e, 0x82, 0x71, 0x04, 0x1e, 0x9f, 0x70, 0xf0, 0xad, 0x47, 0xb4, 0x4b, 0x0e, 0x13, 0xbc, 0x40, + 0xf1, 0x01, 0xad, 0x16, 0x3d, 0x22, 0x2f, 0x50, 0xe8, 0x50, 0x48, 0xd0, 0x1c, 0xa3, 0x44, 0x59, + 0x6d, 0x7a, 0x4a, 0x28, 0x7c, 0x8c, 0xc0, 0x53, 0xe0, 0x6b, 0x6f, 0xca, 0x24, 0xf3, 0x14, 0x48, + 0xab, 0x43, 0xcf, 0xc8, 0x71, 0x82, 0x8d, 0x12, 0x43, 0xc4, 0x24, 0x53, 0x42, 0x5a, 0x8f, 0x69, + 0x9f, 0x9c, 0x26, 0x18, 0x32, 0xe5, 0x4d, 0xc1, 0xd7, 0x97, 0x30, 0x51, 0x7a, 0x2c, 0x99, 0x77, + 0x01, 0xca, 0x7a, 0x42, 0x5f, 0x93, 0x57, 0xf7, 0x9a, 0xe4, 0xc1, 0xf4, 0x5e, 0x3c, 
0xa0, 0x6f, + 0x49, 0x4f, 0x09, 0xa1, 0x43, 0x86, 0x9f, 0x34, 0x42, 0x5c, 0xe7, 0x4e, 0x12, 0xf4, 0x14, 0x17, + 0x18, 0x5b, 0x87, 0x74, 0x40, 0xfa, 0x21, 0x8f, 0x63, 0x8e, 0xc1, 0x8e, 0x9c, 0x32, 0xf4, 0xb5, + 0x88, 0x40, 0x32, 0xf4, 0xad, 0xa7, 0xb4, 0x47, 0x4e, 0x38, 0xce, 0xd8, 0x25, 0xdf, 0x2d, 0x95, + 0x90, 0x1a, 0x59, 0x08, 0xd6, 0x33, 0xea, 0x90, 0x77, 0x13, 0x00, 0x5f, 0x33, 0xa5, 0x24, 0x1f, + 0x27, 0x0a, 0xf6, 0x98, 0x66, 0x32, 0x48, 0x42, 0x40, 0xa5, 0x51, 0x28, 0xcd, 0x51, 0x41, 0x00, + 0xd2, 0x22, 0xf5, 0x35, 0xa1, 0xd8, 0x9b, 0x62, 0xab, 0x4b, 0x4f, 0xc8, 0x51, 0x73, 0xb6, 0x66, + 0xfd, 0x7c, 0xfc, 0xa7, 0x45, 0x86, 0x57, 0xc5, 0x8d, 0xfd, 0xff, 0x26, 0xc7, 0xbd, 0x87, 0xba, + 0x88, 0xea, 0x1a, 0xa3, 0xd6, 0x67, 0x7f, 0x07, 0x67, 0xc5, 0x75, 0x9a, 0x67, 0x76, 0x51, 0x66, + 0x4e, 0xb6, 0xcc, 0xb7, 0x25, 0xef, 0xdf, 0xd4, 0x7a, 0x55, 0xfd, 0xeb, 0x89, 0x9d, 0x9b, 0xcf, + 0xf7, 0x76, 0x27, 0x60, 0xec, 0x47, 0x7b, 0x10, 0x98, 0x30, 0xb6, 0xa8, 0x6c, 0x33, 0xd6, 0xd3, + 0x6c, 0x64, 0x6f, 0x7f, 0x59, 0xfd, 0xdc, 0x1b, 0xe6, 0x6c, 0x51, 0xcd, 0x1b, 0xc3, 0x7c, 0x36, + 0x9a, 0x1b, 0xc3, 0xef, 0xf6, 0xd0, 0x6c, 0x5d, 0x97, 0x2d, 0x2a, 0xd7, 0x6d, 0x2c, 0xae, 0x3b, + 0x1b, 0xb9, 0xae, 0x31, 0x7d, 0x39, 0xd8, 0x9e, 0xee, 0xfd, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xc2, 0x4d, 0xd3, 0x1f, 0xff, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/geo_target_constant_suggestion_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/geo_target_constant_suggestion_error.pb.go new file mode 100644 index 0000000..62f55dc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/geo_target_constant_suggestion_error.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/geo_target_constant_suggestion_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible geo target constant suggestion errors. +type GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError int32 + +const ( + // Enum unspecified. + GeoTargetConstantSuggestionErrorEnum_UNSPECIFIED GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 0 + // The received error code is not known in this version. + GeoTargetConstantSuggestionErrorEnum_UNKNOWN GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 1 + // A location name cannot be greater than 300 characters. + GeoTargetConstantSuggestionErrorEnum_LOCATION_NAME_SIZE_LIMIT GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 2 + // At most 25 location names can be specified in a SuggestGeoTargetConstants + // method. 
+ GeoTargetConstantSuggestionErrorEnum_LOCATION_NAME_LIMIT GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 3 + // The country code is invalid. + GeoTargetConstantSuggestionErrorEnum_INVALID_COUNTRY_CODE GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 4 + // Geo target constant resource names or location names must be provided in + // the request. + GeoTargetConstantSuggestionErrorEnum_REQUEST_PARAMETERS_UNSET GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError = 5 +) + +var GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LOCATION_NAME_SIZE_LIMIT", + 3: "LOCATION_NAME_LIMIT", + 4: "INVALID_COUNTRY_CODE", + 5: "REQUEST_PARAMETERS_UNSET", +} +var GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LOCATION_NAME_SIZE_LIMIT": 2, + "LOCATION_NAME_LIMIT": 3, + "INVALID_COUNTRY_CODE": 4, + "REQUEST_PARAMETERS_UNSET": 5, +} + +func (x GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError) String() string { + return proto.EnumName(GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError_name, int32(x)) +} +func (GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_suggestion_error_30f41090bf4a4fdb, []int{0, 0} +} + +// Container for enum describing possible geo target constant suggestion errors. +type GeoTargetConstantSuggestionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetConstantSuggestionErrorEnum) Reset() { *m = GeoTargetConstantSuggestionErrorEnum{} } +func (m *GeoTargetConstantSuggestionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*GeoTargetConstantSuggestionErrorEnum) ProtoMessage() {} +func (*GeoTargetConstantSuggestionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_suggestion_error_30f41090bf4a4fdb, []int{0} +} +func (m *GeoTargetConstantSuggestionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum.Unmarshal(m, b) +} +func (m *GeoTargetConstantSuggestionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *GeoTargetConstantSuggestionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum.Merge(dst, src) +} +func (m *GeoTargetConstantSuggestionErrorEnum) XXX_Size() int { + return xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum.Size(m) +} +func (m *GeoTargetConstantSuggestionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetConstantSuggestionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GeoTargetConstantSuggestionErrorEnum)(nil), "google.ads.googleads.v1.errors.GeoTargetConstantSuggestionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError", GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError_name, GeoTargetConstantSuggestionErrorEnum_GeoTargetConstantSuggestionError_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/errors/geo_target_constant_suggestion_error.proto", fileDescriptor_geo_target_constant_suggestion_error_30f41090bf4a4fdb) +} + +var fileDescriptor_geo_target_constant_suggestion_error_30f41090bf4a4fdb = []byte{ + // 386 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xcf, 0xaa, 0xd4, 0x30, + 0x14, 0xc6, 0x6d, 0xaf, 0x7f, 0x20, 0x77, 0x61, 0xa9, 0x82, 0x17, 0xb9, 0x0c, 0x52, 0x74, 0x9b, + 0x52, 0xdc, 0xc5, 0x55, 0xa6, 0x8d, 0x43, 0x70, 0x26, 0xad, 0xfd, 0x27, 0x0e, 0x85, 0x50, 0xa7, + 0x25, 0x0c, 0xcc, 0x24, 0x43, 0xd3, 0x99, 0xb5, 0xcf, 0xe2, 0xd2, 0x8d, 0xef, 0xe1, 0x13, 0xf8, + 0x0c, 0x3e, 0x85, 0xb4, 0x99, 0x29, 0xb8, 0xd0, 0x59, 0xe5, 0x23, 0xe7, 0x3b, 0xbf, 0x2f, 0x39, + 0x07, 0x50, 0xa1, 0x94, 0xd8, 0xb5, 0x7e, 0xdd, 0x68, 0xdf, 0xc8, 0x41, 0x9d, 0x02, 0xbf, 0xed, + 0x3a, 0xd5, 0x69, 0x5f, 0xb4, 0x8a, 0xf7, 0x75, 0x27, 0xda, 0x9e, 0x6f, 0x94, 0xd4, 0x7d, 0x2d, + 0x7b, 0xae, 0x8f, 0x42, 0xb4, 0xba, 0xdf, 0x2a, 0xc9, 0x47, 0x17, 0x3c, 0x74, 0xaa, 0x57, 0xee, + 0xcc, 0xf4, 0xc3, 0xba, 0xd1, 0x70, 0x42, 0xc1, 0x53, 0x00, 0x0d, 0xea, 0xe5, 0xfd, 0x25, 0xea, + 0xb0, 0xf5, 0x6b, 0x29, 0x55, 0x5f, 0x0f, 0x08, 0x6d, 0xba, 0xbd, 0x5f, 0x16, 0x78, 0xbd, 0x68, + 0x55, 0x3e, 0x66, 0x85, 0xe7, 0xa8, 0x6c, 0x4a, 0x22, 0x03, 0x83, 0xc8, 0xe3, 0xde, 0xfb, 0x61, + 0x81, 0x57, 0xd7, 0x8c, 0xee, 0x53, 0x70, 0x5b, 0xb0, 0x2c, 0x21, 0x21, 0x7d, 0x4f, 0x49, 0xe4, + 0x3c, 0x70, 0x6f, 0xc1, 0x93, 0x82, 0x7d, 0x60, 0xf1, 0x27, 0xe6, 0x58, 0xee, 0x3d, 0xb8, 0x5b, + 0xc6, 0x21, 0xce, 0x69, 0xcc, 0x38, 0xc3, 0x2b, 0xc2, 0x33, 0xba, 0x26, 0x7c, 0x49, 0x57, 0x34, + 0x77, 0x6c, 0xf7, 0x05, 0x78, 0xf6, 0x77, 0xd5, 0x14, 0x6e, 0xdc, 0x3b, 0xf0, 0x9c, 0xb2, 0x12, + 0x2f, 0x69, 0xc4, 0xc3, 0xb8, 0x60, 0x79, 0xfa, 0x99, 0x87, 0x71, 0x44, 0x9c, 0x87, 0x03, 0x30, + 0x25, 0x1f, 0x0b, 0x92, 0xe5, 0x3c, 0xc1, 0x29, 0x5e, 0x91, 0x9c, 0xa4, 0x19, 0x2f, 0x58, 0x46, + 0x72, 0xe7, 0xd1, 0xfc, 0xab, 0x0d, 0xbc, 0x8d, 0xda, 0xc3, 0xff, 0xcf, 0x67, 0xfe, 0xe6, 0xda, + 0xaf, 0x92, 0x61, 0x50, 0x89, 0xb5, 0x8e, 0xce, 0x20, 0xa1, 0x76, 0xb5, 0x14, 0x50, 0x75, 0xc2, + 0x17, 0xad, 0x1c, 0xc7, 0x78, 0xd9, 0xe1, 0x61, 0xab, 0xff, 0xb5, 0xd2, 0x77, 0xe6, 0xf8, 0x66, + 0xdf, 0x2c, 0x30, 0xfe, 0x6e, 0xcf, 0x16, 0x06, 0x86, 0x1b, 0x0d, 0x8d, 0x1c, 0x54, 0x19, 0xc0, + 0x31, 0x52, 0xff, 0xbc, 0x18, 0x2a, 0xdc, 0xe8, 0x6a, 0x32, 0x54, 0x65, 0x50, 0x19, 0xc3, 0x6f, + 0xdb, 0x33, 0xb7, 0x08, 0xe1, 0x46, 0x23, 0x34, 0x59, 0x10, 0x2a, 0x03, 0x84, 0x8c, 0xe9, 0xcb, + 0xe3, 0xf1, 0x75, 0x6f, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x82, 0x5e, 0xcb, 0x80, 0x6f, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/header_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/header_error.pb.go new file mode 100644 index 0000000..301bf28 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/header_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/header_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible header errors. +type HeaderErrorEnum_HeaderError int32 + +const ( + // Enum unspecified. + HeaderErrorEnum_UNSPECIFIED HeaderErrorEnum_HeaderError = 0 + // The received error code is not known in this version. + HeaderErrorEnum_UNKNOWN HeaderErrorEnum_HeaderError = 1 + // The login customer id could not be validated. + HeaderErrorEnum_INVALID_LOGIN_CUSTOMER_ID HeaderErrorEnum_HeaderError = 3 +) + +var HeaderErrorEnum_HeaderError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "INVALID_LOGIN_CUSTOMER_ID", +} +var HeaderErrorEnum_HeaderError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_LOGIN_CUSTOMER_ID": 3, +} + +func (x HeaderErrorEnum_HeaderError) String() string { + return proto.EnumName(HeaderErrorEnum_HeaderError_name, int32(x)) +} +func (HeaderErrorEnum_HeaderError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_header_error_12c8e9116eb505a1, []int{0, 0} +} + +// Container for enum describing possible header errors. +type HeaderErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HeaderErrorEnum) Reset() { *m = HeaderErrorEnum{} } +func (m *HeaderErrorEnum) String() string { return proto.CompactTextString(m) } +func (*HeaderErrorEnum) ProtoMessage() {} +func (*HeaderErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_header_error_12c8e9116eb505a1, []int{0} +} +func (m *HeaderErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HeaderErrorEnum.Unmarshal(m, b) +} +func (m *HeaderErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HeaderErrorEnum.Marshal(b, m, deterministic) +} +func (dst *HeaderErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_HeaderErrorEnum.Merge(dst, src) +} +func (m *HeaderErrorEnum) XXX_Size() int { + return xxx_messageInfo_HeaderErrorEnum.Size(m) +} +func (m *HeaderErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_HeaderErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_HeaderErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*HeaderErrorEnum)(nil), "google.ads.googleads.v1.errors.HeaderErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.HeaderErrorEnum_HeaderError", HeaderErrorEnum_HeaderError_name, HeaderErrorEnum_HeaderError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/header_error.proto", fileDescriptor_header_error_12c8e9116eb505a1) +} + +var fileDescriptor_header_error_12c8e9116eb505a1 = []byte{ + // 300 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0xdd, 0x06, 0x0a, 0xd9, 0x61, 0xa5, 0x37, 0x45, 0x77, 0xe8, 0x03, 0x24, 0x04, 0x6f, + 0xf1, 0x94, 0xad, 0x75, 0x46, 0x67, 0x36, 0x9c, 0xab, 0x20, 0x95, 0x12, 0x4d, 0x89, 0x83, 0x2d, + 0x19, 0x49, 0xdd, 0x03, 0x79, 0xf4, 0x51, 0x7c, 0x14, 0x7d, 0x09, 0x69, 0x63, 0xcb, 0x2e, 0x7a, + 0xca, 0x97, 0x3f, 0xbf, 0xef, 0xfb, 0x7f, 0xfc, 0x01, 0x56, 0xc6, 0xa8, 0x75, 0x81, 0x84, 0x74, + 0xc8, 0xcb, 
0x4a, 0xed, 0x30, 0x2a, 0xac, 0x35, 0xd6, 0xa1, 0xd7, 0x42, 0xc8, 0xc2, 0xe6, 0xf5, + 0x0f, 0x6e, 0xad, 0x29, 0x4d, 0x38, 0xf4, 0x1c, 0x14, 0xd2, 0xc1, 0xd6, 0x02, 0x77, 0x18, 0x7a, + 0xcb, 0xc9, 0x69, 0x13, 0xb9, 0x5d, 0x21, 0xa1, 0xb5, 0x29, 0x45, 0xb9, 0x32, 0xda, 0x79, 0x77, + 0xf4, 0x04, 0x06, 0x57, 0x75, 0x66, 0x52, 0xd1, 0x89, 0x7e, 0xdb, 0x44, 0xd7, 0xa0, 0xbf, 0x37, + 0x0a, 0x07, 0xa0, 0xbf, 0xe4, 0x8b, 0x79, 0x32, 0x66, 0x97, 0x2c, 0x89, 0x83, 0x83, 0xb0, 0x0f, + 0x8e, 0x96, 0xfc, 0x86, 0xcf, 0x1e, 0x78, 0xd0, 0x09, 0xcf, 0xc0, 0x31, 0xe3, 0x29, 0x9d, 0xb2, + 0x38, 0x9f, 0xce, 0x26, 0x8c, 0xe7, 0xe3, 0xe5, 0xe2, 0x7e, 0x76, 0x9b, 0xdc, 0xe5, 0x2c, 0x0e, + 0x7a, 0xa3, 0xef, 0x0e, 0x88, 0x5e, 0xcc, 0x06, 0xfe, 0xdf, 0x71, 0x14, 0xec, 0x2d, 0x9c, 0x57, + 0xbd, 0xe6, 0x9d, 0xc7, 0xf8, 0xd7, 0xa3, 0xcc, 0x5a, 0x68, 0x05, 0x8d, 0x55, 0x48, 0x15, 0xba, + 0x6e, 0xdd, 0x9c, 0x66, 0xbb, 0x72, 0x7f, 0x5d, 0xea, 0xc2, 0x3f, 0xef, 0xdd, 0xde, 0x84, 0xd2, + 0x8f, 0xee, 0x70, 0xe2, 0xc3, 0xa8, 0x74, 0xd0, 0xcb, 0x4a, 0xa5, 0x18, 0xd6, 0x2b, 0xdd, 0x67, + 0x03, 0x64, 0x54, 0xba, 0xac, 0x05, 0xb2, 0x14, 0x67, 0x1e, 0xf8, 0xea, 0x46, 0x7e, 0x4a, 0x08, + 0x95, 0x8e, 0x90, 0x16, 0x21, 0x24, 0xc5, 0x84, 0x78, 0xe8, 0xf9, 0xb0, 0x6e, 0x77, 0xfe, 0x13, + 0x00, 0x00, 0xff, 0xff, 0xab, 0x94, 0x78, 0x3f, 0xc6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/id_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/id_error.pb.go new file mode 100644 index 0000000..5dc2c26 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/id_error.pb.go @@ -0,0 +1,112 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/id_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible id errors. +type IdErrorEnum_IdError int32 + +const ( + // Enum unspecified. + IdErrorEnum_UNSPECIFIED IdErrorEnum_IdError = 0 + // The received error code is not known in this version. + IdErrorEnum_UNKNOWN IdErrorEnum_IdError = 1 + // Id not found + IdErrorEnum_NOT_FOUND IdErrorEnum_IdError = 2 +) + +var IdErrorEnum_IdError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NOT_FOUND", +} +var IdErrorEnum_IdError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NOT_FOUND": 2, +} + +func (x IdErrorEnum_IdError) String() string { + return proto.EnumName(IdErrorEnum_IdError_name, int32(x)) +} +func (IdErrorEnum_IdError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_id_error_7d0b19ff3e859e44, []int{0, 0} +} + +// Container for enum describing possible id errors. 
+type IdErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IdErrorEnum) Reset() { *m = IdErrorEnum{} } +func (m *IdErrorEnum) String() string { return proto.CompactTextString(m) } +func (*IdErrorEnum) ProtoMessage() {} +func (*IdErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_id_error_7d0b19ff3e859e44, []int{0} +} +func (m *IdErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IdErrorEnum.Unmarshal(m, b) +} +func (m *IdErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IdErrorEnum.Marshal(b, m, deterministic) +} +func (dst *IdErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_IdErrorEnum.Merge(dst, src) +} +func (m *IdErrorEnum) XXX_Size() int { + return xxx_messageInfo_IdErrorEnum.Size(m) +} +func (m *IdErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_IdErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_IdErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*IdErrorEnum)(nil), "google.ads.googleads.v1.errors.IdErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.IdErrorEnum_IdError", IdErrorEnum_IdError_name, IdErrorEnum_IdError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/id_error.proto", fileDescriptor_id_error_7d0b19ff3e859e44) +} + +var fileDescriptor_id_error_7d0b19ff3e859e44 = []byte{ + // 279 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4a, 0xc4, 0x30, + 0x14, 0x86, 0x6d, 0x05, 0x07, 0x53, 0xc5, 0xd2, 0xa5, 0xc8, 0x2c, 0xba, 0x37, 0xa1, 0x08, 0x2e, + 0xe2, 0xaa, 0x63, 0x3b, 0x43, 0x11, 0xd2, 0x82, 0xb6, 0x82, 0x14, 0x86, 0x6a, 0x4a, 0x28, 0xcc, + 0x24, 0x25, 0xa9, 0x73, 0x20, 0x97, 0x1e, 0xc5, 0x8b, 0x08, 0x9e, 0x42, 0xda, 0xd7, 0x76, 0xa7, + 0xab, 0x7c, 0x09, 0x5f, 0xfe, 0xf7, 0x27, 0xe8, 0x5a, 0x28, 0x25, 0x76, 0x35, 0xa9, 0xb8, 0x21, + 0x80, 0x3d, 0x1d, 0x02, 0x52, 0x6b, 0xad, 0xb4, 0x21, 0x0d, 0xdf, 0x0e, 0x84, 0x5b, 0xad, 0x3a, + 0xe5, 0x2d, 0xc1, 0xc1, 0x15, 0x37, 0x78, 0xd6, 0xf1, 0x21, 0xc0, 0xa0, 0x5f, 0x5e, 0x4d, 0x71, + 0x6d, 0x43, 0x2a, 0x29, 0x55, 0x57, 0x75, 0x8d, 0x92, 0x06, 0x6e, 0xfb, 0x31, 0x72, 0x12, 0x1e, + 0xf7, 0x66, 0x2c, 0xdf, 0xf7, 0xfe, 0x2d, 0x5a, 0x8c, 0x5b, 0xef, 0x02, 0x39, 0x39, 0x7b, 0xcc, + 0xe2, 0xfb, 0x64, 0x9d, 0xc4, 0x91, 0x7b, 0xe4, 0x39, 0x68, 0x91, 0xb3, 0x07, 0x96, 0x3e, 0x33, + 0xd7, 0xf2, 0xce, 0xd1, 0x29, 0x4b, 0x9f, 0xb6, 0xeb, 0x34, 0x67, 0x91, 0x6b, 0xaf, 0xbe, 0x2d, + 0xe4, 0xbf, 0xa9, 0x3d, 0xfe, 0xbf, 0xcb, 0xea, 0x6c, 0x0c, 0xcf, 0xfa, 0xd9, 0x99, 0xf5, 0x12, + 0x8d, 0xbe, 0x50, 0xbb, 0x4a, 0x0a, 0xac, 0xb4, 0x20, 0xa2, 0x96, 0x43, 0xb3, 0xe9, 0xe9, 0x6d, + 0x63, 0xfe, 0xfa, 0x89, 0x3b, 0x58, 0x3e, 0xec, 0xe3, 0x4d, 0x18, 0x7e, 0xda, 0xcb, 0x0d, 0x84, + 0x85, 0xdc, 0x60, 0xc0, 0x9e, 0x8a, 0x00, 0x0f, 0x23, 0xcd, 0xd7, 0x24, 0x94, 0x21, 0x37, 0xe5, + 0x2c, 0x94, 0x45, 0x50, 0x82, 0xf0, 0x63, 0xfb, 0x70, 0x4a, 0x69, 0xc8, 0x0d, 0xa5, 0xb3, 0x42, + 0x69, 0x11, 0x50, 0x0a, 0xd2, 0xeb, 0xc9, 0xd0, 0xee, 0xe6, 0x37, 0x00, 0x00, 0xff, 0xff, 0x69, + 0x6d, 0xa5, 0x72, 0xa6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/image_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/image_error.pb.go new file mode 100644 index 0000000..6ec62e0 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/image_error.pb.go @@ -0,0 +1,289 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/image_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible image errors. +type ImageErrorEnum_ImageError int32 + +const ( + // Enum unspecified. + ImageErrorEnum_UNSPECIFIED ImageErrorEnum_ImageError = 0 + // The received error code is not known in this version. + ImageErrorEnum_UNKNOWN ImageErrorEnum_ImageError = 1 + // The image is not valid. + ImageErrorEnum_INVALID_IMAGE ImageErrorEnum_ImageError = 2 + // The image could not be stored. + ImageErrorEnum_STORAGE_ERROR ImageErrorEnum_ImageError = 3 + // There was a problem with the request. + ImageErrorEnum_BAD_REQUEST ImageErrorEnum_ImageError = 4 + // The image is not of legal dimensions. + ImageErrorEnum_UNEXPECTED_SIZE ImageErrorEnum_ImageError = 5 + // Animated image are not permitted. + ImageErrorEnum_ANIMATED_NOT_ALLOWED ImageErrorEnum_ImageError = 6 + // Animation is too long. + ImageErrorEnum_ANIMATION_TOO_LONG ImageErrorEnum_ImageError = 7 + // There was an error on the server. + ImageErrorEnum_SERVER_ERROR ImageErrorEnum_ImageError = 8 + // Image cannot be in CMYK color format. + ImageErrorEnum_CMYK_JPEG_NOT_ALLOWED ImageErrorEnum_ImageError = 9 + // Flash images are not permitted. + ImageErrorEnum_FLASH_NOT_ALLOWED ImageErrorEnum_ImageError = 10 + // Flash images must support clickTag. + ImageErrorEnum_FLASH_WITHOUT_CLICKTAG ImageErrorEnum_ImageError = 11 + // A flash error has occurred after fixing the click tag. + ImageErrorEnum_FLASH_ERROR_AFTER_FIXING_CLICK_TAG ImageErrorEnum_ImageError = 12 + // Unacceptable visual effects. + ImageErrorEnum_ANIMATED_VISUAL_EFFECT ImageErrorEnum_ImageError = 13 + // There was a problem with the flash image. + ImageErrorEnum_FLASH_ERROR ImageErrorEnum_ImageError = 14 + // Incorrect image layout. + ImageErrorEnum_LAYOUT_PROBLEM ImageErrorEnum_ImageError = 15 + // There was a problem reading the image file. + ImageErrorEnum_PROBLEM_READING_IMAGE_FILE ImageErrorEnum_ImageError = 16 + // There was an error storing the image. + ImageErrorEnum_ERROR_STORING_IMAGE ImageErrorEnum_ImageError = 17 + // The aspect ratio of the image is not allowed. + ImageErrorEnum_ASPECT_RATIO_NOT_ALLOWED ImageErrorEnum_ImageError = 18 + // Flash cannot have network objects. + ImageErrorEnum_FLASH_HAS_NETWORK_OBJECTS ImageErrorEnum_ImageError = 19 + // Flash cannot have network methods. + ImageErrorEnum_FLASH_HAS_NETWORK_METHODS ImageErrorEnum_ImageError = 20 + // Flash cannot have a Url. + ImageErrorEnum_FLASH_HAS_URL ImageErrorEnum_ImageError = 21 + // Flash cannot use mouse tracking. + ImageErrorEnum_FLASH_HAS_MOUSE_TRACKING ImageErrorEnum_ImageError = 22 + // Flash cannot have a random number. 
+ ImageErrorEnum_FLASH_HAS_RANDOM_NUM ImageErrorEnum_ImageError = 23 + // Ad click target cannot be '_self'. + ImageErrorEnum_FLASH_SELF_TARGETS ImageErrorEnum_ImageError = 24 + // GetUrl method should only use '_blank'. + ImageErrorEnum_FLASH_BAD_GETURL_TARGET ImageErrorEnum_ImageError = 25 + // Flash version is not supported. + ImageErrorEnum_FLASH_VERSION_NOT_SUPPORTED ImageErrorEnum_ImageError = 26 + // Flash movies need to have hard coded click URL or clickTAG + ImageErrorEnum_FLASH_WITHOUT_HARD_CODED_CLICK_URL ImageErrorEnum_ImageError = 27 + // Uploaded flash file is corrupted. + ImageErrorEnum_INVALID_FLASH_FILE ImageErrorEnum_ImageError = 28 + // Uploaded flash file can be parsed, but the click tag can not be fixed + // properly. + ImageErrorEnum_FAILED_TO_FIX_CLICK_TAG_IN_FLASH ImageErrorEnum_ImageError = 29 + // Flash movie accesses network resources + ImageErrorEnum_FLASH_ACCESSES_NETWORK_RESOURCES ImageErrorEnum_ImageError = 30 + // Flash movie attempts to call external javascript code + ImageErrorEnum_FLASH_EXTERNAL_JS_CALL ImageErrorEnum_ImageError = 31 + // Flash movie attempts to call flash system commands + ImageErrorEnum_FLASH_EXTERNAL_FS_CALL ImageErrorEnum_ImageError = 32 + // Image file is too large. + ImageErrorEnum_FILE_TOO_LARGE ImageErrorEnum_ImageError = 33 + // Image data is too large. + ImageErrorEnum_IMAGE_DATA_TOO_LARGE ImageErrorEnum_ImageError = 34 + // Error while processing the image. + ImageErrorEnum_IMAGE_PROCESSING_ERROR ImageErrorEnum_ImageError = 35 + // Image is too small. + ImageErrorEnum_IMAGE_TOO_SMALL ImageErrorEnum_ImageError = 36 + // Input was invalid. + ImageErrorEnum_INVALID_INPUT ImageErrorEnum_ImageError = 37 + // There was a problem reading the image file. + ImageErrorEnum_PROBLEM_READING_FILE ImageErrorEnum_ImageError = 38 +) + +var ImageErrorEnum_ImageError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_IMAGE", + 3: "STORAGE_ERROR", + 4: "BAD_REQUEST", + 5: "UNEXPECTED_SIZE", + 6: "ANIMATED_NOT_ALLOWED", + 7: "ANIMATION_TOO_LONG", + 8: "SERVER_ERROR", + 9: "CMYK_JPEG_NOT_ALLOWED", + 10: "FLASH_NOT_ALLOWED", + 11: "FLASH_WITHOUT_CLICKTAG", + 12: "FLASH_ERROR_AFTER_FIXING_CLICK_TAG", + 13: "ANIMATED_VISUAL_EFFECT", + 14: "FLASH_ERROR", + 15: "LAYOUT_PROBLEM", + 16: "PROBLEM_READING_IMAGE_FILE", + 17: "ERROR_STORING_IMAGE", + 18: "ASPECT_RATIO_NOT_ALLOWED", + 19: "FLASH_HAS_NETWORK_OBJECTS", + 20: "FLASH_HAS_NETWORK_METHODS", + 21: "FLASH_HAS_URL", + 22: "FLASH_HAS_MOUSE_TRACKING", + 23: "FLASH_HAS_RANDOM_NUM", + 24: "FLASH_SELF_TARGETS", + 25: "FLASH_BAD_GETURL_TARGET", + 26: "FLASH_VERSION_NOT_SUPPORTED", + 27: "FLASH_WITHOUT_HARD_CODED_CLICK_URL", + 28: "INVALID_FLASH_FILE", + 29: "FAILED_TO_FIX_CLICK_TAG_IN_FLASH", + 30: "FLASH_ACCESSES_NETWORK_RESOURCES", + 31: "FLASH_EXTERNAL_JS_CALL", + 32: "FLASH_EXTERNAL_FS_CALL", + 33: "FILE_TOO_LARGE", + 34: "IMAGE_DATA_TOO_LARGE", + 35: "IMAGE_PROCESSING_ERROR", + 36: "IMAGE_TOO_SMALL", + 37: "INVALID_INPUT", + 38: "PROBLEM_READING_FILE", +} +var ImageErrorEnum_ImageError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_IMAGE": 2, + "STORAGE_ERROR": 3, + "BAD_REQUEST": 4, + "UNEXPECTED_SIZE": 5, + "ANIMATED_NOT_ALLOWED": 6, + "ANIMATION_TOO_LONG": 7, + "SERVER_ERROR": 8, + "CMYK_JPEG_NOT_ALLOWED": 9, + "FLASH_NOT_ALLOWED": 10, + "FLASH_WITHOUT_CLICKTAG": 11, + "FLASH_ERROR_AFTER_FIXING_CLICK_TAG": 12, + "ANIMATED_VISUAL_EFFECT": 13, + "FLASH_ERROR": 14, + "LAYOUT_PROBLEM": 15, + "PROBLEM_READING_IMAGE_FILE": 16, + "ERROR_STORING_IMAGE": 17, + 
"ASPECT_RATIO_NOT_ALLOWED": 18, + "FLASH_HAS_NETWORK_OBJECTS": 19, + "FLASH_HAS_NETWORK_METHODS": 20, + "FLASH_HAS_URL": 21, + "FLASH_HAS_MOUSE_TRACKING": 22, + "FLASH_HAS_RANDOM_NUM": 23, + "FLASH_SELF_TARGETS": 24, + "FLASH_BAD_GETURL_TARGET": 25, + "FLASH_VERSION_NOT_SUPPORTED": 26, + "FLASH_WITHOUT_HARD_CODED_CLICK_URL": 27, + "INVALID_FLASH_FILE": 28, + "FAILED_TO_FIX_CLICK_TAG_IN_FLASH": 29, + "FLASH_ACCESSES_NETWORK_RESOURCES": 30, + "FLASH_EXTERNAL_JS_CALL": 31, + "FLASH_EXTERNAL_FS_CALL": 32, + "FILE_TOO_LARGE": 33, + "IMAGE_DATA_TOO_LARGE": 34, + "IMAGE_PROCESSING_ERROR": 35, + "IMAGE_TOO_SMALL": 36, + "INVALID_INPUT": 37, + "PROBLEM_READING_FILE": 38, +} + +func (x ImageErrorEnum_ImageError) String() string { + return proto.EnumName(ImageErrorEnum_ImageError_name, int32(x)) +} +func (ImageErrorEnum_ImageError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_error_f8e718005d4b0553, []int{0, 0} +} + +// Container for enum describing possible image errors. +type ImageErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageErrorEnum) Reset() { *m = ImageErrorEnum{} } +func (m *ImageErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ImageErrorEnum) ProtoMessage() {} +func (*ImageErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_image_error_f8e718005d4b0553, []int{0} +} +func (m *ImageErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageErrorEnum.Unmarshal(m, b) +} +func (m *ImageErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ImageErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageErrorEnum.Merge(dst, src) +} +func (m *ImageErrorEnum) XXX_Size() int { + return xxx_messageInfo_ImageErrorEnum.Size(m) +} +func (m *ImageErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ImageErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ImageErrorEnum)(nil), "google.ads.googleads.v1.errors.ImageErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ImageErrorEnum_ImageError", ImageErrorEnum_ImageError_name, ImageErrorEnum_ImageError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/image_error.proto", fileDescriptor_image_error_f8e718005d4b0553) +} + +var fileDescriptor_image_error_f8e718005d4b0553 = []byte{ + // 786 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xcd, 0x72, 0xdb, 0x36, + 0x10, 0xae, 0x9d, 0x36, 0x4e, 0xe1, 0xc4, 0x86, 0xe1, 0xf8, 0xdf, 0x71, 0x52, 0x35, 0xcd, 0x51, + 0xaa, 0xa6, 0x37, 0xf5, 0x04, 0x91, 0x4b, 0x0a, 0x16, 0x08, 0xb0, 0x00, 0x28, 0x3b, 0x19, 0xcf, + 0x60, 0xd4, 0xca, 0xa3, 0xf1, 0x4c, 0x2c, 0x7a, 0x44, 0x37, 0x0f, 0xd1, 0xc7, 0xe8, 0xb1, 0x8f, + 0xd2, 0x47, 0xc9, 0x0b, 0xf4, 0xda, 0x59, 0x42, 0x12, 0xe3, 0x4e, 0x9a, 0x93, 0xa0, 0xfd, 0x3e, + 0x7c, 0xbb, 0xd8, 0x6f, 0xb9, 0xe4, 0xc7, 0x69, 0x59, 0x4e, 0xdf, 0x5f, 0x77, 0xc6, 0x93, 0xaa, + 0x13, 0x8e, 0x78, 0xfa, 0xd0, 0xed, 0x5c, 0xcf, 0xe7, 0xe5, 0xbc, 0xea, 0xdc, 0xdc, 0x8e, 0xa7, + 0xd7, 0xbe, 0xfe, 0xd3, 0xbe, 0x9b, 0x97, 0xf7, 0x25, 0x3b, 0x0b, 0xb4, 0xf6, 0x78, 0x52, 0xb5, + 0x57, 0x37, 0xda, 0x1f, 0xba, 0xed, 0x70, 0xe3, 0xf8, 0x74, 0xa9, 0x78, 0x77, 0xd3, 0x19, 0xcf, + 0x66, 0xe5, 0xfd, 0xf8, 0xfe, 0xa6, 0x9c, 0x55, 0xe1, 0x76, 0xeb, 
0x8f, 0x27, 0x64, 0x4b, 0xa0, + 0x26, 0x20, 0x1b, 0x66, 0xbf, 0xdf, 0xb6, 0xfe, 0xd9, 0x20, 0xa4, 0x09, 0xb1, 0x6d, 0xb2, 0x59, + 0x28, 0x9b, 0x43, 0x24, 0x12, 0x01, 0x31, 0xfd, 0x8a, 0x6d, 0x92, 0x8d, 0x42, 0x0d, 0x95, 0xbe, + 0x50, 0x74, 0x8d, 0xed, 0x90, 0x67, 0x42, 0x8d, 0xb8, 0x14, 0xb1, 0x17, 0x19, 0x4f, 0x81, 0xae, + 0x63, 0xc8, 0x3a, 0x6d, 0x78, 0x0a, 0x1e, 0x8c, 0xd1, 0x86, 0x3e, 0x42, 0x8d, 0x3e, 0x8f, 0xbd, + 0x81, 0x5f, 0x0a, 0xb0, 0x8e, 0x7e, 0xcd, 0x76, 0xc9, 0x76, 0xa1, 0xe0, 0x32, 0x87, 0xc8, 0x41, + 0xec, 0xad, 0x78, 0x07, 0xf4, 0x1b, 0x76, 0x48, 0x9e, 0x73, 0x25, 0x32, 0x8e, 0x21, 0xa5, 0x9d, + 0xe7, 0x52, 0xea, 0x0b, 0x88, 0xe9, 0x63, 0xb6, 0x4f, 0x58, 0x40, 0x84, 0x56, 0xde, 0x69, 0xed, + 0xa5, 0x56, 0x29, 0xdd, 0x60, 0x94, 0x3c, 0xb5, 0x60, 0x46, 0x60, 0x16, 0x99, 0x9e, 0xb0, 0x23, + 0xb2, 0x17, 0x65, 0x6f, 0x87, 0xfe, 0x3c, 0x87, 0xf4, 0x81, 0xc8, 0xb7, 0x6c, 0x8f, 0xec, 0x24, + 0x92, 0xdb, 0xc1, 0x83, 0x30, 0x61, 0xc7, 0x64, 0x3f, 0x84, 0x2f, 0x84, 0x1b, 0xe8, 0xc2, 0xf9, + 0x48, 0x8a, 0x68, 0xe8, 0x78, 0x4a, 0x37, 0xd9, 0x1b, 0xd2, 0x0a, 0x58, 0x2d, 0xef, 0x79, 0xe2, + 0xc0, 0xf8, 0x44, 0x5c, 0x0a, 0x95, 0x06, 0x9a, 0x47, 0xde, 0x53, 0xd4, 0x58, 0x55, 0x3e, 0x12, + 0xb6, 0xe0, 0xd2, 0x43, 0x92, 0x40, 0xe4, 0xe8, 0x33, 0x7c, 0xfb, 0x27, 0x1a, 0x74, 0x8b, 0x31, + 0xb2, 0x25, 0xf9, 0x5b, 0xcc, 0x94, 0x1b, 0xdd, 0x97, 0x90, 0xd1, 0x6d, 0x76, 0x46, 0x8e, 0x17, + 0x7f, 0xbc, 0x01, 0x1e, 0xa3, 0x7e, 0xdd, 0x4e, 0x9f, 0x08, 0x09, 0x94, 0xb2, 0x03, 0xb2, 0x1b, + 0x4a, 0xc0, 0xce, 0xae, 0x50, 0xba, 0xc3, 0x4e, 0xc9, 0x21, 0x47, 0x73, 0x9c, 0x37, 0xd8, 0x9d, + 0x07, 0x6f, 0x63, 0xec, 0x05, 0x39, 0x0a, 0xb9, 0x07, 0xdc, 0x7a, 0x05, 0xee, 0x42, 0x9b, 0xa1, + 0xd7, 0xfd, 0x73, 0x88, 0x9c, 0xa5, 0xbb, 0x9f, 0x87, 0x33, 0x70, 0x03, 0x1d, 0x5b, 0xfa, 0x1c, + 0x8d, 0x6c, 0xe0, 0xc2, 0x48, 0xba, 0x87, 0xe9, 0x9a, 0x50, 0xa6, 0x0b, 0x0b, 0xde, 0x19, 0x1e, + 0x0d, 0x85, 0x4a, 0xe9, 0x3e, 0x1a, 0xd8, 0xa0, 0x86, 0xab, 0x58, 0x67, 0x5e, 0x15, 0x19, 0x3d, + 0x40, 0x03, 0x03, 0x62, 0x41, 0x26, 0xde, 0x71, 0x93, 0x82, 0xb3, 0xf4, 0x90, 0x9d, 0x90, 0x83, + 0x10, 0xc7, 0xf1, 0x48, 0xc1, 0x15, 0x46, 0x2e, 0x50, 0x7a, 0xc4, 0x5e, 0x92, 0x93, 0x00, 0x8e, + 0xc0, 0x58, 0x74, 0x1e, 0x1f, 0x67, 0x8b, 0x3c, 0xd7, 0xc6, 0x41, 0x4c, 0x8f, 0x1b, 0x7b, 0x96, + 0xd6, 0x0d, 0xb8, 0x89, 0x7d, 0xa4, 0x63, 0x88, 0x17, 0xf6, 0x60, 0xd5, 0x27, 0x98, 0x7d, 0x39, + 0xa4, 0x81, 0x5f, 0x77, 0xf5, 0x94, 0xbd, 0x26, 0xaf, 0x12, 0x2e, 0x24, 0xc4, 0xde, 0x69, 0xb4, + 0xb5, 0xf1, 0xd4, 0x0b, 0x15, 0xa8, 0xf4, 0x45, 0xcd, 0xaa, 0x6f, 0xf1, 0x28, 0x02, 0x6b, 0xa1, + 0x69, 0x95, 0x01, 0xab, 0x0b, 0x13, 0x81, 0xa5, 0x67, 0xcd, 0x18, 0xc1, 0xa5, 0x03, 0xa3, 0xb8, + 0xf4, 0xe7, 0xd6, 0x47, 0x5c, 0x4a, 0xfa, 0xf2, 0x33, 0x58, 0xb2, 0xc0, 0x5e, 0xe1, 0x34, 0x60, + 0x35, 0x61, 0xaa, 0xf1, 0xe5, 0xf4, 0x3b, 0xec, 0x63, 0x70, 0x3f, 0xe6, 0x8e, 0x7f, 0x82, 0xb4, + 0x50, 0x29, 0x20, 0xb9, 0xd1, 0x58, 0x0c, 0x8e, 0x42, 0x98, 0xab, 0xef, 0xf1, 0x9b, 0x0a, 0x18, + 0x5e, 0xb0, 0x19, 0xca, 0xbf, 0x7e, 0xf0, 0x7d, 0xaa, 0xbc, 0x70, 0xf4, 0x07, 0x54, 0xff, 0xef, + 0xac, 0xd5, 0xfd, 0x78, 0xd3, 0xff, 0xb8, 0x46, 0x5a, 0xbf, 0x95, 0xb7, 0xed, 0x2f, 0x6f, 0x94, + 0xfe, 0x76, 0xb3, 0x1d, 0x72, 0x5c, 0x22, 0xf9, 0xda, 0xbb, 0x78, 0x71, 0x65, 0x5a, 0xbe, 0x1f, + 0xcf, 0xa6, 0xed, 0x72, 0x3e, 0xed, 0x4c, 0xaf, 0x67, 0xf5, 0x8a, 0x59, 0xae, 0xb1, 0xbb, 0x9b, + 0xea, 0xff, 0xb6, 0xda, 0xcf, 0xe1, 0xe7, 0xcf, 0xf5, 0x47, 0x29, 0xe7, 0x7f, 0xad, 0x9f, 0xa5, + 0x41, 0x8c, 0x4f, 0xaa, 0x76, 0x38, 0xe2, 0x69, 0xd4, 0x6d, 0xd7, 0x29, 0xab, 0xbf, 0x97, 
0x84, + 0x2b, 0x3e, 0xa9, 0xae, 0x56, 0x84, 0xab, 0x51, 0xf7, 0x2a, 0x10, 0x3e, 0xae, 0xb7, 0x42, 0xb4, + 0xd7, 0xe3, 0x93, 0xaa, 0xd7, 0x5b, 0x51, 0x7a, 0xbd, 0x51, 0xb7, 0xd7, 0x0b, 0xa4, 0x5f, 0x1f, + 0xd7, 0xd5, 0xfd, 0xf4, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa4, 0xb5, 0x22, 0x81, 0x72, 0x05, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/internal_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/internal_error.pb.go new file mode 100644 index 0000000..cea6942 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/internal_error.pb.go @@ -0,0 +1,125 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/internal_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible internal errors. +type InternalErrorEnum_InternalError int32 + +const ( + // Enum unspecified. + InternalErrorEnum_UNSPECIFIED InternalErrorEnum_InternalError = 0 + // The received error code is not known in this version. + InternalErrorEnum_UNKNOWN InternalErrorEnum_InternalError = 1 + // Google Ads API encountered unexpected internal error. + InternalErrorEnum_INTERNAL_ERROR InternalErrorEnum_InternalError = 2 + // The intended error code doesn't exist in any API version. This will be + // fixed by adding a new error code as soon as possible. + InternalErrorEnum_ERROR_CODE_NOT_PUBLISHED InternalErrorEnum_InternalError = 3 + // Google Ads API encountered an unexpected transient error. The user + // should retry their request in these cases. + InternalErrorEnum_TRANSIENT_ERROR InternalErrorEnum_InternalError = 4 +) + +var InternalErrorEnum_InternalError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INTERNAL_ERROR", + 3: "ERROR_CODE_NOT_PUBLISHED", + 4: "TRANSIENT_ERROR", +} +var InternalErrorEnum_InternalError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INTERNAL_ERROR": 2, + "ERROR_CODE_NOT_PUBLISHED": 3, + "TRANSIENT_ERROR": 4, +} + +func (x InternalErrorEnum_InternalError) String() string { + return proto.EnumName(InternalErrorEnum_InternalError_name, int32(x)) +} +func (InternalErrorEnum_InternalError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_internal_error_61af587325ee8155, []int{0, 0} +} + +// Container for enum describing possible internal errors. 
+type InternalErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InternalErrorEnum) Reset() { *m = InternalErrorEnum{} } +func (m *InternalErrorEnum) String() string { return proto.CompactTextString(m) } +func (*InternalErrorEnum) ProtoMessage() {} +func (*InternalErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_internal_error_61af587325ee8155, []int{0} +} +func (m *InternalErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InternalErrorEnum.Unmarshal(m, b) +} +func (m *InternalErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InternalErrorEnum.Marshal(b, m, deterministic) +} +func (dst *InternalErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_InternalErrorEnum.Merge(dst, src) +} +func (m *InternalErrorEnum) XXX_Size() int { + return xxx_messageInfo_InternalErrorEnum.Size(m) +} +func (m *InternalErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_InternalErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_InternalErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*InternalErrorEnum)(nil), "google.ads.googleads.v1.errors.InternalErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.InternalErrorEnum_InternalError", InternalErrorEnum_InternalError_name, InternalErrorEnum_InternalError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/internal_error.proto", fileDescriptor_internal_error_61af587325ee8155) +} + +var fileDescriptor_internal_error_61af587325ee8155 = []byte{ + // 331 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xc3, 0x30, + 0x14, 0x86, 0x6d, 0x27, 0x0a, 0x19, 0xba, 0x1a, 0x6f, 0x44, 0xc6, 0x2e, 0xfa, 0x00, 0x29, 0x65, + 0x77, 0xf1, 0x2a, 0x5b, 0xe3, 0x2c, 0x8e, 0xb4, 0x74, 0xdd, 0x04, 0x29, 0x94, 0x6a, 0x4b, 0x29, + 0x6c, 0xc9, 0x48, 0xea, 0xde, 0xc1, 0xd7, 0xf0, 0xd2, 0x47, 0xf1, 0x51, 0x04, 0xdf, 0x41, 0xda, + 0x6c, 0x85, 0x5d, 0xe8, 0x55, 0xff, 0x1e, 0xbe, 0xef, 0xe4, 0x9c, 0x03, 0xc6, 0xa5, 0x10, 0xe5, + 0xba, 0x70, 0xb2, 0x5c, 0x39, 0x3a, 0x36, 0x69, 0xe7, 0x3a, 0x85, 0x94, 0x42, 0x2a, 0xa7, 0xe2, + 0x75, 0x21, 0x79, 0xb6, 0x4e, 0xdb, 0x7f, 0xb4, 0x95, 0xa2, 0x16, 0x70, 0xa4, 0x49, 0x94, 0xe5, + 0x0a, 0x75, 0x12, 0xda, 0xb9, 0x48, 0x4b, 0xb7, 0xc3, 0x43, 0xd3, 0x6d, 0xe5, 0x64, 0x9c, 0x8b, + 0x3a, 0xab, 0x2b, 0xc1, 0x95, 0xb6, 0xed, 0x77, 0x03, 0x5c, 0xf9, 0xfb, 0xb6, 0xb4, 0x11, 0x28, + 0x7f, 0xdb, 0xd8, 0x35, 0xb8, 0x38, 0x2a, 0xc2, 0x01, 0xe8, 0x2f, 0xd9, 0x22, 0xa4, 0x53, 0xff, + 0xde, 0xa7, 0x9e, 0x75, 0x02, 0xfb, 0xe0, 0x7c, 0xc9, 0x1e, 0x59, 0xf0, 0xc4, 0x2c, 0x03, 0x42, + 0x70, 0xe9, 0xb3, 0x98, 0x46, 0x8c, 0xcc, 0x53, 0x1a, 0x45, 0x41, 0x64, 0x99, 0x70, 0x08, 0x6e, + 0xda, 0x98, 0x4e, 0x03, 0x8f, 0xa6, 0x2c, 0x88, 0xd3, 0x70, 0x39, 0x99, 0xfb, 0x8b, 0x07, 0xea, + 0x59, 0x3d, 0x78, 0x0d, 0x06, 0x71, 0x44, 0xd8, 0xc2, 0xa7, 0x2c, 0xde, 0x2b, 0xa7, 0x93, 0x1f, + 0x03, 0xd8, 0xaf, 0x62, 0x83, 0xfe, 0x5f, 0x68, 0x02, 0x8f, 0x46, 0x0b, 0x9b, 0x35, 0x42, 0xe3, + 0xd9, 0xdb, 0x5b, 0xa5, 0x58, 0x67, 0xbc, 0x44, 0x42, 0x96, 0x4e, 0x59, 0xf0, 0x76, 0xc9, 0xc3, + 0x2d, 0xb7, 0x95, 0xfa, 0xeb, 0xb4, 0x77, 0xfa, 0xf3, 0x61, 0xf6, 0x66, 0x84, 0x7c, 0x9a, 0xa3, + 0x99, 0x6e, 0x46, 0x72, 0x85, 0x74, 0x6c, 0xd2, 0xca, 0x45, 0xed, 0x93, 0xea, 0xeb, 0x00, 0x24, + 0x24, 0x57, 0x49, 0x07, 0x24, 0x2b, 0x37, 0xd1, 0xc0, 0xb7, 0x69, 0xeb, 0x2a, 0xc6, 0x24, 0x57, 
+ 0x18, 0x77, 0x08, 0xc6, 0x2b, 0x17, 0x63, 0x0d, 0xbd, 0x9c, 0xb5, 0xd3, 0x8d, 0x7f, 0x03, 0x00, + 0x00, 0xff, 0xff, 0xef, 0xf8, 0x2e, 0x2f, 0xf7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_ad_group_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_ad_group_error.pb.go new file mode 100644 index 0000000..a05fe7c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_ad_group_error.pb.go @@ -0,0 +1,122 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_ad_group_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a keyword plan ad group. +type KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError int32 + +const ( + // Enum unspecified. + KeywordPlanAdGroupErrorEnum_UNSPECIFIED KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError = 0 + // The received error code is not known in this version. + KeywordPlanAdGroupErrorEnum_UNKNOWN KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError = 1 + // The keyword plan ad group name is missing, empty, longer than allowed + // limit or contains invalid chars. + KeywordPlanAdGroupErrorEnum_INVALID_NAME KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError = 2 + // The keyword plan ad group name is duplicate to an existing keyword plan + // AdGroup name or other keyword plan AdGroup name in the request. + KeywordPlanAdGroupErrorEnum_DUPLICATE_NAME KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError = 3 +) + +var KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_NAME", + 3: "DUPLICATE_NAME", +} +var KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_NAME": 2, + "DUPLICATE_NAME": 3, +} + +func (x KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError) String() string { + return proto.EnumName(KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError_name, int32(x)) +} +func (KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_error_e4bac52c78eeee86, []int{0, 0} +} + +// Container for enum describing possible errors from applying a keyword plan +// ad group. 
+type KeywordPlanAdGroupErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanAdGroupErrorEnum) Reset() { *m = KeywordPlanAdGroupErrorEnum{} } +func (m *KeywordPlanAdGroupErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanAdGroupErrorEnum) ProtoMessage() {} +func (*KeywordPlanAdGroupErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_error_e4bac52c78eeee86, []int{0} +} +func (m *KeywordPlanAdGroupErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanAdGroupErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanAdGroupErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanAdGroupErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanAdGroupErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanAdGroupErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanAdGroupErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanAdGroupErrorEnum.Size(m) +} +func (m *KeywordPlanAdGroupErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanAdGroupErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanAdGroupErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanAdGroupErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanAdGroupErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError", KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError_name, KeywordPlanAdGroupErrorEnum_KeywordPlanAdGroupError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_ad_group_error.proto", fileDescriptor_keyword_plan_ad_group_error_e4bac52c78eeee86) +} + +var fileDescriptor_keyword_plan_ad_group_error_e4bac52c78eeee86 = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x50, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0x77, 0x1d, 0x28, 0x64, 0xa2, 0xa5, 0x17, 0x41, 0xc7, 0x0e, 0x7d, 0x80, 0x94, 0xe2, 0x2d, + 0x5e, 0xcc, 0xd6, 0x3a, 0xca, 0x66, 0x2c, 0xe8, 0x2a, 0x48, 0xa5, 0x44, 0x53, 0xc2, 0xb0, 0x4b, + 0x4a, 0xb2, 0x4d, 0x04, 0x9f, 0xc6, 0xa3, 0x8f, 0xe2, 0xa3, 0xf8, 0x02, 0x5e, 0xa5, 0xfd, 0xdc, + 0x6e, 0xf3, 0x94, 0x1f, 0xf9, 0x7e, 0x7f, 0xbe, 0xdf, 0x87, 0x2e, 0xa5, 0xd6, 0xb2, 0x2a, 0x03, + 0x2e, 0x6c, 0x00, 0xb0, 0x41, 0xeb, 0x30, 0x28, 0x8d, 0xd1, 0xc6, 0x06, 0x2f, 0xe5, 0xdb, 0xab, + 0x36, 0xa2, 0xa8, 0x2b, 0xae, 0x0a, 0x2e, 0x0a, 0x69, 0xf4, 0xaa, 0x2e, 0xda, 0x21, 0xae, 0x8d, + 0x5e, 0x6a, 0x6f, 0x00, 0x32, 0xcc, 0x85, 0xc5, 0x5b, 0x07, 0xbc, 0x0e, 0x31, 0x38, 0x9c, 0xf6, + 0x37, 0x09, 0xf5, 0x3c, 0xe0, 0x4a, 0xe9, 0x25, 0x5f, 0xce, 0xb5, 0xb2, 0xa0, 0xf6, 0xdf, 0xd1, + 0xd9, 0x04, 0x22, 0xd2, 0x8a, 0x2b, 0x2a, 0xc6, 0x8d, 0x7f, 0xdc, 0x28, 0x63, 0xb5, 0x5a, 0xf8, + 0x8f, 0xe8, 0x64, 0xc7, 0xd8, 0x3b, 0x46, 0xbd, 0x19, 0xbb, 0x4d, 0xe3, 0x51, 0x72, 0x95, 0xc4, + 0x91, 0xbb, 0xe7, 0xf5, 0xd0, 0xc1, 0x8c, 0x4d, 0xd8, 0xcd, 0x3d, 0x73, 0x3b, 0x9e, 0x8b, 0x0e, + 0x13, 0x96, 0xd1, 0x69, 0x12, 0x15, 0x8c, 0x5e, 0xc7, 0xae, 0xe3, 0x79, 0xe8, 0x28, 0x9a, 0xa5, + 0xd3, 0x64, 0x44, 0xef, 0x62, 0xf8, 0xeb, 0x0e, 0x7f, 0x3a, 0xc8, 0x7f, 0xd6, 0x0b, 0xfc, 0x7f, + 0x85, 0x61, 0x7f, 0xc7, 0x0e, 0x69, 0x53, 0x21, 0xed, 0x3c, 0x44, 0x7f, 0x7a, 0xa9, 0x2b, 0xae, + 0x24, 0xd6, 0x46, 0x06, 0xb2, 0x54, 0x6d, 0xc1, 0xcd, 0x51, 0xeb, 0xb9, 0xdd, 0x75, 
0xe3, 0x0b, + 0x78, 0x3e, 0x9c, 0xee, 0x98, 0xd2, 0x4f, 0x67, 0x30, 0x06, 0x33, 0x2a, 0x2c, 0x06, 0xd8, 0xa0, + 0x2c, 0xc4, 0x6d, 0xa4, 0xfd, 0xda, 0x10, 0x72, 0x2a, 0x6c, 0xbe, 0x25, 0xe4, 0x59, 0x98, 0x03, + 0xe1, 0xdb, 0xf1, 0xe1, 0x97, 0x10, 0x2a, 0x2c, 0x21, 0x5b, 0x0a, 0x21, 0x59, 0x48, 0x08, 0x90, + 0x9e, 0xf6, 0xdb, 0xed, 0xce, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xab, 0xca, 0xe7, 0x7b, 0x00, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_campaign_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_campaign_error.pb.go new file mode 100644 index 0000000..ef35665 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_campaign_error.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_campaign_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a keyword plan campaign. +type KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError int32 + +const ( + // Enum unspecified. + KeywordPlanCampaignErrorEnum_UNSPECIFIED KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 0 + // The received error code is not known in this version. + KeywordPlanCampaignErrorEnum_UNKNOWN KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 1 + // A keyword plan campaign name is missing, empty, longer than allowed limit + // or contains invalid chars. + KeywordPlanCampaignErrorEnum_INVALID_NAME KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 2 + // A keyword plan campaign contains one or more untargetable languages. + KeywordPlanCampaignErrorEnum_INVALID_LANGUAGES KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 3 + // A keyword plan campaign contains one or more invalid geo targets. + KeywordPlanCampaignErrorEnum_INVALID_GEOS KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 4 + // The keyword plan campaign name is duplicate to an existing keyword plan + // campaign name or other keyword plan campaign name in the request. + KeywordPlanCampaignErrorEnum_DUPLICATE_NAME KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 5 + // The number of geo targets in the keyword plan campaign exceeds limits. 
+ KeywordPlanCampaignErrorEnum_MAX_GEOS_EXCEEDED KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError = 6 +) + +var KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_NAME", + 3: "INVALID_LANGUAGES", + 4: "INVALID_GEOS", + 5: "DUPLICATE_NAME", + 6: "MAX_GEOS_EXCEEDED", +} +var KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_NAME": 2, + "INVALID_LANGUAGES": 3, + "INVALID_GEOS": 4, + "DUPLICATE_NAME": 5, + "MAX_GEOS_EXCEEDED": 6, +} + +func (x KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError) String() string { + return proto.EnumName(KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name, int32(x)) +} +func (KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_error_409b324e7f5751b8, []int{0, 0} +} + +// Container for enum describing possible errors from applying a keyword plan +// campaign. +type KeywordPlanCampaignErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanCampaignErrorEnum) Reset() { *m = KeywordPlanCampaignErrorEnum{} } +func (m *KeywordPlanCampaignErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanCampaignErrorEnum) ProtoMessage() {} +func (*KeywordPlanCampaignErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_error_409b324e7f5751b8, []int{0} +} +func (m *KeywordPlanCampaignErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanCampaignErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanCampaignErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanCampaignErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanCampaignErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanCampaignErrorEnum.Size(m) +} +func (m *KeywordPlanCampaignErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanCampaignErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanCampaignErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanCampaignErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanCampaignErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError", KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_name, KeywordPlanCampaignErrorEnum_KeywordPlanCampaignError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_campaign_error.proto", fileDescriptor_keyword_plan_campaign_error_409b324e7f5751b8) +} + +var fileDescriptor_keyword_plan_campaign_error_409b324e7f5751b8 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x6a, 0xa3, 0x40, + 0x1c, 0xc6, 0x57, 0xb3, 0x9b, 0x85, 0xc9, 0xb2, 0xeb, 0x0a, 0x85, 0x52, 0xd2, 0x1c, 0x7c, 0x80, + 0x11, 0xe9, 0x6d, 0x7a, 0xe9, 0x44, 0xa7, 0x22, 0x49, 0x8c, 0x90, 0x6a, 0x43, 0x11, 0x64, 0x1a, + 0x45, 0xa4, 0x66, 0x46, 0x9c, 0x34, 0xa5, 0x2f, 0xd3, 0x43, 0x8f, 0x7d, 0x89, 0xde, 0xfb, 0x28, + 0x7d, 0x82, 0x1e, 0x8b, 0x4e, 0x0c, 0xbd, 0xa4, 0x27, 0x3f, 0xfe, 0xfe, 0xbe, 0xef, 0xd3, 
0xff, + 0x1f, 0x5c, 0xe4, 0x9c, 0xe7, 0x65, 0x66, 0xd2, 0x54, 0x98, 0x52, 0x36, 0x6a, 0x6b, 0x99, 0x59, + 0x5d, 0xf3, 0x5a, 0x98, 0x77, 0xd9, 0xe3, 0x03, 0xaf, 0xd3, 0xa4, 0x2a, 0x29, 0x4b, 0x56, 0x74, + 0x5d, 0xd1, 0x22, 0x67, 0x49, 0xfb, 0x12, 0x56, 0x35, 0xdf, 0x70, 0x7d, 0x24, 0x6d, 0x90, 0xa6, + 0x02, 0xee, 0x13, 0xe0, 0xd6, 0x82, 0x32, 0xe1, 0x64, 0xd8, 0x35, 0x54, 0x85, 0x49, 0x19, 0xe3, + 0x1b, 0xba, 0x29, 0x38, 0x13, 0xd2, 0x6d, 0xbc, 0x2a, 0x60, 0x38, 0x91, 0x1d, 0x41, 0x49, 0x99, + 0xbd, 0x6b, 0x20, 0x8d, 0x97, 0xb0, 0xfb, 0xb5, 0xf1, 0xa4, 0x80, 0xe3, 0x43, 0x80, 0xfe, 0x0f, + 0x0c, 0x42, 0x7f, 0x11, 0x10, 0xdb, 0xbb, 0xf4, 0x88, 0xa3, 0xfd, 0xd0, 0x07, 0xe0, 0x77, 0xe8, + 0x4f, 0xfc, 0xf9, 0xb5, 0xaf, 0x29, 0xba, 0x06, 0xfe, 0x78, 0x7e, 0x84, 0xa7, 0x9e, 0x93, 0xf8, + 0x78, 0x46, 0x34, 0x55, 0x3f, 0x02, 0xff, 0xbb, 0xc9, 0x14, 0xfb, 0x6e, 0x88, 0x5d, 0xb2, 0xd0, + 0x7a, 0x5f, 0x41, 0x97, 0xcc, 0x17, 0xda, 0x4f, 0x5d, 0x07, 0x7f, 0x9d, 0x30, 0x98, 0x7a, 0x36, + 0xbe, 0x22, 0xd2, 0xfc, 0xab, 0x31, 0xcf, 0xf0, 0xb2, 0x25, 0x12, 0xb2, 0xb4, 0x09, 0x71, 0x88, + 0xa3, 0xf5, 0xc7, 0x1f, 0x0a, 0x30, 0x56, 0x7c, 0x0d, 0xbf, 0x5f, 0xc3, 0xf8, 0xf4, 0xd0, 0x4f, + 0x04, 0xcd, 0x1e, 0x02, 0xe5, 0xc6, 0xd9, 0x05, 0xe4, 0xbc, 0xa4, 0x2c, 0x87, 0xbc, 0xce, 0xcd, + 0x3c, 0x63, 0xed, 0x96, 0xba, 0xcb, 0x54, 0x85, 0x38, 0x74, 0xa8, 0x73, 0xf9, 0x78, 0x56, 0x7b, + 0x2e, 0xc6, 0x2f, 0xea, 0xc8, 0x95, 0x61, 0x38, 0x15, 0x50, 0xca, 0x46, 0x45, 0x16, 0x6c, 0x2b, + 0xc5, 0x5b, 0x07, 0xc4, 0x38, 0x15, 0xf1, 0x1e, 0x88, 0x23, 0x2b, 0x96, 0xc0, 0xbb, 0x6a, 0xc8, + 0x29, 0x42, 0x38, 0x15, 0x08, 0xed, 0x11, 0x84, 0x22, 0x0b, 0x21, 0x09, 0xdd, 0xf6, 0xdb, 0xaf, + 0x3b, 0xfb, 0x0c, 0x00, 0x00, 0xff, 0xff, 0xd0, 0x4c, 0xf0, 0x1b, 0x45, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_error.pb.go new file mode 100644 index 0000000..c54a1e5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_error.pb.go @@ -0,0 +1,181 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a keyword plan. +type KeywordPlanErrorEnum_KeywordPlanError int32 + +const ( + // Enum unspecified. + KeywordPlanErrorEnum_UNSPECIFIED KeywordPlanErrorEnum_KeywordPlanError = 0 + // The received error code is not known in this version. + KeywordPlanErrorEnum_UNKNOWN KeywordPlanErrorEnum_KeywordPlanError = 1 + // The plan's bid multiplier value is outside the valid range. + KeywordPlanErrorEnum_BID_MULTIPLIER_OUT_OF_RANGE KeywordPlanErrorEnum_KeywordPlanError = 2 + // The plan's bid value is too high. 
+ KeywordPlanErrorEnum_BID_TOO_HIGH KeywordPlanErrorEnum_KeywordPlanError = 3 + // The plan's bid value is too low. + KeywordPlanErrorEnum_BID_TOO_LOW KeywordPlanErrorEnum_KeywordPlanError = 4 + // The plan's cpc bid is not a multiple of the minimum billable unit. + KeywordPlanErrorEnum_BID_TOO_MANY_FRACTIONAL_DIGITS KeywordPlanErrorEnum_KeywordPlanError = 5 + // The plan's daily budget value is too low. + KeywordPlanErrorEnum_DAILY_BUDGET_TOO_LOW KeywordPlanErrorEnum_KeywordPlanError = 6 + // The plan's daily budget is not a multiple of the minimum billable unit. + KeywordPlanErrorEnum_DAILY_BUDGET_TOO_MANY_FRACTIONAL_DIGITS KeywordPlanErrorEnum_KeywordPlanError = 7 + // The input has an invalid value. + KeywordPlanErrorEnum_INVALID_VALUE KeywordPlanErrorEnum_KeywordPlanError = 8 + // The plan has no keyword. + KeywordPlanErrorEnum_KEYWORD_PLAN_HAS_NO_KEYWORDS KeywordPlanErrorEnum_KeywordPlanError = 9 + // The plan is not enabled and API cannot provide mutation, forecast or + // stats. + KeywordPlanErrorEnum_KEYWORD_PLAN_NOT_ENABLED KeywordPlanErrorEnum_KeywordPlanError = 10 + // The requested plan cannot be found for providing forecast or stats. + KeywordPlanErrorEnum_KEYWORD_PLAN_NOT_FOUND KeywordPlanErrorEnum_KeywordPlanError = 11 + // The plan is missing a cpc bid. + KeywordPlanErrorEnum_MISSING_BID KeywordPlanErrorEnum_KeywordPlanError = 13 + // The plan is missing required forecast_period field. + KeywordPlanErrorEnum_MISSING_FORECAST_PERIOD KeywordPlanErrorEnum_KeywordPlanError = 14 + // The plan's forecast_period has invalid forecast date range. + KeywordPlanErrorEnum_INVALID_FORECAST_DATE_RANGE KeywordPlanErrorEnum_KeywordPlanError = 15 + // The plan's name is invalid. + KeywordPlanErrorEnum_INVALID_NAME KeywordPlanErrorEnum_KeywordPlanError = 16 +) + +var KeywordPlanErrorEnum_KeywordPlanError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BID_MULTIPLIER_OUT_OF_RANGE", + 3: "BID_TOO_HIGH", + 4: "BID_TOO_LOW", + 5: "BID_TOO_MANY_FRACTIONAL_DIGITS", + 6: "DAILY_BUDGET_TOO_LOW", + 7: "DAILY_BUDGET_TOO_MANY_FRACTIONAL_DIGITS", + 8: "INVALID_VALUE", + 9: "KEYWORD_PLAN_HAS_NO_KEYWORDS", + 10: "KEYWORD_PLAN_NOT_ENABLED", + 11: "KEYWORD_PLAN_NOT_FOUND", + 13: "MISSING_BID", + 14: "MISSING_FORECAST_PERIOD", + 15: "INVALID_FORECAST_DATE_RANGE", + 16: "INVALID_NAME", +} +var KeywordPlanErrorEnum_KeywordPlanError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BID_MULTIPLIER_OUT_OF_RANGE": 2, + "BID_TOO_HIGH": 3, + "BID_TOO_LOW": 4, + "BID_TOO_MANY_FRACTIONAL_DIGITS": 5, + "DAILY_BUDGET_TOO_LOW": 6, + "DAILY_BUDGET_TOO_MANY_FRACTIONAL_DIGITS": 7, + "INVALID_VALUE": 8, + "KEYWORD_PLAN_HAS_NO_KEYWORDS": 9, + "KEYWORD_PLAN_NOT_ENABLED": 10, + "KEYWORD_PLAN_NOT_FOUND": 11, + "MISSING_BID": 13, + "MISSING_FORECAST_PERIOD": 14, + "INVALID_FORECAST_DATE_RANGE": 15, + "INVALID_NAME": 16, +} + +func (x KeywordPlanErrorEnum_KeywordPlanError) String() string { + return proto.EnumName(KeywordPlanErrorEnum_KeywordPlanError_name, int32(x)) +} +func (KeywordPlanErrorEnum_KeywordPlanError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_error_76c865a67b9ab15d, []int{0, 0} +} + +// Container for enum describing possible errors from applying a keyword plan +// resource (keyword plan, keyword plan campaign, keyword plan ad group or +// keyword plan keyword) or KeywordPlanService RPC. 
+type KeywordPlanErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanErrorEnum) Reset() { *m = KeywordPlanErrorEnum{} } +func (m *KeywordPlanErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanErrorEnum) ProtoMessage() {} +func (*KeywordPlanErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_error_76c865a67b9ab15d, []int{0} +} +func (m *KeywordPlanErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanErrorEnum.Size(m) +} +func (m *KeywordPlanErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanErrorEnum_KeywordPlanError", KeywordPlanErrorEnum_KeywordPlanError_name, KeywordPlanErrorEnum_KeywordPlanError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_error.proto", fileDescriptor_keyword_plan_error_76c865a67b9ab15d) +} + +var fileDescriptor_keyword_plan_error_76c865a67b9ab15d = []byte{ + // 508 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x4f, 0x8b, 0xd3, 0x4e, + 0x1c, 0xc6, 0x7f, 0x6d, 0x7f, 0xee, 0xea, 0xd4, 0x75, 0xc7, 0x61, 0xd5, 0x65, 0xb7, 0x54, 0xe9, + 0xc5, 0x83, 0x90, 0x50, 0x3c, 0x08, 0xf1, 0x34, 0xe9, 0x4c, 0xd3, 0xa1, 0xe9, 0x4c, 0xc8, 0xbf, + 0x52, 0x29, 0x0c, 0xd1, 0x94, 0x50, 0xec, 0x66, 0x4a, 0x52, 0x57, 0x7c, 0x3b, 0x1e, 0xbd, 0xf8, + 0x36, 0xc4, 0x97, 0xe2, 0xc9, 0x97, 0x20, 0xe9, 0x34, 0x01, 0x77, 0x59, 0x4f, 0xf9, 0xf2, 0x7c, + 0x3f, 0xcf, 0x93, 0xc9, 0xe4, 0x01, 0x6f, 0x32, 0xa5, 0xb2, 0xcd, 0xca, 0x4c, 0xd2, 0xd2, 0xd4, + 0x63, 0x35, 0x5d, 0x0f, 0xcd, 0x55, 0x51, 0xa8, 0xa2, 0x34, 0x3f, 0xae, 0xbe, 0x7c, 0x56, 0x45, + 0x2a, 0xb7, 0x9b, 0x24, 0x97, 0x7b, 0xcd, 0xd8, 0x16, 0x6a, 0xa7, 0x50, 0x5f, 0xd3, 0x46, 0x92, + 0x96, 0x46, 0x63, 0x34, 0xae, 0x87, 0x86, 0x36, 0x5e, 0xf4, 0xea, 0xe0, 0xed, 0xda, 0x4c, 0xf2, + 0x5c, 0xed, 0x92, 0xdd, 0x5a, 0xe5, 0xa5, 0x76, 0x0f, 0x7e, 0x74, 0xc0, 0xd9, 0x54, 0x47, 0x7b, + 0x9b, 0x24, 0xa7, 0x95, 0x87, 0xe6, 0x9f, 0xae, 0x06, 0xdf, 0x3b, 0x00, 0xde, 0x5c, 0xa0, 0x53, + 0xd0, 0x8d, 0x78, 0xe0, 0xd1, 0x11, 0x1b, 0x33, 0x4a, 0xe0, 0x7f, 0xa8, 0x0b, 0x8e, 0x23, 0x3e, + 0xe5, 0x62, 0xce, 0x61, 0x0b, 0x3d, 0x07, 0x97, 0x36, 0x23, 0x72, 0x16, 0xb9, 0x21, 0xf3, 0x5c, + 0x46, 0x7d, 0x29, 0xa2, 0x50, 0x8a, 0xb1, 0xf4, 0x31, 0x77, 0x28, 0x6c, 0x23, 0x08, 0x1e, 0x56, + 0x40, 0x28, 0x84, 0x9c, 0x30, 0x67, 0x02, 0x3b, 0x55, 0x60, 0xad, 0xb8, 0x62, 0x0e, 0xff, 0x47, + 0x03, 0xd0, 0xaf, 0x85, 0x19, 0xe6, 0x0b, 0x39, 0xf6, 0xf1, 0x28, 0x64, 0x82, 0x63, 0x57, 0x12, + 0xe6, 0xb0, 0x30, 0x80, 0xf7, 0xd0, 0x39, 0x38, 0x23, 0x98, 0xb9, 0x0b, 0x69, 0x47, 0xc4, 0xa1, + 0x61, 0xe3, 0x3e, 0x42, 0xaf, 0xc0, 0xcb, 0x5b, 0x9b, 0x3b, 0x62, 0x8e, 0xd1, 0x63, 0x70, 0xc2, + 0x78, 
0x8c, 0x5d, 0x46, 0x64, 0x8c, 0xdd, 0x88, 0xc2, 0xfb, 0xe8, 0x05, 0xe8, 0x4d, 0xe9, 0x62, + 0x2e, 0x7c, 0x22, 0x3d, 0x17, 0x73, 0x39, 0xc1, 0x81, 0xe4, 0x42, 0x1e, 0xb4, 0x00, 0x3e, 0x40, + 0x3d, 0x70, 0xfe, 0x17, 0xc1, 0x45, 0x28, 0x29, 0xc7, 0xb6, 0x4b, 0x09, 0x04, 0xe8, 0x02, 0x3c, + 0xbd, 0xb5, 0x1d, 0x8b, 0x88, 0x13, 0xd8, 0xad, 0x3e, 0x75, 0xc6, 0x82, 0x80, 0x71, 0x47, 0xda, + 0x8c, 0xc0, 0x13, 0x74, 0x09, 0x9e, 0xd5, 0xc2, 0x58, 0xf8, 0x74, 0x84, 0x83, 0x50, 0x7a, 0xd4, + 0x67, 0x82, 0xc0, 0x47, 0xd5, 0x5d, 0xd6, 0x87, 0x6b, 0x96, 0x04, 0x87, 0xf4, 0x70, 0x97, 0xa7, + 0xd5, 0x5d, 0xd6, 0x00, 0xc7, 0x33, 0x0a, 0xa1, 0xfd, 0xbb, 0x05, 0x06, 0x1f, 0xd4, 0x95, 0xf1, + 0xef, 0x3e, 0xd8, 0x4f, 0x6e, 0xfe, 0x55, 0xaf, 0x2a, 0x82, 0xd7, 0x7a, 0x47, 0x0e, 0xc6, 0x4c, + 0x6d, 0x92, 0x3c, 0x33, 0x54, 0x91, 0x99, 0xd9, 0x2a, 0xdf, 0xd7, 0xa4, 0x6e, 0xe4, 0x76, 0x5d, + 0xde, 0x55, 0xd0, 0xb7, 0xfa, 0xf1, 0xb5, 0xdd, 0x71, 0x30, 0xfe, 0xd6, 0xee, 0x3b, 0x3a, 0x0c, + 0xa7, 0xa5, 0xa1, 0xc7, 0x6a, 0x8a, 0x87, 0xc6, 0xfe, 0x95, 0xe5, 0xcf, 0x1a, 0x58, 0xe2, 0xb4, + 0x5c, 0x36, 0xc0, 0x32, 0x1e, 0x2e, 0x35, 0xf0, 0xab, 0x3d, 0xd0, 0xaa, 0x65, 0xe1, 0xb4, 0xb4, + 0xac, 0x06, 0xb1, 0xac, 0x78, 0x68, 0x59, 0x1a, 0x7a, 0x7f, 0xb4, 0x3f, 0xdd, 0xeb, 0x3f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x7a, 0xf0, 0xe0, 0x47, 0x3d, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_idea_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_idea_error.pb.go new file mode 100644 index 0000000..1f57b47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_idea_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_idea_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from KeywordPlanIdeaService. +type KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError int32 + +const ( + // Enum unspecified. + KeywordPlanIdeaErrorEnum_UNSPECIFIED KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError = 0 + // The received error code is not known in this version. + KeywordPlanIdeaErrorEnum_UNKNOWN KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError = 1 + // Error when crawling the input URL. + KeywordPlanIdeaErrorEnum_URL_CRAWL_ERROR KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError = 2 + // The input has an invalid value. 
+ KeywordPlanIdeaErrorEnum_INVALID_VALUE KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError = 3 +) + +var KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "URL_CRAWL_ERROR", + 3: "INVALID_VALUE", +} +var KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "URL_CRAWL_ERROR": 2, + "INVALID_VALUE": 3, +} + +func (x KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError) String() string { + return proto.EnumName(KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError_name, int32(x)) +} +func (KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_error_cd9f9ae2fcdd2e05, []int{0, 0} +} + +// Container for enum describing possible errors from KeywordPlanIdeaService. +type KeywordPlanIdeaErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanIdeaErrorEnum) Reset() { *m = KeywordPlanIdeaErrorEnum{} } +func (m *KeywordPlanIdeaErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanIdeaErrorEnum) ProtoMessage() {} +func (*KeywordPlanIdeaErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_error_cd9f9ae2fcdd2e05, []int{0} +} +func (m *KeywordPlanIdeaErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanIdeaErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanIdeaErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanIdeaErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanIdeaErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanIdeaErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanIdeaErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanIdeaErrorEnum.Size(m) +} +func (m *KeywordPlanIdeaErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanIdeaErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanIdeaErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanIdeaErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanIdeaErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError", KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError_name, KeywordPlanIdeaErrorEnum_KeywordPlanIdeaError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_idea_error.proto", fileDescriptor_keyword_plan_idea_error_cd9f9ae2fcdd2e05) +} + +var fileDescriptor_keyword_plan_idea_error_cd9f9ae2fcdd2e05 = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xf3, 0x30, + 0x1c, 0xc5, 0xbf, 0x75, 0xf0, 0x09, 0x19, 0xb2, 0x5a, 0xbd, 0x50, 0x91, 0x5d, 0xf4, 0x01, 0x52, + 0x8a, 0x77, 0xd1, 0x9b, 0x6c, 0xab, 0xa3, 0xac, 0x74, 0xa5, 0xd2, 0x0e, 0xa4, 0x50, 0xa2, 0x09, + 0xa1, 0xd8, 0x25, 0x25, 0x99, 0x53, 0x5f, 0xc7, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0xda, 0x07, 0x90, + 0x36, 0x6e, 0x57, 0xd3, 0xab, 0x1c, 0xf2, 0x3f, 0xbf, 0x93, 0x93, 0x3f, 0xb8, 0xe6, 0x52, 0xf2, + 0x9a, 0x79, 0x84, 0x6a, 0xcf, 0xc8, 0x56, 0x6d, 0x7c, 0x8f, 0x29, 0x25, 0x95, 0xf6, 0x1e, 0xd9, + 0xeb, 0xb3, 0x54, 0xb4, 0x6c, 0x6a, 0x22, 0xca, 0x8a, 0x32, 0x52, 0x76, 0x03, 0xd8, 0x28, 0xb9, + 0x96, 0xce, 0xc8, 0x20, 0x90, 0x50, 0x0d, 0x77, 0x34, 0xdc, 0xf8, 0xd0, 0xd0, 0xe7, 0x17, 0xdb, + 0xf4, 0xa6, 0xf2, 0x88, 0x10, 
0x72, 0x4d, 0xd6, 0x95, 0x14, 0xda, 0xd0, 0xee, 0x0b, 0x38, 0x9d, + 0x9b, 0xf8, 0xa4, 0x26, 0x22, 0xa4, 0x8c, 0x04, 0x2d, 0x16, 0x88, 0xa7, 0x95, 0x5b, 0x80, 0x93, + 0x7d, 0x33, 0x67, 0x08, 0x06, 0x59, 0x7c, 0x9b, 0x04, 0x93, 0xf0, 0x26, 0x0c, 0xa6, 0xf6, 0x3f, + 0x67, 0x00, 0x0e, 0xb2, 0x78, 0x1e, 0x2f, 0x96, 0xb1, 0xdd, 0x73, 0x8e, 0xc1, 0x30, 0x4b, 0xa3, + 0x72, 0x92, 0xe2, 0x65, 0x54, 0x06, 0x69, 0xba, 0x48, 0x6d, 0xcb, 0x39, 0x02, 0x87, 0x61, 0x9c, + 0xe3, 0x28, 0x9c, 0x96, 0x39, 0x8e, 0xb2, 0xc0, 0xee, 0x8f, 0xbf, 0x7a, 0xc0, 0x7d, 0x90, 0x2b, + 0xf8, 0x77, 0xfd, 0xf1, 0xd9, 0xbe, 0x0a, 0x49, 0xdb, 0x3d, 0xe9, 0xdd, 0x4d, 0x7f, 0x60, 0x2e, + 0x6b, 0x22, 0x38, 0x94, 0x8a, 0x7b, 0x9c, 0x89, 0xee, 0x67, 0xdb, 0x4d, 0x36, 0x95, 0xfe, 0x6d, + 0xb1, 0x57, 0xe6, 0x78, 0xb3, 0xfa, 0x33, 0x8c, 0xdf, 0xad, 0xd1, 0xcc, 0x84, 0x61, 0xaa, 0xa1, + 0x91, 0xad, 0xca, 0x7d, 0xd8, 0x3d, 0xa9, 0x3f, 0xb6, 0x86, 0x02, 0x53, 0x5d, 0xec, 0x0c, 0x45, + 0xee, 0x17, 0xc6, 0xf0, 0x69, 0xb9, 0xe6, 0x16, 0x21, 0x4c, 0x35, 0x42, 0x3b, 0x0b, 0x42, 0xb9, + 0x8f, 0x90, 0x31, 0xdd, 0xff, 0xef, 0xda, 0x5d, 0x7e, 0x07, 0x00, 0x00, 0xff, 0xff, 0x1b, 0xe9, + 0x4c, 0x1e, 0xf5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_keyword_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_keyword_error.pb.go new file mode 100644 index 0000000..a92aa3e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_keyword_error.pb.go @@ -0,0 +1,141 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_keyword_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a keyword plan keyword. +type KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError int32 + +const ( + // Enum unspecified. + KeywordPlanKeywordErrorEnum_UNSPECIFIED KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 0 + // The received error code is not known in this version. + KeywordPlanKeywordErrorEnum_UNKNOWN KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 1 + // A keyword or negative keyword has invalid match type. + KeywordPlanKeywordErrorEnum_INVALID_KEYWORD_MATCH_TYPE KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 2 + // A keyword or negative keyword with same text and match type already + // exists. + KeywordPlanKeywordErrorEnum_DUPLICATE_KEYWORD KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 3 + // Keyword or negative keyword text exceeds the allowed limit. + KeywordPlanKeywordErrorEnum_KEYWORD_TEXT_TOO_LONG KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 4 + // Keyword or negative keyword text has invalid characters or symbols. 
+ KeywordPlanKeywordErrorEnum_KEYWORD_HAS_INVALID_CHARS KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 5 + // Keyword or negative keyword text has too many words. + KeywordPlanKeywordErrorEnum_KEYWORD_HAS_TOO_MANY_WORDS KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 6 + // Keyword or negative keyword has invalid text. + KeywordPlanKeywordErrorEnum_INVALID_KEYWORD_TEXT KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError = 7 +) + +var KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_KEYWORD_MATCH_TYPE", + 3: "DUPLICATE_KEYWORD", + 4: "KEYWORD_TEXT_TOO_LONG", + 5: "KEYWORD_HAS_INVALID_CHARS", + 6: "KEYWORD_HAS_TOO_MANY_WORDS", + 7: "INVALID_KEYWORD_TEXT", +} +var KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_KEYWORD_MATCH_TYPE": 2, + "DUPLICATE_KEYWORD": 3, + "KEYWORD_TEXT_TOO_LONG": 4, + "KEYWORD_HAS_INVALID_CHARS": 5, + "KEYWORD_HAS_TOO_MANY_WORDS": 6, + "INVALID_KEYWORD_TEXT": 7, +} + +func (x KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError) String() string { + return proto.EnumName(KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError_name, int32(x)) +} +func (KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_error_dbf4ef80d98a243c, []int{0, 0} +} + +// Container for enum describing possible errors from applying a keyword or a +// negative keyword from a keyword plan. +type KeywordPlanKeywordErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanKeywordErrorEnum) Reset() { *m = KeywordPlanKeywordErrorEnum{} } +func (m *KeywordPlanKeywordErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanKeywordErrorEnum) ProtoMessage() {} +func (*KeywordPlanKeywordErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_error_dbf4ef80d98a243c, []int{0} +} +func (m *KeywordPlanKeywordErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanKeywordErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanKeywordErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanKeywordErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanKeywordErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanKeywordErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanKeywordErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanKeywordErrorEnum.Size(m) +} +func (m *KeywordPlanKeywordErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanKeywordErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanKeywordErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanKeywordErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanKeywordErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError", KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError_name, KeywordPlanKeywordErrorEnum_KeywordPlanKeywordError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_keyword_error.proto", fileDescriptor_keyword_plan_keyword_error_dbf4ef80d98a243c) +} + +var fileDescriptor_keyword_plan_keyword_error_dbf4ef80d98a243c = []byte{ + // 395 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 
0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x51, 0xcd, 0x6e, 0xd3, 0x30, + 0x00, 0xa6, 0x19, 0x6c, 0x92, 0x77, 0xc0, 0x58, 0x4c, 0xb0, 0x31, 0x7a, 0xc8, 0x03, 0x38, 0x8a, + 0xb8, 0x99, 0x03, 0xf2, 0x12, 0xd3, 0x46, 0xed, 0x92, 0x68, 0x49, 0x33, 0x8a, 0x22, 0x59, 0x81, + 0x44, 0x51, 0x45, 0x66, 0x47, 0x71, 0x19, 0xe2, 0xca, 0xa3, 0x70, 0xe4, 0x51, 0x78, 0x0c, 0x8e, + 0xbc, 0x00, 0x57, 0xe4, 0x78, 0xae, 0x10, 0x52, 0x77, 0xca, 0x17, 0x7f, 0x7f, 0xd6, 0x67, 0xf0, + 0xa6, 0x95, 0xb2, 0xed, 0x1a, 0xaf, 0xaa, 0x95, 0x67, 0xa0, 0x46, 0xb7, 0xbe, 0xd7, 0x0c, 0x83, + 0x1c, 0x94, 0xf7, 0xa9, 0xf9, 0xfa, 0x45, 0x0e, 0x35, 0xef, 0xbb, 0x4a, 0x70, 0xfb, 0x33, 0x72, + 0xb8, 0x1f, 0xe4, 0x56, 0xa2, 0xa9, 0x71, 0xe1, 0xaa, 0x56, 0x78, 0x17, 0x80, 0x6f, 0x7d, 0x6c, + 0x02, 0xce, 0xce, 0x6d, 0x41, 0xbf, 0xf1, 0x2a, 0x21, 0xe4, 0xb6, 0xda, 0x6e, 0xa4, 0x50, 0xc6, + 0xed, 0x7e, 0x73, 0xc0, 0x8b, 0x85, 0x49, 0x4d, 0xbb, 0x4a, 0xdc, 0x41, 0xa6, 0xad, 0x4c, 0x7c, + 0xbe, 0x71, 0x7f, 0x4d, 0xc0, 0xb3, 0x3d, 0x3c, 0x7a, 0x0c, 0x8e, 0x57, 0x71, 0x96, 0xb2, 0x20, + 0x7a, 0x1b, 0xb1, 0x10, 0x3e, 0x40, 0xc7, 0xe0, 0x68, 0x15, 0x2f, 0xe2, 0xe4, 0x3a, 0x86, 0x13, + 0x34, 0x05, 0x67, 0x51, 0x5c, 0xd0, 0x65, 0x14, 0xf2, 0x05, 0x5b, 0x5f, 0x27, 0x57, 0x21, 0xbf, + 0xa4, 0x79, 0x30, 0xe7, 0xf9, 0x3a, 0x65, 0xd0, 0x41, 0x27, 0xe0, 0x49, 0xb8, 0x4a, 0x97, 0x51, + 0x40, 0x73, 0x66, 0x15, 0xf0, 0x00, 0x9d, 0x82, 0x13, 0x2b, 0xcf, 0xd9, 0xbb, 0x9c, 0xe7, 0x49, + 0xc2, 0x97, 0x49, 0x3c, 0x83, 0x0f, 0xd1, 0x4b, 0x70, 0x6a, 0xa9, 0x39, 0xcd, 0xb8, 0x4d, 0x0f, + 0xe6, 0xf4, 0x2a, 0x83, 0x8f, 0x74, 0xe1, 0xbf, 0xb4, 0x36, 0x5e, 0xd2, 0x78, 0xcd, 0xf5, 0x49, + 0x06, 0x0f, 0xd1, 0x73, 0xf0, 0xf4, 0xff, 0x0b, 0xe9, 0x06, 0x78, 0x74, 0xf1, 0x67, 0x02, 0xdc, + 0x8f, 0xf2, 0x06, 0xdf, 0xbf, 0xe4, 0xc5, 0xf9, 0x9e, 0x21, 0x52, 0xbd, 0x64, 0x3a, 0x79, 0x1f, + 0xde, 0xf9, 0x5b, 0xd9, 0x55, 0xa2, 0xc5, 0x72, 0x68, 0xbd, 0xb6, 0x11, 0xe3, 0xce, 0xf6, 0x69, + 0xfb, 0x8d, 0xda, 0xf7, 0xd2, 0xaf, 0xcd, 0xe7, 0xbb, 0x73, 0x30, 0xa3, 0xf4, 0x87, 0x33, 0x9d, + 0x99, 0x30, 0x5a, 0x2b, 0x6c, 0xa0, 0x46, 0x85, 0x8f, 0xc7, 0x4a, 0xf5, 0xd3, 0x0a, 0x4a, 0x5a, + 0xab, 0x72, 0x27, 0x28, 0x0b, 0xbf, 0x34, 0x82, 0xdf, 0x8e, 0x6b, 0x4e, 0x09, 0xa1, 0xb5, 0x22, + 0x64, 0x27, 0x21, 0xa4, 0xf0, 0x09, 0x31, 0xa2, 0x0f, 0x87, 0xe3, 0xed, 0x5e, 0xfd, 0x0d, 0x00, + 0x00, 0xff, 0xff, 0xcd, 0x4e, 0x24, 0x72, 0x86, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.pb.go new file mode 100644 index 0000000..fc2dc9b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a keyword plan negative +// keyword. +type KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError int32 + +const ( + // Enum unspecified. + KeywordPlanNegativeKeywordErrorEnum_UNSPECIFIED KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError = 0 + // The received error code is not known in this version. + KeywordPlanNegativeKeywordErrorEnum_UNKNOWN KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError = 1 +) + +var KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", +} +var KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, +} + +func (x KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError) String() string { + return proto.EnumName(KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError_name, int32(x)) +} +func (KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_error_64e283750637729c, []int{0, 0} +} + +// Container for enum describing possible errors from applying a keyword plan +// negative keyword. +type KeywordPlanNegativeKeywordErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanNegativeKeywordErrorEnum) Reset() { *m = KeywordPlanNegativeKeywordErrorEnum{} } +func (m *KeywordPlanNegativeKeywordErrorEnum) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanNegativeKeywordErrorEnum) ProtoMessage() {} +func (*KeywordPlanNegativeKeywordErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_error_64e283750637729c, []int{0} +} +func (m *KeywordPlanNegativeKeywordErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum.Unmarshal(m, b) +} +func (m *KeywordPlanNegativeKeywordErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanNegativeKeywordErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum.Merge(dst, src) +} +func (m *KeywordPlanNegativeKeywordErrorEnum) XXX_Size() int { + return xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum.Size(m) +} +func (m *KeywordPlanNegativeKeywordErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanNegativeKeywordErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*KeywordPlanNegativeKeywordErrorEnum)(nil), "google.ads.googleads.v1.errors.KeywordPlanNegativeKeywordErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError", KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError_name, KeywordPlanNegativeKeywordErrorEnum_KeywordPlanNegativeKeywordError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/keyword_plan_negative_keyword_error.proto", 
fileDescriptor_keyword_plan_negative_keyword_error_64e283750637729c) +} + +var fileDescriptor_keyword_plan_negative_keyword_error_64e283750637729c = []byte{ + // 297 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xc1, 0x4a, 0xf4, 0x30, + 0x14, 0x85, 0xff, 0xf6, 0x07, 0x85, 0xcc, 0xc2, 0xa1, 0x4b, 0x91, 0x11, 0xaa, 0xeb, 0x84, 0xe2, + 0x2e, 0x2e, 0xa4, 0xe3, 0xd4, 0x71, 0x18, 0xa8, 0x05, 0x99, 0x0a, 0x52, 0x28, 0xd1, 0xc4, 0x50, + 0xec, 0x24, 0x25, 0xa9, 0x15, 0x5f, 0xc7, 0xa5, 0x8f, 0xe2, 0xa3, 0xf8, 0x12, 0x4a, 0x7b, 0xdb, + 0xee, 0x74, 0x56, 0x39, 0x5c, 0xbe, 0x7b, 0xce, 0xc9, 0x45, 0xd7, 0x52, 0x6b, 0x59, 0x0a, 0xc2, + 0xb8, 0x25, 0x20, 0x5b, 0xd5, 0x04, 0x44, 0x18, 0xa3, 0x8d, 0x25, 0xcf, 0xe2, 0xed, 0x55, 0x1b, + 0x9e, 0x57, 0x25, 0x53, 0xb9, 0x12, 0x92, 0xd5, 0x45, 0x23, 0xf2, 0x61, 0xda, 0x41, 0xb8, 0x32, + 0xba, 0xd6, 0xde, 0x0c, 0xd6, 0x31, 0xe3, 0x16, 0x8f, 0x4e, 0xb8, 0x09, 0x30, 0x38, 0x1d, 0x1e, + 0x0d, 0x49, 0x55, 0x41, 0x98, 0x52, 0xba, 0x66, 0x75, 0xa1, 0x95, 0x85, 0x6d, 0xff, 0x09, 0x9d, + 0xac, 0xc1, 0x34, 0x29, 0x99, 0x8a, 0xfb, 0xa0, 0x7e, 0x14, 0xb5, 0x0e, 0x91, 0x7a, 0xd9, 0xfa, + 0x17, 0xe8, 0x78, 0x07, 0xe6, 0x1d, 0xa0, 0xc9, 0x26, 0xbe, 0x4d, 0xa2, 0xcb, 0xd5, 0xd5, 0x2a, + 0x5a, 0x4c, 0xff, 0x79, 0x13, 0xb4, 0xbf, 0x89, 0xd7, 0xf1, 0xcd, 0x5d, 0x3c, 0x75, 0xe6, 0xdf, + 0x0e, 0xf2, 0x1f, 0xf5, 0x16, 0xff, 0x5d, 0x76, 0x7e, 0xba, 0x23, 0x25, 0x69, 0x4b, 0x27, 0xce, + 0xfd, 0xa2, 0xf7, 0x91, 0xba, 0x64, 0x4a, 0x62, 0x6d, 0x24, 0x91, 0x42, 0x75, 0x5f, 0x1a, 0xce, + 0x59, 0x15, 0xf6, 0xb7, 0xeb, 0x9e, 0xc3, 0xf3, 0xee, 0xfe, 0x5f, 0x86, 0xe1, 0x87, 0x3b, 0x5b, + 0x82, 0x59, 0xc8, 0x2d, 0x06, 0xd9, 0xaa, 0x34, 0xc0, 0x5d, 0xa4, 0xfd, 0x1c, 0x80, 0x2c, 0xe4, + 0x36, 0x1b, 0x81, 0x2c, 0x0d, 0x32, 0x00, 0xbe, 0x5c, 0x1f, 0xa6, 0x94, 0x86, 0xdc, 0x52, 0x3a, + 0x22, 0x94, 0xa6, 0x01, 0xa5, 0x00, 0x3d, 0xec, 0x75, 0xed, 0xce, 0x7e, 0x02, 0x00, 0x00, 0xff, + 0xff, 0x6b, 0xa8, 0xb2, 0xa0, 0xfa, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/label_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/label_error.pb.go new file mode 100644 index 0000000..bbbc231 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/label_error.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/label_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible label errors. +type LabelErrorEnum_LabelError int32 + +const ( + // Enum unspecified. + LabelErrorEnum_UNSPECIFIED LabelErrorEnum_LabelError = 0 + // The received error code is not known in this version. + LabelErrorEnum_UNKNOWN LabelErrorEnum_LabelError = 1 + // An inactive label cannot be applied. 
+ LabelErrorEnum_CANNOT_APPLY_INACTIVE_LABEL LabelErrorEnum_LabelError = 2 + // A label cannot be applied to a disabled ad group criterion. + LabelErrorEnum_CANNOT_APPLY_LABEL_TO_DISABLED_AD_GROUP_CRITERION LabelErrorEnum_LabelError = 3 + // A label cannot be applied to a negative ad group criterion. + LabelErrorEnum_CANNOT_APPLY_LABEL_TO_NEGATIVE_AD_GROUP_CRITERION LabelErrorEnum_LabelError = 4 + // Cannot apply more than 50 labels per resource. + LabelErrorEnum_EXCEEDED_LABEL_LIMIT_PER_TYPE LabelErrorEnum_LabelError = 5 + // Labels from a manager account cannot be applied to campaign, ad group, + // ad group ad, or ad group criterion resources. + LabelErrorEnum_INVALID_RESOURCE_FOR_MANAGER_LABEL LabelErrorEnum_LabelError = 6 + // Label names must be unique. + LabelErrorEnum_DUPLICATE_NAME LabelErrorEnum_LabelError = 7 + // Label names cannot be empty. + LabelErrorEnum_INVALID_LABEL_NAME LabelErrorEnum_LabelError = 8 + // Labels cannot be applied to a draft. + LabelErrorEnum_CANNOT_ATTACH_LABEL_TO_DRAFT LabelErrorEnum_LabelError = 9 + // Labels not from a manager account cannot be applied to the customer + // resource. + LabelErrorEnum_CANNOT_ATTACH_NON_MANAGER_LABEL_TO_CUSTOMER LabelErrorEnum_LabelError = 10 +) + +var LabelErrorEnum_LabelError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_APPLY_INACTIVE_LABEL", + 3: "CANNOT_APPLY_LABEL_TO_DISABLED_AD_GROUP_CRITERION", + 4: "CANNOT_APPLY_LABEL_TO_NEGATIVE_AD_GROUP_CRITERION", + 5: "EXCEEDED_LABEL_LIMIT_PER_TYPE", + 6: "INVALID_RESOURCE_FOR_MANAGER_LABEL", + 7: "DUPLICATE_NAME", + 8: "INVALID_LABEL_NAME", + 9: "CANNOT_ATTACH_LABEL_TO_DRAFT", + 10: "CANNOT_ATTACH_NON_MANAGER_LABEL_TO_CUSTOMER", +} +var LabelErrorEnum_LabelError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_APPLY_INACTIVE_LABEL": 2, + "CANNOT_APPLY_LABEL_TO_DISABLED_AD_GROUP_CRITERION": 3, + "CANNOT_APPLY_LABEL_TO_NEGATIVE_AD_GROUP_CRITERION": 4, + "EXCEEDED_LABEL_LIMIT_PER_TYPE": 5, + "INVALID_RESOURCE_FOR_MANAGER_LABEL": 6, + "DUPLICATE_NAME": 7, + "INVALID_LABEL_NAME": 8, + "CANNOT_ATTACH_LABEL_TO_DRAFT": 9, + "CANNOT_ATTACH_NON_MANAGER_LABEL_TO_CUSTOMER": 10, +} + +func (x LabelErrorEnum_LabelError) String() string { + return proto.EnumName(LabelErrorEnum_LabelError_name, int32(x)) +} +func (LabelErrorEnum_LabelError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_label_error_9335abe2f6e3dde0, []int{0, 0} +} + +// Container for enum describing possible label errors. 
+type LabelErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelErrorEnum) Reset() { *m = LabelErrorEnum{} } +func (m *LabelErrorEnum) String() string { return proto.CompactTextString(m) } +func (*LabelErrorEnum) ProtoMessage() {} +func (*LabelErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_label_error_9335abe2f6e3dde0, []int{0} +} +func (m *LabelErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelErrorEnum.Unmarshal(m, b) +} +func (m *LabelErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelErrorEnum.Marshal(b, m, deterministic) +} +func (dst *LabelErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelErrorEnum.Merge(dst, src) +} +func (m *LabelErrorEnum) XXX_Size() int { + return xxx_messageInfo_LabelErrorEnum.Size(m) +} +func (m *LabelErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LabelErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LabelErrorEnum)(nil), "google.ads.googleads.v1.errors.LabelErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.LabelErrorEnum_LabelError", LabelErrorEnum_LabelError_name, LabelErrorEnum_LabelError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/label_error.proto", fileDescriptor_label_error_9335abe2f6e3dde0) +} + +var fileDescriptor_label_error_9335abe2f6e3dde0 = []byte{ + // 465 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0x40, 0x69, 0x02, 0x2d, 0x6c, 0xa5, 0xd6, 0xda, 0x03, 0x07, 0x28, 0x05, 0x72, 0xe0, 0x82, + 0x64, 0x63, 0x21, 0x2e, 0xe6, 0x34, 0xb1, 0x27, 0x66, 0x85, 0xb3, 0xb6, 0x36, 0x6b, 0x43, 0x51, + 0xa4, 0x95, 0x8b, 0x23, 0x2b, 0x52, 0xea, 0x8d, 0xec, 0xd0, 0x1f, 0xe0, 0x1f, 0xf8, 0x00, 0x8e, + 0x7c, 0x0a, 0x9f, 0xd2, 0xaf, 0x40, 0xf6, 0xc6, 0x29, 0x91, 0x28, 0x27, 0x8f, 0x47, 0xef, 0xcd, + 0xcc, 0x6a, 0x86, 0xbc, 0x29, 0xb5, 0x2e, 0x57, 0x0b, 0x27, 0x2f, 0x1a, 0xc7, 0x84, 0x6d, 0x74, + 0xed, 0x3a, 0x8b, 0xba, 0xd6, 0x75, 0xe3, 0xac, 0xf2, 0xcb, 0xc5, 0x4a, 0x75, 0x3f, 0xf6, 0xba, + 0xd6, 0x1b, 0x4d, 0xcf, 0x0d, 0x66, 0xe7, 0x45, 0x63, 0xef, 0x0c, 0xfb, 0xda, 0xb5, 0x8d, 0xf1, + 0xe4, 0xac, 0xaf, 0xb8, 0x5e, 0x3a, 0x79, 0x55, 0xe9, 0x4d, 0xbe, 0x59, 0xea, 0xaa, 0x31, 0xf6, + 0xe8, 0xc7, 0x90, 0x9c, 0x44, 0x6d, 0x4d, 0x6c, 0x69, 0xac, 0xbe, 0x5d, 0x8d, 0xbe, 0x0f, 0x09, + 0xb9, 0x4d, 0xd1, 0x53, 0x72, 0x9c, 0xf2, 0x59, 0x82, 0x3e, 0x9b, 0x30, 0x0c, 0xac, 0x7b, 0xf4, + 0x98, 0x1c, 0xa5, 0xfc, 0x23, 0x8f, 0x3f, 0x71, 0xeb, 0x80, 0x3e, 0x27, 0x4f, 0x7d, 0xe0, 0x3c, + 0x96, 0x0a, 0x92, 0x24, 0xba, 0x50, 0x8c, 0x83, 0x2f, 0x59, 0x86, 0x2a, 0x82, 0x31, 0x46, 0xd6, + 0x80, 0xbe, 0x23, 0xee, 0x1e, 0xd0, 0xe5, 0x95, 0x8c, 0x55, 0xc0, 0x66, 0x30, 0x8e, 0x30, 0x50, + 0x10, 0xa8, 0x50, 0xc4, 0x69, 0xa2, 0x7c, 0xc1, 0x24, 0x0a, 0x16, 0x73, 0x6b, 0x78, 0xb7, 0xc6, + 0x31, 0x84, 0xae, 0xc1, 0x3f, 0xb4, 0xfb, 0xf4, 0x25, 0x79, 0x86, 0x9f, 0x7d, 0xc4, 0x00, 0x83, + 0xad, 0x12, 0xb1, 0x29, 0x93, 0x2a, 0x41, 0xa1, 0xe4, 0x45, 0x82, 0xd6, 0x03, 0xfa, 0x8a, 0x8c, + 0x18, 0xcf, 0x20, 0x62, 0x81, 0x12, 0x38, 0x8b, 0x53, 0xe1, 0xa3, 0x9a, 0xc4, 0x42, 0x4d, 0x81, + 0x43, 0x88, 0x62, 0x3b, 0xf8, 0x21, 0xa5, 0xe4, 0x24, 0x48, 0x93, 0x88, 0xf9, 0x20, 0x51, 0x71, + 0x98, 0xa2, 0x75, 0x44, 0x1f, 0x13, 0xda, 0xbb, 0xa6, 0x7a, 0x97, 0x7f, 0x48, 0x5f, 
0x90, 0xb3, + 0x7e, 0x5a, 0x29, 0xc1, 0xff, 0xf0, 0xd7, 0x2b, 0x05, 0x4c, 0xa4, 0xf5, 0x88, 0x3a, 0xe4, 0xf5, + 0x3e, 0xc1, 0x63, 0xbe, 0xdf, 0xb2, 0xa5, 0xfd, 0x74, 0x26, 0xe3, 0x29, 0x0a, 0x8b, 0x8c, 0x6f, + 0x0e, 0xc8, 0xe8, 0xab, 0xbe, 0xb2, 0xff, 0xbf, 0xdd, 0xf1, 0xe9, 0xed, 0xa6, 0x92, 0x76, 0xa1, + 0xc9, 0xc1, 0x97, 0x60, 0xab, 0x94, 0x7a, 0x95, 0x57, 0xa5, 0xad, 0xeb, 0xd2, 0x29, 0x17, 0x55, + 0xb7, 0xee, 0xfe, 0xa4, 0xd6, 0xcb, 0xe6, 0xae, 0x0b, 0x7b, 0x6f, 0x3e, 0x3f, 0x07, 0xc3, 0x10, + 0xe0, 0xd7, 0xe0, 0x3c, 0x34, 0xc5, 0xa0, 0x68, 0x6c, 0x13, 0xb6, 0x51, 0xe6, 0xda, 0x5d, 0xcb, + 0xe6, 0x77, 0x0f, 0xcc, 0xa1, 0x68, 0xe6, 0x3b, 0x60, 0x9e, 0xb9, 0x73, 0x03, 0xdc, 0x0c, 0x46, + 0x26, 0xeb, 0x79, 0x50, 0x34, 0x9e, 0xb7, 0x43, 0x3c, 0x2f, 0x73, 0x3d, 0xcf, 0x40, 0x97, 0x87, + 0xdd, 0x74, 0x6f, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0xc7, 0x2b, 0xdf, 0x02, 0xfe, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/language_code_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/language_code_error.pb.go new file mode 100644 index 0000000..53df041 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/language_code_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/language_code_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing language code errors. +type LanguageCodeErrorEnum_LanguageCodeError int32 + +const ( + // Enum unspecified. + LanguageCodeErrorEnum_UNSPECIFIED LanguageCodeErrorEnum_LanguageCodeError = 0 + // The received error code is not known in this version. + LanguageCodeErrorEnum_UNKNOWN LanguageCodeErrorEnum_LanguageCodeError = 1 + // The input language code is not recognized. + LanguageCodeErrorEnum_LANGUAGE_CODE_NOT_FOUND LanguageCodeErrorEnum_LanguageCodeError = 2 + // The language is not allowed to use. + LanguageCodeErrorEnum_INVALID_LANGUAGE_CODE LanguageCodeErrorEnum_LanguageCodeError = 3 +) + +var LanguageCodeErrorEnum_LanguageCodeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "LANGUAGE_CODE_NOT_FOUND", + 3: "INVALID_LANGUAGE_CODE", +} +var LanguageCodeErrorEnum_LanguageCodeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "LANGUAGE_CODE_NOT_FOUND": 2, + "INVALID_LANGUAGE_CODE": 3, +} + +func (x LanguageCodeErrorEnum_LanguageCodeError) String() string { + return proto.EnumName(LanguageCodeErrorEnum_LanguageCodeError_name, int32(x)) +} +func (LanguageCodeErrorEnum_LanguageCodeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_code_error_66c9745952dd8d08, []int{0, 0} +} + +// Container for enum describing language code errors. 
+type LanguageCodeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LanguageCodeErrorEnum) Reset() { *m = LanguageCodeErrorEnum{} } +func (m *LanguageCodeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*LanguageCodeErrorEnum) ProtoMessage() {} +func (*LanguageCodeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_language_code_error_66c9745952dd8d08, []int{0} +} +func (m *LanguageCodeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LanguageCodeErrorEnum.Unmarshal(m, b) +} +func (m *LanguageCodeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LanguageCodeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *LanguageCodeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_LanguageCodeErrorEnum.Merge(dst, src) +} +func (m *LanguageCodeErrorEnum) XXX_Size() int { + return xxx_messageInfo_LanguageCodeErrorEnum.Size(m) +} +func (m *LanguageCodeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_LanguageCodeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_LanguageCodeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LanguageCodeErrorEnum)(nil), "google.ads.googleads.v1.errors.LanguageCodeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.LanguageCodeErrorEnum_LanguageCodeError", LanguageCodeErrorEnum_LanguageCodeError_name, LanguageCodeErrorEnum_LanguageCodeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/language_code_error.proto", fileDescriptor_language_code_error_66c9745952dd8d08) +} + +var fileDescriptor_language_code_error_66c9745952dd8d08 = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4f, 0x4a, 0xc3, 0x40, + 0x14, 0xc6, 0x4d, 0x0a, 0x0a, 0xd3, 0x85, 0x31, 0x50, 0xc5, 0x3f, 0x74, 0x91, 0x03, 0x4c, 0x08, + 0x6e, 0x64, 0x5c, 0x4d, 0x9b, 0x34, 0x04, 0xcb, 0xa4, 0xa0, 0x89, 0x20, 0x81, 0x10, 0x3b, 0x61, + 0x08, 0xb4, 0xf3, 0x4a, 0xa6, 0xed, 0x01, 0x3c, 0x8a, 0x4b, 0x8f, 0xe2, 0x51, 0xdc, 0x79, 0x03, + 0x49, 0xa6, 0x0d, 0x48, 0xd1, 0x55, 0x3e, 0x5e, 0x7e, 0xdf, 0x9b, 0xf7, 0x7d, 0xe8, 0x4e, 0x00, + 0x88, 0x45, 0xe9, 0x16, 0x5c, 0xb9, 0x5a, 0x36, 0x6a, 0xeb, 0xb9, 0x65, 0x5d, 0x43, 0xad, 0xdc, + 0x45, 0x21, 0xc5, 0xa6, 0x10, 0x65, 0x3e, 0x07, 0x5e, 0xe6, 0xed, 0x10, 0xaf, 0x6a, 0x58, 0x83, + 0x3d, 0xd4, 0x38, 0x2e, 0xb8, 0xc2, 0x9d, 0x13, 0x6f, 0x3d, 0xac, 0x9d, 0x57, 0x37, 0xfb, 0xcd, + 0xab, 0xca, 0x2d, 0xa4, 0x84, 0x75, 0xb1, 0xae, 0x40, 0x2a, 0xed, 0x76, 0xde, 0x0c, 0x34, 0x98, + 0xee, 0x76, 0x8f, 0x81, 0x97, 0x41, 0x63, 0x0a, 0xe4, 0x66, 0xe9, 0x54, 0xe8, 0xec, 0xe0, 0x87, + 0x7d, 0x8a, 0xfa, 0x09, 0x7b, 0x9c, 0x05, 0xe3, 0x68, 0x12, 0x05, 0xbe, 0x75, 0x64, 0xf7, 0xd1, + 0x49, 0xc2, 0x1e, 0x58, 0xfc, 0xcc, 0x2c, 0xc3, 0xbe, 0x46, 0x17, 0x53, 0xca, 0xc2, 0x84, 0x86, + 0x41, 0x3e, 0x8e, 0xfd, 0x20, 0x67, 0xf1, 0x53, 0x3e, 0x89, 0x13, 0xe6, 0x5b, 0xa6, 0x7d, 0x89, + 0x06, 0x11, 0x4b, 0xe9, 0x34, 0xf2, 0xf3, 0x5f, 0x90, 0xd5, 0x1b, 0x7d, 0x1b, 0xc8, 0x99, 0xc3, + 0x12, 0xff, 0x9f, 0x64, 0x74, 0x7e, 0x70, 0xcf, 0xac, 0xc9, 0x30, 0x33, 0x5e, 0xfc, 0x9d, 0x53, + 0x40, 0xd3, 0x13, 0x86, 0x5a, 0xb8, 0xa2, 0x94, 0x6d, 0xc2, 0x7d, 0x9b, 0xab, 0x4a, 0xfd, 0x55, + 0xee, 0xbd, 0xfe, 0xbc, 0x9b, 0xbd, 0x90, 0xd2, 0x0f, 0x73, 0x18, 0xea, 0x65, 0x94, 0x2b, 0xac, + 0x65, 0xa3, 0x52, 0x0f, 0xb7, 0x4f, 0xaa, 0xcf, 0x3d, 0x90, 0x51, 0xae, 
0xb2, 0x0e, 0xc8, 0x52, + 0x2f, 0xd3, 0xc0, 0x97, 0xe9, 0xe8, 0x29, 0x21, 0x94, 0x2b, 0x42, 0x3a, 0x84, 0x90, 0xd4, 0x23, + 0x44, 0x43, 0xaf, 0xc7, 0xed, 0x75, 0xb7, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x48, 0xef, 0x6d, + 0x3e, 0xf9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/list_operation_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/list_operation_error.pb.go new file mode 100644 index 0000000..97bcf49 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/list_operation_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/list_operation_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible list operation errors. +type ListOperationErrorEnum_ListOperationError int32 + +const ( + // Enum unspecified. + ListOperationErrorEnum_UNSPECIFIED ListOperationErrorEnum_ListOperationError = 0 + // The received error code is not known in this version. + ListOperationErrorEnum_UNKNOWN ListOperationErrorEnum_ListOperationError = 1 + // Field required in value is missing. + ListOperationErrorEnum_REQUIRED_FIELD_MISSING ListOperationErrorEnum_ListOperationError = 7 + // Duplicate or identical value is sent in multiple list operations. + ListOperationErrorEnum_DUPLICATE_VALUES ListOperationErrorEnum_ListOperationError = 8 +) + +var ListOperationErrorEnum_ListOperationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 7: "REQUIRED_FIELD_MISSING", + 8: "DUPLICATE_VALUES", +} +var ListOperationErrorEnum_ListOperationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REQUIRED_FIELD_MISSING": 7, + "DUPLICATE_VALUES": 8, +} + +func (x ListOperationErrorEnum_ListOperationError) String() string { + return proto.EnumName(ListOperationErrorEnum_ListOperationError_name, int32(x)) +} +func (ListOperationErrorEnum_ListOperationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_list_operation_error_945cc7d5592a6156, []int{0, 0} +} + +// Container for enum describing possible list operation errors. 
+type ListOperationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationErrorEnum) Reset() { *m = ListOperationErrorEnum{} } +func (m *ListOperationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ListOperationErrorEnum) ProtoMessage() {} +func (*ListOperationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_list_operation_error_945cc7d5592a6156, []int{0} +} +func (m *ListOperationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationErrorEnum.Unmarshal(m, b) +} +func (m *ListOperationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ListOperationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationErrorEnum.Merge(dst, src) +} +func (m *ListOperationErrorEnum) XXX_Size() int { + return xxx_messageInfo_ListOperationErrorEnum.Size(m) +} +func (m *ListOperationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ListOperationErrorEnum)(nil), "google.ads.googleads.v1.errors.ListOperationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ListOperationErrorEnum_ListOperationError", ListOperationErrorEnum_ListOperationError_name, ListOperationErrorEnum_ListOperationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/list_operation_error.proto", fileDescriptor_list_operation_error_945cc7d5592a6156) +} + +var fileDescriptor_list_operation_error_945cc7d5592a6156 = []byte{ + // 326 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0xdd, 0x04, 0x27, 0xd9, 0xc1, 0x52, 0x64, 0xc2, 0x90, 0x1d, 0xfa, 0x00, 0x29, 0xc5, + 0x93, 0xf1, 0x94, 0xad, 0xd9, 0x08, 0xd6, 0xae, 0xae, 0xb6, 0x82, 0x14, 0x4a, 0xb5, 0x25, 0x14, + 0xb6, 0xa4, 0x24, 0x75, 0x47, 0x1f, 0xc6, 0xa3, 0x8f, 0xe2, 0xa3, 0x78, 0xf4, 0x09, 0xa4, 0x8d, + 0xed, 0x65, 0xe8, 0xa9, 0x1f, 0xff, 0xfe, 0xbe, 0x2f, 0xdf, 0xff, 0x0f, 0xae, 0x99, 0x10, 0x6c, + 0x5b, 0xd8, 0x59, 0xae, 0x6c, 0x2d, 0x1b, 0xb5, 0x77, 0xec, 0x42, 0x4a, 0x21, 0x95, 0xbd, 0x2d, + 0x55, 0x9d, 0x8a, 0xaa, 0x90, 0x59, 0x5d, 0x0a, 0x9e, 0xb6, 0x53, 0x58, 0x49, 0x51, 0x0b, 0x73, + 0xa6, 0x79, 0x98, 0xe5, 0x0a, 0xf6, 0x56, 0xb8, 0x77, 0xa0, 0xb6, 0x4e, 0x2f, 0xbb, 0xe8, 0xaa, + 0xb4, 0x33, 0xce, 0x45, 0xdd, 0x46, 0x28, 0xed, 0xb6, 0xde, 0xc0, 0xc4, 0x2b, 0x55, 0xbd, 0xee, + 0xa2, 0x49, 0x63, 0x22, 0xfc, 0x75, 0x67, 0xe5, 0xc0, 0x3c, 0xfc, 0x63, 0x9e, 0x81, 0x71, 0xe4, + 0x87, 0x01, 0x59, 0xd0, 0x25, 0x25, 0xae, 0x71, 0x64, 0x8e, 0xc1, 0x28, 0xf2, 0x6f, 0xfd, 0xf5, + 0xa3, 0x6f, 0x0c, 0xcc, 0x29, 0x98, 0x6c, 0xc8, 0x7d, 0x44, 0x37, 0xc4, 0x4d, 0x97, 0x94, 0x78, + 0x6e, 0x7a, 0x47, 0xc3, 0x90, 0xfa, 0x2b, 0x63, 0x64, 0x9e, 0x03, 0xc3, 0x8d, 0x02, 0x8f, 0x2e, + 0xf0, 0x03, 0x49, 0x63, 0xec, 0x45, 0x24, 0x34, 0x4e, 0xe7, 0xdf, 0x03, 0x60, 0xbd, 0x88, 0x1d, + 0xfc, 0x7f, 0x89, 0xf9, 0xc5, 0x61, 0x95, 0xa0, 0xe9, 0x1f, 0x0c, 0x9e, 0xdc, 0x5f, 0x2b, 0x13, + 0xdb, 0x8c, 0x33, 0x28, 0x24, 0xb3, 0x59, 0xc1, 0xdb, 0xed, 0xba, 0x53, 0x56, 0xa5, 0xfa, 0xeb, + 0xb2, 0x37, 0xfa, 0xf3, 0x3e, 0x3c, 0x5e, 0x61, 0xfc, 0x31, 0x9c, 0xad, 0x74, 0x18, 0xce, 0x15, + 0xd4, 0xb2, 0x51, 0xb1, 0x03, 0xdb, 0x27, 
0xd5, 0x67, 0x07, 0x24, 0x38, 0x57, 0x49, 0x0f, 0x24, + 0xb1, 0x93, 0x68, 0xe0, 0x6b, 0x68, 0xe9, 0x29, 0x42, 0x38, 0x57, 0x08, 0xf5, 0x08, 0x42, 0xb1, + 0x83, 0x90, 0x86, 0x9e, 0x4f, 0xda, 0x76, 0x57, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x92, 0x06, + 0x57, 0xb9, 0xf6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_bundle_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_bundle_error.pb.go new file mode 100644 index 0000000..79f0d0c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_bundle_error.pb.go @@ -0,0 +1,219 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/media_bundle_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible media bundle errors. +type MediaBundleErrorEnum_MediaBundleError int32 + +const ( + // Enum unspecified. + MediaBundleErrorEnum_UNSPECIFIED MediaBundleErrorEnum_MediaBundleError = 0 + // The received error code is not known in this version. + MediaBundleErrorEnum_UNKNOWN MediaBundleErrorEnum_MediaBundleError = 1 + // There was a problem with the request. + MediaBundleErrorEnum_BAD_REQUEST MediaBundleErrorEnum_MediaBundleError = 3 + // HTML5 ads using DoubleClick Studio created ZIP files are not supported. + MediaBundleErrorEnum_DOUBLECLICK_BUNDLE_NOT_ALLOWED MediaBundleErrorEnum_MediaBundleError = 4 + // Cannot reference URL external to the media bundle. + MediaBundleErrorEnum_EXTERNAL_URL_NOT_ALLOWED MediaBundleErrorEnum_MediaBundleError = 5 + // Media bundle file is too large. + MediaBundleErrorEnum_FILE_TOO_LARGE MediaBundleErrorEnum_MediaBundleError = 6 + // ZIP file from Google Web Designer is not published. + MediaBundleErrorEnum_GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED MediaBundleErrorEnum_MediaBundleError = 7 + // Input was invalid. + MediaBundleErrorEnum_INVALID_INPUT MediaBundleErrorEnum_MediaBundleError = 8 + // There was a problem with the media bundle. + MediaBundleErrorEnum_INVALID_MEDIA_BUNDLE MediaBundleErrorEnum_MediaBundleError = 9 + // There was a problem with one or more of the media bundle entries. + MediaBundleErrorEnum_INVALID_MEDIA_BUNDLE_ENTRY MediaBundleErrorEnum_MediaBundleError = 10 + // The media bundle contains a file with an unknown mime type + MediaBundleErrorEnum_INVALID_MIME_TYPE MediaBundleErrorEnum_MediaBundleError = 11 + // The media bundle contain an invalid asset path. + MediaBundleErrorEnum_INVALID_PATH MediaBundleErrorEnum_MediaBundleError = 12 + // HTML5 ad is trying to reference an asset not in .ZIP file + MediaBundleErrorEnum_INVALID_URL_REFERENCE MediaBundleErrorEnum_MediaBundleError = 13 + // Media data is too large. 
+ MediaBundleErrorEnum_MEDIA_DATA_TOO_LARGE MediaBundleErrorEnum_MediaBundleError = 14 + // The media bundle contains no primary entry. + MediaBundleErrorEnum_MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY MediaBundleErrorEnum_MediaBundleError = 15 + // There was an error on the server. + MediaBundleErrorEnum_SERVER_ERROR MediaBundleErrorEnum_MediaBundleError = 16 + // The image could not be stored. + MediaBundleErrorEnum_STORAGE_ERROR MediaBundleErrorEnum_MediaBundleError = 17 + // Media bundle created with the Swiffy tool is not allowed. + MediaBundleErrorEnum_SWIFFY_BUNDLE_NOT_ALLOWED MediaBundleErrorEnum_MediaBundleError = 18 + // The media bundle contains too many files. + MediaBundleErrorEnum_TOO_MANY_FILES MediaBundleErrorEnum_MediaBundleError = 19 + // The media bundle is not of legal dimensions. + MediaBundleErrorEnum_UNEXPECTED_SIZE MediaBundleErrorEnum_MediaBundleError = 20 + // Google Web Designer not created for "Google Ads" environment. + MediaBundleErrorEnum_UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT MediaBundleErrorEnum_MediaBundleError = 21 + // Unsupported HTML5 feature in HTML5 asset. + MediaBundleErrorEnum_UNSUPPORTED_HTML5_FEATURE MediaBundleErrorEnum_MediaBundleError = 22 + // URL in HTML5 entry is not ssl compliant. + MediaBundleErrorEnum_URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT MediaBundleErrorEnum_MediaBundleError = 23 + // Custom exits not allowed in HTML5 entry. + MediaBundleErrorEnum_CUSTOM_EXIT_NOT_ALLOWED MediaBundleErrorEnum_MediaBundleError = 24 +) + +var MediaBundleErrorEnum_MediaBundleError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "BAD_REQUEST", + 4: "DOUBLECLICK_BUNDLE_NOT_ALLOWED", + 5: "EXTERNAL_URL_NOT_ALLOWED", + 6: "FILE_TOO_LARGE", + 7: "GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED", + 8: "INVALID_INPUT", + 9: "INVALID_MEDIA_BUNDLE", + 10: "INVALID_MEDIA_BUNDLE_ENTRY", + 11: "INVALID_MIME_TYPE", + 12: "INVALID_PATH", + 13: "INVALID_URL_REFERENCE", + 14: "MEDIA_DATA_TOO_LARGE", + 15: "MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY", + 16: "SERVER_ERROR", + 17: "STORAGE_ERROR", + 18: "SWIFFY_BUNDLE_NOT_ALLOWED", + 19: "TOO_MANY_FILES", + 20: "UNEXPECTED_SIZE", + 21: "UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT", + 22: "UNSUPPORTED_HTML5_FEATURE", + 23: "URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT", + 24: "CUSTOM_EXIT_NOT_ALLOWED", +} +var MediaBundleErrorEnum_MediaBundleError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BAD_REQUEST": 3, + "DOUBLECLICK_BUNDLE_NOT_ALLOWED": 4, + "EXTERNAL_URL_NOT_ALLOWED": 5, + "FILE_TOO_LARGE": 6, + "GOOGLE_WEB_DESIGNER_ZIP_FILE_NOT_PUBLISHED": 7, + "INVALID_INPUT": 8, + "INVALID_MEDIA_BUNDLE": 9, + "INVALID_MEDIA_BUNDLE_ENTRY": 10, + "INVALID_MIME_TYPE": 11, + "INVALID_PATH": 12, + "INVALID_URL_REFERENCE": 13, + "MEDIA_DATA_TOO_LARGE": 14, + "MISSING_PRIMARY_MEDIA_BUNDLE_ENTRY": 15, + "SERVER_ERROR": 16, + "STORAGE_ERROR": 17, + "SWIFFY_BUNDLE_NOT_ALLOWED": 18, + "TOO_MANY_FILES": 19, + "UNEXPECTED_SIZE": 20, + "UNSUPPORTED_GOOGLE_WEB_DESIGNER_ENVIRONMENT": 21, + "UNSUPPORTED_HTML5_FEATURE": 22, + "URL_IN_MEDIA_BUNDLE_NOT_SSL_COMPLIANT": 23, + "CUSTOM_EXIT_NOT_ALLOWED": 24, +} + +func (x MediaBundleErrorEnum_MediaBundleError) String() string { + return proto.EnumName(MediaBundleErrorEnum_MediaBundleError_name, int32(x)) +} +func (MediaBundleErrorEnum_MediaBundleError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_media_bundle_error_4672b1a9825524e9, []int{0, 0} +} + +// Container for enum describing possible media bundle errors. 
+type MediaBundleErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaBundleErrorEnum) Reset() { *m = MediaBundleErrorEnum{} } +func (m *MediaBundleErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MediaBundleErrorEnum) ProtoMessage() {} +func (*MediaBundleErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_media_bundle_error_4672b1a9825524e9, []int{0} +} +func (m *MediaBundleErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaBundleErrorEnum.Unmarshal(m, b) +} +func (m *MediaBundleErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaBundleErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MediaBundleErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaBundleErrorEnum.Merge(dst, src) +} +func (m *MediaBundleErrorEnum) XXX_Size() int { + return xxx_messageInfo_MediaBundleErrorEnum.Size(m) +} +func (m *MediaBundleErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MediaBundleErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaBundleErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MediaBundleErrorEnum)(nil), "google.ads.googleads.v1.errors.MediaBundleErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MediaBundleErrorEnum_MediaBundleError", MediaBundleErrorEnum_MediaBundleError_name, MediaBundleErrorEnum_MediaBundleError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/media_bundle_error.proto", fileDescriptor_media_bundle_error_4672b1a9825524e9) +} + +var fileDescriptor_media_bundle_error_4672b1a9825524e9 = []byte{ + // 641 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xdd, 0x6a, 0x13, 0x41, + 0x14, 0xb6, 0xad, 0x6d, 0x75, 0xfa, 0x37, 0x9d, 0x36, 0xf6, 0xc7, 0x9a, 0x8b, 0x80, 0x82, 0x0a, + 0x1b, 0x82, 0x88, 0xb0, 0x5e, 0xcd, 0x66, 0x4f, 0xb6, 0x43, 0x67, 0x67, 0xd6, 0x99, 0xd9, 0xa4, + 0x29, 0x81, 0x21, 0x35, 0x21, 0x04, 0xda, 0x6c, 0xc9, 0xb6, 0x7d, 0x1d, 0xc1, 0x4b, 0x9f, 0xc0, + 0x67, 0xf0, 0x51, 0xbc, 0xf2, 0x11, 0x64, 0xb2, 0x4d, 0x68, 0x4b, 0xf4, 0x6a, 0x0e, 0xdf, 0xf9, + 0xbe, 0xef, 0xfc, 0x30, 0x07, 0x7d, 0x1a, 0x64, 0xd9, 0xe0, 0xa2, 0x5f, 0xed, 0xf6, 0xf2, 0x6a, + 0x11, 0xba, 0xe8, 0xb6, 0x56, 0xed, 0x8f, 0xc7, 0xd9, 0x38, 0xaf, 0x5e, 0xf6, 0x7b, 0xc3, 0xae, + 0x3d, 0xbf, 0x19, 0xf5, 0x2e, 0xfa, 0x76, 0x82, 0x79, 0x57, 0xe3, 0xec, 0x3a, 0x23, 0xe5, 0x82, + 0xed, 0x75, 0x7b, 0xb9, 0x37, 0x13, 0x7a, 0xb7, 0x35, 0xaf, 0x10, 0x1e, 0x1e, 0x4d, 0x8d, 0xaf, + 0x86, 0xd5, 0xee, 0x68, 0x94, 0x5d, 0x77, 0xaf, 0x87, 0xd9, 0x28, 0x2f, 0xd4, 0x95, 0x9f, 0xcb, + 0x68, 0x37, 0x76, 0xd6, 0xc1, 0xc4, 0x19, 0x9c, 0x06, 0x46, 0x37, 0x97, 0x95, 0x6f, 0xcb, 0x08, + 0x3f, 0x4e, 0x90, 0x2d, 0xb4, 0x96, 0x0a, 0x9d, 0x40, 0x9d, 0x35, 0x18, 0x84, 0xf8, 0x09, 0x59, + 0x43, 0xab, 0xa9, 0x38, 0x11, 0xb2, 0x25, 0xf0, 0x82, 0xcb, 0x06, 0x34, 0xb4, 0x0a, 0xbe, 0xa4, + 0xa0, 0x0d, 0x5e, 0x22, 0x15, 0x54, 0x0e, 0x65, 0x1a, 0x70, 0xa8, 0x73, 0x56, 0x3f, 0xb1, 0x41, + 0x2a, 0x42, 0x0e, 0x56, 0x48, 0x63, 0x29, 0xe7, 0xb2, 0x05, 0x21, 0x7e, 0x4a, 0x8e, 0xd0, 0x3e, + 0x9c, 0x1a, 0x50, 0x82, 0x72, 0x9b, 0x2a, 0xfe, 0x20, 0xbb, 0x4c, 0x08, 0xda, 0x6c, 0x30, 0x0e, + 0xd6, 0x48, 0x69, 0x39, 0x55, 0x11, 0xe0, 0x15, 0xe2, 0xa1, 0x77, 0x91, 0x94, 0x11, 0x07, 0xdb, + 0x82, 0xc0, 0x86, 0xa0, 0x59, 0x24, 0x40, 0xd9, 0x33, 0x96, 0xd8, 0x09, 0xd7, 0x39, 0x24, 0x69, + 0xc0, 
0x99, 0x3e, 0x86, 0x10, 0xaf, 0x92, 0x6d, 0xb4, 0xc1, 0x44, 0x93, 0x72, 0x16, 0x5a, 0x26, + 0x92, 0xd4, 0xe0, 0x67, 0x64, 0x1f, 0xed, 0x4e, 0xa1, 0x18, 0x42, 0x46, 0xef, 0x5a, 0xc3, 0xcf, + 0x49, 0x19, 0x1d, 0xce, 0xcb, 0x58, 0x10, 0x46, 0xb5, 0x31, 0x22, 0x25, 0xb4, 0x3d, 0xcb, 0xb3, + 0x18, 0xac, 0x69, 0x27, 0x80, 0xd7, 0x08, 0x46, 0xeb, 0x53, 0x38, 0xa1, 0xe6, 0x18, 0xaf, 0x93, + 0x03, 0x54, 0x9a, 0x22, 0x6e, 0x2c, 0x05, 0x0d, 0x50, 0x20, 0xea, 0x80, 0x37, 0x5c, 0xf5, 0xc2, + 0x3b, 0xa4, 0x86, 0xde, 0x1b, 0x6d, 0x93, 0xbc, 0x41, 0x95, 0x98, 0x69, 0xcd, 0x44, 0x64, 0x13, + 0xc5, 0x62, 0xaa, 0xda, 0xf3, 0xba, 0xd8, 0x72, 0xe5, 0x34, 0xa8, 0x26, 0x28, 0x0b, 0x4a, 0x49, + 0x85, 0xb1, 0x1b, 0x52, 0x1b, 0xa9, 0x68, 0x04, 0x77, 0xd0, 0x36, 0x79, 0x85, 0x0e, 0x74, 0x8b, + 0x35, 0x1a, 0xed, 0x79, 0x8b, 0x27, 0x6e, 0xb5, 0xae, 0x74, 0x4c, 0x45, 0x7b, 0xb2, 0x37, 0x8d, + 0x77, 0xc8, 0x0e, 0xda, 0x4a, 0x05, 0x9c, 0x26, 0x50, 0x37, 0x10, 0x5a, 0xcd, 0xce, 0x00, 0xef, + 0x92, 0x2a, 0x7a, 0x9f, 0x0a, 0x9d, 0x26, 0x89, 0x54, 0x0e, 0x9d, 0xb7, 0x7b, 0x10, 0x4d, 0xa6, + 0xa4, 0x88, 0x41, 0x18, 0x5c, 0x72, 0x85, 0xef, 0x0b, 0x8e, 0x4d, 0xcc, 0x3f, 0xda, 0x06, 0x50, + 0x93, 0x2a, 0xc0, 0x2f, 0xc8, 0x5b, 0xf4, 0xda, 0x6d, 0x84, 0x89, 0x87, 0xb3, 0xb9, 0xee, 0xb4, + 0xe6, 0xb6, 0x2e, 0xe3, 0x84, 0x33, 0x2a, 0x0c, 0xde, 0x23, 0x2f, 0xd1, 0x5e, 0x3d, 0xd5, 0x46, + 0xc6, 0x16, 0x4e, 0x99, 0x79, 0x30, 0xc0, 0x7e, 0xf0, 0x67, 0x01, 0x55, 0xbe, 0x66, 0x97, 0xde, + 0xff, 0xff, 0x7f, 0x50, 0x7a, 0xfc, 0x8b, 0x13, 0xf7, 0xf1, 0x93, 0x85, 0xb3, 0xf0, 0x4e, 0x38, + 0xc8, 0x2e, 0xba, 0xa3, 0x81, 0x97, 0x8d, 0x07, 0xd5, 0x41, 0x7f, 0x34, 0x39, 0x8b, 0xe9, 0x05, + 0x5e, 0x0d, 0xf3, 0x7f, 0x1d, 0xe4, 0xe7, 0xe2, 0xf9, 0xbe, 0xb8, 0x14, 0x51, 0xfa, 0x63, 0xb1, + 0x1c, 0x15, 0x66, 0xb4, 0x97, 0x7b, 0x45, 0xe8, 0xa2, 0x66, 0xcd, 0x9b, 0x94, 0xcc, 0x7f, 0x4d, + 0x09, 0x1d, 0xda, 0xcb, 0x3b, 0x33, 0x42, 0xa7, 0x59, 0xeb, 0x14, 0x84, 0xdf, 0x8b, 0x95, 0x02, + 0xf5, 0x7d, 0xda, 0xcb, 0x7d, 0x7f, 0x46, 0xf1, 0xfd, 0x66, 0xcd, 0xf7, 0x0b, 0xd2, 0xf9, 0xca, + 0xa4, 0xbb, 0x0f, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xf9, 0x49, 0xe4, 0x52, 0x2d, 0x04, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_file_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_file_error.pb.go new file mode 100644 index 0000000..e73cf9b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_file_error.pb.go @@ -0,0 +1,225 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/media_file_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible media file errors. +type MediaFileErrorEnum_MediaFileError int32 + +const ( + // Enum unspecified. 
+ MediaFileErrorEnum_UNSPECIFIED MediaFileErrorEnum_MediaFileError = 0 + // The received error code is not known in this version. + MediaFileErrorEnum_UNKNOWN MediaFileErrorEnum_MediaFileError = 1 + // Cannot create a standard icon type. + MediaFileErrorEnum_CANNOT_CREATE_STANDARD_ICON MediaFileErrorEnum_MediaFileError = 2 + // May only select Standard Icons alone. + MediaFileErrorEnum_CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES MediaFileErrorEnum_MediaFileError = 3 + // Image contains both a media file ID and data. + MediaFileErrorEnum_CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA MediaFileErrorEnum_MediaFileError = 4 + // A media file with given type and reference ID already exists. + MediaFileErrorEnum_DUPLICATE_MEDIA MediaFileErrorEnum_MediaFileError = 5 + // A required field was not specified or is an empty string. + MediaFileErrorEnum_EMPTY_FIELD MediaFileErrorEnum_MediaFileError = 6 + // A media file may only be modified once per call. + MediaFileErrorEnum_RESOURCE_REFERENCED_IN_MULTIPLE_OPS MediaFileErrorEnum_MediaFileError = 7 + // Field is not supported for the media sub type. + MediaFileErrorEnum_FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE MediaFileErrorEnum_MediaFileError = 8 + // The media file ID is invalid. + MediaFileErrorEnum_INVALID_MEDIA_FILE_ID MediaFileErrorEnum_MediaFileError = 9 + // The media subtype is invalid. + MediaFileErrorEnum_INVALID_MEDIA_SUB_TYPE MediaFileErrorEnum_MediaFileError = 10 + // The media file type is invalid. + MediaFileErrorEnum_INVALID_MEDIA_FILE_TYPE MediaFileErrorEnum_MediaFileError = 11 + // The mimetype is invalid. + MediaFileErrorEnum_INVALID_MIME_TYPE MediaFileErrorEnum_MediaFileError = 12 + // The media reference ID is invalid. + MediaFileErrorEnum_INVALID_REFERENCE_ID MediaFileErrorEnum_MediaFileError = 13 + // The YouTube video ID is invalid. + MediaFileErrorEnum_INVALID_YOU_TUBE_ID MediaFileErrorEnum_MediaFileError = 14 + // Media file has failed transcoding + MediaFileErrorEnum_MEDIA_FILE_FAILED_TRANSCODING MediaFileErrorEnum_MediaFileError = 15 + // Media file has not been transcoded. + MediaFileErrorEnum_MEDIA_NOT_TRANSCODED MediaFileErrorEnum_MediaFileError = 16 + // The media type does not match the actual media file's type. + MediaFileErrorEnum_MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE MediaFileErrorEnum_MediaFileError = 17 + // None of the fields have been specified. + MediaFileErrorEnum_NO_FIELDS_SPECIFIED MediaFileErrorEnum_MediaFileError = 18 + // One of reference ID or media file ID must be specified. + MediaFileErrorEnum_NULL_REFERENCE_ID_AND_MEDIA_ID MediaFileErrorEnum_MediaFileError = 19 + // The string has too many characters. + MediaFileErrorEnum_TOO_LONG MediaFileErrorEnum_MediaFileError = 20 + // The specified type is not supported. + MediaFileErrorEnum_UNSUPPORTED_TYPE MediaFileErrorEnum_MediaFileError = 21 + // YouTube is unavailable for requesting video data. + MediaFileErrorEnum_YOU_TUBE_SERVICE_UNAVAILABLE MediaFileErrorEnum_MediaFileError = 22 + // The YouTube video has a non positive duration. + MediaFileErrorEnum_YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION MediaFileErrorEnum_MediaFileError = 23 + // The YouTube video ID is syntactically valid but the video was not found. 
+ MediaFileErrorEnum_YOU_TUBE_VIDEO_NOT_FOUND MediaFileErrorEnum_MediaFileError = 24 +) + +var MediaFileErrorEnum_MediaFileError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_CREATE_STANDARD_ICON", + 3: "CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES", + 4: "CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA", + 5: "DUPLICATE_MEDIA", + 6: "EMPTY_FIELD", + 7: "RESOURCE_REFERENCED_IN_MULTIPLE_OPS", + 8: "FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE", + 9: "INVALID_MEDIA_FILE_ID", + 10: "INVALID_MEDIA_SUB_TYPE", + 11: "INVALID_MEDIA_FILE_TYPE", + 12: "INVALID_MIME_TYPE", + 13: "INVALID_REFERENCE_ID", + 14: "INVALID_YOU_TUBE_ID", + 15: "MEDIA_FILE_FAILED_TRANSCODING", + 16: "MEDIA_NOT_TRANSCODED", + 17: "MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE", + 18: "NO_FIELDS_SPECIFIED", + 19: "NULL_REFERENCE_ID_AND_MEDIA_ID", + 20: "TOO_LONG", + 21: "UNSUPPORTED_TYPE", + 22: "YOU_TUBE_SERVICE_UNAVAILABLE", + 23: "YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION", + 24: "YOU_TUBE_VIDEO_NOT_FOUND", +} +var MediaFileErrorEnum_MediaFileError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_CREATE_STANDARD_ICON": 2, + "CANNOT_SELECT_STANDARD_ICON_WITH_OTHER_TYPES": 3, + "CANNOT_SPECIFY_MEDIA_FILE_ID_AND_DATA": 4, + "DUPLICATE_MEDIA": 5, + "EMPTY_FIELD": 6, + "RESOURCE_REFERENCED_IN_MULTIPLE_OPS": 7, + "FIELD_NOT_SUPPORTED_FOR_MEDIA_SUB_TYPE": 8, + "INVALID_MEDIA_FILE_ID": 9, + "INVALID_MEDIA_SUB_TYPE": 10, + "INVALID_MEDIA_FILE_TYPE": 11, + "INVALID_MIME_TYPE": 12, + "INVALID_REFERENCE_ID": 13, + "INVALID_YOU_TUBE_ID": 14, + "MEDIA_FILE_FAILED_TRANSCODING": 15, + "MEDIA_NOT_TRANSCODED": 16, + "MEDIA_TYPE_DOES_NOT_MATCH_MEDIA_FILE_TYPE": 17, + "NO_FIELDS_SPECIFIED": 18, + "NULL_REFERENCE_ID_AND_MEDIA_ID": 19, + "TOO_LONG": 20, + "UNSUPPORTED_TYPE": 21, + "YOU_TUBE_SERVICE_UNAVAILABLE": 22, + "YOU_TUBE_VIDEO_HAS_NON_POSITIVE_DURATION": 23, + "YOU_TUBE_VIDEO_NOT_FOUND": 24, +} + +func (x MediaFileErrorEnum_MediaFileError) String() string { + return proto.EnumName(MediaFileErrorEnum_MediaFileError_name, int32(x)) +} +func (MediaFileErrorEnum_MediaFileError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_media_file_error_bd80bc9494b4cc45, []int{0, 0} +} + +// Container for enum describing possible media file errors. 
+type MediaFileErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaFileErrorEnum) Reset() { *m = MediaFileErrorEnum{} } +func (m *MediaFileErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MediaFileErrorEnum) ProtoMessage() {} +func (*MediaFileErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_error_bd80bc9494b4cc45, []int{0} +} +func (m *MediaFileErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaFileErrorEnum.Unmarshal(m, b) +} +func (m *MediaFileErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaFileErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MediaFileErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaFileErrorEnum.Merge(dst, src) +} +func (m *MediaFileErrorEnum) XXX_Size() int { + return xxx_messageInfo_MediaFileErrorEnum.Size(m) +} +func (m *MediaFileErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MediaFileErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaFileErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MediaFileErrorEnum)(nil), "google.ads.googleads.v1.errors.MediaFileErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MediaFileErrorEnum_MediaFileError", MediaFileErrorEnum_MediaFileError_name, MediaFileErrorEnum_MediaFileError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/media_file_error.proto", fileDescriptor_media_file_error_bd80bc9494b4cc45) +} + +var fileDescriptor_media_file_error_bd80bc9494b4cc45 = []byte{ + // 674 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0xfd, 0x9a, 0x7e, 0xa4, 0x65, 0x5a, 0xda, 0xe9, 0xa4, 0x7f, 0xb4, 0xa5, 0x40, 0x10, 0x3f, + 0x45, 0xc5, 0x21, 0x42, 0x6c, 0xcc, 0x6a, 0xe2, 0xb9, 0x4e, 0x46, 0x38, 0x33, 0x96, 0x3d, 0x76, + 0x15, 0x14, 0x69, 0x14, 0x48, 0x88, 0x22, 0xa5, 0x71, 0x15, 0x97, 0xae, 0x79, 0x0b, 0xf6, 0x2c, + 0x79, 0x14, 0x1e, 0x85, 0x05, 0xcf, 0x80, 0xec, 0x49, 0x5c, 0xc2, 0xdf, 0x2a, 0x57, 0xf7, 0x9c, + 0x73, 0xcf, 0xb9, 0x13, 0x5f, 0xf4, 0x72, 0x98, 0x24, 0xc3, 0xf1, 0xa0, 0xd6, 0xeb, 0xa7, 0x35, + 0x53, 0x66, 0xd5, 0x55, 0xbd, 0x36, 0x98, 0x4e, 0x93, 0x69, 0x5a, 0x3b, 0x1f, 0xf4, 0x47, 0x3d, + 0xfd, 0x7e, 0x34, 0x1e, 0xe8, 0xbc, 0x63, 0x5d, 0x4c, 0x93, 0xcb, 0x84, 0x1c, 0x1b, 0xae, 0xd5, + 0xeb, 0xa7, 0x56, 0x21, 0xb3, 0xae, 0xea, 0x96, 0x91, 0x1d, 0x1c, 0xcd, 0xc7, 0x5e, 0x8c, 0x6a, + 0xbd, 0xc9, 0x24, 0xb9, 0xec, 0x5d, 0x8e, 0x92, 0x49, 0x6a, 0xd4, 0xd5, 0x4f, 0x65, 0x44, 0xda, + 0xd9, 0x60, 0x77, 0x34, 0x1e, 0x40, 0xa6, 0x80, 0xc9, 0x87, 0xf3, 0xea, 0xc7, 0x32, 0xda, 0x58, + 0x6c, 0x93, 0x4d, 0xb4, 0x16, 0x89, 0xd0, 0x07, 0x87, 0xbb, 0x1c, 0x18, 0xfe, 0x8f, 0xac, 0xa1, + 0x95, 0x48, 0xbc, 0x16, 0xf2, 0x4c, 0xe0, 0x25, 0x72, 0x17, 0x1d, 0x3a, 0x54, 0x08, 0xa9, 0xb4, + 0x13, 0x00, 0x55, 0xa0, 0x43, 0x45, 0x05, 0xa3, 0x01, 0xd3, 0xdc, 0x91, 0x02, 0x97, 0xc8, 0x73, + 0x74, 0x3a, 0x23, 0x84, 0xe0, 0x81, 0xa3, 0x16, 0x09, 0xfa, 0x8c, 0xab, 0x96, 0x96, 0xaa, 0x05, + 0x81, 0x56, 0x1d, 0x1f, 0x42, 0xbc, 0x4c, 0x4e, 0xd0, 0xc3, 0xb9, 0x22, 0x77, 0xed, 0xe8, 0x36, + 0x30, 0x4e, 0xb5, 0xcb, 0x3d, 0xd0, 0x9c, 0x69, 0x2a, 0x98, 0x66, 0x54, 0x51, 0xfc, 0x3f, 0xa9, + 0xa0, 0x4d, 0x16, 0xf9, 0x1e, 0x77, 0x32, 0xe7, 0x9c, 0x85, 0x6f, 0x64, 0x81, 0xa1, 0xed, 0xab, + 0x8e, 0x76, 0x39, 0x78, 0x0c, 0x97, 0xc9, 0x63, 0xf4, 0x20, 
0x80, 0x50, 0x46, 0x81, 0x03, 0x3a, + 0x00, 0x17, 0x02, 0x10, 0x0e, 0x30, 0xcd, 0x85, 0x6e, 0x47, 0x9e, 0xe2, 0xbe, 0x07, 0x5a, 0xfa, + 0x21, 0x5e, 0x21, 0x4f, 0xd1, 0xa3, 0x5c, 0xa3, 0x73, 0xf3, 0xc8, 0xf7, 0x65, 0xa0, 0x80, 0x69, + 0x57, 0x06, 0xb3, 0x08, 0x61, 0xd4, 0xc8, 0x63, 0xe2, 0x55, 0x72, 0x1b, 0xed, 0x70, 0x11, 0x53, + 0x8f, 0xb3, 0xc5, 0x78, 0xf8, 0x26, 0x39, 0x40, 0xbb, 0x8b, 0x50, 0x21, 0x43, 0xe4, 0x10, 0xed, + 0xfd, 0x41, 0x96, 0x83, 0x6b, 0x64, 0x07, 0x6d, 0x15, 0x20, 0x6f, 0xcf, 0xda, 0xeb, 0x64, 0x1f, + 0x6d, 0xcf, 0xdb, 0x45, 0xfc, 0xcc, 0xe9, 0x16, 0xd9, 0x43, 0x95, 0x39, 0xd2, 0x91, 0x91, 0x56, + 0x51, 0x23, 0x07, 0x36, 0xc8, 0x7d, 0x74, 0xe7, 0xa7, 0xf1, 0x2e, 0xe5, 0x1e, 0x30, 0xad, 0x02, + 0x2a, 0x42, 0x47, 0x32, 0x2e, 0x9a, 0x78, 0x33, 0x9b, 0x6a, 0x28, 0xd9, 0xb2, 0x73, 0x08, 0x18, + 0xc6, 0xe4, 0x19, 0x3a, 0x31, 0x48, 0xe6, 0xaf, 0x99, 0x84, 0x30, 0xe7, 0xb4, 0xa9, 0x72, 0x5a, + 0xbf, 0xa5, 0xde, 0xca, 0x42, 0x08, 0x69, 0x1e, 0x3b, 0xd4, 0xd7, 0x1f, 0x0a, 0x21, 0x55, 0x74, + 0x2c, 0x22, 0xcf, 0x5b, 0x08, 0x9d, 0xff, 0x7b, 0x66, 0x06, 0x67, 0xb8, 0x42, 0xd6, 0xd1, 0xaa, + 0x92, 0x52, 0x7b, 0x52, 0x34, 0xf1, 0x36, 0xd9, 0x46, 0x38, 0x12, 0xd7, 0x0f, 0x9f, 0x1b, 0xec, + 0x90, 0x7b, 0xe8, 0xa8, 0xd8, 0x2e, 0x84, 0x20, 0xe6, 0x0e, 0xe8, 0x48, 0xd0, 0x98, 0x72, 0x8f, + 0x36, 0x3c, 0xc0, 0xbb, 0xe4, 0x14, 0x3d, 0x29, 0x18, 0x31, 0x67, 0x20, 0x75, 0x8b, 0x66, 0xa1, + 0x85, 0xf6, 0x65, 0xc8, 0x15, 0x8f, 0x41, 0xb3, 0x28, 0xa0, 0x8a, 0x4b, 0x81, 0xf7, 0xc8, 0x11, + 0xda, 0xff, 0x85, 0x9d, 0xad, 0xe7, 0xca, 0x48, 0x30, 0xbc, 0xdf, 0xf8, 0xbe, 0x84, 0xaa, 0xef, + 0x92, 0x73, 0xeb, 0xdf, 0xe7, 0xd5, 0xa8, 0x2c, 0x9e, 0x89, 0x9f, 0x5d, 0x95, 0xbf, 0xf4, 0x86, + 0xcd, 0x64, 0xc3, 0x64, 0xdc, 0x9b, 0x0c, 0xad, 0x64, 0x3a, 0xac, 0x0d, 0x07, 0x93, 0xfc, 0xe6, + 0xe6, 0xc7, 0x7d, 0x31, 0x4a, 0xff, 0x76, 0xeb, 0xaf, 0xcc, 0xcf, 0xe7, 0xd2, 0x72, 0x93, 0xd2, + 0x2f, 0xa5, 0xe3, 0xa6, 0x19, 0x46, 0xfb, 0xa9, 0x65, 0xca, 0xac, 0x8a, 0xeb, 0x56, 0x6e, 0x99, + 0x7e, 0x9d, 0x13, 0xba, 0xb4, 0x9f, 0x76, 0x0b, 0x42, 0x37, 0xae, 0x77, 0x0d, 0xe1, 0x5b, 0xa9, + 0x6a, 0xba, 0xb6, 0x4d, 0xfb, 0xa9, 0x6d, 0x17, 0x14, 0xdb, 0x8e, 0xeb, 0xb6, 0x6d, 0x48, 0x6f, + 0xcb, 0x79, 0xba, 0x17, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x8c, 0xd7, 0xc6, 0xf8, 0x88, 0x04, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_upload_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_upload_error.pb.go new file mode 100644 index 0000000..ebc3b92 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/media_upload_error.pb.go @@ -0,0 +1,129 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/media_upload_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible media uploading errors. +type MediaUploadErrorEnum_MediaUploadError int32 + +const ( + // Enum unspecified. + MediaUploadErrorEnum_UNSPECIFIED MediaUploadErrorEnum_MediaUploadError = 0 + // The received error code is not known in this version. + MediaUploadErrorEnum_UNKNOWN MediaUploadErrorEnum_MediaUploadError = 1 + // The uploaded file is too big. + MediaUploadErrorEnum_FILE_TOO_BIG MediaUploadErrorEnum_MediaUploadError = 2 + // Image data is unparseable. + MediaUploadErrorEnum_UNPARSEABLE_IMAGE MediaUploadErrorEnum_MediaUploadError = 3 + // Animated images are not allowed. + MediaUploadErrorEnum_ANIMATED_IMAGE_NOT_ALLOWED MediaUploadErrorEnum_MediaUploadError = 4 + // The image or media bundle format is not allowed. + MediaUploadErrorEnum_FORMAT_NOT_ALLOWED MediaUploadErrorEnum_MediaUploadError = 5 +) + +var MediaUploadErrorEnum_MediaUploadError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "FILE_TOO_BIG", + 3: "UNPARSEABLE_IMAGE", + 4: "ANIMATED_IMAGE_NOT_ALLOWED", + 5: "FORMAT_NOT_ALLOWED", +} +var MediaUploadErrorEnum_MediaUploadError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "FILE_TOO_BIG": 2, + "UNPARSEABLE_IMAGE": 3, + "ANIMATED_IMAGE_NOT_ALLOWED": 4, + "FORMAT_NOT_ALLOWED": 5, +} + +func (x MediaUploadErrorEnum_MediaUploadError) String() string { + return proto.EnumName(MediaUploadErrorEnum_MediaUploadError_name, int32(x)) +} +func (MediaUploadErrorEnum_MediaUploadError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_media_upload_error_1210dc4a8093b4d0, []int{0, 0} +} + +// Container for enum describing possible media uploading errors. +type MediaUploadErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaUploadErrorEnum) Reset() { *m = MediaUploadErrorEnum{} } +func (m *MediaUploadErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MediaUploadErrorEnum) ProtoMessage() {} +func (*MediaUploadErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_media_upload_error_1210dc4a8093b4d0, []int{0} +} +func (m *MediaUploadErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaUploadErrorEnum.Unmarshal(m, b) +} +func (m *MediaUploadErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaUploadErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MediaUploadErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaUploadErrorEnum.Merge(dst, src) +} +func (m *MediaUploadErrorEnum) XXX_Size() int { + return xxx_messageInfo_MediaUploadErrorEnum.Size(m) +} +func (m *MediaUploadErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MediaUploadErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaUploadErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MediaUploadErrorEnum)(nil), "google.ads.googleads.v1.errors.MediaUploadErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MediaUploadErrorEnum_MediaUploadError", MediaUploadErrorEnum_MediaUploadError_name, MediaUploadErrorEnum_MediaUploadError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/media_upload_error.proto", fileDescriptor_media_upload_error_1210dc4a8093b4d0) +} + +var fileDescriptor_media_upload_error_1210dc4a8093b4d0 = []byte{ + // 357 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcf, 0x6a, 0xab, 0x40, + 0x18, 0xc5, 0xaf, 0xe6, 0xfe, 0x81, 0xc9, 0x85, 0xda, 0xa1, 0xe9, 0x22, 0x94, 0x2c, 0x7c, 0x80, + 0x11, 0xe9, 0xa2, 0x30, 0x5d, 0x8d, 0x75, 0x22, 0x52, 0xff, 0x91, 0x68, 0x02, 0x45, 0x10, 0xdb, + 0x11, 0x11, 0x12, 0x47, 0x9c, 0x24, 0xef, 0xd1, 0x57, 0xe8, 0x32, 0x8f, 0xd2, 0x47, 0xe9, 0xaa, + 0x8f, 0x50, 0x74, 0x9a, 0x40, 0x03, 0xed, 0xca, 0xc3, 0xf1, 0x77, 0x8e, 0xdf, 0xf7, 0x09, 0x6e, + 0x4a, 0xce, 0xcb, 0x55, 0x61, 0xe4, 0x4c, 0x18, 0x52, 0x76, 0x6a, 0x67, 0x1a, 0x45, 0xdb, 0xf2, + 0x56, 0x18, 0xeb, 0x82, 0x55, 0x79, 0xb6, 0x6d, 0x56, 0x3c, 0x67, 0x59, 0xef, 0xa1, 0xa6, 0xe5, + 0x1b, 0x0e, 0x27, 0x92, 0x46, 0x39, 0x13, 0xe8, 0x18, 0x44, 0x3b, 0x13, 0xc9, 0xe0, 0xf8, 0xea, + 0x50, 0xdc, 0x54, 0x46, 0x5e, 0xd7, 0x7c, 0x93, 0x6f, 0x2a, 0x5e, 0x0b, 0x99, 0xd6, 0xf7, 0x0a, + 0xb8, 0xf0, 0xbb, 0xea, 0xa4, 0x6f, 0xa6, 0x5d, 0x86, 0xd6, 0xdb, 0xb5, 0xfe, 0xac, 0x00, 0xed, + 0xf4, 0x05, 0x3c, 0x03, 0xc3, 0x24, 0x98, 0x47, 0xf4, 0xce, 0x9d, 0xba, 0xd4, 0xd6, 0x7e, 0xc1, + 0x21, 0xf8, 0x97, 0x04, 0xf7, 0x41, 0xb8, 0x0c, 0x34, 0x05, 0x6a, 0xe0, 0xff, 0xd4, 0xf5, 0x68, + 0x16, 0x87, 0x61, 0x66, 0xb9, 0x8e, 0xa6, 0xc2, 0x11, 0x38, 0x4f, 0x82, 0x88, 0xcc, 0xe6, 0x94, + 0x58, 0x1e, 0xcd, 0x5c, 0x9f, 0x38, 0x54, 0x1b, 0xc0, 0x09, 0x18, 0x93, 0xc0, 0xf5, 0x49, 0x4c, + 0x6d, 0xe9, 0x65, 0x41, 0x18, 0x67, 0xc4, 0xf3, 0xc2, 0x25, 0xb5, 0xb5, 0xdf, 0xf0, 0x12, 0xc0, + 0x69, 0x38, 0xf3, 0x49, 0xfc, 0xc5, 0xff, 0x63, 0xbd, 0x2b, 0x40, 0x7f, 0xe2, 0x6b, 0xf4, 0xf3, + 0xc6, 0xd6, 0xe8, 0x74, 0xee, 0xa8, 0x5b, 0x35, 0x52, 0x1e, 0xec, 0xcf, 0x60, 0xc9, 0x57, 0x79, + 0x5d, 0x22, 0xde, 0x96, 0x46, 0x59, 0xd4, 0xfd, 0x21, 0x0e, 0x37, 0x6f, 0x2a, 0xf1, 0xdd, 0x2f, + 0xb8, 0x95, 0x8f, 0x17, 0x75, 0xe0, 0x10, 0xb2, 0x57, 0x27, 0x8e, 0x2c, 0x23, 0x4c, 0x20, 0x29, + 0x3b, 0xb5, 0x30, 0x51, 0xff, 0x49, 0xf1, 0x7a, 0x00, 0x52, 0xc2, 0x44, 0x7a, 0x04, 0xd2, 0x85, + 0x99, 0x4a, 0xe0, 0x4d, 0xd5, 0xa5, 0x8b, 0x31, 0x61, 0x02, 0xe3, 0x23, 0x82, 0xf1, 0xc2, 0xc4, + 0x58, 0x42, 0x8f, 0x7f, 0xfb, 0xe9, 0xae, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x14, 0x70, 0xd8, + 0xc2, 0x1f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/multiplier_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/multiplier_error.pb.go new file mode 100644 index 0000000..9921b6a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/multiplier_error.pb.go @@ -0,0 +1,172 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/multiplier_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible multiplier errors. +type MultiplierErrorEnum_MultiplierError int32 + +const ( + // Enum unspecified. 
+ MultiplierErrorEnum_UNSPECIFIED MultiplierErrorEnum_MultiplierError = 0 + // The received error code is not known in this version. + MultiplierErrorEnum_UNKNOWN MultiplierErrorEnum_MultiplierError = 1 + // Multiplier value is too high + MultiplierErrorEnum_MULTIPLIER_TOO_HIGH MultiplierErrorEnum_MultiplierError = 2 + // Multiplier value is too low + MultiplierErrorEnum_MULTIPLIER_TOO_LOW MultiplierErrorEnum_MultiplierError = 3 + // Too many fractional digits + MultiplierErrorEnum_TOO_MANY_FRACTIONAL_DIGITS MultiplierErrorEnum_MultiplierError = 4 + // A multiplier cannot be set for this bidding strategy + MultiplierErrorEnum_MULTIPLIER_NOT_ALLOWED_FOR_BIDDING_STRATEGY MultiplierErrorEnum_MultiplierError = 5 + // A multiplier cannot be set when there is no base bid (e.g., content max + // cpc) + MultiplierErrorEnum_MULTIPLIER_NOT_ALLOWED_WHEN_BASE_BID_IS_MISSING MultiplierErrorEnum_MultiplierError = 6 + // A bid multiplier must be specified + MultiplierErrorEnum_NO_MULTIPLIER_SPECIFIED MultiplierErrorEnum_MultiplierError = 7 + // Multiplier causes bid to exceed daily budget + MultiplierErrorEnum_MULTIPLIER_CAUSES_BID_TO_EXCEED_DAILY_BUDGET MultiplierErrorEnum_MultiplierError = 8 + // Multiplier causes bid to exceed monthly budget + MultiplierErrorEnum_MULTIPLIER_CAUSES_BID_TO_EXCEED_MONTHLY_BUDGET MultiplierErrorEnum_MultiplierError = 9 + // Multiplier causes bid to exceed custom budget + MultiplierErrorEnum_MULTIPLIER_CAUSES_BID_TO_EXCEED_CUSTOM_BUDGET MultiplierErrorEnum_MultiplierError = 10 + // Multiplier causes bid to exceed maximum allowed bid + MultiplierErrorEnum_MULTIPLIER_CAUSES_BID_TO_EXCEED_MAX_ALLOWED_BID MultiplierErrorEnum_MultiplierError = 11 + // Multiplier causes bid to become less than the minimum bid allowed + MultiplierErrorEnum_BID_LESS_THAN_MIN_ALLOWED_BID_WITH_MULTIPLIER MultiplierErrorEnum_MultiplierError = 12 + // Multiplier type (cpc vs. 
cpm) needs to match campaign's bidding strategy + MultiplierErrorEnum_MULTIPLIER_AND_BIDDING_STRATEGY_TYPE_MISMATCH MultiplierErrorEnum_MultiplierError = 13 +) + +var MultiplierErrorEnum_MultiplierError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "MULTIPLIER_TOO_HIGH", + 3: "MULTIPLIER_TOO_LOW", + 4: "TOO_MANY_FRACTIONAL_DIGITS", + 5: "MULTIPLIER_NOT_ALLOWED_FOR_BIDDING_STRATEGY", + 6: "MULTIPLIER_NOT_ALLOWED_WHEN_BASE_BID_IS_MISSING", + 7: "NO_MULTIPLIER_SPECIFIED", + 8: "MULTIPLIER_CAUSES_BID_TO_EXCEED_DAILY_BUDGET", + 9: "MULTIPLIER_CAUSES_BID_TO_EXCEED_MONTHLY_BUDGET", + 10: "MULTIPLIER_CAUSES_BID_TO_EXCEED_CUSTOM_BUDGET", + 11: "MULTIPLIER_CAUSES_BID_TO_EXCEED_MAX_ALLOWED_BID", + 12: "BID_LESS_THAN_MIN_ALLOWED_BID_WITH_MULTIPLIER", + 13: "MULTIPLIER_AND_BIDDING_STRATEGY_TYPE_MISMATCH", +} +var MultiplierErrorEnum_MultiplierError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "MULTIPLIER_TOO_HIGH": 2, + "MULTIPLIER_TOO_LOW": 3, + "TOO_MANY_FRACTIONAL_DIGITS": 4, + "MULTIPLIER_NOT_ALLOWED_FOR_BIDDING_STRATEGY": 5, + "MULTIPLIER_NOT_ALLOWED_WHEN_BASE_BID_IS_MISSING": 6, + "NO_MULTIPLIER_SPECIFIED": 7, + "MULTIPLIER_CAUSES_BID_TO_EXCEED_DAILY_BUDGET": 8, + "MULTIPLIER_CAUSES_BID_TO_EXCEED_MONTHLY_BUDGET": 9, + "MULTIPLIER_CAUSES_BID_TO_EXCEED_CUSTOM_BUDGET": 10, + "MULTIPLIER_CAUSES_BID_TO_EXCEED_MAX_ALLOWED_BID": 11, + "BID_LESS_THAN_MIN_ALLOWED_BID_WITH_MULTIPLIER": 12, + "MULTIPLIER_AND_BIDDING_STRATEGY_TYPE_MISMATCH": 13, +} + +func (x MultiplierErrorEnum_MultiplierError) String() string { + return proto.EnumName(MultiplierErrorEnum_MultiplierError_name, int32(x)) +} +func (MultiplierErrorEnum_MultiplierError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_multiplier_error_e904d1bae997600a, []int{0, 0} +} + +// Container for enum describing possible multiplier errors. 
+type MultiplierErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MultiplierErrorEnum) Reset() { *m = MultiplierErrorEnum{} } +func (m *MultiplierErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MultiplierErrorEnum) ProtoMessage() {} +func (*MultiplierErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_multiplier_error_e904d1bae997600a, []int{0} +} +func (m *MultiplierErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MultiplierErrorEnum.Unmarshal(m, b) +} +func (m *MultiplierErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MultiplierErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MultiplierErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MultiplierErrorEnum.Merge(dst, src) +} +func (m *MultiplierErrorEnum) XXX_Size() int { + return xxx_messageInfo_MultiplierErrorEnum.Size(m) +} +func (m *MultiplierErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MultiplierErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MultiplierErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MultiplierErrorEnum)(nil), "google.ads.googleads.v1.errors.MultiplierErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MultiplierErrorEnum_MultiplierError", MultiplierErrorEnum_MultiplierError_name, MultiplierErrorEnum_MultiplierError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/multiplier_error.proto", fileDescriptor_multiplier_error_e904d1bae997600a) +} + +var fileDescriptor_multiplier_error_e904d1bae997600a = []byte{ + // 523 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xd1, 0x6e, 0xd3, 0x3c, + 0x14, 0xc7, 0xbf, 0x76, 0xfd, 0x36, 0x70, 0x41, 0xb3, 0x3c, 0xc4, 0xa4, 0x81, 0x7a, 0xd1, 0x5b, + 0x20, 0x21, 0x4c, 0xdc, 0x84, 0x2b, 0x27, 0x76, 0x13, 0x8b, 0xc4, 0xae, 0x6a, 0xa7, 0x5d, 0x51, + 0x25, 0xab, 0xd0, 0x2a, 0xaa, 0xd4, 0x26, 0x55, 0xd2, 0xed, 0x81, 0xb8, 0xe4, 0x05, 0x78, 0x07, + 0x6e, 0x78, 0x0f, 0x6e, 0x78, 0x05, 0x94, 0x9a, 0x76, 0x55, 0x60, 0xec, 0x2a, 0x27, 0xc7, 0xff, + 0xdf, 0x39, 0x3e, 0x47, 0x7f, 0x83, 0xb7, 0x69, 0x9e, 0xa7, 0xcb, 0xb9, 0x3d, 0x9d, 0x95, 0xb6, + 0x09, 0xab, 0xe8, 0xc6, 0xb1, 0xe7, 0x45, 0x91, 0x17, 0xa5, 0xbd, 0xba, 0x5e, 0x6e, 0x16, 0xeb, + 0xe5, 0x62, 0x5e, 0xe8, 0x6d, 0xc6, 0x5a, 0x17, 0xf9, 0x26, 0x47, 0x1d, 0xa3, 0xb5, 0xa6, 0xb3, + 0xd2, 0xda, 0x63, 0xd6, 0x8d, 0x63, 0x19, 0xec, 0xe2, 0xf9, 0xae, 0xec, 0x7a, 0x61, 0x4f, 0xb3, + 0x2c, 0xdf, 0x4c, 0x37, 0x8b, 0x3c, 0x2b, 0x0d, 0xdd, 0xfd, 0xde, 0x02, 0x67, 0xf1, 0xbe, 0x30, + 0xad, 0x10, 0x9a, 0x5d, 0xaf, 0xba, 0x5f, 0x5b, 0xe0, 0xb4, 0x96, 0x47, 0xa7, 0xa0, 0x9d, 0x70, + 0xd9, 0xa7, 0x3e, 0xeb, 0x31, 0x4a, 0xe0, 0x7f, 0xa8, 0x0d, 0x4e, 0x12, 0xfe, 0x9e, 0x8b, 0x11, + 0x87, 0x0d, 0x74, 0x0e, 0xce, 0xe2, 0x24, 0x52, 0xac, 0x1f, 0x31, 0x3a, 0xd0, 0x4a, 0x08, 0x1d, + 0xb2, 0x20, 0x84, 0x4d, 0xf4, 0x14, 0xa0, 0xda, 0x41, 0x24, 0x46, 0xf0, 0x08, 0x75, 0xc0, 0x45, + 0xf5, 0x13, 0x63, 0x3e, 0xd6, 0xbd, 0x01, 0xf6, 0x15, 0x13, 0x1c, 0x47, 0x9a, 0xb0, 0x80, 0x29, + 0x09, 0x5b, 0xc8, 0x06, 0x2f, 0x0e, 0x38, 0x2e, 0x94, 0xc6, 0x51, 0x24, 0x46, 0x94, 0xe8, 0x9e, + 0x18, 0x68, 0x8f, 0x11, 0xc2, 0x78, 0xa0, 0xa5, 0x1a, 0x60, 0x45, 0x83, 0x31, 0xfc, 0x1f, 0x5d, + 0x02, 0xfb, 0x0e, 0x60, 0x14, 0x52, 0xae, 0x3d, 0x2c, 0x69, 0x85, 0x69, 0x26, 0x75, 0xcc, 0xa4, + 0x64, 0x3c, 0x80, 0xc7, 0xe8, 0x19, 
0x38, 0xe7, 0x42, 0x1f, 0x70, 0xb7, 0x03, 0x9e, 0xa0, 0xd7, + 0xe0, 0xe5, 0xc1, 0x89, 0x8f, 0x13, 0x49, 0xe5, 0xb6, 0x84, 0x12, 0x9a, 0x5e, 0xf9, 0x94, 0x12, + 0x4d, 0x30, 0x8b, 0xc6, 0xda, 0x4b, 0x48, 0x40, 0x15, 0x7c, 0x80, 0xde, 0x00, 0xeb, 0x3e, 0x22, + 0x16, 0x5c, 0x85, 0xb7, 0xcc, 0x43, 0xe4, 0x80, 0x57, 0xf7, 0x31, 0x7e, 0x22, 0x95, 0x88, 0x77, + 0x08, 0xa8, 0x8d, 0xfa, 0xf7, 0x36, 0xf8, 0x6a, 0x3f, 0xbf, 0xc7, 0x08, 0x6c, 0x57, 0x7d, 0x2a, + 0x49, 0x44, 0xa5, 0xd4, 0x2a, 0xc4, 0x5c, 0xc7, 0x8c, 0x1f, 0x4a, 0xf4, 0x88, 0xa9, 0xf0, 0x60, + 0x15, 0xf0, 0x51, 0xed, 0x6a, 0x98, 0x93, 0x3f, 0xf6, 0xae, 0xd5, 0xb8, 0x4f, 0xab, 0x7d, 0xc6, + 0x58, 0xf9, 0x21, 0x7c, 0xec, 0xfd, 0x6c, 0x80, 0xee, 0xa7, 0x7c, 0x65, 0xfd, 0xdb, 0x96, 0xde, + 0x93, 0x9a, 0xbb, 0xfa, 0x95, 0x1d, 0xfb, 0x8d, 0x0f, 0xe4, 0x37, 0x97, 0xe6, 0xcb, 0x69, 0x96, + 0x5a, 0x79, 0x91, 0xda, 0xe9, 0x3c, 0xdb, 0x9a, 0x75, 0xf7, 0x2a, 0xd6, 0x8b, 0xf2, 0xae, 0x47, + 0xf2, 0xce, 0x7c, 0x3e, 0x37, 0x8f, 0x02, 0x8c, 0xbf, 0x34, 0x3b, 0x81, 0x29, 0x86, 0x67, 0xa5, + 0x65, 0xc2, 0x2a, 0x1a, 0x3a, 0xd6, 0xb6, 0x65, 0xf9, 0x6d, 0x27, 0x98, 0xe0, 0x59, 0x39, 0xd9, + 0x0b, 0x26, 0x43, 0x67, 0x62, 0x04, 0x3f, 0x9a, 0x5d, 0x93, 0x75, 0x5d, 0x3c, 0x2b, 0x5d, 0x77, + 0x2f, 0x71, 0xdd, 0xa1, 0xe3, 0xba, 0x46, 0xf4, 0xf1, 0x78, 0x7b, 0xbb, 0xcb, 0x5f, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x28, 0xe2, 0xc6, 0x65, 0xc1, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_error.pb.go new file mode 100644 index 0000000..2189ee1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_error.pb.go @@ -0,0 +1,140 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/mutate_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible mutate errors. +type MutateErrorEnum_MutateError int32 + +const ( + // Enum unspecified. + MutateErrorEnum_UNSPECIFIED MutateErrorEnum_MutateError = 0 + // The received error code is not known in this version. + MutateErrorEnum_UNKNOWN MutateErrorEnum_MutateError = 1 + // Requested resource was not found. + MutateErrorEnum_RESOURCE_NOT_FOUND MutateErrorEnum_MutateError = 3 + // Cannot mutate the same resource twice in one request. + MutateErrorEnum_ID_EXISTS_IN_MULTIPLE_MUTATES MutateErrorEnum_MutateError = 7 + // The field's contents don't match another field that represents the same + // data. + MutateErrorEnum_INCONSISTENT_FIELD_VALUES MutateErrorEnum_MutateError = 8 + // Mutates are not allowed for the requested resource. + MutateErrorEnum_MUTATE_NOT_ALLOWED MutateErrorEnum_MutateError = 9 + // The resource isn't in Google Ads. It belongs to another ads system. 
+ MutateErrorEnum_RESOURCE_NOT_IN_GOOGLE_ADS MutateErrorEnum_MutateError = 10 + // The resource being created already exists. + MutateErrorEnum_RESOURCE_ALREADY_EXISTS MutateErrorEnum_MutateError = 11 +) + +var MutateErrorEnum_MutateError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "RESOURCE_NOT_FOUND", + 7: "ID_EXISTS_IN_MULTIPLE_MUTATES", + 8: "INCONSISTENT_FIELD_VALUES", + 9: "MUTATE_NOT_ALLOWED", + 10: "RESOURCE_NOT_IN_GOOGLE_ADS", + 11: "RESOURCE_ALREADY_EXISTS", +} +var MutateErrorEnum_MutateError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "RESOURCE_NOT_FOUND": 3, + "ID_EXISTS_IN_MULTIPLE_MUTATES": 7, + "INCONSISTENT_FIELD_VALUES": 8, + "MUTATE_NOT_ALLOWED": 9, + "RESOURCE_NOT_IN_GOOGLE_ADS": 10, + "RESOURCE_ALREADY_EXISTS": 11, +} + +func (x MutateErrorEnum_MutateError) String() string { + return proto.EnumName(MutateErrorEnum_MutateError_name, int32(x)) +} +func (MutateErrorEnum_MutateError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_mutate_error_703af8e0974ec265, []int{0, 0} +} + +// Container for enum describing possible mutate errors. +type MutateErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateErrorEnum) Reset() { *m = MutateErrorEnum{} } +func (m *MutateErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MutateErrorEnum) ProtoMessage() {} +func (*MutateErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_error_703af8e0974ec265, []int{0} +} +func (m *MutateErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateErrorEnum.Unmarshal(m, b) +} +func (m *MutateErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MutateErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateErrorEnum.Merge(dst, src) +} +func (m *MutateErrorEnum) XXX_Size() int { + return xxx_messageInfo_MutateErrorEnum.Size(m) +} +func (m *MutateErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MutateErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MutateErrorEnum)(nil), "google.ads.googleads.v1.errors.MutateErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MutateErrorEnum_MutateError", MutateErrorEnum_MutateError_name, MutateErrorEnum_MutateError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/mutate_error.proto", fileDescriptor_mutate_error_703af8e0974ec265) +} + +var fileDescriptor_mutate_error_703af8e0974ec265 = []byte{ + // 399 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x8e, 0xd3, 0x30, + 0x18, 0x85, 0x49, 0x47, 0x62, 0xc0, 0x5d, 0x8c, 0xe5, 0x05, 0x88, 0x81, 0xa9, 0x44, 0x0f, 0xe0, + 0x28, 0x62, 0x67, 0x56, 0x9e, 0xda, 0xad, 0x2c, 0x5c, 0xbb, 0xaa, 0x93, 0x0e, 0xa0, 0x4a, 0x56, + 0x20, 0x51, 0x54, 0x69, 0x1a, 0x57, 0x71, 0x66, 0x0e, 0xc4, 0x92, 0xa3, 0x70, 0x04, 0x8e, 0x00, + 0x12, 0x67, 0x40, 0x89, 0xa7, 0x51, 0x59, 0x30, 0xab, 0xbc, 0xd8, 0xdf, 0x7b, 0xfe, 0xf5, 0x7e, + 0x90, 0x54, 0xce, 0x55, 0xb7, 0x65, 0x9c, 0x17, 0x3e, 0x0e, 0xb2, 0x53, 0xf7, 0x49, 0x5c, 0x36, + 0x8d, 0x6b, 0x7c, 0xbc, 0xbf, 0x6b, 0xf3, 0xb6, 0xb4, 0xfd, 0x1f, 0x3e, 0x34, 0xae, 0x75, 0x68, + 0x12, 0x38, 0x9c, 0x17, 0x1e, 0x0f, 0x16, 0x7c, 0x9f, 0xe0, 0x60, 0xb9, 0x7c, 0x73, 0x8c, 0x3c, + 0xec, 0xe2, 
0xbc, 0xae, 0x5d, 0x9b, 0xb7, 0x3b, 0x57, 0xfb, 0xe0, 0x9e, 0xfe, 0x89, 0xc0, 0xc5, + 0xb2, 0x0f, 0xe5, 0x1d, 0xce, 0xeb, 0xbb, 0xfd, 0xf4, 0x67, 0x04, 0xc6, 0x27, 0x67, 0xe8, 0x02, + 0x8c, 0x33, 0x65, 0x56, 0x7c, 0x26, 0xe6, 0x82, 0x33, 0xf8, 0x04, 0x8d, 0xc1, 0x79, 0xa6, 0x3e, + 0x28, 0x7d, 0xa3, 0x60, 0x84, 0x5e, 0x00, 0xb4, 0xe6, 0x46, 0x67, 0xeb, 0x19, 0xb7, 0x4a, 0xa7, + 0x76, 0xae, 0x33, 0xc5, 0xe0, 0x19, 0x7a, 0x0b, 0xae, 0x04, 0xb3, 0xfc, 0xa3, 0x30, 0xa9, 0xb1, + 0x42, 0xd9, 0x65, 0x26, 0x53, 0xb1, 0x92, 0xdc, 0x2e, 0xb3, 0x94, 0xa6, 0xdc, 0xc0, 0x73, 0x74, + 0x05, 0x5e, 0x09, 0x35, 0xd3, 0xca, 0x08, 0x93, 0x72, 0x95, 0xda, 0xb9, 0xe0, 0x92, 0xd9, 0x0d, + 0x95, 0x19, 0x37, 0xf0, 0x59, 0x97, 0x1c, 0xd8, 0x3e, 0x97, 0x4a, 0xa9, 0x6f, 0x38, 0x83, 0xcf, + 0xd1, 0x04, 0x5c, 0xfe, 0xf3, 0xa2, 0x50, 0x76, 0xa1, 0xf5, 0x42, 0x72, 0x4b, 0x99, 0x81, 0x00, + 0xbd, 0x06, 0x2f, 0x87, 0x7b, 0x2a, 0xd7, 0x9c, 0xb2, 0x4f, 0x0f, 0x73, 0xc0, 0xf1, 0xf5, 0xef, + 0x08, 0x4c, 0xbf, 0xba, 0x3d, 0x7e, 0xbc, 0xb5, 0x6b, 0x78, 0x52, 0xc0, 0xaa, 0x6b, 0x6a, 0x15, + 0x7d, 0x66, 0x0f, 0x9e, 0xca, 0xdd, 0xe6, 0x75, 0x85, 0x5d, 0x53, 0xc5, 0x55, 0x59, 0xf7, 0x3d, + 0x1e, 0x97, 0x75, 0xd8, 0xf9, 0xff, 0xed, 0xee, 0x7d, 0xf8, 0x7c, 0x1b, 0x9d, 0x2d, 0x28, 0xfd, + 0x3e, 0x9a, 0x2c, 0x42, 0x18, 0x2d, 0x3c, 0x0e, 0xb2, 0x53, 0x9b, 0x04, 0xf7, 0x4f, 0xfa, 0x1f, + 0x47, 0x60, 0x4b, 0x0b, 0xbf, 0x1d, 0x80, 0xed, 0x26, 0xd9, 0x06, 0xe0, 0xd7, 0x68, 0x1a, 0x4e, + 0x09, 0xa1, 0x85, 0x27, 0x64, 0x40, 0x08, 0xd9, 0x24, 0x84, 0x04, 0xe8, 0xcb, 0xd3, 0x7e, 0xba, + 0x77, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xde, 0x4c, 0xb9, 0x2c, 0x58, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_job_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_job_error.pb.go new file mode 100644 index 0000000..d0828c5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/mutate_job_error.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/mutate_job_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible request errors. +type MutateJobErrorEnum_MutateJobError int32 + +const ( + // Enum unspecified. + MutateJobErrorEnum_UNSPECIFIED MutateJobErrorEnum_MutateJobError = 0 + // The received error code is not known in this version. + MutateJobErrorEnum_UNKNOWN MutateJobErrorEnum_MutateJobError = 1 + // The mutate job cannot add more operations or run after it has started + // running. + MutateJobErrorEnum_CANNOT_MODIFY_JOB_AFTER_JOB_STARTS_RUNNING MutateJobErrorEnum_MutateJobError = 2 + // The operations for an AddMutateJobOperations request were empty. 
+ MutateJobErrorEnum_EMPTY_OPERATIONS MutateJobErrorEnum_MutateJobError = 3 + // The sequence token for an AddMutateJobOperations request was invalid. + MutateJobErrorEnum_INVALID_SEQUENCE_TOKEN MutateJobErrorEnum_MutateJobError = 4 + // Mutate Job Results can only be retrieved once the job is finished. + MutateJobErrorEnum_RESULTS_NOT_READY MutateJobErrorEnum_MutateJobError = 5 + // The page size for ListMutateJobResults was invalid. + MutateJobErrorEnum_INVALID_PAGE_SIZE MutateJobErrorEnum_MutateJobError = 6 +) + +var MutateJobErrorEnum_MutateJobError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_MODIFY_JOB_AFTER_JOB_STARTS_RUNNING", + 3: "EMPTY_OPERATIONS", + 4: "INVALID_SEQUENCE_TOKEN", + 5: "RESULTS_NOT_READY", + 6: "INVALID_PAGE_SIZE", +} +var MutateJobErrorEnum_MutateJobError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_MODIFY_JOB_AFTER_JOB_STARTS_RUNNING": 2, + "EMPTY_OPERATIONS": 3, + "INVALID_SEQUENCE_TOKEN": 4, + "RESULTS_NOT_READY": 5, + "INVALID_PAGE_SIZE": 6, +} + +func (x MutateJobErrorEnum_MutateJobError) String() string { + return proto.EnumName(MutateJobErrorEnum_MutateJobError_name, int32(x)) +} +func (MutateJobErrorEnum_MutateJobError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_error_15eaa13915eb323a, []int{0, 0} +} + +// Container for enum describing possible mutate job errors. +type MutateJobErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateJobErrorEnum) Reset() { *m = MutateJobErrorEnum{} } +func (m *MutateJobErrorEnum) String() string { return proto.CompactTextString(m) } +func (*MutateJobErrorEnum) ProtoMessage() {} +func (*MutateJobErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_error_15eaa13915eb323a, []int{0} +} +func (m *MutateJobErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateJobErrorEnum.Unmarshal(m, b) +} +func (m *MutateJobErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateJobErrorEnum.Marshal(b, m, deterministic) +} +func (dst *MutateJobErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateJobErrorEnum.Merge(dst, src) +} +func (m *MutateJobErrorEnum) XXX_Size() int { + return xxx_messageInfo_MutateJobErrorEnum.Size(m) +} +func (m *MutateJobErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_MutateJobErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateJobErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MutateJobErrorEnum)(nil), "google.ads.googleads.v1.errors.MutateJobErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.MutateJobErrorEnum_MutateJobError", MutateJobErrorEnum_MutateJobError_name, MutateJobErrorEnum_MutateJobError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/mutate_job_error.proto", fileDescriptor_mutate_job_error_15eaa13915eb323a) +} + +var fileDescriptor_mutate_job_error_15eaa13915eb323a = []byte{ + // 399 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8e, 0xd3, 0x30, + 0x18, 0xc5, 0x49, 0x07, 0x06, 0xc9, 0x23, 0x81, 0x31, 0x7f, 0x16, 0x23, 0x34, 0x8b, 0x2e, 0x59, + 0x38, 0x8a, 0x10, 0x1b, 0xb3, 0x72, 0x1b, 0xb7, 0xf2, 0xcc, 0xd4, 0x09, 0xb1, 0x13, 0xd4, 0x51, + 0x24, 0x2b, 0x25, 0x51, 0x54, 0x34, 0x8d, 0xab, 0x38, 0x33, 0x07, 0x62, 0xc9, 0x21, 0x38, 0x00, + 0x7b, 0x2e, 0xc1, 0x82, 0x33, 
0xa0, 0xc4, 0xa4, 0x52, 0x17, 0xcc, 0x2a, 0x4f, 0x4f, 0xef, 0xf7, + 0x7d, 0xf1, 0xfb, 0xc0, 0x87, 0xda, 0x98, 0xfa, 0xb6, 0xf2, 0x8b, 0xd2, 0xfa, 0x4e, 0xf6, 0xea, + 0x3e, 0xf0, 0xab, 0xb6, 0x35, 0xad, 0xf5, 0x77, 0x77, 0x5d, 0xd1, 0x55, 0xfa, 0xab, 0xd9, 0xe8, + 0xc1, 0xc1, 0xfb, 0xd6, 0x74, 0x06, 0x5d, 0xb8, 0x2c, 0x2e, 0x4a, 0x8b, 0x0f, 0x18, 0xbe, 0x0f, + 0xb0, 0xc3, 0xce, 0xdf, 0x8e, 0x63, 0xf7, 0x5b, 0xbf, 0x68, 0x1a, 0xd3, 0x15, 0xdd, 0xd6, 0x34, + 0xd6, 0xd1, 0xd3, 0x5f, 0x1e, 0x40, 0xab, 0x61, 0xf0, 0xa5, 0xd9, 0xb0, 0x9e, 0x60, 0xcd, 0xdd, + 0x6e, 0xfa, 0xc3, 0x03, 0xcf, 0x8e, 0x6d, 0xf4, 0x1c, 0x9c, 0xa5, 0x42, 0xc6, 0x6c, 0xce, 0x17, + 0x9c, 0x85, 0xf0, 0x11, 0x3a, 0x03, 0x4f, 0x53, 0x71, 0x25, 0xa2, 0xcf, 0x02, 0x7a, 0x08, 0x83, + 0x77, 0x73, 0x2a, 0x44, 0xa4, 0xf4, 0x2a, 0x0a, 0xf9, 0x62, 0xad, 0x2f, 0xa3, 0x99, 0xa6, 0x0b, + 0xc5, 0x92, 0x41, 0x49, 0x45, 0x13, 0x25, 0x75, 0x92, 0x0a, 0xc1, 0xc5, 0x12, 0x4e, 0xd0, 0x2b, + 0x00, 0xd9, 0x2a, 0x56, 0x6b, 0x1d, 0xc5, 0x2c, 0xa1, 0x8a, 0x47, 0x42, 0xc2, 0x13, 0x74, 0x0e, + 0xde, 0x70, 0x91, 0xd1, 0x6b, 0x1e, 0x6a, 0xc9, 0x3e, 0xa5, 0x4c, 0xcc, 0x99, 0x56, 0xd1, 0x15, + 0x13, 0xf0, 0x31, 0x7a, 0x0d, 0x5e, 0x24, 0x4c, 0xa6, 0xd7, 0x4a, 0xea, 0x7e, 0x4d, 0xc2, 0x68, + 0xb8, 0x86, 0x4f, 0x7a, 0x7b, 0x44, 0x62, 0xba, 0x64, 0x5a, 0xf2, 0x1b, 0x06, 0x4f, 0x67, 0x7f, + 0x3c, 0x30, 0xfd, 0x62, 0x76, 0xf8, 0xe1, 0x72, 0x66, 0x2f, 0x8f, 0x1f, 0x19, 0xf7, 0x9d, 0xc4, + 0xde, 0x4d, 0xf8, 0x0f, 0xab, 0xcd, 0x6d, 0xd1, 0xd4, 0xd8, 0xb4, 0xb5, 0x5f, 0x57, 0xcd, 0xd0, + 0xd8, 0x78, 0x9a, 0xfd, 0xd6, 0xfe, 0xef, 0x52, 0x1f, 0xdd, 0xe7, 0xdb, 0xe4, 0x64, 0x49, 0xe9, + 0xf7, 0xc9, 0xc5, 0xd2, 0x0d, 0xa3, 0xa5, 0xc5, 0x4e, 0xf6, 0x2a, 0x0b, 0xf0, 0xb0, 0xd2, 0xfe, + 0x1c, 0x03, 0x39, 0x2d, 0x6d, 0x7e, 0x08, 0xe4, 0x59, 0x90, 0xbb, 0xc0, 0xef, 0xc9, 0xd4, 0xb9, + 0x84, 0xd0, 0xd2, 0x12, 0x72, 0x88, 0x10, 0x92, 0x05, 0x84, 0xb8, 0xd0, 0xe6, 0x74, 0xf8, 0xbb, + 0xf7, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x79, 0x1b, 0x6a, 0x2b, 0x46, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/new_resource_creation_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/new_resource_creation_error.pb.go new file mode 100644 index 0000000..73b9e89 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/new_resource_creation_error.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/new_resource_creation_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible new resource creation errors. +type NewResourceCreationErrorEnum_NewResourceCreationError int32 + +const ( + // Enum unspecified. 
+ NewResourceCreationErrorEnum_UNSPECIFIED NewResourceCreationErrorEnum_NewResourceCreationError = 0 + // The received error code is not known in this version. + NewResourceCreationErrorEnum_UNKNOWN NewResourceCreationErrorEnum_NewResourceCreationError = 1 + // Do not set the id field while creating new resources. + NewResourceCreationErrorEnum_CANNOT_SET_ID_FOR_CREATE NewResourceCreationErrorEnum_NewResourceCreationError = 2 + // Creating more than one resource with the same temp ID is not allowed. + NewResourceCreationErrorEnum_DUPLICATE_TEMP_IDS NewResourceCreationErrorEnum_NewResourceCreationError = 3 + // Parent resource with specified temp ID failed validation, so no + // validation will be done for this child resource. + NewResourceCreationErrorEnum_TEMP_ID_RESOURCE_HAD_ERRORS NewResourceCreationErrorEnum_NewResourceCreationError = 4 +) + +var NewResourceCreationErrorEnum_NewResourceCreationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CANNOT_SET_ID_FOR_CREATE", + 3: "DUPLICATE_TEMP_IDS", + 4: "TEMP_ID_RESOURCE_HAD_ERRORS", +} +var NewResourceCreationErrorEnum_NewResourceCreationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CANNOT_SET_ID_FOR_CREATE": 2, + "DUPLICATE_TEMP_IDS": 3, + "TEMP_ID_RESOURCE_HAD_ERRORS": 4, +} + +func (x NewResourceCreationErrorEnum_NewResourceCreationError) String() string { + return proto.EnumName(NewResourceCreationErrorEnum_NewResourceCreationError_name, int32(x)) +} +func (NewResourceCreationErrorEnum_NewResourceCreationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_new_resource_creation_error_2cd7e98c3abb8598, []int{0, 0} +} + +// Container for enum describing possible new resource creation errors. +type NewResourceCreationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NewResourceCreationErrorEnum) Reset() { *m = NewResourceCreationErrorEnum{} } +func (m *NewResourceCreationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*NewResourceCreationErrorEnum) ProtoMessage() {} +func (*NewResourceCreationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_new_resource_creation_error_2cd7e98c3abb8598, []int{0} +} +func (m *NewResourceCreationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NewResourceCreationErrorEnum.Unmarshal(m, b) +} +func (m *NewResourceCreationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NewResourceCreationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *NewResourceCreationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_NewResourceCreationErrorEnum.Merge(dst, src) +} +func (m *NewResourceCreationErrorEnum) XXX_Size() int { + return xxx_messageInfo_NewResourceCreationErrorEnum.Size(m) +} +func (m *NewResourceCreationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_NewResourceCreationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_NewResourceCreationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*NewResourceCreationErrorEnum)(nil), "google.ads.googleads.v1.errors.NewResourceCreationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.NewResourceCreationErrorEnum_NewResourceCreationError", NewResourceCreationErrorEnum_NewResourceCreationError_name, NewResourceCreationErrorEnum_NewResourceCreationError_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/errors/new_resource_creation_error.proto", fileDescriptor_new_resource_creation_error_2cd7e98c3abb8598) +} + +var fileDescriptor_new_resource_creation_error_2cd7e98c3abb8598 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x6e, 0xa3, 0x30, + 0x14, 0xc6, 0x07, 0x32, 0x9a, 0x91, 0x9c, 0xc5, 0x20, 0x16, 0xa3, 0x68, 0x26, 0x93, 0x91, 0x38, + 0x80, 0x11, 0x9a, 0x9d, 0x67, 0x33, 0x0e, 0x38, 0x19, 0xd4, 0x16, 0x90, 0x81, 0x54, 0xaa, 0x90, + 0x2c, 0x1a, 0x2c, 0x14, 0x29, 0xc1, 0x91, 0x9d, 0x3f, 0xc7, 0xe8, 0x1d, 0xba, 0xec, 0xaa, 0xe7, + 0xe8, 0x51, 0x7a, 0x82, 0x2e, 0x2b, 0x30, 0xc9, 0x2e, 0x5d, 0xf1, 0x61, 0xff, 0xde, 0xf7, 0xbd, + 0xe7, 0x07, 0xfe, 0xd5, 0x42, 0xd4, 0x6b, 0xee, 0x96, 0x95, 0x72, 0xb5, 0x6c, 0xd5, 0xc1, 0x73, + 0xb9, 0x94, 0x42, 0x2a, 0xb7, 0xe1, 0x47, 0x26, 0xb9, 0x12, 0x7b, 0xb9, 0xe4, 0x6c, 0x29, 0x79, + 0xb9, 0x5b, 0x89, 0x86, 0x75, 0x97, 0x70, 0x2b, 0xc5, 0x4e, 0xd8, 0x13, 0x5d, 0x06, 0xcb, 0x4a, + 0xc1, 0xb3, 0x03, 0x3c, 0x78, 0x50, 0x3b, 0xfc, 0x18, 0x9f, 0x12, 0xb6, 0x2b, 0xb7, 0x6c, 0x1a, + 0xb1, 0xeb, 0x2c, 0x94, 0xae, 0x76, 0x9e, 0x0d, 0x30, 0x8e, 0xf8, 0x91, 0xf6, 0x11, 0x7e, 0x9f, + 0x40, 0xda, 0x5a, 0xd2, 0xec, 0x37, 0xce, 0x83, 0x01, 0x46, 0x97, 0x00, 0xfb, 0x1b, 0x18, 0xe6, + 0x51, 0x9a, 0x10, 0x3f, 0x9c, 0x85, 0x24, 0xb0, 0x3e, 0xd9, 0x43, 0xf0, 0x35, 0x8f, 0xae, 0xa2, + 0xf8, 0x36, 0xb2, 0x0c, 0x7b, 0x0c, 0x46, 0x3e, 0x8e, 0xa2, 0x38, 0x63, 0x29, 0xc9, 0x58, 0x18, + 0xb0, 0x59, 0x4c, 0x99, 0x4f, 0x09, 0xce, 0x88, 0x65, 0xda, 0xdf, 0x81, 0x1d, 0xe4, 0xc9, 0x75, + 0xe8, 0xe3, 0x8c, 0xb0, 0x8c, 0xdc, 0x24, 0x2c, 0x0c, 0x52, 0x6b, 0x60, 0xff, 0x06, 0x3f, 0xfb, + 0x3f, 0x46, 0x49, 0x1a, 0xe7, 0xd4, 0x27, 0xec, 0x3f, 0x0e, 0x18, 0xa1, 0x34, 0xa6, 0xa9, 0xf5, + 0x79, 0xfa, 0x66, 0x00, 0x67, 0x29, 0x36, 0xf0, 0xe3, 0xb9, 0xa7, 0xbf, 0x2e, 0x75, 0x9d, 0xb4, + 0x83, 0x27, 0xc6, 0x5d, 0xd0, 0x1b, 0xd4, 0x62, 0x5d, 0x36, 0x35, 0x14, 0xb2, 0x76, 0x6b, 0xde, + 0x74, 0xcf, 0x72, 0x5a, 0xc5, 0x76, 0xa5, 0x2e, 0x6d, 0xe6, 0xaf, 0xfe, 0x3c, 0x9a, 0x83, 0x39, + 0xc6, 0x4f, 0xe6, 0x64, 0xae, 0xcd, 0x70, 0xa5, 0xa0, 0x96, 0xad, 0x5a, 0x78, 0xb0, 0x8b, 0x54, + 0x2f, 0x27, 0xa0, 0xc0, 0x95, 0x2a, 0xce, 0x40, 0xb1, 0xf0, 0x0a, 0x0d, 0xbc, 0x9a, 0x8e, 0x3e, + 0x45, 0x08, 0x57, 0x0a, 0xa1, 0x33, 0x82, 0xd0, 0xc2, 0x43, 0x48, 0x43, 0xf7, 0x5f, 0xba, 0xee, + 0xfe, 0xbc, 0x07, 0x00, 0x00, 0xff, 0xff, 0x8a, 0x68, 0x8c, 0x1b, 0x36, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/not_empty_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/not_empty_error.pb.go new file mode 100644 index 0000000..9e0f548 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/not_empty_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/not_empty_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible not empty errors. +type NotEmptyErrorEnum_NotEmptyError int32 + +const ( + // Enum unspecified. + NotEmptyErrorEnum_UNSPECIFIED NotEmptyErrorEnum_NotEmptyError = 0 + // The received error code is not known in this version. + NotEmptyErrorEnum_UNKNOWN NotEmptyErrorEnum_NotEmptyError = 1 + // Empty list. + NotEmptyErrorEnum_EMPTY_LIST NotEmptyErrorEnum_NotEmptyError = 2 +) + +var NotEmptyErrorEnum_NotEmptyError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EMPTY_LIST", +} +var NotEmptyErrorEnum_NotEmptyError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EMPTY_LIST": 2, +} + +func (x NotEmptyErrorEnum_NotEmptyError) String() string { + return proto.EnumName(NotEmptyErrorEnum_NotEmptyError_name, int32(x)) +} +func (NotEmptyErrorEnum_NotEmptyError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_not_empty_error_60da7a8d2d549cbf, []int{0, 0} +} + +// Container for enum describing possible not empty errors. +type NotEmptyErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NotEmptyErrorEnum) Reset() { *m = NotEmptyErrorEnum{} } +func (m *NotEmptyErrorEnum) String() string { return proto.CompactTextString(m) } +func (*NotEmptyErrorEnum) ProtoMessage() {} +func (*NotEmptyErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_not_empty_error_60da7a8d2d549cbf, []int{0} +} +func (m *NotEmptyErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NotEmptyErrorEnum.Unmarshal(m, b) +} +func (m *NotEmptyErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NotEmptyErrorEnum.Marshal(b, m, deterministic) +} +func (dst *NotEmptyErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_NotEmptyErrorEnum.Merge(dst, src) +} +func (m *NotEmptyErrorEnum) XXX_Size() int { + return xxx_messageInfo_NotEmptyErrorEnum.Size(m) +} +func (m *NotEmptyErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_NotEmptyErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_NotEmptyErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*NotEmptyErrorEnum)(nil), "google.ads.googleads.v1.errors.NotEmptyErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.NotEmptyErrorEnum_NotEmptyError", NotEmptyErrorEnum_NotEmptyError_name, NotEmptyErrorEnum_NotEmptyError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/not_empty_error.proto", fileDescriptor_not_empty_error_60da7a8d2d549cbf) +} + +var fileDescriptor_not_empty_error_60da7a8d2d549cbf = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xc3, 0x30, + 0x18, 0x85, 0x6d, 0x05, 0x85, 0x0c, 0xb5, 0xf6, 0x52, 0x64, 0x17, 0x7d, 0x80, 0x84, 0xa2, 0x57, + 0x11, 0x2f, 0x3a, 0x17, 0x47, 0x51, 0x6b, 0x71, 0x5b, 0x45, 0x29, 0x94, 0x6a, 0x4b, 0x28, 0xac, + 0xf9, 0x4b, 0x13, 0x07, 0xbe, 0x8e, 0x97, 0x3e, 0x8a, 0x8f, 0x22, 0xf8, 0x0e, 0x92, 0xc6, 0x16, + 0x76, 0xe1, 0xae, 0x72, 0x38, 0x7c, 0xe7, 0xe4, 0xf0, 0xa3, 0x73, 0x0e, 0xc0, 0x57, 0x25, 0xc9, + 0x0b, 0x49, 0x8c, 0xd4, 0x6a, 0xed, 0x93, 0xb2, 0x6d, 0xa1, 0x95, 0x44, 0x80, 0xca, 0xca, 0xba, + 0x51, 0xef, 0x59, 0x67, 0xe0, 0xa6, 0x05, 0x05, 0xee, 0xd8, 0xa0, 0x38, 0x2f, 0x24, 0x1e, 
0x52, + 0x78, 0xed, 0x63, 0x93, 0x3a, 0x39, 0xed, 0x5b, 0x9b, 0x8a, 0xe4, 0x42, 0x80, 0xca, 0x55, 0x05, + 0x42, 0x9a, 0xb4, 0xf7, 0x80, 0x8e, 0x23, 0x50, 0x4c, 0xb7, 0x32, 0xcd, 0x33, 0xf1, 0x56, 0x7b, + 0x97, 0xe8, 0x60, 0xc3, 0x74, 0x8f, 0xd0, 0x68, 0x19, 0xcd, 0x63, 0x76, 0x15, 0x5e, 0x87, 0x6c, + 0xea, 0xec, 0xb8, 0x23, 0xb4, 0xbf, 0x8c, 0x6e, 0xa2, 0xfb, 0xc7, 0xc8, 0xb1, 0xdc, 0x43, 0x84, + 0xd8, 0x5d, 0xbc, 0x78, 0xca, 0x6e, 0xc3, 0xf9, 0xc2, 0xb1, 0x27, 0x3f, 0x16, 0xf2, 0x5e, 0xa1, + 0xc6, 0xdb, 0x87, 0x4d, 0xdc, 0x8d, 0x3f, 0x62, 0x3d, 0x27, 0xb6, 0x9e, 0xa7, 0x7f, 0x29, 0x0e, + 0xab, 0x5c, 0x70, 0x0c, 0x2d, 0x27, 0xbc, 0x14, 0xdd, 0xd8, 0xfe, 0x28, 0x4d, 0x25, 0xff, 0xbb, + 0xd1, 0x85, 0x79, 0x3e, 0xec, 0xdd, 0x59, 0x10, 0x7c, 0xda, 0xe3, 0x99, 0x29, 0x0b, 0x0a, 0x89, + 0x8d, 0xd4, 0x2a, 0xf1, 0x71, 0xf7, 0xa5, 0xfc, 0xea, 0x81, 0x34, 0x28, 0x64, 0x3a, 0x00, 0x69, + 0xe2, 0xa7, 0x06, 0xf8, 0xb6, 0x3d, 0xe3, 0x52, 0x1a, 0x14, 0x92, 0xd2, 0x01, 0xa1, 0x34, 0xf1, + 0x29, 0x35, 0xd0, 0xcb, 0x5e, 0xb7, 0xee, 0xec, 0x37, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x55, 0x9c, + 0x20, 0xc0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/null_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/null_error.pb.go new file mode 100644 index 0000000..17bf68c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/null_error.pb.go @@ -0,0 +1,112 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/null_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible null errors. +type NullErrorEnum_NullError int32 + +const ( + // Enum unspecified. + NullErrorEnum_UNSPECIFIED NullErrorEnum_NullError = 0 + // The received error code is not known in this version. + NullErrorEnum_UNKNOWN NullErrorEnum_NullError = 1 + // Specified list/container must not contain any null elements + NullErrorEnum_NULL_CONTENT NullErrorEnum_NullError = 2 +) + +var NullErrorEnum_NullError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "NULL_CONTENT", +} +var NullErrorEnum_NullError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "NULL_CONTENT": 2, +} + +func (x NullErrorEnum_NullError) String() string { + return proto.EnumName(NullErrorEnum_NullError_name, int32(x)) +} +func (NullErrorEnum_NullError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_null_error_8cdf831fc777f390, []int{0, 0} +} + +// Container for enum describing possible null errors. 
+type NullErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NullErrorEnum) Reset() { *m = NullErrorEnum{} } +func (m *NullErrorEnum) String() string { return proto.CompactTextString(m) } +func (*NullErrorEnum) ProtoMessage() {} +func (*NullErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_null_error_8cdf831fc777f390, []int{0} +} +func (m *NullErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NullErrorEnum.Unmarshal(m, b) +} +func (m *NullErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NullErrorEnum.Marshal(b, m, deterministic) +} +func (dst *NullErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_NullErrorEnum.Merge(dst, src) +} +func (m *NullErrorEnum) XXX_Size() int { + return xxx_messageInfo_NullErrorEnum.Size(m) +} +func (m *NullErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_NullErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_NullErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*NullErrorEnum)(nil), "google.ads.googleads.v1.errors.NullErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.NullErrorEnum_NullError", NullErrorEnum_NullError_name, NullErrorEnum_NullError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/null_error.proto", fileDescriptor_null_error_8cdf831fc777f390) +} + +var fileDescriptor_null_error_8cdf831fc777f390 = []byte{ + // 285 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xdf, 0x6a, 0x83, 0x30, + 0x14, 0xc6, 0xa7, 0x83, 0x8d, 0xa5, 0xfb, 0x23, 0x5e, 0x8e, 0xd1, 0x0b, 0x1f, 0x20, 0x41, 0x76, + 0x97, 0x5e, 0xd9, 0xd6, 0x95, 0x32, 0x49, 0x85, 0x55, 0x07, 0x43, 0x28, 0x6e, 0x4a, 0x10, 0xd2, + 0x1c, 0x31, 0xda, 0x07, 0xda, 0xe5, 0x1e, 0x65, 0x6f, 0xb2, 0x3d, 0xc5, 0xd0, 0xd4, 0xdc, 0xad, + 0x57, 0xf9, 0x11, 0x7e, 0xdf, 0x39, 0x1f, 0x07, 0x11, 0x0e, 0xc0, 0x45, 0x49, 0xf2, 0x42, 0x1d, + 0xb1, 0xa7, 0x83, 0x4f, 0xca, 0xa6, 0x81, 0x46, 0x11, 0xd9, 0x09, 0xb1, 0x1b, 0x18, 0xd7, 0x0d, + 0xb4, 0xe0, 0x4e, 0xb5, 0x85, 0xf3, 0x42, 0x61, 0x13, 0xc0, 0x07, 0x1f, 0xeb, 0xc0, 0xfd, 0xc3, + 0x38, 0xb0, 0xae, 0x48, 0x2e, 0x25, 0xb4, 0x79, 0x5b, 0x81, 0x54, 0x3a, 0xed, 0x45, 0xe8, 0x86, + 0x75, 0x42, 0x84, 0xbd, 0x1b, 0xca, 0x6e, 0xef, 0xcd, 0xd0, 0x95, 0xf9, 0x70, 0xef, 0xd0, 0x24, + 0x61, 0x2f, 0x71, 0xb8, 0x58, 0x3f, 0xad, 0xc3, 0xa5, 0x73, 0xe6, 0x4e, 0xd0, 0x65, 0xc2, 0x9e, + 0xd9, 0xe6, 0x95, 0x39, 0x96, 0xeb, 0xa0, 0x6b, 0x96, 0x44, 0xd1, 0x6e, 0xb1, 0x61, 0xdb, 0x90, + 0x6d, 0x1d, 0x7b, 0xfe, 0x63, 0x21, 0xef, 0x03, 0xf6, 0xf8, 0x74, 0xa5, 0xf9, 0xad, 0xd9, 0x10, + 0xf7, 0x25, 0x62, 0xeb, 0x6d, 0x79, 0x4c, 0x70, 0x10, 0xb9, 0xe4, 0x18, 0x1a, 0x4e, 0x78, 0x29, + 0x87, 0x8a, 0xe3, 0x15, 0xea, 0x4a, 0xfd, 0x77, 0x94, 0x99, 0x7e, 0x3e, 0xed, 0xf3, 0x55, 0x10, + 0x7c, 0xd9, 0xd3, 0x95, 0x1e, 0x16, 0x14, 0x0a, 0x6b, 0xec, 0x29, 0xf5, 0xf1, 0xb0, 0x52, 0x7d, + 0x8f, 0x42, 0x16, 0x14, 0x2a, 0x33, 0x42, 0x96, 0xfa, 0x99, 0x16, 0x7e, 0x6d, 0x4f, 0xff, 0x52, + 0x1a, 0x14, 0x8a, 0x52, 0xa3, 0x50, 0x9a, 0xfa, 0x94, 0x6a, 0xe9, 0xfd, 0x62, 0x68, 0xf7, 0xf8, + 0x17, 0x00, 0x00, 0xff, 0xff, 0xf4, 0xcd, 0xb2, 0x8f, 0xb1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operation_access_denied_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operation_access_denied_error.pb.go new 
file mode 100644 index 0000000..8b4e87a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operation_access_denied_error.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/operation_access_denied_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible operation access denied errors. +type OperationAccessDeniedErrorEnum_OperationAccessDeniedError int32 + +const ( + // Enum unspecified. + OperationAccessDeniedErrorEnum_UNSPECIFIED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 0 + // The received error code is not known in this version. + OperationAccessDeniedErrorEnum_UNKNOWN OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 1 + // Unauthorized invocation of a service's method (get, mutate, etc.) + OperationAccessDeniedErrorEnum_ACTION_NOT_PERMITTED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 2 + // Unauthorized CREATE operation in invoking a service's mutate method. + OperationAccessDeniedErrorEnum_CREATE_OPERATION_NOT_PERMITTED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 3 + // Unauthorized REMOVE operation in invoking a service's mutate method. + OperationAccessDeniedErrorEnum_REMOVE_OPERATION_NOT_PERMITTED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 4 + // Unauthorized UPDATE operation in invoking a service's mutate method. + OperationAccessDeniedErrorEnum_UPDATE_OPERATION_NOT_PERMITTED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 5 + // A mutate action is not allowed on this campaign, from this client. + OperationAccessDeniedErrorEnum_MUTATE_ACTION_NOT_PERMITTED_FOR_CLIENT OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 6 + // This operation is not permitted on this campaign type + OperationAccessDeniedErrorEnum_OPERATION_NOT_PERMITTED_FOR_CAMPAIGN_TYPE OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 7 + // A CREATE operation may not set status to REMOVED. + OperationAccessDeniedErrorEnum_CREATE_AS_REMOVED_NOT_PERMITTED OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 8 + // This operation is not allowed because the campaign or adgroup is removed. + OperationAccessDeniedErrorEnum_OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 9 + // This operation is not permitted on this ad group type. + OperationAccessDeniedErrorEnum_OPERATION_NOT_PERMITTED_FOR_AD_GROUP_TYPE OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 10 + // The mutate is not allowed for this customer. 
+ OperationAccessDeniedErrorEnum_MUTATE_NOT_PERMITTED_FOR_CUSTOMER OperationAccessDeniedErrorEnum_OperationAccessDeniedError = 11 +) + +var OperationAccessDeniedErrorEnum_OperationAccessDeniedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ACTION_NOT_PERMITTED", + 3: "CREATE_OPERATION_NOT_PERMITTED", + 4: "REMOVE_OPERATION_NOT_PERMITTED", + 5: "UPDATE_OPERATION_NOT_PERMITTED", + 6: "MUTATE_ACTION_NOT_PERMITTED_FOR_CLIENT", + 7: "OPERATION_NOT_PERMITTED_FOR_CAMPAIGN_TYPE", + 8: "CREATE_AS_REMOVED_NOT_PERMITTED", + 9: "OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE", + 10: "OPERATION_NOT_PERMITTED_FOR_AD_GROUP_TYPE", + 11: "MUTATE_NOT_PERMITTED_FOR_CUSTOMER", +} +var OperationAccessDeniedErrorEnum_OperationAccessDeniedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ACTION_NOT_PERMITTED": 2, + "CREATE_OPERATION_NOT_PERMITTED": 3, + "REMOVE_OPERATION_NOT_PERMITTED": 4, + "UPDATE_OPERATION_NOT_PERMITTED": 5, + "MUTATE_ACTION_NOT_PERMITTED_FOR_CLIENT": 6, + "OPERATION_NOT_PERMITTED_FOR_CAMPAIGN_TYPE": 7, + "CREATE_AS_REMOVED_NOT_PERMITTED": 8, + "OPERATION_NOT_PERMITTED_FOR_REMOVED_RESOURCE": 9, + "OPERATION_NOT_PERMITTED_FOR_AD_GROUP_TYPE": 10, + "MUTATE_NOT_PERMITTED_FOR_CUSTOMER": 11, +} + +func (x OperationAccessDeniedErrorEnum_OperationAccessDeniedError) String() string { + return proto.EnumName(OperationAccessDeniedErrorEnum_OperationAccessDeniedError_name, int32(x)) +} +func (OperationAccessDeniedErrorEnum_OperationAccessDeniedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_access_denied_error_c1c948edc110cca8, []int{0, 0} +} + +// Container for enum describing possible operation access denied errors. +type OperationAccessDeniedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationAccessDeniedErrorEnum) Reset() { *m = OperationAccessDeniedErrorEnum{} } +func (m *OperationAccessDeniedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*OperationAccessDeniedErrorEnum) ProtoMessage() {} +func (*OperationAccessDeniedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_access_denied_error_c1c948edc110cca8, []int{0} +} +func (m *OperationAccessDeniedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationAccessDeniedErrorEnum.Unmarshal(m, b) +} +func (m *OperationAccessDeniedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationAccessDeniedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *OperationAccessDeniedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationAccessDeniedErrorEnum.Merge(dst, src) +} +func (m *OperationAccessDeniedErrorEnum) XXX_Size() int { + return xxx_messageInfo_OperationAccessDeniedErrorEnum.Size(m) +} +func (m *OperationAccessDeniedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_OperationAccessDeniedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationAccessDeniedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*OperationAccessDeniedErrorEnum)(nil), "google.ads.googleads.v1.errors.OperationAccessDeniedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.OperationAccessDeniedErrorEnum_OperationAccessDeniedError", OperationAccessDeniedErrorEnum_OperationAccessDeniedError_name, OperationAccessDeniedErrorEnum_OperationAccessDeniedError_value) +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/errors/operation_access_denied_error.proto", fileDescriptor_operation_access_denied_error_c1c948edc110cca8) +} + +var fileDescriptor_operation_access_denied_error_c1c948edc110cca8 = []byte{ + // 448 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8a, 0xd3, 0x40, + 0x1c, 0xc6, 0x6d, 0xab, 0xbb, 0x3a, 0x3d, 0x18, 0x06, 0x0f, 0xb2, 0x48, 0x17, 0x23, 0x0a, 0x8a, + 0x4e, 0x0c, 0xde, 0xe2, 0x69, 0x9a, 0xcc, 0x96, 0xa0, 0x99, 0x19, 0xa6, 0x93, 0x8a, 0x52, 0x18, + 0x62, 0x13, 0x42, 0x61, 0x37, 0x53, 0x32, 0xb5, 0x0f, 0xe4, 0xd1, 0x27, 0x11, 0x1f, 0x45, 0xdf, + 0xc0, 0x93, 0x24, 0xd3, 0xf4, 0x20, 0x4d, 0xf7, 0x94, 0x3f, 0xc9, 0xef, 0xff, 0x7d, 0xdf, 0x9f, + 0x7c, 0x60, 0x5a, 0x6a, 0x5d, 0x5e, 0x17, 0x5e, 0x96, 0x1b, 0xcf, 0x8e, 0xcd, 0xb4, 0xf3, 0xbd, + 0xa2, 0xae, 0x75, 0x6d, 0x3c, 0xbd, 0x29, 0xea, 0x6c, 0xbb, 0xd6, 0x95, 0xca, 0x56, 0xab, 0xc2, + 0x18, 0x95, 0x17, 0xd5, 0xba, 0xc8, 0x55, 0xfb, 0x19, 0x6d, 0x6a, 0xbd, 0xd5, 0x70, 0x62, 0x17, + 0x51, 0x96, 0x1b, 0x74, 0xd0, 0x40, 0x3b, 0x1f, 0x59, 0x8d, 0x8b, 0x27, 0x9d, 0xc7, 0x66, 0xed, + 0x65, 0x55, 0xa5, 0xb7, 0xad, 0xa0, 0xb1, 0xdb, 0xee, 0x9f, 0x11, 0x98, 0xb0, 0xce, 0x05, 0xb7, + 0x26, 0x51, 0xeb, 0x41, 0x9a, 0x6d, 0x52, 0x7d, 0xbb, 0x71, 0x7f, 0x8e, 0xc0, 0x45, 0x3f, 0x02, + 0x1f, 0x82, 0x71, 0x4a, 0xe7, 0x9c, 0x84, 0xf1, 0x55, 0x4c, 0x22, 0xe7, 0x0e, 0x1c, 0x83, 0xf3, + 0x94, 0x7e, 0xa0, 0xec, 0x13, 0x75, 0x06, 0xf0, 0x31, 0x78, 0x84, 0x43, 0x19, 0x33, 0xaa, 0x28, + 0x93, 0x8a, 0x13, 0x91, 0xc4, 0x52, 0x92, 0xc8, 0x19, 0x42, 0x17, 0x4c, 0x42, 0x41, 0xb0, 0x24, + 0x8a, 0x71, 0x22, 0xf0, 0x11, 0x66, 0xd4, 0x30, 0x82, 0x24, 0x6c, 0xd1, 0xcf, 0xdc, 0x6d, 0x98, + 0x94, 0x47, 0xa7, 0x74, 0xee, 0xc1, 0x57, 0xe0, 0x45, 0x92, 0xca, 0x86, 0x39, 0x16, 0x46, 0x5d, + 0x31, 0xa1, 0xc2, 0x8f, 0x31, 0xa1, 0xd2, 0x39, 0x83, 0x6f, 0xc0, 0xcb, 0x1e, 0x21, 0xcb, 0xe1, + 0x84, 0xe3, 0x78, 0x46, 0x95, 0xfc, 0xcc, 0x89, 0x73, 0x0e, 0x9f, 0x81, 0xcb, 0xfd, 0x19, 0x78, + 0xae, 0x6c, 0xd8, 0xe8, 0x3f, 0xff, 0xfb, 0xf0, 0x2d, 0x78, 0x7d, 0x4a, 0xb3, 0x5b, 0x13, 0x64, + 0xce, 0x52, 0x11, 0x12, 0xe7, 0xc1, 0x6d, 0x29, 0x70, 0xa4, 0x66, 0x82, 0xa5, 0xdc, 0xa6, 0x00, + 0xf0, 0x39, 0x78, 0xba, 0x3f, 0xf0, 0x48, 0xe2, 0x74, 0x2e, 0x59, 0x42, 0x84, 0x33, 0x9e, 0xfe, + 0x1d, 0x00, 0x77, 0xa5, 0x6f, 0xd0, 0xe9, 0xca, 0x4c, 0x2f, 0xfb, 0x7f, 0x37, 0x6f, 0x5a, 0xc3, + 0x07, 0x5f, 0xa2, 0xbd, 0x44, 0xa9, 0xaf, 0xb3, 0xaa, 0x44, 0xba, 0x2e, 0xbd, 0xb2, 0xa8, 0xda, + 0x4e, 0x75, 0x4d, 0xde, 0xac, 0x4d, 0x5f, 0xb1, 0xdf, 0xdb, 0xc7, 0xf7, 0xe1, 0x68, 0x86, 0xf1, + 0x8f, 0xe1, 0x64, 0x66, 0xc5, 0x70, 0x6e, 0x90, 0x1d, 0x9b, 0x69, 0xe1, 0xa3, 0xd6, 0xd2, 0xfc, + 0xea, 0x80, 0x25, 0xce, 0xcd, 0xf2, 0x00, 0x2c, 0x17, 0xfe, 0xd2, 0x02, 0xbf, 0x87, 0xae, 0x7d, + 0x1b, 0x04, 0x38, 0x37, 0x41, 0x70, 0x40, 0x82, 0x60, 0xe1, 0x07, 0x81, 0x85, 0xbe, 0x9e, 0xb5, + 0xe9, 0xde, 0xfd, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x05, 0x7d, 0xe7, 0xd4, 0x75, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operator_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operator_error.pb.go new file mode 100644 index 0000000..7282715 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/operator_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/errors/operator_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible operator errors. +type OperatorErrorEnum_OperatorError int32 + +const ( + // Enum unspecified. + OperatorErrorEnum_UNSPECIFIED OperatorErrorEnum_OperatorError = 0 + // The received error code is not known in this version. + OperatorErrorEnum_UNKNOWN OperatorErrorEnum_OperatorError = 1 + // Operator not supported. + OperatorErrorEnum_OPERATOR_NOT_SUPPORTED OperatorErrorEnum_OperatorError = 2 +) + +var OperatorErrorEnum_OperatorError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "OPERATOR_NOT_SUPPORTED", +} +var OperatorErrorEnum_OperatorError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "OPERATOR_NOT_SUPPORTED": 2, +} + +func (x OperatorErrorEnum_OperatorError) String() string { + return proto.EnumName(OperatorErrorEnum_OperatorError_name, int32(x)) +} +func (OperatorErrorEnum_OperatorError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operator_error_28a8d2f041e454d4, []int{0, 0} +} + +// Container for enum describing possible operator errors. 
+type OperatorErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperatorErrorEnum) Reset() { *m = OperatorErrorEnum{} } +func (m *OperatorErrorEnum) String() string { return proto.CompactTextString(m) } +func (*OperatorErrorEnum) ProtoMessage() {} +func (*OperatorErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_operator_error_28a8d2f041e454d4, []int{0} +} +func (m *OperatorErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperatorErrorEnum.Unmarshal(m, b) +} +func (m *OperatorErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperatorErrorEnum.Marshal(b, m, deterministic) +} +func (dst *OperatorErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperatorErrorEnum.Merge(dst, src) +} +func (m *OperatorErrorEnum) XXX_Size() int { + return xxx_messageInfo_OperatorErrorEnum.Size(m) +} +func (m *OperatorErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_OperatorErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_OperatorErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*OperatorErrorEnum)(nil), "google.ads.googleads.v1.errors.OperatorErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.OperatorErrorEnum_OperatorError", OperatorErrorEnum_OperatorError_name, OperatorErrorEnum_OperatorError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/operator_error.proto", fileDescriptor_operator_error_28a8d2f041e454d4) +} + +var fileDescriptor_operator_error_28a8d2f041e454d4 = []byte{ + // 301 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4a, 0x33, 0x31, + 0x14, 0xc5, 0xbf, 0xce, 0x07, 0x0a, 0x29, 0x62, 0x9d, 0x85, 0x8b, 0x22, 0x5d, 0xcc, 0x03, 0x24, + 0x0c, 0xdd, 0xc5, 0x55, 0x6a, 0x63, 0x29, 0xc2, 0x24, 0xf4, 0x9f, 0x20, 0x83, 0x25, 0x3a, 0x43, + 0x18, 0x68, 0x73, 0x87, 0x64, 0xec, 0x03, 0xb9, 0xf4, 0x51, 0x7c, 0x14, 0xc1, 0x77, 0x90, 0x99, + 0x74, 0x06, 0xba, 0xd0, 0x55, 0x4e, 0x2e, 0xbf, 0x73, 0xee, 0xe1, 0xa2, 0xb1, 0x06, 0xd0, 0xbb, + 0x9c, 0xa8, 0xcc, 0x11, 0x2f, 0x6b, 0x75, 0x88, 0x49, 0x6e, 0x2d, 0x58, 0x47, 0xa0, 0xcc, 0xad, + 0xaa, 0xc0, 0x6e, 0x9b, 0x3f, 0x2e, 0x2d, 0x54, 0x10, 0x8e, 0x3c, 0x89, 0x55, 0xe6, 0x70, 0x67, + 0xc2, 0x87, 0x18, 0x7b, 0xd3, 0xf0, 0xa6, 0x0d, 0x2d, 0x0b, 0xa2, 0x8c, 0x81, 0x4a, 0x55, 0x05, + 0x18, 0xe7, 0xdd, 0xd1, 0x33, 0xba, 0x12, 0xc7, 0x54, 0x5e, 0xf3, 0xdc, 0xbc, 0xed, 0xa3, 0x39, + 0xba, 0x38, 0x19, 0x86, 0x97, 0xa8, 0xbf, 0x4e, 0x96, 0x92, 0xdf, 0xcd, 0xef, 0xe7, 0x7c, 0x3a, + 0xf8, 0x17, 0xf6, 0xd1, 0xf9, 0x3a, 0x79, 0x48, 0xc4, 0x63, 0x32, 0xe8, 0x85, 0x43, 0x74, 0x2d, + 0x24, 0x5f, 0xb0, 0x95, 0x58, 0x6c, 0x13, 0xb1, 0xda, 0x2e, 0xd7, 0x52, 0x8a, 0xc5, 0x8a, 0x4f, + 0x07, 0xc1, 0xe4, 0xbb, 0x87, 0xa2, 0x57, 0xd8, 0xe3, 0xbf, 0x4b, 0x4e, 0xc2, 0x93, 0x7d, 0xb2, + 0xae, 0x26, 0x7b, 0x4f, 0xd3, 0xa3, 0x4b, 0xc3, 0x4e, 0x19, 0x8d, 0xc1, 0x6a, 0xa2, 0x73, 0xd3, + 0x14, 0x6f, 0xef, 0x53, 0x16, 0xee, 0xb7, 0x73, 0xdd, 0xfa, 0xe7, 0x3d, 0xf8, 0x3f, 0x63, 0xec, + 0x23, 0x18, 0xcd, 0x7c, 0x18, 0xcb, 0x1c, 0xf6, 0xb2, 0x56, 0x9b, 0x18, 0x37, 0x2b, 0xdd, 0x67, + 0x0b, 0xa4, 0x2c, 0x73, 0x69, 0x07, 0xa4, 0x9b, 0x38, 0xf5, 0xc0, 0x57, 0x10, 0xf9, 0x29, 0xa5, + 0x2c, 0x73, 0x94, 0x76, 0x08, 0xa5, 0x9b, 0x98, 0x52, 0x0f, 0xbd, 0x9c, 0x35, 0xed, 0xc6, 0x3f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x09, 0x5e, 0xde, 0xc0, 0xcb, 0x01, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/partial_failure_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/partial_failure_error.pb.go new file mode 100644 index 0000000..48d6e6e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/partial_failure_error.pb.go @@ -0,0 +1,115 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/partial_failure_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible partial failure errors. +type PartialFailureErrorEnum_PartialFailureError int32 + +const ( + // Enum unspecified. + PartialFailureErrorEnum_UNSPECIFIED PartialFailureErrorEnum_PartialFailureError = 0 + // The received error code is not known in this version. + PartialFailureErrorEnum_UNKNOWN PartialFailureErrorEnum_PartialFailureError = 1 + // The partial failure field was false in the request. + // This method requires this field be set to true. + PartialFailureErrorEnum_PARTIAL_FAILURE_MODE_REQUIRED PartialFailureErrorEnum_PartialFailureError = 2 +) + +var PartialFailureErrorEnum_PartialFailureError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "PARTIAL_FAILURE_MODE_REQUIRED", +} +var PartialFailureErrorEnum_PartialFailureError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "PARTIAL_FAILURE_MODE_REQUIRED": 2, +} + +func (x PartialFailureErrorEnum_PartialFailureError) String() string { + return proto.EnumName(PartialFailureErrorEnum_PartialFailureError_name, int32(x)) +} +func (PartialFailureErrorEnum_PartialFailureError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_partial_failure_error_0a8571a1c815d149, []int{0, 0} +} + +// Container for enum describing possible partial failure errors. 
+type PartialFailureErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartialFailureErrorEnum) Reset() { *m = PartialFailureErrorEnum{} } +func (m *PartialFailureErrorEnum) String() string { return proto.CompactTextString(m) } +func (*PartialFailureErrorEnum) ProtoMessage() {} +func (*PartialFailureErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_partial_failure_error_0a8571a1c815d149, []int{0} +} +func (m *PartialFailureErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartialFailureErrorEnum.Unmarshal(m, b) +} +func (m *PartialFailureErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartialFailureErrorEnum.Marshal(b, m, deterministic) +} +func (dst *PartialFailureErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartialFailureErrorEnum.Merge(dst, src) +} +func (m *PartialFailureErrorEnum) XXX_Size() int { + return xxx_messageInfo_PartialFailureErrorEnum.Size(m) +} +func (m *PartialFailureErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PartialFailureErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PartialFailureErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PartialFailureErrorEnum)(nil), "google.ads.googleads.v1.errors.PartialFailureErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.PartialFailureErrorEnum_PartialFailureError", PartialFailureErrorEnum_PartialFailureError_name, PartialFailureErrorEnum_PartialFailureError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/partial_failure_error.proto", fileDescriptor_partial_failure_error_0a8571a1c815d149) +} + +var fileDescriptor_partial_failure_error_0a8571a1c815d149 = []byte{ + // 316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4e, 0xb3, 0x40, + 0x14, 0x85, 0xff, 0xf2, 0x27, 0x9a, 0x4c, 0x17, 0x36, 0xb8, 0xd0, 0x18, 0x6d, 0x22, 0x0f, 0x30, + 0x84, 0xb8, 0x1b, 0x57, 0x53, 0x99, 0x36, 0xc4, 0x4a, 0x11, 0x05, 0x13, 0x43, 0x42, 0x46, 0xc1, + 0x09, 0x09, 0x9d, 0xc1, 0x19, 0xda, 0x07, 0x72, 0xe9, 0xa3, 0xf8, 0x28, 0x6e, 0x7d, 0x01, 0x03, + 0xd7, 0x76, 0x55, 0x5d, 0x71, 0x72, 0x39, 0xdf, 0xb9, 0x67, 0x2e, 0x22, 0x42, 0x29, 0x51, 0x97, + 0x2e, 0x2f, 0x8c, 0x0b, 0xb2, 0x53, 0x6b, 0xcf, 0x2d, 0xb5, 0x56, 0xda, 0xb8, 0x0d, 0xd7, 0x6d, + 0xc5, 0xeb, 0xfc, 0x85, 0x57, 0xf5, 0x4a, 0x97, 0x79, 0x3f, 0xc6, 0x8d, 0x56, 0xad, 0xb2, 0xc7, + 0x00, 0x60, 0x5e, 0x18, 0xbc, 0x65, 0xf1, 0xda, 0xc3, 0xc0, 0x9e, 0x9c, 0x6e, 0xb2, 0x9b, 0xca, + 0xe5, 0x52, 0xaa, 0x96, 0xb7, 0x95, 0x92, 0x06, 0x68, 0xe7, 0x15, 0x1d, 0x45, 0x10, 0x3e, 0x85, + 0x6c, 0xd6, 0x51, 0x4c, 0xae, 0x96, 0x4e, 0x8a, 0x0e, 0x77, 0xfc, 0xb2, 0x0f, 0xd0, 0x30, 0x09, + 0xef, 0x22, 0x76, 0x15, 0x4c, 0x03, 0xe6, 0x8f, 0xfe, 0xd9, 0x43, 0xb4, 0x9f, 0x84, 0xd7, 0xe1, + 0xe2, 0x21, 0x1c, 0x0d, 0xec, 0x73, 0x74, 0x16, 0xd1, 0xf8, 0x3e, 0xa0, 0xf3, 0x7c, 0x4a, 0x83, + 0x79, 0x12, 0xb3, 0xfc, 0x66, 0xe1, 0xb3, 0x3c, 0x66, 0xb7, 0x49, 0x10, 0x33, 0x7f, 0x64, 0x4d, + 0xbe, 0x06, 0xc8, 0x79, 0x56, 0x4b, 0xfc, 0x77, 0xef, 0xc9, 0xf1, 0x8e, 0xe5, 0x51, 0xd7, 0x39, + 0x1a, 0x3c, 0xfa, 0x3f, 0xac, 0x50, 0x35, 0x97, 0x02, 0x2b, 0x2d, 0x5c, 0x51, 0xca, 0xfe, 0x45, + 0x9b, 0xfb, 0x35, 0x95, 0xf9, 0xed, 0x9c, 0x97, 0xf0, 0x79, 0xb3, 0xfe, 0xcf, 0x28, 0x7d, 0xb7, + 0xc6, 0x33, 0x08, 0xa3, 0x85, 0xc1, 0x20, 0x3b, 0x95, 0x7a, 0xb8, 0x5f, 0x69, 0x3e, 0x36, 0x86, + 0x8c, 0x16, 
0x26, 0xdb, 0x1a, 0xb2, 0xd4, 0xcb, 0xc0, 0xf0, 0x69, 0x39, 0x30, 0x25, 0x84, 0x16, + 0x86, 0x90, 0xad, 0x85, 0x90, 0xd4, 0x23, 0x04, 0x4c, 0x4f, 0x7b, 0x7d, 0xbb, 0x8b, 0xef, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x0d, 0xc9, 0x1f, 0x5b, 0xeb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_finding_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_finding_error.pb.go new file mode 100644 index 0000000..b53fc00 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_finding_error.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/policy_finding_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible policy finding errors. +type PolicyFindingErrorEnum_PolicyFindingError int32 + +const ( + // Enum unspecified. + PolicyFindingErrorEnum_UNSPECIFIED PolicyFindingErrorEnum_PolicyFindingError = 0 + // The received error code is not known in this version. + PolicyFindingErrorEnum_UNKNOWN PolicyFindingErrorEnum_PolicyFindingError = 1 + // The resource has been disapproved since the policy summary includes + // policy topics of type PROHIBITED. + PolicyFindingErrorEnum_POLICY_FINDING PolicyFindingErrorEnum_PolicyFindingError = 2 + // The given policy topic does not exist. + PolicyFindingErrorEnum_POLICY_TOPIC_NOT_FOUND PolicyFindingErrorEnum_PolicyFindingError = 3 +) + +var PolicyFindingErrorEnum_PolicyFindingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "POLICY_FINDING", + 3: "POLICY_TOPIC_NOT_FOUND", +} +var PolicyFindingErrorEnum_PolicyFindingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "POLICY_FINDING": 2, + "POLICY_TOPIC_NOT_FOUND": 3, +} + +func (x PolicyFindingErrorEnum_PolicyFindingError) String() string { + return proto.EnumName(PolicyFindingErrorEnum_PolicyFindingError_name, int32(x)) +} +func (PolicyFindingErrorEnum_PolicyFindingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_finding_error_488cb98685ad631d, []int{0, 0} +} + +// Container for enum describing possible policy finding errors. 
+type PolicyFindingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyFindingErrorEnum) Reset() { *m = PolicyFindingErrorEnum{} } +func (m *PolicyFindingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyFindingErrorEnum) ProtoMessage() {} +func (*PolicyFindingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_finding_error_488cb98685ad631d, []int{0} +} +func (m *PolicyFindingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyFindingErrorEnum.Unmarshal(m, b) +} +func (m *PolicyFindingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyFindingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyFindingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyFindingErrorEnum.Merge(dst, src) +} +func (m *PolicyFindingErrorEnum) XXX_Size() int { + return xxx_messageInfo_PolicyFindingErrorEnum.Size(m) +} +func (m *PolicyFindingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyFindingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyFindingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyFindingErrorEnum)(nil), "google.ads.googleads.v1.errors.PolicyFindingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.PolicyFindingErrorEnum_PolicyFindingError", PolicyFindingErrorEnum_PolicyFindingError_name, PolicyFindingErrorEnum_PolicyFindingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/policy_finding_error.proto", fileDescriptor_policy_finding_error_488cb98685ad631d) +} + +var fileDescriptor_policy_finding_error_488cb98685ad631d = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x07, 0x0a, 0x19, 0x68, 0xc9, 0x61, 0xc2, 0x90, 0x1d, 0xfa, 0x00, 0x29, 0xc5, + 0x93, 0xf1, 0xd4, 0xad, 0xed, 0x08, 0x4a, 0x1a, 0x70, 0x9b, 0x28, 0x85, 0xd2, 0x2d, 0x33, 0x04, + 0xb6, 0xa4, 0x34, 0x73, 0x20, 0xf8, 0x34, 0x1e, 0x7d, 0x14, 0x1f, 0xc5, 0xa3, 0x4f, 0x20, 0x6d, + 0xb6, 0x5e, 0x86, 0x9e, 0xfa, 0xf1, 0xef, 0xef, 0xfb, 0xf2, 0xfd, 0xff, 0xe0, 0x46, 0x68, 0x2d, + 0xd6, 0x2b, 0xbf, 0xe0, 0xc6, 0xb7, 0xb2, 0x56, 0xbb, 0xc0, 0x5f, 0x55, 0x95, 0xae, 0x8c, 0x5f, + 0xea, 0xb5, 0x5c, 0xbe, 0xe5, 0x2f, 0x52, 0x71, 0xa9, 0x44, 0xde, 0x4c, 0x51, 0x59, 0xe9, 0xad, + 0x86, 0x43, 0xcb, 0xa3, 0x82, 0x1b, 0xd4, 0x5a, 0xd1, 0x2e, 0x40, 0xd6, 0x3a, 0xb8, 0x3a, 0x44, + 0x97, 0xd2, 0x2f, 0x94, 0xd2, 0xdb, 0x62, 0x2b, 0xb5, 0x32, 0xd6, 0xed, 0xbd, 0x83, 0x3e, 0x6b, + 0xb2, 0x13, 0x1b, 0x1d, 0xd7, 0xa6, 0x58, 0xbd, 0x6e, 0xbc, 0x05, 0x80, 0xc7, 0x7f, 0xe0, 0x05, + 0xe8, 0xcd, 0xe8, 0x03, 0x8b, 0xc7, 0x24, 0x21, 0x71, 0xe4, 0x9e, 0xc0, 0x1e, 0x38, 0x9b, 0xd1, + 0x3b, 0x9a, 0x3e, 0x52, 0xb7, 0x03, 0x21, 0x38, 0x67, 0xe9, 0x3d, 0x19, 0x3f, 0xe5, 0x09, 0xa1, + 0x11, 0xa1, 0x13, 0xd7, 0x81, 0x03, 0xd0, 0xdf, 0xcf, 0xa6, 0x29, 0x23, 0xe3, 0x9c, 0xa6, 0xd3, + 0x3c, 0x49, 0x67, 0x34, 0x72, 0xbb, 0xa3, 0x9f, 0x0e, 0xf0, 0x96, 0x7a, 0x83, 0xfe, 0x5f, 0x61, + 0x74, 0x79, 0x5c, 0x84, 0xd5, 0xed, 0x59, 0xe7, 0x39, 0xda, 0x5b, 0x85, 0x5e, 0x17, 0x4a, 0x20, + 0x5d, 0x09, 0x5f, 0xac, 0x54, 0xb3, 0xdb, 0xe1, 0x90, 0xa5, 0x34, 0x7f, 0xdd, 0xf5, 0xd6, 0x7e, + 0x3e, 0x9c, 0xee, 0x24, 0x0c, 0x3f, 0x9d, 0xe1, 0xc4, 0x86, 0x85, 0xdc, 0x20, 0x2b, 0x6b, 0x35, + 0x0f, 0x50, 0xf3, 0xa4, 0xf9, 0x3a, 0x00, 
0x59, 0xc8, 0x4d, 0xd6, 0x02, 0xd9, 0x3c, 0xc8, 0x2c, + 0xf0, 0xed, 0x78, 0x76, 0x8a, 0x71, 0xc8, 0x0d, 0xc6, 0x2d, 0x82, 0xf1, 0x3c, 0xc0, 0xd8, 0x42, + 0x8b, 0xd3, 0xa6, 0xdd, 0xf5, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe1, 0xe1, 0x6b, 0xc5, 0xf4, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_validation_parameter_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_validation_parameter_error.pb.go new file mode 100644 index 0000000..80ced02 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_validation_parameter_error.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/policy_validation_parameter_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible policy validation parameter errors. +type PolicyValidationParameterErrorEnum_PolicyValidationParameterError int32 + +const ( + // Enum unspecified. + PolicyValidationParameterErrorEnum_UNSPECIFIED PolicyValidationParameterErrorEnum_PolicyValidationParameterError = 0 + // The received error code is not known in this version. + PolicyValidationParameterErrorEnum_UNKNOWN PolicyValidationParameterErrorEnum_PolicyValidationParameterError = 1 + // Ignorable policy topics are not supported for the ad type. + PolicyValidationParameterErrorEnum_UNSUPPORTED_AD_TYPE_FOR_IGNORABLE_POLICY_TOPICS PolicyValidationParameterErrorEnum_PolicyValidationParameterError = 2 + // Exempt policy violation keys are not supported for the ad type. + PolicyValidationParameterErrorEnum_UNSUPPORTED_AD_TYPE_FOR_EXEMPT_POLICY_VIOLATION_KEYS PolicyValidationParameterErrorEnum_PolicyValidationParameterError = 3 + // Cannot set ignorable policy topics and exempt policy violation keys in + // the same policy violation parameter. 
+ PolicyValidationParameterErrorEnum_CANNOT_SET_BOTH_IGNORABLE_POLICY_TOPICS_AND_EXEMPT_POLICY_VIOLATION_KEYS PolicyValidationParameterErrorEnum_PolicyValidationParameterError = 4 +) + +var PolicyValidationParameterErrorEnum_PolicyValidationParameterError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "UNSUPPORTED_AD_TYPE_FOR_IGNORABLE_POLICY_TOPICS", + 3: "UNSUPPORTED_AD_TYPE_FOR_EXEMPT_POLICY_VIOLATION_KEYS", + 4: "CANNOT_SET_BOTH_IGNORABLE_POLICY_TOPICS_AND_EXEMPT_POLICY_VIOLATION_KEYS", +} +var PolicyValidationParameterErrorEnum_PolicyValidationParameterError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "UNSUPPORTED_AD_TYPE_FOR_IGNORABLE_POLICY_TOPICS": 2, + "UNSUPPORTED_AD_TYPE_FOR_EXEMPT_POLICY_VIOLATION_KEYS": 3, + "CANNOT_SET_BOTH_IGNORABLE_POLICY_TOPICS_AND_EXEMPT_POLICY_VIOLATION_KEYS": 4, +} + +func (x PolicyValidationParameterErrorEnum_PolicyValidationParameterError) String() string { + return proto.EnumName(PolicyValidationParameterErrorEnum_PolicyValidationParameterError_name, int32(x)) +} +func (PolicyValidationParameterErrorEnum_PolicyValidationParameterError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_validation_parameter_error_30d40d9e925110fb, []int{0, 0} +} + +// Container for enum describing possible policy validation parameter errors. +type PolicyValidationParameterErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyValidationParameterErrorEnum) Reset() { *m = PolicyValidationParameterErrorEnum{} } +func (m *PolicyValidationParameterErrorEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyValidationParameterErrorEnum) ProtoMessage() {} +func (*PolicyValidationParameterErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_validation_parameter_error_30d40d9e925110fb, []int{0} +} +func (m *PolicyValidationParameterErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyValidationParameterErrorEnum.Unmarshal(m, b) +} +func (m *PolicyValidationParameterErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyValidationParameterErrorEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyValidationParameterErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyValidationParameterErrorEnum.Merge(dst, src) +} +func (m *PolicyValidationParameterErrorEnum) XXX_Size() int { + return xxx_messageInfo_PolicyValidationParameterErrorEnum.Size(m) +} +func (m *PolicyValidationParameterErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyValidationParameterErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyValidationParameterErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyValidationParameterErrorEnum)(nil), "google.ads.googleads.v1.errors.PolicyValidationParameterErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.PolicyValidationParameterErrorEnum_PolicyValidationParameterError", PolicyValidationParameterErrorEnum_PolicyValidationParameterError_name, PolicyValidationParameterErrorEnum_PolicyValidationParameterError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/policy_validation_parameter_error.proto", fileDescriptor_policy_validation_parameter_error_30d40d9e925110fb) +} + +var fileDescriptor_policy_validation_parameter_error_30d40d9e925110fb = []byte{ + // 391 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x8a, 0xdb, 0x30, + 0x18, 0x85, 0x1b, 0x4f, 0x69, 0x41, 0xb3, 0x68, 0xf0, 0xb2, 0x94, 0x2c, 0xdc, 0xbd, 0x8c, 0x99, + 0x2e, 0x8a, 0xba, 0x52, 0x62, 0x25, 0x63, 0x26, 0x95, 0x44, 0xac, 0xb8, 0x4d, 0x31, 0x08, 0x75, + 0x6c, 0x8c, 0xc1, 0xb1, 0x8c, 0xe5, 0x06, 0x7a, 0x9d, 0x59, 0xf6, 0x28, 0x3d, 0x4a, 0xaf, 0x50, + 0x28, 0xc5, 0xd6, 0xd8, 0xbb, 0x78, 0xa5, 0x87, 0x78, 0xef, 0x7b, 0xe2, 0xd7, 0x0f, 0xb6, 0x85, + 0xd6, 0x45, 0x95, 0xfb, 0x2a, 0x33, 0xbe, 0x95, 0xbd, 0xba, 0x04, 0x7e, 0xde, 0xb6, 0xba, 0x35, + 0x7e, 0xa3, 0xab, 0xf2, 0xf1, 0xa7, 0xbc, 0xa8, 0xaa, 0xcc, 0x54, 0x57, 0xea, 0x5a, 0x36, 0xaa, + 0x55, 0xe7, 0xbc, 0xcb, 0x5b, 0x39, 0x58, 0x60, 0xd3, 0xea, 0x4e, 0xbb, 0x2b, 0x1b, 0x86, 0x2a, + 0x33, 0x70, 0xe2, 0xc0, 0x4b, 0x00, 0x2d, 0xe7, 0xed, 0xbb, 0xb1, 0xa7, 0x29, 0x7d, 0x55, 0xd7, + 0xba, 0x1b, 0x60, 0xc6, 0xa6, 0xbd, 0x27, 0x07, 0x78, 0x7c, 0x68, 0x4a, 0xa6, 0x22, 0x3e, 0xf6, + 0x90, 0x9e, 0x40, 0xea, 0x1f, 0x67, 0xef, 0xef, 0x02, 0xac, 0xe6, 0x6d, 0xee, 0x1b, 0x70, 0x7b, + 0xa4, 0x31, 0x27, 0x9b, 0x68, 0x1b, 0x91, 0x70, 0xf9, 0xc2, 0xbd, 0x05, 0xaf, 0x8f, 0xf4, 0x81, + 0xb2, 0x2f, 0x74, 0xb9, 0x70, 0xef, 0x80, 0x7f, 0xa4, 0xf1, 0x91, 0x73, 0x76, 0x10, 0x24, 0x94, + 0x38, 0x94, 0xe2, 0xc4, 0x89, 0xdc, 0xb2, 0x83, 0x8c, 0x76, 0x94, 0x1d, 0xf0, 0x7a, 0x4f, 0x24, + 0x67, 0xfb, 0x68, 0x73, 0x92, 0x82, 0xf1, 0x68, 0x13, 0x2f, 0x1d, 0xf7, 0x23, 0xf8, 0x70, 0x2d, + 0x44, 0xbe, 0x92, 0xcf, 0x5c, 0x8c, 0x89, 0x24, 0x62, 0x7b, 0x2c, 0x22, 0x46, 0xe5, 0x03, 0x39, + 0xc5, 0xcb, 0x1b, 0x77, 0x0f, 0xee, 0x37, 0x98, 0x52, 0x26, 0x64, 0x4c, 0x84, 0x5c, 0x33, 0x71, + 0x7f, 0xad, 0x46, 0x62, 0x1a, 0xce, 0xd3, 0x5e, 0xae, 0xff, 0x2d, 0x80, 0xf7, 0xa8, 0xcf, 0x70, + 0x7e, 0xd2, 0xeb, 0xf7, 0xf3, 0x13, 0xe2, 0xfd, 0xc0, 0xf9, 0xe2, 0x5b, 0xf8, 0x8c, 0x29, 0x74, + 0xa5, 0xea, 0x02, 0xea, 0xb6, 0xf0, 0x8b, 0xbc, 0x1e, 0xbe, 0x63, 0x5c, 0x84, 0xa6, 0x34, 0xd7, + 0xf6, 0xe2, 0x93, 0x3d, 0x9e, 0x9c, 0x9b, 0x1d, 0xc6, 0xbf, 0x9c, 0xd5, 0xce, 0xc2, 0x70, 0x66, + 0xa0, 0x95, 0xbd, 0x4a, 0x02, 0x38, 0x54, 0x9a, 0xdf, 0xa3, 0x21, 0xc5, 0x99, 0x49, 0x27, 0x43, + 0x9a, 0x04, 0xa9, 0x35, 0xfc, 0x71, 0x3c, 0x7b, 0x8b, 0x10, 0xce, 0x0c, 0x42, 0x93, 0x05, 0xa1, + 0x24, 0x40, 0xc8, 0x9a, 0xbe, 0xbf, 0x1a, 0x5e, 0x77, 0xf7, 0x3f, 0x00, 0x00, 0xff, 0xff, 0x02, + 0x12, 0xa0, 0x1c, 0xb4, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_violation_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_violation_error.pb.go new file mode 100644 index 0000000..fc8d5b4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/policy_violation_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/policy_violation_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible policy violation errors. +type PolicyViolationErrorEnum_PolicyViolationError int32 + +const ( + // Enum unspecified. + PolicyViolationErrorEnum_UNSPECIFIED PolicyViolationErrorEnum_PolicyViolationError = 0 + // The received error code is not known in this version. + PolicyViolationErrorEnum_UNKNOWN PolicyViolationErrorEnum_PolicyViolationError = 1 + // A policy was violated. See PolicyViolationDetails for more detail. + PolicyViolationErrorEnum_POLICY_ERROR PolicyViolationErrorEnum_PolicyViolationError = 2 +) + +var PolicyViolationErrorEnum_PolicyViolationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "POLICY_ERROR", +} +var PolicyViolationErrorEnum_PolicyViolationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "POLICY_ERROR": 2, +} + +func (x PolicyViolationErrorEnum_PolicyViolationError) String() string { + return proto.EnumName(PolicyViolationErrorEnum_PolicyViolationError_name, int32(x)) +} +func (PolicyViolationErrorEnum_PolicyViolationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_violation_error_6fb18553f085b239, []int{0, 0} +} + +// Container for enum describing possible policy violation errors. +type PolicyViolationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyViolationErrorEnum) Reset() { *m = PolicyViolationErrorEnum{} } +func (m *PolicyViolationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*PolicyViolationErrorEnum) ProtoMessage() {} +func (*PolicyViolationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_violation_error_6fb18553f085b239, []int{0} +} +func (m *PolicyViolationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyViolationErrorEnum.Unmarshal(m, b) +} +func (m *PolicyViolationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyViolationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *PolicyViolationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyViolationErrorEnum.Merge(dst, src) +} +func (m *PolicyViolationErrorEnum) XXX_Size() int { + return xxx_messageInfo_PolicyViolationErrorEnum.Size(m) +} +func (m *PolicyViolationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyViolationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyViolationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PolicyViolationErrorEnum)(nil), "google.ads.googleads.v1.errors.PolicyViolationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.PolicyViolationErrorEnum_PolicyViolationError", PolicyViolationErrorEnum_PolicyViolationError_name, PolicyViolationErrorEnum_PolicyViolationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/policy_violation_error.proto", fileDescriptor_policy_violation_error_6fb18553f085b239) +} + +var fileDescriptor_policy_violation_error_6fb18553f085b239 = []byte{ + // 301 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0x84, 0x40, + 0x14, 0x86, 0xd3, 0xa0, 0x60, 0x36, 0x48, 0xa4, 0x8b, 0x8a, 0xd8, 0x0b, 0x1f, 0x60, 0x06, 0xe9, + 0x6e, 0xf6, 0xca, 0xdd, 0x75, 0x17, 0x29, 0x54, 0x8c, 0x35, 0x0a, 0x41, 0xdc, 0x55, 0x06, 0xc1, + 0x9d, 0x23, 0x8e, 0x09, 0xbd, 0x4e, 0x97, 0x3d, 0x4a, 
0x8f, 0xd2, 0x75, 0x0f, 0x10, 0xce, 0xa4, + 0x57, 0xdb, 0x5e, 0xf9, 0x73, 0xfc, 0xbf, 0xff, 0xfc, 0x73, 0xd0, 0x8c, 0x01, 0xb0, 0xaa, 0x20, + 0x59, 0x2e, 0x88, 0x92, 0xbd, 0xea, 0x6c, 0x52, 0x34, 0x0d, 0x34, 0x82, 0xd4, 0x50, 0x95, 0xbb, + 0xf7, 0xb4, 0x2b, 0xa1, 0xca, 0xda, 0x12, 0x78, 0x2a, 0xe7, 0xb8, 0x6e, 0xa0, 0x05, 0x73, 0xaa, + 0x08, 0x9c, 0xe5, 0x02, 0x8f, 0x30, 0xee, 0x6c, 0xac, 0xe0, 0xdb, 0xbb, 0x21, 0xbc, 0x2e, 0x49, + 0xc6, 0x39, 0xb4, 0x32, 0x42, 0x28, 0xda, 0xda, 0xa2, 0xeb, 0x50, 0xa6, 0xc7, 0x43, 0xb8, 0xdb, + 0x63, 0x2e, 0x7f, 0xdb, 0x5b, 0x2b, 0x74, 0x75, 0xe8, 0x9f, 0x79, 0x89, 0x26, 0x1b, 0xff, 0x29, + 0x74, 0x17, 0xde, 0xca, 0x73, 0x97, 0xc6, 0x89, 0x39, 0x41, 0xe7, 0x1b, 0xff, 0xc1, 0x0f, 0x9e, + 0x7d, 0x43, 0x33, 0x0d, 0x74, 0x11, 0x06, 0x8f, 0xde, 0xe2, 0x25, 0x75, 0xa3, 0x28, 0x88, 0x0c, + 0x7d, 0xfe, 0xa3, 0x21, 0x6b, 0x07, 0x7b, 0x7c, 0xbc, 0xe8, 0xfc, 0xe6, 0xd0, 0xb2, 0xb0, 0x6f, + 0x19, 0x6a, 0xaf, 0xcb, 0x3f, 0x98, 0x41, 0x95, 0x71, 0x86, 0xa1, 0x61, 0x84, 0x15, 0x5c, 0xbe, + 0x61, 0x38, 0x59, 0x5d, 0x8a, 0xff, 0x2e, 0x38, 0x53, 0x9f, 0x0f, 0xfd, 0x74, 0xed, 0x38, 0x9f, + 0xfa, 0x74, 0xad, 0xc2, 0x9c, 0x5c, 0x60, 0x25, 0x7b, 0x15, 0xdb, 0x58, 0xae, 0x14, 0x5f, 0x83, + 0x21, 0x71, 0x72, 0x91, 0x8c, 0x86, 0x24, 0xb6, 0x13, 0x65, 0xf8, 0xd6, 0x2d, 0x35, 0xa5, 0xd4, + 0xc9, 0x05, 0xa5, 0xa3, 0x85, 0xd2, 0xd8, 0xa6, 0x54, 0x99, 0xb6, 0x67, 0xb2, 0xdd, 0xfd, 0x6f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0x9a, 0x0b, 0x2a, 0xb5, 0xde, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/query_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/query_error.pb.go new file mode 100644 index 0000000..4110a0d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/query_error.pb.go @@ -0,0 +1,373 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/query_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible query errors. +type QueryErrorEnum_QueryError int32 + +const ( + // Name unspecified. + QueryErrorEnum_UNSPECIFIED QueryErrorEnum_QueryError = 0 + // The received error code is not known in this version. + QueryErrorEnum_UNKNOWN QueryErrorEnum_QueryError = 1 + // Returned if all other query error reasons are not applicable. + QueryErrorEnum_QUERY_ERROR QueryErrorEnum_QueryError = 50 + // A condition used in the query references an invalid enum constant. + QueryErrorEnum_BAD_ENUM_CONSTANT QueryErrorEnum_QueryError = 18 + // Query contains an invalid escape sequence. + QueryErrorEnum_BAD_ESCAPE_SEQUENCE QueryErrorEnum_QueryError = 7 + // Field name is invalid. + QueryErrorEnum_BAD_FIELD_NAME QueryErrorEnum_QueryError = 12 + // Limit value is invalid (i.e. 
not a number) + QueryErrorEnum_BAD_LIMIT_VALUE QueryErrorEnum_QueryError = 15 + // Encountered number can not be parsed. + QueryErrorEnum_BAD_NUMBER QueryErrorEnum_QueryError = 5 + // Invalid operator encountered. + QueryErrorEnum_BAD_OPERATOR QueryErrorEnum_QueryError = 3 + // Parameter unknown or not supported. + QueryErrorEnum_BAD_PARAMETER_NAME QueryErrorEnum_QueryError = 61 + // Parameter have invalid value. + QueryErrorEnum_BAD_PARAMETER_VALUE QueryErrorEnum_QueryError = 62 + // Invalid resource type was specified in the FROM clause. + QueryErrorEnum_BAD_RESOURCE_TYPE_IN_FROM_CLAUSE QueryErrorEnum_QueryError = 45 + // Non-ASCII symbol encountered outside of strings. + QueryErrorEnum_BAD_SYMBOL QueryErrorEnum_QueryError = 2 + // Value is invalid. + QueryErrorEnum_BAD_VALUE QueryErrorEnum_QueryError = 4 + // Date filters fail to restrict date to a range smaller than 31 days. + // Applicable if the query is segmented by date. + QueryErrorEnum_DATE_RANGE_TOO_WIDE QueryErrorEnum_QueryError = 36 + // Expected AND between values with BETWEEN operator. + QueryErrorEnum_EXPECTED_AND QueryErrorEnum_QueryError = 30 + // Expecting ORDER BY to have BY. + QueryErrorEnum_EXPECTED_BY QueryErrorEnum_QueryError = 14 + // There was no dimension field selected. + QueryErrorEnum_EXPECTED_DIMENSION_FIELD_IN_SELECT_CLAUSE QueryErrorEnum_QueryError = 37 + // Missing filters on date related fields. + QueryErrorEnum_EXPECTED_FILTERS_ON_DATE_RANGE QueryErrorEnum_QueryError = 55 + // Missing FROM clause. + QueryErrorEnum_EXPECTED_FROM QueryErrorEnum_QueryError = 44 + // The operator used in the conditions requires the value to be a list. + QueryErrorEnum_EXPECTED_LIST QueryErrorEnum_QueryError = 41 + // Fields used in WHERE or ORDER BY clauses are missing from the SELECT + // clause. + QueryErrorEnum_EXPECTED_REFERENCED_FIELD_IN_SELECT_CLAUSE QueryErrorEnum_QueryError = 16 + // SELECT is missing at the beginning of query. + QueryErrorEnum_EXPECTED_SELECT QueryErrorEnum_QueryError = 13 + // A list was passed as a value to a condition whose operator expects a + // single value. + QueryErrorEnum_EXPECTED_SINGLE_VALUE QueryErrorEnum_QueryError = 42 + // Missing one or both values with BETWEEN operator. + QueryErrorEnum_EXPECTED_VALUE_WITH_BETWEEN_OPERATOR QueryErrorEnum_QueryError = 29 + // Invalid date format. Expected 'YYYY-MM-DD'. + QueryErrorEnum_INVALID_DATE_FORMAT QueryErrorEnum_QueryError = 38 + // Value passed was not a string when it should have been. I.e., it was a + // number or unquoted literal. + QueryErrorEnum_INVALID_STRING_VALUE QueryErrorEnum_QueryError = 57 + // A String value passed to the BETWEEN operator does not parse as a date. + QueryErrorEnum_INVALID_VALUE_WITH_BETWEEN_OPERATOR QueryErrorEnum_QueryError = 26 + // The value passed to the DURING operator is not a Date range literal + QueryErrorEnum_INVALID_VALUE_WITH_DURING_OPERATOR QueryErrorEnum_QueryError = 22 + // A non-string value was passed to the LIKE operator. + QueryErrorEnum_INVALID_VALUE_WITH_LIKE_OPERATOR QueryErrorEnum_QueryError = 56 + // An operator was provided that is inapplicable to the field being + // filtered. + QueryErrorEnum_OPERATOR_FIELD_MISMATCH QueryErrorEnum_QueryError = 35 + // A Condition was found with an empty list. + QueryErrorEnum_PROHIBITED_EMPTY_LIST_IN_CONDITION QueryErrorEnum_QueryError = 28 + // A condition used in the query references an unsupported enum constant. 
+ QueryErrorEnum_PROHIBITED_ENUM_CONSTANT QueryErrorEnum_QueryError = 54 + // Fields that are not allowed to be selected together were included in + // the SELECT clause. + QueryErrorEnum_PROHIBITED_FIELD_COMBINATION_IN_SELECT_CLAUSE QueryErrorEnum_QueryError = 31 + // A field that is not orderable was included in the ORDER BY clause. + QueryErrorEnum_PROHIBITED_FIELD_IN_ORDER_BY_CLAUSE QueryErrorEnum_QueryError = 40 + // A field that is not selectable was included in the SELECT clause. + QueryErrorEnum_PROHIBITED_FIELD_IN_SELECT_CLAUSE QueryErrorEnum_QueryError = 23 + // A field that is not filterable was included in the WHERE clause. + QueryErrorEnum_PROHIBITED_FIELD_IN_WHERE_CLAUSE QueryErrorEnum_QueryError = 24 + // Resource type specified in the FROM clause is not supported by this + // service. + QueryErrorEnum_PROHIBITED_RESOURCE_TYPE_IN_FROM_CLAUSE QueryErrorEnum_QueryError = 43 + // A field that comes from an incompatible resource was included in the + // SELECT clause. + QueryErrorEnum_PROHIBITED_RESOURCE_TYPE_IN_SELECT_CLAUSE QueryErrorEnum_QueryError = 48 + // A field that comes from an incompatible resource was included in the + // WHERE clause. + QueryErrorEnum_PROHIBITED_RESOURCE_TYPE_IN_WHERE_CLAUSE QueryErrorEnum_QueryError = 58 + // A metric incompatible with the main resource or other selected + // segmenting resources was included in the SELECT or WHERE clause. + QueryErrorEnum_PROHIBITED_METRIC_IN_SELECT_OR_WHERE_CLAUSE QueryErrorEnum_QueryError = 49 + // A segment incompatible with the main resource or other selected + // segmenting resources was included in the SELECT or WHERE clause. + QueryErrorEnum_PROHIBITED_SEGMENT_IN_SELECT_OR_WHERE_CLAUSE QueryErrorEnum_QueryError = 51 + // A segment in the SELECT clause is incompatible with a metric in the + // SELECT or WHERE clause. + QueryErrorEnum_PROHIBITED_SEGMENT_WITH_METRIC_IN_SELECT_OR_WHERE_CLAUSE QueryErrorEnum_QueryError = 53 + // The value passed to the limit clause is too low. + QueryErrorEnum_LIMIT_VALUE_TOO_LOW QueryErrorEnum_QueryError = 25 + // Query has a string containing a newline character. + QueryErrorEnum_PROHIBITED_NEWLINE_IN_STRING QueryErrorEnum_QueryError = 8 + // List contains values of different types. + QueryErrorEnum_PROHIBITED_VALUE_COMBINATION_IN_LIST QueryErrorEnum_QueryError = 10 + // The values passed to the BETWEEN operator are not of the same type. + QueryErrorEnum_PROHIBITED_VALUE_COMBINATION_WITH_BETWEEN_OPERATOR QueryErrorEnum_QueryError = 21 + // Query contains unterminated string. + QueryErrorEnum_STRING_NOT_TERMINATED QueryErrorEnum_QueryError = 6 + // Too many segments are specified in SELECT clause. + QueryErrorEnum_TOO_MANY_SEGMENTS QueryErrorEnum_QueryError = 34 + // Query is incomplete and cannot be parsed. + QueryErrorEnum_UNEXPECTED_END_OF_QUERY QueryErrorEnum_QueryError = 9 + // FROM clause cannot be specified in this query. + QueryErrorEnum_UNEXPECTED_FROM_CLAUSE QueryErrorEnum_QueryError = 47 + // Query contains one or more unrecognized fields. + QueryErrorEnum_UNRECOGNIZED_FIELD QueryErrorEnum_QueryError = 32 + // Query has an unexpected extra part. + QueryErrorEnum_UNEXPECTED_INPUT QueryErrorEnum_QueryError = 11 + // Metrics cannot be requested for a manager account. To retrieve metrics, + // issue separate requests against each client account under the manager + // account. 
+ QueryErrorEnum_REQUESTED_METRICS_FOR_MANAGER QueryErrorEnum_QueryError = 59 +) + +var QueryErrorEnum_QueryError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 50: "QUERY_ERROR", + 18: "BAD_ENUM_CONSTANT", + 7: "BAD_ESCAPE_SEQUENCE", + 12: "BAD_FIELD_NAME", + 15: "BAD_LIMIT_VALUE", + 5: "BAD_NUMBER", + 3: "BAD_OPERATOR", + 61: "BAD_PARAMETER_NAME", + 62: "BAD_PARAMETER_VALUE", + 45: "BAD_RESOURCE_TYPE_IN_FROM_CLAUSE", + 2: "BAD_SYMBOL", + 4: "BAD_VALUE", + 36: "DATE_RANGE_TOO_WIDE", + 30: "EXPECTED_AND", + 14: "EXPECTED_BY", + 37: "EXPECTED_DIMENSION_FIELD_IN_SELECT_CLAUSE", + 55: "EXPECTED_FILTERS_ON_DATE_RANGE", + 44: "EXPECTED_FROM", + 41: "EXPECTED_LIST", + 16: "EXPECTED_REFERENCED_FIELD_IN_SELECT_CLAUSE", + 13: "EXPECTED_SELECT", + 42: "EXPECTED_SINGLE_VALUE", + 29: "EXPECTED_VALUE_WITH_BETWEEN_OPERATOR", + 38: "INVALID_DATE_FORMAT", + 57: "INVALID_STRING_VALUE", + 26: "INVALID_VALUE_WITH_BETWEEN_OPERATOR", + 22: "INVALID_VALUE_WITH_DURING_OPERATOR", + 56: "INVALID_VALUE_WITH_LIKE_OPERATOR", + 35: "OPERATOR_FIELD_MISMATCH", + 28: "PROHIBITED_EMPTY_LIST_IN_CONDITION", + 54: "PROHIBITED_ENUM_CONSTANT", + 31: "PROHIBITED_FIELD_COMBINATION_IN_SELECT_CLAUSE", + 40: "PROHIBITED_FIELD_IN_ORDER_BY_CLAUSE", + 23: "PROHIBITED_FIELD_IN_SELECT_CLAUSE", + 24: "PROHIBITED_FIELD_IN_WHERE_CLAUSE", + 43: "PROHIBITED_RESOURCE_TYPE_IN_FROM_CLAUSE", + 48: "PROHIBITED_RESOURCE_TYPE_IN_SELECT_CLAUSE", + 58: "PROHIBITED_RESOURCE_TYPE_IN_WHERE_CLAUSE", + 49: "PROHIBITED_METRIC_IN_SELECT_OR_WHERE_CLAUSE", + 51: "PROHIBITED_SEGMENT_IN_SELECT_OR_WHERE_CLAUSE", + 53: "PROHIBITED_SEGMENT_WITH_METRIC_IN_SELECT_OR_WHERE_CLAUSE", + 25: "LIMIT_VALUE_TOO_LOW", + 8: "PROHIBITED_NEWLINE_IN_STRING", + 10: "PROHIBITED_VALUE_COMBINATION_IN_LIST", + 21: "PROHIBITED_VALUE_COMBINATION_WITH_BETWEEN_OPERATOR", + 6: "STRING_NOT_TERMINATED", + 34: "TOO_MANY_SEGMENTS", + 9: "UNEXPECTED_END_OF_QUERY", + 47: "UNEXPECTED_FROM_CLAUSE", + 32: "UNRECOGNIZED_FIELD", + 11: "UNEXPECTED_INPUT", + 59: "REQUESTED_METRICS_FOR_MANAGER", +} +var QueryErrorEnum_QueryError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "QUERY_ERROR": 50, + "BAD_ENUM_CONSTANT": 18, + "BAD_ESCAPE_SEQUENCE": 7, + "BAD_FIELD_NAME": 12, + "BAD_LIMIT_VALUE": 15, + "BAD_NUMBER": 5, + "BAD_OPERATOR": 3, + "BAD_PARAMETER_NAME": 61, + "BAD_PARAMETER_VALUE": 62, + "BAD_RESOURCE_TYPE_IN_FROM_CLAUSE": 45, + "BAD_SYMBOL": 2, + "BAD_VALUE": 4, + "DATE_RANGE_TOO_WIDE": 36, + "EXPECTED_AND": 30, + "EXPECTED_BY": 14, + "EXPECTED_DIMENSION_FIELD_IN_SELECT_CLAUSE": 37, + "EXPECTED_FILTERS_ON_DATE_RANGE": 55, + "EXPECTED_FROM": 44, + "EXPECTED_LIST": 41, + "EXPECTED_REFERENCED_FIELD_IN_SELECT_CLAUSE": 16, + "EXPECTED_SELECT": 13, + "EXPECTED_SINGLE_VALUE": 42, + "EXPECTED_VALUE_WITH_BETWEEN_OPERATOR": 29, + "INVALID_DATE_FORMAT": 38, + "INVALID_STRING_VALUE": 57, + "INVALID_VALUE_WITH_BETWEEN_OPERATOR": 26, + "INVALID_VALUE_WITH_DURING_OPERATOR": 22, + "INVALID_VALUE_WITH_LIKE_OPERATOR": 56, + "OPERATOR_FIELD_MISMATCH": 35, + "PROHIBITED_EMPTY_LIST_IN_CONDITION": 28, + "PROHIBITED_ENUM_CONSTANT": 54, + "PROHIBITED_FIELD_COMBINATION_IN_SELECT_CLAUSE": 31, + "PROHIBITED_FIELD_IN_ORDER_BY_CLAUSE": 40, + "PROHIBITED_FIELD_IN_SELECT_CLAUSE": 23, + "PROHIBITED_FIELD_IN_WHERE_CLAUSE": 24, + "PROHIBITED_RESOURCE_TYPE_IN_FROM_CLAUSE": 43, + "PROHIBITED_RESOURCE_TYPE_IN_SELECT_CLAUSE": 48, + "PROHIBITED_RESOURCE_TYPE_IN_WHERE_CLAUSE": 58, + "PROHIBITED_METRIC_IN_SELECT_OR_WHERE_CLAUSE": 49, + "PROHIBITED_SEGMENT_IN_SELECT_OR_WHERE_CLAUSE": 51, + 
"PROHIBITED_SEGMENT_WITH_METRIC_IN_SELECT_OR_WHERE_CLAUSE": 53, + "LIMIT_VALUE_TOO_LOW": 25, + "PROHIBITED_NEWLINE_IN_STRING": 8, + "PROHIBITED_VALUE_COMBINATION_IN_LIST": 10, + "PROHIBITED_VALUE_COMBINATION_WITH_BETWEEN_OPERATOR": 21, + "STRING_NOT_TERMINATED": 6, + "TOO_MANY_SEGMENTS": 34, + "UNEXPECTED_END_OF_QUERY": 9, + "UNEXPECTED_FROM_CLAUSE": 47, + "UNRECOGNIZED_FIELD": 32, + "UNEXPECTED_INPUT": 11, + "REQUESTED_METRICS_FOR_MANAGER": 59, +} + +func (x QueryErrorEnum_QueryError) String() string { + return proto.EnumName(QueryErrorEnum_QueryError_name, int32(x)) +} +func (QueryErrorEnum_QueryError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_error_6b95ddf2843c3ef1, []int{0, 0} +} + +// Container for enum describing possible query errors. +type QueryErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryErrorEnum) Reset() { *m = QueryErrorEnum{} } +func (m *QueryErrorEnum) String() string { return proto.CompactTextString(m) } +func (*QueryErrorEnum) ProtoMessage() {} +func (*QueryErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_query_error_6b95ddf2843c3ef1, []int{0} +} +func (m *QueryErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryErrorEnum.Unmarshal(m, b) +} +func (m *QueryErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryErrorEnum.Marshal(b, m, deterministic) +} +func (dst *QueryErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryErrorEnum.Merge(dst, src) +} +func (m *QueryErrorEnum) XXX_Size() int { + return xxx_messageInfo_QueryErrorEnum.Size(m) +} +func (m *QueryErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_QueryErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*QueryErrorEnum)(nil), "google.ads.googleads.v1.errors.QueryErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.QueryErrorEnum_QueryError", QueryErrorEnum_QueryError_name, QueryErrorEnum_QueryError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/query_error.proto", fileDescriptor_query_error_6b95ddf2843c3ef1) +} + +var fileDescriptor_query_error_6b95ddf2843c3ef1 = []byte{ + // 970 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x55, 0xff, 0x6e, 0x13, 0x47, + 0x10, 0x6e, 0x42, 0x0b, 0x65, 0x42, 0x92, 0x65, 0x21, 0x3f, 0x48, 0x43, 0x1a, 0x4c, 0x80, 0x40, + 0x12, 0x3b, 0x06, 0x95, 0x52, 0xd3, 0x56, 0xda, 0xbb, 0x1b, 0x3b, 0x2b, 0xee, 0x76, 0x2f, 0x7b, + 0x7b, 0x71, 0x8d, 0x22, 0xad, 0xd2, 0x26, 0xb2, 0x22, 0x81, 0x2f, 0xb5, 0x03, 0x52, 0x5f, 0xa7, + 0x52, 0xff, 0xe9, 0x13, 0xf4, 0x19, 0xfa, 0x28, 0x3c, 0x45, 0xb5, 0x77, 0xbe, 0xf3, 0xb9, 0x18, + 0xf3, 0xd7, 0xed, 0xce, 0x7c, 0xdf, 0x37, 0x7b, 0xb3, 0x33, 0x3b, 0xb0, 0xdf, 0x4d, 0x92, 0xee, + 0x9b, 0xb3, 0xda, 0xc9, 0xe9, 0xa0, 0x96, 0x2d, 0xed, 0xea, 0x7d, 0xbd, 0x76, 0xd6, 0xef, 0x27, + 0xfd, 0x41, 0xed, 0xf7, 0x77, 0x67, 0xfd, 0x3f, 0x4c, 0xba, 0xa9, 0x5e, 0xf4, 0x93, 0xcb, 0x84, + 0x6e, 0x64, 0xb0, 0xea, 0xc9, 0xe9, 0xa0, 0x5a, 0x30, 0xaa, 0xef, 0xeb, 0xd5, 0x8c, 0xb1, 0xb6, + 0x9e, 0x2b, 0x5e, 0x9c, 0xd7, 0x4e, 0x7a, 0xbd, 0xe4, 0xf2, 0xe4, 0xf2, 0x3c, 0xe9, 0x0d, 0x32, + 0x76, 0xe5, 0x9f, 0x79, 0x58, 0x38, 0xb4, 0x9a, 0x68, 0xd1, 0xd8, 0x7b, 0xf7, 0xb6, 0xf2, 0xd7, + 0x3c, 0xc0, 0xc8, 0x44, 0x17, 0x61, 0x2e, 0x16, 0x51, 0x88, 0x2e, 0x6f, 0x72, 0xf4, 0xc8, 
0x17, + 0x74, 0x0e, 0xae, 0xc5, 0xe2, 0x95, 0x90, 0x6d, 0x41, 0x66, 0xac, 0xf7, 0x30, 0x46, 0xd5, 0x31, + 0xa8, 0x94, 0x54, 0xe4, 0x29, 0x5d, 0x82, 0x9b, 0x0e, 0xf3, 0x0c, 0x8a, 0x38, 0x30, 0xae, 0x14, + 0x91, 0x66, 0x42, 0x13, 0x4a, 0x57, 0xe0, 0x56, 0x6a, 0x8e, 0x5c, 0x16, 0xa2, 0x89, 0xf0, 0x30, + 0x46, 0xe1, 0x22, 0xb9, 0x46, 0x29, 0x2c, 0x58, 0x47, 0x93, 0xa3, 0xef, 0x19, 0xc1, 0x02, 0x24, + 0x37, 0xe8, 0x2d, 0x58, 0xb4, 0x36, 0x9f, 0x07, 0x5c, 0x9b, 0x23, 0xe6, 0xc7, 0x48, 0x16, 0xe9, + 0x02, 0x80, 0x35, 0x8a, 0x38, 0x70, 0x50, 0x91, 0xaf, 0x28, 0x81, 0x1b, 0x76, 0x2f, 0x43, 0x54, + 0x4c, 0x4b, 0x45, 0xae, 0xd0, 0x65, 0xa0, 0xd6, 0x12, 0x32, 0xc5, 0x02, 0xd4, 0xa8, 0x32, 0xb9, + 0x9f, 0xf2, 0xd8, 0x23, 0x7b, 0x26, 0xf9, 0x33, 0xdd, 0x82, 0x4d, 0xeb, 0x50, 0x18, 0xc9, 0x58, + 0xb9, 0x68, 0x74, 0x27, 0x44, 0xc3, 0x85, 0x69, 0x2a, 0x19, 0x18, 0xd7, 0x67, 0x71, 0x84, 0x64, + 0x2f, 0x0f, 0x1c, 0x75, 0x02, 0x47, 0xfa, 0x64, 0x96, 0xce, 0xc3, 0x75, 0xbb, 0xcf, 0x44, 0xbe, + 0xb4, 0xea, 0x1e, 0xd3, 0x68, 0x14, 0x13, 0x2d, 0x34, 0x5a, 0x4a, 0xd3, 0xe6, 0x1e, 0x92, 0x2d, + 0x7b, 0x40, 0xfc, 0x25, 0x44, 0x57, 0xa3, 0x67, 0x98, 0xf0, 0xc8, 0x86, 0x4d, 0x56, 0x61, 0x71, + 0x3a, 0x64, 0x81, 0xee, 0xc1, 0xe3, 0xc2, 0xe0, 0xf1, 0x00, 0x45, 0xc4, 0xa5, 0x18, 0xe6, 0x82, + 0x0b, 0x13, 0xa1, 0x8f, 0xae, 0xce, 0x4f, 0xf2, 0x80, 0x56, 0x60, 0xa3, 0x80, 0x37, 0xb9, 0xaf, + 0x51, 0x45, 0x46, 0x0a, 0x33, 0x0a, 0x4f, 0xbe, 0xa7, 0x37, 0x61, 0x7e, 0x84, 0x51, 0x32, 0x20, + 0xbb, 0x63, 0x26, 0x9f, 0x47, 0x9a, 0x3c, 0xa6, 0x55, 0x78, 0x52, 0x98, 0x14, 0x36, 0x51, 0xd9, + 0xdb, 0xf0, 0x3e, 0x15, 0x99, 0xd8, 0x1b, 0x29, 0xf0, 0x99, 0x8f, 0xcc, 0xd3, 0x3b, 0xb0, 0x34, + 0x32, 0x72, 0xd1, 0xf2, 0x71, 0x98, 0x94, 0x27, 0x74, 0x1b, 0xb6, 0x0a, 0x57, 0x6a, 0x33, 0x6d, + 0xae, 0x0f, 0x8c, 0x83, 0xba, 0x8d, 0x28, 0x46, 0x97, 0x76, 0xd7, 0xa6, 0x8f, 0x8b, 0x23, 0xe6, + 0x73, 0x2f, 0xfb, 0x8f, 0xa6, 0x54, 0x01, 0xd3, 0xe4, 0x21, 0x5d, 0x85, 0xdb, 0xb9, 0x23, 0xd2, + 0x8a, 0x8b, 0xd6, 0x50, 0xfc, 0x07, 0xfa, 0x08, 0xee, 0xe7, 0x9e, 0x69, 0xda, 0x6b, 0xf4, 0x21, + 0x54, 0x26, 0x00, 0xbd, 0x38, 0x55, 0x2b, 0x70, 0xcb, 0xb6, 0x0e, 0x26, 0xe0, 0x7c, 0xfe, 0x0a, + 0x47, 0xa8, 0x17, 0xf4, 0x1b, 0x58, 0xc9, 0x77, 0xc3, 0x44, 0x05, 0x3c, 0x0a, 0x98, 0x76, 0x0f, + 0xc8, 0x7d, 0x1b, 0x2a, 0x54, 0xf2, 0x80, 0x3b, 0xdc, 0xfe, 0x32, 0x06, 0xa1, 0xee, 0xa4, 0xb9, + 0xb6, 0xc9, 0x74, 0xa5, 0xf0, 0xb8, 0xe6, 0x52, 0x90, 0x75, 0xba, 0x0e, 0xab, 0x65, 0xdc, 0x58, + 0x97, 0x3c, 0xa7, 0x75, 0xd8, 0x2b, 0x79, 0xb3, 0x20, 0xae, 0x0c, 0x1c, 0x2e, 0x98, 0xe5, 0x7f, + 0x7c, 0x33, 0xdf, 0xda, 0x64, 0x7c, 0x44, 0xe1, 0xc2, 0x48, 0xe5, 0xa1, 0x32, 0x4e, 0x27, 0x07, + 0x6e, 0xd3, 0x07, 0x70, 0x6f, 0x12, 0x70, 0x5c, 0x6f, 0xc5, 0xe6, 0x62, 0x12, 0xac, 0x7d, 0x80, + 0x0a, 0x73, 0xd4, 0x2a, 0xdd, 0x81, 0x47, 0x25, 0xd4, 0xd4, 0x06, 0xda, 0xb1, 0x55, 0x3e, 0x0d, + 0x3c, 0x7e, 0x82, 0x7d, 0xba, 0x0b, 0xdb, 0xd3, 0xe0, 0x63, 0x27, 0x69, 0xd0, 0x1a, 0xec, 0x94, + 0xd0, 0x01, 0x6a, 0xc5, 0xdd, 0x92, 0xaa, 0x54, 0xe3, 0x84, 0x3a, 0xdd, 0x87, 0xdd, 0x12, 0x21, + 0xc2, 0x56, 0x80, 0x42, 0x4f, 0x61, 0x3c, 0xa3, 0x3f, 0xc2, 0x8b, 0x09, 0x8c, 0xb4, 0x46, 0x3e, + 0x1b, 0xef, 0x3b, 0x5b, 0xe0, 0xa5, 0x87, 0x2c, 0x7d, 0x20, 0x7c, 0xd9, 0x26, 0x77, 0xe8, 0x26, + 0xac, 0x97, 0x64, 0x05, 0xb6, 0x7d, 0x2e, 0xb2, 0x84, 0xa4, 0xe5, 0x4e, 0xbe, 0xb6, 0x5d, 0x54, + 0x42, 0x64, 0xfc, 0xff, 0x95, 0x43, 0xda, 0xcf, 0x40, 0x9f, 0xc3, 0xd3, 0xa9, 0xc8, 0xc9, 0x1d, + 0xb2, 0x64, 0x5b, 0x78, 0xd8, 0x5c, 0x42, 0x6a, 0xa3, 0x51, 0x05, 0x96, 0x81, 0x1e, 0xb9, 0x6a, + 0x1f, 0x72, 0x7b, 
0xd6, 0x80, 0x89, 0x4e, 0xfe, 0xcf, 0x11, 0xa9, 0xd8, 0x2e, 0x88, 0x45, 0xd1, + 0xdb, 0x28, 0x3c, 0x23, 0x9b, 0x26, 0x1d, 0x01, 0xe4, 0x3a, 0x5d, 0x83, 0xe5, 0x92, 0xb3, 0x5c, + 0x05, 0x35, 0xfb, 0x3a, 0xc7, 0x42, 0xa1, 0x2b, 0x5b, 0x82, 0xbf, 0xce, 0x4b, 0x8b, 0x6c, 0xd2, + 0xdb, 0x40, 0x4a, 0x1c, 0x2e, 0xc2, 0x58, 0x93, 0x39, 0x7a, 0x0f, 0xee, 0x2a, 0x3b, 0x24, 0xa2, + 0xd1, 0xad, 0x46, 0xf6, 0x6d, 0xb0, 0xe7, 0x61, 0x2d, 0x54, 0xe4, 0xa5, 0xf3, 0x61, 0x06, 0x2a, + 0xbf, 0x25, 0x6f, 0xab, 0xd3, 0xe7, 0x9f, 0xb3, 0x38, 0x9a, 0x65, 0xa1, 0x1d, 0x79, 0xe1, 0xcc, + 0x6b, 0x6f, 0x48, 0xe9, 0x26, 0x6f, 0x4e, 0x7a, 0xdd, 0x6a, 0xd2, 0xef, 0xd6, 0xba, 0x67, 0xbd, + 0x74, 0x20, 0xe6, 0x43, 0xf7, 0xe2, 0x7c, 0xf0, 0xa9, 0x19, 0xfc, 0x32, 0xfb, 0xfc, 0x39, 0x7b, + 0xa5, 0xc5, 0xd8, 0xdf, 0xb3, 0x1b, 0xad, 0x4c, 0x8c, 0x9d, 0x0e, 0xaa, 0xd9, 0xd2, 0xae, 0x8e, + 0xea, 0xd5, 0x34, 0xe4, 0xe0, 0xdf, 0x1c, 0x70, 0xcc, 0x4e, 0x07, 0xc7, 0x05, 0xe0, 0xf8, 0xa8, + 0x7e, 0x9c, 0x01, 0x3e, 0xcc, 0x56, 0x32, 0x6b, 0xa3, 0xc1, 0x4e, 0x07, 0x8d, 0x46, 0x01, 0x69, + 0x34, 0x8e, 0xea, 0x8d, 0x46, 0x06, 0xfa, 0xf5, 0x6a, 0x7a, 0xba, 0x67, 0xff, 0x05, 0x00, 0x00, + 0xff, 0xff, 0x00, 0x01, 0x26, 0x00, 0x20, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/quota_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/quota_error.pb.go new file mode 100644 index 0000000..07b26a1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/quota_error.pb.go @@ -0,0 +1,123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/quota_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible quota errors. +type QuotaErrorEnum_QuotaError int32 + +const ( + // Enum unspecified. + QuotaErrorEnum_UNSPECIFIED QuotaErrorEnum_QuotaError = 0 + // The received error code is not known in this version. + QuotaErrorEnum_UNKNOWN QuotaErrorEnum_QuotaError = 1 + // Too many requests. + QuotaErrorEnum_RESOURCE_EXHAUSTED QuotaErrorEnum_QuotaError = 2 + // Access is prohibited. + QuotaErrorEnum_ACCESS_PROHIBITED QuotaErrorEnum_QuotaError = 3 + // Too many requests in a short amount of time. 
+ QuotaErrorEnum_RESOURCE_TEMPORARILY_EXHAUSTED QuotaErrorEnum_QuotaError = 4 +) + +var QuotaErrorEnum_QuotaError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "RESOURCE_EXHAUSTED", + 3: "ACCESS_PROHIBITED", + 4: "RESOURCE_TEMPORARILY_EXHAUSTED", +} +var QuotaErrorEnum_QuotaError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "RESOURCE_EXHAUSTED": 2, + "ACCESS_PROHIBITED": 3, + "RESOURCE_TEMPORARILY_EXHAUSTED": 4, +} + +func (x QuotaErrorEnum_QuotaError) String() string { + return proto.EnumName(QuotaErrorEnum_QuotaError_name, int32(x)) +} +func (QuotaErrorEnum_QuotaError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_quota_error_731d878185294ca1, []int{0, 0} +} + +// Container for enum describing possible quota errors. +type QuotaErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaErrorEnum) Reset() { *m = QuotaErrorEnum{} } +func (m *QuotaErrorEnum) String() string { return proto.CompactTextString(m) } +func (*QuotaErrorEnum) ProtoMessage() {} +func (*QuotaErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_error_731d878185294ca1, []int{0} +} +func (m *QuotaErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaErrorEnum.Unmarshal(m, b) +} +func (m *QuotaErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaErrorEnum.Marshal(b, m, deterministic) +} +func (dst *QuotaErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaErrorEnum.Merge(dst, src) +} +func (m *QuotaErrorEnum) XXX_Size() int { + return xxx_messageInfo_QuotaErrorEnum.Size(m) +} +func (m *QuotaErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*QuotaErrorEnum)(nil), "google.ads.googleads.v1.errors.QuotaErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.QuotaErrorEnum_QuotaError", QuotaErrorEnum_QuotaError_name, QuotaErrorEnum_QuotaError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/quota_error.proto", fileDescriptor_quota_error_731d878185294ca1) +} + +var fileDescriptor_quota_error_731d878185294ca1 = []byte{ + // 335 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xcf, 0x4e, 0xb3, 0x40, + 0x14, 0xc5, 0x3f, 0xe8, 0x17, 0x4d, 0xa6, 0x89, 0xad, 0x93, 0xe8, 0xc2, 0x98, 0x2e, 0x78, 0x80, + 0x41, 0xe2, 0x6e, 0x5c, 0x4d, 0xe9, 0xd8, 0x12, 0x15, 0x10, 0x4a, 0xfd, 0x13, 0x92, 0x06, 0xa5, + 0x99, 0x34, 0x69, 0x67, 0x2a, 0x43, 0xbb, 0xf3, 0x19, 0x7c, 0x07, 0x97, 0x3e, 0x8a, 0x8f, 0xd2, + 0xa7, 0x30, 0xc3, 0x08, 0x75, 0xa3, 0x2b, 0x0e, 0x37, 0xbf, 0x73, 0xe6, 0x9e, 0x0b, 0xce, 0x98, + 0x10, 0x6c, 0x31, 0xb3, 0xb3, 0x5c, 0xda, 0x5a, 0x2a, 0xb5, 0x71, 0xec, 0x59, 0x51, 0x88, 0x42, + 0xda, 0x2f, 0x6b, 0x51, 0x66, 0xd3, 0xea, 0x07, 0xad, 0x0a, 0x51, 0x0a, 0xd8, 0xd3, 0x18, 0xca, + 0x72, 0x89, 0x1a, 0x07, 0xda, 0x38, 0x48, 0x3b, 0x4e, 0x4e, 0xeb, 0xc4, 0xd5, 0xdc, 0xce, 0x38, + 0x17, 0x65, 0x56, 0xce, 0x05, 0x97, 0xda, 0x6d, 0xbd, 0x19, 0xe0, 0xe0, 0x56, 0x65, 0x52, 0x45, + 0x53, 0xbe, 0x5e, 0x5a, 0xaf, 0x00, 0xec, 0x26, 0xb0, 0x03, 0xda, 0x89, 0x1f, 0x87, 0xd4, 0xf5, + 0x2e, 0x3d, 0x3a, 0xe8, 0xfe, 0x83, 0x6d, 0xb0, 0x9f, 0xf8, 0x57, 0x7e, 0x70, 0xe7, 0x77, 0x0d, + 0x78, 0x0c, 0x60, 0x44, 0xe3, 0x20, 0x89, 0x5c, 0x3a, 0xa5, 0xf7, 0x23, 0x92, 0xc4, 0x63, 
0x3a, + 0xe8, 0x9a, 0xf0, 0x08, 0x1c, 0x12, 0xd7, 0xa5, 0x71, 0x3c, 0x0d, 0xa3, 0x60, 0xe4, 0xf5, 0x3d, + 0x35, 0x6e, 0x41, 0x0b, 0xf4, 0x1a, 0x7c, 0x4c, 0x6f, 0xc2, 0x20, 0x22, 0x91, 0x77, 0xfd, 0xf0, + 0xc3, 0xfa, 0xbf, 0xbf, 0x35, 0x80, 0xf5, 0x2c, 0x96, 0xe8, 0xef, 0x5a, 0xfd, 0xce, 0x6e, 0xc7, + 0x50, 0x35, 0x09, 0x8d, 0xc7, 0xc1, 0xb7, 0x85, 0x89, 0x45, 0xc6, 0x19, 0x12, 0x05, 0xb3, 0xd9, + 0x8c, 0x57, 0x3d, 0xeb, 0x5b, 0xae, 0xe6, 0xf2, 0xb7, 0xd3, 0x5e, 0xe8, 0xcf, 0xbb, 0xd9, 0x1a, + 0x12, 0xf2, 0x61, 0xf6, 0x86, 0x3a, 0x8c, 0xe4, 0x12, 0x69, 0xa9, 0xd4, 0xc4, 0x41, 0xd5, 0x93, + 0xf2, 0xb3, 0x06, 0x52, 0x92, 0xcb, 0xb4, 0x01, 0xd2, 0x89, 0x93, 0x6a, 0x60, 0x6b, 0x5a, 0x7a, + 0x8a, 0x31, 0xc9, 0x25, 0xc6, 0x0d, 0x82, 0xf1, 0xc4, 0xc1, 0x58, 0x43, 0x4f, 0x7b, 0xd5, 0x76, + 0xe7, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4f, 0x72, 0x66, 0x2a, 0xf7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/range_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/range_error.pb.go new file mode 100644 index 0000000..6322ff8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/range_error.pb.go @@ -0,0 +1,117 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/range_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible range errors. +type RangeErrorEnum_RangeError int32 + +const ( + // Enum unspecified. + RangeErrorEnum_UNSPECIFIED RangeErrorEnum_RangeError = 0 + // The received error code is not known in this version. + RangeErrorEnum_UNKNOWN RangeErrorEnum_RangeError = 1 + // Too low. + RangeErrorEnum_TOO_LOW RangeErrorEnum_RangeError = 2 + // Too high. + RangeErrorEnum_TOO_HIGH RangeErrorEnum_RangeError = 3 +) + +var RangeErrorEnum_RangeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TOO_LOW", + 3: "TOO_HIGH", +} +var RangeErrorEnum_RangeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TOO_LOW": 2, + "TOO_HIGH": 3, +} + +func (x RangeErrorEnum_RangeError) String() string { + return proto.EnumName(RangeErrorEnum_RangeError_name, int32(x)) +} +func (RangeErrorEnum_RangeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_range_error_706dbda1b4701d63, []int{0, 0} +} + +// Container for enum describing possible range errors. 
+type RangeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RangeErrorEnum) Reset() { *m = RangeErrorEnum{} } +func (m *RangeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*RangeErrorEnum) ProtoMessage() {} +func (*RangeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_range_error_706dbda1b4701d63, []int{0} +} +func (m *RangeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RangeErrorEnum.Unmarshal(m, b) +} +func (m *RangeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RangeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *RangeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RangeErrorEnum.Merge(dst, src) +} +func (m *RangeErrorEnum) XXX_Size() int { + return xxx_messageInfo_RangeErrorEnum.Size(m) +} +func (m *RangeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RangeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RangeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RangeErrorEnum)(nil), "google.ads.googleads.v1.errors.RangeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.RangeErrorEnum_RangeError", RangeErrorEnum_RangeError_name, RangeErrorEnum_RangeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/range_error.proto", fileDescriptor_range_error_706dbda1b4701d63) +} + +var fileDescriptor_range_error_706dbda1b4701d63 = []byte{ + // 292 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xf3, 0x40, + 0x14, 0x85, 0xff, 0xa4, 0xf0, 0x2b, 0x53, 0xb1, 0x21, 0x4b, 0x91, 0x2e, 0xf2, 0x00, 0x33, 0x06, + 0x77, 0xe3, 0x2a, 0xb5, 0x31, 0x0d, 0x4a, 0x12, 0xd4, 0x26, 0x20, 0x81, 0x32, 0x9a, 0x30, 0x04, + 0xda, 0x99, 0x30, 0x13, 0xfb, 0x40, 0x2e, 0x7d, 0x14, 0x1f, 0xa5, 0x4f, 0x21, 0x93, 0x6b, 0xd2, + 0x95, 0xae, 0xe6, 0xdc, 0xe1, 0x3b, 0xe7, 0x1e, 0x2e, 0xba, 0xe2, 0x52, 0xf2, 0x6d, 0x4d, 0x58, + 0xa5, 0x09, 0x48, 0xa3, 0xf6, 0x3e, 0xa9, 0x95, 0x92, 0x4a, 0x13, 0xc5, 0x04, 0xaf, 0x37, 0xfd, + 0x80, 0x5b, 0x25, 0x3b, 0xe9, 0xce, 0x01, 0xc3, 0xac, 0xd2, 0x78, 0x74, 0xe0, 0xbd, 0x8f, 0xc1, + 0x71, 0x71, 0x39, 0x24, 0xb6, 0x0d, 0x61, 0x42, 0xc8, 0x8e, 0x75, 0x8d, 0x14, 0x1a, 0xdc, 0x5e, + 0x81, 0xce, 0x1f, 0x4d, 0x64, 0x68, 0xe0, 0x50, 0xbc, 0xef, 0xbc, 0x10, 0xa1, 0xe3, 0x8f, 0x3b, + 0x43, 0xd3, 0x75, 0xf2, 0x94, 0x85, 0xb7, 0xf1, 0x5d, 0x1c, 0x2e, 0x9d, 0x7f, 0xee, 0x14, 0x9d, + 0xac, 0x93, 0xfb, 0x24, 0x2d, 0x12, 0xc7, 0x32, 0xc3, 0x73, 0x9a, 0x6e, 0x1e, 0xd2, 0xc2, 0xb1, + 0xdd, 0x33, 0x74, 0x6a, 0x86, 0x55, 0x1c, 0xad, 0x9c, 0xc9, 0xe2, 0x60, 0x21, 0xef, 0x4d, 0xee, + 0xf0, 0xdf, 0xed, 0x16, 0xb3, 0xe3, 0xae, 0xcc, 0x14, 0xca, 0xac, 0x97, 0xe5, 0x8f, 0x85, 0xcb, + 0x2d, 0x13, 0x1c, 0x4b, 0xc5, 0x09, 0xaf, 0x45, 0x5f, 0x77, 0x38, 0x49, 0xdb, 0xe8, 0xdf, 0x2e, + 0x74, 0x03, 0xcf, 0x87, 0x3d, 0x89, 0x82, 0xe0, 0xd3, 0x9e, 0x47, 0x10, 0x16, 0x54, 0x1a, 0x83, + 0x34, 0x2a, 0xf7, 0x71, 0xbf, 0x52, 0x7f, 0x0d, 0x40, 0x19, 0x54, 0xba, 0x1c, 0x81, 0x32, 0xf7, + 0x4b, 0x00, 0x0e, 0xb6, 0x07, 0xbf, 0x94, 0x06, 0x95, 0xa6, 0x74, 0x44, 0x28, 0xcd, 0x7d, 0x4a, + 0x01, 0x7a, 0xfd, 0xdf, 0xb7, 0xbb, 0xfe, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x05, 0xb6, 0x0f, 0x2d, + 0xbe, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/recommendation_error.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/recommendation_error.pb.go new file mode 100644 index 0000000..b815b49 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/recommendation_error.pb.go @@ -0,0 +1,183 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/recommendation_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible errors from applying a recommendation. +type RecommendationErrorEnum_RecommendationError int32 + +const ( + // Enum unspecified. + RecommendationErrorEnum_UNSPECIFIED RecommendationErrorEnum_RecommendationError = 0 + // The received error code is not known in this version. + RecommendationErrorEnum_UNKNOWN RecommendationErrorEnum_RecommendationError = 1 + // The specified budget amount is too low e.g. lower than minimum currency + // unit or lower than ad group minimum cost-per-click. + RecommendationErrorEnum_BUDGET_AMOUNT_TOO_SMALL RecommendationErrorEnum_RecommendationError = 2 + // The specified budget amount is too large. + RecommendationErrorEnum_BUDGET_AMOUNT_TOO_LARGE RecommendationErrorEnum_RecommendationError = 3 + // The specified budget amount is not a valid amount. e.g. not a multiple + // of minimum currency unit. + RecommendationErrorEnum_INVALID_BUDGET_AMOUNT RecommendationErrorEnum_RecommendationError = 4 + // The specified keyword or ad violates ad policy. + RecommendationErrorEnum_POLICY_ERROR RecommendationErrorEnum_RecommendationError = 5 + // The specified bid amount is not valid. e.g. too many fractional digits, + // or negative amount. + RecommendationErrorEnum_INVALID_BID_AMOUNT RecommendationErrorEnum_RecommendationError = 6 + // The number of keywords in ad group have reached the maximum allowed. + RecommendationErrorEnum_ADGROUP_KEYWORD_LIMIT RecommendationErrorEnum_RecommendationError = 7 + // The recommendation requested to apply has already been applied. + RecommendationErrorEnum_RECOMMENDATION_ALREADY_APPLIED RecommendationErrorEnum_RecommendationError = 8 + // The recommendation requested to apply has been invalidated. + RecommendationErrorEnum_RECOMMENDATION_INVALIDATED RecommendationErrorEnum_RecommendationError = 9 + // The number of operations in a single request exceeds the maximum allowed. + RecommendationErrorEnum_TOO_MANY_OPERATIONS RecommendationErrorEnum_RecommendationError = 10 + // There are no operations in the request. + RecommendationErrorEnum_NO_OPERATIONS RecommendationErrorEnum_RecommendationError = 11 + // Operations with multiple recommendation types are not supported when + // partial failure mode is not enabled. + RecommendationErrorEnum_DIFFERENT_TYPES_NOT_SUPPORTED RecommendationErrorEnum_RecommendationError = 12 + // Request contains multiple operations with the same resource_name. 
+ RecommendationErrorEnum_DUPLICATE_RESOURCE_NAME RecommendationErrorEnum_RecommendationError = 13 + // The recommendation requested to dismiss has already been dismissed. + RecommendationErrorEnum_RECOMMENDATION_ALREADY_DISMISSED RecommendationErrorEnum_RecommendationError = 14 + // The recommendation apply request was malformed and invalid. + RecommendationErrorEnum_INVALID_APPLY_REQUEST RecommendationErrorEnum_RecommendationError = 15 +) + +var RecommendationErrorEnum_RecommendationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "BUDGET_AMOUNT_TOO_SMALL", + 3: "BUDGET_AMOUNT_TOO_LARGE", + 4: "INVALID_BUDGET_AMOUNT", + 5: "POLICY_ERROR", + 6: "INVALID_BID_AMOUNT", + 7: "ADGROUP_KEYWORD_LIMIT", + 8: "RECOMMENDATION_ALREADY_APPLIED", + 9: "RECOMMENDATION_INVALIDATED", + 10: "TOO_MANY_OPERATIONS", + 11: "NO_OPERATIONS", + 12: "DIFFERENT_TYPES_NOT_SUPPORTED", + 13: "DUPLICATE_RESOURCE_NAME", + 14: "RECOMMENDATION_ALREADY_DISMISSED", + 15: "INVALID_APPLY_REQUEST", +} +var RecommendationErrorEnum_RecommendationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "BUDGET_AMOUNT_TOO_SMALL": 2, + "BUDGET_AMOUNT_TOO_LARGE": 3, + "INVALID_BUDGET_AMOUNT": 4, + "POLICY_ERROR": 5, + "INVALID_BID_AMOUNT": 6, + "ADGROUP_KEYWORD_LIMIT": 7, + "RECOMMENDATION_ALREADY_APPLIED": 8, + "RECOMMENDATION_INVALIDATED": 9, + "TOO_MANY_OPERATIONS": 10, + "NO_OPERATIONS": 11, + "DIFFERENT_TYPES_NOT_SUPPORTED": 12, + "DUPLICATE_RESOURCE_NAME": 13, + "RECOMMENDATION_ALREADY_DISMISSED": 14, + "INVALID_APPLY_REQUEST": 15, +} + +func (x RecommendationErrorEnum_RecommendationError) String() string { + return proto.EnumName(RecommendationErrorEnum_RecommendationError_name, int32(x)) +} +func (RecommendationErrorEnum_RecommendationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_recommendation_error_7a76f155b17969f8, []int{0, 0} +} + +// Container for enum describing possible errors from applying a recommendation. 
+type RecommendationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecommendationErrorEnum) Reset() { *m = RecommendationErrorEnum{} } +func (m *RecommendationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*RecommendationErrorEnum) ProtoMessage() {} +func (*RecommendationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_error_7a76f155b17969f8, []int{0} +} +func (m *RecommendationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecommendationErrorEnum.Unmarshal(m, b) +} +func (m *RecommendationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecommendationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *RecommendationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecommendationErrorEnum.Merge(dst, src) +} +func (m *RecommendationErrorEnum) XXX_Size() int { + return xxx_messageInfo_RecommendationErrorEnum.Size(m) +} +func (m *RecommendationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RecommendationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RecommendationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RecommendationErrorEnum)(nil), "google.ads.googleads.v1.errors.RecommendationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.RecommendationErrorEnum_RecommendationError", RecommendationErrorEnum_RecommendationError_name, RecommendationErrorEnum_RecommendationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/recommendation_error.proto", fileDescriptor_recommendation_error_7a76f155b17969f8) +} + +var fileDescriptor_recommendation_error_7a76f155b17969f8 = []byte{ + // 515 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6e, 0xd3, 0x30, + 0x14, 0x86, 0x59, 0x0b, 0x1b, 0x78, 0x1b, 0x33, 0x9e, 0x60, 0x30, 0xa0, 0x82, 0x8a, 0xeb, 0x44, + 0x15, 0x57, 0x84, 0x2b, 0xb7, 0x3e, 0xad, 0xac, 0x25, 0xb6, 0x71, 0x92, 0x4e, 0x41, 0x95, 0xac, + 0xb2, 0x54, 0x51, 0xa5, 0x35, 0xa9, 0x92, 0xb2, 0x07, 0xe2, 0x92, 0x87, 0xe0, 0x01, 0x78, 0x04, + 0x1e, 0x81, 0x5b, 0x5e, 0x00, 0x39, 0x59, 0xab, 0x0d, 0x75, 0x5c, 0xe5, 0xe8, 0xf8, 0xfb, 0x7f, + 0x9f, 0xe3, 0xfc, 0xe8, 0x43, 0x56, 0x14, 0xd9, 0xe5, 0xcc, 0x9d, 0xa6, 0x95, 0xdb, 0x94, 0xb6, + 0xba, 0xea, 0xb9, 0xb3, 0xb2, 0x2c, 0xca, 0xca, 0x2d, 0x67, 0x17, 0xc5, 0x62, 0x31, 0xcb, 0xd3, + 0xe9, 0x6a, 0x5e, 0xe4, 0xa6, 0xee, 0x3a, 0xcb, 0xb2, 0x58, 0x15, 0xa4, 0xd3, 0xf0, 0xce, 0x34, + 0xad, 0x9c, 0x8d, 0xd4, 0xb9, 0xea, 0x39, 0x8d, 0xf4, 0xf4, 0xd5, 0xda, 0x7a, 0x39, 0x77, 0xa7, + 0x79, 0x5e, 0xac, 0x6a, 0x8b, 0xaa, 0x51, 0x77, 0x7f, 0xb5, 0xd1, 0x89, 0xbe, 0x65, 0x0e, 0x56, + 0x06, 0xf9, 0xd7, 0x45, 0xf7, 0x47, 0x1b, 0x1d, 0x6f, 0x39, 0x23, 0x47, 0x68, 0x3f, 0x16, 0xa1, + 0x82, 0x01, 0x1f, 0x72, 0x60, 0xf8, 0x1e, 0xd9, 0x47, 0x7b, 0xb1, 0x38, 0x13, 0xf2, 0x5c, 0xe0, + 0x1d, 0xf2, 0x12, 0x9d, 0xf4, 0x63, 0x36, 0x82, 0xc8, 0xd0, 0x40, 0xc6, 0x22, 0x32, 0x91, 0x94, + 0x26, 0x0c, 0xa8, 0xef, 0xe3, 0xd6, 0xf6, 0x43, 0x9f, 0xea, 0x11, 0xe0, 0x36, 0x79, 0x81, 0x9e, + 0x72, 0x31, 0xa6, 0x3e, 0x67, 0xe6, 0x16, 0x84, 0xef, 0x13, 0x8c, 0x0e, 0x94, 0xf4, 0xf9, 0x20, + 0x31, 0xa0, 0xb5, 0xd4, 0xf8, 0x01, 0x79, 0x86, 0xc8, 0x06, 0xe6, 0x6c, 0x4d, 0xee, 0x5a, 0x13, + 0xca, 0x46, 0x5a, 0xc6, 0xca, 0x9c, 0x41, 0x72, 0x2e, 0x35, 0x33, 0x3e, 0x0f, 0x78, 0x84, 0xf7, + 0x48, 0x17, 0x75, 
0x34, 0x0c, 0x64, 0x10, 0x80, 0x60, 0x34, 0xe2, 0x52, 0x18, 0xea, 0x6b, 0xa0, + 0x2c, 0x31, 0x54, 0x29, 0xdf, 0xae, 0xf2, 0x90, 0x74, 0xd0, 0xe9, 0x3f, 0xcc, 0xf5, 0x2d, 0x34, + 0x02, 0x86, 0x1f, 0x91, 0x13, 0x74, 0x6c, 0x47, 0x0e, 0xa8, 0x48, 0x8c, 0x54, 0xa0, 0x6b, 0x26, + 0xc4, 0x88, 0x3c, 0x41, 0x87, 0x42, 0xde, 0x6c, 0xed, 0x93, 0xb7, 0xe8, 0x35, 0xe3, 0xc3, 0x21, + 0x68, 0xb0, 0x8b, 0x26, 0x0a, 0x42, 0x23, 0x64, 0x64, 0xc2, 0x58, 0x29, 0xa9, 0xad, 0xdd, 0x81, + 0x7d, 0x0f, 0x16, 0x2b, 0x9f, 0x0f, 0x68, 0x04, 0x46, 0x43, 0x28, 0x63, 0x3d, 0x00, 0x23, 0x68, + 0x00, 0xf8, 0x90, 0xbc, 0x43, 0x6f, 0xee, 0x98, 0x97, 0xf1, 0x30, 0xe0, 0x61, 0x08, 0x0c, 0x3f, + 0xbe, 0xf9, 0x6a, 0x76, 0x8d, 0xc4, 0x68, 0xf8, 0x14, 0x43, 0x18, 0xe1, 0xa3, 0xfe, 0x9f, 0x1d, + 0xd4, 0xbd, 0x28, 0x16, 0xce, 0xff, 0x13, 0xd2, 0x7f, 0xbe, 0xe5, 0x27, 0x2b, 0x9b, 0x0e, 0xb5, + 0xf3, 0x99, 0x5d, 0x6b, 0xb3, 0xe2, 0x72, 0x9a, 0x67, 0x4e, 0x51, 0x66, 0x6e, 0x36, 0xcb, 0xeb, + 0xec, 0xac, 0x83, 0xba, 0x9c, 0x57, 0x77, 0xe5, 0xf6, 0x63, 0xf3, 0xf9, 0xd6, 0x6a, 0x8f, 0x28, + 0xfd, 0xde, 0xea, 0x8c, 0x1a, 0x33, 0x9a, 0x56, 0x4e, 0x53, 0xda, 0x6a, 0xdc, 0x73, 0xea, 0x2b, + 0xab, 0x9f, 0x6b, 0x60, 0x42, 0xd3, 0x6a, 0xb2, 0x01, 0x26, 0xe3, 0xde, 0xa4, 0x01, 0x7e, 0xb7, + 0xba, 0x4d, 0xd7, 0xf3, 0x68, 0x5a, 0x79, 0xde, 0x06, 0xf1, 0xbc, 0x71, 0xcf, 0xf3, 0x1a, 0xe8, + 0xcb, 0x6e, 0x3d, 0xdd, 0xfb, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4f, 0xb2, 0x52, 0x7c, 0x54, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/region_code_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/region_code_error.pb.go new file mode 100644 index 0000000..8934d24 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/region_code_error.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/region_code_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible region code errors. +type RegionCodeErrorEnum_RegionCodeError int32 + +const ( + // Enum unspecified. + RegionCodeErrorEnum_UNSPECIFIED RegionCodeErrorEnum_RegionCodeError = 0 + // The received error code is not known in this version. + RegionCodeErrorEnum_UNKNOWN RegionCodeErrorEnum_RegionCodeError = 1 + // Invalid region code. 
+ RegionCodeErrorEnum_INVALID_REGION_CODE RegionCodeErrorEnum_RegionCodeError = 2 +) + +var RegionCodeErrorEnum_RegionCodeError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_REGION_CODE", +} +var RegionCodeErrorEnum_RegionCodeError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_REGION_CODE": 2, +} + +func (x RegionCodeErrorEnum_RegionCodeError) String() string { + return proto.EnumName(RegionCodeErrorEnum_RegionCodeError_name, int32(x)) +} +func (RegionCodeErrorEnum_RegionCodeError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_region_code_error_3ef96732f9283c65, []int{0, 0} +} + +// Container for enum describing possible region code errors. +type RegionCodeErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegionCodeErrorEnum) Reset() { *m = RegionCodeErrorEnum{} } +func (m *RegionCodeErrorEnum) String() string { return proto.CompactTextString(m) } +func (*RegionCodeErrorEnum) ProtoMessage() {} +func (*RegionCodeErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_region_code_error_3ef96732f9283c65, []int{0} +} +func (m *RegionCodeErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegionCodeErrorEnum.Unmarshal(m, b) +} +func (m *RegionCodeErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegionCodeErrorEnum.Marshal(b, m, deterministic) +} +func (dst *RegionCodeErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegionCodeErrorEnum.Merge(dst, src) +} +func (m *RegionCodeErrorEnum) XXX_Size() int { + return xxx_messageInfo_RegionCodeErrorEnum.Size(m) +} +func (m *RegionCodeErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RegionCodeErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RegionCodeErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RegionCodeErrorEnum)(nil), "google.ads.googleads.v1.errors.RegionCodeErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.RegionCodeErrorEnum_RegionCodeError", RegionCodeErrorEnum_RegionCodeError_name, RegionCodeErrorEnum_RegionCodeError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/region_code_error.proto", fileDescriptor_region_code_error_3ef96732f9283c65) +} + +var fileDescriptor_region_code_error_3ef96732f9283c65 = []byte{ + // 304 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xd1, 0x4a, 0xf3, 0x30, + 0x1c, 0xc5, 0xbf, 0xf5, 0x03, 0x85, 0xec, 0x62, 0xa3, 0x13, 0x04, 0x91, 0x5d, 0xf4, 0x01, 0x12, + 0x8a, 0xe0, 0x45, 0xbc, 0xca, 0xd6, 0x38, 0x8b, 0x92, 0x8d, 0xc9, 0x2a, 0x48, 0xa1, 0xd4, 0x25, + 0x84, 0xc2, 0x96, 0xff, 0x48, 0xe6, 0x1e, 0xc8, 0x4b, 0x1f, 0xc5, 0x47, 0xf1, 0xc6, 0x57, 0x90, + 0x36, 0xb6, 0x17, 0x03, 0xbd, 0xca, 0xe1, 0xf0, 0x3b, 0x27, 0x87, 0x3f, 0xba, 0xd6, 0x00, 0x7a, + 0xa3, 0x48, 0x29, 0x1d, 0xf1, 0xb2, 0x56, 0x87, 0x98, 0x28, 0x6b, 0xc1, 0x3a, 0x62, 0x95, 0xae, + 0xc0, 0x14, 0x6b, 0x90, 0xaa, 0x68, 0x2c, 0xbc, 0xb3, 0xb0, 0x87, 0x70, 0xec, 0x61, 0x5c, 0x4a, + 0x87, 0xbb, 0x1c, 0x3e, 0xc4, 0xd8, 0xe7, 0x2e, 0x2e, 0xdb, 0xde, 0x5d, 0x45, 0x4a, 0x63, 0x60, + 0x5f, 0xee, 0x2b, 0x30, 0xce, 0xa7, 0xa3, 0x02, 0x8d, 0x96, 0x4d, 0xf1, 0x14, 0xa4, 0xe2, 0x75, + 0x82, 0x9b, 0xd7, 0x6d, 0x74, 0x87, 0x06, 0x47, 0x76, 0x38, 0x40, 0xfd, 0x95, 0x78, 0x5c, 0xf0, + 0x69, 0x7a, 0x9b, 0xf2, 0x64, 0xf8, 0x2f, 0xec, 0xa3, 0xd3, 0x95, 0xb8, 0x17, 0xf3, 0x27, 0x31, + 
0xec, 0x85, 0xe7, 0x68, 0x94, 0x8a, 0x8c, 0x3d, 0xa4, 0x49, 0xb1, 0xe4, 0xb3, 0x74, 0x2e, 0x8a, + 0xe9, 0x3c, 0xe1, 0xc3, 0x60, 0xf2, 0xd5, 0x43, 0xd1, 0x1a, 0xb6, 0xf8, 0xef, 0x95, 0x93, 0xb3, + 0xa3, 0xef, 0x16, 0xf5, 0xba, 0x45, 0xef, 0x39, 0xf9, 0xc9, 0x69, 0xd8, 0x94, 0x46, 0x63, 0xb0, + 0x9a, 0x68, 0x65, 0x9a, 0xed, 0xed, 0x95, 0x76, 0x95, 0xfb, 0xed, 0x68, 0x37, 0xfe, 0x79, 0x0b, + 0xfe, 0xcf, 0x18, 0x7b, 0x0f, 0xc6, 0x33, 0x5f, 0xc6, 0xa4, 0xc3, 0x5e, 0xd6, 0x2a, 0x8b, 0x71, + 0xf3, 0xa5, 0xfb, 0x68, 0x81, 0x9c, 0x49, 0x97, 0x77, 0x40, 0x9e, 0xc5, 0xb9, 0x07, 0x3e, 0x83, + 0xc8, 0xbb, 0x94, 0x32, 0xe9, 0x28, 0xed, 0x10, 0x4a, 0xb3, 0x98, 0x52, 0x0f, 0xbd, 0x9c, 0x34, + 0xeb, 0xae, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0x9a, 0x2b, 0x1e, 0xbc, 0xd1, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/request_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/request_error.pb.go new file mode 100644 index 0000000..c94ecd8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/request_error.pb.go @@ -0,0 +1,201 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/request_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible request errors. +type RequestErrorEnum_RequestError int32 + +const ( + // Enum unspecified. + RequestErrorEnum_UNSPECIFIED RequestErrorEnum_RequestError = 0 + // The received error code is not known in this version. + RequestErrorEnum_UNKNOWN RequestErrorEnum_RequestError = 1 + // Resource name is required for this request. + RequestErrorEnum_RESOURCE_NAME_MISSING RequestErrorEnum_RequestError = 3 + // Resource name provided is malformed. + RequestErrorEnum_RESOURCE_NAME_MALFORMED RequestErrorEnum_RequestError = 4 + // Resource name provided is malformed. + RequestErrorEnum_BAD_RESOURCE_ID RequestErrorEnum_RequestError = 17 + // Customer ID is invalid. + RequestErrorEnum_INVALID_CUSTOMER_ID RequestErrorEnum_RequestError = 16 + // Mutate operation should have either create, update, or remove specified. + RequestErrorEnum_OPERATION_REQUIRED RequestErrorEnum_RequestError = 5 + // Requested resource not found. + RequestErrorEnum_RESOURCE_NOT_FOUND RequestErrorEnum_RequestError = 6 + // Next page token specified in user request is invalid. + RequestErrorEnum_INVALID_PAGE_TOKEN RequestErrorEnum_RequestError = 7 + // Next page token specified in user request has expired. + RequestErrorEnum_EXPIRED_PAGE_TOKEN RequestErrorEnum_RequestError = 8 + // Page size specified in user request is invalid. + RequestErrorEnum_INVALID_PAGE_SIZE RequestErrorEnum_RequestError = 22 + // Required field is missing. + RequestErrorEnum_REQUIRED_FIELD_MISSING RequestErrorEnum_RequestError = 9 + // The field cannot be modified because it's immutable. 
It's also possible + // that the field can be modified using 'create' operation but not 'update'. + RequestErrorEnum_IMMUTABLE_FIELD RequestErrorEnum_RequestError = 11 + // Received too many entries in request. + RequestErrorEnum_TOO_MANY_MUTATE_OPERATIONS RequestErrorEnum_RequestError = 13 + // Request cannot be executed by a manager account. + RequestErrorEnum_CANNOT_BE_EXECUTED_BY_MANAGER_ACCOUNT RequestErrorEnum_RequestError = 14 + // Mutate request was attempting to modify a readonly field. + // For instance, Budget fields can be requested for Ad Group, + // but are read-only for adGroups:mutate. + RequestErrorEnum_CANNOT_MODIFY_FOREIGN_FIELD RequestErrorEnum_RequestError = 15 + // Enum value is not permitted. + RequestErrorEnum_INVALID_ENUM_VALUE RequestErrorEnum_RequestError = 18 + // The developer-token parameter is required for all requests. + RequestErrorEnum_DEVELOPER_TOKEN_PARAMETER_MISSING RequestErrorEnum_RequestError = 19 + // The login-customer-id parameter is required for this request. + RequestErrorEnum_LOGIN_CUSTOMER_ID_PARAMETER_MISSING RequestErrorEnum_RequestError = 20 + // page_token is set in the validate only request + RequestErrorEnum_VALIDATE_ONLY_REQUEST_HAS_PAGE_TOKEN RequestErrorEnum_RequestError = 21 +) + +var RequestErrorEnum_RequestError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "RESOURCE_NAME_MISSING", + 4: "RESOURCE_NAME_MALFORMED", + 17: "BAD_RESOURCE_ID", + 16: "INVALID_CUSTOMER_ID", + 5: "OPERATION_REQUIRED", + 6: "RESOURCE_NOT_FOUND", + 7: "INVALID_PAGE_TOKEN", + 8: "EXPIRED_PAGE_TOKEN", + 22: "INVALID_PAGE_SIZE", + 9: "REQUIRED_FIELD_MISSING", + 11: "IMMUTABLE_FIELD", + 13: "TOO_MANY_MUTATE_OPERATIONS", + 14: "CANNOT_BE_EXECUTED_BY_MANAGER_ACCOUNT", + 15: "CANNOT_MODIFY_FOREIGN_FIELD", + 18: "INVALID_ENUM_VALUE", + 19: "DEVELOPER_TOKEN_PARAMETER_MISSING", + 20: "LOGIN_CUSTOMER_ID_PARAMETER_MISSING", + 21: "VALIDATE_ONLY_REQUEST_HAS_PAGE_TOKEN", +} +var RequestErrorEnum_RequestError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "RESOURCE_NAME_MISSING": 3, + "RESOURCE_NAME_MALFORMED": 4, + "BAD_RESOURCE_ID": 17, + "INVALID_CUSTOMER_ID": 16, + "OPERATION_REQUIRED": 5, + "RESOURCE_NOT_FOUND": 6, + "INVALID_PAGE_TOKEN": 7, + "EXPIRED_PAGE_TOKEN": 8, + "INVALID_PAGE_SIZE": 22, + "REQUIRED_FIELD_MISSING": 9, + "IMMUTABLE_FIELD": 11, + "TOO_MANY_MUTATE_OPERATIONS": 13, + "CANNOT_BE_EXECUTED_BY_MANAGER_ACCOUNT": 14, + "CANNOT_MODIFY_FOREIGN_FIELD": 15, + "INVALID_ENUM_VALUE": 18, + "DEVELOPER_TOKEN_PARAMETER_MISSING": 19, + "LOGIN_CUSTOMER_ID_PARAMETER_MISSING": 20, + "VALIDATE_ONLY_REQUEST_HAS_PAGE_TOKEN": 21, +} + +func (x RequestErrorEnum_RequestError) String() string { + return proto.EnumName(RequestErrorEnum_RequestError_name, int32(x)) +} +func (RequestErrorEnum_RequestError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_request_error_898c7e116ba56a69, []int{0, 0} +} + +// Container for enum describing possible request errors. 
+type RequestErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestErrorEnum) Reset() { *m = RequestErrorEnum{} } +func (m *RequestErrorEnum) String() string { return proto.CompactTextString(m) } +func (*RequestErrorEnum) ProtoMessage() {} +func (*RequestErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_request_error_898c7e116ba56a69, []int{0} +} +func (m *RequestErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestErrorEnum.Unmarshal(m, b) +} +func (m *RequestErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestErrorEnum.Marshal(b, m, deterministic) +} +func (dst *RequestErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestErrorEnum.Merge(dst, src) +} +func (m *RequestErrorEnum) XXX_Size() int { + return xxx_messageInfo_RequestErrorEnum.Size(m) +} +func (m *RequestErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_RequestErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RequestErrorEnum)(nil), "google.ads.googleads.v1.errors.RequestErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.RequestErrorEnum_RequestError", RequestErrorEnum_RequestError_name, RequestErrorEnum_RequestError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/request_error.proto", fileDescriptor_request_error_898c7e116ba56a69) +} + +var fileDescriptor_request_error_898c7e116ba56a69 = []byte{ + // 573 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x53, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0x66, 0x3f, 0x6c, 0xe0, 0x01, 0xf3, 0x3c, 0xb6, 0x89, 0x0d, 0x0d, 0x51, 0x98, 0x80, 0x9b, + 0x54, 0x85, 0xbb, 0x70, 0xe5, 0xc6, 0xa7, 0xc1, 0x5a, 0x62, 0x07, 0x27, 0x29, 0xeb, 0x54, 0xc9, + 0x2a, 0xb4, 0xaa, 0x26, 0x6d, 0xc9, 0x48, 0xba, 0x3d, 0x10, 0x97, 0x3c, 0x01, 0xcf, 0x80, 0xc4, + 0x8b, 0x20, 0x1e, 0x02, 0x39, 0x6e, 0x43, 0x40, 0xc0, 0x55, 0x4e, 0xbe, 0xf3, 0x7d, 0xe7, 0x3b, + 0xe7, 0xe8, 0x18, 0xbd, 0x9c, 0xe6, 0xf9, 0xf4, 0x7c, 0xd2, 0x1e, 0x8d, 0xcb, 0xb6, 0x0d, 0x4d, + 0x74, 0xdd, 0x69, 0x4f, 0x8a, 0x22, 0x2f, 0xca, 0x76, 0x31, 0xf9, 0x78, 0x35, 0x29, 0x67, 0xba, + 0xfa, 0x75, 0x2e, 0x8b, 0x7c, 0x96, 0x93, 0x43, 0x4b, 0x74, 0x46, 0xe3, 0xd2, 0xa9, 0x35, 0xce, + 0x75, 0xc7, 0xb1, 0x9a, 0xfd, 0x87, 0x8b, 0x9a, 0x97, 0x67, 0xed, 0x51, 0x96, 0xe5, 0xb3, 0xd1, + 0xec, 0x2c, 0xcf, 0x4a, 0xab, 0x6e, 0x7d, 0x5b, 0x45, 0x58, 0xd9, 0xaa, 0x60, 0xf8, 0x90, 0x5d, + 0x5d, 0xb4, 0xbe, 0xac, 0xa2, 0x3b, 0x4d, 0x90, 0x6c, 0xa2, 0x8d, 0x54, 0xc4, 0x11, 0x78, 0xbc, + 0xc7, 0x81, 0xe1, 0x1b, 0x64, 0x03, 0xad, 0xa7, 0xe2, 0x58, 0xc8, 0x77, 0x02, 0x2f, 0x91, 0x07, + 0x68, 0x47, 0x41, 0x2c, 0x53, 0xe5, 0x81, 0x16, 0x34, 0x04, 0x1d, 0xf2, 0x38, 0xe6, 0xc2, 0xc7, + 0x2b, 0xe4, 0x00, 0xed, 0xfd, 0x91, 0xa2, 0x41, 0x4f, 0xaa, 0x10, 0x18, 0x5e, 0x25, 0xdb, 0x68, + 0xb3, 0x4b, 0x99, 0xae, 0x09, 0x9c, 0xe1, 0x2d, 0xb2, 0x87, 0xb6, 0xb9, 0xe8, 0xd3, 0x80, 0x33, + 0xed, 0xa5, 0x71, 0x22, 0x43, 0x50, 0x26, 0x81, 0xc9, 0x2e, 0x22, 0x32, 0x02, 0x45, 0x13, 0x2e, + 0x85, 0x56, 0xf0, 0x36, 0xe5, 0x0a, 0x18, 0xbe, 0x69, 0xf0, 0x5f, 0x16, 0x32, 0xd1, 0x3d, 0x99, + 0x0a, 0x86, 0xd7, 0x0c, 0xbe, 0x28, 0x14, 0x51, 0x1f, 0x74, 0x22, 0x8f, 0x41, 0xe0, 0x75, 0x83, + 0xc3, 0x49, 0x64, 0xc4, 0x4d, 0xfc, 0x16, 0xd9, 0x41, 0x5b, 0xbf, 0xf1, 0x63, 0x7e, 0x0a, 0x78, + 0x97, 0xec, 0xa3, 0xdd, 
0x85, 0x99, 0xee, 0x71, 0x08, 0x58, 0x3d, 0xdd, 0x6d, 0x33, 0x00, 0x0f, + 0xc3, 0x34, 0xa1, 0xdd, 0x00, 0x6c, 0x12, 0x6f, 0x90, 0x43, 0xb4, 0x9f, 0x48, 0xa9, 0x43, 0x2a, + 0x06, 0xda, 0xe4, 0x12, 0xd0, 0x75, 0xdf, 0x31, 0xbe, 0x4b, 0x5e, 0xa0, 0x23, 0x8f, 0x0a, 0xd3, + 0x69, 0x17, 0x34, 0x9c, 0x80, 0x97, 0x26, 0xc0, 0x74, 0x77, 0x60, 0x14, 0xd4, 0x07, 0xa5, 0xa9, + 0xe7, 0xc9, 0x54, 0x24, 0xf8, 0x1e, 0x79, 0x84, 0x0e, 0xe6, 0xd4, 0x50, 0x32, 0xde, 0x1b, 0xe8, + 0x9e, 0x54, 0xc0, 0x7d, 0x31, 0xf7, 0xda, 0x6c, 0xce, 0x08, 0x22, 0x0d, 0x75, 0x9f, 0x06, 0x29, + 0x60, 0x42, 0x8e, 0xd0, 0x63, 0x06, 0x7d, 0x08, 0x8c, 0xb1, 0x1d, 0x50, 0x47, 0x54, 0xd1, 0x10, + 0x12, 0x50, 0x75, 0xff, 0xdb, 0xe4, 0x19, 0x7a, 0x12, 0x48, 0x9f, 0x8b, 0xe6, 0xa6, 0xff, 0x42, + 0xbc, 0x4f, 0x9e, 0xa3, 0xa7, 0x95, 0x4b, 0x35, 0x8c, 0x08, 0x06, 0xd5, 0xfe, 0x21, 0x4e, 0xf4, + 0x1b, 0x1a, 0x37, 0xb7, 0xb8, 0xd3, 0xfd, 0xb1, 0x84, 0x5a, 0x1f, 0xf2, 0x0b, 0xe7, 0xff, 0x47, + 0xd9, 0xdd, 0x6a, 0x9e, 0x57, 0x64, 0x2e, 0x31, 0x5a, 0x3a, 0x65, 0x73, 0xd1, 0x34, 0x3f, 0x1f, + 0x65, 0x53, 0x27, 0x2f, 0xa6, 0xed, 0xe9, 0x24, 0xab, 0xee, 0x74, 0xf1, 0x1a, 0x2e, 0xcf, 0xca, + 0x7f, 0x3d, 0x8e, 0xd7, 0xf6, 0xf3, 0x69, 0x79, 0xc5, 0xa7, 0xf4, 0xf3, 0xf2, 0xa1, 0x6f, 0x8b, + 0xd1, 0x71, 0xe9, 0xd8, 0xd0, 0x44, 0xfd, 0x8e, 0x53, 0x59, 0x96, 0x5f, 0x17, 0x84, 0x21, 0x1d, + 0x97, 0xc3, 0x9a, 0x30, 0xec, 0x77, 0x86, 0x96, 0xf0, 0x7d, 0xb9, 0x65, 0x51, 0xd7, 0xa5, 0xe3, + 0xd2, 0x75, 0x6b, 0x8a, 0xeb, 0xf6, 0x3b, 0xae, 0x6b, 0x49, 0xef, 0xd7, 0xaa, 0xee, 0x5e, 0xfd, + 0x0c, 0x00, 0x00, 0xff, 0xff, 0x4d, 0x0c, 0x84, 0xce, 0xb9, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_access_denied_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_access_denied_error.pb.go new file mode 100644 index 0000000..492e3ef --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_access_denied_error.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/resource_access_denied_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible resource access denied errors. +type ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError int32 + +const ( + // Enum unspecified. + ResourceAccessDeniedErrorEnum_UNSPECIFIED ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError = 0 + // The received error code is not known in this version. + ResourceAccessDeniedErrorEnum_UNKNOWN ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError = 1 + // User did not have write access. 
+ ResourceAccessDeniedErrorEnum_WRITE_ACCESS_DENIED ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError = 3 +) + +var ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "WRITE_ACCESS_DENIED", +} +var ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "WRITE_ACCESS_DENIED": 3, +} + +func (x ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError) String() string { + return proto.EnumName(ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError_name, int32(x)) +} +func (ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resource_access_denied_error_f45d8d71096e1d66, []int{0, 0} +} + +// Container for enum describing possible resource access denied errors. +type ResourceAccessDeniedErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceAccessDeniedErrorEnum) Reset() { *m = ResourceAccessDeniedErrorEnum{} } +func (m *ResourceAccessDeniedErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ResourceAccessDeniedErrorEnum) ProtoMessage() {} +func (*ResourceAccessDeniedErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_resource_access_denied_error_f45d8d71096e1d66, []int{0} +} +func (m *ResourceAccessDeniedErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceAccessDeniedErrorEnum.Unmarshal(m, b) +} +func (m *ResourceAccessDeniedErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceAccessDeniedErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ResourceAccessDeniedErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceAccessDeniedErrorEnum.Merge(dst, src) +} +func (m *ResourceAccessDeniedErrorEnum) XXX_Size() int { + return xxx_messageInfo_ResourceAccessDeniedErrorEnum.Size(m) +} +func (m *ResourceAccessDeniedErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceAccessDeniedErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceAccessDeniedErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ResourceAccessDeniedErrorEnum)(nil), "google.ads.googleads.v1.errors.ResourceAccessDeniedErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError", ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError_name, ResourceAccessDeniedErrorEnum_ResourceAccessDeniedError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/resource_access_denied_error.proto", fileDescriptor_resource_access_denied_error_f45d8d71096e1d66) +} + +var fileDescriptor_resource_access_denied_error_f45d8d71096e1d66 = []byte{ + // 312 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x6a, 0x32, 0x31, + 0x14, 0x85, 0x7f, 0x15, 0xfe, 0x42, 0x5c, 0x54, 0xa6, 0x8b, 0xd2, 0xd2, 0xba, 0x98, 0x07, 0x48, + 0x18, 0xba, 0x4b, 0x57, 0xd1, 0x49, 0x45, 0x0a, 0x53, 0xd1, 0xaa, 0x50, 0x06, 0x86, 0x74, 0x12, + 0xc2, 0x80, 0x26, 0x92, 0xab, 0x3e, 0x50, 0x97, 0x7d, 0x94, 0x3e, 0x4a, 0xdf, 0xa0, 0xbb, 0x32, + 0x89, 0xce, 0x6e, 0xba, 0xca, 0x21, 0xf9, 0xee, 0x39, 0x27, 0x17, 0x31, 0x6d, 0xad, 0xde, 0x28, + 0x22, 0x24, 0x90, 0x20, 0x6b, 0x75, 0x4c, 0x88, 0x72, 0xce, 0x3a, 0x20, 0x4e, 0x81, 0x3d, 0xb8, + 0x52, 0x15, 0xa2, 0x2c, 0x15, 0x40, 0x21, 
0x95, 0xa9, 0x94, 0x2c, 0xfc, 0x2b, 0xde, 0x39, 0xbb, + 0xb7, 0xd1, 0x30, 0xcc, 0x61, 0x21, 0x01, 0x37, 0x16, 0xf8, 0x98, 0xe0, 0x60, 0x71, 0x7b, 0x77, + 0x8e, 0xd8, 0x55, 0x44, 0x18, 0x63, 0xf7, 0x62, 0x5f, 0x59, 0x03, 0x61, 0x3a, 0x06, 0x74, 0x3f, + 0x3f, 0x65, 0x30, 0x1f, 0x91, 0xfa, 0x04, 0x5e, 0xcf, 0x72, 0x73, 0xd8, 0xc6, 0x73, 0x74, 0xd3, + 0x0a, 0x44, 0x97, 0xa8, 0xbf, 0xcc, 0x16, 0x33, 0x3e, 0x9e, 0x3e, 0x4d, 0x79, 0x3a, 0xf8, 0x17, + 0xf5, 0xd1, 0xc5, 0x32, 0x7b, 0xce, 0x5e, 0xd6, 0xd9, 0xa0, 0x13, 0x5d, 0xa3, 0xab, 0xf5, 0x7c, + 0xfa, 0xca, 0x0b, 0x36, 0x1e, 0xf3, 0xc5, 0xa2, 0x48, 0x79, 0x56, 0x53, 0xbd, 0xd1, 0x4f, 0x07, + 0xc5, 0xa5, 0xdd, 0xe2, 0xbf, 0x9b, 0x8f, 0x86, 0xad, 0xc1, 0xb3, 0xba, 0xfb, 0xac, 0xf3, 0x96, + 0x9e, 0x1c, 0xb4, 0xdd, 0x08, 0xa3, 0xb1, 0x75, 0x9a, 0x68, 0x65, 0xfc, 0xcf, 0xce, 0xeb, 0xdc, + 0x55, 0xd0, 0xb6, 0xdd, 0xc7, 0x70, 0x7c, 0x74, 0x7b, 0x13, 0xc6, 0x3e, 0xbb, 0xc3, 0x49, 0x30, + 0x63, 0x12, 0x70, 0x90, 0xb5, 0x5a, 0x25, 0xd8, 0x47, 0xc2, 0xd7, 0x19, 0xc8, 0x99, 0x84, 0xbc, + 0x01, 0xf2, 0x55, 0x92, 0x07, 0xe0, 0xbb, 0x1b, 0x87, 0x5b, 0x4a, 0x99, 0x04, 0x4a, 0x1b, 0x84, + 0xd2, 0x55, 0x42, 0x69, 0x80, 0xde, 0xff, 0xfb, 0x76, 0x0f, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x6c, 0xe6, 0xc2, 0x05, 0xfa, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_count_limit_exceeded_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_count_limit_exceeded_error.pb.go new file mode 100644 index 0000000..252b84c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/resource_count_limit_exceeded_error.pb.go @@ -0,0 +1,169 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/resource_count_limit_exceeded_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible resource count limit exceeded errors. +type ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError int32 + +const ( + // Enum unspecified. + ResourceCountLimitExceededErrorEnum_UNSPECIFIED ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 0 + // The received error code is not known in this version. + ResourceCountLimitExceededErrorEnum_UNKNOWN ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 1 + // Indicates that this request would exceed the number of allowed resources + // for the Google Ads account. The exact resource type and limit being + // checked can be inferred from accountLimitType. + ResourceCountLimitExceededErrorEnum_ACCOUNT_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 2 + // Indicates that this request would exceed the number of allowed resources + // in a Campaign. 
The exact resource type and limit being checked can be + // inferred from accountLimitType, and the numeric id of the + // Campaign involved is given by enclosingId. + ResourceCountLimitExceededErrorEnum_CAMPAIGN_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 3 + // Indicates that this request would exceed the number of allowed resources + // in an ad group. The exact resource type and limit being checked can be + // inferred from accountLimitType, and the numeric id of the + // ad group involved is given by enclosingId. + ResourceCountLimitExceededErrorEnum_ADGROUP_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 4 + // Indicates that this request would exceed the number of allowed resources + // in an ad group ad. The exact resource type and limit being checked can + // be inferred from accountLimitType, and the enclosingId + // contains the ad group id followed by the ad id, separated by a single + // comma (,). + ResourceCountLimitExceededErrorEnum_AD_GROUP_AD_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 5 + // Indicates that this request would exceed the number of allowed resources + // in an ad group criterion. The exact resource type and limit being checked + // can be inferred from accountLimitType, and the + // enclosingId contains the ad group id followed by the + // criterion id, separated by a single comma (,). + ResourceCountLimitExceededErrorEnum_AD_GROUP_CRITERION_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 6 + // Indicates that this request would exceed the number of allowed resources + // in this shared set. The exact resource type and limit being checked can + // be inferred from accountLimitType, and the numeric id of the + // shared set involved is given by enclosingId. + ResourceCountLimitExceededErrorEnum_SHARED_SET_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 7 + // Exceeds a limit related to a matching function. + ResourceCountLimitExceededErrorEnum_MATCHING_FUNCTION_LIMIT ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 8 + // The response for this request would exceed the maximum number of rows + // that can be returned. 
+ ResourceCountLimitExceededErrorEnum_RESPONSE_ROW_LIMIT_EXCEEDED ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError = 9 +) + +var ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ACCOUNT_LIMIT", + 3: "CAMPAIGN_LIMIT", + 4: "ADGROUP_LIMIT", + 5: "AD_GROUP_AD_LIMIT", + 6: "AD_GROUP_CRITERION_LIMIT", + 7: "SHARED_SET_LIMIT", + 8: "MATCHING_FUNCTION_LIMIT", + 9: "RESPONSE_ROW_LIMIT_EXCEEDED", +} +var ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ACCOUNT_LIMIT": 2, + "CAMPAIGN_LIMIT": 3, + "ADGROUP_LIMIT": 4, + "AD_GROUP_AD_LIMIT": 5, + "AD_GROUP_CRITERION_LIMIT": 6, + "SHARED_SET_LIMIT": 7, + "MATCHING_FUNCTION_LIMIT": 8, + "RESPONSE_ROW_LIMIT_EXCEEDED": 9, +} + +func (x ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError) String() string { + return proto.EnumName(ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError_name, int32(x)) +} +func (ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resource_count_limit_exceeded_error_4e9c7c26ba2a3e8f, []int{0, 0} +} + +// Container for enum describing possible resource count limit exceeded errors. +type ResourceCountLimitExceededErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceCountLimitExceededErrorEnum) Reset() { *m = ResourceCountLimitExceededErrorEnum{} } +func (m *ResourceCountLimitExceededErrorEnum) String() string { return proto.CompactTextString(m) } +func (*ResourceCountLimitExceededErrorEnum) ProtoMessage() {} +func (*ResourceCountLimitExceededErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_resource_count_limit_exceeded_error_4e9c7c26ba2a3e8f, []int{0} +} +func (m *ResourceCountLimitExceededErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceCountLimitExceededErrorEnum.Unmarshal(m, b) +} +func (m *ResourceCountLimitExceededErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceCountLimitExceededErrorEnum.Marshal(b, m, deterministic) +} +func (dst *ResourceCountLimitExceededErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceCountLimitExceededErrorEnum.Merge(dst, src) +} +func (m *ResourceCountLimitExceededErrorEnum) XXX_Size() int { + return xxx_messageInfo_ResourceCountLimitExceededErrorEnum.Size(m) +} +func (m *ResourceCountLimitExceededErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceCountLimitExceededErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceCountLimitExceededErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ResourceCountLimitExceededErrorEnum)(nil), "google.ads.googleads.v1.errors.ResourceCountLimitExceededErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError", ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError_name, ResourceCountLimitExceededErrorEnum_ResourceCountLimitExceededError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/resource_count_limit_exceeded_error.proto", fileDescriptor_resource_count_limit_exceeded_error_4e9c7c26ba2a3e8f) +} + +var fileDescriptor_resource_count_limit_exceeded_error_4e9c7c26ba2a3e8f = []byte{ + // 428 bytes of a 
gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xdf, 0x6a, 0xd4, 0x40, + 0x18, 0xc5, 0xdd, 0x54, 0x5b, 0x9d, 0xa2, 0xa6, 0x83, 0xa2, 0xd8, 0xd2, 0xc2, 0xea, 0xf5, 0x84, + 0xe0, 0x5d, 0xbc, 0x9a, 0x4e, 0xa6, 0xd9, 0xc1, 0xee, 0x24, 0xe4, 0xcf, 0x56, 0x64, 0x61, 0x88, + 0x9b, 0x21, 0x04, 0x76, 0x33, 0x4b, 0x26, 0x5b, 0xbc, 0xf6, 0x51, 0xbc, 0xec, 0xa3, 0xf8, 0x28, + 0xbe, 0x84, 0x92, 0x4c, 0xb2, 0x5e, 0xd9, 0x5e, 0xe5, 0x70, 0xf8, 0x7d, 0xe7, 0x84, 0x6f, 0x3e, + 0x30, 0x2b, 0x95, 0x2a, 0xd7, 0xd2, 0xc9, 0x0b, 0xed, 0x18, 0xd9, 0xa9, 0x5b, 0xd7, 0x91, 0x4d, + 0xa3, 0x1a, 0xed, 0x34, 0x52, 0xab, 0x5d, 0xb3, 0x92, 0x62, 0xa5, 0x76, 0x75, 0x2b, 0xd6, 0xd5, + 0xa6, 0x6a, 0x85, 0xfc, 0xbe, 0x92, 0xb2, 0x90, 0x85, 0xe8, 0x21, 0xb4, 0x6d, 0x54, 0xab, 0xe0, + 0xb9, 0x19, 0x47, 0x79, 0xa1, 0xd1, 0x3e, 0x09, 0xdd, 0xba, 0xc8, 0x24, 0xbd, 0x3b, 0x1b, 0x9b, + 0xb6, 0x95, 0x93, 0xd7, 0xb5, 0x6a, 0xf3, 0xb6, 0x52, 0xb5, 0x36, 0xd3, 0xd3, 0x3b, 0x0b, 0xbc, + 0x8f, 0x87, 0x2e, 0xd2, 0x55, 0x5d, 0x77, 0x4d, 0x74, 0x28, 0xa2, 0x5d, 0x04, 0xad, 0x77, 0x9b, + 0xe9, 0x0f, 0x0b, 0x5c, 0x3c, 0xc0, 0xc1, 0x97, 0xe0, 0x38, 0xe3, 0x49, 0x44, 0x09, 0xbb, 0x62, + 0xd4, 0xb7, 0x1f, 0xc1, 0x63, 0x70, 0x94, 0xf1, 0xcf, 0x3c, 0xbc, 0xe1, 0xf6, 0x04, 0x9e, 0x80, + 0xe7, 0x98, 0x90, 0x30, 0xe3, 0xa9, 0xb8, 0x66, 0x73, 0x96, 0xda, 0x16, 0x84, 0xe0, 0x05, 0xc1, + 0xf3, 0x08, 0xb3, 0x80, 0x0f, 0xde, 0x41, 0x8f, 0xf9, 0x41, 0x1c, 0x66, 0xd1, 0x60, 0x3d, 0x86, + 0xaf, 0xc1, 0x09, 0xf6, 0x85, 0xf1, 0xb0, 0x3f, 0xd8, 0x4f, 0xe0, 0x19, 0x78, 0xbb, 0xb7, 0x49, + 0xcc, 0x52, 0x1a, 0xb3, 0x70, 0xcc, 0x39, 0x84, 0xaf, 0x80, 0x9d, 0xcc, 0x70, 0x4c, 0x7d, 0x91, + 0xd0, 0xb1, 0xf1, 0x08, 0x9e, 0x82, 0x37, 0x73, 0x9c, 0x92, 0x19, 0xe3, 0x81, 0xb8, 0xca, 0x38, + 0x49, 0xff, 0x8d, 0x3c, 0x85, 0x17, 0xe0, 0x34, 0xa6, 0x49, 0x14, 0xf2, 0x84, 0x8a, 0x38, 0xbc, + 0x31, 0xbe, 0xa0, 0x5f, 0x08, 0xa5, 0x3e, 0xf5, 0xed, 0x67, 0x97, 0x7f, 0x26, 0x60, 0xba, 0x52, + 0x1b, 0x74, 0xff, 0xc6, 0x2f, 0x3f, 0x3c, 0xb0, 0xa8, 0xa8, 0xdb, 0x7c, 0x34, 0xf9, 0xea, 0x0f, + 0x39, 0xa5, 0x5a, 0xe7, 0x75, 0x89, 0x54, 0x53, 0x3a, 0xa5, 0xac, 0xfb, 0x77, 0x19, 0x6f, 0x62, + 0x5b, 0xe9, 0xff, 0x9d, 0xc8, 0x27, 0xf3, 0xf9, 0x69, 0x1d, 0x04, 0x18, 0xdf, 0x59, 0xe7, 0x81, + 0x09, 0xc3, 0x85, 0x46, 0x46, 0x76, 0x6a, 0xe1, 0xa2, 0xbe, 0x52, 0xff, 0x1a, 0x81, 0x25, 0x2e, + 0xf4, 0x72, 0x0f, 0x2c, 0x17, 0xee, 0xd2, 0x00, 0xbf, 0xad, 0xa9, 0x71, 0x3d, 0x0f, 0x17, 0xda, + 0xf3, 0xf6, 0x88, 0xe7, 0x2d, 0x5c, 0xcf, 0x33, 0xd0, 0xb7, 0xc3, 0xfe, 0xef, 0x3e, 0xfe, 0x0d, + 0x00, 0x00, 0xff, 0xff, 0xb1, 0x30, 0xbc, 0xab, 0xbf, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/setting_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/setting_error.pb.go new file mode 100644 index 0000000..1e0f62a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/setting_error.pb.go @@ -0,0 +1,212 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/setting_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible setting errors. +type SettingErrorEnum_SettingError int32 + +const ( + // Enum unspecified. + SettingErrorEnum_UNSPECIFIED SettingErrorEnum_SettingError = 0 + // The received error code is not known in this version. + SettingErrorEnum_UNKNOWN SettingErrorEnum_SettingError = 1 + // The campaign setting is not available for this Google Ads account. + SettingErrorEnum_SETTING_TYPE_IS_NOT_AVAILABLE SettingErrorEnum_SettingError = 3 + // The setting is not compatible with the campaign. + SettingErrorEnum_SETTING_TYPE_IS_NOT_COMPATIBLE_WITH_CAMPAIGN SettingErrorEnum_SettingError = 4 + // The supplied TargetingSetting contains an invalid CriterionTypeGroup. See + // CriterionTypeGroup documentation for CriterionTypeGroups allowed + // in Campaign or AdGroup TargetingSettings. + SettingErrorEnum_TARGETING_SETTING_CONTAINS_INVALID_CRITERION_TYPE_GROUP SettingErrorEnum_SettingError = 5 + // TargetingSetting must not explicitly + // set any of the Demographic CriterionTypeGroups (AGE_RANGE, GENDER, + // PARENT, INCOME_RANGE) to false (it's okay to not set them at all, in + // which case the system will set them to true automatically). + SettingErrorEnum_TARGETING_SETTING_DEMOGRAPHIC_CRITERION_TYPE_GROUPS_MUST_BE_SET_TO_TARGET_ALL SettingErrorEnum_SettingError = 6 + // TargetingSetting cannot change any of + // the Demographic CriterionTypeGroups (AGE_RANGE, GENDER, PARENT, + // INCOME_RANGE) from true to false. + SettingErrorEnum_TARGETING_SETTING_CANNOT_CHANGE_TARGET_ALL_TO_FALSE_FOR_DEMOGRAPHIC_CRITERION_TYPE_GROUP SettingErrorEnum_SettingError = 7 + // At least one feed id should be present. + SettingErrorEnum_DYNAMIC_SEARCH_ADS_SETTING_AT_LEAST_ONE_FEED_ID_MUST_BE_PRESENT SettingErrorEnum_SettingError = 8 + // The supplied DynamicSearchAdsSetting contains an invalid domain name. + SettingErrorEnum_DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_DOMAIN_NAME SettingErrorEnum_SettingError = 9 + // The supplied DynamicSearchAdsSetting contains a subdomain name. + SettingErrorEnum_DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_SUBDOMAIN_NAME SettingErrorEnum_SettingError = 10 + // The supplied DynamicSearchAdsSetting contains an invalid language code. + SettingErrorEnum_DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_LANGUAGE_CODE SettingErrorEnum_SettingError = 11 + // TargetingSettings in search campaigns should not have + // CriterionTypeGroup.PLACEMENT set to targetAll. + SettingErrorEnum_TARGET_ALL_IS_NOT_ALLOWED_FOR_PLACEMENT_IN_SEARCH_CAMPAIGN SettingErrorEnum_SettingError = 12 + // Duplicate description in universal app setting description field. + SettingErrorEnum_UNIVERSAL_APP_CAMPAIGN_SETTING_DUPLICATE_DESCRIPTION SettingErrorEnum_SettingError = 13 + // Description line width is too long in universal app setting description + // field. + SettingErrorEnum_UNIVERSAL_APP_CAMPAIGN_SETTING_DESCRIPTION_LINE_WIDTH_TOO_LONG SettingErrorEnum_SettingError = 14 + // Universal app setting appId field cannot be modified for COMPLETE + // campaigns. 
+ SettingErrorEnum_UNIVERSAL_APP_CAMPAIGN_SETTING_APP_ID_CANNOT_BE_MODIFIED SettingErrorEnum_SettingError = 15 + // YoutubeVideoMediaIds in universal app setting cannot exceed size limit. + SettingErrorEnum_TOO_MANY_YOUTUBE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN SettingErrorEnum_SettingError = 16 + // ImageMediaIds in universal app setting cannot exceed size limit. + SettingErrorEnum_TOO_MANY_IMAGE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN SettingErrorEnum_SettingError = 17 + // Media is incompatible for universal app campaign. + SettingErrorEnum_MEDIA_INCOMPATIBLE_FOR_UNIVERSAL_APP_CAMPAIGN SettingErrorEnum_SettingError = 18 + // Too many exclamation marks in universal app campaign ad text ideas. + SettingErrorEnum_TOO_MANY_EXCLAMATION_MARKS SettingErrorEnum_SettingError = 19 +) + +var SettingErrorEnum_SettingError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 3: "SETTING_TYPE_IS_NOT_AVAILABLE", + 4: "SETTING_TYPE_IS_NOT_COMPATIBLE_WITH_CAMPAIGN", + 5: "TARGETING_SETTING_CONTAINS_INVALID_CRITERION_TYPE_GROUP", + 6: "TARGETING_SETTING_DEMOGRAPHIC_CRITERION_TYPE_GROUPS_MUST_BE_SET_TO_TARGET_ALL", + 7: "TARGETING_SETTING_CANNOT_CHANGE_TARGET_ALL_TO_FALSE_FOR_DEMOGRAPHIC_CRITERION_TYPE_GROUP", + 8: "DYNAMIC_SEARCH_ADS_SETTING_AT_LEAST_ONE_FEED_ID_MUST_BE_PRESENT", + 9: "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_DOMAIN_NAME", + 10: "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_SUBDOMAIN_NAME", + 11: "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_LANGUAGE_CODE", + 12: "TARGET_ALL_IS_NOT_ALLOWED_FOR_PLACEMENT_IN_SEARCH_CAMPAIGN", + 13: "UNIVERSAL_APP_CAMPAIGN_SETTING_DUPLICATE_DESCRIPTION", + 14: "UNIVERSAL_APP_CAMPAIGN_SETTING_DESCRIPTION_LINE_WIDTH_TOO_LONG", + 15: "UNIVERSAL_APP_CAMPAIGN_SETTING_APP_ID_CANNOT_BE_MODIFIED", + 16: "TOO_MANY_YOUTUBE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN", + 17: "TOO_MANY_IMAGE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN", + 18: "MEDIA_INCOMPATIBLE_FOR_UNIVERSAL_APP_CAMPAIGN", + 19: "TOO_MANY_EXCLAMATION_MARKS", +} +var SettingErrorEnum_SettingError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "SETTING_TYPE_IS_NOT_AVAILABLE": 3, + "SETTING_TYPE_IS_NOT_COMPATIBLE_WITH_CAMPAIGN": 4, + "TARGETING_SETTING_CONTAINS_INVALID_CRITERION_TYPE_GROUP": 5, + "TARGETING_SETTING_DEMOGRAPHIC_CRITERION_TYPE_GROUPS_MUST_BE_SET_TO_TARGET_ALL": 6, + "TARGETING_SETTING_CANNOT_CHANGE_TARGET_ALL_TO_FALSE_FOR_DEMOGRAPHIC_CRITERION_TYPE_GROUP": 7, + "DYNAMIC_SEARCH_ADS_SETTING_AT_LEAST_ONE_FEED_ID_MUST_BE_PRESENT": 8, + "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_DOMAIN_NAME": 9, + "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_SUBDOMAIN_NAME": 10, + "DYNAMIC_SEARCH_ADS_SETTING_CONTAINS_INVALID_LANGUAGE_CODE": 11, + "TARGET_ALL_IS_NOT_ALLOWED_FOR_PLACEMENT_IN_SEARCH_CAMPAIGN": 12, + "UNIVERSAL_APP_CAMPAIGN_SETTING_DUPLICATE_DESCRIPTION": 13, + "UNIVERSAL_APP_CAMPAIGN_SETTING_DESCRIPTION_LINE_WIDTH_TOO_LONG": 14, + "UNIVERSAL_APP_CAMPAIGN_SETTING_APP_ID_CANNOT_BE_MODIFIED": 15, + "TOO_MANY_YOUTUBE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN": 16, + "TOO_MANY_IMAGE_MEDIA_IDS_IN_UNIVERSAL_APP_CAMPAIGN": 17, + "MEDIA_INCOMPATIBLE_FOR_UNIVERSAL_APP_CAMPAIGN": 18, + "TOO_MANY_EXCLAMATION_MARKS": 19, +} + +func (x SettingErrorEnum_SettingError) String() string { + return proto.EnumName(SettingErrorEnum_SettingError_name, int32(x)) +} +func (SettingErrorEnum_SettingError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_setting_error_c07b21986f03a4ed, []int{0, 0} +} + +// Container for enum describing possible setting errors. 
+type SettingErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SettingErrorEnum) Reset() { *m = SettingErrorEnum{} } +func (m *SettingErrorEnum) String() string { return proto.CompactTextString(m) } +func (*SettingErrorEnum) ProtoMessage() {} +func (*SettingErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_setting_error_c07b21986f03a4ed, []int{0} +} +func (m *SettingErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SettingErrorEnum.Unmarshal(m, b) +} +func (m *SettingErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SettingErrorEnum.Marshal(b, m, deterministic) +} +func (dst *SettingErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SettingErrorEnum.Merge(dst, src) +} +func (m *SettingErrorEnum) XXX_Size() int { + return xxx_messageInfo_SettingErrorEnum.Size(m) +} +func (m *SettingErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SettingErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SettingErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SettingErrorEnum)(nil), "google.ads.googleads.v1.errors.SettingErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.SettingErrorEnum_SettingError", SettingErrorEnum_SettingError_name, SettingErrorEnum_SettingError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/setting_error.proto", fileDescriptor_setting_error_c07b21986f03a4ed) +} + +var fileDescriptor_setting_error_c07b21986f03a4ed = []byte{ + // 696 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xd1, 0x8e, 0xdb, 0x44, + 0x14, 0x86, 0xd9, 0x2d, 0xb4, 0xcb, 0x6c, 0xa1, 0xd3, 0xe1, 0xae, 0x82, 0x95, 0xd8, 0x6b, 0x70, + 0x48, 0x41, 0x50, 0x5c, 0x28, 0x3a, 0xf6, 0x9c, 0x75, 0x46, 0x1d, 0xcf, 0x18, 0xcf, 0x38, 0xdb, + 0xa0, 0x48, 0x47, 0x81, 0x44, 0x51, 0xa4, 0x36, 0x5e, 0xc5, 0xa1, 0x0f, 0xc4, 0x1d, 0x3c, 0x04, + 0x0f, 0xc0, 0xa3, 0x20, 0x1e, 0x02, 0x8d, 0xdd, 0xb8, 0x91, 0x9a, 0x76, 0xdb, 0xab, 0x4c, 0xc6, + 0xff, 0xf7, 0x9f, 0xf1, 0xef, 0x33, 0x87, 0xdd, 0x5f, 0xd6, 0xf5, 0xf2, 0xe9, 0x62, 0x30, 0x9b, + 0x37, 0x83, 0x6e, 0x19, 0x56, 0xcf, 0x87, 0x83, 0xc5, 0x66, 0x53, 0x6f, 0x9a, 0x41, 0xb3, 0xd8, + 0x6e, 0x57, 0xeb, 0x25, 0xb5, 0x7f, 0xa3, 0xab, 0x4d, 0xbd, 0xad, 0xc5, 0x59, 0x27, 0x8c, 0x66, + 0xf3, 0x26, 0xea, 0x99, 0xe8, 0xf9, 0x30, 0xea, 0x98, 0x7b, 0x9f, 0xee, 0x3c, 0xaf, 0x56, 0x83, + 0xd9, 0x7a, 0x5d, 0x6f, 0x67, 0xdb, 0x55, 0xbd, 0x6e, 0x3a, 0xfa, 0xfc, 0xef, 0x13, 0xc6, 0x5d, + 0xe7, 0x8a, 0x41, 0x8f, 0xeb, 0xdf, 0x9f, 0x9d, 0xff, 0x79, 0xc2, 0x6e, 0xef, 0x6f, 0x8a, 0x3b, + 0xec, 0xb4, 0x32, 0xae, 0xc0, 0x54, 0x5d, 0x28, 0x94, 0xfc, 0x3d, 0x71, 0xca, 0x6e, 0x55, 0xe6, + 0xb1, 0xb1, 0x97, 0x86, 0x1f, 0x89, 0xcf, 0xd9, 0x67, 0x0e, 0xbd, 0x57, 0x26, 0x23, 0x3f, 0x29, + 0x90, 0x94, 0x23, 0x63, 0x3d, 0xc1, 0x18, 0x94, 0x86, 0x44, 0x23, 0xbf, 0x21, 0xbe, 0x62, 0x5f, + 0x1c, 0x92, 0xa4, 0x36, 0x2f, 0xc0, 0xab, 0x44, 0x23, 0x5d, 0x2a, 0x3f, 0xa2, 0x14, 0xf2, 0x02, + 0x54, 0x66, 0xf8, 0xfb, 0xe2, 0x21, 0xfb, 0xce, 0x43, 0x99, 0x61, 0xcb, 0xec, 0xd8, 0xd4, 0x1a, + 0x0f, 0xca, 0x38, 0x52, 0x66, 0x0c, 0x5a, 0x49, 0x4a, 0x4b, 0xe5, 0xb1, 0x54, 0xd6, 0x74, 0xb6, + 0x59, 0x69, 0xab, 0x82, 0x7f, 0x20, 0x7e, 0x66, 0xf9, 0xab, 0xb0, 0xc4, 0xdc, 0x66, 0x25, 0x14, + 0x23, 0x95, 0x1e, 0xe4, 0x1c, 0xe5, 0x95, 0xf3, 0x94, 0x60, 0x20, 0xc8, 0x5b, 0xea, 0x2c, 0x08, + 0xb4, 0xe6, 0x37, 0xc5, 
0x94, 0x3d, 0x39, 0x70, 0x1e, 0x30, 0xed, 0x6b, 0x8c, 0xc0, 0x64, 0xb8, + 0xa7, 0x0f, 0xf4, 0x05, 0x68, 0x87, 0x74, 0x61, 0xcb, 0x6b, 0x0b, 0xf3, 0x5b, 0x22, 0x65, 0x3f, + 0xc9, 0x89, 0x81, 0x5c, 0xa5, 0xe4, 0x10, 0xca, 0x74, 0x44, 0x20, 0x5d, 0x5f, 0x06, 0x3c, 0x69, + 0x04, 0xe7, 0xc9, 0x1a, 0xa4, 0x0b, 0x44, 0x49, 0x4a, 0xf6, 0x87, 0x2d, 0x4a, 0x74, 0x68, 0x3c, + 0x3f, 0x09, 0x91, 0xbd, 0xc1, 0xe4, 0x95, 0xec, 0xa4, 0xcd, 0x41, 0x19, 0x32, 0x90, 0x23, 0xff, + 0x50, 0x7c, 0xcb, 0xee, 0xbf, 0x0d, 0xec, 0xaa, 0x64, 0x9f, 0x63, 0xe2, 0x47, 0xf6, 0xfd, 0xbb, + 0x14, 0xd5, 0x60, 0xb2, 0x0a, 0x32, 0xa4, 0xd4, 0x4a, 0xe4, 0xa7, 0xe2, 0x11, 0x8b, 0xf7, 0x62, + 0xdb, 0x75, 0x8e, 0xd6, 0xf6, 0x12, 0x65, 0x1b, 0x5e, 0xa1, 0x21, 0xc5, 0x1c, 0x8d, 0x27, 0x65, + 0x76, 0x15, 0xfa, 0x36, 0xb9, 0x2d, 0x1e, 0xb0, 0x6f, 0x2a, 0xa3, 0xc6, 0x58, 0x3a, 0xd0, 0x04, + 0x45, 0xd1, 0x3f, 0x7b, 0xf9, 0xd9, 0xab, 0x42, 0xab, 0x14, 0x3c, 0x92, 0x44, 0x97, 0x96, 0xaa, + 0xf0, 0xca, 0x1a, 0xfe, 0x91, 0x48, 0xd8, 0xa3, 0xeb, 0xc8, 0x97, 0x7a, 0xd2, 0xca, 0x84, 0x1e, + 0x95, 0x7e, 0x44, 0xde, 0x5a, 0xd2, 0xd6, 0x64, 0xfc, 0x63, 0xf1, 0x03, 0x7b, 0x70, 0x8d, 0x47, + 0xd8, 0x0c, 0x7d, 0xda, 0x35, 0x4a, 0x82, 0x94, 0x5b, 0xd9, 0x5d, 0xa2, 0x3b, 0xe1, 0xec, 0xc1, + 0x2b, 0x07, 0x33, 0xa1, 0x89, 0xad, 0x7c, 0x15, 0x1e, 0xa3, 0x54, 0x40, 0x4a, 0x86, 0xc4, 0xe8, + 0xb0, 0x37, 0xe7, 0xe1, 0x63, 0xf5, 0xa4, 0xca, 0x43, 0x9c, 0x6f, 0xc3, 0xdd, 0x15, 0x43, 0xf6, + 0xe5, 0x0b, 0xa1, 0xd9, 0xbb, 0x7a, 0x21, 0xe6, 0xd7, 0x20, 0x42, 0x9c, 0xb1, 0x7b, 0x7d, 0x29, + 0x7c, 0x92, 0x6a, 0xc8, 0xa1, 0x0d, 0x24, 0x87, 0xf2, 0xb1, 0xe3, 0x9f, 0x24, 0xff, 0x1d, 0xb1, + 0xf3, 0xdf, 0xea, 0x67, 0xd1, 0x9b, 0xa7, 0x50, 0x72, 0x77, 0x7f, 0x9e, 0x14, 0x61, 0xf4, 0x14, + 0x47, 0xbf, 0xc8, 0x17, 0xd0, 0xb2, 0x7e, 0x3a, 0x5b, 0x2f, 0xa3, 0x7a, 0xb3, 0x1c, 0x2c, 0x17, + 0xeb, 0x76, 0x30, 0xed, 0xc6, 0xdf, 0xd5, 0xaa, 0x79, 0xdd, 0x34, 0x7c, 0xd8, 0xfd, 0xfc, 0x71, + 0x7c, 0x23, 0x03, 0xf8, 0xeb, 0xf8, 0x2c, 0xeb, 0xcc, 0x60, 0xde, 0x44, 0xdd, 0x32, 0xac, 0xc6, + 0xc3, 0xa8, 0x2d, 0xd9, 0xfc, 0xb3, 0x13, 0x4c, 0x61, 0xde, 0x4c, 0x7b, 0xc1, 0x74, 0x3c, 0x9c, + 0x76, 0x82, 0x7f, 0x8f, 0xcf, 0xbb, 0xdd, 0x38, 0x86, 0x79, 0x13, 0xc7, 0xbd, 0x24, 0x8e, 0xc7, + 0xc3, 0x38, 0xee, 0x44, 0xbf, 0xde, 0x6c, 0x4f, 0xf7, 0xf5, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x83, 0x90, 0xd3, 0xfd, 0xaa, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_criterion_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_criterion_error.pb.go new file mode 100644 index 0000000..fc71882 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_criterion_error.pb.go @@ -0,0 +1,115 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/shared_criterion_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible shared criterion errors. +type SharedCriterionErrorEnum_SharedCriterionError int32 + +const ( + // Enum unspecified. + SharedCriterionErrorEnum_UNSPECIFIED SharedCriterionErrorEnum_SharedCriterionError = 0 + // The received error code is not known in this version. + SharedCriterionErrorEnum_UNKNOWN SharedCriterionErrorEnum_SharedCriterionError = 1 + // The criterion is not appropriate for the shared set type. + SharedCriterionErrorEnum_CRITERION_TYPE_NOT_ALLOWED_FOR_SHARED_SET_TYPE SharedCriterionErrorEnum_SharedCriterionError = 2 +) + +var SharedCriterionErrorEnum_SharedCriterionError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CRITERION_TYPE_NOT_ALLOWED_FOR_SHARED_SET_TYPE", +} +var SharedCriterionErrorEnum_SharedCriterionError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CRITERION_TYPE_NOT_ALLOWED_FOR_SHARED_SET_TYPE": 2, +} + +func (x SharedCriterionErrorEnum_SharedCriterionError) String() string { + return proto.EnumName(SharedCriterionErrorEnum_SharedCriterionError_name, int32(x)) +} +func (SharedCriterionErrorEnum_SharedCriterionError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_error_40c1d60c7ede7d32, []int{0, 0} +} + +// Container for enum describing possible shared criterion errors. +type SharedCriterionErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedCriterionErrorEnum) Reset() { *m = SharedCriterionErrorEnum{} } +func (m *SharedCriterionErrorEnum) String() string { return proto.CompactTextString(m) } +func (*SharedCriterionErrorEnum) ProtoMessage() {} +func (*SharedCriterionErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_error_40c1d60c7ede7d32, []int{0} +} +func (m *SharedCriterionErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedCriterionErrorEnum.Unmarshal(m, b) +} +func (m *SharedCriterionErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedCriterionErrorEnum.Marshal(b, m, deterministic) +} +func (dst *SharedCriterionErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedCriterionErrorEnum.Merge(dst, src) +} +func (m *SharedCriterionErrorEnum) XXX_Size() int { + return xxx_messageInfo_SharedCriterionErrorEnum.Size(m) +} +func (m *SharedCriterionErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SharedCriterionErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedCriterionErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SharedCriterionErrorEnum)(nil), "google.ads.googleads.v1.errors.SharedCriterionErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.SharedCriterionErrorEnum_SharedCriterionError", SharedCriterionErrorEnum_SharedCriterionError_name, SharedCriterionErrorEnum_SharedCriterionError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/shared_criterion_error.proto", fileDescriptor_shared_criterion_error_40c1d60c7ede7d32) +} + +var fileDescriptor_shared_criterion_error_40c1d60c7ede7d32 = []byte{ + // 330 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4e, 0x83, 0x40, + 0x14, 0x86, 0x05, 0x13, 0x4d, 0xa6, 0x0b, 0x1b, 0xe2, 0x42, 0x8d, 0xe9, 0x82, 0x03, 0x0c, 0x41, + 0x77, 0xd3, 0xd5, 0xb4, 0x4c, 0x2b, 0xb1, 0x01, 0x02, 0xb4, 
0x8d, 0x86, 0x64, 0x82, 0x85, 0x50, + 0x92, 0x76, 0xa6, 0x99, 0xc1, 0x9e, 0xc0, 0x93, 0xb8, 0xf4, 0x28, 0x1e, 0xc5, 0xb5, 0x07, 0x30, + 0xcc, 0x48, 0x57, 0xd5, 0xd5, 0xfc, 0x79, 0xef, 0xff, 0xfe, 0x79, 0xef, 0x81, 0x61, 0xc5, 0x79, + 0xb5, 0x29, 0x9d, 0xbc, 0x90, 0x8e, 0x96, 0xad, 0xda, 0xbb, 0x4e, 0x29, 0x04, 0x17, 0xd2, 0x91, + 0xeb, 0x5c, 0x94, 0x05, 0x5d, 0x89, 0xba, 0x29, 0x45, 0xcd, 0x19, 0x55, 0x75, 0xb8, 0x13, 0xbc, + 0xe1, 0xd6, 0x40, 0x13, 0x30, 0x2f, 0x24, 0x3c, 0xc0, 0x70, 0xef, 0x42, 0x0d, 0xdf, 0xdc, 0x76, + 0xe1, 0xbb, 0xda, 0xc9, 0x19, 0xe3, 0x4d, 0xde, 0xd4, 0x9c, 0x49, 0x4d, 0xdb, 0x6f, 0x06, 0xb8, + 0x4a, 0x54, 0xfc, 0xb8, 0x4b, 0x27, 0x2d, 0x47, 0xd8, 0xeb, 0xd6, 0x5e, 0x83, 0xcb, 0x63, 0x3d, + 0xeb, 0x02, 0xf4, 0xe6, 0x41, 0x12, 0x91, 0xb1, 0x3f, 0xf1, 0x89, 0xd7, 0x3f, 0xb1, 0x7a, 0xe0, + 0x7c, 0x1e, 0x3c, 0x06, 0xe1, 0x32, 0xe8, 0x1b, 0xd6, 0x1d, 0x80, 0xe3, 0xd8, 0x4f, 0x49, 0xec, + 0x87, 0x01, 0x4d, 0x9f, 0x22, 0x42, 0x83, 0x30, 0xa5, 0x78, 0x36, 0x0b, 0x97, 0xc4, 0xa3, 0x93, + 0x30, 0xa6, 0xc9, 0x03, 0x8e, 0x89, 0x47, 0x13, 0x92, 0xaa, 0x7e, 0xdf, 0x1c, 0x7d, 0x1b, 0xc0, + 0x5e, 0xf1, 0x2d, 0xfc, 0x7f, 0x97, 0xd1, 0xf5, 0xb1, 0x71, 0xa2, 0x76, 0x91, 0xc8, 0x78, 0xf6, + 0x7e, 0xe1, 0x8a, 0x6f, 0x72, 0x56, 0x41, 0x2e, 0x2a, 0xa7, 0x2a, 0x99, 0x5a, 0xb3, 0xbb, 0xea, + 0xae, 0x96, 0x7f, 0x1d, 0x79, 0xa8, 0x9f, 0x77, 0xf3, 0x74, 0x8a, 0xf1, 0x87, 0x39, 0x98, 0xea, + 0x30, 0x5c, 0x48, 0xa8, 0x65, 0xab, 0x16, 0x2e, 0x54, 0x5f, 0xca, 0xcf, 0xce, 0x90, 0xe1, 0x42, + 0x66, 0x07, 0x43, 0xb6, 0x70, 0x33, 0x6d, 0xf8, 0x32, 0x6d, 0x5d, 0x45, 0x08, 0x17, 0x12, 0xa1, + 0x83, 0x05, 0xa1, 0x85, 0x8b, 0x90, 0x36, 0xbd, 0x9c, 0xa9, 0xe9, 0xee, 0x7f, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xee, 0x75, 0xa0, 0xa2, 0x01, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_set_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_set_error.pb.go new file mode 100644 index 0000000..89b2479 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/shared_set_error.pb.go @@ -0,0 +1,130 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/shared_set_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible shared set errors. +type SharedSetErrorEnum_SharedSetError int32 + +const ( + // Enum unspecified. + SharedSetErrorEnum_UNSPECIFIED SharedSetErrorEnum_SharedSetError = 0 + // The received error code is not known in this version. + SharedSetErrorEnum_UNKNOWN SharedSetErrorEnum_SharedSetError = 1 + // The customer cannot create this type of shared set. + SharedSetErrorEnum_CUSTOMER_CANNOT_CREATE_SHARED_SET_OF_THIS_TYPE SharedSetErrorEnum_SharedSetError = 2 + // A shared set with this name already exists. 
+ SharedSetErrorEnum_DUPLICATE_NAME SharedSetErrorEnum_SharedSetError = 3 + // Removed shared sets cannot be mutated. + SharedSetErrorEnum_SHARED_SET_REMOVED SharedSetErrorEnum_SharedSetError = 4 + // The shared set cannot be removed because it is in use. + SharedSetErrorEnum_SHARED_SET_IN_USE SharedSetErrorEnum_SharedSetError = 5 +) + +var SharedSetErrorEnum_SharedSetError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "CUSTOMER_CANNOT_CREATE_SHARED_SET_OF_THIS_TYPE", + 3: "DUPLICATE_NAME", + 4: "SHARED_SET_REMOVED", + 5: "SHARED_SET_IN_USE", +} +var SharedSetErrorEnum_SharedSetError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "CUSTOMER_CANNOT_CREATE_SHARED_SET_OF_THIS_TYPE": 2, + "DUPLICATE_NAME": 3, + "SHARED_SET_REMOVED": 4, + "SHARED_SET_IN_USE": 5, +} + +func (x SharedSetErrorEnum_SharedSetError) String() string { + return proto.EnumName(SharedSetErrorEnum_SharedSetError_name, int32(x)) +} +func (SharedSetErrorEnum_SharedSetError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_shared_set_error_3a2a77f77d946ee5, []int{0, 0} +} + +// Container for enum describing possible shared set errors. +type SharedSetErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedSetErrorEnum) Reset() { *m = SharedSetErrorEnum{} } +func (m *SharedSetErrorEnum) String() string { return proto.CompactTextString(m) } +func (*SharedSetErrorEnum) ProtoMessage() {} +func (*SharedSetErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_error_3a2a77f77d946ee5, []int{0} +} +func (m *SharedSetErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedSetErrorEnum.Unmarshal(m, b) +} +func (m *SharedSetErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedSetErrorEnum.Marshal(b, m, deterministic) +} +func (dst *SharedSetErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedSetErrorEnum.Merge(dst, src) +} +func (m *SharedSetErrorEnum) XXX_Size() int { + return xxx_messageInfo_SharedSetErrorEnum.Size(m) +} +func (m *SharedSetErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SharedSetErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedSetErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SharedSetErrorEnum)(nil), "google.ads.googleads.v1.errors.SharedSetErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.SharedSetErrorEnum_SharedSetError", SharedSetErrorEnum_SharedSetError_name, SharedSetErrorEnum_SharedSetError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/shared_set_error.proto", fileDescriptor_shared_set_error_3a2a77f77d946ee5) +} + +var fileDescriptor_shared_set_error_3a2a77f77d946ee5 = []byte{ + // 372 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8e, 0x9b, 0x30, + 0x18, 0xc5, 0x0b, 0xe9, 0x1f, 0xc9, 0x91, 0x52, 0xea, 0xaa, 0x5d, 0x54, 0x55, 0x16, 0x1c, 0xc0, + 0x88, 0x56, 0xdd, 0xb8, 0x2b, 0x07, 0x9c, 0x04, 0xb5, 0x31, 0x08, 0x03, 0x55, 0x2b, 0x24, 0x8b, + 0x16, 0x44, 0x23, 0x25, 0x38, 0xc2, 0x4c, 0x0e, 0x34, 0xd2, 0x6c, 0xe6, 0x0c, 0x73, 0x82, 0x39, + 0xca, 0x2c, 0xe6, 0x0c, 0x23, 0xf0, 0x24, 0x4a, 0x16, 0x33, 0x2b, 0x9e, 0x9e, 0x7e, 0xef, 0xe1, + 0xef, 0xfb, 0xc0, 0xb7, 0x5a, 0xca, 0x7a, 0x53, 0x39, 0x45, 0xa9, 0x1c, 0x2d, 0x7b, 0xb5, 0x77, + 0x9d, 0xaa, 0x6d, 0x65, 0xab, 0x1c, 0xf5, 0xbf, 0x68, 0xab, 0x52, 0xa8, 
0xaa, 0x13, 0x83, 0x83, + 0x76, 0xad, 0xec, 0x24, 0x9c, 0x6a, 0x16, 0x15, 0xa5, 0x42, 0xc7, 0x18, 0xda, 0xbb, 0x48, 0xc7, + 0x3e, 0x7d, 0x3e, 0xd4, 0xee, 0xd6, 0x4e, 0xd1, 0x34, 0xb2, 0x2b, 0xba, 0xb5, 0x6c, 0x94, 0x4e, + 0xdb, 0x37, 0x06, 0x80, 0x7c, 0x28, 0xe6, 0x55, 0x47, 0xfb, 0x04, 0x6d, 0x2e, 0xb6, 0xf6, 0x95, + 0x01, 0x26, 0xe7, 0x36, 0x7c, 0x0b, 0xc6, 0x29, 0xe3, 0x11, 0xf5, 0x82, 0x79, 0x40, 0x7d, 0xeb, + 0x05, 0x1c, 0x83, 0x37, 0x29, 0xfb, 0xc1, 0xc2, 0x5f, 0xcc, 0x32, 0xe0, 0x17, 0x80, 0xbc, 0x94, + 0x27, 0xe1, 0x8a, 0xc6, 0xc2, 0x23, 0x8c, 0x85, 0x89, 0xf0, 0x62, 0x4a, 0x12, 0x2a, 0xf8, 0x92, + 0xc4, 0xd4, 0x17, 0x9c, 0x26, 0x22, 0x9c, 0x8b, 0x64, 0x19, 0x70, 0x91, 0xfc, 0x8e, 0xa8, 0x65, + 0x42, 0x08, 0x26, 0x7e, 0x1a, 0xfd, 0x0c, 0xbc, 0x1e, 0x63, 0x64, 0x45, 0xad, 0x11, 0xfc, 0x08, + 0xe0, 0x49, 0x20, 0xa6, 0xab, 0x30, 0xa3, 0xbe, 0xf5, 0x12, 0x7e, 0x00, 0xef, 0x4e, 0xfc, 0x80, + 0x89, 0x94, 0x53, 0xeb, 0xd5, 0xec, 0xde, 0x00, 0xf6, 0x3f, 0xb9, 0x45, 0xcf, 0xef, 0x60, 0xf6, + 0xfe, 0x7c, 0x96, 0xa8, 0x1f, 0x3d, 0x32, 0xfe, 0xf8, 0x8f, 0xb1, 0x5a, 0x6e, 0x8a, 0xa6, 0x46, + 0xb2, 0xad, 0x9d, 0xba, 0x6a, 0x86, 0xc5, 0x1c, 0x2e, 0xb0, 0x5b, 0xab, 0xa7, 0x0e, 0xf2, 0x5d, + 0x7f, 0x2e, 0xcd, 0xd1, 0x82, 0x90, 0x6b, 0x73, 0xba, 0xd0, 0x65, 0xa4, 0x54, 0x48, 0xcb, 0x5e, + 0x65, 0x2e, 0x1a, 0x7e, 0xa9, 0x6e, 0x0f, 0x40, 0x4e, 0x4a, 0x95, 0x1f, 0x81, 0x3c, 0x73, 0x73, + 0x0d, 0xdc, 0x99, 0xb6, 0x76, 0x31, 0x26, 0xa5, 0xc2, 0xf8, 0x88, 0x60, 0x9c, 0xb9, 0x18, 0x6b, + 0xe8, 0xef, 0xeb, 0xe1, 0x75, 0x5f, 0x1f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x19, 0xb4, 0xb0, 0x71, + 0x2d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/size_limit_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/size_limit_error.pb.go new file mode 100644 index 0000000..f216fe5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/size_limit_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/size_limit_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible size limit errors. +type SizeLimitErrorEnum_SizeLimitError int32 + +const ( + // Enum unspecified. + SizeLimitErrorEnum_UNSPECIFIED SizeLimitErrorEnum_SizeLimitError = 0 + // The received error code is not known in this version. + SizeLimitErrorEnum_UNKNOWN SizeLimitErrorEnum_SizeLimitError = 1 + // The number of entries in the request exceeds the system limit. + SizeLimitErrorEnum_REQUEST_SIZE_LIMIT_EXCEEDED SizeLimitErrorEnum_SizeLimitError = 2 + // The number of entries in the response exceeds the system limit. 
+ SizeLimitErrorEnum_RESPONSE_SIZE_LIMIT_EXCEEDED SizeLimitErrorEnum_SizeLimitError = 3 +) + +var SizeLimitErrorEnum_SizeLimitError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "REQUEST_SIZE_LIMIT_EXCEEDED", + 3: "RESPONSE_SIZE_LIMIT_EXCEEDED", +} +var SizeLimitErrorEnum_SizeLimitError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "REQUEST_SIZE_LIMIT_EXCEEDED": 2, + "RESPONSE_SIZE_LIMIT_EXCEEDED": 3, +} + +func (x SizeLimitErrorEnum_SizeLimitError) String() string { + return proto.EnumName(SizeLimitErrorEnum_SizeLimitError_name, int32(x)) +} +func (SizeLimitErrorEnum_SizeLimitError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_size_limit_error_92d39604b305e0fb, []int{0, 0} +} + +// Container for enum describing possible size limit errors. +type SizeLimitErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SizeLimitErrorEnum) Reset() { *m = SizeLimitErrorEnum{} } +func (m *SizeLimitErrorEnum) String() string { return proto.CompactTextString(m) } +func (*SizeLimitErrorEnum) ProtoMessage() {} +func (*SizeLimitErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_size_limit_error_92d39604b305e0fb, []int{0} +} +func (m *SizeLimitErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SizeLimitErrorEnum.Unmarshal(m, b) +} +func (m *SizeLimitErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SizeLimitErrorEnum.Marshal(b, m, deterministic) +} +func (dst *SizeLimitErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_SizeLimitErrorEnum.Merge(dst, src) +} +func (m *SizeLimitErrorEnum) XXX_Size() int { + return xxx_messageInfo_SizeLimitErrorEnum.Size(m) +} +func (m *SizeLimitErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_SizeLimitErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_SizeLimitErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*SizeLimitErrorEnum)(nil), "google.ads.googleads.v1.errors.SizeLimitErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.SizeLimitErrorEnum_SizeLimitError", SizeLimitErrorEnum_SizeLimitError_name, SizeLimitErrorEnum_SizeLimitError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/size_limit_error.proto", fileDescriptor_size_limit_error_92d39604b305e0fb) +} + +var fileDescriptor_size_limit_error_92d39604b305e0fb = []byte{ + // 327 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4a, 0xc3, 0x40, + 0x14, 0x86, 0x4d, 0x0a, 0x0a, 0x53, 0xd0, 0x12, 0x77, 0x5a, 0xaa, 0xe4, 0x00, 0x13, 0x82, 0xb8, + 0x19, 0x57, 0x69, 0xf3, 0x2c, 0xc1, 0x9a, 0xc6, 0x4e, 0x5b, 0xa5, 0x04, 0x42, 0x34, 0x61, 0x18, + 0x68, 0x67, 0x6a, 0x26, 0x76, 0xd1, 0x0b, 0x78, 0x0f, 0x97, 0x1e, 0xc5, 0xa3, 0xb8, 0xf0, 0x0c, + 0x92, 0x8c, 0x2d, 0x14, 0xd4, 0xd5, 0xfc, 0x3c, 0xbe, 0xff, 0x9f, 0xf7, 0x3f, 0x74, 0xc9, 0xa4, + 0x64, 0xf3, 0xdc, 0x49, 0x33, 0xe5, 0x68, 0x59, 0xa9, 0x95, 0xeb, 0xe4, 0x45, 0x21, 0x0b, 0xe5, + 0x28, 0xbe, 0xce, 0x93, 0x39, 0x5f, 0xf0, 0x32, 0xa9, 0x27, 0x78, 0x59, 0xc8, 0x52, 0x5a, 0x1d, + 0xcd, 0xe2, 0x34, 0x53, 0x78, 0x6b, 0xc3, 0x2b, 0x17, 0x6b, 0xdb, 0x49, 0x7b, 0x13, 0xbb, 0xe4, + 0x4e, 0x2a, 0x84, 0x2c, 0xd3, 0x92, 0x4b, 0xa1, 0xb4, 0xdb, 0x7e, 0x35, 0x90, 0x45, 0xf9, 0x3a, + 0x1f, 0x54, 0xb9, 0x50, 0x39, 0x40, 0xbc, 0x2c, 0xec, 0x67, 0x74, 0xb8, 0x3b, 0xb5, 0x8e, 0x50, + 0x73, 0x12, 0xd2, 0x08, 0x7a, 0xc1, 0x75, 
0x00, 0x7e, 0x6b, 0xcf, 0x6a, 0xa2, 0x83, 0x49, 0x78, + 0x13, 0x0e, 0xef, 0xc3, 0x96, 0x61, 0x9d, 0xa1, 0xd3, 0x11, 0xdc, 0x4d, 0x80, 0x8e, 0x13, 0x1a, + 0xcc, 0x20, 0x19, 0x04, 0xb7, 0xc1, 0x38, 0x81, 0x87, 0x1e, 0x80, 0x0f, 0x7e, 0xcb, 0xb4, 0xce, + 0x51, 0x7b, 0x04, 0x34, 0x1a, 0x86, 0x14, 0x7e, 0x25, 0x1a, 0xdd, 0x2f, 0x03, 0xd9, 0x4f, 0x72, + 0x81, 0xff, 0xaf, 0xd3, 0x3d, 0xde, 0xdd, 0x2b, 0xaa, 0x5a, 0x44, 0xc6, 0xcc, 0xff, 0xb1, 0x31, + 0x39, 0x4f, 0x05, 0xc3, 0xb2, 0x60, 0x0e, 0xcb, 0x45, 0xdd, 0x71, 0x73, 0xcc, 0x25, 0x57, 0x7f, + 0xdd, 0xf6, 0x4a, 0x3f, 0x6f, 0x66, 0xa3, 0xef, 0x79, 0xef, 0x66, 0xa7, 0xaf, 0xc3, 0xbc, 0x4c, + 0x61, 0x2d, 0x2b, 0x35, 0x75, 0x71, 0xfd, 0xa5, 0xfa, 0xd8, 0x00, 0xb1, 0x97, 0xa9, 0x78, 0x0b, + 0xc4, 0x53, 0x37, 0xd6, 0xc0, 0xa7, 0x69, 0xeb, 0x29, 0x21, 0x5e, 0xa6, 0x08, 0xd9, 0x22, 0x84, + 0x4c, 0x5d, 0x42, 0x34, 0xf4, 0xb8, 0x5f, 0x6f, 0x77, 0xf1, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x16, + 0xd6, 0x32, 0xa2, 0xf8, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_format_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_format_error.pb.go new file mode 100644 index 0000000..279c72e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_format_error.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/string_format_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible string format errors. +type StringFormatErrorEnum_StringFormatError int32 + +const ( + // Enum unspecified. + StringFormatErrorEnum_UNSPECIFIED StringFormatErrorEnum_StringFormatError = 0 + // The received error code is not known in this version. + StringFormatErrorEnum_UNKNOWN StringFormatErrorEnum_StringFormatError = 1 + // The input string value contains disallowed characters. + StringFormatErrorEnum_ILLEGAL_CHARS StringFormatErrorEnum_StringFormatError = 2 + // The input string value is invalid for the associated field. + StringFormatErrorEnum_INVALID_FORMAT StringFormatErrorEnum_StringFormatError = 3 +) + +var StringFormatErrorEnum_StringFormatError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ILLEGAL_CHARS", + 3: "INVALID_FORMAT", +} +var StringFormatErrorEnum_StringFormatError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ILLEGAL_CHARS": 2, + "INVALID_FORMAT": 3, +} + +func (x StringFormatErrorEnum_StringFormatError) String() string { + return proto.EnumName(StringFormatErrorEnum_StringFormatError_name, int32(x)) +} +func (StringFormatErrorEnum_StringFormatError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_string_format_error_f7006cd68d0ebbf8, []int{0, 0} +} + +// Container for enum describing possible string format errors. 
+type StringFormatErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringFormatErrorEnum) Reset() { *m = StringFormatErrorEnum{} } +func (m *StringFormatErrorEnum) String() string { return proto.CompactTextString(m) } +func (*StringFormatErrorEnum) ProtoMessage() {} +func (*StringFormatErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_string_format_error_f7006cd68d0ebbf8, []int{0} +} +func (m *StringFormatErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringFormatErrorEnum.Unmarshal(m, b) +} +func (m *StringFormatErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringFormatErrorEnum.Marshal(b, m, deterministic) +} +func (dst *StringFormatErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringFormatErrorEnum.Merge(dst, src) +} +func (m *StringFormatErrorEnum) XXX_Size() int { + return xxx_messageInfo_StringFormatErrorEnum.Size(m) +} +func (m *StringFormatErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_StringFormatErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_StringFormatErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*StringFormatErrorEnum)(nil), "google.ads.googleads.v1.errors.StringFormatErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.StringFormatErrorEnum_StringFormatError", StringFormatErrorEnum_StringFormatError_name, StringFormatErrorEnum_StringFormatError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/string_format_error.proto", fileDescriptor_string_format_error_f7006cd68d0ebbf8) +} + +var fileDescriptor_string_format_error_f7006cd68d0ebbf8 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x5d, 0x07, 0x0a, 0x19, 0x6a, 0x17, 0xd0, 0x83, 0xc8, 0x0e, 0x7d, 0x80, 0x84, 0xe2, + 0x45, 0xe2, 0x29, 0xdb, 0xba, 0x59, 0xac, 0xdd, 0xd8, 0x5c, 0x15, 0x29, 0x94, 0x68, 0x67, 0x28, + 0x6c, 0xc9, 0x4c, 0xea, 0x1e, 0xc8, 0xa3, 0x8f, 0xe2, 0xa3, 0x78, 0xf3, 0x0d, 0xa4, 0x89, 0xed, + 0x65, 0xe8, 0xa9, 0x1f, 0xff, 0xfe, 0xbe, 0xef, 0xff, 0xe5, 0x0f, 0x2e, 0xb9, 0x94, 0x7c, 0xb5, + 0xc4, 0x2c, 0xd7, 0xd8, 0xca, 0x4a, 0x6d, 0x7d, 0xbc, 0x54, 0x4a, 0x2a, 0x8d, 0x75, 0xa9, 0x0a, + 0xc1, 0xb3, 0x17, 0xa9, 0xd6, 0xac, 0xcc, 0xcc, 0x10, 0x6d, 0x94, 0x2c, 0x25, 0xec, 0x59, 0x1c, + 0xb1, 0x5c, 0xa3, 0xc6, 0x89, 0xb6, 0x3e, 0xb2, 0xce, 0xb3, 0xf3, 0x3a, 0x79, 0x53, 0x60, 0x26, + 0x84, 0x2c, 0x59, 0x59, 0x48, 0xa1, 0xad, 0xdb, 0x7b, 0x05, 0x27, 0x73, 0x13, 0x3d, 0x32, 0xc9, + 0x41, 0xe5, 0x09, 0xc4, 0xdb, 0xda, 0x7b, 0x00, 0xdd, 0x9d, 0x1f, 0xf0, 0x18, 0x74, 0x16, 0xf1, + 0x7c, 0x1a, 0x0c, 0xc2, 0x51, 0x18, 0x0c, 0xdd, 0x3d, 0xd8, 0x01, 0x07, 0x8b, 0xf8, 0x26, 0x9e, + 0xdc, 0xc7, 0x6e, 0x0b, 0x76, 0xc1, 0x61, 0x18, 0x45, 0xc1, 0x98, 0x46, 0xd9, 0xe0, 0x9a, 0xce, + 0xe6, 0xae, 0x03, 0x21, 0x38, 0x0a, 0xe3, 0x84, 0x46, 0xe1, 0x30, 0x1b, 0x4d, 0x66, 0xb7, 0xf4, + 0xce, 0x6d, 0xf7, 0xbf, 0x5b, 0xc0, 0x7b, 0x96, 0x6b, 0xf4, 0x7f, 0xef, 0xfe, 0xe9, 0xce, 0xfa, + 0x69, 0xd5, 0x78, 0xda, 0x7a, 0x1c, 0xfe, 0x3a, 0xb9, 0x5c, 0x31, 0xc1, 0x91, 0x54, 0x1c, 0xf3, + 0xa5, 0x30, 0xef, 0xa9, 0x6f, 0xb7, 0x29, 0xf4, 0x5f, 0xa7, 0xbc, 0xb2, 0x9f, 0x77, 0xa7, 0x3d, + 0xa6, 0xf4, 0xc3, 0xe9, 0x8d, 0x6d, 0x18, 0xcd, 0x35, 0xb2, 0xb2, 0x52, 0x89, 0x8f, 0xcc, 0x4a, + 0xfd, 0x59, 0x03, 0x29, 0xcd, 0x75, 0xda, 0x00, 0x69, 0xe2, 0xa7, 0x16, 
0xf8, 0x72, 0x3c, 0x3b, + 0x25, 0x84, 0xe6, 0x9a, 0x90, 0x06, 0x21, 0x24, 0xf1, 0x09, 0xb1, 0xd0, 0xd3, 0xbe, 0x69, 0x77, + 0xf1, 0x13, 0x00, 0x00, 0xff, 0xff, 0xb4, 0x40, 0x90, 0x0d, 0xe7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_length_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_length_error.pb.go new file mode 100644 index 0000000..b54d63d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/string_length_error.pb.go @@ -0,0 +1,118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/string_length_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible string length errors. +type StringLengthErrorEnum_StringLengthError int32 + +const ( + // Enum unspecified. + StringLengthErrorEnum_UNSPECIFIED StringLengthErrorEnum_StringLengthError = 0 + // The received error code is not known in this version. + StringLengthErrorEnum_UNKNOWN StringLengthErrorEnum_StringLengthError = 1 + // Too short. + StringLengthErrorEnum_TOO_SHORT StringLengthErrorEnum_StringLengthError = 2 + // Too long. + StringLengthErrorEnum_TOO_LONG StringLengthErrorEnum_StringLengthError = 3 +) + +var StringLengthErrorEnum_StringLengthError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "TOO_SHORT", + 3: "TOO_LONG", +} +var StringLengthErrorEnum_StringLengthError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "TOO_SHORT": 2, + "TOO_LONG": 3, +} + +func (x StringLengthErrorEnum_StringLengthError) String() string { + return proto.EnumName(StringLengthErrorEnum_StringLengthError_name, int32(x)) +} +func (StringLengthErrorEnum_StringLengthError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_string_length_error_fcfdf078aefc1641, []int{0, 0} +} + +// Container for enum describing possible string length errors. 
+type StringLengthErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringLengthErrorEnum) Reset() { *m = StringLengthErrorEnum{} } +func (m *StringLengthErrorEnum) String() string { return proto.CompactTextString(m) } +func (*StringLengthErrorEnum) ProtoMessage() {} +func (*StringLengthErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_string_length_error_fcfdf078aefc1641, []int{0} +} +func (m *StringLengthErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringLengthErrorEnum.Unmarshal(m, b) +} +func (m *StringLengthErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringLengthErrorEnum.Marshal(b, m, deterministic) +} +func (dst *StringLengthErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringLengthErrorEnum.Merge(dst, src) +} +func (m *StringLengthErrorEnum) XXX_Size() int { + return xxx_messageInfo_StringLengthErrorEnum.Size(m) +} +func (m *StringLengthErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_StringLengthErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_StringLengthErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*StringLengthErrorEnum)(nil), "google.ads.googleads.v1.errors.StringLengthErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.StringLengthErrorEnum_StringLengthError", StringLengthErrorEnum_StringLengthError_name, StringLengthErrorEnum_StringLengthError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/string_length_error.proto", fileDescriptor_string_length_error_fcfdf078aefc1641) +} + +var fileDescriptor_string_length_error_fcfdf078aefc1641 = []byte{ + // 309 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x4f, 0x4e, 0x84, 0x30, + 0x14, 0xc6, 0x85, 0x49, 0xfc, 0xd3, 0xd1, 0x88, 0x24, 0xba, 0x30, 0x66, 0x16, 0x1c, 0xa0, 0x0d, + 0x71, 0x63, 0xea, 0x8a, 0x71, 0x10, 0x27, 0x4e, 0x0a, 0x91, 0x19, 0x4c, 0x0c, 0x09, 0x41, 0x21, + 0x95, 0x84, 0x69, 0x49, 0x8b, 0x73, 0x20, 0x97, 0x1e, 0xc5, 0xa3, 0xb8, 0xf3, 0x06, 0x86, 0x56, + 0xd8, 0x4c, 0x74, 0xd5, 0xaf, 0xaf, 0xbf, 0xef, 0x7b, 0xaf, 0x0f, 0x5c, 0x51, 0xce, 0x69, 0x5d, + 0xa2, 0xbc, 0x90, 0x48, 0xcb, 0x4e, 0x6d, 0x5c, 0x54, 0x0a, 0xc1, 0x85, 0x44, 0xb2, 0x15, 0x15, + 0xa3, 0x59, 0x5d, 0x32, 0xda, 0xbe, 0x66, 0xaa, 0x08, 0x1b, 0xc1, 0x5b, 0x6e, 0x4f, 0x34, 0x0e, + 0xf3, 0x42, 0xc2, 0xc1, 0x09, 0x37, 0x2e, 0xd4, 0xce, 0xf3, 0x8b, 0x3e, 0xb9, 0xa9, 0x50, 0xce, + 0x18, 0x6f, 0xf3, 0xb6, 0xe2, 0x4c, 0x6a, 0xb7, 0x43, 0xc1, 0x69, 0xac, 0xa2, 0x17, 0x2a, 0xd9, + 0xef, 0x3c, 0x3e, 0x7b, 0x5b, 0x3b, 0x04, 0x9c, 0x6c, 0x3d, 0xd8, 0xc7, 0x60, 0xbc, 0x22, 0x71, + 0xe4, 0xdf, 0xcc, 0x6f, 0xe7, 0xfe, 0xcc, 0xda, 0xb1, 0xc7, 0x60, 0x6f, 0x45, 0xee, 0x49, 0xf8, + 0x48, 0x2c, 0xc3, 0x3e, 0x02, 0x07, 0xcb, 0x30, 0xcc, 0xe2, 0xbb, 0xf0, 0x61, 0x69, 0x99, 0xf6, + 0x21, 0xd8, 0xef, 0xae, 0x8b, 0x90, 0x04, 0xd6, 0x68, 0xfa, 0x6d, 0x00, 0xe7, 0x85, 0xaf, 0xe1, + 0xff, 0xd3, 0x4e, 0xcf, 0xb6, 0x9a, 0x46, 0xdd, 0x9c, 0x91, 0xf1, 0x34, 0xfb, 0x75, 0x52, 0x5e, + 0xe7, 0x8c, 0x42, 0x2e, 0x28, 0xa2, 0x25, 0x53, 0xbf, 0xe8, 0x37, 0xd6, 0x54, 0xf2, 0xaf, 0x05, + 0x5e, 0xeb, 0xe3, 0xdd, 0x1c, 0x05, 0x9e, 0xf7, 0x61, 0x4e, 0x02, 0x1d, 0xe6, 0x15, 0x12, 0x6a, + 0xd9, 0xa9, 0xc4, 0x85, 0xaa, 0xa5, 0xfc, 0xec, 0x81, 0xd4, 0x2b, 0x64, 0x3a, 0x00, 0x69, 0xe2, + 0xa6, 0x1a, 0xf8, 0x32, 0x1d, 0x5d, 0xc5, 0xd8, 0x2b, 0x24, 0xc6, 0x03, 
0x82, 0x71, 0xe2, 0x62, + 0xac, 0xa1, 0xe7, 0x5d, 0x35, 0xdd, 0xe5, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x64, 0x6d, 0x29, + 0x4e, 0xdd, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/url_field_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/url_field_error.pb.go new file mode 100644 index 0000000..18b9ca8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/url_field_error.pb.go @@ -0,0 +1,363 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/url_field_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible url field errors. +type UrlFieldErrorEnum_UrlFieldError int32 + +const ( + // Enum unspecified. + UrlFieldErrorEnum_UNSPECIFIED UrlFieldErrorEnum_UrlFieldError = 0 + // The received error code is not known in this version. + UrlFieldErrorEnum_UNKNOWN UrlFieldErrorEnum_UrlFieldError = 1 + // The tracking url template is invalid. + UrlFieldErrorEnum_INVALID_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 2 + // The tracking url template contains invalid tag. + UrlFieldErrorEnum_INVALID_TAG_IN_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 3 + // The tracking url template must contain at least one tag (e.g. {lpurl}), + // This applies only to tracking url template associated with website ads or + // product ads. + UrlFieldErrorEnum_MISSING_TRACKING_URL_TEMPLATE_TAG UrlFieldErrorEnum_UrlFieldError = 4 + // The tracking url template must start with a valid protocol (or lpurl + // tag). + UrlFieldErrorEnum_MISSING_PROTOCOL_IN_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 5 + // The tracking url template starts with an invalid protocol. + UrlFieldErrorEnum_INVALID_PROTOCOL_IN_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 6 + // The tracking url template contains illegal characters. + UrlFieldErrorEnum_MALFORMED_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 7 + // The tracking url template must contain a host name (or lpurl tag). + UrlFieldErrorEnum_MISSING_HOST_IN_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 8 + // The tracking url template has an invalid or missing top level domain + // extension. + UrlFieldErrorEnum_INVALID_TLD_IN_TRACKING_URL_TEMPLATE UrlFieldErrorEnum_UrlFieldError = 9 + // The tracking url template contains nested occurrences of the same + // conditional tag (i.e. {ifmobile:{ifmobile:x}}). + UrlFieldErrorEnum_REDUNDANT_NESTED_TRACKING_URL_TEMPLATE_TAG UrlFieldErrorEnum_UrlFieldError = 10 + // The final url is invalid. + UrlFieldErrorEnum_INVALID_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 11 + // The final url contains invalid tag. 
+ UrlFieldErrorEnum_INVALID_TAG_IN_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 12 + // The final url contains nested occurrences of the same conditional tag + // (i.e. {ifmobile:{ifmobile:x}}). + UrlFieldErrorEnum_REDUNDANT_NESTED_FINAL_URL_TAG UrlFieldErrorEnum_UrlFieldError = 13 + // The final url must start with a valid protocol. + UrlFieldErrorEnum_MISSING_PROTOCOL_IN_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 14 + // The final url starts with an invalid protocol. + UrlFieldErrorEnum_INVALID_PROTOCOL_IN_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 15 + // The final url contains illegal characters. + UrlFieldErrorEnum_MALFORMED_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 16 + // The final url must contain a host name. + UrlFieldErrorEnum_MISSING_HOST_IN_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 17 + // The tracking url template has an invalid or missing top level domain + // extension. + UrlFieldErrorEnum_INVALID_TLD_IN_FINAL_URL UrlFieldErrorEnum_UrlFieldError = 18 + // The final mobile url is invalid. + UrlFieldErrorEnum_INVALID_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 19 + // The final mobile url contains invalid tag. + UrlFieldErrorEnum_INVALID_TAG_IN_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 20 + // The final mobile url contains nested occurrences of the same conditional + // tag (i.e. {ifmobile:{ifmobile:x}}). + UrlFieldErrorEnum_REDUNDANT_NESTED_FINAL_MOBILE_URL_TAG UrlFieldErrorEnum_UrlFieldError = 21 + // The final mobile url must start with a valid protocol. + UrlFieldErrorEnum_MISSING_PROTOCOL_IN_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 22 + // The final mobile url starts with an invalid protocol. + UrlFieldErrorEnum_INVALID_PROTOCOL_IN_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 23 + // The final mobile url contains illegal characters. + UrlFieldErrorEnum_MALFORMED_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 24 + // The final mobile url must contain a host name. + UrlFieldErrorEnum_MISSING_HOST_IN_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 25 + // The tracking url template has an invalid or missing top level domain + // extension. + UrlFieldErrorEnum_INVALID_TLD_IN_FINAL_MOBILE_URL UrlFieldErrorEnum_UrlFieldError = 26 + // The final app url is invalid. + UrlFieldErrorEnum_INVALID_FINAL_APP_URL UrlFieldErrorEnum_UrlFieldError = 27 + // The final app url contains invalid tag. + UrlFieldErrorEnum_INVALID_TAG_IN_FINAL_APP_URL UrlFieldErrorEnum_UrlFieldError = 28 + // The final app url contains nested occurrences of the same conditional tag + // (i.e. {ifmobile:{ifmobile:x}}). + UrlFieldErrorEnum_REDUNDANT_NESTED_FINAL_APP_URL_TAG UrlFieldErrorEnum_UrlFieldError = 29 + // More than one app url found for the same OS type. + UrlFieldErrorEnum_MULTIPLE_APP_URLS_FOR_OSTYPE UrlFieldErrorEnum_UrlFieldError = 30 + // The OS type given for an app url is not valid. + UrlFieldErrorEnum_INVALID_OSTYPE UrlFieldErrorEnum_UrlFieldError = 31 + // The protocol given for an app url is not valid. (E.g. "android-app://") + UrlFieldErrorEnum_INVALID_PROTOCOL_FOR_APP_URL UrlFieldErrorEnum_UrlFieldError = 32 + // The package id (app id) given for an app url is not valid. + UrlFieldErrorEnum_INVALID_PACKAGE_ID_FOR_APP_URL UrlFieldErrorEnum_UrlFieldError = 33 + // The number of url custom parameters for an resource exceeds the maximum + // limit allowed. + UrlFieldErrorEnum_URL_CUSTOM_PARAMETERS_COUNT_EXCEEDS_LIMIT UrlFieldErrorEnum_UrlFieldError = 34 + // An invalid character appears in the parameter key. 
+ UrlFieldErrorEnum_INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_KEY UrlFieldErrorEnum_UrlFieldError = 39 + // An invalid character appears in the parameter value. + UrlFieldErrorEnum_INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_VALUE UrlFieldErrorEnum_UrlFieldError = 40 + // The url custom parameter value fails url tag validation. + UrlFieldErrorEnum_INVALID_TAG_IN_URL_CUSTOM_PARAMETER_VALUE UrlFieldErrorEnum_UrlFieldError = 41 + // The custom parameter contains nested occurrences of the same conditional + // tag (i.e. {ifmobile:{ifmobile:x}}). + UrlFieldErrorEnum_REDUNDANT_NESTED_URL_CUSTOM_PARAMETER_TAG UrlFieldErrorEnum_UrlFieldError = 42 + // The protocol (http:// or https://) is missing. + UrlFieldErrorEnum_MISSING_PROTOCOL UrlFieldErrorEnum_UrlFieldError = 43 + // Unsupported protocol in URL. Only http and https are supported. + UrlFieldErrorEnum_INVALID_PROTOCOL UrlFieldErrorEnum_UrlFieldError = 52 + // The url is invalid. + UrlFieldErrorEnum_INVALID_URL UrlFieldErrorEnum_UrlFieldError = 44 + // Destination Url is deprecated. + UrlFieldErrorEnum_DESTINATION_URL_DEPRECATED UrlFieldErrorEnum_UrlFieldError = 45 + // The url contains invalid tag. + UrlFieldErrorEnum_INVALID_TAG_IN_URL UrlFieldErrorEnum_UrlFieldError = 46 + // The url must contain at least one tag (e.g. {lpurl}), This applies only + // to urls associated with website ads or product ads. + UrlFieldErrorEnum_MISSING_URL_TAG UrlFieldErrorEnum_UrlFieldError = 47 + // Duplicate url id. + UrlFieldErrorEnum_DUPLICATE_URL_ID UrlFieldErrorEnum_UrlFieldError = 48 + // Invalid url id. + UrlFieldErrorEnum_INVALID_URL_ID UrlFieldErrorEnum_UrlFieldError = 49 + // The final url suffix cannot begin with '?' or '&' characters and must be + // a valid query string. + UrlFieldErrorEnum_FINAL_URL_SUFFIX_MALFORMED UrlFieldErrorEnum_UrlFieldError = 50 + // The final url suffix cannot contain {lpurl} related or {ignore} tags. + UrlFieldErrorEnum_INVALID_TAG_IN_FINAL_URL_SUFFIX UrlFieldErrorEnum_UrlFieldError = 51 + // The top level domain is invalid, e.g, not a public top level domain + // listed in publicsuffix.org. + UrlFieldErrorEnum_INVALID_TOP_LEVEL_DOMAIN UrlFieldErrorEnum_UrlFieldError = 53 + // Malformed top level domain in URL. + UrlFieldErrorEnum_MALFORMED_TOP_LEVEL_DOMAIN UrlFieldErrorEnum_UrlFieldError = 54 + // Malformed URL. + UrlFieldErrorEnum_MALFORMED_URL UrlFieldErrorEnum_UrlFieldError = 55 + // No host found in URL. 
+ UrlFieldErrorEnum_MISSING_HOST UrlFieldErrorEnum_UrlFieldError = 56 +) + +var UrlFieldErrorEnum_UrlFieldError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "INVALID_TRACKING_URL_TEMPLATE", + 3: "INVALID_TAG_IN_TRACKING_URL_TEMPLATE", + 4: "MISSING_TRACKING_URL_TEMPLATE_TAG", + 5: "MISSING_PROTOCOL_IN_TRACKING_URL_TEMPLATE", + 6: "INVALID_PROTOCOL_IN_TRACKING_URL_TEMPLATE", + 7: "MALFORMED_TRACKING_URL_TEMPLATE", + 8: "MISSING_HOST_IN_TRACKING_URL_TEMPLATE", + 9: "INVALID_TLD_IN_TRACKING_URL_TEMPLATE", + 10: "REDUNDANT_NESTED_TRACKING_URL_TEMPLATE_TAG", + 11: "INVALID_FINAL_URL", + 12: "INVALID_TAG_IN_FINAL_URL", + 13: "REDUNDANT_NESTED_FINAL_URL_TAG", + 14: "MISSING_PROTOCOL_IN_FINAL_URL", + 15: "INVALID_PROTOCOL_IN_FINAL_URL", + 16: "MALFORMED_FINAL_URL", + 17: "MISSING_HOST_IN_FINAL_URL", + 18: "INVALID_TLD_IN_FINAL_URL", + 19: "INVALID_FINAL_MOBILE_URL", + 20: "INVALID_TAG_IN_FINAL_MOBILE_URL", + 21: "REDUNDANT_NESTED_FINAL_MOBILE_URL_TAG", + 22: "MISSING_PROTOCOL_IN_FINAL_MOBILE_URL", + 23: "INVALID_PROTOCOL_IN_FINAL_MOBILE_URL", + 24: "MALFORMED_FINAL_MOBILE_URL", + 25: "MISSING_HOST_IN_FINAL_MOBILE_URL", + 26: "INVALID_TLD_IN_FINAL_MOBILE_URL", + 27: "INVALID_FINAL_APP_URL", + 28: "INVALID_TAG_IN_FINAL_APP_URL", + 29: "REDUNDANT_NESTED_FINAL_APP_URL_TAG", + 30: "MULTIPLE_APP_URLS_FOR_OSTYPE", + 31: "INVALID_OSTYPE", + 32: "INVALID_PROTOCOL_FOR_APP_URL", + 33: "INVALID_PACKAGE_ID_FOR_APP_URL", + 34: "URL_CUSTOM_PARAMETERS_COUNT_EXCEEDS_LIMIT", + 39: "INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_KEY", + 40: "INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_VALUE", + 41: "INVALID_TAG_IN_URL_CUSTOM_PARAMETER_VALUE", + 42: "REDUNDANT_NESTED_URL_CUSTOM_PARAMETER_TAG", + 43: "MISSING_PROTOCOL", + 52: "INVALID_PROTOCOL", + 44: "INVALID_URL", + 45: "DESTINATION_URL_DEPRECATED", + 46: "INVALID_TAG_IN_URL", + 47: "MISSING_URL_TAG", + 48: "DUPLICATE_URL_ID", + 49: "INVALID_URL_ID", + 50: "FINAL_URL_SUFFIX_MALFORMED", + 51: "INVALID_TAG_IN_FINAL_URL_SUFFIX", + 53: "INVALID_TOP_LEVEL_DOMAIN", + 54: "MALFORMED_TOP_LEVEL_DOMAIN", + 55: "MALFORMED_URL", + 56: "MISSING_HOST", +} +var UrlFieldErrorEnum_UrlFieldError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "INVALID_TRACKING_URL_TEMPLATE": 2, + "INVALID_TAG_IN_TRACKING_URL_TEMPLATE": 3, + "MISSING_TRACKING_URL_TEMPLATE_TAG": 4, + "MISSING_PROTOCOL_IN_TRACKING_URL_TEMPLATE": 5, + "INVALID_PROTOCOL_IN_TRACKING_URL_TEMPLATE": 6, + "MALFORMED_TRACKING_URL_TEMPLATE": 7, + "MISSING_HOST_IN_TRACKING_URL_TEMPLATE": 8, + "INVALID_TLD_IN_TRACKING_URL_TEMPLATE": 9, + "REDUNDANT_NESTED_TRACKING_URL_TEMPLATE_TAG": 10, + "INVALID_FINAL_URL": 11, + "INVALID_TAG_IN_FINAL_URL": 12, + "REDUNDANT_NESTED_FINAL_URL_TAG": 13, + "MISSING_PROTOCOL_IN_FINAL_URL": 14, + "INVALID_PROTOCOL_IN_FINAL_URL": 15, + "MALFORMED_FINAL_URL": 16, + "MISSING_HOST_IN_FINAL_URL": 17, + "INVALID_TLD_IN_FINAL_URL": 18, + "INVALID_FINAL_MOBILE_URL": 19, + "INVALID_TAG_IN_FINAL_MOBILE_URL": 20, + "REDUNDANT_NESTED_FINAL_MOBILE_URL_TAG": 21, + "MISSING_PROTOCOL_IN_FINAL_MOBILE_URL": 22, + "INVALID_PROTOCOL_IN_FINAL_MOBILE_URL": 23, + "MALFORMED_FINAL_MOBILE_URL": 24, + "MISSING_HOST_IN_FINAL_MOBILE_URL": 25, + "INVALID_TLD_IN_FINAL_MOBILE_URL": 26, + "INVALID_FINAL_APP_URL": 27, + "INVALID_TAG_IN_FINAL_APP_URL": 28, + "REDUNDANT_NESTED_FINAL_APP_URL_TAG": 29, + "MULTIPLE_APP_URLS_FOR_OSTYPE": 30, + "INVALID_OSTYPE": 31, + "INVALID_PROTOCOL_FOR_APP_URL": 32, + "INVALID_PACKAGE_ID_FOR_APP_URL": 33, + "URL_CUSTOM_PARAMETERS_COUNT_EXCEEDS_LIMIT": 34, + 
"INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_KEY": 39, + "INVALID_CHARACTERS_IN_URL_CUSTOM_PARAMETER_VALUE": 40, + "INVALID_TAG_IN_URL_CUSTOM_PARAMETER_VALUE": 41, + "REDUNDANT_NESTED_URL_CUSTOM_PARAMETER_TAG": 42, + "MISSING_PROTOCOL": 43, + "INVALID_PROTOCOL": 52, + "INVALID_URL": 44, + "DESTINATION_URL_DEPRECATED": 45, + "INVALID_TAG_IN_URL": 46, + "MISSING_URL_TAG": 47, + "DUPLICATE_URL_ID": 48, + "INVALID_URL_ID": 49, + "FINAL_URL_SUFFIX_MALFORMED": 50, + "INVALID_TAG_IN_FINAL_URL_SUFFIX": 51, + "INVALID_TOP_LEVEL_DOMAIN": 53, + "MALFORMED_TOP_LEVEL_DOMAIN": 54, + "MALFORMED_URL": 55, + "MISSING_HOST": 56, +} + +func (x UrlFieldErrorEnum_UrlFieldError) String() string { + return proto.EnumName(UrlFieldErrorEnum_UrlFieldError_name, int32(x)) +} +func (UrlFieldErrorEnum_UrlFieldError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_url_field_error_eb3fbd09420326b6, []int{0, 0} +} + +// Container for enum describing possible url field errors. +type UrlFieldErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UrlFieldErrorEnum) Reset() { *m = UrlFieldErrorEnum{} } +func (m *UrlFieldErrorEnum) String() string { return proto.CompactTextString(m) } +func (*UrlFieldErrorEnum) ProtoMessage() {} +func (*UrlFieldErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_url_field_error_eb3fbd09420326b6, []int{0} +} +func (m *UrlFieldErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UrlFieldErrorEnum.Unmarshal(m, b) +} +func (m *UrlFieldErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UrlFieldErrorEnum.Marshal(b, m, deterministic) +} +func (dst *UrlFieldErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UrlFieldErrorEnum.Merge(dst, src) +} +func (m *UrlFieldErrorEnum) XXX_Size() int { + return xxx_messageInfo_UrlFieldErrorEnum.Size(m) +} +func (m *UrlFieldErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UrlFieldErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UrlFieldErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UrlFieldErrorEnum)(nil), "google.ads.googleads.v1.errors.UrlFieldErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.UrlFieldErrorEnum_UrlFieldError", UrlFieldErrorEnum_UrlFieldError_name, UrlFieldErrorEnum_UrlFieldError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/url_field_error.proto", fileDescriptor_url_field_error_eb3fbd09420326b6) +} + +var fileDescriptor_url_field_error_eb3fbd09420326b6 = []byte{ + // 854 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x95, 0xeb, 0x6e, 0xdc, 0x44, + 0x14, 0xc7, 0x49, 0x0a, 0x2d, 0x9c, 0x34, 0xc9, 0xe4, 0xa4, 0x69, 0x9b, 0x90, 0x6c, 0x93, 0xa5, + 0x85, 0xa6, 0x50, 0x6f, 0xb7, 0x0d, 0x17, 0x99, 0x4f, 0x13, 0x7b, 0x76, 0x3b, 0x8a, 0x6f, 0xb2, + 0xc7, 0x4b, 0x8b, 0x22, 0x8d, 0x16, 0x36, 0xac, 0x22, 0x6d, 0xd7, 0xd1, 0x3a, 0xcd, 0x9b, 0xf0, + 0x02, 0x7c, 0xe4, 0x51, 0x78, 0x14, 0x24, 0x9e, 0x01, 0x34, 0xf6, 0xfa, 0xb6, 0xb1, 0x83, 0xfa, + 0xc9, 0xa3, 0x73, 0x7e, 0xe7, 0x32, 0xff, 0xe3, 0x99, 0x81, 0xa3, 0x71, 0x14, 0x8d, 0x27, 0x67, + 0x9d, 0xe1, 0x28, 0xee, 0xa4, 0x4b, 0xb5, 0xba, 0xea, 0x76, 0xce, 0x66, 0xb3, 0x68, 0x16, 0x77, + 0xde, 0xcf, 0x26, 0xf2, 0xb7, 0xf3, 0xb3, 0xc9, 0x48, 0x26, 0x06, 0xed, 0x62, 0x16, 0x5d, 0x46, + 0xd8, 0x4a, 0x51, 0x6d, 0x38, 0x8a, 0xb5, 0x3c, 0x4a, 0xbb, 0xea, 0x6a, 0x69, 0xd4, 0xce, 0x6e, + 0x96, 
0xf5, 0xe2, 0xbc, 0x33, 0x9c, 0x4e, 0xa3, 0xcb, 0xe1, 0xe5, 0x79, 0x34, 0x8d, 0xd3, 0xe8, + 0xf6, 0xef, 0x6b, 0xb0, 0x11, 0xce, 0x26, 0x3d, 0x95, 0x96, 0xa9, 0x00, 0x36, 0x7d, 0xff, 0xae, + 0xfd, 0xef, 0x2a, 0xac, 0x56, 0xac, 0xb8, 0x0e, 0x2b, 0xa1, 0x13, 0x78, 0xcc, 0xe0, 0x3d, 0xce, + 0x4c, 0xf2, 0x11, 0xae, 0xc0, 0x9d, 0xd0, 0x39, 0x71, 0xdc, 0x9f, 0x1c, 0xb2, 0x84, 0x07, 0xb0, + 0xc7, 0x9d, 0x01, 0xb5, 0xb8, 0x29, 0x85, 0x4f, 0x8d, 0x13, 0xee, 0xf4, 0x65, 0xe8, 0x5b, 0x52, + 0x30, 0xdb, 0xb3, 0xa8, 0x60, 0x64, 0x19, 0x9f, 0xc2, 0xe3, 0x1c, 0xa1, 0x7d, 0xc9, 0x9d, 0x06, + 0xf2, 0x16, 0x3e, 0x81, 0x03, 0x9b, 0x07, 0x81, 0xf2, 0xd4, 0x22, 0x2a, 0x9e, 0x7c, 0x8c, 0xcf, + 0xe1, 0x30, 0xc3, 0x3c, 0xdf, 0x15, 0xae, 0xe1, 0x5a, 0xcd, 0x59, 0x3f, 0x51, 0x78, 0x56, 0xff, + 0xff, 0xf1, 0xdb, 0xf8, 0x05, 0x3c, 0xb2, 0xa9, 0xd5, 0x73, 0x7d, 0x9b, 0x35, 0xed, 0xe9, 0x0e, + 0x1e, 0xc2, 0x93, 0xac, 0x85, 0xd7, 0x6e, 0x20, 0x9a, 0xf3, 0x7d, 0x5a, 0xd9, 0xbe, 0x65, 0x36, + 0x93, 0x9f, 0xa1, 0x06, 0xcf, 0x7c, 0x66, 0x86, 0x8e, 0x49, 0x1d, 0x21, 0x1d, 0x16, 0x88, 0xa6, + 0x06, 0x12, 0x1d, 0x00, 0xb7, 0x60, 0x23, 0xcb, 0xdc, 0xe3, 0x0e, 0xb5, 0x14, 0x43, 0x56, 0x70, + 0x17, 0x1e, 0x2e, 0xe8, 0x5d, 0x78, 0xef, 0x62, 0x1b, 0x5a, 0xd7, 0x8a, 0xe4, 0xfe, 0x24, 0xf1, + 0xaa, 0x1a, 0x6a, 0x9d, 0xc0, 0x45, 0x9a, 0xb5, 0xf2, 0xdc, 0xeb, 0x91, 0x75, 0x7c, 0x00, 0x9b, + 0x85, 0x90, 0x85, 0x83, 0xe0, 0x1e, 0x6c, 0x2f, 0x8a, 0x57, 0xb8, 0x37, 0x2a, 0xfd, 0xa7, 0x82, + 0x15, 0x5e, 0x2c, 0x7b, 0x53, 0xb3, 0xed, 0x1e, 0x73, 0x8b, 0x25, 0xde, 0x4d, 0x35, 0xbc, 0xda, + 0xbd, 0x97, 0xa0, 0x7b, 0x6a, 0x78, 0x0d, 0x12, 0x14, 0x58, 0xa2, 0xc4, 0x96, 0x1a, 0x5e, 0xb3, + 0x12, 0xa5, 0xa4, 0xf7, 0xcb, 0x63, 0xbe, 0x91, 0x7c, 0x80, 0x2d, 0xd8, 0x59, 0xd4, 0xa5, 0xe4, + 0x7f, 0x88, 0x8f, 0x61, 0xbf, 0x5e, 0x9e, 0x12, 0xb5, 0x5d, 0xd9, 0x69, 0x59, 0xa5, 0x12, 0xb4, + 0x83, 0xdb, 0xb0, 0x55, 0x15, 0x8b, 0x7a, 0x5e, 0xe2, 0xfa, 0x1c, 0xf7, 0x61, 0xb7, 0x56, 0xa9, + 0x8c, 0xd8, 0xc5, 0x2f, 0xa1, 0xdd, 0x20, 0xd3, 0x9c, 0x49, 0x34, 0xda, 0x53, 0x99, 0xec, 0xd0, + 0x12, 0xdc, 0xb3, 0x58, 0xe6, 0x09, 0x64, 0xcf, 0xf5, 0xa5, 0x1b, 0x88, 0xb7, 0x1e, 0x23, 0x2d, + 0x44, 0x58, 0xcb, 0x6a, 0xcd, 0x6d, 0x8f, 0xca, 0xf5, 0x73, 0xbd, 0x54, 0x50, 0x56, 0x7f, 0x5f, + 0xfd, 0xa9, 0x39, 0x41, 0x8d, 0x13, 0xda, 0x67, 0x52, 0xed, 0xa3, 0xc4, 0x1c, 0xa8, 0xb3, 0xad, + 0x1a, 0x31, 0xc2, 0x40, 0xb8, 0xb6, 0xf4, 0xa8, 0x4f, 0x6d, 0x26, 0x98, 0x1f, 0x48, 0xc3, 0x0d, + 0x1d, 0x21, 0xd9, 0x1b, 0x83, 0x31, 0x33, 0x90, 0x16, 0xb7, 0xb9, 0x20, 0x6d, 0x7c, 0x09, 0x5a, + 0x96, 0xd2, 0x78, 0x4d, 0x7d, 0x6a, 0x24, 0x2c, 0x77, 0x64, 0x5d, 0x12, 0x79, 0xc2, 0xde, 0x92, + 0xaf, 0xf0, 0x08, 0x5e, 0x7c, 0x40, 0xcc, 0x80, 0x5a, 0x21, 0x23, 0x4f, 0xcb, 0x97, 0xce, 0x5c, + 0xde, 0x1b, 0xf0, 0x43, 0x85, 0x5f, 0xd3, 0xba, 0x36, 0x40, 0x49, 0xfe, 0x0c, 0xef, 0x01, 0x59, + 0xfc, 0x2d, 0xc9, 0xd7, 0xca, 0xba, 0x28, 0x29, 0x39, 0x52, 0xf7, 0x77, 0x66, 0x55, 0x9a, 0x7d, + 0xa3, 0xfe, 0x3f, 0x93, 0x05, 0x82, 0x3b, 0x54, 0x70, 0x37, 0xed, 0xcb, 0x64, 0x9e, 0xcf, 0x0c, + 0x2a, 0x98, 0x49, 0x9e, 0xe3, 0x7d, 0xc0, 0xeb, 0xad, 0x13, 0x0d, 0x37, 0x61, 0x3d, 0x2b, 0x9a, + 0x0d, 0xbf, 0xa3, 0x6a, 0x9a, 0xa1, 0x67, 0x71, 0x15, 0x9c, 0x98, 0xb9, 0x49, 0x5e, 0x94, 0x07, + 0x3e, 0xb7, 0x75, 0x55, 0xd9, 0xe2, 0x9e, 0x09, 0xc2, 0x5e, 0x8f, 0xbf, 0x91, 0xf9, 0x39, 0x20, + 0x2f, 0x1b, 0x8f, 0x6e, 0x81, 0x93, 0x57, 0x95, 0xbb, 0xc1, 0xf5, 0xa4, 0xc5, 0x06, 0xcc, 0x92, + 0xa6, 0x6b, 0x53, 0xee, 0x90, 0x6f, 0xab, 0x27, 0xeb, 0x9a, 0xff, 0x3b, 0xdc, 0x80, 0xd5, 0xc2, + 0xaf, 0x36, 0xf5, 0x3d, 0x12, 
0xb8, 0x5b, 0x3e, 0x6c, 0xe4, 0x87, 0xe3, 0x7f, 0x96, 0xa0, 0xfd, + 0x6b, 0xf4, 0x4e, 0xbb, 0xf9, 0x71, 0x3d, 0xc6, 0xca, 0x2b, 0xe9, 0xa9, 0x27, 0xd5, 0x5b, 0xfa, + 0xd9, 0x9c, 0x47, 0x8d, 0xa3, 0xc9, 0x70, 0x3a, 0xd6, 0xa2, 0xd9, 0xb8, 0x33, 0x3e, 0x9b, 0x26, + 0x0f, 0x6e, 0xf6, 0xb0, 0x5f, 0x9c, 0xc7, 0x4d, 0xef, 0xfc, 0x8f, 0xe9, 0xe7, 0x8f, 0xe5, 0x5b, + 0x7d, 0x4a, 0xff, 0x5c, 0x6e, 0xf5, 0xd3, 0x64, 0x74, 0x14, 0x6b, 0xe9, 0x52, 0xad, 0x06, 0x5d, + 0x2d, 0x29, 0x19, 0xff, 0x95, 0x01, 0xa7, 0x74, 0x14, 0x9f, 0xe6, 0xc0, 0xe9, 0xa0, 0x7b, 0x9a, + 0x02, 0x7f, 0x2f, 0xb7, 0x53, 0xab, 0xae, 0xd3, 0x51, 0xac, 0xeb, 0x39, 0xa2, 0xeb, 0x83, 0xae, + 0xae, 0xa7, 0xd0, 0x2f, 0xb7, 0x93, 0xee, 0x5e, 0xfd, 0x17, 0x00, 0x00, 0xff, 0xff, 0xc5, 0x66, + 0xaa, 0x3f, 0x84, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/user_list_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/user_list_error.pb.go new file mode 100644 index 0000000..d23fcfa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/user_list_error.pb.go @@ -0,0 +1,248 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/errors/user_list_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing possible user list errors. +type UserListErrorEnum_UserListError int32 + +const ( + // Enum unspecified. + UserListErrorEnum_UNSPECIFIED UserListErrorEnum_UserListError = 0 + // The received error code is not known in this version. + UserListErrorEnum_UNKNOWN UserListErrorEnum_UserListError = 1 + // Creating and updating external remarketing user lists is not supported. + UserListErrorEnum_EXTERNAL_REMARKETING_USER_LIST_MUTATE_NOT_SUPPORTED UserListErrorEnum_UserListError = 2 + // Concrete type of user list is required. + UserListErrorEnum_CONCRETE_TYPE_REQUIRED UserListErrorEnum_UserListError = 3 + // Creating/updating user list conversion types requires specifying the + // conversion type Id. + UserListErrorEnum_CONVERSION_TYPE_ID_REQUIRED UserListErrorEnum_UserListError = 4 + // Remarketing user list cannot have duplicate conversion types. + UserListErrorEnum_DUPLICATE_CONVERSION_TYPES UserListErrorEnum_UserListError = 5 + // Conversion type is invalid/unknown. + UserListErrorEnum_INVALID_CONVERSION_TYPE UserListErrorEnum_UserListError = 6 + // User list description is empty or invalid. + UserListErrorEnum_INVALID_DESCRIPTION UserListErrorEnum_UserListError = 7 + // User list name is empty or invalid. + UserListErrorEnum_INVALID_NAME UserListErrorEnum_UserListError = 8 + // Type of the UserList does not match. + UserListErrorEnum_INVALID_TYPE UserListErrorEnum_UserListError = 9 + // Embedded logical user lists are not allowed. 
+ UserListErrorEnum_CAN_NOT_ADD_LOGICAL_LIST_AS_LOGICAL_LIST_OPERAND UserListErrorEnum_UserListError = 10 + // User list rule operand is invalid. + UserListErrorEnum_INVALID_USER_LIST_LOGICAL_RULE_OPERAND UserListErrorEnum_UserListError = 11 + // Name is already being used for another user list for the account. + UserListErrorEnum_NAME_ALREADY_USED UserListErrorEnum_UserListError = 12 + // Name is required when creating a new conversion type. + UserListErrorEnum_NEW_CONVERSION_TYPE_NAME_REQUIRED UserListErrorEnum_UserListError = 13 + // The given conversion type name has been used. + UserListErrorEnum_CONVERSION_TYPE_NAME_ALREADY_USED UserListErrorEnum_UserListError = 14 + // Only an owner account may edit a user list. + UserListErrorEnum_OWNERSHIP_REQUIRED_FOR_SET UserListErrorEnum_UserListError = 15 + // Creating user list without setting type in oneof user_list field, or + // creating/updating read-only user list types is not allowed. + UserListErrorEnum_USER_LIST_MUTATE_NOT_SUPPORTED UserListErrorEnum_UserListError = 16 + // Rule is invalid. + UserListErrorEnum_INVALID_RULE UserListErrorEnum_UserListError = 17 + // The specified date range is empty. + UserListErrorEnum_INVALID_DATE_RANGE UserListErrorEnum_UserListError = 27 + // A UserList which is privacy sensitive or legal rejected cannot be mutated + // by external users. + UserListErrorEnum_CAN_NOT_MUTATE_SENSITIVE_USERLIST UserListErrorEnum_UserListError = 28 + // Maximum number of rulebased user lists a customer can have. + UserListErrorEnum_MAX_NUM_RULEBASED_USERLISTS UserListErrorEnum_UserListError = 29 + // BasicUserList's billable record field cannot be modified once it is set. + UserListErrorEnum_CANNOT_MODIFY_BILLABLE_RECORD_COUNT UserListErrorEnum_UserListError = 30 + // crm_based_user_list.app_id field must be set when upload_key_type is + // MOBILE_ADVERTISING_ID. + UserListErrorEnum_APP_ID_NOT_SET UserListErrorEnum_UserListError = 31 + // Name of the user list is reserved for system generated lists and cannot + // be used. + UserListErrorEnum_USERLIST_NAME_IS_RESERVED_FOR_SYSTEM_LIST UserListErrorEnum_UserListError = 32 + // Advertiser needs to be whitelisted to use remarketing lists created from + // advertiser uploaded data (e.g., Customer Match lists). + UserListErrorEnum_ADVERTISER_NOT_WHITELISTED_FOR_USING_UPLOADED_DATA UserListErrorEnum_UserListError = 33 + // The provided rule_type is not supported for the user list. + UserListErrorEnum_RULE_TYPE_IS_NOT_SUPPORTED UserListErrorEnum_UserListError = 34 + // Similar user list cannot be used as a logical user list operand. + UserListErrorEnum_CAN_NOT_ADD_A_SIMILAR_USERLIST_AS_LOGICAL_LIST_OPERAND UserListErrorEnum_UserListError = 35 + // Logical user list should not have a mix of CRM based user list and other + // types of lists in its rules. 
+ UserListErrorEnum_CAN_NOT_MIX_CRM_BASED_IN_LOGICAL_LIST_WITH_OTHER_LISTS UserListErrorEnum_UserListError = 36 +) + +var UserListErrorEnum_UserListError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "EXTERNAL_REMARKETING_USER_LIST_MUTATE_NOT_SUPPORTED", + 3: "CONCRETE_TYPE_REQUIRED", + 4: "CONVERSION_TYPE_ID_REQUIRED", + 5: "DUPLICATE_CONVERSION_TYPES", + 6: "INVALID_CONVERSION_TYPE", + 7: "INVALID_DESCRIPTION", + 8: "INVALID_NAME", + 9: "INVALID_TYPE", + 10: "CAN_NOT_ADD_LOGICAL_LIST_AS_LOGICAL_LIST_OPERAND", + 11: "INVALID_USER_LIST_LOGICAL_RULE_OPERAND", + 12: "NAME_ALREADY_USED", + 13: "NEW_CONVERSION_TYPE_NAME_REQUIRED", + 14: "CONVERSION_TYPE_NAME_ALREADY_USED", + 15: "OWNERSHIP_REQUIRED_FOR_SET", + 16: "USER_LIST_MUTATE_NOT_SUPPORTED", + 17: "INVALID_RULE", + 27: "INVALID_DATE_RANGE", + 28: "CAN_NOT_MUTATE_SENSITIVE_USERLIST", + 29: "MAX_NUM_RULEBASED_USERLISTS", + 30: "CANNOT_MODIFY_BILLABLE_RECORD_COUNT", + 31: "APP_ID_NOT_SET", + 32: "USERLIST_NAME_IS_RESERVED_FOR_SYSTEM_LIST", + 33: "ADVERTISER_NOT_WHITELISTED_FOR_USING_UPLOADED_DATA", + 34: "RULE_TYPE_IS_NOT_SUPPORTED", + 35: "CAN_NOT_ADD_A_SIMILAR_USERLIST_AS_LOGICAL_LIST_OPERAND", + 36: "CAN_NOT_MIX_CRM_BASED_IN_LOGICAL_LIST_WITH_OTHER_LISTS", +} +var UserListErrorEnum_UserListError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "EXTERNAL_REMARKETING_USER_LIST_MUTATE_NOT_SUPPORTED": 2, + "CONCRETE_TYPE_REQUIRED": 3, + "CONVERSION_TYPE_ID_REQUIRED": 4, + "DUPLICATE_CONVERSION_TYPES": 5, + "INVALID_CONVERSION_TYPE": 6, + "INVALID_DESCRIPTION": 7, + "INVALID_NAME": 8, + "INVALID_TYPE": 9, + "CAN_NOT_ADD_LOGICAL_LIST_AS_LOGICAL_LIST_OPERAND": 10, + "INVALID_USER_LIST_LOGICAL_RULE_OPERAND": 11, + "NAME_ALREADY_USED": 12, + "NEW_CONVERSION_TYPE_NAME_REQUIRED": 13, + "CONVERSION_TYPE_NAME_ALREADY_USED": 14, + "OWNERSHIP_REQUIRED_FOR_SET": 15, + "USER_LIST_MUTATE_NOT_SUPPORTED": 16, + "INVALID_RULE": 17, + "INVALID_DATE_RANGE": 27, + "CAN_NOT_MUTATE_SENSITIVE_USERLIST": 28, + "MAX_NUM_RULEBASED_USERLISTS": 29, + "CANNOT_MODIFY_BILLABLE_RECORD_COUNT": 30, + "APP_ID_NOT_SET": 31, + "USERLIST_NAME_IS_RESERVED_FOR_SYSTEM_LIST": 32, + "ADVERTISER_NOT_WHITELISTED_FOR_USING_UPLOADED_DATA": 33, + "RULE_TYPE_IS_NOT_SUPPORTED": 34, + "CAN_NOT_ADD_A_SIMILAR_USERLIST_AS_LOGICAL_LIST_OPERAND": 35, + "CAN_NOT_MIX_CRM_BASED_IN_LOGICAL_LIST_WITH_OTHER_LISTS": 36, +} + +func (x UserListErrorEnum_UserListError) String() string { + return proto.EnumName(UserListErrorEnum_UserListError_name, int32(x)) +} +func (UserListErrorEnum_UserListError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_user_list_error_de5f1fdf418dc5db, []int{0, 0} +} + +// Container for enum describing possible user list errors. 
+type UserListErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListErrorEnum) Reset() { *m = UserListErrorEnum{} } +func (m *UserListErrorEnum) String() string { return proto.CompactTextString(m) } +func (*UserListErrorEnum) ProtoMessage() {} +func (*UserListErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_error_de5f1fdf418dc5db, []int{0} +} +func (m *UserListErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListErrorEnum.Unmarshal(m, b) +} +func (m *UserListErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListErrorEnum.Marshal(b, m, deterministic) +} +func (dst *UserListErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListErrorEnum.Merge(dst, src) +} +func (m *UserListErrorEnum) XXX_Size() int { + return xxx_messageInfo_UserListErrorEnum.Size(m) +} +func (m *UserListErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_UserListErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*UserListErrorEnum)(nil), "google.ads.googleads.v1.errors.UserListErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.UserListErrorEnum_UserListError", UserListErrorEnum_UserListError_name, UserListErrorEnum_UserListError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/user_list_error.proto", fileDescriptor_user_list_error_de5f1fdf418dc5db) +} + +var fileDescriptor_user_list_error_de5f1fdf418dc5db = []byte{ + // 742 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xed, 0x6e, 0x23, 0x35, + 0x14, 0xa5, 0x59, 0xd8, 0x80, 0xbb, 0x1f, 0xae, 0x11, 0xbb, 0x52, 0xbb, 0x64, 0xd9, 0x2c, 0x1f, + 0x02, 0x89, 0x09, 0x61, 0x57, 0x45, 0x1a, 0x7e, 0x39, 0xe3, 0xdb, 0xc4, 0xea, 0x8c, 0x67, 0xb0, + 0x3d, 0x93, 0x06, 0x45, 0xb2, 0x02, 0x89, 0xa2, 0x48, 0x6d, 0xa6, 0xca, 0xa4, 0x7d, 0x20, 0x7e, + 0xf2, 0x28, 0x3c, 0x00, 0x0f, 0x81, 0xe0, 0x1d, 0x90, 0x3d, 0x99, 0x69, 0x12, 0x51, 0xf6, 0x57, + 0x1c, 0xdf, 0x73, 0x8e, 0xef, 0x3d, 0xf7, 0xce, 0x45, 0x6f, 0xe7, 0x79, 0x3e, 0xbf, 0x9c, 0x75, + 0x26, 0xd3, 0xa2, 0x53, 0x1e, 0xed, 0xe9, 0xb6, 0xdb, 0x99, 0xad, 0x56, 0xf9, 0xaa, 0xe8, 0xdc, + 0x14, 0xb3, 0x95, 0xb9, 0x5c, 0x14, 0x6b, 0xe3, 0x2e, 0xbc, 0xeb, 0x55, 0xbe, 0xce, 0x49, 0xab, + 0x84, 0x7a, 0x93, 0x69, 0xe1, 0xd5, 0x2c, 0xef, 0xb6, 0xeb, 0x95, 0xac, 0xe3, 0x17, 0x95, 0xea, + 0xf5, 0xa2, 0x33, 0x59, 0x2e, 0xf3, 0xf5, 0x64, 0xbd, 0xc8, 0x97, 0x45, 0xc9, 0x6e, 0xff, 0xdd, + 0x44, 0x47, 0x69, 0x31, 0x5b, 0x85, 0x8b, 0x62, 0x0d, 0x96, 0x00, 0xcb, 0x9b, 0xab, 0xf6, 0x9f, + 0x4d, 0xf4, 0x78, 0xe7, 0x96, 0x3c, 0x45, 0x87, 0xa9, 0x50, 0x09, 0x04, 0xfc, 0x8c, 0x03, 0xc3, + 0xef, 0x91, 0x43, 0xd4, 0x4c, 0xc5, 0xb9, 0x88, 0x87, 0x02, 0x1f, 0x90, 0x1f, 0xd0, 0x1b, 0xb8, + 0xd0, 0x20, 0x05, 0x0d, 0x8d, 0x84, 0x88, 0xca, 0x73, 0xd0, 0x5c, 0xf4, 0x4d, 0xaa, 0x40, 0x9a, + 0x90, 0x2b, 0x6d, 0xa2, 0x54, 0x53, 0x0d, 0x46, 0xc4, 0xda, 0xa8, 0x34, 0x49, 0x62, 0xa9, 0x81, + 0xe1, 0x06, 0x39, 0x46, 0xcf, 0x82, 0x58, 0x04, 0x12, 0x34, 0x18, 0x3d, 0x4a, 0xc0, 0x48, 0xf8, + 0x29, 0xe5, 0x12, 0x18, 0x7e, 0x40, 0x5e, 0xa2, 0x93, 0x20, 0x16, 0x19, 0x48, 0xc5, 0x63, 0x51, + 0x46, 0x39, 0xbb, 0x03, 0xbc, 0x4f, 0x5a, 0xe8, 0x98, 0xa5, 0x49, 0xc8, 0x03, 0xab, 0xbc, 0x07, + 0x55, 0xf8, 0x03, 0x72, 0x82, 0x9e, 0x73, 0x91, 0xd1, 0x90, 0xb3, 0xfd, 0x28, 0x7e, 0x48, 
0x9e, + 0xa3, 0x8f, 0xab, 0x20, 0x03, 0x15, 0x48, 0x9e, 0x68, 0x1e, 0x0b, 0xdc, 0x24, 0x18, 0x3d, 0xaa, + 0x02, 0x82, 0x46, 0x80, 0x3f, 0xdc, 0xbe, 0x71, 0xe4, 0x8f, 0xc8, 0x5b, 0xf4, 0x5d, 0x40, 0x85, + 0xab, 0x86, 0x32, 0x66, 0xc2, 0xb8, 0xcf, 0x03, 0x1a, 0x96, 0x95, 0x52, 0xb5, 0xfb, 0x3f, 0x4e, + 0x40, 0x52, 0xc1, 0x30, 0x22, 0xdf, 0xa0, 0x2f, 0x2b, 0x9d, 0x3b, 0x63, 0x2a, 0xac, 0x4c, 0x43, + 0xa8, 0xb1, 0x87, 0xe4, 0x13, 0x74, 0x64, 0x5f, 0x37, 0x34, 0x94, 0x40, 0xd9, 0xc8, 0x12, 0x18, + 0x7e, 0x44, 0xbe, 0x40, 0xaf, 0x04, 0x0c, 0xf7, 0xcb, 0x71, 0x89, 0xde, 0x39, 0xf3, 0xd8, 0xc2, + 0xfe, 0x13, 0xb2, 0xa3, 0xf6, 0xc4, 0x1a, 0x18, 0x0f, 0x05, 0x48, 0x35, 0xe0, 0x49, 0x4d, 0x37, + 0x67, 0xb1, 0x34, 0x0a, 0x34, 0x7e, 0x4a, 0xda, 0xa8, 0xf5, 0x8e, 0x0e, 0xe2, 0x6d, 0x73, 0x6c, + 0x09, 0xf8, 0x88, 0x3c, 0x43, 0xa4, 0x76, 0xd6, 0x32, 0x24, 0x15, 0x7d, 0xc0, 0x27, 0x2e, 0xa9, + 0x8d, 0x69, 0x1b, 0x2d, 0x05, 0x42, 0x71, 0xcd, 0x33, 0x70, 0x7e, 0xd8, 0x57, 0xf0, 0x0b, 0xdb, + 0xf6, 0x88, 0x5e, 0x18, 0x91, 0x46, 0x4e, 0xb0, 0x47, 0x15, 0xb0, 0x3a, 0xae, 0xf0, 0xa7, 0xe4, + 0x2b, 0xf4, 0x3a, 0xa0, 0xc2, 0xc9, 0xc4, 0x8c, 0x9f, 0x8d, 0x4c, 0x8f, 0x87, 0x21, 0xed, 0x85, + 0xd6, 0x81, 0x20, 0x96, 0xb6, 0xd9, 0xa9, 0xd0, 0xb8, 0x45, 0x08, 0x7a, 0x42, 0x93, 0xc4, 0x0e, + 0x8d, 0x4b, 0x1a, 0x34, 0x7e, 0x49, 0xbe, 0x45, 0x5f, 0x57, 0x5a, 0xa5, 0x25, 0x5c, 0x19, 0x09, + 0x0a, 0x64, 0x56, 0x55, 0x3e, 0x52, 0x1a, 0x22, 0x57, 0x32, 0xfe, 0x8c, 0x9c, 0xa2, 0xef, 0x29, + 0xcb, 0x40, 0x6a, 0x6e, 0x7d, 0xb0, 0x32, 0xc3, 0x01, 0xd7, 0x60, 0xa3, 0x1b, 0x7c, 0xaa, 0xdc, + 0xa0, 0x27, 0x61, 0x4c, 0x19, 0xb8, 0x8a, 0x29, 0x7e, 0x65, 0x9d, 0x75, 0x0d, 0x2d, 0xa7, 0x56, + 0xed, 0xb9, 0xd6, 0x26, 0x3e, 0x3a, 0xdd, 0x1e, 0x20, 0x6a, 0x14, 0x8f, 0x78, 0x48, 0x65, 0x5d, + 0xe8, 0xbd, 0x63, 0xf4, 0x7a, 0x9b, 0x1b, 0xf1, 0x0b, 0x13, 0xc8, 0xc8, 0x94, 0x26, 0x71, 0xb1, + 0x4b, 0x19, 0x72, 0x3d, 0x30, 0xb1, 0x1e, 0x6c, 0x3a, 0xa8, 0xf0, 0xe7, 0xbd, 0x7f, 0x0e, 0x50, + 0xfb, 0xd7, 0xfc, 0xca, 0xfb, 0xff, 0x9d, 0xd1, 0x23, 0x3b, 0x1f, 0x7f, 0x62, 0x37, 0x45, 0x72, + 0xf0, 0x33, 0xdb, 0xb0, 0xe6, 0xf9, 0xe5, 0x64, 0x39, 0xf7, 0xf2, 0xd5, 0xbc, 0x33, 0x9f, 0x2d, + 0xdd, 0x1e, 0xa9, 0xf6, 0xd5, 0xf5, 0xa2, 0xb8, 0x6f, 0x7d, 0xfd, 0x58, 0xfe, 0xfc, 0xd6, 0x78, + 0xd0, 0xa7, 0xf4, 0xf7, 0x46, 0xab, 0x5f, 0x8a, 0xd1, 0x69, 0xe1, 0x95, 0x47, 0x7b, 0xca, 0xba, + 0x9e, 0x7b, 0xb2, 0xf8, 0xa3, 0x02, 0x8c, 0xe9, 0xb4, 0x18, 0xd7, 0x80, 0x71, 0xd6, 0x1d, 0x97, + 0x80, 0xbf, 0x1a, 0xed, 0xf2, 0xd6, 0xf7, 0xe9, 0xb4, 0xf0, 0xfd, 0x1a, 0xe2, 0xfb, 0x59, 0xd7, + 0xf7, 0x4b, 0xd0, 0x2f, 0x0f, 0x5d, 0x76, 0x6f, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x5f, 0x7c, + 0xfa, 0x44, 0x5b, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/youtube_video_registration_error.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/youtube_video_registration_error.pb.go new file mode 100644 index 0000000..f3c5150 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/errors/youtube_video_registration_error.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/errors/youtube_video_registration_error.proto + +package errors // import "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum describing YouTube video registration errors. +type YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError int32 + +const ( + // Enum unspecified. + YoutubeVideoRegistrationErrorEnum_UNSPECIFIED YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError = 0 + // The received error code is not known in this version. + YoutubeVideoRegistrationErrorEnum_UNKNOWN YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError = 1 + // Video to be registered wasn't found. + YoutubeVideoRegistrationErrorEnum_VIDEO_NOT_FOUND YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError = 2 + // Video to be registered is not accessible (e.g. private). + YoutubeVideoRegistrationErrorEnum_VIDEO_NOT_ACCESSIBLE YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError = 3 +) + +var YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "VIDEO_NOT_FOUND", + 3: "VIDEO_NOT_ACCESSIBLE", +} +var YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "VIDEO_NOT_FOUND": 2, + "VIDEO_NOT_ACCESSIBLE": 3, +} + +func (x YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError) String() string { + return proto.EnumName(YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError_name, int32(x)) +} +func (YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_youtube_video_registration_error_b51a63b007d526e2, []int{0, 0} +} + +// Container for enum describing YouTube video registration errors. 
+type YoutubeVideoRegistrationErrorEnum struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YoutubeVideoRegistrationErrorEnum) Reset() { *m = YoutubeVideoRegistrationErrorEnum{} } +func (m *YoutubeVideoRegistrationErrorEnum) String() string { return proto.CompactTextString(m) } +func (*YoutubeVideoRegistrationErrorEnum) ProtoMessage() {} +func (*YoutubeVideoRegistrationErrorEnum) Descriptor() ([]byte, []int) { + return fileDescriptor_youtube_video_registration_error_b51a63b007d526e2, []int{0} +} +func (m *YoutubeVideoRegistrationErrorEnum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YoutubeVideoRegistrationErrorEnum.Unmarshal(m, b) +} +func (m *YoutubeVideoRegistrationErrorEnum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YoutubeVideoRegistrationErrorEnum.Marshal(b, m, deterministic) +} +func (dst *YoutubeVideoRegistrationErrorEnum) XXX_Merge(src proto.Message) { + xxx_messageInfo_YoutubeVideoRegistrationErrorEnum.Merge(dst, src) +} +func (m *YoutubeVideoRegistrationErrorEnum) XXX_Size() int { + return xxx_messageInfo_YoutubeVideoRegistrationErrorEnum.Size(m) +} +func (m *YoutubeVideoRegistrationErrorEnum) XXX_DiscardUnknown() { + xxx_messageInfo_YoutubeVideoRegistrationErrorEnum.DiscardUnknown(m) +} + +var xxx_messageInfo_YoutubeVideoRegistrationErrorEnum proto.InternalMessageInfo + +func init() { + proto.RegisterType((*YoutubeVideoRegistrationErrorEnum)(nil), "google.ads.googleads.v1.errors.YoutubeVideoRegistrationErrorEnum") + proto.RegisterEnum("google.ads.googleads.v1.errors.YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError", YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError_name, YoutubeVideoRegistrationErrorEnum_YoutubeVideoRegistrationError_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/errors/youtube_video_registration_error.proto", fileDescriptor_youtube_video_registration_error_b51a63b007d526e2) +} + +var fileDescriptor_youtube_video_registration_error_b51a63b007d526e2 = []byte{ + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x41, 0x4a, 0xc3, 0x40, + 0x14, 0x86, 0x4d, 0x0a, 0x0a, 0xd3, 0x45, 0x43, 0x74, 0x21, 0xa2, 0x05, 0x73, 0x80, 0x09, 0xc1, + 0xdd, 0xb8, 0x4a, 0x9b, 0x69, 0x09, 0x4a, 0x52, 0xac, 0x8d, 0x28, 0x81, 0x90, 0x3a, 0xc3, 0x10, + 0x68, 0xe7, 0x95, 0x99, 0xb4, 0xe0, 0x31, 0xbc, 0x82, 0x4b, 0x8f, 0xe2, 0x51, 0xbc, 0x83, 0x20, + 0xc9, 0xd8, 0xea, 0xc6, 0xae, 0xf2, 0xf3, 0xf8, 0xde, 0xff, 0xbf, 0x3f, 0x83, 0xa8, 0x00, 0x10, + 0x0b, 0xee, 0x97, 0x4c, 0xfb, 0x46, 0x36, 0x6a, 0x13, 0xf8, 0x5c, 0x29, 0x50, 0xda, 0x7f, 0x81, + 0x75, 0xbd, 0x9e, 0xf3, 0x62, 0x53, 0x31, 0x0e, 0x85, 0xe2, 0xa2, 0xd2, 0xb5, 0x2a, 0xeb, 0x0a, + 0x64, 0xd1, 0x12, 0x78, 0xa5, 0xa0, 0x06, 0xb7, 0x6f, 0x76, 0x71, 0xc9, 0x34, 0xde, 0xd9, 0xe0, + 0x4d, 0x80, 0x8d, 0xcd, 0xd9, 0xf9, 0x36, 0x66, 0x55, 0xf9, 0xa5, 0x94, 0x50, 0xb7, 0x16, 0xda, + 0x6c, 0x7b, 0xaf, 0x16, 0xba, 0x7c, 0x34, 0x41, 0x59, 0x93, 0x73, 0xf7, 0x27, 0x86, 0x36, 0x06, + 0x54, 0xae, 0x97, 0xde, 0x02, 0x5d, 0xec, 0x85, 0xdc, 0x1e, 0xea, 0xce, 0x92, 0xe9, 0x84, 0x0e, + 0xe3, 0x51, 0x4c, 0x23, 0xe7, 0xc0, 0xed, 0xa2, 0xa3, 0x59, 0x72, 0x93, 0xa4, 0x0f, 0x89, 0x63, + 0xb9, 0xc7, 0xa8, 0x97, 0xc5, 0x11, 0x4d, 0x8b, 0x24, 0xbd, 0x2f, 0x46, 0xe9, 0x2c, 0x89, 0x1c, + 0xdb, 0x3d, 0x45, 0x27, 0xbf, 0xc3, 0x70, 0x38, 0xa4, 0xd3, 0x69, 0x3c, 0xb8, 0xa5, 0x4e, 0x67, + 0xf0, 0x65, 
0x21, 0xef, 0x19, 0x96, 0x78, 0x7f, 0xb1, 0x81, 0xb7, 0xf7, 0xa4, 0x49, 0x53, 0x6f, + 0x62, 0x3d, 0x45, 0x3f, 0x2e, 0x02, 0x16, 0xa5, 0x14, 0x18, 0x94, 0xf0, 0x05, 0x97, 0x6d, 0xf9, + 0xed, 0x5f, 0x5f, 0x55, 0xfa, 0xbf, 0x47, 0xb8, 0x36, 0x9f, 0x37, 0xbb, 0x33, 0x0e, 0xc3, 0x77, + 0xbb, 0x3f, 0x36, 0x66, 0x21, 0xd3, 0xd8, 0xc8, 0x46, 0x65, 0x01, 0x6e, 0x23, 0xf5, 0xc7, 0x16, + 0xc8, 0x43, 0xa6, 0xf3, 0x1d, 0x90, 0x67, 0x41, 0x6e, 0x80, 0x4f, 0xdb, 0x33, 0x53, 0x42, 0x42, + 0xa6, 0x09, 0xd9, 0x21, 0x84, 0x64, 0x01, 0x21, 0x06, 0x9a, 0x1f, 0xb6, 0xd7, 0x5d, 0x7d, 0x07, + 0x00, 0x00, 0xff, 0xff, 0x20, 0xae, 0x04, 0xe3, 0x21, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget.pb.go new file mode 100644 index 0000000..f214a5c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget.pb.go @@ -0,0 +1,1055 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/account_budget.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An account-level budget. It contains information about the budget itself, +// as well as the most recently approved changes to the budget and proposed +// changes that are pending approval. The proposed changes that are pending +// approval, if any, are found in 'pending_proposal'. Effective details about +// the budget are found in fields prefixed 'approved_', 'adjusted_' and those +// without a prefix. Since some effective details may differ from what the user +// had originally requested (e.g. spending limit), these differences are +// juxtaposed via 'proposed_', 'approved_', and possibly 'adjusted_' fields. +// +// This resource is mutated using AccountBudgetProposal and cannot be mutated +// directly. A budget may have at most one pending proposal at any given time. +// It is read through pending_proposal. +// +// Once approved, a budget may be subject to adjustments, such as credit +// adjustments. Adjustments create differences between the 'approved' and +// 'adjusted' fields, which would otherwise be identical. +type AccountBudget struct { + // The resource name of the account-level budget. + // AccountBudget resource names have the form: + // + // `customers/{customer_id}/accountBudgets/{account_budget_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the account-level budget. 
+ Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The resource name of the billing setup associated with this account-level + // budget. BillingSetup resource names have the form: + // + // `customers/{customer_id}/billingSetups/{billing_setup_id}` + BillingSetup *wrappers.StringValue `protobuf:"bytes,3,opt,name=billing_setup,json=billingSetup,proto3" json:"billing_setup,omitempty"` + // The status of this account-level budget. + Status enums.AccountBudgetStatusEnum_AccountBudgetStatus `protobuf:"varint,4,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.AccountBudgetStatusEnum_AccountBudgetStatus" json:"status,omitempty"` + // The name of the account-level budget. + Name *wrappers.StringValue `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + // The proposed start time of the account-level budget in + // yyyy-MM-dd HH:mm:ss format. If a start time type of NOW was proposed, + // this is the time of request. + ProposedStartDateTime *wrappers.StringValue `protobuf:"bytes,6,opt,name=proposed_start_date_time,json=proposedStartDateTime,proto3" json:"proposed_start_date_time,omitempty"` + // The approved start time of the account-level budget in yyyy-MM-dd HH:mm:ss + // format. + // + // For example, if a new budget is approved after the proposed start time, + // the approved start time is the time of approval. + ApprovedStartDateTime *wrappers.StringValue `protobuf:"bytes,7,opt,name=approved_start_date_time,json=approvedStartDateTime,proto3" json:"approved_start_date_time,omitempty"` + // The total adjustments amount. + // + // An example of an adjustment is courtesy credits. + TotalAdjustmentsMicros *wrappers.Int64Value `protobuf:"bytes,18,opt,name=total_adjustments_micros,json=totalAdjustmentsMicros,proto3" json:"total_adjustments_micros,omitempty"` + // The value of Ads that have been served, in micros. + // + // This includes overdelivery costs, in which case a credit might be + // automatically applied to the budget (see total_adjustments_micros). + AmountServedMicros *wrappers.Int64Value `protobuf:"bytes,19,opt,name=amount_served_micros,json=amountServedMicros,proto3" json:"amount_served_micros,omitempty"` + // A purchase order number is a value that helps users reference this budget + // in their monthly invoices. + PurchaseOrderNumber *wrappers.StringValue `protobuf:"bytes,20,opt,name=purchase_order_number,json=purchaseOrderNumber,proto3" json:"purchase_order_number,omitempty"` + // Notes associated with the budget. + Notes *wrappers.StringValue `protobuf:"bytes,21,opt,name=notes,proto3" json:"notes,omitempty"` + // The pending proposal to modify this budget, if applicable. + PendingProposal *AccountBudget_PendingAccountBudgetProposal `protobuf:"bytes,22,opt,name=pending_proposal,json=pendingProposal,proto3" json:"pending_proposal,omitempty"` + // The proposed end time of the account-level budget. + // + // Types that are valid to be assigned to ProposedEndTime: + // *AccountBudget_ProposedEndDateTime + // *AccountBudget_ProposedEndTimeType + ProposedEndTime isAccountBudget_ProposedEndTime `protobuf_oneof:"proposed_end_time"` + // The approved end time of the account-level budget. + // + // For example, if a budget's end time is updated and the proposal is approved + // after the proposed end time, the approved end time is the time of approval. 
+ // + // Types that are valid to be assigned to ApprovedEndTime: + // *AccountBudget_ApprovedEndDateTime + // *AccountBudget_ApprovedEndTimeType + ApprovedEndTime isAccountBudget_ApprovedEndTime `protobuf_oneof:"approved_end_time"` + // The proposed spending limit. + // + // Types that are valid to be assigned to ProposedSpendingLimit: + // *AccountBudget_ProposedSpendingLimitMicros + // *AccountBudget_ProposedSpendingLimitType + ProposedSpendingLimit isAccountBudget_ProposedSpendingLimit `protobuf_oneof:"proposed_spending_limit"` + // The approved spending limit. + // + // For example, if the amount already spent by the account exceeds the + // proposed spending limit at the time the proposal is approved, the approved + // spending limit is set to the amount already spent. + // + // Types that are valid to be assigned to ApprovedSpendingLimit: + // *AccountBudget_ApprovedSpendingLimitMicros + // *AccountBudget_ApprovedSpendingLimitType + ApprovedSpendingLimit isAccountBudget_ApprovedSpendingLimit `protobuf_oneof:"approved_spending_limit"` + // The spending limit after adjustments have been applied. Adjustments are + // stored in total_adjustments_micros. + // + // This value has the final say on how much the account is allowed to spend. + // + // Types that are valid to be assigned to AdjustedSpendingLimit: + // *AccountBudget_AdjustedSpendingLimitMicros + // *AccountBudget_AdjustedSpendingLimitType + AdjustedSpendingLimit isAccountBudget_AdjustedSpendingLimit `protobuf_oneof:"adjusted_spending_limit"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudget) Reset() { *m = AccountBudget{} } +func (m *AccountBudget) String() string { return proto.CompactTextString(m) } +func (*AccountBudget) ProtoMessage() {} +func (*AccountBudget) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_80044f3210f6054b, []int{0} +} +func (m *AccountBudget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudget.Unmarshal(m, b) +} +func (m *AccountBudget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudget.Marshal(b, m, deterministic) +} +func (dst *AccountBudget) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudget.Merge(dst, src) +} +func (m *AccountBudget) XXX_Size() int { + return xxx_messageInfo_AccountBudget.Size(m) +} +func (m *AccountBudget) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudget.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudget proto.InternalMessageInfo + +func (m *AccountBudget) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AccountBudget) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *AccountBudget) GetBillingSetup() *wrappers.StringValue { + if m != nil { + return m.BillingSetup + } + return nil +} + +func (m *AccountBudget) GetStatus() enums.AccountBudgetStatusEnum_AccountBudgetStatus { + if m != nil { + return m.Status + } + return enums.AccountBudgetStatusEnum_UNSPECIFIED +} + +func (m *AccountBudget) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *AccountBudget) GetProposedStartDateTime() *wrappers.StringValue { + if m != nil { + return m.ProposedStartDateTime + } + return nil +} + +func (m *AccountBudget) GetApprovedStartDateTime() *wrappers.StringValue { + if m != nil { + return m.ApprovedStartDateTime + } + return nil +} + +func (m *AccountBudget) 
GetTotalAdjustmentsMicros() *wrappers.Int64Value { + if m != nil { + return m.TotalAdjustmentsMicros + } + return nil +} + +func (m *AccountBudget) GetAmountServedMicros() *wrappers.Int64Value { + if m != nil { + return m.AmountServedMicros + } + return nil +} + +func (m *AccountBudget) GetPurchaseOrderNumber() *wrappers.StringValue { + if m != nil { + return m.PurchaseOrderNumber + } + return nil +} + +func (m *AccountBudget) GetNotes() *wrappers.StringValue { + if m != nil { + return m.Notes + } + return nil +} + +func (m *AccountBudget) GetPendingProposal() *AccountBudget_PendingAccountBudgetProposal { + if m != nil { + return m.PendingProposal + } + return nil +} + +type isAccountBudget_ProposedEndTime interface { + isAccountBudget_ProposedEndTime() +} + +type AccountBudget_ProposedEndDateTime struct { + ProposedEndDateTime *wrappers.StringValue `protobuf:"bytes,8,opt,name=proposed_end_date_time,json=proposedEndDateTime,proto3,oneof"` +} + +type AccountBudget_ProposedEndTimeType struct { + ProposedEndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,9,opt,name=proposed_end_time_type,json=proposedEndTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudget_ProposedEndDateTime) isAccountBudget_ProposedEndTime() {} + +func (*AccountBudget_ProposedEndTimeType) isAccountBudget_ProposedEndTime() {} + +func (m *AccountBudget) GetProposedEndTime() isAccountBudget_ProposedEndTime { + if m != nil { + return m.ProposedEndTime + } + return nil +} + +func (m *AccountBudget) GetProposedEndDateTime() *wrappers.StringValue { + if x, ok := m.GetProposedEndTime().(*AccountBudget_ProposedEndDateTime); ok { + return x.ProposedEndDateTime + } + return nil +} + +func (m *AccountBudget) GetProposedEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetProposedEndTime().(*AccountBudget_ProposedEndTimeType); ok { + return x.ProposedEndTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudget_ApprovedEndTime interface { + isAccountBudget_ApprovedEndTime() +} + +type AccountBudget_ApprovedEndDateTime struct { + ApprovedEndDateTime *wrappers.StringValue `protobuf:"bytes,10,opt,name=approved_end_date_time,json=approvedEndDateTime,proto3,oneof"` +} + +type AccountBudget_ApprovedEndTimeType struct { + ApprovedEndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,11,opt,name=approved_end_time_type,json=approvedEndTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudget_ApprovedEndDateTime) isAccountBudget_ApprovedEndTime() {} + +func (*AccountBudget_ApprovedEndTimeType) isAccountBudget_ApprovedEndTime() {} + +func (m *AccountBudget) GetApprovedEndTime() isAccountBudget_ApprovedEndTime { + if m != nil { + return m.ApprovedEndTime + } + return nil +} + +func (m *AccountBudget) GetApprovedEndDateTime() *wrappers.StringValue { + if x, ok := m.GetApprovedEndTime().(*AccountBudget_ApprovedEndDateTime); ok { + return x.ApprovedEndDateTime + } + return nil +} + +func (m *AccountBudget) GetApprovedEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetApprovedEndTime().(*AccountBudget_ApprovedEndTimeType); ok { + return x.ApprovedEndTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudget_ProposedSpendingLimit interface { + isAccountBudget_ProposedSpendingLimit() +} + +type AccountBudget_ProposedSpendingLimitMicros struct { + ProposedSpendingLimitMicros *wrappers.Int64Value 
`protobuf:"bytes,12,opt,name=proposed_spending_limit_micros,json=proposedSpendingLimitMicros,proto3,oneof"` +} + +type AccountBudget_ProposedSpendingLimitType struct { + ProposedSpendingLimitType enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,13,opt,name=proposed_spending_limit_type,json=proposedSpendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudget_ProposedSpendingLimitMicros) isAccountBudget_ProposedSpendingLimit() {} + +func (*AccountBudget_ProposedSpendingLimitType) isAccountBudget_ProposedSpendingLimit() {} + +func (m *AccountBudget) GetProposedSpendingLimit() isAccountBudget_ProposedSpendingLimit { + if m != nil { + return m.ProposedSpendingLimit + } + return nil +} + +func (m *AccountBudget) GetProposedSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetProposedSpendingLimit().(*AccountBudget_ProposedSpendingLimitMicros); ok { + return x.ProposedSpendingLimitMicros + } + return nil +} + +func (m *AccountBudget) GetProposedSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetProposedSpendingLimit().(*AccountBudget_ProposedSpendingLimitType); ok { + return x.ProposedSpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +type isAccountBudget_ApprovedSpendingLimit interface { + isAccountBudget_ApprovedSpendingLimit() +} + +type AccountBudget_ApprovedSpendingLimitMicros struct { + ApprovedSpendingLimitMicros *wrappers.Int64Value `protobuf:"bytes,14,opt,name=approved_spending_limit_micros,json=approvedSpendingLimitMicros,proto3,oneof"` +} + +type AccountBudget_ApprovedSpendingLimitType struct { + ApprovedSpendingLimitType enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,15,opt,name=approved_spending_limit_type,json=approvedSpendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudget_ApprovedSpendingLimitMicros) isAccountBudget_ApprovedSpendingLimit() {} + +func (*AccountBudget_ApprovedSpendingLimitType) isAccountBudget_ApprovedSpendingLimit() {} + +func (m *AccountBudget) GetApprovedSpendingLimit() isAccountBudget_ApprovedSpendingLimit { + if m != nil { + return m.ApprovedSpendingLimit + } + return nil +} + +func (m *AccountBudget) GetApprovedSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetApprovedSpendingLimit().(*AccountBudget_ApprovedSpendingLimitMicros); ok { + return x.ApprovedSpendingLimitMicros + } + return nil +} + +func (m *AccountBudget) GetApprovedSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetApprovedSpendingLimit().(*AccountBudget_ApprovedSpendingLimitType); ok { + return x.ApprovedSpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +type isAccountBudget_AdjustedSpendingLimit interface { + isAccountBudget_AdjustedSpendingLimit() +} + +type AccountBudget_AdjustedSpendingLimitMicros struct { + AdjustedSpendingLimitMicros *wrappers.Int64Value `protobuf:"bytes,16,opt,name=adjusted_spending_limit_micros,json=adjustedSpendingLimitMicros,proto3,oneof"` +} + +type AccountBudget_AdjustedSpendingLimitType struct { + AdjustedSpendingLimitType enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,17,opt,name=adjusted_spending_limit_type,json=adjustedSpendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudget_AdjustedSpendingLimitMicros) isAccountBudget_AdjustedSpendingLimit() 
{} + +func (*AccountBudget_AdjustedSpendingLimitType) isAccountBudget_AdjustedSpendingLimit() {} + +func (m *AccountBudget) GetAdjustedSpendingLimit() isAccountBudget_AdjustedSpendingLimit { + if m != nil { + return m.AdjustedSpendingLimit + } + return nil +} + +func (m *AccountBudget) GetAdjustedSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetAdjustedSpendingLimit().(*AccountBudget_AdjustedSpendingLimitMicros); ok { + return x.AdjustedSpendingLimitMicros + } + return nil +} + +func (m *AccountBudget) GetAdjustedSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetAdjustedSpendingLimit().(*AccountBudget_AdjustedSpendingLimitType); ok { + return x.AdjustedSpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AccountBudget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AccountBudget_OneofMarshaler, _AccountBudget_OneofUnmarshaler, _AccountBudget_OneofSizer, []interface{}{ + (*AccountBudget_ProposedEndDateTime)(nil), + (*AccountBudget_ProposedEndTimeType)(nil), + (*AccountBudget_ApprovedEndDateTime)(nil), + (*AccountBudget_ApprovedEndTimeType)(nil), + (*AccountBudget_ProposedSpendingLimitMicros)(nil), + (*AccountBudget_ProposedSpendingLimitType)(nil), + (*AccountBudget_ApprovedSpendingLimitMicros)(nil), + (*AccountBudget_ApprovedSpendingLimitType)(nil), + (*AccountBudget_AdjustedSpendingLimitMicros)(nil), + (*AccountBudget_AdjustedSpendingLimitType)(nil), + } +} + +func _AccountBudget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AccountBudget) + // proposed_end_time + switch x := m.ProposedEndTime.(type) { + case *AccountBudget_ProposedEndDateTime: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProposedEndDateTime); err != nil { + return err + } + case *AccountBudget_ProposedEndTimeType: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ProposedEndTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudget.ProposedEndTime has unexpected type %T", x) + } + // approved_end_time + switch x := m.ApprovedEndTime.(type) { + case *AccountBudget_ApprovedEndDateTime: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApprovedEndDateTime); err != nil { + return err + } + case *AccountBudget_ApprovedEndTimeType: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ApprovedEndTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudget.ApprovedEndTime has unexpected type %T", x) + } + // proposed_spending_limit + switch x := m.ProposedSpendingLimit.(type) { + case *AccountBudget_ProposedSpendingLimitMicros: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProposedSpendingLimitMicros); err != nil { + return err + } + case *AccountBudget_ProposedSpendingLimitType: + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ProposedSpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudget.ProposedSpendingLimit has unexpected type %T", x) + } + // approved_spending_limit + switch x := m.ApprovedSpendingLimit.(type) { + case *AccountBudget_ApprovedSpendingLimitMicros: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApprovedSpendingLimitMicros); err != nil { + return err + } + case *AccountBudget_ApprovedSpendingLimitType: + 
b.EncodeVarint(15<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ApprovedSpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudget.ApprovedSpendingLimit has unexpected type %T", x) + } + // adjusted_spending_limit + switch x := m.AdjustedSpendingLimit.(type) { + case *AccountBudget_AdjustedSpendingLimitMicros: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdjustedSpendingLimitMicros); err != nil { + return err + } + case *AccountBudget_AdjustedSpendingLimitType: + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdjustedSpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudget.AdjustedSpendingLimit has unexpected type %T", x) + } + return nil +} + +func _AccountBudget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AccountBudget) + switch tag { + case 8: // proposed_end_time.proposed_end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ProposedEndTime = &AccountBudget_ProposedEndDateTime{msg} + return true, err + case 9: // proposed_end_time.proposed_end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ProposedEndTime = &AccountBudget_ProposedEndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 10: // approved_end_time.approved_end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ApprovedEndTime = &AccountBudget_ApprovedEndDateTime{msg} + return true, err + case 11: // approved_end_time.approved_end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ApprovedEndTime = &AccountBudget_ApprovedEndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 12: // proposed_spending_limit.proposed_spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.ProposedSpendingLimit = &AccountBudget_ProposedSpendingLimitMicros{msg} + return true, err + case 13: // proposed_spending_limit.proposed_spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ProposedSpendingLimit = &AccountBudget_ProposedSpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + case 14: // approved_spending_limit.approved_spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.ApprovedSpendingLimit = &AccountBudget_ApprovedSpendingLimitMicros{msg} + return true, err + case 15: // approved_spending_limit.approved_spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ApprovedSpendingLimit = &AccountBudget_ApprovedSpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + case 16: // adjusted_spending_limit.adjusted_spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.AdjustedSpendingLimit = &AccountBudget_AdjustedSpendingLimitMicros{msg} + return true, err + case 17: // 
adjusted_spending_limit.adjusted_spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.AdjustedSpendingLimit = &AccountBudget_AdjustedSpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + default: + return false, nil + } +} + +func _AccountBudget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AccountBudget) + // proposed_end_time + switch x := m.ProposedEndTime.(type) { + case *AccountBudget_ProposedEndDateTime: + s := proto.Size(x.ProposedEndDateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_ProposedEndTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ProposedEndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // approved_end_time + switch x := m.ApprovedEndTime.(type) { + case *AccountBudget_ApprovedEndDateTime: + s := proto.Size(x.ApprovedEndDateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_ApprovedEndTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ApprovedEndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // proposed_spending_limit + switch x := m.ProposedSpendingLimit.(type) { + case *AccountBudget_ProposedSpendingLimitMicros: + s := proto.Size(x.ProposedSpendingLimitMicros) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_ProposedSpendingLimitType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ProposedSpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // approved_spending_limit + switch x := m.ApprovedSpendingLimit.(type) { + case *AccountBudget_ApprovedSpendingLimitMicros: + s := proto.Size(x.ApprovedSpendingLimitMicros) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_ApprovedSpendingLimitType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ApprovedSpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // adjusted_spending_limit + switch x := m.AdjustedSpendingLimit.(type) { + case *AccountBudget_AdjustedSpendingLimitMicros: + s := proto.Size(x.AdjustedSpendingLimitMicros) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_AdjustedSpendingLimitType: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.AdjustedSpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A pending proposal associated with the enclosing account-level budget, +// if applicable. +type AccountBudget_PendingAccountBudgetProposal struct { + // The resource name of the proposal. + // AccountBudgetProposal resource names have the form: + // + // + // `customers/{customer_id}/accountBudgetProposals/{account_budget_proposal_id}` + AccountBudgetProposal *wrappers.StringValue `protobuf:"bytes,1,opt,name=account_budget_proposal,json=accountBudgetProposal,proto3" json:"account_budget_proposal,omitempty"` + // The type of this proposal, e.g. END to end the budget associated + // with this proposal. 
+ ProposalType enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType `protobuf:"varint,2,opt,name=proposal_type,json=proposalType,proto3,enum=google.ads.googleads.v1.enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType" json:"proposal_type,omitempty"` + // The name to assign to the account-level budget. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The start time in yyyy-MM-dd HH:mm:ss format. + StartDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=start_date_time,json=startDateTime,proto3" json:"start_date_time,omitempty"` + // A purchase order number is a value that helps users reference this budget + // in their monthly invoices. + PurchaseOrderNumber *wrappers.StringValue `protobuf:"bytes,9,opt,name=purchase_order_number,json=purchaseOrderNumber,proto3" json:"purchase_order_number,omitempty"` + // Notes associated with this budget. + Notes *wrappers.StringValue `protobuf:"bytes,10,opt,name=notes,proto3" json:"notes,omitempty"` + // The time when this account-level budget proposal was created. + // Formatted as yyyy-MM-dd HH:mm:ss. + CreationDateTime *wrappers.StringValue `protobuf:"bytes,11,opt,name=creation_date_time,json=creationDateTime,proto3" json:"creation_date_time,omitempty"` + // The end time of the account-level budget. + // + // Types that are valid to be assigned to EndTime: + // *AccountBudget_PendingAccountBudgetProposal_EndDateTime + // *AccountBudget_PendingAccountBudgetProposal_EndTimeType + EndTime isAccountBudget_PendingAccountBudgetProposal_EndTime `protobuf_oneof:"end_time"` + // The spending limit. + // + // Types that are valid to be assigned to SpendingLimit: + // *AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros + // *AccountBudget_PendingAccountBudgetProposal_SpendingLimitType + SpendingLimit isAccountBudget_PendingAccountBudgetProposal_SpendingLimit `protobuf_oneof:"spending_limit"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudget_PendingAccountBudgetProposal) Reset() { + *m = AccountBudget_PendingAccountBudgetProposal{} +} +func (m *AccountBudget_PendingAccountBudgetProposal) String() string { + return proto.CompactTextString(m) +} +func (*AccountBudget_PendingAccountBudgetProposal) ProtoMessage() {} +func (*AccountBudget_PendingAccountBudgetProposal) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_80044f3210f6054b, []int{0, 0} +} +func (m *AccountBudget_PendingAccountBudgetProposal) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal.Unmarshal(m, b) +} +func (m *AccountBudget_PendingAccountBudgetProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal.Marshal(b, m, deterministic) +} +func (dst *AccountBudget_PendingAccountBudgetProposal) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal.Merge(dst, src) +} +func (m *AccountBudget_PendingAccountBudgetProposal) XXX_Size() int { + return xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal.Size(m) +} +func (m *AccountBudget_PendingAccountBudgetProposal) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudget_PendingAccountBudgetProposal proto.InternalMessageInfo + +func (m *AccountBudget_PendingAccountBudgetProposal) GetAccountBudgetProposal() 
*wrappers.StringValue { + if m != nil { + return m.AccountBudgetProposal + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetProposalType() enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType { + if m != nil { + return m.ProposalType + } + return enums.AccountBudgetProposalTypeEnum_UNSPECIFIED +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetStartDateTime() *wrappers.StringValue { + if m != nil { + return m.StartDateTime + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetPurchaseOrderNumber() *wrappers.StringValue { + if m != nil { + return m.PurchaseOrderNumber + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetNotes() *wrappers.StringValue { + if m != nil { + return m.Notes + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetCreationDateTime() *wrappers.StringValue { + if m != nil { + return m.CreationDateTime + } + return nil +} + +type isAccountBudget_PendingAccountBudgetProposal_EndTime interface { + isAccountBudget_PendingAccountBudgetProposal_EndTime() +} + +type AccountBudget_PendingAccountBudgetProposal_EndDateTime struct { + EndDateTime *wrappers.StringValue `protobuf:"bytes,5,opt,name=end_date_time,json=endDateTime,proto3,oneof"` +} + +type AccountBudget_PendingAccountBudgetProposal_EndTimeType struct { + EndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,6,opt,name=end_time_type,json=endTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudget_PendingAccountBudgetProposal_EndDateTime) isAccountBudget_PendingAccountBudgetProposal_EndTime() { +} + +func (*AccountBudget_PendingAccountBudgetProposal_EndTimeType) isAccountBudget_PendingAccountBudgetProposal_EndTime() { +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetEndTime() isAccountBudget_PendingAccountBudgetProposal_EndTime { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetEndDateTime() *wrappers.StringValue { + if x, ok := m.GetEndTime().(*AccountBudget_PendingAccountBudgetProposal_EndDateTime); ok { + return x.EndDateTime + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetEndTime().(*AccountBudget_PendingAccountBudgetProposal_EndTimeType); ok { + return x.EndTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudget_PendingAccountBudgetProposal_SpendingLimit interface { + isAccountBudget_PendingAccountBudgetProposal_SpendingLimit() +} + +type AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros struct { + SpendingLimitMicros *wrappers.Int64Value `protobuf:"bytes,7,opt,name=spending_limit_micros,json=spendingLimitMicros,proto3,oneof"` +} + +type AccountBudget_PendingAccountBudgetProposal_SpendingLimitType struct { + SpendingLimitType enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,8,opt,name=spending_limit_type,json=spendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros) isAccountBudget_PendingAccountBudgetProposal_SpendingLimit() { +} + +func (*AccountBudget_PendingAccountBudgetProposal_SpendingLimitType) 
isAccountBudget_PendingAccountBudgetProposal_SpendingLimit() { +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetSpendingLimit() isAccountBudget_PendingAccountBudgetProposal_SpendingLimit { + if m != nil { + return m.SpendingLimit + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetSpendingLimit().(*AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros); ok { + return x.SpendingLimitMicros + } + return nil +} + +func (m *AccountBudget_PendingAccountBudgetProposal) GetSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetSpendingLimit().(*AccountBudget_PendingAccountBudgetProposal_SpendingLimitType); ok { + return x.SpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AccountBudget_PendingAccountBudgetProposal) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AccountBudget_PendingAccountBudgetProposal_OneofMarshaler, _AccountBudget_PendingAccountBudgetProposal_OneofUnmarshaler, _AccountBudget_PendingAccountBudgetProposal_OneofSizer, []interface{}{ + (*AccountBudget_PendingAccountBudgetProposal_EndDateTime)(nil), + (*AccountBudget_PendingAccountBudgetProposal_EndTimeType)(nil), + (*AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros)(nil), + (*AccountBudget_PendingAccountBudgetProposal_SpendingLimitType)(nil), + } +} + +func _AccountBudget_PendingAccountBudgetProposal_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AccountBudget_PendingAccountBudgetProposal) + // end_time + switch x := m.EndTime.(type) { + case *AccountBudget_PendingAccountBudgetProposal_EndDateTime: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EndDateTime); err != nil { + return err + } + case *AccountBudget_PendingAccountBudgetProposal_EndTimeType: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.EndTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudget_PendingAccountBudgetProposal.EndTime has unexpected type %T", x) + } + // spending_limit + switch x := m.SpendingLimit.(type) { + case *AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SpendingLimitMicros); err != nil { + return err + } + case *AccountBudget_PendingAccountBudgetProposal_SpendingLimitType: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudget_PendingAccountBudgetProposal.SpendingLimit has unexpected type %T", x) + } + return nil +} + +func _AccountBudget_PendingAccountBudgetProposal_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AccountBudget_PendingAccountBudgetProposal) + switch tag { + case 5: // end_time.end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.EndTime = &AccountBudget_PendingAccountBudgetProposal_EndDateTime{msg} + return true, err + case 6: // end_time.end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.EndTime = 
&AccountBudget_PendingAccountBudgetProposal_EndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 7: // spending_limit.spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.SpendingLimit = &AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros{msg} + return true, err + case 8: // spending_limit.spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.SpendingLimit = &AccountBudget_PendingAccountBudgetProposal_SpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + default: + return false, nil + } +} + +func _AccountBudget_PendingAccountBudgetProposal_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AccountBudget_PendingAccountBudgetProposal) + // end_time + switch x := m.EndTime.(type) { + case *AccountBudget_PendingAccountBudgetProposal_EndDateTime: + s := proto.Size(x.EndDateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_PendingAccountBudgetProposal_EndTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.EndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // spending_limit + switch x := m.SpendingLimit.(type) { + case *AccountBudget_PendingAccountBudgetProposal_SpendingLimitMicros: + s := proto.Size(x.SpendingLimitMicros) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudget_PendingAccountBudgetProposal_SpendingLimitType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.SpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AccountBudget)(nil), "google.ads.googleads.v1.resources.AccountBudget") + proto.RegisterType((*AccountBudget_PendingAccountBudgetProposal)(nil), "google.ads.googleads.v1.resources.AccountBudget.PendingAccountBudgetProposal") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/account_budget.proto", fileDescriptor_account_budget_80044f3210f6054b) +} + +var fileDescriptor_account_budget_80044f3210f6054b = []byte{ + // 1010 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdd, 0x6e, 0xdb, 0x36, + 0x14, 0x9e, 0x6c, 0xe7, 0x8f, 0x89, 0xf3, 0x23, 0x37, 0xa9, 0xf2, 0x83, 0x22, 0xdd, 0x50, 0x20, + 0xc0, 0x30, 0x79, 0xce, 0x8a, 0x0e, 0xf3, 0x6e, 0x66, 0xa3, 0x5d, 0xb3, 0x60, 0xe9, 0x32, 0x39, + 0xf3, 0x45, 0x11, 0x40, 0xa0, 0x2d, 0x4e, 0xd3, 0x66, 0x91, 0x02, 0x49, 0x65, 0x2d, 0xf6, 0x04, + 0x7b, 0x8d, 0x5d, 0xec, 0x62, 0x0f, 0xb2, 0x8b, 0x3d, 0xca, 0x5e, 0x62, 0x03, 0x29, 0x91, 0xb6, + 0x6c, 0x29, 0x52, 0x90, 0xdc, 0x51, 0xe4, 0xf9, 0xbe, 0x73, 0x3e, 0x92, 0xe7, 0x1c, 0x0a, 0xbc, + 0xf0, 0x09, 0xf1, 0x27, 0xa8, 0x0d, 0x3d, 0xd6, 0x4e, 0x86, 0x62, 0x74, 0xd3, 0x69, 0x53, 0xc4, + 0x48, 0x4c, 0xc7, 0x88, 0xb5, 0xe1, 0x78, 0x4c, 0x62, 0xcc, 0xdd, 0x51, 0xec, 0xf9, 0x88, 0xdb, + 0x11, 0x25, 0x9c, 0x98, 0x4f, 0x13, 0x63, 0x1b, 0x7a, 0xcc, 0xd6, 0x38, 0xfb, 0xa6, 0x63, 0x6b, + 0xdc, 0xc1, 0x57, 0x45, 0xd4, 0x08, 0xc7, 0xe1, 0x3c, 0xad, 0x1b, 0x51, 0x12, 0x11, 0x06, 0x27, + 0x2e, 0x7f, 0x1f, 0xa1, 0xc4, 0xc9, 0xc1, 0x17, 0x77, 0x62, 0x60, 0x1c, 0xf2, 0x98, 0xa5, 0xd0, + 0xcf, 0x6f, 0x87, 0xb2, 0x08, 0x61, 0x2f, 0xc0, 0xbe, 0x3b, 0x09, 0xc2, 0x80, 0xcf, 0xfa, 0xfc, + 0xe4, 0x76, 
0x20, 0x0f, 0x42, 0x34, 0x6b, 0xfe, 0x24, 0x35, 0x97, 0x5f, 0xa3, 0xf8, 0xc7, 0xf6, + 0xaf, 0x14, 0x46, 0x11, 0xa2, 0x2a, 0x8e, 0x23, 0x45, 0x17, 0x05, 0x6d, 0x88, 0x31, 0xe1, 0x90, + 0x07, 0x04, 0xa7, 0xab, 0x1f, 0xfe, 0x69, 0x81, 0x66, 0x2f, 0x51, 0xd1, 0x97, 0x22, 0xcc, 0x8f, + 0x40, 0x53, 0xed, 0xa0, 0x8b, 0x61, 0x88, 0x2c, 0xe3, 0xd8, 0x38, 0x59, 0x73, 0x36, 0xd4, 0xe4, + 0x1b, 0x18, 0x22, 0xf3, 0x63, 0x50, 0x0b, 0x3c, 0xab, 0x76, 0x6c, 0x9c, 0xac, 0x9f, 0x1e, 0xa6, + 0xdb, 0x6f, 0xab, 0x08, 0xec, 0x6f, 0x30, 0x7f, 0xf1, 0x7c, 0x08, 0x27, 0x31, 0x72, 0x6a, 0x81, + 0x67, 0xf6, 0x40, 0x73, 0x14, 0x4c, 0x26, 0x42, 0x2c, 0x43, 0x3c, 0x8e, 0xac, 0xba, 0xc4, 0x1d, + 0x2d, 0xe0, 0x06, 0x9c, 0x06, 0xd8, 0x4f, 0x80, 0x1b, 0x29, 0x64, 0x20, 0x10, 0xe6, 0x08, 0x2c, + 0x27, 0x9b, 0x6b, 0x35, 0x8e, 0x8d, 0x93, 0xcd, 0xd3, 0x73, 0xbb, 0xe8, 0xf4, 0xe5, 0x26, 0xd9, + 0x19, 0x49, 0x03, 0x89, 0x7c, 0x85, 0xe3, 0x30, 0x6f, 0xde, 0x49, 0x99, 0xcd, 0x4f, 0x41, 0x43, + 0xea, 0x5d, 0xaa, 0x10, 0x9d, 0xb4, 0x34, 0x7f, 0x00, 0x56, 0x72, 0x69, 0x90, 0x27, 0xce, 0x9e, + 0x72, 0xd7, 0x83, 0x1c, 0xb9, 0xe2, 0x84, 0xac, 0xe5, 0x0a, 0x2c, 0xbb, 0x0a, 0x3d, 0x10, 0xe0, + 0x97, 0x90, 0xa3, 0xab, 0x20, 0xa1, 0x85, 0x51, 0x44, 0xc9, 0x4d, 0x0e, 0xed, 0x4a, 0x15, 0x5a, + 0x85, 0x5e, 0xa0, 0xe5, 0x84, 0xc3, 0x89, 0x0b, 0xbd, 0x9f, 0x63, 0xc6, 0x43, 0x84, 0x39, 0x73, + 0xc3, 0x60, 0x4c, 0x09, 0xb3, 0xcc, 0xf2, 0x93, 0xdc, 0x93, 0xe0, 0xde, 0x14, 0x7b, 0x21, 0xa1, + 0xe6, 0x05, 0x78, 0x04, 0x43, 0x99, 0x05, 0x0c, 0x51, 0x11, 0x72, 0x4a, 0xd9, 0x2a, 0xa7, 0x34, + 0x13, 0xe0, 0x40, 0xe2, 0x52, 0xba, 0x4b, 0xb0, 0x1b, 0xc5, 0x74, 0xfc, 0x13, 0x64, 0xc8, 0x25, + 0xd4, 0x43, 0xd4, 0xc5, 0x71, 0x38, 0x42, 0xd4, 0x7a, 0x54, 0x41, 0x79, 0x4b, 0x41, 0xbf, 0x13, + 0xc8, 0x37, 0x12, 0x68, 0x9e, 0x82, 0x25, 0x4c, 0x38, 0x62, 0xd6, 0x6e, 0x05, 0x86, 0xc4, 0xd4, + 0x7c, 0x07, 0xb6, 0x55, 0x7e, 0xaa, 0xb2, 0x60, 0xed, 0x49, 0xf8, 0x85, 0x5d, 0x5a, 0x77, 0xb2, + 0xb7, 0xcc, 0xbe, 0x4c, 0x88, 0x32, 0x93, 0x97, 0x29, 0xa9, 0xb3, 0x95, 0xba, 0x51, 0x13, 0xe6, + 0x00, 0xec, 0xe9, 0x3b, 0x85, 0xb0, 0x37, 0x73, 0xf4, 0xab, 0xe5, 0xe1, 0x9f, 0x7d, 0xe0, 0xb4, + 0x14, 0xfa, 0x15, 0xf6, 0xf4, 0xd1, 0xff, 0x32, 0x47, 0xaa, 0x6b, 0x88, 0xb5, 0x26, 0xd3, 0xe9, + 0x79, 0x49, 0x3a, 0x09, 0x92, 0xab, 0xf7, 0x11, 0x92, 0x39, 0xa4, 0x3e, 0xe6, 0x9c, 0xa9, 0x69, + 0xa1, 0x40, 0x5f, 0xdf, 0xac, 0x02, 0x50, 0x41, 0x81, 0xe1, 0xb4, 0x14, 0x7a, 0x4e, 0x41, 0x86, + 0x74, 0xaa, 0x60, 0xfd, 0x1e, 0x0a, 0xb2, 0xce, 0xb4, 0x82, 0x11, 0x78, 0x32, 0xcd, 0xeb, 0x6c, + 0x9d, 0x4e, 0x2f, 0xf7, 0x46, 0xe9, 0xe5, 0x3e, 0xab, 0x39, 0x87, 0x3a, 0xbd, 0x53, 0x8e, 0x6f, + 0x05, 0x45, 0x7a, 0xcf, 0x7f, 0x37, 0xc0, 0x51, 0x91, 0x13, 0xa9, 0xab, 0x29, 0x75, 0x7d, 0x5d, + 0xa2, 0x2b, 0x43, 0xad, 0x05, 0x2e, 0xcc, 0x9e, 0xd5, 0x9c, 0xfd, 0xdc, 0x68, 0x94, 0xde, 0x69, + 0xc1, 0xc9, 0xd5, 0xbb, 0x59, 0xae, 0xb7, 0xee, 0x1c, 0xea, 0xba, 0x53, 0xa0, 0xb7, 0xc8, 0x89, + 0xd4, 0xbb, 0xf5, 0xa0, 0x7a, 0xeb, 0xce, 0x7e, 0x6e, 0x34, 0x5a, 0xaf, 0xac, 0x63, 0x85, 0x7a, + 0xb7, 0xcb, 0xf5, 0x36, 0x9c, 0x43, 0x45, 0x52, 0xa8, 0xb7, 0xc0, 0x89, 0xd4, 0xbb, 0xf3, 0xa0, + 0x7a, 0x1b, 0xce, 0x7e, 0x6e, 0x34, 0x62, 0xf1, 0xe0, 0xef, 0x15, 0x70, 0x74, 0x5b, 0x15, 0x32, + 0xaf, 0xc0, 0xe3, 0x82, 0xc7, 0x90, 0xec, 0xfe, 0xe5, 0x0d, 0x27, 0x97, 0xf5, 0x37, 0xd0, 0xcc, + 0xbc, 0xa9, 0xe4, 0x7b, 0x61, 0xf3, 0x74, 0x78, 0x97, 0xde, 0xad, 0xc8, 0xb4, 0xf4, 0xc2, 0x55, + 0x67, 0x23, 0x9a, 0xf9, 0xd2, 0xdd, 0xbc, 0x5e, 0xb9, 0x9b, 0xbf, 0x04, 0x5b, 0xf3, 0xdd, 0xb6, + 0x51, 0x01, 0xdc, 0x64, 0x99, 0x2e, 
0x5b, 0xd8, 0xbf, 0xd6, 0xee, 0xdd, 0xbf, 0x40, 0xf5, 0xfe, + 0x75, 0x0e, 0xcc, 0x31, 0x45, 0xf2, 0xa5, 0x37, 0x23, 0x67, 0xbd, 0x02, 0xc1, 0xb6, 0xc2, 0x69, + 0x45, 0x7d, 0xd0, 0xcc, 0x96, 0xf1, 0xa5, 0x4a, 0x8d, 0x68, 0x1d, 0xcd, 0x94, 0xef, 0xb7, 0x09, + 0xc7, 0xb4, 0x6a, 0x2f, 0xdf, 0xab, 0xef, 0x08, 0x6e, 0x5d, 0xad, 0xbf, 0x07, 0xbb, 0xf9, 0x49, + 0xbc, 0x52, 0x9e, 0xc4, 0x86, 0xd3, 0x62, 0x39, 0xc9, 0xfb, 0x0e, 0xb4, 0xf2, 0x52, 0x76, 0xf5, + 0x41, 0x53, 0xd6, 0x70, 0x76, 0xd8, 0xfc, 0x64, 0x1f, 0x80, 0x55, 0xb5, 0x51, 0xfd, 0x6d, 0xb0, + 0x99, 0x8d, 0xa2, 0xdf, 0x02, 0x3b, 0x0b, 0x7d, 0x5c, 0x4c, 0x2e, 0xb4, 0xc6, 0xfe, 0x3e, 0x78, + 0x5c, 0xd0, 0x5d, 0xc4, 0x52, 0x41, 0x21, 0x96, 0x4b, 0xf9, 0x35, 0xab, 0xff, 0x9f, 0x01, 0x9e, + 0x8d, 0x49, 0x58, 0xfe, 0xfa, 0xe9, 0x9b, 0xf3, 0x29, 0xca, 0xc9, 0xa5, 0xf1, 0xf6, 0x3c, 0x05, + 0xfa, 0x64, 0x02, 0xb1, 0x6f, 0x13, 0xea, 0xb7, 0x7d, 0x84, 0xe5, 0x99, 0xa8, 0xbf, 0x9c, 0x28, + 0x60, 0xb7, 0xfc, 0x05, 0x7e, 0xa9, 0x47, 0x7f, 0xd4, 0xea, 0xaf, 0x7b, 0xbd, 0xbf, 0x6a, 0x4f, + 0x5f, 0x27, 0x94, 0x3d, 0x8f, 0xd9, 0xc9, 0x50, 0x8c, 0x86, 0x1d, 0xdb, 0x51, 0x96, 0xff, 0x28, + 0x9b, 0xeb, 0x9e, 0xc7, 0xae, 0xb5, 0xcd, 0xf5, 0xb0, 0x73, 0xad, 0x6d, 0xfe, 0xad, 0x3d, 0x4b, + 0x16, 0xba, 0xdd, 0x9e, 0xc7, 0xba, 0x5d, 0x6d, 0xd5, 0xed, 0x0e, 0x3b, 0xdd, 0xae, 0xb6, 0x1b, + 0x2d, 0xcb, 0x60, 0x3f, 0xfb, 0x3f, 0x00, 0x00, 0xff, 0xff, 0x7d, 0xfd, 0x3b, 0x79, 0xb1, 0x0e, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget_proposal.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget_proposal.pb.go new file mode 100644 index 0000000..e6e33ba --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/account_budget_proposal.pb.go @@ -0,0 +1,713 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/account_budget_proposal.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An account-level budget proposal. +// +// All fields prefixed with 'proposed' may not necessarily be applied directly. +// For example, proposed spending limits may be adjusted before their +// application. This is true if the 'proposed' field has an 'approved' +// counterpart, e.g. spending limits. +// +// Please note that the proposal type (proposal_type) changes which fields are +// required and which must remain empty. +type AccountBudgetProposal struct { + // The resource name of the proposal. 
+ // AccountBudgetProposal resource names have the form: + // + // + // `customers/{customer_id}/accountBudgetProposals/{account_budget_proposal_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the proposal. + Id *wrappers.Int64Value `protobuf:"bytes,14,opt,name=id,proto3" json:"id,omitempty"` + // The resource name of the billing setup associated with this proposal. + BillingSetup *wrappers.StringValue `protobuf:"bytes,2,opt,name=billing_setup,json=billingSetup,proto3" json:"billing_setup,omitempty"` + // The resource name of the account-level budget associated with this + // proposal. + AccountBudget *wrappers.StringValue `protobuf:"bytes,3,opt,name=account_budget,json=accountBudget,proto3" json:"account_budget,omitempty"` + // The type of this proposal, e.g. END to end the budget associated with this + // proposal. + ProposalType enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType `protobuf:"varint,4,opt,name=proposal_type,json=proposalType,proto3,enum=google.ads.googleads.v1.enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType" json:"proposal_type,omitempty"` + // The status of this proposal. + // When a new proposal is created, the status defaults to PENDING. + Status enums.AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus `protobuf:"varint,15,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus" json:"status,omitempty"` + // The name to assign to the account-level budget. + ProposedName *wrappers.StringValue `protobuf:"bytes,5,opt,name=proposed_name,json=proposedName,proto3" json:"proposed_name,omitempty"` + // The approved start date time in yyyy-mm-dd hh:mm:ss format. + ApprovedStartDateTime *wrappers.StringValue `protobuf:"bytes,20,opt,name=approved_start_date_time,json=approvedStartDateTime,proto3" json:"approved_start_date_time,omitempty"` + // A purchase order number is a value that enables the user to help them + // reference this budget in their monthly invoices. + ProposedPurchaseOrderNumber *wrappers.StringValue `protobuf:"bytes,12,opt,name=proposed_purchase_order_number,json=proposedPurchaseOrderNumber,proto3" json:"proposed_purchase_order_number,omitempty"` + // Notes associated with this budget. + ProposedNotes *wrappers.StringValue `protobuf:"bytes,13,opt,name=proposed_notes,json=proposedNotes,proto3" json:"proposed_notes,omitempty"` + // The date time when this account-level budget proposal was created, which is + // not the same as its approval date time, if applicable. + CreationDateTime *wrappers.StringValue `protobuf:"bytes,16,opt,name=creation_date_time,json=creationDateTime,proto3" json:"creation_date_time,omitempty"` + // The date time when this account-level budget was approved, if applicable. + ApprovalDateTime *wrappers.StringValue `protobuf:"bytes,17,opt,name=approval_date_time,json=approvalDateTime,proto3" json:"approval_date_time,omitempty"` + // The proposed start date time of the account-level budget, which cannot be + // in the past. + // + // Types that are valid to be assigned to ProposedStartTime: + // *AccountBudgetProposal_ProposedStartDateTime + // *AccountBudgetProposal_ProposedStartTimeType + ProposedStartTime isAccountBudgetProposal_ProposedStartTime `protobuf_oneof:"proposed_start_time"` + // The proposed end date time of the account-level budget, which cannot be in + // the past. 
+ // + // Types that are valid to be assigned to ProposedEndTime: + // *AccountBudgetProposal_ProposedEndDateTime + // *AccountBudgetProposal_ProposedEndTimeType + ProposedEndTime isAccountBudgetProposal_ProposedEndTime `protobuf_oneof:"proposed_end_time"` + // The approved end date time of the account-level budget. + // + // Types that are valid to be assigned to ApprovedEndTime: + // *AccountBudgetProposal_ApprovedEndDateTime + // *AccountBudgetProposal_ApprovedEndTimeType + ApprovedEndTime isAccountBudgetProposal_ApprovedEndTime `protobuf_oneof:"approved_end_time"` + // The proposed spending limit. + // + // Types that are valid to be assigned to ProposedSpendingLimit: + // *AccountBudgetProposal_ProposedSpendingLimitMicros + // *AccountBudgetProposal_ProposedSpendingLimitType + ProposedSpendingLimit isAccountBudgetProposal_ProposedSpendingLimit `protobuf_oneof:"proposed_spending_limit"` + // The approved spending limit. + // + // Types that are valid to be assigned to ApprovedSpendingLimit: + // *AccountBudgetProposal_ApprovedSpendingLimitMicros + // *AccountBudgetProposal_ApprovedSpendingLimitType + ApprovedSpendingLimit isAccountBudgetProposal_ApprovedSpendingLimit `protobuf_oneof:"approved_spending_limit"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetProposal) Reset() { *m = AccountBudgetProposal{} } +func (m *AccountBudgetProposal) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetProposal) ProtoMessage() {} +func (*AccountBudgetProposal) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_bb72fd32acc2bc82, []int{0} +} +func (m *AccountBudgetProposal) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetProposal.Unmarshal(m, b) +} +func (m *AccountBudgetProposal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetProposal.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetProposal) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetProposal.Merge(dst, src) +} +func (m *AccountBudgetProposal) XXX_Size() int { + return xxx_messageInfo_AccountBudgetProposal.Size(m) +} +func (m *AccountBudgetProposal) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetProposal.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetProposal proto.InternalMessageInfo + +func (m *AccountBudgetProposal) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AccountBudgetProposal) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *AccountBudgetProposal) GetBillingSetup() *wrappers.StringValue { + if m != nil { + return m.BillingSetup + } + return nil +} + +func (m *AccountBudgetProposal) GetAccountBudget() *wrappers.StringValue { + if m != nil { + return m.AccountBudget + } + return nil +} + +func (m *AccountBudgetProposal) GetProposalType() enums.AccountBudgetProposalTypeEnum_AccountBudgetProposalType { + if m != nil { + return m.ProposalType + } + return enums.AccountBudgetProposalTypeEnum_UNSPECIFIED +} + +func (m *AccountBudgetProposal) GetStatus() enums.AccountBudgetProposalStatusEnum_AccountBudgetProposalStatus { + if m != nil { + return m.Status + } + return enums.AccountBudgetProposalStatusEnum_UNSPECIFIED +} + +func (m *AccountBudgetProposal) GetProposedName() *wrappers.StringValue { + if m != nil { + return m.ProposedName + } + return nil +} + +func (m *AccountBudgetProposal) 
GetApprovedStartDateTime() *wrappers.StringValue { + if m != nil { + return m.ApprovedStartDateTime + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedPurchaseOrderNumber() *wrappers.StringValue { + if m != nil { + return m.ProposedPurchaseOrderNumber + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedNotes() *wrappers.StringValue { + if m != nil { + return m.ProposedNotes + } + return nil +} + +func (m *AccountBudgetProposal) GetCreationDateTime() *wrappers.StringValue { + if m != nil { + return m.CreationDateTime + } + return nil +} + +func (m *AccountBudgetProposal) GetApprovalDateTime() *wrappers.StringValue { + if m != nil { + return m.ApprovalDateTime + } + return nil +} + +type isAccountBudgetProposal_ProposedStartTime interface { + isAccountBudgetProposal_ProposedStartTime() +} + +type AccountBudgetProposal_ProposedStartDateTime struct { + ProposedStartDateTime *wrappers.StringValue `protobuf:"bytes,18,opt,name=proposed_start_date_time,json=proposedStartDateTime,proto3,oneof"` +} + +type AccountBudgetProposal_ProposedStartTimeType struct { + ProposedStartTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,7,opt,name=proposed_start_time_type,json=proposedStartTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudgetProposal_ProposedStartDateTime) isAccountBudgetProposal_ProposedStartTime() {} + +func (*AccountBudgetProposal_ProposedStartTimeType) isAccountBudgetProposal_ProposedStartTime() {} + +func (m *AccountBudgetProposal) GetProposedStartTime() isAccountBudgetProposal_ProposedStartTime { + if m != nil { + return m.ProposedStartTime + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedStartDateTime() *wrappers.StringValue { + if x, ok := m.GetProposedStartTime().(*AccountBudgetProposal_ProposedStartDateTime); ok { + return x.ProposedStartDateTime + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedStartTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetProposedStartTime().(*AccountBudgetProposal_ProposedStartTimeType); ok { + return x.ProposedStartTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudgetProposal_ProposedEndTime interface { + isAccountBudgetProposal_ProposedEndTime() +} + +type AccountBudgetProposal_ProposedEndDateTime struct { + ProposedEndDateTime *wrappers.StringValue `protobuf:"bytes,19,opt,name=proposed_end_date_time,json=proposedEndDateTime,proto3,oneof"` +} + +type AccountBudgetProposal_ProposedEndTimeType struct { + ProposedEndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,9,opt,name=proposed_end_time_type,json=proposedEndTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudgetProposal_ProposedEndDateTime) isAccountBudgetProposal_ProposedEndTime() {} + +func (*AccountBudgetProposal_ProposedEndTimeType) isAccountBudgetProposal_ProposedEndTime() {} + +func (m *AccountBudgetProposal) GetProposedEndTime() isAccountBudgetProposal_ProposedEndTime { + if m != nil { + return m.ProposedEndTime + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedEndDateTime() *wrappers.StringValue { + if x, ok := m.GetProposedEndTime().(*AccountBudgetProposal_ProposedEndDateTime); ok { + return x.ProposedEndDateTime + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetProposedEndTime().(*AccountBudgetProposal_ProposedEndTimeType); ok { + return x.ProposedEndTimeType + } + return 
enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudgetProposal_ApprovedEndTime interface { + isAccountBudgetProposal_ApprovedEndTime() +} + +type AccountBudgetProposal_ApprovedEndDateTime struct { + ApprovedEndDateTime *wrappers.StringValue `protobuf:"bytes,21,opt,name=approved_end_date_time,json=approvedEndDateTime,proto3,oneof"` +} + +type AccountBudgetProposal_ApprovedEndTimeType struct { + ApprovedEndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,22,opt,name=approved_end_time_type,json=approvedEndTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*AccountBudgetProposal_ApprovedEndDateTime) isAccountBudgetProposal_ApprovedEndTime() {} + +func (*AccountBudgetProposal_ApprovedEndTimeType) isAccountBudgetProposal_ApprovedEndTime() {} + +func (m *AccountBudgetProposal) GetApprovedEndTime() isAccountBudgetProposal_ApprovedEndTime { + if m != nil { + return m.ApprovedEndTime + } + return nil +} + +func (m *AccountBudgetProposal) GetApprovedEndDateTime() *wrappers.StringValue { + if x, ok := m.GetApprovedEndTime().(*AccountBudgetProposal_ApprovedEndDateTime); ok { + return x.ApprovedEndDateTime + } + return nil +} + +func (m *AccountBudgetProposal) GetApprovedEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetApprovedEndTime().(*AccountBudgetProposal_ApprovedEndTimeType); ok { + return x.ApprovedEndTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isAccountBudgetProposal_ProposedSpendingLimit interface { + isAccountBudgetProposal_ProposedSpendingLimit() +} + +type AccountBudgetProposal_ProposedSpendingLimitMicros struct { + ProposedSpendingLimitMicros *wrappers.Int64Value `protobuf:"bytes,10,opt,name=proposed_spending_limit_micros,json=proposedSpendingLimitMicros,proto3,oneof"` +} + +type AccountBudgetProposal_ProposedSpendingLimitType struct { + ProposedSpendingLimitType enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,11,opt,name=proposed_spending_limit_type,json=proposedSpendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudgetProposal_ProposedSpendingLimitMicros) isAccountBudgetProposal_ProposedSpendingLimit() { +} + +func (*AccountBudgetProposal_ProposedSpendingLimitType) isAccountBudgetProposal_ProposedSpendingLimit() { +} + +func (m *AccountBudgetProposal) GetProposedSpendingLimit() isAccountBudgetProposal_ProposedSpendingLimit { + if m != nil { + return m.ProposedSpendingLimit + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetProposedSpendingLimit().(*AccountBudgetProposal_ProposedSpendingLimitMicros); ok { + return x.ProposedSpendingLimitMicros + } + return nil +} + +func (m *AccountBudgetProposal) GetProposedSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetProposedSpendingLimit().(*AccountBudgetProposal_ProposedSpendingLimitType); ok { + return x.ProposedSpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +type isAccountBudgetProposal_ApprovedSpendingLimit interface { + isAccountBudgetProposal_ApprovedSpendingLimit() +} + +type AccountBudgetProposal_ApprovedSpendingLimitMicros struct { + ApprovedSpendingLimitMicros *wrappers.Int64Value `protobuf:"bytes,23,opt,name=approved_spending_limit_micros,json=approvedSpendingLimitMicros,proto3,oneof"` +} + +type AccountBudgetProposal_ApprovedSpendingLimitType struct { + ApprovedSpendingLimitType 
enums.SpendingLimitTypeEnum_SpendingLimitType `protobuf:"varint,24,opt,name=approved_spending_limit_type,json=approvedSpendingLimitType,proto3,enum=google.ads.googleads.v1.enums.SpendingLimitTypeEnum_SpendingLimitType,oneof"` +} + +func (*AccountBudgetProposal_ApprovedSpendingLimitMicros) isAccountBudgetProposal_ApprovedSpendingLimit() { +} + +func (*AccountBudgetProposal_ApprovedSpendingLimitType) isAccountBudgetProposal_ApprovedSpendingLimit() { +} + +func (m *AccountBudgetProposal) GetApprovedSpendingLimit() isAccountBudgetProposal_ApprovedSpendingLimit { + if m != nil { + return m.ApprovedSpendingLimit + } + return nil +} + +func (m *AccountBudgetProposal) GetApprovedSpendingLimitMicros() *wrappers.Int64Value { + if x, ok := m.GetApprovedSpendingLimit().(*AccountBudgetProposal_ApprovedSpendingLimitMicros); ok { + return x.ApprovedSpendingLimitMicros + } + return nil +} + +func (m *AccountBudgetProposal) GetApprovedSpendingLimitType() enums.SpendingLimitTypeEnum_SpendingLimitType { + if x, ok := m.GetApprovedSpendingLimit().(*AccountBudgetProposal_ApprovedSpendingLimitType); ok { + return x.ApprovedSpendingLimitType + } + return enums.SpendingLimitTypeEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AccountBudgetProposal) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AccountBudgetProposal_OneofMarshaler, _AccountBudgetProposal_OneofUnmarshaler, _AccountBudgetProposal_OneofSizer, []interface{}{ + (*AccountBudgetProposal_ProposedStartDateTime)(nil), + (*AccountBudgetProposal_ProposedStartTimeType)(nil), + (*AccountBudgetProposal_ProposedEndDateTime)(nil), + (*AccountBudgetProposal_ProposedEndTimeType)(nil), + (*AccountBudgetProposal_ApprovedEndDateTime)(nil), + (*AccountBudgetProposal_ApprovedEndTimeType)(nil), + (*AccountBudgetProposal_ProposedSpendingLimitMicros)(nil), + (*AccountBudgetProposal_ProposedSpendingLimitType)(nil), + (*AccountBudgetProposal_ApprovedSpendingLimitMicros)(nil), + (*AccountBudgetProposal_ApprovedSpendingLimitType)(nil), + } +} + +func _AccountBudgetProposal_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AccountBudgetProposal) + // proposed_start_time + switch x := m.ProposedStartTime.(type) { + case *AccountBudgetProposal_ProposedStartDateTime: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProposedStartDateTime); err != nil { + return err + } + case *AccountBudgetProposal_ProposedStartTimeType: + b.EncodeVarint(7<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ProposedStartTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudgetProposal.ProposedStartTime has unexpected type %T", x) + } + // proposed_end_time + switch x := m.ProposedEndTime.(type) { + case *AccountBudgetProposal_ProposedEndDateTime: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProposedEndDateTime); err != nil { + return err + } + case *AccountBudgetProposal_ProposedEndTimeType: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ProposedEndTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudgetProposal.ProposedEndTime has unexpected type %T", x) + } + // approved_end_time + switch x := m.ApprovedEndTime.(type) { + case *AccountBudgetProposal_ApprovedEndDateTime: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApprovedEndDateTime); err != nil { + return err + } + 
case *AccountBudgetProposal_ApprovedEndTimeType: + b.EncodeVarint(22<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ApprovedEndTimeType)) + case nil: + default: + return fmt.Errorf("AccountBudgetProposal.ApprovedEndTime has unexpected type %T", x) + } + // proposed_spending_limit + switch x := m.ProposedSpendingLimit.(type) { + case *AccountBudgetProposal_ProposedSpendingLimitMicros: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProposedSpendingLimitMicros); err != nil { + return err + } + case *AccountBudgetProposal_ProposedSpendingLimitType: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ProposedSpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudgetProposal.ProposedSpendingLimit has unexpected type %T", x) + } + // approved_spending_limit + switch x := m.ApprovedSpendingLimit.(type) { + case *AccountBudgetProposal_ApprovedSpendingLimitMicros: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApprovedSpendingLimitMicros); err != nil { + return err + } + case *AccountBudgetProposal_ApprovedSpendingLimitType: + b.EncodeVarint(24<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ApprovedSpendingLimitType)) + case nil: + default: + return fmt.Errorf("AccountBudgetProposal.ApprovedSpendingLimit has unexpected type %T", x) + } + return nil +} + +func _AccountBudgetProposal_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AccountBudgetProposal) + switch tag { + case 18: // proposed_start_time.proposed_start_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ProposedStartTime = &AccountBudgetProposal_ProposedStartDateTime{msg} + return true, err + case 7: // proposed_start_time.proposed_start_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ProposedStartTime = &AccountBudgetProposal_ProposedStartTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 19: // proposed_end_time.proposed_end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ProposedEndTime = &AccountBudgetProposal_ProposedEndDateTime{msg} + return true, err + case 9: // proposed_end_time.proposed_end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ProposedEndTime = &AccountBudgetProposal_ProposedEndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 21: // approved_end_time.approved_end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ApprovedEndTime = &AccountBudgetProposal_ApprovedEndDateTime{msg} + return true, err + case 22: // approved_end_time.approved_end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ApprovedEndTime = &AccountBudgetProposal_ApprovedEndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 10: // proposed_spending_limit.proposed_spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.ProposedSpendingLimit = &AccountBudgetProposal_ProposedSpendingLimitMicros{msg} + return true, err + 
case 11: // proposed_spending_limit.proposed_spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ProposedSpendingLimit = &AccountBudgetProposal_ProposedSpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + case 23: // approved_spending_limit.approved_spending_limit_micros + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.Int64Value) + err := b.DecodeMessage(msg) + m.ApprovedSpendingLimit = &AccountBudgetProposal_ApprovedSpendingLimitMicros{msg} + return true, err + case 24: // approved_spending_limit.approved_spending_limit_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ApprovedSpendingLimit = &AccountBudgetProposal_ApprovedSpendingLimitType{enums.SpendingLimitTypeEnum_SpendingLimitType(x)} + return true, err + default: + return false, nil + } +} + +func _AccountBudgetProposal_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AccountBudgetProposal) + // proposed_start_time + switch x := m.ProposedStartTime.(type) { + case *AccountBudgetProposal_ProposedStartDateTime: + s := proto.Size(x.ProposedStartDateTime) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposal_ProposedStartTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ProposedStartTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // proposed_end_time + switch x := m.ProposedEndTime.(type) { + case *AccountBudgetProposal_ProposedEndDateTime: + s := proto.Size(x.ProposedEndDateTime) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposal_ProposedEndTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ProposedEndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // approved_end_time + switch x := m.ApprovedEndTime.(type) { + case *AccountBudgetProposal_ApprovedEndDateTime: + s := proto.Size(x.ApprovedEndDateTime) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposal_ApprovedEndTimeType: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ApprovedEndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // proposed_spending_limit + switch x := m.ProposedSpendingLimit.(type) { + case *AccountBudgetProposal_ProposedSpendingLimitMicros: + s := proto.Size(x.ProposedSpendingLimitMicros) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposal_ProposedSpendingLimitType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ProposedSpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // approved_spending_limit + switch x := m.ApprovedSpendingLimit.(type) { + case *AccountBudgetProposal_ApprovedSpendingLimitMicros: + s := proto.Size(x.ApprovedSpendingLimitMicros) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposal_ApprovedSpendingLimitType: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.ApprovedSpendingLimitType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AccountBudgetProposal)(nil), 
"google.ads.googleads.v1.resources.AccountBudgetProposal") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/account_budget_proposal.proto", fileDescriptor_account_budget_proposal_bb72fd32acc2bc82) +} + +var fileDescriptor_account_budget_proposal_bb72fd32acc2bc82 = []byte{ + // 840 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xef, 0x6e, 0xeb, 0x34, + 0x18, 0xc6, 0x69, 0x3a, 0x0e, 0x3a, 0x3e, 0xed, 0xd8, 0x49, 0xe9, 0x39, 0xd9, 0x1f, 0x1d, 0x6d, + 0xa0, 0x49, 0x93, 0x10, 0xa9, 0x3a, 0x26, 0x90, 0xc2, 0x07, 0x68, 0x61, 0x6c, 0x4c, 0x30, 0xaa, + 0x74, 0x14, 0x69, 0xaa, 0x14, 0xb9, 0x8d, 0x09, 0x11, 0x89, 0x1d, 0xd9, 0xce, 0xd0, 0xc4, 0x05, + 0x20, 0x6e, 0x83, 0x8f, 0x5c, 0x0a, 0x77, 0xc1, 0x57, 0xae, 0x02, 0xd9, 0x89, 0xdd, 0xa4, 0x6d, + 0xd6, 0x68, 0xe3, 0x9b, 0x13, 0xfb, 0x79, 0x5e, 0xff, 0xfc, 0xbe, 0xaf, 0x13, 0xf0, 0x79, 0x40, + 0x48, 0x10, 0xa1, 0x1e, 0xf4, 0x59, 0x2f, 0x1b, 0x8a, 0xd1, 0x5d, 0xbf, 0x47, 0x11, 0x23, 0x29, + 0x9d, 0x23, 0xd6, 0x83, 0xf3, 0x39, 0x49, 0x31, 0xf7, 0x66, 0xa9, 0x1f, 0x20, 0xee, 0x25, 0x94, + 0x24, 0x84, 0xc1, 0xc8, 0x4e, 0x28, 0xe1, 0xc4, 0x3c, 0xca, 0x54, 0x36, 0xf4, 0x99, 0xad, 0x0d, + 0xec, 0xbb, 0xbe, 0xad, 0x0d, 0xf6, 0x86, 0x55, 0x31, 0x10, 0x4e, 0xe3, 0x4a, 0x7f, 0x8f, 0x71, + 0xc8, 0x53, 0x96, 0x85, 0xd9, 0xfb, 0xe2, 0x71, 0x1e, 0xfc, 0x3e, 0x41, 0xb9, 0xc3, 0xa7, 0x0f, + 0x3b, 0xb0, 0x04, 0x61, 0x3f, 0xc4, 0x81, 0x17, 0x85, 0x71, 0xc8, 0x8b, 0xc2, 0x8f, 0x1e, 0x16, + 0xf2, 0x30, 0x46, 0xc5, 0xe5, 0x6f, 0xf2, 0xe5, 0xf2, 0x69, 0x96, 0xfe, 0xd4, 0xfb, 0x95, 0xc2, + 0x24, 0x41, 0x54, 0x91, 0x1c, 0x28, 0xbb, 0x24, 0xec, 0x41, 0x8c, 0x09, 0x87, 0x3c, 0x24, 0x38, + 0x9f, 0x7d, 0xff, 0x9f, 0x1d, 0xd0, 0x1d, 0x64, 0x30, 0x43, 0xc9, 0x32, 0xca, 0x51, 0xcc, 0x0f, + 0x40, 0x5b, 0x1d, 0xa9, 0x87, 0x61, 0x8c, 0xac, 0xc6, 0x61, 0xe3, 0xe4, 0xb9, 0xdb, 0x52, 0x2f, + 0xaf, 0x61, 0x8c, 0xcc, 0x0f, 0x81, 0x11, 0xfa, 0xd6, 0xf6, 0x61, 0xe3, 0xe4, 0xc5, 0xe9, 0x7e, + 0x9e, 0x0f, 0x5b, 0xed, 0xc4, 0xfe, 0x06, 0xf3, 0x4f, 0xce, 0x26, 0x30, 0x4a, 0x91, 0x6b, 0x84, + 0xbe, 0x39, 0x00, 0xed, 0x59, 0x18, 0x45, 0x02, 0x9a, 0x21, 0x9e, 0x26, 0x96, 0x21, 0x75, 0x07, + 0x2b, 0xba, 0x31, 0xa7, 0x21, 0x0e, 0x32, 0x61, 0x2b, 0x97, 0x8c, 0x85, 0xc2, 0xfc, 0x12, 0x6c, + 0x97, 0x8f, 0xde, 0x6a, 0xd6, 0xf0, 0x68, 0xc3, 0x22, 0xa1, 0xf9, 0x1b, 0x68, 0x97, 0x12, 0x66, + 0x6d, 0x1d, 0x36, 0x4e, 0xb6, 0x4f, 0x27, 0x76, 0x55, 0x69, 0xc9, 0x83, 0xb7, 0xd7, 0x1e, 0xd3, + 0xcd, 0x7d, 0x82, 0xce, 0x71, 0x1a, 0x57, 0xcf, 0xba, 0xad, 0xa4, 0xf0, 0x64, 0x52, 0xf0, 0x2c, + 0x2b, 0x34, 0xeb, 0x5d, 0x19, 0xf5, 0xf6, 0x31, 0x51, 0xc7, 0xd2, 0xa1, 0x3a, 0x6e, 0x36, 0xef, + 0xe6, 0x91, 0xc4, 0xc1, 0x67, 0x7b, 0x40, 0x7e, 0x96, 0xca, 0xb7, 0xeb, 0x1c, 0xbc, 0x92, 0xc8, + 0x44, 0xff, 0x00, 0x2c, 0x98, 0x24, 0x94, 0xdc, 0x21, 0x5f, 0x34, 0x0a, 0xe5, 0x9e, 0x0f, 0x39, + 0xf2, 0x44, 0x31, 0x5a, 0xef, 0xd5, 0x70, 0xeb, 0x2a, 0xf5, 0x58, 0x88, 0xbf, 0x82, 0x1c, 0xdd, + 0x84, 0x31, 0x32, 0x21, 0x78, 0xa3, 0x77, 0x96, 0xa4, 0x74, 0xfe, 0x33, 0x64, 0xc8, 0x23, 0xd4, + 0x47, 0xd4, 0xc3, 0x69, 0x3c, 0x43, 0xd4, 0x6a, 0xd5, 0x30, 0xdf, 0x57, 0x1e, 0xa3, 0xdc, 0xe2, + 0x7b, 0xe1, 0x70, 0x2d, 0x0d, 0x44, 0xc9, 0x2c, 0xe0, 0x09, 0x47, 0xcc, 0x6a, 0xd7, 0x29, 0x19, + 0x4d, 0x2f, 0x24, 0xe6, 0x15, 0x30, 0xe7, 0x14, 0xc9, 0xce, 0x29, 0x80, 0xef, 0xd4, 0x30, 0xda, + 0x51, 0x3a, 0xcd, 0x7c, 0x05, 0xcc, 0xec, 0x30, 0x60, 0x54, 0xf0, 0x7a, 0x59, 0xc7, 0x4b, 0xe9, + 0xb4, 0xd7, 0x8f, 0xc0, 0xd2, 0x70, 0xcb, 0x69, 0x31, 0x37, 0x3b, 
0x5e, 0xbe, 0xe5, 0x76, 0x95, + 0xbe, 0x9c, 0x18, 0xb2, 0x62, 0xac, 0xef, 0x1d, 0xeb, 0x1d, 0x59, 0xb8, 0x67, 0x1b, 0x0a, 0x57, + 0xd8, 0xe8, 0xee, 0x50, 0x0f, 0x2b, 0x01, 0xd5, 0x84, 0x39, 0x06, 0xaf, 0x74, 0x40, 0x84, 0xfd, + 0x02, 0x47, 0xa7, 0x06, 0x47, 0xc3, 0xed, 0x28, 0xf5, 0x39, 0xf6, 0x35, 0xc5, 0x2f, 0x4b, 0xa6, + 0x0b, 0x86, 0xe7, 0x4f, 0x60, 0x28, 0x07, 0x2b, 0x12, 0xe8, 0x16, 0x29, 0x13, 0x74, 0x6b, 0x10, + 0x18, 0x6e, 0x47, 0xa9, 0x97, 0x08, 0x4a, 0xa6, 0x0b, 0x82, 0x57, 0x4f, 0x20, 0x28, 0x07, 0xd3, + 0x04, 0xb3, 0x42, 0x37, 0x2e, 0x7d, 0x9f, 0xe2, 0x70, 0x4e, 0x09, 0xb3, 0xc0, 0xc6, 0x9b, 0xfe, + 0xb2, 0xb9, 0x68, 0xc7, 0x71, 0xee, 0xf1, 0xad, 0xb0, 0xf8, 0x4e, 0x3a, 0x98, 0x7f, 0x34, 0xc0, + 0x41, 0x55, 0x10, 0xc9, 0xf5, 0x42, 0x72, 0x7d, 0xbd, 0x81, 0xab, 0x64, 0xad, 0x01, 0x57, 0xde, + 0x5e, 0x36, 0xdd, 0xdd, 0xb5, 0xbb, 0x51, 0xbc, 0x8b, 0x4b, 0x6d, 0x2d, 0xef, 0xeb, 0xcd, 0xbc, + 0x5b, 0xee, 0xbe, 0xbe, 0xdb, 0x2a, 0x78, 0xab, 0x82, 0x48, 0x5e, 0xeb, 0x7f, 0xe5, 0xdd, 0x72, + 0x77, 0xd7, 0xee, 0x46, 0x4c, 0x0e, 0xbb, 0xa0, 0xb3, 0xa6, 0xa9, 0x87, 0x1d, 0xf0, 0x72, 0xa5, + 0x4b, 0xc4, 0xcb, 0x95, 0xc2, 0x1b, 0xee, 0x82, 0xd7, 0x15, 0xb9, 0x13, 0x53, 0x15, 0x98, 0xc3, + 0xdf, 0x0d, 0x70, 0x3c, 0x27, 0xb1, 0xbd, 0xf1, 0xcf, 0x6d, 0xb8, 0xb7, 0xf6, 0x6b, 0x36, 0x12, + 0xa7, 0x3e, 0x6a, 0xdc, 0x5e, 0xe5, 0x06, 0x01, 0x89, 0x20, 0x0e, 0x6c, 0x42, 0x83, 0x5e, 0x80, + 0xb0, 0xcc, 0x89, 0xfa, 0x51, 0x4a, 0x42, 0xf6, 0xc0, 0xaf, 0xe5, 0x67, 0x7a, 0xf4, 0xa7, 0xd1, + 0xbc, 0x18, 0x0c, 0xfe, 0x32, 0x8e, 0x2e, 0x32, 0xcb, 0x81, 0xcf, 0xec, 0x6c, 0x28, 0x46, 0x93, + 0xbe, 0xed, 0xaa, 0x95, 0x7f, 0xab, 0x35, 0xd3, 0x81, 0xcf, 0xa6, 0x7a, 0xcd, 0x74, 0xd2, 0x9f, + 0xea, 0x35, 0xff, 0x1a, 0xc7, 0xd9, 0x84, 0xe3, 0x0c, 0x7c, 0xe6, 0x38, 0x7a, 0x95, 0xe3, 0x4c, + 0xfa, 0x8e, 0xa3, 0xd7, 0xcd, 0x9e, 0xc9, 0xcd, 0x7e, 0xfc, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x5d, 0xf5, 0x49, 0xe3, 0x06, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad.pb.go new file mode 100644 index 0000000..a7121d3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad.pb.go @@ -0,0 +1,884 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad. +type Ad struct { + // The ID of the ad. + Id *wrappers.Int64Value `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The list of possible final URLs after all cross-domain redirects for the + // ad. 
+ FinalUrls []*wrappers.StringValue `protobuf:"bytes,2,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // A list of final app URLs that will be used on mobile if the user has the + // specific app installed. + FinalAppUrls []*common.FinalAppUrl `protobuf:"bytes,35,rep,name=final_app_urls,json=finalAppUrls,proto3" json:"final_app_urls,omitempty"` + // The list of possible final mobile URLs after all cross-domain redirects + // for the ad. + FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,16,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // The URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,12,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The list of mappings that can be used to substitute custom parameter tags + // in a `tracking_url_template`, `final_urls`, or `mobile_final_urls`. + UrlCustomParameters []*common.CustomParameter `protobuf:"bytes,10,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // The URL that appears in the ad description for some ad formats. + DisplayUrl *wrappers.StringValue `protobuf:"bytes,4,opt,name=display_url,json=displayUrl,proto3" json:"display_url,omitempty"` + // The type of ad. + Type enums.AdTypeEnum_AdType `protobuf:"varint,5,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.AdTypeEnum_AdType" json:"type,omitempty"` + // Indicates if this ad was automatically added by Google Ads and not by a + // user. For example, this could happen when ads are automatically created as + // suggestions for new ads based on knowledge of how existing ads are + // performing. + AddedByGoogleAds *wrappers.BoolValue `protobuf:"bytes,19,opt,name=added_by_google_ads,json=addedByGoogleAds,proto3" json:"added_by_google_ads,omitempty"` + // The device preference for the ad. You can only specify a preference for + // mobile devices. When this preference is set the ad will be preferred over + // other ads when being displayed on a mobile device. The ad can still be + // displayed on other device types, e.g. if no other ads are available. + // If unspecified (no device preference), all devices are targeted. + // This is only supported by some ad types. + DevicePreference enums.DeviceEnum_Device `protobuf:"varint,20,opt,name=device_preference,json=devicePreference,proto3,enum=google.ads.googleads.v1.enums.DeviceEnum_Device" json:"device_preference,omitempty"` + // Additional URLs for the ad that are tagged with a unique identifier that + // can be referenced from other fields in the ad. + UrlCollections []*common.UrlCollection `protobuf:"bytes,26,rep,name=url_collections,json=urlCollections,proto3" json:"url_collections,omitempty"` + // The name of the ad. This is only used to be able to identify the ad. It + // does not need to be unique and does not affect the served ad. + Name *wrappers.StringValue `protobuf:"bytes,23,opt,name=name,proto3" json:"name,omitempty"` + // If this ad is system managed, then this field will indicate the source. + // This field is read-only. 
+ SystemManagedResourceSource enums.SystemManagedResourceSourceEnum_SystemManagedResourceSource `protobuf:"varint,27,opt,name=system_managed_resource_source,json=systemManagedResourceSource,proto3,enum=google.ads.googleads.v1.enums.SystemManagedResourceSourceEnum_SystemManagedResourceSource" json:"system_managed_resource_source,omitempty"` + // Details pertinent to the ad type. Exactly one value must be set. + // + // Types that are valid to be assigned to AdData: + // *Ad_TextAd + // *Ad_ExpandedTextAd + // *Ad_CallOnlyAd + // *Ad_ExpandedDynamicSearchAd + // *Ad_HotelAd + // *Ad_ShoppingSmartAd + // *Ad_ShoppingProductAd + // *Ad_GmailAd + // *Ad_ImageAd + // *Ad_VideoAd + // *Ad_ResponsiveSearchAd + // *Ad_LegacyResponsiveDisplayAd + // *Ad_AppAd + // *Ad_LegacyAppInstallAd + // *Ad_ResponsiveDisplayAd + // *Ad_DisplayUploadAd + // *Ad_AppEngagementAd + AdData isAd_AdData `protobuf_oneof:"ad_data"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Ad) Reset() { *m = Ad{} } +func (m *Ad) String() string { return proto.CompactTextString(m) } +func (*Ad) ProtoMessage() {} +func (*Ad) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_55cd72465d865885, []int{0} +} +func (m *Ad) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Ad.Unmarshal(m, b) +} +func (m *Ad) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Ad.Marshal(b, m, deterministic) +} +func (dst *Ad) XXX_Merge(src proto.Message) { + xxx_messageInfo_Ad.Merge(dst, src) +} +func (m *Ad) XXX_Size() int { + return xxx_messageInfo_Ad.Size(m) +} +func (m *Ad) XXX_DiscardUnknown() { + xxx_messageInfo_Ad.DiscardUnknown(m) +} + +var xxx_messageInfo_Ad proto.InternalMessageInfo + +func (m *Ad) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Ad) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *Ad) GetFinalAppUrls() []*common.FinalAppUrl { + if m != nil { + return m.FinalAppUrls + } + return nil +} + +func (m *Ad) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *Ad) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *Ad) GetUrlCustomParameters() []*common.CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *Ad) GetDisplayUrl() *wrappers.StringValue { + if m != nil { + return m.DisplayUrl + } + return nil +} + +func (m *Ad) GetType() enums.AdTypeEnum_AdType { + if m != nil { + return m.Type + } + return enums.AdTypeEnum_UNSPECIFIED +} + +func (m *Ad) GetAddedByGoogleAds() *wrappers.BoolValue { + if m != nil { + return m.AddedByGoogleAds + } + return nil +} + +func (m *Ad) GetDevicePreference() enums.DeviceEnum_Device { + if m != nil { + return m.DevicePreference + } + return enums.DeviceEnum_UNSPECIFIED +} + +func (m *Ad) GetUrlCollections() []*common.UrlCollection { + if m != nil { + return m.UrlCollections + } + return nil +} + +func (m *Ad) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *Ad) GetSystemManagedResourceSource() enums.SystemManagedResourceSourceEnum_SystemManagedResourceSource { + if m != nil { + return m.SystemManagedResourceSource + } + return enums.SystemManagedResourceSourceEnum_UNSPECIFIED +} + +type isAd_AdData interface { + isAd_AdData() +} + +type 
Ad_TextAd struct { + TextAd *common.TextAdInfo `protobuf:"bytes,6,opt,name=text_ad,json=textAd,proto3,oneof"` +} + +type Ad_ExpandedTextAd struct { + ExpandedTextAd *common.ExpandedTextAdInfo `protobuf:"bytes,7,opt,name=expanded_text_ad,json=expandedTextAd,proto3,oneof"` +} + +type Ad_CallOnlyAd struct { + CallOnlyAd *common.CallOnlyAdInfo `protobuf:"bytes,13,opt,name=call_only_ad,json=callOnlyAd,proto3,oneof"` +} + +type Ad_ExpandedDynamicSearchAd struct { + ExpandedDynamicSearchAd *common.ExpandedDynamicSearchAdInfo `protobuf:"bytes,14,opt,name=expanded_dynamic_search_ad,json=expandedDynamicSearchAd,proto3,oneof"` +} + +type Ad_HotelAd struct { + HotelAd *common.HotelAdInfo `protobuf:"bytes,15,opt,name=hotel_ad,json=hotelAd,proto3,oneof"` +} + +type Ad_ShoppingSmartAd struct { + ShoppingSmartAd *common.ShoppingSmartAdInfo `protobuf:"bytes,17,opt,name=shopping_smart_ad,json=shoppingSmartAd,proto3,oneof"` +} + +type Ad_ShoppingProductAd struct { + ShoppingProductAd *common.ShoppingProductAdInfo `protobuf:"bytes,18,opt,name=shopping_product_ad,json=shoppingProductAd,proto3,oneof"` +} + +type Ad_GmailAd struct { + GmailAd *common.GmailAdInfo `protobuf:"bytes,21,opt,name=gmail_ad,json=gmailAd,proto3,oneof"` +} + +type Ad_ImageAd struct { + ImageAd *common.ImageAdInfo `protobuf:"bytes,22,opt,name=image_ad,json=imageAd,proto3,oneof"` +} + +type Ad_VideoAd struct { + VideoAd *common.VideoAdInfo `protobuf:"bytes,24,opt,name=video_ad,json=videoAd,proto3,oneof"` +} + +type Ad_ResponsiveSearchAd struct { + ResponsiveSearchAd *common.ResponsiveSearchAdInfo `protobuf:"bytes,25,opt,name=responsive_search_ad,json=responsiveSearchAd,proto3,oneof"` +} + +type Ad_LegacyResponsiveDisplayAd struct { + LegacyResponsiveDisplayAd *common.LegacyResponsiveDisplayAdInfo `protobuf:"bytes,28,opt,name=legacy_responsive_display_ad,json=legacyResponsiveDisplayAd,proto3,oneof"` +} + +type Ad_AppAd struct { + AppAd *common.AppAdInfo `protobuf:"bytes,29,opt,name=app_ad,json=appAd,proto3,oneof"` +} + +type Ad_LegacyAppInstallAd struct { + LegacyAppInstallAd *common.LegacyAppInstallAdInfo `protobuf:"bytes,30,opt,name=legacy_app_install_ad,json=legacyAppInstallAd,proto3,oneof"` +} + +type Ad_ResponsiveDisplayAd struct { + ResponsiveDisplayAd *common.ResponsiveDisplayAdInfo `protobuf:"bytes,31,opt,name=responsive_display_ad,json=responsiveDisplayAd,proto3,oneof"` +} + +type Ad_DisplayUploadAd struct { + DisplayUploadAd *common.DisplayUploadAdInfo `protobuf:"bytes,33,opt,name=display_upload_ad,json=displayUploadAd,proto3,oneof"` +} + +type Ad_AppEngagementAd struct { + AppEngagementAd *common.AppEngagementAdInfo `protobuf:"bytes,34,opt,name=app_engagement_ad,json=appEngagementAd,proto3,oneof"` +} + +func (*Ad_TextAd) isAd_AdData() {} + +func (*Ad_ExpandedTextAd) isAd_AdData() {} + +func (*Ad_CallOnlyAd) isAd_AdData() {} + +func (*Ad_ExpandedDynamicSearchAd) isAd_AdData() {} + +func (*Ad_HotelAd) isAd_AdData() {} + +func (*Ad_ShoppingSmartAd) isAd_AdData() {} + +func (*Ad_ShoppingProductAd) isAd_AdData() {} + +func (*Ad_GmailAd) isAd_AdData() {} + +func (*Ad_ImageAd) isAd_AdData() {} + +func (*Ad_VideoAd) isAd_AdData() {} + +func (*Ad_ResponsiveSearchAd) isAd_AdData() {} + +func (*Ad_LegacyResponsiveDisplayAd) isAd_AdData() {} + +func (*Ad_AppAd) isAd_AdData() {} + +func (*Ad_LegacyAppInstallAd) isAd_AdData() {} + +func (*Ad_ResponsiveDisplayAd) isAd_AdData() {} + +func (*Ad_DisplayUploadAd) isAd_AdData() {} + +func (*Ad_AppEngagementAd) isAd_AdData() {} + +func (m *Ad) GetAdData() isAd_AdData { + if m != nil { + return 
m.AdData + } + return nil +} + +func (m *Ad) GetTextAd() *common.TextAdInfo { + if x, ok := m.GetAdData().(*Ad_TextAd); ok { + return x.TextAd + } + return nil +} + +func (m *Ad) GetExpandedTextAd() *common.ExpandedTextAdInfo { + if x, ok := m.GetAdData().(*Ad_ExpandedTextAd); ok { + return x.ExpandedTextAd + } + return nil +} + +func (m *Ad) GetCallOnlyAd() *common.CallOnlyAdInfo { + if x, ok := m.GetAdData().(*Ad_CallOnlyAd); ok { + return x.CallOnlyAd + } + return nil +} + +func (m *Ad) GetExpandedDynamicSearchAd() *common.ExpandedDynamicSearchAdInfo { + if x, ok := m.GetAdData().(*Ad_ExpandedDynamicSearchAd); ok { + return x.ExpandedDynamicSearchAd + } + return nil +} + +func (m *Ad) GetHotelAd() *common.HotelAdInfo { + if x, ok := m.GetAdData().(*Ad_HotelAd); ok { + return x.HotelAd + } + return nil +} + +func (m *Ad) GetShoppingSmartAd() *common.ShoppingSmartAdInfo { + if x, ok := m.GetAdData().(*Ad_ShoppingSmartAd); ok { + return x.ShoppingSmartAd + } + return nil +} + +func (m *Ad) GetShoppingProductAd() *common.ShoppingProductAdInfo { + if x, ok := m.GetAdData().(*Ad_ShoppingProductAd); ok { + return x.ShoppingProductAd + } + return nil +} + +func (m *Ad) GetGmailAd() *common.GmailAdInfo { + if x, ok := m.GetAdData().(*Ad_GmailAd); ok { + return x.GmailAd + } + return nil +} + +func (m *Ad) GetImageAd() *common.ImageAdInfo { + if x, ok := m.GetAdData().(*Ad_ImageAd); ok { + return x.ImageAd + } + return nil +} + +func (m *Ad) GetVideoAd() *common.VideoAdInfo { + if x, ok := m.GetAdData().(*Ad_VideoAd); ok { + return x.VideoAd + } + return nil +} + +func (m *Ad) GetResponsiveSearchAd() *common.ResponsiveSearchAdInfo { + if x, ok := m.GetAdData().(*Ad_ResponsiveSearchAd); ok { + return x.ResponsiveSearchAd + } + return nil +} + +func (m *Ad) GetLegacyResponsiveDisplayAd() *common.LegacyResponsiveDisplayAdInfo { + if x, ok := m.GetAdData().(*Ad_LegacyResponsiveDisplayAd); ok { + return x.LegacyResponsiveDisplayAd + } + return nil +} + +func (m *Ad) GetAppAd() *common.AppAdInfo { + if x, ok := m.GetAdData().(*Ad_AppAd); ok { + return x.AppAd + } + return nil +} + +func (m *Ad) GetLegacyAppInstallAd() *common.LegacyAppInstallAdInfo { + if x, ok := m.GetAdData().(*Ad_LegacyAppInstallAd); ok { + return x.LegacyAppInstallAd + } + return nil +} + +func (m *Ad) GetResponsiveDisplayAd() *common.ResponsiveDisplayAdInfo { + if x, ok := m.GetAdData().(*Ad_ResponsiveDisplayAd); ok { + return x.ResponsiveDisplayAd + } + return nil +} + +func (m *Ad) GetDisplayUploadAd() *common.DisplayUploadAdInfo { + if x, ok := m.GetAdData().(*Ad_DisplayUploadAd); ok { + return x.DisplayUploadAd + } + return nil +} + +func (m *Ad) GetAppEngagementAd() *common.AppEngagementAdInfo { + if x, ok := m.GetAdData().(*Ad_AppEngagementAd); ok { + return x.AppEngagementAd + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Ad) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Ad_OneofMarshaler, _Ad_OneofUnmarshaler, _Ad_OneofSizer, []interface{}{ + (*Ad_TextAd)(nil), + (*Ad_ExpandedTextAd)(nil), + (*Ad_CallOnlyAd)(nil), + (*Ad_ExpandedDynamicSearchAd)(nil), + (*Ad_HotelAd)(nil), + (*Ad_ShoppingSmartAd)(nil), + (*Ad_ShoppingProductAd)(nil), + (*Ad_GmailAd)(nil), + (*Ad_ImageAd)(nil), + (*Ad_VideoAd)(nil), + (*Ad_ResponsiveSearchAd)(nil), + (*Ad_LegacyResponsiveDisplayAd)(nil), + (*Ad_AppAd)(nil), + (*Ad_LegacyAppInstallAd)(nil), + (*Ad_ResponsiveDisplayAd)(nil), + (*Ad_DisplayUploadAd)(nil), + (*Ad_AppEngagementAd)(nil), + } +} + +func _Ad_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Ad) + // ad_data + switch x := m.AdData.(type) { + case *Ad_TextAd: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextAd); err != nil { + return err + } + case *Ad_ExpandedTextAd: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExpandedTextAd); err != nil { + return err + } + case *Ad_CallOnlyAd: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CallOnlyAd); err != nil { + return err + } + case *Ad_ExpandedDynamicSearchAd: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExpandedDynamicSearchAd); err != nil { + return err + } + case *Ad_HotelAd: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelAd); err != nil { + return err + } + case *Ad_ShoppingSmartAd: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ShoppingSmartAd); err != nil { + return err + } + case *Ad_ShoppingProductAd: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ShoppingProductAd); err != nil { + return err + } + case *Ad_GmailAd: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GmailAd); err != nil { + return err + } + case *Ad_ImageAd: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageAd); err != nil { + return err + } + case *Ad_VideoAd: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoAd); err != nil { + return err + } + case *Ad_ResponsiveSearchAd: + b.EncodeVarint(25<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ResponsiveSearchAd); err != nil { + return err + } + case *Ad_LegacyResponsiveDisplayAd: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LegacyResponsiveDisplayAd); err != nil { + return err + } + case *Ad_AppAd: + b.EncodeVarint(29<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppAd); err != nil { + return err + } + case *Ad_LegacyAppInstallAd: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LegacyAppInstallAd); err != nil { + return err + } + case *Ad_ResponsiveDisplayAd: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ResponsiveDisplayAd); err != nil { + return err + } + case *Ad_DisplayUploadAd: + b.EncodeVarint(33<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DisplayUploadAd); err != nil { + return err + } + case *Ad_AppEngagementAd: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngagementAd); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Ad.AdData has unexpected type %T", x) + } + return nil +} + +func _Ad_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, 
error) { + m := msg.(*Ad) + switch tag { + case 6: // ad_data.text_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TextAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_TextAd{msg} + return true, err + case 7: // ad_data.expanded_text_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ExpandedTextAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ExpandedTextAd{msg} + return true, err + case 13: // ad_data.call_only_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CallOnlyAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_CallOnlyAd{msg} + return true, err + case 14: // ad_data.expanded_dynamic_search_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ExpandedDynamicSearchAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ExpandedDynamicSearchAd{msg} + return true, err + case 15: // ad_data.hotel_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.HotelAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_HotelAd{msg} + return true, err + case 17: // ad_data.shopping_smart_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ShoppingSmartAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ShoppingSmartAd{msg} + return true, err + case 18: // ad_data.shopping_product_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ShoppingProductAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ShoppingProductAd{msg} + return true, err + case 21: // ad_data.gmail_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.GmailAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_GmailAd{msg} + return true, err + case 22: // ad_data.image_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ImageAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ImageAd{msg} + return true, err + case 24: // ad_data.video_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.VideoAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_VideoAd{msg} + return true, err + case 25: // ad_data.responsive_search_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ResponsiveSearchAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ResponsiveSearchAd{msg} + return true, err + case 28: // ad_data.legacy_responsive_display_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LegacyResponsiveDisplayAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_LegacyResponsiveDisplayAd{msg} + return true, err + case 29: // ad_data.app_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AppAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_AppAd{msg} + return true, err + case 30: // ad_data.legacy_app_install_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LegacyAppInstallAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_LegacyAppInstallAd{msg} + return true, err + case 31: // ad_data.responsive_display_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(common.ResponsiveDisplayAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_ResponsiveDisplayAd{msg} + return true, err + case 33: // ad_data.display_upload_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.DisplayUploadAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_DisplayUploadAd{msg} + return true, err + case 34: // ad_data.app_engagement_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AppEngagementAdInfo) + err := b.DecodeMessage(msg) + m.AdData = &Ad_AppEngagementAd{msg} + return true, err + default: + return false, nil + } +} + +func _Ad_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Ad) + // ad_data + switch x := m.AdData.(type) { + case *Ad_TextAd: + s := proto.Size(x.TextAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ExpandedTextAd: + s := proto.Size(x.ExpandedTextAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_CallOnlyAd: + s := proto.Size(x.CallOnlyAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ExpandedDynamicSearchAd: + s := proto.Size(x.ExpandedDynamicSearchAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_HotelAd: + s := proto.Size(x.HotelAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ShoppingSmartAd: + s := proto.Size(x.ShoppingSmartAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ShoppingProductAd: + s := proto.Size(x.ShoppingProductAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_GmailAd: + s := proto.Size(x.GmailAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ImageAd: + s := proto.Size(x.ImageAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_VideoAd: + s := proto.Size(x.VideoAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ResponsiveSearchAd: + s := proto.Size(x.ResponsiveSearchAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_LegacyResponsiveDisplayAd: + s := proto.Size(x.LegacyResponsiveDisplayAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_AppAd: + s := proto.Size(x.AppAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_LegacyAppInstallAd: + s := proto.Size(x.LegacyAppInstallAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_ResponsiveDisplayAd: + s := proto.Size(x.ResponsiveDisplayAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_DisplayUploadAd: + s := proto.Size(x.DisplayUploadAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Ad_AppEngagementAd: + s := proto.Size(x.AppEngagementAd) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Ad)(nil), "google.ads.googleads.v1.resources.Ad") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad.proto", fileDescriptor_ad_55cd72465d865885) +} + +var fileDescriptor_ad_55cd72465d865885 = []byte{ + // 1176 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x97, 0xdf, 0x6e, 0xdb, 0x36, + 0x1b, 0xc6, 0x3f, 0xbb, 0x6d, 0xf2, 
0x95, 0xed, 0xf2, 0x87, 0x6e, 0x56, 0x35, 0xcd, 0xba, 0xb6, + 0x43, 0x81, 0xae, 0xc5, 0xe4, 0x26, 0x5d, 0x3b, 0xc0, 0x45, 0x81, 0xc9, 0x4d, 0xd6, 0x64, 0x58, + 0x31, 0xcf, 0xae, 0x7d, 0x50, 0x64, 0x13, 0x18, 0x91, 0x56, 0xb4, 0x52, 0x24, 0x41, 0x4a, 0x5e, + 0xbd, 0xa3, 0xdd, 0xc9, 0x80, 0x1d, 0xee, 0x52, 0x76, 0x25, 0xc3, 0x8e, 0x77, 0x01, 0x03, 0x49, + 0x89, 0x8e, 0x93, 0x38, 0xd2, 0x89, 0x21, 0x92, 0xef, 0xf3, 0x7b, 0x5e, 0xbe, 0x24, 0x45, 0x19, + 0x3c, 0x8a, 0x39, 0x8f, 0x29, 0x69, 0x23, 0xac, 0xda, 0xf6, 0x51, 0x3f, 0x4d, 0xb6, 0xdb, 0x92, + 0x28, 0x9e, 0xcb, 0x88, 0xa8, 0x36, 0xc2, 0xbe, 0x90, 0x3c, 0xe3, 0xf0, 0x9e, 0x0d, 0xf0, 0x11, + 0x56, 0xbe, 0x8b, 0xf5, 0x27, 0xdb, 0xbe, 0x8b, 0xdd, 0xdc, 0x59, 0x84, 0x8b, 0x78, 0x9a, 0x72, + 0xd6, 0x46, 0x38, 0xcc, 0xa6, 0x82, 0x84, 0x09, 0x1b, 0x73, 0x65, 0xb1, 0x9b, 0xcf, 0x2a, 0x34, + 0x51, 0xae, 0x32, 0x9e, 0x86, 0x02, 0x49, 0x94, 0x92, 0x8c, 0xc8, 0x42, 0x56, 0x65, 0x35, 0x4e, + 0x18, 0xa2, 0x21, 0x12, 0x22, 0xcc, 0x25, 0x2d, 0x34, 0x4f, 0x2b, 0x34, 0xb9, 0xa4, 0x61, 0xc4, + 0x29, 0x25, 0x51, 0x96, 0x70, 0x56, 0x88, 0x1e, 0x2f, 0x12, 0x11, 0x96, 0xa7, 0xaa, 0x9c, 0x52, + 0x11, 0xfc, 0xe8, 0xe2, 0x60, 0x4c, 0x26, 0x49, 0x54, 0xc6, 0x7e, 0x7d, 0x71, 0xac, 0x9a, 0xaa, + 0x8c, 0xa4, 0x61, 0x8a, 0x18, 0x8a, 0x09, 0x0e, 0x09, 0xcb, 0x92, 0x6c, 0x1a, 0xda, 0x4a, 0x17, + 0x84, 0x3b, 0x05, 0xc1, 0xb4, 0x8e, 0xf2, 0x71, 0xfb, 0x17, 0x89, 0x84, 0x20, 0xb2, 0x2c, 0xed, + 0x56, 0xe9, 0x20, 0x92, 0x36, 0x62, 0x8c, 0x67, 0x48, 0xcf, 0xab, 0x18, 0xbd, 0xff, 0xf7, 0x0d, + 0xd0, 0x0c, 0x30, 0x7c, 0x0c, 0x9a, 0x09, 0xf6, 0x1a, 0x77, 0x1b, 0x0f, 0xaf, 0xed, 0xdc, 0x2e, + 0x16, 0xd6, 0x2f, 0x89, 0xfe, 0x01, 0xcb, 0x9e, 0x7f, 0x39, 0x42, 0x34, 0x27, 0xfd, 0x66, 0x82, + 0xe1, 0x0b, 0x00, 0x6c, 0x61, 0x73, 0x49, 0x95, 0xd7, 0xbc, 0x7b, 0xe9, 0xe1, 0xb5, 0x9d, 0xad, + 0x33, 0xa2, 0x41, 0x26, 0x13, 0x16, 0x5b, 0xd5, 0x55, 0x13, 0x3f, 0x94, 0x54, 0xc1, 0x1f, 0xc0, + 0xca, 0xdc, 0xaa, 0x28, 0xef, 0x33, 0x03, 0x78, 0xec, 0x2f, 0xda, 0x59, 0x76, 0x5d, 0xfc, 0x6f, + 0xb4, 0x2a, 0x10, 0x62, 0x28, 0x69, 0xff, 0xfa, 0x78, 0xd6, 0x50, 0x70, 0x1f, 0xac, 0x5b, 0x64, + 0xca, 0x8f, 0x12, 0x4a, 0x2c, 0x75, 0xad, 0x46, 0x5a, 0xab, 0x46, 0xf6, 0xc6, 0xa8, 0x0c, 0xa9, + 0x07, 0x36, 0x32, 0x89, 0xa2, 0xf7, 0x09, 0x8b, 0x35, 0x25, 0xcc, 0x48, 0x2a, 0x28, 0xca, 0x88, + 0x77, 0xdd, 0x54, 0xe6, 0x62, 0x5a, 0xab, 0x94, 0x0e, 0x25, 0x7d, 0x5b, 0x08, 0x61, 0x04, 0x36, + 0xcc, 0x86, 0x3a, 0xb5, 0x7f, 0x95, 0x07, 0x4c, 0x7e, 0xed, 0xaa, 0x59, 0xbf, 0x32, 0xc2, 0x5e, + 0xa9, 0xeb, 0xb7, 0x72, 0x49, 0x4f, 0xf5, 0x29, 0xf8, 0x12, 0x5c, 0xc3, 0x89, 0x12, 0x14, 0x4d, + 0x75, 0xd6, 0xde, 0xe5, 0x1a, 0xc9, 0x82, 0x42, 0x30, 0x94, 0x14, 0xee, 0x82, 0xcb, 0x7a, 0xf7, + 0x7a, 0x57, 0xee, 0x36, 0x1e, 0xae, 0xec, 0x3c, 0x59, 0x98, 0x92, 0xd9, 0x92, 0x7e, 0x80, 0xdf, + 0x4e, 0x05, 0xd9, 0x63, 0x79, 0x5a, 0x3c, 0xf6, 0x8d, 0x1a, 0x1e, 0x80, 0x16, 0xc2, 0x98, 0xe0, + 0xf0, 0x68, 0x1a, 0x5a, 0x59, 0x88, 0xb0, 0xf2, 0x5a, 0x26, 0x99, 0xcd, 0x33, 0xc9, 0x74, 0x39, + 0xa7, 0x36, 0x95, 0x35, 0x23, 0xeb, 0x4e, 0x5f, 0x9b, 0x88, 0x00, 0x2b, 0xf8, 0x23, 0x58, 0xb7, + 0x87, 0x24, 0x14, 0x92, 0x8c, 0x89, 0x24, 0x2c, 0x22, 0xde, 0x8d, 0x5a, 0xd9, 0xed, 0x1a, 0x9d, + 0xc9, 0xce, 0x3e, 0xf6, 0xd7, 0x2c, 0xaa, 0xe7, 0x48, 0x70, 0x04, 0x56, 0xe7, 0x0f, 0xb9, 0xf2, + 0x36, 0xcd, 0x6a, 0x7c, 0x51, 0xb5, 0x1a, 0x43, 0x49, 0x5f, 0x39, 0x55, 0x7f, 0x25, 0x3f, 0xd9, + 0x54, 0xf0, 0x09, 0xb8, 0xcc, 0x50, 0x4a, 0xbc, 0x9b, 0x35, 0xea, 0x6f, 0x22, 0xe1, 0xef, 0x0d, + 0x70, 0xe7, 0xd4, 0x11, 0x2f, 0xdf, 0xa3, 0xc5, 0x21, 0xf7, 
0x6e, 0x9b, 0x69, 0xbf, 0xab, 0x98, + 0xf6, 0xc0, 0x40, 0xde, 0x58, 0x46, 0xbf, 0x40, 0x0c, 0xcc, 0xaf, 0xa9, 0xc5, 0x05, 0xe3, 0xfd, + 0xdb, 0x6a, 0xf1, 0x20, 0xdc, 0x03, 0xcb, 0x19, 0xf9, 0x90, 0x85, 0x08, 0x7b, 0x4b, 0x66, 0x5a, + 0x8f, 0xaa, 0x6a, 0xf4, 0x96, 0x7c, 0xc8, 0x02, 0x7c, 0xc0, 0xc6, 0x7c, 0xff, 0x7f, 0xfd, 0xa5, + 0xcc, 0xb4, 0xe0, 0x4f, 0x60, 0x8d, 0x7c, 0x10, 0x88, 0xe9, 0xfd, 0x51, 0xf2, 0x96, 0x0d, 0x6f, + 0xa7, 0x8a, 0xb7, 0x57, 0xe8, 0xe6, 0xb8, 0x2b, 0x64, 0xae, 0x17, 0xf6, 0xc1, 0xf5, 0x08, 0x51, + 0x1a, 0x72, 0x46, 0xa7, 0x9a, 0xfd, 0x91, 0x61, 0xfb, 0x95, 0xa7, 0x0b, 0x51, 0xfa, 0x3d, 0xa3, + 0x53, 0xc7, 0x05, 0x91, 0xeb, 0x81, 0xbf, 0x82, 0x4d, 0x97, 0x33, 0x9e, 0x32, 0x94, 0x26, 0x51, + 0xa8, 0x08, 0x92, 0xd1, 0xb1, 0x76, 0x58, 0x31, 0x0e, 0x2f, 0xea, 0x66, 0xbf, 0x6b, 0x01, 0x03, + 0xa3, 0x77, 0x76, 0x37, 0xc9, 0xf9, 0xc3, 0x70, 0x1f, 0xfc, 0xff, 0x98, 0x67, 0x84, 0x6a, 0xa7, + 0x55, 0xe3, 0x54, 0xf9, 0x7e, 0xdc, 0xd7, 0xf1, 0x8e, 0xbc, 0x7c, 0x6c, 0x9b, 0x10, 0x81, 0x75, + 0x75, 0xcc, 0x85, 0xd0, 0xaf, 0x34, 0x95, 0x22, 0x69, 0x4a, 0xbf, 0x6e, 0x90, 0x4f, 0xab, 0x90, + 0x83, 0x42, 0x38, 0xd0, 0x3a, 0x87, 0x5e, 0x55, 0xf3, 0xdd, 0x30, 0x06, 0x2d, 0x67, 0x21, 0x24, + 0xc7, 0x79, 0x64, 0x4c, 0xa0, 0x31, 0x79, 0x56, 0xd7, 0xa4, 0x67, 0x95, 0xce, 0xc6, 0xa5, 0xed, + 0x06, 0x74, 0x55, 0xe2, 0x14, 0x25, 0xa6, 0x2a, 0x1b, 0xf5, 0xaa, 0xf2, 0x5a, 0xc7, 0xcf, 0xaa, + 0x12, 0xdb, 0xa6, 0x26, 0x25, 0x29, 0x8a, 0xf5, 0x2b, 0xca, 0xfb, 0xb8, 0x1e, 0xe9, 0x40, 0xc7, + 0xcf, 0x48, 0x89, 0x6d, 0x6a, 0xd2, 0x24, 0xc1, 0x84, 0x6b, 0x92, 0x57, 0x8f, 0x34, 0xd2, 0xf1, + 0x33, 0xd2, 0xc4, 0x36, 0xe1, 0xcf, 0xe0, 0x86, 0x24, 0x4a, 0x70, 0xa6, 0x92, 0x09, 0x39, 0xb1, + 0xd3, 0x6e, 0x19, 0xea, 0xf3, 0x2a, 0x6a, 0xdf, 0x69, 0x4f, 0x6d, 0x32, 0x28, 0xcf, 0x8c, 0xc0, + 0xdf, 0x1a, 0x60, 0x8b, 0x92, 0x18, 0x45, 0xd3, 0xf0, 0x84, 0x67, 0x79, 0x89, 0x20, 0xec, 0x6d, + 0x19, 0xd3, 0x97, 0x55, 0xa6, 0xdf, 0x19, 0xc6, 0xcc, 0x7a, 0xd7, 0x12, 0x9c, 0xf7, 0x2d, 0xba, + 0x28, 0x00, 0x76, 0xc1, 0x92, 0xfe, 0x04, 0x40, 0xd8, 0xfb, 0xc4, 0x78, 0x7d, 0x5e, 0xe5, 0x15, + 0x08, 0xe1, 0xb8, 0x57, 0x90, 0x6e, 0xc0, 0xf7, 0x60, 0xa3, 0x98, 0x85, 0x46, 0x25, 0x4c, 0x65, + 0xfa, 0x25, 0x80, 0xb0, 0x77, 0xa7, 0x5e, 0xcd, 0x6c, 0xfa, 0x81, 0x10, 0x07, 0x56, 0x3a, 0xab, + 0x19, 0x3d, 0x33, 0x02, 0x53, 0xb0, 0x71, 0x7e, 0xad, 0x3e, 0x35, 0x66, 0x5f, 0xd5, 0x5f, 0xa0, + 0xd3, 0x55, 0x6a, 0xc9, 0x73, 0xea, 0x83, 0xc0, 0xba, 0xbb, 0xd4, 0x05, 0xe5, 0x08, 0x6b, 0xab, + 0x7b, 0xf5, 0x0e, 0x6e, 0x41, 0x19, 0x1a, 0xdd, 0xec, 0xe0, 0xe2, 0xf9, 0x6e, 0x6d, 0xa1, 0xeb, + 0x46, 0x58, 0x8c, 0x62, 0x92, 0x12, 0x66, 0x8e, 0xed, 0xfd, 0x7a, 0x16, 0x81, 0x10, 0x7b, 0x4e, + 0x37, 0xb3, 0x40, 0xf3, 0xdd, 0xdd, 0xab, 0x60, 0x19, 0xe1, 0x10, 0xa3, 0x0c, 0x75, 0xff, 0x6d, + 0x80, 0x07, 0x11, 0x4f, 0xfd, 0xca, 0x7f, 0x10, 0xdd, 0xe5, 0x00, 0xf7, 0xf4, 0xa5, 0xd9, 0x6b, + 0xbc, 0xfb, 0xb6, 0x88, 0x8e, 0x39, 0x45, 0x2c, 0xf6, 0xb9, 0x8c, 0xdb, 0x31, 0x61, 0xe6, 0x4a, + 0x2d, 0xbf, 0x97, 0x45, 0xa2, 0x2e, 0xf8, 0xeb, 0xf2, 0xc2, 0x3d, 0xfd, 0xd1, 0xbc, 0xf4, 0x3a, + 0x08, 0xfe, 0x6c, 0xde, 0xb3, 0x1f, 0x1a, 0x7e, 0x80, 0x95, 0xef, 0xbe, 0x39, 0xfc, 0xd1, 0xb6, + 0x5f, 0x5e, 0x7d, 0xea, 0xaf, 0x32, 0xe6, 0x30, 0xc0, 0xea, 0xd0, 0xc5, 0x1c, 0x8e, 0xb6, 0x0f, + 0x5d, 0xcc, 0x3f, 0xcd, 0x07, 0x76, 0xa0, 0xd3, 0x09, 0xb0, 0xea, 0x74, 0x5c, 0x54, 0xa7, 0x33, + 0xda, 0xee, 0x74, 0x5c, 0xdc, 0xd1, 0x92, 0x49, 0xf6, 0xe9, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0xd8, 0x9c, 0xa7, 0x91, 0x66, 0x0d, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group.pb.go new file mode 100644 index 0000000..1702283 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group.pb.go @@ -0,0 +1,366 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group. +type AdGroup struct { + // The resource name of the ad group. + // Ad group resource names have the form: + // + // `customers/{customer_id}/adGroups/{ad_group_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the ad group. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the ad group. + // + // This field is required and should not be empty when creating new ad + // groups. + // + // It must contain fewer than 255 UTF-8 full-width characters. + // + // It must not contain any null (code point 0x0), NL line feed + // (code point 0xA) or carriage return (code point 0xD) characters. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The status of the ad group. + Status enums.AdGroupStatusEnum_AdGroupStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.AdGroupStatusEnum_AdGroupStatus" json:"status,omitempty"` + // The type of the ad group. + Type enums.AdGroupTypeEnum_AdGroupType `protobuf:"varint,12,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.AdGroupTypeEnum_AdGroupType" json:"type,omitempty"` + // The ad rotation mode of the ad group. + AdRotationMode enums.AdGroupAdRotationModeEnum_AdGroupAdRotationMode `protobuf:"varint,22,opt,name=ad_rotation_mode,json=adRotationMode,proto3,enum=google.ads.googleads.v1.enums.AdGroupAdRotationModeEnum_AdGroupAdRotationMode" json:"ad_rotation_mode,omitempty"` + // The URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,13,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The list of mappings used to substitute custom parameter tags in a + // `tracking_url_template`, `final_urls`, or `mobile_final_urls`. + UrlCustomParameters []*common.CustomParameter `protobuf:"bytes,6,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // The campaign to which the ad group belongs. 
+ Campaign *wrappers.StringValue `protobuf:"bytes,10,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The maximum CPC (cost-per-click) bid. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,14,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + // The maximum CPM (cost-per-thousand viewable impressions) bid. + CpmBidMicros *wrappers.Int64Value `protobuf:"bytes,15,opt,name=cpm_bid_micros,json=cpmBidMicros,proto3" json:"cpm_bid_micros,omitempty"` + // The target CPA (cost-per-acquisition). + TargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,27,opt,name=target_cpa_micros,json=targetCpaMicros,proto3" json:"target_cpa_micros,omitempty"` + // The CPV (cost-per-view) bid. + CpvBidMicros *wrappers.Int64Value `protobuf:"bytes,17,opt,name=cpv_bid_micros,json=cpvBidMicros,proto3" json:"cpv_bid_micros,omitempty"` + // Average amount in micros that the advertiser is willing to pay for every + // thousand times the ad is shown. + TargetCpmMicros *wrappers.Int64Value `protobuf:"bytes,26,opt,name=target_cpm_micros,json=targetCpmMicros,proto3" json:"target_cpm_micros,omitempty"` + // The target ROAS (return-on-ad-spend) override. If the ad group's campaign + // bidding strategy is a standard Target ROAS strategy, then this field + // overrides the target ROAS specified in the campaign's bidding strategy. + // Otherwise, this value is ignored. + TargetRoas *wrappers.DoubleValue `protobuf:"bytes,30,opt,name=target_roas,json=targetRoas,proto3" json:"target_roas,omitempty"` + // The percent cpc bid amount, expressed as a fraction of the advertised price + // for some good or service. The valid range for the fraction is [0,1) and the + // value stored here is 1,000,000 * [fraction]. + PercentCpcBidMicros *wrappers.Int64Value `protobuf:"bytes,20,opt,name=percent_cpc_bid_micros,json=percentCpcBidMicros,proto3" json:"percent_cpc_bid_micros,omitempty"` + // Settings for the Display Campaign Optimizer, initially termed "Explorer". + ExplorerAutoOptimizerSetting *common.ExplorerAutoOptimizerSetting `protobuf:"bytes,21,opt,name=explorer_auto_optimizer_setting,json=explorerAutoOptimizerSetting,proto3" json:"explorer_auto_optimizer_setting,omitempty"` + // Allows advertisers to specify a targeting dimension on which to place + // absolute bids. This is only applicable for campaigns that target only the + // display network and not search. + DisplayCustomBidDimension enums.TargetingDimensionEnum_TargetingDimension `protobuf:"varint,23,opt,name=display_custom_bid_dimension,json=displayCustomBidDimension,proto3,enum=google.ads.googleads.v1.enums.TargetingDimensionEnum_TargetingDimension" json:"display_custom_bid_dimension,omitempty"` + // URL template for appending params to Final URL. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,24,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + // Setting for targeting related features. + TargetingSetting *common.TargetingSetting `protobuf:"bytes,25,opt,name=targeting_setting,json=targetingSetting,proto3" json:"targeting_setting,omitempty"` + // The effective target CPA (cost-per-acquisition). + // This field is read-only. + EffectiveTargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,28,opt,name=effective_target_cpa_micros,json=effectiveTargetCpaMicros,proto3" json:"effective_target_cpa_micros,omitempty"` + // Source of the effective target CPA. + // This field is read-only. 
+ EffectiveTargetCpaSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,29,opt,name=effective_target_cpa_source,json=effectiveTargetCpaSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_target_cpa_source,omitempty"` + // The effective target ROAS (return-on-ad-spend). + // This field is read-only. + EffectiveTargetRoas *wrappers.DoubleValue `protobuf:"bytes,31,opt,name=effective_target_roas,json=effectiveTargetRoas,proto3" json:"effective_target_roas,omitempty"` + // Source of the effective target ROAS. + // This field is read-only. + EffectiveTargetRoasSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,32,opt,name=effective_target_roas_source,json=effectiveTargetRoasSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_target_roas_source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroup) Reset() { *m = AdGroup{} } +func (m *AdGroup) String() string { return proto.CompactTextString(m) } +func (*AdGroup) ProtoMessage() {} +func (*AdGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_4e6d485528e2fbcb, []int{0} +} +func (m *AdGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroup.Unmarshal(m, b) +} +func (m *AdGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroup.Marshal(b, m, deterministic) +} +func (dst *AdGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroup.Merge(dst, src) +} +func (m *AdGroup) XXX_Size() int { + return xxx_messageInfo_AdGroup.Size(m) +} +func (m *AdGroup) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroup proto.InternalMessageInfo + +func (m *AdGroup) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroup) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *AdGroup) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *AdGroup) GetStatus() enums.AdGroupStatusEnum_AdGroupStatus { + if m != nil { + return m.Status + } + return enums.AdGroupStatusEnum_UNSPECIFIED +} + +func (m *AdGroup) GetType() enums.AdGroupTypeEnum_AdGroupType { + if m != nil { + return m.Type + } + return enums.AdGroupTypeEnum_UNSPECIFIED +} + +func (m *AdGroup) GetAdRotationMode() enums.AdGroupAdRotationModeEnum_AdGroupAdRotationMode { + if m != nil { + return m.AdRotationMode + } + return enums.AdGroupAdRotationModeEnum_UNSPECIFIED +} + +func (m *AdGroup) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *AdGroup) GetUrlCustomParameters() []*common.CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *AdGroup) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *AdGroup) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func (m *AdGroup) GetCpmBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpmBidMicros + } + return nil +} + +func (m *AdGroup) GetTargetCpaMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetCpaMicros + } + return nil +} + +func (m *AdGroup) GetCpvBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpvBidMicros + } + return nil +} + +func (m 
*AdGroup) GetTargetCpmMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetCpmMicros + } + return nil +} + +func (m *AdGroup) GetTargetRoas() *wrappers.DoubleValue { + if m != nil { + return m.TargetRoas + } + return nil +} + +func (m *AdGroup) GetPercentCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.PercentCpcBidMicros + } + return nil +} + +func (m *AdGroup) GetExplorerAutoOptimizerSetting() *common.ExplorerAutoOptimizerSetting { + if m != nil { + return m.ExplorerAutoOptimizerSetting + } + return nil +} + +func (m *AdGroup) GetDisplayCustomBidDimension() enums.TargetingDimensionEnum_TargetingDimension { + if m != nil { + return m.DisplayCustomBidDimension + } + return enums.TargetingDimensionEnum_UNSPECIFIED +} + +func (m *AdGroup) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +func (m *AdGroup) GetTargetingSetting() *common.TargetingSetting { + if m != nil { + return m.TargetingSetting + } + return nil +} + +func (m *AdGroup) GetEffectiveTargetCpaMicros() *wrappers.Int64Value { + if m != nil { + return m.EffectiveTargetCpaMicros + } + return nil +} + +func (m *AdGroup) GetEffectiveTargetCpaSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectiveTargetCpaSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func (m *AdGroup) GetEffectiveTargetRoas() *wrappers.DoubleValue { + if m != nil { + return m.EffectiveTargetRoas + } + return nil +} + +func (m *AdGroup) GetEffectiveTargetRoasSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectiveTargetRoasSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*AdGroup)(nil), "google.ads.googleads.v1.resources.AdGroup") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group.proto", fileDescriptor_ad_group_4e6d485528e2fbcb) +} + +var fileDescriptor_ad_group_4e6d485528e2fbcb = []byte{ + // 952 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xcf, 0x6f, 0x23, 0x35, + 0x14, 0xc7, 0x95, 0xb4, 0x14, 0x70, 0xbb, 0x69, 0x77, 0x4a, 0x97, 0x69, 0x1b, 0x76, 0xb3, 0xa0, + 0x95, 0x2a, 0x21, 0x4d, 0x9a, 0x2e, 0x2c, 0x28, 0xb0, 0x48, 0x49, 0xbb, 0x14, 0x90, 0xb6, 0x44, + 0x93, 0x6e, 0x0e, 0xab, 0xa2, 0x91, 0x33, 0x76, 0x46, 0x16, 0x33, 0x63, 0xcb, 0xf6, 0x84, 0x16, + 0x09, 0x71, 0xe0, 0xc4, 0xbf, 0xc1, 0x91, 0x3f, 0x85, 0x3f, 0x85, 0x2b, 0x27, 0x6e, 0x68, 0xfc, + 0x63, 0x9a, 0xa4, 0x4d, 0x67, 0x90, 0xf6, 0xe6, 0xb1, 0xdf, 0xf7, 0xe3, 0xf7, 0x9e, 0xdf, 0x1b, + 0x1b, 0x1c, 0x46, 0x94, 0x46, 0x31, 0x6e, 0x43, 0x24, 0xda, 0x7a, 0x98, 0x8f, 0xa6, 0x9d, 0x36, + 0xc7, 0x82, 0x66, 0x3c, 0xc4, 0xa2, 0x0d, 0x51, 0x10, 0x71, 0x9a, 0x31, 0x8f, 0x71, 0x2a, 0xa9, + 0xf3, 0x58, 0x9b, 0x79, 0x10, 0x09, 0xaf, 0x50, 0x78, 0xd3, 0x8e, 0x57, 0x28, 0xf6, 0x3e, 0x5d, + 0x06, 0x0d, 0x69, 0x92, 0xd0, 0xb4, 0x1d, 0x66, 0x42, 0xd2, 0x24, 0x60, 0x90, 0xc3, 0x04, 0x4b, + 0xcc, 0x35, 0x79, 0xef, 0xa4, 0x44, 0x86, 0x2f, 0x59, 0x4c, 0x39, 0xe6, 0x01, 0xcc, 0x24, 0x0d, + 0x28, 0x93, 0x24, 0x21, 0x3f, 0x63, 0x1e, 0x08, 0x2c, 0x25, 0x49, 0x23, 0x43, 0x79, 0x56, 0x42, + 0x91, 0x90, 0x47, 0x38, 0xb7, 0x5f, 0xd0, 0x3d, 0x5f, 0xa6, 0xc3, 0x69, 0x96, 0x5c, 0x67, 0x21, + 0x80, 0x28, 0xe0, 0x54, 0x42, 0x49, 0x68, 0x1a, 0x24, 0x14, 0x61, 0x23, 0x7f, 0x5a, 0x51, 0x2e, + 0x24, 0x94, 0x99, 0x30, 0xa2, 0x4e, 0x45, 0x91, 0xbc, 0x62, 0x76, 0x9f, 0xa3, 0xbb, 0x25, 0x63, + 0x82, 0x90, 0x8a, 0x4d, 0x1d, 0x85, 0xd1, 0x7c, 
0x76, 0xb7, 0xe6, 0x3a, 0x23, 0x88, 0x24, 0x38, + 0x15, 0x84, 0xa6, 0x46, 0xf8, 0xd0, 0x08, 0xd5, 0xd7, 0x38, 0x9b, 0xb4, 0x7f, 0xe2, 0x90, 0x31, + 0xcc, 0xad, 0xff, 0x4d, 0x0b, 0x66, 0xa4, 0x0d, 0xd3, 0xd4, 0xa4, 0xc5, 0xac, 0x7e, 0xf8, 0xcf, + 0x26, 0x78, 0xbb, 0x87, 0x4e, 0xf3, 0x08, 0x9c, 0x8f, 0xc0, 0x3d, 0x5b, 0x1f, 0x41, 0x0a, 0x13, + 0xec, 0xd6, 0x5a, 0xb5, 0x83, 0x77, 0xfd, 0x0d, 0x3b, 0x79, 0x06, 0x13, 0xec, 0x7c, 0x0c, 0xea, + 0x04, 0xb9, 0x2b, 0xad, 0xda, 0xc1, 0xfa, 0xd1, 0xbe, 0x29, 0x2e, 0xcf, 0xee, 0xed, 0x7d, 0x9b, + 0xca, 0x67, 0x9f, 0x8c, 0x60, 0x9c, 0x61, 0xbf, 0x4e, 0x90, 0x73, 0x08, 0x56, 0x15, 0x68, 0x55, + 0x99, 0x37, 0x6f, 0x98, 0x0f, 0x25, 0x27, 0x69, 0xa4, 0xed, 0x95, 0xa5, 0x33, 0x02, 0x6b, 0x3a, + 0xfb, 0xee, 0x5b, 0xad, 0xda, 0x41, 0xe3, 0xe8, 0x2b, 0x6f, 0x59, 0x29, 0xab, 0xbc, 0x78, 0xc6, + 0xf7, 0xa1, 0xd2, 0xbc, 0x48, 0xb3, 0x64, 0x7e, 0xc6, 0x37, 0x34, 0xe7, 0x0c, 0xac, 0xe6, 0x07, + 0xe4, 0x6e, 0x28, 0x6a, 0xb7, 0x1a, 0xf5, 0xfc, 0x8a, 0xe1, 0x59, 0x66, 0xfe, 0xed, 0x2b, 0x8e, + 0x73, 0x09, 0xb6, 0x16, 0x8b, 0xcc, 0x7d, 0xa0, 0xd8, 0x67, 0xd5, 0xd8, 0x3d, 0xe4, 0x1b, 0xf1, + 0x4b, 0x8a, 0xe6, 0x76, 0x99, 0x5f, 0xf1, 0x1b, 0x70, 0xee, 0xdb, 0x19, 0x80, 0x1d, 0xc9, 0x61, + 0xf8, 0x63, 0x5e, 0x0b, 0x19, 0x8f, 0x03, 0x89, 0x13, 0x16, 0x43, 0x89, 0xdd, 0x7b, 0x15, 0x92, + 0xbc, 0x6d, 0xa5, 0xaf, 0x78, 0x7c, 0x6e, 0x84, 0x4e, 0x08, 0x76, 0x72, 0xd0, 0x62, 0xc7, 0x0b, + 0x77, 0xad, 0xb5, 0x72, 0xb0, 0x7e, 0xd4, 0x5e, 0x1a, 0x90, 0xee, 0x56, 0xef, 0x58, 0x09, 0x07, + 0x56, 0xe7, 0x6f, 0x67, 0x3c, 0x5e, 0x98, 0x13, 0xce, 0xe7, 0xe0, 0x9d, 0x10, 0x26, 0x0c, 0x92, + 0x28, 0x75, 0x41, 0x05, 0x4f, 0x0b, 0x6b, 0xa7, 0x07, 0x1a, 0x21, 0x0b, 0x83, 0x31, 0x41, 0x41, + 0x42, 0x42, 0x4e, 0x85, 0xdb, 0x28, 0xaf, 0xbe, 0x8d, 0x90, 0x85, 0x7d, 0x82, 0x5e, 0x2a, 0x81, + 0x46, 0x24, 0xb3, 0x88, 0xcd, 0x4a, 0x88, 0xe4, 0x1a, 0x71, 0x0a, 0xee, 0xeb, 0x1e, 0x0c, 0x42, + 0x06, 0x2d, 0x65, 0xbf, 0x9c, 0xb2, 0xa9, 0x55, 0xc7, 0x0c, 0xce, 0xfa, 0x32, 0x9d, 0xf5, 0xe5, + 0x7e, 0x25, 0x5f, 0xa6, 0xb7, 0xfb, 0x92, 0x58, 0xca, 0xde, 0xff, 0xf0, 0x25, 0x31, 0xa0, 0xe7, + 0x60, 0xdd, 0x80, 0x38, 0x85, 0xc2, 0x7d, 0xb8, 0xe4, 0x5c, 0x4e, 0x68, 0x36, 0x8e, 0xb1, 0x66, + 0x00, 0x2d, 0xf0, 0x29, 0x14, 0xce, 0x00, 0x3c, 0x60, 0x98, 0x87, 0x38, 0xcd, 0x1d, 0x99, 0x3b, + 0xa1, 0xf7, 0xca, 0x9d, 0xd9, 0x36, 0xd2, 0xe3, 0xd9, 0x83, 0xfa, 0xad, 0x06, 0x1e, 0x95, 0x5c, + 0x21, 0xee, 0x8e, 0x62, 0x7f, 0x59, 0x56, 0x95, 0x2f, 0x0c, 0xa6, 0x97, 0x49, 0xfa, 0xbd, 0x85, + 0x0c, 0x35, 0xc3, 0x6f, 0xe2, 0x3b, 0x56, 0x9d, 0xdf, 0x6b, 0xa0, 0x89, 0x88, 0x60, 0x31, 0xbc, + 0xb2, 0x5d, 0x91, 0xc7, 0x56, 0xfc, 0x79, 0xdd, 0xf7, 0x55, 0xa7, 0x7f, 0x53, 0xd2, 0xe9, 0xe7, + 0xf6, 0x9f, 0x7d, 0x62, 0x85, 0xaa, 0xcd, 0x6f, 0x4e, 0xfb, 0xbb, 0x66, 0x37, 0xdd, 0x35, 0x7d, + 0x82, 0x8a, 0x25, 0xe7, 0x6b, 0xb0, 0x35, 0x21, 0x29, 0x8c, 0x55, 0xaf, 0x8b, 0x6c, 0x32, 0x21, + 0x97, 0xae, 0x5b, 0xa1, 0x7f, 0x1a, 0x4a, 0xf5, 0x8a, 0xc7, 0x43, 0xa5, 0x71, 0x7e, 0xb0, 0x35, + 0x33, 0x73, 0xab, 0xba, 0xbb, 0x0a, 0x74, 0x58, 0x96, 0xca, 0xc2, 0x63, 0x9b, 0xbe, 0x2d, 0xb9, + 0x30, 0xe3, 0xbc, 0x06, 0xfb, 0x78, 0x32, 0xc1, 0xa1, 0x24, 0x53, 0x1c, 0xdc, 0x6c, 0x94, 0x66, + 0x79, 0x3d, 0xb8, 0x85, 0xfe, 0x7c, 0xa1, 0x63, 0x7e, 0x59, 0xc2, 0xd6, 0xb7, 0x92, 0xfb, 0x41, + 0xa5, 0x8b, 0xa2, 0xaf, 0x2f, 0xdd, 0xa1, 0xd2, 0xa8, 0x73, 0x98, 0x9b, 0xb9, 0x6d, 0x7b, 0xbd, + 0x92, 0xff, 0x70, 0x6f, 0x6c, 0xaf, 0xda, 0xe5, 0x51, 0x85, 0x76, 0xd9, 0x5e, 0xc0, 0xaa, 0xbe, + 0xf9, 0x15, 0x34, 0x6f, 0x25, 0xda, 0x88, 0x5a, 0x6f, 0x24, 0xa2, 0xdd, 
0x5b, 0xb6, 0xd6, 0x4b, + 0xfd, 0x7f, 0x6b, 0xe0, 0x49, 0x48, 0x13, 0xaf, 0xf4, 0x99, 0xd8, 0xdf, 0x30, 0x97, 0xd2, 0x20, + 0x0f, 0x6e, 0x50, 0x7b, 0xfd, 0x9d, 0x91, 0x44, 0x34, 0x86, 0x69, 0xe4, 0x51, 0x1e, 0xb5, 0x23, + 0x9c, 0xaa, 0xd0, 0xed, 0xb3, 0x85, 0x11, 0x71, 0xc7, 0x53, 0xf5, 0x8b, 0x62, 0xf4, 0x47, 0x7d, + 0xe5, 0xb4, 0xd7, 0xfb, 0xb3, 0xfe, 0xf8, 0x54, 0x23, 0x7b, 0x48, 0x78, 0x7a, 0x98, 0x8f, 0x46, + 0x1d, 0xcf, 0xb7, 0x96, 0x7f, 0x59, 0x9b, 0x8b, 0x1e, 0x12, 0x17, 0x85, 0xcd, 0xc5, 0xa8, 0x73, + 0x51, 0xd8, 0xfc, 0x5d, 0x7f, 0xa2, 0x17, 0xba, 0xdd, 0x1e, 0x12, 0xdd, 0x6e, 0x61, 0xd5, 0xed, + 0x8e, 0x3a, 0xdd, 0x6e, 0x61, 0x37, 0x5e, 0x53, 0xce, 0x3e, 0xfd, 0x2f, 0x00, 0x00, 0xff, 0xff, + 0x33, 0x43, 0x6e, 0x63, 0x56, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad.pb.go new file mode 100644 index 0000000..2f4d741 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad.pb.go @@ -0,0 +1,220 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_ad.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group ad. +type AdGroupAd struct { + // The resource name of the ad. + // Ad group ad resource names have the form: + // + // `customers/{customer_id}/adGroupAds/{ad_group_id}~{ad_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The status of the ad. + Status enums.AdGroupAdStatusEnum_AdGroupAdStatus `protobuf:"varint,3,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.AdGroupAdStatusEnum_AdGroupAdStatus" json:"status,omitempty"` + // The ad group to which the ad belongs. + AdGroup *wrappers.StringValue `protobuf:"bytes,4,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The ad. + Ad *Ad `protobuf:"bytes,5,opt,name=ad,proto3" json:"ad,omitempty"` + // Policy information for the ad. + PolicySummary *AdGroupAdPolicySummary `protobuf:"bytes,6,opt,name=policy_summary,json=policySummary,proto3" json:"policy_summary,omitempty"` + // Overall ad strength for this ad group ad. 
+ AdStrength enums.AdStrengthEnum_AdStrength `protobuf:"varint,7,opt,name=ad_strength,json=adStrength,proto3,enum=google.ads.googleads.v1.enums.AdStrengthEnum_AdStrength" json:"ad_strength,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAd) Reset() { *m = AdGroupAd{} } +func (m *AdGroupAd) String() string { return proto.CompactTextString(m) } +func (*AdGroupAd) ProtoMessage() {} +func (*AdGroupAd) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_ea5fcfcc30667776, []int{0} +} +func (m *AdGroupAd) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAd.Unmarshal(m, b) +} +func (m *AdGroupAd) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAd.Marshal(b, m, deterministic) +} +func (dst *AdGroupAd) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAd.Merge(dst, src) +} +func (m *AdGroupAd) XXX_Size() int { + return xxx_messageInfo_AdGroupAd.Size(m) +} +func (m *AdGroupAd) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAd.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAd proto.InternalMessageInfo + +func (m *AdGroupAd) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupAd) GetStatus() enums.AdGroupAdStatusEnum_AdGroupAdStatus { + if m != nil { + return m.Status + } + return enums.AdGroupAdStatusEnum_UNSPECIFIED +} + +func (m *AdGroupAd) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupAd) GetAd() *Ad { + if m != nil { + return m.Ad + } + return nil +} + +func (m *AdGroupAd) GetPolicySummary() *AdGroupAdPolicySummary { + if m != nil { + return m.PolicySummary + } + return nil +} + +func (m *AdGroupAd) GetAdStrength() enums.AdStrengthEnum_AdStrength { + if m != nil { + return m.AdStrength + } + return enums.AdStrengthEnum_UNSPECIFIED +} + +// Contains policy information for an ad. +type AdGroupAdPolicySummary struct { + // The list of policy findings for this ad. + PolicyTopicEntries []*common.PolicyTopicEntry `protobuf:"bytes,1,rep,name=policy_topic_entries,json=policyTopicEntries,proto3" json:"policy_topic_entries,omitempty"` + // Where in the review process this ad is. + ReviewStatus enums.PolicyReviewStatusEnum_PolicyReviewStatus `protobuf:"varint,2,opt,name=review_status,json=reviewStatus,proto3,enum=google.ads.googleads.v1.enums.PolicyReviewStatusEnum_PolicyReviewStatus" json:"review_status,omitempty"` + // The overall approval status of this ad, calculated based on the status of + // its individual policy topic entries. 
+ ApprovalStatus enums.PolicyApprovalStatusEnum_PolicyApprovalStatus `protobuf:"varint,3,opt,name=approval_status,json=approvalStatus,proto3,enum=google.ads.googleads.v1.enums.PolicyApprovalStatusEnum_PolicyApprovalStatus" json:"approval_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdPolicySummary) Reset() { *m = AdGroupAdPolicySummary{} } +func (m *AdGroupAdPolicySummary) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdPolicySummary) ProtoMessage() {} +func (*AdGroupAdPolicySummary) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_ea5fcfcc30667776, []int{1} +} +func (m *AdGroupAdPolicySummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdPolicySummary.Unmarshal(m, b) +} +func (m *AdGroupAdPolicySummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdPolicySummary.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdPolicySummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdPolicySummary.Merge(dst, src) +} +func (m *AdGroupAdPolicySummary) XXX_Size() int { + return xxx_messageInfo_AdGroupAdPolicySummary.Size(m) +} +func (m *AdGroupAdPolicySummary) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdPolicySummary.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdPolicySummary proto.InternalMessageInfo + +func (m *AdGroupAdPolicySummary) GetPolicyTopicEntries() []*common.PolicyTopicEntry { + if m != nil { + return m.PolicyTopicEntries + } + return nil +} + +func (m *AdGroupAdPolicySummary) GetReviewStatus() enums.PolicyReviewStatusEnum_PolicyReviewStatus { + if m != nil { + return m.ReviewStatus + } + return enums.PolicyReviewStatusEnum_UNSPECIFIED +} + +func (m *AdGroupAdPolicySummary) GetApprovalStatus() enums.PolicyApprovalStatusEnum_PolicyApprovalStatus { + if m != nil { + return m.ApprovalStatus + } + return enums.PolicyApprovalStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*AdGroupAd)(nil), "google.ads.googleads.v1.resources.AdGroupAd") + proto.RegisterType((*AdGroupAdPolicySummary)(nil), "google.ads.googleads.v1.resources.AdGroupAdPolicySummary") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_ad.proto", fileDescriptor_ad_group_ad_ea5fcfcc30667776) +} + +var fileDescriptor_ad_group_ad_ea5fcfcc30667776 = []byte{ + // 592 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xdf, 0x6a, 0xdb, 0x3e, + 0x18, 0xc5, 0xce, 0xef, 0x97, 0xae, 0xca, 0x9a, 0x81, 0x19, 0xc3, 0x94, 0x32, 0xd2, 0x8e, 0x40, + 0xd8, 0x40, 0x5e, 0x52, 0xb6, 0x75, 0xde, 0x95, 0x03, 0x25, 0x63, 0x8c, 0x51, 0x9c, 0x11, 0x58, + 0x09, 0x78, 0x4a, 0xa4, 0x79, 0x86, 0x58, 0x12, 0x92, 0x9d, 0x92, 0xd7, 0xd9, 0xe5, 0xde, 0x60, + 0xaf, 0xb0, 0xc7, 0xd8, 0xe5, 0x5e, 0x61, 0x37, 0xc3, 0x96, 0xe4, 0xc6, 0xed, 0xd2, 0xe4, 0xee, + 0xfb, 0xa4, 0x73, 0xce, 0xf7, 0x47, 0x47, 0xe0, 0x34, 0x66, 0x2c, 0x5e, 0x10, 0x0f, 0x61, 0xe9, + 0xa9, 0xb0, 0x88, 0x96, 0x7d, 0x4f, 0x10, 0xc9, 0x72, 0x31, 0x27, 0xd2, 0x43, 0x38, 0x8a, 0x05, + 0xcb, 0x79, 0x84, 0x30, 0xe4, 0x82, 0x65, 0xcc, 0x39, 0x56, 0x48, 0x88, 0xb0, 0x84, 0x15, 0x09, + 0x2e, 0xfb, 0xb0, 0x22, 0x1d, 0x3e, 0xdb, 0xa4, 0x3b, 0x67, 0x69, 0xca, 0xa8, 0xc7, 0xd9, 0x22, + 0x99, 0xaf, 0x94, 0xde, 0xe1, 0xcb, 0x4d, 0x60, 0x42, 0xf3, 0xb4, 0xd6, 0x40, 0x24, 0x33, 0x94, + 0xe5, 0x52, 0xf3, 0xbc, 0xad, 0x3c, 0x99, 0x09, 0x42, 0xe3, 0xec, 0xab, 0x26, 
0xf8, 0x77, 0x13, + 0x54, 0x53, 0x11, 0xe2, 0x5c, 0xb0, 0x25, 0x5a, 0xd4, 0x8b, 0x9d, 0xed, 0xc4, 0x15, 0x64, 0x99, + 0x90, 0xab, 0x3a, 0xf3, 0xe9, 0x2e, 0x3b, 0xd6, 0xd8, 0xc7, 0x1a, 0x5b, 0x66, 0xb3, 0xfc, 0x8b, + 0x77, 0x25, 0x10, 0xe7, 0x44, 0x18, 0xad, 0x23, 0xa3, 0xc5, 0x13, 0x0f, 0x51, 0xca, 0x32, 0x94, + 0x25, 0x8c, 0xea, 0xdb, 0x93, 0x1f, 0x0d, 0xb0, 0x1f, 0xe0, 0x51, 0xb1, 0xac, 0x00, 0x3b, 0x4f, + 0xc0, 0x81, 0xa9, 0x10, 0x51, 0x94, 0x12, 0xd7, 0xea, 0x58, 0xbd, 0xfd, 0xf0, 0xbe, 0x39, 0xfc, + 0x80, 0x52, 0xe2, 0x5c, 0x82, 0xa6, 0x6a, 0xd6, 0x6d, 0x74, 0xac, 0x5e, 0x7b, 0x30, 0x84, 0x9b, + 0x1e, 0xb7, 0x9c, 0x13, 0x56, 0xf2, 0xe3, 0x92, 0x75, 0x4e, 0xf3, 0xf4, 0xe6, 0x59, 0xa8, 0x15, + 0x9d, 0x57, 0xe0, 0x9e, 0x79, 0x3b, 0xf7, 0xbf, 0x8e, 0xd5, 0x6b, 0x0d, 0x8e, 0x8c, 0xba, 0x99, + 0x0f, 0x8e, 0x33, 0x91, 0xd0, 0x78, 0x82, 0x16, 0x39, 0x09, 0xf7, 0x90, 0x12, 0x72, 0x5e, 0x00, + 0x1b, 0x61, 0xf7, 0xff, 0x92, 0xd2, 0x85, 0x5b, 0xdd, 0x06, 0x03, 0x1c, 0xda, 0x08, 0x3b, 0x9f, + 0x41, 0x5b, 0x3f, 0x83, 0xcc, 0xd3, 0x14, 0x89, 0x95, 0xdb, 0x2c, 0x25, 0x5e, 0xef, 0x24, 0xa1, + 0x67, 0xb8, 0x28, 0x15, 0xc6, 0x4a, 0x20, 0x3c, 0xe0, 0xeb, 0xa9, 0xf3, 0x09, 0xb4, 0xd6, 0x5c, + 0xe5, 0xee, 0x95, 0x2b, 0x3b, 0xdb, 0xba, 0xb2, 0xb1, 0x26, 0xe8, 0x6d, 0x99, 0x34, 0x04, 0xa8, + 0x8a, 0x4f, 0x7e, 0xd9, 0xe0, 0xd1, 0xbf, 0x9b, 0x70, 0x66, 0xe0, 0xa1, 0x9e, 0x2b, 0x63, 0x3c, + 0x99, 0x47, 0x84, 0x66, 0x22, 0x21, 0xd2, 0xb5, 0x3a, 0x8d, 0x5e, 0x6b, 0xf0, 0x7c, 0x63, 0x79, + 0xf5, 0xd7, 0xa0, 0x12, 0xfb, 0x58, 0x50, 0xcf, 0x69, 0x26, 0x56, 0xa1, 0xc3, 0xeb, 0x27, 0x09, + 0x91, 0x4e, 0x5a, 0x98, 0x65, 0xcd, 0xbb, 0xae, 0x5d, 0xce, 0xf6, 0x76, 0xcb, 0x6c, 0x4a, 0x3b, + 0x2c, 0x99, 0x6b, 0x8e, 0xb8, 0x7d, 0x5c, 0xd8, 0xee, 0x3a, 0x73, 0x72, 0xf0, 0xe0, 0xc6, 0x37, + 0xd3, 0xfe, 0x7b, 0xbf, 0x53, 0xc1, 0x40, 0x73, 0x6f, 0x95, 0xac, 0x5f, 0x84, 0x6d, 0x54, 0xcb, + 0x87, 0x7f, 0x2c, 0xd0, 0x9d, 0xb3, 0x74, 0xbb, 0x1f, 0x86, 0xed, 0xeb, 0xb7, 0x28, 0xac, 0x7a, + 0x61, 0x5d, 0xbe, 0xd3, 0xa4, 0x98, 0x2d, 0x10, 0x8d, 0x21, 0x13, 0xb1, 0x17, 0x13, 0x5a, 0x1a, + 0xd9, 0x7c, 0x6b, 0x9e, 0xc8, 0x3b, 0x7e, 0xf9, 0x9b, 0x2a, 0xfa, 0x66, 0x37, 0x46, 0x41, 0xf0, + 0xdd, 0x3e, 0x1e, 0x29, 0xc9, 0x00, 0x4b, 0xa8, 0xc2, 0x22, 0x9a, 0xf4, 0x61, 0x68, 0x90, 0x3f, + 0x0d, 0x66, 0x1a, 0x60, 0x39, 0xad, 0x30, 0xd3, 0x49, 0x7f, 0x5a, 0x61, 0x7e, 0xdb, 0x5d, 0x75, + 0xe1, 0xfb, 0x01, 0x96, 0xbe, 0x5f, 0xa1, 0x7c, 0x7f, 0xd2, 0xf7, 0xfd, 0x0a, 0x37, 0x6b, 0x96, + 0xcd, 0x9e, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xda, 0xae, 0x85, 0x3e, 0xf5, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad_label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad_label.pb.go new file mode 100644 index 0000000..af9b934 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_ad_label.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_ad_label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A relationship between an ad group ad and a label. +type AdGroupAdLabel struct { + // The resource name of the ad group ad label. + // Ad group ad label resource names have the form: + // `customers/{customer_id}/adGroupAdLabels/{ad_group_id}~{ad_id}~{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ad group ad to which the label is attached. + AdGroupAd *wrappers.StringValue `protobuf:"bytes,2,opt,name=ad_group_ad,json=adGroupAd,proto3" json:"ad_group_ad,omitempty"` + // The label assigned to the ad group ad. + Label *wrappers.StringValue `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdLabel) Reset() { *m = AdGroupAdLabel{} } +func (m *AdGroupAdLabel) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdLabel) ProtoMessage() {} +func (*AdGroupAdLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_6145341efa86b0bb, []int{0} +} +func (m *AdGroupAdLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdLabel.Unmarshal(m, b) +} +func (m *AdGroupAdLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdLabel.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdLabel.Merge(dst, src) +} +func (m *AdGroupAdLabel) XXX_Size() int { + return xxx_messageInfo_AdGroupAdLabel.Size(m) +} +func (m *AdGroupAdLabel) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdLabel proto.InternalMessageInfo + +func (m *AdGroupAdLabel) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupAdLabel) GetAdGroupAd() *wrappers.StringValue { + if m != nil { + return m.AdGroupAd + } + return nil +} + +func (m *AdGroupAdLabel) GetLabel() *wrappers.StringValue { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*AdGroupAdLabel)(nil), "google.ads.googleads.v1.resources.AdGroupAdLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_ad_label.proto", fileDescriptor_ad_group_ad_label_6145341efa86b0bb) +} + +var fileDescriptor_ad_group_ad_label_6145341efa86b0bb = []byte{ + // 331 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcf, 0x4a, 0xc3, 0x30, + 0x00, 0xc6, 0x69, 0x87, 0xc2, 0x3a, 0xf5, 0x50, 0x2f, 0x63, 0x0c, 0xd9, 0x94, 0xc1, 0x4e, 0x09, + 0x9d, 0x27, 0xa3, 0x97, 0xec, 0x32, 0x10, 0x91, 0x31, 0xa1, 0x07, 0x29, 0x94, 0x6c, 0x89, 0xa1, + 0xd0, 0x26, 0x25, 0x69, 0xe7, 0xd5, 0x67, 0xf1, 0x22, 0xf8, 0x28, 0x3e, 0x8a, 0x4f, 0x21, 0x6d, + 0x9a, 0xa0, 0x17, 0xf5, 0xf6, 0xd1, 0xfc, 0xbe, 0x3f, 0x69, 0x82, 0x2b, 0x2e, 0x25, 0xcf, 0x19, + 0x24, 0x54, 0x43, 0x23, 0x1b, 0xb5, 0x8f, 0xa0, 0x62, 0x5a, 0xd6, 0x6a, 0xc7, 0x34, 0x24, 0x34, + 0xe5, 0x4a, 0xd6, 0x65, 0x4a, 0x68, 0x9a, 0x93, 
0x2d, 0xcb, 0x41, 0xa9, 0x64, 0x25, 0xc3, 0xa9, + 0xe1, 0x01, 0xa1, 0x1a, 0x38, 0x2b, 0xd8, 0x47, 0xc0, 0x59, 0x47, 0x67, 0x5d, 0x7a, 0x6b, 0xd8, + 0xd6, 0x4f, 0xf0, 0x59, 0x91, 0xb2, 0x64, 0x4a, 0x9b, 0x88, 0xd1, 0xd8, 0xb6, 0x97, 0x19, 0x24, + 0x42, 0xc8, 0x8a, 0x54, 0x99, 0x14, 0xdd, 0xe9, 0xf9, 0x9b, 0x17, 0x9c, 0x60, 0xba, 0x6a, 0xba, + 0x31, 0xbd, 0x6b, 0x9a, 0xc3, 0x8b, 0xe0, 0xd8, 0xa6, 0xa7, 0x82, 0x14, 0x6c, 0xe8, 0x4d, 0xbc, + 0x79, 0x7f, 0x73, 0x64, 0x3f, 0xde, 0x93, 0x82, 0x85, 0x37, 0xc1, 0xe0, 0xdb, 0xe6, 0xa1, 0x3f, + 0xf1, 0xe6, 0x83, 0xc5, 0xb8, 0xdb, 0x08, 0xec, 0x16, 0xf0, 0x50, 0xa9, 0x4c, 0xf0, 0x98, 0xe4, + 0x35, 0xdb, 0xf4, 0x89, 0xed, 0x09, 0x17, 0xc1, 0x41, 0x7b, 0xcb, 0x61, 0xef, 0x1f, 0x3e, 0x83, + 0x2e, 0x5f, 0xfc, 0x60, 0xb6, 0x93, 0x05, 0xf8, 0xf3, 0x8f, 0x2c, 0x4f, 0x7f, 0x5e, 0x68, 0xdd, + 0x84, 0xae, 0xbd, 0xc7, 0xdb, 0xce, 0xc9, 0x65, 0x4e, 0x04, 0x07, 0x52, 0x71, 0xc8, 0x99, 0x68, + 0x2b, 0xed, 0xb3, 0x94, 0x99, 0xfe, 0xe5, 0x95, 0xae, 0x9d, 0x7a, 0xf5, 0x7b, 0x2b, 0x8c, 0xdf, + 0xfd, 0xe9, 0xca, 0x44, 0x62, 0xaa, 0x81, 0x91, 0x8d, 0x8a, 0x23, 0xb0, 0xb1, 0xe4, 0x87, 0x65, + 0x12, 0x4c, 0x75, 0xe2, 0x98, 0x24, 0x8e, 0x12, 0xc7, 0x7c, 0xfa, 0x33, 0x73, 0x80, 0x10, 0xa6, + 0x1a, 0x21, 0x47, 0x21, 0x14, 0x47, 0x08, 0x39, 0x6e, 0x7b, 0xd8, 0x8e, 0xbd, 0xfc, 0x0a, 0x00, + 0x00, 0xff, 0xff, 0x3b, 0xaf, 0x27, 0xf8, 0x51, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_audience_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_audience_view.pb.go new file mode 100644 index 0000000..526808c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_audience_view.pb.go @@ -0,0 +1,96 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_audience_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group audience view. +// Includes performance data from interests and remarketing lists for Display +// Network and YouTube Network ads, and remarketing lists for search ads (RLSA), +// aggregated at the audience level. +type AdGroupAudienceView struct { + // The resource name of the ad group audience view. 
+ // Ad group audience view resource names have the form: + // + // `customers/{customer_id}/adGroupAudienceViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAudienceView) Reset() { *m = AdGroupAudienceView{} } +func (m *AdGroupAudienceView) String() string { return proto.CompactTextString(m) } +func (*AdGroupAudienceView) ProtoMessage() {} +func (*AdGroupAudienceView) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_audience_view_52e87a3873ae29dc, []int{0} +} +func (m *AdGroupAudienceView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAudienceView.Unmarshal(m, b) +} +func (m *AdGroupAudienceView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAudienceView.Marshal(b, m, deterministic) +} +func (dst *AdGroupAudienceView) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAudienceView.Merge(dst, src) +} +func (m *AdGroupAudienceView) XXX_Size() int { + return xxx_messageInfo_AdGroupAudienceView.Size(m) +} +func (m *AdGroupAudienceView) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAudienceView.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAudienceView proto.InternalMessageInfo + +func (m *AdGroupAudienceView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*AdGroupAudienceView)(nil), "google.ads.googleads.v1.resources.AdGroupAudienceView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_audience_view.proto", fileDescriptor_ad_group_audience_view_52e87a3873ae29dc) +} + +var fileDescriptor_ad_group_audience_view_52e87a3873ae29dc = []byte{ + // 276 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x4f, 0x4a, 0xc4, 0x30, + 0x14, 0xc6, 0x99, 0x0a, 0x82, 0x45, 0x37, 0xe3, 0x66, 0x10, 0x17, 0x8e, 0x32, 0xe0, 0x2a, 0x21, + 0xb8, 0x8b, 0x20, 0x64, 0x36, 0x05, 0x17, 0x32, 0xcc, 0xa2, 0x0b, 0x29, 0x94, 0xd8, 0x3c, 0x42, + 0x60, 0x9a, 0x57, 0x92, 0xb6, 0x73, 0x02, 0x2f, 0xe2, 0xd2, 0xa3, 0x78, 0x14, 0x4f, 0x21, 0x9d, + 0x4c, 0xb2, 0x12, 0xdd, 0x7d, 0x24, 0xbf, 0xef, 0x0f, 0x2f, 0x7f, 0xd2, 0x88, 0x7a, 0x07, 0x54, + 0x2a, 0x4f, 0x83, 0x9c, 0xd4, 0xc8, 0xa8, 0x03, 0x8f, 0x83, 0x6b, 0xc0, 0x53, 0xa9, 0x6a, 0xed, + 0x70, 0xe8, 0x6a, 0x39, 0x28, 0x03, 0xb6, 0x81, 0x7a, 0x34, 0xb0, 0x27, 0x9d, 0xc3, 0x1e, 0xe7, + 0xcb, 0x60, 0x22, 0x52, 0x79, 0x92, 0xfc, 0x64, 0x64, 0x24, 0xf9, 0xaf, 0xae, 0x63, 0x45, 0x67, + 0xa8, 0xb4, 0x16, 0x7b, 0xd9, 0x1b, 0xb4, 0x3e, 0x04, 0xdc, 0xf2, 0xfc, 0x52, 0xa8, 0x62, 0xca, + 0x17, 0xc7, 0xf8, 0xd2, 0xc0, 0x7e, 0x7e, 0x97, 0x5f, 0xc4, 0x84, 0xda, 0xca, 0x16, 0x16, 0xb3, + 0x9b, 0xd9, 0xfd, 0xd9, 0xf6, 0x3c, 0x3e, 0xbe, 0xc8, 0x16, 0xd6, 0xef, 0x59, 0xbe, 0x6a, 0xb0, + 0x25, 0xff, 0x6e, 0x58, 0x2f, 0x7e, 0xe9, 0xd8, 0x4c, 0xfd, 0x9b, 0xd9, 0xeb, 0xf3, 0xd1, 0xae, + 0x71, 0x27, 0xad, 0x26, 0xe8, 0x34, 0xd5, 0x60, 0x0f, 0xeb, 0xe2, 0x49, 0x3a, 0xe3, 0xff, 0xb8, + 0xd0, 0x63, 0x52, 0x1f, 0xd9, 0x49, 0x21, 0xc4, 0x67, 0xb6, 0x2c, 0x42, 0xa4, 0x50, 0x9e, 0x04, + 0x39, 0xa9, 0x92, 0x91, 0x6d, 0x24, 0xbf, 0x22, 0x53, 0x09, 0xe5, 0xab, 0xc4, 0x54, 0x25, 0xab, + 0x12, 0xf3, 0x9d, 0xad, 0xc2, 0x07, 0xe7, 0x42, 0x79, 0xce, 0x13, 0xc5, 0x79, 0xc9, 0x38, 0x4f, + 0xdc, 0xdb, 0xe9, 0x61, 0xec, 
0xc3, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x68, 0xe3, 0x2c, 0x1c, + 0xcd, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_bid_modifier.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_bid_modifier.pb.go new file mode 100644 index 0000000..ea56cea --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_bid_modifier.pb.go @@ -0,0 +1,424 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_bid_modifier.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents an ad group bid modifier. +type AdGroupBidModifier struct { + // The resource name of the ad group bid modifier. + // Ad group bid modifier resource names have the form: + // + // `customers/{customer_id}/adGroupBidModifiers/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ad group to which this criterion belongs. + AdGroup *wrappers.StringValue `protobuf:"bytes,2,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The ID of the criterion to bid modify. + // + // This field is ignored for mutates. + CriterionId *wrappers.Int64Value `protobuf:"bytes,3,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The modifier for the bid when the criterion matches. The modifier must be + // in the range: 0.1 - 10.0. The range is 1.0 - 6.0 for PreferredContent. + // Use 0 to opt out of a Device type. + BidModifier *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // The base ad group from which this draft/trial adgroup bid modifier was + // created. If ad_group is a base ad group then this field will be equal to + // ad_group. If the ad group was created in the draft or trial and has no + // corresponding base ad group, then this field will be null. + // This field is readonly. + BaseAdGroup *wrappers.StringValue `protobuf:"bytes,9,opt,name=base_ad_group,json=baseAdGroup,proto3" json:"base_ad_group,omitempty"` + // Bid modifier source. + BidModifierSource enums.BidModifierSourceEnum_BidModifierSource `protobuf:"varint,10,opt,name=bid_modifier_source,json=bidModifierSource,proto3,enum=google.ads.googleads.v1.enums.BidModifierSourceEnum_BidModifierSource" json:"bid_modifier_source,omitempty"` + // The criterion of this ad group bid modifier. 
+ // + // Types that are valid to be assigned to Criterion: + // *AdGroupBidModifier_HotelDateSelectionType + // *AdGroupBidModifier_HotelAdvanceBookingWindow + // *AdGroupBidModifier_HotelLengthOfStay + // *AdGroupBidModifier_HotelCheckInDay + // *AdGroupBidModifier_Device + // *AdGroupBidModifier_PreferredContent + Criterion isAdGroupBidModifier_Criterion `protobuf_oneof:"criterion"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupBidModifier) Reset() { *m = AdGroupBidModifier{} } +func (m *AdGroupBidModifier) String() string { return proto.CompactTextString(m) } +func (*AdGroupBidModifier) ProtoMessage() {} +func (*AdGroupBidModifier) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_c58bd31b1ada3574, []int{0} +} +func (m *AdGroupBidModifier) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupBidModifier.Unmarshal(m, b) +} +func (m *AdGroupBidModifier) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupBidModifier.Marshal(b, m, deterministic) +} +func (dst *AdGroupBidModifier) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupBidModifier.Merge(dst, src) +} +func (m *AdGroupBidModifier) XXX_Size() int { + return xxx_messageInfo_AdGroupBidModifier.Size(m) +} +func (m *AdGroupBidModifier) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupBidModifier.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupBidModifier proto.InternalMessageInfo + +func (m *AdGroupBidModifier) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupBidModifier) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupBidModifier) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *AdGroupBidModifier) GetBidModifier() *wrappers.DoubleValue { + if m != nil { + return m.BidModifier + } + return nil +} + +func (m *AdGroupBidModifier) GetBaseAdGroup() *wrappers.StringValue { + if m != nil { + return m.BaseAdGroup + } + return nil +} + +func (m *AdGroupBidModifier) GetBidModifierSource() enums.BidModifierSourceEnum_BidModifierSource { + if m != nil { + return m.BidModifierSource + } + return enums.BidModifierSourceEnum_UNSPECIFIED +} + +type isAdGroupBidModifier_Criterion interface { + isAdGroupBidModifier_Criterion() +} + +type AdGroupBidModifier_HotelDateSelectionType struct { + HotelDateSelectionType *common.HotelDateSelectionTypeInfo `protobuf:"bytes,5,opt,name=hotel_date_selection_type,json=hotelDateSelectionType,proto3,oneof"` +} + +type AdGroupBidModifier_HotelAdvanceBookingWindow struct { + HotelAdvanceBookingWindow *common.HotelAdvanceBookingWindowInfo `protobuf:"bytes,6,opt,name=hotel_advance_booking_window,json=hotelAdvanceBookingWindow,proto3,oneof"` +} + +type AdGroupBidModifier_HotelLengthOfStay struct { + HotelLengthOfStay *common.HotelLengthOfStayInfo `protobuf:"bytes,7,opt,name=hotel_length_of_stay,json=hotelLengthOfStay,proto3,oneof"` +} + +type AdGroupBidModifier_HotelCheckInDay struct { + HotelCheckInDay *common.HotelCheckInDayInfo `protobuf:"bytes,8,opt,name=hotel_check_in_day,json=hotelCheckInDay,proto3,oneof"` +} + +type AdGroupBidModifier_Device struct { + Device *common.DeviceInfo `protobuf:"bytes,11,opt,name=device,proto3,oneof"` +} + +type AdGroupBidModifier_PreferredContent struct { + PreferredContent *common.PreferredContentInfo 
`protobuf:"bytes,12,opt,name=preferred_content,json=preferredContent,proto3,oneof"` +} + +func (*AdGroupBidModifier_HotelDateSelectionType) isAdGroupBidModifier_Criterion() {} + +func (*AdGroupBidModifier_HotelAdvanceBookingWindow) isAdGroupBidModifier_Criterion() {} + +func (*AdGroupBidModifier_HotelLengthOfStay) isAdGroupBidModifier_Criterion() {} + +func (*AdGroupBidModifier_HotelCheckInDay) isAdGroupBidModifier_Criterion() {} + +func (*AdGroupBidModifier_Device) isAdGroupBidModifier_Criterion() {} + +func (*AdGroupBidModifier_PreferredContent) isAdGroupBidModifier_Criterion() {} + +func (m *AdGroupBidModifier) GetCriterion() isAdGroupBidModifier_Criterion { + if m != nil { + return m.Criterion + } + return nil +} + +func (m *AdGroupBidModifier) GetHotelDateSelectionType() *common.HotelDateSelectionTypeInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_HotelDateSelectionType); ok { + return x.HotelDateSelectionType + } + return nil +} + +func (m *AdGroupBidModifier) GetHotelAdvanceBookingWindow() *common.HotelAdvanceBookingWindowInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_HotelAdvanceBookingWindow); ok { + return x.HotelAdvanceBookingWindow + } + return nil +} + +func (m *AdGroupBidModifier) GetHotelLengthOfStay() *common.HotelLengthOfStayInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_HotelLengthOfStay); ok { + return x.HotelLengthOfStay + } + return nil +} + +func (m *AdGroupBidModifier) GetHotelCheckInDay() *common.HotelCheckInDayInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_HotelCheckInDay); ok { + return x.HotelCheckInDay + } + return nil +} + +func (m *AdGroupBidModifier) GetDevice() *common.DeviceInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_Device); ok { + return x.Device + } + return nil +} + +func (m *AdGroupBidModifier) GetPreferredContent() *common.PreferredContentInfo { + if x, ok := m.GetCriterion().(*AdGroupBidModifier_PreferredContent); ok { + return x.PreferredContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupBidModifier) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupBidModifier_OneofMarshaler, _AdGroupBidModifier_OneofUnmarshaler, _AdGroupBidModifier_OneofSizer, []interface{}{ + (*AdGroupBidModifier_HotelDateSelectionType)(nil), + (*AdGroupBidModifier_HotelAdvanceBookingWindow)(nil), + (*AdGroupBidModifier_HotelLengthOfStay)(nil), + (*AdGroupBidModifier_HotelCheckInDay)(nil), + (*AdGroupBidModifier_Device)(nil), + (*AdGroupBidModifier_PreferredContent)(nil), + } +} + +func _AdGroupBidModifier_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupBidModifier) + // criterion + switch x := m.Criterion.(type) { + case *AdGroupBidModifier_HotelDateSelectionType: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelDateSelectionType); err != nil { + return err + } + case *AdGroupBidModifier_HotelAdvanceBookingWindow: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelAdvanceBookingWindow); err != nil { + return err + } + case *AdGroupBidModifier_HotelLengthOfStay: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelLengthOfStay); err != nil { + return err + } + case *AdGroupBidModifier_HotelCheckInDay: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotelCheckInDay); err != nil { + return err + } + case *AdGroupBidModifier_Device: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Device); err != nil { + return err + } + case *AdGroupBidModifier_PreferredContent: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PreferredContent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AdGroupBidModifier.Criterion has unexpected type %T", x) + } + return nil +} + +func _AdGroupBidModifier_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupBidModifier) + switch tag { + case 5: // criterion.hotel_date_selection_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.HotelDateSelectionTypeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_HotelDateSelectionType{msg} + return true, err + case 6: // criterion.hotel_advance_booking_window + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.HotelAdvanceBookingWindowInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_HotelAdvanceBookingWindow{msg} + return true, err + case 7: // criterion.hotel_length_of_stay + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.HotelLengthOfStayInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_HotelLengthOfStay{msg} + return true, err + case 8: // criterion.hotel_check_in_day + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.HotelCheckInDayInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_HotelCheckInDay{msg} + return true, err + case 11: // criterion.device + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.DeviceInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_Device{msg} + return true, err + case 12: // criterion.preferred_content + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + msg := new(common.PreferredContentInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupBidModifier_PreferredContent{msg} + return true, err + default: + return false, nil + } +} + +func _AdGroupBidModifier_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupBidModifier) + // criterion + switch x := m.Criterion.(type) { + case *AdGroupBidModifier_HotelDateSelectionType: + s := proto.Size(x.HotelDateSelectionType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifier_HotelAdvanceBookingWindow: + s := proto.Size(x.HotelAdvanceBookingWindow) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifier_HotelLengthOfStay: + s := proto.Size(x.HotelLengthOfStay) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifier_HotelCheckInDay: + s := proto.Size(x.HotelCheckInDay) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifier_Device: + s := proto.Size(x.Device) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifier_PreferredContent: + s := proto.Size(x.PreferredContent) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AdGroupBidModifier)(nil), "google.ads.googleads.v1.resources.AdGroupBidModifier") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_bid_modifier.proto", fileDescriptor_ad_group_bid_modifier_c58bd31b1ada3574) +} + +var fileDescriptor_ad_group_bid_modifier_c58bd31b1ada3574 = []byte{ + // 694 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xdd, 0x6e, 0xd3, 0x48, + 0x14, 0xde, 0xa4, 0xbb, 0xfd, 0x99, 0xa4, 0xbb, 0x5b, 0xef, 0x6a, 0xd7, 0x94, 0x0a, 0xb5, 0xa0, + 0x4a, 0x15, 0x12, 0xb6, 0xd2, 0x16, 0x2a, 0x19, 0x15, 0x48, 0x1a, 0x68, 0x83, 0xf8, 0xa9, 0x12, + 0x14, 0x24, 0x14, 0x69, 0x34, 0xf6, 0x9c, 0x38, 0xa3, 0xc6, 0x33, 0x96, 0x3d, 0x49, 0x94, 0x3b, + 0x2e, 0x78, 0x12, 0x2e, 0xb9, 0xe1, 0x3d, 0x78, 0x14, 0x9e, 0x02, 0x79, 0x66, 0x6c, 0x85, 0xb6, + 0xa1, 0xb9, 0x3b, 0x3e, 0xe7, 0xfb, 0x39, 0xe7, 0x8c, 0x67, 0xd0, 0x71, 0x28, 0x44, 0x38, 0x04, + 0x97, 0xd0, 0xd4, 0xd5, 0x61, 0x16, 0x8d, 0x6b, 0x6e, 0x02, 0xa9, 0x18, 0x25, 0x01, 0xa4, 0x2e, + 0xa1, 0x38, 0x4c, 0xc4, 0x28, 0xc6, 0x3e, 0xa3, 0x38, 0x12, 0x94, 0xf5, 0x19, 0x24, 0x4e, 0x9c, + 0x08, 0x29, 0xac, 0x1d, 0xcd, 0x71, 0x08, 0x4d, 0x9d, 0x82, 0xee, 0x8c, 0x6b, 0x4e, 0x41, 0xdf, + 0x7c, 0x30, 0xcf, 0x21, 0x10, 0x51, 0x24, 0xb8, 0x1b, 0x24, 0x4c, 0x42, 0xc2, 0x88, 0x56, 0xdc, + 0x3c, 0x9a, 0x07, 0x07, 0x3e, 0x8a, 0x52, 0x77, 0xb6, 0x07, 0xac, 0x2d, 0x0c, 0xf1, 0x8e, 0x21, + 0xaa, 0x2f, 0x7f, 0xd4, 0x77, 0x27, 0x09, 0x89, 0x63, 0x48, 0x52, 0x53, 0xdf, 0xca, 0x85, 0x63, + 0xe6, 0x12, 0xce, 0x85, 0x24, 0x92, 0x09, 0x6e, 0xaa, 0x77, 0xbf, 0xae, 0x22, 0xab, 0x4e, 0x4f, + 0xb3, 0x39, 0x1b, 0x8c, 0xbe, 0x36, 0x0e, 0xd6, 0x3d, 0xb4, 0x9e, 0x4f, 0x82, 0x39, 0x89, 0xc0, + 0x2e, 0x6d, 0x97, 0xf6, 0xd6, 0xda, 0xd5, 0x3c, 0xf9, 0x86, 0x44, 0x60, 0x1d, 0xa1, 0xd5, 0x7c, + 0x47, 0x76, 0x79, 0xbb, 0xb4, 0x57, 0xd9, 0xdf, 0x32, 0xcb, 0x70, 0xf2, 0x66, 0x9c, 0x8e, 0x4c, + 0x18, 0x0f, 0xbb, 0x64, 0x38, 0x82, 0xf6, 0x0a, 0xd1, 0x46, 0xd6, 0x13, 0x54, 0x35, 0xd3, 0x0b, + 0x8e, 0x19, 0xb5, 0x97, 0x14, 0xf9, 0xf6, 0x15, 0x72, 0x8b, 0xcb, 0x47, 0x87, 0x9a, 
0x5b, 0x29, + 0x08, 0x2d, 0x6a, 0x3d, 0x45, 0xd5, 0xd9, 0x7d, 0xd8, 0xbf, 0xcf, 0x31, 0x6f, 0x8a, 0x91, 0x3f, + 0x04, 0x23, 0xe0, 0xcf, 0x8c, 0xf7, 0x0c, 0xad, 0xfb, 0x24, 0x05, 0x5c, 0xb4, 0xbf, 0xb6, 0x40, + 0xfb, 0x95, 0x8c, 0x62, 0x76, 0x65, 0x8d, 0xd1, 0x3f, 0xd7, 0x1c, 0x89, 0x8d, 0xb6, 0x4b, 0x7b, + 0x7f, 0xee, 0xbf, 0x70, 0xe6, 0xfd, 0x1e, 0xea, 0x30, 0x9d, 0x99, 0x4d, 0x77, 0x14, 0xef, 0x39, + 0x1f, 0x45, 0x57, 0xb3, 0xed, 0x0d, 0xff, 0x72, 0xca, 0x9a, 0xa0, 0x5b, 0x03, 0x21, 0x61, 0x88, + 0x29, 0x91, 0x80, 0x53, 0x18, 0x42, 0x90, 0x1d, 0x27, 0x96, 0xd3, 0x18, 0xec, 0x3f, 0xd4, 0x14, + 0xde, 0x5c, 0x77, 0xfd, 0xe7, 0x39, 0x67, 0x99, 0x40, 0x93, 0x48, 0xe8, 0xe4, 0xf4, 0x77, 0xd3, + 0x18, 0x5a, 0xbc, 0x2f, 0xce, 0x7e, 0x6b, 0xff, 0x37, 0xb8, 0xb6, 0x6a, 0x7d, 0x2c, 0xa1, 0x2d, + 0xed, 0x4c, 0xe8, 0x98, 0xf0, 0x00, 0xb0, 0x2f, 0xc4, 0x05, 0xe3, 0x21, 0x9e, 0x30, 0x4e, 0xc5, + 0xc4, 0x5e, 0x56, 0xe6, 0xc7, 0x0b, 0x99, 0xd7, 0xb5, 0x44, 0x43, 0x2b, 0xbc, 0x57, 0x02, 0xc6, + 0x5f, 0x8f, 0x77, 0x1d, 0xc0, 0x1a, 0xa0, 0x7f, 0x75, 0x07, 0x43, 0xe0, 0xa1, 0x1c, 0x60, 0xd1, + 0xc7, 0xa9, 0x24, 0x53, 0x7b, 0x45, 0x39, 0x3f, 0x5c, 0xc8, 0xf9, 0x95, 0xa2, 0xbe, 0xed, 0x77, + 0x24, 0x99, 0x1a, 0xc7, 0x8d, 0xc1, 0xe5, 0x82, 0xe5, 0x23, 0x4b, 0x3b, 0x05, 0x03, 0x08, 0x2e, + 0x30, 0xe3, 0x98, 0x92, 0xa9, 0xbd, 0xaa, 0x7c, 0x0e, 0x16, 0xf2, 0x39, 0xc9, 0x88, 0x2d, 0xde, + 0x2c, 0x5c, 0xfe, 0x1a, 0xfc, 0x9c, 0xb6, 0x9a, 0x68, 0x99, 0xc2, 0x98, 0x05, 0x60, 0x57, 0x94, + 0xee, 0xfd, 0x9b, 0x74, 0x9b, 0x0a, 0x6d, 0xe4, 0x0c, 0xd7, 0x0a, 0xd0, 0x46, 0x9c, 0x40, 0x1f, + 0x92, 0x04, 0x28, 0x0e, 0x04, 0x97, 0xc0, 0xa5, 0x5d, 0x55, 0x82, 0x87, 0x37, 0x09, 0x9e, 0xe7, + 0xc4, 0x13, 0xcd, 0x33, 0xd2, 0x7f, 0xc7, 0x97, 0xf2, 0x8d, 0x0a, 0x5a, 0x2b, 0xae, 0x5f, 0xe3, + 0x53, 0x19, 0xed, 0x06, 0x22, 0x72, 0x6e, 0x7c, 0x01, 0x1b, 0xff, 0x5f, 0x7d, 0x58, 0xce, 0xb3, + 0x8b, 0x75, 0x5e, 0xfa, 0xf0, 0xd2, 0xb0, 0x43, 0x31, 0x24, 0x3c, 0x74, 0x44, 0x12, 0xba, 0x21, + 0x70, 0x75, 0xed, 0xf2, 0xd7, 0x2f, 0x66, 0xe9, 0x2f, 0x5e, 0xe7, 0xc7, 0x45, 0xf4, 0xb9, 0xbc, + 0x74, 0x5a, 0xaf, 0x7f, 0x29, 0xef, 0x9c, 0x6a, 0xc9, 0x3a, 0x4d, 0x1d, 0x1d, 0x66, 0x51, 0xb7, + 0xe6, 0xb4, 0x73, 0xe4, 0xb7, 0x1c, 0xd3, 0xab, 0xd3, 0xb4, 0x57, 0x60, 0x7a, 0xdd, 0x5a, 0xaf, + 0xc0, 0x7c, 0x2f, 0xef, 0xea, 0x82, 0xe7, 0xd5, 0x69, 0xea, 0x79, 0x05, 0xca, 0xf3, 0xba, 0x35, + 0xcf, 0x2b, 0x70, 0xfe, 0xb2, 0x6a, 0xf6, 0xe0, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb4, 0xc7, + 0xdc, 0x80, 0x49, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion.pb.go new file mode 100644 index 0000000..2dd9861 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion.pb.go @@ -0,0 +1,1238 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/ad_group_criterion.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group criterion. +type AdGroupCriterion struct { + // The resource name of the ad group criterion. + // Ad group criterion resource names have the form: + // + // `customers/{customer_id}/adGroupCriteria/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the criterion. + // + // This field is ignored for mutates. + CriterionId *wrappers.Int64Value `protobuf:"bytes,26,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The status of the criterion. + Status enums.AdGroupCriterionStatusEnum_AdGroupCriterionStatus `protobuf:"varint,3,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.AdGroupCriterionStatusEnum_AdGroupCriterionStatus" json:"status,omitempty"` + // Information regarding the quality of the criterion. + QualityInfo *AdGroupCriterion_QualityInfo `protobuf:"bytes,4,opt,name=quality_info,json=qualityInfo,proto3" json:"quality_info,omitempty"` + // The ad group to which the criterion belongs. + AdGroup *wrappers.StringValue `protobuf:"bytes,5,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The type of the criterion. + Type enums.CriterionTypeEnum_CriterionType `protobuf:"varint,25,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.CriterionTypeEnum_CriterionType" json:"type,omitempty"` + // Whether to target (`false`) or exclude (`true`) the criterion. + // + // This field is immutable. To switch a criterion from positive to negative, + // remove then re-add it. + Negative *wrappers.BoolValue `protobuf:"bytes,31,opt,name=negative,proto3" json:"negative,omitempty"` + // Serving status of the criterion. + SystemServingStatus enums.CriterionSystemServingStatusEnum_CriterionSystemServingStatus `protobuf:"varint,52,opt,name=system_serving_status,json=systemServingStatus,proto3,enum=google.ads.googleads.v1.enums.CriterionSystemServingStatusEnum_CriterionSystemServingStatus" json:"system_serving_status,omitempty"` + // Approval status of the criterion. + ApprovalStatus enums.AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus `protobuf:"varint,53,opt,name=approval_status,json=approvalStatus,proto3,enum=google.ads.googleads.v1.enums.AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus" json:"approval_status,omitempty"` + // The modifier for the bid when the criterion matches. The modifier must be + // in the range: 0.1 - 10.0. Most targetable criteria types support modifiers. 
+ BidModifier *wrappers.DoubleValue `protobuf:"bytes,44,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // The CPC (cost-per-click) bid. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,16,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + // The CPM (cost-per-thousand viewable impressions) bid. + CpmBidMicros *wrappers.Int64Value `protobuf:"bytes,17,opt,name=cpm_bid_micros,json=cpmBidMicros,proto3" json:"cpm_bid_micros,omitempty"` + // The CPV (cost-per-view) bid. + CpvBidMicros *wrappers.Int64Value `protobuf:"bytes,24,opt,name=cpv_bid_micros,json=cpvBidMicros,proto3" json:"cpv_bid_micros,omitempty"` + // The CPC bid amount, expressed as a fraction of the advertised price + // for some good or service. The valid range for the fraction is [0,1) and the + // value stored here is 1,000,000 * [fraction]. + PercentCpcBidMicros *wrappers.Int64Value `protobuf:"bytes,33,opt,name=percent_cpc_bid_micros,json=percentCpcBidMicros,proto3" json:"percent_cpc_bid_micros,omitempty"` + // The effective CPC (cost-per-click) bid. + EffectiveCpcBidMicros *wrappers.Int64Value `protobuf:"bytes,18,opt,name=effective_cpc_bid_micros,json=effectiveCpcBidMicros,proto3" json:"effective_cpc_bid_micros,omitempty"` + // The effective CPM (cost-per-thousand viewable impressions) bid. + EffectiveCpmBidMicros *wrappers.Int64Value `protobuf:"bytes,19,opt,name=effective_cpm_bid_micros,json=effectiveCpmBidMicros,proto3" json:"effective_cpm_bid_micros,omitempty"` + // The effective CPV (cost-per-view) bid. + EffectiveCpvBidMicros *wrappers.Int64Value `protobuf:"bytes,20,opt,name=effective_cpv_bid_micros,json=effectiveCpvBidMicros,proto3" json:"effective_cpv_bid_micros,omitempty"` + // The effective Percent CPC bid amount. + EffectivePercentCpcBidMicros *wrappers.Int64Value `protobuf:"bytes,34,opt,name=effective_percent_cpc_bid_micros,json=effectivePercentCpcBidMicros,proto3" json:"effective_percent_cpc_bid_micros,omitempty"` + // Source of the effective CPC bid. + EffectiveCpcBidSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,21,opt,name=effective_cpc_bid_source,json=effectiveCpcBidSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_cpc_bid_source,omitempty"` + // Source of the effective CPM bid. + EffectiveCpmBidSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,22,opt,name=effective_cpm_bid_source,json=effectiveCpmBidSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_cpm_bid_source,omitempty"` + // Source of the effective CPV bid. + EffectiveCpvBidSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,23,opt,name=effective_cpv_bid_source,json=effectiveCpvBidSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_cpv_bid_source,omitempty"` + // Source of the effective Percent CPC bid. + EffectivePercentCpcBidSource enums.BiddingSourceEnum_BiddingSource `protobuf:"varint,35,opt,name=effective_percent_cpc_bid_source,json=effectivePercentCpcBidSource,proto3,enum=google.ads.googleads.v1.enums.BiddingSourceEnum_BiddingSource" json:"effective_percent_cpc_bid_source,omitempty"` + // Estimates for criterion bids at various positions. + PositionEstimates *AdGroupCriterion_PositionEstimates `protobuf:"bytes,10,opt,name=position_estimates,json=positionEstimates,proto3" json:"position_estimates,omitempty"` + // The list of possible final URLs after all cross-domain redirects for the + // ad. 
+ FinalUrls []*wrappers.StringValue `protobuf:"bytes,11,rep,name=final_urls,json=finalUrls,proto3" json:"final_urls,omitempty"` + // The list of possible final mobile URLs after all cross-domain redirects. + FinalMobileUrls []*wrappers.StringValue `protobuf:"bytes,51,rep,name=final_mobile_urls,json=finalMobileUrls,proto3" json:"final_mobile_urls,omitempty"` + // URL template for appending params to final URL. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,50,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + // The URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,13,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The list of mappings used to substitute custom parameter tags in a + // `tracking_url_template`, `final_urls`, or `mobile_final_urls`. + UrlCustomParameters []*common.CustomParameter `protobuf:"bytes,14,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // The ad group criterion. + // + // Exactly one must be set. + // + // Types that are valid to be assigned to Criterion: + // *AdGroupCriterion_Keyword + // *AdGroupCriterion_Placement + // *AdGroupCriterion_MobileAppCategory + // *AdGroupCriterion_MobileApplication + // *AdGroupCriterion_ListingGroup + // *AdGroupCriterion_AgeRange + // *AdGroupCriterion_Gender + // *AdGroupCriterion_IncomeRange + // *AdGroupCriterion_ParentalStatus + // *AdGroupCriterion_UserList + // *AdGroupCriterion_YoutubeVideo + // *AdGroupCriterion_YoutubeChannel + // *AdGroupCriterion_Topic + // *AdGroupCriterion_UserInterest + // *AdGroupCriterion_Webpage + // *AdGroupCriterion_AppPaymentModel + // *AdGroupCriterion_CustomAffinity + // *AdGroupCriterion_CustomIntent + Criterion isAdGroupCriterion_Criterion `protobuf_oneof:"criterion"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterion) Reset() { *m = AdGroupCriterion{} } +func (m *AdGroupCriterion) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterion) ProtoMessage() {} +func (*AdGroupCriterion) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_18b9ede2c7927371, []int{0} +} +func (m *AdGroupCriterion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterion.Unmarshal(m, b) +} +func (m *AdGroupCriterion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterion.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterion) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterion.Merge(dst, src) +} +func (m *AdGroupCriterion) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterion.Size(m) +} +func (m *AdGroupCriterion) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterion.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterion proto.InternalMessageInfo + +func (m *AdGroupCriterion) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupCriterion) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *AdGroupCriterion) GetStatus() enums.AdGroupCriterionStatusEnum_AdGroupCriterionStatus { + if m != nil { + return m.Status + } + return enums.AdGroupCriterionStatusEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetQualityInfo() *AdGroupCriterion_QualityInfo { + if m 
!= nil { + return m.QualityInfo + } + return nil +} + +func (m *AdGroupCriterion) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupCriterion) GetType() enums.CriterionTypeEnum_CriterionType { + if m != nil { + return m.Type + } + return enums.CriterionTypeEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetNegative() *wrappers.BoolValue { + if m != nil { + return m.Negative + } + return nil +} + +func (m *AdGroupCriterion) GetSystemServingStatus() enums.CriterionSystemServingStatusEnum_CriterionSystemServingStatus { + if m != nil { + return m.SystemServingStatus + } + return enums.CriterionSystemServingStatusEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetApprovalStatus() enums.AdGroupCriterionApprovalStatusEnum_AdGroupCriterionApprovalStatus { + if m != nil { + return m.ApprovalStatus + } + return enums.AdGroupCriterionApprovalStatusEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetBidModifier() *wrappers.DoubleValue { + if m != nil { + return m.BidModifier + } + return nil +} + +func (m *AdGroupCriterion) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetCpmBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpmBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetCpvBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpvBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetPercentCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.PercentCpcBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetEffectiveCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.EffectiveCpcBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetEffectiveCpmBidMicros() *wrappers.Int64Value { + if m != nil { + return m.EffectiveCpmBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetEffectiveCpvBidMicros() *wrappers.Int64Value { + if m != nil { + return m.EffectiveCpvBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetEffectivePercentCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.EffectivePercentCpcBidMicros + } + return nil +} + +func (m *AdGroupCriterion) GetEffectiveCpcBidSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectiveCpcBidSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetEffectiveCpmBidSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectiveCpmBidSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetEffectiveCpvBidSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectiveCpvBidSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetEffectivePercentCpcBidSource() enums.BiddingSourceEnum_BiddingSource { + if m != nil { + return m.EffectivePercentCpcBidSource + } + return enums.BiddingSourceEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion) GetPositionEstimates() *AdGroupCriterion_PositionEstimates { + if m != nil { + return m.PositionEstimates + } + return nil +} + +func (m *AdGroupCriterion) GetFinalUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalUrls + } + return nil +} + +func (m *AdGroupCriterion) GetFinalMobileUrls() []*wrappers.StringValue { + if m != nil { + return m.FinalMobileUrls + } + return nil +} + +func (m *AdGroupCriterion) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return 
m.FinalUrlSuffix + } + return nil +} + +func (m *AdGroupCriterion) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *AdGroupCriterion) GetUrlCustomParameters() []*common.CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +type isAdGroupCriterion_Criterion interface { + isAdGroupCriterion_Criterion() +} + +type AdGroupCriterion_Keyword struct { + Keyword *common.KeywordInfo `protobuf:"bytes,27,opt,name=keyword,proto3,oneof"` +} + +type AdGroupCriterion_Placement struct { + Placement *common.PlacementInfo `protobuf:"bytes,28,opt,name=placement,proto3,oneof"` +} + +type AdGroupCriterion_MobileAppCategory struct { + MobileAppCategory *common.MobileAppCategoryInfo `protobuf:"bytes,29,opt,name=mobile_app_category,json=mobileAppCategory,proto3,oneof"` +} + +type AdGroupCriterion_MobileApplication struct { + MobileApplication *common.MobileApplicationInfo `protobuf:"bytes,30,opt,name=mobile_application,json=mobileApplication,proto3,oneof"` +} + +type AdGroupCriterion_ListingGroup struct { + ListingGroup *common.ListingGroupInfo `protobuf:"bytes,32,opt,name=listing_group,json=listingGroup,proto3,oneof"` +} + +type AdGroupCriterion_AgeRange struct { + AgeRange *common.AgeRangeInfo `protobuf:"bytes,36,opt,name=age_range,json=ageRange,proto3,oneof"` +} + +type AdGroupCriterion_Gender struct { + Gender *common.GenderInfo `protobuf:"bytes,37,opt,name=gender,proto3,oneof"` +} + +type AdGroupCriterion_IncomeRange struct { + IncomeRange *common.IncomeRangeInfo `protobuf:"bytes,38,opt,name=income_range,json=incomeRange,proto3,oneof"` +} + +type AdGroupCriterion_ParentalStatus struct { + ParentalStatus *common.ParentalStatusInfo `protobuf:"bytes,39,opt,name=parental_status,json=parentalStatus,proto3,oneof"` +} + +type AdGroupCriterion_UserList struct { + UserList *common.UserListInfo `protobuf:"bytes,42,opt,name=user_list,json=userList,proto3,oneof"` +} + +type AdGroupCriterion_YoutubeVideo struct { + YoutubeVideo *common.YouTubeVideoInfo `protobuf:"bytes,40,opt,name=youtube_video,json=youtubeVideo,proto3,oneof"` +} + +type AdGroupCriterion_YoutubeChannel struct { + YoutubeChannel *common.YouTubeChannelInfo `protobuf:"bytes,41,opt,name=youtube_channel,json=youtubeChannel,proto3,oneof"` +} + +type AdGroupCriterion_Topic struct { + Topic *common.TopicInfo `protobuf:"bytes,43,opt,name=topic,proto3,oneof"` +} + +type AdGroupCriterion_UserInterest struct { + UserInterest *common.UserInterestInfo `protobuf:"bytes,45,opt,name=user_interest,json=userInterest,proto3,oneof"` +} + +type AdGroupCriterion_Webpage struct { + Webpage *common.WebpageInfo `protobuf:"bytes,46,opt,name=webpage,proto3,oneof"` +} + +type AdGroupCriterion_AppPaymentModel struct { + AppPaymentModel *common.AppPaymentModelInfo `protobuf:"bytes,47,opt,name=app_payment_model,json=appPaymentModel,proto3,oneof"` +} + +type AdGroupCriterion_CustomAffinity struct { + CustomAffinity *common.CustomAffinityInfo `protobuf:"bytes,48,opt,name=custom_affinity,json=customAffinity,proto3,oneof"` +} + +type AdGroupCriterion_CustomIntent struct { + CustomIntent *common.CustomIntentInfo `protobuf:"bytes,49,opt,name=custom_intent,json=customIntent,proto3,oneof"` +} + +func (*AdGroupCriterion_Keyword) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_Placement) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_MobileAppCategory) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_MobileApplication) 
isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_ListingGroup) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_AgeRange) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_Gender) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_IncomeRange) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_ParentalStatus) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_UserList) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_YoutubeVideo) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_YoutubeChannel) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_Topic) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_UserInterest) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_Webpage) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_AppPaymentModel) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_CustomAffinity) isAdGroupCriterion_Criterion() {} + +func (*AdGroupCriterion_CustomIntent) isAdGroupCriterion_Criterion() {} + +func (m *AdGroupCriterion) GetCriterion() isAdGroupCriterion_Criterion { + if m != nil { + return m.Criterion + } + return nil +} + +func (m *AdGroupCriterion) GetKeyword() *common.KeywordInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_Keyword); ok { + return x.Keyword + } + return nil +} + +func (m *AdGroupCriterion) GetPlacement() *common.PlacementInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_Placement); ok { + return x.Placement + } + return nil +} + +func (m *AdGroupCriterion) GetMobileAppCategory() *common.MobileAppCategoryInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_MobileAppCategory); ok { + return x.MobileAppCategory + } + return nil +} + +func (m *AdGroupCriterion) GetMobileApplication() *common.MobileApplicationInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_MobileApplication); ok { + return x.MobileApplication + } + return nil +} + +func (m *AdGroupCriterion) GetListingGroup() *common.ListingGroupInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_ListingGroup); ok { + return x.ListingGroup + } + return nil +} + +func (m *AdGroupCriterion) GetAgeRange() *common.AgeRangeInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_AgeRange); ok { + return x.AgeRange + } + return nil +} + +func (m *AdGroupCriterion) GetGender() *common.GenderInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_Gender); ok { + return x.Gender + } + return nil +} + +func (m *AdGroupCriterion) GetIncomeRange() *common.IncomeRangeInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_IncomeRange); ok { + return x.IncomeRange + } + return nil +} + +func (m *AdGroupCriterion) GetParentalStatus() *common.ParentalStatusInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_ParentalStatus); ok { + return x.ParentalStatus + } + return nil +} + +func (m *AdGroupCriterion) GetUserList() *common.UserListInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_UserList); ok { + return x.UserList + } + return nil +} + +func (m *AdGroupCriterion) GetYoutubeVideo() *common.YouTubeVideoInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_YoutubeVideo); ok { + return x.YoutubeVideo + } + return nil +} + +func (m *AdGroupCriterion) GetYoutubeChannel() *common.YouTubeChannelInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_YoutubeChannel); ok { + return x.YoutubeChannel + } + return nil +} + +func (m *AdGroupCriterion) GetTopic() *common.TopicInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_Topic); ok { 
+ return x.Topic + } + return nil +} + +func (m *AdGroupCriterion) GetUserInterest() *common.UserInterestInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_UserInterest); ok { + return x.UserInterest + } + return nil +} + +func (m *AdGroupCriterion) GetWebpage() *common.WebpageInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_Webpage); ok { + return x.Webpage + } + return nil +} + +func (m *AdGroupCriterion) GetAppPaymentModel() *common.AppPaymentModelInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_AppPaymentModel); ok { + return x.AppPaymentModel + } + return nil +} + +func (m *AdGroupCriterion) GetCustomAffinity() *common.CustomAffinityInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_CustomAffinity); ok { + return x.CustomAffinity + } + return nil +} + +func (m *AdGroupCriterion) GetCustomIntent() *common.CustomIntentInfo { + if x, ok := m.GetCriterion().(*AdGroupCriterion_CustomIntent); ok { + return x.CustomIntent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AdGroupCriterion) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupCriterion_OneofMarshaler, _AdGroupCriterion_OneofUnmarshaler, _AdGroupCriterion_OneofSizer, []interface{}{ + (*AdGroupCriterion_Keyword)(nil), + (*AdGroupCriterion_Placement)(nil), + (*AdGroupCriterion_MobileAppCategory)(nil), + (*AdGroupCriterion_MobileApplication)(nil), + (*AdGroupCriterion_ListingGroup)(nil), + (*AdGroupCriterion_AgeRange)(nil), + (*AdGroupCriterion_Gender)(nil), + (*AdGroupCriterion_IncomeRange)(nil), + (*AdGroupCriterion_ParentalStatus)(nil), + (*AdGroupCriterion_UserList)(nil), + (*AdGroupCriterion_YoutubeVideo)(nil), + (*AdGroupCriterion_YoutubeChannel)(nil), + (*AdGroupCriterion_Topic)(nil), + (*AdGroupCriterion_UserInterest)(nil), + (*AdGroupCriterion_Webpage)(nil), + (*AdGroupCriterion_AppPaymentModel)(nil), + (*AdGroupCriterion_CustomAffinity)(nil), + (*AdGroupCriterion_CustomIntent)(nil), + } +} + +func _AdGroupCriterion_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupCriterion) + // criterion + switch x := m.Criterion.(type) { + case *AdGroupCriterion_Keyword: + b.EncodeVarint(27<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Keyword); err != nil { + return err + } + case *AdGroupCriterion_Placement: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Placement); err != nil { + return err + } + case *AdGroupCriterion_MobileAppCategory: + b.EncodeVarint(29<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileAppCategory); err != nil { + return err + } + case *AdGroupCriterion_MobileApplication: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileApplication); err != nil { + return err + } + case *AdGroupCriterion_ListingGroup: + b.EncodeVarint(32<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListingGroup); err != nil { + return err + } + case *AdGroupCriterion_AgeRange: + b.EncodeVarint(36<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AgeRange); err != nil { + return err + } + case *AdGroupCriterion_Gender: + b.EncodeVarint(37<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gender); err != nil { + return err + } + case *AdGroupCriterion_IncomeRange: + b.EncodeVarint(38<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IncomeRange); err != nil { + return err + } + case *AdGroupCriterion_ParentalStatus: + 
b.EncodeVarint(39<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ParentalStatus); err != nil { + return err + } + case *AdGroupCriterion_UserList: + b.EncodeVarint(42<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserList); err != nil { + return err + } + case *AdGroupCriterion_YoutubeVideo: + b.EncodeVarint(40<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeVideo); err != nil { + return err + } + case *AdGroupCriterion_YoutubeChannel: + b.EncodeVarint(41<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeChannel); err != nil { + return err + } + case *AdGroupCriterion_Topic: + b.EncodeVarint(43<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Topic); err != nil { + return err + } + case *AdGroupCriterion_UserInterest: + b.EncodeVarint(45<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserInterest); err != nil { + return err + } + case *AdGroupCriterion_Webpage: + b.EncodeVarint(46<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Webpage); err != nil { + return err + } + case *AdGroupCriterion_AppPaymentModel: + b.EncodeVarint(47<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppPaymentModel); err != nil { + return err + } + case *AdGroupCriterion_CustomAffinity: + b.EncodeVarint(48<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomAffinity); err != nil { + return err + } + case *AdGroupCriterion_CustomIntent: + b.EncodeVarint(49<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomIntent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AdGroupCriterion.Criterion has unexpected type %T", x) + } + return nil +} + +func _AdGroupCriterion_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupCriterion) + switch tag { + case 27: // criterion.keyword + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.KeywordInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_Keyword{msg} + return true, err + case 28: // criterion.placement + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PlacementInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_Placement{msg} + return true, err + case 29: // criterion.mobile_app_category + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileAppCategoryInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_MobileAppCategory{msg} + return true, err + case 30: // criterion.mobile_application + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileApplicationInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_MobileApplication{msg} + return true, err + case 32: // criterion.listing_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ListingGroupInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_ListingGroup{msg} + return true, err + case 36: // criterion.age_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AgeRangeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_AgeRange{msg} + return true, err + case 37: // criterion.gender + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.GenderInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_Gender{msg} + return true, err + case 38: 
// criterion.income_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.IncomeRangeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_IncomeRange{msg} + return true, err + case 39: // criterion.parental_status + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ParentalStatusInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_ParentalStatus{msg} + return true, err + case 42: // criterion.user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.UserListInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_UserList{msg} + return true, err + case 40: // criterion.youtube_video + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeVideoInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_YoutubeVideo{msg} + return true, err + case 41: // criterion.youtube_channel + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeChannelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_YoutubeChannel{msg} + return true, err + case 43: // criterion.topic + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TopicInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_Topic{msg} + return true, err + case 45: // criterion.user_interest + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.UserInterestInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_UserInterest{msg} + return true, err + case 46: // criterion.webpage + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.WebpageInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_Webpage{msg} + return true, err + case 47: // criterion.app_payment_model + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AppPaymentModelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_AppPaymentModel{msg} + return true, err + case 48: // criterion.custom_affinity + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CustomAffinityInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_CustomAffinity{msg} + return true, err + case 49: // criterion.custom_intent + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CustomIntentInfo) + err := b.DecodeMessage(msg) + m.Criterion = &AdGroupCriterion_CustomIntent{msg} + return true, err + default: + return false, nil + } +} + +func _AdGroupCriterion_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupCriterion) + // criterion + switch x := m.Criterion.(type) { + case *AdGroupCriterion_Keyword: + s := proto.Size(x.Keyword) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_Placement: + s := proto.Size(x.Placement) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_MobileAppCategory: + s := proto.Size(x.MobileAppCategory) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_MobileApplication: + s := proto.Size(x.MobileApplication) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*AdGroupCriterion_ListingGroup: + s := proto.Size(x.ListingGroup) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_AgeRange: + s := proto.Size(x.AgeRange) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_Gender: + s := proto.Size(x.Gender) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_IncomeRange: + s := proto.Size(x.IncomeRange) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_ParentalStatus: + s := proto.Size(x.ParentalStatus) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_UserList: + s := proto.Size(x.UserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_YoutubeVideo: + s := proto.Size(x.YoutubeVideo) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_YoutubeChannel: + s := proto.Size(x.YoutubeChannel) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_Topic: + s := proto.Size(x.Topic) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_UserInterest: + s := proto.Size(x.UserInterest) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_Webpage: + s := proto.Size(x.Webpage) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_AppPaymentModel: + s := proto.Size(x.AppPaymentModel) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_CustomAffinity: + s := proto.Size(x.CustomAffinity) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterion_CustomIntent: + s := proto.Size(x.CustomIntent) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A container for ad group criterion quality information. +type AdGroupCriterion_QualityInfo struct { + // The quality score. + // + // This field may not be populated if Google does not have enough + // information to determine a value. + QualityScore *wrappers.Int32Value `protobuf:"bytes,1,opt,name=quality_score,json=qualityScore,proto3" json:"quality_score,omitempty"` + // The performance of the ad compared to other advertisers. + CreativeQualityScore enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,2,opt,name=creative_quality_score,json=creativeQualityScore,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"creative_quality_score,omitempty"` + // The quality score of the landing page. + PostClickQualityScore enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,3,opt,name=post_click_quality_score,json=postClickQualityScore,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"post_click_quality_score,omitempty"` + // The click-through rate compared to that of other advertisers. 
+ SearchPredictedCtr enums.QualityScoreBucketEnum_QualityScoreBucket `protobuf:"varint,4,opt,name=search_predicted_ctr,json=searchPredictedCtr,proto3,enum=google.ads.googleads.v1.enums.QualityScoreBucketEnum_QualityScoreBucket" json:"search_predicted_ctr,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterion_QualityInfo) Reset() { *m = AdGroupCriterion_QualityInfo{} } +func (m *AdGroupCriterion_QualityInfo) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterion_QualityInfo) ProtoMessage() {} +func (*AdGroupCriterion_QualityInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_18b9ede2c7927371, []int{0, 0} +} +func (m *AdGroupCriterion_QualityInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterion_QualityInfo.Unmarshal(m, b) +} +func (m *AdGroupCriterion_QualityInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterion_QualityInfo.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterion_QualityInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterion_QualityInfo.Merge(dst, src) +} +func (m *AdGroupCriterion_QualityInfo) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterion_QualityInfo.Size(m) +} +func (m *AdGroupCriterion_QualityInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterion_QualityInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterion_QualityInfo proto.InternalMessageInfo + +func (m *AdGroupCriterion_QualityInfo) GetQualityScore() *wrappers.Int32Value { + if m != nil { + return m.QualityScore + } + return nil +} + +func (m *AdGroupCriterion_QualityInfo) GetCreativeQualityScore() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.CreativeQualityScore + } + return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion_QualityInfo) GetPostClickQualityScore() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.PostClickQualityScore + } + return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +func (m *AdGroupCriterion_QualityInfo) GetSearchPredictedCtr() enums.QualityScoreBucketEnum_QualityScoreBucket { + if m != nil { + return m.SearchPredictedCtr + } + return enums.QualityScoreBucketEnum_UNSPECIFIED +} + +// Estimates for criterion bids at various positions. +type AdGroupCriterion_PositionEstimates struct { + // The estimate of the CPC bid required for ad to be shown on first + // page of search results. + FirstPageCpcMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=first_page_cpc_micros,json=firstPageCpcMicros,proto3" json:"first_page_cpc_micros,omitempty"` + // The estimate of the CPC bid required for ad to be displayed in first + // position, at the top of the first page of search results. + FirstPositionCpcMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=first_position_cpc_micros,json=firstPositionCpcMicros,proto3" json:"first_position_cpc_micros,omitempty"` + // The estimate of the CPC bid required for ad to be displayed at the top + // of the first page of search results. + TopOfPageCpcMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=top_of_page_cpc_micros,json=topOfPageCpcMicros,proto3" json:"top_of_page_cpc_micros,omitempty"` + // Estimate of how many clicks per week you might get by changing your + // keyword bid to the value in first_position_cpc_micros. 
+ EstimatedAddClicksAtFirstPositionCpc *wrappers.Int64Value `protobuf:"bytes,4,opt,name=estimated_add_clicks_at_first_position_cpc,json=estimatedAddClicksAtFirstPositionCpc,proto3" json:"estimated_add_clicks_at_first_position_cpc,omitempty"` + // Estimate of how your cost per week might change when changing your + // keyword bid to the value in first_position_cpc_micros. + EstimatedAddCostAtFirstPositionCpc *wrappers.Int64Value `protobuf:"bytes,5,opt,name=estimated_add_cost_at_first_position_cpc,json=estimatedAddCostAtFirstPositionCpc,proto3" json:"estimated_add_cost_at_first_position_cpc,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterion_PositionEstimates) Reset() { *m = AdGroupCriterion_PositionEstimates{} } +func (m *AdGroupCriterion_PositionEstimates) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterion_PositionEstimates) ProtoMessage() {} +func (*AdGroupCriterion_PositionEstimates) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_18b9ede2c7927371, []int{0, 1} +} +func (m *AdGroupCriterion_PositionEstimates) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterion_PositionEstimates.Unmarshal(m, b) +} +func (m *AdGroupCriterion_PositionEstimates) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterion_PositionEstimates.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterion_PositionEstimates) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterion_PositionEstimates.Merge(dst, src) +} +func (m *AdGroupCriterion_PositionEstimates) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterion_PositionEstimates.Size(m) +} +func (m *AdGroupCriterion_PositionEstimates) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterion_PositionEstimates.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterion_PositionEstimates proto.InternalMessageInfo + +func (m *AdGroupCriterion_PositionEstimates) GetFirstPageCpcMicros() *wrappers.Int64Value { + if m != nil { + return m.FirstPageCpcMicros + } + return nil +} + +func (m *AdGroupCriterion_PositionEstimates) GetFirstPositionCpcMicros() *wrappers.Int64Value { + if m != nil { + return m.FirstPositionCpcMicros + } + return nil +} + +func (m *AdGroupCriterion_PositionEstimates) GetTopOfPageCpcMicros() *wrappers.Int64Value { + if m != nil { + return m.TopOfPageCpcMicros + } + return nil +} + +func (m *AdGroupCriterion_PositionEstimates) GetEstimatedAddClicksAtFirstPositionCpc() *wrappers.Int64Value { + if m != nil { + return m.EstimatedAddClicksAtFirstPositionCpc + } + return nil +} + +func (m *AdGroupCriterion_PositionEstimates) GetEstimatedAddCostAtFirstPositionCpc() *wrappers.Int64Value { + if m != nil { + return m.EstimatedAddCostAtFirstPositionCpc + } + return nil +} + +func init() { + proto.RegisterType((*AdGroupCriterion)(nil), "google.ads.googleads.v1.resources.AdGroupCriterion") + proto.RegisterType((*AdGroupCriterion_QualityInfo)(nil), "google.ads.googleads.v1.resources.AdGroupCriterion.QualityInfo") + proto.RegisterType((*AdGroupCriterion_PositionEstimates)(nil), "google.ads.googleads.v1.resources.AdGroupCriterion.PositionEstimates") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_criterion.proto", fileDescriptor_ad_group_criterion_18b9ede2c7927371) +} + +var fileDescriptor_ad_group_criterion_18b9ede2c7927371 = []byte{ + // 1706 bytes of a gzipped FileDescriptorProto + 0x1f, 
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x98, 0xdd, 0x72, 0x1b, 0xb7, + 0x15, 0xc7, 0x4b, 0x29, 0x76, 0x2c, 0x48, 0x96, 0x2d, 0xc8, 0x52, 0x11, 0xc5, 0x4d, 0x65, 0x27, + 0x6e, 0x14, 0x27, 0x26, 0x23, 0x39, 0x4e, 0x3b, 0xcc, 0x34, 0x29, 0xc5, 0xd8, 0xb2, 0x9a, 0x2a, + 0x61, 0x28, 0x59, 0x9e, 0x76, 0xd4, 0xd9, 0x82, 0xbb, 0xe0, 0x1a, 0xe3, 0xdd, 0x05, 0x0c, 0x60, + 0xa9, 0xb2, 0x17, 0x9d, 0x69, 0x2f, 0x3a, 0xd3, 0xbb, 0x3e, 0x43, 0x2f, 0xfb, 0x1a, 0xbd, 0xeb, + 0x4d, 0xdf, 0xa3, 0x0f, 0xd0, 0xeb, 0x0e, 0xbe, 0xc8, 0x25, 0x29, 0x72, 0xa9, 0x8e, 0xee, 0x76, + 0xcf, 0xe2, 0xff, 0x3b, 0x07, 0x07, 0x67, 0xb1, 0x07, 0x0b, 0xea, 0x31, 0x63, 0x71, 0x42, 0x6a, + 0x38, 0x92, 0x35, 0x7b, 0xa9, 0xaf, 0x7a, 0xbb, 0x35, 0x41, 0x24, 0xcb, 0x45, 0x48, 0x64, 0x0d, + 0x47, 0x41, 0x2c, 0x58, 0xce, 0x83, 0x50, 0x50, 0x45, 0x04, 0x65, 0x59, 0x95, 0x0b, 0xa6, 0x18, + 0xbc, 0x67, 0x05, 0x55, 0x1c, 0xc9, 0xea, 0x40, 0x5b, 0xed, 0xed, 0x56, 0x07, 0xda, 0xad, 0x47, + 0xd3, 0xf0, 0x21, 0x4b, 0x53, 0x96, 0xd5, 0x1c, 0x12, 0x5b, 0xe2, 0xd6, 0x93, 0xb2, 0xe1, 0xb9, + 0x54, 0x2c, 0x0d, 0x38, 0x16, 0x38, 0x25, 0x8a, 0x08, 0x27, 0x7b, 0x36, 0x4d, 0x46, 0xb2, 0x3c, + 0xbd, 0x68, 0x02, 0x01, 0xe6, 0x5c, 0xb0, 0x1e, 0x4e, 0x02, 0xa9, 0xb0, 0xca, 0xa5, 0xe3, 0xfc, + 0xfc, 0xd2, 0x9c, 0x11, 0xf9, 0xde, 0x6c, 0x79, 0x87, 0x46, 0x11, 0xcd, 0xe2, 0xc0, 0xe6, 0xc6, + 0x69, 0x9a, 0xb3, 0x35, 0x05, 0x4f, 0x7d, 0xa9, 0x48, 0x1a, 0x48, 0x22, 0x7a, 0x06, 0x72, 0x09, + 0xc7, 0x43, 0x88, 0xea, 0x73, 0xef, 0xf8, 0x67, 0xb3, 0x35, 0x6f, 0x72, 0x9c, 0x50, 0xd5, 0x0f, + 0x64, 0xc8, 0x04, 0x09, 0x3a, 0x79, 0xf8, 0x9a, 0x28, 0xa7, 0x7c, 0xcf, 0x29, 0xcd, 0x5d, 0x27, + 0xef, 0xd6, 0xce, 0x05, 0xe6, 0x9c, 0x08, 0x1f, 0xcd, 0x5d, 0x4f, 0xe6, 0xb4, 0x86, 0xb3, 0x8c, + 0x29, 0xac, 0x28, 0xcb, 0xdc, 0xd3, 0xfb, 0xff, 0xfc, 0x10, 0xdc, 0x6e, 0x44, 0x07, 0x3a, 0x8f, + 0x4d, 0x1f, 0x17, 0x7c, 0x1f, 0xdc, 0xf4, 0x35, 0x13, 0x64, 0x38, 0x25, 0xa8, 0xb2, 0x5d, 0xd9, + 0x59, 0x6a, 0xaf, 0x78, 0xe3, 0xb7, 0x38, 0x25, 0xf0, 0x4b, 0xb0, 0x32, 0x9c, 0x09, 0x8d, 0xd0, + 0xd6, 0x76, 0x65, 0x67, 0x79, 0xef, 0x5d, 0x57, 0x7a, 0x55, 0x1f, 0x4e, 0xf5, 0x30, 0x53, 0x9f, + 0x7f, 0x76, 0x8a, 0x93, 0x9c, 0xb4, 0x97, 0x07, 0x82, 0xc3, 0x08, 0xbe, 0x02, 0xd7, 0x6d, 0xd6, + 0xd0, 0xe2, 0x76, 0x65, 0x67, 0x75, 0xaf, 0x55, 0x9d, 0x56, 0xbf, 0x26, 0x05, 0xd5, 0xf1, 0x28, + 0x8f, 0x8d, 0xf8, 0x69, 0x96, 0xa7, 0x53, 0x1e, 0xb5, 0x1d, 0x1f, 0x76, 0xc0, 0x8a, 0xcf, 0x1f, + 0xcd, 0xba, 0x0c, 0xbd, 0x65, 0x22, 0xfd, 0xaa, 0x5a, 0xfa, 0xbe, 0x4c, 0x80, 0xab, 0xdf, 0x5b, + 0xce, 0x61, 0xd6, 0x65, 0xed, 0xe5, 0x37, 0xc3, 0x1b, 0xf8, 0x53, 0x70, 0xc3, 0xd7, 0x23, 0xba, + 0x66, 0xf8, 0x77, 0x27, 0x32, 0x71, 0xac, 0x04, 0xcd, 0x62, 0x9b, 0x8a, 0xb7, 0xb1, 0x45, 0xc3, + 0x36, 0x78, 0x4b, 0x97, 0x01, 0x7a, 0xc7, 0x24, 0xe1, 0xcb, 0x92, 0x24, 0x0c, 0x22, 0x39, 0xe9, + 0x73, 0x62, 0xe6, 0x3e, 0x62, 0x69, 0x1b, 0x16, 0xfc, 0x1c, 0xdc, 0xc8, 0x48, 0x8c, 0x15, 0xed, + 0x11, 0xf4, 0x63, 0x13, 0xcc, 0xd6, 0x44, 0x30, 0xfb, 0x8c, 0x25, 0x36, 0x94, 0xc1, 0x58, 0xf8, + 0xb7, 0x0a, 0xd8, 0xb8, 0xb0, 0xb0, 0xd1, 0x67, 0x26, 0xba, 0xb3, 0x79, 0xa3, 0x3b, 0x36, 0x90, + 0x63, 0xcb, 0x28, 0x2c, 0xd4, 0xac, 0x01, 0xed, 0x75, 0x39, 0x69, 0x84, 0x7f, 0xad, 0x80, 0x5b, + 0x63, 0xbb, 0x03, 0x7a, 0x62, 0x82, 0xf9, 0xdd, 0x25, 0xeb, 0xa5, 0xe1, 0x28, 0x33, 0xea, 0x66, + 0x74, 0x48, 0x7b, 0x15, 0x8f, 0xdc, 0xc3, 0xaf, 0xc0, 0x4a, 0x87, 0x46, 0x41, 0xca, 0x22, 0xda, + 0xa5, 0x44, 0xa0, 0x4f, 0xa6, 0xac, 0xf3, 0xd7, 0x2c, 0xef, 0x24, 0xc4, 0x95, 0x7c, 0x87, 0x46, + 0x47, 0x4e, 0x00, 0x1b, 0x60, 
0x35, 0xe4, 0x61, 0x60, 0x20, 0x34, 0x14, 0x4c, 0xa2, 0xdb, 0xe5, + 0x2f, 0xcd, 0x4a, 0xc8, 0xc3, 0x7d, 0x1a, 0x1d, 0x19, 0x81, 0x45, 0xa4, 0x45, 0xc4, 0xda, 0x5c, + 0x88, 0x74, 0x0c, 0xd1, 0x2b, 0x22, 0xd0, 0x5c, 0x88, 0xde, 0x10, 0xd1, 0x02, 0x9b, 0x9c, 0x88, + 0x90, 0x64, 0x2a, 0x18, 0x9b, 0xd0, 0xbd, 0x72, 0xd4, 0xba, 0x93, 0x36, 0x8b, 0xf3, 0x3a, 0x01, + 0x88, 0x74, 0xbb, 0x24, 0xd4, 0x75, 0x38, 0xce, 0x84, 0xe5, 0xcc, 0x8d, 0x81, 0x78, 0x16, 0x75, + 0x24, 0x6f, 0xeb, 0x97, 0xa3, 0xa6, 0x53, 0xa9, 0x23, 0xa9, 0xbc, 0x73, 0x39, 0x6a, 0x21, 0xa7, + 0x21, 0xd8, 0x1e, 0x52, 0xa7, 0x64, 0xf7, 0x7e, 0x39, 0xfd, 0xee, 0x00, 0xd2, 0xba, 0x20, 0xcd, + 0xe7, 0x17, 0xa5, 0xd9, 0xee, 0x76, 0x68, 0x63, 0xae, 0x1d, 0x68, 0xdf, 0x7e, 0x36, 0x8f, 0x8d, + 0xc6, 0xbc, 0x45, 0x23, 0x96, 0x89, 0x95, 0xb0, 0xe6, 0x71, 0xc7, 0x69, 0xd1, 0xf1, 0xe6, 0x95, + 0x3b, 0x4e, 0xa7, 0x3a, 0xee, 0x15, 0x1d, 0xff, 0xf0, 0xca, 0x1d, 0xf7, 0x86, 0x8e, 0xff, 0x52, + 0x99, 0xb5, 0xa0, 0x2e, 0x82, 0xf7, 0xaf, 0x24, 0x82, 0x29, 0x6b, 0xee, 0x02, 0x51, 0x00, 0x72, + 0x26, 0xa9, 0xfe, 0xea, 0x07, 0x44, 0x2a, 0x9a, 0x62, 0x45, 0x24, 0x02, 0xa6, 0x94, 0x9e, 0xfe, + 0x3f, 0x1f, 0xc1, 0x96, 0xa3, 0x3d, 0xf5, 0xb0, 0xf6, 0x1a, 0x1f, 0x37, 0xc1, 0x2f, 0x00, 0xe8, + 0xd2, 0x0c, 0x27, 0x41, 0x2e, 0x12, 0x89, 0x96, 0xb7, 0x17, 0x4b, 0x3f, 0x89, 0x4b, 0x66, 0xfc, + 0x0b, 0x91, 0x48, 0xf8, 0x1c, 0xac, 0x59, 0x71, 0xca, 0x3a, 0x34, 0x21, 0x96, 0xf1, 0x78, 0x0e, + 0xc6, 0x2d, 0x23, 0x3b, 0x32, 0x2a, 0x43, 0x7a, 0x06, 0x6e, 0x0f, 0xc2, 0x08, 0x64, 0xde, 0xed, + 0xd2, 0xdf, 0xa3, 0xbd, 0x39, 0xbe, 0xcf, 0xab, 0x3e, 0x98, 0x63, 0xa3, 0x81, 0x2d, 0xb0, 0xa1, + 0x04, 0x0e, 0x5f, 0xeb, 0x6f, 0xa2, 0x46, 0x29, 0x92, 0xf2, 0x04, 0x2b, 0x82, 0x6e, 0xce, 0x01, + 0x5b, 0xf7, 0xd2, 0x17, 0x22, 0x39, 0x71, 0x42, 0x18, 0x82, 0x0d, 0x0d, 0x1a, 0xef, 0xa1, 0x25, + 0x5a, 0x35, 0xf3, 0xac, 0x4d, 0x5d, 0x19, 0xdb, 0x7c, 0x57, 0x9b, 0x46, 0xd8, 0xf2, 0xba, 0xf6, + 0x7a, 0x2e, 0x92, 0x31, 0x9b, 0x84, 0x07, 0xe0, 0xed, 0xd7, 0xa4, 0x7f, 0xce, 0x44, 0x84, 0xde, + 0x35, 0x81, 0x7e, 0x5c, 0x86, 0xfd, 0xc6, 0x0e, 0xd7, 0x4d, 0xcd, 0xf3, 0x1f, 0xb4, 0xbd, 0x1a, + 0x1e, 0x81, 0x25, 0x9e, 0xe0, 0x90, 0xa4, 0x24, 0x53, 0xe8, 0xae, 0x41, 0x3d, 0x2a, 0x43, 0xb5, + 0xbc, 0xc0, 0xc1, 0x86, 0x04, 0x18, 0x83, 0x75, 0xb7, 0xb4, 0x98, 0xf3, 0x20, 0xc4, 0x8a, 0xc4, + 0x4c, 0xf4, 0xd1, 0x8f, 0x0c, 0xf8, 0x49, 0x19, 0xd8, 0xae, 0x6f, 0x83, 0xf3, 0xa6, 0x13, 0x3a, + 0x07, 0x6b, 0xe9, 0xf8, 0x03, 0xd8, 0x05, 0x70, 0xe8, 0x28, 0xa1, 0xa1, 0x69, 0x7e, 0xd1, 0x7b, + 0x97, 0xf4, 0xe3, 0x85, 0x13, 0x7e, 0xfc, 0x03, 0xf8, 0x12, 0xdc, 0x4c, 0xa8, 0x54, 0xba, 0x3c, + 0x6c, 0x13, 0xb8, 0x6d, 0x5c, 0x7c, 0x5a, 0xe6, 0xe2, 0x57, 0x56, 0x64, 0xde, 0x30, 0x47, 0x5f, + 0x49, 0x0a, 0x36, 0xf8, 0x0d, 0x58, 0xc2, 0x31, 0x09, 0x04, 0xce, 0x62, 0x82, 0x3e, 0x30, 0xd0, + 0x4f, 0xca, 0xa0, 0x8d, 0x98, 0xb4, 0xf5, 0x78, 0x07, 0xbc, 0x81, 0xdd, 0x3d, 0xfc, 0x1a, 0x5c, + 0x8f, 0x49, 0x16, 0x11, 0x81, 0x1e, 0x18, 0xd2, 0xc3, 0x32, 0xd2, 0x81, 0x19, 0xed, 0x38, 0x4e, + 0x0b, 0x4f, 0xc0, 0x0a, 0xcd, 0x42, 0x96, 0xfa, 0xa8, 0x7e, 0x62, 0x58, 0xa5, 0x05, 0x7b, 0x68, + 0x34, 0xc5, 0xc0, 0x96, 0xe9, 0xd0, 0x04, 0x7f, 0x0b, 0x6e, 0x71, 0x2c, 0x48, 0xa6, 0x86, 0x8d, + 0xde, 0x87, 0x06, 0xbc, 0x57, 0x5a, 0x67, 0x4e, 0x66, 0xdb, 0x34, 0xc7, 0x5e, 0xe5, 0x23, 0x56, + 0x9d, 0xc7, 0x5c, 0x12, 0x11, 0xe8, 0xe4, 0xa2, 0x87, 0xf3, 0xe5, 0xf1, 0x85, 0x24, 0x42, 0x2f, + 0x90, 0xcf, 0x63, 0xee, 0xee, 0xf5, 0x6a, 0xf7, 0x59, 0xae, 0xf2, 0x0e, 0x09, 0x7a, 0x34, 0x22, + 0x0c, 0xed, 0xcc, 0xb7, 0xda, 0xbf, 0x66, 0xf9, 0x49, 
0xde, 0x21, 0xa7, 0x5a, 0xe3, 0x57, 0xdb, + 0x81, 0x8c, 0x4d, 0x27, 0xc1, 0x83, 0xc3, 0x57, 0x38, 0xcb, 0x48, 0x82, 0x3e, 0x9a, 0x2f, 0x09, + 0x0e, 0xdd, 0xb4, 0x2a, 0x9f, 0x04, 0x07, 0x73, 0x56, 0xd8, 0x00, 0xd7, 0x14, 0xe3, 0x34, 0x44, + 0x1f, 0x1b, 0xe8, 0x47, 0x65, 0xd0, 0x13, 0x3d, 0xd8, 0xb1, 0xac, 0x52, 0x4f, 0xdd, 0xe4, 0x91, + 0x66, 0x8a, 0x08, 0x22, 0x15, 0x7a, 0x34, 0xdf, 0xd4, 0x75, 0x2e, 0x0f, 0x9d, 0xc6, 0x4f, 0x3d, + 0x2f, 0xd8, 0xf4, 0x56, 0x75, 0x4e, 0x3a, 0x1c, 0xc7, 0x04, 0x55, 0xe7, 0xdb, 0xaa, 0x5e, 0xda, + 0xe1, 0x7e, 0xab, 0x72, 0x6a, 0x88, 0xc1, 0x9a, 0xde, 0x54, 0x38, 0xee, 0xeb, 0xad, 0x46, 0xb7, + 0xeb, 0x24, 0x41, 0x35, 0x83, 0x7c, 0x5c, 0xfa, 0xe6, 0x70, 0xde, 0xb2, 0xba, 0x23, 0x2d, 0x73, + 0x68, 0x7d, 0x02, 0x29, 0x9a, 0xf5, 0x32, 0xb9, 0x7d, 0x1b, 0x77, 0xbb, 0x34, 0xa3, 0xaa, 0x8f, + 0x3e, 0x9d, 0x6f, 0x99, 0xec, 0x0e, 0xdd, 0x70, 0x2a, 0xbf, 0x4c, 0xe1, 0x88, 0x55, 0xe7, 0xd8, + 0xe1, 0x75, 0x96, 0x33, 0x85, 0x76, 0xe7, 0xcb, 0xb1, 0x85, 0x1f, 0x1a, 0x8d, 0xcf, 0x71, 0x58, + 0xb0, 0x6d, 0xfd, 0x7b, 0x11, 0x2c, 0x17, 0x8e, 0xb0, 0xf0, 0x17, 0xe0, 0xe6, 0xc8, 0x9f, 0x05, + 0x73, 0xd0, 0x9f, 0xd2, 0x60, 0x3e, 0xde, 0x73, 0x27, 0x01, 0xa7, 0x38, 0xd6, 0x02, 0xf8, 0x47, + 0xb0, 0x19, 0x0a, 0x62, 0x8e, 0x8f, 0xc1, 0x28, 0x6a, 0xc1, 0xb4, 0x36, 0xcf, 0x4b, 0x5a, 0x9b, + 0xef, 0x0b, 0xb0, 0x7d, 0xf3, 0x5b, 0xc3, 0xf4, 0x37, 0x93, 0xe6, 0xf6, 0x1d, 0xef, 0xa7, 0xf8, + 0x0c, 0xfe, 0xa9, 0x02, 0x10, 0x67, 0x52, 0x05, 0x61, 0x42, 0xc3, 0xd7, 0x63, 0x21, 0x2c, 0x5e, + 0x71, 0x08, 0x1b, 0xda, 0x53, 0x53, 0x3b, 0x1a, 0x89, 0xe1, 0x0f, 0xe0, 0x8e, 0x24, 0x58, 0x84, + 0xaf, 0x02, 0x2e, 0x48, 0x44, 0x43, 0x45, 0xa2, 0x20, 0x54, 0xc2, 0xfc, 0x67, 0xb8, 0x4a, 0xf7, + 0xd0, 0x7a, 0x69, 0x79, 0x27, 0x4d, 0x25, 0xb6, 0xfe, 0xbb, 0x08, 0xd6, 0x26, 0xfa, 0x31, 0xf8, + 0x2d, 0xd8, 0xe8, 0x52, 0x21, 0x55, 0xa0, 0x5f, 0x08, 0xd3, 0x73, 0xba, 0x03, 0x44, 0xa5, 0xfc, + 0x00, 0x01, 0x8d, 0xb2, 0x85, 0x63, 0xdd, 0xc0, 0xbb, 0x63, 0xc3, 0x29, 0x78, 0xc7, 0xf1, 0x7c, + 0x23, 0x59, 0x60, 0x2e, 0x94, 0x33, 0x37, 0x2d, 0xd3, 0x89, 0x87, 0xdc, 0xef, 0xc0, 0xa6, 0x62, + 0x3c, 0x60, 0xdd, 0x89, 0x40, 0x17, 0xe7, 0x08, 0x54, 0x31, 0xfe, 0x5d, 0x77, 0x34, 0xd0, 0x1c, + 0x3c, 0xf4, 0x2d, 0x6e, 0x14, 0xe0, 0x28, 0xb2, 0x65, 0x21, 0x03, 0xac, 0x82, 0xc9, 0x09, 0xb8, + 0x1f, 0x41, 0x33, 0x9d, 0x7c, 0x30, 0xc0, 0x35, 0xa2, 0xc8, 0x2c, 0xbd, 0x6c, 0xa8, 0x67, 0x63, + 0xb3, 0x81, 0x6f, 0xc0, 0xce, 0x98, 0x5b, 0x5d, 0x92, 0x17, 0x3b, 0xbd, 0x56, 0xee, 0xf4, 0xfe, + 0x88, 0x53, 0x26, 0xd5, 0xa4, 0xcb, 0xfd, 0x65, 0xb0, 0x34, 0xf8, 0x9b, 0xb6, 0xff, 0xe7, 0x05, + 0xf0, 0x20, 0x64, 0x69, 0x79, 0x33, 0xbf, 0xbf, 0x31, 0xde, 0xcd, 0xb7, 0xb4, 0xff, 0x56, 0xe5, + 0x37, 0xbf, 0x74, 0xda, 0x98, 0x25, 0x38, 0x8b, 0xab, 0x4c, 0xc4, 0xb5, 0x98, 0x64, 0x26, 0x3a, + 0xff, 0x43, 0x92, 0x53, 0x39, 0xe3, 0xc7, 0xf4, 0x17, 0x83, 0xab, 0xbf, 0x2f, 0x2c, 0x1e, 0x34, + 0x1a, 0xff, 0x58, 0xb8, 0x77, 0x60, 0x91, 0x8d, 0x48, 0x56, 0xed, 0xa5, 0xbe, 0x3a, 0xdd, 0xad, + 0xb6, 0xfd, 0xc8, 0x7f, 0xf9, 0x31, 0x67, 0x8d, 0x48, 0x9e, 0x0d, 0xc6, 0x9c, 0x9d, 0xee, 0x9e, + 0x0d, 0xc6, 0xfc, 0x67, 0xe1, 0x81, 0x7d, 0x50, 0xaf, 0x37, 0x22, 0x59, 0xaf, 0x0f, 0x46, 0xd5, + 0xeb, 0xa7, 0xbb, 0xf5, 0xfa, 0x60, 0x5c, 0xe7, 0xba, 0x09, 0xf6, 0xf1, 0xff, 0x02, 0x00, 0x00, + 0xff, 0xff, 0x2a, 0xaa, 0x4c, 0x2a, 0x44, 0x17, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_label.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_label.pb.go new file mode 100644 index 0000000..ac9f69a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_label.pb.go @@ -0,0 +1,116 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_criterion_label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A relationship between an ad group criterion and a label. +type AdGroupCriterionLabel struct { + // The resource name of the ad group criterion label. + // Ad group criterion label resource names have the form: + // + // `customers/{customer_id}/adGroupCriterionLabels/{ad_group_id}~{criterion_id}~{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ad group criterion to which the label is attached. + AdGroupCriterion *wrappers.StringValue `protobuf:"bytes,2,opt,name=ad_group_criterion,json=adGroupCriterion,proto3" json:"ad_group_criterion,omitempty"` + // The label assigned to the ad group criterion. 
+ Label *wrappers.StringValue `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionLabel) Reset() { *m = AdGroupCriterionLabel{} } +func (m *AdGroupCriterionLabel) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionLabel) ProtoMessage() {} +func (*AdGroupCriterionLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_889d4a773b2ff94f, []int{0} +} +func (m *AdGroupCriterionLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionLabel.Unmarshal(m, b) +} +func (m *AdGroupCriterionLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionLabel.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionLabel.Merge(dst, src) +} +func (m *AdGroupCriterionLabel) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionLabel.Size(m) +} +func (m *AdGroupCriterionLabel) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionLabel proto.InternalMessageInfo + +func (m *AdGroupCriterionLabel) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupCriterionLabel) GetAdGroupCriterion() *wrappers.StringValue { + if m != nil { + return m.AdGroupCriterion + } + return nil +} + +func (m *AdGroupCriterionLabel) GetLabel() *wrappers.StringValue { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*AdGroupCriterionLabel)(nil), "google.ads.googleads.v1.resources.AdGroupCriterionLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_criterion_label.proto", fileDescriptor_ad_group_criterion_label_889d4a773b2ff94f) +} + +var fileDescriptor_ad_group_criterion_label_889d4a773b2ff94f = []byte{ + // 343 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0x4a, 0xc3, 0x30, + 0x1c, 0xc6, 0x69, 0x87, 0x82, 0x55, 0x41, 0x0a, 0xc2, 0x18, 0x43, 0x36, 0x65, 0xb0, 0x53, 0x42, + 0xe7, 0x2d, 0x5e, 0xec, 0x3c, 0x0c, 0x86, 0xc8, 0x98, 0xd0, 0x83, 0x14, 0x4a, 0xb6, 0xc4, 0x50, + 0x68, 0x93, 0x90, 0xb4, 0xf3, 0x11, 0x7c, 0x0f, 0x8f, 0x3e, 0x83, 0x4f, 0xe0, 0xa3, 0xf8, 0x14, + 0xd2, 0xa6, 0xc9, 0x41, 0x45, 0xbd, 0x7d, 0x34, 0xdf, 0xff, 0xfb, 0x7e, 0xff, 0xfe, 0x83, 0x6b, + 0x26, 0x04, 0x2b, 0x28, 0xc4, 0x44, 0x43, 0x23, 0x1b, 0xb5, 0x8b, 0xa0, 0xa2, 0x5a, 0xd4, 0x6a, + 0x4b, 0x35, 0xc4, 0x24, 0x63, 0x4a, 0xd4, 0x32, 0xdb, 0xaa, 0xbc, 0xa2, 0x2a, 0x17, 0x3c, 0x2b, + 0xf0, 0x86, 0x16, 0x40, 0x2a, 0x51, 0x89, 0x70, 0x6c, 0xc6, 0x00, 0x26, 0x1a, 0xb8, 0x04, 0xb0, + 0x8b, 0x80, 0x4b, 0x18, 0x9c, 0x75, 0x25, 0xed, 0xc0, 0xa6, 0x7e, 0x84, 0x4f, 0x0a, 0x4b, 0x49, + 0x95, 0x36, 0x11, 0x83, 0xa1, 0x85, 0x90, 0x39, 0xc4, 0x9c, 0x8b, 0x0a, 0x57, 0xb9, 0xe0, 0xdd, + 0xeb, 0xf9, 0x9b, 0x17, 0x9c, 0xc6, 0x64, 0xd1, 0x20, 0xdc, 0x58, 0x82, 0xdb, 0x06, 0x20, 0xbc, + 0x08, 0x8e, 0x6d, 0x49, 0xc6, 0x71, 0x49, 0xfb, 0xde, 0xc8, 0x9b, 0x1e, 0xac, 0x8f, 0xec, 0xc7, + 0x3b, 0x5c, 0xd2, 0x70, 0x19, 0x84, 0xdf, 0x37, 0xe8, 0xfb, 0x23, 0x6f, 0x7a, 0x38, 0x1b, 0x76, + 0xc4, 0xc0, 0x92, 0x81, 0xfb, 0x4a, 0xe5, 0x9c, 0x25, 0xb8, 0xa8, 0xe9, 0xfa, 0x04, 0x7f, 0x69, + 0x0d, 0x67, 0xc1, 0x5e, 0xbb, 0x7a, 0xbf, 0xf7, 0x8f, 0x71, 0x63, 0x9d, 0x3f, 
0xfb, 0xc1, 0x64, + 0x2b, 0x4a, 0xf0, 0xe7, 0x6f, 0x9a, 0x0f, 0x7e, 0xdc, 0x72, 0xd5, 0x64, 0xaf, 0xbc, 0x87, 0x65, + 0x17, 0xc0, 0x44, 0x81, 0x39, 0x03, 0x42, 0x31, 0xc8, 0x28, 0x6f, 0x9b, 0xed, 0xe5, 0x64, 0xae, + 0x7f, 0x39, 0xe4, 0x95, 0x53, 0x2f, 0x7e, 0x6f, 0x11, 0xc7, 0xaf, 0xfe, 0x78, 0x61, 0x22, 0x63, + 0xa2, 0x81, 0x91, 0x8d, 0x4a, 0x22, 0xb0, 0xb6, 0xce, 0x77, 0xeb, 0x49, 0x63, 0xa2, 0x53, 0xe7, + 0x49, 0x93, 0x28, 0x75, 0x9e, 0x0f, 0x7f, 0x62, 0x1e, 0x10, 0x8a, 0x89, 0x46, 0xc8, 0xb9, 0x10, + 0x4a, 0x22, 0x84, 0x9c, 0x6f, 0xb3, 0xdf, 0xc2, 0x5e, 0x7e, 0x06, 0x00, 0x00, 0xff, 0xff, 0xcd, + 0x4f, 0x5d, 0xc5, 0x74, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_simulation.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_simulation.pb.go new file mode 100644 index 0000000..fcb0615 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_criterion_simulation.pb.go @@ -0,0 +1,256 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_criterion_simulation.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group criterion simulation. Supported combinations of advertising +// channel type, criterion types, simulation type, and simulation modification +// method are detailed below respectively. +// +// SEARCH KEYWORDS CPC_BID UNIFORM +type AdGroupCriterionSimulation struct { + // The resource name of the ad group criterion simulation. + // Ad group criterion simulation resource names have the form: + // + // + // `customers/{customer_id}/adGroupCriterionSimulations/{ad_group_id}~{criterion_id}~{type}~{modification_method}~{start_date}~{end_date}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // AdGroup ID of the simulation. + AdGroupId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=ad_group_id,json=adGroupId,proto3" json:"ad_group_id,omitempty"` + // Criterion ID of the simulation. + CriterionId *wrappers.Int64Value `protobuf:"bytes,3,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The field that the simulation modifies. + Type enums.SimulationTypeEnum_SimulationType `protobuf:"varint,4,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.SimulationTypeEnum_SimulationType" json:"type,omitempty"` + // How the simulation modifies the field. 
+ ModificationMethod enums.SimulationModificationMethodEnum_SimulationModificationMethod `protobuf:"varint,5,opt,name=modification_method,json=modificationMethod,proto3,enum=google.ads.googleads.v1.enums.SimulationModificationMethodEnum_SimulationModificationMethod" json:"modification_method,omitempty"` + // First day on which the simulation is based, in YYYY-MM-DD format. + StartDate *wrappers.StringValue `protobuf:"bytes,6,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Last day on which the simulation is based, in YYYY-MM-DD format. + EndDate *wrappers.StringValue `protobuf:"bytes,7,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // List of simulation points. + // + // Types that are valid to be assigned to PointList: + // *AdGroupCriterionSimulation_CpcBidPointList + PointList isAdGroupCriterionSimulation_PointList `protobuf_oneof:"point_list"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionSimulation) Reset() { *m = AdGroupCriterionSimulation{} } +func (m *AdGroupCriterionSimulation) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionSimulation) ProtoMessage() {} +func (*AdGroupCriterionSimulation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_simulation_a6c4c667ef018570, []int{0} +} +func (m *AdGroupCriterionSimulation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionSimulation.Unmarshal(m, b) +} +func (m *AdGroupCriterionSimulation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionSimulation.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionSimulation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionSimulation.Merge(dst, src) +} +func (m *AdGroupCriterionSimulation) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionSimulation.Size(m) +} +func (m *AdGroupCriterionSimulation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionSimulation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionSimulation proto.InternalMessageInfo + +func (m *AdGroupCriterionSimulation) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupCriterionSimulation) GetAdGroupId() *wrappers.Int64Value { + if m != nil { + return m.AdGroupId + } + return nil +} + +func (m *AdGroupCriterionSimulation) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *AdGroupCriterionSimulation) GetType() enums.SimulationTypeEnum_SimulationType { + if m != nil { + return m.Type + } + return enums.SimulationTypeEnum_UNSPECIFIED +} + +func (m *AdGroupCriterionSimulation) GetModificationMethod() enums.SimulationModificationMethodEnum_SimulationModificationMethod { + if m != nil { + return m.ModificationMethod + } + return enums.SimulationModificationMethodEnum_UNSPECIFIED +} + +func (m *AdGroupCriterionSimulation) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *AdGroupCriterionSimulation) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +type isAdGroupCriterionSimulation_PointList interface { + isAdGroupCriterionSimulation_PointList() +} + +type AdGroupCriterionSimulation_CpcBidPointList struct { + CpcBidPointList *common.CpcBidSimulationPointList 
`protobuf:"bytes,8,opt,name=cpc_bid_point_list,json=cpcBidPointList,proto3,oneof"` +} + +func (*AdGroupCriterionSimulation_CpcBidPointList) isAdGroupCriterionSimulation_PointList() {} + +func (m *AdGroupCriterionSimulation) GetPointList() isAdGroupCriterionSimulation_PointList { + if m != nil { + return m.PointList + } + return nil +} + +func (m *AdGroupCriterionSimulation) GetCpcBidPointList() *common.CpcBidSimulationPointList { + if x, ok := m.GetPointList().(*AdGroupCriterionSimulation_CpcBidPointList); ok { + return x.CpcBidPointList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AdGroupCriterionSimulation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupCriterionSimulation_OneofMarshaler, _AdGroupCriterionSimulation_OneofUnmarshaler, _AdGroupCriterionSimulation_OneofSizer, []interface{}{ + (*AdGroupCriterionSimulation_CpcBidPointList)(nil), + } +} + +func _AdGroupCriterionSimulation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupCriterionSimulation) + // point_list + switch x := m.PointList.(type) { + case *AdGroupCriterionSimulation_CpcBidPointList: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CpcBidPointList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AdGroupCriterionSimulation.PointList has unexpected type %T", x) + } + return nil +} + +func _AdGroupCriterionSimulation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupCriterionSimulation) + switch tag { + case 8: // point_list.cpc_bid_point_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CpcBidSimulationPointList) + err := b.DecodeMessage(msg) + m.PointList = &AdGroupCriterionSimulation_CpcBidPointList{msg} + return true, err + default: + return false, nil + } +} + +func _AdGroupCriterionSimulation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupCriterionSimulation) + // point_list + switch x := m.PointList.(type) { + case *AdGroupCriterionSimulation_CpcBidPointList: + s := proto.Size(x.CpcBidPointList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AdGroupCriterionSimulation)(nil), "google.ads.googleads.v1.resources.AdGroupCriterionSimulation") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_criterion_simulation.proto", fileDescriptor_ad_group_criterion_simulation_a6c4c667ef018570) +} + +var fileDescriptor_ad_group_criterion_simulation_a6c4c667ef018570 = []byte{ + // 557 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xdd, 0x6e, 0xd3, 0x3e, + 0x14, 0xff, 0xa7, 0xfb, 0xf6, 0xfa, 0x07, 0x29, 0xdc, 0x44, 0x65, 0x82, 0x0e, 0x34, 0xa9, 0x57, + 0x8e, 0xba, 0x21, 0x10, 0xa9, 0x84, 0x48, 0xc7, 0x54, 0x8a, 0x18, 0xaa, 0xb2, 0xa9, 0x17, 0xa8, + 0x52, 0xe4, 0xc6, 0x5e, 0x66, 0xa9, 0xb1, 0x2d, 0xdb, 0x19, 0xda, 0x3b, 0xc0, 0x0d, 0x8f, 0xc0, + 0x25, 0x8f, 0xc2, 0xa3, 0xf0, 0x14, 0x28, 0x4e, 0xe2, 0x4e, 0x2d, 0x65, 0xbd, 0x3b, 0x39, 0xe7, + 0xf7, 0xe1, 0xf3, 0x73, 0x5d, 0x70, 0x96, 0x72, 0x9e, 0xce, 0x88, 0x8f, 0xb0, 0xf2, 0xcb, 0xb2, + 0xa8, 0x6e, 0xba, 0xbe, 0x24, 0x8a, 
0xe7, 0x32, 0x21, 0xca, 0x47, 0x38, 0x4e, 0x25, 0xcf, 0x45, + 0x9c, 0x48, 0xaa, 0x89, 0xa4, 0x9c, 0xc5, 0x8a, 0x66, 0xf9, 0x0c, 0x69, 0xca, 0x19, 0x14, 0x92, + 0x6b, 0xee, 0x1e, 0x96, 0x5c, 0x88, 0xb0, 0x82, 0x56, 0x06, 0xde, 0x74, 0xa1, 0x95, 0x69, 0xf9, + 0xab, 0x9c, 0x12, 0x9e, 0x65, 0x9c, 0xf9, 0x8b, 0x9a, 0xad, 0xfe, 0x2a, 0x02, 0x61, 0x79, 0xa6, + 0xee, 0xe0, 0xe3, 0x8c, 0x63, 0x7a, 0x45, 0x93, 0xea, 0x83, 0xe8, 0x6b, 0x8e, 0x2b, 0x8d, 0x93, + 0xb5, 0x35, 0xf4, 0xad, 0x20, 0x15, 0xe9, 0x49, 0x45, 0x32, 0x5f, 0xd3, 0xfc, 0xca, 0xff, 0x22, + 0x91, 0x10, 0x44, 0xaa, 0x6a, 0x7e, 0x50, 0x8b, 0x0a, 0xea, 0x23, 0xc6, 0xb8, 0x36, 0x0a, 0xd5, + 0xf4, 0xd9, 0xf7, 0x2d, 0xd0, 0x0a, 0xf1, 0xa0, 0x48, 0xec, 0xb4, 0x0e, 0xec, 0xc2, 0xfa, 0xb8, + 0xcf, 0xc1, 0xff, 0x75, 0x26, 0x31, 0x43, 0x19, 0xf1, 0x9c, 0xb6, 0xd3, 0xd9, 0x8b, 0x9a, 0x75, + 0xf3, 0x13, 0xca, 0x88, 0xdb, 0x03, 0xfb, 0x36, 0x75, 0x8a, 0xbd, 0x46, 0xdb, 0xe9, 0xec, 0x1f, + 0x3f, 0xae, 0x92, 0x85, 0xf5, 0xb9, 0xe0, 0x90, 0xe9, 0x97, 0x2f, 0xc6, 0x68, 0x96, 0x93, 0x68, + 0x0f, 0x95, 0x96, 0x43, 0xec, 0xbe, 0x01, 0xcd, 0xf9, 0x4d, 0x51, 0xec, 0x6d, 0xdc, 0xcf, 0xde, + 0xb7, 0x84, 0x21, 0x76, 0x2f, 0xc1, 0x66, 0x11, 0x86, 0xb7, 0xd9, 0x76, 0x3a, 0x0f, 0x8e, 0xdf, + 0xc2, 0x55, 0x57, 0x6b, 0x22, 0x84, 0xf3, 0xd5, 0x2e, 0x6f, 0x05, 0x39, 0x63, 0x79, 0xb6, 0xd0, + 0x8a, 0x8c, 0x9a, 0xfb, 0xcd, 0x01, 0x8f, 0xfe, 0x72, 0x4f, 0xde, 0x96, 0x71, 0x99, 0xac, 0xed, + 0x72, 0x7e, 0x47, 0xe3, 0xdc, 0x48, 0x2c, 0x78, 0x2e, 0x03, 0x22, 0x37, 0x5b, 0xea, 0xb9, 0x3d, + 0x00, 0x94, 0x46, 0x52, 0xc7, 0x18, 0x69, 0xe2, 0x6d, 0x9b, 0x8c, 0x0e, 0x96, 0x32, 0xba, 0xd0, + 0x92, 0xb2, 0xb4, 0x8a, 0xd8, 0xe0, 0xdf, 0x21, 0x4d, 0xdc, 0x57, 0x60, 0x97, 0x30, 0x5c, 0x52, + 0x77, 0xd6, 0xa0, 0xee, 0x10, 0x86, 0x0d, 0xf1, 0x1a, 0xb8, 0x89, 0x48, 0xe2, 0x29, 0xc5, 0xb1, + 0xe0, 0x94, 0xe9, 0x78, 0x46, 0x95, 0xf6, 0x76, 0x8d, 0xc4, 0xeb, 0x95, 0x19, 0x94, 0x2f, 0x04, + 0x9e, 0x8a, 0xa4, 0x4f, 0xf1, 0x7c, 0xd3, 0x51, 0xa1, 0xf0, 0x91, 0x2a, 0xfd, 0xfe, 0xbf, 0xe8, + 0x61, 0x62, 0x86, 0xb6, 0xd5, 0x6f, 0x02, 0x30, 0x77, 0xe8, 0x7f, 0x6d, 0x80, 0xa3, 0x84, 0x67, + 0xf0, 0xde, 0x67, 0xda, 0x7f, 0xba, 0xfa, 0xb7, 0x3b, 0x2a, 0x56, 0x1b, 0x39, 0x9f, 0x3f, 0x54, + 0x2a, 0x29, 0x9f, 0x21, 0x96, 0x42, 0x2e, 0x53, 0x3f, 0x25, 0xcc, 0x2c, 0x5e, 0x3f, 0x32, 0x41, + 0xd5, 0x3f, 0xfe, 0x52, 0x7a, 0xb6, 0xfa, 0xd1, 0xd8, 0x18, 0x84, 0xe1, 0xcf, 0xc6, 0xe1, 0xa0, + 0x94, 0x0c, 0xb1, 0x82, 0x65, 0x59, 0x54, 0xe3, 0x2e, 0x8c, 0x6a, 0xe4, 0xaf, 0x1a, 0x33, 0x09, + 0xb1, 0x9a, 0x58, 0xcc, 0x64, 0xdc, 0x9d, 0x58, 0xcc, 0xef, 0xc6, 0x51, 0x39, 0x08, 0x82, 0x10, + 0xab, 0x20, 0xb0, 0xa8, 0x20, 0x18, 0x77, 0x83, 0xc0, 0xe2, 0xa6, 0xdb, 0xe6, 0xb0, 0x27, 0x7f, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x26, 0x19, 0xf6, 0x17, 0xfe, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_extension_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_extension_setting.pb.go new file mode 100644 index 0000000..cb414ba --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_extension_setting.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/ad_group_extension_setting.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group extension setting. +type AdGroupExtensionSetting struct { + // The resource name of the ad group extension setting. + // AdGroupExtensionSetting resource names have the form: + // + // + // `customers/{customer_id}/adGroupExtensionSettings/{ad_group_id}~{extension_type}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The extension type of the ad group extension setting. + ExtensionType enums.ExtensionTypeEnum_ExtensionType `protobuf:"varint,2,opt,name=extension_type,json=extensionType,proto3,enum=google.ads.googleads.v1.enums.ExtensionTypeEnum_ExtensionType" json:"extension_type,omitempty"` + // The resource name of the ad group. The linked extension feed items will + // serve under this ad group. + // AdGroup resource names have the form: + // + // `customers/{customer_id}/adGroups/{ad_group_id}` + AdGroup *wrappers.StringValue `protobuf:"bytes,3,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The resource names of the extension feed items to serve under the ad group. + // ExtensionFeedItem resource names have the form: + // + // `customers/{customer_id}/extensionFeedItems/{feed_item_id}` + ExtensionFeedItems []*wrappers.StringValue `protobuf:"bytes,4,rep,name=extension_feed_items,json=extensionFeedItems,proto3" json:"extension_feed_items,omitempty"` + // The device for which the extensions will serve. Optional. 
+ Device enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice `protobuf:"varint,5,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice" json:"device,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupExtensionSetting) Reset() { *m = AdGroupExtensionSetting{} } +func (m *AdGroupExtensionSetting) String() string { return proto.CompactTextString(m) } +func (*AdGroupExtensionSetting) ProtoMessage() {} +func (*AdGroupExtensionSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_7412d90d5487962e, []int{0} +} +func (m *AdGroupExtensionSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupExtensionSetting.Unmarshal(m, b) +} +func (m *AdGroupExtensionSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupExtensionSetting.Marshal(b, m, deterministic) +} +func (dst *AdGroupExtensionSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupExtensionSetting.Merge(dst, src) +} +func (m *AdGroupExtensionSetting) XXX_Size() int { + return xxx_messageInfo_AdGroupExtensionSetting.Size(m) +} +func (m *AdGroupExtensionSetting) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupExtensionSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupExtensionSetting proto.InternalMessageInfo + +func (m *AdGroupExtensionSetting) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupExtensionSetting) GetExtensionType() enums.ExtensionTypeEnum_ExtensionType { + if m != nil { + return m.ExtensionType + } + return enums.ExtensionTypeEnum_UNSPECIFIED +} + +func (m *AdGroupExtensionSetting) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupExtensionSetting) GetExtensionFeedItems() []*wrappers.StringValue { + if m != nil { + return m.ExtensionFeedItems + } + return nil +} + +func (m *AdGroupExtensionSetting) GetDevice() enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice { + if m != nil { + return m.Device + } + return enums.ExtensionSettingDeviceEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*AdGroupExtensionSetting)(nil), "google.ads.googleads.v1.resources.AdGroupExtensionSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_extension_setting.proto", fileDescriptor_ad_group_extension_setting_7412d90d5487962e) +} + +var fileDescriptor_ad_group_extension_setting_7412d90d5487962e = []byte{ + // 449 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x8a, 0xd4, 0x30, + 0x14, 0xa5, 0x1d, 0x5d, 0xb5, 0xba, 0xfb, 0x50, 0x04, 0xcb, 0x32, 0xc8, 0xac, 0xb2, 0x30, 0x4f, + 0x29, 0x1d, 0x1f, 0x84, 0x2a, 0x42, 0x07, 0xd7, 0x41, 0x1f, 0x96, 0xa1, 0x2b, 0xf3, 0x20, 0x03, + 0x25, 0x3b, 0xb9, 0x1b, 0x03, 0xd3, 0x24, 0x24, 0xe9, 0xe8, 0x7e, 0x82, 0x7f, 0xe0, 0xb3, 0x8f, + 0x7e, 0x8a, 0x9f, 0xe2, 0x57, 0x48, 0x9b, 0x26, 0x3a, 0xc8, 0xec, 0xce, 0xdb, 0x69, 0xee, 0x39, + 0xe7, 0xde, 0x93, 0x9b, 0x46, 0x53, 0x2a, 0x04, 0x5d, 0x43, 0x8a, 0x89, 0x4e, 0x2d, 0x6c, 0xd1, + 0x26, 0x4b, 0x15, 0x68, 0xd1, 0xa8, 0x15, 0xe8, 0x14, 0x93, 0x8a, 0x2a, 0xd1, 0xc8, 0x0a, 0xbe, + 0x1a, 0xe0, 0x9a, 0x09, 0x5e, 0x69, 0x30, 0x86, 0x71, 0x8a, 0xa4, 0x12, 0x46, 0xc4, 0x27, 0x56, + 0x88, 0x30, 0xd1, 0xc8, 0x7b, 0xa0, 0x4d, 0x86, 0xbc, 0xc7, 0xf1, 0xeb, 0x5d, 0x6d, 0x80, 0x37, 
+ 0xb5, 0x4e, 0xff, 0x73, 0xae, 0x08, 0x6c, 0xd8, 0x0a, 0x6c, 0x83, 0xe3, 0xc9, 0xbe, 0x6a, 0x73, + 0x2d, 0x9d, 0xe6, 0x69, 0xaf, 0xe9, 0xbe, 0x2e, 0x9b, 0xab, 0xf4, 0x8b, 0xc2, 0x52, 0x82, 0xd2, + 0x7d, 0x7d, 0xe8, 0x3c, 0x25, 0x4b, 0x31, 0xe7, 0xc2, 0x60, 0xc3, 0x04, 0xef, 0xab, 0xcf, 0xbe, + 0x0f, 0xa2, 0x27, 0x05, 0x99, 0xb5, 0xb1, 0xcf, 0x9c, 0xfb, 0x85, 0x1d, 0x2d, 0x7e, 0x1e, 0x1d, + 0xba, 0x60, 0x15, 0xc7, 0x35, 0x24, 0xc1, 0x28, 0x18, 0x3f, 0x28, 0x1f, 0xb9, 0xc3, 0x73, 0x5c, + 0x43, 0x0c, 0xd1, 0xd1, 0xf6, 0x58, 0x49, 0x38, 0x0a, 0xc6, 0x47, 0x93, 0x37, 0x68, 0xd7, 0x65, + 0x75, 0x59, 0x90, 0xef, 0xf6, 0xf1, 0x5a, 0xc2, 0x19, 0x6f, 0xea, 0xed, 0x93, 0xf2, 0x10, 0xfe, + 0xfd, 0x8c, 0x5f, 0x46, 0xf7, 0xdd, 0x7a, 0x92, 0xc1, 0x28, 0x18, 0x3f, 0x9c, 0x0c, 0x5d, 0x03, + 0x17, 0x1c, 0x5d, 0x18, 0xc5, 0x38, 0x5d, 0xe0, 0x75, 0x03, 0xe5, 0x3d, 0x6c, 0x43, 0xc5, 0xe7, + 0xd1, 0xe3, 0xbf, 0xf3, 0x5d, 0x01, 0x90, 0x8a, 0x19, 0xa8, 0x75, 0x72, 0x67, 0x34, 0xb8, 0xd5, + 0x24, 0xf6, 0xca, 0x77, 0x00, 0xe4, 0x7d, 0xab, 0x8b, 0x3f, 0x47, 0x07, 0x76, 0x65, 0xc9, 0xdd, + 0x2e, 0xe7, 0x7c, 0xdf, 0x9c, 0xfd, 0xad, 0xbe, 0xed, 0xc4, 0xdb, 0x81, 0xb7, 0x4a, 0x65, 0xef, + 0x3f, 0xfd, 0x16, 0x46, 0xa7, 0x2b, 0x51, 0xa3, 0x5b, 0x1f, 0xdd, 0x74, 0xb8, 0x63, 0x83, 0xf3, + 0x36, 0xd4, 0x3c, 0xf8, 0xf4, 0xa1, 0xb7, 0xa0, 0x62, 0x8d, 0x39, 0x45, 0x42, 0xd1, 0x94, 0x02, + 0xef, 0x22, 0xbb, 0x67, 0x26, 0x99, 0xbe, 0xe1, 0xd7, 0x78, 0xe5, 0xd1, 0x8f, 0x70, 0x30, 0x2b, + 0x8a, 0x9f, 0xe1, 0xc9, 0xcc, 0x5a, 0x16, 0x44, 0x23, 0x0b, 0x5b, 0xb4, 0xc8, 0x50, 0xe9, 0x98, + 0xbf, 0x1c, 0x67, 0x59, 0x10, 0xbd, 0xf4, 0x9c, 0xe5, 0x22, 0x5b, 0x7a, 0xce, 0xef, 0xf0, 0xd4, + 0x16, 0xf2, 0xbc, 0x20, 0x3a, 0xcf, 0x3d, 0x2b, 0xcf, 0x17, 0x59, 0x9e, 0x7b, 0xde, 0xe5, 0x41, + 0x37, 0xec, 0x8b, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x3a, 0xcb, 0x94, 0xc7, 0xc6, 0x03, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_feed.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_feed.pb.go new file mode 100644 index 0000000..96e0820 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_feed.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_feed.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group feed. +type AdGroupFeed struct { + // The resource name of the ad group feed. 
+ // Ad group feed resource names have the form: + // + // `customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id} + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed being linked to the ad group. + Feed *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed,proto3" json:"feed,omitempty"` + // The ad group being linked to the feed. + AdGroup *wrappers.StringValue `protobuf:"bytes,3,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // Indicates which placeholder types the feed may populate under the connected + // ad group. Required. + PlaceholderTypes []enums.PlaceholderTypeEnum_PlaceholderType `protobuf:"varint,4,rep,packed,name=placeholder_types,json=placeholderTypes,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType" json:"placeholder_types,omitempty"` + // Matching function associated with the AdGroupFeed. + // The matching function is used to filter the set of feed items selected. + // Required. + MatchingFunction *common.MatchingFunction `protobuf:"bytes,5,opt,name=matching_function,json=matchingFunction,proto3" json:"matching_function,omitempty"` + // Status of the ad group feed. + // This field is read-only. + Status enums.FeedLinkStatusEnum_FeedLinkStatus `protobuf:"varint,6,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedLinkStatusEnum_FeedLinkStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupFeed) Reset() { *m = AdGroupFeed{} } +func (m *AdGroupFeed) String() string { return proto.CompactTextString(m) } +func (*AdGroupFeed) ProtoMessage() {} +func (*AdGroupFeed) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_182dd6d2597b72d9, []int{0} +} +func (m *AdGroupFeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupFeed.Unmarshal(m, b) +} +func (m *AdGroupFeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupFeed.Marshal(b, m, deterministic) +} +func (dst *AdGroupFeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupFeed.Merge(dst, src) +} +func (m *AdGroupFeed) XXX_Size() int { + return xxx_messageInfo_AdGroupFeed.Size(m) +} +func (m *AdGroupFeed) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupFeed.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupFeed proto.InternalMessageInfo + +func (m *AdGroupFeed) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupFeed) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *AdGroupFeed) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupFeed) GetPlaceholderTypes() []enums.PlaceholderTypeEnum_PlaceholderType { + if m != nil { + return m.PlaceholderTypes + } + return nil +} + +func (m *AdGroupFeed) GetMatchingFunction() *common.MatchingFunction { + if m != nil { + return m.MatchingFunction + } + return nil +} + +func (m *AdGroupFeed) GetStatus() enums.FeedLinkStatusEnum_FeedLinkStatus { + if m != nil { + return m.Status + } + return enums.FeedLinkStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*AdGroupFeed)(nil), "google.ads.googleads.v1.resources.AdGroupFeed") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_feed.proto", fileDescriptor_ad_group_feed_182dd6d2597b72d9) +} + +var 
fileDescriptor_ad_group_feed_182dd6d2597b72d9 = []byte{ + // 489 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xdd, 0x6a, 0xd4, 0x40, + 0x14, 0xc7, 0xc9, 0x6e, 0x5d, 0x75, 0xaa, 0x65, 0x9b, 0xab, 0x50, 0x8a, 0x6c, 0x95, 0xc2, 0x5e, + 0xcd, 0x34, 0xeb, 0x17, 0xc4, 0x1b, 0xb3, 0x60, 0x17, 0x44, 0x65, 0x49, 0x65, 0x11, 0x59, 0x09, + 0xd3, 0xcc, 0xec, 0x34, 0x34, 0xf3, 0xc1, 0x4c, 0x52, 0xe9, 0xeb, 0x78, 0xe9, 0x8b, 0x08, 0x3e, + 0x8a, 0xef, 0x20, 0x48, 0x32, 0x99, 0x68, 0x57, 0xb6, 0xed, 0xdd, 0xd9, 0x33, 0xff, 0xdf, 0xd9, + 0x73, 0xce, 0xff, 0x04, 0x3c, 0x67, 0x52, 0xb2, 0x82, 0x22, 0x4c, 0x0c, 0xb2, 0x61, 0x1d, 0x5d, + 0x84, 0x48, 0x53, 0x23, 0x2b, 0x9d, 0x51, 0x83, 0x30, 0x49, 0x99, 0x96, 0x95, 0x4a, 0x57, 0x94, + 0x12, 0xa8, 0xb4, 0x2c, 0xa5, 0x7f, 0x60, 0xb5, 0x10, 0x13, 0x03, 0x3b, 0x0c, 0x5e, 0x84, 0xb0, + 0xc3, 0xf6, 0x5e, 0x6c, 0xaa, 0x9c, 0x49, 0xce, 0xa5, 0x40, 0x1c, 0x97, 0xd9, 0x59, 0x2e, 0x58, + 0xba, 0xaa, 0x44, 0x56, 0xe6, 0x52, 0xd8, 0xd2, 0x7b, 0xcf, 0x36, 0x71, 0x54, 0x54, 0xdc, 0xa0, + 0xba, 0x89, 0xb4, 0xc8, 0xc5, 0x79, 0x6a, 0x4a, 0x5c, 0x56, 0xe6, 0x76, 0x94, 0x2a, 0x70, 0x46, + 0xcf, 0x64, 0x41, 0xa8, 0x4e, 0xcb, 0x4b, 0x45, 0x5b, 0xea, 0x51, 0x4b, 0x35, 0xbf, 0x4e, 0xab, + 0x15, 0xfa, 0xaa, 0xb1, 0x52, 0x54, 0xbb, 0xaa, 0xfb, 0xae, 0xaa, 0xca, 0x11, 0x16, 0x42, 0x96, + 0xb8, 0x6e, 0xb4, 0x7d, 0x7d, 0xfc, 0xa3, 0x0f, 0xb6, 0x63, 0x32, 0xab, 0x77, 0x73, 0x4c, 0x29, + 0xf1, 0x9f, 0x80, 0x87, 0x6e, 0xfc, 0x54, 0x60, 0x4e, 0x03, 0x6f, 0xe4, 0x8d, 0xef, 0x27, 0x0f, + 0x5c, 0xf2, 0x03, 0xe6, 0xd4, 0x3f, 0x02, 0x5b, 0xf5, 0x08, 0x41, 0x6f, 0xe4, 0x8d, 0xb7, 0x27, + 0xfb, 0xed, 0xf6, 0xa0, 0xeb, 0x00, 0x9e, 0x94, 0x3a, 0x17, 0x6c, 0x81, 0x8b, 0x8a, 0x26, 0x8d, + 0xd2, 0x7f, 0x09, 0xee, 0x39, 0x0b, 0x82, 0xfe, 0x2d, 0xa8, 0xbb, 0xd8, 0xf6, 0xe4, 0x4b, 0xb0, + 0xbb, 0x3e, 0xb7, 0x09, 0xb6, 0x46, 0xfd, 0xf1, 0xce, 0x64, 0x0a, 0x37, 0x19, 0xd8, 0xec, 0x0b, + 0xce, 0xff, 0x72, 0x1f, 0x2f, 0x15, 0x7d, 0x23, 0x2a, 0xbe, 0x9e, 0x4b, 0x86, 0xea, 0x6a, 0xc2, + 0xf8, 0x5f, 0xc0, 0xee, 0x7f, 0xae, 0x06, 0x77, 0x9a, 0x96, 0x8f, 0x36, 0xfe, 0xa1, 0x3d, 0x07, + 0xf8, 0xbe, 0x05, 0x8f, 0x5b, 0x2e, 0x19, 0xf2, 0xb5, 0x8c, 0xff, 0x09, 0x0c, 0xac, 0xe7, 0xc1, + 0x60, 0xe4, 0x8d, 0x77, 0x26, 0xaf, 0x6f, 0x18, 0xa2, 0x36, 0xe5, 0x5d, 0x2e, 0xce, 0x4f, 0x1a, + 0xa8, 0x99, 0xe1, 0x6a, 0x2a, 0x69, 0xeb, 0x4d, 0x7f, 0x7b, 0xe0, 0x30, 0x93, 0x1c, 0xde, 0x78, + 0xd5, 0xd3, 0xe1, 0x3f, 0x86, 0xcf, 0xeb, 0xed, 0xcf, 0xbd, 0xcf, 0x6f, 0x5b, 0x8c, 0xc9, 0x02, + 0x0b, 0x06, 0xa5, 0x66, 0x88, 0x51, 0xd1, 0x78, 0xe3, 0x6e, 0x51, 0xe5, 0xe6, 0x9a, 0x4f, 0xec, + 0x55, 0x17, 0x7d, 0xeb, 0xf5, 0x67, 0x71, 0xfc, 0xbd, 0x77, 0x30, 0xb3, 0x25, 0x63, 0x62, 0xa0, + 0x0d, 0xeb, 0x68, 0x11, 0xc2, 0xc4, 0x29, 0x7f, 0x3a, 0xcd, 0x32, 0x26, 0x66, 0xd9, 0x69, 0x96, + 0x8b, 0x70, 0xd9, 0x69, 0x7e, 0xf5, 0x0e, 0xed, 0x43, 0x14, 0xc5, 0xc4, 0x44, 0x51, 0xa7, 0x8a, + 0xa2, 0x45, 0x18, 0x45, 0x9d, 0xee, 0x74, 0xd0, 0x34, 0xfb, 0xf4, 0x4f, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xec, 0x0c, 0x2f, 0x34, 0x0e, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_label.pb.go new file mode 100644 index 0000000..af68ab5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_label.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/ad_group_label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A relationship between an ad group and a label. +type AdGroupLabel struct { + // The resource name of the ad group label. + // Ad group label resource names have the form: + // `customers/{customer_id}/adGroupLabels/{ad_group_id}~{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ad group to which the label is attached. + AdGroup *wrappers.StringValue `protobuf:"bytes,2,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The label assigned to the ad group. + Label *wrappers.StringValue `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupLabel) Reset() { *m = AdGroupLabel{} } +func (m *AdGroupLabel) String() string { return proto.CompactTextString(m) } +func (*AdGroupLabel) ProtoMessage() {} +func (*AdGroupLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_0fa746bd05e2b50f, []int{0} +} +func (m *AdGroupLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupLabel.Unmarshal(m, b) +} +func (m *AdGroupLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupLabel.Marshal(b, m, deterministic) +} +func (dst *AdGroupLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupLabel.Merge(dst, src) +} +func (m *AdGroupLabel) XXX_Size() int { + return xxx_messageInfo_AdGroupLabel.Size(m) +} +func (m *AdGroupLabel) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupLabel proto.InternalMessageInfo + +func (m *AdGroupLabel) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupLabel) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *AdGroupLabel) GetLabel() *wrappers.StringValue { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*AdGroupLabel)(nil), "google.ads.googleads.v1.resources.AdGroupLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_label.proto", fileDescriptor_ad_group_label_0fa746bd05e2b50f) +} + +var fileDescriptor_ad_group_label_0fa746bd05e2b50f = []byte{ + // 328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0x4f, 0x4b, 0xf3, 0x30, + 0x1c, 0xc7, 0x69, 0xc7, 0xf3, 0xa8, 0x75, 0x1e, 0xec, 0x69, 0x8c, 0x21, 0x9b, 0x32, 0xd8, 0x29, + 0xa1, 0x13, 0x14, 0xe2, 0xa9, 0xbb, 0x0c, 0x44, 0x64, 0x4c, 0xe8, 0x41, 0x0a, 
0xe3, 0xb7, 0x25, + 0x86, 0x42, 0x9b, 0x94, 0xa4, 0x9d, 0x6f, 0x47, 0x3c, 0xfa, 0x52, 0x7c, 0x29, 0xbe, 0x08, 0x91, + 0x36, 0x4d, 0xf0, 0xa4, 0xde, 0xbe, 0x34, 0x9f, 0xef, 0x9f, 0x34, 0xc1, 0x15, 0x97, 0x92, 0xe7, + 0x0c, 0x03, 0xd5, 0xd8, 0xc8, 0x46, 0xed, 0x23, 0xac, 0x98, 0x96, 0xb5, 0xda, 0x31, 0x8d, 0x81, + 0x6e, 0xb8, 0x92, 0x75, 0xb9, 0xc9, 0x61, 0xcb, 0x72, 0x54, 0x2a, 0x59, 0xc9, 0x70, 0x62, 0x60, + 0x04, 0x54, 0x23, 0xe7, 0x43, 0xfb, 0x08, 0x39, 0xdf, 0xf0, 0xac, 0x8b, 0x6e, 0x0d, 0xdb, 0xfa, + 0x09, 0x3f, 0x2b, 0x28, 0x4b, 0xa6, 0xb4, 0x89, 0x18, 0x8e, 0x6c, 0x75, 0x99, 0x61, 0x10, 0x42, + 0x56, 0x50, 0x65, 0x52, 0x74, 0xa7, 0xe7, 0x2f, 0x5e, 0xd0, 0x8f, 0xe9, 0xb2, 0x29, 0xbe, 0x6b, + 0x7a, 0xc3, 0x8b, 0xe0, 0xc4, 0x66, 0x6f, 0x04, 0x14, 0x6c, 0xe0, 0x8d, 0xbd, 0xd9, 0xd1, 0xba, + 0x6f, 0x3f, 0xde, 0x43, 0xc1, 0xc2, 0xeb, 0xe0, 0xd0, 0xce, 0x1d, 0xf8, 0x63, 0x6f, 0x76, 0x3c, + 0x1f, 0x75, 0xf3, 0x90, 0x9d, 0x81, 0x1e, 0x2a, 0x95, 0x09, 0x9e, 0x40, 0x5e, 0xb3, 0xf5, 0x01, + 0x98, 0x8a, 0x70, 0x1e, 0xfc, 0x6b, 0xaf, 0x37, 0xe8, 0xfd, 0xc1, 0x65, 0xd0, 0xc5, 0xa7, 0x17, + 0x4c, 0x77, 0xb2, 0x40, 0xbf, 0xfe, 0x8a, 0xc5, 0xe9, 0xf7, 0x9b, 0xac, 0x9a, 0xc8, 0x95, 0xf7, + 0x78, 0xdb, 0xf9, 0xb8, 0xcc, 0x41, 0x70, 0x24, 0x15, 0xc7, 0x9c, 0x89, 0xb6, 0xd0, 0x3e, 0x45, + 0x99, 0xe9, 0x1f, 0x5e, 0xe6, 0xc6, 0xa9, 0x57, 0xbf, 0xb7, 0x8c, 0xe3, 0x37, 0x7f, 0xb2, 0x34, + 0x91, 0x31, 0xd5, 0xc8, 0xc8, 0x46, 0x25, 0x11, 0x5a, 0x5b, 0xf2, 0xdd, 0x32, 0x69, 0x4c, 0x75, + 0xea, 0x98, 0x34, 0x89, 0x52, 0xc7, 0x7c, 0xf8, 0x53, 0x73, 0x40, 0x48, 0x4c, 0x35, 0x21, 0x8e, + 0x22, 0x24, 0x89, 0x08, 0x71, 0xdc, 0xf6, 0x7f, 0x3b, 0xf6, 0xf2, 0x2b, 0x00, 0x00, 0xff, 0xff, + 0x20, 0xb4, 0xcf, 0x4d, 0x45, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_simulation.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_simulation.pb.go new file mode 100644 index 0000000..d806d58 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_group_simulation.pb.go @@ -0,0 +1,286 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_group_simulation.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad group simulation. Supported combinations of advertising +// channel type, simulation type and simulation modification method is +// detailed below respectively. 
+// +// SEARCH CPC_BID DEFAULT +// SEARCH CPC_BID UNIFORM +// SEARCH TARGET_CPA UNIFORM +// DISPLAY CPC_BID DEFAULT +// DISPLAY CPC_BID UNIFORM +// DISPLAY TARGET_CPA UNIFORM +type AdGroupSimulation struct { + // The resource name of the ad group simulation. + // Ad group simulation resource names have the form: + // + // + // `customers/{customer_id}/adGroupSimulations/{ad_group_id}~{type}~{modification_method}~{start_date}~{end_date}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Ad group id of the simulation. + AdGroupId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=ad_group_id,json=adGroupId,proto3" json:"ad_group_id,omitempty"` + // The field that the simulation modifies. + Type enums.SimulationTypeEnum_SimulationType `protobuf:"varint,3,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.SimulationTypeEnum_SimulationType" json:"type,omitempty"` + // How the simulation modifies the field. + ModificationMethod enums.SimulationModificationMethodEnum_SimulationModificationMethod `protobuf:"varint,4,opt,name=modification_method,json=modificationMethod,proto3,enum=google.ads.googleads.v1.enums.SimulationModificationMethodEnum_SimulationModificationMethod" json:"modification_method,omitempty"` + // First day on which the simulation is based, in YYYY-MM-DD format. + StartDate *wrappers.StringValue `protobuf:"bytes,5,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Last day on which the simulation is based, in YYYY-MM-DD format + EndDate *wrappers.StringValue `protobuf:"bytes,6,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // List of simulation points. + // + // Types that are valid to be assigned to PointList: + // *AdGroupSimulation_CpcBidPointList + // *AdGroupSimulation_TargetCpaPointList + PointList isAdGroupSimulation_PointList `protobuf_oneof:"point_list"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupSimulation) Reset() { *m = AdGroupSimulation{} } +func (m *AdGroupSimulation) String() string { return proto.CompactTextString(m) } +func (*AdGroupSimulation) ProtoMessage() {} +func (*AdGroupSimulation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_simulation_a578bba85f881904, []int{0} +} +func (m *AdGroupSimulation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupSimulation.Unmarshal(m, b) +} +func (m *AdGroupSimulation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupSimulation.Marshal(b, m, deterministic) +} +func (dst *AdGroupSimulation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupSimulation.Merge(dst, src) +} +func (m *AdGroupSimulation) XXX_Size() int { + return xxx_messageInfo_AdGroupSimulation.Size(m) +} +func (m *AdGroupSimulation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupSimulation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupSimulation proto.InternalMessageInfo + +func (m *AdGroupSimulation) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdGroupSimulation) GetAdGroupId() *wrappers.Int64Value { + if m != nil { + return m.AdGroupId + } + return nil +} + +func (m *AdGroupSimulation) GetType() enums.SimulationTypeEnum_SimulationType { + if m != nil { + return m.Type + } + return enums.SimulationTypeEnum_UNSPECIFIED +} + +func (m *AdGroupSimulation) GetModificationMethod() 
enums.SimulationModificationMethodEnum_SimulationModificationMethod { + if m != nil { + return m.ModificationMethod + } + return enums.SimulationModificationMethodEnum_UNSPECIFIED +} + +func (m *AdGroupSimulation) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *AdGroupSimulation) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +type isAdGroupSimulation_PointList interface { + isAdGroupSimulation_PointList() +} + +type AdGroupSimulation_CpcBidPointList struct { + CpcBidPointList *common.CpcBidSimulationPointList `protobuf:"bytes,8,opt,name=cpc_bid_point_list,json=cpcBidPointList,proto3,oneof"` +} + +type AdGroupSimulation_TargetCpaPointList struct { + TargetCpaPointList *common.TargetCpaSimulationPointList `protobuf:"bytes,9,opt,name=target_cpa_point_list,json=targetCpaPointList,proto3,oneof"` +} + +func (*AdGroupSimulation_CpcBidPointList) isAdGroupSimulation_PointList() {} + +func (*AdGroupSimulation_TargetCpaPointList) isAdGroupSimulation_PointList() {} + +func (m *AdGroupSimulation) GetPointList() isAdGroupSimulation_PointList { + if m != nil { + return m.PointList + } + return nil +} + +func (m *AdGroupSimulation) GetCpcBidPointList() *common.CpcBidSimulationPointList { + if x, ok := m.GetPointList().(*AdGroupSimulation_CpcBidPointList); ok { + return x.CpcBidPointList + } + return nil +} + +func (m *AdGroupSimulation) GetTargetCpaPointList() *common.TargetCpaSimulationPointList { + if x, ok := m.GetPointList().(*AdGroupSimulation_TargetCpaPointList); ok { + return x.TargetCpaPointList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AdGroupSimulation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupSimulation_OneofMarshaler, _AdGroupSimulation_OneofUnmarshaler, _AdGroupSimulation_OneofSizer, []interface{}{ + (*AdGroupSimulation_CpcBidPointList)(nil), + (*AdGroupSimulation_TargetCpaPointList)(nil), + } +} + +func _AdGroupSimulation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupSimulation) + // point_list + switch x := m.PointList.(type) { + case *AdGroupSimulation_CpcBidPointList: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CpcBidPointList); err != nil { + return err + } + case *AdGroupSimulation_TargetCpaPointList: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetCpaPointList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AdGroupSimulation.PointList has unexpected type %T", x) + } + return nil +} + +func _AdGroupSimulation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupSimulation) + switch tag { + case 8: // point_list.cpc_bid_point_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CpcBidSimulationPointList) + err := b.DecodeMessage(msg) + m.PointList = &AdGroupSimulation_CpcBidPointList{msg} + return true, err + case 9: // point_list.target_cpa_point_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetCpaSimulationPointList) + err := b.DecodeMessage(msg) + m.PointList = &AdGroupSimulation_TargetCpaPointList{msg} + return true, err + default: + return false, nil + } +} + +func 
_AdGroupSimulation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupSimulation) + // point_list + switch x := m.PointList.(type) { + case *AdGroupSimulation_CpcBidPointList: + s := proto.Size(x.CpcBidPointList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupSimulation_TargetCpaPointList: + s := proto.Size(x.TargetCpaPointList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AdGroupSimulation)(nil), "google.ads.googleads.v1.resources.AdGroupSimulation") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_group_simulation.proto", fileDescriptor_ad_group_simulation_a578bba85f881904) +} + +var fileDescriptor_ad_group_simulation_a578bba85f881904 = []byte{ + // 563 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xdd, 0x6e, 0xd3, 0x3c, + 0x18, 0xfe, 0xd2, 0xfd, 0x7c, 0x9b, 0x37, 0x40, 0x18, 0x81, 0xa2, 0x31, 0xa1, 0x0e, 0x34, 0xa9, + 0x47, 0x8e, 0xba, 0x21, 0x10, 0x29, 0x07, 0xa4, 0x03, 0x95, 0x21, 0x86, 0xaa, 0xac, 0xea, 0x01, + 0xaa, 0x14, 0xb9, 0xb1, 0x97, 0x59, 0x6a, 0x6c, 0x13, 0x3b, 0x43, 0x3b, 0xe6, 0x98, 0x9b, 0xe0, + 0x90, 0x6b, 0xe0, 0x0a, 0xb8, 0x14, 0xae, 0x02, 0xc5, 0x49, 0xdc, 0xaa, 0x5d, 0x59, 0xcf, 0x5e, + 0xbf, 0xef, 0xf3, 0x63, 0x3f, 0xb1, 0x03, 0x3a, 0x89, 0x10, 0xc9, 0x84, 0x7a, 0x98, 0x28, 0xaf, + 0x2c, 0x8b, 0xea, 0xaa, 0xed, 0x65, 0x54, 0x89, 0x3c, 0x8b, 0xa9, 0xf2, 0x30, 0x89, 0x92, 0x4c, + 0xe4, 0x32, 0x52, 0x2c, 0xcd, 0x27, 0x58, 0x33, 0xc1, 0x91, 0xcc, 0x84, 0x16, 0xf0, 0xa0, 0x64, + 0x20, 0x4c, 0x14, 0xb2, 0x64, 0x74, 0xd5, 0x46, 0x96, 0xbc, 0xe7, 0x2d, 0xd3, 0x8f, 0x45, 0x9a, + 0x0a, 0xee, 0xcd, 0x6b, 0xee, 0x75, 0x97, 0x11, 0x28, 0xcf, 0x53, 0x35, 0x83, 0x8f, 0x52, 0x41, + 0xd8, 0x05, 0x8b, 0xab, 0x05, 0xd5, 0x97, 0x82, 0x54, 0x1a, 0xc7, 0x2b, 0x6b, 0xe8, 0x6b, 0x49, + 0x2b, 0xd2, 0x93, 0x8a, 0x64, 0x56, 0xe3, 0xfc, 0xc2, 0xfb, 0x9a, 0x61, 0x29, 0x69, 0xa6, 0xaa, + 0xf9, 0x7e, 0x2d, 0x2a, 0x99, 0x87, 0x39, 0x17, 0xda, 0x28, 0x54, 0xd3, 0xa7, 0xbf, 0x36, 0xc0, + 0xfd, 0x80, 0xf4, 0x8a, 0x9c, 0xce, 0xad, 0x3c, 0x7c, 0x06, 0xee, 0xd4, 0x51, 0x44, 0x1c, 0xa7, + 0xd4, 0x75, 0x9a, 0x4e, 0x6b, 0x3b, 0xdc, 0xad, 0x9b, 0x9f, 0x70, 0x4a, 0x61, 0x07, 0xec, 0xd8, + 0x88, 0x19, 0x71, 0x1b, 0x4d, 0xa7, 0xb5, 0x73, 0xf4, 0xb8, 0x0a, 0x14, 0xd5, 0xdb, 0x41, 0xa7, + 0x5c, 0xbf, 0x78, 0x3e, 0xc4, 0x93, 0x9c, 0x86, 0xdb, 0xb8, 0x74, 0x3a, 0x25, 0x70, 0x00, 0xd6, + 0x8b, 0x33, 0xb8, 0x6b, 0x4d, 0xa7, 0x75, 0xf7, 0xe8, 0x0d, 0x5a, 0xf6, 0x45, 0xcc, 0xc9, 0xd1, + 0x74, 0x6b, 0x83, 0x6b, 0x49, 0xdf, 0xf1, 0x3c, 0x9d, 0x6b, 0x85, 0x46, 0x0d, 0x7e, 0x77, 0xc0, + 0x83, 0x1b, 0xe2, 0x75, 0xd7, 0x8d, 0xcb, 0x68, 0x65, 0x97, 0xb3, 0x19, 0x8d, 0x33, 0x23, 0x31, + 0xe7, 0xb9, 0x08, 0x08, 0x61, 0xba, 0xd0, 0x83, 0x1d, 0x00, 0x94, 0xc6, 0x99, 0x8e, 0x08, 0xd6, + 0xd4, 0xdd, 0x30, 0x09, 0xed, 0x2f, 0x24, 0x74, 0xae, 0x33, 0xc6, 0x93, 0x2a, 0x22, 0x83, 0x7f, + 0x8b, 0x35, 0x85, 0x2f, 0xc1, 0x16, 0xe5, 0xa4, 0xa4, 0x6e, 0xae, 0x40, 0xfd, 0x9f, 0x72, 0x62, + 0x88, 0x97, 0x00, 0xc6, 0x32, 0x8e, 0xc6, 0x8c, 0x44, 0x52, 0x30, 0xae, 0xa3, 0x09, 0x53, 0xda, + 0xdd, 0x32, 0x12, 0xaf, 0x96, 0x66, 0x50, 0x5e, 0x6c, 0x74, 0x22, 0xe3, 0x2e, 0x23, 0xd3, 0x93, + 0xf6, 0x0b, 0x85, 0x8f, 0x4c, 0xe9, 0xf7, 0xff, 0x85, 0xf7, 0x62, 0x33, 0xb4, 0x2d, 0xf8, 0x05, + 0x3c, 0xd4, 0x38, 0x4b, 0xa8, 0x8e, 0x62, 0x89, 0x67, 
0xcd, 0xb6, 0x8d, 0xd9, 0xeb, 0xdb, 0xcc, + 0x06, 0x86, 0x7c, 0x22, 0xf1, 0xcd, 0x7e, 0x50, 0xd7, 0x73, 0xdb, 0xed, 0xee, 0x02, 0x30, 0xf5, + 0xe9, 0x7e, 0x6b, 0x80, 0xc3, 0x58, 0xa4, 0xe8, 0xd6, 0x07, 0xdd, 0x7d, 0xb4, 0x70, 0xcb, 0xfb, + 0x45, 0x88, 0x7d, 0xe7, 0xf3, 0x87, 0x8a, 0x9c, 0x88, 0x09, 0xe6, 0x09, 0x12, 0x59, 0xe2, 0x25, + 0x94, 0x9b, 0x88, 0xeb, 0x57, 0x28, 0x99, 0xfa, 0xc7, 0x9f, 0xa6, 0x63, 0xab, 0x1f, 0x8d, 0xb5, + 0x5e, 0x10, 0xfc, 0x6c, 0x1c, 0xf4, 0x4a, 0xc9, 0x80, 0x28, 0x54, 0x96, 0x45, 0x35, 0x6c, 0xa3, + 0xb0, 0x46, 0xfe, 0xae, 0x31, 0xa3, 0x80, 0xa8, 0x91, 0xc5, 0x8c, 0x86, 0xed, 0x91, 0xc5, 0xfc, + 0x69, 0x1c, 0x96, 0x03, 0xdf, 0x0f, 0x88, 0xf2, 0x7d, 0x8b, 0xf2, 0xfd, 0x61, 0xdb, 0xf7, 0x2d, + 0x6e, 0xbc, 0x69, 0x36, 0x7b, 0xfc, 0x37, 0x00, 0x00, 0xff, 0xff, 0x05, 0xed, 0xb9, 0x15, 0x15, + 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_parameter.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_parameter.pb.go new file mode 100644 index 0000000..3703a7c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_parameter.pb.go @@ -0,0 +1,146 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_parameter.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad parameter that is used to update numeric values (such as prices or +// inventory levels) in any text line of an ad (including URLs). There can +// be a maximum of two AdParameters per ad group criterion. (One with +// parameter_index = 1 and one with parameter_index = 2.) +// In the ad the parameters are referenced by a placeholder of the form +// "{param#:value}". E.g. "{param1:$17}" +type AdParameter struct { + // The resource name of the ad parameter. + // Ad parameter resource names have the form: + // + // + // `customers/{customer_id}/adParameters/{ad_group_id}~{criterion_id}~{parameter_index}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ad group criterion that this ad parameter belongs to. + AdGroupCriterion *wrappers.StringValue `protobuf:"bytes,2,opt,name=ad_group_criterion,json=adGroupCriterion,proto3" json:"ad_group_criterion,omitempty"` + // The unique index of this ad parameter. Must be either 1 or 2. + ParameterIndex *wrappers.Int64Value `protobuf:"bytes,3,opt,name=parameter_index,json=parameterIndex,proto3" json:"parameter_index,omitempty"` + // Numeric value to insert into the ad text. The following restrictions + // apply: + // - Can use comma or period as a separator, with an optional period or + // comma (respectively) for fractional values. For example, 1,000,000.00 + // and 2.000.000,10 are valid. 
+ // - Can be prepended or appended with a currency symbol. For example, + // $99.99 is valid. + // - Can be prepended or appended with a currency code. For example, 99.99USD + // and EUR200 are valid. + // - Can use '%'. For example, 1.0% and 1,0% are valid. + // - Can use plus or minus. For example, -10.99 and 25+ are valid. + // - Can use '/' between two numbers. For example 4/1 and 0.95/0.45 are + // valid. + InsertionText *wrappers.StringValue `protobuf:"bytes,4,opt,name=insertion_text,json=insertionText,proto3" json:"insertion_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdParameter) Reset() { *m = AdParameter{} } +func (m *AdParameter) String() string { return proto.CompactTextString(m) } +func (*AdParameter) ProtoMessage() {} +func (*AdParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_c2ef224ce905eb1b, []int{0} +} +func (m *AdParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdParameter.Unmarshal(m, b) +} +func (m *AdParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdParameter.Marshal(b, m, deterministic) +} +func (dst *AdParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdParameter.Merge(dst, src) +} +func (m *AdParameter) XXX_Size() int { + return xxx_messageInfo_AdParameter.Size(m) +} +func (m *AdParameter) XXX_DiscardUnknown() { + xxx_messageInfo_AdParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_AdParameter proto.InternalMessageInfo + +func (m *AdParameter) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AdParameter) GetAdGroupCriterion() *wrappers.StringValue { + if m != nil { + return m.AdGroupCriterion + } + return nil +} + +func (m *AdParameter) GetParameterIndex() *wrappers.Int64Value { + if m != nil { + return m.ParameterIndex + } + return nil +} + +func (m *AdParameter) GetInsertionText() *wrappers.StringValue { + if m != nil { + return m.InsertionText + } + return nil +} + +func init() { + proto.RegisterType((*AdParameter)(nil), "google.ads.googleads.v1.resources.AdParameter") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_parameter.proto", fileDescriptor_ad_parameter_c2ef224ce905eb1b) +} + +var fileDescriptor_ad_parameter_c2ef224ce905eb1b = []byte{ + // 385 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xdf, 0x6a, 0xdb, 0x30, + 0x18, 0xc5, 0xb1, 0x33, 0x06, 0x73, 0x96, 0x2c, 0xf8, 0xca, 0x64, 0x61, 0x24, 0x1b, 0x81, 0x5c, + 0xc9, 0x78, 0x0b, 0xbb, 0xd0, 0xae, 0x9c, 0x0c, 0x42, 0x72, 0x31, 0x42, 0x56, 0x7c, 0x51, 0x0c, + 0x46, 0x89, 0x54, 0x21, 0x88, 0x25, 0x23, 0xc9, 0x69, 0x5e, 0xa1, 0xaf, 0xd1, 0xcb, 0x3e, 0x4a, + 0x1f, 0xa5, 0xef, 0x50, 0x28, 0xfe, 0x23, 0x51, 0x28, 0xb4, 0xbd, 0x3b, 0x48, 0xe7, 0x77, 0xbe, + 0x23, 0x7d, 0xde, 0x9c, 0x0a, 0x41, 0x8f, 0x24, 0x44, 0x58, 0x85, 0x8d, 0xac, 0xd4, 0x29, 0x0a, + 0x25, 0x51, 0xa2, 0x94, 0x07, 0xa2, 0x42, 0x84, 0xb3, 0x02, 0x49, 0x94, 0x13, 0x4d, 0x24, 0x28, + 0xa4, 0xd0, 0xc2, 0x9f, 0x34, 0x56, 0x80, 0xb0, 0x02, 0x96, 0x02, 0xa7, 0x08, 0x58, 0x6a, 0xf8, + 0xad, 0x0d, 0xae, 0x81, 0x7d, 0x79, 0x15, 0x5e, 0x4b, 0x54, 0x14, 0x44, 0xaa, 0x26, 0x62, 0x38, + 0x32, 0x83, 0x0b, 0x16, 0x22, 0xce, 0x85, 0x46, 0x9a, 0x09, 0xde, 0xde, 0x7e, 0xbf, 0x71, 0xbd, + 0x6e, 0x8c, 0xb7, 0x66, 0xac, 0xff, 0xc3, 0xeb, 0x99, 0xe8, 0x8c, 0xa3, 0x9c, 0x04, 0xce, 0xd8, + 0x99, 0x7d, 0xda, 0x7d, 
0x36, 0x87, 0xff, 0x50, 0x4e, 0xfc, 0x8d, 0xe7, 0x23, 0x9c, 0x51, 0x29, + 0xca, 0x22, 0x3b, 0x48, 0xa6, 0x89, 0x64, 0x82, 0x07, 0xee, 0xd8, 0x99, 0x75, 0x7f, 0x8e, 0xda, + 0x9e, 0xc0, 0xf4, 0x01, 0xff, 0xb5, 0x64, 0x9c, 0x26, 0xe8, 0x58, 0x92, 0xdd, 0x00, 0xe1, 0x55, + 0x85, 0x2d, 0x0d, 0xe5, 0xff, 0xf5, 0xbe, 0xd8, 0x47, 0x67, 0x8c, 0x63, 0x72, 0x0e, 0x3a, 0x75, + 0xd0, 0xd7, 0x17, 0x41, 0x6b, 0xae, 0x7f, 0xcf, 0x9b, 0x9c, 0xbe, 0x65, 0xd6, 0x15, 0xe2, 0x2f, + 0xbd, 0x3e, 0xe3, 0x8a, 0xc8, 0xea, 0x69, 0x99, 0x26, 0x67, 0x1d, 0x7c, 0x78, 0x47, 0x9b, 0x9e, + 0x65, 0x2e, 0xc8, 0x59, 0x2f, 0x1e, 0x1d, 0x6f, 0x7a, 0x10, 0x39, 0x78, 0xf3, 0xcf, 0x17, 0x83, + 0x67, 0x5f, 0xb6, 0xad, 0x92, 0xb7, 0xce, 0xe5, 0xa6, 0xc5, 0xa8, 0x38, 0x22, 0x4e, 0x81, 0x90, + 0x34, 0xa4, 0x84, 0xd7, 0x73, 0xcd, 0xc2, 0x0b, 0xa6, 0x5e, 0xd9, 0xff, 0x1f, 0xab, 0x6e, 0xdd, + 0xce, 0x2a, 0x8e, 0xef, 0xdc, 0xc9, 0xaa, 0x89, 0x8c, 0xb1, 0x02, 0x8d, 0xac, 0x54, 0x12, 0x81, + 0x9d, 0x71, 0xde, 0x1b, 0x4f, 0x1a, 0x63, 0x95, 0x5a, 0x4f, 0x9a, 0x44, 0xa9, 0xf5, 0x3c, 0xb8, + 0xd3, 0xe6, 0x02, 0xc2, 0x18, 0x2b, 0x08, 0xad, 0x0b, 0xc2, 0x24, 0x82, 0xd0, 0xfa, 0xf6, 0x1f, + 0xeb, 0xb2, 0xbf, 0x9e, 0x02, 0x00, 0x00, 0xff, 0xff, 0xe7, 0xad, 0x70, 0x5d, 0xab, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_schedule_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_schedule_view.pb.go new file mode 100644 index 0000000..56f4e35 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/ad_schedule_view.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/ad_schedule_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An ad schedule view summarizes the performance of campaigns by +// AdSchedule criteria. +type AdScheduleView struct { + // The resource name of the ad schedule view. 
+ // AdSchedule view resource names have the form: + // + // `customers/{customer_id}/adScheduleViews/{campaign_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdScheduleView) Reset() { *m = AdScheduleView{} } +func (m *AdScheduleView) String() string { return proto.CompactTextString(m) } +func (*AdScheduleView) ProtoMessage() {} +func (*AdScheduleView) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_schedule_view_4019259953664afe, []int{0} +} +func (m *AdScheduleView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdScheduleView.Unmarshal(m, b) +} +func (m *AdScheduleView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdScheduleView.Marshal(b, m, deterministic) +} +func (dst *AdScheduleView) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdScheduleView.Merge(dst, src) +} +func (m *AdScheduleView) XXX_Size() int { + return xxx_messageInfo_AdScheduleView.Size(m) +} +func (m *AdScheduleView) XXX_DiscardUnknown() { + xxx_messageInfo_AdScheduleView.DiscardUnknown(m) +} + +var xxx_messageInfo_AdScheduleView proto.InternalMessageInfo + +func (m *AdScheduleView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*AdScheduleView)(nil), "google.ads.googleads.v1.resources.AdScheduleView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/ad_schedule_view.proto", fileDescriptor_ad_schedule_view_4019259953664afe) +} + +var fileDescriptor_ad_schedule_view_4019259953664afe = []byte{ + // 271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0x69, 0x05, 0xc1, 0xa2, 0x1e, 0xd6, 0x8b, 0x88, 0x07, 0x57, 0x59, 0xf0, 0x94, 0x50, + 0x44, 0x90, 0x78, 0xca, 0x5e, 0x16, 0x3c, 0xc8, 0xb2, 0x42, 0x0f, 0x52, 0x28, 0xb1, 0x19, 0x62, + 0xa1, 0xcd, 0x94, 0x4e, 0xb7, 0x7b, 0xf5, 0x59, 0x3c, 0xfa, 0x28, 0x3e, 0x8a, 0x4f, 0x21, 0xdd, + 0x6c, 0x02, 0x5e, 0xdc, 0xdb, 0x4f, 0xf2, 0xfd, 0xdf, 0x0c, 0x93, 0x3c, 0x18, 0x44, 0x53, 0x03, + 0x57, 0x9a, 0xb8, 0x8b, 0x63, 0x1a, 0x52, 0xde, 0x01, 0xe1, 0xba, 0x2b, 0x81, 0xb8, 0xd2, 0x05, + 0x95, 0xef, 0xa0, 0xd7, 0x35, 0x14, 0x43, 0x05, 0x1b, 0xd6, 0x76, 0xd8, 0xe3, 0x64, 0xea, 0x70, + 0xa6, 0x34, 0xb1, 0xd0, 0x64, 0x43, 0xca, 0x42, 0xf3, 0xe2, 0xd2, 0xcb, 0xdb, 0x8a, 0x2b, 0x6b, + 0xb1, 0x57, 0x7d, 0x85, 0x96, 0x9c, 0xe0, 0xfa, 0x3e, 0x39, 0x95, 0xfa, 0x65, 0x67, 0xce, 0x2a, + 0xd8, 0x4c, 0x6e, 0x92, 0x13, 0x5f, 0x2e, 0xac, 0x6a, 0xe0, 0x3c, 0xba, 0x8a, 0x6e, 0x8f, 0x56, + 0xc7, 0xfe, 0xf1, 0x59, 0x35, 0x30, 0xff, 0x88, 0x93, 0x59, 0x89, 0x0d, 0xdb, 0x3b, 0x7e, 0x7e, + 0xf6, 0x57, 0xbf, 0x1c, 0xa7, 0x2e, 0xa3, 0xd7, 0xa7, 0x5d, 0xd3, 0x60, 0xad, 0xac, 0x61, 0xd8, + 0x19, 0x6e, 0xc0, 0x6e, 0x77, 0xf2, 0x27, 0x68, 0x2b, 0xfa, 0xe7, 0x22, 0x8f, 0x21, 0x7d, 0xc6, + 0x07, 0x0b, 0x29, 0xbf, 0xe2, 0xe9, 0xc2, 0x29, 0xa5, 0x26, 0xe6, 0xe2, 0x98, 0xb2, 0x94, 0xad, + 0x3c, 0xf9, 0xed, 0x99, 0x5c, 0x6a, 0xca, 0x03, 0x93, 0x67, 0x69, 0x1e, 0x98, 0x9f, 0x78, 0xe6, + 0x3e, 0x84, 0x90, 0x9a, 0x84, 0x08, 0x94, 0x10, 0x59, 0x2a, 0x44, 0xe0, 0xde, 0x0e, 0xb7, 0xcb, + 0xde, 0xfd, 0x06, 0x00, 0x00, 0xff, 0xff, 0xd4, 0xff, 0xfa, 0xec, 0xbd, 0x01, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/age_range_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/age_range_view.pb.go new file mode 100644 index 0000000..6da32f9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/age_range_view.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/age_range_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An age range view. +type AgeRangeView struct { + // The resource name of the age range view. + // Age range view resource names have the form: + // + // `customers/{customer_id}/ageRangeViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AgeRangeView) Reset() { *m = AgeRangeView{} } +func (m *AgeRangeView) String() string { return proto.CompactTextString(m) } +func (*AgeRangeView) ProtoMessage() {} +func (*AgeRangeView) Descriptor() ([]byte, []int) { + return fileDescriptor_age_range_view_32980c6c992c3551, []int{0} +} +func (m *AgeRangeView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AgeRangeView.Unmarshal(m, b) +} +func (m *AgeRangeView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AgeRangeView.Marshal(b, m, deterministic) +} +func (dst *AgeRangeView) XXX_Merge(src proto.Message) { + xxx_messageInfo_AgeRangeView.Merge(dst, src) +} +func (m *AgeRangeView) XXX_Size() int { + return xxx_messageInfo_AgeRangeView.Size(m) +} +func (m *AgeRangeView) XXX_DiscardUnknown() { + xxx_messageInfo_AgeRangeView.DiscardUnknown(m) +} + +var xxx_messageInfo_AgeRangeView proto.InternalMessageInfo + +func (m *AgeRangeView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*AgeRangeView)(nil), "google.ads.googleads.v1.resources.AgeRangeView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/age_range_view.proto", fileDescriptor_age_range_view_32980c6c992c3551) +} + +var fileDescriptor_age_range_view_32980c6c992c3551 = []byte{ + // 267 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xb1, 0x4a, 0xc4, 0x30, + 0x18, 0xc7, 0x69, 0x05, 0xc1, 0x72, 0x0e, 0xde, 0x24, 0xe2, 0xe0, 0x29, 0x07, 0x4e, 0x09, 0xe1, + 0xc0, 0x21, 0x4e, 0xb9, 0xe5, 0xc0, 0x41, 0x8e, 0x0e, 0x1d, 0xa4, 0x50, 0xe2, 0xe5, 0x23, 0x04, + 0xae, 0xf9, 0x4a, 0x52, 0x7b, 0xef, 0xe3, 0xe8, 0xa3, 0xf8, 0x28, 0x3e, 0x84, 0x48, 0x1b, 0x13, + 0x9c, 0xbc, 0xed, 0x4f, 0xf2, 0xfb, 0xff, 0xbe, 0x2f, 0x29, 0x1e, 0x34, 0xa2, 0xde, 0x03, 
0x95, + 0xca, 0xd3, 0x10, 0xc7, 0x34, 0x30, 0xea, 0xc0, 0xe3, 0x9b, 0xdb, 0x81, 0xa7, 0x52, 0x43, 0xe3, + 0xa4, 0xd5, 0xd0, 0x0c, 0x06, 0x0e, 0xa4, 0x73, 0xd8, 0xe3, 0x7c, 0x11, 0x60, 0x22, 0x95, 0x27, + 0xa9, 0x47, 0x06, 0x46, 0x52, 0xef, 0xea, 0x3a, 0xaa, 0x3b, 0x43, 0xa5, 0xb5, 0xd8, 0xcb, 0xde, + 0xa0, 0xf5, 0x41, 0x70, 0xbb, 0x2a, 0x66, 0x42, 0x43, 0x39, 0x7a, 0x2b, 0x03, 0x87, 0xf9, 0x5d, + 0x71, 0x1e, 0xab, 0x8d, 0x95, 0x2d, 0x5c, 0x66, 0x37, 0xd9, 0xfd, 0x59, 0x39, 0x8b, 0x87, 0xcf, + 0xb2, 0x85, 0xf5, 0x77, 0x56, 0x2c, 0x77, 0xd8, 0x92, 0xa3, 0xc3, 0xd7, 0x17, 0x7f, 0xe5, 0xdb, + 0x71, 0xe2, 0x36, 0x7b, 0x79, 0xfa, 0xed, 0x69, 0xdc, 0x4b, 0xab, 0x09, 0x3a, 0x4d, 0x35, 0xd8, + 0x69, 0x9f, 0xf8, 0xf8, 0xce, 0xf8, 0x7f, 0xfe, 0xe2, 0x31, 0xa5, 0xf7, 0xfc, 0x64, 0x23, 0xc4, + 0x47, 0xbe, 0xd8, 0x04, 0xa5, 0x50, 0x9e, 0x84, 0x38, 0xa6, 0x8a, 0x91, 0x32, 0x92, 0x9f, 0x91, + 0xa9, 0x85, 0xf2, 0x75, 0x62, 0xea, 0x8a, 0xd5, 0x89, 0xf9, 0xca, 0x97, 0xe1, 0x82, 0x73, 0xa1, + 0x3c, 0xe7, 0x89, 0xe2, 0xbc, 0x62, 0x9c, 0x27, 0xee, 0xf5, 0x74, 0x5a, 0x76, 0xf5, 0x13, 0x00, + 0x00, 0xff, 0xff, 0x0e, 0xab, 0x24, 0x97, 0xb7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/asset.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/asset.pb.go new file mode 100644 index 0000000..45216e1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/asset.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/asset.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Asset is a part of an ad which can be shared across multiple ads. +// It can be an image (ImageAsset), a video (YoutubeVideoAsset), etc. +type Asset struct { + // The resource name of the asset. + // Asset resource names have the form: + // + // `customers/{customer_id}/assets/{asset_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the asset. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Optional name of the asset. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Type of the asset. + Type enums.AssetTypeEnum_AssetType `protobuf:"varint,4,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.AssetTypeEnum_AssetType" json:"type,omitempty"` + // The specific type of the asset. 
+ // + // Types that are valid to be assigned to AssetData: + // *Asset_YoutubeVideoAsset + // *Asset_MediaBundleAsset + // *Asset_ImageAsset + // *Asset_TextAsset + AssetData isAsset_AssetData `protobuf_oneof:"asset_data"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset) Reset() { *m = Asset{} } +func (m *Asset) String() string { return proto.CompactTextString(m) } +func (*Asset) ProtoMessage() {} +func (*Asset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_06e147e85df80ec5, []int{0} +} +func (m *Asset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset.Unmarshal(m, b) +} +func (m *Asset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset.Marshal(b, m, deterministic) +} +func (dst *Asset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset.Merge(dst, src) +} +func (m *Asset) XXX_Size() int { + return xxx_messageInfo_Asset.Size(m) +} +func (m *Asset) XXX_DiscardUnknown() { + xxx_messageInfo_Asset.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset proto.InternalMessageInfo + +func (m *Asset) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Asset) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Asset) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *Asset) GetType() enums.AssetTypeEnum_AssetType { + if m != nil { + return m.Type + } + return enums.AssetTypeEnum_UNSPECIFIED +} + +type isAsset_AssetData interface { + isAsset_AssetData() +} + +type Asset_YoutubeVideoAsset struct { + YoutubeVideoAsset *common.YoutubeVideoAsset `protobuf:"bytes,5,opt,name=youtube_video_asset,json=youtubeVideoAsset,proto3,oneof"` +} + +type Asset_MediaBundleAsset struct { + MediaBundleAsset *common.MediaBundleAsset `protobuf:"bytes,6,opt,name=media_bundle_asset,json=mediaBundleAsset,proto3,oneof"` +} + +type Asset_ImageAsset struct { + ImageAsset *common.ImageAsset `protobuf:"bytes,7,opt,name=image_asset,json=imageAsset,proto3,oneof"` +} + +type Asset_TextAsset struct { + TextAsset *common.TextAsset `protobuf:"bytes,8,opt,name=text_asset,json=textAsset,proto3,oneof"` +} + +func (*Asset_YoutubeVideoAsset) isAsset_AssetData() {} + +func (*Asset_MediaBundleAsset) isAsset_AssetData() {} + +func (*Asset_ImageAsset) isAsset_AssetData() {} + +func (*Asset_TextAsset) isAsset_AssetData() {} + +func (m *Asset) GetAssetData() isAsset_AssetData { + if m != nil { + return m.AssetData + } + return nil +} + +func (m *Asset) GetYoutubeVideoAsset() *common.YoutubeVideoAsset { + if x, ok := m.GetAssetData().(*Asset_YoutubeVideoAsset); ok { + return x.YoutubeVideoAsset + } + return nil +} + +func (m *Asset) GetMediaBundleAsset() *common.MediaBundleAsset { + if x, ok := m.GetAssetData().(*Asset_MediaBundleAsset); ok { + return x.MediaBundleAsset + } + return nil +} + +func (m *Asset) GetImageAsset() *common.ImageAsset { + if x, ok := m.GetAssetData().(*Asset_ImageAsset); ok { + return x.ImageAsset + } + return nil +} + +func (m *Asset) GetTextAsset() *common.TextAsset { + if x, ok := m.GetAssetData().(*Asset_TextAsset); ok { + return x.TextAsset + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Asset) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Asset_OneofMarshaler, _Asset_OneofUnmarshaler, _Asset_OneofSizer, []interface{}{ + (*Asset_YoutubeVideoAsset)(nil), + (*Asset_MediaBundleAsset)(nil), + (*Asset_ImageAsset)(nil), + (*Asset_TextAsset)(nil), + } +} + +func _Asset_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Asset) + // asset_data + switch x := m.AssetData.(type) { + case *Asset_YoutubeVideoAsset: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeVideoAsset); err != nil { + return err + } + case *Asset_MediaBundleAsset: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaBundleAsset); err != nil { + return err + } + case *Asset_ImageAsset: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageAsset); err != nil { + return err + } + case *Asset_TextAsset: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextAsset); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Asset.AssetData has unexpected type %T", x) + } + return nil +} + +func _Asset_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Asset) + switch tag { + case 5: // asset_data.youtube_video_asset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YoutubeVideoAsset) + err := b.DecodeMessage(msg) + m.AssetData = &Asset_YoutubeVideoAsset{msg} + return true, err + case 6: // asset_data.media_bundle_asset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MediaBundleAsset) + err := b.DecodeMessage(msg) + m.AssetData = &Asset_MediaBundleAsset{msg} + return true, err + case 7: // asset_data.image_asset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ImageAsset) + err := b.DecodeMessage(msg) + m.AssetData = &Asset_ImageAsset{msg} + return true, err + case 8: // asset_data.text_asset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TextAsset) + err := b.DecodeMessage(msg) + m.AssetData = &Asset_TextAsset{msg} + return true, err + default: + return false, nil + } +} + +func _Asset_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Asset) + // asset_data + switch x := m.AssetData.(type) { + case *Asset_YoutubeVideoAsset: + s := proto.Size(x.YoutubeVideoAsset) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Asset_MediaBundleAsset: + s := proto.Size(x.MediaBundleAsset) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Asset_ImageAsset: + s := proto.Size(x.ImageAsset) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Asset_TextAsset: + s := proto.Size(x.TextAsset) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Asset)(nil), "google.ads.googleads.v1.resources.Asset") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/asset.proto", fileDescriptor_asset_06e147e85df80ec5) +} + +var fileDescriptor_asset_06e147e85df80ec5 = []byte{ + // 508 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0x84, 0x93, 0xc1, 0x6e, 0xd3, 0x30, + 0x18, 0xc7, 0x49, 0xd6, 0x0d, 0xe6, 0x0d, 0x04, 0xe6, 0x52, 0x8d, 0x09, 0x75, 0xa0, 0x49, 0x05, + 0x84, 0xd3, 0x0c, 0xb4, 0x43, 0x38, 0xa5, 0x12, 0x1a, 0xab, 0x34, 0x34, 0x85, 0x29, 0x12, 0xa8, + 0x52, 0x70, 0x6b, 0x13, 0x59, 0xaa, 0xed, 0x28, 0x76, 0xca, 0xfa, 0x3a, 0x1c, 0x79, 0x11, 0x24, + 0x1e, 0x85, 0x17, 0xe0, 0x8a, 0x62, 0xc7, 0x01, 0x15, 0x75, 0xb9, 0x7d, 0x76, 0x7e, 0xff, 0xdf, + 0xf7, 0xd5, 0xb5, 0xc1, 0xcb, 0x5c, 0xca, 0x7c, 0x41, 0x03, 0x4c, 0x54, 0x60, 0xcb, 0xba, 0x5a, + 0x86, 0x41, 0x49, 0x95, 0xac, 0xca, 0x39, 0x55, 0x01, 0x56, 0x8a, 0x6a, 0x54, 0x94, 0x52, 0x4b, + 0x78, 0x64, 0x19, 0x84, 0x89, 0x42, 0x2d, 0x8e, 0x96, 0x21, 0x6a, 0xf1, 0x83, 0xd1, 0x26, 0xe3, + 0x5c, 0x72, 0x2e, 0x85, 0xd5, 0x65, 0x7a, 0x55, 0x50, 0x65, 0xa5, 0x07, 0x68, 0x53, 0x82, 0x8a, + 0x8a, 0xab, 0x7f, 0x02, 0x0d, 0xff, 0xb8, 0xe1, 0xcd, 0x6a, 0x56, 0x7d, 0x09, 0xbe, 0x96, 0xb8, + 0x28, 0x68, 0xe9, 0x7c, 0x87, 0xce, 0x57, 0xb0, 0x00, 0x0b, 0x21, 0x35, 0xd6, 0x4c, 0x8a, 0xe6, + 0xeb, 0x93, 0x1f, 0x3d, 0xb0, 0x1d, 0xd7, 0x4a, 0xf8, 0x14, 0xdc, 0x75, 0x63, 0x67, 0x02, 0x73, + 0xda, 0xf7, 0x06, 0xde, 0x70, 0x37, 0xd9, 0x77, 0x9b, 0xef, 0x31, 0xa7, 0xf0, 0x05, 0xf0, 0x19, + 0xe9, 0xfb, 0x03, 0x6f, 0xb8, 0x77, 0xf2, 0xa8, 0x99, 0x14, 0xb9, 0xce, 0xe8, 0x5c, 0xe8, 0xd3, + 0xd7, 0x29, 0x5e, 0x54, 0x34, 0xf1, 0x19, 0x81, 0x23, 0xd0, 0x33, 0xa2, 0x2d, 0x83, 0x1f, 0xfe, + 0x87, 0x7f, 0xd0, 0x25, 0x13, 0xb9, 0xe5, 0x0d, 0x09, 0x27, 0xa0, 0x57, 0xff, 0xb2, 0x7e, 0x6f, + 0xe0, 0x0d, 0xef, 0x9d, 0x9c, 0xa2, 0x4d, 0xe7, 0x6b, 0x8e, 0x02, 0x99, 0xb9, 0xaf, 0x56, 0x05, + 0x7d, 0x2b, 0x2a, 0xfe, 0x77, 0x95, 0x18, 0x07, 0x9c, 0x83, 0x87, 0x2b, 0x59, 0xe9, 0x6a, 0x46, + 0xb3, 0x25, 0x23, 0x54, 0x66, 0xe6, 0xe4, 0xfa, 0xdb, 0x66, 0x98, 0x70, 0xa3, 0xda, 0xfe, 0x2f, + 0xe8, 0xa3, 0x8d, 0xa6, 0x75, 0xd2, 0x98, 0xdf, 0xdd, 0x4a, 0x1e, 0xac, 0xd6, 0x37, 0xe1, 0x67, + 0x00, 0x39, 0x25, 0x0c, 0x67, 0xb3, 0x4a, 0x90, 0x05, 0x6d, 0x7a, 0xec, 0x98, 0x1e, 0xa3, 0xae, + 0x1e, 0x17, 0x75, 0x72, 0x6c, 0x82, 0xae, 0xc5, 0x7d, 0xbe, 0xb6, 0x07, 0x2f, 0xc0, 0x1e, 0xe3, + 0x38, 0x77, 0xea, 0xdb, 0x46, 0xfd, 0xbc, 0x4b, 0x7d, 0x5e, 0x47, 0x9c, 0x14, 0xb0, 0x76, 0x05, + 0x27, 0x00, 0x68, 0x7a, 0xad, 0x1b, 0xdb, 0x1d, 0x63, 0x7b, 0xd6, 0x65, 0xbb, 0xa2, 0xd7, 0xda, + 0xc9, 0x76, 0xb5, 0x5b, 0x8c, 0xf7, 0x01, 0xb0, 0xb7, 0x91, 0x60, 0x8d, 0xc7, 0xbf, 0x3d, 0x70, + 0x3c, 0x97, 0x1c, 0x75, 0xbe, 0x89, 0x31, 0x30, 0xf1, 0xcb, 0xfa, 0x1a, 0x5c, 0x7a, 0x9f, 0x26, + 0x4d, 0x20, 0x97, 0x0b, 0x2c, 0x72, 0x24, 0xcb, 0x3c, 0xc8, 0xa9, 0x30, 0x97, 0xc4, 0xdd, 0xff, + 0x82, 0xa9, 0x1b, 0x9e, 0xe4, 0x9b, 0xb6, 0xfa, 0xe6, 0x6f, 0x9d, 0xc5, 0xf1, 0x77, 0xff, 0xe8, + 0xcc, 0x2a, 0x63, 0xa2, 0x90, 0x2d, 0xeb, 0x2a, 0x0d, 0x51, 0xe2, 0xc8, 0x9f, 0x8e, 0x99, 0xc6, + 0x44, 0x4d, 0x5b, 0x66, 0x9a, 0x86, 0xd3, 0x96, 0xf9, 0xe5, 0x1f, 0xdb, 0x0f, 0x51, 0x14, 0x13, + 0x15, 0x45, 0x2d, 0x15, 0x45, 0x69, 0x18, 0x45, 0x2d, 0x37, 0xdb, 0x31, 0xc3, 0xbe, 0xfa, 0x13, + 0x00, 0x00, 0xff, 0xff, 0x7c, 0x61, 0x45, 0xad, 0x3e, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/bidding_strategy.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/bidding_strategy.pb.go new file mode 100644 index 0000000..3b70fa9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/bidding_strategy.pb.go @@ -0,0 +1,473 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/bidding_strategy.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bidding strategy. +type BiddingStrategy struct { + // The resource name of the bidding strategy. + // Bidding strategy resource names have the form: + // + // `customers/{customer_id}/biddingStrategies/{bidding_strategy_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the bidding strategy. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the bidding strategy. + // All bidding strategies within an account must be named distinctly. + // + // The length of this string should be between 1 and 255, inclusive, + // in UTF-8 bytes, (trimmed). + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The status of the bidding strategy. + // + // This field is read-only. + Status enums.BiddingStrategyStatusEnum_BiddingStrategyStatus `protobuf:"varint,15,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.BiddingStrategyStatusEnum_BiddingStrategyStatus" json:"status,omitempty"` + // The type of the bidding strategy. + // Create a bidding strategy by setting the bidding scheme. + // + // This field is read-only. + Type enums.BiddingStrategyTypeEnum_BiddingStrategyType `protobuf:"varint,5,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.BiddingStrategyTypeEnum_BiddingStrategyType" json:"type,omitempty"` + // The number of campaigns attached to this bidding strategy. + // + // This field is read-only. + CampaignCount *wrappers.Int64Value `protobuf:"bytes,13,opt,name=campaign_count,json=campaignCount,proto3" json:"campaign_count,omitempty"` + // The number of non-removed campaigns attached to this bidding strategy. + // + // This field is read-only. + NonRemovedCampaignCount *wrappers.Int64Value `protobuf:"bytes,14,opt,name=non_removed_campaign_count,json=nonRemovedCampaignCount,proto3" json:"non_removed_campaign_count,omitempty"` + // The bidding scheme. + // + // Only one can be set. 
+ // + // Types that are valid to be assigned to Scheme: + // *BiddingStrategy_EnhancedCpc + // *BiddingStrategy_PageOnePromoted + // *BiddingStrategy_TargetCpa + // *BiddingStrategy_TargetImpressionShare + // *BiddingStrategy_TargetOutrankShare + // *BiddingStrategy_TargetRoas + // *BiddingStrategy_TargetSpend + Scheme isBiddingStrategy_Scheme `protobuf_oneof:"scheme"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingStrategy) Reset() { *m = BiddingStrategy{} } +func (m *BiddingStrategy) String() string { return proto.CompactTextString(m) } +func (*BiddingStrategy) ProtoMessage() {} +func (*BiddingStrategy) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_c7e03cac8ed0390b, []int{0} +} +func (m *BiddingStrategy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingStrategy.Unmarshal(m, b) +} +func (m *BiddingStrategy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingStrategy.Marshal(b, m, deterministic) +} +func (dst *BiddingStrategy) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingStrategy.Merge(dst, src) +} +func (m *BiddingStrategy) XXX_Size() int { + return xxx_messageInfo_BiddingStrategy.Size(m) +} +func (m *BiddingStrategy) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingStrategy.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingStrategy proto.InternalMessageInfo + +func (m *BiddingStrategy) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *BiddingStrategy) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *BiddingStrategy) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *BiddingStrategy) GetStatus() enums.BiddingStrategyStatusEnum_BiddingStrategyStatus { + if m != nil { + return m.Status + } + return enums.BiddingStrategyStatusEnum_UNSPECIFIED +} + +func (m *BiddingStrategy) GetType() enums.BiddingStrategyTypeEnum_BiddingStrategyType { + if m != nil { + return m.Type + } + return enums.BiddingStrategyTypeEnum_UNSPECIFIED +} + +func (m *BiddingStrategy) GetCampaignCount() *wrappers.Int64Value { + if m != nil { + return m.CampaignCount + } + return nil +} + +func (m *BiddingStrategy) GetNonRemovedCampaignCount() *wrappers.Int64Value { + if m != nil { + return m.NonRemovedCampaignCount + } + return nil +} + +type isBiddingStrategy_Scheme interface { + isBiddingStrategy_Scheme() +} + +type BiddingStrategy_EnhancedCpc struct { + EnhancedCpc *common.EnhancedCpc `protobuf:"bytes,7,opt,name=enhanced_cpc,json=enhancedCpc,proto3,oneof"` +} + +type BiddingStrategy_PageOnePromoted struct { + PageOnePromoted *common.PageOnePromoted `protobuf:"bytes,8,opt,name=page_one_promoted,json=pageOnePromoted,proto3,oneof"` +} + +type BiddingStrategy_TargetCpa struct { + TargetCpa *common.TargetCpa `protobuf:"bytes,9,opt,name=target_cpa,json=targetCpa,proto3,oneof"` +} + +type BiddingStrategy_TargetImpressionShare struct { + TargetImpressionShare *common.TargetImpressionShare `protobuf:"bytes,48,opt,name=target_impression_share,json=targetImpressionShare,proto3,oneof"` +} + +type BiddingStrategy_TargetOutrankShare struct { + TargetOutrankShare *common.TargetOutrankShare `protobuf:"bytes,10,opt,name=target_outrank_share,json=targetOutrankShare,proto3,oneof"` +} + +type BiddingStrategy_TargetRoas struct { + TargetRoas *common.TargetRoas 
`protobuf:"bytes,11,opt,name=target_roas,json=targetRoas,proto3,oneof"` +} + +type BiddingStrategy_TargetSpend struct { + TargetSpend *common.TargetSpend `protobuf:"bytes,12,opt,name=target_spend,json=targetSpend,proto3,oneof"` +} + +func (*BiddingStrategy_EnhancedCpc) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_PageOnePromoted) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_TargetCpa) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_TargetImpressionShare) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_TargetOutrankShare) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_TargetRoas) isBiddingStrategy_Scheme() {} + +func (*BiddingStrategy_TargetSpend) isBiddingStrategy_Scheme() {} + +func (m *BiddingStrategy) GetScheme() isBiddingStrategy_Scheme { + if m != nil { + return m.Scheme + } + return nil +} + +func (m *BiddingStrategy) GetEnhancedCpc() *common.EnhancedCpc { + if x, ok := m.GetScheme().(*BiddingStrategy_EnhancedCpc); ok { + return x.EnhancedCpc + } + return nil +} + +func (m *BiddingStrategy) GetPageOnePromoted() *common.PageOnePromoted { + if x, ok := m.GetScheme().(*BiddingStrategy_PageOnePromoted); ok { + return x.PageOnePromoted + } + return nil +} + +func (m *BiddingStrategy) GetTargetCpa() *common.TargetCpa { + if x, ok := m.GetScheme().(*BiddingStrategy_TargetCpa); ok { + return x.TargetCpa + } + return nil +} + +func (m *BiddingStrategy) GetTargetImpressionShare() *common.TargetImpressionShare { + if x, ok := m.GetScheme().(*BiddingStrategy_TargetImpressionShare); ok { + return x.TargetImpressionShare + } + return nil +} + +func (m *BiddingStrategy) GetTargetOutrankShare() *common.TargetOutrankShare { + if x, ok := m.GetScheme().(*BiddingStrategy_TargetOutrankShare); ok { + return x.TargetOutrankShare + } + return nil +} + +func (m *BiddingStrategy) GetTargetRoas() *common.TargetRoas { + if x, ok := m.GetScheme().(*BiddingStrategy_TargetRoas); ok { + return x.TargetRoas + } + return nil +} + +func (m *BiddingStrategy) GetTargetSpend() *common.TargetSpend { + if x, ok := m.GetScheme().(*BiddingStrategy_TargetSpend); ok { + return x.TargetSpend + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BiddingStrategy) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BiddingStrategy_OneofMarshaler, _BiddingStrategy_OneofUnmarshaler, _BiddingStrategy_OneofSizer, []interface{}{ + (*BiddingStrategy_EnhancedCpc)(nil), + (*BiddingStrategy_PageOnePromoted)(nil), + (*BiddingStrategy_TargetCpa)(nil), + (*BiddingStrategy_TargetImpressionShare)(nil), + (*BiddingStrategy_TargetOutrankShare)(nil), + (*BiddingStrategy_TargetRoas)(nil), + (*BiddingStrategy_TargetSpend)(nil), + } +} + +func _BiddingStrategy_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BiddingStrategy) + // scheme + switch x := m.Scheme.(type) { + case *BiddingStrategy_EnhancedCpc: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EnhancedCpc); err != nil { + return err + } + case *BiddingStrategy_PageOnePromoted: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PageOnePromoted); err != nil { + return err + } + case *BiddingStrategy_TargetCpa: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetCpa); err != nil { + return err + } + case *BiddingStrategy_TargetImpressionShare: + b.EncodeVarint(48<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetImpressionShare); err != nil { + return err + } + case *BiddingStrategy_TargetOutrankShare: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetOutrankShare); err != nil { + return err + } + case *BiddingStrategy_TargetRoas: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetRoas); err != nil { + return err + } + case *BiddingStrategy_TargetSpend: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetSpend); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BiddingStrategy.Scheme has unexpected type %T", x) + } + return nil +} + +func _BiddingStrategy_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BiddingStrategy) + switch tag { + case 7: // scheme.enhanced_cpc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.EnhancedCpc) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_EnhancedCpc{msg} + return true, err + case 8: // scheme.page_one_promoted + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PageOnePromoted) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_PageOnePromoted{msg} + return true, err + case 9: // scheme.target_cpa + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetCpa) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_TargetCpa{msg} + return true, err + case 48: // scheme.target_impression_share + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetImpressionShare) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_TargetImpressionShare{msg} + return true, err + case 10: // scheme.target_outrank_share + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetOutrankShare) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_TargetOutrankShare{msg} + return true, err + case 11: // scheme.target_roas + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(common.TargetRoas) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_TargetRoas{msg} + return true, err + case 12: // scheme.target_spend + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetSpend) + err := b.DecodeMessage(msg) + m.Scheme = &BiddingStrategy_TargetSpend{msg} + return true, err + default: + return false, nil + } +} + +func _BiddingStrategy_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BiddingStrategy) + // scheme + switch x := m.Scheme.(type) { + case *BiddingStrategy_EnhancedCpc: + s := proto.Size(x.EnhancedCpc) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_PageOnePromoted: + s := proto.Size(x.PageOnePromoted) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_TargetCpa: + s := proto.Size(x.TargetCpa) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_TargetImpressionShare: + s := proto.Size(x.TargetImpressionShare) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_TargetOutrankShare: + s := proto.Size(x.TargetOutrankShare) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_TargetRoas: + s := proto.Size(x.TargetRoas) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategy_TargetSpend: + s := proto.Size(x.TargetSpend) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*BiddingStrategy)(nil), "google.ads.googleads.v1.resources.BiddingStrategy") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/bidding_strategy.proto", fileDescriptor_bidding_strategy_c7e03cac8ed0390b) +} + +var fileDescriptor_bidding_strategy_c7e03cac8ed0390b = []byte{ + // 693 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x95, 0xdd, 0x4e, 0xdb, 0x30, + 0x14, 0xc7, 0xdb, 0xc2, 0x18, 0xb8, 0x7c, 0x68, 0x11, 0x13, 0x11, 0x43, 0x13, 0x6c, 0x42, 0x62, + 0x63, 0x72, 0x28, 0xfb, 0xd0, 0x16, 0xae, 0xda, 0x0a, 0x51, 0x90, 0x06, 0x55, 0x8a, 0xaa, 0x69, + 0xea, 0x16, 0x99, 0xe4, 0x10, 0xa2, 0x11, 0xdb, 0xb2, 0x1d, 0x26, 0x2e, 0xf7, 0x2a, 0xbb, 0xdc, + 0xa3, 0xec, 0x51, 0xf6, 0x0c, 0xbb, 0x98, 0xe2, 0x38, 0x41, 0x7c, 0x74, 0x2d, 0x77, 0x3e, 0xf6, + 0xff, 0xff, 0xfb, 0xbb, 0xa7, 0x27, 0x09, 0x7a, 0x1f, 0x31, 0x16, 0x9d, 0x83, 0x43, 0x42, 0xe9, + 0xe4, 0xcb, 0x6c, 0x75, 0xd1, 0x70, 0x04, 0x48, 0x96, 0x8a, 0x00, 0xa4, 0x73, 0x12, 0x87, 0x61, + 0x4c, 0x23, 0x5f, 0x2a, 0x41, 0x14, 0x44, 0x97, 0x98, 0x0b, 0xa6, 0x98, 0xb5, 0x96, 0xcb, 0x31, + 0x09, 0x25, 0x2e, 0x9d, 0xf8, 0xa2, 0x81, 0x4b, 0xe7, 0xf2, 0xab, 0x61, 0xf0, 0x80, 0x25, 0x09, + 0xa3, 0x05, 0x39, 0x07, 0x2e, 0xef, 0x0c, 0x53, 0x03, 0x4d, 0x93, 0xdb, 0xd7, 0xf0, 0xa5, 0x22, + 0x2a, 0x95, 0xc6, 0xfc, 0xe1, 0x9e, 0x66, 0x75, 0xc9, 0xc1, 0x58, 0x9f, 0x1a, 0xab, 0xae, 0x4e, + 0xd2, 0x53, 0xe7, 0xbb, 0x20, 0x9c, 0x83, 0x28, 0xd0, 0x2b, 0x05, 0x9a, 0xc7, 0x0e, 0xa1, 0x94, + 0x29, 0xa2, 0x62, 0x46, 0xcd, 0xe9, 0xb3, 0xbf, 0xd3, 0x68, 0xa1, 0x95, 0xd3, 0x7b, 0x06, 0x6e, + 0x3d, 0x47, 0x73, 0x45, 0x13, 0x7c, 0x4a, 0x12, 0xb0, 0xab, 0xab, 0xd5, 0x8d, 0x19, 0x6f, 0xb6, + 0xd8, 0x3c, 0x24, 0x09, 0x58, 0x9b, 0xa8, 0x16, 0x87, 0xf6, 0xc4, 0x6a, 0x75, 0xa3, 0xbe, 0xfd, + 0xc4, 0x74, 0x10, 0x17, 0x77, 0xc0, 0xfb, 0x54, 
0xbd, 0x7b, 0xd3, 0x27, 0xe7, 0x29, 0x78, 0xb5, + 0x38, 0xb4, 0xb6, 0xd0, 0xa4, 0x06, 0x4d, 0x6a, 0xf9, 0xca, 0x2d, 0x79, 0x4f, 0x89, 0x98, 0x46, + 0xb9, 0x5e, 0x2b, 0xad, 0x53, 0x34, 0x95, 0x37, 0xc8, 0x5e, 0x58, 0xad, 0x6e, 0xcc, 0x6f, 0x1f, + 0xe2, 0x61, 0xff, 0x97, 0xee, 0x10, 0xbe, 0xf1, 0x1b, 0x7a, 0xda, 0xbb, 0x4b, 0xd3, 0xe4, 0xee, + 0x13, 0xcf, 0xd0, 0xad, 0xaf, 0x68, 0x32, 0xeb, 0xa5, 0xfd, 0x40, 0xa7, 0x1c, 0xdc, 0x2f, 0xe5, + 0xf8, 0x92, 0xc3, 0x5d, 0x19, 0xd9, 0xbe, 0xa7, 0xb9, 0x56, 0x0b, 0xcd, 0x07, 0x24, 0xe1, 0x24, + 0x8e, 0xa8, 0x1f, 0xb0, 0x94, 0x2a, 0x7b, 0x6e, 0x74, 0xcb, 0xe6, 0x0a, 0x4b, 0x3b, 0x73, 0x58, + 0x9f, 0xd0, 0x32, 0x65, 0xd4, 0x17, 0x90, 0xb0, 0x0b, 0x08, 0xfd, 0x1b, 0xbc, 0xf9, 0xd1, 0xbc, + 0x25, 0xca, 0xa8, 0x97, 0xbb, 0xdb, 0xd7, 0xc8, 0x5d, 0x34, 0x0b, 0xf4, 0x8c, 0xd0, 0x20, 0xc3, + 0xf2, 0xc0, 0x7e, 0xa8, 0x59, 0x9b, 0x43, 0xbb, 0x90, 0x0f, 0x3e, 0xde, 0x35, 0x9e, 0x36, 0x0f, + 0x3a, 0x15, 0xaf, 0x0e, 0x57, 0xa5, 0xf5, 0x05, 0x3d, 0xe2, 0x24, 0x02, 0x9f, 0x51, 0xf0, 0xb9, + 0x60, 0x09, 0x53, 0x10, 0xda, 0xd3, 0x1a, 0xeb, 0x8c, 0xc2, 0x76, 0x49, 0x04, 0x47, 0x14, 0xba, + 0xc6, 0xd6, 0xa9, 0x78, 0x0b, 0xfc, 0xfa, 0x96, 0x75, 0x80, 0x90, 0x22, 0x22, 0x02, 0xe5, 0x07, + 0x9c, 0xd8, 0x33, 0x9a, 0xfb, 0x62, 0x14, 0xf7, 0x58, 0x3b, 0xda, 0x9c, 0x74, 0x2a, 0xde, 0x8c, + 0x2a, 0x0a, 0x8b, 0xa1, 0x25, 0xc3, 0x8a, 0x13, 0x2e, 0x40, 0xca, 0x98, 0x51, 0x5f, 0x9e, 0x11, + 0x01, 0xf6, 0x96, 0x06, 0xbf, 0x1d, 0x0f, 0xbc, 0x5f, 0xba, 0x7b, 0x99, 0xb9, 0x53, 0xf1, 0x1e, + 0xab, 0xbb, 0x0e, 0xac, 0x53, 0xb4, 0x68, 0x02, 0x59, 0xaa, 0x04, 0xa1, 0xdf, 0x4c, 0x1a, 0xd2, + 0x69, 0xdb, 0xe3, 0xa5, 0x1d, 0xe5, 0xd6, 0x22, 0xca, 0x52, 0xb7, 0x76, 0xad, 0x8f, 0xa8, 0x6e, + 0x72, 0x04, 0x23, 0xd2, 0xae, 0x6b, 0xfc, 0xcb, 0xf1, 0xf0, 0x1e, 0x23, 0xb2, 0x53, 0xf1, 0x4c, + 0x97, 0xb3, 0x2a, 0x1b, 0x12, 0x83, 0x93, 0x1c, 0x68, 0x68, 0xcf, 0x8e, 0x37, 0x24, 0x39, 0xaf, + 0x97, 0x59, 0xb2, 0x21, 0x51, 0x57, 0x65, 0x6b, 0x1a, 0x4d, 0xc9, 0xe0, 0x0c, 0x12, 0x68, 0xfd, + 0xa8, 0xa1, 0xf5, 0x80, 0x25, 0x78, 0xe4, 0xcb, 0xb8, 0xb5, 0x78, 0xe3, 0x19, 0xeb, 0x66, 0x73, + 0xde, 0xad, 0x7e, 0x3e, 0x30, 0xd6, 0x88, 0x9d, 0x13, 0x1a, 0x61, 0x26, 0x22, 0x27, 0x02, 0xaa, + 0x9f, 0x82, 0xe2, 0x4d, 0xca, 0x63, 0xf9, 0x9f, 0x0f, 0xc4, 0x4e, 0xb9, 0xfa, 0x59, 0x9b, 0xd8, + 0x6b, 0x36, 0x7f, 0xd5, 0xd6, 0xf6, 0x72, 0x64, 0x33, 0x94, 0x38, 0x5f, 0x66, 0xab, 0x7e, 0x03, + 0x7b, 0x85, 0xf2, 0x77, 0xa1, 0x19, 0x34, 0x43, 0x39, 0x28, 0x35, 0x83, 0x7e, 0x63, 0x50, 0x6a, + 0xfe, 0xd4, 0xd6, 0xf3, 0x03, 0xd7, 0x6d, 0x86, 0xd2, 0x75, 0x4b, 0x95, 0xeb, 0xf6, 0x1b, 0xae, + 0x5b, 0xea, 0x4e, 0xa6, 0xf4, 0x65, 0x5f, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x35, 0xc9, 0x5d, + 0xf0, 0xcc, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/billing_setup.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/billing_setup.pb.go new file mode 100644 index 0000000..f241e55 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/billing_setup.pb.go @@ -0,0 +1,462 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/billing_setup.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A billing setup across Ads and Payments systems; an association between a +// Payments account and an advertiser. A billing setup is specific to one +// advertiser. +type BillingSetup struct { + // The resource name of the billing setup. + // BillingSetup resource names have the form: + // + // `customers/{customer_id}/billingSetups/{billing_setup_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the billing setup. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The status of the billing setup. + Status enums.BillingSetupStatusEnum_BillingSetupStatus `protobuf:"varint,3,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.BillingSetupStatusEnum_BillingSetupStatus" json:"status,omitempty"` + // The resource name of the Payments account associated with this billing + // setup. Payments resource names have the form: + // + // `customers/{customer_id}/paymentsAccounts/{payments_account_id}` + // When setting up billing, this is used to signup with an existing Payments + // account (and then payments_account_info should not be set). + // When getting a billing setup, this and payments_account_info will be + // populated. + PaymentsAccount *wrappers.StringValue `protobuf:"bytes,11,opt,name=payments_account,json=paymentsAccount,proto3" json:"payments_account,omitempty"` + // The Payments account information associated with this billing setup. + // When setting up billing, this is used to signup with a new Payments account + // (and then payments_account should not be set). + // When getting a billing setup, this and payments_account will be + // populated. + PaymentsAccountInfo *BillingSetup_PaymentsAccountInfo `protobuf:"bytes,12,opt,name=payments_account_info,json=paymentsAccountInfo,proto3" json:"payments_account_info,omitempty"` + // When creating a new billing setup, this is when the setup should take + // effect. NOW is the only acceptable start time if the customer doesn't have + // any approved setups. + // + // When fetching an existing billing setup, this is the requested start time. + // However, if the setup was approved (see status) after the requested start + // time, then this is the approval time. + // + // Types that are valid to be assigned to StartTime: + // *BillingSetup_StartDateTime + // *BillingSetup_StartTimeType + StartTime isBillingSetup_StartTime `protobuf_oneof:"start_time"` + // When the billing setup ends / ended. This is either FOREVER or the start + // time of the next scheduled billing setup. 
+ // + // Types that are valid to be assigned to EndTime: + // *BillingSetup_EndDateTime + // *BillingSetup_EndTimeType + EndTime isBillingSetup_EndTime `protobuf_oneof:"end_time"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingSetup) Reset() { *m = BillingSetup{} } +func (m *BillingSetup) String() string { return proto.CompactTextString(m) } +func (*BillingSetup) ProtoMessage() {} +func (*BillingSetup) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_1af7eb39eb240777, []int{0} +} +func (m *BillingSetup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingSetup.Unmarshal(m, b) +} +func (m *BillingSetup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingSetup.Marshal(b, m, deterministic) +} +func (dst *BillingSetup) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingSetup.Merge(dst, src) +} +func (m *BillingSetup) XXX_Size() int { + return xxx_messageInfo_BillingSetup.Size(m) +} +func (m *BillingSetup) XXX_DiscardUnknown() { + xxx_messageInfo_BillingSetup.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingSetup proto.InternalMessageInfo + +func (m *BillingSetup) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *BillingSetup) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *BillingSetup) GetStatus() enums.BillingSetupStatusEnum_BillingSetupStatus { + if m != nil { + return m.Status + } + return enums.BillingSetupStatusEnum_UNSPECIFIED +} + +func (m *BillingSetup) GetPaymentsAccount() *wrappers.StringValue { + if m != nil { + return m.PaymentsAccount + } + return nil +} + +func (m *BillingSetup) GetPaymentsAccountInfo() *BillingSetup_PaymentsAccountInfo { + if m != nil { + return m.PaymentsAccountInfo + } + return nil +} + +type isBillingSetup_StartTime interface { + isBillingSetup_StartTime() +} + +type BillingSetup_StartDateTime struct { + StartDateTime *wrappers.StringValue `protobuf:"bytes,9,opt,name=start_date_time,json=startDateTime,proto3,oneof"` +} + +type BillingSetup_StartTimeType struct { + StartTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,10,opt,name=start_time_type,json=startTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*BillingSetup_StartDateTime) isBillingSetup_StartTime() {} + +func (*BillingSetup_StartTimeType) isBillingSetup_StartTime() {} + +func (m *BillingSetup) GetStartTime() isBillingSetup_StartTime { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *BillingSetup) GetStartDateTime() *wrappers.StringValue { + if x, ok := m.GetStartTime().(*BillingSetup_StartDateTime); ok { + return x.StartDateTime + } + return nil +} + +func (m *BillingSetup) GetStartTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetStartTime().(*BillingSetup_StartTimeType); ok { + return x.StartTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +type isBillingSetup_EndTime interface { + isBillingSetup_EndTime() +} + +type BillingSetup_EndDateTime struct { + EndDateTime *wrappers.StringValue `protobuf:"bytes,13,opt,name=end_date_time,json=endDateTime,proto3,oneof"` +} + +type BillingSetup_EndTimeType struct { + EndTimeType enums.TimeTypeEnum_TimeType `protobuf:"varint,14,opt,name=end_time_type,json=endTimeType,proto3,enum=google.ads.googleads.v1.enums.TimeTypeEnum_TimeType,oneof"` +} + +func (*BillingSetup_EndDateTime) isBillingSetup_EndTime() {} + +func 
(*BillingSetup_EndTimeType) isBillingSetup_EndTime() {} + +func (m *BillingSetup) GetEndTime() isBillingSetup_EndTime { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *BillingSetup) GetEndDateTime() *wrappers.StringValue { + if x, ok := m.GetEndTime().(*BillingSetup_EndDateTime); ok { + return x.EndDateTime + } + return nil +} + +func (m *BillingSetup) GetEndTimeType() enums.TimeTypeEnum_TimeType { + if x, ok := m.GetEndTime().(*BillingSetup_EndTimeType); ok { + return x.EndTimeType + } + return enums.TimeTypeEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BillingSetup) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BillingSetup_OneofMarshaler, _BillingSetup_OneofUnmarshaler, _BillingSetup_OneofSizer, []interface{}{ + (*BillingSetup_StartDateTime)(nil), + (*BillingSetup_StartTimeType)(nil), + (*BillingSetup_EndDateTime)(nil), + (*BillingSetup_EndTimeType)(nil), + } +} + +func _BillingSetup_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BillingSetup) + // start_time + switch x := m.StartTime.(type) { + case *BillingSetup_StartDateTime: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StartDateTime); err != nil { + return err + } + case *BillingSetup_StartTimeType: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.StartTimeType)) + case nil: + default: + return fmt.Errorf("BillingSetup.StartTime has unexpected type %T", x) + } + // end_time + switch x := m.EndTime.(type) { + case *BillingSetup_EndDateTime: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EndDateTime); err != nil { + return err + } + case *BillingSetup_EndTimeType: + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.EndTimeType)) + case nil: + default: + return fmt.Errorf("BillingSetup.EndTime has unexpected type %T", x) + } + return nil +} + +func _BillingSetup_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BillingSetup) + switch tag { + case 9: // start_time.start_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.StartTime = &BillingSetup_StartDateTime{msg} + return true, err + case 10: // start_time.start_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.StartTime = &BillingSetup_StartTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + case 13: // end_time.end_date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.EndTime = &BillingSetup_EndDateTime{msg} + return true, err + case 14: // end_time.end_time_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.EndTime = &BillingSetup_EndTimeType{enums.TimeTypeEnum_TimeType(x)} + return true, err + default: + return false, nil + } +} + +func _BillingSetup_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BillingSetup) + // start_time + switch x := m.StartTime.(type) { + case *BillingSetup_StartDateTime: + s := proto.Size(x.StartDateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BillingSetup_StartTimeType: + n += 1 // tag 
and wire + n += proto.SizeVarint(uint64(x.StartTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_time + switch x := m.EndTime.(type) { + case *BillingSetup_EndDateTime: + s := proto.Size(x.EndDateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BillingSetup_EndTimeType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.EndTimeType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Container of Payments account information for this billing. +type BillingSetup_PaymentsAccountInfo struct { + // A 16 digit id used to identify the Payments account associated with the + // billing setup. + // + // This must be passed as a string with dashes, e.g. "1234-5678-9012-3456". + PaymentsAccountId *wrappers.StringValue `protobuf:"bytes,1,opt,name=payments_account_id,json=paymentsAccountId,proto3" json:"payments_account_id,omitempty"` + // The name of the Payments account associated with the billing setup. + // + // This enables the user to specify a meaningful name for a Payments account + // to aid in reconciling monthly invoices. + // + // This name will be printed in the monthly invoices. + PaymentsAccountName *wrappers.StringValue `protobuf:"bytes,2,opt,name=payments_account_name,json=paymentsAccountName,proto3" json:"payments_account_name,omitempty"` + // A 12 digit id used to identify the Payments profile associated with the + // billing setup. + // + // This must be passed in as a string with dashes, e.g. "1234-5678-9012". + PaymentsProfileId *wrappers.StringValue `protobuf:"bytes,3,opt,name=payments_profile_id,json=paymentsProfileId,proto3" json:"payments_profile_id,omitempty"` + // The name of the Payments profile associated with the billing setup. + PaymentsProfileName *wrappers.StringValue `protobuf:"bytes,4,opt,name=payments_profile_name,json=paymentsProfileName,proto3" json:"payments_profile_name,omitempty"` + // A secondary payments profile id present in uncommon situations, e.g. + // when a sequential liability agreement has been arranged. 
+ SecondaryPaymentsProfileId *wrappers.StringValue `protobuf:"bytes,5,opt,name=secondary_payments_profile_id,json=secondaryPaymentsProfileId,proto3" json:"secondary_payments_profile_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingSetup_PaymentsAccountInfo) Reset() { *m = BillingSetup_PaymentsAccountInfo{} } +func (m *BillingSetup_PaymentsAccountInfo) String() string { return proto.CompactTextString(m) } +func (*BillingSetup_PaymentsAccountInfo) ProtoMessage() {} +func (*BillingSetup_PaymentsAccountInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_1af7eb39eb240777, []int{0, 0} +} +func (m *BillingSetup_PaymentsAccountInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingSetup_PaymentsAccountInfo.Unmarshal(m, b) +} +func (m *BillingSetup_PaymentsAccountInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingSetup_PaymentsAccountInfo.Marshal(b, m, deterministic) +} +func (dst *BillingSetup_PaymentsAccountInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingSetup_PaymentsAccountInfo.Merge(dst, src) +} +func (m *BillingSetup_PaymentsAccountInfo) XXX_Size() int { + return xxx_messageInfo_BillingSetup_PaymentsAccountInfo.Size(m) +} +func (m *BillingSetup_PaymentsAccountInfo) XXX_DiscardUnknown() { + xxx_messageInfo_BillingSetup_PaymentsAccountInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingSetup_PaymentsAccountInfo proto.InternalMessageInfo + +func (m *BillingSetup_PaymentsAccountInfo) GetPaymentsAccountId() *wrappers.StringValue { + if m != nil { + return m.PaymentsAccountId + } + return nil +} + +func (m *BillingSetup_PaymentsAccountInfo) GetPaymentsAccountName() *wrappers.StringValue { + if m != nil { + return m.PaymentsAccountName + } + return nil +} + +func (m *BillingSetup_PaymentsAccountInfo) GetPaymentsProfileId() *wrappers.StringValue { + if m != nil { + return m.PaymentsProfileId + } + return nil +} + +func (m *BillingSetup_PaymentsAccountInfo) GetPaymentsProfileName() *wrappers.StringValue { + if m != nil { + return m.PaymentsProfileName + } + return nil +} + +func (m *BillingSetup_PaymentsAccountInfo) GetSecondaryPaymentsProfileId() *wrappers.StringValue { + if m != nil { + return m.SecondaryPaymentsProfileId + } + return nil +} + +func init() { + proto.RegisterType((*BillingSetup)(nil), "google.ads.googleads.v1.resources.BillingSetup") + proto.RegisterType((*BillingSetup_PaymentsAccountInfo)(nil), "google.ads.googleads.v1.resources.BillingSetup.PaymentsAccountInfo") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/billing_setup.proto", fileDescriptor_billing_setup_1af7eb39eb240777) +} + +var fileDescriptor_billing_setup_1af7eb39eb240777 = []byte{ + // 631 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0x80, 0x49, 0x0a, 0x13, 0xf3, 0xda, 0x8d, 0x65, 0x42, 0x8a, 0xca, 0x40, 0x1b, 0x68, 0xd2, + 0x24, 0x84, 0xa3, 0x8e, 0x81, 0x50, 0xb8, 0x6a, 0xf9, 0xd9, 0x86, 0x10, 0xaa, 0xb2, 0xa9, 0x17, + 0x53, 0x45, 0xf0, 0x6a, 0x37, 0xb2, 0x94, 0xd8, 0x51, 0xec, 0x6c, 0xea, 0xd3, 0x20, 0x71, 0xc9, + 0x03, 0xf0, 0x10, 0x3c, 0x0a, 0x0f, 0x81, 0x50, 0xec, 0x38, 0xeb, 0xb2, 0x6e, 0xdd, 0xc4, 0x9d, + 0xed, 0x9c, 0xf3, 0xe9, 0xf3, 0x39, 0xc7, 0x2d, 0x78, 0x15, 0x71, 0x1e, 0xc5, 0xc4, 0x43, 0x58, + 0x78, 0x7a, 0x59, 0xac, 0x4e, 0x3b, 0x5e, 0x46, 0x04, 0xcf, 0xb3, 0x11, 0x11, 0xde, 
0x09, 0x8d, + 0x63, 0xca, 0xa2, 0x50, 0x10, 0x99, 0xa7, 0x30, 0xcd, 0xb8, 0xe4, 0xce, 0xa6, 0x8e, 0x85, 0x08, + 0x0b, 0x58, 0xa5, 0xc1, 0xd3, 0x0e, 0xac, 0xd2, 0xda, 0x6f, 0xae, 0x22, 0x13, 0x96, 0x27, 0x35, + 0x6a, 0x28, 0x24, 0x92, 0xb9, 0xd0, 0xf0, 0xf6, 0x8b, 0xeb, 0x33, 0x25, 0x4d, 0x48, 0x28, 0x27, + 0x29, 0x29, 0xc3, 0x9f, 0x94, 0xe1, 0x6a, 0x77, 0x92, 0x8f, 0xbd, 0xb3, 0x0c, 0xa5, 0x29, 0xc9, + 0x0c, 0x6e, 0xdd, 0xe0, 0x52, 0xea, 0x21, 0xc6, 0xb8, 0x44, 0x92, 0x72, 0x56, 0x7e, 0x7d, 0xfa, + 0x7d, 0x11, 0x34, 0x7b, 0xda, 0xe5, 0xb0, 0x50, 0x71, 0x9e, 0x81, 0x96, 0xb9, 0x44, 0xc8, 0x50, + 0x42, 0x5c, 0x6b, 0xc3, 0xda, 0x5e, 0x0c, 0x9a, 0xe6, 0xf0, 0x0b, 0x4a, 0x88, 0xf3, 0x1c, 0xd8, + 0x14, 0xbb, 0xf6, 0x86, 0xb5, 0xbd, 0xb4, 0xf3, 0xa8, 0xac, 0x00, 0x34, 0x02, 0xf0, 0x80, 0xc9, + 0xd7, 0xbb, 0x03, 0x14, 0xe7, 0x24, 0xb0, 0x29, 0x76, 0xbe, 0x81, 0x05, 0x7d, 0x3f, 0xb7, 0xb1, + 0x61, 0x6d, 0x2f, 0xef, 0xec, 0xc3, 0xab, 0xaa, 0xa7, 0x2e, 0x08, 0xa7, 0x75, 0x0e, 0x55, 0xe2, + 0x07, 0x96, 0x27, 0x33, 0x8e, 0x83, 0x92, 0xeb, 0xec, 0x81, 0x07, 0x29, 0x9a, 0x24, 0x84, 0x49, + 0x11, 0xa2, 0xd1, 0x88, 0xe7, 0x4c, 0xba, 0x4b, 0x4a, 0x6e, 0xfd, 0x92, 0xdc, 0xa1, 0xcc, 0x28, + 0x8b, 0xb4, 0xdd, 0x8a, 0xc9, 0xea, 0xea, 0x24, 0xe7, 0x0c, 0x3c, 0xac, 0x83, 0x42, 0xca, 0xc6, + 0xdc, 0x6d, 0x2a, 0xda, 0x3b, 0x38, 0xb7, 0xef, 0x17, 0x34, 0x61, 0xff, 0x22, 0xff, 0x80, 0x8d, + 0x79, 0xb0, 0x96, 0x5e, 0x3e, 0x74, 0x3e, 0x82, 0x15, 0x21, 0x51, 0x26, 0x43, 0x8c, 0x24, 0x09, + 0x8b, 0x16, 0xbb, 0x8b, 0xf3, 0x2f, 0xb0, 0x7f, 0x27, 0x68, 0xa9, 0xb4, 0xf7, 0x48, 0x92, 0x23, + 0x9a, 0x10, 0xe7, 0xab, 0xe1, 0x54, 0x53, 0xe2, 0x02, 0x55, 0xf4, 0xdd, 0x39, 0x45, 0x2f, 0xb2, + 0x8f, 0x26, 0x29, 0x51, 0xa5, 0x36, 0x9b, 0x8a, 0x6f, 0x0e, 0x9c, 0x1e, 0x68, 0x11, 0x86, 0xa7, + 0x2c, 0x5b, 0x37, 0xb0, 0xb4, 0x82, 0x25, 0xc2, 0x70, 0xe5, 0x78, 0xac, 0x19, 0xe7, 0x86, 0xcb, + 0xff, 0x61, 0xa8, 0xd9, 0x66, 0xdb, 0xfe, 0xd5, 0x00, 0x6b, 0x33, 0x8a, 0xee, 0x7c, 0x06, 0x6b, + 0x97, 0x1b, 0x8b, 0xd5, 0x6c, 0xcf, 0x1b, 0x92, 0xd5, 0x7a, 0xbf, 0xb0, 0xd3, 0x9f, 0x31, 0x26, + 0xea, 0xad, 0xd8, 0x37, 0xe0, 0xd5, 0xfb, 0xaf, 0x1e, 0xd4, 0xb4, 0x5f, 0x9a, 0xf1, 0x31, 0x8d, + 0x49, 0xe1, 0xd7, 0xb8, 0x8d, 0x5f, 0x5f, 0xe7, 0xd5, 0xfc, 0x0c, 0x4d, 0xf9, 0xdd, 0xbd, 0x8d, + 0x5f, 0xc9, 0x53, 0x7e, 0x21, 0x78, 0x2c, 0xc8, 0x88, 0x33, 0x8c, 0xb2, 0x49, 0x38, 0xcb, 0xf4, + 0xde, 0x0d, 0xc8, 0xed, 0x0a, 0xd1, 0xaf, 0x2b, 0xf7, 0x9a, 0x00, 0x9c, 0x0f, 0x6e, 0x0f, 0x80, + 0xfb, 0x66, 0x44, 0x7a, 0x7f, 0x2d, 0xb0, 0x35, 0xe2, 0xc9, 0xfc, 0xa7, 0xd7, 0x5b, 0x9d, 0x7e, + 0x7b, 0xfd, 0xc2, 0xa0, 0x6f, 0x1d, 0x7f, 0x2a, 0xf3, 0x22, 0x1e, 0x23, 0x16, 0x41, 0x9e, 0x45, + 0x5e, 0x44, 0x98, 0xf2, 0x33, 0xbf, 0xae, 0x29, 0x15, 0xd7, 0xfc, 0x01, 0xbc, 0xad, 0x56, 0x3f, + 0xec, 0xc6, 0x5e, 0xb7, 0xfb, 0xd3, 0xde, 0xdc, 0xd3, 0xc8, 0x2e, 0x16, 0x50, 0x2f, 0x8b, 0xd5, + 0xa0, 0x03, 0x03, 0x13, 0xf9, 0xdb, 0xc4, 0x0c, 0xbb, 0x58, 0x0c, 0xab, 0x98, 0xe1, 0xa0, 0x33, + 0xac, 0x62, 0xfe, 0xd8, 0x5b, 0xfa, 0x83, 0xef, 0x77, 0xb1, 0xf0, 0xfd, 0x2a, 0xca, 0xf7, 0x07, + 0x1d, 0xdf, 0xaf, 0xe2, 0x4e, 0x16, 0x94, 0xec, 0xcb, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x50, + 0x93, 0xbd, 0xf6, 0xac, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign.pb.go new file mode 100644 index 0000000..a8c5691 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign.pb.go @@ -0,0 +1,1510 @@ 
+// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign. +type Campaign struct { + // The resource name of the campaign. + // Campaign resource names have the form: + // + // `customers/{customer_id}/campaigns/{campaign_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the campaign. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the campaign. + // + // This field is required and should not be empty when creating new + // campaigns. + // + // It must not contain any null (code point 0x0), NL line feed + // (code point 0xA) or carriage return (code point 0xD) characters. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The status of the campaign. + // + // When a new campaign is added, the status defaults to ENABLED. + Status enums.CampaignStatusEnum_CampaignStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.CampaignStatusEnum_CampaignStatus" json:"status,omitempty"` + // The ad serving status of the campaign. + ServingStatus enums.CampaignServingStatusEnum_CampaignServingStatus `protobuf:"varint,21,opt,name=serving_status,json=servingStatus,proto3,enum=google.ads.googleads.v1.enums.CampaignServingStatusEnum_CampaignServingStatus" json:"serving_status,omitempty"` + // The ad serving optimization status of the campaign. + AdServingOptimizationStatus enums.AdServingOptimizationStatusEnum_AdServingOptimizationStatus `protobuf:"varint,8,opt,name=ad_serving_optimization_status,json=adServingOptimizationStatus,proto3,enum=google.ads.googleads.v1.enums.AdServingOptimizationStatusEnum_AdServingOptimizationStatus" json:"ad_serving_optimization_status,omitempty"` + // The primary serving target for ads within the campaign. + // The targeting options can be refined in `network_settings`. + // + // This field is required and should not be empty when creating new + // campaigns. + // + // Can be set only when creating campaigns. + // After the campaign is created, the field can not be changed. + AdvertisingChannelType enums.AdvertisingChannelTypeEnum_AdvertisingChannelType `protobuf:"varint,9,opt,name=advertising_channel_type,json=advertisingChannelType,proto3,enum=google.ads.googleads.v1.enums.AdvertisingChannelTypeEnum_AdvertisingChannelType" json:"advertising_channel_type,omitempty"` + // Optional refinement to `advertising_channel_type`. 
+ // Must be a valid sub-type of the parent channel type. + // + // Can be set only when creating campaigns. + // After campaign is created, the field can not be changed. + AdvertisingChannelSubType enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType `protobuf:"varint,10,opt,name=advertising_channel_sub_type,json=advertisingChannelSubType,proto3,enum=google.ads.googleads.v1.enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType" json:"advertising_channel_sub_type,omitempty"` + // The URL template for constructing a tracking URL. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,11,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The list of mappings used to substitute custom parameter tags in a + // `tracking_url_template`, `final_urls`, or `mobile_final_urls`. + UrlCustomParameters []*common.CustomParameter `protobuf:"bytes,12,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // Settings for Real-Time Bidding, a feature only available for campaigns + // targeting the Ad Exchange network. + RealTimeBiddingSetting *common.RealTimeBiddingSetting `protobuf:"bytes,39,opt,name=real_time_bidding_setting,json=realTimeBiddingSetting,proto3" json:"real_time_bidding_setting,omitempty"` + // The network settings for the campaign. + NetworkSettings *Campaign_NetworkSettings `protobuf:"bytes,14,opt,name=network_settings,json=networkSettings,proto3" json:"network_settings,omitempty"` + // The hotel setting for the campaign. + HotelSetting *Campaign_HotelSettingInfo `protobuf:"bytes,32,opt,name=hotel_setting,json=hotelSetting,proto3" json:"hotel_setting,omitempty"` + // The setting for controlling Dynamic Search Ads (DSA). + DynamicSearchAdsSetting *Campaign_DynamicSearchAdsSetting `protobuf:"bytes,33,opt,name=dynamic_search_ads_setting,json=dynamicSearchAdsSetting,proto3" json:"dynamic_search_ads_setting,omitempty"` + // The setting for controlling Shopping campaigns. + ShoppingSetting *Campaign_ShoppingSetting `protobuf:"bytes,36,opt,name=shopping_setting,json=shoppingSetting,proto3" json:"shopping_setting,omitempty"` + // Setting for targeting related features. + TargetingSetting *common.TargetingSetting `protobuf:"bytes,43,opt,name=targeting_setting,json=targetingSetting,proto3" json:"targeting_setting,omitempty"` + // The setting for ads geotargeting. + GeoTargetTypeSetting *Campaign_GeoTargetTypeSetting `protobuf:"bytes,47,opt,name=geo_target_type_setting,json=geoTargetTypeSetting,proto3" json:"geo_target_type_setting,omitempty"` + // The setting related to App Campaign. + AppCampaignSetting *Campaign_AppCampaignSetting `protobuf:"bytes,51,opt,name=app_campaign_setting,json=appCampaignSetting,proto3" json:"app_campaign_setting,omitempty"` + // The type of campaign: normal, draft, or experiment. + ExperimentType enums.CampaignExperimentTypeEnum_CampaignExperimentType `protobuf:"varint,17,opt,name=experiment_type,json=experimentType,proto3,enum=google.ads.googleads.v1.enums.CampaignExperimentTypeEnum_CampaignExperimentType" json:"experiment_type,omitempty"` + // The resource name of the base campaign of a draft or experiment campaign. + // For base campaigns, this is equal to `resource_name`. + // + // This field is read-only. + BaseCampaign *wrappers.StringValue `protobuf:"bytes,28,opt,name=base_campaign,json=baseCampaign,proto3" json:"base_campaign,omitempty"` + // The budget of the campaign. 
+ CampaignBudget *wrappers.StringValue `protobuf:"bytes,6,opt,name=campaign_budget,json=campaignBudget,proto3" json:"campaign_budget,omitempty"` + // The type of bidding strategy. + // + // A bidding strategy can be created by setting either the bidding scheme to + // create a standard bidding strategy or the `bidding_strategy` field to + // create a portfolio bidding strategy. + // + // This field is read-only. + BiddingStrategyType enums.BiddingStrategyTypeEnum_BiddingStrategyType `protobuf:"varint,22,opt,name=bidding_strategy_type,json=biddingStrategyType,proto3,enum=google.ads.googleads.v1.enums.BiddingStrategyTypeEnum_BiddingStrategyType" json:"bidding_strategy_type,omitempty"` + // The date when campaign started. + // + // This field must not be used in WHERE clauses. + StartDate *wrappers.StringValue `protobuf:"bytes,19,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // The date when campaign ended. + // + // This field must not be used in WHERE clauses. + EndDate *wrappers.StringValue `protobuf:"bytes,20,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // Suffix used to append query parameters to landing pages that are served + // with parallel tracking. + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,38,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + // A list that limits how often each user will see this campaign's ads. + FrequencyCaps []*common.FrequencyCapEntry `protobuf:"bytes,40,rep,name=frequency_caps,json=frequencyCaps,proto3" json:"frequency_caps,omitempty"` + // 3-Tier Brand Safety setting for the campaign. + VideoBrandSafetySuitability enums.BrandSafetySuitabilityEnum_BrandSafetySuitability `protobuf:"varint,42,opt,name=video_brand_safety_suitability,json=videoBrandSafetySuitability,proto3,enum=google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum_BrandSafetySuitability" json:"video_brand_safety_suitability,omitempty"` + // Describes how unbranded pharma ads will be displayed. + VanityPharma *Campaign_VanityPharma `protobuf:"bytes,44,opt,name=vanity_pharma,json=vanityPharma,proto3" json:"vanity_pharma,omitempty"` + // Selective optimization setting for this campaign, which includes a set of + // conversion actions to optimize this campaign towards. + SelectiveOptimization *Campaign_SelectiveOptimization `protobuf:"bytes,45,opt,name=selective_optimization,json=selectiveOptimization,proto3" json:"selective_optimization,omitempty"` + // Campaign level settings for tracking information. + TrackingSetting *Campaign_TrackingSetting `protobuf:"bytes,46,opt,name=tracking_setting,json=trackingSetting,proto3" json:"tracking_setting,omitempty"` + // Payment mode for the campaign. + PaymentMode enums.PaymentModeEnum_PaymentMode `protobuf:"varint,52,opt,name=payment_mode,json=paymentMode,proto3,enum=google.ads.googleads.v1.enums.PaymentModeEnum_PaymentMode" json:"payment_mode,omitempty"` + // The bidding strategy for the campaign. + // + // Must be either portfolio (created via BiddingStrategy service) or + // standard, that is embedded into the campaign. 
+ // + // Types that are valid to be assigned to CampaignBiddingStrategy: + // *Campaign_BiddingStrategy + // *Campaign_Commission + // *Campaign_ManualCpc + // *Campaign_ManualCpm + // *Campaign_ManualCpv + // *Campaign_MaximizeConversions + // *Campaign_MaximizeConversionValue + // *Campaign_TargetCpa + // *Campaign_TargetImpressionShare + // *Campaign_TargetRoas + // *Campaign_TargetSpend + // *Campaign_PercentCpc + // *Campaign_TargetCpm + CampaignBiddingStrategy isCampaign_CampaignBiddingStrategy `protobuf_oneof:"campaign_bidding_strategy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign) Reset() { *m = Campaign{} } +func (m *Campaign) String() string { return proto.CompactTextString(m) } +func (*Campaign) ProtoMessage() {} +func (*Campaign) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0} +} +func (m *Campaign) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign.Unmarshal(m, b) +} +func (m *Campaign) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign.Marshal(b, m, deterministic) +} +func (dst *Campaign) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign.Merge(dst, src) +} +func (m *Campaign) XXX_Size() int { + return xxx_messageInfo_Campaign.Size(m) +} +func (m *Campaign) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign proto.InternalMessageInfo + +func (m *Campaign) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Campaign) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Campaign) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *Campaign) GetStatus() enums.CampaignStatusEnum_CampaignStatus { + if m != nil { + return m.Status + } + return enums.CampaignStatusEnum_UNSPECIFIED +} + +func (m *Campaign) GetServingStatus() enums.CampaignServingStatusEnum_CampaignServingStatus { + if m != nil { + return m.ServingStatus + } + return enums.CampaignServingStatusEnum_UNSPECIFIED +} + +func (m *Campaign) GetAdServingOptimizationStatus() enums.AdServingOptimizationStatusEnum_AdServingOptimizationStatus { + if m != nil { + return m.AdServingOptimizationStatus + } + return enums.AdServingOptimizationStatusEnum_UNSPECIFIED +} + +func (m *Campaign) GetAdvertisingChannelType() enums.AdvertisingChannelTypeEnum_AdvertisingChannelType { + if m != nil { + return m.AdvertisingChannelType + } + return enums.AdvertisingChannelTypeEnum_UNSPECIFIED +} + +func (m *Campaign) GetAdvertisingChannelSubType() enums.AdvertisingChannelSubTypeEnum_AdvertisingChannelSubType { + if m != nil { + return m.AdvertisingChannelSubType + } + return enums.AdvertisingChannelSubTypeEnum_UNSPECIFIED +} + +func (m *Campaign) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *Campaign) GetUrlCustomParameters() []*common.CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *Campaign) GetRealTimeBiddingSetting() *common.RealTimeBiddingSetting { + if m != nil { + return m.RealTimeBiddingSetting + } + return nil +} + +func (m *Campaign) GetNetworkSettings() *Campaign_NetworkSettings { + if m != nil { + return m.NetworkSettings + } + return nil +} + +func (m *Campaign) GetHotelSetting() *Campaign_HotelSettingInfo { + if m != nil { + 
return m.HotelSetting + } + return nil +} + +func (m *Campaign) GetDynamicSearchAdsSetting() *Campaign_DynamicSearchAdsSetting { + if m != nil { + return m.DynamicSearchAdsSetting + } + return nil +} + +func (m *Campaign) GetShoppingSetting() *Campaign_ShoppingSetting { + if m != nil { + return m.ShoppingSetting + } + return nil +} + +func (m *Campaign) GetTargetingSetting() *common.TargetingSetting { + if m != nil { + return m.TargetingSetting + } + return nil +} + +func (m *Campaign) GetGeoTargetTypeSetting() *Campaign_GeoTargetTypeSetting { + if m != nil { + return m.GeoTargetTypeSetting + } + return nil +} + +func (m *Campaign) GetAppCampaignSetting() *Campaign_AppCampaignSetting { + if m != nil { + return m.AppCampaignSetting + } + return nil +} + +func (m *Campaign) GetExperimentType() enums.CampaignExperimentTypeEnum_CampaignExperimentType { + if m != nil { + return m.ExperimentType + } + return enums.CampaignExperimentTypeEnum_UNSPECIFIED +} + +func (m *Campaign) GetBaseCampaign() *wrappers.StringValue { + if m != nil { + return m.BaseCampaign + } + return nil +} + +func (m *Campaign) GetCampaignBudget() *wrappers.StringValue { + if m != nil { + return m.CampaignBudget + } + return nil +} + +func (m *Campaign) GetBiddingStrategyType() enums.BiddingStrategyTypeEnum_BiddingStrategyType { + if m != nil { + return m.BiddingStrategyType + } + return enums.BiddingStrategyTypeEnum_UNSPECIFIED +} + +func (m *Campaign) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *Campaign) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +func (m *Campaign) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +func (m *Campaign) GetFrequencyCaps() []*common.FrequencyCapEntry { + if m != nil { + return m.FrequencyCaps + } + return nil +} + +func (m *Campaign) GetVideoBrandSafetySuitability() enums.BrandSafetySuitabilityEnum_BrandSafetySuitability { + if m != nil { + return m.VideoBrandSafetySuitability + } + return enums.BrandSafetySuitabilityEnum_UNSPECIFIED +} + +func (m *Campaign) GetVanityPharma() *Campaign_VanityPharma { + if m != nil { + return m.VanityPharma + } + return nil +} + +func (m *Campaign) GetSelectiveOptimization() *Campaign_SelectiveOptimization { + if m != nil { + return m.SelectiveOptimization + } + return nil +} + +func (m *Campaign) GetTrackingSetting() *Campaign_TrackingSetting { + if m != nil { + return m.TrackingSetting + } + return nil +} + +func (m *Campaign) GetPaymentMode() enums.PaymentModeEnum_PaymentMode { + if m != nil { + return m.PaymentMode + } + return enums.PaymentModeEnum_UNSPECIFIED +} + +type isCampaign_CampaignBiddingStrategy interface { + isCampaign_CampaignBiddingStrategy() +} + +type Campaign_BiddingStrategy struct { + BiddingStrategy *wrappers.StringValue `protobuf:"bytes,23,opt,name=bidding_strategy,json=biddingStrategy,proto3,oneof"` +} + +type Campaign_Commission struct { + Commission *common.Commission `protobuf:"bytes,49,opt,name=commission,proto3,oneof"` +} + +type Campaign_ManualCpc struct { + ManualCpc *common.ManualCpc `protobuf:"bytes,24,opt,name=manual_cpc,json=manualCpc,proto3,oneof"` +} + +type Campaign_ManualCpm struct { + ManualCpm *common.ManualCpm `protobuf:"bytes,25,opt,name=manual_cpm,json=manualCpm,proto3,oneof"` +} + +type Campaign_ManualCpv struct { + ManualCpv *common.ManualCpv `protobuf:"bytes,37,opt,name=manual_cpv,json=manualCpv,proto3,oneof"` +} + +type 
Campaign_MaximizeConversions struct { + MaximizeConversions *common.MaximizeConversions `protobuf:"bytes,30,opt,name=maximize_conversions,json=maximizeConversions,proto3,oneof"` +} + +type Campaign_MaximizeConversionValue struct { + MaximizeConversionValue *common.MaximizeConversionValue `protobuf:"bytes,31,opt,name=maximize_conversion_value,json=maximizeConversionValue,proto3,oneof"` +} + +type Campaign_TargetCpa struct { + TargetCpa *common.TargetCpa `protobuf:"bytes,26,opt,name=target_cpa,json=targetCpa,proto3,oneof"` +} + +type Campaign_TargetImpressionShare struct { + TargetImpressionShare *common.TargetImpressionShare `protobuf:"bytes,48,opt,name=target_impression_share,json=targetImpressionShare,proto3,oneof"` +} + +type Campaign_TargetRoas struct { + TargetRoas *common.TargetRoas `protobuf:"bytes,29,opt,name=target_roas,json=targetRoas,proto3,oneof"` +} + +type Campaign_TargetSpend struct { + TargetSpend *common.TargetSpend `protobuf:"bytes,27,opt,name=target_spend,json=targetSpend,proto3,oneof"` +} + +type Campaign_PercentCpc struct { + PercentCpc *common.PercentCpc `protobuf:"bytes,34,opt,name=percent_cpc,json=percentCpc,proto3,oneof"` +} + +type Campaign_TargetCpm struct { + TargetCpm *common.TargetCpm `protobuf:"bytes,41,opt,name=target_cpm,json=targetCpm,proto3,oneof"` +} + +func (*Campaign_BiddingStrategy) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_Commission) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_ManualCpc) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_ManualCpm) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_ManualCpv) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_MaximizeConversions) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_MaximizeConversionValue) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_TargetCpa) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_TargetImpressionShare) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_TargetRoas) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_TargetSpend) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_PercentCpc) isCampaign_CampaignBiddingStrategy() {} + +func (*Campaign_TargetCpm) isCampaign_CampaignBiddingStrategy() {} + +func (m *Campaign) GetCampaignBiddingStrategy() isCampaign_CampaignBiddingStrategy { + if m != nil { + return m.CampaignBiddingStrategy + } + return nil +} + +func (m *Campaign) GetBiddingStrategy() *wrappers.StringValue { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_BiddingStrategy); ok { + return x.BiddingStrategy + } + return nil +} + +func (m *Campaign) GetCommission() *common.Commission { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_Commission); ok { + return x.Commission + } + return nil +} + +func (m *Campaign) GetManualCpc() *common.ManualCpc { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_ManualCpc); ok { + return x.ManualCpc + } + return nil +} + +func (m *Campaign) GetManualCpm() *common.ManualCpm { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_ManualCpm); ok { + return x.ManualCpm + } + return nil +} + +func (m *Campaign) GetManualCpv() *common.ManualCpv { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_ManualCpv); ok { + return x.ManualCpv + } + return nil +} + +func (m *Campaign) GetMaximizeConversions() *common.MaximizeConversions { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_MaximizeConversions); ok { + return x.MaximizeConversions + } + return nil +} + +func (m *Campaign) 
GetMaximizeConversionValue() *common.MaximizeConversionValue { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_MaximizeConversionValue); ok { + return x.MaximizeConversionValue + } + return nil +} + +func (m *Campaign) GetTargetCpa() *common.TargetCpa { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_TargetCpa); ok { + return x.TargetCpa + } + return nil +} + +func (m *Campaign) GetTargetImpressionShare() *common.TargetImpressionShare { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_TargetImpressionShare); ok { + return x.TargetImpressionShare + } + return nil +} + +func (m *Campaign) GetTargetRoas() *common.TargetRoas { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_TargetRoas); ok { + return x.TargetRoas + } + return nil +} + +func (m *Campaign) GetTargetSpend() *common.TargetSpend { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_TargetSpend); ok { + return x.TargetSpend + } + return nil +} + +func (m *Campaign) GetPercentCpc() *common.PercentCpc { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_PercentCpc); ok { + return x.PercentCpc + } + return nil +} + +func (m *Campaign) GetTargetCpm() *common.TargetCpm { + if x, ok := m.GetCampaignBiddingStrategy().(*Campaign_TargetCpm); ok { + return x.TargetCpm + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Campaign) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Campaign_OneofMarshaler, _Campaign_OneofUnmarshaler, _Campaign_OneofSizer, []interface{}{ + (*Campaign_BiddingStrategy)(nil), + (*Campaign_Commission)(nil), + (*Campaign_ManualCpc)(nil), + (*Campaign_ManualCpm)(nil), + (*Campaign_ManualCpv)(nil), + (*Campaign_MaximizeConversions)(nil), + (*Campaign_MaximizeConversionValue)(nil), + (*Campaign_TargetCpa)(nil), + (*Campaign_TargetImpressionShare)(nil), + (*Campaign_TargetRoas)(nil), + (*Campaign_TargetSpend)(nil), + (*Campaign_PercentCpc)(nil), + (*Campaign_TargetCpm)(nil), + } +} + +func _Campaign_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Campaign) + // campaign_bidding_strategy + switch x := m.CampaignBiddingStrategy.(type) { + case *Campaign_BiddingStrategy: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BiddingStrategy); err != nil { + return err + } + case *Campaign_Commission: + b.EncodeVarint(49<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Commission); err != nil { + return err + } + case *Campaign_ManualCpc: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManualCpc); err != nil { + return err + } + case *Campaign_ManualCpm: + b.EncodeVarint(25<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManualCpm); err != nil { + return err + } + case *Campaign_ManualCpv: + b.EncodeVarint(37<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManualCpv); err != nil { + return err + } + case *Campaign_MaximizeConversions: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MaximizeConversions); err != nil { + return err + } + case *Campaign_MaximizeConversionValue: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MaximizeConversionValue); err != nil { + return err + } + case *Campaign_TargetCpa: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetCpa); err != nil { + return err + } + case *Campaign_TargetImpressionShare: + b.EncodeVarint(48<<3 | 
proto.WireBytes) + if err := b.EncodeMessage(x.TargetImpressionShare); err != nil { + return err + } + case *Campaign_TargetRoas: + b.EncodeVarint(29<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetRoas); err != nil { + return err + } + case *Campaign_TargetSpend: + b.EncodeVarint(27<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetSpend); err != nil { + return err + } + case *Campaign_PercentCpc: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PercentCpc); err != nil { + return err + } + case *Campaign_TargetCpm: + b.EncodeVarint(41<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetCpm); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Campaign.CampaignBiddingStrategy has unexpected type %T", x) + } + return nil +} + +func _Campaign_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Campaign) + switch tag { + case 23: // campaign_bidding_strategy.bidding_strategy + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_BiddingStrategy{msg} + return true, err + case 49: // campaign_bidding_strategy.commission + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.Commission) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_Commission{msg} + return true, err + case 24: // campaign_bidding_strategy.manual_cpc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ManualCpc) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_ManualCpc{msg} + return true, err + case 25: // campaign_bidding_strategy.manual_cpm + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ManualCpm) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_ManualCpm{msg} + return true, err + case 37: // campaign_bidding_strategy.manual_cpv + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ManualCpv) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_ManualCpv{msg} + return true, err + case 30: // campaign_bidding_strategy.maximize_conversions + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MaximizeConversions) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_MaximizeConversions{msg} + return true, err + case 31: // campaign_bidding_strategy.maximize_conversion_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MaximizeConversionValue) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_MaximizeConversionValue{msg} + return true, err + case 26: // campaign_bidding_strategy.target_cpa + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetCpa) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_TargetCpa{msg} + return true, err + case 48: // campaign_bidding_strategy.target_impression_share + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetImpressionShare) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_TargetImpressionShare{msg} + return true, err + case 29: // campaign_bidding_strategy.target_roas + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + msg := new(common.TargetRoas) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_TargetRoas{msg} + return true, err + case 27: // campaign_bidding_strategy.target_spend + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetSpend) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_TargetSpend{msg} + return true, err + case 34: // campaign_bidding_strategy.percent_cpc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PercentCpc) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_PercentCpc{msg} + return true, err + case 41: // campaign_bidding_strategy.target_cpm + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TargetCpm) + err := b.DecodeMessage(msg) + m.CampaignBiddingStrategy = &Campaign_TargetCpm{msg} + return true, err + default: + return false, nil + } +} + +func _Campaign_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Campaign) + // campaign_bidding_strategy + switch x := m.CampaignBiddingStrategy.(type) { + case *Campaign_BiddingStrategy: + s := proto.Size(x.BiddingStrategy) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_Commission: + s := proto.Size(x.Commission) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_ManualCpc: + s := proto.Size(x.ManualCpc) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_ManualCpm: + s := proto.Size(x.ManualCpm) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_ManualCpv: + s := proto.Size(x.ManualCpv) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_MaximizeConversions: + s := proto.Size(x.MaximizeConversions) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_MaximizeConversionValue: + s := proto.Size(x.MaximizeConversionValue) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_TargetCpa: + s := proto.Size(x.TargetCpa) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_TargetImpressionShare: + s := proto.Size(x.TargetImpressionShare) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_TargetRoas: + s := proto.Size(x.TargetRoas) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_TargetSpend: + s := proto.Size(x.TargetSpend) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_PercentCpc: + s := proto.Size(x.PercentCpc) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Campaign_TargetCpm: + s := proto.Size(x.TargetCpm) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The network settings for the campaign. +type Campaign_NetworkSettings struct { + // Whether ads will be served with google.com search results. + TargetGoogleSearch *wrappers.BoolValue `protobuf:"bytes,1,opt,name=target_google_search,json=targetGoogleSearch,proto3" json:"target_google_search,omitempty"` + // Whether ads will be served on partner sites in the Google Search Network + // (requires `target_google_search` to also be `true`). 
+ TargetSearchNetwork *wrappers.BoolValue `protobuf:"bytes,2,opt,name=target_search_network,json=targetSearchNetwork,proto3" json:"target_search_network,omitempty"` + // Whether ads will be served on specified placements in the Google Display + // Network. Placements are specified using the Placement criterion. + TargetContentNetwork *wrappers.BoolValue `protobuf:"bytes,3,opt,name=target_content_network,json=targetContentNetwork,proto3" json:"target_content_network,omitempty"` + // Whether ads will be served on the Google Partner Network. + // This is available only to some select Google partner accounts. + TargetPartnerSearchNetwork *wrappers.BoolValue `protobuf:"bytes,4,opt,name=target_partner_search_network,json=targetPartnerSearchNetwork,proto3" json:"target_partner_search_network,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_NetworkSettings) Reset() { *m = Campaign_NetworkSettings{} } +func (m *Campaign_NetworkSettings) String() string { return proto.CompactTextString(m) } +func (*Campaign_NetworkSettings) ProtoMessage() {} +func (*Campaign_NetworkSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 0} +} +func (m *Campaign_NetworkSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_NetworkSettings.Unmarshal(m, b) +} +func (m *Campaign_NetworkSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_NetworkSettings.Marshal(b, m, deterministic) +} +func (dst *Campaign_NetworkSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_NetworkSettings.Merge(dst, src) +} +func (m *Campaign_NetworkSettings) XXX_Size() int { + return xxx_messageInfo_Campaign_NetworkSettings.Size(m) +} +func (m *Campaign_NetworkSettings) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_NetworkSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_NetworkSettings proto.InternalMessageInfo + +func (m *Campaign_NetworkSettings) GetTargetGoogleSearch() *wrappers.BoolValue { + if m != nil { + return m.TargetGoogleSearch + } + return nil +} + +func (m *Campaign_NetworkSettings) GetTargetSearchNetwork() *wrappers.BoolValue { + if m != nil { + return m.TargetSearchNetwork + } + return nil +} + +func (m *Campaign_NetworkSettings) GetTargetContentNetwork() *wrappers.BoolValue { + if m != nil { + return m.TargetContentNetwork + } + return nil +} + +func (m *Campaign_NetworkSettings) GetTargetPartnerSearchNetwork() *wrappers.BoolValue { + if m != nil { + return m.TargetPartnerSearchNetwork + } + return nil +} + +// Campaign-level settings for hotel ads. +type Campaign_HotelSettingInfo struct { + // The linked Hotel Center account. 
+ HotelCenterId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=hotel_center_id,json=hotelCenterId,proto3" json:"hotel_center_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_HotelSettingInfo) Reset() { *m = Campaign_HotelSettingInfo{} } +func (m *Campaign_HotelSettingInfo) String() string { return proto.CompactTextString(m) } +func (*Campaign_HotelSettingInfo) ProtoMessage() {} +func (*Campaign_HotelSettingInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 1} +} +func (m *Campaign_HotelSettingInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_HotelSettingInfo.Unmarshal(m, b) +} +func (m *Campaign_HotelSettingInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_HotelSettingInfo.Marshal(b, m, deterministic) +} +func (dst *Campaign_HotelSettingInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_HotelSettingInfo.Merge(dst, src) +} +func (m *Campaign_HotelSettingInfo) XXX_Size() int { + return xxx_messageInfo_Campaign_HotelSettingInfo.Size(m) +} +func (m *Campaign_HotelSettingInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_HotelSettingInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_HotelSettingInfo proto.InternalMessageInfo + +func (m *Campaign_HotelSettingInfo) GetHotelCenterId() *wrappers.Int64Value { + if m != nil { + return m.HotelCenterId + } + return nil +} + +// The setting for controlling Dynamic Search Ads (DSA). +type Campaign_DynamicSearchAdsSetting struct { + // The Internet domain name that this setting represents, e.g., "google.com" + // or "www.google.com". + DomainName *wrappers.StringValue `protobuf:"bytes,1,opt,name=domain_name,json=domainName,proto3" json:"domain_name,omitempty"` + // The language code specifying the language of the domain, e.g., "en". + LanguageCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Whether the campaign uses advertiser supplied URLs exclusively. + UseSuppliedUrlsOnly *wrappers.BoolValue `protobuf:"bytes,3,opt,name=use_supplied_urls_only,json=useSuppliedUrlsOnly,proto3" json:"use_supplied_urls_only,omitempty"` + // The list of page feeds associated with the campaign. 
+ Feeds []*wrappers.StringValue `protobuf:"bytes,5,rep,name=feeds,proto3" json:"feeds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_DynamicSearchAdsSetting) Reset() { *m = Campaign_DynamicSearchAdsSetting{} } +func (m *Campaign_DynamicSearchAdsSetting) String() string { return proto.CompactTextString(m) } +func (*Campaign_DynamicSearchAdsSetting) ProtoMessage() {} +func (*Campaign_DynamicSearchAdsSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 2} +} +func (m *Campaign_DynamicSearchAdsSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_DynamicSearchAdsSetting.Unmarshal(m, b) +} +func (m *Campaign_DynamicSearchAdsSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_DynamicSearchAdsSetting.Marshal(b, m, deterministic) +} +func (dst *Campaign_DynamicSearchAdsSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_DynamicSearchAdsSetting.Merge(dst, src) +} +func (m *Campaign_DynamicSearchAdsSetting) XXX_Size() int { + return xxx_messageInfo_Campaign_DynamicSearchAdsSetting.Size(m) +} +func (m *Campaign_DynamicSearchAdsSetting) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_DynamicSearchAdsSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_DynamicSearchAdsSetting proto.InternalMessageInfo + +func (m *Campaign_DynamicSearchAdsSetting) GetDomainName() *wrappers.StringValue { + if m != nil { + return m.DomainName + } + return nil +} + +func (m *Campaign_DynamicSearchAdsSetting) GetLanguageCode() *wrappers.StringValue { + if m != nil { + return m.LanguageCode + } + return nil +} + +func (m *Campaign_DynamicSearchAdsSetting) GetUseSuppliedUrlsOnly() *wrappers.BoolValue { + if m != nil { + return m.UseSuppliedUrlsOnly + } + return nil +} + +func (m *Campaign_DynamicSearchAdsSetting) GetFeeds() []*wrappers.StringValue { + if m != nil { + return m.Feeds + } + return nil +} + +// Campaign level settings for tracking information. +type Campaign_TrackingSetting struct { + // The url used for dynamic tracking. 
+ TrackingUrl *wrappers.StringValue `protobuf:"bytes,1,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_TrackingSetting) Reset() { *m = Campaign_TrackingSetting{} } +func (m *Campaign_TrackingSetting) String() string { return proto.CompactTextString(m) } +func (*Campaign_TrackingSetting) ProtoMessage() {} +func (*Campaign_TrackingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 3} +} +func (m *Campaign_TrackingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_TrackingSetting.Unmarshal(m, b) +} +func (m *Campaign_TrackingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_TrackingSetting.Marshal(b, m, deterministic) +} +func (dst *Campaign_TrackingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_TrackingSetting.Merge(dst, src) +} +func (m *Campaign_TrackingSetting) XXX_Size() int { + return xxx_messageInfo_Campaign_TrackingSetting.Size(m) +} +func (m *Campaign_TrackingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_TrackingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_TrackingSetting proto.InternalMessageInfo + +func (m *Campaign_TrackingSetting) GetTrackingUrl() *wrappers.StringValue { + if m != nil { + return m.TrackingUrl + } + return nil +} + +// Represents a collection of settings related to ads geotargeting. +type Campaign_GeoTargetTypeSetting struct { + // The setting used for positive geotargeting in this particular campaign. + PositiveGeoTargetType enums.PositiveGeoTargetTypeEnum_PositiveGeoTargetType `protobuf:"varint,1,opt,name=positive_geo_target_type,json=positiveGeoTargetType,proto3,enum=google.ads.googleads.v1.enums.PositiveGeoTargetTypeEnum_PositiveGeoTargetType" json:"positive_geo_target_type,omitempty"` + // The setting used for negative geotargeting in this particular campaign. 
+ NegativeGeoTargetType enums.NegativeGeoTargetTypeEnum_NegativeGeoTargetType `protobuf:"varint,2,opt,name=negative_geo_target_type,json=negativeGeoTargetType,proto3,enum=google.ads.googleads.v1.enums.NegativeGeoTargetTypeEnum_NegativeGeoTargetType" json:"negative_geo_target_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_GeoTargetTypeSetting) Reset() { *m = Campaign_GeoTargetTypeSetting{} } +func (m *Campaign_GeoTargetTypeSetting) String() string { return proto.CompactTextString(m) } +func (*Campaign_GeoTargetTypeSetting) ProtoMessage() {} +func (*Campaign_GeoTargetTypeSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 4} +} +func (m *Campaign_GeoTargetTypeSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_GeoTargetTypeSetting.Unmarshal(m, b) +} +func (m *Campaign_GeoTargetTypeSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_GeoTargetTypeSetting.Marshal(b, m, deterministic) +} +func (dst *Campaign_GeoTargetTypeSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_GeoTargetTypeSetting.Merge(dst, src) +} +func (m *Campaign_GeoTargetTypeSetting) XXX_Size() int { + return xxx_messageInfo_Campaign_GeoTargetTypeSetting.Size(m) +} +func (m *Campaign_GeoTargetTypeSetting) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_GeoTargetTypeSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_GeoTargetTypeSetting proto.InternalMessageInfo + +func (m *Campaign_GeoTargetTypeSetting) GetPositiveGeoTargetType() enums.PositiveGeoTargetTypeEnum_PositiveGeoTargetType { + if m != nil { + return m.PositiveGeoTargetType + } + return enums.PositiveGeoTargetTypeEnum_UNSPECIFIED +} + +func (m *Campaign_GeoTargetTypeSetting) GetNegativeGeoTargetType() enums.NegativeGeoTargetTypeEnum_NegativeGeoTargetType { + if m != nil { + return m.NegativeGeoTargetType + } + return enums.NegativeGeoTargetTypeEnum_UNSPECIFIED +} + +// Selective optimization setting for this campaign, which includes a set of +// conversion actions to optimize this campaign towards. +type Campaign_SelectiveOptimization struct { + // The selected set of conversion actions for optimizing this campaign. 
+ ConversionActions []*wrappers.StringValue `protobuf:"bytes,1,rep,name=conversion_actions,json=conversionActions,proto3" json:"conversion_actions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_SelectiveOptimization) Reset() { *m = Campaign_SelectiveOptimization{} } +func (m *Campaign_SelectiveOptimization) String() string { return proto.CompactTextString(m) } +func (*Campaign_SelectiveOptimization) ProtoMessage() {} +func (*Campaign_SelectiveOptimization) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 5} +} +func (m *Campaign_SelectiveOptimization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_SelectiveOptimization.Unmarshal(m, b) +} +func (m *Campaign_SelectiveOptimization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_SelectiveOptimization.Marshal(b, m, deterministic) +} +func (dst *Campaign_SelectiveOptimization) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_SelectiveOptimization.Merge(dst, src) +} +func (m *Campaign_SelectiveOptimization) XXX_Size() int { + return xxx_messageInfo_Campaign_SelectiveOptimization.Size(m) +} +func (m *Campaign_SelectiveOptimization) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_SelectiveOptimization.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_SelectiveOptimization proto.InternalMessageInfo + +func (m *Campaign_SelectiveOptimization) GetConversionActions() []*wrappers.StringValue { + if m != nil { + return m.ConversionActions + } + return nil +} + +// The setting for Shopping campaigns. Defines the universe of products that +// can be advertised by the campaign, and how this campaign interacts with +// other Shopping campaigns. +type Campaign_ShoppingSetting struct { + // ID of the Merchant Center account. + // This field is required for create operations. This field is immutable for + // Shopping campaigns. + MerchantId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=merchant_id,json=merchantId,proto3" json:"merchant_id,omitempty"` + // Sales country of products to include in the campaign. + // This field is required for Shopping campaigns. This field is immutable. + // This field is optional for non-Shopping campaigns, but it must be equal + // to 'ZZ' if set. + SalesCountry *wrappers.StringValue `protobuf:"bytes,2,opt,name=sales_country,json=salesCountry,proto3" json:"sales_country,omitempty"` + // Priority of the campaign. Campaigns with numerically higher priorities + // take precedence over those with lower priorities. + // This field is required for Shopping campaigns, with values between 0 and + // 2, inclusive. + // This field is optional for Smart Shopping campaigns, but must be equal to + // 3 if set. + CampaignPriority *wrappers.Int32Value `protobuf:"bytes,3,opt,name=campaign_priority,json=campaignPriority,proto3" json:"campaign_priority,omitempty"` + // Whether to include local products. 
+ EnableLocal *wrappers.BoolValue `protobuf:"bytes,4,opt,name=enable_local,json=enableLocal,proto3" json:"enable_local,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_ShoppingSetting) Reset() { *m = Campaign_ShoppingSetting{} } +func (m *Campaign_ShoppingSetting) String() string { return proto.CompactTextString(m) } +func (*Campaign_ShoppingSetting) ProtoMessage() {} +func (*Campaign_ShoppingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 6} +} +func (m *Campaign_ShoppingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_ShoppingSetting.Unmarshal(m, b) +} +func (m *Campaign_ShoppingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_ShoppingSetting.Marshal(b, m, deterministic) +} +func (dst *Campaign_ShoppingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_ShoppingSetting.Merge(dst, src) +} +func (m *Campaign_ShoppingSetting) XXX_Size() int { + return xxx_messageInfo_Campaign_ShoppingSetting.Size(m) +} +func (m *Campaign_ShoppingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_ShoppingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_ShoppingSetting proto.InternalMessageInfo + +func (m *Campaign_ShoppingSetting) GetMerchantId() *wrappers.Int64Value { + if m != nil { + return m.MerchantId + } + return nil +} + +func (m *Campaign_ShoppingSetting) GetSalesCountry() *wrappers.StringValue { + if m != nil { + return m.SalesCountry + } + return nil +} + +func (m *Campaign_ShoppingSetting) GetCampaignPriority() *wrappers.Int32Value { + if m != nil { + return m.CampaignPriority + } + return nil +} + +func (m *Campaign_ShoppingSetting) GetEnableLocal() *wrappers.BoolValue { + if m != nil { + return m.EnableLocal + } + return nil +} + +// Campaign level settings for App Campaigns. +type Campaign_AppCampaignSetting struct { + // Represents the goal which the bidding strategy of this app campaign + // should optimize towards. + BiddingStrategyGoalType enums.AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType `protobuf:"varint,1,opt,name=bidding_strategy_goal_type,json=biddingStrategyGoalType,proto3,enum=google.ads.googleads.v1.enums.AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType" json:"bidding_strategy_goal_type,omitempty"` + // A string that uniquely identifies a mobile application. + AppId *wrappers.StringValue `protobuf:"bytes,2,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // The application store that distributes this specific app. 
+ AppStore enums.AppCampaignAppStoreEnum_AppCampaignAppStore `protobuf:"varint,3,opt,name=app_store,json=appStore,proto3,enum=google.ads.googleads.v1.enums.AppCampaignAppStoreEnum_AppCampaignAppStore" json:"app_store,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_AppCampaignSetting) Reset() { *m = Campaign_AppCampaignSetting{} } +func (m *Campaign_AppCampaignSetting) String() string { return proto.CompactTextString(m) } +func (*Campaign_AppCampaignSetting) ProtoMessage() {} +func (*Campaign_AppCampaignSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 7} +} +func (m *Campaign_AppCampaignSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_AppCampaignSetting.Unmarshal(m, b) +} +func (m *Campaign_AppCampaignSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_AppCampaignSetting.Marshal(b, m, deterministic) +} +func (dst *Campaign_AppCampaignSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_AppCampaignSetting.Merge(dst, src) +} +func (m *Campaign_AppCampaignSetting) XXX_Size() int { + return xxx_messageInfo_Campaign_AppCampaignSetting.Size(m) +} +func (m *Campaign_AppCampaignSetting) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_AppCampaignSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_AppCampaignSetting proto.InternalMessageInfo + +func (m *Campaign_AppCampaignSetting) GetBiddingStrategyGoalType() enums.AppCampaignBiddingStrategyGoalTypeEnum_AppCampaignBiddingStrategyGoalType { + if m != nil { + return m.BiddingStrategyGoalType + } + return enums.AppCampaignBiddingStrategyGoalTypeEnum_UNSPECIFIED +} + +func (m *Campaign_AppCampaignSetting) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +func (m *Campaign_AppCampaignSetting) GetAppStore() enums.AppCampaignAppStoreEnum_AppCampaignAppStore { + if m != nil { + return m.AppStore + } + return enums.AppCampaignAppStoreEnum_UNSPECIFIED +} + +// Describes how unbranded pharma ads will be displayed. +type Campaign_VanityPharma struct { + // The display mode for vanity pharma URLs. + VanityPharmaDisplayUrlMode enums.VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode `protobuf:"varint,1,opt,name=vanity_pharma_display_url_mode,json=vanityPharmaDisplayUrlMode,proto3,enum=google.ads.googleads.v1.enums.VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode" json:"vanity_pharma_display_url_mode,omitempty"` + // The text that will be displayed in display URL of the text ad when + // website description is the selected display mode for vanity pharma URLs. 
+ VanityPharmaText enums.VanityPharmaTextEnum_VanityPharmaText `protobuf:"varint,2,opt,name=vanity_pharma_text,json=vanityPharmaText,proto3,enum=google.ads.googleads.v1.enums.VanityPharmaTextEnum_VanityPharmaText" json:"vanity_pharma_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Campaign_VanityPharma) Reset() { *m = Campaign_VanityPharma{} } +func (m *Campaign_VanityPharma) String() string { return proto.CompactTextString(m) } +func (*Campaign_VanityPharma) ProtoMessage() {} +func (*Campaign_VanityPharma) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_159b92ef9ae7a828, []int{0, 8} +} +func (m *Campaign_VanityPharma) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Campaign_VanityPharma.Unmarshal(m, b) +} +func (m *Campaign_VanityPharma) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Campaign_VanityPharma.Marshal(b, m, deterministic) +} +func (dst *Campaign_VanityPharma) XXX_Merge(src proto.Message) { + xxx_messageInfo_Campaign_VanityPharma.Merge(dst, src) +} +func (m *Campaign_VanityPharma) XXX_Size() int { + return xxx_messageInfo_Campaign_VanityPharma.Size(m) +} +func (m *Campaign_VanityPharma) XXX_DiscardUnknown() { + xxx_messageInfo_Campaign_VanityPharma.DiscardUnknown(m) +} + +var xxx_messageInfo_Campaign_VanityPharma proto.InternalMessageInfo + +func (m *Campaign_VanityPharma) GetVanityPharmaDisplayUrlMode() enums.VanityPharmaDisplayUrlModeEnum_VanityPharmaDisplayUrlMode { + if m != nil { + return m.VanityPharmaDisplayUrlMode + } + return enums.VanityPharmaDisplayUrlModeEnum_UNSPECIFIED +} + +func (m *Campaign_VanityPharma) GetVanityPharmaText() enums.VanityPharmaTextEnum_VanityPharmaText { + if m != nil { + return m.VanityPharmaText + } + return enums.VanityPharmaTextEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Campaign)(nil), "google.ads.googleads.v1.resources.Campaign") + proto.RegisterType((*Campaign_NetworkSettings)(nil), "google.ads.googleads.v1.resources.Campaign.NetworkSettings") + proto.RegisterType((*Campaign_HotelSettingInfo)(nil), "google.ads.googleads.v1.resources.Campaign.HotelSettingInfo") + proto.RegisterType((*Campaign_DynamicSearchAdsSetting)(nil), "google.ads.googleads.v1.resources.Campaign.DynamicSearchAdsSetting") + proto.RegisterType((*Campaign_TrackingSetting)(nil), "google.ads.googleads.v1.resources.Campaign.TrackingSetting") + proto.RegisterType((*Campaign_GeoTargetTypeSetting)(nil), "google.ads.googleads.v1.resources.Campaign.GeoTargetTypeSetting") + proto.RegisterType((*Campaign_SelectiveOptimization)(nil), "google.ads.googleads.v1.resources.Campaign.SelectiveOptimization") + proto.RegisterType((*Campaign_ShoppingSetting)(nil), "google.ads.googleads.v1.resources.Campaign.ShoppingSetting") + proto.RegisterType((*Campaign_AppCampaignSetting)(nil), "google.ads.googleads.v1.resources.Campaign.AppCampaignSetting") + proto.RegisterType((*Campaign_VanityPharma)(nil), "google.ads.googleads.v1.resources.Campaign.VanityPharma") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign.proto", fileDescriptor_campaign_159b92ef9ae7a828) +} + +var fileDescriptor_campaign_159b92ef9ae7a828 = []byte{ + // 2205 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x59, 0x5b, 0x93, 0xdb, 0xb6, + 0x15, 0x5e, 0xc9, 0x97, 0xda, 0xd8, 0xab, 0xb1, 0x37, 0x5a, 0x76, 0x5c, 0x3b, 0xad, 0x5b, 0x27, + 0x4e, 0x25, 0xef, 0x6e, 0xe2, 
0xb4, 0x6b, 0x27, 0xb5, 0x56, 0x76, 0xbc, 0xeb, 0xf8, 0xa2, 0xa1, + 0xd6, 0x5b, 0x4f, 0xc6, 0x1e, 0x0e, 0x44, 0x62, 0xb5, 0x9c, 0x90, 0x04, 0x02, 0x80, 0xf2, 0xaa, + 0x0f, 0x9d, 0xbc, 0x75, 0xa6, 0xef, 0x9d, 0x3e, 0xb4, 0x33, 0xbd, 0xcc, 0xf4, 0x25, 0x3f, 0xa5, + 0x3f, 0xa1, 0x4f, 0x7d, 0xee, 0x4f, 0xe8, 0x53, 0x07, 0x17, 0x72, 0x29, 0x89, 0x12, 0xa9, 0x37, + 0xe2, 0xe0, 0x7c, 0xdf, 0x39, 0x38, 0x00, 0x0e, 0x0e, 0x40, 0x70, 0xaf, 0x47, 0x48, 0x2f, 0xc0, + 0x0d, 0xe4, 0xf1, 0x86, 0xfe, 0x94, 0x5f, 0xfd, 0xad, 0x06, 0xc3, 0x9c, 0xc4, 0xcc, 0xc5, 0xbc, + 0xe1, 0xa2, 0x90, 0x22, 0xbf, 0x17, 0xd5, 0x29, 0x23, 0x82, 0xc0, 0x5b, 0x5a, 0xad, 0x8e, 0x3c, + 0x5e, 0x4f, 0x11, 0xf5, 0xfe, 0x56, 0x3d, 0x45, 0xd4, 0x3e, 0x99, 0x44, 0xea, 0x92, 0x30, 0x24, + 0x51, 0xa3, 0xeb, 0x7b, 0x9e, 0x1f, 0xf5, 0x34, 0x61, 0xed, 0xb3, 0x02, 0x6d, 0x37, 0xe6, 0x82, + 0x84, 0x0e, 0x45, 0x0c, 0x85, 0x58, 0x60, 0x66, 0x60, 0xdb, 0x05, 0xb0, 0x63, 0x86, 0xbf, 0x8b, + 0x71, 0xe4, 0x0e, 0x1c, 0x17, 0x51, 0x83, 0xf9, 0xb2, 0x00, 0xc3, 0x30, 0x0a, 0x1c, 0xe1, 0x87, + 0xd8, 0x31, 0x2e, 0x3a, 0x1c, 0x0b, 0x71, 0xe6, 0xea, 0xfd, 0x02, 0xbc, 0x40, 0xac, 0x87, 0xc5, + 0x38, 0x6e, 0x6f, 0x12, 0x0e, 0x47, 0x71, 0xc8, 0x1b, 0xc8, 0x73, 0x38, 0x66, 0x7d, 0x89, 0x23, + 0x54, 0xf8, 0xa1, 0xff, 0x5b, 0x24, 0x7c, 0x12, 0x39, 0x5c, 0x20, 0x11, 0x73, 0xc3, 0xf1, 0xa8, + 0x88, 0xa3, 0x8f, 0x99, 0xf0, 0xb9, 0x24, 0x71, 0x4f, 0x50, 0x14, 0xe1, 0xc0, 0xe1, 0x71, 0xd7, + 0x11, 0x03, 0x8a, 0x0d, 0xc3, 0xc3, 0xd9, 0x19, 0x32, 0xe8, 0xdd, 0x02, 0x34, 0xa5, 0x4e, 0xb2, + 0x52, 0x1c, 0xd9, 0xe0, 0x82, 0xb0, 0x04, 0xfb, 0xf5, 0x0c, 0xd8, 0x34, 0xf2, 0x82, 0x21, 0x81, + 0x7b, 0x03, 0xa7, 0x47, 0xd0, 0x90, 0x23, 0xbf, 0x9a, 0x4e, 0x36, 0x86, 0x2f, 0x1f, 0x81, 0x2e, + 0x43, 0x91, 0xe7, 0x70, 0x74, 0x8c, 0xc5, 0xc0, 0xe1, 0xb1, 0x2f, 0x50, 0xd7, 0x0f, 0x7c, 0x31, + 0x28, 0x87, 0x4e, 0x47, 0x80, 0x4f, 0x29, 0x66, 0x7e, 0x88, 0x23, 0x91, 0xb5, 0xfd, 0xa0, 0x24, + 0x3a, 0x59, 0x09, 0x43, 0x93, 0xbf, 0x53, 0x16, 0x9c, 0x05, 0x15, 0xf8, 0x1b, 0xe1, 0x1e, 0x12, + 0x7e, 0x1f, 0x3b, 0x3d, 0x4c, 0x1c, 0xbd, 0x72, 0xb3, 0xfe, 0xde, 0x9b, 0x8e, 0xa6, 0x68, 0xa0, + 0x46, 0x18, 0x12, 0xaf, 0x64, 0x74, 0x29, 0xe1, 0xfe, 0x14, 0x7b, 0x05, 0x7b, 0xa4, 0x8f, 0x22, + 0x5f, 0x0c, 0x1c, 0x7a, 0x82, 0x58, 0x88, 0x1c, 0xcf, 0xe7, 0x34, 0x40, 0x03, 0x27, 0x66, 0x41, + 0xd6, 0x83, 0xfb, 0xb3, 0x70, 0x08, 0x7c, 0x2a, 0x0c, 0xee, 0x86, 0xc1, 0xa9, 0x56, 0x37, 0x3e, + 0x6e, 0xbc, 0x67, 0x88, 0x52, 0xcc, 0x92, 0x48, 0x5e, 0x4f, 0x78, 0xa9, 0xdf, 0x40, 0x51, 0x44, + 0x84, 0xda, 0x9f, 0xa6, 0xf7, 0xc3, 0xef, 0xb7, 0xc1, 0xa5, 0x96, 0x99, 0x01, 0xf8, 0x13, 0xb0, + 0x98, 0x24, 0x42, 0x27, 0x42, 0x21, 0xb6, 0x2a, 0x37, 0x2b, 0x77, 0x2e, 0xdb, 0x0b, 0x89, 0xf0, + 0x25, 0x0a, 0x31, 0xbc, 0x0b, 0xaa, 0xbe, 0x67, 0x9d, 0xbb, 0x59, 0xb9, 0x33, 0xbf, 0x7d, 0xcd, + 0x64, 0xd1, 0x7a, 0x62, 0xbc, 0x7e, 0x10, 0x89, 0xfb, 0x9f, 0x1e, 0xa1, 0x20, 0xc6, 0x76, 0xd5, + 0xf7, 0xe0, 0x3d, 0x70, 0x5e, 0x11, 0x9d, 0x57, 0xea, 0xd7, 0xc7, 0xd4, 0x3b, 0x82, 0xf9, 0x51, + 0x4f, 0xeb, 0x2b, 0x4d, 0xf8, 0x06, 0x5c, 0xd4, 0x0b, 0xc1, 0xba, 0x70, 0xb3, 0x72, 0x67, 0x69, + 0xfb, 0x51, 0x7d, 0x52, 0xce, 0x56, 0x71, 0xa9, 0x27, 0xce, 0x77, 0x14, 0xe8, 0x49, 0x14, 0x87, + 0x23, 0x22, 0xdb, 0xf0, 0xc1, 0x18, 0x2c, 0x0d, 0xaf, 0x4f, 0x6b, 0x5d, 0x59, 0x78, 0x59, 0xd6, + 0x82, 0x06, 0xe7, 0x19, 0xca, 0xf6, 0xd8, 0x8b, 0x3c, 0xdb, 0x84, 0x7f, 0xad, 0x80, 0x1b, 0xd3, + 0x93, 0xa4, 0x75, 0x49, 0xf9, 0xf1, 0x4d, 0x81, 0x1f, 0x4d, 0xcf, 0xd8, 0x79, 0x95, 0xa1, 0xc8, + 0x78, 0x33, 0xa5, 0xdf, 0xbe, 0x86, 0x26, 0x77, 0xc2, 
0x3f, 0x54, 0x80, 0x35, 0x29, 0x81, 0x5a, + 0x97, 0x95, 0x6f, 0xed, 0x42, 0xdf, 0x52, 0x78, 0x4b, 0xa3, 0x0f, 0x07, 0x14, 0x1b, 0xb7, 0xf2, + 0xba, 0xec, 0x0d, 0x94, 0x2b, 0x87, 0x7f, 0xaa, 0x80, 0xeb, 0xd3, 0xce, 0x03, 0x0b, 0x28, 0x87, + 0x8e, 0x66, 0x76, 0xa8, 0x13, 0x77, 0xa7, 0xf8, 0x64, 0x7a, 0xed, 0xab, 0x68, 0x52, 0x17, 0x6c, + 0x83, 0x75, 0xc1, 0x90, 0xfb, 0xad, 0xf4, 0x4a, 0xee, 0x5d, 0x81, 0x43, 0x1a, 0x20, 0x81, 0xad, + 0xf9, 0x12, 0x8b, 0x7b, 0x35, 0x81, 0xbe, 0x66, 0xc1, 0xa1, 0x01, 0x42, 0x17, 0xac, 0x4b, 0xa2, + 0xd1, 0x22, 0x81, 0x5b, 0x0b, 0x37, 0xcf, 0xdd, 0x99, 0xdf, 0x6e, 0x4c, 0x1c, 0xa3, 0x3e, 0xb2, + 0xeb, 0x2d, 0x05, 0x6c, 0x27, 0x38, 0x7b, 0x35, 0x66, 0xc1, 0x88, 0x8c, 0xc3, 0xef, 0xc0, 0xd5, + 0x89, 0xa5, 0x81, 0xf5, 0x73, 0xe5, 0xfa, 0xfd, 0x22, 0x43, 0x36, 0x46, 0xc1, 0xa1, 0x1f, 0xe2, + 0x3d, 0x0d, 0xef, 0x68, 0xb4, 0xbd, 0xc1, 0x72, 0xe5, 0xf0, 0x18, 0xac, 0x44, 0x58, 0xbc, 0x27, + 0xec, 0xdb, 0xc4, 0x10, 0xb7, 0x96, 0x94, 0xa5, 0x07, 0xf5, 0xc2, 0x0a, 0x2c, 0xdd, 0x55, 0xf5, + 0x97, 0x9a, 0xc3, 0xb0, 0x72, 0x7b, 0x39, 0x1a, 0x16, 0x40, 0x04, 0x16, 0x4f, 0x88, 0x90, 0x8b, + 0xc3, 0x0c, 0xe7, 0xa6, 0x32, 0xf2, 0x70, 0x16, 0x23, 0xfb, 0x92, 0xc0, 0x30, 0x1e, 0x44, 0xc7, + 0xc4, 0x5e, 0x38, 0xc9, 0x48, 0xe0, 0xf7, 0x15, 0x50, 0xf3, 0x06, 0x11, 0x0a, 0x7d, 0xd7, 0xe1, + 0x18, 0x31, 0xf7, 0xc4, 0x41, 0x1e, 0x4f, 0x0d, 0xde, 0x52, 0x06, 0x5b, 0xb3, 0x18, 0x7c, 0xac, + 0xd9, 0x3a, 0x8a, 0xac, 0xe9, 0xf1, 0x24, 0x98, 0x9b, 0x5e, 0x7e, 0x87, 0x8c, 0x26, 0x3f, 0x21, + 0x94, 0x66, 0xe7, 0xed, 0xa7, 0xb3, 0x47, 0xb3, 0x63, 0x38, 0x12, 0x7b, 0xcb, 0x7c, 0x58, 0x00, + 0xdf, 0x81, 0x2b, 0x63, 0x35, 0xa0, 0x75, 0x57, 0x19, 0xba, 0x57, 0xb4, 0x40, 0x0e, 0x13, 0x60, + 0xc2, 0xbe, 0x22, 0x46, 0x24, 0xf0, 0x3d, 0xd8, 0x1c, 0x39, 0x3c, 0x53, 0x23, 0x0d, 0x65, 0xe4, + 0xd1, 0x2c, 0xa3, 0x79, 0x8a, 0x89, 0xb6, 0x29, 0xb7, 0x66, 0x62, 0x74, 0xad, 0x97, 0x23, 0x85, + 0x14, 0xac, 0x0d, 0x15, 0x69, 0x89, 0xd5, 0x1d, 0x65, 0xf5, 0xcb, 0x59, 0xac, 0x36, 0x29, 0x3d, + 0xcb, 0xf9, 0xda, 0x26, 0x44, 0x63, 0x32, 0x38, 0x00, 0xcb, 0x23, 0x75, 0x94, 0x75, 0xa5, 0x54, + 0x1a, 0x4d, 0x88, 0x9e, 0xa4, 0xe8, 0x34, 0x65, 0xe5, 0x77, 0xd9, 0x4b, 0x78, 0xa8, 0x0d, 0x9b, + 0x60, 0xb1, 0x8b, 0x38, 0x4e, 0x47, 0x6b, 0x5d, 0x2f, 0x91, 0x9c, 0x16, 0x24, 0x24, 0xad, 0x02, + 0x9e, 0x80, 0xe5, 0xb3, 0x82, 0x36, 0xf6, 0x7a, 0x58, 0x58, 0x17, 0x4b, 0x90, 0x2c, 0x25, 0xa0, + 0x3d, 0x85, 0x81, 0xbf, 0x03, 0xeb, 0xb9, 0xe5, 0xac, 0xb5, 0xa1, 0x42, 0xf1, 0xac, 0x20, 0x14, + 0x49, 0x4a, 0x31, 0xd0, 0x34, 0x0e, 0x39, 0x72, 0x7b, 0xb5, 0x3b, 0x2e, 0x84, 0x0f, 0x00, 0xe0, + 0x02, 0x31, 0xe1, 0x78, 0x32, 0x47, 0xaf, 0x96, 0x18, 0xc1, 0x65, 0xa5, 0xff, 0x58, 0x66, 0xe6, + 0xcf, 0xc1, 0x25, 0x1c, 0x79, 0x1a, 0xba, 0x56, 0x02, 0xfa, 0x23, 0x1c, 0x79, 0x0a, 0xf8, 0x15, + 0x58, 0x39, 0xf6, 0x23, 0x14, 0xa8, 0x13, 0x82, 0xc7, 0xc7, 0xc7, 0xfe, 0xa9, 0xf5, 0xb3, 0x32, + 0xd1, 0x53, 0xa8, 0xd7, 0x2c, 0xe8, 0x28, 0x0c, 0x7c, 0x03, 0x96, 0x86, 0x2e, 0x81, 0xdc, 0xba, + 0xa3, 0xce, 0x84, 0xad, 0xa2, 0x9d, 0xf8, 0x55, 0x82, 0x6a, 0x21, 0xfa, 0x24, 0x12, 0x6c, 0x60, + 0x2f, 0x1e, 0x67, 0x44, 0x1c, 0xfe, 0xb1, 0x02, 0x6e, 0xf4, 0x7d, 0x0f, 0x13, 0x67, 0xd2, 0x95, + 0xc1, 0xfa, 0xb8, 0xd4, 0x62, 0xdd, 0x93, 0xf0, 0x8e, 0x42, 0x77, 0xce, 0xc0, 0x7a, 0x92, 0x72, + 0xbb, 0xec, 0x6b, 0xca, 0x6e, 0x7e, 0x27, 0x7c, 0x07, 0x16, 0x87, 0x6a, 0x5c, 0xeb, 0x13, 0x15, + 0xb6, 0x5f, 0xce, 0xb2, 0x3f, 0x8f, 0x14, 0x41, 0x5b, 0xe1, 0xed, 0x85, 0x7e, 0xa6, 0x05, 0x4f, + 0xc1, 0x06, 0xc7, 0x01, 0x76, 0x55, 0x15, 0x9f, 0x2d, 0xc2, 0xac, 0x5f, 0x28, 
0x3b, 0xcd, 0x99, + 0x72, 0x69, 0xc2, 0x94, 0xad, 0xa6, 0xec, 0x75, 0x9e, 0x27, 0x96, 0xf9, 0x3b, 0xad, 0x1b, 0x92, + 0xdc, 0x53, 0x9f, 0x3d, 0x7f, 0x1f, 0x1a, 0x8e, 0x34, 0x7f, 0x8b, 0x61, 0x01, 0x7c, 0x07, 0x16, + 0xb2, 0x17, 0x1b, 0xeb, 0x53, 0x35, 0x8b, 0xbb, 0x05, 0xb3, 0xd8, 0xd6, 0x90, 0x17, 0xc4, 0xd3, + 0xfb, 0x2b, 0xd3, 0xb6, 0xe7, 0xe9, 0x59, 0x03, 0x1e, 0x80, 0x95, 0xd1, 0xfd, 0x6c, 0x6d, 0x16, + 0xaf, 0xec, 0xfd, 0x39, 0x7b, 0x79, 0x64, 0x7b, 0xc2, 0xe7, 0x00, 0xc8, 0xd5, 0xea, 0x73, 0x2e, + 0xe3, 0xbf, 0xa5, 0x48, 0x3e, 0x2e, 0x2c, 0x76, 0x52, 0xc4, 0xfe, 0x9c, 0x9d, 0xc1, 0xc3, 0x67, + 0x00, 0x84, 0x28, 0x8a, 0x51, 0xe0, 0xb8, 0xd4, 0xb5, 0x2c, 0xc5, 0xf6, 0x51, 0x11, 0xdb, 0x0b, + 0x85, 0x68, 0x51, 0x77, 0x7f, 0xce, 0xbe, 0x1c, 0x26, 0x8d, 0x21, 0xae, 0xd0, 0xba, 0x3a, 0x1b, + 0x57, 0x98, 0xe5, 0x0a, 0x87, 0xb8, 0xfa, 0xd6, 0xed, 0xd9, 0xb8, 0xfa, 0x59, 0xae, 0x3e, 0x3c, + 0x01, 0x6b, 0x21, 0x3a, 0x95, 0x6b, 0x0a, 0x3b, 0x2e, 0x89, 0xfa, 0x98, 0xc9, 0xa1, 0x73, 0xeb, + 0x86, 0x62, 0xdd, 0x29, 0x66, 0xd5, 0xd8, 0xd6, 0x19, 0x74, 0x7f, 0xce, 0x5e, 0x0d, 0xc7, 0xc5, + 0x30, 0x06, 0x57, 0x73, 0x2c, 0x39, 0x7d, 0x39, 0x97, 0xd6, 0x8f, 0x95, 0xb9, 0xcf, 0x67, 0x37, + 0x97, 0x2c, 0x85, 0xcd, 0x30, 0xbf, 0x4b, 0x06, 0xcb, 0x54, 0x06, 0x2e, 0x45, 0x56, 0xad, 0x5c, + 0xb0, 0xf4, 0x59, 0xdf, 0xa2, 0x48, 0x06, 0x4b, 0x24, 0x0d, 0x48, 0xc0, 0xa6, 0xe1, 0xf2, 0x43, + 0xca, 0xb0, 0x5a, 0x25, 0x0e, 0x3f, 0x41, 0x0c, 0x5b, 0xf7, 0x14, 0xf1, 0x67, 0xe5, 0x88, 0x0f, + 0x52, 0x74, 0x47, 0x82, 0xf7, 0xe7, 0xec, 0x75, 0x91, 0xd7, 0x01, 0x5f, 0x80, 0x79, 0x63, 0x90, + 0x11, 0xc4, 0xad, 0x0f, 0xca, 0x2d, 0x68, 0x6d, 0xc4, 0x26, 0x48, 0xce, 0x85, 0x19, 0xbd, 0x6c, + 0xc1, 0x36, 0x58, 0x30, 0x74, 0x9c, 0xe2, 0xc8, 0xb3, 0xae, 0x29, 0xbe, 0xbb, 0xe5, 0xf8, 0x3a, + 0x12, 0xb2, 0x3f, 0x67, 0x1b, 0x8f, 0x54, 0x53, 0x3a, 0x48, 0x31, 0x73, 0x65, 0x6a, 0x90, 0x7b, + 0xe4, 0xc3, 0x72, 0x0e, 0xb6, 0x35, 0x44, 0x6f, 0x12, 0x40, 0xd3, 0xd6, 0xd0, 0x64, 0x85, 0xd6, + 0x47, 0xb3, 0x4d, 0x56, 0x98, 0x9d, 0xac, 0xb0, 0xf6, 0x9f, 0x2a, 0x58, 0x1e, 0x29, 0xf4, 0xe1, + 0x73, 0xb0, 0x66, 0xf8, 0x35, 0x91, 0xa9, 0xbc, 0xd5, 0x73, 0xc4, 0xfc, 0x76, 0x6d, 0x2c, 0xdd, + 0xec, 0x11, 0x12, 0xe8, 0x63, 0x14, 0x6a, 0xdc, 0x53, 0xa5, 0xa0, 0x2b, 0x69, 0xf8, 0x12, 0xac, + 0x27, 0xe1, 0xd4, 0x05, 0xbc, 0xb9, 0x46, 0x58, 0xd5, 0x42, 0xba, 0x55, 0x13, 0x44, 0x85, 0x33, + 0x5e, 0xc2, 0x36, 0xd8, 0x48, 0x46, 0x4f, 0x22, 0x21, 0x63, 0x9a, 0x10, 0x9e, 0x2b, 0x24, 0x34, + 0xe3, 0x6a, 0x69, 0x60, 0xc2, 0xf8, 0x0e, 0x7c, 0x60, 0x18, 0x29, 0x62, 0x22, 0xc2, 0x6c, 0xd4, + 0xd3, 0xf3, 0x85, 0xc4, 0x35, 0x4d, 0xd0, 0xd6, 0xf8, 0x21, 0x87, 0x6b, 0xbf, 0x01, 0x2b, 0xa3, + 0xb7, 0x1c, 0xd8, 0x02, 0xcb, 0xfa, 0xea, 0x24, 0xe7, 0x14, 0x33, 0xc7, 0xf7, 0x4c, 0x74, 0xa7, + 0x3e, 0xe9, 0xe8, 0xeb, 0x56, 0x4b, 0x41, 0x0e, 0xbc, 0xda, 0xdf, 0xaa, 0x60, 0x73, 0xc2, 0x75, + 0x06, 0x7e, 0x01, 0xe6, 0x3d, 0x12, 0x22, 0x3f, 0x3a, 0x7b, 0x49, 0x2a, 0xaa, 0x81, 0x80, 0x06, + 0xa8, 0x57, 0xa6, 0x26, 0x58, 0x0c, 0x50, 0xd4, 0x8b, 0x51, 0x4f, 0xa6, 0x21, 0x0f, 0x9b, 0xc9, + 0x2a, 0xa8, 0x63, 0x13, 0x48, 0x4b, 0x1e, 0x58, 0xaf, 0xc0, 0x46, 0xcc, 0xb1, 0xc3, 0x63, 0x4a, + 0x03, 0x1f, 0x7b, 0xb2, 0x22, 0xe3, 0x0e, 0x89, 0x82, 0x41, 0x89, 0x79, 0x5a, 0x8d, 0x39, 0xee, + 0x18, 0xe0, 0x6b, 0x16, 0xf0, 0x57, 0x51, 0x30, 0x80, 0xdb, 0xe0, 0xc2, 0x31, 0xc6, 0x1e, 0xb7, + 0x2e, 0xa8, 0x52, 0x6c, 0xba, 0x2f, 0x5a, 0xb5, 0x66, 0x83, 0xe5, 0x91, 0x83, 0x1b, 0xfe, 0x1a, + 0x2c, 0x64, 0xdf, 0x11, 0x4a, 0x85, 0x66, 0x3e, 0xf3, 0x7c, 0x50, 0xfb, 0xa1, 0x0a, 0xd6, 0xf2, + 0xee, 
0x3f, 0xf0, 0xf7, 0x15, 0x60, 0x4d, 0x7a, 0xa9, 0x54, 0x66, 0x8a, 0x1f, 0xbb, 0xda, 0x06, + 0x3e, 0xc4, 0xaf, 0x0b, 0x83, 0xbc, 0x1e, 0x7b, 0x9d, 0xe6, 0x89, 0x95, 0x27, 0x93, 0xde, 0x68, + 0xd5, 0x54, 0x16, 0x7b, 0xf2, 0xd2, 0xc0, 0xc7, 0x3d, 0xc9, 0xed, 0xb1, 0xd7, 0xa3, 0x3c, 0x71, + 0xcd, 0x03, 0xeb, 0xb9, 0xd5, 0x1a, 0xfc, 0x1a, 0xc0, 0xcc, 0xf9, 0x86, 0x5c, 0xf5, 0x2a, 0x6a, + 0x55, 0x4a, 0x4c, 0xed, 0x95, 0x33, 0x5c, 0x53, 0xc3, 0x6a, 0x7f, 0xae, 0x82, 0xe5, 0x91, 0x0b, + 0x36, 0x7c, 0x08, 0xe6, 0x43, 0xcc, 0xdc, 0x13, 0x14, 0x89, 0x92, 0xdb, 0x0b, 0x24, 0xfa, 0x07, + 0x9e, 0xdc, 0x00, 0x1c, 0x05, 0x98, 0x3b, 0x2e, 0x89, 0x65, 0x19, 0x5f, 0x6e, 0x03, 0x28, 0x48, + 0x4b, 0x23, 0xe0, 0x3e, 0xb8, 0x92, 0x5e, 0xe4, 0x28, 0xf3, 0x09, 0x93, 0xb5, 0xfd, 0x94, 0x87, + 0xdb, 0x9d, 0x6d, 0xcd, 0xb2, 0x92, 0xa0, 0xda, 0x06, 0x04, 0xbf, 0x00, 0x0b, 0x38, 0x42, 0xdd, + 0x00, 0x3b, 0x01, 0x71, 0x51, 0x50, 0x22, 0x1f, 0xcd, 0x6b, 0xfd, 0xe7, 0x52, 0xbd, 0xf6, 0xef, + 0x2a, 0x80, 0xe3, 0x57, 0x67, 0xf8, 0xcf, 0x0a, 0xa8, 0x4d, 0xfe, 0x63, 0x62, 0x16, 0xec, 0x49, + 0xd1, 0x43, 0xdf, 0x19, 0xef, 0xc8, 0xd5, 0xf0, 0x29, 0x41, 0x99, 0x57, 0xc8, 0x42, 0x35, 0x7b, + 0xb3, 0x9b, 0xdf, 0x01, 0x77, 0xc0, 0x45, 0x44, 0xa9, 0x9c, 0xc3, 0x32, 0x73, 0x70, 0x01, 0x51, + 0x7a, 0xe0, 0xc1, 0x1e, 0xb8, 0x9c, 0xfe, 0x49, 0x52, 0x41, 0x2f, 0xbe, 0xf2, 0x66, 0x7c, 0x6c, + 0x52, 0xda, 0x91, 0xc8, 0x51, 0xdf, 0x13, 0xb9, 0x7d, 0x09, 0x99, 0xaf, 0xda, 0xdf, 0xab, 0x60, + 0x21, 0x7b, 0xef, 0x81, 0x7f, 0x91, 0x17, 0xbc, 0xa9, 0x7f, 0x1c, 0x4c, 0x68, 0xdf, 0x14, 0xf8, + 0x93, 0x65, 0x7d, 0xac, 0x29, 0x5e, 0xb3, 0x20, 0xbd, 0x29, 0x4c, 0xee, 0xb6, 0x6b, 0xfd, 0x89, + 0x7d, 0x90, 0x01, 0x38, 0xfe, 0x2f, 0xc3, 0xe4, 0x84, 0xc7, 0x33, 0x78, 0x74, 0x88, 0x4f, 0xc5, + 0x98, 0x1f, 0x52, 0x68, 0xaf, 0xf4, 0x47, 0x24, 0x7b, 0xd7, 0xc0, 0xd5, 0x89, 0xff, 0xe8, 0xf6, + 0xfe, 0x57, 0x01, 0xb7, 0x5d, 0x12, 0x16, 0xdf, 0xc5, 0xf6, 0x16, 0x5b, 0xe9, 0xc6, 0x20, 0x82, + 0xb4, 0x2b, 0xdf, 0x3c, 0x33, 0x98, 0x1e, 0x91, 0x27, 0x4f, 0x9d, 0xb0, 0x5e, 0xa3, 0x87, 0x23, + 0xb5, 0x2c, 0x92, 0x7f, 0x38, 0xd4, 0xe7, 0x53, 0x7e, 0x50, 0x3f, 0x48, 0xbf, 0xfe, 0x51, 0x3d, + 0xf7, 0xb4, 0xd9, 0xfc, 0xa1, 0x7a, 0x4b, 0xd7, 0x2e, 0xf5, 0xa6, 0xc7, 0xeb, 0xfa, 0x53, 0x7e, + 0x1d, 0x6d, 0xd5, 0xed, 0x44, 0xf3, 0x5f, 0x89, 0xce, 0xdb, 0xa6, 0xc7, 0xdf, 0xa6, 0x3a, 0x6f, + 0x8f, 0xb6, 0xde, 0xa6, 0x3a, 0xff, 0xad, 0xde, 0xd6, 0x1d, 0xbb, 0xbb, 0x4d, 0x8f, 0xef, 0xee, + 0xa6, 0x5a, 0xbb, 0xbb, 0x47, 0x5b, 0xbb, 0xbb, 0xa9, 0x5e, 0xf7, 0xa2, 0x72, 0x76, 0xe7, 0xff, + 0x01, 0x00, 0x00, 0xff, 0xff, 0xb9, 0x0b, 0x95, 0x6f, 0x4c, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_audience_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_audience_view.pb.go new file mode 100644 index 0000000..a47c6bb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_audience_view.pb.go @@ -0,0 +1,98 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_audience_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign audience view. +// Includes performance data from interests and remarketing lists for Display +// Network and YouTube Network ads, and remarketing lists for search ads (RLSA), +// aggregated by campaign and audience criterion. This view only includes +// audiences attached at the campaign level. +type CampaignAudienceView struct { + // The resource name of the campaign audience view. + // Campaign audience view resource names have the form: + // + // + // `customers/{customer_id}/campaignAudienceViews/{campaign_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignAudienceView) Reset() { *m = CampaignAudienceView{} } +func (m *CampaignAudienceView) String() string { return proto.CompactTextString(m) } +func (*CampaignAudienceView) ProtoMessage() {} +func (*CampaignAudienceView) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_audience_view_0a728cd592b53255, []int{0} +} +func (m *CampaignAudienceView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignAudienceView.Unmarshal(m, b) +} +func (m *CampaignAudienceView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignAudienceView.Marshal(b, m, deterministic) +} +func (dst *CampaignAudienceView) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignAudienceView.Merge(dst, src) +} +func (m *CampaignAudienceView) XXX_Size() int { + return xxx_messageInfo_CampaignAudienceView.Size(m) +} +func (m *CampaignAudienceView) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignAudienceView.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignAudienceView proto.InternalMessageInfo + +func (m *CampaignAudienceView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*CampaignAudienceView)(nil), "google.ads.googleads.v1.resources.CampaignAudienceView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_audience_view.proto", fileDescriptor_campaign_audience_view_0a728cd592b53255) +} + +var fileDescriptor_campaign_audience_view_0a728cd592b53255 = []byte{ + // 276 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x4f, 0x4a, 0xc4, 0x30, + 0x14, 0xc6, 0x99, 0x0a, 0x82, 0x45, 0x37, 0x83, 0x0b, 0x15, 0x17, 0x8e, 0x32, 0xe0, 0x2a, 0xa1, + 0xb8, 0xcb, 0x80, 0x90, 0x71, 0x31, 0xe0, 0x42, 0x86, 0x59, 0x74, 0x21, 0x85, 0xf2, 0x6c, 0x1e, + 0x21, 0x30, 0xcd, 0x2b, 0x4d, 0xa7, 0x73, 0x03, 0x0f, 0xe2, 0xd2, 0xa3, 0x78, 0x14, 0x4f, 0x21, + 0x9d, 0x34, 0x59, 0x89, 0xee, 0x3e, 0x92, 0xdf, 0xf7, 0x87, 0x97, 0x3e, 0x6a, 0x22, 0xbd, 0x45, + 0x0e, 0xca, 0x71, 0x2f, 0x07, 0xd5, 0x67, 0xbc, 0x45, 0x47, 0xbb, 0xb6, 0x42, 0xc7, 0x2b, 0xa8, + 0x1b, 0x30, 0xda, 0x96, 0xb0, 0x53, 0x06, 0x6d, 0x85, 0x65, 0x6f, 0x70, 0xcf, 0x9a, 0x96, 0x3a, + 0x9a, 0xce, 0xbc, 0x89, 0x81, 0x72, 0x2c, 0xfa, 0x59, 0x9f, 0xb1, 0xe8, 0xbf, 0xba, 0x0e, 0x15, 
+ 0x8d, 0xe1, 0x60, 0x2d, 0x75, 0xd0, 0x19, 0xb2, 0xce, 0x07, 0xdc, 0x2e, 0xd2, 0xf3, 0xa7, 0xb1, + 0x40, 0x8e, 0xf9, 0xb9, 0xc1, 0xfd, 0xf4, 0x2e, 0x3d, 0x0b, 0x11, 0xa5, 0x85, 0x1a, 0x2f, 0x26, + 0x37, 0x93, 0xfb, 0x93, 0xcd, 0x69, 0x78, 0x7c, 0x81, 0x1a, 0x97, 0xef, 0x49, 0x3a, 0xaf, 0xa8, + 0x66, 0xff, 0x8e, 0x58, 0x5e, 0xfe, 0x56, 0xb2, 0x1e, 0x16, 0xac, 0x27, 0xaf, 0xcf, 0xa3, 0x5f, + 0xd3, 0x16, 0xac, 0x66, 0xd4, 0x6a, 0xae, 0xd1, 0x1e, 0xf6, 0x85, 0xa3, 0x34, 0xc6, 0xfd, 0x71, + 0xa3, 0x45, 0x54, 0x1f, 0xc9, 0xd1, 0x4a, 0xca, 0xcf, 0x64, 0xb6, 0xf2, 0x91, 0x52, 0x39, 0xe6, + 0xe5, 0xa0, 0xf2, 0x8c, 0x6d, 0x02, 0xf9, 0x15, 0x98, 0x42, 0x2a, 0x57, 0x44, 0xa6, 0xc8, 0xb3, + 0x22, 0x32, 0xdf, 0xc9, 0xdc, 0x7f, 0x08, 0x21, 0x95, 0x13, 0x22, 0x52, 0x42, 0xe4, 0x99, 0x10, + 0x91, 0x7b, 0x3b, 0x3e, 0x8c, 0x7d, 0xf8, 0x09, 0x00, 0x00, 0xff, 0xff, 0xeb, 0x40, 0xc3, 0x93, + 0xcf, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_bid_modifier.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_bid_modifier.pb.go new file mode 100644 index 0000000..4a8beed --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_bid_modifier.pb.go @@ -0,0 +1,218 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_bid_modifier.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a bid-modifiable only criterion at the campaign level. +type CampaignBidModifier struct { + // The resource name of the campaign bid modifier. + // Campaign bid modifier resource names have the form: + // + // `customers/{customer_id}/campaignBidModifiers/{campaign_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The campaign to which this criterion belongs. + Campaign *wrappers.StringValue `protobuf:"bytes,2,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The ID of the criterion to bid modify. + // + // This field is ignored for mutates. + CriterionId *wrappers.Int64Value `protobuf:"bytes,3,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The modifier for the bid when the criterion matches. + BidModifier *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // The criterion of this campaign bid modifier. 
+ // + // Types that are valid to be assigned to Criterion: + // *CampaignBidModifier_InteractionType + Criterion isCampaignBidModifier_Criterion `protobuf_oneof:"criterion"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignBidModifier) Reset() { *m = CampaignBidModifier{} } +func (m *CampaignBidModifier) String() string { return proto.CompactTextString(m) } +func (*CampaignBidModifier) ProtoMessage() {} +func (*CampaignBidModifier) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_22909370b669e990, []int{0} +} +func (m *CampaignBidModifier) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignBidModifier.Unmarshal(m, b) +} +func (m *CampaignBidModifier) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignBidModifier.Marshal(b, m, deterministic) +} +func (dst *CampaignBidModifier) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignBidModifier.Merge(dst, src) +} +func (m *CampaignBidModifier) XXX_Size() int { + return xxx_messageInfo_CampaignBidModifier.Size(m) +} +func (m *CampaignBidModifier) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignBidModifier.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignBidModifier proto.InternalMessageInfo + +func (m *CampaignBidModifier) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignBidModifier) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignBidModifier) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *CampaignBidModifier) GetBidModifier() *wrappers.DoubleValue { + if m != nil { + return m.BidModifier + } + return nil +} + +type isCampaignBidModifier_Criterion interface { + isCampaignBidModifier_Criterion() +} + +type CampaignBidModifier_InteractionType struct { + InteractionType *common.InteractionTypeInfo `protobuf:"bytes,5,opt,name=interaction_type,json=interactionType,proto3,oneof"` +} + +func (*CampaignBidModifier_InteractionType) isCampaignBidModifier_Criterion() {} + +func (m *CampaignBidModifier) GetCriterion() isCampaignBidModifier_Criterion { + if m != nil { + return m.Criterion + } + return nil +} + +func (m *CampaignBidModifier) GetInteractionType() *common.InteractionTypeInfo { + if x, ok := m.GetCriterion().(*CampaignBidModifier_InteractionType); ok { + return x.InteractionType + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignBidModifier) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignBidModifier_OneofMarshaler, _CampaignBidModifier_OneofUnmarshaler, _CampaignBidModifier_OneofSizer, []interface{}{ + (*CampaignBidModifier_InteractionType)(nil), + } +} + +func _CampaignBidModifier_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignBidModifier) + // criterion + switch x := m.Criterion.(type) { + case *CampaignBidModifier_InteractionType: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InteractionType); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CampaignBidModifier.Criterion has unexpected type %T", x) + } + return nil +} + +func _CampaignBidModifier_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignBidModifier) + switch tag { + case 5: // criterion.interaction_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.InteractionTypeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignBidModifier_InteractionType{msg} + return true, err + default: + return false, nil + } +} + +func _CampaignBidModifier_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignBidModifier) + // criterion + switch x := m.Criterion.(type) { + case *CampaignBidModifier_InteractionType: + s := proto.Size(x.InteractionType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*CampaignBidModifier)(nil), "google.ads.googleads.v1.resources.CampaignBidModifier") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_bid_modifier.proto", fileDescriptor_campaign_bid_modifier_22909370b669e990) +} + +var fileDescriptor_campaign_bid_modifier_22909370b669e990 = []byte{ + // 440 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xdd, 0x8a, 0xd4, 0x30, + 0x14, 0x76, 0xba, 0x2a, 0x6e, 0x66, 0x44, 0xa9, 0x37, 0x65, 0x5d, 0x64, 0x57, 0x59, 0xd8, 0x1b, + 0x53, 0xea, 0x8a, 0x48, 0x45, 0xa5, 0xa3, 0xb0, 0x8e, 0xa0, 0x2c, 0xa3, 0xcc, 0x85, 0x0c, 0xd4, + 0xb4, 0xc9, 0x94, 0xc0, 0x34, 0x27, 0x24, 0xe9, 0xca, 0x3e, 0x80, 0x2f, 0xe2, 0xa5, 0x8f, 0xe2, + 0x73, 0x78, 0xe5, 0x53, 0x48, 0x9b, 0x1f, 0x46, 0x74, 0xdc, 0xbb, 0x93, 0x9c, 0xef, 0xfb, 0xf2, + 0x7d, 0xe7, 0x04, 0x3d, 0x6f, 0x00, 0x9a, 0x35, 0x4b, 0x09, 0xd5, 0xa9, 0x2d, 0xfb, 0xea, 0x3c, + 0x4b, 0x15, 0xd3, 0xd0, 0xa9, 0x9a, 0xe9, 0xb4, 0x26, 0xad, 0x24, 0xbc, 0x11, 0x65, 0xc5, 0x69, + 0xd9, 0x02, 0xe5, 0x2b, 0xce, 0x14, 0x96, 0x0a, 0x0c, 0xc4, 0x87, 0x96, 0x83, 0x09, 0xd5, 0x38, + 0xd0, 0xf1, 0x79, 0x86, 0x03, 0x7d, 0xef, 0xe1, 0xb6, 0x17, 0x6a, 0x68, 0x5b, 0x10, 0x69, 0xad, + 0xb8, 0x61, 0x8a, 0x13, 0xab, 0xb8, 0x77, 0xcf, 0xc1, 0x87, 0x53, 0xd5, 0xad, 0xd2, 0x2f, 0x8a, + 0x48, 0xc9, 0x94, 0x76, 0xfd, 0x7d, 0x2f, 0x27, 0x79, 0x4a, 0x84, 0x00, 0x43, 0x0c, 0x07, 0xe1, + 0xba, 0xf7, 0x7f, 0x46, 0xe8, 0xce, 0x2b, 0xe7, 0x77, 0xca, 0xe9, 0x3b, 0xe7, 0x36, 0x7e, 0x80, + 0x6e, 0x7a, 0x47, 0xa5, 0x20, 0x2d, 0x4b, 0x46, 0x07, 0xa3, 0xe3, 0xdd, 0xf9, 0xc4, 0x5f, 0xbe, + 0x27, 0x2d, 0x8b, 0x9f, 0xa2, 0x1b, 0x3e, 0x6b, 0x12, 0x1d, 0x8c, 0x8e, 0xc7, 0x8f, 0xf6, 0x5d, + 0x28, 0xec, 0xdd, 0xe0, 0x0f, 0x46, 
0x71, 0xd1, 0x2c, 0xc8, 0xba, 0x63, 0xf3, 0x80, 0x8e, 0x5f, + 0xa0, 0x89, 0x8b, 0x01, 0xa2, 0xe4, 0x34, 0xd9, 0x19, 0xd8, 0x77, 0xff, 0x62, 0xcf, 0x84, 0x79, + 0xf2, 0xd8, 0x92, 0xc7, 0x81, 0x30, 0xa3, 0xf1, 0x4b, 0x34, 0xd9, 0x1c, 0x6e, 0x72, 0x75, 0xcb, + 0xeb, 0xaf, 0xa1, 0xab, 0xd6, 0xcc, 0x09, 0x54, 0x1b, 0xf9, 0x3e, 0xa3, 0xdb, 0x5c, 0x18, 0xa6, + 0x48, 0xdd, 0x4f, 0xa3, 0x34, 0x17, 0x92, 0x25, 0xd7, 0x06, 0x91, 0x13, 0xbc, 0x6d, 0x45, 0x76, + 0xfe, 0xbd, 0x27, 0xcf, 0xfb, 0x78, 0x21, 0xd9, 0x4c, 0xac, 0xe0, 0xcd, 0x95, 0xf9, 0x2d, 0xfe, + 0xe7, 0xf5, 0x74, 0x8c, 0x76, 0x83, 0xe3, 0xe9, 0xd7, 0x08, 0x1d, 0xd5, 0xd0, 0xe2, 0x4b, 0xb7, + 0x3f, 0x4d, 0xfe, 0xb1, 0x8d, 0xb3, 0x3e, 0xce, 0xd9, 0xe8, 0xd3, 0x5b, 0x47, 0x6f, 0x60, 0x4d, + 0x44, 0x83, 0x41, 0x35, 0x69, 0xc3, 0xc4, 0x10, 0xd6, 0xff, 0x14, 0xc9, 0xf5, 0x7f, 0xbe, 0xe6, + 0xb3, 0x50, 0x7d, 0x8b, 0x76, 0x4e, 0x8b, 0xe2, 0x7b, 0x74, 0x78, 0x6a, 0x25, 0x0b, 0xaa, 0xb1, + 0x2d, 0xfb, 0x6a, 0x91, 0xe1, 0xb9, 0x47, 0xfe, 0xf0, 0x98, 0x65, 0x41, 0xf5, 0x32, 0x60, 0x96, + 0x8b, 0x6c, 0x19, 0x30, 0xbf, 0xa2, 0x23, 0xdb, 0xc8, 0xf3, 0x82, 0xea, 0x3c, 0x0f, 0xa8, 0x3c, + 0x5f, 0x64, 0x79, 0x1e, 0x70, 0xd5, 0xf5, 0xc1, 0xec, 0xc9, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x7f, 0x89, 0xc2, 0xf4, 0x46, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_budget.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_budget.pb.go new file mode 100644 index 0000000..32894c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_budget.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_budget.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign budget. +type CampaignBudget struct { + // The resource name of the campaign budget. + // Campaign budget resource names have the form: + // + // `customers/{customer_id}/campaignBudgets/{budget_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the campaign budget. + // + // A campaign budget is created using the CampaignBudgetService create + // operation and is assigned a budget ID. A budget ID can be shared across + // different campaigns; the system will then allocate the campaign budget + // among different campaigns to get optimum results. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the campaign budget. 
+ // + // When creating a campaign budget through CampaignBudgetService, every + // explicitly shared campaign budget must have a non-null, non-empty name. + // Campaign budgets that are not explicitly shared derive their name from the + // attached campaign's name. + // + // The length of this string must be between 1 and 255, inclusive, + // in UTF-8 bytes, (trimmed). + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The amount of the budget, in the local currency for the account. + // Amount is specified in micros, where one million is equivalent to one + // currency unit. Monthly spend is capped at 30.4 times this amount. + AmountMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=amount_micros,json=amountMicros,proto3" json:"amount_micros,omitempty"` + // The lifetime amount of the budget, in the local currency for the account. + // Amount is specified in micros, where one million is equivalent to one + // currency unit. + TotalAmountMicros *wrappers.Int64Value `protobuf:"bytes,10,opt,name=total_amount_micros,json=totalAmountMicros,proto3" json:"total_amount_micros,omitempty"` + // The status of this campaign budget. This field is read-only. + Status enums.BudgetStatusEnum_BudgetStatus `protobuf:"varint,6,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.BudgetStatusEnum_BudgetStatus" json:"status,omitempty"` + // The delivery method that determines the rate at which the campaign budget + // is spent. + // + // Defaults to STANDARD if unspecified in a create operation. + DeliveryMethod enums.BudgetDeliveryMethodEnum_BudgetDeliveryMethod `protobuf:"varint,7,opt,name=delivery_method,json=deliveryMethod,proto3,enum=google.ads.googleads.v1.enums.BudgetDeliveryMethodEnum_BudgetDeliveryMethod" json:"delivery_method,omitempty"` + // Specifies whether the budget is explicitly shared. Defaults to true if + // unspecified in a create operation. + // + // If true, the budget was created with the purpose of sharing + // across one or more campaigns. + // + // If false, the budget was created with the intention of only being used + // with a single campaign. The budget's name and status will stay in sync + // with the campaign's name and status. Attempting to share the budget with a + // second campaign will result in an error. + // + // A non-shared budget can become an explicitly shared. The same operation + // must also assign the budget a name. + // + // A shared campaign budget can never become non-shared. + ExplicitlyShared *wrappers.BoolValue `protobuf:"bytes,8,opt,name=explicitly_shared,json=explicitlyShared,proto3" json:"explicitly_shared,omitempty"` + // The number of campaigns actively using the budget. + // + // This field is read-only. + ReferenceCount *wrappers.Int64Value `protobuf:"bytes,9,opt,name=reference_count,json=referenceCount,proto3" json:"reference_count,omitempty"` + // Indicates whether there is a recommended budget for this campaign budget. + // + // This field is read-only. + HasRecommendedBudget *wrappers.BoolValue `protobuf:"bytes,11,opt,name=has_recommended_budget,json=hasRecommendedBudget,proto3" json:"has_recommended_budget,omitempty"` + // The recommended budget amount. If no recommendation is available, this will + // be set to the budget amount. + // Amount is specified in micros, where one million is equivalent to one + // currency unit. + // + // This field is read-only. 
+ RecommendedBudgetAmountMicros *wrappers.Int64Value `protobuf:"bytes,12,opt,name=recommended_budget_amount_micros,json=recommendedBudgetAmountMicros,proto3" json:"recommended_budget_amount_micros,omitempty"` + // Period over which to spend the budget. Defaults to DAILY if not specified. + Period enums.BudgetPeriodEnum_BudgetPeriod `protobuf:"varint,13,opt,name=period,proto3,enum=google.ads.googleads.v1.enums.BudgetPeriodEnum_BudgetPeriod" json:"period,omitempty"` + // The estimated change in weekly clicks if the recommended budget is applied. + // + // This field is read-only. + RecommendedBudgetEstimatedChangeWeeklyClicks *wrappers.Int64Value `protobuf:"bytes,14,opt,name=recommended_budget_estimated_change_weekly_clicks,json=recommendedBudgetEstimatedChangeWeeklyClicks,proto3" json:"recommended_budget_estimated_change_weekly_clicks,omitempty"` + // The estimated change in weekly cost in micros if the recommended budget is + // applied. One million is equivalent to one currency unit. + // + // This field is read-only. + RecommendedBudgetEstimatedChangeWeeklyCostMicros *wrappers.Int64Value `protobuf:"bytes,15,opt,name=recommended_budget_estimated_change_weekly_cost_micros,json=recommendedBudgetEstimatedChangeWeeklyCostMicros,proto3" json:"recommended_budget_estimated_change_weekly_cost_micros,omitempty"` + // The estimated change in weekly interactions if the recommended budget is + // applied. + // + // This field is read-only. + RecommendedBudgetEstimatedChangeWeeklyInteractions *wrappers.Int64Value `protobuf:"bytes,16,opt,name=recommended_budget_estimated_change_weekly_interactions,json=recommendedBudgetEstimatedChangeWeeklyInteractions,proto3" json:"recommended_budget_estimated_change_weekly_interactions,omitempty"` + // The estimated change in weekly views if the recommended budget is applied. + // + // This field is read-only. + RecommendedBudgetEstimatedChangeWeeklyViews *wrappers.Int64Value `protobuf:"bytes,17,opt,name=recommended_budget_estimated_change_weekly_views,json=recommendedBudgetEstimatedChangeWeeklyViews,proto3" json:"recommended_budget_estimated_change_weekly_views,omitempty"` + // The type of the campaign budget. 
+ Type enums.BudgetTypeEnum_BudgetType `protobuf:"varint,18,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.BudgetTypeEnum_BudgetType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignBudget) Reset() { *m = CampaignBudget{} } +func (m *CampaignBudget) String() string { return proto.CompactTextString(m) } +func (*CampaignBudget) ProtoMessage() {} +func (*CampaignBudget) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_b018f8e27e2503ef, []int{0} +} +func (m *CampaignBudget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignBudget.Unmarshal(m, b) +} +func (m *CampaignBudget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignBudget.Marshal(b, m, deterministic) +} +func (dst *CampaignBudget) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignBudget.Merge(dst, src) +} +func (m *CampaignBudget) XXX_Size() int { + return xxx_messageInfo_CampaignBudget.Size(m) +} +func (m *CampaignBudget) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignBudget.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignBudget proto.InternalMessageInfo + +func (m *CampaignBudget) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignBudget) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *CampaignBudget) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *CampaignBudget) GetAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.AmountMicros + } + return nil +} + +func (m *CampaignBudget) GetTotalAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.TotalAmountMicros + } + return nil +} + +func (m *CampaignBudget) GetStatus() enums.BudgetStatusEnum_BudgetStatus { + if m != nil { + return m.Status + } + return enums.BudgetStatusEnum_UNSPECIFIED +} + +func (m *CampaignBudget) GetDeliveryMethod() enums.BudgetDeliveryMethodEnum_BudgetDeliveryMethod { + if m != nil { + return m.DeliveryMethod + } + return enums.BudgetDeliveryMethodEnum_UNSPECIFIED +} + +func (m *CampaignBudget) GetExplicitlyShared() *wrappers.BoolValue { + if m != nil { + return m.ExplicitlyShared + } + return nil +} + +func (m *CampaignBudget) GetReferenceCount() *wrappers.Int64Value { + if m != nil { + return m.ReferenceCount + } + return nil +} + +func (m *CampaignBudget) GetHasRecommendedBudget() *wrappers.BoolValue { + if m != nil { + return m.HasRecommendedBudget + } + return nil +} + +func (m *CampaignBudget) GetRecommendedBudgetAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.RecommendedBudgetAmountMicros + } + return nil +} + +func (m *CampaignBudget) GetPeriod() enums.BudgetPeriodEnum_BudgetPeriod { + if m != nil { + return m.Period + } + return enums.BudgetPeriodEnum_UNSPECIFIED +} + +func (m *CampaignBudget) GetRecommendedBudgetEstimatedChangeWeeklyClicks() *wrappers.Int64Value { + if m != nil { + return m.RecommendedBudgetEstimatedChangeWeeklyClicks + } + return nil +} + +func (m *CampaignBudget) GetRecommendedBudgetEstimatedChangeWeeklyCostMicros() *wrappers.Int64Value { + if m != nil { + return m.RecommendedBudgetEstimatedChangeWeeklyCostMicros + } + return nil +} + +func (m *CampaignBudget) GetRecommendedBudgetEstimatedChangeWeeklyInteractions() *wrappers.Int64Value { + if m != nil { + return m.RecommendedBudgetEstimatedChangeWeeklyInteractions + } + return nil +} + +func 
(m *CampaignBudget) GetRecommendedBudgetEstimatedChangeWeeklyViews() *wrappers.Int64Value { + if m != nil { + return m.RecommendedBudgetEstimatedChangeWeeklyViews + } + return nil +} + +func (m *CampaignBudget) GetType() enums.BudgetTypeEnum_BudgetType { + if m != nil { + return m.Type + } + return enums.BudgetTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CampaignBudget)(nil), "google.ads.googleads.v1.resources.CampaignBudget") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_budget.proto", fileDescriptor_campaign_budget_b018f8e27e2503ef) +} + +var fileDescriptor_campaign_budget_b018f8e27e2503ef = []byte{ + // 752 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x95, 0x5d, 0x6b, 0x2b, 0x45, + 0x18, 0xc7, 0xd9, 0x9c, 0x5a, 0x3d, 0x73, 0xd2, 0xf4, 0x74, 0x8f, 0xc8, 0x52, 0x8f, 0x92, 0x2a, + 0x85, 0x42, 0x65, 0xb7, 0xa9, 0xd2, 0xca, 0xea, 0x85, 0x49, 0x5a, 0x4a, 0xb5, 0x95, 0xb0, 0x2d, + 0x11, 0x24, 0xb0, 0x4c, 0x77, 0x9e, 0x6e, 0x86, 0xee, 0xce, 0x2c, 0x33, 0xb3, 0xa9, 0xb9, 0x13, + 0xf1, 0x4a, 0x10, 0xfc, 0x0c, 0x5e, 0xfa, 0x51, 0xfc, 0x28, 0x7e, 0x0a, 0xd9, 0xd9, 0x97, 0xe6, + 0xc5, 0xba, 0xc9, 0xdd, 0xbc, 0x3c, 0xff, 0x5f, 0xfe, 0xf3, 0x9f, 0x67, 0x33, 0xe8, 0x34, 0xe4, + 0x3c, 0x8c, 0xc0, 0xc1, 0x44, 0x3a, 0xf9, 0x30, 0x1b, 0x4d, 0x3a, 0x8e, 0x00, 0xc9, 0x53, 0x11, + 0x80, 0x74, 0x02, 0x1c, 0x27, 0x98, 0x86, 0xcc, 0xbf, 0x4b, 0x49, 0x08, 0xca, 0x4e, 0x04, 0x57, + 0xdc, 0xdc, 0xcb, 0xab, 0x6d, 0x4c, 0xa4, 0x5d, 0x09, 0xed, 0x49, 0xc7, 0xae, 0x84, 0xbb, 0xee, + 0x73, 0x6c, 0x60, 0x69, 0x2c, 0x9d, 0x1c, 0xe7, 0x13, 0x88, 0xe8, 0x04, 0xc4, 0xd4, 0x8f, 0x41, + 0x8d, 0x39, 0xc9, 0xf1, 0xbb, 0x9d, 0x95, 0xb4, 0x09, 0x08, 0xba, 0xa6, 0x44, 0x2a, 0xac, 0x52, + 0x59, 0x48, 0x9c, 0x95, 0x24, 0x6a, 0x9a, 0x40, 0x21, 0xf8, 0xb8, 0x10, 0xe8, 0xd9, 0x5d, 0x7a, + 0xef, 0x3c, 0x0a, 0x9c, 0x24, 0x20, 0x4a, 0xe0, 0xdb, 0x12, 0x98, 0x50, 0x07, 0x33, 0xc6, 0x15, + 0x56, 0x94, 0xb3, 0x62, 0xf7, 0x93, 0xdf, 0x9a, 0xa8, 0xd5, 0x2f, 0xd2, 0xec, 0x69, 0xb6, 0xf9, + 0x29, 0xda, 0x2a, 0x03, 0xf3, 0x19, 0x8e, 0xc1, 0x32, 0xda, 0xc6, 0xc1, 0x4b, 0xaf, 0x59, 0x2e, + 0x7e, 0x8f, 0x63, 0x30, 0x0f, 0x51, 0x83, 0x12, 0xeb, 0x45, 0xdb, 0x38, 0x78, 0x75, 0xfc, 0x61, + 0x91, 0xb6, 0x5d, 0x5a, 0xb0, 0x2f, 0x99, 0x3a, 0xf9, 0x62, 0x88, 0xa3, 0x14, 0xbc, 0x06, 0x25, + 0xe6, 0x11, 0xda, 0xd0, 0xa0, 0x0d, 0x5d, 0xfe, 0x76, 0xa9, 0xfc, 0x46, 0x09, 0xca, 0xc2, 0xbc, + 0x5e, 0x57, 0x9a, 0xdf, 0xa0, 0x2d, 0x1c, 0xf3, 0x94, 0x29, 0x3f, 0xa6, 0x81, 0xe0, 0xd2, 0x7a, + 0xa7, 0xfe, 0x97, 0x9a, 0xb9, 0xe2, 0x5a, 0x0b, 0xcc, 0xef, 0xd0, 0x1b, 0xc5, 0x15, 0x8e, 0xfc, + 0x79, 0x0e, 0xaa, 0xe7, 0xec, 0x68, 0x5d, 0x77, 0x16, 0x76, 0x8b, 0x36, 0xf3, 0x4b, 0xb2, 0x36, + 0xdb, 0xc6, 0x41, 0xeb, 0xf8, 0x6b, 0xfb, 0xb9, 0x56, 0xd3, 0xb7, 0x64, 0xe7, 0x49, 0xde, 0x68, + 0xc9, 0x39, 0x4b, 0xe3, 0xb9, 0x05, 0xaf, 0x60, 0x99, 0x29, 0xda, 0x5e, 0xe8, 0x34, 0xeb, 0x5d, + 0x8d, 0xbf, 0x5a, 0x09, 0x7f, 0x56, 0x68, 0xaf, 0xb5, 0x74, 0xe6, 0x67, 0xe6, 0x37, 0xbc, 0x16, + 0x99, 0x9b, 0x9b, 0x17, 0x68, 0x07, 0x7e, 0x4a, 0x22, 0x1a, 0x50, 0x15, 0x4d, 0x7d, 0x39, 0xc6, + 0x02, 0x88, 0xf5, 0x9e, 0xce, 0x65, 0x77, 0x29, 0x97, 0x1e, 0xe7, 0x51, 0x1e, 0xcb, 0xeb, 0x27, + 0xd1, 0x8d, 0xd6, 0x98, 0x67, 0x68, 0x5b, 0xc0, 0x3d, 0x08, 0x60, 0x01, 0xf8, 0x41, 0x16, 0x97, + 0xf5, 0xb2, 0x3e, 0xde, 0x56, 0xa5, 0xe9, 0x67, 0x12, 0x73, 0x80, 0x3e, 0x18, 0x63, 0xe9, 0x0b, + 0x08, 0x78, 0x1c, 0x03, 0x23, 0x40, 0x8a, 0xaf, 0xda, 0x7a, 0x55, 0xeb, 0xe9, 0xfd, 0x31, 
0x96, + 0xde, 0x93, 0xb0, 0x68, 0x60, 0x82, 0xda, 0xcb, 0xb4, 0x85, 0x3e, 0x68, 0xd6, 0x1b, 0xfd, 0x48, + 0x2c, 0x92, 0x17, 0x7b, 0x22, 0xff, 0xd6, 0xad, 0xad, 0x35, 0x7a, 0x62, 0xa0, 0x25, 0x33, 0x97, + 0x95, 0x2f, 0x78, 0x05, 0xcb, 0xfc, 0xd5, 0x40, 0x9d, 0xff, 0x30, 0x0f, 0x52, 0xd1, 0x18, 0x2b, + 0x20, 0x7e, 0x30, 0xc6, 0x2c, 0x04, 0xff, 0x11, 0xe0, 0x21, 0x9a, 0xfa, 0x41, 0x44, 0x83, 0x07, + 0x69, 0xb5, 0xea, 0x4f, 0xf3, 0xd9, 0xd2, 0x69, 0xce, 0x4b, 0x66, 0x5f, 0x23, 0x7f, 0xd0, 0xc4, + 0xbe, 0x06, 0x9a, 0xbf, 0x1b, 0xe8, 0x64, 0x1d, 0x1b, 0x5c, 0x56, 0xc9, 0x6e, 0xd7, 0x7b, 0x39, + 0x5a, 0xd1, 0x0b, 0x97, 0x65, 0xd8, 0x7f, 0x18, 0xe8, 0x74, 0x0d, 0x3f, 0x94, 0x29, 0x10, 0x38, + 0xd0, 0x7f, 0x74, 0xd6, 0xeb, 0x7a, 0x43, 0xc7, 0xab, 0x19, 0xba, 0x9c, 0xc1, 0x9a, 0xbf, 0x18, + 0xe8, 0x68, 0x0d, 0x4b, 0x13, 0x0a, 0x8f, 0xd2, 0xda, 0xa9, 0xf7, 0x72, 0xb8, 0x9a, 0x97, 0x61, + 0xc6, 0x33, 0xaf, 0xd0, 0x46, 0xf6, 0x14, 0x58, 0xa6, 0x6e, 0xc1, 0x2f, 0x57, 0x6a, 0xc1, 0xdb, + 0x69, 0x02, 0x33, 0x0d, 0x98, 0x4d, 0x3d, 0x4d, 0xe9, 0xfd, 0xdc, 0x40, 0xfb, 0x01, 0x8f, 0xed, + 0xda, 0x77, 0xb4, 0xf7, 0x66, 0xfe, 0xcd, 0x18, 0x64, 0xe7, 0x18, 0x18, 0x3f, 0x7e, 0x5b, 0x28, + 0x43, 0x1e, 0x61, 0x16, 0xda, 0x5c, 0x84, 0x4e, 0x08, 0x4c, 0x9f, 0xb2, 0x7c, 0xcc, 0x12, 0x2a, + 0xff, 0xe7, 0x65, 0xff, 0xaa, 0x1a, 0xfd, 0xd9, 0x78, 0x71, 0xd1, 0xed, 0xfe, 0xd5, 0xd8, 0xbb, + 0xc8, 0x91, 0x5d, 0x22, 0xed, 0x7c, 0x98, 0x8d, 0x86, 0x1d, 0xdb, 0x2b, 0x2b, 0xff, 0x2e, 0x6b, + 0x46, 0x5d, 0x22, 0x47, 0x55, 0xcd, 0x68, 0xd8, 0x19, 0x55, 0x35, 0xff, 0x34, 0xf6, 0xf3, 0x0d, + 0xd7, 0xed, 0x12, 0xe9, 0xba, 0x55, 0x95, 0xeb, 0x0e, 0x3b, 0xae, 0x5b, 0xd5, 0xdd, 0x6d, 0x6a, + 0xb3, 0x9f, 0xff, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x08, 0x1b, 0x60, 0x85, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion.pb.go new file mode 100644 index 0000000..4d7bac2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion.pb.go @@ -0,0 +1,1106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_criterion.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign criterion. +type CampaignCriterion struct { + // The resource name of the campaign criterion. 
+ // Campaign criterion resource names have the form: + // + // `customers/{customer_id}/campaignCriteria/{campaign_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The campaign to which the criterion belongs. + Campaign *wrappers.StringValue `protobuf:"bytes,4,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The ID of the criterion. + // + // This field is ignored during mutate. + CriterionId *wrappers.Int64Value `protobuf:"bytes,5,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The modifier for the bids when the criterion matches. The modifier must be + // in the range: 0.1 - 10.0. Most targetable criteria types support modifiers. + // Use 0 to opt out of a Device type. + BidModifier *wrappers.FloatValue `protobuf:"bytes,14,opt,name=bid_modifier,json=bidModifier,proto3" json:"bid_modifier,omitempty"` + // Whether to target (`false`) or exclude (`true`) the criterion. + Negative *wrappers.BoolValue `protobuf:"bytes,7,opt,name=negative,proto3" json:"negative,omitempty"` + // The type of the criterion. + Type enums.CriterionTypeEnum_CriterionType `protobuf:"varint,6,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.CriterionTypeEnum_CriterionType" json:"type,omitempty"` + // The campaign criterion. + // + // Exactly one must be set. + // + // Types that are valid to be assigned to Criterion: + // *CampaignCriterion_Keyword + // *CampaignCriterion_Placement + // *CampaignCriterion_MobileAppCategory + // *CampaignCriterion_MobileApplication + // *CampaignCriterion_Location + // *CampaignCriterion_Device + // *CampaignCriterion_AdSchedule + // *CampaignCriterion_AgeRange + // *CampaignCriterion_Gender + // *CampaignCriterion_IncomeRange + // *CampaignCriterion_ParentalStatus + // *CampaignCriterion_UserList + // *CampaignCriterion_YoutubeVideo + // *CampaignCriterion_YoutubeChannel + // *CampaignCriterion_Proximity + // *CampaignCriterion_Topic + // *CampaignCriterion_ListingScope + // *CampaignCriterion_Language + // *CampaignCriterion_IpBlock + // *CampaignCriterion_ContentLabel + // *CampaignCriterion_Carrier + // *CampaignCriterion_UserInterest + // *CampaignCriterion_Webpage + // *CampaignCriterion_OperatingSystemVersion + // *CampaignCriterion_MobileDevice + // *CampaignCriterion_LocationGroup + Criterion isCampaignCriterion_Criterion `protobuf_oneof:"criterion"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignCriterion) Reset() { *m = CampaignCriterion{} } +func (m *CampaignCriterion) String() string { return proto.CompactTextString(m) } +func (*CampaignCriterion) ProtoMessage() {} +func (*CampaignCriterion) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_f1511be1f9991413, []int{0} +} +func (m *CampaignCriterion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignCriterion.Unmarshal(m, b) +} +func (m *CampaignCriterion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignCriterion.Marshal(b, m, deterministic) +} +func (dst *CampaignCriterion) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignCriterion.Merge(dst, src) +} +func (m *CampaignCriterion) XXX_Size() int { + return xxx_messageInfo_CampaignCriterion.Size(m) +} +func (m *CampaignCriterion) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignCriterion.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignCriterion 
proto.InternalMessageInfo + +func (m *CampaignCriterion) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignCriterion) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignCriterion) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *CampaignCriterion) GetBidModifier() *wrappers.FloatValue { + if m != nil { + return m.BidModifier + } + return nil +} + +func (m *CampaignCriterion) GetNegative() *wrappers.BoolValue { + if m != nil { + return m.Negative + } + return nil +} + +func (m *CampaignCriterion) GetType() enums.CriterionTypeEnum_CriterionType { + if m != nil { + return m.Type + } + return enums.CriterionTypeEnum_UNSPECIFIED +} + +type isCampaignCriterion_Criterion interface { + isCampaignCriterion_Criterion() +} + +type CampaignCriterion_Keyword struct { + Keyword *common.KeywordInfo `protobuf:"bytes,8,opt,name=keyword,proto3,oneof"` +} + +type CampaignCriterion_Placement struct { + Placement *common.PlacementInfo `protobuf:"bytes,9,opt,name=placement,proto3,oneof"` +} + +type CampaignCriterion_MobileAppCategory struct { + MobileAppCategory *common.MobileAppCategoryInfo `protobuf:"bytes,10,opt,name=mobile_app_category,json=mobileAppCategory,proto3,oneof"` +} + +type CampaignCriterion_MobileApplication struct { + MobileApplication *common.MobileApplicationInfo `protobuf:"bytes,11,opt,name=mobile_application,json=mobileApplication,proto3,oneof"` +} + +type CampaignCriterion_Location struct { + Location *common.LocationInfo `protobuf:"bytes,12,opt,name=location,proto3,oneof"` +} + +type CampaignCriterion_Device struct { + Device *common.DeviceInfo `protobuf:"bytes,13,opt,name=device,proto3,oneof"` +} + +type CampaignCriterion_AdSchedule struct { + AdSchedule *common.AdScheduleInfo `protobuf:"bytes,15,opt,name=ad_schedule,json=adSchedule,proto3,oneof"` +} + +type CampaignCriterion_AgeRange struct { + AgeRange *common.AgeRangeInfo `protobuf:"bytes,16,opt,name=age_range,json=ageRange,proto3,oneof"` +} + +type CampaignCriterion_Gender struct { + Gender *common.GenderInfo `protobuf:"bytes,17,opt,name=gender,proto3,oneof"` +} + +type CampaignCriterion_IncomeRange struct { + IncomeRange *common.IncomeRangeInfo `protobuf:"bytes,18,opt,name=income_range,json=incomeRange,proto3,oneof"` +} + +type CampaignCriterion_ParentalStatus struct { + ParentalStatus *common.ParentalStatusInfo `protobuf:"bytes,19,opt,name=parental_status,json=parentalStatus,proto3,oneof"` +} + +type CampaignCriterion_UserList struct { + UserList *common.UserListInfo `protobuf:"bytes,22,opt,name=user_list,json=userList,proto3,oneof"` +} + +type CampaignCriterion_YoutubeVideo struct { + YoutubeVideo *common.YouTubeVideoInfo `protobuf:"bytes,20,opt,name=youtube_video,json=youtubeVideo,proto3,oneof"` +} + +type CampaignCriterion_YoutubeChannel struct { + YoutubeChannel *common.YouTubeChannelInfo `protobuf:"bytes,21,opt,name=youtube_channel,json=youtubeChannel,proto3,oneof"` +} + +type CampaignCriterion_Proximity struct { + Proximity *common.ProximityInfo `protobuf:"bytes,23,opt,name=proximity,proto3,oneof"` +} + +type CampaignCriterion_Topic struct { + Topic *common.TopicInfo `protobuf:"bytes,24,opt,name=topic,proto3,oneof"` +} + +type CampaignCriterion_ListingScope struct { + ListingScope *common.ListingScopeInfo `protobuf:"bytes,25,opt,name=listing_scope,json=listingScope,proto3,oneof"` +} + +type CampaignCriterion_Language struct { + Language 
*common.LanguageInfo `protobuf:"bytes,26,opt,name=language,proto3,oneof"` +} + +type CampaignCriterion_IpBlock struct { + IpBlock *common.IpBlockInfo `protobuf:"bytes,27,opt,name=ip_block,json=ipBlock,proto3,oneof"` +} + +type CampaignCriterion_ContentLabel struct { + ContentLabel *common.ContentLabelInfo `protobuf:"bytes,28,opt,name=content_label,json=contentLabel,proto3,oneof"` +} + +type CampaignCriterion_Carrier struct { + Carrier *common.CarrierInfo `protobuf:"bytes,29,opt,name=carrier,proto3,oneof"` +} + +type CampaignCriterion_UserInterest struct { + UserInterest *common.UserInterestInfo `protobuf:"bytes,30,opt,name=user_interest,json=userInterest,proto3,oneof"` +} + +type CampaignCriterion_Webpage struct { + Webpage *common.WebpageInfo `protobuf:"bytes,31,opt,name=webpage,proto3,oneof"` +} + +type CampaignCriterion_OperatingSystemVersion struct { + OperatingSystemVersion *common.OperatingSystemVersionInfo `protobuf:"bytes,32,opt,name=operating_system_version,json=operatingSystemVersion,proto3,oneof"` +} + +type CampaignCriterion_MobileDevice struct { + MobileDevice *common.MobileDeviceInfo `protobuf:"bytes,33,opt,name=mobile_device,json=mobileDevice,proto3,oneof"` +} + +type CampaignCriterion_LocationGroup struct { + LocationGroup *common.LocationGroupInfo `protobuf:"bytes,34,opt,name=location_group,json=locationGroup,proto3,oneof"` +} + +func (*CampaignCriterion_Keyword) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Placement) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_MobileAppCategory) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_MobileApplication) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Location) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Device) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_AdSchedule) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_AgeRange) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Gender) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_IncomeRange) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_ParentalStatus) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_UserList) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_YoutubeVideo) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_YoutubeChannel) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Proximity) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Topic) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_ListingScope) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Language) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_IpBlock) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_ContentLabel) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Carrier) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_UserInterest) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_Webpage) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_OperatingSystemVersion) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_MobileDevice) isCampaignCriterion_Criterion() {} + +func (*CampaignCriterion_LocationGroup) isCampaignCriterion_Criterion() {} + +func (m *CampaignCriterion) GetCriterion() isCampaignCriterion_Criterion { + if m != nil { + return m.Criterion + } + return nil +} + +func (m *CampaignCriterion) GetKeyword() *common.KeywordInfo { + if x, ok := 
m.GetCriterion().(*CampaignCriterion_Keyword); ok { + return x.Keyword + } + return nil +} + +func (m *CampaignCriterion) GetPlacement() *common.PlacementInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Placement); ok { + return x.Placement + } + return nil +} + +func (m *CampaignCriterion) GetMobileAppCategory() *common.MobileAppCategoryInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_MobileAppCategory); ok { + return x.MobileAppCategory + } + return nil +} + +func (m *CampaignCriterion) GetMobileApplication() *common.MobileApplicationInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_MobileApplication); ok { + return x.MobileApplication + } + return nil +} + +func (m *CampaignCriterion) GetLocation() *common.LocationInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Location); ok { + return x.Location + } + return nil +} + +func (m *CampaignCriterion) GetDevice() *common.DeviceInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Device); ok { + return x.Device + } + return nil +} + +func (m *CampaignCriterion) GetAdSchedule() *common.AdScheduleInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_AdSchedule); ok { + return x.AdSchedule + } + return nil +} + +func (m *CampaignCriterion) GetAgeRange() *common.AgeRangeInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_AgeRange); ok { + return x.AgeRange + } + return nil +} + +func (m *CampaignCriterion) GetGender() *common.GenderInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Gender); ok { + return x.Gender + } + return nil +} + +func (m *CampaignCriterion) GetIncomeRange() *common.IncomeRangeInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_IncomeRange); ok { + return x.IncomeRange + } + return nil +} + +func (m *CampaignCriterion) GetParentalStatus() *common.ParentalStatusInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_ParentalStatus); ok { + return x.ParentalStatus + } + return nil +} + +func (m *CampaignCriterion) GetUserList() *common.UserListInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_UserList); ok { + return x.UserList + } + return nil +} + +func (m *CampaignCriterion) GetYoutubeVideo() *common.YouTubeVideoInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_YoutubeVideo); ok { + return x.YoutubeVideo + } + return nil +} + +func (m *CampaignCriterion) GetYoutubeChannel() *common.YouTubeChannelInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_YoutubeChannel); ok { + return x.YoutubeChannel + } + return nil +} + +func (m *CampaignCriterion) GetProximity() *common.ProximityInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Proximity); ok { + return x.Proximity + } + return nil +} + +func (m *CampaignCriterion) GetTopic() *common.TopicInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Topic); ok { + return x.Topic + } + return nil +} + +func (m *CampaignCriterion) GetListingScope() *common.ListingScopeInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_ListingScope); ok { + return x.ListingScope + } + return nil +} + +func (m *CampaignCriterion) GetLanguage() *common.LanguageInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Language); ok { + return x.Language + } + return nil +} + +func (m *CampaignCriterion) GetIpBlock() *common.IpBlockInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_IpBlock); ok { + return x.IpBlock + } + return nil +} + +func (m *CampaignCriterion) GetContentLabel() *common.ContentLabelInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_ContentLabel); ok { + return 
x.ContentLabel + } + return nil +} + +func (m *CampaignCriterion) GetCarrier() *common.CarrierInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Carrier); ok { + return x.Carrier + } + return nil +} + +func (m *CampaignCriterion) GetUserInterest() *common.UserInterestInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_UserInterest); ok { + return x.UserInterest + } + return nil +} + +func (m *CampaignCriterion) GetWebpage() *common.WebpageInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_Webpage); ok { + return x.Webpage + } + return nil +} + +func (m *CampaignCriterion) GetOperatingSystemVersion() *common.OperatingSystemVersionInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_OperatingSystemVersion); ok { + return x.OperatingSystemVersion + } + return nil +} + +func (m *CampaignCriterion) GetMobileDevice() *common.MobileDeviceInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_MobileDevice); ok { + return x.MobileDevice + } + return nil +} + +func (m *CampaignCriterion) GetLocationGroup() *common.LocationGroupInfo { + if x, ok := m.GetCriterion().(*CampaignCriterion_LocationGroup); ok { + return x.LocationGroup + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CampaignCriterion) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignCriterion_OneofMarshaler, _CampaignCriterion_OneofUnmarshaler, _CampaignCriterion_OneofSizer, []interface{}{ + (*CampaignCriterion_Keyword)(nil), + (*CampaignCriterion_Placement)(nil), + (*CampaignCriterion_MobileAppCategory)(nil), + (*CampaignCriterion_MobileApplication)(nil), + (*CampaignCriterion_Location)(nil), + (*CampaignCriterion_Device)(nil), + (*CampaignCriterion_AdSchedule)(nil), + (*CampaignCriterion_AgeRange)(nil), + (*CampaignCriterion_Gender)(nil), + (*CampaignCriterion_IncomeRange)(nil), + (*CampaignCriterion_ParentalStatus)(nil), + (*CampaignCriterion_UserList)(nil), + (*CampaignCriterion_YoutubeVideo)(nil), + (*CampaignCriterion_YoutubeChannel)(nil), + (*CampaignCriterion_Proximity)(nil), + (*CampaignCriterion_Topic)(nil), + (*CampaignCriterion_ListingScope)(nil), + (*CampaignCriterion_Language)(nil), + (*CampaignCriterion_IpBlock)(nil), + (*CampaignCriterion_ContentLabel)(nil), + (*CampaignCriterion_Carrier)(nil), + (*CampaignCriterion_UserInterest)(nil), + (*CampaignCriterion_Webpage)(nil), + (*CampaignCriterion_OperatingSystemVersion)(nil), + (*CampaignCriterion_MobileDevice)(nil), + (*CampaignCriterion_LocationGroup)(nil), + } +} + +func _CampaignCriterion_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignCriterion) + // criterion + switch x := m.Criterion.(type) { + case *CampaignCriterion_Keyword: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Keyword); err != nil { + return err + } + case *CampaignCriterion_Placement: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Placement); err != nil { + return err + } + case *CampaignCriterion_MobileAppCategory: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileAppCategory); err != nil { + return err + } + case *CampaignCriterion_MobileApplication: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileApplication); err != nil { + return err + } + case *CampaignCriterion_Location: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.Location); err != nil { + return err + } + case *CampaignCriterion_Device: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Device); err != nil { + return err + } + case *CampaignCriterion_AdSchedule: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdSchedule); err != nil { + return err + } + case *CampaignCriterion_AgeRange: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AgeRange); err != nil { + return err + } + case *CampaignCriterion_Gender: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gender); err != nil { + return err + } + case *CampaignCriterion_IncomeRange: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IncomeRange); err != nil { + return err + } + case *CampaignCriterion_ParentalStatus: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ParentalStatus); err != nil { + return err + } + case *CampaignCriterion_UserList: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserList); err != nil { + return err + } + case *CampaignCriterion_YoutubeVideo: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeVideo); err != nil { + return err + } + case *CampaignCriterion_YoutubeChannel: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeChannel); err != nil { + return err + } + case *CampaignCriterion_Proximity: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Proximity); err != nil { + return err + } + case *CampaignCriterion_Topic: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Topic); err != nil { + return err + } + case *CampaignCriterion_ListingScope: + b.EncodeVarint(25<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListingScope); err != nil { + return err + } + case *CampaignCriterion_Language: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Language); err != nil { + return err + } + case *CampaignCriterion_IpBlock: + b.EncodeVarint(27<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IpBlock); err != nil { + return err + } + case *CampaignCriterion_ContentLabel: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ContentLabel); err != nil { + return err + } + case *CampaignCriterion_Carrier: + b.EncodeVarint(29<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Carrier); err != nil { + return err + } + case *CampaignCriterion_UserInterest: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserInterest); err != nil { + return err + } + case *CampaignCriterion_Webpage: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Webpage); err != nil { + return err + } + case *CampaignCriterion_OperatingSystemVersion: + b.EncodeVarint(32<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OperatingSystemVersion); err != nil { + return err + } + case *CampaignCriterion_MobileDevice: + b.EncodeVarint(33<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileDevice); err != nil { + return err + } + case *CampaignCriterion_LocationGroup: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LocationGroup); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CampaignCriterion.Criterion has unexpected type %T", x) + } + return nil +} + +func _CampaignCriterion_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignCriterion) + switch 
tag { + case 8: // criterion.keyword + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.KeywordInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Keyword{msg} + return true, err + case 9: // criterion.placement + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PlacementInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Placement{msg} + return true, err + case 10: // criterion.mobile_app_category + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileAppCategoryInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_MobileAppCategory{msg} + return true, err + case 11: // criterion.mobile_application + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileApplicationInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_MobileApplication{msg} + return true, err + case 12: // criterion.location + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LocationInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Location{msg} + return true, err + case 13: // criterion.device + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.DeviceInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Device{msg} + return true, err + case 15: // criterion.ad_schedule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AdScheduleInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_AdSchedule{msg} + return true, err + case 16: // criterion.age_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AgeRangeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_AgeRange{msg} + return true, err + case 17: // criterion.gender + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.GenderInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Gender{msg} + return true, err + case 18: // criterion.income_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.IncomeRangeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_IncomeRange{msg} + return true, err + case 19: // criterion.parental_status + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ParentalStatusInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_ParentalStatus{msg} + return true, err + case 22: // criterion.user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.UserListInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_UserList{msg} + return true, err + case 20: // criterion.youtube_video + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeVideoInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_YoutubeVideo{msg} + return true, err + case 21: // criterion.youtube_channel + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeChannelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_YoutubeChannel{msg} + return true, err + case 23: // 
criterion.proximity + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ProximityInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Proximity{msg} + return true, err + case 24: // criterion.topic + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TopicInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Topic{msg} + return true, err + case 25: // criterion.listing_scope + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ListingScopeInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_ListingScope{msg} + return true, err + case 26: // criterion.language + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LanguageInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Language{msg} + return true, err + case 27: // criterion.ip_block + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.IpBlockInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_IpBlock{msg} + return true, err + case 28: // criterion.content_label + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ContentLabelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_ContentLabel{msg} + return true, err + case 29: // criterion.carrier + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CarrierInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Carrier{msg} + return true, err + case 30: // criterion.user_interest + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.UserInterestInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_UserInterest{msg} + return true, err + case 31: // criterion.webpage + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.WebpageInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_Webpage{msg} + return true, err + case 32: // criterion.operating_system_version + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.OperatingSystemVersionInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_OperatingSystemVersion{msg} + return true, err + case 33: // criterion.mobile_device + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileDeviceInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_MobileDevice{msg} + return true, err + case 34: // criterion.location_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LocationGroupInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CampaignCriterion_LocationGroup{msg} + return true, err + default: + return false, nil + } +} + +func _CampaignCriterion_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignCriterion) + // criterion + switch x := m.Criterion.(type) { + case *CampaignCriterion_Keyword: + s := proto.Size(x.Keyword) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Placement: + s := proto.Size(x.Placement) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_MobileAppCategory: + s := proto.Size(x.MobileAppCategory) + 
n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_MobileApplication: + s := proto.Size(x.MobileApplication) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Location: + s := proto.Size(x.Location) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Device: + s := proto.Size(x.Device) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_AdSchedule: + s := proto.Size(x.AdSchedule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_AgeRange: + s := proto.Size(x.AgeRange) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Gender: + s := proto.Size(x.Gender) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_IncomeRange: + s := proto.Size(x.IncomeRange) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_ParentalStatus: + s := proto.Size(x.ParentalStatus) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_UserList: + s := proto.Size(x.UserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_YoutubeVideo: + s := proto.Size(x.YoutubeVideo) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_YoutubeChannel: + s := proto.Size(x.YoutubeChannel) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Proximity: + s := proto.Size(x.Proximity) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Topic: + s := proto.Size(x.Topic) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_ListingScope: + s := proto.Size(x.ListingScope) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Language: + s := proto.Size(x.Language) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_IpBlock: + s := proto.Size(x.IpBlock) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_ContentLabel: + s := proto.Size(x.ContentLabel) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Carrier: + s := proto.Size(x.Carrier) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_UserInterest: + s := proto.Size(x.UserInterest) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_Webpage: + s := proto.Size(x.Webpage) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_OperatingSystemVersion: + s := proto.Size(x.OperatingSystemVersion) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_MobileDevice: + s := proto.Size(x.MobileDevice) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterion_LocationGroup: + s := proto.Size(x.LocationGroup) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*CampaignCriterion)(nil), "google.ads.googleads.v1.resources.CampaignCriterion") +} + +func init() { + 
proto.RegisterFile("google/ads/googleads/v1/resources/campaign_criterion.proto", fileDescriptor_campaign_criterion_f1511be1f9991413) +} + +var fileDescriptor_campaign_criterion_f1511be1f9991413 = []byte{ + // 1084 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x96, 0xdf, 0x72, 0xdc, 0x34, + 0x17, 0xc0, 0xbf, 0xe4, 0x6b, 0xd3, 0x8d, 0x76, 0x93, 0x12, 0xb5, 0x04, 0x91, 0x86, 0x92, 0x94, + 0xe9, 0x4c, 0xf8, 0x53, 0x2f, 0x1b, 0xa0, 0xc3, 0x2c, 0x33, 0x9d, 0xd9, 0x6c, 0x21, 0xdd, 0x36, + 0x81, 0xb0, 0x09, 0xdb, 0xa1, 0x13, 0xc6, 0xa3, 0xb5, 0x4f, 0x5c, 0x4d, 0x6d, 0x49, 0x23, 0xcb, + 0x1b, 0xf6, 0x9a, 0x97, 0xe0, 0x9a, 0x4b, 0x1e, 0x85, 0x47, 0xe1, 0x29, 0x18, 0xc9, 0x92, 0xb3, + 0x21, 0x84, 0x35, 0x77, 0xf6, 0xd1, 0xf9, 0xfd, 0x7c, 0x74, 0x6c, 0x4b, 0x42, 0xdd, 0x44, 0x88, + 0x24, 0x85, 0x36, 0x8d, 0xf3, 0x76, 0x79, 0x69, 0xae, 0x26, 0x9d, 0xb6, 0x82, 0x5c, 0x14, 0x2a, + 0x82, 0xbc, 0x1d, 0xd1, 0x4c, 0x52, 0x96, 0xf0, 0x30, 0x52, 0x4c, 0x83, 0x62, 0x82, 0x07, 0x52, + 0x09, 0x2d, 0xf0, 0x76, 0x09, 0x04, 0x34, 0xce, 0x83, 0x8a, 0x0d, 0x26, 0x9d, 0xa0, 0x62, 0x37, + 0x1e, 0x5d, 0xa7, 0x8f, 0x44, 0x96, 0x09, 0xde, 0x76, 0x4a, 0x5a, 0x1a, 0x37, 0x76, 0xaf, 0x4b, + 0x07, 0x5e, 0x64, 0x79, 0xbb, 0x2a, 0x20, 0xd4, 0x53, 0x09, 0x8e, 0xb9, 0xef, 0x18, 0x7b, 0x37, + 0x2e, 0xce, 0xda, 0xe7, 0x8a, 0x4a, 0x09, 0x2a, 0x77, 0xe3, 0x9b, 0xde, 0x29, 0x59, 0x9b, 0x72, + 0x2e, 0x34, 0xd5, 0x4c, 0x70, 0x37, 0xfa, 0xe0, 0xd7, 0xbb, 0x68, 0xad, 0xef, 0x26, 0xd8, 0xf7, + 0x7a, 0xfc, 0x01, 0x5a, 0xf1, 0x73, 0x08, 0x39, 0xcd, 0x80, 0x2c, 0x6c, 0x2d, 0xec, 0x2c, 0x0f, + 0x5b, 0x3e, 0xf8, 0x2d, 0xcd, 0x00, 0x7f, 0x89, 0x1a, 0xbe, 0x35, 0xe4, 0xc6, 0xd6, 0xc2, 0x4e, + 0x73, 0x77, 0xd3, 0xb5, 0x21, 0xf0, 0xb5, 0x04, 0xc7, 0x5a, 0x31, 0x9e, 0x8c, 0x68, 0x5a, 0xc0, + 0xb0, 0xca, 0xc6, 0x4f, 0x50, 0xeb, 0x62, 0x2a, 0x2c, 0x26, 0x37, 0x2d, 0x7d, 0xef, 0x0a, 0x3d, + 0xe0, 0xfa, 0xf1, 0xe7, 0x25, 0xdc, 0xac, 0x80, 0x41, 0x6c, 0xf8, 0x31, 0x8b, 0xc3, 0x4c, 0xc4, + 0xec, 0x8c, 0x81, 0x22, 0xab, 0xd7, 0xf0, 0xdf, 0xa4, 0x82, 0x6a, 0xc7, 0x8f, 0x59, 0x7c, 0xe8, + 0xf2, 0xf1, 0x63, 0xd4, 0xe0, 0x90, 0x50, 0xcd, 0x26, 0x40, 0x6e, 0x59, 0x76, 0xe3, 0x0a, 0xbb, + 0x27, 0x44, 0xea, 0xea, 0xf6, 0xb9, 0x78, 0x88, 0x6e, 0x98, 0xc6, 0x93, 0xa5, 0xad, 0x85, 0x9d, + 0xd5, 0xdd, 0x27, 0xc1, 0x75, 0xef, 0xdf, 0xbe, 0xad, 0xa0, 0x6a, 0xe7, 0xc9, 0x54, 0xc2, 0xd7, + 0xbc, 0xc8, 0x2e, 0x47, 0x86, 0xd6, 0x85, 0xf7, 0xd1, 0xad, 0x37, 0x30, 0x3d, 0x17, 0x2a, 0x26, + 0x0d, 0x5b, 0xca, 0xc7, 0xd7, 0x6a, 0xcb, 0x6f, 0x26, 0x78, 0x51, 0xa6, 0x0f, 0xf8, 0x99, 0x78, + 0xf6, 0xbf, 0xa1, 0xa7, 0xf1, 0x21, 0x5a, 0x96, 0x29, 0x8d, 0x20, 0x03, 0xae, 0xc9, 0xb2, 0x55, + 0x3d, 0x9a, 0xa7, 0x3a, 0xf2, 0x80, 0x93, 0x5d, 0x18, 0x70, 0x82, 0xee, 0x64, 0x62, 0xcc, 0x52, + 0x08, 0xa9, 0x94, 0x61, 0x44, 0x35, 0x24, 0x42, 0x4d, 0x09, 0xb2, 0xe2, 0x2f, 0xe6, 0x89, 0x0f, + 0x2d, 0xda, 0x93, 0xb2, 0xef, 0x40, 0xf7, 0x80, 0xb5, 0xec, 0xef, 0x03, 0xf8, 0x0c, 0xe1, 0x8b, + 0x07, 0xa5, 0x2c, 0xb2, 0x9f, 0x27, 0x69, 0xfe, 0xc7, 0xe7, 0x78, 0xf0, 0xca, 0x73, 0xfc, 0x00, + 0x7e, 0x8e, 0x1a, 0xa9, 0x70, 0xf6, 0x96, 0xb5, 0x7f, 0x32, 0xcf, 0x7e, 0x20, 0x2e, 0x49, 0x2b, + 0x1e, 0x3f, 0x45, 0x4b, 0x31, 0x4c, 0x58, 0x04, 0x64, 0xc5, 0x9a, 0x3e, 0x9a, 0x67, 0x7a, 0x6a, + 0xb3, 0x9d, 0xc7, 0xb1, 0xf8, 0x7b, 0xd4, 0xa4, 0x71, 0x98, 0x47, 0xaf, 0x21, 0x2e, 0x52, 0x20, + 0xb7, 0xad, 0x2a, 0x98, 0xa7, 0xea, 0xc5, 0xc7, 0x8e, 0x70, 0x3a, 0x44, 0xab, 0x08, 0x7e, 0x81, + 0x96, 0x69, 0x02, 0xa1, 0xa2, 0x3c, 0x01, 0xf2, 0x56, 0xbd, 0x59, 
0xf6, 0x12, 0x18, 0x9a, 0x7c, + 0x3f, 0x4b, 0xea, 0xee, 0xcd, 0x2c, 0x13, 0xe0, 0x31, 0x28, 0xb2, 0x56, 0x6f, 0x96, 0xfb, 0x36, + 0xdb, 0xcf, 0xb2, 0x64, 0xf1, 0x09, 0x6a, 0x31, 0x1e, 0x89, 0xcc, 0x57, 0x85, 0xad, 0xab, 0x3d, + 0xcf, 0x35, 0xb0, 0xcc, 0x6c, 0x61, 0x4d, 0x76, 0x11, 0xc2, 0x3f, 0xa1, 0xdb, 0x92, 0x2a, 0xe0, + 0x9a, 0xa6, 0x61, 0xae, 0xa9, 0x2e, 0x72, 0x72, 0xc7, 0x8a, 0x77, 0xe7, 0x7e, 0xf3, 0x0e, 0x3b, + 0xb6, 0x94, 0x73, 0xaf, 0xca, 0x4b, 0x51, 0xd3, 0xc7, 0x22, 0x07, 0x15, 0xa6, 0x2c, 0xd7, 0x64, + 0xbd, 0x5e, 0x1f, 0x7f, 0xc8, 0x41, 0x1d, 0xb0, 0xdc, 0xff, 0x4b, 0x8d, 0xc2, 0xdd, 0xe3, 0x97, + 0x68, 0x65, 0x2a, 0x0a, 0x5d, 0x8c, 0x21, 0x9c, 0xb0, 0x18, 0x04, 0xb9, 0x6b, 0x85, 0x9f, 0xce, + 0x13, 0xfe, 0x28, 0x8a, 0x93, 0x62, 0x0c, 0x23, 0xc3, 0x38, 0x69, 0xcb, 0x89, 0x6c, 0xcc, 0x34, + 0xc1, 0x8b, 0xa3, 0xd7, 0x94, 0x73, 0x48, 0xc9, 0xdb, 0xf5, 0x9a, 0xe0, 0xd4, 0xfd, 0x92, 0xf2, + 0x4d, 0x70, 0x32, 0x17, 0xb5, 0x2b, 0x8a, 0x12, 0x3f, 0xb3, 0x8c, 0xe9, 0x29, 0x79, 0xa7, 0xe6, + 0x8a, 0xe2, 0x81, 0x6a, 0x45, 0xf1, 0x01, 0xdc, 0x43, 0x37, 0xb5, 0x90, 0x2c, 0x22, 0xc4, 0xaa, + 0x3e, 0x9c, 0xa7, 0x3a, 0x31, 0xc9, 0x4e, 0x53, 0x92, 0xa6, 0x93, 0xe6, 0x8d, 0x30, 0x9e, 0x84, + 0x79, 0x24, 0x24, 0x90, 0x77, 0xeb, 0x75, 0xf2, 0xa0, 0x84, 0x8e, 0x0d, 0xe3, 0x3b, 0x99, 0xce, + 0xc4, 0xec, 0xe2, 0x40, 0x79, 0x52, 0xd0, 0x04, 0xc8, 0x46, 0xcd, 0xc5, 0xc1, 0xe5, 0x57, 0x8b, + 0x83, 0xbb, 0xc7, 0xcf, 0x50, 0x83, 0xc9, 0x70, 0x9c, 0x8a, 0xe8, 0x0d, 0xb9, 0x57, 0x6f, 0x49, + 0x1f, 0xc8, 0x3d, 0x93, 0xee, 0x97, 0x74, 0x56, 0xde, 0x9a, 0xe9, 0x46, 0x82, 0x6b, 0xe0, 0x3a, + 0x4c, 0xe9, 0x18, 0x52, 0xb2, 0x59, 0x6f, 0xba, 0xfd, 0x12, 0x3a, 0x30, 0x8c, 0x9f, 0x6e, 0x34, + 0x13, 0x33, 0x9b, 0x4e, 0x44, 0x95, 0x32, 0x7b, 0xe7, 0x7b, 0xf5, 0x2a, 0xec, 0x97, 0xe9, 0xbe, + 0x42, 0x47, 0x9b, 0x0a, 0xed, 0x7f, 0xc2, 0xb8, 0x06, 0x05, 0xb9, 0x26, 0xf7, 0xeb, 0x55, 0x68, + 0xfe, 0x95, 0x81, 0x63, 0x7c, 0x85, 0xc5, 0x4c, 0xcc, 0x54, 0x78, 0x0e, 0x63, 0x69, 0xde, 0xc7, + 0xfb, 0xf5, 0x2a, 0x7c, 0x59, 0xa6, 0xfb, 0x0a, 0x1d, 0x8d, 0x27, 0x88, 0x08, 0x09, 0x8a, 0x96, + 0x1f, 0xcd, 0x34, 0xd7, 0x90, 0x85, 0x13, 0x50, 0xb9, 0xd9, 0x06, 0xb6, 0xac, 0xb9, 0x3b, 0xcf, + 0xfc, 0x9d, 0xe7, 0x8f, 0x2d, 0x3e, 0x2a, 0x69, 0xf7, 0xa0, 0x75, 0xf1, 0x8f, 0xa3, 0xa6, 0x33, + 0x6e, 0x5b, 0x73, 0x3b, 0xc5, 0x76, 0xbd, 0xce, 0x94, 0x3b, 0xda, 0xa5, 0xfd, 0xa2, 0x95, 0xcd, + 0xc4, 0xf0, 0x2b, 0xb4, 0xea, 0xf7, 0xa1, 0x30, 0x51, 0xa2, 0x90, 0xe4, 0x81, 0x35, 0x77, 0xea, + 0xee, 0x66, 0xfb, 0x06, 0x72, 0xea, 0x95, 0x74, 0x36, 0xb8, 0xd7, 0x44, 0xcb, 0xd5, 0x39, 0x6b, + 0xef, 0x97, 0x45, 0xf4, 0x30, 0x12, 0x59, 0x30, 0xf7, 0x94, 0xbb, 0xb7, 0x7e, 0xe5, 0x04, 0x79, + 0x64, 0x8e, 0x51, 0x47, 0x0b, 0xaf, 0x9e, 0x3b, 0x38, 0x11, 0xe6, 0xf7, 0x08, 0x84, 0x4a, 0xda, + 0x09, 0x70, 0x7b, 0xc8, 0xf2, 0x07, 0x5c, 0xc9, 0xf2, 0x7f, 0x39, 0x7d, 0x7f, 0x55, 0x5d, 0xfd, + 0xb6, 0xf8, 0xff, 0xfd, 0x5e, 0xef, 0xf7, 0xc5, 0xed, 0xfd, 0x52, 0xd9, 0x8b, 0xf3, 0xa0, 0xbc, + 0x34, 0x57, 0xa3, 0x4e, 0x30, 0xf4, 0x99, 0x7f, 0xf8, 0x9c, 0xd3, 0x5e, 0x9c, 0x9f, 0x56, 0x39, + 0xa7, 0xa3, 0xce, 0x69, 0x95, 0xf3, 0xe7, 0xe2, 0xc3, 0x72, 0xa0, 0xdb, 0xed, 0xc5, 0x79, 0xb7, + 0x5b, 0x65, 0x75, 0xbb, 0xa3, 0x4e, 0xb7, 0x5b, 0xe5, 0x8d, 0x97, 0x6c, 0xb1, 0x9f, 0xfd, 0x15, + 0x00, 0x00, 0xff, 0xff, 0x1d, 0x03, 0xae, 0x82, 0x29, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion_simulation.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion_simulation.pb.go new file mode 100644 index 0000000..af9a41e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_criterion_simulation.pb.go @@ -0,0 +1,257 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_criterion_simulation.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign criterion simulation. Supported combinations of advertising +// channel type, criterion ids, simulation type and simulation modification +// method is detailed below respectively. +// +// SEARCH 30000,30001,30002 BID_MODIFIER UNIFORM +// DISPLAY 30001 BID_MODIFIER UNIFORM +type CampaignCriterionSimulation struct { + // The resource name of the campaign criterion simulation. + // Campaign criterion simulation resource names have the form: + // + // + // `customers/{customer_id}/campaignCriterionSimulations/{campaign_id}~{criterion_id}~{type}~{modification_method}~{start_date}~{end_date}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Campaign ID of the simulation. + CampaignId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=campaign_id,json=campaignId,proto3" json:"campaign_id,omitempty"` + // Criterion ID of the simulation. + CriterionId *wrappers.Int64Value `protobuf:"bytes,3,opt,name=criterion_id,json=criterionId,proto3" json:"criterion_id,omitempty"` + // The field that the simulation modifies. + Type enums.SimulationTypeEnum_SimulationType `protobuf:"varint,4,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.SimulationTypeEnum_SimulationType" json:"type,omitempty"` + // How the simulation modifies the field. + ModificationMethod enums.SimulationModificationMethodEnum_SimulationModificationMethod `protobuf:"varint,5,opt,name=modification_method,json=modificationMethod,proto3,enum=google.ads.googleads.v1.enums.SimulationModificationMethodEnum_SimulationModificationMethod" json:"modification_method,omitempty"` + // First day on which the simulation is based, in YYYY-MM-DD format. + StartDate *wrappers.StringValue `protobuf:"bytes,6,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Last day on which the simulation is based, in YYYY-MM-DD format. + EndDate *wrappers.StringValue `protobuf:"bytes,7,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // List of simulation points. 
+ // + // Types that are valid to be assigned to PointList: + // *CampaignCriterionSimulation_BidModifierPointList + PointList isCampaignCriterionSimulation_PointList `protobuf_oneof:"point_list"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignCriterionSimulation) Reset() { *m = CampaignCriterionSimulation{} } +func (m *CampaignCriterionSimulation) String() string { return proto.CompactTextString(m) } +func (*CampaignCriterionSimulation) ProtoMessage() {} +func (*CampaignCriterionSimulation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_simulation_7330cdca86aa0af3, []int{0} +} +func (m *CampaignCriterionSimulation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignCriterionSimulation.Unmarshal(m, b) +} +func (m *CampaignCriterionSimulation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignCriterionSimulation.Marshal(b, m, deterministic) +} +func (dst *CampaignCriterionSimulation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignCriterionSimulation.Merge(dst, src) +} +func (m *CampaignCriterionSimulation) XXX_Size() int { + return xxx_messageInfo_CampaignCriterionSimulation.Size(m) +} +func (m *CampaignCriterionSimulation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignCriterionSimulation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignCriterionSimulation proto.InternalMessageInfo + +func (m *CampaignCriterionSimulation) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignCriterionSimulation) GetCampaignId() *wrappers.Int64Value { + if m != nil { + return m.CampaignId + } + return nil +} + +func (m *CampaignCriterionSimulation) GetCriterionId() *wrappers.Int64Value { + if m != nil { + return m.CriterionId + } + return nil +} + +func (m *CampaignCriterionSimulation) GetType() enums.SimulationTypeEnum_SimulationType { + if m != nil { + return m.Type + } + return enums.SimulationTypeEnum_UNSPECIFIED +} + +func (m *CampaignCriterionSimulation) GetModificationMethod() enums.SimulationModificationMethodEnum_SimulationModificationMethod { + if m != nil { + return m.ModificationMethod + } + return enums.SimulationModificationMethodEnum_UNSPECIFIED +} + +func (m *CampaignCriterionSimulation) GetStartDate() *wrappers.StringValue { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *CampaignCriterionSimulation) GetEndDate() *wrappers.StringValue { + if m != nil { + return m.EndDate + } + return nil +} + +type isCampaignCriterionSimulation_PointList interface { + isCampaignCriterionSimulation_PointList() +} + +type CampaignCriterionSimulation_BidModifierPointList struct { + BidModifierPointList *common.BidModifierSimulationPointList `protobuf:"bytes,8,opt,name=bid_modifier_point_list,json=bidModifierPointList,proto3,oneof"` +} + +func (*CampaignCriterionSimulation_BidModifierPointList) isCampaignCriterionSimulation_PointList() {} + +func (m *CampaignCriterionSimulation) GetPointList() isCampaignCriterionSimulation_PointList { + if m != nil { + return m.PointList + } + return nil +} + +func (m *CampaignCriterionSimulation) GetBidModifierPointList() *common.BidModifierSimulationPointList { + if x, ok := m.GetPointList().(*CampaignCriterionSimulation_BidModifierPointList); ok { + return x.BidModifierPointList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignCriterionSimulation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignCriterionSimulation_OneofMarshaler, _CampaignCriterionSimulation_OneofUnmarshaler, _CampaignCriterionSimulation_OneofSizer, []interface{}{ + (*CampaignCriterionSimulation_BidModifierPointList)(nil), + } +} + +func _CampaignCriterionSimulation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignCriterionSimulation) + // point_list + switch x := m.PointList.(type) { + case *CampaignCriterionSimulation_BidModifierPointList: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BidModifierPointList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CampaignCriterionSimulation.PointList has unexpected type %T", x) + } + return nil +} + +func _CampaignCriterionSimulation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignCriterionSimulation) + switch tag { + case 8: // point_list.bid_modifier_point_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.BidModifierSimulationPointList) + err := b.DecodeMessage(msg) + m.PointList = &CampaignCriterionSimulation_BidModifierPointList{msg} + return true, err + default: + return false, nil + } +} + +func _CampaignCriterionSimulation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignCriterionSimulation) + // point_list + switch x := m.PointList.(type) { + case *CampaignCriterionSimulation_BidModifierPointList: + s := proto.Size(x.BidModifierPointList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*CampaignCriterionSimulation)(nil), "google.ads.googleads.v1.resources.CampaignCriterionSimulation") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_criterion_simulation.proto", fileDescriptor_campaign_criterion_simulation_7330cdca86aa0af3) +} + +var fileDescriptor_campaign_criterion_simulation_7330cdca86aa0af3 = []byte{ + // 556 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xdf, 0x6a, 0xdb, 0x30, + 0x14, 0xc6, 0xe7, 0xf4, 0xbf, 0x9a, 0xed, 0xc2, 0x1b, 0xcc, 0xb4, 0x65, 0xa4, 0x1b, 0x85, 0x5c, + 0xc9, 0xa4, 0x1d, 0x1b, 0xb8, 0xa3, 0x2c, 0xe9, 0x4a, 0x97, 0xb1, 0x8c, 0xe0, 0x96, 0x5c, 0x8c, + 0x80, 0x51, 0x2c, 0xd5, 0x13, 0x44, 0x92, 0x91, 0xe4, 0x96, 0x3e, 0x44, 0x5f, 0x62, 0xbb, 0xdb, + 0xa3, 0xec, 0x51, 0xf6, 0x14, 0x23, 0xb2, 0xac, 0x94, 0x64, 0x69, 0x73, 0x77, 0x7c, 0xf4, 0x7d, + 0xbf, 0x63, 0x7d, 0xb2, 0x0c, 0xce, 0x32, 0x21, 0xb2, 0x31, 0x09, 0x11, 0x56, 0x61, 0x59, 0x4e, + 0xaa, 0xeb, 0x56, 0x28, 0x89, 0x12, 0x85, 0x4c, 0x89, 0x0a, 0x53, 0xc4, 0x72, 0x44, 0x33, 0x9e, + 0xa4, 0x92, 0x6a, 0x22, 0xa9, 0xe0, 0x89, 0xa2, 0xac, 0x18, 0x23, 0x4d, 0x05, 0x87, 0xb9, 0x14, + 0x5a, 0xf8, 0xfb, 0xa5, 0x17, 0x22, 0xac, 0xa0, 0xc3, 0xc0, 0xeb, 0x16, 0x74, 0x98, 0x9d, 0x70, + 0xd1, 0xa4, 0x54, 0x30, 0x26, 0x78, 0x38, 0xcb, 0xdc, 0xe9, 0x2c, 0x32, 0x10, 0x5e, 0x30, 0x75, + 0x4f, 0x9f, 0x30, 0x81, 0xe9, 0x15, 0x4d, 0xed, 0x03, 0xd1, 0x3f, 0x04, 0xb6, 0x8c, 0xa3, 0xa5, + 0x19, 0xfa, 0x36, 0x27, 0xd6, 0xf4, 0xca, 0x9a, 0xcc, 0xd3, 0xa8, 0xb8, 0x0a, 0x6f, 0x24, 0xca, + 0x73, 0x22, 0x95, 0x5d, 
0xdf, 0xab, 0xa0, 0x39, 0x0d, 0x11, 0xe7, 0x42, 0x1b, 0x82, 0x5d, 0x7d, + 0xfd, 0x6b, 0x0d, 0xec, 0x9e, 0xda, 0xc8, 0x4e, 0xab, 0xc4, 0x2e, 0xdc, 0x20, 0xff, 0x0d, 0x78, + 0x5a, 0x85, 0x92, 0x70, 0xc4, 0x48, 0xe0, 0x35, 0xbc, 0xe6, 0x56, 0x5c, 0xaf, 0x9a, 0xdf, 0x10, + 0x23, 0xfe, 0x07, 0xb0, 0xed, 0x62, 0xa7, 0x38, 0xa8, 0x35, 0xbc, 0xe6, 0xf6, 0xe1, 0xae, 0x8d, + 0x16, 0x56, 0x2f, 0x06, 0xbb, 0x5c, 0xbf, 0x7b, 0x3b, 0x40, 0xe3, 0x82, 0xc4, 0xa0, 0xd2, 0x77, + 0xb1, 0x7f, 0x02, 0xea, 0xd3, 0xb3, 0xa2, 0x38, 0x58, 0x79, 0xdc, 0xbe, 0xed, 0x0c, 0x5d, 0xec, + 0x5f, 0x82, 0xd5, 0x49, 0x1c, 0xc1, 0x6a, 0xc3, 0x6b, 0x3e, 0x3b, 0xfc, 0x08, 0x17, 0x1d, 0xae, + 0x09, 0x11, 0x4e, 0xf7, 0x76, 0x79, 0x9b, 0x93, 0x33, 0x5e, 0xb0, 0x99, 0x56, 0x6c, 0x68, 0xfe, + 0x9d, 0x07, 0x9e, 0xff, 0xe7, 0xa4, 0x82, 0x35, 0x33, 0x65, 0xb8, 0xf4, 0x94, 0xde, 0x3d, 0x46, + 0xcf, 0x20, 0x66, 0x66, 0xce, 0x0b, 0x62, 0x9f, 0xcd, 0xf5, 0xfc, 0x63, 0x00, 0x94, 0x46, 0x52, + 0x27, 0x18, 0x69, 0x12, 0xac, 0x9b, 0x8c, 0xf6, 0xe6, 0x32, 0xba, 0xd0, 0x92, 0xf2, 0xac, 0x0c, + 0x69, 0xcb, 0xe8, 0x3f, 0x21, 0x4d, 0xfc, 0xf7, 0x60, 0x93, 0x70, 0x5c, 0x5a, 0x37, 0x96, 0xb0, + 0x6e, 0x10, 0x8e, 0x8d, 0xf1, 0x06, 0xbc, 0x1c, 0x51, 0x6c, 0x3f, 0x59, 0x22, 0x93, 0x5c, 0x50, + 0xae, 0x93, 0x31, 0x55, 0x3a, 0xd8, 0x34, 0x9c, 0x93, 0x85, 0x41, 0x94, 0x17, 0x05, 0x76, 0x28, + 0xee, 0x59, 0xf7, 0x74, 0xcf, 0xfd, 0x09, 0xe6, 0x2b, 0x55, 0xfa, 0xf3, 0x93, 0xf8, 0xc5, 0x68, + 0xaa, 0x70, 0xfd, 0x4e, 0x1d, 0x80, 0xe9, 0xac, 0xce, 0x5d, 0x0d, 0x1c, 0xa4, 0x82, 0xc1, 0x47, + 0xef, 0x6d, 0xa7, 0xf1, 0xc0, 0xc7, 0xdc, 0x9f, 0x6c, 0xb5, 0xef, 0x7d, 0xff, 0x62, 0x31, 0x99, + 0x18, 0x23, 0x9e, 0x41, 0x21, 0xb3, 0x30, 0x23, 0xdc, 0x04, 0x51, 0x5d, 0xbb, 0x9c, 0xaa, 0x07, + 0x7e, 0x32, 0xc7, 0xae, 0xfa, 0x59, 0x5b, 0x39, 0x6f, 0xb7, 0x7f, 0xd7, 0xf6, 0xcf, 0x4b, 0x64, + 0x1b, 0x2b, 0x58, 0x96, 0x93, 0x6a, 0xd0, 0x82, 0x71, 0xa5, 0xfc, 0x53, 0x69, 0x86, 0x6d, 0xac, + 0x86, 0x4e, 0x33, 0x1c, 0xb4, 0x86, 0x4e, 0xf3, 0xb7, 0x76, 0x50, 0x2e, 0x44, 0x51, 0x1b, 0xab, + 0x28, 0x72, 0xaa, 0x28, 0x1a, 0xb4, 0xa2, 0xc8, 0xe9, 0x46, 0xeb, 0xe6, 0x65, 0x8f, 0xfe, 0x05, + 0x00, 0x00, 0xff, 0xff, 0xb8, 0x24, 0xd5, 0x5b, 0x10, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_extension_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_extension_setting.pb.go new file mode 100644 index 0000000..efe8e48 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_extension_setting.pb.go @@ -0,0 +1,149 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_extension_setting.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign extension setting. +type CampaignExtensionSetting struct { + // The resource name of the campaign extension setting. + // CampaignExtensionSetting resource names have the form: + // + // + // `customers/{customer_id}/campaignExtensionSettings/{campaign_id}~{extension_type}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The extension type of the customer extension setting. + ExtensionType enums.ExtensionTypeEnum_ExtensionType `protobuf:"varint,2,opt,name=extension_type,json=extensionType,proto3,enum=google.ads.googleads.v1.enums.ExtensionTypeEnum_ExtensionType" json:"extension_type,omitempty"` + // The resource name of the campaign. The linked extension feed items will + // serve under this campaign. + // Campaign resource names have the form: + // + // `customers/{customer_id}/campaigns/{campaign_id}` + Campaign *wrappers.StringValue `protobuf:"bytes,3,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The resource names of the extension feed items to serve under the campaign. + // ExtensionFeedItem resource names have the form: + // + // `customers/{customer_id}/extensionFeedItems/{feed_item_id}` + ExtensionFeedItems []*wrappers.StringValue `protobuf:"bytes,4,rep,name=extension_feed_items,json=extensionFeedItems,proto3" json:"extension_feed_items,omitempty"` + // The device for which the extensions will serve. Optional. + Device enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice `protobuf:"varint,5,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice" json:"device,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignExtensionSetting) Reset() { *m = CampaignExtensionSetting{} } +func (m *CampaignExtensionSetting) String() string { return proto.CompactTextString(m) } +func (*CampaignExtensionSetting) ProtoMessage() {} +func (*CampaignExtensionSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_b38dd0ac0ea02cbd, []int{0} +} +func (m *CampaignExtensionSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignExtensionSetting.Unmarshal(m, b) +} +func (m *CampaignExtensionSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignExtensionSetting.Marshal(b, m, deterministic) +} +func (dst *CampaignExtensionSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignExtensionSetting.Merge(dst, src) +} +func (m *CampaignExtensionSetting) XXX_Size() int { + return xxx_messageInfo_CampaignExtensionSetting.Size(m) +} +func (m *CampaignExtensionSetting) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignExtensionSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignExtensionSetting proto.InternalMessageInfo + +func (m *CampaignExtensionSetting) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignExtensionSetting) GetExtensionType() enums.ExtensionTypeEnum_ExtensionType { + if m != nil { + return m.ExtensionType + } + return enums.ExtensionTypeEnum_UNSPECIFIED +} + +func (m *CampaignExtensionSetting) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignExtensionSetting) GetExtensionFeedItems() []*wrappers.StringValue { + if m != nil { + return 
m.ExtensionFeedItems + } + return nil +} + +func (m *CampaignExtensionSetting) GetDevice() enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice { + if m != nil { + return m.Device + } + return enums.ExtensionSettingDeviceEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CampaignExtensionSetting)(nil), "google.ads.googleads.v1.resources.CampaignExtensionSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_extension_setting.proto", fileDescriptor_campaign_extension_setting_b38dd0ac0ea02cbd) +} + +var fileDescriptor_campaign_extension_setting_b38dd0ac0ea02cbd = []byte{ + // 445 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xd1, 0x6e, 0xd3, 0x30, + 0x14, 0x55, 0x52, 0x98, 0x20, 0xb0, 0x3d, 0x44, 0x3c, 0x44, 0xd3, 0x40, 0x1d, 0x68, 0x52, 0x9f, + 0x6c, 0xa5, 0xbc, 0x20, 0x83, 0x90, 0x52, 0x18, 0x13, 0x3c, 0x4c, 0x55, 0x86, 0xfa, 0x80, 0x2a, + 0x45, 0x5e, 0x7d, 0x67, 0x2c, 0x35, 0xb6, 0x15, 0x3b, 0x85, 0xfd, 0x02, 0x9f, 0xc0, 0x23, 0x8f, + 0x7c, 0x0a, 0x9f, 0xc2, 0x57, 0xa0, 0xc6, 0xb1, 0xa1, 0x42, 0xdd, 0xfa, 0x76, 0xe2, 0x7b, 0xce, + 0xb9, 0xf7, 0xf8, 0x3a, 0xc9, 0x84, 0x2b, 0xc5, 0x97, 0x80, 0x29, 0x33, 0xd8, 0xc1, 0x35, 0x5a, + 0xe5, 0xb8, 0x01, 0xa3, 0xda, 0x66, 0x01, 0x06, 0x2f, 0x68, 0xad, 0xa9, 0xe0, 0xb2, 0x82, 0xaf, + 0x16, 0xa4, 0x11, 0x4a, 0x56, 0x06, 0xac, 0x15, 0x92, 0x23, 0xdd, 0x28, 0xab, 0xd2, 0x63, 0x27, + 0x44, 0x94, 0x19, 0x14, 0x3c, 0xd0, 0x2a, 0x47, 0xc1, 0xe3, 0xf0, 0xd5, 0xb6, 0x36, 0x20, 0xdb, + 0xda, 0xe0, 0xff, 0x9c, 0x2b, 0x06, 0x2b, 0xb1, 0x00, 0xd7, 0xe0, 0x70, 0xbc, 0xab, 0xda, 0x5e, + 0x6b, 0xaf, 0x79, 0xd2, 0x6b, 0xba, 0xaf, 0xcb, 0xf6, 0x0a, 0x7f, 0x69, 0xa8, 0xd6, 0xd0, 0x98, + 0xbe, 0x7e, 0xe4, 0x3d, 0xb5, 0xc0, 0x54, 0x4a, 0x65, 0xa9, 0x15, 0x4a, 0xf6, 0xd5, 0xa7, 0xdf, + 0x07, 0x49, 0xf6, 0xa6, 0xcf, 0x7d, 0xea, 0xed, 0x2f, 0xdc, 0x6c, 0xe9, 0xb3, 0x64, 0xdf, 0x27, + 0xab, 0x24, 0xad, 0x21, 0x8b, 0x86, 0xd1, 0xe8, 0x7e, 0xf9, 0xd0, 0x1f, 0x9e, 0xd3, 0x1a, 0x52, + 0x48, 0x0e, 0x36, 0xe7, 0xca, 0xe2, 0x61, 0x34, 0x3a, 0x18, 0xbf, 0x46, 0xdb, 0x6e, 0xab, 0x0b, + 0x83, 0x42, 0xb7, 0x8f, 0xd7, 0x1a, 0x4e, 0x65, 0x5b, 0x6f, 0x9e, 0x94, 0xfb, 0xf0, 0xef, 0x67, + 0xfa, 0x22, 0xb9, 0xe7, 0xf7, 0x93, 0x0d, 0x86, 0xd1, 0xe8, 0xc1, 0xf8, 0xc8, 0x37, 0xf0, 0xc9, + 0xd1, 0x85, 0x6d, 0x84, 0xe4, 0x33, 0xba, 0x6c, 0xa1, 0x0c, 0xec, 0xf4, 0x3c, 0x79, 0xf4, 0x77, + 0xc0, 0x2b, 0x00, 0x56, 0x09, 0x0b, 0xb5, 0xc9, 0xee, 0x0c, 0x07, 0xb7, 0xba, 0xa4, 0x41, 0xf9, + 0x0e, 0x80, 0xbd, 0x5f, 0xeb, 0xd2, 0xcf, 0xc9, 0x9e, 0x5b, 0x5a, 0x76, 0xb7, 0x0b, 0x3a, 0xdd, + 0x35, 0x68, 0x7f, 0xad, 0x6f, 0x3b, 0xf1, 0x66, 0xe2, 0x8d, 0x52, 0xd9, 0xfb, 0x4f, 0xbe, 0xc5, + 0xc9, 0xc9, 0x42, 0xd5, 0xe8, 0xd6, 0x67, 0x37, 0x79, 0xbc, 0x6d, 0x87, 0xd3, 0x75, 0xaa, 0x69, + 0xf4, 0xe9, 0x43, 0xef, 0xc1, 0xd5, 0x92, 0x4a, 0x8e, 0x54, 0xc3, 0x31, 0x07, 0xd9, 0x65, 0xf6, + 0x2f, 0x4d, 0x0b, 0x73, 0xc3, 0xdf, 0xf1, 0x32, 0xa0, 0x1f, 0xf1, 0xe0, 0xac, 0x28, 0x7e, 0xc6, + 0xc7, 0x67, 0xce, 0xb2, 0x60, 0x06, 0x39, 0xb8, 0x46, 0xb3, 0x1c, 0x95, 0x9e, 0xf9, 0xcb, 0x73, + 0xe6, 0x05, 0x33, 0xf3, 0xc0, 0x99, 0xcf, 0xf2, 0x79, 0xe0, 0xfc, 0x8e, 0x4f, 0x5c, 0x81, 0x90, + 0x82, 0x19, 0x42, 0x02, 0x8b, 0x90, 0x59, 0x4e, 0x48, 0xe0, 0x5d, 0xee, 0x75, 0xc3, 0x3e, 0xff, + 0x13, 0x00, 0x00, 0xff, 0xff, 0x40, 0x38, 0xd0, 0xe6, 0xc9, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_feed.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_feed.pb.go new file mode 100644 index 0000000..1767f58 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_feed.pb.go @@ -0,0 +1,158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_feed.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A campaign feed. +type CampaignFeed struct { + // The resource name of the campaign feed. + // Campaign feed resource names have the form: + // + // `customers/{customer_id}/campaignFeeds/{campaign_id}~{feed_id} + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed to which the CampaignFeed belongs. + Feed *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed,proto3" json:"feed,omitempty"` + // The campaign to which the CampaignFeed belongs. + Campaign *wrappers.StringValue `protobuf:"bytes,3,opt,name=campaign,proto3" json:"campaign,omitempty"` + // Indicates which placeholder types the feed may populate under the connected + // campaign. Required. + PlaceholderTypes []enums.PlaceholderTypeEnum_PlaceholderType `protobuf:"varint,4,rep,packed,name=placeholder_types,json=placeholderTypes,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType" json:"placeholder_types,omitempty"` + // Matching function associated with the CampaignFeed. + // The matching function is used to filter the set of feed items selected. + // Required. + MatchingFunction *common.MatchingFunction `protobuf:"bytes,5,opt,name=matching_function,json=matchingFunction,proto3" json:"matching_function,omitempty"` + // Status of the campaign feed. + // This field is read-only. 
+ Status enums.FeedLinkStatusEnum_FeedLinkStatus `protobuf:"varint,6,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedLinkStatusEnum_FeedLinkStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignFeed) Reset() { *m = CampaignFeed{} } +func (m *CampaignFeed) String() string { return proto.CompactTextString(m) } +func (*CampaignFeed) ProtoMessage() {} +func (*CampaignFeed) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_3a6668ca9a325d53, []int{0} +} +func (m *CampaignFeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignFeed.Unmarshal(m, b) +} +func (m *CampaignFeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignFeed.Marshal(b, m, deterministic) +} +func (dst *CampaignFeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignFeed.Merge(dst, src) +} +func (m *CampaignFeed) XXX_Size() int { + return xxx_messageInfo_CampaignFeed.Size(m) +} +func (m *CampaignFeed) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignFeed.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignFeed proto.InternalMessageInfo + +func (m *CampaignFeed) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignFeed) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *CampaignFeed) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignFeed) GetPlaceholderTypes() []enums.PlaceholderTypeEnum_PlaceholderType { + if m != nil { + return m.PlaceholderTypes + } + return nil +} + +func (m *CampaignFeed) GetMatchingFunction() *common.MatchingFunction { + if m != nil { + return m.MatchingFunction + } + return nil +} + +func (m *CampaignFeed) GetStatus() enums.FeedLinkStatusEnum_FeedLinkStatus { + if m != nil { + return m.Status + } + return enums.FeedLinkStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CampaignFeed)(nil), "google.ads.googleads.v1.resources.CampaignFeed") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_feed.proto", fileDescriptor_campaign_feed_3a6668ca9a325d53) +} + +var fileDescriptor_campaign_feed_3a6668ca9a325d53 = []byte{ + // 485 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xdd, 0x6a, 0xd4, 0x40, + 0x14, 0xc7, 0xc9, 0x6e, 0x5d, 0x74, 0xac, 0xa5, 0x9b, 0xab, 0x50, 0x8a, 0x6c, 0x95, 0xc2, 0x5e, + 0xcd, 0x34, 0xeb, 0x07, 0x12, 0x6f, 0xcc, 0x8a, 0x2d, 0x88, 0xca, 0x92, 0xca, 0x22, 0xb2, 0x12, + 0xa6, 0xc9, 0xd9, 0x69, 0x68, 0xe6, 0x83, 0x4c, 0x52, 0xe9, 0xeb, 0x78, 0xe9, 0x9b, 0xe8, 0xa3, + 0xf8, 0x10, 0x22, 0x49, 0x66, 0xa2, 0x5d, 0xd9, 0x76, 0xef, 0xce, 0x9e, 0xf9, 0xff, 0xce, 0x9e, + 0x73, 0xfe, 0x27, 0xe8, 0x19, 0x93, 0x92, 0xe5, 0x40, 0x68, 0xaa, 0x49, 0x1b, 0xd6, 0xd1, 0xa5, + 0x4f, 0x0a, 0xd0, 0xb2, 0x2a, 0x12, 0xd0, 0x24, 0xa1, 0x5c, 0xd1, 0x8c, 0x89, 0x78, 0x09, 0x90, + 0x62, 0x55, 0xc8, 0x52, 0xba, 0x07, 0xad, 0x16, 0xd3, 0x54, 0xe3, 0x0e, 0xc3, 0x97, 0x3e, 0xee, + 0xb0, 0xbd, 0xe7, 0xeb, 0x2a, 0x27, 0x92, 0x73, 0x29, 0x08, 0xa7, 0x65, 0x72, 0x9e, 0x09, 0x16, + 0x2f, 0x2b, 0x91, 0x94, 0x99, 0x14, 0x6d, 0xe9, 0xbd, 0xa7, 0xeb, 0x38, 0x10, 0x15, 0xd7, 0xa4, + 0x6e, 0x22, 0xce, 0x33, 0x71, 0x11, 0xeb, 0x92, 0x96, 0x95, 0xde, 0x8c, 0x52, 0x39, 0x4d, 0xe0, + 0x5c, 0xe6, 0x29, 0x14, 0x71, 0x79, 0xa5, 0xc0, 0x50, 0x0f, 0x0d, 0xd5, 
0xfc, 0x3a, 0xab, 0x96, + 0xe4, 0x6b, 0x41, 0x95, 0x82, 0xc2, 0x56, 0xdd, 0xb7, 0x55, 0x55, 0x46, 0xa8, 0x10, 0xb2, 0xa4, + 0x75, 0xa3, 0xe6, 0xf5, 0xd1, 0x8f, 0x3e, 0xda, 0x7e, 0x6d, 0x96, 0x73, 0x0c, 0x90, 0xba, 0x8f, + 0xd1, 0x03, 0x3b, 0x7f, 0x2c, 0x28, 0x07, 0xcf, 0x19, 0x39, 0xe3, 0x7b, 0xd1, 0xb6, 0x4d, 0x7e, + 0xa0, 0x1c, 0xdc, 0x23, 0xb4, 0x55, 0xcf, 0xe0, 0xf5, 0x46, 0xce, 0xf8, 0xfe, 0x64, 0xdf, 0xac, + 0x0f, 0xdb, 0x16, 0xf0, 0x69, 0x59, 0x64, 0x82, 0xcd, 0x69, 0x5e, 0x41, 0xd4, 0x28, 0xdd, 0x17, + 0xe8, 0xae, 0xf5, 0xc0, 0xeb, 0x6f, 0x40, 0x75, 0x6a, 0x57, 0xa2, 0xe1, 0xea, 0xe4, 0xda, 0xdb, + 0x1a, 0xf5, 0xc7, 0x3b, 0x93, 0x29, 0x5e, 0x67, 0x61, 0xb3, 0x31, 0x3c, 0xfb, 0xcb, 0x7d, 0xbc, + 0x52, 0xf0, 0x46, 0x54, 0x7c, 0x35, 0x17, 0xed, 0xaa, 0xeb, 0x09, 0xed, 0x7e, 0x41, 0xc3, 0xff, + 0x7c, 0xf5, 0xee, 0x34, 0x3d, 0x1f, 0xad, 0xfd, 0xc3, 0xf6, 0x20, 0xf0, 0x7b, 0x03, 0x1e, 0x1b, + 0x2e, 0xda, 0xe5, 0x2b, 0x19, 0xf7, 0x13, 0x1a, 0xb4, 0xae, 0x7b, 0x83, 0x91, 0x33, 0xde, 0x99, + 0xbc, 0xba, 0x65, 0x88, 0xda, 0x95, 0x77, 0x99, 0xb8, 0x38, 0x6d, 0xa0, 0x66, 0x86, 0xeb, 0xa9, + 0xc8, 0xd4, 0x9b, 0xfe, 0x76, 0xd0, 0x61, 0x22, 0x39, 0xbe, 0xf5, 0xae, 0xa7, 0xc3, 0x7f, 0x2d, + 0x9f, 0xd5, 0xfb, 0x9f, 0x39, 0x9f, 0xdf, 0x1a, 0x8e, 0xc9, 0x9c, 0x0a, 0x86, 0x65, 0xc1, 0x08, + 0x03, 0xd1, 0xb8, 0x63, 0xcf, 0x51, 0x65, 0xfa, 0x86, 0xaf, 0xec, 0x65, 0x17, 0x7d, 0xeb, 0xf5, + 0x4f, 0xc2, 0xf0, 0x7b, 0xef, 0xe0, 0xa4, 0x2d, 0x19, 0xa6, 0x1a, 0xb7, 0x61, 0x1d, 0xcd, 0x7d, + 0x1c, 0x59, 0xe5, 0x4f, 0xab, 0x59, 0x84, 0xa9, 0x5e, 0x74, 0x9a, 0xc5, 0xdc, 0x5f, 0x74, 0x9a, + 0x5f, 0xbd, 0xc3, 0xf6, 0x21, 0x08, 0xc2, 0x54, 0x07, 0x41, 0xa7, 0x0a, 0x82, 0xb9, 0x1f, 0x04, + 0x9d, 0xee, 0x6c, 0xd0, 0x34, 0xfb, 0xe4, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa3, 0xf6, 0xe4, + 0x5c, 0x11, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_label.pb.go new file mode 100644 index 0000000..5600ba0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_label.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a relationship between a campaign and a label. +type CampaignLabel struct { + // Name of the resource. + // Campaign label resource names have the form: + // `customers/{customer_id}/campaignLabels/{campaign_id}~{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The campaign to which the label is attached. 
+ Campaign *wrappers.StringValue `protobuf:"bytes,2,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The label assigned to the campaign. + Label *wrappers.StringValue `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignLabel) Reset() { *m = CampaignLabel{} } +func (m *CampaignLabel) String() string { return proto.CompactTextString(m) } +func (*CampaignLabel) ProtoMessage() {} +func (*CampaignLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_866cb6c3a9d00bde, []int{0} +} +func (m *CampaignLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignLabel.Unmarshal(m, b) +} +func (m *CampaignLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignLabel.Marshal(b, m, deterministic) +} +func (dst *CampaignLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignLabel.Merge(dst, src) +} +func (m *CampaignLabel) XXX_Size() int { + return xxx_messageInfo_CampaignLabel.Size(m) +} +func (m *CampaignLabel) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignLabel proto.InternalMessageInfo + +func (m *CampaignLabel) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignLabel) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignLabel) GetLabel() *wrappers.StringValue { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*CampaignLabel)(nil), "google.ads.googleads.v1.resources.CampaignLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_label.proto", fileDescriptor_campaign_label_866cb6c3a9d00bde) +} + +var fileDescriptor_campaign_label_866cb6c3a9d00bde = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcf, 0x4a, 0xc3, 0x30, + 0x1c, 0xc7, 0x69, 0x87, 0xa2, 0xd5, 0x5d, 0x7a, 0x1a, 0x63, 0xc8, 0xa6, 0x0c, 0x76, 0x4a, 0xe8, + 0x04, 0x91, 0x78, 0xea, 0x3c, 0x0c, 0x44, 0x64, 0x4c, 0xe8, 0x41, 0x0a, 0xe3, 0xb7, 0x35, 0x86, + 0x42, 0x9b, 0x84, 0xa4, 0x9d, 0xef, 0xb3, 0xa3, 0x8f, 0xe2, 0xa3, 0xf8, 0x12, 0x4a, 0x9b, 0x26, + 0xe0, 0x45, 0xbd, 0x7d, 0x69, 0x3e, 0xdf, 0x3f, 0x69, 0x82, 0x1b, 0x26, 0x04, 0x2b, 0x28, 0x86, + 0x4c, 0x63, 0x23, 0x1b, 0xb5, 0x8f, 0xb0, 0xa2, 0x5a, 0xd4, 0x6a, 0x47, 0x35, 0xde, 0x41, 0x29, + 0x21, 0x67, 0x7c, 0x53, 0xc0, 0x96, 0x16, 0x48, 0x2a, 0x51, 0x89, 0x70, 0x62, 0x60, 0x04, 0x99, + 0x46, 0xce, 0x87, 0xf6, 0x11, 0x72, 0xbe, 0xe1, 0x45, 0x17, 0xdd, 0x1a, 0xb6, 0xf5, 0x2b, 0x7e, + 0x53, 0x20, 0x25, 0x55, 0xda, 0x44, 0x0c, 0x47, 0xb6, 0x5a, 0xe6, 0x18, 0x38, 0x17, 0x15, 0x54, + 0xb9, 0xe0, 0xdd, 0xe9, 0xe5, 0xc1, 0x0b, 0xfa, 0xf7, 0x5d, 0xf3, 0x63, 0x53, 0x1c, 0x5e, 0x05, + 0x7d, 0x1b, 0xbe, 0xe1, 0x50, 0xd2, 0x81, 0x37, 0xf6, 0x66, 0xa7, 0xeb, 0x73, 0xfb, 0xf1, 0x09, + 0x4a, 0x1a, 0xde, 0x06, 0x27, 0x76, 0xef, 0xc0, 0x1f, 0x7b, 0xb3, 0xb3, 0xf9, 0xa8, 0xdb, 0x87, + 0xec, 0x0e, 0xf4, 0x5c, 0xa9, 0x9c, 0xb3, 0x04, 0x8a, 0x9a, 0xae, 0x1d, 0x1d, 0xce, 0x83, 0xa3, + 0xf6, 0x82, 0x83, 0xde, 0x3f, 0x6c, 0x06, 0x5d, 0x7c, 0x79, 0xc1, 0x74, 0x27, 0x4a, 0xf4, 0xe7, + 0xcf, 0x58, 0x84, 0x3f, 0xee, 0xb2, 0x6a, 0x32, 0x57, 0xde, 0xcb, 0x43, 0x67, 0x64, 0xa2, 0x00, + 0xce, 0x90, 0x50, 0x0c, 0x33, 0xca, 0xdb, 0x46, 0xfb, 0x1a, 
0x32, 0xd7, 0xbf, 0x3c, 0xce, 0x9d, + 0x53, 0x07, 0xbf, 0xb7, 0x8c, 0xe3, 0x77, 0x7f, 0xb2, 0x34, 0x91, 0x71, 0xa6, 0x91, 0x91, 0x8d, + 0x4a, 0x22, 0xb4, 0xb6, 0xe4, 0x87, 0x65, 0xd2, 0x38, 0xd3, 0xa9, 0x63, 0xd2, 0x24, 0x4a, 0x1d, + 0xf3, 0xe9, 0x4f, 0xcd, 0x01, 0x21, 0x71, 0xa6, 0x09, 0x71, 0x14, 0x21, 0x49, 0x44, 0x88, 0xe3, + 0xb6, 0xc7, 0xed, 0xd8, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x26, 0x6c, 0xe5, 0x48, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_shared_set.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_shared_set.pb.go new file mode 100644 index 0000000..3421a44 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/campaign_shared_set.pb.go @@ -0,0 +1,135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/campaign_shared_set.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// CampaignSharedSets are used for managing the shared sets associated with a +// campaign. +type CampaignSharedSet struct { + // The resource name of the campaign shared set. + // Campaign shared set resource names have the form: + // + // `customers/{customer_id}/campaignSharedSets/{campaign_id}~{shared_set_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The campaign to which the campaign shared set belongs. + Campaign *wrappers.StringValue `protobuf:"bytes,3,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The shared set associated with the campaign. This may be a negative keyword + // shared set of another customer. This customer should be a manager of the + // other customer, otherwise the campaign shared set will exist but have no + // serving effect. Only negative keyword shared sets can be associated with + // Shopping campaigns. Only negative placement shared sets can be associated + // with Display mobile app campaigns. + SharedSet *wrappers.StringValue `protobuf:"bytes,4,opt,name=shared_set,json=sharedSet,proto3" json:"shared_set,omitempty"` + // The status of this campaign shared set. Read only. 
+ Status enums.CampaignSharedSetStatusEnum_CampaignSharedSetStatus `protobuf:"varint,2,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.CampaignSharedSetStatusEnum_CampaignSharedSetStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignSharedSet) Reset() { *m = CampaignSharedSet{} } +func (m *CampaignSharedSet) String() string { return proto.CompactTextString(m) } +func (*CampaignSharedSet) ProtoMessage() {} +func (*CampaignSharedSet) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_f9acec9356ea71cc, []int{0} +} +func (m *CampaignSharedSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignSharedSet.Unmarshal(m, b) +} +func (m *CampaignSharedSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignSharedSet.Marshal(b, m, deterministic) +} +func (dst *CampaignSharedSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignSharedSet.Merge(dst, src) +} +func (m *CampaignSharedSet) XXX_Size() int { + return xxx_messageInfo_CampaignSharedSet.Size(m) +} +func (m *CampaignSharedSet) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignSharedSet.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignSharedSet proto.InternalMessageInfo + +func (m *CampaignSharedSet) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CampaignSharedSet) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *CampaignSharedSet) GetSharedSet() *wrappers.StringValue { + if m != nil { + return m.SharedSet + } + return nil +} + +func (m *CampaignSharedSet) GetStatus() enums.CampaignSharedSetStatusEnum_CampaignSharedSetStatus { + if m != nil { + return m.Status + } + return enums.CampaignSharedSetStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CampaignSharedSet)(nil), "google.ads.googleads.v1.resources.CampaignSharedSet") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/campaign_shared_set.proto", fileDescriptor_campaign_shared_set_f9acec9356ea71cc) +} + +var fileDescriptor_campaign_shared_set_f9acec9356ea71cc = []byte{ + // 386 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xcd, 0x6a, 0xe3, 0x30, + 0x18, 0xc4, 0xce, 0x12, 0x36, 0xda, 0x1f, 0x58, 0x1f, 0x16, 0x13, 0xc2, 0x92, 0x6c, 0x09, 0xe4, + 0x24, 0xe3, 0xf4, 0x52, 0x14, 0x28, 0x38, 0xa5, 0x04, 0x7a, 0x28, 0xc1, 0x06, 0x1f, 0x8a, 0xc1, + 0x28, 0xb1, 0xaa, 0xba, 0xc4, 0x92, 0x91, 0xe4, 0xf4, 0x01, 0xfa, 0x18, 0xbd, 0xf5, 0xd8, 0x47, + 0xe9, 0xa3, 0xf4, 0x29, 0x4a, 0x6c, 0x4b, 0x3d, 0xa4, 0x69, 0x7b, 0x1b, 0xfb, 0x9b, 0x99, 0x6f, + 0x46, 0x12, 0x98, 0x51, 0xce, 0xe9, 0x86, 0x78, 0x38, 0x93, 0x5e, 0x03, 0x77, 0x68, 0xeb, 0x7b, + 0x82, 0x48, 0x5e, 0x89, 0x35, 0x91, 0xde, 0x1a, 0x17, 0x25, 0xce, 0x29, 0x4b, 0xe5, 0x0d, 0x16, + 0x24, 0x4b, 0x25, 0x51, 0xb0, 0x14, 0x5c, 0x71, 0x67, 0xd4, 0x28, 0x20, 0xce, 0x24, 0x34, 0x62, + 0xb8, 0xf5, 0xa1, 0x11, 0xf7, 0x4f, 0x0f, 0xf9, 0x13, 0x56, 0x15, 0xef, 0x7a, 0xa7, 0x52, 0x61, + 0x55, 0xc9, 0x66, 0x45, 0xff, 0x5f, 0xab, 0xaf, 0xbf, 0x56, 0xd5, 0xb5, 0x77, 0x27, 0x70, 0x59, + 0x12, 0xa1, 0xe7, 0x03, 0xed, 0x5f, 0xe6, 0x1e, 0x66, 0x8c, 0x2b, 0xac, 0x72, 0xce, 0xda, 0xe9, + 0xff, 0x07, 0x1b, 0xfc, 0x39, 0x6b, 0x57, 0x44, 0xf5, 0x86, 0x88, 0x28, 0xe7, 0x08, 0xfc, 0xd2, + 0x01, 0x53, 0x86, 0x0b, 0xe2, 0x5a, 0x43, 0x6b, 0xd2, 0x0b, 
0x7f, 0xea, 0x9f, 0x97, 0xb8, 0x20, + 0xce, 0x09, 0xf8, 0xae, 0xc3, 0xb9, 0x9d, 0xa1, 0x35, 0xf9, 0x31, 0x1d, 0xb4, 0x1d, 0xa1, 0xce, + 0x02, 0x23, 0x25, 0x72, 0x46, 0x63, 0xbc, 0xa9, 0x48, 0x68, 0xd8, 0xce, 0x0c, 0x80, 0xb7, 0x36, + 0xee, 0xb7, 0x2f, 0x68, 0x7b, 0xd2, 0x64, 0xbb, 0x05, 0xdd, 0xa6, 0xbf, 0x6b, 0x0f, 0xad, 0xc9, + 0xef, 0x69, 0x08, 0x0f, 0x9d, 0x71, 0x7d, 0x80, 0x70, 0xaf, 0x5d, 0x54, 0xab, 0xcf, 0x59, 0x55, + 0x1c, 0x9a, 0x85, 0xed, 0x86, 0xf9, 0xbd, 0x0d, 0xc6, 0x6b, 0x5e, 0xc0, 0x4f, 0x6f, 0x71, 0xfe, + 0x77, 0xcf, 0x6a, 0xb9, 0xeb, 0xb1, 0xb4, 0xae, 0x2e, 0x5a, 0x31, 0xe5, 0x1b, 0xcc, 0x28, 0xe4, + 0x82, 0x7a, 0x94, 0xb0, 0xba, 0xa5, 0xbe, 0xef, 0x32, 0x97, 0x1f, 0x3c, 0xaf, 0x99, 0x41, 0x8f, + 0x76, 0x67, 0x11, 0x04, 0x4f, 0xf6, 0x68, 0xd1, 0x58, 0x06, 0x99, 0x84, 0x0d, 0xdc, 0xa1, 0xd8, + 0x87, 0xa1, 0x66, 0x3e, 0x6b, 0x4e, 0x12, 0x64, 0x32, 0x31, 0x9c, 0x24, 0xf6, 0x13, 0xc3, 0x79, + 0xb1, 0xc7, 0xcd, 0x00, 0xa1, 0x20, 0x93, 0x08, 0x19, 0x16, 0x42, 0xb1, 0x8f, 0x90, 0xe1, 0xad, + 0xba, 0x75, 0xd8, 0xe3, 0xd7, 0x00, 0x00, 0x00, 0xff, 0xff, 0x31, 0x93, 0x1c, 0xd9, 0x0a, 0x03, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/carrier_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/carrier_constant.pb.go new file mode 100644 index 0000000..f812048 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/carrier_constant.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/carrier_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A carrier criterion that can be used in campaign targeting. +type CarrierConstant struct { + // The resource name of the carrier criterion. + // Carrier criterion resource names have the form: + // + // `carrierConstants/{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the carrier criterion. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The full name of the carrier in English. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The country code of the country where the carrier is located, e.g., "AR", + // "FR", etc. 
+ CountryCode *wrappers.StringValue `protobuf:"bytes,4,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CarrierConstant) Reset() { *m = CarrierConstant{} } +func (m *CarrierConstant) String() string { return proto.CompactTextString(m) } +func (*CarrierConstant) ProtoMessage() {} +func (*CarrierConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_carrier_constant_55ed5152c85d3e1c, []int{0} +} +func (m *CarrierConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CarrierConstant.Unmarshal(m, b) +} +func (m *CarrierConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CarrierConstant.Marshal(b, m, deterministic) +} +func (dst *CarrierConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_CarrierConstant.Merge(dst, src) +} +func (m *CarrierConstant) XXX_Size() int { + return xxx_messageInfo_CarrierConstant.Size(m) +} +func (m *CarrierConstant) XXX_DiscardUnknown() { + xxx_messageInfo_CarrierConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_CarrierConstant proto.InternalMessageInfo + +func (m *CarrierConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CarrierConstant) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *CarrierConstant) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *CarrierConstant) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func init() { + proto.RegisterType((*CarrierConstant)(nil), "google.ads.googleads.v1.resources.CarrierConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/carrier_constant.proto", fileDescriptor_carrier_constant_55ed5152c85d3e1c) +} + +var fileDescriptor_carrier_constant_55ed5152c85d3e1c = []byte{ + // 363 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xc1, 0x4a, 0xeb, 0x40, + 0x14, 0x86, 0x49, 0x5a, 0x2e, 0xdc, 0xb4, 0x97, 0x0b, 0xc1, 0x45, 0xa9, 0x45, 0x5a, 0xa5, 0x50, + 0x10, 0x26, 0x46, 0x45, 0x64, 0x5c, 0x48, 0xda, 0x45, 0xd1, 0x85, 0x94, 0x0a, 0x59, 0x48, 0xa0, + 0x4c, 0x33, 0x63, 0x08, 0xb4, 0x73, 0xc2, 0xcc, 0xa4, 0xe2, 0xd2, 0x57, 0x71, 0xe9, 0xa3, 0xf8, + 0x00, 0x3e, 0x84, 0x4f, 0x21, 0xcd, 0x64, 0x66, 0xa1, 0xa0, 0xee, 0x7e, 0x72, 0xbe, 0xff, 0xff, + 0x4f, 0xe6, 0x78, 0xe7, 0x19, 0x40, 0xb6, 0x62, 0x01, 0xa1, 0x32, 0xd0, 0x72, 0xab, 0x36, 0x61, + 0x20, 0x98, 0x84, 0x52, 0xa4, 0x4c, 0x06, 0x29, 0x11, 0x22, 0x67, 0x62, 0x91, 0x02, 0x97, 0x8a, + 0x70, 0x85, 0x0a, 0x01, 0x0a, 0xfc, 0x81, 0xc6, 0x11, 0xa1, 0x12, 0x59, 0x27, 0xda, 0x84, 0xc8, + 0x3a, 0xbb, 0x7b, 0x75, 0x78, 0x65, 0x58, 0x96, 0xf7, 0xc1, 0x83, 0x20, 0x45, 0xc1, 0x84, 0xd4, + 0x11, 0xdd, 0x9e, 0x29, 0x2f, 0xf2, 0x80, 0x70, 0x0e, 0x8a, 0xa8, 0x1c, 0x78, 0x3d, 0xdd, 0x7f, + 0x73, 0xbc, 0xff, 0x13, 0xdd, 0x3d, 0xa9, 0xab, 0xfd, 0x03, 0xef, 0x9f, 0x89, 0x5f, 0x70, 0xb2, + 0x66, 0x1d, 0xa7, 0xef, 0x8c, 0xfe, 0xce, 0xdb, 0xe6, 0xe3, 0x0d, 0x59, 0x33, 0xff, 0xd0, 0x73, + 0x73, 0xda, 0x71, 0xfb, 0xce, 0xa8, 0x75, 0xbc, 0x5b, 0xef, 0x86, 0xcc, 0x0e, 0xe8, 0x8a, 0xab, + 0xb3, 0xd3, 0x98, 0xac, 0x4a, 0x36, 0x77, 0x73, 0xea, 0x1f, 0x79, 0xcd, 0x2a, 0xa8, 0x51, 0xe1, + 0xbd, 0x2f, 0xf8, 0xad, 0x12, 0x39, 0xcf, 0x34, 0x5f, 0x91, 0xfe, 0xa5, 0xd7, 0x4e, 0xa1, 0xe4, + 0x4a, 
0x3c, 0x2e, 0x52, 0xa0, 0xac, 0xd3, 0xfc, 0x85, 0xb3, 0x55, 0x3b, 0x26, 0x40, 0xd9, 0xf8, + 0xc9, 0xf5, 0x86, 0x29, 0xac, 0xd1, 0x8f, 0x0f, 0x38, 0xde, 0xf9, 0xf4, 0xff, 0xb3, 0x6d, 0xf6, + 0xcc, 0xb9, 0xbb, 0xae, 0xad, 0x19, 0xac, 0x08, 0xcf, 0x10, 0x88, 0x2c, 0xc8, 0x18, 0xaf, 0x9a, + 0xcd, 0x15, 0x8b, 0x5c, 0x7e, 0x73, 0xd4, 0x0b, 0xab, 0x9e, 0xdd, 0xc6, 0x34, 0x8a, 0x5e, 0xdc, + 0xc1, 0x54, 0x47, 0x46, 0x54, 0x22, 0x2d, 0xb7, 0x2a, 0x0e, 0xd1, 0xdc, 0x90, 0xaf, 0x86, 0x49, + 0x22, 0x2a, 0x13, 0xcb, 0x24, 0x71, 0x98, 0x58, 0xe6, 0xdd, 0x1d, 0xea, 0x01, 0xc6, 0x11, 0x95, + 0x18, 0x5b, 0x0a, 0xe3, 0x38, 0xc4, 0xd8, 0x72, 0xcb, 0x3f, 0xd5, 0xb2, 0x27, 0x1f, 0x01, 0x00, + 0x00, 0xff, 0xff, 0xd2, 0xda, 0x43, 0x48, 0x80, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/change_status.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/change_status.pb.go new file mode 100644 index 0000000..33f5fd8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/change_status.pb.go @@ -0,0 +1,234 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/change_status.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the status of returned resource. +type ChangeStatus struct { + // The resource name of the change status. + // Change status resource names have the form: + // + // `customers/{customer_id}/changeStatus/{change_status_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Time at which the most recent change has occurred on this resource. + LastChangeDateTime *wrappers.StringValue `protobuf:"bytes,3,opt,name=last_change_date_time,json=lastChangeDateTime,proto3" json:"last_change_date_time,omitempty"` + // Represents the type of the changed resource. This dictates what fields + // will be set. For example, for AD_GROUP, campaign and ad_group fields will + // be set. + ResourceType enums.ChangeStatusResourceTypeEnum_ChangeStatusResourceType `protobuf:"varint,4,opt,name=resource_type,json=resourceType,proto3,enum=google.ads.googleads.v1.enums.ChangeStatusResourceTypeEnum_ChangeStatusResourceType" json:"resource_type,omitempty"` + // The Campaign affected by this change. + Campaign *wrappers.StringValue `protobuf:"bytes,5,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The AdGroup affected by this change. + AdGroup *wrappers.StringValue `protobuf:"bytes,6,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // Represents the status of the changed resource. 
+ ResourceStatus enums.ChangeStatusOperationEnum_ChangeStatusOperation `protobuf:"varint,8,opt,name=resource_status,json=resourceStatus,proto3,enum=google.ads.googleads.v1.enums.ChangeStatusOperationEnum_ChangeStatusOperation" json:"resource_status,omitempty"` + // The AdGroupAd affected by this change. + AdGroupAd *wrappers.StringValue `protobuf:"bytes,9,opt,name=ad_group_ad,json=adGroupAd,proto3" json:"ad_group_ad,omitempty"` + // The AdGroupCriterion affected by this change. + AdGroupCriterion *wrappers.StringValue `protobuf:"bytes,10,opt,name=ad_group_criterion,json=adGroupCriterion,proto3" json:"ad_group_criterion,omitempty"` + // The CampaignCriterion affected by this change. + CampaignCriterion *wrappers.StringValue `protobuf:"bytes,11,opt,name=campaign_criterion,json=campaignCriterion,proto3" json:"campaign_criterion,omitempty"` + // The Feed affected by this change. + Feed *wrappers.StringValue `protobuf:"bytes,12,opt,name=feed,proto3" json:"feed,omitempty"` + // The FeedItem affected by this change. + FeedItem *wrappers.StringValue `protobuf:"bytes,13,opt,name=feed_item,json=feedItem,proto3" json:"feed_item,omitempty"` + // The AdGroupFeed affected by this change. + AdGroupFeed *wrappers.StringValue `protobuf:"bytes,14,opt,name=ad_group_feed,json=adGroupFeed,proto3" json:"ad_group_feed,omitempty"` + // The CampaignFeed affected by this change. + CampaignFeed *wrappers.StringValue `protobuf:"bytes,15,opt,name=campaign_feed,json=campaignFeed,proto3" json:"campaign_feed,omitempty"` + // The AdGroupBidModifier affected by this change. + AdGroupBidModifier *wrappers.StringValue `protobuf:"bytes,16,opt,name=ad_group_bid_modifier,json=adGroupBidModifier,proto3" json:"ad_group_bid_modifier,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeStatus) Reset() { *m = ChangeStatus{} } +func (m *ChangeStatus) String() string { return proto.CompactTextString(m) } +func (*ChangeStatus) ProtoMessage() {} +func (*ChangeStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_change_status_3c3999904ed1b9b4, []int{0} +} +func (m *ChangeStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeStatus.Unmarshal(m, b) +} +func (m *ChangeStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeStatus.Marshal(b, m, deterministic) +} +func (dst *ChangeStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeStatus.Merge(dst, src) +} +func (m *ChangeStatus) XXX_Size() int { + return xxx_messageInfo_ChangeStatus.Size(m) +} +func (m *ChangeStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeStatus proto.InternalMessageInfo + +func (m *ChangeStatus) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ChangeStatus) GetLastChangeDateTime() *wrappers.StringValue { + if m != nil { + return m.LastChangeDateTime + } + return nil +} + +func (m *ChangeStatus) GetResourceType() enums.ChangeStatusResourceTypeEnum_ChangeStatusResourceType { + if m != nil { + return m.ResourceType + } + return enums.ChangeStatusResourceTypeEnum_UNSPECIFIED +} + +func (m *ChangeStatus) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *ChangeStatus) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *ChangeStatus) GetResourceStatus() 
enums.ChangeStatusOperationEnum_ChangeStatusOperation { + if m != nil { + return m.ResourceStatus + } + return enums.ChangeStatusOperationEnum_UNSPECIFIED +} + +func (m *ChangeStatus) GetAdGroupAd() *wrappers.StringValue { + if m != nil { + return m.AdGroupAd + } + return nil +} + +func (m *ChangeStatus) GetAdGroupCriterion() *wrappers.StringValue { + if m != nil { + return m.AdGroupCriterion + } + return nil +} + +func (m *ChangeStatus) GetCampaignCriterion() *wrappers.StringValue { + if m != nil { + return m.CampaignCriterion + } + return nil +} + +func (m *ChangeStatus) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *ChangeStatus) GetFeedItem() *wrappers.StringValue { + if m != nil { + return m.FeedItem + } + return nil +} + +func (m *ChangeStatus) GetAdGroupFeed() *wrappers.StringValue { + if m != nil { + return m.AdGroupFeed + } + return nil +} + +func (m *ChangeStatus) GetCampaignFeed() *wrappers.StringValue { + if m != nil { + return m.CampaignFeed + } + return nil +} + +func (m *ChangeStatus) GetAdGroupBidModifier() *wrappers.StringValue { + if m != nil { + return m.AdGroupBidModifier + } + return nil +} + +func init() { + proto.RegisterType((*ChangeStatus)(nil), "google.ads.googleads.v1.resources.ChangeStatus") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/change_status.proto", fileDescriptor_change_status_3c3999904ed1b9b4) +} + +var fileDescriptor_change_status_3c3999904ed1b9b4 = []byte{ + // 603 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6a, 0xdb, 0x3e, + 0x14, 0xc7, 0x6d, 0xff, 0xfd, 0x27, 0x4a, 0xd2, 0x0f, 0x41, 0xc1, 0x94, 0x32, 0xda, 0x8d, 0x42, + 0xaf, 0xe4, 0xa5, 0x63, 0x6c, 0x73, 0x07, 0x9b, 0xdb, 0x6d, 0x65, 0x1d, 0x6b, 0x4b, 0x5a, 0x72, + 0x31, 0x02, 0x46, 0x8d, 0x54, 0x4f, 0x10, 0x4b, 0x46, 0x92, 0x5b, 0xf2, 0x00, 0x7b, 0x91, 0x5d, + 0xee, 0x51, 0xf6, 0x28, 0x7b, 0x88, 0x31, 0x2c, 0x4b, 0x6a, 0xd8, 0xc8, 0xe6, 0x5e, 0xe5, 0x38, + 0xe7, 0xf7, 0x75, 0x8e, 0x6c, 0x81, 0xa7, 0x99, 0x10, 0xd9, 0x84, 0x46, 0x98, 0xa8, 0xa8, 0x2e, + 0xab, 0xea, 0xa6, 0x1f, 0x49, 0xaa, 0x44, 0x29, 0xc7, 0x54, 0x45, 0xe3, 0xcf, 0x98, 0x67, 0x34, + 0x55, 0x1a, 0xeb, 0x52, 0xa1, 0x42, 0x0a, 0x2d, 0xe0, 0x4e, 0x8d, 0x45, 0x98, 0x28, 0xe4, 0x69, + 0xe8, 0xa6, 0x8f, 0x3c, 0x6d, 0xf3, 0x60, 0x9e, 0x32, 0xe5, 0x65, 0xfe, 0x9b, 0x6a, 0x2a, 0x0a, + 0x2a, 0xb1, 0x66, 0x82, 0xd7, 0xfa, 0x9b, 0xaf, 0xee, 0x43, 0x76, 0x9e, 0xa9, 0x9e, 0x16, 0xd4, + 0x0a, 0x3c, 0xb0, 0x02, 0xe6, 0xe9, 0xaa, 0xbc, 0x8e, 0x6e, 0x25, 0x2e, 0x0a, 0x2a, 0xed, 0x00, + 0x9b, 0x5b, 0xce, 0xa0, 0x60, 0x11, 0xe6, 0x5c, 0x68, 0xe3, 0x6e, 0xbb, 0x0f, 0xbf, 0xb4, 0x40, + 0xf7, 0xc8, 0x78, 0x5c, 0x18, 0x0b, 0xf8, 0x08, 0xf4, 0xbc, 0x0b, 0xc7, 0x39, 0x0d, 0x83, 0xed, + 0x60, 0xaf, 0x3d, 0xe8, 0xba, 0x3f, 0x4f, 0x71, 0x4e, 0xe1, 0x19, 0xd8, 0x98, 0x60, 0xa5, 0x53, + 0x9b, 0x8e, 0x60, 0x4d, 0x53, 0xcd, 0x72, 0x1a, 0x2e, 0x6e, 0x07, 0x7b, 0x9d, 0xfd, 0x2d, 0xbb, + 0x29, 0xe4, 0x32, 0xa1, 0x0b, 0x2d, 0x19, 0xcf, 0x86, 0x78, 0x52, 0xd2, 0x01, 0xac, 0xa8, 0xb5, + 0xe7, 0x1b, 0xac, 0xe9, 0x25, 0xcb, 0x29, 0x9c, 0xce, 0xb8, 0x56, 0xb3, 0x85, 0x4b, 0xdb, 0xc1, + 0xde, 0xca, 0xfe, 0x25, 0x9a, 0xb7, 0x7d, 0xb3, 0x1d, 0x34, 0x9b, 0x7c, 0x60, 0xf9, 0x97, 0xd3, + 0x82, 0xbe, 0xe5, 0x65, 0x3e, 0xb7, 0x79, 0x37, 0x4b, 0xf5, 0x04, 0x9f, 0x83, 0xd6, 0x18, 0xe7, + 0x05, 0x66, 0x19, 0x0f, 0xff, 0x6b, 0x10, 0xdf, 0xa3, 0xe1, 0x33, 0xd0, 0xc2, 0x24, 0xcd, 0xa4, + 0x28, 0x8b, 0x70, 0xb9, 0x01, 0xf3, 0x7f, 
0x4c, 0x8e, 0x2b, 0x30, 0xbc, 0x05, 0xab, 0x7e, 0xda, + 0xfa, 0x64, 0xc3, 0x96, 0x99, 0xf7, 0xf4, 0x1e, 0xf3, 0x9e, 0xb9, 0x17, 0xe9, 0x8f, 0x61, 0x7d, + 0x67, 0xb0, 0xe2, 0x6c, 0xec, 0xe1, 0xbe, 0x04, 0x1d, 0x97, 0x38, 0xc5, 0x24, 0x6c, 0x37, 0x08, + 0xdd, 0xb6, 0xa1, 0x13, 0x02, 0x4f, 0x00, 0xf4, 0xec, 0xb1, 0x64, 0x9a, 0x4a, 0x26, 0x78, 0x08, + 0x1a, 0x88, 0xac, 0x59, 0x91, 0x23, 0xc7, 0x82, 0x1f, 0x00, 0x74, 0x7b, 0x9c, 0xd1, 0xea, 0x34, + 0xd0, 0x5a, 0x77, 0xbc, 0x3b, 0xb1, 0xc7, 0x60, 0xe9, 0x9a, 0x52, 0x12, 0x76, 0x1b, 0xd0, 0x0d, + 0x12, 0xbe, 0x00, 0xed, 0xea, 0x37, 0x65, 0x9a, 0xe6, 0x61, 0xaf, 0xc9, 0xa9, 0x57, 0xf0, 0xf7, + 0x9a, 0xe6, 0xf0, 0x35, 0xe8, 0xf9, 0x2d, 0x18, 0xd7, 0x95, 0x06, 0xf4, 0x8e, 0x5d, 0xc0, 0xbb, + 0xca, 0x3c, 0x01, 0x3d, 0x3f, 0xbb, 0x51, 0x58, 0x6d, 0xa0, 0xd0, 0x75, 0x14, 0x23, 0x71, 0x06, + 0x36, 0x7c, 0x88, 0x2b, 0x46, 0xd2, 0x5c, 0x10, 0x76, 0xcd, 0xa8, 0x0c, 0xd7, 0x9a, 0x7c, 0x80, + 0x36, 0xcc, 0x21, 0x23, 0x1f, 0x2d, 0xef, 0xf0, 0x67, 0x00, 0x76, 0xc7, 0x22, 0x47, 0xff, 0xbc, + 0xed, 0x0e, 0xd7, 0x67, 0x5f, 0xb5, 0xf3, 0x4a, 0xff, 0x3c, 0xf8, 0x74, 0x62, 0x79, 0x99, 0x98, + 0x60, 0x9e, 0x21, 0x21, 0xb3, 0x28, 0xa3, 0xdc, 0xb8, 0xbb, 0x5b, 0xad, 0x60, 0xea, 0x2f, 0x77, + 0xef, 0x81, 0xaf, 0xbe, 0x2e, 0x2c, 0x1e, 0x27, 0xc9, 0xb7, 0x85, 0x9d, 0xe3, 0x5a, 0x32, 0x21, + 0x0a, 0xd5, 0x65, 0x55, 0x0d, 0xfb, 0xc8, 0x7d, 0xcb, 0xea, 0xbb, 0xc3, 0x8c, 0x12, 0xa2, 0x46, + 0x1e, 0x33, 0x1a, 0xf6, 0x47, 0x1e, 0xf3, 0x63, 0x61, 0xb7, 0x6e, 0xc4, 0x71, 0x42, 0x54, 0x1c, + 0x7b, 0x54, 0x1c, 0x0f, 0xfb, 0x71, 0xec, 0x71, 0x57, 0xcb, 0x26, 0xec, 0x93, 0x5f, 0x01, 0x00, + 0x00, 0xff, 0xff, 0xe0, 0xa2, 0x09, 0x27, 0x27, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/click_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/click_view.pb.go new file mode 100644 index 0000000..3860715 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/click_view.pb.go @@ -0,0 +1,144 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/click_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A click view with metrics aggregated at each click level, including both +// valid and invalid clicks. For non-Search campaigns, metrics.clicks +// represents the number of valid and invalid interactions. +type ClickView struct { + // The resource name of the click view. 
+ // Click view resource names have the form: + // + // `customers/{customer_id}/clickViews/{date (yyyy-MM-dd)}~{gclid}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The Google Click ID. + Gclid *wrappers.StringValue `protobuf:"bytes,2,opt,name=gclid,proto3" json:"gclid,omitempty"` + // The location criteria matching the area of interest associated with the + // impression. + AreaOfInterest *common.ClickLocation `protobuf:"bytes,3,opt,name=area_of_interest,json=areaOfInterest,proto3" json:"area_of_interest,omitempty"` + // The location criteria matching the location of presence associated with the + // impression. + LocationOfPresence *common.ClickLocation `protobuf:"bytes,4,opt,name=location_of_presence,json=locationOfPresence,proto3" json:"location_of_presence,omitempty"` + // Page number in search results where the ad was shown. + PageNumber *wrappers.Int64Value `protobuf:"bytes,5,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClickView) Reset() { *m = ClickView{} } +func (m *ClickView) String() string { return proto.CompactTextString(m) } +func (*ClickView) ProtoMessage() {} +func (*ClickView) Descriptor() ([]byte, []int) { + return fileDescriptor_click_view_e4e236f4d52695b3, []int{0} +} +func (m *ClickView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClickView.Unmarshal(m, b) +} +func (m *ClickView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClickView.Marshal(b, m, deterministic) +} +func (dst *ClickView) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClickView.Merge(dst, src) +} +func (m *ClickView) XXX_Size() int { + return xxx_messageInfo_ClickView.Size(m) +} +func (m *ClickView) XXX_DiscardUnknown() { + xxx_messageInfo_ClickView.DiscardUnknown(m) +} + +var xxx_messageInfo_ClickView proto.InternalMessageInfo + +func (m *ClickView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ClickView) GetGclid() *wrappers.StringValue { + if m != nil { + return m.Gclid + } + return nil +} + +func (m *ClickView) GetAreaOfInterest() *common.ClickLocation { + if m != nil { + return m.AreaOfInterest + } + return nil +} + +func (m *ClickView) GetLocationOfPresence() *common.ClickLocation { + if m != nil { + return m.LocationOfPresence + } + return nil +} + +func (m *ClickView) GetPageNumber() *wrappers.Int64Value { + if m != nil { + return m.PageNumber + } + return nil +} + +func init() { + proto.RegisterType((*ClickView)(nil), "google.ads.googleads.v1.resources.ClickView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/click_view.proto", fileDescriptor_click_view_e4e236f4d52695b3) +} + +var fileDescriptor_click_view_e4e236f4d52695b3 = []byte{ + // 429 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xdf, 0x6a, 0xd4, 0x40, + 0x14, 0x87, 0x49, 0x6a, 0x85, 0x4e, 0xb5, 0x48, 0xf0, 0x22, 0xd4, 0x22, 0xad, 0x52, 0xe8, 0x8d, + 0x13, 0xb2, 0x15, 0x2f, 0xa2, 0x37, 0xa9, 0x17, 0xa5, 0x22, 0xed, 0xb2, 0x42, 0x04, 0x59, 0x08, + 0xb3, 0x93, 0xb3, 0xc3, 0x60, 0x32, 0x13, 0x66, 0x92, 0xdd, 0xf7, 0xf1, 0xd2, 0x47, 0xf1, 0x19, + 0x7c, 0x02, 0x5f, 0xc1, 0x1b, 0xc9, 0xfc, 0xbb, 0x91, 0x55, 0xe8, 0xdd, 0xd9, 0x3d, 0xdf, 0xef, + 0xcb, 0x39, 0x33, 0x83, 0x66, 0x4c, 0x4a, 0xd6, 0x42, 0x46, 
0x1a, 0x9d, 0xd9, 0x72, 0xaa, 0x36, + 0x79, 0xa6, 0x40, 0xcb, 0x51, 0x51, 0xd0, 0x19, 0x6d, 0x39, 0xfd, 0x5a, 0x6f, 0x38, 0x6c, 0x71, + 0xaf, 0xe4, 0x20, 0x93, 0x33, 0x0b, 0x62, 0xd2, 0x68, 0x1c, 0x32, 0x78, 0x93, 0xe3, 0x90, 0x39, + 0xbe, 0xdc, 0xa5, 0xa5, 0xb2, 0xeb, 0xa4, 0x70, 0xce, 0x56, 0x52, 0x32, 0x70, 0x29, 0xac, 0xf7, + 0xf8, 0xb9, 0x0b, 0x99, 0x5f, 0xab, 0x71, 0x9d, 0x6d, 0x15, 0xe9, 0x7b, 0x50, 0xda, 0xf5, 0x4f, + 0xbc, 0xb4, 0xe7, 0x19, 0x11, 0x42, 0x0e, 0x26, 0xec, 0xba, 0x2f, 0x7e, 0xc6, 0xe8, 0xe0, 0xfd, + 0xa4, 0xad, 0x38, 0x6c, 0x93, 0x97, 0xe8, 0xb1, 0x9f, 0xa6, 0x16, 0xa4, 0x83, 0x34, 0x3a, 0x8d, + 0x2e, 0x0e, 0x16, 0x8f, 0xfc, 0x9f, 0xb7, 0xa4, 0x83, 0x64, 0x86, 0xf6, 0x19, 0x6d, 0x79, 0x93, + 0xc6, 0xa7, 0xd1, 0xc5, 0xe1, 0xec, 0xc4, 0x6d, 0x83, 0xfd, 0x00, 0xf8, 0xd3, 0xa0, 0xb8, 0x60, + 0x15, 0x69, 0x47, 0x58, 0x58, 0x34, 0xf9, 0x8c, 0x9e, 0x10, 0x05, 0xa4, 0x96, 0xeb, 0x9a, 0x8b, + 0x01, 0x14, 0xe8, 0x21, 0xdd, 0x33, 0xf1, 0x57, 0x78, 0xd7, 0xb9, 0xd8, 0xa5, 0xb1, 0x99, 0xee, + 0xa3, 0xdb, 0x79, 0x71, 0x34, 0x69, 0xee, 0xd6, 0x37, 0x4e, 0x92, 0xd4, 0xe8, 0xa9, 0x3f, 0x8f, + 0x49, 0xde, 0x2b, 0xd0, 0x20, 0x28, 0xa4, 0x0f, 0xee, 0x23, 0x4f, 0xbc, 0xea, 0x6e, 0x3d, 0x77, + 0xa2, 0xe4, 0x1d, 0x3a, 0xec, 0x09, 0x83, 0x5a, 0x8c, 0xdd, 0x0a, 0x54, 0xba, 0x6f, 0xbc, 0xcf, + 0xfe, 0xda, 0xf9, 0x46, 0x0c, 0x6f, 0x5e, 0xdb, 0x95, 0xd1, 0xc4, 0xdf, 0x1a, 0xfc, 0xea, 0x77, + 0x84, 0xce, 0xa9, 0xec, 0xf0, 0x7f, 0xef, 0xfe, 0xea, 0x28, 0xdc, 0xc2, 0x7c, 0x72, 0xce, 0xa3, + 0x2f, 0x1f, 0x5c, 0x88, 0xc9, 0x96, 0x08, 0x86, 0xa5, 0x62, 0x19, 0x03, 0x61, 0xbe, 0xe8, 0x5f, + 0x47, 0xcf, 0xf5, 0x3f, 0xde, 0xe0, 0xdb, 0x50, 0x7d, 0x8b, 0xf7, 0xae, 0xcb, 0xf2, 0x7b, 0x7c, + 0x76, 0x6d, 0x95, 0x65, 0xa3, 0xb1, 0x2d, 0xa7, 0xaa, 0xca, 0xf1, 0xc2, 0x93, 0x3f, 0x3c, 0xb3, + 0x2c, 0x1b, 0xbd, 0x0c, 0xcc, 0xb2, 0xca, 0x97, 0x81, 0xf9, 0x15, 0x9f, 0xdb, 0x46, 0x51, 0x94, + 0x8d, 0x2e, 0x8a, 0x40, 0x15, 0x45, 0x95, 0x17, 0x45, 0xe0, 0x56, 0x0f, 0xcd, 0xb0, 0x97, 0x7f, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x7d, 0xc6, 0x81, 0x37, 0x2f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/conversion_action.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/conversion_action.pb.go new file mode 100644 index 0000000..469bd78 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/conversion_action.pb.go @@ -0,0 +1,404 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/conversion_action.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A conversion action. 
+type ConversionAction struct { + // The resource name of the conversion action. + // Conversion action resource names have the form: + // + // `customers/{customer_id}/conversionActions/{conversion_action_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the conversion action. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The name of the conversion action. + // + // This field is required and should not be empty when creating new + // conversion actions. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The status of this conversion action for conversion event accrual. + Status enums.ConversionActionStatusEnum_ConversionActionStatus `protobuf:"varint,4,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.ConversionActionStatusEnum_ConversionActionStatus" json:"status,omitempty"` + // The type of this conversion action. + Type enums.ConversionActionTypeEnum_ConversionActionType `protobuf:"varint,5,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.ConversionActionTypeEnum_ConversionActionType" json:"type,omitempty"` + // The category of conversions reported for this conversion action. + Category enums.ConversionActionCategoryEnum_ConversionActionCategory `protobuf:"varint,6,opt,name=category,proto3,enum=google.ads.googleads.v1.enums.ConversionActionCategoryEnum_ConversionActionCategory" json:"category,omitempty"` + // The resource name of the conversion action owner customer, or null if this + // is a system-defined conversion action. + OwnerCustomer *wrappers.StringValue `protobuf:"bytes,7,opt,name=owner_customer,json=ownerCustomer,proto3" json:"owner_customer,omitempty"` + // Whether this conversion action should be included in the "conversions" + // metric. + IncludeInConversionsMetric *wrappers.BoolValue `protobuf:"bytes,8,opt,name=include_in_conversions_metric,json=includeInConversionsMetric,proto3" json:"include_in_conversions_metric,omitempty"` + // The maximum number of days that may elapse between an interaction + // (e.g., a click) and a conversion event. + ClickThroughLookbackWindowDays *wrappers.Int64Value `protobuf:"bytes,9,opt,name=click_through_lookback_window_days,json=clickThroughLookbackWindowDays,proto3" json:"click_through_lookback_window_days,omitempty"` + // The maximum number of days which may elapse between an impression and a + // conversion without an interaction. + ViewThroughLookbackWindowDays *wrappers.Int64Value `protobuf:"bytes,10,opt,name=view_through_lookback_window_days,json=viewThroughLookbackWindowDays,proto3" json:"view_through_lookback_window_days,omitempty"` + // Settings related to the value for conversion events associated with this + // conversion action. + ValueSettings *ConversionAction_ValueSettings `protobuf:"bytes,11,opt,name=value_settings,json=valueSettings,proto3" json:"value_settings,omitempty"` + // How to count conversion events for the conversion action. + CountingType enums.ConversionActionCountingTypeEnum_ConversionActionCountingType `protobuf:"varint,12,opt,name=counting_type,json=countingType,proto3,enum=google.ads.googleads.v1.enums.ConversionActionCountingTypeEnum_ConversionActionCountingType" json:"counting_type,omitempty"` + // Settings related to this conversion action's attribution model. 
+ AttributionModelSettings *ConversionAction_AttributionModelSettings `protobuf:"bytes,13,opt,name=attribution_model_settings,json=attributionModelSettings,proto3" json:"attribution_model_settings,omitempty"` + // The snippets used for tracking conversions. + TagSnippets []*common.TagSnippet `protobuf:"bytes,14,rep,name=tag_snippets,json=tagSnippets,proto3" json:"tag_snippets,omitempty"` + // The phone call duration in seconds after which a conversion should be + // reported for this conversion action. + // + // The value must be between 0 and 10000, inclusive. + PhoneCallDurationSeconds *wrappers.Int64Value `protobuf:"bytes,15,opt,name=phone_call_duration_seconds,json=phoneCallDurationSeconds,proto3" json:"phone_call_duration_seconds,omitempty"` + // App ID for an app conversion action. + AppId *wrappers.StringValue `protobuf:"bytes,16,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAction) Reset() { *m = ConversionAction{} } +func (m *ConversionAction) String() string { return proto.CompactTextString(m) } +func (*ConversionAction) ProtoMessage() {} +func (*ConversionAction) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_c6dc8f0fecb5b482, []int{0} +} +func (m *ConversionAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAction.Unmarshal(m, b) +} +func (m *ConversionAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAction.Marshal(b, m, deterministic) +} +func (dst *ConversionAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAction.Merge(dst, src) +} +func (m *ConversionAction) XXX_Size() int { + return xxx_messageInfo_ConversionAction.Size(m) +} +func (m *ConversionAction) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAction.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAction proto.InternalMessageInfo + +func (m *ConversionAction) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ConversionAction) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *ConversionAction) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *ConversionAction) GetStatus() enums.ConversionActionStatusEnum_ConversionActionStatus { + if m != nil { + return m.Status + } + return enums.ConversionActionStatusEnum_UNSPECIFIED +} + +func (m *ConversionAction) GetType() enums.ConversionActionTypeEnum_ConversionActionType { + if m != nil { + return m.Type + } + return enums.ConversionActionTypeEnum_UNSPECIFIED +} + +func (m *ConversionAction) GetCategory() enums.ConversionActionCategoryEnum_ConversionActionCategory { + if m != nil { + return m.Category + } + return enums.ConversionActionCategoryEnum_UNSPECIFIED +} + +func (m *ConversionAction) GetOwnerCustomer() *wrappers.StringValue { + if m != nil { + return m.OwnerCustomer + } + return nil +} + +func (m *ConversionAction) GetIncludeInConversionsMetric() *wrappers.BoolValue { + if m != nil { + return m.IncludeInConversionsMetric + } + return nil +} + +func (m *ConversionAction) GetClickThroughLookbackWindowDays() *wrappers.Int64Value { + if m != nil { + return m.ClickThroughLookbackWindowDays + } + return nil +} + +func (m *ConversionAction) GetViewThroughLookbackWindowDays() *wrappers.Int64Value { + if m != nil { + return m.ViewThroughLookbackWindowDays + } + 
return nil +} + +func (m *ConversionAction) GetValueSettings() *ConversionAction_ValueSettings { + if m != nil { + return m.ValueSettings + } + return nil +} + +func (m *ConversionAction) GetCountingType() enums.ConversionActionCountingTypeEnum_ConversionActionCountingType { + if m != nil { + return m.CountingType + } + return enums.ConversionActionCountingTypeEnum_UNSPECIFIED +} + +func (m *ConversionAction) GetAttributionModelSettings() *ConversionAction_AttributionModelSettings { + if m != nil { + return m.AttributionModelSettings + } + return nil +} + +func (m *ConversionAction) GetTagSnippets() []*common.TagSnippet { + if m != nil { + return m.TagSnippets + } + return nil +} + +func (m *ConversionAction) GetPhoneCallDurationSeconds() *wrappers.Int64Value { + if m != nil { + return m.PhoneCallDurationSeconds + } + return nil +} + +func (m *ConversionAction) GetAppId() *wrappers.StringValue { + if m != nil { + return m.AppId + } + return nil +} + +// Settings related to this conversion action's attribution model. +type ConversionAction_AttributionModelSettings struct { + // The attribution model type of this conversion action. + AttributionModel enums.AttributionModelEnum_AttributionModel `protobuf:"varint,1,opt,name=attribution_model,json=attributionModel,proto3,enum=google.ads.googleads.v1.enums.AttributionModelEnum_AttributionModel" json:"attribution_model,omitempty"` + // The status of the data-driven attribution model for the conversion + // action. + DataDrivenModelStatus enums.DataDrivenModelStatusEnum_DataDrivenModelStatus `protobuf:"varint,2,opt,name=data_driven_model_status,json=dataDrivenModelStatus,proto3,enum=google.ads.googleads.v1.enums.DataDrivenModelStatusEnum_DataDrivenModelStatus" json:"data_driven_model_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAction_AttributionModelSettings) Reset() { + *m = ConversionAction_AttributionModelSettings{} +} +func (m *ConversionAction_AttributionModelSettings) String() string { return proto.CompactTextString(m) } +func (*ConversionAction_AttributionModelSettings) ProtoMessage() {} +func (*ConversionAction_AttributionModelSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_c6dc8f0fecb5b482, []int{0, 0} +} +func (m *ConversionAction_AttributionModelSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAction_AttributionModelSettings.Unmarshal(m, b) +} +func (m *ConversionAction_AttributionModelSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAction_AttributionModelSettings.Marshal(b, m, deterministic) +} +func (dst *ConversionAction_AttributionModelSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAction_AttributionModelSettings.Merge(dst, src) +} +func (m *ConversionAction_AttributionModelSettings) XXX_Size() int { + return xxx_messageInfo_ConversionAction_AttributionModelSettings.Size(m) +} +func (m *ConversionAction_AttributionModelSettings) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAction_AttributionModelSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAction_AttributionModelSettings proto.InternalMessageInfo + +func (m *ConversionAction_AttributionModelSettings) GetAttributionModel() enums.AttributionModelEnum_AttributionModel { + if m != nil { + return m.AttributionModel + } + return enums.AttributionModelEnum_UNSPECIFIED +} + +func (m 
*ConversionAction_AttributionModelSettings) GetDataDrivenModelStatus() enums.DataDrivenModelStatusEnum_DataDrivenModelStatus { + if m != nil { + return m.DataDrivenModelStatus + } + return enums.DataDrivenModelStatusEnum_UNSPECIFIED +} + +// Settings related to the value for conversion events associated with this +// conversion action. +type ConversionAction_ValueSettings struct { + // The value to use when conversion events for this conversion action are + // sent with an invalid, disallowed or missing value, or when + // this conversion action is configured to always use the default value. + DefaultValue *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + // The currency code to use when conversion events for this conversion + // action are sent with an invalid or missing currency code, or when this + // conversion action is configured to always use the default value. + DefaultCurrencyCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=default_currency_code,json=defaultCurrencyCode,proto3" json:"default_currency_code,omitempty"` + // Controls whether the default value and default currency code are used in + // place of the value and currency code specified in conversion events for + // this conversion action. + AlwaysUseDefaultValue *wrappers.BoolValue `protobuf:"bytes,3,opt,name=always_use_default_value,json=alwaysUseDefaultValue,proto3" json:"always_use_default_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAction_ValueSettings) Reset() { *m = ConversionAction_ValueSettings{} } +func (m *ConversionAction_ValueSettings) String() string { return proto.CompactTextString(m) } +func (*ConversionAction_ValueSettings) ProtoMessage() {} +func (*ConversionAction_ValueSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_c6dc8f0fecb5b482, []int{0, 1} +} +func (m *ConversionAction_ValueSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAction_ValueSettings.Unmarshal(m, b) +} +func (m *ConversionAction_ValueSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAction_ValueSettings.Marshal(b, m, deterministic) +} +func (dst *ConversionAction_ValueSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAction_ValueSettings.Merge(dst, src) +} +func (m *ConversionAction_ValueSettings) XXX_Size() int { + return xxx_messageInfo_ConversionAction_ValueSettings.Size(m) +} +func (m *ConversionAction_ValueSettings) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAction_ValueSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAction_ValueSettings proto.InternalMessageInfo + +func (m *ConversionAction_ValueSettings) GetDefaultValue() *wrappers.DoubleValue { + if m != nil { + return m.DefaultValue + } + return nil +} + +func (m *ConversionAction_ValueSettings) GetDefaultCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.DefaultCurrencyCode + } + return nil +} + +func (m *ConversionAction_ValueSettings) GetAlwaysUseDefaultValue() *wrappers.BoolValue { + if m != nil { + return m.AlwaysUseDefaultValue + } + return nil +} + +func init() { + proto.RegisterType((*ConversionAction)(nil), "google.ads.googleads.v1.resources.ConversionAction") + proto.RegisterType((*ConversionAction_AttributionModelSettings)(nil), "google.ads.googleads.v1.resources.ConversionAction.AttributionModelSettings") + 
proto.RegisterType((*ConversionAction_ValueSettings)(nil), "google.ads.googleads.v1.resources.ConversionAction.ValueSettings") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/conversion_action.proto", fileDescriptor_conversion_action_c6dc8f0fecb5b482) +} + +var fileDescriptor_conversion_action_c6dc8f0fecb5b482 = []byte{ + // 971 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0x56, 0xd2, 0x6e, 0xd9, 0x9d, 0x26, 0xd9, 0x32, 0xa8, 0x92, 0x95, 0xfd, 0x51, 0xbb, 0x68, + 0xa5, 0x0a, 0x24, 0x67, 0x93, 0x05, 0x24, 0x02, 0x42, 0x72, 0x13, 0xb4, 0x2a, 0x6a, 0x57, 0x95, + 0x53, 0x8a, 0xb4, 0x0a, 0x1a, 0x26, 0x9e, 0xa9, 0x63, 0xd5, 0x9e, 0x31, 0x33, 0xe3, 0x44, 0xb9, + 0x84, 0x1b, 0x24, 0x5e, 0x80, 0x7b, 0x2e, 0x79, 0x03, 0x5e, 0x81, 0x47, 0xe1, 0x0d, 0xb8, 0x43, + 0x1e, 0x8f, 0xdd, 0x34, 0x89, 0x9b, 0x66, 0xef, 0x66, 0x7c, 0xce, 0xf7, 0x7d, 0xe7, 0x9c, 0x39, + 0x73, 0xc6, 0xe0, 0x4b, 0x9f, 0x73, 0x3f, 0xa4, 0x2d, 0x4c, 0x64, 0x2b, 0x5b, 0xa6, 0xab, 0x49, + 0xbb, 0x25, 0xa8, 0xe4, 0x89, 0xf0, 0xa8, 0x6c, 0x79, 0x9c, 0x4d, 0xa8, 0x90, 0x01, 0x67, 0x08, + 0x7b, 0x2a, 0xe0, 0xcc, 0x8e, 0x05, 0x57, 0x1c, 0x1e, 0x66, 0xfe, 0x36, 0x26, 0xd2, 0x2e, 0xa0, + 0xf6, 0xa4, 0x6d, 0x17, 0xd0, 0xe6, 0xab, 0x32, 0x76, 0x8f, 0x47, 0x11, 0x67, 0x2d, 0x85, 0x7d, + 0x24, 0x59, 0x10, 0xc7, 0x54, 0x65, 0xa4, 0xcd, 0xcf, 0xcb, 0x10, 0x94, 0x25, 0x91, 0x6c, 0x61, + 0xa5, 0x44, 0x30, 0x4a, 0xd2, 0x28, 0x50, 0xc4, 0x09, 0x0d, 0x0d, 0xec, 0x9b, 0xbb, 0x61, 0x4b, + 0x29, 0x20, 0x0f, 0x2b, 0xea, 0x73, 0x31, 0x33, 0xf8, 0xde, 0xc6, 0x78, 0x9e, 0x30, 0x15, 0x30, + 0x1f, 0xa9, 0x59, 0x4c, 0x0d, 0xc9, 0xd7, 0x9b, 0x92, 0x48, 0x85, 0x55, 0x22, 0x0d, 0xba, 0xbb, + 0x29, 0xfa, 0xfe, 0xca, 0x04, 0x2b, 0x8c, 0x88, 0x08, 0x26, 0xd4, 0x54, 0xed, 0xb6, 0xf2, 0x73, + 0x83, 0xd6, 0xbb, 0x51, 0x72, 0xd5, 0x9a, 0x0a, 0x1c, 0xc7, 0x54, 0xe4, 0xf6, 0xa7, 0x39, 0x7b, + 0x1c, 0xb4, 0x30, 0x63, 0x5c, 0xe1, 0x54, 0xdf, 0x58, 0x5f, 0xfc, 0xfd, 0x18, 0xec, 0xf5, 0x8a, + 0xe0, 0x1c, 0x1d, 0x1b, 0xfc, 0x18, 0xd4, 0xf3, 0x2e, 0x40, 0x0c, 0x47, 0xd4, 0xaa, 0x1c, 0x54, + 0x8e, 0x1e, 0xb9, 0xb5, 0xfc, 0xe3, 0x5b, 0x1c, 0x51, 0xf8, 0x29, 0xa8, 0x06, 0xc4, 0xaa, 0x1e, + 0x54, 0x8e, 0x76, 0x3b, 0x4f, 0x4c, 0x0b, 0xd9, 0x79, 0x10, 0xf6, 0x09, 0x53, 0x5f, 0x7c, 0x76, + 0x89, 0xc3, 0x84, 0xba, 0xd5, 0x80, 0xc0, 0x57, 0x60, 0x5b, 0x13, 0x6d, 0x69, 0xf7, 0xa7, 0x4b, + 0xee, 0x03, 0x25, 0x02, 0xe6, 0x67, 0xfe, 0xda, 0x13, 0x8e, 0xc1, 0x4e, 0x96, 0xa6, 0xb5, 0x7d, + 0x50, 0x39, 0x6a, 0x74, 0xce, 0xed, 0xb2, 0x86, 0xd5, 0x55, 0xb2, 0x17, 0x93, 0x18, 0x68, 0xf0, + 0xb7, 0x2c, 0x89, 0x4a, 0x4c, 0xae, 0xe1, 0x87, 0x3f, 0x81, 0xed, 0xf4, 0x30, 0xac, 0x07, 0x5a, + 0xe7, 0x74, 0x43, 0x9d, 0x8b, 0x59, 0x4c, 0x57, 0xaa, 0xa4, 0x06, 0x57, 0x33, 0xc3, 0x18, 0x3c, + 0xcc, 0x3b, 0xd6, 0xda, 0xd1, 0x2a, 0x17, 0x1b, 0xaa, 0xf4, 0x0c, 0x7c, 0xa5, 0x52, 0x6e, 0x74, + 0x0b, 0x15, 0xd8, 0x03, 0x0d, 0x3e, 0x65, 0x54, 0x20, 0x2f, 0x91, 0x8a, 0x47, 0x54, 0x58, 0x1f, + 0xdc, 0xa3, 0xf2, 0x75, 0x8d, 0xe9, 0x19, 0x08, 0xfc, 0x11, 0x3c, 0x0b, 0x98, 0x17, 0x26, 0x84, + 0xa2, 0x20, 0xbd, 0x33, 0xb9, 0xaa, 0x44, 0x11, 0x55, 0x22, 0xf0, 0xac, 0x87, 0x9a, 0xb3, 0xb9, + 0xc4, 0x79, 0xcc, 0x79, 0x98, 0x31, 0x36, 0x0d, 0xc1, 0x09, 0xbb, 0x09, 0x5a, 0x9e, 0x69, 0x34, + 0xf4, 0xc1, 0x0b, 0x2f, 0x0c, 0xbc, 0x6b, 0xa4, 0xc6, 0x82, 0x27, 0xfe, 0x18, 0x85, 0x9c, 0x5f, + 0x8f, 0xb0, 0x77, 0x8d, 0xa6, 0x01, 0x23, 0x7c, 0x8a, 0x08, 0x9e, 0x49, 0xeb, 0xd1, 0xfa, 0x06, + 0x7b, 0xae, 0x69, 
0x2e, 0x32, 0x96, 0x53, 0x43, 0xf2, 0x83, 0xe6, 0xe8, 0xe3, 0x99, 0x84, 0x14, + 0x1c, 0x4e, 0x02, 0x3a, 0xbd, 0x5b, 0x07, 0xac, 0xd7, 0x79, 0x96, 0xb2, 0x94, 0xcb, 0x8c, 0x41, + 0x63, 0x92, 0xfa, 0x21, 0x49, 0x55, 0x3a, 0x5c, 0xa4, 0xb5, 0xab, 0x39, 0x1d, 0x7b, 0xed, 0xa8, + 0x5d, 0x3a, 0x52, 0x5b, 0x2b, 0x0e, 0x0c, 0x91, 0x5b, 0x9f, 0xcc, 0x6f, 0xe1, 0x2f, 0x15, 0x50, + 0xbf, 0x35, 0xc2, 0xac, 0x9a, 0xee, 0xaa, 0xe1, 0xa6, 0x5d, 0x65, 0x38, 0x4a, 0x7b, 0x78, 0xde, + 0xc1, 0xad, 0x79, 0x73, 0x3b, 0xf8, 0x7b, 0x05, 0x34, 0x97, 0xe6, 0xf9, 0x4d, 0xea, 0x75, 0x9d, + 0xfa, 0xe9, 0xfb, 0xa4, 0xee, 0xdc, 0xb0, 0x9e, 0xa5, 0xa4, 0x45, 0x15, 0x2c, 0x5c, 0x62, 0x81, + 0x67, 0xa0, 0x36, 0xf7, 0x18, 0x49, 0xab, 0x71, 0xb0, 0x75, 0xb4, 0xdb, 0xf9, 0xa4, 0x54, 0x3d, + 0x7b, 0xc0, 0xec, 0x0b, 0xec, 0x0f, 0x32, 0x88, 0xbb, 0xab, 0x8a, 0xb5, 0x84, 0xef, 0xc0, 0x93, + 0x78, 0xcc, 0x19, 0x45, 0x1e, 0x0e, 0x43, 0x44, 0x12, 0x81, 0xb3, 0x81, 0x4f, 0x3d, 0xce, 0x88, + 0xb4, 0x1e, 0xaf, 0x6f, 0x15, 0x4b, 0xe3, 0x7b, 0x38, 0x0c, 0xfb, 0x06, 0x3d, 0xc8, 0xc0, 0xf0, + 0x35, 0xd8, 0xc1, 0x71, 0x8c, 0x02, 0x62, 0xed, 0xdd, 0xe3, 0x46, 0x3e, 0xc0, 0x71, 0x7c, 0x42, + 0x9a, 0x7f, 0x54, 0x81, 0x55, 0x56, 0x16, 0xf8, 0x33, 0xf8, 0x70, 0xe9, 0x20, 0xf4, 0xc4, 0x6e, + 0x74, 0xfa, 0x6b, 0x1a, 0x62, 0x91, 0x53, 0x37, 0xc1, 0xe2, 0x47, 0x77, 0x6f, 0xb1, 0xee, 0xf0, + 0xb7, 0x0a, 0xb0, 0xca, 0x9e, 0x25, 0xfd, 0x24, 0x34, 0x3a, 0x6f, 0xd7, 0x48, 0xf7, 0xb1, 0xc2, + 0x7d, 0x8d, 0xce, 0xb2, 0xb9, 0x19, 0xd7, 0x2b, 0x2d, 0xee, 0x3e, 0x59, 0xf5, 0xb9, 0xf9, 0x5f, + 0x05, 0xd4, 0x6f, 0xdd, 0x15, 0xe8, 0x80, 0x3a, 0xa1, 0x57, 0x38, 0x09, 0x15, 0xd2, 0xb7, 0x46, + 0x97, 0x62, 0x55, 0x9d, 0xfb, 0x3c, 0x19, 0x85, 0x34, 0xab, 0x73, 0xcd, 0x40, 0xf4, 0x0e, 0x9e, + 0x83, 0xfd, 0x9c, 0xc2, 0x4b, 0x84, 0xa0, 0xcc, 0x9b, 0x21, 0x8f, 0x13, 0x6a, 0x5e, 0xbb, 0xbb, + 0x8f, 0xec, 0x23, 0x03, 0xed, 0x19, 0x64, 0x8f, 0x13, 0x0a, 0x07, 0xc0, 0xc2, 0xe1, 0x14, 0xcf, + 0x24, 0x4a, 0x24, 0x45, 0xb7, 0xe3, 0xdb, 0x5a, 0x3b, 0x45, 0xf7, 0x33, 0xec, 0xf7, 0x92, 0xf6, + 0xe7, 0xc2, 0x3c, 0xfe, 0xb5, 0x0a, 0x5e, 0x7a, 0x3c, 0x5a, 0x7f, 0xc7, 0x8e, 0xf7, 0x17, 0x2f, + 0xd9, 0x79, 0x2a, 0x72, 0x5e, 0x79, 0xf7, 0x9d, 0xc1, 0xfa, 0x3c, 0xc4, 0xcc, 0xb7, 0xb9, 0xf0, + 0x5b, 0x3e, 0x65, 0x3a, 0x84, 0xfc, 0x57, 0x24, 0x0e, 0xe4, 0x1d, 0xff, 0x97, 0x5f, 0x15, 0xab, + 0x3f, 0xab, 0x5b, 0x6f, 0x1c, 0xe7, 0xaf, 0xea, 0xe1, 0x9b, 0x8c, 0xd2, 0x21, 0xd2, 0xce, 0x96, + 0xe9, 0xea, 0xb2, 0x6d, 0xbb, 0xb9, 0xe7, 0x3f, 0xb9, 0xcf, 0xd0, 0x21, 0x72, 0x58, 0xf8, 0x0c, + 0x2f, 0xdb, 0xc3, 0xc2, 0xe7, 0xdf, 0xea, 0xcb, 0xcc, 0xd0, 0xed, 0x3a, 0x44, 0x76, 0xbb, 0x85, + 0x57, 0xb7, 0x7b, 0xd9, 0xee, 0x76, 0x0b, 0xbf, 0xd1, 0x8e, 0x0e, 0xf6, 0xf5, 0xff, 0x01, 0x00, + 0x00, 0xff, 0xff, 0xc5, 0x8f, 0x6f, 0xb6, 0x0b, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/custom_interest.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/custom_interest.pb.go new file mode 100644 index 0000000..05abc7d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/custom_interest.pb.go @@ -0,0 +1,222 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/custom_interest.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A custom interest. This is a list of users by interest. +type CustomInterest struct { + // The resource name of the custom interest. + // Custom interest resource names have the form: + // + // `customers/{customer_id}/customInterests/{custom_interest_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Id of the custom interest. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Status of this custom interest. Indicates whether the custom interest is + // enabled or removed. + Status enums.CustomInterestStatusEnum_CustomInterestStatus `protobuf:"varint,3,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.CustomInterestStatusEnum_CustomInterestStatus" json:"status,omitempty"` + // Name of the custom interest. It should be unique across the same custom + // affinity audience. + // This field is required for create operations. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // Type of the custom interest, CUSTOM_AFFINITY or CUSTOM_INTENT. + // By default the type is set to CUSTOM_AFFINITY. + Type enums.CustomInterestTypeEnum_CustomInterestType `protobuf:"varint,5,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.CustomInterestTypeEnum_CustomInterestType" json:"type,omitempty"` + // Description of this custom interest audience. + Description *wrappers.StringValue `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // List of custom interest members that this custom interest is composed of. + // Members can be added during CustomInterest creation. If members are + // presented in UPDATE operation, existing members will be overridden. 
+ Members []*CustomInterestMember `protobuf:"bytes,7,rep,name=members,proto3" json:"members,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterest) Reset() { *m = CustomInterest{} } +func (m *CustomInterest) String() string { return proto.CompactTextString(m) } +func (*CustomInterest) ProtoMessage() {} +func (*CustomInterest) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_3c6adccc8a716e4f, []int{0} +} +func (m *CustomInterest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterest.Unmarshal(m, b) +} +func (m *CustomInterest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterest.Marshal(b, m, deterministic) +} +func (dst *CustomInterest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterest.Merge(dst, src) +} +func (m *CustomInterest) XXX_Size() int { + return xxx_messageInfo_CustomInterest.Size(m) +} +func (m *CustomInterest) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterest.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterest proto.InternalMessageInfo + +func (m *CustomInterest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomInterest) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *CustomInterest) GetStatus() enums.CustomInterestStatusEnum_CustomInterestStatus { + if m != nil { + return m.Status + } + return enums.CustomInterestStatusEnum_UNSPECIFIED +} + +func (m *CustomInterest) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *CustomInterest) GetType() enums.CustomInterestTypeEnum_CustomInterestType { + if m != nil { + return m.Type + } + return enums.CustomInterestTypeEnum_UNSPECIFIED +} + +func (m *CustomInterest) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *CustomInterest) GetMembers() []*CustomInterestMember { + if m != nil { + return m.Members + } + return nil +} + +// A member of custom interest audience. A member can be a keyword or url. +// It is immutable, that is, it can only be created or removed but not changed. +type CustomInterestMember struct { + // The type of custom interest member, KEYWORD or URL. + MemberType enums.CustomInterestMemberTypeEnum_CustomInterestMemberType `protobuf:"varint,1,opt,name=member_type,json=memberType,proto3,enum=google.ads.googleads.v1.enums.CustomInterestMemberTypeEnum_CustomInterestMemberType" json:"member_type,omitempty"` + // Keyword text when member_type is KEYWORD or URL string when + // member_type is URL. 
+ Parameter *wrappers.StringValue `protobuf:"bytes,2,opt,name=parameter,proto3" json:"parameter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestMember) Reset() { *m = CustomInterestMember{} } +func (m *CustomInterestMember) String() string { return proto.CompactTextString(m) } +func (*CustomInterestMember) ProtoMessage() {} +func (*CustomInterestMember) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_3c6adccc8a716e4f, []int{1} +} +func (m *CustomInterestMember) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestMember.Unmarshal(m, b) +} +func (m *CustomInterestMember) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestMember.Marshal(b, m, deterministic) +} +func (dst *CustomInterestMember) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestMember.Merge(dst, src) +} +func (m *CustomInterestMember) XXX_Size() int { + return xxx_messageInfo_CustomInterestMember.Size(m) +} +func (m *CustomInterestMember) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestMember.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestMember proto.InternalMessageInfo + +func (m *CustomInterestMember) GetMemberType() enums.CustomInterestMemberTypeEnum_CustomInterestMemberType { + if m != nil { + return m.MemberType + } + return enums.CustomInterestMemberTypeEnum_UNSPECIFIED +} + +func (m *CustomInterestMember) GetParameter() *wrappers.StringValue { + if m != nil { + return m.Parameter + } + return nil +} + +func init() { + proto.RegisterType((*CustomInterest)(nil), "google.ads.googleads.v1.resources.CustomInterest") + proto.RegisterType((*CustomInterestMember)(nil), "google.ads.googleads.v1.resources.CustomInterestMember") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/custom_interest.proto", fileDescriptor_custom_interest_3c6adccc8a716e4f) +} + +var fileDescriptor_custom_interest_3c6adccc8a716e4f = []byte{ + // 520 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xd1, 0x6e, 0xd3, 0x30, + 0x14, 0x55, 0xd2, 0xd2, 0x69, 0x2e, 0xf4, 0xc1, 0xf0, 0x10, 0x8d, 0x09, 0x75, 0x43, 0x93, 0x2a, + 0x21, 0x39, 0xb4, 0x20, 0x86, 0x8c, 0x04, 0xca, 0x10, 0x1a, 0x43, 0x80, 0x46, 0x36, 0xf5, 0x01, + 0x45, 0xaa, 0xdc, 0xc6, 0x44, 0x91, 0x6a, 0x3b, 0xb2, 0x9d, 0xa1, 0xbd, 0xf1, 0x2d, 0x3c, 0xf2, + 0x27, 0xf0, 0x29, 0xf0, 0x13, 0x28, 0x76, 0x9c, 0x75, 0xac, 0xdd, 0xe8, 0xdb, 0xad, 0xef, 0x39, + 0xe7, 0x1e, 0x1f, 0xdf, 0x06, 0xec, 0x67, 0x42, 0x64, 0x73, 0x1a, 0x92, 0x54, 0x85, 0xb6, 0xac, + 0xaa, 0xb3, 0x61, 0x28, 0xa9, 0x12, 0xa5, 0x9c, 0x51, 0x15, 0xce, 0x4a, 0xa5, 0x05, 0x9b, 0xe4, + 0x5c, 0x53, 0x49, 0x95, 0x46, 0x85, 0x14, 0x5a, 0xc0, 0x1d, 0x8b, 0x46, 0x24, 0x55, 0xa8, 0x21, + 0xa2, 0xb3, 0x21, 0x6a, 0x88, 0x5b, 0xaf, 0x56, 0x69, 0x53, 0x5e, 0xb2, 0x2b, 0xba, 0x13, 0x46, + 0xd9, 0x94, 0xca, 0x89, 0x3e, 0x2f, 0xa8, 0x9d, 0xb1, 0x85, 0xd7, 0x13, 0x50, 0x9a, 0xe8, 0x52, + 0xd5, 0xdc, 0xe7, 0xeb, 0x71, 0x17, 0xa6, 0x3e, 0xa8, 0x99, 0xe6, 0xd7, 0xb4, 0xfc, 0x12, 0x7e, + 0x95, 0xa4, 0x28, 0xa8, 0x74, 0xca, 0xdb, 0x4e, 0xb9, 0xc8, 0x43, 0xc2, 0xb9, 0xd0, 0x44, 0xe7, + 0x82, 0xd7, 0xdd, 0xdd, 0x3f, 0x2d, 0xd0, 0x7b, 0x6d, 0xc4, 0x8f, 0x6a, 0x6d, 0xf8, 0x10, 0xdc, + 0x71, 0xa1, 0x4c, 0x38, 0x61, 0x34, 0xf0, 0xfa, 0xde, 0x60, 0x33, 0xbe, 0xed, 0x0e, 0x3f, 0x12, + 0x46, 0xe1, 0x23, 0xe0, 0xe7, 0x69, 0xe0, 0xf7, 0xbd, 0x41, 0x77, 0x74, 
0xbf, 0x4e, 0x14, 0x39, + 0x0b, 0xe8, 0x88, 0xeb, 0x67, 0x4f, 0xc7, 0x64, 0x5e, 0xd2, 0xd8, 0xcf, 0x53, 0x98, 0x82, 0x8e, + 0xbd, 0x6c, 0xd0, 0xea, 0x7b, 0x83, 0xde, 0xe8, 0x3d, 0x5a, 0xf5, 0x1a, 0xe6, 0xb6, 0xe8, 0xb2, + 0xa1, 0x13, 0x43, 0x7d, 0xc3, 0x4b, 0xb6, 0xb4, 0x11, 0xd7, 0xda, 0xf0, 0x31, 0x68, 0x1b, 0xbb, + 0x6d, 0x63, 0x6a, 0xfb, 0x8a, 0xa9, 0x13, 0x2d, 0x73, 0x9e, 0x59, 0x57, 0x06, 0x09, 0x13, 0xd0, + 0xae, 0x82, 0x0c, 0x6e, 0x19, 0x57, 0x6f, 0xd7, 0x72, 0x75, 0x7a, 0x5e, 0xd0, 0x25, 0x9e, 0xaa, + 0xe3, 0xd8, 0xa8, 0xc2, 0x97, 0xa0, 0x9b, 0x52, 0x35, 0x93, 0x79, 0x51, 0x05, 0x1e, 0x74, 0xfe, + 0xc3, 0xd6, 0x22, 0x01, 0x7e, 0x02, 0x1b, 0x76, 0xc7, 0x54, 0xb0, 0xd1, 0x6f, 0x0d, 0xba, 0xa3, + 0x7d, 0x74, 0xe3, 0x12, 0xff, 0xe3, 0xe6, 0x83, 0xe1, 0xc7, 0x4e, 0x67, 0xf7, 0xa7, 0x07, 0xee, + 0x2d, 0x43, 0xc0, 0x12, 0x74, 0x17, 0xf6, 0xd9, 0xbc, 0x78, 0x6f, 0x74, 0xba, 0x56, 0x20, 0x56, + 0x69, 0x45, 0x2c, 0x17, 0xcd, 0x18, 0xb0, 0xa6, 0x86, 0x18, 0x6c, 0x16, 0x44, 0x12, 0x46, 0x35, + 0x95, 0xf5, 0x32, 0x5d, 0x1f, 0xd0, 0x05, 0xfc, 0xe0, 0x9b, 0x0f, 0xf6, 0x66, 0x82, 0xdd, 0x9c, + 0xc9, 0xc1, 0xdd, 0xcb, 0x5e, 0x8e, 0x2b, 0xe1, 0x63, 0xef, 0xf3, 0xbb, 0x9a, 0x99, 0x89, 0x39, + 0xe1, 0x19, 0x12, 0x32, 0x0b, 0x33, 0xca, 0xcd, 0x58, 0xf7, 0x17, 0x2c, 0x72, 0x75, 0xcd, 0xa7, + 0xe6, 0x45, 0x53, 0x7d, 0xf7, 0x5b, 0x87, 0x51, 0xf4, 0xc3, 0xdf, 0x39, 0xb4, 0x92, 0x51, 0xaa, + 0x90, 0x2d, 0xab, 0x6a, 0x3c, 0x44, 0xb1, 0x43, 0xfe, 0x72, 0x98, 0x24, 0x4a, 0x55, 0xd2, 0x60, + 0x92, 0xf1, 0x30, 0x69, 0x30, 0xbf, 0xfd, 0x3d, 0xdb, 0xc0, 0x38, 0x4a, 0x15, 0xc6, 0x0d, 0x0a, + 0xe3, 0xf1, 0x10, 0xe3, 0x06, 0x37, 0xed, 0x18, 0xb3, 0x4f, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, + 0xf9, 0xb0, 0xf7, 0x99, 0x16, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer.pb.go new file mode 100644 index 0000000..f01a92b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer.pb.go @@ -0,0 +1,422 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A customer. +type Customer struct { + // The resource name of the customer. + // Customer resource names have the form: + // + // `customers/{customer_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the customer. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // Optional, non-unique descriptive name of the customer. 
+ DescriptiveName *wrappers.StringValue `protobuf:"bytes,4,opt,name=descriptive_name,json=descriptiveName,proto3" json:"descriptive_name,omitempty"` + // The currency in which the account operates. + // A subset of the currency codes from the ISO 4217 standard is + // supported. + CurrencyCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + // The local timezone ID of the customer. + TimeZone *wrappers.StringValue `protobuf:"bytes,6,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // The URL template for constructing a tracking URL out of parameters. + TrackingUrlTemplate *wrappers.StringValue `protobuf:"bytes,7,opt,name=tracking_url_template,json=trackingUrlTemplate,proto3" json:"tracking_url_template,omitempty"` + // The URL template for appending params to the final URL + FinalUrlSuffix *wrappers.StringValue `protobuf:"bytes,11,opt,name=final_url_suffix,json=finalUrlSuffix,proto3" json:"final_url_suffix,omitempty"` + // Whether auto-tagging is enabled for the customer. + AutoTaggingEnabled *wrappers.BoolValue `protobuf:"bytes,8,opt,name=auto_tagging_enabled,json=autoTaggingEnabled,proto3" json:"auto_tagging_enabled,omitempty"` + // Whether the Customer has a Partners program badge. If the Customer is not + // associated with the Partners program, this will be false. For more + // information, see https://support.google.com/partners/answer/3125774. + HasPartnersBadge *wrappers.BoolValue `protobuf:"bytes,9,opt,name=has_partners_badge,json=hasPartnersBadge,proto3" json:"has_partners_badge,omitempty"` + // Whether the customer is a manager. + Manager *wrappers.BoolValue `protobuf:"bytes,12,opt,name=manager,proto3" json:"manager,omitempty"` + // Whether the customer is a test account. + TestAccount *wrappers.BoolValue `protobuf:"bytes,13,opt,name=test_account,json=testAccount,proto3" json:"test_account,omitempty"` + // Call reporting setting for a customer. + CallReportingSetting *CallReportingSetting `protobuf:"bytes,10,opt,name=call_reporting_setting,json=callReportingSetting,proto3" json:"call_reporting_setting,omitempty"` + // Conversion tracking setting for a customer. + ConversionTrackingSetting *ConversionTrackingSetting `protobuf:"bytes,14,opt,name=conversion_tracking_setting,json=conversionTrackingSetting,proto3" json:"conversion_tracking_setting,omitempty"` + // Remarketing setting for a customer. + RemarketingSetting *RemarketingSetting `protobuf:"bytes,15,opt,name=remarketing_setting,json=remarketingSetting,proto3" json:"remarketing_setting,omitempty"` + // Reasons why the customer is not eligible to use PaymentMode.CONVERSIONS. If + // the list is empty, the customer is eligible. This field is read-only. 
+ PayPerConversionEligibilityFailureReasons []enums.CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason `protobuf:"varint,16,rep,packed,name=pay_per_conversion_eligibility_failure_reasons,json=payPerConversionEligibilityFailureReasons,proto3,enum=google.ads.googleads.v1.enums.CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason" json:"pay_per_conversion_eligibility_failure_reasons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Customer) Reset() { *m = Customer{} } +func (m *Customer) String() string { return proto.CompactTextString(m) } +func (*Customer) ProtoMessage() {} +func (*Customer) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_df9b425a0de8248d, []int{0} +} +func (m *Customer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Customer.Unmarshal(m, b) +} +func (m *Customer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Customer.Marshal(b, m, deterministic) +} +func (dst *Customer) XXX_Merge(src proto.Message) { + xxx_messageInfo_Customer.Merge(dst, src) +} +func (m *Customer) XXX_Size() int { + return xxx_messageInfo_Customer.Size(m) +} +func (m *Customer) XXX_DiscardUnknown() { + xxx_messageInfo_Customer.DiscardUnknown(m) +} + +var xxx_messageInfo_Customer proto.InternalMessageInfo + +func (m *Customer) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Customer) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Customer) GetDescriptiveName() *wrappers.StringValue { + if m != nil { + return m.DescriptiveName + } + return nil +} + +func (m *Customer) GetCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.CurrencyCode + } + return nil +} + +func (m *Customer) GetTimeZone() *wrappers.StringValue { + if m != nil { + return m.TimeZone + } + return nil +} + +func (m *Customer) GetTrackingUrlTemplate() *wrappers.StringValue { + if m != nil { + return m.TrackingUrlTemplate + } + return nil +} + +func (m *Customer) GetFinalUrlSuffix() *wrappers.StringValue { + if m != nil { + return m.FinalUrlSuffix + } + return nil +} + +func (m *Customer) GetAutoTaggingEnabled() *wrappers.BoolValue { + if m != nil { + return m.AutoTaggingEnabled + } + return nil +} + +func (m *Customer) GetHasPartnersBadge() *wrappers.BoolValue { + if m != nil { + return m.HasPartnersBadge + } + return nil +} + +func (m *Customer) GetManager() *wrappers.BoolValue { + if m != nil { + return m.Manager + } + return nil +} + +func (m *Customer) GetTestAccount() *wrappers.BoolValue { + if m != nil { + return m.TestAccount + } + return nil +} + +func (m *Customer) GetCallReportingSetting() *CallReportingSetting { + if m != nil { + return m.CallReportingSetting + } + return nil +} + +func (m *Customer) GetConversionTrackingSetting() *ConversionTrackingSetting { + if m != nil { + return m.ConversionTrackingSetting + } + return nil +} + +func (m *Customer) GetRemarketingSetting() *RemarketingSetting { + if m != nil { + return m.RemarketingSetting + } + return nil +} + +func (m *Customer) GetPayPerConversionEligibilityFailureReasons() []enums.CustomerPayPerConversionEligibilityFailureReasonEnum_CustomerPayPerConversionEligibilityFailureReason { + if m != nil { + return m.PayPerConversionEligibilityFailureReasons + } + return nil +} + +// Call reporting setting for a customer. 
+type CallReportingSetting struct { + // Enable reporting of phone call events by redirecting them via Google + // System. + CallReportingEnabled *wrappers.BoolValue `protobuf:"bytes,1,opt,name=call_reporting_enabled,json=callReportingEnabled,proto3" json:"call_reporting_enabled,omitempty"` + // Whether to enable call conversion reporting. + CallConversionReportingEnabled *wrappers.BoolValue `protobuf:"bytes,2,opt,name=call_conversion_reporting_enabled,json=callConversionReportingEnabled,proto3" json:"call_conversion_reporting_enabled,omitempty"` + // Customer-level call conversion action to attribute a call conversion to. + // If not set a default conversion action is used. Only in effect when + // call_conversion_reporting_enabled is set to true. + CallConversionAction *wrappers.StringValue `protobuf:"bytes,9,opt,name=call_conversion_action,json=callConversionAction,proto3" json:"call_conversion_action,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallReportingSetting) Reset() { *m = CallReportingSetting{} } +func (m *CallReportingSetting) String() string { return proto.CompactTextString(m) } +func (*CallReportingSetting) ProtoMessage() {} +func (*CallReportingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_df9b425a0de8248d, []int{1} +} +func (m *CallReportingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallReportingSetting.Unmarshal(m, b) +} +func (m *CallReportingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallReportingSetting.Marshal(b, m, deterministic) +} +func (dst *CallReportingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallReportingSetting.Merge(dst, src) +} +func (m *CallReportingSetting) XXX_Size() int { + return xxx_messageInfo_CallReportingSetting.Size(m) +} +func (m *CallReportingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_CallReportingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_CallReportingSetting proto.InternalMessageInfo + +func (m *CallReportingSetting) GetCallReportingEnabled() *wrappers.BoolValue { + if m != nil { + return m.CallReportingEnabled + } + return nil +} + +func (m *CallReportingSetting) GetCallConversionReportingEnabled() *wrappers.BoolValue { + if m != nil { + return m.CallConversionReportingEnabled + } + return nil +} + +func (m *CallReportingSetting) GetCallConversionAction() *wrappers.StringValue { + if m != nil { + return m.CallConversionAction + } + return nil +} + +// A collection of customer-wide settings related to Google Ads Conversion +// Tracking. +type ConversionTrackingSetting struct { + // The conversion tracking id used for this account. This id is automatically + // assigned after any conversion tracking feature is used. If the customer + // doesn't use conversion tracking, this is 0. This field is read-only. + ConversionTrackingId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=conversion_tracking_id,json=conversionTrackingId,proto3" json:"conversion_tracking_id,omitempty"` + // The conversion tracking id of the customer's manager. This is set when the + // customer is opted into cross account conversion tracking, and it overrides + // conversion_tracking_id. This field can only be managed through the Google + // Ads UI. This field is read-only. 
+ CrossAccountConversionTrackingId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=cross_account_conversion_tracking_id,json=crossAccountConversionTrackingId,proto3" json:"cross_account_conversion_tracking_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionTrackingSetting) Reset() { *m = ConversionTrackingSetting{} } +func (m *ConversionTrackingSetting) String() string { return proto.CompactTextString(m) } +func (*ConversionTrackingSetting) ProtoMessage() {} +func (*ConversionTrackingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_df9b425a0de8248d, []int{2} +} +func (m *ConversionTrackingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionTrackingSetting.Unmarshal(m, b) +} +func (m *ConversionTrackingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionTrackingSetting.Marshal(b, m, deterministic) +} +func (dst *ConversionTrackingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionTrackingSetting.Merge(dst, src) +} +func (m *ConversionTrackingSetting) XXX_Size() int { + return xxx_messageInfo_ConversionTrackingSetting.Size(m) +} +func (m *ConversionTrackingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionTrackingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionTrackingSetting proto.InternalMessageInfo + +func (m *ConversionTrackingSetting) GetConversionTrackingId() *wrappers.Int64Value { + if m != nil { + return m.ConversionTrackingId + } + return nil +} + +func (m *ConversionTrackingSetting) GetCrossAccountConversionTrackingId() *wrappers.Int64Value { + if m != nil { + return m.CrossAccountConversionTrackingId + } + return nil +} + +// Remarketing setting for a customer. +type RemarketingSetting struct { + // The Google global site tag. 
+ GoogleGlobalSiteTag *wrappers.StringValue `protobuf:"bytes,1,opt,name=google_global_site_tag,json=googleGlobalSiteTag,proto3" json:"google_global_site_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemarketingSetting) Reset() { *m = RemarketingSetting{} } +func (m *RemarketingSetting) String() string { return proto.CompactTextString(m) } +func (*RemarketingSetting) ProtoMessage() {} +func (*RemarketingSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_df9b425a0de8248d, []int{3} +} +func (m *RemarketingSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemarketingSetting.Unmarshal(m, b) +} +func (m *RemarketingSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemarketingSetting.Marshal(b, m, deterministic) +} +func (dst *RemarketingSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemarketingSetting.Merge(dst, src) +} +func (m *RemarketingSetting) XXX_Size() int { + return xxx_messageInfo_RemarketingSetting.Size(m) +} +func (m *RemarketingSetting) XXX_DiscardUnknown() { + xxx_messageInfo_RemarketingSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_RemarketingSetting proto.InternalMessageInfo + +func (m *RemarketingSetting) GetGoogleGlobalSiteTag() *wrappers.StringValue { + if m != nil { + return m.GoogleGlobalSiteTag + } + return nil +} + +func init() { + proto.RegisterType((*Customer)(nil), "google.ads.googleads.v1.resources.Customer") + proto.RegisterType((*CallReportingSetting)(nil), "google.ads.googleads.v1.resources.CallReportingSetting") + proto.RegisterType((*ConversionTrackingSetting)(nil), "google.ads.googleads.v1.resources.ConversionTrackingSetting") + proto.RegisterType((*RemarketingSetting)(nil), "google.ads.googleads.v1.resources.RemarketingSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer.proto", fileDescriptor_customer_df9b425a0de8248d) +} + +var fileDescriptor_customer_df9b425a0de8248d = []byte{ + // 879 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x96, 0xdf, 0x6e, 0xe4, 0x34, + 0x14, 0xc6, 0x95, 0x29, 0xec, 0xb6, 0xee, 0x9f, 0xad, 0xbc, 0xc3, 0x2a, 0xdb, 0x5d, 0xad, 0xda, + 0xc2, 0x4a, 0x45, 0x48, 0x19, 0x66, 0x59, 0x40, 0x0c, 0x70, 0x31, 0xad, 0xba, 0x65, 0x11, 0x42, + 0x43, 0xda, 0xed, 0x45, 0x55, 0xc9, 0xf2, 0x24, 0x67, 0xb2, 0x56, 0x1d, 0x3b, 0xb2, 0x9d, 0x42, + 0x11, 0x97, 0x5c, 0xf1, 0x18, 0x70, 0xc7, 0xa3, 0xf0, 0x0a, 0xbc, 0x00, 0xe2, 0x11, 0xb8, 0x42, + 0x71, 0xe2, 0x4c, 0xab, 0xcc, 0x34, 0xd3, 0xab, 0xf1, 0xd8, 0xe7, 0xfb, 0x7d, 0xb6, 0x73, 0xce, + 0x49, 0xd0, 0xc7, 0x89, 0x94, 0x09, 0x87, 0x1e, 0x8d, 0x75, 0xaf, 0x1c, 0x16, 0xa3, 0xcb, 0x7e, + 0x4f, 0x81, 0x96, 0xb9, 0x8a, 0x40, 0xf7, 0xa2, 0x5c, 0x1b, 0x99, 0x82, 0x0a, 0x32, 0x25, 0x8d, + 0xc4, 0x3b, 0x65, 0x58, 0x40, 0x63, 0x1d, 0xd4, 0x8a, 0xe0, 0xb2, 0x1f, 0xd4, 0x8a, 0xad, 0xb3, + 0x79, 0x50, 0x10, 0x79, 0x3a, 0x05, 0x92, 0x8c, 0x5e, 0x91, 0x0c, 0x14, 0x89, 0xa4, 0xb8, 0x04, + 0xa5, 0x99, 0x14, 0x04, 0x38, 0x4b, 0xd8, 0x98, 0x71, 0x66, 0xae, 0xc8, 0x84, 0x32, 0x9e, 0x2b, + 0x20, 0x0a, 0xa8, 0x96, 0xa2, 0xb4, 0xdf, 0x7a, 0x56, 0xb1, 0xed, 0xbf, 0x71, 0x3e, 0xe9, 0xfd, + 0xa8, 0x68, 0x96, 0x81, 0xd2, 0xd5, 0xfa, 0x53, 0xe7, 0x9d, 0xb1, 0x1e, 0x15, 0x42, 0x1a, 0x6a, + 0x98, 0x14, 0xd5, 0xea, 0xee, 0x6f, 0x08, 0x2d, 0x1f, 0x54, 0xf6, 0xf8, 0x7d, 0xb4, 0xee, 0xf6, + 0x4c, 0x04, 0x4d, 0xc1, 0xf7, 0xb6, 0xbd, 0xbd, 0x95, 
0x70, 0xcd, 0x4d, 0x7e, 0x4f, 0x53, 0xc0, + 0x1f, 0xa1, 0x0e, 0x8b, 0xfd, 0xa5, 0x6d, 0x6f, 0x6f, 0xf5, 0xc5, 0x93, 0xea, 0xc0, 0x81, 0x33, + 0x0f, 0x5e, 0x0b, 0xf3, 0xd9, 0xcb, 0x53, 0xca, 0x73, 0x08, 0x3b, 0x2c, 0xc6, 0x47, 0x68, 0x33, + 0x06, 0x1d, 0x29, 0x96, 0x19, 0x76, 0x59, 0x41, 0xdf, 0xb1, 0xd2, 0xa7, 0x0d, 0xe9, 0xb1, 0x51, + 0x4c, 0x24, 0xa5, 0xf6, 0xc1, 0x35, 0x95, 0x75, 0x1d, 0xa2, 0xf5, 0x28, 0x57, 0x0a, 0x44, 0x74, + 0x45, 0x22, 0x19, 0x83, 0xff, 0xee, 0x02, 0x94, 0x35, 0x27, 0x39, 0x90, 0x31, 0xe0, 0x2f, 0xd0, + 0x8a, 0x61, 0x29, 0x90, 0x9f, 0xa5, 0x00, 0xff, 0xde, 0x02, 0xf2, 0xe5, 0x22, 0xfc, 0x4c, 0x0a, + 0xc0, 0x23, 0xf4, 0x9e, 0x51, 0x34, 0xba, 0x60, 0x22, 0x21, 0xb9, 0xe2, 0xc4, 0x40, 0x9a, 0x71, + 0x6a, 0xc0, 0xbf, 0xbf, 0x00, 0xe6, 0xa1, 0x93, 0xbe, 0x51, 0xfc, 0xa4, 0x12, 0xe2, 0x57, 0x68, + 0x73, 0xc2, 0x04, 0xe5, 0x16, 0xa7, 0xf3, 0xc9, 0x84, 0xfd, 0xe4, 0xaf, 0x2e, 0x00, 0xdb, 0xb0, + 0xaa, 0x37, 0x8a, 0x1f, 0x5b, 0x0d, 0xfe, 0x0e, 0x75, 0x69, 0x6e, 0x24, 0x31, 0x34, 0x49, 0x8a, + 0xdd, 0x81, 0xa0, 0x63, 0x0e, 0xb1, 0xbf, 0x6c, 0x59, 0x5b, 0x0d, 0xd6, 0xbe, 0x94, 0xbc, 0x24, + 0xe1, 0x42, 0x77, 0x52, 0xca, 0x0e, 0x4b, 0x15, 0xfe, 0x06, 0xe1, 0xb7, 0x54, 0x93, 0x8c, 0x2a, + 0x23, 0x40, 0x69, 0x32, 0xa6, 0x71, 0x02, 0xfe, 0x4a, 0x2b, 0x6b, 0xf3, 0x2d, 0xd5, 0xa3, 0x4a, + 0xb4, 0x5f, 0x68, 0xf0, 0x4b, 0x74, 0x3f, 0xa5, 0x82, 0x26, 0xa0, 0xfc, 0xb5, 0x56, 0xb9, 0x0b, + 0xc5, 0x5f, 0xa3, 0x35, 0x03, 0xda, 0x10, 0x1a, 0x45, 0x32, 0x17, 0xc6, 0x5f, 0x6f, 0x95, 0xae, + 0x16, 0xf1, 0xc3, 0x32, 0x1c, 0xa7, 0xe8, 0x51, 0x44, 0x39, 0x27, 0x0a, 0x32, 0xa9, 0x4c, 0x71, + 0x1d, 0x1a, 0x4c, 0xf1, 0xeb, 0x23, 0x0b, 0xfa, 0x3c, 0x68, 0x2d, 0xd5, 0xe0, 0x80, 0x72, 0x1e, + 0x3a, 0xfd, 0x71, 0x29, 0x0f, 0xbb, 0xd1, 0x8c, 0x59, 0xfc, 0x0b, 0x7a, 0x72, 0xad, 0x50, 0xeb, + 0x04, 0x71, 0x9e, 0x1b, 0xd6, 0xf3, 0xab, 0x45, 0x3c, 0x6b, 0xca, 0x49, 0x05, 0x71, 0xc6, 0x8f, + 0xa3, 0x79, 0x4b, 0x78, 0x82, 0x1e, 0x2a, 0x48, 0xa9, 0xba, 0x80, 0x1b, 0x27, 0x7d, 0x60, 0x5d, + 0x3f, 0x5d, 0xc0, 0x35, 0x9c, 0xaa, 0x9d, 0x1d, 0x56, 0x8d, 0x39, 0xfc, 0x8f, 0x87, 0x82, 0x3b, + 0xf5, 0x25, 0xed, 0x6f, 0x6e, 0x2f, 0xed, 0x6d, 0xbc, 0xf8, 0xd5, 0x9b, 0xbb, 0x09, 0xdb, 0xf6, + 0x02, 0xd7, 0x77, 0x46, 0xf4, 0x6a, 0x04, 0x6a, 0x7a, 0x09, 0x87, 0x53, 0xf4, 0xab, 0x92, 0x1c, + 0x5a, 0xf0, 0xa1, 0xc8, 0xd3, 0x3b, 0x8b, 0xc2, 0x0f, 0xb3, 0x05, 0x23, 0xf5, 0xee, 0x1f, 0x1d, + 0xd4, 0x9d, 0xf5, 0xfc, 0xf1, 0xa8, 0x91, 0x58, 0xae, 0xce, 0xbc, 0xd6, 0x0c, 0xbd, 0x99, 0x3b, + 0xae, 0xd2, 0x00, 0xed, 0x58, 0xe2, 0xb5, 0x1b, 0x6d, 0xc2, 0x3b, 0xad, 0xf0, 0x67, 0x05, 0x64, + 0x7a, 0xb4, 0x86, 0x4d, 0x58, 0x6d, 0xfc, 0x9a, 0x0d, 0x8d, 0x8a, 0xfe, 0x5f, 0x15, 0xf5, 0xed, + 0xcd, 0xa6, 0x7b, 0x93, 0x3e, 0xb4, 0xca, 0xdd, 0xbf, 0x3d, 0xf4, 0x78, 0x6e, 0xc6, 0xe2, 0x1f, + 0xd0, 0xa3, 0x59, 0x45, 0xc1, 0xdc, 0x55, 0xdd, 0xfa, 0xca, 0xe8, 0x36, 0xd3, 0xfd, 0x75, 0x8c, + 0x2f, 0xd0, 0x07, 0x91, 0x92, 0x5a, 0xbb, 0xb6, 0x40, 0xe6, 0x18, 0x74, 0xda, 0x0d, 0xb6, 0x2d, + 0xa8, 0xea, 0x17, 0x07, 0x33, 0xcc, 0x76, 0x13, 0x84, 0x9b, 0x85, 0x51, 0x9c, 0xaa, 0xa4, 0x92, + 0x84, 0xcb, 0x31, 0xe5, 0x44, 0x33, 0x03, 0x45, 0xd3, 0xad, 0x4e, 0xd5, 0xf2, 0x06, 0x28, 0x17, + 0x8f, 0xac, 0xf4, 0x98, 0x19, 0x38, 0xa1, 0xc9, 0xfe, 0x7f, 0x1e, 0x7a, 0x1e, 0xc9, 0xb4, 0xbd, + 0x50, 0xf7, 0xd7, 0xeb, 0x9c, 0x2f, 0xe0, 0x23, 0xef, 0xec, 0xdb, 0x4a, 0x93, 0x48, 0x4e, 0x45, + 0x12, 0x48, 0x95, 0xf4, 0x12, 0x10, 0xd6, 0xda, 0x7d, 0x5e, 0x64, 0x4c, 0xdf, 0xf2, 0x09, 0xf3, + 0x65, 0x3d, 0xfa, 0xbd, 0xb3, 0x74, 0x34, 0x1c, 0xfe, 0xd9, 0xd9, 0x39, 0x2a, 
0x91, 0xc3, 0x58, + 0x07, 0xe5, 0xb0, 0x18, 0x9d, 0xf6, 0x83, 0xd0, 0x45, 0xfe, 0xe5, 0x62, 0xce, 0x87, 0xb1, 0x3e, + 0xaf, 0x63, 0xce, 0x4f, 0xfb, 0xe7, 0x75, 0xcc, 0xbf, 0x9d, 0xe7, 0xe5, 0xc2, 0x60, 0x30, 0x8c, + 0xf5, 0x60, 0x50, 0x47, 0x0d, 0x06, 0xa7, 0xfd, 0xc1, 0xa0, 0x8e, 0x1b, 0xdf, 0xb3, 0x9b, 0xfd, + 0xe4, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0x65, 0x97, 0x1c, 0xdc, 0x6e, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client.pb.go new file mode 100644 index 0000000..f6a2d6e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_client.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A link between the given customer and a client customer. CustomerClients only +// exist for manager customers. All direct and indirect client customers are +// included, as well as the manager itself. +type CustomerClient struct { + // The resource name of the customer client. + // CustomerClient resource names have the form: + // `customers/{customer_id}/customerClients/{client_customer_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The resource name of the client-customer which is linked to + // the given customer. Read only. + ClientCustomer *wrappers.StringValue `protobuf:"bytes,3,opt,name=client_customer,json=clientCustomer,proto3" json:"client_customer,omitempty"` + // Specifies whether this is a hidden account. Learn more about hidden + // accounts here. Read + // only. + Hidden *wrappers.BoolValue `protobuf:"bytes,4,opt,name=hidden,proto3" json:"hidden,omitempty"` + // Distance between given customer and client. For self link, the level value + // will be 0. Read only. 
+ Level *wrappers.Int64Value `protobuf:"bytes,5,opt,name=level,proto3" json:"level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerClient) Reset() { *m = CustomerClient{} } +func (m *CustomerClient) String() string { return proto.CompactTextString(m) } +func (*CustomerClient) ProtoMessage() {} +func (*CustomerClient) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_509fb76a6f1ab219, []int{0} +} +func (m *CustomerClient) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerClient.Unmarshal(m, b) +} +func (m *CustomerClient) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerClient.Marshal(b, m, deterministic) +} +func (dst *CustomerClient) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerClient.Merge(dst, src) +} +func (m *CustomerClient) XXX_Size() int { + return xxx_messageInfo_CustomerClient.Size(m) +} +func (m *CustomerClient) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerClient.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerClient proto.InternalMessageInfo + +func (m *CustomerClient) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerClient) GetClientCustomer() *wrappers.StringValue { + if m != nil { + return m.ClientCustomer + } + return nil +} + +func (m *CustomerClient) GetHidden() *wrappers.BoolValue { + if m != nil { + return m.Hidden + } + return nil +} + +func (m *CustomerClient) GetLevel() *wrappers.Int64Value { + if m != nil { + return m.Level + } + return nil +} + +func init() { + proto.RegisterType((*CustomerClient)(nil), "google.ads.googleads.v1.resources.CustomerClient") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_client.proto", fileDescriptor_customer_client_509fb76a6f1ab219) +} + +var fileDescriptor_customer_client_509fb76a6f1ab219 = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x41, 0x4b, 0xfb, 0x30, + 0x00, 0xc5, 0x69, 0xf7, 0xdf, 0xe0, 0x5f, 0x75, 0x42, 0xbd, 0x94, 0x39, 0x64, 0x53, 0x06, 0x3b, + 0xa5, 0x74, 0x8a, 0x42, 0x3c, 0x75, 0x43, 0x86, 0x1e, 0x64, 0x4c, 0xe8, 0x41, 0x0a, 0x23, 0x6b, + 0x63, 0x2d, 0xa4, 0x49, 0x49, 0xd2, 0x79, 0xf5, 0xb3, 0x78, 0xf4, 0xa3, 0xf8, 0x35, 0xbc, 0xf9, + 0x29, 0x64, 0x4d, 0x13, 0x90, 0x81, 0xde, 0x1e, 0xc9, 0xfb, 0xbd, 0xf7, 0x48, 0x9c, 0xab, 0x8c, + 0xb1, 0x8c, 0x60, 0x1f, 0xa5, 0xc2, 0x57, 0x72, 0xab, 0x36, 0x81, 0xcf, 0xb1, 0x60, 0x15, 0x4f, + 0xb0, 0xf0, 0x93, 0x4a, 0x48, 0x56, 0x60, 0xbe, 0x4a, 0x48, 0x8e, 0xa9, 0x04, 0x25, 0x67, 0x92, + 0xb9, 0x43, 0xe5, 0x06, 0x28, 0x15, 0xc0, 0x80, 0x60, 0x13, 0x00, 0x03, 0xf6, 0x4e, 0x9a, 0xec, + 0x1a, 0x58, 0x57, 0x4f, 0xfe, 0x0b, 0x47, 0x65, 0x89, 0xb9, 0x50, 0x11, 0xbd, 0xbe, 0xee, 0x2e, + 0x73, 0x1f, 0x51, 0xca, 0x24, 0x92, 0x39, 0xa3, 0xcd, 0xed, 0xe9, 0xa7, 0xe5, 0x74, 0x67, 0x4d, + 0xf5, 0xac, 0x6e, 0x76, 0xcf, 0x9c, 0x03, 0x9d, 0xbe, 0xa2, 0xa8, 0xc0, 0x9e, 0x35, 0xb0, 0xc6, + 0xff, 0x97, 0xfb, 0xfa, 0xf0, 0x1e, 0x15, 0xd8, 0xbd, 0x71, 0x0e, 0xd5, 0xd0, 0x95, 0x1e, 0xee, + 0xb5, 0x06, 0xd6, 0x78, 0x6f, 0xd2, 0x6f, 0x76, 0x02, 0xbd, 0x07, 0x3c, 0x48, 0x9e, 0xd3, 0x2c, + 0x42, 0xa4, 0xc2, 0xcb, 0xae, 0x82, 0x74, 0xa3, 0x3b, 0x71, 0x3a, 0xcf, 0x79, 0x9a, 0x62, 0xea, + 0xfd, 0xab, 0xe9, 0xde, 0x0e, 0x3d, 0x65, 0x8c, 0x28, 0xb6, 0x71, 0xba, 0x81, 0xd3, 0x26, 0x78, + 0x83, 0x89, 0xd7, 0xae, 0x91, 0xe3, 0x1d, 0xe4, 0x96, 
0xca, 0xcb, 0x0b, 0xc5, 0x28, 0xe7, 0xf4, + 0xd5, 0x76, 0x46, 0x09, 0x2b, 0xc0, 0x9f, 0xaf, 0x39, 0x3d, 0xfa, 0xf9, 0x18, 0x8b, 0x6d, 0xe6, + 0xc2, 0x7a, 0xbc, 0x6b, 0xc8, 0x8c, 0x11, 0x44, 0x33, 0xc0, 0x78, 0xe6, 0x67, 0x98, 0xd6, 0x8d, + 0xfa, 0x43, 0xcb, 0x5c, 0xfc, 0xf2, 0xbf, 0xd7, 0x46, 0xbd, 0xd9, 0xad, 0x79, 0x18, 0xbe, 0xdb, + 0xc3, 0xb9, 0x8a, 0x0c, 0x53, 0x01, 0x94, 0xdc, 0xaa, 0x28, 0x00, 0x4b, 0xed, 0xfc, 0xd0, 0x9e, + 0x38, 0x4c, 0x45, 0x6c, 0x3c, 0x71, 0x14, 0xc4, 0xc6, 0xf3, 0x65, 0x8f, 0xd4, 0x05, 0x84, 0x61, + 0x2a, 0x20, 0x34, 0x2e, 0x08, 0xa3, 0x00, 0x42, 0xe3, 0x5b, 0x77, 0xea, 0xb1, 0xe7, 0xdf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0xf6, 0x0b, 0x0a, 0xfa, 0x8b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client_link.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client_link.pb.go new file mode 100644 index 0000000..46e9119 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_client_link.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_client_link.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents customer client link relationship. +type CustomerClientLink struct { + // Name of the resource. + // CustomerClientLink resource names have the form: + // + // `customers/{customer_id}/customerClientLinks/{client_customer_id}~{manager_link_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The client customer linked to this customer. + ClientCustomer *wrappers.StringValue `protobuf:"bytes,3,opt,name=client_customer,json=clientCustomer,proto3" json:"client_customer,omitempty"` + // This is uniquely identifies a customer client link. Read only. + ManagerLinkId *wrappers.Int64Value `protobuf:"bytes,4,opt,name=manager_link_id,json=managerLinkId,proto3" json:"manager_link_id,omitempty"` + // This is the status of the link between client and manager. + Status enums.ManagerLinkStatusEnum_ManagerLinkStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.ManagerLinkStatusEnum_ManagerLinkStatus" json:"status,omitempty"` + // The visibility of the link. Users can choose whether or not to see hidden + // links in the AdWords UI. 
+ // Default value is false + Hidden *wrappers.BoolValue `protobuf:"bytes,6,opt,name=hidden,proto3" json:"hidden,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerClientLink) Reset() { *m = CustomerClientLink{} } +func (m *CustomerClientLink) String() string { return proto.CompactTextString(m) } +func (*CustomerClientLink) ProtoMessage() {} +func (*CustomerClientLink) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_282011011b313e4f, []int{0} +} +func (m *CustomerClientLink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerClientLink.Unmarshal(m, b) +} +func (m *CustomerClientLink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerClientLink.Marshal(b, m, deterministic) +} +func (dst *CustomerClientLink) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerClientLink.Merge(dst, src) +} +func (m *CustomerClientLink) XXX_Size() int { + return xxx_messageInfo_CustomerClientLink.Size(m) +} +func (m *CustomerClientLink) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerClientLink.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerClientLink proto.InternalMessageInfo + +func (m *CustomerClientLink) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerClientLink) GetClientCustomer() *wrappers.StringValue { + if m != nil { + return m.ClientCustomer + } + return nil +} + +func (m *CustomerClientLink) GetManagerLinkId() *wrappers.Int64Value { + if m != nil { + return m.ManagerLinkId + } + return nil +} + +func (m *CustomerClientLink) GetStatus() enums.ManagerLinkStatusEnum_ManagerLinkStatus { + if m != nil { + return m.Status + } + return enums.ManagerLinkStatusEnum_UNSPECIFIED +} + +func (m *CustomerClientLink) GetHidden() *wrappers.BoolValue { + if m != nil { + return m.Hidden + } + return nil +} + +func init() { + proto.RegisterType((*CustomerClientLink)(nil), "google.ads.googleads.v1.resources.CustomerClientLink") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_client_link.proto", fileDescriptor_customer_client_link_282011011b313e4f) +} + +var fileDescriptor_customer_client_link_282011011b313e4f = []byte{ + // 436 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xdd, 0x6a, 0x14, 0x31, + 0x18, 0x65, 0xa6, 0xba, 0x60, 0xb4, 0x2d, 0xcc, 0x8d, 0xc3, 0x5a, 0x64, 0xab, 0x14, 0xf6, 0x2a, + 0x61, 0x56, 0x51, 0x88, 0xde, 0xcc, 0x2e, 0xb5, 0x54, 0x54, 0xca, 0x16, 0xf6, 0x42, 0x16, 0x97, + 0x74, 0x13, 0x63, 0xe8, 0x4c, 0x32, 0x24, 0x99, 0xfa, 0x02, 0x3e, 0x89, 0x97, 0x3e, 0x8a, 0x6f, + 0xe0, 0x2b, 0xf8, 0x14, 0x32, 0xf9, 0x19, 0x91, 0x61, 0xed, 0xdd, 0x49, 0xbe, 0x73, 0xce, 0x77, + 0xbe, 0x2f, 0x01, 0xaf, 0xb9, 0x52, 0xbc, 0x62, 0x88, 0x50, 0x83, 0x3c, 0xec, 0xd0, 0x4d, 0x81, + 0x34, 0x33, 0xaa, 0xd5, 0x5b, 0x66, 0xd0, 0xb6, 0x35, 0x56, 0xd5, 0x4c, 0x6f, 0xb6, 0x95, 0x60, + 0xd2, 0x6e, 0x2a, 0x21, 0xaf, 0x61, 0xa3, 0x95, 0x55, 0xd9, 0xb1, 0x97, 0x40, 0x42, 0x0d, 0xec, + 0xd5, 0xf0, 0xa6, 0x80, 0xbd, 0x7a, 0xfc, 0x72, 0x57, 0x03, 0x26, 0xdb, 0xda, 0xa0, 0x9a, 0x48, + 0xc2, 0x99, 0x76, 0xa6, 0x1b, 0x63, 0x89, 0x6d, 0x8d, 0xf7, 0x1e, 0x3f, 0x0e, 0x42, 0x77, 0xba, + 0x6a, 0x3f, 0xa3, 0xaf, 0x9a, 0x34, 0x0d, 0xd3, 0xb1, 0x7e, 0x14, 0x8d, 0x1b, 0x81, 0x88, 0x94, + 0xca, 0x12, 0x2b, 0x94, 0x0c, 0xd5, 0x27, 0xbf, 0x52, 0x90, 0x2d, 0x42, 0xf0, 0x85, 0xcb, 0xfd, + 0x4e, 0xc8, 
0xeb, 0xec, 0x29, 0xd8, 0x8f, 0xd1, 0x36, 0x92, 0xd4, 0x2c, 0x4f, 0x26, 0xc9, 0xf4, + 0xde, 0xf2, 0x41, 0xbc, 0xfc, 0x40, 0x6a, 0x96, 0x9d, 0x82, 0xc3, 0x30, 0x6a, 0x1c, 0x3d, 0xdf, + 0x9b, 0x24, 0xd3, 0xfb, 0xb3, 0xa3, 0x30, 0x24, 0x8c, 0x99, 0xe0, 0xa5, 0xd5, 0x42, 0xf2, 0x15, + 0xa9, 0x5a, 0xb6, 0x3c, 0xf0, 0xa2, 0xd8, 0x35, 0x5b, 0x80, 0xc3, 0x7f, 0xa6, 0x13, 0x34, 0xbf, + 0xe3, 0x6c, 0x1e, 0x0d, 0x6c, 0xce, 0xa5, 0x7d, 0xf1, 0xdc, 0xbb, 0xec, 0x07, 0x4d, 0x17, 0xf7, + 0x9c, 0x66, 0x9f, 0xc0, 0xc8, 0x6f, 0x25, 0xbf, 0x3b, 0x49, 0xa6, 0x07, 0xb3, 0x37, 0x70, 0xd7, + 0xca, 0xdd, 0x3e, 0xe1, 0xfb, 0xbf, 0xea, 0x4b, 0xa7, 0x3b, 0x95, 0x6d, 0x3d, 0xbc, 0x5d, 0x06, + 0xd7, 0x6c, 0x06, 0x46, 0x5f, 0x04, 0xa5, 0x4c, 0xe6, 0x23, 0x97, 0x6d, 0x3c, 0xc8, 0x36, 0x57, + 0xaa, 0xf2, 0xd1, 0x02, 0x73, 0xfe, 0x2d, 0x05, 0x27, 0x5b, 0x55, 0xc3, 0x5b, 0x1f, 0x7f, 0xfe, + 0x70, 0xf8, 0x04, 0x17, 0x9d, 0xef, 0x45, 0xf2, 0xf1, 0x6d, 0x50, 0x73, 0x55, 0x11, 0xc9, 0xa1, + 0xd2, 0x1c, 0x71, 0x26, 0x5d, 0xd7, 0xf8, 0x4f, 0x1a, 0x61, 0xfe, 0xf3, 0x2f, 0x5f, 0xf5, 0xe8, + 0x7b, 0xba, 0x77, 0x56, 0x96, 0x3f, 0xd2, 0xe3, 0x33, 0x6f, 0x59, 0x52, 0x03, 0x3d, 0xec, 0xd0, + 0xaa, 0x80, 0xcb, 0xc8, 0xfc, 0x19, 0x39, 0xeb, 0x92, 0x9a, 0x75, 0xcf, 0x59, 0xaf, 0x8a, 0x75, + 0xcf, 0xf9, 0x9d, 0x9e, 0xf8, 0x02, 0xc6, 0x25, 0x35, 0x18, 0xf7, 0x2c, 0x8c, 0x57, 0x05, 0xc6, + 0x3d, 0xef, 0x6a, 0xe4, 0xc2, 0x3e, 0xfb, 0x13, 0x00, 0x00, 0xff, 0xff, 0x73, 0x1a, 0x09, 0xc6, + 0x43, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_extension_setting.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_extension_setting.pb.go new file mode 100644 index 0000000..16a643c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_extension_setting.pb.go @@ -0,0 +1,134 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_extension_setting.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A customer extension setting. +type CustomerExtensionSetting struct { + // The resource name of the customer extension setting. + // CustomerExtensionSetting resource names have the form: + // + // `customers/{customer_id}/customerExtensionSettings/{extension_type}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The extension type of the customer extension setting. 
+ ExtensionType enums.ExtensionTypeEnum_ExtensionType `protobuf:"varint,2,opt,name=extension_type,json=extensionType,proto3,enum=google.ads.googleads.v1.enums.ExtensionTypeEnum_ExtensionType" json:"extension_type,omitempty"` + // The resource names of the extension feed items to serve under the customer. + // ExtensionFeedItem resource names have the form: + // + // `customers/{customer_id}/extensionFeedItems/{feed_item_id}` + ExtensionFeedItems []*wrappers.StringValue `protobuf:"bytes,3,rep,name=extension_feed_items,json=extensionFeedItems,proto3" json:"extension_feed_items,omitempty"` + // The device for which the extensions will serve. Optional. + Device enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice `protobuf:"varint,4,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice" json:"device,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerExtensionSetting) Reset() { *m = CustomerExtensionSetting{} } +func (m *CustomerExtensionSetting) String() string { return proto.CompactTextString(m) } +func (*CustomerExtensionSetting) ProtoMessage() {} +func (*CustomerExtensionSetting) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_36805005c826cc7e, []int{0} +} +func (m *CustomerExtensionSetting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerExtensionSetting.Unmarshal(m, b) +} +func (m *CustomerExtensionSetting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerExtensionSetting.Marshal(b, m, deterministic) +} +func (dst *CustomerExtensionSetting) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerExtensionSetting.Merge(dst, src) +} +func (m *CustomerExtensionSetting) XXX_Size() int { + return xxx_messageInfo_CustomerExtensionSetting.Size(m) +} +func (m *CustomerExtensionSetting) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerExtensionSetting.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerExtensionSetting proto.InternalMessageInfo + +func (m *CustomerExtensionSetting) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerExtensionSetting) GetExtensionType() enums.ExtensionTypeEnum_ExtensionType { + if m != nil { + return m.ExtensionType + } + return enums.ExtensionTypeEnum_UNSPECIFIED +} + +func (m *CustomerExtensionSetting) GetExtensionFeedItems() []*wrappers.StringValue { + if m != nil { + return m.ExtensionFeedItems + } + return nil +} + +func (m *CustomerExtensionSetting) GetDevice() enums.ExtensionSettingDeviceEnum_ExtensionSettingDevice { + if m != nil { + return m.Device + } + return enums.ExtensionSettingDeviceEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CustomerExtensionSetting)(nil), "google.ads.googleads.v1.resources.CustomerExtensionSetting") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_extension_setting.proto", fileDescriptor_customer_extension_setting_36805005c826cc7e) +} + +var fileDescriptor_customer_extension_setting_36805005c826cc7e = []byte{ + // 431 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xc1, 0x6e, 0xd4, 0x30, + 0x10, 0xd5, 0x66, 0x51, 0x25, 0x02, 0xed, 0x21, 0xe2, 0x10, 0x55, 0x05, 0x6d, 0x41, 0x95, 0xf6, + 0x64, 0x2b, 0xcb, 0xcd, 0x20, 0xa4, 0x2c, 0x94, 0x0a, 0x0e, 0xd5, 0x2a, 0x45, 0x7b, 0x40, 0x2b, + 0x45, 0xee, 0x7a, 0x6a, 0x2c, 0x6d, 
0xec, 0xc8, 0x76, 0x16, 0xfa, 0x0b, 0x7c, 0x06, 0x47, 0x3e, + 0x85, 0xaf, 0xe0, 0xcc, 0x57, 0xa0, 0xc4, 0xb1, 0x21, 0x42, 0x4b, 0xf7, 0x36, 0xf1, 0xbc, 0xf7, + 0xe6, 0xbd, 0xc9, 0xc4, 0x73, 0xae, 0x14, 0xdf, 0x00, 0xa6, 0xcc, 0x60, 0x57, 0xb6, 0xd5, 0x36, + 0xc3, 0x1a, 0x8c, 0x6a, 0xf4, 0x1a, 0x0c, 0x5e, 0x37, 0xc6, 0xaa, 0x0a, 0x74, 0x09, 0x5f, 0x2c, + 0x48, 0x23, 0x94, 0x2c, 0x0d, 0x58, 0x2b, 0x24, 0x47, 0xb5, 0x56, 0x56, 0x25, 0xa7, 0x8e, 0x88, + 0x28, 0x33, 0x28, 0x68, 0xa0, 0x6d, 0x86, 0x82, 0xc6, 0xf1, 0xcb, 0x5d, 0x63, 0x40, 0x36, 0x95, + 0xc1, 0xff, 0x28, 0x97, 0x0c, 0xb6, 0x62, 0x0d, 0x6e, 0xc0, 0xf1, 0x6c, 0x5f, 0xb6, 0xbd, 0xad, + 0x3d, 0xe7, 0x49, 0xcf, 0xe9, 0xbe, 0xae, 0x9b, 0x1b, 0xfc, 0x59, 0xd3, 0xba, 0x06, 0x6d, 0xfa, + 0xfe, 0x89, 0xd7, 0xac, 0x05, 0xa6, 0x52, 0x2a, 0x4b, 0xad, 0x50, 0xb2, 0xef, 0x3e, 0xfd, 0x19, + 0xc5, 0xe9, 0xeb, 0x3e, 0xf7, 0xb9, 0x97, 0xbf, 0x72, 0xde, 0x92, 0x67, 0xf1, 0xa1, 0x4f, 0x56, + 0x4a, 0x5a, 0x41, 0x3a, 0x9a, 0x8c, 0xa6, 0xf7, 0x8b, 0x87, 0xfe, 0xf1, 0x92, 0x56, 0x90, 0x40, + 0x7c, 0x34, 0xf4, 0x95, 0x46, 0x93, 0xd1, 0xf4, 0x68, 0xf6, 0x0a, 0xed, 0xda, 0x56, 0x17, 0x06, + 0x85, 0x69, 0x1f, 0x6e, 0x6b, 0x38, 0x97, 0x4d, 0x35, 0x7c, 0x29, 0x0e, 0xe1, 0xef, 0xcf, 0xe4, + 0x32, 0x7e, 0xf4, 0x67, 0xcc, 0x0d, 0x00, 0x2b, 0x85, 0x85, 0xca, 0xa4, 0xe3, 0xc9, 0x78, 0xfa, + 0x60, 0x76, 0xe2, 0x87, 0xf9, 0x2d, 0xa0, 0x2b, 0xab, 0x85, 0xe4, 0x4b, 0xba, 0x69, 0xa0, 0x48, + 0x02, 0xf3, 0x2d, 0x00, 0x7b, 0xd7, 0xf2, 0x92, 0x4f, 0xf1, 0x81, 0x5b, 0x7d, 0x7a, 0xaf, 0xb3, + 0xbb, 0xd8, 0xd7, 0x6e, 0xbf, 0x9c, 0x37, 0x1d, 0x79, 0xe8, 0x7b, 0xd0, 0x2a, 0x7a, 0xfd, 0xf9, + 0xd7, 0x28, 0x3e, 0x5b, 0xab, 0x0a, 0xdd, 0x79, 0x3c, 0xf3, 0xc7, 0xbb, 0xfe, 0xc4, 0xa2, 0x4d, + 0xb5, 0x18, 0x7d, 0x7c, 0xdf, 0x6b, 0x70, 0xb5, 0xa1, 0x92, 0x23, 0xa5, 0x39, 0xe6, 0x20, 0xbb, + 0xcc, 0xfe, 0x5e, 0x6a, 0x61, 0xfe, 0x73, 0xe3, 0x2f, 0x42, 0xf5, 0x2d, 0x1a, 0x5f, 0xe4, 0xf9, + 0xf7, 0xe8, 0xf4, 0xc2, 0x49, 0xe6, 0xcc, 0x20, 0x57, 0xb6, 0xd5, 0x32, 0x43, 0x85, 0x47, 0xfe, + 0xf0, 0x98, 0x55, 0xce, 0xcc, 0x2a, 0x60, 0x56, 0xcb, 0x6c, 0x15, 0x30, 0xbf, 0xa2, 0x33, 0xd7, + 0x20, 0x24, 0x67, 0x86, 0x90, 0x80, 0x22, 0x64, 0x99, 0x11, 0x12, 0x70, 0xd7, 0x07, 0x9d, 0xd9, + 0xe7, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x13, 0xdf, 0xd9, 0x40, 0x8f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_feed.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_feed.pb.go new file mode 100644 index 0000000..9d7b2e7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_feed.pb.go @@ -0,0 +1,148 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_feed.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A customer feed. +type CustomerFeed struct { + // The resource name of the customer feed. + // Customer feed resource names have the form: + // + // `customers/{customer_id}/customerFeeds/{feed_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed being linked to the customer. + Feed *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed,proto3" json:"feed,omitempty"` + // Indicates which placeholder types the feed may populate under the connected + // customer. Required. + PlaceholderTypes []enums.PlaceholderTypeEnum_PlaceholderType `protobuf:"varint,3,rep,packed,name=placeholder_types,json=placeholderTypes,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType" json:"placeholder_types,omitempty"` + // Matching function associated with the CustomerFeed. + // The matching function is used to filter the set of feed items selected. + // Required. + MatchingFunction *common.MatchingFunction `protobuf:"bytes,4,opt,name=matching_function,json=matchingFunction,proto3" json:"matching_function,omitempty"` + // Status of the customer feed. + // This field is read-only. + Status enums.FeedLinkStatusEnum_FeedLinkStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedLinkStatusEnum_FeedLinkStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerFeed) Reset() { *m = CustomerFeed{} } +func (m *CustomerFeed) String() string { return proto.CompactTextString(m) } +func (*CustomerFeed) ProtoMessage() {} +func (*CustomerFeed) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_eb3e39480bda33e3, []int{0} +} +func (m *CustomerFeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerFeed.Unmarshal(m, b) +} +func (m *CustomerFeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerFeed.Marshal(b, m, deterministic) +} +func (dst *CustomerFeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerFeed.Merge(dst, src) +} +func (m *CustomerFeed) XXX_Size() int { + return xxx_messageInfo_CustomerFeed.Size(m) +} +func (m *CustomerFeed) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerFeed.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerFeed proto.InternalMessageInfo + +func (m *CustomerFeed) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerFeed) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *CustomerFeed) GetPlaceholderTypes() []enums.PlaceholderTypeEnum_PlaceholderType { + if m != nil { + return m.PlaceholderTypes + } + return nil +} + +func (m *CustomerFeed) GetMatchingFunction() *common.MatchingFunction { + if m != nil { + return m.MatchingFunction + } + return nil +} + +func (m *CustomerFeed) GetStatus() enums.FeedLinkStatusEnum_FeedLinkStatus { + if m != nil { + return m.Status + } + return enums.FeedLinkStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CustomerFeed)(nil), "google.ads.googleads.v1.resources.CustomerFeed") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_feed.proto", fileDescriptor_customer_feed_eb3e39480bda33e3) +} + +var 
fileDescriptor_customer_feed_eb3e39480bda33e3 = []byte{ + // 473 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xdd, 0x6a, 0xd4, 0x40, + 0x14, 0x26, 0xbb, 0xb5, 0x60, 0xac, 0xa5, 0x9b, 0xab, 0x50, 0x8a, 0x6c, 0x95, 0xc2, 0x5e, 0xcd, + 0x34, 0xeb, 0xcf, 0x45, 0xbc, 0x31, 0x2b, 0xb6, 0x20, 0x2a, 0x4b, 0x2a, 0x8b, 0xc8, 0x4a, 0x98, + 0x26, 0x67, 0xd3, 0xd0, 0xcc, 0x0f, 0x33, 0x93, 0x4a, 0x9f, 0xc2, 0x77, 0xf0, 0xd2, 0x47, 0xf1, + 0x51, 0x7c, 0x08, 0x91, 0x64, 0x66, 0xa2, 0x5d, 0x59, 0xeb, 0xdd, 0xc9, 0x99, 0xef, 0xfb, 0xf2, + 0x9d, 0xf3, 0xcd, 0xf8, 0x4f, 0x4b, 0xce, 0xcb, 0x1a, 0x30, 0x29, 0x14, 0x36, 0x65, 0x5b, 0x5d, + 0x45, 0x58, 0x82, 0xe2, 0x8d, 0xcc, 0x41, 0xe1, 0xbc, 0x51, 0x9a, 0x53, 0x90, 0xd9, 0x0a, 0xa0, + 0x40, 0x42, 0x72, 0xcd, 0x83, 0x43, 0x83, 0x45, 0xa4, 0x50, 0xa8, 0xa7, 0xa1, 0xab, 0x08, 0xf5, + 0xb4, 0xfd, 0x67, 0x9b, 0x94, 0x73, 0x4e, 0x29, 0x67, 0x98, 0x12, 0x9d, 0x5f, 0x54, 0xac, 0xcc, + 0x56, 0x0d, 0xcb, 0x75, 0xc5, 0x99, 0x91, 0xde, 0x7f, 0xb2, 0x89, 0x07, 0xac, 0xa1, 0x0a, 0xb7, + 0x26, 0xb2, 0xba, 0x62, 0x97, 0x99, 0xd2, 0x44, 0x37, 0xea, 0xff, 0x58, 0xa2, 0x26, 0x39, 0x5c, + 0xf0, 0xba, 0x00, 0x99, 0xe9, 0x6b, 0x01, 0x96, 0xf5, 0xc0, 0xb2, 0xba, 0xaf, 0xf3, 0x66, 0x85, + 0x3f, 0x4b, 0x22, 0x04, 0x48, 0xa7, 0x7a, 0xe0, 0x54, 0x45, 0x85, 0x09, 0x63, 0x5c, 0x93, 0xd6, + 0xa8, 0x3d, 0x7d, 0xf8, 0x65, 0xe8, 0xef, 0xbc, 0xb4, 0xcb, 0x39, 0x01, 0x28, 0x82, 0x47, 0xfe, + 0x7d, 0x37, 0x7f, 0xc6, 0x08, 0x85, 0xd0, 0x1b, 0x7b, 0x93, 0xbb, 0xe9, 0x8e, 0x6b, 0xbe, 0x23, + 0x14, 0x82, 0x63, 0x7f, 0xab, 0x9d, 0x21, 0x1c, 0x8c, 0xbd, 0xc9, 0xbd, 0xe9, 0x81, 0x5d, 0x1f, + 0x72, 0x16, 0xd0, 0x99, 0x96, 0x15, 0x2b, 0x17, 0xa4, 0x6e, 0x20, 0xed, 0x90, 0x01, 0xf7, 0x47, + 0xeb, 0xfe, 0x55, 0x38, 0x1c, 0x0f, 0x27, 0xbb, 0xd3, 0x19, 0xda, 0x14, 0x44, 0x37, 0x37, 0x9a, + 0xff, 0xe6, 0xbd, 0xbf, 0x16, 0xf0, 0x8a, 0x35, 0x74, 0xbd, 0x97, 0xee, 0x89, 0x9b, 0x0d, 0x15, + 0x7c, 0xf2, 0x47, 0x7f, 0xa5, 0x13, 0x6e, 0x75, 0x7e, 0x8f, 0x37, 0xfe, 0xd0, 0xc4, 0x8a, 0xde, + 0x5a, 0xe2, 0x89, 0xe5, 0xa5, 0x7b, 0x74, 0xad, 0x13, 0x7c, 0xf0, 0xb7, 0x4d, 0x76, 0xe1, 0x9d, + 0xb1, 0x37, 0xd9, 0x9d, 0xbe, 0xb8, 0x65, 0x88, 0x76, 0xb7, 0x6f, 0x2a, 0x76, 0x79, 0xd6, 0x91, + 0xba, 0x19, 0x6e, 0xb6, 0x52, 0xab, 0x37, 0xfb, 0xe9, 0xf9, 0x47, 0x39, 0xa7, 0xe8, 0xd6, 0xdb, + 0x39, 0x1b, 0xfd, 0x19, 0xdc, 0xbc, 0xdd, 0xfd, 0xdc, 0xfb, 0xf8, 0xda, 0xf2, 0x4a, 0x5e, 0x13, + 0x56, 0x22, 0x2e, 0x4b, 0x5c, 0x02, 0xeb, 0x92, 0x71, 0x97, 0x4a, 0x54, 0xea, 0x1f, 0x6f, 0xe5, + 0x79, 0x5f, 0x7d, 0x1d, 0x0c, 0x4f, 0x93, 0xe4, 0xdb, 0xe0, 0xf0, 0xd4, 0x48, 0x26, 0x85, 0x42, + 0xa6, 0x6c, 0xab, 0x45, 0x84, 0x52, 0x87, 0xfc, 0xee, 0x30, 0xcb, 0xa4, 0x50, 0xcb, 0x1e, 0xb3, + 0x5c, 0x44, 0xcb, 0x1e, 0xf3, 0x63, 0x70, 0x64, 0x0e, 0xe2, 0x38, 0x29, 0x54, 0x1c, 0xf7, 0xa8, + 0x38, 0x5e, 0x44, 0x71, 0xdc, 0xe3, 0xce, 0xb7, 0x3b, 0xb3, 0x8f, 0x7f, 0x05, 0x00, 0x00, 0xff, + 0xff, 0x67, 0xbb, 0xa2, 0x22, 0xd7, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_label.pb.go new file mode 100644 index 0000000..08c1b09 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_label.pb.go @@ -0,0 +1,120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/customer_label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a relationship between a customer and a label. This customer may +// not have access to all the labels attached to it. Additional CustomerLabels +// may be returned by increasing permissions with login-customer-id. +type CustomerLabel struct { + // Name of the resource. + // Customer label resource names have the form: + // `customers/{customer_id}/customerLabels/{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The resource name of the customer to which the label is attached. + // Read only. + Customer *wrappers.StringValue `protobuf:"bytes,2,opt,name=customer,proto3" json:"customer,omitempty"` + // The resource name of the label assigned to the customer. + // + // Note: the Customer ID portion of the label resource name is not + // validated when creating a new CustomerLabel. + Label *wrappers.StringValue `protobuf:"bytes,3,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerLabel) Reset() { *m = CustomerLabel{} } +func (m *CustomerLabel) String() string { return proto.CompactTextString(m) } +func (*CustomerLabel) ProtoMessage() {} +func (*CustomerLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_992888de78627c4d, []int{0} +} +func (m *CustomerLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerLabel.Unmarshal(m, b) +} +func (m *CustomerLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerLabel.Marshal(b, m, deterministic) +} +func (dst *CustomerLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerLabel.Merge(dst, src) +} +func (m *CustomerLabel) XXX_Size() int { + return xxx_messageInfo_CustomerLabel.Size(m) +} +func (m *CustomerLabel) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerLabel proto.InternalMessageInfo + +func (m *CustomerLabel) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerLabel) GetCustomer() *wrappers.StringValue { + if m != nil { + return m.Customer + } + return nil +} + +func (m *CustomerLabel) GetLabel() *wrappers.StringValue { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*CustomerLabel)(nil), "google.ads.googleads.v1.resources.CustomerLabel") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_label.proto", fileDescriptor_customer_label_992888de78627c4d) +} + +var 
fileDescriptor_customer_label_992888de78627c4d = []byte{ + // 322 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcf, 0x4a, 0xc3, 0x30, + 0x00, 0xc6, 0x49, 0x87, 0xa2, 0xd1, 0x5d, 0x7a, 0x1a, 0x63, 0xc8, 0xa6, 0x0c, 0x76, 0x4a, 0xe8, + 0x04, 0x91, 0x78, 0xea, 0x3c, 0x0c, 0x44, 0x64, 0x4c, 0xe8, 0x41, 0x0a, 0x23, 0x6d, 0x63, 0x28, + 0xb4, 0x49, 0x49, 0xda, 0xf9, 0x3e, 0x3b, 0xfa, 0x28, 0x3e, 0x8a, 0x2f, 0xa1, 0xb4, 0x69, 0x02, + 0x5e, 0xd4, 0xdb, 0x47, 0xf3, 0xfb, 0xfe, 0xa4, 0x81, 0x37, 0x5c, 0x4a, 0x5e, 0x30, 0x4c, 0x33, + 0x8d, 0x8d, 0x6c, 0xd5, 0x3e, 0xc0, 0x8a, 0x69, 0xd9, 0xa8, 0x94, 0x69, 0x9c, 0x36, 0xba, 0x96, + 0x25, 0x53, 0xbb, 0x82, 0x26, 0xac, 0x40, 0x95, 0x92, 0xb5, 0xf4, 0x67, 0x06, 0x46, 0x34, 0xd3, + 0xc8, 0xf9, 0xd0, 0x3e, 0x40, 0xce, 0x37, 0xbe, 0xe8, 0xa3, 0x3b, 0x43, 0xd2, 0xbc, 0xe2, 0x37, + 0x45, 0xab, 0x8a, 0x29, 0x6d, 0x22, 0xc6, 0x13, 0x5b, 0x5d, 0xe5, 0x98, 0x0a, 0x21, 0x6b, 0x5a, + 0xe7, 0x52, 0xf4, 0xa7, 0x97, 0x07, 0x00, 0x87, 0xf7, 0x7d, 0xf3, 0x63, 0x5b, 0xec, 0x5f, 0xc1, + 0xa1, 0x0d, 0xdf, 0x09, 0x5a, 0xb2, 0x11, 0x98, 0x82, 0xc5, 0xe9, 0xf6, 0xdc, 0x7e, 0x7c, 0xa2, + 0x25, 0xf3, 0x6f, 0xe1, 0x89, 0xdd, 0x3b, 0xf2, 0xa6, 0x60, 0x71, 0xb6, 0x9c, 0xf4, 0xfb, 0x90, + 0xdd, 0x81, 0x9e, 0x6b, 0x95, 0x0b, 0x1e, 0xd1, 0xa2, 0x61, 0x5b, 0x47, 0xfb, 0x4b, 0x78, 0xd4, + 0x5d, 0x70, 0x34, 0xf8, 0x87, 0xcd, 0xa0, 0xab, 0x2f, 0x00, 0xe7, 0xa9, 0x2c, 0xd1, 0x9f, 0x3f, + 0x63, 0xe5, 0xff, 0xb8, 0xcb, 0xa6, 0xcd, 0xdc, 0x80, 0x97, 0x87, 0xde, 0xc8, 0x65, 0x41, 0x05, + 0x47, 0x52, 0x71, 0xcc, 0x99, 0xe8, 0x1a, 0xed, 0x6b, 0x54, 0xb9, 0xfe, 0xe5, 0x71, 0xee, 0x9c, + 0x3a, 0x78, 0x83, 0x75, 0x18, 0xbe, 0x7b, 0xb3, 0xb5, 0x89, 0x0c, 0x33, 0x8d, 0x8c, 0x6c, 0x55, + 0x14, 0xa0, 0xad, 0x25, 0x3f, 0x2c, 0x13, 0x87, 0x99, 0x8e, 0x1d, 0x13, 0x47, 0x41, 0xec, 0x98, + 0x4f, 0x6f, 0x6e, 0x0e, 0x08, 0x09, 0x33, 0x4d, 0x88, 0xa3, 0x08, 0x89, 0x02, 0x42, 0x1c, 0x97, + 0x1c, 0x77, 0x63, 0xaf, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xbe, 0x55, 0x24, 0xa1, 0x48, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_manager_link.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_manager_link.pb.go new file mode 100644 index 0000000..0788734 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_manager_link.pb.go @@ -0,0 +1,130 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_manager_link.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents customer-manager link relationship. 
+type CustomerManagerLink struct { + // Name of the resource. + // CustomerManagerLink resource names have the form: + // + // `customers/{customer_id}/customerManagerLinks/{manager_customer_id}~{manager_link_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The manager customer linked to the customer. + ManagerCustomer *wrappers.StringValue `protobuf:"bytes,3,opt,name=manager_customer,json=managerCustomer,proto3" json:"manager_customer,omitempty"` + // ID of the customer-manager link. This field is read only. + ManagerLinkId *wrappers.Int64Value `protobuf:"bytes,4,opt,name=manager_link_id,json=managerLinkId,proto3" json:"manager_link_id,omitempty"` + // Status of the link between the customer and the manager. + Status enums.ManagerLinkStatusEnum_ManagerLinkStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.ManagerLinkStatusEnum_ManagerLinkStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerManagerLink) Reset() { *m = CustomerManagerLink{} } +func (m *CustomerManagerLink) String() string { return proto.CompactTextString(m) } +func (*CustomerManagerLink) ProtoMessage() {} +func (*CustomerManagerLink) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_ea27bde8cdb5c4ca, []int{0} +} +func (m *CustomerManagerLink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerManagerLink.Unmarshal(m, b) +} +func (m *CustomerManagerLink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerManagerLink.Marshal(b, m, deterministic) +} +func (dst *CustomerManagerLink) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerManagerLink.Merge(dst, src) +} +func (m *CustomerManagerLink) XXX_Size() int { + return xxx_messageInfo_CustomerManagerLink.Size(m) +} +func (m *CustomerManagerLink) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerManagerLink.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerManagerLink proto.InternalMessageInfo + +func (m *CustomerManagerLink) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerManagerLink) GetManagerCustomer() *wrappers.StringValue { + if m != nil { + return m.ManagerCustomer + } + return nil +} + +func (m *CustomerManagerLink) GetManagerLinkId() *wrappers.Int64Value { + if m != nil { + return m.ManagerLinkId + } + return nil +} + +func (m *CustomerManagerLink) GetStatus() enums.ManagerLinkStatusEnum_ManagerLinkStatus { + if m != nil { + return m.Status + } + return enums.ManagerLinkStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*CustomerManagerLink)(nil), "google.ads.googleads.v1.resources.CustomerManagerLink") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_manager_link.proto", fileDescriptor_customer_manager_link_ea27bde8cdb5c4ca) +} + +var fileDescriptor_customer_manager_link_ea27bde8cdb5c4ca = []byte{ + // 408 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xd1, 0xca, 0xd3, 0x30, + 0x18, 0xa5, 0xfd, 0xf5, 0x07, 0xab, 0x73, 0x52, 0x6f, 0xca, 0x1c, 0xb2, 0x29, 0x83, 0x5d, 0x25, + 0x74, 0x8a, 0x42, 0xc4, 0x8b, 0x6e, 0xe8, 0x98, 0xa8, 0x8c, 0x0e, 0x7a, 0x21, 0xc5, 0x92, 0xad, + 0x31, 0x94, 0x35, 0x49, 0x49, 0xd2, 0xf9, 0x04, 0xbe, 0x88, 0x57, 0xe2, 0xa3, 0xf8, 0x28, 0x3e, + 0x85, 0xac, 
0x69, 0xea, 0x64, 0x4e, 0xef, 0x4e, 0x93, 0x73, 0xce, 0x77, 0xce, 0x97, 0x7a, 0x2f, + 0xa9, 0x10, 0xb4, 0x24, 0x10, 0xe7, 0x0a, 0x1a, 0x78, 0x44, 0x87, 0x10, 0x4a, 0xa2, 0x44, 0x2d, + 0x77, 0x44, 0xc1, 0x5d, 0xad, 0xb4, 0x60, 0x44, 0x66, 0x0c, 0x73, 0x4c, 0x89, 0xcc, 0xca, 0x82, + 0xef, 0x41, 0x25, 0x85, 0x16, 0xfe, 0xd8, 0x68, 0x00, 0xce, 0x15, 0xe8, 0xe4, 0xe0, 0x10, 0x82, + 0x4e, 0x3e, 0x78, 0x7e, 0x69, 0x02, 0xe1, 0x35, 0x53, 0xf0, 0xd4, 0x34, 0x53, 0x1a, 0xeb, 0x5a, + 0x19, 0xef, 0xc1, 0xc3, 0x56, 0xd8, 0x7c, 0x6d, 0xeb, 0x4f, 0xf0, 0xb3, 0xc4, 0x55, 0x45, 0xa4, + 0xbd, 0x1f, 0x5a, 0xe3, 0xaa, 0x80, 0x98, 0x73, 0xa1, 0xb1, 0x2e, 0x04, 0x6f, 0x6f, 0x1f, 0x7d, + 0x73, 0xbd, 0xfb, 0x8b, 0x36, 0xf9, 0x3b, 0x33, 0xe3, 0x6d, 0xc1, 0xf7, 0xfe, 0x63, 0xaf, 0x67, + 0xb3, 0x65, 0x1c, 0x33, 0x12, 0x38, 0x23, 0x67, 0x7a, 0x2b, 0xbe, 0x63, 0x0f, 0xdf, 0x63, 0x46, + 0xfc, 0xa5, 0x77, 0xcf, 0xe6, 0xb2, 0xed, 0x83, 0xab, 0x91, 0x33, 0xbd, 0x3d, 0x1b, 0xb6, 0x35, + 0x81, 0x4d, 0x05, 0x36, 0x5a, 0x16, 0x9c, 0x26, 0xb8, 0xac, 0x49, 0xdc, 0x6f, 0x55, 0x76, 0xb0, + 0xbf, 0xf0, 0xfa, 0x7f, 0x14, 0x2c, 0xf2, 0xe0, 0x46, 0xe3, 0xf3, 0xe0, 0xcc, 0x67, 0xc5, 0xf5, + 0xb3, 0xa7, 0xc6, 0xa6, 0xc7, 0x7e, 0x07, 0x5e, 0xe5, 0xfe, 0x47, 0xef, 0xda, 0x2c, 0x26, 0xb8, + 0x39, 0x72, 0xa6, 0x77, 0x67, 0xaf, 0xc1, 0xa5, 0xad, 0x37, 0x2b, 0x05, 0x27, 0x75, 0x37, 0x8d, + 0xee, 0x15, 0xaf, 0xd9, 0xf9, 0x69, 0xdc, 0xba, 0xce, 0xbf, 0xb8, 0xde, 0x64, 0x27, 0x18, 0xf8, + 0xef, 0x5b, 0xce, 0x83, 0xbf, 0x6c, 0x74, 0x7d, 0x2c, 0xb0, 0x76, 0x3e, 0xbc, 0x69, 0xe5, 0x54, + 0x94, 0x98, 0x53, 0x20, 0x24, 0x85, 0x94, 0xf0, 0xa6, 0x9e, 0x7d, 0xf7, 0xaa, 0x50, 0xff, 0xf8, + 0xd1, 0x5e, 0x74, 0xe8, 0xab, 0x7b, 0xb5, 0x8c, 0xa2, 0xef, 0xee, 0x78, 0x69, 0x2c, 0xa3, 0x5c, + 0x01, 0x03, 0x8f, 0x28, 0x09, 0x41, 0x6c, 0x99, 0x3f, 0x2c, 0x27, 0x8d, 0x72, 0x95, 0x76, 0x9c, + 0x34, 0x09, 0xd3, 0x8e, 0xf3, 0xd3, 0x9d, 0x98, 0x0b, 0x84, 0xa2, 0x5c, 0x21, 0xd4, 0xb1, 0x10, + 0x4a, 0x42, 0x84, 0x3a, 0xde, 0xf6, 0xba, 0x09, 0xfb, 0xe4, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x60, 0xab, 0x53, 0xed, 0x14, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_negative_criterion.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_negative_criterion.pb.go new file mode 100644 index 0000000..0e65ac5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/customer_negative_criterion.pb.go @@ -0,0 +1,383 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/customer_negative_criterion.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A negative criterion for exclusions at the customer level. +type CustomerNegativeCriterion struct { + // The resource name of the customer negative criterion. + // Customer negative criterion resource names have the form: + // + // `customers/{customer_id}/customerNegativeCriteria/{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the criterion. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The type of the criterion. + Type enums.CriterionTypeEnum_CriterionType `protobuf:"varint,3,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.CriterionTypeEnum_CriterionType" json:"type,omitempty"` + // The customer negative criterion. + // + // Exactly one must be set. + // + // Types that are valid to be assigned to Criterion: + // *CustomerNegativeCriterion_ContentLabel + // *CustomerNegativeCriterion_MobileApplication + // *CustomerNegativeCriterion_MobileAppCategory + // *CustomerNegativeCriterion_Placement + // *CustomerNegativeCriterion_YoutubeVideo + // *CustomerNegativeCriterion_YoutubeChannel + Criterion isCustomerNegativeCriterion_Criterion `protobuf_oneof:"criterion"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerNegativeCriterion) Reset() { *m = CustomerNegativeCriterion{} } +func (m *CustomerNegativeCriterion) String() string { return proto.CompactTextString(m) } +func (*CustomerNegativeCriterion) ProtoMessage() {} +func (*CustomerNegativeCriterion) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_44d1ead2708d58a3, []int{0} +} +func (m *CustomerNegativeCriterion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerNegativeCriterion.Unmarshal(m, b) +} +func (m *CustomerNegativeCriterion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerNegativeCriterion.Marshal(b, m, deterministic) +} +func (dst *CustomerNegativeCriterion) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerNegativeCriterion.Merge(dst, src) +} +func (m *CustomerNegativeCriterion) XXX_Size() int { + return xxx_messageInfo_CustomerNegativeCriterion.Size(m) +} +func (m *CustomerNegativeCriterion) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerNegativeCriterion.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerNegativeCriterion proto.InternalMessageInfo + +func (m *CustomerNegativeCriterion) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *CustomerNegativeCriterion) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *CustomerNegativeCriterion) GetType() enums.CriterionTypeEnum_CriterionType { + if m != nil { + return m.Type + } + return enums.CriterionTypeEnum_UNSPECIFIED +} + +type isCustomerNegativeCriterion_Criterion interface { + isCustomerNegativeCriterion_Criterion() +} + +type CustomerNegativeCriterion_ContentLabel struct { + ContentLabel *common.ContentLabelInfo `protobuf:"bytes,4,opt,name=content_label,json=contentLabel,proto3,oneof"` +} + +type CustomerNegativeCriterion_MobileApplication struct { + MobileApplication *common.MobileApplicationInfo `protobuf:"bytes,5,opt,name=mobile_application,json=mobileApplication,proto3,oneof"` +} + +type CustomerNegativeCriterion_MobileAppCategory struct { + MobileAppCategory 
*common.MobileAppCategoryInfo `protobuf:"bytes,6,opt,name=mobile_app_category,json=mobileAppCategory,proto3,oneof"` +} + +type CustomerNegativeCriterion_Placement struct { + Placement *common.PlacementInfo `protobuf:"bytes,7,opt,name=placement,proto3,oneof"` +} + +type CustomerNegativeCriterion_YoutubeVideo struct { + YoutubeVideo *common.YouTubeVideoInfo `protobuf:"bytes,8,opt,name=youtube_video,json=youtubeVideo,proto3,oneof"` +} + +type CustomerNegativeCriterion_YoutubeChannel struct { + YoutubeChannel *common.YouTubeChannelInfo `protobuf:"bytes,9,opt,name=youtube_channel,json=youtubeChannel,proto3,oneof"` +} + +func (*CustomerNegativeCriterion_ContentLabel) isCustomerNegativeCriterion_Criterion() {} + +func (*CustomerNegativeCriterion_MobileApplication) isCustomerNegativeCriterion_Criterion() {} + +func (*CustomerNegativeCriterion_MobileAppCategory) isCustomerNegativeCriterion_Criterion() {} + +func (*CustomerNegativeCriterion_Placement) isCustomerNegativeCriterion_Criterion() {} + +func (*CustomerNegativeCriterion_YoutubeVideo) isCustomerNegativeCriterion_Criterion() {} + +func (*CustomerNegativeCriterion_YoutubeChannel) isCustomerNegativeCriterion_Criterion() {} + +func (m *CustomerNegativeCriterion) GetCriterion() isCustomerNegativeCriterion_Criterion { + if m != nil { + return m.Criterion + } + return nil +} + +func (m *CustomerNegativeCriterion) GetContentLabel() *common.ContentLabelInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_ContentLabel); ok { + return x.ContentLabel + } + return nil +} + +func (m *CustomerNegativeCriterion) GetMobileApplication() *common.MobileApplicationInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_MobileApplication); ok { + return x.MobileApplication + } + return nil +} + +func (m *CustomerNegativeCriterion) GetMobileAppCategory() *common.MobileAppCategoryInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_MobileAppCategory); ok { + return x.MobileAppCategory + } + return nil +} + +func (m *CustomerNegativeCriterion) GetPlacement() *common.PlacementInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_Placement); ok { + return x.Placement + } + return nil +} + +func (m *CustomerNegativeCriterion) GetYoutubeVideo() *common.YouTubeVideoInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_YoutubeVideo); ok { + return x.YoutubeVideo + } + return nil +} + +func (m *CustomerNegativeCriterion) GetYoutubeChannel() *common.YouTubeChannelInfo { + if x, ok := m.GetCriterion().(*CustomerNegativeCriterion_YoutubeChannel); ok { + return x.YoutubeChannel + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerNegativeCriterion) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerNegativeCriterion_OneofMarshaler, _CustomerNegativeCriterion_OneofUnmarshaler, _CustomerNegativeCriterion_OneofSizer, []interface{}{ + (*CustomerNegativeCriterion_ContentLabel)(nil), + (*CustomerNegativeCriterion_MobileApplication)(nil), + (*CustomerNegativeCriterion_MobileAppCategory)(nil), + (*CustomerNegativeCriterion_Placement)(nil), + (*CustomerNegativeCriterion_YoutubeVideo)(nil), + (*CustomerNegativeCriterion_YoutubeChannel)(nil), + } +} + +func _CustomerNegativeCriterion_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerNegativeCriterion) + // criterion + switch x := m.Criterion.(type) { + case *CustomerNegativeCriterion_ContentLabel: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ContentLabel); err != nil { + return err + } + case *CustomerNegativeCriterion_MobileApplication: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileApplication); err != nil { + return err + } + case *CustomerNegativeCriterion_MobileAppCategory: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MobileAppCategory); err != nil { + return err + } + case *CustomerNegativeCriterion_Placement: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Placement); err != nil { + return err + } + case *CustomerNegativeCriterion_YoutubeVideo: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeVideo); err != nil { + return err + } + case *CustomerNegativeCriterion_YoutubeChannel: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.YoutubeChannel); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomerNegativeCriterion.Criterion has unexpected type %T", x) + } + return nil +} + +func _CustomerNegativeCriterion_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerNegativeCriterion) + switch tag { + case 4: // criterion.content_label + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.ContentLabelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_ContentLabel{msg} + return true, err + case 5: // criterion.mobile_application + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileApplicationInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_MobileApplication{msg} + return true, err + case 6: // criterion.mobile_app_category + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.MobileAppCategoryInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_MobileAppCategory{msg} + return true, err + case 7: // criterion.placement + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PlacementInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_Placement{msg} + return true, err + case 8: // criterion.youtube_video + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeVideoInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_YoutubeVideo{msg} + return true, err + case 9: // 
criterion.youtube_channel + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.YouTubeChannelInfo) + err := b.DecodeMessage(msg) + m.Criterion = &CustomerNegativeCriterion_YoutubeChannel{msg} + return true, err + default: + return false, nil + } +} + +func _CustomerNegativeCriterion_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerNegativeCriterion) + // criterion + switch x := m.Criterion.(type) { + case *CustomerNegativeCriterion_ContentLabel: + s := proto.Size(x.ContentLabel) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterion_MobileApplication: + s := proto.Size(x.MobileApplication) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterion_MobileAppCategory: + s := proto.Size(x.MobileAppCategory) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterion_Placement: + s := proto.Size(x.Placement) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterion_YoutubeVideo: + s := proto.Size(x.YoutubeVideo) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterion_YoutubeChannel: + s := proto.Size(x.YoutubeChannel) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*CustomerNegativeCriterion)(nil), "google.ads.googleads.v1.resources.CustomerNegativeCriterion") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/customer_negative_criterion.proto", fileDescriptor_customer_negative_criterion_44d1ead2708d58a3) +} + +var fileDescriptor_customer_negative_criterion_44d1ead2708d58a3 = []byte{ + // 575 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdd, 0x8a, 0xd3, 0x40, + 0x14, 0xc7, 0x6d, 0xf6, 0x43, 0x3b, 0xfb, 0x21, 0xc6, 0x9b, 0xb8, 0xca, 0xd2, 0x55, 0x16, 0x0a, + 0xb2, 0x13, 0x5b, 0x3f, 0x2e, 0x22, 0x08, 0x69, 0x90, 0x75, 0xc5, 0x5d, 0x4a, 0x58, 0x2a, 0x4a, + 0x25, 0x4c, 0x92, 0xd3, 0x18, 0x48, 0x66, 0x42, 0x66, 0x52, 0xe9, 0x33, 0xf8, 0x16, 0x5e, 0xfa, + 0x28, 0x3e, 0x8a, 0x2f, 0xa1, 0x64, 0x32, 0x93, 0xfa, 0x41, 0xed, 0x7a, 0x77, 0x7a, 0xce, 0xf9, + 0xff, 0xce, 0x99, 0x3f, 0x3d, 0x41, 0x5e, 0xc2, 0x58, 0x92, 0x81, 0x4d, 0x62, 0x6e, 0x37, 0x61, + 0x1d, 0xcd, 0x07, 0x76, 0x09, 0x9c, 0x55, 0x65, 0x04, 0xdc, 0x8e, 0x2a, 0x2e, 0x58, 0x0e, 0x65, + 0x40, 0x21, 0x21, 0x22, 0x9d, 0x43, 0x10, 0x95, 0xa9, 0x80, 0x32, 0x65, 0x14, 0x17, 0x25, 0x13, + 0xcc, 0x3c, 0x6a, 0x94, 0x98, 0xc4, 0x1c, 0xb7, 0x10, 0x3c, 0x1f, 0xe0, 0x16, 0x72, 0x70, 0xb2, + 0x6a, 0x4e, 0xc4, 0xf2, 0x9c, 0x51, 0x5b, 0x21, 0x49, 0x43, 0x3c, 0x18, 0xae, 0x6a, 0x07, 0x5a, + 0xe5, 0xdc, 0x6e, 0x17, 0x08, 0xc4, 0xa2, 0x00, 0xa5, 0x39, 0x54, 0x1a, 0xf9, 0x2b, 0xac, 0x66, + 0xf6, 0xa7, 0x92, 0x14, 0x05, 0x94, 0x5c, 0xd5, 0xef, 0x69, 0x66, 0x91, 0xda, 0x84, 0x52, 0x26, + 0x88, 0x48, 0x19, 0x55, 0xd5, 0xfb, 0x3f, 0xb6, 0xd0, 0x1d, 0x4f, 0xbd, 0xf4, 0x42, 0x3d, 0xd4, + 0xd3, 0x63, 0xcc, 0x07, 0x68, 0x4f, 0xbf, 0x25, 0xa0, 0x24, 0x07, 0xab, 0xd3, 0xeb, 0xf4, 0xbb, + 0xfe, 0xae, 0x4e, 0x5e, 0x90, 0x1c, 0xcc, 0x87, 0xc8, 0x48, 0x63, 0xcb, 0xe8, 0x75, 0xfa, 0x3b, + 0xc3, 0xbb, 0xca, 0x08, 0xac, 0xb7, 0xc1, 0x67, 0x54, 0x3c, 0x7b, 0x32, 0x21, 0x59, 0x05, 0xbe, + 0x91, 0xc6, 0xa6, 0x8f, 0x36, 0xeb, 0xdd, 0xad, 0x8d, 0x5e, 
0xa7, 0xbf, 0x3f, 0x7c, 0x81, 0x57, + 0x59, 0x28, 0x1f, 0x8c, 0xdb, 0x4d, 0x2e, 0x17, 0x05, 0xbc, 0xa4, 0x55, 0xfe, 0x7b, 0xc6, 0x97, + 0x2c, 0xf3, 0x2d, 0xda, 0x8b, 0x18, 0x15, 0x40, 0x45, 0x90, 0x91, 0x10, 0x32, 0x6b, 0x53, 0xee, + 0xf2, 0x68, 0x25, 0xbc, 0x31, 0x1f, 0x7b, 0x8d, 0xe8, 0x4d, 0xad, 0x39, 0xa3, 0x33, 0xf6, 0xea, + 0x9a, 0xbf, 0x1b, 0xfd, 0x92, 0x33, 0x67, 0xc8, 0xcc, 0x59, 0x98, 0x66, 0x10, 0x90, 0xa2, 0xc8, + 0xd2, 0x48, 0x3a, 0x67, 0x6d, 0x49, 0xfa, 0xd3, 0x75, 0xf4, 0x73, 0xa9, 0x74, 0x97, 0x42, 0x35, + 0xe2, 0x56, 0xfe, 0x67, 0xc1, 0x4c, 0xd0, 0xed, 0xe5, 0x9c, 0x20, 0x22, 0x02, 0x12, 0x56, 0x2e, + 0xac, 0xed, 0xff, 0x1c, 0xe4, 0x29, 0xe1, 0x5f, 0x83, 0x74, 0xc1, 0x3c, 0x47, 0xdd, 0x22, 0x23, + 0x11, 0xe4, 0x40, 0x85, 0x75, 0x5d, 0xe2, 0x4f, 0xd6, 0xe1, 0xc7, 0x5a, 0xa0, 0xb0, 0x4b, 0x42, + 0x6d, 0xfc, 0x82, 0x55, 0xa2, 0x0a, 0x21, 0x98, 0xa7, 0x31, 0x30, 0xeb, 0xc6, 0xd5, 0x8c, 0x7f, + 0xc7, 0xaa, 0xcb, 0x2a, 0x84, 0x49, 0xad, 0xd1, 0xc6, 0x2b, 0x90, 0xcc, 0x99, 0x1f, 0xd0, 0x4d, + 0x0d, 0x8e, 0x3e, 0x12, 0x4a, 0x21, 0xb3, 0xba, 0x12, 0x3d, 0xbc, 0x22, 0xda, 0x6b, 0x54, 0x0a, + 0xbe, 0xaf, 0x60, 0x2a, 0x3b, 0xda, 0x41, 0xdd, 0xf6, 0x94, 0x46, 0x9f, 0x0d, 0x74, 0x1c, 0xb1, + 0x1c, 0xaf, 0x3d, 0xe6, 0xd1, 0xe1, 0xca, 0x43, 0x19, 0xd7, 0xff, 0xf6, 0x71, 0xe7, 0xfd, 0x6b, + 0x05, 0x49, 0x58, 0x46, 0x68, 0x82, 0x59, 0x99, 0xd8, 0x09, 0x50, 0x79, 0x0b, 0xfa, 0x9e, 0x8b, + 0x94, 0xff, 0xe3, 0xab, 0xf3, 0xbc, 0x8d, 0xbe, 0x18, 0x1b, 0xa7, 0xae, 0xfb, 0xd5, 0x38, 0x3a, + 0x6d, 0x90, 0x6e, 0xcc, 0x71, 0x13, 0xd6, 0xd1, 0x64, 0x80, 0x7d, 0xdd, 0xf9, 0x4d, 0xf7, 0x4c, + 0xdd, 0x98, 0x4f, 0xdb, 0x9e, 0xe9, 0x64, 0x30, 0x6d, 0x7b, 0xbe, 0x1b, 0xc7, 0x4d, 0xc1, 0x71, + 0xdc, 0x98, 0x3b, 0x4e, 0xdb, 0xe5, 0x38, 0x93, 0x81, 0xe3, 0xb4, 0x7d, 0xe1, 0xb6, 0x5c, 0xf6, + 0xf1, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x01, 0xad, 0x51, 0x21, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/detail_placement_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/detail_placement_view.pb.go new file mode 100644 index 0000000..449c971 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/detail_placement_view.pb.go @@ -0,0 +1,155 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/detail_placement_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A view with metrics aggregated by ad group and URL or YouTube video. +type DetailPlacementView struct { + // The resource name of the detail placement view. 
+ // Detail placement view resource names have the form: + // + // + // `customers/{customer_id}/detailPlacementViews/{ad_group_id}~{base64_placement}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The automatic placement string at detail level, e. g. website URL, mobile + // application ID, or a YouTube video ID. + Placement *wrappers.StringValue `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"` + // The display name is URL name for websites, YouTube video name for YouTube + // videos, and translated mobile app name for mobile apps. + DisplayName *wrappers.StringValue `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // URL of the group placement, e.g. domain, link to the mobile application in + // app store, or a YouTube channel URL. + GroupPlacementTargetUrl *wrappers.StringValue `protobuf:"bytes,4,opt,name=group_placement_target_url,json=groupPlacementTargetUrl,proto3" json:"group_placement_target_url,omitempty"` + // URL of the placement, e.g. website, link to the mobile application in app + // store, or a YouTube video URL. + TargetUrl *wrappers.StringValue `protobuf:"bytes,5,opt,name=target_url,json=targetUrl,proto3" json:"target_url,omitempty"` + // Type of the placement, e.g. Website, YouTube Video, and Mobile Application. + PlacementType enums.PlacementTypeEnum_PlacementType `protobuf:"varint,6,opt,name=placement_type,json=placementType,proto3,enum=google.ads.googleads.v1.enums.PlacementTypeEnum_PlacementType" json:"placement_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetailPlacementView) Reset() { *m = DetailPlacementView{} } +func (m *DetailPlacementView) String() string { return proto.CompactTextString(m) } +func (*DetailPlacementView) ProtoMessage() {} +func (*DetailPlacementView) Descriptor() ([]byte, []int) { + return fileDescriptor_detail_placement_view_204339af36109cb0, []int{0} +} +func (m *DetailPlacementView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetailPlacementView.Unmarshal(m, b) +} +func (m *DetailPlacementView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetailPlacementView.Marshal(b, m, deterministic) +} +func (dst *DetailPlacementView) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetailPlacementView.Merge(dst, src) +} +func (m *DetailPlacementView) XXX_Size() int { + return xxx_messageInfo_DetailPlacementView.Size(m) +} +func (m *DetailPlacementView) XXX_DiscardUnknown() { + xxx_messageInfo_DetailPlacementView.DiscardUnknown(m) +} + +var xxx_messageInfo_DetailPlacementView proto.InternalMessageInfo + +func (m *DetailPlacementView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *DetailPlacementView) GetPlacement() *wrappers.StringValue { + if m != nil { + return m.Placement + } + return nil +} + +func (m *DetailPlacementView) GetDisplayName() *wrappers.StringValue { + if m != nil { + return m.DisplayName + } + return nil +} + +func (m *DetailPlacementView) GetGroupPlacementTargetUrl() *wrappers.StringValue { + if m != nil { + return m.GroupPlacementTargetUrl + } + return nil +} + +func (m *DetailPlacementView) GetTargetUrl() *wrappers.StringValue { + if m != nil { + return m.TargetUrl + } + return nil +} + +func (m *DetailPlacementView) GetPlacementType() enums.PlacementTypeEnum_PlacementType { + if m 
!= nil { + return m.PlacementType + } + return enums.PlacementTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*DetailPlacementView)(nil), "google.ads.googleads.v1.resources.DetailPlacementView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/detail_placement_view.proto", fileDescriptor_detail_placement_view_204339af36109cb0) +} + +var fileDescriptor_detail_placement_view_204339af36109cb0 = []byte{ + // 439 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xdd, 0x8a, 0x13, 0x31, + 0x14, 0x66, 0xa6, 0xba, 0xb0, 0xd9, 0x9f, 0x8b, 0xf1, 0xc2, 0xa1, 0x2c, 0xd2, 0x55, 0x16, 0x7a, + 0x95, 0x61, 0xea, 0x5d, 0x16, 0x95, 0x59, 0x94, 0x05, 0x2f, 0xa4, 0x54, 0x1d, 0x50, 0x0a, 0x43, + 0xb6, 0x73, 0x0c, 0x03, 0x33, 0x49, 0x48, 0x32, 0x2d, 0x7d, 0x00, 0x5f, 0xc4, 0x4b, 0x1f, 0xc0, + 0x87, 0xf0, 0x51, 0x7c, 0x0a, 0x69, 0x66, 0x92, 0x76, 0xc1, 0xba, 0xbd, 0x3b, 0xc9, 0xf9, 0xbe, + 0x2f, 0xdf, 0x97, 0x73, 0xd0, 0x2b, 0x26, 0x04, 0xab, 0x21, 0xa1, 0xa5, 0x4e, 0xba, 0x72, 0x53, + 0x2d, 0xd3, 0x44, 0x81, 0x16, 0xad, 0x5a, 0x80, 0x4e, 0x4a, 0x30, 0xb4, 0xaa, 0x0b, 0x59, 0xd3, + 0x05, 0x34, 0xc0, 0x4d, 0xb1, 0xac, 0x60, 0x85, 0xa5, 0x12, 0x46, 0x44, 0x97, 0x1d, 0x07, 0xd3, + 0x52, 0x63, 0x4f, 0xc7, 0xcb, 0x14, 0x7b, 0xfa, 0x70, 0xb2, 0xef, 0x05, 0xe0, 0x6d, 0xa3, 0x93, + 0xad, 0xac, 0x59, 0x4b, 0xe8, 0x64, 0x87, 0xcf, 0x7a, 0x8e, 0x3d, 0xdd, 0xb5, 0xdf, 0x92, 0x95, + 0xa2, 0x52, 0x82, 0xd2, 0x7d, 0xff, 0xc2, 0x69, 0xca, 0x2a, 0xa1, 0x9c, 0x0b, 0x43, 0x4d, 0x25, + 0x78, 0xdf, 0x7d, 0xfe, 0x6b, 0x80, 0x9e, 0xbc, 0xb5, 0xa6, 0xa7, 0x4e, 0x3c, 0xaf, 0x60, 0x15, + 0xbd, 0x40, 0x67, 0xce, 0x56, 0xc1, 0x69, 0x03, 0x71, 0x30, 0x0a, 0xc6, 0xc7, 0xb3, 0x53, 0x77, + 0xf9, 0x81, 0x36, 0x10, 0x11, 0x74, 0xec, 0x2d, 0xc5, 0xe1, 0x28, 0x18, 0x9f, 0x4c, 0x2e, 0xfa, + 0x68, 0xd8, 0xd9, 0xc1, 0x1f, 0x8d, 0xaa, 0x38, 0xcb, 0x69, 0xdd, 0xc2, 0x6c, 0x0b, 0x8f, 0xde, + 0xa0, 0xd3, 0xb2, 0xd2, 0xb2, 0xa6, 0xeb, 0x4e, 0x7f, 0x70, 0x00, 0xfd, 0xa4, 0x67, 0xd8, 0xc7, + 0xbf, 0xa0, 0x21, 0x53, 0xa2, 0x95, 0x3b, 0x9f, 0x6d, 0xa8, 0x62, 0x60, 0x8a, 0x56, 0xd5, 0xf1, + 0xa3, 0x03, 0xe4, 0x9e, 0x5a, 0xbe, 0xcf, 0xfd, 0xc9, 0xb2, 0x3f, 0xab, 0x3a, 0xba, 0x46, 0x68, + 0x47, 0xea, 0xf1, 0x21, 0xc1, 0x8c, 0x27, 0x03, 0x3a, 0xbf, 0x3f, 0xa7, 0xf8, 0x68, 0x14, 0x8c, + 0xcf, 0x27, 0xaf, 0xf1, 0xbe, 0xf9, 0xdb, 0xe1, 0xe2, 0xad, 0x8f, 0xb5, 0x84, 0x77, 0xbc, 0x6d, + 0xee, 0xdf, 0xcc, 0xce, 0xe4, 0xee, 0xf1, 0xe6, 0x7b, 0x88, 0xae, 0x16, 0xa2, 0xc1, 0x0f, 0x2e, + 0xd5, 0x4d, 0xfc, 0x8f, 0xf9, 0x4e, 0x37, 0x21, 0xa6, 0xc1, 0xd7, 0xf7, 0x3d, 0x9d, 0x89, 0x9a, + 0x72, 0x86, 0x85, 0x62, 0x09, 0x03, 0x6e, 0x23, 0xba, 0x05, 0x94, 0x95, 0xfe, 0xcf, 0xc6, 0x5f, + 0xfb, 0xea, 0x47, 0x38, 0xb8, 0xcd, 0xb2, 0x9f, 0xe1, 0xe5, 0x6d, 0x27, 0x99, 0x95, 0x1a, 0x77, + 0xe5, 0xa6, 0xca, 0x53, 0x3c, 0x73, 0xc8, 0xdf, 0x0e, 0x33, 0xcf, 0x4a, 0x3d, 0xf7, 0x98, 0x79, + 0x9e, 0xce, 0x3d, 0xe6, 0x4f, 0x78, 0xd5, 0x35, 0x08, 0xc9, 0x4a, 0x4d, 0x88, 0x47, 0x11, 0x92, + 0xa7, 0x84, 0x78, 0xdc, 0xdd, 0x91, 0x35, 0xfb, 0xf2, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd6, + 0x02, 0xfe, 0xbc, 0x9d, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/display_keyword_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/display_keyword_view.pb.go new file mode 100644 index 0000000..b4b30c2 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/display_keyword_view.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/display_keyword_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A display keyword view. +type DisplayKeywordView struct { + // The resource name of the display keyword view. + // Display Keyword view resource names have the form: + // + // `customers/{customer_id}/displayKeywordViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisplayKeywordView) Reset() { *m = DisplayKeywordView{} } +func (m *DisplayKeywordView) String() string { return proto.CompactTextString(m) } +func (*DisplayKeywordView) ProtoMessage() {} +func (*DisplayKeywordView) Descriptor() ([]byte, []int) { + return fileDescriptor_display_keyword_view_a1af203287ac2439, []int{0} +} +func (m *DisplayKeywordView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisplayKeywordView.Unmarshal(m, b) +} +func (m *DisplayKeywordView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisplayKeywordView.Marshal(b, m, deterministic) +} +func (dst *DisplayKeywordView) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisplayKeywordView.Merge(dst, src) +} +func (m *DisplayKeywordView) XXX_Size() int { + return xxx_messageInfo_DisplayKeywordView.Size(m) +} +func (m *DisplayKeywordView) XXX_DiscardUnknown() { + xxx_messageInfo_DisplayKeywordView.DiscardUnknown(m) +} + +var xxx_messageInfo_DisplayKeywordView proto.InternalMessageInfo + +func (m *DisplayKeywordView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*DisplayKeywordView)(nil), "google.ads.googleads.v1.resources.DisplayKeywordView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/display_keyword_view.proto", fileDescriptor_display_keyword_view_a1af203287ac2439) +} + +var fileDescriptor_display_keyword_view_a1af203287ac2439 = []byte{ + // 276 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xcf, 0x4a, 0xec, 0x30, + 0x14, 0xc6, 0x69, 0x2f, 0x5c, 0xb0, 0xe8, 0xa6, 0x1b, 0x45, 0x5c, 0x38, 0xca, 0x80, 0xab, 0x84, + 0xe2, 0xca, 0xe8, 0x26, 0x83, 0x30, 0xa0, 0x20, 0xc3, 0x2c, 0xba, 0x90, 0x42, 0x89, 0x93, 0x10, + 0x82, 0x6d, 0x4e, 0xc9, 0xa9, 0x2d, 0xb3, 0xf7, 0x49, 0x5c, 0xfa, 0x28, 0x3e, 0x8a, 0x4f, 0x21, + 0x9d, 0x98, 0x6c, 0x04, 0xdd, 0x7d, 0x24, 0xbf, 0xef, 0x0f, 0x27, 0xbb, 0xd1, 0x00, 0xba, 0x51, + 0x54, 0x48, 0xa4, 0x5e, 0x4e, 0x6a, 0x28, 0xa8, 0x53, 0x08, 0x2f, 
0x6e, 0xa3, 0x90, 0x4a, 0x83, + 0x5d, 0x23, 0xb6, 0xf5, 0xb3, 0xda, 0x8e, 0xe0, 0x64, 0x3d, 0x18, 0x35, 0x92, 0xce, 0x41, 0x0f, + 0xf9, 0xcc, 0x5b, 0x88, 0x90, 0x48, 0xa2, 0x9b, 0x0c, 0x05, 0x89, 0xee, 0xe3, 0x93, 0x50, 0xd0, + 0x19, 0x2a, 0xac, 0x85, 0x5e, 0xf4, 0x06, 0x2c, 0xfa, 0x80, 0xb3, 0xab, 0x2c, 0xbf, 0xf5, 0xf1, + 0xf7, 0x3e, 0xbd, 0x34, 0x6a, 0xcc, 0xcf, 0xb3, 0x83, 0x10, 0x50, 0x5b, 0xd1, 0xaa, 0xa3, 0xe4, + 0x34, 0xb9, 0xd8, 0x5b, 0xef, 0x87, 0xc7, 0x07, 0xd1, 0xaa, 0xc5, 0x6b, 0x9a, 0xcd, 0x37, 0xd0, + 0x92, 0x3f, 0x27, 0x2c, 0x0e, 0x7f, 0x56, 0xac, 0xa6, 0xf6, 0x55, 0xf2, 0x78, 0xf7, 0xed, 0xd6, + 0xd0, 0x08, 0xab, 0x09, 0x38, 0x4d, 0xb5, 0xb2, 0xbb, 0x6d, 0xe1, 0x1c, 0x9d, 0xc1, 0x5f, 0xae, + 0x73, 0x1d, 0xd5, 0x5b, 0xfa, 0x6f, 0xc9, 0xf9, 0x7b, 0x3a, 0x5b, 0xfa, 0x48, 0x2e, 0x91, 0x78, + 0x39, 0xa9, 0xb2, 0x20, 0xeb, 0x40, 0x7e, 0x04, 0xa6, 0xe2, 0x12, 0xab, 0xc8, 0x54, 0x65, 0x51, + 0x45, 0xe6, 0x33, 0x9d, 0xfb, 0x0f, 0xc6, 0xb8, 0x44, 0xc6, 0x22, 0xc5, 0x58, 0x59, 0x30, 0x16, + 0xb9, 0xa7, 0xff, 0xbb, 0xb1, 0x97, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4d, 0x96, 0xc0, 0x95, + 0xc9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/domain_category.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/domain_category.pb.go new file mode 100644 index 0000000..2e67571 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/domain_category.pb.go @@ -0,0 +1,190 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/domain_category.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A category generated automatically by crawling a domain. If a campaign uses +// the DynamicSearchAdsSetting, then domain categories will be generated for +// the domain. The categories can be targeted using WebpageConditionInfo. +// See: https://support.google.com/google-ads/answer/2471185 +type DomainCategory struct { + // The resource name of the domain category. + // Domain category resource names have the form: + // + // + // `customers/{customer_id}/domainCategories/{campaign_id}~{category_base64}~{language_code}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The campaign this category is recommended for. + Campaign *wrappers.StringValue `protobuf:"bytes,2,opt,name=campaign,proto3" json:"campaign,omitempty"` + // Recommended category for the website domain. e.g. if you have a website + // about electronics, the categories could be "cameras", "televisions", etc. + Category *wrappers.StringValue `protobuf:"bytes,3,opt,name=category,proto3" json:"category,omitempty"` + // The language code specifying the language of the website. e.g. 
"en" for + // English. The language can be specified in the DynamicSearchAdsSetting + // required for dynamic search ads. This is the language of the pages from + // your website that you want Google Ads to find, create ads for, + // and match searches with. + LanguageCode *wrappers.StringValue `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The domain for the website. The domain can be specified in the + // DynamicSearchAdsSetting required for dynamic search ads. + Domain *wrappers.StringValue `protobuf:"bytes,5,opt,name=domain,proto3" json:"domain,omitempty"` + // Fraction of pages on your site that this category matches. + CoverageFraction *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=coverage_fraction,json=coverageFraction,proto3" json:"coverage_fraction,omitempty"` + // The position of this category in the set of categories. Lower numbers + // indicate a better match for the domain. null indicates not recommended. + CategoryRank *wrappers.Int64Value `protobuf:"bytes,7,opt,name=category_rank,json=categoryRank,proto3" json:"category_rank,omitempty"` + // Indicates whether this category has sub-categories. + HasChildren *wrappers.BoolValue `protobuf:"bytes,8,opt,name=has_children,json=hasChildren,proto3" json:"has_children,omitempty"` + // The recommended cost per click for the category. + RecommendedCpcBidMicros *wrappers.Int64Value `protobuf:"bytes,9,opt,name=recommended_cpc_bid_micros,json=recommendedCpcBidMicros,proto3" json:"recommended_cpc_bid_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DomainCategory) Reset() { *m = DomainCategory{} } +func (m *DomainCategory) String() string { return proto.CompactTextString(m) } +func (*DomainCategory) ProtoMessage() {} +func (*DomainCategory) Descriptor() ([]byte, []int) { + return fileDescriptor_domain_category_4860955cf62b00ac, []int{0} +} +func (m *DomainCategory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DomainCategory.Unmarshal(m, b) +} +func (m *DomainCategory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DomainCategory.Marshal(b, m, deterministic) +} +func (dst *DomainCategory) XXX_Merge(src proto.Message) { + xxx_messageInfo_DomainCategory.Merge(dst, src) +} +func (m *DomainCategory) XXX_Size() int { + return xxx_messageInfo_DomainCategory.Size(m) +} +func (m *DomainCategory) XXX_DiscardUnknown() { + xxx_messageInfo_DomainCategory.DiscardUnknown(m) +} + +var xxx_messageInfo_DomainCategory proto.InternalMessageInfo + +func (m *DomainCategory) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *DomainCategory) GetCampaign() *wrappers.StringValue { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *DomainCategory) GetCategory() *wrappers.StringValue { + if m != nil { + return m.Category + } + return nil +} + +func (m *DomainCategory) GetLanguageCode() *wrappers.StringValue { + if m != nil { + return m.LanguageCode + } + return nil +} + +func (m *DomainCategory) GetDomain() *wrappers.StringValue { + if m != nil { + return m.Domain + } + return nil +} + +func (m *DomainCategory) GetCoverageFraction() *wrappers.DoubleValue { + if m != nil { + return m.CoverageFraction + } + return nil +} + +func (m *DomainCategory) GetCategoryRank() *wrappers.Int64Value { + if m != nil { + return m.CategoryRank + } + return nil +} + +func (m *DomainCategory) GetHasChildren() 
*wrappers.BoolValue { + if m != nil { + return m.HasChildren + } + return nil +} + +func (m *DomainCategory) GetRecommendedCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.RecommendedCpcBidMicros + } + return nil +} + +func init() { + proto.RegisterType((*DomainCategory)(nil), "google.ads.googleads.v1.resources.DomainCategory") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/domain_category.proto", fileDescriptor_domain_category_4860955cf62b00ac) +} + +var fileDescriptor_domain_category_4860955cf62b00ac = []byte{ + // 493 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xd1, 0x6a, 0x14, 0x3d, + 0x1c, 0xc5, 0xd9, 0x6d, 0xbf, 0xfd, 0xda, 0x74, 0x57, 0x74, 0xbc, 0x70, 0x58, 0x8b, 0xb4, 0x4a, + 0xa1, 0x57, 0x19, 0x46, 0x8b, 0xca, 0x88, 0xe0, 0xec, 0x16, 0x4b, 0x05, 0xa5, 0xac, 0xb0, 0x88, + 0x2c, 0x0c, 0xd9, 0x24, 0xcd, 0x86, 0xce, 0xe4, 0x3f, 0x24, 0x33, 0x2b, 0xde, 0xf9, 0x2c, 0x5e, + 0xfa, 0x24, 0xe2, 0xa3, 0xf8, 0x14, 0x32, 0x93, 0x64, 0x50, 0x8a, 0x76, 0xef, 0xfe, 0x4c, 0xce, + 0xef, 0x9c, 0x93, 0x4c, 0x82, 0x9e, 0x09, 0x00, 0x91, 0xf3, 0x88, 0x30, 0x13, 0xd9, 0xb1, 0x99, + 0xd6, 0x71, 0xa4, 0xb9, 0x81, 0x5a, 0x53, 0x6e, 0x22, 0x06, 0x05, 0x91, 0x2a, 0xa3, 0xa4, 0xe2, + 0x02, 0xf4, 0x67, 0x5c, 0x6a, 0xa8, 0x20, 0x38, 0xb4, 0x6a, 0x4c, 0x98, 0xc1, 0x1d, 0x88, 0xd7, + 0x31, 0xee, 0xc0, 0xf1, 0x03, 0xe7, 0xdd, 0x02, 0xcb, 0xfa, 0x32, 0xfa, 0xa4, 0x49, 0x59, 0x72, + 0x6d, 0xac, 0xc5, 0x78, 0xdf, 0x67, 0x97, 0x32, 0x22, 0x4a, 0x41, 0x45, 0x2a, 0x09, 0xca, 0xad, + 0x3e, 0xfc, 0xbe, 0x8d, 0x6e, 0x9d, 0xb6, 0xd1, 0x53, 0x97, 0x1c, 0x3c, 0x42, 0x23, 0xef, 0x9e, + 0x29, 0x52, 0xf0, 0xb0, 0x77, 0xd0, 0x3b, 0xde, 0x9d, 0x0d, 0xfd, 0xc7, 0x77, 0xa4, 0xe0, 0xc1, + 0x73, 0xb4, 0x43, 0x49, 0x51, 0x12, 0x29, 0x54, 0xd8, 0x3f, 0xe8, 0x1d, 0xef, 0x3d, 0xde, 0x77, + 0x05, 0xb1, 0x2f, 0x82, 0xdf, 0x57, 0x5a, 0x2a, 0x31, 0x27, 0x79, 0xcd, 0x67, 0x9d, 0xda, 0x92, + 0x36, 0x2a, 0xdc, 0xda, 0x8c, 0x74, 0xc5, 0x52, 0x34, 0xca, 0x89, 0x12, 0x35, 0x11, 0x3c, 0xa3, + 0xc0, 0x78, 0xb8, 0xbd, 0x01, 0x3e, 0xf4, 0xc8, 0x14, 0x18, 0x0f, 0x4e, 0xd0, 0xc0, 0x1e, 0x74, + 0xf8, 0xdf, 0x06, 0xac, 0xd3, 0x06, 0xe7, 0xe8, 0x0e, 0x85, 0x35, 0xd7, 0x4d, 0xf0, 0xa5, 0x26, + 0xb4, 0x39, 0xc0, 0x70, 0xf0, 0x17, 0x83, 0x53, 0xa8, 0x97, 0x39, 0xb7, 0x06, 0xb7, 0x3d, 0xf6, + 0xda, 0x51, 0xc1, 0x2b, 0x34, 0xf2, 0xfb, 0xc9, 0x34, 0x51, 0x57, 0xe1, 0xff, 0xad, 0xcd, 0xfd, + 0x6b, 0x36, 0xe7, 0xaa, 0x7a, 0x7a, 0xe2, 0xb6, 0xe0, 0x89, 0x19, 0x51, 0x57, 0xc1, 0x4b, 0x34, + 0x5c, 0x11, 0x93, 0xd1, 0x95, 0xcc, 0x99, 0xe6, 0x2a, 0xdc, 0x69, 0x0d, 0xc6, 0xd7, 0x0c, 0x26, + 0x00, 0xb9, 0xe5, 0xf7, 0x56, 0xc4, 0x4c, 0x9d, 0x3c, 0xf8, 0x80, 0xc6, 0x9a, 0x53, 0x28, 0x0a, + 0xae, 0x18, 0x67, 0x19, 0x2d, 0x69, 0xb6, 0x94, 0x2c, 0x2b, 0x24, 0xd5, 0x60, 0xc2, 0xdd, 0x9b, + 0xdb, 0xdc, 0xfb, 0x0d, 0x9f, 0x96, 0x74, 0x22, 0xd9, 0xdb, 0x96, 0x9d, 0x7c, 0xe9, 0xa3, 0x23, + 0x0a, 0x05, 0xbe, 0xf1, 0xca, 0x4e, 0xee, 0xfe, 0x79, 0xe3, 0x2e, 0x9a, 0x94, 0x8b, 0xde, 0xc7, + 0x37, 0x8e, 0x14, 0xd0, 0xfc, 0x33, 0x0c, 0x5a, 0x44, 0x82, 0xab, 0xb6, 0x83, 0x7f, 0x35, 0xa5, + 0x34, 0xff, 0x78, 0x44, 0x2f, 0xba, 0xe9, 0x6b, 0x7f, 0xeb, 0x2c, 0x4d, 0xbf, 0xf5, 0x0f, 0xcf, + 0xac, 0x65, 0xca, 0x0c, 0xb6, 0x63, 0x33, 0xcd, 0x63, 0x3c, 0xf3, 0xca, 0x1f, 0x5e, 0xb3, 0x48, + 0x99, 0x59, 0x74, 0x9a, 0xc5, 0x3c, 0x5e, 0x74, 0x9a, 0x9f, 0xfd, 0x23, 0xbb, 0x90, 0x24, 0x29, + 0x33, 0x49, 0xd2, 0xa9, 0x92, 0x64, 0x1e, 0x27, 0x49, 0xa7, 0x5b, 0x0e, 0xda, 0xb2, 0x4f, 0x7e, + 
0x05, 0x00, 0x00, 0xff, 0xff, 0xce, 0x80, 0xc4, 0x0b, 0xf0, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.pb.go new file mode 100644 index 0000000..d04cb4b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.pb.go @@ -0,0 +1,146 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A dynamic search ads search term view. +type DynamicSearchAdsSearchTermView struct { + // The resource name of the dynamic search ads search term view. + // Dynamic search ads search term view resource names have the form: + // + // + // `customers/{customer_id}/dynamicSearchAdsSearchTermViews/{ad_group_id}~{search_term_fp}~{headline_fp}~{landing_page_fp}~{page_url_fp}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Search term + // + // This field is read-only. + SearchTerm *wrappers.StringValue `protobuf:"bytes,2,opt,name=search_term,json=searchTerm,proto3" json:"search_term,omitempty"` + // The dynamically generated headline of the Dynamic Search Ad. + // + // This field is read-only. + Headline *wrappers.StringValue `protobuf:"bytes,3,opt,name=headline,proto3" json:"headline,omitempty"` + // The dynamically selected landing page URL of the impression. + // + // This field is read-only. + LandingPage *wrappers.StringValue `protobuf:"bytes,4,opt,name=landing_page,json=landingPage,proto3" json:"landing_page,omitempty"` + // The URL of page feed item served for the impression. + // + // This field is read-only. 
+ PageUrl *wrappers.StringValue `protobuf:"bytes,5,opt,name=page_url,json=pageUrl,proto3" json:"page_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicSearchAdsSearchTermView) Reset() { *m = DynamicSearchAdsSearchTermView{} } +func (m *DynamicSearchAdsSearchTermView) String() string { return proto.CompactTextString(m) } +func (*DynamicSearchAdsSearchTermView) ProtoMessage() {} +func (*DynamicSearchAdsSearchTermView) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamic_search_ads_search_term_view_4a7297abf6b93574, []int{0} +} +func (m *DynamicSearchAdsSearchTermView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicSearchAdsSearchTermView.Unmarshal(m, b) +} +func (m *DynamicSearchAdsSearchTermView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicSearchAdsSearchTermView.Marshal(b, m, deterministic) +} +func (dst *DynamicSearchAdsSearchTermView) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicSearchAdsSearchTermView.Merge(dst, src) +} +func (m *DynamicSearchAdsSearchTermView) XXX_Size() int { + return xxx_messageInfo_DynamicSearchAdsSearchTermView.Size(m) +} +func (m *DynamicSearchAdsSearchTermView) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicSearchAdsSearchTermView.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicSearchAdsSearchTermView proto.InternalMessageInfo + +func (m *DynamicSearchAdsSearchTermView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *DynamicSearchAdsSearchTermView) GetSearchTerm() *wrappers.StringValue { + if m != nil { + return m.SearchTerm + } + return nil +} + +func (m *DynamicSearchAdsSearchTermView) GetHeadline() *wrappers.StringValue { + if m != nil { + return m.Headline + } + return nil +} + +func (m *DynamicSearchAdsSearchTermView) GetLandingPage() *wrappers.StringValue { + if m != nil { + return m.LandingPage + } + return nil +} + +func (m *DynamicSearchAdsSearchTermView) GetPageUrl() *wrappers.StringValue { + if m != nil { + return m.PageUrl + } + return nil +} + +func init() { + proto.RegisterType((*DynamicSearchAdsSearchTermView)(nil), "google.ads.googleads.v1.resources.DynamicSearchAdsSearchTermView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/dynamic_search_ads_search_term_view.proto", fileDescriptor_dynamic_search_ads_search_term_view_4a7297abf6b93574) +} + +var fileDescriptor_dynamic_search_ads_search_term_view_4a7297abf6b93574 = []byte{ + // 399 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x6a, 0xd4, 0x40, + 0x1c, 0xc7, 0xd9, 0xd4, 0x3f, 0x75, 0xb6, 0x5e, 0x72, 0x0a, 0xa5, 0x94, 0xd6, 0x52, 0xe8, 0x69, + 0x42, 0xf4, 0xa0, 0x8c, 0x88, 0xa4, 0x08, 0x05, 0x05, 0x59, 0xb6, 0x9a, 0x83, 0x04, 0xc2, 0xaf, + 0x99, 0x9f, 0xd3, 0x81, 0x64, 0x26, 0xcc, 0x24, 0xbb, 0xf8, 0x16, 0x3e, 0x83, 0x47, 0x9f, 0xc0, + 0x67, 0xf0, 0x51, 0x7c, 0x0a, 0x49, 0x26, 0x33, 0x78, 0x5a, 0xf7, 0xf6, 0x25, 0xf3, 0xf9, 0xfe, + 0x49, 0x32, 0xe4, 0x83, 0xd0, 0x5a, 0x34, 0x98, 0x02, 0xb7, 0xa9, 0x93, 0xa3, 0xda, 0x64, 0xa9, + 0x41, 0xab, 0x07, 0x53, 0xa3, 0x4d, 0xf9, 0x37, 0x05, 0xad, 0xac, 0x2b, 0x8b, 0x60, 0xea, 0xfb, + 0x0a, 0xb8, 0xf5, 0xb2, 0x47, 0xd3, 0x56, 0x1b, 0x89, 0x5b, 0xda, 0x19, 0xdd, 0xeb, 0xf8, 0xdc, + 0x25, 0x50, 0xe0, 0x96, 0x86, 0x30, 0xba, 0xc9, 0x68, 0x08, 0x3b, 0x3e, 0x9d, 0xfb, 0x26, 0xc3, + 0xdd, 0xf0, 0x35, 0xdd, 0x1a, 0xe8, 0x3a, 
0x34, 0xd6, 0x45, 0x1c, 0x9f, 0xf8, 0x3d, 0x9d, 0x4c, + 0x41, 0x29, 0xdd, 0x43, 0x2f, 0xb5, 0x9a, 0x4f, 0x9f, 0xfd, 0x8a, 0xc8, 0xe9, 0x3b, 0x37, 0xe7, + 0x76, 0x9a, 0x90, 0x73, 0xeb, 0xc4, 0x27, 0x34, 0x6d, 0x21, 0x71, 0x1b, 0x5f, 0x90, 0xa7, 0xbe, + 0xad, 0x52, 0xd0, 0x62, 0xb2, 0x38, 0x5b, 0x5c, 0x3d, 0x59, 0x1f, 0xf9, 0x87, 0x1f, 0xa1, 0xc5, + 0xf8, 0x0d, 0x59, 0xfe, 0xf3, 0x0a, 0x49, 0x74, 0xb6, 0xb8, 0x5a, 0x3e, 0x3f, 0x99, 0x37, 0x53, + 0xbf, 0x8d, 0xde, 0xf6, 0x46, 0x2a, 0x51, 0x40, 0x33, 0xe0, 0x9a, 0xd8, 0xd0, 0x13, 0xbf, 0x22, + 0x87, 0xf7, 0x08, 0xbc, 0x91, 0x0a, 0x93, 0x83, 0x3d, 0xbc, 0x81, 0x8e, 0xdf, 0x92, 0xa3, 0x06, + 0x14, 0x97, 0x4a, 0x54, 0x1d, 0x08, 0x4c, 0x1e, 0xec, 0xe1, 0x5e, 0xce, 0x8e, 0x15, 0x08, 0x8c, + 0x5f, 0x92, 0xc3, 0xd1, 0x58, 0x0d, 0xa6, 0x49, 0x1e, 0xee, 0x61, 0x7e, 0x3c, 0xd2, 0x9f, 0x4d, + 0x73, 0xfd, 0x3d, 0x22, 0x97, 0xb5, 0x6e, 0xe9, 0x7f, 0x7f, 0xd1, 0xf5, 0xc5, 0xee, 0x2f, 0xbc, + 0x1a, 0x6b, 0x56, 0x8b, 0x2f, 0xef, 0xe7, 0x24, 0xa1, 0x1b, 0x50, 0x82, 0x6a, 0x23, 0x52, 0x81, + 0x6a, 0x1a, 0xe1, 0x6f, 0x52, 0x27, 0xed, 0x8e, 0x8b, 0xf5, 0x3a, 0xa8, 0x1f, 0xd1, 0xc1, 0x4d, + 0x9e, 0xff, 0x8c, 0xce, 0x6f, 0x5c, 0x64, 0xce, 0x2d, 0x75, 0x72, 0x54, 0x45, 0x46, 0xd7, 0x9e, + 0xfc, 0xed, 0x99, 0x32, 0xe7, 0xb6, 0x0c, 0x4c, 0x59, 0x64, 0x65, 0x60, 0xfe, 0x44, 0x97, 0xee, + 0x80, 0xb1, 0x9c, 0x5b, 0xc6, 0x02, 0xc5, 0x58, 0x91, 0x31, 0x16, 0xb8, 0xbb, 0x47, 0xd3, 0xd8, + 0x17, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x93, 0xd8, 0xfa, 0x90, 0x04, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/expanded_landing_page_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/expanded_landing_page_view.pb.go new file mode 100644 index 0000000..d89eb17 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/expanded_landing_page_view.pb.go @@ -0,0 +1,109 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/expanded_landing_page_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A landing page view with metrics aggregated at the expanded final URL +// level. +type ExpandedLandingPageView struct { + // The resource name of the expanded landing page view. + // Expanded landing page view resource names have the form: + // + // + // `customers/{customer_id}/expandedLandingPageViews/{expanded_final_url_fingerprint}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The final URL that clicks are directed to. 
+ ExpandedFinalUrl *wrappers.StringValue `protobuf:"bytes,2,opt,name=expanded_final_url,json=expandedFinalUrl,proto3" json:"expanded_final_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExpandedLandingPageView) Reset() { *m = ExpandedLandingPageView{} } +func (m *ExpandedLandingPageView) String() string { return proto.CompactTextString(m) } +func (*ExpandedLandingPageView) ProtoMessage() {} +func (*ExpandedLandingPageView) Descriptor() ([]byte, []int) { + return fileDescriptor_expanded_landing_page_view_c5c0b44fb5c32bb4, []int{0} +} +func (m *ExpandedLandingPageView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExpandedLandingPageView.Unmarshal(m, b) +} +func (m *ExpandedLandingPageView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExpandedLandingPageView.Marshal(b, m, deterministic) +} +func (dst *ExpandedLandingPageView) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExpandedLandingPageView.Merge(dst, src) +} +func (m *ExpandedLandingPageView) XXX_Size() int { + return xxx_messageInfo_ExpandedLandingPageView.Size(m) +} +func (m *ExpandedLandingPageView) XXX_DiscardUnknown() { + xxx_messageInfo_ExpandedLandingPageView.DiscardUnknown(m) +} + +var xxx_messageInfo_ExpandedLandingPageView proto.InternalMessageInfo + +func (m *ExpandedLandingPageView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ExpandedLandingPageView) GetExpandedFinalUrl() *wrappers.StringValue { + if m != nil { + return m.ExpandedFinalUrl + } + return nil +} + +func init() { + proto.RegisterType((*ExpandedLandingPageView)(nil), "google.ads.googleads.v1.resources.ExpandedLandingPageView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/expanded_landing_page_view.proto", fileDescriptor_expanded_landing_page_view_c5c0b44fb5c32bb4) +} + +var fileDescriptor_expanded_landing_page_view_c5c0b44fb5c32bb4 = []byte{ + // 343 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xc1, 0x4a, 0xeb, 0x40, + 0x14, 0x86, 0x49, 0x2e, 0x5c, 0xb8, 0xb9, 0x0a, 0x92, 0x8d, 0xa5, 0x14, 0x69, 0x95, 0x42, 0x57, + 0x13, 0xa2, 0xbb, 0x71, 0x95, 0x82, 0x16, 0x8a, 0x48, 0xa9, 0x98, 0x85, 0x04, 0xc2, 0x69, 0xe7, + 0x74, 0x18, 0x48, 0x67, 0xc2, 0x4c, 0xd2, 0xfa, 0x0a, 0xfa, 0x18, 0x2e, 0x7d, 0x14, 0x1f, 0xc5, + 0xa7, 0x90, 0x34, 0x99, 0xd9, 0xa9, 0xbb, 0x9f, 0x99, 0xff, 0xff, 0xcf, 0x77, 0x38, 0xc1, 0x94, + 0x2b, 0xc5, 0x0b, 0x8c, 0x80, 0x99, 0xa8, 0x95, 0x8d, 0xda, 0xc5, 0x91, 0x46, 0xa3, 0x6a, 0xbd, + 0x46, 0x13, 0xe1, 0x73, 0x09, 0x92, 0x21, 0xcb, 0x0b, 0x90, 0x4c, 0x48, 0x9e, 0x97, 0xc0, 0x31, + 0xdf, 0x09, 0xdc, 0x93, 0x52, 0xab, 0x4a, 0x85, 0xa3, 0x36, 0x48, 0x80, 0x19, 0xe2, 0x3a, 0xc8, + 0x2e, 0x26, 0xae, 0xa3, 0x7f, 0xd6, 0x8d, 0x39, 0x04, 0x56, 0xf5, 0x26, 0xda, 0x6b, 0x28, 0x4b, + 0xd4, 0xa6, 0xad, 0xe8, 0x0f, 0x2c, 0x46, 0x29, 0x22, 0x90, 0x52, 0x55, 0x50, 0x09, 0x25, 0xbb, + 0xdf, 0xf3, 0x57, 0x2f, 0x38, 0xbd, 0xe9, 0x28, 0xee, 0x5a, 0x88, 0x05, 0x70, 0x4c, 0x05, 0xee, + 0xc3, 0x8b, 0xe0, 0xd8, 0x8e, 0xc9, 0x25, 0x6c, 0xb1, 0xe7, 0x0d, 0xbd, 0xc9, 0xbf, 0xe5, 0x91, + 0x7d, 0xbc, 0x87, 0x2d, 0x86, 0xf3, 0x20, 0x74, 0x5b, 0x6c, 0x84, 0x84, 0x22, 0xaf, 0x75, 0xd1, + 0xf3, 0x87, 0xde, 0xe4, 0xff, 0xe5, 0xa0, 0x63, 0x26, 0x96, 0x8d, 0x3c, 0x54, 0x5a, 0x48, 0x9e, + 0x42, 0x51, 0xe3, 0xf2, 0xc4, 0xe6, 0x6e, 0x9b, 0xd8, 0xa3, 0x2e, 0xa6, 0x2f, 0x7e, 0x30, 0x5e, + 
0xab, 0x2d, 0xf9, 0x75, 0xe9, 0xe9, 0xe0, 0x1b, 0xe6, 0x45, 0x33, 0x68, 0xe1, 0x3d, 0xcd, 0xbb, + 0x0a, 0xae, 0x0a, 0x90, 0x9c, 0x28, 0xcd, 0x23, 0x8e, 0xf2, 0x80, 0x61, 0x6f, 0x51, 0x0a, 0xf3, + 0xc3, 0x69, 0xae, 0x9d, 0x7a, 0xf3, 0xff, 0xcc, 0x92, 0xe4, 0xdd, 0x1f, 0xcd, 0xda, 0xca, 0x84, + 0x19, 0xd2, 0xca, 0x46, 0xa5, 0x31, 0x59, 0x5a, 0xe7, 0x87, 0xf5, 0x64, 0x09, 0x33, 0x99, 0xf3, + 0x64, 0x69, 0x9c, 0x39, 0xcf, 0xa7, 0x3f, 0x6e, 0x3f, 0x28, 0x4d, 0x98, 0xa1, 0xd4, 0xb9, 0x28, + 0x4d, 0x63, 0x4a, 0x9d, 0x6f, 0xf5, 0xf7, 0x00, 0x7b, 0xf5, 0x15, 0x00, 0x00, 0xff, 0xff, 0xcc, + 0x2f, 0xea, 0xd4, 0x46, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/extension_feed_item.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/extension_feed_item.pb.go new file mode 100644 index 0000000..fc7a769 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/extension_feed_item.pb.go @@ -0,0 +1,573 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/extension_feed_item.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An extension feed item. +type ExtensionFeedItem struct { + // The resource name of the extension feed item. + // Extension feed item resource names have the form: + // + // `customers/{customer_id}/extensionFeedItems/{feed_item_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The extension type of the extension feed item. + // This field is read-only. + ExtensionType enums.ExtensionTypeEnum_ExtensionType `protobuf:"varint,13,opt,name=extension_type,json=extensionType,proto3,enum=google.ads.googleads.v1.enums.ExtensionTypeEnum_ExtensionType" json:"extension_type,omitempty"` + // Start time in which this feed item is effective and can begin serving. + // The format is "YYYY-MM-DD HH:MM:SS". + // Examples: "2018-03-05 09:15:00" or "2018-02-01 14:34:30" + StartDateTime *wrappers.StringValue `protobuf:"bytes,5,opt,name=start_date_time,json=startDateTime,proto3" json:"start_date_time,omitempty"` + // End time in which this feed item is no longer effective and will stop + // serving. + // The format is "YYYY-MM-DD HH:MM:SS". + // Examples: "2018-03-05 09:15:00" or "2018-02-01 14:34:30" + EndDateTime *wrappers.StringValue `protobuf:"bytes,6,opt,name=end_date_time,json=endDateTime,proto3" json:"end_date_time,omitempty"` + // List of non-overlapping schedules specifying all time intervals + // for which the feed item may serve. 
There can be a maximum of 6 schedules + // per day. + AdSchedules []*common.AdScheduleInfo `protobuf:"bytes,16,rep,name=ad_schedules,json=adSchedules,proto3" json:"ad_schedules,omitempty"` + // The targeted device. + Device enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice `protobuf:"varint,17,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice" json:"device,omitempty"` + // Status of the feed item. + // This field is read-only. + Status enums.FeedItemStatusEnum_FeedItemStatus `protobuf:"varint,4,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedItemStatusEnum_FeedItemStatus" json:"status,omitempty"` + // Extension type. + // + // Types that are valid to be assigned to Extension: + // *ExtensionFeedItem_SitelinkFeedItem + // *ExtensionFeedItem_StructuredSnippetFeedItem + // *ExtensionFeedItem_AppFeedItem + // *ExtensionFeedItem_CallFeedItem + // *ExtensionFeedItem_CalloutFeedItem + // *ExtensionFeedItem_TextMessageFeedItem + // *ExtensionFeedItem_PriceFeedItem + // *ExtensionFeedItem_PromotionFeedItem + // *ExtensionFeedItem_LocationFeedItem + // *ExtensionFeedItem_AffiliateLocationFeedItem + Extension isExtensionFeedItem_Extension `protobuf_oneof:"extension"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionFeedItem) Reset() { *m = ExtensionFeedItem{} } +func (m *ExtensionFeedItem) String() string { return proto.CompactTextString(m) } +func (*ExtensionFeedItem) ProtoMessage() {} +func (*ExtensionFeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_3b0d7704bf91f757, []int{0} +} +func (m *ExtensionFeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionFeedItem.Unmarshal(m, b) +} +func (m *ExtensionFeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionFeedItem.Marshal(b, m, deterministic) +} +func (dst *ExtensionFeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionFeedItem.Merge(dst, src) +} +func (m *ExtensionFeedItem) XXX_Size() int { + return xxx_messageInfo_ExtensionFeedItem.Size(m) +} +func (m *ExtensionFeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionFeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionFeedItem proto.InternalMessageInfo + +func (m *ExtensionFeedItem) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ExtensionFeedItem) GetExtensionType() enums.ExtensionTypeEnum_ExtensionType { + if m != nil { + return m.ExtensionType + } + return enums.ExtensionTypeEnum_UNSPECIFIED +} + +func (m *ExtensionFeedItem) GetStartDateTime() *wrappers.StringValue { + if m != nil { + return m.StartDateTime + } + return nil +} + +func (m *ExtensionFeedItem) GetEndDateTime() *wrappers.StringValue { + if m != nil { + return m.EndDateTime + } + return nil +} + +func (m *ExtensionFeedItem) GetAdSchedules() []*common.AdScheduleInfo { + if m != nil { + return m.AdSchedules + } + return nil +} + +func (m *ExtensionFeedItem) GetDevice() enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice { + if m != nil { + return m.Device + } + return enums.FeedItemTargetDeviceEnum_UNSPECIFIED +} + +func (m *ExtensionFeedItem) GetStatus() enums.FeedItemStatusEnum_FeedItemStatus { + if m != nil { + return m.Status + } + return enums.FeedItemStatusEnum_UNSPECIFIED +} + +type isExtensionFeedItem_Extension interface { + isExtensionFeedItem_Extension() +} + +type 
ExtensionFeedItem_SitelinkFeedItem struct { + SitelinkFeedItem *common.SitelinkFeedItem `protobuf:"bytes,2,opt,name=sitelink_feed_item,json=sitelinkFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_StructuredSnippetFeedItem struct { + StructuredSnippetFeedItem *common.StructuredSnippetFeedItem `protobuf:"bytes,3,opt,name=structured_snippet_feed_item,json=structuredSnippetFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_AppFeedItem struct { + AppFeedItem *common.AppFeedItem `protobuf:"bytes,7,opt,name=app_feed_item,json=appFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_CallFeedItem struct { + CallFeedItem *common.CallFeedItem `protobuf:"bytes,8,opt,name=call_feed_item,json=callFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_CalloutFeedItem struct { + CalloutFeedItem *common.CalloutFeedItem `protobuf:"bytes,9,opt,name=callout_feed_item,json=calloutFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_TextMessageFeedItem struct { + TextMessageFeedItem *common.TextMessageFeedItem `protobuf:"bytes,10,opt,name=text_message_feed_item,json=textMessageFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_PriceFeedItem struct { + PriceFeedItem *common.PriceFeedItem `protobuf:"bytes,11,opt,name=price_feed_item,json=priceFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_PromotionFeedItem struct { + PromotionFeedItem *common.PromotionFeedItem `protobuf:"bytes,12,opt,name=promotion_feed_item,json=promotionFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_LocationFeedItem struct { + LocationFeedItem *common.LocationFeedItem `protobuf:"bytes,14,opt,name=location_feed_item,json=locationFeedItem,proto3,oneof"` +} + +type ExtensionFeedItem_AffiliateLocationFeedItem struct { + AffiliateLocationFeedItem *common.AffiliateLocationFeedItem `protobuf:"bytes,15,opt,name=affiliate_location_feed_item,json=affiliateLocationFeedItem,proto3,oneof"` +} + +func (*ExtensionFeedItem_SitelinkFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_StructuredSnippetFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_AppFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_CallFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_CalloutFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_TextMessageFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_PriceFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_PromotionFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_LocationFeedItem) isExtensionFeedItem_Extension() {} + +func (*ExtensionFeedItem_AffiliateLocationFeedItem) isExtensionFeedItem_Extension() {} + +func (m *ExtensionFeedItem) GetExtension() isExtensionFeedItem_Extension { + if m != nil { + return m.Extension + } + return nil +} + +func (m *ExtensionFeedItem) GetSitelinkFeedItem() *common.SitelinkFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_SitelinkFeedItem); ok { + return x.SitelinkFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetStructuredSnippetFeedItem() *common.StructuredSnippetFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_StructuredSnippetFeedItem); ok { + return x.StructuredSnippetFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetAppFeedItem() *common.AppFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_AppFeedItem); ok { + return x.AppFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetCallFeedItem() *common.CallFeedItem { + if x, ok := 
m.GetExtension().(*ExtensionFeedItem_CallFeedItem); ok { + return x.CallFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetCalloutFeedItem() *common.CalloutFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_CalloutFeedItem); ok { + return x.CalloutFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetTextMessageFeedItem() *common.TextMessageFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_TextMessageFeedItem); ok { + return x.TextMessageFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetPriceFeedItem() *common.PriceFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_PriceFeedItem); ok { + return x.PriceFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetPromotionFeedItem() *common.PromotionFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_PromotionFeedItem); ok { + return x.PromotionFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetLocationFeedItem() *common.LocationFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_LocationFeedItem); ok { + return x.LocationFeedItem + } + return nil +} + +func (m *ExtensionFeedItem) GetAffiliateLocationFeedItem() *common.AffiliateLocationFeedItem { + if x, ok := m.GetExtension().(*ExtensionFeedItem_AffiliateLocationFeedItem); ok { + return x.AffiliateLocationFeedItem + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExtensionFeedItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExtensionFeedItem_OneofMarshaler, _ExtensionFeedItem_OneofUnmarshaler, _ExtensionFeedItem_OneofSizer, []interface{}{ + (*ExtensionFeedItem_SitelinkFeedItem)(nil), + (*ExtensionFeedItem_StructuredSnippetFeedItem)(nil), + (*ExtensionFeedItem_AppFeedItem)(nil), + (*ExtensionFeedItem_CallFeedItem)(nil), + (*ExtensionFeedItem_CalloutFeedItem)(nil), + (*ExtensionFeedItem_TextMessageFeedItem)(nil), + (*ExtensionFeedItem_PriceFeedItem)(nil), + (*ExtensionFeedItem_PromotionFeedItem)(nil), + (*ExtensionFeedItem_LocationFeedItem)(nil), + (*ExtensionFeedItem_AffiliateLocationFeedItem)(nil), + } +} + +func _ExtensionFeedItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExtensionFeedItem) + // extension + switch x := m.Extension.(type) { + case *ExtensionFeedItem_SitelinkFeedItem: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SitelinkFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_StructuredSnippetFeedItem: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredSnippetFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_AppFeedItem: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_CallFeedItem: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CallFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_CalloutFeedItem: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CalloutFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_TextMessageFeedItem: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextMessageFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_PriceFeedItem: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PriceFeedItem); err != nil { + return err + } 
+ case *ExtensionFeedItem_PromotionFeedItem: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PromotionFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_LocationFeedItem: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LocationFeedItem); err != nil { + return err + } + case *ExtensionFeedItem_AffiliateLocationFeedItem: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AffiliateLocationFeedItem); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExtensionFeedItem.Extension has unexpected type %T", x) + } + return nil +} + +func _ExtensionFeedItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExtensionFeedItem) + switch tag { + case 2: // extension.sitelink_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.SitelinkFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_SitelinkFeedItem{msg} + return true, err + case 3: // extension.structured_snippet_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.StructuredSnippetFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_StructuredSnippetFeedItem{msg} + return true, err + case 7: // extension.app_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AppFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_AppFeedItem{msg} + return true, err + case 8: // extension.call_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CallFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_CallFeedItem{msg} + return true, err + case 9: // extension.callout_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CalloutFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_CalloutFeedItem{msg} + return true, err + case 10: // extension.text_message_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.TextMessageFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_TextMessageFeedItem{msg} + return true, err + case 11: // extension.price_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PriceFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_PriceFeedItem{msg} + return true, err + case 12: // extension.promotion_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.PromotionFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_PromotionFeedItem{msg} + return true, err + case 14: // extension.location_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LocationFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_LocationFeedItem{msg} + return true, err + case 15: // extension.affiliate_location_feed_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AffiliateLocationFeedItem) + err := b.DecodeMessage(msg) + m.Extension = &ExtensionFeedItem_AffiliateLocationFeedItem{msg} + return true, err + default: + return false, nil + } +} + +func 
_ExtensionFeedItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExtensionFeedItem) + // extension + switch x := m.Extension.(type) { + case *ExtensionFeedItem_SitelinkFeedItem: + s := proto.Size(x.SitelinkFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_StructuredSnippetFeedItem: + s := proto.Size(x.StructuredSnippetFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_AppFeedItem: + s := proto.Size(x.AppFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_CallFeedItem: + s := proto.Size(x.CallFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_CalloutFeedItem: + s := proto.Size(x.CalloutFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_TextMessageFeedItem: + s := proto.Size(x.TextMessageFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_PriceFeedItem: + s := proto.Size(x.PriceFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_PromotionFeedItem: + s := proto.Size(x.PromotionFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_LocationFeedItem: + s := proto.Size(x.LocationFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItem_AffiliateLocationFeedItem: + s := proto.Size(x.AffiliateLocationFeedItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ExtensionFeedItem)(nil), "google.ads.googleads.v1.resources.ExtensionFeedItem") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/extension_feed_item.proto", fileDescriptor_extension_feed_item_3b0d7704bf91f757) +} + +var fileDescriptor_extension_feed_item_3b0d7704bf91f757 = []byte{ + // 805 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x96, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0xc7, 0x49, 0x16, 0x16, 0x76, 0x92, 0xec, 0x76, 0xa7, 0x52, 0xe5, 0x56, 0x2b, 0x94, 0x82, + 0x2a, 0x45, 0x82, 0xda, 0x24, 0xe5, 0x06, 0xaf, 0x84, 0x9a, 0x65, 0x4b, 0x29, 0x2a, 0x68, 0xeb, + 0x44, 0x01, 0xa1, 0x20, 0x33, 0xf5, 0x9c, 0x98, 0x01, 0x7b, 0x66, 0xe4, 0x19, 0x2f, 0x5b, 0x89, + 0x3b, 0xde, 0x84, 0x4b, 0xde, 0x80, 0x57, 0xe0, 0x51, 0x78, 0x8a, 0xca, 0xe3, 0x8f, 0x9d, 0xa4, + 0x9b, 0x3a, 0x77, 0x33, 0x67, 0xce, 0xff, 0xff, 0xf3, 0x9c, 0xf9, 0x32, 0x3a, 0x8d, 0x85, 0x88, + 0x13, 0xf0, 0x08, 0x55, 0x5e, 0xd9, 0x2c, 0x5a, 0x97, 0x63, 0x2f, 0x03, 0x25, 0xf2, 0x2c, 0x02, + 0xe5, 0xc1, 0x95, 0x06, 0xae, 0x98, 0xe0, 0xe1, 0x0a, 0x80, 0x86, 0x4c, 0x43, 0xea, 0xca, 0x4c, + 0x68, 0x81, 0xef, 0x97, 0x0a, 0x97, 0x50, 0xe5, 0x36, 0x62, 0xf7, 0x72, 0xec, 0x36, 0xe2, 0x7b, + 0x0f, 0xb7, 0xf9, 0x47, 0x22, 0x4d, 0x05, 0xf7, 0xa2, 0x8c, 0x69, 0xc8, 0x18, 0x29, 0x1d, 0xef, + 0x79, 0x2d, 0xe9, 0xcd, 0xb7, 0xa8, 0x4a, 0x30, 0xd9, 0x26, 0x00, 0x9e, 0xa7, 0xf6, 0xb7, 0xeb, + 0x57, 0x12, 0x2a, 0xcd, 0xe7, 0x6f, 0xd7, 0x34, 0xb3, 0x0c, 0x95, 0x26, 0x3a, 0xaf, 0x49, 0xa7, + 0xbb, 0xaa, 0x34, 0xc9, 0x62, 0xd0, 0x21, 0x85, 0x4b, 0x16, 0xd5, 0xc8, 0x0f, 0x2b, 0xb1, 0xe9, + 0xbd, 0xcc, 0x57, 0xde, 0x1f, 0x19, 0x91, 0x12, 0xb2, 0xda, 0xfc, 0xa4, 0x36, 0x97, 0xcc, 0x23, + 0x9c, 0x0b, 
0x4d, 0xf4, 0xf5, 0x24, 0x3f, 0xfa, 0xb7, 0x8f, 0x8e, 0x9f, 0xd4, 0x33, 0xf9, 0x1a, + 0x80, 0x3e, 0xd3, 0x90, 0xe2, 0x8f, 0xd1, 0xa0, 0xae, 0x73, 0xc8, 0x49, 0x0a, 0x4e, 0x67, 0xd8, + 0x19, 0x1d, 0x04, 0xfd, 0x3a, 0xf8, 0x3d, 0x49, 0x01, 0x03, 0x3a, 0x5c, 0xaf, 0x81, 0x33, 0x18, + 0x76, 0x46, 0x87, 0x93, 0x2f, 0xdd, 0x6d, 0x6b, 0x67, 0xa6, 0xe3, 0x36, 0xb8, 0xf9, 0x2b, 0x09, + 0x4f, 0x78, 0x9e, 0xae, 0x47, 0x82, 0x01, 0xd8, 0x5d, 0x7c, 0x8e, 0x8e, 0x94, 0x26, 0x99, 0x0e, + 0x29, 0xd1, 0x10, 0x6a, 0x96, 0x82, 0xf3, 0xde, 0xb0, 0x33, 0xea, 0x4d, 0x4e, 0x6a, 0x4e, 0x3d, + 0x73, 0x77, 0xa6, 0x33, 0xc6, 0xe3, 0x05, 0x49, 0x72, 0x08, 0x06, 0x46, 0x74, 0x4e, 0x34, 0xcc, + 0x59, 0x0a, 0xf8, 0x31, 0x1a, 0x00, 0xa7, 0x96, 0xc7, 0xfe, 0x0e, 0x1e, 0x3d, 0xe0, 0xb4, 0x71, + 0x78, 0x81, 0xfa, 0x84, 0x86, 0x2a, 0xfa, 0x15, 0x68, 0x9e, 0x80, 0x72, 0x6e, 0x0d, 0xf7, 0x46, + 0xbd, 0x89, 0xbb, 0x75, 0xb2, 0xe5, 0xb6, 0x72, 0xa7, 0x74, 0x56, 0x49, 0x9e, 0xf1, 0x95, 0x08, + 0x7a, 0xa4, 0xe9, 0x2b, 0x4c, 0xd1, 0x7e, 0xb9, 0x94, 0xce, 0xb1, 0xa9, 0xdc, 0xf3, 0x96, 0xca, + 0xd5, 0xeb, 0x33, 0x37, 0xdb, 0xe0, 0xdc, 0x48, 0x4d, 0x01, 0x6f, 0x1a, 0x08, 0x2a, 0x6f, 0xfc, + 0x23, 0xda, 0x2f, 0x77, 0x9b, 0xf3, 0xae, 0xa1, 0x3c, 0xde, 0x91, 0x32, 0x33, 0xa2, 0x35, 0xff, + 0x32, 0x14, 0x54, 0x7e, 0xf8, 0x17, 0x84, 0x15, 0xd3, 0x90, 0x30, 0xfe, 0xfb, 0xf5, 0x01, 0x76, + 0xba, 0xa6, 0xb2, 0x9f, 0xb5, 0x15, 0x66, 0x56, 0x29, 0x6b, 0xef, 0x6f, 0xde, 0x09, 0x6e, 0xa9, + 0x8d, 0x18, 0xfe, 0x13, 0x9d, 0x28, 0x9d, 0xe5, 0x91, 0xce, 0x33, 0xa0, 0xa1, 0xe2, 0x4c, 0x4a, + 0xd0, 0x16, 0x6b, 0xcf, 0xb0, 0xbe, 0x68, 0x65, 0x35, 0x1e, 0xb3, 0xd2, 0xc2, 0x82, 0xde, 0x55, + 0xdb, 0x06, 0xf1, 0x0b, 0x34, 0x20, 0x52, 0x5a, 0xb8, 0xf7, 0x0d, 0xee, 0x93, 0xd6, 0x35, 0x97, + 0xd2, 0x02, 0xf4, 0xc8, 0x75, 0x17, 0xcf, 0xd1, 0x61, 0x44, 0x92, 0xc4, 0xf2, 0xfc, 0xc0, 0x78, + 0x7e, 0xda, 0xe6, 0xf9, 0x15, 0x49, 0x12, 0xcb, 0xb4, 0x1f, 0x59, 0x7d, 0xfc, 0x33, 0x3a, 0x2e, + 0xfa, 0x22, 0xb7, 0x6b, 0x73, 0x60, 0x8c, 0xbd, 0x5d, 0x8c, 0x45, 0x6e, 0x57, 0xe4, 0x28, 0x5a, + 0x0f, 0xe1, 0xdf, 0xd0, 0x1d, 0x0d, 0x57, 0x3a, 0x4c, 0x41, 0x29, 0x12, 0x83, 0xc5, 0x40, 0x86, + 0xf1, 0xa8, 0x8d, 0x31, 0x87, 0x2b, 0xfd, 0x5d, 0x29, 0xb6, 0x38, 0xb7, 0xf5, 0x9b, 0x61, 0xfc, + 0x03, 0x3a, 0x92, 0x19, 0x8b, 0x6c, 0x48, 0xcf, 0x40, 0x1e, 0xb6, 0x41, 0x2e, 0x0a, 0x99, 0x65, + 0x3f, 0x90, 0x76, 0x00, 0x47, 0xe8, 0xb6, 0xcc, 0x44, 0x2a, 0xf4, 0xda, 0x73, 0xe3, 0xf4, 0x8d, + 0xf9, 0xb8, 0xdd, 0xbc, 0x92, 0x5a, 0x80, 0x63, 0xb9, 0x19, 0x2c, 0x4e, 0x44, 0x22, 0x22, 0xb2, + 0xc1, 0x38, 0xdc, 0xed, 0x44, 0x3c, 0xaf, 0x94, 0xf6, 0x89, 0x48, 0x36, 0x62, 0xc5, 0x89, 0x20, + 0xab, 0x15, 0x4b, 0x58, 0x71, 0x93, 0xdd, 0xc0, 0x3a, 0xda, 0xed, 0x44, 0x4c, 0x6b, 0x8f, 0x1b, + 0xa0, 0x77, 0xc9, 0xb6, 0xc1, 0xb3, 0x1e, 0x3a, 0x68, 0x6e, 0xe7, 0xb3, 0xbf, 0xba, 0xe8, 0x41, + 0x24, 0x52, 0xb7, 0xf5, 0xa9, 0x3e, 0xbb, 0xf3, 0xc6, 0x13, 0x73, 0x51, 0xdc, 0xb7, 0x17, 0x9d, + 0x9f, 0xbe, 0xad, 0xc4, 0xb1, 0x48, 0x08, 0x8f, 0x5d, 0x91, 0xc5, 0x5e, 0x0c, 0xdc, 0xdc, 0xc6, + 0xf5, 0x53, 0x28, 0x99, 0x7a, 0xcb, 0x3f, 0xc4, 0x69, 0xd3, 0xfa, 0xbb, 0xbb, 0xf7, 0x74, 0x3a, + 0xfd, 0xa7, 0x7b, 0xff, 0x69, 0x69, 0x39, 0xa5, 0xca, 0x2d, 0x9b, 0x45, 0x6b, 0x31, 0x76, 0x83, + 0x3a, 0xf3, 0xbf, 0x3a, 0x67, 0x39, 0xa5, 0x6a, 0xd9, 0xe4, 0x2c, 0x17, 0xe3, 0x65, 0x93, 0xf3, + 0x7f, 0xf7, 0x41, 0x39, 0xe0, 0xfb, 0x53, 0xaa, 0x7c, 0xbf, 0xc9, 0xf2, 0xfd, 0xc5, 0xd8, 0xf7, + 0x9b, 0xbc, 0x97, 0xfb, 0xe6, 0x63, 0x1f, 0xbd, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x3a, 0xc2, 0x6c, + 0xb7, 0xef, 0x08, 0x00, 0x00, +} diff 
--git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed.pb.go new file mode 100644 index 0000000..64420f8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed.pb.go @@ -0,0 +1,685 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/feed.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operator. +type FeedAttributeOperation_Operator int32 + +const ( + // Unspecified. + FeedAttributeOperation_UNSPECIFIED FeedAttributeOperation_Operator = 0 + // Used for return value only. Represents value unknown in this version. + FeedAttributeOperation_UNKNOWN FeedAttributeOperation_Operator = 1 + // Add the attribute to the existing attributes. + FeedAttributeOperation_ADD FeedAttributeOperation_Operator = 2 +) + +var FeedAttributeOperation_Operator_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNKNOWN", + 2: "ADD", +} +var FeedAttributeOperation_Operator_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNKNOWN": 1, + "ADD": 2, +} + +func (x FeedAttributeOperation_Operator) String() string { + return proto.EnumName(FeedAttributeOperation_Operator_name, int32(x)) +} +func (FeedAttributeOperation_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{2, 0} +} + +// A feed. +type Feed struct { + // The resource name of the feed. + // Feed resource names have the form: + // + // `customers/{customer_id}/feeds/{feed_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the feed. + // This field is read-only. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Name of the feed. Required. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The Feed's attributes. Required on CREATE. + // Disallowed on UPDATE. Use attribute_operations to add new attributes. + Attributes []*FeedAttribute `protobuf:"bytes,4,rep,name=attributes,proto3" json:"attributes,omitempty"` + // The list of operations changing the feed attributes. Attributes can only + // be added, not removed. + AttributeOperations []*FeedAttributeOperation `protobuf:"bytes,9,rep,name=attribute_operations,json=attributeOperations,proto3" json:"attribute_operations,omitempty"` + // Specifies who manages the FeedAttributes for the Feed. + Origin enums.FeedOriginEnum_FeedOrigin `protobuf:"varint,5,opt,name=origin,proto3,enum=google.ads.googleads.v1.enums.FeedOriginEnum_FeedOrigin" json:"origin,omitempty"` + // Status of the feed. 
+ // This field is read-only. + Status enums.FeedStatusEnum_FeedStatus `protobuf:"varint,8,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedStatusEnum_FeedStatus" json:"status,omitempty"` + // The system data for the Feed. This data specifies information for + // generating the feed items of the system generated feed. + // + // Types that are valid to be assigned to SystemFeedGenerationData: + // *Feed_PlacesLocationFeedData_ + // *Feed_AffiliateLocationFeedData_ + SystemFeedGenerationData isFeed_SystemFeedGenerationData `protobuf_oneof:"system_feed_generation_data"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feed) Reset() { *m = Feed{} } +func (m *Feed) String() string { return proto.CompactTextString(m) } +func (*Feed) ProtoMessage() {} +func (*Feed) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{0} +} +func (m *Feed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feed.Unmarshal(m, b) +} +func (m *Feed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feed.Marshal(b, m, deterministic) +} +func (dst *Feed) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feed.Merge(dst, src) +} +func (m *Feed) XXX_Size() int { + return xxx_messageInfo_Feed.Size(m) +} +func (m *Feed) XXX_DiscardUnknown() { + xxx_messageInfo_Feed.DiscardUnknown(m) +} + +var xxx_messageInfo_Feed proto.InternalMessageInfo + +func (m *Feed) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Feed) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Feed) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *Feed) GetAttributes() []*FeedAttribute { + if m != nil { + return m.Attributes + } + return nil +} + +func (m *Feed) GetAttributeOperations() []*FeedAttributeOperation { + if m != nil { + return m.AttributeOperations + } + return nil +} + +func (m *Feed) GetOrigin() enums.FeedOriginEnum_FeedOrigin { + if m != nil { + return m.Origin + } + return enums.FeedOriginEnum_UNSPECIFIED +} + +func (m *Feed) GetStatus() enums.FeedStatusEnum_FeedStatus { + if m != nil { + return m.Status + } + return enums.FeedStatusEnum_UNSPECIFIED +} + +type isFeed_SystemFeedGenerationData interface { + isFeed_SystemFeedGenerationData() +} + +type Feed_PlacesLocationFeedData_ struct { + PlacesLocationFeedData *Feed_PlacesLocationFeedData `protobuf:"bytes,6,opt,name=places_location_feed_data,json=placesLocationFeedData,proto3,oneof"` +} + +type Feed_AffiliateLocationFeedData_ struct { + AffiliateLocationFeedData *Feed_AffiliateLocationFeedData `protobuf:"bytes,7,opt,name=affiliate_location_feed_data,json=affiliateLocationFeedData,proto3,oneof"` +} + +func (*Feed_PlacesLocationFeedData_) isFeed_SystemFeedGenerationData() {} + +func (*Feed_AffiliateLocationFeedData_) isFeed_SystemFeedGenerationData() {} + +func (m *Feed) GetSystemFeedGenerationData() isFeed_SystemFeedGenerationData { + if m != nil { + return m.SystemFeedGenerationData + } + return nil +} + +func (m *Feed) GetPlacesLocationFeedData() *Feed_PlacesLocationFeedData { + if x, ok := m.GetSystemFeedGenerationData().(*Feed_PlacesLocationFeedData_); ok { + return x.PlacesLocationFeedData + } + return nil +} + +func (m *Feed) GetAffiliateLocationFeedData() *Feed_AffiliateLocationFeedData { + if x, ok := m.GetSystemFeedGenerationData().(*Feed_AffiliateLocationFeedData_); ok { + return 
x.AffiliateLocationFeedData + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Feed) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Feed_OneofMarshaler, _Feed_OneofUnmarshaler, _Feed_OneofSizer, []interface{}{ + (*Feed_PlacesLocationFeedData_)(nil), + (*Feed_AffiliateLocationFeedData_)(nil), + } +} + +func _Feed_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Feed) + // system_feed_generation_data + switch x := m.SystemFeedGenerationData.(type) { + case *Feed_PlacesLocationFeedData_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PlacesLocationFeedData); err != nil { + return err + } + case *Feed_AffiliateLocationFeedData_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AffiliateLocationFeedData); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Feed.SystemFeedGenerationData has unexpected type %T", x) + } + return nil +} + +func _Feed_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Feed) + switch tag { + case 6: // system_feed_generation_data.places_location_feed_data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Feed_PlacesLocationFeedData) + err := b.DecodeMessage(msg) + m.SystemFeedGenerationData = &Feed_PlacesLocationFeedData_{msg} + return true, err + case 7: // system_feed_generation_data.affiliate_location_feed_data + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Feed_AffiliateLocationFeedData) + err := b.DecodeMessage(msg) + m.SystemFeedGenerationData = &Feed_AffiliateLocationFeedData_{msg} + return true, err + default: + return false, nil + } +} + +func _Feed_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Feed) + // system_feed_generation_data + switch x := m.SystemFeedGenerationData.(type) { + case *Feed_PlacesLocationFeedData_: + s := proto.Size(x.PlacesLocationFeedData) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Feed_AffiliateLocationFeedData_: + s := proto.Size(x.AffiliateLocationFeedData) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Data used to configure a location feed populated from Google My Business +// Locations. +type Feed_PlacesLocationFeedData struct { + // Required authentication token (from OAuth API) for the email. + // This field can only be specified in a create request. All its subfields + // are not selectable. + OauthInfo *Feed_PlacesLocationFeedData_OAuthInfo `protobuf:"bytes,1,opt,name=oauth_info,json=oauthInfo,proto3" json:"oauth_info,omitempty"` + // Email address of a Google My Business account or email address of a + // manager of the Google My Business account. Required. + EmailAddress *wrappers.StringValue `protobuf:"bytes,2,opt,name=email_address,json=emailAddress,proto3" json:"email_address,omitempty"` + // Plus page ID of the managed business whose locations should be used. If + // this field is not set, then all businesses accessible by the user + // (specified by email_address) are used. + // This field is mutate-only and is not selectable. 
+ BusinessAccountId *wrappers.StringValue `protobuf:"bytes,10,opt,name=business_account_id,json=businessAccountId,proto3" json:"business_account_id,omitempty"` + // Used to filter Google My Business listings by business name. If + // business_name_filter is set, only listings with a matching business name + // are candidates to be sync'd into FeedItems. + BusinessNameFilter *wrappers.StringValue `protobuf:"bytes,4,opt,name=business_name_filter,json=businessNameFilter,proto3" json:"business_name_filter,omitempty"` + // Used to filter Google My Business listings by categories. If entries + // exist in category_filters, only listings that belong to any of the + // categories are candidates to be sync'd into FeedItems. If no entries + // exist in category_filters, then all listings are candidates for syncing. + CategoryFilters []*wrappers.StringValue `protobuf:"bytes,5,rep,name=category_filters,json=categoryFilters,proto3" json:"category_filters,omitempty"` + // Used to filter Google My Business listings by labels. If entries exist in + // label_filters, only listings that has any of the labels set are + // candidates to be synchronized into FeedItems. If no entries exist in + // label_filters, then all listings are candidates for syncing. + LabelFilters []*wrappers.StringValue `protobuf:"bytes,6,rep,name=label_filters,json=labelFilters,proto3" json:"label_filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feed_PlacesLocationFeedData) Reset() { *m = Feed_PlacesLocationFeedData{} } +func (m *Feed_PlacesLocationFeedData) String() string { return proto.CompactTextString(m) } +func (*Feed_PlacesLocationFeedData) ProtoMessage() {} +func (*Feed_PlacesLocationFeedData) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{0, 0} +} +func (m *Feed_PlacesLocationFeedData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feed_PlacesLocationFeedData.Unmarshal(m, b) +} +func (m *Feed_PlacesLocationFeedData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feed_PlacesLocationFeedData.Marshal(b, m, deterministic) +} +func (dst *Feed_PlacesLocationFeedData) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feed_PlacesLocationFeedData.Merge(dst, src) +} +func (m *Feed_PlacesLocationFeedData) XXX_Size() int { + return xxx_messageInfo_Feed_PlacesLocationFeedData.Size(m) +} +func (m *Feed_PlacesLocationFeedData) XXX_DiscardUnknown() { + xxx_messageInfo_Feed_PlacesLocationFeedData.DiscardUnknown(m) +} + +var xxx_messageInfo_Feed_PlacesLocationFeedData proto.InternalMessageInfo + +func (m *Feed_PlacesLocationFeedData) GetOauthInfo() *Feed_PlacesLocationFeedData_OAuthInfo { + if m != nil { + return m.OauthInfo + } + return nil +} + +func (m *Feed_PlacesLocationFeedData) GetEmailAddress() *wrappers.StringValue { + if m != nil { + return m.EmailAddress + } + return nil +} + +func (m *Feed_PlacesLocationFeedData) GetBusinessAccountId() *wrappers.StringValue { + if m != nil { + return m.BusinessAccountId + } + return nil +} + +func (m *Feed_PlacesLocationFeedData) GetBusinessNameFilter() *wrappers.StringValue { + if m != nil { + return m.BusinessNameFilter + } + return nil +} + +func (m *Feed_PlacesLocationFeedData) GetCategoryFilters() []*wrappers.StringValue { + if m != nil { + return m.CategoryFilters + } + return nil +} + +func (m *Feed_PlacesLocationFeedData) GetLabelFilters() []*wrappers.StringValue { + if m != nil { + return m.LabelFilters + } + 
return nil +} + +// Data used for authorization using OAuth. +type Feed_PlacesLocationFeedData_OAuthInfo struct { + // The HTTP method used to obtain authorization. + HttpMethod *wrappers.StringValue `protobuf:"bytes,1,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty"` + // The HTTP request URL used to obtain authorization. + HttpRequestUrl *wrappers.StringValue `protobuf:"bytes,2,opt,name=http_request_url,json=httpRequestUrl,proto3" json:"http_request_url,omitempty"` + // The HTTP authorization header used to obtain authorization. + HttpAuthorizationHeader *wrappers.StringValue `protobuf:"bytes,3,opt,name=http_authorization_header,json=httpAuthorizationHeader,proto3" json:"http_authorization_header,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feed_PlacesLocationFeedData_OAuthInfo) Reset() { *m = Feed_PlacesLocationFeedData_OAuthInfo{} } +func (m *Feed_PlacesLocationFeedData_OAuthInfo) String() string { return proto.CompactTextString(m) } +func (*Feed_PlacesLocationFeedData_OAuthInfo) ProtoMessage() {} +func (*Feed_PlacesLocationFeedData_OAuthInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{0, 0, 0} +} +func (m *Feed_PlacesLocationFeedData_OAuthInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo.Unmarshal(m, b) +} +func (m *Feed_PlacesLocationFeedData_OAuthInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo.Marshal(b, m, deterministic) +} +func (dst *Feed_PlacesLocationFeedData_OAuthInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo.Merge(dst, src) +} +func (m *Feed_PlacesLocationFeedData_OAuthInfo) XXX_Size() int { + return xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo.Size(m) +} +func (m *Feed_PlacesLocationFeedData_OAuthInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Feed_PlacesLocationFeedData_OAuthInfo proto.InternalMessageInfo + +func (m *Feed_PlacesLocationFeedData_OAuthInfo) GetHttpMethod() *wrappers.StringValue { + if m != nil { + return m.HttpMethod + } + return nil +} + +func (m *Feed_PlacesLocationFeedData_OAuthInfo) GetHttpRequestUrl() *wrappers.StringValue { + if m != nil { + return m.HttpRequestUrl + } + return nil +} + +func (m *Feed_PlacesLocationFeedData_OAuthInfo) GetHttpAuthorizationHeader() *wrappers.StringValue { + if m != nil { + return m.HttpAuthorizationHeader + } + return nil +} + +// Data used to configure an affiliate location feed populated with the +// specified chains. +type Feed_AffiliateLocationFeedData struct { + // The list of chains that the affiliate location feed will sync the + // locations from. + ChainIds []*wrappers.Int64Value `protobuf:"bytes,1,rep,name=chain_ids,json=chainIds,proto3" json:"chain_ids,omitempty"` + // The relationship the chains have with the advertiser. 
+ RelationshipType enums.AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType `protobuf:"varint,2,opt,name=relationship_type,json=relationshipType,proto3,enum=google.ads.googleads.v1.enums.AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType" json:"relationship_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feed_AffiliateLocationFeedData) Reset() { *m = Feed_AffiliateLocationFeedData{} } +func (m *Feed_AffiliateLocationFeedData) String() string { return proto.CompactTextString(m) } +func (*Feed_AffiliateLocationFeedData) ProtoMessage() {} +func (*Feed_AffiliateLocationFeedData) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{0, 1} +} +func (m *Feed_AffiliateLocationFeedData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feed_AffiliateLocationFeedData.Unmarshal(m, b) +} +func (m *Feed_AffiliateLocationFeedData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feed_AffiliateLocationFeedData.Marshal(b, m, deterministic) +} +func (dst *Feed_AffiliateLocationFeedData) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feed_AffiliateLocationFeedData.Merge(dst, src) +} +func (m *Feed_AffiliateLocationFeedData) XXX_Size() int { + return xxx_messageInfo_Feed_AffiliateLocationFeedData.Size(m) +} +func (m *Feed_AffiliateLocationFeedData) XXX_DiscardUnknown() { + xxx_messageInfo_Feed_AffiliateLocationFeedData.DiscardUnknown(m) +} + +var xxx_messageInfo_Feed_AffiliateLocationFeedData proto.InternalMessageInfo + +func (m *Feed_AffiliateLocationFeedData) GetChainIds() []*wrappers.Int64Value { + if m != nil { + return m.ChainIds + } + return nil +} + +func (m *Feed_AffiliateLocationFeedData) GetRelationshipType() enums.AffiliateLocationFeedRelationshipTypeEnum_AffiliateLocationFeedRelationshipType { + if m != nil { + return m.RelationshipType + } + return enums.AffiliateLocationFeedRelationshipTypeEnum_UNSPECIFIED +} + +// FeedAttributes define the types of data expected to be present in a Feed. A +// single FeedAttribute specifies the expected type of the FeedItemAttributes +// with the same FeedAttributeId. Optionally, a FeedAttribute can be marked as +// being part of a FeedItem's unique key. +type FeedAttribute struct { + // ID of the attribute. + Id *wrappers.Int64Value `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The name of the attribute. Required. + Name *wrappers.StringValue `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Data type for feed attribute. Required. + Type enums.FeedAttributeTypeEnum_FeedAttributeType `protobuf:"varint,3,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.FeedAttributeTypeEnum_FeedAttributeType" json:"type,omitempty"` + // Indicates that data corresponding to this attribute is part of a + // FeedItem's unique key. It defaults to false if it is unspecified. Note + // that a unique key is not required in a Feed's schema, in which case the + // FeedItems must be referenced by their feed_item_id. 
+ IsPartOfKey *wrappers.BoolValue `protobuf:"bytes,4,opt,name=is_part_of_key,json=isPartOfKey,proto3" json:"is_part_of_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedAttribute) Reset() { *m = FeedAttribute{} } +func (m *FeedAttribute) String() string { return proto.CompactTextString(m) } +func (*FeedAttribute) ProtoMessage() {} +func (*FeedAttribute) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{1} +} +func (m *FeedAttribute) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedAttribute.Unmarshal(m, b) +} +func (m *FeedAttribute) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedAttribute.Marshal(b, m, deterministic) +} +func (dst *FeedAttribute) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedAttribute.Merge(dst, src) +} +func (m *FeedAttribute) XXX_Size() int { + return xxx_messageInfo_FeedAttribute.Size(m) +} +func (m *FeedAttribute) XXX_DiscardUnknown() { + xxx_messageInfo_FeedAttribute.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedAttribute proto.InternalMessageInfo + +func (m *FeedAttribute) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *FeedAttribute) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *FeedAttribute) GetType() enums.FeedAttributeTypeEnum_FeedAttributeType { + if m != nil { + return m.Type + } + return enums.FeedAttributeTypeEnum_UNSPECIFIED +} + +func (m *FeedAttribute) GetIsPartOfKey() *wrappers.BoolValue { + if m != nil { + return m.IsPartOfKey + } + return nil +} + +// Operation to be performed on a feed attribute list in a mutate. +type FeedAttributeOperation struct { + // Type of list operation to perform. + Operator FeedAttributeOperation_Operator `protobuf:"varint,1,opt,name=operator,proto3,enum=google.ads.googleads.v1.resources.FeedAttributeOperation_Operator" json:"operator,omitempty"` + // The feed attribute being added to the list. 
+ Value *FeedAttribute `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedAttributeOperation) Reset() { *m = FeedAttributeOperation{} } +func (m *FeedAttributeOperation) String() string { return proto.CompactTextString(m) } +func (*FeedAttributeOperation) ProtoMessage() {} +func (*FeedAttributeOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_0878dc93c3f7dc5a, []int{2} +} +func (m *FeedAttributeOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedAttributeOperation.Unmarshal(m, b) +} +func (m *FeedAttributeOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedAttributeOperation.Marshal(b, m, deterministic) +} +func (dst *FeedAttributeOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedAttributeOperation.Merge(dst, src) +} +func (m *FeedAttributeOperation) XXX_Size() int { + return xxx_messageInfo_FeedAttributeOperation.Size(m) +} +func (m *FeedAttributeOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FeedAttributeOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedAttributeOperation proto.InternalMessageInfo + +func (m *FeedAttributeOperation) GetOperator() FeedAttributeOperation_Operator { + if m != nil { + return m.Operator + } + return FeedAttributeOperation_UNSPECIFIED +} + +func (m *FeedAttributeOperation) GetValue() *FeedAttribute { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Feed)(nil), "google.ads.googleads.v1.resources.Feed") + proto.RegisterType((*Feed_PlacesLocationFeedData)(nil), "google.ads.googleads.v1.resources.Feed.PlacesLocationFeedData") + proto.RegisterType((*Feed_PlacesLocationFeedData_OAuthInfo)(nil), "google.ads.googleads.v1.resources.Feed.PlacesLocationFeedData.OAuthInfo") + proto.RegisterType((*Feed_AffiliateLocationFeedData)(nil), "google.ads.googleads.v1.resources.Feed.AffiliateLocationFeedData") + proto.RegisterType((*FeedAttribute)(nil), "google.ads.googleads.v1.resources.FeedAttribute") + proto.RegisterType((*FeedAttributeOperation)(nil), "google.ads.googleads.v1.resources.FeedAttributeOperation") + proto.RegisterEnum("google.ads.googleads.v1.resources.FeedAttributeOperation_Operator", FeedAttributeOperation_Operator_name, FeedAttributeOperation_Operator_value) +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/feed.proto", fileDescriptor_feed_0878dc93c3f7dc5a) +} + +var fileDescriptor_feed_0878dc93c3f7dc5a = []byte{ + // 1031 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x96, 0xcf, 0x6f, 0xe3, 0x44, + 0x14, 0xc7, 0xd7, 0x49, 0xfa, 0x23, 0x93, 0x36, 0x9b, 0x9d, 0x5d, 0x15, 0x37, 0x5b, 0x50, 0xb7, + 0x68, 0xa5, 0x4a, 0x20, 0x67, 0x13, 0x10, 0x2c, 0x41, 0x80, 0x1c, 0xda, 0xb4, 0x61, 0x77, 0x93, + 0xc8, 0xa5, 0x05, 0xad, 0x2a, 0xac, 0x49, 0x3c, 0x71, 0x46, 0x38, 0x1e, 0x33, 0x33, 0x2e, 0x0a, + 0x5c, 0xb9, 0xf1, 0x27, 0x70, 0xe3, 0xc8, 0x9f, 0xc2, 0xff, 0xc0, 0x19, 0x09, 0x71, 0x5b, 0x89, + 0x33, 0xf2, 0x8c, 0x6d, 0x42, 0xf3, 0xa3, 0x2e, 0xdc, 0xc6, 0x33, 0xdf, 0xef, 0xc7, 0xcf, 0xef, + 0xe5, 0xbd, 0x09, 0x78, 0xdb, 0xa5, 0xd4, 0xf5, 0x70, 0x0d, 0x39, 0xbc, 0xa6, 0x96, 0xd1, 0xea, + 0xaa, 0x5e, 0x63, 0x98, 0xd3, 0x90, 0x0d, 0x31, 0xaf, 0x8d, 0x30, 0x76, 0x8c, 0x80, 0x51, 0x41, + 0xe1, 0x23, 0x25, 0x31, 0x90, 0xc3, 0x8d, 0x54, 0x6d, 0x5c, 0xd5, 0x8d, 0x54, 0x5d, 0x7d, 0xb1, + 0x0c, 
0x88, 0xfd, 0x70, 0xc2, 0x6b, 0x68, 0x34, 0x22, 0x1e, 0x41, 0x02, 0xdb, 0x1e, 0x1d, 0x22, + 0x41, 0xa8, 0x6f, 0x47, 0x7c, 0x9b, 0x61, 0x4f, 0x3e, 0xf1, 0x31, 0x09, 0x6c, 0x31, 0x0d, 0xb0, + 0x7a, 0x63, 0xf5, 0xfd, 0xd5, 0x38, 0xe9, 0x45, 0x42, 0x30, 0x32, 0x08, 0x05, 0x9e, 0x35, 0xd6, + 0x32, 0x18, 0x29, 0x23, 0x2e, 0xf1, 0x6f, 0x61, 0xe0, 0x02, 0x89, 0x90, 0xc7, 0x86, 0x37, 0x62, + 0x83, 0x7c, 0x1a, 0x84, 0xa3, 0xda, 0xb7, 0x0c, 0x05, 0x01, 0x66, 0xc9, 0xf9, 0x5e, 0x02, 0x0c, + 0x48, 0x0d, 0xf9, 0x3e, 0x15, 0xea, 0x03, 0xd5, 0xe9, 0xc1, 0xef, 0xdb, 0xa0, 0xd0, 0xc6, 0xd8, + 0x81, 0x6f, 0x82, 0xed, 0x24, 0x7b, 0xb6, 0x8f, 0x26, 0x58, 0xd7, 0xf6, 0xb5, 0xc3, 0xa2, 0xb5, + 0x95, 0x6c, 0x76, 0xd1, 0x04, 0xc3, 0xb7, 0x40, 0x8e, 0x38, 0x7a, 0x6e, 0x5f, 0x3b, 0x2c, 0x35, + 0x1e, 0xc6, 0xa9, 0x37, 0x92, 0x17, 0x1b, 0x1d, 0x5f, 0xbc, 0xf7, 0xee, 0x05, 0xf2, 0x42, 0x6c, + 0xe5, 0x88, 0x03, 0x9f, 0x80, 0x82, 0x04, 0xe5, 0xa5, 0x7c, 0x6f, 0x4e, 0x7e, 0x26, 0x18, 0xf1, + 0x5d, 0xa5, 0x97, 0x4a, 0xd8, 0x07, 0x20, 0x4d, 0x22, 0xd7, 0x0b, 0xfb, 0xf9, 0xc3, 0x52, 0xe3, + 0x89, 0x71, 0x63, 0xb1, 0x8d, 0xe8, 0x03, 0xcc, 0xc4, 0x68, 0xcd, 0x30, 0xa0, 0x07, 0x1e, 0xfc, + 0x53, 0x16, 0x1a, 0x60, 0xa6, 0x3e, 0x5e, 0x2f, 0x4a, 0xf6, 0x07, 0xb7, 0x65, 0xf7, 0x12, 0x82, + 0x75, 0x1f, 0xcd, 0xed, 0x71, 0xd8, 0x07, 0xeb, 0xaa, 0x96, 0xfa, 0xda, 0xbe, 0x76, 0x58, 0x6e, + 0x3c, 0x5d, 0xca, 0x97, 0xc5, 0x94, 0xec, 0x9e, 0x34, 0x1c, 0xfb, 0xe1, 0x64, 0xe6, 0xd1, 0x8a, + 0x39, 0x11, 0x51, 0x15, 0x5b, 0xdf, 0xcc, 0x4c, 0x3c, 0x93, 0x86, 0x94, 0xa8, 0x1e, 0xad, 0x98, + 0x03, 0xbf, 0x07, 0xbb, 0x81, 0x87, 0x86, 0x98, 0x5f, 0xfb, 0xe5, 0x3b, 0x48, 0x20, 0x7d, 0x5d, + 0x96, 0xea, 0xe3, 0x8c, 0x69, 0x31, 0xfa, 0x12, 0xf4, 0x3c, 0xe6, 0x44, 0x5b, 0x47, 0x48, 0xa0, + 0xd3, 0x3b, 0xd6, 0x4e, 0xb0, 0xf0, 0x04, 0xfe, 0xa0, 0x81, 0xbd, 0x65, 0xad, 0x27, 0x03, 0xd8, + 0x90, 0x01, 0x98, 0x59, 0x03, 0x30, 0x13, 0xd6, 0x82, 0x18, 0x76, 0xd1, 0xb2, 0xc3, 0xea, 0x6f, + 0x6b, 0x60, 0x67, 0x71, 0xec, 0xd0, 0x05, 0x80, 0xa2, 0x50, 0x8c, 0x6d, 0xe2, 0x8f, 0xa8, 0xec, + 0x81, 0x52, 0xe3, 0xf4, 0xff, 0xe5, 0xc3, 0xe8, 0x99, 0xa1, 0x18, 0x77, 0xfc, 0x11, 0xb5, 0x8a, + 0x92, 0x1d, 0x2d, 0xa1, 0x09, 0xb6, 0xf1, 0x04, 0x11, 0xcf, 0x46, 0x8e, 0xc3, 0x30, 0xe7, 0x71, + 0x57, 0xad, 0x6e, 0x93, 0x2d, 0x69, 0x31, 0x95, 0x03, 0x3e, 0x07, 0xf7, 0x07, 0x21, 0x27, 0x3e, + 0xe6, 0xdc, 0x46, 0xc3, 0x21, 0x0d, 0x7d, 0x61, 0x13, 0x47, 0x07, 0x19, 0x40, 0xf7, 0x12, 0xa3, + 0xa9, 0x7c, 0x1d, 0x07, 0x76, 0xc1, 0x83, 0x94, 0x16, 0x75, 0xa3, 0x3d, 0x22, 0x9e, 0xc0, 0x4c, + 0x2f, 0x64, 0xc0, 0xc1, 0xc4, 0x19, 0x4d, 0x89, 0xb6, 0xf4, 0xc1, 0x13, 0x50, 0x19, 0x22, 0x81, + 0x5d, 0xca, 0xa6, 0x31, 0x8a, 0xeb, 0x6b, 0xb2, 0xed, 0x56, 0xb3, 0xee, 0x26, 0x2e, 0xc5, 0xe1, + 0x51, 0xa6, 0x3c, 0x34, 0xc0, 0x5e, 0x4a, 0x59, 0xcf, 0x40, 0xd9, 0x92, 0x96, 0x18, 0x51, 0xfd, + 0x53, 0x03, 0xc5, 0xb4, 0x0a, 0xf0, 0x23, 0x50, 0x1a, 0x0b, 0x11, 0xd8, 0x13, 0x2c, 0xc6, 0xd4, + 0x89, 0x8b, 0xbc, 0x1a, 0x07, 0x22, 0xc3, 0x0b, 0xa9, 0x87, 0x6d, 0x50, 0x91, 0x76, 0x86, 0xbf, + 0x09, 0x31, 0x17, 0x76, 0xc8, 0xbc, 0x4c, 0xc5, 0x2b, 0x47, 0x2e, 0x4b, 0x99, 0xce, 0x99, 0x07, + 0xbf, 0x04, 0xbb, 0x92, 0x13, 0xfd, 0x24, 0x28, 0x23, 0xdf, 0xa9, 0x5e, 0x18, 0x63, 0xe4, 0x60, + 0x96, 0x69, 0x68, 0xbe, 0x16, 0xd9, 0xcd, 0x59, 0xf7, 0xa9, 0x34, 0x57, 0x5f, 0x69, 0x60, 0x77, + 0x69, 0x6b, 0xc0, 0xa7, 0xa0, 0x38, 0x1c, 0x23, 0xe2, 0xdb, 0xc4, 0xe1, 0xba, 0x26, 0x73, 0xb9, + 0x72, 0x96, 0x6f, 0x4a, 0x75, 0xc7, 0xe1, 0xf0, 0x27, 0x0d, 0xdc, 0x9b, 0xbb, 0x21, 0xe5, 0xb7, + 0x97, 0x1b, 0xfe, 0x0d, 0x93, 
0x69, 0x61, 0x3c, 0xd6, 0x0c, 0xec, 0xf3, 0x69, 0x80, 0xe5, 0xd0, + 0xca, 0xa4, 0xb4, 0x2a, 0xec, 0xda, 0x4e, 0xeb, 0x75, 0xf0, 0x90, 0x4f, 0xb9, 0xc0, 0x13, 0x35, + 0x4f, 0x5c, 0xec, 0xc7, 0x83, 0x59, 0x8e, 0x96, 0x83, 0x1f, 0x73, 0x60, 0xfb, 0x5f, 0xc3, 0x3c, + 0xbe, 0xcd, 0xb4, 0xdb, 0xdd, 0x66, 0xb9, 0xcc, 0xb7, 0xd9, 0x4b, 0x50, 0x90, 0xf9, 0xc9, 0xcb, + 0xfc, 0xb4, 0x33, 0x4c, 0xee, 0x34, 0xb4, 0x34, 0x17, 0x73, 0xbb, 0x96, 0x64, 0xc2, 0x4f, 0x40, + 0x99, 0x70, 0x3b, 0x40, 0x4c, 0xd8, 0x74, 0x64, 0x7f, 0x8d, 0xa7, 0x71, 0x9b, 0x56, 0xe7, 0xe2, + 0x6a, 0x51, 0xea, 0xa9, 0xa8, 0x4a, 0x84, 0xf7, 0x11, 0x13, 0xbd, 0xd1, 0x33, 0x3c, 0x3d, 0x78, + 0xa5, 0x81, 0x9d, 0xc5, 0x57, 0x1b, 0xfc, 0x0a, 0x6c, 0xaa, 0x9b, 0x92, 0x32, 0x99, 0x9c, 0x72, + 0xa3, 0xf5, 0x9f, 0xef, 0x49, 0xa3, 0x17, 0x93, 0xac, 0x94, 0x09, 0xdb, 0x60, 0xed, 0x2a, 0x0a, + 0x28, 0x4e, 0xe5, 0xed, 0x2f, 0x78, 0x65, 0x3f, 0xa8, 0x83, 0xcd, 0x84, 0x0e, 0xef, 0x82, 0xd2, + 0x79, 0xf7, 0xac, 0x7f, 0xfc, 0x69, 0xa7, 0xdd, 0x39, 0x3e, 0xaa, 0xdc, 0x81, 0x25, 0xb0, 0x71, + 0xde, 0x7d, 0xd6, 0xed, 0x7d, 0xd1, 0xad, 0x68, 0x70, 0x03, 0xe4, 0xcd, 0xa3, 0xa3, 0x4a, 0xae, + 0xf5, 0x97, 0x06, 0x1e, 0x0f, 0xe9, 0xe4, 0xe6, 0x37, 0xb6, 0x8a, 0xd1, 0x2b, 0xfb, 0x51, 0x12, + 0xfb, 0xda, 0xcb, 0xcf, 0x62, 0xbd, 0x4b, 0x3d, 0xe4, 0xbb, 0x06, 0x65, 0x6e, 0xcd, 0xc5, 0xbe, + 0x4c, 0x71, 0xf2, 0x1f, 0x2d, 0x20, 0x7c, 0xc5, 0x9f, 0xd7, 0x0f, 0xd3, 0xd5, 0xcf, 0xb9, 0xfc, + 0x89, 0x69, 0xfe, 0x92, 0x7b, 0x74, 0xa2, 0x90, 0xa6, 0xc3, 0x0d, 0xb5, 0x8c, 0x56, 0x17, 0x75, + 0xc3, 0x4a, 0x94, 0xbf, 0x26, 0x9a, 0x4b, 0xd3, 0xe1, 0x97, 0xa9, 0xe6, 0xf2, 0xa2, 0x7e, 0x99, + 0x6a, 0xfe, 0xc8, 0x3d, 0x56, 0x07, 0xcd, 0xa6, 0xe9, 0xf0, 0x66, 0x33, 0x55, 0x35, 0x9b, 0x17, + 0xf5, 0x66, 0x33, 0xd5, 0x0d, 0xd6, 0x65, 0xb0, 0xef, 0xfc, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x24, + 0xbb, 0xd9, 0x51, 0x68, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item.pb.go new file mode 100644 index 0000000..bbf945b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item.pb.go @@ -0,0 +1,579 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/feed_item.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import errors "google.golang.org/genproto/googleapis/ads/googleads/v1/errors" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A feed item. +type FeedItem struct { + // The resource name of the feed item. 
+ // Feed item resource names have the form: + // + // `customers/{customer_id}/feedItems/{feed_id}~{feed_item_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed to which this feed item belongs. + Feed *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed,proto3" json:"feed,omitempty"` + // The ID of this feed item. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // Start time in which this feed item is effective and can begin serving. + // The format is "YYYY-MM-DD HH:MM:SS". + // Examples: "2018-03-05 09:15:00" or "2018-02-01 14:34:30" + StartDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=start_date_time,json=startDateTime,proto3" json:"start_date_time,omitempty"` + // End time in which this feed item is no longer effective and will stop + // serving. + // The format is "YYYY-MM-DD HH:MM:SS". + // Examples: "2018-03-05 09:15:00" or "2018-02-01 14:34:30" + EndDateTime *wrappers.StringValue `protobuf:"bytes,5,opt,name=end_date_time,json=endDateTime,proto3" json:"end_date_time,omitempty"` + // The feed item's attribute values. + AttributeValues []*FeedItemAttributeValue `protobuf:"bytes,6,rep,name=attribute_values,json=attributeValues,proto3" json:"attribute_values,omitempty"` + // Geo targeting restriction specifies the type of location that can be used + // for targeting. + GeoTargetingRestriction enums.GeoTargetingRestrictionEnum_GeoTargetingRestriction `protobuf:"varint,7,opt,name=geo_targeting_restriction,json=geoTargetingRestriction,proto3,enum=google.ads.googleads.v1.enums.GeoTargetingRestrictionEnum_GeoTargetingRestriction" json:"geo_targeting_restriction,omitempty"` + // The list of mappings used to substitute custom parameter tags in a + // `tracking_url_template`, `final_urls`, or `mobile_final_urls`. + UrlCustomParameters []*common.CustomParameter `protobuf:"bytes,8,rep,name=url_custom_parameters,json=urlCustomParameters,proto3" json:"url_custom_parameters,omitempty"` + // Status of the feed item. + // This field is read-only. + Status enums.FeedItemStatusEnum_FeedItemStatus `protobuf:"varint,9,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedItemStatusEnum_FeedItemStatus" json:"status,omitempty"` + // List of info about a feed item's validation and approval state for active + // feed mappings. There will be an entry in the list for each type of feed + // mapping associated with the feed, e.g. a feed with a sitelink and a call + // feed mapping would cause every feed item associated with that feed to have + // an entry in this list for both sitelink and call. + // This field is read-only. 
+ PolicyInfos []*FeedItemPlaceholderPolicyInfo `protobuf:"bytes,10,rep,name=policy_infos,json=policyInfos,proto3" json:"policy_infos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItem) Reset() { *m = FeedItem{} } +func (m *FeedItem) String() string { return proto.CompactTextString(m) } +func (*FeedItem) ProtoMessage() {} +func (*FeedItem) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_210b5164f935f220, []int{0} +} +func (m *FeedItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItem.Unmarshal(m, b) +} +func (m *FeedItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItem.Marshal(b, m, deterministic) +} +func (dst *FeedItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItem.Merge(dst, src) +} +func (m *FeedItem) XXX_Size() int { + return xxx_messageInfo_FeedItem.Size(m) +} +func (m *FeedItem) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItem.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItem proto.InternalMessageInfo + +func (m *FeedItem) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *FeedItem) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *FeedItem) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *FeedItem) GetStartDateTime() *wrappers.StringValue { + if m != nil { + return m.StartDateTime + } + return nil +} + +func (m *FeedItem) GetEndDateTime() *wrappers.StringValue { + if m != nil { + return m.EndDateTime + } + return nil +} + +func (m *FeedItem) GetAttributeValues() []*FeedItemAttributeValue { + if m != nil { + return m.AttributeValues + } + return nil +} + +func (m *FeedItem) GetGeoTargetingRestriction() enums.GeoTargetingRestrictionEnum_GeoTargetingRestriction { + if m != nil { + return m.GeoTargetingRestriction + } + return enums.GeoTargetingRestrictionEnum_UNSPECIFIED +} + +func (m *FeedItem) GetUrlCustomParameters() []*common.CustomParameter { + if m != nil { + return m.UrlCustomParameters + } + return nil +} + +func (m *FeedItem) GetStatus() enums.FeedItemStatusEnum_FeedItemStatus { + if m != nil { + return m.Status + } + return enums.FeedItemStatusEnum_UNSPECIFIED +} + +func (m *FeedItem) GetPolicyInfos() []*FeedItemPlaceholderPolicyInfo { + if m != nil { + return m.PolicyInfos + } + return nil +} + +// A feed item attribute value. +type FeedItemAttributeValue struct { + // Id of the feed attribute for which the value is associated with. + FeedAttributeId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=feed_attribute_id,json=feedAttributeId,proto3" json:"feed_attribute_id,omitempty"` + // Int64 value. Should be set if feed_attribute_id refers to a feed attribute + // of type INT64. + IntegerValue *wrappers.Int64Value `protobuf:"bytes,2,opt,name=integer_value,json=integerValue,proto3" json:"integer_value,omitempty"` + // Bool value. Should be set if feed_attribute_id refers to a feed attribute + // of type BOOLEAN. + BooleanValue *wrappers.BoolValue `protobuf:"bytes,3,opt,name=boolean_value,json=booleanValue,proto3" json:"boolean_value,omitempty"` + // String value. Should be set if feed_attribute_id refers to a feed attribute + // of type STRING, URL or DATE_TIME. + // For STRING the maximum length is 1500 characters. For URL the maximum + // length is 2076 characters. 
For DATE_TIME the format of the string must + // be the same as start and end time for the feed item. + StringValue *wrappers.StringValue `protobuf:"bytes,4,opt,name=string_value,json=stringValue,proto3" json:"string_value,omitempty"` + // Double value. Should be set if feed_attribute_id refers to a feed attribute + // of type DOUBLE. + DoubleValue *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=double_value,json=doubleValue,proto3" json:"double_value,omitempty"` + // Price value. Should be set if feed_attribute_id refers to a feed attribute + // of type PRICE. + PriceValue *common.Money `protobuf:"bytes,6,opt,name=price_value,json=priceValue,proto3" json:"price_value,omitempty"` + // Repeated int64 value. Should be set if feed_attribute_id refers to a feed + // attribute of type INT64_LIST. + IntegerValues []*wrappers.Int64Value `protobuf:"bytes,7,rep,name=integer_values,json=integerValues,proto3" json:"integer_values,omitempty"` + // Repeated bool value. Should be set if feed_attribute_id refers to a feed + // attribute of type BOOLEAN_LIST. + BooleanValues []*wrappers.BoolValue `protobuf:"bytes,8,rep,name=boolean_values,json=booleanValues,proto3" json:"boolean_values,omitempty"` + // Repeated string value. Should be set if feed_attribute_id refers to a feed + // attribute of type STRING_LIST, URL_LIST or DATE_TIME_LIST. + // For STRING_LIST and URL_LIST the total size of the list in bytes may not + // exceed 3000. For DATE_TIME_LIST the number of elements may not exceed 200. + // + // For STRING_LIST the maximum length of each string element is 1500 + // characters. For URL_LIST the maximum length is 2076 characters. For + // DATE_TIME the format of the string must be the same as start and end time + // for the feed item. + StringValues []*wrappers.StringValue `protobuf:"bytes,9,rep,name=string_values,json=stringValues,proto3" json:"string_values,omitempty"` + // Repeated double value. Should be set if feed_attribute_id refers to a feed + // attribute of type DOUBLE_LIST. 
+ DoubleValues []*wrappers.DoubleValue `protobuf:"bytes,10,rep,name=double_values,json=doubleValues,proto3" json:"double_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemAttributeValue) Reset() { *m = FeedItemAttributeValue{} } +func (m *FeedItemAttributeValue) String() string { return proto.CompactTextString(m) } +func (*FeedItemAttributeValue) ProtoMessage() {} +func (*FeedItemAttributeValue) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_210b5164f935f220, []int{1} +} +func (m *FeedItemAttributeValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemAttributeValue.Unmarshal(m, b) +} +func (m *FeedItemAttributeValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemAttributeValue.Marshal(b, m, deterministic) +} +func (dst *FeedItemAttributeValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemAttributeValue.Merge(dst, src) +} +func (m *FeedItemAttributeValue) XXX_Size() int { + return xxx_messageInfo_FeedItemAttributeValue.Size(m) +} +func (m *FeedItemAttributeValue) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemAttributeValue.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemAttributeValue proto.InternalMessageInfo + +func (m *FeedItemAttributeValue) GetFeedAttributeId() *wrappers.Int64Value { + if m != nil { + return m.FeedAttributeId + } + return nil +} + +func (m *FeedItemAttributeValue) GetIntegerValue() *wrappers.Int64Value { + if m != nil { + return m.IntegerValue + } + return nil +} + +func (m *FeedItemAttributeValue) GetBooleanValue() *wrappers.BoolValue { + if m != nil { + return m.BooleanValue + } + return nil +} + +func (m *FeedItemAttributeValue) GetStringValue() *wrappers.StringValue { + if m != nil { + return m.StringValue + } + return nil +} + +func (m *FeedItemAttributeValue) GetDoubleValue() *wrappers.DoubleValue { + if m != nil { + return m.DoubleValue + } + return nil +} + +func (m *FeedItemAttributeValue) GetPriceValue() *common.Money { + if m != nil { + return m.PriceValue + } + return nil +} + +func (m *FeedItemAttributeValue) GetIntegerValues() []*wrappers.Int64Value { + if m != nil { + return m.IntegerValues + } + return nil +} + +func (m *FeedItemAttributeValue) GetBooleanValues() []*wrappers.BoolValue { + if m != nil { + return m.BooleanValues + } + return nil +} + +func (m *FeedItemAttributeValue) GetStringValues() []*wrappers.StringValue { + if m != nil { + return m.StringValues + } + return nil +} + +func (m *FeedItemAttributeValue) GetDoubleValues() []*wrappers.DoubleValue { + if m != nil { + return m.DoubleValues + } + return nil +} + +// Policy, validation, and quality approval info for a feed item for the +// specified placeholder type. +type FeedItemPlaceholderPolicyInfo struct { + // The placeholder type. + PlaceholderType *wrappers.Int32Value `protobuf:"bytes,1,opt,name=placeholder_type,json=placeholderType,proto3" json:"placeholder_type,omitempty"` + // The FeedMapping that contains the placeholder type. + FeedMappingResourceName *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed_mapping_resource_name,json=feedMappingResourceName,proto3" json:"feed_mapping_resource_name,omitempty"` + // Where the placeholder type is in the review process. 
+ ReviewStatus enums.PolicyReviewStatusEnum_PolicyReviewStatus `protobuf:"varint,3,opt,name=review_status,json=reviewStatus,proto3,enum=google.ads.googleads.v1.enums.PolicyReviewStatusEnum_PolicyReviewStatus" json:"review_status,omitempty"` + // The overall approval status of the placeholder type, calculated based on + // the status of its individual policy topic entries. + ApprovalStatus enums.PolicyApprovalStatusEnum_PolicyApprovalStatus `protobuf:"varint,4,opt,name=approval_status,json=approvalStatus,proto3,enum=google.ads.googleads.v1.enums.PolicyApprovalStatusEnum_PolicyApprovalStatus" json:"approval_status,omitempty"` + // The list of policy findings for the placeholder type. + PolicyTopicEntries []*common.PolicyTopicEntry `protobuf:"bytes,5,rep,name=policy_topic_entries,json=policyTopicEntries,proto3" json:"policy_topic_entries,omitempty"` + // The validation status of the palceholder type. + ValidationStatus enums.FeedItemValidationStatusEnum_FeedItemValidationStatus `protobuf:"varint,6,opt,name=validation_status,json=validationStatus,proto3,enum=google.ads.googleads.v1.enums.FeedItemValidationStatusEnum_FeedItemValidationStatus" json:"validation_status,omitempty"` + // List of placeholder type validation errors. + ValidationErrors []*FeedItemValidationError `protobuf:"bytes,7,rep,name=validation_errors,json=validationErrors,proto3" json:"validation_errors,omitempty"` + // Placeholder type quality evaluation approval status. + QualityApprovalStatus enums.FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus `protobuf:"varint,8,opt,name=quality_approval_status,json=qualityApprovalStatus,proto3,enum=google.ads.googleads.v1.enums.FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus" json:"quality_approval_status,omitempty"` + // List of placeholder type quality evaluation disapproval reasons. 
+ QualityDisapprovalReasons []enums.FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason `protobuf:"varint,9,rep,packed,name=quality_disapproval_reasons,json=qualityDisapprovalReasons,proto3,enum=google.ads.googleads.v1.enums.FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason" json:"quality_disapproval_reasons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemPlaceholderPolicyInfo) Reset() { *m = FeedItemPlaceholderPolicyInfo{} } +func (m *FeedItemPlaceholderPolicyInfo) String() string { return proto.CompactTextString(m) } +func (*FeedItemPlaceholderPolicyInfo) ProtoMessage() {} +func (*FeedItemPlaceholderPolicyInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_210b5164f935f220, []int{2} +} +func (m *FeedItemPlaceholderPolicyInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemPlaceholderPolicyInfo.Unmarshal(m, b) +} +func (m *FeedItemPlaceholderPolicyInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemPlaceholderPolicyInfo.Marshal(b, m, deterministic) +} +func (dst *FeedItemPlaceholderPolicyInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemPlaceholderPolicyInfo.Merge(dst, src) +} +func (m *FeedItemPlaceholderPolicyInfo) XXX_Size() int { + return xxx_messageInfo_FeedItemPlaceholderPolicyInfo.Size(m) +} +func (m *FeedItemPlaceholderPolicyInfo) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemPlaceholderPolicyInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemPlaceholderPolicyInfo proto.InternalMessageInfo + +func (m *FeedItemPlaceholderPolicyInfo) GetPlaceholderType() *wrappers.Int32Value { + if m != nil { + return m.PlaceholderType + } + return nil +} + +func (m *FeedItemPlaceholderPolicyInfo) GetFeedMappingResourceName() *wrappers.StringValue { + if m != nil { + return m.FeedMappingResourceName + } + return nil +} + +func (m *FeedItemPlaceholderPolicyInfo) GetReviewStatus() enums.PolicyReviewStatusEnum_PolicyReviewStatus { + if m != nil { + return m.ReviewStatus + } + return enums.PolicyReviewStatusEnum_UNSPECIFIED +} + +func (m *FeedItemPlaceholderPolicyInfo) GetApprovalStatus() enums.PolicyApprovalStatusEnum_PolicyApprovalStatus { + if m != nil { + return m.ApprovalStatus + } + return enums.PolicyApprovalStatusEnum_UNSPECIFIED +} + +func (m *FeedItemPlaceholderPolicyInfo) GetPolicyTopicEntries() []*common.PolicyTopicEntry { + if m != nil { + return m.PolicyTopicEntries + } + return nil +} + +func (m *FeedItemPlaceholderPolicyInfo) GetValidationStatus() enums.FeedItemValidationStatusEnum_FeedItemValidationStatus { + if m != nil { + return m.ValidationStatus + } + return enums.FeedItemValidationStatusEnum_UNSPECIFIED +} + +func (m *FeedItemPlaceholderPolicyInfo) GetValidationErrors() []*FeedItemValidationError { + if m != nil { + return m.ValidationErrors + } + return nil +} + +func (m *FeedItemPlaceholderPolicyInfo) GetQualityApprovalStatus() enums.FeedItemQualityApprovalStatusEnum_FeedItemQualityApprovalStatus { + if m != nil { + return m.QualityApprovalStatus + } + return enums.FeedItemQualityApprovalStatusEnum_UNSPECIFIED +} + +func (m *FeedItemPlaceholderPolicyInfo) GetQualityDisapprovalReasons() []enums.FeedItemQualityDisapprovalReasonEnum_FeedItemQualityDisapprovalReason { + if m != nil { + return m.QualityDisapprovalReasons + } + return nil +} + +// Stores a validation error and the set of offending feed attributes which +// together are 
responsible for causing a feed item validation error. +type FeedItemValidationError struct { + // Error code indicating what validation error was triggered. The description + // of the error can be found in the 'description' field. + ValidationError errors.FeedItemValidationErrorEnum_FeedItemValidationError `protobuf:"varint,1,opt,name=validation_error,json=validationError,proto3,enum=google.ads.googleads.v1.errors.FeedItemValidationErrorEnum_FeedItemValidationError" json:"validation_error,omitempty"` + // The description of the validation error. + Description *wrappers.StringValue `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Set of feed attributes in the feed item flagged during validation. If + // empty, no specific feed attributes can be associated with the error + // (e.g. error across the entire feed item). + FeedAttributeIds []*wrappers.Int64Value `protobuf:"bytes,3,rep,name=feed_attribute_ids,json=feedAttributeIds,proto3" json:"feed_attribute_ids,omitempty"` + // Any extra information related to this error which is not captured by + // validation_error and feed_attribute_id (e.g. placeholder field IDs when + // feed_attribute_id is not mapped). Note that extra_info is not localized. + ExtraInfo *wrappers.StringValue `protobuf:"bytes,5,opt,name=extra_info,json=extraInfo,proto3" json:"extra_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemValidationError) Reset() { *m = FeedItemValidationError{} } +func (m *FeedItemValidationError) String() string { return proto.CompactTextString(m) } +func (*FeedItemValidationError) ProtoMessage() {} +func (*FeedItemValidationError) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_210b5164f935f220, []int{3} +} +func (m *FeedItemValidationError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemValidationError.Unmarshal(m, b) +} +func (m *FeedItemValidationError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemValidationError.Marshal(b, m, deterministic) +} +func (dst *FeedItemValidationError) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemValidationError.Merge(dst, src) +} +func (m *FeedItemValidationError) XXX_Size() int { + return xxx_messageInfo_FeedItemValidationError.Size(m) +} +func (m *FeedItemValidationError) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemValidationError.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemValidationError proto.InternalMessageInfo + +func (m *FeedItemValidationError) GetValidationError() errors.FeedItemValidationErrorEnum_FeedItemValidationError { + if m != nil { + return m.ValidationError + } + return errors.FeedItemValidationErrorEnum_UNSPECIFIED +} + +func (m *FeedItemValidationError) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *FeedItemValidationError) GetFeedAttributeIds() []*wrappers.Int64Value { + if m != nil { + return m.FeedAttributeIds + } + return nil +} + +func (m *FeedItemValidationError) GetExtraInfo() *wrappers.StringValue { + if m != nil { + return m.ExtraInfo + } + return nil +} + +func init() { + proto.RegisterType((*FeedItem)(nil), "google.ads.googleads.v1.resources.FeedItem") + proto.RegisterType((*FeedItemAttributeValue)(nil), "google.ads.googleads.v1.resources.FeedItemAttributeValue") + proto.RegisterType((*FeedItemPlaceholderPolicyInfo)(nil), 
"google.ads.googleads.v1.resources.FeedItemPlaceholderPolicyInfo") + proto.RegisterType((*FeedItemValidationError)(nil), "google.ads.googleads.v1.resources.FeedItemValidationError") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/feed_item.proto", fileDescriptor_feed_item_210b5164f935f220) +} + +var fileDescriptor_feed_item_210b5164f935f220 = []byte{ + // 1213 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x57, 0xdb, 0x6e, 0xdc, 0x44, + 0x18, 0xd6, 0x6e, 0xda, 0x34, 0x99, 0x3d, 0x24, 0x1d, 0x28, 0x71, 0xd3, 0x82, 0xd2, 0xa0, 0x48, + 0x91, 0x2a, 0x79, 0xb3, 0xdb, 0x82, 0x60, 0x2b, 0x68, 0x36, 0xe4, 0xd0, 0x20, 0x8a, 0x82, 0x13, + 0x45, 0x80, 0x22, 0xac, 0x59, 0x7b, 0x62, 0x46, 0xb2, 0x3d, 0xce, 0xcc, 0x78, 0xcb, 0xde, 0x20, + 0xf1, 0x02, 0x5c, 0xf2, 0x00, 0x88, 0x2b, 0xee, 0xb8, 0x44, 0xbc, 0x01, 0x6f, 0xc1, 0x2d, 0x8f, + 0xc0, 0x15, 0xf2, 0xcc, 0xd8, 0xeb, 0x3d, 0x78, 0xd7, 0xb9, 0x1b, 0xcf, 0xfc, 0xdf, 0xf7, 0x1f, + 0xe6, 0x3f, 0x8c, 0x41, 0xdb, 0xa3, 0xd4, 0xf3, 0x71, 0x0b, 0xb9, 0xbc, 0xa5, 0x96, 0xc9, 0x6a, + 0xd0, 0x6e, 0x31, 0xcc, 0x69, 0xcc, 0x1c, 0xcc, 0x5b, 0xd7, 0x18, 0xbb, 0x36, 0x11, 0x38, 0x30, + 0x23, 0x46, 0x05, 0x85, 0x4f, 0x94, 0x9c, 0x89, 0x5c, 0x6e, 0x66, 0x10, 0x73, 0xd0, 0x36, 0x33, + 0xc8, 0xe6, 0x07, 0x45, 0xac, 0x0e, 0x0d, 0x02, 0x1a, 0xb6, 0x9c, 0x98, 0x0b, 0x1a, 0xd8, 0x11, + 0x62, 0x28, 0xc0, 0x02, 0x33, 0xc5, 0xbc, 0xb9, 0xb7, 0x00, 0x26, 0x2d, 0x51, 0x6b, 0x8d, 0x78, + 0xba, 0x00, 0x11, 0x51, 0x9f, 0x38, 0x43, 0x2d, 0x7c, 0x54, 0x24, 0x8c, 0xc3, 0x38, 0xc8, 0xf9, + 0x69, 0xdf, 0xc4, 0xc8, 0x27, 0x62, 0x68, 0xa3, 0x28, 0x62, 0x74, 0x80, 0x7c, 0x9b, 0x0b, 0x24, + 0x62, 0xae, 0x69, 0x5e, 0xdd, 0x96, 0xc6, 0x25, 0x3c, 0x63, 0x62, 0x18, 0xf1, 0xcc, 0xfa, 0xe7, + 0x65, 0x99, 0xc6, 0xf4, 0xbf, 0x2c, 0x8b, 0x1a, 0x20, 0x9f, 0xb8, 0x48, 0x10, 0x1a, 0x8e, 0x13, + 0x7c, 0x32, 0x9f, 0xc0, 0xc3, 0xd4, 0x16, 0x88, 0x79, 0x58, 0x90, 0xd0, 0xb3, 0x19, 0xe6, 0x82, + 0x11, 0x27, 0x61, 0xd1, 0xf0, 0xee, 0x7c, 0xb8, 0x0a, 0x79, 0x41, 0xec, 0x3e, 0x2a, 0x85, 0x65, + 0x78, 0x40, 0xf0, 0x9b, 0xb2, 0x5e, 0x33, 0x46, 0x59, 0x81, 0xdb, 0xf2, 0x4c, 0x13, 0xbc, 0xa7, + 0x09, 0xe4, 0x57, 0x3f, 0xbe, 0x6e, 0xbd, 0x61, 0x28, 0x8a, 0x30, 0x4b, 0x15, 0x3c, 0x4e, 0x15, + 0x44, 0xa4, 0x85, 0xc2, 0x90, 0x0a, 0x49, 0xa1, 0x4f, 0xb7, 0xff, 0x58, 0x06, 0x2b, 0xc7, 0x18, + 0xbb, 0xa7, 0x02, 0x07, 0xf0, 0x7d, 0xd0, 0x48, 0x73, 0xdd, 0x0e, 0x51, 0x80, 0x8d, 0xca, 0x56, + 0x65, 0x77, 0xd5, 0xaa, 0xa7, 0x9b, 0x5f, 0xa2, 0x00, 0xc3, 0x3d, 0x70, 0x27, 0xb1, 0xc9, 0xa8, + 0x6e, 0x55, 0x76, 0x6b, 0x9d, 0xc7, 0xba, 0x54, 0xcc, 0x54, 0xbd, 0x79, 0x2e, 0x18, 0x09, 0xbd, + 0x4b, 0xe4, 0xc7, 0xd8, 0x92, 0x92, 0xf0, 0x29, 0xa8, 0x12, 0xd7, 0x58, 0x92, 0xf2, 0x8f, 0xa6, + 0xe4, 0x4f, 0x43, 0xf1, 0xe1, 0x73, 0x25, 0x5e, 0x25, 0x2e, 0x3c, 0x04, 0x6b, 0x5c, 0x20, 0x26, + 0x6c, 0x17, 0x09, 0x6c, 0x0b, 0x12, 0x60, 0xe3, 0x4e, 0x09, 0x4d, 0x0d, 0x09, 0x3a, 0x44, 0x02, + 0x5f, 0x90, 0x00, 0xc3, 0x7d, 0xd0, 0xc0, 0xa1, 0x9b, 0xe3, 0xb8, 0x5b, 0x82, 0xa3, 0x86, 0x43, + 0x37, 0x63, 0x70, 0xc1, 0x3a, 0x12, 0x82, 0x91, 0x7e, 0x2c, 0x70, 0x12, 0xfa, 0x18, 0x73, 0x63, + 0x79, 0x6b, 0x69, 0xb7, 0xd6, 0xf9, 0xd8, 0x5c, 0xd8, 0x28, 0xcc, 0x34, 0xa4, 0xbd, 0x94, 0x42, + 0x69, 0x58, 0x43, 0x63, 0xdf, 0x1c, 0xfe, 0x5c, 0x01, 0x0f, 0x0b, 0xf3, 0xd2, 0xb8, 0xb7, 0x55, + 0xd9, 0x6d, 0x76, 0xac, 0x42, 0x7d, 0x32, 0xb9, 0xcc, 0x13, 0x4c, 0x2f, 0x52, 0xb8, 0x35, 0x42, + 0x1f, 0x85, 0x71, 0x50, 0x74, 0x66, 0x6d, 0x78, 0xb3, 0x0f, 0xa0, 0x03, 0x1e, 0xc4, 
0xcc, 0xb7, + 0x27, 0x1b, 0x19, 0x37, 0x56, 0xa4, 0xef, 0xad, 0x42, 0x5b, 0x74, 0xfb, 0xfa, 0x4c, 0x02, 0xcf, + 0x52, 0x9c, 0xf5, 0x56, 0xcc, 0xfc, 0x89, 0x3d, 0x0e, 0xbf, 0x06, 0xcb, 0xaa, 0x06, 0x8c, 0x55, + 0xe9, 0xe1, 0xfe, 0x02, 0x0f, 0xd3, 0x68, 0x9e, 0x4b, 0x90, 0x74, 0x6c, 0x7c, 0xcb, 0xd2, 0x7c, + 0xd0, 0x01, 0x75, 0x5d, 0x6b, 0x24, 0xbc, 0xa6, 0xdc, 0x00, 0xd2, 0xea, 0xfd, 0x5b, 0xdc, 0xd8, + 0x99, 0x8f, 0x1c, 0xfc, 0x3d, 0xf5, 0x5d, 0xcc, 0xce, 0x24, 0xd3, 0x69, 0x78, 0x4d, 0xad, 0x5a, + 0x94, 0xad, 0xf9, 0xf6, 0x5f, 0x77, 0xc1, 0x3b, 0xb3, 0x2f, 0x18, 0x9e, 0x80, 0xfb, 0xb2, 0x60, + 0x47, 0xa9, 0x43, 0x5c, 0x59, 0x45, 0x0b, 0x32, 0x7f, 0x2d, 0x41, 0x65, 0x5c, 0xa7, 0x6e, 0x92, + 0xc0, 0x24, 0x14, 0xd8, 0xc3, 0x4c, 0x25, 0x9f, 0x2e, 0xb7, 0xb9, 0x24, 0x75, 0x8d, 0x50, 0xa6, + 0xbc, 0x04, 0x8d, 0x3e, 0xa5, 0x3e, 0x46, 0xa1, 0x66, 0x50, 0x05, 0xb8, 0x39, 0xc5, 0x70, 0x40, + 0xa9, 0xaf, 0x09, 0x34, 0x20, 0x25, 0xa8, 0x73, 0x59, 0x1d, 0x1a, 0x5f, 0xa6, 0x0c, 0x6b, 0x7c, + 0xf4, 0x91, 0x10, 0xb8, 0x34, 0xee, 0xfb, 0xba, 0x7e, 0x0a, 0x6b, 0xf0, 0x50, 0x0a, 0x69, 0x02, + 0x77, 0xf4, 0x01, 0x8f, 0x41, 0x2d, 0x62, 0xc4, 0x49, 0xf1, 0xcb, 0x12, 0xbf, 0xb3, 0x28, 0x05, + 0x5f, 0xd3, 0x10, 0x0f, 0x2d, 0x20, 0x91, 0x8a, 0xe7, 0x00, 0x34, 0xc7, 0x82, 0xc9, 0x8d, 0x7b, + 0x32, 0x2f, 0xe6, 0x46, 0xb3, 0x91, 0x8f, 0x26, 0x87, 0x3d, 0xd0, 0x1c, 0x0b, 0x67, 0x5a, 0x11, + 0xf3, 0xe2, 0xd9, 0xc8, 0xc7, 0x33, 0xa1, 0x68, 0xe4, 0x03, 0x9a, 0x64, 0xff, 0xd2, 0xc2, 0x88, + 0xd6, 0x73, 0x11, 0x95, 0x14, 0xf9, 0x90, 0xa6, 0x09, 0x3e, 0x3f, 0xa6, 0xf5, 0x5c, 0x4c, 0xf9, + 0xf6, 0x9f, 0x2b, 0xe0, 0xdd, 0xb9, 0xc9, 0x0e, 0x8f, 0xc1, 0x7a, 0x34, 0x3a, 0xb0, 0xc5, 0x30, + 0xc2, 0xf3, 0x72, 0xf8, 0x59, 0x47, 0xe7, 0x70, 0x0e, 0x74, 0x31, 0x8c, 0x30, 0xfc, 0x06, 0x6c, + 0xca, 0x62, 0x08, 0x50, 0x14, 0xe9, 0xd6, 0x96, 0x9b, 0x2d, 0x65, 0xe6, 0xc7, 0x46, 0x82, 0x7f, + 0xad, 0xe0, 0x56, 0x7e, 0x08, 0x05, 0xc9, 0xa4, 0xca, 0x0d, 0x53, 0x99, 0xdc, 0xcd, 0xce, 0xab, + 0x05, 0x8d, 0x44, 0x39, 0x69, 0x49, 0x64, 0xae, 0x99, 0x4c, 0x6f, 0x27, 0x33, 0x6f, 0xf4, 0x05, + 0x63, 0xb0, 0x36, 0x31, 0xf7, 0x65, 0x35, 0x34, 0x3b, 0x5f, 0x94, 0x52, 0xd8, 0xd3, 0xd8, 0x29, + 0x95, 0xe3, 0x07, 0x56, 0x13, 0x8d, 0x7d, 0xc3, 0x3e, 0x78, 0x5b, 0x77, 0x33, 0x41, 0x23, 0xe2, + 0xd8, 0x38, 0x14, 0x8c, 0x60, 0x6e, 0xdc, 0x95, 0x97, 0xbe, 0xb7, 0xa8, 0x10, 0x94, 0x8e, 0x8b, + 0x04, 0x7a, 0x14, 0x0a, 0x36, 0xb4, 0x60, 0x34, 0xbe, 0x43, 0x30, 0x87, 0x3f, 0x55, 0xc0, 0xfd, + 0xa9, 0x07, 0x95, 0x2c, 0xb5, 0x66, 0xe7, 0xa2, 0x64, 0x5f, 0xbe, 0xcc, 0xf0, 0x33, 0x3a, 0xf4, + 0xe4, 0xa1, 0xb5, 0x3e, 0x98, 0xd8, 0x81, 0xde, 0x98, 0x09, 0xea, 0xe1, 0xa3, 0x4b, 0xb4, 0x7b, + 0x8b, 0xd6, 0x3d, 0xd2, 0x74, 0x94, 0x50, 0xe4, 0x15, 0xc9, 0x0d, 0x0e, 0x7f, 0xa9, 0x80, 0x8d, + 0x82, 0x47, 0xb0, 0xb1, 0x22, 0x5d, 0xfe, 0xae, 0xa4, 0xcb, 0x5f, 0x29, 0x96, 0x19, 0x37, 0x3b, + 0x57, 0xc2, 0x7a, 0x70, 0x33, 0x6b, 0x1b, 0xfe, 0x56, 0x01, 0x8f, 0x8a, 0x9f, 0xd5, 0xaa, 0x53, + 0x34, 0x3b, 0xee, 0xed, 0x8c, 0x3b, 0x1c, 0x11, 0x59, 0x92, 0x67, 0x96, 0x7d, 0x53, 0x42, 0xd6, + 0xc3, 0x9b, 0x82, 0x13, 0xbe, 0xfd, 0x4f, 0x15, 0x6c, 0x14, 0x44, 0x1b, 0xfe, 0x08, 0xd6, 0x27, + 0x2f, 0x51, 0x76, 0x8d, 0x66, 0xe7, 0xbc, 0xd8, 0x6c, 0x79, 0x2d, 0x45, 0x17, 0x58, 0x90, 0x46, + 0xea, 0x72, 0xd7, 0x26, 0x2e, 0x17, 0x7e, 0x0a, 0x6a, 0x2e, 0xe6, 0x0e, 0x23, 0x91, 0x7c, 0x3b, + 0x95, 0x69, 0x2f, 0x79, 0x00, 0x3c, 0x05, 0x70, 0x6a, 0x74, 0x27, 0x7d, 0x65, 0xe1, 0xa0, 0x58, + 0x9f, 0x98, 0xdd, 0x1c, 0xbe, 0x00, 0x00, 0xff, 0x20, 0x18, 0x92, 0x8f, 0x90, 0x52, 0x4f, 0xcf, + 0x55, 0x29, 
0x9f, 0x74, 0xdf, 0x83, 0xff, 0x2a, 0x60, 0xc7, 0xa1, 0xc1, 0xe2, 0xbc, 0x3f, 0x68, + 0x64, 0x6d, 0x3c, 0xa1, 0x3c, 0xab, 0x7c, 0xfb, 0xb9, 0xc6, 0x78, 0xd4, 0x47, 0xa1, 0x67, 0x52, + 0xe6, 0xb5, 0x3c, 0x1c, 0x4a, 0x85, 0xe9, 0xbf, 0x45, 0x44, 0xf8, 0x9c, 0x7f, 0xe2, 0x17, 0xd9, + 0xea, 0xd7, 0xea, 0xd2, 0x49, 0xaf, 0xf7, 0x7b, 0xf5, 0xc9, 0x89, 0xa2, 0xec, 0xb9, 0xdc, 0x54, + 0xcb, 0x64, 0x75, 0xd9, 0x36, 0xd3, 0x76, 0xcc, 0xff, 0x4e, 0x65, 0xae, 0x7a, 0x2e, 0xbf, 0xca, + 0x64, 0xae, 0x2e, 0xdb, 0x57, 0x99, 0xcc, 0xbf, 0xd5, 0x1d, 0x75, 0xd0, 0xed, 0xf6, 0x5c, 0xde, + 0xed, 0x66, 0x52, 0xdd, 0xee, 0x65, 0xbb, 0xdb, 0xcd, 0xe4, 0xfa, 0xcb, 0xd2, 0xd8, 0x67, 0xff, + 0x07, 0x00, 0x00, 0xff, 0xff, 0x79, 0xc5, 0x0d, 0x5c, 0xbf, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item_target.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item_target.pb.go new file mode 100644 index 0000000..517a056 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_item_target.pb.go @@ -0,0 +1,386 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/feed_item_target.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A feed item target. +type FeedItemTarget struct { + // The resource name of the feed item target. + // Feed item target resource names have the form: + // + // `customers/{customer_id}/feedItemTargets/{feed_id}~{feed_item_id}~{feed_item_target_type}~{feed_item_target_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed item to which this feed item target belongs. + FeedItem *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed_item,json=feedItem,proto3" json:"feed_item,omitempty"` + // The target type of this feed item target. This field is read-only. + FeedItemTargetType enums.FeedItemTargetTypeEnum_FeedItemTargetType `protobuf:"varint,3,opt,name=feed_item_target_type,json=feedItemTargetType,proto3,enum=google.ads.googleads.v1.enums.FeedItemTargetTypeEnum_FeedItemTargetType" json:"feed_item_target_type,omitempty"` + // The ID of the targeted resource. This field is read-only. + FeedItemTargetId *wrappers.Int64Value `protobuf:"bytes,6,opt,name=feed_item_target_id,json=feedItemTargetId,proto3" json:"feed_item_target_id,omitempty"` + // The targeted resource. 
+ // + // Types that are valid to be assigned to Target: + // *FeedItemTarget_Campaign + // *FeedItemTarget_AdGroup + // *FeedItemTarget_Keyword + // *FeedItemTarget_GeoTargetConstant + // *FeedItemTarget_Device + // *FeedItemTarget_AdSchedule + Target isFeedItemTarget_Target `protobuf_oneof:"target"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemTarget) Reset() { *m = FeedItemTarget{} } +func (m *FeedItemTarget) String() string { return proto.CompactTextString(m) } +func (*FeedItemTarget) ProtoMessage() {} +func (*FeedItemTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_9b79f0bfee17a3b5, []int{0} +} +func (m *FeedItemTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemTarget.Unmarshal(m, b) +} +func (m *FeedItemTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemTarget.Marshal(b, m, deterministic) +} +func (dst *FeedItemTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemTarget.Merge(dst, src) +} +func (m *FeedItemTarget) XXX_Size() int { + return xxx_messageInfo_FeedItemTarget.Size(m) +} +func (m *FeedItemTarget) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemTarget proto.InternalMessageInfo + +func (m *FeedItemTarget) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *FeedItemTarget) GetFeedItem() *wrappers.StringValue { + if m != nil { + return m.FeedItem + } + return nil +} + +func (m *FeedItemTarget) GetFeedItemTargetType() enums.FeedItemTargetTypeEnum_FeedItemTargetType { + if m != nil { + return m.FeedItemTargetType + } + return enums.FeedItemTargetTypeEnum_UNSPECIFIED +} + +func (m *FeedItemTarget) GetFeedItemTargetId() *wrappers.Int64Value { + if m != nil { + return m.FeedItemTargetId + } + return nil +} + +type isFeedItemTarget_Target interface { + isFeedItemTarget_Target() +} + +type FeedItemTarget_Campaign struct { + Campaign *wrappers.StringValue `protobuf:"bytes,4,opt,name=campaign,proto3,oneof"` +} + +type FeedItemTarget_AdGroup struct { + AdGroup *wrappers.StringValue `protobuf:"bytes,5,opt,name=ad_group,json=adGroup,proto3,oneof"` +} + +type FeedItemTarget_Keyword struct { + Keyword *common.KeywordInfo `protobuf:"bytes,7,opt,name=keyword,proto3,oneof"` +} + +type FeedItemTarget_GeoTargetConstant struct { + GeoTargetConstant *wrappers.StringValue `protobuf:"bytes,8,opt,name=geo_target_constant,json=geoTargetConstant,proto3,oneof"` +} + +type FeedItemTarget_Device struct { + Device enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice `protobuf:"varint,9,opt,name=device,proto3,enum=google.ads.googleads.v1.enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice,oneof"` +} + +type FeedItemTarget_AdSchedule struct { + AdSchedule *common.AdScheduleInfo `protobuf:"bytes,10,opt,name=ad_schedule,json=adSchedule,proto3,oneof"` +} + +func (*FeedItemTarget_Campaign) isFeedItemTarget_Target() {} + +func (*FeedItemTarget_AdGroup) isFeedItemTarget_Target() {} + +func (*FeedItemTarget_Keyword) isFeedItemTarget_Target() {} + +func (*FeedItemTarget_GeoTargetConstant) isFeedItemTarget_Target() {} + +func (*FeedItemTarget_Device) isFeedItemTarget_Target() {} + +func (*FeedItemTarget_AdSchedule) isFeedItemTarget_Target() {} + +func (m *FeedItemTarget) GetTarget() isFeedItemTarget_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *FeedItemTarget) GetCampaign() 
*wrappers.StringValue { + if x, ok := m.GetTarget().(*FeedItemTarget_Campaign); ok { + return x.Campaign + } + return nil +} + +func (m *FeedItemTarget) GetAdGroup() *wrappers.StringValue { + if x, ok := m.GetTarget().(*FeedItemTarget_AdGroup); ok { + return x.AdGroup + } + return nil +} + +func (m *FeedItemTarget) GetKeyword() *common.KeywordInfo { + if x, ok := m.GetTarget().(*FeedItemTarget_Keyword); ok { + return x.Keyword + } + return nil +} + +func (m *FeedItemTarget) GetGeoTargetConstant() *wrappers.StringValue { + if x, ok := m.GetTarget().(*FeedItemTarget_GeoTargetConstant); ok { + return x.GeoTargetConstant + } + return nil +} + +func (m *FeedItemTarget) GetDevice() enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice { + if x, ok := m.GetTarget().(*FeedItemTarget_Device); ok { + return x.Device + } + return enums.FeedItemTargetDeviceEnum_UNSPECIFIED +} + +func (m *FeedItemTarget) GetAdSchedule() *common.AdScheduleInfo { + if x, ok := m.GetTarget().(*FeedItemTarget_AdSchedule); ok { + return x.AdSchedule + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*FeedItemTarget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedItemTarget_OneofMarshaler, _FeedItemTarget_OneofUnmarshaler, _FeedItemTarget_OneofSizer, []interface{}{ + (*FeedItemTarget_Campaign)(nil), + (*FeedItemTarget_AdGroup)(nil), + (*FeedItemTarget_Keyword)(nil), + (*FeedItemTarget_GeoTargetConstant)(nil), + (*FeedItemTarget_Device)(nil), + (*FeedItemTarget_AdSchedule)(nil), + } +} + +func _FeedItemTarget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedItemTarget) + // target + switch x := m.Target.(type) { + case *FeedItemTarget_Campaign: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Campaign); err != nil { + return err + } + case *FeedItemTarget_AdGroup: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroup); err != nil { + return err + } + case *FeedItemTarget_Keyword: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Keyword); err != nil { + return err + } + case *FeedItemTarget_GeoTargetConstant: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GeoTargetConstant); err != nil { + return err + } + case *FeedItemTarget_Device: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Device)) + case *FeedItemTarget_AdSchedule: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdSchedule); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("FeedItemTarget.Target has unexpected type %T", x) + } + return nil +} + +func _FeedItemTarget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedItemTarget) + switch tag { + case 4: // target.campaign + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.Target = &FeedItemTarget_Campaign{msg} + return true, err + case 5: // target.ad_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.Target = &FeedItemTarget_AdGroup{msg} + return true, err + case 7: // target.keyword + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.KeywordInfo) + 
err := b.DecodeMessage(msg) + m.Target = &FeedItemTarget_Keyword{msg} + return true, err + case 8: // target.geo_target_constant + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.Target = &FeedItemTarget_GeoTargetConstant{msg} + return true, err + case 9: // target.device + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &FeedItemTarget_Device{enums.FeedItemTargetDeviceEnum_FeedItemTargetDevice(x)} + return true, err + case 10: // target.ad_schedule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.AdScheduleInfo) + err := b.DecodeMessage(msg) + m.Target = &FeedItemTarget_AdSchedule{msg} + return true, err + default: + return false, nil + } +} + +func _FeedItemTarget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedItemTarget) + // target + switch x := m.Target.(type) { + case *FeedItemTarget_Campaign: + s := proto.Size(x.Campaign) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemTarget_AdGroup: + s := proto.Size(x.AdGroup) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemTarget_Keyword: + s := proto.Size(x.Keyword) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemTarget_GeoTargetConstant: + s := proto.Size(x.GeoTargetConstant) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemTarget_Device: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Device)) + case *FeedItemTarget_AdSchedule: + s := proto.Size(x.AdSchedule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*FeedItemTarget)(nil), "google.ads.googleads.v1.resources.FeedItemTarget") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/feed_item_target.proto", fileDescriptor_feed_item_target_9b79f0bfee17a3b5) +} + +var fileDescriptor_feed_item_target_9b79f0bfee17a3b5 = []byte{ + // 581 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6a, 0x13, 0x41, + 0x14, 0x6e, 0x52, 0x4d, 0xd3, 0xa9, 0x16, 0x9d, 0x22, 0x2c, 0xb5, 0x48, 0xab, 0x14, 0x0a, 0xe2, + 0x2c, 0xa9, 0x22, 0x76, 0x7b, 0xb5, 0xf5, 0x27, 0x4d, 0x95, 0x52, 0xb7, 0x25, 0x17, 0x12, 0x58, + 0xa6, 0x3b, 0x67, 0xc7, 0xc5, 0xec, 0xcc, 0x32, 0x3b, 0xdb, 0x12, 0xbc, 0xf1, 0x09, 0x7c, 0x08, + 0x2f, 0x7d, 0x14, 0x1f, 0xc5, 0xa7, 0x90, 0xec, 0xcc, 0x2e, 0x84, 0x24, 0xa6, 0xbd, 0x3b, 0x39, + 0xe7, 0xfb, 0xbe, 0xfd, 0xce, 0x77, 0x36, 0x8b, 0xde, 0x70, 0x29, 0xf9, 0x10, 0x5c, 0xca, 0x72, + 0xd7, 0x94, 0xe3, 0xea, 0xaa, 0xe3, 0x2a, 0xc8, 0x65, 0xa1, 0x22, 0xc8, 0xdd, 0x18, 0x80, 0x85, + 0x89, 0x86, 0x34, 0xd4, 0x54, 0x71, 0xd0, 0x24, 0x53, 0x52, 0x4b, 0xbc, 0x63, 0xe0, 0x84, 0xb2, + 0x9c, 0xd4, 0x4c, 0x72, 0xd5, 0x21, 0x35, 0x73, 0xf3, 0xc5, 0x3c, 0xf1, 0x48, 0xa6, 0xa9, 0x14, + 0x6e, 0xa4, 0x12, 0x0d, 0x2a, 0xa1, 0x46, 0x71, 0xf3, 0x70, 0x1e, 0x1c, 0x44, 0x91, 0x4e, 0xfb, + 0x08, 0x19, 0x5c, 0x25, 0x11, 0x58, 0xf2, 0xc1, 0x2d, 0xc9, 0x7a, 0x94, 0x55, 0xd4, 0x27, 0x96, + 0x5a, 0xfe, 0xba, 0x2c, 0x62, 0xf7, 0x5a, 0xd1, 0x2c, 0x03, 0x95, 0xdb, 0xf9, 0x56, 0x25, 0x9d, + 0x25, 0x2e, 0x15, 0x42, 0x6a, 0xaa, 0x13, 0x29, 0xec, 0xf4, 0xe9, 0xcf, 0x16, 0x5a, 0xff, 0x00, + 0xc0, 
0x7a, 0x1a, 0xd2, 0x8b, 0x52, 0x1b, 0x3f, 0x43, 0xf7, 0xab, 0x10, 0x42, 0x41, 0x53, 0x70, + 0x1a, 0xdb, 0x8d, 0xbd, 0xd5, 0xe0, 0x5e, 0xd5, 0x3c, 0xa5, 0x29, 0xe0, 0x03, 0xb4, 0x5a, 0x9b, + 0x72, 0x9a, 0xdb, 0x8d, 0xbd, 0xb5, 0xfd, 0x2d, 0x1b, 0x24, 0xa9, 0x9c, 0x90, 0x73, 0xad, 0x12, + 0xc1, 0xfb, 0x74, 0x58, 0x40, 0xd0, 0x8e, 0xed, 0x53, 0xf0, 0x77, 0xf4, 0x68, 0xe6, 0x3e, 0xce, + 0xf2, 0x76, 0x63, 0x6f, 0x7d, 0xff, 0x98, 0xcc, 0x3b, 0x4d, 0x99, 0x05, 0x99, 0x74, 0x7b, 0x31, + 0xca, 0xe0, 0xbd, 0x28, 0xd2, 0x19, 0xed, 0x00, 0xc7, 0x53, 0x3d, 0x7c, 0x82, 0x36, 0xa6, 0x1e, + 0x9e, 0x30, 0xa7, 0x55, 0x6e, 0xf0, 0x78, 0x6a, 0x83, 0x9e, 0xd0, 0xaf, 0x5f, 0x99, 0x05, 0x1e, + 0x4c, 0xaa, 0xf5, 0x18, 0xf6, 0x50, 0x3b, 0xa2, 0x69, 0x46, 0x13, 0x2e, 0x9c, 0x3b, 0x8b, 0x23, + 0x38, 0x5e, 0x0a, 0x6a, 0x3c, 0x3e, 0x40, 0x6d, 0xca, 0x42, 0xae, 0x64, 0x91, 0x39, 0x77, 0x6f, + 0xc4, 0x5d, 0xa1, 0xac, 0x3b, 0x86, 0xe3, 0x2e, 0x5a, 0xf9, 0x06, 0xa3, 0x6b, 0xa9, 0x98, 0xb3, + 0x52, 0x32, 0x9f, 0xcf, 0x4d, 0xcc, 0xbc, 0xa9, 0xe4, 0xa3, 0x81, 0xf7, 0x44, 0x2c, 0xc7, 0x42, + 0x96, 0x8d, 0x4f, 0xd1, 0x06, 0x07, 0x59, 0xa5, 0x10, 0x49, 0x91, 0x6b, 0x2a, 0xb4, 0xd3, 0xbe, + 0x91, 0x9d, 0x87, 0x1c, 0xa4, 0x49, 0xe2, 0xad, 0x25, 0xe2, 0x18, 0xb5, 0xcc, 0x4b, 0xed, 0xac, + 0x96, 0x97, 0xfc, 0x74, 0xab, 0x4b, 0xbe, 0x2b, 0xa9, 0x33, 0x6e, 0x69, 0x06, 0xc7, 0x4b, 0x81, + 0x55, 0xc7, 0x9f, 0xd1, 0x1a, 0x65, 0x61, 0x1e, 0x7d, 0x05, 0x56, 0x0c, 0xc1, 0x41, 0xa5, 0x5f, + 0xb2, 0x28, 0x04, 0x9f, 0x9d, 0x5b, 0x86, 0xcd, 0x01, 0xd1, 0xba, 0x73, 0xd4, 0x46, 0x2d, 0x13, + 0xc3, 0xd1, 0x8f, 0x26, 0xda, 0x8d, 0x64, 0x4a, 0x16, 0x7e, 0x1f, 0x8e, 0x36, 0x26, 0x6d, 0x9e, + 0x8d, 0x73, 0x3a, 0x6b, 0x7c, 0x39, 0xb1, 0x4c, 0x2e, 0x87, 0x54, 0x70, 0x22, 0x15, 0x77, 0x39, + 0x88, 0x32, 0xc5, 0xea, 0xaf, 0x9d, 0x25, 0xf9, 0x7f, 0x3e, 0x59, 0x87, 0x75, 0xf5, 0xab, 0xb9, + 0xdc, 0xf5, 0xfd, 0xdf, 0xcd, 0x9d, 0xae, 0x91, 0xf4, 0x59, 0x4e, 0x4c, 0x39, 0xae, 0xfa, 0x1d, + 0x12, 0x54, 0xc8, 0x3f, 0x15, 0x66, 0xe0, 0xb3, 0x7c, 0x50, 0x63, 0x06, 0xfd, 0xce, 0xa0, 0xc6, + 0xfc, 0x6d, 0xee, 0x9a, 0x81, 0xe7, 0xf9, 0x2c, 0xf7, 0xbc, 0x1a, 0xe5, 0x79, 0xfd, 0x8e, 0xe7, + 0xd5, 0xb8, 0xcb, 0x56, 0x69, 0xf6, 0xe5, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x57, 0x18, + 0x19, 0x5e, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_mapping.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_mapping.pb.go new file mode 100644 index 0000000..33c0694 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_mapping.pb.go @@ -0,0 +1,993 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/feed_mapping.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A feed mapping. +type FeedMapping struct { + // The resource name of the feed mapping. + // Feed mapping resource names have the form: + // + // `customers/{customer_id}/feedMappings/{feed_id}~{feed_mapping_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The feed of this feed mapping. + Feed *wrappers.StringValue `protobuf:"bytes,2,opt,name=feed,proto3" json:"feed,omitempty"` + // Feed attributes to field mappings. These mappings are a one-to-many + // relationship meaning that 1 feed attribute can be used to populate + // multiple placeholder fields, but 1 placeholder field can only draw + // data from 1 feed attribute. Ad Customizer is an exception, 1 placeholder + // field can be mapped to multiple feed attributes. Required. + AttributeFieldMappings []*AttributeFieldMapping `protobuf:"bytes,5,rep,name=attribute_field_mappings,json=attributeFieldMappings,proto3" json:"attribute_field_mappings,omitempty"` + // Status of the feed mapping. + // This field is read-only. + Status enums.FeedMappingStatusEnum_FeedMappingStatus `protobuf:"varint,6,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.FeedMappingStatusEnum_FeedMappingStatus" json:"status,omitempty"` + // Feed mapping target. Can be either a placeholder or a criterion. For a + // given feed, the active FeedMappings must have unique targets. Required. + // + // Types that are valid to be assigned to Target: + // *FeedMapping_PlaceholderType + // *FeedMapping_CriterionType + Target isFeedMapping_Target `protobuf_oneof:"target"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedMapping) Reset() { *m = FeedMapping{} } +func (m *FeedMapping) String() string { return proto.CompactTextString(m) } +func (*FeedMapping) ProtoMessage() {} +func (*FeedMapping) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_c1c27da2bf9a0d52, []int{0} +} +func (m *FeedMapping) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedMapping.Unmarshal(m, b) +} +func (m *FeedMapping) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedMapping.Marshal(b, m, deterministic) +} +func (dst *FeedMapping) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedMapping.Merge(dst, src) +} +func (m *FeedMapping) XXX_Size() int { + return xxx_messageInfo_FeedMapping.Size(m) +} +func (m *FeedMapping) XXX_DiscardUnknown() { + xxx_messageInfo_FeedMapping.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedMapping proto.InternalMessageInfo + +func (m *FeedMapping) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *FeedMapping) GetFeed() *wrappers.StringValue { + if m != nil { + return m.Feed + } + return nil +} + +func (m *FeedMapping) GetAttributeFieldMappings() []*AttributeFieldMapping { + if m != nil { + return m.AttributeFieldMappings + } + return nil +} + +func (m *FeedMapping) GetStatus() enums.FeedMappingStatusEnum_FeedMappingStatus { + if m != nil { + return m.Status + } + return enums.FeedMappingStatusEnum_UNSPECIFIED +} + +type isFeedMapping_Target interface { + isFeedMapping_Target() +} + +type FeedMapping_PlaceholderType struct { + PlaceholderType enums.PlaceholderTypeEnum_PlaceholderType 
`protobuf:"varint,3,opt,name=placeholder_type,json=placeholderType,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType,oneof"` +} + +type FeedMapping_CriterionType struct { + CriterionType enums.FeedMappingCriterionTypeEnum_FeedMappingCriterionType `protobuf:"varint,4,opt,name=criterion_type,json=criterionType,proto3,enum=google.ads.googleads.v1.enums.FeedMappingCriterionTypeEnum_FeedMappingCriterionType,oneof"` +} + +func (*FeedMapping_PlaceholderType) isFeedMapping_Target() {} + +func (*FeedMapping_CriterionType) isFeedMapping_Target() {} + +func (m *FeedMapping) GetTarget() isFeedMapping_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *FeedMapping) GetPlaceholderType() enums.PlaceholderTypeEnum_PlaceholderType { + if x, ok := m.GetTarget().(*FeedMapping_PlaceholderType); ok { + return x.PlaceholderType + } + return enums.PlaceholderTypeEnum_UNSPECIFIED +} + +func (m *FeedMapping) GetCriterionType() enums.FeedMappingCriterionTypeEnum_FeedMappingCriterionType { + if x, ok := m.GetTarget().(*FeedMapping_CriterionType); ok { + return x.CriterionType + } + return enums.FeedMappingCriterionTypeEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*FeedMapping) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedMapping_OneofMarshaler, _FeedMapping_OneofUnmarshaler, _FeedMapping_OneofSizer, []interface{}{ + (*FeedMapping_PlaceholderType)(nil), + (*FeedMapping_CriterionType)(nil), + } +} + +func _FeedMapping_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedMapping) + // target + switch x := m.Target.(type) { + case *FeedMapping_PlaceholderType: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PlaceholderType)) + case *FeedMapping_CriterionType: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CriterionType)) + case nil: + default: + return fmt.Errorf("FeedMapping.Target has unexpected type %T", x) + } + return nil +} + +func _FeedMapping_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedMapping) + switch tag { + case 3: // target.placeholder_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &FeedMapping_PlaceholderType{enums.PlaceholderTypeEnum_PlaceholderType(x)} + return true, err + case 4: // target.criterion_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &FeedMapping_CriterionType{enums.FeedMappingCriterionTypeEnum_FeedMappingCriterionType(x)} + return true, err + default: + return false, nil + } +} + +func _FeedMapping_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedMapping) + // target + switch x := m.Target.(type) { + case *FeedMapping_PlaceholderType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.PlaceholderType)) + case *FeedMapping_CriterionType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CriterionType)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Maps from feed attribute id to a placeholder or criterion field id. +type AttributeFieldMapping struct { + // Feed attribute from which to map. 
+ FeedAttributeId *wrappers.Int64Value `protobuf:"bytes,1,opt,name=feed_attribute_id,json=feedAttributeId,proto3" json:"feed_attribute_id,omitempty"` + // The placeholder field ID. If a placeholder field enum is not published in + // the current API version, then this field will be populated and the field + // oneof will be empty. + // This field is read-only. + FieldId *wrappers.Int64Value `protobuf:"bytes,2,opt,name=field_id,json=fieldId,proto3" json:"field_id,omitempty"` + // Placeholder or criterion field to be populated using data from + // the above feed attribute. Required. + // + // Types that are valid to be assigned to Field: + // *AttributeFieldMapping_SitelinkField + // *AttributeFieldMapping_CallField + // *AttributeFieldMapping_AppField + // *AttributeFieldMapping_LocationField + // *AttributeFieldMapping_AffiliateLocationField + // *AttributeFieldMapping_CalloutField + // *AttributeFieldMapping_StructuredSnippetField + // *AttributeFieldMapping_MessageField + // *AttributeFieldMapping_PriceField + // *AttributeFieldMapping_PromotionField + // *AttributeFieldMapping_AdCustomizerField + // *AttributeFieldMapping_DsaPageFeedField + // *AttributeFieldMapping_LocationExtensionTargetingField + // *AttributeFieldMapping_EducationField + // *AttributeFieldMapping_FlightField + // *AttributeFieldMapping_CustomField + // *AttributeFieldMapping_HotelField + // *AttributeFieldMapping_RealEstateField + // *AttributeFieldMapping_TravelField + // *AttributeFieldMapping_LocalField + // *AttributeFieldMapping_JobField + Field isAttributeFieldMapping_Field `protobuf_oneof:"field"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttributeFieldMapping) Reset() { *m = AttributeFieldMapping{} } +func (m *AttributeFieldMapping) String() string { return proto.CompactTextString(m) } +func (*AttributeFieldMapping) ProtoMessage() {} +func (*AttributeFieldMapping) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_c1c27da2bf9a0d52, []int{1} +} +func (m *AttributeFieldMapping) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttributeFieldMapping.Unmarshal(m, b) +} +func (m *AttributeFieldMapping) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttributeFieldMapping.Marshal(b, m, deterministic) +} +func (dst *AttributeFieldMapping) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttributeFieldMapping.Merge(dst, src) +} +func (m *AttributeFieldMapping) XXX_Size() int { + return xxx_messageInfo_AttributeFieldMapping.Size(m) +} +func (m *AttributeFieldMapping) XXX_DiscardUnknown() { + xxx_messageInfo_AttributeFieldMapping.DiscardUnknown(m) +} + +var xxx_messageInfo_AttributeFieldMapping proto.InternalMessageInfo + +func (m *AttributeFieldMapping) GetFeedAttributeId() *wrappers.Int64Value { + if m != nil { + return m.FeedAttributeId + } + return nil +} + +func (m *AttributeFieldMapping) GetFieldId() *wrappers.Int64Value { + if m != nil { + return m.FieldId + } + return nil +} + +type isAttributeFieldMapping_Field interface { + isAttributeFieldMapping_Field() +} + +type AttributeFieldMapping_SitelinkField struct { + SitelinkField enums.SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField `protobuf:"varint,3,opt,name=sitelink_field,json=sitelinkField,proto3,enum=google.ads.googleads.v1.enums.SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField,oneof"` +} + +type AttributeFieldMapping_CallField struct { + CallField enums.CallPlaceholderFieldEnum_CallPlaceholderField 
`protobuf:"varint,4,opt,name=call_field,json=callField,proto3,enum=google.ads.googleads.v1.enums.CallPlaceholderFieldEnum_CallPlaceholderField,oneof"` +} + +type AttributeFieldMapping_AppField struct { + AppField enums.AppPlaceholderFieldEnum_AppPlaceholderField `protobuf:"varint,5,opt,name=app_field,json=appField,proto3,enum=google.ads.googleads.v1.enums.AppPlaceholderFieldEnum_AppPlaceholderField,oneof"` +} + +type AttributeFieldMapping_LocationField struct { + LocationField enums.LocationPlaceholderFieldEnum_LocationPlaceholderField `protobuf:"varint,6,opt,name=location_field,json=locationField,proto3,enum=google.ads.googleads.v1.enums.LocationPlaceholderFieldEnum_LocationPlaceholderField,oneof"` +} + +type AttributeFieldMapping_AffiliateLocationField struct { + AffiliateLocationField enums.AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField `protobuf:"varint,7,opt,name=affiliate_location_field,json=affiliateLocationField,proto3,enum=google.ads.googleads.v1.enums.AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField,oneof"` +} + +type AttributeFieldMapping_CalloutField struct { + CalloutField enums.CalloutPlaceholderFieldEnum_CalloutPlaceholderField `protobuf:"varint,8,opt,name=callout_field,json=calloutField,proto3,enum=google.ads.googleads.v1.enums.CalloutPlaceholderFieldEnum_CalloutPlaceholderField,oneof"` +} + +type AttributeFieldMapping_StructuredSnippetField struct { + StructuredSnippetField enums.StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField `protobuf:"varint,9,opt,name=structured_snippet_field,json=structuredSnippetField,proto3,enum=google.ads.googleads.v1.enums.StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField,oneof"` +} + +type AttributeFieldMapping_MessageField struct { + MessageField enums.MessagePlaceholderFieldEnum_MessagePlaceholderField `protobuf:"varint,10,opt,name=message_field,json=messageField,proto3,enum=google.ads.googleads.v1.enums.MessagePlaceholderFieldEnum_MessagePlaceholderField,oneof"` +} + +type AttributeFieldMapping_PriceField struct { + PriceField enums.PricePlaceholderFieldEnum_PricePlaceholderField `protobuf:"varint,11,opt,name=price_field,json=priceField,proto3,enum=google.ads.googleads.v1.enums.PricePlaceholderFieldEnum_PricePlaceholderField,oneof"` +} + +type AttributeFieldMapping_PromotionField struct { + PromotionField enums.PromotionPlaceholderFieldEnum_PromotionPlaceholderField `protobuf:"varint,12,opt,name=promotion_field,json=promotionField,proto3,enum=google.ads.googleads.v1.enums.PromotionPlaceholderFieldEnum_PromotionPlaceholderField,oneof"` +} + +type AttributeFieldMapping_AdCustomizerField struct { + AdCustomizerField enums.AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField `protobuf:"varint,13,opt,name=ad_customizer_field,json=adCustomizerField,proto3,enum=google.ads.googleads.v1.enums.AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField,oneof"` +} + +type AttributeFieldMapping_DsaPageFeedField struct { + DsaPageFeedField enums.DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField `protobuf:"varint,14,opt,name=dsa_page_feed_field,json=dsaPageFeedField,proto3,enum=google.ads.googleads.v1.enums.DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField,oneof"` +} + +type AttributeFieldMapping_LocationExtensionTargetingField struct { + LocationExtensionTargetingField enums.LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField 
`protobuf:"varint,15,opt,name=location_extension_targeting_field,json=locationExtensionTargetingField,proto3,enum=google.ads.googleads.v1.enums.LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField,oneof"` +} + +type AttributeFieldMapping_EducationField struct { + EducationField enums.EducationPlaceholderFieldEnum_EducationPlaceholderField `protobuf:"varint,16,opt,name=education_field,json=educationField,proto3,enum=google.ads.googleads.v1.enums.EducationPlaceholderFieldEnum_EducationPlaceholderField,oneof"` +} + +type AttributeFieldMapping_FlightField struct { + FlightField enums.FlightPlaceholderFieldEnum_FlightPlaceholderField `protobuf:"varint,17,opt,name=flight_field,json=flightField,proto3,enum=google.ads.googleads.v1.enums.FlightPlaceholderFieldEnum_FlightPlaceholderField,oneof"` +} + +type AttributeFieldMapping_CustomField struct { + CustomField enums.CustomPlaceholderFieldEnum_CustomPlaceholderField `protobuf:"varint,18,opt,name=custom_field,json=customField,proto3,enum=google.ads.googleads.v1.enums.CustomPlaceholderFieldEnum_CustomPlaceholderField,oneof"` +} + +type AttributeFieldMapping_HotelField struct { + HotelField enums.HotelPlaceholderFieldEnum_HotelPlaceholderField `protobuf:"varint,19,opt,name=hotel_field,json=hotelField,proto3,enum=google.ads.googleads.v1.enums.HotelPlaceholderFieldEnum_HotelPlaceholderField,oneof"` +} + +type AttributeFieldMapping_RealEstateField struct { + RealEstateField enums.RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField `protobuf:"varint,20,opt,name=real_estate_field,json=realEstateField,proto3,enum=google.ads.googleads.v1.enums.RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField,oneof"` +} + +type AttributeFieldMapping_TravelField struct { + TravelField enums.TravelPlaceholderFieldEnum_TravelPlaceholderField `protobuf:"varint,21,opt,name=travel_field,json=travelField,proto3,enum=google.ads.googleads.v1.enums.TravelPlaceholderFieldEnum_TravelPlaceholderField,oneof"` +} + +type AttributeFieldMapping_LocalField struct { + LocalField enums.LocalPlaceholderFieldEnum_LocalPlaceholderField `protobuf:"varint,22,opt,name=local_field,json=localField,proto3,enum=google.ads.googleads.v1.enums.LocalPlaceholderFieldEnum_LocalPlaceholderField,oneof"` +} + +type AttributeFieldMapping_JobField struct { + JobField enums.JobPlaceholderFieldEnum_JobPlaceholderField `protobuf:"varint,23,opt,name=job_field,json=jobField,proto3,enum=google.ads.googleads.v1.enums.JobPlaceholderFieldEnum_JobPlaceholderField,oneof"` +} + +func (*AttributeFieldMapping_SitelinkField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_CallField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_AppField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_LocationField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_AffiliateLocationField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_CalloutField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_StructuredSnippetField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_MessageField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_PriceField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_PromotionField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_AdCustomizerField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_DsaPageFeedField) isAttributeFieldMapping_Field() {} + +func 
(*AttributeFieldMapping_LocationExtensionTargetingField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_EducationField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_FlightField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_CustomField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_HotelField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_RealEstateField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_TravelField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_LocalField) isAttributeFieldMapping_Field() {} + +func (*AttributeFieldMapping_JobField) isAttributeFieldMapping_Field() {} + +func (m *AttributeFieldMapping) GetField() isAttributeFieldMapping_Field { + if m != nil { + return m.Field + } + return nil +} + +func (m *AttributeFieldMapping) GetSitelinkField() enums.SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_SitelinkField); ok { + return x.SitelinkField + } + return enums.SitelinkPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetCallField() enums.CallPlaceholderFieldEnum_CallPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_CallField); ok { + return x.CallField + } + return enums.CallPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetAppField() enums.AppPlaceholderFieldEnum_AppPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_AppField); ok { + return x.AppField + } + return enums.AppPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetLocationField() enums.LocationPlaceholderFieldEnum_LocationPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_LocationField); ok { + return x.LocationField + } + return enums.LocationPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetAffiliateLocationField() enums.AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_AffiliateLocationField); ok { + return x.AffiliateLocationField + } + return enums.AffiliateLocationPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetCalloutField() enums.CalloutPlaceholderFieldEnum_CalloutPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_CalloutField); ok { + return x.CalloutField + } + return enums.CalloutPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetStructuredSnippetField() enums.StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_StructuredSnippetField); ok { + return x.StructuredSnippetField + } + return enums.StructuredSnippetPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetMessageField() enums.MessagePlaceholderFieldEnum_MessagePlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_MessageField); ok { + return x.MessageField + } + return enums.MessagePlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetPriceField() enums.PricePlaceholderFieldEnum_PricePlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_PriceField); ok { + return x.PriceField + } + return enums.PricePlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetPromotionField() enums.PromotionPlaceholderFieldEnum_PromotionPlaceholderField { + if x, ok := 
m.GetField().(*AttributeFieldMapping_PromotionField); ok { + return x.PromotionField + } + return enums.PromotionPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetAdCustomizerField() enums.AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_AdCustomizerField); ok { + return x.AdCustomizerField + } + return enums.AdCustomizerPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetDsaPageFeedField() enums.DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField { + if x, ok := m.GetField().(*AttributeFieldMapping_DsaPageFeedField); ok { + return x.DsaPageFeedField + } + return enums.DsaPageFeedCriterionFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetLocationExtensionTargetingField() enums.LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField { + if x, ok := m.GetField().(*AttributeFieldMapping_LocationExtensionTargetingField); ok { + return x.LocationExtensionTargetingField + } + return enums.LocationExtensionTargetingCriterionFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetEducationField() enums.EducationPlaceholderFieldEnum_EducationPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_EducationField); ok { + return x.EducationField + } + return enums.EducationPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetFlightField() enums.FlightPlaceholderFieldEnum_FlightPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_FlightField); ok { + return x.FlightField + } + return enums.FlightPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetCustomField() enums.CustomPlaceholderFieldEnum_CustomPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_CustomField); ok { + return x.CustomField + } + return enums.CustomPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetHotelField() enums.HotelPlaceholderFieldEnum_HotelPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_HotelField); ok { + return x.HotelField + } + return enums.HotelPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetRealEstateField() enums.RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_RealEstateField); ok { + return x.RealEstateField + } + return enums.RealEstatePlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetTravelField() enums.TravelPlaceholderFieldEnum_TravelPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_TravelField); ok { + return x.TravelField + } + return enums.TravelPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetLocalField() enums.LocalPlaceholderFieldEnum_LocalPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_LocalField); ok { + return x.LocalField + } + return enums.LocalPlaceholderFieldEnum_UNSPECIFIED +} + +func (m *AttributeFieldMapping) GetJobField() enums.JobPlaceholderFieldEnum_JobPlaceholderField { + if x, ok := m.GetField().(*AttributeFieldMapping_JobField); ok { + return x.JobField + } + return enums.JobPlaceholderFieldEnum_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AttributeFieldMapping) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttributeFieldMapping_OneofMarshaler, _AttributeFieldMapping_OneofUnmarshaler, _AttributeFieldMapping_OneofSizer, []interface{}{ + (*AttributeFieldMapping_SitelinkField)(nil), + (*AttributeFieldMapping_CallField)(nil), + (*AttributeFieldMapping_AppField)(nil), + (*AttributeFieldMapping_LocationField)(nil), + (*AttributeFieldMapping_AffiliateLocationField)(nil), + (*AttributeFieldMapping_CalloutField)(nil), + (*AttributeFieldMapping_StructuredSnippetField)(nil), + (*AttributeFieldMapping_MessageField)(nil), + (*AttributeFieldMapping_PriceField)(nil), + (*AttributeFieldMapping_PromotionField)(nil), + (*AttributeFieldMapping_AdCustomizerField)(nil), + (*AttributeFieldMapping_DsaPageFeedField)(nil), + (*AttributeFieldMapping_LocationExtensionTargetingField)(nil), + (*AttributeFieldMapping_EducationField)(nil), + (*AttributeFieldMapping_FlightField)(nil), + (*AttributeFieldMapping_CustomField)(nil), + (*AttributeFieldMapping_HotelField)(nil), + (*AttributeFieldMapping_RealEstateField)(nil), + (*AttributeFieldMapping_TravelField)(nil), + (*AttributeFieldMapping_LocalField)(nil), + (*AttributeFieldMapping_JobField)(nil), + } +} + +func _AttributeFieldMapping_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttributeFieldMapping) + // field + switch x := m.Field.(type) { + case *AttributeFieldMapping_SitelinkField: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SitelinkField)) + case *AttributeFieldMapping_CallField: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CallField)) + case *AttributeFieldMapping_AppField: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AppField)) + case *AttributeFieldMapping_LocationField: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LocationField)) + case *AttributeFieldMapping_AffiliateLocationField: + b.EncodeVarint(7<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AffiliateLocationField)) + case *AttributeFieldMapping_CalloutField: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CalloutField)) + case *AttributeFieldMapping_StructuredSnippetField: + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.StructuredSnippetField)) + case *AttributeFieldMapping_MessageField: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MessageField)) + case *AttributeFieldMapping_PriceField: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PriceField)) + case *AttributeFieldMapping_PromotionField: + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PromotionField)) + case *AttributeFieldMapping_AdCustomizerField: + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.AdCustomizerField)) + case *AttributeFieldMapping_DsaPageFeedField: + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DsaPageFeedField)) + case *AttributeFieldMapping_LocationExtensionTargetingField: + b.EncodeVarint(15<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LocationExtensionTargetingField)) + case *AttributeFieldMapping_EducationField: + b.EncodeVarint(16<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.EducationField)) + case *AttributeFieldMapping_FlightField: + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FlightField)) + case 
*AttributeFieldMapping_CustomField: + b.EncodeVarint(18<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CustomField)) + case *AttributeFieldMapping_HotelField: + b.EncodeVarint(19<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.HotelField)) + case *AttributeFieldMapping_RealEstateField: + b.EncodeVarint(20<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RealEstateField)) + case *AttributeFieldMapping_TravelField: + b.EncodeVarint(21<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.TravelField)) + case *AttributeFieldMapping_LocalField: + b.EncodeVarint(22<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LocalField)) + case *AttributeFieldMapping_JobField: + b.EncodeVarint(23<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.JobField)) + case nil: + default: + return fmt.Errorf("AttributeFieldMapping.Field has unexpected type %T", x) + } + return nil +} + +func _AttributeFieldMapping_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttributeFieldMapping) + switch tag { + case 3: // field.sitelink_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_SitelinkField{enums.SitelinkPlaceholderFieldEnum_SitelinkPlaceholderField(x)} + return true, err + case 4: // field.call_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_CallField{enums.CallPlaceholderFieldEnum_CallPlaceholderField(x)} + return true, err + case 5: // field.app_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_AppField{enums.AppPlaceholderFieldEnum_AppPlaceholderField(x)} + return true, err + case 6: // field.location_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_LocationField{enums.LocationPlaceholderFieldEnum_LocationPlaceholderField(x)} + return true, err + case 7: // field.affiliate_location_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_AffiliateLocationField{enums.AffiliateLocationPlaceholderFieldEnum_AffiliateLocationPlaceholderField(x)} + return true, err + case 8: // field.callout_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_CalloutField{enums.CalloutPlaceholderFieldEnum_CalloutPlaceholderField(x)} + return true, err + case 9: // field.structured_snippet_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_StructuredSnippetField{enums.StructuredSnippetPlaceholderFieldEnum_StructuredSnippetPlaceholderField(x)} + return true, err + case 10: // field.message_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_MessageField{enums.MessagePlaceholderFieldEnum_MessagePlaceholderField(x)} + return true, err + case 11: // field.price_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_PriceField{enums.PricePlaceholderFieldEnum_PricePlaceholderField(x)} + return true, err + case 12: // 
field.promotion_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_PromotionField{enums.PromotionPlaceholderFieldEnum_PromotionPlaceholderField(x)} + return true, err + case 13: // field.ad_customizer_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_AdCustomizerField{enums.AdCustomizerPlaceholderFieldEnum_AdCustomizerPlaceholderField(x)} + return true, err + case 14: // field.dsa_page_feed_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_DsaPageFeedField{enums.DsaPageFeedCriterionFieldEnum_DsaPageFeedCriterionField(x)} + return true, err + case 15: // field.location_extension_targeting_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_LocationExtensionTargetingField{enums.LocationExtensionTargetingCriterionFieldEnum_LocationExtensionTargetingCriterionField(x)} + return true, err + case 16: // field.education_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_EducationField{enums.EducationPlaceholderFieldEnum_EducationPlaceholderField(x)} + return true, err + case 17: // field.flight_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_FlightField{enums.FlightPlaceholderFieldEnum_FlightPlaceholderField(x)} + return true, err + case 18: // field.custom_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_CustomField{enums.CustomPlaceholderFieldEnum_CustomPlaceholderField(x)} + return true, err + case 19: // field.hotel_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_HotelField{enums.HotelPlaceholderFieldEnum_HotelPlaceholderField(x)} + return true, err + case 20: // field.real_estate_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_RealEstateField{enums.RealEstatePlaceholderFieldEnum_RealEstatePlaceholderField(x)} + return true, err + case 21: // field.travel_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_TravelField{enums.TravelPlaceholderFieldEnum_TravelPlaceholderField(x)} + return true, err + case 22: // field.local_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_LocalField{enums.LocalPlaceholderFieldEnum_LocalPlaceholderField(x)} + return true, err + case 23: // field.job_field + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Field = &AttributeFieldMapping_JobField{enums.JobPlaceholderFieldEnum_JobPlaceholderField(x)} + return true, err + default: + return false, nil + } +} + +func _AttributeFieldMapping_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttributeFieldMapping) + // field + switch x := m.Field.(type) { + case 
*AttributeFieldMapping_SitelinkField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.SitelinkField)) + case *AttributeFieldMapping_CallField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CallField)) + case *AttributeFieldMapping_AppField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AppField)) + case *AttributeFieldMapping_LocationField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.LocationField)) + case *AttributeFieldMapping_AffiliateLocationField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AffiliateLocationField)) + case *AttributeFieldMapping_CalloutField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CalloutField)) + case *AttributeFieldMapping_StructuredSnippetField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.StructuredSnippetField)) + case *AttributeFieldMapping_MessageField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.MessageField)) + case *AttributeFieldMapping_PriceField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.PriceField)) + case *AttributeFieldMapping_PromotionField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.PromotionField)) + case *AttributeFieldMapping_AdCustomizerField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.AdCustomizerField)) + case *AttributeFieldMapping_DsaPageFeedField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.DsaPageFeedField)) + case *AttributeFieldMapping_LocationExtensionTargetingField: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.LocationExtensionTargetingField)) + case *AttributeFieldMapping_EducationField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.EducationField)) + case *AttributeFieldMapping_FlightField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.FlightField)) + case *AttributeFieldMapping_CustomField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.CustomField)) + case *AttributeFieldMapping_HotelField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.HotelField)) + case *AttributeFieldMapping_RealEstateField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.RealEstateField)) + case *AttributeFieldMapping_TravelField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.TravelField)) + case *AttributeFieldMapping_LocalField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.LocalField)) + case *AttributeFieldMapping_JobField: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(x.JobField)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*FeedMapping)(nil), "google.ads.googleads.v1.resources.FeedMapping") + proto.RegisterType((*AttributeFieldMapping)(nil), "google.ads.googleads.v1.resources.AttributeFieldMapping") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/feed_mapping.proto", fileDescriptor_feed_mapping_c1c27da2bf9a0d52) +} + +var fileDescriptor_feed_mapping_c1c27da2bf9a0d52 = []byte{ + // 1316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x98, 0x4d, 0x6f, 0xe4, 0x44, + 0x13, 0xc7, 0xed, 0x6c, 0x5e, 0x7b, 0xf2, 0x3a, 0x79, 0x36, 0xcf, 0x68, 0x59, 0x41, 0x36, 0x68, + 0xa5, 0x9c, 0x3c, 0x24, 0x44, 0x0b, 0xcc, 0xf2, 0xa2, 0x49, 0x48, 0xe2, 0x44, 0xbb, 0xab, 0x91, + 0x13, 0x45, 0x08, 0x45, 0x8c, 0x7a, 0xc6, 0x3d, 0x13, 0x07, 0x8f, 0x6d, 0xec, 0x76, 0x20, 0x48, + 0x7b, 0xe0, 0x02, 0x37, 0x3e, 0xc4, 0x1e, 0x39, 0xf2, 
0x15, 0xb8, 0x71, 0xe6, 0x53, 0xf0, 0x1d, + 0x90, 0x50, 0x77, 0xd9, 0x3d, 0x4e, 0xb6, 0xdb, 0x6d, 0xe6, 0xe6, 0x74, 0xd5, 0xbf, 0x7e, 0x35, + 0xe5, 0xea, 0x76, 0x75, 0xd0, 0xde, 0x30, 0x0c, 0x87, 0x3e, 0x69, 0x62, 0x37, 0x69, 0xc2, 0x23, + 0x7b, 0xba, 0xd9, 0x69, 0xc6, 0x24, 0x09, 0xd3, 0xb8, 0x4f, 0x92, 0xe6, 0x80, 0x10, 0xb7, 0x3b, + 0xc2, 0x51, 0xe4, 0x05, 0x43, 0x2b, 0x8a, 0x43, 0x1a, 0xd6, 0x9f, 0x80, 0xab, 0x85, 0xdd, 0xc4, + 0x12, 0x2a, 0xeb, 0x66, 0xc7, 0x12, 0xaa, 0x47, 0x07, 0xaa, 0xc0, 0x24, 0x48, 0x47, 0x49, 0x13, + 0xbb, 0xdd, 0x7e, 0x9a, 0xd0, 0x70, 0xe4, 0xfd, 0x48, 0xe2, 0x6e, 0xe4, 0xe3, 0x3e, 0xb9, 0x0a, + 0x7d, 0x97, 0xc4, 0xdd, 0x81, 0x47, 0x7c, 0x17, 0x38, 0x8f, 0x6c, 0x4d, 0x90, 0xc1, 0xc0, 0xf3, + 0x3d, 0x4c, 0x49, 0xd7, 0x0f, 0xfb, 0x98, 0x7a, 0x61, 0xa0, 0x8c, 0xf4, 0x89, 0x26, 0x52, 0x14, + 0x29, 0xa5, 0xad, 0x72, 0x69, 0x1f, 0xfb, 0xbe, 0x52, 0xfb, 0x99, 0x5e, 0x1b, 0xa6, 0x54, 0x29, + 0xff, 0x54, 0x23, 0xe7, 0x15, 0x54, 0xaa, 0xdb, 0xe5, 0x6a, 0x37, 0xc1, 0xdd, 0x08, 0x0f, 0x49, + 0x97, 0xbf, 0xe0, 0x7e, 0xec, 0x51, 0x12, 0xb3, 0x12, 0x16, 0x43, 0x7c, 0x51, 0x1e, 0x82, 0xb8, + 0xa9, 0xa6, 0xee, 0x9a, 0x00, 0xc5, 0xde, 0x2a, 0xa4, 0x40, 0x6f, 0x23, 0x92, 0x05, 0xf8, 0xe8, + 0x3f, 0x04, 0x48, 0x28, 0xa6, 0x69, 0x52, 0xad, 0x76, 0x03, 0xdf, 0x1b, 0x5e, 0xa9, 0x2b, 0xff, + 0xbc, 0x5c, 0x7d, 0x15, 0x52, 0xe2, 0x4f, 0xda, 0x6c, 0xd7, 0x61, 0x6f, 0x52, 0x2e, 0xeb, 0x73, + 0x35, 0xb7, 0xa3, 0x17, 0xf3, 0x97, 0x45, 0x7e, 0xa0, 0x24, 0x48, 0x78, 0xa1, 0x71, 0x3c, 0x24, + 0xf4, 0x6e, 0xf1, 0x8b, 0x11, 0x3f, 0xaf, 0x18, 0x71, 0xc2, 0xfe, 0x1f, 0x91, 0x24, 0x61, 0x1d, + 0xa8, 0x92, 0xef, 0x95, 0xcb, 0x8b, 0xb2, 0x42, 0xcb, 0x68, 0x6a, 0x18, 0xc5, 0x5e, 0x9f, 0x4c, + 0xda, 0xb0, 0x51, 0x1c, 0x8e, 0xc2, 0xd2, 0x9f, 0xac, 0xd9, 0x75, 0x31, 0xc1, 0x7e, 0x97, 0xb0, + 0x4e, 0x55, 0xe7, 0xa0, 0xa9, 0x7a, 0xe2, 0x51, 0xe2, 0x7b, 0xc1, 0xb7, 0x93, 0x1e, 0x9b, 0x09, + 0x8d, 0xd3, 0x3e, 0x4d, 0x63, 0xe2, 0x76, 0x93, 0xc0, 0x8b, 0x22, 0x32, 0xf1, 0x01, 0x44, 0x63, + 0x7c, 0x53, 0xb2, 0x0f, 0xde, 0xcd, 0xd4, 0xfc, 0xaf, 0x5e, 0x3a, 0x68, 0x7e, 0x1f, 0xe3, 0x28, + 0x22, 0x71, 0xbe, 0x45, 0x1f, 0xe7, 0xd1, 0x23, 0xaf, 0x89, 0x83, 0x20, 0xa4, 0xbc, 0x99, 0x32, + 0xeb, 0xd6, 0xef, 0xd3, 0xa8, 0x76, 0x44, 0x88, 0xfb, 0x12, 0x76, 0x77, 0xfd, 0x7d, 0xb4, 0x94, + 0x7f, 0x5e, 0xba, 0x01, 0x1e, 0x91, 0x86, 0xb9, 0x69, 0x6e, 0x2f, 0x38, 0x8b, 0xf9, 0xe2, 0x2b, + 0x3c, 0x22, 0xf5, 0x0f, 0xd0, 0x34, 0x3b, 0x12, 0x1a, 0x53, 0x9b, 0xe6, 0x76, 0x6d, 0xf7, 0x71, + 0xf6, 0x75, 0xb2, 0xf2, 0x0c, 0xac, 0x33, 0x1a, 0x7b, 0xc1, 0xf0, 0x02, 0xfb, 0x29, 0x71, 0xb8, + 0x67, 0x3d, 0x46, 0x0d, 0x4c, 0x69, 0xec, 0xf5, 0x52, 0x4a, 0x20, 0xfb, 0xfc, 0x3c, 0x49, 0x1a, + 0x33, 0x9b, 0x0f, 0xb6, 0x6b, 0xbb, 0x1f, 0x5b, 0xda, 0xcf, 0x9d, 0xd5, 0xce, 0x43, 0x1c, 0xb1, + 0x08, 0x59, 0xca, 0xce, 0x06, 0x96, 0x2d, 0x27, 0xf5, 0x6f, 0xd0, 0x2c, 0x9c, 0x55, 0x8d, 0xd9, + 0x4d, 0x73, 0x7b, 0x79, 0xf7, 0x48, 0x49, 0xe0, 0x75, 0xb6, 0x0a, 0x65, 0x38, 0xe3, 0xba, 0xc3, + 0x20, 0x1d, 0xbd, 0xbd, 0xea, 0x64, 0x51, 0xeb, 0x21, 0x5a, 0xbd, 0xbf, 0x37, 0x1a, 0x0f, 0x38, + 0x69, 0x5f, 0x43, 0xea, 0x8c, 0x65, 0xe7, 0xb7, 0x11, 0xe1, 0x9c, 0x7b, 0x6b, 0xb6, 0xe1, 0xac, + 0x44, 0x77, 0x97, 0xea, 0xaf, 0xd1, 0xf2, 0xdd, 0xd3, 0xbb, 0x31, 0xcd, 0x71, 0xe7, 0xd5, 0x7f, + 0xd8, 0x41, 0xae, 0x17, 0x5c, 0x95, 0xd1, 0x36, 0x9c, 0xa5, 0x7e, 0x71, 0x61, 0x7f, 0x1e, 0xcd, + 0xc2, 0x49, 0xb6, 0xf5, 0xd7, 0x06, 0x7a, 0x28, 0x7d, 0x17, 0xf5, 0x63, 0xb4, 0xc6, 0x3f, 0x16, + 0xe3, 0x97, 0xed, 0xb9, 0xbc, 0x85, 0x6a, 0xbb, 0xef, 0xbc, 0xd5, 0x26, 0x27, 
0x01, 0x7d, 0xb6, + 0x07, 0x5d, 0xb2, 0xc2, 0x54, 0x22, 0xe4, 0x89, 0x5b, 0x7f, 0x86, 0xe6, 0xa1, 0x4d, 0xbc, 0xbc, + 0xcd, 0x4a, 0xf5, 0x73, 0xdc, 0xf9, 0xc4, 0x65, 0x35, 0x12, 0x3b, 0x97, 0xaf, 0x65, 0xaf, 0x44, + 0x57, 0xa3, 0xb3, 0x4c, 0x54, 0x78, 0x0d, 0xfc, 0x87, 0xf1, 0x1a, 0xa9, 0x8c, 0xac, 0x46, 0x39, + 0x8d, 0x2f, 0xd4, 0x47, 0x08, 0xf1, 0x51, 0x05, 0xd0, 0xf0, 0x7a, 0x5e, 0x68, 0xd0, 0x07, 0xd8, + 0xf7, 0xa5, 0x58, 0x99, 0xc1, 0x36, 0x9c, 0x05, 0x46, 0x00, 0x9c, 0x87, 0x16, 0xd8, 0x50, 0x05, + 0xb4, 0x19, 0x4e, 0x3b, 0xd5, 0xd0, 0xda, 0x51, 0x24, 0x85, 0x49, 0xd6, 0x6d, 0xc3, 0x99, 0xc7, + 0x51, 0x04, 0xa8, 0xd7, 0x68, 0x59, 0x7c, 0x88, 0x80, 0x37, 0x5b, 0xa9, 0xb0, 0x2f, 0x32, 0x91, + 0x14, 0xaa, 0x32, 0xb2, 0xc2, 0xe6, 0x34, 0xc0, 0xbf, 0x31, 0x51, 0x43, 0x32, 0x89, 0x42, 0x26, + 0x73, 0x3c, 0x93, 0x81, 0xee, 0x97, 0xe7, 0xf2, 0xd2, 0x94, 0xb4, 0x5e, 0xb6, 0xe1, 0x6c, 0xe0, + 0xfb, 0x4e, 0x90, 0xe4, 0x2d, 0x5a, 0xca, 0x87, 0x4d, 0x48, 0x6c, 0x9e, 0x27, 0xe6, 0x54, 0x68, + 0x80, 0x30, 0xa5, 0xca, 0x1e, 0x90, 0xd8, 0x6c, 0xc3, 0x59, 0xcc, 0x50, 0xe3, 0xfa, 0x48, 0x3e, + 0x39, 0x90, 0xc6, 0x42, 0xa5, 0xfa, 0x9c, 0x09, 0xf9, 0x19, 0xa8, 0xe5, 0x7b, 0x41, 0xe7, 0xc5, + 0xea, 0x93, 0xdc, 0x77, 0x12, 0xf5, 0xc9, 0x87, 0x11, 0x48, 0x0c, 0x55, 0xaa, 0xcf, 0x4b, 0xd0, + 0x48, 0xd3, 0x51, 0xd8, 0x58, 0x7d, 0x32, 0x14, 0xa0, 0xbf, 0x43, 0x35, 0x18, 0x49, 0x00, 0x5c, + 0xe3, 0xe0, 0x57, 0xba, 0x73, 0x9a, 0x29, 0xa4, 0x58, 0xa9, 0xc5, 0x36, 0x1c, 0xc4, 0x21, 0x80, + 0xfc, 0xc9, 0x44, 0x2b, 0xe3, 0x49, 0x06, 0xb8, 0x8b, 0x9c, 0x7b, 0xa1, 0xe5, 0x66, 0x2a, 0x05, + 0x5b, 0x61, 0xb5, 0x0d, 0x67, 0x59, 0x00, 0x21, 0x87, 0x5f, 0x4d, 0xb4, 0x7e, 0xf7, 0x16, 0x08, + 0x79, 0x2c, 0xf1, 0x3c, 0x2e, 0x75, 0x3b, 0xc6, 0x3d, 0x10, 0x42, 0xf9, 0x66, 0x29, 0x71, 0xb0, + 0x0d, 0x67, 0x0d, 0x17, 0xec, 0x90, 0xd0, 0x2f, 0x26, 0x5a, 0xbf, 0x7b, 0x27, 0x82, 0x84, 0x96, + 0x2b, 0x15, 0xe6, 0xcb, 0x04, 0x77, 0xd8, 0x2b, 0x25, 0xc4, 0x15, 0x1f, 0xab, 0x71, 0x36, 0x4a, + 0xab, 0x6d, 0x38, 0xab, 0xee, 0xd8, 0x08, 0x99, 0xfc, 0x61, 0xa2, 0xad, 0xd2, 0x61, 0x1d, 0x12, + 0x5b, 0xe1, 0x89, 0xd1, 0x8a, 0xa7, 0xdc, 0x61, 0x1e, 0xe7, 0x3c, 0x0f, 0x23, 0xc9, 0xb3, 0xaa, + 0xb3, 0x6d, 0x38, 0xef, 0xf9, 0x4a, 0xdf, 0x71, 0x93, 0x8d, 0x2f, 0x88, 0x90, 0xf2, 0x6a, 0xa5, + 0x5a, 0x1e, 0xe6, 0x2a, 0xe9, 0x9b, 0x55, 0x5a, 0x59, 0x93, 0x09, 0x20, 0xe4, 0x90, 0xa2, 0xc5, + 0xec, 0xa2, 0x07, 0xfc, 0x35, 0xce, 0xef, 0xe8, 0xa6, 0x12, 0x2e, 0x91, 0xc2, 0xe5, 0x26, 0xdb, + 0x70, 0x6a, 0xc0, 0x11, 0xd8, 0xec, 0x6e, 0x0e, 0xd8, 0x7a, 0x25, 0x2c, 0x34, 0xa4, 0xfc, 0xac, + 0x95, 0x9a, 0x18, 0x16, 0x38, 0xe2, 0x24, 0x81, 0x8b, 0x29, 0x50, 0xd7, 0x2b, 0x9d, 0x24, 0x36, + 0x53, 0x48, 0xa1, 0x52, 0x0b, 0x3b, 0x49, 0x38, 0x04, 0x90, 0x3f, 0x9b, 0x68, 0xad, 0x78, 0xa5, + 0x01, 0xf2, 0xff, 0x38, 0xf9, 0x2b, 0x0d, 0xd9, 0x21, 0xd8, 0x3f, 0xe4, 0x32, 0x29, 0x5e, 0x6d, + 0x66, 0x13, 0x68, 0x2c, 0xac, 0xa2, 0xe4, 0xd9, 0x6d, 0x04, 0x52, 0x78, 0x58, 0xa9, 0xe4, 0xe7, + 0x5c, 0x22, 0xc5, 0xcb, 0x4d, 0xac, 0xe4, 0xc0, 0x11, 0x25, 0x87, 0x3b, 0x39, 0x50, 0x37, 0x2a, + 0x95, 0x9c, 0xed, 0x32, 0x5f, 0x39, 0x75, 0x48, 0x4b, 0xce, 0x21, 0x62, 0xb2, 0xba, 0x0e, 0x7b, + 0x19, 0xf0, 0xff, 0x95, 0x26, 0xab, 0xd3, 0xb0, 0x27, 0xc5, 0x49, 0xd6, 0xd9, 0x64, 0x75, 0x1d, + 0xf6, 0xf8, 0xf3, 0xfe, 0x1c, 0x9a, 0xe1, 0x98, 0xfd, 0x7f, 0x4c, 0xf4, 0xb4, 0x1f, 0x8e, 0xf4, + 0x17, 0xa1, 0xfd, 0xd5, 0xc2, 0xd4, 0xde, 0x61, 0xe3, 0x70, 0xc7, 0xfc, 0xfa, 0x34, 0x93, 0x0d, + 0x43, 0x1f, 0x07, 0x43, 0x2b, 0x8c, 0x87, 0xcd, 0x21, 0x09, 0xf8, 0xb0, 0x9c, 0xdf, 0x2a, 0x23, + 0x2f, 
0x29, 0xf9, 0x1f, 0xe4, 0x73, 0xf1, 0xf4, 0x66, 0xea, 0xc1, 0x71, 0xbb, 0xfd, 0xdb, 0xd4, + 0x93, 0x63, 0x08, 0xd9, 0x76, 0x13, 0x0b, 0x1e, 0xd9, 0xd3, 0xc5, 0x8e, 0xe5, 0xe4, 0x9e, 0x7f, + 0xe6, 0x3e, 0x97, 0x6d, 0x37, 0xb9, 0x14, 0x3e, 0x97, 0x17, 0x3b, 0x97, 0xc2, 0xe7, 0xef, 0xa9, + 0xa7, 0x60, 0x68, 0xb5, 0xda, 0x6e, 0xd2, 0x6a, 0x09, 0xaf, 0x56, 0xeb, 0x62, 0xa7, 0xd5, 0x12, + 0x7e, 0xbd, 0x59, 0x9e, 0xec, 0x87, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x40, 0x51, 0x6f, 0xf7, + 0x2f, 0x15, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_placeholder_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_placeholder_view.pb.go new file mode 100644 index 0000000..2f015c4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/feed_placeholder_view.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/feed_placeholder_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A feed placeholder view. +type FeedPlaceholderView struct { + // The resource name of the feed placeholder view. + // Feed placeholder view resource names have the form: + // + // `customers/{customer_id}/feedPlaceholderViews/{placeholder_type}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The placeholder type of the feed placeholder view. 
+ PlaceholderType enums.PlaceholderTypeEnum_PlaceholderType `protobuf:"varint,2,opt,name=placeholder_type,json=placeholderType,proto3,enum=google.ads.googleads.v1.enums.PlaceholderTypeEnum_PlaceholderType" json:"placeholder_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedPlaceholderView) Reset() { *m = FeedPlaceholderView{} } +func (m *FeedPlaceholderView) String() string { return proto.CompactTextString(m) } +func (*FeedPlaceholderView) ProtoMessage() {} +func (*FeedPlaceholderView) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_placeholder_view_1c94e4ff449c09ba, []int{0} +} +func (m *FeedPlaceholderView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedPlaceholderView.Unmarshal(m, b) +} +func (m *FeedPlaceholderView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedPlaceholderView.Marshal(b, m, deterministic) +} +func (dst *FeedPlaceholderView) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedPlaceholderView.Merge(dst, src) +} +func (m *FeedPlaceholderView) XXX_Size() int { + return xxx_messageInfo_FeedPlaceholderView.Size(m) +} +func (m *FeedPlaceholderView) XXX_DiscardUnknown() { + xxx_messageInfo_FeedPlaceholderView.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedPlaceholderView proto.InternalMessageInfo + +func (m *FeedPlaceholderView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *FeedPlaceholderView) GetPlaceholderType() enums.PlaceholderTypeEnum_PlaceholderType { + if m != nil { + return m.PlaceholderType + } + return enums.PlaceholderTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*FeedPlaceholderView)(nil), "google.ads.googleads.v1.resources.FeedPlaceholderView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/feed_placeholder_view.proto", fileDescriptor_feed_placeholder_view_1c94e4ff449c09ba) +} + +var fileDescriptor_feed_placeholder_view_1c94e4ff449c09ba = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xcd, 0x4a, 0x3b, 0x31, + 0x14, 0xc5, 0x99, 0xf9, 0xc3, 0x1f, 0x1c, 0xfc, 0xa2, 0x6e, 0x4a, 0x71, 0xd1, 0x2a, 0x85, 0xae, + 0x12, 0x46, 0x5d, 0x45, 0x5c, 0x4c, 0x41, 0x0b, 0x2e, 0xa4, 0x14, 0x99, 0x85, 0x0c, 0x94, 0xd8, + 0x5c, 0xe3, 0xc0, 0xe4, 0x83, 0xc9, 0xb4, 0xa5, 0x0f, 0xe0, 0x8b, 0xb8, 0xd3, 0x47, 0xf1, 0x51, + 0x7c, 0x0a, 0xe9, 0xc4, 0xc4, 0x5a, 0xac, 0xee, 0x0e, 0x37, 0xe7, 0x77, 0xcf, 0xbd, 0x37, 0xd1, + 0x05, 0x57, 0x8a, 0x17, 0x80, 0x29, 0x33, 0xd8, 0xca, 0xa5, 0x9a, 0xc5, 0xb8, 0x04, 0xa3, 0xa6, + 0xe5, 0x04, 0x0c, 0x7e, 0x00, 0x60, 0x63, 0x5d, 0xd0, 0x09, 0x3c, 0xaa, 0x82, 0x41, 0x39, 0x9e, + 0xe5, 0x30, 0x47, 0xba, 0x54, 0x95, 0x6a, 0x74, 0x2c, 0x83, 0x28, 0x33, 0xc8, 0xe3, 0x68, 0x16, + 0x23, 0x8f, 0xb7, 0xce, 0x36, 0x25, 0x80, 0x9c, 0x0a, 0x83, 0x57, 0x1b, 0x57, 0x0b, 0x0d, 0xb6, + 0x71, 0xeb, 0xd0, 0x51, 0x3a, 0xc7, 0x54, 0x4a, 0x55, 0xd1, 0x2a, 0x57, 0xd2, 0xd8, 0xd7, 0xa3, + 0x97, 0x20, 0x3a, 0xb8, 0x02, 0x60, 0xc3, 0x2f, 0x38, 0xcd, 0x61, 0xde, 0x38, 0x8e, 0x76, 0x5c, + 0xf0, 0x58, 0x52, 0x01, 0xcd, 0xa0, 0x1d, 0xf4, 0xb6, 0x46, 0xdb, 0xae, 0x78, 0x43, 0x05, 0x34, + 0x44, 0xb4, 0xbf, 0x1e, 0xda, 0x0c, 0xdb, 0x41, 0x6f, 0xf7, 0xa4, 0x8f, 0x36, 0xad, 0x53, 0xcf, + 0x8a, 0x56, 0xe2, 0x6e, 0x17, 0x1a, 0x2e, 0xe5, 0x54, 0xac, 0xd7, 0x46, 0x7b, 0xfa, 0x7b, 0xa1, + 0xff, 0x14, 0x46, 0xdd, 0x89, 0x12, 0xe8, 0xcf, 0x4b, 0xf5, 
0x9b, 0x3f, 0xac, 0x34, 0x5c, 0xee, + 0x3b, 0x0c, 0xee, 0xae, 0x3f, 0x71, 0xae, 0x0a, 0x2a, 0x39, 0x52, 0x25, 0xc7, 0x1c, 0x64, 0x7d, + 0x0d, 0x77, 0x55, 0x9d, 0x9b, 0x5f, 0xbe, 0xf1, 0xdc, 0xab, 0xe7, 0xf0, 0xdf, 0x20, 0x49, 0x5e, + 0xc3, 0xce, 0xc0, 0xb6, 0x4c, 0x98, 0x41, 0x56, 0x2e, 0x55, 0x1a, 0xa3, 0x91, 0x73, 0xbe, 0x39, + 0x4f, 0x96, 0x30, 0x93, 0x79, 0x4f, 0x96, 0xc6, 0x99, 0xf7, 0xbc, 0x87, 0x5d, 0xfb, 0x40, 0x48, + 0xc2, 0x0c, 0x21, 0xde, 0x45, 0x48, 0x1a, 0x13, 0xe2, 0x7d, 0xf7, 0xff, 0xeb, 0x61, 0x4f, 0x3f, + 0x02, 0x00, 0x00, 0xff, 0xff, 0xb5, 0xf2, 0xef, 0x8d, 0x72, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/gender_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/gender_view.pb.go new file mode 100644 index 0000000..6a01e78 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/gender_view.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/gender_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A gender view. +type GenderView struct { + // The resource name of the gender view. 
+ // Gender view resource names have the form: + // + // `customers/{customer_id}/genderViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenderView) Reset() { *m = GenderView{} } +func (m *GenderView) String() string { return proto.CompactTextString(m) } +func (*GenderView) ProtoMessage() {} +func (*GenderView) Descriptor() ([]byte, []int) { + return fileDescriptor_gender_view_a5fa3e0190fff0f4, []int{0} +} +func (m *GenderView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenderView.Unmarshal(m, b) +} +func (m *GenderView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenderView.Marshal(b, m, deterministic) +} +func (dst *GenderView) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenderView.Merge(dst, src) +} +func (m *GenderView) XXX_Size() int { + return xxx_messageInfo_GenderView.Size(m) +} +func (m *GenderView) XXX_DiscardUnknown() { + xxx_messageInfo_GenderView.DiscardUnknown(m) +} + +var xxx_messageInfo_GenderView proto.InternalMessageInfo + +func (m *GenderView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GenderView)(nil), "google.ads.googleads.v1.resources.GenderView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/gender_view.proto", fileDescriptor_gender_view_a5fa3e0190fff0f4) +} + +var fileDescriptor_gender_view_a5fa3e0190fff0f4 = []byte{ + // 262 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x31, 0x4b, 0xc4, 0x30, + 0x14, 0xc7, 0x69, 0x05, 0xc1, 0xa0, 0x08, 0x37, 0x89, 0x38, 0x78, 0xca, 0x81, 0x53, 0x42, 0xb8, + 0x2d, 0x4e, 0xb9, 0xa5, 0xe0, 0x20, 0xc7, 0x0d, 0x1d, 0xa4, 0x70, 0xc4, 0x4b, 0x08, 0x81, 0x6b, + 0x5e, 0xc9, 0xab, 0xbd, 0xef, 0xe3, 0xe8, 0x47, 0xf1, 0xa3, 0xf8, 0x19, 0x1c, 0xa4, 0x8d, 0x89, + 0x9b, 0xb7, 0xfd, 0x69, 0x7f, 0xff, 0xdf, 0x7b, 0x2f, 0x64, 0x69, 0x01, 0xec, 0xde, 0x30, 0xa5, + 0x91, 0xc5, 0x38, 0xa6, 0x81, 0xb3, 0x60, 0x10, 0xde, 0xc2, 0xce, 0x20, 0xb3, 0xc6, 0x6b, 0x13, + 0xb6, 0x83, 0x33, 0x07, 0xda, 0x05, 0xe8, 0x61, 0x36, 0x8f, 0x24, 0x55, 0x1a, 0x69, 0x2e, 0xd1, + 0x81, 0xd3, 0x5c, 0xba, 0xbe, 0x49, 0xde, 0xce, 0x31, 0xe5, 0x3d, 0xf4, 0xaa, 0x77, 0xe0, 0x31, + 0x0a, 0xee, 0x38, 0x21, 0xd5, 0x64, 0xad, 0x9d, 0x39, 0xcc, 0xee, 0xc9, 0x45, 0x2a, 0x6e, 0xbd, + 0x6a, 0xcd, 0x55, 0x71, 0x5b, 0x3c, 0x9c, 0x6d, 0xce, 0xd3, 0xc7, 0x67, 0xd5, 0x9a, 0xd5, 0x77, + 0x41, 0x16, 0x3b, 0x68, 0xe9, 0xd1, 0xd1, 0xab, 0xcb, 0x3f, 0xf5, 0x7a, 0x9c, 0xb6, 0x2e, 0x5e, + 0x9e, 0x7e, 0x5b, 0x16, 0xf6, 0xca, 0x5b, 0x0a, 0xc1, 0x8e, 0x57, 0x4d, 0xbb, 0xa4, 0xab, 0x3b, + 0x87, 0xff, 0x3c, 0xc2, 0x63, 0x4e, 0xef, 0xe5, 0x49, 0x25, 0xe5, 0x47, 0x39, 0xaf, 0xa2, 0x52, + 0x6a, 0xa4, 0x31, 0x8e, 0xa9, 0xe6, 0x74, 0x93, 0xc8, 0xcf, 0xc4, 0x34, 0x52, 0x63, 0x93, 0x99, + 0xa6, 0xe6, 0x4d, 0x66, 0xbe, 0xca, 0x45, 0xfc, 0x21, 0x84, 0xd4, 0x28, 0x44, 0xa6, 0x84, 0xa8, + 0xb9, 0x10, 0x99, 0x7b, 0x3d, 0x9d, 0x96, 0x5d, 0xfe, 0x04, 0x00, 0x00, 0xff, 0xff, 0xc3, 0x37, + 0x36, 0xcd, 0xb0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geo_target_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geo_target_constant.pb.go new file mode 
100644 index 0000000..64d5caf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geo_target_constant.pb.go @@ -0,0 +1,161 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/geo_target_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A geo target constant. +type GeoTargetConstant struct { + // The resource name of the geo target constant. + // Geo target constant resource names have the form: + // + // `geoTargetConstants/{geo_target_constant_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the geo target constant. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // Geo target constant English name. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The ISO-3166-1 alpha-2 country code that is associated with the target. + CountryCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Geo target constant target type. + TargetType *wrappers.StringValue `protobuf:"bytes,6,opt,name=target_type,json=targetType,proto3" json:"target_type,omitempty"` + // Geo target constant status. + Status enums.GeoTargetConstantStatusEnum_GeoTargetConstantStatus `protobuf:"varint,7,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.GeoTargetConstantStatusEnum_GeoTargetConstantStatus" json:"status,omitempty"` + // The fully qualified English name, consisting of the target's name and that + // of its parent and country. 
+ CanonicalName *wrappers.StringValue `protobuf:"bytes,8,opt,name=canonical_name,json=canonicalName,proto3" json:"canonical_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetConstant) Reset() { *m = GeoTargetConstant{} } +func (m *GeoTargetConstant) String() string { return proto.CompactTextString(m) } +func (*GeoTargetConstant) ProtoMessage() {} +func (*GeoTargetConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_45d3d633f73a7113, []int{0} +} +func (m *GeoTargetConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetConstant.Unmarshal(m, b) +} +func (m *GeoTargetConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetConstant.Marshal(b, m, deterministic) +} +func (dst *GeoTargetConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetConstant.Merge(dst, src) +} +func (m *GeoTargetConstant) XXX_Size() int { + return xxx_messageInfo_GeoTargetConstant.Size(m) +} +func (m *GeoTargetConstant) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetConstant proto.InternalMessageInfo + +func (m *GeoTargetConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *GeoTargetConstant) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *GeoTargetConstant) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *GeoTargetConstant) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +func (m *GeoTargetConstant) GetTargetType() *wrappers.StringValue { + if m != nil { + return m.TargetType + } + return nil +} + +func (m *GeoTargetConstant) GetStatus() enums.GeoTargetConstantStatusEnum_GeoTargetConstantStatus { + if m != nil { + return m.Status + } + return enums.GeoTargetConstantStatusEnum_UNSPECIFIED +} + +func (m *GeoTargetConstant) GetCanonicalName() *wrappers.StringValue { + if m != nil { + return m.CanonicalName + } + return nil +} + +func init() { + proto.RegisterType((*GeoTargetConstant)(nil), "google.ads.googleads.v1.resources.GeoTargetConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/geo_target_constant.proto", fileDescriptor_geo_target_constant_45d3d633f73a7113) +} + +var fileDescriptor_geo_target_constant_45d3d633f73a7113 = []byte{ + // 456 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x41, 0x6b, 0xd4, 0x40, + 0x14, 0xc7, 0x49, 0xb6, 0xae, 0x3a, 0xdb, 0x16, 0xcc, 0x41, 0x42, 0x2d, 0xb2, 0x55, 0x0a, 0x0b, + 0xc2, 0xc4, 0x54, 0xf1, 0x90, 0xa2, 0x92, 0x2e, 0xb2, 0xe8, 0x41, 0x4a, 0x5a, 0xf6, 0x20, 0x0b, + 0x61, 0x9a, 0x79, 0x0e, 0x91, 0xcd, 0xbc, 0x30, 0x33, 0xa9, 0xec, 0xd9, 0x6f, 0xe2, 0xd1, 0x4f, + 0x22, 0x7e, 0x14, 0x3f, 0x85, 0xec, 0x4c, 0x92, 0xcb, 0xba, 0xba, 0xb7, 0x97, 0xbc, 0xff, 0xef, + 0xff, 0xfe, 0x79, 0x33, 0x21, 0xe7, 0x02, 0x51, 0x2c, 0x21, 0x62, 0x5c, 0x47, 0xae, 0x5c, 0x57, + 0xb7, 0x71, 0xa4, 0x40, 0x63, 0xa3, 0x0a, 0xd0, 0x91, 0x00, 0xcc, 0x0d, 0x53, 0x02, 0x4c, 0x5e, + 0xa0, 0xd4, 0x86, 0x49, 0x43, 0x6b, 0x85, 0x06, 0x83, 0x13, 0x47, 0x50, 0xc6, 0x35, 0xed, 0x61, + 0x7a, 0x1b, 0xd3, 0x1e, 0x3e, 0x7a, 0xb3, 0xcd, 0x1f, 0x64, 0x53, 0xfd, 0xd5, 0x3b, 0xd7, 0x86, + 0x99, 0x46, 0xbb, 0x11, 0x47, 0x8f, 0x5b, 0xde, 0x3e, 0xdd, 0x34, 
0x9f, 0xa3, 0xaf, 0x8a, 0xd5, + 0x35, 0xa8, 0xae, 0x7f, 0xdc, 0xf9, 0xd7, 0x65, 0xc4, 0xa4, 0x44, 0xc3, 0x4c, 0x89, 0xb2, 0xed, + 0x3e, 0xf9, 0x39, 0x20, 0x0f, 0x66, 0x80, 0xd7, 0x76, 0xc2, 0xb4, 0x1d, 0x10, 0x3c, 0x25, 0x07, + 0x5d, 0xc0, 0x5c, 0xb2, 0x0a, 0x42, 0x6f, 0xec, 0x4d, 0xee, 0x67, 0xfb, 0xdd, 0xcb, 0x8f, 0xac, + 0x82, 0xe0, 0x19, 0xf1, 0x4b, 0x1e, 0x0e, 0xc6, 0xde, 0x64, 0x74, 0xf6, 0xa8, 0xfd, 0x3a, 0xda, + 0xa5, 0xa0, 0xef, 0xa5, 0x79, 0xf5, 0x72, 0xce, 0x96, 0x0d, 0x64, 0x7e, 0xc9, 0x83, 0xe7, 0x64, + 0xcf, 0x1a, 0xed, 0x59, 0xf9, 0xf1, 0x86, 0xfc, 0xca, 0xa8, 0x52, 0x0a, 0xa7, 0xb7, 0xca, 0xe0, + 0x2d, 0xd9, 0x2f, 0xb0, 0x91, 0x46, 0xad, 0xf2, 0x02, 0x39, 0x84, 0x77, 0x76, 0x20, 0x47, 0x2d, + 0x31, 0x45, 0x0e, 0xc1, 0x6b, 0x32, 0x6a, 0x17, 0x67, 0x56, 0x35, 0x84, 0xc3, 0x1d, 0x78, 0xe2, + 0x80, 0xeb, 0x55, 0x0d, 0xc1, 0x17, 0x32, 0x74, 0x7b, 0x0e, 0xef, 0x8e, 0xbd, 0xc9, 0xe1, 0x59, + 0x46, 0xb7, 0x9d, 0xa5, 0x3d, 0x28, 0xba, 0xb1, 0xc5, 0x2b, 0x4b, 0xbf, 0x93, 0x4d, 0xb5, 0xad, + 0x97, 0xb5, 0x13, 0x82, 0x29, 0x39, 0x2c, 0x98, 0x44, 0x59, 0x16, 0x6c, 0xe9, 0x16, 0x7e, 0x6f, + 0x87, 0xb4, 0x07, 0x3d, 0xb3, 0x3e, 0x8f, 0x8b, 0x6f, 0x3e, 0x39, 0x2d, 0xb0, 0xa2, 0xff, 0xbd, + 0x72, 0x17, 0x0f, 0x37, 0xf2, 0x5c, 0xae, 0xfd, 0x2f, 0xbd, 0x4f, 0x1f, 0x5a, 0x58, 0xe0, 0x92, + 0x49, 0x41, 0x51, 0x89, 0x48, 0x80, 0xb4, 0xd3, 0xbb, 0xcb, 0x59, 0x97, 0xfa, 0x1f, 0xff, 0xc2, + 0x79, 0x5f, 0x7d, 0xf7, 0x07, 0xb3, 0x34, 0xfd, 0xe1, 0x9f, 0xcc, 0x9c, 0x65, 0xca, 0x35, 0x75, + 0xe5, 0xba, 0x9a, 0xc7, 0x34, 0xeb, 0x94, 0xbf, 0x3a, 0xcd, 0x22, 0xe5, 0x7a, 0xd1, 0x6b, 0x16, + 0xf3, 0x78, 0xd1, 0x6b, 0x7e, 0xfb, 0xa7, 0xae, 0x91, 0x24, 0x29, 0xd7, 0x49, 0xd2, 0xab, 0x92, + 0x64, 0x1e, 0x27, 0x49, 0xaf, 0xbb, 0x19, 0xda, 0xb0, 0x2f, 0xfe, 0x04, 0x00, 0x00, 0xff, 0xff, + 0xb9, 0x7f, 0x9c, 0x83, 0xb7, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geographic_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geographic_view.pb.go new file mode 100644 index 0000000..f450a2b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/geographic_view.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/geographic_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A geographic view. +// +// Geographic View includes all metrics aggregated at the country level, +// one row per country. It reports metrics at either actual physical location of +// the user or an area of interest. If other segment fields are used, you may +// get more than one row per country. 
+type GeographicView struct { + // The resource name of the geographic view. + // Geographic view resource names have the form: + // + // + // `customers/{customer_id}/geographicViews/{country_criterion_id}~{location_type}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // CriterionId for the geo target for a country. + CountryGeoTargetConstant *wrappers.StringValue `protobuf:"bytes,2,opt,name=country_geo_target_constant,json=countryGeoTargetConstant,proto3" json:"country_geo_target_constant,omitempty"` + // Type of the geo targeting of the campaign. + LocationType enums.GeoTargetingTypeEnum_GeoTargetingType `protobuf:"varint,3,opt,name=location_type,json=locationType,proto3,enum=google.ads.googleads.v1.enums.GeoTargetingTypeEnum_GeoTargetingType" json:"location_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeographicView) Reset() { *m = GeographicView{} } +func (m *GeographicView) String() string { return proto.CompactTextString(m) } +func (*GeographicView) ProtoMessage() {} +func (*GeographicView) Descriptor() ([]byte, []int) { + return fileDescriptor_geographic_view_6565bde3f2de1438, []int{0} +} +func (m *GeographicView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeographicView.Unmarshal(m, b) +} +func (m *GeographicView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeographicView.Marshal(b, m, deterministic) +} +func (dst *GeographicView) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeographicView.Merge(dst, src) +} +func (m *GeographicView) XXX_Size() int { + return xxx_messageInfo_GeographicView.Size(m) +} +func (m *GeographicView) XXX_DiscardUnknown() { + xxx_messageInfo_GeographicView.DiscardUnknown(m) +} + +var xxx_messageInfo_GeographicView proto.InternalMessageInfo + +func (m *GeographicView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *GeographicView) GetCountryGeoTargetConstant() *wrappers.StringValue { + if m != nil { + return m.CountryGeoTargetConstant + } + return nil +} + +func (m *GeographicView) GetLocationType() enums.GeoTargetingTypeEnum_GeoTargetingType { + if m != nil { + return m.LocationType + } + return enums.GeoTargetingTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*GeographicView)(nil), "google.ads.googleads.v1.resources.GeographicView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/geographic_view.proto", fileDescriptor_geographic_view_6565bde3f2de1438) +} + +var fileDescriptor_geographic_view_6565bde3f2de1438 = []byte{ + // 402 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x41, 0x8b, 0xd4, 0x30, + 0x18, 0xa5, 0x5d, 0x10, 0xac, 0xbb, 0x7b, 0xa8, 0x97, 0xb2, 0x2e, 0x32, 0xab, 0x2c, 0xcc, 0x29, + 0xa5, 0x2b, 0x28, 0xc4, 0x53, 0x57, 0xa5, 0xe0, 0x41, 0x96, 0x3a, 0xf4, 0xa0, 0x85, 0x92, 0x69, + 0x3f, 0x63, 0xa0, 0x4d, 0x42, 0x92, 0xce, 0x30, 0x37, 0x7f, 0x8b, 0x47, 0x7f, 0x8a, 0x3f, 0xc5, + 0xff, 0x20, 0x48, 0xd3, 0x26, 0x22, 0x32, 0x7a, 0x7b, 0xc9, 0xf7, 0xde, 0xe3, 0xbd, 0x2f, 0x89, + 0x5e, 0x50, 0x21, 0x68, 0x0f, 0x29, 0xe9, 0x74, 0x3a, 0xc3, 0x09, 0xed, 0xb2, 0x54, 0x81, 0x16, + 0xa3, 0x6a, 0x41, 0xa7, 0x14, 0x04, 0x55, 0x44, 0x7e, 0x66, 0x6d, 0xb3, 0x63, 0xb0, 0x47, 0x52, + 0x09, 0x23, 0xe2, 0xab, 0x99, 0x8d, 0x48, 0xa7, 0x91, 0x17, 0xa2, 0x5d, 0x86, 0xbc, 0xf0, 
0xe2, + 0xf9, 0x31, 0x6f, 0xe0, 0xe3, 0x60, 0x7d, 0x1b, 0x43, 0x14, 0x05, 0xc3, 0x38, 0x6d, 0xcc, 0x41, + 0xc2, 0x6c, 0x7d, 0xf1, 0x78, 0xd1, 0xd9, 0xd3, 0x76, 0xfc, 0x94, 0xee, 0x15, 0x91, 0x12, 0x94, + 0x5e, 0xe6, 0x97, 0xce, 0x57, 0xb2, 0x94, 0x70, 0x2e, 0x0c, 0x31, 0x4c, 0xf0, 0x65, 0xfa, 0xe4, + 0x67, 0x10, 0x9d, 0x17, 0x3e, 0x72, 0xc5, 0x60, 0x1f, 0x3f, 0x8d, 0xce, 0x5c, 0xaa, 0x86, 0x93, + 0x01, 0x92, 0x60, 0x15, 0xac, 0xef, 0x97, 0xa7, 0xee, 0xf2, 0x1d, 0x19, 0x20, 0xfe, 0x18, 0x3d, + 0x6a, 0xc5, 0xc8, 0x8d, 0x3a, 0x34, 0xbf, 0x93, 0x35, 0xad, 0xe0, 0xda, 0x10, 0x6e, 0x92, 0x70, + 0x15, 0xac, 0x1f, 0xdc, 0x5c, 0x2e, 0x5d, 0x91, 0xcb, 0x86, 0xde, 0x1b, 0xc5, 0x38, 0xad, 0x48, + 0x3f, 0x42, 0x99, 0x2c, 0x06, 0x05, 0x88, 0x8d, 0x95, 0xbf, 0x5a, 0xd4, 0x31, 0x8b, 0xce, 0x7a, + 0xd1, 0xda, 0x9c, 0xb6, 0x69, 0x72, 0xb2, 0x0a, 0xd6, 0xe7, 0x37, 0xaf, 0xd1, 0xb1, 0x2d, 0xda, + 0x15, 0x21, 0x6f, 0xc4, 0x38, 0xdd, 0x1c, 0x24, 0xbc, 0xe1, 0xe3, 0xf0, 0xd7, 0x65, 0x79, 0xea, + 0xac, 0xa7, 0xd3, 0xed, 0x97, 0x30, 0xba, 0x6e, 0xc5, 0x80, 0xfe, 0xfb, 0x3e, 0xb7, 0x0f, 0xff, + 0x5c, 0xd3, 0xdd, 0x54, 0xe9, 0x2e, 0xf8, 0xf0, 0x76, 0x51, 0x52, 0xd1, 0x13, 0x4e, 0x91, 0x50, + 0x34, 0xa5, 0xc0, 0x6d, 0x61, 0xf7, 0x8c, 0x92, 0xe9, 0x7f, 0xfc, 0x98, 0x97, 0x1e, 0x7d, 0x0d, + 0x4f, 0x8a, 0x3c, 0xff, 0x16, 0x5e, 0x15, 0xb3, 0x65, 0xde, 0x69, 0x34, 0xc3, 0x09, 0x55, 0x19, + 0x2a, 0x1d, 0xf3, 0xbb, 0xe3, 0xd4, 0x79, 0xa7, 0x6b, 0xcf, 0xa9, 0xab, 0xac, 0xf6, 0x9c, 0x1f, + 0xe1, 0xf5, 0x3c, 0xc0, 0x38, 0xef, 0x34, 0xc6, 0x9e, 0x85, 0x71, 0x95, 0x61, 0xec, 0x79, 0xdb, + 0x7b, 0x36, 0xec, 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf6, 0x75, 0x6e, 0x6b, 0xdd, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/google_ads_field.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/google_ads_field.pb.go new file mode 100644 index 0000000..69e1904 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/google_ads_field.pb.go @@ -0,0 +1,256 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/google_ads_field.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A field or resource (artifact) used by GoogleAdsService. +type GoogleAdsField struct { + // The resource name of the artifact. + // Artifact resource names have the form: + // + // `googleAdsFields/{name}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The name of the artifact. + Name *wrappers.StringValue `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The category of the artifact. 
+ Category enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory `protobuf:"varint,3,opt,name=category,proto3,enum=google.ads.googleads.v1.enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory" json:"category,omitempty"` + // Whether the artifact can be used in a SELECT clause in search + // queries. + Selectable *wrappers.BoolValue `protobuf:"bytes,4,opt,name=selectable,proto3" json:"selectable,omitempty"` + // Whether the artifact can be used in a WHERE clause in search + // queries. + Filterable *wrappers.BoolValue `protobuf:"bytes,5,opt,name=filterable,proto3" json:"filterable,omitempty"` + // Whether the artifact can be used in a ORDER BY clause in search + // queries. + Sortable *wrappers.BoolValue `protobuf:"bytes,6,opt,name=sortable,proto3" json:"sortable,omitempty"` + // The names of all resources, segments, and metrics that are selectable with + // the described artifact. + SelectableWith []*wrappers.StringValue `protobuf:"bytes,7,rep,name=selectable_with,json=selectableWith,proto3" json:"selectable_with,omitempty"` + // The names of all resources that are selectable with the described + // artifact. Fields from these resources do not segment metrics when included + // in search queries. + // + // This field is only set for artifacts whose category is RESOURCE. + AttributeResources []*wrappers.StringValue `protobuf:"bytes,8,rep,name=attribute_resources,json=attributeResources,proto3" json:"attribute_resources,omitempty"` + // At and beyond version V1 this field lists the names of all metrics that are + // selectable with the described artifact when it is used in the FROM clause. + // It is only set for artifacts whose category is RESOURCE. + // + // Before version V1 this field lists the names of all metrics that are + // selectable with the described artifact. It is only set for artifacts whose + // category is either RESOURCE or SEGMENT + Metrics []*wrappers.StringValue `protobuf:"bytes,9,rep,name=metrics,proto3" json:"metrics,omitempty"` + // At and beyond version V1 this field lists the names of all artifacts, + // whether a segment or another resource, that segment metrics when included + // in search queries and when the described artifact is used in the FROM + // clause. It is only set for artifacts whose category is RESOURCE. + // + // Before version V1 this field lists the names of all artifacts, whether a + // segment or another resource, that segment metrics when included in search + // queries. It is only set for artifacts of category RESOURCE, SEGMENT or + // METRIC. + Segments []*wrappers.StringValue `protobuf:"bytes,10,rep,name=segments,proto3" json:"segments,omitempty"` + // Values the artifact can assume if it is a field of type ENUM. + // + // This field is only set for artifacts of category SEGMENT or ATTRIBUTE. + EnumValues []*wrappers.StringValue `protobuf:"bytes,11,rep,name=enum_values,json=enumValues,proto3" json:"enum_values,omitempty"` + // This field determines the operators that can be used with the artifact + // in WHERE clauses. + DataType enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType `protobuf:"varint,12,opt,name=data_type,json=dataType,proto3,enum=google.ads.googleads.v1.enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType" json:"data_type,omitempty"` + // The URL of proto describing the artifact's data type. + TypeUrl *wrappers.StringValue `protobuf:"bytes,13,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + // Whether the field artifact is repeated. 
+ IsRepeated *wrappers.BoolValue `protobuf:"bytes,14,opt,name=is_repeated,json=isRepeated,proto3" json:"is_repeated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsField) Reset() { *m = GoogleAdsField{} } +func (m *GoogleAdsField) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsField) ProtoMessage() {} +func (*GoogleAdsField) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_cf081ba62a41af56, []int{0} +} +func (m *GoogleAdsField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsField.Unmarshal(m, b) +} +func (m *GoogleAdsField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsField.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsField) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsField.Merge(dst, src) +} +func (m *GoogleAdsField) XXX_Size() int { + return xxx_messageInfo_GoogleAdsField.Size(m) +} +func (m *GoogleAdsField) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsField.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsField proto.InternalMessageInfo + +func (m *GoogleAdsField) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *GoogleAdsField) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *GoogleAdsField) GetCategory() enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory { + if m != nil { + return m.Category + } + return enums.GoogleAdsFieldCategoryEnum_UNSPECIFIED +} + +func (m *GoogleAdsField) GetSelectable() *wrappers.BoolValue { + if m != nil { + return m.Selectable + } + return nil +} + +func (m *GoogleAdsField) GetFilterable() *wrappers.BoolValue { + if m != nil { + return m.Filterable + } + return nil +} + +func (m *GoogleAdsField) GetSortable() *wrappers.BoolValue { + if m != nil { + return m.Sortable + } + return nil +} + +func (m *GoogleAdsField) GetSelectableWith() []*wrappers.StringValue { + if m != nil { + return m.SelectableWith + } + return nil +} + +func (m *GoogleAdsField) GetAttributeResources() []*wrappers.StringValue { + if m != nil { + return m.AttributeResources + } + return nil +} + +func (m *GoogleAdsField) GetMetrics() []*wrappers.StringValue { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *GoogleAdsField) GetSegments() []*wrappers.StringValue { + if m != nil { + return m.Segments + } + return nil +} + +func (m *GoogleAdsField) GetEnumValues() []*wrappers.StringValue { + if m != nil { + return m.EnumValues + } + return nil +} + +func (m *GoogleAdsField) GetDataType() enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType { + if m != nil { + return m.DataType + } + return enums.GoogleAdsFieldDataTypeEnum_UNSPECIFIED +} + +func (m *GoogleAdsField) GetTypeUrl() *wrappers.StringValue { + if m != nil { + return m.TypeUrl + } + return nil +} + +func (m *GoogleAdsField) GetIsRepeated() *wrappers.BoolValue { + if m != nil { + return m.IsRepeated + } + return nil +} + +func init() { + proto.RegisterType((*GoogleAdsField)(nil), "google.ads.googleads.v1.resources.GoogleAdsField") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/google_ads_field.proto", fileDescriptor_google_ads_field_cf081ba62a41af56) +} + +var fileDescriptor_google_ads_field_cf081ba62a41af56 = []byte{ + // 581 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x94, 0xdd, 
0x6a, 0x13, 0x41, + 0x14, 0xc7, 0x49, 0x52, 0x9b, 0xcd, 0xa4, 0x8d, 0x30, 0xbd, 0x59, 0x42, 0x91, 0x54, 0x29, 0xe4, + 0x6a, 0xd6, 0x54, 0xa8, 0x65, 0x4b, 0x85, 0x8d, 0xd6, 0x82, 0xa0, 0x84, 0x55, 0x23, 0x48, 0x60, + 0x99, 0x64, 0x4f, 0xb6, 0x03, 0xbb, 0x3b, 0xcb, 0xcc, 0x6c, 0x4a, 0xee, 0x7c, 0x16, 0x2f, 0xbd, + 0xf3, 0x35, 0x7c, 0x14, 0x9f, 0x42, 0xf6, 0x6b, 0xd2, 0x52, 0x63, 0x62, 0xef, 0x4e, 0xce, 0xf9, + 0xff, 0xce, 0x57, 0xce, 0x0e, 0x3a, 0x0b, 0x38, 0x0f, 0x42, 0xb0, 0xa8, 0x2f, 0xad, 0xc2, 0xcc, + 0xac, 0xc5, 0xc0, 0x12, 0x20, 0x79, 0x2a, 0x66, 0x50, 0xb9, 0x3d, 0xea, 0x4b, 0x6f, 0xce, 0x20, + 0xf4, 0x49, 0x22, 0xb8, 0xe2, 0xf8, 0xa8, 0xf0, 0x13, 0xea, 0x4b, 0xa2, 0x49, 0xb2, 0x18, 0x10, + 0x4d, 0x76, 0x2f, 0xd6, 0x25, 0x87, 0x38, 0x8d, 0xee, 0x27, 0xf6, 0x66, 0x54, 0x41, 0xc0, 0xc5, + 0xb2, 0xa8, 0xd0, 0x7d, 0xf5, 0x9f, 0xb8, 0x4f, 0x15, 0xf5, 0xd4, 0x32, 0x81, 0x92, 0x7f, 0x52, + 0xf2, 0xf9, 0xaf, 0x69, 0x3a, 0xb7, 0x6e, 0x04, 0x4d, 0x12, 0x10, 0xb2, 0x8c, 0x1f, 0x56, 0xf9, + 0x13, 0x66, 0xd1, 0x38, 0xe6, 0x8a, 0x2a, 0xc6, 0xe3, 0x32, 0xfa, 0xf4, 0x67, 0x13, 0x75, 0xae, + 0x72, 0x81, 0xe3, 0xcb, 0xb7, 0x59, 0x01, 0xfc, 0x0c, 0xed, 0x57, 0xc3, 0x79, 0x31, 0x8d, 0xc0, + 0xac, 0xf5, 0x6a, 0xfd, 0x96, 0xbb, 0x57, 0x39, 0x3f, 0xd0, 0x08, 0xf0, 0x73, 0xb4, 0x93, 0xc7, + 0xea, 0xbd, 0x5a, 0xbf, 0x7d, 0x72, 0x58, 0xee, 0x86, 0x54, 0x4d, 0x90, 0x8f, 0x4a, 0xb0, 0x38, + 0x18, 0xd3, 0x30, 0x05, 0x37, 0x57, 0xe2, 0x10, 0x19, 0xd5, 0xe4, 0x66, 0xa3, 0x57, 0xeb, 0x77, + 0x4e, 0x46, 0x64, 0xdd, 0x72, 0xf3, 0xd1, 0xc9, 0xdd, 0xbe, 0x5e, 0x97, 0xf0, 0x65, 0x9c, 0x46, + 0x6b, 0x42, 0xae, 0xae, 0x80, 0x6d, 0x84, 0x24, 0x84, 0x30, 0x53, 0x74, 0x1a, 0x82, 0xb9, 0x93, + 0x77, 0xd9, 0xbd, 0xd7, 0xe5, 0x90, 0xf3, 0xb0, 0xe8, 0xf1, 0x96, 0x3a, 0x63, 0xe7, 0x2c, 0x54, + 0x20, 0x72, 0xf6, 0xd1, 0x66, 0x76, 0xa5, 0xc6, 0xa7, 0xc8, 0x90, 0x5c, 0x14, 0x55, 0x77, 0x37, + 0x92, 0x5a, 0x8b, 0x2f, 0xd1, 0xe3, 0x55, 0x07, 0xde, 0x0d, 0x53, 0xd7, 0x66, 0xb3, 0xd7, 0xd8, + 0xb8, 0xda, 0xce, 0x0a, 0xfa, 0xc2, 0xd4, 0x35, 0x7e, 0x8f, 0x0e, 0xa8, 0x52, 0x82, 0x4d, 0x53, + 0x05, 0x9e, 0x3e, 0x51, 0xd3, 0xd8, 0x22, 0x15, 0xd6, 0xa0, 0x5b, 0x71, 0xf8, 0x14, 0x35, 0x23, + 0x50, 0x82, 0xcd, 0xa4, 0xd9, 0xda, 0x22, 0x45, 0x25, 0xc6, 0x67, 0xc8, 0x90, 0x10, 0x44, 0x10, + 0x2b, 0x69, 0xa2, 0x2d, 0x40, 0xad, 0xc6, 0x17, 0xa8, 0x9d, 0xfd, 0xf9, 0xde, 0x22, 0xf3, 0x4b, + 0xb3, 0xbd, 0x05, 0x8c, 0x32, 0x20, 0x37, 0x25, 0x8e, 0x50, 0x4b, 0x7f, 0x1f, 0xe6, 0xde, 0x03, + 0xae, 0xec, 0x0d, 0x55, 0xf4, 0xd3, 0x32, 0x81, 0xbf, 0x5c, 0x59, 0x15, 0x72, 0x0d, 0xbf, 0xb4, + 0xf0, 0x4b, 0x64, 0x64, 0x95, 0xbc, 0x54, 0x84, 0xe6, 0xfe, 0x16, 0x5f, 0x42, 0x33, 0x53, 0x7f, + 0x16, 0x21, 0x3e, 0x47, 0x6d, 0x26, 0x3d, 0x01, 0x09, 0x50, 0x05, 0xbe, 0xd9, 0xd9, 0x7c, 0x63, + 0x4c, 0xba, 0xa5, 0x7a, 0xf8, 0xad, 0x8e, 0x8e, 0x67, 0x3c, 0x22, 0x1b, 0x9f, 0xa6, 0xe1, 0xc1, + 0xdd, 0x09, 0x46, 0x59, 0xde, 0x51, 0xed, 0xeb, 0xbb, 0x92, 0x0c, 0x78, 0x48, 0xe3, 0x80, 0x70, + 0x11, 0x58, 0x01, 0xc4, 0x79, 0xd5, 0xea, 0x09, 0x4a, 0x98, 0xfc, 0xc7, 0x6b, 0x79, 0xae, 0xad, + 0xef, 0xf5, 0xc6, 0x95, 0xe3, 0xfc, 0xa8, 0x1f, 0x15, 0x95, 0x88, 0xe3, 0xdf, 0xda, 0x28, 0x19, + 0x0f, 0x88, 0x3e, 0xa6, 0x5f, 0x95, 0x66, 0xe2, 0xf8, 0x72, 0xa2, 0x35, 0x93, 0xf1, 0x60, 0xa2, + 0x35, 0xbf, 0xeb, 0xc7, 0x45, 0xc0, 0xb6, 0x1d, 0x5f, 0xda, 0xb6, 0x56, 0xd9, 0xf6, 0x78, 0x60, + 0xdb, 0x5a, 0x37, 0xdd, 0xcd, 0x9b, 0x7d, 0xf1, 0x27, 0x00, 0x00, 0xff, 0xff, 0xf4, 0xab, 0x95, + 0x0d, 0xd9, 0x05, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/group_placement_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/group_placement_view.pb.go new file mode 100644 index 0000000..7836f26 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/group_placement_view.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/group_placement_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A group placement view. +type GroupPlacementView struct { + // The resource name of the group placement view. + // Group placement view resource names have the form: + // + // + // `customers/{customer_id}/groupPlacementViews/{ad_group_id}~{base64_placement}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The automatic placement string at group level, e. g. web domain, mobile + // app ID, or a YouTube channel ID. + Placement *wrappers.StringValue `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"` + // Domain name for websites and YouTube channel name for YouTube channels. + DisplayName *wrappers.StringValue `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // URL of the group placement, e.g. domain, link to the mobile application in + // app store, or a YouTube channel URL. + TargetUrl *wrappers.StringValue `protobuf:"bytes,4,opt,name=target_url,json=targetUrl,proto3" json:"target_url,omitempty"` + // Type of the placement, e.g. Website, YouTube Channel, Mobile Application. 
+ PlacementType enums.PlacementTypeEnum_PlacementType `protobuf:"varint,5,opt,name=placement_type,json=placementType,proto3,enum=google.ads.googleads.v1.enums.PlacementTypeEnum_PlacementType" json:"placement_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupPlacementView) Reset() { *m = GroupPlacementView{} } +func (m *GroupPlacementView) String() string { return proto.CompactTextString(m) } +func (*GroupPlacementView) ProtoMessage() {} +func (*GroupPlacementView) Descriptor() ([]byte, []int) { + return fileDescriptor_group_placement_view_0b45087a197a7439, []int{0} +} +func (m *GroupPlacementView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupPlacementView.Unmarshal(m, b) +} +func (m *GroupPlacementView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupPlacementView.Marshal(b, m, deterministic) +} +func (dst *GroupPlacementView) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupPlacementView.Merge(dst, src) +} +func (m *GroupPlacementView) XXX_Size() int { + return xxx_messageInfo_GroupPlacementView.Size(m) +} +func (m *GroupPlacementView) XXX_DiscardUnknown() { + xxx_messageInfo_GroupPlacementView.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupPlacementView proto.InternalMessageInfo + +func (m *GroupPlacementView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *GroupPlacementView) GetPlacement() *wrappers.StringValue { + if m != nil { + return m.Placement + } + return nil +} + +func (m *GroupPlacementView) GetDisplayName() *wrappers.StringValue { + if m != nil { + return m.DisplayName + } + return nil +} + +func (m *GroupPlacementView) GetTargetUrl() *wrappers.StringValue { + if m != nil { + return m.TargetUrl + } + return nil +} + +func (m *GroupPlacementView) GetPlacementType() enums.PlacementTypeEnum_PlacementType { + if m != nil { + return m.PlacementType + } + return enums.PlacementTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*GroupPlacementView)(nil), "google.ads.googleads.v1.resources.GroupPlacementView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/group_placement_view.proto", fileDescriptor_group_placement_view_0b45087a197a7439) +} + +var fileDescriptor_group_placement_view_0b45087a197a7439 = []byte{ + // 415 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xdd, 0x8a, 0xd4, 0x30, + 0x14, 0xa6, 0x5d, 0x15, 0x36, 0xfb, 0x73, 0xd1, 0x1b, 0x87, 0x65, 0x91, 0x59, 0x65, 0x61, 0xae, + 0x12, 0x3a, 0xde, 0x65, 0x45, 0xe9, 0x82, 0x0c, 0x78, 0x21, 0x43, 0xd5, 0x5e, 0x48, 0xa1, 0x64, + 0xa7, 0xc7, 0x50, 0x68, 0x93, 0x90, 0xa4, 0x33, 0xcc, 0xbd, 0x4f, 0xe2, 0xa5, 0x8f, 0xe2, 0x23, + 0xf8, 0x08, 0x3e, 0x85, 0xb4, 0x69, 0xb2, 0x2e, 0xb2, 0xba, 0x77, 0x5f, 0x72, 0xbe, 0xef, 0x3b, + 0xdf, 0xc9, 0x09, 0x7a, 0xc5, 0xa5, 0xe4, 0x2d, 0x10, 0x56, 0x1b, 0xe2, 0xe0, 0x80, 0xb6, 0x29, + 0xd1, 0x60, 0x64, 0xaf, 0x37, 0x60, 0x08, 0xd7, 0xb2, 0x57, 0x95, 0x6a, 0xd9, 0x06, 0x3a, 0x10, + 0xb6, 0xda, 0x36, 0xb0, 0xc3, 0x4a, 0x4b, 0x2b, 0x93, 0x0b, 0x27, 0xc1, 0xac, 0x36, 0x38, 0xa8, + 0xf1, 0x36, 0xc5, 0x41, 0x7d, 0xb6, 0xbc, 0xaf, 0x01, 0x88, 0xbe, 0x33, 0xe4, 0xd6, 0xd6, 0xee, + 0x15, 0x38, 0xdb, 0xb3, 0x67, 0x93, 0x66, 0x3c, 0xdd, 0xf4, 0x5f, 0xc8, 0x4e, 0x33, 0xa5, 0x40, + 0x9b, 0xa9, 0x7e, 0xee, 0x3d, 0x55, 0x43, 0x98, 0x10, 0xd2, 0x32, 0xdb, 0x48, 0x31, 0x55, 0x9f, + 0xff, 0x8c, 
0x51, 0xb2, 0x1a, 0x32, 0xaf, 0xbd, 0x77, 0xd1, 0xc0, 0x2e, 0x79, 0x81, 0x4e, 0x7c, + 0xaa, 0x4a, 0xb0, 0x0e, 0x66, 0xd1, 0x3c, 0x5a, 0x1c, 0xe6, 0xc7, 0xfe, 0xf2, 0x3d, 0xeb, 0x20, + 0xa1, 0xe8, 0x30, 0x24, 0x9a, 0xc5, 0xf3, 0x68, 0x71, 0xb4, 0x3c, 0x9f, 0x26, 0xc3, 0x3e, 0x0d, + 0xfe, 0x60, 0x75, 0x23, 0x78, 0xc1, 0xda, 0x1e, 0xf2, 0x5b, 0x7a, 0xf2, 0x06, 0x1d, 0xd7, 0x8d, + 0x51, 0x2d, 0xdb, 0x3b, 0xff, 0x83, 0x07, 0xc8, 0x8f, 0x26, 0xc5, 0xd8, 0xfc, 0x0a, 0x21, 0xcb, + 0x34, 0x07, 0x5b, 0xf5, 0xba, 0x9d, 0x3d, 0x7a, 0x48, 0x77, 0xc7, 0xff, 0xa4, 0xdb, 0x04, 0xd0, + 0xe9, 0xdd, 0xb7, 0x9c, 0x3d, 0x9e, 0x47, 0x8b, 0xd3, 0xe5, 0x6b, 0x7c, 0xdf, 0x8e, 0xc6, 0x05, + 0xe0, 0xf0, 0x48, 0x1f, 0xf7, 0x0a, 0xde, 0x8a, 0xbe, 0xbb, 0x7b, 0x93, 0x9f, 0xa8, 0x3f, 0x8f, + 0xd7, 0x5f, 0x63, 0x74, 0xb9, 0x91, 0x1d, 0xfe, 0xef, 0xe2, 0xaf, 0x9f, 0xfe, 0xbd, 0x83, 0xf5, + 0x30, 0xc3, 0x3a, 0xfa, 0xfc, 0x6e, 0x52, 0x73, 0xd9, 0x32, 0xc1, 0xb1, 0xd4, 0x9c, 0x70, 0x10, + 0xe3, 0x84, 0xfe, 0x8f, 0xa8, 0xc6, 0xfc, 0xe3, 0x4f, 0x5e, 0x05, 0xf4, 0x2d, 0x3e, 0x58, 0x65, + 0xd9, 0xf7, 0xf8, 0x62, 0xe5, 0x2c, 0xb3, 0xda, 0x60, 0x07, 0x07, 0x54, 0xa4, 0x38, 0xf7, 0xcc, + 0x1f, 0x9e, 0x53, 0x66, 0xb5, 0x29, 0x03, 0xa7, 0x2c, 0xd2, 0x32, 0x70, 0x7e, 0xc5, 0x97, 0xae, + 0x40, 0x69, 0x56, 0x1b, 0x4a, 0x03, 0x8b, 0xd2, 0x22, 0xa5, 0x34, 0xf0, 0x6e, 0x9e, 0x8c, 0x61, + 0x5f, 0xfe, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x9d, 0x6f, 0x57, 0xe2, 0x3f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_group_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_group_view.pb.go new file mode 100644 index 0000000..4132933 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_group_view.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/hotel_group_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A hotel group view. +type HotelGroupView struct { + // The resource name of the hotel group view. 
+ // Hotel Group view resource names have the form: + // + // `customers/{customer_id}/hotelGroupViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelGroupView) Reset() { *m = HotelGroupView{} } +func (m *HotelGroupView) String() string { return proto.CompactTextString(m) } +func (*HotelGroupView) ProtoMessage() {} +func (*HotelGroupView) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_group_view_1f6cdac5d30c3d49, []int{0} +} +func (m *HotelGroupView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelGroupView.Unmarshal(m, b) +} +func (m *HotelGroupView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelGroupView.Marshal(b, m, deterministic) +} +func (dst *HotelGroupView) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelGroupView.Merge(dst, src) +} +func (m *HotelGroupView) XXX_Size() int { + return xxx_messageInfo_HotelGroupView.Size(m) +} +func (m *HotelGroupView) XXX_DiscardUnknown() { + xxx_messageInfo_HotelGroupView.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelGroupView proto.InternalMessageInfo + +func (m *HotelGroupView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*HotelGroupView)(nil), "google.ads.googleads.v1.resources.HotelGroupView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/hotel_group_view.proto", fileDescriptor_hotel_group_view_1f6cdac5d30c3d49) +} + +var fileDescriptor_hotel_group_view_1f6cdac5d30c3d49 = []byte{ + // 271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xb1, 0x4a, 0xc4, 0x30, + 0x18, 0xc7, 0x69, 0x05, 0xc1, 0xa2, 0x0e, 0xe7, 0x22, 0xe2, 0xe0, 0x29, 0x07, 0x4e, 0x09, 0x41, + 0x04, 0x89, 0x53, 0x6e, 0xa9, 0x38, 0xc8, 0x71, 0x43, 0x07, 0x29, 0x94, 0x78, 0x09, 0x31, 0xd0, + 0xe6, 0x2b, 0x49, 0xaf, 0xb7, 0xfa, 0x2c, 0x8e, 0x3e, 0x8a, 0x8f, 0xe2, 0x53, 0x48, 0x1a, 0x13, + 0x70, 0xd1, 0xed, 0x4f, 0xf2, 0xfb, 0xff, 0xbe, 0x8f, 0xaf, 0xb8, 0x53, 0x00, 0xaa, 0x95, 0x98, + 0x0b, 0x87, 0x43, 0xf4, 0x69, 0x24, 0xd8, 0x4a, 0x07, 0x5b, 0xbb, 0x91, 0x0e, 0xbf, 0xc2, 0x20, + 0xdb, 0x46, 0x59, 0xd8, 0xf6, 0xcd, 0xa8, 0xe5, 0x0e, 0xf5, 0x16, 0x06, 0x98, 0xcd, 0x03, 0x8e, + 0xb8, 0x70, 0x28, 0x35, 0xd1, 0x48, 0x50, 0x6a, 0x9e, 0x9d, 0x47, 0x79, 0xaf, 0x31, 0x37, 0x06, + 0x06, 0x3e, 0x68, 0x30, 0x2e, 0x08, 0x2e, 0x6f, 0x8b, 0xe3, 0x07, 0xaf, 0x2e, 0xbd, 0xb9, 0xd2, + 0x72, 0x37, 0xbb, 0x2a, 0x8e, 0x62, 0xb9, 0x31, 0xbc, 0x93, 0xa7, 0xd9, 0x45, 0x76, 0x7d, 0xb0, + 0x3e, 0x8c, 0x8f, 0x4f, 0xbc, 0x93, 0xcb, 0xb7, 0xbc, 0x58, 0x6c, 0xa0, 0x43, 0xff, 0x8e, 0x5f, + 0x9e, 0xfc, 0xd6, 0xaf, 0xfc, 0xd4, 0x55, 0xf6, 0xfc, 0xf8, 0xd3, 0x54, 0xd0, 0x72, 0xa3, 0x10, + 0x58, 0x85, 0x95, 0x34, 0xd3, 0x4e, 0xf1, 0x04, 0xbd, 0x76, 0x7f, 0x5c, 0xe4, 0x3e, 0xa5, 0xf7, + 0x7c, 0xaf, 0x64, 0xec, 0x23, 0x9f, 0x97, 0x41, 0xc9, 0x84, 0x43, 0x21, 0xfa, 0x54, 0x11, 0xb4, + 0x8e, 0xe4, 0x67, 0x64, 0x6a, 0x26, 0x5c, 0x9d, 0x98, 0xba, 0x22, 0x75, 0x62, 0xbe, 0xf2, 0x45, + 0xf8, 0xa0, 0x94, 0x09, 0x47, 0x69, 0xa2, 0x28, 0xad, 0x08, 0xa5, 0x89, 0x7b, 0xd9, 0x9f, 0x96, + 0xbd, 0xf9, 0x0e, 0x00, 0x00, 0xff, 0xff, 0xfa, 0xd7, 0x25, 0xfa, 0xbd, 0x01, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_performance_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_performance_view.pb.go new file mode 100644 index 0000000..62c22aa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/hotel_performance_view.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/hotel_performance_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A hotel performance view. +type HotelPerformanceView struct { + // The resource name of the hotel performance view. + // Hotel performance view resource names have the form: + // + // `customers/{customer_id}/hotelPerformanceView` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HotelPerformanceView) Reset() { *m = HotelPerformanceView{} } +func (m *HotelPerformanceView) String() string { return proto.CompactTextString(m) } +func (*HotelPerformanceView) ProtoMessage() {} +func (*HotelPerformanceView) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_performance_view_82509a48fa090f7b, []int{0} +} +func (m *HotelPerformanceView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HotelPerformanceView.Unmarshal(m, b) +} +func (m *HotelPerformanceView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HotelPerformanceView.Marshal(b, m, deterministic) +} +func (dst *HotelPerformanceView) XXX_Merge(src proto.Message) { + xxx_messageInfo_HotelPerformanceView.Merge(dst, src) +} +func (m *HotelPerformanceView) XXX_Size() int { + return xxx_messageInfo_HotelPerformanceView.Size(m) +} +func (m *HotelPerformanceView) XXX_DiscardUnknown() { + xxx_messageInfo_HotelPerformanceView.DiscardUnknown(m) +} + +var xxx_messageInfo_HotelPerformanceView proto.InternalMessageInfo + +func (m *HotelPerformanceView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*HotelPerformanceView)(nil), "google.ads.googleads.v1.resources.HotelPerformanceView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/hotel_performance_view.proto", fileDescriptor_hotel_performance_view_82509a48fa090f7b) +} + +var fileDescriptor_hotel_performance_view_82509a48fa090f7b = []byte{ + // 278 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x4f, 0x4a, 0xf4, 0x30, + 0x18, 0xc6, 0x69, 0x3f, 0xf8, 0xc0, 0xa2, 0x9b, 0xc1, 0x85, 0x8a, 0x0b, 0x47, 0x19, 0x70, 0x95, + 0x50, 0xdc, 0x65, 0x40, 0xc8, 0x6c, 0x46, 0x5c, 0x48, 0x99, 
0x45, 0x17, 0x52, 0x28, 0xb1, 0x79, + 0x8d, 0x81, 0x36, 0x6f, 0x49, 0x6a, 0xe7, 0x06, 0x1e, 0xc4, 0xa5, 0x47, 0xf1, 0x28, 0x9e, 0x42, + 0x3a, 0x31, 0x71, 0x23, 0xba, 0x7b, 0x48, 0x7e, 0xcf, 0x1f, 0xde, 0xec, 0x5a, 0x21, 0xaa, 0x16, + 0xa8, 0x90, 0x8e, 0x7a, 0x39, 0xa9, 0x31, 0xa7, 0x16, 0x1c, 0x3e, 0xdb, 0x06, 0x1c, 0x7d, 0xc2, + 0x01, 0xda, 0xba, 0x07, 0xfb, 0x88, 0xb6, 0x13, 0xa6, 0x81, 0x7a, 0xd4, 0xb0, 0x25, 0xbd, 0xc5, + 0x01, 0x67, 0x73, 0x6f, 0x22, 0x42, 0x3a, 0x12, 0xfd, 0x64, 0xcc, 0x49, 0xf4, 0x9f, 0x9c, 0x86, + 0x8a, 0x5e, 0x53, 0x61, 0x0c, 0x0e, 0x62, 0xd0, 0x68, 0x9c, 0x0f, 0x38, 0x5f, 0x66, 0x87, 0x37, + 0x53, 0x41, 0xf1, 0x9d, 0x5f, 0x6a, 0xd8, 0xce, 0x2e, 0xb2, 0x83, 0x10, 0x51, 0x1b, 0xd1, 0xc1, + 0x51, 0x72, 0x96, 0x5c, 0xee, 0x6d, 0xf6, 0xc3, 0xe3, 0x9d, 0xe8, 0x60, 0xf5, 0x92, 0x66, 0x8b, + 0x06, 0x3b, 0xf2, 0xe7, 0x88, 0xd5, 0xf1, 0x4f, 0x25, 0xc5, 0xb4, 0xa0, 0x48, 0xee, 0x6f, 0xbf, + 0xfc, 0x0a, 0x5b, 0x61, 0x14, 0x41, 0xab, 0xa8, 0x02, 0xb3, 0xdb, 0x17, 0x8e, 0xd2, 0x6b, 0xf7, + 0xcb, 0x8d, 0x96, 0x51, 0xbd, 0xa6, 0xff, 0xd6, 0x9c, 0xbf, 0xa5, 0xf3, 0xb5, 0x8f, 0xe4, 0xd2, + 0x11, 0x2f, 0x27, 0x55, 0xe6, 0x64, 0x13, 0xc8, 0xf7, 0xc0, 0x54, 0x5c, 0xba, 0x2a, 0x32, 0x55, + 0x99, 0x57, 0x91, 0xf9, 0x48, 0x17, 0xfe, 0x83, 0x31, 0x2e, 0x1d, 0x63, 0x91, 0x62, 0xac, 0xcc, + 0x19, 0x8b, 0xdc, 0xc3, 0xff, 0xdd, 0xd8, 0xab, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x58, 0x95, + 0x1c, 0x0f, 0xcf, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan.pb.go new file mode 100644 index 0000000..424e536 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan.pb.go @@ -0,0 +1,285 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_plan.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Keyword Planner plan. +// Max number of saved keyword plans: 10000. +// It's possible to remove plans if limit is reached. +type KeywordPlan struct { + // The resource name of the Keyword Planner plan. + // KeywordPlan resource names have the form: + // + // `customers/{customer_id}/keywordPlans/{kp_plan_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the keyword plan. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The name of the keyword plan. 
+ // + // This field is required and should not be empty when creating new keyword + // plans. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The date period used for forecasting the plan. + ForecastPeriod *KeywordPlanForecastPeriod `protobuf:"bytes,4,opt,name=forecast_period,json=forecastPeriod,proto3" json:"forecast_period,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlan) Reset() { *m = KeywordPlan{} } +func (m *KeywordPlan) String() string { return proto.CompactTextString(m) } +func (*KeywordPlan) ProtoMessage() {} +func (*KeywordPlan) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_683d8e3ecae641c5, []int{0} +} +func (m *KeywordPlan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlan.Unmarshal(m, b) +} +func (m *KeywordPlan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlan.Marshal(b, m, deterministic) +} +func (dst *KeywordPlan) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlan.Merge(dst, src) +} +func (m *KeywordPlan) XXX_Size() int { + return xxx_messageInfo_KeywordPlan.Size(m) +} +func (m *KeywordPlan) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlan.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlan proto.InternalMessageInfo + +func (m *KeywordPlan) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *KeywordPlan) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *KeywordPlan) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *KeywordPlan) GetForecastPeriod() *KeywordPlanForecastPeriod { + if m != nil { + return m.ForecastPeriod + } + return nil +} + +// The forecasting period associated with the keyword plan. +type KeywordPlanForecastPeriod struct { + // Required. The date used for forecasting the Plan. 
+ // + // Types that are valid to be assigned to Interval: + // *KeywordPlanForecastPeriod_DateInterval + // *KeywordPlanForecastPeriod_DateRange + Interval isKeywordPlanForecastPeriod_Interval `protobuf_oneof:"interval"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanForecastPeriod) Reset() { *m = KeywordPlanForecastPeriod{} } +func (m *KeywordPlanForecastPeriod) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanForecastPeriod) ProtoMessage() {} +func (*KeywordPlanForecastPeriod) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_683d8e3ecae641c5, []int{1} +} +func (m *KeywordPlanForecastPeriod) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanForecastPeriod.Unmarshal(m, b) +} +func (m *KeywordPlanForecastPeriod) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanForecastPeriod.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanForecastPeriod) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanForecastPeriod.Merge(dst, src) +} +func (m *KeywordPlanForecastPeriod) XXX_Size() int { + return xxx_messageInfo_KeywordPlanForecastPeriod.Size(m) +} +func (m *KeywordPlanForecastPeriod) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanForecastPeriod.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanForecastPeriod proto.InternalMessageInfo + +type isKeywordPlanForecastPeriod_Interval interface { + isKeywordPlanForecastPeriod_Interval() +} + +type KeywordPlanForecastPeriod_DateInterval struct { + DateInterval enums.KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval `protobuf:"varint,1,opt,name=date_interval,json=dateInterval,proto3,enum=google.ads.googleads.v1.enums.KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval,oneof"` +} + +type KeywordPlanForecastPeriod_DateRange struct { + DateRange *common.DateRange `protobuf:"bytes,2,opt,name=date_range,json=dateRange,proto3,oneof"` +} + +func (*KeywordPlanForecastPeriod_DateInterval) isKeywordPlanForecastPeriod_Interval() {} + +func (*KeywordPlanForecastPeriod_DateRange) isKeywordPlanForecastPeriod_Interval() {} + +func (m *KeywordPlanForecastPeriod) GetInterval() isKeywordPlanForecastPeriod_Interval { + if m != nil { + return m.Interval + } + return nil +} + +func (m *KeywordPlanForecastPeriod) GetDateInterval() enums.KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval { + if x, ok := m.GetInterval().(*KeywordPlanForecastPeriod_DateInterval); ok { + return x.DateInterval + } + return enums.KeywordPlanForecastIntervalEnum_UNSPECIFIED +} + +func (m *KeywordPlanForecastPeriod) GetDateRange() *common.DateRange { + if x, ok := m.GetInterval().(*KeywordPlanForecastPeriod_DateRange); ok { + return x.DateRange + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeywordPlanForecastPeriod) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanForecastPeriod_OneofMarshaler, _KeywordPlanForecastPeriod_OneofUnmarshaler, _KeywordPlanForecastPeriod_OneofSizer, []interface{}{ + (*KeywordPlanForecastPeriod_DateInterval)(nil), + (*KeywordPlanForecastPeriod_DateRange)(nil), + } +} + +func _KeywordPlanForecastPeriod_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanForecastPeriod) + // interval + switch x := m.Interval.(type) { + case *KeywordPlanForecastPeriod_DateInterval: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DateInterval)) + case *KeywordPlanForecastPeriod_DateRange: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateRange); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("KeywordPlanForecastPeriod.Interval has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanForecastPeriod_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanForecastPeriod) + switch tag { + case 1: // interval.date_interval + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Interval = &KeywordPlanForecastPeriod_DateInterval{enums.KeywordPlanForecastIntervalEnum_KeywordPlanForecastInterval(x)} + return true, err + case 2: // interval.date_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.DateRange) + err := b.DecodeMessage(msg) + m.Interval = &KeywordPlanForecastPeriod_DateRange{msg} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanForecastPeriod_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanForecastPeriod) + // interval + switch x := m.Interval.(type) { + case *KeywordPlanForecastPeriod_DateInterval: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.DateInterval)) + case *KeywordPlanForecastPeriod_DateRange: + s := proto.Size(x.DateRange) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*KeywordPlan)(nil), "google.ads.googleads.v1.resources.KeywordPlan") + proto.RegisterType((*KeywordPlanForecastPeriod)(nil), "google.ads.googleads.v1.resources.KeywordPlanForecastPeriod") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_plan.proto", fileDescriptor_keyword_plan_683d8e3ecae641c5) +} + +var fileDescriptor_keyword_plan_683d8e3ecae641c5 = []byte{ + // 486 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x93, 0xdd, 0x6a, 0xd4, 0x40, + 0x14, 0xc7, 0x9b, 0xb4, 0x88, 0x9d, 0x7e, 0x28, 0xb9, 0x5a, 0x6b, 0x91, 0xb6, 0x52, 0xa8, 0x0a, + 0x13, 0x53, 0x8b, 0x17, 0xd1, 0x9b, 0x2c, 0x6a, 0x3f, 0x04, 0x59, 0x22, 0xec, 0x45, 0x59, 0x58, + 0xa6, 0x3b, 0x67, 0x43, 0x30, 0x99, 0x09, 0x33, 0x93, 0x2d, 0x5e, 0xfa, 0x2a, 0x5e, 0xfa, 0x28, + 0x3e, 0x8a, 0x2f, 0xa0, 0x37, 0x82, 0x64, 0xbe, 0x68, 0xb1, 0x69, 0xef, 0xce, 0xd9, 0xf9, 0x9d, + 0xff, 0xff, 0x7c, 0x64, 0xd1, 0x51, 0xc1, 0x79, 0x51, 0x41, 0x4c, 0xa8, 0x8c, 0x4d, 0xd8, 0x45, + 0x8b, 0x24, 0x16, 0x20, 0x79, 0x2b, 0x66, 0x20, 0xe3, 0x2f, 0xf0, 0xf5, 0x92, 0x0b, 0x3a, 0x6d, + 0x2a, 0xc2, 0x70, 0x23, 
0xb8, 0xe2, 0xd1, 0xae, 0x41, 0x31, 0xa1, 0x12, 0xfb, 0x2a, 0xbc, 0x48, + 0xb0, 0xaf, 0xda, 0x7a, 0xde, 0x27, 0x3c, 0xe3, 0x75, 0xcd, 0x59, 0x4c, 0x89, 0x02, 0x69, 0xe4, + 0xb6, 0x86, 0x7d, 0x2c, 0xb0, 0xb6, 0xbe, 0xde, 0xc0, 0x74, 0xce, 0x05, 0xcc, 0x88, 0x54, 0xd3, + 0x92, 0x29, 0x10, 0x0b, 0x52, 0x59, 0x8d, 0x27, 0x56, 0x43, 0x67, 0x17, 0xed, 0x3c, 0xbe, 0x14, + 0xa4, 0x69, 0x40, 0x38, 0x8f, 0x6d, 0xe7, 0xd1, 0x94, 0x31, 0x61, 0x8c, 0x2b, 0xa2, 0x4a, 0xce, + 0xec, 0xeb, 0xde, 0x9f, 0x00, 0xad, 0x7d, 0x34, 0x36, 0xa3, 0x8a, 0xb0, 0xe8, 0x29, 0xda, 0x70, + 0xa3, 0x4c, 0x19, 0xa9, 0x61, 0x10, 0xec, 0x04, 0x07, 0xab, 0xf9, 0xba, 0xfb, 0xf1, 0x13, 0xa9, + 0x21, 0x7a, 0x81, 0xc2, 0x92, 0x0e, 0xc2, 0x9d, 0xe0, 0x60, 0xed, 0xf0, 0xb1, 0xdd, 0x03, 0x76, + 0xfe, 0xf8, 0x94, 0xa9, 0xd7, 0x47, 0x63, 0x52, 0xb5, 0x90, 0x87, 0x25, 0x8d, 0x5e, 0xa2, 0x15, + 0x2d, 0xb4, 0xac, 0xf1, 0xed, 0xff, 0xf0, 0xcf, 0x4a, 0x94, 0xac, 0x30, 0xbc, 0x26, 0x23, 0x40, + 0x0f, 0xfc, 0xb0, 0x0d, 0x88, 0x92, 0xd3, 0xc1, 0x8a, 0x2e, 0x7e, 0x8b, 0xef, 0x5c, 0x3f, 0xbe, + 0x32, 0xcc, 0x07, 0x2b, 0x32, 0xd2, 0x1a, 0xf9, 0xe6, 0xfc, 0x5a, 0xbe, 0xf7, 0x3b, 0x40, 0x8f, + 0x7a, 0xe9, 0xe8, 0x5b, 0x80, 0x36, 0xba, 0x53, 0xf9, 0x75, 0xeb, 0x4d, 0x6c, 0x1e, 0x9e, 0xf7, + 0xf6, 0xa0, 0x6f, 0x76, 0x93, 0xff, 0xa9, 0x55, 0x78, 0xcf, 0xda, 0xfa, 0xb6, 0xf7, 0x93, 0xa5, + 0x7c, 0xbd, 0xb3, 0x74, 0x79, 0x74, 0x86, 0x90, 0x6e, 0x41, 0x10, 0x56, 0x80, 0xdd, 0xf7, 0xb3, + 0x5e, 0x7f, 0xf3, 0x7d, 0xe1, 0x77, 0x44, 0x41, 0xde, 0x15, 0x9c, 0x2c, 0xe5, 0xab, 0xd4, 0x25, + 0x43, 0x84, 0xee, 0xbb, 0x49, 0x86, 0x7f, 0x03, 0xb4, 0x3f, 0xe3, 0xf5, 0xdd, 0xdb, 0x1c, 0x3e, + 0xbc, 0xd2, 0xee, 0xa8, 0xbb, 0xd8, 0x28, 0x38, 0x3f, 0xb3, 0x65, 0x05, 0xaf, 0x08, 0x2b, 0x30, + 0x17, 0x45, 0x5c, 0x00, 0xd3, 0xf7, 0x74, 0x1f, 0x71, 0x53, 0xca, 0x5b, 0xfe, 0x58, 0x6f, 0x7c, + 0xf4, 0x3d, 0x5c, 0x3e, 0xce, 0xb2, 0x1f, 0xe1, 0xee, 0xb1, 0x91, 0xcc, 0xa8, 0xc4, 0x26, 0xec, + 0xa2, 0x71, 0x82, 0x73, 0x47, 0xfe, 0x74, 0xcc, 0x24, 0xa3, 0x72, 0xe2, 0x99, 0xc9, 0x38, 0x99, + 0x78, 0xe6, 0x57, 0xb8, 0x6f, 0x1e, 0xd2, 0x34, 0xa3, 0x32, 0x4d, 0x3d, 0x95, 0xa6, 0xe3, 0x24, + 0x4d, 0x3d, 0x77, 0x71, 0x4f, 0x37, 0xfb, 0xea, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x70, 0x62, + 0xd0, 0x7d, 0x04, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_ad_group.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_ad_group.pb.go new file mode 100644 index 0000000..f3ec16d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_ad_group.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_plan_ad_group.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Keyword Planner ad group. +// Max number of keyword plan ad groups per plan: 200. +type KeywordPlanAdGroup struct { + // The resource name of the Keyword Planner ad group. + // KeywordPlanAdGroup resource names have the form: + // + // `customers/{customer_id}/keywordPlanAdGroups/{kp_ad_group_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The keyword plan campaign to which this ad group belongs. + KeywordPlanCampaign *wrappers.StringValue `protobuf:"bytes,2,opt,name=keyword_plan_campaign,json=keywordPlanCampaign,proto3" json:"keyword_plan_campaign,omitempty"` + // The ID of the keyword plan ad group. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the keyword plan ad group. + // + // This field is required and should not be empty when creating keyword plan + // ad group. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // A default ad group max cpc bid in micros in account currency for all + // biddable keywords under the keyword plan ad group. + // If not set, will inherit from parent campaign. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,5,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanAdGroup) Reset() { *m = KeywordPlanAdGroup{} } +func (m *KeywordPlanAdGroup) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanAdGroup) ProtoMessage() {} +func (*KeywordPlanAdGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_57f0587f66d3b006, []int{0} +} +func (m *KeywordPlanAdGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanAdGroup.Unmarshal(m, b) +} +func (m *KeywordPlanAdGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanAdGroup.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanAdGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanAdGroup.Merge(dst, src) +} +func (m *KeywordPlanAdGroup) XXX_Size() int { + return xxx_messageInfo_KeywordPlanAdGroup.Size(m) +} +func (m *KeywordPlanAdGroup) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanAdGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanAdGroup proto.InternalMessageInfo + +func (m *KeywordPlanAdGroup) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *KeywordPlanAdGroup) GetKeywordPlanCampaign() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanCampaign + } + return nil +} + +func (m *KeywordPlanAdGroup) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *KeywordPlanAdGroup) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *KeywordPlanAdGroup) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func init() { + proto.RegisterType((*KeywordPlanAdGroup)(nil), "google.ads.googleads.v1.resources.KeywordPlanAdGroup") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_plan_ad_group.proto", fileDescriptor_keyword_plan_ad_group_57f0587f66d3b006) +} + +var fileDescriptor_keyword_plan_ad_group_57f0587f66d3b006 = []byte{ + // 
398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0xaa, 0xd4, 0x30, + 0x18, 0x85, 0x69, 0xee, 0x55, 0x30, 0x5e, 0x5d, 0x54, 0xc4, 0x72, 0xbd, 0xc8, 0x8c, 0x32, 0x30, + 0x20, 0xa4, 0x56, 0xc5, 0x45, 0xc4, 0x45, 0xc7, 0xc5, 0xa0, 0xa2, 0x94, 0x11, 0xba, 0x90, 0x42, + 0xc9, 0x24, 0x31, 0x84, 0x69, 0x93, 0x90, 0xb4, 0x33, 0xb8, 0xf7, 0x49, 0x5c, 0x0a, 0xbe, 0x88, + 0x8f, 0xe2, 0x53, 0x48, 0x9b, 0xb6, 0x22, 0x03, 0x8e, 0xbb, 0x43, 0xff, 0xf3, 0xfd, 0xe7, 0xd0, + 0xfc, 0xf0, 0x95, 0xd0, 0x5a, 0x54, 0x3c, 0x26, 0xcc, 0xc5, 0x5e, 0x76, 0x6a, 0x9f, 0xc4, 0x96, + 0x3b, 0xdd, 0x5a, 0xca, 0x5d, 0xbc, 0xe3, 0x5f, 0x0e, 0xda, 0xb2, 0xd2, 0x54, 0x44, 0x95, 0x84, + 0x95, 0xc2, 0xea, 0xd6, 0x20, 0x63, 0x75, 0xa3, 0xc3, 0xb9, 0x67, 0x10, 0x61, 0x0e, 0x4d, 0x38, + 0xda, 0x27, 0x68, 0xc2, 0x2f, 0x1f, 0x0c, 0x09, 0x3d, 0xb0, 0x6d, 0x3f, 0xc7, 0x07, 0x4b, 0x8c, + 0xe1, 0xd6, 0xf9, 0x15, 0x97, 0x57, 0x63, 0x03, 0x23, 0x63, 0xa2, 0x94, 0x6e, 0x48, 0x23, 0xb5, + 0x1a, 0xa6, 0x0f, 0x7f, 0x00, 0x18, 0xbe, 0xf3, 0x05, 0xb2, 0x8a, 0xa8, 0x94, 0xad, 0xbb, 0xf4, + 0xf0, 0x11, 0xbc, 0x35, 0x26, 0x94, 0x8a, 0xd4, 0x3c, 0x0a, 0x66, 0xc1, 0xf2, 0xc6, 0xe6, 0x62, + 0xfc, 0xf8, 0x81, 0xd4, 0x3c, 0xcc, 0xe0, 0xdd, 0xbf, 0xba, 0x53, 0x52, 0x1b, 0x22, 0x85, 0x8a, + 0xc0, 0x2c, 0x58, 0xde, 0x7c, 0x7a, 0x35, 0x34, 0x46, 0x63, 0x33, 0xf4, 0xb1, 0xb1, 0x52, 0x89, + 0x9c, 0x54, 0x2d, 0xdf, 0xdc, 0xd9, 0xfd, 0x49, 0x7d, 0x3d, 0x80, 0xe1, 0x63, 0x08, 0x24, 0x8b, + 0xce, 0x7a, 0xfc, 0xfe, 0x11, 0xfe, 0x46, 0x35, 0x2f, 0x9e, 0x7b, 0x1a, 0x48, 0x16, 0x3e, 0x81, + 0xe7, 0x7d, 0xb5, 0xf3, 0xff, 0x48, 0xeb, 0x9d, 0x61, 0x0a, 0x6f, 0x53, 0x43, 0xcb, 0xad, 0x64, + 0x65, 0x2d, 0xa9, 0xd5, 0x2e, 0xba, 0x76, 0x3a, 0xea, 0x82, 0x1a, 0xba, 0x92, 0xec, 0x7d, 0x0f, + 0xac, 0xbe, 0x02, 0xb8, 0xa0, 0xba, 0x46, 0x27, 0xdf, 0x65, 0x75, 0xef, 0xf8, 0xb7, 0x66, 0xdd, + 0xfa, 0x2c, 0xf8, 0xf4, 0x76, 0xa0, 0x85, 0xae, 0x88, 0x12, 0x48, 0x5b, 0x11, 0x0b, 0xae, 0xfa, + 0xf0, 0xf1, 0x48, 0x8c, 0x74, 0xff, 0xb8, 0x99, 0x97, 0x93, 0xfa, 0x06, 0xce, 0xd6, 0x69, 0xfa, + 0x1d, 0xcc, 0xd7, 0x7e, 0x65, 0xca, 0x1c, 0xf2, 0xb2, 0x53, 0x79, 0x82, 0x36, 0xa3, 0xf3, 0xe7, + 0xe8, 0x29, 0x52, 0xe6, 0x8a, 0xc9, 0x53, 0xe4, 0x49, 0x31, 0x79, 0x7e, 0x81, 0x85, 0x1f, 0x60, + 0x9c, 0x32, 0x87, 0xf1, 0xe4, 0xc2, 0x38, 0x4f, 0x30, 0x9e, 0x7c, 0xdb, 0xeb, 0x7d, 0xd9, 0x67, + 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe6, 0xba, 0x06, 0x79, 0xdf, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_campaign.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_campaign.pb.go new file mode 100644 index 0000000..7df20f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_campaign.pb.go @@ -0,0 +1,228 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_plan_campaign.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Keyword Plan campaign. +// Max number of keyword plan campaigns per plan allowed: 1. +type KeywordPlanCampaign struct { + // The resource name of the Keyword Plan campaign. + // KeywordPlanCampaign resource names have the form: + // + // `customers/{customer_id}/keywordPlanCampaigns/{kp_campaign_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The keyword plan this campaign belongs to. + KeywordPlan *wrappers.StringValue `protobuf:"bytes,2,opt,name=keyword_plan,json=keywordPlan,proto3" json:"keyword_plan,omitempty"` + // The ID of the Keyword Plan campaign. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the Keyword Plan campaign. + // + // This field is required and should not be empty when creating Keyword Plan + // campaigns. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The languages targeted for the Keyword Plan campaign. + // Max allowed: 1. + LanguageConstants []*wrappers.StringValue `protobuf:"bytes,5,rep,name=language_constants,json=languageConstants,proto3" json:"language_constants,omitempty"` + // Targeting network. + // + // This field is required and should not be empty when creating Keyword Plan + // campaigns. + KeywordPlanNetwork enums.KeywordPlanNetworkEnum_KeywordPlanNetwork `protobuf:"varint,6,opt,name=keyword_plan_network,json=keywordPlanNetwork,proto3,enum=google.ads.googleads.v1.enums.KeywordPlanNetworkEnum_KeywordPlanNetwork" json:"keyword_plan_network,omitempty"` + // A default max cpc bid in micros, and in the account currency, for all ad + // groups under the campaign. + // + // This field is required and should not be empty when creating Keyword Plan + // campaigns. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,7,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + // The geo targets. + // Max number allowed: 20. 
+ GeoTargets []*KeywordPlanGeoTarget `protobuf:"bytes,8,rep,name=geo_targets,json=geoTargets,proto3" json:"geo_targets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanCampaign) Reset() { *m = KeywordPlanCampaign{} } +func (m *KeywordPlanCampaign) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanCampaign) ProtoMessage() {} +func (*KeywordPlanCampaign) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_f00db65e7bd65657, []int{0} +} +func (m *KeywordPlanCampaign) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanCampaign.Unmarshal(m, b) +} +func (m *KeywordPlanCampaign) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanCampaign.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanCampaign) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanCampaign.Merge(dst, src) +} +func (m *KeywordPlanCampaign) XXX_Size() int { + return xxx_messageInfo_KeywordPlanCampaign.Size(m) +} +func (m *KeywordPlanCampaign) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanCampaign.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanCampaign proto.InternalMessageInfo + +func (m *KeywordPlanCampaign) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *KeywordPlanCampaign) GetKeywordPlan() *wrappers.StringValue { + if m != nil { + return m.KeywordPlan + } + return nil +} + +func (m *KeywordPlanCampaign) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *KeywordPlanCampaign) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *KeywordPlanCampaign) GetLanguageConstants() []*wrappers.StringValue { + if m != nil { + return m.LanguageConstants + } + return nil +} + +func (m *KeywordPlanCampaign) GetKeywordPlanNetwork() enums.KeywordPlanNetworkEnum_KeywordPlanNetwork { + if m != nil { + return m.KeywordPlanNetwork + } + return enums.KeywordPlanNetworkEnum_UNSPECIFIED +} + +func (m *KeywordPlanCampaign) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func (m *KeywordPlanCampaign) GetGeoTargets() []*KeywordPlanGeoTarget { + if m != nil { + return m.GeoTargets + } + return nil +} + +// A geo target. +// Next ID: 3 +type KeywordPlanGeoTarget struct { + // Required. The resource name of the geo target. 
+ GeoTargetConstant *wrappers.StringValue `protobuf:"bytes,1,opt,name=geo_target_constant,json=geoTargetConstant,proto3" json:"geo_target_constant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanGeoTarget) Reset() { *m = KeywordPlanGeoTarget{} } +func (m *KeywordPlanGeoTarget) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanGeoTarget) ProtoMessage() {} +func (*KeywordPlanGeoTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_f00db65e7bd65657, []int{1} +} +func (m *KeywordPlanGeoTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanGeoTarget.Unmarshal(m, b) +} +func (m *KeywordPlanGeoTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanGeoTarget.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanGeoTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanGeoTarget.Merge(dst, src) +} +func (m *KeywordPlanGeoTarget) XXX_Size() int { + return xxx_messageInfo_KeywordPlanGeoTarget.Size(m) +} +func (m *KeywordPlanGeoTarget) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanGeoTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanGeoTarget proto.InternalMessageInfo + +func (m *KeywordPlanGeoTarget) GetGeoTargetConstant() *wrappers.StringValue { + if m != nil { + return m.GeoTargetConstant + } + return nil +} + +func init() { + proto.RegisterType((*KeywordPlanCampaign)(nil), "google.ads.googleads.v1.resources.KeywordPlanCampaign") + proto.RegisterType((*KeywordPlanGeoTarget)(nil), "google.ads.googleads.v1.resources.KeywordPlanGeoTarget") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_plan_campaign.proto", fileDescriptor_keyword_plan_campaign_f00db65e7bd65657) +} + +var fileDescriptor_keyword_plan_campaign_f00db65e7bd65657 = []byte{ + // 523 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xd1, 0x8a, 0x13, 0x31, + 0x14, 0x86, 0xe9, 0xb4, 0xae, 0x9a, 0xd6, 0x05, 0xb3, 0x7b, 0x31, 0xac, 0x8b, 0x74, 0x57, 0x16, + 0x0a, 0x42, 0xc6, 0xae, 0xa2, 0x32, 0x22, 0x32, 0x5d, 0xa4, 0xea, 0xea, 0x52, 0xaa, 0x14, 0x91, + 0xc2, 0x90, 0x4e, 0x62, 0x08, 0xed, 0x24, 0x43, 0x92, 0xd9, 0xa2, 0xf7, 0xbe, 0x88, 0x97, 0x3e, + 0x85, 0xd7, 0x3e, 0x8a, 0x4f, 0x21, 0xcd, 0x4c, 0x66, 0x8b, 0xdb, 0x75, 0xbc, 0x3b, 0x93, 0xfc, + 0xdf, 0x39, 0xff, 0x9c, 0x73, 0x02, 0x9e, 0x33, 0x29, 0xd9, 0x82, 0x06, 0x98, 0xe8, 0xa0, 0x08, + 0x57, 0xd1, 0x79, 0x3f, 0x50, 0x54, 0xcb, 0x5c, 0x25, 0x54, 0x07, 0x73, 0xfa, 0x65, 0x29, 0x15, + 0x89, 0xb3, 0x05, 0x16, 0x71, 0x82, 0xd3, 0x0c, 0x73, 0x26, 0x50, 0xa6, 0xa4, 0x91, 0xf0, 0xa0, + 0x60, 0x10, 0x26, 0x1a, 0x55, 0x38, 0x3a, 0xef, 0xa3, 0x0a, 0xdf, 0x7b, 0x7a, 0x55, 0x05, 0x2a, + 0xf2, 0xf4, 0xaf, 0xec, 0x82, 0x9a, 0xa5, 0x54, 0xf3, 0x22, 0xf9, 0xde, 0xdd, 0x92, 0xb4, 0x5f, + 0xb3, 0xfc, 0x73, 0xb0, 0x54, 0x38, 0xcb, 0xa8, 0xd2, 0xe5, 0xfd, 0xbe, 0xcb, 0x9c, 0xf1, 0x00, + 0x0b, 0x21, 0x0d, 0x36, 0x5c, 0x8a, 0xf2, 0xf6, 0xf0, 0x67, 0x0b, 0xec, 0x9c, 0x16, 0xc9, 0x47, + 0x0b, 0x2c, 0x4e, 0x4a, 0xe3, 0xf0, 0x1e, 0xb8, 0xe5, 0xcc, 0xc5, 0x02, 0xa7, 0xd4, 0x6f, 0x74, + 0x1b, 0xbd, 0x9b, 0xe3, 0x8e, 0x3b, 0x3c, 0xc3, 0x29, 0x85, 0x2f, 0x40, 0x67, 0xdd, 0x98, 0xef, + 0x75, 0x1b, 0xbd, 0xf6, 0xf1, 0x7e, 0xf9, 0x8f, 0xc8, 0x39, 0x42, 0xef, 0x8d, 0xe2, 0x82, 0x4d, + 0xf0, 0x22, 0xa7, 0xe3, 0xf6, 0xfc, 0xa2, 0x1a, 0xbc, 0x0f, 0x3c, 0x4e, 0xfc, 0xa6, 
0xc5, 0xee, + 0x5c, 0xc2, 0x5e, 0x0b, 0xf3, 0xf8, 0x51, 0x41, 0x79, 0x9c, 0xc0, 0x07, 0xa0, 0x65, 0x9d, 0xb4, + 0xfe, 0xa3, 0x8a, 0x55, 0xc2, 0x53, 0x00, 0x17, 0x58, 0xb0, 0x1c, 0x33, 0x1a, 0x27, 0x52, 0x68, + 0x83, 0x85, 0xd1, 0xfe, 0xb5, 0x6e, 0xb3, 0x96, 0xbf, 0xed, 0xb8, 0x13, 0x87, 0xc1, 0xaf, 0x60, + 0x77, 0xd3, 0x14, 0xfc, 0xad, 0x6e, 0xa3, 0xb7, 0x7d, 0xfc, 0x0a, 0x5d, 0x35, 0x63, 0x3b, 0x40, + 0xb4, 0xd6, 0xe3, 0xb3, 0x02, 0x7c, 0x29, 0xf2, 0x74, 0xc3, 0xf1, 0x18, 0xce, 0x2f, 0x9d, 0xc1, + 0x08, 0x6c, 0x27, 0x59, 0x12, 0xcf, 0x38, 0x89, 0x53, 0x9e, 0x28, 0xa9, 0xfd, 0xeb, 0xf5, 0x3d, + 0xeb, 0x24, 0x59, 0x32, 0xe0, 0xe4, 0x9d, 0x05, 0xe0, 0x47, 0xd0, 0x66, 0x54, 0xc6, 0x06, 0x2b, + 0x46, 0x8d, 0xf6, 0x6f, 0xd8, 0x26, 0x3c, 0x41, 0xb5, 0x9b, 0xb9, 0x6e, 0x71, 0x48, 0xe5, 0x07, + 0xcb, 0x8f, 0x01, 0x73, 0xa1, 0x3e, 0x24, 0x60, 0x77, 0x93, 0x06, 0xbe, 0x05, 0x3b, 0x17, 0x15, + 0xab, 0xfe, 0xdb, 0x45, 0xaa, 0x6d, 0x7f, 0x95, 0xde, 0xf5, 0x7f, 0xf0, 0xcd, 0x03, 0x47, 0x89, + 0x4c, 0xeb, 0x0d, 0x0f, 0xfc, 0x0d, 0xfb, 0x3c, 0x5a, 0x55, 0x19, 0x35, 0x3e, 0xbd, 0x29, 0x71, + 0x26, 0x57, 0x03, 0x46, 0x52, 0xb1, 0x80, 0x51, 0x61, 0x3d, 0xb8, 0x67, 0x97, 0x71, 0xfd, 0x8f, + 0x77, 0xfe, 0xac, 0x8a, 0xbe, 0x7b, 0xcd, 0x61, 0x14, 0xfd, 0xf0, 0x0e, 0x86, 0x45, 0xca, 0x88, + 0x68, 0x54, 0x84, 0xab, 0x68, 0xd2, 0x47, 0x63, 0xa7, 0xfc, 0xe5, 0x34, 0xd3, 0x88, 0xe8, 0x69, + 0xa5, 0x99, 0x4e, 0xfa, 0xd3, 0x4a, 0xf3, 0xdb, 0x3b, 0x2a, 0x2e, 0xc2, 0x30, 0x22, 0x3a, 0x0c, + 0x2b, 0x55, 0x18, 0x4e, 0xfa, 0x61, 0x58, 0xe9, 0x66, 0x5b, 0xd6, 0xec, 0xc3, 0x3f, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x0c, 0xaa, 0xfa, 0x04, 0x93, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_keyword.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_keyword.pb.go new file mode 100644 index 0000000..bf388e8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_keyword.pb.go @@ -0,0 +1,153 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_plan_keyword.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Keyword Plan ad group keyword. +// Max number of keyword plan keywords per plan: 2500. +type KeywordPlanKeyword struct { + // The resource name of the Keyword Plan ad group keyword. + // KeywordPlanKeyword resource names have the form: + // + // `customers/{customer_id}/keywordPlanKeywords/{kp_ad_group_keyword_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The Keyword Plan ad group to which this keyword belongs. 
+ KeywordPlanAdGroup *wrappers.StringValue `protobuf:"bytes,2,opt,name=keyword_plan_ad_group,json=keywordPlanAdGroup,proto3" json:"keyword_plan_ad_group,omitempty"` + // The ID of the Keyword Plan keyword. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The keyword text. + Text *wrappers.StringValue `protobuf:"bytes,4,opt,name=text,proto3" json:"text,omitempty"` + // The keyword match type. + MatchType enums.KeywordMatchTypeEnum_KeywordMatchType `protobuf:"varint,5,opt,name=match_type,json=matchType,proto3,enum=google.ads.googleads.v1.enums.KeywordMatchTypeEnum_KeywordMatchType" json:"match_type,omitempty"` + // A keyword level max cpc bid in micros, in the account currency, that + // overrides the keyword plan ad group cpc bid. + CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,6,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanKeyword) Reset() { *m = KeywordPlanKeyword{} } +func (m *KeywordPlanKeyword) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanKeyword) ProtoMessage() {} +func (*KeywordPlanKeyword) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_93af4dbc1a58d5ba, []int{0} +} +func (m *KeywordPlanKeyword) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanKeyword.Unmarshal(m, b) +} +func (m *KeywordPlanKeyword) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanKeyword.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanKeyword) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanKeyword.Merge(dst, src) +} +func (m *KeywordPlanKeyword) XXX_Size() int { + return xxx_messageInfo_KeywordPlanKeyword.Size(m) +} +func (m *KeywordPlanKeyword) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanKeyword.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanKeyword proto.InternalMessageInfo + +func (m *KeywordPlanKeyword) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *KeywordPlanKeyword) GetKeywordPlanAdGroup() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanAdGroup + } + return nil +} + +func (m *KeywordPlanKeyword) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *KeywordPlanKeyword) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *KeywordPlanKeyword) GetMatchType() enums.KeywordMatchTypeEnum_KeywordMatchType { + if m != nil { + return m.MatchType + } + return enums.KeywordMatchTypeEnum_UNSPECIFIED +} + +func (m *KeywordPlanKeyword) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +func init() { + proto.RegisterType((*KeywordPlanKeyword)(nil), "google.ads.googleads.v1.resources.KeywordPlanKeyword") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_plan_keyword.proto", fileDescriptor_keyword_plan_keyword_93af4dbc1a58d5ba) +} + +var fileDescriptor_keyword_plan_keyword_93af4dbc1a58d5ba = []byte{ + // 453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6a, 0x14, 0x31, + 0x18, 0xc7, 0x99, 0xd9, 0x5a, 0x68, 0xac, 0x3d, 0x04, 0xc4, 0xa1, 0x16, 0xd9, 0x2a, 0x85, 0x05, + 0x21, 0xe3, 0x54, 0xe9, 0x61, 0xf4, 0x32, 0x8b, 0xb2, 0xa8, 0x54, 0x97, 0x55, 
0xf6, 0x20, 0x03, + 0x43, 0x36, 0x89, 0x63, 0xe8, 0x4c, 0x12, 0x92, 0x4c, 0xeb, 0xde, 0x7d, 0x01, 0x5f, 0xc1, 0xa3, + 0x8f, 0xe2, 0xa3, 0xf8, 0x14, 0x32, 0x33, 0xc9, 0x2c, 0xb2, 0xd4, 0x7a, 0xfb, 0x27, 0xf9, 0xff, + 0xfe, 0x5f, 0xf2, 0xe5, 0x03, 0x2f, 0x4a, 0x29, 0xcb, 0x8a, 0xc5, 0x98, 0x9a, 0xb8, 0x97, 0xad, + 0xba, 0x4c, 0x62, 0xcd, 0x8c, 0x6c, 0x34, 0x61, 0x26, 0xbe, 0x60, 0xeb, 0x2b, 0xa9, 0x69, 0xa1, + 0x2a, 0x2c, 0x0a, 0xb7, 0x40, 0x4a, 0x4b, 0x2b, 0xe1, 0x71, 0x8f, 0x20, 0x4c, 0x0d, 0x1a, 0x68, + 0x74, 0x99, 0xa0, 0x81, 0x3e, 0x3c, 0xbb, 0xae, 0x00, 0x13, 0x4d, 0xbd, 0x09, 0xaf, 0xb1, 0x25, + 0x5f, 0x0a, 0xbb, 0x56, 0xac, 0x8f, 0x3e, 0x7c, 0xe0, 0xb8, 0x6e, 0xb5, 0x6a, 0x3e, 0xc7, 0x57, + 0x1a, 0x2b, 0xc5, 0xb4, 0x71, 0xe7, 0x47, 0x3e, 0x57, 0xf1, 0x18, 0x0b, 0x21, 0x2d, 0xb6, 0x5c, + 0x0a, 0x77, 0xfa, 0xf0, 0xfb, 0x08, 0xc0, 0xb7, 0x7d, 0xf4, 0xbc, 0xc2, 0xc2, 0x49, 0xf8, 0x08, + 0xdc, 0xf1, 0x37, 0x2b, 0x04, 0xae, 0x59, 0x14, 0x8c, 0x83, 0xc9, 0xde, 0x62, 0xdf, 0x6f, 0xbe, + 0xc3, 0x35, 0x83, 0xef, 0xc1, 0xdd, 0xbf, 0x9e, 0x8c, 0x69, 0x51, 0x6a, 0xd9, 0xa8, 0x28, 0x1c, + 0x07, 0x93, 0xdb, 0xa7, 0x47, 0xee, 0xa5, 0xc8, 0xdf, 0x0c, 0x7d, 0xb0, 0x9a, 0x8b, 0x72, 0x89, + 0xab, 0x86, 0x2d, 0xe0, 0xc5, 0xa6, 0x6a, 0x46, 0x67, 0x2d, 0x07, 0x1f, 0x83, 0x90, 0xd3, 0x68, + 0xd4, 0xd1, 0xf7, 0xb7, 0xe8, 0xd7, 0xc2, 0x9e, 0x3d, 0xeb, 0xe1, 0x90, 0x53, 0xf8, 0x04, 0xec, + 0x58, 0xf6, 0xd5, 0x46, 0x3b, 0xff, 0x51, 0xac, 0x73, 0x42, 0x02, 0xc0, 0xa6, 0x7b, 0xd1, 0xad, + 0x71, 0x30, 0x39, 0x38, 0x7d, 0x89, 0xae, 0xfb, 0x99, 0xae, 0xed, 0xc8, 0x35, 0xe4, 0xbc, 0xe5, + 0x3e, 0xae, 0x15, 0x7b, 0x25, 0x9a, 0x7a, 0x6b, 0x73, 0xb1, 0x57, 0x7b, 0x09, 0x33, 0x70, 0x40, + 0x14, 0x29, 0x56, 0x9c, 0x16, 0x35, 0x27, 0x5a, 0x9a, 0x68, 0xf7, 0xe6, 0xf7, 0xec, 0x13, 0x45, + 0xa6, 0x9c, 0x9e, 0x77, 0xc0, 0xf4, 0x5b, 0x08, 0x4e, 0x88, 0xac, 0xd1, 0x8d, 0x33, 0x33, 0xbd, + 0xb7, 0xfd, 0x75, 0xf3, 0x36, 0x7e, 0x1e, 0x7c, 0x7a, 0xe3, 0xe8, 0x52, 0x56, 0x58, 0x94, 0x48, + 0xea, 0x32, 0x2e, 0x99, 0xe8, 0x8a, 0xfb, 0xf1, 0x52, 0xdc, 0xfc, 0x63, 0x9c, 0x9f, 0x0f, 0xea, + 0x47, 0x38, 0x9a, 0x65, 0xd9, 0xcf, 0xf0, 0x78, 0xd6, 0x47, 0x66, 0xd4, 0xa0, 0x5e, 0xb6, 0x6a, + 0x99, 0xa0, 0x85, 0x77, 0xfe, 0xf2, 0x9e, 0x3c, 0xa3, 0x26, 0x1f, 0x3c, 0xf9, 0x32, 0xc9, 0x07, + 0xcf, 0xef, 0xf0, 0xa4, 0x3f, 0x48, 0xd3, 0x8c, 0x9a, 0x34, 0x1d, 0x5c, 0x69, 0xba, 0x4c, 0xd2, + 0x74, 0xf0, 0xad, 0x76, 0xbb, 0xcb, 0x3e, 0xfd, 0x13, 0x00, 0x00, 0xff, 0xff, 0x77, 0x12, 0xa5, + 0xa6, 0x7a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_negative_keyword.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_negative_keyword.pb.go new file mode 100644 index 0000000..e3ddf6b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_plan_negative_keyword.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_plan_negative_keyword.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Keyword Plan negative keyword. +// Max number of keyword plan negative keywords per plan: 1000. +type KeywordPlanNegativeKeyword struct { + // The resource name of the Keyword Plan negative keyword. + // KeywordPlanNegativeKeyword resource names have the form: + // + // + // `customers/{customer_id}/keywordPlanNegativeKeywords/{kp_negative_keyword_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The Keyword Plan campaign to which this negative keyword belongs. + KeywordPlanCampaign *wrappers.StringValue `protobuf:"bytes,2,opt,name=keyword_plan_campaign,json=keywordPlanCampaign,proto3" json:"keyword_plan_campaign,omitempty"` + // The ID of the Keyword Plan negative keyword. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The keyword text. + Text *wrappers.StringValue `protobuf:"bytes,4,opt,name=text,proto3" json:"text,omitempty"` + // The keyword match type. + MatchType enums.KeywordMatchTypeEnum_KeywordMatchType `protobuf:"varint,5,opt,name=match_type,json=matchType,proto3,enum=google.ads.googleads.v1.enums.KeywordMatchTypeEnum_KeywordMatchType" json:"match_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanNegativeKeyword) Reset() { *m = KeywordPlanNegativeKeyword{} } +func (m *KeywordPlanNegativeKeyword) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanNegativeKeyword) ProtoMessage() {} +func (*KeywordPlanNegativeKeyword) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_5104ab403d28b07d, []int{0} +} +func (m *KeywordPlanNegativeKeyword) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanNegativeKeyword.Unmarshal(m, b) +} +func (m *KeywordPlanNegativeKeyword) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanNegativeKeyword.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanNegativeKeyword) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanNegativeKeyword.Merge(dst, src) +} +func (m *KeywordPlanNegativeKeyword) XXX_Size() int { + return xxx_messageInfo_KeywordPlanNegativeKeyword.Size(m) +} +func (m *KeywordPlanNegativeKeyword) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanNegativeKeyword.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanNegativeKeyword proto.InternalMessageInfo + +func (m *KeywordPlanNegativeKeyword) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *KeywordPlanNegativeKeyword) GetKeywordPlanCampaign() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanCampaign + } + return nil +} + +func (m *KeywordPlanNegativeKeyword) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *KeywordPlanNegativeKeyword) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *KeywordPlanNegativeKeyword) GetMatchType() enums.KeywordMatchTypeEnum_KeywordMatchType { + if m != nil { + return 
m.MatchType + } + return enums.KeywordMatchTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*KeywordPlanNegativeKeyword)(nil), "google.ads.googleads.v1.resources.KeywordPlanNegativeKeyword") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_plan_negative_keyword.proto", fileDescriptor_keyword_plan_negative_keyword_5104ab403d28b07d) +} + +var fileDescriptor_keyword_plan_negative_keyword_5104ab403d28b07d = []byte{ + // 436 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xdd, 0x6a, 0x14, 0x31, + 0x18, 0x65, 0xa6, 0x55, 0x68, 0xfc, 0xb9, 0x18, 0x11, 0x86, 0xb5, 0xe8, 0x56, 0x29, 0x2c, 0x08, + 0x19, 0xa7, 0x4a, 0x2f, 0xc6, 0xab, 0xa9, 0x96, 0xa2, 0x62, 0x59, 0x56, 0xd9, 0x0b, 0x59, 0x18, + 0xd2, 0x49, 0x8c, 0xa1, 0x93, 0x1f, 0x92, 0xcc, 0xd6, 0x7d, 0x07, 0x9f, 0xc2, 0x4b, 0x1f, 0xc5, + 0x17, 0xf0, 0x1d, 0x7c, 0x0a, 0x99, 0x49, 0x32, 0x8b, 0x2c, 0xab, 0xbd, 0x3b, 0xf9, 0xbe, 0x73, + 0xce, 0xf7, 0x17, 0x70, 0x4a, 0xa5, 0xa4, 0x0d, 0xc9, 0x10, 0x36, 0x99, 0x83, 0x1d, 0x5a, 0xe6, + 0x99, 0x26, 0x46, 0xb6, 0xba, 0x26, 0x26, 0xbb, 0x24, 0xab, 0x2b, 0xa9, 0x71, 0xa5, 0x1a, 0x24, + 0x2a, 0x41, 0x28, 0xb2, 0x6c, 0x49, 0x2a, 0x1f, 0x85, 0x4a, 0x4b, 0x2b, 0x93, 0x03, 0xa7, 0x85, + 0x08, 0x1b, 0x38, 0xd8, 0xc0, 0x65, 0x0e, 0x07, 0x9b, 0xd1, 0xf1, 0xb6, 0x4a, 0x44, 0xb4, 0x7c, + 0x5d, 0x85, 0x23, 0x5b, 0x7f, 0xa9, 0xec, 0x4a, 0x11, 0x67, 0x3d, 0x7a, 0xe8, 0x75, 0xfd, 0xeb, + 0xa2, 0xfd, 0x9c, 0x5d, 0x69, 0xa4, 0x14, 0xd1, 0xc6, 0xe7, 0xf7, 0x83, 0xaf, 0x62, 0x19, 0x12, + 0x42, 0x5a, 0x64, 0x99, 0x14, 0x3e, 0xfb, 0xf8, 0x57, 0x0c, 0x46, 0xef, 0x9c, 0xf5, 0xb4, 0x41, + 0xe2, 0xdc, 0xb7, 0xef, 0x43, 0xc9, 0x13, 0x70, 0x27, 0x74, 0x58, 0x09, 0xc4, 0x49, 0x1a, 0x8d, + 0xa3, 0xc9, 0xde, 0xec, 0x76, 0x08, 0x9e, 0x23, 0x4e, 0x92, 0x29, 0xb8, 0xff, 0xd7, 0x0e, 0x6a, + 0xc4, 0x15, 0x62, 0x54, 0xa4, 0xf1, 0x38, 0x9a, 0xdc, 0x3a, 0xda, 0xf7, 0x13, 0xc3, 0xd0, 0x21, + 0xfc, 0x60, 0x35, 0x13, 0x74, 0x8e, 0x9a, 0x96, 0xcc, 0xee, 0x5d, 0xae, 0xab, 0xbf, 0xf2, 0xc2, + 0xe4, 0x29, 0x88, 0x19, 0x4e, 0x77, 0x7a, 0xf9, 0x83, 0x0d, 0xf9, 0x1b, 0x61, 0x8f, 0x5f, 0x38, + 0x75, 0xcc, 0x70, 0xf2, 0x0c, 0xec, 0x5a, 0xf2, 0xd5, 0xa6, 0xbb, 0xd7, 0xa8, 0xd6, 0x33, 0x93, + 0x1a, 0x80, 0xf5, 0x1a, 0xd3, 0x1b, 0xe3, 0x68, 0x72, 0xf7, 0xe8, 0x35, 0xdc, 0x76, 0xa2, 0x7e, + 0xff, 0xd0, 0x6f, 0xe4, 0x7d, 0xa7, 0xfb, 0xb8, 0x52, 0xe4, 0x54, 0xb4, 0x7c, 0x23, 0x38, 0xdb, + 0xe3, 0x01, 0x9e, 0x7c, 0x8b, 0xc1, 0x61, 0x2d, 0x39, 0xfc, 0xef, 0xe5, 0x4f, 0x1e, 0x6d, 0x3f, + 0xc0, 0xb4, 0x1b, 0x62, 0x1a, 0x7d, 0x7a, 0xeb, 0x5d, 0xa8, 0x6c, 0x90, 0xa0, 0x50, 0x6a, 0x9a, + 0x51, 0x22, 0xfa, 0x11, 0xc3, 0x67, 0x51, 0xcc, 0xfc, 0xe3, 0x97, 0xbe, 0x1c, 0xd0, 0xf7, 0x78, + 0xe7, 0xac, 0x2c, 0x7f, 0xc4, 0x07, 0x67, 0xce, 0xb2, 0xc4, 0x06, 0x3a, 0xd8, 0xa1, 0x79, 0x0e, + 0x67, 0x81, 0xf9, 0x33, 0x70, 0x16, 0x25, 0x36, 0x8b, 0x81, 0xb3, 0x98, 0xe7, 0x8b, 0x81, 0xf3, + 0x3b, 0x3e, 0x74, 0x89, 0xa2, 0x28, 0xb1, 0x29, 0x8a, 0x81, 0x55, 0x14, 0xf3, 0xbc, 0x28, 0x06, + 0xde, 0xc5, 0xcd, 0xbe, 0xd9, 0xe7, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x24, 0x2f, 0xaa, 0x5a, + 0x51, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_view.pb.go new file mode 100644 index 0000000..3ba40a6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/keyword_view.pb.go @@ -0,0 +1,92 
@@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/keyword_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A keyword view. +type KeywordView struct { + // The resource name of the keyword view. + // Keyword view resource names have the form: + // + // `customers/{customer_id}/keywordViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordView) Reset() { *m = KeywordView{} } +func (m *KeywordView) String() string { return proto.CompactTextString(m) } +func (*KeywordView) ProtoMessage() {} +func (*KeywordView) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_view_80d6b289c44fb170, []int{0} +} +func (m *KeywordView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordView.Unmarshal(m, b) +} +func (m *KeywordView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordView.Marshal(b, m, deterministic) +} +func (dst *KeywordView) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordView.Merge(dst, src) +} +func (m *KeywordView) XXX_Size() int { + return xxx_messageInfo_KeywordView.Size(m) +} +func (m *KeywordView) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordView.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordView proto.InternalMessageInfo + +func (m *KeywordView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*KeywordView)(nil), "google.ads.googleads.v1.resources.KeywordView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/keyword_view.proto", fileDescriptor_keyword_view_80d6b289c44fb170) +} + +var fileDescriptor_keyword_view_80d6b289c44fb170 = []byte{ + // 266 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x41, 0x4a, 0xc4, 0x30, + 0x14, 0x86, 0x69, 0x05, 0xc1, 0xaa, 0x20, 0xb3, 0x12, 0x71, 0xe1, 0x28, 0x03, 0xae, 0x12, 0xa2, + 0xae, 0xe2, 0x2a, 0xb3, 0x19, 0x50, 0x90, 0x61, 0x16, 0x5d, 0x48, 0x61, 0x88, 0x93, 0x10, 0x82, + 0xd3, 0xbc, 0x92, 0x57, 0x5b, 0xbc, 0x8e, 0x4b, 0x8f, 0xe2, 0x51, 0xbc, 0x83, 0x20, 0x6d, 0x4c, + 0x70, 0xa5, 0xbb, 0x9f, 0xe4, 0xfb, 0xbf, 0xf7, 0x92, 0xe2, 0xc6, 0x00, 0x98, 0xad, 0xa6, 0x52, + 0x21, 0x0d, 0x71, 0x48, 0x1d, 0xa3, 0x5e, 0x23, 0xbc, 0xf8, 0x8d, 0x46, 0xfa, 0xac, 0x5f, 0x7b, + 0xf0, 0x6a, 0xdd, 0x59, 0xdd, 0x93, 0xc6, 0x43, 0x0b, 0x93, 0x69, 0x40, 0x89, 0x54, 0x48, 0x52, + 0x8b, 0x74, 0x8c, 0xa4, 0xd6, 0xc9, 0x69, 0x14, 0x37, 0x96, 0x4a, 0xe7, 0xa0, 0x95, 0xad, 0x05, + 0x87, 0x41, 0x70, 0x7e, 0x55, 0xec, 0xdf, 0x07, 0x6d, 0x69, 0x75, 0x3f, 0xb9, 0x28, 0x0e, 0x63, + 
0x73, 0xed, 0x64, 0xad, 0x8f, 0xb3, 0xb3, 0xec, 0x72, 0x6f, 0x75, 0x10, 0x0f, 0x1f, 0x64, 0xad, + 0xe7, 0x5f, 0x59, 0x31, 0xdb, 0x40, 0x4d, 0xfe, 0x9d, 0x3d, 0x3f, 0xfa, 0xe5, 0x5e, 0x0e, 0xf3, + 0x96, 0xd9, 0xe3, 0xdd, 0x4f, 0xcd, 0xc0, 0x56, 0x3a, 0x43, 0xc0, 0x1b, 0x6a, 0xb4, 0x1b, 0xb7, + 0x89, 0x0f, 0x6f, 0x2c, 0xfe, 0xf1, 0x0f, 0xb7, 0x29, 0xbd, 0xe5, 0x3b, 0x0b, 0x21, 0xde, 0xf3, + 0xe9, 0x22, 0x28, 0x85, 0x42, 0x12, 0xe2, 0x90, 0x4a, 0x46, 0x56, 0x91, 0xfc, 0x88, 0x4c, 0x25, + 0x14, 0x56, 0x89, 0xa9, 0x4a, 0x56, 0x25, 0xe6, 0x33, 0x9f, 0x85, 0x0b, 0xce, 0x85, 0x42, 0xce, + 0x13, 0xc5, 0x79, 0xc9, 0x38, 0x4f, 0xdc, 0xd3, 0xee, 0xb8, 0xec, 0xf5, 0x77, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x68, 0xdd, 0x79, 0x36, 0xb3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/label.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/label.pb.go new file mode 100644 index 0000000..dc260db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/label.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/label.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A label. +type Label struct { + // Name of the resource. + // Label resource names have the form: + // `customers/{customer_id}/labels/{label_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Id of the label. Read only. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The name of the label. + // + // This field is required and should not be empty when creating a new label. + // + // The length of this string should be between 1 and 80, inclusive. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Status of the label. Read only. + Status enums.LabelStatusEnum_LabelStatus `protobuf:"varint,4,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.LabelStatusEnum_LabelStatus" json:"status,omitempty"` + // A type of label displaying text on a colored background. 
+ TextLabel *common.TextLabel `protobuf:"bytes,5,opt,name=text_label,json=textLabel,proto3" json:"text_label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Label) Reset() { *m = Label{} } +func (m *Label) String() string { return proto.CompactTextString(m) } +func (*Label) ProtoMessage() {} +func (*Label) Descriptor() ([]byte, []int) { + return fileDescriptor_label_8e99450ed5488248, []int{0} +} +func (m *Label) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Label.Unmarshal(m, b) +} +func (m *Label) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Label.Marshal(b, m, deterministic) +} +func (dst *Label) XXX_Merge(src proto.Message) { + xxx_messageInfo_Label.Merge(dst, src) +} +func (m *Label) XXX_Size() int { + return xxx_messageInfo_Label.Size(m) +} +func (m *Label) XXX_DiscardUnknown() { + xxx_messageInfo_Label.DiscardUnknown(m) +} + +var xxx_messageInfo_Label proto.InternalMessageInfo + +func (m *Label) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Label) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *Label) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *Label) GetStatus() enums.LabelStatusEnum_LabelStatus { + if m != nil { + return m.Status + } + return enums.LabelStatusEnum_UNSPECIFIED +} + +func (m *Label) GetTextLabel() *common.TextLabel { + if m != nil { + return m.TextLabel + } + return nil +} + +func init() { + proto.RegisterType((*Label)(nil), "google.ads.googleads.v1.resources.Label") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/label.proto", fileDescriptor_label_8e99450ed5488248) +} + +var fileDescriptor_label_8e99450ed5488248 = []byte{ + // 411 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xcb, 0x8a, 0xdb, 0x30, + 0x18, 0x85, 0xb1, 0x73, 0x81, 0xa8, 0x97, 0x85, 0x57, 0x26, 0x0d, 0x25, 0x69, 0x09, 0xa4, 0x94, + 0x4a, 0x71, 0x5a, 0xba, 0x50, 0x57, 0x0e, 0x94, 0xb4, 0xa5, 0x94, 0xe0, 0x14, 0x2f, 0x8a, 0x21, + 0x28, 0xb1, 0x6a, 0x0c, 0xb6, 0x64, 0x2c, 0x39, 0xcd, 0xf3, 0x74, 0xd7, 0x3e, 0xca, 0x3c, 0xca, + 0xbc, 0xc0, 0x6c, 0x07, 0x4b, 0x96, 0x27, 0x30, 0x78, 0x66, 0x77, 0x64, 0x7d, 0xe7, 0xe8, 0xbf, + 0x18, 0xbc, 0x4b, 0x38, 0x4f, 0x32, 0x8a, 0x48, 0x2c, 0x90, 0x96, 0xb5, 0x3a, 0x79, 0xa8, 0xa4, + 0x82, 0x57, 0xe5, 0x91, 0x0a, 0x94, 0x91, 0x03, 0xcd, 0x60, 0x51, 0x72, 0xc9, 0x9d, 0x99, 0x66, + 0x20, 0x89, 0x05, 0x6c, 0x71, 0x78, 0xf2, 0x60, 0x8b, 0x8f, 0x51, 0x57, 0xe2, 0x91, 0xe7, 0x39, + 0x67, 0x48, 0xd2, 0xb3, 0xdc, 0x5f, 0x64, 0x8e, 0x97, 0x5d, 0x06, 0xca, 0xaa, 0xbc, 0x79, 0x7e, + 0x2f, 0x24, 0x91, 0x95, 0x68, 0x1c, 0x2f, 0x1b, 0x87, 0x3a, 0x1d, 0xaa, 0xdf, 0xe8, 0x4f, 0x49, + 0x8a, 0x82, 0x96, 0xe6, 0x7e, 0x62, 0x12, 0x8b, 0x14, 0x11, 0xc6, 0xb8, 0x24, 0x32, 0xe5, 0xac, + 0xb9, 0x7d, 0xf5, 0xcf, 0x06, 0x83, 0xef, 0x75, 0xa8, 0xf3, 0x1a, 0x3c, 0x33, 0x75, 0xef, 0x19, + 0xc9, 0xa9, 0x6b, 0x4d, 0xad, 0xc5, 0x28, 0x78, 0x6a, 0x3e, 0xfe, 0x20, 0x39, 0x75, 0xde, 0x02, + 0x3b, 0x8d, 0x5d, 0x7b, 0x6a, 0x2d, 0x9e, 0xac, 0x5e, 0x34, 0x4d, 0x43, 0xf3, 0x32, 0xfc, 0xca, + 0xe4, 0xc7, 0x0f, 0x21, 0xc9, 0x2a, 0x1a, 0xd8, 0x69, 0xec, 0x2c, 0x41, 0x5f, 0x05, 0xf5, 0x14, + 0x3e, 0xb9, 0x87, 0xef, 0x64, 0x99, 0xb2, 0x44, 0xf3, 0x8a, 0x74, 0x02, 0x30, 0xd4, 0xbd, 0xb9, + 0xfd, 0xa9, 0xb5, 0x78, 0xbe, 0xc2, 0xb0, 
0x6b, 0xc4, 0x6a, 0x1c, 0x50, 0x55, 0xbe, 0x53, 0x8e, + 0xcf, 0xac, 0xca, 0x2f, 0xcf, 0x41, 0x93, 0xe4, 0x7c, 0x01, 0xe0, 0x6e, 0xca, 0xee, 0x40, 0xd5, + 0xf2, 0xa6, 0x33, 0x57, 0xef, 0x05, 0xfe, 0xa4, 0x67, 0xa9, 0xc2, 0x82, 0x91, 0x34, 0x72, 0x7d, + 0x63, 0x81, 0xf9, 0x91, 0xe7, 0xf0, 0xd1, 0xb5, 0xaf, 0x81, 0x32, 0x6c, 0xeb, 0x46, 0xb7, 0xd6, + 0xaf, 0x6f, 0x8d, 0x21, 0xe1, 0x19, 0x61, 0x09, 0xe4, 0x65, 0x82, 0x12, 0xca, 0xd4, 0x18, 0xcc, + 0x8e, 0x8b, 0x54, 0x3c, 0xf0, 0xd7, 0x7d, 0x6a, 0xd5, 0x5f, 0xbb, 0xb7, 0xf1, 0xfd, 0xff, 0xf6, + 0x6c, 0xa3, 0x23, 0xfd, 0x58, 0x40, 0x2d, 0x6b, 0x15, 0x7a, 0x30, 0x30, 0xe4, 0x95, 0x61, 0x22, + 0x3f, 0x16, 0x51, 0xcb, 0x44, 0xa1, 0x17, 0xb5, 0xcc, 0xb5, 0x3d, 0xd7, 0x17, 0x18, 0xfb, 0xb1, + 0xc0, 0xb8, 0xa5, 0x30, 0x0e, 0x3d, 0x8c, 0x5b, 0xee, 0x30, 0x54, 0xc5, 0xbe, 0xbf, 0x0d, 0x00, + 0x00, 0xff, 0xff, 0xb1, 0x81, 0x7c, 0x83, 0x21, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/landing_page_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/landing_page_view.pb.go new file mode 100644 index 0000000..09bfdbd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/landing_page_view.pb.go @@ -0,0 +1,109 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/landing_page_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A landing page view with metrics aggregated at the unexpanded final URL +// level. +type LandingPageView struct { + // The resource name of the landing page view. + // Landing page view resource names have the form: + // + // + // `customers/{customer_id}/landingPageViews/{unexpanded_final_url_fingerprint}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The advertiser-specified final URL. 
+ UnexpandedFinalUrl *wrappers.StringValue `protobuf:"bytes,2,opt,name=unexpanded_final_url,json=unexpandedFinalUrl,proto3" json:"unexpanded_final_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LandingPageView) Reset() { *m = LandingPageView{} } +func (m *LandingPageView) String() string { return proto.CompactTextString(m) } +func (*LandingPageView) ProtoMessage() {} +func (*LandingPageView) Descriptor() ([]byte, []int) { + return fileDescriptor_landing_page_view_64530fca7fef0d58, []int{0} +} +func (m *LandingPageView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LandingPageView.Unmarshal(m, b) +} +func (m *LandingPageView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LandingPageView.Marshal(b, m, deterministic) +} +func (dst *LandingPageView) XXX_Merge(src proto.Message) { + xxx_messageInfo_LandingPageView.Merge(dst, src) +} +func (m *LandingPageView) XXX_Size() int { + return xxx_messageInfo_LandingPageView.Size(m) +} +func (m *LandingPageView) XXX_DiscardUnknown() { + xxx_messageInfo_LandingPageView.DiscardUnknown(m) +} + +var xxx_messageInfo_LandingPageView proto.InternalMessageInfo + +func (m *LandingPageView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *LandingPageView) GetUnexpandedFinalUrl() *wrappers.StringValue { + if m != nil { + return m.UnexpandedFinalUrl + } + return nil +} + +func init() { + proto.RegisterType((*LandingPageView)(nil), "google.ads.googleads.v1.resources.LandingPageView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/landing_page_view.proto", fileDescriptor_landing_page_view_64530fca7fef0d58) +} + +var fileDescriptor_landing_page_view_64530fca7fef0d58 = []byte{ + // 340 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x41, 0x4b, 0xc3, 0x30, + 0x18, 0x86, 0x69, 0x05, 0xc1, 0xaa, 0x08, 0x65, 0x87, 0x31, 0x86, 0x6c, 0xca, 0x60, 0xa7, 0x94, + 0xea, 0xc9, 0x78, 0xea, 0x0e, 0x0e, 0x44, 0xc6, 0x98, 0xd8, 0x83, 0x14, 0xca, 0xb7, 0xe5, 0x5b, + 0x08, 0x74, 0x49, 0x49, 0xda, 0xcd, 0xab, 0x17, 0x7f, 0x88, 0x47, 0x7f, 0x8a, 0x3f, 0xc5, 0x5f, + 0x21, 0x5d, 0xdb, 0x08, 0x1e, 0xf4, 0xf6, 0x92, 0xef, 0x79, 0xdf, 0xef, 0x4d, 0xe2, 0xdd, 0x70, + 0xa5, 0x78, 0x86, 0x01, 0x30, 0x13, 0xd4, 0xb2, 0x52, 0xdb, 0x30, 0xd0, 0x68, 0x54, 0xa9, 0x57, + 0x68, 0x82, 0x0c, 0x24, 0x13, 0x92, 0xa7, 0x39, 0x70, 0x4c, 0xb7, 0x02, 0x77, 0x24, 0xd7, 0xaa, + 0x50, 0xfe, 0xb0, 0xe6, 0x09, 0x30, 0x43, 0xac, 0x95, 0x6c, 0x43, 0x62, 0xad, 0xbd, 0xf3, 0x26, + 0x7d, 0x6f, 0x58, 0x96, 0xeb, 0x60, 0xa7, 0x21, 0xcf, 0x51, 0x9b, 0x3a, 0xa2, 0xd7, 0x6f, 0xb7, + 0xe7, 0x22, 0x00, 0x29, 0x55, 0x01, 0x85, 0x50, 0xb2, 0x99, 0x5e, 0xbc, 0x39, 0xde, 0xd9, 0x43, + 0xbd, 0x7c, 0x0e, 0x1c, 0x63, 0x81, 0x3b, 0xff, 0xd2, 0x3b, 0x6d, 0xe3, 0x53, 0x09, 0x1b, 0xec, + 0x3a, 0x03, 0x67, 0x7c, 0xb4, 0x38, 0x69, 0x0f, 0x67, 0xb0, 0x41, 0x7f, 0xe6, 0x75, 0x4a, 0x89, + 0x2f, 0x39, 0x48, 0x86, 0x2c, 0x5d, 0x0b, 0x09, 0x59, 0x5a, 0xea, 0xac, 0xeb, 0x0e, 0x9c, 0xf1, + 0xf1, 0x55, 0xbf, 0x69, 0x4b, 0xda, 0x56, 0xe4, 0xb1, 0xd0, 0x42, 0xf2, 0x18, 0xb2, 0x12, 0x17, + 0xfe, 0x8f, 0xf3, 0xae, 0x32, 0x3e, 0xe9, 0x6c, 0xf2, 0xea, 0x7a, 0xa3, 0x95, 0xda, 0x90, 0x7f, + 0x2f, 0x3c, 0xe9, 0xfc, 0xea, 0x3b, 0xaf, 0x56, 0xcc, 0x9d, 0xe7, 0xfb, 0xc6, 0xca, 0x55, 0x06, + 0x92, 0x13, 0xa5, 0x79, 0xc0, 0x51, 0xee, 0x0b, 0xb4, 0xcf, 0x9e, 0x0b, 0xf3, 0xc7, 
0x2f, 0xdc, + 0x5a, 0xf5, 0xee, 0x1e, 0x4c, 0xa3, 0xe8, 0xc3, 0x1d, 0x4e, 0xeb, 0xc8, 0x88, 0x19, 0x52, 0xcb, + 0x4a, 0xc5, 0x21, 0x59, 0xb4, 0xe4, 0x67, 0xcb, 0x24, 0x11, 0x33, 0x89, 0x65, 0x92, 0x38, 0x4c, + 0x2c, 0xf3, 0xe5, 0x8e, 0xea, 0x01, 0xa5, 0x11, 0x33, 0x94, 0x5a, 0x8a, 0xd2, 0x38, 0xa4, 0xd4, + 0x72, 0xcb, 0xc3, 0x7d, 0xd9, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7c, 0xcf, 0xa0, 0x93, + 0x31, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/language_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/language_constant.pb.go new file mode 100644 index 0000000..17079b4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/language_constant.pb.go @@ -0,0 +1,137 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/language_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A language. +type LanguageConstant struct { + // The resource name of the language constant. + // Language constant resource names have the form: + // + // `languageConstants/{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the language constant. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The language code, e.g. "en_US", "en_AU", "es", "fr", etc. + Code *wrappers.StringValue `protobuf:"bytes,3,opt,name=code,proto3" json:"code,omitempty"` + // The full name of the language in English, e.g., "English (US)", "Spanish", + // etc. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // Whether the language is targetable. 
+ Targetable *wrappers.BoolValue `protobuf:"bytes,5,opt,name=targetable,proto3" json:"targetable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LanguageConstant) Reset() { *m = LanguageConstant{} } +func (m *LanguageConstant) String() string { return proto.CompactTextString(m) } +func (*LanguageConstant) ProtoMessage() {} +func (*LanguageConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_language_constant_7d2a7078a26d5ec1, []int{0} +} +func (m *LanguageConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LanguageConstant.Unmarshal(m, b) +} +func (m *LanguageConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LanguageConstant.Marshal(b, m, deterministic) +} +func (dst *LanguageConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_LanguageConstant.Merge(dst, src) +} +func (m *LanguageConstant) XXX_Size() int { + return xxx_messageInfo_LanguageConstant.Size(m) +} +func (m *LanguageConstant) XXX_DiscardUnknown() { + xxx_messageInfo_LanguageConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_LanguageConstant proto.InternalMessageInfo + +func (m *LanguageConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *LanguageConstant) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *LanguageConstant) GetCode() *wrappers.StringValue { + if m != nil { + return m.Code + } + return nil +} + +func (m *LanguageConstant) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *LanguageConstant) GetTargetable() *wrappers.BoolValue { + if m != nil { + return m.Targetable + } + return nil +} + +func init() { + proto.RegisterType((*LanguageConstant)(nil), "google.ads.googleads.v1.resources.LanguageConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/language_constant.proto", fileDescriptor_language_constant_7d2a7078a26d5ec1) +} + +var fileDescriptor_language_constant_7d2a7078a26d5ec1 = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xd1, 0x4a, 0xeb, 0x30, + 0x18, 0xc7, 0x69, 0xb6, 0x73, 0xe0, 0xe4, 0x9c, 0x03, 0x52, 0x10, 0xca, 0x1c, 0xb2, 0x29, 0x83, + 0x81, 0x90, 0x5a, 0x15, 0xc1, 0x78, 0xd5, 0x79, 0x31, 0x14, 0x91, 0x31, 0xa1, 0x17, 0x52, 0x18, + 0x59, 0x13, 0x43, 0xa1, 0x4b, 0x4a, 0x92, 0xce, 0x7b, 0xf1, 0x49, 0xbc, 0xf4, 0x51, 0x7c, 0x14, + 0x9f, 0x42, 0xda, 0xb4, 0x45, 0x1c, 0xa8, 0x77, 0x1f, 0xcd, 0xef, 0xf7, 0xff, 0x7f, 0x24, 0x85, + 0x67, 0x5c, 0x4a, 0x9e, 0x31, 0x9f, 0x50, 0xed, 0xdb, 0xb1, 0x9c, 0xd6, 0x81, 0xaf, 0x98, 0x96, + 0x85, 0x4a, 0x98, 0xf6, 0x33, 0x22, 0x78, 0x41, 0x38, 0x5b, 0x24, 0x52, 0x68, 0x43, 0x84, 0x41, + 0xb9, 0x92, 0x46, 0xba, 0x43, 0xcb, 0x23, 0x42, 0x35, 0x6a, 0x55, 0xb4, 0x0e, 0x50, 0xab, 0xf6, + 0x76, 0xeb, 0xf4, 0x4a, 0x58, 0x16, 0xf7, 0xfe, 0x83, 0x22, 0x79, 0xce, 0x94, 0xb6, 0x11, 0xbd, + 0x7e, 0xd3, 0x9e, 0xa7, 0x3e, 0x11, 0x42, 0x1a, 0x62, 0x52, 0x29, 0xea, 0xd3, 0xbd, 0x27, 0x00, + 0xb7, 0xae, 0xeb, 0xf2, 0x8b, 0xba, 0xdb, 0xdd, 0x87, 0xff, 0x9b, 0xfc, 0x85, 0x20, 0x2b, 0xe6, + 0x39, 0x03, 0x67, 0xfc, 0x67, 0xfe, 0xaf, 0xf9, 0x78, 0x43, 0x56, 0xcc, 0x3d, 0x80, 0x20, 0xa5, + 0x1e, 0x18, 0x38, 0xe3, 0xbf, 0x47, 0x3b, 0xf5, 0x72, 0xa8, 0x59, 0x02, 0x5d, 0x0a, 0x73, 0x7a, + 0x12, 0x91, 0xac, 0x60, 0x73, 0x90, 0x52, 0xf7, 0x10, 0x76, 0x13, 0x49, 0x99, 0xd7, 0xa9, 
0xf0, + 0xfe, 0x06, 0x7e, 0x6b, 0x54, 0x2a, 0xb8, 0xe5, 0x2b, 0xb2, 0x34, 0xaa, 0xea, 0xee, 0x4f, 0x8c, + 0x92, 0x74, 0x31, 0x84, 0x86, 0x28, 0xce, 0x0c, 0x59, 0x66, 0xcc, 0xfb, 0x55, 0x79, 0xbd, 0x0d, + 0x6f, 0x22, 0x65, 0x66, 0xad, 0x0f, 0xf4, 0xe4, 0x11, 0xc0, 0x51, 0x22, 0x57, 0xe8, 0xdb, 0xeb, + 0x9e, 0x6c, 0x7f, 0xbe, 0xad, 0x59, 0x99, 0x3c, 0x73, 0xee, 0xae, 0x6a, 0x97, 0xcb, 0xf2, 0x31, + 0x91, 0x54, 0xdc, 0xe7, 0x4c, 0x54, 0xbd, 0xcd, 0xab, 0xe7, 0xa9, 0xfe, 0xe2, 0x27, 0x38, 0x6f, + 0xa7, 0x67, 0xd0, 0x99, 0x86, 0xe1, 0x0b, 0x18, 0x4e, 0x6d, 0x64, 0x48, 0x35, 0xb2, 0x63, 0x39, + 0x45, 0x01, 0x9a, 0x37, 0xe4, 0x6b, 0xc3, 0xc4, 0x21, 0xd5, 0x71, 0xcb, 0xc4, 0x51, 0x10, 0xb7, + 0xcc, 0x1b, 0x18, 0xd9, 0x03, 0x8c, 0x43, 0xaa, 0x31, 0x6e, 0x29, 0x8c, 0xa3, 0x00, 0xe3, 0x96, + 0x5b, 0xfe, 0xae, 0x96, 0x3d, 0x7e, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x54, 0x30, 0x09, 0x79, 0xb0, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/location_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/location_view.pb.go new file mode 100644 index 0000000..5f477b1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/location_view.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/location_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A location view summarizes the performance of campaigns by +// Location criteria. +type LocationView struct { + // The resource name of the location view. 
+ // Location view resource names have the form: + // + // `customers/{customer_id}/locationViews/{campaign_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationView) Reset() { *m = LocationView{} } +func (m *LocationView) String() string { return proto.CompactTextString(m) } +func (*LocationView) ProtoMessage() {} +func (*LocationView) Descriptor() ([]byte, []int) { + return fileDescriptor_location_view_b17bd9cfa89e2be7, []int{0} +} +func (m *LocationView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationView.Unmarshal(m, b) +} +func (m *LocationView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationView.Marshal(b, m, deterministic) +} +func (dst *LocationView) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationView.Merge(dst, src) +} +func (m *LocationView) XXX_Size() int { + return xxx_messageInfo_LocationView.Size(m) +} +func (m *LocationView) XXX_DiscardUnknown() { + xxx_messageInfo_LocationView.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationView proto.InternalMessageInfo + +func (m *LocationView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*LocationView)(nil), "google.ads.googleads.v1.resources.LocationView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/location_view.proto", fileDescriptor_location_view_b17bd9cfa89e2be7) +} + +var fileDescriptor_location_view_b17bd9cfa89e2be7 = []byte{ + // 264 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4d, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xa2, 0xd4, + 0xe2, 0xfc, 0xd2, 0xa2, 0xe4, 0xd4, 0x62, 0xfd, 0x9c, 0xfc, 0xe4, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, + 0xf8, 0xb2, 0xcc, 0xd4, 0x72, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x45, 0x88, 0x5a, 0xbd, + 0xc4, 0x94, 0x62, 0x3d, 0xb8, 0x36, 0xbd, 0x32, 0x43, 0x3d, 0xb8, 0x36, 0x29, 0x19, 0x98, 0xc9, + 0x05, 0x99, 0xfa, 0x89, 0x79, 0x79, 0xf9, 0x25, 0x60, 0x43, 0x8a, 0x21, 0x06, 0x28, 0x19, 0x73, + 0xf1, 0xf8, 0x40, 0xcd, 0x0d, 0xcb, 0x4c, 0x2d, 0x17, 0x52, 0xe6, 0xe2, 0x85, 0x69, 0x8d, 0xcf, + 0x4b, 0xcc, 0x4d, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0c, 0xe2, 0x81, 0x09, 0xfa, 0x25, 0xe6, + 0xa6, 0x3a, 0xfd, 0x63, 0xe4, 0x52, 0x4d, 0xce, 0xcf, 0xd5, 0x23, 0x68, 0xb9, 0x93, 0x20, 0xb2, + 0xe1, 0x01, 0x20, 0x1b, 0x03, 0x18, 0xa3, 0xbc, 0xa0, 0xfa, 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, + 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0xee, 0x81, 0xf9, 0xbd, 0x20, 0xb3, 0x18, + 0x4f, 0x50, 0x58, 0xc3, 0x59, 0x8b, 0x98, 0x98, 0xdd, 0x1d, 0x1d, 0x57, 0x31, 0x29, 0xba, 0x43, + 0x8c, 0x74, 0x4c, 0x29, 0xd6, 0x83, 0x30, 0x41, 0xac, 0x30, 0x43, 0xbd, 0x20, 0x98, 0xca, 0x53, + 0x30, 0x35, 0x31, 0x8e, 0x29, 0xc5, 0x31, 0x70, 0x35, 0x31, 0x61, 0x86, 0x31, 0x70, 0x35, 0xaf, + 0x98, 0x54, 0x21, 0x12, 0x56, 0x56, 0x8e, 0x29, 0xc5, 0x56, 0x56, 0x70, 0x55, 0x56, 0x56, 0x61, + 0x86, 0x56, 0x56, 0x70, 0x75, 0x49, 0x6c, 0x60, 0xc7, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x98, 0x19, 0x68, 0x8b, 0xb6, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/managed_placement_view.pb.go 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/managed_placement_view.pb.go new file mode 100644 index 0000000..b4436ca --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/managed_placement_view.pb.go @@ -0,0 +1,94 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/managed_placement_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A managed placement view. +type ManagedPlacementView struct { + // The resource name of the Managed Placement view. + // Managed placement view resource names have the form: + // + // + // `customers/{customer_id}/managedPlacementViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedPlacementView) Reset() { *m = ManagedPlacementView{} } +func (m *ManagedPlacementView) String() string { return proto.CompactTextString(m) } +func (*ManagedPlacementView) ProtoMessage() {} +func (*ManagedPlacementView) Descriptor() ([]byte, []int) { + return fileDescriptor_managed_placement_view_ec6979a30e876ecb, []int{0} +} +func (m *ManagedPlacementView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedPlacementView.Unmarshal(m, b) +} +func (m *ManagedPlacementView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedPlacementView.Marshal(b, m, deterministic) +} +func (dst *ManagedPlacementView) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedPlacementView.Merge(dst, src) +} +func (m *ManagedPlacementView) XXX_Size() int { + return xxx_messageInfo_ManagedPlacementView.Size(m) +} +func (m *ManagedPlacementView) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedPlacementView.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedPlacementView proto.InternalMessageInfo + +func (m *ManagedPlacementView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*ManagedPlacementView)(nil), "google.ads.googleads.v1.resources.ManagedPlacementView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/managed_placement_view.proto", fileDescriptor_managed_placement_view_ec6979a30e876ecb) +} + +var fileDescriptor_managed_placement_view_ec6979a30e876ecb = []byte{ + // 275 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x4f, 0x4a, 0xc4, 0x30, + 0x14, 0xc6, 0x99, 0x0a, 0x82, 0x45, 0x37, 0x83, 0x0b, 0x15, 0x17, 0x8e, 0x32, 0xe0, 0x2a, 0xa1, + 0xb8, 0xcb, 0x80, 0xd0, 0xd9, 0x0c, 0x08, 0x4a, 0x99, 0x45, 0x17, 0x52, 0x28, 0xcf, 0xe6, 0x11, + 0x02, 0x4d, 0x5e, 0x69, 0x6a, 
0xe7, 0x06, 0x1e, 0xc4, 0xa5, 0x47, 0xf1, 0x28, 0x9e, 0x42, 0x3a, + 0x31, 0x59, 0x89, 0xee, 0x3e, 0x92, 0xdf, 0xf7, 0x87, 0x97, 0xde, 0x2b, 0x22, 0xd5, 0x22, 0x07, + 0xe9, 0xb8, 0x97, 0x93, 0x1a, 0x33, 0xde, 0xa3, 0xa3, 0xd7, 0xbe, 0x41, 0xc7, 0x0d, 0x58, 0x50, + 0x28, 0xeb, 0xae, 0x85, 0x06, 0x0d, 0xda, 0xa1, 0x1e, 0x35, 0xee, 0x58, 0xd7, 0xd3, 0x40, 0xf3, + 0x85, 0x37, 0x31, 0x90, 0x8e, 0x45, 0x3f, 0x1b, 0x33, 0x16, 0xfd, 0x17, 0x97, 0xa1, 0xa2, 0xd3, + 0x1c, 0xac, 0xa5, 0x01, 0x06, 0x4d, 0xd6, 0xf9, 0x80, 0xeb, 0x55, 0x7a, 0xfa, 0xe8, 0x0b, 0x8a, + 0x90, 0x5f, 0x6a, 0xdc, 0xcd, 0x6f, 0xd2, 0x93, 0x10, 0x51, 0x5b, 0x30, 0x78, 0x36, 0xbb, 0x9a, + 0xdd, 0x1e, 0x6d, 0x8f, 0xc3, 0xe3, 0x13, 0x18, 0x5c, 0xbf, 0x25, 0xe9, 0xb2, 0x21, 0xc3, 0xfe, + 0x1d, 0xb1, 0x3e, 0xff, 0xad, 0xa4, 0x98, 0x16, 0x14, 0xb3, 0xe7, 0x87, 0x1f, 0xbf, 0xa2, 0x16, + 0xac, 0x62, 0xd4, 0x2b, 0xae, 0xd0, 0xee, 0xf7, 0x85, 0xa3, 0x74, 0xda, 0xfd, 0x71, 0xa3, 0x55, + 0x54, 0xef, 0xc9, 0xc1, 0x26, 0xcf, 0x3f, 0x92, 0xc5, 0xc6, 0x47, 0xe6, 0xd2, 0x31, 0x2f, 0x27, + 0x55, 0x66, 0x6c, 0x1b, 0xc8, 0xcf, 0xc0, 0x54, 0xb9, 0x74, 0x55, 0x64, 0xaa, 0x32, 0xab, 0x22, + 0xf3, 0x95, 0x2c, 0xfd, 0x87, 0x10, 0xb9, 0x74, 0x42, 0x44, 0x4a, 0x88, 0x32, 0x13, 0x22, 0x72, + 0x2f, 0x87, 0xfb, 0xb1, 0x77, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x9c, 0xab, 0x80, 0xb1, 0xcf, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/media_file.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/media_file.pb.go new file mode 100644 index 0000000..e4defc6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/media_file.pb.go @@ -0,0 +1,553 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/media_file.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A media file. +type MediaFile struct { + // The resource name of the media file. + // Media file resource names have the form: + // + // `customers/{customer_id}/mediaFiles/{media_file_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the media file. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Type of the media file. + Type enums.MediaTypeEnum_MediaType `protobuf:"varint,5,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.MediaTypeEnum_MediaType" json:"type,omitempty"` + // The mime type of the media file. 
+ MimeType enums.MimeTypeEnum_MimeType `protobuf:"varint,6,opt,name=mime_type,json=mimeType,proto3,enum=google.ads.googleads.v1.enums.MimeTypeEnum_MimeType" json:"mime_type,omitempty"` + // The URL of where the original media file was downloaded from (or a file + // name). + SourceUrl *wrappers.StringValue `protobuf:"bytes,7,opt,name=source_url,json=sourceUrl,proto3" json:"source_url,omitempty"` + // The name of the media file. The name can be used by clients to help + // identify previously uploaded media. + Name *wrappers.StringValue `protobuf:"bytes,8,opt,name=name,proto3" json:"name,omitempty"` + // The size of the media file in bytes. + FileSize *wrappers.Int64Value `protobuf:"bytes,9,opt,name=file_size,json=fileSize,proto3" json:"file_size,omitempty"` + // The specific type of the media file. + // + // Types that are valid to be assigned to Mediatype: + // *MediaFile_Image + // *MediaFile_MediaBundle + // *MediaFile_Audio + // *MediaFile_Video + Mediatype isMediaFile_Mediatype `protobuf_oneof:"mediatype"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaFile) Reset() { *m = MediaFile{} } +func (m *MediaFile) String() string { return proto.CompactTextString(m) } +func (*MediaFile) ProtoMessage() {} +func (*MediaFile) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_4cd852e94bdefecd, []int{0} +} +func (m *MediaFile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaFile.Unmarshal(m, b) +} +func (m *MediaFile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaFile.Marshal(b, m, deterministic) +} +func (dst *MediaFile) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaFile.Merge(dst, src) +} +func (m *MediaFile) XXX_Size() int { + return xxx_messageInfo_MediaFile.Size(m) +} +func (m *MediaFile) XXX_DiscardUnknown() { + xxx_messageInfo_MediaFile.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaFile proto.InternalMessageInfo + +func (m *MediaFile) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *MediaFile) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *MediaFile) GetType() enums.MediaTypeEnum_MediaType { + if m != nil { + return m.Type + } + return enums.MediaTypeEnum_UNSPECIFIED +} + +func (m *MediaFile) GetMimeType() enums.MimeTypeEnum_MimeType { + if m != nil { + return m.MimeType + } + return enums.MimeTypeEnum_UNSPECIFIED +} + +func (m *MediaFile) GetSourceUrl() *wrappers.StringValue { + if m != nil { + return m.SourceUrl + } + return nil +} + +func (m *MediaFile) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *MediaFile) GetFileSize() *wrappers.Int64Value { + if m != nil { + return m.FileSize + } + return nil +} + +type isMediaFile_Mediatype interface { + isMediaFile_Mediatype() +} + +type MediaFile_Image struct { + Image *MediaImage `protobuf:"bytes,3,opt,name=image,proto3,oneof"` +} + +type MediaFile_MediaBundle struct { + MediaBundle *MediaBundle `protobuf:"bytes,4,opt,name=media_bundle,json=mediaBundle,proto3,oneof"` +} + +type MediaFile_Audio struct { + Audio *MediaAudio `protobuf:"bytes,10,opt,name=audio,proto3,oneof"` +} + +type MediaFile_Video struct { + Video *MediaVideo `protobuf:"bytes,11,opt,name=video,proto3,oneof"` +} + +func (*MediaFile_Image) isMediaFile_Mediatype() {} + +func (*MediaFile_MediaBundle) isMediaFile_Mediatype() {} + +func (*MediaFile_Audio) 
isMediaFile_Mediatype() {} + +func (*MediaFile_Video) isMediaFile_Mediatype() {} + +func (m *MediaFile) GetMediatype() isMediaFile_Mediatype { + if m != nil { + return m.Mediatype + } + return nil +} + +func (m *MediaFile) GetImage() *MediaImage { + if x, ok := m.GetMediatype().(*MediaFile_Image); ok { + return x.Image + } + return nil +} + +func (m *MediaFile) GetMediaBundle() *MediaBundle { + if x, ok := m.GetMediatype().(*MediaFile_MediaBundle); ok { + return x.MediaBundle + } + return nil +} + +func (m *MediaFile) GetAudio() *MediaAudio { + if x, ok := m.GetMediatype().(*MediaFile_Audio); ok { + return x.Audio + } + return nil +} + +func (m *MediaFile) GetVideo() *MediaVideo { + if x, ok := m.GetMediatype().(*MediaFile_Video); ok { + return x.Video + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*MediaFile) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MediaFile_OneofMarshaler, _MediaFile_OneofUnmarshaler, _MediaFile_OneofSizer, []interface{}{ + (*MediaFile_Image)(nil), + (*MediaFile_MediaBundle)(nil), + (*MediaFile_Audio)(nil), + (*MediaFile_Video)(nil), + } +} + +func _MediaFile_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MediaFile) + // mediatype + switch x := m.Mediatype.(type) { + case *MediaFile_Image: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Image); err != nil { + return err + } + case *MediaFile_MediaBundle: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaBundle); err != nil { + return err + } + case *MediaFile_Audio: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Audio); err != nil { + return err + } + case *MediaFile_Video: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Video); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MediaFile.Mediatype has unexpected type %T", x) + } + return nil +} + +func _MediaFile_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MediaFile) + switch tag { + case 3: // mediatype.image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MediaImage) + err := b.DecodeMessage(msg) + m.Mediatype = &MediaFile_Image{msg} + return true, err + case 4: // mediatype.media_bundle + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MediaBundle) + err := b.DecodeMessage(msg) + m.Mediatype = &MediaFile_MediaBundle{msg} + return true, err + case 10: // mediatype.audio + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MediaAudio) + err := b.DecodeMessage(msg) + m.Mediatype = &MediaFile_Audio{msg} + return true, err + case 11: // mediatype.video + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MediaVideo) + err := b.DecodeMessage(msg) + m.Mediatype = &MediaFile_Video{msg} + return true, err + default: + return false, nil + } +} + +func _MediaFile_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MediaFile) + // mediatype + switch x := m.Mediatype.(type) { + case *MediaFile_Image: + s := proto.Size(x.Image) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MediaFile_MediaBundle: + s := proto.Size(x.MediaBundle) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += 
s + case *MediaFile_Audio: + s := proto.Size(x.Audio) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MediaFile_Video: + s := proto.Size(x.Video) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Encapsulates an Image. +type MediaImage struct { + // Raw image data. + Data *wrappers.BytesValue `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaImage) Reset() { *m = MediaImage{} } +func (m *MediaImage) String() string { return proto.CompactTextString(m) } +func (*MediaImage) ProtoMessage() {} +func (*MediaImage) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_4cd852e94bdefecd, []int{1} +} +func (m *MediaImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaImage.Unmarshal(m, b) +} +func (m *MediaImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaImage.Marshal(b, m, deterministic) +} +func (dst *MediaImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaImage.Merge(dst, src) +} +func (m *MediaImage) XXX_Size() int { + return xxx_messageInfo_MediaImage.Size(m) +} +func (m *MediaImage) XXX_DiscardUnknown() { + xxx_messageInfo_MediaImage.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaImage proto.InternalMessageInfo + +func (m *MediaImage) GetData() *wrappers.BytesValue { + if m != nil { + return m.Data + } + return nil +} + +// Represents a ZIP archive media the content of which contains HTML5 assets. +type MediaBundle struct { + // Raw zipped data. + Data *wrappers.BytesValue `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaBundle) Reset() { *m = MediaBundle{} } +func (m *MediaBundle) String() string { return proto.CompactTextString(m) } +func (*MediaBundle) ProtoMessage() {} +func (*MediaBundle) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_4cd852e94bdefecd, []int{2} +} +func (m *MediaBundle) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaBundle.Unmarshal(m, b) +} +func (m *MediaBundle) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaBundle.Marshal(b, m, deterministic) +} +func (dst *MediaBundle) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaBundle.Merge(dst, src) +} +func (m *MediaBundle) XXX_Size() int { + return xxx_messageInfo_MediaBundle.Size(m) +} +func (m *MediaBundle) XXX_DiscardUnknown() { + xxx_messageInfo_MediaBundle.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaBundle proto.InternalMessageInfo + +func (m *MediaBundle) GetData() *wrappers.BytesValue { + if m != nil { + return m.Data + } + return nil +} + +// Encapsulates an Audio. +type MediaAudio struct { + // The duration of the Audio in milliseconds. 
+ AdDurationMillis *wrappers.Int64Value `protobuf:"bytes,1,opt,name=ad_duration_millis,json=adDurationMillis,proto3" json:"ad_duration_millis,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaAudio) Reset() { *m = MediaAudio{} } +func (m *MediaAudio) String() string { return proto.CompactTextString(m) } +func (*MediaAudio) ProtoMessage() {} +func (*MediaAudio) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_4cd852e94bdefecd, []int{3} +} +func (m *MediaAudio) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaAudio.Unmarshal(m, b) +} +func (m *MediaAudio) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaAudio.Marshal(b, m, deterministic) +} +func (dst *MediaAudio) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaAudio.Merge(dst, src) +} +func (m *MediaAudio) XXX_Size() int { + return xxx_messageInfo_MediaAudio.Size(m) +} +func (m *MediaAudio) XXX_DiscardUnknown() { + xxx_messageInfo_MediaAudio.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaAudio proto.InternalMessageInfo + +func (m *MediaAudio) GetAdDurationMillis() *wrappers.Int64Value { + if m != nil { + return m.AdDurationMillis + } + return nil +} + +// Encapsulates a Video. +type MediaVideo struct { + // The duration of the Video in milliseconds. + AdDurationMillis *wrappers.Int64Value `protobuf:"bytes,1,opt,name=ad_duration_millis,json=adDurationMillis,proto3" json:"ad_duration_millis,omitempty"` + // The YouTube video ID (as seen in YouTube URLs). + YoutubeVideoId *wrappers.StringValue `protobuf:"bytes,2,opt,name=youtube_video_id,json=youtubeVideoId,proto3" json:"youtube_video_id,omitempty"` + // The Advertising Digital Identification code for this video, as defined by + // the American Association of Advertising Agencies, used mainly for + // television commercials. + AdvertisingIdCode *wrappers.StringValue `protobuf:"bytes,3,opt,name=advertising_id_code,json=advertisingIdCode,proto3" json:"advertising_id_code,omitempty"` + // The Industry Standard Commercial Identifier code for this video, used + // mainly for television commercials. 
+ IsciCode *wrappers.StringValue `protobuf:"bytes,4,opt,name=isci_code,json=isciCode,proto3" json:"isci_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaVideo) Reset() { *m = MediaVideo{} } +func (m *MediaVideo) String() string { return proto.CompactTextString(m) } +func (*MediaVideo) ProtoMessage() {} +func (*MediaVideo) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_4cd852e94bdefecd, []int{4} +} +func (m *MediaVideo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaVideo.Unmarshal(m, b) +} +func (m *MediaVideo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaVideo.Marshal(b, m, deterministic) +} +func (dst *MediaVideo) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaVideo.Merge(dst, src) +} +func (m *MediaVideo) XXX_Size() int { + return xxx_messageInfo_MediaVideo.Size(m) +} +func (m *MediaVideo) XXX_DiscardUnknown() { + xxx_messageInfo_MediaVideo.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaVideo proto.InternalMessageInfo + +func (m *MediaVideo) GetAdDurationMillis() *wrappers.Int64Value { + if m != nil { + return m.AdDurationMillis + } + return nil +} + +func (m *MediaVideo) GetYoutubeVideoId() *wrappers.StringValue { + if m != nil { + return m.YoutubeVideoId + } + return nil +} + +func (m *MediaVideo) GetAdvertisingIdCode() *wrappers.StringValue { + if m != nil { + return m.AdvertisingIdCode + } + return nil +} + +func (m *MediaVideo) GetIsciCode() *wrappers.StringValue { + if m != nil { + return m.IsciCode + } + return nil +} + +func init() { + proto.RegisterType((*MediaFile)(nil), "google.ads.googleads.v1.resources.MediaFile") + proto.RegisterType((*MediaImage)(nil), "google.ads.googleads.v1.resources.MediaImage") + proto.RegisterType((*MediaBundle)(nil), "google.ads.googleads.v1.resources.MediaBundle") + proto.RegisterType((*MediaAudio)(nil), "google.ads.googleads.v1.resources.MediaAudio") + proto.RegisterType((*MediaVideo)(nil), "google.ads.googleads.v1.resources.MediaVideo") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/media_file.proto", fileDescriptor_media_file_4cd852e94bdefecd) +} + +var fileDescriptor_media_file_4cd852e94bdefecd = []byte{ + // 683 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0xdf, 0x6a, 0xd4, 0x4c, + 0x18, 0xc6, 0xbb, 0xe9, 0xb6, 0xdf, 0x66, 0xb6, 0x5f, 0xa9, 0xe3, 0x49, 0xa8, 0x45, 0xda, 0x95, + 0x42, 0x41, 0x3a, 0x71, 0x6b, 0x29, 0x9a, 0xa2, 0x90, 0xd5, 0xfe, 0xd9, 0x62, 0xa5, 0xa6, 0xba, + 0x82, 0x2c, 0x84, 0xd9, 0x9d, 0x69, 0x18, 0x48, 0x32, 0x21, 0x93, 0xac, 0x6c, 0x2f, 0xc7, 0x23, + 0xf1, 0x52, 0xbc, 0x06, 0xaf, 0xc0, 0x5b, 0xf0, 0x44, 0x66, 0x32, 0xc9, 0x2e, 0x68, 0xbb, 0x5d, + 0xf0, 0xec, 0x9d, 0xe6, 0x79, 0x7e, 0xf3, 0x3e, 0x6f, 0xdf, 0x64, 0xc1, 0x5e, 0xc0, 0x79, 0x10, + 0x52, 0x1b, 0x13, 0x61, 0x17, 0xa5, 0xac, 0x46, 0x6d, 0x3b, 0xa5, 0x82, 0xe7, 0xe9, 0x90, 0x0a, + 0x3b, 0xa2, 0x84, 0x61, 0xff, 0x8a, 0x85, 0x14, 0x25, 0x29, 0xcf, 0x38, 0xdc, 0x2a, 0x84, 0x08, + 0x13, 0x81, 0x2a, 0x0f, 0x1a, 0xb5, 0x51, 0xe5, 0x59, 0x47, 0x37, 0x61, 0x69, 0x9c, 0x47, 0x25, + 0x32, 0x1b, 0x27, 0x1a, 0xb9, 0xbe, 0x3b, 0x43, 0xcf, 0x22, 0x3a, 0x2d, 0x7f, 0xa8, 0xe5, 0xea, + 0x34, 0xc8, 0xaf, 0xec, 0xcf, 0x29, 0x4e, 0x12, 0x9a, 0x0a, 0xfd, 0x7c, 0xa3, 0xc4, 0x25, 0xcc, + 0xc6, 0x71, 0xcc, 0x33, 0x9c, 0x31, 0x1e, 0xeb, 0xa7, 0xad, 0x1f, 0x4b, 0xc0, 0x3c, 0x97, 0x1d, + 0x1c, 0xb3, 0x90, 0xc2, 
0x47, 0xe0, 0xff, 0xb2, 0x6f, 0x3f, 0xc6, 0x11, 0xb5, 0x6a, 0x9b, 0xb5, + 0x1d, 0xd3, 0x5b, 0x29, 0xff, 0xf8, 0x16, 0x47, 0x14, 0x3e, 0x06, 0x06, 0x23, 0x96, 0xb1, 0x59, + 0xdb, 0x69, 0xee, 0x3d, 0xd0, 0xe1, 0x50, 0x79, 0x3b, 0xea, 0xc6, 0xd9, 0xc1, 0x7e, 0x0f, 0x87, + 0x39, 0xf5, 0x0c, 0x46, 0xe0, 0x19, 0xa8, 0xcb, 0x5e, 0xad, 0xa5, 0xcd, 0xda, 0xce, 0xea, 0xde, + 0x01, 0xba, 0x69, 0x5c, 0x2a, 0x1b, 0x52, 0x9d, 0xbc, 0x1f, 0x27, 0xf4, 0x28, 0xce, 0xa3, 0xc9, + 0xc9, 0x53, 0x0c, 0xf8, 0x0e, 0x98, 0x55, 0x78, 0x6b, 0x59, 0x01, 0xf7, 0x67, 0x01, 0x59, 0x44, + 0x27, 0x3c, 0x7d, 0xf0, 0x1a, 0x91, 0xae, 0xe0, 0x21, 0x00, 0x3a, 0x6e, 0x9e, 0x86, 0xd6, 0x7f, + 0x2a, 0xd3, 0xc6, 0x1f, 0x99, 0x2e, 0xb3, 0x94, 0xc5, 0x41, 0x11, 0xca, 0x2c, 0xf4, 0x1f, 0xd2, + 0x10, 0x3e, 0x01, 0x75, 0x35, 0xa4, 0xc6, 0x1d, 0x6c, 0x4a, 0x09, 0x9f, 0x01, 0x53, 0xee, 0x8e, + 0x2f, 0xd8, 0x35, 0xb5, 0xcc, 0xd9, 0x13, 0x6c, 0x48, 0xf5, 0x25, 0xbb, 0xa6, 0xf0, 0x08, 0x2c, + 0xb1, 0x08, 0x07, 0xd4, 0x5a, 0x54, 0xae, 0x5d, 0x34, 0x73, 0xef, 0x8a, 0xf1, 0x75, 0xa5, 0xe9, + 0x74, 0xc1, 0x2b, 0xdc, 0xf0, 0x12, 0xac, 0x14, 0xfb, 0x36, 0xc8, 0x63, 0x12, 0x52, 0xab, 0xae, + 0x68, 0xe8, 0xae, 0xb4, 0x8e, 0x72, 0x9d, 0x2e, 0x78, 0xcd, 0x68, 0x72, 0x94, 0xbd, 0xe1, 0x9c, + 0x30, 0x6e, 0x81, 0xf9, 0x7a, 0x73, 0xa5, 0x49, 0xf6, 0xa6, 0xdc, 0x12, 0x33, 0x62, 0x84, 0x72, + 0xab, 0x39, 0x1f, 0xa6, 0x27, 0x4d, 0x12, 0xa3, 0xdc, 0x9d, 0x26, 0x30, 0x55, 0x73, 0x72, 0x4b, + 0x5a, 0x2f, 0x00, 0x98, 0x8c, 0x01, 0xda, 0xa0, 0x4e, 0x70, 0x86, 0xd5, 0x56, 0xff, 0x6d, 0xf2, + 0x9d, 0x71, 0x46, 0x85, 0xfe, 0x7f, 0x49, 0x61, 0xeb, 0x25, 0x68, 0x4e, 0xe5, 0x9e, 0xdf, 0xff, + 0x51, 0x5f, 0xaf, 0x92, 0xc2, 0x2e, 0x80, 0x98, 0xf8, 0x24, 0x4f, 0xd5, 0x1b, 0xe8, 0x47, 0x2c, + 0x0c, 0x99, 0xb8, 0x11, 0x36, 0xb5, 0x06, 0x6b, 0x98, 0xbc, 0xd6, 0xae, 0x73, 0x65, 0x6a, 0x7d, + 0x35, 0x34, 0x59, 0x85, 0xff, 0x87, 0x64, 0x78, 0x0c, 0xd6, 0xc6, 0x3c, 0xcf, 0xf2, 0x01, 0xf5, + 0xd5, 0x3c, 0xfd, 0xea, 0x5d, 0xbf, 0x7d, 0xc1, 0x57, 0xb5, 0x4b, 0x35, 0xd4, 0x25, 0xf0, 0x0d, + 0xb8, 0x8f, 0xc9, 0x88, 0xa6, 0x19, 0x13, 0x2c, 0x0e, 0x7c, 0x46, 0xfc, 0x21, 0x27, 0xe5, 0xfa, + 0xde, 0x8e, 0xba, 0x37, 0x65, 0xec, 0x92, 0x57, 0x9c, 0x50, 0xf8, 0x1c, 0x98, 0x4c, 0x0c, 0x59, + 0xc1, 0xa8, 0xdf, 0x81, 0xd1, 0x90, 0x72, 0x69, 0xed, 0xfc, 0xaa, 0x81, 0xed, 0x21, 0x8f, 0x66, + 0x6f, 0x53, 0x67, 0xb5, 0xfa, 0x10, 0x5e, 0x48, 0xe4, 0x45, 0xed, 0xd3, 0x99, 0x36, 0x05, 0x3c, + 0xc4, 0x71, 0x80, 0x78, 0x1a, 0xd8, 0x01, 0x8d, 0xd5, 0x85, 0xe5, 0xa7, 0x39, 0x61, 0xe2, 0x96, + 0x1f, 0x8c, 0xc3, 0xaa, 0xfa, 0x62, 0x2c, 0x9e, 0xb8, 0xee, 0x37, 0x63, 0xeb, 0xa4, 0x40, 0xba, + 0x44, 0xa0, 0xa2, 0x94, 0x55, 0xaf, 0x8d, 0xbc, 0x52, 0xf9, 0xbd, 0xd4, 0xf4, 0x5d, 0x22, 0xfa, + 0x95, 0xa6, 0xdf, 0x6b, 0xf7, 0x2b, 0xcd, 0x4f, 0x63, 0xbb, 0x78, 0xe0, 0x38, 0x2e, 0x11, 0x8e, + 0x53, 0xa9, 0x1c, 0xa7, 0xd7, 0x76, 0x9c, 0x4a, 0x37, 0x58, 0x56, 0xcd, 0x3e, 0xfd, 0x1d, 0x00, + 0x00, 0xff, 0xff, 0x21, 0xf9, 0xb6, 0xe6, 0xdc, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/merchant_center_link.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/merchant_center_link.pb.go new file mode 100644 index 0000000..f77986b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/merchant_center_link.pb.go @@ -0,0 +1,133 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/merchant_center_link.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A data sharing connection, proposed or in use, +// between a Google Ads Customer and a Merchant Center account. +type MerchantCenterLink struct { + // The resource name of the merchant center link. + // Merchant center link resource names have the form: + // + // `customers/{customer_id}/merchantCenterLinks/{merchant_center_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the Merchant Center account. + // This field is readonly. + Id *wrappers.Int64Value `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // The name of the Merchant Center account. + // This field is readonly. + MerchantCenterAccountName *wrappers.StringValue `protobuf:"bytes,4,opt,name=merchant_center_account_name,json=merchantCenterAccountName,proto3" json:"merchant_center_account_name,omitempty"` + // The status of the link. 
+ Status enums.MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MerchantCenterLink) Reset() { *m = MerchantCenterLink{} } +func (m *MerchantCenterLink) String() string { return proto.CompactTextString(m) } +func (*MerchantCenterLink) ProtoMessage() {} +func (*MerchantCenterLink) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_a6c67adc47332ce3, []int{0} +} +func (m *MerchantCenterLink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MerchantCenterLink.Unmarshal(m, b) +} +func (m *MerchantCenterLink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MerchantCenterLink.Marshal(b, m, deterministic) +} +func (dst *MerchantCenterLink) XXX_Merge(src proto.Message) { + xxx_messageInfo_MerchantCenterLink.Merge(dst, src) +} +func (m *MerchantCenterLink) XXX_Size() int { + return xxx_messageInfo_MerchantCenterLink.Size(m) +} +func (m *MerchantCenterLink) XXX_DiscardUnknown() { + xxx_messageInfo_MerchantCenterLink.DiscardUnknown(m) +} + +var xxx_messageInfo_MerchantCenterLink proto.InternalMessageInfo + +func (m *MerchantCenterLink) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *MerchantCenterLink) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *MerchantCenterLink) GetMerchantCenterAccountName() *wrappers.StringValue { + if m != nil { + return m.MerchantCenterAccountName + } + return nil +} + +func (m *MerchantCenterLink) GetStatus() enums.MerchantCenterLinkStatusEnum_MerchantCenterLinkStatus { + if m != nil { + return m.Status + } + return enums.MerchantCenterLinkStatusEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*MerchantCenterLink)(nil), "google.ads.googleads.v1.resources.MerchantCenterLink") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/merchant_center_link.proto", fileDescriptor_merchant_center_link_a6c67adc47332ce3) +} + +var fileDescriptor_merchant_center_link_a6c67adc47332ce3 = []byte{ + // 410 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcf, 0x6a, 0xdb, 0x30, + 0x18, 0xc7, 0xce, 0x16, 0x98, 0xf7, 0xe7, 0xe0, 0xcb, 0xb2, 0x2c, 0x8c, 0x64, 0x23, 0x10, 0x18, + 0xc8, 0x38, 0x1b, 0x3b, 0x68, 0x83, 0xe1, 0x8c, 0x11, 0x36, 0xb6, 0x12, 0x9c, 0xe2, 0x43, 0x71, + 0x31, 0x8a, 0xad, 0xba, 0x22, 0xb6, 0x64, 0x24, 0x39, 0x7d, 0x81, 0x3e, 0x49, 0x8f, 0xbd, 0xf6, + 0x2d, 0xfa, 0x28, 0x7d, 0x8a, 0x12, 0xc9, 0x32, 0xb4, 0x69, 0xda, 0xdb, 0x97, 0x7c, 0xbf, 0xbf, + 0x9f, 0xe5, 0xfc, 0xc8, 0x19, 0xcb, 0x0b, 0xec, 0xa1, 0x4c, 0x78, 0x7a, 0xdc, 0x4e, 0x1b, 0xdf, + 0xe3, 0x58, 0xb0, 0x9a, 0xa7, 0x58, 0x78, 0x25, 0xe6, 0xe9, 0x29, 0xa2, 0x32, 0x49, 0x31, 0x95, + 0x98, 0x27, 0x05, 0xa1, 0x6b, 0x50, 0x71, 0x26, 0x99, 0x3b, 0xd2, 0x14, 0x80, 0x32, 0x01, 0x5a, + 0x36, 0xd8, 0xf8, 0xa0, 0x65, 0xf7, 0x7f, 0xee, 0x33, 0xc0, 0xb4, 0x2e, 0x1f, 0x16, 0x4f, 0x84, + 0x44, 0xb2, 0x16, 0xda, 0xa3, 0xff, 0xa1, 0x11, 0x50, 0xbf, 0x56, 0xf5, 0x89, 0x77, 0xc6, 0x51, + 0x55, 0x61, 0x6e, 0xf6, 0x03, 0x63, 0x50, 0x11, 0x0f, 0x51, 0xca, 0x24, 0x92, 0x84, 0xd1, 0x66, + 0xfb, 0xf1, 0xca, 0x76, 0xdc, 0xff, 0x8d, 0xc7, 0x2f, 0x65, 0xf1, 0x8f, 0xd0, 0xb5, 0xfb, 0xc9, + 0x79, 0x6d, 
0x22, 0x26, 0x14, 0x95, 0xb8, 0x67, 0x0d, 0xad, 0xc9, 0x8b, 0xf0, 0x95, 0xf9, 0xf3, + 0x00, 0x95, 0xd8, 0xfd, 0xec, 0xd8, 0x24, 0xeb, 0x75, 0x86, 0xd6, 0xe4, 0xe5, 0xf4, 0x7d, 0xd3, + 0x0f, 0x98, 0x18, 0xe0, 0x0f, 0x95, 0xdf, 0xbe, 0x46, 0xa8, 0xa8, 0x71, 0x68, 0x93, 0xcc, 0x3d, + 0x76, 0x06, 0xf7, 0xbb, 0xa0, 0x34, 0x65, 0x35, 0x95, 0xda, 0xe0, 0x99, 0x92, 0x19, 0xec, 0xc8, + 0x2c, 0x25, 0x27, 0x34, 0xd7, 0x3a, 0xef, 0xca, 0x3b, 0x49, 0x03, 0xcd, 0x57, 0x59, 0x0a, 0xa7, + 0xab, 0xaf, 0xd2, 0x7b, 0x3e, 0xb4, 0x26, 0x6f, 0xa6, 0x87, 0x60, 0xdf, 0xe9, 0xd5, 0x5d, 0xc1, + 0x6e, 0xe7, 0xa5, 0xa2, 0xff, 0xa6, 0x75, 0xb9, 0x77, 0x19, 0x36, 0x1e, 0xb3, 0x73, 0xdb, 0x19, + 0xa7, 0xac, 0x04, 0x4f, 0x7e, 0xde, 0xd9, 0xdb, 0x5d, 0xad, 0xc5, 0xb6, 0xda, 0xc2, 0x3a, 0xfa, + 0xdb, 0xb0, 0x73, 0x56, 0x20, 0x9a, 0x03, 0xc6, 0x73, 0x2f, 0xc7, 0x54, 0x15, 0x37, 0x2f, 0xa1, + 0x22, 0xe2, 0x91, 0x97, 0xf7, 0xbd, 0x9d, 0x2e, 0xec, 0xce, 0x3c, 0x08, 0x2e, 0xed, 0xd1, 0x5c, + 0x4b, 0x06, 0x99, 0x00, 0x7a, 0xdc, 0x4e, 0x91, 0x0f, 0x42, 0x83, 0xbc, 0x36, 0x98, 0x38, 0xc8, + 0x44, 0xdc, 0x62, 0xe2, 0xc8, 0x8f, 0x5b, 0xcc, 0x8d, 0x3d, 0xd6, 0x0b, 0x08, 0x83, 0x4c, 0x40, + 0xd8, 0xa2, 0x20, 0x8c, 0x7c, 0x08, 0x5b, 0xdc, 0xaa, 0xab, 0xc2, 0x7e, 0xb9, 0x0d, 0x00, 0x00, + 0xff, 0xff, 0x9f, 0x1d, 0x01, 0xf6, 0x25, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_app_category_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_app_category_constant.pb.go new file mode 100644 index 0000000..40ea5ef --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_app_category_constant.pb.go @@ -0,0 +1,116 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/mobile_app_category_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A mobile application category constant. +type MobileAppCategoryConstant struct { + // The resource name of the mobile app category constant. + // Mobile app category constant resource names have the form: + // + // `mobileAppCategoryConstants/{mobile_app_category_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the mobile app category constant. + Id *wrappers.Int32Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Mobile app category name. 
+ Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileAppCategoryConstant) Reset() { *m = MobileAppCategoryConstant{} } +func (m *MobileAppCategoryConstant) String() string { return proto.CompactTextString(m) } +func (*MobileAppCategoryConstant) ProtoMessage() {} +func (*MobileAppCategoryConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_mobile_app_category_constant_45ea856e6f2b6600, []int{0} +} +func (m *MobileAppCategoryConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileAppCategoryConstant.Unmarshal(m, b) +} +func (m *MobileAppCategoryConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileAppCategoryConstant.Marshal(b, m, deterministic) +} +func (dst *MobileAppCategoryConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileAppCategoryConstant.Merge(dst, src) +} +func (m *MobileAppCategoryConstant) XXX_Size() int { + return xxx_messageInfo_MobileAppCategoryConstant.Size(m) +} +func (m *MobileAppCategoryConstant) XXX_DiscardUnknown() { + xxx_messageInfo_MobileAppCategoryConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileAppCategoryConstant proto.InternalMessageInfo + +func (m *MobileAppCategoryConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *MobileAppCategoryConstant) GetId() *wrappers.Int32Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *MobileAppCategoryConstant) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func init() { + proto.RegisterType((*MobileAppCategoryConstant)(nil), "google.ads.googleads.v1.resources.MobileAppCategoryConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/mobile_app_category_constant.proto", fileDescriptor_mobile_app_category_constant_45ea856e6f2b6600) +} + +var fileDescriptor_mobile_app_category_constant_45ea856e6f2b6600 = []byte{ + // 352 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xd1, 0x4a, 0xeb, 0x30, + 0x18, 0xc7, 0x69, 0x77, 0x38, 0x70, 0x7a, 0xf4, 0xa6, 0x57, 0x73, 0x8e, 0xb1, 0x29, 0x83, 0x81, + 0x90, 0xda, 0xed, 0x2e, 0x5e, 0x75, 0x13, 0x86, 0x82, 0x32, 0x26, 0xf4, 0x42, 0x0a, 0x25, 0x6b, + 0x63, 0x28, 0xb4, 0x49, 0x48, 0xb2, 0x89, 0xcf, 0xe0, 0x43, 0x08, 0x5e, 0xfa, 0x28, 0x3e, 0x8a, + 0x4f, 0x21, 0x4b, 0x9a, 0xdc, 0x88, 0x7a, 0xf7, 0xa7, 0xf9, 0xe5, 0xff, 0xfd, 0x9a, 0x2f, 0xb8, + 0x24, 0x8c, 0x91, 0x1a, 0x47, 0xa8, 0x94, 0x91, 0x89, 0xfb, 0xb4, 0x8b, 0x23, 0x81, 0x25, 0xdb, + 0x8a, 0x02, 0xcb, 0xa8, 0x61, 0x9b, 0xaa, 0xc6, 0x39, 0xe2, 0x3c, 0x2f, 0x90, 0xc2, 0x84, 0x89, + 0xa7, 0xbc, 0x60, 0x54, 0x2a, 0x44, 0x15, 0xe0, 0x82, 0x29, 0x16, 0x8e, 0xcc, 0x55, 0x80, 0x4a, + 0x09, 0x5c, 0x0b, 0xd8, 0xc5, 0xc0, 0xb5, 0xf4, 0x06, 0xed, 0x20, 0x7d, 0x61, 0xb3, 0x7d, 0x88, + 0x1e, 0x05, 0xe2, 0x1c, 0x0b, 0x69, 0x2a, 0x7a, 0x7d, 0x2b, 0xc2, 0xab, 0x08, 0x51, 0xca, 0x14, + 0x52, 0x15, 0xa3, 0xed, 0xe9, 0xc9, 0x8b, 0x17, 0x1c, 0xdd, 0x68, 0x8f, 0x84, 0xf3, 0x45, 0x6b, + 0xb1, 0x68, 0x25, 0xc2, 0xd3, 0xe0, 0xd0, 0x0e, 0xca, 0x29, 0x6a, 0x70, 0xd7, 0x1b, 0x7a, 0x93, + 0x7f, 0xeb, 0x03, 0xfb, 0xf1, 0x16, 0x35, 0x38, 0x3c, 0x0b, 0xfc, 0xaa, 0xec, 0xfa, 0x43, 0x6f, + 0xf2, 0x7f, 0x7a, 0xdc, 0x5a, 0x02, 0x6b, 0x03, 0xae, 0xa8, 0x9a, 0x4d, 0x53, 0x54, 0x6f, 0xf1, + 0xda, 0xaf, 
0xca, 0xf0, 0x3c, 0xf8, 0xa3, 0x8b, 0x3a, 0x1a, 0xef, 0x7f, 0xc1, 0xef, 0x94, 0xa8, + 0x28, 0x31, 0xbc, 0x26, 0xe7, 0xcf, 0x7e, 0x30, 0x2e, 0x58, 0x03, 0x7e, 0x7d, 0x89, 0xf9, 0xe0, + 0xdb, 0x1f, 0x59, 0xed, 0xeb, 0x57, 0xde, 0xfd, 0x75, 0x5b, 0x42, 0x58, 0x8d, 0x28, 0x01, 0x4c, + 0x90, 0x88, 0x60, 0xaa, 0x87, 0xdb, 0x25, 0xf1, 0x4a, 0xfe, 0xb0, 0xb3, 0x0b, 0x97, 0x5e, 0xfd, + 0xce, 0x32, 0x49, 0xde, 0xfc, 0xd1, 0xd2, 0x54, 0x26, 0xa5, 0x04, 0x26, 0xee, 0x53, 0x1a, 0x83, + 0xb5, 0x25, 0xdf, 0x2d, 0x93, 0x25, 0xa5, 0xcc, 0x1c, 0x93, 0xa5, 0x71, 0xe6, 0x98, 0x0f, 0x7f, + 0x6c, 0x0e, 0x20, 0x4c, 0x4a, 0x09, 0xa1, 0xa3, 0x20, 0x4c, 0x63, 0x08, 0x1d, 0xb7, 0xf9, 0xab, + 0x65, 0x67, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xf8, 0x03, 0x3b, 0x4a, 0x5f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_device_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_device_constant.pb.go new file mode 100644 index 0000000..629eb53 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mobile_device_constant.pb.go @@ -0,0 +1,150 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/mobile_device_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A mobile device constant. +type MobileDeviceConstant struct { + // The resource name of the mobile device constant. + // Mobile device constant resource names have the form: + // + // `mobileDeviceConstants/{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the mobile device constant. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The name of the mobile device. + Name *wrappers.StringValue `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The manufacturer of the mobile device. + ManufacturerName *wrappers.StringValue `protobuf:"bytes,4,opt,name=manufacturer_name,json=manufacturerName,proto3" json:"manufacturer_name,omitempty"` + // The operating system of the mobile device. + OperatingSystemName *wrappers.StringValue `protobuf:"bytes,5,opt,name=operating_system_name,json=operatingSystemName,proto3" json:"operating_system_name,omitempty"` + // The type of mobile device. 
+ Type enums.MobileDeviceTypeEnum_MobileDeviceType `protobuf:"varint,6,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.MobileDeviceTypeEnum_MobileDeviceType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MobileDeviceConstant) Reset() { *m = MobileDeviceConstant{} } +func (m *MobileDeviceConstant) String() string { return proto.CompactTextString(m) } +func (*MobileDeviceConstant) ProtoMessage() {} +func (*MobileDeviceConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_mobile_device_constant_6b896300aee21e10, []int{0} +} +func (m *MobileDeviceConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MobileDeviceConstant.Unmarshal(m, b) +} +func (m *MobileDeviceConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MobileDeviceConstant.Marshal(b, m, deterministic) +} +func (dst *MobileDeviceConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_MobileDeviceConstant.Merge(dst, src) +} +func (m *MobileDeviceConstant) XXX_Size() int { + return xxx_messageInfo_MobileDeviceConstant.Size(m) +} +func (m *MobileDeviceConstant) XXX_DiscardUnknown() { + xxx_messageInfo_MobileDeviceConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_MobileDeviceConstant proto.InternalMessageInfo + +func (m *MobileDeviceConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *MobileDeviceConstant) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *MobileDeviceConstant) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *MobileDeviceConstant) GetManufacturerName() *wrappers.StringValue { + if m != nil { + return m.ManufacturerName + } + return nil +} + +func (m *MobileDeviceConstant) GetOperatingSystemName() *wrappers.StringValue { + if m != nil { + return m.OperatingSystemName + } + return nil +} + +func (m *MobileDeviceConstant) GetType() enums.MobileDeviceTypeEnum_MobileDeviceType { + if m != nil { + return m.Type + } + return enums.MobileDeviceTypeEnum_UNSPECIFIED +} + +func init() { + proto.RegisterType((*MobileDeviceConstant)(nil), "google.ads.googleads.v1.resources.MobileDeviceConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/mobile_device_constant.proto", fileDescriptor_mobile_device_constant_6b896300aee21e10) +} + +var fileDescriptor_mobile_device_constant_6b896300aee21e10 = []byte{ + // 448 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xdf, 0x6a, 0x13, 0x41, + 0x14, 0xc6, 0xd9, 0x4d, 0x2c, 0x38, 0xfe, 0x41, 0x57, 0x85, 0x58, 0x8b, 0xa4, 0x4a, 0x21, 0x20, + 0xcc, 0xba, 0x55, 0x7a, 0xb1, 0x82, 0xb0, 0xb5, 0x52, 0x2a, 0x28, 0x21, 0x95, 0x20, 0x12, 0x08, + 0x93, 0x9d, 0xd3, 0x61, 0x20, 0xf3, 0x87, 0x99, 0xd9, 0x48, 0x5e, 0xc0, 0x7b, 0x5f, 0xc1, 0x4b, + 0x1f, 0xc5, 0x47, 0xf1, 0x29, 0x24, 0x33, 0xbb, 0x43, 0xb1, 0x56, 0x7b, 0x77, 0x32, 0xe7, 0xfb, + 0x7e, 0xf9, 0xce, 0x9e, 0x83, 0x5e, 0x33, 0xa5, 0xd8, 0x12, 0x72, 0x42, 0x6d, 0x1e, 0xca, 0x4d, + 0xb5, 0x2a, 0x72, 0x03, 0x56, 0x35, 0xa6, 0x06, 0x9b, 0x0b, 0xb5, 0xe0, 0x4b, 0x98, 0x53, 0x58, + 0xf1, 0x1a, 0xe6, 0xb5, 0x92, 0xd6, 0x11, 0xe9, 0xb0, 0x36, 0xca, 0xa9, 0x6c, 0x37, 0x98, 0x30, + 0xa1, 0x16, 0x47, 0x3f, 0x5e, 0x15, 0x38, 0xfa, 0xb7, 0x0f, 0x2e, 0xfb, 0x0b, 0x90, 0x8d, 0xf8, + 0x13, 0xef, 0xd6, 0x1a, 0x02, 0x7a, 0xfb, 0x71, 0xeb, 0xf3, 0xbf, 
0x16, 0xcd, 0x59, 0xfe, 0xc5, + 0x10, 0xad, 0xc1, 0xd8, 0xb6, 0xbf, 0xd3, 0x71, 0x35, 0xcf, 0x89, 0x94, 0xca, 0x11, 0xc7, 0x95, + 0x6c, 0xbb, 0x4f, 0xbe, 0xf5, 0xd0, 0xfd, 0xf7, 0x1e, 0x7d, 0xe4, 0xc9, 0x6f, 0xda, 0xdc, 0xd9, + 0x53, 0x74, 0xab, 0xcb, 0x36, 0x97, 0x44, 0xc0, 0x20, 0x19, 0x26, 0xa3, 0xeb, 0x93, 0x9b, 0xdd, + 0xe3, 0x07, 0x22, 0x20, 0x7b, 0x86, 0x52, 0x4e, 0x07, 0xe9, 0x30, 0x19, 0xdd, 0xd8, 0x7f, 0xd4, + 0x0e, 0x86, 0xbb, 0x20, 0xf8, 0x44, 0xba, 0x83, 0x97, 0x53, 0xb2, 0x6c, 0x60, 0x92, 0x72, 0x9a, + 0x3d, 0x47, 0x7d, 0x0f, 0xea, 0x79, 0xf9, 0xce, 0x05, 0xf9, 0xa9, 0x33, 0x5c, 0xb2, 0xa0, 0xf7, + 0xca, 0xec, 0x04, 0xdd, 0x15, 0x44, 0x36, 0x67, 0xa4, 0x76, 0x8d, 0x01, 0x13, 0x72, 0xf4, 0xaf, + 0x60, 0xbf, 0x73, 0xde, 0xe6, 0x93, 0x8e, 0xd1, 0x03, 0xa5, 0xc1, 0x10, 0xc7, 0x25, 0x9b, 0xdb, + 0xb5, 0x75, 0x20, 0x02, 0xee, 0xda, 0x15, 0x70, 0xf7, 0xa2, 0xf5, 0xd4, 0x3b, 0x3d, 0xf1, 0x13, + 0xea, 0x6f, 0xb6, 0x30, 0xd8, 0x1a, 0x26, 0xa3, 0xdb, 0xfb, 0x47, 0xf8, 0xb2, 0x0d, 0xfb, 0xf5, + 0xe1, 0xf3, 0xdf, 0xf8, 0xe3, 0x5a, 0xc3, 0x5b, 0xd9, 0x88, 0x0b, 0x8f, 0x13, 0x4f, 0x3c, 0xfc, + 0x9a, 0xa2, 0xbd, 0x5a, 0x09, 0xfc, 0xdf, 0x9b, 0x39, 0x7c, 0xf8, 0xb7, 0xd5, 0x8d, 0x37, 0x23, + 0x8c, 0x93, 0xcf, 0xef, 0x5a, 0x3f, 0x53, 0x4b, 0x22, 0x19, 0x56, 0x86, 0xe5, 0x0c, 0xa4, 0x1f, + 0xb0, 0x3b, 0x30, 0xcd, 0xed, 0x3f, 0x4e, 0xfa, 0x55, 0xac, 0xbe, 0xa7, 0xbd, 0xe3, 0xaa, 0xfa, + 0x91, 0xee, 0x1e, 0x07, 0x64, 0x45, 0x2d, 0x0e, 0xe5, 0xa6, 0x9a, 0x16, 0x78, 0xd2, 0x29, 0x7f, + 0x76, 0x9a, 0x59, 0x45, 0xed, 0x2c, 0x6a, 0x66, 0xd3, 0x62, 0x16, 0x35, 0xbf, 0xd2, 0xbd, 0xd0, + 0x28, 0xcb, 0x8a, 0xda, 0xb2, 0x8c, 0xaa, 0xb2, 0x9c, 0x16, 0x65, 0x19, 0x75, 0x8b, 0x2d, 0x1f, + 0xf6, 0xc5, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6c, 0x4e, 0xe7, 0xc5, 0x7e, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mutate_job.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mutate_job.pb.go new file mode 100644 index 0000000..2eff05f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/mutate_job.pb.go @@ -0,0 +1,245 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/mutate_job.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A list of mutates being processed asynchronously. The mutates are uploaded +// by the user. The mutates themselves aren’t readable and the results of the +// job can only be read using MutateJobService.ListMutateJobResults. +type MutateJob struct { + // The resource name of the mutate job. 
+ // Mutate job resource names have the form: + // + // `customers/{customer_id}/mutateJobs/{mutate_job_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // ID of this mutate job. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The next sequence token to use when adding operations. Only set when the + // mutate job status is PENDING. + NextAddSequenceToken *wrappers.StringValue `protobuf:"bytes,3,opt,name=next_add_sequence_token,json=nextAddSequenceToken,proto3" json:"next_add_sequence_token,omitempty"` + // Contains additional information about this mutate job. + Metadata *MutateJob_MutateJobMetadata `protobuf:"bytes,4,opt,name=metadata,proto3" json:"metadata,omitempty"` + // Status of this mutate job. + Status enums.MutateJobStatusEnum_MutateJobStatus `protobuf:"varint,5,opt,name=status,proto3,enum=google.ads.googleads.v1.enums.MutateJobStatusEnum_MutateJobStatus" json:"status,omitempty"` + // The resource name of the long-running operation that can be used to poll + // for completion. Only set when the mutate job status is RUNNING or DONE. + LongRunningOperation *wrappers.StringValue `protobuf:"bytes,6,opt,name=long_running_operation,json=longRunningOperation,proto3" json:"long_running_operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateJob) Reset() { *m = MutateJob{} } +func (m *MutateJob) String() string { return proto.CompactTextString(m) } +func (*MutateJob) ProtoMessage() {} +func (*MutateJob) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_3bb9abe6bc3ebff0, []int{0} +} +func (m *MutateJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateJob.Unmarshal(m, b) +} +func (m *MutateJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateJob.Marshal(b, m, deterministic) +} +func (dst *MutateJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateJob.Merge(dst, src) +} +func (m *MutateJob) XXX_Size() int { + return xxx_messageInfo_MutateJob.Size(m) +} +func (m *MutateJob) XXX_DiscardUnknown() { + xxx_messageInfo_MutateJob.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateJob proto.InternalMessageInfo + +func (m *MutateJob) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *MutateJob) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *MutateJob) GetNextAddSequenceToken() *wrappers.StringValue { + if m != nil { + return m.NextAddSequenceToken + } + return nil +} + +func (m *MutateJob) GetMetadata() *MutateJob_MutateJobMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *MutateJob) GetStatus() enums.MutateJobStatusEnum_MutateJobStatus { + if m != nil { + return m.Status + } + return enums.MutateJobStatusEnum_UNSPECIFIED +} + +func (m *MutateJob) GetLongRunningOperation() *wrappers.StringValue { + if m != nil { + return m.LongRunningOperation + } + return nil +} + +// Additional information about the mutate job. This message is also used as +// metadata returned in mutate job Long Running Operations. +type MutateJob_MutateJobMetadata struct { + // The time when this mutate job was created. + // Formatted as yyyy-mm-dd hh:mm:ss. 
Example: "2018-03-05 09:15:00" + CreationDateTime *wrappers.StringValue `protobuf:"bytes,1,opt,name=creation_date_time,json=creationDateTime,proto3" json:"creation_date_time,omitempty"` + // The time when this mutate job was completed. + // Formatted as yyyy-MM-dd HH:mm:ss. Example: "2018-03-05 09:16:00" + CompletionDateTime *wrappers.StringValue `protobuf:"bytes,2,opt,name=completion_date_time,json=completionDateTime,proto3" json:"completion_date_time,omitempty"` + // The fraction (between 0.0 and 1.0) of mutates that have been processed. + // This is empty if the job hasn't started running yet. + EstimatedCompletionRatio *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=estimated_completion_ratio,json=estimatedCompletionRatio,proto3" json:"estimated_completion_ratio,omitempty"` + // The number of mutate operations in the mutate job. + OperationCount *wrappers.Int64Value `protobuf:"bytes,4,opt,name=operation_count,json=operationCount,proto3" json:"operation_count,omitempty"` + // The number of mutate operations executed by the mutate job. + // Present only if the job has started running. + ExecutedOperationCount *wrappers.Int64Value `protobuf:"bytes,5,opt,name=executed_operation_count,json=executedOperationCount,proto3" json:"executed_operation_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateJob_MutateJobMetadata) Reset() { *m = MutateJob_MutateJobMetadata{} } +func (m *MutateJob_MutateJobMetadata) String() string { return proto.CompactTextString(m) } +func (*MutateJob_MutateJobMetadata) ProtoMessage() {} +func (*MutateJob_MutateJobMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_3bb9abe6bc3ebff0, []int{0, 0} +} +func (m *MutateJob_MutateJobMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateJob_MutateJobMetadata.Unmarshal(m, b) +} +func (m *MutateJob_MutateJobMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateJob_MutateJobMetadata.Marshal(b, m, deterministic) +} +func (dst *MutateJob_MutateJobMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateJob_MutateJobMetadata.Merge(dst, src) +} +func (m *MutateJob_MutateJobMetadata) XXX_Size() int { + return xxx_messageInfo_MutateJob_MutateJobMetadata.Size(m) +} +func (m *MutateJob_MutateJobMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_MutateJob_MutateJobMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateJob_MutateJobMetadata proto.InternalMessageInfo + +func (m *MutateJob_MutateJobMetadata) GetCreationDateTime() *wrappers.StringValue { + if m != nil { + return m.CreationDateTime + } + return nil +} + +func (m *MutateJob_MutateJobMetadata) GetCompletionDateTime() *wrappers.StringValue { + if m != nil { + return m.CompletionDateTime + } + return nil +} + +func (m *MutateJob_MutateJobMetadata) GetEstimatedCompletionRatio() *wrappers.DoubleValue { + if m != nil { + return m.EstimatedCompletionRatio + } + return nil +} + +func (m *MutateJob_MutateJobMetadata) GetOperationCount() *wrappers.Int64Value { + if m != nil { + return m.OperationCount + } + return nil +} + +func (m *MutateJob_MutateJobMetadata) GetExecutedOperationCount() *wrappers.Int64Value { + if m != nil { + return m.ExecutedOperationCount + } + return nil +} + +func init() { + proto.RegisterType((*MutateJob)(nil), "google.ads.googleads.v1.resources.MutateJob") + proto.RegisterType((*MutateJob_MutateJobMetadata)(nil), 
"google.ads.googleads.v1.resources.MutateJob.MutateJobMetadata") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/mutate_job.proto", fileDescriptor_mutate_job_3bb9abe6bc3ebff0) +} + +var fileDescriptor_mutate_job_3bb9abe6bc3ebff0 = []byte{ + // 581 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xdd, 0x6a, 0xd4, 0x4c, + 0x18, 0xc7, 0x49, 0xfa, 0x76, 0x79, 0x3b, 0x6a, 0xd5, 0xa1, 0xd4, 0xb0, 0x16, 0x69, 0x95, 0x42, + 0x41, 0x98, 0xb0, 0xf5, 0xe3, 0x20, 0x82, 0x90, 0x76, 0xa5, 0xb8, 0xd0, 0x0f, 0xb2, 0x75, 0x0f, + 0x96, 0x85, 0x30, 0x9b, 0x79, 0x0c, 0xd1, 0x64, 0x26, 0x66, 0x66, 0x6a, 0xaf, 0xc7, 0x23, 0xf1, + 0x52, 0xbc, 0x13, 0xbd, 0x05, 0x4f, 0x24, 0x5f, 0xb3, 0x75, 0xcb, 0x76, 0x7b, 0xf6, 0x24, 0xf3, + 0xff, 0xfd, 0xe6, 0xeb, 0x61, 0xd0, 0x7e, 0x2c, 0x44, 0x9c, 0x82, 0x4b, 0x99, 0x74, 0xeb, 0xb2, + 0xac, 0x2e, 0x7a, 0x6e, 0x01, 0x52, 0xe8, 0x22, 0x02, 0xe9, 0x66, 0x5a, 0x51, 0x05, 0xe1, 0x27, + 0x31, 0x25, 0x79, 0x21, 0x94, 0xc0, 0x3b, 0x75, 0x90, 0x50, 0x26, 0x89, 0x61, 0xc8, 0x45, 0x8f, + 0x18, 0xa6, 0xfb, 0x6a, 0x91, 0x16, 0xb8, 0xce, 0xae, 0x2a, 0x43, 0xa9, 0xa8, 0xd2, 0xb2, 0x36, + 0x77, 0x9f, 0x34, 0x58, 0xf5, 0x35, 0xd5, 0x1f, 0xdd, 0xaf, 0x05, 0xcd, 0x73, 0x28, 0xda, 0xf1, + 0xad, 0x56, 0x9b, 0x27, 0x2e, 0xe5, 0x5c, 0x28, 0xaa, 0x12, 0xc1, 0x9b, 0xd1, 0xa7, 0xbf, 0x3a, + 0x68, 0xed, 0xb8, 0x32, 0x0f, 0xc4, 0x14, 0x3f, 0x43, 0xf7, 0xda, 0xf5, 0x84, 0x9c, 0x66, 0xe0, + 0x58, 0xdb, 0xd6, 0xde, 0x5a, 0x70, 0xb7, 0xfd, 0x79, 0x42, 0x33, 0xc0, 0xcf, 0x91, 0x9d, 0x30, + 0xc7, 0xde, 0xb6, 0xf6, 0xee, 0xec, 0x3f, 0x6e, 0x36, 0x43, 0xda, 0xd9, 0xc9, 0x7b, 0xae, 0x5e, + 0xbf, 0x1c, 0xd1, 0x54, 0x43, 0x60, 0x27, 0x0c, 0x0f, 0xd1, 0x23, 0x0e, 0x97, 0x2a, 0xa4, 0x8c, + 0x85, 0x12, 0xbe, 0x68, 0xe0, 0x11, 0x84, 0x4a, 0x7c, 0x06, 0xee, 0xac, 0x54, 0x86, 0xad, 0x6b, + 0x86, 0xa1, 0x2a, 0x12, 0x1e, 0xd7, 0x8a, 0x8d, 0x12, 0xf6, 0x19, 0x1b, 0x36, 0xe8, 0x79, 0x49, + 0xe2, 0x31, 0xfa, 0x3f, 0x03, 0x45, 0x19, 0x55, 0xd4, 0xf9, 0xaf, 0xb2, 0xbc, 0x25, 0x4b, 0xcf, + 0x97, 0x98, 0x6d, 0xce, 0xaa, 0xe3, 0xc6, 0x12, 0x18, 0x1f, 0x1e, 0xa3, 0x4e, 0x7d, 0xbc, 0xce, + 0xea, 0xb6, 0xb5, 0xb7, 0xbe, 0x7f, 0xb0, 0xd0, 0x5c, 0x5d, 0xcb, 0xcc, 0x35, 0xac, 0xa8, 0x77, + 0x5c, 0x67, 0xf3, 0xff, 0x82, 0xc6, 0x88, 0x03, 0xb4, 0x99, 0x0a, 0x1e, 0x87, 0x85, 0xe6, 0x3c, + 0xe1, 0x71, 0x28, 0x72, 0x28, 0xaa, 0xdb, 0x70, 0x3a, 0xb7, 0x39, 0x8b, 0x92, 0x0d, 0x6a, 0xf4, + 0xb4, 0x25, 0xbb, 0xdf, 0x57, 0xd0, 0xc3, 0x6b, 0xfb, 0xc1, 0x03, 0x84, 0xa3, 0x02, 0xaa, 0x44, + 0xc8, 0xca, 0xb6, 0x51, 0x49, 0x73, 0x9b, 0xcb, 0x66, 0x79, 0xd0, 0x72, 0x7d, 0xaa, 0xe0, 0x3c, + 0xc9, 0x00, 0x9f, 0xa0, 0x8d, 0x48, 0x64, 0x79, 0x0a, 0x73, 0x36, 0xfb, 0x16, 0x36, 0x3c, 0x23, + 0x8d, 0x6f, 0x8c, 0xba, 0x20, 0x55, 0x92, 0x51, 0x05, 0x2c, 0xbc, 0x62, 0xae, 0x36, 0xb4, 0xb0, + 0x2b, 0xfa, 0x42, 0x4f, 0x53, 0xa8, 0xad, 0x8e, 0xe1, 0x0f, 0x0d, 0x1e, 0x94, 0x34, 0xee, 0xa3, + 0xfb, 0xe6, 0x50, 0xc3, 0x48, 0x68, 0xae, 0x9a, 0x06, 0xb9, 0xb1, 0x51, 0xd7, 0x0d, 0x73, 0x58, + 0x22, 0xf8, 0x03, 0x72, 0xe0, 0x12, 0x22, 0x5d, 0x2e, 0x70, 0x5e, 0xb7, 0xba, 0x5c, 0xb7, 0xd9, + 0xc2, 0xa7, 0xff, 0x68, 0x0f, 0xfe, 0x58, 0x68, 0x37, 0x12, 0xd9, 0xf2, 0x56, 0x3d, 0x58, 0x37, + 0x37, 0x7a, 0x56, 0xfa, 0xcf, 0xac, 0xf1, 0xa0, 0x81, 0x62, 0x91, 0x52, 0x1e, 0x13, 0x51, 0xc4, + 0x6e, 0x0c, 0xbc, 0x9a, 0xbd, 0x7d, 0x2c, 0xf2, 0x44, 0xde, 0xf0, 0x24, 0xbd, 0x31, 0xd5, 0x37, + 0x7b, 0xe5, 0xc8, 0xf7, 0x7f, 0xd8, 0x3b, 0x47, 0xb5, 0xd2, 0x67, 0x92, 0xd4, 0x65, 0x59, 0x8d, + 
0x7a, 0x24, 0x68, 0x93, 0x3f, 0xdb, 0xcc, 0xc4, 0x67, 0x72, 0x62, 0x32, 0x93, 0x51, 0x6f, 0x62, + 0x32, 0xbf, 0xed, 0xdd, 0x7a, 0xc0, 0xf3, 0x7c, 0x26, 0x3d, 0xcf, 0xa4, 0x3c, 0x6f, 0xd4, 0xf3, + 0x3c, 0x93, 0x9b, 0x76, 0xaa, 0xc5, 0xbe, 0xf8, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x7f, 0xc4, 0x20, + 0x7f, 0x3e, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/operating_system_version_constant.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/operating_system_version_constant.pb.go new file mode 100644 index 0000000..187a659 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/operating_system_version_constant.pb.go @@ -0,0 +1,157 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/operating_system_version_constant.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A mobile operating system version or a range of versions, depending on +// 'operator_type'. 
The complete list of available mobile platforms is available +// + // here + Path []*wrappers.StringValue `protobuf:"bytes,4,rep,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TopicConstant) Reset() { *m = TopicConstant{} } +func (m *TopicConstant) String() string { return proto.CompactTextString(m) } +func (*TopicConstant) ProtoMessage() {} +func (*TopicConstant) Descriptor() ([]byte, []int) { + return fileDescriptor_topic_constant_ae3a2d9dbb1054e6, []int{0} +} +func (m *TopicConstant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TopicConstant.Unmarshal(m, b) +} +func (m *TopicConstant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TopicConstant.Marshal(b, m, deterministic) +} +func (dst *TopicConstant) XXX_Merge(src proto.Message) { + xxx_messageInfo_TopicConstant.Merge(dst, src) +} +func (m *TopicConstant) XXX_Size() int { + return xxx_messageInfo_TopicConstant.Size(m) +} +func (m *TopicConstant) XXX_DiscardUnknown() { + xxx_messageInfo_TopicConstant.DiscardUnknown(m) +} + +var xxx_messageInfo_TopicConstant proto.InternalMessageInfo + +func (m *TopicConstant) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *TopicConstant) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *TopicConstant) GetTopicConstantParent() *wrappers.StringValue { + if m != nil { + return m.TopicConstantParent + } + return nil +} + +func (m *TopicConstant) GetPath() []*wrappers.StringValue { + if m != nil { + return m.Path + } + return nil +} + +func init() { + proto.RegisterType((*TopicConstant)(nil), "google.ads.googleads.v1.resources.TopicConstant") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/topic_constant.proto", fileDescriptor_topic_constant_ae3a2d9dbb1054e6) +} + +var fileDescriptor_topic_constant_ae3a2d9dbb1054e6 = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x4f, 0x4b, 0xf3, 0x30, + 0x00, 0xc6, 0x69, 0x37, 0x5e, 0x78, 0xfb, 0xbe, 0xbb, 0x54, 0x84, 0x32, 0x87, 0x6c, 0xca, 0x60, + 0x20, 0xa4, 0x56, 0x65, 0x87, 0x78, 0xea, 0x3c, 0x0c, 0x3d, 0x48, 0x99, 0xd2, 0x83, 0x14, 0x46, + 0xd6, 0xc6, 0x1a, 0xd8, 0x92, 0x90, 0x64, 0xf3, 0xfb, 0x78, 0xf4, 0xa3, 0xf8, 0x3d, 0xbc, 0xf8, + 0x25, 0x94, 0x36, 0x4d, 0x70, 0x08, 0xee, 0xf6, 0xd0, 0xfc, 0x9e, 0x3f, 0x4d, 0xbc, 0x71, 0xc9, + 0x58, 0xb9, 0xc4, 0x21, 0x2a, 0x64, 0xa8, 0x65, 0xa5, 0x36, 0x51, 0x28, 0xb0, 0x64, 0x6b, 0x91, + 0x63, 0x19, 0x2a, 0xc6, 0x49, 0x3e, 0xcf, 0x19, 0x95, 0x0a, 0x51, 0x05, 0xb8, 0x60, 0x8a, 0xf9, + 0x03, 0x0d, 0x03, 0x54, 0x48, 0x60, 0x7d, 0x60, 0x13, 0x01, 0xeb, 0xeb, 0x1e, 0x36, 0xd1, 0xb5, + 0x61, 0xb1, 0x7e, 0x0c, 0x9f, 0x05, 0xe2, 0x1c, 0x0b, 0xa9, 0x23, 0xba, 0x3d, 0x53, 0xcd, 0x49, + 0x88, 0x28, 0x65, 0x0a, 0x29, 0xc2, 0x68, 0x73, 0x7a, 0xf4, 0xee, 0x78, 0x9d, 0xfb, 0xaa, 0xf9, + 0xaa, 0x29, 0xf6, 0x8f, 0xbd, 0x8e, 0x09, 0x9f, 0x53, 0xb4, 0xc2, 0x81, 0xd3, 0x77, 0x46, 0x7f, + 0x67, 0xff, 0xcd, 0xc7, 0x5b, 0xb4, 0xc2, 0xfe, 0x89, 0xe7, 0x92, 0x22, 0x70, 0xfb, 0xce, 0xe8, + 0xdf, 0xd9, 0x41, 0xb3, 0x0c, 0x98, 0x05, 0xe0, 0x9a, 0xaa, 0xf1, 0x45, 0x8a, 0x96, 0x6b, 0x3c, + 0x73, 0x49, 0xe1, 0x27, 0xde, 0xfe, 0xf6, 0xcf, 0xcd, 0x39, 0x12, 0x98, 0xaa, 0xa0, 0x55, 0xfb, + 0x7b, 0x3f, 0xfc, 0x77, 0x4a, 0x10, 0x5a, 0xea, 0x80, 0x3d, 0xf5, 0x7d, 0x5d, 0x52, 0x1b, 0xfd, + 
0x53, 0xaf, 0xcd, 0x91, 0x7a, 0x0a, 0xda, 0xfd, 0xd6, 0xce, 0x80, 0x9a, 0x9c, 0x7c, 0x3a, 0xde, + 0x30, 0x67, 0x2b, 0xb0, 0xf3, 0x3e, 0x27, 0xfe, 0xd6, 0x75, 0x24, 0x55, 0x64, 0xe2, 0x3c, 0xdc, + 0x34, 0xc6, 0x92, 0x2d, 0x11, 0x2d, 0x01, 0x13, 0x65, 0x58, 0x62, 0x5a, 0x17, 0x9a, 0x07, 0xe5, + 0x44, 0xfe, 0xf2, 0xbe, 0x97, 0x56, 0xbd, 0xb8, 0xad, 0x69, 0x1c, 0xbf, 0xba, 0x83, 0xa9, 0x8e, + 0x8c, 0x0b, 0x09, 0xb4, 0xac, 0x54, 0x1a, 0x81, 0x99, 0x21, 0xdf, 0x0c, 0x93, 0xc5, 0x85, 0xcc, + 0x2c, 0x93, 0xa5, 0x51, 0x66, 0x99, 0x0f, 0x77, 0xa8, 0x0f, 0x20, 0x8c, 0x0b, 0x09, 0xa1, 0xa5, + 0x20, 0x4c, 0x23, 0x08, 0x2d, 0xb7, 0xf8, 0x53, 0x8f, 0x3d, 0xff, 0x0a, 0x00, 0x00, 0xff, 0xff, + 0xc1, 0x88, 0x02, 0x04, 0x8b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/topic_view.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/topic_view.pb.go new file mode 100644 index 0000000..cc7922e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/topic_view.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/topic_view.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A topic view. +type TopicView struct { + // The resource name of the topic view. 
+ // Topic view resource names have the form: + // + // `customers/{customer_id}/topicViews/{ad_group_id}~{criterion_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TopicView) Reset() { *m = TopicView{} } +func (m *TopicView) String() string { return proto.CompactTextString(m) } +func (*TopicView) ProtoMessage() {} +func (*TopicView) Descriptor() ([]byte, []int) { + return fileDescriptor_topic_view_3f7e8b858c6defb8, []int{0} +} +func (m *TopicView) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TopicView.Unmarshal(m, b) +} +func (m *TopicView) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TopicView.Marshal(b, m, deterministic) +} +func (dst *TopicView) XXX_Merge(src proto.Message) { + xxx_messageInfo_TopicView.Merge(dst, src) +} +func (m *TopicView) XXX_Size() int { + return xxx_messageInfo_TopicView.Size(m) +} +func (m *TopicView) XXX_DiscardUnknown() { + xxx_messageInfo_TopicView.DiscardUnknown(m) +} + +var xxx_messageInfo_TopicView proto.InternalMessageInfo + +func (m *TopicView) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*TopicView)(nil), "google.ads.googleads.v1.resources.TopicView") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/topic_view.proto", fileDescriptor_topic_view_3f7e8b858c6defb8) +} + +var fileDescriptor_topic_view_3f7e8b858c6defb8 = []byte{ + // 263 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x31, 0x4b, 0x03, 0x31, + 0x14, 0xc7, 0xb9, 0x13, 0x84, 0x1e, 0xea, 0xd0, 0x49, 0xc4, 0xc1, 0x2a, 0x05, 0xa7, 0x17, 0xa3, + 0x5b, 0x9c, 0xd2, 0xa5, 0xe0, 0x20, 0xa5, 0xc8, 0x0d, 0x72, 0x50, 0xe2, 0x25, 0x84, 0x40, 0x2f, + 0xef, 0xb8, 0x77, 0x5e, 0xbf, 0x8f, 0xa3, 0x1f, 0xc5, 0x8f, 0xe2, 0x57, 0x70, 0x91, 0x6b, 0x4c, + 0x46, 0xbb, 0xfd, 0x49, 0x7e, 0xff, 0xdf, 0x7b, 0x49, 0x71, 0x6f, 0x11, 0xed, 0xd6, 0x30, 0xa5, + 0x89, 0x85, 0x38, 0xa6, 0x81, 0xb3, 0xce, 0x10, 0xbe, 0x77, 0xb5, 0x21, 0xd6, 0x63, 0xeb, 0xea, + 0xcd, 0xe0, 0xcc, 0x0e, 0xda, 0x0e, 0x7b, 0x9c, 0xce, 0x02, 0x08, 0x4a, 0x13, 0xa4, 0x0e, 0x0c, + 0x1c, 0x52, 0xe7, 0xe2, 0x32, 0x6a, 0x5b, 0xc7, 0x94, 0xf7, 0xd8, 0xab, 0xde, 0xa1, 0xa7, 0x20, + 0xb8, 0xbe, 0x2b, 0x26, 0x2f, 0xa3, 0xb4, 0x74, 0x66, 0x37, 0xbd, 0x29, 0x4e, 0x63, 0x6f, 0xe3, + 0x55, 0x63, 0xce, 0xb3, 0xab, 0xec, 0x76, 0xb2, 0x3e, 0x89, 0x87, 0xcf, 0xaa, 0x31, 0x8b, 0x9f, + 0xac, 0x98, 0xd7, 0xd8, 0xc0, 0xc1, 0xc9, 0x8b, 0xb3, 0x64, 0x5e, 0x8d, 0xb3, 0x56, 0xd9, 0xeb, + 0xd3, 0x5f, 0xc9, 0xe2, 0x56, 0x79, 0x0b, 0xd8, 0x59, 0x66, 0x8d, 0xdf, 0x6f, 0x12, 0x9f, 0xdc, + 0x3a, 0xfa, 0xe7, 0x07, 0x1e, 0x53, 0xfa, 0xc8, 0x8f, 0x96, 0x52, 0x7e, 0xe6, 0xb3, 0x65, 0x50, + 0x4a, 0x4d, 0x10, 0xe2, 0x98, 0x4a, 0x0e, 0xeb, 0x48, 0x7e, 0x45, 0xa6, 0x92, 0x9a, 0xaa, 0xc4, + 0x54, 0x25, 0xaf, 0x12, 0xf3, 0x9d, 0xcf, 0xc3, 0x85, 0x10, 0x52, 0x93, 0x10, 0x89, 0x12, 0xa2, + 0xe4, 0x42, 0x24, 0xee, 0xed, 0x78, 0xbf, 0xec, 0xc3, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf2, + 0xd4, 0x08, 0xb0, 0xad, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_interest.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_interest.pb.go new file mode 100644 index 0000000..3252aca 
--- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_interest.pb.go @@ -0,0 +1,165 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/user_interest.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A user interest: a particular interest-based vertical to be targeted. +type UserInterest struct { + // The resource name of the user interest. + // User interest resource names have the form: + // + // `customers/{customer_id}/userInterests/{user_interest_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Taxonomy type of the user interest. + TaxonomyType enums.UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType `protobuf:"varint,2,opt,name=taxonomy_type,json=taxonomyType,proto3,enum=google.ads.googleads.v1.enums.UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType" json:"taxonomy_type,omitempty"` + // The ID of the user interest. + UserInterestId *wrappers.Int64Value `protobuf:"bytes,3,opt,name=user_interest_id,json=userInterestId,proto3" json:"user_interest_id,omitempty"` + // The name of the user interest. + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The parent of the user interest. + UserInterestParent *wrappers.StringValue `protobuf:"bytes,5,opt,name=user_interest_parent,json=userInterestParent,proto3" json:"user_interest_parent,omitempty"` + // True if the user interest is launched to all channels and locales. + LaunchedToAll *wrappers.BoolValue `protobuf:"bytes,6,opt,name=launched_to_all,json=launchedToAll,proto3" json:"launched_to_all,omitempty"` + // Availability information of the user interest. 
+ Availabilities []*common.CriterionCategoryAvailability `protobuf:"bytes,7,rep,name=availabilities,proto3" json:"availabilities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserInterest) Reset() { *m = UserInterest{} } +func (m *UserInterest) String() string { return proto.CompactTextString(m) } +func (*UserInterest) ProtoMessage() {} +func (*UserInterest) Descriptor() ([]byte, []int) { + return fileDescriptor_user_interest_36047fb460ea0a9e, []int{0} +} +func (m *UserInterest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserInterest.Unmarshal(m, b) +} +func (m *UserInterest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserInterest.Marshal(b, m, deterministic) +} +func (dst *UserInterest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserInterest.Merge(dst, src) +} +func (m *UserInterest) XXX_Size() int { + return xxx_messageInfo_UserInterest.Size(m) +} +func (m *UserInterest) XXX_DiscardUnknown() { + xxx_messageInfo_UserInterest.DiscardUnknown(m) +} + +var xxx_messageInfo_UserInterest proto.InternalMessageInfo + +func (m *UserInterest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *UserInterest) GetTaxonomyType() enums.UserInterestTaxonomyTypeEnum_UserInterestTaxonomyType { + if m != nil { + return m.TaxonomyType + } + return enums.UserInterestTaxonomyTypeEnum_UNSPECIFIED +} + +func (m *UserInterest) GetUserInterestId() *wrappers.Int64Value { + if m != nil { + return m.UserInterestId + } + return nil +} + +func (m *UserInterest) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *UserInterest) GetUserInterestParent() *wrappers.StringValue { + if m != nil { + return m.UserInterestParent + } + return nil +} + +func (m *UserInterest) GetLaunchedToAll() *wrappers.BoolValue { + if m != nil { + return m.LaunchedToAll + } + return nil +} + +func (m *UserInterest) GetAvailabilities() []*common.CriterionCategoryAvailability { + if m != nil { + return m.Availabilities + } + return nil +} + +func init() { + proto.RegisterType((*UserInterest)(nil), "google.ads.googleads.v1.resources.UserInterest") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/user_interest.proto", fileDescriptor_user_interest_36047fb460ea0a9e) +} + +var fileDescriptor_user_interest_36047fb460ea0a9e = []byte{ + // 519 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xdd, 0x8a, 0xd3, 0x40, + 0x14, 0x26, 0x6d, 0x5d, 0x31, 0xdb, 0x56, 0x0d, 0x5e, 0x84, 0xba, 0x48, 0x57, 0x59, 0xe8, 0xd5, + 0xc4, 0xd6, 0x9f, 0x8b, 0x88, 0x48, 0xba, 0x2e, 0x4b, 0xbd, 0x58, 0x4a, 0xad, 0xbd, 0x90, 0x42, + 0x98, 0x26, 0xc7, 0x38, 0x30, 0x99, 0x13, 0x66, 0x26, 0xd5, 0xbc, 0xce, 0x5e, 0xfa, 0x28, 0x3e, + 0x8a, 0x0f, 0x21, 0xd2, 0xfc, 0x91, 0xee, 0x52, 0xf5, 0xee, 0x24, 0xe7, 0xfb, 0x39, 0x67, 0xe6, + 0x1b, 0xf3, 0x55, 0x84, 0x18, 0x71, 0x70, 0x68, 0xa8, 0x9c, 0xa2, 0xdc, 0x55, 0xdb, 0xb1, 0x23, + 0x41, 0x61, 0x2a, 0x03, 0x50, 0x4e, 0xaa, 0x40, 0xfa, 0x4c, 0x68, 0x90, 0xa0, 0x34, 0x49, 0x24, + 0x6a, 0xb4, 0x4e, 0x0b, 0x2c, 0xa1, 0xa1, 0x22, 0x35, 0x8d, 0x6c, 0xc7, 0xa4, 0xa6, 0x0d, 0xde, + 0x1f, 0x52, 0x0e, 0x30, 0x8e, 0x51, 0x38, 0x81, 0x64, 0x1a, 0x24, 0x43, 0xe1, 0x07, 0x54, 0x43, + 0x84, 0x32, 0xf3, 0xe9, 0x96, 0x32, 0x4e, 0x37, 0x8c, 0x33, 0x9d, 0x15, 0x46, 0x83, 0x77, 0x87, + 0x54, 0x40, 0xa4, 0xf1, 0x8d, 0xd9, 0x7c, 
0x4d, 0xbf, 0xa3, 0xc0, 0x38, 0xf3, 0x75, 0x96, 0x40, + 0x29, 0xf0, 0xa4, 0x14, 0xc8, 0xbf, 0x36, 0xe9, 0x17, 0xe7, 0x9b, 0xa4, 0x49, 0x02, 0x52, 0x95, + 0xfd, 0x93, 0xca, 0x20, 0x61, 0x0e, 0x15, 0x02, 0x35, 0xd5, 0x0c, 0x45, 0xd9, 0x7d, 0x7a, 0xdd, + 0x31, 0xbb, 0x9f, 0x14, 0xc8, 0x59, 0x69, 0x61, 0x3d, 0x33, 0x7b, 0xd5, 0x8a, 0xbe, 0xa0, 0x31, + 0xd8, 0xc6, 0xd0, 0x18, 0xdd, 0x5b, 0x74, 0xab, 0x9f, 0x57, 0x34, 0x06, 0x2b, 0x33, 0x7b, 0x7b, + 0xa3, 0xd8, 0xad, 0xa1, 0x31, 0xea, 0x4f, 0x96, 0xe4, 0xd0, 0xa9, 0xe5, 0xcb, 0x90, 0xa6, 0xd1, + 0xb2, 0xe4, 0x2f, 0xb3, 0x04, 0x2e, 0x44, 0x1a, 0x1f, 0x6c, 0x2e, 0xba, 0xba, 0xf1, 0x65, 0x5d, + 0x98, 0x0f, 0xf6, 0xcf, 0x84, 0x85, 0x76, 0x7b, 0x68, 0x8c, 0x8e, 0x27, 0x8f, 0x2b, 0xf7, 0xea, + 0x24, 0xc8, 0x4c, 0xe8, 0xd7, 0x2f, 0x57, 0x94, 0xa7, 0xb0, 0xe8, 0xa7, 0x0d, 0xf9, 0x59, 0x68, + 0x3d, 0x37, 0x3b, 0xf9, 0x76, 0x9d, 0x9c, 0x7a, 0x72, 0x8b, 0xfa, 0x51, 0x4b, 0x26, 0xa2, 0x82, + 0x9b, 0x23, 0xad, 0x2b, 0xf3, 0xd1, 0xbe, 0x71, 0x42, 0x25, 0x08, 0x6d, 0xdf, 0xf9, 0x0f, 0x05, + 0xab, 0xe9, 0x3e, 0xcf, 0x79, 0xd6, 0xd4, 0xbc, 0xcf, 0x69, 0x2a, 0x82, 0xaf, 0x10, 0xfa, 0x1a, + 0x7d, 0xca, 0xb9, 0x7d, 0x94, 0x4b, 0x0d, 0x6e, 0x49, 0x4d, 0x11, 0x79, 0x21, 0xd4, 0xab, 0x28, + 0x4b, 0xf4, 0x38, 0xb7, 0xc0, 0xec, 0x37, 0x22, 0xc5, 0x40, 0xd9, 0x77, 0x87, 0xed, 0xd1, 0xf1, + 0xe4, 0xed, 0xc1, 0x8b, 0x28, 0xb2, 0x49, 0xce, 0xab, 0x6c, 0x9e, 0x97, 0xd1, 0xf4, 0x1a, 0xc9, + 0x5c, 0xdc, 0x10, 0x9d, 0xfe, 0x36, 0xcc, 0xb3, 0x00, 0x63, 0xf2, 0xcf, 0x37, 0x31, 0x7d, 0xd8, + 0xbc, 0xc5, 0xf9, 0x6e, 0xfe, 0xb9, 0xf1, 0xf9, 0x43, 0xc9, 0x8b, 0x90, 0x53, 0x11, 0x11, 0x94, + 0x91, 0x13, 0x81, 0xc8, 0xb7, 0xab, 0x22, 0x9f, 0x30, 0xf5, 0x97, 0x17, 0xfa, 0xa6, 0xae, 0xae, + 0x5b, 0xed, 0x4b, 0xcf, 0xfb, 0xd1, 0x3a, 0xbd, 0x2c, 0x24, 0xbd, 0x50, 0x91, 0xa2, 0xdc, 0x55, + 0xab, 0x31, 0x59, 0x54, 0xc8, 0x9f, 0x15, 0x66, 0xed, 0x85, 0x6a, 0x5d, 0x63, 0xd6, 0xab, 0xf1, + 0xba, 0xc6, 0xfc, 0x6a, 0x9d, 0x15, 0x0d, 0xd7, 0xf5, 0x42, 0xe5, 0xba, 0x35, 0xca, 0x75, 0x57, + 0x63, 0xd7, 0xad, 0x71, 0x9b, 0xa3, 0x7c, 0xd8, 0x17, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xda, + 0xa5, 0x56, 0xfb, 0x4d, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_list.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_list.pb.go new file mode 100644 index 0000000..ea992bc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/user_list.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/resources/user_list.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A user list. This is a list of users a customer may target. +type UserList struct { + // The resource name of the user list. + // User list resource names have the form: + // + // `customers/{customer_id}/userLists/{user_list_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Id of the user list. + Id *wrappers.Int64Value `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // A flag that indicates if a user may edit a list. Depends on the list + // ownership and list type. For example, external remarketing user lists are + // not editable. + // + // This field is read-only. + ReadOnly *wrappers.BoolValue `protobuf:"bytes,3,opt,name=read_only,json=readOnly,proto3" json:"read_only,omitempty"` + // Name of this user list. Depending on its access_reason, the user list name + // may not be unique (e.g. if access_reason=SHARED) + Name *wrappers.StringValue `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // Description of this user list. + Description *wrappers.StringValue `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + // Membership status of this user list. Indicates whether a user list is open + // or active. Only open user lists can accumulate more users and can be + // targeted to. + MembershipStatus enums.UserListMembershipStatusEnum_UserListMembershipStatus `protobuf:"varint,6,opt,name=membership_status,json=membershipStatus,proto3,enum=google.ads.googleads.v1.enums.UserListMembershipStatusEnum_UserListMembershipStatus" json:"membership_status,omitempty"` + // An ID from external system. It is used by user list sellers to correlate + // IDs on their systems. + IntegrationCode *wrappers.StringValue `protobuf:"bytes,7,opt,name=integration_code,json=integrationCode,proto3" json:"integration_code,omitempty"` + // Number of days a user's cookie stays on your list since its most recent + // addition to the list. This field must be between 0 and 540 inclusive. + // However, for CRM based userlists, this field can be set to 10000 which + // means no expiration. + // + // It'll be ignored for logical_user_list. + MembershipLifeSpan *wrappers.Int64Value `protobuf:"bytes,8,opt,name=membership_life_span,json=membershipLifeSpan,proto3" json:"membership_life_span,omitempty"` + // Estimated number of users in this user list, on the Google Display Network. + // This value is null if the number of users has not yet been determined. + // + // This field is read-only. + SizeForDisplay *wrappers.Int64Value `protobuf:"bytes,9,opt,name=size_for_display,json=sizeForDisplay,proto3" json:"size_for_display,omitempty"` + // Size range in terms of number of users of the UserList, on the Google + // Display Network. + // + // This field is read-only. + SizeRangeForDisplay enums.UserListSizeRangeEnum_UserListSizeRange `protobuf:"varint,10,opt,name=size_range_for_display,json=sizeRangeForDisplay,proto3,enum=google.ads.googleads.v1.enums.UserListSizeRangeEnum_UserListSizeRange" json:"size_range_for_display,omitempty"` + // Estimated number of users in this user list in the google.com domain. + // These are the users available for targeting in Search campaigns. + // This value is null if the number of users has not yet been determined. + // + // This field is read-only. 
+ SizeForSearch *wrappers.Int64Value `protobuf:"bytes,11,opt,name=size_for_search,json=sizeForSearch,proto3" json:"size_for_search,omitempty"` + // Size range in terms of number of users of the UserList, for Search ads. + // + // This field is read-only. + SizeRangeForSearch enums.UserListSizeRangeEnum_UserListSizeRange `protobuf:"varint,12,opt,name=size_range_for_search,json=sizeRangeForSearch,proto3,enum=google.ads.googleads.v1.enums.UserListSizeRangeEnum_UserListSizeRange" json:"size_range_for_search,omitempty"` + // Type of this list. + // + // This field is read-only. + Type enums.UserListTypeEnum_UserListType `protobuf:"varint,13,opt,name=type,proto3,enum=google.ads.googleads.v1.enums.UserListTypeEnum_UserListType" json:"type,omitempty"` + // Indicating the reason why this user list membership status is closed. It is + // only populated on lists that were automatically closed due to inactivity, + // and will be cleared once the list membership status becomes open. + ClosingReason enums.UserListClosingReasonEnum_UserListClosingReason `protobuf:"varint,14,opt,name=closing_reason,json=closingReason,proto3,enum=google.ads.googleads.v1.enums.UserListClosingReasonEnum_UserListClosingReason" json:"closing_reason,omitempty"` + // Indicates the reason this account has been granted access to the list. + // The reason can be SHARED, OWNED, LICENSED or SUBSCRIBED. + // + // This field is read-only. + AccessReason enums.AccessReasonEnum_AccessReason `protobuf:"varint,15,opt,name=access_reason,json=accessReason,proto3,enum=google.ads.googleads.v1.enums.AccessReasonEnum_AccessReason" json:"access_reason,omitempty"` + // Indicates if this share is still enabled. When a UserList is shared with + // the user this field is set to ENABLED. Later the userList owner can decide + // to revoke the share and make it DISABLED. + // The default value of this field is set to ENABLED. + AccountUserListStatus enums.UserListAccessStatusEnum_UserListAccessStatus `protobuf:"varint,16,opt,name=account_user_list_status,json=accountUserListStatus,proto3,enum=google.ads.googleads.v1.enums.UserListAccessStatusEnum_UserListAccessStatus" json:"account_user_list_status,omitempty"` + // Indicates if this user list is eligible for Google Search Network. + EligibleForSearch *wrappers.BoolValue `protobuf:"bytes,17,opt,name=eligible_for_search,json=eligibleForSearch,proto3" json:"eligible_for_search,omitempty"` + // Indicates this user list is eligible for Google Display Network. + // + // This field is read-only. + EligibleForDisplay *wrappers.BoolValue `protobuf:"bytes,18,opt,name=eligible_for_display,json=eligibleForDisplay,proto3" json:"eligible_for_display,omitempty"` + // The user list. + // + // Exactly one must be set. 
+ // + // Types that are valid to be assigned to UserList: + // *UserList_CrmBasedUserList + // *UserList_SimilarUserList + // *UserList_RuleBasedUserList + // *UserList_LogicalUserList + // *UserList_BasicUserList + UserList isUserList_UserList `protobuf_oneof:"user_list"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserList) Reset() { *m = UserList{} } +func (m *UserList) String() string { return proto.CompactTextString(m) } +func (*UserList) ProtoMessage() {} +func (*UserList) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_263e88425715e3b0, []int{0} +} +func (m *UserList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserList.Unmarshal(m, b) +} +func (m *UserList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserList.Marshal(b, m, deterministic) +} +func (dst *UserList) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserList.Merge(dst, src) +} +func (m *UserList) XXX_Size() int { + return xxx_messageInfo_UserList.Size(m) +} +func (m *UserList) XXX_DiscardUnknown() { + xxx_messageInfo_UserList.DiscardUnknown(m) +} + +var xxx_messageInfo_UserList proto.InternalMessageInfo + +func (m *UserList) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *UserList) GetId() *wrappers.Int64Value { + if m != nil { + return m.Id + } + return nil +} + +func (m *UserList) GetReadOnly() *wrappers.BoolValue { + if m != nil { + return m.ReadOnly + } + return nil +} + +func (m *UserList) GetName() *wrappers.StringValue { + if m != nil { + return m.Name + } + return nil +} + +func (m *UserList) GetDescription() *wrappers.StringValue { + if m != nil { + return m.Description + } + return nil +} + +func (m *UserList) GetMembershipStatus() enums.UserListMembershipStatusEnum_UserListMembershipStatus { + if m != nil { + return m.MembershipStatus + } + return enums.UserListMembershipStatusEnum_UNSPECIFIED +} + +func (m *UserList) GetIntegrationCode() *wrappers.StringValue { + if m != nil { + return m.IntegrationCode + } + return nil +} + +func (m *UserList) GetMembershipLifeSpan() *wrappers.Int64Value { + if m != nil { + return m.MembershipLifeSpan + } + return nil +} + +func (m *UserList) GetSizeForDisplay() *wrappers.Int64Value { + if m != nil { + return m.SizeForDisplay + } + return nil +} + +func (m *UserList) GetSizeRangeForDisplay() enums.UserListSizeRangeEnum_UserListSizeRange { + if m != nil { + return m.SizeRangeForDisplay + } + return enums.UserListSizeRangeEnum_UNSPECIFIED +} + +func (m *UserList) GetSizeForSearch() *wrappers.Int64Value { + if m != nil { + return m.SizeForSearch + } + return nil +} + +func (m *UserList) GetSizeRangeForSearch() enums.UserListSizeRangeEnum_UserListSizeRange { + if m != nil { + return m.SizeRangeForSearch + } + return enums.UserListSizeRangeEnum_UNSPECIFIED +} + +func (m *UserList) GetType() enums.UserListTypeEnum_UserListType { + if m != nil { + return m.Type + } + return enums.UserListTypeEnum_UNSPECIFIED +} + +func (m *UserList) GetClosingReason() enums.UserListClosingReasonEnum_UserListClosingReason { + if m != nil { + return m.ClosingReason + } + return enums.UserListClosingReasonEnum_UNSPECIFIED +} + +func (m *UserList) GetAccessReason() enums.AccessReasonEnum_AccessReason { + if m != nil { + return m.AccessReason + } + return enums.AccessReasonEnum_UNSPECIFIED +} + +func (m *UserList) GetAccountUserListStatus() enums.UserListAccessStatusEnum_UserListAccessStatus { + if m 
!= nil { + return m.AccountUserListStatus + } + return enums.UserListAccessStatusEnum_UNSPECIFIED +} + +func (m *UserList) GetEligibleForSearch() *wrappers.BoolValue { + if m != nil { + return m.EligibleForSearch + } + return nil +} + +func (m *UserList) GetEligibleForDisplay() *wrappers.BoolValue { + if m != nil { + return m.EligibleForDisplay + } + return nil +} + +type isUserList_UserList interface { + isUserList_UserList() +} + +type UserList_CrmBasedUserList struct { + CrmBasedUserList *common.CrmBasedUserListInfo `protobuf:"bytes,19,opt,name=crm_based_user_list,json=crmBasedUserList,proto3,oneof"` +} + +type UserList_SimilarUserList struct { + SimilarUserList *common.SimilarUserListInfo `protobuf:"bytes,20,opt,name=similar_user_list,json=similarUserList,proto3,oneof"` +} + +type UserList_RuleBasedUserList struct { + RuleBasedUserList *common.RuleBasedUserListInfo `protobuf:"bytes,21,opt,name=rule_based_user_list,json=ruleBasedUserList,proto3,oneof"` +} + +type UserList_LogicalUserList struct { + LogicalUserList *common.LogicalUserListInfo `protobuf:"bytes,22,opt,name=logical_user_list,json=logicalUserList,proto3,oneof"` +} + +type UserList_BasicUserList struct { + BasicUserList *common.BasicUserListInfo `protobuf:"bytes,23,opt,name=basic_user_list,json=basicUserList,proto3,oneof"` +} + +func (*UserList_CrmBasedUserList) isUserList_UserList() {} + +func (*UserList_SimilarUserList) isUserList_UserList() {} + +func (*UserList_RuleBasedUserList) isUserList_UserList() {} + +func (*UserList_LogicalUserList) isUserList_UserList() {} + +func (*UserList_BasicUserList) isUserList_UserList() {} + +func (m *UserList) GetUserList() isUserList_UserList { + if m != nil { + return m.UserList + } + return nil +} + +func (m *UserList) GetCrmBasedUserList() *common.CrmBasedUserListInfo { + if x, ok := m.GetUserList().(*UserList_CrmBasedUserList); ok { + return x.CrmBasedUserList + } + return nil +} + +func (m *UserList) GetSimilarUserList() *common.SimilarUserListInfo { + if x, ok := m.GetUserList().(*UserList_SimilarUserList); ok { + return x.SimilarUserList + } + return nil +} + +func (m *UserList) GetRuleBasedUserList() *common.RuleBasedUserListInfo { + if x, ok := m.GetUserList().(*UserList_RuleBasedUserList); ok { + return x.RuleBasedUserList + } + return nil +} + +func (m *UserList) GetLogicalUserList() *common.LogicalUserListInfo { + if x, ok := m.GetUserList().(*UserList_LogicalUserList); ok { + return x.LogicalUserList + } + return nil +} + +func (m *UserList) GetBasicUserList() *common.BasicUserListInfo { + if x, ok := m.GetUserList().(*UserList_BasicUserList); ok { + return x.BasicUserList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*UserList) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UserList_OneofMarshaler, _UserList_OneofUnmarshaler, _UserList_OneofSizer, []interface{}{ + (*UserList_CrmBasedUserList)(nil), + (*UserList_SimilarUserList)(nil), + (*UserList_RuleBasedUserList)(nil), + (*UserList_LogicalUserList)(nil), + (*UserList_BasicUserList)(nil), + } +} + +func _UserList_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UserList) + // user_list + switch x := m.UserList.(type) { + case *UserList_CrmBasedUserList: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CrmBasedUserList); err != nil { + return err + } + case *UserList_SimilarUserList: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SimilarUserList); err != nil { + return err + } + case *UserList_RuleBasedUserList: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RuleBasedUserList); err != nil { + return err + } + case *UserList_LogicalUserList: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LogicalUserList); err != nil { + return err + } + case *UserList_BasicUserList: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BasicUserList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UserList.UserList has unexpected type %T", x) + } + return nil +} + +func _UserList_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UserList) + switch tag { + case 19: // user_list.crm_based_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.CrmBasedUserListInfo) + err := b.DecodeMessage(msg) + m.UserList = &UserList_CrmBasedUserList{msg} + return true, err + case 20: // user_list.similar_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.SimilarUserListInfo) + err := b.DecodeMessage(msg) + m.UserList = &UserList_SimilarUserList{msg} + return true, err + case 21: // user_list.rule_based_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.RuleBasedUserListInfo) + err := b.DecodeMessage(msg) + m.UserList = &UserList_RuleBasedUserList{msg} + return true, err + case 22: // user_list.logical_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.LogicalUserListInfo) + err := b.DecodeMessage(msg) + m.UserList = &UserList_LogicalUserList{msg} + return true, err + case 23: // user_list.basic_user_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(common.BasicUserListInfo) + err := b.DecodeMessage(msg) + m.UserList = &UserList_BasicUserList{msg} + return true, err + default: + return false, nil + } +} + +func _UserList_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UserList) + // user_list + switch x := m.UserList.(type) { + case *UserList_CrmBasedUserList: + s := proto.Size(x.CrmBasedUserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserList_SimilarUserList: + s := proto.Size(x.SimilarUserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserList_RuleBasedUserList: + s := proto.Size(x.RuleBasedUserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*UserList_LogicalUserList: + s := proto.Size(x.LogicalUserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserList_BasicUserList: + s := proto.Size(x.BasicUserList) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*UserList)(nil), "google.ads.googleads.v1.resources.UserList") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/user_list.proto", fileDescriptor_user_list_263e88425715e3b0) +} + +var fileDescriptor_user_list_263e88425715e3b0 = []byte{ + // 927 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0x4d, 0x6f, 0xdb, 0x36, + 0x1c, 0xc6, 0x67, 0x37, 0xeb, 0x12, 0x26, 0x8e, 0x6d, 0x26, 0xe9, 0x84, 0xac, 0x18, 0xd2, 0x0d, + 0x05, 0x02, 0x0c, 0x90, 0xe6, 0xb4, 0x7b, 0x81, 0x5b, 0x6c, 0xb0, 0xb3, 0x36, 0x6b, 0xe1, 0x76, + 0x81, 0xdc, 0xe5, 0xb0, 0x05, 0x10, 0x68, 0x89, 0x56, 0x08, 0x50, 0xa4, 0x40, 0x4a, 0x1d, 0xdc, + 0x9d, 0x76, 0xd8, 0x17, 0xd9, 0x71, 0x1f, 0x65, 0x1f, 0x65, 0x1f, 0x60, 0x87, 0x9d, 0x06, 0x51, + 0xa2, 0x4c, 0xd9, 0x75, 0xa5, 0xc3, 0x6e, 0xf4, 0x9f, 0xff, 0xdf, 0xf3, 0xe8, 0xe1, 0x8b, 0x2c, + 0x30, 0x08, 0x39, 0x0f, 0x29, 0x76, 0x50, 0x20, 0x9d, 0x7c, 0x98, 0x8d, 0x5e, 0x0f, 0x1c, 0x81, + 0x25, 0x4f, 0x85, 0x8f, 0xa5, 0x93, 0x4a, 0x2c, 0x3c, 0x4a, 0x64, 0x62, 0xc7, 0x82, 0x27, 0x1c, + 0xde, 0xcb, 0xfb, 0x6c, 0x14, 0x48, 0xbb, 0x44, 0xec, 0xd7, 0x03, 0xbb, 0x44, 0x8e, 0x9d, 0x4d, + 0xaa, 0x3e, 0x8f, 0x22, 0xce, 0x96, 0x92, 0x32, 0xd7, 0x3c, 0xde, 0xf8, 0x18, 0x98, 0xa5, 0x91, + 0x74, 0x90, 0xef, 0x63, 0x29, 0x3d, 0x81, 0x91, 0xe4, 0xac, 0x40, 0x1e, 0xbd, 0x1b, 0x29, 0x2d, + 0xbc, 0x02, 0x96, 0x09, 0x4a, 0x52, 0xed, 0xf7, 0xb8, 0x29, 0xec, 0x53, 0x2e, 0x09, 0x0b, 0xab, + 0xd6, 0xdf, 0x36, 0xa5, 0x23, 0x1c, 0xcd, 0xb0, 0x90, 0x37, 0x24, 0xae, 0xda, 0x7f, 0xdd, 0x54, + 0x40, 0x92, 0x37, 0xd8, 0x13, 0x88, 0x85, 0xb8, 0x20, 0xcf, 0x9a, 0x92, 0xc9, 0x22, 0xd6, 0xcc, + 0xc7, 0x05, 0xa3, 0x7e, 0xcd, 0xd2, 0xb9, 0xf3, 0x8b, 0x40, 0x71, 0x8c, 0x85, 0x7e, 0x9a, 0xbb, + 0x5a, 0x33, 0x26, 0x0e, 0x62, 0x8c, 0x27, 0x28, 0x21, 0x9c, 0x15, 0xb3, 0x9f, 0xfc, 0xd3, 0x05, + 0xdb, 0x3f, 0x4a, 0x2c, 0x26, 0x44, 0x26, 0xf0, 0x53, 0xd0, 0xd1, 0xbb, 0xec, 0x31, 0x14, 0x61, + 0xab, 0x75, 0xd2, 0x3a, 0xdd, 0x71, 0xf7, 0x74, 0xf1, 0x25, 0x8a, 0x30, 0xfc, 0x0c, 0xb4, 0x49, + 0x60, 0xb5, 0x4f, 0x5a, 0xa7, 0xbb, 0x67, 0x1f, 0x15, 0x47, 0xc4, 0xd6, 0xe6, 0xf6, 0x33, 0x96, + 0x7c, 0xf9, 0xf0, 0x0a, 0xd1, 0x14, 0xbb, 0x6d, 0x12, 0xc0, 0xaf, 0xc0, 0x8e, 0xc0, 0x28, 0xf0, + 0x38, 0xa3, 0x0b, 0xeb, 0x96, 0x62, 0x8e, 0xd7, 0x98, 0x31, 0xe7, 0x34, 0x47, 0xb6, 0xb3, 0xe6, + 0x1f, 0x18, 0x5d, 0xc0, 0xcf, 0xc1, 0x96, 0x7a, 0x82, 0x2d, 0xc5, 0xdc, 0x5d, 0x63, 0xa6, 0x89, + 0x20, 0x2c, 0xcc, 0x29, 0xd5, 0x09, 0xbf, 0x01, 0xbb, 0x01, 0x96, 0xbe, 0x20, 0x71, 0x96, 0xcf, + 0x7a, 0xbf, 0x01, 0x68, 0x02, 0xf0, 0xb7, 0x16, 0xe8, 0xaf, 0xed, 0xa8, 0x75, 0xfb, 0xa4, 0x75, + 0xba, 0x7f, 0xf6, 0xca, 0xde, 0x74, 0x2b, 0xd4, 0xc6, 0xd8, 0x7a, 0x05, 0x5f, 0x94, 0xfc, 0x54, + 0xe1, 0x4f, 0x58, 0x1a, 0x6d, 0x9c, 0x74, 0x7b, 0xd1, 0x4a, 0x05, 0x5e, 0x80, 0x1e, 0x61, 0x09, + 0x0e, 0x85, 0xda, 0x23, 0xcf, 0xe7, 0x01, 0xb6, 0x3e, 0x68, 0x10, 0xa4, 0x6b, 0x50, 0xe7, 0x3c, + 0xc0, 0xf0, 0x05, 0x38, 0x34, 0xb2, 0x50, 0x32, 0xc7, 0x9e, 0x8c, 0x11, 0xb3, 0xb6, 0xeb, 0xb7, + 0x0d, 0x2e, 0xc1, 0x09, 0x99, 0xe3, 0x69, 0x8c, 0x18, 0x7c, 0x02, 0x7a, 0xea, 0xac, 
0xce, 0xb9, + 0xf0, 0x02, 0x22, 0x63, 0x8a, 0x16, 0xd6, 0x4e, 0xbd, 0xd4, 0x7e, 0x06, 0x3d, 0xe5, 0xe2, 0xbb, + 0x1c, 0x81, 0xbf, 0x82, 0x3b, 0xcb, 0x23, 0x5f, 0x11, 0x03, 0x6a, 0x99, 0x9f, 0x36, 0x5c, 0xe6, + 0x29, 0x79, 0x83, 0xdd, 0x4c, 0xa3, 0xb2, 0xbe, 0x65, 0xd5, 0x3d, 0x90, 0x7a, 0x68, 0x98, 0x9f, + 0x83, 0x6e, 0x99, 0x41, 0x62, 0x24, 0xfc, 0x1b, 0x6b, 0xb7, 0x3e, 0x42, 0xa7, 0x88, 0x30, 0x55, + 0x04, 0x5c, 0x80, 0xa3, 0x95, 0x04, 0x85, 0xd4, 0xde, 0xff, 0x1a, 0x00, 0x9a, 0x01, 0x0a, 0xeb, + 0x4b, 0xb0, 0x95, 0xdd, 0x7a, 0xab, 0xa3, 0x9c, 0x1e, 0x37, 0x74, 0x7a, 0xb5, 0x88, 0xab, 0x26, + 0x59, 0xc1, 0x55, 0x4a, 0x30, 0x05, 0xfb, 0xd5, 0x17, 0xa0, 0xb5, 0xaf, 0xb4, 0x5f, 0x36, 0xd4, + 0x3e, 0xcf, 0x61, 0x57, 0xb1, 0x15, 0x93, 0xca, 0x8c, 0xdb, 0xf1, 0xcd, 0x9f, 0x10, 0x81, 0x4e, + 0xe5, 0x8d, 0x6f, 0x75, 0x1b, 0x25, 0x1a, 0x29, 0xc6, 0x30, 0x33, 0x0b, 0xee, 0x1e, 0x32, 0x7e, + 0xc1, 0xdf, 0x5b, 0xc0, 0x42, 0xbe, 0xcf, 0x53, 0x96, 0x78, 0xc6, 0xeb, 0x36, 0xbf, 0xd2, 0x3d, + 0x65, 0x37, 0x69, 0x18, 0x32, 0x77, 0x79, 0xcb, 0x75, 0x36, 0x27, 0xdc, 0xa3, 0xc2, 0xad, 0xdc, + 0xca, 0xfc, 0x3e, 0x3f, 0x07, 0x07, 0x98, 0x92, 0x90, 0xcc, 0x68, 0xe5, 0xb0, 0xf4, 0x6b, 0x5f, + 0x84, 0x7d, 0x8d, 0x2d, 0xf7, 0x7f, 0x02, 0x0e, 0x2b, 0x5a, 0xfa, 0xea, 0xc0, 0x5a, 0x31, 0x68, + 0x88, 0xe9, 0xdb, 0x80, 0xc1, 0x81, 0x2f, 0x22, 0x6f, 0x86, 0x24, 0x0e, 0x96, 0x4b, 0x64, 0x1d, + 0x28, 0xb1, 0x87, 0x1b, 0xd7, 0x26, 0xff, 0x87, 0xb7, 0xcf, 0x45, 0x34, 0xce, 0x48, 0x1d, 0xf7, + 0x19, 0x9b, 0xf3, 0xef, 0xdf, 0x73, 0x7b, 0xfe, 0x4a, 0x1d, 0x22, 0xd0, 0x97, 0x24, 0x22, 0x14, + 0x09, 0xc3, 0xe4, 0x50, 0x99, 0x3c, 0xa8, 0x33, 0x99, 0xe6, 0xe0, 0x8a, 0x47, 0x57, 0x56, 0xcb, + 0xf0, 0x06, 0x1c, 0x8a, 0x94, 0xe2, 0xb5, 0x28, 0x47, 0xca, 0xe5, 0x8b, 0x3a, 0x17, 0x37, 0xa5, + 0xf8, 0x6d, 0x59, 0xfa, 0x62, 0x75, 0x22, 0x0b, 0x43, 0x79, 0x48, 0x7c, 0x44, 0x0d, 0x9b, 0x3b, + 0xcd, 0xc2, 0x4c, 0x72, 0x70, 0x35, 0x0c, 0xad, 0x96, 0xe1, 0xcf, 0xa0, 0x3b, 0x43, 0x92, 0xf8, + 0x86, 0xc1, 0x87, 0xca, 0x60, 0x50, 0x67, 0x30, 0xce, 0xb0, 0x15, 0xf9, 0xce, 0xcc, 0x2c, 0x8e, + 0x77, 0xc1, 0x4e, 0x29, 0x3b, 0xfe, 0xb7, 0x05, 0xee, 0xfb, 0x3c, 0xb2, 0x6b, 0x3f, 0xf7, 0xc6, + 0x1d, 0x2d, 0x70, 0x99, 0x1d, 0xad, 0xcb, 0xd6, 0x4f, 0xcf, 0x0b, 0x26, 0xe4, 0x14, 0xb1, 0xd0, + 0xe6, 0x22, 0x74, 0x42, 0xcc, 0xd4, 0xc1, 0xd3, 0x5f, 0x2d, 0x31, 0x91, 0xef, 0xf8, 0xe8, 0x7c, + 0x54, 0x8e, 0xfe, 0x68, 0xdf, 0xba, 0x18, 0x8d, 0xfe, 0x6c, 0xdf, 0xbb, 0xc8, 0x25, 0x47, 0x81, + 0xb4, 0xf3, 0x61, 0x36, 0xba, 0x1a, 0xd8, 0xae, 0xee, 0xfc, 0x4b, 0xf7, 0x5c, 0x8f, 0x02, 0x79, + 0x5d, 0xf6, 0x5c, 0x5f, 0x0d, 0xae, 0xcb, 0x9e, 0xbf, 0xdb, 0xf7, 0xf3, 0x89, 0xe1, 0x70, 0x14, + 0xc8, 0xe1, 0xb0, 0xec, 0x1a, 0x0e, 0xaf, 0x06, 0xc3, 0x61, 0xd9, 0x37, 0xbb, 0xad, 0x1e, 0xf6, + 0xc1, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x06, 0xb6, 0xd5, 0x8a, 0x20, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/video.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/video.pb.go new file mode 100644 index 0000000..55061f6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/resources/video.pb.go @@ -0,0 +1,136 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/resources/video.proto + +package resources // import "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A video. +type Video struct { + // The resource name of the video. + // Video resource names have the form: + // + // `customers/{customer_id}/videos/{video_id}` + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The ID of the video. + Id *wrappers.StringValue `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The owner channel id of the video. + ChannelId *wrappers.StringValue `protobuf:"bytes,3,opt,name=channel_id,json=channelId,proto3" json:"channel_id,omitempty"` + // The duration of the video in milliseconds. + DurationMillis *wrappers.Int64Value `protobuf:"bytes,4,opt,name=duration_millis,json=durationMillis,proto3" json:"duration_millis,omitempty"` + // The title of the video. + Title *wrappers.StringValue `protobuf:"bytes,5,opt,name=title,proto3" json:"title,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Video) Reset() { *m = Video{} } +func (m *Video) String() string { return proto.CompactTextString(m) } +func (*Video) ProtoMessage() {} +func (*Video) Descriptor() ([]byte, []int) { + return fileDescriptor_video_71e1c02985f5b3f4, []int{0} +} +func (m *Video) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Video.Unmarshal(m, b) +} +func (m *Video) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Video.Marshal(b, m, deterministic) +} +func (dst *Video) XXX_Merge(src proto.Message) { + xxx_messageInfo_Video.Merge(dst, src) +} +func (m *Video) XXX_Size() int { + return xxx_messageInfo_Video.Size(m) +} +func (m *Video) XXX_DiscardUnknown() { + xxx_messageInfo_Video.DiscardUnknown(m) +} + +var xxx_messageInfo_Video proto.InternalMessageInfo + +func (m *Video) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Video) GetId() *wrappers.StringValue { + if m != nil { + return m.Id + } + return nil +} + +func (m *Video) GetChannelId() *wrappers.StringValue { + if m != nil { + return m.ChannelId + } + return nil +} + +func (m *Video) GetDurationMillis() *wrappers.Int64Value { + if m != nil { + return m.DurationMillis + } + return nil +} + +func (m *Video) GetTitle() *wrappers.StringValue { + if m != nil { + return m.Title + } + return nil +} + +func init() { + proto.RegisterType((*Video)(nil), "google.ads.googleads.v1.resources.Video") +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/resources/video.proto", fileDescriptor_video_71e1c02985f5b3f4) +} + +var fileDescriptor_video_71e1c02985f5b3f4 = []byte{ + // 370 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xcf, 0x4a, 0xf3, 0x40, + 0x14, 0xc5, 0x49, 0xfa, 0xf5, 0x83, 0x8e, 0xff, 0x20, 0xab, 0x50, 0x8b, 0xb4, 0x4a, 0xa1, 0x0b, + 0x9d, 0x90, 0x2a, 0x2e, 0xa6, 0xab, 0x14, 0xa1, 0x54, 0x50, 0x4a, 0x85, 0x2c, 0x24, 0x50, 0xa6, + 0x9d, 0x31, 0x0e, 0x24, 0x33, 0x61, 0x26, 0xa9, 0xcf, 0xe1, 0x2b, 0xb8, 0xf4, 0x51, 0x7c, 0x14, + 0x5f, 0xc0, 0xad, 0x24, 0x93, 0xc9, 0x46, 0xd0, 0xee, 0x0e, 0x33, 0xbf, 0x73, 0xce, 0xe5, 0x5e, + 0x70, 0x11, 0x0b, 0x11, 0x27, 0xd4, 0xc3, 0x44, 0x79, 0x5a, 0x96, 0x6a, 0xeb, 0x7b, 0x92, 0x2a, + 0x51, 0xc8, 0x0d, 0x55, 0xde, 0x96, 0x11, 0x2a, 0x60, 0x26, 0x45, 0x2e, 0x9c, 0x81, 0x66, 0x20, + 0x26, 0x0a, 0x36, 0x38, 0xdc, 0xfa, 0xb0, 0xc1, 0xbb, 0x27, 0x75, 0x62, 0x65, 0x58, 0x17, 0x4f, + 0xde, 0x8b, 0xc4, 0x59, 0x46, 0xa5, 0xd2, 0x11, 0xdd, 0x9e, 0x69, 0xcc, 0x98, 0x87, 0x39, 0x17, + 0x39, 0xce, 0x99, 0xe0, 0xf5, 0xef, 0xe9, 0xab, 0x0d, 0xda, 0x61, 0x59, 0xe8, 0x9c, 0x81, 0x03, + 0x13, 0xba, 0xe2, 0x38, 0xa5, 0xae, 0xd5, 0xb7, 0x46, 0x9d, 0xe5, 0xbe, 0x79, 0xbc, 0xc7, 0x29, + 0x75, 0xce, 0x81, 0xcd, 0x88, 0x6b, 0xf7, 0xad, 0xd1, 0xde, 0xb8, 0x57, 0x4f, 0x04, 0x4d, 0x33, + 0x7c, 0xc8, 0x25, 0xe3, 0x71, 0x88, 0x93, 0x82, 0x2e, 0x6d, 0x46, 0x9c, 0x09, 0x00, 0x9b, 0x67, + 0xcc, 0x39, 0x4d, 0x56, 0x8c, 0xb8, 0xad, 0x1d, 0x5c, 0x9d, 0x9a, 0x9f, 0x13, 0xe7, 0x06, 0x1c, + 0x91, 0x42, 0x56, 0xc3, 0xae, 0x52, 0x96, 0x24, 0x4c, 0xb9, 0xff, 0xaa, 0x84, 0xe3, 0x1f, 0x09, + 0x73, 0x9e, 0x5f, 0x5f, 0xe9, 0x80, 0x43, 0xe3, 0xb9, 0xab, 0x2c, 0xce, 0x18, 0xb4, 0x73, 0x96, + 0x27, 0xd4, 0x6d, 0xef, 0xd0, 0xae, 0xd1, 0xe9, 0x97, 0x05, 0x86, 0x1b, 0x91, 0xc2, 0x3f, 0x77, + 0x3f, 0x05, 0xd5, 0xea, 0x16, 0x65, 0xd6, 0xc2, 0x7a, 0xbc, 0xad, 0x0d, 0xb1, 0x48, 0x30, 0x8f, + 0xa1, 0x90, 0xb1, 0x17, 0x53, 0x5e, 0x35, 0x99, 0x5b, 0x67, 0x4c, 0xfd, 0x72, 0xfa, 0x49, 0xa3, + 0xde, 0xec, 0xd6, 0x2c, 0x08, 0xde, 0xed, 0xc1, 0x4c, 0x47, 0x06, 0x44, 0x41, 0x2d, 0x4b, 0x15, + 0xfa, 0x70, 0x69, 0xc8, 0x0f, 0xc3, 0x44, 0x01, 0x51, 0x51, 0xc3, 0x44, 0xa1, 0x1f, 0x35, 0xcc, + 0xa7, 0x3d, 0xd4, 0x1f, 0x08, 0x05, 0x44, 0x21, 0xd4, 0x50, 0x08, 0x85, 0x3e, 0x42, 0x0d, 0xb7, + 0xfe, 0x5f, 0x0d, 0x7b, 0xf9, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x09, 0xfb, 0xcf, 0xd9, 0xa6, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_proposal_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_proposal_service.pb.go new file mode 100644 index 0000000..dc78827 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_proposal_service.pb.go @@ -0,0 +1,534 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/account_budget_proposal_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [AccountBudgetProposalService.GetAccountBudgetProposal][google.ads.googleads.v1.services.AccountBudgetProposalService.GetAccountBudgetProposal]. +type GetAccountBudgetProposalRequest struct { + // The resource name of the account-level budget proposal to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAccountBudgetProposalRequest) Reset() { *m = GetAccountBudgetProposalRequest{} } +func (m *GetAccountBudgetProposalRequest) String() string { return proto.CompactTextString(m) } +func (*GetAccountBudgetProposalRequest) ProtoMessage() {} +func (*GetAccountBudgetProposalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_service_19af3128d079e096, []int{0} +} +func (m *GetAccountBudgetProposalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAccountBudgetProposalRequest.Unmarshal(m, b) +} +func (m *GetAccountBudgetProposalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAccountBudgetProposalRequest.Marshal(b, m, deterministic) +} +func (dst *GetAccountBudgetProposalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAccountBudgetProposalRequest.Merge(dst, src) +} +func (m *GetAccountBudgetProposalRequest) XXX_Size() int { + return xxx_messageInfo_GetAccountBudgetProposalRequest.Size(m) +} +func (m *GetAccountBudgetProposalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAccountBudgetProposalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAccountBudgetProposalRequest proto.InternalMessageInfo + +func (m *GetAccountBudgetProposalRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [AccountBudgetProposalService.MutateAccountBudgetProposal][google.ads.googleads.v1.services.AccountBudgetProposalService.MutateAccountBudgetProposal]. +type MutateAccountBudgetProposalRequest struct { + // The ID of the customer. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The operation to perform on an individual account-level budget proposal. + Operation *AccountBudgetProposalOperation `protobuf:"bytes,2,opt,name=operation,proto3" json:"operation,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,3,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAccountBudgetProposalRequest) Reset() { *m = MutateAccountBudgetProposalRequest{} } +func (m *MutateAccountBudgetProposalRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAccountBudgetProposalRequest) ProtoMessage() {} +func (*MutateAccountBudgetProposalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_service_19af3128d079e096, []int{1} +} +func (m *MutateAccountBudgetProposalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAccountBudgetProposalRequest.Unmarshal(m, b) +} +func (m *MutateAccountBudgetProposalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAccountBudgetProposalRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAccountBudgetProposalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAccountBudgetProposalRequest.Merge(dst, src) +} +func (m *MutateAccountBudgetProposalRequest) XXX_Size() int { + return xxx_messageInfo_MutateAccountBudgetProposalRequest.Size(m) +} +func (m *MutateAccountBudgetProposalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAccountBudgetProposalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAccountBudgetProposalRequest proto.InternalMessageInfo + +func (m *MutateAccountBudgetProposalRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAccountBudgetProposalRequest) GetOperation() *AccountBudgetProposalOperation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *MutateAccountBudgetProposalRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation to propose the creation of a new account-level budget or +// edit/end/remove an existing one. +type AccountBudgetProposalOperation struct { + // FieldMask that determines which budget fields are modified. While budgets + // may be modified, proposals that propose such modifications are final. + // Therefore, update operations are not supported for proposals. + // + // Proposals that modify budgets have the 'update' proposal type. Specifying + // a mask for any other proposal type is considered an error. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AccountBudgetProposalOperation_Create + // *AccountBudgetProposalOperation_Remove + Operation isAccountBudgetProposalOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AccountBudgetProposalOperation) Reset() { *m = AccountBudgetProposalOperation{} } +func (m *AccountBudgetProposalOperation) String() string { return proto.CompactTextString(m) } +func (*AccountBudgetProposalOperation) ProtoMessage() {} +func (*AccountBudgetProposalOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_service_19af3128d079e096, []int{2} +} +func (m *AccountBudgetProposalOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AccountBudgetProposalOperation.Unmarshal(m, b) +} +func (m *AccountBudgetProposalOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AccountBudgetProposalOperation.Marshal(b, m, deterministic) +} +func (dst *AccountBudgetProposalOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AccountBudgetProposalOperation.Merge(dst, src) +} +func (m *AccountBudgetProposalOperation) XXX_Size() int { + return xxx_messageInfo_AccountBudgetProposalOperation.Size(m) +} +func (m *AccountBudgetProposalOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AccountBudgetProposalOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AccountBudgetProposalOperation proto.InternalMessageInfo + +func (m *AccountBudgetProposalOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAccountBudgetProposalOperation_Operation interface { + isAccountBudgetProposalOperation_Operation() +} + +type AccountBudgetProposalOperation_Create struct { + Create *resources.AccountBudgetProposal `protobuf:"bytes,2,opt,name=create,proto3,oneof"` +} + +type AccountBudgetProposalOperation_Remove struct { + Remove string `protobuf:"bytes,1,opt,name=remove,proto3,oneof"` +} + +func (*AccountBudgetProposalOperation_Create) isAccountBudgetProposalOperation_Operation() {} + +func (*AccountBudgetProposalOperation_Remove) isAccountBudgetProposalOperation_Operation() {} + +func (m *AccountBudgetProposalOperation) GetOperation() isAccountBudgetProposalOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AccountBudgetProposalOperation) GetCreate() *resources.AccountBudgetProposal { + if x, ok := m.GetOperation().(*AccountBudgetProposalOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AccountBudgetProposalOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AccountBudgetProposalOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AccountBudgetProposalOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AccountBudgetProposalOperation_OneofMarshaler, _AccountBudgetProposalOperation_OneofUnmarshaler, _AccountBudgetProposalOperation_OneofSizer, []interface{}{ + (*AccountBudgetProposalOperation_Create)(nil), + (*AccountBudgetProposalOperation_Remove)(nil), + } +} + +func _AccountBudgetProposalOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AccountBudgetProposalOperation) + // operation + switch x := m.Operation.(type) { + case *AccountBudgetProposalOperation_Create: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AccountBudgetProposalOperation_Remove: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AccountBudgetProposalOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AccountBudgetProposalOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AccountBudgetProposalOperation) + switch tag { + case 2: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AccountBudgetProposal) + err := b.DecodeMessage(msg) + m.Operation = &AccountBudgetProposalOperation_Create{msg} + return true, err + case 1: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AccountBudgetProposalOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AccountBudgetProposalOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AccountBudgetProposalOperation) + // operation + switch x := m.Operation.(type) { + case *AccountBudgetProposalOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AccountBudgetProposalOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for account-level budget mutate operations. +type MutateAccountBudgetProposalResponse struct { + // The result of the mutate. 
+ Result *MutateAccountBudgetProposalResult `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAccountBudgetProposalResponse) Reset() { *m = MutateAccountBudgetProposalResponse{} } +func (m *MutateAccountBudgetProposalResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAccountBudgetProposalResponse) ProtoMessage() {} +func (*MutateAccountBudgetProposalResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_service_19af3128d079e096, []int{3} +} +func (m *MutateAccountBudgetProposalResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAccountBudgetProposalResponse.Unmarshal(m, b) +} +func (m *MutateAccountBudgetProposalResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAccountBudgetProposalResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAccountBudgetProposalResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAccountBudgetProposalResponse.Merge(dst, src) +} +func (m *MutateAccountBudgetProposalResponse) XXX_Size() int { + return xxx_messageInfo_MutateAccountBudgetProposalResponse.Size(m) +} +func (m *MutateAccountBudgetProposalResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAccountBudgetProposalResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAccountBudgetProposalResponse proto.InternalMessageInfo + +func (m *MutateAccountBudgetProposalResponse) GetResult() *MutateAccountBudgetProposalResult { + if m != nil { + return m.Result + } + return nil +} + +// The result for the account budget proposal mutate. +type MutateAccountBudgetProposalResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAccountBudgetProposalResult) Reset() { *m = MutateAccountBudgetProposalResult{} } +func (m *MutateAccountBudgetProposalResult) String() string { return proto.CompactTextString(m) } +func (*MutateAccountBudgetProposalResult) ProtoMessage() {} +func (*MutateAccountBudgetProposalResult) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_proposal_service_19af3128d079e096, []int{4} +} +func (m *MutateAccountBudgetProposalResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAccountBudgetProposalResult.Unmarshal(m, b) +} +func (m *MutateAccountBudgetProposalResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAccountBudgetProposalResult.Marshal(b, m, deterministic) +} +func (dst *MutateAccountBudgetProposalResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAccountBudgetProposalResult.Merge(dst, src) +} +func (m *MutateAccountBudgetProposalResult) XXX_Size() int { + return xxx_messageInfo_MutateAccountBudgetProposalResult.Size(m) +} +func (m *MutateAccountBudgetProposalResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAccountBudgetProposalResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAccountBudgetProposalResult proto.InternalMessageInfo + +func (m *MutateAccountBudgetProposalResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAccountBudgetProposalRequest)(nil), "google.ads.googleads.v1.services.GetAccountBudgetProposalRequest") + proto.RegisterType((*MutateAccountBudgetProposalRequest)(nil), "google.ads.googleads.v1.services.MutateAccountBudgetProposalRequest") + proto.RegisterType((*AccountBudgetProposalOperation)(nil), "google.ads.googleads.v1.services.AccountBudgetProposalOperation") + proto.RegisterType((*MutateAccountBudgetProposalResponse)(nil), "google.ads.googleads.v1.services.MutateAccountBudgetProposalResponse") + proto.RegisterType((*MutateAccountBudgetProposalResult)(nil), "google.ads.googleads.v1.services.MutateAccountBudgetProposalResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AccountBudgetProposalServiceClient is the client API for AccountBudgetProposalService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AccountBudgetProposalServiceClient interface { + // Returns an account-level budget proposal in full detail. + GetAccountBudgetProposal(ctx context.Context, in *GetAccountBudgetProposalRequest, opts ...grpc.CallOption) (*resources.AccountBudgetProposal, error) + // Creates, updates, or removes account budget proposals. Operation statuses + // are returned. 
+ MutateAccountBudgetProposal(ctx context.Context, in *MutateAccountBudgetProposalRequest, opts ...grpc.CallOption) (*MutateAccountBudgetProposalResponse, error) +} + +type accountBudgetProposalServiceClient struct { + cc *grpc.ClientConn +} + +func NewAccountBudgetProposalServiceClient(cc *grpc.ClientConn) AccountBudgetProposalServiceClient { + return &accountBudgetProposalServiceClient{cc} +} + +func (c *accountBudgetProposalServiceClient) GetAccountBudgetProposal(ctx context.Context, in *GetAccountBudgetProposalRequest, opts ...grpc.CallOption) (*resources.AccountBudgetProposal, error) { + out := new(resources.AccountBudgetProposal) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AccountBudgetProposalService/GetAccountBudgetProposal", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *accountBudgetProposalServiceClient) MutateAccountBudgetProposal(ctx context.Context, in *MutateAccountBudgetProposalRequest, opts ...grpc.CallOption) (*MutateAccountBudgetProposalResponse, error) { + out := new(MutateAccountBudgetProposalResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AccountBudgetProposalService/MutateAccountBudgetProposal", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AccountBudgetProposalServiceServer is the server API for AccountBudgetProposalService service. +type AccountBudgetProposalServiceServer interface { + // Returns an account-level budget proposal in full detail. + GetAccountBudgetProposal(context.Context, *GetAccountBudgetProposalRequest) (*resources.AccountBudgetProposal, error) + // Creates, updates, or removes account budget proposals. Operation statuses + // are returned. + MutateAccountBudgetProposal(context.Context, *MutateAccountBudgetProposalRequest) (*MutateAccountBudgetProposalResponse, error) +} + +func RegisterAccountBudgetProposalServiceServer(s *grpc.Server, srv AccountBudgetProposalServiceServer) { + s.RegisterService(&_AccountBudgetProposalService_serviceDesc, srv) +} + +func _AccountBudgetProposalService_GetAccountBudgetProposal_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAccountBudgetProposalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AccountBudgetProposalServiceServer).GetAccountBudgetProposal(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AccountBudgetProposalService/GetAccountBudgetProposal", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AccountBudgetProposalServiceServer).GetAccountBudgetProposal(ctx, req.(*GetAccountBudgetProposalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AccountBudgetProposalService_MutateAccountBudgetProposal_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAccountBudgetProposalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AccountBudgetProposalServiceServer).MutateAccountBudgetProposal(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AccountBudgetProposalService/MutateAccountBudgetProposal", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(AccountBudgetProposalServiceServer).MutateAccountBudgetProposal(ctx, req.(*MutateAccountBudgetProposalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AccountBudgetProposalService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AccountBudgetProposalService", + HandlerType: (*AccountBudgetProposalServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAccountBudgetProposal", + Handler: _AccountBudgetProposalService_GetAccountBudgetProposal_Handler, + }, + { + MethodName: "MutateAccountBudgetProposal", + Handler: _AccountBudgetProposalService_MutateAccountBudgetProposal_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/account_budget_proposal_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/account_budget_proposal_service.proto", fileDescriptor_account_budget_proposal_service_19af3128d079e096) +} + +var fileDescriptor_account_budget_proposal_service_19af3128d079e096 = []byte{ + // 629 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0x3f, 0x6f, 0xd3, 0x4e, + 0x18, 0xfe, 0xd9, 0x95, 0xa2, 0x5f, 0x2f, 0x65, 0xb9, 0x29, 0x0a, 0x15, 0x4d, 0xdd, 0x0e, 0x51, + 0x06, 0x5b, 0x09, 0x4b, 0xe5, 0xa8, 0x22, 0x0e, 0x90, 0x84, 0xa1, 0x34, 0x32, 0x52, 0x06, 0x88, + 0xb0, 0x2e, 0xf6, 0xd5, 0xb2, 0x6a, 0xfb, 0x8c, 0xef, 0x1c, 0xa9, 0xaa, 0xba, 0x74, 0x62, 0xe7, + 0x1b, 0x30, 0xf2, 0x51, 0xba, 0xb2, 0xc0, 0xce, 0xc4, 0x82, 0xc4, 0x27, 0x40, 0xf6, 0xdd, 0xa5, + 0xad, 0x94, 0xd8, 0x88, 0x6e, 0xaf, 0xcf, 0x8f, 0x9f, 0xe7, 0x7d, 0xde, 0x3f, 0x67, 0x30, 0xf2, + 0x09, 0xf1, 0x43, 0x6c, 0x20, 0x8f, 0x1a, 0x3c, 0xcc, 0xa3, 0x65, 0xd7, 0xa0, 0x38, 0x5d, 0x06, + 0x2e, 0xa6, 0x06, 0x72, 0x5d, 0x92, 0xc5, 0xcc, 0x59, 0x64, 0x9e, 0x8f, 0x99, 0x93, 0xa4, 0x24, + 0x21, 0x14, 0x85, 0x8e, 0x00, 0xe8, 0x49, 0x4a, 0x18, 0x81, 0x2d, 0xfe, 0xb1, 0x8e, 0x3c, 0xaa, + 0xaf, 0x78, 0xf4, 0x65, 0x57, 0x97, 0x3c, 0xcd, 0x67, 0x9b, 0x94, 0x52, 0x4c, 0x49, 0x96, 0x96, + 0x48, 0x71, 0x89, 0xe6, 0xae, 0x24, 0x48, 0x02, 0x03, 0xc5, 0x31, 0x61, 0x88, 0x05, 0x24, 0xa6, + 0xe2, 0xad, 0x48, 0xc0, 0x28, 0x9e, 0x16, 0xd9, 0x99, 0x71, 0x16, 0xe0, 0xd0, 0x73, 0x22, 0x44, + 0xcf, 0x39, 0x42, 0x1b, 0x81, 0xbd, 0x31, 0x66, 0x16, 0xd7, 0x18, 0x16, 0x12, 0x53, 0xa1, 0x60, + 0xe3, 0x0f, 0x19, 0xa6, 0x0c, 0x1e, 0x80, 0x47, 0x32, 0x1b, 0x27, 0x46, 0x11, 0x6e, 0x28, 0x2d, + 0xa5, 0xbd, 0x6d, 0xef, 0xc8, 0xc3, 0xd7, 0x28, 0xc2, 0xda, 0x8d, 0x02, 0xb4, 0x93, 0x8c, 0x21, + 0x86, 0x4b, 0xb9, 0xf6, 0x40, 0xdd, 0xcd, 0x28, 0x23, 0x11, 0x4e, 0x9d, 0xc0, 0x13, 0x4c, 0x40, + 0x1e, 0xbd, 0xf2, 0xe0, 0x7b, 0xb0, 0x4d, 0x12, 0x9c, 0x16, 0x2e, 0x1a, 0x6a, 0x4b, 0x69, 0xd7, + 0x7b, 0x03, 0xbd, 0xaa, 0x8c, 0xfa, 0x5a, 0xcd, 0x53, 0xc9, 0x63, 0xdf, 0x52, 0xe6, 0x66, 0x96, + 0x28, 0x0c, 0x3c, 0xc4, 0xb0, 0x43, 0xe2, 0xf0, 0xa2, 0xb1, 0xd5, 0x52, 0xda, 0xff, 0xdb, 0x3b, + 0xf2, 0xf0, 0x34, 0x0e, 0x2f, 0xb4, 0x6f, 0x0a, 0x78, 0x52, 0x4e, 0x09, 0xfb, 0xa0, 0x9e, 0x25, + 0x05, 0x4b, 0x5e, 0xcc, 0x82, 0xa5, 0xde, 0x6b, 0xca, 0x4c, 0x65, 0xbd, 0xf5, 0x51, 0x5e, 0xef, + 0x13, 0x44, 0xcf, 0x6d, 0xc0, 0xe1, 0x79, 0x0c, 0x6d, 0x50, 0x73, 0x53, 0x8c, 0x18, 0x16, 0x0e, + 0x8f, 0x36, 0x3a, 0x5c, 0x8d, 0xc1, 0x7a, 0x8b, 0x93, 0xff, 0x6c, 0xc1, 0x04, 0x1b, 0xa0, 0x96, + 0xe2, 0x88, 0x2c, 0x45, 0x7b, 0xf2, 0x37, 0xfc, 0x79, 0x58, 0xbf, 0x53, 0x52, 0xed, 0x5a, 0x01, + 0x07, 0xa5, 0x7d, 0xa2, 0x09, 0x89, 0x29, 0x86, 0xef, 0x72, 0x3a, 0x9a, 0x85, 0x4c, 0xa4, 0xf8, + 
0xbc, 0xba, 0x09, 0xe5, 0xb4, 0x59, 0xc8, 0x6c, 0x41, 0xa9, 0x4d, 0xc0, 0x7e, 0x25, 0xf8, 0xaf, + 0xc6, 0xae, 0xf7, 0x6b, 0x0b, 0xec, 0xae, 0x25, 0x79, 0xc3, 0xb3, 0x82, 0xdf, 0x15, 0xd0, 0xd8, + 0x34, 0xe0, 0xd0, 0xaa, 0x36, 0x55, 0xb1, 0x1c, 0xcd, 0x7f, 0x6e, 0x9d, 0x36, 0xb8, 0xfe, 0xfa, + 0xe3, 0x93, 0x6a, 0xc2, 0xa3, 0x7c, 0xdd, 0x2f, 0xef, 0x59, 0x3d, 0x96, 0x0b, 0x41, 0x8d, 0x8e, + 0xdc, 0xff, 0xfb, 0x5f, 0x53, 0xa3, 0x73, 0x05, 0x7f, 0x2b, 0xe0, 0x71, 0x49, 0x1d, 0xe1, 0x8b, + 0x07, 0xf6, 0x8c, 0x3b, 0x7c, 0xf9, 0xd0, 0xce, 0x17, 0x03, 0xa5, 0x8d, 0x0a, 0xbb, 0x03, 0xad, + 0x9f, 0xdb, 0xbd, 0xf5, 0x77, 0x79, 0xe7, 0x3a, 0x38, 0xee, 0x5c, 0x6d, 0x70, 0x6b, 0x46, 0x85, + 0x82, 0xa9, 0x74, 0x86, 0x1f, 0x55, 0x70, 0xe8, 0x92, 0xa8, 0x32, 0xa9, 0xe1, 0x7e, 0xd9, 0x5c, + 0x4c, 0xf3, 0x05, 0x9d, 0x2a, 0x6f, 0x27, 0x82, 0xc6, 0x27, 0x21, 0x8a, 0x7d, 0x9d, 0xa4, 0xbe, + 0xe1, 0xe3, 0xb8, 0x58, 0x5f, 0x79, 0x1f, 0x27, 0x01, 0xdd, 0xfc, 0x23, 0xe8, 0xcb, 0xe0, 0xb3, + 0xba, 0x35, 0xb6, 0xac, 0x2f, 0x6a, 0x6b, 0xcc, 0x09, 0x2d, 0x8f, 0xea, 0x3c, 0xcc, 0xa3, 0x59, + 0x57, 0x17, 0xc2, 0xf4, 0x46, 0x42, 0xe6, 0x96, 0x47, 0xe7, 0x2b, 0xc8, 0x7c, 0xd6, 0x9d, 0x4b, + 0xc8, 0x4f, 0xf5, 0x90, 0x9f, 0x9b, 0xa6, 0xe5, 0x51, 0xd3, 0x5c, 0x81, 0x4c, 0x73, 0xd6, 0x35, + 0x4d, 0x09, 0x5b, 0xd4, 0x8a, 0x3c, 0x9f, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0x83, 0x78, 0xdf, + 0x87, 0xaf, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go new file mode 100644 index 0000000..60bbe62 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/account_budget_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/account_budget_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [AccountBudgetService.GetAccountBudget][google.ads.googleads.v1.services.AccountBudgetService.GetAccountBudget]. +type GetAccountBudgetRequest struct { + // The resource name of the account-level budget to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAccountBudgetRequest) Reset() { *m = GetAccountBudgetRequest{} } +func (m *GetAccountBudgetRequest) String() string { return proto.CompactTextString(m) } +func (*GetAccountBudgetRequest) ProtoMessage() {} +func (*GetAccountBudgetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_account_budget_service_f5da40cfa7b98b21, []int{0} +} +func (m *GetAccountBudgetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAccountBudgetRequest.Unmarshal(m, b) +} +func (m *GetAccountBudgetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAccountBudgetRequest.Marshal(b, m, deterministic) +} +func (dst *GetAccountBudgetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAccountBudgetRequest.Merge(dst, src) +} +func (m *GetAccountBudgetRequest) XXX_Size() int { + return xxx_messageInfo_GetAccountBudgetRequest.Size(m) +} +func (m *GetAccountBudgetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAccountBudgetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAccountBudgetRequest proto.InternalMessageInfo + +func (m *GetAccountBudgetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAccountBudgetRequest)(nil), "google.ads.googleads.v1.services.GetAccountBudgetRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AccountBudgetServiceClient is the client API for AccountBudgetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AccountBudgetServiceClient interface { + // Returns an account-level budget in full detail. + GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error) +} + +type accountBudgetServiceClient struct { + cc *grpc.ClientConn +} + +func NewAccountBudgetServiceClient(cc *grpc.ClientConn) AccountBudgetServiceClient { + return &accountBudgetServiceClient{cc} +} + +func (c *accountBudgetServiceClient) GetAccountBudget(ctx context.Context, in *GetAccountBudgetRequest, opts ...grpc.CallOption) (*resources.AccountBudget, error) { + out := new(resources.AccountBudget) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AccountBudgetServiceServer is the server API for AccountBudgetService service. +type AccountBudgetServiceServer interface { + // Returns an account-level budget in full detail. 
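// Illustrative sketch (not emitted by protoc-gen-go): the corresponding client-side call is a
// plain unary gRPC invocation. A minimal, hypothetical example, assuming a caller that imports
// this package as `services`, already holds a context `ctx` and an open *grpc.ClientConn `conn`,
// and uses an illustrative resource name:
//
//	client := services.NewAccountBudgetServiceClient(conn)
//	budget, err := client.GetAccountBudget(ctx, &services.GetAccountBudgetRequest{
//		ResourceName: "customers/1234567890/accountBudgets/987654321", // illustrative value
//	})
//	if err != nil {
//		log.Fatalf("GetAccountBudget: %v", err) // `log` assumed to be imported by the caller
//	}
//	_ = budget // *resources.AccountBudget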
+ GetAccountBudget(context.Context, *GetAccountBudgetRequest) (*resources.AccountBudget, error) +} + +func RegisterAccountBudgetServiceServer(s *grpc.Server, srv AccountBudgetServiceServer) { + s.RegisterService(&_AccountBudgetService_serviceDesc, srv) +} + +func _AccountBudgetService_GetAccountBudget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAccountBudgetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AccountBudgetService/GetAccountBudget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AccountBudgetServiceServer).GetAccountBudget(ctx, req.(*GetAccountBudgetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AccountBudgetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AccountBudgetService", + HandlerType: (*AccountBudgetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAccountBudget", + Handler: _AccountBudgetService_GetAccountBudget_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/account_budget_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/account_budget_service.proto", fileDescriptor_account_budget_service_f5da40cfa7b98b21) +} + +var fileDescriptor_account_budget_service_f5da40cfa7b98b21 = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x4f, 0x4a, 0xf3, 0x40, + 0x18, 0xc6, 0x49, 0x3e, 0xf8, 0xc0, 0xa0, 0x20, 0x41, 0x50, 0x8b, 0x8b, 0x52, 0xbb, 0x90, 0x2e, + 0x66, 0x9a, 0x0a, 0xa2, 0x23, 0x15, 0xd2, 0x4d, 0x5d, 0x49, 0xa9, 0xd0, 0x85, 0x04, 0xca, 0x34, + 0x19, 0x86, 0x40, 0x33, 0x53, 0xe7, 0x9d, 0x74, 0x23, 0x82, 0x78, 0x05, 0x6f, 0xe0, 0xd2, 0x1b, + 0x78, 0x05, 0x97, 0x7a, 0x05, 0x57, 0x9e, 0x42, 0xd2, 0xe9, 0x04, 0xaa, 0x86, 0xee, 0x1e, 0xde, + 0x3c, 0xbf, 0xf7, 0xcf, 0x93, 0xf1, 0xba, 0x5c, 0x4a, 0x3e, 0x65, 0x98, 0x26, 0x80, 0x8d, 0x2c, + 0xd4, 0x3c, 0xc0, 0xc0, 0xd4, 0x3c, 0x8d, 0x19, 0x60, 0x1a, 0xc7, 0x32, 0x17, 0x7a, 0x3c, 0xc9, + 0x13, 0xce, 0xf4, 0x78, 0x59, 0x47, 0x33, 0x25, 0xb5, 0xf4, 0xeb, 0x86, 0x41, 0x34, 0x01, 0x54, + 0xe2, 0x68, 0x1e, 0x20, 0x8b, 0xd7, 0x4e, 0xaa, 0x06, 0x28, 0x06, 0x32, 0x57, 0xbf, 0x27, 0x98, + 0xce, 0xb5, 0x03, 0xcb, 0xcd, 0x52, 0x4c, 0x85, 0x90, 0x9a, 0xea, 0x54, 0x0a, 0x30, 0x5f, 0x1b, + 0x17, 0xde, 0x6e, 0x9f, 0xe9, 0xd0, 0x80, 0xbd, 0x05, 0x37, 0x64, 0xb7, 0x39, 0x03, 0xed, 0x1f, + 0x7a, 0x5b, 0xb6, 0xf5, 0x58, 0xd0, 0x8c, 0xed, 0x39, 0x75, 0xe7, 0x68, 0x63, 0xb8, 0x69, 0x8b, + 0x57, 0x34, 0x63, 0x9d, 0x77, 0xc7, 0xdb, 0x59, 0xa1, 0xaf, 0xcd, 0xbe, 0xfe, 0xab, 0xe3, 0x6d, + 0xff, 0xec, 0xec, 0x9f, 0xa1, 0x75, 0x67, 0xa2, 0x8a, 0x6d, 0x6a, 0xed, 0x4a, 0xb4, 0xbc, 0x1f, + 0xad, 0x80, 0x8d, 0xd3, 0xc7, 0x8f, 0xcf, 0x27, 0xb7, 0xe3, 0xb7, 0x8b, 0x90, 0xee, 0x56, 0x4e, + 0xe9, 0xc6, 0x39, 0x68, 0x99, 0x31, 0x05, 0xb8, 0x65, 0x53, 0x33, 0x14, 0xe0, 0xd6, 0x7d, 0xef, + 0xc1, 0xf5, 0x9a, 0xb1, 0xcc, 0xd6, 0x2e, 0xdb, 0xdb, 0xff, 0xeb, 0xf4, 0x41, 0x11, 0xec, 0xc0, + 0xb9, 0xb9, 0x5c, 0xe2, 0x5c, 0x4e, 0xa9, 0xe0, 0x48, 0x2a, 0x8e, 0x39, 0x13, 0x8b, 0xd8, 0xed, + 0x0f, 0x9c, 0xa5, 0x50, 0xfd, 0x60, 0xce, 0xad, 0x78, 0x76, 0xff, 0xf5, 
0xc3, 0xf0, 0xc5, 0xad, + 0xf7, 0x4d, 0xc3, 0x30, 0x01, 0x64, 0x64, 0xa1, 0x46, 0x01, 0x5a, 0x0e, 0x86, 0x37, 0x6b, 0x89, + 0xc2, 0x04, 0xa2, 0xd2, 0x12, 0x8d, 0x82, 0xc8, 0x5a, 0xbe, 0xdc, 0xa6, 0xa9, 0x13, 0x12, 0x26, + 0x40, 0x48, 0x69, 0x22, 0x64, 0x14, 0x10, 0x62, 0x6d, 0x93, 0xff, 0x8b, 0x3d, 0x8f, 0xbf, 0x03, + 0x00, 0x00, 0xff, 0xff, 0x29, 0x16, 0xed, 0x5b, 0xd7, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_label_service.pb.go new file mode 100644 index 0000000..0179260 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_label_service.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_ad_label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupAdLabelService.GetAdGroupAdLabel][google.ads.googleads.v1.services.AdGroupAdLabelService.GetAdGroupAdLabel]. +type GetAdGroupAdLabelRequest struct { + // The resource name of the ad group ad label to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupAdLabelRequest) Reset() { *m = GetAdGroupAdLabelRequest{} } +func (m *GetAdGroupAdLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupAdLabelRequest) ProtoMessage() {} +func (*GetAdGroupAdLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_service_55352a1446b4673b, []int{0} +} +func (m *GetAdGroupAdLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupAdLabelRequest.Unmarshal(m, b) +} +func (m *GetAdGroupAdLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupAdLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupAdLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupAdLabelRequest.Merge(dst, src) +} +func (m *GetAdGroupAdLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupAdLabelRequest.Size(m) +} +func (m *GetAdGroupAdLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupAdLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupAdLabelRequest proto.InternalMessageInfo + +func (m *GetAdGroupAdLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupAdLabelService.MutateAdGroupAdLabels][google.ads.googleads.v1.services.AdGroupAdLabelService.MutateAdGroupAdLabels]. +type MutateAdGroupAdLabelsRequest struct { + // ID of the customer whose ad group ad labels are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on ad group ad labels. + Operations []*AdGroupAdLabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdLabelsRequest) Reset() { *m = MutateAdGroupAdLabelsRequest{} } +func (m *MutateAdGroupAdLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdLabelsRequest) ProtoMessage() {} +func (*MutateAdGroupAdLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_service_55352a1446b4673b, []int{1} +} +func (m *MutateAdGroupAdLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdLabelsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupAdLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdLabelsRequest.Merge(dst, src) +} +func (m *MutateAdGroupAdLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdLabelsRequest.Size(m) +} +func (m *MutateAdGroupAdLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdLabelsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupAdLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupAdLabelsRequest) GetOperations() []*AdGroupAdLabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupAdLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupAdLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on an ad group ad label. +type AdGroupAdLabelOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupAdLabelOperation_Create + // *AdGroupAdLabelOperation_Remove + Operation isAdGroupAdLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdLabelOperation) Reset() { *m = AdGroupAdLabelOperation{} } +func (m *AdGroupAdLabelOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdLabelOperation) ProtoMessage() {} +func (*AdGroupAdLabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_service_55352a1446b4673b, []int{2} +} +func (m *AdGroupAdLabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdLabelOperation.Unmarshal(m, b) +} +func (m *AdGroupAdLabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdLabelOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdLabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdLabelOperation.Merge(dst, src) +} +func (m *AdGroupAdLabelOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupAdLabelOperation.Size(m) +} +func (m *AdGroupAdLabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdLabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdLabelOperation proto.InternalMessageInfo + +type isAdGroupAdLabelOperation_Operation interface { + isAdGroupAdLabelOperation_Operation() +} + +type AdGroupAdLabelOperation_Create struct { + Create *resources.AdGroupAdLabel `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupAdLabelOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupAdLabelOperation_Create) isAdGroupAdLabelOperation_Operation() {} + +func (*AdGroupAdLabelOperation_Remove) isAdGroupAdLabelOperation_Operation() {} + +func (m *AdGroupAdLabelOperation) GetOperation() isAdGroupAdLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupAdLabelOperation) GetCreate() *resources.AdGroupAdLabel { + if x, ok := m.GetOperation().(*AdGroupAdLabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupAdLabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupAdLabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupAdLabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupAdLabelOperation_OneofMarshaler, _AdGroupAdLabelOperation_OneofUnmarshaler, _AdGroupAdLabelOperation_OneofSizer, []interface{}{ + (*AdGroupAdLabelOperation_Create)(nil), + (*AdGroupAdLabelOperation_Remove)(nil), + } +} + +func _AdGroupAdLabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupAdLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupAdLabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupAdLabelOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupAdLabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupAdLabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupAdLabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupAdLabel) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupAdLabelOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupAdLabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupAdLabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupAdLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupAdLabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupAdLabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group ad labels mutate. +type MutateAdGroupAdLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupAdLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdLabelsResponse) Reset() { *m = MutateAdGroupAdLabelsResponse{} } +func (m *MutateAdGroupAdLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdLabelsResponse) ProtoMessage() {} +func (*MutateAdGroupAdLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_service_55352a1446b4673b, []int{3} +} +func (m *MutateAdGroupAdLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdLabelsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupAdLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdLabelsResponse.Merge(dst, src) +} +func (m *MutateAdGroupAdLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdLabelsResponse.Size(m) +} +func (m *MutateAdGroupAdLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdLabelsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupAdLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupAdLabelsResponse) GetResults() []*MutateAdGroupAdLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for an ad group ad label mutate. +type MutateAdGroupAdLabelResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdLabelResult) Reset() { *m = MutateAdGroupAdLabelResult{} } +func (m *MutateAdGroupAdLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdLabelResult) ProtoMessage() {} +func (*MutateAdGroupAdLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_label_service_55352a1446b4673b, []int{4} +} +func (m *MutateAdGroupAdLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdLabelResult.Unmarshal(m, b) +} +func (m *MutateAdGroupAdLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdLabelResult.Merge(dst, src) +} +func (m *MutateAdGroupAdLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdLabelResult.Size(m) +} +func (m *MutateAdGroupAdLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdLabelResult proto.InternalMessageInfo + +func (m *MutateAdGroupAdLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupAdLabelRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupAdLabelRequest") + proto.RegisterType((*MutateAdGroupAdLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdLabelsRequest") + proto.RegisterType((*AdGroupAdLabelOperation)(nil), "google.ads.googleads.v1.services.AdGroupAdLabelOperation") + proto.RegisterType((*MutateAdGroupAdLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdLabelsResponse") + proto.RegisterType((*MutateAdGroupAdLabelResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdLabelResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupAdLabelServiceClient is the client API for AdGroupAdLabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupAdLabelServiceClient interface { + // Returns the requested ad group ad label in full detail. + GetAdGroupAdLabel(ctx context.Context, in *GetAdGroupAdLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupAdLabel, error) + // Creates and removes ad group ad labels. + // Operation statuses are returned. 
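// Illustrative sketch (not emitted by protoc-gen-go): a minimal, hypothetical example of a caller
// attaching one label through this method, assuming the caller imports this package as `services`
// and the resources package as `resources`, and already has `ctx`, an open *grpc.ClientConn `conn`,
// and the relevant resource names:
//
//	client := services.NewAdGroupAdLabelServiceClient(conn)
//	resp, err := client.MutateAdGroupAdLabels(ctx, &services.MutateAdGroupAdLabelsRequest{
//		CustomerId: "1234567890", // illustrative customer ID
//		Operations: []*services.AdGroupAdLabelOperation{{
//			Operation: &services.AdGroupAdLabelOperation_Create{
//				Create: &resources.AdGroupAdLabel{ /* ad group ad and label set by the caller */ },
//			},
//		}},
//	})
//	if err != nil {
//		log.Fatalf("MutateAdGroupAdLabels: %v", err)
//	}
//	for _, r := range resp.GetResults() {
//		log.Println(r.GetResourceName()) // resource name of each created label
//	}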
+ MutateAdGroupAdLabels(ctx context.Context, in *MutateAdGroupAdLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupAdLabelsResponse, error) +} + +type adGroupAdLabelServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupAdLabelServiceClient(cc *grpc.ClientConn) AdGroupAdLabelServiceClient { + return &adGroupAdLabelServiceClient{cc} +} + +func (c *adGroupAdLabelServiceClient) GetAdGroupAdLabel(ctx context.Context, in *GetAdGroupAdLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupAdLabel, error) { + out := new(resources.AdGroupAdLabel) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupAdLabelService/GetAdGroupAdLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupAdLabelServiceClient) MutateAdGroupAdLabels(ctx context.Context, in *MutateAdGroupAdLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupAdLabelsResponse, error) { + out := new(MutateAdGroupAdLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupAdLabelService/MutateAdGroupAdLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupAdLabelServiceServer is the server API for AdGroupAdLabelService service. +type AdGroupAdLabelServiceServer interface { + // Returns the requested ad group ad label in full detail. + GetAdGroupAdLabel(context.Context, *GetAdGroupAdLabelRequest) (*resources.AdGroupAdLabel, error) + // Creates and removes ad group ad labels. + // Operation statuses are returned. + MutateAdGroupAdLabels(context.Context, *MutateAdGroupAdLabelsRequest) (*MutateAdGroupAdLabelsResponse, error) +} + +func RegisterAdGroupAdLabelServiceServer(s *grpc.Server, srv AdGroupAdLabelServiceServer) { + s.RegisterService(&_AdGroupAdLabelService_serviceDesc, srv) +} + +func _AdGroupAdLabelService_GetAdGroupAdLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupAdLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupAdLabelServiceServer).GetAdGroupAdLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupAdLabelService/GetAdGroupAdLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupAdLabelServiceServer).GetAdGroupAdLabel(ctx, req.(*GetAdGroupAdLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupAdLabelService_MutateAdGroupAdLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupAdLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupAdLabelServiceServer).MutateAdGroupAdLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupAdLabelService/MutateAdGroupAdLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupAdLabelServiceServer).MutateAdGroupAdLabels(ctx, req.(*MutateAdGroupAdLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupAdLabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupAdLabelService", + HandlerType: (*AdGroupAdLabelServiceServer)(nil), + Methods: 
[]grpc.MethodDesc{ + { + MethodName: "GetAdGroupAdLabel", + Handler: _AdGroupAdLabelService_GetAdGroupAdLabel_Handler, + }, + { + MethodName: "MutateAdGroupAdLabels", + Handler: _AdGroupAdLabelService_MutateAdGroupAdLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_ad_label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_ad_label_service.proto", fileDescriptor_ad_group_ad_label_service_55352a1446b4673b) +} + +var fileDescriptor_ad_group_ad_label_service_55352a1446b4673b = []byte{ + // 669 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4d, 0x6f, 0xd3, 0x4c, + 0x10, 0x7e, 0xed, 0xbc, 0x2a, 0x74, 0x53, 0x40, 0x2c, 0xaa, 0x6a, 0x45, 0x05, 0x22, 0x53, 0x89, + 0x2a, 0x07, 0x5b, 0x4e, 0xb9, 0xc4, 0x50, 0x8a, 0x2b, 0xd1, 0x14, 0xf1, 0xd1, 0xca, 0x95, 0x22, + 0x81, 0x22, 0x59, 0xdb, 0xec, 0xd6, 0xb2, 0xe4, 0x78, 0xcd, 0xee, 0x3a, 0xa8, 0xaa, 0x7a, 0x80, + 0x3b, 0x27, 0xfe, 0x01, 0x47, 0xae, 0x9c, 0x11, 0x77, 0xae, 0x9c, 0xb8, 0x73, 0x40, 0xfc, 0x0a, + 0x64, 0xaf, 0x37, 0x34, 0x21, 0x56, 0xa0, 0xb7, 0xd9, 0xf9, 0x78, 0x66, 0x9e, 0x99, 0x9d, 0x01, + 0x0f, 0x42, 0x4a, 0xc3, 0x98, 0xd8, 0x08, 0x73, 0x5b, 0x8a, 0xb9, 0x34, 0x72, 0x6c, 0x4e, 0xd8, + 0x28, 0x1a, 0x10, 0x6e, 0x23, 0x1c, 0x84, 0x8c, 0x66, 0x69, 0x80, 0x70, 0x10, 0xa3, 0x43, 0x12, + 0x07, 0xa5, 0xc9, 0x4a, 0x19, 0x15, 0x14, 0x36, 0x65, 0x98, 0x85, 0x30, 0xb7, 0xc6, 0x08, 0xd6, + 0xc8, 0xb1, 0x14, 0x42, 0xa3, 0x53, 0x95, 0x83, 0x11, 0x4e, 0x33, 0x36, 0x33, 0x89, 0x04, 0x6f, + 0xac, 0xaa, 0xd0, 0x34, 0xb2, 0x51, 0x92, 0x50, 0x81, 0x44, 0x44, 0x13, 0x5e, 0x5a, 0x6f, 0x94, + 0xd6, 0xe2, 0x75, 0x98, 0x1d, 0xd9, 0xaf, 0x18, 0x4a, 0x53, 0xc2, 0x94, 0x7d, 0xa5, 0xb4, 0xb3, + 0x74, 0x60, 0x73, 0x81, 0x44, 0x56, 0x1a, 0xcc, 0x2d, 0x60, 0x74, 0x89, 0xf0, 0x70, 0x37, 0xcf, + 0xe9, 0xe1, 0x27, 0x79, 0x46, 0x9f, 0xbc, 0xcc, 0x08, 0x17, 0xf0, 0x16, 0xb8, 0xa4, 0xea, 0x0a, + 0x12, 0x34, 0x24, 0x86, 0xd6, 0xd4, 0xd6, 0x17, 0xfd, 0x25, 0xa5, 0x7c, 0x86, 0x86, 0xc4, 0xfc, + 0xa1, 0x81, 0xd5, 0xa7, 0x99, 0x40, 0x82, 0x4c, 0x82, 0x70, 0x85, 0x72, 0x13, 0xd4, 0x07, 0x19, + 0x17, 0x74, 0x48, 0x58, 0x10, 0xe1, 0x12, 0x03, 0x28, 0xd5, 0x23, 0x0c, 0x9f, 0x03, 0x40, 0x53, + 0xc2, 0x24, 0x1f, 0x43, 0x6f, 0xd6, 0xd6, 0xeb, 0xed, 0x8e, 0x35, 0xaf, 0x97, 0xd6, 0x64, 0xba, + 0x3d, 0x85, 0xe0, 0x9f, 0x01, 0x83, 0xb7, 0xc1, 0x95, 0x14, 0x31, 0x11, 0xa1, 0x38, 0x38, 0x42, + 0x51, 0x9c, 0x31, 0x62, 0xd4, 0x9a, 0xda, 0xfa, 0x45, 0xff, 0x72, 0xa9, 0xde, 0x91, 0xda, 0x9c, + 0xea, 0x08, 0xc5, 0x11, 0x46, 0x82, 0x04, 0x34, 0x89, 0x8f, 0x8d, 0xff, 0x0b, 0xb7, 0x25, 0xa5, + 0xdc, 0x4b, 0xe2, 0x63, 0xf3, 0xad, 0x06, 0x56, 0x2a, 0xb2, 0xc2, 0xc7, 0x60, 0x61, 0xc0, 0x08, + 0x12, 0xb2, 0x49, 0xf5, 0xb6, 0x53, 0x49, 0x60, 0x3c, 0xea, 0x29, 0x06, 0xbb, 0xff, 0xf9, 0x25, + 0x04, 0x34, 0xc0, 0x02, 0x23, 0x43, 0x3a, 0x22, 0x86, 0x9e, 0x77, 0x2b, 0xb7, 0xc8, 0xf7, 0x76, + 0x1d, 0x2c, 0x8e, 0xe9, 0x99, 0x9f, 0x35, 0x70, 0xbd, 0xa2, 0xf5, 0x3c, 0xa5, 0x09, 0x27, 0x70, + 0x07, 0x2c, 0x4f, 0xf1, 0x0f, 0x08, 0x63, 0x94, 0x15, 0x5d, 0xa8, 0xb7, 0xa1, 0x2a, 0x92, 0xa5, + 0x03, 0xeb, 0xa0, 0xf8, 0x16, 0xfe, 0xb5, 0xc9, 0xce, 0x3c, 0xcc, 0xdd, 0x61, 0x0f, 0x5c, 0x60, + 0x84, 0x67, 0xb1, 0x50, 0xf3, 0xb9, 0x37, 0x7f, 0x3e, 0xb3, 0x2a, 0xf3, 0x0b, 0x10, 0x5f, 0x81, + 0x99, 0x1e, 0x68, 0x54, 0xbb, 0xfd, 0xd5, 0xff, 0x6b, 0x7f, 0xac, 0x81, 0xe5, 0xc9, 0xe8, 0x03, + 0x59, 0x01, 0xfc, 0xa4, 0x81, 0xab, 0x7f, 0xfc, 0x6d, 0xe8, 
0xce, 0xaf, 0xbc, 0x6a, 0x21, 0x1a, + 0xff, 0x3e, 0x54, 0xb3, 0xf3, 0xe6, 0xeb, 0xf7, 0x77, 0xfa, 0x06, 0x74, 0xf2, 0x2d, 0x3f, 0x99, + 0xa0, 0xb3, 0xa9, 0x76, 0x80, 0xdb, 0x2d, 0x1b, 0x4d, 0x4e, 0xd0, 0x6e, 0x9d, 0xc2, 0x6f, 0x1a, + 0x58, 0x9e, 0x39, 0x5e, 0x78, 0xff, 0x7c, 0xdd, 0x57, 0x2b, 0xd9, 0xd8, 0x3a, 0x77, 0xbc, 0xfc, + 0x57, 0xe6, 0x56, 0xc1, 0xaa, 0x63, 0xde, 0xc9, 0x59, 0xfd, 0xa6, 0x71, 0x72, 0x66, 0xd1, 0x37, + 0x5b, 0xa7, 0xd3, 0xa4, 0xdc, 0x61, 0x01, 0xea, 0x6a, 0xad, 0xed, 0xd7, 0x3a, 0x58, 0x1b, 0xd0, + 0xe1, 0xdc, 0x3a, 0xb6, 0x1b, 0x33, 0x67, 0xbb, 0x9f, 0xdf, 0xae, 0x7d, 0xed, 0xc5, 0x6e, 0x19, + 0x1f, 0xd2, 0x18, 0x25, 0xa1, 0x45, 0x59, 0x68, 0x87, 0x24, 0x29, 0x2e, 0x9b, 0xba, 0xaf, 0x69, + 0xc4, 0xab, 0x4f, 0xfa, 0x5d, 0x25, 0xbc, 0xd7, 0x6b, 0x5d, 0xcf, 0xfb, 0xa0, 0x37, 0xbb, 0x12, + 0xd0, 0xc3, 0xdc, 0x92, 0x62, 0x2e, 0xf5, 0x1c, 0xab, 0x4c, 0xcc, 0xbf, 0x28, 0x97, 0xbe, 0x87, + 0x79, 0x7f, 0xec, 0xd2, 0xef, 0x39, 0x7d, 0xe5, 0xf2, 0x53, 0x5f, 0x93, 0x7a, 0xd7, 0xf5, 0x30, + 0x77, 0xdd, 0xb1, 0x93, 0xeb, 0xf6, 0x1c, 0xd7, 0x55, 0x6e, 0x87, 0x0b, 0x45, 0x9d, 0x1b, 0xbf, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x0e, 0xd7, 0x62, 0x6d, 0x79, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_service.pb.go new file mode 100644 index 0000000..79e2197 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_ad_service.pb.go @@ -0,0 +1,601 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_ad_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupAdService.GetAdGroupAd][google.ads.googleads.v1.services.AdGroupAdService.GetAdGroupAd]. +type GetAdGroupAdRequest struct { + // The resource name of the ad to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupAdRequest) Reset() { *m = GetAdGroupAdRequest{} } +func (m *GetAdGroupAdRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupAdRequest) ProtoMessage() {} +func (*GetAdGroupAdRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb, []int{0} +} +func (m *GetAdGroupAdRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupAdRequest.Unmarshal(m, b) +} +func (m *GetAdGroupAdRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupAdRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupAdRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupAdRequest.Merge(dst, src) +} +func (m *GetAdGroupAdRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupAdRequest.Size(m) +} +func (m *GetAdGroupAdRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupAdRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupAdRequest proto.InternalMessageInfo + +func (m *GetAdGroupAdRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupAdService.MutateAdGroupAds][google.ads.googleads.v1.services.AdGroupAdService.MutateAdGroupAds]. +type MutateAdGroupAdsRequest struct { + // The ID of the customer whose ads are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ads. + Operations []*AdGroupAdOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdsRequest) Reset() { *m = MutateAdGroupAdsRequest{} } +func (m *MutateAdGroupAdsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdsRequest) ProtoMessage() {} +func (*MutateAdGroupAdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb, []int{1} +} +func (m *MutateAdGroupAdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupAdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdsRequest.Merge(dst, src) +} +func (m *MutateAdGroupAdsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdsRequest.Size(m) +} +func (m *MutateAdGroupAdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupAdsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupAdsRequest) GetOperations() []*AdGroupAdOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupAdsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupAdsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an ad group ad. +type AdGroupAdOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Configuration for how policies are validated. + PolicyValidationParameter *common.PolicyValidationParameter `protobuf:"bytes,5,opt,name=policy_validation_parameter,json=policyValidationParameter,proto3" json:"policy_validation_parameter,omitempty"` + // The mutate operation. 
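// Illustrative sketch (not emitted by protoc-gen-go): for an update, the caller populates both
// UpdateMask (above) and the Update member of this oneof, so that only the masked fields are
// modified. A minimal, hypothetical example, assuming an `updated` *resources.AdGroupAd prepared
// by the caller and an illustrative field path:
//
//	op := &services.AdGroupAdOperation{
//		UpdateMask: &field_mask.FieldMask{Paths: []string{"status"}}, // "status" is illustrative
//		Operation:  &services.AdGroupAdOperation_Update{Update: updated},
//	}
//	// `op` would then be appended to MutateAdGroupAdsRequest.Operations.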
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupAdOperation_Create + // *AdGroupAdOperation_Update + // *AdGroupAdOperation_Remove + Operation isAdGroupAdOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupAdOperation) Reset() { *m = AdGroupAdOperation{} } +func (m *AdGroupAdOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupAdOperation) ProtoMessage() {} +func (*AdGroupAdOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb, []int{2} +} +func (m *AdGroupAdOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupAdOperation.Unmarshal(m, b) +} +func (m *AdGroupAdOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupAdOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupAdOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupAdOperation.Merge(dst, src) +} +func (m *AdGroupAdOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupAdOperation.Size(m) +} +func (m *AdGroupAdOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupAdOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupAdOperation proto.InternalMessageInfo + +func (m *AdGroupAdOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *AdGroupAdOperation) GetPolicyValidationParameter() *common.PolicyValidationParameter { + if m != nil { + return m.PolicyValidationParameter + } + return nil +} + +type isAdGroupAdOperation_Operation interface { + isAdGroupAdOperation_Operation() +} + +type AdGroupAdOperation_Create struct { + Create *resources.AdGroupAd `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupAdOperation_Update struct { + Update *resources.AdGroupAd `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupAdOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupAdOperation_Create) isAdGroupAdOperation_Operation() {} + +func (*AdGroupAdOperation_Update) isAdGroupAdOperation_Operation() {} + +func (*AdGroupAdOperation_Remove) isAdGroupAdOperation_Operation() {} + +func (m *AdGroupAdOperation) GetOperation() isAdGroupAdOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupAdOperation) GetCreate() *resources.AdGroupAd { + if x, ok := m.GetOperation().(*AdGroupAdOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupAdOperation) GetUpdate() *resources.AdGroupAd { + if x, ok := m.GetOperation().(*AdGroupAdOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupAdOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupAdOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupAdOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupAdOperation_OneofMarshaler, _AdGroupAdOperation_OneofUnmarshaler, _AdGroupAdOperation_OneofSizer, []interface{}{ + (*AdGroupAdOperation_Create)(nil), + (*AdGroupAdOperation_Update)(nil), + (*AdGroupAdOperation_Remove)(nil), + } +} + +func _AdGroupAdOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupAdOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupAdOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupAdOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupAdOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupAdOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupAdOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupAdOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupAd) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupAdOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupAd) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupAdOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupAdOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupAdOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupAdOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupAdOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupAdOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupAdOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group ad mutate. +type MutateAdGroupAdsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupAdResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdsResponse) Reset() { *m = MutateAdGroupAdsResponse{} } +func (m *MutateAdGroupAdsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdsResponse) ProtoMessage() {} +func (*MutateAdGroupAdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb, []int{3} +} +func (m *MutateAdGroupAdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupAdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdsResponse.Merge(dst, src) +} +func (m *MutateAdGroupAdsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdsResponse.Size(m) +} +func (m *MutateAdGroupAdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupAdsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupAdsResponse) GetResults() []*MutateAdGroupAdResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the ad mutate. +type MutateAdGroupAdResult struct { + // The resource name returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupAdResult) Reset() { *m = MutateAdGroupAdResult{} } +func (m *MutateAdGroupAdResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupAdResult) ProtoMessage() {} +func (*MutateAdGroupAdResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb, []int{4} +} +func (m *MutateAdGroupAdResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupAdResult.Unmarshal(m, b) +} +func (m *MutateAdGroupAdResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupAdResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupAdResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupAdResult.Merge(dst, src) +} +func (m *MutateAdGroupAdResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupAdResult.Size(m) +} +func (m *MutateAdGroupAdResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupAdResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupAdResult proto.InternalMessageInfo + +func (m *MutateAdGroupAdResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupAdRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupAdRequest") + proto.RegisterType((*MutateAdGroupAdsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdsRequest") + proto.RegisterType((*AdGroupAdOperation)(nil), "google.ads.googleads.v1.services.AdGroupAdOperation") + 
proto.RegisterType((*MutateAdGroupAdsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdsResponse") + proto.RegisterType((*MutateAdGroupAdResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupAdResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupAdServiceClient is the client API for AdGroupAdService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupAdServiceClient interface { + // Returns the requested ad in full detail. + GetAdGroupAd(ctx context.Context, in *GetAdGroupAdRequest, opts ...grpc.CallOption) (*resources.AdGroupAd, error) + // Creates, updates, or removes ads. Operation statuses are returned. + MutateAdGroupAds(ctx context.Context, in *MutateAdGroupAdsRequest, opts ...grpc.CallOption) (*MutateAdGroupAdsResponse, error) +} + +type adGroupAdServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupAdServiceClient(cc *grpc.ClientConn) AdGroupAdServiceClient { + return &adGroupAdServiceClient{cc} +} + +func (c *adGroupAdServiceClient) GetAdGroupAd(ctx context.Context, in *GetAdGroupAdRequest, opts ...grpc.CallOption) (*resources.AdGroupAd, error) { + out := new(resources.AdGroupAd) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupAdService/GetAdGroupAd", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupAdServiceClient) MutateAdGroupAds(ctx context.Context, in *MutateAdGroupAdsRequest, opts ...grpc.CallOption) (*MutateAdGroupAdsResponse, error) { + out := new(MutateAdGroupAdsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupAdService/MutateAdGroupAds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupAdServiceServer is the server API for AdGroupAdService service. +type AdGroupAdServiceServer interface { + // Returns the requested ad in full detail. + GetAdGroupAd(context.Context, *GetAdGroupAdRequest) (*resources.AdGroupAd, error) + // Creates, updates, or removes ads. Operation statuses are returned. 
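// Illustrative sketch (not emitted by protoc-gen-go): when a caller sets PartialFailure on the
// request, per-operation errors surface in MutateAdGroupAdsResponse.PartialFailureError rather
// than failing the whole RPC. A minimal, hypothetical example of checking it on the caller side,
// assuming a `resp` *services.MutateAdGroupAdsResponse already obtained from MutateAdGroupAds:
//
//	if pfe := resp.GetPartialFailureError(); pfe != nil {
//		log.Printf("partial failure: code=%d message=%q", pfe.GetCode(), pfe.GetMessage())
//	}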
+ MutateAdGroupAds(context.Context, *MutateAdGroupAdsRequest) (*MutateAdGroupAdsResponse, error) +} + +func RegisterAdGroupAdServiceServer(s *grpc.Server, srv AdGroupAdServiceServer) { + s.RegisterService(&_AdGroupAdService_serviceDesc, srv) +} + +func _AdGroupAdService_GetAdGroupAd_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupAdRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupAdServiceServer).GetAdGroupAd(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupAdService/GetAdGroupAd", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupAdServiceServer).GetAdGroupAd(ctx, req.(*GetAdGroupAdRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupAdService_MutateAdGroupAds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupAdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupAdServiceServer).MutateAdGroupAds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupAdService/MutateAdGroupAds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupAdServiceServer).MutateAdGroupAds(ctx, req.(*MutateAdGroupAdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupAdService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupAdService", + HandlerType: (*AdGroupAdServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupAd", + Handler: _AdGroupAdService_GetAdGroupAd_Handler, + }, + { + MethodName: "MutateAdGroupAds", + Handler: _AdGroupAdService_MutateAdGroupAds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_ad_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_ad_service.proto", fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb) +} + +var fileDescriptor_ad_group_ad_service_2ab9daac9d4494fb = []byte{ + // 766 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xc1, 0x6e, 0xd3, 0x4a, + 0x14, 0x7d, 0x4e, 0xde, 0xeb, 0x7b, 0x9d, 0xf4, 0x41, 0x35, 0x55, 0x55, 0x13, 0x10, 0x44, 0xa6, + 0x12, 0x55, 0xa8, 0x6c, 0x25, 0x2d, 0x42, 0x75, 0xe9, 0x22, 0x95, 0x68, 0xca, 0xa2, 0x34, 0xb8, + 0x28, 0x0b, 0x14, 0xc9, 0x9a, 0xda, 0xd3, 0xc8, 0xaa, 0xed, 0x19, 0x66, 0xc6, 0x41, 0x51, 0xd5, + 0x0d, 0x48, 0xfc, 0x00, 0x7f, 0xc0, 0x0e, 0xf6, 0x7c, 0x02, 0x1b, 0xb6, 0xec, 0x59, 0xb1, 0xe2, + 0x1b, 0x58, 0x20, 0x7b, 0x3c, 0x26, 0x4d, 0x1b, 0x85, 0x76, 0x37, 0xbe, 0x73, 0xcf, 0xb9, 0x67, + 0xe6, 0xdc, 0xb9, 0x06, 0x76, 0x9f, 0x90, 0x7e, 0x88, 0x2d, 0xe4, 0x73, 0x4b, 0x2e, 0xd3, 0xd5, + 0xa0, 0x61, 0x71, 0xcc, 0x06, 0x81, 0x87, 0xb9, 0x85, 0x7c, 0xb7, 0xcf, 0x48, 0x42, 0x5d, 0xe4, + 0xbb, 0x79, 0xd0, 0xa4, 0x8c, 0x08, 0x02, 0x6b, 0x12, 0x60, 0x22, 0x9f, 0x9b, 0x05, 0xd6, 0x1c, + 0x34, 0x4c, 0x85, 0xad, 0xde, 0x9f, 0xc4, 0xee, 0x91, 0x28, 0x22, 0xb1, 0x45, 0x49, 0x18, 0x78, + 0x43, 0x49, 0x57, 0x5d, 0x9b, 0x94, 0xcc, 0x30, 0x27, 0x09, 0x1b, 0xd3, 0x92, 0x83, 0x6e, 0x29, + 0x10, 0x0d, 0x2c, 
0x14, 0xc7, 0x44, 0x20, 0x11, 0x90, 0x98, 0xe7, 0xbb, 0xb9, 0x42, 0x2b, 0xfb, + 0x3a, 0x4c, 0x8e, 0xac, 0xa3, 0x00, 0x87, 0xbe, 0x1b, 0x21, 0x7e, 0x9c, 0x67, 0xdc, 0x1e, 0xcf, + 0x78, 0xc5, 0x10, 0xa5, 0x98, 0x29, 0x86, 0xa5, 0x7c, 0x9f, 0x51, 0xcf, 0xe2, 0x02, 0x89, 0x24, + 0xdf, 0x30, 0x6c, 0xb0, 0xd0, 0xc6, 0xa2, 0xe5, 0xb7, 0x53, 0x3d, 0x2d, 0xdf, 0xc1, 0x2f, 0x13, + 0xcc, 0x05, 0xbc, 0x0b, 0xfe, 0x57, 0x72, 0xdd, 0x18, 0x45, 0x58, 0xd7, 0x6a, 0xda, 0xca, 0xac, + 0x33, 0xa7, 0x82, 0x4f, 0x51, 0x84, 0x8d, 0x6f, 0x1a, 0x58, 0xda, 0x4b, 0x04, 0x12, 0xb8, 0xc0, + 0x73, 0x45, 0x70, 0x07, 0x54, 0xbc, 0x84, 0x0b, 0x12, 0x61, 0xe6, 0x06, 0x7e, 0x0e, 0x07, 0x2a, + 0xf4, 0xc4, 0x87, 0xcf, 0x01, 0x20, 0x14, 0x33, 0x79, 0x4e, 0xbd, 0x54, 0x2b, 0xaf, 0x54, 0x9a, + 0xeb, 0xe6, 0x34, 0x2b, 0xcc, 0xa2, 0xd2, 0xbe, 0x02, 0x3b, 0x23, 0x3c, 0xf0, 0x1e, 0xb8, 0x4e, + 0x11, 0x13, 0x01, 0x0a, 0xdd, 0x23, 0x14, 0x84, 0x09, 0xc3, 0x7a, 0xb9, 0xa6, 0xad, 0xfc, 0xe7, + 0x5c, 0xcb, 0xc3, 0x3b, 0x32, 0x9a, 0x1e, 0x70, 0x80, 0xc2, 0xc0, 0x47, 0x02, 0xbb, 0x24, 0x0e, + 0x87, 0xfa, 0xdf, 0x59, 0xda, 0x9c, 0x0a, 0xee, 0xc7, 0xe1, 0xd0, 0x78, 0x53, 0x06, 0xf0, 0x7c, + 0x41, 0xb8, 0x09, 0x2a, 0x09, 0xcd, 0x90, 0xa9, 0x03, 0x19, 0xb2, 0xd2, 0xac, 0x2a, 0xed, 0xca, + 0x02, 0x73, 0x27, 0x35, 0x69, 0x0f, 0xf1, 0x63, 0x07, 0xc8, 0xf4, 0x74, 0x0d, 0x87, 0xe0, 0xa6, + 0x6c, 0x17, 0x37, 0x2f, 0x15, 0x90, 0xd8, 0xa5, 0x88, 0xa1, 0x08, 0x0b, 0xcc, 0xf4, 0x7f, 0x32, + 0xb2, 0x8d, 0x89, 0x17, 0x21, 0x3b, 0xce, 0xec, 0x64, 0x14, 0xdd, 0x82, 0xa1, 0xa3, 0x08, 0x9c, + 0x1b, 0x74, 0xd2, 0x16, 0xdc, 0x01, 0x33, 0x1e, 0xc3, 0x48, 0x48, 0x37, 0x2b, 0xcd, 0xd5, 0x89, + 0x55, 0x8a, 0x56, 0xfd, 0x7d, 0xdf, 0xbb, 0x7f, 0x39, 0x39, 0x3a, 0xe5, 0x91, 0x07, 0xd2, 0x4b, + 0x57, 0xe3, 0x91, 0x68, 0xa8, 0x83, 0x19, 0x86, 0x23, 0x32, 0x90, 0x1e, 0xcd, 0xa6, 0x3b, 0xf2, + 0x7b, 0xbb, 0x02, 0x66, 0x0b, 0x53, 0x8d, 0x4f, 0x1a, 0xd0, 0xcf, 0xb7, 0x19, 0xa7, 0x24, 0xe6, + 0xa9, 0x96, 0xc5, 0x31, 0xc3, 0x5d, 0xcc, 0x18, 0x61, 0x19, 0x65, 0xa5, 0x09, 0x95, 0x34, 0x46, + 0x3d, 0xf3, 0x20, 0x6b, 0x7c, 0x67, 0xe1, 0x6c, 0x2b, 0x3c, 0x4e, 0xd3, 0xe1, 0x33, 0xf0, 0x2f, + 0xc3, 0x3c, 0x09, 0x85, 0xea, 0xc5, 0x87, 0xd3, 0x7b, 0x71, 0x4c, 0x94, 0x93, 0xe1, 0x1d, 0xc5, + 0x63, 0x3c, 0x02, 0x8b, 0x17, 0x66, 0xfc, 0xd1, 0xe3, 0x6a, 0xbe, 0x2d, 0x83, 0xf9, 0x02, 0x78, + 0x20, 0x4b, 0xc2, 0x0f, 0x1a, 0x98, 0x1b, 0x7d, 0xae, 0xf0, 0xc1, 0x74, 0x95, 0x17, 0x3c, 0xef, + 0xea, 0xa5, 0x1c, 0x33, 0xd6, 0x5f, 0x7f, 0xfd, 0xfe, 0xae, 0x64, 0xc2, 0xd5, 0x74, 0x8a, 0x9d, + 0x9c, 0x91, 0xbe, 0xa5, 0x5e, 0x34, 0xb7, 0xea, 0x16, 0x2a, 0xec, 0xb1, 0xea, 0xa7, 0xf0, 0xb3, + 0x06, 0xe6, 0xc7, 0x6d, 0x83, 0x1b, 0x97, 0xbe, 0x55, 0x35, 0x51, 0xaa, 0xf6, 0x55, 0xa0, 0xb2, + 0x4b, 0x0c, 0x3b, 0x3b, 0xc1, 0xba, 0x61, 0x65, 0x43, 0xbb, 0x90, 0x7c, 0x32, 0x32, 0xa2, 0xb6, + 0xea, 0xa7, 0x23, 0x07, 0xb0, 0xa3, 0x8c, 0xca, 0xd6, 0xea, 0xdb, 0x3f, 0x35, 0xb0, 0xec, 0x91, + 0x68, 0x6a, 0xf5, 0xed, 0xc5, 0x71, 0xbb, 0x3a, 0xe9, 0x24, 0xe8, 0x68, 0x2f, 0x76, 0x73, 0x68, + 0x9f, 0x84, 0x28, 0xee, 0x9b, 0x84, 0xf5, 0xad, 0x3e, 0x8e, 0xb3, 0x39, 0xa1, 0xfe, 0x10, 0x34, + 0xe0, 0x93, 0xff, 0x5d, 0x9b, 0x6a, 0xf1, 0xbe, 0x54, 0x6e, 0xb7, 0x5a, 0x1f, 0x4b, 0xb5, 0xb6, + 0x24, 0x6c, 0xf9, 0xdc, 0x94, 0xcb, 0x74, 0xd5, 0x6d, 0x98, 0x79, 0x61, 0xfe, 0x45, 0xa5, 0xf4, + 0x5a, 0x3e, 0xef, 0x15, 0x29, 0xbd, 0x6e, 0xa3, 0xa7, 0x52, 0x7e, 0x94, 0x96, 0x65, 0xdc, 0xb6, + 0xd3, 0xf3, 0xda, 0x45, 0x92, 0x6d, 0x77, 0x1b, 0xb6, 0xad, 0xd2, 0x0e, 0x67, 0x32, 0x9d, 0x6b, + 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x46, 
0x3a, 0xc6, 0x70, 0x62, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_audience_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_audience_view_service.pb.go new file mode 100644 index 0000000..0938431 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_audience_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_audience_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupAudienceViewService.GetAdGoupAudienceView][]. +type GetAdGroupAudienceViewRequest struct { + // The resource name of the ad group audience view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupAudienceViewRequest) Reset() { *m = GetAdGroupAudienceViewRequest{} } +func (m *GetAdGroupAudienceViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupAudienceViewRequest) ProtoMessage() {} +func (*GetAdGroupAudienceViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_audience_view_service_d1fd343d6456c81d, []int{0} +} +func (m *GetAdGroupAudienceViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupAudienceViewRequest.Unmarshal(m, b) +} +func (m *GetAdGroupAudienceViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupAudienceViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupAudienceViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupAudienceViewRequest.Merge(dst, src) +} +func (m *GetAdGroupAudienceViewRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupAudienceViewRequest.Size(m) +} +func (m *GetAdGroupAudienceViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupAudienceViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupAudienceViewRequest proto.InternalMessageInfo + +func (m *GetAdGroupAudienceViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupAudienceViewRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupAudienceViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupAudienceViewServiceClient is the client API for AdGroupAudienceViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupAudienceViewServiceClient interface { + // Returns the requested ad group audience view in full detail. + GetAdGroupAudienceView(ctx context.Context, in *GetAdGroupAudienceViewRequest, opts ...grpc.CallOption) (*resources.AdGroupAudienceView, error) +} + +type adGroupAudienceViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupAudienceViewServiceClient(cc *grpc.ClientConn) AdGroupAudienceViewServiceClient { + return &adGroupAudienceViewServiceClient{cc} +} + +func (c *adGroupAudienceViewServiceClient) GetAdGroupAudienceView(ctx context.Context, in *GetAdGroupAudienceViewRequest, opts ...grpc.CallOption) (*resources.AdGroupAudienceView, error) { + out := new(resources.AdGroupAudienceView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupAudienceViewService/GetAdGroupAudienceView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupAudienceViewServiceServer is the server API for AdGroupAudienceViewService service. +type AdGroupAudienceViewServiceServer interface { + // Returns the requested ad group audience view in full detail. + GetAdGroupAudienceView(context.Context, *GetAdGroupAudienceViewRequest) (*resources.AdGroupAudienceView, error) +} + +func RegisterAdGroupAudienceViewServiceServer(s *grpc.Server, srv AdGroupAudienceViewServiceServer) { + s.RegisterService(&_AdGroupAudienceViewService_serviceDesc, srv) +} + +func _AdGroupAudienceViewService_GetAdGroupAudienceView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupAudienceViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupAudienceViewServiceServer).GetAdGroupAudienceView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupAudienceViewService/GetAdGroupAudienceView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupAudienceViewServiceServer).GetAdGroupAudienceView(ctx, req.(*GetAdGroupAudienceViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupAudienceViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupAudienceViewService", + HandlerType: (*AdGroupAudienceViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupAudienceView", + Handler: _AdGroupAudienceViewService_GetAdGroupAudienceView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_audience_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_audience_view_service.proto", fileDescriptor_ad_group_audience_view_service_d1fd343d6456c81d) +} + +var fileDescriptor_ad_group_audience_view_service_d1fd343d6456c81d = []byte{ + // 377 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 
0x84, 0x52, 0xbd, 0x4a, 0xec, 0x40, + 0x14, 0x26, 0xb9, 0x70, 0xe1, 0x86, 0x7b, 0x9b, 0x14, 0x17, 0x09, 0x8a, 0xcb, 0xba, 0x85, 0x6c, + 0x31, 0x43, 0x14, 0x16, 0x19, 0x71, 0x25, 0x8b, 0x12, 0x2b, 0x59, 0x56, 0x48, 0x21, 0x81, 0x30, + 0x66, 0x86, 0x10, 0xd8, 0x64, 0x62, 0x4e, 0x92, 0x2d, 0xc4, 0xc6, 0xc2, 0x17, 0xf0, 0x0d, 0x2c, + 0x7d, 0x14, 0x3b, 0xf1, 0x15, 0xac, 0xac, 0x7c, 0x04, 0xc9, 0x4e, 0x26, 0x20, 0x6e, 0xdc, 0xee, + 0x63, 0xe6, 0xfb, 0x39, 0xf3, 0x9d, 0x31, 0x4e, 0x23, 0x21, 0xa2, 0x39, 0xc7, 0x94, 0x01, 0x96, + 0xb0, 0x46, 0x95, 0x8d, 0x81, 0xe7, 0x55, 0x1c, 0x72, 0xc0, 0x94, 0x05, 0x51, 0x2e, 0xca, 0x2c, + 0xa0, 0x25, 0x8b, 0x79, 0x1a, 0xf2, 0xa0, 0x8a, 0xf9, 0x22, 0x68, 0xee, 0x51, 0x96, 0x8b, 0x42, + 0x98, 0x3d, 0xa9, 0x45, 0x94, 0x01, 0x6a, 0x6d, 0x50, 0x65, 0x23, 0x65, 0x63, 0x8d, 0xbb, 0x82, + 0x72, 0x0e, 0xa2, 0xcc, 0xbb, 0x93, 0x64, 0x82, 0xb5, 0xa9, 0xf4, 0x59, 0x8c, 0x69, 0x9a, 0x8a, + 0x82, 0x16, 0xb1, 0x48, 0x41, 0xde, 0xf6, 0x4f, 0x8c, 0x2d, 0x97, 0x17, 0x0e, 0x73, 0x6b, 0xbd, + 0xd3, 0xc8, 0xbd, 0x98, 0x2f, 0x66, 0xfc, 0xba, 0xe4, 0x50, 0x98, 0x3b, 0xc6, 0x3f, 0x15, 0x14, + 0xa4, 0x34, 0xe1, 0x1b, 0x5a, 0x4f, 0xdb, 0xfd, 0x33, 0xfb, 0xab, 0x0e, 0xcf, 0x69, 0xc2, 0xf7, + 0x3e, 0x34, 0xc3, 0x5a, 0xe1, 0x71, 0x21, 0xdf, 0x60, 0xbe, 0x68, 0xc6, 0xff, 0xd5, 0x29, 0xe6, + 0x31, 0x5a, 0x57, 0x00, 0xfa, 0x71, 0x3e, 0x6b, 0xd4, 0x69, 0xd0, 0xf6, 0x83, 0x56, 0xc8, 0xfb, + 0xe3, 0xbb, 0xd7, 0xb7, 0x07, 0xfd, 0xc0, 0x1c, 0xd5, 0x55, 0xde, 0x7c, 0x79, 0xe2, 0x51, 0x58, + 0x42, 0x21, 0x12, 0x9e, 0x03, 0x1e, 0x62, 0xfa, 0x5d, 0x0b, 0x78, 0x78, 0x3b, 0xb9, 0xd7, 0x8d, + 0x41, 0x28, 0x92, 0xb5, 0xe3, 0x4f, 0xb6, 0xbb, 0x8b, 0x99, 0xd6, 0x2b, 0x98, 0x6a, 0x97, 0x67, + 0x8d, 0x49, 0x24, 0xe6, 0x34, 0x8d, 0x90, 0xc8, 0x23, 0x1c, 0xf1, 0x74, 0xb9, 0x20, 0xb5, 0xf2, + 0x2c, 0x86, 0xee, 0xaf, 0x76, 0xa8, 0xc0, 0xa3, 0xfe, 0xcb, 0x75, 0x9c, 0x27, 0xbd, 0xe7, 0x4a, + 0x43, 0x87, 0x01, 0x92, 0xb0, 0x46, 0x9e, 0x8d, 0x9a, 0x60, 0x78, 0x56, 0x14, 0xdf, 0x61, 0xe0, + 0xb7, 0x14, 0xdf, 0xb3, 0x7d, 0x45, 0x79, 0xd7, 0x07, 0xf2, 0x9c, 0x10, 0x87, 0x01, 0x21, 0x2d, + 0x89, 0x10, 0xcf, 0x26, 0x44, 0xd1, 0xae, 0x7e, 0x2f, 0xe7, 0xdc, 0xff, 0x0c, 0x00, 0x00, 0xff, + 0xff, 0x44, 0x48, 0x54, 0x5f, 0x11, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_bid_modifier_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_bid_modifier_service.pb.go new file mode 100644 index 0000000..5032636 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_bid_modifier_service.pb.go @@ -0,0 +1,591 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_bid_modifier_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupBidModifierService.GetAdGroupBidModifier][google.ads.googleads.v1.services.AdGroupBidModifierService.GetAdGroupBidModifier]. +type GetAdGroupBidModifierRequest struct { + // The resource name of the ad group bid modifier to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupBidModifierRequest) Reset() { *m = GetAdGroupBidModifierRequest{} } +func (m *GetAdGroupBidModifierRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupBidModifierRequest) ProtoMessage() {} +func (*GetAdGroupBidModifierRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a, []int{0} +} +func (m *GetAdGroupBidModifierRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupBidModifierRequest.Unmarshal(m, b) +} +func (m *GetAdGroupBidModifierRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupBidModifierRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupBidModifierRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupBidModifierRequest.Merge(dst, src) +} +func (m *GetAdGroupBidModifierRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupBidModifierRequest.Size(m) +} +func (m *GetAdGroupBidModifierRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupBidModifierRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupBidModifierRequest proto.InternalMessageInfo + +func (m *GetAdGroupBidModifierRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupBidModifierService.MutateAdGroupBidModifiers][google.ads.googleads.v1.services.AdGroupBidModifierService.MutateAdGroupBidModifiers]. +type MutateAdGroupBidModifiersRequest struct { + // ID of the customer whose ad group bid modifiers are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ad group bid modifiers. + Operations []*AdGroupBidModifierOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupBidModifiersRequest) Reset() { *m = MutateAdGroupBidModifiersRequest{} } +func (m *MutateAdGroupBidModifiersRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupBidModifiersRequest) ProtoMessage() {} +func (*MutateAdGroupBidModifiersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a, []int{1} +} +func (m *MutateAdGroupBidModifiersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupBidModifiersRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupBidModifiersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupBidModifiersRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupBidModifiersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupBidModifiersRequest.Merge(dst, src) +} +func (m *MutateAdGroupBidModifiersRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupBidModifiersRequest.Size(m) +} +func (m *MutateAdGroupBidModifiersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupBidModifiersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupBidModifiersRequest proto.InternalMessageInfo + +func (m *MutateAdGroupBidModifiersRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupBidModifiersRequest) GetOperations() []*AdGroupBidModifierOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupBidModifiersRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupBidModifiersRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove, update) on an ad group bid modifier. +type AdGroupBidModifierOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupBidModifierOperation_Create + // *AdGroupBidModifierOperation_Update + // *AdGroupBidModifierOperation_Remove + Operation isAdGroupBidModifierOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupBidModifierOperation) Reset() { *m = AdGroupBidModifierOperation{} } +func (m *AdGroupBidModifierOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupBidModifierOperation) ProtoMessage() {} +func (*AdGroupBidModifierOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a, []int{2} +} +func (m *AdGroupBidModifierOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupBidModifierOperation.Unmarshal(m, b) +} +func (m *AdGroupBidModifierOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupBidModifierOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupBidModifierOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupBidModifierOperation.Merge(dst, src) +} +func (m *AdGroupBidModifierOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupBidModifierOperation.Size(m) +} +func (m *AdGroupBidModifierOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupBidModifierOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupBidModifierOperation proto.InternalMessageInfo + +func (m *AdGroupBidModifierOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAdGroupBidModifierOperation_Operation interface { + isAdGroupBidModifierOperation_Operation() +} + +type AdGroupBidModifierOperation_Create struct { + Create *resources.AdGroupBidModifier `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupBidModifierOperation_Update struct { + Update *resources.AdGroupBidModifier `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupBidModifierOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupBidModifierOperation_Create) isAdGroupBidModifierOperation_Operation() {} + +func (*AdGroupBidModifierOperation_Update) isAdGroupBidModifierOperation_Operation() {} + +func (*AdGroupBidModifierOperation_Remove) isAdGroupBidModifierOperation_Operation() {} + +func (m *AdGroupBidModifierOperation) GetOperation() isAdGroupBidModifierOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupBidModifierOperation) GetCreate() *resources.AdGroupBidModifier { + if x, ok := m.GetOperation().(*AdGroupBidModifierOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupBidModifierOperation) GetUpdate() *resources.AdGroupBidModifier { + if x, ok := m.GetOperation().(*AdGroupBidModifierOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupBidModifierOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupBidModifierOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupBidModifierOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupBidModifierOperation_OneofMarshaler, _AdGroupBidModifierOperation_OneofUnmarshaler, _AdGroupBidModifierOperation_OneofSizer, []interface{}{ + (*AdGroupBidModifierOperation_Create)(nil), + (*AdGroupBidModifierOperation_Update)(nil), + (*AdGroupBidModifierOperation_Remove)(nil), + } +} + +func _AdGroupBidModifierOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupBidModifierOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupBidModifierOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupBidModifierOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupBidModifierOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupBidModifierOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupBidModifierOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupBidModifierOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupBidModifier) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupBidModifierOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupBidModifier) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupBidModifierOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupBidModifierOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupBidModifierOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupBidModifierOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupBidModifierOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifierOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupBidModifierOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for ad group bid modifiers mutate. +type MutateAdGroupBidModifiersResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupBidModifierResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupBidModifiersResponse) Reset() { *m = MutateAdGroupBidModifiersResponse{} } +func (m *MutateAdGroupBidModifiersResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupBidModifiersResponse) ProtoMessage() {} +func (*MutateAdGroupBidModifiersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a, []int{3} +} +func (m *MutateAdGroupBidModifiersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupBidModifiersResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupBidModifiersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupBidModifiersResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupBidModifiersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupBidModifiersResponse.Merge(dst, src) +} +func (m *MutateAdGroupBidModifiersResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupBidModifiersResponse.Size(m) +} +func (m *MutateAdGroupBidModifiersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupBidModifiersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupBidModifiersResponse proto.InternalMessageInfo + +func (m *MutateAdGroupBidModifiersResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupBidModifiersResponse) GetResults() []*MutateAdGroupBidModifierResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the criterion mutate. +type MutateAdGroupBidModifierResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupBidModifierResult) Reset() { *m = MutateAdGroupBidModifierResult{} } +func (m *MutateAdGroupBidModifierResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupBidModifierResult) ProtoMessage() {} +func (*MutateAdGroupBidModifierResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a, []int{4} +} +func (m *MutateAdGroupBidModifierResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupBidModifierResult.Unmarshal(m, b) +} +func (m *MutateAdGroupBidModifierResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupBidModifierResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupBidModifierResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupBidModifierResult.Merge(dst, src) +} +func (m *MutateAdGroupBidModifierResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupBidModifierResult.Size(m) +} +func (m *MutateAdGroupBidModifierResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupBidModifierResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupBidModifierResult proto.InternalMessageInfo + +func (m *MutateAdGroupBidModifierResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupBidModifierRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupBidModifierRequest") + proto.RegisterType((*MutateAdGroupBidModifiersRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupBidModifiersRequest") + proto.RegisterType((*AdGroupBidModifierOperation)(nil), "google.ads.googleads.v1.services.AdGroupBidModifierOperation") + proto.RegisterType((*MutateAdGroupBidModifiersResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupBidModifiersResponse") + proto.RegisterType((*MutateAdGroupBidModifierResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupBidModifierResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupBidModifierServiceClient is the client API for AdGroupBidModifierService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupBidModifierServiceClient interface { + // Returns the requested ad group bid modifier in full detail. + GetAdGroupBidModifier(ctx context.Context, in *GetAdGroupBidModifierRequest, opts ...grpc.CallOption) (*resources.AdGroupBidModifier, error) + // Creates, updates, or removes ad group bid modifiers. + // Operation statuses are returned. 
+ MutateAdGroupBidModifiers(ctx context.Context, in *MutateAdGroupBidModifiersRequest, opts ...grpc.CallOption) (*MutateAdGroupBidModifiersResponse, error) +} + +type adGroupBidModifierServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupBidModifierServiceClient(cc *grpc.ClientConn) AdGroupBidModifierServiceClient { + return &adGroupBidModifierServiceClient{cc} +} + +func (c *adGroupBidModifierServiceClient) GetAdGroupBidModifier(ctx context.Context, in *GetAdGroupBidModifierRequest, opts ...grpc.CallOption) (*resources.AdGroupBidModifier, error) { + out := new(resources.AdGroupBidModifier) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupBidModifierService/GetAdGroupBidModifier", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupBidModifierServiceClient) MutateAdGroupBidModifiers(ctx context.Context, in *MutateAdGroupBidModifiersRequest, opts ...grpc.CallOption) (*MutateAdGroupBidModifiersResponse, error) { + out := new(MutateAdGroupBidModifiersResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupBidModifierService/MutateAdGroupBidModifiers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupBidModifierServiceServer is the server API for AdGroupBidModifierService service. +type AdGroupBidModifierServiceServer interface { + // Returns the requested ad group bid modifier in full detail. + GetAdGroupBidModifier(context.Context, *GetAdGroupBidModifierRequest) (*resources.AdGroupBidModifier, error) + // Creates, updates, or removes ad group bid modifiers. + // Operation statuses are returned. + MutateAdGroupBidModifiers(context.Context, *MutateAdGroupBidModifiersRequest) (*MutateAdGroupBidModifiersResponse, error) +} + +func RegisterAdGroupBidModifierServiceServer(s *grpc.Server, srv AdGroupBidModifierServiceServer) { + s.RegisterService(&_AdGroupBidModifierService_serviceDesc, srv) +} + +func _AdGroupBidModifierService_GetAdGroupBidModifier_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupBidModifierRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupBidModifierServiceServer).GetAdGroupBidModifier(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupBidModifierService/GetAdGroupBidModifier", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupBidModifierServiceServer).GetAdGroupBidModifier(ctx, req.(*GetAdGroupBidModifierRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupBidModifierService_MutateAdGroupBidModifiers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupBidModifiersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupBidModifierServiceServer).MutateAdGroupBidModifiers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupBidModifierService/MutateAdGroupBidModifiers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupBidModifierServiceServer).MutateAdGroupBidModifiers(ctx, req.(*MutateAdGroupBidModifiersRequest)) + } + return interceptor(ctx, 
in, info, handler) +} + +var _AdGroupBidModifierService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupBidModifierService", + HandlerType: (*AdGroupBidModifierServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupBidModifier", + Handler: _AdGroupBidModifierService_GetAdGroupBidModifier_Handler, + }, + { + MethodName: "MutateAdGroupBidModifiers", + Handler: _AdGroupBidModifierService_MutateAdGroupBidModifiers_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_bid_modifier_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_bid_modifier_service.proto", fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a) +} + +var fileDescriptor_ad_group_bid_modifier_service_3a4e96624f81a14a = []byte{ + // 730 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xcd, 0x6e, 0xd3, 0x4a, + 0x14, 0xc7, 0xaf, 0x9d, 0xab, 0xde, 0xdb, 0x49, 0xef, 0x45, 0x1a, 0x54, 0x91, 0x86, 0xaa, 0x04, + 0x53, 0x89, 0x2a, 0x0b, 0x5b, 0x09, 0xaa, 0x40, 0xae, 0x82, 0x48, 0x4a, 0x9b, 0xb2, 0x28, 0xad, + 0x5c, 0xa9, 0x8b, 0x2a, 0xc8, 0x9a, 0x66, 0x26, 0x96, 0x55, 0xdb, 0x63, 0x66, 0xc6, 0x41, 0x55, + 0xd5, 0x0d, 0x12, 0x4f, 0xc0, 0x1b, 0xc0, 0x8e, 0x17, 0x41, 0x42, 0x62, 0xc5, 0x82, 0x17, 0x60, + 0x03, 0x2b, 0x1e, 0x01, 0xd9, 0xe3, 0x09, 0x6d, 0x53, 0x27, 0xa8, 0xdd, 0x9d, 0x9c, 0xf9, 0xe7, + 0x77, 0x3e, 0x67, 0x0c, 0x9e, 0x7a, 0x94, 0x7a, 0x01, 0xb1, 0x10, 0xe6, 0x96, 0x34, 0x53, 0x6b, + 0xd8, 0xb0, 0x38, 0x61, 0x43, 0xbf, 0x4f, 0xb8, 0x85, 0xb0, 0xeb, 0x31, 0x9a, 0xc4, 0xee, 0xa1, + 0x8f, 0xdd, 0x90, 0x62, 0x7f, 0xe0, 0x13, 0xe6, 0xe6, 0xc7, 0x66, 0xcc, 0xa8, 0xa0, 0xb0, 0x26, + 0xff, 0x6a, 0x22, 0xcc, 0xcd, 0x11, 0xc5, 0x1c, 0x36, 0x4c, 0x45, 0xa9, 0xb6, 0x8a, 0xe2, 0x30, + 0xc2, 0x69, 0xc2, 0x0a, 0x03, 0xc9, 0x00, 0xd5, 0x45, 0xf5, 0xf7, 0xd8, 0xb7, 0x50, 0x14, 0x51, + 0x81, 0x84, 0x4f, 0x23, 0x9e, 0x9f, 0xe6, 0xe1, 0xad, 0xec, 0xd7, 0x61, 0x32, 0xb0, 0x06, 0x3e, + 0x09, 0xb0, 0x1b, 0x22, 0x7e, 0x94, 0x2b, 0x96, 0x2e, 0x2a, 0x5e, 0x31, 0x14, 0xc7, 0x84, 0x29, + 0xc2, 0xad, 0xfc, 0x9c, 0xc5, 0x7d, 0x8b, 0x0b, 0x24, 0x92, 0xfc, 0xc0, 0x58, 0x07, 0x8b, 0x5d, + 0x22, 0xda, 0xb8, 0x9b, 0x66, 0xd6, 0xf1, 0xf1, 0x76, 0x9e, 0x97, 0x43, 0x5e, 0x26, 0x84, 0x0b, + 0x78, 0x0f, 0xfc, 0xa7, 0x2a, 0x70, 0x23, 0x14, 0x92, 0x8a, 0x56, 0xd3, 0x56, 0x66, 0x9d, 0x39, + 0xe5, 0x7c, 0x8e, 0x42, 0x62, 0xfc, 0xd4, 0x40, 0x6d, 0x3b, 0x11, 0x48, 0x90, 0x71, 0x10, 0x57, + 0xa4, 0x3b, 0xa0, 0xdc, 0x4f, 0xb8, 0xa0, 0x21, 0x61, 0xae, 0x8f, 0x73, 0x0e, 0x50, 0xae, 0x67, + 0x18, 0xbe, 0x00, 0x80, 0xc6, 0x84, 0xc9, 0xca, 0x2b, 0x7a, 0xad, 0xb4, 0x52, 0x6e, 0xb6, 0xcc, + 0x69, 0x9d, 0x37, 0xc7, 0x43, 0xee, 0x28, 0x8a, 0x73, 0x06, 0x08, 0xef, 0x83, 0x1b, 0x31, 0x62, + 0xc2, 0x47, 0x81, 0x3b, 0x40, 0x7e, 0x90, 0x30, 0x52, 0x29, 0xd5, 0xb4, 0x95, 0x7f, 0x9d, 0xff, + 0x73, 0xf7, 0xa6, 0xf4, 0xa6, 0x25, 0x0f, 0x51, 0xe0, 0x63, 0x24, 0x88, 0x4b, 0xa3, 0xe0, 0xb8, + 0xf2, 0x77, 0x26, 0x9b, 0x53, 0xce, 0x9d, 0x28, 0x38, 0x36, 0xde, 0xeb, 0xe0, 0xf6, 0x84, 0xc8, + 0x70, 0x0d, 0x94, 0x93, 0x38, 0x43, 0xa4, 0x53, 0xca, 0x10, 0xe5, 0x66, 0x55, 0x55, 0xa3, 0xc6, + 0x64, 0x6e, 0xa6, 0x83, 0xdc, 0x46, 0xfc, 0xc8, 0x01, 0x52, 0x9e, 0xda, 0x70, 0x07, 0xcc, 0xf4, + 0x19, 0x41, 0x42, 0x76, 0xbb, 0xdc, 0x5c, 0x2d, 0xec, 0xc2, 0x68, 0xbb, 0x2e, 0x69, 0xc3, 0xd6, + 0x5f, 0x4e, 0x8e, 0x49, 0x81, 0x12, 0x5f, 0xd1, 0xaf, 0x09, 0x94, 0x18, 0x58, 0x01, 
0x33, 0x8c, + 0x84, 0x74, 0x28, 0x7b, 0x38, 0x9b, 0x9e, 0xc8, 0xdf, 0x9d, 0x32, 0x98, 0x1d, 0x35, 0xdd, 0xf8, + 0xa8, 0x81, 0xbb, 0x13, 0x16, 0x83, 0xc7, 0x34, 0xe2, 0x04, 0x6e, 0x82, 0xf9, 0x0b, 0x93, 0x71, + 0x09, 0x63, 0x94, 0x65, 0xec, 0x72, 0x13, 0xaa, 0x64, 0x59, 0xdc, 0x37, 0xf7, 0xb2, 0xe5, 0x75, + 0x6e, 0x9e, 0x9f, 0xd9, 0x46, 0x2a, 0x87, 0x07, 0xe0, 0x1f, 0x46, 0x78, 0x12, 0x08, 0xb5, 0x3d, + 0x4f, 0xa6, 0x6f, 0x4f, 0x51, 0x76, 0x4e, 0x06, 0x72, 0x14, 0xd0, 0xd8, 0x00, 0x4b, 0x93, 0xa5, + 0x7f, 0x74, 0x53, 0x9a, 0x5f, 0x4b, 0x60, 0x61, 0x9c, 0xb0, 0x27, 0xb3, 0x81, 0x9f, 0x35, 0x30, + 0x7f, 0xe9, 0x6d, 0x84, 0x8f, 0xa7, 0x57, 0x32, 0xe9, 0x1a, 0x57, 0xaf, 0x36, 0x70, 0xa3, 0xf5, + 0xfa, 0xcb, 0xb7, 0xb7, 0xfa, 0x43, 0xb8, 0x9a, 0xbe, 0x64, 0x27, 0xe7, 0xca, 0x6b, 0xa9, 0x9b, + 0xcb, 0xad, 0xba, 0x85, 0xc6, 0xa7, 0x6b, 0xd5, 0x4f, 0xe1, 0x77, 0x0d, 0x2c, 0x14, 0x8e, 0x1f, + 0x76, 0xae, 0x3e, 0x1d, 0xf5, 0xa8, 0x54, 0xd7, 0xaf, 0xc5, 0x90, 0xfb, 0x67, 0xac, 0x67, 0x55, + 0xb6, 0x8c, 0x47, 0x69, 0x95, 0xbf, 0xcb, 0x3a, 0x39, 0xf3, 0x5c, 0xb5, 0xea, 0xa7, 0x97, 0x15, + 0x69, 0x87, 0x19, 0xdc, 0xd6, 0xea, 0x9d, 0x37, 0x3a, 0x58, 0xee, 0xd3, 0x70, 0x6a, 0x3e, 0x9d, + 0xa5, 0xc2, 0xf9, 0xef, 0xa6, 0xaf, 0xc2, 0xae, 0x76, 0xb0, 0x95, 0x33, 0x3c, 0x1a, 0xa0, 0xc8, + 0x33, 0x29, 0xf3, 0x2c, 0x8f, 0x44, 0xd9, 0x9b, 0xa1, 0xbe, 0x2d, 0xb1, 0xcf, 0x8b, 0x3f, 0x69, + 0x6b, 0xca, 0x78, 0xa7, 0x97, 0xba, 0xed, 0xf6, 0x07, 0xbd, 0xd6, 0x95, 0xc0, 0x36, 0xe6, 0xa6, + 0x34, 0x53, 0x6b, 0xbf, 0x61, 0xe6, 0x81, 0xf9, 0x27, 0x25, 0xe9, 0xb5, 0x31, 0xef, 0x8d, 0x24, + 0xbd, 0xfd, 0x46, 0x4f, 0x49, 0x7e, 0xe8, 0xcb, 0xd2, 0x6f, 0xdb, 0x6d, 0xcc, 0x6d, 0x7b, 0x24, + 0xb2, 0xed, 0xfd, 0x86, 0x6d, 0x2b, 0xd9, 0xe1, 0x4c, 0x96, 0xe7, 0x83, 0x5f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x79, 0xcc, 0x98, 0xaf, 0x79, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_label_service.pb.go new file mode 100644 index 0000000..2edbd0a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_label_service.pb.go @@ -0,0 +1,547 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_criterion_label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [AdGroupCriterionLabelService.GetAdGroupCriterionLabel][google.ads.googleads.v1.services.AdGroupCriterionLabelService.GetAdGroupCriterionLabel]. +type GetAdGroupCriterionLabelRequest struct { + // The resource name of the ad group criterion label to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupCriterionLabelRequest) Reset() { *m = GetAdGroupCriterionLabelRequest{} } +func (m *GetAdGroupCriterionLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupCriterionLabelRequest) ProtoMessage() {} +func (*GetAdGroupCriterionLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa, []int{0} +} +func (m *GetAdGroupCriterionLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupCriterionLabelRequest.Unmarshal(m, b) +} +func (m *GetAdGroupCriterionLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupCriterionLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupCriterionLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupCriterionLabelRequest.Merge(dst, src) +} +func (m *GetAdGroupCriterionLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupCriterionLabelRequest.Size(m) +} +func (m *GetAdGroupCriterionLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupCriterionLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupCriterionLabelRequest proto.InternalMessageInfo + +func (m *GetAdGroupCriterionLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [AdGroupCriterionLabelService.MutateAdGroupCriterionLabels][google.ads.googleads.v1.services.AdGroupCriterionLabelService.MutateAdGroupCriterionLabels]. +type MutateAdGroupCriterionLabelsRequest struct { + // ID of the customer whose ad group criterion labels are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on ad group criterion labels. + Operations []*AdGroupCriterionLabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriterionLabelsRequest) Reset() { *m = MutateAdGroupCriterionLabelsRequest{} } +func (m *MutateAdGroupCriterionLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriterionLabelsRequest) ProtoMessage() {} +func (*MutateAdGroupCriterionLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa, []int{1} +} +func (m *MutateAdGroupCriterionLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriterionLabelsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupCriterionLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriterionLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriterionLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriterionLabelsRequest.Merge(dst, src) +} +func (m *MutateAdGroupCriterionLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriterionLabelsRequest.Size(m) +} +func (m *MutateAdGroupCriterionLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriterionLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriterionLabelsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupCriterionLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupCriterionLabelsRequest) GetOperations() []*AdGroupCriterionLabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupCriterionLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupCriterionLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on an ad group criterion label. +type AdGroupCriterionLabelOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupCriterionLabelOperation_Create + // *AdGroupCriterionLabelOperation_Remove + Operation isAdGroupCriterionLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionLabelOperation) Reset() { *m = AdGroupCriterionLabelOperation{} } +func (m *AdGroupCriterionLabelOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionLabelOperation) ProtoMessage() {} +func (*AdGroupCriterionLabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa, []int{2} +} +func (m *AdGroupCriterionLabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionLabelOperation.Unmarshal(m, b) +} +func (m *AdGroupCriterionLabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionLabelOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionLabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionLabelOperation.Merge(dst, src) +} +func (m *AdGroupCriterionLabelOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionLabelOperation.Size(m) +} +func (m *AdGroupCriterionLabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionLabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionLabelOperation proto.InternalMessageInfo + +type isAdGroupCriterionLabelOperation_Operation interface { + isAdGroupCriterionLabelOperation_Operation() +} + +type AdGroupCriterionLabelOperation_Create struct { + Create *resources.AdGroupCriterionLabel `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupCriterionLabelOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupCriterionLabelOperation_Create) isAdGroupCriterionLabelOperation_Operation() {} + +func (*AdGroupCriterionLabelOperation_Remove) isAdGroupCriterionLabelOperation_Operation() {} + +func (m *AdGroupCriterionLabelOperation) GetOperation() isAdGroupCriterionLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupCriterionLabelOperation) GetCreate() *resources.AdGroupCriterionLabel { + if x, ok := m.GetOperation().(*AdGroupCriterionLabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupCriterionLabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupCriterionLabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupCriterionLabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupCriterionLabelOperation_OneofMarshaler, _AdGroupCriterionLabelOperation_OneofUnmarshaler, _AdGroupCriterionLabelOperation_OneofSizer, []interface{}{ + (*AdGroupCriterionLabelOperation_Create)(nil), + (*AdGroupCriterionLabelOperation_Remove)(nil), + } +} + +func _AdGroupCriterionLabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupCriterionLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupCriterionLabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupCriterionLabelOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupCriterionLabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupCriterionLabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupCriterionLabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupCriterionLabel) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupCriterionLabelOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupCriterionLabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupCriterionLabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupCriterionLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupCriterionLabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterionLabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group criterion labels mutate. +type MutateAdGroupCriterionLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupCriterionLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriterionLabelsResponse) Reset() { *m = MutateAdGroupCriterionLabelsResponse{} } +func (m *MutateAdGroupCriterionLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriterionLabelsResponse) ProtoMessage() {} +func (*MutateAdGroupCriterionLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa, []int{3} +} +func (m *MutateAdGroupCriterionLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriterionLabelsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupCriterionLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriterionLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriterionLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriterionLabelsResponse.Merge(dst, src) +} +func (m *MutateAdGroupCriterionLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriterionLabelsResponse.Size(m) +} +func (m *MutateAdGroupCriterionLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriterionLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriterionLabelsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupCriterionLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupCriterionLabelsResponse) GetResults() []*MutateAdGroupCriterionLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for an ad group criterion label mutate. +type MutateAdGroupCriterionLabelResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriterionLabelResult) Reset() { *m = MutateAdGroupCriterionLabelResult{} } +func (m *MutateAdGroupCriterionLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriterionLabelResult) ProtoMessage() {} +func (*MutateAdGroupCriterionLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa, []int{4} +} +func (m *MutateAdGroupCriterionLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriterionLabelResult.Unmarshal(m, b) +} +func (m *MutateAdGroupCriterionLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriterionLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriterionLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriterionLabelResult.Merge(dst, src) +} +func (m *MutateAdGroupCriterionLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriterionLabelResult.Size(m) +} +func (m *MutateAdGroupCriterionLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriterionLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriterionLabelResult proto.InternalMessageInfo + +func (m *MutateAdGroupCriterionLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupCriterionLabelRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupCriterionLabelRequest") + proto.RegisterType((*MutateAdGroupCriterionLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriterionLabelsRequest") + proto.RegisterType((*AdGroupCriterionLabelOperation)(nil), "google.ads.googleads.v1.services.AdGroupCriterionLabelOperation") + proto.RegisterType((*MutateAdGroupCriterionLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriterionLabelsResponse") + proto.RegisterType((*MutateAdGroupCriterionLabelResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriterionLabelResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupCriterionLabelServiceClient is the client API for AdGroupCriterionLabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupCriterionLabelServiceClient interface { + // Returns the requested ad group criterion label in full detail. + GetAdGroupCriterionLabel(ctx context.Context, in *GetAdGroupCriterionLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterionLabel, error) + // Creates and removes ad group criterion labels. + // Operation statuses are returned. 
+ MutateAdGroupCriterionLabels(ctx context.Context, in *MutateAdGroupCriterionLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupCriterionLabelsResponse, error) +} + +type adGroupCriterionLabelServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupCriterionLabelServiceClient(cc *grpc.ClientConn) AdGroupCriterionLabelServiceClient { + return &adGroupCriterionLabelServiceClient{cc} +} + +func (c *adGroupCriterionLabelServiceClient) GetAdGroupCriterionLabel(ctx context.Context, in *GetAdGroupCriterionLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterionLabel, error) { + out := new(resources.AdGroupCriterionLabel) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupCriterionLabelService/GetAdGroupCriterionLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupCriterionLabelServiceClient) MutateAdGroupCriterionLabels(ctx context.Context, in *MutateAdGroupCriterionLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupCriterionLabelsResponse, error) { + out := new(MutateAdGroupCriterionLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupCriterionLabelService/MutateAdGroupCriterionLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupCriterionLabelServiceServer is the server API for AdGroupCriterionLabelService service. +type AdGroupCriterionLabelServiceServer interface { + // Returns the requested ad group criterion label in full detail. + GetAdGroupCriterionLabel(context.Context, *GetAdGroupCriterionLabelRequest) (*resources.AdGroupCriterionLabel, error) + // Creates and removes ad group criterion labels. + // Operation statuses are returned. + MutateAdGroupCriterionLabels(context.Context, *MutateAdGroupCriterionLabelsRequest) (*MutateAdGroupCriterionLabelsResponse, error) +} + +func RegisterAdGroupCriterionLabelServiceServer(s *grpc.Server, srv AdGroupCriterionLabelServiceServer) { + s.RegisterService(&_AdGroupCriterionLabelService_serviceDesc, srv) +} + +func _AdGroupCriterionLabelService_GetAdGroupCriterionLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupCriterionLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupCriterionLabelServiceServer).GetAdGroupCriterionLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupCriterionLabelService/GetAdGroupCriterionLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupCriterionLabelServiceServer).GetAdGroupCriterionLabel(ctx, req.(*GetAdGroupCriterionLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupCriterionLabelService_MutateAdGroupCriterionLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupCriterionLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupCriterionLabelServiceServer).MutateAdGroupCriterionLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupCriterionLabelService/MutateAdGroupCriterionLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + 
return srv.(AdGroupCriterionLabelServiceServer).MutateAdGroupCriterionLabels(ctx, req.(*MutateAdGroupCriterionLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupCriterionLabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupCriterionLabelService", + HandlerType: (*AdGroupCriterionLabelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupCriterionLabel", + Handler: _AdGroupCriterionLabelService_GetAdGroupCriterionLabel_Handler, + }, + { + MethodName: "MutateAdGroupCriterionLabels", + Handler: _AdGroupCriterionLabelService_MutateAdGroupCriterionLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_criterion_label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_criterion_label_service.proto", fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa) +} + +var fileDescriptor_ad_group_criterion_label_service_7d87b5a83f995ffa = []byte{ + // 677 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcf, 0x6b, 0xd4, 0x5e, + 0x10, 0xff, 0x26, 0xfb, 0xa5, 0xda, 0xb7, 0x55, 0xe1, 0x89, 0x18, 0x96, 0xd2, 0x6e, 0xd3, 0x82, + 0x65, 0x0f, 0x09, 0xbb, 0x5e, 0x4a, 0x4a, 0x61, 0xd3, 0xd2, 0xdd, 0x0a, 0x6a, 0x4b, 0x0a, 0x3d, + 0xc8, 0x4a, 0x7c, 0x9b, 0x4c, 0x43, 0x20, 0x9b, 0x17, 0xdf, 0x7b, 0x59, 0x29, 0xa5, 0x17, 0x4f, + 0xde, 0x3d, 0x7a, 0xf3, 0xe8, 0x9f, 0x22, 0x78, 0xf2, 0xe4, 0xdd, 0x93, 0x47, 0x41, 0xcf, 0x92, + 0x1f, 0x6f, 0x6d, 0x65, 0x77, 0x23, 0xf6, 0x36, 0x99, 0x99, 0xfd, 0x7c, 0xe6, 0x33, 0x33, 0x6f, + 0x16, 0xf5, 0x03, 0x4a, 0x83, 0x08, 0x4c, 0xe2, 0x73, 0xb3, 0x30, 0x33, 0x6b, 0xdc, 0x36, 0x39, + 0xb0, 0x71, 0xe8, 0x01, 0x37, 0x89, 0xef, 0x06, 0x8c, 0xa6, 0x89, 0xeb, 0xb1, 0x50, 0x00, 0x0b, + 0x69, 0xec, 0x46, 0x64, 0x08, 0x91, 0x5b, 0x66, 0x18, 0x09, 0xa3, 0x82, 0xe2, 0x66, 0xf1, 0x6b, + 0x83, 0xf8, 0xdc, 0x98, 0x00, 0x19, 0xe3, 0xb6, 0x21, 0x81, 0x1a, 0xdd, 0x59, 0x54, 0x0c, 0x38, + 0x4d, 0xd9, 0x3c, 0xae, 0x82, 0xa3, 0xb1, 0x2c, 0x11, 0x92, 0xd0, 0x24, 0x71, 0x4c, 0x05, 0x11, + 0x21, 0x8d, 0x79, 0x19, 0x5d, 0x29, 0xa3, 0xf9, 0xd7, 0x30, 0x3d, 0x35, 0x5f, 0x31, 0x92, 0x24, + 0xc0, 0x64, 0xfc, 0x7e, 0x19, 0x67, 0x89, 0x67, 0x72, 0x41, 0x44, 0x5a, 0x06, 0xf4, 0x1e, 0x5a, + 0xed, 0x83, 0xb0, 0xfd, 0x7e, 0x46, 0xbd, 0x27, 0x99, 0x1f, 0x67, 0xc4, 0x0e, 0xbc, 0x4c, 0x81, + 0x0b, 0xbc, 0x8e, 0x6e, 0xc9, 0x2a, 0xdd, 0x98, 0x8c, 0x40, 0x53, 0x9a, 0xca, 0xe6, 0xa2, 0xb3, + 0x24, 0x9d, 0x4f, 0xc9, 0x08, 0xf4, 0x9f, 0x0a, 0x5a, 0x7f, 0x92, 0x0a, 0x22, 0x60, 0x2a, 0x16, + 0x97, 0x60, 0xab, 0xa8, 0xee, 0xa5, 0x5c, 0xd0, 0x11, 0x30, 0x37, 0xf4, 0x4b, 0x28, 0x24, 0x5d, + 0x8f, 0x7c, 0xfc, 0x02, 0x21, 0x9a, 0x00, 0x2b, 0xd4, 0x69, 0x6a, 0xb3, 0xb6, 0x59, 0xef, 0x74, + 0x8d, 0xaa, 0x06, 0x1b, 0x53, 0x59, 0x0f, 0x25, 0x90, 0x73, 0x09, 0x13, 0x3f, 0x40, 0x77, 0x12, + 0xc2, 0x44, 0x48, 0x22, 0xf7, 0x94, 0x84, 0x51, 0xca, 0x40, 0xab, 0x35, 0x95, 0xcd, 0x9b, 0xce, + 0xed, 0xd2, 0xdd, 0x2b, 0xbc, 0x99, 0xf0, 0x31, 0x89, 0x42, 0x9f, 0x08, 0x70, 0x69, 0x1c, 0x9d, + 0x69, 0xff, 0xe7, 0x69, 0x4b, 0xd2, 0x79, 0x18, 0x47, 0x67, 0xfa, 0x3b, 0x05, 0xad, 0xcc, 0x27, + 0xc7, 0x0e, 0x5a, 0xf0, 0x18, 0x10, 0x51, 0x74, 0xae, 0xde, 0xd9, 0x9a, 0x29, 0x67, 0xb2, 0x0d, + 0xd3, 0xf5, 0x1c, 0xfc, 0xe7, 0x94, 0x48, 0x58, 0x43, 0x0b, 0x0c, 0x46, 0x74, 0x0c, 0x9a, 0x9a, + 0xb5, 0x30, 0x8b, 0x14, 0xdf, 0xbb, 0x75, 0xb4, 0x38, 0x11, 0xab, 0x7f, 0x52, 0xd0, 
0xc6, 0xfc, + 0xb1, 0xf0, 0x84, 0xc6, 0x1c, 0x70, 0x0f, 0xdd, 0xfb, 0xa3, 0x29, 0x2e, 0x30, 0x46, 0x59, 0xde, + 0x9a, 0x7a, 0x07, 0xcb, 0x92, 0x59, 0xe2, 0x19, 0xc7, 0xf9, 0x02, 0x39, 0x77, 0xaf, 0xb6, 0x6b, + 0x3f, 0x4b, 0xc7, 0xcf, 0xd1, 0x0d, 0x06, 0x3c, 0x8d, 0x84, 0x9c, 0xdd, 0x5e, 0xf5, 0xec, 0xe6, + 0x14, 0xe8, 0xe4, 0x58, 0x8e, 0xc4, 0xd4, 0x0f, 0xd0, 0x5a, 0x65, 0xf6, 0x5f, 0x2d, 0x6c, 0xe7, + 0x7b, 0x0d, 0x2d, 0x4f, 0x05, 0x39, 0x2e, 0xca, 0xc2, 0x5f, 0x14, 0xa4, 0xcd, 0x7a, 0x1a, 0xd8, + 0xae, 0x56, 0x55, 0xf1, 0xac, 0x1a, 0xff, 0xbc, 0x05, 0x7a, 0xf7, 0xf5, 0xe7, 0xaf, 0x6f, 0x55, + 0x0b, 0x6f, 0x65, 0x07, 0xe4, 0xfc, 0x8a, 0xd4, 0x1d, 0xf9, 0x92, 0xb8, 0xd9, 0x32, 0xc9, 0xd4, + 0x91, 0x9b, 0xad, 0x0b, 0xfc, 0x43, 0x41, 0xcb, 0xf3, 0xd6, 0x02, 0xef, 0x5f, 0x6b, 0x6a, 0xf2, + 0xb5, 0x37, 0x7a, 0xd7, 0x85, 0x29, 0xb6, 0x53, 0xef, 0xe5, 0x8a, 0xbb, 0xfa, 0x76, 0xa6, 0xf8, + 0xb7, 0xc4, 0xf3, 0x4b, 0xa7, 0x64, 0xa7, 0x75, 0x31, 0x43, 0xb0, 0x35, 0xca, 0x29, 0x2c, 0xa5, + 0xb5, 0xfb, 0x46, 0x45, 0x1b, 0x1e, 0x1d, 0x55, 0x56, 0xb5, 0xbb, 0x36, 0x6f, 0x35, 0x8e, 0xb2, + 0xcb, 0x79, 0xa4, 0x3c, 0x3b, 0x28, 0x61, 0x02, 0x1a, 0x91, 0x38, 0x30, 0x28, 0x0b, 0xcc, 0x00, + 0xe2, 0xfc, 0xae, 0xca, 0x23, 0x9f, 0x84, 0x7c, 0xf6, 0xdf, 0xcb, 0xb6, 0x34, 0xde, 0xab, 0xb5, + 0xbe, 0x6d, 0x7f, 0x50, 0x9b, 0xfd, 0x02, 0xd0, 0xf6, 0xb9, 0x51, 0x98, 0x99, 0x75, 0xd2, 0x36, + 0x4a, 0x62, 0xfe, 0x51, 0xa6, 0x0c, 0x6c, 0x9f, 0x0f, 0x26, 0x29, 0x83, 0x93, 0xf6, 0x40, 0xa6, + 0x7c, 0x53, 0x37, 0x0a, 0xbf, 0x65, 0xd9, 0x3e, 0xb7, 0xac, 0x49, 0x92, 0x65, 0x9d, 0xb4, 0x2d, + 0x4b, 0xa6, 0x0d, 0x17, 0xf2, 0x3a, 0x1f, 0xfe, 0x0a, 0x00, 0x00, 0xff, 0xff, 0x4c, 0x21, 0x33, + 0x9c, 0x05, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_service.pb.go new file mode 100644 index 0000000..8426e6c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_service.pb.go @@ -0,0 +1,610 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_criterion_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupCriterionService.GetAdGroupCriterion][google.ads.googleads.v1.services.AdGroupCriterionService.GetAdGroupCriterion]. 
+type GetAdGroupCriterionRequest struct { + // The resource name of the criterion to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupCriterionRequest) Reset() { *m = GetAdGroupCriterionRequest{} } +func (m *GetAdGroupCriterionRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupCriterionRequest) ProtoMessage() {} +func (*GetAdGroupCriterionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4, []int{0} +} +func (m *GetAdGroupCriterionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupCriterionRequest.Unmarshal(m, b) +} +func (m *GetAdGroupCriterionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupCriterionRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupCriterionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupCriterionRequest.Merge(dst, src) +} +func (m *GetAdGroupCriterionRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupCriterionRequest.Size(m) +} +func (m *GetAdGroupCriterionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupCriterionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupCriterionRequest proto.InternalMessageInfo + +func (m *GetAdGroupCriterionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupCriterionService.MutateAdGroupCriteria][google.ads.googleads.v1.services.AdGroupCriterionService.MutateAdGroupCriteria]. +type MutateAdGroupCriteriaRequest struct { + // ID of the customer whose criteria are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual criteria. + Operations []*AdGroupCriterionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriteriaRequest) Reset() { *m = MutateAdGroupCriteriaRequest{} } +func (m *MutateAdGroupCriteriaRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriteriaRequest) ProtoMessage() {} +func (*MutateAdGroupCriteriaRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4, []int{1} +} +func (m *MutateAdGroupCriteriaRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriteriaRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupCriteriaRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriteriaRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriteriaRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriteriaRequest.Merge(dst, src) +} +func (m *MutateAdGroupCriteriaRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriteriaRequest.Size(m) +} +func (m *MutateAdGroupCriteriaRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriteriaRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriteriaRequest proto.InternalMessageInfo + +func (m *MutateAdGroupCriteriaRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupCriteriaRequest) GetOperations() []*AdGroupCriterionOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupCriteriaRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupCriteriaRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove, update) on an ad group criterion. +type AdGroupCriterionOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The list of policy violation keys that should not cause a + // PolicyViolationError to be reported. Not all policy violations are + // exemptable, please refer to the is_exemptible field in the returned + // PolicyViolationError. + // + // Resources violating these polices will be saved, but will not be eligible + // to serve. They may begin serving at a later time due to a change in + // policies, re-review of the resource, or a change in advertiser + // certificates. + ExemptPolicyViolationKeys []*common.PolicyViolationKey `protobuf:"bytes,5,rep,name=exempt_policy_violation_keys,json=exemptPolicyViolationKeys,proto3" json:"exempt_policy_violation_keys,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupCriterionOperation_Create + // *AdGroupCriterionOperation_Update + // *AdGroupCriterionOperation_Remove + Operation isAdGroupCriterionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupCriterionOperation) Reset() { *m = AdGroupCriterionOperation{} } +func (m *AdGroupCriterionOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupCriterionOperation) ProtoMessage() {} +func (*AdGroupCriterionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4, []int{2} +} +func (m *AdGroupCriterionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupCriterionOperation.Unmarshal(m, b) +} +func (m *AdGroupCriterionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupCriterionOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupCriterionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupCriterionOperation.Merge(dst, src) +} +func (m *AdGroupCriterionOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupCriterionOperation.Size(m) +} +func (m *AdGroupCriterionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupCriterionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupCriterionOperation proto.InternalMessageInfo + +func (m *AdGroupCriterionOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *AdGroupCriterionOperation) GetExemptPolicyViolationKeys() []*common.PolicyViolationKey { + if m != nil { + return m.ExemptPolicyViolationKeys + } + return nil +} + +type isAdGroupCriterionOperation_Operation interface { + isAdGroupCriterionOperation_Operation() +} + +type AdGroupCriterionOperation_Create struct { + Create *resources.AdGroupCriterion `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupCriterionOperation_Update struct { + Update *resources.AdGroupCriterion `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupCriterionOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupCriterionOperation_Create) isAdGroupCriterionOperation_Operation() {} + +func (*AdGroupCriterionOperation_Update) isAdGroupCriterionOperation_Operation() {} + +func (*AdGroupCriterionOperation_Remove) isAdGroupCriterionOperation_Operation() {} + +func (m *AdGroupCriterionOperation) GetOperation() isAdGroupCriterionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupCriterionOperation) GetCreate() *resources.AdGroupCriterion { + if x, ok := m.GetOperation().(*AdGroupCriterionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupCriterionOperation) GetUpdate() *resources.AdGroupCriterion { + if x, ok := m.GetOperation().(*AdGroupCriterionOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupCriterionOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupCriterionOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupCriterionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupCriterionOperation_OneofMarshaler, _AdGroupCriterionOperation_OneofUnmarshaler, _AdGroupCriterionOperation_OneofSizer, []interface{}{ + (*AdGroupCriterionOperation_Create)(nil), + (*AdGroupCriterionOperation_Update)(nil), + (*AdGroupCriterionOperation_Remove)(nil), + } +} + +func _AdGroupCriterionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupCriterionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupCriterionOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupCriterionOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupCriterionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupCriterionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupCriterionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupCriterion) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupCriterionOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupCriterion) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupCriterionOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupCriterionOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupCriterionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupCriterionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterionOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupCriterionOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group criterion mutate. +type MutateAdGroupCriteriaResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupCriterionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriteriaResponse) Reset() { *m = MutateAdGroupCriteriaResponse{} } +func (m *MutateAdGroupCriteriaResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriteriaResponse) ProtoMessage() {} +func (*MutateAdGroupCriteriaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4, []int{3} +} +func (m *MutateAdGroupCriteriaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriteriaResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupCriteriaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriteriaResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriteriaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriteriaResponse.Merge(dst, src) +} +func (m *MutateAdGroupCriteriaResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriteriaResponse.Size(m) +} +func (m *MutateAdGroupCriteriaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriteriaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriteriaResponse proto.InternalMessageInfo + +func (m *MutateAdGroupCriteriaResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupCriteriaResponse) GetResults() []*MutateAdGroupCriterionResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the criterion mutate. +type MutateAdGroupCriterionResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupCriterionResult) Reset() { *m = MutateAdGroupCriterionResult{} } +func (m *MutateAdGroupCriterionResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupCriterionResult) ProtoMessage() {} +func (*MutateAdGroupCriterionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4, []int{4} +} +func (m *MutateAdGroupCriterionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupCriterionResult.Unmarshal(m, b) +} +func (m *MutateAdGroupCriterionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupCriterionResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupCriterionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupCriterionResult.Merge(dst, src) +} +func (m *MutateAdGroupCriterionResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupCriterionResult.Size(m) +} +func (m *MutateAdGroupCriterionResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupCriterionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupCriterionResult proto.InternalMessageInfo + +func (m *MutateAdGroupCriterionResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupCriterionRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupCriterionRequest") + proto.RegisterType((*MutateAdGroupCriteriaRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriteriaRequest") + proto.RegisterType((*AdGroupCriterionOperation)(nil), "google.ads.googleads.v1.services.AdGroupCriterionOperation") + proto.RegisterType((*MutateAdGroupCriteriaResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriteriaResponse") + proto.RegisterType((*MutateAdGroupCriterionResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupCriterionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupCriterionServiceClient is the client API for AdGroupCriterionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupCriterionServiceClient interface { + // Returns the requested criterion in full detail. + GetAdGroupCriterion(ctx context.Context, in *GetAdGroupCriterionRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterion, error) + // Creates, updates, or removes criteria. Operation statuses are returned. 
+ MutateAdGroupCriteria(ctx context.Context, in *MutateAdGroupCriteriaRequest, opts ...grpc.CallOption) (*MutateAdGroupCriteriaResponse, error) +} + +type adGroupCriterionServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupCriterionServiceClient(cc *grpc.ClientConn) AdGroupCriterionServiceClient { + return &adGroupCriterionServiceClient{cc} +} + +func (c *adGroupCriterionServiceClient) GetAdGroupCriterion(ctx context.Context, in *GetAdGroupCriterionRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterion, error) { + out := new(resources.AdGroupCriterion) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupCriterionService/GetAdGroupCriterion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupCriterionServiceClient) MutateAdGroupCriteria(ctx context.Context, in *MutateAdGroupCriteriaRequest, opts ...grpc.CallOption) (*MutateAdGroupCriteriaResponse, error) { + out := new(MutateAdGroupCriteriaResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupCriterionService/MutateAdGroupCriteria", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupCriterionServiceServer is the server API for AdGroupCriterionService service. +type AdGroupCriterionServiceServer interface { + // Returns the requested criterion in full detail. + GetAdGroupCriterion(context.Context, *GetAdGroupCriterionRequest) (*resources.AdGroupCriterion, error) + // Creates, updates, or removes criteria. Operation statuses are returned. + MutateAdGroupCriteria(context.Context, *MutateAdGroupCriteriaRequest) (*MutateAdGroupCriteriaResponse, error) +} + +func RegisterAdGroupCriterionServiceServer(s *grpc.Server, srv AdGroupCriterionServiceServer) { + s.RegisterService(&_AdGroupCriterionService_serviceDesc, srv) +} + +func _AdGroupCriterionService_GetAdGroupCriterion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupCriterionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupCriterionServiceServer).GetAdGroupCriterion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupCriterionService/GetAdGroupCriterion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupCriterionServiceServer).GetAdGroupCriterion(ctx, req.(*GetAdGroupCriterionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupCriterionService_MutateAdGroupCriteria_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupCriteriaRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupCriterionServiceServer).MutateAdGroupCriteria(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupCriterionService/MutateAdGroupCriteria", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupCriterionServiceServer).MutateAdGroupCriteria(ctx, req.(*MutateAdGroupCriteriaRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupCriterionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupCriterionService", + 
HandlerType: (*AdGroupCriterionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupCriterion", + Handler: _AdGroupCriterionService_GetAdGroupCriterion_Handler, + }, + { + MethodName: "MutateAdGroupCriteria", + Handler: _AdGroupCriterionService_MutateAdGroupCriteria_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_criterion_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_criterion_service.proto", fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4) +} + +var fileDescriptor_ad_group_criterion_service_c15c43b9a0ae56f4 = []byte{ + // 782 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x41, 0x6f, 0xd3, 0x3c, + 0x18, 0xc7, 0xdf, 0xb4, 0x2f, 0x83, 0xb9, 0x03, 0x24, 0x4f, 0xd3, 0xba, 0xaa, 0x40, 0x15, 0x26, + 0x31, 0x15, 0x29, 0x51, 0x3b, 0x2e, 0xcb, 0x18, 0x53, 0x36, 0xb1, 0x0e, 0xa1, 0xb1, 0x29, 0x93, + 0x2a, 0x04, 0x95, 0x22, 0x2f, 0xf1, 0xa2, 0x68, 0x49, 0x1c, 0x6c, 0xa7, 0x50, 0x4d, 0xbb, 0xc0, + 0x37, 0x80, 0x6f, 0xc0, 0x91, 0xaf, 0xc0, 0x09, 0x71, 0xe3, 0xca, 0x89, 0x3b, 0xa7, 0x7d, 0x0a, + 0x94, 0x38, 0xae, 0xd6, 0xae, 0x51, 0xd1, 0xb8, 0x39, 0xf6, 0xff, 0xf9, 0xe5, 0x79, 0xf2, 0x7f, + 0xfc, 0x04, 0x98, 0x1e, 0x21, 0x5e, 0x80, 0x75, 0xe4, 0x32, 0x5d, 0x2c, 0xd3, 0x55, 0xbf, 0xa5, + 0x33, 0x4c, 0xfb, 0xbe, 0x83, 0x99, 0x8e, 0x5c, 0xdb, 0xa3, 0x24, 0x89, 0x6d, 0x87, 0xfa, 0x1c, + 0x53, 0x9f, 0x44, 0x76, 0x7e, 0xa6, 0xc5, 0x94, 0x70, 0x02, 0x1b, 0x22, 0x4e, 0x43, 0x2e, 0xd3, + 0x86, 0x08, 0xad, 0xdf, 0xd2, 0x24, 0xa2, 0xf6, 0xb0, 0xe8, 0x25, 0x0e, 0x09, 0x43, 0x12, 0xe9, + 0x31, 0x09, 0x7c, 0x67, 0x20, 0x70, 0x35, 0xa3, 0x48, 0x4c, 0x31, 0x23, 0x09, 0x9d, 0x9c, 0x52, + 0x1e, 0x5b, 0x97, 0xb1, 0xb1, 0xaf, 0xa3, 0x28, 0x22, 0x1c, 0x71, 0x9f, 0x44, 0x2c, 0x3f, 0xcd, + 0x13, 0xd5, 0xb3, 0xa7, 0xa3, 0xe4, 0x58, 0x3f, 0xf6, 0x71, 0xe0, 0xda, 0x21, 0x62, 0x27, 0xb9, + 0xe2, 0xee, 0xb8, 0xe2, 0x2d, 0x45, 0x71, 0x8c, 0xa9, 0x24, 0x2c, 0xe6, 0xe7, 0x34, 0x76, 0x74, + 0xc6, 0x11, 0x4f, 0xf2, 0x03, 0xd5, 0x04, 0xb5, 0x0e, 0xe6, 0xa6, 0xdb, 0x49, 0xd3, 0xda, 0x96, + 0x59, 0x59, 0xf8, 0x4d, 0x82, 0x19, 0x87, 0xf7, 0xc1, 0x4d, 0x99, 0xbc, 0x1d, 0xa1, 0x10, 0x57, + 0x95, 0x86, 0xb2, 0x32, 0x6b, 0xcd, 0xc9, 0xcd, 0x17, 0x28, 0xc4, 0xea, 0xb9, 0x02, 0xea, 0x7b, + 0x09, 0x47, 0x1c, 0x8f, 0x62, 0x90, 0xa4, 0xdc, 0x03, 0x15, 0x27, 0x61, 0x9c, 0x84, 0x98, 0xda, + 0xbe, 0x9b, 0x33, 0x80, 0xdc, 0x7a, 0xe6, 0xc2, 0xd7, 0x00, 0x90, 0x18, 0x53, 0x51, 0x73, 0xb5, + 0xd4, 0x28, 0xaf, 0x54, 0xda, 0xeb, 0xda, 0x34, 0x77, 0xb4, 0xf1, 0xac, 0xf7, 0x25, 0xc3, 0xba, + 0x80, 0x83, 0x0f, 0xc0, 0xed, 0x18, 0x51, 0xee, 0xa3, 0xc0, 0x3e, 0x46, 0x7e, 0x90, 0x50, 0x5c, + 0x2d, 0x37, 0x94, 0x95, 0x1b, 0xd6, 0xad, 0x7c, 0x7b, 0x47, 0xec, 0xa6, 0xc5, 0xf6, 0x51, 0xe0, + 0xbb, 0x88, 0x63, 0x9b, 0x44, 0xc1, 0xa0, 0xfa, 0x7f, 0x26, 0x9b, 0x93, 0x9b, 0xfb, 0x51, 0x30, + 0x50, 0x3f, 0x96, 0xc1, 0x52, 0xe1, 0x7b, 0xe1, 0x3a, 0xa8, 0x24, 0x71, 0x06, 0x48, 0xbd, 0xc9, + 0x00, 0x95, 0x76, 0x4d, 0x56, 0x22, 0xcd, 0xd1, 0x76, 0x52, 0xfb, 0xf6, 0x10, 0x3b, 0xb1, 0x80, + 0x90, 0xa7, 0x6b, 0xc8, 0x40, 0x1d, 0xbf, 0xc3, 0x61, 0xcc, 0x6d, 0xd1, 0x56, 0x76, 0xdf, 0x27, + 0x41, 0xc6, 0xb5, 0x4f, 0xf0, 0x80, 0x55, 0xaf, 0x65, 0xdf, 0xa5, 0x5d, 0xf8, 0x5d, 0x44, 0x4f, + 0x6a, 0x07, 0x59, 0x70, 0x57, 0xc6, 0x3e, 0xc7, 0x03, 0x6b, 0x49, 0x70, 0x2f, 0x9f, 0x30, 0xb8, + 0x07, 0x66, 0x1c, 0x8a, 0x11, 0x17, 0xd6, 0x56, 0xda, 0xab, 0x85, 0xf8, 0x61, 0x17, 
0x5f, 0xfa, + 0xee, 0xbb, 0xff, 0x59, 0x39, 0x24, 0xc5, 0x89, 0x8a, 0xaa, 0xa5, 0x7f, 0xc2, 0x09, 0x08, 0xac, + 0x82, 0x19, 0x8a, 0x43, 0xd2, 0x17, 0x96, 0xcd, 0xa6, 0x27, 0xe2, 0x79, 0xab, 0x02, 0x66, 0x87, + 0x1e, 0xab, 0xdf, 0x14, 0x70, 0xa7, 0xa0, 0x03, 0x59, 0x4c, 0x22, 0x86, 0xe1, 0x0e, 0x58, 0x18, + 0x6b, 0x02, 0x1b, 0x53, 0x4a, 0x68, 0xc6, 0xad, 0xb4, 0xa1, 0x4c, 0x93, 0xc6, 0x8e, 0x76, 0x98, + 0xdd, 0x0f, 0x6b, 0x7e, 0xb4, 0x3d, 0x9e, 0xa6, 0x72, 0xf8, 0x12, 0x5c, 0xa7, 0x98, 0x25, 0x01, + 0x97, 0x6d, 0xfa, 0x64, 0x7a, 0x9b, 0x4e, 0xca, 0x2c, 0xbd, 0x62, 0x29, 0xc6, 0x92, 0x38, 0x75, + 0x7b, 0xf2, 0x25, 0x92, 0xc2, 0xbf, 0xba, 0x8a, 0xed, 0xaf, 0x65, 0xb0, 0x38, 0x1e, 0x7f, 0x28, + 0xf2, 0x80, 0xdf, 0x15, 0x30, 0x3f, 0xe1, 0xaa, 0xc3, 0xc7, 0xd3, 0x2b, 0x28, 0x9e, 0x10, 0xb5, + 0xab, 0x18, 0xac, 0xae, 0xbd, 0xff, 0xf9, 0xfb, 0x53, 0x69, 0x15, 0xb6, 0xd2, 0xe9, 0x78, 0x3a, + 0x52, 0xd6, 0x86, 0x1c, 0x0b, 0x4c, 0x6f, 0xea, 0x68, 0xd4, 0x4d, 0xbd, 0x79, 0x06, 0x7f, 0x29, + 0x60, 0x61, 0xa2, 0xd5, 0xf0, 0x8a, 0x4e, 0xc8, 0x29, 0x55, 0xdb, 0xbc, 0x72, 0xbc, 0xe8, 0x31, + 0x75, 0x33, 0xab, 0x6a, 0x4d, 0x7d, 0x94, 0xfd, 0x20, 0x86, 0x65, 0x9c, 0x5e, 0x98, 0x7d, 0x1b, + 0xcd, 0xb3, 0xf1, 0xa2, 0x8c, 0x30, 0x83, 0x1a, 0x4a, 0x73, 0xeb, 0x43, 0x09, 0x2c, 0x3b, 0x24, + 0x9c, 0x9a, 0xc7, 0x56, 0xbd, 0xc0, 0xe3, 0x83, 0x74, 0xc0, 0x1c, 0x28, 0xaf, 0x76, 0x73, 0x82, + 0x47, 0x02, 0x14, 0x79, 0x1a, 0xa1, 0x9e, 0xee, 0xe1, 0x28, 0x1b, 0x3f, 0xf2, 0xcf, 0x14, 0xfb, + 0xac, 0xf8, 0xd7, 0xb9, 0x2e, 0x17, 0x9f, 0x4b, 0xe5, 0x8e, 0x69, 0x7e, 0x29, 0x35, 0x3a, 0x02, + 0x68, 0xba, 0x4c, 0x13, 0xcb, 0x74, 0xd5, 0x6d, 0x69, 0xf9, 0x8b, 0xd9, 0x0f, 0x29, 0xe9, 0x99, + 0x2e, 0xeb, 0x0d, 0x25, 0xbd, 0x6e, 0xab, 0x27, 0x25, 0xe7, 0xa5, 0x65, 0xb1, 0x6f, 0x18, 0xa6, + 0xcb, 0x0c, 0x63, 0x28, 0x32, 0x8c, 0x6e, 0xcb, 0x30, 0xa4, 0xec, 0x68, 0x26, 0xcb, 0x73, 0xf5, + 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb3, 0xfb, 0x3f, 0xae, 0xe1, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_simulation_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_simulation_service.pb.go new file mode 100644 index 0000000..42a1aaa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_criterion_simulation_service.pb.go @@ -0,0 +1,177 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_criterion_simulation_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [AdGroupCriterionSimulationService.GetAdGroupCriterionSimulation][google.ads.googleads.v1.services.AdGroupCriterionSimulationService.GetAdGroupCriterionSimulation]. +type GetAdGroupCriterionSimulationRequest struct { + // The resource name of the ad group criterion simulation to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupCriterionSimulationRequest) Reset() { *m = GetAdGroupCriterionSimulationRequest{} } +func (m *GetAdGroupCriterionSimulationRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupCriterionSimulationRequest) ProtoMessage() {} +func (*GetAdGroupCriterionSimulationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_criterion_simulation_service_1f923f60c32b966e, []int{0} +} +func (m *GetAdGroupCriterionSimulationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupCriterionSimulationRequest.Unmarshal(m, b) +} +func (m *GetAdGroupCriterionSimulationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupCriterionSimulationRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupCriterionSimulationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupCriterionSimulationRequest.Merge(dst, src) +} +func (m *GetAdGroupCriterionSimulationRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupCriterionSimulationRequest.Size(m) +} +func (m *GetAdGroupCriterionSimulationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupCriterionSimulationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupCriterionSimulationRequest proto.InternalMessageInfo + +func (m *GetAdGroupCriterionSimulationRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupCriterionSimulationRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupCriterionSimulationRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupCriterionSimulationServiceClient is the client API for AdGroupCriterionSimulationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupCriterionSimulationServiceClient interface { + // Returns the requested ad group criterion simulation in full detail. 
+ GetAdGroupCriterionSimulation(ctx context.Context, in *GetAdGroupCriterionSimulationRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterionSimulation, error) +} + +type adGroupCriterionSimulationServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupCriterionSimulationServiceClient(cc *grpc.ClientConn) AdGroupCriterionSimulationServiceClient { + return &adGroupCriterionSimulationServiceClient{cc} +} + +func (c *adGroupCriterionSimulationServiceClient) GetAdGroupCriterionSimulation(ctx context.Context, in *GetAdGroupCriterionSimulationRequest, opts ...grpc.CallOption) (*resources.AdGroupCriterionSimulation, error) { + out := new(resources.AdGroupCriterionSimulation) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupCriterionSimulationService/GetAdGroupCriterionSimulation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupCriterionSimulationServiceServer is the server API for AdGroupCriterionSimulationService service. +type AdGroupCriterionSimulationServiceServer interface { + // Returns the requested ad group criterion simulation in full detail. + GetAdGroupCriterionSimulation(context.Context, *GetAdGroupCriterionSimulationRequest) (*resources.AdGroupCriterionSimulation, error) +} + +func RegisterAdGroupCriterionSimulationServiceServer(s *grpc.Server, srv AdGroupCriterionSimulationServiceServer) { + s.RegisterService(&_AdGroupCriterionSimulationService_serviceDesc, srv) +} + +func _AdGroupCriterionSimulationService_GetAdGroupCriterionSimulation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupCriterionSimulationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupCriterionSimulationServiceServer).GetAdGroupCriterionSimulation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupCriterionSimulationService/GetAdGroupCriterionSimulation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupCriterionSimulationServiceServer).GetAdGroupCriterionSimulation(ctx, req.(*GetAdGroupCriterionSimulationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupCriterionSimulationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupCriterionSimulationService", + HandlerType: (*AdGroupCriterionSimulationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupCriterionSimulation", + Handler: _AdGroupCriterionSimulationService_GetAdGroupCriterionSimulation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_criterion_simulation_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_criterion_simulation_service.proto", fileDescriptor_ad_group_criterion_simulation_service_1f923f60c32b966e) +} + +var fileDescriptor_ad_group_criterion_simulation_service_1f923f60c32b966e = []byte{ + // 373 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x4a, 0xc3, 0x40, + 0x1c, 0xc6, 0x49, 0x04, 0xc1, 0xa0, 0x4b, 0x26, 0x29, 0x0a, 0xb5, 0x16, 0x91, 0x0e, 0x77, 0x44, + 0xb7, 0x93, 0x22, 0xa9, 0xd4, 0x08, 0x8a, 0x94, 0x16, 0x3a, 0x48, 0x20, 0x9c, 0xc9, 0x11, 0x02, + 0x4d, 0x2e, 0xde, 0xff, 0xd2, 0x45, 0x5c, 0xdc, 0x9d, 
0xfa, 0x06, 0x8e, 0x3e, 0x8a, 0xab, 0xaf, + 0xe0, 0x24, 0xf8, 0x0e, 0x92, 0x5c, 0x2f, 0xe0, 0x10, 0xe3, 0xf6, 0x71, 0xf7, 0xf1, 0xfb, 0xee, + 0xff, 0xdd, 0xdf, 0xba, 0x89, 0x39, 0x8f, 0x17, 0x0c, 0xd3, 0x08, 0xb0, 0x92, 0xa5, 0x5a, 0x3a, + 0x18, 0x98, 0x58, 0x26, 0x21, 0x03, 0x4c, 0xa3, 0x20, 0x16, 0xbc, 0xc8, 0x83, 0x50, 0x24, 0x92, + 0x89, 0x84, 0x67, 0x01, 0x24, 0x69, 0xb1, 0xa0, 0xb2, 0x92, 0xca, 0x86, 0x72, 0xc1, 0x25, 0xb7, + 0xbb, 0x0a, 0x81, 0x68, 0x04, 0xa8, 0xa6, 0xa1, 0xa5, 0x83, 0x34, 0xad, 0x33, 0x6e, 0xca, 0x13, + 0x0c, 0x78, 0x21, 0x5a, 0x03, 0x55, 0x50, 0x67, 0x4f, 0x63, 0xf2, 0x04, 0xd3, 0x2c, 0xe3, 0xb2, + 0xba, 0x04, 0x75, 0xdb, 0xbb, 0xb6, 0xfa, 0x1e, 0x93, 0x6e, 0xe4, 0x95, 0x98, 0x0b, 0x4d, 0x99, + 0xd5, 0x90, 0x29, 0x7b, 0x28, 0x18, 0x48, 0xfb, 0xd0, 0xda, 0xd1, 0xb1, 0x41, 0x46, 0x53, 0xb6, + 0x6b, 0x74, 0x8d, 0xe3, 0xad, 0xe9, 0xb6, 0x3e, 0xbc, 0xa5, 0x29, 0x3b, 0x59, 0x99, 0xd6, 0x41, + 0x33, 0x6a, 0xa6, 0x06, 0xb3, 0xbf, 0x0d, 0x6b, 0xff, 0xcf, 0x4c, 0xfb, 0x12, 0xb5, 0x95, 0x83, + 0xfe, 0xf3, 0xe8, 0xce, 0xb0, 0x91, 0x53, 0x57, 0x88, 0x9a, 0x29, 0xbd, 0xf1, 0xf3, 0xc7, 0xe7, + 0xca, 0x3c, 0xb7, 0x87, 0x65, 0xe9, 0x8f, 0xbf, 0xc6, 0x1f, 0x86, 0x05, 0x48, 0x9e, 0x32, 0x01, + 0x78, 0x80, 0x69, 0x23, 0x02, 0xf0, 0xe0, 0x69, 0xf4, 0x62, 0x5a, 0xfd, 0x90, 0xa7, 0xad, 0x33, + 0x8d, 0x8e, 0x5a, 0xbb, 0x9b, 0x94, 0x7f, 0x36, 0x31, 0xee, 0xae, 0xd6, 0xac, 0x98, 0x2f, 0x68, + 0x16, 0x23, 0x2e, 0x62, 0x1c, 0xb3, 0xac, 0xfa, 0x51, 0xbd, 0x2a, 0x79, 0x02, 0xcd, 0x9b, 0x7a, + 0xa6, 0xc5, 0xab, 0xb9, 0xe1, 0xb9, 0xee, 0x9b, 0xd9, 0xf5, 0x14, 0xd0, 0x8d, 0x00, 0x29, 0x59, + 0xaa, 0xb9, 0x83, 0xd6, 0xc1, 0xf0, 0xae, 0x2d, 0xbe, 0x1b, 0x81, 0x5f, 0x5b, 0xfc, 0xb9, 0xe3, + 0x6b, 0xcb, 0x97, 0xd9, 0x57, 0xe7, 0x84, 0xb8, 0x11, 0x10, 0x52, 0x9b, 0x08, 0x99, 0x3b, 0x84, + 0x68, 0xdb, 0xfd, 0x66, 0xf5, 0xce, 0xd3, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x43, 0x68, + 0x5b, 0x50, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_extension_setting_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_extension_setting_service.pb.go new file mode 100644 index 0000000..6892705 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_extension_setting_service.pb.go @@ -0,0 +1,597 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_extension_setting_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [AdGroupExtensionSettingService.GetAdGroupExtensionSetting][google.ads.googleads.v1.services.AdGroupExtensionSettingService.GetAdGroupExtensionSetting]. +type GetAdGroupExtensionSettingRequest struct { + // The resource name of the ad group extension setting to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupExtensionSettingRequest) Reset() { *m = GetAdGroupExtensionSettingRequest{} } +func (m *GetAdGroupExtensionSettingRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupExtensionSettingRequest) ProtoMessage() {} +func (*GetAdGroupExtensionSettingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262, []int{0} +} +func (m *GetAdGroupExtensionSettingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupExtensionSettingRequest.Unmarshal(m, b) +} +func (m *GetAdGroupExtensionSettingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupExtensionSettingRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupExtensionSettingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupExtensionSettingRequest.Merge(dst, src) +} +func (m *GetAdGroupExtensionSettingRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupExtensionSettingRequest.Size(m) +} +func (m *GetAdGroupExtensionSettingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupExtensionSettingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupExtensionSettingRequest proto.InternalMessageInfo + +func (m *GetAdGroupExtensionSettingRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [AdGroupExtensionSettingService.MutateAdGroupExtensionSettings][google.ads.googleads.v1.services.AdGroupExtensionSettingService.MutateAdGroupExtensionSettings]. +type MutateAdGroupExtensionSettingsRequest struct { + // The ID of the customer whose ad group extension settings are being + // modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ad group extension + // settings. + Operations []*AdGroupExtensionSettingOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupExtensionSettingsRequest) Reset() { *m = MutateAdGroupExtensionSettingsRequest{} } +func (m *MutateAdGroupExtensionSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupExtensionSettingsRequest) ProtoMessage() {} +func (*MutateAdGroupExtensionSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262, []int{1} +} +func (m *MutateAdGroupExtensionSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupExtensionSettingsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupExtensionSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupExtensionSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupExtensionSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupExtensionSettingsRequest.Merge(dst, src) +} +func (m *MutateAdGroupExtensionSettingsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupExtensionSettingsRequest.Size(m) +} +func (m *MutateAdGroupExtensionSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupExtensionSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupExtensionSettingsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupExtensionSettingsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupExtensionSettingsRequest) GetOperations() []*AdGroupExtensionSettingOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupExtensionSettingsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupExtensionSettingsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an ad group extension setting. +type AdGroupExtensionSettingOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupExtensionSettingOperation_Create + // *AdGroupExtensionSettingOperation_Update + // *AdGroupExtensionSettingOperation_Remove + Operation isAdGroupExtensionSettingOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupExtensionSettingOperation) Reset() { *m = AdGroupExtensionSettingOperation{} } +func (m *AdGroupExtensionSettingOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupExtensionSettingOperation) ProtoMessage() {} +func (*AdGroupExtensionSettingOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262, []int{2} +} +func (m *AdGroupExtensionSettingOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupExtensionSettingOperation.Unmarshal(m, b) +} +func (m *AdGroupExtensionSettingOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupExtensionSettingOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupExtensionSettingOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupExtensionSettingOperation.Merge(dst, src) +} +func (m *AdGroupExtensionSettingOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupExtensionSettingOperation.Size(m) +} +func (m *AdGroupExtensionSettingOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupExtensionSettingOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupExtensionSettingOperation proto.InternalMessageInfo + +func (m *AdGroupExtensionSettingOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAdGroupExtensionSettingOperation_Operation interface { + isAdGroupExtensionSettingOperation_Operation() +} + +type AdGroupExtensionSettingOperation_Create struct { + Create *resources.AdGroupExtensionSetting `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupExtensionSettingOperation_Update struct { + Update *resources.AdGroupExtensionSetting `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupExtensionSettingOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupExtensionSettingOperation_Create) isAdGroupExtensionSettingOperation_Operation() {} + +func (*AdGroupExtensionSettingOperation_Update) isAdGroupExtensionSettingOperation_Operation() {} + +func (*AdGroupExtensionSettingOperation_Remove) isAdGroupExtensionSettingOperation_Operation() {} + +func (m *AdGroupExtensionSettingOperation) GetOperation() isAdGroupExtensionSettingOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupExtensionSettingOperation) GetCreate() *resources.AdGroupExtensionSetting { + if x, ok := m.GetOperation().(*AdGroupExtensionSettingOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupExtensionSettingOperation) GetUpdate() *resources.AdGroupExtensionSetting { + if x, ok := m.GetOperation().(*AdGroupExtensionSettingOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupExtensionSettingOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupExtensionSettingOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupExtensionSettingOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupExtensionSettingOperation_OneofMarshaler, _AdGroupExtensionSettingOperation_OneofUnmarshaler, _AdGroupExtensionSettingOperation_OneofSizer, []interface{}{ + (*AdGroupExtensionSettingOperation_Create)(nil), + (*AdGroupExtensionSettingOperation_Update)(nil), + (*AdGroupExtensionSettingOperation_Remove)(nil), + } +} + +func _AdGroupExtensionSettingOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupExtensionSettingOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupExtensionSettingOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupExtensionSettingOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupExtensionSettingOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupExtensionSettingOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupExtensionSettingOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupExtensionSettingOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupExtensionSettingOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupExtensionSettingOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupExtensionSettingOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupExtensionSettingOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupExtensionSettingOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupExtensionSettingOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group extension setting mutate. +type MutateAdGroupExtensionSettingsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. 
+ PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. + Results []*MutateAdGroupExtensionSettingResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupExtensionSettingsResponse) Reset() { + *m = MutateAdGroupExtensionSettingsResponse{} +} +func (m *MutateAdGroupExtensionSettingsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupExtensionSettingsResponse) ProtoMessage() {} +func (*MutateAdGroupExtensionSettingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262, []int{3} +} +func (m *MutateAdGroupExtensionSettingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupExtensionSettingsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupExtensionSettingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupExtensionSettingsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupExtensionSettingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupExtensionSettingsResponse.Merge(dst, src) +} +func (m *MutateAdGroupExtensionSettingsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupExtensionSettingsResponse.Size(m) +} +func (m *MutateAdGroupExtensionSettingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupExtensionSettingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupExtensionSettingsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupExtensionSettingsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupExtensionSettingsResponse) GetResults() []*MutateAdGroupExtensionSettingResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the ad group extension setting mutate. +type MutateAdGroupExtensionSettingResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupExtensionSettingResult) Reset() { *m = MutateAdGroupExtensionSettingResult{} } +func (m *MutateAdGroupExtensionSettingResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupExtensionSettingResult) ProtoMessage() {} +func (*MutateAdGroupExtensionSettingResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262, []int{4} +} +func (m *MutateAdGroupExtensionSettingResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupExtensionSettingResult.Unmarshal(m, b) +} +func (m *MutateAdGroupExtensionSettingResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupExtensionSettingResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupExtensionSettingResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupExtensionSettingResult.Merge(dst, src) +} +func (m *MutateAdGroupExtensionSettingResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupExtensionSettingResult.Size(m) +} +func (m *MutateAdGroupExtensionSettingResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupExtensionSettingResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupExtensionSettingResult proto.InternalMessageInfo + +func (m *MutateAdGroupExtensionSettingResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupExtensionSettingRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupExtensionSettingRequest") + proto.RegisterType((*MutateAdGroupExtensionSettingsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupExtensionSettingsRequest") + proto.RegisterType((*AdGroupExtensionSettingOperation)(nil), "google.ads.googleads.v1.services.AdGroupExtensionSettingOperation") + proto.RegisterType((*MutateAdGroupExtensionSettingsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupExtensionSettingsResponse") + proto.RegisterType((*MutateAdGroupExtensionSettingResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupExtensionSettingResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupExtensionSettingServiceClient is the client API for AdGroupExtensionSettingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupExtensionSettingServiceClient interface { + // Returns the requested ad group extension setting in full detail. + GetAdGroupExtensionSetting(ctx context.Context, in *GetAdGroupExtensionSettingRequest, opts ...grpc.CallOption) (*resources.AdGroupExtensionSetting, error) + // Creates, updates, or removes ad group extension settings. Operation + // statuses are returned. 
+ MutateAdGroupExtensionSettings(ctx context.Context, in *MutateAdGroupExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateAdGroupExtensionSettingsResponse, error) +} + +type adGroupExtensionSettingServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupExtensionSettingServiceClient(cc *grpc.ClientConn) AdGroupExtensionSettingServiceClient { + return &adGroupExtensionSettingServiceClient{cc} +} + +func (c *adGroupExtensionSettingServiceClient) GetAdGroupExtensionSetting(ctx context.Context, in *GetAdGroupExtensionSettingRequest, opts ...grpc.CallOption) (*resources.AdGroupExtensionSetting, error) { + out := new(resources.AdGroupExtensionSetting) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupExtensionSettingService/GetAdGroupExtensionSetting", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupExtensionSettingServiceClient) MutateAdGroupExtensionSettings(ctx context.Context, in *MutateAdGroupExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateAdGroupExtensionSettingsResponse, error) { + out := new(MutateAdGroupExtensionSettingsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupExtensionSettingService/MutateAdGroupExtensionSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupExtensionSettingServiceServer is the server API for AdGroupExtensionSettingService service. +type AdGroupExtensionSettingServiceServer interface { + // Returns the requested ad group extension setting in full detail. + GetAdGroupExtensionSetting(context.Context, *GetAdGroupExtensionSettingRequest) (*resources.AdGroupExtensionSetting, error) + // Creates, updates, or removes ad group extension settings. Operation + // statuses are returned. 
+ MutateAdGroupExtensionSettings(context.Context, *MutateAdGroupExtensionSettingsRequest) (*MutateAdGroupExtensionSettingsResponse, error) +} + +func RegisterAdGroupExtensionSettingServiceServer(s *grpc.Server, srv AdGroupExtensionSettingServiceServer) { + s.RegisterService(&_AdGroupExtensionSettingService_serviceDesc, srv) +} + +func _AdGroupExtensionSettingService_GetAdGroupExtensionSetting_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupExtensionSettingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupExtensionSettingServiceServer).GetAdGroupExtensionSetting(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupExtensionSettingService/GetAdGroupExtensionSetting", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupExtensionSettingServiceServer).GetAdGroupExtensionSetting(ctx, req.(*GetAdGroupExtensionSettingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupExtensionSettingService_MutateAdGroupExtensionSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupExtensionSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupExtensionSettingServiceServer).MutateAdGroupExtensionSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupExtensionSettingService/MutateAdGroupExtensionSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupExtensionSettingServiceServer).MutateAdGroupExtensionSettings(ctx, req.(*MutateAdGroupExtensionSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupExtensionSettingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupExtensionSettingService", + HandlerType: (*AdGroupExtensionSettingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupExtensionSetting", + Handler: _AdGroupExtensionSettingService_GetAdGroupExtensionSetting_Handler, + }, + { + MethodName: "MutateAdGroupExtensionSettings", + Handler: _AdGroupExtensionSettingService_MutateAdGroupExtensionSettings_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_extension_setting_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_extension_setting_service.proto", fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262) +} + +var fileDescriptor_ad_group_extension_setting_service_fd5e4f4a8f156262 = []byte{ + // 729 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0xc1, 0x6b, 0xd4, 0x4e, + 0x14, 0xc7, 0x7f, 0x49, 0x4b, 0x7f, 0x76, 0x52, 0x15, 0x46, 0xc4, 0x65, 0x91, 0xba, 0xa6, 0x55, + 0xcb, 0x1e, 0x12, 0x76, 0xbd, 0xa5, 0xf6, 0xb0, 0x91, 0x76, 0xb7, 0x42, 0x6d, 0x49, 0xa5, 0x07, + 0x59, 0x08, 0xd3, 0xcd, 0x34, 0x84, 0x26, 0x99, 0x38, 0x33, 0x59, 0x2d, 0xa5, 0x17, 0xf1, 0xe4, + 0xd5, 0xff, 0xc0, 0xa3, 0x47, 0xff, 0x0c, 0x6f, 0xe2, 0x5f, 0x20, 0x78, 0xf2, 0x4f, 0x10, 0x04, + 0x99, 0x4c, 0x66, 0x6d, 0x0b, 0xd9, 0x14, 0xd6, 0xdb, 
0xdb, 0x37, 0xdf, 0xfd, 0xbc, 0xf7, 0xe6, + 0xbd, 0x79, 0x01, 0xdb, 0x21, 0x21, 0x61, 0x8c, 0x6d, 0x14, 0x30, 0x5b, 0x9a, 0xc2, 0x1a, 0x77, + 0x6c, 0x86, 0xe9, 0x38, 0x1a, 0x61, 0x66, 0xa3, 0xc0, 0x0f, 0x29, 0xc9, 0x33, 0x1f, 0xbf, 0xe1, + 0x38, 0x65, 0x11, 0x49, 0x7d, 0x86, 0x39, 0x8f, 0xd2, 0xd0, 0x2f, 0x35, 0x56, 0x46, 0x09, 0x27, + 0xb0, 0x25, 0xff, 0x6f, 0xa1, 0x80, 0x59, 0x13, 0x94, 0x35, 0xee, 0x58, 0x0a, 0xd5, 0x74, 0xab, + 0x82, 0x51, 0xcc, 0x48, 0x4e, 0xa7, 0x47, 0x93, 0x51, 0x9a, 0x77, 0x15, 0x23, 0x8b, 0x6c, 0x94, + 0xa6, 0x84, 0x23, 0x1e, 0x91, 0x94, 0x95, 0xa7, 0x65, 0x0e, 0x76, 0xf1, 0xeb, 0x30, 0x3f, 0xb2, + 0x8f, 0x22, 0x1c, 0x07, 0x7e, 0x82, 0xd8, 0x71, 0xa9, 0x58, 0xbe, 0xac, 0x78, 0x4d, 0x51, 0x96, + 0x61, 0xaa, 0x08, 0x77, 0xca, 0x73, 0x9a, 0x8d, 0x6c, 0xc6, 0x11, 0xcf, 0xcb, 0x03, 0x73, 0x00, + 0xee, 0xf7, 0x31, 0xef, 0x05, 0x7d, 0x91, 0xde, 0xa6, 0xca, 0x6e, 0x5f, 0x26, 0xe7, 0xe1, 0x57, + 0x39, 0x66, 0x1c, 0xae, 0x80, 0xeb, 0xaa, 0x16, 0x3f, 0x45, 0x09, 0x6e, 0x68, 0x2d, 0x6d, 0x6d, + 0xd1, 0x5b, 0x52, 0xce, 0xe7, 0x28, 0xc1, 0xe6, 0x2f, 0x0d, 0x3c, 0xd8, 0xc9, 0x39, 0xe2, 0xb8, + 0x82, 0xc6, 0x14, 0xee, 0x1e, 0x30, 0x46, 0x39, 0xe3, 0x24, 0xc1, 0xd4, 0x8f, 0x82, 0x12, 0x06, + 0x94, 0x6b, 0x3b, 0x80, 0x87, 0x00, 0x90, 0x0c, 0x53, 0x79, 0x07, 0x0d, 0xbd, 0x35, 0xb7, 0x66, + 0x74, 0x5d, 0xab, 0xae, 0x11, 0x56, 0x45, 0xdc, 0x5d, 0x85, 0xf2, 0xce, 0x51, 0xe1, 0x23, 0x70, + 0x33, 0x43, 0x94, 0x47, 0x28, 0xf6, 0x8f, 0x50, 0x14, 0xe7, 0x14, 0x37, 0xe6, 0x5a, 0xda, 0xda, + 0x35, 0xef, 0x46, 0xe9, 0xde, 0x92, 0x5e, 0x51, 0xfc, 0x18, 0xc5, 0x51, 0x80, 0x38, 0xf6, 0x49, + 0x1a, 0x9f, 0x34, 0xe6, 0x0b, 0xd9, 0x92, 0x72, 0xee, 0xa6, 0xf1, 0x89, 0xf9, 0x59, 0x07, 0xad, + 0xba, 0xf0, 0x70, 0x1d, 0x18, 0x79, 0x56, 0x70, 0x44, 0xe7, 0x0a, 0x8e, 0xd1, 0x6d, 0xaa, 0xba, + 0x54, 0xeb, 0xac, 0x2d, 0xd1, 0xdc, 0x1d, 0xc4, 0x8e, 0x3d, 0x20, 0xe5, 0xc2, 0x86, 0x2f, 0xc0, + 0xc2, 0x88, 0x62, 0xc4, 0xe5, 0xe5, 0x1b, 0x5d, 0xa7, 0xf2, 0x3e, 0x26, 0x63, 0x57, 0x75, 0x21, + 0x83, 0xff, 0xbc, 0x92, 0x25, 0xa8, 0x32, 0x46, 0x43, 0xff, 0x17, 0x54, 0xc9, 0x82, 0x0d, 0xb0, + 0x40, 0x71, 0x42, 0xc6, 0xf2, 0x4a, 0x17, 0xc5, 0x89, 0xfc, 0xed, 0x1a, 0x60, 0x71, 0xd2, 0x03, + 0xf3, 0xab, 0x06, 0x1e, 0xd6, 0x4d, 0x0c, 0xcb, 0x48, 0xca, 0x30, 0xdc, 0x02, 0xb7, 0x2f, 0x75, + 0xcb, 0xc7, 0x94, 0x12, 0x5a, 0x04, 0x30, 0xba, 0x50, 0xa5, 0x4d, 0xb3, 0x91, 0xb5, 0x5f, 0xcc, + 0xb7, 0x77, 0xeb, 0x62, 0x1f, 0x37, 0x85, 0x1c, 0xfa, 0xe0, 0x7f, 0x8a, 0x59, 0x1e, 0x73, 0x35, + 0x56, 0x9b, 0xf5, 0x63, 0x35, 0x35, 0x45, 0xaf, 0xa0, 0x79, 0x8a, 0x6a, 0x3e, 0x03, 0x2b, 0x57, + 0xd0, 0x5f, 0xe9, 0x45, 0x75, 0xdf, 0xcd, 0x83, 0xe5, 0x0a, 0xcc, 0xbe, 0x4c, 0x0e, 0x7e, 0xd7, + 0x40, 0xb3, 0xfa, 0xfd, 0xc2, 0xa7, 0xf5, 0xd5, 0xd5, 0xbe, 0xfe, 0xe6, 0x0c, 0x33, 0x61, 0xba, + 0x6f, 0xbf, 0xfd, 0xf8, 0xa0, 0x3f, 0x81, 0x8e, 0xd8, 0x87, 0xa7, 0x17, 0x4a, 0xde, 0x50, 0x0f, + 0x9e, 0xd9, 0x6d, 0x1b, 0x55, 0x0c, 0x80, 0xdd, 0x3e, 0x83, 0xbf, 0x35, 0xb0, 0x3c, 0x7d, 0x4c, + 0x60, 0x7f, 0xc6, 0x2e, 0xaa, 0xd5, 0xd4, 0x1c, 0xcc, 0x0e, 0x92, 0x13, 0x6b, 0x0e, 0x8a, 0xca, + 0x5d, 0x73, 0x43, 0x54, 0xfe, 0xb7, 0xd4, 0xd3, 0x73, 0x9b, 0x6f, 0xa3, 0x7d, 0x56, 0x59, 0xb8, + 0x93, 0x14, 0x61, 0x1c, 0xad, 0xed, 0xbe, 0xd7, 0xc1, 0xea, 0x88, 0x24, 0xb5, 0x99, 0xb9, 0x2b, + 0xd3, 0x87, 0x65, 0x4f, 0x2c, 0x98, 0x3d, 0xed, 0xe5, 0xa0, 0x04, 0x85, 0x24, 0x46, 0x69, 0x68, + 0x11, 0x1a, 0xda, 0x21, 0x4e, 0x8b, 0xf5, 0xa3, 0xbe, 0x5f, 0x59, 0xc4, 0xaa, 0xbf, 0x9d, 0xeb, + 0xca, 0xf8, 0xa8, 0xcf, 0xf5, 0x7b, 0xbd, 0x4f, 0x7a, 0xab, 0x2f, 0x81, 0xbd, 
0x80, 0x59, 0xd2, + 0x14, 0xd6, 0x41, 0xc7, 0x2a, 0x03, 0xb3, 0x2f, 0x4a, 0x32, 0xec, 0x05, 0x6c, 0x38, 0x91, 0x0c, + 0x0f, 0x3a, 0x43, 0x25, 0xf9, 0xa9, 0xaf, 0x4a, 0xbf, 0xe3, 0xf4, 0x02, 0xe6, 0x38, 0x13, 0x91, + 0xe3, 0x1c, 0x74, 0x1c, 0x47, 0xc9, 0x0e, 0x17, 0x8a, 0x3c, 0x1f, 0xff, 0x09, 0x00, 0x00, 0xff, + 0xff, 0xbb, 0x5c, 0x83, 0x6c, 0xe2, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_feed_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_feed_service.pb.go new file mode 100644 index 0000000..9f98957 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_feed_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_feed_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupFeedService.GetAdGroupFeed][google.ads.googleads.v1.services.AdGroupFeedService.GetAdGroupFeed]. +type GetAdGroupFeedRequest struct { + // The resource name of the ad group feed to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupFeedRequest) Reset() { *m = GetAdGroupFeedRequest{} } +func (m *GetAdGroupFeedRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupFeedRequest) ProtoMessage() {} +func (*GetAdGroupFeedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_service_c2934f1f20022fd1, []int{0} +} +func (m *GetAdGroupFeedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupFeedRequest.Unmarshal(m, b) +} +func (m *GetAdGroupFeedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupFeedRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupFeedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupFeedRequest.Merge(dst, src) +} +func (m *GetAdGroupFeedRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupFeedRequest.Size(m) +} +func (m *GetAdGroupFeedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupFeedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupFeedRequest proto.InternalMessageInfo + +func (m *GetAdGroupFeedRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupFeedService.MutateAdGroupFeeds][google.ads.googleads.v1.services.AdGroupFeedService.MutateAdGroupFeeds]. +type MutateAdGroupFeedsRequest struct { + // The ID of the customer whose ad group feeds are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ad group feeds. + Operations []*AdGroupFeedOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupFeedsRequest) Reset() { *m = MutateAdGroupFeedsRequest{} } +func (m *MutateAdGroupFeedsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupFeedsRequest) ProtoMessage() {} +func (*MutateAdGroupFeedsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_service_c2934f1f20022fd1, []int{1} +} +func (m *MutateAdGroupFeedsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupFeedsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupFeedsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupFeedsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupFeedsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupFeedsRequest.Merge(dst, src) +} +func (m *MutateAdGroupFeedsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupFeedsRequest.Size(m) +} +func (m *MutateAdGroupFeedsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupFeedsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupFeedsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupFeedsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupFeedsRequest) GetOperations() []*AdGroupFeedOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupFeedsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupFeedsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an ad group feed. +type AdGroupFeedOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupFeedOperation_Create + // *AdGroupFeedOperation_Update + // *AdGroupFeedOperation_Remove + Operation isAdGroupFeedOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupFeedOperation) Reset() { *m = AdGroupFeedOperation{} } +func (m *AdGroupFeedOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupFeedOperation) ProtoMessage() {} +func (*AdGroupFeedOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_service_c2934f1f20022fd1, []int{2} +} +func (m *AdGroupFeedOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupFeedOperation.Unmarshal(m, b) +} +func (m *AdGroupFeedOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupFeedOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupFeedOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupFeedOperation.Merge(dst, src) +} +func (m *AdGroupFeedOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupFeedOperation.Size(m) +} +func (m *AdGroupFeedOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupFeedOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupFeedOperation proto.InternalMessageInfo + +func (m *AdGroupFeedOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAdGroupFeedOperation_Operation interface { + isAdGroupFeedOperation_Operation() +} + +type AdGroupFeedOperation_Create struct { + Create *resources.AdGroupFeed `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupFeedOperation_Update struct { + Update *resources.AdGroupFeed `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupFeedOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupFeedOperation_Create) isAdGroupFeedOperation_Operation() {} + +func (*AdGroupFeedOperation_Update) isAdGroupFeedOperation_Operation() {} + +func (*AdGroupFeedOperation_Remove) isAdGroupFeedOperation_Operation() {} + +func (m *AdGroupFeedOperation) GetOperation() isAdGroupFeedOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupFeedOperation) GetCreate() *resources.AdGroupFeed { + if x, ok := m.GetOperation().(*AdGroupFeedOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupFeedOperation) GetUpdate() *resources.AdGroupFeed { + if x, ok := m.GetOperation().(*AdGroupFeedOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupFeedOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupFeedOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupFeedOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupFeedOperation_OneofMarshaler, _AdGroupFeedOperation_OneofUnmarshaler, _AdGroupFeedOperation_OneofSizer, []interface{}{ + (*AdGroupFeedOperation_Create)(nil), + (*AdGroupFeedOperation_Update)(nil), + (*AdGroupFeedOperation_Remove)(nil), + } +} + +func _AdGroupFeedOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupFeedOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupFeedOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupFeedOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupFeedOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupFeedOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupFeedOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupFeedOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupFeed) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupFeedOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupFeed) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupFeedOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupFeedOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupFeedOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupFeedOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupFeedOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupFeedOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupFeedOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group feed mutate. +type MutateAdGroupFeedsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupFeedResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupFeedsResponse) Reset() { *m = MutateAdGroupFeedsResponse{} } +func (m *MutateAdGroupFeedsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupFeedsResponse) ProtoMessage() {} +func (*MutateAdGroupFeedsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_service_c2934f1f20022fd1, []int{3} +} +func (m *MutateAdGroupFeedsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupFeedsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupFeedsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupFeedsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupFeedsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupFeedsResponse.Merge(dst, src) +} +func (m *MutateAdGroupFeedsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupFeedsResponse.Size(m) +} +func (m *MutateAdGroupFeedsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupFeedsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupFeedsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupFeedsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupFeedsResponse) GetResults() []*MutateAdGroupFeedResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the ad group feed mutate. +type MutateAdGroupFeedResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupFeedResult) Reset() { *m = MutateAdGroupFeedResult{} } +func (m *MutateAdGroupFeedResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupFeedResult) ProtoMessage() {} +func (*MutateAdGroupFeedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_feed_service_c2934f1f20022fd1, []int{4} +} +func (m *MutateAdGroupFeedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupFeedResult.Unmarshal(m, b) +} +func (m *MutateAdGroupFeedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupFeedResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupFeedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupFeedResult.Merge(dst, src) +} +func (m *MutateAdGroupFeedResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupFeedResult.Size(m) +} +func (m *MutateAdGroupFeedResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupFeedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupFeedResult proto.InternalMessageInfo + +func (m *MutateAdGroupFeedResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupFeedRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupFeedRequest") + proto.RegisterType((*MutateAdGroupFeedsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupFeedsRequest") + proto.RegisterType((*AdGroupFeedOperation)(nil), "google.ads.googleads.v1.services.AdGroupFeedOperation") + proto.RegisterType((*MutateAdGroupFeedsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupFeedsResponse") + proto.RegisterType((*MutateAdGroupFeedResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupFeedResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupFeedServiceClient is the client API for AdGroupFeedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupFeedServiceClient interface { + // Returns the requested ad group feed in full detail. + GetAdGroupFeed(ctx context.Context, in *GetAdGroupFeedRequest, opts ...grpc.CallOption) (*resources.AdGroupFeed, error) + // Creates, updates, or removes ad group feeds. Operation statuses are + // returned. + MutateAdGroupFeeds(ctx context.Context, in *MutateAdGroupFeedsRequest, opts ...grpc.CallOption) (*MutateAdGroupFeedsResponse, error) +} + +type adGroupFeedServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupFeedServiceClient(cc *grpc.ClientConn) AdGroupFeedServiceClient { + return &adGroupFeedServiceClient{cc} +} + +func (c *adGroupFeedServiceClient) GetAdGroupFeed(ctx context.Context, in *GetAdGroupFeedRequest, opts ...grpc.CallOption) (*resources.AdGroupFeed, error) { + out := new(resources.AdGroupFeed) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupFeedService/GetAdGroupFeed", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupFeedServiceClient) MutateAdGroupFeeds(ctx context.Context, in *MutateAdGroupFeedsRequest, opts ...grpc.CallOption) (*MutateAdGroupFeedsResponse, error) { + out := new(MutateAdGroupFeedsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupFeedService/MutateAdGroupFeeds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupFeedServiceServer is the server API for AdGroupFeedService service. +type AdGroupFeedServiceServer interface { + // Returns the requested ad group feed in full detail. + GetAdGroupFeed(context.Context, *GetAdGroupFeedRequest) (*resources.AdGroupFeed, error) + // Creates, updates, or removes ad group feeds. Operation statuses are + // returned. + MutateAdGroupFeeds(context.Context, *MutateAdGroupFeedsRequest) (*MutateAdGroupFeedsResponse, error) +} + +func RegisterAdGroupFeedServiceServer(s *grpc.Server, srv AdGroupFeedServiceServer) { + s.RegisterService(&_AdGroupFeedService_serviceDesc, srv) +} + +func _AdGroupFeedService_GetAdGroupFeed_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupFeedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupFeedServiceServer).GetAdGroupFeed(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupFeedService/GetAdGroupFeed", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupFeedServiceServer).GetAdGroupFeed(ctx, req.(*GetAdGroupFeedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupFeedService_MutateAdGroupFeeds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupFeedsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupFeedServiceServer).MutateAdGroupFeeds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupFeedService/MutateAdGroupFeeds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupFeedServiceServer).MutateAdGroupFeeds(ctx, req.(*MutateAdGroupFeedsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupFeedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupFeedService", + HandlerType: (*AdGroupFeedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupFeed", + Handler: _AdGroupFeedService_GetAdGroupFeed_Handler, + }, + { + MethodName: "MutateAdGroupFeeds", + Handler: _AdGroupFeedService_MutateAdGroupFeeds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_feed_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_feed_service.proto", fileDescriptor_ad_group_feed_service_c2934f1f20022fd1) +} + +var fileDescriptor_ad_group_feed_service_c2934f1f20022fd1 = []byte{ + // 715 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcd, 0x6e, 0xd3, 0x4a, + 0x14, 0xbe, 0x76, 0xae, 0x7a, 0x6f, 0xc7, 0xbd, 0xbd, 0xd2, 0x40, 0xd5, 0x10, 0x21, 0x88, 0x4c, + 0x25, 
0xaa, 0x2c, 0xc6, 0x24, 0x88, 0x22, 0xdc, 0x14, 0x29, 0x95, 0x48, 0xca, 0xa2, 0xb4, 0x72, + 0xa5, 0x2c, 0x50, 0x24, 0x6b, 0x1a, 0x4f, 0x2c, 0xab, 0xb6, 0xc7, 0xcc, 0x8c, 0x83, 0xaa, 0xaa, + 0x1b, 0x1e, 0x80, 0x0d, 0x1b, 0xd6, 0x2c, 0xd9, 0xb1, 0xe4, 0x15, 0x90, 0x58, 0xf1, 0x06, 0x88, + 0x15, 0x0f, 0x81, 0x90, 0x3d, 0x9e, 0xe0, 0x34, 0x8d, 0x02, 0xdd, 0x9d, 0x9c, 0xf9, 0xbe, 0xef, + 0xfc, 0x3b, 0xa0, 0xed, 0x53, 0xea, 0x87, 0xc4, 0xc2, 0x1e, 0xb7, 0xa4, 0x99, 0x59, 0xe3, 0xa6, + 0xc5, 0x09, 0x1b, 0x07, 0x43, 0xc2, 0x2d, 0xec, 0xb9, 0x3e, 0xa3, 0x69, 0xe2, 0x8e, 0x08, 0xf1, + 0xdc, 0xc2, 0x8d, 0x12, 0x46, 0x05, 0x85, 0x75, 0x49, 0x41, 0xd8, 0xe3, 0x68, 0xc2, 0x46, 0xe3, + 0x26, 0x52, 0xec, 0xda, 0x83, 0x79, 0xfa, 0x8c, 0x70, 0x9a, 0xb2, 0x99, 0x00, 0x52, 0xb8, 0x76, + 0x53, 0xd1, 0x92, 0xc0, 0xc2, 0x71, 0x4c, 0x05, 0x16, 0x01, 0x8d, 0x79, 0xf1, 0x5a, 0x84, 0xb5, + 0xf2, 0x5f, 0xc7, 0xe9, 0xc8, 0x1a, 0x05, 0x24, 0xf4, 0xdc, 0x08, 0xf3, 0x93, 0x02, 0x71, 0xeb, + 0x22, 0xe2, 0x25, 0xc3, 0x49, 0x42, 0x98, 0x52, 0x58, 0x2f, 0xde, 0x59, 0x32, 0xb4, 0xb8, 0xc0, + 0x22, 0x2d, 0x1e, 0xcc, 0x36, 0x58, 0xeb, 0x11, 0xd1, 0xf1, 0x7a, 0x59, 0x46, 0x5d, 0x42, 0x3c, + 0x87, 0xbc, 0x48, 0x09, 0x17, 0xf0, 0x0e, 0xf8, 0x4f, 0xa5, 0xec, 0xc6, 0x38, 0x22, 0x55, 0xad, + 0xae, 0x6d, 0x2e, 0x3b, 0x2b, 0xca, 0xf9, 0x0c, 0x47, 0xc4, 0xfc, 0xaa, 0x81, 0x1b, 0xfb, 0xa9, + 0xc0, 0x82, 0x94, 0x14, 0xb8, 0x92, 0xb8, 0x0d, 0x8c, 0x61, 0xca, 0x05, 0x8d, 0x08, 0x73, 0x03, + 0xaf, 0x10, 0x00, 0xca, 0xf5, 0xd4, 0x83, 0x7d, 0x00, 0x68, 0x42, 0x98, 0xac, 0xb5, 0xaa, 0xd7, + 0x2b, 0x9b, 0x46, 0x6b, 0x0b, 0x2d, 0xea, 0x31, 0x2a, 0xc5, 0x3a, 0x50, 0x74, 0xa7, 0xa4, 0x04, + 0xef, 0x82, 0xff, 0x13, 0xcc, 0x44, 0x80, 0x43, 0x77, 0x84, 0x83, 0x30, 0x65, 0xa4, 0x5a, 0xa9, + 0x6b, 0x9b, 0xff, 0x3a, 0xab, 0x85, 0xbb, 0x2b, 0xbd, 0x59, 0x91, 0x63, 0x1c, 0x06, 0x1e, 0x16, + 0xc4, 0xa5, 0x71, 0x78, 0x5a, 0xfd, 0x3b, 0x87, 0xad, 0x28, 0xe7, 0x41, 0x1c, 0x9e, 0x9a, 0xaf, + 0x75, 0x70, 0xfd, 0xb2, 0x90, 0x70, 0x1b, 0x18, 0x69, 0x92, 0x73, 0xb3, 0x49, 0xe4, 0x5c, 0xa3, + 0x55, 0x53, 0xf9, 0xab, 0x51, 0xa0, 0x6e, 0x36, 0xac, 0x7d, 0xcc, 0x4f, 0x1c, 0x20, 0xe1, 0x99, + 0x0d, 0xf7, 0xc0, 0xd2, 0x90, 0x11, 0x2c, 0x64, 0x63, 0x8d, 0x16, 0x9a, 0x5b, 0xf7, 0x64, 0x73, + 0xca, 0x85, 0xef, 0xfd, 0xe5, 0x14, 0xfc, 0x4c, 0x49, 0xea, 0x56, 0xf5, 0xab, 0x2a, 0x49, 0x3e, + 0xac, 0x82, 0x25, 0x46, 0x22, 0x3a, 0x96, 0xed, 0x5a, 0xce, 0x5e, 0xe4, 0xef, 0x5d, 0x03, 0x2c, + 0x4f, 0xfa, 0x6b, 0x7e, 0xd4, 0x40, 0xed, 0xb2, 0xa9, 0xf3, 0x84, 0xc6, 0x9c, 0xc0, 0x2e, 0x58, + 0xbb, 0xd0, 0x7d, 0x97, 0x30, 0x46, 0x59, 0x2e, 0x6a, 0xb4, 0xa0, 0x4a, 0x8f, 0x25, 0x43, 0x74, + 0x94, 0xef, 0xa2, 0x73, 0x6d, 0x7a, 0x2e, 0x4f, 0x32, 0x38, 0x3c, 0x02, 0xff, 0x30, 0xc2, 0xd3, + 0x50, 0xa8, 0xd5, 0x78, 0xb4, 0x78, 0x35, 0x66, 0xd2, 0x72, 0x72, 0x05, 0x47, 0x29, 0x99, 0x8f, + 0xc1, 0xfa, 0x1c, 0xcc, 0x6f, 0x6d, 0x7c, 0xeb, 0x6d, 0x05, 0xc0, 0x12, 0xf5, 0x48, 0x06, 0x86, + 0x1f, 0x34, 0xb0, 0x3a, 0x7d, 0x47, 0xf0, 0xe1, 0xe2, 0x6c, 0x2f, 0xbd, 0xbc, 0xda, 0x1f, 0xce, + 0xcf, 0xdc, 0x7a, 0xf5, 0xe5, 0xdb, 0x1b, 0xfd, 0x1e, 0x44, 0xd9, 0x67, 0xe6, 0x6c, 0xaa, 0x84, + 0x1d, 0x75, 0x6c, 0xdc, 0x6a, 0x58, 0xb8, 0x34, 0x2c, 0xab, 0x71, 0x0e, 0x3f, 0x6b, 0x00, 0xce, + 0x8e, 0x11, 0x6e, 0x5f, 0xa1, 0xcb, 0xea, 0xe4, 0x6b, 0xed, 0xab, 0x91, 0xe5, 0xe6, 0x98, 0xed, + 0xbc, 0x92, 0x2d, 0xb3, 0x99, 0x55, 0xf2, 0x2b, 0xf5, 0xb3, 0xd2, 0x57, 0x64, 0xa7, 0x71, 0x3e, + 0x55, 0x88, 0x1d, 0xe5, 0x72, 0xb6, 0xd6, 0xd8, 0xfd, 0xa1, 0x81, 0x8d, 0x21, 0x8d, 0x16, 0x66, + 0xb0, 0xbb, 0x3e, 0x3b, 0xc0, 
0xc3, 0xec, 0x58, 0x0f, 0xb5, 0xe7, 0x7b, 0x05, 0xd9, 0xa7, 0x21, + 0x8e, 0x7d, 0x44, 0x99, 0x6f, 0xf9, 0x24, 0xce, 0x4f, 0x59, 0x7d, 0xce, 0x93, 0x80, 0xcf, 0xff, + 0xf7, 0xd8, 0x56, 0xc6, 0x3b, 0xbd, 0xd2, 0xeb, 0x74, 0xde, 0xeb, 0xf5, 0x9e, 0x14, 0xec, 0x78, + 0x1c, 0x49, 0x33, 0xb3, 0xfa, 0x4d, 0x54, 0x04, 0xe6, 0x9f, 0x14, 0x64, 0xd0, 0xf1, 0xf8, 0x60, + 0x02, 0x19, 0xf4, 0x9b, 0x03, 0x05, 0xf9, 0xae, 0x6f, 0x48, 0xbf, 0x6d, 0x77, 0x3c, 0x6e, 0xdb, + 0x13, 0x90, 0x6d, 0xf7, 0x9b, 0xb6, 0xad, 0x60, 0xc7, 0x4b, 0x79, 0x9e, 0xf7, 0x7f, 0x06, 0x00, + 0x00, 0xff, 0xff, 0x0c, 0x80, 0x01, 0xb4, 0xe4, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_label_service.pb.go new file mode 100644 index 0000000..aa579a9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_label_service.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupLabelService.GetAdGroupLabel][google.ads.googleads.v1.services.AdGroupLabelService.GetAdGroupLabel]. +type GetAdGroupLabelRequest struct { + // The resource name of the ad group label to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupLabelRequest) Reset() { *m = GetAdGroupLabelRequest{} } +func (m *GetAdGroupLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupLabelRequest) ProtoMessage() {} +func (*GetAdGroupLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_service_5fd3c10fbb229e04, []int{0} +} +func (m *GetAdGroupLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupLabelRequest.Unmarshal(m, b) +} +func (m *GetAdGroupLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupLabelRequest.Merge(dst, src) +} +func (m *GetAdGroupLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupLabelRequest.Size(m) +} +func (m *GetAdGroupLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupLabelRequest proto.InternalMessageInfo + +func (m *GetAdGroupLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupLabelService.MutateAdGroupLabels][google.ads.googleads.v1.services.AdGroupLabelService.MutateAdGroupLabels]. +type MutateAdGroupLabelsRequest struct { + // ID of the customer whose ad group labels are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on ad group labels. + Operations []*AdGroupLabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupLabelsRequest) Reset() { *m = MutateAdGroupLabelsRequest{} } +func (m *MutateAdGroupLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupLabelsRequest) ProtoMessage() {} +func (*MutateAdGroupLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_service_5fd3c10fbb229e04, []int{1} +} +func (m *MutateAdGroupLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupLabelsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupLabelsRequest.Merge(dst, src) +} +func (m *MutateAdGroupLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupLabelsRequest.Size(m) +} +func (m *MutateAdGroupLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupLabelsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupLabelsRequest) GetOperations() []*AdGroupLabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on an ad group label. +type AdGroupLabelOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupLabelOperation_Create + // *AdGroupLabelOperation_Remove + Operation isAdGroupLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupLabelOperation) Reset() { *m = AdGroupLabelOperation{} } +func (m *AdGroupLabelOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupLabelOperation) ProtoMessage() {} +func (*AdGroupLabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_service_5fd3c10fbb229e04, []int{2} +} +func (m *AdGroupLabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupLabelOperation.Unmarshal(m, b) +} +func (m *AdGroupLabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupLabelOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupLabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupLabelOperation.Merge(dst, src) +} +func (m *AdGroupLabelOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupLabelOperation.Size(m) +} +func (m *AdGroupLabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupLabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupLabelOperation proto.InternalMessageInfo + +type isAdGroupLabelOperation_Operation interface { + isAdGroupLabelOperation_Operation() +} + +type AdGroupLabelOperation_Create struct { + Create *resources.AdGroupLabel `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupLabelOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupLabelOperation_Create) isAdGroupLabelOperation_Operation() {} + +func (*AdGroupLabelOperation_Remove) isAdGroupLabelOperation_Operation() {} + +func (m *AdGroupLabelOperation) GetOperation() isAdGroupLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupLabelOperation) GetCreate() *resources.AdGroupLabel { + if x, ok := m.GetOperation().(*AdGroupLabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupLabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupLabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupLabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupLabelOperation_OneofMarshaler, _AdGroupLabelOperation_OneofUnmarshaler, _AdGroupLabelOperation_OneofSizer, []interface{}{ + (*AdGroupLabelOperation_Create)(nil), + (*AdGroupLabelOperation_Remove)(nil), + } +} + +func _AdGroupLabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupLabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupLabelOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupLabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupLabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupLabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroupLabel) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupLabelOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupLabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupLabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupLabelOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupLabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupLabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group labels mutate. +type MutateAdGroupLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupLabelsResponse) Reset() { *m = MutateAdGroupLabelsResponse{} } +func (m *MutateAdGroupLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupLabelsResponse) ProtoMessage() {} +func (*MutateAdGroupLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_service_5fd3c10fbb229e04, []int{3} +} +func (m *MutateAdGroupLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupLabelsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupLabelsResponse.Merge(dst, src) +} +func (m *MutateAdGroupLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupLabelsResponse.Size(m) +} +func (m *MutateAdGroupLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupLabelsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupLabelsResponse) GetResults() []*MutateAdGroupLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for an ad group label mutate. +type MutateAdGroupLabelResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupLabelResult) Reset() { *m = MutateAdGroupLabelResult{} } +func (m *MutateAdGroupLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupLabelResult) ProtoMessage() {} +func (*MutateAdGroupLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_label_service_5fd3c10fbb229e04, []int{4} +} +func (m *MutateAdGroupLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupLabelResult.Unmarshal(m, b) +} +func (m *MutateAdGroupLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupLabelResult.Merge(dst, src) +} +func (m *MutateAdGroupLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupLabelResult.Size(m) +} +func (m *MutateAdGroupLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupLabelResult proto.InternalMessageInfo + +func (m *MutateAdGroupLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupLabelRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupLabelRequest") + proto.RegisterType((*MutateAdGroupLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupLabelsRequest") + proto.RegisterType((*AdGroupLabelOperation)(nil), "google.ads.googleads.v1.services.AdGroupLabelOperation") + proto.RegisterType((*MutateAdGroupLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupLabelsResponse") + proto.RegisterType((*MutateAdGroupLabelResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupLabelResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupLabelServiceClient is the client API for AdGroupLabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupLabelServiceClient interface { + // Returns the requested ad group label in full detail. + GetAdGroupLabel(ctx context.Context, in *GetAdGroupLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupLabel, error) + // Creates and removes ad group labels. + // Operation statuses are returned. 
+ MutateAdGroupLabels(ctx context.Context, in *MutateAdGroupLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupLabelsResponse, error) +} + +type adGroupLabelServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupLabelServiceClient(cc *grpc.ClientConn) AdGroupLabelServiceClient { + return &adGroupLabelServiceClient{cc} +} + +func (c *adGroupLabelServiceClient) GetAdGroupLabel(ctx context.Context, in *GetAdGroupLabelRequest, opts ...grpc.CallOption) (*resources.AdGroupLabel, error) { + out := new(resources.AdGroupLabel) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupLabelService/GetAdGroupLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupLabelServiceClient) MutateAdGroupLabels(ctx context.Context, in *MutateAdGroupLabelsRequest, opts ...grpc.CallOption) (*MutateAdGroupLabelsResponse, error) { + out := new(MutateAdGroupLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupLabelService/MutateAdGroupLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupLabelServiceServer is the server API for AdGroupLabelService service. +type AdGroupLabelServiceServer interface { + // Returns the requested ad group label in full detail. + GetAdGroupLabel(context.Context, *GetAdGroupLabelRequest) (*resources.AdGroupLabel, error) + // Creates and removes ad group labels. + // Operation statuses are returned. + MutateAdGroupLabels(context.Context, *MutateAdGroupLabelsRequest) (*MutateAdGroupLabelsResponse, error) +} + +func RegisterAdGroupLabelServiceServer(s *grpc.Server, srv AdGroupLabelServiceServer) { + s.RegisterService(&_AdGroupLabelService_serviceDesc, srv) +} + +func _AdGroupLabelService_GetAdGroupLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupLabelServiceServer).GetAdGroupLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupLabelService/GetAdGroupLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupLabelServiceServer).GetAdGroupLabel(ctx, req.(*GetAdGroupLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupLabelService_MutateAdGroupLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupLabelServiceServer).MutateAdGroupLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupLabelService/MutateAdGroupLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupLabelServiceServer).MutateAdGroupLabels(ctx, req.(*MutateAdGroupLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupLabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupLabelService", + HandlerType: (*AdGroupLabelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupLabel", + Handler: _AdGroupLabelService_GetAdGroupLabel_Handler, + }, + { + 
MethodName: "MutateAdGroupLabels", + Handler: _AdGroupLabelService_MutateAdGroupLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_label_service.proto", fileDescriptor_ad_group_label_service_5fd3c10fbb229e04) +} + +var fileDescriptor_ad_group_label_service_5fd3c10fbb229e04 = []byte{ + // 663 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x6b, 0xd4, 0x40, + 0x14, 0x37, 0xbb, 0x52, 0xed, 0x6c, 0xb5, 0x30, 0xa5, 0x1a, 0x56, 0xd1, 0x25, 0x16, 0x2c, 0x7b, + 0xc8, 0xb0, 0x2b, 0x58, 0x89, 0x2e, 0x92, 0x82, 0xdd, 0x16, 0xd4, 0x96, 0x54, 0x2a, 0xc8, 0x42, + 0x98, 0x4d, 0xa6, 0x21, 0x90, 0x64, 0xe2, 0xcc, 0x64, 0xa5, 0x94, 0x5e, 0xbc, 0x79, 0xf6, 0x13, + 0xe8, 0xd1, 0xbb, 0x27, 0xbf, 0x81, 0x37, 0xf1, 0x2b, 0xf4, 0xe4, 0x97, 0x50, 0x92, 0xc9, 0xac, + 0xd9, 0x76, 0x97, 0xd5, 0xde, 0x5e, 0xde, 0x9f, 0xdf, 0xfb, 0xfd, 0xde, 0x7b, 0x19, 0xd0, 0x0b, + 0x28, 0x0d, 0x22, 0x82, 0xb0, 0xcf, 0x91, 0x34, 0x73, 0x6b, 0xd4, 0x41, 0x9c, 0xb0, 0x51, 0xe8, + 0x11, 0x8e, 0xb0, 0xef, 0x06, 0x8c, 0x66, 0xa9, 0x1b, 0xe1, 0x21, 0x89, 0xdc, 0xd2, 0x6f, 0xa6, + 0x8c, 0x0a, 0x0a, 0x5b, 0xb2, 0xc6, 0xc4, 0x3e, 0x37, 0xc7, 0xe5, 0xe6, 0xa8, 0x63, 0xaa, 0xf2, + 0xe6, 0xc3, 0x59, 0x0d, 0x18, 0xe1, 0x34, 0x63, 0xe7, 0x3b, 0x48, 0xe4, 0xe6, 0x6d, 0x55, 0x97, + 0x86, 0x08, 0x27, 0x09, 0x15, 0x58, 0x84, 0x34, 0xe1, 0x65, 0xf4, 0x4e, 0x19, 0x2d, 0xbe, 0x86, + 0xd9, 0x21, 0x7a, 0xc7, 0x70, 0x9a, 0x12, 0xa6, 0xe2, 0x37, 0xcb, 0x38, 0x4b, 0x3d, 0xc4, 0x05, + 0x16, 0x59, 0x19, 0x30, 0x7a, 0xe0, 0x46, 0x9f, 0x08, 0xdb, 0xef, 0xe7, 0x0d, 0x9f, 0xe7, 0xfd, + 0x1c, 0xf2, 0x36, 0x23, 0x5c, 0xc0, 0x7b, 0xe0, 0x9a, 0xa2, 0xe4, 0x26, 0x38, 0x26, 0xba, 0xd6, + 0xd2, 0xd6, 0x17, 0x9d, 0x25, 0xe5, 0x7c, 0x89, 0x63, 0x62, 0x9c, 0x6a, 0xa0, 0xf9, 0x22, 0x13, + 0x58, 0x90, 0x2a, 0x04, 0x57, 0x18, 0x77, 0x41, 0xc3, 0xcb, 0xb8, 0xa0, 0x31, 0x61, 0x6e, 0xe8, + 0x97, 0x08, 0x40, 0xb9, 0x76, 0x7c, 0xf8, 0x1a, 0x00, 0x9a, 0x12, 0x26, 0xb5, 0xe8, 0xb5, 0x56, + 0x7d, 0xbd, 0xd1, 0xdd, 0x30, 0xe7, 0x0d, 0xd1, 0xac, 0x36, 0xdb, 0x55, 0xf5, 0x4e, 0x05, 0x0a, + 0xde, 0x07, 0xcb, 0x29, 0x66, 0x22, 0xc4, 0x91, 0x7b, 0x88, 0xc3, 0x28, 0x63, 0x44, 0xaf, 0xb7, + 0xb4, 0xf5, 0xab, 0xce, 0xf5, 0xd2, 0xbd, 0x25, 0xbd, 0xb9, 0xcc, 0x11, 0x8e, 0x42, 0x1f, 0x0b, + 0xe2, 0xd2, 0x24, 0x3a, 0xd2, 0x2f, 0x17, 0x69, 0x4b, 0xca, 0xb9, 0x9b, 0x44, 0x47, 0xc6, 0x07, + 0x0d, 0xac, 0x4e, 0xed, 0x09, 0x77, 0xc0, 0x82, 0xc7, 0x08, 0x16, 0x72, 0x3c, 0x8d, 0x2e, 0x9a, + 0x49, 0x7e, 0xbc, 0xdf, 0x09, 0xf6, 0xdb, 0x97, 0x9c, 0x12, 0x00, 0xea, 0x60, 0x81, 0x91, 0x98, + 0x8e, 0x88, 0x5e, 0xcb, 0xe7, 0x94, 0x47, 0xe4, 0xf7, 0x66, 0x03, 0x2c, 0x8e, 0xa5, 0x19, 0xdf, + 0x34, 0x70, 0x6b, 0xea, 0xc8, 0x79, 0x4a, 0x13, 0x4e, 0xe0, 0x16, 0x58, 0x3d, 0xa3, 0xdc, 0x25, + 0x8c, 0x51, 0x56, 0xe8, 0x6f, 0x74, 0xa1, 0x22, 0xc8, 0x52, 0xcf, 0xdc, 0x2f, 0x4e, 0xc1, 0x59, + 0x99, 0x9c, 0xc9, 0xb3, 0x3c, 0x1d, 0xbe, 0x02, 0x57, 0x18, 0xe1, 0x59, 0x24, 0xd4, 0x5e, 0xac, + 0xf9, 0x7b, 0x39, 0xcf, 0xcb, 0x29, 0x20, 0x1c, 0x05, 0x65, 0x3c, 0x05, 0xfa, 0xac, 0xa4, 0x7f, + 0xba, 0xb8, 0xee, 0xa7, 0x3a, 0x58, 0xa9, 0xd6, 0xee, 0xcb, 0xde, 0xf0, 0xab, 0x06, 0x96, 0xcf, + 0x5c, 0x32, 0x7c, 0x34, 0x9f, 0xf1, 0xf4, 0xe3, 0x6f, 0xfe, 0xef, 0x1a, 0x8d, 0x8d, 0xf7, 0x3f, + 0x4f, 0x3f, 0xd6, 0x3a, 0x10, 0xe5, 0xbf, 0xf2, 0xf1, 0x84, 0x8c, 0x9e, 0xba, 0x77, 0x8e, 0xda, + 0x08, 0x57, 
0x77, 0x86, 0xda, 0x27, 0xf0, 0x87, 0x06, 0x56, 0xa6, 0xac, 0x13, 0x3e, 0xb9, 0xc8, + 0xb4, 0xd5, 0x8f, 0xd7, 0xec, 0x5d, 0xb0, 0x5a, 0xde, 0x90, 0xd1, 0x2b, 0xd4, 0x6c, 0x18, 0xdd, + 0x5c, 0xcd, 0x5f, 0xfa, 0xc7, 0x95, 0x9f, 0xb9, 0xd7, 0x3e, 0x99, 0x14, 0x63, 0xc5, 0x05, 0xa0, + 0xa5, 0xb5, 0x37, 0x7f, 0x6b, 0x60, 0xcd, 0xa3, 0xf1, 0x5c, 0x0e, 0x9b, 0xfa, 0x94, 0x4d, 0xee, + 0xe5, 0xef, 0xd2, 0x9e, 0xf6, 0x66, 0xbb, 0xac, 0x0e, 0x68, 0x84, 0x93, 0xc0, 0xa4, 0x2c, 0x40, + 0x01, 0x49, 0x8a, 0x57, 0x4b, 0x3d, 0x9c, 0x69, 0xc8, 0x67, 0x3f, 0xd4, 0x8f, 0x95, 0xf1, 0xb9, + 0x56, 0xef, 0xdb, 0xf6, 0x97, 0x5a, 0xab, 0x2f, 0x01, 0x6d, 0x9f, 0x9b, 0xd2, 0xcc, 0xad, 0x83, + 0x8e, 0x59, 0x36, 0xe6, 0xdf, 0x55, 0xca, 0xc0, 0xf6, 0xf9, 0x60, 0x9c, 0x32, 0x38, 0xe8, 0x0c, + 0x54, 0xca, 0xaf, 0xda, 0x9a, 0xf4, 0x5b, 0x96, 0xed, 0x73, 0xcb, 0x1a, 0x27, 0x59, 0xd6, 0x41, + 0xc7, 0xb2, 0x54, 0xda, 0x70, 0xa1, 0xe0, 0xf9, 0xe0, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb9, + 0x7c, 0x1a, 0xba, 0x4f, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_service.pb.go new file mode 100644 index 0000000..db946e8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_service.pb.go @@ -0,0 +1,587 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupService.GetAdGroup][google.ads.googleads.v1.services.AdGroupService.GetAdGroup]. +type GetAdGroupRequest struct { + // The resource name of the ad group to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupRequest) Reset() { *m = GetAdGroupRequest{} } +func (m *GetAdGroupRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupRequest) ProtoMessage() {} +func (*GetAdGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_service_b66f8854dd286f7c, []int{0} +} +func (m *GetAdGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupRequest.Unmarshal(m, b) +} +func (m *GetAdGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupRequest.Merge(dst, src) +} +func (m *GetAdGroupRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupRequest.Size(m) +} +func (m *GetAdGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupRequest proto.InternalMessageInfo + +func (m *GetAdGroupRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdGroupService.MutateAdGroups][google.ads.googleads.v1.services.AdGroupService.MutateAdGroups]. +type MutateAdGroupsRequest struct { + // The ID of the customer whose ad groups are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ad groups. + Operations []*AdGroupOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupsRequest) Reset() { *m = MutateAdGroupsRequest{} } +func (m *MutateAdGroupsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupsRequest) ProtoMessage() {} +func (*MutateAdGroupsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_service_b66f8854dd286f7c, []int{1} +} +func (m *MutateAdGroupsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupsRequest.Unmarshal(m, b) +} +func (m *MutateAdGroupsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupsRequest.Merge(dst, src) +} +func (m *MutateAdGroupsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupsRequest.Size(m) +} +func (m *MutateAdGroupsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupsRequest proto.InternalMessageInfo + +func (m *MutateAdGroupsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdGroupsRequest) GetOperations() []*AdGroupOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdGroupsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdGroupsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an ad group. +type AdGroupOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdGroupOperation_Create + // *AdGroupOperation_Update + // *AdGroupOperation_Remove + Operation isAdGroupOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdGroupOperation) Reset() { *m = AdGroupOperation{} } +func (m *AdGroupOperation) String() string { return proto.CompactTextString(m) } +func (*AdGroupOperation) ProtoMessage() {} +func (*AdGroupOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_service_b66f8854dd286f7c, []int{2} +} +func (m *AdGroupOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdGroupOperation.Unmarshal(m, b) +} +func (m *AdGroupOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdGroupOperation.Marshal(b, m, deterministic) +} +func (dst *AdGroupOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdGroupOperation.Merge(dst, src) +} +func (m *AdGroupOperation) XXX_Size() int { + return xxx_messageInfo_AdGroupOperation.Size(m) +} +func (m *AdGroupOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdGroupOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdGroupOperation proto.InternalMessageInfo + +func (m *AdGroupOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAdGroupOperation_Operation interface { + isAdGroupOperation_Operation() +} + +type AdGroupOperation_Create struct { + Create *resources.AdGroup `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdGroupOperation_Update struct { + Update *resources.AdGroup `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdGroupOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdGroupOperation_Create) isAdGroupOperation_Operation() {} + +func (*AdGroupOperation_Update) isAdGroupOperation_Operation() {} + +func (*AdGroupOperation_Remove) isAdGroupOperation_Operation() {} + +func (m *AdGroupOperation) GetOperation() isAdGroupOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdGroupOperation) GetCreate() *resources.AdGroup { + if x, ok := m.GetOperation().(*AdGroupOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdGroupOperation) GetUpdate() *resources.AdGroup { + if x, ok := m.GetOperation().(*AdGroupOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdGroupOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdGroupOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdGroupOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdGroupOperation_OneofMarshaler, _AdGroupOperation_OneofUnmarshaler, _AdGroupOperation_OneofSizer, []interface{}{ + (*AdGroupOperation_Create)(nil), + (*AdGroupOperation_Update)(nil), + (*AdGroupOperation_Remove)(nil), + } +} + +func _AdGroupOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdGroupOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdGroupOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdGroupOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdGroupOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdGroupOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdGroupOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroup) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdGroup) + err := b.DecodeMessage(msg) + m.Operation = &AdGroupOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdGroupOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdGroupOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdGroupOperation) + // operation + switch x := m.Operation.(type) { + case *AdGroupOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdGroupOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad group mutate. +type MutateAdGroupsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdGroupResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupsResponse) Reset() { *m = MutateAdGroupsResponse{} } +func (m *MutateAdGroupsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupsResponse) ProtoMessage() {} +func (*MutateAdGroupsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_service_b66f8854dd286f7c, []int{3} +} +func (m *MutateAdGroupsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupsResponse.Unmarshal(m, b) +} +func (m *MutateAdGroupsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupsResponse.Merge(dst, src) +} +func (m *MutateAdGroupsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupsResponse.Size(m) +} +func (m *MutateAdGroupsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupsResponse proto.InternalMessageInfo + +func (m *MutateAdGroupsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdGroupsResponse) GetResults() []*MutateAdGroupResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the ad group mutate. +type MutateAdGroupResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdGroupResult) Reset() { *m = MutateAdGroupResult{} } +func (m *MutateAdGroupResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdGroupResult) ProtoMessage() {} +func (*MutateAdGroupResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_service_b66f8854dd286f7c, []int{4} +} +func (m *MutateAdGroupResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdGroupResult.Unmarshal(m, b) +} +func (m *MutateAdGroupResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdGroupResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdGroupResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdGroupResult.Merge(dst, src) +} +func (m *MutateAdGroupResult) XXX_Size() int { + return xxx_messageInfo_MutateAdGroupResult.Size(m) +} +func (m *MutateAdGroupResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdGroupResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdGroupResult proto.InternalMessageInfo + +func (m *MutateAdGroupResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupRequest") + proto.RegisterType((*MutateAdGroupsRequest)(nil), "google.ads.googleads.v1.services.MutateAdGroupsRequest") + proto.RegisterType((*AdGroupOperation)(nil), "google.ads.googleads.v1.services.AdGroupOperation") + proto.RegisterType((*MutateAdGroupsResponse)(nil), "google.ads.googleads.v1.services.MutateAdGroupsResponse") + 
proto.RegisterType((*MutateAdGroupResult)(nil), "google.ads.googleads.v1.services.MutateAdGroupResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupServiceClient is the client API for AdGroupService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdGroupServiceClient interface { + // Returns the requested ad group in full detail. + GetAdGroup(ctx context.Context, in *GetAdGroupRequest, opts ...grpc.CallOption) (*resources.AdGroup, error) + // Creates, updates, or removes ad groups. Operation statuses are returned. + MutateAdGroups(ctx context.Context, in *MutateAdGroupsRequest, opts ...grpc.CallOption) (*MutateAdGroupsResponse, error) +} + +type adGroupServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupServiceClient(cc *grpc.ClientConn) AdGroupServiceClient { + return &adGroupServiceClient{cc} +} + +func (c *adGroupServiceClient) GetAdGroup(ctx context.Context, in *GetAdGroupRequest, opts ...grpc.CallOption) (*resources.AdGroup, error) { + out := new(resources.AdGroup) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupService/GetAdGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *adGroupServiceClient) MutateAdGroups(ctx context.Context, in *MutateAdGroupsRequest, opts ...grpc.CallOption) (*MutateAdGroupsResponse, error) { + out := new(MutateAdGroupsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupService/MutateAdGroups", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupServiceServer is the server API for AdGroupService service. +type AdGroupServiceServer interface { + // Returns the requested ad group in full detail. + GetAdGroup(context.Context, *GetAdGroupRequest) (*resources.AdGroup, error) + // Creates, updates, or removes ad groups. Operation statuses are returned. 
+ MutateAdGroups(context.Context, *MutateAdGroupsRequest) (*MutateAdGroupsResponse, error) +} + +func RegisterAdGroupServiceServer(s *grpc.Server, srv AdGroupServiceServer) { + s.RegisterService(&_AdGroupService_serviceDesc, srv) +} + +func _AdGroupService_GetAdGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupServiceServer).GetAdGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupService/GetAdGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupServiceServer).GetAdGroup(ctx, req.(*GetAdGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdGroupService_MutateAdGroups_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdGroupsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupServiceServer).MutateAdGroups(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupService/MutateAdGroups", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupServiceServer).MutateAdGroups(ctx, req.(*MutateAdGroupsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupService", + HandlerType: (*AdGroupServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroup", + Handler: _AdGroupService_GetAdGroup_Handler, + }, + { + MethodName: "MutateAdGroups", + Handler: _AdGroupService_MutateAdGroups_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_service.proto", fileDescriptor_ad_group_service_b66f8854dd286f7c) +} + +var fileDescriptor_ad_group_service_b66f8854dd286f7c = []byte{ + // 702 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x4f, 0xd4, 0x40, + 0x14, 0xb7, 0xc5, 0xa0, 0x4c, 0x11, 0x75, 0x08, 0xba, 0xd9, 0x18, 0xdd, 0x54, 0x12, 0xc9, 0x46, + 0x5b, 0xb7, 0xc4, 0x40, 0x4a, 0x38, 0x2c, 0x51, 0x16, 0x0f, 0x08, 0x29, 0x09, 0x07, 0xb3, 0x49, + 0x33, 0x6c, 0x87, 0xa6, 0xa1, 0xed, 0xd4, 0x99, 0xe9, 0x1a, 0x42, 0xb8, 0xf0, 0x15, 0xfc, 0x04, + 0x7a, 0xf4, 0xe6, 0x07, 0xf0, 0x0b, 0x78, 0xf5, 0xe6, 0xd9, 0x93, 0x67, 0x4f, 0x9e, 0xcc, 0x74, + 0x66, 0x16, 0x76, 0x95, 0xac, 0xeb, 0xed, 0xcd, 0x9b, 0xf7, 0xfb, 0xbd, 0xdf, 0xbc, 0x3f, 0x03, + 0x56, 0x62, 0x42, 0xe2, 0x14, 0xbb, 0x28, 0x62, 0xae, 0x34, 0x85, 0xd5, 0x6f, 0xb9, 0x0c, 0xd3, + 0x7e, 0xd2, 0xc3, 0xcc, 0x45, 0x51, 0x18, 0x53, 0x52, 0x16, 0xa1, 0xf2, 0x38, 0x05, 0x25, 0x9c, + 0xc0, 0x86, 0x8c, 0x76, 0x50, 0xc4, 0x9c, 0x01, 0xd0, 0xe9, 0xb7, 0x1c, 0x0d, 0xac, 0x3f, 0xbd, + 0x8c, 0x9a, 0x62, 0x46, 0x4a, 0x7a, 0x91, 0x5b, 0x72, 0xd6, 0xef, 0x69, 0x44, 0x91, 0xb8, 0x28, + 0xcf, 0x09, 0x47, 0x3c, 0x21, 0x39, 0x53, 0xb7, 0x2a, 0xa3, 0x5b, 0x9d, 0x0e, 0xca, 0x43, 0xf7, + 0x30, 0xc1, 0x69, 0x14, 0x66, 0x88, 0x1d, 0xa9, 0x88, 0xfb, 0xa3, 0x11, 0x6f, 0x29, 0x2a, 0x0a, + 
0x4c, 0x35, 0xc3, 0x5d, 0x75, 0x4f, 0x8b, 0x9e, 0xcb, 0x38, 0xe2, 0xa5, 0xba, 0xb0, 0x57, 0xc1, + 0xed, 0x0e, 0xe6, 0xed, 0xa8, 0x23, 0xc4, 0x04, 0xf8, 0x4d, 0x89, 0x19, 0x87, 0x0f, 0xc1, 0x0d, + 0xad, 0x34, 0xcc, 0x51, 0x86, 0x6b, 0x46, 0xc3, 0x58, 0x9a, 0x09, 0x66, 0xb5, 0xf3, 0x15, 0xca, + 0xb0, 0xfd, 0xcd, 0x00, 0x0b, 0xdb, 0x25, 0x47, 0x1c, 0x2b, 0x34, 0xd3, 0xf0, 0x07, 0xc0, 0xea, + 0x95, 0x8c, 0x93, 0x0c, 0xd3, 0x30, 0x89, 0x14, 0x18, 0x68, 0xd7, 0xcb, 0x08, 0x06, 0x00, 0x90, + 0x02, 0x53, 0xf9, 0xc6, 0x9a, 0xd9, 0x98, 0x5a, 0xb2, 0x3c, 0xcf, 0x19, 0x57, 0x56, 0x47, 0xe5, + 0xd9, 0xd1, 0xd0, 0xe0, 0x02, 0x0b, 0x7c, 0x04, 0x6e, 0x16, 0x88, 0xf2, 0x04, 0xa5, 0xe1, 0x21, + 0x4a, 0xd2, 0x92, 0xe2, 0xda, 0x54, 0xc3, 0x58, 0xba, 0x1e, 0xcc, 0x29, 0xf7, 0xa6, 0xf4, 0x8a, + 0xc7, 0xf5, 0x51, 0x9a, 0x44, 0x88, 0xe3, 0x90, 0xe4, 0xe9, 0x71, 0xed, 0x6a, 0x15, 0x36, 0xab, + 0x9d, 0x3b, 0x79, 0x7a, 0x6c, 0x9f, 0x99, 0xe0, 0xd6, 0x68, 0x3a, 0xb8, 0x06, 0xac, 0xb2, 0xa8, + 0x70, 0xa2, 0xf2, 0x15, 0xce, 0xf2, 0xea, 0x5a, 0xb7, 0x2e, 0xbd, 0xb3, 0x29, 0x9a, 0xb3, 0x8d, + 0xd8, 0x51, 0x00, 0x64, 0xb8, 0xb0, 0xe1, 0x73, 0x30, 0xdd, 0xa3, 0x18, 0x71, 0x59, 0x4c, 0xcb, + 0x6b, 0x5e, 0xfa, 0xde, 0xc1, 0x90, 0xe8, 0x07, 0x6f, 0x5d, 0x09, 0x14, 0x56, 0xb0, 0x48, 0xce, + 0x9a, 0xf9, 0x3f, 0x2c, 0x12, 0x0b, 0x6b, 0x60, 0x9a, 0xe2, 0x8c, 0xf4, 0x65, 0x89, 0x66, 0xc4, + 0x8d, 0x3c, 0x6f, 0x58, 0x60, 0x66, 0x50, 0x53, 0xfb, 0x93, 0x01, 0xee, 0x8c, 0x76, 0x98, 0x15, + 0x24, 0x67, 0x18, 0x6e, 0x82, 0x85, 0x91, 0x6a, 0x87, 0x98, 0x52, 0x42, 0x2b, 0x42, 0xcb, 0x83, + 0x5a, 0x16, 0x2d, 0x7a, 0xce, 0x5e, 0x35, 0x6f, 0xc1, 0xfc, 0x70, 0x1f, 0x5e, 0x88, 0x70, 0xb8, + 0x03, 0xae, 0x51, 0xcc, 0xca, 0x94, 0xeb, 0x31, 0x78, 0x36, 0x7e, 0x0c, 0x86, 0x24, 0x05, 0x15, + 0x3a, 0xd0, 0x2c, 0xb6, 0x0f, 0xe6, 0xff, 0x72, 0xff, 0x4f, 0x13, 0xed, 0xfd, 0x34, 0xc1, 0x9c, + 0x82, 0xed, 0xc9, 0x64, 0xf0, 0xbd, 0x01, 0xc0, 0xf9, 0x7e, 0xc0, 0xe5, 0xf1, 0xea, 0xfe, 0xd8, + 0xa6, 0xfa, 0x04, 0x3d, 0xb2, 0xbd, 0xb3, 0xaf, 0xdf, 0xdf, 0x99, 0x8f, 0x61, 0x53, 0xfc, 0x16, + 0x27, 0x43, 0x92, 0xd7, 0xf5, 0x02, 0x31, 0xb7, 0xe9, 0x22, 0xd5, 0x10, 0xb7, 0x79, 0x0a, 0x3f, + 0x1b, 0x60, 0x6e, 0xb8, 0x4d, 0x70, 0x65, 0xc2, 0x2a, 0xea, 0xd5, 0xad, 0xaf, 0x4e, 0x0e, 0x94, + 0x13, 0x61, 0xaf, 0x56, 0xca, 0x3d, 0xfb, 0x89, 0x50, 0x7e, 0x2e, 0xf5, 0xe4, 0xc2, 0x4f, 0xb0, + 0xde, 0x3c, 0x1d, 0x08, 0xf7, 0xb3, 0x8a, 0xc6, 0x37, 0x9a, 0x1b, 0xbf, 0x0c, 0xb0, 0xd8, 0x23, + 0xd9, 0xd8, 0xcc, 0x1b, 0xf3, 0xc3, 0xcd, 0xd9, 0x15, 0x0b, 0xb7, 0x6b, 0xbc, 0xde, 0x52, 0xc0, + 0x98, 0xa4, 0x28, 0x8f, 0x1d, 0x42, 0x63, 0x37, 0xc6, 0x79, 0xb5, 0x8e, 0xfa, 0xf7, 0x2d, 0x12, + 0x76, 0xf9, 0x3f, 0xbf, 0xa6, 0x8d, 0x0f, 0xe6, 0x54, 0xa7, 0xdd, 0xfe, 0x68, 0x36, 0x3a, 0x92, + 0xb0, 0x1d, 0x31, 0x47, 0x9a, 0xc2, 0xda, 0x6f, 0x39, 0x2a, 0x31, 0xfb, 0xa2, 0x43, 0xba, 0xed, + 0x88, 0x75, 0x07, 0x21, 0xdd, 0xfd, 0x56, 0x57, 0x87, 0xfc, 0x30, 0x17, 0xa5, 0xdf, 0xf7, 0xdb, + 0x11, 0xf3, 0xfd, 0x41, 0x90, 0xef, 0xef, 0xb7, 0x7c, 0x5f, 0x87, 0x1d, 0x4c, 0x57, 0x3a, 0x97, + 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x8c, 0x5f, 0x6a, 0x64, 0x8e, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_simulation_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_simulation_service.pb.go new file mode 100644 index 0000000..c672fe7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_group_simulation_service.pb.go @@ -0,0 +1,175 @@ +// Code 
generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_group_simulation_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdGroupSimulationService.GetAdGroupSimulation][google.ads.googleads.v1.services.AdGroupSimulationService.GetAdGroupSimulation]. +type GetAdGroupSimulationRequest struct { + // The resource name of the ad group simulation to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdGroupSimulationRequest) Reset() { *m = GetAdGroupSimulationRequest{} } +func (m *GetAdGroupSimulationRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdGroupSimulationRequest) ProtoMessage() {} +func (*GetAdGroupSimulationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_group_simulation_service_6eeb2b02d2d7da9e, []int{0} +} +func (m *GetAdGroupSimulationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdGroupSimulationRequest.Unmarshal(m, b) +} +func (m *GetAdGroupSimulationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdGroupSimulationRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdGroupSimulationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdGroupSimulationRequest.Merge(dst, src) +} +func (m *GetAdGroupSimulationRequest) XXX_Size() int { + return xxx_messageInfo_GetAdGroupSimulationRequest.Size(m) +} +func (m *GetAdGroupSimulationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdGroupSimulationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdGroupSimulationRequest proto.InternalMessageInfo + +func (m *GetAdGroupSimulationRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdGroupSimulationRequest)(nil), "google.ads.googleads.v1.services.GetAdGroupSimulationRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdGroupSimulationServiceClient is the client API for AdGroupSimulationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type AdGroupSimulationServiceClient interface { + // Returns the requested ad group simulation in full detail. + GetAdGroupSimulation(ctx context.Context, in *GetAdGroupSimulationRequest, opts ...grpc.CallOption) (*resources.AdGroupSimulation, error) +} + +type adGroupSimulationServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdGroupSimulationServiceClient(cc *grpc.ClientConn) AdGroupSimulationServiceClient { + return &adGroupSimulationServiceClient{cc} +} + +func (c *adGroupSimulationServiceClient) GetAdGroupSimulation(ctx context.Context, in *GetAdGroupSimulationRequest, opts ...grpc.CallOption) (*resources.AdGroupSimulation, error) { + out := new(resources.AdGroupSimulation) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdGroupSimulationService/GetAdGroupSimulation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdGroupSimulationServiceServer is the server API for AdGroupSimulationService service. +type AdGroupSimulationServiceServer interface { + // Returns the requested ad group simulation in full detail. + GetAdGroupSimulation(context.Context, *GetAdGroupSimulationRequest) (*resources.AdGroupSimulation, error) +} + +func RegisterAdGroupSimulationServiceServer(s *grpc.Server, srv AdGroupSimulationServiceServer) { + s.RegisterService(&_AdGroupSimulationService_serviceDesc, srv) +} + +func _AdGroupSimulationService_GetAdGroupSimulation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdGroupSimulationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdGroupSimulationServiceServer).GetAdGroupSimulation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdGroupSimulationService/GetAdGroupSimulation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdGroupSimulationServiceServer).GetAdGroupSimulation(ctx, req.(*GetAdGroupSimulationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdGroupSimulationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdGroupSimulationService", + HandlerType: (*AdGroupSimulationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdGroupSimulation", + Handler: _AdGroupSimulationService_GetAdGroupSimulation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_group_simulation_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_group_simulation_service.proto", fileDescriptor_ad_group_simulation_service_6eeb2b02d2d7da9e) +} + +var fileDescriptor_ad_group_simulation_service_6eeb2b02d2d7da9e = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcd, 0x4a, 0xeb, 0x40, + 0x14, 0x26, 0xb9, 0x70, 0xe1, 0x86, 0xeb, 0x26, 0xb8, 0x28, 0x55, 0xa1, 0xd4, 0x2e, 0xa4, 0x8b, + 0x19, 0xa2, 0xc5, 0xc5, 0xd4, 0x2e, 0x92, 0x4d, 0x5c, 0x49, 0x69, 0xa1, 0x0b, 0x09, 0x84, 0xb1, + 0x33, 0x0c, 0x81, 0x26, 0x13, 0xe7, 0x24, 0xdd, 0x88, 0x1b, 0xf1, 0x0d, 0x7c, 0x03, 0x97, 0xbe, + 0x89, 0x6e, 0x7d, 0x05, 0x71, 0xe1, 0x53, 0x48, 0x3a, 0x9d, 0x80, 0xd6, 0xd8, 0xdd, 0xc7, 0x9c, + 0xef, 0xe7, 0xcc, 0x37, 0xe3, 0x04, 0x42, 0x4a, 0xb1, 0xe0, 0x98, 0x32, 0xc0, 0x1a, 0x56, 0x68, + 0xe9, 0x61, 0xe0, 0x6a, 0x99, 0xcc, 0x39, 0x60, 
0xca, 0x62, 0xa1, 0x64, 0x99, 0xc7, 0x90, 0xa4, + 0xe5, 0x82, 0x16, 0x89, 0xcc, 0xe2, 0xf5, 0x10, 0xe5, 0x4a, 0x16, 0xd2, 0xed, 0x68, 0x21, 0xa2, + 0x0c, 0x50, 0xed, 0x81, 0x96, 0x1e, 0x32, 0x1e, 0xed, 0x61, 0x53, 0x8a, 0xe2, 0x20, 0x4b, 0xd5, + 0x10, 0xa3, 0xed, 0xdb, 0xfb, 0x46, 0x9c, 0x27, 0x98, 0x66, 0x99, 0x2c, 0x56, 0x43, 0xd0, 0xd3, + 0x6e, 0xe0, 0xec, 0x85, 0xbc, 0xf0, 0x59, 0x58, 0x89, 0xa7, 0xb5, 0x76, 0xc2, 0xaf, 0x4b, 0x0e, + 0x85, 0x7b, 0xe8, 0xec, 0x98, 0x8c, 0x38, 0xa3, 0x29, 0x6f, 0x59, 0x1d, 0xeb, 0xe8, 0xdf, 0xe4, + 0xbf, 0x39, 0xbc, 0xa0, 0x29, 0x3f, 0x7e, 0xb7, 0x9c, 0xd6, 0x86, 0xc3, 0x54, 0x2f, 0xef, 0x3e, + 0x5b, 0xce, 0xee, 0x4f, 0x09, 0xee, 0x08, 0x6d, 0xbb, 0x37, 0xfa, 0x65, 0xb3, 0xf6, 0xa0, 0x51, + 0x5e, 0x97, 0x82, 0x36, 0xc4, 0xdd, 0xb3, 0xbb, 0xd7, 0xb7, 0x07, 0xfb, 0xd4, 0x1d, 0x54, 0xed, + 0xdd, 0x7c, 0xb9, 0xda, 0x68, 0x5e, 0x42, 0x21, 0x53, 0xae, 0x00, 0xf7, 0x31, 0xfd, 0xae, 0x04, + 0xdc, 0xbf, 0x0d, 0xee, 0x6d, 0xa7, 0x37, 0x97, 0xe9, 0xd6, 0xc5, 0x83, 0x83, 0xa6, 0x3a, 0xc6, + 0x55, 0xe9, 0x63, 0xeb, 0xf2, 0x7c, 0x6d, 0x21, 0xe4, 0x82, 0x66, 0x02, 0x49, 0x25, 0xb0, 0xe0, + 0xd9, 0xea, 0x49, 0xcc, 0x0b, 0xe7, 0x09, 0x34, 0x7f, 0xab, 0xa1, 0x01, 0x8f, 0xf6, 0x9f, 0xd0, + 0xf7, 0x9f, 0xec, 0x4e, 0xa8, 0x0d, 0x7d, 0x06, 0x48, 0xc3, 0x0a, 0xcd, 0x3c, 0xb4, 0x0e, 0x86, + 0x17, 0x43, 0x89, 0x7c, 0x06, 0x51, 0x4d, 0x89, 0x66, 0x5e, 0x64, 0x28, 0x1f, 0x76, 0x4f, 0x9f, + 0x13, 0xe2, 0x33, 0x20, 0xa4, 0x26, 0x11, 0x32, 0xf3, 0x08, 0x31, 0xb4, 0xab, 0xbf, 0xab, 0x3d, + 0x4f, 0x3e, 0x03, 0x00, 0x00, 0xff, 0xff, 0xae, 0xdf, 0xb6, 0xd4, 0xfd, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_parameter_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_parameter_service.pb.go new file mode 100644 index 0000000..0330a4d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_parameter_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_parameter_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdParameterService.GetAdParameter][google.ads.googleads.v1.services.AdParameterService.GetAdParameter] +type GetAdParameterRequest struct { + // The resource name of the ad parameter to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdParameterRequest) Reset() { *m = GetAdParameterRequest{} } +func (m *GetAdParameterRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdParameterRequest) ProtoMessage() {} +func (*GetAdParameterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_service_06b4e4938dc4aa57, []int{0} +} +func (m *GetAdParameterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdParameterRequest.Unmarshal(m, b) +} +func (m *GetAdParameterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdParameterRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdParameterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdParameterRequest.Merge(dst, src) +} +func (m *GetAdParameterRequest) XXX_Size() int { + return xxx_messageInfo_GetAdParameterRequest.Size(m) +} +func (m *GetAdParameterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdParameterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdParameterRequest proto.InternalMessageInfo + +func (m *GetAdParameterRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AdParameterService.MutateAdParameters][google.ads.googleads.v1.services.AdParameterService.MutateAdParameters] +type MutateAdParametersRequest struct { + // The ID of the customer whose ad parameters are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual ad parameters. + Operations []*AdParameterOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdParametersRequest) Reset() { *m = MutateAdParametersRequest{} } +func (m *MutateAdParametersRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAdParametersRequest) ProtoMessage() {} +func (*MutateAdParametersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_service_06b4e4938dc4aa57, []int{1} +} +func (m *MutateAdParametersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdParametersRequest.Unmarshal(m, b) +} +func (m *MutateAdParametersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdParametersRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAdParametersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdParametersRequest.Merge(dst, src) +} +func (m *MutateAdParametersRequest) XXX_Size() int { + return xxx_messageInfo_MutateAdParametersRequest.Size(m) +} +func (m *MutateAdParametersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdParametersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdParametersRequest proto.InternalMessageInfo + +func (m *MutateAdParametersRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAdParametersRequest) GetOperations() []*AdParameterOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateAdParametersRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateAdParametersRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on ad parameter. +type AdParameterOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *AdParameterOperation_Create + // *AdParameterOperation_Update + // *AdParameterOperation_Remove + Operation isAdParameterOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdParameterOperation) Reset() { *m = AdParameterOperation{} } +func (m *AdParameterOperation) String() string { return proto.CompactTextString(m) } +func (*AdParameterOperation) ProtoMessage() {} +func (*AdParameterOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_service_06b4e4938dc4aa57, []int{2} +} +func (m *AdParameterOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdParameterOperation.Unmarshal(m, b) +} +func (m *AdParameterOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdParameterOperation.Marshal(b, m, deterministic) +} +func (dst *AdParameterOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdParameterOperation.Merge(dst, src) +} +func (m *AdParameterOperation) XXX_Size() int { + return xxx_messageInfo_AdParameterOperation.Size(m) +} +func (m *AdParameterOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AdParameterOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AdParameterOperation proto.InternalMessageInfo + +func (m *AdParameterOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isAdParameterOperation_Operation interface { + isAdParameterOperation_Operation() +} + +type AdParameterOperation_Create struct { + Create *resources.AdParameter `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type AdParameterOperation_Update struct { + Update *resources.AdParameter `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type AdParameterOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*AdParameterOperation_Create) isAdParameterOperation_Operation() {} + +func (*AdParameterOperation_Update) isAdParameterOperation_Operation() {} + +func (*AdParameterOperation_Remove) isAdParameterOperation_Operation() {} + +func (m *AdParameterOperation) GetOperation() isAdParameterOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AdParameterOperation) GetCreate() *resources.AdParameter { + if x, ok := m.GetOperation().(*AdParameterOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *AdParameterOperation) GetUpdate() *resources.AdParameter { + if x, ok := m.GetOperation().(*AdParameterOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *AdParameterOperation) GetRemove() string { + if x, ok := m.GetOperation().(*AdParameterOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AdParameterOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AdParameterOperation_OneofMarshaler, _AdParameterOperation_OneofUnmarshaler, _AdParameterOperation_OneofSizer, []interface{}{ + (*AdParameterOperation_Create)(nil), + (*AdParameterOperation_Update)(nil), + (*AdParameterOperation_Remove)(nil), + } +} + +func _AdParameterOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AdParameterOperation) + // operation + switch x := m.Operation.(type) { + case *AdParameterOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *AdParameterOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *AdParameterOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("AdParameterOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AdParameterOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AdParameterOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdParameter) + err := b.DecodeMessage(msg) + m.Operation = &AdParameterOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.AdParameter) + err := b.DecodeMessage(msg) + m.Operation = &AdParameterOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &AdParameterOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _AdParameterOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AdParameterOperation) + // operation + switch x := m.Operation.(type) { + case *AdParameterOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdParameterOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AdParameterOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an ad parameter mutate. +type MutateAdParametersResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateAdParameterResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdParametersResponse) Reset() { *m = MutateAdParametersResponse{} } +func (m *MutateAdParametersResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAdParametersResponse) ProtoMessage() {} +func (*MutateAdParametersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_service_06b4e4938dc4aa57, []int{3} +} +func (m *MutateAdParametersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdParametersResponse.Unmarshal(m, b) +} +func (m *MutateAdParametersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdParametersResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAdParametersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdParametersResponse.Merge(dst, src) +} +func (m *MutateAdParametersResponse) XXX_Size() int { + return xxx_messageInfo_MutateAdParametersResponse.Size(m) +} +func (m *MutateAdParametersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdParametersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdParametersResponse proto.InternalMessageInfo + +func (m *MutateAdParametersResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateAdParametersResponse) GetResults() []*MutateAdParameterResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the ad parameter mutate. +type MutateAdParameterResult struct { + // The resource name returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAdParameterResult) Reset() { *m = MutateAdParameterResult{} } +func (m *MutateAdParameterResult) String() string { return proto.CompactTextString(m) } +func (*MutateAdParameterResult) ProtoMessage() {} +func (*MutateAdParameterResult) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_parameter_service_06b4e4938dc4aa57, []int{4} +} +func (m *MutateAdParameterResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAdParameterResult.Unmarshal(m, b) +} +func (m *MutateAdParameterResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAdParameterResult.Marshal(b, m, deterministic) +} +func (dst *MutateAdParameterResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAdParameterResult.Merge(dst, src) +} +func (m *MutateAdParameterResult) XXX_Size() int { + return xxx_messageInfo_MutateAdParameterResult.Size(m) +} +func (m *MutateAdParameterResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAdParameterResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAdParameterResult proto.InternalMessageInfo + +func (m *MutateAdParameterResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdParameterRequest)(nil), "google.ads.googleads.v1.services.GetAdParameterRequest") + proto.RegisterType((*MutateAdParametersRequest)(nil), "google.ads.googleads.v1.services.MutateAdParametersRequest") + proto.RegisterType((*AdParameterOperation)(nil), "google.ads.googleads.v1.services.AdParameterOperation") + proto.RegisterType((*MutateAdParametersResponse)(nil), "google.ads.googleads.v1.services.MutateAdParametersResponse") + proto.RegisterType((*MutateAdParameterResult)(nil), "google.ads.googleads.v1.services.MutateAdParameterResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdParameterServiceClient is the client API for AdParameterService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdParameterServiceClient interface { + // Returns the requested ad parameter in full detail. + GetAdParameter(ctx context.Context, in *GetAdParameterRequest, opts ...grpc.CallOption) (*resources.AdParameter, error) + // Creates, updates, or removes ad parameters. Operation statuses are + // returned. + MutateAdParameters(ctx context.Context, in *MutateAdParametersRequest, opts ...grpc.CallOption) (*MutateAdParametersResponse, error) +} + +type adParameterServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdParameterServiceClient(cc *grpc.ClientConn) AdParameterServiceClient { + return &adParameterServiceClient{cc} +} + +func (c *adParameterServiceClient) GetAdParameter(ctx context.Context, in *GetAdParameterRequest, opts ...grpc.CallOption) (*resources.AdParameter, error) { + out := new(resources.AdParameter) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdParameterService/GetAdParameter", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *adParameterServiceClient) MutateAdParameters(ctx context.Context, in *MutateAdParametersRequest, opts ...grpc.CallOption) (*MutateAdParametersResponse, error) { + out := new(MutateAdParametersResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdParameterService/MutateAdParameters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdParameterServiceServer is the server API for AdParameterService service. +type AdParameterServiceServer interface { + // Returns the requested ad parameter in full detail. + GetAdParameter(context.Context, *GetAdParameterRequest) (*resources.AdParameter, error) + // Creates, updates, or removes ad parameters. Operation statuses are + // returned. + MutateAdParameters(context.Context, *MutateAdParametersRequest) (*MutateAdParametersResponse, error) +} + +func RegisterAdParameterServiceServer(s *grpc.Server, srv AdParameterServiceServer) { + s.RegisterService(&_AdParameterService_serviceDesc, srv) +} + +func _AdParameterService_GetAdParameter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdParameterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdParameterServiceServer).GetAdParameter(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdParameterService/GetAdParameter", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdParameterServiceServer).GetAdParameter(ctx, req.(*GetAdParameterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AdParameterService_MutateAdParameters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAdParametersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdParameterServiceServer).MutateAdParameters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdParameterService/MutateAdParameters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdParameterServiceServer).MutateAdParameters(ctx, req.(*MutateAdParametersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdParameterService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdParameterService", + HandlerType: (*AdParameterServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdParameter", + Handler: _AdParameterService_GetAdParameter_Handler, + }, + { + MethodName: "MutateAdParameters", + Handler: _AdParameterService_MutateAdParameters_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_parameter_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_parameter_service.proto", fileDescriptor_ad_parameter_service_06b4e4938dc4aa57) +} + +var fileDescriptor_ad_parameter_service_06b4e4938dc4aa57 = []byte{ + // 710 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0x4f, 0x6b, 0xd4, 0x4e, + 0x18, 0xfe, 0x25, 0xfb, 0xa3, 0xda, 0x49, 0xad, 0x30, 0x5a, 0xba, 0x2e, 0xa2, 0x4b, 0x2c, 0x58, + 0xf6, 0x30, 
0x71, 0x57, 0xa9, 0x98, 0x6d, 0x85, 0x2d, 0xd8, 0xd6, 0x43, 0x6d, 0x49, 0x61, 0x0f, + 0xb2, 0x10, 0xa6, 0x9b, 0xe9, 0x12, 0x9a, 0x64, 0xe2, 0xcc, 0x64, 0xa5, 0x94, 0x5e, 0xfc, 0x00, + 0x5e, 0xbc, 0x78, 0xf6, 0xe8, 0xcd, 0xa3, 0x5f, 0x41, 0xf0, 0xe4, 0x37, 0x10, 0x4f, 0x7e, 0x08, + 0x91, 0xc9, 0x64, 0xd6, 0x6c, 0xdb, 0x65, 0x75, 0x6f, 0x6f, 0xde, 0x79, 0x9e, 0x67, 0xde, 0xbf, + 0x13, 0xd0, 0x1e, 0x50, 0x3a, 0x88, 0x88, 0x83, 0x03, 0xee, 0x28, 0x53, 0x5a, 0xc3, 0xa6, 0xc3, + 0x09, 0x1b, 0x86, 0x7d, 0xc2, 0x1d, 0x1c, 0xf8, 0x29, 0x66, 0x38, 0x26, 0x82, 0x30, 0xbf, 0xf0, + 0xa2, 0x94, 0x51, 0x41, 0x61, 0x5d, 0x31, 0x10, 0x0e, 0x38, 0x1a, 0x91, 0xd1, 0xb0, 0x89, 0x34, + 0xb9, 0xf6, 0x68, 0x92, 0x3c, 0x23, 0x9c, 0x66, 0xec, 0xbc, 0xbe, 0xd2, 0xad, 0xdd, 0xd6, 0xac, + 0x34, 0x74, 0x70, 0x92, 0x50, 0x81, 0x45, 0x48, 0x13, 0x5e, 0x9c, 0x16, 0xb7, 0x3a, 0xf9, 0xd7, + 0x61, 0x76, 0xe4, 0x1c, 0x85, 0x24, 0x0a, 0xfc, 0x18, 0xf3, 0xe3, 0x02, 0x71, 0xe7, 0x3c, 0xe2, + 0x35, 0xc3, 0x69, 0x4a, 0x98, 0x56, 0x58, 0x2e, 0xce, 0x59, 0xda, 0x77, 0xb8, 0xc0, 0x22, 0x2b, + 0x0e, 0xec, 0x75, 0xb0, 0xb4, 0x4d, 0x44, 0x27, 0xd8, 0xd7, 0x01, 0x79, 0xe4, 0x55, 0x46, 0xb8, + 0x80, 0xf7, 0xc0, 0x35, 0x1d, 0xb1, 0x9f, 0xe0, 0x98, 0x54, 0x8d, 0xba, 0xb1, 0x3a, 0xef, 0x2d, + 0x68, 0xe7, 0x0b, 0x1c, 0x13, 0xfb, 0xbb, 0x01, 0x6e, 0xed, 0x66, 0x02, 0x0b, 0x52, 0x52, 0xe0, + 0x5a, 0xe2, 0x2e, 0xb0, 0xfa, 0x19, 0x17, 0x34, 0x26, 0xcc, 0x0f, 0x83, 0x42, 0x00, 0x68, 0xd7, + 0xf3, 0x00, 0x76, 0x01, 0xa0, 0x29, 0x61, 0x2a, 0xd7, 0xaa, 0x59, 0xaf, 0xac, 0x5a, 0xad, 0x35, + 0x34, 0xad, 0xc4, 0xa8, 0x74, 0xd7, 0x9e, 0xa6, 0x7b, 0x25, 0x25, 0x78, 0x1f, 0x5c, 0x4f, 0x31, + 0x13, 0x21, 0x8e, 0xfc, 0x23, 0x1c, 0x46, 0x19, 0x23, 0xd5, 0x4a, 0xdd, 0x58, 0xbd, 0xea, 0x2d, + 0x16, 0xee, 0x2d, 0xe5, 0x95, 0x49, 0x0e, 0x71, 0x14, 0x06, 0x58, 0x10, 0x9f, 0x26, 0xd1, 0x49, + 0xf5, 0xff, 0x1c, 0xb6, 0xa0, 0x9d, 0x7b, 0x49, 0x74, 0x62, 0xbf, 0x35, 0xc1, 0xcd, 0xcb, 0xae, + 0x84, 0x6d, 0x60, 0x65, 0x69, 0xce, 0x95, 0x9d, 0xc8, 0xb9, 0x56, 0xab, 0xa6, 0xe3, 0xd7, 0xad, + 0x40, 0x5b, 0xb2, 0x59, 0xbb, 0x98, 0x1f, 0x7b, 0x40, 0xc1, 0xa5, 0x0d, 0x77, 0xc0, 0x5c, 0x9f, + 0x11, 0x2c, 0x54, 0x61, 0xad, 0x16, 0x9a, 0x98, 0xf7, 0x68, 0x70, 0xca, 0x89, 0xef, 0xfc, 0xe7, + 0x15, 0x7c, 0xa9, 0xa4, 0x74, 0xab, 0xe6, 0xac, 0x4a, 0x8a, 0x0f, 0xab, 0x60, 0x8e, 0x91, 0x98, + 0x0e, 0x55, 0xb9, 0xe6, 0xe5, 0x89, 0xfa, 0xde, 0xb4, 0xc0, 0xfc, 0xa8, 0xbe, 0xf6, 0x67, 0x03, + 0xd4, 0x2e, 0xeb, 0x3a, 0x4f, 0x69, 0xc2, 0x09, 0xdc, 0x02, 0x4b, 0xe7, 0xaa, 0xef, 0x13, 0xc6, + 0x28, 0xcb, 0x45, 0xad, 0x16, 0xd4, 0xe1, 0xb1, 0xb4, 0x8f, 0x0e, 0xf2, 0x59, 0xf4, 0x6e, 0x8c, + 0xf7, 0xe5, 0x99, 0x84, 0xc3, 0x03, 0x70, 0x85, 0x11, 0x9e, 0x45, 0x42, 0x8f, 0xc6, 0x93, 0xe9, + 0xa3, 0x71, 0x21, 0x2c, 0x2f, 0x57, 0xf0, 0xb4, 0x92, 0xfd, 0x14, 0x2c, 0x4f, 0xc0, 0xfc, 0xd5, + 0xc4, 0xb7, 0xde, 0x57, 0x00, 0x2c, 0x51, 0x0f, 0xd4, 0xc5, 0xf0, 0x93, 0x01, 0x16, 0xc7, 0xf7, + 0x08, 0x3e, 0x9e, 0x1e, 0xed, 0xa5, 0x9b, 0x57, 0xfb, 0xc7, 0xfe, 0xd9, 0x6b, 0x6f, 0xbe, 0xfd, + 0x78, 0x67, 0x3e, 0x80, 0x48, 0xbe, 0x32, 0xa7, 0x63, 0x29, 0x6c, 0xe8, 0x65, 0xe3, 0x4e, 0xc3, + 0xc1, 0xa5, 0x66, 0x39, 0x8d, 0x33, 0xf8, 0xd5, 0x00, 0xf0, 0x62, 0x1b, 0x61, 0x7b, 0x86, 0x2a, + 0xeb, 0x95, 0xaf, 0xad, 0xcf, 0x46, 0x56, 0x93, 0x63, 0xaf, 0xe7, 0x99, 0xac, 0xd9, 0x4d, 0x99, + 0xc9, 0x9f, 0xd0, 0x4f, 0x4b, 0xaf, 0xc8, 0x46, 0xe3, 0x6c, 0x2c, 0x11, 0x37, 0xce, 0xe5, 0x5c, + 0xa3, 0xb1, 0xf9, 0xcb, 0x00, 0x2b, 0x7d, 0x1a, 0x4f, 0x8d, 0x60, 0x73, 0xf9, 0x62, 0x03, 0xf7, + 0xe5, 0xb2, 0xee, 0x1b, 0x2f, 0x77, 
0x0a, 0xf2, 0x80, 0x46, 0x38, 0x19, 0x20, 0xca, 0x06, 0xce, + 0x80, 0x24, 0xf9, 0x2a, 0xeb, 0xd7, 0x3c, 0x0d, 0xf9, 0xe4, 0x7f, 0x47, 0x5b, 0x1b, 0x1f, 0xcc, + 0xca, 0x76, 0xa7, 0xf3, 0xd1, 0xac, 0x6f, 0x2b, 0xc1, 0x4e, 0xc0, 0x91, 0x32, 0xa5, 0xd5, 0x6d, + 0xa2, 0xe2, 0x62, 0xfe, 0x45, 0x43, 0x7a, 0x9d, 0x80, 0xf7, 0x46, 0x90, 0x5e, 0xb7, 0xd9, 0xd3, + 0x90, 0x9f, 0xe6, 0x8a, 0xf2, 0xbb, 0x6e, 0x27, 0xe0, 0xae, 0x3b, 0x02, 0xb9, 0x6e, 0xb7, 0xe9, + 0xba, 0x1a, 0x76, 0x38, 0x97, 0xc7, 0xf9, 0xf0, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x33, 0xf9, + 0x55, 0x2d, 0xe2, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_schedule_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_schedule_view_service.pb.go new file mode 100644 index 0000000..a84bbc4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/ad_schedule_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/ad_schedule_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AdScheduleViewService.GetAdScheduleView][google.ads.googleads.v1.services.AdScheduleViewService.GetAdScheduleView]. +type GetAdScheduleViewRequest struct { + // The resource name of the ad schedule view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAdScheduleViewRequest) Reset() { *m = GetAdScheduleViewRequest{} } +func (m *GetAdScheduleViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetAdScheduleViewRequest) ProtoMessage() {} +func (*GetAdScheduleViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_ad_schedule_view_service_e3a15fb179c0103d, []int{0} +} +func (m *GetAdScheduleViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAdScheduleViewRequest.Unmarshal(m, b) +} +func (m *GetAdScheduleViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAdScheduleViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetAdScheduleViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAdScheduleViewRequest.Merge(dst, src) +} +func (m *GetAdScheduleViewRequest) XXX_Size() int { + return xxx_messageInfo_GetAdScheduleViewRequest.Size(m) +} +func (m *GetAdScheduleViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAdScheduleViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAdScheduleViewRequest proto.InternalMessageInfo + +func (m *GetAdScheduleViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAdScheduleViewRequest)(nil), "google.ads.googleads.v1.services.GetAdScheduleViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AdScheduleViewServiceClient is the client API for AdScheduleViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AdScheduleViewServiceClient interface { + // Returns the requested ad schedule view in full detail. + GetAdScheduleView(ctx context.Context, in *GetAdScheduleViewRequest, opts ...grpc.CallOption) (*resources.AdScheduleView, error) +} + +type adScheduleViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewAdScheduleViewServiceClient(cc *grpc.ClientConn) AdScheduleViewServiceClient { + return &adScheduleViewServiceClient{cc} +} + +func (c *adScheduleViewServiceClient) GetAdScheduleView(ctx context.Context, in *GetAdScheduleViewRequest, opts ...grpc.CallOption) (*resources.AdScheduleView, error) { + out := new(resources.AdScheduleView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AdScheduleViewService/GetAdScheduleView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AdScheduleViewServiceServer is the server API for AdScheduleViewService service. +type AdScheduleViewServiceServer interface { + // Returns the requested ad schedule view in full detail. 
+ GetAdScheduleView(context.Context, *GetAdScheduleViewRequest) (*resources.AdScheduleView, error) +} + +func RegisterAdScheduleViewServiceServer(s *grpc.Server, srv AdScheduleViewServiceServer) { + s.RegisterService(&_AdScheduleViewService_serviceDesc, srv) +} + +func _AdScheduleViewService_GetAdScheduleView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAdScheduleViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AdScheduleViewServiceServer).GetAdScheduleView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AdScheduleViewService/GetAdScheduleView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AdScheduleViewServiceServer).GetAdScheduleView(ctx, req.(*GetAdScheduleViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AdScheduleViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AdScheduleViewService", + HandlerType: (*AdScheduleViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAdScheduleView", + Handler: _AdScheduleViewService_GetAdScheduleView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/ad_schedule_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/ad_schedule_view_service.proto", fileDescriptor_ad_schedule_view_service_e3a15fb179c0103d) +} + +var fileDescriptor_ad_schedule_view_service_e3a15fb179c0103d = []byte{ + // 366 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xbf, 0x4a, 0xc3, 0x40, + 0x1c, 0xc7, 0x49, 0x04, 0xc1, 0xa0, 0x83, 0x01, 0xa1, 0x04, 0x87, 0x52, 0x3b, 0x48, 0x87, 0x3b, + 0x62, 0x17, 0x3d, 0x91, 0x92, 0x2e, 0x75, 0x92, 0xd2, 0x42, 0x06, 0x09, 0x84, 0x33, 0x77, 0xc4, + 0x40, 0x93, 0xab, 0xf9, 0x25, 0xe9, 0x20, 0x0e, 0xfa, 0x0a, 0xbe, 0x81, 0xa3, 0xef, 0xe0, 0x0b, + 0xb8, 0x3a, 0xf8, 0x02, 0x4e, 0x3e, 0x85, 0xa4, 0x97, 0x0b, 0x04, 0x1b, 0xba, 0x7d, 0xb9, 0xdf, + 0xf7, 0xf3, 0xfd, 0xfd, 0x49, 0x8c, 0x51, 0x28, 0x44, 0xb8, 0xe0, 0x98, 0x32, 0xc0, 0x52, 0x96, + 0xaa, 0xb0, 0x31, 0xf0, 0xb4, 0x88, 0x02, 0x0e, 0x98, 0x32, 0x1f, 0x82, 0x7b, 0xce, 0xf2, 0x05, + 0xf7, 0x8b, 0x88, 0xaf, 0xfc, 0xaa, 0x82, 0x96, 0xa9, 0xc8, 0x84, 0xd9, 0x95, 0x14, 0xa2, 0x0c, + 0x50, 0x1d, 0x80, 0x0a, 0x1b, 0xa9, 0x00, 0xeb, 0xbc, 0xad, 0x45, 0xca, 0x41, 0xe4, 0xe9, 0xa6, + 0x1e, 0x32, 0xdb, 0x3a, 0x56, 0xe4, 0x32, 0xc2, 0x34, 0x49, 0x44, 0x46, 0xb3, 0x48, 0x24, 0x20, + 0xab, 0xbd, 0x91, 0xd1, 0x99, 0xf0, 0xcc, 0x61, 0xf3, 0x8a, 0x74, 0x23, 0xbe, 0x9a, 0xf1, 0x87, + 0x9c, 0x43, 0x66, 0x9e, 0x18, 0x07, 0x2a, 0xdd, 0x4f, 0x68, 0xcc, 0x3b, 0x5a, 0x57, 0x3b, 0xdd, + 0x9b, 0xed, 0xab, 0xc7, 0x1b, 0x1a, 0xf3, 0xb3, 0x6f, 0xcd, 0x38, 0x6a, 0xe2, 0x73, 0x39, 0xb3, + 0xf9, 0xa1, 0x19, 0x87, 0xff, 0xb2, 0x4d, 0x82, 0xb6, 0xed, 0x8a, 0xda, 0x06, 0xb2, 0xec, 0x56, + 0xb6, 0xbe, 0x02, 0x6a, 0x92, 0xbd, 0x8b, 0x97, 0xaf, 0x9f, 0x57, 0x7d, 0x68, 0xda, 0xe5, 0xad, + 0x1e, 0x1b, 0xeb, 0x5c, 0x05, 0x39, 0x64, 0x22, 0xe6, 0x29, 0xe0, 0x01, 0xa6, 0x0d, 0x0c, 0xf0, + 0xe0, 0x69, 0xfc, 0xac, 0x1b, 0xfd, 0x40, 0xc4, 0x5b, 0xe7, 0x1d, 0x5b, 0x1b, 0xf7, 0x9f, 0x96, + 0xf7, 0x9d, 0x6a, 0xb7, 0xd7, 0x15, 0x1f, 0x8a, 0x05, 0x4d, 0x42, 0x24, 0xd2, 0x10, 0x87, 0x3c, + 0x59, 0x5f, 0x5f, 0x7d, 0xc9, 0x65, 0x04, 
0xed, 0xff, 0xce, 0xa5, 0x12, 0x6f, 0xfa, 0xce, 0xc4, + 0x71, 0xde, 0xf5, 0xee, 0x44, 0x06, 0x3a, 0x0c, 0x90, 0x94, 0xa5, 0x72, 0x6d, 0x54, 0x35, 0x86, + 0x4f, 0x65, 0xf1, 0x1c, 0x06, 0x5e, 0x6d, 0xf1, 0x5c, 0xdb, 0x53, 0x96, 0x5f, 0xbd, 0x2f, 0xdf, + 0x09, 0x71, 0x18, 0x10, 0x52, 0x9b, 0x08, 0x71, 0x6d, 0x42, 0x94, 0xed, 0x6e, 0x77, 0x3d, 0xe7, + 0xf0, 0x2f, 0x00, 0x00, 0xff, 0xff, 0xcc, 0x00, 0x12, 0xd8, 0xe2, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/age_range_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/age_range_view_service.pb.go new file mode 100644 index 0000000..4014263 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/age_range_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/age_range_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AgeRangeViewService.GetAgeRangeView][google.ads.googleads.v1.services.AgeRangeViewService.GetAgeRangeView]. +type GetAgeRangeViewRequest struct { + // The resource name of the age range view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAgeRangeViewRequest) Reset() { *m = GetAgeRangeViewRequest{} } +func (m *GetAgeRangeViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetAgeRangeViewRequest) ProtoMessage() {} +func (*GetAgeRangeViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_age_range_view_service_77fc07ff34ca1f9c, []int{0} +} +func (m *GetAgeRangeViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAgeRangeViewRequest.Unmarshal(m, b) +} +func (m *GetAgeRangeViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAgeRangeViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetAgeRangeViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAgeRangeViewRequest.Merge(dst, src) +} +func (m *GetAgeRangeViewRequest) XXX_Size() int { + return xxx_messageInfo_GetAgeRangeViewRequest.Size(m) +} +func (m *GetAgeRangeViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAgeRangeViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAgeRangeViewRequest proto.InternalMessageInfo + +func (m *GetAgeRangeViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAgeRangeViewRequest)(nil), "google.ads.googleads.v1.services.GetAgeRangeViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AgeRangeViewServiceClient is the client API for AgeRangeViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AgeRangeViewServiceClient interface { + // Returns the requested age range view in full detail. + GetAgeRangeView(ctx context.Context, in *GetAgeRangeViewRequest, opts ...grpc.CallOption) (*resources.AgeRangeView, error) +} + +type ageRangeViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewAgeRangeViewServiceClient(cc *grpc.ClientConn) AgeRangeViewServiceClient { + return &ageRangeViewServiceClient{cc} +} + +func (c *ageRangeViewServiceClient) GetAgeRangeView(ctx context.Context, in *GetAgeRangeViewRequest, opts ...grpc.CallOption) (*resources.AgeRangeView, error) { + out := new(resources.AgeRangeView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AgeRangeViewService/GetAgeRangeView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AgeRangeViewServiceServer is the server API for AgeRangeViewService service. +type AgeRangeViewServiceServer interface { + // Returns the requested age range view in full detail. 
+ GetAgeRangeView(context.Context, *GetAgeRangeViewRequest) (*resources.AgeRangeView, error) +} + +func RegisterAgeRangeViewServiceServer(s *grpc.Server, srv AgeRangeViewServiceServer) { + s.RegisterService(&_AgeRangeViewService_serviceDesc, srv) +} + +func _AgeRangeViewService_GetAgeRangeView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAgeRangeViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgeRangeViewServiceServer).GetAgeRangeView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AgeRangeViewService/GetAgeRangeView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgeRangeViewServiceServer).GetAgeRangeView(ctx, req.(*GetAgeRangeViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AgeRangeViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AgeRangeViewService", + HandlerType: (*AgeRangeViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAgeRangeView", + Handler: _AgeRangeViewService_GetAgeRangeView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/age_range_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/age_range_view_service.proto", fileDescriptor_age_range_view_service_77fc07ff34ca1f9c) +} + +var fileDescriptor_age_range_view_service_77fc07ff34ca1f9c = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x3f, 0x4b, 0xc3, 0x40, + 0x18, 0xc6, 0x49, 0x04, 0xc1, 0xa0, 0x08, 0x11, 0xa4, 0x14, 0x87, 0x52, 0x3b, 0x48, 0x87, 0x3b, + 0xa2, 0xa0, 0x72, 0xd2, 0x21, 0x5d, 0xea, 0x24, 0xa5, 0x42, 0x06, 0x09, 0x84, 0xb3, 0x79, 0x39, + 0x02, 0x4d, 0xae, 0xde, 0x9b, 0xa6, 0x83, 0xb8, 0xf8, 0x15, 0xfc, 0x06, 0x8e, 0xee, 0x7e, 0x09, + 0xc1, 0xc9, 0xaf, 0xe0, 0xe4, 0x97, 0x50, 0xd2, 0xeb, 0x85, 0xaa, 0x2d, 0xdd, 0x1e, 0xde, 0x3c, + 0xbf, 0xf7, 0xcf, 0x93, 0x73, 0x3a, 0x42, 0x4a, 0x31, 0x02, 0xca, 0x63, 0xa4, 0x5a, 0x96, 0xaa, + 0xf0, 0x28, 0x82, 0x2a, 0x92, 0x21, 0x20, 0xe5, 0x02, 0x22, 0xc5, 0x33, 0x01, 0x51, 0x91, 0xc0, + 0x34, 0x9a, 0xd7, 0xc9, 0x58, 0xc9, 0x5c, 0xba, 0x0d, 0xcd, 0x10, 0x1e, 0x23, 0xa9, 0x70, 0x52, + 0x78, 0xc4, 0xe0, 0xf5, 0xd3, 0x55, 0x03, 0x14, 0xa0, 0x9c, 0xa8, 0xff, 0x13, 0x74, 0xe7, 0xfa, + 0x81, 0xe1, 0xc6, 0x09, 0xe5, 0x59, 0x26, 0x73, 0x9e, 0x27, 0x32, 0x43, 0xfd, 0xb5, 0xd9, 0x71, + 0xf6, 0x7b, 0x90, 0xfb, 0x02, 0x06, 0x25, 0x17, 0x24, 0x30, 0x1d, 0xc0, 0xdd, 0x04, 0x30, 0x77, + 0x0f, 0x9d, 0x1d, 0xd3, 0x39, 0xca, 0x78, 0x0a, 0x35, 0xab, 0x61, 0x1d, 0x6d, 0x0d, 0xb6, 0x4d, + 0xf1, 0x8a, 0xa7, 0x70, 0xfc, 0x6e, 0x39, 0x7b, 0x8b, 0xf0, 0xb5, 0xde, 0xd6, 0x7d, 0xb5, 0x9c, + 0xdd, 0x3f, 0x7d, 0xdd, 0x73, 0xb2, 0xee, 0x46, 0xb2, 0x7c, 0x95, 0x3a, 0x5d, 0x49, 0x56, 0xb7, + 0x93, 0x45, 0xae, 0x79, 0xf6, 0xf8, 0xf1, 0xf9, 0x64, 0x7b, 0x2e, 0x2d, 0xf3, 0xb9, 0xff, 0x75, + 0x46, 0x67, 0x38, 0xc1, 0x5c, 0xa6, 0xa0, 0x90, 0xb6, 0xcb, 0xc0, 0x2a, 0x08, 0x69, 0xfb, 0xa1, + 0xfb, 0x6d, 0x39, 0xad, 0xa1, 0x4c, 0xd7, 0x6e, 0xda, 0xad, 0x2d, 0xb9, 0xba, 0x5f, 0x26, 0xda, + 0xb7, 0x6e, 0x2e, 0xe7, 0xb4, 0x90, 0x23, 0x9e, 0x09, 0x22, 0x95, 0xa0, 0x02, 0xb2, 0x59, 0xde, + 0xe6, 0xcf, 0x8d, 0x13, 0x5c, 0xfd, 0x52, 0x2e, 0x8c, 0x78, 0xb6, 0x37, 0x7a, 0xbe, 0xff, 
0x62, + 0x37, 0x7a, 0xba, 0xa1, 0x1f, 0x23, 0xd1, 0xb2, 0x54, 0x81, 0x47, 0xe6, 0x83, 0xf1, 0xcd, 0x58, + 0x42, 0x3f, 0xc6, 0xb0, 0xb2, 0x84, 0x81, 0x17, 0x1a, 0xcb, 0x97, 0xdd, 0xd2, 0x75, 0xc6, 0xfc, + 0x18, 0x19, 0xab, 0x4c, 0x8c, 0x05, 0x1e, 0x63, 0xc6, 0x76, 0xbb, 0x39, 0xdb, 0xf3, 0xe4, 0x27, + 0x00, 0x00, 0xff, 0xff, 0x02, 0x12, 0xf3, 0x69, 0xd0, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/asset_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/asset_service.pb.go new file mode 100644 index 0000000..51aaf4f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/asset_service.pb.go @@ -0,0 +1,468 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/asset_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AssetService.GetAsset][google.ads.googleads.v1.services.AssetService.GetAsset] +type GetAssetRequest struct { + // The resource name of the asset to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAssetRequest) Reset() { *m = GetAssetRequest{} } +func (m *GetAssetRequest) String() string { return proto.CompactTextString(m) } +func (*GetAssetRequest) ProtoMessage() {} +func (*GetAssetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_f6d94be756c262a6, []int{0} +} +func (m *GetAssetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAssetRequest.Unmarshal(m, b) +} +func (m *GetAssetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAssetRequest.Marshal(b, m, deterministic) +} +func (dst *GetAssetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAssetRequest.Merge(dst, src) +} +func (m *GetAssetRequest) XXX_Size() int { + return xxx_messageInfo_GetAssetRequest.Size(m) +} +func (m *GetAssetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAssetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAssetRequest proto.InternalMessageInfo + +func (m *GetAssetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [AssetService.MutateAssets][google.ads.googleads.v1.services.AssetService.MutateAssets] +type MutateAssetsRequest struct { + // The ID of the customer whose assets are being modified. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual assets. + Operations []*AssetOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAssetsRequest) Reset() { *m = MutateAssetsRequest{} } +func (m *MutateAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateAssetsRequest) ProtoMessage() {} +func (*MutateAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_f6d94be756c262a6, []int{1} +} +func (m *MutateAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAssetsRequest.Unmarshal(m, b) +} +func (m *MutateAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAssetsRequest.Merge(dst, src) +} +func (m *MutateAssetsRequest) XXX_Size() int { + return xxx_messageInfo_MutateAssetsRequest.Size(m) +} +func (m *MutateAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAssetsRequest proto.InternalMessageInfo + +func (m *MutateAssetsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateAssetsRequest) GetOperations() []*AssetOperation { + if m != nil { + return m.Operations + } + return nil +} + +// A single operation to create an asset. +type AssetOperation struct { + // The mutate operation. + // + // Types that are valid to be assigned to Operation: + // *AssetOperation_Create + Operation isAssetOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssetOperation) Reset() { *m = AssetOperation{} } +func (m *AssetOperation) String() string { return proto.CompactTextString(m) } +func (*AssetOperation) ProtoMessage() {} +func (*AssetOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_f6d94be756c262a6, []int{2} +} +func (m *AssetOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssetOperation.Unmarshal(m, b) +} +func (m *AssetOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssetOperation.Marshal(b, m, deterministic) +} +func (dst *AssetOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssetOperation.Merge(dst, src) +} +func (m *AssetOperation) XXX_Size() int { + return xxx_messageInfo_AssetOperation.Size(m) +} +func (m *AssetOperation) XXX_DiscardUnknown() { + xxx_messageInfo_AssetOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_AssetOperation proto.InternalMessageInfo + +type isAssetOperation_Operation interface { + isAssetOperation_Operation() +} + +type AssetOperation_Create struct { + Create *resources.Asset `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +func (*AssetOperation_Create) isAssetOperation_Operation() {} + +func (m *AssetOperation) GetOperation() isAssetOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *AssetOperation) GetCreate() *resources.Asset { + if x, ok := m.GetOperation().(*AssetOperation_Create); ok { + return x.Create + } + return nil +} 
+ +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AssetOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AssetOperation_OneofMarshaler, _AssetOperation_OneofUnmarshaler, _AssetOperation_OneofSizer, []interface{}{ + (*AssetOperation_Create)(nil), + } +} + +func _AssetOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AssetOperation) + // operation + switch x := m.Operation.(type) { + case *AssetOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AssetOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _AssetOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AssetOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Asset) + err := b.DecodeMessage(msg) + m.Operation = &AssetOperation_Create{msg} + return true, err + default: + return false, nil + } +} + +func _AssetOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AssetOperation) + // operation + switch x := m.Operation.(type) { + case *AssetOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an asset mutate. +type MutateAssetsResponse struct { + // All results for the mutate. + Results []*MutateAssetResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAssetsResponse) Reset() { *m = MutateAssetsResponse{} } +func (m *MutateAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateAssetsResponse) ProtoMessage() {} +func (*MutateAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_f6d94be756c262a6, []int{3} +} +func (m *MutateAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAssetsResponse.Unmarshal(m, b) +} +func (m *MutateAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAssetsResponse.Merge(dst, src) +} +func (m *MutateAssetsResponse) XXX_Size() int { + return xxx_messageInfo_MutateAssetsResponse.Size(m) +} +func (m *MutateAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAssetsResponse proto.InternalMessageInfo + +func (m *MutateAssetsResponse) GetResults() []*MutateAssetResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the asset mutate. +type MutateAssetResult struct { + // The resource name returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateAssetResult) Reset() { *m = MutateAssetResult{} } +func (m *MutateAssetResult) String() string { return proto.CompactTextString(m) } +func (*MutateAssetResult) ProtoMessage() {} +func (*MutateAssetResult) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_f6d94be756c262a6, []int{4} +} +func (m *MutateAssetResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateAssetResult.Unmarshal(m, b) +} +func (m *MutateAssetResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateAssetResult.Marshal(b, m, deterministic) +} +func (dst *MutateAssetResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateAssetResult.Merge(dst, src) +} +func (m *MutateAssetResult) XXX_Size() int { + return xxx_messageInfo_MutateAssetResult.Size(m) +} +func (m *MutateAssetResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateAssetResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateAssetResult proto.InternalMessageInfo + +func (m *MutateAssetResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetAssetRequest)(nil), "google.ads.googleads.v1.services.GetAssetRequest") + proto.RegisterType((*MutateAssetsRequest)(nil), "google.ads.googleads.v1.services.MutateAssetsRequest") + proto.RegisterType((*AssetOperation)(nil), "google.ads.googleads.v1.services.AssetOperation") + proto.RegisterType((*MutateAssetsResponse)(nil), "google.ads.googleads.v1.services.MutateAssetsResponse") + proto.RegisterType((*MutateAssetResult)(nil), "google.ads.googleads.v1.services.MutateAssetResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AssetServiceClient is the client API for AssetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AssetServiceClient interface { + // Returns the requested asset in full detail. + GetAsset(ctx context.Context, in *GetAssetRequest, opts ...grpc.CallOption) (*resources.Asset, error) + // Creates assets. Operation statuses are returned. + MutateAssets(ctx context.Context, in *MutateAssetsRequest, opts ...grpc.CallOption) (*MutateAssetsResponse, error) +} + +type assetServiceClient struct { + cc *grpc.ClientConn +} + +func NewAssetServiceClient(cc *grpc.ClientConn) AssetServiceClient { + return &assetServiceClient{cc} +} + +func (c *assetServiceClient) GetAsset(ctx context.Context, in *GetAssetRequest, opts ...grpc.CallOption) (*resources.Asset, error) { + out := new(resources.Asset) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AssetService/GetAsset", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *assetServiceClient) MutateAssets(ctx context.Context, in *MutateAssetsRequest, opts ...grpc.CallOption) (*MutateAssetsResponse, error) { + out := new(MutateAssetsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.AssetService/MutateAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AssetServiceServer is the server API for AssetService service. +type AssetServiceServer interface { + // Returns the requested asset in full detail. + GetAsset(context.Context, *GetAssetRequest) (*resources.Asset, error) + // Creates assets. Operation statuses are returned. + MutateAssets(context.Context, *MutateAssetsRequest) (*MutateAssetsResponse, error) +} + +func RegisterAssetServiceServer(s *grpc.Server, srv AssetServiceServer) { + s.RegisterService(&_AssetService_serviceDesc, srv) +} + +func _AssetService_GetAsset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAssetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).GetAsset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AssetService/GetAsset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).GetAsset(ctx, req.(*GetAssetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AssetService_MutateAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).MutateAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.AssetService/MutateAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).MutateAssets(ctx, req.(*MutateAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AssetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.AssetService", + HandlerType: (*AssetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAsset", + Handler: _AssetService_GetAsset_Handler, + }, + { + MethodName: "MutateAssets", + Handler: _AssetService_MutateAssets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/asset_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/asset_service.proto", fileDescriptor_asset_service_f6d94be756c262a6) +} + +var fileDescriptor_asset_service_f6d94be756c262a6 = []byte{ + // 520 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xbf, 0x6f, 0xd3, 0x40, + 0x14, 0xc6, 0xae, 0x54, 0xe8, 0x25, 0x80, 0x7a, 0x30, 0x54, 0x11, 0x12, 0x91, 0xe9, 0x10, 0x19, + 0x71, 0x8e, 0x53, 0x88, 0xd0, 0xa1, 0x0e, 0xce, 0x92, 0x32, 0x14, 0x22, 0x23, 0x65, 0x40, 0x91, + 0xa2, 0x23, 0x3e, 0x59, 0x96, 0x62, 0x9f, 0xf1, 0x3b, 0x67, 0xa9, 0xba, 0x30, 0xb1, 0x33, 0xb1, + 0x32, 0xb2, 0xf3, 0x4f, 0xb0, 0xf2, 0x1f, 0x20, 0x26, 0xfe, 0x02, 0x46, 0x64, 0x9f, 0xcf, 0x38, + 0x54, 0x51, 0x9a, 0xed, 0xf9, 0xee, 0xfb, 0xbe, 0xf7, 0xbd, 0x1f, 0x67, 
0xf4, 0x34, 0x14, 0x22, + 0x5c, 0x72, 0x87, 0x05, 0xe0, 0xa8, 0xb0, 0x88, 0x56, 0xae, 0x03, 0x3c, 0x5b, 0x45, 0x0b, 0x0e, + 0x0e, 0x03, 0xe0, 0x72, 0x5e, 0x7d, 0x92, 0x34, 0x13, 0x52, 0xe0, 0xae, 0x82, 0x12, 0x16, 0x00, + 0xa9, 0x59, 0x64, 0xe5, 0x12, 0xcd, 0xea, 0x3c, 0xd9, 0xa4, 0x9b, 0x71, 0x10, 0x79, 0x56, 0x0b, + 0x2b, 0xc1, 0xce, 0x03, 0x0d, 0x4f, 0x23, 0x87, 0x25, 0x89, 0x90, 0x4c, 0x46, 0x22, 0x01, 0x75, + 0x6b, 0x0d, 0xd1, 0xdd, 0x31, 0x97, 0x5e, 0x81, 0xf7, 0xf9, 0xfb, 0x9c, 0x83, 0xc4, 0x8f, 0xd0, + 0x6d, 0xad, 0x34, 0x4f, 0x58, 0xcc, 0x8f, 0x8c, 0xae, 0xd1, 0x3b, 0xf0, 0xdb, 0xfa, 0xf0, 0x15, + 0x8b, 0xb9, 0xf5, 0xd1, 0x40, 0xf7, 0xce, 0x73, 0xc9, 0x24, 0x2f, 0xb9, 0xa0, 0xc9, 0x0f, 0x51, + 0x6b, 0x91, 0x83, 0x14, 0x31, 0xcf, 0xe6, 0x51, 0x50, 0x51, 0x91, 0x3e, 0x7a, 0x19, 0xe0, 0x09, + 0x42, 0x22, 0xe5, 0x99, 0x32, 0x71, 0x64, 0x76, 0xf7, 0x7a, 0xad, 0x41, 0x9f, 0x6c, 0x2b, 0x9a, + 0x94, 0x59, 0x5e, 0x6b, 0xa2, 0xdf, 0xd0, 0xb0, 0x18, 0xba, 0xb3, 0x7e, 0x8b, 0x47, 0x68, 0x7f, + 0x91, 0x71, 0x26, 0x95, 0xf5, 0xd6, 0xa0, 0xb7, 0x51, 0xbf, 0x6e, 0x99, 0x4a, 0x70, 0x76, 0xc3, + 0xaf, 0x98, 0xa3, 0x16, 0x3a, 0xa8, 0x73, 0x58, 0x1c, 0xdd, 0x5f, 0x2f, 0x16, 0x52, 0x91, 0x00, + 0xc7, 0xe7, 0xe8, 0x66, 0xc6, 0x21, 0x5f, 0x4a, 0x5d, 0xc9, 0xc9, 0xf6, 0x4a, 0x1a, 0x42, 0x7e, + 0xc9, 0xf5, 0xb5, 0x86, 0xf5, 0x1c, 0x1d, 0x5e, 0xb9, 0xbd, 0xd6, 0x38, 0x06, 0x3f, 0x4d, 0xd4, + 0x2e, 0x49, 0x6f, 0x54, 0x1a, 0xfc, 0xd9, 0x40, 0xb7, 0xf4, 0x60, 0xb1, 0xbb, 0xdd, 0xd5, 0x7f, + 0x4b, 0xd0, 0xb9, 0x76, 0xcb, 0xac, 0xfe, 0x87, 0x1f, 0xbf, 0x3e, 0x99, 0x36, 0xee, 0x15, 0x2b, + 0x78, 0xb1, 0x66, 0xf5, 0x54, 0xcf, 0x1d, 0x1c, 0x5b, 0xed, 0x24, 0x38, 0xf6, 0x25, 0xfe, 0x66, + 0xa0, 0x76, 0xb3, 0x9d, 0xf8, 0xd9, 0x4e, 0x5d, 0xd3, 0xbb, 0xd6, 0x19, 0xee, 0x4a, 0x53, 0x53, + 0xb3, 0x86, 0xa5, 0xe3, 0xbe, 0xf5, 0xb8, 0x70, 0xfc, 0xcf, 0xe2, 0x45, 0x63, 0x71, 0x4f, 0xed, + 0xcb, 0xca, 0x30, 0x8d, 0x4b, 0x09, 0x6a, 0xd8, 0xa3, 0x3f, 0x06, 0x3a, 0x5e, 0x88, 0x78, 0x6b, + 0xd6, 0xd1, 0x61, 0x73, 0x14, 0x93, 0xe2, 0x9d, 0x4d, 0x8c, 0xb7, 0x67, 0x15, 0x2d, 0x14, 0x4b, + 0x96, 0x84, 0x44, 0x64, 0xa1, 0x13, 0xf2, 0xa4, 0x7c, 0x85, 0xfa, 0x19, 0xa7, 0x11, 0x6c, 0xfe, + 0x5b, 0xbc, 0xd0, 0xc1, 0x17, 0x73, 0x6f, 0xec, 0x79, 0x5f, 0xcd, 0xee, 0x58, 0x09, 0x7a, 0x01, + 0x10, 0x15, 0x16, 0xd1, 0xd4, 0x25, 0x55, 0x62, 0xf8, 0xae, 0x21, 0x33, 0x2f, 0x80, 0x59, 0x0d, + 0x99, 0x4d, 0xdd, 0x99, 0x86, 0xfc, 0x36, 0x8f, 0xd5, 0x39, 0xa5, 0x5e, 0x00, 0x94, 0xd6, 0x20, + 0x4a, 0xa7, 0x2e, 0xa5, 0x1a, 0xf6, 0x6e, 0xbf, 0xf4, 0x79, 0xf2, 0x37, 0x00, 0x00, 0xff, 0xff, + 0xc0, 0xa5, 0x7e, 0xbf, 0xd4, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/bidding_strategy_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/bidding_strategy_service.pb.go new file mode 100644 index 0000000..800d21f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/bidding_strategy_service.pb.go @@ -0,0 +1,591 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/bidding_strategy_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [BiddingStrategyService.GetBiddingStrategy][google.ads.googleads.v1.services.BiddingStrategyService.GetBiddingStrategy]. +type GetBiddingStrategyRequest struct { + // The resource name of the bidding strategy to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBiddingStrategyRequest) Reset() { *m = GetBiddingStrategyRequest{} } +func (m *GetBiddingStrategyRequest) String() string { return proto.CompactTextString(m) } +func (*GetBiddingStrategyRequest) ProtoMessage() {} +func (*GetBiddingStrategyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_service_98c343afe8e94e27, []int{0} +} +func (m *GetBiddingStrategyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBiddingStrategyRequest.Unmarshal(m, b) +} +func (m *GetBiddingStrategyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBiddingStrategyRequest.Marshal(b, m, deterministic) +} +func (dst *GetBiddingStrategyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBiddingStrategyRequest.Merge(dst, src) +} +func (m *GetBiddingStrategyRequest) XXX_Size() int { + return xxx_messageInfo_GetBiddingStrategyRequest.Size(m) +} +func (m *GetBiddingStrategyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBiddingStrategyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBiddingStrategyRequest proto.InternalMessageInfo + +func (m *GetBiddingStrategyRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [BiddingStrategyService.MutateBiddingStrategies][google.ads.googleads.v1.services.BiddingStrategyService.MutateBiddingStrategies]. +type MutateBiddingStrategiesRequest struct { + // The ID of the customer whose bidding strategies are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual bidding strategies. 
+ Operations []*BiddingStrategyOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBiddingStrategiesRequest) Reset() { *m = MutateBiddingStrategiesRequest{} } +func (m *MutateBiddingStrategiesRequest) String() string { return proto.CompactTextString(m) } +func (*MutateBiddingStrategiesRequest) ProtoMessage() {} +func (*MutateBiddingStrategiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_service_98c343afe8e94e27, []int{1} +} +func (m *MutateBiddingStrategiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBiddingStrategiesRequest.Unmarshal(m, b) +} +func (m *MutateBiddingStrategiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBiddingStrategiesRequest.Marshal(b, m, deterministic) +} +func (dst *MutateBiddingStrategiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBiddingStrategiesRequest.Merge(dst, src) +} +func (m *MutateBiddingStrategiesRequest) XXX_Size() int { + return xxx_messageInfo_MutateBiddingStrategiesRequest.Size(m) +} +func (m *MutateBiddingStrategiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBiddingStrategiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBiddingStrategiesRequest proto.InternalMessageInfo + +func (m *MutateBiddingStrategiesRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateBiddingStrategiesRequest) GetOperations() []*BiddingStrategyOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateBiddingStrategiesRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateBiddingStrategiesRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a bidding strategy. +type BiddingStrategyOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *BiddingStrategyOperation_Create + // *BiddingStrategyOperation_Update + // *BiddingStrategyOperation_Remove + Operation isBiddingStrategyOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BiddingStrategyOperation) Reset() { *m = BiddingStrategyOperation{} } +func (m *BiddingStrategyOperation) String() string { return proto.CompactTextString(m) } +func (*BiddingStrategyOperation) ProtoMessage() {} +func (*BiddingStrategyOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_service_98c343afe8e94e27, []int{2} +} +func (m *BiddingStrategyOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BiddingStrategyOperation.Unmarshal(m, b) +} +func (m *BiddingStrategyOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BiddingStrategyOperation.Marshal(b, m, deterministic) +} +func (dst *BiddingStrategyOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_BiddingStrategyOperation.Merge(dst, src) +} +func (m *BiddingStrategyOperation) XXX_Size() int { + return xxx_messageInfo_BiddingStrategyOperation.Size(m) +} +func (m *BiddingStrategyOperation) XXX_DiscardUnknown() { + xxx_messageInfo_BiddingStrategyOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_BiddingStrategyOperation proto.InternalMessageInfo + +func (m *BiddingStrategyOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isBiddingStrategyOperation_Operation interface { + isBiddingStrategyOperation_Operation() +} + +type BiddingStrategyOperation_Create struct { + Create *resources.BiddingStrategy `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type BiddingStrategyOperation_Update struct { + Update *resources.BiddingStrategy `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type BiddingStrategyOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*BiddingStrategyOperation_Create) isBiddingStrategyOperation_Operation() {} + +func (*BiddingStrategyOperation_Update) isBiddingStrategyOperation_Operation() {} + +func (*BiddingStrategyOperation_Remove) isBiddingStrategyOperation_Operation() {} + +func (m *BiddingStrategyOperation) GetOperation() isBiddingStrategyOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *BiddingStrategyOperation) GetCreate() *resources.BiddingStrategy { + if x, ok := m.GetOperation().(*BiddingStrategyOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *BiddingStrategyOperation) GetUpdate() *resources.BiddingStrategy { + if x, ok := m.GetOperation().(*BiddingStrategyOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *BiddingStrategyOperation) GetRemove() string { + if x, ok := m.GetOperation().(*BiddingStrategyOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BiddingStrategyOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BiddingStrategyOperation_OneofMarshaler, _BiddingStrategyOperation_OneofUnmarshaler, _BiddingStrategyOperation_OneofSizer, []interface{}{ + (*BiddingStrategyOperation_Create)(nil), + (*BiddingStrategyOperation_Update)(nil), + (*BiddingStrategyOperation_Remove)(nil), + } +} + +func _BiddingStrategyOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BiddingStrategyOperation) + // operation + switch x := m.Operation.(type) { + case *BiddingStrategyOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *BiddingStrategyOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *BiddingStrategyOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("BiddingStrategyOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _BiddingStrategyOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BiddingStrategyOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.BiddingStrategy) + err := b.DecodeMessage(msg) + m.Operation = &BiddingStrategyOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.BiddingStrategy) + err := b.DecodeMessage(msg) + m.Operation = &BiddingStrategyOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &BiddingStrategyOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _BiddingStrategyOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BiddingStrategyOperation) + // operation + switch x := m.Operation.(type) { + case *BiddingStrategyOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategyOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BiddingStrategyOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for bidding strategy mutate. +type MutateBiddingStrategiesResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateBiddingStrategyResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBiddingStrategiesResponse) Reset() { *m = MutateBiddingStrategiesResponse{} } +func (m *MutateBiddingStrategiesResponse) String() string { return proto.CompactTextString(m) } +func (*MutateBiddingStrategiesResponse) ProtoMessage() {} +func (*MutateBiddingStrategiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_service_98c343afe8e94e27, []int{3} +} +func (m *MutateBiddingStrategiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBiddingStrategiesResponse.Unmarshal(m, b) +} +func (m *MutateBiddingStrategiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBiddingStrategiesResponse.Marshal(b, m, deterministic) +} +func (dst *MutateBiddingStrategiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBiddingStrategiesResponse.Merge(dst, src) +} +func (m *MutateBiddingStrategiesResponse) XXX_Size() int { + return xxx_messageInfo_MutateBiddingStrategiesResponse.Size(m) +} +func (m *MutateBiddingStrategiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBiddingStrategiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBiddingStrategiesResponse proto.InternalMessageInfo + +func (m *MutateBiddingStrategiesResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateBiddingStrategiesResponse) GetResults() []*MutateBiddingStrategyResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the bidding strategy mutate. +type MutateBiddingStrategyResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBiddingStrategyResult) Reset() { *m = MutateBiddingStrategyResult{} } +func (m *MutateBiddingStrategyResult) String() string { return proto.CompactTextString(m) } +func (*MutateBiddingStrategyResult) ProtoMessage() {} +func (*MutateBiddingStrategyResult) Descriptor() ([]byte, []int) { + return fileDescriptor_bidding_strategy_service_98c343afe8e94e27, []int{4} +} +func (m *MutateBiddingStrategyResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBiddingStrategyResult.Unmarshal(m, b) +} +func (m *MutateBiddingStrategyResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBiddingStrategyResult.Marshal(b, m, deterministic) +} +func (dst *MutateBiddingStrategyResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBiddingStrategyResult.Merge(dst, src) +} +func (m *MutateBiddingStrategyResult) XXX_Size() int { + return xxx_messageInfo_MutateBiddingStrategyResult.Size(m) +} +func (m *MutateBiddingStrategyResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBiddingStrategyResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBiddingStrategyResult proto.InternalMessageInfo + +func (m *MutateBiddingStrategyResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetBiddingStrategyRequest)(nil), "google.ads.googleads.v1.services.GetBiddingStrategyRequest") + proto.RegisterType((*MutateBiddingStrategiesRequest)(nil), "google.ads.googleads.v1.services.MutateBiddingStrategiesRequest") + proto.RegisterType((*BiddingStrategyOperation)(nil), "google.ads.googleads.v1.services.BiddingStrategyOperation") + proto.RegisterType((*MutateBiddingStrategiesResponse)(nil), "google.ads.googleads.v1.services.MutateBiddingStrategiesResponse") + proto.RegisterType((*MutateBiddingStrategyResult)(nil), "google.ads.googleads.v1.services.MutateBiddingStrategyResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BiddingStrategyServiceClient is the client API for BiddingStrategyService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BiddingStrategyServiceClient interface { + // Returns the requested bidding strategy in full detail. + GetBiddingStrategy(ctx context.Context, in *GetBiddingStrategyRequest, opts ...grpc.CallOption) (*resources.BiddingStrategy, error) + // Creates, updates, or removes bidding strategies. Operation statuses are + // returned. 
+ MutateBiddingStrategies(ctx context.Context, in *MutateBiddingStrategiesRequest, opts ...grpc.CallOption) (*MutateBiddingStrategiesResponse, error) +} + +type biddingStrategyServiceClient struct { + cc *grpc.ClientConn +} + +func NewBiddingStrategyServiceClient(cc *grpc.ClientConn) BiddingStrategyServiceClient { + return &biddingStrategyServiceClient{cc} +} + +func (c *biddingStrategyServiceClient) GetBiddingStrategy(ctx context.Context, in *GetBiddingStrategyRequest, opts ...grpc.CallOption) (*resources.BiddingStrategy, error) { + out := new(resources.BiddingStrategy) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.BiddingStrategyService/GetBiddingStrategy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *biddingStrategyServiceClient) MutateBiddingStrategies(ctx context.Context, in *MutateBiddingStrategiesRequest, opts ...grpc.CallOption) (*MutateBiddingStrategiesResponse, error) { + out := new(MutateBiddingStrategiesResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.BiddingStrategyService/MutateBiddingStrategies", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BiddingStrategyServiceServer is the server API for BiddingStrategyService service. +type BiddingStrategyServiceServer interface { + // Returns the requested bidding strategy in full detail. + GetBiddingStrategy(context.Context, *GetBiddingStrategyRequest) (*resources.BiddingStrategy, error) + // Creates, updates, or removes bidding strategies. Operation statuses are + // returned. + MutateBiddingStrategies(context.Context, *MutateBiddingStrategiesRequest) (*MutateBiddingStrategiesResponse, error) +} + +func RegisterBiddingStrategyServiceServer(s *grpc.Server, srv BiddingStrategyServiceServer) { + s.RegisterService(&_BiddingStrategyService_serviceDesc, srv) +} + +func _BiddingStrategyService_GetBiddingStrategy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBiddingStrategyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BiddingStrategyServiceServer).GetBiddingStrategy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.BiddingStrategyService/GetBiddingStrategy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BiddingStrategyServiceServer).GetBiddingStrategy(ctx, req.(*GetBiddingStrategyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BiddingStrategyService_MutateBiddingStrategies_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateBiddingStrategiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BiddingStrategyServiceServer).MutateBiddingStrategies(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.BiddingStrategyService/MutateBiddingStrategies", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BiddingStrategyServiceServer).MutateBiddingStrategies(ctx, req.(*MutateBiddingStrategiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BiddingStrategyService_serviceDesc = grpc.ServiceDesc{ + ServiceName: 
"google.ads.googleads.v1.services.BiddingStrategyService", + HandlerType: (*BiddingStrategyServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetBiddingStrategy", + Handler: _BiddingStrategyService_GetBiddingStrategy_Handler, + }, + { + MethodName: "MutateBiddingStrategies", + Handler: _BiddingStrategyService_MutateBiddingStrategies_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/bidding_strategy_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/bidding_strategy_service.proto", fileDescriptor_bidding_strategy_service_98c343afe8e94e27) +} + +var fileDescriptor_bidding_strategy_service_98c343afe8e94e27 = []byte{ + // 721 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xd1, 0x6a, 0xd4, 0x4c, + 0x14, 0xc7, 0xbf, 0x64, 0x3f, 0xaa, 0x9d, 0x54, 0x85, 0x11, 0x6d, 0xdc, 0x4a, 0xbb, 0xc4, 0x82, + 0x65, 0x2f, 0x12, 0x76, 0x8b, 0x22, 0x59, 0x8a, 0xcd, 0x82, 0x6d, 0x05, 0x6b, 0x4b, 0x0a, 0x15, + 0xca, 0x42, 0x98, 0x6e, 0xa6, 0x21, 0x34, 0xc9, 0xc4, 0x99, 0xc9, 0xca, 0x52, 0x7a, 0xd3, 0x57, + 0xf0, 0xce, 0x4b, 0x2f, 0x7d, 0x0b, 0x7b, 0xe9, 0xad, 0x4f, 0x20, 0x78, 0xa5, 0x2f, 0x21, 0xc9, + 0x64, 0xb6, 0xed, 0xb6, 0x71, 0xa5, 0xbd, 0x3b, 0x39, 0xf3, 0x9f, 0xdf, 0x9c, 0x33, 0xe7, 0xcc, + 0x09, 0x78, 0x19, 0x10, 0x12, 0x44, 0xd8, 0x42, 0x3e, 0xb3, 0x84, 0x99, 0x5b, 0x83, 0x96, 0xc5, + 0x30, 0x1d, 0x84, 0x7d, 0xcc, 0xac, 0xfd, 0xd0, 0xf7, 0xc3, 0x24, 0xf0, 0x18, 0xa7, 0x88, 0xe3, + 0x60, 0xe8, 0x95, 0x2b, 0x66, 0x4a, 0x09, 0x27, 0xb0, 0x21, 0x76, 0x99, 0xc8, 0x67, 0xe6, 0x08, + 0x60, 0x0e, 0x5a, 0xa6, 0x04, 0xd4, 0x5f, 0x54, 0x1d, 0x41, 0x31, 0x23, 0x19, 0xbd, 0xea, 0x0c, + 0xc1, 0xae, 0x3f, 0x96, 0x3b, 0xd3, 0xd0, 0x42, 0x49, 0x42, 0x38, 0xe2, 0x21, 0x49, 0x58, 0xb9, + 0x5a, 0x9e, 0x6c, 0x15, 0x5f, 0xfb, 0xd9, 0x81, 0x75, 0x10, 0xe2, 0xc8, 0xf7, 0x62, 0xc4, 0x0e, + 0x4b, 0xc5, 0xfc, 0xb8, 0xe2, 0x03, 0x45, 0x69, 0x8a, 0xa9, 0x24, 0xcc, 0x96, 0xeb, 0x34, 0xed, + 0x5b, 0x8c, 0x23, 0x9e, 0x95, 0x0b, 0xc6, 0x2a, 0x78, 0xb4, 0x8e, 0x79, 0x57, 0x44, 0xb5, 0x53, + 0x06, 0xe5, 0xe2, 0xf7, 0x19, 0x66, 0x1c, 0x3e, 0x01, 0x77, 0x64, 0xe4, 0x5e, 0x82, 0x62, 0xac, + 0x2b, 0x0d, 0x65, 0x69, 0xda, 0x9d, 0x91, 0xce, 0xb7, 0x28, 0xc6, 0xc6, 0x6f, 0x05, 0xcc, 0x6f, + 0x66, 0x1c, 0x71, 0x7c, 0x91, 0x12, 0x62, 0x26, 0x39, 0x0b, 0x40, 0xeb, 0x67, 0x8c, 0x93, 0x18, + 0x53, 0x2f, 0xf4, 0x4b, 0x0a, 0x90, 0xae, 0xd7, 0x3e, 0xdc, 0x03, 0x80, 0xa4, 0x98, 0x8a, 0xa4, + 0x75, 0xb5, 0x51, 0x5b, 0xd2, 0xda, 0xb6, 0x39, 0xe9, 0xbe, 0xcd, 0xb1, 0xb0, 0xb7, 0x24, 0xc2, + 0x3d, 0x47, 0x83, 0x4f, 0xc1, 0xbd, 0x14, 0x51, 0x1e, 0xa2, 0xc8, 0x3b, 0x40, 0x61, 0x94, 0x51, + 0xac, 0xd7, 0x1a, 0xca, 0xd2, 0x6d, 0xf7, 0x6e, 0xe9, 0x5e, 0x13, 0xde, 0x3c, 0xdb, 0x01, 0x8a, + 0x42, 0x1f, 0x71, 0xec, 0x91, 0x24, 0x1a, 0xea, 0xff, 0x17, 0xb2, 0x19, 0xe9, 0xdc, 0x4a, 0xa2, + 0xa1, 0xf1, 0x49, 0x05, 0x7a, 0xd5, 0xb1, 0xb0, 0x03, 0xb4, 0x2c, 0x2d, 0xf6, 0xe7, 0xa5, 0x29, + 0xf6, 0x6b, 0xed, 0xba, 0xcc, 0x43, 0xd6, 0xc6, 0x5c, 0xcb, 0xab, 0xb7, 0x89, 0xd8, 0xa1, 0x0b, + 0x84, 0x3c, 0xb7, 0xe1, 0x1b, 0x30, 0xd5, 0xa7, 0x18, 0x71, 0x71, 0xcb, 0x5a, 0xbb, 0x5d, 0x99, + 0xff, 0xa8, 0x9b, 0xc6, 0x2f, 0x60, 0xe3, 0x3f, 0xb7, 0x64, 0xe4, 0x34, 0xc1, 0xd6, 0xd5, 0x9b, + 0xd0, 0x04, 0x03, 0xea, 0x60, 0x8a, 0xe2, 0x98, 0x0c, 0xc4, 0xd5, 0x4d, 0xe7, 0x2b, 0xe2, 0xbb, + 0xab, 0x81, 0xe9, 0xd1, 0x5d, 0x1b, 0xa7, 0x0a, 0x58, 0xa8, 0x6c, 0x05, 0x96, 0x92, 0x84, 0x61, + 0xb8, 0x06, 0x1e, 0x8c, 0x95, 
0xc3, 0xc3, 0x94, 0x12, 0x5a, 0x90, 0xb5, 0x36, 0x94, 0x71, 0xd2, + 0xb4, 0x6f, 0xee, 0x14, 0x9d, 0xea, 0xde, 0xbf, 0x58, 0xa8, 0x57, 0xb9, 0x1c, 0xbe, 0x03, 0xb7, + 0x28, 0x66, 0x59, 0xc4, 0x65, 0xbf, 0xac, 0x4c, 0xee, 0x97, 0xab, 0x62, 0x1b, 0xba, 0x05, 0xc5, + 0x95, 0x34, 0xa3, 0x0b, 0xe6, 0xfe, 0xa2, 0xfb, 0xa7, 0x37, 0xd1, 0xfe, 0x5a, 0x03, 0x0f, 0xc7, + 0xb6, 0xef, 0x88, 0x20, 0xe0, 0xa9, 0x02, 0xe0, 0xe5, 0x17, 0x07, 0x3b, 0x93, 0xa3, 0xaf, 0x7c, + 0xa7, 0xf5, 0x6b, 0x14, 0xd7, 0xe8, 0x9c, 0x7c, 0xff, 0xf9, 0x51, 0x7d, 0x06, 0x97, 0xf3, 0xf9, + 0x74, 0x74, 0x21, 0xa5, 0x15, 0xf9, 0x32, 0x99, 0xd5, 0x94, 0x03, 0xeb, 0xac, 0x92, 0x56, 0xf3, + 0x18, 0xfe, 0x50, 0xc0, 0x6c, 0x45, 0xa1, 0xe1, 0xea, 0xf5, 0xea, 0x70, 0x36, 0x2e, 0xea, 0xce, + 0x0d, 0x08, 0xa2, 0xcb, 0x0c, 0xa7, 0xc8, 0xae, 0x63, 0x3c, 0xcf, 0xb3, 0x3b, 0x4b, 0xe7, 0xe8, + 0xdc, 0x18, 0x5a, 0x69, 0x1e, 0x5f, 0x4e, 0xce, 0x8e, 0x0b, 0xb0, 0xad, 0x34, 0xbb, 0x27, 0x2a, + 0x58, 0xec, 0x93, 0x78, 0x62, 0x2c, 0xdd, 0xb9, 0xab, 0x2b, 0xbd, 0x9d, 0x3f, 0xf7, 0x6d, 0x65, + 0x6f, 0xa3, 0x04, 0x04, 0x24, 0x42, 0x49, 0x60, 0x12, 0x1a, 0x58, 0x01, 0x4e, 0x8a, 0x61, 0x20, + 0x7f, 0x12, 0x69, 0xc8, 0xaa, 0x7f, 0x4b, 0x1d, 0x69, 0x7c, 0x56, 0x6b, 0xeb, 0x8e, 0xf3, 0x45, + 0x6d, 0xac, 0x0b, 0xa0, 0xe3, 0x33, 0x53, 0x98, 0xb9, 0xb5, 0xdb, 0x32, 0xcb, 0x83, 0xd9, 0x37, + 0x29, 0xe9, 0x39, 0x3e, 0xeb, 0x8d, 0x24, 0xbd, 0xdd, 0x56, 0x4f, 0x4a, 0x7e, 0xa9, 0x8b, 0xc2, + 0x6f, 0xdb, 0x8e, 0xcf, 0x6c, 0x7b, 0x24, 0xb2, 0xed, 0xdd, 0x96, 0x6d, 0x4b, 0xd9, 0xfe, 0x54, + 0x11, 0xe7, 0xf2, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x66, 0xd6, 0xf6, 0x59, 0x3d, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/billing_setup_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/billing_setup_service.pb.go new file mode 100644 index 0000000..81d874d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/billing_setup_service.pb.go @@ -0,0 +1,502 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/billing_setup_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [BillingSetupService.GetBillingSetup][google.ads.googleads.v1.services.BillingSetupService.GetBillingSetup]. +type GetBillingSetupRequest struct { + // The resource name of the billing setup to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBillingSetupRequest) Reset() { *m = GetBillingSetupRequest{} } +func (m *GetBillingSetupRequest) String() string { return proto.CompactTextString(m) } +func (*GetBillingSetupRequest) ProtoMessage() {} +func (*GetBillingSetupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_service_6bff980c1ed14b83, []int{0} +} +func (m *GetBillingSetupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBillingSetupRequest.Unmarshal(m, b) +} +func (m *GetBillingSetupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBillingSetupRequest.Marshal(b, m, deterministic) +} +func (dst *GetBillingSetupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBillingSetupRequest.Merge(dst, src) +} +func (m *GetBillingSetupRequest) XXX_Size() int { + return xxx_messageInfo_GetBillingSetupRequest.Size(m) +} +func (m *GetBillingSetupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBillingSetupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBillingSetupRequest proto.InternalMessageInfo + +func (m *GetBillingSetupRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for billing setup mutate operations. +type MutateBillingSetupRequest struct { + // Id of the customer to apply the billing setup mutate operation to. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The operation to perform. + Operation *BillingSetupOperation `protobuf:"bytes,2,opt,name=operation,proto3" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBillingSetupRequest) Reset() { *m = MutateBillingSetupRequest{} } +func (m *MutateBillingSetupRequest) String() string { return proto.CompactTextString(m) } +func (*MutateBillingSetupRequest) ProtoMessage() {} +func (*MutateBillingSetupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_service_6bff980c1ed14b83, []int{1} +} +func (m *MutateBillingSetupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBillingSetupRequest.Unmarshal(m, b) +} +func (m *MutateBillingSetupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBillingSetupRequest.Marshal(b, m, deterministic) +} +func (dst *MutateBillingSetupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBillingSetupRequest.Merge(dst, src) +} +func (m *MutateBillingSetupRequest) XXX_Size() int { + return xxx_messageInfo_MutateBillingSetupRequest.Size(m) +} +func (m *MutateBillingSetupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBillingSetupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBillingSetupRequest proto.InternalMessageInfo + +func (m *MutateBillingSetupRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateBillingSetupRequest) GetOperation() *BillingSetupOperation { + if m != nil { + return m.Operation + } + return nil +} + +// A single operation on a billing setup, which describes the cancellation of an +// existing billing setup. 
+type BillingSetupOperation struct { + // Only one of these operations can be set. "Update" operations are not + // supported. + // + // Types that are valid to be assigned to Operation: + // *BillingSetupOperation_Create + // *BillingSetupOperation_Remove + Operation isBillingSetupOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingSetupOperation) Reset() { *m = BillingSetupOperation{} } +func (m *BillingSetupOperation) String() string { return proto.CompactTextString(m) } +func (*BillingSetupOperation) ProtoMessage() {} +func (*BillingSetupOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_service_6bff980c1ed14b83, []int{2} +} +func (m *BillingSetupOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingSetupOperation.Unmarshal(m, b) +} +func (m *BillingSetupOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingSetupOperation.Marshal(b, m, deterministic) +} +func (dst *BillingSetupOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingSetupOperation.Merge(dst, src) +} +func (m *BillingSetupOperation) XXX_Size() int { + return xxx_messageInfo_BillingSetupOperation.Size(m) +} +func (m *BillingSetupOperation) XXX_DiscardUnknown() { + xxx_messageInfo_BillingSetupOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingSetupOperation proto.InternalMessageInfo + +type isBillingSetupOperation_Operation interface { + isBillingSetupOperation_Operation() +} + +type BillingSetupOperation_Create struct { + Create *resources.BillingSetup `protobuf:"bytes,2,opt,name=create,proto3,oneof"` +} + +type BillingSetupOperation_Remove struct { + Remove string `protobuf:"bytes,1,opt,name=remove,proto3,oneof"` +} + +func (*BillingSetupOperation_Create) isBillingSetupOperation_Operation() {} + +func (*BillingSetupOperation_Remove) isBillingSetupOperation_Operation() {} + +func (m *BillingSetupOperation) GetOperation() isBillingSetupOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *BillingSetupOperation) GetCreate() *resources.BillingSetup { + if x, ok := m.GetOperation().(*BillingSetupOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *BillingSetupOperation) GetRemove() string { + if x, ok := m.GetOperation().(*BillingSetupOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BillingSetupOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BillingSetupOperation_OneofMarshaler, _BillingSetupOperation_OneofUnmarshaler, _BillingSetupOperation_OneofSizer, []interface{}{ + (*BillingSetupOperation_Create)(nil), + (*BillingSetupOperation_Remove)(nil), + } +} + +func _BillingSetupOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BillingSetupOperation) + // operation + switch x := m.Operation.(type) { + case *BillingSetupOperation_Create: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *BillingSetupOperation_Remove: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("BillingSetupOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _BillingSetupOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BillingSetupOperation) + switch tag { + case 2: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.BillingSetup) + err := b.DecodeMessage(msg) + m.Operation = &BillingSetupOperation_Create{msg} + return true, err + case 1: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &BillingSetupOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _BillingSetupOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BillingSetupOperation) + // operation + switch x := m.Operation.(type) { + case *BillingSetupOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BillingSetupOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a billing setup operation. +type MutateBillingSetupResponse struct { + // A result that identifies the resource affected by the mutate request. 
+ Result *MutateBillingSetupResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBillingSetupResponse) Reset() { *m = MutateBillingSetupResponse{} } +func (m *MutateBillingSetupResponse) String() string { return proto.CompactTextString(m) } +func (*MutateBillingSetupResponse) ProtoMessage() {} +func (*MutateBillingSetupResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_service_6bff980c1ed14b83, []int{3} +} +func (m *MutateBillingSetupResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBillingSetupResponse.Unmarshal(m, b) +} +func (m *MutateBillingSetupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBillingSetupResponse.Marshal(b, m, deterministic) +} +func (dst *MutateBillingSetupResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBillingSetupResponse.Merge(dst, src) +} +func (m *MutateBillingSetupResponse) XXX_Size() int { + return xxx_messageInfo_MutateBillingSetupResponse.Size(m) +} +func (m *MutateBillingSetupResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBillingSetupResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBillingSetupResponse proto.InternalMessageInfo + +func (m *MutateBillingSetupResponse) GetResult() *MutateBillingSetupResult { + if m != nil { + return m.Result + } + return nil +} + +// Result for a single billing setup mutate. +type MutateBillingSetupResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateBillingSetupResult) Reset() { *m = MutateBillingSetupResult{} } +func (m *MutateBillingSetupResult) String() string { return proto.CompactTextString(m) } +func (*MutateBillingSetupResult) ProtoMessage() {} +func (*MutateBillingSetupResult) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_setup_service_6bff980c1ed14b83, []int{4} +} +func (m *MutateBillingSetupResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateBillingSetupResult.Unmarshal(m, b) +} +func (m *MutateBillingSetupResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateBillingSetupResult.Marshal(b, m, deterministic) +} +func (dst *MutateBillingSetupResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateBillingSetupResult.Merge(dst, src) +} +func (m *MutateBillingSetupResult) XXX_Size() int { + return xxx_messageInfo_MutateBillingSetupResult.Size(m) +} +func (m *MutateBillingSetupResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateBillingSetupResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateBillingSetupResult proto.InternalMessageInfo + +func (m *MutateBillingSetupResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetBillingSetupRequest)(nil), "google.ads.googleads.v1.services.GetBillingSetupRequest") + proto.RegisterType((*MutateBillingSetupRequest)(nil), "google.ads.googleads.v1.services.MutateBillingSetupRequest") + proto.RegisterType((*BillingSetupOperation)(nil), "google.ads.googleads.v1.services.BillingSetupOperation") + proto.RegisterType((*MutateBillingSetupResponse)(nil), 
"google.ads.googleads.v1.services.MutateBillingSetupResponse") + proto.RegisterType((*MutateBillingSetupResult)(nil), "google.ads.googleads.v1.services.MutateBillingSetupResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BillingSetupServiceClient is the client API for BillingSetupService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BillingSetupServiceClient interface { + // Returns a billing setup. + GetBillingSetup(ctx context.Context, in *GetBillingSetupRequest, opts ...grpc.CallOption) (*resources.BillingSetup, error) + // Creates a billing setup, or cancels an existing billing setup. + MutateBillingSetup(ctx context.Context, in *MutateBillingSetupRequest, opts ...grpc.CallOption) (*MutateBillingSetupResponse, error) +} + +type billingSetupServiceClient struct { + cc *grpc.ClientConn +} + +func NewBillingSetupServiceClient(cc *grpc.ClientConn) BillingSetupServiceClient { + return &billingSetupServiceClient{cc} +} + +func (c *billingSetupServiceClient) GetBillingSetup(ctx context.Context, in *GetBillingSetupRequest, opts ...grpc.CallOption) (*resources.BillingSetup, error) { + out := new(resources.BillingSetup) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.BillingSetupService/GetBillingSetup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *billingSetupServiceClient) MutateBillingSetup(ctx context.Context, in *MutateBillingSetupRequest, opts ...grpc.CallOption) (*MutateBillingSetupResponse, error) { + out := new(MutateBillingSetupResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.BillingSetupService/MutateBillingSetup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BillingSetupServiceServer is the server API for BillingSetupService service. +type BillingSetupServiceServer interface { + // Returns a billing setup. + GetBillingSetup(context.Context, *GetBillingSetupRequest) (*resources.BillingSetup, error) + // Creates a billing setup, or cancels an existing billing setup. 
+ MutateBillingSetup(context.Context, *MutateBillingSetupRequest) (*MutateBillingSetupResponse, error) +} + +func RegisterBillingSetupServiceServer(s *grpc.Server, srv BillingSetupServiceServer) { + s.RegisterService(&_BillingSetupService_serviceDesc, srv) +} + +func _BillingSetupService_GetBillingSetup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBillingSetupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BillingSetupServiceServer).GetBillingSetup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.BillingSetupService/GetBillingSetup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BillingSetupServiceServer).GetBillingSetup(ctx, req.(*GetBillingSetupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BillingSetupService_MutateBillingSetup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateBillingSetupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BillingSetupServiceServer).MutateBillingSetup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.BillingSetupService/MutateBillingSetup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BillingSetupServiceServer).MutateBillingSetup(ctx, req.(*MutateBillingSetupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BillingSetupService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.BillingSetupService", + HandlerType: (*BillingSetupServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetBillingSetup", + Handler: _BillingSetupService_GetBillingSetup_Handler, + }, + { + MethodName: "MutateBillingSetup", + Handler: _BillingSetupService_MutateBillingSetup_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/billing_setup_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/billing_setup_service.proto", fileDescriptor_billing_setup_service_6bff980c1ed14b83) +} + +var fileDescriptor_billing_setup_service_6bff980c1ed14b83 = []byte{ + // 545 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x31, 0x6f, 0xd3, 0x40, + 0x14, 0xe6, 0x52, 0x29, 0x52, 0x2f, 0x20, 0xa4, 0x43, 0xa0, 0x10, 0x21, 0x11, 0x99, 0x0e, 0x55, + 0x86, 0x3b, 0x39, 0x08, 0x05, 0x5d, 0x1b, 0x21, 0x67, 0x49, 0x3b, 0x00, 0x95, 0x2b, 0x32, 0xa0, + 0x48, 0x91, 0x1b, 0x9f, 0x2c, 0x4b, 0xb6, 0xcf, 0xf8, 0x9d, 0xb3, 0x54, 0x5d, 0xd8, 0x98, 0xbb, + 0x33, 0x30, 0xb2, 0xf3, 0x27, 0x18, 0x58, 0xf8, 0x0b, 0x4c, 0xfc, 0x09, 0x90, 0x7d, 0xbe, 0x34, + 0xa1, 0x8e, 0x42, 0xbb, 0x3d, 0x9f, 0xdf, 0xf7, 0x7d, 0xef, 0x7d, 0xef, 0xdd, 0xe1, 0xc3, 0x40, + 0xca, 0x20, 0x12, 0xcc, 0xf3, 0x81, 0xe9, 0xb0, 0x88, 0x16, 0x36, 0x03, 0x91, 0x2d, 0xc2, 0xb9, + 0x00, 0x76, 0x16, 0x46, 0x51, 0x98, 0x04, 0x33, 0x10, 0x2a, 0x4f, 0x67, 0xd5, 0x31, 0x4d, 0x33, + 0xa9, 0x24, 0xe9, 0x6a, 0x08, 0xf5, 0x7c, 0xa0, 0x4b, 0x34, 0x5d, 0xd8, 0xd4, 0xa0, 0x3b, 0x2f, + 0x36, 0xf1, 0x67, 0x02, 0x64, 0x9e, 0x5d, 0x13, 0xd0, 0xc4, 0x9d, 0x27, 0x06, 0x96, 0x86, 0xcc, + 0x4b, 0x12, 
0xa9, 0x3c, 0x15, 0xca, 0x04, 0xf4, 0x5f, 0x6b, 0x88, 0x1f, 0x8d, 0x85, 0x1a, 0x69, + 0xdc, 0x69, 0x01, 0x73, 0xc5, 0x87, 0x5c, 0x80, 0x22, 0xcf, 0xf0, 0x3d, 0x43, 0x3c, 0x4b, 0xbc, + 0x58, 0xb4, 0x51, 0x17, 0xed, 0xef, 0xba, 0x77, 0xcd, 0xe1, 0x1b, 0x2f, 0x16, 0xd6, 0x25, 0xc2, + 0x8f, 0x5f, 0xe7, 0xca, 0x53, 0xa2, 0x8e, 0xe2, 0x29, 0x6e, 0xcd, 0x73, 0x50, 0x32, 0x16, 0xd9, + 0x2c, 0xf4, 0x2b, 0x02, 0x6c, 0x8e, 0x8e, 0x7d, 0xf2, 0x0e, 0xef, 0xca, 0x54, 0x64, 0x65, 0x45, + 0xed, 0x46, 0x17, 0xed, 0xb7, 0xfa, 0x03, 0xba, 0xcd, 0x08, 0xba, 0x2a, 0xf5, 0xd6, 0xc0, 0xdd, + 0x2b, 0x26, 0xeb, 0x13, 0xc2, 0x0f, 0x6b, 0x93, 0xc8, 0x31, 0x6e, 0xce, 0x33, 0xe1, 0x29, 0x51, + 0xa9, 0xb1, 0x8d, 0x6a, 0x4b, 0x53, 0xd7, 0xe4, 0x8e, 0xee, 0xb8, 0x15, 0x01, 0x69, 0xe3, 0x66, + 0x26, 0x62, 0xb9, 0xa8, 0x8c, 0x29, 0xfe, 0xe8, 0xef, 0x51, 0x6b, 0xa5, 0x2b, 0x2b, 0xc5, 0x9d, + 0x3a, 0x83, 0x20, 0x95, 0x09, 0x08, 0xe2, 0x16, 0x24, 0x90, 0x47, 0xaa, 0x24, 0x69, 0xf5, 0xf9, + 0xf6, 0xee, 0x6b, 0xd9, 0xf2, 0x48, 0xb9, 0x15, 0x93, 0xf5, 0x0a, 0xb7, 0x37, 0xe5, 0xfc, 0xd7, + 0x50, 0xfb, 0x9f, 0x77, 0xf0, 0x83, 0x55, 0xec, 0xa9, 0x96, 0x26, 0xdf, 0x10, 0xbe, 0xff, 0xcf, + 0xb2, 0x90, 0x97, 0xdb, 0x0b, 0xae, 0xdf, 0xaf, 0xce, 0x4d, 0xad, 0xb7, 0x06, 0x1f, 0x7f, 0xfe, + 0xba, 0x6c, 0xd8, 0x84, 0x15, 0x3b, 0x7f, 0xbe, 0xd6, 0xc6, 0xd0, 0xec, 0x14, 0xb0, 0x9e, 0xb9, + 0x04, 0x25, 0x08, 0x58, 0xef, 0x82, 0xfc, 0x40, 0x98, 0x5c, 0x77, 0x84, 0x1c, 0xdc, 0xce, 0x6b, + 0x5d, 0xfd, 0xe1, 0x2d, 0x07, 0x55, 0x8e, 0xdd, 0x1a, 0x96, 0xad, 0x0c, 0xac, 0x7e, 0xd1, 0xca, + 0x55, 0xed, 0xe7, 0x2b, 0xb7, 0x65, 0xd8, 0xbb, 0x58, 0xef, 0x84, 0xc7, 0x25, 0x1f, 0x47, 0xbd, + 0xd1, 0x1f, 0x84, 0xf7, 0xe6, 0x32, 0xde, 0x5a, 0xc2, 0xa8, 0x5d, 0x33, 0xc6, 0x93, 0xe2, 0xde, + 0x9f, 0xa0, 0xf7, 0x47, 0x15, 0x3a, 0x90, 0x91, 0x97, 0x04, 0x54, 0x66, 0x01, 0x0b, 0x44, 0x52, + 0xbe, 0x0a, 0xe6, 0x79, 0x49, 0x43, 0xd8, 0xfc, 0x9a, 0x1d, 0x98, 0xe0, 0x4b, 0x63, 0x67, 0xec, + 0x38, 0x5f, 0x1b, 0xdd, 0xb1, 0x26, 0x74, 0x7c, 0xa0, 0x3a, 0x2c, 0xa2, 0x89, 0x4d, 0x2b, 0x61, + 0xf8, 0x6e, 0x52, 0xa6, 0x8e, 0x0f, 0xd3, 0x65, 0xca, 0x74, 0x62, 0x4f, 0x4d, 0xca, 0xef, 0xc6, + 0x9e, 0x3e, 0xe7, 0xdc, 0xf1, 0x81, 0xf3, 0x65, 0x12, 0xe7, 0x13, 0x9b, 0x73, 0x93, 0x76, 0xd6, + 0x2c, 0xeb, 0x7c, 0xfe, 0x37, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x00, 0x54, 0x93, 0x74, 0x05, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_audience_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_audience_view_service.pb.go new file mode 100644 index 0000000..b9b32bd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_audience_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_audience_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignAudienceViewService.GetCampaignAudienceView][google.ads.googleads.v1.services.CampaignAudienceViewService.GetCampaignAudienceView]. +type GetCampaignAudienceViewRequest struct { + // The resource name of the campaign audience view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignAudienceViewRequest) Reset() { *m = GetCampaignAudienceViewRequest{} } +func (m *GetCampaignAudienceViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignAudienceViewRequest) ProtoMessage() {} +func (*GetCampaignAudienceViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_audience_view_service_4246c126b41f8f58, []int{0} +} +func (m *GetCampaignAudienceViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignAudienceViewRequest.Unmarshal(m, b) +} +func (m *GetCampaignAudienceViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignAudienceViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignAudienceViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignAudienceViewRequest.Merge(dst, src) +} +func (m *GetCampaignAudienceViewRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignAudienceViewRequest.Size(m) +} +func (m *GetCampaignAudienceViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignAudienceViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignAudienceViewRequest proto.InternalMessageInfo + +func (m *GetCampaignAudienceViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignAudienceViewRequest)(nil), "google.ads.googleads.v1.services.GetCampaignAudienceViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignAudienceViewServiceClient is the client API for CampaignAudienceViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignAudienceViewServiceClient interface { + // Returns the requested campaign audience view in full detail. 
+ GetCampaignAudienceView(ctx context.Context, in *GetCampaignAudienceViewRequest, opts ...grpc.CallOption) (*resources.CampaignAudienceView, error) +} + +type campaignAudienceViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignAudienceViewServiceClient(cc *grpc.ClientConn) CampaignAudienceViewServiceClient { + return &campaignAudienceViewServiceClient{cc} +} + +func (c *campaignAudienceViewServiceClient) GetCampaignAudienceView(ctx context.Context, in *GetCampaignAudienceViewRequest, opts ...grpc.CallOption) (*resources.CampaignAudienceView, error) { + out := new(resources.CampaignAudienceView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignAudienceViewService/GetCampaignAudienceView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignAudienceViewServiceServer is the server API for CampaignAudienceViewService service. +type CampaignAudienceViewServiceServer interface { + // Returns the requested campaign audience view in full detail. + GetCampaignAudienceView(context.Context, *GetCampaignAudienceViewRequest) (*resources.CampaignAudienceView, error) +} + +func RegisterCampaignAudienceViewServiceServer(s *grpc.Server, srv CampaignAudienceViewServiceServer) { + s.RegisterService(&_CampaignAudienceViewService_serviceDesc, srv) +} + +func _CampaignAudienceViewService_GetCampaignAudienceView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignAudienceViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignAudienceViewServiceServer).GetCampaignAudienceView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignAudienceViewService/GetCampaignAudienceView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignAudienceViewServiceServer).GetCampaignAudienceView(ctx, req.(*GetCampaignAudienceViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignAudienceViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignAudienceViewService", + HandlerType: (*CampaignAudienceViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignAudienceView", + Handler: _CampaignAudienceViewService_GetCampaignAudienceView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_audience_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_audience_view_service.proto", fileDescriptor_campaign_audience_view_service_4246c126b41f8f58) +} + +var fileDescriptor_campaign_audience_view_service_4246c126b41f8f58 = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x3d, 0x6b, 0xdb, 0x40, + 0x18, 0x46, 0x2a, 0x14, 0x2a, 0xda, 0x45, 0x4b, 0x8b, 0x5b, 0x8a, 0x70, 0x3d, 0x14, 0x0f, 0x77, + 0xa8, 0x1d, 0x4c, 0x2e, 0xe4, 0x43, 0x0e, 0xc6, 0x99, 0x82, 0x71, 0x40, 0x43, 0x10, 0x88, 0x8b, + 0x74, 0x88, 0x03, 0xeb, 0x4e, 0xd1, 0x2b, 0xc9, 0x43, 0xc8, 0x92, 0x25, 0x3f, 0x20, 0xff, 0x20, + 0x63, 0x7e, 0x4a, 0xd6, 0x90, 0x7f, 0x90, 0x29, 0x7b, 0xf6, 0x20, 0x9f, 0x4e, 0x24, 0x60, 0xd9, + 0xdb, 0xc3, 0xdd, 0xf3, 0x71, 0xef, 0xf3, 0x9e, 0x35, 0x49, 0xa4, 0x4c, 0x16, 0x0c, 0xd3, 0x18, + 0xb0, 0x82, 0x35, 0xaa, 0x5c, 
0x0c, 0x2c, 0xaf, 0x78, 0xc4, 0x00, 0x47, 0x34, 0xcd, 0x28, 0x4f, + 0x44, 0x48, 0xcb, 0x98, 0x33, 0x11, 0xb1, 0xb0, 0xe2, 0x6c, 0x19, 0x36, 0xf7, 0x28, 0xcb, 0x65, + 0x21, 0x6d, 0x47, 0x69, 0x11, 0x8d, 0x01, 0xb5, 0x36, 0xa8, 0x72, 0x91, 0xb6, 0xe9, 0xed, 0x77, + 0x05, 0xe5, 0x0c, 0x64, 0x99, 0x77, 0x27, 0xa9, 0x84, 0xde, 0x2f, 0xad, 0xcf, 0x38, 0xa6, 0x42, + 0xc8, 0x82, 0x16, 0x5c, 0x0a, 0x50, 0xb7, 0xfd, 0x89, 0xf5, 0x7b, 0xca, 0x8a, 0xa3, 0xc6, 0xc0, + 0x6b, 0xf4, 0x3e, 0x67, 0xcb, 0x39, 0xbb, 0x28, 0x19, 0x14, 0xf6, 0x1f, 0xeb, 0x9b, 0x4e, 0x0a, + 0x05, 0x4d, 0xd9, 0x0f, 0xc3, 0x31, 0xfe, 0x7e, 0x99, 0x7f, 0xd5, 0x87, 0x27, 0x34, 0x65, 0xff, + 0x5e, 0x0d, 0xeb, 0xe7, 0x3a, 0x93, 0x53, 0x35, 0x85, 0xfd, 0x64, 0x58, 0xdf, 0x3b, 0x72, 0xec, + 0x43, 0xb4, 0xad, 0x03, 0xb4, 0xf9, 0x89, 0xbd, 0x51, 0xa7, 0x43, 0xdb, 0x11, 0x5a, 0xa7, 0xef, + 0x1f, 0x5c, 0x3f, 0x3e, 0xdf, 0x9a, 0x3b, 0xf6, 0xa8, 0xee, 0xf3, 0xf2, 0xc3, 0x98, 0x7b, 0x51, + 0x09, 0x85, 0x4c, 0x59, 0x0e, 0x78, 0xd8, 0x16, 0xfc, 0x5e, 0x0c, 0x78, 0x78, 0x35, 0xbe, 0x31, + 0xad, 0x41, 0x24, 0xd3, 0xad, 0x13, 0x8c, 0x9d, 0x0d, 0xed, 0xcc, 0xea, 0x4d, 0xcc, 0x8c, 0xb3, + 0xe3, 0xc6, 0x25, 0x91, 0x0b, 0x2a, 0x12, 0x24, 0xf3, 0x04, 0x27, 0x4c, 0xac, 0xf6, 0xa4, 0x37, + 0x9f, 0x71, 0xe8, 0xfe, 0x71, 0xbb, 0x1a, 0xdc, 0x99, 0x9f, 0xa6, 0x9e, 0x77, 0x6f, 0x3a, 0x53, + 0x65, 0xe8, 0xc5, 0x80, 0x14, 0xac, 0x91, 0xef, 0xa2, 0x26, 0x18, 0x1e, 0x34, 0x25, 0xf0, 0x62, + 0x08, 0x5a, 0x4a, 0xe0, 0xbb, 0x81, 0xa6, 0xbc, 0x98, 0x03, 0x75, 0x4e, 0x88, 0x17, 0x03, 0x21, + 0x2d, 0x89, 0x10, 0xdf, 0x25, 0x44, 0xd3, 0xce, 0x3f, 0xaf, 0xde, 0xf9, 0xff, 0x2d, 0x00, 0x00, + 0xff, 0xff, 0x7f, 0x73, 0xad, 0x2f, 0x18, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_bid_modifier_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_bid_modifier_service.pb.go new file mode 100644 index 0000000..f66efa3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_bid_modifier_service.pb.go @@ -0,0 +1,591 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_bid_modifier_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignBidModifierService.GetCampaignBidModifier][google.ads.googleads.v1.services.CampaignBidModifierService.GetCampaignBidModifier]. 
+type GetCampaignBidModifierRequest struct { + // The resource name of the campaign bid modifier to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignBidModifierRequest) Reset() { *m = GetCampaignBidModifierRequest{} } +func (m *GetCampaignBidModifierRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignBidModifierRequest) ProtoMessage() {} +func (*GetCampaignBidModifierRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc, []int{0} +} +func (m *GetCampaignBidModifierRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignBidModifierRequest.Unmarshal(m, b) +} +func (m *GetCampaignBidModifierRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignBidModifierRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignBidModifierRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignBidModifierRequest.Merge(dst, src) +} +func (m *GetCampaignBidModifierRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignBidModifierRequest.Size(m) +} +func (m *GetCampaignBidModifierRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignBidModifierRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignBidModifierRequest proto.InternalMessageInfo + +func (m *GetCampaignBidModifierRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignBidModifierService.MutateCampaignBidModifier][]. +type MutateCampaignBidModifiersRequest struct { + // ID of the customer whose campaign bid modifiers are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaign bid modifiers. + Operations []*CampaignBidModifierOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBidModifiersRequest) Reset() { *m = MutateCampaignBidModifiersRequest{} } +func (m *MutateCampaignBidModifiersRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBidModifiersRequest) ProtoMessage() {} +func (*MutateCampaignBidModifiersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc, []int{1} +} +func (m *MutateCampaignBidModifiersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBidModifiersRequest.Unmarshal(m, b) +} +func (m *MutateCampaignBidModifiersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBidModifiersRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBidModifiersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBidModifiersRequest.Merge(dst, src) +} +func (m *MutateCampaignBidModifiersRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBidModifiersRequest.Size(m) +} +func (m *MutateCampaignBidModifiersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBidModifiersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBidModifiersRequest proto.InternalMessageInfo + +func (m *MutateCampaignBidModifiersRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignBidModifiersRequest) GetOperations() []*CampaignBidModifierOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignBidModifiersRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignBidModifiersRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove, update) on a campaign bid modifier. +type CampaignBidModifierOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignBidModifierOperation_Create + // *CampaignBidModifierOperation_Update + // *CampaignBidModifierOperation_Remove + Operation isCampaignBidModifierOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignBidModifierOperation) Reset() { *m = CampaignBidModifierOperation{} } +func (m *CampaignBidModifierOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignBidModifierOperation) ProtoMessage() {} +func (*CampaignBidModifierOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc, []int{2} +} +func (m *CampaignBidModifierOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignBidModifierOperation.Unmarshal(m, b) +} +func (m *CampaignBidModifierOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignBidModifierOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignBidModifierOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignBidModifierOperation.Merge(dst, src) +} +func (m *CampaignBidModifierOperation) XXX_Size() int { + return xxx_messageInfo_CampaignBidModifierOperation.Size(m) +} +func (m *CampaignBidModifierOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignBidModifierOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignBidModifierOperation proto.InternalMessageInfo + +func (m *CampaignBidModifierOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignBidModifierOperation_Operation interface { + isCampaignBidModifierOperation_Operation() +} + +type CampaignBidModifierOperation_Create struct { + Create *resources.CampaignBidModifier `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignBidModifierOperation_Update struct { + Update *resources.CampaignBidModifier `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignBidModifierOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignBidModifierOperation_Create) isCampaignBidModifierOperation_Operation() {} + +func (*CampaignBidModifierOperation_Update) isCampaignBidModifierOperation_Operation() {} + +func (*CampaignBidModifierOperation_Remove) isCampaignBidModifierOperation_Operation() {} + +func (m *CampaignBidModifierOperation) GetOperation() isCampaignBidModifierOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignBidModifierOperation) GetCreate() *resources.CampaignBidModifier { + if x, ok := m.GetOperation().(*CampaignBidModifierOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignBidModifierOperation) GetUpdate() *resources.CampaignBidModifier { + if x, ok := m.GetOperation().(*CampaignBidModifierOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignBidModifierOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignBidModifierOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignBidModifierOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignBidModifierOperation_OneofMarshaler, _CampaignBidModifierOperation_OneofUnmarshaler, _CampaignBidModifierOperation_OneofSizer, []interface{}{ + (*CampaignBidModifierOperation_Create)(nil), + (*CampaignBidModifierOperation_Update)(nil), + (*CampaignBidModifierOperation_Remove)(nil), + } +} + +func _CampaignBidModifierOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignBidModifierOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignBidModifierOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignBidModifierOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignBidModifierOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignBidModifierOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignBidModifierOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignBidModifierOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignBidModifier) + err := b.DecodeMessage(msg) + m.Operation = &CampaignBidModifierOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignBidModifier) + err := b.DecodeMessage(msg) + m.Operation = &CampaignBidModifierOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignBidModifierOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignBidModifierOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignBidModifierOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignBidModifierOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignBidModifierOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignBidModifierOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for campaign bid modifiers mutate. +type MutateCampaignBidModifiersResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignBidModifierResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBidModifiersResponse) Reset() { *m = MutateCampaignBidModifiersResponse{} } +func (m *MutateCampaignBidModifiersResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBidModifiersResponse) ProtoMessage() {} +func (*MutateCampaignBidModifiersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc, []int{3} +} +func (m *MutateCampaignBidModifiersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBidModifiersResponse.Unmarshal(m, b) +} +func (m *MutateCampaignBidModifiersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBidModifiersResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBidModifiersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBidModifiersResponse.Merge(dst, src) +} +func (m *MutateCampaignBidModifiersResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBidModifiersResponse.Size(m) +} +func (m *MutateCampaignBidModifiersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBidModifiersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBidModifiersResponse proto.InternalMessageInfo + +func (m *MutateCampaignBidModifiersResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignBidModifiersResponse) GetResults() []*MutateCampaignBidModifierResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the criterion mutate. +type MutateCampaignBidModifierResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBidModifierResult) Reset() { *m = MutateCampaignBidModifierResult{} } +func (m *MutateCampaignBidModifierResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBidModifierResult) ProtoMessage() {} +func (*MutateCampaignBidModifierResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc, []int{4} +} +func (m *MutateCampaignBidModifierResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBidModifierResult.Unmarshal(m, b) +} +func (m *MutateCampaignBidModifierResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBidModifierResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBidModifierResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBidModifierResult.Merge(dst, src) +} +func (m *MutateCampaignBidModifierResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBidModifierResult.Size(m) +} +func (m *MutateCampaignBidModifierResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBidModifierResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBidModifierResult proto.InternalMessageInfo + +func (m *MutateCampaignBidModifierResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignBidModifierRequest)(nil), "google.ads.googleads.v1.services.GetCampaignBidModifierRequest") + proto.RegisterType((*MutateCampaignBidModifiersRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignBidModifiersRequest") + proto.RegisterType((*CampaignBidModifierOperation)(nil), "google.ads.googleads.v1.services.CampaignBidModifierOperation") + proto.RegisterType((*MutateCampaignBidModifiersResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignBidModifiersResponse") + proto.RegisterType((*MutateCampaignBidModifierResult)(nil), "google.ads.googleads.v1.services.MutateCampaignBidModifierResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignBidModifierServiceClient is the client API for CampaignBidModifierService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignBidModifierServiceClient interface { + // Returns the requested campaign bid modifier in full detail. + GetCampaignBidModifier(ctx context.Context, in *GetCampaignBidModifierRequest, opts ...grpc.CallOption) (*resources.CampaignBidModifier, error) + // Creates, updates, or removes campaign bid modifiers. + // Operation statuses are returned. 
+ MutateCampaignBidModifiers(ctx context.Context, in *MutateCampaignBidModifiersRequest, opts ...grpc.CallOption) (*MutateCampaignBidModifiersResponse, error) +} + +type campaignBidModifierServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignBidModifierServiceClient(cc *grpc.ClientConn) CampaignBidModifierServiceClient { + return &campaignBidModifierServiceClient{cc} +} + +func (c *campaignBidModifierServiceClient) GetCampaignBidModifier(ctx context.Context, in *GetCampaignBidModifierRequest, opts ...grpc.CallOption) (*resources.CampaignBidModifier, error) { + out := new(resources.CampaignBidModifier) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignBidModifierService/GetCampaignBidModifier", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignBidModifierServiceClient) MutateCampaignBidModifiers(ctx context.Context, in *MutateCampaignBidModifiersRequest, opts ...grpc.CallOption) (*MutateCampaignBidModifiersResponse, error) { + out := new(MutateCampaignBidModifiersResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignBidModifierService/MutateCampaignBidModifiers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignBidModifierServiceServer is the server API for CampaignBidModifierService service. +type CampaignBidModifierServiceServer interface { + // Returns the requested campaign bid modifier in full detail. + GetCampaignBidModifier(context.Context, *GetCampaignBidModifierRequest) (*resources.CampaignBidModifier, error) + // Creates, updates, or removes campaign bid modifiers. + // Operation statuses are returned. + MutateCampaignBidModifiers(context.Context, *MutateCampaignBidModifiersRequest) (*MutateCampaignBidModifiersResponse, error) +} + +func RegisterCampaignBidModifierServiceServer(s *grpc.Server, srv CampaignBidModifierServiceServer) { + s.RegisterService(&_CampaignBidModifierService_serviceDesc, srv) +} + +func _CampaignBidModifierService_GetCampaignBidModifier_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignBidModifierRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignBidModifierServiceServer).GetCampaignBidModifier(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignBidModifierService/GetCampaignBidModifier", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignBidModifierServiceServer).GetCampaignBidModifier(ctx, req.(*GetCampaignBidModifierRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignBidModifierService_MutateCampaignBidModifiers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignBidModifiersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignBidModifierServiceServer).MutateCampaignBidModifiers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignBidModifierService/MutateCampaignBidModifiers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignBidModifierServiceServer).MutateCampaignBidModifiers(ctx, 
req.(*MutateCampaignBidModifiersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignBidModifierService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignBidModifierService", + HandlerType: (*CampaignBidModifierServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignBidModifier", + Handler: _CampaignBidModifierService_GetCampaignBidModifier_Handler, + }, + { + MethodName: "MutateCampaignBidModifiers", + Handler: _CampaignBidModifierService_MutateCampaignBidModifiers_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_bid_modifier_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_bid_modifier_service.proto", fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc) +} + +var fileDescriptor_campaign_bid_modifier_service_eb7befde0ae1b2cc = []byte{ + // 728 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x6e, 0xd3, 0x4c, + 0x14, 0xc0, 0x3f, 0x3b, 0x9f, 0x0a, 0x9d, 0x14, 0x90, 0x06, 0x01, 0x51, 0x54, 0x68, 0x30, 0x95, + 0xa8, 0xb2, 0xb0, 0x95, 0x20, 0x55, 0xe0, 0xaa, 0x45, 0x49, 0x43, 0x5a, 0x16, 0xa5, 0x95, 0x2b, + 0x75, 0x01, 0x11, 0xd6, 0xd4, 0x9e, 0x58, 0xa3, 0xda, 0x1e, 0x33, 0x33, 0x0e, 0xaa, 0xaa, 0x6e, + 0x58, 0x70, 0x01, 0x6e, 0xc0, 0x82, 0x05, 0x37, 0x81, 0x1d, 0xe2, 0x02, 0x2c, 0x58, 0x21, 0xb1, + 0xe1, 0x04, 0xc8, 0x1e, 0x4f, 0x68, 0xab, 0xb8, 0x41, 0x74, 0xf7, 0xfc, 0xe6, 0xf9, 0xf7, 0xfe, + 0xce, 0x1b, 0xd0, 0x0b, 0x28, 0x0d, 0x42, 0x6c, 0x21, 0x9f, 0x5b, 0x52, 0xcc, 0xa4, 0x51, 0xcb, + 0xe2, 0x98, 0x8d, 0x88, 0x87, 0xb9, 0xe5, 0xa1, 0x28, 0x41, 0x24, 0x88, 0xdd, 0x7d, 0xe2, 0xbb, + 0x11, 0xf5, 0xc9, 0x90, 0x60, 0xe6, 0x16, 0xc7, 0x66, 0xc2, 0xa8, 0xa0, 0xb0, 0x21, 0x7f, 0x35, + 0x91, 0xcf, 0xcd, 0x31, 0xc5, 0x1c, 0xb5, 0x4c, 0x45, 0xa9, 0xaf, 0x96, 0xf9, 0x61, 0x98, 0xd3, + 0x94, 0x95, 0x3a, 0x92, 0x0e, 0xea, 0xf3, 0xea, 0xf7, 0x84, 0x58, 0x28, 0x8e, 0xa9, 0x40, 0x82, + 0xd0, 0x98, 0x17, 0xa7, 0x85, 0x7b, 0x2b, 0xff, 0xda, 0x4f, 0x87, 0xd6, 0x90, 0xe0, 0xd0, 0x77, + 0x23, 0xc4, 0x0f, 0x0a, 0x8b, 0x3b, 0x67, 0x2d, 0x5e, 0x33, 0x94, 0x24, 0x98, 0x29, 0xc2, 0xad, + 0xe2, 0x9c, 0x25, 0x9e, 0xc5, 0x05, 0x12, 0x69, 0x71, 0x60, 0xf4, 0xc0, 0xed, 0x0d, 0x2c, 0xd6, + 0x8b, 0xd0, 0xba, 0xc4, 0xdf, 0x2a, 0x02, 0x73, 0xf0, 0xab, 0x14, 0x73, 0x01, 0xef, 0x81, 0x2b, + 0x2a, 0x05, 0x37, 0x46, 0x11, 0xae, 0x69, 0x0d, 0x6d, 0x69, 0xd6, 0x99, 0x53, 0xca, 0x67, 0x28, + 0xc2, 0xc6, 0x2f, 0x0d, 0xdc, 0xdd, 0x4a, 0x05, 0x12, 0x78, 0x02, 0x89, 0x2b, 0xd4, 0x02, 0xa8, + 0x7a, 0x29, 0x17, 0x34, 0xc2, 0xcc, 0x25, 0x7e, 0x01, 0x02, 0x4a, 0xf5, 0xd4, 0x87, 0x2f, 0x01, + 0xa0, 0x09, 0x66, 0x32, 0xf7, 0x9a, 0xde, 0xa8, 0x2c, 0x55, 0xdb, 0x6b, 0xe6, 0xb4, 0xda, 0x9b, + 0x13, 0x7c, 0x6e, 0x2b, 0x8c, 0x73, 0x82, 0x08, 0xef, 0x83, 0x6b, 0x09, 0x62, 0x82, 0xa0, 0xd0, + 0x1d, 0x22, 0x12, 0xa6, 0x0c, 0xd7, 0x2a, 0x0d, 0x6d, 0xe9, 0xb2, 0x73, 0xb5, 0x50, 0xf7, 0xa5, + 0x36, 0x4b, 0x7a, 0x84, 0x42, 0xe2, 0x23, 0x81, 0x5d, 0x1a, 0x87, 0x87, 0xb5, 0xff, 0x73, 0xb3, + 0x39, 0xa5, 0xdc, 0x8e, 0xc3, 0x43, 0xe3, 0x83, 0x0e, 0xe6, 0xcf, 0x73, 0x0d, 0x57, 0x40, 0x35, + 0x4d, 0x72, 0x46, 0xd6, 0xa9, 0x9c, 0x51, 0x6d, 0xd7, 0x55, 0x3e, 0xaa, 0x55, 0x66, 0x3f, 0x6b, + 0xe6, 0x16, 0xe2, 0x07, 0x0e, 0x90, 0xe6, 0x99, 0x0c, 0x77, 0xc0, 0x8c, 0xc7, 0x30, 0x12, 0xb2, + 0xe0, 0xd5, 0xf6, 0x72, 0x69, 0x1d, 0xc6, 0x13, 0x36, 0xa9, 0x10, 0x9b, 0xff, 0x39, 0x05, 0x27, + 
0x23, 0x4a, 0x7e, 0x4d, 0xbf, 0x28, 0x51, 0x72, 0x60, 0x0d, 0xcc, 0x30, 0x1c, 0xd1, 0x91, 0x2c, + 0xe3, 0x6c, 0x76, 0x22, 0xbf, 0xbb, 0x55, 0x30, 0x3b, 0xae, 0xbb, 0xf1, 0x49, 0x03, 0xc6, 0x79, + 0xd3, 0xc1, 0x13, 0x1a, 0x73, 0x0c, 0xfb, 0xe0, 0xc6, 0x99, 0xee, 0xb8, 0x98, 0x31, 0xca, 0x72, + 0x78, 0xb5, 0x0d, 0x55, 0xb8, 0x2c, 0xf1, 0xcc, 0xdd, 0x7c, 0x86, 0x9d, 0xeb, 0xa7, 0xfb, 0xf6, + 0x24, 0x33, 0x87, 0x2f, 0xc0, 0x25, 0x86, 0x79, 0x1a, 0x0a, 0x35, 0x42, 0x9d, 0xe9, 0x23, 0x54, + 0x1a, 0x9e, 0x93, 0x93, 0x1c, 0x45, 0x34, 0xfa, 0x60, 0x61, 0x8a, 0xed, 0x5f, 0xdd, 0x98, 0xf6, + 0xb7, 0x0a, 0xa8, 0x4f, 0x40, 0xec, 0xca, 0x80, 0xe0, 0x17, 0x0d, 0xdc, 0x9c, 0x7c, 0x2f, 0xe1, + 0xe3, 0xe9, 0xd9, 0x9c, 0x7b, 0xa3, 0xeb, 0xff, 0xd8, 0x77, 0x63, 0xed, 0xcd, 0xd7, 0xef, 0xef, + 0xf4, 0x87, 0x70, 0x39, 0x5b, 0x6b, 0x47, 0xa7, 0x52, 0x5c, 0x55, 0x97, 0x98, 0x5b, 0xcd, 0xf1, + 0x9e, 0x3b, 0xd9, 0x64, 0xab, 0x79, 0x0c, 0x7f, 0x6a, 0xa0, 0x5e, 0x3e, 0x06, 0x70, 0xfd, 0x02, + 0x5d, 0x52, 0x2b, 0xa6, 0xde, 0xbb, 0x18, 0x44, 0x4e, 0xa2, 0xd1, 0xcb, 0x33, 0x5d, 0x33, 0x1e, + 0x65, 0x99, 0xfe, 0x49, 0xed, 0xe8, 0xc4, 0xf6, 0x5a, 0x6d, 0x1e, 0x4f, 0x4c, 0xd4, 0x8e, 0x72, + 0xbc, 0xad, 0x35, 0xbb, 0x6f, 0x75, 0xb0, 0xe8, 0xd1, 0x68, 0x6a, 0x44, 0xdd, 0x85, 0xf2, 0x41, + 0xd8, 0xc9, 0x96, 0xc4, 0x8e, 0xf6, 0x7c, 0xb3, 0x80, 0x04, 0x34, 0x44, 0x71, 0x60, 0x52, 0x16, + 0x58, 0x01, 0x8e, 0xf3, 0x15, 0xa2, 0x9e, 0x9b, 0x84, 0xf0, 0xf2, 0x57, 0x6e, 0x45, 0x09, 0xef, + 0xf5, 0xca, 0x46, 0xa7, 0xf3, 0x51, 0x6f, 0x6c, 0x48, 0x60, 0xc7, 0xe7, 0xa6, 0x14, 0x33, 0x69, + 0xaf, 0x65, 0x16, 0x8e, 0xf9, 0x67, 0x65, 0x32, 0xe8, 0xf8, 0x7c, 0x30, 0x36, 0x19, 0xec, 0xb5, + 0x06, 0xca, 0xe4, 0x87, 0xbe, 0x28, 0xf5, 0xb6, 0xdd, 0xf1, 0xb9, 0x6d, 0x8f, 0x8d, 0x6c, 0x7b, + 0xaf, 0x65, 0xdb, 0xca, 0x6c, 0x7f, 0x26, 0x8f, 0xf3, 0xc1, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x6b, 0x80, 0xdc, 0x8d, 0x8c, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_budget_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_budget_service.pb.go new file mode 100644 index 0000000..559d847 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_budget_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_budget_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignBudgetService.GetCampaignBudget][google.ads.googleads.v1.services.CampaignBudgetService.GetCampaignBudget]. +type GetCampaignBudgetRequest struct { + // The resource name of the campaign budget to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignBudgetRequest) Reset() { *m = GetCampaignBudgetRequest{} } +func (m *GetCampaignBudgetRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignBudgetRequest) ProtoMessage() {} +func (*GetCampaignBudgetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_service_7c839c9c61357c69, []int{0} +} +func (m *GetCampaignBudgetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignBudgetRequest.Unmarshal(m, b) +} +func (m *GetCampaignBudgetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignBudgetRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignBudgetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignBudgetRequest.Merge(dst, src) +} +func (m *GetCampaignBudgetRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignBudgetRequest.Size(m) +} +func (m *GetCampaignBudgetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignBudgetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignBudgetRequest proto.InternalMessageInfo + +func (m *GetCampaignBudgetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignBudgetService.MutateCampaignBudgets][google.ads.googleads.v1.services.CampaignBudgetService.MutateCampaignBudgets]. +type MutateCampaignBudgetsRequest struct { + // The ID of the customer whose campaign budgets are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaign budgets. + Operations []*CampaignBudgetOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBudgetsRequest) Reset() { *m = MutateCampaignBudgetsRequest{} } +func (m *MutateCampaignBudgetsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBudgetsRequest) ProtoMessage() {} +func (*MutateCampaignBudgetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_service_7c839c9c61357c69, []int{1} +} +func (m *MutateCampaignBudgetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBudgetsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignBudgetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBudgetsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBudgetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBudgetsRequest.Merge(dst, src) +} +func (m *MutateCampaignBudgetsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBudgetsRequest.Size(m) +} +func (m *MutateCampaignBudgetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBudgetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBudgetsRequest proto.InternalMessageInfo + +func (m *MutateCampaignBudgetsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignBudgetsRequest) GetOperations() []*CampaignBudgetOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignBudgetsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignBudgetsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a campaign budget. +type CampaignBudgetOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignBudgetOperation_Create + // *CampaignBudgetOperation_Update + // *CampaignBudgetOperation_Remove + Operation isCampaignBudgetOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignBudgetOperation) Reset() { *m = CampaignBudgetOperation{} } +func (m *CampaignBudgetOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignBudgetOperation) ProtoMessage() {} +func (*CampaignBudgetOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_service_7c839c9c61357c69, []int{2} +} +func (m *CampaignBudgetOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignBudgetOperation.Unmarshal(m, b) +} +func (m *CampaignBudgetOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignBudgetOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignBudgetOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignBudgetOperation.Merge(dst, src) +} +func (m *CampaignBudgetOperation) XXX_Size() int { + return xxx_messageInfo_CampaignBudgetOperation.Size(m) +} +func (m *CampaignBudgetOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignBudgetOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignBudgetOperation proto.InternalMessageInfo + +func (m *CampaignBudgetOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignBudgetOperation_Operation interface { + isCampaignBudgetOperation_Operation() +} + +type CampaignBudgetOperation_Create struct { + Create *resources.CampaignBudget `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignBudgetOperation_Update struct { + Update *resources.CampaignBudget `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignBudgetOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignBudgetOperation_Create) isCampaignBudgetOperation_Operation() {} + +func (*CampaignBudgetOperation_Update) isCampaignBudgetOperation_Operation() {} + +func (*CampaignBudgetOperation_Remove) isCampaignBudgetOperation_Operation() {} + +func (m *CampaignBudgetOperation) GetOperation() isCampaignBudgetOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignBudgetOperation) GetCreate() *resources.CampaignBudget { + if x, ok := m.GetOperation().(*CampaignBudgetOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignBudgetOperation) GetUpdate() *resources.CampaignBudget { + if x, ok := m.GetOperation().(*CampaignBudgetOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignBudgetOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignBudgetOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignBudgetOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignBudgetOperation_OneofMarshaler, _CampaignBudgetOperation_OneofUnmarshaler, _CampaignBudgetOperation_OneofSizer, []interface{}{ + (*CampaignBudgetOperation_Create)(nil), + (*CampaignBudgetOperation_Update)(nil), + (*CampaignBudgetOperation_Remove)(nil), + } +} + +func _CampaignBudgetOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignBudgetOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignBudgetOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignBudgetOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignBudgetOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignBudgetOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignBudgetOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignBudgetOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignBudget) + err := b.DecodeMessage(msg) + m.Operation = &CampaignBudgetOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignBudget) + err := b.DecodeMessage(msg) + m.Operation = &CampaignBudgetOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignBudgetOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignBudgetOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignBudgetOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignBudgetOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignBudgetOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignBudgetOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for campaign budget mutate. +type MutateCampaignBudgetsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignBudgetResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBudgetsResponse) Reset() { *m = MutateCampaignBudgetsResponse{} } +func (m *MutateCampaignBudgetsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBudgetsResponse) ProtoMessage() {} +func (*MutateCampaignBudgetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_service_7c839c9c61357c69, []int{3} +} +func (m *MutateCampaignBudgetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBudgetsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignBudgetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBudgetsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBudgetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBudgetsResponse.Merge(dst, src) +} +func (m *MutateCampaignBudgetsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBudgetsResponse.Size(m) +} +func (m *MutateCampaignBudgetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBudgetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBudgetsResponse proto.InternalMessageInfo + +func (m *MutateCampaignBudgetsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignBudgetsResponse) GetResults() []*MutateCampaignBudgetResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the campaign budget mutate. +type MutateCampaignBudgetResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignBudgetResult) Reset() { *m = MutateCampaignBudgetResult{} } +func (m *MutateCampaignBudgetResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignBudgetResult) ProtoMessage() {} +func (*MutateCampaignBudgetResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_budget_service_7c839c9c61357c69, []int{4} +} +func (m *MutateCampaignBudgetResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignBudgetResult.Unmarshal(m, b) +} +func (m *MutateCampaignBudgetResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignBudgetResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignBudgetResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignBudgetResult.Merge(dst, src) +} +func (m *MutateCampaignBudgetResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignBudgetResult.Size(m) +} +func (m *MutateCampaignBudgetResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignBudgetResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignBudgetResult proto.InternalMessageInfo + +func (m *MutateCampaignBudgetResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignBudgetRequest)(nil), "google.ads.googleads.v1.services.GetCampaignBudgetRequest") + proto.RegisterType((*MutateCampaignBudgetsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignBudgetsRequest") + proto.RegisterType((*CampaignBudgetOperation)(nil), "google.ads.googleads.v1.services.CampaignBudgetOperation") + proto.RegisterType((*MutateCampaignBudgetsResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignBudgetsResponse") + proto.RegisterType((*MutateCampaignBudgetResult)(nil), "google.ads.googleads.v1.services.MutateCampaignBudgetResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignBudgetServiceClient is the client API for CampaignBudgetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignBudgetServiceClient interface { + // Returns the requested Campaign Budget in full detail. + GetCampaignBudget(ctx context.Context, in *GetCampaignBudgetRequest, opts ...grpc.CallOption) (*resources.CampaignBudget, error) + // Creates, updates, or removes campaign budgets. Operation statuses are + // returned. 
+ MutateCampaignBudgets(ctx context.Context, in *MutateCampaignBudgetsRequest, opts ...grpc.CallOption) (*MutateCampaignBudgetsResponse, error) +} + +type campaignBudgetServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignBudgetServiceClient(cc *grpc.ClientConn) CampaignBudgetServiceClient { + return &campaignBudgetServiceClient{cc} +} + +func (c *campaignBudgetServiceClient) GetCampaignBudget(ctx context.Context, in *GetCampaignBudgetRequest, opts ...grpc.CallOption) (*resources.CampaignBudget, error) { + out := new(resources.CampaignBudget) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignBudgetService/GetCampaignBudget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignBudgetServiceClient) MutateCampaignBudgets(ctx context.Context, in *MutateCampaignBudgetsRequest, opts ...grpc.CallOption) (*MutateCampaignBudgetsResponse, error) { + out := new(MutateCampaignBudgetsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignBudgetService/MutateCampaignBudgets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignBudgetServiceServer is the server API for CampaignBudgetService service. +type CampaignBudgetServiceServer interface { + // Returns the requested Campaign Budget in full detail. + GetCampaignBudget(context.Context, *GetCampaignBudgetRequest) (*resources.CampaignBudget, error) + // Creates, updates, or removes campaign budgets. Operation statuses are + // returned. + MutateCampaignBudgets(context.Context, *MutateCampaignBudgetsRequest) (*MutateCampaignBudgetsResponse, error) +} + +func RegisterCampaignBudgetServiceServer(s *grpc.Server, srv CampaignBudgetServiceServer) { + s.RegisterService(&_CampaignBudgetService_serviceDesc, srv) +} + +func _CampaignBudgetService_GetCampaignBudget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignBudgetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignBudgetServiceServer).GetCampaignBudget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignBudgetService/GetCampaignBudget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignBudgetServiceServer).GetCampaignBudget(ctx, req.(*GetCampaignBudgetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignBudgetService_MutateCampaignBudgets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignBudgetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignBudgetServiceServer).MutateCampaignBudgets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignBudgetService/MutateCampaignBudgets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignBudgetServiceServer).MutateCampaignBudgets(ctx, req.(*MutateCampaignBudgetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignBudgetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignBudgetService", + HandlerType: (*CampaignBudgetServiceServer)(nil), + Methods: 
[]grpc.MethodDesc{ + { + MethodName: "GetCampaignBudget", + Handler: _CampaignBudgetService_GetCampaignBudget_Handler, + }, + { + MethodName: "MutateCampaignBudgets", + Handler: _CampaignBudgetService_MutateCampaignBudgets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_budget_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_budget_service.proto", fileDescriptor_campaign_budget_service_7c839c9c61357c69) +} + +var fileDescriptor_campaign_budget_service_7c839c9c61357c69 = []byte{ + // 717 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xc1, 0x6b, 0xd4, 0x4e, + 0x14, 0xfe, 0x25, 0xfb, 0xa3, 0xda, 0xd9, 0xaa, 0x38, 0x52, 0xba, 0x84, 0xaa, 0x4b, 0x2c, 0x58, + 0xf6, 0x90, 0x90, 0xad, 0x20, 0x4d, 0x6d, 0xcb, 0xae, 0xd8, 0x56, 0xa4, 0xb6, 0xa4, 0xb0, 0xa0, + 0x2c, 0x84, 0x69, 0x32, 0x0d, 0xa1, 0x49, 0x26, 0xce, 0x4c, 0x56, 0x4a, 0xe9, 0x41, 0xff, 0x05, + 0x0f, 0xde, 0x3d, 0x7a, 0xf5, 0x2c, 0xde, 0xbd, 0x7a, 0xf2, 0xee, 0x41, 0xfc, 0x2b, 0x24, 0x99, + 0xcc, 0xda, 0x5d, 0x37, 0xac, 0xd6, 0xdb, 0xcb, 0x9b, 0xef, 0x7d, 0xef, 0x7d, 0xf3, 0xde, 0x9b, + 0x80, 0x8d, 0x80, 0x90, 0x20, 0xc2, 0x26, 0xf2, 0x99, 0x29, 0xcc, 0xdc, 0x1a, 0x58, 0x26, 0xc3, + 0x74, 0x10, 0x7a, 0x98, 0x99, 0x1e, 0x8a, 0x53, 0x14, 0x06, 0x89, 0x7b, 0x98, 0xf9, 0x01, 0xe6, + 0x6e, 0x79, 0x60, 0xa4, 0x94, 0x70, 0x02, 0x9b, 0x22, 0xc8, 0x40, 0x3e, 0x33, 0x86, 0xf1, 0xc6, + 0xc0, 0x32, 0x64, 0xbc, 0x76, 0xbf, 0x2a, 0x03, 0xc5, 0x8c, 0x64, 0x74, 0x42, 0x0a, 0x41, 0xad, + 0x2d, 0xca, 0xc0, 0x34, 0x34, 0x51, 0x92, 0x10, 0x8e, 0x78, 0x48, 0x12, 0x56, 0x9e, 0x96, 0x89, + 0xcd, 0xe2, 0xeb, 0x30, 0x3b, 0x32, 0x8f, 0x42, 0x1c, 0xf9, 0x6e, 0x8c, 0xd8, 0x71, 0x89, 0xb8, + 0x35, 0x8e, 0x78, 0x49, 0x51, 0x9a, 0x62, 0x2a, 0x19, 0x16, 0xca, 0x73, 0x9a, 0x7a, 0x26, 0xe3, + 0x88, 0x67, 0xe5, 0x81, 0xbe, 0x09, 0x1a, 0xdb, 0x98, 0x3f, 0x2c, 0x8b, 0xea, 0x16, 0x35, 0x39, + 0xf8, 0x45, 0x86, 0x19, 0x87, 0x77, 0xc0, 0x15, 0x59, 0xb7, 0x9b, 0xa0, 0x18, 0x37, 0x94, 0xa6, + 0xb2, 0x3c, 0xeb, 0xcc, 0x49, 0xe7, 0x53, 0x14, 0x63, 0xfd, 0xbb, 0x02, 0x16, 0x77, 0x33, 0x8e, + 0x38, 0x1e, 0x25, 0x61, 0x92, 0xe5, 0x36, 0xa8, 0x7b, 0x19, 0xe3, 0x24, 0xc6, 0xd4, 0x0d, 0xfd, + 0x92, 0x03, 0x48, 0xd7, 0x63, 0x1f, 0x3e, 0x03, 0x80, 0xa4, 0x98, 0x0a, 0xc5, 0x0d, 0xb5, 0x59, + 0x5b, 0xae, 0xb7, 0x57, 0x8d, 0x69, 0x77, 0x6d, 0x8c, 0xa6, 0xdb, 0x93, 0x0c, 0xce, 0x39, 0x32, + 0x78, 0x17, 0x5c, 0x4b, 0x11, 0xe5, 0x21, 0x8a, 0xdc, 0x23, 0x14, 0x46, 0x19, 0xc5, 0x8d, 0x5a, + 0x53, 0x59, 0xbe, 0xec, 0x5c, 0x2d, 0xdd, 0x5b, 0xc2, 0x9b, 0x4b, 0x1d, 0xa0, 0x28, 0xf4, 0x11, + 0xc7, 0x2e, 0x49, 0xa2, 0x93, 0xc6, 0xff, 0x05, 0x6c, 0x4e, 0x3a, 0xf7, 0x92, 0xe8, 0x44, 0x7f, + 0xab, 0x82, 0x85, 0x8a, 0xac, 0x70, 0x0d, 0xd4, 0xb3, 0xb4, 0x08, 0xcf, 0xbb, 0x52, 0x84, 0xd7, + 0xdb, 0x9a, 0x54, 0x21, 0xdb, 0x62, 0x6c, 0xe5, 0x8d, 0xdb, 0x45, 0xec, 0xd8, 0x01, 0x02, 0x9e, + 0xdb, 0xf0, 0x09, 0x98, 0xf1, 0x28, 0x46, 0x5c, 0xdc, 0x70, 0xbd, 0x6d, 0x55, 0xaa, 0x1f, 0xce, + 0xd1, 0x98, 0xfc, 0x9d, 0xff, 0x9c, 0x92, 0x22, 0x27, 0x13, 0xd4, 0x0d, 0xf5, 0x1f, 0xc8, 0x04, + 0x05, 0x6c, 0x80, 0x19, 0x8a, 0x63, 0x32, 0x10, 0xf7, 0x36, 0x9b, 0x9f, 0x88, 0xef, 0x6e, 0x1d, + 0xcc, 0x0e, 0x2f, 0x5a, 0xff, 0xa4, 0x80, 0x9b, 0x15, 0x43, 0xc0, 0x52, 0x92, 0x30, 0x0c, 0xb7, + 0xc0, 0xfc, 0x58, 0x27, 0x5c, 0x4c, 0x29, 0xa1, 0x05, 0x6f, 0xbd, 0x0d, 0x65, 0x91, 0x34, 0xf5, + 0x8c, 0x83, 0x62, 0x40, 0x9d, 0x1b, 0xa3, 0x3d, 0x7a, 0x94, 0xc3, 
0x61, 0x0f, 0x5c, 0xa2, 0x98, + 0x65, 0x11, 0x97, 0x93, 0xf2, 0x60, 0xfa, 0xa4, 0x4c, 0xaa, 0xcc, 0x29, 0x48, 0x1c, 0x49, 0xa6, + 0x77, 0x80, 0x56, 0x0d, 0xfb, 0xa3, 0x4d, 0x68, 0x7f, 0xa8, 0x81, 0xf9, 0xd1, 0xe8, 0x03, 0x51, + 0x01, 0xfc, 0xa8, 0x80, 0xeb, 0xbf, 0x6d, 0x19, 0xb4, 0xa7, 0x57, 0x5e, 0xb5, 0x9a, 0xda, 0xdf, + 0x37, 0x55, 0x5f, 0x7d, 0xfd, 0xe5, 0xdb, 0x1b, 0x75, 0x05, 0x5a, 0xf9, 0x7b, 0x74, 0x3a, 0x22, + 0x67, 0x5d, 0x6e, 0x23, 0x33, 0x5b, 0xc3, 0x07, 0xaa, 0xec, 0xa0, 0xd9, 0x3a, 0x83, 0x5f, 0x15, + 0x30, 0x3f, 0xb1, 0xbd, 0x70, 0xe3, 0x62, 0xb7, 0x2f, 0x1f, 0x07, 0x6d, 0xf3, 0xc2, 0xf1, 0x62, + 0xae, 0xf4, 0xcd, 0x42, 0xd5, 0xaa, 0x7e, 0x2f, 0x57, 0xf5, 0x4b, 0xc6, 0xe9, 0xb9, 0x27, 0x67, + 0xbd, 0x75, 0x36, 0x2e, 0xca, 0x8e, 0x0b, 0x52, 0x5b, 0x69, 0x75, 0x5f, 0xa9, 0x60, 0xc9, 0x23, + 0xf1, 0xd4, 0x3a, 0xba, 0xda, 0xc4, 0xde, 0xee, 0xe7, 0x9b, 0xbd, 0xaf, 0x3c, 0xdf, 0x29, 0xe3, + 0x03, 0x12, 0xa1, 0x24, 0x30, 0x08, 0x0d, 0xcc, 0x00, 0x27, 0xc5, 0xde, 0xcb, 0x3f, 0x41, 0x1a, + 0xb2, 0xea, 0x5f, 0xcf, 0x9a, 0x34, 0xde, 0xa9, 0xb5, 0xed, 0x4e, 0xe7, 0xbd, 0xda, 0xdc, 0x16, + 0x84, 0x1d, 0x9f, 0x19, 0xc2, 0xcc, 0xad, 0x9e, 0x65, 0x94, 0x89, 0xd9, 0x67, 0x09, 0xe9, 0x77, + 0x7c, 0xd6, 0x1f, 0x42, 0xfa, 0x3d, 0xab, 0x2f, 0x21, 0x3f, 0xd4, 0x25, 0xe1, 0xb7, 0xed, 0x8e, + 0xcf, 0x6c, 0x7b, 0x08, 0xb2, 0xed, 0x9e, 0x65, 0xdb, 0x12, 0x76, 0x38, 0x53, 0xd4, 0xb9, 0xf2, + 0x33, 0x00, 0x00, 0xff, 0xff, 0x5b, 0x22, 0xee, 0x20, 0x21, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_service.pb.go new file mode 100644 index 0000000..2b47f77 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_service.pb.go @@ -0,0 +1,589 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_criterion_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignCriterionService.GetCampaignCriterion][google.ads.googleads.v1.services.CampaignCriterionService.GetCampaignCriterion]. +type GetCampaignCriterionRequest struct { + // The resource name of the criterion to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignCriterionRequest) Reset() { *m = GetCampaignCriterionRequest{} } +func (m *GetCampaignCriterionRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignCriterionRequest) ProtoMessage() {} +func (*GetCampaignCriterionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_service_6844638016c8baf0, []int{0} +} +func (m *GetCampaignCriterionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignCriterionRequest.Unmarshal(m, b) +} +func (m *GetCampaignCriterionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignCriterionRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignCriterionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignCriterionRequest.Merge(dst, src) +} +func (m *GetCampaignCriterionRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignCriterionRequest.Size(m) +} +func (m *GetCampaignCriterionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignCriterionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignCriterionRequest proto.InternalMessageInfo + +func (m *GetCampaignCriterionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignCriterionService.MutateCampaignCriteria][google.ads.googleads.v1.services.CampaignCriterionService.MutateCampaignCriteria]. +type MutateCampaignCriteriaRequest struct { + // The ID of the customer whose criteria are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual criteria. + Operations []*CampaignCriterionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignCriteriaRequest) Reset() { *m = MutateCampaignCriteriaRequest{} } +func (m *MutateCampaignCriteriaRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignCriteriaRequest) ProtoMessage() {} +func (*MutateCampaignCriteriaRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_service_6844638016c8baf0, []int{1} +} +func (m *MutateCampaignCriteriaRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignCriteriaRequest.Unmarshal(m, b) +} +func (m *MutateCampaignCriteriaRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignCriteriaRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignCriteriaRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignCriteriaRequest.Merge(dst, src) +} +func (m *MutateCampaignCriteriaRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignCriteriaRequest.Size(m) +} +func (m *MutateCampaignCriteriaRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignCriteriaRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignCriteriaRequest proto.InternalMessageInfo + +func (m *MutateCampaignCriteriaRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignCriteriaRequest) GetOperations() []*CampaignCriterionOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignCriteriaRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignCriteriaRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a campaign criterion. +type CampaignCriterionOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignCriterionOperation_Create + // *CampaignCriterionOperation_Update + // *CampaignCriterionOperation_Remove + Operation isCampaignCriterionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignCriterionOperation) Reset() { *m = CampaignCriterionOperation{} } +func (m *CampaignCriterionOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignCriterionOperation) ProtoMessage() {} +func (*CampaignCriterionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_service_6844638016c8baf0, []int{2} +} +func (m *CampaignCriterionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignCriterionOperation.Unmarshal(m, b) +} +func (m *CampaignCriterionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignCriterionOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignCriterionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignCriterionOperation.Merge(dst, src) +} +func (m *CampaignCriterionOperation) XXX_Size() int { + return xxx_messageInfo_CampaignCriterionOperation.Size(m) +} +func (m *CampaignCriterionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignCriterionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignCriterionOperation proto.InternalMessageInfo + +func (m *CampaignCriterionOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignCriterionOperation_Operation interface { + isCampaignCriterionOperation_Operation() +} + +type CampaignCriterionOperation_Create struct { + Create *resources.CampaignCriterion `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignCriterionOperation_Update struct { + Update *resources.CampaignCriterion `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignCriterionOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignCriterionOperation_Create) isCampaignCriterionOperation_Operation() {} + +func (*CampaignCriterionOperation_Update) isCampaignCriterionOperation_Operation() {} + +func (*CampaignCriterionOperation_Remove) isCampaignCriterionOperation_Operation() {} + +func (m *CampaignCriterionOperation) GetOperation() isCampaignCriterionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignCriterionOperation) GetCreate() *resources.CampaignCriterion { + if x, ok := m.GetOperation().(*CampaignCriterionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignCriterionOperation) GetUpdate() *resources.CampaignCriterion { + if x, ok := m.GetOperation().(*CampaignCriterionOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignCriterionOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignCriterionOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignCriterionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignCriterionOperation_OneofMarshaler, _CampaignCriterionOperation_OneofUnmarshaler, _CampaignCriterionOperation_OneofSizer, []interface{}{ + (*CampaignCriterionOperation_Create)(nil), + (*CampaignCriterionOperation_Update)(nil), + (*CampaignCriterionOperation_Remove)(nil), + } +} + +func _CampaignCriterionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignCriterionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignCriterionOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignCriterionOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignCriterionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignCriterionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignCriterionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignCriterion) + err := b.DecodeMessage(msg) + m.Operation = &CampaignCriterionOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignCriterion) + err := b.DecodeMessage(msg) + m.Operation = &CampaignCriterionOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignCriterionOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignCriterionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignCriterionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterionOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignCriterionOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for campaign criterion mutate. +type MutateCampaignCriteriaResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignCriterionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignCriteriaResponse) Reset() { *m = MutateCampaignCriteriaResponse{} } +func (m *MutateCampaignCriteriaResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignCriteriaResponse) ProtoMessage() {} +func (*MutateCampaignCriteriaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_service_6844638016c8baf0, []int{3} +} +func (m *MutateCampaignCriteriaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignCriteriaResponse.Unmarshal(m, b) +} +func (m *MutateCampaignCriteriaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignCriteriaResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignCriteriaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignCriteriaResponse.Merge(dst, src) +} +func (m *MutateCampaignCriteriaResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignCriteriaResponse.Size(m) +} +func (m *MutateCampaignCriteriaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignCriteriaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignCriteriaResponse proto.InternalMessageInfo + +func (m *MutateCampaignCriteriaResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignCriteriaResponse) GetResults() []*MutateCampaignCriterionResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the criterion mutate. +type MutateCampaignCriterionResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignCriterionResult) Reset() { *m = MutateCampaignCriterionResult{} } +func (m *MutateCampaignCriterionResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignCriterionResult) ProtoMessage() {} +func (*MutateCampaignCriterionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_service_6844638016c8baf0, []int{4} +} +func (m *MutateCampaignCriterionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignCriterionResult.Unmarshal(m, b) +} +func (m *MutateCampaignCriterionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignCriterionResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignCriterionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignCriterionResult.Merge(dst, src) +} +func (m *MutateCampaignCriterionResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignCriterionResult.Size(m) +} +func (m *MutateCampaignCriterionResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignCriterionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignCriterionResult proto.InternalMessageInfo + +func (m *MutateCampaignCriterionResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignCriterionRequest)(nil), "google.ads.googleads.v1.services.GetCampaignCriterionRequest") + proto.RegisterType((*MutateCampaignCriteriaRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignCriteriaRequest") + proto.RegisterType((*CampaignCriterionOperation)(nil), "google.ads.googleads.v1.services.CampaignCriterionOperation") + proto.RegisterType((*MutateCampaignCriteriaResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignCriteriaResponse") + proto.RegisterType((*MutateCampaignCriterionResult)(nil), "google.ads.googleads.v1.services.MutateCampaignCriterionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignCriterionServiceClient is the client API for CampaignCriterionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignCriterionServiceClient interface { + // Returns the requested criterion in full detail. + GetCampaignCriterion(ctx context.Context, in *GetCampaignCriterionRequest, opts ...grpc.CallOption) (*resources.CampaignCriterion, error) + // Creates, updates, or removes criteria. Operation statuses are returned. 
+ MutateCampaignCriteria(ctx context.Context, in *MutateCampaignCriteriaRequest, opts ...grpc.CallOption) (*MutateCampaignCriteriaResponse, error) +} + +type campaignCriterionServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignCriterionServiceClient(cc *grpc.ClientConn) CampaignCriterionServiceClient { + return &campaignCriterionServiceClient{cc} +} + +func (c *campaignCriterionServiceClient) GetCampaignCriterion(ctx context.Context, in *GetCampaignCriterionRequest, opts ...grpc.CallOption) (*resources.CampaignCriterion, error) { + out := new(resources.CampaignCriterion) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignCriterionService/GetCampaignCriterion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignCriterionServiceClient) MutateCampaignCriteria(ctx context.Context, in *MutateCampaignCriteriaRequest, opts ...grpc.CallOption) (*MutateCampaignCriteriaResponse, error) { + out := new(MutateCampaignCriteriaResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignCriterionService/MutateCampaignCriteria", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignCriterionServiceServer is the server API for CampaignCriterionService service. +type CampaignCriterionServiceServer interface { + // Returns the requested criterion in full detail. + GetCampaignCriterion(context.Context, *GetCampaignCriterionRequest) (*resources.CampaignCriterion, error) + // Creates, updates, or removes criteria. Operation statuses are returned. + MutateCampaignCriteria(context.Context, *MutateCampaignCriteriaRequest) (*MutateCampaignCriteriaResponse, error) +} + +func RegisterCampaignCriterionServiceServer(s *grpc.Server, srv CampaignCriterionServiceServer) { + s.RegisterService(&_CampaignCriterionService_serviceDesc, srv) +} + +func _CampaignCriterionService_GetCampaignCriterion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignCriterionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignCriterionServiceServer).GetCampaignCriterion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignCriterionService/GetCampaignCriterion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignCriterionServiceServer).GetCampaignCriterion(ctx, req.(*GetCampaignCriterionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignCriterionService_MutateCampaignCriteria_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignCriteriaRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignCriterionServiceServer).MutateCampaignCriteria(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignCriterionService/MutateCampaignCriteria", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignCriterionServiceServer).MutateCampaignCriteria(ctx, req.(*MutateCampaignCriteriaRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignCriterionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: 
"google.ads.googleads.v1.services.CampaignCriterionService", + HandlerType: (*CampaignCriterionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignCriterion", + Handler: _CampaignCriterionService_GetCampaignCriterion_Handler, + }, + { + MethodName: "MutateCampaignCriteria", + Handler: _CampaignCriterionService_MutateCampaignCriteria_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_criterion_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_criterion_service.proto", fileDescriptor_campaign_criterion_service_6844638016c8baf0) +} + +var fileDescriptor_campaign_criterion_service_6844638016c8baf0 = []byte{ + // 721 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x6b, 0xd4, 0x4c, + 0x1c, 0xc7, 0x9f, 0x64, 0x1f, 0xfa, 0x3c, 0x9d, 0xad, 0x0a, 0xe3, 0xbf, 0xb0, 0xda, 0xba, 0xc4, + 0x82, 0x65, 0x0f, 0x09, 0xbb, 0xd6, 0x4b, 0x6a, 0xb1, 0xbb, 0xd5, 0xb6, 0x1e, 0xfa, 0x87, 0x14, + 0x0a, 0xca, 0x42, 0x98, 0x26, 0xd3, 0x10, 0x9a, 0x64, 0xe2, 0xcc, 0x64, 0xa5, 0x94, 0x5e, 0xc4, + 0x77, 0xe0, 0x1b, 0x10, 0x8f, 0xbe, 0x0d, 0x05, 0xc1, 0xab, 0x67, 0x6f, 0x9e, 0xc4, 0x17, 0x21, + 0x93, 0xc9, 0xac, 0x6d, 0x77, 0xe3, 0x4a, 0xbd, 0xfd, 0xf2, 0x9b, 0x6f, 0x3e, 0xf3, 0xfb, 0x37, + 0x33, 0xa0, 0x1b, 0x12, 0x12, 0xc6, 0xd8, 0x46, 0x01, 0xb3, 0xa5, 0x29, 0xac, 0x41, 0xdb, 0x66, + 0x98, 0x0e, 0x22, 0x1f, 0x33, 0xdb, 0x47, 0x49, 0x86, 0xa2, 0x30, 0xf5, 0x7c, 0x1a, 0x71, 0x4c, + 0x23, 0x92, 0x7a, 0xe5, 0x9a, 0x95, 0x51, 0xc2, 0x09, 0x6c, 0xca, 0xff, 0x2c, 0x14, 0x30, 0x6b, + 0x88, 0xb0, 0x06, 0x6d, 0x4b, 0x21, 0x1a, 0x4e, 0xd5, 0x26, 0x14, 0x33, 0x92, 0xd3, 0xf1, 0xbb, + 0x48, 0x7a, 0xe3, 0xb6, 0xfa, 0x37, 0x8b, 0x6c, 0x94, 0xa6, 0x84, 0x23, 0x1e, 0x91, 0x94, 0x95, + 0xab, 0xe5, 0xde, 0x76, 0xf1, 0xb5, 0x9f, 0x1f, 0xd8, 0x07, 0x11, 0x8e, 0x03, 0x2f, 0x41, 0xec, + 0xb0, 0x54, 0xcc, 0x9d, 0x57, 0xbc, 0xa4, 0x28, 0xcb, 0x30, 0x55, 0x84, 0x9b, 0xe5, 0x3a, 0xcd, + 0x7c, 0x9b, 0x71, 0xc4, 0xf3, 0x72, 0xc1, 0xec, 0x81, 0x5b, 0xeb, 0x98, 0xaf, 0x96, 0x71, 0xad, + 0xaa, 0xb0, 0x5c, 0xfc, 0x22, 0xc7, 0x8c, 0xc3, 0xbb, 0xe0, 0x92, 0x8a, 0xde, 0x4b, 0x51, 0x82, + 0x0d, 0xad, 0xa9, 0x2d, 0x4c, 0xbb, 0x33, 0xca, 0xb9, 0x85, 0x12, 0x6c, 0xfe, 0xd0, 0xc0, 0xec, + 0x66, 0xce, 0x11, 0xc7, 0xe7, 0x38, 0x48, 0x61, 0xee, 0x80, 0xba, 0x9f, 0x33, 0x4e, 0x12, 0x4c, + 0xbd, 0x28, 0x28, 0x21, 0x40, 0xb9, 0x9e, 0x06, 0xb0, 0x0f, 0x00, 0xc9, 0x30, 0x95, 0x59, 0x1b, + 0x7a, 0xb3, 0xb6, 0x50, 0xef, 0x3c, 0xb4, 0x26, 0x95, 0xdc, 0x1a, 0x89, 0x7b, 0x5b, 0x41, 0xdc, + 0x53, 0x3c, 0x78, 0x0f, 0x5c, 0xc9, 0x10, 0xe5, 0x11, 0x8a, 0xbd, 0x03, 0x14, 0xc5, 0x39, 0xc5, + 0x46, 0xad, 0xa9, 0x2d, 0xfc, 0xef, 0x5e, 0x2e, 0xdd, 0x6b, 0xd2, 0x2b, 0xd2, 0x1d, 0xa0, 0x38, + 0x0a, 0x10, 0xc7, 0x1e, 0x49, 0xe3, 0x23, 0xe3, 0xdf, 0x42, 0x36, 0xa3, 0x9c, 0xdb, 0x69, 0x7c, + 0x64, 0xbe, 0xd5, 0x41, 0xa3, 0x7a, 0x63, 0xb8, 0x04, 0xea, 0x79, 0x56, 0x10, 0x44, 0x7f, 0x0a, + 0x42, 0xbd, 0xd3, 0x50, 0xb9, 0xa8, 0x06, 0x59, 0x6b, 0xa2, 0x85, 0x9b, 0x88, 0x1d, 0xba, 0x40, + 0xca, 0x85, 0x0d, 0xb7, 0xc0, 0x94, 0x4f, 0x31, 0xe2, 0xb2, 0xd0, 0xf5, 0xce, 0x62, 0x65, 0x0d, + 0x86, 0x43, 0x35, 0x5a, 0x84, 0x8d, 0x7f, 0xdc, 0x92, 0x22, 0x78, 0x92, 0x6e, 0xe8, 0x7f, 0xc7, + 0x93, 0x14, 0x68, 0x80, 0x29, 0x8a, 0x13, 0x32, 0x90, 0x05, 0x9c, 0x16, 0x2b, 0xf2, 0xbb, 0x57, + 0x07, 0xd3, 0xc3, 0x8a, 0x9b, 0x1f, 0x34, 0x30, 0x57, 0x35, 0x11, 0x2c, 0x23, 0x29, 0xc3, 0x70, + 0x0d, 0x5c, 
0x3f, 0xd7, 0x13, 0x0f, 0x53, 0x4a, 0x68, 0x01, 0xae, 0x77, 0xa0, 0x0a, 0x94, 0x66, + 0xbe, 0xb5, 0x5b, 0x4c, 0xac, 0x7b, 0xf5, 0x6c, 0xb7, 0x9e, 0x08, 0x39, 0x7c, 0x06, 0xfe, 0xa3, + 0x98, 0xe5, 0x31, 0x57, 0x63, 0xf3, 0x68, 0xf2, 0xd8, 0x8c, 0x0d, 0x4d, 0x0c, 0xbd, 0xe0, 0xb8, + 0x8a, 0x67, 0x3e, 0xae, 0x18, 0x6b, 0xa5, 0xfc, 0xa3, 0xd3, 0xd1, 0xf9, 0x58, 0x03, 0xc6, 0x08, + 0x60, 0x57, 0x86, 0x02, 0x3f, 0x69, 0xe0, 0xda, 0xb8, 0xf3, 0x07, 0x97, 0x27, 0x67, 0xf1, 0x9b, + 0x73, 0xdb, 0xb8, 0x50, 0x9f, 0x4d, 0xe7, 0xd5, 0x97, 0x6f, 0x6f, 0xf4, 0x45, 0xd8, 0x11, 0xb7, + 0xd6, 0xf1, 0x99, 0xd4, 0x96, 0xd5, 0x61, 0x65, 0x76, 0x6b, 0x78, 0x8d, 0xa9, 0xa6, 0xda, 0xad, + 0x13, 0xf8, 0x55, 0x03, 0x37, 0xc6, 0xb7, 0x1c, 0x5e, 0xb4, 0x23, 0xea, 0xfa, 0x68, 0xac, 0x5c, + 0x1c, 0x20, 0xa7, 0xcd, 0x5c, 0x29, 0x32, 0x73, 0xcc, 0x07, 0x22, 0xb3, 0x5f, 0xa9, 0x1c, 0x9f, + 0xba, 0x95, 0x96, 0x5b, 0x27, 0x23, 0x89, 0x39, 0x49, 0x81, 0x75, 0xb4, 0x56, 0xef, 0xb5, 0x0e, + 0xe6, 0x7d, 0x92, 0x4c, 0x8c, 0xa4, 0x37, 0x5b, 0xd5, 0xec, 0x1d, 0x71, 0xf4, 0x77, 0xb4, 0xe7, + 0x1b, 0x25, 0x22, 0x24, 0x31, 0x4a, 0x43, 0x8b, 0xd0, 0xd0, 0x0e, 0x71, 0x5a, 0x5c, 0x0c, 0xea, + 0xdd, 0xc8, 0x22, 0x56, 0xfd, 0x56, 0x2d, 0x29, 0xe3, 0x9d, 0x5e, 0x5b, 0xef, 0x76, 0xdf, 0xeb, + 0xcd, 0x75, 0x09, 0xec, 0x06, 0xcc, 0x92, 0xa6, 0xb0, 0xf6, 0xda, 0x56, 0xb9, 0x31, 0xfb, 0xac, + 0x24, 0xfd, 0x6e, 0xc0, 0xfa, 0x43, 0x49, 0x7f, 0xaf, 0xdd, 0x57, 0x92, 0xef, 0xfa, 0xbc, 0xf4, + 0x3b, 0x4e, 0x37, 0x60, 0x8e, 0x33, 0x14, 0x39, 0xce, 0x5e, 0xdb, 0x71, 0x94, 0x6c, 0x7f, 0xaa, + 0x88, 0xf3, 0xfe, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x85, 0x31, 0xc3, 0xac, 0x52, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_simulation_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_simulation_service.pb.go new file mode 100644 index 0000000..71038de --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_criterion_simulation_service.pb.go @@ -0,0 +1,177 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_criterion_simulation_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [CampaignCriterionSimulationService.GetCampaignCriterionSimulation][google.ads.googleads.v1.services.CampaignCriterionSimulationService.GetCampaignCriterionSimulation]. +type GetCampaignCriterionSimulationRequest struct { + // The resource name of the campaign criterion simulation to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignCriterionSimulationRequest) Reset() { *m = GetCampaignCriterionSimulationRequest{} } +func (m *GetCampaignCriterionSimulationRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignCriterionSimulationRequest) ProtoMessage() {} +func (*GetCampaignCriterionSimulationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_criterion_simulation_service_b00eeea876e3218c, []int{0} +} +func (m *GetCampaignCriterionSimulationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignCriterionSimulationRequest.Unmarshal(m, b) +} +func (m *GetCampaignCriterionSimulationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignCriterionSimulationRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignCriterionSimulationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignCriterionSimulationRequest.Merge(dst, src) +} +func (m *GetCampaignCriterionSimulationRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignCriterionSimulationRequest.Size(m) +} +func (m *GetCampaignCriterionSimulationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignCriterionSimulationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignCriterionSimulationRequest proto.InternalMessageInfo + +func (m *GetCampaignCriterionSimulationRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignCriterionSimulationRequest)(nil), "google.ads.googleads.v1.services.GetCampaignCriterionSimulationRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignCriterionSimulationServiceClient is the client API for CampaignCriterionSimulationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignCriterionSimulationServiceClient interface { + // Returns the requested campaign criterion simulation in full detail. + GetCampaignCriterionSimulation(ctx context.Context, in *GetCampaignCriterionSimulationRequest, opts ...grpc.CallOption) (*resources.CampaignCriterionSimulation, error) +} + +type campaignCriterionSimulationServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignCriterionSimulationServiceClient(cc *grpc.ClientConn) CampaignCriterionSimulationServiceClient { + return &campaignCriterionSimulationServiceClient{cc} +} + +func (c *campaignCriterionSimulationServiceClient) GetCampaignCriterionSimulation(ctx context.Context, in *GetCampaignCriterionSimulationRequest, opts ...grpc.CallOption) (*resources.CampaignCriterionSimulation, error) { + out := new(resources.CampaignCriterionSimulation) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignCriterionSimulationService/GetCampaignCriterionSimulation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// CampaignCriterionSimulationServiceServer is the server API for CampaignCriterionSimulationService service. +type CampaignCriterionSimulationServiceServer interface { + // Returns the requested campaign criterion simulation in full detail. + GetCampaignCriterionSimulation(context.Context, *GetCampaignCriterionSimulationRequest) (*resources.CampaignCriterionSimulation, error) +} + +func RegisterCampaignCriterionSimulationServiceServer(s *grpc.Server, srv CampaignCriterionSimulationServiceServer) { + s.RegisterService(&_CampaignCriterionSimulationService_serviceDesc, srv) +} + +func _CampaignCriterionSimulationService_GetCampaignCriterionSimulation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignCriterionSimulationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignCriterionSimulationServiceServer).GetCampaignCriterionSimulation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignCriterionSimulationService/GetCampaignCriterionSimulation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignCriterionSimulationServiceServer).GetCampaignCriterionSimulation(ctx, req.(*GetCampaignCriterionSimulationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignCriterionSimulationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignCriterionSimulationService", + HandlerType: (*CampaignCriterionSimulationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignCriterionSimulation", + Handler: _CampaignCriterionSimulationService_GetCampaignCriterionSimulation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_criterion_simulation_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_criterion_simulation_service.proto", fileDescriptor_campaign_criterion_simulation_service_b00eeea876e3218c) +} + +var fileDescriptor_campaign_criterion_simulation_service_b00eeea876e3218c = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x4a, 0xeb, 0x40, + 0x18, 0x85, 0x49, 0x2e, 0x5c, 0xb8, 0xe1, 0xde, 0x4d, 0x56, 0x97, 0x22, 0x52, 0x6a, 0x45, 0xe9, + 0x62, 0x86, 0xe8, 0x6e, 0xc4, 0x62, 0x5a, 0x34, 0x2e, 0x8a, 0x94, 0x16, 0xba, 0x90, 0x40, 0x19, + 0x93, 0x21, 0x04, 0x9a, 0x99, 0x38, 0xff, 0xa4, 0x1b, 0x71, 0xe3, 0x03, 0xb8, 0x71, 0xe3, 0xda, + 0xa5, 0x8f, 0xe2, 0xd6, 0x57, 0x70, 0xe5, 0xce, 0x37, 0x90, 0x64, 0x3a, 0x01, 0xc1, 0x36, 0xee, + 0x0e, 0x33, 0x87, 0xef, 0xcc, 0x9c, 0xff, 0x77, 0x46, 0x89, 0x10, 0xc9, 0x82, 0x61, 0x1a, 0x03, + 0xd6, 0xb2, 0x54, 0x4b, 0x0f, 0x03, 0x93, 0xcb, 0x34, 0x62, 0x80, 0x23, 0x9a, 0xe5, 0x34, 0x4d, + 0xf8, 0x3c, 0x92, 0xa9, 0x62, 0x32, 0x15, 0x7c, 0x0e, 0x69, 0x56, 0x2c, 0xa8, 0xaa, 0xa4, 0xb6, + 0xa1, 0x5c, 0x0a, 0x25, 0xdc, 0xb6, 0x46, 0x20, 0x1a, 0x03, 0xaa, 0x69, 0x68, 0xe9, 0x21, 0x43, + 0x6b, 0x9d, 0xae, 0xcb, 0x93, 0x0c, 0x44, 0x21, 0x1b, 0x03, 0x75, 0x50, 0x6b, 0xcb, 0x60, 0xf2, + 0x14, 0x53, 0xce, 0x85, 0xaa, 0x2e, 0x41, 0xdf, 0x76, 0x46, 0xce, 0x6e, 0xc0, 0xd4, 0x70, 0xc5, + 0x19, 0x1a, 0xcc, 0xb4, 0xa6, 0x4c, 0xd8, 0x75, 0xc1, 0x40, 0xb9, 0x3b, 0xce, 0x3f, 
0x93, 0x3b, + 0xe7, 0x34, 0x63, 0xff, 0xad, 0xb6, 0xb5, 0xff, 0x67, 0xf2, 0xd7, 0x1c, 0x5e, 0xd0, 0x8c, 0x1d, + 0x3c, 0xda, 0x4e, 0x67, 0x03, 0x6b, 0xaa, 0xbf, 0xe6, 0x7e, 0x58, 0xce, 0xf6, 0xe6, 0x54, 0x37, + 0x40, 0x4d, 0xfd, 0xa0, 0x1f, 0xbd, 0xbb, 0xd5, 0x5f, 0x0b, 0xaa, 0x6b, 0x44, 0x1b, 0x30, 0x9d, + 0xb3, 0xbb, 0xd7, 0xb7, 0x07, 0xfb, 0xc4, 0xed, 0x97, 0xcd, 0xdf, 0x7c, 0xa9, 0xe0, 0x38, 0x2a, + 0x40, 0x89, 0x8c, 0x49, 0xc0, 0xbd, 0x7a, 0x14, 0xdf, 0x30, 0x00, 0xf7, 0x6e, 0x07, 0xf7, 0xb6, + 0xd3, 0x8d, 0x44, 0xd6, 0xf8, 0xad, 0xc1, 0x5e, 0x73, 0x81, 0xe3, 0x72, 0x74, 0x63, 0xeb, 0xf2, + 0x7c, 0x05, 0x4b, 0xc4, 0x82, 0xf2, 0x04, 0x09, 0x99, 0xe0, 0x84, 0xf1, 0x6a, 0xb0, 0x66, 0x63, + 0xf2, 0x14, 0xd6, 0x2f, 0xec, 0x91, 0x11, 0x4f, 0xf6, 0xaf, 0xc0, 0xf7, 0x9f, 0xed, 0x76, 0xa0, + 0x81, 0x7e, 0x0c, 0x48, 0xcb, 0x52, 0xcd, 0x3c, 0xb4, 0x0a, 0x86, 0x17, 0x63, 0x09, 0xfd, 0x18, + 0xc2, 0xda, 0x12, 0xce, 0xbc, 0xd0, 0x58, 0xde, 0xed, 0xae, 0x3e, 0x27, 0xc4, 0x8f, 0x81, 0x90, + 0xda, 0x44, 0xc8, 0xcc, 0x23, 0xc4, 0xd8, 0xae, 0x7e, 0x57, 0xef, 0x3c, 0xfc, 0x0c, 0x00, 0x00, + 0xff, 0xff, 0x21, 0x54, 0x12, 0xce, 0x57, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_extension_setting_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_extension_setting_service.pb.go new file mode 100644 index 0000000..0ed9327 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_extension_setting_service.pb.go @@ -0,0 +1,599 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_extension_setting_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [CampaignExtensionSettingService.GetCampaignExtensionSetting][google.ads.googleads.v1.services.CampaignExtensionSettingService.GetCampaignExtensionSetting]. +type GetCampaignExtensionSettingRequest struct { + // The resource name of the campaign extension setting to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignExtensionSettingRequest) Reset() { *m = GetCampaignExtensionSettingRequest{} } +func (m *GetCampaignExtensionSettingRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignExtensionSettingRequest) ProtoMessage() {} +func (*GetCampaignExtensionSettingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb, []int{0} +} +func (m *GetCampaignExtensionSettingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignExtensionSettingRequest.Unmarshal(m, b) +} +func (m *GetCampaignExtensionSettingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignExtensionSettingRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignExtensionSettingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignExtensionSettingRequest.Merge(dst, src) +} +func (m *GetCampaignExtensionSettingRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignExtensionSettingRequest.Size(m) +} +func (m *GetCampaignExtensionSettingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignExtensionSettingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignExtensionSettingRequest proto.InternalMessageInfo + +func (m *GetCampaignExtensionSettingRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [CampaignExtensionSettingService.MutateCampaignExtensionSettings][google.ads.googleads.v1.services.CampaignExtensionSettingService.MutateCampaignExtensionSettings]. +type MutateCampaignExtensionSettingsRequest struct { + // The ID of the customer whose campaign extension settings are being + // modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaign extension + // settings. + Operations []*CampaignExtensionSettingOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignExtensionSettingsRequest) Reset() { + *m = MutateCampaignExtensionSettingsRequest{} +} +func (m *MutateCampaignExtensionSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignExtensionSettingsRequest) ProtoMessage() {} +func (*MutateCampaignExtensionSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb, []int{1} +} +func (m *MutateCampaignExtensionSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignExtensionSettingsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignExtensionSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignExtensionSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignExtensionSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignExtensionSettingsRequest.Merge(dst, src) +} +func (m *MutateCampaignExtensionSettingsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignExtensionSettingsRequest.Size(m) +} +func (m *MutateCampaignExtensionSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignExtensionSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignExtensionSettingsRequest proto.InternalMessageInfo + +func (m *MutateCampaignExtensionSettingsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignExtensionSettingsRequest) GetOperations() []*CampaignExtensionSettingOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignExtensionSettingsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignExtensionSettingsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a campaign extension setting. +type CampaignExtensionSettingOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignExtensionSettingOperation_Create + // *CampaignExtensionSettingOperation_Update + // *CampaignExtensionSettingOperation_Remove + Operation isCampaignExtensionSettingOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignExtensionSettingOperation) Reset() { *m = CampaignExtensionSettingOperation{} } +func (m *CampaignExtensionSettingOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignExtensionSettingOperation) ProtoMessage() {} +func (*CampaignExtensionSettingOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb, []int{2} +} +func (m *CampaignExtensionSettingOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignExtensionSettingOperation.Unmarshal(m, b) +} +func (m *CampaignExtensionSettingOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignExtensionSettingOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignExtensionSettingOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignExtensionSettingOperation.Merge(dst, src) +} +func (m *CampaignExtensionSettingOperation) XXX_Size() int { + return xxx_messageInfo_CampaignExtensionSettingOperation.Size(m) +} +func (m *CampaignExtensionSettingOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignExtensionSettingOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignExtensionSettingOperation proto.InternalMessageInfo + +func (m *CampaignExtensionSettingOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignExtensionSettingOperation_Operation interface { + isCampaignExtensionSettingOperation_Operation() +} + +type CampaignExtensionSettingOperation_Create struct { + Create *resources.CampaignExtensionSetting `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignExtensionSettingOperation_Update struct { + Update *resources.CampaignExtensionSetting `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignExtensionSettingOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignExtensionSettingOperation_Create) isCampaignExtensionSettingOperation_Operation() {} + +func (*CampaignExtensionSettingOperation_Update) isCampaignExtensionSettingOperation_Operation() {} + +func (*CampaignExtensionSettingOperation_Remove) isCampaignExtensionSettingOperation_Operation() {} + +func (m *CampaignExtensionSettingOperation) GetOperation() isCampaignExtensionSettingOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignExtensionSettingOperation) GetCreate() *resources.CampaignExtensionSetting { + if x, ok := m.GetOperation().(*CampaignExtensionSettingOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignExtensionSettingOperation) GetUpdate() *resources.CampaignExtensionSetting { + if x, ok := m.GetOperation().(*CampaignExtensionSettingOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignExtensionSettingOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignExtensionSettingOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignExtensionSettingOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignExtensionSettingOperation_OneofMarshaler, _CampaignExtensionSettingOperation_OneofUnmarshaler, _CampaignExtensionSettingOperation_OneofSizer, []interface{}{ + (*CampaignExtensionSettingOperation_Create)(nil), + (*CampaignExtensionSettingOperation_Update)(nil), + (*CampaignExtensionSettingOperation_Remove)(nil), + } +} + +func _CampaignExtensionSettingOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignExtensionSettingOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignExtensionSettingOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignExtensionSettingOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignExtensionSettingOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignExtensionSettingOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignExtensionSettingOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &CampaignExtensionSettingOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &CampaignExtensionSettingOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignExtensionSettingOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignExtensionSettingOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignExtensionSettingOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignExtensionSettingOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignExtensionSettingOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a campaign extension setting mutate. +type MutateCampaignExtensionSettingsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. 
+ PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. + Results []*MutateCampaignExtensionSettingResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignExtensionSettingsResponse) Reset() { + *m = MutateCampaignExtensionSettingsResponse{} +} +func (m *MutateCampaignExtensionSettingsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignExtensionSettingsResponse) ProtoMessage() {} +func (*MutateCampaignExtensionSettingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb, []int{3} +} +func (m *MutateCampaignExtensionSettingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignExtensionSettingsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignExtensionSettingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignExtensionSettingsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignExtensionSettingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignExtensionSettingsResponse.Merge(dst, src) +} +func (m *MutateCampaignExtensionSettingsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignExtensionSettingsResponse.Size(m) +} +func (m *MutateCampaignExtensionSettingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignExtensionSettingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignExtensionSettingsResponse proto.InternalMessageInfo + +func (m *MutateCampaignExtensionSettingsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignExtensionSettingsResponse) GetResults() []*MutateCampaignExtensionSettingResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the campaign extension setting mutate. +type MutateCampaignExtensionSettingResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignExtensionSettingResult) Reset() { *m = MutateCampaignExtensionSettingResult{} } +func (m *MutateCampaignExtensionSettingResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignExtensionSettingResult) ProtoMessage() {} +func (*MutateCampaignExtensionSettingResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb, []int{4} +} +func (m *MutateCampaignExtensionSettingResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignExtensionSettingResult.Unmarshal(m, b) +} +func (m *MutateCampaignExtensionSettingResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignExtensionSettingResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignExtensionSettingResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignExtensionSettingResult.Merge(dst, src) +} +func (m *MutateCampaignExtensionSettingResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignExtensionSettingResult.Size(m) +} +func (m *MutateCampaignExtensionSettingResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignExtensionSettingResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignExtensionSettingResult proto.InternalMessageInfo + +func (m *MutateCampaignExtensionSettingResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignExtensionSettingRequest)(nil), "google.ads.googleads.v1.services.GetCampaignExtensionSettingRequest") + proto.RegisterType((*MutateCampaignExtensionSettingsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignExtensionSettingsRequest") + proto.RegisterType((*CampaignExtensionSettingOperation)(nil), "google.ads.googleads.v1.services.CampaignExtensionSettingOperation") + proto.RegisterType((*MutateCampaignExtensionSettingsResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignExtensionSettingsResponse") + proto.RegisterType((*MutateCampaignExtensionSettingResult)(nil), "google.ads.googleads.v1.services.MutateCampaignExtensionSettingResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignExtensionSettingServiceClient is the client API for CampaignExtensionSettingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignExtensionSettingServiceClient interface { + // Returns the requested campaign extension setting in full detail. + GetCampaignExtensionSetting(ctx context.Context, in *GetCampaignExtensionSettingRequest, opts ...grpc.CallOption) (*resources.CampaignExtensionSetting, error) + // Creates, updates, or removes campaign extension settings. Operation + // statuses are returned. 
+ MutateCampaignExtensionSettings(ctx context.Context, in *MutateCampaignExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateCampaignExtensionSettingsResponse, error) +} + +type campaignExtensionSettingServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignExtensionSettingServiceClient(cc *grpc.ClientConn) CampaignExtensionSettingServiceClient { + return &campaignExtensionSettingServiceClient{cc} +} + +func (c *campaignExtensionSettingServiceClient) GetCampaignExtensionSetting(ctx context.Context, in *GetCampaignExtensionSettingRequest, opts ...grpc.CallOption) (*resources.CampaignExtensionSetting, error) { + out := new(resources.CampaignExtensionSetting) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignExtensionSettingService/GetCampaignExtensionSetting", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignExtensionSettingServiceClient) MutateCampaignExtensionSettings(ctx context.Context, in *MutateCampaignExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateCampaignExtensionSettingsResponse, error) { + out := new(MutateCampaignExtensionSettingsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignExtensionSettingService/MutateCampaignExtensionSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignExtensionSettingServiceServer is the server API for CampaignExtensionSettingService service. +type CampaignExtensionSettingServiceServer interface { + // Returns the requested campaign extension setting in full detail. + GetCampaignExtensionSetting(context.Context, *GetCampaignExtensionSettingRequest) (*resources.CampaignExtensionSetting, error) + // Creates, updates, or removes campaign extension settings. Operation + // statuses are returned. 
+ MutateCampaignExtensionSettings(context.Context, *MutateCampaignExtensionSettingsRequest) (*MutateCampaignExtensionSettingsResponse, error) +} + +func RegisterCampaignExtensionSettingServiceServer(s *grpc.Server, srv CampaignExtensionSettingServiceServer) { + s.RegisterService(&_CampaignExtensionSettingService_serviceDesc, srv) +} + +func _CampaignExtensionSettingService_GetCampaignExtensionSetting_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignExtensionSettingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignExtensionSettingServiceServer).GetCampaignExtensionSetting(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignExtensionSettingService/GetCampaignExtensionSetting", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignExtensionSettingServiceServer).GetCampaignExtensionSetting(ctx, req.(*GetCampaignExtensionSettingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignExtensionSettingService_MutateCampaignExtensionSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignExtensionSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignExtensionSettingServiceServer).MutateCampaignExtensionSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignExtensionSettingService/MutateCampaignExtensionSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignExtensionSettingServiceServer).MutateCampaignExtensionSettings(ctx, req.(*MutateCampaignExtensionSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignExtensionSettingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignExtensionSettingService", + HandlerType: (*CampaignExtensionSettingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignExtensionSetting", + Handler: _CampaignExtensionSettingService_GetCampaignExtensionSetting_Handler, + }, + { + MethodName: "MutateCampaignExtensionSettings", + Handler: _CampaignExtensionSettingService_MutateCampaignExtensionSettings_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_extension_setting_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_extension_setting_service.proto", fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb) +} + +var fileDescriptor_campaign_extension_setting_service_79af28ecb9bc5deb = []byte{ + // 728 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0x41, 0x4f, 0xd4, 0x40, + 0x14, 0xc7, 0x6d, 0x21, 0x28, 0x53, 0xd4, 0x64, 0x8c, 0x71, 0xb3, 0x1a, 0x59, 0xeb, 0x46, 0xc8, + 0x1e, 0xda, 0xec, 0x7a, 0xeb, 0x06, 0x93, 0xdd, 0x95, 0x85, 0x8d, 0x41, 0x48, 0x89, 0x1c, 0xcc, + 0x26, 0x75, 0x68, 0x87, 0xa6, 0xa1, 0xed, 0xd4, 0x99, 0xe9, 0x2a, 0x21, 0x1c, 0xf4, 0x6a, 0x3c, + 0xf9, 0x0d, 0x3c, 0x7a, 0xf7, 0x4b, 0x78, 0xe5, 0x2b, 0xe8, 0xc5, 0xcf, 0xe0, 0xc1, 0xb4, 0xd3, + 0x59, 0x01, 0x53, 
0x4a, 0x02, 0xb7, 0xd7, 0x37, 0xff, 0xfe, 0xde, 0x7b, 0xf3, 0x5e, 0x5f, 0xc1, + 0xc8, 0x27, 0xc4, 0x0f, 0xb1, 0x89, 0x3c, 0x66, 0x0a, 0x33, 0xb3, 0x26, 0x6d, 0x93, 0x61, 0x3a, + 0x09, 0x5c, 0xcc, 0x4c, 0x17, 0x45, 0x09, 0x0a, 0xfc, 0xd8, 0xc1, 0xef, 0x39, 0x8e, 0x59, 0x40, + 0x62, 0x87, 0x61, 0xce, 0x83, 0xd8, 0x77, 0x0a, 0x8d, 0x91, 0x50, 0xc2, 0x09, 0x6c, 0x88, 0xf7, + 0x0d, 0xe4, 0x31, 0x63, 0x8a, 0x32, 0x26, 0x6d, 0x43, 0xa2, 0xea, 0xfd, 0xb2, 0x60, 0x14, 0x33, + 0x92, 0xd2, 0xf3, 0xa3, 0x89, 0x28, 0xf5, 0x07, 0x92, 0x91, 0x04, 0x26, 0x8a, 0x63, 0xc2, 0x11, + 0x0f, 0x48, 0xcc, 0x8a, 0xd3, 0x22, 0x07, 0x33, 0x7f, 0xda, 0x4d, 0xf7, 0xcc, 0xbd, 0x00, 0x87, + 0x9e, 0x13, 0x21, 0xb6, 0x5f, 0x28, 0x1e, 0x9e, 0x55, 0xbc, 0xa3, 0x28, 0x49, 0x30, 0x95, 0x84, + 0x7b, 0xc5, 0x39, 0x4d, 0x5c, 0x93, 0x71, 0xc4, 0xd3, 0xe2, 0x40, 0x1f, 0x01, 0x7d, 0x0d, 0xf3, + 0x41, 0x91, 0xdf, 0xaa, 0x4c, 0x6f, 0x5b, 0x64, 0x67, 0xe3, 0xb7, 0x29, 0x66, 0x1c, 0x3e, 0x06, + 0x37, 0x65, 0x31, 0x4e, 0x8c, 0x22, 0x5c, 0x53, 0x1a, 0xca, 0xf2, 0xbc, 0xbd, 0x20, 0x9d, 0x2f, + 0x51, 0x84, 0xf5, 0x3f, 0x0a, 0x78, 0xb2, 0x91, 0x72, 0xc4, 0x71, 0x19, 0x8e, 0x49, 0xde, 0x22, + 0xd0, 0xdc, 0x94, 0x71, 0x12, 0x61, 0xea, 0x04, 0x5e, 0x41, 0x03, 0xd2, 0x35, 0xf2, 0xa0, 0x0b, + 0x00, 0x49, 0x30, 0x15, 0xb7, 0x50, 0x53, 0x1b, 0x33, 0xcb, 0x5a, 0x67, 0x60, 0x54, 0xb5, 0xc2, + 0x28, 0x0b, 0xbc, 0x29, 0x59, 0xf6, 0x09, 0x2c, 0x5c, 0x02, 0xb7, 0x13, 0x44, 0x79, 0x80, 0x42, + 0x67, 0x0f, 0x05, 0x61, 0x4a, 0x71, 0x6d, 0xa6, 0xa1, 0x2c, 0xdf, 0xb0, 0x6f, 0x15, 0xee, 0xa1, + 0xf0, 0x66, 0xe5, 0x4f, 0x50, 0x18, 0x78, 0x88, 0x63, 0x87, 0xc4, 0xe1, 0x41, 0x6d, 0x36, 0x97, + 0x2d, 0x48, 0xe7, 0x66, 0x1c, 0x1e, 0xe8, 0xdf, 0x55, 0xf0, 0xa8, 0x32, 0x3e, 0xec, 0x02, 0x2d, + 0x4d, 0x72, 0x50, 0xd6, 0xbd, 0x1c, 0xa4, 0x75, 0xea, 0xb2, 0x32, 0xd9, 0x3e, 0x63, 0x98, 0x35, + 0x78, 0x03, 0xb1, 0x7d, 0x1b, 0x08, 0x79, 0x66, 0xc3, 0x57, 0x60, 0xce, 0xa5, 0x18, 0x71, 0x71, + 0xff, 0x5a, 0xa7, 0x5b, 0x7a, 0x23, 0xd3, 0xd1, 0x2b, 0xbd, 0x92, 0xf5, 0x6b, 0x76, 0x01, 0xcb, + 0xb0, 0x22, 0x48, 0x4d, 0xbd, 0x12, 0xac, 0x80, 0xc1, 0x1a, 0x98, 0xa3, 0x38, 0x22, 0x13, 0x71, + 0xab, 0xf3, 0xd9, 0x89, 0x78, 0xee, 0x6b, 0x60, 0x7e, 0xda, 0x06, 0xfd, 0x58, 0x01, 0x4b, 0x95, + 0x63, 0xc3, 0x12, 0x12, 0x33, 0x0c, 0x87, 0xe0, 0xee, 0x99, 0x8e, 0x39, 0x98, 0x52, 0x42, 0xf3, + 0x08, 0x5a, 0x07, 0xca, 0xc4, 0x69, 0xe2, 0x1a, 0xdb, 0xf9, 0x98, 0xdb, 0x77, 0x4e, 0xf7, 0x72, + 0x35, 0x93, 0xc3, 0x37, 0xe0, 0x3a, 0xc5, 0x2c, 0x0d, 0xb9, 0x9c, 0xad, 0x61, 0xf5, 0x6c, 0x9d, + 0x9f, 0xa3, 0x9d, 0xe3, 0x6c, 0x89, 0xd5, 0x5f, 0x80, 0xe6, 0x45, 0x5e, 0xb8, 0xd0, 0x97, 0xd5, + 0xf9, 0x3c, 0x0b, 0x16, 0xcb, 0x38, 0xdb, 0x22, 0x3f, 0xf8, 0x4b, 0x01, 0xf7, 0xcf, 0xf9, 0x92, + 0xe1, 0xf3, 0xea, 0x0a, 0xab, 0x17, 0x41, 0xfd, 0x32, 0xa3, 0xa1, 0x0f, 0x3e, 0x1e, 0xff, 0xfc, + 0xa2, 0xae, 0xc0, 0x6e, 0xb6, 0x1c, 0x0f, 0x4f, 0x95, 0xbd, 0x22, 0xbf, 0x7d, 0x66, 0xb6, 0xa6, + 0xdb, 0xf2, 0xbf, 0x39, 0x30, 0x5b, 0x47, 0xf0, 0x83, 0x0a, 0x16, 0x2b, 0xc6, 0x05, 0xae, 0x5f, + 0xb6, 0x9b, 0x72, 0x51, 0xd5, 0x47, 0x57, 0x40, 0x12, 0xb3, 0xab, 0x8f, 0xf2, 0xea, 0x07, 0xfa, + 0xb3, 0xac, 0xfa, 0x7f, 0xe5, 0x1e, 0x9e, 0x58, 0x84, 0x2b, 0xad, 0xa3, 0xf2, 0xe2, 0xad, 0x28, + 0x0f, 0x64, 0x29, 0xad, 0xfe, 0x27, 0x15, 0x34, 0x5d, 0x12, 0x55, 0xe6, 0xd6, 0x6f, 0x56, 0x4c, + 0xcd, 0x56, 0xb6, 0x6f, 0xb6, 0x94, 0xd7, 0xeb, 0x05, 0xc9, 0x27, 0x21, 0x8a, 0x7d, 0x83, 0x50, + 0xdf, 0xf4, 0x71, 0x9c, 0x6f, 0x23, 0xf9, 0x4b, 0x4b, 0x02, 0x56, 0xfe, 0x3b, 0xed, 0x4a, 0xe3, + 0xab, 0x3a, 0xb3, 0xd6, 0xeb, 0x7d, 0x53, 
0x1b, 0x6b, 0x02, 0xd8, 0xf3, 0x98, 0x21, 0xcc, 0xcc, + 0xda, 0x69, 0x1b, 0x45, 0x60, 0xf6, 0x43, 0x4a, 0xc6, 0x3d, 0x8f, 0x8d, 0xa7, 0x92, 0xf1, 0x4e, + 0x7b, 0x2c, 0x25, 0xbf, 0xd5, 0xa6, 0xf0, 0x5b, 0x56, 0xcf, 0x63, 0x96, 0x35, 0x15, 0x59, 0xd6, + 0x4e, 0xdb, 0xb2, 0xa4, 0x6c, 0x77, 0x2e, 0xcf, 0xf3, 0xe9, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x75, 0x32, 0x38, 0x15, 0xf5, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_feed_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_feed_service.pb.go new file mode 100644 index 0000000..424b21a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_feed_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_feed_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignFeedService.GetCampaignFeed][google.ads.googleads.v1.services.CampaignFeedService.GetCampaignFeed]. +type GetCampaignFeedRequest struct { + // The resource name of the campaign feed to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignFeedRequest) Reset() { *m = GetCampaignFeedRequest{} } +func (m *GetCampaignFeedRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignFeedRequest) ProtoMessage() {} +func (*GetCampaignFeedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_service_8c918da333fda613, []int{0} +} +func (m *GetCampaignFeedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignFeedRequest.Unmarshal(m, b) +} +func (m *GetCampaignFeedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignFeedRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignFeedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignFeedRequest.Merge(dst, src) +} +func (m *GetCampaignFeedRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignFeedRequest.Size(m) +} +func (m *GetCampaignFeedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignFeedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignFeedRequest proto.InternalMessageInfo + +func (m *GetCampaignFeedRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignFeedService.MutateCampaignFeeds][google.ads.googleads.v1.services.CampaignFeedService.MutateCampaignFeeds]. +type MutateCampaignFeedsRequest struct { + // The ID of the customer whose campaign feeds are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaign feeds. + Operations []*CampaignFeedOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignFeedsRequest) Reset() { *m = MutateCampaignFeedsRequest{} } +func (m *MutateCampaignFeedsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignFeedsRequest) ProtoMessage() {} +func (*MutateCampaignFeedsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_service_8c918da333fda613, []int{1} +} +func (m *MutateCampaignFeedsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignFeedsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignFeedsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignFeedsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignFeedsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignFeedsRequest.Merge(dst, src) +} +func (m *MutateCampaignFeedsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignFeedsRequest.Size(m) +} +func (m *MutateCampaignFeedsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignFeedsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignFeedsRequest proto.InternalMessageInfo + +func (m *MutateCampaignFeedsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignFeedsRequest) GetOperations() []*CampaignFeedOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignFeedsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignFeedsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a campaign feed. +type CampaignFeedOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignFeedOperation_Create + // *CampaignFeedOperation_Update + // *CampaignFeedOperation_Remove + Operation isCampaignFeedOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignFeedOperation) Reset() { *m = CampaignFeedOperation{} } +func (m *CampaignFeedOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignFeedOperation) ProtoMessage() {} +func (*CampaignFeedOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_service_8c918da333fda613, []int{2} +} +func (m *CampaignFeedOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignFeedOperation.Unmarshal(m, b) +} +func (m *CampaignFeedOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignFeedOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignFeedOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignFeedOperation.Merge(dst, src) +} +func (m *CampaignFeedOperation) XXX_Size() int { + return xxx_messageInfo_CampaignFeedOperation.Size(m) +} +func (m *CampaignFeedOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignFeedOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignFeedOperation proto.InternalMessageInfo + +func (m *CampaignFeedOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignFeedOperation_Operation interface { + isCampaignFeedOperation_Operation() +} + +type CampaignFeedOperation_Create struct { + Create *resources.CampaignFeed `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignFeedOperation_Update struct { + Update *resources.CampaignFeed `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignFeedOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignFeedOperation_Create) isCampaignFeedOperation_Operation() {} + +func (*CampaignFeedOperation_Update) isCampaignFeedOperation_Operation() {} + +func (*CampaignFeedOperation_Remove) isCampaignFeedOperation_Operation() {} + +func (m *CampaignFeedOperation) GetOperation() isCampaignFeedOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignFeedOperation) GetCreate() *resources.CampaignFeed { + if x, ok := m.GetOperation().(*CampaignFeedOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignFeedOperation) GetUpdate() *resources.CampaignFeed { + if x, ok := m.GetOperation().(*CampaignFeedOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignFeedOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignFeedOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignFeedOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignFeedOperation_OneofMarshaler, _CampaignFeedOperation_OneofUnmarshaler, _CampaignFeedOperation_OneofSizer, []interface{}{ + (*CampaignFeedOperation_Create)(nil), + (*CampaignFeedOperation_Update)(nil), + (*CampaignFeedOperation_Remove)(nil), + } +} + +func _CampaignFeedOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignFeedOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignFeedOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignFeedOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignFeedOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignFeedOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignFeedOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignFeedOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignFeed) + err := b.DecodeMessage(msg) + m.Operation = &CampaignFeedOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignFeed) + err := b.DecodeMessage(msg) + m.Operation = &CampaignFeedOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignFeedOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignFeedOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignFeedOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignFeedOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignFeedOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignFeedOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a campaign feed mutate. +type MutateCampaignFeedsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignFeedResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignFeedsResponse) Reset() { *m = MutateCampaignFeedsResponse{} } +func (m *MutateCampaignFeedsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignFeedsResponse) ProtoMessage() {} +func (*MutateCampaignFeedsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_service_8c918da333fda613, []int{3} +} +func (m *MutateCampaignFeedsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignFeedsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignFeedsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignFeedsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignFeedsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignFeedsResponse.Merge(dst, src) +} +func (m *MutateCampaignFeedsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignFeedsResponse.Size(m) +} +func (m *MutateCampaignFeedsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignFeedsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignFeedsResponse proto.InternalMessageInfo + +func (m *MutateCampaignFeedsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignFeedsResponse) GetResults() []*MutateCampaignFeedResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the campaign feed mutate. +type MutateCampaignFeedResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignFeedResult) Reset() { *m = MutateCampaignFeedResult{} } +func (m *MutateCampaignFeedResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignFeedResult) ProtoMessage() {} +func (*MutateCampaignFeedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_feed_service_8c918da333fda613, []int{4} +} +func (m *MutateCampaignFeedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignFeedResult.Unmarshal(m, b) +} +func (m *MutateCampaignFeedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignFeedResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignFeedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignFeedResult.Merge(dst, src) +} +func (m *MutateCampaignFeedResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignFeedResult.Size(m) +} +func (m *MutateCampaignFeedResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignFeedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignFeedResult proto.InternalMessageInfo + +func (m *MutateCampaignFeedResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignFeedRequest)(nil), "google.ads.googleads.v1.services.GetCampaignFeedRequest") + proto.RegisterType((*MutateCampaignFeedsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignFeedsRequest") + proto.RegisterType((*CampaignFeedOperation)(nil), "google.ads.googleads.v1.services.CampaignFeedOperation") + proto.RegisterType((*MutateCampaignFeedsResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignFeedsResponse") + proto.RegisterType((*MutateCampaignFeedResult)(nil), "google.ads.googleads.v1.services.MutateCampaignFeedResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignFeedServiceClient is the client API for CampaignFeedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignFeedServiceClient interface { + // Returns the requested campaign feed in full detail. + GetCampaignFeed(ctx context.Context, in *GetCampaignFeedRequest, opts ...grpc.CallOption) (*resources.CampaignFeed, error) + // Creates, updates, or removes campaign feeds. Operation statuses are + // returned. 
+ MutateCampaignFeeds(ctx context.Context, in *MutateCampaignFeedsRequest, opts ...grpc.CallOption) (*MutateCampaignFeedsResponse, error) +} + +type campaignFeedServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignFeedServiceClient(cc *grpc.ClientConn) CampaignFeedServiceClient { + return &campaignFeedServiceClient{cc} +} + +func (c *campaignFeedServiceClient) GetCampaignFeed(ctx context.Context, in *GetCampaignFeedRequest, opts ...grpc.CallOption) (*resources.CampaignFeed, error) { + out := new(resources.CampaignFeed) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignFeedService/GetCampaignFeed", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignFeedServiceClient) MutateCampaignFeeds(ctx context.Context, in *MutateCampaignFeedsRequest, opts ...grpc.CallOption) (*MutateCampaignFeedsResponse, error) { + out := new(MutateCampaignFeedsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignFeedService/MutateCampaignFeeds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignFeedServiceServer is the server API for CampaignFeedService service. +type CampaignFeedServiceServer interface { + // Returns the requested campaign feed in full detail. + GetCampaignFeed(context.Context, *GetCampaignFeedRequest) (*resources.CampaignFeed, error) + // Creates, updates, or removes campaign feeds. Operation statuses are + // returned. + MutateCampaignFeeds(context.Context, *MutateCampaignFeedsRequest) (*MutateCampaignFeedsResponse, error) +} + +func RegisterCampaignFeedServiceServer(s *grpc.Server, srv CampaignFeedServiceServer) { + s.RegisterService(&_CampaignFeedService_serviceDesc, srv) +} + +func _CampaignFeedService_GetCampaignFeed_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignFeedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignFeedServiceServer).GetCampaignFeed(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignFeedService/GetCampaignFeed", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignFeedServiceServer).GetCampaignFeed(ctx, req.(*GetCampaignFeedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignFeedService_MutateCampaignFeeds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignFeedsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignFeedServiceServer).MutateCampaignFeeds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignFeedService/MutateCampaignFeeds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignFeedServiceServer).MutateCampaignFeeds(ctx, req.(*MutateCampaignFeedsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignFeedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignFeedService", + HandlerType: (*CampaignFeedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignFeed", + Handler: _CampaignFeedService_GetCampaignFeed_Handler, + }, + { 
+ MethodName: "MutateCampaignFeeds", + Handler: _CampaignFeedService_MutateCampaignFeeds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_feed_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_feed_service.proto", fileDescriptor_campaign_feed_service_8c918da333fda613) +} + +var fileDescriptor_campaign_feed_service_8c918da333fda613 = []byte{ + // 714 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xfe, 0x9d, 0xfc, 0x2a, 0x74, 0x5d, 0xa8, 0xb4, 0x55, 0xc1, 0x0a, 0x08, 0x22, 0x53, 0x89, + 0x2a, 0x07, 0xaf, 0x12, 0x84, 0x8a, 0xdc, 0x46, 0x28, 0x45, 0xa4, 0xed, 0xa1, 0xb4, 0x72, 0x51, + 0x91, 0x50, 0x24, 0x6b, 0x6b, 0x6f, 0x2c, 0xab, 0xb6, 0xd7, 0xec, 0xae, 0x83, 0xaa, 0xaa, 0x17, + 0xde, 0x00, 0xf1, 0x04, 0x70, 0xe4, 0xce, 0x89, 0x37, 0xe0, 0x86, 0x78, 0x85, 0x9e, 0x78, 0x09, + 0x90, 0xbd, 0xde, 0x90, 0xb4, 0x89, 0x02, 0xbd, 0x8d, 0x67, 0xbf, 0xf9, 0x66, 0x66, 0xbf, 0xd9, + 0x31, 0xd8, 0x08, 0x28, 0x0d, 0x22, 0x82, 0xb0, 0xcf, 0x91, 0x34, 0x73, 0x6b, 0xd0, 0x44, 0x9c, + 0xb0, 0x41, 0xe8, 0x11, 0x8e, 0x3c, 0x1c, 0xa7, 0x38, 0x0c, 0x12, 0xb7, 0x4f, 0x88, 0xef, 0x96, + 0x6e, 0x2b, 0x65, 0x54, 0x50, 0x58, 0x97, 0x21, 0x16, 0xf6, 0xb9, 0x35, 0x8c, 0xb6, 0x06, 0x4d, + 0x4b, 0x45, 0xd7, 0x1e, 0x4f, 0xe3, 0x67, 0x84, 0xd3, 0x8c, 0x5d, 0x4a, 0x20, 0x89, 0x6b, 0x77, + 0x55, 0x58, 0x1a, 0x22, 0x9c, 0x24, 0x54, 0x60, 0x11, 0xd2, 0x84, 0x97, 0xa7, 0x65, 0x5a, 0x54, + 0x7c, 0x1d, 0x65, 0x7d, 0xd4, 0x0f, 0x49, 0xe4, 0xbb, 0x31, 0xe6, 0xc7, 0x25, 0xe2, 0xde, 0x45, + 0xc4, 0x5b, 0x86, 0xd3, 0x94, 0x30, 0xc5, 0x70, 0xbb, 0x3c, 0x67, 0xa9, 0x87, 0xb8, 0xc0, 0x22, + 0x2b, 0x0f, 0xcc, 0x36, 0xb8, 0xb5, 0x45, 0xc4, 0xb3, 0xb2, 0xa4, 0x2e, 0x21, 0xbe, 0x43, 0xde, + 0x64, 0x84, 0x0b, 0xf8, 0x00, 0xdc, 0x50, 0x35, 0xbb, 0x09, 0x8e, 0x89, 0xa1, 0xd5, 0xb5, 0xd5, + 0x79, 0x67, 0x41, 0x39, 0x5f, 0xe0, 0x98, 0x98, 0xe7, 0x1a, 0xa8, 0xed, 0x66, 0x02, 0x0b, 0x32, + 0x4a, 0xc1, 0x15, 0xc7, 0x7d, 0xa0, 0x7b, 0x19, 0x17, 0x34, 0x26, 0xcc, 0x0d, 0xfd, 0x92, 0x01, + 0x28, 0xd7, 0x8e, 0x0f, 0x5f, 0x01, 0x40, 0x53, 0xc2, 0x64, 0xb7, 0x46, 0xa5, 0x5e, 0x5d, 0xd5, + 0x5b, 0x6b, 0xd6, 0xac, 0x5b, 0xb6, 0x46, 0x93, 0xed, 0xa9, 0x78, 0x67, 0x84, 0x0a, 0x3e, 0x04, + 0x8b, 0x29, 0x66, 0x22, 0xc4, 0x91, 0xdb, 0xc7, 0x61, 0x94, 0x31, 0x62, 0x54, 0xeb, 0xda, 0xea, + 0x75, 0xe7, 0x66, 0xe9, 0xee, 0x4a, 0x6f, 0xde, 0xe6, 0x00, 0x47, 0xa1, 0x8f, 0x05, 0x71, 0x69, + 0x12, 0x9d, 0x18, 0xff, 0x17, 0xb0, 0x05, 0xe5, 0xdc, 0x4b, 0xa2, 0x13, 0xf3, 0x7d, 0x05, 0x2c, + 0x4f, 0xcc, 0x09, 0xd7, 0x81, 0x9e, 0xa5, 0x45, 0x70, 0xae, 0x46, 0x11, 0xac, 0xb7, 0x6a, 0xaa, + 0x03, 0x25, 0x87, 0xd5, 0xcd, 0x05, 0xdb, 0xc5, 0xfc, 0xd8, 0x01, 0x12, 0x9e, 0xdb, 0x70, 0x07, + 0xcc, 0x79, 0x8c, 0x60, 0x21, 0xef, 0x56, 0x6f, 0xa1, 0xa9, 0x9d, 0x0f, 0xa7, 0x67, 0xac, 0xf5, + 0xed, 0xff, 0x9c, 0x92, 0x20, 0xa7, 0x92, 0xc4, 0x46, 0xe5, 0xca, 0x54, 0x92, 0x00, 0x1a, 0x60, + 0x8e, 0x91, 0x98, 0x0e, 0xe4, 0x8d, 0xcd, 0xe7, 0x27, 0xf2, 0x7b, 0x53, 0x07, 0xf3, 0xc3, 0x2b, + 0x36, 0xbf, 0x6a, 0xe0, 0xce, 0x44, 0xe9, 0x79, 0x4a, 0x13, 0x4e, 0x60, 0x17, 0x2c, 0x5f, 0x50, + 0xc0, 0x25, 0x8c, 0x51, 0x56, 0xb0, 0xea, 0x2d, 0xa8, 0x0a, 0x64, 0xa9, 0x67, 0x1d, 0x14, 0x23, + 0xe9, 0x2c, 0x8d, 0x6b, 0xf3, 0x3c, 0x87, 0xc3, 0x97, 0xe0, 0x1a, 0x23, 0x3c, 0x8b, 0x84, 0x9a, + 0x0f, 0x7b, 0xf6, 0x7c, 0x5c, 0xae, 0xcb, 0x29, 0x28, 0x1c, 0x45, 0x65, 0x3e, 0x05, 0xc6, 0x34, + 0xd0, 0x5f, 
0x4d, 0x7e, 0xeb, 0x63, 0x15, 0x2c, 0x8d, 0xc6, 0x1e, 0xc8, 0xdc, 0xf0, 0x8b, 0x06, + 0x16, 0x2f, 0xbc, 0x28, 0xf8, 0x64, 0x76, 0xc5, 0x93, 0x1f, 0x61, 0xed, 0x5f, 0x65, 0x34, 0xd7, + 0xde, 0xfd, 0x38, 0xff, 0x50, 0x69, 0x42, 0x94, 0xef, 0x9c, 0xd3, 0xb1, 0x36, 0xda, 0xea, 0xdd, + 0x71, 0xd4, 0x18, 0x2e, 0xa1, 0x42, 0x33, 0xd4, 0x38, 0x83, 0xdf, 0x35, 0xb0, 0x34, 0x41, 0x4e, + 0xb8, 0x71, 0x95, 0xdb, 0x56, 0x0b, 0xa0, 0xd6, 0xbe, 0x62, 0xb4, 0x9c, 0x21, 0xb3, 0x5d, 0x74, + 0xb3, 0x66, 0xb6, 0xf2, 0x6e, 0xfe, 0x94, 0x7f, 0x3a, 0xb2, 0x54, 0xda, 0x8d, 0xb3, 0xf1, 0x66, + 0xec, 0xb8, 0x20, 0xb4, 0xb5, 0xc6, 0xe6, 0x2f, 0x0d, 0xac, 0x78, 0x34, 0x9e, 0x59, 0xc3, 0xa6, + 0x31, 0x41, 0xc9, 0xfd, 0xfc, 0xed, 0xee, 0x6b, 0xaf, 0xb7, 0xcb, 0xe8, 0x80, 0x46, 0x38, 0x09, + 0x2c, 0xca, 0x02, 0x14, 0x90, 0xa4, 0x78, 0xd9, 0x6a, 0xc3, 0xa7, 0x21, 0x9f, 0xfe, 0x43, 0x59, + 0x57, 0xc6, 0xa7, 0x4a, 0x75, 0xab, 0xd3, 0xf9, 0x5c, 0xa9, 0x6f, 0x49, 0xc2, 0x8e, 0xcf, 0x2d, + 0x69, 0xe6, 0xd6, 0x61, 0xd3, 0x2a, 0x13, 0xf3, 0x6f, 0x0a, 0xd2, 0xeb, 0xf8, 0xbc, 0x37, 0x84, + 0xf4, 0x0e, 0x9b, 0x3d, 0x05, 0xf9, 0x59, 0x59, 0x91, 0x7e, 0xdb, 0xee, 0xf8, 0xdc, 0xb6, 0x87, + 0x20, 0xdb, 0x3e, 0x6c, 0xda, 0xb6, 0x82, 0x1d, 0xcd, 0x15, 0x75, 0x3e, 0xfa, 0x1d, 0x00, 0x00, + 0xff, 0xff, 0x1c, 0x27, 0x98, 0x94, 0xf7, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_label_service.pb.go new file mode 100644 index 0000000..cf316c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_label_service.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignLabelService.GetCampaignLabel][google.ads.googleads.v1.services.CampaignLabelService.GetCampaignLabel]. +type GetCampaignLabelRequest struct { + // The resource name of the campaign-label relationship to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignLabelRequest) Reset() { *m = GetCampaignLabelRequest{} } +func (m *GetCampaignLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignLabelRequest) ProtoMessage() {} +func (*GetCampaignLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_service_a33f567caeaa39d2, []int{0} +} +func (m *GetCampaignLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignLabelRequest.Unmarshal(m, b) +} +func (m *GetCampaignLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignLabelRequest.Merge(dst, src) +} +func (m *GetCampaignLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignLabelRequest.Size(m) +} +func (m *GetCampaignLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignLabelRequest proto.InternalMessageInfo + +func (m *GetCampaignLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignLabelService.MutateCampaignLabels][google.ads.googleads.v1.services.CampaignLabelService.MutateCampaignLabels]. +type MutateCampaignLabelsRequest struct { + // ID of the customer whose campaign-label relationships are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on campaign-label relationships. + Operations []*CampaignLabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignLabelsRequest) Reset() { *m = MutateCampaignLabelsRequest{} } +func (m *MutateCampaignLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignLabelsRequest) ProtoMessage() {} +func (*MutateCampaignLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_service_a33f567caeaa39d2, []int{1} +} +func (m *MutateCampaignLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignLabelsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignLabelsRequest.Merge(dst, src) +} +func (m *MutateCampaignLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignLabelsRequest.Size(m) +} +func (m *MutateCampaignLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignLabelsRequest proto.InternalMessageInfo + +func (m *MutateCampaignLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignLabelsRequest) GetOperations() []*CampaignLabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on a campaign-label relationship. +type CampaignLabelOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignLabelOperation_Create + // *CampaignLabelOperation_Remove + Operation isCampaignLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignLabelOperation) Reset() { *m = CampaignLabelOperation{} } +func (m *CampaignLabelOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignLabelOperation) ProtoMessage() {} +func (*CampaignLabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_service_a33f567caeaa39d2, []int{2} +} +func (m *CampaignLabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignLabelOperation.Unmarshal(m, b) +} +func (m *CampaignLabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignLabelOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignLabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignLabelOperation.Merge(dst, src) +} +func (m *CampaignLabelOperation) XXX_Size() int { + return xxx_messageInfo_CampaignLabelOperation.Size(m) +} +func (m *CampaignLabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignLabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignLabelOperation proto.InternalMessageInfo + +type isCampaignLabelOperation_Operation interface { + isCampaignLabelOperation_Operation() +} + +type CampaignLabelOperation_Create struct { + Create *resources.CampaignLabel `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignLabelOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*CampaignLabelOperation_Create) isCampaignLabelOperation_Operation() {} + +func (*CampaignLabelOperation_Remove) isCampaignLabelOperation_Operation() {} + +func (m *CampaignLabelOperation) GetOperation() isCampaignLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignLabelOperation) GetCreate() *resources.CampaignLabel { + if x, ok := m.GetOperation().(*CampaignLabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignLabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignLabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
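Callers that receive a CampaignLabelOperation typically inspect the oneof by type-switching on the wrapper structs rather than calling each getter in turn. The helper below is purely illustrative (describeOperation is a name invented for this sketch, and the resource name is a placeholder), not part of the generated API.

package main

import (
	"fmt"

	"google.golang.org/genproto/googleapis/ads/googleads/v1/services"
)

// describeOperation shows the usual way to inspect the Operation oneof:
// a type switch over the generated wrapper structs.
func describeOperation(op *services.CampaignLabelOperation) string {
	switch o := op.GetOperation().(type) {
	case *services.CampaignLabelOperation_Create:
		// o.Create is a *resources.CampaignLabel describing the new link.
		return fmt.Sprintf("create campaign-label link: %v", o.Create)
	case *services.CampaignLabelOperation_Remove:
		// o.Remove is the resource name of the link to delete.
		return "remove campaign-label link: " + o.Remove
	default:
		return "no operation set"
	}
}

func main() {
	op := &services.CampaignLabelOperation{
		Operation: &services.CampaignLabelOperation_Remove{
			Remove: "customers/1234567890/campaignLabels/111~222", // placeholder
		},
	}
	fmt.Println(describeOperation(op))
}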
+func (*CampaignLabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignLabelOperation_OneofMarshaler, _CampaignLabelOperation_OneofUnmarshaler, _CampaignLabelOperation_OneofSizer, []interface{}{ + (*CampaignLabelOperation_Create)(nil), + (*CampaignLabelOperation_Remove)(nil), + } +} + +func _CampaignLabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignLabelOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignLabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignLabelOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignLabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignLabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignLabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignLabel) + err := b.DecodeMessage(msg) + m.Operation = &CampaignLabelOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignLabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignLabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignLabelOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignLabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignLabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a campaign labels mutate. +type MutateCampaignLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignLabelsResponse) Reset() { *m = MutateCampaignLabelsResponse{} } +func (m *MutateCampaignLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignLabelsResponse) ProtoMessage() {} +func (*MutateCampaignLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_service_a33f567caeaa39d2, []int{3} +} +func (m *MutateCampaignLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignLabelsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignLabelsResponse.Merge(dst, src) +} +func (m *MutateCampaignLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignLabelsResponse.Size(m) +} +func (m *MutateCampaignLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignLabelsResponse proto.InternalMessageInfo + +func (m *MutateCampaignLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignLabelsResponse) GetResults() []*MutateCampaignLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for a campaign label mutate. +type MutateCampaignLabelResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignLabelResult) Reset() { *m = MutateCampaignLabelResult{} } +func (m *MutateCampaignLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignLabelResult) ProtoMessage() {} +func (*MutateCampaignLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_label_service_a33f567caeaa39d2, []int{4} +} +func (m *MutateCampaignLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignLabelResult.Unmarshal(m, b) +} +func (m *MutateCampaignLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignLabelResult.Merge(dst, src) +} +func (m *MutateCampaignLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignLabelResult.Size(m) +} +func (m *MutateCampaignLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignLabelResult proto.InternalMessageInfo + +func (m *MutateCampaignLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignLabelRequest)(nil), "google.ads.googleads.v1.services.GetCampaignLabelRequest") + proto.RegisterType((*MutateCampaignLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignLabelsRequest") + proto.RegisterType((*CampaignLabelOperation)(nil), "google.ads.googleads.v1.services.CampaignLabelOperation") + proto.RegisterType((*MutateCampaignLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignLabelsResponse") + proto.RegisterType((*MutateCampaignLabelResult)(nil), "google.ads.googleads.v1.services.MutateCampaignLabelResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignLabelServiceClient is the client API for CampaignLabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignLabelServiceClient interface { + // Returns the requested campaign-label relationship in full detail. + GetCampaignLabel(ctx context.Context, in *GetCampaignLabelRequest, opts ...grpc.CallOption) (*resources.CampaignLabel, error) + // Creates and removes campaign-label relationships. + // Operation statuses are returned. 
+ MutateCampaignLabels(ctx context.Context, in *MutateCampaignLabelsRequest, opts ...grpc.CallOption) (*MutateCampaignLabelsResponse, error) +} + +type campaignLabelServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignLabelServiceClient(cc *grpc.ClientConn) CampaignLabelServiceClient { + return &campaignLabelServiceClient{cc} +} + +func (c *campaignLabelServiceClient) GetCampaignLabel(ctx context.Context, in *GetCampaignLabelRequest, opts ...grpc.CallOption) (*resources.CampaignLabel, error) { + out := new(resources.CampaignLabel) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignLabelService/GetCampaignLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignLabelServiceClient) MutateCampaignLabels(ctx context.Context, in *MutateCampaignLabelsRequest, opts ...grpc.CallOption) (*MutateCampaignLabelsResponse, error) { + out := new(MutateCampaignLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignLabelService/MutateCampaignLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignLabelServiceServer is the server API for CampaignLabelService service. +type CampaignLabelServiceServer interface { + // Returns the requested campaign-label relationship in full detail. + GetCampaignLabel(context.Context, *GetCampaignLabelRequest) (*resources.CampaignLabel, error) + // Creates and removes campaign-label relationships. + // Operation statuses are returned. + MutateCampaignLabels(context.Context, *MutateCampaignLabelsRequest) (*MutateCampaignLabelsResponse, error) +} + +func RegisterCampaignLabelServiceServer(s *grpc.Server, srv CampaignLabelServiceServer) { + s.RegisterService(&_CampaignLabelService_serviceDesc, srv) +} + +func _CampaignLabelService_GetCampaignLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignLabelServiceServer).GetCampaignLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignLabelService/GetCampaignLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignLabelServiceServer).GetCampaignLabel(ctx, req.(*GetCampaignLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignLabelService_MutateCampaignLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignLabelServiceServer).MutateCampaignLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignLabelService/MutateCampaignLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignLabelServiceServer).MutateCampaignLabels(ctx, req.(*MutateCampaignLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignLabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignLabelService", + HandlerType: (*CampaignLabelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: 
"GetCampaignLabel", + Handler: _CampaignLabelService_GetCampaignLabel_Handler, + }, + { + MethodName: "MutateCampaignLabels", + Handler: _CampaignLabelService_MutateCampaignLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_label_service.proto", fileDescriptor_campaign_label_service_a33f567caeaa39d2) +} + +var fileDescriptor_campaign_label_service_a33f567caeaa39d2 = []byte{ + // 668 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xdd, 0x6a, 0xd4, 0x4e, + 0x14, 0xff, 0x27, 0xfb, 0xa7, 0xda, 0xd9, 0xfa, 0xc1, 0x58, 0x6d, 0xba, 0x16, 0x5d, 0x62, 0xc1, + 0xb2, 0x17, 0x49, 0x77, 0x0b, 0x52, 0x53, 0xb6, 0xb8, 0x15, 0xdb, 0x2a, 0x6a, 0x4b, 0x8a, 0x45, + 0x64, 0x21, 0x4c, 0x93, 0x69, 0x08, 0x24, 0x99, 0x38, 0x33, 0x59, 0x29, 0xa5, 0x20, 0xde, 0x7a, + 0xe9, 0x1b, 0x78, 0xd9, 0x37, 0xf0, 0xc2, 0x17, 0xf0, 0x56, 0x7c, 0x03, 0xf1, 0xc2, 0xa7, 0x90, + 0x64, 0x32, 0xb1, 0x59, 0x77, 0x59, 0xed, 0xdd, 0x99, 0xf3, 0xf1, 0x3b, 0xe7, 0x77, 0x3e, 0x06, + 0x74, 0x7d, 0x42, 0xfc, 0x10, 0x9b, 0xc8, 0x63, 0xa6, 0x10, 0x33, 0x69, 0xd0, 0x36, 0x19, 0xa6, + 0x83, 0xc0, 0xc5, 0xcc, 0x74, 0x51, 0x94, 0xa0, 0xc0, 0x8f, 0x9d, 0x10, 0x1d, 0xe0, 0xd0, 0x29, + 0xf4, 0x46, 0x42, 0x09, 0x27, 0xb0, 0x29, 0x62, 0x0c, 0xe4, 0x31, 0xa3, 0x0c, 0x37, 0x06, 0x6d, + 0x43, 0x86, 0x37, 0xee, 0x8d, 0x4b, 0x40, 0x31, 0x23, 0x29, 0xfd, 0x33, 0x83, 0x40, 0x6e, 0x2c, + 0xc8, 0xb8, 0x24, 0x30, 0x51, 0x1c, 0x13, 0x8e, 0x78, 0x40, 0x62, 0x56, 0x58, 0x6f, 0x15, 0xd6, + 0xfc, 0x75, 0x90, 0x1e, 0x9a, 0x6f, 0x28, 0x4a, 0x12, 0x4c, 0xa5, 0x7d, 0xae, 0xb0, 0xd3, 0xc4, + 0x35, 0x19, 0x47, 0x3c, 0x2d, 0x0c, 0xfa, 0x3a, 0x98, 0xdb, 0xc2, 0xfc, 0x61, 0x91, 0xf1, 0x69, + 0x96, 0xd0, 0xc6, 0xaf, 0x53, 0xcc, 0x38, 0xbc, 0x03, 0x2e, 0xc9, 0x9a, 0x9c, 0x18, 0x45, 0x58, + 0x53, 0x9a, 0xca, 0xd2, 0xb4, 0x3d, 0x23, 0x95, 0xcf, 0x51, 0x84, 0xf5, 0x1f, 0x0a, 0xb8, 0xf9, + 0x2c, 0xe5, 0x88, 0xe3, 0x0a, 0x06, 0x93, 0x20, 0xb7, 0x41, 0xdd, 0x4d, 0x19, 0x27, 0x11, 0xa6, + 0x4e, 0xe0, 0x15, 0x10, 0x40, 0xaa, 0x1e, 0x7b, 0xf0, 0x25, 0x00, 0x24, 0xc1, 0x54, 0xb0, 0xd1, + 0xd4, 0x66, 0x6d, 0xa9, 0xde, 0x59, 0x35, 0x26, 0xb5, 0xd1, 0xa8, 0x64, 0xdb, 0x91, 0x00, 0xf6, + 0x19, 0x2c, 0x78, 0x17, 0x5c, 0x49, 0x10, 0xe5, 0x01, 0x0a, 0x9d, 0x43, 0x14, 0x84, 0x29, 0xc5, + 0x5a, 0xad, 0xa9, 0x2c, 0x5d, 0xb4, 0x2f, 0x17, 0xea, 0x4d, 0xa1, 0xcd, 0x88, 0x0e, 0x50, 0x18, + 0x78, 0x88, 0x63, 0x87, 0xc4, 0xe1, 0x91, 0xf6, 0x7f, 0xee, 0x36, 0x23, 0x95, 0x3b, 0x71, 0x78, + 0xa4, 0xbf, 0x57, 0xc0, 0x8d, 0xd1, 0x49, 0xe1, 0x13, 0x30, 0xe5, 0x52, 0x8c, 0xb8, 0xe8, 0x50, + 0xbd, 0xb3, 0x3c, 0xb6, 0xfc, 0x72, 0xc6, 0xd5, 0xfa, 0xb7, 0xff, 0xb3, 0x0b, 0x04, 0xa8, 0x81, + 0x29, 0x8a, 0x23, 0x32, 0xc0, 0x9a, 0x9a, 0xb5, 0x2a, 0xb3, 0x88, 0xf7, 0x46, 0x1d, 0x4c, 0x97, + 0xe4, 0xf4, 0xcf, 0x0a, 0x58, 0x18, 0xdd, 0x76, 0x96, 0x90, 0x98, 0x61, 0xb8, 0x09, 0xae, 0x0f, + 0x91, 0x77, 0x30, 0xa5, 0x84, 0xe6, 0x2d, 0xa8, 0x77, 0xa0, 0x2c, 0x91, 0x26, 0xae, 0xb1, 0x97, + 0x2f, 0x84, 0x7d, 0xad, 0xda, 0x96, 0x47, 0x99, 0x3b, 0x7c, 0x01, 0x2e, 0x50, 0xcc, 0xd2, 0x90, + 0xcb, 0xd9, 0xac, 0x4d, 0x9e, 0xcd, 0x88, 0xc2, 0xec, 0x1c, 0xc3, 0x96, 0x58, 0xfa, 0x03, 0x30, + 0x3f, 0xd6, 0xeb, 0xaf, 0x16, 0xaf, 0x73, 0x5a, 0x03, 0xb3, 0x95, 0xe0, 0x3d, 0x91, 0x1e, 0x7e, + 0x52, 0xc0, 0xd5, 0xe1, 0x95, 0x86, 0xf7, 0x27, 0x57, 0x3d, 0xe6, 0x0c, 0x1a, 0xff, 0x3c, 0x4d, + 0x7d, 0xf5, 0xdd, 
0xd7, 0xef, 0x1f, 0xd4, 0x0e, 0x5c, 0xce, 0xce, 0xfa, 0xb8, 0x42, 0xa5, 0x2b, + 0x37, 0x9f, 0x99, 0xad, 0xf2, 0xce, 0xc5, 0xe8, 0xcc, 0xd6, 0x09, 0xfc, 0xa6, 0x80, 0xd9, 0x51, + 0x63, 0x85, 0xdd, 0x73, 0x75, 0x5d, 0x5e, 0x61, 0x63, 0xfd, 0xbc, 0xe1, 0x62, 0x9b, 0xf4, 0xf5, + 0x9c, 0xd1, 0xaa, 0xbe, 0x92, 0x31, 0xfa, 0x4d, 0xe1, 0xf8, 0xcc, 0x69, 0x77, 0x5b, 0x27, 0x43, + 0x84, 0xac, 0x28, 0x87, 0xb4, 0x94, 0xd6, 0xc6, 0x5b, 0x15, 0x2c, 0xba, 0x24, 0x9a, 0x58, 0xc5, + 0xc6, 0xfc, 0xa8, 0x91, 0xee, 0x66, 0x3f, 0xd5, 0xae, 0xf2, 0x6a, 0xbb, 0x08, 0xf7, 0x49, 0x88, + 0x62, 0xdf, 0x20, 0xd4, 0x37, 0x7d, 0x1c, 0xe7, 0xff, 0x98, 0xfc, 0x4a, 0x93, 0x80, 0x8d, 0xff, + 0xba, 0xd7, 0xa4, 0xf0, 0x51, 0xad, 0x6d, 0xf5, 0x7a, 0xa7, 0x6a, 0x73, 0x4b, 0x00, 0xf6, 0x3c, + 0x66, 0x08, 0x31, 0x93, 0xf6, 0xdb, 0x46, 0x91, 0x98, 0x7d, 0x91, 0x2e, 0xfd, 0x9e, 0xc7, 0xfa, + 0xa5, 0x4b, 0x7f, 0xbf, 0xdd, 0x97, 0x2e, 0x3f, 0xd5, 0x45, 0xa1, 0xb7, 0xac, 0x9e, 0xc7, 0x2c, + 0xab, 0x74, 0xb2, 0xac, 0xfd, 0xb6, 0x65, 0x49, 0xb7, 0x83, 0xa9, 0xbc, 0xce, 0x95, 0x5f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x6a, 0xf6, 0xcc, 0x51, 0x61, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_service.pb.go new file mode 100644 index 0000000..a4c4ffd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_service.pb.go @@ -0,0 +1,587 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignService.GetCampaign][google.ads.googleads.v1.services.CampaignService.GetCampaign]. +type GetCampaignRequest struct { + // The resource name of the campaign to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignRequest) Reset() { *m = GetCampaignRequest{} } +func (m *GetCampaignRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignRequest) ProtoMessage() {} +func (*GetCampaignRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_service_1e7ae00c38b671a3, []int{0} +} +func (m *GetCampaignRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignRequest.Unmarshal(m, b) +} +func (m *GetCampaignRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignRequest.Merge(dst, src) +} +func (m *GetCampaignRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignRequest.Size(m) +} +func (m *GetCampaignRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignRequest proto.InternalMessageInfo + +func (m *GetCampaignRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignService.MutateCampaigns][google.ads.googleads.v1.services.CampaignService.MutateCampaigns]. +type MutateCampaignsRequest struct { + // The ID of the customer whose campaigns are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaigns. + Operations []*CampaignOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignsRequest) Reset() { *m = MutateCampaignsRequest{} } +func (m *MutateCampaignsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignsRequest) ProtoMessage() {} +func (*MutateCampaignsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_service_1e7ae00c38b671a3, []int{1} +} +func (m *MutateCampaignsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignsRequest.Merge(dst, src) +} +func (m *MutateCampaignsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignsRequest.Size(m) +} +func (m *MutateCampaignsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignsRequest proto.InternalMessageInfo + +func (m *MutateCampaignsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignsRequest) GetOperations() []*CampaignOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a campaign. +type CampaignOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignOperation_Create + // *CampaignOperation_Update + // *CampaignOperation_Remove + Operation isCampaignOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignOperation) Reset() { *m = CampaignOperation{} } +func (m *CampaignOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignOperation) ProtoMessage() {} +func (*CampaignOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_service_1e7ae00c38b671a3, []int{2} +} +func (m *CampaignOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignOperation.Unmarshal(m, b) +} +func (m *CampaignOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignOperation.Merge(dst, src) +} +func (m *CampaignOperation) XXX_Size() int { + return xxx_messageInfo_CampaignOperation.Size(m) +} +func (m *CampaignOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignOperation proto.InternalMessageInfo + +func (m *CampaignOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCampaignOperation_Operation interface { + isCampaignOperation_Operation() +} + +type CampaignOperation_Create struct { + Create *resources.Campaign `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignOperation_Update struct { + Update *resources.Campaign `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CampaignOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignOperation_Create) isCampaignOperation_Operation() {} + +func (*CampaignOperation_Update) isCampaignOperation_Operation() {} + +func (*CampaignOperation_Remove) isCampaignOperation_Operation() {} + +func (m *CampaignOperation) GetOperation() isCampaignOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignOperation) GetCreate() *resources.Campaign { + if x, ok := m.GetOperation().(*CampaignOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignOperation) GetUpdate() *resources.Campaign { + if x, ok := m.GetOperation().(*CampaignOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CampaignOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
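The ValidateOnly flag on MutateCampaignsRequest follows the same pattern as the feed and label services: the request is checked but not applied, and only errors come back. A hedged sketch of that call shape follows; the endpoint, customer ID, and campaign resource name are placeholders, and credentials plus Google Ads request metadata are again omitted.

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"

	"google.golang.org/genproto/googleapis/ads/googleads/v1/services"
)

func main() {
	// Placeholder connection; see the earlier sketches for the same caveat
	// about credentials and request metadata.
	conn, err := grpc.Dial("localhost:10000", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := services.NewCampaignServiceClient(conn)

	// ValidateOnly asks the service to check the operations without applying
	// them; per the field documentation only errors are returned, no results.
	req := &services.MutateCampaignsRequest{
		CustomerId: "1234567890", // placeholder
		Operations: []*services.CampaignOperation{
			{Operation: &services.CampaignOperation_Remove{
				Remove: "customers/1234567890/campaigns/987654", // placeholder
			}},
		},
		ValidateOnly: true,
	}

	if _, err := client.MutateCampaigns(context.Background(), req); err != nil {
		log.Fatalf("validation failed: %v", err)
	}
	log.Println("operations validated")
}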
+func (*CampaignOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignOperation_OneofMarshaler, _CampaignOperation_OneofUnmarshaler, _CampaignOperation_OneofSizer, []interface{}{ + (*CampaignOperation_Create)(nil), + (*CampaignOperation_Update)(nil), + (*CampaignOperation_Remove)(nil), + } +} + +func _CampaignOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CampaignOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Campaign) + err := b.DecodeMessage(msg) + m.Operation = &CampaignOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Campaign) + err := b.DecodeMessage(msg) + m.Operation = &CampaignOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for campaign mutate. +type MutateCampaignsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignsResponse) Reset() { *m = MutateCampaignsResponse{} } +func (m *MutateCampaignsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignsResponse) ProtoMessage() {} +func (*MutateCampaignsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_service_1e7ae00c38b671a3, []int{3} +} +func (m *MutateCampaignsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignsResponse.Merge(dst, src) +} +func (m *MutateCampaignsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignsResponse.Size(m) +} +func (m *MutateCampaignsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignsResponse proto.InternalMessageInfo + +func (m *MutateCampaignsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignsResponse) GetResults() []*MutateCampaignResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the campaign mutate. +type MutateCampaignResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignResult) Reset() { *m = MutateCampaignResult{} } +func (m *MutateCampaignResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignResult) ProtoMessage() {} +func (*MutateCampaignResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_service_1e7ae00c38b671a3, []int{4} +} +func (m *MutateCampaignResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignResult.Unmarshal(m, b) +} +func (m *MutateCampaignResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignResult.Merge(dst, src) +} +func (m *MutateCampaignResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignResult.Size(m) +} +func (m *MutateCampaignResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignResult proto.InternalMessageInfo + +func (m *MutateCampaignResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignRequest)(nil), "google.ads.googleads.v1.services.GetCampaignRequest") + proto.RegisterType((*MutateCampaignsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignsRequest") + proto.RegisterType((*CampaignOperation)(nil), "google.ads.googleads.v1.services.CampaignOperation") + proto.RegisterType((*MutateCampaignsResponse)(nil), 
"google.ads.googleads.v1.services.MutateCampaignsResponse") + proto.RegisterType((*MutateCampaignResult)(nil), "google.ads.googleads.v1.services.MutateCampaignResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignServiceClient is the client API for CampaignService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignServiceClient interface { + // Returns the requested campaign in full detail. + GetCampaign(ctx context.Context, in *GetCampaignRequest, opts ...grpc.CallOption) (*resources.Campaign, error) + // Creates, updates, or removes campaigns. Operation statuses are returned. + MutateCampaigns(ctx context.Context, in *MutateCampaignsRequest, opts ...grpc.CallOption) (*MutateCampaignsResponse, error) +} + +type campaignServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignServiceClient(cc *grpc.ClientConn) CampaignServiceClient { + return &campaignServiceClient{cc} +} + +func (c *campaignServiceClient) GetCampaign(ctx context.Context, in *GetCampaignRequest, opts ...grpc.CallOption) (*resources.Campaign, error) { + out := new(resources.Campaign) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignService/GetCampaign", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignServiceClient) MutateCampaigns(ctx context.Context, in *MutateCampaignsRequest, opts ...grpc.CallOption) (*MutateCampaignsResponse, error) { + out := new(MutateCampaignsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignService/MutateCampaigns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignServiceServer is the server API for CampaignService service. +type CampaignServiceServer interface { + // Returns the requested campaign in full detail. + GetCampaign(context.Context, *GetCampaignRequest) (*resources.Campaign, error) + // Creates, updates, or removes campaigns. Operation statuses are returned. 
+ MutateCampaigns(context.Context, *MutateCampaignsRequest) (*MutateCampaignsResponse, error) +} + +func RegisterCampaignServiceServer(s *grpc.Server, srv CampaignServiceServer) { + s.RegisterService(&_CampaignService_serviceDesc, srv) +} + +func _CampaignService_GetCampaign_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignServiceServer).GetCampaign(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignService/GetCampaign", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignServiceServer).GetCampaign(ctx, req.(*GetCampaignRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignService_MutateCampaigns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignServiceServer).MutateCampaigns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignService/MutateCampaigns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignServiceServer).MutateCampaigns(ctx, req.(*MutateCampaignsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignService", + HandlerType: (*CampaignServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaign", + Handler: _CampaignService_GetCampaign_Handler, + }, + { + MethodName: "MutateCampaigns", + Handler: _CampaignService_MutateCampaigns_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_service.proto", fileDescriptor_campaign_service_1e7ae00c38b671a3) +} + +var fileDescriptor_campaign_service_1e7ae00c38b671a3 = []byte{ + // 704 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x6b, 0xd4, 0x4e, + 0x18, 0xfe, 0x25, 0xfd, 0x51, 0xed, 0xa4, 0x5a, 0x1c, 0xab, 0x5d, 0x16, 0xd1, 0x25, 0x16, 0x2c, + 0x5b, 0x9c, 0xb8, 0xbb, 0xa2, 0x36, 0xa5, 0x87, 0xad, 0xf4, 0x8f, 0x87, 0xda, 0x92, 0x42, 0x0f, + 0xb2, 0x10, 0xa6, 0xc9, 0x34, 0x84, 0x26, 0x99, 0x38, 0x33, 0x59, 0x29, 0xa5, 0x17, 0xc1, 0x4f, + 0xe0, 0x37, 0x10, 0xbc, 0x78, 0xf5, 0x13, 0x78, 0xf5, 0xea, 0xd5, 0xa3, 0x27, 0xbf, 0x82, 0x08, + 0x92, 0x4c, 0x26, 0xdd, 0x6e, 0x5d, 0xd6, 0xf6, 0x36, 0x79, 0xf3, 0x3c, 0xcf, 0xfb, 0xcc, 0xfb, + 0x67, 0xc0, 0xd3, 0x80, 0xd2, 0x20, 0x22, 0x16, 0xf6, 0xb9, 0x25, 0x8f, 0xf9, 0xa9, 0xdf, 0xb2, + 0x38, 0x61, 0xfd, 0xd0, 0x23, 0xdc, 0xf2, 0x70, 0x9c, 0xe2, 0x30, 0x48, 0xdc, 0x32, 0x82, 0x52, + 0x46, 0x05, 0x85, 0x0d, 0x89, 0x46, 0xd8, 0xe7, 0xa8, 0x22, 0xa2, 0x7e, 0x0b, 0x29, 0x62, 0xfd, + 0xd1, 0x28, 0x69, 0x46, 0x38, 0xcd, 0xd8, 0xa0, 0xb6, 0xd4, 0xac, 0xdf, 0x51, 0x8c, 0x34, 0xb4, + 0x70, 0x92, 0x50, 0x81, 0x45, 0x48, 0x13, 0x5e, 0xfe, 0x2d, 0x33, 0x5a, 0xc5, 0xd7, 0x7e, 0x76, + 0x60, 0x1d, 0x84, 0x24, 0xf2, 0xdd, 0x18, 0xf3, 0xc3, 0x12, 0x71, 
0x77, 0x18, 0xf1, 0x86, 0xe1, + 0x34, 0x25, 0x4c, 0x29, 0xcc, 0x95, 0xff, 0x59, 0xea, 0x59, 0x5c, 0x60, 0x91, 0x95, 0x3f, 0xcc, + 0x25, 0x00, 0x37, 0x88, 0x78, 0x5e, 0xba, 0x71, 0xc8, 0xeb, 0x8c, 0x70, 0x01, 0xef, 0x83, 0x6b, + 0xca, 0xaa, 0x9b, 0xe0, 0x98, 0xd4, 0xb4, 0x86, 0xb6, 0x30, 0xe5, 0x4c, 0xab, 0xe0, 0x4b, 0x1c, + 0x13, 0xf3, 0xbb, 0x06, 0x6e, 0x6f, 0x65, 0x02, 0x0b, 0xa2, 0xe8, 0x5c, 0xf1, 0xef, 0x01, 0xc3, + 0xcb, 0xb8, 0xa0, 0x31, 0x61, 0x6e, 0xe8, 0x97, 0x6c, 0xa0, 0x42, 0x2f, 0x7c, 0xb8, 0x0b, 0x00, + 0x4d, 0x09, 0x93, 0xb7, 0xac, 0xe9, 0x8d, 0x89, 0x05, 0xa3, 0xdd, 0x41, 0xe3, 0x0a, 0x8b, 0x54, + 0xa2, 0x6d, 0xc5, 0x75, 0x06, 0x64, 0xe0, 0x03, 0x30, 0x93, 0x62, 0x26, 0x42, 0x1c, 0xb9, 0x07, + 0x38, 0x8c, 0x32, 0x46, 0x6a, 0x13, 0x0d, 0x6d, 0xe1, 0xaa, 0x73, 0xbd, 0x0c, 0xaf, 0xcb, 0x68, + 0x7e, 0xbd, 0x3e, 0x8e, 0x42, 0x1f, 0x0b, 0xe2, 0xd2, 0x24, 0x3a, 0xaa, 0xfd, 0x5f, 0xc0, 0xa6, + 0x55, 0x70, 0x3b, 0x89, 0x8e, 0xcc, 0x77, 0x3a, 0xb8, 0x71, 0x2e, 0x1f, 0x5c, 0x06, 0x46, 0x96, + 0x16, 0xc4, 0xbc, 0xfa, 0x05, 0xd1, 0x68, 0xd7, 0x95, 0x73, 0x55, 0x7e, 0xb4, 0x9e, 0x37, 0x68, + 0x0b, 0xf3, 0x43, 0x07, 0x48, 0x78, 0x7e, 0x86, 0x6b, 0x60, 0xd2, 0x63, 0x04, 0x0b, 0x59, 0x4f, + 0xa3, 0xbd, 0x38, 0xf2, 0xc6, 0xd5, 0xa0, 0x54, 0x57, 0xde, 0xfc, 0xcf, 0x29, 0xc9, 0xb9, 0x8c, + 0x14, 0xad, 0xe9, 0x97, 0x92, 0x91, 0x64, 0x58, 0x03, 0x93, 0x8c, 0xc4, 0xb4, 0x2f, 0xab, 0x34, + 0x95, 0xff, 0x91, 0xdf, 0xab, 0x06, 0x98, 0xaa, 0xca, 0x6a, 0x7e, 0xd6, 0xc0, 0xdc, 0xb9, 0x36, + 0xf3, 0x94, 0x26, 0x9c, 0xc0, 0x75, 0x70, 0x6b, 0xa8, 0xe2, 0x2e, 0x61, 0x8c, 0xb2, 0x42, 0xd1, + 0x68, 0x43, 0x65, 0x8c, 0xa5, 0x1e, 0xda, 0x2d, 0xc6, 0xce, 0xb9, 0x79, 0xb6, 0x17, 0x6b, 0x39, + 0x1c, 0xee, 0x80, 0x2b, 0x8c, 0xf0, 0x2c, 0x12, 0x6a, 0x16, 0x9e, 0x8c, 0x9f, 0x85, 0xb3, 0x9e, + 0x9c, 0x82, 0xee, 0x28, 0x19, 0x73, 0x19, 0xcc, 0xfe, 0x0d, 0xf0, 0x4f, 0x93, 0xdd, 0xfe, 0xad, + 0x83, 0x19, 0xc5, 0xdb, 0x95, 0xf9, 0xe0, 0x47, 0x0d, 0x18, 0x03, 0x9b, 0x02, 0x1f, 0x8f, 0x77, + 0x78, 0x7e, 0xb1, 0xea, 0x17, 0x69, 0x95, 0xd9, 0x79, 0xfb, 0xed, 0xc7, 0x7b, 0xfd, 0x21, 0x5c, + 0xcc, 0x9f, 0x8e, 0xe3, 0x33, 0xb6, 0x57, 0xd4, 0x2e, 0x71, 0xab, 0x59, 0xbd, 0x25, 0xdc, 0x6a, + 0x9e, 0xc0, 0x2f, 0x1a, 0x98, 0x19, 0x6a, 0x17, 0x7c, 0x76, 0xd1, 0x6a, 0xaa, 0x45, 0xae, 0x2f, + 0x5d, 0x82, 0x29, 0x67, 0xc3, 0x5c, 0x2a, 0xdc, 0x77, 0x4c, 0x94, 0xbb, 0x3f, 0xb5, 0x7b, 0x3c, + 0xf0, 0x30, 0xac, 0x34, 0x4f, 0x4e, 0xcd, 0xdb, 0x71, 0x21, 0x64, 0x6b, 0xcd, 0xd5, 0x5f, 0x1a, + 0x98, 0xf7, 0x68, 0x3c, 0x36, 0xf7, 0xea, 0xec, 0x50, 0x97, 0x76, 0xf2, 0xfd, 0xdb, 0xd1, 0x5e, + 0x6d, 0x96, 0xcc, 0x80, 0x46, 0x38, 0x09, 0x10, 0x65, 0x81, 0x15, 0x90, 0xa4, 0xd8, 0x4e, 0xf5, + 0x20, 0xa7, 0x21, 0x1f, 0xfd, 0xf4, 0x2f, 0xab, 0xc3, 0x07, 0x7d, 0x62, 0xa3, 0xdb, 0xfd, 0xa4, + 0x37, 0x36, 0xa4, 0x60, 0xd7, 0xe7, 0x48, 0x1e, 0xf3, 0xd3, 0x5e, 0x0b, 0x95, 0x89, 0xf9, 0x57, + 0x05, 0xe9, 0x75, 0x7d, 0xde, 0xab, 0x20, 0xbd, 0xbd, 0x56, 0x4f, 0x41, 0x7e, 0xea, 0xf3, 0x32, + 0x6e, 0xdb, 0x5d, 0x9f, 0xdb, 0x76, 0x05, 0xb2, 0xed, 0xbd, 0x96, 0x6d, 0x2b, 0xd8, 0xfe, 0x64, + 0xe1, 0xb3, 0xf3, 0x27, 0x00, 0x00, 0xff, 0xff, 0x56, 0xbf, 0x71, 0xcc, 0xa1, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_shared_set_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_shared_set_service.pb.go new file mode 100644 index 0000000..2f33488 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/campaign_shared_set_service.pb.go @@ -0,0 +1,542 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/campaign_shared_set_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CampaignSharedSetService.GetCampaignSharedSet][google.ads.googleads.v1.services.CampaignSharedSetService.GetCampaignSharedSet]. +type GetCampaignSharedSetRequest struct { + // The resource name of the campaign shared set to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCampaignSharedSetRequest) Reset() { *m = GetCampaignSharedSetRequest{} } +func (m *GetCampaignSharedSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetCampaignSharedSetRequest) ProtoMessage() {} +func (*GetCampaignSharedSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55, []int{0} +} +func (m *GetCampaignSharedSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCampaignSharedSetRequest.Unmarshal(m, b) +} +func (m *GetCampaignSharedSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCampaignSharedSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetCampaignSharedSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCampaignSharedSetRequest.Merge(dst, src) +} +func (m *GetCampaignSharedSetRequest) XXX_Size() int { + return xxx_messageInfo_GetCampaignSharedSetRequest.Size(m) +} +func (m *GetCampaignSharedSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCampaignSharedSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCampaignSharedSetRequest proto.InternalMessageInfo + +func (m *GetCampaignSharedSetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CampaignSharedSetService.MutateCampaignSharedSets][google.ads.googleads.v1.services.CampaignSharedSetService.MutateCampaignSharedSets]. +type MutateCampaignSharedSetsRequest struct { + // The ID of the customer whose campaign shared sets are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual campaign shared sets. 
+ Operations []*CampaignSharedSetOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignSharedSetsRequest) Reset() { *m = MutateCampaignSharedSetsRequest{} } +func (m *MutateCampaignSharedSetsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignSharedSetsRequest) ProtoMessage() {} +func (*MutateCampaignSharedSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55, []int{1} +} +func (m *MutateCampaignSharedSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignSharedSetsRequest.Unmarshal(m, b) +} +func (m *MutateCampaignSharedSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignSharedSetsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignSharedSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignSharedSetsRequest.Merge(dst, src) +} +func (m *MutateCampaignSharedSetsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCampaignSharedSetsRequest.Size(m) +} +func (m *MutateCampaignSharedSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignSharedSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignSharedSetsRequest proto.InternalMessageInfo + +func (m *MutateCampaignSharedSetsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCampaignSharedSetsRequest) GetOperations() []*CampaignSharedSetOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCampaignSharedSetsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCampaignSharedSetsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on an campaign shared set. +type CampaignSharedSetOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CampaignSharedSetOperation_Create + // *CampaignSharedSetOperation_Remove + Operation isCampaignSharedSetOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CampaignSharedSetOperation) Reset() { *m = CampaignSharedSetOperation{} } +func (m *CampaignSharedSetOperation) String() string { return proto.CompactTextString(m) } +func (*CampaignSharedSetOperation) ProtoMessage() {} +func (*CampaignSharedSetOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55, []int{2} +} +func (m *CampaignSharedSetOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CampaignSharedSetOperation.Unmarshal(m, b) +} +func (m *CampaignSharedSetOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CampaignSharedSetOperation.Marshal(b, m, deterministic) +} +func (dst *CampaignSharedSetOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CampaignSharedSetOperation.Merge(dst, src) +} +func (m *CampaignSharedSetOperation) XXX_Size() int { + return xxx_messageInfo_CampaignSharedSetOperation.Size(m) +} +func (m *CampaignSharedSetOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CampaignSharedSetOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CampaignSharedSetOperation proto.InternalMessageInfo + +type isCampaignSharedSetOperation_Operation interface { + isCampaignSharedSetOperation_Operation() +} + +type CampaignSharedSetOperation_Create struct { + Create *resources.CampaignSharedSet `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CampaignSharedSetOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CampaignSharedSetOperation_Create) isCampaignSharedSetOperation_Operation() {} + +func (*CampaignSharedSetOperation_Remove) isCampaignSharedSetOperation_Operation() {} + +func (m *CampaignSharedSetOperation) GetOperation() isCampaignSharedSetOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CampaignSharedSetOperation) GetCreate() *resources.CampaignSharedSet { + if x, ok := m.GetOperation().(*CampaignSharedSetOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CampaignSharedSetOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CampaignSharedSetOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CampaignSharedSetOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CampaignSharedSetOperation_OneofMarshaler, _CampaignSharedSetOperation_OneofUnmarshaler, _CampaignSharedSetOperation_OneofSizer, []interface{}{ + (*CampaignSharedSetOperation_Create)(nil), + (*CampaignSharedSetOperation_Remove)(nil), + } +} + +func _CampaignSharedSetOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CampaignSharedSetOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignSharedSetOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CampaignSharedSetOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CampaignSharedSetOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CampaignSharedSetOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CampaignSharedSetOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CampaignSharedSet) + err := b.DecodeMessage(msg) + m.Operation = &CampaignSharedSetOperation_Create{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CampaignSharedSetOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CampaignSharedSetOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CampaignSharedSetOperation) + // operation + switch x := m.Operation.(type) { + case *CampaignSharedSetOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CampaignSharedSetOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a campaign shared set mutate. +type MutateCampaignSharedSetsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCampaignSharedSetResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignSharedSetsResponse) Reset() { *m = MutateCampaignSharedSetsResponse{} } +func (m *MutateCampaignSharedSetsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignSharedSetsResponse) ProtoMessage() {} +func (*MutateCampaignSharedSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55, []int{3} +} +func (m *MutateCampaignSharedSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignSharedSetsResponse.Unmarshal(m, b) +} +func (m *MutateCampaignSharedSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignSharedSetsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignSharedSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignSharedSetsResponse.Merge(dst, src) +} +func (m *MutateCampaignSharedSetsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCampaignSharedSetsResponse.Size(m) +} +func (m *MutateCampaignSharedSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignSharedSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignSharedSetsResponse proto.InternalMessageInfo + +func (m *MutateCampaignSharedSetsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCampaignSharedSetsResponse) GetResults() []*MutateCampaignSharedSetResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the campaign shared set mutate. +type MutateCampaignSharedSetResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCampaignSharedSetResult) Reset() { *m = MutateCampaignSharedSetResult{} } +func (m *MutateCampaignSharedSetResult) String() string { return proto.CompactTextString(m) } +func (*MutateCampaignSharedSetResult) ProtoMessage() {} +func (*MutateCampaignSharedSetResult) Descriptor() ([]byte, []int) { + return fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55, []int{4} +} +func (m *MutateCampaignSharedSetResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCampaignSharedSetResult.Unmarshal(m, b) +} +func (m *MutateCampaignSharedSetResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCampaignSharedSetResult.Marshal(b, m, deterministic) +} +func (dst *MutateCampaignSharedSetResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCampaignSharedSetResult.Merge(dst, src) +} +func (m *MutateCampaignSharedSetResult) XXX_Size() int { + return xxx_messageInfo_MutateCampaignSharedSetResult.Size(m) +} +func (m *MutateCampaignSharedSetResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCampaignSharedSetResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCampaignSharedSetResult proto.InternalMessageInfo + +func (m *MutateCampaignSharedSetResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCampaignSharedSetRequest)(nil), "google.ads.googleads.v1.services.GetCampaignSharedSetRequest") + proto.RegisterType((*MutateCampaignSharedSetsRequest)(nil), "google.ads.googleads.v1.services.MutateCampaignSharedSetsRequest") + proto.RegisterType((*CampaignSharedSetOperation)(nil), "google.ads.googleads.v1.services.CampaignSharedSetOperation") + proto.RegisterType((*MutateCampaignSharedSetsResponse)(nil), "google.ads.googleads.v1.services.MutateCampaignSharedSetsResponse") + proto.RegisterType((*MutateCampaignSharedSetResult)(nil), "google.ads.googleads.v1.services.MutateCampaignSharedSetResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CampaignSharedSetServiceClient is the client API for CampaignSharedSetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CampaignSharedSetServiceClient interface { + // Returns the requested campaign shared set in full detail. + GetCampaignSharedSet(ctx context.Context, in *GetCampaignSharedSetRequest, opts ...grpc.CallOption) (*resources.CampaignSharedSet, error) + // Creates or removes campaign shared sets. Operation statuses are returned. 
+ MutateCampaignSharedSets(ctx context.Context, in *MutateCampaignSharedSetsRequest, opts ...grpc.CallOption) (*MutateCampaignSharedSetsResponse, error) +} + +type campaignSharedSetServiceClient struct { + cc *grpc.ClientConn +} + +func NewCampaignSharedSetServiceClient(cc *grpc.ClientConn) CampaignSharedSetServiceClient { + return &campaignSharedSetServiceClient{cc} +} + +func (c *campaignSharedSetServiceClient) GetCampaignSharedSet(ctx context.Context, in *GetCampaignSharedSetRequest, opts ...grpc.CallOption) (*resources.CampaignSharedSet, error) { + out := new(resources.CampaignSharedSet) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignSharedSetService/GetCampaignSharedSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *campaignSharedSetServiceClient) MutateCampaignSharedSets(ctx context.Context, in *MutateCampaignSharedSetsRequest, opts ...grpc.CallOption) (*MutateCampaignSharedSetsResponse, error) { + out := new(MutateCampaignSharedSetsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CampaignSharedSetService/MutateCampaignSharedSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CampaignSharedSetServiceServer is the server API for CampaignSharedSetService service. +type CampaignSharedSetServiceServer interface { + // Returns the requested campaign shared set in full detail. + GetCampaignSharedSet(context.Context, *GetCampaignSharedSetRequest) (*resources.CampaignSharedSet, error) + // Creates or removes campaign shared sets. Operation statuses are returned. + MutateCampaignSharedSets(context.Context, *MutateCampaignSharedSetsRequest) (*MutateCampaignSharedSetsResponse, error) +} + +func RegisterCampaignSharedSetServiceServer(s *grpc.Server, srv CampaignSharedSetServiceServer) { + s.RegisterService(&_CampaignSharedSetService_serviceDesc, srv) +} + +func _CampaignSharedSetService_GetCampaignSharedSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCampaignSharedSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignSharedSetServiceServer).GetCampaignSharedSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignSharedSetService/GetCampaignSharedSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignSharedSetServiceServer).GetCampaignSharedSet(ctx, req.(*GetCampaignSharedSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CampaignSharedSetService_MutateCampaignSharedSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCampaignSharedSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CampaignSharedSetServiceServer).MutateCampaignSharedSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CampaignSharedSetService/MutateCampaignSharedSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CampaignSharedSetServiceServer).MutateCampaignSharedSets(ctx, req.(*MutateCampaignSharedSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CampaignSharedSetService_serviceDesc = 
grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CampaignSharedSetService", + HandlerType: (*CampaignSharedSetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCampaignSharedSet", + Handler: _CampaignSharedSetService_GetCampaignSharedSet_Handler, + }, + { + MethodName: "MutateCampaignSharedSets", + Handler: _CampaignSharedSetService_MutateCampaignSharedSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/campaign_shared_set_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/campaign_shared_set_service.proto", fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55) +} + +var fileDescriptor_campaign_shared_set_service_3cd8ec5aeb965a55 = []byte{ + // 671 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0x41, 0x6b, 0xd4, 0x4e, + 0x14, 0xff, 0x67, 0xf7, 0x4f, 0xb5, 0xb3, 0x55, 0x61, 0x54, 0x0c, 0xab, 0xb5, 0x4b, 0x2c, 0x58, + 0xf6, 0x90, 0xb0, 0x6b, 0x51, 0x48, 0x5b, 0x24, 0x51, 0xdb, 0x7a, 0xb0, 0x2d, 0x59, 0x58, 0x50, + 0x16, 0xc2, 0x34, 0x99, 0xc6, 0x40, 0x92, 0x89, 0x33, 0x93, 0x95, 0x52, 0x7a, 0x11, 0xbf, 0x41, + 0xbf, 0x81, 0x47, 0xbf, 0x87, 0x07, 0x3d, 0x78, 0xf1, 0x2b, 0xe8, 0xc5, 0x83, 0x9f, 0x41, 0x92, + 0xc9, 0xa4, 0xad, 0xdb, 0x74, 0xa5, 0xde, 0x5e, 0xde, 0x7b, 0xf3, 0x7b, 0xef, 0x37, 0xbf, 0x37, + 0x2f, 0xc0, 0x0e, 0x08, 0x09, 0x22, 0x6c, 0x20, 0x9f, 0x19, 0xc2, 0xcc, 0xad, 0x71, 0xcf, 0x60, + 0x98, 0x8e, 0x43, 0x0f, 0x33, 0xc3, 0x43, 0x71, 0x8a, 0xc2, 0x20, 0x71, 0xd9, 0x6b, 0x44, 0xb1, + 0xef, 0x32, 0xcc, 0xdd, 0x32, 0xa8, 0xa7, 0x94, 0x70, 0x02, 0x3b, 0xe2, 0xa0, 0x8e, 0x7c, 0xa6, + 0x57, 0x18, 0xfa, 0xb8, 0xa7, 0x4b, 0x8c, 0xf6, 0x4a, 0x5d, 0x15, 0x8a, 0x19, 0xc9, 0x68, 0x4d, + 0x19, 0x01, 0xdf, 0xbe, 0x23, 0x0f, 0xa7, 0xa1, 0x81, 0x92, 0x84, 0x70, 0xc4, 0x43, 0x92, 0xb0, + 0x32, 0x7a, 0xb7, 0x8c, 0x16, 0x5f, 0xbb, 0xd9, 0x9e, 0xf1, 0x96, 0xa2, 0x34, 0xc5, 0x54, 0xc6, + 0x6f, 0x95, 0x71, 0x9a, 0x7a, 0x06, 0xe3, 0x88, 0x67, 0x65, 0x40, 0xb3, 0xc1, 0xed, 0x0d, 0xcc, + 0x9f, 0x94, 0x65, 0x07, 0x45, 0xd5, 0x01, 0xe6, 0x0e, 0x7e, 0x93, 0x61, 0xc6, 0xe1, 0x3d, 0x70, + 0x45, 0x36, 0xe7, 0x26, 0x28, 0xc6, 0xaa, 0xd2, 0x51, 0x96, 0x66, 0x9d, 0x39, 0xe9, 0xdc, 0x42, + 0x31, 0xd6, 0x7e, 0x29, 0x60, 0xe1, 0x45, 0xc6, 0x11, 0xc7, 0x13, 0x38, 0x4c, 0x02, 0x2d, 0x80, + 0x96, 0x97, 0x31, 0x4e, 0x62, 0x4c, 0xdd, 0xd0, 0x2f, 0x61, 0x80, 0x74, 0x3d, 0xf7, 0xe1, 0x08, + 0x00, 0x92, 0x62, 0x2a, 0x58, 0xa9, 0x8d, 0x4e, 0x73, 0xa9, 0xd5, 0x5f, 0xd5, 0xa7, 0xdd, 0xa9, + 0x3e, 0x51, 0x71, 0x5b, 0x82, 0x38, 0x27, 0xf0, 0xe0, 0x7d, 0x70, 0x2d, 0x45, 0x94, 0x87, 0x28, + 0x72, 0xf7, 0x50, 0x18, 0x65, 0x14, 0xab, 0xcd, 0x8e, 0xb2, 0x74, 0xd9, 0xb9, 0x5a, 0xba, 0xd7, + 0x85, 0x37, 0x27, 0x3c, 0x46, 0x51, 0xe8, 0x23, 0x8e, 0x5d, 0x92, 0x44, 0xfb, 0xea, 0xff, 0x45, + 0xda, 0x9c, 0x74, 0x6e, 0x27, 0xd1, 0xbe, 0x76, 0xa4, 0x80, 0x76, 0x7d, 0x61, 0xb8, 0x05, 0x66, + 0x3c, 0x8a, 0x11, 0x17, 0xb7, 0xd5, 0xea, 0x2f, 0xd7, 0xd2, 0xa8, 0x84, 0x9f, 0xe4, 0xb1, 0xf9, + 0x9f, 0x53, 0xa2, 0x40, 0x15, 0xcc, 0x50, 0x1c, 0x93, 0xb1, 0xe8, 0x79, 0x36, 0x8f, 0x88, 0x6f, + 0xbb, 0x05, 0x66, 0x2b, 0x92, 0xda, 0x27, 0x05, 0x74, 0xea, 0x65, 0x60, 0x29, 0x49, 0x18, 0x86, + 0xeb, 0xe0, 0xe6, 0x1f, 0x17, 0xe1, 0x62, 0x4a, 0x09, 0x2d, 0xa0, 0x5b, 0x7d, 0x28, 0x5b, 0xa5, + 0xa9, 0xa7, 0x0f, 0x8a, 0x41, 0x71, 0xae, 0x9f, 0xbe, 0xa2, 0x67, 0x79, 0x3a, 0x7c, 0x09, 0x2e, + 0x51, 0xcc, 0xb2, 0x88, 0x4b, 0xad, 0x1e, 0x4f, 0xd7, 0xaa, 0xa6, 0x39, 
0xa7, 0xc0, 0x71, 0x24, + 0x9e, 0xf6, 0x14, 0xcc, 0x9f, 0x9b, 0xf9, 0x57, 0x43, 0xd9, 0xff, 0xda, 0x04, 0xea, 0x04, 0xc0, + 0x40, 0xb4, 0x02, 0x3f, 0x2b, 0xe0, 0xc6, 0x59, 0x63, 0x0f, 0xd7, 0xa6, 0xb3, 0x38, 0xe7, 0xb9, + 0xb4, 0x2f, 0xa4, 0xb4, 0xb6, 0xfa, 0xee, 0xdb, 0xf7, 0xa3, 0xc6, 0x43, 0xb8, 0x9c, 0xef, 0x82, + 0x83, 0x53, 0xd4, 0xd6, 0xe4, 0x0b, 0x61, 0x46, 0xb7, 0x5a, 0x0e, 0xc7, 0xb2, 0x1a, 0xdd, 0x43, + 0xf8, 0x43, 0x01, 0x6a, 0x9d, 0xec, 0xd0, 0xba, 0xb0, 0x2a, 0xf2, 0xe5, 0xb6, 0xed, 0x7f, 0x81, + 0x10, 0x53, 0xa7, 0xd9, 0x05, 0xc3, 0x55, 0xed, 0x51, 0xce, 0xf0, 0x98, 0xd2, 0xc1, 0x89, 0x95, + 0xb0, 0xd6, 0x3d, 0x3c, 0x83, 0xa0, 0x19, 0x17, 0xd0, 0xa6, 0xd2, 0xb5, 0xdf, 0x37, 0xc0, 0xa2, + 0x47, 0xe2, 0xa9, 0xdd, 0xd8, 0xf3, 0x75, 0xb2, 0xef, 0xe4, 0x1b, 0x6f, 0x47, 0x79, 0xb5, 0x59, + 0x42, 0x04, 0x24, 0x42, 0x49, 0xa0, 0x13, 0x1a, 0x18, 0x01, 0x4e, 0x8a, 0x7d, 0x28, 0xf7, 0x72, + 0x1a, 0xb2, 0xfa, 0x9f, 0xc1, 0x8a, 0x34, 0x3e, 0x34, 0x9a, 0x1b, 0x96, 0xf5, 0xb1, 0xd1, 0xd9, + 0x10, 0x80, 0x96, 0xcf, 0x74, 0x61, 0xe6, 0xd6, 0xb0, 0xa7, 0x97, 0x85, 0xd9, 0x17, 0x99, 0x32, + 0xb2, 0x7c, 0x36, 0xaa, 0x52, 0x46, 0xc3, 0xde, 0x48, 0xa6, 0xfc, 0x6c, 0x2c, 0x0a, 0xbf, 0x69, + 0x5a, 0x3e, 0x33, 0xcd, 0x2a, 0xc9, 0x34, 0x87, 0x3d, 0xd3, 0x94, 0x69, 0xbb, 0x33, 0x45, 0x9f, + 0x0f, 0x7e, 0x07, 0x00, 0x00, 0xff, 0xff, 0xd3, 0x69, 0x26, 0xa6, 0xb3, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/carrier_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/carrier_constant_service.pb.go new file mode 100644 index 0000000..56d2b5a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/carrier_constant_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/carrier_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CarrierConstantService.GetCarrierConstant][google.ads.googleads.v1.services.CarrierConstantService.GetCarrierConstant]. +type GetCarrierConstantRequest struct { + // Resource name of the carrier constant to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCarrierConstantRequest) Reset() { *m = GetCarrierConstantRequest{} } +func (m *GetCarrierConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetCarrierConstantRequest) ProtoMessage() {} +func (*GetCarrierConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_carrier_constant_service_1becf49bbaa2ca47, []int{0} +} +func (m *GetCarrierConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCarrierConstantRequest.Unmarshal(m, b) +} +func (m *GetCarrierConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCarrierConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetCarrierConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCarrierConstantRequest.Merge(dst, src) +} +func (m *GetCarrierConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetCarrierConstantRequest.Size(m) +} +func (m *GetCarrierConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCarrierConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCarrierConstantRequest proto.InternalMessageInfo + +func (m *GetCarrierConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCarrierConstantRequest)(nil), "google.ads.googleads.v1.services.GetCarrierConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CarrierConstantServiceClient is the client API for CarrierConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CarrierConstantServiceClient interface { + // Returns the requested carrier constant in full detail. + GetCarrierConstant(ctx context.Context, in *GetCarrierConstantRequest, opts ...grpc.CallOption) (*resources.CarrierConstant, error) +} + +type carrierConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewCarrierConstantServiceClient(cc *grpc.ClientConn) CarrierConstantServiceClient { + return &carrierConstantServiceClient{cc} +} + +func (c *carrierConstantServiceClient) GetCarrierConstant(ctx context.Context, in *GetCarrierConstantRequest, opts ...grpc.CallOption) (*resources.CarrierConstant, error) { + out := new(resources.CarrierConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CarrierConstantService/GetCarrierConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CarrierConstantServiceServer is the server API for CarrierConstantService service. +type CarrierConstantServiceServer interface { + // Returns the requested carrier constant in full detail. 
+ GetCarrierConstant(context.Context, *GetCarrierConstantRequest) (*resources.CarrierConstant, error) +} + +func RegisterCarrierConstantServiceServer(s *grpc.Server, srv CarrierConstantServiceServer) { + s.RegisterService(&_CarrierConstantService_serviceDesc, srv) +} + +func _CarrierConstantService_GetCarrierConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCarrierConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CarrierConstantServiceServer).GetCarrierConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CarrierConstantService/GetCarrierConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CarrierConstantServiceServer).GetCarrierConstant(ctx, req.(*GetCarrierConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CarrierConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CarrierConstantService", + HandlerType: (*CarrierConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCarrierConstant", + Handler: _CarrierConstantService_GetCarrierConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/carrier_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/carrier_constant_service.proto", fileDescriptor_carrier_constant_service_1becf49bbaa2ca47) +} + +var fileDescriptor_carrier_constant_service_1becf49bbaa2ca47 = []byte{ + // 356 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xc1, 0x4a, 0xf3, 0x40, + 0x18, 0x24, 0xf9, 0xe1, 0x07, 0x83, 0x5e, 0x72, 0x10, 0xad, 0x1e, 0x4a, 0x2d, 0x52, 0x3c, 0xec, + 0x92, 0x7a, 0x91, 0x2d, 0xa2, 0x69, 0x0f, 0xf5, 0x24, 0xa5, 0x42, 0x0f, 0x12, 0x28, 0x6b, 0xb2, + 0x2c, 0x81, 0x76, 0xb7, 0xee, 0xb7, 0xcd, 0x45, 0xbc, 0xf4, 0x15, 0x7c, 0x03, 0x8f, 0xde, 0x7d, + 0x09, 0x4f, 0x82, 0xaf, 0xe0, 0xc9, 0xa7, 0x90, 0x74, 0xb3, 0x81, 0xd6, 0x86, 0xde, 0x86, 0xfd, + 0x66, 0xe6, 0x9b, 0x6f, 0x12, 0xef, 0x8a, 0x4b, 0xc9, 0x27, 0x0c, 0xd3, 0x04, 0xb0, 0x81, 0x39, + 0xca, 0x02, 0x0c, 0x4c, 0x65, 0x69, 0xcc, 0x00, 0xc7, 0x54, 0xa9, 0x94, 0xa9, 0x71, 0x2c, 0x05, + 0x68, 0x2a, 0xf4, 0xb8, 0x98, 0xa0, 0x99, 0x92, 0x5a, 0xfa, 0x75, 0xa3, 0x42, 0x34, 0x01, 0x54, + 0x1a, 0xa0, 0x2c, 0x40, 0xd6, 0xa0, 0x76, 0x51, 0xb5, 0x42, 0x31, 0x90, 0x73, 0xb5, 0x69, 0x87, + 0xf1, 0xae, 0x1d, 0x5b, 0xe5, 0x2c, 0xc5, 0x54, 0x08, 0xa9, 0xa9, 0x4e, 0xa5, 0x00, 0x33, 0x6d, + 0x5c, 0x7b, 0x87, 0x7d, 0xa6, 0x7b, 0x46, 0xda, 0x2b, 0x94, 0x43, 0xf6, 0x38, 0x67, 0xa0, 0xfd, + 0x13, 0x6f, 0xcf, 0xda, 0x8f, 0x05, 0x9d, 0xb2, 0x03, 0xa7, 0xee, 0xb4, 0x76, 0x86, 0xbb, 0xf6, + 0xf1, 0x96, 0x4e, 0x59, 0xfb, 0xd3, 0xf1, 0xf6, 0xd7, 0xf4, 0x77, 0x26, 0xb5, 0xff, 0xee, 0x78, + 0xfe, 0x5f, 0x77, 0xbf, 0x83, 0xb6, 0x9d, 0x8b, 0x2a, 0x33, 0xd5, 0xda, 0x95, 0xe2, 0xb2, 0x09, + 0xb4, 0x26, 0x6d, 0xa0, 0xc5, 0xd7, 0xf7, 0x8b, 0xdb, 0xf2, 0x4f, 0xf3, 0xc2, 0x9e, 0x56, 0x4e, + 0xba, 0x8c, 0x57, 0xb9, 0x80, 0xcf, 0x9e, 0xbb, 0x0b, 0xd7, 0x6b, 0xc6, 0x72, 0xba, 0x35, 0x66, + 0xf7, 0x68, 0xf3, 0xe1, 0x83, 0xbc, 0xda, 0x81, 0x73, 0x7f, 0x53, 0x18, 0x70, 0x39, 0xa1, 0x82, + 0x23, 0xa9, 0x38, 0xe6, 0x4c, 0x2c, 0x8b, 0xb7, 0x1f, 0x71, 0x96, 0x42, 0xf5, 0x6f, 0xd3, 0xb1, + 0xe0, 0xd5, 0xfd, 
0xd7, 0x0f, 0xc3, 0x37, 0xb7, 0xde, 0x37, 0x86, 0x61, 0x02, 0xc8, 0xc0, 0x1c, + 0x8d, 0x02, 0x54, 0x2c, 0x86, 0x0f, 0x4b, 0x89, 0xc2, 0x04, 0xa2, 0x92, 0x12, 0x8d, 0x82, 0xc8, + 0x52, 0x7e, 0xdc, 0xa6, 0x79, 0x27, 0x24, 0x4c, 0x80, 0x90, 0x92, 0x44, 0xc8, 0x28, 0x20, 0xc4, + 0xd2, 0x1e, 0xfe, 0x2f, 0x73, 0x9e, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xf8, 0xf6, 0x00, 0xfb, + 0xdd, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/change_status_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/change_status_service.pb.go new file mode 100644 index 0000000..cf95393 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/change_status_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/change_status_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for '[ChangeStatusService.GetChangeStatus][google.ads.googleads.v1.services.ChangeStatusService.GetChangeStatus]'. +type GetChangeStatusRequest struct { + // The resource name of the change status to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetChangeStatusRequest) Reset() { *m = GetChangeStatusRequest{} } +func (m *GetChangeStatusRequest) String() string { return proto.CompactTextString(m) } +func (*GetChangeStatusRequest) ProtoMessage() {} +func (*GetChangeStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_change_status_service_7363c6ead5b74186, []int{0} +} +func (m *GetChangeStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetChangeStatusRequest.Unmarshal(m, b) +} +func (m *GetChangeStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetChangeStatusRequest.Marshal(b, m, deterministic) +} +func (dst *GetChangeStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetChangeStatusRequest.Merge(dst, src) +} +func (m *GetChangeStatusRequest) XXX_Size() int { + return xxx_messageInfo_GetChangeStatusRequest.Size(m) +} +func (m *GetChangeStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetChangeStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetChangeStatusRequest proto.InternalMessageInfo + +func (m *GetChangeStatusRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetChangeStatusRequest)(nil), "google.ads.googleads.v1.services.GetChangeStatusRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ChangeStatusServiceClient is the client API for ChangeStatusService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ChangeStatusServiceClient interface { + // Returns the requested change status in full detail. + GetChangeStatus(ctx context.Context, in *GetChangeStatusRequest, opts ...grpc.CallOption) (*resources.ChangeStatus, error) +} + +type changeStatusServiceClient struct { + cc *grpc.ClientConn +} + +func NewChangeStatusServiceClient(cc *grpc.ClientConn) ChangeStatusServiceClient { + return &changeStatusServiceClient{cc} +} + +func (c *changeStatusServiceClient) GetChangeStatus(ctx context.Context, in *GetChangeStatusRequest, opts ...grpc.CallOption) (*resources.ChangeStatus, error) { + out := new(resources.ChangeStatus) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ChangeStatusService/GetChangeStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ChangeStatusServiceServer is the server API for ChangeStatusService service. +type ChangeStatusServiceServer interface { + // Returns the requested change status in full detail. 
+ GetChangeStatus(context.Context, *GetChangeStatusRequest) (*resources.ChangeStatus, error) +} + +func RegisterChangeStatusServiceServer(s *grpc.Server, srv ChangeStatusServiceServer) { + s.RegisterService(&_ChangeStatusService_serviceDesc, srv) +} + +func _ChangeStatusService_GetChangeStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetChangeStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChangeStatusServiceServer).GetChangeStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ChangeStatusService/GetChangeStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChangeStatusServiceServer).GetChangeStatus(ctx, req.(*GetChangeStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ChangeStatusService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ChangeStatusService", + HandlerType: (*ChangeStatusServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetChangeStatus", + Handler: _ChangeStatusService_GetChangeStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/change_status_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/change_status_service.proto", fileDescriptor_change_status_service_7363c6ead5b74186) +} + +var fileDescriptor_change_status_service_7363c6ead5b74186 = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xcf, 0x4a, 0xf3, 0x40, + 0x14, 0xc5, 0x49, 0x3e, 0xf8, 0xc0, 0xa0, 0x08, 0x11, 0xa4, 0x14, 0x17, 0xa5, 0x76, 0x21, 0x5d, + 0xcc, 0x18, 0x45, 0x91, 0xd1, 0x2e, 0x52, 0x17, 0x75, 0x25, 0xa5, 0x85, 0x2e, 0x24, 0x50, 0xc6, + 0x64, 0x18, 0x03, 0xcd, 0x4c, 0xcd, 0x9d, 0x74, 0x23, 0x6e, 0x7c, 0x05, 0xdf, 0xc0, 0xa5, 0x6b, + 0x9f, 0x42, 0xdc, 0xf9, 0x0a, 0xae, 0x7c, 0x09, 0x25, 0x99, 0x4c, 0xa8, 0xda, 0xd2, 0xdd, 0xe1, + 0xe6, 0xfc, 0xce, 0xfd, 0x93, 0x71, 0xce, 0xb8, 0x94, 0x7c, 0xc2, 0x30, 0x8d, 0x00, 0x6b, 0x99, + 0xab, 0x99, 0x87, 0x81, 0xa5, 0xb3, 0x38, 0x64, 0x80, 0xc3, 0x1b, 0x2a, 0x38, 0x1b, 0x83, 0xa2, + 0x2a, 0x83, 0x71, 0x59, 0x46, 0xd3, 0x54, 0x2a, 0xe9, 0x36, 0x34, 0x82, 0x68, 0x04, 0xa8, 0xa2, + 0xd1, 0xcc, 0x43, 0x86, 0xae, 0x1f, 0x2d, 0xcb, 0x4f, 0x19, 0xc8, 0x2c, 0xfd, 0xd3, 0x40, 0x07, + 0xd7, 0x77, 0x0c, 0x36, 0x8d, 0x31, 0x15, 0x42, 0x2a, 0xaa, 0x62, 0x29, 0xca, 0xaf, 0xcd, 0x8e, + 0xb3, 0xdd, 0x63, 0xea, 0xbc, 0xe0, 0x86, 0x05, 0x36, 0x60, 0xb7, 0x19, 0x03, 0xe5, 0xee, 0x3a, + 0x1b, 0x26, 0x78, 0x2c, 0x68, 0xc2, 0x6a, 0x56, 0xc3, 0xda, 0x5b, 0x1b, 0xac, 0x9b, 0xe2, 0x25, + 0x4d, 0xd8, 0xc1, 0x9b, 0xe5, 0x6c, 0xcd, 0xc3, 0x43, 0x3d, 0xac, 0xfb, 0x62, 0x39, 0x9b, 0xbf, + 0x72, 0xdd, 0x13, 0xb4, 0x6a, 0x45, 0xb4, 0x78, 0x94, 0x3a, 0x5e, 0x4a, 0x56, 0xab, 0xa3, 0x79, + 0xae, 0x79, 0xfc, 0xf0, 0xfe, 0xf1, 0x68, 0xef, 0xbb, 0x28, 0x3f, 0xcf, 0xdd, 0x8f, 0x35, 0x3a, + 0x61, 0x06, 0x4a, 0x26, 0x2c, 0x05, 0xdc, 0x2e, 0xef, 0xa5, 0x21, 0xdc, 0xbe, 0xef, 0x7e, 0x59, + 0x4e, 0x2b, 0x94, 0xc9, 0xca, 0x41, 0xbb, 0xb5, 0x05, 0x4b, 0xf7, 0xf3, 0x83, 0xf6, 0xad, 0xab, + 0x8b, 0x92, 0xe6, 0x72, 0x42, 0x05, 0x47, 0x32, 0xe5, 0x98, 0x33, 0x51, 0x9c, 0xdb, 0xfc, 0xb7, + 0x69, 0x0c, 0xcb, 0x9f, 0xc9, 0xa9, 0x11, 0x4f, 0xf6, 0xbf, 0x9e, 0xef, 0x3f, 0xdb, 0x8d, 0x9e, + 
0x0e, 0xf4, 0x23, 0x40, 0x5a, 0xe6, 0x6a, 0xe4, 0xa1, 0xb2, 0x31, 0xbc, 0x1a, 0x4b, 0xe0, 0x47, + 0x10, 0x54, 0x96, 0x60, 0xe4, 0x05, 0xc6, 0xf2, 0x69, 0xb7, 0x74, 0x9d, 0x10, 0x3f, 0x02, 0x42, + 0x2a, 0x13, 0x21, 0x23, 0x8f, 0x10, 0x63, 0xbb, 0xfe, 0x5f, 0xcc, 0x79, 0xf8, 0x1d, 0x00, 0x00, + 0xff, 0xff, 0xa5, 0x0c, 0x9d, 0x37, 0xcd, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/click_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/click_view_service.pb.go new file mode 100644 index 0000000..81591da --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/click_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/click_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ClickViewService.GetClickView][google.ads.googleads.v1.services.ClickViewService.GetClickView]. +type GetClickViewRequest struct { + // The resource name of the click view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClickViewRequest) Reset() { *m = GetClickViewRequest{} } +func (m *GetClickViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetClickViewRequest) ProtoMessage() {} +func (*GetClickViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_click_view_service_27445b6aae79d7f5, []int{0} +} +func (m *GetClickViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClickViewRequest.Unmarshal(m, b) +} +func (m *GetClickViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClickViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetClickViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClickViewRequest.Merge(dst, src) +} +func (m *GetClickViewRequest) XXX_Size() int { + return xxx_messageInfo_GetClickViewRequest.Size(m) +} +func (m *GetClickViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClickViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClickViewRequest proto.InternalMessageInfo + +func (m *GetClickViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetClickViewRequest)(nil), "google.ads.googleads.v1.services.GetClickViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClickViewServiceClient is the client API for ClickViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClickViewServiceClient interface { + // Returns the requested click view in full detail. + GetClickView(ctx context.Context, in *GetClickViewRequest, opts ...grpc.CallOption) (*resources.ClickView, error) +} + +type clickViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewClickViewServiceClient(cc *grpc.ClientConn) ClickViewServiceClient { + return &clickViewServiceClient{cc} +} + +func (c *clickViewServiceClient) GetClickView(ctx context.Context, in *GetClickViewRequest, opts ...grpc.CallOption) (*resources.ClickView, error) { + out := new(resources.ClickView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ClickViewService/GetClickView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClickViewServiceServer is the server API for ClickViewService service. +type ClickViewServiceServer interface { + // Returns the requested click view in full detail. + GetClickView(context.Context, *GetClickViewRequest) (*resources.ClickView, error) +} + +func RegisterClickViewServiceServer(s *grpc.Server, srv ClickViewServiceServer) { + s.RegisterService(&_ClickViewService_serviceDesc, srv) +} + +func _ClickViewService_GetClickView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClickViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClickViewServiceServer).GetClickView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ClickViewService/GetClickView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClickViewServiceServer).GetClickView(ctx, req.(*GetClickViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClickViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ClickViewService", + HandlerType: (*ClickViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetClickView", + Handler: _ClickViewService_GetClickView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/click_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/click_view_service.proto", fileDescriptor_click_view_service_27445b6aae79d7f5) +} + +var fileDescriptor_click_view_service_27445b6aae79d7f5 = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xb1, 0x4a, 0xc3, 0x50, + 0x14, 0x86, 0x49, 0x04, 0xc1, 0x50, 0x41, 0x22, 0x82, 0x14, 0x87, 0x52, 0x3b, 0x48, 0x29, 0xf7, + 0x92, 0xaa, 0x83, 0x57, 0x1c, 0x52, 0x87, 0x3a, 0x49, 0xa9, 0x90, 0x41, 0x02, 0xe5, 0x9a, 0x1c, + 0xc2, 0xc5, 0x26, 0xb7, 0xe6, 0xa4, 0xe9, 0x20, 0x2e, 0xbe, 0x82, 0x6f, 0xe0, 0xa6, 0x0f, 0xe1, + 0x03, 0xb8, 0xfa, 0x0a, 0x4e, 0x3e, 0x83, 0x83, 0xa4, 0xb7, 0x37, 0x58, 0x31, 0x74, 0xfb, 0x39, + 0xf9, 0xbf, 0x3f, 0xff, 0x39, 0x89, 0x75, 0x12, 0x49, 0x19, 
0x8d, 0x81, 0xf2, 0x10, 0xa9, 0x92, + 0x85, 0xca, 0x1d, 0x8a, 0x90, 0xe6, 0x22, 0x00, 0xa4, 0xc1, 0x58, 0x04, 0xb7, 0xa3, 0x5c, 0xc0, + 0x6c, 0xb4, 0x98, 0x91, 0x49, 0x2a, 0x33, 0x69, 0x37, 0x94, 0x9f, 0xf0, 0x10, 0x49, 0x89, 0x92, + 0xdc, 0x21, 0x1a, 0xad, 0x77, 0xab, 0xc2, 0x53, 0x40, 0x39, 0x4d, 0x97, 0xd3, 0x55, 0x6a, 0x7d, + 0x4f, 0x33, 0x13, 0x41, 0x79, 0x92, 0xc8, 0x8c, 0x67, 0x42, 0x26, 0xa8, 0x9e, 0x36, 0x99, 0xb5, + 0xdd, 0x87, 0xec, 0xbc, 0x80, 0x3c, 0x01, 0xb3, 0x21, 0xdc, 0x4d, 0x01, 0x33, 0x7b, 0xdf, 0xda, + 0xd4, 0x91, 0xa3, 0x84, 0xc7, 0xb0, 0x6b, 0x34, 0x8c, 0x83, 0x8d, 0x61, 0x4d, 0x0f, 0x2f, 0x79, + 0x0c, 0xdd, 0x37, 0xc3, 0xda, 0x2a, 0xc9, 0x2b, 0xd5, 0xd1, 0x7e, 0x31, 0xac, 0xda, 0xef, 0x44, + 0xfb, 0x98, 0xac, 0x5a, 0x8b, 0xfc, 0xd3, 0xa0, 0xde, 0xa9, 0xc4, 0xca, 0x5d, 0x49, 0x09, 0x35, + 0x8f, 0x1e, 0x3f, 0x3e, 0x9f, 0x4c, 0x62, 0x77, 0x8a, 0x63, 0xdc, 0x2f, 0x55, 0x3f, 0x0b, 0xa6, + 0x98, 0xc9, 0x18, 0x52, 0xa4, 0x6d, 0x75, 0x9d, 0x82, 0x40, 0xda, 0x7e, 0xe8, 0x7d, 0x1b, 0x56, + 0x2b, 0x90, 0xf1, 0xca, 0x82, 0xbd, 0x9d, 0xbf, 0x6b, 0x0e, 0x8a, 0xe3, 0x0d, 0x8c, 0xeb, 0x8b, + 0x05, 0x1a, 0xc9, 0x31, 0x4f, 0x22, 0x22, 0xd3, 0x88, 0x46, 0x90, 0xcc, 0x4f, 0xab, 0x3f, 0xd0, + 0x44, 0x60, 0xf5, 0xcf, 0x70, 0xaa, 0xc5, 0xb3, 0xb9, 0xd6, 0x77, 0xdd, 0x57, 0xb3, 0xd1, 0x57, + 0x81, 0x6e, 0x88, 0x44, 0xc9, 0x42, 0x79, 0x0e, 0x59, 0xbc, 0x18, 0xdf, 0xb5, 0xc5, 0x77, 0x43, + 0xf4, 0x4b, 0x8b, 0xef, 0x39, 0xbe, 0xb6, 0x7c, 0x99, 0x2d, 0x35, 0x67, 0xcc, 0x0d, 0x91, 0xb1, + 0xd2, 0xc4, 0x98, 0xe7, 0x30, 0xa6, 0x6d, 0x37, 0xeb, 0xf3, 0x9e, 0x87, 0x3f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0xe1, 0xc6, 0x06, 0x3b, 0xb3, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_action_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_action_service.pb.go new file mode 100644 index 0000000..e25d256 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_action_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/conversion_action_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ConversionActionService.GetConversionAction][google.ads.googleads.v1.services.ConversionActionService.GetConversionAction]. +type GetConversionActionRequest struct { + // The resource name of the conversion action to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConversionActionRequest) Reset() { *m = GetConversionActionRequest{} } +func (m *GetConversionActionRequest) String() string { return proto.CompactTextString(m) } +func (*GetConversionActionRequest) ProtoMessage() {} +func (*GetConversionActionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_service_185b1d4d9f9a10fa, []int{0} +} +func (m *GetConversionActionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConversionActionRequest.Unmarshal(m, b) +} +func (m *GetConversionActionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConversionActionRequest.Marshal(b, m, deterministic) +} +func (dst *GetConversionActionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConversionActionRequest.Merge(dst, src) +} +func (m *GetConversionActionRequest) XXX_Size() int { + return xxx_messageInfo_GetConversionActionRequest.Size(m) +} +func (m *GetConversionActionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetConversionActionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConversionActionRequest proto.InternalMessageInfo + +func (m *GetConversionActionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [ConversionActionService.MutateConversionActions][google.ads.googleads.v1.services.ConversionActionService.MutateConversionActions]. +type MutateConversionActionsRequest struct { + // The ID of the customer whose conversion actions are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual conversion actions. + Operations []*ConversionActionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateConversionActionsRequest) Reset() { *m = MutateConversionActionsRequest{} } +func (m *MutateConversionActionsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateConversionActionsRequest) ProtoMessage() {} +func (*MutateConversionActionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_service_185b1d4d9f9a10fa, []int{1} +} +func (m *MutateConversionActionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateConversionActionsRequest.Unmarshal(m, b) +} +func (m *MutateConversionActionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateConversionActionsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateConversionActionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateConversionActionsRequest.Merge(dst, src) +} +func (m *MutateConversionActionsRequest) XXX_Size() int { + return xxx_messageInfo_MutateConversionActionsRequest.Size(m) +} +func (m *MutateConversionActionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateConversionActionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateConversionActionsRequest proto.InternalMessageInfo + +func (m *MutateConversionActionsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateConversionActionsRequest) GetOperations() []*ConversionActionOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateConversionActionsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateConversionActionsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a conversion action. +type ConversionActionOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *ConversionActionOperation_Create + // *ConversionActionOperation_Update + // *ConversionActionOperation_Remove + Operation isConversionActionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionActionOperation) Reset() { *m = ConversionActionOperation{} } +func (m *ConversionActionOperation) String() string { return proto.CompactTextString(m) } +func (*ConversionActionOperation) ProtoMessage() {} +func (*ConversionActionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_service_185b1d4d9f9a10fa, []int{2} +} +func (m *ConversionActionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionActionOperation.Unmarshal(m, b) +} +func (m *ConversionActionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionActionOperation.Marshal(b, m, deterministic) +} +func (dst *ConversionActionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionActionOperation.Merge(dst, src) +} +func (m *ConversionActionOperation) XXX_Size() int { + return xxx_messageInfo_ConversionActionOperation.Size(m) +} +func (m *ConversionActionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionActionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionActionOperation proto.InternalMessageInfo + +func (m *ConversionActionOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isConversionActionOperation_Operation interface { + isConversionActionOperation_Operation() +} + +type ConversionActionOperation_Create struct { + Create *resources.ConversionAction `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type ConversionActionOperation_Update struct { + Update *resources.ConversionAction `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type ConversionActionOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*ConversionActionOperation_Create) isConversionActionOperation_Operation() {} + +func (*ConversionActionOperation_Update) isConversionActionOperation_Operation() {} + +func (*ConversionActionOperation_Remove) isConversionActionOperation_Operation() {} + +func (m *ConversionActionOperation) GetOperation() isConversionActionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *ConversionActionOperation) GetCreate() *resources.ConversionAction { + if x, ok := m.GetOperation().(*ConversionActionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *ConversionActionOperation) GetUpdate() *resources.ConversionAction { + if x, ok := m.GetOperation().(*ConversionActionOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *ConversionActionOperation) GetRemove() string { + if x, ok := m.GetOperation().(*ConversionActionOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConversionActionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConversionActionOperation_OneofMarshaler, _ConversionActionOperation_OneofUnmarshaler, _ConversionActionOperation_OneofSizer, []interface{}{ + (*ConversionActionOperation_Create)(nil), + (*ConversionActionOperation_Update)(nil), + (*ConversionActionOperation_Remove)(nil), + } +} + +func _ConversionActionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConversionActionOperation) + // operation + switch x := m.Operation.(type) { + case *ConversionActionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *ConversionActionOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *ConversionActionOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("ConversionActionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _ConversionActionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConversionActionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.ConversionAction) + err := b.DecodeMessage(msg) + m.Operation = &ConversionActionOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.ConversionAction) + err := b.DecodeMessage(msg) + m.Operation = &ConversionActionOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &ConversionActionOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _ConversionActionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConversionActionOperation) + // operation + switch x := m.Operation.(type) { + case *ConversionActionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConversionActionOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConversionActionOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for [ConversionActionService.MutateConversionActions][google.ads.googleads.v1.services.ConversionActionService.MutateConversionActions]. +type MutateConversionActionsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateConversionActionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateConversionActionsResponse) Reset() { *m = MutateConversionActionsResponse{} } +func (m *MutateConversionActionsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateConversionActionsResponse) ProtoMessage() {} +func (*MutateConversionActionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_service_185b1d4d9f9a10fa, []int{3} +} +func (m *MutateConversionActionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateConversionActionsResponse.Unmarshal(m, b) +} +func (m *MutateConversionActionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateConversionActionsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateConversionActionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateConversionActionsResponse.Merge(dst, src) +} +func (m *MutateConversionActionsResponse) XXX_Size() int { + return xxx_messageInfo_MutateConversionActionsResponse.Size(m) +} +func (m *MutateConversionActionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateConversionActionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateConversionActionsResponse proto.InternalMessageInfo + +func (m *MutateConversionActionsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateConversionActionsResponse) GetResults() []*MutateConversionActionResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the conversion action mutate. +type MutateConversionActionResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateConversionActionResult) Reset() { *m = MutateConversionActionResult{} } +func (m *MutateConversionActionResult) String() string { return proto.CompactTextString(m) } +func (*MutateConversionActionResult) ProtoMessage() {} +func (*MutateConversionActionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_action_service_185b1d4d9f9a10fa, []int{4} +} +func (m *MutateConversionActionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateConversionActionResult.Unmarshal(m, b) +} +func (m *MutateConversionActionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateConversionActionResult.Marshal(b, m, deterministic) +} +func (dst *MutateConversionActionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateConversionActionResult.Merge(dst, src) +} +func (m *MutateConversionActionResult) XXX_Size() int { + return xxx_messageInfo_MutateConversionActionResult.Size(m) +} +func (m *MutateConversionActionResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateConversionActionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateConversionActionResult proto.InternalMessageInfo + +func (m *MutateConversionActionResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetConversionActionRequest)(nil), "google.ads.googleads.v1.services.GetConversionActionRequest") + proto.RegisterType((*MutateConversionActionsRequest)(nil), "google.ads.googleads.v1.services.MutateConversionActionsRequest") + proto.RegisterType((*ConversionActionOperation)(nil), "google.ads.googleads.v1.services.ConversionActionOperation") + proto.RegisterType((*MutateConversionActionsResponse)(nil), "google.ads.googleads.v1.services.MutateConversionActionsResponse") + proto.RegisterType((*MutateConversionActionResult)(nil), "google.ads.googleads.v1.services.MutateConversionActionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ConversionActionServiceClient is the client API for ConversionActionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ConversionActionServiceClient interface { + // Returns the requested conversion action. + GetConversionAction(ctx context.Context, in *GetConversionActionRequest, opts ...grpc.CallOption) (*resources.ConversionAction, error) + // Creates, updates or removes conversion actions. Operation statuses are + // returned. 
+ MutateConversionActions(ctx context.Context, in *MutateConversionActionsRequest, opts ...grpc.CallOption) (*MutateConversionActionsResponse, error) +} + +type conversionActionServiceClient struct { + cc *grpc.ClientConn +} + +func NewConversionActionServiceClient(cc *grpc.ClientConn) ConversionActionServiceClient { + return &conversionActionServiceClient{cc} +} + +func (c *conversionActionServiceClient) GetConversionAction(ctx context.Context, in *GetConversionActionRequest, opts ...grpc.CallOption) (*resources.ConversionAction, error) { + out := new(resources.ConversionAction) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ConversionActionService/GetConversionAction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversionActionServiceClient) MutateConversionActions(ctx context.Context, in *MutateConversionActionsRequest, opts ...grpc.CallOption) (*MutateConversionActionsResponse, error) { + out := new(MutateConversionActionsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ConversionActionService/MutateConversionActions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ConversionActionServiceServer is the server API for ConversionActionService service. +type ConversionActionServiceServer interface { + // Returns the requested conversion action. + GetConversionAction(context.Context, *GetConversionActionRequest) (*resources.ConversionAction, error) + // Creates, updates or removes conversion actions. Operation statuses are + // returned. + MutateConversionActions(context.Context, *MutateConversionActionsRequest) (*MutateConversionActionsResponse, error) +} + +func RegisterConversionActionServiceServer(s *grpc.Server, srv ConversionActionServiceServer) { + s.RegisterService(&_ConversionActionService_serviceDesc, srv) +} + +func _ConversionActionService_GetConversionAction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetConversionActionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversionActionServiceServer).GetConversionAction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ConversionActionService/GetConversionAction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversionActionServiceServer).GetConversionAction(ctx, req.(*GetConversionActionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversionActionService_MutateConversionActions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateConversionActionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversionActionServiceServer).MutateConversionActions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ConversionActionService/MutateConversionActions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversionActionServiceServer).MutateConversionActions(ctx, req.(*MutateConversionActionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ConversionActionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: 
"google.ads.googleads.v1.services.ConversionActionService", + HandlerType: (*ConversionActionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetConversionAction", + Handler: _ConversionActionService_GetConversionAction_Handler, + }, + { + MethodName: "MutateConversionActions", + Handler: _ConversionActionService_MutateConversionActions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/conversion_action_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/conversion_action_service.proto", fileDescriptor_conversion_action_service_185b1d4d9f9a10fa) +} + +var fileDescriptor_conversion_action_service_185b1d4d9f9a10fa = []byte{ + // 716 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x4f, 0xd4, 0x4c, + 0x18, 0xc0, 0xdf, 0x76, 0xdf, 0xa0, 0x4c, 0x51, 0x93, 0x21, 0x86, 0x75, 0x43, 0x60, 0x53, 0x49, + 0x24, 0x7b, 0x68, 0xb3, 0x4b, 0x34, 0xb1, 0x2b, 0x86, 0x42, 0x04, 0x3c, 0x20, 0xa4, 0x24, 0xc4, + 0xe8, 0x26, 0xcd, 0xd0, 0x0e, 0x9b, 0x86, 0xb6, 0x53, 0x67, 0xa6, 0x6b, 0x08, 0xe1, 0xa2, 0x1f, + 0xc1, 0xb3, 0x17, 0x8f, 0x7e, 0x0d, 0xc3, 0xc5, 0xab, 0x9f, 0xc0, 0xc4, 0x93, 0xf1, 0x43, 0x98, + 0xe9, 0x74, 0x56, 0x58, 0xa8, 0x6b, 0xe0, 0xb4, 0xcf, 0x3e, 0xcf, 0x33, 0xbf, 0xe7, 0xef, 0x4c, + 0xc1, 0x4a, 0x9f, 0x90, 0x7e, 0x8c, 0x6d, 0x14, 0x32, 0x5b, 0x8a, 0x42, 0x1a, 0xb4, 0x6d, 0x86, + 0xe9, 0x20, 0x0a, 0x30, 0xb3, 0x03, 0x92, 0x0e, 0x30, 0x65, 0x11, 0x49, 0x7d, 0x14, 0x70, 0xf1, + 0x53, 0x9a, 0xac, 0x8c, 0x12, 0x4e, 0x60, 0x53, 0x1e, 0xb3, 0x50, 0xc8, 0xac, 0x21, 0xc1, 0x1a, + 0xb4, 0x2d, 0x45, 0x68, 0x3c, 0xae, 0x8a, 0x41, 0x31, 0x23, 0x39, 0xbd, 0x34, 0x88, 0x84, 0x37, + 0x66, 0xd5, 0xd1, 0x2c, 0xb2, 0x51, 0x9a, 0x12, 0x8e, 0x84, 0x91, 0x95, 0xd6, 0x32, 0xb4, 0x5d, + 0xfc, 0xdb, 0xcf, 0x0f, 0xec, 0x83, 0x08, 0xc7, 0xa1, 0x9f, 0x20, 0x76, 0x58, 0x7a, 0xcc, 0x8d, + 0x7a, 0xbc, 0xa5, 0x28, 0xcb, 0x30, 0x55, 0x84, 0x99, 0xd2, 0x4e, 0xb3, 0xc0, 0x66, 0x1c, 0xf1, + 0xbc, 0x34, 0x98, 0x2e, 0x68, 0x6c, 0x60, 0xbe, 0x36, 0x4c, 0xcb, 0x2d, 0xb2, 0xf2, 0xf0, 0x9b, + 0x1c, 0x33, 0x0e, 0xef, 0x83, 0x5b, 0x2a, 0x77, 0x3f, 0x45, 0x09, 0xae, 0x6b, 0x4d, 0x6d, 0x71, + 0xd2, 0x9b, 0x52, 0xca, 0x17, 0x28, 0xc1, 0xe6, 0x2f, 0x0d, 0xcc, 0x6d, 0xe5, 0x1c, 0x71, 0x3c, + 0x8a, 0x61, 0x8a, 0x33, 0x0f, 0x8c, 0x20, 0x67, 0x9c, 0x24, 0x98, 0xfa, 0x51, 0x58, 0x52, 0x80, + 0x52, 0x3d, 0x0f, 0xe1, 0x6b, 0x00, 0x48, 0x86, 0xa9, 0xac, 0xba, 0xae, 0x37, 0x6b, 0x8b, 0x46, + 0xa7, 0x6b, 0x8d, 0xeb, 0xb8, 0x35, 0x1a, 0x70, 0x5b, 0x31, 0xbc, 0x33, 0x38, 0xf8, 0x00, 0xdc, + 0xc9, 0x10, 0xe5, 0x11, 0x8a, 0xfd, 0x03, 0x14, 0xc5, 0x39, 0xc5, 0xf5, 0x5a, 0x53, 0x5b, 0xbc, + 0xe9, 0xdd, 0x2e, 0xd5, 0xeb, 0x52, 0x2b, 0xca, 0x1d, 0xa0, 0x38, 0x0a, 0x11, 0xc7, 0x3e, 0x49, + 0xe3, 0xa3, 0xfa, 0xff, 0x85, 0xdb, 0x94, 0x52, 0x6e, 0xa7, 0xf1, 0x91, 0xf9, 0x51, 0x07, 0xf7, + 0x2a, 0xe3, 0xc2, 0x2e, 0x30, 0xf2, 0xac, 0x00, 0x88, 0xe9, 0x14, 0x00, 0xa3, 0xd3, 0x50, 0x95, + 0xa8, 0xf1, 0x58, 0xeb, 0x62, 0x80, 0x5b, 0x88, 0x1d, 0x7a, 0x40, 0xba, 0x0b, 0x19, 0x6e, 0x81, + 0x89, 0x80, 0x62, 0xc4, 0x65, 0x9f, 0x8d, 0xce, 0x52, 0x65, 0x07, 0x86, 0x1b, 0x75, 0xa1, 0x05, + 0x9b, 0xff, 0x79, 0x25, 0x44, 0xe0, 0x24, 0xbc, 0xae, 0x5f, 0x0b, 0x27, 0x21, 0xb0, 0x0e, 0x26, + 0x28, 0x4e, 0xc8, 0x40, 0x76, 0x6f, 0x52, 0x58, 0xe4, 0xff, 0x55, 0x03, 0x4c, 0x0e, 0xdb, 0x6d, + 0x7e, 0xd1, 0xc0, 0x7c, 0xe5, 0x3a, 0xb0, 0x8c, 0xa4, 0x0c, 0xc3, 0x75, 0x70, 0x77, 0x64, 0x22, + 0x3e, 0xa6, 0x94, 0xd0, 
0x82, 0x6c, 0x74, 0xa0, 0x4a, 0x94, 0x66, 0x81, 0xb5, 0x5b, 0xac, 0xab, + 0x37, 0x7d, 0x7e, 0x56, 0xcf, 0x84, 0x3b, 0x7c, 0x09, 0x6e, 0x50, 0xcc, 0xf2, 0x98, 0xab, 0x9d, + 0x79, 0x3a, 0x7e, 0x67, 0x2e, 0xcf, 0xcd, 0x2b, 0x30, 0x9e, 0xc2, 0x99, 0x6b, 0x60, 0xf6, 0x6f, + 0x8e, 0xff, 0x74, 0x33, 0x3a, 0xa7, 0x35, 0x30, 0x33, 0x7a, 0x7e, 0x57, 0xe6, 0x01, 0x4f, 0x35, + 0x30, 0x7d, 0xc9, 0xcd, 0x83, 0x4f, 0xc6, 0x57, 0x50, 0x7d, 0x61, 0x1b, 0x57, 0x19, 0xb1, 0xd9, + 0x7d, 0xf7, 0xed, 0xc7, 0x07, 0xfd, 0x21, 0x5c, 0x12, 0x6f, 0xd5, 0xf1, 0xb9, 0xb2, 0x96, 0xd5, + 0x1d, 0x65, 0x76, 0xeb, 0xcc, 0xe3, 0x55, 0xce, 0xd3, 0x6e, 0x9d, 0xc0, 0xef, 0x1a, 0x98, 0xa9, + 0x18, 0x37, 0x5c, 0xb9, 0xea, 0x34, 0xd4, 0xc3, 0xd1, 0x70, 0xaf, 0x41, 0x90, 0xbb, 0x66, 0xba, + 0x45, 0x75, 0x5d, 0xf3, 0x91, 0xa8, 0xee, 0x4f, 0x39, 0xc7, 0x67, 0x1e, 0xa4, 0xe5, 0xd6, 0xc9, + 0xc5, 0xe2, 0x9c, 0xa4, 0x00, 0x3b, 0x5a, 0x6b, 0xf5, 0xbd, 0x0e, 0x16, 0x02, 0x92, 0x8c, 0xcd, + 0x65, 0x75, 0xb6, 0x62, 0xda, 0x3b, 0xe2, 0xde, 0xef, 0x68, 0xaf, 0x36, 0x4b, 0x42, 0x9f, 0xc4, + 0x28, 0xed, 0x5b, 0x84, 0xf6, 0xed, 0x3e, 0x4e, 0x8b, 0x57, 0x41, 0x7d, 0x31, 0xb2, 0x88, 0x55, + 0x7f, 0xa4, 0xba, 0x4a, 0xf8, 0xa4, 0xd7, 0x36, 0x5c, 0xf7, 0xb3, 0xde, 0xdc, 0x90, 0x40, 0x37, + 0x64, 0x96, 0x14, 0x85, 0xb4, 0xd7, 0xb6, 0xca, 0xc0, 0xec, 0xab, 0x72, 0xe9, 0xb9, 0x21, 0xeb, + 0x0d, 0x5d, 0x7a, 0x7b, 0xed, 0x9e, 0x72, 0xf9, 0xa9, 0x2f, 0x48, 0xbd, 0xe3, 0xb8, 0x21, 0x73, + 0x9c, 0xa1, 0x93, 0xe3, 0xec, 0xb5, 0x1d, 0x47, 0xb9, 0xed, 0x4f, 0x14, 0x79, 0x2e, 0xfd, 0x0e, + 0x00, 0x00, 0xff, 0xff, 0xdd, 0x78, 0x76, 0x77, 0x4b, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_adjustment_upload_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_adjustment_upload_service.pb.go new file mode 100644 index 0000000..109376e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_adjustment_upload_service.pb.go @@ -0,0 +1,764 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/conversion_adjustment_upload_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [ConversionAdjustmentUploadService.UploadConversionAdjustments][google.ads.googleads.v1.services.ConversionAdjustmentUploadService.UploadConversionAdjustments]. +type UploadConversionAdjustmentsRequest struct { + // The ID of the customer performing the upload. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The conversion adjustments that are being uploaded. + ConversionAdjustments []*ConversionAdjustment `protobuf:"bytes,2,rep,name=conversion_adjustments,json=conversionAdjustments,proto3" json:"conversion_adjustments,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried out + // in one transaction if and only if they are all valid. This should always be + // set to true. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadConversionAdjustmentsRequest) Reset() { *m = UploadConversionAdjustmentsRequest{} } +func (m *UploadConversionAdjustmentsRequest) String() string { return proto.CompactTextString(m) } +func (*UploadConversionAdjustmentsRequest) ProtoMessage() {} +func (*UploadConversionAdjustmentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{0} +} +func (m *UploadConversionAdjustmentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadConversionAdjustmentsRequest.Unmarshal(m, b) +} +func (m *UploadConversionAdjustmentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadConversionAdjustmentsRequest.Marshal(b, m, deterministic) +} +func (dst *UploadConversionAdjustmentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadConversionAdjustmentsRequest.Merge(dst, src) +} +func (m *UploadConversionAdjustmentsRequest) XXX_Size() int { + return xxx_messageInfo_UploadConversionAdjustmentsRequest.Size(m) +} +func (m *UploadConversionAdjustmentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UploadConversionAdjustmentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadConversionAdjustmentsRequest proto.InternalMessageInfo + +func (m *UploadConversionAdjustmentsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *UploadConversionAdjustmentsRequest) GetConversionAdjustments() []*ConversionAdjustment { + if m != nil { + return m.ConversionAdjustments + } + return nil +} + +func (m *UploadConversionAdjustmentsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +// Response message for +// [ConversionAdjustmentUploadService.UploadConversionAdjustments][google.ads.googleads.v1.services.ConversionAdjustmentUploadService.UploadConversionAdjustments]. +type UploadConversionAdjustmentsResponse struct { + // Errors that pertain to conversion adjustment failures in the partial + // failure mode. Returned when all errors occur inside the adjustments. If any + // errors occur outside the adjustments (e.g. auth errors), we return an RPC + // level error. + PartialFailureError *status.Status `protobuf:"bytes,1,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // Returned for successfully processed conversion adjustments. Proto will be + // empty for rows that received an error. Results are not returned when + // validate_only is true. 
+ Results []*ConversionAdjustmentResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadConversionAdjustmentsResponse) Reset() { *m = UploadConversionAdjustmentsResponse{} } +func (m *UploadConversionAdjustmentsResponse) String() string { return proto.CompactTextString(m) } +func (*UploadConversionAdjustmentsResponse) ProtoMessage() {} +func (*UploadConversionAdjustmentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{1} +} +func (m *UploadConversionAdjustmentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadConversionAdjustmentsResponse.Unmarshal(m, b) +} +func (m *UploadConversionAdjustmentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadConversionAdjustmentsResponse.Marshal(b, m, deterministic) +} +func (dst *UploadConversionAdjustmentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadConversionAdjustmentsResponse.Merge(dst, src) +} +func (m *UploadConversionAdjustmentsResponse) XXX_Size() int { + return xxx_messageInfo_UploadConversionAdjustmentsResponse.Size(m) +} +func (m *UploadConversionAdjustmentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UploadConversionAdjustmentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadConversionAdjustmentsResponse proto.InternalMessageInfo + +func (m *UploadConversionAdjustmentsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *UploadConversionAdjustmentsResponse) GetResults() []*ConversionAdjustmentResult { + if m != nil { + return m.Results + } + return nil +} + +// A conversion adjustment. +type ConversionAdjustment struct { + // Resource name of the conversion action associated with this conversion + // adjustment. Note: Although this resource name consists of a customer id and + // a conversion action id, validation will ignore the customer id and use the + // conversion action id as the sole identifier of the conversion action. + ConversionAction *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the adjustment occurred. Must be after the + // conversion_date_time. The timezone must be specified. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". + AdjustmentDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=adjustment_date_time,json=adjustmentDateTime,proto3" json:"adjustment_date_time,omitempty"` + // The adjustment type. + AdjustmentType enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType `protobuf:"varint,5,opt,name=adjustment_type,json=adjustmentType,proto3,enum=google.ads.googleads.v1.enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType" json:"adjustment_type,omitempty"` + // Information needed to restate the conversion's value. + // Required for restatements. Should not be supplied for retractions. An error + // will be returned if provided for a retraction. + RestatementValue *RestatementValue `protobuf:"bytes,6,opt,name=restatement_value,json=restatementValue,proto3" json:"restatement_value,omitempty"` + // Identifies the conversion to be adjusted. 
+ // + // Types that are valid to be assigned to ConversionIdentifier: + // *ConversionAdjustment_GclidDateTimePair + // *ConversionAdjustment_OrderId + ConversionIdentifier isConversionAdjustment_ConversionIdentifier `protobuf_oneof:"conversion_identifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAdjustment) Reset() { *m = ConversionAdjustment{} } +func (m *ConversionAdjustment) String() string { return proto.CompactTextString(m) } +func (*ConversionAdjustment) ProtoMessage() {} +func (*ConversionAdjustment) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{2} +} +func (m *ConversionAdjustment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAdjustment.Unmarshal(m, b) +} +func (m *ConversionAdjustment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAdjustment.Marshal(b, m, deterministic) +} +func (dst *ConversionAdjustment) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAdjustment.Merge(dst, src) +} +func (m *ConversionAdjustment) XXX_Size() int { + return xxx_messageInfo_ConversionAdjustment.Size(m) +} +func (m *ConversionAdjustment) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAdjustment.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAdjustment proto.InternalMessageInfo + +func (m *ConversionAdjustment) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *ConversionAdjustment) GetAdjustmentDateTime() *wrappers.StringValue { + if m != nil { + return m.AdjustmentDateTime + } + return nil +} + +func (m *ConversionAdjustment) GetAdjustmentType() enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType { + if m != nil { + return m.AdjustmentType + } + return enums.ConversionAdjustmentTypeEnum_UNSPECIFIED +} + +func (m *ConversionAdjustment) GetRestatementValue() *RestatementValue { + if m != nil { + return m.RestatementValue + } + return nil +} + +type isConversionAdjustment_ConversionIdentifier interface { + isConversionAdjustment_ConversionIdentifier() +} + +type ConversionAdjustment_GclidDateTimePair struct { + GclidDateTimePair *GclidDateTimePair `protobuf:"bytes,1,opt,name=gclid_date_time_pair,json=gclidDateTimePair,proto3,oneof"` +} + +type ConversionAdjustment_OrderId struct { + OrderId *wrappers.StringValue `protobuf:"bytes,2,opt,name=order_id,json=orderId,proto3,oneof"` +} + +func (*ConversionAdjustment_GclidDateTimePair) isConversionAdjustment_ConversionIdentifier() {} + +func (*ConversionAdjustment_OrderId) isConversionAdjustment_ConversionIdentifier() {} + +func (m *ConversionAdjustment) GetConversionIdentifier() isConversionAdjustment_ConversionIdentifier { + if m != nil { + return m.ConversionIdentifier + } + return nil +} + +func (m *ConversionAdjustment) GetGclidDateTimePair() *GclidDateTimePair { + if x, ok := m.GetConversionIdentifier().(*ConversionAdjustment_GclidDateTimePair); ok { + return x.GclidDateTimePair + } + return nil +} + +func (m *ConversionAdjustment) GetOrderId() *wrappers.StringValue { + if x, ok := m.GetConversionIdentifier().(*ConversionAdjustment_OrderId); ok { + return x.OrderId + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConversionAdjustment) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConversionAdjustment_OneofMarshaler, _ConversionAdjustment_OneofUnmarshaler, _ConversionAdjustment_OneofSizer, []interface{}{ + (*ConversionAdjustment_GclidDateTimePair)(nil), + (*ConversionAdjustment_OrderId)(nil), + } +} + +func _ConversionAdjustment_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConversionAdjustment) + // conversion_identifier + switch x := m.ConversionIdentifier.(type) { + case *ConversionAdjustment_GclidDateTimePair: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GclidDateTimePair); err != nil { + return err + } + case *ConversionAdjustment_OrderId: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OrderId); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ConversionAdjustment.ConversionIdentifier has unexpected type %T", x) + } + return nil +} + +func _ConversionAdjustment_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConversionAdjustment) + switch tag { + case 1: // conversion_identifier.gclid_date_time_pair + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GclidDateTimePair) + err := b.DecodeMessage(msg) + m.ConversionIdentifier = &ConversionAdjustment_GclidDateTimePair{msg} + return true, err + case 2: // conversion_identifier.order_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ConversionIdentifier = &ConversionAdjustment_OrderId{msg} + return true, err + default: + return false, nil + } +} + +func _ConversionAdjustment_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConversionAdjustment) + // conversion_identifier + switch x := m.ConversionIdentifier.(type) { + case *ConversionAdjustment_GclidDateTimePair: + s := proto.Size(x.GclidDateTimePair) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConversionAdjustment_OrderId: + s := proto.Size(x.OrderId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Contains information needed to restate a conversion's value. +type RestatementValue struct { + // The restated conversion value. This is the value of the conversion after + // restatement. For example, to change the value of a conversion from 100 to + // 70, an adjusted value of 70 should be reported. + AdjustedValue *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=adjusted_value,json=adjustedValue,proto3" json:"adjusted_value,omitempty"` + // The currency of the restated value. If not provided, then the default + // currency from the conversion action is used, and if that is not set then + // the account currency is used. This is the ISO 4217 3-character currency + // code e.g. USD or EUR. 
+ CurrencyCode *wrappers.StringValue `protobuf:"bytes,2,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestatementValue) Reset() { *m = RestatementValue{} } +func (m *RestatementValue) String() string { return proto.CompactTextString(m) } +func (*RestatementValue) ProtoMessage() {} +func (*RestatementValue) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{3} +} +func (m *RestatementValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestatementValue.Unmarshal(m, b) +} +func (m *RestatementValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestatementValue.Marshal(b, m, deterministic) +} +func (dst *RestatementValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestatementValue.Merge(dst, src) +} +func (m *RestatementValue) XXX_Size() int { + return xxx_messageInfo_RestatementValue.Size(m) +} +func (m *RestatementValue) XXX_DiscardUnknown() { + xxx_messageInfo_RestatementValue.DiscardUnknown(m) +} + +var xxx_messageInfo_RestatementValue proto.InternalMessageInfo + +func (m *RestatementValue) GetAdjustedValue() *wrappers.DoubleValue { + if m != nil { + return m.AdjustedValue + } + return nil +} + +func (m *RestatementValue) GetCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.CurrencyCode + } + return nil +} + +// Uniquely identifies a conversion that was reported without an order ID +// specified. +type GclidDateTimePair struct { + // Google click ID (gclid) associated with the original conversion for this + // adjustment. + Gclid *wrappers.StringValue `protobuf:"bytes,1,opt,name=gclid,proto3" json:"gclid,omitempty"` + // The date time at which the original conversion for this adjustment + // occurred. The timezone must be specified. The format is "yyyy-mm-dd + // hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". 
+ ConversionDateTime *wrappers.StringValue `protobuf:"bytes,2,opt,name=conversion_date_time,json=conversionDateTime,proto3" json:"conversion_date_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GclidDateTimePair) Reset() { *m = GclidDateTimePair{} } +func (m *GclidDateTimePair) String() string { return proto.CompactTextString(m) } +func (*GclidDateTimePair) ProtoMessage() {} +func (*GclidDateTimePair) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{4} +} +func (m *GclidDateTimePair) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GclidDateTimePair.Unmarshal(m, b) +} +func (m *GclidDateTimePair) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GclidDateTimePair.Marshal(b, m, deterministic) +} +func (dst *GclidDateTimePair) XXX_Merge(src proto.Message) { + xxx_messageInfo_GclidDateTimePair.Merge(dst, src) +} +func (m *GclidDateTimePair) XXX_Size() int { + return xxx_messageInfo_GclidDateTimePair.Size(m) +} +func (m *GclidDateTimePair) XXX_DiscardUnknown() { + xxx_messageInfo_GclidDateTimePair.DiscardUnknown(m) +} + +var xxx_messageInfo_GclidDateTimePair proto.InternalMessageInfo + +func (m *GclidDateTimePair) GetGclid() *wrappers.StringValue { + if m != nil { + return m.Gclid + } + return nil +} + +func (m *GclidDateTimePair) GetConversionDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionDateTime + } + return nil +} + +// Information identifying a successfully processed ConversionAdjustment. +type ConversionAdjustmentResult struct { + // Resource name of the conversion action associated with this conversion + // adjustment. + ConversionAction *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the adjustment occurred. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". + AdjustmentDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=adjustment_date_time,json=adjustmentDateTime,proto3" json:"adjustment_date_time,omitempty"` + // The adjustment type. + AdjustmentType enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType `protobuf:"varint,5,opt,name=adjustment_type,json=adjustmentType,proto3,enum=google.ads.googleads.v1.enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType" json:"adjustment_type,omitempty"` + // Identifies the conversion that was adjusted. 
+ // + // Types that are valid to be assigned to ConversionIdentifier: + // *ConversionAdjustmentResult_GclidDateTimePair + // *ConversionAdjustmentResult_OrderId + ConversionIdentifier isConversionAdjustmentResult_ConversionIdentifier `protobuf_oneof:"conversion_identifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConversionAdjustmentResult) Reset() { *m = ConversionAdjustmentResult{} } +func (m *ConversionAdjustmentResult) String() string { return proto.CompactTextString(m) } +func (*ConversionAdjustmentResult) ProtoMessage() {} +func (*ConversionAdjustmentResult) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531, []int{5} +} +func (m *ConversionAdjustmentResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConversionAdjustmentResult.Unmarshal(m, b) +} +func (m *ConversionAdjustmentResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConversionAdjustmentResult.Marshal(b, m, deterministic) +} +func (dst *ConversionAdjustmentResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConversionAdjustmentResult.Merge(dst, src) +} +func (m *ConversionAdjustmentResult) XXX_Size() int { + return xxx_messageInfo_ConversionAdjustmentResult.Size(m) +} +func (m *ConversionAdjustmentResult) XXX_DiscardUnknown() { + xxx_messageInfo_ConversionAdjustmentResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ConversionAdjustmentResult proto.InternalMessageInfo + +func (m *ConversionAdjustmentResult) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *ConversionAdjustmentResult) GetAdjustmentDateTime() *wrappers.StringValue { + if m != nil { + return m.AdjustmentDateTime + } + return nil +} + +func (m *ConversionAdjustmentResult) GetAdjustmentType() enums.ConversionAdjustmentTypeEnum_ConversionAdjustmentType { + if m != nil { + return m.AdjustmentType + } + return enums.ConversionAdjustmentTypeEnum_UNSPECIFIED +} + +type isConversionAdjustmentResult_ConversionIdentifier interface { + isConversionAdjustmentResult_ConversionIdentifier() +} + +type ConversionAdjustmentResult_GclidDateTimePair struct { + GclidDateTimePair *GclidDateTimePair `protobuf:"bytes,1,opt,name=gclid_date_time_pair,json=gclidDateTimePair,proto3,oneof"` +} + +type ConversionAdjustmentResult_OrderId struct { + OrderId *wrappers.StringValue `protobuf:"bytes,2,opt,name=order_id,json=orderId,proto3,oneof"` +} + +func (*ConversionAdjustmentResult_GclidDateTimePair) isConversionAdjustmentResult_ConversionIdentifier() { +} + +func (*ConversionAdjustmentResult_OrderId) isConversionAdjustmentResult_ConversionIdentifier() {} + +func (m *ConversionAdjustmentResult) GetConversionIdentifier() isConversionAdjustmentResult_ConversionIdentifier { + if m != nil { + return m.ConversionIdentifier + } + return nil +} + +func (m *ConversionAdjustmentResult) GetGclidDateTimePair() *GclidDateTimePair { + if x, ok := m.GetConversionIdentifier().(*ConversionAdjustmentResult_GclidDateTimePair); ok { + return x.GclidDateTimePair + } + return nil +} + +func (m *ConversionAdjustmentResult) GetOrderId() *wrappers.StringValue { + if x, ok := m.GetConversionIdentifier().(*ConversionAdjustmentResult_OrderId); ok { + return x.OrderId + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConversionAdjustmentResult) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConversionAdjustmentResult_OneofMarshaler, _ConversionAdjustmentResult_OneofUnmarshaler, _ConversionAdjustmentResult_OneofSizer, []interface{}{ + (*ConversionAdjustmentResult_GclidDateTimePair)(nil), + (*ConversionAdjustmentResult_OrderId)(nil), + } +} + +func _ConversionAdjustmentResult_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConversionAdjustmentResult) + // conversion_identifier + switch x := m.ConversionIdentifier.(type) { + case *ConversionAdjustmentResult_GclidDateTimePair: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GclidDateTimePair); err != nil { + return err + } + case *ConversionAdjustmentResult_OrderId: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OrderId); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ConversionAdjustmentResult.ConversionIdentifier has unexpected type %T", x) + } + return nil +} + +func _ConversionAdjustmentResult_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConversionAdjustmentResult) + switch tag { + case 1: // conversion_identifier.gclid_date_time_pair + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GclidDateTimePair) + err := b.DecodeMessage(msg) + m.ConversionIdentifier = &ConversionAdjustmentResult_GclidDateTimePair{msg} + return true, err + case 2: // conversion_identifier.order_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(wrappers.StringValue) + err := b.DecodeMessage(msg) + m.ConversionIdentifier = &ConversionAdjustmentResult_OrderId{msg} + return true, err + default: + return false, nil + } +} + +func _ConversionAdjustmentResult_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConversionAdjustmentResult) + // conversion_identifier + switch x := m.ConversionIdentifier.(type) { + case *ConversionAdjustmentResult_GclidDateTimePair: + s := proto.Size(x.GclidDateTimePair) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConversionAdjustmentResult_OrderId: + s := proto.Size(x.OrderId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*UploadConversionAdjustmentsRequest)(nil), "google.ads.googleads.v1.services.UploadConversionAdjustmentsRequest") + proto.RegisterType((*UploadConversionAdjustmentsResponse)(nil), "google.ads.googleads.v1.services.UploadConversionAdjustmentsResponse") + proto.RegisterType((*ConversionAdjustment)(nil), "google.ads.googleads.v1.services.ConversionAdjustment") + proto.RegisterType((*RestatementValue)(nil), "google.ads.googleads.v1.services.RestatementValue") + proto.RegisterType((*GclidDateTimePair)(nil), "google.ads.googleads.v1.services.GclidDateTimePair") + proto.RegisterType((*ConversionAdjustmentResult)(nil), "google.ads.googleads.v1.services.ConversionAdjustmentResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// ConversionAdjustmentUploadServiceClient is the client API for ConversionAdjustmentUploadService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ConversionAdjustmentUploadServiceClient interface { + // Processes the given conversion adjustments. + UploadConversionAdjustments(ctx context.Context, in *UploadConversionAdjustmentsRequest, opts ...grpc.CallOption) (*UploadConversionAdjustmentsResponse, error) +} + +type conversionAdjustmentUploadServiceClient struct { + cc *grpc.ClientConn +} + +func NewConversionAdjustmentUploadServiceClient(cc *grpc.ClientConn) ConversionAdjustmentUploadServiceClient { + return &conversionAdjustmentUploadServiceClient{cc} +} + +func (c *conversionAdjustmentUploadServiceClient) UploadConversionAdjustments(ctx context.Context, in *UploadConversionAdjustmentsRequest, opts ...grpc.CallOption) (*UploadConversionAdjustmentsResponse, error) { + out := new(UploadConversionAdjustmentsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ConversionAdjustmentUploadService/UploadConversionAdjustments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ConversionAdjustmentUploadServiceServer is the server API for ConversionAdjustmentUploadService service. +type ConversionAdjustmentUploadServiceServer interface { + // Processes the given conversion adjustments. + UploadConversionAdjustments(context.Context, *UploadConversionAdjustmentsRequest) (*UploadConversionAdjustmentsResponse, error) +} + +func RegisterConversionAdjustmentUploadServiceServer(s *grpc.Server, srv ConversionAdjustmentUploadServiceServer) { + s.RegisterService(&_ConversionAdjustmentUploadService_serviceDesc, srv) +} + +func _ConversionAdjustmentUploadService_UploadConversionAdjustments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UploadConversionAdjustmentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversionAdjustmentUploadServiceServer).UploadConversionAdjustments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ConversionAdjustmentUploadService/UploadConversionAdjustments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversionAdjustmentUploadServiceServer).UploadConversionAdjustments(ctx, req.(*UploadConversionAdjustmentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ConversionAdjustmentUploadService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ConversionAdjustmentUploadService", + HandlerType: (*ConversionAdjustmentUploadServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UploadConversionAdjustments", + Handler: _ConversionAdjustmentUploadService_UploadConversionAdjustments_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/conversion_adjustment_upload_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/conversion_adjustment_upload_service.proto", fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531) +} + +var fileDescriptor_conversion_adjustment_upload_service_f73ceda272931531 = []byte{ + // 821 bytes of a gzipped FileDescriptorProto + 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x56, 0xcb, 0x6f, 0xfb, 0x44, + 0x10, 0xfe, 0xd9, 0xf9, 0x3d, 0xca, 0x86, 0x3e, 0xb2, 0xb4, 0x34, 0x0a, 0x15, 0x04, 0x53, 0x41, + 0xd4, 0x83, 0xad, 0xa4, 0x12, 0x52, 0x0d, 0x54, 0x4a, 0x9b, 0xbe, 0x84, 0x84, 0x2a, 0xb7, 0xe4, + 0x80, 0x22, 0x59, 0x5b, 0xef, 0xc6, 0x5a, 0x64, 0x7b, 0xcd, 0xee, 0x3a, 0xa8, 0x42, 0xbd, 0x70, + 0xe7, 0x84, 0x04, 0x12, 0x47, 0x8e, 0xfc, 0x1f, 0x5c, 0x90, 0x38, 0x71, 0xe6, 0xc6, 0x89, 0x13, + 0xe2, 0x2f, 0x40, 0x7e, 0x6c, 0x92, 0x86, 0xb8, 0xae, 0xe0, 0x84, 0xc4, 0x6d, 0xbd, 0x33, 0xf3, + 0xcd, 0x37, 0xdf, 0xce, 0x68, 0x0c, 0x3e, 0xf4, 0x19, 0xf3, 0x03, 0x62, 0x21, 0x2c, 0xac, 0xfc, + 0x98, 0x9e, 0x26, 0x5d, 0x4b, 0x10, 0x3e, 0xa1, 0x1e, 0x11, 0x96, 0xc7, 0xa2, 0x09, 0xe1, 0x82, + 0xb2, 0xc8, 0x45, 0xf8, 0xd3, 0x44, 0xc8, 0x90, 0x44, 0xd2, 0x4d, 0xe2, 0x80, 0x21, 0xec, 0x16, + 0x5e, 0x66, 0xcc, 0x99, 0x64, 0xb0, 0x9d, 0x23, 0x98, 0x08, 0x0b, 0x73, 0x0a, 0x66, 0x4e, 0xba, + 0xa6, 0x02, 0x6b, 0x1d, 0x96, 0xa5, 0x23, 0x51, 0x12, 0x96, 0xe5, 0x92, 0xb7, 0x71, 0x91, 0xa1, + 0xb5, 0xa3, 0xe2, 0x63, 0x6a, 0xa1, 0x28, 0x62, 0x12, 0x49, 0xca, 0x22, 0x51, 0x58, 0x5f, 0x2f, + 0xac, 0xd9, 0xd7, 0x4d, 0x32, 0xb6, 0x3e, 0xe7, 0x28, 0x8e, 0x09, 0x57, 0xf6, 0xed, 0xc2, 0xce, + 0x63, 0xcf, 0x12, 0x12, 0xc9, 0xa4, 0x30, 0x18, 0xbf, 0x6a, 0xc0, 0xf8, 0x38, 0xab, 0xe8, 0x78, + 0xca, 0xa0, 0x3f, 0x25, 0x20, 0x1c, 0xf2, 0x59, 0x42, 0x84, 0x84, 0x6f, 0x80, 0xba, 0x97, 0x08, + 0xc9, 0x42, 0xc2, 0x5d, 0x8a, 0x9b, 0x5a, 0x5b, 0xeb, 0xbc, 0xe4, 0x00, 0x75, 0x75, 0x81, 0x61, + 0x08, 0x5e, 0x5d, 0x5a, 0x82, 0x68, 0xea, 0xed, 0x5a, 0xa7, 0xde, 0x7b, 0xd7, 0xac, 0x52, 0xc8, + 0x5c, 0x46, 0xc0, 0xd9, 0xf2, 0x96, 0xd1, 0x82, 0xef, 0x80, 0xf5, 0x18, 0x71, 0x49, 0x51, 0xe0, + 0x8e, 0x11, 0x0d, 0x12, 0x4e, 0x9a, 0xb5, 0xb6, 0xd6, 0x59, 0x71, 0xd6, 0x8a, 0xeb, 0xd3, 0xfc, + 0xd6, 0xf8, 0x51, 0x03, 0x6f, 0x3d, 0x58, 0x9f, 0x88, 0x59, 0x24, 0x08, 0x3c, 0x05, 0x5b, 0x0b, + 0x80, 0x2e, 0xe1, 0x9c, 0xf1, 0xac, 0xd4, 0x7a, 0x0f, 0x2a, 0xfa, 0x3c, 0xf6, 0xcc, 0xab, 0x4c, + 0x40, 0xe7, 0x95, 0xfb, 0xa9, 0x4e, 0x52, 0x77, 0x38, 0x04, 0x2f, 0x38, 0x11, 0x49, 0x30, 0x2d, + 0xfc, 0xfd, 0x7f, 0x58, 0x78, 0x06, 0xe2, 0x28, 0x30, 0xe3, 0xe7, 0xa7, 0x60, 0x73, 0x99, 0x1f, + 0xbc, 0x00, 0x8d, 0x79, 0xe1, 0xbd, 0xb4, 0x2b, 0x32, 0x2d, 0xea, 0xbd, 0x1d, 0x95, 0x5a, 0x75, + 0x85, 0x79, 0x25, 0x39, 0x8d, 0xfc, 0x21, 0x0a, 0x12, 0xe2, 0x6c, 0xcc, 0x29, 0x9b, 0x45, 0xc1, + 0x8f, 0xc0, 0xe6, 0x5c, 0xef, 0x61, 0x24, 0x89, 0x2b, 0x69, 0x48, 0x9a, 0x4f, 0x1f, 0x81, 0x06, + 0x67, 0x91, 0x03, 0x24, 0xc9, 0x35, 0x0d, 0x09, 0xbc, 0x03, 0xeb, 0x0b, 0xbd, 0xdc, 0x7c, 0xd6, + 0xd6, 0x3a, 0x6b, 0xbd, 0xeb, 0x52, 0x4d, 0xb2, 0x61, 0x58, 0x2a, 0xc8, 0xf5, 0x6d, 0x4c, 0x4e, + 0xa2, 0x24, 0x2c, 0x35, 0x3a, 0x6b, 0xe8, 0xde, 0x37, 0x74, 0x41, 0x83, 0x93, 0xb4, 0xd9, 0x49, + 0x96, 0x7f, 0x92, 0xf2, 0x6c, 0x3e, 0xcf, 0x6a, 0xe9, 0x55, 0x3f, 0x8a, 0x33, 0x0b, 0x2d, 0xf4, + 0xe2, 0x0b, 0x37, 0x70, 0x0c, 0x36, 0x7d, 0x2f, 0xa0, 0x78, 0x26, 0x95, 0x1b, 0x23, 0xaa, 0x5a, + 0x66, 0xbf, 0x3a, 0xc7, 0x59, 0x1a, 0xad, 0xe4, 0xba, 0x44, 0x94, 0x9f, 0x3f, 0x71, 0x1a, 0xfe, + 0xe2, 0x25, 0x3c, 0x00, 0x2b, 0x8c, 0xe3, 0x7c, 0xf2, 0xf4, 0xea, 0xb7, 0x38, 0x7f, 0xe2, 0xbc, + 0xc8, 0xfc, 0x2f, 0xf0, 0xd1, 0x36, 0x98, 0x1b, 0x20, 0x97, 0x62, 0x12, 0x49, 0x3a, 0xa6, 0x84, + 0x1b, 0xdf, 0x69, 0x60, 0x63, 0xb1, 0x44, 0x78, 0x0c, 0x0a, 0x0d, 0x09, 0x2e, 0xe4, 0xd2, 0x4a, + 0xd2, 0x0d, 0x58, 0x72, 0x13, 0x90, 0x5c, 0x98, 0x55, 0x15, 0x93, 0x83, 0xf4, 0xc1, 0xaa, 0x97, + 0x70, 0x4e, 0x22, 0xef, 
0xd6, 0xf5, 0x18, 0x26, 0x8f, 0xa1, 0xec, 0xbc, 0xac, 0x42, 0x8e, 0x19, + 0x26, 0xc6, 0xb7, 0x1a, 0x68, 0xfc, 0x4d, 0x1b, 0xd8, 0x03, 0xcf, 0x32, 0x6d, 0x4a, 0x49, 0xcd, + 0x03, 0xe6, 0xae, 0x69, 0x4b, 0xcf, 0xd5, 0x3f, 0x6b, 0xe9, 0xc7, 0x70, 0x82, 0xb3, 0x48, 0xc5, + 0xc3, 0xf8, 0xb3, 0x06, 0x5a, 0xe5, 0xe3, 0xfa, 0xff, 0x30, 0x96, 0x0e, 0xe3, 0x7f, 0x78, 0x56, + 0x7a, 0xdf, 0xe8, 0xe0, 0xcd, 0x65, 0x85, 0xe6, 0x7b, 0xe5, 0x2a, 0x67, 0x08, 0xff, 0xd0, 0xc0, + 0x6b, 0x0f, 0x6c, 0x1a, 0x38, 0xa8, 0xae, 0xb1, 0x7a, 0x11, 0xb7, 0x4e, 0xfe, 0x25, 0x4a, 0xbe, + 0xee, 0x8c, 0xc1, 0x97, 0xbf, 0xfc, 0xf6, 0xb5, 0x7e, 0x68, 0x1c, 0xa4, 0x7f, 0x20, 0x6a, 0x8d, + 0x0b, 0xeb, 0x8b, 0xb9, 0x25, 0xff, 0xc1, 0xde, 0x9d, 0x9d, 0x94, 0x43, 0xd9, 0xda, 0xde, 0xd1, + 0x57, 0x3a, 0xd8, 0xf5, 0x58, 0x58, 0x49, 0xe9, 0xe8, 0xed, 0x4a, 0xf9, 0x2e, 0xd3, 0xc7, 0xb9, + 0xd4, 0x3e, 0x39, 0x2f, 0xb0, 0x7c, 0x16, 0xa0, 0xc8, 0x37, 0x19, 0xf7, 0x2d, 0x9f, 0x44, 0xd9, + 0xd3, 0xa9, 0xdf, 0xa6, 0x98, 0x8a, 0xf2, 0x9f, 0xb6, 0xf7, 0xd4, 0xe1, 0x7b, 0xbd, 0x76, 0xd6, + 0xef, 0xff, 0xa0, 0xb7, 0xcf, 0x72, 0xc0, 0x3e, 0x16, 0x66, 0x7e, 0x4c, 0x4f, 0xc3, 0xae, 0x59, + 0x24, 0x16, 0x3f, 0x29, 0x97, 0x51, 0x1f, 0x8b, 0xd1, 0xd4, 0x65, 0x34, 0xec, 0x8e, 0x94, 0xcb, + 0xef, 0xfa, 0x6e, 0x7e, 0x6f, 0xdb, 0x7d, 0x2c, 0x6c, 0x7b, 0xea, 0x64, 0xdb, 0xc3, 0xae, 0x6d, + 0x2b, 0xb7, 0x9b, 0xe7, 0x19, 0xcf, 0xfd, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x38, 0x44, 0x47, + 0x09, 0x5b, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_upload_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_upload_service.pb.go new file mode 100644 index 0000000..729c2a9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/conversion_upload_service.pb.go @@ -0,0 +1,817 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/conversion_upload_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ConversionUploadService.UploadClickConversions][google.ads.googleads.v1.services.ConversionUploadService.UploadClickConversions]. +type UploadClickConversionsRequest struct { + // The ID of the customer performing the upload. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The conversions that are being uploaded. 
+ Conversions []*ClickConversion `protobuf:"bytes,2,rep,name=conversions,proto3" json:"conversions,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // This should always be set to true. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadClickConversionsRequest) Reset() { *m = UploadClickConversionsRequest{} } +func (m *UploadClickConversionsRequest) String() string { return proto.CompactTextString(m) } +func (*UploadClickConversionsRequest) ProtoMessage() {} +func (*UploadClickConversionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{0} +} +func (m *UploadClickConversionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadClickConversionsRequest.Unmarshal(m, b) +} +func (m *UploadClickConversionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadClickConversionsRequest.Marshal(b, m, deterministic) +} +func (dst *UploadClickConversionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadClickConversionsRequest.Merge(dst, src) +} +func (m *UploadClickConversionsRequest) XXX_Size() int { + return xxx_messageInfo_UploadClickConversionsRequest.Size(m) +} +func (m *UploadClickConversionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UploadClickConversionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadClickConversionsRequest proto.InternalMessageInfo + +func (m *UploadClickConversionsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *UploadClickConversionsRequest) GetConversions() []*ClickConversion { + if m != nil { + return m.Conversions + } + return nil +} + +func (m *UploadClickConversionsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +// Response message for [ConversionUploadService.UploadClickConversions][google.ads.googleads.v1.services.ConversionUploadService.UploadClickConversions]. +type UploadClickConversionsResponse struct { + // Errors that pertain to conversion failures in the partial failure mode. + // Returned when all errors occur inside the conversions. If any errors occur + // outside the conversions (e.g. auth errors), we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,1,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // Returned for successfully processed conversions. Proto will be empty for + // rows that received an error. Results are not returned when validate_only is + // true. 
+ Results []*ClickConversionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadClickConversionsResponse) Reset() { *m = UploadClickConversionsResponse{} } +func (m *UploadClickConversionsResponse) String() string { return proto.CompactTextString(m) } +func (*UploadClickConversionsResponse) ProtoMessage() {} +func (*UploadClickConversionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{1} +} +func (m *UploadClickConversionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadClickConversionsResponse.Unmarshal(m, b) +} +func (m *UploadClickConversionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadClickConversionsResponse.Marshal(b, m, deterministic) +} +func (dst *UploadClickConversionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadClickConversionsResponse.Merge(dst, src) +} +func (m *UploadClickConversionsResponse) XXX_Size() int { + return xxx_messageInfo_UploadClickConversionsResponse.Size(m) +} +func (m *UploadClickConversionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UploadClickConversionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadClickConversionsResponse proto.InternalMessageInfo + +func (m *UploadClickConversionsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *UploadClickConversionsResponse) GetResults() []*ClickConversionResult { + if m != nil { + return m.Results + } + return nil +} + +// Request message for [ConversionUploadService.UploadCallConversions][google.ads.googleads.v1.services.ConversionUploadService.UploadCallConversions]. +type UploadCallConversionsRequest struct { + // The ID of the customer performing the upload. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The conversions that are being uploaded. + Conversions []*CallConversion `protobuf:"bytes,2,rep,name=conversions,proto3" json:"conversions,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // This should always be set to true. 
+ PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadCallConversionsRequest) Reset() { *m = UploadCallConversionsRequest{} } +func (m *UploadCallConversionsRequest) String() string { return proto.CompactTextString(m) } +func (*UploadCallConversionsRequest) ProtoMessage() {} +func (*UploadCallConversionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{2} +} +func (m *UploadCallConversionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadCallConversionsRequest.Unmarshal(m, b) +} +func (m *UploadCallConversionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadCallConversionsRequest.Marshal(b, m, deterministic) +} +func (dst *UploadCallConversionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadCallConversionsRequest.Merge(dst, src) +} +func (m *UploadCallConversionsRequest) XXX_Size() int { + return xxx_messageInfo_UploadCallConversionsRequest.Size(m) +} +func (m *UploadCallConversionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UploadCallConversionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadCallConversionsRequest proto.InternalMessageInfo + +func (m *UploadCallConversionsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *UploadCallConversionsRequest) GetConversions() []*CallConversion { + if m != nil { + return m.Conversions + } + return nil +} + +func (m *UploadCallConversionsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +// Response message for [ConversionUploadService.UploadCallConversions][google.ads.googleads.v1.services.ConversionUploadService.UploadCallConversions]. +type UploadCallConversionsResponse struct { + // Errors that pertain to conversion failures in the partial failure mode. + // Returned when all errors occur inside the conversions. If any errors occur + // outside the conversions (e.g. auth errors), we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,1,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // Returned for successfully processed conversions. Proto will be empty for + // rows that received an error. Results are not returned when validate_only is + // true. 
+ Results []*CallConversionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadCallConversionsResponse) Reset() { *m = UploadCallConversionsResponse{} } +func (m *UploadCallConversionsResponse) String() string { return proto.CompactTextString(m) } +func (*UploadCallConversionsResponse) ProtoMessage() {} +func (*UploadCallConversionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{3} +} +func (m *UploadCallConversionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadCallConversionsResponse.Unmarshal(m, b) +} +func (m *UploadCallConversionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadCallConversionsResponse.Marshal(b, m, deterministic) +} +func (dst *UploadCallConversionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadCallConversionsResponse.Merge(dst, src) +} +func (m *UploadCallConversionsResponse) XXX_Size() int { + return xxx_messageInfo_UploadCallConversionsResponse.Size(m) +} +func (m *UploadCallConversionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UploadCallConversionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadCallConversionsResponse proto.InternalMessageInfo + +func (m *UploadCallConversionsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *UploadCallConversionsResponse) GetResults() []*CallConversionResult { + if m != nil { + return m.Results + } + return nil +} + +// A click conversion. +type ClickConversion struct { + // The Google click ID (gclid) associated with this conversion. + Gclid *wrappers.StringValue `protobuf:"bytes,1,opt,name=gclid,proto3" json:"gclid,omitempty"` + // Resource name of the conversion action associated with this conversion. + // Note: Although this resource name consists of a customer id and a + // conversion action id, validation will ignore the customer id and use the + // conversion action id as the sole identifier of the conversion action. + ConversionAction *wrappers.StringValue `protobuf:"bytes,2,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the conversion occurred. Must be after + // the click time. The timezone must be specified. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. “2019-01-01 12:32:45-08:00”. + ConversionDateTime *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_date_time,json=conversionDateTime,proto3" json:"conversion_date_time,omitempty"` + // The value of the conversion for the advertiser. + ConversionValue *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=conversion_value,json=conversionValue,proto3" json:"conversion_value,omitempty"` + // Currency associated with the conversion value. This is the ISO 4217 + // 3-character currency code. For example: USD, EUR. + CurrencyCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + // The order ID associated with the conversion. An order id can only be used + // for one conversion per conversion action. + OrderId *wrappers.StringValue `protobuf:"bytes,6,opt,name=order_id,json=orderId,proto3" json:"order_id,omitempty"` + // Additional data about externally attributed conversions. 
This field + // is required for conversions with an externally attributed conversion + // action, but should not be set otherwise. + ExternalAttributionData *ExternalAttributionData `protobuf:"bytes,7,opt,name=external_attribution_data,json=externalAttributionData,proto3" json:"external_attribution_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClickConversion) Reset() { *m = ClickConversion{} } +func (m *ClickConversion) String() string { return proto.CompactTextString(m) } +func (*ClickConversion) ProtoMessage() {} +func (*ClickConversion) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{4} +} +func (m *ClickConversion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClickConversion.Unmarshal(m, b) +} +func (m *ClickConversion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClickConversion.Marshal(b, m, deterministic) +} +func (dst *ClickConversion) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClickConversion.Merge(dst, src) +} +func (m *ClickConversion) XXX_Size() int { + return xxx_messageInfo_ClickConversion.Size(m) +} +func (m *ClickConversion) XXX_DiscardUnknown() { + xxx_messageInfo_ClickConversion.DiscardUnknown(m) +} + +var xxx_messageInfo_ClickConversion proto.InternalMessageInfo + +func (m *ClickConversion) GetGclid() *wrappers.StringValue { + if m != nil { + return m.Gclid + } + return nil +} + +func (m *ClickConversion) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *ClickConversion) GetConversionDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionDateTime + } + return nil +} + +func (m *ClickConversion) GetConversionValue() *wrappers.DoubleValue { + if m != nil { + return m.ConversionValue + } + return nil +} + +func (m *ClickConversion) GetCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.CurrencyCode + } + return nil +} + +func (m *ClickConversion) GetOrderId() *wrappers.StringValue { + if m != nil { + return m.OrderId + } + return nil +} + +func (m *ClickConversion) GetExternalAttributionData() *ExternalAttributionData { + if m != nil { + return m.ExternalAttributionData + } + return nil +} + +// A call conversion. +type CallConversion struct { + // The caller id from which this call was placed. Caller id is expected to be + // in E.164 format with preceding '+' sign. e.g. "+16502531234". + CallerId *wrappers.StringValue `protobuf:"bytes,1,opt,name=caller_id,json=callerId,proto3" json:"caller_id,omitempty"` + // The date time at which the call occurred. The timezone must be specified. + // The format is "yyyy-mm-dd hh:mm:ss+|-hh:mm", + // e.g. "2019-01-01 12:32:45-08:00". + CallStartDateTime *wrappers.StringValue `protobuf:"bytes,2,opt,name=call_start_date_time,json=callStartDateTime,proto3" json:"call_start_date_time,omitempty"` + // Resource name of the conversion action associated with this conversion. + // Note: Although this resource name consists of a customer id and a + // conversion action id, validation will ignore the customer id and use the + // conversion action id as the sole identifier of the conversion action. + ConversionAction *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the conversion occurred. Must be after the call + // time. 
The timezone must be specified. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". + ConversionDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=conversion_date_time,json=conversionDateTime,proto3" json:"conversion_date_time,omitempty"` + // The value of the conversion for the advertiser. + ConversionValue *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=conversion_value,json=conversionValue,proto3" json:"conversion_value,omitempty"` + // Currency associated with the conversion value. This is the ISO 4217 + // 3-character currency code. For example: USD, EUR. + CurrencyCode *wrappers.StringValue `protobuf:"bytes,6,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallConversion) Reset() { *m = CallConversion{} } +func (m *CallConversion) String() string { return proto.CompactTextString(m) } +func (*CallConversion) ProtoMessage() {} +func (*CallConversion) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{5} +} +func (m *CallConversion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallConversion.Unmarshal(m, b) +} +func (m *CallConversion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallConversion.Marshal(b, m, deterministic) +} +func (dst *CallConversion) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallConversion.Merge(dst, src) +} +func (m *CallConversion) XXX_Size() int { + return xxx_messageInfo_CallConversion.Size(m) +} +func (m *CallConversion) XXX_DiscardUnknown() { + xxx_messageInfo_CallConversion.DiscardUnknown(m) +} + +var xxx_messageInfo_CallConversion proto.InternalMessageInfo + +func (m *CallConversion) GetCallerId() *wrappers.StringValue { + if m != nil { + return m.CallerId + } + return nil +} + +func (m *CallConversion) GetCallStartDateTime() *wrappers.StringValue { + if m != nil { + return m.CallStartDateTime + } + return nil +} + +func (m *CallConversion) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *CallConversion) GetConversionDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionDateTime + } + return nil +} + +func (m *CallConversion) GetConversionValue() *wrappers.DoubleValue { + if m != nil { + return m.ConversionValue + } + return nil +} + +func (m *CallConversion) GetCurrencyCode() *wrappers.StringValue { + if m != nil { + return m.CurrencyCode + } + return nil +} + +// Contains additional information about externally attributed conversions. +type ExternalAttributionData struct { + // Represents the fraction of the conversion that is attributed to the + // Google Ads click. + ExternalAttributionCredit *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=external_attribution_credit,json=externalAttributionCredit,proto3" json:"external_attribution_credit,omitempty"` + // Specifies the attribution model name. 
+ ExternalAttributionModel *wrappers.StringValue `protobuf:"bytes,2,opt,name=external_attribution_model,json=externalAttributionModel,proto3" json:"external_attribution_model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExternalAttributionData) Reset() { *m = ExternalAttributionData{} } +func (m *ExternalAttributionData) String() string { return proto.CompactTextString(m) } +func (*ExternalAttributionData) ProtoMessage() {} +func (*ExternalAttributionData) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{6} +} +func (m *ExternalAttributionData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExternalAttributionData.Unmarshal(m, b) +} +func (m *ExternalAttributionData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExternalAttributionData.Marshal(b, m, deterministic) +} +func (dst *ExternalAttributionData) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExternalAttributionData.Merge(dst, src) +} +func (m *ExternalAttributionData) XXX_Size() int { + return xxx_messageInfo_ExternalAttributionData.Size(m) +} +func (m *ExternalAttributionData) XXX_DiscardUnknown() { + xxx_messageInfo_ExternalAttributionData.DiscardUnknown(m) +} + +var xxx_messageInfo_ExternalAttributionData proto.InternalMessageInfo + +func (m *ExternalAttributionData) GetExternalAttributionCredit() *wrappers.DoubleValue { + if m != nil { + return m.ExternalAttributionCredit + } + return nil +} + +func (m *ExternalAttributionData) GetExternalAttributionModel() *wrappers.StringValue { + if m != nil { + return m.ExternalAttributionModel + } + return nil +} + +// Identifying information for a successfully processed ClickConversion. +type ClickConversionResult struct { + // The Google Click ID (gclid) associated with this conversion. + Gclid *wrappers.StringValue `protobuf:"bytes,1,opt,name=gclid,proto3" json:"gclid,omitempty"` + // Resource name of the conversion action associated with this conversion. + ConversionAction *wrappers.StringValue `protobuf:"bytes,2,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the conversion occurred. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. “2019-01-01 12:32:45-08:00”. 
+ ConversionDateTime *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_date_time,json=conversionDateTime,proto3" json:"conversion_date_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClickConversionResult) Reset() { *m = ClickConversionResult{} } +func (m *ClickConversionResult) String() string { return proto.CompactTextString(m) } +func (*ClickConversionResult) ProtoMessage() {} +func (*ClickConversionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{7} +} +func (m *ClickConversionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClickConversionResult.Unmarshal(m, b) +} +func (m *ClickConversionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClickConversionResult.Marshal(b, m, deterministic) +} +func (dst *ClickConversionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClickConversionResult.Merge(dst, src) +} +func (m *ClickConversionResult) XXX_Size() int { + return xxx_messageInfo_ClickConversionResult.Size(m) +} +func (m *ClickConversionResult) XXX_DiscardUnknown() { + xxx_messageInfo_ClickConversionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ClickConversionResult proto.InternalMessageInfo + +func (m *ClickConversionResult) GetGclid() *wrappers.StringValue { + if m != nil { + return m.Gclid + } + return nil +} + +func (m *ClickConversionResult) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *ClickConversionResult) GetConversionDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionDateTime + } + return nil +} + +// Identifying information for a successfully processed CallConversionUpload. +type CallConversionResult struct { + // The caller id from which this call was placed. Caller id is expected to be + // in E.164 format with preceding '+' sign. + CallerId *wrappers.StringValue `protobuf:"bytes,1,opt,name=caller_id,json=callerId,proto3" json:"caller_id,omitempty"` + // The date time at which the call occurred. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". + CallStartDateTime *wrappers.StringValue `protobuf:"bytes,2,opt,name=call_start_date_time,json=callStartDateTime,proto3" json:"call_start_date_time,omitempty"` + // Resource name of the conversion action associated with this conversion. + ConversionAction *wrappers.StringValue `protobuf:"bytes,3,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The date time at which the conversion occurred. The format is + // "yyyy-mm-dd hh:mm:ss+|-hh:mm", e.g. "2019-01-01 12:32:45-08:00". 
+ ConversionDateTime *wrappers.StringValue `protobuf:"bytes,4,opt,name=conversion_date_time,json=conversionDateTime,proto3" json:"conversion_date_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallConversionResult) Reset() { *m = CallConversionResult{} } +func (m *CallConversionResult) String() string { return proto.CompactTextString(m) } +func (*CallConversionResult) ProtoMessage() {} +func (*CallConversionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_conversion_upload_service_97d02deb0eb593c3, []int{8} +} +func (m *CallConversionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallConversionResult.Unmarshal(m, b) +} +func (m *CallConversionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallConversionResult.Marshal(b, m, deterministic) +} +func (dst *CallConversionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallConversionResult.Merge(dst, src) +} +func (m *CallConversionResult) XXX_Size() int { + return xxx_messageInfo_CallConversionResult.Size(m) +} +func (m *CallConversionResult) XXX_DiscardUnknown() { + xxx_messageInfo_CallConversionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_CallConversionResult proto.InternalMessageInfo + +func (m *CallConversionResult) GetCallerId() *wrappers.StringValue { + if m != nil { + return m.CallerId + } + return nil +} + +func (m *CallConversionResult) GetCallStartDateTime() *wrappers.StringValue { + if m != nil { + return m.CallStartDateTime + } + return nil +} + +func (m *CallConversionResult) GetConversionAction() *wrappers.StringValue { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *CallConversionResult) GetConversionDateTime() *wrappers.StringValue { + if m != nil { + return m.ConversionDateTime + } + return nil +} + +func init() { + proto.RegisterType((*UploadClickConversionsRequest)(nil), "google.ads.googleads.v1.services.UploadClickConversionsRequest") + proto.RegisterType((*UploadClickConversionsResponse)(nil), "google.ads.googleads.v1.services.UploadClickConversionsResponse") + proto.RegisterType((*UploadCallConversionsRequest)(nil), "google.ads.googleads.v1.services.UploadCallConversionsRequest") + proto.RegisterType((*UploadCallConversionsResponse)(nil), "google.ads.googleads.v1.services.UploadCallConversionsResponse") + proto.RegisterType((*ClickConversion)(nil), "google.ads.googleads.v1.services.ClickConversion") + proto.RegisterType((*CallConversion)(nil), "google.ads.googleads.v1.services.CallConversion") + proto.RegisterType((*ExternalAttributionData)(nil), "google.ads.googleads.v1.services.ExternalAttributionData") + proto.RegisterType((*ClickConversionResult)(nil), "google.ads.googleads.v1.services.ClickConversionResult") + proto.RegisterType((*CallConversionResult)(nil), "google.ads.googleads.v1.services.CallConversionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ConversionUploadServiceClient is the client API for ConversionUploadService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type ConversionUploadServiceClient interface { + // Processes the given click conversions. + UploadClickConversions(ctx context.Context, in *UploadClickConversionsRequest, opts ...grpc.CallOption) (*UploadClickConversionsResponse, error) + // Processes the given call conversions. + UploadCallConversions(ctx context.Context, in *UploadCallConversionsRequest, opts ...grpc.CallOption) (*UploadCallConversionsResponse, error) +} + +type conversionUploadServiceClient struct { + cc *grpc.ClientConn +} + +func NewConversionUploadServiceClient(cc *grpc.ClientConn) ConversionUploadServiceClient { + return &conversionUploadServiceClient{cc} +} + +func (c *conversionUploadServiceClient) UploadClickConversions(ctx context.Context, in *UploadClickConversionsRequest, opts ...grpc.CallOption) (*UploadClickConversionsResponse, error) { + out := new(UploadClickConversionsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ConversionUploadService/UploadClickConversions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conversionUploadServiceClient) UploadCallConversions(ctx context.Context, in *UploadCallConversionsRequest, opts ...grpc.CallOption) (*UploadCallConversionsResponse, error) { + out := new(UploadCallConversionsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ConversionUploadService/UploadCallConversions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ConversionUploadServiceServer is the server API for ConversionUploadService service. +type ConversionUploadServiceServer interface { + // Processes the given click conversions. + UploadClickConversions(context.Context, *UploadClickConversionsRequest) (*UploadClickConversionsResponse, error) + // Processes the given call conversions. 
+ UploadCallConversions(context.Context, *UploadCallConversionsRequest) (*UploadCallConversionsResponse, error) +} + +func RegisterConversionUploadServiceServer(s *grpc.Server, srv ConversionUploadServiceServer) { + s.RegisterService(&_ConversionUploadService_serviceDesc, srv) +} + +func _ConversionUploadService_UploadClickConversions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UploadClickConversionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversionUploadServiceServer).UploadClickConversions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ConversionUploadService/UploadClickConversions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversionUploadServiceServer).UploadClickConversions(ctx, req.(*UploadClickConversionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConversionUploadService_UploadCallConversions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UploadCallConversionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConversionUploadServiceServer).UploadCallConversions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ConversionUploadService/UploadCallConversions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConversionUploadServiceServer).UploadCallConversions(ctx, req.(*UploadCallConversionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ConversionUploadService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ConversionUploadService", + HandlerType: (*ConversionUploadServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UploadClickConversions", + Handler: _ConversionUploadService_UploadClickConversions_Handler, + }, + { + MethodName: "UploadCallConversions", + Handler: _ConversionUploadService_UploadCallConversions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/conversion_upload_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/conversion_upload_service.proto", fileDescriptor_conversion_upload_service_97d02deb0eb593c3) +} + +var fileDescriptor_conversion_upload_service_97d02deb0eb593c3 = []byte{ + // 900 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x57, 0x4d, 0x6f, 0xe3, 0x44, + 0x18, 0x96, 0x93, 0x7e, 0xed, 0x04, 0xb6, 0xec, 0xd0, 0x52, 0x6f, 0x28, 0x4b, 0x64, 0xad, 0x44, + 0xd5, 0x83, 0x4d, 0xb2, 0x88, 0xa5, 0x46, 0x6c, 0xd7, 0xdb, 0xee, 0x96, 0x1e, 0x16, 0x15, 0x07, + 0x7a, 0x58, 0x45, 0xb2, 0xa6, 0x9e, 0x59, 0xcb, 0x62, 0xe2, 0x31, 0x33, 0xe3, 0x00, 0x42, 0x5c, + 0xe0, 0x27, 0x70, 0xe5, 0xc4, 0x91, 0x7f, 0x80, 0x8a, 0x90, 0xb8, 0x72, 0xed, 0x85, 0x03, 0x47, + 0xc4, 0x81, 0x5f, 0x81, 0xfc, 0x31, 0x71, 0x1a, 0x39, 0xc4, 0xad, 0x72, 0x42, 0x7b, 0x9b, 0xcc, + 0xbc, 0xcf, 0xf3, 0x7e, 0x3d, 0x33, 0x79, 0x0d, 0x1e, 0x06, 0x8c, 0x05, 0x94, 0x58, 0x08, 0x0b, + 0x2b, 0x5f, 0xa6, 0xab, 0x51, 0xd7, 0x12, 0x84, 0x8f, 0x42, 0x9f, 0x08, 0xcb, 0x67, 0xd1, 0x88, + 0x70, 0x11, 0xb2, 0xc8, 0x4b, 0x62, 0xca, 
0x10, 0xf6, 0x8a, 0x23, 0x33, 0xe6, 0x4c, 0x32, 0xd8, + 0xc9, 0x61, 0x26, 0xc2, 0xc2, 0x1c, 0x33, 0x98, 0xa3, 0xae, 0xa9, 0x18, 0xda, 0xdb, 0xca, 0x47, + 0x1c, 0x5a, 0x28, 0x8a, 0x98, 0x44, 0x32, 0x64, 0x91, 0xc8, 0xf1, 0xed, 0x3b, 0xc5, 0x69, 0xf6, + 0xeb, 0x2c, 0x79, 0x6e, 0x7d, 0xc1, 0x51, 0x1c, 0x13, 0xae, 0xce, 0xb7, 0x8a, 0x73, 0x1e, 0xfb, + 0x96, 0x90, 0x48, 0x26, 0xc5, 0x81, 0xf1, 0xab, 0x06, 0xde, 0xf8, 0x34, 0x8b, 0xe8, 0x80, 0x86, + 0xfe, 0x67, 0x07, 0xe3, 0x38, 0x85, 0x4b, 0x3e, 0x4f, 0x88, 0x90, 0xf0, 0x4d, 0xd0, 0xf2, 0x13, + 0x21, 0xd9, 0x90, 0x70, 0x2f, 0xc4, 0xba, 0xd6, 0xd1, 0x76, 0x6e, 0xb8, 0x40, 0x6d, 0x1d, 0x63, + 0xd8, 0x07, 0xad, 0x32, 0x3d, 0xa1, 0x37, 0x3a, 0xcd, 0x9d, 0x56, 0xaf, 0x6b, 0xce, 0xcb, 0xc8, + 0x9c, 0x72, 0xe8, 0x4e, 0xb2, 0xc0, 0xb7, 0xc0, 0x7a, 0x8c, 0xb8, 0x0c, 0x11, 0xf5, 0x9e, 0xa3, + 0x90, 0x26, 0x9c, 0xe8, 0xcd, 0x8e, 0xb6, 0xb3, 0xe6, 0xde, 0x2c, 0xb6, 0x9f, 0xe4, 0xbb, 0xc6, + 0xb9, 0x06, 0xee, 0xcc, 0x4a, 0x40, 0xc4, 0x2c, 0x12, 0x04, 0x3e, 0x01, 0x9b, 0x53, 0x5c, 0x1e, + 0xe1, 0x9c, 0xf1, 0x2c, 0x97, 0x56, 0x0f, 0xaa, 0x50, 0x79, 0xec, 0x9b, 0xfd, 0xac, 0x38, 0xee, + 0xab, 0x97, 0xbd, 0x3c, 0x4e, 0xcd, 0xe1, 0xc7, 0x60, 0x95, 0x13, 0x91, 0x50, 0xa9, 0x92, 0xbc, + 0x7f, 0xf5, 0x24, 0x33, 0xbc, 0xab, 0x78, 0x8c, 0x5f, 0x34, 0xb0, 0x5d, 0x44, 0x8f, 0x28, 0xbd, + 0x4e, 0xf5, 0xdd, 0xaa, 0xea, 0xbf, 0x5d, 0x23, 0xb0, 0x4b, 0xfe, 0xae, 0x59, 0xfc, 0x9f, 0x4b, + 0xf5, 0x4c, 0x87, 0xbf, 0xe0, 0xda, 0x9f, 0x4c, 0xd7, 0xfe, 0xdd, 0x2b, 0xa7, 0x38, 0x55, 0xfa, + 0x1f, 0x96, 0xc0, 0xfa, 0x54, 0x77, 0x60, 0x0f, 0x2c, 0x07, 0x3e, 0x2d, 0xea, 0xdc, 0xea, 0x6d, + 0x2b, 0x1f, 0xea, 0x5a, 0x99, 0x7d, 0xc9, 0xc3, 0x28, 0x38, 0x45, 0x34, 0x21, 0x6e, 0x6e, 0x0a, + 0x8f, 0xc1, 0xad, 0x89, 0xdb, 0x8d, 0xfc, 0xf4, 0x5a, 0xea, 0x8d, 0x1a, 0xf8, 0x57, 0x4a, 0x98, + 0x93, 0xa1, 0xe0, 0x47, 0x60, 0x63, 0x82, 0x0a, 0x23, 0x49, 0x3c, 0x19, 0x0e, 0xf3, 0xe2, 0xcf, + 0x63, 0x83, 0x25, 0xf2, 0x10, 0x49, 0xf2, 0x49, 0x38, 0x24, 0xf0, 0x08, 0x4c, 0xf8, 0xf0, 0x46, + 0xa9, 0x9d, 0xbe, 0x34, 0x83, 0xeb, 0x90, 0x25, 0x67, 0x94, 0xe4, 0x5c, 0xeb, 0x25, 0x2a, 0xdb, + 0x80, 0x0e, 0x78, 0xd9, 0x4f, 0x38, 0x27, 0x91, 0xff, 0x95, 0xe7, 0x33, 0x4c, 0xf4, 0xe5, 0x1a, + 0x11, 0xbd, 0xa4, 0x20, 0x07, 0x0c, 0x13, 0x78, 0x1f, 0xac, 0x31, 0x8e, 0x73, 0x15, 0xaf, 0xd4, + 0x40, 0xaf, 0x66, 0xd6, 0xc7, 0x18, 0x26, 0xe0, 0x36, 0xf9, 0x52, 0x12, 0x1e, 0x21, 0xea, 0x21, + 0x29, 0x79, 0x78, 0x96, 0xc8, 0xa2, 0x3c, 0x48, 0x5f, 0xcd, 0x98, 0xf6, 0xe6, 0x6b, 0xe1, 0x71, + 0x41, 0xe1, 0x94, 0x0c, 0x87, 0x48, 0x22, 0x77, 0x8b, 0x54, 0x1f, 0x18, 0xbf, 0x35, 0xc1, 0xcd, + 0xcb, 0x02, 0x82, 0x7b, 0xe0, 0x86, 0x8f, 0x28, 0x2d, 0x6f, 0xe2, 0xbc, 0x1c, 0xd6, 0x72, 0xf3, + 0x63, 0x0c, 0x9f, 0x82, 0x8d, 0x74, 0xed, 0x09, 0x89, 0xb8, 0x9c, 0xe8, 0x6c, 0x1d, 0x9d, 0xdc, + 0x4a, 0x91, 0xfd, 0x14, 0x38, 0x6e, 0x6c, 0xa5, 0xe6, 0x9a, 0x0b, 0xd5, 0xdc, 0xd2, 0x02, 0x35, + 0xb7, 0xbc, 0x10, 0xcd, 0xad, 0x5c, 0x55, 0x73, 0xc6, 0x85, 0x06, 0xb6, 0x66, 0x34, 0x1e, 0x0e, + 0xc0, 0xeb, 0x95, 0xb2, 0xf2, 0x39, 0xc1, 0xa1, 0x9c, 0xd9, 0xde, 0xc9, 0x90, 0x6f, 0x57, 0x68, + 0xe7, 0x20, 0x83, 0xc3, 0x67, 0xa0, 0x5d, 0xc9, 0x3e, 0x64, 0x98, 0xd0, 0x5a, 0x5d, 0xd7, 0x2b, + 0xc8, 0x9f, 0xa6, 0x68, 0xe3, 0x6f, 0x0d, 0x6c, 0x56, 0xfe, 0xad, 0xfc, 0xcf, 0x9e, 0x2f, 0xe3, + 0xbc, 0x01, 0x36, 0xaa, 0xde, 0xf0, 0x17, 0x17, 0xb1, 0x46, 0xf5, 0x7a, 0x17, 0x4d, 0xb0, 0x55, + 0x56, 0x2e, 0xff, 0x97, 0xee, 0xe7, 0xcf, 0x21, 0xfc, 0x53, 0x03, 0xaf, 0x55, 0x0f, 0x4d, 0x70, + 0x7f, 0xfe, 0x5b, 0xfa, 0x9f, 0xf3, 0x62, 0xfb, 0xe1, 0xf5, 0x09, 
0xf2, 0x99, 0xc1, 0xd8, 0xff, + 0xf6, 0xe2, 0xaf, 0xef, 0x1b, 0x7b, 0xc6, 0x3b, 0xe9, 0x08, 0xad, 0x46, 0x1d, 0x61, 0x7d, 0x3d, + 0x31, 0x08, 0x7d, 0xb0, 0xfb, 0x8d, 0x9d, 0x54, 0xb2, 0xd8, 0xda, 0x2e, 0xfc, 0x43, 0x03, 0x9b, + 0x95, 0x63, 0x09, 0x7c, 0x50, 0x3b, 0xb8, 0xca, 0x71, 0xac, 0xbd, 0x7f, 0x6d, 0x7c, 0x91, 0xdb, + 0x83, 0x2c, 0xb7, 0xf7, 0x8c, 0x7b, 0xb5, 0x72, 0xbb, 0x4c, 0x62, 0x6b, 0xbb, 0x8f, 0xbe, 0x6b, + 0x80, 0xbb, 0x3e, 0x1b, 0xce, 0x0d, 0xe3, 0xd1, 0xf6, 0x8c, 0xde, 0x9f, 0xa4, 0xfa, 0x39, 0xd1, + 0x9e, 0x7d, 0x58, 0x30, 0x04, 0x8c, 0xa2, 0x28, 0x30, 0x19, 0x0f, 0xac, 0x80, 0x44, 0x99, 0xba, + 0xd4, 0x37, 0x4c, 0x1c, 0x8a, 0xd9, 0x9f, 0x34, 0xef, 0xab, 0xc5, 0x8f, 0x8d, 0xe6, 0x91, 0xe3, + 0xfc, 0xd4, 0xe8, 0x1c, 0xe5, 0x84, 0x0e, 0x16, 0x66, 0xbe, 0x4c, 0x57, 0xa7, 0x5d, 0xb3, 0x70, + 0x2c, 0x7e, 0x57, 0x26, 0x03, 0x07, 0x8b, 0xc1, 0xd8, 0x64, 0x70, 0xda, 0x1d, 0x28, 0x93, 0x7f, + 0x1a, 0x77, 0xf3, 0x7d, 0xdb, 0x76, 0xb0, 0xb0, 0xed, 0xb1, 0x91, 0x6d, 0x9f, 0x76, 0x6d, 0x5b, + 0x99, 0x9d, 0xad, 0x64, 0x71, 0xde, 0xfb, 0x37, 0x00, 0x00, 0xff, 0xff, 0xc0, 0xa3, 0xe2, 0x9c, + 0x79, 0x0d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/custom_interest_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/custom_interest_service.pb.go new file mode 100644 index 0000000..0d9edc1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/custom_interest_service.pb.go @@ -0,0 +1,529 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/custom_interest_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomInterestService.GetCustomInterest][google.ads.googleads.v1.services.CustomInterestService.GetCustomInterest]. +type GetCustomInterestRequest struct { + // The resource name of the custom interest to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomInterestRequest) Reset() { *m = GetCustomInterestRequest{} } +func (m *GetCustomInterestRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomInterestRequest) ProtoMessage() {} +func (*GetCustomInterestRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_service_85b11e27e8f607fc, []int{0} +} +func (m *GetCustomInterestRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomInterestRequest.Unmarshal(m, b) +} +func (m *GetCustomInterestRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomInterestRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomInterestRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomInterestRequest.Merge(dst, src) +} +func (m *GetCustomInterestRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomInterestRequest.Size(m) +} +func (m *GetCustomInterestRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomInterestRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomInterestRequest proto.InternalMessageInfo + +func (m *GetCustomInterestRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomInterestService.MutateCustomInterests][google.ads.googleads.v1.services.CustomInterestService.MutateCustomInterests]. +type MutateCustomInterestsRequest struct { + // The ID of the customer whose custom interests are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual custom interests. + Operations []*CustomInterestOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomInterestsRequest) Reset() { *m = MutateCustomInterestsRequest{} } +func (m *MutateCustomInterestsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomInterestsRequest) ProtoMessage() {} +func (*MutateCustomInterestsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_service_85b11e27e8f607fc, []int{1} +} +func (m *MutateCustomInterestsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomInterestsRequest.Unmarshal(m, b) +} +func (m *MutateCustomInterestsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomInterestsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomInterestsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomInterestsRequest.Merge(dst, src) +} +func (m *MutateCustomInterestsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomInterestsRequest.Size(m) +} +func (m *MutateCustomInterestsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomInterestsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomInterestsRequest proto.InternalMessageInfo + +func (m *MutateCustomInterestsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomInterestsRequest) GetOperations() []*CustomInterestOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCustomInterestsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update) on a custom interest. +type CustomInterestOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomInterestOperation_Create + // *CustomInterestOperation_Update + Operation isCustomInterestOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInterestOperation) Reset() { *m = CustomInterestOperation{} } +func (m *CustomInterestOperation) String() string { return proto.CompactTextString(m) } +func (*CustomInterestOperation) ProtoMessage() {} +func (*CustomInterestOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_service_85b11e27e8f607fc, []int{2} +} +func (m *CustomInterestOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInterestOperation.Unmarshal(m, b) +} +func (m *CustomInterestOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInterestOperation.Marshal(b, m, deterministic) +} +func (dst *CustomInterestOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInterestOperation.Merge(dst, src) +} +func (m *CustomInterestOperation) XXX_Size() int { + return xxx_messageInfo_CustomInterestOperation.Size(m) +} +func (m *CustomInterestOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInterestOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInterestOperation proto.InternalMessageInfo + +func (m *CustomInterestOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCustomInterestOperation_Operation interface { + isCustomInterestOperation_Operation() +} + +type CustomInterestOperation_Create struct { + Create *resources.CustomInterest `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomInterestOperation_Update struct { + Update *resources.CustomInterest `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +func (*CustomInterestOperation_Create) isCustomInterestOperation_Operation() {} + +func (*CustomInterestOperation_Update) isCustomInterestOperation_Operation() {} + +func (m *CustomInterestOperation) GetOperation() isCustomInterestOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomInterestOperation) GetCreate() *resources.CustomInterest { + if x, ok := m.GetOperation().(*CustomInterestOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomInterestOperation) GetUpdate() *resources.CustomInterest { + if x, ok := m.GetOperation().(*CustomInterestOperation_Update); ok { + return x.Update + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomInterestOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomInterestOperation_OneofMarshaler, _CustomInterestOperation_OneofUnmarshaler, _CustomInterestOperation_OneofSizer, []interface{}{ + (*CustomInterestOperation_Create)(nil), + (*CustomInterestOperation_Update)(nil), + } +} + +func _CustomInterestOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomInterestOperation) + // operation + switch x := m.Operation.(type) { + case *CustomInterestOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomInterestOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomInterestOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomInterestOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomInterestOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomInterest) + err := b.DecodeMessage(msg) + m.Operation = &CustomInterestOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomInterest) + err := b.DecodeMessage(msg) + m.Operation = &CustomInterestOperation_Update{msg} + return true, err + default: + return false, nil + } +} + +func _CustomInterestOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomInterestOperation) + // operation + switch x := m.Operation.(type) { + case *CustomInterestOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomInterestOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for custom interest mutate. +type MutateCustomInterestsResponse struct { + // All results for the mutate. 
+ Results []*MutateCustomInterestResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomInterestsResponse) Reset() { *m = MutateCustomInterestsResponse{} } +func (m *MutateCustomInterestsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomInterestsResponse) ProtoMessage() {} +func (*MutateCustomInterestsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_service_85b11e27e8f607fc, []int{3} +} +func (m *MutateCustomInterestsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomInterestsResponse.Unmarshal(m, b) +} +func (m *MutateCustomInterestsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomInterestsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomInterestsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomInterestsResponse.Merge(dst, src) +} +func (m *MutateCustomInterestsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomInterestsResponse.Size(m) +} +func (m *MutateCustomInterestsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomInterestsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomInterestsResponse proto.InternalMessageInfo + +func (m *MutateCustomInterestsResponse) GetResults() []*MutateCustomInterestResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the custom interest mutate. +type MutateCustomInterestResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomInterestResult) Reset() { *m = MutateCustomInterestResult{} } +func (m *MutateCustomInterestResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomInterestResult) ProtoMessage() {} +func (*MutateCustomInterestResult) Descriptor() ([]byte, []int) { + return fileDescriptor_custom_interest_service_85b11e27e8f607fc, []int{4} +} +func (m *MutateCustomInterestResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomInterestResult.Unmarshal(m, b) +} +func (m *MutateCustomInterestResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomInterestResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomInterestResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomInterestResult.Merge(dst, src) +} +func (m *MutateCustomInterestResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomInterestResult.Size(m) +} +func (m *MutateCustomInterestResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomInterestResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomInterestResult proto.InternalMessageInfo + +func (m *MutateCustomInterestResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomInterestRequest)(nil), "google.ads.googleads.v1.services.GetCustomInterestRequest") + proto.RegisterType((*MutateCustomInterestsRequest)(nil), "google.ads.googleads.v1.services.MutateCustomInterestsRequest") + proto.RegisterType((*CustomInterestOperation)(nil), 
"google.ads.googleads.v1.services.CustomInterestOperation") + proto.RegisterType((*MutateCustomInterestsResponse)(nil), "google.ads.googleads.v1.services.MutateCustomInterestsResponse") + proto.RegisterType((*MutateCustomInterestResult)(nil), "google.ads.googleads.v1.services.MutateCustomInterestResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomInterestServiceClient is the client API for CustomInterestService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomInterestServiceClient interface { + // Returns the requested custom interest in full detail. + GetCustomInterest(ctx context.Context, in *GetCustomInterestRequest, opts ...grpc.CallOption) (*resources.CustomInterest, error) + // Creates or updates custom interests. Operation statuses are returned. + MutateCustomInterests(ctx context.Context, in *MutateCustomInterestsRequest, opts ...grpc.CallOption) (*MutateCustomInterestsResponse, error) +} + +type customInterestServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomInterestServiceClient(cc *grpc.ClientConn) CustomInterestServiceClient { + return &customInterestServiceClient{cc} +} + +func (c *customInterestServiceClient) GetCustomInterest(ctx context.Context, in *GetCustomInterestRequest, opts ...grpc.CallOption) (*resources.CustomInterest, error) { + out := new(resources.CustomInterest) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomInterestService/GetCustomInterest", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customInterestServiceClient) MutateCustomInterests(ctx context.Context, in *MutateCustomInterestsRequest, opts ...grpc.CallOption) (*MutateCustomInterestsResponse, error) { + out := new(MutateCustomInterestsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomInterestService/MutateCustomInterests", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomInterestServiceServer is the server API for CustomInterestService service. +type CustomInterestServiceServer interface { + // Returns the requested custom interest in full detail. + GetCustomInterest(context.Context, *GetCustomInterestRequest) (*resources.CustomInterest, error) + // Creates or updates custom interests. Operation statuses are returned. 
+ MutateCustomInterests(context.Context, *MutateCustomInterestsRequest) (*MutateCustomInterestsResponse, error) +} + +func RegisterCustomInterestServiceServer(s *grpc.Server, srv CustomInterestServiceServer) { + s.RegisterService(&_CustomInterestService_serviceDesc, srv) +} + +func _CustomInterestService_GetCustomInterest_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomInterestRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomInterestServiceServer).GetCustomInterest(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomInterestService/GetCustomInterest", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomInterestServiceServer).GetCustomInterest(ctx, req.(*GetCustomInterestRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomInterestService_MutateCustomInterests_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomInterestsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomInterestServiceServer).MutateCustomInterests(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomInterestService/MutateCustomInterests", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomInterestServiceServer).MutateCustomInterests(ctx, req.(*MutateCustomInterestsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomInterestService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomInterestService", + HandlerType: (*CustomInterestServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomInterest", + Handler: _CustomInterestService_GetCustomInterest_Handler, + }, + { + MethodName: "MutateCustomInterests", + Handler: _CustomInterestService_MutateCustomInterests_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/custom_interest_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/custom_interest_service.proto", fileDescriptor_custom_interest_service_85b11e27e8f607fc) +} + +var fileDescriptor_custom_interest_service_85b11e27e8f607fc = []byte{ + // 633 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0x41, 0x6b, 0xd4, 0x40, + 0x14, 0xc7, 0x4d, 0x2a, 0xd5, 0x4e, 0xea, 0xc1, 0x40, 0x71, 0x09, 0x55, 0x97, 0xd8, 0xc3, 0xb2, + 0x87, 0x84, 0x6c, 0x05, 0xd9, 0xd4, 0x76, 0xc9, 0x0a, 0x6e, 0x8b, 0xd4, 0x96, 0x08, 0x0b, 0xca, + 0x42, 0x98, 0x6e, 0xa6, 0x21, 0x34, 0xc9, 0xc4, 0x99, 0xc9, 0x96, 0x52, 0x7a, 0xd0, 0xaf, 0xe0, + 0x37, 0xf0, 0xe8, 0xd5, 0xb3, 0x78, 0xf7, 0xea, 0xc9, 0xbb, 0x27, 0xbf, 0x83, 0x20, 0xc9, 0x64, + 0x62, 0x77, 0xdd, 0xb0, 0x5a, 0x6f, 0x2f, 0x93, 0xf7, 0x7e, 0xef, 0xfd, 0xdf, 0x9b, 0x37, 0x60, + 0x27, 0xc0, 0x38, 0x88, 0x90, 0x09, 0x7d, 0x6a, 0x72, 0x33, 0xb7, 0x26, 0x96, 0x49, 0x11, 0x99, + 0x84, 0x63, 0x44, 0xcd, 0x71, 0x46, 0x19, 0x8e, 0xbd, 0x30, 0x61, 0x88, 0x20, 0xca, 0xbc, 0xf2, + 0x87, 0x91, 0x12, 0xcc, 0xb0, 0xda, 0xe4, 0x41, 0x06, 0xf4, 0xa9, 0x51, 0xc5, 0x1b, 0x13, 0xcb, + 0x10, 0xf1, 0xda, 0xa3, 
0xba, 0x0c, 0x04, 0x51, 0x9c, 0x91, 0x39, 0x29, 0x38, 0x5a, 0x5b, 0x17, + 0x81, 0x69, 0x68, 0xc2, 0x24, 0xc1, 0x0c, 0xb2, 0x10, 0x27, 0xb4, 0xfc, 0x5b, 0x26, 0x36, 0x8b, + 0xaf, 0xa3, 0xec, 0xd8, 0x3c, 0x0e, 0x51, 0xe4, 0x7b, 0x31, 0xa4, 0x27, 0xa5, 0xc7, 0xbd, 0x59, + 0x8f, 0x53, 0x02, 0xd3, 0x14, 0x91, 0x92, 0xa0, 0xf7, 0x40, 0x63, 0x80, 0xd8, 0x93, 0x22, 0xf7, + 0x5e, 0x99, 0xda, 0x45, 0xaf, 0x33, 0x44, 0x99, 0xfa, 0x00, 0xdc, 0x12, 0xe5, 0x79, 0x09, 0x8c, + 0x51, 0x43, 0x6a, 0x4a, 0xad, 0x15, 0x77, 0x55, 0x1c, 0x3e, 0x87, 0x31, 0xd2, 0x3f, 0x4b, 0x60, + 0x7d, 0x3f, 0x63, 0x90, 0xa1, 0x69, 0x08, 0x15, 0x94, 0xfb, 0x40, 0xe1, 0xd2, 0x10, 0xf1, 0x42, + 0xbf, 0x64, 0x00, 0x71, 0xb4, 0xe7, 0xab, 0x2f, 0x01, 0xc0, 0x29, 0x22, 0x5c, 0x58, 0x43, 0x6e, + 0x2e, 0xb5, 0x94, 0x4e, 0xd7, 0x58, 0xd4, 0x52, 0x63, 0x3a, 0xdd, 0x81, 0x20, 0xb8, 0x97, 0x60, + 0xb9, 0x82, 0x09, 0x8c, 0x42, 0x1f, 0x32, 0xe4, 0xe1, 0x24, 0x3a, 0x6b, 0x5c, 0x6f, 0x4a, 0xad, + 0x9b, 0xee, 0xaa, 0x38, 0x3c, 0x48, 0xa2, 0x33, 0xfd, 0xa7, 0x04, 0xee, 0xd4, 0xc0, 0xd4, 0x2d, + 0xa0, 0x64, 0x69, 0x11, 0x9e, 0xf7, 0xb4, 0x08, 0x57, 0x3a, 0x9a, 0x28, 0x4e, 0x34, 0xd5, 0x78, + 0x9a, 0xb7, 0x7d, 0x1f, 0xd2, 0x13, 0x17, 0x70, 0xf7, 0xdc, 0x56, 0x9f, 0x81, 0xe5, 0x31, 0x41, + 0x90, 0xf1, 0xc6, 0x29, 0x1d, 0xab, 0x56, 0x54, 0x75, 0x0b, 0x66, 0x54, 0xed, 0x5e, 0x73, 0x4b, + 0x44, 0x0e, 0xe3, 0xe8, 0x86, 0xfc, 0x1f, 0x30, 0x8e, 0xe8, 0x2b, 0x60, 0xa5, 0xea, 0x92, 0x7e, + 0x0a, 0xee, 0xd6, 0x0c, 0x90, 0xa6, 0x38, 0xa1, 0x48, 0x1d, 0x82, 0x1b, 0x04, 0xd1, 0x2c, 0x62, + 0x62, 0x3a, 0x8f, 0x17, 0x4f, 0x67, 0x1e, 0xd1, 0x2d, 0x20, 0xae, 0x80, 0xe9, 0x0e, 0xd0, 0xea, + 0xdd, 0xfe, 0xea, 0xf6, 0x75, 0x3e, 0x2e, 0x81, 0xb5, 0xe9, 0xe8, 0x17, 0xbc, 0x02, 0xf5, 0x93, + 0x04, 0x6e, 0xff, 0x71, 0xb3, 0x55, 0x7b, 0x71, 0xe5, 0x75, 0xeb, 0xa0, 0xfd, 0x7b, 0xc7, 0xf5, + 0xee, 0xdb, 0xaf, 0xdf, 0xdf, 0xc9, 0x9b, 0xaa, 0x95, 0xaf, 0xfa, 0xf9, 0x94, 0x9c, 0x6d, 0xb1, + 0x01, 0xd4, 0x6c, 0x97, 0xbb, 0x5f, 0x75, 0xde, 0x6c, 0x5f, 0xa8, 0xdf, 0x24, 0xb0, 0x36, 0x77, + 0x2c, 0xea, 0xce, 0xd5, 0xba, 0x2f, 0x16, 0x52, 0xeb, 0x5d, 0x39, 0x9e, 0xdf, 0x07, 0xbd, 0x57, + 0xa8, 0xea, 0xea, 0x0f, 0x73, 0x55, 0xbf, 0x65, 0x9c, 0x5f, 0x5a, 0xf3, 0xed, 0xf6, 0xc5, 0xac, + 0x28, 0x3b, 0x2e, 0xa0, 0xb6, 0xd4, 0xee, 0xbf, 0x91, 0xc1, 0xc6, 0x18, 0xc7, 0x0b, 0xeb, 0xe8, + 0x6b, 0x73, 0x67, 0x7b, 0x98, 0xaf, 0xdd, 0xa1, 0xf4, 0x6a, 0xb7, 0x8c, 0x0f, 0x70, 0x04, 0x93, + 0xc0, 0xc0, 0x24, 0x30, 0x03, 0x94, 0x14, 0x4b, 0x29, 0x1e, 0xd9, 0x34, 0xa4, 0xf5, 0xaf, 0xfa, + 0x96, 0x30, 0xde, 0xcb, 0x4b, 0x03, 0xc7, 0xf9, 0x20, 0x37, 0x07, 0x1c, 0xe8, 0xf8, 0xd4, 0xe0, + 0x66, 0x6e, 0x0d, 0x2d, 0xa3, 0x4c, 0x4c, 0xbf, 0x08, 0x97, 0x91, 0xe3, 0xd3, 0x51, 0xe5, 0x32, + 0x1a, 0x5a, 0x23, 0xe1, 0xf2, 0x43, 0xde, 0xe0, 0xe7, 0xb6, 0xed, 0xf8, 0xd4, 0xb6, 0x2b, 0x27, + 0xdb, 0x1e, 0x5a, 0xb6, 0x2d, 0xdc, 0x8e, 0x96, 0x8b, 0x3a, 0x37, 0x7f, 0x05, 0x00, 0x00, 0xff, + 0xff, 0x62, 0xea, 0xe7, 0xce, 0x7c, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_link_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_link_service.pb.go new file mode 100644 index 0000000..0787a47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_link_service.pb.go @@ -0,0 +1,518 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/customer_client_link_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerClientLinkService.GetCustomerClientLink][google.ads.googleads.v1.services.CustomerClientLinkService.GetCustomerClientLink]. +type GetCustomerClientLinkRequest struct { + // The resource name of the customer client link to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerClientLinkRequest) Reset() { *m = GetCustomerClientLinkRequest{} } +func (m *GetCustomerClientLinkRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerClientLinkRequest) ProtoMessage() {} +func (*GetCustomerClientLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb, []int{0} +} +func (m *GetCustomerClientLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerClientLinkRequest.Unmarshal(m, b) +} +func (m *GetCustomerClientLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerClientLinkRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerClientLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerClientLinkRequest.Merge(dst, src) +} +func (m *GetCustomerClientLinkRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerClientLinkRequest.Size(m) +} +func (m *GetCustomerClientLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerClientLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerClientLinkRequest proto.InternalMessageInfo + +func (m *GetCustomerClientLinkRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerClientLinkService.MutateCustomerClientLink][google.ads.googleads.v1.services.CustomerClientLinkService.MutateCustomerClientLink]. +type MutateCustomerClientLinkRequest struct { + // The ID of the customer whose customer link are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The operation to perform on the individual CustomerClientLink. 
+ Operation *CustomerClientLinkOperation `protobuf:"bytes,2,opt,name=operation,proto3" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerClientLinkRequest) Reset() { *m = MutateCustomerClientLinkRequest{} } +func (m *MutateCustomerClientLinkRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerClientLinkRequest) ProtoMessage() {} +func (*MutateCustomerClientLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb, []int{1} +} +func (m *MutateCustomerClientLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerClientLinkRequest.Unmarshal(m, b) +} +func (m *MutateCustomerClientLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerClientLinkRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerClientLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerClientLinkRequest.Merge(dst, src) +} +func (m *MutateCustomerClientLinkRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerClientLinkRequest.Size(m) +} +func (m *MutateCustomerClientLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerClientLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerClientLinkRequest proto.InternalMessageInfo + +func (m *MutateCustomerClientLinkRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerClientLinkRequest) GetOperation() *CustomerClientLinkOperation { + if m != nil { + return m.Operation + } + return nil +} + +// A single operation (create, update) on a CustomerClientLink. +type CustomerClientLinkOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerClientLinkOperation_Create + // *CustomerClientLinkOperation_Update + Operation isCustomerClientLinkOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerClientLinkOperation) Reset() { *m = CustomerClientLinkOperation{} } +func (m *CustomerClientLinkOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerClientLinkOperation) ProtoMessage() {} +func (*CustomerClientLinkOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb, []int{2} +} +func (m *CustomerClientLinkOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerClientLinkOperation.Unmarshal(m, b) +} +func (m *CustomerClientLinkOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerClientLinkOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerClientLinkOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerClientLinkOperation.Merge(dst, src) +} +func (m *CustomerClientLinkOperation) XXX_Size() int { + return xxx_messageInfo_CustomerClientLinkOperation.Size(m) +} +func (m *CustomerClientLinkOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerClientLinkOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerClientLinkOperation proto.InternalMessageInfo + +func (m *CustomerClientLinkOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCustomerClientLinkOperation_Operation interface { + isCustomerClientLinkOperation_Operation() +} + +type CustomerClientLinkOperation_Create struct { + Create *resources.CustomerClientLink `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomerClientLinkOperation_Update struct { + Update *resources.CustomerClientLink `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +func (*CustomerClientLinkOperation_Create) isCustomerClientLinkOperation_Operation() {} + +func (*CustomerClientLinkOperation_Update) isCustomerClientLinkOperation_Operation() {} + +func (m *CustomerClientLinkOperation) GetOperation() isCustomerClientLinkOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerClientLinkOperation) GetCreate() *resources.CustomerClientLink { + if x, ok := m.GetOperation().(*CustomerClientLinkOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomerClientLinkOperation) GetUpdate() *resources.CustomerClientLink { + if x, ok := m.GetOperation().(*CustomerClientLinkOperation_Update); ok { + return x.Update + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerClientLinkOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerClientLinkOperation_OneofMarshaler, _CustomerClientLinkOperation_OneofUnmarshaler, _CustomerClientLinkOperation_OneofSizer, []interface{}{ + (*CustomerClientLinkOperation_Create)(nil), + (*CustomerClientLinkOperation_Update)(nil), + } +} + +func _CustomerClientLinkOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerClientLinkOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerClientLinkOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomerClientLinkOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomerClientLinkOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerClientLinkOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerClientLinkOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerClientLink) + err := b.DecodeMessage(msg) + m.Operation = &CustomerClientLinkOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerClientLink) + err := b.DecodeMessage(msg) + m.Operation = &CustomerClientLinkOperation_Update{msg} + return true, err + default: + return false, nil + } +} + +func _CustomerClientLinkOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerClientLinkOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerClientLinkOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerClientLinkOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a CustomerClientLink mutate. +type MutateCustomerClientLinkResponse struct { + // A result that identifies the resource affected by the mutate request. 
+ Result *MutateCustomerClientLinkResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerClientLinkResponse) Reset() { *m = MutateCustomerClientLinkResponse{} } +func (m *MutateCustomerClientLinkResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerClientLinkResponse) ProtoMessage() {} +func (*MutateCustomerClientLinkResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb, []int{3} +} +func (m *MutateCustomerClientLinkResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerClientLinkResponse.Unmarshal(m, b) +} +func (m *MutateCustomerClientLinkResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerClientLinkResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerClientLinkResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerClientLinkResponse.Merge(dst, src) +} +func (m *MutateCustomerClientLinkResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerClientLinkResponse.Size(m) +} +func (m *MutateCustomerClientLinkResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerClientLinkResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerClientLinkResponse proto.InternalMessageInfo + +func (m *MutateCustomerClientLinkResponse) GetResult() *MutateCustomerClientLinkResult { + if m != nil { + return m.Result + } + return nil +} + +// The result for a single customer client link mutate. +type MutateCustomerClientLinkResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerClientLinkResult) Reset() { *m = MutateCustomerClientLinkResult{} } +func (m *MutateCustomerClientLinkResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerClientLinkResult) ProtoMessage() {} +func (*MutateCustomerClientLinkResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb, []int{4} +} +func (m *MutateCustomerClientLinkResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerClientLinkResult.Unmarshal(m, b) +} +func (m *MutateCustomerClientLinkResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerClientLinkResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerClientLinkResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerClientLinkResult.Merge(dst, src) +} +func (m *MutateCustomerClientLinkResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerClientLinkResult.Size(m) +} +func (m *MutateCustomerClientLinkResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerClientLinkResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerClientLinkResult proto.InternalMessageInfo + +func (m *MutateCustomerClientLinkResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerClientLinkRequest)(nil), "google.ads.googleads.v1.services.GetCustomerClientLinkRequest") + 
proto.RegisterType((*MutateCustomerClientLinkRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerClientLinkRequest") + proto.RegisterType((*CustomerClientLinkOperation)(nil), "google.ads.googleads.v1.services.CustomerClientLinkOperation") + proto.RegisterType((*MutateCustomerClientLinkResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerClientLinkResponse") + proto.RegisterType((*MutateCustomerClientLinkResult)(nil), "google.ads.googleads.v1.services.MutateCustomerClientLinkResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerClientLinkServiceClient is the client API for CustomerClientLinkService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerClientLinkServiceClient interface { + // Returns the requested CustomerClientLink in full detail. + GetCustomerClientLink(ctx context.Context, in *GetCustomerClientLinkRequest, opts ...grpc.CallOption) (*resources.CustomerClientLink, error) + // Creates or updates a customer client link. Operation statuses are returned. + MutateCustomerClientLink(ctx context.Context, in *MutateCustomerClientLinkRequest, opts ...grpc.CallOption) (*MutateCustomerClientLinkResponse, error) +} + +type customerClientLinkServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerClientLinkServiceClient(cc *grpc.ClientConn) CustomerClientLinkServiceClient { + return &customerClientLinkServiceClient{cc} +} + +func (c *customerClientLinkServiceClient) GetCustomerClientLink(ctx context.Context, in *GetCustomerClientLinkRequest, opts ...grpc.CallOption) (*resources.CustomerClientLink, error) { + out := new(resources.CustomerClientLink) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerClientLinkService/GetCustomerClientLink", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerClientLinkServiceClient) MutateCustomerClientLink(ctx context.Context, in *MutateCustomerClientLinkRequest, opts ...grpc.CallOption) (*MutateCustomerClientLinkResponse, error) { + out := new(MutateCustomerClientLinkResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerClientLinkService/MutateCustomerClientLink", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerClientLinkServiceServer is the server API for CustomerClientLinkService service. +type CustomerClientLinkServiceServer interface { + // Returns the requested CustomerClientLink in full detail. + GetCustomerClientLink(context.Context, *GetCustomerClientLinkRequest) (*resources.CustomerClientLink, error) + // Creates or updates a customer client link. Operation statuses are returned. 
+ MutateCustomerClientLink(context.Context, *MutateCustomerClientLinkRequest) (*MutateCustomerClientLinkResponse, error) +} + +func RegisterCustomerClientLinkServiceServer(s *grpc.Server, srv CustomerClientLinkServiceServer) { + s.RegisterService(&_CustomerClientLinkService_serviceDesc, srv) +} + +func _CustomerClientLinkService_GetCustomerClientLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerClientLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerClientLinkServiceServer).GetCustomerClientLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerClientLinkService/GetCustomerClientLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerClientLinkServiceServer).GetCustomerClientLink(ctx, req.(*GetCustomerClientLinkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerClientLinkService_MutateCustomerClientLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerClientLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerClientLinkServiceServer).MutateCustomerClientLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerClientLinkService/MutateCustomerClientLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerClientLinkServiceServer).MutateCustomerClientLink(ctx, req.(*MutateCustomerClientLinkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerClientLinkService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerClientLinkService", + HandlerType: (*CustomerClientLinkServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerClientLink", + Handler: _CustomerClientLinkService_GetCustomerClientLink_Handler, + }, + { + MethodName: "MutateCustomerClientLink", + Handler: _CustomerClientLinkService_MutateCustomerClientLink_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_client_link_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_client_link_service.proto", fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb) +} + +var fileDescriptor_customer_client_link_service_ec1d3dfd8da034fb = []byte{ + // 612 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x4d, 0x56, 0x0a, 0x3b, 0xd5, 0x4b, 0x40, 0xa8, 0x75, 0xe9, 0x96, 0xb8, 0x87, 0xa5, + 0x87, 0x09, 0xad, 0x2c, 0x4a, 0xd6, 0x8a, 0x69, 0xd1, 0xae, 0xe0, 0xba, 0x4b, 0x84, 0x22, 0x5a, + 0x28, 0xb3, 0xcd, 0x6c, 0x08, 0x4d, 0x32, 0x71, 0x66, 0x52, 0x0f, 0xeb, 0x5e, 0x04, 0x0f, 0x9e, + 0xfd, 0x00, 0x82, 0x47, 0x3f, 0x8a, 0xe0, 0x49, 0x3f, 0x82, 0x78, 0xf0, 0x53, 0x48, 0x32, 0x33, + 0xe9, 0xea, 0x36, 0xad, 0xec, 0xde, 0x5e, 0x26, 0xef, 0xfd, 0xde, 0xfb, 0xbf, 0x37, 0x6f, 0x40, + 0xdf, 0x27, 0xc4, 0x0f, 0xb1, 0x85, 0x3c, 0x66, 0x09, 0x33, 0xb3, 0x66, 0x6d, 0x8b, 0x61, 0x3a, + 0x0b, 0x26, 0x98, 0x59, 0x93, 0x94, 0x71, 0x12, 0x61, 0x3a, 0x9e, 
0x84, 0x01, 0x8e, 0xf9, 0x38, + 0x0c, 0xe2, 0xe9, 0x58, 0xfe, 0x85, 0x09, 0x25, 0x9c, 0x18, 0x4d, 0x11, 0x09, 0x91, 0xc7, 0x60, + 0x01, 0x81, 0xb3, 0x36, 0x54, 0x90, 0xfa, 0xfd, 0xb2, 0x34, 0x14, 0x33, 0x92, 0xd2, 0xb2, 0x3c, + 0x82, 0x5f, 0xdf, 0x50, 0xd1, 0x49, 0x60, 0xa1, 0x38, 0x26, 0x1c, 0xf1, 0x80, 0xc4, 0x4c, 0xfe, + 0x95, 0xd9, 0xad, 0xfc, 0xeb, 0x28, 0x3d, 0xb6, 0x8e, 0x03, 0x1c, 0x7a, 0xe3, 0x08, 0x31, 0x15, + 0xdf, 0xf8, 0xd7, 0xe3, 0x0d, 0x45, 0x49, 0x82, 0xa9, 0x24, 0x98, 0x7d, 0xb0, 0x31, 0xc0, 0xbc, + 0x2f, 0x0b, 0xe8, 0xe7, 0xf9, 0x9f, 0x06, 0xf1, 0xd4, 0xc5, 0xaf, 0x53, 0xcc, 0xb8, 0x71, 0x1b, + 0x5c, 0x57, 0x75, 0x8e, 0x63, 0x14, 0xe1, 0x9a, 0xd6, 0xd4, 0xb6, 0xd7, 0xdd, 0x6b, 0xea, 0xf0, + 0x19, 0x8a, 0xb0, 0xf9, 0x49, 0x03, 0x9b, 0xfb, 0x29, 0x47, 0x1c, 0x97, 0x83, 0x36, 0x41, 0xb5, + 0x90, 0x19, 0x78, 0x12, 0x03, 0xd4, 0xd1, 0x13, 0xcf, 0x78, 0x05, 0xd6, 0x49, 0x82, 0x69, 0xae, + 0xaf, 0xa6, 0x37, 0xb5, 0xed, 0x6a, 0xa7, 0x0b, 0x57, 0x75, 0x17, 0x9e, 0x4f, 0x78, 0xa0, 0x20, + 0xee, 0x9c, 0x67, 0x7e, 0xd0, 0xc1, 0xad, 0x25, 0xae, 0xc6, 0x2e, 0xa8, 0xa6, 0x89, 0x87, 0x38, + 0xce, 0x7b, 0x57, 0xbb, 0x9a, 0xa7, 0xaf, 0xab, 0xf4, 0xaa, 0x79, 0xf0, 0x71, 0xd6, 0xde, 0x7d, + 0xc4, 0xa6, 0x2e, 0x10, 0xee, 0x99, 0x6d, 0x1c, 0x80, 0xca, 0x84, 0x62, 0xc4, 0x45, 0x73, 0xaa, + 0x9d, 0x9d, 0xd2, 0xb2, 0x8b, 0x91, 0x2f, 0xa8, 0x7b, 0xef, 0x8a, 0x2b, 0x31, 0x19, 0x50, 0xe0, + 0x65, 0x1f, 0x2e, 0x0e, 0x14, 0x98, 0x5e, 0xf5, 0x4c, 0x6f, 0xcd, 0xb7, 0xa0, 0x59, 0x3e, 0x2c, + 0x96, 0x90, 0x98, 0x61, 0xe3, 0x05, 0xa8, 0x50, 0xcc, 0xd2, 0x90, 0x4b, 0x49, 0x0f, 0x57, 0x4f, + 0x62, 0x09, 0x33, 0x0d, 0xb9, 0x2b, 0x79, 0xe6, 0x23, 0xd0, 0x58, 0xee, 0xf9, 0x5f, 0x57, 0xae, + 0xf3, 0x63, 0x0d, 0xdc, 0x3c, 0x4f, 0x78, 0x2e, 0x8a, 0x31, 0xbe, 0x69, 0xe0, 0xc6, 0xc2, 0x6b, + 0x6d, 0x3c, 0x58, 0x2d, 0x64, 0xd9, 0x3e, 0xd4, 0x2f, 0x36, 0x0a, 0xb3, 0xfb, 0xee, 0xfb, 0xcf, + 0x8f, 0xfa, 0x5d, 0x63, 0x27, 0x5b, 0xfc, 0x93, 0xbf, 0xe4, 0x75, 0xd5, 0x0e, 0x30, 0xab, 0x55, + 0xbc, 0x04, 0xf3, 0x50, 0x66, 0xb5, 0x4e, 0x8d, 0x5f, 0x1a, 0xa8, 0x95, 0x75, 0xcd, 0x70, 0x2e, + 0x33, 0x1b, 0xa1, 0xaa, 0x77, 0xa9, 0xf1, 0xe6, 0x57, 0xc6, 0xec, 0xe7, 0x12, 0xbb, 0xe6, 0xbd, + 0x4c, 0xe2, 0x5c, 0xd3, 0xc9, 0x99, 0xad, 0xef, 0xb6, 0x4e, 0x17, 0x29, 0xb4, 0xa3, 0x9c, 0x6d, + 0x6b, 0xad, 0xde, 0x7b, 0x1d, 0x6c, 0x4d, 0x48, 0xb4, 0xb2, 0x9c, 0x5e, 0xa3, 0x74, 0xf8, 0x87, + 0xd9, 0xb2, 0x1e, 0x6a, 0x2f, 0xf7, 0x24, 0xc3, 0x27, 0x21, 0x8a, 0x7d, 0x48, 0xa8, 0x6f, 0xf9, + 0x38, 0xce, 0x57, 0x59, 0xbd, 0xc3, 0x49, 0xc0, 0xca, 0x5f, 0xff, 0x5d, 0x65, 0x7c, 0xd6, 0xd7, + 0x06, 0x8e, 0xf3, 0x45, 0x6f, 0x0e, 0x04, 0xd0, 0xf1, 0x18, 0x14, 0x66, 0x66, 0x0d, 0xdb, 0x50, + 0x26, 0x66, 0x5f, 0x95, 0xcb, 0xc8, 0xf1, 0xd8, 0xa8, 0x70, 0x19, 0x0d, 0xdb, 0x23, 0xe5, 0xf2, + 0x5b, 0xdf, 0x12, 0xe7, 0xb6, 0xed, 0x78, 0xcc, 0xb6, 0x0b, 0x27, 0xdb, 0x1e, 0xb6, 0x6d, 0x5b, + 0xb9, 0x1d, 0x55, 0xf2, 0x3a, 0xef, 0xfc, 0x09, 0x00, 0x00, 0xff, 0xff, 0xea, 0x97, 0x0d, 0xdb, + 0xa4, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_service.pb.go new file mode 100644 index 0000000..4f17ded --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_client_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/customer_client_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerClientService.GetCustomerClient][google.ads.googleads.v1.services.CustomerClientService.GetCustomerClient]. +type GetCustomerClientRequest struct { + // The resource name of the client to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerClientRequest) Reset() { *m = GetCustomerClientRequest{} } +func (m *GetCustomerClientRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerClientRequest) ProtoMessage() {} +func (*GetCustomerClientRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_client_service_bf7d81d922f0354a, []int{0} +} +func (m *GetCustomerClientRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerClientRequest.Unmarshal(m, b) +} +func (m *GetCustomerClientRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerClientRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerClientRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerClientRequest.Merge(dst, src) +} +func (m *GetCustomerClientRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerClientRequest.Size(m) +} +func (m *GetCustomerClientRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerClientRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerClientRequest proto.InternalMessageInfo + +func (m *GetCustomerClientRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerClientRequest)(nil), "google.ads.googleads.v1.services.GetCustomerClientRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerClientServiceClient is the client API for CustomerClientService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerClientServiceClient interface { + // Returns the requested client in full detail. 
+ GetCustomerClient(ctx context.Context, in *GetCustomerClientRequest, opts ...grpc.CallOption) (*resources.CustomerClient, error) +} + +type customerClientServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerClientServiceClient(cc *grpc.ClientConn) CustomerClientServiceClient { + return &customerClientServiceClient{cc} +} + +func (c *customerClientServiceClient) GetCustomerClient(ctx context.Context, in *GetCustomerClientRequest, opts ...grpc.CallOption) (*resources.CustomerClient, error) { + out := new(resources.CustomerClient) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerClientService/GetCustomerClient", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerClientServiceServer is the server API for CustomerClientService service. +type CustomerClientServiceServer interface { + // Returns the requested client in full detail. + GetCustomerClient(context.Context, *GetCustomerClientRequest) (*resources.CustomerClient, error) +} + +func RegisterCustomerClientServiceServer(s *grpc.Server, srv CustomerClientServiceServer) { + s.RegisterService(&_CustomerClientService_serviceDesc, srv) +} + +func _CustomerClientService_GetCustomerClient_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerClientRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerClientServiceServer).GetCustomerClient(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerClientService/GetCustomerClient", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerClientServiceServer).GetCustomerClient(ctx, req.(*GetCustomerClientRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerClientService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerClientService", + HandlerType: (*CustomerClientServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerClient", + Handler: _CustomerClientService_GetCustomerClient_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_client_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_client_service.proto", fileDescriptor_customer_client_service_bf7d81d922f0354a) +} + +var fileDescriptor_customer_client_service_bf7d81d922f0354a = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xbf, 0x4a, 0xc3, 0x40, + 0x1c, 0xc7, 0x49, 0x04, 0xc1, 0xa0, 0x83, 0x01, 0xa1, 0x04, 0x87, 0x52, 0x3b, 0x48, 0x87, 0x3b, + 0xce, 0x0e, 0xe2, 0x89, 0x4a, 0xda, 0xa1, 0x4e, 0x52, 0x2a, 0x74, 0x90, 0x40, 0x39, 0x93, 0x23, + 0x04, 0x9a, 0xbb, 0x7a, 0xbf, 0x6b, 0x17, 0x71, 0xd0, 0x57, 0xf0, 0x0d, 0x1c, 0x7d, 0x07, 0x5f, + 0xc0, 0xd5, 0xc1, 0x17, 0x70, 0xf2, 0x29, 0x24, 0xbd, 0x5c, 0xa0, 0xd8, 0xd0, 0xed, 0xcb, 0xfd, + 0xbe, 0x9f, 0xdf, 0x9f, 0x6f, 0xe2, 0x5d, 0xa6, 0x52, 0xa6, 0x53, 0x8e, 0x59, 0x02, 0xd8, 0xc8, + 0x42, 0x2d, 0x08, 0x06, 0xae, 0x16, 0x59, 0xcc, 0x01, 0xc7, 0x73, 0xd0, 0x32, 0xe7, 0x6a, 0x12, + 0x4f, 0x33, 0x2e, 0xf4, 0xa4, 0x2c, 0xa0, 0x99, 0x92, 0x5a, 0xfa, 0x4d, 0x03, 0x21, 0x96, 0x00, + 0xaa, 0x78, 0xb4, 0x20, 0xc8, 0xf2, 0xc1, 0x69, 0xdd, 0x04, 0xc5, 0x41, 0xce, 0xd5, 0x9a, 0x11, + 0xa6, 0x75, 
0x70, 0x68, 0xc1, 0x59, 0x86, 0x99, 0x10, 0x52, 0x33, 0x9d, 0x49, 0x01, 0xa6, 0xda, + 0xba, 0xf2, 0x1a, 0x03, 0xae, 0xfb, 0x25, 0xd9, 0x5f, 0x82, 0x23, 0xfe, 0x30, 0xe7, 0xa0, 0xfd, + 0x23, 0x6f, 0xcf, 0x36, 0x9f, 0x08, 0x96, 0xf3, 0x86, 0xd3, 0x74, 0x8e, 0x77, 0x46, 0xbb, 0xf6, + 0xf1, 0x86, 0xe5, 0xfc, 0xe4, 0xdb, 0xf1, 0x0e, 0x56, 0xf1, 0x5b, 0xb3, 0xb2, 0xff, 0xe1, 0x78, + 0xfb, 0xff, 0x7a, 0xfb, 0x14, 0x6d, 0x3a, 0x15, 0xd5, 0x2d, 0x14, 0x90, 0x5a, 0xb6, 0x0a, 0x01, + 0xad, 0x92, 0xad, 0xb3, 0x97, 0xaf, 0x9f, 0x57, 0xb7, 0xeb, 0x93, 0x22, 0xaa, 0xc7, 0x95, 0x73, + 0x2e, 0x6c, 0x5e, 0x80, 0x3b, 0x55, 0x76, 0x06, 0x03, 0xdc, 0x79, 0xea, 0x3d, 0xbb, 0x5e, 0x3b, + 0x96, 0xf9, 0xc6, 0x7d, 0x7b, 0xc1, 0xda, 0xfb, 0x87, 0x45, 0xbe, 0x43, 0xe7, 0xee, 0xba, 0xe4, + 0x53, 0x39, 0x65, 0x22, 0x45, 0x52, 0xa5, 0x38, 0xe5, 0x62, 0x99, 0xbe, 0xfd, 0x90, 0xb3, 0x0c, + 0xea, 0xff, 0x9c, 0x73, 0x2b, 0xde, 0xdc, 0xad, 0x41, 0x18, 0xbe, 0xbb, 0xcd, 0x81, 0x69, 0x18, + 0x26, 0x80, 0x8c, 0x2c, 0xd4, 0x98, 0xa0, 0x72, 0x30, 0x7c, 0x5a, 0x4b, 0x14, 0x26, 0x10, 0x55, + 0x96, 0x68, 0x4c, 0x22, 0x6b, 0xf9, 0x75, 0xdb, 0xe6, 0x9d, 0xd2, 0x30, 0x01, 0x4a, 0x2b, 0x13, + 0xa5, 0x63, 0x42, 0xa9, 0xb5, 0xdd, 0x6f, 0x2f, 0xf7, 0xec, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, + 0x14, 0x0f, 0x26, 0x19, 0xe0, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_extension_setting_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_extension_setting_service.pb.go new file mode 100644 index 0000000..9d0df43 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_extension_setting_service.pb.go @@ -0,0 +1,599 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_extension_setting_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [CustomerExtensionSettingService.GetCustomerExtensionSetting][google.ads.googleads.v1.services.CustomerExtensionSettingService.GetCustomerExtensionSetting]. +type GetCustomerExtensionSettingRequest struct { + // The resource name of the customer extension setting to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerExtensionSettingRequest) Reset() { *m = GetCustomerExtensionSettingRequest{} } +func (m *GetCustomerExtensionSettingRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerExtensionSettingRequest) ProtoMessage() {} +func (*GetCustomerExtensionSettingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343, []int{0} +} +func (m *GetCustomerExtensionSettingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerExtensionSettingRequest.Unmarshal(m, b) +} +func (m *GetCustomerExtensionSettingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerExtensionSettingRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerExtensionSettingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerExtensionSettingRequest.Merge(dst, src) +} +func (m *GetCustomerExtensionSettingRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerExtensionSettingRequest.Size(m) +} +func (m *GetCustomerExtensionSettingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerExtensionSettingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerExtensionSettingRequest proto.InternalMessageInfo + +func (m *GetCustomerExtensionSettingRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [CustomerExtensionSettingService.MutateCustomerExtensionSettings][google.ads.googleads.v1.services.CustomerExtensionSettingService.MutateCustomerExtensionSettings]. +type MutateCustomerExtensionSettingsRequest struct { + // The ID of the customer whose customer extension settings are being + // modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual customer extension + // settings. + Operations []*CustomerExtensionSettingOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerExtensionSettingsRequest) Reset() { + *m = MutateCustomerExtensionSettingsRequest{} +} +func (m *MutateCustomerExtensionSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerExtensionSettingsRequest) ProtoMessage() {} +func (*MutateCustomerExtensionSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343, []int{1} +} +func (m *MutateCustomerExtensionSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerExtensionSettingsRequest.Unmarshal(m, b) +} +func (m *MutateCustomerExtensionSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerExtensionSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerExtensionSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerExtensionSettingsRequest.Merge(dst, src) +} +func (m *MutateCustomerExtensionSettingsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerExtensionSettingsRequest.Size(m) +} +func (m *MutateCustomerExtensionSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerExtensionSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerExtensionSettingsRequest proto.InternalMessageInfo + +func (m *MutateCustomerExtensionSettingsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerExtensionSettingsRequest) GetOperations() []*CustomerExtensionSettingOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCustomerExtensionSettingsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCustomerExtensionSettingsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a customer extension setting. +type CustomerExtensionSettingOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerExtensionSettingOperation_Create + // *CustomerExtensionSettingOperation_Update + // *CustomerExtensionSettingOperation_Remove + Operation isCustomerExtensionSettingOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerExtensionSettingOperation) Reset() { *m = CustomerExtensionSettingOperation{} } +func (m *CustomerExtensionSettingOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerExtensionSettingOperation) ProtoMessage() {} +func (*CustomerExtensionSettingOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343, []int{2} +} +func (m *CustomerExtensionSettingOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerExtensionSettingOperation.Unmarshal(m, b) +} +func (m *CustomerExtensionSettingOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerExtensionSettingOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerExtensionSettingOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerExtensionSettingOperation.Merge(dst, src) +} +func (m *CustomerExtensionSettingOperation) XXX_Size() int { + return xxx_messageInfo_CustomerExtensionSettingOperation.Size(m) +} +func (m *CustomerExtensionSettingOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerExtensionSettingOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerExtensionSettingOperation proto.InternalMessageInfo + +func (m *CustomerExtensionSettingOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCustomerExtensionSettingOperation_Operation interface { + isCustomerExtensionSettingOperation_Operation() +} + +type CustomerExtensionSettingOperation_Create struct { + Create *resources.CustomerExtensionSetting `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomerExtensionSettingOperation_Update struct { + Update *resources.CustomerExtensionSetting `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CustomerExtensionSettingOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CustomerExtensionSettingOperation_Create) isCustomerExtensionSettingOperation_Operation() {} + +func (*CustomerExtensionSettingOperation_Update) isCustomerExtensionSettingOperation_Operation() {} + +func (*CustomerExtensionSettingOperation_Remove) isCustomerExtensionSettingOperation_Operation() {} + +func (m *CustomerExtensionSettingOperation) GetOperation() isCustomerExtensionSettingOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerExtensionSettingOperation) GetCreate() *resources.CustomerExtensionSetting { + if x, ok := m.GetOperation().(*CustomerExtensionSettingOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomerExtensionSettingOperation) GetUpdate() *resources.CustomerExtensionSetting { + if x, ok := m.GetOperation().(*CustomerExtensionSettingOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CustomerExtensionSettingOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CustomerExtensionSettingOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerExtensionSettingOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerExtensionSettingOperation_OneofMarshaler, _CustomerExtensionSettingOperation_OneofUnmarshaler, _CustomerExtensionSettingOperation_OneofSizer, []interface{}{ + (*CustomerExtensionSettingOperation_Create)(nil), + (*CustomerExtensionSettingOperation_Update)(nil), + (*CustomerExtensionSettingOperation_Remove)(nil), + } +} + +func _CustomerExtensionSettingOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerExtensionSettingOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomerExtensionSettingOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CustomerExtensionSettingOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CustomerExtensionSettingOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerExtensionSettingOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerExtensionSettingOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &CustomerExtensionSettingOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerExtensionSetting) + err := b.DecodeMessage(msg) + m.Operation = &CustomerExtensionSettingOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CustomerExtensionSettingOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CustomerExtensionSettingOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerExtensionSettingOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerExtensionSettingOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerExtensionSettingOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerExtensionSettingOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a customer extension setting mutate. +type MutateCustomerExtensionSettingsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. 
+ PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. + Results []*MutateCustomerExtensionSettingResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerExtensionSettingsResponse) Reset() { + *m = MutateCustomerExtensionSettingsResponse{} +} +func (m *MutateCustomerExtensionSettingsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerExtensionSettingsResponse) ProtoMessage() {} +func (*MutateCustomerExtensionSettingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343, []int{3} +} +func (m *MutateCustomerExtensionSettingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerExtensionSettingsResponse.Unmarshal(m, b) +} +func (m *MutateCustomerExtensionSettingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerExtensionSettingsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerExtensionSettingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerExtensionSettingsResponse.Merge(dst, src) +} +func (m *MutateCustomerExtensionSettingsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerExtensionSettingsResponse.Size(m) +} +func (m *MutateCustomerExtensionSettingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerExtensionSettingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerExtensionSettingsResponse proto.InternalMessageInfo + +func (m *MutateCustomerExtensionSettingsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCustomerExtensionSettingsResponse) GetResults() []*MutateCustomerExtensionSettingResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the customer extension setting mutate. +type MutateCustomerExtensionSettingResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerExtensionSettingResult) Reset() { *m = MutateCustomerExtensionSettingResult{} } +func (m *MutateCustomerExtensionSettingResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerExtensionSettingResult) ProtoMessage() {} +func (*MutateCustomerExtensionSettingResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343, []int{4} +} +func (m *MutateCustomerExtensionSettingResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerExtensionSettingResult.Unmarshal(m, b) +} +func (m *MutateCustomerExtensionSettingResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerExtensionSettingResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerExtensionSettingResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerExtensionSettingResult.Merge(dst, src) +} +func (m *MutateCustomerExtensionSettingResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerExtensionSettingResult.Size(m) +} +func (m *MutateCustomerExtensionSettingResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerExtensionSettingResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerExtensionSettingResult proto.InternalMessageInfo + +func (m *MutateCustomerExtensionSettingResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerExtensionSettingRequest)(nil), "google.ads.googleads.v1.services.GetCustomerExtensionSettingRequest") + proto.RegisterType((*MutateCustomerExtensionSettingsRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerExtensionSettingsRequest") + proto.RegisterType((*CustomerExtensionSettingOperation)(nil), "google.ads.googleads.v1.services.CustomerExtensionSettingOperation") + proto.RegisterType((*MutateCustomerExtensionSettingsResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerExtensionSettingsResponse") + proto.RegisterType((*MutateCustomerExtensionSettingResult)(nil), "google.ads.googleads.v1.services.MutateCustomerExtensionSettingResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerExtensionSettingServiceClient is the client API for CustomerExtensionSettingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerExtensionSettingServiceClient interface { + // Returns the requested customer extension setting in full detail. + GetCustomerExtensionSetting(ctx context.Context, in *GetCustomerExtensionSettingRequest, opts ...grpc.CallOption) (*resources.CustomerExtensionSetting, error) + // Creates, updates, or removes customer extension settings. Operation + // statuses are returned. 
+ MutateCustomerExtensionSettings(ctx context.Context, in *MutateCustomerExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateCustomerExtensionSettingsResponse, error) +} + +type customerExtensionSettingServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerExtensionSettingServiceClient(cc *grpc.ClientConn) CustomerExtensionSettingServiceClient { + return &customerExtensionSettingServiceClient{cc} +} + +func (c *customerExtensionSettingServiceClient) GetCustomerExtensionSetting(ctx context.Context, in *GetCustomerExtensionSettingRequest, opts ...grpc.CallOption) (*resources.CustomerExtensionSetting, error) { + out := new(resources.CustomerExtensionSetting) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerExtensionSettingService/GetCustomerExtensionSetting", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerExtensionSettingServiceClient) MutateCustomerExtensionSettings(ctx context.Context, in *MutateCustomerExtensionSettingsRequest, opts ...grpc.CallOption) (*MutateCustomerExtensionSettingsResponse, error) { + out := new(MutateCustomerExtensionSettingsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerExtensionSettingService/MutateCustomerExtensionSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerExtensionSettingServiceServer is the server API for CustomerExtensionSettingService service. +type CustomerExtensionSettingServiceServer interface { + // Returns the requested customer extension setting in full detail. + GetCustomerExtensionSetting(context.Context, *GetCustomerExtensionSettingRequest) (*resources.CustomerExtensionSetting, error) + // Creates, updates, or removes customer extension settings. Operation + // statuses are returned. 
+ MutateCustomerExtensionSettings(context.Context, *MutateCustomerExtensionSettingsRequest) (*MutateCustomerExtensionSettingsResponse, error) +} + +func RegisterCustomerExtensionSettingServiceServer(s *grpc.Server, srv CustomerExtensionSettingServiceServer) { + s.RegisterService(&_CustomerExtensionSettingService_serviceDesc, srv) +} + +func _CustomerExtensionSettingService_GetCustomerExtensionSetting_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerExtensionSettingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerExtensionSettingServiceServer).GetCustomerExtensionSetting(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerExtensionSettingService/GetCustomerExtensionSetting", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerExtensionSettingServiceServer).GetCustomerExtensionSetting(ctx, req.(*GetCustomerExtensionSettingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerExtensionSettingService_MutateCustomerExtensionSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerExtensionSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerExtensionSettingServiceServer).MutateCustomerExtensionSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerExtensionSettingService/MutateCustomerExtensionSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerExtensionSettingServiceServer).MutateCustomerExtensionSettings(ctx, req.(*MutateCustomerExtensionSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerExtensionSettingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerExtensionSettingService", + HandlerType: (*CustomerExtensionSettingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerExtensionSetting", + Handler: _CustomerExtensionSettingService_GetCustomerExtensionSetting_Handler, + }, + { + MethodName: "MutateCustomerExtensionSettings", + Handler: _CustomerExtensionSettingService_MutateCustomerExtensionSettings_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_extension_setting_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_extension_setting_service.proto", fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343) +} + +var fileDescriptor_customer_extension_setting_service_7aa4f56bacfec343 = []byte{ + // 722 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0xcf, 0x4e, 0xd4, 0x40, + 0x1c, 0xc7, 0x6d, 0x21, 0x28, 0xb3, 0xa8, 0xc9, 0x18, 0xe3, 0x66, 0x35, 0xb2, 0xd6, 0x8d, 0x90, + 0x3d, 0xb4, 0xd9, 0xf5, 0xd6, 0x0d, 0x26, 0xbb, 0x2b, 0x0b, 0x1b, 0x83, 0x90, 0x12, 0x39, 0x98, + 0x4d, 0xea, 0xd0, 0x0e, 0x4d, 0x43, 0xdb, 0xa9, 0x33, 0xd3, 0x55, 0x42, 0x38, 0xe8, 0xd5, 0x78, + 0xf2, 0x0d, 0x3c, 0x7a, 0xf7, 0x25, 0xbc, 0xf2, 0x0a, 0x7a, 0xf1, 0x19, 0x3c, 0x98, 0x76, 0x3a, + 0x15, 0x30, 0xa5, 
0x24, 0x70, 0x9b, 0xfe, 0xe6, 0xdb, 0xcf, 0xef, 0x6f, 0x7f, 0x05, 0x63, 0x8f, + 0x10, 0x2f, 0xc0, 0x06, 0x72, 0x99, 0x21, 0x8e, 0xe9, 0x69, 0xda, 0x31, 0x18, 0xa6, 0x53, 0xdf, + 0xc1, 0xcc, 0x70, 0x12, 0xc6, 0x49, 0x88, 0xa9, 0x8d, 0xdf, 0x73, 0x1c, 0x31, 0x9f, 0x44, 0x36, + 0xc3, 0x9c, 0xfb, 0x91, 0x67, 0xe7, 0x1a, 0x3d, 0xa6, 0x84, 0x13, 0xd8, 0x14, 0xef, 0xeb, 0xc8, + 0x65, 0x7a, 0x81, 0xd2, 0xa7, 0x1d, 0x5d, 0xa2, 0x1a, 0x83, 0x32, 0x67, 0x14, 0x33, 0x92, 0xd0, + 0xf3, 0xbd, 0x09, 0x2f, 0x8d, 0x07, 0x92, 0x11, 0xfb, 0x06, 0x8a, 0x22, 0xc2, 0x11, 0xf7, 0x49, + 0xc4, 0xf2, 0xdb, 0x3c, 0x06, 0x23, 0x7b, 0xda, 0x4d, 0xf6, 0x8c, 0x3d, 0x1f, 0x07, 0xae, 0x1d, + 0x22, 0xb6, 0x9f, 0x2b, 0x1e, 0x9e, 0x55, 0xbc, 0xa3, 0x28, 0x8e, 0x31, 0x95, 0x84, 0x7b, 0xf9, + 0x3d, 0x8d, 0x1d, 0x83, 0x71, 0xc4, 0x93, 0xfc, 0x42, 0x1b, 0x03, 0x6d, 0x0d, 0xf3, 0x61, 0x1e, + 0xdf, 0xaa, 0x0c, 0x6f, 0x5b, 0x44, 0x67, 0xe1, 0xb7, 0x09, 0x66, 0x1c, 0x3e, 0x06, 0x37, 0x65, + 0x32, 0x76, 0x84, 0x42, 0x5c, 0x57, 0x9a, 0xca, 0xf2, 0xbc, 0xb5, 0x20, 0x8d, 0x2f, 0x51, 0x88, + 0xb5, 0x3f, 0x0a, 0x78, 0xb2, 0x91, 0x70, 0xc4, 0x71, 0x19, 0x8e, 0x49, 0xde, 0x22, 0xa8, 0x15, + 0x25, 0xf1, 0xdd, 0x9c, 0x06, 0xa4, 0x69, 0xec, 0x42, 0x07, 0x00, 0x12, 0x63, 0x2a, 0xaa, 0x50, + 0x57, 0x9b, 0x33, 0xcb, 0xb5, 0xee, 0x50, 0xaf, 0x6a, 0x85, 0x5e, 0xe6, 0x78, 0x53, 0xb2, 0xac, + 0x13, 0x58, 0xb8, 0x04, 0x6e, 0xc7, 0x88, 0x72, 0x1f, 0x05, 0xf6, 0x1e, 0xf2, 0x83, 0x84, 0xe2, + 0xfa, 0x4c, 0x53, 0x59, 0xbe, 0x61, 0xdd, 0xca, 0xcd, 0x23, 0x61, 0x4d, 0xd3, 0x9f, 0xa2, 0xc0, + 0x77, 0x11, 0xc7, 0x36, 0x89, 0x82, 0x83, 0xfa, 0x6c, 0x26, 0x5b, 0x90, 0xc6, 0xcd, 0x28, 0x38, + 0xd0, 0xbe, 0xab, 0xe0, 0x51, 0xa5, 0x7f, 0xd8, 0x03, 0xb5, 0x24, 0xce, 0x40, 0x69, 0xf7, 0x32, + 0x50, 0xad, 0xdb, 0x90, 0x99, 0xc9, 0xf6, 0xe9, 0xa3, 0xb4, 0xc1, 0x1b, 0x88, 0xed, 0x5b, 0x40, + 0xc8, 0xd3, 0x33, 0x7c, 0x05, 0xe6, 0x1c, 0x8a, 0x11, 0x17, 0xf5, 0xaf, 0x75, 0x7b, 0xa5, 0x15, + 0x29, 0x46, 0xaf, 0xb4, 0x24, 0xeb, 0xd7, 0xac, 0x1c, 0x96, 0x62, 0x85, 0x93, 0xba, 0x7a, 0x25, + 0x58, 0x01, 0x83, 0x75, 0x30, 0x47, 0x71, 0x48, 0xa6, 0xa2, 0xaa, 0xf3, 0xe9, 0x8d, 0x78, 0x1e, + 0xd4, 0xc0, 0x7c, 0xd1, 0x06, 0xed, 0x58, 0x01, 0x4b, 0x95, 0x63, 0xc3, 0x62, 0x12, 0x31, 0x0c, + 0x47, 0xe0, 0xee, 0x99, 0x8e, 0xd9, 0x98, 0x52, 0x42, 0x33, 0x0f, 0xb5, 0x2e, 0x94, 0x81, 0xd3, + 0xd8, 0xd1, 0xb7, 0xb3, 0x31, 0xb7, 0xee, 0x9c, 0xee, 0xe5, 0x6a, 0x2a, 0x87, 0x6f, 0xc0, 0x75, + 0x8a, 0x59, 0x12, 0x70, 0x39, 0x5b, 0xa3, 0xea, 0xd9, 0x3a, 0x3f, 0x46, 0x2b, 0xc3, 0x59, 0x12, + 0xab, 0xbd, 0x00, 0xad, 0x8b, 0xbc, 0x70, 0xa1, 0x2f, 0xab, 0xfb, 0x79, 0x16, 0x2c, 0x96, 0x71, + 0xb6, 0x45, 0x7c, 0xf0, 0x97, 0x02, 0xee, 0x9f, 0xf3, 0x25, 0xc3, 0xe7, 0xd5, 0x19, 0x56, 0x2f, + 0x82, 0xc6, 0x65, 0x46, 0x43, 0x1b, 0x7e, 0x3c, 0xfe, 0xf9, 0x45, 0x5d, 0x81, 0xbd, 0x74, 0x39, + 0x1e, 0x9e, 0x4a, 0x7b, 0x45, 0x7e, 0xfb, 0xcc, 0x68, 0x17, 0xdb, 0xf2, 0xbf, 0x39, 0x30, 0xda, + 0x47, 0xf0, 0x83, 0x0a, 0x16, 0x2b, 0xc6, 0x05, 0xae, 0x5f, 0xb6, 0x9b, 0x72, 0x51, 0x35, 0xc6, + 0x57, 0x40, 0x12, 0xb3, 0xab, 0x8d, 0xb3, 0xec, 0x87, 0xda, 0xb3, 0x34, 0xfb, 0x7f, 0xe9, 0x1e, + 0x9e, 0x58, 0x84, 0x2b, 0xed, 0xa3, 0xf2, 0xe4, 0xcd, 0x30, 0x73, 0x64, 0x2a, 0xed, 0xc1, 0x27, + 0x15, 0xb4, 0x1c, 0x12, 0x56, 0xc6, 0x36, 0x68, 0x55, 0x4c, 0xcd, 0x56, 0xba, 0x6f, 0xb6, 0x94, + 0xd7, 0xeb, 0x39, 0xc9, 0x23, 0x01, 0x8a, 0x3c, 0x9d, 0x50, 0xcf, 0xf0, 0x70, 0x94, 0x6d, 0x23, + 0xf9, 0x4b, 0x8b, 0x7d, 0x56, 0xfe, 0x3b, 0xed, 0xc9, 0xc3, 0x57, 0x75, 0x66, 0xad, 0xdf, 0xff, + 0xa6, 0x36, 0xd7, 0x04, 0xb0, 0xef, 0x32, 
0x5d, 0x1c, 0xd3, 0xd3, 0x4e, 0x47, 0xcf, 0x1d, 0xb3, + 0x1f, 0x52, 0x32, 0xe9, 0xbb, 0x6c, 0x52, 0x48, 0x26, 0x3b, 0x9d, 0x89, 0x94, 0xfc, 0x56, 0x5b, + 0xc2, 0x6e, 0x9a, 0x7d, 0x97, 0x99, 0x66, 0x21, 0x32, 0xcd, 0x9d, 0x8e, 0x69, 0x4a, 0xd9, 0xee, + 0x5c, 0x16, 0xe7, 0xd3, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xac, 0xac, 0x54, 0xea, 0xf5, 0x07, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_feed_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_feed_service.pb.go new file mode 100644 index 0000000..efa0cd1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_feed_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_feed_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerFeedService.GetCustomerFeed][google.ads.googleads.v1.services.CustomerFeedService.GetCustomerFeed]. +type GetCustomerFeedRequest struct { + // The resource name of the customer feed to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerFeedRequest) Reset() { *m = GetCustomerFeedRequest{} } +func (m *GetCustomerFeedRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerFeedRequest) ProtoMessage() {} +func (*GetCustomerFeedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_service_643a1eae045d32fc, []int{0} +} +func (m *GetCustomerFeedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerFeedRequest.Unmarshal(m, b) +} +func (m *GetCustomerFeedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerFeedRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerFeedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerFeedRequest.Merge(dst, src) +} +func (m *GetCustomerFeedRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerFeedRequest.Size(m) +} +func (m *GetCustomerFeedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerFeedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerFeedRequest proto.InternalMessageInfo + +func (m *GetCustomerFeedRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerFeedService.MutateCustomerFeeds][google.ads.googleads.v1.services.CustomerFeedService.MutateCustomerFeeds]. +type MutateCustomerFeedsRequest struct { + // The ID of the customer whose customer feeds are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual customer feeds. + Operations []*CustomerFeedOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerFeedsRequest) Reset() { *m = MutateCustomerFeedsRequest{} } +func (m *MutateCustomerFeedsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerFeedsRequest) ProtoMessage() {} +func (*MutateCustomerFeedsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_service_643a1eae045d32fc, []int{1} +} +func (m *MutateCustomerFeedsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerFeedsRequest.Unmarshal(m, b) +} +func (m *MutateCustomerFeedsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerFeedsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerFeedsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerFeedsRequest.Merge(dst, src) +} +func (m *MutateCustomerFeedsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerFeedsRequest.Size(m) +} +func (m *MutateCustomerFeedsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerFeedsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerFeedsRequest proto.InternalMessageInfo + +func (m *MutateCustomerFeedsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerFeedsRequest) GetOperations() []*CustomerFeedOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCustomerFeedsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCustomerFeedsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a customer feed. +type CustomerFeedOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerFeedOperation_Create + // *CustomerFeedOperation_Update + // *CustomerFeedOperation_Remove + Operation isCustomerFeedOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerFeedOperation) Reset() { *m = CustomerFeedOperation{} } +func (m *CustomerFeedOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerFeedOperation) ProtoMessage() {} +func (*CustomerFeedOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_service_643a1eae045d32fc, []int{2} +} +func (m *CustomerFeedOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerFeedOperation.Unmarshal(m, b) +} +func (m *CustomerFeedOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerFeedOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerFeedOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerFeedOperation.Merge(dst, src) +} +func (m *CustomerFeedOperation) XXX_Size() int { + return xxx_messageInfo_CustomerFeedOperation.Size(m) +} +func (m *CustomerFeedOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerFeedOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerFeedOperation proto.InternalMessageInfo + +func (m *CustomerFeedOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCustomerFeedOperation_Operation interface { + isCustomerFeedOperation_Operation() +} + +type CustomerFeedOperation_Create struct { + Create *resources.CustomerFeed `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomerFeedOperation_Update struct { + Update *resources.CustomerFeed `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type CustomerFeedOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*CustomerFeedOperation_Create) isCustomerFeedOperation_Operation() {} + +func (*CustomerFeedOperation_Update) isCustomerFeedOperation_Operation() {} + +func (*CustomerFeedOperation_Remove) isCustomerFeedOperation_Operation() {} + +func (m *CustomerFeedOperation) GetOperation() isCustomerFeedOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerFeedOperation) GetCreate() *resources.CustomerFeed { + if x, ok := m.GetOperation().(*CustomerFeedOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomerFeedOperation) GetUpdate() *resources.CustomerFeed { + if x, ok := m.GetOperation().(*CustomerFeedOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *CustomerFeedOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CustomerFeedOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerFeedOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerFeedOperation_OneofMarshaler, _CustomerFeedOperation_OneofUnmarshaler, _CustomerFeedOperation_OneofSizer, []interface{}{ + (*CustomerFeedOperation_Create)(nil), + (*CustomerFeedOperation_Update)(nil), + (*CustomerFeedOperation_Remove)(nil), + } +} + +func _CustomerFeedOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerFeedOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerFeedOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomerFeedOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *CustomerFeedOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CustomerFeedOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerFeedOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerFeedOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerFeed) + err := b.DecodeMessage(msg) + m.Operation = &CustomerFeedOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerFeed) + err := b.DecodeMessage(msg) + m.Operation = &CustomerFeedOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CustomerFeedOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CustomerFeedOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerFeedOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerFeedOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerFeedOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerFeedOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a customer feed mutate. +type MutateCustomerFeedsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCustomerFeedResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerFeedsResponse) Reset() { *m = MutateCustomerFeedsResponse{} } +func (m *MutateCustomerFeedsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerFeedsResponse) ProtoMessage() {} +func (*MutateCustomerFeedsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_service_643a1eae045d32fc, []int{3} +} +func (m *MutateCustomerFeedsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerFeedsResponse.Unmarshal(m, b) +} +func (m *MutateCustomerFeedsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerFeedsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerFeedsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerFeedsResponse.Merge(dst, src) +} +func (m *MutateCustomerFeedsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerFeedsResponse.Size(m) +} +func (m *MutateCustomerFeedsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerFeedsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerFeedsResponse proto.InternalMessageInfo + +func (m *MutateCustomerFeedsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCustomerFeedsResponse) GetResults() []*MutateCustomerFeedResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the customer feed mutate. +type MutateCustomerFeedResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerFeedResult) Reset() { *m = MutateCustomerFeedResult{} } +func (m *MutateCustomerFeedResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerFeedResult) ProtoMessage() {} +func (*MutateCustomerFeedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_feed_service_643a1eae045d32fc, []int{4} +} +func (m *MutateCustomerFeedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerFeedResult.Unmarshal(m, b) +} +func (m *MutateCustomerFeedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerFeedResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerFeedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerFeedResult.Merge(dst, src) +} +func (m *MutateCustomerFeedResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerFeedResult.Size(m) +} +func (m *MutateCustomerFeedResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerFeedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerFeedResult proto.InternalMessageInfo + +func (m *MutateCustomerFeedResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerFeedRequest)(nil), "google.ads.googleads.v1.services.GetCustomerFeedRequest") + proto.RegisterType((*MutateCustomerFeedsRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerFeedsRequest") + proto.RegisterType((*CustomerFeedOperation)(nil), "google.ads.googleads.v1.services.CustomerFeedOperation") + proto.RegisterType((*MutateCustomerFeedsResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerFeedsResponse") + proto.RegisterType((*MutateCustomerFeedResult)(nil), "google.ads.googleads.v1.services.MutateCustomerFeedResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerFeedServiceClient is the client API for CustomerFeedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerFeedServiceClient interface { + // Returns the requested customer feed in full detail. + GetCustomerFeed(ctx context.Context, in *GetCustomerFeedRequest, opts ...grpc.CallOption) (*resources.CustomerFeed, error) + // Creates, updates, or removes customer feeds. Operation statuses are + // returned. 
+ MutateCustomerFeeds(ctx context.Context, in *MutateCustomerFeedsRequest, opts ...grpc.CallOption) (*MutateCustomerFeedsResponse, error) +} + +type customerFeedServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerFeedServiceClient(cc *grpc.ClientConn) CustomerFeedServiceClient { + return &customerFeedServiceClient{cc} +} + +func (c *customerFeedServiceClient) GetCustomerFeed(ctx context.Context, in *GetCustomerFeedRequest, opts ...grpc.CallOption) (*resources.CustomerFeed, error) { + out := new(resources.CustomerFeed) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerFeedService/GetCustomerFeed", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerFeedServiceClient) MutateCustomerFeeds(ctx context.Context, in *MutateCustomerFeedsRequest, opts ...grpc.CallOption) (*MutateCustomerFeedsResponse, error) { + out := new(MutateCustomerFeedsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerFeedService/MutateCustomerFeeds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerFeedServiceServer is the server API for CustomerFeedService service. +type CustomerFeedServiceServer interface { + // Returns the requested customer feed in full detail. + GetCustomerFeed(context.Context, *GetCustomerFeedRequest) (*resources.CustomerFeed, error) + // Creates, updates, or removes customer feeds. Operation statuses are + // returned. + MutateCustomerFeeds(context.Context, *MutateCustomerFeedsRequest) (*MutateCustomerFeedsResponse, error) +} + +func RegisterCustomerFeedServiceServer(s *grpc.Server, srv CustomerFeedServiceServer) { + s.RegisterService(&_CustomerFeedService_serviceDesc, srv) +} + +func _CustomerFeedService_GetCustomerFeed_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerFeedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerFeedServiceServer).GetCustomerFeed(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerFeedService/GetCustomerFeed", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerFeedServiceServer).GetCustomerFeed(ctx, req.(*GetCustomerFeedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerFeedService_MutateCustomerFeeds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerFeedsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerFeedServiceServer).MutateCustomerFeeds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerFeedService/MutateCustomerFeeds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerFeedServiceServer).MutateCustomerFeeds(ctx, req.(*MutateCustomerFeedsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerFeedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerFeedService", + HandlerType: (*CustomerFeedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerFeed", + Handler: _CustomerFeedService_GetCustomerFeed_Handler, + }, + { 
+ MethodName: "MutateCustomerFeeds", + Handler: _CustomerFeedService_MutateCustomerFeeds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_feed_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_feed_service.proto", fileDescriptor_customer_feed_service_643a1eae045d32fc) +} + +var fileDescriptor_customer_feed_service_643a1eae045d32fc = []byte{ + // 710 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xfe, 0xed, 0xfc, 0x2a, 0x74, 0x5d, 0xa8, 0xb4, 0x55, 0xc1, 0x0a, 0x08, 0x22, 0x53, 0x89, + 0x2a, 0x07, 0xaf, 0x12, 0x84, 0x8a, 0xdc, 0x46, 0x28, 0x45, 0xa4, 0xed, 0xa1, 0xb4, 0x72, 0x51, + 0x91, 0x50, 0x24, 0x6b, 0x1b, 0x6f, 0x22, 0xab, 0xb6, 0xd7, 0xec, 0xae, 0x83, 0xaa, 0xaa, 0x17, + 0xde, 0x00, 0xf1, 0x04, 0x70, 0xe4, 0xce, 0x89, 0x37, 0xe0, 0x86, 0x78, 0x85, 0x9e, 0x78, 0x09, + 0x90, 0xbd, 0x5e, 0x93, 0xb4, 0x89, 0x02, 0xbd, 0x8d, 0x67, 0xbf, 0xf9, 0x66, 0x66, 0xbf, 0xd9, + 0x31, 0xd8, 0x18, 0x50, 0x3a, 0x08, 0x09, 0xc2, 0x3e, 0x47, 0xd2, 0xcc, 0xac, 0x61, 0x03, 0x71, + 0xc2, 0x86, 0x41, 0x8f, 0x70, 0xd4, 0x4b, 0xb9, 0xa0, 0x11, 0x61, 0x5e, 0x9f, 0x10, 0xdf, 0x2b, + 0xdc, 0x76, 0xc2, 0xa8, 0xa0, 0xb0, 0x26, 0x43, 0x6c, 0xec, 0x73, 0xbb, 0x8c, 0xb6, 0x87, 0x0d, + 0x5b, 0x45, 0x57, 0x1f, 0x4f, 0xe3, 0x67, 0x84, 0xd3, 0x94, 0x5d, 0x4a, 0x20, 0x89, 0xab, 0x77, + 0x55, 0x58, 0x12, 0x20, 0x1c, 0xc7, 0x54, 0x60, 0x11, 0xd0, 0x98, 0x17, 0xa7, 0x45, 0x5a, 0x94, + 0x7f, 0x1d, 0xa5, 0x7d, 0xd4, 0x0f, 0x48, 0xe8, 0x7b, 0x11, 0xe6, 0xc7, 0x05, 0xe2, 0xde, 0x45, + 0xc4, 0x5b, 0x86, 0x93, 0x84, 0x30, 0xc5, 0x70, 0xbb, 0x38, 0x67, 0x49, 0x0f, 0x71, 0x81, 0x45, + 0x5a, 0x1c, 0x58, 0x2d, 0x70, 0x6b, 0x8b, 0x88, 0x67, 0x45, 0x49, 0x1d, 0x42, 0x7c, 0x97, 0xbc, + 0x49, 0x09, 0x17, 0xf0, 0x01, 0xb8, 0xa1, 0x6a, 0xf6, 0x62, 0x1c, 0x11, 0x53, 0xab, 0x69, 0xab, + 0xf3, 0xee, 0x82, 0x72, 0xbe, 0xc0, 0x11, 0xb1, 0xce, 0x35, 0x50, 0xdd, 0x4d, 0x05, 0x16, 0x64, + 0x94, 0x82, 0x2b, 0x8e, 0xfb, 0xc0, 0x28, 0xbb, 0x0d, 0xfc, 0x82, 0x01, 0x28, 0xd7, 0x8e, 0x0f, + 0x5f, 0x01, 0x40, 0x13, 0xc2, 0x64, 0xb7, 0xa6, 0x5e, 0xab, 0xac, 0x1a, 0xcd, 0x35, 0x7b, 0xd6, + 0x2d, 0xdb, 0xa3, 0xc9, 0xf6, 0x54, 0xbc, 0x3b, 0x42, 0x05, 0x1f, 0x82, 0xc5, 0x04, 0x33, 0x11, + 0xe0, 0xd0, 0xeb, 0xe3, 0x20, 0x4c, 0x19, 0x31, 0x2b, 0x35, 0x6d, 0xf5, 0xba, 0x7b, 0xb3, 0x70, + 0x77, 0xa4, 0x37, 0x6b, 0x73, 0x88, 0xc3, 0xc0, 0xc7, 0x82, 0x78, 0x34, 0x0e, 0x4f, 0xcc, 0xff, + 0x73, 0xd8, 0x82, 0x72, 0xee, 0xc5, 0xe1, 0x89, 0xf5, 0x5e, 0x07, 0xcb, 0x13, 0x73, 0xc2, 0x75, + 0x60, 0xa4, 0x49, 0x1e, 0x9c, 0xa9, 0x91, 0x07, 0x1b, 0xcd, 0xaa, 0xea, 0x40, 0xc9, 0x61, 0x77, + 0x32, 0xc1, 0x76, 0x31, 0x3f, 0x76, 0x81, 0x84, 0x67, 0x36, 0xdc, 0x01, 0x73, 0x3d, 0x46, 0xb0, + 0x90, 0x77, 0x6b, 0x34, 0xd1, 0xd4, 0xce, 0xcb, 0xe9, 0x19, 0x6b, 0x7d, 0xfb, 0x3f, 0xb7, 0x20, + 0xc8, 0xa8, 0x24, 0xb1, 0xa9, 0x5f, 0x99, 0x4a, 0x12, 0x40, 0x13, 0xcc, 0x31, 0x12, 0xd1, 0xa1, + 0xbc, 0xb1, 0xf9, 0xec, 0x44, 0x7e, 0x6f, 0x1a, 0x60, 0xbe, 0xbc, 0x62, 0xeb, 0xab, 0x06, 0xee, + 0x4c, 0x94, 0x9e, 0x27, 0x34, 0xe6, 0x04, 0x76, 0xc0, 0xf2, 0x05, 0x05, 0x3c, 0xc2, 0x18, 0x65, + 0x39, 0xab, 0xd1, 0x84, 0xaa, 0x40, 0x96, 0xf4, 0xec, 0x83, 0x7c, 0x24, 0xdd, 0xa5, 0x71, 0x6d, + 0x9e, 0x67, 0x70, 0xf8, 0x12, 0x5c, 0x63, 0x84, 0xa7, 0xa1, 0x50, 0xf3, 0xe1, 0xcc, 0x9e, 0x8f, + 0xcb, 0x75, 0xb9, 0x39, 0x85, 0xab, 0xa8, 0xac, 0xa7, 0xc0, 0x9c, 0x06, 0xfa, 0xab, 0xc9, 0x6f, + 0x7e, 0xac, 
0x80, 0xa5, 0xd1, 0xd8, 0x03, 0x99, 0x1b, 0x7e, 0xd1, 0xc0, 0xe2, 0x85, 0x17, 0x05, + 0x9f, 0xcc, 0xae, 0x78, 0xf2, 0x23, 0xac, 0xfe, 0xab, 0x8c, 0xd6, 0xda, 0xbb, 0x1f, 0xe7, 0x1f, + 0xf4, 0x06, 0x44, 0xd9, 0xce, 0x39, 0x1d, 0x6b, 0xa3, 0xa5, 0xde, 0x1d, 0x47, 0xf5, 0x72, 0x09, + 0xe5, 0x9a, 0xa1, 0xfa, 0x19, 0xfc, 0xae, 0x81, 0xa5, 0x09, 0x72, 0xc2, 0x8d, 0xab, 0xdc, 0xb6, + 0x5a, 0x00, 0xd5, 0xd6, 0x15, 0xa3, 0xe5, 0x0c, 0x59, 0xad, 0xbc, 0x9b, 0x35, 0xab, 0x99, 0x75, + 0xf3, 0xa7, 0xfc, 0xd3, 0x91, 0xa5, 0xd2, 0xaa, 0x9f, 0x8d, 0x37, 0xe3, 0x44, 0x39, 0xa1, 0xa3, + 0xd5, 0x37, 0x7f, 0x69, 0x60, 0xa5, 0x47, 0xa3, 0x99, 0x35, 0x6c, 0x9a, 0x13, 0x94, 0xdc, 0xcf, + 0xde, 0xee, 0xbe, 0xf6, 0x7a, 0xbb, 0x88, 0x1e, 0xd0, 0x10, 0xc7, 0x03, 0x9b, 0xb2, 0x01, 0x1a, + 0x90, 0x38, 0x7f, 0xd9, 0x6a, 0xc3, 0x27, 0x01, 0x9f, 0xfe, 0x43, 0x59, 0x57, 0xc6, 0x27, 0xbd, + 0xb2, 0xd5, 0x6e, 0x7f, 0xd6, 0x6b, 0x5b, 0x92, 0xb0, 0xed, 0x73, 0x5b, 0x9a, 0x99, 0x75, 0xd8, + 0xb0, 0x8b, 0xc4, 0xfc, 0x9b, 0x82, 0x74, 0xdb, 0x3e, 0xef, 0x96, 0x90, 0xee, 0x61, 0xa3, 0xab, + 0x20, 0x3f, 0xf5, 0x15, 0xe9, 0x77, 0x9c, 0xb6, 0xcf, 0x1d, 0xa7, 0x04, 0x39, 0xce, 0x61, 0xc3, + 0x71, 0x14, 0xec, 0x68, 0x2e, 0xaf, 0xf3, 0xd1, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x70, + 0x21, 0x28, 0xf7, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_label_service.pb.go new file mode 100644 index 0000000..c5166a7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_label_service.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerLabelService.GetCustomerLabel][google.ads.googleads.v1.services.CustomerLabelService.GetCustomerLabel]. +type GetCustomerLabelRequest struct { + // The resource name of the customer-label relationship to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerLabelRequest) Reset() { *m = GetCustomerLabelRequest{} } +func (m *GetCustomerLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerLabelRequest) ProtoMessage() {} +func (*GetCustomerLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_service_af6ae2408ce6ca33, []int{0} +} +func (m *GetCustomerLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerLabelRequest.Unmarshal(m, b) +} +func (m *GetCustomerLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerLabelRequest.Merge(dst, src) +} +func (m *GetCustomerLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerLabelRequest.Size(m) +} +func (m *GetCustomerLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerLabelRequest proto.InternalMessageInfo + +func (m *GetCustomerLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerLabelService.MutateCustomerLabels][google.ads.googleads.v1.services.CustomerLabelService.MutateCustomerLabels]. +type MutateCustomerLabelsRequest struct { + // ID of the customer whose customer-label relationships are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on customer-label relationships. + Operations []*CustomerLabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerLabelsRequest) Reset() { *m = MutateCustomerLabelsRequest{} } +func (m *MutateCustomerLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerLabelsRequest) ProtoMessage() {} +func (*MutateCustomerLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_service_af6ae2408ce6ca33, []int{1} +} +func (m *MutateCustomerLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerLabelsRequest.Unmarshal(m, b) +} +func (m *MutateCustomerLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerLabelsRequest.Merge(dst, src) +} +func (m *MutateCustomerLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerLabelsRequest.Size(m) +} +func (m *MutateCustomerLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerLabelsRequest proto.InternalMessageInfo + +func (m *MutateCustomerLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerLabelsRequest) GetOperations() []*CustomerLabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCustomerLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCustomerLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on a customer-label relationship. +type CustomerLabelOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerLabelOperation_Create + // *CustomerLabelOperation_Remove + Operation isCustomerLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerLabelOperation) Reset() { *m = CustomerLabelOperation{} } +func (m *CustomerLabelOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerLabelOperation) ProtoMessage() {} +func (*CustomerLabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_service_af6ae2408ce6ca33, []int{2} +} +func (m *CustomerLabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerLabelOperation.Unmarshal(m, b) +} +func (m *CustomerLabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerLabelOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerLabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerLabelOperation.Merge(dst, src) +} +func (m *CustomerLabelOperation) XXX_Size() int { + return xxx_messageInfo_CustomerLabelOperation.Size(m) +} +func (m *CustomerLabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerLabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerLabelOperation proto.InternalMessageInfo + +type isCustomerLabelOperation_Operation interface { + isCustomerLabelOperation_Operation() +} + +type CustomerLabelOperation_Create struct { + Create *resources.CustomerLabel `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomerLabelOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*CustomerLabelOperation_Create) isCustomerLabelOperation_Operation() {} + +func (*CustomerLabelOperation_Remove) isCustomerLabelOperation_Operation() {} + +func (m *CustomerLabelOperation) GetOperation() isCustomerLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerLabelOperation) GetCreate() *resources.CustomerLabel { + if x, ok := m.GetOperation().(*CustomerLabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomerLabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CustomerLabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerLabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerLabelOperation_OneofMarshaler, _CustomerLabelOperation_OneofUnmarshaler, _CustomerLabelOperation_OneofSizer, []interface{}{ + (*CustomerLabelOperation_Create)(nil), + (*CustomerLabelOperation_Remove)(nil), + } +} + +func _CustomerLabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerLabelOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerLabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomerLabelOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CustomerLabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerLabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerLabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerLabel) + err := b.DecodeMessage(msg) + m.Operation = &CustomerLabelOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CustomerLabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CustomerLabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerLabelOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerLabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerLabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a customer labels mutate. +type MutateCustomerLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCustomerLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerLabelsResponse) Reset() { *m = MutateCustomerLabelsResponse{} } +func (m *MutateCustomerLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerLabelsResponse) ProtoMessage() {} +func (*MutateCustomerLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_service_af6ae2408ce6ca33, []int{3} +} +func (m *MutateCustomerLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerLabelsResponse.Unmarshal(m, b) +} +func (m *MutateCustomerLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerLabelsResponse.Merge(dst, src) +} +func (m *MutateCustomerLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerLabelsResponse.Size(m) +} +func (m *MutateCustomerLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerLabelsResponse proto.InternalMessageInfo + +func (m *MutateCustomerLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCustomerLabelsResponse) GetResults() []*MutateCustomerLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for a customer label mutate. +type MutateCustomerLabelResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerLabelResult) Reset() { *m = MutateCustomerLabelResult{} } +func (m *MutateCustomerLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerLabelResult) ProtoMessage() {} +func (*MutateCustomerLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_label_service_af6ae2408ce6ca33, []int{4} +} +func (m *MutateCustomerLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerLabelResult.Unmarshal(m, b) +} +func (m *MutateCustomerLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerLabelResult.Merge(dst, src) +} +func (m *MutateCustomerLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerLabelResult.Size(m) +} +func (m *MutateCustomerLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerLabelResult proto.InternalMessageInfo + +func (m *MutateCustomerLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerLabelRequest)(nil), "google.ads.googleads.v1.services.GetCustomerLabelRequest") + proto.RegisterType((*MutateCustomerLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerLabelsRequest") + proto.RegisterType((*CustomerLabelOperation)(nil), "google.ads.googleads.v1.services.CustomerLabelOperation") + proto.RegisterType((*MutateCustomerLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerLabelsResponse") + proto.RegisterType((*MutateCustomerLabelResult)(nil), "google.ads.googleads.v1.services.MutateCustomerLabelResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerLabelServiceClient is the client API for CustomerLabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerLabelServiceClient interface { + // Returns the requested customer-label relationship in full detail. + GetCustomerLabel(ctx context.Context, in *GetCustomerLabelRequest, opts ...grpc.CallOption) (*resources.CustomerLabel, error) + // Creates and removes customer-label relationships. + // Operation statuses are returned. 
+ MutateCustomerLabels(ctx context.Context, in *MutateCustomerLabelsRequest, opts ...grpc.CallOption) (*MutateCustomerLabelsResponse, error) +} + +type customerLabelServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerLabelServiceClient(cc *grpc.ClientConn) CustomerLabelServiceClient { + return &customerLabelServiceClient{cc} +} + +func (c *customerLabelServiceClient) GetCustomerLabel(ctx context.Context, in *GetCustomerLabelRequest, opts ...grpc.CallOption) (*resources.CustomerLabel, error) { + out := new(resources.CustomerLabel) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerLabelService/GetCustomerLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerLabelServiceClient) MutateCustomerLabels(ctx context.Context, in *MutateCustomerLabelsRequest, opts ...grpc.CallOption) (*MutateCustomerLabelsResponse, error) { + out := new(MutateCustomerLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerLabelService/MutateCustomerLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerLabelServiceServer is the server API for CustomerLabelService service. +type CustomerLabelServiceServer interface { + // Returns the requested customer-label relationship in full detail. + GetCustomerLabel(context.Context, *GetCustomerLabelRequest) (*resources.CustomerLabel, error) + // Creates and removes customer-label relationships. + // Operation statuses are returned. + MutateCustomerLabels(context.Context, *MutateCustomerLabelsRequest) (*MutateCustomerLabelsResponse, error) +} + +func RegisterCustomerLabelServiceServer(s *grpc.Server, srv CustomerLabelServiceServer) { + s.RegisterService(&_CustomerLabelService_serviceDesc, srv) +} + +func _CustomerLabelService_GetCustomerLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerLabelServiceServer).GetCustomerLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerLabelService/GetCustomerLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerLabelServiceServer).GetCustomerLabel(ctx, req.(*GetCustomerLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerLabelService_MutateCustomerLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerLabelServiceServer).MutateCustomerLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerLabelService/MutateCustomerLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerLabelServiceServer).MutateCustomerLabels(ctx, req.(*MutateCustomerLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerLabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerLabelService", + HandlerType: (*CustomerLabelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: 
"GetCustomerLabel", + Handler: _CustomerLabelService_GetCustomerLabel_Handler, + }, + { + MethodName: "MutateCustomerLabels", + Handler: _CustomerLabelService_MutateCustomerLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_label_service.proto", fileDescriptor_customer_label_service_af6ae2408ce6ca33) +} + +var fileDescriptor_customer_label_service_af6ae2408ce6ca33 = []byte{ + // 663 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xdd, 0x6a, 0xd4, 0x4e, + 0x14, 0xff, 0x27, 0xfb, 0xa7, 0xda, 0xd9, 0xfa, 0xc1, 0x58, 0x6d, 0xba, 0x16, 0x5d, 0x62, 0xc1, + 0xb2, 0x17, 0x49, 0x77, 0x0b, 0x52, 0x53, 0xb6, 0xb8, 0x15, 0xdb, 0x2a, 0x6a, 0x4b, 0x8a, 0x45, + 0x64, 0x21, 0x4c, 0x93, 0x69, 0x08, 0x24, 0x99, 0x38, 0x33, 0x59, 0x29, 0xa5, 0x20, 0xde, 0x7a, + 0xe9, 0x1b, 0x78, 0xd9, 0x37, 0xf0, 0xc2, 0x17, 0xf0, 0x56, 0x7c, 0x03, 0xf1, 0xc2, 0xa7, 0x90, + 0x64, 0x32, 0xb1, 0x59, 0x77, 0x59, 0xed, 0xdd, 0x99, 0xf3, 0xf1, 0x3b, 0xe7, 0x77, 0x3e, 0x06, + 0x74, 0x7d, 0x42, 0xfc, 0x10, 0x9b, 0xc8, 0x63, 0xa6, 0x10, 0x33, 0x69, 0xd0, 0x36, 0x19, 0xa6, + 0x83, 0xc0, 0xc5, 0xcc, 0x74, 0x53, 0xc6, 0x49, 0x84, 0xa9, 0x13, 0xa2, 0x03, 0x1c, 0x3a, 0x85, + 0xde, 0x48, 0x28, 0xe1, 0x04, 0x36, 0x45, 0x8c, 0x81, 0x3c, 0x66, 0x94, 0xe1, 0xc6, 0xa0, 0x6d, + 0xc8, 0xf0, 0xc6, 0xbd, 0x71, 0x09, 0x28, 0x66, 0x24, 0xa5, 0x7f, 0x66, 0x10, 0xc8, 0x8d, 0x05, + 0x19, 0x97, 0x04, 0x26, 0x8a, 0x63, 0xc2, 0x11, 0x0f, 0x48, 0xcc, 0x0a, 0xeb, 0xad, 0xc2, 0x9a, + 0xbf, 0x0e, 0xd2, 0x43, 0xf3, 0x0d, 0x45, 0x49, 0x82, 0xa9, 0xb4, 0xcf, 0x15, 0x76, 0x9a, 0xb8, + 0x26, 0xe3, 0x88, 0xa7, 0x85, 0x41, 0x5f, 0x07, 0x73, 0x5b, 0x98, 0x3f, 0x2c, 0x32, 0x3e, 0xcd, + 0x12, 0xda, 0xf8, 0x75, 0x8a, 0x19, 0x87, 0x77, 0xc0, 0x25, 0x59, 0x93, 0x13, 0xa3, 0x08, 0x6b, + 0x4a, 0x53, 0x59, 0x9a, 0xb6, 0x67, 0xa4, 0xf2, 0x39, 0x8a, 0xb0, 0xfe, 0x43, 0x01, 0x37, 0x9f, + 0xa5, 0x1c, 0x71, 0x5c, 0xc1, 0x60, 0x12, 0xe4, 0x36, 0xa8, 0x97, 0x74, 0x02, 0xaf, 0x80, 0x00, + 0x52, 0xf5, 0xd8, 0x83, 0x2f, 0x01, 0x20, 0x09, 0xa6, 0x82, 0x8d, 0xa6, 0x36, 0x6b, 0x4b, 0xf5, + 0xce, 0xaa, 0x31, 0xa9, 0x8d, 0x46, 0x25, 0xdb, 0x8e, 0x04, 0xb0, 0xcf, 0x60, 0xc1, 0xbb, 0xe0, + 0x4a, 0x82, 0x28, 0x0f, 0x50, 0xe8, 0x1c, 0xa2, 0x20, 0x4c, 0x29, 0xd6, 0x6a, 0x4d, 0x65, 0xe9, + 0xa2, 0x7d, 0xb9, 0x50, 0x6f, 0x0a, 0x6d, 0x46, 0x74, 0x80, 0xc2, 0xc0, 0x43, 0x1c, 0x3b, 0x24, + 0x0e, 0x8f, 0xb4, 0xff, 0x73, 0xb7, 0x19, 0xa9, 0xdc, 0x89, 0xc3, 0x23, 0xfd, 0xbd, 0x02, 0x6e, + 0x8c, 0x4e, 0x0a, 0x9f, 0x80, 0x29, 0x97, 0x62, 0xc4, 0x45, 0x87, 0xea, 0x9d, 0xe5, 0xb1, 0xe5, + 0x97, 0x33, 0xae, 0xd6, 0xbf, 0xfd, 0x9f, 0x5d, 0x20, 0x40, 0x0d, 0x4c, 0x51, 0x1c, 0x91, 0x01, + 0xd6, 0xd4, 0xac, 0x55, 0x99, 0x45, 0xbc, 0x37, 0xea, 0x60, 0xba, 0x24, 0xa7, 0x7f, 0x56, 0xc0, + 0xc2, 0xe8, 0xb6, 0xb3, 0x84, 0xc4, 0x0c, 0xc3, 0x4d, 0x70, 0x7d, 0x88, 0xbc, 0x83, 0x29, 0x25, + 0x34, 0x6f, 0x41, 0xbd, 0x03, 0x65, 0x89, 0x34, 0x71, 0x8d, 0xbd, 0x7c, 0x21, 0xec, 0x6b, 0xd5, + 0xb6, 0x3c, 0xca, 0xdc, 0xe1, 0x0b, 0x70, 0x81, 0x62, 0x96, 0x86, 0x5c, 0xce, 0x66, 0x6d, 0xf2, + 0x6c, 0x46, 0x14, 0x66, 0xe7, 0x18, 0xb6, 0xc4, 0xd2, 0x1f, 0x80, 0xf9, 0xb1, 0x5e, 0x7f, 0xb5, + 0x78, 0x9d, 0xd3, 0x1a, 0x98, 0xad, 0x04, 0xef, 0x89, 0xf4, 0xf0, 0x93, 0x02, 0xae, 0x0e, 0xaf, + 0x34, 0xbc, 0x3f, 0xb9, 0xea, 0x31, 0x67, 0xd0, 0xf8, 0xe7, 0x69, 0xea, 0xab, 0xef, 0xbe, 0x7e, + 0xff, 0xa0, 0x76, 
0xe0, 0x72, 0x76, 0xd6, 0xc7, 0x15, 0x2a, 0x5d, 0xb9, 0xf9, 0xcc, 0x6c, 0x95, + 0x77, 0x2e, 0x46, 0x67, 0xb6, 0x4e, 0xe0, 0x37, 0x05, 0xcc, 0x8e, 0x1a, 0x2b, 0xec, 0x9e, 0xab, + 0xeb, 0xf2, 0x0a, 0x1b, 0xeb, 0xe7, 0x0d, 0x17, 0xdb, 0xa4, 0xaf, 0xe7, 0x8c, 0x56, 0xf5, 0x95, + 0x8c, 0xd1, 0x6f, 0x0a, 0xc7, 0x67, 0x4e, 0xbb, 0xdb, 0x3a, 0x19, 0x22, 0x64, 0x45, 0x39, 0xa4, + 0xa5, 0xb4, 0x36, 0xde, 0xaa, 0x60, 0xd1, 0x25, 0xd1, 0xc4, 0x2a, 0x36, 0xe6, 0x47, 0x8d, 0x74, + 0x37, 0xfb, 0xa9, 0x76, 0x95, 0x57, 0xdb, 0x45, 0xb8, 0x4f, 0x42, 0x14, 0xfb, 0x06, 0xa1, 0xbe, + 0xe9, 0xe3, 0x38, 0xff, 0xc7, 0xe4, 0x57, 0x9a, 0x04, 0x6c, 0xfc, 0xd7, 0xbd, 0x26, 0x85, 0x8f, + 0x6a, 0x6d, 0xab, 0xd7, 0x3b, 0x55, 0x9b, 0x5b, 0x02, 0xb0, 0xe7, 0x31, 0x43, 0x88, 0x99, 0xb4, + 0xdf, 0x36, 0x8a, 0xc4, 0xec, 0x8b, 0x74, 0xe9, 0xf7, 0x3c, 0xd6, 0x2f, 0x5d, 0xfa, 0xfb, 0xed, + 0xbe, 0x74, 0xf9, 0xa9, 0x2e, 0x0a, 0xbd, 0x65, 0xf5, 0x3c, 0x66, 0x59, 0xa5, 0x93, 0x65, 0xed, + 0xb7, 0x2d, 0x4b, 0xba, 0x1d, 0x4c, 0xe5, 0x75, 0xae, 0xfc, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xdd, + 0xb3, 0x0e, 0x47, 0x61, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_manager_link_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_manager_link_service.pb.go new file mode 100644 index 0000000..c7763dc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_manager_link_service.pb.go @@ -0,0 +1,487 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_manager_link_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerManagerLinkService.GetCustomerManagerLink][google.ads.googleads.v1.services.CustomerManagerLinkService.GetCustomerManagerLink]. +type GetCustomerManagerLinkRequest struct { + // The resource name of the CustomerManagerLink to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerManagerLinkRequest) Reset() { *m = GetCustomerManagerLinkRequest{} } +func (m *GetCustomerManagerLinkRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerManagerLinkRequest) ProtoMessage() {} +func (*GetCustomerManagerLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_service_89a13cddd5698861, []int{0} +} +func (m *GetCustomerManagerLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerManagerLinkRequest.Unmarshal(m, b) +} +func (m *GetCustomerManagerLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerManagerLinkRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerManagerLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerManagerLinkRequest.Merge(dst, src) +} +func (m *GetCustomerManagerLinkRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerManagerLinkRequest.Size(m) +} +func (m *GetCustomerManagerLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerManagerLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerManagerLinkRequest proto.InternalMessageInfo + +func (m *GetCustomerManagerLinkRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerManagerLinkService.MutateCustomerManagerLink][google.ads.googleads.v1.services.CustomerManagerLinkService.MutateCustomerManagerLink]. +type MutateCustomerManagerLinkRequest struct { + // The ID of the customer whose customer manager links are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual customer manager links. 
+ Operations []*CustomerManagerLinkOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerManagerLinkRequest) Reset() { *m = MutateCustomerManagerLinkRequest{} } +func (m *MutateCustomerManagerLinkRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerManagerLinkRequest) ProtoMessage() {} +func (*MutateCustomerManagerLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_service_89a13cddd5698861, []int{1} +} +func (m *MutateCustomerManagerLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerManagerLinkRequest.Unmarshal(m, b) +} +func (m *MutateCustomerManagerLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerManagerLinkRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerManagerLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerManagerLinkRequest.Merge(dst, src) +} +func (m *MutateCustomerManagerLinkRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerManagerLinkRequest.Size(m) +} +func (m *MutateCustomerManagerLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerManagerLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerManagerLinkRequest proto.InternalMessageInfo + +func (m *MutateCustomerManagerLinkRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerManagerLinkRequest) GetOperations() []*CustomerManagerLinkOperation { + if m != nil { + return m.Operations + } + return nil +} + +// Updates the status of a CustomerManagerLink. +// The following actions are possible: +// 1. Update operation with status ACTIVE accepts a pending invitation. +// 2. Update operation with status REFUSED declines a pending invitation. +// 3. Update operation with status INACTIVE terminates link to manager. +type CustomerManagerLinkOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerManagerLinkOperation_Update + Operation isCustomerManagerLinkOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerManagerLinkOperation) Reset() { *m = CustomerManagerLinkOperation{} } +func (m *CustomerManagerLinkOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerManagerLinkOperation) ProtoMessage() {} +func (*CustomerManagerLinkOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_service_89a13cddd5698861, []int{2} +} +func (m *CustomerManagerLinkOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerManagerLinkOperation.Unmarshal(m, b) +} +func (m *CustomerManagerLinkOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerManagerLinkOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerManagerLinkOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerManagerLinkOperation.Merge(dst, src) +} +func (m *CustomerManagerLinkOperation) XXX_Size() int { + return xxx_messageInfo_CustomerManagerLinkOperation.Size(m) +} +func (m *CustomerManagerLinkOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerManagerLinkOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerManagerLinkOperation proto.InternalMessageInfo + +func (m *CustomerManagerLinkOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isCustomerManagerLinkOperation_Operation interface { + isCustomerManagerLinkOperation_Operation() +} + +type CustomerManagerLinkOperation_Update struct { + Update *resources.CustomerManagerLink `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +func (*CustomerManagerLinkOperation_Update) isCustomerManagerLinkOperation_Operation() {} + +func (m *CustomerManagerLinkOperation) GetOperation() isCustomerManagerLinkOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerManagerLinkOperation) GetUpdate() *resources.CustomerManagerLink { + if x, ok := m.GetOperation().(*CustomerManagerLinkOperation_Update); ok { + return x.Update + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerManagerLinkOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerManagerLinkOperation_OneofMarshaler, _CustomerManagerLinkOperation_OneofUnmarshaler, _CustomerManagerLinkOperation_OneofSizer, []interface{}{ + (*CustomerManagerLinkOperation_Update)(nil), + } +} + +func _CustomerManagerLinkOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerManagerLinkOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerManagerLinkOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomerManagerLinkOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerManagerLinkOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerManagerLinkOperation) + switch tag { + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerManagerLink) + err := b.DecodeMessage(msg) + m.Operation = &CustomerManagerLinkOperation_Update{msg} + return true, err + default: + return false, nil + } +} + +func _CustomerManagerLinkOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerManagerLinkOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerManagerLinkOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a CustomerManagerLink mutate. +type MutateCustomerManagerLinkResponse struct { + // A result that identifies the resource affected by the mutate request. 
+ Results []*MutateCustomerManagerLinkResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerManagerLinkResponse) Reset() { *m = MutateCustomerManagerLinkResponse{} } +func (m *MutateCustomerManagerLinkResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerManagerLinkResponse) ProtoMessage() {} +func (*MutateCustomerManagerLinkResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_service_89a13cddd5698861, []int{3} +} +func (m *MutateCustomerManagerLinkResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerManagerLinkResponse.Unmarshal(m, b) +} +func (m *MutateCustomerManagerLinkResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerManagerLinkResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerManagerLinkResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerManagerLinkResponse.Merge(dst, src) +} +func (m *MutateCustomerManagerLinkResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerManagerLinkResponse.Size(m) +} +func (m *MutateCustomerManagerLinkResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerManagerLinkResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerManagerLinkResponse proto.InternalMessageInfo + +func (m *MutateCustomerManagerLinkResponse) GetResults() []*MutateCustomerManagerLinkResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the customer manager link mutate. +type MutateCustomerManagerLinkResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerManagerLinkResult) Reset() { *m = MutateCustomerManagerLinkResult{} } +func (m *MutateCustomerManagerLinkResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerManagerLinkResult) ProtoMessage() {} +func (*MutateCustomerManagerLinkResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_manager_link_service_89a13cddd5698861, []int{4} +} +func (m *MutateCustomerManagerLinkResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerManagerLinkResult.Unmarshal(m, b) +} +func (m *MutateCustomerManagerLinkResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerManagerLinkResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerManagerLinkResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerManagerLinkResult.Merge(dst, src) +} +func (m *MutateCustomerManagerLinkResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerManagerLinkResult.Size(m) +} +func (m *MutateCustomerManagerLinkResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerManagerLinkResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerManagerLinkResult proto.InternalMessageInfo + +func (m *MutateCustomerManagerLinkResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerManagerLinkRequest)(nil), "google.ads.googleads.v1.services.GetCustomerManagerLinkRequest") + proto.RegisterType((*MutateCustomerManagerLinkRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerManagerLinkRequest") + proto.RegisterType((*CustomerManagerLinkOperation)(nil), "google.ads.googleads.v1.services.CustomerManagerLinkOperation") + proto.RegisterType((*MutateCustomerManagerLinkResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerManagerLinkResponse") + proto.RegisterType((*MutateCustomerManagerLinkResult)(nil), "google.ads.googleads.v1.services.MutateCustomerManagerLinkResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerManagerLinkServiceClient is the client API for CustomerManagerLinkService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerManagerLinkServiceClient interface { + // Returns the requested CustomerManagerLink in full detail. + GetCustomerManagerLink(ctx context.Context, in *GetCustomerManagerLinkRequest, opts ...grpc.CallOption) (*resources.CustomerManagerLink, error) + // Creates or updates customer manager links. Operation statuses are returned. 
+ MutateCustomerManagerLink(ctx context.Context, in *MutateCustomerManagerLinkRequest, opts ...grpc.CallOption) (*MutateCustomerManagerLinkResponse, error) +} + +type customerManagerLinkServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerManagerLinkServiceClient(cc *grpc.ClientConn) CustomerManagerLinkServiceClient { + return &customerManagerLinkServiceClient{cc} +} + +func (c *customerManagerLinkServiceClient) GetCustomerManagerLink(ctx context.Context, in *GetCustomerManagerLinkRequest, opts ...grpc.CallOption) (*resources.CustomerManagerLink, error) { + out := new(resources.CustomerManagerLink) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerManagerLinkService/GetCustomerManagerLink", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerManagerLinkServiceClient) MutateCustomerManagerLink(ctx context.Context, in *MutateCustomerManagerLinkRequest, opts ...grpc.CallOption) (*MutateCustomerManagerLinkResponse, error) { + out := new(MutateCustomerManagerLinkResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerManagerLinkService/MutateCustomerManagerLink", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerManagerLinkServiceServer is the server API for CustomerManagerLinkService service. +type CustomerManagerLinkServiceServer interface { + // Returns the requested CustomerManagerLink in full detail. + GetCustomerManagerLink(context.Context, *GetCustomerManagerLinkRequest) (*resources.CustomerManagerLink, error) + // Creates or updates customer manager links. Operation statuses are returned. + MutateCustomerManagerLink(context.Context, *MutateCustomerManagerLinkRequest) (*MutateCustomerManagerLinkResponse, error) +} + +func RegisterCustomerManagerLinkServiceServer(s *grpc.Server, srv CustomerManagerLinkServiceServer) { + s.RegisterService(&_CustomerManagerLinkService_serviceDesc, srv) +} + +func _CustomerManagerLinkService_GetCustomerManagerLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerManagerLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerManagerLinkServiceServer).GetCustomerManagerLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerManagerLinkService/GetCustomerManagerLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerManagerLinkServiceServer).GetCustomerManagerLink(ctx, req.(*GetCustomerManagerLinkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerManagerLinkService_MutateCustomerManagerLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerManagerLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerManagerLinkServiceServer).MutateCustomerManagerLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerManagerLinkService/MutateCustomerManagerLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerManagerLinkServiceServer).MutateCustomerManagerLink(ctx, req.(*MutateCustomerManagerLinkRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +var _CustomerManagerLinkService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerManagerLinkService", + HandlerType: (*CustomerManagerLinkServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerManagerLink", + Handler: _CustomerManagerLinkService_GetCustomerManagerLink_Handler, + }, + { + MethodName: "MutateCustomerManagerLink", + Handler: _CustomerManagerLinkService_MutateCustomerManagerLink_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_manager_link_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_manager_link_service.proto", fileDescriptor_customer_manager_link_service_89a13cddd5698861) +} + +var fileDescriptor_customer_manager_link_service_89a13cddd5698861 = []byte{ + // 597 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x54, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0x36, 0xa9, 0xac, 0xec, 0x54, 0x2f, 0x73, 0x90, 0x5a, 0x56, 0x1a, 0xe3, 0x1e, 0x4a, 0x0f, + 0x13, 0x5a, 0x61, 0xd1, 0x2c, 0xad, 0xa4, 0x5b, 0xb6, 0x2b, 0x58, 0x2d, 0x11, 0x7a, 0xd0, 0x62, + 0x99, 0x6d, 0x66, 0x43, 0x68, 0x92, 0x89, 0x99, 0x49, 0x2f, 0xcb, 0x82, 0x78, 0xf0, 0x0f, 0xf8, + 0x0f, 0xf4, 0xe6, 0x4f, 0xf0, 0x27, 0x78, 0x13, 0xcf, 0xde, 0x3c, 0xed, 0xaf, 0x90, 0x64, 0x32, + 0xb1, 0x42, 0xd3, 0xca, 0xee, 0xed, 0x65, 0xf2, 0xde, 0xf7, 0xbe, 0xef, 0x9b, 0xf7, 0x06, 0x0c, + 0x5c, 0x4a, 0x5d, 0x9f, 0x18, 0xd8, 0x61, 0x86, 0x08, 0xd3, 0x68, 0xd9, 0x36, 0x18, 0x89, 0x97, + 0xde, 0x9c, 0x30, 0x63, 0x9e, 0x30, 0x4e, 0x03, 0x12, 0xcf, 0x02, 0x1c, 0x62, 0x97, 0xc4, 0x33, + 0xdf, 0x0b, 0x17, 0xb3, 0xfc, 0x37, 0x8a, 0x62, 0xca, 0x29, 0xd4, 0x44, 0x29, 0xc2, 0x0e, 0x43, + 0x05, 0x0a, 0x5a, 0xb6, 0x91, 0x44, 0xa9, 0x77, 0xcb, 0xfa, 0xc4, 0x84, 0xd1, 0x24, 0x2e, 0x6d, + 0x24, 0x1a, 0xd4, 0xf7, 0x64, 0x79, 0xe4, 0x19, 0x38, 0x0c, 0x29, 0xc7, 0xdc, 0xa3, 0x21, 0xcb, + 0xff, 0xe6, 0xed, 0x8d, 0xec, 0xeb, 0x34, 0x39, 0x33, 0xce, 0x3c, 0xe2, 0x3b, 0xb3, 0x00, 0xb3, + 0xbc, 0x5e, 0x1f, 0x80, 0xfb, 0x43, 0xc2, 0x8f, 0xf2, 0x0e, 0x23, 0xd1, 0xe0, 0xb9, 0x17, 0x2e, + 0x6c, 0xf2, 0x2e, 0x21, 0x8c, 0xc3, 0x87, 0xe0, 0x8e, 0x64, 0x32, 0x0b, 0x71, 0x40, 0x6a, 0x8a, + 0xa6, 0x34, 0x77, 0xed, 0xdb, 0xf2, 0xf0, 0x05, 0x0e, 0x88, 0xfe, 0x45, 0x01, 0xda, 0x28, 0xe1, + 0x98, 0x93, 0x0d, 0x48, 0x0d, 0x50, 0x2d, 0x94, 0x78, 0x4e, 0x8e, 0x03, 0xe4, 0xd1, 0x33, 0x07, + 0xbe, 0x05, 0x80, 0x46, 0x24, 0x16, 0x0a, 0x6a, 0xaa, 0x56, 0x69, 0x56, 0x3b, 0x3d, 0xb4, 0xcd, + 0x41, 0xb4, 0xa6, 0xe5, 0x4b, 0x09, 0x63, 0xaf, 0x20, 0xea, 0xdf, 0x14, 0xb0, 0xb7, 0x29, 0x19, + 0x1e, 0x82, 0x6a, 0x12, 0x39, 0x98, 0x93, 0xcc, 0xa1, 0xda, 0x4d, 0x4d, 0x69, 0x56, 0x3b, 0x75, + 0xc9, 0x40, 0x9a, 0x88, 0x8e, 0x53, 0x13, 0x47, 0x98, 0x2d, 0x6c, 0x20, 0xd2, 0xd3, 0x18, 0x8e, + 0xc1, 0x8e, 0xf8, 0xaa, 0xa9, 0x59, 0xdd, 0x41, 0x29, 0xf3, 0xe2, 0x66, 0xd7, 0x51, 0x3f, 0xb9, + 0x61, 0xe7, 0x38, 0xfd, 0x2a, 0xd8, 0x2d, 0xd8, 0xeb, 0xef, 0x15, 0xf0, 0x60, 0x83, 0xc5, 0x2c, + 0xa2, 0x21, 0x23, 0xf0, 0x0d, 0xb8, 0x15, 0x13, 0x96, 0xf8, 0x9c, 0xd5, 0x94, 0xcc, 0x3f, 0x6b, + 0xbb, 0x7f, 0x9b, 0x50, 0x13, 0x9f, 0xdb, 0x12, 0x51, 0x3f, 0x06, 0x8d, 0x2d, 0xb9, 0xff, 0x35, + 0x2d, 0x9d, 0x5f, 0x15, 0x50, 0x5f, 0x03, 0xf1, 0x4a, 0x10, 0x82, 0x3f, 0x14, 0x70, 0x77, 0xfd, + 0x4c, 0xc2, 0xa7, 0xdb, 0xd5, 0x6c, 0x9c, 0xe6, 0xfa, 0x15, 0x2f, 0x45, 0xef, 0x7d, 0xf8, 0xf9, + 0xfb, 0x93, 0xfa, 0x18, 0x1e, 0xa4, 0x9b, 0x79, 0xfe, 0x8f, 
0xc4, 0xae, 0x9c, 0x60, 0x66, 0xb4, + 0x8a, 0x55, 0x5d, 0xa9, 0x65, 0x46, 0xeb, 0x02, 0x5e, 0x2a, 0xe0, 0x5e, 0xa9, 0x77, 0xb0, 0x7f, + 0xad, 0x4b, 0x12, 0xca, 0x8e, 0xae, 0x77, 0xd1, 0xd9, 0xf8, 0xe8, 0x83, 0x4c, 0x66, 0x4f, 0x7f, + 0x92, 0xca, 0xfc, 0xab, 0xeb, 0x7c, 0x65, 0x6f, 0xbb, 0xad, 0x8b, 0xb5, 0x2a, 0xcd, 0x20, 0x43, + 0x37, 0x95, 0x56, 0xff, 0xa3, 0x0a, 0xf6, 0xe7, 0x34, 0xd8, 0x4a, 0xa8, 0xdf, 0x28, 0x9f, 0x82, + 0x71, 0xba, 0x6c, 0x63, 0xe5, 0xf5, 0x49, 0x0e, 0xe2, 0x52, 0x1f, 0x87, 0x2e, 0xa2, 0xb1, 0x6b, + 0xb8, 0x24, 0xcc, 0x56, 0x51, 0x3e, 0x97, 0x91, 0xc7, 0xca, 0x5f, 0xe9, 0x43, 0x19, 0x7c, 0x56, + 0x2b, 0x43, 0xcb, 0xfa, 0xaa, 0x6a, 0x43, 0x01, 0x68, 0x39, 0x0c, 0x89, 0x30, 0x8d, 0x26, 0x6d, + 0x94, 0x37, 0x66, 0xdf, 0x65, 0xca, 0xd4, 0x72, 0xd8, 0xb4, 0x48, 0x99, 0x4e, 0xda, 0x53, 0x99, + 0x72, 0xa9, 0xee, 0x8b, 0x73, 0xd3, 0xb4, 0x1c, 0x66, 0x9a, 0x45, 0x92, 0x69, 0x4e, 0xda, 0xa6, + 0x29, 0xd3, 0x4e, 0x77, 0x32, 0x9e, 0x8f, 0xfe, 0x04, 0x00, 0x00, 0xff, 0xff, 0x5c, 0xb5, 0x81, + 0xa2, 0x4c, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_negative_criterion_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_negative_criterion_service.pb.go new file mode 100644 index 0000000..13e3e27 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_negative_criterion_service.pb.go @@ -0,0 +1,546 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_negative_criterion_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [CustomerNegativeCriterionService.GetCustomerNegativeCriterion][google.ads.googleads.v1.services.CustomerNegativeCriterionService.GetCustomerNegativeCriterion]. +type GetCustomerNegativeCriterionRequest struct { + // The resource name of the criterion to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerNegativeCriterionRequest) Reset() { *m = GetCustomerNegativeCriterionRequest{} } +func (m *GetCustomerNegativeCriterionRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerNegativeCriterionRequest) ProtoMessage() {} +func (*GetCustomerNegativeCriterionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68, []int{0} +} +func (m *GetCustomerNegativeCriterionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerNegativeCriterionRequest.Unmarshal(m, b) +} +func (m *GetCustomerNegativeCriterionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerNegativeCriterionRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerNegativeCriterionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerNegativeCriterionRequest.Merge(dst, src) +} +func (m *GetCustomerNegativeCriterionRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerNegativeCriterionRequest.Size(m) +} +func (m *GetCustomerNegativeCriterionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerNegativeCriterionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerNegativeCriterionRequest proto.InternalMessageInfo + +func (m *GetCustomerNegativeCriterionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [CustomerNegativeCriterionService.MutateCustomerNegativeCriteria][google.ads.googleads.v1.services.CustomerNegativeCriterionService.MutateCustomerNegativeCriteria]. +type MutateCustomerNegativeCriteriaRequest struct { + // The ID of the customer whose criteria are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual criteria. + Operations []*CustomerNegativeCriterionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerNegativeCriteriaRequest) Reset() { *m = MutateCustomerNegativeCriteriaRequest{} } +func (m *MutateCustomerNegativeCriteriaRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerNegativeCriteriaRequest) ProtoMessage() {} +func (*MutateCustomerNegativeCriteriaRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68, []int{1} +} +func (m *MutateCustomerNegativeCriteriaRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerNegativeCriteriaRequest.Unmarshal(m, b) +} +func (m *MutateCustomerNegativeCriteriaRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerNegativeCriteriaRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerNegativeCriteriaRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerNegativeCriteriaRequest.Merge(dst, src) +} +func (m *MutateCustomerNegativeCriteriaRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerNegativeCriteriaRequest.Size(m) +} +func (m *MutateCustomerNegativeCriteriaRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerNegativeCriteriaRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerNegativeCriteriaRequest proto.InternalMessageInfo + +func (m *MutateCustomerNegativeCriteriaRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerNegativeCriteriaRequest) GetOperations() []*CustomerNegativeCriterionOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateCustomerNegativeCriteriaRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateCustomerNegativeCriteriaRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create or remove) on a customer level negative criterion. +type CustomerNegativeCriterionOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *CustomerNegativeCriterionOperation_Create + // *CustomerNegativeCriterionOperation_Remove + Operation isCustomerNegativeCriterionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerNegativeCriterionOperation) Reset() { *m = CustomerNegativeCriterionOperation{} } +func (m *CustomerNegativeCriterionOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerNegativeCriterionOperation) ProtoMessage() {} +func (*CustomerNegativeCriterionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68, []int{2} +} +func (m *CustomerNegativeCriterionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerNegativeCriterionOperation.Unmarshal(m, b) +} +func (m *CustomerNegativeCriterionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerNegativeCriterionOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerNegativeCriterionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerNegativeCriterionOperation.Merge(dst, src) +} +func (m *CustomerNegativeCriterionOperation) XXX_Size() int { + return xxx_messageInfo_CustomerNegativeCriterionOperation.Size(m) +} +func (m *CustomerNegativeCriterionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerNegativeCriterionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerNegativeCriterionOperation proto.InternalMessageInfo + +type isCustomerNegativeCriterionOperation_Operation interface { + isCustomerNegativeCriterionOperation_Operation() +} + +type CustomerNegativeCriterionOperation_Create struct { + Create *resources.CustomerNegativeCriterion `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type CustomerNegativeCriterionOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*CustomerNegativeCriterionOperation_Create) isCustomerNegativeCriterionOperation_Operation() {} + +func (*CustomerNegativeCriterionOperation_Remove) isCustomerNegativeCriterionOperation_Operation() {} + +func (m *CustomerNegativeCriterionOperation) GetOperation() isCustomerNegativeCriterionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CustomerNegativeCriterionOperation) GetCreate() *resources.CustomerNegativeCriterion { + if x, ok := m.GetOperation().(*CustomerNegativeCriterionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *CustomerNegativeCriterionOperation) GetRemove() string { + if x, ok := m.GetOperation().(*CustomerNegativeCriterionOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomerNegativeCriterionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomerNegativeCriterionOperation_OneofMarshaler, _CustomerNegativeCriterionOperation_OneofUnmarshaler, _CustomerNegativeCriterionOperation_OneofSizer, []interface{}{ + (*CustomerNegativeCriterionOperation_Create)(nil), + (*CustomerNegativeCriterionOperation_Remove)(nil), + } +} + +func _CustomerNegativeCriterionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomerNegativeCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerNegativeCriterionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *CustomerNegativeCriterionOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("CustomerNegativeCriterionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _CustomerNegativeCriterionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomerNegativeCriterionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.CustomerNegativeCriterion) + err := b.DecodeMessage(msg) + m.Operation = &CustomerNegativeCriterionOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &CustomerNegativeCriterionOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _CustomerNegativeCriterionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomerNegativeCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *CustomerNegativeCriterionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomerNegativeCriterionOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for customer negative criterion mutate. +type MutateCustomerNegativeCriteriaResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateCustomerNegativeCriteriaResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerNegativeCriteriaResponse) Reset() { + *m = MutateCustomerNegativeCriteriaResponse{} +} +func (m *MutateCustomerNegativeCriteriaResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerNegativeCriteriaResponse) ProtoMessage() {} +func (*MutateCustomerNegativeCriteriaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68, []int{3} +} +func (m *MutateCustomerNegativeCriteriaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResponse.Unmarshal(m, b) +} +func (m *MutateCustomerNegativeCriteriaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerNegativeCriteriaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerNegativeCriteriaResponse.Merge(dst, src) +} +func (m *MutateCustomerNegativeCriteriaResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResponse.Size(m) +} +func (m *MutateCustomerNegativeCriteriaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerNegativeCriteriaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerNegativeCriteriaResponse proto.InternalMessageInfo + +func (m *MutateCustomerNegativeCriteriaResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateCustomerNegativeCriteriaResponse) GetResults() []*MutateCustomerNegativeCriteriaResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the criterion mutate. +type MutateCustomerNegativeCriteriaResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerNegativeCriteriaResult) Reset() { *m = MutateCustomerNegativeCriteriaResult{} } +func (m *MutateCustomerNegativeCriteriaResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerNegativeCriteriaResult) ProtoMessage() {} +func (*MutateCustomerNegativeCriteriaResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68, []int{4} +} +func (m *MutateCustomerNegativeCriteriaResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResult.Unmarshal(m, b) +} +func (m *MutateCustomerNegativeCriteriaResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerNegativeCriteriaResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerNegativeCriteriaResult.Merge(dst, src) +} +func (m *MutateCustomerNegativeCriteriaResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerNegativeCriteriaResult.Size(m) +} +func (m *MutateCustomerNegativeCriteriaResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerNegativeCriteriaResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerNegativeCriteriaResult proto.InternalMessageInfo + +func (m *MutateCustomerNegativeCriteriaResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetCustomerNegativeCriterionRequest)(nil), "google.ads.googleads.v1.services.GetCustomerNegativeCriterionRequest") + proto.RegisterType((*MutateCustomerNegativeCriteriaRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerNegativeCriteriaRequest") + proto.RegisterType((*CustomerNegativeCriterionOperation)(nil), "google.ads.googleads.v1.services.CustomerNegativeCriterionOperation") + proto.RegisterType((*MutateCustomerNegativeCriteriaResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerNegativeCriteriaResponse") + proto.RegisterType((*MutateCustomerNegativeCriteriaResult)(nil), "google.ads.googleads.v1.services.MutateCustomerNegativeCriteriaResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerNegativeCriterionServiceClient is the client API for CustomerNegativeCriterionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerNegativeCriterionServiceClient interface { + // Returns the requested criterion in full detail. + GetCustomerNegativeCriterion(ctx context.Context, in *GetCustomerNegativeCriterionRequest, opts ...grpc.CallOption) (*resources.CustomerNegativeCriterion, error) + // Creates or removes criteria. Operation statuses are returned. 
+ MutateCustomerNegativeCriteria(ctx context.Context, in *MutateCustomerNegativeCriteriaRequest, opts ...grpc.CallOption) (*MutateCustomerNegativeCriteriaResponse, error) +} + +type customerNegativeCriterionServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerNegativeCriterionServiceClient(cc *grpc.ClientConn) CustomerNegativeCriterionServiceClient { + return &customerNegativeCriterionServiceClient{cc} +} + +func (c *customerNegativeCriterionServiceClient) GetCustomerNegativeCriterion(ctx context.Context, in *GetCustomerNegativeCriterionRequest, opts ...grpc.CallOption) (*resources.CustomerNegativeCriterion, error) { + out := new(resources.CustomerNegativeCriterion) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerNegativeCriterionService/GetCustomerNegativeCriterion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerNegativeCriterionServiceClient) MutateCustomerNegativeCriteria(ctx context.Context, in *MutateCustomerNegativeCriteriaRequest, opts ...grpc.CallOption) (*MutateCustomerNegativeCriteriaResponse, error) { + out := new(MutateCustomerNegativeCriteriaResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerNegativeCriterionService/MutateCustomerNegativeCriteria", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerNegativeCriterionServiceServer is the server API for CustomerNegativeCriterionService service. +type CustomerNegativeCriterionServiceServer interface { + // Returns the requested criterion in full detail. + GetCustomerNegativeCriterion(context.Context, *GetCustomerNegativeCriterionRequest) (*resources.CustomerNegativeCriterion, error) + // Creates or removes criteria. Operation statuses are returned. 
+ MutateCustomerNegativeCriteria(context.Context, *MutateCustomerNegativeCriteriaRequest) (*MutateCustomerNegativeCriteriaResponse, error) +} + +func RegisterCustomerNegativeCriterionServiceServer(s *grpc.Server, srv CustomerNegativeCriterionServiceServer) { + s.RegisterService(&_CustomerNegativeCriterionService_serviceDesc, srv) +} + +func _CustomerNegativeCriterionService_GetCustomerNegativeCriterion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerNegativeCriterionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerNegativeCriterionServiceServer).GetCustomerNegativeCriterion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerNegativeCriterionService/GetCustomerNegativeCriterion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerNegativeCriterionServiceServer).GetCustomerNegativeCriterion(ctx, req.(*GetCustomerNegativeCriterionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerNegativeCriterionService_MutateCustomerNegativeCriteria_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerNegativeCriteriaRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerNegativeCriterionServiceServer).MutateCustomerNegativeCriteria(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerNegativeCriterionService/MutateCustomerNegativeCriteria", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerNegativeCriterionServiceServer).MutateCustomerNegativeCriteria(ctx, req.(*MutateCustomerNegativeCriteriaRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerNegativeCriterionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerNegativeCriterionService", + HandlerType: (*CustomerNegativeCriterionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomerNegativeCriterion", + Handler: _CustomerNegativeCriterionService_GetCustomerNegativeCriterion_Handler, + }, + { + MethodName: "MutateCustomerNegativeCriteria", + Handler: _CustomerNegativeCriterionService_MutateCustomerNegativeCriteria_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_negative_criterion_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_negative_criterion_service.proto", fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68) +} + +var fileDescriptor_customer_negative_criterion_service_bef57624ec14ad68 = []byte{ + // 670 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x4f, 0x6b, 0xd4, 0x40, + 0x14, 0x37, 0x69, 0xa9, 0x76, 0xb6, 0x2a, 0x8c, 0x88, 0x4b, 0x29, 0x75, 0x49, 0x5b, 0x2d, 0x7b, + 0x48, 0xd8, 0xf5, 0x16, 0xdb, 0xc3, 0x6e, 0x6d, 0x77, 0xad, 0xd8, 0x96, 0x14, 0xf6, 0x20, 0x0b, + 0x71, 0x9a, 0xbc, 0x86, 0x40, 0x36, 0x13, 0x67, 0x26, 0x91, 0x52, 0x7a, 0xf1, 0xdc, 0x9b, 0xdf, + 0x40, 0x6f, 0x7e, 0x14, 0x8f, 0xfa, 0x15, 0x04, 0xc1, 0xcf, 0xa0, 0x20, 0xf9, 0x33, 0xb1, 0x2d, + 
0xa4, 0x59, 0xa8, 0xb7, 0x97, 0xf7, 0x5e, 0x7e, 0xef, 0xfd, 0x7e, 0xf3, 0xe6, 0x0d, 0xda, 0xf5, + 0x28, 0xf5, 0x02, 0x30, 0x88, 0xcb, 0x8d, 0xdc, 0x4c, 0xad, 0xa4, 0x63, 0x70, 0x60, 0x89, 0xef, + 0x00, 0x37, 0x9c, 0x98, 0x0b, 0x3a, 0x01, 0x66, 0x87, 0xe0, 0x11, 0xe1, 0x27, 0x60, 0x3b, 0xcc, + 0x17, 0xc0, 0x7c, 0x1a, 0xda, 0x45, 0x92, 0x1e, 0x31, 0x2a, 0x28, 0x6e, 0xe5, 0x00, 0x3a, 0x71, + 0xb9, 0x5e, 0x62, 0xe9, 0x49, 0x47, 0x97, 0x58, 0x8b, 0x5b, 0x55, 0xd5, 0x18, 0x70, 0x1a, 0xb3, + 0x9a, 0x72, 0x79, 0x99, 0xc5, 0x25, 0x09, 0x12, 0xf9, 0x06, 0x09, 0x43, 0x2a, 0x88, 0xf0, 0x69, + 0xc8, 0x8b, 0xe8, 0x72, 0x11, 0xcd, 0xbe, 0x8e, 0xe2, 0x63, 0xe3, 0x3d, 0x23, 0x51, 0x04, 0x4c, + 0xc6, 0x1f, 0x15, 0x71, 0x16, 0x39, 0x06, 0x17, 0x44, 0xc4, 0x45, 0x40, 0xdb, 0x45, 0x2b, 0x03, + 0x10, 0x5b, 0x45, 0xf9, 0xbd, 0xa2, 0xfa, 0x96, 0x2c, 0x6e, 0xc1, 0xbb, 0x18, 0xb8, 0xc0, 0x2b, + 0xe8, 0xae, 0x6c, 0xd6, 0x0e, 0xc9, 0x04, 0x9a, 0x4a, 0x4b, 0x59, 0x9f, 0xb7, 0x16, 0xa4, 0x73, + 0x8f, 0x4c, 0x40, 0xfb, 0xad, 0xa0, 0xb5, 0xd7, 0xb1, 0x20, 0x02, 0x2a, 0xf0, 0x88, 0x84, 0x7b, + 0x8c, 0x1a, 0x25, 0x63, 0xdf, 0x2d, 0xc0, 0x90, 0x74, 0xbd, 0x74, 0xb1, 0x8b, 0x10, 0x8d, 0x80, + 0xe5, 0x1c, 0x9b, 0x6a, 0x6b, 0x66, 0xbd, 0xd1, 0x7d, 0xa1, 0xd7, 0x29, 0xad, 0x57, 0xf2, 0xd8, + 0x97, 0x60, 0xd6, 0x05, 0x5c, 0xfc, 0x14, 0xdd, 0x8f, 0x08, 0x13, 0x3e, 0x09, 0xec, 0x63, 0xe2, + 0x07, 0x31, 0x83, 0xe6, 0x4c, 0x4b, 0x59, 0xbf, 0x63, 0xdd, 0x2b, 0xdc, 0x3b, 0xb9, 0x37, 0xa5, + 0x9f, 0x90, 0xc0, 0x77, 0x89, 0x00, 0x9b, 0x86, 0xc1, 0x49, 0x73, 0x36, 0x4b, 0x5b, 0x90, 0xce, + 0xfd, 0x30, 0x38, 0xd1, 0x3e, 0x2b, 0x48, 0xab, 0x6f, 0x00, 0x8f, 0xd0, 0x9c, 0xc3, 0x80, 0x88, + 0x5c, 0xc3, 0x46, 0x77, 0xa3, 0x92, 0x56, 0x39, 0x1e, 0xd5, 0xbc, 0x86, 0xb7, 0xac, 0x02, 0x0d, + 0x37, 0xd1, 0x1c, 0x83, 0x09, 0x4d, 0xa0, 0xa9, 0xa6, 0x72, 0xa6, 0x91, 0xfc, 0xbb, 0xdf, 0x40, + 0xf3, 0x25, 0x69, 0xed, 0x9b, 0x82, 0x9e, 0xd4, 0x1d, 0x12, 0x8f, 0x68, 0xc8, 0x01, 0xef, 0xa0, + 0x87, 0x57, 0xe4, 0xb1, 0x81, 0x31, 0xca, 0x32, 0x91, 0x1a, 0x5d, 0x2c, 0x1b, 0x67, 0x91, 0xa3, + 0x1f, 0x66, 0x43, 0x65, 0x3d, 0xb8, 0x2c, 0xdc, 0x76, 0x9a, 0x8e, 0xdf, 0xa2, 0xdb, 0x0c, 0x78, + 0x1c, 0x08, 0x79, 0x92, 0x3b, 0xf5, 0x27, 0x59, 0xdb, 0x62, 0x1c, 0x08, 0x4b, 0xc2, 0x6a, 0xaf, + 0xd0, 0xea, 0x34, 0x3f, 0x4c, 0x35, 0xc6, 0xdd, 0xf3, 0x59, 0xd4, 0xaa, 0x14, 0xfc, 0x30, 0x6f, + 0x10, 0xff, 0x54, 0xd0, 0xd2, 0x75, 0x17, 0x07, 0x6f, 0xd7, 0x73, 0x9c, 0xe2, 0xe2, 0x2d, 0xde, + 0x68, 0x3a, 0xb4, 0xfe, 0x87, 0xef, 0x3f, 0x3e, 0xaa, 0x1b, 0xd8, 0x4c, 0xb7, 0xcd, 0xe9, 0x25, + 0xea, 0x9b, 0xf2, 0xb6, 0x71, 0xa3, 0x5d, 0xae, 0x9f, 0xab, 0xb2, 0x19, 0xed, 0x33, 0xfc, 0x47, + 0x41, 0xcb, 0xd7, 0x8b, 0x8b, 0x07, 0x37, 0x3f, 0xcf, 0x9c, 0xed, 0xf0, 0x3f, 0x0c, 0x46, 0x36, + 0xbb, 0xda, 0x30, 0x63, 0xde, 0xd7, 0x36, 0x53, 0xe6, 0xff, 0xa8, 0x9e, 0x5e, 0x58, 0x3b, 0x9b, + 0xed, 0xb3, 0x4a, 0xe2, 0xe6, 0x24, 0x2b, 0x63, 0x2a, 0xed, 0xfe, 0xb9, 0x8a, 0x56, 0x1d, 0x3a, + 0xa9, 0xed, 0xac, 0xbf, 0x56, 0x37, 0x34, 0x07, 0xe9, 0xc6, 0x3d, 0x50, 0xde, 0x0c, 0x0b, 0x28, + 0x8f, 0x06, 0x24, 0xf4, 0x74, 0xca, 0x3c, 0xc3, 0x83, 0x30, 0xdb, 0xc7, 0xf2, 0x7d, 0x88, 0x7c, + 0x5e, 0xfd, 0x38, 0x3d, 0x97, 0xc6, 0x27, 0x75, 0x66, 0xd0, 0xeb, 0x7d, 0x51, 0x5b, 0x83, 0x1c, + 0xb0, 0xe7, 0x72, 0x3d, 0x37, 0x53, 0x6b, 0xd4, 0xd1, 0x8b, 0xc2, 0xfc, 0xab, 0x4c, 0x19, 0xf7, + 0x5c, 0x3e, 0x2e, 0x53, 0xc6, 0xa3, 0xce, 0x58, 0xa6, 0xfc, 0x52, 0x57, 0x73, 0xbf, 0x69, 0xf6, + 0x5c, 0x6e, 0x9a, 0x65, 0x92, 0x69, 0x8e, 0x3a, 0xa6, 0x29, 0xd3, 0x8e, 0xe6, 0xb2, 0x3e, 0x9f, + 0xfd, 0x0d, 0x00, 0x00, 
0xff, 0xff, 0x87, 0xdb, 0xa3, 0xde, 0x43, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_service.pb.go new file mode 100644 index 0000000..dfec96c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/customer_service.pb.go @@ -0,0 +1,666 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/customer_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [CustomerService.GetCustomer][google.ads.googleads.v1.services.CustomerService.GetCustomer]. +type GetCustomerRequest struct { + // The resource name of the customer to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCustomerRequest) Reset() { *m = GetCustomerRequest{} } +func (m *GetCustomerRequest) String() string { return proto.CompactTextString(m) } +func (*GetCustomerRequest) ProtoMessage() {} +func (*GetCustomerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{0} +} +func (m *GetCustomerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCustomerRequest.Unmarshal(m, b) +} +func (m *GetCustomerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCustomerRequest.Marshal(b, m, deterministic) +} +func (dst *GetCustomerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCustomerRequest.Merge(dst, src) +} +func (m *GetCustomerRequest) XXX_Size() int { + return xxx_messageInfo_GetCustomerRequest.Size(m) +} +func (m *GetCustomerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCustomerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCustomerRequest proto.InternalMessageInfo + +func (m *GetCustomerRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerService.MutateCustomer][google.ads.googleads.v1.services.CustomerService.MutateCustomer]. +type MutateCustomerRequest struct { + // The ID of the customer being modified. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The operation to perform on the customer + Operation *CustomerOperation `protobuf:"bytes,4,opt,name=operation,proto3" json:"operation,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,5,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerRequest) Reset() { *m = MutateCustomerRequest{} } +func (m *MutateCustomerRequest) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerRequest) ProtoMessage() {} +func (*MutateCustomerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{1} +} +func (m *MutateCustomerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerRequest.Unmarshal(m, b) +} +func (m *MutateCustomerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerRequest.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerRequest.Merge(dst, src) +} +func (m *MutateCustomerRequest) XXX_Size() int { + return xxx_messageInfo_MutateCustomerRequest.Size(m) +} +func (m *MutateCustomerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerRequest proto.InternalMessageInfo + +func (m *MutateCustomerRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateCustomerRequest) GetOperation() *CustomerOperation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *MutateCustomerRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// Request message for [CustomerService.CreateCustomerClient][google.ads.googleads.v1.services.CustomerService.CreateCustomerClient]. +type CreateCustomerClientRequest struct { + // The ID of the Manager under whom client customer is being created. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The new client customer to create. The resource name on this customer + // will be ignored. 
+ CustomerClient *resources.Customer `protobuf:"bytes,2,opt,name=customer_client,json=customerClient,proto3" json:"customer_client,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCustomerClientRequest) Reset() { *m = CreateCustomerClientRequest{} } +func (m *CreateCustomerClientRequest) String() string { return proto.CompactTextString(m) } +func (*CreateCustomerClientRequest) ProtoMessage() {} +func (*CreateCustomerClientRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{2} +} +func (m *CreateCustomerClientRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCustomerClientRequest.Unmarshal(m, b) +} +func (m *CreateCustomerClientRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCustomerClientRequest.Marshal(b, m, deterministic) +} +func (dst *CreateCustomerClientRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCustomerClientRequest.Merge(dst, src) +} +func (m *CreateCustomerClientRequest) XXX_Size() int { + return xxx_messageInfo_CreateCustomerClientRequest.Size(m) +} +func (m *CreateCustomerClientRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCustomerClientRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCustomerClientRequest proto.InternalMessageInfo + +func (m *CreateCustomerClientRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *CreateCustomerClientRequest) GetCustomerClient() *resources.Customer { + if m != nil { + return m.CustomerClient + } + return nil +} + +// A single update on a customer. +type CustomerOperation struct { + // Mutate operation. Only updates are supported for customer. + Update *resources.Customer `protobuf:"bytes,1,opt,name=update,proto3" json:"update,omitempty"` + // FieldMask that determines which resource fields are modified in an update. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerOperation) Reset() { *m = CustomerOperation{} } +func (m *CustomerOperation) String() string { return proto.CompactTextString(m) } +func (*CustomerOperation) ProtoMessage() {} +func (*CustomerOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{3} +} +func (m *CustomerOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerOperation.Unmarshal(m, b) +} +func (m *CustomerOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerOperation.Marshal(b, m, deterministic) +} +func (dst *CustomerOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerOperation.Merge(dst, src) +} +func (m *CustomerOperation) XXX_Size() int { + return xxx_messageInfo_CustomerOperation.Size(m) +} +func (m *CustomerOperation) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerOperation proto.InternalMessageInfo + +func (m *CustomerOperation) GetUpdate() *resources.Customer { + if m != nil { + return m.Update + } + return nil +} + +func (m *CustomerOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Response message for CreateCustomerClient mutate. +type CreateCustomerClientResponse struct { + // The resource name of the newly created customer client. + ResourceName string `protobuf:"bytes,2,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCustomerClientResponse) Reset() { *m = CreateCustomerClientResponse{} } +func (m *CreateCustomerClientResponse) String() string { return proto.CompactTextString(m) } +func (*CreateCustomerClientResponse) ProtoMessage() {} +func (*CreateCustomerClientResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{4} +} +func (m *CreateCustomerClientResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCustomerClientResponse.Unmarshal(m, b) +} +func (m *CreateCustomerClientResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCustomerClientResponse.Marshal(b, m, deterministic) +} +func (dst *CreateCustomerClientResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCustomerClientResponse.Merge(dst, src) +} +func (m *CreateCustomerClientResponse) XXX_Size() int { + return xxx_messageInfo_CreateCustomerClientResponse.Size(m) +} +func (m *CreateCustomerClientResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCustomerClientResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCustomerClientResponse proto.InternalMessageInfo + +func (m *CreateCustomerClientResponse) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Response message for customer mutate. +type MutateCustomerResponse struct { + // Result for the mutate. 
+ Result *MutateCustomerResult `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerResponse) Reset() { *m = MutateCustomerResponse{} } +func (m *MutateCustomerResponse) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerResponse) ProtoMessage() {} +func (*MutateCustomerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{5} +} +func (m *MutateCustomerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerResponse.Unmarshal(m, b) +} +func (m *MutateCustomerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerResponse.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerResponse.Merge(dst, src) +} +func (m *MutateCustomerResponse) XXX_Size() int { + return xxx_messageInfo_MutateCustomerResponse.Size(m) +} +func (m *MutateCustomerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerResponse proto.InternalMessageInfo + +func (m *MutateCustomerResponse) GetResult() *MutateCustomerResult { + if m != nil { + return m.Result + } + return nil +} + +// The result for the customer mutate. +type MutateCustomerResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateCustomerResult) Reset() { *m = MutateCustomerResult{} } +func (m *MutateCustomerResult) String() string { return proto.CompactTextString(m) } +func (*MutateCustomerResult) ProtoMessage() {} +func (*MutateCustomerResult) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{6} +} +func (m *MutateCustomerResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateCustomerResult.Unmarshal(m, b) +} +func (m *MutateCustomerResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateCustomerResult.Marshal(b, m, deterministic) +} +func (dst *MutateCustomerResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateCustomerResult.Merge(dst, src) +} +func (m *MutateCustomerResult) XXX_Size() int { + return xxx_messageInfo_MutateCustomerResult.Size(m) +} +func (m *MutateCustomerResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateCustomerResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateCustomerResult proto.InternalMessageInfo + +func (m *MutateCustomerResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [CustomerService.ListAccessibleCustomers][google.ads.googleads.v1.services.CustomerService.ListAccessibleCustomers]. 
+type ListAccessibleCustomersRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAccessibleCustomersRequest) Reset() { *m = ListAccessibleCustomersRequest{} } +func (m *ListAccessibleCustomersRequest) String() string { return proto.CompactTextString(m) } +func (*ListAccessibleCustomersRequest) ProtoMessage() {} +func (*ListAccessibleCustomersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{7} +} +func (m *ListAccessibleCustomersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAccessibleCustomersRequest.Unmarshal(m, b) +} +func (m *ListAccessibleCustomersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAccessibleCustomersRequest.Marshal(b, m, deterministic) +} +func (dst *ListAccessibleCustomersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAccessibleCustomersRequest.Merge(dst, src) +} +func (m *ListAccessibleCustomersRequest) XXX_Size() int { + return xxx_messageInfo_ListAccessibleCustomersRequest.Size(m) +} +func (m *ListAccessibleCustomersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAccessibleCustomersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAccessibleCustomersRequest proto.InternalMessageInfo + +// Response message for [CustomerService.ListAccessibleCustomers][google.ads.googleads.v1.services.CustomerService.ListAccessibleCustomers]. +type ListAccessibleCustomersResponse struct { + // Resource name of customers directly accessible by the + // user authenticating the call. + ResourceNames []string `protobuf:"bytes,1,rep,name=resource_names,json=resourceNames,proto3" json:"resource_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAccessibleCustomersResponse) Reset() { *m = ListAccessibleCustomersResponse{} } +func (m *ListAccessibleCustomersResponse) String() string { return proto.CompactTextString(m) } +func (*ListAccessibleCustomersResponse) ProtoMessage() {} +func (*ListAccessibleCustomersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_customer_service_2bf00998210897da, []int{8} +} +func (m *ListAccessibleCustomersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAccessibleCustomersResponse.Unmarshal(m, b) +} +func (m *ListAccessibleCustomersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAccessibleCustomersResponse.Marshal(b, m, deterministic) +} +func (dst *ListAccessibleCustomersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAccessibleCustomersResponse.Merge(dst, src) +} +func (m *ListAccessibleCustomersResponse) XXX_Size() int { + return xxx_messageInfo_ListAccessibleCustomersResponse.Size(m) +} +func (m *ListAccessibleCustomersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAccessibleCustomersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAccessibleCustomersResponse proto.InternalMessageInfo + +func (m *ListAccessibleCustomersResponse) GetResourceNames() []string { + if m != nil { + return m.ResourceNames + } + return nil +} + +func init() { + proto.RegisterType((*GetCustomerRequest)(nil), "google.ads.googleads.v1.services.GetCustomerRequest") + proto.RegisterType((*MutateCustomerRequest)(nil), "google.ads.googleads.v1.services.MutateCustomerRequest") + 
proto.RegisterType((*CreateCustomerClientRequest)(nil), "google.ads.googleads.v1.services.CreateCustomerClientRequest") + proto.RegisterType((*CustomerOperation)(nil), "google.ads.googleads.v1.services.CustomerOperation") + proto.RegisterType((*CreateCustomerClientResponse)(nil), "google.ads.googleads.v1.services.CreateCustomerClientResponse") + proto.RegisterType((*MutateCustomerResponse)(nil), "google.ads.googleads.v1.services.MutateCustomerResponse") + proto.RegisterType((*MutateCustomerResult)(nil), "google.ads.googleads.v1.services.MutateCustomerResult") + proto.RegisterType((*ListAccessibleCustomersRequest)(nil), "google.ads.googleads.v1.services.ListAccessibleCustomersRequest") + proto.RegisterType((*ListAccessibleCustomersResponse)(nil), "google.ads.googleads.v1.services.ListAccessibleCustomersResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CustomerServiceClient is the client API for CustomerService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CustomerServiceClient interface { + // Returns the requested customer in full detail. + GetCustomer(ctx context.Context, in *GetCustomerRequest, opts ...grpc.CallOption) (*resources.Customer, error) + // Updates a customer. Operation statuses are returned. + MutateCustomer(ctx context.Context, in *MutateCustomerRequest, opts ...grpc.CallOption) (*MutateCustomerResponse, error) + // Returns resource names of customers directly accessible by the + // user authenticating the call. + ListAccessibleCustomers(ctx context.Context, in *ListAccessibleCustomersRequest, opts ...grpc.CallOption) (*ListAccessibleCustomersResponse, error) + // Creates a new client under manager. The new client customer is returned. + CreateCustomerClient(ctx context.Context, in *CreateCustomerClientRequest, opts ...grpc.CallOption) (*CreateCustomerClientResponse, error) +} + +type customerServiceClient struct { + cc *grpc.ClientConn +} + +func NewCustomerServiceClient(cc *grpc.ClientConn) CustomerServiceClient { + return &customerServiceClient{cc} +} + +func (c *customerServiceClient) GetCustomer(ctx context.Context, in *GetCustomerRequest, opts ...grpc.CallOption) (*resources.Customer, error) { + out := new(resources.Customer) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerService/GetCustomer", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerServiceClient) MutateCustomer(ctx context.Context, in *MutateCustomerRequest, opts ...grpc.CallOption) (*MutateCustomerResponse, error) { + out := new(MutateCustomerResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerService/MutateCustomer", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerServiceClient) ListAccessibleCustomers(ctx context.Context, in *ListAccessibleCustomersRequest, opts ...grpc.CallOption) (*ListAccessibleCustomersResponse, error) { + out := new(ListAccessibleCustomersResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerService/ListAccessibleCustomers", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *customerServiceClient) CreateCustomerClient(ctx context.Context, in *CreateCustomerClientRequest, opts ...grpc.CallOption) (*CreateCustomerClientResponse, error) { + out := new(CreateCustomerClientResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.CustomerService/CreateCustomerClient", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CustomerServiceServer is the server API for CustomerService service. +type CustomerServiceServer interface { + // Returns the requested customer in full detail. + GetCustomer(context.Context, *GetCustomerRequest) (*resources.Customer, error) + // Updates a customer. Operation statuses are returned. + MutateCustomer(context.Context, *MutateCustomerRequest) (*MutateCustomerResponse, error) + // Returns resource names of customers directly accessible by the + // user authenticating the call. + ListAccessibleCustomers(context.Context, *ListAccessibleCustomersRequest) (*ListAccessibleCustomersResponse, error) + // Creates a new client under manager. The new client customer is returned. + CreateCustomerClient(context.Context, *CreateCustomerClientRequest) (*CreateCustomerClientResponse, error) +} + +func RegisterCustomerServiceServer(s *grpc.Server, srv CustomerServiceServer) { + s.RegisterService(&_CustomerService_serviceDesc, srv) +} + +func _CustomerService_GetCustomer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCustomerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerServiceServer).GetCustomer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerService/GetCustomer", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerServiceServer).GetCustomer(ctx, req.(*GetCustomerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerService_MutateCustomer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateCustomerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerServiceServer).MutateCustomer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerService/MutateCustomer", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerServiceServer).MutateCustomer(ctx, req.(*MutateCustomerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CustomerService_ListAccessibleCustomers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAccessibleCustomersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerServiceServer).ListAccessibleCustomers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerService/ListAccessibleCustomers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerServiceServer).ListAccessibleCustomers(ctx, req.(*ListAccessibleCustomersRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +func _CustomerService_CreateCustomerClient_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCustomerClientRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CustomerServiceServer).CreateCustomerClient(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.CustomerService/CreateCustomerClient", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CustomerServiceServer).CreateCustomerClient(ctx, req.(*CreateCustomerClientRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CustomerService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.CustomerService", + HandlerType: (*CustomerServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetCustomer", + Handler: _CustomerService_GetCustomer_Handler, + }, + { + MethodName: "MutateCustomer", + Handler: _CustomerService_MutateCustomer_Handler, + }, + { + MethodName: "ListAccessibleCustomers", + Handler: _CustomerService_ListAccessibleCustomers_Handler, + }, + { + MethodName: "CreateCustomerClient", + Handler: _CustomerService_CreateCustomerClient_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/customer_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/customer_service.proto", fileDescriptor_customer_service_2bf00998210897da) +} + +var fileDescriptor_customer_service_2bf00998210897da = []byte{ + // 746 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0xd5, 0xa4, 0xdf, 0x17, 0xd1, 0x49, 0x7f, 0xc4, 0xa8, 0x40, 0x14, 0xaa, 0x36, 0x98, 0x56, + 0x0d, 0x41, 0xd8, 0xa4, 0x45, 0x14, 0x5c, 0x05, 0xe1, 0x46, 0xa2, 0x45, 0xa2, 0x3f, 0x18, 0xd4, + 0x05, 0x8a, 0x14, 0xb9, 0xf6, 0x34, 0x58, 0x75, 0x3c, 0xc6, 0x63, 0x07, 0x55, 0x55, 0x37, 0x6c, + 0x59, 0x02, 0x0b, 0xb6, 0x2c, 0x59, 0xb1, 0xe0, 0x29, 0x90, 0x58, 0xc1, 0x23, 0xb0, 0xe2, 0x15, + 0xd8, 0x20, 0x8f, 0x67, 0xdc, 0xfc, 0xb9, 0x69, 0xcb, 0xee, 0x66, 0x72, 0xcf, 0xb9, 0xe7, 0xde, + 0x39, 0x73, 0x0d, 0x97, 0x9b, 0x84, 0x34, 0x1d, 0xac, 0x18, 0x16, 0x55, 0xe2, 0x30, 0x8a, 0xda, + 0x15, 0x85, 0x62, 0xbf, 0x6d, 0x9b, 0x98, 0x2a, 0x66, 0x48, 0x03, 0xd2, 0xc2, 0x7e, 0x83, 0x9f, + 0xc8, 0x9e, 0x4f, 0x02, 0x82, 0x8a, 0x71, 0xb6, 0x6c, 0x58, 0x54, 0x4e, 0x80, 0x72, 0xbb, 0x22, + 0x0b, 0x60, 0xe1, 0x76, 0x1a, 0xb5, 0x8f, 0x29, 0x09, 0xfd, 0x4e, 0xee, 0x98, 0xb3, 0x30, 0x2d, + 0x10, 0x9e, 0xad, 0x18, 0xae, 0x4b, 0x02, 0x23, 0xb0, 0x89, 0x4b, 0xf9, 0xbf, 0xbc, 0xa2, 0xc2, + 0x7e, 0xed, 0x86, 0x7b, 0xca, 0x9e, 0x8d, 0x1d, 0xab, 0xd1, 0x32, 0xe8, 0x3e, 0xcf, 0x98, 0xe9, + 0xcd, 0x78, 0xed, 0x1b, 0x9e, 0x87, 0x7d, 0xce, 0x20, 0xdd, 0x87, 0x68, 0x0d, 0x07, 0x35, 0x5e, + 0x54, 0xc7, 0xaf, 0x42, 0x4c, 0x03, 0x74, 0x1d, 0x8e, 0x0b, 0x45, 0x0d, 0xd7, 0x68, 0xe1, 0x3c, + 0x28, 0x82, 0xd2, 0xa8, 0x3e, 0x26, 0x0e, 0x37, 0x8d, 0x16, 0x96, 0xbe, 0x00, 0x78, 0x69, 0x23, + 0x0c, 0x8c, 0x00, 0xf7, 0xc2, 0x67, 0x61, 0x2e, 0x19, 0x91, 0x6d, 0x71, 0x30, 0x14, 0x47, 0x8f, + 0x2d, 0xf4, 0x14, 0x8e, 0x12, 0x0f, 0xfb, 0xac, 0x97, 0xfc, 0x7f, 0x45, 0x50, 0xca, 0x2d, 0x2e, + 0xc9, 0xc3, 0xa6, 0x27, 0x8b, 0x32, 0x5b, 0x02, 0xaa, 0x1f, 0xb3, 0x44, 0x92, 0xdb, 0x86, 0x63, + 0x5b, 0x46, 0x80, 0x1b, 0xc4, 
0x75, 0x0e, 0xf2, 0xff, 0x17, 0x41, 0xe9, 0x82, 0x3e, 0x26, 0x0e, + 0xb7, 0x5c, 0xe7, 0x40, 0x7a, 0x0f, 0xe0, 0xd5, 0x9a, 0x8f, 0x3b, 0x24, 0xd7, 0x1c, 0x1b, 0xbb, + 0xc1, 0xa9, 0x85, 0x3f, 0x87, 0x93, 0x49, 0x82, 0xc9, 0xa0, 0xf9, 0x0c, 0x93, 0x7f, 0x33, 0x55, + 0x7e, 0x72, 0xb5, 0x89, 0x7e, 0x7d, 0xc2, 0xec, 0xaa, 0x2e, 0x7d, 0x00, 0xf0, 0x62, 0x5f, 0x73, + 0xa8, 0x06, 0xb3, 0xa1, 0x17, 0x49, 0x67, 0x3a, 0xce, 0x58, 0x82, 0x43, 0xd1, 0x0a, 0xcc, 0xc5, + 0x11, 0x33, 0x05, 0x17, 0x5b, 0x10, 0x4c, 0xc2, 0x15, 0xf2, 0xa3, 0xc8, 0x37, 0x1b, 0x06, 0xdd, + 0xd7, 0x61, 0x9c, 0x1e, 0xc5, 0x52, 0x0d, 0x4e, 0x0f, 0x9e, 0x16, 0xf5, 0x88, 0x4b, 0x71, 0xbf, + 0x4d, 0x32, 0x03, 0x6c, 0xf2, 0x12, 0x5e, 0xee, 0x75, 0x09, 0x87, 0x6f, 0xc2, 0xac, 0x8f, 0x69, + 0xe8, 0x88, 0x19, 0xde, 0x1d, 0x6e, 0x81, 0x3e, 0xa6, 0xd0, 0x09, 0x74, 0xce, 0x22, 0xad, 0xc0, + 0xa9, 0x41, 0xff, 0x9f, 0xce, 0xcd, 0x45, 0x38, 0xf3, 0xc4, 0xa6, 0x81, 0x66, 0x9a, 0x98, 0x52, + 0x7b, 0xd7, 0x49, 0x48, 0x28, 0x37, 0x87, 0xb4, 0x0e, 0x67, 0x53, 0x33, 0x78, 0x47, 0xf3, 0x70, + 0xa2, 0xab, 0x12, 0xcd, 0x83, 0xe2, 0x48, 0x69, 0x54, 0x1f, 0xef, 0x2c, 0x45, 0x17, 0xdf, 0x66, + 0xe1, 0xa4, 0x00, 0x3f, 0x8b, 0x5b, 0x43, 0x1f, 0x01, 0xcc, 0x75, 0xbc, 0x44, 0x74, 0x67, 0xf8, + 0x30, 0xfa, 0x1f, 0x6e, 0xe1, 0x2c, 0x1e, 0x91, 0x16, 0xde, 0xfc, 0xf8, 0xf5, 0x2e, 0x73, 0x0d, + 0xcd, 0x46, 0x1b, 0xe8, 0xb0, 0x4b, 0x78, 0x55, 0x38, 0x94, 0x2a, 0xe5, 0x23, 0xf4, 0x15, 0xc0, + 0x89, 0xee, 0xc9, 0xa2, 0xe5, 0xb3, 0xdf, 0x55, 0xac, 0xf0, 0xde, 0x39, 0x2e, 0x99, 0x0d, 0x57, + 0x52, 0x98, 0xdc, 0x1b, 0xd2, 0x5c, 0x24, 0xf7, 0x58, 0xdf, 0x61, 0xc7, 0x8b, 0xad, 0x96, 0x8f, + 0xd4, 0x16, 0x43, 0xab, 0xa0, 0x8c, 0xbe, 0x03, 0x78, 0x25, 0xe5, 0xc6, 0xd0, 0xc3, 0xe1, 0x32, + 0x4e, 0xb6, 0x43, 0x41, 0xfb, 0x07, 0x06, 0xde, 0xd1, 0x2d, 0xd6, 0xd1, 0x02, 0x9a, 0xef, 0xea, + 0x48, 0x75, 0x52, 0x34, 0xff, 0x04, 0x70, 0x6a, 0xd0, 0x7b, 0x44, 0xd5, 0x53, 0xec, 0xce, 0xf4, + 0xad, 0x57, 0x78, 0x70, 0x5e, 0x38, 0x6f, 0xa3, 0xca, 0xda, 0x58, 0x96, 0x16, 0x4f, 0xbe, 0x18, + 0x73, 0x00, 0x87, 0x0a, 0xca, 0xab, 0x7f, 0x00, 0x9c, 0x33, 0x49, 0x6b, 0xa8, 0x88, 0xd5, 0xa9, + 0x9e, 0x37, 0xb3, 0x1d, 0x6d, 0xaf, 0x6d, 0xf0, 0x62, 0x9d, 0x23, 0x9b, 0xc4, 0x31, 0xdc, 0xa6, + 0x4c, 0xfc, 0xa6, 0xd2, 0xc4, 0x2e, 0xdb, 0x6d, 0xe2, 0x2b, 0xeb, 0xd9, 0x34, 0xfd, 0x7b, 0xbe, + 0x22, 0x82, 0x4f, 0x99, 0x91, 0x35, 0x4d, 0xfb, 0x9c, 0x29, 0xae, 0xc5, 0x84, 0x9a, 0x45, 0xe5, + 0x38, 0x8c, 0xa2, 0x9d, 0x8a, 0xcc, 0x0b, 0xd3, 0x6f, 0x22, 0xa5, 0xae, 0x59, 0xb4, 0x9e, 0xa4, + 0xd4, 0x77, 0x2a, 0x75, 0x91, 0xf2, 0x3b, 0x33, 0x17, 0x9f, 0xab, 0xaa, 0x66, 0x51, 0x55, 0x4d, + 0x92, 0x54, 0x75, 0xa7, 0xa2, 0xaa, 0x22, 0x6d, 0x37, 0xcb, 0x74, 0x2e, 0xfd, 0x0d, 0x00, 0x00, + 0xff, 0xff, 0x3c, 0x0a, 0xed, 0x23, 0x76, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/detail_placement_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/detail_placement_view_service.pb.go new file mode 100644 index 0000000..55fce0b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/detail_placement_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/detail_placement_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [DetailPlacementViewService.GetDetailPlacementView][google.ads.googleads.v1.services.DetailPlacementViewService.GetDetailPlacementView]. +type GetDetailPlacementViewRequest struct { + // The resource name of the Detail Placement view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDetailPlacementViewRequest) Reset() { *m = GetDetailPlacementViewRequest{} } +func (m *GetDetailPlacementViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetDetailPlacementViewRequest) ProtoMessage() {} +func (*GetDetailPlacementViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_detail_placement_view_service_70dfd9a0f630143c, []int{0} +} +func (m *GetDetailPlacementViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDetailPlacementViewRequest.Unmarshal(m, b) +} +func (m *GetDetailPlacementViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDetailPlacementViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetDetailPlacementViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDetailPlacementViewRequest.Merge(dst, src) +} +func (m *GetDetailPlacementViewRequest) XXX_Size() int { + return xxx_messageInfo_GetDetailPlacementViewRequest.Size(m) +} +func (m *GetDetailPlacementViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDetailPlacementViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDetailPlacementViewRequest proto.InternalMessageInfo + +func (m *GetDetailPlacementViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetDetailPlacementViewRequest)(nil), "google.ads.googleads.v1.services.GetDetailPlacementViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DetailPlacementViewServiceClient is the client API for DetailPlacementViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type DetailPlacementViewServiceClient interface { + // Returns the requested Detail Placement view in full detail. + GetDetailPlacementView(ctx context.Context, in *GetDetailPlacementViewRequest, opts ...grpc.CallOption) (*resources.DetailPlacementView, error) +} + +type detailPlacementViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewDetailPlacementViewServiceClient(cc *grpc.ClientConn) DetailPlacementViewServiceClient { + return &detailPlacementViewServiceClient{cc} +} + +func (c *detailPlacementViewServiceClient) GetDetailPlacementView(ctx context.Context, in *GetDetailPlacementViewRequest, opts ...grpc.CallOption) (*resources.DetailPlacementView, error) { + out := new(resources.DetailPlacementView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.DetailPlacementViewService/GetDetailPlacementView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DetailPlacementViewServiceServer is the server API for DetailPlacementViewService service. +type DetailPlacementViewServiceServer interface { + // Returns the requested Detail Placement view in full detail. + GetDetailPlacementView(context.Context, *GetDetailPlacementViewRequest) (*resources.DetailPlacementView, error) +} + +func RegisterDetailPlacementViewServiceServer(s *grpc.Server, srv DetailPlacementViewServiceServer) { + s.RegisterService(&_DetailPlacementViewService_serviceDesc, srv) +} + +func _DetailPlacementViewService_GetDetailPlacementView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDetailPlacementViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DetailPlacementViewServiceServer).GetDetailPlacementView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.DetailPlacementViewService/GetDetailPlacementView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DetailPlacementViewServiceServer).GetDetailPlacementView(ctx, req.(*GetDetailPlacementViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DetailPlacementViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.DetailPlacementViewService", + HandlerType: (*DetailPlacementViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDetailPlacementView", + Handler: _DetailPlacementViewService_GetDetailPlacementView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/detail_placement_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/detail_placement_view_service.proto", fileDescriptor_detail_placement_view_service_70dfd9a0f630143c) +} + +var fileDescriptor_detail_placement_view_service_70dfd9a0f630143c = []byte{ + // 373 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xbf, 0x4a, 0xfb, 0x40, + 0x1c, 0x27, 0xf9, 0xc1, 0x0f, 0x0c, 0xba, 0x64, 0x10, 0x09, 0x8a, 0xa5, 0x76, 0x90, 0x0e, 0x77, + 0x44, 0xa1, 0xc8, 0x49, 0x95, 0x94, 0x42, 0x9d, 0xa4, 0x54, 0xc8, 0x20, 0x81, 0x70, 0x26, 0x5f, + 0x42, 0x20, 0xc9, 0xc5, 0xdc, 0x35, 0x1d, 0xc4, 0xc5, 0xc1, 0x17, 0xf0, 0x0d, 0x1c, 0x7d, 0x14, + 0x37, 0xf1, 0x15, 0x9c, 0x9c, 0x7c, 0x04, 0x49, 0x2f, 0x17, 0x28, 0x34, 0x76, 0xfb, 0x70, 0xdf, + 0xcf, 0x9f, 0xef, 0x9f, 0x33, 0xc6, 0x11, 0x63, 0x51, 
0x02, 0x98, 0x86, 0x1c, 0x4b, 0x58, 0xa1, + 0xd2, 0xc6, 0x1c, 0x8a, 0x32, 0x0e, 0x80, 0xe3, 0x10, 0x04, 0x8d, 0x13, 0x3f, 0x4f, 0x68, 0x00, + 0x29, 0x64, 0xc2, 0x2f, 0x63, 0x58, 0xf8, 0x75, 0x19, 0xe5, 0x05, 0x13, 0xcc, 0xec, 0x48, 0x29, + 0xa2, 0x21, 0x47, 0x8d, 0x0b, 0x2a, 0x6d, 0xa4, 0x5c, 0xac, 0x61, 0x5b, 0x4e, 0x01, 0x9c, 0xcd, + 0x8b, 0xd6, 0x20, 0x19, 0x60, 0xed, 0x2b, 0x79, 0x1e, 0x63, 0x9a, 0x65, 0x4c, 0x50, 0x11, 0xb3, + 0x8c, 0xcb, 0x6a, 0x77, 0x6c, 0x1c, 0x4c, 0x40, 0x8c, 0x97, 0xfa, 0xa9, 0x92, 0xbb, 0x31, 0x2c, + 0x66, 0x70, 0x3f, 0x07, 0x2e, 0xcc, 0x23, 0x63, 0x47, 0xe5, 0xf8, 0x19, 0x4d, 0x61, 0x4f, 0xeb, + 0x68, 0xc7, 0x5b, 0xb3, 0x6d, 0xf5, 0x78, 0x4d, 0x53, 0x38, 0xf9, 0xd1, 0x0c, 0x6b, 0x8d, 0xc7, + 0x8d, 0x1c, 0xc1, 0xfc, 0xd0, 0x8c, 0xdd, 0xf5, 0x29, 0xe6, 0x25, 0xda, 0x34, 0x3f, 0xfa, 0xb3, + 0x3f, 0x6b, 0xd0, 0x6a, 0xd0, 0xac, 0x07, 0xad, 0x91, 0x77, 0x2f, 0x9e, 0x3e, 0xbf, 0x5e, 0xf4, + 0x33, 0x73, 0x50, 0x6d, 0xf2, 0x61, 0x65, 0xc4, 0x61, 0x30, 0xe7, 0x82, 0xa5, 0x50, 0x70, 0xdc, + 0xaf, 0x57, 0xbb, 0xa2, 0xe5, 0xb8, 0xff, 0x38, 0x7a, 0xd6, 0x8d, 0x5e, 0xc0, 0xd2, 0x8d, 0xed, + 0x8f, 0x0e, 0xdb, 0x17, 0x33, 0xad, 0x4e, 0x30, 0xd5, 0x6e, 0xaf, 0x6a, 0x93, 0x88, 0x25, 0x34, + 0x8b, 0x10, 0x2b, 0x22, 0x1c, 0x41, 0xb6, 0x3c, 0x90, 0xba, 0x78, 0x1e, 0xf3, 0xf6, 0x8f, 0x76, + 0xae, 0xc0, 0xab, 0xfe, 0x6f, 0xe2, 0x38, 0x6f, 0x7a, 0x67, 0x22, 0x0d, 0x9d, 0x90, 0x23, 0x09, + 0x2b, 0xe4, 0xda, 0xa8, 0x0e, 0xe6, 0xef, 0x8a, 0xe2, 0x39, 0x21, 0xf7, 0x1a, 0x8a, 0xe7, 0xda, + 0x9e, 0xa2, 0x7c, 0xeb, 0x3d, 0xf9, 0x4e, 0x88, 0x13, 0x72, 0x42, 0x1a, 0x12, 0x21, 0xae, 0x4d, + 0x88, 0xa2, 0xdd, 0xfd, 0x5f, 0xf6, 0x79, 0xfa, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x3d, 0x02, 0x09, + 0x37, 0x0f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/display_keyword_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/display_keyword_view_service.pb.go new file mode 100644 index 0000000..098dad2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/display_keyword_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/display_keyword_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [DisplayKeywordViewService.GetDisplayKeywordView][google.ads.googleads.v1.services.DisplayKeywordViewService.GetDisplayKeywordView]. +type GetDisplayKeywordViewRequest struct { + // The resource name of the display keyword view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDisplayKeywordViewRequest) Reset() { *m = GetDisplayKeywordViewRequest{} } +func (m *GetDisplayKeywordViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetDisplayKeywordViewRequest) ProtoMessage() {} +func (*GetDisplayKeywordViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_display_keyword_view_service_277caaf988ad7c73, []int{0} +} +func (m *GetDisplayKeywordViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDisplayKeywordViewRequest.Unmarshal(m, b) +} +func (m *GetDisplayKeywordViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDisplayKeywordViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetDisplayKeywordViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDisplayKeywordViewRequest.Merge(dst, src) +} +func (m *GetDisplayKeywordViewRequest) XXX_Size() int { + return xxx_messageInfo_GetDisplayKeywordViewRequest.Size(m) +} +func (m *GetDisplayKeywordViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDisplayKeywordViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDisplayKeywordViewRequest proto.InternalMessageInfo + +func (m *GetDisplayKeywordViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetDisplayKeywordViewRequest)(nil), "google.ads.googleads.v1.services.GetDisplayKeywordViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DisplayKeywordViewServiceClient is the client API for DisplayKeywordViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DisplayKeywordViewServiceClient interface { + // Returns the requested display keyword view in full detail. + GetDisplayKeywordView(ctx context.Context, in *GetDisplayKeywordViewRequest, opts ...grpc.CallOption) (*resources.DisplayKeywordView, error) +} + +type displayKeywordViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewDisplayKeywordViewServiceClient(cc *grpc.ClientConn) DisplayKeywordViewServiceClient { + return &displayKeywordViewServiceClient{cc} +} + +func (c *displayKeywordViewServiceClient) GetDisplayKeywordView(ctx context.Context, in *GetDisplayKeywordViewRequest, opts ...grpc.CallOption) (*resources.DisplayKeywordView, error) { + out := new(resources.DisplayKeywordView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.DisplayKeywordViewService/GetDisplayKeywordView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DisplayKeywordViewServiceServer is the server API for DisplayKeywordViewService service. +type DisplayKeywordViewServiceServer interface { + // Returns the requested display keyword view in full detail. 
+ GetDisplayKeywordView(context.Context, *GetDisplayKeywordViewRequest) (*resources.DisplayKeywordView, error) +} + +func RegisterDisplayKeywordViewServiceServer(s *grpc.Server, srv DisplayKeywordViewServiceServer) { + s.RegisterService(&_DisplayKeywordViewService_serviceDesc, srv) +} + +func _DisplayKeywordViewService_GetDisplayKeywordView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDisplayKeywordViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DisplayKeywordViewServiceServer).GetDisplayKeywordView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.DisplayKeywordViewService/GetDisplayKeywordView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DisplayKeywordViewServiceServer).GetDisplayKeywordView(ctx, req.(*GetDisplayKeywordViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DisplayKeywordViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.DisplayKeywordViewService", + HandlerType: (*DisplayKeywordViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDisplayKeywordView", + Handler: _DisplayKeywordViewService_GetDisplayKeywordView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/display_keyword_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/display_keyword_view_service.proto", fileDescriptor_display_keyword_view_service_277caaf988ad7c73) +} + +var fileDescriptor_display_keyword_view_service_277caaf988ad7c73 = []byte{ + // 373 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0xa7, 0x15, 0x04, 0x8b, 0x5e, 0x0a, 0x82, 0x8e, 0x21, 0x63, 0xee, 0x20, 0x3b, 0x24, 0x54, + 0x19, 0x42, 0x74, 0x42, 0x37, 0x61, 0x82, 0x20, 0x63, 0x42, 0x0f, 0x52, 0x28, 0x71, 0x0d, 0xa5, + 0xb8, 0x36, 0x35, 0x5f, 0xd7, 0x31, 0xc4, 0x8b, 0xe0, 0x13, 0xf8, 0x06, 0x1e, 0x7d, 0x14, 0xc1, + 0x93, 0xaf, 0xe0, 0x49, 0x5f, 0x42, 0xba, 0x2c, 0x45, 0x99, 0x75, 0xb7, 0x1f, 0xc9, 0xef, 0xcf, + 0x97, 0xdf, 0x17, 0xa3, 0x1b, 0x70, 0x1e, 0x8c, 0x18, 0xa6, 0x3e, 0x60, 0x09, 0x73, 0x94, 0x59, + 0x18, 0x98, 0xc8, 0xc2, 0x21, 0x03, 0xec, 0x87, 0x90, 0x8c, 0xe8, 0xd4, 0xbb, 0x61, 0xd3, 0x09, + 0x17, 0xbe, 0x97, 0x85, 0x6c, 0xe2, 0xcd, 0x6f, 0x51, 0x22, 0x78, 0xca, 0xcd, 0x9a, 0x54, 0x22, + 0xea, 0x03, 0x2a, 0x4c, 0x50, 0x66, 0x21, 0x65, 0x52, 0x39, 0x2e, 0x8b, 0x11, 0x0c, 0xf8, 0x58, + 0x94, 0xe5, 0x48, 0xff, 0x4a, 0x55, 0xa9, 0x93, 0x10, 0xd3, 0x38, 0xe6, 0x29, 0x4d, 0x43, 0x1e, + 0x83, 0xbc, 0xad, 0x77, 0x8d, 0x6a, 0x8f, 0xa5, 0xa7, 0x52, 0x7e, 0x2e, 0xd5, 0x4e, 0xc8, 0x26, + 0x03, 0x76, 0x3b, 0x66, 0x90, 0x9a, 0xbb, 0xc6, 0x86, 0x4a, 0xf1, 0x62, 0x1a, 0xb1, 0x2d, 0xad, + 0xa6, 0xed, 0xad, 0x0d, 0xd6, 0xd5, 0xe1, 0x05, 0x8d, 0xd8, 0xfe, 0x97, 0x66, 0x6c, 0x2f, 0x5a, + 0x5c, 0xca, 0xf9, 0xcd, 0x37, 0xcd, 0xd8, 0xfc, 0x33, 0xc3, 0x3c, 0x41, 0xcb, 0xde, 0x8e, 0xfe, + 0x1b, 0xae, 0xd2, 0x2a, 0xd5, 0x17, 0xcd, 0xa0, 0x45, 0x75, 0xbd, 0xfd, 0xf0, 0xfe, 0xf1, 0xa4, + 0x1f, 0x9a, 0xad, 0xbc, 0xc3, 0xbb, 0x5f, 0xcf, 0x6b, 0x0f, 0xc7, 0x90, 0xf2, 0x88, 0x09, 0xc0, + 0x4d, 0x55, 0xea, 0x0f, 0x29, 0xe0, 0xe6, 0x7d, 0xe7, 0x51, 0x37, 0x1a, 0x43, 0x1e, 0x2d, 0x9d, + 0xbd, 0xb3, 0x53, 0xda, 0x49, 0x3f, 
0xef, 0xbe, 0xaf, 0x5d, 0x9d, 0xcd, 0x3d, 0x02, 0x3e, 0xa2, + 0x71, 0x80, 0xb8, 0x08, 0x70, 0xc0, 0xe2, 0xd9, 0x66, 0xd4, 0xa6, 0x93, 0x10, 0xca, 0xff, 0xd7, + 0x91, 0x02, 0xcf, 0xfa, 0x4a, 0xcf, 0xb6, 0x5f, 0xf4, 0x5a, 0x4f, 0x1a, 0xda, 0x3e, 0x20, 0x09, + 0x73, 0xe4, 0x58, 0x68, 0x1e, 0x0c, 0xaf, 0x8a, 0xe2, 0xda, 0x3e, 0xb8, 0x05, 0xc5, 0x75, 0x2c, + 0x57, 0x51, 0x3e, 0xf5, 0x86, 0x3c, 0x27, 0xc4, 0xf6, 0x81, 0x90, 0x82, 0x44, 0x88, 0x63, 0x11, + 0xa2, 0x68, 0xd7, 0xab, 0xb3, 0x39, 0x0f, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0x56, 0xf2, 0xb0, + 0x91, 0x06, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/domain_category_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/domain_category_service.pb.go new file mode 100644 index 0000000..a1dd9e9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/domain_category_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/domain_category_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [DomainCategoryService.GetDomainCategory][google.ads.googleads.v1.services.DomainCategoryService.GetDomainCategory]. +type GetDomainCategoryRequest struct { + // Resource name of the domain category to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDomainCategoryRequest) Reset() { *m = GetDomainCategoryRequest{} } +func (m *GetDomainCategoryRequest) String() string { return proto.CompactTextString(m) } +func (*GetDomainCategoryRequest) ProtoMessage() {} +func (*GetDomainCategoryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_domain_category_service_e150c47857abcd62, []int{0} +} +func (m *GetDomainCategoryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDomainCategoryRequest.Unmarshal(m, b) +} +func (m *GetDomainCategoryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDomainCategoryRequest.Marshal(b, m, deterministic) +} +func (dst *GetDomainCategoryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDomainCategoryRequest.Merge(dst, src) +} +func (m *GetDomainCategoryRequest) XXX_Size() int { + return xxx_messageInfo_GetDomainCategoryRequest.Size(m) +} +func (m *GetDomainCategoryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDomainCategoryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDomainCategoryRequest proto.InternalMessageInfo + +func (m *GetDomainCategoryRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetDomainCategoryRequest)(nil), "google.ads.googleads.v1.services.GetDomainCategoryRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DomainCategoryServiceClient is the client API for DomainCategoryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DomainCategoryServiceClient interface { + // Returns the requested domain category. + GetDomainCategory(ctx context.Context, in *GetDomainCategoryRequest, opts ...grpc.CallOption) (*resources.DomainCategory, error) +} + +type domainCategoryServiceClient struct { + cc *grpc.ClientConn +} + +func NewDomainCategoryServiceClient(cc *grpc.ClientConn) DomainCategoryServiceClient { + return &domainCategoryServiceClient{cc} +} + +func (c *domainCategoryServiceClient) GetDomainCategory(ctx context.Context, in *GetDomainCategoryRequest, opts ...grpc.CallOption) (*resources.DomainCategory, error) { + out := new(resources.DomainCategory) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.DomainCategoryService/GetDomainCategory", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DomainCategoryServiceServer is the server API for DomainCategoryService service. +type DomainCategoryServiceServer interface { + // Returns the requested domain category. 
+ GetDomainCategory(context.Context, *GetDomainCategoryRequest) (*resources.DomainCategory, error) +} + +func RegisterDomainCategoryServiceServer(s *grpc.Server, srv DomainCategoryServiceServer) { + s.RegisterService(&_DomainCategoryService_serviceDesc, srv) +} + +func _DomainCategoryService_GetDomainCategory_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDomainCategoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DomainCategoryServiceServer).GetDomainCategory(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.DomainCategoryService/GetDomainCategory", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DomainCategoryServiceServer).GetDomainCategory(ctx, req.(*GetDomainCategoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DomainCategoryService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.DomainCategoryService", + HandlerType: (*DomainCategoryServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDomainCategory", + Handler: _DomainCategoryService_GetDomainCategory_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/domain_category_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/domain_category_service.proto", fileDescriptor_domain_category_service_e150c47857abcd62) +} + +var fileDescriptor_domain_category_service_e150c47857abcd62 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x41, 0x4a, 0xfb, 0x40, + 0x18, 0xc5, 0x49, 0xfe, 0xf0, 0x07, 0x83, 0x2e, 0x0c, 0x08, 0x25, 0xb8, 0x28, 0xb5, 0x0b, 0xe9, + 0x62, 0x86, 0x54, 0x41, 0x18, 0x51, 0x49, 0x15, 0xea, 0x4a, 0x4a, 0x85, 0x2e, 0x24, 0x50, 0xc6, + 0x64, 0x18, 0x02, 0xcd, 0x4c, 0x9d, 0x6f, 0x5a, 0x10, 0x71, 0xa1, 0x57, 0xf0, 0x06, 0x2e, 0x3d, + 0x84, 0x07, 0x70, 0xeb, 0xc6, 0x03, 0xb8, 0xf2, 0x14, 0x92, 0x4e, 0x26, 0x50, 0x6c, 0xe8, 0xee, + 0x31, 0xf3, 0x7e, 0xdf, 0xf7, 0xe6, 0x25, 0xde, 0x29, 0x97, 0x92, 0x4f, 0x18, 0xa6, 0x29, 0x60, + 0x23, 0x0b, 0x35, 0x0f, 0x31, 0x30, 0x35, 0xcf, 0x12, 0x06, 0x38, 0x95, 0x39, 0xcd, 0xc4, 0x38, + 0xa1, 0x9a, 0x71, 0xa9, 0xee, 0xc7, 0xe5, 0x05, 0x9a, 0x2a, 0xa9, 0xa5, 0xdf, 0x34, 0x10, 0xa2, + 0x29, 0xa0, 0x8a, 0x47, 0xf3, 0x10, 0x59, 0x3e, 0x38, 0xaa, 0xdb, 0xa0, 0x18, 0xc8, 0x99, 0x5a, + 0xb1, 0xc2, 0x8c, 0x0e, 0x76, 0x2d, 0x38, 0xcd, 0x30, 0x15, 0x42, 0x6a, 0xaa, 0x33, 0x29, 0xc0, + 0xdc, 0xb6, 0xce, 0xbc, 0x46, 0x9f, 0xe9, 0x8b, 0x05, 0x79, 0x5e, 0x82, 0x43, 0x76, 0x37, 0x63, + 0xa0, 0xfd, 0x3d, 0x6f, 0xcb, 0x0e, 0x1f, 0x0b, 0x9a, 0xb3, 0x86, 0xd3, 0x74, 0xf6, 0x37, 0x86, + 0x9b, 0xf6, 0xf0, 0x8a, 0xe6, 0xac, 0xfb, 0xe5, 0x78, 0x3b, 0xcb, 0xf8, 0xb5, 0x89, 0xec, 0xbf, + 0x3b, 0xde, 0xf6, 0x9f, 0xd9, 0x3e, 0x41, 0xeb, 0x9e, 0x8a, 0xea, 0x02, 0x05, 0x61, 0x2d, 0x5b, + 0x95, 0x80, 0x96, 0xc9, 0x16, 0x79, 0xfe, 0xfc, 0x7e, 0x71, 0x0f, 0xfd, 0x6e, 0x51, 0xd5, 0xc3, + 0xd2, 0x73, 0x4e, 0x92, 0x19, 0x68, 0x99, 0x33, 0x05, 0xb8, 0x53, 0x76, 0x57, 0x62, 0x19, 0x03, + 0xdc, 0x79, 0xec, 0x3d, 0xb9, 0x5e, 0x3b, 0x91, 0xf9, 0xda, 0xc0, 0xbd, 0x60, 0x65, 0x01, 0x83, + 0xa2, 0xe0, 0x81, 0x73, 0x73, 0x59, 0xf2, 0x5c, 0x4e, 0xa8, 0xe0, 0x48, 0x2a, 0x8e, 0x39, 0x13, + 0x8b, 0xfa, 0xed, 0x97, 0x9c, 0x66, 0x50, 
0xff, 0xeb, 0x1c, 0x5b, 0xf1, 0xea, 0xfe, 0xeb, 0x47, + 0xd1, 0x9b, 0xdb, 0xec, 0x9b, 0x81, 0x51, 0x0a, 0xc8, 0xc8, 0x42, 0x8d, 0x42, 0x54, 0x2e, 0x86, + 0x0f, 0x6b, 0x89, 0xa3, 0x14, 0xe2, 0xca, 0x12, 0x8f, 0xc2, 0xd8, 0x5a, 0x7e, 0xdc, 0xb6, 0x39, + 0x27, 0x24, 0x4a, 0x81, 0x90, 0xca, 0x44, 0xc8, 0x28, 0x24, 0xc4, 0xda, 0x6e, 0xff, 0x2f, 0x72, + 0x1e, 0xfc, 0x06, 0x00, 0x00, 0xff, 0xff, 0x0a, 0xaa, 0x05, 0x1b, 0xe1, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.pb.go new file mode 100644 index 0000000..3a1277e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.pb.go @@ -0,0 +1,180 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [DynamicSearchAdsSearchTermViewService.GetDynamicSearchAdsSearchTermView][google.ads.googleads.v1.services.DynamicSearchAdsSearchTermViewService.GetDynamicSearchAdsSearchTermView]. +type GetDynamicSearchAdsSearchTermViewRequest struct { + // The resource name of the dynamic search ads search term view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDynamicSearchAdsSearchTermViewRequest) Reset() { + *m = GetDynamicSearchAdsSearchTermViewRequest{} +} +func (m *GetDynamicSearchAdsSearchTermViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetDynamicSearchAdsSearchTermViewRequest) ProtoMessage() {} +func (*GetDynamicSearchAdsSearchTermViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamic_search_ads_search_term_view_service_fdd5eab22ce903b6, []int{0} +} +func (m *GetDynamicSearchAdsSearchTermViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest.Unmarshal(m, b) +} +func (m *GetDynamicSearchAdsSearchTermViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetDynamicSearchAdsSearchTermViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest.Merge(dst, src) +} +func (m *GetDynamicSearchAdsSearchTermViewRequest) XXX_Size() int { + return xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest.Size(m) +} +func (m *GetDynamicSearchAdsSearchTermViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDynamicSearchAdsSearchTermViewRequest proto.InternalMessageInfo + +func (m *GetDynamicSearchAdsSearchTermViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetDynamicSearchAdsSearchTermViewRequest)(nil), "google.ads.googleads.v1.services.GetDynamicSearchAdsSearchTermViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DynamicSearchAdsSearchTermViewServiceClient is the client API for DynamicSearchAdsSearchTermViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DynamicSearchAdsSearchTermViewServiceClient interface { + // Returns the requested dynamic search ads search term view in full detail. 
+ GetDynamicSearchAdsSearchTermView(ctx context.Context, in *GetDynamicSearchAdsSearchTermViewRequest, opts ...grpc.CallOption) (*resources.DynamicSearchAdsSearchTermView, error) +} + +type dynamicSearchAdsSearchTermViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewDynamicSearchAdsSearchTermViewServiceClient(cc *grpc.ClientConn) DynamicSearchAdsSearchTermViewServiceClient { + return &dynamicSearchAdsSearchTermViewServiceClient{cc} +} + +func (c *dynamicSearchAdsSearchTermViewServiceClient) GetDynamicSearchAdsSearchTermView(ctx context.Context, in *GetDynamicSearchAdsSearchTermViewRequest, opts ...grpc.CallOption) (*resources.DynamicSearchAdsSearchTermView, error) { + out := new(resources.DynamicSearchAdsSearchTermView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.DynamicSearchAdsSearchTermViewService/GetDynamicSearchAdsSearchTermView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DynamicSearchAdsSearchTermViewServiceServer is the server API for DynamicSearchAdsSearchTermViewService service. +type DynamicSearchAdsSearchTermViewServiceServer interface { + // Returns the requested dynamic search ads search term view in full detail. + GetDynamicSearchAdsSearchTermView(context.Context, *GetDynamicSearchAdsSearchTermViewRequest) (*resources.DynamicSearchAdsSearchTermView, error) +} + +func RegisterDynamicSearchAdsSearchTermViewServiceServer(s *grpc.Server, srv DynamicSearchAdsSearchTermViewServiceServer) { + s.RegisterService(&_DynamicSearchAdsSearchTermViewService_serviceDesc, srv) +} + +func _DynamicSearchAdsSearchTermViewService_GetDynamicSearchAdsSearchTermView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDynamicSearchAdsSearchTermViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DynamicSearchAdsSearchTermViewServiceServer).GetDynamicSearchAdsSearchTermView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.DynamicSearchAdsSearchTermViewService/GetDynamicSearchAdsSearchTermView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DynamicSearchAdsSearchTermViewServiceServer).GetDynamicSearchAdsSearchTermView(ctx, req.(*GetDynamicSearchAdsSearchTermViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DynamicSearchAdsSearchTermViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.DynamicSearchAdsSearchTermViewService", + HandlerType: (*DynamicSearchAdsSearchTermViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDynamicSearchAdsSearchTermView", + Handler: _DynamicSearchAdsSearchTermViewService_GetDynamicSearchAdsSearchTermView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/dynamic_search_ads_search_term_view_service.proto", fileDescriptor_dynamic_search_ads_search_term_view_service_fdd5eab22ce903b6) +} + +var fileDescriptor_dynamic_search_ads_search_term_view_service_fdd5eab22ce903b6 = []byte{ + // 391 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcd, 0x6a, 0xdb, 0x40, + 0x14, 0x85, 0x91, 0x0a, 0x85, 0x8a, 0x76, 0xa3, 0x55, 0x31, 
0x5d, 0xb8, 0xae, 0x0b, 0xc6, 0x8b, + 0x19, 0xd4, 0xee, 0xa6, 0x74, 0x31, 0x6e, 0xc1, 0xfd, 0x81, 0xd6, 0xd8, 0x41, 0x8b, 0x20, 0x10, + 0x13, 0xcd, 0x45, 0x11, 0x58, 0x1a, 0x67, 0xae, 0x2c, 0x13, 0x42, 0x36, 0x79, 0x83, 0x90, 0x37, + 0xc8, 0x26, 0x90, 0x47, 0xc9, 0x36, 0xaf, 0x90, 0x55, 0xde, 0x21, 0x10, 0xe4, 0xf1, 0x08, 0xb2, + 0x90, 0xed, 0xdd, 0x61, 0xe6, 0xf0, 0x9d, 0xb9, 0xf7, 0x8c, 0x37, 0x4d, 0x95, 0x4a, 0xe7, 0x40, + 0x85, 0x44, 0x6a, 0x64, 0xad, 0xaa, 0x80, 0x22, 0xe8, 0x2a, 0x4b, 0x00, 0xa9, 0x3c, 0x2d, 0x44, + 0x9e, 0x25, 0x31, 0x82, 0xd0, 0xc9, 0x71, 0x2c, 0x24, 0x5a, 0x59, 0x82, 0xce, 0xe3, 0x2a, 0x83, + 0x55, 0xbc, 0x31, 0x93, 0x85, 0x56, 0xa5, 0xf2, 0xbb, 0x06, 0x44, 0x84, 0x44, 0xd2, 0x30, 0x49, + 0x15, 0x10, 0xcb, 0xec, 0xfc, 0x6d, 0x4b, 0xd5, 0x80, 0x6a, 0xa9, 0xf7, 0x8c, 0x35, 0x71, 0x9d, + 0x0f, 0x16, 0xb6, 0xc8, 0xa8, 0x28, 0x0a, 0x55, 0x8a, 0x32, 0x53, 0x05, 0x9a, 0xdb, 0xde, 0x7f, + 0x6f, 0x30, 0x86, 0xf2, 0xa7, 0xa1, 0xcd, 0xd6, 0x04, 0x2e, 0xd1, 0x88, 0x03, 0xd0, 0x79, 0x98, + 0xc1, 0x6a, 0x0a, 0x27, 0x4b, 0xc0, 0xd2, 0xff, 0xe4, 0xbd, 0xb3, 0x0f, 0x88, 0x0b, 0x91, 0xc3, + 0x7b, 0xa7, 0xeb, 0x0c, 0xde, 0x4c, 0xdf, 0xda, 0xc3, 0x7f, 0x22, 0x87, 0x2f, 0x37, 0xae, 0xf7, + 0x79, 0x3b, 0x6e, 0x66, 0xc6, 0xf4, 0x9f, 0x1c, 0xef, 0xe3, 0xce, 0x6c, 0xff, 0x0f, 0xd9, 0xb5, + 0x2e, 0xb2, 0xef, 0x00, 0x1d, 0xde, 0xca, 0x6a, 0x16, 0x4b, 0xb6, 0x93, 0x7a, 0xbf, 0x2f, 0xee, + 0x1f, 0xae, 0xdc, 0x1f, 0x3e, 0xaf, 0xeb, 0x38, 0x7b, 0xb1, 0x8e, 0xef, 0xc9, 0x12, 0x4b, 0x95, + 0x83, 0x46, 0x3a, 0xb4, 0xfd, 0xb4, 0x60, 0x90, 0x0e, 0xcf, 0x47, 0x97, 0xae, 0xd7, 0x4f, 0x54, + 0xbe, 0x73, 0xbe, 0xd1, 0x70, 0xaf, 0x7d, 0x4e, 0xea, 0x3e, 0x27, 0xce, 0xe1, 0xaf, 0x0d, 0x2f, + 0x55, 0x73, 0x51, 0xa4, 0x44, 0xe9, 0x94, 0xa6, 0x50, 0xac, 0xdb, 0xb6, 0x9f, 0x69, 0x91, 0x61, + 0xfb, 0x8f, 0xfe, 0x66, 0xc5, 0xb5, 0xfb, 0x6a, 0xcc, 0xf9, 0xad, 0xdb, 0x1d, 0x1b, 0x20, 0x97, + 0x48, 0x8c, 0xac, 0x55, 0x18, 0x90, 0x4d, 0x30, 0xde, 0x59, 0x4b, 0xc4, 0x25, 0x46, 0x8d, 0x25, + 0x0a, 0x83, 0xc8, 0x5a, 0x1e, 0xdd, 0xbe, 0x39, 0x67, 0x8c, 0x4b, 0x64, 0xac, 0x31, 0x31, 0x16, + 0x06, 0x8c, 0x59, 0xdb, 0xd1, 0xeb, 0xf5, 0x3b, 0xbf, 0x3e, 0x07, 0x00, 0x00, 0xff, 0xff, 0x0e, + 0x7a, 0x64, 0x1d, 0x78, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/expanded_landing_page_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/expanded_landing_page_view_service.pb.go new file mode 100644 index 0000000..37a0344 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/expanded_landing_page_view_service.pb.go @@ -0,0 +1,179 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/expanded_landing_page_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [ExpandedLandingPageViewService.GetExpandedLandingPageView][google.ads.googleads.v1.services.ExpandedLandingPageViewService.GetExpandedLandingPageView]. +type GetExpandedLandingPageViewRequest struct { + // The resource name of the expanded landing page view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetExpandedLandingPageViewRequest) Reset() { *m = GetExpandedLandingPageViewRequest{} } +func (m *GetExpandedLandingPageViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetExpandedLandingPageViewRequest) ProtoMessage() {} +func (*GetExpandedLandingPageViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_expanded_landing_page_view_service_f73ccb2dea9fee66, []int{0} +} +func (m *GetExpandedLandingPageViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetExpandedLandingPageViewRequest.Unmarshal(m, b) +} +func (m *GetExpandedLandingPageViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetExpandedLandingPageViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetExpandedLandingPageViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetExpandedLandingPageViewRequest.Merge(dst, src) +} +func (m *GetExpandedLandingPageViewRequest) XXX_Size() int { + return xxx_messageInfo_GetExpandedLandingPageViewRequest.Size(m) +} +func (m *GetExpandedLandingPageViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetExpandedLandingPageViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetExpandedLandingPageViewRequest proto.InternalMessageInfo + +func (m *GetExpandedLandingPageViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetExpandedLandingPageViewRequest)(nil), "google.ads.googleads.v1.services.GetExpandedLandingPageViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ExpandedLandingPageViewServiceClient is the client API for ExpandedLandingPageViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ExpandedLandingPageViewServiceClient interface { + // Returns the requested expanded landing page view in full detail. 
+ GetExpandedLandingPageView(ctx context.Context, in *GetExpandedLandingPageViewRequest, opts ...grpc.CallOption) (*resources.ExpandedLandingPageView, error) +} + +type expandedLandingPageViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewExpandedLandingPageViewServiceClient(cc *grpc.ClientConn) ExpandedLandingPageViewServiceClient { + return &expandedLandingPageViewServiceClient{cc} +} + +func (c *expandedLandingPageViewServiceClient) GetExpandedLandingPageView(ctx context.Context, in *GetExpandedLandingPageViewRequest, opts ...grpc.CallOption) (*resources.ExpandedLandingPageView, error) { + out := new(resources.ExpandedLandingPageView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ExpandedLandingPageViewService/GetExpandedLandingPageView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ExpandedLandingPageViewServiceServer is the server API for ExpandedLandingPageViewService service. +type ExpandedLandingPageViewServiceServer interface { + // Returns the requested expanded landing page view in full detail. + GetExpandedLandingPageView(context.Context, *GetExpandedLandingPageViewRequest) (*resources.ExpandedLandingPageView, error) +} + +func RegisterExpandedLandingPageViewServiceServer(s *grpc.Server, srv ExpandedLandingPageViewServiceServer) { + s.RegisterService(&_ExpandedLandingPageViewService_serviceDesc, srv) +} + +func _ExpandedLandingPageViewService_GetExpandedLandingPageView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExpandedLandingPageViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExpandedLandingPageViewServiceServer).GetExpandedLandingPageView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ExpandedLandingPageViewService/GetExpandedLandingPageView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExpandedLandingPageViewServiceServer).GetExpandedLandingPageView(ctx, req.(*GetExpandedLandingPageViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ExpandedLandingPageViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ExpandedLandingPageViewService", + HandlerType: (*ExpandedLandingPageViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetExpandedLandingPageView", + Handler: _ExpandedLandingPageViewService_GetExpandedLandingPageView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/expanded_landing_page_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/expanded_landing_page_view_service.proto", fileDescriptor_expanded_landing_page_view_service_f73ccb2dea9fee66) +} + +var fileDescriptor_expanded_landing_page_view_service_f73ccb2dea9fee66 = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x31, 0x8b, 0xdb, 0x30, + 0x14, 0xc6, 0x2e, 0x14, 0x6a, 0xda, 0xc5, 0x53, 0x31, 0x25, 0xa4, 0x49, 0x86, 0x92, 0x41, 0xc2, + 0xed, 0xa6, 0xb6, 0x83, 0x5d, 0x4a, 0x52, 0x28, 0x25, 0xa4, 0xe0, 0xa1, 0x18, 0x8c, 0x12, 0xbd, + 0x0a, 0x43, 0x2c, 0xb9, 0x92, 0xed, 0x14, 0x4a, 0x97, 0xd2, 0xe9, 0xd6, 0xfb, 0x07, 0x37, 0xde, + 0x4f, 0xb9, 0xf5, 0x7e, 0xc1, 0xc1, 0x4d, 0xf7, 0x2b, 0x0e, 0x47, 0x96, 0xe1, 0x0e, 
0x9c, 0x6c, + 0x9f, 0xf4, 0x3e, 0x7d, 0xdf, 0x7b, 0xef, 0x93, 0xf7, 0x85, 0x4b, 0xc9, 0x77, 0x80, 0x29, 0xd3, + 0xd8, 0xc0, 0x16, 0x35, 0x21, 0xd6, 0xa0, 0x9a, 0x7c, 0x0b, 0x1a, 0xc3, 0xef, 0x92, 0x0a, 0x06, + 0x2c, 0xdb, 0x51, 0xc1, 0x72, 0xc1, 0xb3, 0x92, 0x72, 0xc8, 0x9a, 0x1c, 0xf6, 0x59, 0xc7, 0x41, + 0xa5, 0x92, 0x95, 0xf4, 0xc7, 0xe6, 0x3d, 0xa2, 0x4c, 0xa3, 0x5e, 0x0a, 0x35, 0x21, 0xb2, 0x52, + 0x41, 0x3c, 0x64, 0xa6, 0x40, 0xcb, 0x5a, 0x1d, 0x77, 0x33, 0x2e, 0xc1, 0x2b, 0xab, 0x51, 0xe6, + 0x98, 0x0a, 0x21, 0x2b, 0x5a, 0xe5, 0x52, 0xe8, 0xae, 0x3a, 0xea, 0xaa, 0x87, 0xd3, 0xa6, 0xfe, + 0x89, 0xf7, 0x8a, 0x96, 0x25, 0xa8, 0xae, 0x3e, 0x59, 0x7a, 0xaf, 0x17, 0x50, 0x7d, 0xee, 0x4c, + 0xbe, 0x1a, 0x8f, 0x15, 0xe5, 0x90, 0xe4, 0xb0, 0x5f, 0xc3, 0xaf, 0x1a, 0x74, 0xe5, 0x4f, 0xbd, + 0x17, 0xb6, 0xa1, 0x4c, 0xd0, 0x02, 0x5e, 0x3a, 0x63, 0xe7, 0xcd, 0xb3, 0xf5, 0x73, 0x7b, 0xf9, + 0x8d, 0x16, 0xf0, 0xf6, 0xbf, 0xeb, 0x8d, 0x06, 0x74, 0xbe, 0x9b, 0x79, 0xfd, 0x1b, 0xc7, 0x0b, + 0x86, 0xdd, 0xfc, 0x4f, 0xe8, 0xd4, 0xc2, 0xd0, 0xc9, 0x5e, 0x03, 0x32, 0x28, 0xd2, 0xef, 0x14, + 0x0d, 0x48, 0x4c, 0xe2, 0x7f, 0xd7, 0xb7, 0xe7, 0xee, 0x07, 0x9f, 0xb4, 0x11, 0xfc, 0x79, 0x30, + 0xf2, 0xc7, 0x6d, 0xad, 0x2b, 0x59, 0x80, 0xd2, 0x78, 0xde, 0x67, 0xf2, 0xe8, 0xbd, 0xc6, 0xf3, + 0xbf, 0xf1, 0x99, 0xeb, 0xcd, 0xb6, 0xb2, 0x38, 0x39, 0x4a, 0x3c, 0x3d, 0xbe, 0xac, 0x55, 0x1b, + 0xcf, 0xca, 0xf9, 0xb1, 0xec, 0x84, 0xb8, 0xdc, 0x51, 0xc1, 0x91, 0x54, 0x1c, 0x73, 0x10, 0x87, + 0xf0, 0xec, 0x97, 0x29, 0x73, 0x3d, 0xfc, 0x5d, 0xdf, 0x5b, 0x70, 0xe1, 0x3e, 0x59, 0x44, 0xd1, + 0xa5, 0x3b, 0x5e, 0x18, 0xc1, 0x88, 0x69, 0x64, 0x60, 0x8b, 0x92, 0x10, 0x75, 0xc6, 0xfa, 0xca, + 0x52, 0xd2, 0x88, 0xe9, 0xb4, 0xa7, 0xa4, 0x49, 0x98, 0x5a, 0xca, 0x9d, 0x3b, 0x33, 0xf7, 0x84, + 0x44, 0x4c, 0x13, 0xd2, 0x93, 0x08, 0x49, 0x42, 0x42, 0x2c, 0x6d, 0xf3, 0xf4, 0xd0, 0xe7, 0xbb, + 0xfb, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xc2, 0x24, 0x75, 0x55, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/extension_feed_item_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/extension_feed_item_service.pb.go new file mode 100644 index 0000000..132127f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/extension_feed_item_service.pb.go @@ -0,0 +1,562 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/extension_feed_item_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ExtensionFeedItemService.GetExtensionFeedItem][google.ads.googleads.v1.services.ExtensionFeedItemService.GetExtensionFeedItem]. +type GetExtensionFeedItemRequest struct { + // The resource name of the extension feed item to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetExtensionFeedItemRequest) Reset() { *m = GetExtensionFeedItemRequest{} } +func (m *GetExtensionFeedItemRequest) String() string { return proto.CompactTextString(m) } +func (*GetExtensionFeedItemRequest) ProtoMessage() {} +func (*GetExtensionFeedItemRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81, []int{0} +} +func (m *GetExtensionFeedItemRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetExtensionFeedItemRequest.Unmarshal(m, b) +} +func (m *GetExtensionFeedItemRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetExtensionFeedItemRequest.Marshal(b, m, deterministic) +} +func (dst *GetExtensionFeedItemRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetExtensionFeedItemRequest.Merge(dst, src) +} +func (m *GetExtensionFeedItemRequest) XXX_Size() int { + return xxx_messageInfo_GetExtensionFeedItemRequest.Size(m) +} +func (m *GetExtensionFeedItemRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetExtensionFeedItemRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetExtensionFeedItemRequest proto.InternalMessageInfo + +func (m *GetExtensionFeedItemRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [ExtensionFeedItemService.MutateExtensionFeedItems][google.ads.googleads.v1.services.ExtensionFeedItemService.MutateExtensionFeedItems]. +type MutateExtensionFeedItemsRequest struct { + // The ID of the customer whose extension feed items are being + // modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual extension feed items. + Operations []*ExtensionFeedItemOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateExtensionFeedItemsRequest) Reset() { *m = MutateExtensionFeedItemsRequest{} } +func (m *MutateExtensionFeedItemsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateExtensionFeedItemsRequest) ProtoMessage() {} +func (*MutateExtensionFeedItemsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81, []int{1} +} +func (m *MutateExtensionFeedItemsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateExtensionFeedItemsRequest.Unmarshal(m, b) +} +func (m *MutateExtensionFeedItemsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateExtensionFeedItemsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateExtensionFeedItemsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateExtensionFeedItemsRequest.Merge(dst, src) +} +func (m *MutateExtensionFeedItemsRequest) XXX_Size() int { + return xxx_messageInfo_MutateExtensionFeedItemsRequest.Size(m) +} +func (m *MutateExtensionFeedItemsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateExtensionFeedItemsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateExtensionFeedItemsRequest proto.InternalMessageInfo + +func (m *MutateExtensionFeedItemsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateExtensionFeedItemsRequest) GetOperations() []*ExtensionFeedItemOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateExtensionFeedItemsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an extension feed item. +type ExtensionFeedItemOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *ExtensionFeedItemOperation_Create + // *ExtensionFeedItemOperation_Update + // *ExtensionFeedItemOperation_Remove + Operation isExtensionFeedItemOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionFeedItemOperation) Reset() { *m = ExtensionFeedItemOperation{} } +func (m *ExtensionFeedItemOperation) String() string { return proto.CompactTextString(m) } +func (*ExtensionFeedItemOperation) ProtoMessage() {} +func (*ExtensionFeedItemOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81, []int{2} +} +func (m *ExtensionFeedItemOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionFeedItemOperation.Unmarshal(m, b) +} +func (m *ExtensionFeedItemOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionFeedItemOperation.Marshal(b, m, deterministic) +} +func (dst *ExtensionFeedItemOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionFeedItemOperation.Merge(dst, src) +} +func (m *ExtensionFeedItemOperation) XXX_Size() int { + return xxx_messageInfo_ExtensionFeedItemOperation.Size(m) +} +func (m *ExtensionFeedItemOperation) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionFeedItemOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionFeedItemOperation proto.InternalMessageInfo + +func (m *ExtensionFeedItemOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isExtensionFeedItemOperation_Operation interface { + isExtensionFeedItemOperation_Operation() +} + +type ExtensionFeedItemOperation_Create struct { + Create *resources.ExtensionFeedItem `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type ExtensionFeedItemOperation_Update struct { + Update *resources.ExtensionFeedItem `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type ExtensionFeedItemOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*ExtensionFeedItemOperation_Create) isExtensionFeedItemOperation_Operation() {} + +func (*ExtensionFeedItemOperation_Update) isExtensionFeedItemOperation_Operation() {} + +func (*ExtensionFeedItemOperation_Remove) isExtensionFeedItemOperation_Operation() {} + +func (m *ExtensionFeedItemOperation) GetOperation() isExtensionFeedItemOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *ExtensionFeedItemOperation) GetCreate() *resources.ExtensionFeedItem { + if x, ok := m.GetOperation().(*ExtensionFeedItemOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *ExtensionFeedItemOperation) GetUpdate() *resources.ExtensionFeedItem { + if x, ok := m.GetOperation().(*ExtensionFeedItemOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *ExtensionFeedItemOperation) GetRemove() string { + if x, ok := m.GetOperation().(*ExtensionFeedItemOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ExtensionFeedItemOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExtensionFeedItemOperation_OneofMarshaler, _ExtensionFeedItemOperation_OneofUnmarshaler, _ExtensionFeedItemOperation_OneofSizer, []interface{}{ + (*ExtensionFeedItemOperation_Create)(nil), + (*ExtensionFeedItemOperation_Update)(nil), + (*ExtensionFeedItemOperation_Remove)(nil), + } +} + +func _ExtensionFeedItemOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExtensionFeedItemOperation) + // operation + switch x := m.Operation.(type) { + case *ExtensionFeedItemOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *ExtensionFeedItemOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *ExtensionFeedItemOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("ExtensionFeedItemOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _ExtensionFeedItemOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExtensionFeedItemOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.ExtensionFeedItem) + err := b.DecodeMessage(msg) + m.Operation = &ExtensionFeedItemOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.ExtensionFeedItem) + err := b.DecodeMessage(msg) + m.Operation = &ExtensionFeedItemOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &ExtensionFeedItemOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _ExtensionFeedItemOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExtensionFeedItemOperation) + // operation + switch x := m.Operation.(type) { + case *ExtensionFeedItemOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItemOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExtensionFeedItemOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an extension feed item mutate. +type MutateExtensionFeedItemsResponse struct { + // All results for the mutate. 
+ Results []*MutateExtensionFeedItemResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateExtensionFeedItemsResponse) Reset() { *m = MutateExtensionFeedItemsResponse{} } +func (m *MutateExtensionFeedItemsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateExtensionFeedItemsResponse) ProtoMessage() {} +func (*MutateExtensionFeedItemsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81, []int{3} +} +func (m *MutateExtensionFeedItemsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateExtensionFeedItemsResponse.Unmarshal(m, b) +} +func (m *MutateExtensionFeedItemsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateExtensionFeedItemsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateExtensionFeedItemsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateExtensionFeedItemsResponse.Merge(dst, src) +} +func (m *MutateExtensionFeedItemsResponse) XXX_Size() int { + return xxx_messageInfo_MutateExtensionFeedItemsResponse.Size(m) +} +func (m *MutateExtensionFeedItemsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateExtensionFeedItemsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateExtensionFeedItemsResponse proto.InternalMessageInfo + +func (m *MutateExtensionFeedItemsResponse) GetResults() []*MutateExtensionFeedItemResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the extension feed item mutate. +type MutateExtensionFeedItemResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateExtensionFeedItemResult) Reset() { *m = MutateExtensionFeedItemResult{} } +func (m *MutateExtensionFeedItemResult) String() string { return proto.CompactTextString(m) } +func (*MutateExtensionFeedItemResult) ProtoMessage() {} +func (*MutateExtensionFeedItemResult) Descriptor() ([]byte, []int) { + return fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81, []int{4} +} +func (m *MutateExtensionFeedItemResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateExtensionFeedItemResult.Unmarshal(m, b) +} +func (m *MutateExtensionFeedItemResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateExtensionFeedItemResult.Marshal(b, m, deterministic) +} +func (dst *MutateExtensionFeedItemResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateExtensionFeedItemResult.Merge(dst, src) +} +func (m *MutateExtensionFeedItemResult) XXX_Size() int { + return xxx_messageInfo_MutateExtensionFeedItemResult.Size(m) +} +func (m *MutateExtensionFeedItemResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateExtensionFeedItemResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateExtensionFeedItemResult proto.InternalMessageInfo + +func (m *MutateExtensionFeedItemResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetExtensionFeedItemRequest)(nil), "google.ads.googleads.v1.services.GetExtensionFeedItemRequest") + proto.RegisterType((*MutateExtensionFeedItemsRequest)(nil), 
"google.ads.googleads.v1.services.MutateExtensionFeedItemsRequest") + proto.RegisterType((*ExtensionFeedItemOperation)(nil), "google.ads.googleads.v1.services.ExtensionFeedItemOperation") + proto.RegisterType((*MutateExtensionFeedItemsResponse)(nil), "google.ads.googleads.v1.services.MutateExtensionFeedItemsResponse") + proto.RegisterType((*MutateExtensionFeedItemResult)(nil), "google.ads.googleads.v1.services.MutateExtensionFeedItemResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ExtensionFeedItemServiceClient is the client API for ExtensionFeedItemService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ExtensionFeedItemServiceClient interface { + // Returns the requested extension feed item in full detail. + GetExtensionFeedItem(ctx context.Context, in *GetExtensionFeedItemRequest, opts ...grpc.CallOption) (*resources.ExtensionFeedItem, error) + // Creates, updates, or removes extension feed items. Operation + // statuses are returned. + MutateExtensionFeedItems(ctx context.Context, in *MutateExtensionFeedItemsRequest, opts ...grpc.CallOption) (*MutateExtensionFeedItemsResponse, error) +} + +type extensionFeedItemServiceClient struct { + cc *grpc.ClientConn +} + +func NewExtensionFeedItemServiceClient(cc *grpc.ClientConn) ExtensionFeedItemServiceClient { + return &extensionFeedItemServiceClient{cc} +} + +func (c *extensionFeedItemServiceClient) GetExtensionFeedItem(ctx context.Context, in *GetExtensionFeedItemRequest, opts ...grpc.CallOption) (*resources.ExtensionFeedItem, error) { + out := new(resources.ExtensionFeedItem) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ExtensionFeedItemService/GetExtensionFeedItem", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *extensionFeedItemServiceClient) MutateExtensionFeedItems(ctx context.Context, in *MutateExtensionFeedItemsRequest, opts ...grpc.CallOption) (*MutateExtensionFeedItemsResponse, error) { + out := new(MutateExtensionFeedItemsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ExtensionFeedItemService/MutateExtensionFeedItems", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ExtensionFeedItemServiceServer is the server API for ExtensionFeedItemService service. +type ExtensionFeedItemServiceServer interface { + // Returns the requested extension feed item in full detail. + GetExtensionFeedItem(context.Context, *GetExtensionFeedItemRequest) (*resources.ExtensionFeedItem, error) + // Creates, updates, or removes extension feed items. Operation + // statuses are returned. 
+ MutateExtensionFeedItems(context.Context, *MutateExtensionFeedItemsRequest) (*MutateExtensionFeedItemsResponse, error) +} + +func RegisterExtensionFeedItemServiceServer(s *grpc.Server, srv ExtensionFeedItemServiceServer) { + s.RegisterService(&_ExtensionFeedItemService_serviceDesc, srv) +} + +func _ExtensionFeedItemService_GetExtensionFeedItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExtensionFeedItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExtensionFeedItemServiceServer).GetExtensionFeedItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ExtensionFeedItemService/GetExtensionFeedItem", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExtensionFeedItemServiceServer).GetExtensionFeedItem(ctx, req.(*GetExtensionFeedItemRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ExtensionFeedItemService_MutateExtensionFeedItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateExtensionFeedItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExtensionFeedItemServiceServer).MutateExtensionFeedItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ExtensionFeedItemService/MutateExtensionFeedItems", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExtensionFeedItemServiceServer).MutateExtensionFeedItems(ctx, req.(*MutateExtensionFeedItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ExtensionFeedItemService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ExtensionFeedItemService", + HandlerType: (*ExtensionFeedItemServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetExtensionFeedItem", + Handler: _ExtensionFeedItemService_GetExtensionFeedItem_Handler, + }, + { + MethodName: "MutateExtensionFeedItems", + Handler: _ExtensionFeedItemService_MutateExtensionFeedItems_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/extension_feed_item_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/extension_feed_item_service.proto", fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81) +} + +var fileDescriptor_extension_feed_item_service_7f4cbaf9bfb19d81 = []byte{ + // 656 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xc7, 0xb1, 0x83, 0x0a, 0xdd, 0x94, 0x8b, 0xc5, 0xc1, 0x0a, 0x94, 0x46, 0xa6, 0x87, 0x2a, + 0x07, 0x5b, 0x09, 0x15, 0x48, 0x6e, 0x2b, 0x14, 0x0b, 0xfa, 0x71, 0xe8, 0x87, 0x8c, 0x54, 0x09, + 0x14, 0xc9, 0xda, 0xc6, 0xd3, 0xc8, 0xaa, 0xed, 0x35, 0xbb, 0xeb, 0x40, 0x55, 0xf5, 0x82, 0x78, + 0x03, 0x5e, 0x00, 0x71, 0xe4, 0x3d, 0x38, 0xc0, 0x81, 0x0b, 0xaf, 0x00, 0x17, 0x9e, 0x02, 0xad, + 0xd7, 0xeb, 0x16, 0x52, 0x37, 0xa8, 0xbd, 0x4d, 0x76, 0xc7, 0xbf, 0xd9, 0xff, 0xfc, 0x77, 0x36, + 0xc8, 0x1b, 0x11, 0x32, 0x8a, 0xc1, 0xc1, 0x21, 0x73, 0x64, 0x28, 0xa2, 0x71, 0xd7, 0x61, 0x40, + 0xc7, 0xd1, 0x10, 0x98, 0x03, 0x6f, 0x39, 0xa4, 0x2c, 0x22, 0x69, 0x70, 0x08, 0x10, 0x06, 0x11, + 
0x87, 0x24, 0x28, 0x37, 0xed, 0x8c, 0x12, 0x4e, 0x8c, 0xb6, 0xfc, 0xd0, 0xc6, 0x21, 0xb3, 0x2b, + 0x86, 0x3d, 0xee, 0xda, 0x8a, 0xd1, 0x5a, 0xa9, 0xab, 0x42, 0x81, 0x91, 0x9c, 0xd6, 0x94, 0x91, + 0xf8, 0xd6, 0x7d, 0xf5, 0x71, 0x16, 0x39, 0x38, 0x4d, 0x09, 0xc7, 0x3c, 0x22, 0x29, 0x2b, 0x77, + 0xcb, 0xe2, 0x4e, 0xf1, 0xeb, 0x20, 0x3f, 0x74, 0x0e, 0x23, 0x88, 0xc3, 0x20, 0xc1, 0xec, 0xa8, + 0xcc, 0x78, 0xf0, 0x6f, 0xc6, 0x1b, 0x8a, 0xb3, 0x0c, 0x68, 0x49, 0xb0, 0x3c, 0x74, 0x6f, 0x03, + 0xf8, 0x73, 0x55, 0x7f, 0x1d, 0x20, 0xdc, 0xe2, 0x90, 0xf8, 0xf0, 0x3a, 0x07, 0xc6, 0x8d, 0x87, + 0xe8, 0x8e, 0x3a, 0x65, 0x90, 0xe2, 0x04, 0x4c, 0xad, 0xad, 0x2d, 0xcd, 0xfa, 0x73, 0x6a, 0x71, + 0x07, 0x27, 0x60, 0x7d, 0xd1, 0xd0, 0xc2, 0x76, 0xce, 0x31, 0x87, 0x09, 0x0e, 0x53, 0xa0, 0x05, + 0xd4, 0x1c, 0xe6, 0x8c, 0x93, 0x04, 0x68, 0x10, 0x85, 0x25, 0x06, 0xa9, 0xa5, 0xad, 0xd0, 0x18, + 0x20, 0x44, 0x32, 0xa0, 0x52, 0x9e, 0xa9, 0xb7, 0x1b, 0x4b, 0xcd, 0xde, 0xaa, 0x3d, 0xad, 0xb9, + 0xf6, 0x44, 0xc5, 0x5d, 0x05, 0xf1, 0xcf, 0xf1, 0x84, 0x8e, 0x31, 0x8e, 0xa3, 0x10, 0x73, 0x08, + 0x48, 0x1a, 0x1f, 0x9b, 0x37, 0xdb, 0xda, 0xd2, 0x6d, 0x7f, 0x4e, 0x2d, 0xee, 0xa6, 0xf1, 0xb1, + 0xf5, 0x51, 0x47, 0xad, 0x7a, 0x9e, 0xb1, 0x82, 0x9a, 0x79, 0x56, 0x10, 0x44, 0x7f, 0x0b, 0x42, + 0xb3, 0xd7, 0x52, 0x47, 0x54, 0x0d, 0xb6, 0xd7, 0x85, 0x05, 0xdb, 0x98, 0x1d, 0xf9, 0x48, 0xa6, + 0x8b, 0xd8, 0xd8, 0x41, 0x33, 0x43, 0x0a, 0x98, 0xcb, 0x0e, 0x36, 0x7b, 0xcb, 0xb5, 0xd2, 0xaa, + 0x5b, 0x31, 0xa9, 0x6d, 0xf3, 0x86, 0x5f, 0x52, 0x04, 0x4f, 0xd2, 0x4d, 0xfd, 0x7a, 0x3c, 0x49, + 0x31, 0x4c, 0x34, 0x43, 0x21, 0x21, 0x63, 0x30, 0x1b, 0xc2, 0x1a, 0xb1, 0x23, 0x7f, 0x7b, 0x4d, + 0x34, 0x5b, 0x35, 0xd2, 0x3a, 0x45, 0xed, 0x7a, 0xa7, 0x59, 0x46, 0x52, 0x06, 0xc6, 0x4b, 0x74, + 0x8b, 0x02, 0xcb, 0x63, 0xae, 0x6c, 0x7c, 0x3a, 0xdd, 0xc6, 0x1a, 0xa8, 0x5f, 0x70, 0x7c, 0xc5, + 0xb3, 0x9e, 0xa1, 0xf9, 0x4b, 0x33, 0xff, 0xeb, 0xbe, 0xf6, 0xbe, 0x37, 0x90, 0x39, 0x01, 0x78, + 0x21, 0x8f, 0x62, 0x7c, 0xd5, 0xd0, 0xdd, 0x8b, 0x26, 0xc2, 0x58, 0x9b, 0xae, 0xe2, 0x92, 0x49, + 0x6a, 0x5d, 0xc9, 0x20, 0x6b, 0xf5, 0xdd, 0x8f, 0x9f, 0x1f, 0xf4, 0xc7, 0xc6, 0xb2, 0x78, 0x2f, + 0x4e, 0xfe, 0x92, 0xb6, 0xa6, 0x86, 0x87, 0x39, 0x9d, 0xb3, 0x07, 0xa4, 0xb2, 0xc3, 0xe9, 0x9c, + 0x1a, 0xbf, 0x34, 0x64, 0xd6, 0xd9, 0x65, 0xf4, 0xaf, 0xec, 0x8a, 0x1a, 0xea, 0x96, 0x77, 0x1d, + 0x84, 0xbc, 0x2d, 0x96, 0x57, 0x28, 0x5c, 0xb5, 0x9e, 0x08, 0x85, 0x67, 0x92, 0x4e, 0xce, 0xbd, + 0x16, 0x6b, 0x9d, 0xd3, 0x0b, 0x04, 0xba, 0x49, 0x81, 0x76, 0xb5, 0x8e, 0xf7, 0x5e, 0x47, 0x8b, + 0x43, 0x92, 0x4c, 0x3d, 0x8d, 0x37, 0x5f, 0x67, 0xfb, 0x9e, 0x98, 0xde, 0x3d, 0xed, 0xd5, 0x66, + 0x89, 0x18, 0x91, 0x18, 0xa7, 0x23, 0x9b, 0xd0, 0x91, 0x33, 0x82, 0xb4, 0x98, 0x6d, 0xf5, 0x76, + 0x67, 0x11, 0xab, 0xff, 0xc3, 0x58, 0x51, 0xc1, 0x27, 0xbd, 0xb1, 0xd1, 0xef, 0x7f, 0xd6, 0xdb, + 0x1b, 0x12, 0xd8, 0x0f, 0x99, 0x2d, 0x43, 0x11, 0xed, 0x77, 0xed, 0xb2, 0x30, 0xfb, 0xa6, 0x52, + 0x06, 0xfd, 0x90, 0x0d, 0xaa, 0x94, 0xc1, 0x7e, 0x77, 0xa0, 0x52, 0x7e, 0xeb, 0x8b, 0x72, 0xdd, + 0x75, 0xfb, 0x21, 0x73, 0xdd, 0x2a, 0xc9, 0x75, 0xf7, 0xbb, 0xae, 0xab, 0xd2, 0x0e, 0x66, 0x8a, + 0x73, 0x3e, 0xfa, 0x13, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x46, 0x4e, 0x27, 0xd7, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_service.pb.go new file mode 100644 index 0000000..b07af9f --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_service.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/feed_item_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [FeedItemService.GetFeedItem][google.ads.googleads.v1.services.FeedItemService.GetFeedItem]. +type GetFeedItemRequest struct { + // The resource name of the feed item to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeedItemRequest) Reset() { *m = GetFeedItemRequest{} } +func (m *GetFeedItemRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeedItemRequest) ProtoMessage() {} +func (*GetFeedItemRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_service_07bfb4ad3b8f0fac, []int{0} +} +func (m *GetFeedItemRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeedItemRequest.Unmarshal(m, b) +} +func (m *GetFeedItemRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeedItemRequest.Marshal(b, m, deterministic) +} +func (dst *GetFeedItemRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeedItemRequest.Merge(dst, src) +} +func (m *GetFeedItemRequest) XXX_Size() int { + return xxx_messageInfo_GetFeedItemRequest.Size(m) +} +func (m *GetFeedItemRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeedItemRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeedItemRequest proto.InternalMessageInfo + +func (m *GetFeedItemRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [FeedItemService.MutateFeedItems][google.ads.googleads.v1.services.FeedItemService.MutateFeedItems]. +type MutateFeedItemsRequest struct { + // The ID of the customer whose feed items are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual feed items. + Operations []*FeedItemOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. 
If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemsRequest) Reset() { *m = MutateFeedItemsRequest{} } +func (m *MutateFeedItemsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemsRequest) ProtoMessage() {} +func (*MutateFeedItemsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_service_07bfb4ad3b8f0fac, []int{1} +} +func (m *MutateFeedItemsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemsRequest.Unmarshal(m, b) +} +func (m *MutateFeedItemsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemsRequest.Merge(dst, src) +} +func (m *MutateFeedItemsRequest) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemsRequest.Size(m) +} +func (m *MutateFeedItemsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemsRequest proto.InternalMessageInfo + +func (m *MutateFeedItemsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateFeedItemsRequest) GetOperations() []*FeedItemOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateFeedItemsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateFeedItemsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an feed item. +type FeedItemOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *FeedItemOperation_Create + // *FeedItemOperation_Update + // *FeedItemOperation_Remove + Operation isFeedItemOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemOperation) Reset() { *m = FeedItemOperation{} } +func (m *FeedItemOperation) String() string { return proto.CompactTextString(m) } +func (*FeedItemOperation) ProtoMessage() {} +func (*FeedItemOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_service_07bfb4ad3b8f0fac, []int{2} +} +func (m *FeedItemOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemOperation.Unmarshal(m, b) +} +func (m *FeedItemOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemOperation.Marshal(b, m, deterministic) +} +func (dst *FeedItemOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemOperation.Merge(dst, src) +} +func (m *FeedItemOperation) XXX_Size() int { + return xxx_messageInfo_FeedItemOperation.Size(m) +} +func (m *FeedItemOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemOperation proto.InternalMessageInfo + +func (m *FeedItemOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isFeedItemOperation_Operation interface { + isFeedItemOperation_Operation() +} + +type FeedItemOperation_Create struct { + Create *resources.FeedItem `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type FeedItemOperation_Update struct { + Update *resources.FeedItem `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type FeedItemOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*FeedItemOperation_Create) isFeedItemOperation_Operation() {} + +func (*FeedItemOperation_Update) isFeedItemOperation_Operation() {} + +func (*FeedItemOperation_Remove) isFeedItemOperation_Operation() {} + +func (m *FeedItemOperation) GetOperation() isFeedItemOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *FeedItemOperation) GetCreate() *resources.FeedItem { + if x, ok := m.GetOperation().(*FeedItemOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *FeedItemOperation) GetUpdate() *resources.FeedItem { + if x, ok := m.GetOperation().(*FeedItemOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *FeedItemOperation) GetRemove() string { + if x, ok := m.GetOperation().(*FeedItemOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*FeedItemOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedItemOperation_OneofMarshaler, _FeedItemOperation_OneofUnmarshaler, _FeedItemOperation_OneofSizer, []interface{}{ + (*FeedItemOperation_Create)(nil), + (*FeedItemOperation_Update)(nil), + (*FeedItemOperation_Remove)(nil), + } +} + +func _FeedItemOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedItemOperation) + // operation + switch x := m.Operation.(type) { + case *FeedItemOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *FeedItemOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *FeedItemOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("FeedItemOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _FeedItemOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedItemOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.FeedItem) + err := b.DecodeMessage(msg) + m.Operation = &FeedItemOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.FeedItem) + err := b.DecodeMessage(msg) + m.Operation = &FeedItemOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &FeedItemOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _FeedItemOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedItemOperation) + // operation + switch x := m.Operation.(type) { + case *FeedItemOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an feed item mutate. +type MutateFeedItemsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateFeedItemResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemsResponse) Reset() { *m = MutateFeedItemsResponse{} } +func (m *MutateFeedItemsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemsResponse) ProtoMessage() {} +func (*MutateFeedItemsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_service_07bfb4ad3b8f0fac, []int{3} +} +func (m *MutateFeedItemsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemsResponse.Unmarshal(m, b) +} +func (m *MutateFeedItemsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemsResponse.Merge(dst, src) +} +func (m *MutateFeedItemsResponse) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemsResponse.Size(m) +} +func (m *MutateFeedItemsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemsResponse proto.InternalMessageInfo + +func (m *MutateFeedItemsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateFeedItemsResponse) GetResults() []*MutateFeedItemResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the feed item mutate. +type MutateFeedItemResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemResult) Reset() { *m = MutateFeedItemResult{} } +func (m *MutateFeedItemResult) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemResult) ProtoMessage() {} +func (*MutateFeedItemResult) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_service_07bfb4ad3b8f0fac, []int{4} +} +func (m *MutateFeedItemResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemResult.Unmarshal(m, b) +} +func (m *MutateFeedItemResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemResult.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemResult.Merge(dst, src) +} +func (m *MutateFeedItemResult) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemResult.Size(m) +} +func (m *MutateFeedItemResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemResult proto.InternalMessageInfo + +func (m *MutateFeedItemResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetFeedItemRequest)(nil), "google.ads.googleads.v1.services.GetFeedItemRequest") + proto.RegisterType((*MutateFeedItemsRequest)(nil), "google.ads.googleads.v1.services.MutateFeedItemsRequest") + proto.RegisterType((*FeedItemOperation)(nil), "google.ads.googleads.v1.services.FeedItemOperation") + proto.RegisterType((*MutateFeedItemsResponse)(nil), 
"google.ads.googleads.v1.services.MutateFeedItemsResponse") + proto.RegisterType((*MutateFeedItemResult)(nil), "google.ads.googleads.v1.services.MutateFeedItemResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FeedItemServiceClient is the client API for FeedItemService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FeedItemServiceClient interface { + // Returns the requested feed item in full detail. + GetFeedItem(ctx context.Context, in *GetFeedItemRequest, opts ...grpc.CallOption) (*resources.FeedItem, error) + // Creates, updates, or removes feed items. Operation statuses are + // returned. + MutateFeedItems(ctx context.Context, in *MutateFeedItemsRequest, opts ...grpc.CallOption) (*MutateFeedItemsResponse, error) +} + +type feedItemServiceClient struct { + cc *grpc.ClientConn +} + +func NewFeedItemServiceClient(cc *grpc.ClientConn) FeedItemServiceClient { + return &feedItemServiceClient{cc} +} + +func (c *feedItemServiceClient) GetFeedItem(ctx context.Context, in *GetFeedItemRequest, opts ...grpc.CallOption) (*resources.FeedItem, error) { + out := new(resources.FeedItem) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedItemService/GetFeedItem", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *feedItemServiceClient) MutateFeedItems(ctx context.Context, in *MutateFeedItemsRequest, opts ...grpc.CallOption) (*MutateFeedItemsResponse, error) { + out := new(MutateFeedItemsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedItemService/MutateFeedItems", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FeedItemServiceServer is the server API for FeedItemService service. +type FeedItemServiceServer interface { + // Returns the requested feed item in full detail. + GetFeedItem(context.Context, *GetFeedItemRequest) (*resources.FeedItem, error) + // Creates, updates, or removes feed items. Operation statuses are + // returned. 
+ MutateFeedItems(context.Context, *MutateFeedItemsRequest) (*MutateFeedItemsResponse, error) +} + +func RegisterFeedItemServiceServer(s *grpc.Server, srv FeedItemServiceServer) { + s.RegisterService(&_FeedItemService_serviceDesc, srv) +} + +func _FeedItemService_GetFeedItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedItemServiceServer).GetFeedItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedItemService/GetFeedItem", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedItemServiceServer).GetFeedItem(ctx, req.(*GetFeedItemRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FeedItemService_MutateFeedItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateFeedItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedItemServiceServer).MutateFeedItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedItemService/MutateFeedItems", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedItemServiceServer).MutateFeedItems(ctx, req.(*MutateFeedItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FeedItemService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.FeedItemService", + HandlerType: (*FeedItemServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeedItem", + Handler: _FeedItemService_GetFeedItem_Handler, + }, + { + MethodName: "MutateFeedItems", + Handler: _FeedItemService_MutateFeedItems_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/feed_item_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/feed_item_service.proto", fileDescriptor_feed_item_service_07bfb4ad3b8f0fac) +} + +var fileDescriptor_feed_item_service_07bfb4ad3b8f0fac = []byte{ + // 708 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x6b, 0x13, 0x41, + 0x14, 0x77, 0xb7, 0x52, 0xed, 0x6c, 0xb5, 0x38, 0x56, 0x1b, 0x82, 0x68, 0x58, 0x0b, 0x96, 0x14, + 0x77, 0x49, 0x22, 0xd2, 0x6e, 0xe9, 0x21, 0x85, 0xa6, 0xed, 0xa1, 0xb6, 0x6c, 0xa1, 0x07, 0x09, + 0x2c, 0xd3, 0xec, 0x4b, 0x58, 0xba, 0xbb, 0xb3, 0xce, 0xcc, 0x46, 0x4a, 0xe9, 0x45, 0xf0, 0x13, + 0xf8, 0x0d, 0x04, 0x2f, 0x5e, 0xfd, 0x04, 0x5e, 0xbd, 0x7a, 0xf5, 0xe8, 0xc9, 0xaf, 0x20, 0x82, + 0xec, 0xce, 0x4e, 0xda, 0xa4, 0x86, 0xd8, 0xde, 0xde, 0xbc, 0xf9, 0xfd, 0x7e, 0xef, 0xcd, 0xfb, + 0x33, 0x68, 0xa5, 0x47, 0x69, 0x2f, 0x04, 0x9b, 0xf8, 0xdc, 0x96, 0x66, 0x66, 0xf5, 0x6b, 0x36, + 0x07, 0xd6, 0x0f, 0x3a, 0xc0, 0xed, 0x2e, 0x80, 0xef, 0x05, 0x02, 0x22, 0xaf, 0x70, 0x59, 0x09, + 0xa3, 0x82, 0xe2, 0x8a, 0x84, 0x5b, 0xc4, 0xe7, 0xd6, 0x80, 0x69, 0xf5, 0x6b, 0x96, 0x62, 0x96, + 0x6b, 0xe3, 0xb4, 0x19, 0x70, 0x9a, 0xb2, 0x21, 0x71, 0x29, 0x5a, 0x7e, 0xa4, 0x28, 0x49, 0x60, + 0x93, 0x38, 0xa6, 0x82, 0x88, 0x80, 0xc6, 0xbc, 0xb8, 0x2d, 0x42, 0xda, 0xf9, 0xe9, 0x28, 0xed, + 0xda, 0xdd, 0x00, 0x42, 0xdf, 0x8b, 0x08, 0x3f, 0x2e, 0x10, 
0x8f, 0x47, 0x11, 0x6f, 0x19, 0x49, + 0x12, 0x60, 0x4a, 0x61, 0xa1, 0xb8, 0x67, 0x49, 0xc7, 0xe6, 0x82, 0x88, 0xb4, 0xb8, 0x30, 0x57, + 0x11, 0xde, 0x02, 0xd1, 0x02, 0xf0, 0x77, 0x04, 0x44, 0x2e, 0xbc, 0x49, 0x81, 0x0b, 0xfc, 0x14, + 0xdd, 0x51, 0xb9, 0x7a, 0x31, 0x89, 0xa0, 0xa4, 0x55, 0xb4, 0xa5, 0x19, 0x77, 0x56, 0x39, 0x5f, + 0x91, 0x08, 0xcc, 0x1f, 0x1a, 0x7a, 0xb8, 0x9b, 0x0a, 0x22, 0x40, 0xd1, 0xb9, 0xe2, 0x3f, 0x41, + 0x46, 0x27, 0xe5, 0x82, 0x46, 0xc0, 0xbc, 0xc0, 0x2f, 0xd8, 0x48, 0xb9, 0x76, 0x7c, 0x7c, 0x80, + 0x10, 0x4d, 0x80, 0xc9, 0x57, 0x96, 0xf4, 0xca, 0xd4, 0x92, 0x51, 0x6f, 0x58, 0x93, 0x2a, 0x6b, + 0xa9, 0x40, 0x7b, 0x8a, 0xeb, 0x5e, 0x90, 0xc1, 0xcf, 0xd0, 0x5c, 0x42, 0x98, 0x08, 0x48, 0xe8, + 0x75, 0x49, 0x10, 0xa6, 0x0c, 0x4a, 0x53, 0x15, 0x6d, 0xe9, 0xb6, 0x7b, 0xb7, 0x70, 0xb7, 0xa4, + 0x37, 0x7b, 0x5e, 0x9f, 0x84, 0x81, 0x4f, 0x04, 0x78, 0x34, 0x0e, 0x4f, 0x4a, 0x37, 0x73, 0xd8, + 0xac, 0x72, 0xee, 0xc5, 0xe1, 0x89, 0xf9, 0x5e, 0x47, 0xf7, 0x2e, 0xc5, 0xc3, 0x6b, 0xc8, 0x48, + 0x93, 0x9c, 0x98, 0x55, 0x3f, 0x27, 0x1a, 0xf5, 0xb2, 0xca, 0x5c, 0x95, 0xdf, 0x6a, 0x65, 0x0d, + 0xda, 0x25, 0xfc, 0xd8, 0x45, 0x12, 0x9e, 0xd9, 0x78, 0x13, 0x4d, 0x77, 0x18, 0x10, 0x21, 0xeb, + 0x69, 0xd4, 0x97, 0xc7, 0xbe, 0x78, 0x30, 0x29, 0x83, 0x27, 0x6f, 0xdf, 0x70, 0x0b, 0x72, 0x26, + 0x23, 0x45, 0x4b, 0xfa, 0xb5, 0x64, 0x24, 0x19, 0x97, 0xd0, 0x34, 0x83, 0x88, 0xf6, 0x65, 0x95, + 0x66, 0xb2, 0x1b, 0x79, 0xde, 0x30, 0xd0, 0xcc, 0xa0, 0xac, 0xe6, 0x17, 0x0d, 0x2d, 0x5c, 0x6a, + 0x33, 0x4f, 0x68, 0xcc, 0x01, 0xb7, 0xd0, 0x83, 0x91, 0x8a, 0x7b, 0xc0, 0x18, 0x65, 0xb9, 0xa2, + 0x51, 0xc7, 0x2a, 0x31, 0x96, 0x74, 0xac, 0x83, 0x7c, 0xec, 0xdc, 0xfb, 0xc3, 0xbd, 0xd8, 0xcc, + 0xe0, 0x78, 0x1f, 0xdd, 0x62, 0xc0, 0xd3, 0x50, 0xa8, 0x59, 0x78, 0x39, 0x79, 0x16, 0x86, 0x73, + 0x72, 0x73, 0xba, 0xab, 0x64, 0xcc, 0x35, 0x34, 0xff, 0x2f, 0xc0, 0x7f, 0x4d, 0x76, 0xfd, 0x8f, + 0x8e, 0xe6, 0x14, 0xef, 0x40, 0xc6, 0xc3, 0x9f, 0x34, 0x64, 0x5c, 0xd8, 0x14, 0xfc, 0x62, 0x72, + 0x86, 0x97, 0x17, 0xab, 0x7c, 0x95, 0x56, 0x99, 0x8d, 0x77, 0xdf, 0x7f, 0x7e, 0xd0, 0x9f, 0xe3, + 0xe5, 0xec, 0xef, 0x38, 0x1d, 0x4a, 0x7b, 0x5d, 0xed, 0x12, 0xb7, 0xab, 0xf9, 0x67, 0x92, 0xf7, + 0xc5, 0xae, 0x9e, 0xe1, 0xaf, 0x1a, 0x9a, 0x1b, 0x69, 0x17, 0x5e, 0xb9, 0x6a, 0x35, 0xd5, 0x22, + 0x97, 0x57, 0xaf, 0xc1, 0x94, 0xb3, 0x61, 0xae, 0xe6, 0xd9, 0x37, 0x4c, 0x2b, 0xcb, 0xfe, 0x3c, + 0xdd, 0xd3, 0x0b, 0x1f, 0xc3, 0x7a, 0xf5, 0xec, 0x3c, 0x79, 0x27, 0xca, 0x85, 0x1c, 0xad, 0xba, + 0xf1, 0x5b, 0x43, 0x8b, 0x1d, 0x1a, 0x4d, 0x8c, 0xbd, 0x31, 0x3f, 0xd2, 0xa5, 0xfd, 0x6c, 0xff, + 0xf6, 0xb5, 0xd7, 0xdb, 0x05, 0xb3, 0x47, 0x43, 0x12, 0xf7, 0x2c, 0xca, 0x7a, 0x76, 0x0f, 0xe2, + 0x7c, 0x3b, 0xd5, 0x8f, 0x9c, 0x04, 0x7c, 0xfc, 0xe7, 0xbf, 0xa6, 0x8c, 0x8f, 0xfa, 0xd4, 0x56, + 0xb3, 0xf9, 0x59, 0xaf, 0x6c, 0x49, 0xc1, 0xa6, 0xcf, 0x2d, 0x69, 0x66, 0xd6, 0x61, 0xcd, 0x2a, + 0x02, 0xf3, 0x6f, 0x0a, 0xd2, 0x6e, 0xfa, 0xbc, 0x3d, 0x80, 0xb4, 0x0f, 0x6b, 0x6d, 0x05, 0xf9, + 0xa5, 0x2f, 0x4a, 0xbf, 0xe3, 0x34, 0x7d, 0xee, 0x38, 0x03, 0x90, 0xe3, 0x1c, 0xd6, 0x1c, 0x47, + 0xc1, 0x8e, 0xa6, 0xf3, 0x3c, 0x1b, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x02, 0x4f, 0xc6, 0x7f, + 0xa3, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_target_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_target_service.pb.go new file mode 100644 index 0000000..bb54fef --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_item_target_service.pb.go @@ -0,0 +1,499 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/feed_item_target_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [FeedItemTargetService.GetFeedItemTarget][google.ads.googleads.v1.services.FeedItemTargetService.GetFeedItemTarget]. +type GetFeedItemTargetRequest struct { + // The resource name of the feed item targets to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeedItemTargetRequest) Reset() { *m = GetFeedItemTargetRequest{} } +func (m *GetFeedItemTargetRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeedItemTargetRequest) ProtoMessage() {} +func (*GetFeedItemTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_service_7676c6ec99ca33e2, []int{0} +} +func (m *GetFeedItemTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeedItemTargetRequest.Unmarshal(m, b) +} +func (m *GetFeedItemTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeedItemTargetRequest.Marshal(b, m, deterministic) +} +func (dst *GetFeedItemTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeedItemTargetRequest.Merge(dst, src) +} +func (m *GetFeedItemTargetRequest) XXX_Size() int { + return xxx_messageInfo_GetFeedItemTargetRequest.Size(m) +} +func (m *GetFeedItemTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeedItemTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeedItemTargetRequest proto.InternalMessageInfo + +func (m *GetFeedItemTargetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [FeedItemTargetService.MutateFeedItemTargets][google.ads.googleads.v1.services.FeedItemTargetService.MutateFeedItemTargets]. +type MutateFeedItemTargetsRequest struct { + // The ID of the customer whose feed item targets are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual feed item targets. 
+ Operations []*FeedItemTargetOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemTargetsRequest) Reset() { *m = MutateFeedItemTargetsRequest{} } +func (m *MutateFeedItemTargetsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemTargetsRequest) ProtoMessage() {} +func (*MutateFeedItemTargetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_service_7676c6ec99ca33e2, []int{1} +} +func (m *MutateFeedItemTargetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemTargetsRequest.Unmarshal(m, b) +} +func (m *MutateFeedItemTargetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemTargetsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemTargetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemTargetsRequest.Merge(dst, src) +} +func (m *MutateFeedItemTargetsRequest) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemTargetsRequest.Size(m) +} +func (m *MutateFeedItemTargetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemTargetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemTargetsRequest proto.InternalMessageInfo + +func (m *MutateFeedItemTargetsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateFeedItemTargetsRequest) GetOperations() []*FeedItemTargetOperation { + if m != nil { + return m.Operations + } + return nil +} + +// A single operation (create, remove) on an feed item target. +type FeedItemTargetOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *FeedItemTargetOperation_Create + // *FeedItemTargetOperation_Remove + Operation isFeedItemTargetOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedItemTargetOperation) Reset() { *m = FeedItemTargetOperation{} } +func (m *FeedItemTargetOperation) String() string { return proto.CompactTextString(m) } +func (*FeedItemTargetOperation) ProtoMessage() {} +func (*FeedItemTargetOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_service_7676c6ec99ca33e2, []int{2} +} +func (m *FeedItemTargetOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedItemTargetOperation.Unmarshal(m, b) +} +func (m *FeedItemTargetOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedItemTargetOperation.Marshal(b, m, deterministic) +} +func (dst *FeedItemTargetOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedItemTargetOperation.Merge(dst, src) +} +func (m *FeedItemTargetOperation) XXX_Size() int { + return xxx_messageInfo_FeedItemTargetOperation.Size(m) +} +func (m *FeedItemTargetOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FeedItemTargetOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedItemTargetOperation proto.InternalMessageInfo + +type isFeedItemTargetOperation_Operation interface { + isFeedItemTargetOperation_Operation() +} + +type FeedItemTargetOperation_Create struct { + Create *resources.FeedItemTarget `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type FeedItemTargetOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*FeedItemTargetOperation_Create) isFeedItemTargetOperation_Operation() {} + +func (*FeedItemTargetOperation_Remove) isFeedItemTargetOperation_Operation() {} + +func (m *FeedItemTargetOperation) GetOperation() isFeedItemTargetOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *FeedItemTargetOperation) GetCreate() *resources.FeedItemTarget { + if x, ok := m.GetOperation().(*FeedItemTargetOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *FeedItemTargetOperation) GetRemove() string { + if x, ok := m.GetOperation().(*FeedItemTargetOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*FeedItemTargetOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedItemTargetOperation_OneofMarshaler, _FeedItemTargetOperation_OneofUnmarshaler, _FeedItemTargetOperation_OneofSizer, []interface{}{ + (*FeedItemTargetOperation_Create)(nil), + (*FeedItemTargetOperation_Remove)(nil), + } +} + +func _FeedItemTargetOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedItemTargetOperation) + // operation + switch x := m.Operation.(type) { + case *FeedItemTargetOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *FeedItemTargetOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("FeedItemTargetOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _FeedItemTargetOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedItemTargetOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.FeedItemTarget) + err := b.DecodeMessage(msg) + m.Operation = &FeedItemTargetOperation_Create{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &FeedItemTargetOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _FeedItemTargetOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedItemTargetOperation) + // operation + switch x := m.Operation.(type) { + case *FeedItemTargetOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedItemTargetOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an feed item target mutate. +type MutateFeedItemTargetsResponse struct { + // All results for the mutate. 
+ Results []*MutateFeedItemTargetResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemTargetsResponse) Reset() { *m = MutateFeedItemTargetsResponse{} } +func (m *MutateFeedItemTargetsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemTargetsResponse) ProtoMessage() {} +func (*MutateFeedItemTargetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_service_7676c6ec99ca33e2, []int{3} +} +func (m *MutateFeedItemTargetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemTargetsResponse.Unmarshal(m, b) +} +func (m *MutateFeedItemTargetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemTargetsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemTargetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemTargetsResponse.Merge(dst, src) +} +func (m *MutateFeedItemTargetsResponse) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemTargetsResponse.Size(m) +} +func (m *MutateFeedItemTargetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemTargetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemTargetsResponse proto.InternalMessageInfo + +func (m *MutateFeedItemTargetsResponse) GetResults() []*MutateFeedItemTargetResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the feed item target mutate. +type MutateFeedItemTargetResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedItemTargetResult) Reset() { *m = MutateFeedItemTargetResult{} } +func (m *MutateFeedItemTargetResult) String() string { return proto.CompactTextString(m) } +func (*MutateFeedItemTargetResult) ProtoMessage() {} +func (*MutateFeedItemTargetResult) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_item_target_service_7676c6ec99ca33e2, []int{4} +} +func (m *MutateFeedItemTargetResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedItemTargetResult.Unmarshal(m, b) +} +func (m *MutateFeedItemTargetResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedItemTargetResult.Marshal(b, m, deterministic) +} +func (dst *MutateFeedItemTargetResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedItemTargetResult.Merge(dst, src) +} +func (m *MutateFeedItemTargetResult) XXX_Size() int { + return xxx_messageInfo_MutateFeedItemTargetResult.Size(m) +} +func (m *MutateFeedItemTargetResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedItemTargetResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedItemTargetResult proto.InternalMessageInfo + +func (m *MutateFeedItemTargetResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetFeedItemTargetRequest)(nil), "google.ads.googleads.v1.services.GetFeedItemTargetRequest") + proto.RegisterType((*MutateFeedItemTargetsRequest)(nil), "google.ads.googleads.v1.services.MutateFeedItemTargetsRequest") + proto.RegisterType((*FeedItemTargetOperation)(nil), 
"google.ads.googleads.v1.services.FeedItemTargetOperation") + proto.RegisterType((*MutateFeedItemTargetsResponse)(nil), "google.ads.googleads.v1.services.MutateFeedItemTargetsResponse") + proto.RegisterType((*MutateFeedItemTargetResult)(nil), "google.ads.googleads.v1.services.MutateFeedItemTargetResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FeedItemTargetServiceClient is the client API for FeedItemTargetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FeedItemTargetServiceClient interface { + // Returns the requested feed item targets in full detail. + GetFeedItemTarget(ctx context.Context, in *GetFeedItemTargetRequest, opts ...grpc.CallOption) (*resources.FeedItemTarget, error) + // Creates or removes feed item targets. Operation statuses are returned. + MutateFeedItemTargets(ctx context.Context, in *MutateFeedItemTargetsRequest, opts ...grpc.CallOption) (*MutateFeedItemTargetsResponse, error) +} + +type feedItemTargetServiceClient struct { + cc *grpc.ClientConn +} + +func NewFeedItemTargetServiceClient(cc *grpc.ClientConn) FeedItemTargetServiceClient { + return &feedItemTargetServiceClient{cc} +} + +func (c *feedItemTargetServiceClient) GetFeedItemTarget(ctx context.Context, in *GetFeedItemTargetRequest, opts ...grpc.CallOption) (*resources.FeedItemTarget, error) { + out := new(resources.FeedItemTarget) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedItemTargetService/GetFeedItemTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *feedItemTargetServiceClient) MutateFeedItemTargets(ctx context.Context, in *MutateFeedItemTargetsRequest, opts ...grpc.CallOption) (*MutateFeedItemTargetsResponse, error) { + out := new(MutateFeedItemTargetsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedItemTargetService/MutateFeedItemTargets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FeedItemTargetServiceServer is the server API for FeedItemTargetService service. +type FeedItemTargetServiceServer interface { + // Returns the requested feed item targets in full detail. + GetFeedItemTarget(context.Context, *GetFeedItemTargetRequest) (*resources.FeedItemTarget, error) + // Creates or removes feed item targets. Operation statuses are returned. 
+ MutateFeedItemTargets(context.Context, *MutateFeedItemTargetsRequest) (*MutateFeedItemTargetsResponse, error) +} + +func RegisterFeedItemTargetServiceServer(s *grpc.Server, srv FeedItemTargetServiceServer) { + s.RegisterService(&_FeedItemTargetService_serviceDesc, srv) +} + +func _FeedItemTargetService_GetFeedItemTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedItemTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedItemTargetServiceServer).GetFeedItemTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedItemTargetService/GetFeedItemTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedItemTargetServiceServer).GetFeedItemTarget(ctx, req.(*GetFeedItemTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FeedItemTargetService_MutateFeedItemTargets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateFeedItemTargetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedItemTargetServiceServer).MutateFeedItemTargets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedItemTargetService/MutateFeedItemTargets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedItemTargetServiceServer).MutateFeedItemTargets(ctx, req.(*MutateFeedItemTargetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FeedItemTargetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.FeedItemTargetService", + HandlerType: (*FeedItemTargetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeedItemTarget", + Handler: _FeedItemTargetService_GetFeedItemTarget_Handler, + }, + { + MethodName: "MutateFeedItemTargets", + Handler: _FeedItemTargetService_MutateFeedItemTargets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/feed_item_target_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/feed_item_target_service.proto", fileDescriptor_feed_item_target_service_7676c6ec99ca33e2) +} + +var fileDescriptor_feed_item_target_service_7676c6ec99ca33e2 = []byte{ + // 559 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x94, 0x3f, 0x6f, 0xd3, 0x40, + 0x14, 0xc0, 0xb1, 0x2b, 0x05, 0xf5, 0x02, 0x03, 0x27, 0x55, 0x44, 0x56, 0x11, 0x91, 0xe9, 0x50, + 0x65, 0x38, 0xcb, 0x29, 0x03, 0x39, 0x28, 0x91, 0x33, 0x90, 0x56, 0x08, 0xa8, 0x0c, 0x8a, 0x04, + 0x8a, 0x14, 0x1d, 0xf1, 0xc3, 0xb2, 0x14, 0xfb, 0xc2, 0xdd, 0x25, 0x0c, 0x55, 0x07, 0xd8, 0x99, + 0xd8, 0x18, 0x19, 0x59, 0x99, 0xf9, 0x02, 0xac, 0x4c, 0xec, 0x4c, 0x7c, 0x0a, 0x64, 0x5f, 0xce, + 0x24, 0x25, 0x56, 0xa0, 0xdb, 0xbb, 0xbb, 0xf7, 0x7e, 0xef, 0xef, 0x3d, 0xd4, 0x8d, 0x39, 0x8f, + 0x27, 0xe0, 0xb1, 0x48, 0x7a, 0x5a, 0xcc, 0xa5, 0xb9, 0xef, 0x49, 0x10, 0xf3, 0x64, 0x0c, 0xd2, + 0x7b, 0x05, 0x10, 0x8d, 0x12, 0x05, 0xe9, 0x48, 0x31, 0x11, 0x83, 0x1a, 0x2d, 0x5e, 0xc8, 0x54, + 0x70, 0xc5, 0x71, 0x53, 0x5b, 0x11, 0x16, 0x49, 0x52, 0x02, 0xc8, 0xdc, 0x27, 0x06, 0xe0, 0xdc, + 0xa9, 0x72, 0x21, 
0x40, 0xf2, 0x99, 0x58, 0xe7, 0x43, 0xb3, 0x9d, 0x5d, 0x63, 0x39, 0x4d, 0x3c, + 0x96, 0x65, 0x5c, 0x31, 0x95, 0xf0, 0x4c, 0xea, 0x57, 0xb7, 0x8b, 0x1a, 0x7d, 0x50, 0x0f, 0x00, + 0xa2, 0x63, 0x05, 0xe9, 0xb3, 0xc2, 0x30, 0x84, 0xd7, 0x33, 0x90, 0x0a, 0xdf, 0x42, 0x57, 0x0d, + 0x7d, 0x94, 0xb1, 0x14, 0x1a, 0x56, 0xd3, 0xda, 0xdf, 0x0e, 0xaf, 0x98, 0xcb, 0xc7, 0x2c, 0x05, + 0xf7, 0xa3, 0x85, 0x76, 0x1f, 0xcd, 0x14, 0x53, 0xb0, 0x0a, 0x91, 0x86, 0x72, 0x13, 0xd5, 0xc7, + 0x33, 0xa9, 0x78, 0x0a, 0x62, 0x94, 0x44, 0x0b, 0x06, 0x32, 0x57, 0xc7, 0x11, 0x7e, 0x8e, 0x10, + 0x9f, 0x82, 0xd0, 0x61, 0x35, 0xec, 0xe6, 0xd6, 0x7e, 0xbd, 0xdd, 0x21, 0x9b, 0x2a, 0x42, 0x56, + 0xdd, 0x3d, 0x31, 0x84, 0x70, 0x09, 0xe6, 0xbe, 0xb7, 0xd0, 0xf5, 0x0a, 0x3d, 0xfc, 0x10, 0xd5, + 0xc6, 0x02, 0x98, 0xd2, 0x69, 0xd5, 0xdb, 0x7e, 0xa5, 0xcb, 0xb2, 0xc4, 0xe7, 0x7c, 0x1e, 0x5d, + 0x0a, 0x17, 0x08, 0xdc, 0x40, 0x35, 0x01, 0x29, 0x9f, 0x43, 0xc3, 0xce, 0xf3, 0xcb, 0x5f, 0xf4, + 0xb9, 0x57, 0x47, 0xdb, 0x65, 0x40, 0xee, 0x1b, 0x74, 0xa3, 0xa2, 0x56, 0x72, 0xca, 0x33, 0x09, + 0x78, 0x80, 0x2e, 0x0b, 0x90, 0xb3, 0x89, 0x32, 0x85, 0xb8, 0xb7, 0xb9, 0x10, 0xeb, 0x88, 0x61, + 0x01, 0x09, 0x0d, 0xcc, 0x0d, 0x90, 0x53, 0xad, 0xf6, 0x4f, 0x8d, 0x6e, 0x7f, 0xd9, 0x42, 0x3b, + 0xab, 0xd6, 0x4f, 0x75, 0x04, 0xf8, 0xab, 0x85, 0xae, 0xfd, 0x35, 0x44, 0x98, 0x6e, 0x8e, 0xbc, + 0x6a, 0xf2, 0x9c, 0xff, 0xef, 0x85, 0xdb, 0x79, 0xf7, 0xfd, 0xe7, 0x07, 0xfb, 0x00, 0xfb, 0xf9, + 0xa7, 0x38, 0x5d, 0x49, 0xe7, 0xd0, 0x0c, 0x9b, 0xf4, 0x5a, 0xc5, 0x2f, 0x59, 0xaa, 0xbc, 0xd7, + 0x3a, 0xc3, 0x3f, 0x2c, 0xb4, 0xb3, 0xb6, 0x2d, 0xf8, 0xfe, 0xc5, 0xaa, 0x6f, 0x66, 0xdf, 0xe9, + 0x5e, 0xd8, 0x5e, 0xcf, 0x83, 0xdb, 0x2d, 0xb2, 0xea, 0xb8, 0xb7, 0xf3, 0xac, 0xfe, 0xa4, 0x71, + 0xba, 0xf4, 0xa3, 0x0e, 0x5b, 0x67, 0xe7, 0x93, 0xa2, 0x69, 0x01, 0xa5, 0x56, 0xab, 0xf7, 0xd6, + 0x46, 0x7b, 0x63, 0x9e, 0x6e, 0x8c, 0xa3, 0xe7, 0xac, 0xed, 0xed, 0x49, 0xbe, 0x24, 0x4e, 0xac, + 0x17, 0x47, 0x0b, 0xfb, 0x98, 0x4f, 0x58, 0x16, 0x13, 0x2e, 0x62, 0x2f, 0x86, 0xac, 0x58, 0x21, + 0x66, 0x1d, 0x4d, 0x13, 0x59, 0xbd, 0x00, 0xef, 0x1a, 0xe1, 0x93, 0xbd, 0xd5, 0x0f, 0x82, 0xcf, + 0x76, 0xb3, 0xaf, 0x81, 0x41, 0x24, 0x89, 0x16, 0x73, 0x69, 0xe0, 0x93, 0x85, 0x63, 0xf9, 0xcd, + 0xa8, 0x0c, 0x83, 0x48, 0x0e, 0x4b, 0x95, 0xe1, 0xc0, 0x1f, 0x1a, 0x95, 0x5f, 0xf6, 0x9e, 0xbe, + 0xa7, 0x34, 0x88, 0x24, 0xa5, 0xa5, 0x12, 0xa5, 0x03, 0x9f, 0x52, 0xa3, 0xf6, 0xb2, 0x56, 0xc4, + 0x79, 0xf0, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xd9, 0xad, 0xe7, 0xfc, 0xa7, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_mapping_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_mapping_service.pb.go new file mode 100644 index 0000000..657899d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_mapping_service.pb.go @@ -0,0 +1,544 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/feed_mapping_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [FeedMappingService.GetFeedMapping][google.ads.googleads.v1.services.FeedMappingService.GetFeedMapping]. +type GetFeedMappingRequest struct { + // The resource name of the feed mapping to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeedMappingRequest) Reset() { *m = GetFeedMappingRequest{} } +func (m *GetFeedMappingRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeedMappingRequest) ProtoMessage() {} +func (*GetFeedMappingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_service_f33bc33a71e46f7d, []int{0} +} +func (m *GetFeedMappingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeedMappingRequest.Unmarshal(m, b) +} +func (m *GetFeedMappingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeedMappingRequest.Marshal(b, m, deterministic) +} +func (dst *GetFeedMappingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeedMappingRequest.Merge(dst, src) +} +func (m *GetFeedMappingRequest) XXX_Size() int { + return xxx_messageInfo_GetFeedMappingRequest.Size(m) +} +func (m *GetFeedMappingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeedMappingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeedMappingRequest proto.InternalMessageInfo + +func (m *GetFeedMappingRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [FeedMappingService.MutateFeedMappings][google.ads.googleads.v1.services.FeedMappingService.MutateFeedMappings]. +type MutateFeedMappingsRequest struct { + // The ID of the customer whose feed mappings are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual feed mappings. + Operations []*FeedMappingOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. 
+ PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedMappingsRequest) Reset() { *m = MutateFeedMappingsRequest{} } +func (m *MutateFeedMappingsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateFeedMappingsRequest) ProtoMessage() {} +func (*MutateFeedMappingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_service_f33bc33a71e46f7d, []int{1} +} +func (m *MutateFeedMappingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedMappingsRequest.Unmarshal(m, b) +} +func (m *MutateFeedMappingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedMappingsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateFeedMappingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedMappingsRequest.Merge(dst, src) +} +func (m *MutateFeedMappingsRequest) XXX_Size() int { + return xxx_messageInfo_MutateFeedMappingsRequest.Size(m) +} +func (m *MutateFeedMappingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedMappingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedMappingsRequest proto.InternalMessageInfo + +func (m *MutateFeedMappingsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateFeedMappingsRequest) GetOperations() []*FeedMappingOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateFeedMappingsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateFeedMappingsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove) on a feed mapping. +type FeedMappingOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *FeedMappingOperation_Create + // *FeedMappingOperation_Remove + Operation isFeedMappingOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedMappingOperation) Reset() { *m = FeedMappingOperation{} } +func (m *FeedMappingOperation) String() string { return proto.CompactTextString(m) } +func (*FeedMappingOperation) ProtoMessage() {} +func (*FeedMappingOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_service_f33bc33a71e46f7d, []int{2} +} +func (m *FeedMappingOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedMappingOperation.Unmarshal(m, b) +} +func (m *FeedMappingOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedMappingOperation.Marshal(b, m, deterministic) +} +func (dst *FeedMappingOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedMappingOperation.Merge(dst, src) +} +func (m *FeedMappingOperation) XXX_Size() int { + return xxx_messageInfo_FeedMappingOperation.Size(m) +} +func (m *FeedMappingOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FeedMappingOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedMappingOperation proto.InternalMessageInfo + +type isFeedMappingOperation_Operation interface { + isFeedMappingOperation_Operation() +} + +type FeedMappingOperation_Create struct { + Create *resources.FeedMapping `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type FeedMappingOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*FeedMappingOperation_Create) isFeedMappingOperation_Operation() {} + +func (*FeedMappingOperation_Remove) isFeedMappingOperation_Operation() {} + +func (m *FeedMappingOperation) GetOperation() isFeedMappingOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *FeedMappingOperation) GetCreate() *resources.FeedMapping { + if x, ok := m.GetOperation().(*FeedMappingOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *FeedMappingOperation) GetRemove() string { + if x, ok := m.GetOperation().(*FeedMappingOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*FeedMappingOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedMappingOperation_OneofMarshaler, _FeedMappingOperation_OneofUnmarshaler, _FeedMappingOperation_OneofSizer, []interface{}{ + (*FeedMappingOperation_Create)(nil), + (*FeedMappingOperation_Remove)(nil), + } +} + +func _FeedMappingOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedMappingOperation) + // operation + switch x := m.Operation.(type) { + case *FeedMappingOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *FeedMappingOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("FeedMappingOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _FeedMappingOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedMappingOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.FeedMapping) + err := b.DecodeMessage(msg) + m.Operation = &FeedMappingOperation_Create{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &FeedMappingOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _FeedMappingOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedMappingOperation) + // operation + switch x := m.Operation.(type) { + case *FeedMappingOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedMappingOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a feed mapping mutate. +type MutateFeedMappingsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateFeedMappingResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedMappingsResponse) Reset() { *m = MutateFeedMappingsResponse{} } +func (m *MutateFeedMappingsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateFeedMappingsResponse) ProtoMessage() {} +func (*MutateFeedMappingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_service_f33bc33a71e46f7d, []int{3} +} +func (m *MutateFeedMappingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedMappingsResponse.Unmarshal(m, b) +} +func (m *MutateFeedMappingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedMappingsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateFeedMappingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedMappingsResponse.Merge(dst, src) +} +func (m *MutateFeedMappingsResponse) XXX_Size() int { + return xxx_messageInfo_MutateFeedMappingsResponse.Size(m) +} +func (m *MutateFeedMappingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedMappingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedMappingsResponse proto.InternalMessageInfo + +func (m *MutateFeedMappingsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateFeedMappingsResponse) GetResults() []*MutateFeedMappingResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the feed mapping mutate. +type MutateFeedMappingResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedMappingResult) Reset() { *m = MutateFeedMappingResult{} } +func (m *MutateFeedMappingResult) String() string { return proto.CompactTextString(m) } +func (*MutateFeedMappingResult) ProtoMessage() {} +func (*MutateFeedMappingResult) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_mapping_service_f33bc33a71e46f7d, []int{4} +} +func (m *MutateFeedMappingResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedMappingResult.Unmarshal(m, b) +} +func (m *MutateFeedMappingResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedMappingResult.Marshal(b, m, deterministic) +} +func (dst *MutateFeedMappingResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedMappingResult.Merge(dst, src) +} +func (m *MutateFeedMappingResult) XXX_Size() int { + return xxx_messageInfo_MutateFeedMappingResult.Size(m) +} +func (m *MutateFeedMappingResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedMappingResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedMappingResult proto.InternalMessageInfo + +func (m *MutateFeedMappingResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetFeedMappingRequest)(nil), "google.ads.googleads.v1.services.GetFeedMappingRequest") + proto.RegisterType((*MutateFeedMappingsRequest)(nil), "google.ads.googleads.v1.services.MutateFeedMappingsRequest") + proto.RegisterType((*FeedMappingOperation)(nil), "google.ads.googleads.v1.services.FeedMappingOperation") + proto.RegisterType((*MutateFeedMappingsResponse)(nil), "google.ads.googleads.v1.services.MutateFeedMappingsResponse") + proto.RegisterType((*MutateFeedMappingResult)(nil), "google.ads.googleads.v1.services.MutateFeedMappingResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FeedMappingServiceClient is the client API for FeedMappingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FeedMappingServiceClient interface { + // Returns the requested feed mapping in full detail. + GetFeedMapping(ctx context.Context, in *GetFeedMappingRequest, opts ...grpc.CallOption) (*resources.FeedMapping, error) + // Creates or removes feed mappings. Operation statuses are + // returned. + MutateFeedMappings(ctx context.Context, in *MutateFeedMappingsRequest, opts ...grpc.CallOption) (*MutateFeedMappingsResponse, error) +} + +type feedMappingServiceClient struct { + cc *grpc.ClientConn +} + +func NewFeedMappingServiceClient(cc *grpc.ClientConn) FeedMappingServiceClient { + return &feedMappingServiceClient{cc} +} + +func (c *feedMappingServiceClient) GetFeedMapping(ctx context.Context, in *GetFeedMappingRequest, opts ...grpc.CallOption) (*resources.FeedMapping, error) { + out := new(resources.FeedMapping) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedMappingService/GetFeedMapping", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *feedMappingServiceClient) MutateFeedMappings(ctx context.Context, in *MutateFeedMappingsRequest, opts ...grpc.CallOption) (*MutateFeedMappingsResponse, error) { + out := new(MutateFeedMappingsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedMappingService/MutateFeedMappings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FeedMappingServiceServer is the server API for FeedMappingService service. +type FeedMappingServiceServer interface { + // Returns the requested feed mapping in full detail. + GetFeedMapping(context.Context, *GetFeedMappingRequest) (*resources.FeedMapping, error) + // Creates or removes feed mappings. Operation statuses are + // returned. + MutateFeedMappings(context.Context, *MutateFeedMappingsRequest) (*MutateFeedMappingsResponse, error) +} + +func RegisterFeedMappingServiceServer(s *grpc.Server, srv FeedMappingServiceServer) { + s.RegisterService(&_FeedMappingService_serviceDesc, srv) +} + +func _FeedMappingService_GetFeedMapping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedMappingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedMappingServiceServer).GetFeedMapping(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedMappingService/GetFeedMapping", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedMappingServiceServer).GetFeedMapping(ctx, req.(*GetFeedMappingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FeedMappingService_MutateFeedMappings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateFeedMappingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedMappingServiceServer).MutateFeedMappings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedMappingService/MutateFeedMappings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedMappingServiceServer).MutateFeedMappings(ctx, req.(*MutateFeedMappingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FeedMappingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.FeedMappingService", + HandlerType: (*FeedMappingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeedMapping", + Handler: _FeedMappingService_GetFeedMapping_Handler, + }, + { + MethodName: "MutateFeedMappings", + Handler: _FeedMappingService_MutateFeedMappings_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/feed_mapping_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/feed_mapping_service.proto", fileDescriptor_feed_mapping_service_f33bc33a71e46f7d) +} + +var fileDescriptor_feed_mapping_service_f33bc33a71e46f7d = []byte{ + // 663 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x6b, 0x13, 0x4f, + 0x18, 0xfe, 0x6d, 0xf2, 0xa3, 0xda, 0x49, 0xad, 0x30, 0x5a, 0x1a, 0x83, 0x68, 0x58, 0x0b, 0x96, + 0x1c, 0x66, 0x4d, 0x94, 
0x8a, 0xdb, 0x56, 0x48, 0xc1, 0xb6, 0x1e, 0x6a, 0xcb, 0x16, 0x72, 0x90, + 0xc0, 0x32, 0xcd, 0xbe, 0x5d, 0x16, 0x76, 0x77, 0xd6, 0x99, 0xd9, 0x48, 0x29, 0xbd, 0x78, 0xf1, + 0x03, 0x78, 0xf1, 0xec, 0xd1, 0x9b, 0x47, 0xbf, 0x82, 0xe0, 0xc9, 0x6f, 0x20, 0x9e, 0xfc, 0x10, + 0x22, 0xbb, 0xb3, 0x13, 0x37, 0x6d, 0x42, 0xb4, 0xb7, 0x99, 0xf7, 0x7d, 0x9f, 0xe7, 0x7d, 0xde, + 0x3f, 0x33, 0x68, 0xdd, 0x67, 0xcc, 0x0f, 0xc1, 0xa2, 0x9e, 0xb0, 0xd4, 0x31, 0x3b, 0x0d, 0xdb, + 0x96, 0x00, 0x3e, 0x0c, 0x06, 0x20, 0xac, 0x63, 0x00, 0xcf, 0x8d, 0x68, 0x92, 0x04, 0xb1, 0xef, + 0x16, 0x56, 0x92, 0x70, 0x26, 0x19, 0x6e, 0x2a, 0x04, 0xa1, 0x9e, 0x20, 0x23, 0x30, 0x19, 0xb6, + 0x89, 0x06, 0x37, 0x1e, 0x4d, 0xa3, 0xe7, 0x20, 0x58, 0xca, 0xcf, 0xf3, 0x2b, 0xde, 0xc6, 0x6d, + 0x8d, 0x4a, 0x02, 0x8b, 0xc6, 0x31, 0x93, 0x54, 0x06, 0x2c, 0x16, 0x85, 0xf7, 0x4e, 0xe1, 0xcd, + 0x6f, 0x47, 0xe9, 0xb1, 0xf5, 0x9a, 0xd3, 0x24, 0x01, 0xae, 0xfd, 0xcb, 0x85, 0x9f, 0x27, 0x03, + 0x4b, 0x48, 0x2a, 0xd3, 0xc2, 0x61, 0x6e, 0xa0, 0xa5, 0x1d, 0x90, 0xdb, 0x00, 0xde, 0x9e, 0x4a, + 0xe7, 0xc0, 0xab, 0x14, 0x84, 0xc4, 0xf7, 0xd0, 0x35, 0xad, 0xc7, 0x8d, 0x69, 0x04, 0x75, 0xa3, + 0x69, 0xac, 0xce, 0x3b, 0x0b, 0xda, 0xf8, 0x82, 0x46, 0x60, 0x7e, 0x37, 0xd0, 0xad, 0xbd, 0x54, + 0x52, 0x09, 0x25, 0x06, 0xa1, 0x29, 0xee, 0xa2, 0xda, 0x20, 0x15, 0x92, 0x45, 0xc0, 0xdd, 0xc0, + 0x2b, 0x08, 0x90, 0x36, 0x3d, 0xf7, 0x70, 0x0f, 0x21, 0x96, 0x00, 0x57, 0x95, 0xd4, 0x2b, 0xcd, + 0xea, 0x6a, 0xad, 0xb3, 0x46, 0x66, 0x35, 0x90, 0x94, 0x72, 0xed, 0x6b, 0xb8, 0x53, 0x62, 0xc2, + 0xf7, 0xd1, 0xf5, 0x84, 0x72, 0x19, 0xd0, 0xd0, 0x3d, 0xa6, 0x41, 0x98, 0x72, 0xa8, 0x57, 0x9b, + 0xc6, 0xea, 0x55, 0x67, 0xb1, 0x30, 0x6f, 0x2b, 0x6b, 0x56, 0xe4, 0x90, 0x86, 0x81, 0x47, 0x25, + 0xb8, 0x2c, 0x0e, 0x4f, 0xea, 0xff, 0xe7, 0x61, 0x0b, 0xda, 0xb8, 0x1f, 0x87, 0x27, 0xe6, 0x5b, + 0x03, 0xdd, 0x9c, 0x94, 0x12, 0xef, 0xa2, 0xb9, 0x01, 0x07, 0x2a, 0x55, 0x6f, 0x6a, 0x1d, 0x32, + 0x55, 0xfa, 0x68, 0xb2, 0x65, 0xed, 0xbb, 0xff, 0x39, 0x05, 0x1e, 0xd7, 0xd1, 0x1c, 0x87, 0x88, + 0x0d, 0x95, 0xce, 0xf9, 0xcc, 0xa3, 0xee, 0x5b, 0x35, 0x34, 0x3f, 0x2a, 0xcc, 0xfc, 0x6c, 0xa0, + 0xc6, 0xa4, 0x76, 0x8b, 0x84, 0xc5, 0x02, 0xf0, 0x36, 0x5a, 0x3a, 0x57, 0xb6, 0x0b, 0x9c, 0x33, + 0x9e, 0x93, 0xd6, 0x3a, 0x58, 0xcb, 0xe3, 0xc9, 0x80, 0x1c, 0xe6, 0x4b, 0xe0, 0xdc, 0x18, 0x6f, + 0xc8, 0xb3, 0x2c, 0x1c, 0x1f, 0xa2, 0x2b, 0x1c, 0x44, 0x1a, 0x4a, 0x3d, 0x93, 0x27, 0xb3, 0x67, + 0x72, 0x41, 0x96, 0x93, 0x33, 0x38, 0x9a, 0xc9, 0x7c, 0x8a, 0x96, 0xa7, 0xc4, 0xfc, 0xd5, 0xaa, + 0x75, 0xde, 0x57, 0x11, 0x2e, 0x41, 0x0f, 0x55, 0x62, 0xfc, 0xc9, 0x40, 0x8b, 0xe3, 0x0b, 0x8c, + 0x1f, 0xcf, 0x56, 0x3b, 0x71, 0xe5, 0x1b, 0xff, 0x38, 0x3f, 0x73, 0xed, 0xcd, 0xb7, 0x1f, 0xef, + 0x2a, 0x0f, 0x30, 0xc9, 0x1e, 0xef, 0xe9, 0x58, 0x09, 0x9b, 0x7a, 0xcb, 0x85, 0xd5, 0xca, 0x5f, + 0xb3, 0x1e, 0x96, 0xd5, 0x3a, 0xc3, 0x5f, 0x0d, 0x84, 0x2f, 0x8e, 0x11, 0xaf, 0x5f, 0xa2, 0xcb, + 0xfa, 0xad, 0x35, 0x36, 0x2e, 0x07, 0x56, 0x9b, 0x63, 0x6e, 0xe4, 0x95, 0xac, 0x99, 0xed, 0xac, + 0x92, 0x3f, 0xd2, 0x4f, 0x4b, 0xcf, 0x77, 0xb3, 0x75, 0x36, 0x56, 0x88, 0x1d, 0xe5, 0x74, 0xb6, + 0xd1, 0xda, 0xfa, 0x65, 0xa0, 0x95, 0x01, 0x8b, 0x66, 0x2a, 0xd8, 0x5a, 0xbe, 0x38, 0xc0, 0x83, + 0xec, 0x17, 0x3a, 0x30, 0x5e, 0xee, 0x16, 0x60, 0x9f, 0x85, 0x34, 0xf6, 0x09, 0xe3, 0xbe, 0xe5, + 0x43, 0x9c, 0xff, 0x51, 0xfa, 0x93, 0x4c, 0x02, 0x31, 0xfd, 0x4b, 0x5e, 0xd7, 0x87, 0x0f, 0x95, + 0xea, 0x4e, 0xb7, 0xfb, 0xb1, 0xd2, 0xdc, 0x51, 0x84, 0x5d, 0x4f, 0x10, 0x75, 0xcc, 0x4e, 0xbd, + 0x36, 0x29, 0x12, 0x8b, 0x2f, 0x3a, 0xa4, 0xdf, 
0xf5, 0x44, 0x7f, 0x14, 0xd2, 0xef, 0xb5, 0xfb, + 0x3a, 0xe4, 0x67, 0x65, 0x45, 0xd9, 0x6d, 0xbb, 0xeb, 0x09, 0xdb, 0x1e, 0x05, 0xd9, 0x76, 0xaf, + 0x6d, 0xdb, 0x3a, 0xec, 0x68, 0x2e, 0xd7, 0xf9, 0xf0, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd1, + 0x89, 0x6c, 0x2f, 0x39, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_placeholder_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_placeholder_view_service.pb.go new file mode 100644 index 0000000..e75e4d0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_placeholder_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/feed_placeholder_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [FeedPlaceholderViewService.GetFeedPlaceholderView][google.ads.googleads.v1.services.FeedPlaceholderViewService.GetFeedPlaceholderView]. +type GetFeedPlaceholderViewRequest struct { + // The resource name of the feed placeholder view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeedPlaceholderViewRequest) Reset() { *m = GetFeedPlaceholderViewRequest{} } +func (m *GetFeedPlaceholderViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeedPlaceholderViewRequest) ProtoMessage() {} +func (*GetFeedPlaceholderViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_placeholder_view_service_20149c4acfccfc2b, []int{0} +} +func (m *GetFeedPlaceholderViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeedPlaceholderViewRequest.Unmarshal(m, b) +} +func (m *GetFeedPlaceholderViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeedPlaceholderViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetFeedPlaceholderViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeedPlaceholderViewRequest.Merge(dst, src) +} +func (m *GetFeedPlaceholderViewRequest) XXX_Size() int { + return xxx_messageInfo_GetFeedPlaceholderViewRequest.Size(m) +} +func (m *GetFeedPlaceholderViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeedPlaceholderViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeedPlaceholderViewRequest proto.InternalMessageInfo + +func (m *GetFeedPlaceholderViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetFeedPlaceholderViewRequest)(nil), "google.ads.googleads.v1.services.GetFeedPlaceholderViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FeedPlaceholderViewServiceClient is the client API for FeedPlaceholderViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FeedPlaceholderViewServiceClient interface { + // Returns the requested feed placeholder view in full detail. + GetFeedPlaceholderView(ctx context.Context, in *GetFeedPlaceholderViewRequest, opts ...grpc.CallOption) (*resources.FeedPlaceholderView, error) +} + +type feedPlaceholderViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewFeedPlaceholderViewServiceClient(cc *grpc.ClientConn) FeedPlaceholderViewServiceClient { + return &feedPlaceholderViewServiceClient{cc} +} + +func (c *feedPlaceholderViewServiceClient) GetFeedPlaceholderView(ctx context.Context, in *GetFeedPlaceholderViewRequest, opts ...grpc.CallOption) (*resources.FeedPlaceholderView, error) { + out := new(resources.FeedPlaceholderView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedPlaceholderViewService/GetFeedPlaceholderView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FeedPlaceholderViewServiceServer is the server API for FeedPlaceholderViewService service. +type FeedPlaceholderViewServiceServer interface { + // Returns the requested feed placeholder view in full detail. 
+ GetFeedPlaceholderView(context.Context, *GetFeedPlaceholderViewRequest) (*resources.FeedPlaceholderView, error) +} + +func RegisterFeedPlaceholderViewServiceServer(s *grpc.Server, srv FeedPlaceholderViewServiceServer) { + s.RegisterService(&_FeedPlaceholderViewService_serviceDesc, srv) +} + +func _FeedPlaceholderViewService_GetFeedPlaceholderView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedPlaceholderViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedPlaceholderViewServiceServer).GetFeedPlaceholderView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedPlaceholderViewService/GetFeedPlaceholderView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedPlaceholderViewServiceServer).GetFeedPlaceholderView(ctx, req.(*GetFeedPlaceholderViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FeedPlaceholderViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.FeedPlaceholderViewService", + HandlerType: (*FeedPlaceholderViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeedPlaceholderView", + Handler: _FeedPlaceholderViewService_GetFeedPlaceholderView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/feed_placeholder_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/feed_placeholder_view_service.proto", fileDescriptor_feed_placeholder_view_service_20149c4acfccfc2b) +} + +var fileDescriptor_feed_placeholder_view_service_20149c4acfccfc2b = []byte{ + // 377 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcd, 0x4a, 0xeb, 0x40, + 0x14, 0x26, 0xb9, 0x70, 0xe1, 0x86, 0x7b, 0x37, 0x59, 0x5c, 0x24, 0x28, 0x96, 0xda, 0x85, 0x74, + 0x31, 0x43, 0x14, 0x8a, 0x8c, 0x54, 0x49, 0x29, 0xd6, 0x95, 0x94, 0x0a, 0x59, 0x48, 0x20, 0x8c, + 0x99, 0x63, 0x0c, 0x24, 0x99, 0x98, 0x49, 0xd3, 0x85, 0xb8, 0x71, 0xe1, 0x0b, 0xf8, 0x06, 0x2e, + 0x7d, 0x14, 0x77, 0xe2, 0x2b, 0xb8, 0x72, 0xe5, 0x23, 0x48, 0x3a, 0x9d, 0x88, 0xd2, 0xd8, 0xdd, + 0xc7, 0x9c, 0xef, 0xe7, 0xfc, 0x8c, 0x31, 0x0c, 0x39, 0x0f, 0x63, 0xc0, 0x94, 0x09, 0x2c, 0x61, + 0x85, 0x4a, 0x1b, 0x0b, 0xc8, 0xcb, 0x28, 0x00, 0x81, 0x2f, 0x00, 0x98, 0x9f, 0xc5, 0x34, 0x80, + 0x4b, 0x1e, 0x33, 0xc8, 0xfd, 0x32, 0x82, 0x99, 0xbf, 0x28, 0xa3, 0x2c, 0xe7, 0x05, 0x37, 0x5b, + 0x52, 0x8a, 0x28, 0x13, 0xa8, 0x76, 0x41, 0xa5, 0x8d, 0x94, 0x8b, 0xd5, 0x6f, 0xca, 0xc9, 0x41, + 0xf0, 0x69, 0xde, 0x18, 0x24, 0x03, 0xac, 0x75, 0x25, 0xcf, 0x22, 0x4c, 0xd3, 0x94, 0x17, 0xb4, + 0x88, 0x78, 0x2a, 0x64, 0xb5, 0x3d, 0x34, 0x36, 0x46, 0x50, 0x1c, 0x01, 0xb0, 0xf1, 0xa7, 0xdc, + 0x8d, 0x60, 0x36, 0x81, 0xab, 0x29, 0x88, 0xc2, 0xdc, 0x32, 0xfe, 0xa9, 0x1c, 0x3f, 0xa5, 0x09, + 0xac, 0x69, 0x2d, 0x6d, 0xfb, 0xcf, 0xe4, 0xaf, 0x7a, 0x3c, 0xa1, 0x09, 0xec, 0xbc, 0x6b, 0x86, + 0xb5, 0xc4, 0xe3, 0x54, 0x8e, 0x60, 0x3e, 0x6b, 0xc6, 0xff, 0xe5, 0x29, 0xe6, 0x21, 0x5a, 0x35, + 0x3f, 0xfa, 0xb1, 0x3f, 0xab, 0xd7, 0x68, 0x50, 0xaf, 0x07, 0x2d, 0x91, 0xb7, 0x0f, 0x6e, 0x5f, + 0x5e, 0xef, 0xf5, 0x3d, 0xb3, 0x57, 0x6d, 0xf2, 0xfa, 0xcb, 0x88, 0xfd, 0x60, 0x2a, 0x0a, 0x9e, + 0x40, 0x2e, 0x70, 0x77, 0xbe, 0xda, 0x6f, 0x5a, 0x81, 0xbb, 0x37, 0x83, 0x3b, 0xdd, 0xe8, 0x04, + 0x3c, 
0x59, 0xd9, 0xfe, 0x60, 0xb3, 0x79, 0x31, 0xe3, 0xea, 0x04, 0x63, 0xed, 0xec, 0x78, 0x61, + 0x12, 0xf2, 0x98, 0xa6, 0x21, 0xe2, 0x79, 0x88, 0x43, 0x48, 0xe7, 0x07, 0x52, 0x17, 0xcf, 0x22, + 0xd1, 0xfc, 0xd1, 0xf6, 0x15, 0x78, 0xd0, 0x7f, 0x8d, 0x1c, 0xe7, 0x51, 0x6f, 0x8d, 0xa4, 0xa1, + 0xc3, 0x04, 0x92, 0xb0, 0x42, 0xae, 0x8d, 0x16, 0xc1, 0xe2, 0x49, 0x51, 0x3c, 0x87, 0x09, 0xaf, + 0xa6, 0x78, 0xae, 0xed, 0x29, 0xca, 0x9b, 0xde, 0x91, 0xef, 0x84, 0x38, 0x4c, 0x10, 0x52, 0x93, + 0x08, 0x71, 0x6d, 0x42, 0x14, 0xed, 0xfc, 0xf7, 0xbc, 0xcf, 0xdd, 0x8f, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xee, 0xb8, 0x0d, 0xf6, 0x0f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_service.pb.go new file mode 100644 index 0000000..845ca8e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/feed_service.pb.go @@ -0,0 +1,589 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/feed_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [FeedService.GetFeed][google.ads.googleads.v1.services.FeedService.GetFeed]. +type GetFeedRequest struct { + // The resource name of the feed to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFeedRequest) Reset() { *m = GetFeedRequest{} } +func (m *GetFeedRequest) String() string { return proto.CompactTextString(m) } +func (*GetFeedRequest) ProtoMessage() {} +func (*GetFeedRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_service_ed598965e23a13be, []int{0} +} +func (m *GetFeedRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFeedRequest.Unmarshal(m, b) +} +func (m *GetFeedRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFeedRequest.Marshal(b, m, deterministic) +} +func (dst *GetFeedRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFeedRequest.Merge(dst, src) +} +func (m *GetFeedRequest) XXX_Size() int { + return xxx_messageInfo_GetFeedRequest.Size(m) +} +func (m *GetFeedRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFeedRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFeedRequest proto.InternalMessageInfo + +func (m *GetFeedRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [FeedService.MutateFeeds][google.ads.googleads.v1.services.FeedService.MutateFeeds]. +type MutateFeedsRequest struct { + // The ID of the customer whose feeds are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual feeds. + Operations []*FeedOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedsRequest) Reset() { *m = MutateFeedsRequest{} } +func (m *MutateFeedsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateFeedsRequest) ProtoMessage() {} +func (*MutateFeedsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_service_ed598965e23a13be, []int{1} +} +func (m *MutateFeedsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedsRequest.Unmarshal(m, b) +} +func (m *MutateFeedsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateFeedsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedsRequest.Merge(dst, src) +} +func (m *MutateFeedsRequest) XXX_Size() int { + return xxx_messageInfo_MutateFeedsRequest.Size(m) +} +func (m *MutateFeedsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedsRequest proto.InternalMessageInfo + +func (m *MutateFeedsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateFeedsRequest) GetOperations() []*FeedOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateFeedsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateFeedsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an feed. +type FeedOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *FeedOperation_Create + // *FeedOperation_Update + // *FeedOperation_Remove + Operation isFeedOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FeedOperation) Reset() { *m = FeedOperation{} } +func (m *FeedOperation) String() string { return proto.CompactTextString(m) } +func (*FeedOperation) ProtoMessage() {} +func (*FeedOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_service_ed598965e23a13be, []int{2} +} +func (m *FeedOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FeedOperation.Unmarshal(m, b) +} +func (m *FeedOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FeedOperation.Marshal(b, m, deterministic) +} +func (dst *FeedOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FeedOperation.Merge(dst, src) +} +func (m *FeedOperation) XXX_Size() int { + return xxx_messageInfo_FeedOperation.Size(m) +} +func (m *FeedOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FeedOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FeedOperation proto.InternalMessageInfo + +func (m *FeedOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isFeedOperation_Operation interface { + isFeedOperation_Operation() +} + +type FeedOperation_Create struct { + Create *resources.Feed `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type FeedOperation_Update struct { + Update *resources.Feed `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type FeedOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*FeedOperation_Create) isFeedOperation_Operation() {} + +func (*FeedOperation_Update) isFeedOperation_Operation() {} + +func (*FeedOperation_Remove) isFeedOperation_Operation() {} + +func (m *FeedOperation) GetOperation() isFeedOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *FeedOperation) GetCreate() *resources.Feed { + if x, ok := m.GetOperation().(*FeedOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *FeedOperation) GetUpdate() *resources.Feed { + if x, ok := m.GetOperation().(*FeedOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *FeedOperation) GetRemove() string { + if x, ok := m.GetOperation().(*FeedOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*FeedOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FeedOperation_OneofMarshaler, _FeedOperation_OneofUnmarshaler, _FeedOperation_OneofSizer, []interface{}{ + (*FeedOperation_Create)(nil), + (*FeedOperation_Update)(nil), + (*FeedOperation_Remove)(nil), + } +} + +func _FeedOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FeedOperation) + // operation + switch x := m.Operation.(type) { + case *FeedOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *FeedOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *FeedOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("FeedOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _FeedOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FeedOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Feed) + err := b.DecodeMessage(msg) + m.Operation = &FeedOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Feed) + err := b.DecodeMessage(msg) + m.Operation = &FeedOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &FeedOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _FeedOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FeedOperation) + // operation + switch x := m.Operation.(type) { + case *FeedOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FeedOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for an feed mutate. +type MutateFeedsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateFeedResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedsResponse) Reset() { *m = MutateFeedsResponse{} } +func (m *MutateFeedsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateFeedsResponse) ProtoMessage() {} +func (*MutateFeedsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_service_ed598965e23a13be, []int{3} +} +func (m *MutateFeedsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedsResponse.Unmarshal(m, b) +} +func (m *MutateFeedsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateFeedsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedsResponse.Merge(dst, src) +} +func (m *MutateFeedsResponse) XXX_Size() int { + return xxx_messageInfo_MutateFeedsResponse.Size(m) +} +func (m *MutateFeedsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedsResponse proto.InternalMessageInfo + +func (m *MutateFeedsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateFeedsResponse) GetResults() []*MutateFeedResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the feed mutate. +type MutateFeedResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateFeedResult) Reset() { *m = MutateFeedResult{} } +func (m *MutateFeedResult) String() string { return proto.CompactTextString(m) } +func (*MutateFeedResult) ProtoMessage() {} +func (*MutateFeedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_feed_service_ed598965e23a13be, []int{4} +} +func (m *MutateFeedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateFeedResult.Unmarshal(m, b) +} +func (m *MutateFeedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateFeedResult.Marshal(b, m, deterministic) +} +func (dst *MutateFeedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateFeedResult.Merge(dst, src) +} +func (m *MutateFeedResult) XXX_Size() int { + return xxx_messageInfo_MutateFeedResult.Size(m) +} +func (m *MutateFeedResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateFeedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateFeedResult proto.InternalMessageInfo + +func (m *MutateFeedResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetFeedRequest)(nil), "google.ads.googleads.v1.services.GetFeedRequest") + proto.RegisterType((*MutateFeedsRequest)(nil), "google.ads.googleads.v1.services.MutateFeedsRequest") + proto.RegisterType((*FeedOperation)(nil), "google.ads.googleads.v1.services.FeedOperation") + proto.RegisterType((*MutateFeedsResponse)(nil), "google.ads.googleads.v1.services.MutateFeedsResponse") + proto.RegisterType((*MutateFeedResult)(nil), "google.ads.googleads.v1.services.MutateFeedResult") +} + +// Reference imports to suppress errors if they are not otherwise 
used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FeedServiceClient is the client API for FeedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FeedServiceClient interface { + // Returns the requested feed in full detail. + GetFeed(ctx context.Context, in *GetFeedRequest, opts ...grpc.CallOption) (*resources.Feed, error) + // Creates, updates, or removes feeds. Operation statuses are + // returned. + MutateFeeds(ctx context.Context, in *MutateFeedsRequest, opts ...grpc.CallOption) (*MutateFeedsResponse, error) +} + +type feedServiceClient struct { + cc *grpc.ClientConn +} + +func NewFeedServiceClient(cc *grpc.ClientConn) FeedServiceClient { + return &feedServiceClient{cc} +} + +func (c *feedServiceClient) GetFeed(ctx context.Context, in *GetFeedRequest, opts ...grpc.CallOption) (*resources.Feed, error) { + out := new(resources.Feed) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedService/GetFeed", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *feedServiceClient) MutateFeeds(ctx context.Context, in *MutateFeedsRequest, opts ...grpc.CallOption) (*MutateFeedsResponse, error) { + out := new(MutateFeedsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.FeedService/MutateFeeds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FeedServiceServer is the server API for FeedService service. +type FeedServiceServer interface { + // Returns the requested feed in full detail. + GetFeed(context.Context, *GetFeedRequest) (*resources.Feed, error) + // Creates, updates, or removes feeds. Operation statuses are + // returned. 
+ MutateFeeds(context.Context, *MutateFeedsRequest) (*MutateFeedsResponse, error) +} + +func RegisterFeedServiceServer(s *grpc.Server, srv FeedServiceServer) { + s.RegisterService(&_FeedService_serviceDesc, srv) +} + +func _FeedService_GetFeed_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFeedRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedServiceServer).GetFeed(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedService/GetFeed", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedServiceServer).GetFeed(ctx, req.(*GetFeedRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FeedService_MutateFeeds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateFeedsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FeedServiceServer).MutateFeeds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.FeedService/MutateFeeds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FeedServiceServer).MutateFeeds(ctx, req.(*MutateFeedsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FeedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.FeedService", + HandlerType: (*FeedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeed", + Handler: _FeedService_GetFeed_Handler, + }, + { + MethodName: "MutateFeeds", + Handler: _FeedService_MutateFeeds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/feed_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/feed_service.proto", fileDescriptor_feed_service_ed598965e23a13be) +} + +var fileDescriptor_feed_service_ed598965e23a13be = []byte{ + // 694 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x41, 0x6b, 0x13, 0x41, + 0x14, 0x76, 0xb7, 0xd2, 0xda, 0xd9, 0xb6, 0x96, 0x29, 0x62, 0x08, 0xa2, 0x61, 0x2d, 0xb4, 0x06, + 0xd9, 0x31, 0xa9, 0x45, 0xd8, 0xd2, 0x43, 0x0a, 0xa6, 0x15, 0xac, 0x2d, 0x5b, 0xe8, 0x41, 0x02, + 0xcb, 0x34, 0x3b, 0x09, 0x4b, 0x77, 0x77, 0xd6, 0x99, 0xd9, 0x48, 0x29, 0xbd, 0xf8, 0x17, 0xc4, + 0x3f, 0xe0, 0x51, 0x4f, 0xfe, 0x0d, 0xaf, 0x7a, 0xf1, 0xee, 0xc9, 0x3f, 0x20, 0xde, 0x64, 0x66, + 0x76, 0xd2, 0xa4, 0x50, 0xd2, 0xdc, 0xde, 0xbe, 0xf9, 0xbe, 0xef, 0x7d, 0xf3, 0xde, 0xbe, 0x01, + 0x1b, 0x7d, 0x4a, 0xfb, 0x09, 0x41, 0x38, 0xe2, 0x48, 0x87, 0x32, 0x1a, 0x34, 0x10, 0x27, 0x6c, + 0x10, 0x77, 0x09, 0x47, 0x3d, 0x42, 0xa2, 0xb0, 0xfc, 0xf2, 0x72, 0x46, 0x05, 0x85, 0x35, 0x8d, + 0xf4, 0x70, 0xc4, 0xbd, 0x21, 0xc9, 0x1b, 0x34, 0x3c, 0x43, 0xaa, 0x3e, 0xbd, 0x4e, 0x96, 0x11, + 0x4e, 0x0b, 0x66, 0x74, 0xb5, 0x5e, 0xf5, 0x81, 0x41, 0xe7, 0x31, 0xc2, 0x59, 0x46, 0x05, 0x16, + 0x31, 0xcd, 0x78, 0x79, 0x5a, 0x56, 0x43, 0xea, 0xeb, 0xa4, 0xe8, 0xa1, 0x5e, 0x4c, 0x92, 0x28, + 0x4c, 0x31, 0x3f, 0x2d, 0x11, 0x0f, 0xaf, 0x22, 0xde, 0x33, 0x9c, 0xe7, 0x84, 0x19, 0x85, 0xfb, + 0xe5, 0x39, 0xcb, 0xbb, 0x88, 0x0b, 0x2c, 0x8a, 0xf2, 0xc0, 0xdd, 0x04, 0x4b, 0xbb, 0x44, 0xb4, + 0x09, 0x89, 0x02, 0xf2, 
0xae, 0x20, 0x5c, 0xc0, 0xc7, 0x60, 0xd1, 0x58, 0x0c, 0x33, 0x9c, 0x92, + 0x8a, 0x55, 0xb3, 0xd6, 0xe7, 0x83, 0x05, 0x93, 0x7c, 0x83, 0x53, 0xe2, 0xfe, 0xb4, 0x00, 0xdc, + 0x2f, 0x04, 0x16, 0x44, 0x52, 0xb9, 0xe1, 0x3e, 0x02, 0x4e, 0xb7, 0xe0, 0x82, 0xa6, 0x84, 0x85, + 0x71, 0x54, 0x32, 0x81, 0x49, 0xbd, 0x8a, 0xe0, 0x01, 0x00, 0x34, 0x27, 0x4c, 0xdf, 0xae, 0x62, + 0xd7, 0x66, 0xd6, 0x9d, 0x26, 0xf2, 0x26, 0x35, 0xd3, 0x93, 0x45, 0x0e, 0x0c, 0x2f, 0x18, 0x91, + 0x80, 0x6b, 0xe0, 0x6e, 0x8e, 0x99, 0x88, 0x71, 0x12, 0xf6, 0x70, 0x9c, 0x14, 0x8c, 0x54, 0x66, + 0x6a, 0xd6, 0xfa, 0x9d, 0x60, 0xa9, 0x4c, 0xb7, 0x75, 0x56, 0x5e, 0x6b, 0x80, 0x93, 0x38, 0xc2, + 0x82, 0x84, 0x34, 0x4b, 0xce, 0x2a, 0xb7, 0x15, 0x6c, 0xc1, 0x24, 0x0f, 0xb2, 0xe4, 0xcc, 0xfd, + 0x67, 0x81, 0xc5, 0xb1, 0x5a, 0x70, 0x0b, 0x38, 0x45, 0xae, 0x48, 0xb2, 0xdb, 0x8a, 0xe4, 0x34, + 0xab, 0xc6, 0xb1, 0x69, 0xb7, 0xd7, 0x96, 0x03, 0xd9, 0xc7, 0xfc, 0x34, 0x00, 0x1a, 0x2e, 0x63, + 0xd8, 0x02, 0xb3, 0x5d, 0x46, 0xb0, 0xd0, 0x3d, 0x74, 0x9a, 0x6b, 0xd7, 0xde, 0x74, 0xf8, 0x53, + 0xa8, 0xab, 0xee, 0xdd, 0x0a, 0x4a, 0xa2, 0x94, 0xd0, 0x82, 0x15, 0x7b, 0x6a, 0x09, 0x4d, 0x84, + 0x15, 0x30, 0xcb, 0x48, 0x4a, 0x07, 0xba, 0x33, 0xf3, 0xf2, 0x44, 0x7f, 0xef, 0x38, 0x60, 0x7e, + 0xd8, 0x4a, 0xf7, 0xab, 0x05, 0x56, 0xc6, 0x46, 0xca, 0x73, 0x9a, 0x71, 0x02, 0xdb, 0xe0, 0xde, + 0x95, 0x0e, 0x87, 0x84, 0x31, 0xca, 0x94, 0x9a, 0xd3, 0x84, 0xc6, 0x10, 0xcb, 0xbb, 0xde, 0x91, + 0xfa, 0xb5, 0x82, 0x95, 0xf1, 0xde, 0xbf, 0x94, 0x70, 0xf8, 0x1a, 0xcc, 0x31, 0xc2, 0x8b, 0x44, + 0x98, 0xb9, 0x37, 0x27, 0xcf, 0xfd, 0xd2, 0x4f, 0xa0, 0xa8, 0x81, 0x91, 0x70, 0x5f, 0x80, 0xe5, + 0xab, 0x87, 0x37, 0xfa, 0x73, 0x9b, 0xbf, 0x6c, 0xe0, 0x48, 0xce, 0x91, 0xae, 0x01, 0x3f, 0x59, + 0x60, 0xae, 0xdc, 0x00, 0xf8, 0x6c, 0xb2, 0xa3, 0xf1, 0x65, 0xa9, 0xde, 0x74, 0x1c, 0x2e, 0xfa, + 0xf0, 0xe3, 0xf7, 0x47, 0xfb, 0x09, 0x5c, 0x93, 0x4f, 0xc0, 0xf9, 0x98, 0xcd, 0x6d, 0xb3, 0x1f, + 0x1c, 0xd5, 0xd5, 0x9b, 0xc0, 0x51, 0xfd, 0x02, 0x7e, 0xb3, 0x80, 0x33, 0x32, 0x0e, 0xf8, 0x7c, + 0x9a, 0x6e, 0x99, 0x85, 0xac, 0x6e, 0x4e, 0xc9, 0xd2, 0x33, 0x77, 0x37, 0x95, 0x5b, 0xe4, 0xd6, + 0xa5, 0xdb, 0x4b, 0x7b, 0xe7, 0x23, 0xcb, 0xbd, 0x5d, 0xbf, 0xd0, 0x66, 0xfd, 0x54, 0x09, 0xf8, + 0x56, 0x7d, 0xe7, 0xaf, 0x05, 0x56, 0xbb, 0x34, 0x9d, 0x58, 0x73, 0x67, 0x79, 0x64, 0x02, 0x87, + 0x72, 0x87, 0x0e, 0xad, 0xb7, 0x7b, 0x25, 0xab, 0x4f, 0x13, 0x9c, 0xf5, 0x3d, 0xca, 0xfa, 0xa8, + 0x4f, 0x32, 0xb5, 0x61, 0xe6, 0x01, 0xcd, 0x63, 0x7e, 0xfd, 0x33, 0xbd, 0x65, 0x82, 0xcf, 0xf6, + 0xcc, 0x6e, 0xab, 0xf5, 0xc5, 0xae, 0xed, 0x6a, 0xc1, 0x56, 0xc4, 0x3d, 0x1d, 0xca, 0xe8, 0xb8, + 0xe1, 0x95, 0x85, 0xf9, 0x77, 0x03, 0xe9, 0xb4, 0x22, 0xde, 0x19, 0x42, 0x3a, 0xc7, 0x8d, 0x8e, + 0x81, 0xfc, 0xb1, 0x57, 0x75, 0xde, 0xf7, 0x5b, 0x11, 0xf7, 0xfd, 0x21, 0xc8, 0xf7, 0x8f, 0x1b, + 0xbe, 0x6f, 0x60, 0x27, 0xb3, 0xca, 0xe7, 0xc6, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf2, 0x05, + 0x04, 0x92, 0x4d, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/gender_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/gender_view_service.pb.go new file mode 100644 index 0000000..a016402 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/gender_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/gender_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GenderViewService.GetGenderView][google.ads.googleads.v1.services.GenderViewService.GetGenderView]. +type GetGenderViewRequest struct { + // The resource name of the gender view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGenderViewRequest) Reset() { *m = GetGenderViewRequest{} } +func (m *GetGenderViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetGenderViewRequest) ProtoMessage() {} +func (*GetGenderViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_gender_view_service_cdbdad6124719293, []int{0} +} +func (m *GetGenderViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGenderViewRequest.Unmarshal(m, b) +} +func (m *GetGenderViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGenderViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetGenderViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGenderViewRequest.Merge(dst, src) +} +func (m *GetGenderViewRequest) XXX_Size() int { + return xxx_messageInfo_GetGenderViewRequest.Size(m) +} +func (m *GetGenderViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGenderViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGenderViewRequest proto.InternalMessageInfo + +func (m *GetGenderViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetGenderViewRequest)(nil), "google.ads.googleads.v1.services.GetGenderViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GenderViewServiceClient is the client API for GenderViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GenderViewServiceClient interface { + // Returns the requested gender view in full detail. 
+ GetGenderView(ctx context.Context, in *GetGenderViewRequest, opts ...grpc.CallOption) (*resources.GenderView, error) +} + +type genderViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewGenderViewServiceClient(cc *grpc.ClientConn) GenderViewServiceClient { + return &genderViewServiceClient{cc} +} + +func (c *genderViewServiceClient) GetGenderView(ctx context.Context, in *GetGenderViewRequest, opts ...grpc.CallOption) (*resources.GenderView, error) { + out := new(resources.GenderView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GenderViewService/GetGenderView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GenderViewServiceServer is the server API for GenderViewService service. +type GenderViewServiceServer interface { + // Returns the requested gender view in full detail. + GetGenderView(context.Context, *GetGenderViewRequest) (*resources.GenderView, error) +} + +func RegisterGenderViewServiceServer(s *grpc.Server, srv GenderViewServiceServer) { + s.RegisterService(&_GenderViewService_serviceDesc, srv) +} + +func _GenderViewService_GetGenderView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGenderViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GenderViewServiceServer).GetGenderView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GenderViewService/GetGenderView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GenderViewServiceServer).GetGenderView(ctx, req.(*GetGenderViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GenderViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GenderViewService", + HandlerType: (*GenderViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGenderView", + Handler: _GenderViewService_GetGenderView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/gender_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/gender_view_service.proto", fileDescriptor_gender_view_service_cdbdad6124719293) +} + +var fileDescriptor_gender_view_service_cdbdad6124719293 = []byte{ + // 361 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xb1, 0x4a, 0xc3, 0x50, + 0x14, 0x86, 0x49, 0x04, 0xc1, 0x60, 0x07, 0x83, 0x88, 0x14, 0x87, 0x52, 0x3b, 0x48, 0xa1, 0xf7, + 0x12, 0x8b, 0x0e, 0xb7, 0x38, 0xa4, 0x4b, 0x9c, 0xa4, 0x54, 0xc8, 0x20, 0x81, 0x72, 0x6d, 0x0e, + 0x21, 0xd0, 0xe4, 0xd6, 0x7b, 0xd2, 0x74, 0x10, 0x17, 0x5f, 0xc1, 0x37, 0x70, 0x14, 0x7c, 0x0a, + 0x37, 0x57, 0x5f, 0xc1, 0xc9, 0x77, 0x10, 0x24, 0xbd, 0xb9, 0x29, 0x55, 0x4b, 0xb7, 0x9f, 0xd3, + 0xff, 0xfb, 0xef, 0x7f, 0x4e, 0x63, 0xb1, 0x48, 0x88, 0x68, 0x02, 0x94, 0x87, 0x48, 0x95, 0x2c, + 0x54, 0xee, 0x50, 0x04, 0x99, 0xc7, 0x63, 0x40, 0x1a, 0x41, 0x1a, 0x82, 0x1c, 0xe5, 0x31, 0xcc, + 0x47, 0xe5, 0x90, 0x4c, 0xa5, 0xc8, 0x84, 0xdd, 0x50, 0x00, 0xe1, 0x21, 0x92, 0x8a, 0x25, 0xb9, + 0x43, 0x34, 0x5b, 0xef, 0xae, 0x4b, 0x97, 0x80, 0x62, 0x26, 0x7f, 0xc5, 0xab, 0xd8, 0xfa, 0x91, + 0x86, 0xa6, 0x31, 0xe5, 0x69, 0x2a, 0x32, 0x9e, 0xc5, 0x22, 0x45, 0xf5, 0x6b, 0xb3, 0x67, 0xed, + 0x7b, 0x90, 0x79, 0x0b, 0xca, 0x8f, 0x61, 0x3e, 0x84, 0xbb, 0x19, 0x60, 0x66, 
0x1f, 0x5b, 0x35, + 0x1d, 0x3a, 0x4a, 0x79, 0x02, 0x87, 0x46, 0xc3, 0x38, 0xd9, 0x19, 0xee, 0xea, 0xe1, 0x15, 0x4f, + 0xe0, 0xf4, 0xcd, 0xb0, 0xf6, 0x96, 0xe8, 0xb5, 0xaa, 0x69, 0xbf, 0x1a, 0x56, 0x6d, 0x25, 0xd3, + 0x3e, 0x27, 0x9b, 0x56, 0x23, 0xff, 0x95, 0xa8, 0x77, 0xd6, 0x72, 0xd5, 0xc2, 0x64, 0x49, 0x35, + 0xcf, 0x1e, 0x3f, 0x3e, 0x9f, 0x4c, 0x6a, 0x77, 0x8a, 0x93, 0xdc, 0xaf, 0xd4, 0xbf, 0x18, 0xcf, + 0x30, 0x13, 0x09, 0x48, 0xa4, 0xed, 0xf2, 0x46, 0x05, 0x82, 0xb4, 0xfd, 0xd0, 0xff, 0x36, 0xac, + 0xd6, 0x58, 0x24, 0x1b, 0x3b, 0xf6, 0x0f, 0xfe, 0xec, 0x3a, 0x28, 0x6e, 0x38, 0x30, 0x6e, 0x2e, + 0x4b, 0x36, 0x12, 0x13, 0x9e, 0x46, 0x44, 0xc8, 0xa8, 0x78, 0x64, 0x71, 0x61, 0xfd, 0x47, 0x4d, + 0x63, 0x5c, 0xff, 0x55, 0xf4, 0xb4, 0x78, 0x36, 0xb7, 0x3c, 0xd7, 0x7d, 0x31, 0x1b, 0x9e, 0x0a, + 0x74, 0x43, 0x24, 0x4a, 0x16, 0xca, 0x77, 0x48, 0xf9, 0x30, 0xbe, 0x6b, 0x4b, 0xe0, 0x86, 0x18, + 0x54, 0x96, 0xc0, 0x77, 0x02, 0x6d, 0xf9, 0x32, 0x5b, 0x6a, 0xce, 0x98, 0x1b, 0x22, 0x63, 0x95, + 0x89, 0x31, 0xdf, 0x61, 0x4c, 0xdb, 0x6e, 0xb7, 0x17, 0x3d, 0xbb, 0x3f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x04, 0x5c, 0x26, 0x09, 0xbc, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geo_target_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geo_target_constant_service.pb.go new file mode 100644 index 0000000..62b4075 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geo_target_constant_service.pb.go @@ -0,0 +1,616 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/geo_target_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GeoTargetConstantService.GetGeoTargetConstant][google.ads.googleads.v1.services.GeoTargetConstantService.GetGeoTargetConstant]. +type GetGeoTargetConstantRequest struct { + // The resource name of the geo target constant to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGeoTargetConstantRequest) Reset() { *m = GetGeoTargetConstantRequest{} } +func (m *GetGeoTargetConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetGeoTargetConstantRequest) ProtoMessage() {} +func (*GetGeoTargetConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{0} +} +func (m *GetGeoTargetConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGeoTargetConstantRequest.Unmarshal(m, b) +} +func (m *GetGeoTargetConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGeoTargetConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetGeoTargetConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGeoTargetConstantRequest.Merge(dst, src) +} +func (m *GetGeoTargetConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetGeoTargetConstantRequest.Size(m) +} +func (m *GetGeoTargetConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGeoTargetConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGeoTargetConstantRequest proto.InternalMessageInfo + +func (m *GetGeoTargetConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [GeoTargetConstantService.SuggestGeoTargetConstantsRequest][]. +type SuggestGeoTargetConstantsRequest struct { + // If possible, returned geo targets are translated using this locale. If not, + // en is used by default. This is also used as a hint for returned geo + // targets. + Locale *wrappers.StringValue `protobuf:"bytes,3,opt,name=locale,proto3" json:"locale,omitempty"` + // Returned geo targets are restricted to this country code. + CountryCode *wrappers.StringValue `protobuf:"bytes,5,opt,name=country_code,json=countryCode,proto3" json:"country_code,omitempty"` + // Required. A selector of geo target constants. 
+ // + // Types that are valid to be assigned to Query: + // *SuggestGeoTargetConstantsRequest_LocationNames_ + // *SuggestGeoTargetConstantsRequest_GeoTargets_ + Query isSuggestGeoTargetConstantsRequest_Query `protobuf_oneof:"query"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SuggestGeoTargetConstantsRequest) Reset() { *m = SuggestGeoTargetConstantsRequest{} } +func (m *SuggestGeoTargetConstantsRequest) String() string { return proto.CompactTextString(m) } +func (*SuggestGeoTargetConstantsRequest) ProtoMessage() {} +func (*SuggestGeoTargetConstantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{1} +} +func (m *SuggestGeoTargetConstantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest.Unmarshal(m, b) +} +func (m *SuggestGeoTargetConstantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest.Marshal(b, m, deterministic) +} +func (dst *SuggestGeoTargetConstantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SuggestGeoTargetConstantsRequest.Merge(dst, src) +} +func (m *SuggestGeoTargetConstantsRequest) XXX_Size() int { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest.Size(m) +} +func (m *SuggestGeoTargetConstantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SuggestGeoTargetConstantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SuggestGeoTargetConstantsRequest proto.InternalMessageInfo + +func (m *SuggestGeoTargetConstantsRequest) GetLocale() *wrappers.StringValue { + if m != nil { + return m.Locale + } + return nil +} + +func (m *SuggestGeoTargetConstantsRequest) GetCountryCode() *wrappers.StringValue { + if m != nil { + return m.CountryCode + } + return nil +} + +type isSuggestGeoTargetConstantsRequest_Query interface { + isSuggestGeoTargetConstantsRequest_Query() +} + +type SuggestGeoTargetConstantsRequest_LocationNames_ struct { + LocationNames *SuggestGeoTargetConstantsRequest_LocationNames `protobuf:"bytes,1,opt,name=location_names,json=locationNames,proto3,oneof"` +} + +type SuggestGeoTargetConstantsRequest_GeoTargets_ struct { + GeoTargets *SuggestGeoTargetConstantsRequest_GeoTargets `protobuf:"bytes,2,opt,name=geo_targets,json=geoTargets,proto3,oneof"` +} + +func (*SuggestGeoTargetConstantsRequest_LocationNames_) isSuggestGeoTargetConstantsRequest_Query() {} + +func (*SuggestGeoTargetConstantsRequest_GeoTargets_) isSuggestGeoTargetConstantsRequest_Query() {} + +func (m *SuggestGeoTargetConstantsRequest) GetQuery() isSuggestGeoTargetConstantsRequest_Query { + if m != nil { + return m.Query + } + return nil +} + +func (m *SuggestGeoTargetConstantsRequest) GetLocationNames() *SuggestGeoTargetConstantsRequest_LocationNames { + if x, ok := m.GetQuery().(*SuggestGeoTargetConstantsRequest_LocationNames_); ok { + return x.LocationNames + } + return nil +} + +func (m *SuggestGeoTargetConstantsRequest) GetGeoTargets() *SuggestGeoTargetConstantsRequest_GeoTargets { + if x, ok := m.GetQuery().(*SuggestGeoTargetConstantsRequest_GeoTargets_); ok { + return x.GeoTargets + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SuggestGeoTargetConstantsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SuggestGeoTargetConstantsRequest_OneofMarshaler, _SuggestGeoTargetConstantsRequest_OneofUnmarshaler, _SuggestGeoTargetConstantsRequest_OneofSizer, []interface{}{ + (*SuggestGeoTargetConstantsRequest_LocationNames_)(nil), + (*SuggestGeoTargetConstantsRequest_GeoTargets_)(nil), + } +} + +func _SuggestGeoTargetConstantsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SuggestGeoTargetConstantsRequest) + // query + switch x := m.Query.(type) { + case *SuggestGeoTargetConstantsRequest_LocationNames_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LocationNames); err != nil { + return err + } + case *SuggestGeoTargetConstantsRequest_GeoTargets_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GeoTargets); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SuggestGeoTargetConstantsRequest.Query has unexpected type %T", x) + } + return nil +} + +func _SuggestGeoTargetConstantsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SuggestGeoTargetConstantsRequest) + switch tag { + case 1: // query.location_names + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SuggestGeoTargetConstantsRequest_LocationNames) + err := b.DecodeMessage(msg) + m.Query = &SuggestGeoTargetConstantsRequest_LocationNames_{msg} + return true, err + case 2: // query.geo_targets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SuggestGeoTargetConstantsRequest_GeoTargets) + err := b.DecodeMessage(msg) + m.Query = &SuggestGeoTargetConstantsRequest_GeoTargets_{msg} + return true, err + default: + return false, nil + } +} + +func _SuggestGeoTargetConstantsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SuggestGeoTargetConstantsRequest) + // query + switch x := m.Query.(type) { + case *SuggestGeoTargetConstantsRequest_LocationNames_: + s := proto.Size(x.LocationNames) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SuggestGeoTargetConstantsRequest_GeoTargets_: + s := proto.Size(x.GeoTargets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A list of location names. +type SuggestGeoTargetConstantsRequest_LocationNames struct { + // A list of location names. 
+ Names []*wrappers.StringValue `protobuf:"bytes,1,rep,name=names,proto3" json:"names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SuggestGeoTargetConstantsRequest_LocationNames) Reset() { + *m = SuggestGeoTargetConstantsRequest_LocationNames{} +} +func (m *SuggestGeoTargetConstantsRequest_LocationNames) String() string { + return proto.CompactTextString(m) +} +func (*SuggestGeoTargetConstantsRequest_LocationNames) ProtoMessage() {} +func (*SuggestGeoTargetConstantsRequest_LocationNames) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{1, 0} +} +func (m *SuggestGeoTargetConstantsRequest_LocationNames) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames.Unmarshal(m, b) +} +func (m *SuggestGeoTargetConstantsRequest_LocationNames) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames.Marshal(b, m, deterministic) +} +func (dst *SuggestGeoTargetConstantsRequest_LocationNames) XXX_Merge(src proto.Message) { + xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames.Merge(dst, src) +} +func (m *SuggestGeoTargetConstantsRequest_LocationNames) XXX_Size() int { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames.Size(m) +} +func (m *SuggestGeoTargetConstantsRequest_LocationNames) XXX_DiscardUnknown() { + xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames.DiscardUnknown(m) +} + +var xxx_messageInfo_SuggestGeoTargetConstantsRequest_LocationNames proto.InternalMessageInfo + +func (m *SuggestGeoTargetConstantsRequest_LocationNames) GetNames() []*wrappers.StringValue { + if m != nil { + return m.Names + } + return nil +} + +// A list of geo target constant resource names. +type SuggestGeoTargetConstantsRequest_GeoTargets struct { + // A list of geo target constant resource names. 
+ GeoTargetConstants []*wrappers.StringValue `protobuf:"bytes,1,rep,name=geo_target_constants,json=geoTargetConstants,proto3" json:"geo_target_constants,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) Reset() { + *m = SuggestGeoTargetConstantsRequest_GeoTargets{} +} +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) String() string { + return proto.CompactTextString(m) +} +func (*SuggestGeoTargetConstantsRequest_GeoTargets) ProtoMessage() {} +func (*SuggestGeoTargetConstantsRequest_GeoTargets) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{1, 1} +} +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets.Unmarshal(m, b) +} +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets.Marshal(b, m, deterministic) +} +func (dst *SuggestGeoTargetConstantsRequest_GeoTargets) XXX_Merge(src proto.Message) { + xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets.Merge(dst, src) +} +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) XXX_Size() int { + return xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets.Size(m) +} +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) XXX_DiscardUnknown() { + xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets.DiscardUnknown(m) +} + +var xxx_messageInfo_SuggestGeoTargetConstantsRequest_GeoTargets proto.InternalMessageInfo + +func (m *SuggestGeoTargetConstantsRequest_GeoTargets) GetGeoTargetConstants() []*wrappers.StringValue { + if m != nil { + return m.GeoTargetConstants + } + return nil +} + +// Response message for [GeoTargetConstantService.SuggestGeoTargetConstants][google.ads.googleads.v1.services.GeoTargetConstantService.SuggestGeoTargetConstants] +type SuggestGeoTargetConstantsResponse struct { + // Geo target constant suggestions. 
+ GeoTargetConstantSuggestions []*GeoTargetConstantSuggestion `protobuf:"bytes,1,rep,name=geo_target_constant_suggestions,json=geoTargetConstantSuggestions,proto3" json:"geo_target_constant_suggestions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SuggestGeoTargetConstantsResponse) Reset() { *m = SuggestGeoTargetConstantsResponse{} } +func (m *SuggestGeoTargetConstantsResponse) String() string { return proto.CompactTextString(m) } +func (*SuggestGeoTargetConstantsResponse) ProtoMessage() {} +func (*SuggestGeoTargetConstantsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{2} +} +func (m *SuggestGeoTargetConstantsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SuggestGeoTargetConstantsResponse.Unmarshal(m, b) +} +func (m *SuggestGeoTargetConstantsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SuggestGeoTargetConstantsResponse.Marshal(b, m, deterministic) +} +func (dst *SuggestGeoTargetConstantsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SuggestGeoTargetConstantsResponse.Merge(dst, src) +} +func (m *SuggestGeoTargetConstantsResponse) XXX_Size() int { + return xxx_messageInfo_SuggestGeoTargetConstantsResponse.Size(m) +} +func (m *SuggestGeoTargetConstantsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SuggestGeoTargetConstantsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SuggestGeoTargetConstantsResponse proto.InternalMessageInfo + +func (m *SuggestGeoTargetConstantsResponse) GetGeoTargetConstantSuggestions() []*GeoTargetConstantSuggestion { + if m != nil { + return m.GeoTargetConstantSuggestions + } + return nil +} + +// A geo target constant suggestion. +type GeoTargetConstantSuggestion struct { + // The language this GeoTargetConstantSuggestion is currently translated to. + // It affects the name of geo target fields. For example, if locale=en, then + // name=Spain. If locale=es, then name=España. The default locale will be + // returned if no translation exists for the locale in the request. + Locale *wrappers.StringValue `protobuf:"bytes,1,opt,name=locale,proto3" json:"locale,omitempty"` + // Approximate user population that will be targeted, rounded to the + // nearest 100. + Reach *wrappers.Int64Value `protobuf:"bytes,2,opt,name=reach,proto3" json:"reach,omitempty"` + // If the request searched by location name, this is the location name that + // matched the geo target. + SearchTerm *wrappers.StringValue `protobuf:"bytes,3,opt,name=search_term,json=searchTerm,proto3" json:"search_term,omitempty"` + // The GeoTargetConstant result. + GeoTargetConstant *resources.GeoTargetConstant `protobuf:"bytes,4,opt,name=geo_target_constant,json=geoTargetConstant,proto3" json:"geo_target_constant,omitempty"` + // The list of parents of the geo target constant. 
+ GeoTargetConstantParents []*resources.GeoTargetConstant `protobuf:"bytes,5,rep,name=geo_target_constant_parents,json=geoTargetConstantParents,proto3" json:"geo_target_constant_parents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GeoTargetConstantSuggestion) Reset() { *m = GeoTargetConstantSuggestion{} } +func (m *GeoTargetConstantSuggestion) String() string { return proto.CompactTextString(m) } +func (*GeoTargetConstantSuggestion) ProtoMessage() {} +func (*GeoTargetConstantSuggestion) Descriptor() ([]byte, []int) { + return fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c, []int{3} +} +func (m *GeoTargetConstantSuggestion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GeoTargetConstantSuggestion.Unmarshal(m, b) +} +func (m *GeoTargetConstantSuggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GeoTargetConstantSuggestion.Marshal(b, m, deterministic) +} +func (dst *GeoTargetConstantSuggestion) XXX_Merge(src proto.Message) { + xxx_messageInfo_GeoTargetConstantSuggestion.Merge(dst, src) +} +func (m *GeoTargetConstantSuggestion) XXX_Size() int { + return xxx_messageInfo_GeoTargetConstantSuggestion.Size(m) +} +func (m *GeoTargetConstantSuggestion) XXX_DiscardUnknown() { + xxx_messageInfo_GeoTargetConstantSuggestion.DiscardUnknown(m) +} + +var xxx_messageInfo_GeoTargetConstantSuggestion proto.InternalMessageInfo + +func (m *GeoTargetConstantSuggestion) GetLocale() *wrappers.StringValue { + if m != nil { + return m.Locale + } + return nil +} + +func (m *GeoTargetConstantSuggestion) GetReach() *wrappers.Int64Value { + if m != nil { + return m.Reach + } + return nil +} + +func (m *GeoTargetConstantSuggestion) GetSearchTerm() *wrappers.StringValue { + if m != nil { + return m.SearchTerm + } + return nil +} + +func (m *GeoTargetConstantSuggestion) GetGeoTargetConstant() *resources.GeoTargetConstant { + if m != nil { + return m.GeoTargetConstant + } + return nil +} + +func (m *GeoTargetConstantSuggestion) GetGeoTargetConstantParents() []*resources.GeoTargetConstant { + if m != nil { + return m.GeoTargetConstantParents + } + return nil +} + +func init() { + proto.RegisterType((*GetGeoTargetConstantRequest)(nil), "google.ads.googleads.v1.services.GetGeoTargetConstantRequest") + proto.RegisterType((*SuggestGeoTargetConstantsRequest)(nil), "google.ads.googleads.v1.services.SuggestGeoTargetConstantsRequest") + proto.RegisterType((*SuggestGeoTargetConstantsRequest_LocationNames)(nil), "google.ads.googleads.v1.services.SuggestGeoTargetConstantsRequest.LocationNames") + proto.RegisterType((*SuggestGeoTargetConstantsRequest_GeoTargets)(nil), "google.ads.googleads.v1.services.SuggestGeoTargetConstantsRequest.GeoTargets") + proto.RegisterType((*SuggestGeoTargetConstantsResponse)(nil), "google.ads.googleads.v1.services.SuggestGeoTargetConstantsResponse") + proto.RegisterType((*GeoTargetConstantSuggestion)(nil), "google.ads.googleads.v1.services.GeoTargetConstantSuggestion") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GeoTargetConstantServiceClient is the client API for GeoTargetConstantService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GeoTargetConstantServiceClient interface { + // Returns the requested geo target constant in full detail. + GetGeoTargetConstant(ctx context.Context, in *GetGeoTargetConstantRequest, opts ...grpc.CallOption) (*resources.GeoTargetConstant, error) + // Returns GeoTargetConstant suggestions by location name or by resource name. + SuggestGeoTargetConstants(ctx context.Context, in *SuggestGeoTargetConstantsRequest, opts ...grpc.CallOption) (*SuggestGeoTargetConstantsResponse, error) +} + +type geoTargetConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewGeoTargetConstantServiceClient(cc *grpc.ClientConn) GeoTargetConstantServiceClient { + return &geoTargetConstantServiceClient{cc} +} + +func (c *geoTargetConstantServiceClient) GetGeoTargetConstant(ctx context.Context, in *GetGeoTargetConstantRequest, opts ...grpc.CallOption) (*resources.GeoTargetConstant, error) { + out := new(resources.GeoTargetConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GeoTargetConstantService/GetGeoTargetConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *geoTargetConstantServiceClient) SuggestGeoTargetConstants(ctx context.Context, in *SuggestGeoTargetConstantsRequest, opts ...grpc.CallOption) (*SuggestGeoTargetConstantsResponse, error) { + out := new(SuggestGeoTargetConstantsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GeoTargetConstantService/SuggestGeoTargetConstants", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GeoTargetConstantServiceServer is the server API for GeoTargetConstantService service. +type GeoTargetConstantServiceServer interface { + // Returns the requested geo target constant in full detail. + GetGeoTargetConstant(context.Context, *GetGeoTargetConstantRequest) (*resources.GeoTargetConstant, error) + // Returns GeoTargetConstant suggestions by location name or by resource name. 
+ SuggestGeoTargetConstants(context.Context, *SuggestGeoTargetConstantsRequest) (*SuggestGeoTargetConstantsResponse, error) +} + +func RegisterGeoTargetConstantServiceServer(s *grpc.Server, srv GeoTargetConstantServiceServer) { + s.RegisterService(&_GeoTargetConstantService_serviceDesc, srv) +} + +func _GeoTargetConstantService_GetGeoTargetConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGeoTargetConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GeoTargetConstantServiceServer).GetGeoTargetConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GeoTargetConstantService/GetGeoTargetConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GeoTargetConstantServiceServer).GetGeoTargetConstant(ctx, req.(*GetGeoTargetConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GeoTargetConstantService_SuggestGeoTargetConstants_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SuggestGeoTargetConstantsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GeoTargetConstantServiceServer).SuggestGeoTargetConstants(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GeoTargetConstantService/SuggestGeoTargetConstants", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GeoTargetConstantServiceServer).SuggestGeoTargetConstants(ctx, req.(*SuggestGeoTargetConstantsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GeoTargetConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GeoTargetConstantService", + HandlerType: (*GeoTargetConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGeoTargetConstant", + Handler: _GeoTargetConstantService_GetGeoTargetConstant_Handler, + }, + { + MethodName: "SuggestGeoTargetConstants", + Handler: _GeoTargetConstantService_SuggestGeoTargetConstants_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/geo_target_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/geo_target_constant_service.proto", fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c) +} + +var fileDescriptor_geo_target_constant_service_194f6c4b7231ee1c = []byte{ + // 714 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0xfd, 0x9c, 0x34, 0xfd, 0xc4, 0xa4, 0x45, 0x62, 0xe8, 0xc2, 0xa4, 0xa5, 0x04, 0xb7, 0x8b, + 0xd2, 0x85, 0x4d, 0x42, 0xc5, 0xc2, 0x55, 0x85, 0x92, 0x2c, 0x52, 0x24, 0xa8, 0xa2, 0xb4, 0xca, + 0x02, 0x45, 0x8a, 0xa6, 0xf6, 0x65, 0x1a, 0x29, 0xf1, 0xb8, 0x33, 0xe3, 0xa2, 0x0a, 0xb1, 0x41, + 0x7d, 0x03, 0x5e, 0x00, 0xb1, 0xac, 0x58, 0xb1, 0xe7, 0x05, 0xd8, 0xb0, 0xe0, 0x15, 0x58, 0xf1, + 0x14, 0xc8, 0xf6, 0xd8, 0x6d, 0xe5, 0x98, 0x84, 0x96, 0xdd, 0x78, 0xee, 0xbd, 0xe7, 0xcc, 0xb9, + 0x7f, 0x46, 0x4d, 0xca, 0x18, 0x1d, 0x81, 0x45, 0x5c, 0x61, 0xc5, 0xc7, 0xf0, 0x74, 0x52, 0xb3, + 0x04, 0xf0, 0x93, 0xa1, 0x03, 0xc2, 0xa2, 0xc0, 0x06, 0x92, 0x70, 0x0a, 0x72, 0xe0, 
0x30, 0x4f, + 0x48, 0xe2, 0xc9, 0x81, 0x32, 0x9a, 0x3e, 0x67, 0x92, 0xe1, 0x6a, 0x1c, 0x68, 0x12, 0x57, 0x98, + 0x29, 0x86, 0x79, 0x52, 0x33, 0x13, 0x8c, 0xca, 0x76, 0x1e, 0x0b, 0x07, 0xc1, 0x02, 0x9e, 0x43, + 0x13, 0xc3, 0x57, 0x56, 0x92, 0x60, 0x7f, 0x68, 0x11, 0xcf, 0x63, 0x92, 0xc8, 0x21, 0xf3, 0x84, + 0xb2, 0xae, 0x2a, 0x6b, 0xf4, 0x75, 0x18, 0xbc, 0xb6, 0xde, 0x70, 0xe2, 0xfb, 0xc0, 0x95, 0xdd, + 0x68, 0xa2, 0xe5, 0x36, 0xc8, 0x36, 0xb0, 0x83, 0x08, 0xbc, 0xa5, 0xb0, 0xbb, 0x70, 0x1c, 0x80, + 0x90, 0x78, 0x0d, 0x2d, 0x26, 0x6f, 0x18, 0x78, 0x64, 0x0c, 0xba, 0x56, 0xd5, 0x36, 0x6e, 0x75, + 0x17, 0x92, 0xcb, 0x3d, 0x32, 0x06, 0xe3, 0xcb, 0x1c, 0xaa, 0xee, 0x07, 0x94, 0x82, 0xc8, 0x02, + 0x89, 0x04, 0x69, 0x0b, 0xcd, 0x8f, 0x98, 0x43, 0x46, 0xa0, 0x17, 0xab, 0xda, 0x46, 0xb9, 0xbe, + 0xa2, 0x72, 0x61, 0x26, 0x2f, 0x33, 0xf7, 0x25, 0x1f, 0x7a, 0xb4, 0x47, 0x46, 0x01, 0x74, 0x95, + 0x2f, 0x7e, 0x86, 0x16, 0x1c, 0x16, 0x78, 0x92, 0x9f, 0x0e, 0x1c, 0xe6, 0x82, 0x5e, 0x9a, 0x21, + 0xb6, 0xac, 0x22, 0x5a, 0xcc, 0x05, 0x7c, 0x8a, 0x6e, 0x87, 0x50, 0x61, 0x4a, 0x22, 0x01, 0x22, + 0x52, 0x50, 0xae, 0x77, 0xcc, 0x69, 0x55, 0x31, 0xa7, 0x49, 0x32, 0x5f, 0x28, 0xe0, 0x30, 0x09, + 0x62, 0xf7, 0xbf, 0xee, 0xe2, 0xe8, 0xf2, 0x05, 0xf6, 0x51, 0xf9, 0xa2, 0x6a, 0x42, 0x2f, 0x44, + 0xbc, 0x2f, 0xff, 0x01, 0x6f, 0x6a, 0x09, 0x49, 0x11, 0x4d, 0xbf, 0x2a, 0x2d, 0xb4, 0x78, 0xe5, + 0x4d, 0xb8, 0x8e, 0x4a, 0x89, 0xe8, 0xe2, 0xd4, 0xbc, 0xc5, 0xae, 0x95, 0x3e, 0x42, 0x17, 0x04, + 0x78, 0x0f, 0x2d, 0x4d, 0x68, 0xbd, 0xd9, 0x00, 0x31, 0xcd, 0x48, 0x68, 0xfe, 0x8f, 0x4a, 0xc7, + 0x01, 0xf0, 0x53, 0xe3, 0x5c, 0x43, 0x0f, 0xff, 0xa0, 0x54, 0xf8, 0xcc, 0x13, 0x80, 0xcf, 0x34, + 0xf4, 0x60, 0xe2, 0x84, 0xc5, 0x91, 0x61, 0xa3, 0xab, 0xa7, 0xec, 0x4c, 0x4f, 0x6c, 0x86, 0x67, + 0x3f, 0x45, 0xe9, 0xae, 0xd0, 0x7c, 0xa3, 0x30, 0x3e, 0x16, 0xc3, 0x31, 0xc9, 0x75, 0xb8, 0xd4, + 0xdc, 0xda, 0x5f, 0x34, 0x77, 0x0d, 0x95, 0x38, 0x10, 0xe7, 0x48, 0xb5, 0xc6, 0x72, 0x26, 0xe8, + 0xb9, 0x27, 0x9f, 0x6e, 0xa9, 0xe2, 0x44, 0x9e, 0x78, 0x07, 0x95, 0x05, 0x10, 0xee, 0x1c, 0x0d, + 0x24, 0xf0, 0xf1, 0x4c, 0xa3, 0x84, 0xe2, 0x80, 0x03, 0xe0, 0x63, 0xec, 0xa2, 0xbb, 0x13, 0xb2, + 0xa9, 0xcf, 0x45, 0x30, 0x5b, 0xb9, 0x19, 0x4c, 0xd7, 0x50, 0x36, 0x85, 0xdd, 0x3b, 0x99, 0xc4, + 0x61, 0x81, 0x96, 0x27, 0xd5, 0xcc, 0x27, 0x1c, 0xc2, 0xd6, 0x29, 0x45, 0xf5, 0xba, 0x1e, 0x9b, + 0x9e, 0x61, 0xeb, 0xc4, 0xa8, 0xf5, 0xcf, 0x45, 0xa4, 0x67, 0x4b, 0x14, 0x97, 0x1e, 0x7f, 0xd5, + 0xd0, 0xd2, 0xa4, 0x35, 0x87, 0x67, 0xea, 0x9a, 0xdc, 0xf5, 0x58, 0xb9, 0x96, 0x08, 0xe3, 0xf1, + 0xfb, 0x1f, 0x3f, 0x3f, 0x14, 0x36, 0xf1, 0x46, 0xb8, 0xe2, 0xdf, 0x5e, 0xd9, 0xaf, 0x3b, 0xd9, + 0x91, 0xb1, 0x36, 0xdf, 0xe1, 0xef, 0x1a, 0xba, 0x97, 0x3b, 0x2c, 0xb8, 0x79, 0xf3, 0x9d, 0x52, + 0x69, 0xdd, 0x08, 0x23, 0x9e, 0x56, 0xe3, 0x51, 0x24, 0x6c, 0xcd, 0x58, 0x0d, 0x85, 0x65, 0x95, + 0xd8, 0x6a, 0x6a, 0x6d, 0x6d, 0xb3, 0x79, 0x56, 0x40, 0xeb, 0x0e, 0x1b, 0x4f, 0x65, 0x6d, 0xde, + 0xcf, 0x2b, 0x6a, 0x27, 0x6c, 0xf6, 0x8e, 0xf6, 0x6a, 0x57, 0x41, 0x50, 0x36, 0x22, 0x1e, 0x35, + 0x19, 0xa7, 0x16, 0x05, 0x2f, 0x1a, 0x85, 0xe4, 0x67, 0xea, 0x0f, 0x45, 0xfe, 0x1f, 0x7c, 0x3b, + 0x39, 0x7c, 0x2a, 0x14, 0xdb, 0x8d, 0xc6, 0x79, 0xa1, 0xda, 0x8e, 0x01, 0x1b, 0xae, 0x30, 0xe3, + 0x63, 0x78, 0xea, 0xd5, 0x4c, 0x45, 0x2c, 0xbe, 0x25, 0x2e, 0xfd, 0x86, 0x2b, 0xfa, 0xa9, 0x4b, + 0xbf, 0x57, 0xeb, 0x27, 0x2e, 0xbf, 0x0a, 0xeb, 0xf1, 0xbd, 0x6d, 0x37, 0x5c, 0x61, 0xdb, 0xa9, + 0x93, 0x6d, 0xf7, 0x6a, 0xb6, 0x9d, 0xb8, 0x1d, 0xce, 0x47, 0xef, 0x7c, 0xf2, 0x3b, 0x00, 0x00, + 0xff, 0xff, 
0x1e, 0xee, 0xdc, 0x5b, 0x68, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geographic_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geographic_view_service.pb.go new file mode 100644 index 0000000..eccf4a9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/geographic_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/geographic_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GeographicViewService.GetGeographicView][google.ads.googleads.v1.services.GeographicViewService.GetGeographicView]. +type GetGeographicViewRequest struct { + // The resource name of the geographic view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGeographicViewRequest) Reset() { *m = GetGeographicViewRequest{} } +func (m *GetGeographicViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetGeographicViewRequest) ProtoMessage() {} +func (*GetGeographicViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_geographic_view_service_33f5abb48adfe50d, []int{0} +} +func (m *GetGeographicViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGeographicViewRequest.Unmarshal(m, b) +} +func (m *GetGeographicViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGeographicViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetGeographicViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGeographicViewRequest.Merge(dst, src) +} +func (m *GetGeographicViewRequest) XXX_Size() int { + return xxx_messageInfo_GetGeographicViewRequest.Size(m) +} +func (m *GetGeographicViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGeographicViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGeographicViewRequest proto.InternalMessageInfo + +func (m *GetGeographicViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetGeographicViewRequest)(nil), "google.ads.googleads.v1.services.GetGeographicViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GeographicViewServiceClient is the client API for GeographicViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GeographicViewServiceClient interface { + // Returns the requested geographic view in full detail. + GetGeographicView(ctx context.Context, in *GetGeographicViewRequest, opts ...grpc.CallOption) (*resources.GeographicView, error) +} + +type geographicViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewGeographicViewServiceClient(cc *grpc.ClientConn) GeographicViewServiceClient { + return &geographicViewServiceClient{cc} +} + +func (c *geographicViewServiceClient) GetGeographicView(ctx context.Context, in *GetGeographicViewRequest, opts ...grpc.CallOption) (*resources.GeographicView, error) { + out := new(resources.GeographicView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GeographicViewService/GetGeographicView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GeographicViewServiceServer is the server API for GeographicViewService service. +type GeographicViewServiceServer interface { + // Returns the requested geographic view in full detail. + GetGeographicView(context.Context, *GetGeographicViewRequest) (*resources.GeographicView, error) +} + +func RegisterGeographicViewServiceServer(s *grpc.Server, srv GeographicViewServiceServer) { + s.RegisterService(&_GeographicViewService_serviceDesc, srv) +} + +func _GeographicViewService_GetGeographicView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGeographicViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GeographicViewServiceServer).GetGeographicView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GeographicViewService/GetGeographicView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GeographicViewServiceServer).GetGeographicView(ctx, req.(*GetGeographicViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GeographicViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GeographicViewService", + HandlerType: (*GeographicViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGeographicView", + Handler: _GeographicViewService_GetGeographicView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/geographic_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/geographic_view_service.proto", fileDescriptor_geographic_view_service_33f5abb48adfe50d) +} + +var fileDescriptor_geographic_view_service_33f5abb48adfe50d = []byte{ + // 365 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xbf, 0x4a, 0xc3, 0x40, + 0x1c, 0xc7, 0x49, 0x04, 0xc1, 0xa0, 0x83, 0x01, 0xa1, 0x04, 0x87, 0x52, 0x3b, 0x48, 0x87, 0x3b, + 0x62, 0x07, 0xf1, 0x44, 0x25, 0x5d, 0xe2, 0x24, 0xa5, 0x42, 0x06, 0x09, 0x94, 0x33, 0x39, 0xce, + 0x40, 0x93, 0x8b, 
0xf7, 0x4b, 0xd3, 0x41, 0x1c, 0xf4, 0x15, 0x7c, 0x03, 0x47, 0xdf, 0xc1, 0x17, + 0x70, 0x75, 0xf0, 0x05, 0x9c, 0x7c, 0x0a, 0x49, 0xaf, 0x17, 0x2c, 0x36, 0x74, 0xfb, 0x72, 0xbf, + 0xef, 0xe7, 0xf7, 0xe7, 0x9b, 0x58, 0xe7, 0x5c, 0x08, 0x3e, 0x61, 0x98, 0xc6, 0x80, 0x95, 0xac, + 0x54, 0xe9, 0x62, 0x60, 0xb2, 0x4c, 0x22, 0x06, 0x98, 0x33, 0xc1, 0x25, 0xcd, 0xef, 0x92, 0x68, + 0x5c, 0x26, 0x6c, 0x36, 0x5e, 0x14, 0x50, 0x2e, 0x45, 0x21, 0xec, 0xb6, 0x82, 0x10, 0x8d, 0x01, + 0xd5, 0x3c, 0x2a, 0x5d, 0xa4, 0x79, 0xe7, 0xb8, 0x69, 0x82, 0x64, 0x20, 0xa6, 0x72, 0xc5, 0x08, + 0xd5, 0xda, 0xd9, 0xd7, 0x60, 0x9e, 0x60, 0x9a, 0x65, 0xa2, 0xa0, 0x45, 0x22, 0x32, 0x50, 0xd5, + 0xce, 0x85, 0xd5, 0xf2, 0x59, 0xe1, 0xd7, 0x64, 0x90, 0xb0, 0xd9, 0x88, 0xdd, 0x4f, 0x19, 0x14, + 0xf6, 0x81, 0xb5, 0xa3, 0x9b, 0x8f, 0x33, 0x9a, 0xb2, 0x96, 0xd1, 0x36, 0x0e, 0xb7, 0x46, 0xdb, + 0xfa, 0xf1, 0x8a, 0xa6, 0xec, 0xe8, 0xcb, 0xb0, 0xf6, 0x96, 0xf1, 0x6b, 0xb5, 0xb2, 0xfd, 0x6e, + 0x58, 0xbb, 0xff, 0x7a, 0xdb, 0x04, 0xad, 0x3b, 0x15, 0x35, 0x2d, 0xe4, 0xb8, 0x8d, 0x6c, 0x1d, + 0x02, 0x5a, 0x26, 0x3b, 0x27, 0xcf, 0x9f, 0xdf, 0x2f, 0x66, 0xdf, 0x76, 0xab, 0xa8, 0x1e, 0x96, + 0xce, 0x39, 0x8b, 0xa6, 0x50, 0x88, 0x94, 0x49, 0xc0, 0xbd, 0x3f, 0xd9, 0x55, 0x18, 0xe0, 0xde, + 0xe3, 0xe0, 0xc9, 0xb4, 0xba, 0x91, 0x48, 0xd7, 0xee, 0x3b, 0x70, 0x56, 0xde, 0x3f, 0xac, 0xf2, + 0x1d, 0x1a, 0x37, 0x97, 0x0b, 0x9e, 0x8b, 0x09, 0xcd, 0x38, 0x12, 0x92, 0x63, 0xce, 0xb2, 0x79, + 0xfa, 0xfa, 0x43, 0xe6, 0x09, 0x34, 0xff, 0x39, 0xa7, 0x5a, 0xbc, 0x9a, 0x1b, 0xbe, 0xe7, 0xbd, + 0x99, 0x6d, 0x5f, 0x35, 0xf4, 0x62, 0x40, 0x4a, 0x56, 0x2a, 0x70, 0xd1, 0x62, 0x30, 0x7c, 0x68, + 0x4b, 0xe8, 0xc5, 0x10, 0xd6, 0x96, 0x30, 0x70, 0x43, 0x6d, 0xf9, 0x31, 0xbb, 0xea, 0x9d, 0x10, + 0x2f, 0x06, 0x42, 0x6a, 0x13, 0x21, 0x81, 0x4b, 0x88, 0xb6, 0xdd, 0x6e, 0xce, 0xf7, 0xec, 0xff, + 0x06, 0x00, 0x00, 0xff, 0xff, 0x20, 0xad, 0x3a, 0xfd, 0xe0, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_field_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_field_service.pb.go new file mode 100644 index 0000000..25dcdb5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_field_service.pb.go @@ -0,0 +1,345 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/google_ads_field_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GoogleAdsFieldService.GetGoogleAdsField][google.ads.googleads.v1.services.GoogleAdsFieldService.GetGoogleAdsField]. 
+type GetGoogleAdsFieldRequest struct { + // The resource name of the field to get. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGoogleAdsFieldRequest) Reset() { *m = GetGoogleAdsFieldRequest{} } +func (m *GetGoogleAdsFieldRequest) String() string { return proto.CompactTextString(m) } +func (*GetGoogleAdsFieldRequest) ProtoMessage() {} +func (*GetGoogleAdsFieldRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_service_c68db3ae861d47e1, []int{0} +} +func (m *GetGoogleAdsFieldRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGoogleAdsFieldRequest.Unmarshal(m, b) +} +func (m *GetGoogleAdsFieldRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGoogleAdsFieldRequest.Marshal(b, m, deterministic) +} +func (dst *GetGoogleAdsFieldRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGoogleAdsFieldRequest.Merge(dst, src) +} +func (m *GetGoogleAdsFieldRequest) XXX_Size() int { + return xxx_messageInfo_GetGoogleAdsFieldRequest.Size(m) +} +func (m *GetGoogleAdsFieldRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGoogleAdsFieldRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGoogleAdsFieldRequest proto.InternalMessageInfo + +func (m *GetGoogleAdsFieldRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [GoogleAdsFieldService.SearchGoogleAdsFields][google.ads.googleads.v1.services.GoogleAdsFieldService.SearchGoogleAdsFields]. +type SearchGoogleAdsFieldsRequest struct { + // The query string. + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + // Token of the page to retrieve. If not specified, the first page of + // results will be returned. Use the value obtained from `next_page_token` + // in the previous response in order to request the next page of results. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Number of elements to retrieve in a single page. + // When too large a page is requested, the server may decide to further + // limit the number of returned resources. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchGoogleAdsFieldsRequest) Reset() { *m = SearchGoogleAdsFieldsRequest{} } +func (m *SearchGoogleAdsFieldsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchGoogleAdsFieldsRequest) ProtoMessage() {} +func (*SearchGoogleAdsFieldsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_service_c68db3ae861d47e1, []int{1} +} +func (m *SearchGoogleAdsFieldsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchGoogleAdsFieldsRequest.Unmarshal(m, b) +} +func (m *SearchGoogleAdsFieldsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchGoogleAdsFieldsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchGoogleAdsFieldsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchGoogleAdsFieldsRequest.Merge(dst, src) +} +func (m *SearchGoogleAdsFieldsRequest) XXX_Size() int { + return xxx_messageInfo_SearchGoogleAdsFieldsRequest.Size(m) +} +func (m *SearchGoogleAdsFieldsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchGoogleAdsFieldsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchGoogleAdsFieldsRequest proto.InternalMessageInfo + +func (m *SearchGoogleAdsFieldsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *SearchGoogleAdsFieldsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchGoogleAdsFieldsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for [GoogleAdsFieldService.SearchGoogleAdsFields][google.ads.googleads.v1.services.GoogleAdsFieldService.SearchGoogleAdsFields]. +type SearchGoogleAdsFieldsResponse struct { + // The list of fields that matched the query. + Results []*resources.GoogleAdsField `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Pagination token used to retrieve the next page of results. Pass the + // content of this string as the `page_token` attribute of the next request. + // `next_page_token` is not returned for the last page. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Total number of results that match the query ignoring the LIMIT clause. 
+ TotalResultsCount int64 `protobuf:"varint,3,opt,name=total_results_count,json=totalResultsCount,proto3" json:"total_results_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchGoogleAdsFieldsResponse) Reset() { *m = SearchGoogleAdsFieldsResponse{} } +func (m *SearchGoogleAdsFieldsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchGoogleAdsFieldsResponse) ProtoMessage() {} +func (*SearchGoogleAdsFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_field_service_c68db3ae861d47e1, []int{2} +} +func (m *SearchGoogleAdsFieldsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchGoogleAdsFieldsResponse.Unmarshal(m, b) +} +func (m *SearchGoogleAdsFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchGoogleAdsFieldsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchGoogleAdsFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchGoogleAdsFieldsResponse.Merge(dst, src) +} +func (m *SearchGoogleAdsFieldsResponse) XXX_Size() int { + return xxx_messageInfo_SearchGoogleAdsFieldsResponse.Size(m) +} +func (m *SearchGoogleAdsFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchGoogleAdsFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchGoogleAdsFieldsResponse proto.InternalMessageInfo + +func (m *SearchGoogleAdsFieldsResponse) GetResults() []*resources.GoogleAdsField { + if m != nil { + return m.Results + } + return nil +} + +func (m *SearchGoogleAdsFieldsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *SearchGoogleAdsFieldsResponse) GetTotalResultsCount() int64 { + if m != nil { + return m.TotalResultsCount + } + return 0 +} + +func init() { + proto.RegisterType((*GetGoogleAdsFieldRequest)(nil), "google.ads.googleads.v1.services.GetGoogleAdsFieldRequest") + proto.RegisterType((*SearchGoogleAdsFieldsRequest)(nil), "google.ads.googleads.v1.services.SearchGoogleAdsFieldsRequest") + proto.RegisterType((*SearchGoogleAdsFieldsResponse)(nil), "google.ads.googleads.v1.services.SearchGoogleAdsFieldsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GoogleAdsFieldServiceClient is the client API for GoogleAdsFieldService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GoogleAdsFieldServiceClient interface { + // Returns just the requested field. + GetGoogleAdsField(ctx context.Context, in *GetGoogleAdsFieldRequest, opts ...grpc.CallOption) (*resources.GoogleAdsField, error) + // Returns all fields that match the search query. 
+ SearchGoogleAdsFields(ctx context.Context, in *SearchGoogleAdsFieldsRequest, opts ...grpc.CallOption) (*SearchGoogleAdsFieldsResponse, error) +} + +type googleAdsFieldServiceClient struct { + cc *grpc.ClientConn +} + +func NewGoogleAdsFieldServiceClient(cc *grpc.ClientConn) GoogleAdsFieldServiceClient { + return &googleAdsFieldServiceClient{cc} +} + +func (c *googleAdsFieldServiceClient) GetGoogleAdsField(ctx context.Context, in *GetGoogleAdsFieldRequest, opts ...grpc.CallOption) (*resources.GoogleAdsField, error) { + out := new(resources.GoogleAdsField) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GoogleAdsFieldService/GetGoogleAdsField", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *googleAdsFieldServiceClient) SearchGoogleAdsFields(ctx context.Context, in *SearchGoogleAdsFieldsRequest, opts ...grpc.CallOption) (*SearchGoogleAdsFieldsResponse, error) { + out := new(SearchGoogleAdsFieldsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GoogleAdsFieldService/SearchGoogleAdsFields", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GoogleAdsFieldServiceServer is the server API for GoogleAdsFieldService service. +type GoogleAdsFieldServiceServer interface { + // Returns just the requested field. + GetGoogleAdsField(context.Context, *GetGoogleAdsFieldRequest) (*resources.GoogleAdsField, error) + // Returns all fields that match the search query. + SearchGoogleAdsFields(context.Context, *SearchGoogleAdsFieldsRequest) (*SearchGoogleAdsFieldsResponse, error) +} + +func RegisterGoogleAdsFieldServiceServer(s *grpc.Server, srv GoogleAdsFieldServiceServer) { + s.RegisterService(&_GoogleAdsFieldService_serviceDesc, srv) +} + +func _GoogleAdsFieldService_GetGoogleAdsField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGoogleAdsFieldRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GoogleAdsFieldServiceServer).GetGoogleAdsField(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GoogleAdsFieldService/GetGoogleAdsField", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GoogleAdsFieldServiceServer).GetGoogleAdsField(ctx, req.(*GetGoogleAdsFieldRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GoogleAdsFieldService_SearchGoogleAdsFields_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchGoogleAdsFieldsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GoogleAdsFieldServiceServer).SearchGoogleAdsFields(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GoogleAdsFieldService/SearchGoogleAdsFields", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GoogleAdsFieldServiceServer).SearchGoogleAdsFields(ctx, req.(*SearchGoogleAdsFieldsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GoogleAdsFieldService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GoogleAdsFieldService", + HandlerType: (*GoogleAdsFieldServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGoogleAdsField", + 
Handler: _GoogleAdsFieldService_GetGoogleAdsField_Handler, + }, + { + MethodName: "SearchGoogleAdsFields", + Handler: _GoogleAdsFieldService_SearchGoogleAdsFields_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/google_ads_field_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/google_ads_field_service.proto", fileDescriptor_google_ads_field_service_c68db3ae861d47e1) +} + +var fileDescriptor_google_ads_field_service_c68db3ae861d47e1 = []byte{ + // 537 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x41, 0x8b, 0xd3, 0x40, + 0x14, 0x26, 0x29, 0xab, 0xee, 0xe8, 0x22, 0x3b, 0xba, 0x50, 0xe2, 0x2e, 0x96, 0xb8, 0xab, 0x65, + 0xc1, 0x09, 0x59, 0x2f, 0x32, 0xa2, 0x25, 0x2b, 0x58, 0x41, 0x90, 0xd2, 0x4a, 0x0f, 0x52, 0x08, + 0x63, 0xf3, 0x8c, 0xc1, 0x76, 0x26, 0x9b, 0x99, 0x16, 0x5d, 0xf1, 0xa0, 0x77, 0x4f, 0xfe, 0x03, + 0x8f, 0xde, 0xfc, 0x05, 0x5e, 0x3c, 0x79, 0xf5, 0x2f, 0x78, 0xf2, 0x57, 0xc8, 0x64, 0x3a, 0xc5, + 0xee, 0x36, 0x16, 0xbd, 0x4d, 0xde, 0xf7, 0xbe, 0xef, 0x7b, 0xf3, 0xde, 0x9b, 0xa0, 0x56, 0x2a, + 0x44, 0x3a, 0x82, 0x80, 0x25, 0x32, 0x30, 0x47, 0x7d, 0x9a, 0x86, 0x81, 0x84, 0x62, 0x9a, 0x0d, + 0xc1, 0x46, 0x63, 0x96, 0xc8, 0xf8, 0x79, 0x06, 0xa3, 0x24, 0x9e, 0x21, 0x24, 0x2f, 0x84, 0x12, + 0xb8, 0x61, 0x70, 0xc2, 0x12, 0x49, 0xe6, 0x02, 0x64, 0x1a, 0x12, 0x2b, 0xe0, 0xdd, 0xae, 0xb2, + 0x28, 0x40, 0x8a, 0x49, 0xb1, 0xcc, 0xc3, 0x68, 0x7b, 0xdb, 0x96, 0x99, 0x67, 0x01, 0xe3, 0x5c, + 0x28, 0xa6, 0x32, 0xc1, 0xa5, 0x41, 0xfd, 0x16, 0xaa, 0xb7, 0x41, 0xb5, 0xcb, 0x94, 0x28, 0x91, + 0x0f, 0x34, 0xb1, 0x0b, 0x47, 0x13, 0x90, 0x0a, 0x5f, 0x43, 0x1b, 0x56, 0x3d, 0xe6, 0x6c, 0x0c, + 0x75, 0xa7, 0xe1, 0x34, 0xd7, 0xbb, 0x17, 0x6c, 0xf0, 0x31, 0x1b, 0x83, 0x9f, 0xa3, 0xed, 0x1e, + 0xb0, 0x62, 0xf8, 0x62, 0x51, 0x43, 0x5a, 0x91, 0xcb, 0x68, 0xed, 0x68, 0x02, 0xc5, 0xeb, 0x19, + 0xd9, 0x7c, 0xe0, 0x1d, 0x84, 0x72, 0x96, 0x42, 0xac, 0xc4, 0x4b, 0xe0, 0x75, 0xb7, 0x84, 0xd6, + 0x75, 0xe4, 0x89, 0x0e, 0xe0, 0x2b, 0xa8, 0xfc, 0x88, 0x65, 0x76, 0x0c, 0xf5, 0x5a, 0xc3, 0x69, + 0xae, 0x75, 0xcf, 0xe9, 0x40, 0x2f, 0x3b, 0x06, 0xff, 0x9b, 0x83, 0x76, 0x2a, 0x2c, 0x65, 0x2e, + 0xb8, 0x04, 0xfc, 0x08, 0x9d, 0x2d, 0x40, 0x4e, 0x46, 0x4a, 0xd6, 0x9d, 0x46, 0xad, 0x79, 0xfe, + 0x20, 0x24, 0x55, 0x0d, 0x9e, 0xb7, 0x8f, 0x9c, 0xe8, 0x81, 0x55, 0xc0, 0xd7, 0xd1, 0x45, 0x0e, + 0xaf, 0x54, 0x7c, 0xaa, 0xde, 0x0d, 0x1d, 0xee, 0xcc, 0x6b, 0x26, 0xe8, 0x92, 0x12, 0x8a, 0x8d, + 0xe2, 0x19, 0x31, 0x1e, 0x8a, 0x09, 0x57, 0x65, 0xf5, 0xb5, 0xee, 0x66, 0x09, 0x75, 0x0d, 0x72, + 0x5f, 0x03, 0x07, 0x1f, 0x6a, 0x68, 0x6b, 0xd1, 0xb3, 0x67, 0x86, 0x8d, 0xbf, 0x38, 0x68, 0xf3, + 0xd4, 0x50, 0x30, 0x25, 0xab, 0x96, 0x84, 0x54, 0x4d, 0xd2, 0xfb, 0xf7, 0xfb, 0xfb, 0x37, 0xdf, + 0xff, 0xf8, 0xf9, 0xd1, 0xbd, 0x81, 0xf7, 0xf4, 0x92, 0xbd, 0x59, 0xd8, 0x83, 0xbb, 0xe9, 0x62, + 0xdf, 0x83, 0xfd, 0xb7, 0xf8, 0xab, 0x83, 0xb6, 0x96, 0x0e, 0x05, 0xdf, 0x5b, 0x5d, 0xf7, 0xdf, + 0x16, 0xc8, 0x6b, 0xfd, 0x37, 0xdf, 0x6c, 0x83, 0xbf, 0x57, 0xde, 0xe4, 0xaa, 0xef, 0xe9, 0x9b, + 0x9c, 0x28, 0x9d, 0xca, 0x92, 0x4a, 0x9d, 0xfd, 0xc3, 0x77, 0x2e, 0xda, 0x1d, 0x8a, 0xf1, 0x4a, + 0xb7, 0x43, 0x6f, 0xe9, 0xd4, 0x3a, 0xfa, 0x39, 0x75, 0x9c, 0xa7, 0x0f, 0x67, 0xfc, 0x54, 0x8c, + 0x18, 0x4f, 0x89, 0x28, 0xd2, 0x20, 0x05, 0x5e, 0x3e, 0x36, 0xfb, 0x70, 0xf3, 0x4c, 0x56, 0xff, + 0x2a, 0xee, 0xd8, 0xc3, 0x27, 0xb7, 0xd6, 0x8e, 0xa2, 0xcf, 0x6e, 0xc3, 0xd8, 0x91, 0x28, 0xf9, + 0x63, 0x46, 0xa4, 0x1f, 
0x92, 0x99, 0xb1, 0xfc, 0x6e, 0x53, 0x06, 0x51, 0x22, 0x07, 0xf3, 0x94, + 0x41, 0x3f, 0x1c, 0xd8, 0x94, 0x5f, 0xee, 0xae, 0x89, 0x53, 0x1a, 0x25, 0x92, 0xd2, 0x79, 0x12, + 0xa5, 0xfd, 0x90, 0x52, 0x9b, 0xf6, 0xec, 0x4c, 0x59, 0xe7, 0xad, 0xdf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xea, 0xe2, 0xe3, 0x8f, 0xd1, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_service.pb.go new file mode 100644 index 0000000..c42d5ac --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/google_ads_service.pb.go @@ -0,0 +1,4322 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/google_ads_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GoogleAdsService.Search][google.ads.googleads.v1.services.GoogleAdsService.Search]. +type SearchGoogleAdsRequest struct { + // The ID of the customer being queried. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The query string. + Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + // Token of the page to retrieve. If not specified, the first + // page of results will be returned. Use the value obtained from + // `next_page_token` in the previous response in order to request + // the next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Number of elements to retrieve in a single page. + // When too large a page is requested, the server may decide to + // further limit the number of returned resources. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If true, the request is validated but not executed. 
+ ValidateOnly bool `protobuf:"varint,5,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchGoogleAdsRequest) Reset() { *m = SearchGoogleAdsRequest{} } +func (m *SearchGoogleAdsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchGoogleAdsRequest) ProtoMessage() {} +func (*SearchGoogleAdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{0} +} +func (m *SearchGoogleAdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchGoogleAdsRequest.Unmarshal(m, b) +} +func (m *SearchGoogleAdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchGoogleAdsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchGoogleAdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchGoogleAdsRequest.Merge(dst, src) +} +func (m *SearchGoogleAdsRequest) XXX_Size() int { + return xxx_messageInfo_SearchGoogleAdsRequest.Size(m) +} +func (m *SearchGoogleAdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchGoogleAdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchGoogleAdsRequest proto.InternalMessageInfo + +func (m *SearchGoogleAdsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *SearchGoogleAdsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *SearchGoogleAdsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchGoogleAdsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchGoogleAdsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// Response message for [GoogleAdsService.Search][google.ads.googleads.v1.services.GoogleAdsService.Search]. +type SearchGoogleAdsResponse struct { + // The list of rows that matched the query. + Results []*GoogleAdsRow `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Pagination token used to retrieve the next page of results. + // Pass the content of this string as the `page_token` attribute of + // the next request. `next_page_token` is not returned for the last + // page. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Total number of results that match the query ignoring the LIMIT + // clause. + TotalResultsCount int64 `protobuf:"varint,3,opt,name=total_results_count,json=totalResultsCount,proto3" json:"total_results_count,omitempty"` + // FieldMask that represents what fields were requested by the user. 
+ FieldMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchGoogleAdsResponse) Reset() { *m = SearchGoogleAdsResponse{} } +func (m *SearchGoogleAdsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchGoogleAdsResponse) ProtoMessage() {} +func (*SearchGoogleAdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{1} +} +func (m *SearchGoogleAdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchGoogleAdsResponse.Unmarshal(m, b) +} +func (m *SearchGoogleAdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchGoogleAdsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchGoogleAdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchGoogleAdsResponse.Merge(dst, src) +} +func (m *SearchGoogleAdsResponse) XXX_Size() int { + return xxx_messageInfo_SearchGoogleAdsResponse.Size(m) +} +func (m *SearchGoogleAdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchGoogleAdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchGoogleAdsResponse proto.InternalMessageInfo + +func (m *SearchGoogleAdsResponse) GetResults() []*GoogleAdsRow { + if m != nil { + return m.Results + } + return nil +} + +func (m *SearchGoogleAdsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *SearchGoogleAdsResponse) GetTotalResultsCount() int64 { + if m != nil { + return m.TotalResultsCount + } + return 0 +} + +func (m *SearchGoogleAdsResponse) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +// A returned row from the query. +type GoogleAdsRow struct { + // The account budget in the query. + AccountBudget *resources.AccountBudget `protobuf:"bytes,42,opt,name=account_budget,json=accountBudget,proto3" json:"account_budget,omitempty"` + // The account budget proposal referenced in the query. + AccountBudgetProposal *resources.AccountBudgetProposal `protobuf:"bytes,43,opt,name=account_budget_proposal,json=accountBudgetProposal,proto3" json:"account_budget_proposal,omitempty"` + // The ad group referenced in the query. + AdGroup *resources.AdGroup `protobuf:"bytes,3,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The ad referenced in the query. + AdGroupAd *resources.AdGroupAd `protobuf:"bytes,16,opt,name=ad_group_ad,json=adGroupAd,proto3" json:"ad_group_ad,omitempty"` + // The ad group ad label referenced in the query. + AdGroupAdLabel *resources.AdGroupAdLabel `protobuf:"bytes,120,opt,name=ad_group_ad_label,json=adGroupAdLabel,proto3" json:"ad_group_ad_label,omitempty"` + // The ad group audience view referenced in the query. + AdGroupAudienceView *resources.AdGroupAudienceView `protobuf:"bytes,57,opt,name=ad_group_audience_view,json=adGroupAudienceView,proto3" json:"ad_group_audience_view,omitempty"` + // The bid modifier referenced in the query. + AdGroupBidModifier *resources.AdGroupBidModifier `protobuf:"bytes,24,opt,name=ad_group_bid_modifier,json=adGroupBidModifier,proto3" json:"ad_group_bid_modifier,omitempty"` + // The criterion referenced in the query. 
+ AdGroupCriterion *resources.AdGroupCriterion `protobuf:"bytes,17,opt,name=ad_group_criterion,json=adGroupCriterion,proto3" json:"ad_group_criterion,omitempty"` + // The ad group criterion label referenced in the query. + AdGroupCriterionLabel *resources.AdGroupCriterionLabel `protobuf:"bytes,121,opt,name=ad_group_criterion_label,json=adGroupCriterionLabel,proto3" json:"ad_group_criterion_label,omitempty"` + // The ad group criterion simulation referenced in the query. + AdGroupCriterionSimulation *resources.AdGroupCriterionSimulation `protobuf:"bytes,110,opt,name=ad_group_criterion_simulation,json=adGroupCriterionSimulation,proto3" json:"ad_group_criterion_simulation,omitempty"` + // The ad group extension setting referenced in the query. + AdGroupExtensionSetting *resources.AdGroupExtensionSetting `protobuf:"bytes,112,opt,name=ad_group_extension_setting,json=adGroupExtensionSetting,proto3" json:"ad_group_extension_setting,omitempty"` + // The ad group feed referenced in the query. + AdGroupFeed *resources.AdGroupFeed `protobuf:"bytes,67,opt,name=ad_group_feed,json=adGroupFeed,proto3" json:"ad_group_feed,omitempty"` + // The ad group label referenced in the query. + AdGroupLabel *resources.AdGroupLabel `protobuf:"bytes,115,opt,name=ad_group_label,json=adGroupLabel,proto3" json:"ad_group_label,omitempty"` + // The ad group simulation referenced in the query. + AdGroupSimulation *resources.AdGroupSimulation `protobuf:"bytes,107,opt,name=ad_group_simulation,json=adGroupSimulation,proto3" json:"ad_group_simulation,omitempty"` + // The ad parameter referenced in the query. + AdParameter *resources.AdParameter `protobuf:"bytes,130,opt,name=ad_parameter,json=adParameter,proto3" json:"ad_parameter,omitempty"` + // The age range view referenced in the query. + AgeRangeView *resources.AgeRangeView `protobuf:"bytes,48,opt,name=age_range_view,json=ageRangeView,proto3" json:"age_range_view,omitempty"` + // The ad schedule view referenced in the query. + AdScheduleView *resources.AdScheduleView `protobuf:"bytes,89,opt,name=ad_schedule_view,json=adScheduleView,proto3" json:"ad_schedule_view,omitempty"` + // The domain category referenced in the query. + DomainCategory *resources.DomainCategory `protobuf:"bytes,91,opt,name=domain_category,json=domainCategory,proto3" json:"domain_category,omitempty"` + // The asset referenced in the query. + Asset *resources.Asset `protobuf:"bytes,105,opt,name=asset,proto3" json:"asset,omitempty"` + // The bidding strategy referenced in the query. + BiddingStrategy *resources.BiddingStrategy `protobuf:"bytes,18,opt,name=bidding_strategy,json=biddingStrategy,proto3" json:"bidding_strategy,omitempty"` + // The billing setup referenced in the query. + BillingSetup *resources.BillingSetup `protobuf:"bytes,41,opt,name=billing_setup,json=billingSetup,proto3" json:"billing_setup,omitempty"` + // The campaign budget referenced in the query. + CampaignBudget *resources.CampaignBudget `protobuf:"bytes,19,opt,name=campaign_budget,json=campaignBudget,proto3" json:"campaign_budget,omitempty"` + // The campaign referenced in the query. + Campaign *resources.Campaign `protobuf:"bytes,2,opt,name=campaign,proto3" json:"campaign,omitempty"` + // The campaign audience view referenced in the query. + CampaignAudienceView *resources.CampaignAudienceView `protobuf:"bytes,69,opt,name=campaign_audience_view,json=campaignAudienceView,proto3" json:"campaign_audience_view,omitempty"` + // The campaign bid modifier referenced in the query. 
+ CampaignBidModifier *resources.CampaignBidModifier `protobuf:"bytes,26,opt,name=campaign_bid_modifier,json=campaignBidModifier,proto3" json:"campaign_bid_modifier,omitempty"` + // The campaign criterion referenced in the query. + CampaignCriterion *resources.CampaignCriterion `protobuf:"bytes,20,opt,name=campaign_criterion,json=campaignCriterion,proto3" json:"campaign_criterion,omitempty"` + // The campaign criterion simulation referenced in the query. + CampaignCriterionSimulation *resources.CampaignCriterionSimulation `protobuf:"bytes,111,opt,name=campaign_criterion_simulation,json=campaignCriterionSimulation,proto3" json:"campaign_criterion_simulation,omitempty"` + // The campaign extension setting referenced in the query. + CampaignExtensionSetting *resources.CampaignExtensionSetting `protobuf:"bytes,113,opt,name=campaign_extension_setting,json=campaignExtensionSetting,proto3" json:"campaign_extension_setting,omitempty"` + // The campaign feed referenced in the query. + CampaignFeed *resources.CampaignFeed `protobuf:"bytes,63,opt,name=campaign_feed,json=campaignFeed,proto3" json:"campaign_feed,omitempty"` + // The campaign label referenced in the query. + CampaignLabel *resources.CampaignLabel `protobuf:"bytes,108,opt,name=campaign_label,json=campaignLabel,proto3" json:"campaign_label,omitempty"` + // Campaign Shared Set referenced in AWQL query. + CampaignSharedSet *resources.CampaignSharedSet `protobuf:"bytes,30,opt,name=campaign_shared_set,json=campaignSharedSet,proto3" json:"campaign_shared_set,omitempty"` + // The carrier constant referenced in the query. + CarrierConstant *resources.CarrierConstant `protobuf:"bytes,66,opt,name=carrier_constant,json=carrierConstant,proto3" json:"carrier_constant,omitempty"` + // The ChangeStatus referenced in the query. + ChangeStatus *resources.ChangeStatus `protobuf:"bytes,37,opt,name=change_status,json=changeStatus,proto3" json:"change_status,omitempty"` + // The conversion action referenced in the query. + ConversionAction *resources.ConversionAction `protobuf:"bytes,103,opt,name=conversion_action,json=conversionAction,proto3" json:"conversion_action,omitempty"` + // The ClickView referenced in the query. + ClickView *resources.ClickView `protobuf:"bytes,122,opt,name=click_view,json=clickView,proto3" json:"click_view,omitempty"` + // The CustomInterest referenced in the query. + CustomInterest *resources.CustomInterest `protobuf:"bytes,104,opt,name=custom_interest,json=customInterest,proto3" json:"custom_interest,omitempty"` + // The customer referenced in the query. + Customer *resources.Customer `protobuf:"bytes,1,opt,name=customer,proto3" json:"customer,omitempty"` + // The CustomerManagerLink referenced in the query. + CustomerManagerLink *resources.CustomerManagerLink `protobuf:"bytes,61,opt,name=customer_manager_link,json=customerManagerLink,proto3" json:"customer_manager_link,omitempty"` + // The CustomerClientLink referenced in the query. + CustomerClientLink *resources.CustomerClientLink `protobuf:"bytes,62,opt,name=customer_client_link,json=customerClientLink,proto3" json:"customer_client_link,omitempty"` + // The CustomerClient referenced in the query. + CustomerClient *resources.CustomerClient `protobuf:"bytes,70,opt,name=customer_client,json=customerClient,proto3" json:"customer_client,omitempty"` + // The customer extension setting referenced in the query. 
+ CustomerExtensionSetting *resources.CustomerExtensionSetting `protobuf:"bytes,114,opt,name=customer_extension_setting,json=customerExtensionSetting,proto3" json:"customer_extension_setting,omitempty"` + // The customer feed referenced in the query. + CustomerFeed *resources.CustomerFeed `protobuf:"bytes,64,opt,name=customer_feed,json=customerFeed,proto3" json:"customer_feed,omitempty"` + // The customer label referenced in the query. + CustomerLabel *resources.CustomerLabel `protobuf:"bytes,124,opt,name=customer_label,json=customerLabel,proto3" json:"customer_label,omitempty"` + // The customer negative criterion referenced in the query. + CustomerNegativeCriterion *resources.CustomerNegativeCriterion `protobuf:"bytes,88,opt,name=customer_negative_criterion,json=customerNegativeCriterion,proto3" json:"customer_negative_criterion,omitempty"` + // The detail placement view referenced in the query. + DetailPlacementView *resources.DetailPlacementView `protobuf:"bytes,118,opt,name=detail_placement_view,json=detailPlacementView,proto3" json:"detail_placement_view,omitempty"` + // The display keyword view referenced in the query. + DisplayKeywordView *resources.DisplayKeywordView `protobuf:"bytes,47,opt,name=display_keyword_view,json=displayKeywordView,proto3" json:"display_keyword_view,omitempty"` + // The dynamic search ads search term view referenced in the query. + DynamicSearchAdsSearchTermView *resources.DynamicSearchAdsSearchTermView `protobuf:"bytes,106,opt,name=dynamic_search_ads_search_term_view,json=dynamicSearchAdsSearchTermView,proto3" json:"dynamic_search_ads_search_term_view,omitempty"` + // The expanded landing page view referenced in the query. + ExpandedLandingPageView *resources.ExpandedLandingPageView `protobuf:"bytes,128,opt,name=expanded_landing_page_view,json=expandedLandingPageView,proto3" json:"expanded_landing_page_view,omitempty"` + // The extension feed item referenced in the query. + ExtensionFeedItem *resources.ExtensionFeedItem `protobuf:"bytes,85,opt,name=extension_feed_item,json=extensionFeedItem,proto3" json:"extension_feed_item,omitempty"` + // The feed referenced in the query. + Feed *resources.Feed `protobuf:"bytes,46,opt,name=feed,proto3" json:"feed,omitempty"` + // The feed item referenced in the query. + FeedItem *resources.FeedItem `protobuf:"bytes,50,opt,name=feed_item,json=feedItem,proto3" json:"feed_item,omitempty"` + // The feed item target referenced in the query. + FeedItemTarget *resources.FeedItemTarget `protobuf:"bytes,116,opt,name=feed_item_target,json=feedItemTarget,proto3" json:"feed_item_target,omitempty"` + // The feed mapping referenced in the query. + FeedMapping *resources.FeedMapping `protobuf:"bytes,58,opt,name=feed_mapping,json=feedMapping,proto3" json:"feed_mapping,omitempty"` + // The feed placeholder view referenced in the query. + FeedPlaceholderView *resources.FeedPlaceholderView `protobuf:"bytes,97,opt,name=feed_placeholder_view,json=feedPlaceholderView,proto3" json:"feed_placeholder_view,omitempty"` + // The gender view referenced in the query. + GenderView *resources.GenderView `protobuf:"bytes,40,opt,name=gender_view,json=genderView,proto3" json:"gender_view,omitempty"` + // The geo target constant referenced in the query. + GeoTargetConstant *resources.GeoTargetConstant `protobuf:"bytes,23,opt,name=geo_target_constant,json=geoTargetConstant,proto3" json:"geo_target_constant,omitempty"` + // The geographic view referenced in the query. 
+ GeographicView *resources.GeographicView `protobuf:"bytes,125,opt,name=geographic_view,json=geographicView,proto3" json:"geographic_view,omitempty"` + // The group placement view referenced in the query. + GroupPlacementView *resources.GroupPlacementView `protobuf:"bytes,119,opt,name=group_placement_view,json=groupPlacementView,proto3" json:"group_placement_view,omitempty"` + // The hotel group view referenced in the query. + HotelGroupView *resources.HotelGroupView `protobuf:"bytes,51,opt,name=hotel_group_view,json=hotelGroupView,proto3" json:"hotel_group_view,omitempty"` + // The hotel performance view referenced in the query. + HotelPerformanceView *resources.HotelPerformanceView `protobuf:"bytes,71,opt,name=hotel_performance_view,json=hotelPerformanceView,proto3" json:"hotel_performance_view,omitempty"` + // The keyword view referenced in the query. + KeywordView *resources.KeywordView `protobuf:"bytes,21,opt,name=keyword_view,json=keywordView,proto3" json:"keyword_view,omitempty"` + // The keyword plan referenced in the query. + KeywordPlan *resources.KeywordPlan `protobuf:"bytes,32,opt,name=keyword_plan,json=keywordPlan,proto3" json:"keyword_plan,omitempty"` + // The keyword plan campaign referenced in the query. + KeywordPlanCampaign *resources.KeywordPlanCampaign `protobuf:"bytes,33,opt,name=keyword_plan_campaign,json=keywordPlanCampaign,proto3" json:"keyword_plan_campaign,omitempty"` + // The keyword plan negative keyword referenced in the query. + KeywordPlanNegativeKeyword *resources.KeywordPlanNegativeKeyword `protobuf:"bytes,34,opt,name=keyword_plan_negative_keyword,json=keywordPlanNegativeKeyword,proto3" json:"keyword_plan_negative_keyword,omitempty"` + // The keyword plan ad group referenced in the query. + KeywordPlanAdGroup *resources.KeywordPlanAdGroup `protobuf:"bytes,35,opt,name=keyword_plan_ad_group,json=keywordPlanAdGroup,proto3" json:"keyword_plan_ad_group,omitempty"` + // The keyword plan keyword referenced in the query. + KeywordPlanKeyword *resources.KeywordPlanKeyword `protobuf:"bytes,36,opt,name=keyword_plan_keyword,json=keywordPlanKeyword,proto3" json:"keyword_plan_keyword,omitempty"` + // The label referenced in the query. + Label *resources.Label `protobuf:"bytes,52,opt,name=label,proto3" json:"label,omitempty"` + // The landing page view referenced in the query. + LandingPageView *resources.LandingPageView `protobuf:"bytes,126,opt,name=landing_page_view,json=landingPageView,proto3" json:"landing_page_view,omitempty"` + // The language constant referenced in the query. + LanguageConstant *resources.LanguageConstant `protobuf:"bytes,55,opt,name=language_constant,json=languageConstant,proto3" json:"language_constant,omitempty"` + // The location view referenced in the query. + LocationView *resources.LocationView `protobuf:"bytes,123,opt,name=location_view,json=locationView,proto3" json:"location_view,omitempty"` + // The managed placement view referenced in the query. + ManagedPlacementView *resources.ManagedPlacementView `protobuf:"bytes,53,opt,name=managed_placement_view,json=managedPlacementView,proto3" json:"managed_placement_view,omitempty"` + // The media file referenced in the query. + MediaFile *resources.MediaFile `protobuf:"bytes,90,opt,name=media_file,json=mediaFile,proto3" json:"media_file,omitempty"` + // The mobile app category constant referenced in the query. 
+ MobileAppCategoryConstant *resources.MobileAppCategoryConstant `protobuf:"bytes,87,opt,name=mobile_app_category_constant,json=mobileAppCategoryConstant,proto3" json:"mobile_app_category_constant,omitempty"` + // The mobile device constant referenced in the query. + MobileDeviceConstant *resources.MobileDeviceConstant `protobuf:"bytes,98,opt,name=mobile_device_constant,json=mobileDeviceConstant,proto3" json:"mobile_device_constant,omitempty"` + // The mutate job referenced in the query. + MutateJob *resources.MutateJob `protobuf:"bytes,127,opt,name=mutate_job,json=mutateJob,proto3" json:"mutate_job,omitempty"` + // The operating system version constant referenced in the query. + OperatingSystemVersionConstant *resources.OperatingSystemVersionConstant `protobuf:"bytes,86,opt,name=operating_system_version_constant,json=operatingSystemVersionConstant,proto3" json:"operating_system_version_constant,omitempty"` + // The paid organic search term view referenced in the query. + PaidOrganicSearchTermView *resources.PaidOrganicSearchTermView `protobuf:"bytes,129,opt,name=paid_organic_search_term_view,json=paidOrganicSearchTermView,proto3" json:"paid_organic_search_term_view,omitempty"` + // The parental status view referenced in the query. + ParentalStatusView *resources.ParentalStatusView `protobuf:"bytes,45,opt,name=parental_status_view,json=parentalStatusView,proto3" json:"parental_status_view,omitempty"` + // The Product Bidding Category referenced in the query. + ProductBiddingCategoryConstant *resources.ProductBiddingCategoryConstant `protobuf:"bytes,109,opt,name=product_bidding_category_constant,json=productBiddingCategoryConstant,proto3" json:"product_bidding_category_constant,omitempty"` + // The product group view referenced in the query. + ProductGroupView *resources.ProductGroupView `protobuf:"bytes,54,opt,name=product_group_view,json=productGroupView,proto3" json:"product_group_view,omitempty"` + // The recommendation referenced in the query. + Recommendation *resources.Recommendation `protobuf:"bytes,22,opt,name=recommendation,proto3" json:"recommendation,omitempty"` + // The search term view referenced in the query. + SearchTermView *resources.SearchTermView `protobuf:"bytes,68,opt,name=search_term_view,json=searchTermView,proto3" json:"search_term_view,omitempty"` + // The shared set referenced in the query. + SharedCriterion *resources.SharedCriterion `protobuf:"bytes,29,opt,name=shared_criterion,json=sharedCriterion,proto3" json:"shared_criterion,omitempty"` + // The shared set referenced in the query. + SharedSet *resources.SharedSet `protobuf:"bytes,27,opt,name=shared_set,json=sharedSet,proto3" json:"shared_set,omitempty"` + // The shopping performance view referenced in the query. + ShoppingPerformanceView *resources.ShoppingPerformanceView `protobuf:"bytes,117,opt,name=shopping_performance_view,json=shoppingPerformanceView,proto3" json:"shopping_performance_view,omitempty"` + // The topic view referenced in the query. + TopicView *resources.TopicView `protobuf:"bytes,44,opt,name=topic_view,json=topicView,proto3" json:"topic_view,omitempty"` + // The user interest referenced in the query. + UserInterest *resources.UserInterest `protobuf:"bytes,59,opt,name=user_interest,json=userInterest,proto3" json:"user_interest,omitempty"` + // The user list referenced in the query. + UserList *resources.UserList `protobuf:"bytes,38,opt,name=user_list,json=userList,proto3" json:"user_list,omitempty"` + // The remarketing action referenced in the query. 
+ RemarketingAction *resources.RemarketingAction `protobuf:"bytes,60,opt,name=remarketing_action,json=remarketingAction,proto3" json:"remarketing_action,omitempty"` + // The topic constant referenced in the query. + TopicConstant *resources.TopicConstant `protobuf:"bytes,31,opt,name=topic_constant,json=topicConstant,proto3" json:"topic_constant,omitempty"` + // The video referenced in the query. + Video *resources.Video `protobuf:"bytes,39,opt,name=video,proto3" json:"video,omitempty"` + // The metrics. + Metrics *common.Metrics `protobuf:"bytes,4,opt,name=metrics,proto3" json:"metrics,omitempty"` + // The segments. + Segments *common.Segments `protobuf:"bytes,102,opt,name=segments,proto3" json:"segments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleAdsRow) Reset() { *m = GoogleAdsRow{} } +func (m *GoogleAdsRow) String() string { return proto.CompactTextString(m) } +func (*GoogleAdsRow) ProtoMessage() {} +func (*GoogleAdsRow) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{2} +} +func (m *GoogleAdsRow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleAdsRow.Unmarshal(m, b) +} +func (m *GoogleAdsRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleAdsRow.Marshal(b, m, deterministic) +} +func (dst *GoogleAdsRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleAdsRow.Merge(dst, src) +} +func (m *GoogleAdsRow) XXX_Size() int { + return xxx_messageInfo_GoogleAdsRow.Size(m) +} +func (m *GoogleAdsRow) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleAdsRow.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleAdsRow proto.InternalMessageInfo + +func (m *GoogleAdsRow) GetAccountBudget() *resources.AccountBudget { + if m != nil { + return m.AccountBudget + } + return nil +} + +func (m *GoogleAdsRow) GetAccountBudgetProposal() *resources.AccountBudgetProposal { + if m != nil { + return m.AccountBudgetProposal + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroup() *resources.AdGroup { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupAd() *resources.AdGroupAd { + if m != nil { + return m.AdGroupAd + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupAdLabel() *resources.AdGroupAdLabel { + if m != nil { + return m.AdGroupAdLabel + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupAudienceView() *resources.AdGroupAudienceView { + if m != nil { + return m.AdGroupAudienceView + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupBidModifier() *resources.AdGroupBidModifier { + if m != nil { + return m.AdGroupBidModifier + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupCriterion() *resources.AdGroupCriterion { + if m != nil { + return m.AdGroupCriterion + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupCriterionLabel() *resources.AdGroupCriterionLabel { + if m != nil { + return m.AdGroupCriterionLabel + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupCriterionSimulation() *resources.AdGroupCriterionSimulation { + if m != nil { + return m.AdGroupCriterionSimulation + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupExtensionSetting() *resources.AdGroupExtensionSetting { + if m != nil { + return m.AdGroupExtensionSetting + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupFeed() *resources.AdGroupFeed { + if m != nil { + return m.AdGroupFeed + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupLabel() 
*resources.AdGroupLabel { + if m != nil { + return m.AdGroupLabel + } + return nil +} + +func (m *GoogleAdsRow) GetAdGroupSimulation() *resources.AdGroupSimulation { + if m != nil { + return m.AdGroupSimulation + } + return nil +} + +func (m *GoogleAdsRow) GetAdParameter() *resources.AdParameter { + if m != nil { + return m.AdParameter + } + return nil +} + +func (m *GoogleAdsRow) GetAgeRangeView() *resources.AgeRangeView { + if m != nil { + return m.AgeRangeView + } + return nil +} + +func (m *GoogleAdsRow) GetAdScheduleView() *resources.AdScheduleView { + if m != nil { + return m.AdScheduleView + } + return nil +} + +func (m *GoogleAdsRow) GetDomainCategory() *resources.DomainCategory { + if m != nil { + return m.DomainCategory + } + return nil +} + +func (m *GoogleAdsRow) GetAsset() *resources.Asset { + if m != nil { + return m.Asset + } + return nil +} + +func (m *GoogleAdsRow) GetBiddingStrategy() *resources.BiddingStrategy { + if m != nil { + return m.BiddingStrategy + } + return nil +} + +func (m *GoogleAdsRow) GetBillingSetup() *resources.BillingSetup { + if m != nil { + return m.BillingSetup + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignBudget() *resources.CampaignBudget { + if m != nil { + return m.CampaignBudget + } + return nil +} + +func (m *GoogleAdsRow) GetCampaign() *resources.Campaign { + if m != nil { + return m.Campaign + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignAudienceView() *resources.CampaignAudienceView { + if m != nil { + return m.CampaignAudienceView + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignBidModifier() *resources.CampaignBidModifier { + if m != nil { + return m.CampaignBidModifier + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignCriterion() *resources.CampaignCriterion { + if m != nil { + return m.CampaignCriterion + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignCriterionSimulation() *resources.CampaignCriterionSimulation { + if m != nil { + return m.CampaignCriterionSimulation + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignExtensionSetting() *resources.CampaignExtensionSetting { + if m != nil { + return m.CampaignExtensionSetting + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignFeed() *resources.CampaignFeed { + if m != nil { + return m.CampaignFeed + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignLabel() *resources.CampaignLabel { + if m != nil { + return m.CampaignLabel + } + return nil +} + +func (m *GoogleAdsRow) GetCampaignSharedSet() *resources.CampaignSharedSet { + if m != nil { + return m.CampaignSharedSet + } + return nil +} + +func (m *GoogleAdsRow) GetCarrierConstant() *resources.CarrierConstant { + if m != nil { + return m.CarrierConstant + } + return nil +} + +func (m *GoogleAdsRow) GetChangeStatus() *resources.ChangeStatus { + if m != nil { + return m.ChangeStatus + } + return nil +} + +func (m *GoogleAdsRow) GetConversionAction() *resources.ConversionAction { + if m != nil { + return m.ConversionAction + } + return nil +} + +func (m *GoogleAdsRow) GetClickView() *resources.ClickView { + if m != nil { + return m.ClickView + } + return nil +} + +func (m *GoogleAdsRow) GetCustomInterest() *resources.CustomInterest { + if m != nil { + return m.CustomInterest + } + return nil +} + +func (m *GoogleAdsRow) GetCustomer() *resources.Customer { + if m != nil { + return m.Customer + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerManagerLink() *resources.CustomerManagerLink { + if m != nil { + return m.CustomerManagerLink + } + return nil +} + +func (m *GoogleAdsRow) 
GetCustomerClientLink() *resources.CustomerClientLink { + if m != nil { + return m.CustomerClientLink + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerClient() *resources.CustomerClient { + if m != nil { + return m.CustomerClient + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerExtensionSetting() *resources.CustomerExtensionSetting { + if m != nil { + return m.CustomerExtensionSetting + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerFeed() *resources.CustomerFeed { + if m != nil { + return m.CustomerFeed + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerLabel() *resources.CustomerLabel { + if m != nil { + return m.CustomerLabel + } + return nil +} + +func (m *GoogleAdsRow) GetCustomerNegativeCriterion() *resources.CustomerNegativeCriterion { + if m != nil { + return m.CustomerNegativeCriterion + } + return nil +} + +func (m *GoogleAdsRow) GetDetailPlacementView() *resources.DetailPlacementView { + if m != nil { + return m.DetailPlacementView + } + return nil +} + +func (m *GoogleAdsRow) GetDisplayKeywordView() *resources.DisplayKeywordView { + if m != nil { + return m.DisplayKeywordView + } + return nil +} + +func (m *GoogleAdsRow) GetDynamicSearchAdsSearchTermView() *resources.DynamicSearchAdsSearchTermView { + if m != nil { + return m.DynamicSearchAdsSearchTermView + } + return nil +} + +func (m *GoogleAdsRow) GetExpandedLandingPageView() *resources.ExpandedLandingPageView { + if m != nil { + return m.ExpandedLandingPageView + } + return nil +} + +func (m *GoogleAdsRow) GetExtensionFeedItem() *resources.ExtensionFeedItem { + if m != nil { + return m.ExtensionFeedItem + } + return nil +} + +func (m *GoogleAdsRow) GetFeed() *resources.Feed { + if m != nil { + return m.Feed + } + return nil +} + +func (m *GoogleAdsRow) GetFeedItem() *resources.FeedItem { + if m != nil { + return m.FeedItem + } + return nil +} + +func (m *GoogleAdsRow) GetFeedItemTarget() *resources.FeedItemTarget { + if m != nil { + return m.FeedItemTarget + } + return nil +} + +func (m *GoogleAdsRow) GetFeedMapping() *resources.FeedMapping { + if m != nil { + return m.FeedMapping + } + return nil +} + +func (m *GoogleAdsRow) GetFeedPlaceholderView() *resources.FeedPlaceholderView { + if m != nil { + return m.FeedPlaceholderView + } + return nil +} + +func (m *GoogleAdsRow) GetGenderView() *resources.GenderView { + if m != nil { + return m.GenderView + } + return nil +} + +func (m *GoogleAdsRow) GetGeoTargetConstant() *resources.GeoTargetConstant { + if m != nil { + return m.GeoTargetConstant + } + return nil +} + +func (m *GoogleAdsRow) GetGeographicView() *resources.GeographicView { + if m != nil { + return m.GeographicView + } + return nil +} + +func (m *GoogleAdsRow) GetGroupPlacementView() *resources.GroupPlacementView { + if m != nil { + return m.GroupPlacementView + } + return nil +} + +func (m *GoogleAdsRow) GetHotelGroupView() *resources.HotelGroupView { + if m != nil { + return m.HotelGroupView + } + return nil +} + +func (m *GoogleAdsRow) GetHotelPerformanceView() *resources.HotelPerformanceView { + if m != nil { + return m.HotelPerformanceView + } + return nil +} + +func (m *GoogleAdsRow) GetKeywordView() *resources.KeywordView { + if m != nil { + return m.KeywordView + } + return nil +} + +func (m *GoogleAdsRow) GetKeywordPlan() *resources.KeywordPlan { + if m != nil { + return m.KeywordPlan + } + return nil +} + +func (m *GoogleAdsRow) GetKeywordPlanCampaign() *resources.KeywordPlanCampaign { + if m != nil { + return m.KeywordPlanCampaign + } + return nil +} + +func (m 
*GoogleAdsRow) GetKeywordPlanNegativeKeyword() *resources.KeywordPlanNegativeKeyword { + if m != nil { + return m.KeywordPlanNegativeKeyword + } + return nil +} + +func (m *GoogleAdsRow) GetKeywordPlanAdGroup() *resources.KeywordPlanAdGroup { + if m != nil { + return m.KeywordPlanAdGroup + } + return nil +} + +func (m *GoogleAdsRow) GetKeywordPlanKeyword() *resources.KeywordPlanKeyword { + if m != nil { + return m.KeywordPlanKeyword + } + return nil +} + +func (m *GoogleAdsRow) GetLabel() *resources.Label { + if m != nil { + return m.Label + } + return nil +} + +func (m *GoogleAdsRow) GetLandingPageView() *resources.LandingPageView { + if m != nil { + return m.LandingPageView + } + return nil +} + +func (m *GoogleAdsRow) GetLanguageConstant() *resources.LanguageConstant { + if m != nil { + return m.LanguageConstant + } + return nil +} + +func (m *GoogleAdsRow) GetLocationView() *resources.LocationView { + if m != nil { + return m.LocationView + } + return nil +} + +func (m *GoogleAdsRow) GetManagedPlacementView() *resources.ManagedPlacementView { + if m != nil { + return m.ManagedPlacementView + } + return nil +} + +func (m *GoogleAdsRow) GetMediaFile() *resources.MediaFile { + if m != nil { + return m.MediaFile + } + return nil +} + +func (m *GoogleAdsRow) GetMobileAppCategoryConstant() *resources.MobileAppCategoryConstant { + if m != nil { + return m.MobileAppCategoryConstant + } + return nil +} + +func (m *GoogleAdsRow) GetMobileDeviceConstant() *resources.MobileDeviceConstant { + if m != nil { + return m.MobileDeviceConstant + } + return nil +} + +func (m *GoogleAdsRow) GetMutateJob() *resources.MutateJob { + if m != nil { + return m.MutateJob + } + return nil +} + +func (m *GoogleAdsRow) GetOperatingSystemVersionConstant() *resources.OperatingSystemVersionConstant { + if m != nil { + return m.OperatingSystemVersionConstant + } + return nil +} + +func (m *GoogleAdsRow) GetPaidOrganicSearchTermView() *resources.PaidOrganicSearchTermView { + if m != nil { + return m.PaidOrganicSearchTermView + } + return nil +} + +func (m *GoogleAdsRow) GetParentalStatusView() *resources.ParentalStatusView { + if m != nil { + return m.ParentalStatusView + } + return nil +} + +func (m *GoogleAdsRow) GetProductBiddingCategoryConstant() *resources.ProductBiddingCategoryConstant { + if m != nil { + return m.ProductBiddingCategoryConstant + } + return nil +} + +func (m *GoogleAdsRow) GetProductGroupView() *resources.ProductGroupView { + if m != nil { + return m.ProductGroupView + } + return nil +} + +func (m *GoogleAdsRow) GetRecommendation() *resources.Recommendation { + if m != nil { + return m.Recommendation + } + return nil +} + +func (m *GoogleAdsRow) GetSearchTermView() *resources.SearchTermView { + if m != nil { + return m.SearchTermView + } + return nil +} + +func (m *GoogleAdsRow) GetSharedCriterion() *resources.SharedCriterion { + if m != nil { + return m.SharedCriterion + } + return nil +} + +func (m *GoogleAdsRow) GetSharedSet() *resources.SharedSet { + if m != nil { + return m.SharedSet + } + return nil +} + +func (m *GoogleAdsRow) GetShoppingPerformanceView() *resources.ShoppingPerformanceView { + if m != nil { + return m.ShoppingPerformanceView + } + return nil +} + +func (m *GoogleAdsRow) GetTopicView() *resources.TopicView { + if m != nil { + return m.TopicView + } + return nil +} + +func (m *GoogleAdsRow) GetUserInterest() *resources.UserInterest { + if m != nil { + return m.UserInterest + } + return nil +} + +func (m *GoogleAdsRow) GetUserList() *resources.UserList { + if m != nil { + 
return m.UserList + } + return nil +} + +func (m *GoogleAdsRow) GetRemarketingAction() *resources.RemarketingAction { + if m != nil { + return m.RemarketingAction + } + return nil +} + +func (m *GoogleAdsRow) GetTopicConstant() *resources.TopicConstant { + if m != nil { + return m.TopicConstant + } + return nil +} + +func (m *GoogleAdsRow) GetVideo() *resources.Video { + if m != nil { + return m.Video + } + return nil +} + +func (m *GoogleAdsRow) GetMetrics() *common.Metrics { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *GoogleAdsRow) GetSegments() *common.Segments { + if m != nil { + return m.Segments + } + return nil +} + +// Request message for [GoogleAdsService.Mutate][google.ads.googleads.v1.services.GoogleAdsService.Mutate]. +type MutateGoogleAdsRequest struct { + // The ID of the customer whose resources are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual resources. + MutateOperations []*MutateOperation `protobuf:"bytes,2,rep,name=mutate_operations,json=mutateOperations,proto3" json:"mutate_operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateGoogleAdsRequest) Reset() { *m = MutateGoogleAdsRequest{} } +func (m *MutateGoogleAdsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateGoogleAdsRequest) ProtoMessage() {} +func (*MutateGoogleAdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{3} +} +func (m *MutateGoogleAdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateGoogleAdsRequest.Unmarshal(m, b) +} +func (m *MutateGoogleAdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateGoogleAdsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateGoogleAdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateGoogleAdsRequest.Merge(dst, src) +} +func (m *MutateGoogleAdsRequest) XXX_Size() int { + return xxx_messageInfo_MutateGoogleAdsRequest.Size(m) +} +func (m *MutateGoogleAdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateGoogleAdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateGoogleAdsRequest proto.InternalMessageInfo + +func (m *MutateGoogleAdsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateGoogleAdsRequest) GetMutateOperations() []*MutateOperation { + if m != nil { + return m.MutateOperations + } + return nil +} + +func (m *MutateGoogleAdsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateGoogleAdsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// Response message for 
[GoogleAdsService.Mutate][google.ads.googleads.v1.services.GoogleAdsService.Mutate]. +type MutateGoogleAdsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All responses for the mutate. + MutateOperationResponses []*MutateOperationResponse `protobuf:"bytes,1,rep,name=mutate_operation_responses,json=mutateOperationResponses,proto3" json:"mutate_operation_responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateGoogleAdsResponse) Reset() { *m = MutateGoogleAdsResponse{} } +func (m *MutateGoogleAdsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateGoogleAdsResponse) ProtoMessage() {} +func (*MutateGoogleAdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{4} +} +func (m *MutateGoogleAdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateGoogleAdsResponse.Unmarshal(m, b) +} +func (m *MutateGoogleAdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateGoogleAdsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateGoogleAdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateGoogleAdsResponse.Merge(dst, src) +} +func (m *MutateGoogleAdsResponse) XXX_Size() int { + return xxx_messageInfo_MutateGoogleAdsResponse.Size(m) +} +func (m *MutateGoogleAdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateGoogleAdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateGoogleAdsResponse proto.InternalMessageInfo + +func (m *MutateGoogleAdsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateGoogleAdsResponse) GetMutateOperationResponses() []*MutateOperationResponse { + if m != nil { + return m.MutateOperationResponses + } + return nil +} + +// A single operation (create, update, remove) on a resource. +type MutateOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *MutateOperation_AdGroupAdLabelOperation + // *MutateOperation_AdGroupAdOperation + // *MutateOperation_AdGroupBidModifierOperation + // *MutateOperation_AdGroupCriterionLabelOperation + // *MutateOperation_AdGroupCriterionOperation + // *MutateOperation_AdGroupExtensionSettingOperation + // *MutateOperation_AdGroupFeedOperation + // *MutateOperation_AdGroupLabelOperation + // *MutateOperation_AdGroupOperation + // *MutateOperation_AdParameterOperation + // *MutateOperation_AssetOperation + // *MutateOperation_BiddingStrategyOperation + // *MutateOperation_CampaignBidModifierOperation + // *MutateOperation_CampaignBudgetOperation + // *MutateOperation_CampaignCriterionOperation + // *MutateOperation_CampaignExtensionSettingOperation + // *MutateOperation_CampaignFeedOperation + // *MutateOperation_CampaignLabelOperation + // *MutateOperation_CampaignOperation + // *MutateOperation_CampaignSharedSetOperation + // *MutateOperation_ConversionActionOperation + // *MutateOperation_CustomerExtensionSettingOperation + // *MutateOperation_CustomerFeedOperation + // *MutateOperation_CustomerLabelOperation + // *MutateOperation_CustomerNegativeCriterionOperation + // *MutateOperation_CustomerOperation + // *MutateOperation_ExtensionFeedItemOperation + // *MutateOperation_FeedItemOperation + // *MutateOperation_FeedItemTargetOperation + // *MutateOperation_FeedMappingOperation + // *MutateOperation_FeedOperation + // *MutateOperation_LabelOperation + // *MutateOperation_MediaFileOperation + // *MutateOperation_RemarketingActionOperation + // *MutateOperation_SharedCriterionOperation + // *MutateOperation_SharedSetOperation + // *MutateOperation_UserListOperation + Operation isMutateOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateOperation) Reset() { *m = MutateOperation{} } +func (m *MutateOperation) String() string { return proto.CompactTextString(m) } +func (*MutateOperation) ProtoMessage() {} +func (*MutateOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{5} +} +func (m *MutateOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateOperation.Unmarshal(m, b) +} +func (m *MutateOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateOperation.Marshal(b, m, deterministic) +} +func (dst *MutateOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateOperation.Merge(dst, src) +} +func (m *MutateOperation) XXX_Size() int { + return xxx_messageInfo_MutateOperation.Size(m) +} +func (m *MutateOperation) XXX_DiscardUnknown() { + xxx_messageInfo_MutateOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateOperation proto.InternalMessageInfo + +type isMutateOperation_Operation interface { + isMutateOperation_Operation() +} + +type MutateOperation_AdGroupAdLabelOperation struct { + AdGroupAdLabelOperation *AdGroupAdLabelOperation `protobuf:"bytes,17,opt,name=ad_group_ad_label_operation,json=adGroupAdLabelOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupAdOperation struct { + AdGroupAdOperation *AdGroupAdOperation `protobuf:"bytes,1,opt,name=ad_group_ad_operation,json=adGroupAdOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupBidModifierOperation struct { + AdGroupBidModifierOperation *AdGroupBidModifierOperation 
`protobuf:"bytes,2,opt,name=ad_group_bid_modifier_operation,json=adGroupBidModifierOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupCriterionLabelOperation struct { + AdGroupCriterionLabelOperation *AdGroupCriterionLabelOperation `protobuf:"bytes,18,opt,name=ad_group_criterion_label_operation,json=adGroupCriterionLabelOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupCriterionOperation struct { + AdGroupCriterionOperation *AdGroupCriterionOperation `protobuf:"bytes,3,opt,name=ad_group_criterion_operation,json=adGroupCriterionOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupExtensionSettingOperation struct { + AdGroupExtensionSettingOperation *AdGroupExtensionSettingOperation `protobuf:"bytes,19,opt,name=ad_group_extension_setting_operation,json=adGroupExtensionSettingOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupFeedOperation struct { + AdGroupFeedOperation *AdGroupFeedOperation `protobuf:"bytes,20,opt,name=ad_group_feed_operation,json=adGroupFeedOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupLabelOperation struct { + AdGroupLabelOperation *AdGroupLabelOperation `protobuf:"bytes,21,opt,name=ad_group_label_operation,json=adGroupLabelOperation,proto3,oneof"` +} + +type MutateOperation_AdGroupOperation struct { + AdGroupOperation *AdGroupOperation `protobuf:"bytes,5,opt,name=ad_group_operation,json=adGroupOperation,proto3,oneof"` +} + +type MutateOperation_AdParameterOperation struct { + AdParameterOperation *AdParameterOperation `protobuf:"bytes,22,opt,name=ad_parameter_operation,json=adParameterOperation,proto3,oneof"` +} + +type MutateOperation_AssetOperation struct { + AssetOperation *AssetOperation `protobuf:"bytes,23,opt,name=asset_operation,json=assetOperation,proto3,oneof"` +} + +type MutateOperation_BiddingStrategyOperation struct { + BiddingStrategyOperation *BiddingStrategyOperation `protobuf:"bytes,6,opt,name=bidding_strategy_operation,json=biddingStrategyOperation,proto3,oneof"` +} + +type MutateOperation_CampaignBidModifierOperation struct { + CampaignBidModifierOperation *CampaignBidModifierOperation `protobuf:"bytes,7,opt,name=campaign_bid_modifier_operation,json=campaignBidModifierOperation,proto3,oneof"` +} + +type MutateOperation_CampaignBudgetOperation struct { + CampaignBudgetOperation *CampaignBudgetOperation `protobuf:"bytes,8,opt,name=campaign_budget_operation,json=campaignBudgetOperation,proto3,oneof"` +} + +type MutateOperation_CampaignCriterionOperation struct { + CampaignCriterionOperation *CampaignCriterionOperation `protobuf:"bytes,13,opt,name=campaign_criterion_operation,json=campaignCriterionOperation,proto3,oneof"` +} + +type MutateOperation_CampaignExtensionSettingOperation struct { + CampaignExtensionSettingOperation *CampaignExtensionSettingOperation `protobuf:"bytes,26,opt,name=campaign_extension_setting_operation,json=campaignExtensionSettingOperation,proto3,oneof"` +} + +type MutateOperation_CampaignFeedOperation struct { + CampaignFeedOperation *CampaignFeedOperation `protobuf:"bytes,27,opt,name=campaign_feed_operation,json=campaignFeedOperation,proto3,oneof"` +} + +type MutateOperation_CampaignLabelOperation struct { + CampaignLabelOperation *CampaignLabelOperation `protobuf:"bytes,28,opt,name=campaign_label_operation,json=campaignLabelOperation,proto3,oneof"` +} + +type MutateOperation_CampaignOperation struct { + CampaignOperation *CampaignOperation `protobuf:"bytes,10,opt,name=campaign_operation,json=campaignOperation,proto3,oneof"` +} + +type MutateOperation_CampaignSharedSetOperation struct { + 
CampaignSharedSetOperation *CampaignSharedSetOperation `protobuf:"bytes,11,opt,name=campaign_shared_set_operation,json=campaignSharedSetOperation,proto3,oneof"` +} + +type MutateOperation_ConversionActionOperation struct { + ConversionActionOperation *ConversionActionOperation `protobuf:"bytes,12,opt,name=conversion_action_operation,json=conversionActionOperation,proto3,oneof"` +} + +type MutateOperation_CustomerExtensionSettingOperation struct { + CustomerExtensionSettingOperation *CustomerExtensionSettingOperation `protobuf:"bytes,30,opt,name=customer_extension_setting_operation,json=customerExtensionSettingOperation,proto3,oneof"` +} + +type MutateOperation_CustomerFeedOperation struct { + CustomerFeedOperation *CustomerFeedOperation `protobuf:"bytes,31,opt,name=customer_feed_operation,json=customerFeedOperation,proto3,oneof"` +} + +type MutateOperation_CustomerLabelOperation struct { + CustomerLabelOperation *CustomerLabelOperation `protobuf:"bytes,32,opt,name=customer_label_operation,json=customerLabelOperation,proto3,oneof"` +} + +type MutateOperation_CustomerNegativeCriterionOperation struct { + CustomerNegativeCriterionOperation *CustomerNegativeCriterionOperation `protobuf:"bytes,34,opt,name=customer_negative_criterion_operation,json=customerNegativeCriterionOperation,proto3,oneof"` +} + +type MutateOperation_CustomerOperation struct { + CustomerOperation *CustomerOperation `protobuf:"bytes,35,opt,name=customer_operation,json=customerOperation,proto3,oneof"` +} + +type MutateOperation_ExtensionFeedItemOperation struct { + ExtensionFeedItemOperation *ExtensionFeedItemOperation `protobuf:"bytes,36,opt,name=extension_feed_item_operation,json=extensionFeedItemOperation,proto3,oneof"` +} + +type MutateOperation_FeedItemOperation struct { + FeedItemOperation *FeedItemOperation `protobuf:"bytes,37,opt,name=feed_item_operation,json=feedItemOperation,proto3,oneof"` +} + +type MutateOperation_FeedItemTargetOperation struct { + FeedItemTargetOperation *FeedItemTargetOperation `protobuf:"bytes,38,opt,name=feed_item_target_operation,json=feedItemTargetOperation,proto3,oneof"` +} + +type MutateOperation_FeedMappingOperation struct { + FeedMappingOperation *FeedMappingOperation `protobuf:"bytes,39,opt,name=feed_mapping_operation,json=feedMappingOperation,proto3,oneof"` +} + +type MutateOperation_FeedOperation struct { + FeedOperation *FeedOperation `protobuf:"bytes,40,opt,name=feed_operation,json=feedOperation,proto3,oneof"` +} + +type MutateOperation_LabelOperation struct { + LabelOperation *LabelOperation `protobuf:"bytes,41,opt,name=label_operation,json=labelOperation,proto3,oneof"` +} + +type MutateOperation_MediaFileOperation struct { + MediaFileOperation *MediaFileOperation `protobuf:"bytes,42,opt,name=media_file_operation,json=mediaFileOperation,proto3,oneof"` +} + +type MutateOperation_RemarketingActionOperation struct { + RemarketingActionOperation *RemarketingActionOperation `protobuf:"bytes,43,opt,name=remarketing_action_operation,json=remarketingActionOperation,proto3,oneof"` +} + +type MutateOperation_SharedCriterionOperation struct { + SharedCriterionOperation *SharedCriterionOperation `protobuf:"bytes,14,opt,name=shared_criterion_operation,json=sharedCriterionOperation,proto3,oneof"` +} + +type MutateOperation_SharedSetOperation struct { + SharedSetOperation *SharedSetOperation `protobuf:"bytes,15,opt,name=shared_set_operation,json=sharedSetOperation,proto3,oneof"` +} + +type MutateOperation_UserListOperation struct { + UserListOperation *UserListOperation 
`protobuf:"bytes,16,opt,name=user_list_operation,json=userListOperation,proto3,oneof"` +} + +func (*MutateOperation_AdGroupAdLabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupAdOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupBidModifierOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupCriterionLabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupCriterionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupExtensionSettingOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupFeedOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupLabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdGroupOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AdParameterOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_AssetOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_BiddingStrategyOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignBidModifierOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignBudgetOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignCriterionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignExtensionSettingOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignFeedOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignLabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CampaignSharedSetOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_ConversionActionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CustomerExtensionSettingOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CustomerFeedOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CustomerLabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CustomerNegativeCriterionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_CustomerOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_ExtensionFeedItemOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_FeedItemOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_FeedItemTargetOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_FeedMappingOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_FeedOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_LabelOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_MediaFileOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_RemarketingActionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_SharedCriterionOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_SharedSetOperation) isMutateOperation_Operation() {} + +func (*MutateOperation_UserListOperation) isMutateOperation_Operation() {} + +func (m *MutateOperation) GetOperation() isMutateOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *MutateOperation) GetAdGroupAdLabelOperation() *AdGroupAdLabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupAdLabelOperation); ok { + return x.AdGroupAdLabelOperation + } + return nil +} + +func (m 
*MutateOperation) GetAdGroupAdOperation() *AdGroupAdOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupAdOperation); ok { + return x.AdGroupAdOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupBidModifierOperation() *AdGroupBidModifierOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupBidModifierOperation); ok { + return x.AdGroupBidModifierOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupCriterionLabelOperation() *AdGroupCriterionLabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupCriterionLabelOperation); ok { + return x.AdGroupCriterionLabelOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupCriterionOperation() *AdGroupCriterionOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupCriterionOperation); ok { + return x.AdGroupCriterionOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupExtensionSettingOperation() *AdGroupExtensionSettingOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupExtensionSettingOperation); ok { + return x.AdGroupExtensionSettingOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupFeedOperation() *AdGroupFeedOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupFeedOperation); ok { + return x.AdGroupFeedOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupLabelOperation() *AdGroupLabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupLabelOperation); ok { + return x.AdGroupLabelOperation + } + return nil +} + +func (m *MutateOperation) GetAdGroupOperation() *AdGroupOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdGroupOperation); ok { + return x.AdGroupOperation + } + return nil +} + +func (m *MutateOperation) GetAdParameterOperation() *AdParameterOperation { + if x, ok := m.GetOperation().(*MutateOperation_AdParameterOperation); ok { + return x.AdParameterOperation + } + return nil +} + +func (m *MutateOperation) GetAssetOperation() *AssetOperation { + if x, ok := m.GetOperation().(*MutateOperation_AssetOperation); ok { + return x.AssetOperation + } + return nil +} + +func (m *MutateOperation) GetBiddingStrategyOperation() *BiddingStrategyOperation { + if x, ok := m.GetOperation().(*MutateOperation_BiddingStrategyOperation); ok { + return x.BiddingStrategyOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignBidModifierOperation() *CampaignBidModifierOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignBidModifierOperation); ok { + return x.CampaignBidModifierOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignBudgetOperation() *CampaignBudgetOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignBudgetOperation); ok { + return x.CampaignBudgetOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignCriterionOperation() *CampaignCriterionOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignCriterionOperation); ok { + return x.CampaignCriterionOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignExtensionSettingOperation() *CampaignExtensionSettingOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignExtensionSettingOperation); ok { + return x.CampaignExtensionSettingOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignFeedOperation() *CampaignFeedOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignFeedOperation); ok { + return x.CampaignFeedOperation + } + return nil +} + 
+func (m *MutateOperation) GetCampaignLabelOperation() *CampaignLabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignLabelOperation); ok { + return x.CampaignLabelOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignOperation() *CampaignOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignOperation); ok { + return x.CampaignOperation + } + return nil +} + +func (m *MutateOperation) GetCampaignSharedSetOperation() *CampaignSharedSetOperation { + if x, ok := m.GetOperation().(*MutateOperation_CampaignSharedSetOperation); ok { + return x.CampaignSharedSetOperation + } + return nil +} + +func (m *MutateOperation) GetConversionActionOperation() *ConversionActionOperation { + if x, ok := m.GetOperation().(*MutateOperation_ConversionActionOperation); ok { + return x.ConversionActionOperation + } + return nil +} + +func (m *MutateOperation) GetCustomerExtensionSettingOperation() *CustomerExtensionSettingOperation { + if x, ok := m.GetOperation().(*MutateOperation_CustomerExtensionSettingOperation); ok { + return x.CustomerExtensionSettingOperation + } + return nil +} + +func (m *MutateOperation) GetCustomerFeedOperation() *CustomerFeedOperation { + if x, ok := m.GetOperation().(*MutateOperation_CustomerFeedOperation); ok { + return x.CustomerFeedOperation + } + return nil +} + +func (m *MutateOperation) GetCustomerLabelOperation() *CustomerLabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_CustomerLabelOperation); ok { + return x.CustomerLabelOperation + } + return nil +} + +func (m *MutateOperation) GetCustomerNegativeCriterionOperation() *CustomerNegativeCriterionOperation { + if x, ok := m.GetOperation().(*MutateOperation_CustomerNegativeCriterionOperation); ok { + return x.CustomerNegativeCriterionOperation + } + return nil +} + +func (m *MutateOperation) GetCustomerOperation() *CustomerOperation { + if x, ok := m.GetOperation().(*MutateOperation_CustomerOperation); ok { + return x.CustomerOperation + } + return nil +} + +func (m *MutateOperation) GetExtensionFeedItemOperation() *ExtensionFeedItemOperation { + if x, ok := m.GetOperation().(*MutateOperation_ExtensionFeedItemOperation); ok { + return x.ExtensionFeedItemOperation + } + return nil +} + +func (m *MutateOperation) GetFeedItemOperation() *FeedItemOperation { + if x, ok := m.GetOperation().(*MutateOperation_FeedItemOperation); ok { + return x.FeedItemOperation + } + return nil +} + +func (m *MutateOperation) GetFeedItemTargetOperation() *FeedItemTargetOperation { + if x, ok := m.GetOperation().(*MutateOperation_FeedItemTargetOperation); ok { + return x.FeedItemTargetOperation + } + return nil +} + +func (m *MutateOperation) GetFeedMappingOperation() *FeedMappingOperation { + if x, ok := m.GetOperation().(*MutateOperation_FeedMappingOperation); ok { + return x.FeedMappingOperation + } + return nil +} + +func (m *MutateOperation) GetFeedOperation() *FeedOperation { + if x, ok := m.GetOperation().(*MutateOperation_FeedOperation); ok { + return x.FeedOperation + } + return nil +} + +func (m *MutateOperation) GetLabelOperation() *LabelOperation { + if x, ok := m.GetOperation().(*MutateOperation_LabelOperation); ok { + return x.LabelOperation + } + return nil +} + +func (m *MutateOperation) GetMediaFileOperation() *MediaFileOperation { + if x, ok := m.GetOperation().(*MutateOperation_MediaFileOperation); ok { + return x.MediaFileOperation + } + return nil +} + +func (m *MutateOperation) GetRemarketingActionOperation() *RemarketingActionOperation { + if x, ok := 
m.GetOperation().(*MutateOperation_RemarketingActionOperation); ok { + return x.RemarketingActionOperation + } + return nil +} + +func (m *MutateOperation) GetSharedCriterionOperation() *SharedCriterionOperation { + if x, ok := m.GetOperation().(*MutateOperation_SharedCriterionOperation); ok { + return x.SharedCriterionOperation + } + return nil +} + +func (m *MutateOperation) GetSharedSetOperation() *SharedSetOperation { + if x, ok := m.GetOperation().(*MutateOperation_SharedSetOperation); ok { + return x.SharedSetOperation + } + return nil +} + +func (m *MutateOperation) GetUserListOperation() *UserListOperation { + if x, ok := m.GetOperation().(*MutateOperation_UserListOperation); ok { + return x.UserListOperation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*MutateOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MutateOperation_OneofMarshaler, _MutateOperation_OneofUnmarshaler, _MutateOperation_OneofSizer, []interface{}{ + (*MutateOperation_AdGroupAdLabelOperation)(nil), + (*MutateOperation_AdGroupAdOperation)(nil), + (*MutateOperation_AdGroupBidModifierOperation)(nil), + (*MutateOperation_AdGroupCriterionLabelOperation)(nil), + (*MutateOperation_AdGroupCriterionOperation)(nil), + (*MutateOperation_AdGroupExtensionSettingOperation)(nil), + (*MutateOperation_AdGroupFeedOperation)(nil), + (*MutateOperation_AdGroupLabelOperation)(nil), + (*MutateOperation_AdGroupOperation)(nil), + (*MutateOperation_AdParameterOperation)(nil), + (*MutateOperation_AssetOperation)(nil), + (*MutateOperation_BiddingStrategyOperation)(nil), + (*MutateOperation_CampaignBidModifierOperation)(nil), + (*MutateOperation_CampaignBudgetOperation)(nil), + (*MutateOperation_CampaignCriterionOperation)(nil), + (*MutateOperation_CampaignExtensionSettingOperation)(nil), + (*MutateOperation_CampaignFeedOperation)(nil), + (*MutateOperation_CampaignLabelOperation)(nil), + (*MutateOperation_CampaignOperation)(nil), + (*MutateOperation_CampaignSharedSetOperation)(nil), + (*MutateOperation_ConversionActionOperation)(nil), + (*MutateOperation_CustomerExtensionSettingOperation)(nil), + (*MutateOperation_CustomerFeedOperation)(nil), + (*MutateOperation_CustomerLabelOperation)(nil), + (*MutateOperation_CustomerNegativeCriterionOperation)(nil), + (*MutateOperation_CustomerOperation)(nil), + (*MutateOperation_ExtensionFeedItemOperation)(nil), + (*MutateOperation_FeedItemOperation)(nil), + (*MutateOperation_FeedItemTargetOperation)(nil), + (*MutateOperation_FeedMappingOperation)(nil), + (*MutateOperation_FeedOperation)(nil), + (*MutateOperation_LabelOperation)(nil), + (*MutateOperation_MediaFileOperation)(nil), + (*MutateOperation_RemarketingActionOperation)(nil), + (*MutateOperation_SharedCriterionOperation)(nil), + (*MutateOperation_SharedSetOperation)(nil), + (*MutateOperation_UserListOperation)(nil), + } +} + +func _MutateOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MutateOperation) + // operation + switch x := m.Operation.(type) { + case *MutateOperation_AdGroupAdLabelOperation: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupAdLabelOperation); err != nil { + return err + } + case *MutateOperation_AdGroupAdOperation: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupAdOperation); err != nil { + return err + } + case 
*MutateOperation_AdGroupBidModifierOperation: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupBidModifierOperation); err != nil { + return err + } + case *MutateOperation_AdGroupCriterionLabelOperation: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupCriterionLabelOperation); err != nil { + return err + } + case *MutateOperation_AdGroupCriterionOperation: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupCriterionOperation); err != nil { + return err + } + case *MutateOperation_AdGroupExtensionSettingOperation: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupExtensionSettingOperation); err != nil { + return err + } + case *MutateOperation_AdGroupFeedOperation: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupFeedOperation); err != nil { + return err + } + case *MutateOperation_AdGroupLabelOperation: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupLabelOperation); err != nil { + return err + } + case *MutateOperation_AdGroupOperation: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupOperation); err != nil { + return err + } + case *MutateOperation_AdParameterOperation: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdParameterOperation); err != nil { + return err + } + case *MutateOperation_AssetOperation: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AssetOperation); err != nil { + return err + } + case *MutateOperation_BiddingStrategyOperation: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BiddingStrategyOperation); err != nil { + return err + } + case *MutateOperation_CampaignBidModifierOperation: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignBidModifierOperation); err != nil { + return err + } + case *MutateOperation_CampaignBudgetOperation: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignBudgetOperation); err != nil { + return err + } + case *MutateOperation_CampaignCriterionOperation: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignCriterionOperation); err != nil { + return err + } + case *MutateOperation_CampaignExtensionSettingOperation: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignExtensionSettingOperation); err != nil { + return err + } + case *MutateOperation_CampaignFeedOperation: + b.EncodeVarint(27<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignFeedOperation); err != nil { + return err + } + case *MutateOperation_CampaignLabelOperation: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignLabelOperation); err != nil { + return err + } + case *MutateOperation_CampaignOperation: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignOperation); err != nil { + return err + } + case *MutateOperation_CampaignSharedSetOperation: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignSharedSetOperation); err != nil { + return err + } + case *MutateOperation_ConversionActionOperation: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConversionActionOperation); err != nil { + return err + } + case *MutateOperation_CustomerExtensionSettingOperation: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerExtensionSettingOperation); err != nil 
{ + return err + } + case *MutateOperation_CustomerFeedOperation: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerFeedOperation); err != nil { + return err + } + case *MutateOperation_CustomerLabelOperation: + b.EncodeVarint(32<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerLabelOperation); err != nil { + return err + } + case *MutateOperation_CustomerNegativeCriterionOperation: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerNegativeCriterionOperation); err != nil { + return err + } + case *MutateOperation_CustomerOperation: + b.EncodeVarint(35<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerOperation); err != nil { + return err + } + case *MutateOperation_ExtensionFeedItemOperation: + b.EncodeVarint(36<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExtensionFeedItemOperation); err != nil { + return err + } + case *MutateOperation_FeedItemOperation: + b.EncodeVarint(37<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedItemOperation); err != nil { + return err + } + case *MutateOperation_FeedItemTargetOperation: + b.EncodeVarint(38<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedItemTargetOperation); err != nil { + return err + } + case *MutateOperation_FeedMappingOperation: + b.EncodeVarint(39<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedMappingOperation); err != nil { + return err + } + case *MutateOperation_FeedOperation: + b.EncodeVarint(40<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedOperation); err != nil { + return err + } + case *MutateOperation_LabelOperation: + b.EncodeVarint(41<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LabelOperation); err != nil { + return err + } + case *MutateOperation_MediaFileOperation: + b.EncodeVarint(42<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaFileOperation); err != nil { + return err + } + case *MutateOperation_RemarketingActionOperation: + b.EncodeVarint(43<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RemarketingActionOperation); err != nil { + return err + } + case *MutateOperation_SharedCriterionOperation: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SharedCriterionOperation); err != nil { + return err + } + case *MutateOperation_SharedSetOperation: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SharedSetOperation); err != nil { + return err + } + case *MutateOperation_UserListOperation: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserListOperation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MutateOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _MutateOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MutateOperation) + switch tag { + case 17: // operation.ad_group_ad_label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupAdLabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupAdLabelOperation{msg} + return true, err + case 1: // operation.ad_group_ad_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupAdOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupAdOperation{msg} + return true, err + case 2: // operation.ad_group_bid_modifier_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + 
msg := new(AdGroupBidModifierOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupBidModifierOperation{msg} + return true, err + case 18: // operation.ad_group_criterion_label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupCriterionLabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupCriterionLabelOperation{msg} + return true, err + case 3: // operation.ad_group_criterion_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupCriterionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupCriterionOperation{msg} + return true, err + case 19: // operation.ad_group_extension_setting_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupExtensionSettingOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupExtensionSettingOperation{msg} + return true, err + case 20: // operation.ad_group_feed_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupFeedOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupFeedOperation{msg} + return true, err + case 21: // operation.ad_group_label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupLabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupLabelOperation{msg} + return true, err + case 5: // operation.ad_group_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdGroupOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdGroupOperation{msg} + return true, err + case 22: // operation.ad_parameter_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AdParameterOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AdParameterOperation{msg} + return true, err + case 23: // operation.asset_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AssetOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_AssetOperation{msg} + return true, err + case 6: // operation.bidding_strategy_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BiddingStrategyOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_BiddingStrategyOperation{msg} + return true, err + case 7: // operation.campaign_bid_modifier_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignBidModifierOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignBidModifierOperation{msg} + return true, err + case 8: // operation.campaign_budget_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignBudgetOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignBudgetOperation{msg} + return true, err + case 13: // operation.campaign_criterion_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignCriterionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignCriterionOperation{msg} + return true, err + case 26: // 
operation.campaign_extension_setting_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignExtensionSettingOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignExtensionSettingOperation{msg} + return true, err + case 27: // operation.campaign_feed_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignFeedOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignFeedOperation{msg} + return true, err + case 28: // operation.campaign_label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignLabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignLabelOperation{msg} + return true, err + case 10: // operation.campaign_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignOperation{msg} + return true, err + case 11: // operation.campaign_shared_set_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CampaignSharedSetOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CampaignSharedSetOperation{msg} + return true, err + case 12: // operation.conversion_action_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ConversionActionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_ConversionActionOperation{msg} + return true, err + case 30: // operation.customer_extension_setting_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomerExtensionSettingOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CustomerExtensionSettingOperation{msg} + return true, err + case 31: // operation.customer_feed_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomerFeedOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CustomerFeedOperation{msg} + return true, err + case 32: // operation.customer_label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomerLabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CustomerLabelOperation{msg} + return true, err + case 34: // operation.customer_negative_criterion_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomerNegativeCriterionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CustomerNegativeCriterionOperation{msg} + return true, err + case 35: // operation.customer_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomerOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_CustomerOperation{msg} + return true, err + case 36: // operation.extension_feed_item_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExtensionFeedItemOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_ExtensionFeedItemOperation{msg} + return true, err + case 37: // operation.feed_item_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FeedItemOperation) + err := 
b.DecodeMessage(msg) + m.Operation = &MutateOperation_FeedItemOperation{msg} + return true, err + case 38: // operation.feed_item_target_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FeedItemTargetOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_FeedItemTargetOperation{msg} + return true, err + case 39: // operation.feed_mapping_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FeedMappingOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_FeedMappingOperation{msg} + return true, err + case 40: // operation.feed_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FeedOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_FeedOperation{msg} + return true, err + case 41: // operation.label_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_LabelOperation{msg} + return true, err + case 42: // operation.media_file_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MediaFileOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_MediaFileOperation{msg} + return true, err + case 43: // operation.remarketing_action_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RemarketingActionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_RemarketingActionOperation{msg} + return true, err + case 14: // operation.shared_criterion_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SharedCriterionOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_SharedCriterionOperation{msg} + return true, err + case 15: // operation.shared_set_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SharedSetOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_SharedSetOperation{msg} + return true, err + case 16: // operation.user_list_operation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UserListOperation) + err := b.DecodeMessage(msg) + m.Operation = &MutateOperation_UserListOperation{msg} + return true, err + default: + return false, nil + } +} + +func _MutateOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MutateOperation) + // operation + switch x := m.Operation.(type) { + case *MutateOperation_AdGroupAdLabelOperation: + s := proto.Size(x.AdGroupAdLabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupAdOperation: + s := proto.Size(x.AdGroupAdOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupBidModifierOperation: + s := proto.Size(x.AdGroupBidModifierOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupCriterionLabelOperation: + s := proto.Size(x.AdGroupCriterionLabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupCriterionOperation: + s := proto.Size(x.AdGroupCriterionOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*MutateOperation_AdGroupExtensionSettingOperation: + s := proto.Size(x.AdGroupExtensionSettingOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupFeedOperation: + s := proto.Size(x.AdGroupFeedOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupLabelOperation: + s := proto.Size(x.AdGroupLabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdGroupOperation: + s := proto.Size(x.AdGroupOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AdParameterOperation: + s := proto.Size(x.AdParameterOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_AssetOperation: + s := proto.Size(x.AssetOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_BiddingStrategyOperation: + s := proto.Size(x.BiddingStrategyOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignBidModifierOperation: + s := proto.Size(x.CampaignBidModifierOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignBudgetOperation: + s := proto.Size(x.CampaignBudgetOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignCriterionOperation: + s := proto.Size(x.CampaignCriterionOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignExtensionSettingOperation: + s := proto.Size(x.CampaignExtensionSettingOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignFeedOperation: + s := proto.Size(x.CampaignFeedOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignLabelOperation: + s := proto.Size(x.CampaignLabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignOperation: + s := proto.Size(x.CampaignOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CampaignSharedSetOperation: + s := proto.Size(x.CampaignSharedSetOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_ConversionActionOperation: + s := proto.Size(x.ConversionActionOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CustomerExtensionSettingOperation: + s := proto.Size(x.CustomerExtensionSettingOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CustomerFeedOperation: + s := proto.Size(x.CustomerFeedOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CustomerLabelOperation: + s := proto.Size(x.CustomerLabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CustomerNegativeCriterionOperation: + s := proto.Size(x.CustomerNegativeCriterionOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_CustomerOperation: + s := proto.Size(x.CustomerOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_ExtensionFeedItemOperation: + s := proto.Size(x.ExtensionFeedItemOperation) + n += 2 // 
tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_FeedItemOperation: + s := proto.Size(x.FeedItemOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_FeedItemTargetOperation: + s := proto.Size(x.FeedItemTargetOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_FeedMappingOperation: + s := proto.Size(x.FeedMappingOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_FeedOperation: + s := proto.Size(x.FeedOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_LabelOperation: + s := proto.Size(x.LabelOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_MediaFileOperation: + s := proto.Size(x.MediaFileOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_RemarketingActionOperation: + s := proto.Size(x.RemarketingActionOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_SharedCriterionOperation: + s := proto.Size(x.SharedCriterionOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_SharedSetOperation: + s := proto.Size(x.SharedSetOperation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperation_UserListOperation: + s := proto.Size(x.UserListOperation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for the resource mutate. +type MutateOperationResponse struct { + // The mutate response + // + // Types that are valid to be assigned to Response: + // *MutateOperationResponse_AdGroupAdLabelResult + // *MutateOperationResponse_AdGroupAdResult + // *MutateOperationResponse_AdGroupBidModifierResult + // *MutateOperationResponse_AdGroupCriterionLabelResult + // *MutateOperationResponse_AdGroupCriterionResult + // *MutateOperationResponse_AdGroupExtensionSettingResult + // *MutateOperationResponse_AdGroupFeedResult + // *MutateOperationResponse_AdGroupLabelResult + // *MutateOperationResponse_AdGroupResult + // *MutateOperationResponse_AdParameterResult + // *MutateOperationResponse_AssetResult + // *MutateOperationResponse_BiddingStrategyResult + // *MutateOperationResponse_CampaignBidModifierResult + // *MutateOperationResponse_CampaignBudgetResult + // *MutateOperationResponse_CampaignCriterionResult + // *MutateOperationResponse_CampaignExtensionSettingResult + // *MutateOperationResponse_CampaignFeedResult + // *MutateOperationResponse_CampaignLabelResult + // *MutateOperationResponse_CampaignResult + // *MutateOperationResponse_CampaignSharedSetResult + // *MutateOperationResponse_ConversionActionResult + // *MutateOperationResponse_CustomerExtensionSettingResult + // *MutateOperationResponse_CustomerFeedResult + // *MutateOperationResponse_CustomerLabelResult + // *MutateOperationResponse_CustomerNegativeCriterionResult + // *MutateOperationResponse_CustomerResult + // *MutateOperationResponse_ExtensionFeedItemResult + // *MutateOperationResponse_FeedItemResult + // *MutateOperationResponse_FeedItemTargetResult + // *MutateOperationResponse_FeedMappingResult + // *MutateOperationResponse_FeedResult + // *MutateOperationResponse_LabelResult + // *MutateOperationResponse_MediaFileResult 
+ // *MutateOperationResponse_RemarketingActionResult + // *MutateOperationResponse_SharedCriterionResult + // *MutateOperationResponse_SharedSetResult + // *MutateOperationResponse_UserListResult + Response isMutateOperationResponse_Response `protobuf_oneof:"response"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateOperationResponse) Reset() { *m = MutateOperationResponse{} } +func (m *MutateOperationResponse) String() string { return proto.CompactTextString(m) } +func (*MutateOperationResponse) ProtoMessage() {} +func (*MutateOperationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_google_ads_service_75d9a9baabc9a843, []int{6} +} +func (m *MutateOperationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateOperationResponse.Unmarshal(m, b) +} +func (m *MutateOperationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateOperationResponse.Marshal(b, m, deterministic) +} +func (dst *MutateOperationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateOperationResponse.Merge(dst, src) +} +func (m *MutateOperationResponse) XXX_Size() int { + return xxx_messageInfo_MutateOperationResponse.Size(m) +} +func (m *MutateOperationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateOperationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateOperationResponse proto.InternalMessageInfo + +type isMutateOperationResponse_Response interface { + isMutateOperationResponse_Response() +} + +type MutateOperationResponse_AdGroupAdLabelResult struct { + AdGroupAdLabelResult *MutateAdGroupAdLabelResult `protobuf:"bytes,17,opt,name=ad_group_ad_label_result,json=adGroupAdLabelResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupAdResult struct { + AdGroupAdResult *MutateAdGroupAdResult `protobuf:"bytes,1,opt,name=ad_group_ad_result,json=adGroupAdResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupBidModifierResult struct { + AdGroupBidModifierResult *MutateAdGroupBidModifierResult `protobuf:"bytes,2,opt,name=ad_group_bid_modifier_result,json=adGroupBidModifierResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupCriterionLabelResult struct { + AdGroupCriterionLabelResult *MutateAdGroupCriterionLabelResult `protobuf:"bytes,18,opt,name=ad_group_criterion_label_result,json=adGroupCriterionLabelResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupCriterionResult struct { + AdGroupCriterionResult *MutateAdGroupCriterionResult `protobuf:"bytes,3,opt,name=ad_group_criterion_result,json=adGroupCriterionResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupExtensionSettingResult struct { + AdGroupExtensionSettingResult *MutateAdGroupExtensionSettingResult `protobuf:"bytes,19,opt,name=ad_group_extension_setting_result,json=adGroupExtensionSettingResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupFeedResult struct { + AdGroupFeedResult *MutateAdGroupFeedResult `protobuf:"bytes,20,opt,name=ad_group_feed_result,json=adGroupFeedResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupLabelResult struct { + AdGroupLabelResult *MutateAdGroupLabelResult `protobuf:"bytes,21,opt,name=ad_group_label_result,json=adGroupLabelResult,proto3,oneof"` +} + +type MutateOperationResponse_AdGroupResult struct { + AdGroupResult *MutateAdGroupResult `protobuf:"bytes,5,opt,name=ad_group_result,json=adGroupResult,proto3,oneof"` +} + +type MutateOperationResponse_AdParameterResult 
struct { + AdParameterResult *MutateAdParameterResult `protobuf:"bytes,22,opt,name=ad_parameter_result,json=adParameterResult,proto3,oneof"` +} + +type MutateOperationResponse_AssetResult struct { + AssetResult *MutateAssetResult `protobuf:"bytes,23,opt,name=asset_result,json=assetResult,proto3,oneof"` +} + +type MutateOperationResponse_BiddingStrategyResult struct { + BiddingStrategyResult *MutateBiddingStrategyResult `protobuf:"bytes,6,opt,name=bidding_strategy_result,json=biddingStrategyResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignBidModifierResult struct { + CampaignBidModifierResult *MutateCampaignBidModifierResult `protobuf:"bytes,7,opt,name=campaign_bid_modifier_result,json=campaignBidModifierResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignBudgetResult struct { + CampaignBudgetResult *MutateCampaignBudgetResult `protobuf:"bytes,8,opt,name=campaign_budget_result,json=campaignBudgetResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignCriterionResult struct { + CampaignCriterionResult *MutateCampaignCriterionResult `protobuf:"bytes,13,opt,name=campaign_criterion_result,json=campaignCriterionResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignExtensionSettingResult struct { + CampaignExtensionSettingResult *MutateCampaignExtensionSettingResult `protobuf:"bytes,26,opt,name=campaign_extension_setting_result,json=campaignExtensionSettingResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignFeedResult struct { + CampaignFeedResult *MutateCampaignFeedResult `protobuf:"bytes,27,opt,name=campaign_feed_result,json=campaignFeedResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignLabelResult struct { + CampaignLabelResult *MutateCampaignLabelResult `protobuf:"bytes,28,opt,name=campaign_label_result,json=campaignLabelResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignResult struct { + CampaignResult *MutateCampaignResult `protobuf:"bytes,10,opt,name=campaign_result,json=campaignResult,proto3,oneof"` +} + +type MutateOperationResponse_CampaignSharedSetResult struct { + CampaignSharedSetResult *MutateCampaignSharedSetResult `protobuf:"bytes,11,opt,name=campaign_shared_set_result,json=campaignSharedSetResult,proto3,oneof"` +} + +type MutateOperationResponse_ConversionActionResult struct { + ConversionActionResult *MutateConversionActionResult `protobuf:"bytes,12,opt,name=conversion_action_result,json=conversionActionResult,proto3,oneof"` +} + +type MutateOperationResponse_CustomerExtensionSettingResult struct { + CustomerExtensionSettingResult *MutateCustomerExtensionSettingResult `protobuf:"bytes,30,opt,name=customer_extension_setting_result,json=customerExtensionSettingResult,proto3,oneof"` +} + +type MutateOperationResponse_CustomerFeedResult struct { + CustomerFeedResult *MutateCustomerFeedResult `protobuf:"bytes,31,opt,name=customer_feed_result,json=customerFeedResult,proto3,oneof"` +} + +type MutateOperationResponse_CustomerLabelResult struct { + CustomerLabelResult *MutateCustomerLabelResult `protobuf:"bytes,32,opt,name=customer_label_result,json=customerLabelResult,proto3,oneof"` +} + +type MutateOperationResponse_CustomerNegativeCriterionResult struct { + CustomerNegativeCriterionResult *MutateCustomerNegativeCriteriaResult `protobuf:"bytes,34,opt,name=customer_negative_criterion_result,json=customerNegativeCriterionResult,proto3,oneof"` +} + +type MutateOperationResponse_CustomerResult struct { + CustomerResult *MutateCustomerResult 
`protobuf:"bytes,35,opt,name=customer_result,json=customerResult,proto3,oneof"` +} + +type MutateOperationResponse_ExtensionFeedItemResult struct { + ExtensionFeedItemResult *MutateExtensionFeedItemResult `protobuf:"bytes,36,opt,name=extension_feed_item_result,json=extensionFeedItemResult,proto3,oneof"` +} + +type MutateOperationResponse_FeedItemResult struct { + FeedItemResult *MutateFeedItemResult `protobuf:"bytes,37,opt,name=feed_item_result,json=feedItemResult,proto3,oneof"` +} + +type MutateOperationResponse_FeedItemTargetResult struct { + FeedItemTargetResult *MutateFeedItemTargetResult `protobuf:"bytes,38,opt,name=feed_item_target_result,json=feedItemTargetResult,proto3,oneof"` +} + +type MutateOperationResponse_FeedMappingResult struct { + FeedMappingResult *MutateFeedMappingResult `protobuf:"bytes,39,opt,name=feed_mapping_result,json=feedMappingResult,proto3,oneof"` +} + +type MutateOperationResponse_FeedResult struct { + FeedResult *MutateFeedResult `protobuf:"bytes,40,opt,name=feed_result,json=feedResult,proto3,oneof"` +} + +type MutateOperationResponse_LabelResult struct { + LabelResult *MutateLabelResult `protobuf:"bytes,41,opt,name=label_result,json=labelResult,proto3,oneof"` +} + +type MutateOperationResponse_MediaFileResult struct { + MediaFileResult *MutateMediaFileResult `protobuf:"bytes,42,opt,name=media_file_result,json=mediaFileResult,proto3,oneof"` +} + +type MutateOperationResponse_RemarketingActionResult struct { + RemarketingActionResult *MutateRemarketingActionResult `protobuf:"bytes,43,opt,name=remarketing_action_result,json=remarketingActionResult,proto3,oneof"` +} + +type MutateOperationResponse_SharedCriterionResult struct { + SharedCriterionResult *MutateSharedCriterionResult `protobuf:"bytes,14,opt,name=shared_criterion_result,json=sharedCriterionResult,proto3,oneof"` +} + +type MutateOperationResponse_SharedSetResult struct { + SharedSetResult *MutateSharedSetResult `protobuf:"bytes,15,opt,name=shared_set_result,json=sharedSetResult,proto3,oneof"` +} + +type MutateOperationResponse_UserListResult struct { + UserListResult *MutateUserListResult `protobuf:"bytes,16,opt,name=user_list_result,json=userListResult,proto3,oneof"` +} + +func (*MutateOperationResponse_AdGroupAdLabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupAdResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupBidModifierResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupCriterionLabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupCriterionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupExtensionSettingResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupFeedResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupLabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdGroupResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AdParameterResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_AssetResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_BiddingStrategyResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignBidModifierResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignBudgetResult) isMutateOperationResponse_Response() {} + +func 
(*MutateOperationResponse_CampaignCriterionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignExtensionSettingResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignFeedResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignLabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CampaignSharedSetResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_ConversionActionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CustomerExtensionSettingResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CustomerFeedResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CustomerLabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CustomerNegativeCriterionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_CustomerResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_ExtensionFeedItemResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_FeedItemResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_FeedItemTargetResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_FeedMappingResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_FeedResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_LabelResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_MediaFileResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_RemarketingActionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_SharedCriterionResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_SharedSetResult) isMutateOperationResponse_Response() {} + +func (*MutateOperationResponse_UserListResult) isMutateOperationResponse_Response() {} + +func (m *MutateOperationResponse) GetResponse() isMutateOperationResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupAdLabelResult() *MutateAdGroupAdLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupAdLabelResult); ok { + return x.AdGroupAdLabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupAdResult() *MutateAdGroupAdResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupAdResult); ok { + return x.AdGroupAdResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupBidModifierResult() *MutateAdGroupBidModifierResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupBidModifierResult); ok { + return x.AdGroupBidModifierResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupCriterionLabelResult() *MutateAdGroupCriterionLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupCriterionLabelResult); ok { + return x.AdGroupCriterionLabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupCriterionResult() *MutateAdGroupCriterionResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupCriterionResult); ok { + return x.AdGroupCriterionResult + } + return nil +} + +func (m *MutateOperationResponse) 
GetAdGroupExtensionSettingResult() *MutateAdGroupExtensionSettingResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupExtensionSettingResult); ok { + return x.AdGroupExtensionSettingResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupFeedResult() *MutateAdGroupFeedResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupFeedResult); ok { + return x.AdGroupFeedResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupLabelResult() *MutateAdGroupLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupLabelResult); ok { + return x.AdGroupLabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdGroupResult() *MutateAdGroupResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdGroupResult); ok { + return x.AdGroupResult + } + return nil +} + +func (m *MutateOperationResponse) GetAdParameterResult() *MutateAdParameterResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AdParameterResult); ok { + return x.AdParameterResult + } + return nil +} + +func (m *MutateOperationResponse) GetAssetResult() *MutateAssetResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_AssetResult); ok { + return x.AssetResult + } + return nil +} + +func (m *MutateOperationResponse) GetBiddingStrategyResult() *MutateBiddingStrategyResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_BiddingStrategyResult); ok { + return x.BiddingStrategyResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignBidModifierResult() *MutateCampaignBidModifierResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignBidModifierResult); ok { + return x.CampaignBidModifierResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignBudgetResult() *MutateCampaignBudgetResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignBudgetResult); ok { + return x.CampaignBudgetResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignCriterionResult() *MutateCampaignCriterionResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignCriterionResult); ok { + return x.CampaignCriterionResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignExtensionSettingResult() *MutateCampaignExtensionSettingResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignExtensionSettingResult); ok { + return x.CampaignExtensionSettingResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignFeedResult() *MutateCampaignFeedResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignFeedResult); ok { + return x.CampaignFeedResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignLabelResult() *MutateCampaignLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignLabelResult); ok { + return x.CampaignLabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignResult() *MutateCampaignResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignResult); ok { + return x.CampaignResult + } + return nil +} + +func (m *MutateOperationResponse) GetCampaignSharedSetResult() *MutateCampaignSharedSetResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CampaignSharedSetResult); ok { + return x.CampaignSharedSetResult + } + return nil +} + +func (m *MutateOperationResponse) GetConversionActionResult() *MutateConversionActionResult { + if x, ok := 
m.GetResponse().(*MutateOperationResponse_ConversionActionResult); ok { + return x.ConversionActionResult + } + return nil +} + +func (m *MutateOperationResponse) GetCustomerExtensionSettingResult() *MutateCustomerExtensionSettingResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CustomerExtensionSettingResult); ok { + return x.CustomerExtensionSettingResult + } + return nil +} + +func (m *MutateOperationResponse) GetCustomerFeedResult() *MutateCustomerFeedResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CustomerFeedResult); ok { + return x.CustomerFeedResult + } + return nil +} + +func (m *MutateOperationResponse) GetCustomerLabelResult() *MutateCustomerLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CustomerLabelResult); ok { + return x.CustomerLabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetCustomerNegativeCriterionResult() *MutateCustomerNegativeCriteriaResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CustomerNegativeCriterionResult); ok { + return x.CustomerNegativeCriterionResult + } + return nil +} + +func (m *MutateOperationResponse) GetCustomerResult() *MutateCustomerResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_CustomerResult); ok { + return x.CustomerResult + } + return nil +} + +func (m *MutateOperationResponse) GetExtensionFeedItemResult() *MutateExtensionFeedItemResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_ExtensionFeedItemResult); ok { + return x.ExtensionFeedItemResult + } + return nil +} + +func (m *MutateOperationResponse) GetFeedItemResult() *MutateFeedItemResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_FeedItemResult); ok { + return x.FeedItemResult + } + return nil +} + +func (m *MutateOperationResponse) GetFeedItemTargetResult() *MutateFeedItemTargetResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_FeedItemTargetResult); ok { + return x.FeedItemTargetResult + } + return nil +} + +func (m *MutateOperationResponse) GetFeedMappingResult() *MutateFeedMappingResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_FeedMappingResult); ok { + return x.FeedMappingResult + } + return nil +} + +func (m *MutateOperationResponse) GetFeedResult() *MutateFeedResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_FeedResult); ok { + return x.FeedResult + } + return nil +} + +func (m *MutateOperationResponse) GetLabelResult() *MutateLabelResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_LabelResult); ok { + return x.LabelResult + } + return nil +} + +func (m *MutateOperationResponse) GetMediaFileResult() *MutateMediaFileResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_MediaFileResult); ok { + return x.MediaFileResult + } + return nil +} + +func (m *MutateOperationResponse) GetRemarketingActionResult() *MutateRemarketingActionResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_RemarketingActionResult); ok { + return x.RemarketingActionResult + } + return nil +} + +func (m *MutateOperationResponse) GetSharedCriterionResult() *MutateSharedCriterionResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_SharedCriterionResult); ok { + return x.SharedCriterionResult + } + return nil +} + +func (m *MutateOperationResponse) GetSharedSetResult() *MutateSharedSetResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_SharedSetResult); ok { + return x.SharedSetResult + } + return nil +} + +func (m *MutateOperationResponse) GetUserListResult() 
*MutateUserListResult { + if x, ok := m.GetResponse().(*MutateOperationResponse_UserListResult); ok { + return x.UserListResult + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*MutateOperationResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MutateOperationResponse_OneofMarshaler, _MutateOperationResponse_OneofUnmarshaler, _MutateOperationResponse_OneofSizer, []interface{}{ + (*MutateOperationResponse_AdGroupAdLabelResult)(nil), + (*MutateOperationResponse_AdGroupAdResult)(nil), + (*MutateOperationResponse_AdGroupBidModifierResult)(nil), + (*MutateOperationResponse_AdGroupCriterionLabelResult)(nil), + (*MutateOperationResponse_AdGroupCriterionResult)(nil), + (*MutateOperationResponse_AdGroupExtensionSettingResult)(nil), + (*MutateOperationResponse_AdGroupFeedResult)(nil), + (*MutateOperationResponse_AdGroupLabelResult)(nil), + (*MutateOperationResponse_AdGroupResult)(nil), + (*MutateOperationResponse_AdParameterResult)(nil), + (*MutateOperationResponse_AssetResult)(nil), + (*MutateOperationResponse_BiddingStrategyResult)(nil), + (*MutateOperationResponse_CampaignBidModifierResult)(nil), + (*MutateOperationResponse_CampaignBudgetResult)(nil), + (*MutateOperationResponse_CampaignCriterionResult)(nil), + (*MutateOperationResponse_CampaignExtensionSettingResult)(nil), + (*MutateOperationResponse_CampaignFeedResult)(nil), + (*MutateOperationResponse_CampaignLabelResult)(nil), + (*MutateOperationResponse_CampaignResult)(nil), + (*MutateOperationResponse_CampaignSharedSetResult)(nil), + (*MutateOperationResponse_ConversionActionResult)(nil), + (*MutateOperationResponse_CustomerExtensionSettingResult)(nil), + (*MutateOperationResponse_CustomerFeedResult)(nil), + (*MutateOperationResponse_CustomerLabelResult)(nil), + (*MutateOperationResponse_CustomerNegativeCriterionResult)(nil), + (*MutateOperationResponse_CustomerResult)(nil), + (*MutateOperationResponse_ExtensionFeedItemResult)(nil), + (*MutateOperationResponse_FeedItemResult)(nil), + (*MutateOperationResponse_FeedItemTargetResult)(nil), + (*MutateOperationResponse_FeedMappingResult)(nil), + (*MutateOperationResponse_FeedResult)(nil), + (*MutateOperationResponse_LabelResult)(nil), + (*MutateOperationResponse_MediaFileResult)(nil), + (*MutateOperationResponse_RemarketingActionResult)(nil), + (*MutateOperationResponse_SharedCriterionResult)(nil), + (*MutateOperationResponse_SharedSetResult)(nil), + (*MutateOperationResponse_UserListResult)(nil), + } +} + +func _MutateOperationResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MutateOperationResponse) + // response + switch x := m.Response.(type) { + case *MutateOperationResponse_AdGroupAdLabelResult: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupAdLabelResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupAdResult: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupAdResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupBidModifierResult: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupBidModifierResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupCriterionLabelResult: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupCriterionLabelResult); err != nil { + return err + } + case 
*MutateOperationResponse_AdGroupCriterionResult: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupCriterionResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupExtensionSettingResult: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupExtensionSettingResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupFeedResult: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupFeedResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupLabelResult: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupLabelResult); err != nil { + return err + } + case *MutateOperationResponse_AdGroupResult: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdGroupResult); err != nil { + return err + } + case *MutateOperationResponse_AdParameterResult: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AdParameterResult); err != nil { + return err + } + case *MutateOperationResponse_AssetResult: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AssetResult); err != nil { + return err + } + case *MutateOperationResponse_BiddingStrategyResult: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BiddingStrategyResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignBidModifierResult: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignBidModifierResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignBudgetResult: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignBudgetResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignCriterionResult: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignCriterionResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignExtensionSettingResult: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignExtensionSettingResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignFeedResult: + b.EncodeVarint(27<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignFeedResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignLabelResult: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignLabelResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignResult: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignResult); err != nil { + return err + } + case *MutateOperationResponse_CampaignSharedSetResult: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignSharedSetResult); err != nil { + return err + } + case *MutateOperationResponse_ConversionActionResult: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConversionActionResult); err != nil { + return err + } + case *MutateOperationResponse_CustomerExtensionSettingResult: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerExtensionSettingResult); err != nil { + return err + } + case *MutateOperationResponse_CustomerFeedResult: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerFeedResult); err != nil { + return err + } + case *MutateOperationResponse_CustomerLabelResult: + b.EncodeVarint(32<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.CustomerLabelResult); err != nil { + return err + } + case *MutateOperationResponse_CustomerNegativeCriterionResult: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerNegativeCriterionResult); err != nil { + return err + } + case *MutateOperationResponse_CustomerResult: + b.EncodeVarint(35<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomerResult); err != nil { + return err + } + case *MutateOperationResponse_ExtensionFeedItemResult: + b.EncodeVarint(36<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExtensionFeedItemResult); err != nil { + return err + } + case *MutateOperationResponse_FeedItemResult: + b.EncodeVarint(37<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedItemResult); err != nil { + return err + } + case *MutateOperationResponse_FeedItemTargetResult: + b.EncodeVarint(38<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedItemTargetResult); err != nil { + return err + } + case *MutateOperationResponse_FeedMappingResult: + b.EncodeVarint(39<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedMappingResult); err != nil { + return err + } + case *MutateOperationResponse_FeedResult: + b.EncodeVarint(40<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FeedResult); err != nil { + return err + } + case *MutateOperationResponse_LabelResult: + b.EncodeVarint(41<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LabelResult); err != nil { + return err + } + case *MutateOperationResponse_MediaFileResult: + b.EncodeVarint(42<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MediaFileResult); err != nil { + return err + } + case *MutateOperationResponse_RemarketingActionResult: + b.EncodeVarint(43<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RemarketingActionResult); err != nil { + return err + } + case *MutateOperationResponse_SharedCriterionResult: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SharedCriterionResult); err != nil { + return err + } + case *MutateOperationResponse_SharedSetResult: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SharedSetResult); err != nil { + return err + } + case *MutateOperationResponse_UserListResult: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserListResult); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MutateOperationResponse.Response has unexpected type %T", x) + } + return nil +} + +func _MutateOperationResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MutateOperationResponse) + switch tag { + case 17: // response.ad_group_ad_label_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupAdLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupAdLabelResult{msg} + return true, err + case 1: // response.ad_group_ad_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupAdResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupAdResult{msg} + return true, err + case 2: // response.ad_group_bid_modifier_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupBidModifierResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupBidModifierResult{msg} + return true, err + case 18: // response.ad_group_criterion_label_result + if wire != proto.WireBytes 
{ + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupCriterionLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupCriterionLabelResult{msg} + return true, err + case 3: // response.ad_group_criterion_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupCriterionResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupCriterionResult{msg} + return true, err + case 19: // response.ad_group_extension_setting_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupExtensionSettingResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupExtensionSettingResult{msg} + return true, err + case 20: // response.ad_group_feed_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupFeedResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupFeedResult{msg} + return true, err + case 21: // response.ad_group_label_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupLabelResult{msg} + return true, err + case 5: // response.ad_group_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdGroupResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdGroupResult{msg} + return true, err + case 22: // response.ad_parameter_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAdParameterResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AdParameterResult{msg} + return true, err + case 23: // response.asset_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateAssetResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_AssetResult{msg} + return true, err + case 6: // response.bidding_strategy_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateBiddingStrategyResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_BiddingStrategyResult{msg} + return true, err + case 7: // response.campaign_bid_modifier_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignBidModifierResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignBidModifierResult{msg} + return true, err + case 8: // response.campaign_budget_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignBudgetResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignBudgetResult{msg} + return true, err + case 13: // response.campaign_criterion_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignCriterionResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignCriterionResult{msg} + return true, err + case 26: // response.campaign_extension_setting_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignExtensionSettingResult) + err := b.DecodeMessage(msg) + m.Response = 
&MutateOperationResponse_CampaignExtensionSettingResult{msg} + return true, err + case 27: // response.campaign_feed_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignFeedResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignFeedResult{msg} + return true, err + case 28: // response.campaign_label_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignLabelResult{msg} + return true, err + case 10: // response.campaign_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignResult{msg} + return true, err + case 11: // response.campaign_shared_set_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCampaignSharedSetResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CampaignSharedSetResult{msg} + return true, err + case 12: // response.conversion_action_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateConversionActionResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_ConversionActionResult{msg} + return true, err + case 30: // response.customer_extension_setting_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCustomerExtensionSettingResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CustomerExtensionSettingResult{msg} + return true, err + case 31: // response.customer_feed_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCustomerFeedResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CustomerFeedResult{msg} + return true, err + case 32: // response.customer_label_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCustomerLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CustomerLabelResult{msg} + return true, err + case 34: // response.customer_negative_criterion_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCustomerNegativeCriteriaResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CustomerNegativeCriterionResult{msg} + return true, err + case 35: // response.customer_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateCustomerResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_CustomerResult{msg} + return true, err + case 36: // response.extension_feed_item_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateExtensionFeedItemResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_ExtensionFeedItemResult{msg} + return true, err + case 37: // response.feed_item_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateFeedItemResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_FeedItemResult{msg} + return true, err + case 38: // response.feed_item_target_result + if wire != proto.WireBytes { + return 
true, proto.ErrInternalBadWireType + } + msg := new(MutateFeedItemTargetResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_FeedItemTargetResult{msg} + return true, err + case 39: // response.feed_mapping_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateFeedMappingResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_FeedMappingResult{msg} + return true, err + case 40: // response.feed_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateFeedResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_FeedResult{msg} + return true, err + case 41: // response.label_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateLabelResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_LabelResult{msg} + return true, err + case 42: // response.media_file_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateMediaFileResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_MediaFileResult{msg} + return true, err + case 43: // response.remarketing_action_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateRemarketingActionResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_RemarketingActionResult{msg} + return true, err + case 14: // response.shared_criterion_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateSharedCriterionResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_SharedCriterionResult{msg} + return true, err + case 15: // response.shared_set_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateSharedSetResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_SharedSetResult{msg} + return true, err + case 16: // response.user_list_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MutateUserListResult) + err := b.DecodeMessage(msg) + m.Response = &MutateOperationResponse_UserListResult{msg} + return true, err + default: + return false, nil + } +} + +func _MutateOperationResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MutateOperationResponse) + // response + switch x := m.Response.(type) { + case *MutateOperationResponse_AdGroupAdLabelResult: + s := proto.Size(x.AdGroupAdLabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupAdResult: + s := proto.Size(x.AdGroupAdResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupBidModifierResult: + s := proto.Size(x.AdGroupBidModifierResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupCriterionLabelResult: + s := proto.Size(x.AdGroupCriterionLabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupCriterionResult: + s := proto.Size(x.AdGroupCriterionResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupExtensionSettingResult: + s := proto.Size(x.AdGroupExtensionSettingResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n 
+= s + case *MutateOperationResponse_AdGroupFeedResult: + s := proto.Size(x.AdGroupFeedResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupLabelResult: + s := proto.Size(x.AdGroupLabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdGroupResult: + s := proto.Size(x.AdGroupResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AdParameterResult: + s := proto.Size(x.AdParameterResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_AssetResult: + s := proto.Size(x.AssetResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_BiddingStrategyResult: + s := proto.Size(x.BiddingStrategyResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignBidModifierResult: + s := proto.Size(x.CampaignBidModifierResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignBudgetResult: + s := proto.Size(x.CampaignBudgetResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignCriterionResult: + s := proto.Size(x.CampaignCriterionResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignExtensionSettingResult: + s := proto.Size(x.CampaignExtensionSettingResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignFeedResult: + s := proto.Size(x.CampaignFeedResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignLabelResult: + s := proto.Size(x.CampaignLabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignResult: + s := proto.Size(x.CampaignResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CampaignSharedSetResult: + s := proto.Size(x.CampaignSharedSetResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_ConversionActionResult: + s := proto.Size(x.ConversionActionResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CustomerExtensionSettingResult: + s := proto.Size(x.CustomerExtensionSettingResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CustomerFeedResult: + s := proto.Size(x.CustomerFeedResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CustomerLabelResult: + s := proto.Size(x.CustomerLabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CustomerNegativeCriterionResult: + s := proto.Size(x.CustomerNegativeCriterionResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_CustomerResult: + s := proto.Size(x.CustomerResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_ExtensionFeedItemResult: + s := proto.Size(x.ExtensionFeedItemResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_FeedItemResult: + s := 
proto.Size(x.FeedItemResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_FeedItemTargetResult: + s := proto.Size(x.FeedItemTargetResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_FeedMappingResult: + s := proto.Size(x.FeedMappingResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_FeedResult: + s := proto.Size(x.FeedResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_LabelResult: + s := proto.Size(x.LabelResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_MediaFileResult: + s := proto.Size(x.MediaFileResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_RemarketingActionResult: + s := proto.Size(x.RemarketingActionResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_SharedCriterionResult: + s := proto.Size(x.SharedCriterionResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_SharedSetResult: + s := proto.Size(x.SharedSetResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MutateOperationResponse_UserListResult: + s := proto.Size(x.UserListResult) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*SearchGoogleAdsRequest)(nil), "google.ads.googleads.v1.services.SearchGoogleAdsRequest") + proto.RegisterType((*SearchGoogleAdsResponse)(nil), "google.ads.googleads.v1.services.SearchGoogleAdsResponse") + proto.RegisterType((*GoogleAdsRow)(nil), "google.ads.googleads.v1.services.GoogleAdsRow") + proto.RegisterType((*MutateGoogleAdsRequest)(nil), "google.ads.googleads.v1.services.MutateGoogleAdsRequest") + proto.RegisterType((*MutateGoogleAdsResponse)(nil), "google.ads.googleads.v1.services.MutateGoogleAdsResponse") + proto.RegisterType((*MutateOperation)(nil), "google.ads.googleads.v1.services.MutateOperation") + proto.RegisterType((*MutateOperationResponse)(nil), "google.ads.googleads.v1.services.MutateOperationResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GoogleAdsServiceClient is the client API for GoogleAdsService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GoogleAdsServiceClient interface { + // Returns all rows that match the search query. + Search(ctx context.Context, in *SearchGoogleAdsRequest, opts ...grpc.CallOption) (*SearchGoogleAdsResponse, error) + // Creates, updates, or removes resources. This method supports atomic + // transactions with multiple types of resources. For example, you can + // atomically create a campaign and a campaign budget, or perform up to + // thousands of mutates atomically. + // + // This method is essentially a wrapper around a series of mutate methods. 
The + // only features it offers over calling those methods directly are: + // - Atomic transactions + // - Temp resource names (described below) + // - Somewhat reduced latency over making a series of mutate calls. + // + // Note: Only resources that support atomic transactions are included, so this + // method can't replace all calls to individual services. + // + // ## Atomic Transaction Benefits + // + // Atomicity makes error handling much easier. If you're making a series of + // changes and one fails, it can leave your account in an inconsistent state. + // With atomicity, you either reach the desired state directly, or the request + // fails and you can retry. + // + // ## Temp Resource Names + // + // Temp resource names are a special type of resource name used to create a + // resource and reference that resource in the same request. For example, if a + // campaign budget is created with 'resource_name' equal to + // 'customers/123/campaignBudgets/-1', that resource name can be reused in + // the 'Campaign.budget' field in the same request. That way, the two + // resources are created and linked atomically. + // + // To create a temp resource name, put a negative number in the part of the + // name that the server would normally allocate. + // + // Note: + // - Resources must be created with a temp name before the name can be reused. + // For example, the previous CampaignBudget+Campaign example would fail if + // the mutate order was reversed. + // - Temp names are not remembered across requests. + // - There's no limit to the number of temp names in a request. + // - Each temp name must use a unique negative number, even if the resource + // types differ. + // + // ## Latency + // + // It's important to group mutates by resource type or the request may time + // out and fail. Latency is roughly equal to a series of calls to individual + // mutate methods, where each change in resource type is a new call. For + // example, mutating 10 campaigns then 10 ad groups is like 2 calls, while + // mutating 1 campaign, 1 ad group, 1 campaign, 1 ad group is like 4 calls. + Mutate(ctx context.Context, in *MutateGoogleAdsRequest, opts ...grpc.CallOption) (*MutateGoogleAdsResponse, error) +} + +type googleAdsServiceClient struct { + cc *grpc.ClientConn +} + +func NewGoogleAdsServiceClient(cc *grpc.ClientConn) GoogleAdsServiceClient { + return &googleAdsServiceClient{cc} +} + +func (c *googleAdsServiceClient) Search(ctx context.Context, in *SearchGoogleAdsRequest, opts ...grpc.CallOption) (*SearchGoogleAdsResponse, error) { + out := new(SearchGoogleAdsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GoogleAdsService/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *googleAdsServiceClient) Mutate(ctx context.Context, in *MutateGoogleAdsRequest, opts ...grpc.CallOption) (*MutateGoogleAdsResponse, error) { + out := new(MutateGoogleAdsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GoogleAdsService/Mutate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GoogleAdsServiceServer is the server API for GoogleAdsService service. +type GoogleAdsServiceServer interface { + // Returns all rows that match the search query. + Search(context.Context, *SearchGoogleAdsRequest) (*SearchGoogleAdsResponse, error) + // Creates, updates, or removes resources. This method supports atomic + // transactions with multiple types of resources. 
For example, you can + // atomically create a campaign and a campaign budget, or perform up to + // thousands of mutates atomically. + // + // This method is essentially a wrapper around a series of mutate methods. The + // only features it offers over calling those methods directly are: + // - Atomic transactions + // - Temp resource names (described below) + // - Somewhat reduced latency over making a series of mutate calls. + // + // Note: Only resources that support atomic transactions are included, so this + // method can't replace all calls to individual services. + // + // ## Atomic Transaction Benefits + // + // Atomicity makes error handling much easier. If you're making a series of + // changes and one fails, it can leave your account in an inconsistent state. + // With atomicity, you either reach the desired state directly, or the request + // fails and you can retry. + // + // ## Temp Resource Names + // + // Temp resource names are a special type of resource name used to create a + // resource and reference that resource in the same request. For example, if a + // campaign budget is created with 'resource_name' equal to + // 'customers/123/campaignBudgets/-1', that resource name can be reused in + // the 'Campaign.budget' field in the same request. That way, the two + // resources are created and linked atomically. + // + // To create a temp resource name, put a negative number in the part of the + // name that the server would normally allocate. + // + // Note: + // - Resources must be created with a temp name before the name can be reused. + // For example, the previous CampaignBudget+Campaign example would fail if + // the mutate order was reversed. + // - Temp names are not remembered across requests. + // - There's no limit to the number of temp names in a request. + // - Each temp name must use a unique negative number, even if the resource + // types differ. + // + // ## Latency + // + // It's important to group mutates by resource type or the request may time + // out and fail. Latency is roughly equal to a series of calls to individual + // mutate methods, where each change in resource type is a new call. For + // example, mutating 10 campaigns then 10 ad groups is like 2 calls, while + // mutating 1 campaign, 1 ad group, 1 campaign, 1 ad group is like 4 calls. 
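The MutateOperationResponse type registered above wraps each per-resource result in a generated oneof. As a rough illustration of how calling code usually distinguishes those wrapper types, here is a minimal Go sketch, kept separate from the vendored file and using only names declared in it (the import path and package name come from the generated header; the function itself is an assumption for illustration):

package main

import (
	"fmt"

	services "google.golang.org/genproto/googleapis/ads/googleads/v1/services"
)

// describeMutateResult reports which oneof branch of a MutateOperationResponse
// is populated by type-switching on the generated wrapper structs.
func describeMutateResult(resp *services.MutateOperationResponse) string {
	switch r := resp.Response.(type) {
	case *services.MutateOperationResponse_CampaignBudgetResult:
		return fmt.Sprintf("campaign budget result: %v", r.CampaignBudgetResult)
	case *services.MutateOperationResponse_CampaignResult:
		return fmt.Sprintf("campaign result: %v", r.CampaignResult)
	case *services.MutateOperationResponse_AdGroupResult:
		return fmt.Sprintf("ad group result: %v", r.AdGroupResult)
	case *services.MutateOperationResponse_UserListResult:
		return fmt.Sprintf("user list result: %v", r.UserListResult)
	case nil:
		return "no result set"
	default:
		return fmt.Sprintf("some other result type: %T", r)
	}
}

func main() {
	// A zero-value response exercises the nil branch.
	fmt.Println(describeMutateResult(&services.MutateOperationResponse{}))
}

The same pattern extends to every other Mutate*Result wrapper generated in this file.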
+ Mutate(context.Context, *MutateGoogleAdsRequest) (*MutateGoogleAdsResponse, error) +} + +func RegisterGoogleAdsServiceServer(s *grpc.Server, srv GoogleAdsServiceServer) { + s.RegisterService(&_GoogleAdsService_serviceDesc, srv) +} + +func _GoogleAdsService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchGoogleAdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GoogleAdsServiceServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GoogleAdsService/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GoogleAdsServiceServer).Search(ctx, req.(*SearchGoogleAdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GoogleAdsService_Mutate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateGoogleAdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GoogleAdsServiceServer).Mutate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GoogleAdsService/Mutate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GoogleAdsServiceServer).Mutate(ctx, req.(*MutateGoogleAdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GoogleAdsService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GoogleAdsService", + HandlerType: (*GoogleAdsServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Search", + Handler: _GoogleAdsService_Search_Handler, + }, + { + MethodName: "Mutate", + Handler: _GoogleAdsService_Mutate_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/google_ads_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/google_ads_service.proto", fileDescriptor_google_ads_service_75d9a9baabc9a843) +} + +var fileDescriptor_google_ads_service_75d9a9baabc9a843 = []byte{ + // 5126 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x5d, 0xcd, 0x73, 0xdd, 0x46, + 0x72, 0xcf, 0x93, 0x56, 0x36, 0x35, 0xa4, 0xf8, 0x31, 0x24, 0xc5, 0x11, 0xf5, 0x69, 0x4a, 0xb6, + 0x65, 0xd9, 0x26, 0x2d, 0xc9, 0x5f, 0xa2, 0x3e, 0x6c, 0x92, 0xfa, 0xb0, 0xd7, 0xd2, 0x9a, 0x01, + 0x6d, 0xd9, 0x6b, 0x7b, 0xf7, 0xed, 0x10, 0x18, 0x3e, 0xc2, 0x7c, 0x0f, 0x78, 0x02, 0xf0, 0x28, + 0xd1, 0x5e, 0x7b, 0x6d, 0x57, 0x52, 0x49, 0x55, 0x52, 0x5b, 0x95, 0xda, 0x4d, 0xaa, 0x72, 0xcd, + 0x31, 0xa7, 0x1c, 0xf3, 0x37, 0xe4, 0x9a, 0x5b, 0xaa, 0x72, 0x4b, 0x55, 0xaa, 0xf2, 0x07, 0xe4, + 0x94, 0x43, 0x0a, 0x83, 0xc1, 0x60, 0x66, 0x30, 0x78, 0xaf, 0x87, 0x7b, 0x7b, 0xe8, 0xc1, 0xaf, + 0xbb, 0x31, 0xe8, 0xee, 0xe9, 0x69, 0xf4, 0x90, 0xe8, 0x46, 0x27, 0x8e, 0x3b, 0x5d, 0xb6, 0x42, + 0x83, 0x74, 0xa5, 0xf8, 0x99, 0xff, 0xda, 0xbf, 0xba, 0x92, 0xb2, 0x64, 0x3f, 0xf4, 0x59, 0x49, + 0x6d, 0xd3, 0x20, 0x6d, 0x0b, 0xda, 0x72, 0x3f, 0x89, 0xb3, 0x18, 0x5f, 0x28, 0x46, 0x96, 0x69, + 0x90, 0x2e, 0x4b, 0xe8, 0xf2, 0xfe, 0xd5, 0xe5, 0x12, 0xba, 0xf8, 0x5a, 0x13, 0x73, 0x3f, 0xee, + 0xf5, 0xe2, 0x68, 0xa5, 0xc7, 0xb2, 0x24, 0xf4, 0xd3, 0x82, 0xdf, 0xe2, 0xeb, 0x23, 0xee, 0x4e, + 0x59, 0xa7, 0xc7, 0xa2, 
0xac, 0xbc, 0xfd, 0xed, 0xa6, 0xdb, 0x13, 0x96, 0xc6, 0x83, 0x24, 0x57, + 0x9d, 0xfa, 0x7e, 0x3c, 0x88, 0xb2, 0xf6, 0xf6, 0x20, 0xe8, 0xb0, 0x4c, 0xe0, 0xde, 0x73, 0xc5, + 0xb5, 0xfb, 0x49, 0xdc, 0x8f, 0x53, 0xda, 0x15, 0x0c, 0xde, 0x00, 0x30, 0x08, 0xda, 0x9d, 0x24, + 0x1e, 0xf4, 0x05, 0xe2, 0x3a, 0x1c, 0xd1, 0xa6, 0x81, 0x00, 0xdd, 0x70, 0x02, 0xb5, 0xbb, 0x74, + 0x9b, 0x95, 0x1a, 0xde, 0x71, 0x81, 0x0e, 0x82, 0x90, 0x45, 0x3e, 0x6b, 0xef, 0x87, 0xec, 0xa9, + 0xc0, 0xdf, 0x76, 0xc0, 0x6f, 0x87, 0x41, 0xbb, 0x17, 0x07, 0xe1, 0x4e, 0xc8, 0x12, 0x01, 0x5f, + 0x75, 0x80, 0xfb, 0x49, 0x98, 0xb1, 0x24, 0x8c, 0x23, 0x81, 0x7d, 0xff, 0x30, 0x58, 0xed, 0xe1, + 0xef, 0x1d, 0x8a, 0x43, 0x1a, 0xf6, 0x06, 0x5d, 0x9a, 0x55, 0x8a, 0xac, 0x3b, 0xb0, 0x61, 0xcf, + 0x32, 0x16, 0xa5, 0x9c, 0x0d, 0xcb, 0xb2, 0x30, 0xea, 0x08, 0x1e, 0x6f, 0x39, 0xf0, 0xd8, 0x61, + 0x2c, 0x70, 0xb0, 0xec, 0x12, 0xa6, 0x3e, 0xf9, 0x4d, 0x07, 0x5c, 0xed, 0x79, 0xdf, 0x04, 0x81, + 0xfb, 0x34, 0xa1, 0x3d, 0x96, 0xc9, 0x57, 0xfd, 0x2e, 0x08, 0x95, 0xfa, 0xbb, 0x2c, 0x18, 0x74, + 0x35, 0x1b, 0x83, 0x3c, 0x64, 0x87, 0xb5, 0x13, 0x1a, 0x75, 0x34, 0xdc, 0xeb, 0x00, 0x5c, 0x9a, + 0x4a, 0x6f, 0x07, 0x28, 0xb8, 0x1d, 0x06, 0x41, 0x18, 0x75, 0xda, 0x69, 0x96, 0xd0, 0x8c, 0x75, + 0x0e, 0xe0, 0x2f, 0x6f, 0x3b, 0xec, 0x76, 0x39, 0x92, 0x65, 0xd2, 0xd7, 0x01, 0xd1, 0xc1, 0xa7, + 0xbd, 0x3e, 0x0d, 0x3b, 0x11, 0xdc, 0x5b, 0x4b, 0xc4, 0x61, 0xbd, 0x55, 0xe2, 0x2d, 0xde, 0xfa, + 0x8e, 0x0b, 0x5c, 0x0d, 0xa4, 0xab, 0x0e, 0x40, 0xd3, 0xcd, 0xef, 0x1d, 0x06, 0x7b, 0x28, 0x27, + 0x95, 0x6c, 0xfe, 0x04, 0x27, 0x95, 0x3c, 0xdc, 0x9c, 0x54, 0xc2, 0x1c, 0x9d, 0x54, 0xe2, 0xd2, + 0x5d, 0x9a, 0xb0, 0xa0, 0xed, 0x64, 0xcd, 0x3e, 0x4d, 0x92, 0x90, 0x25, 0x6d, 0x3f, 0x8e, 0xd2, + 0x8c, 0x46, 0x99, 0xc3, 0x53, 0xee, 0x72, 0x5f, 0x4b, 0x33, 0x9a, 0x0d, 0xca, 0x45, 0xf6, 0x1a, + 0x00, 0xd6, 0x0d, 0xfd, 0x3d, 0xd5, 0x1e, 0x01, 0x0b, 0x97, 0x1f, 0x47, 0xfb, 0x2c, 0xe1, 0x2f, + 0x83, 0xfa, 0xca, 0xfb, 0x84, 0xd8, 0xe2, 0x20, 0xcd, 0xe2, 0x5e, 0x3b, 0x8c, 0x32, 0x96, 0xb0, + 0x34, 0x73, 0xf0, 0x3a, 0x0e, 0x74, 0x32, 0x7b, 0x81, 0x68, 0xfb, 0xdd, 0x90, 0xc9, 0x99, 0xbc, + 0xe5, 0x0c, 0x6c, 0x77, 0xc3, 0x68, 0xcf, 0xc1, 0x62, 0x4b, 0xf4, 0x9f, 0x62, 0xb1, 0x25, 0x0f, + 0x47, 0x8b, 0x2d, 0x61, 0xaa, 0xc5, 0xde, 0x76, 0xc0, 0xf5, 0x68, 0x44, 0x3b, 0x39, 0xbe, 0x7a, + 0xe2, 0x0d, 0x07, 0x78, 0xc4, 0x3a, 0x34, 0x0b, 0xf7, 0x59, 0x2d, 0x5e, 0x00, 0x74, 0x08, 0x58, + 0x46, 0xc3, 0x6e, 0xbb, 0xdf, 0xa5, 0x3e, 0xcb, 0xf3, 0x44, 0xd5, 0x24, 0x01, 0xef, 0x2c, 0x08, + 0xd3, 0x7e, 0x97, 0x1e, 0xb4, 0xf7, 0xd8, 0xc1, 0xd3, 0x38, 0x09, 0x54, 0x34, 0xc0, 0x54, 0x82, + 0xb8, 0x47, 0xc3, 0xa8, 0xed, 0xe7, 0x0b, 0x48, 0x9c, 0x94, 0x4b, 0xc8, 0x47, 0x00, 0xe0, 0x41, + 0x44, 0x7b, 0xa1, 0xdf, 0x4e, 0x19, 0x4d, 0xfc, 0x5d, 0x91, 0x65, 0xf3, 0x9f, 0x19, 0x4b, 0x7a, + 0xaa, 0x16, 0x00, 0xcb, 0x61, 0xcf, 0xfa, 0x34, 0x0a, 0x58, 0x9e, 0x0c, 0x46, 0x7c, 0x49, 0xeb, + 0x53, 0x7d, 0xf1, 0xbc, 0x09, 0xe1, 0x51, 0x1a, 0x5d, 0x6e, 0x3a, 0xed, 0x30, 0x63, 0x3d, 0x01, + 0x7e, 0x6d, 0x34, 0x58, 0xb1, 0xb6, 0xab, 0xb0, 0xbb, 0x55, 0x01, 0xef, 0x3a, 0x40, 0xda, 0x19, + 0x4d, 0xaa, 0xa5, 0xe8, 0x4d, 0x20, 0xb2, 0x47, 0xfb, 0xfd, 0xca, 0x8f, 0x6e, 0x03, 0x51, 0xdc, + 0xa4, 0x76, 0xe3, 0x6e, 0xc0, 0x12, 0x75, 0x32, 0x01, 0x59, 0x7d, 0x87, 0x45, 0x06, 0xe8, 0x26, + 0x04, 0x14, 0x8b, 0xa7, 0x33, 0x83, 0xf8, 0x3b, 0x20, 0x70, 0x27, 0xa1, 0xfd, 0xdd, 0xd0, 0x77, + 0xb4, 0xff, 0x22, 0x2d, 0xb4, 0x7a, 0x0f, 0xe0, 0xbd, 0xec, 0xc6, 0x19, 0xeb, 0x8a, 0xd4, 0x52, + 0x41, 0xde, 0x81, 0x22, 0xfb, 0x2c, 0xd9, 0x89, 
0x93, 0x1e, 0x35, 0x52, 0x1b, 0xc0, 0x7b, 0x2d, + 0xfd, 0xb5, 0xdf, 0xa5, 0x0e, 0xc1, 0x42, 0x45, 0xb5, 0x8d, 0xdd, 0x9a, 0x2b, 0xdc, 0x48, 0xe7, + 0x6e, 0x39, 0xc2, 0xc5, 0x05, 0x3c, 0x31, 0xd2, 0xd0, 0x32, 0x62, 0xea, 0x6c, 0x1c, 0x26, 0xce, + 0x2d, 0xb7, 0x56, 0x17, 0x86, 0x1b, 0x90, 0xdb, 0xed, 0x81, 0x08, 0x06, 0xed, 0x0c, 0x72, 0x98, + 0x7b, 0x26, 0xd3, 0x8d, 0x7d, 0x9e, 0x25, 0x3a, 0x9a, 0x62, 0xb1, 0x78, 0x05, 0x76, 0x27, 0x00, + 0x64, 0x42, 0x3d, 0x16, 0x84, 0xb4, 0xbd, 0x13, 0x76, 0x45, 0x85, 0x64, 0xf1, 0x2e, 0x00, 0x13, + 0x6f, 0x87, 0x5d, 0xd6, 0xa6, 0xfd, 0xbe, 0x5c, 0x3c, 0xcc, 0x07, 0xbe, 0x03, 0xe6, 0x12, 0xb0, + 0xfd, 0xd0, 0xaf, 0x4d, 0x18, 0x44, 0xf3, 0x41, 0x46, 0x33, 0xd6, 0xfe, 0x3a, 0xde, 0x16, 0x98, + 0x0f, 0x47, 0x63, 0xe2, 0x3e, 0x4b, 0x68, 0xc6, 0xb7, 0x3f, 0x07, 0x69, 0x1e, 0x91, 0xcb, 0x8c, + 0xce, 0x10, 0x0f, 0xb0, 0xe8, 0x3e, 0x0d, 0x83, 0x76, 0x9c, 0x74, 0x68, 0x54, 0xad, 0x84, 0xe6, + 0xf2, 0x77, 0x0b, 0xc2, 0x26, 0x61, 0x51, 0x46, 0xbb, 0x22, 0x85, 0x55, 0xd1, 0x80, 0xe7, 0xe9, + 0x27, 0x71, 0x30, 0xf0, 0xb3, 0x76, 0xb9, 0x1d, 0x6c, 0x7a, 0x1d, 0xab, 0x70, 0x56, 0xb5, 0x78, + 0x08, 0x48, 0xc1, 0x12, 0xe6, 0xc7, 0xbd, 0x1e, 0x8b, 0x02, 0x75, 0x9f, 0xb3, 0x0a, 0xc1, 0xf5, + 0x68, 0xb2, 0xc7, 0xf8, 0x0b, 0xd1, 0x72, 0x6a, 0x40, 0xf4, 0x6e, 0x98, 0x72, 0x08, 0xb2, 0xd8, + 0xa1, 0x98, 0xe9, 0xda, 0x35, 0x30, 0xb2, 0xda, 0xdb, 0xac, 0x41, 0x30, 0x31, 0x5f, 0xbf, 0x9b, + 0x96, 0x0b, 0xc0, 0xf4, 0x66, 0x71, 0x3f, 0xf4, 0x0f, 0xe1, 0x21, 0x05, 0x4e, 0x91, 0x05, 0x08, + 0x43, 0x83, 0x94, 0x25, 0xe6, 0x46, 0xe5, 0x2a, 0x10, 0xd6, 0x0d, 0x25, 0x04, 0x10, 0x95, 0xf7, + 0xc3, 0x80, 0xc5, 0xa3, 0x2a, 0x68, 0xb2, 0xa2, 0x5b, 0x2b, 0x1b, 0xea, 0x85, 0xdd, 0x66, 0x6b, + 0xb3, 0x72, 0xd0, 0xb1, 0x77, 0xe1, 0x58, 0xb5, 0x16, 0x61, 0x70, 0x79, 0x00, 0xe7, 0x62, 0x14, + 0x01, 0x0d, 0x46, 0x6b, 0x87, 0x61, 0xa4, 0xb3, 0xf8, 0x10, 0xce, 0xa2, 0xb6, 0x61, 0x33, 0x58, + 0xdd, 0x82, 0xb3, 0xe2, 0x89, 0xa7, 0x8e, 0xbe, 0x0d, 0x47, 0xdb, 0xa6, 0xe2, 0x1d, 0x38, 0x5c, + 0x07, 0xde, 0x84, 0x00, 0x65, 0x61, 0xd0, 0x00, 0xbf, 0x39, 0x1a, 0x9c, 0xa6, 0x2c, 0x33, 0x50, + 0xef, 0x8d, 0x44, 0x99, 0x45, 0x3b, 0x67, 0x33, 0xb4, 0x96, 0xc4, 0x0c, 0x2e, 0x77, 0x1c, 0xb8, + 0x14, 0x9f, 0x0a, 0x5c, 0xad, 0xcf, 0x56, 0xe4, 0x72, 0xb4, 0xbe, 0xe6, 0x02, 0x97, 0xb3, 0xf5, + 0x69, 0x75, 0x2e, 0x67, 0xeb, 0xd3, 0xcb, 0x5d, 0xce, 0xd6, 0x57, 0x55, 0xbd, 0x34, 0xe0, 0xba, + 0x03, 0x50, 0x2e, 0x29, 0x06, 0x8f, 0xd1, 0x21, 0xb1, 0x56, 0x90, 0x72, 0x7f, 0x0d, 0x8d, 0x55, + 0x1b, 0xf7, 0xd7, 0xa0, 0x16, 0x6f, 0xdc, 0x5f, 0x83, 0x56, 0xc3, 0x31, 0xe0, 0x3f, 0x87, 0xc3, + 0xeb, 0xb5, 0x18, 0xf7, 0x57, 0x5a, 0xf2, 0x72, 0x7d, 0xa5, 0x96, 0x22, 0x84, 0xc1, 0xe3, 0xdd, + 0x91, 0x3c, 0x9a, 0x90, 0xef, 0x39, 0x20, 0xc5, 0x36, 0xdc, 0x35, 0x1e, 0xaa, 0xb5, 0x06, 0x03, + 0x7c, 0x1d, 0x06, 0x76, 0x0d, 0xa2, 0xb6, 0x77, 0x3d, 0xfa, 0xd3, 0x6e, 0xb5, 0x61, 0x71, 0x0e, + 0x5c, 0xf5, 0x74, 0xd3, 0x79, 0x9a, 0xcd, 0xdc, 0xd1, 0x59, 0xfd, 0x46, 0x7f, 0x1f, 0x6d, 0x1c, + 0x32, 0xc7, 0x32, 0x90, 0x67, 0x4a, 0x64, 0x3f, 0x5c, 0xa1, 0x51, 0x14, 0x67, 0x3c, 0x0b, 0x2f, + 0xab, 0xe1, 0xe2, 0x8b, 0xf7, 0x0a, 0xbf, 0xda, 0x1e, 0xec, 0xac, 0xec, 0x84, 0xac, 0x9b, 0xbf, + 0xe9, 0xb4, 0x2c, 0x76, 0x2e, 0x88, 0x3b, 0x92, 0xbe, 0xbf, 0xa2, 0x16, 0xd2, 0x97, 0xfe, 0xa5, + 0x85, 0x4e, 0x6e, 0xf1, 0x34, 0xfb, 0x01, 0xbf, 0x63, 0x2d, 0x48, 0x3d, 0xf6, 0x64, 0xc0, 0xd2, + 0x0c, 0x9f, 0x47, 0xe3, 0xd2, 0x51, 0xc2, 0x80, 0xb4, 0x2e, 0xb4, 0x2e, 0x1f, 0xf7, 0x50, 0x49, + 0xfa, 0x30, 0xc0, 0x73, 0xe8, 0xd8, 0x93, 0x01, 0x4b, 0x0e, 0xc8, 0x11, 
0x3e, 0x54, 0x5c, 0xe0, + 0xb3, 0x08, 0xf1, 0x5d, 0x75, 0x16, 0xef, 0xb1, 0x88, 0x1c, 0xe5, 0x43, 0xc7, 0x73, 0xca, 0x27, + 0x39, 0x01, 0x9f, 0x46, 0xfc, 0xa2, 0x9d, 0x86, 0xdf, 0x30, 0xf2, 0xb3, 0x0b, 0xad, 0xcb, 0xc7, + 0xbc, 0xb1, 0x9c, 0xb0, 0x15, 0x7e, 0xc3, 0xf0, 0x45, 0x74, 0x62, 0x9f, 0x76, 0xc3, 0x20, 0xdf, + 0xf4, 0xc5, 0x51, 0xf7, 0x80, 0x1c, 0xbb, 0xd0, 0xba, 0x3c, 0xe6, 0x4d, 0x94, 0xc4, 0x8f, 0xa3, + 0xee, 0xc1, 0xd2, 0xff, 0xb6, 0xd0, 0x42, 0x4d, 0xe5, 0xb4, 0x1f, 0x47, 0x29, 0xc3, 0x1f, 0xa0, + 0xe7, 0x13, 0x96, 0x0e, 0xba, 0x59, 0x4a, 0x5a, 0x17, 0x8e, 0x5e, 0x1e, 0xbf, 0xb6, 0xbc, 0x3c, + 0xaa, 0x1b, 0x60, 0xb9, 0xe2, 0x12, 0x3f, 0xf5, 0x4a, 0x38, 0x7e, 0x09, 0x4d, 0x45, 0xec, 0x59, + 0xd6, 0x56, 0x9e, 0xa5, 0x78, 0xcc, 0x13, 0x39, 0x79, 0x53, 0x3e, 0xcf, 0x32, 0x9a, 0xcd, 0xe2, + 0x7c, 0x73, 0x27, 0x80, 0x6d, 0xfe, 0x89, 0x9e, 0x3f, 0xf7, 0x51, 0x6f, 0x86, 0x0f, 0x79, 0xc5, + 0xc8, 0x46, 0x3e, 0x80, 0x6f, 0x20, 0x54, 0xbd, 0x1d, 0xfe, 0x7c, 0xe3, 0xd7, 0x16, 0x4b, 0x25, + 0xcb, 0x17, 0xb8, 0x7c, 0x3f, 0xbf, 0xe5, 0x11, 0x4d, 0xf7, 0xbc, 0xe3, 0x3b, 0xe5, 0xcf, 0xa5, + 0xff, 0xbc, 0x8f, 0x26, 0x54, 0x65, 0xf1, 0x67, 0x68, 0x52, 0x6f, 0x09, 0x20, 0x57, 0x38, 0xbf, + 0x37, 0x1a, 0x1f, 0x5a, 0xa6, 0xe6, 0xcb, 0x6b, 0x05, 0x70, 0x9d, 0xe3, 0xbc, 0x13, 0x54, 0xbd, + 0xc4, 0x7d, 0xb4, 0xd0, 0xd0, 0x6b, 0x40, 0x5e, 0xe5, 0x12, 0xde, 0x75, 0x95, 0xb0, 0x29, 0xf0, + 0xde, 0x3c, 0xb5, 0x91, 0xf1, 0x3d, 0x34, 0x56, 0xa6, 0x79, 0x7c, 0xee, 0xc6, 0xaf, 0x5d, 0x81, + 0x88, 0x08, 0x1e, 0xe4, 0x08, 0xef, 0x79, 0x5a, 0xfc, 0xc0, 0x0f, 0xd1, 0xb8, 0xb2, 0x07, 0x20, + 0xd3, 0x9c, 0xd3, 0x6b, 0x70, 0x4e, 0x6b, 0x81, 0x77, 0x9c, 0x96, 0x3f, 0xf1, 0x57, 0x68, 0xa6, + 0xb6, 0x27, 0x21, 0xcf, 0x38, 0xcf, 0xab, 0x2e, 0x3c, 0x1f, 0xe6, 0x40, 0x6f, 0x92, 0x6a, 0xd7, + 0x78, 0x0f, 0x9d, 0xb4, 0x77, 0x3b, 0x90, 0x1b, 0x5c, 0xc4, 0xdb, 0x0e, 0x22, 0x04, 0xfc, 0x71, + 0xc8, 0x9e, 0x7a, 0xb3, 0xb4, 0x4e, 0xc4, 0xbb, 0x68, 0xde, 0xba, 0xc1, 0x21, 0x84, 0xcb, 0x7a, + 0x0b, 0x2e, 0x6b, 0x3d, 0x0c, 0x1e, 0x09, 0xb0, 0x87, 0x69, 0x8d, 0x86, 0x29, 0xc2, 0xf5, 0xbd, + 0x0b, 0x99, 0xe1, 0x62, 0xae, 0xc3, 0xc5, 0x6c, 0x94, 0x50, 0x6f, 0x9a, 0x1a, 0x14, 0xfc, 0x04, + 0x91, 0xa6, 0x7d, 0x16, 0x39, 0x80, 0xdb, 0xa7, 0xc1, 0xb6, 0x78, 0x4b, 0xf3, 0xd4, 0x46, 0xc6, + 0x3f, 0xb4, 0xd0, 0xd9, 0xa1, 0xed, 0x19, 0x24, 0xe2, 0x82, 0x6f, 0x1f, 0x42, 0xf0, 0x96, 0x64, + 0xe2, 0x2d, 0xd2, 0xc6, 0x31, 0xfc, 0x14, 0x2d, 0x36, 0xef, 0xe8, 0x48, 0x9f, 0x8b, 0x5f, 0x85, + 0x8b, 0xbf, 0x57, 0xb2, 0xd8, 0x2a, 0x38, 0x78, 0x0b, 0xd4, 0x3e, 0x80, 0x3d, 0x74, 0x42, 0xdb, + 0xff, 0x91, 0x0d, 0x2e, 0x6b, 0x19, 0x2e, 0xeb, 0x3e, 0x63, 0x81, 0x37, 0x4e, 0xab, 0x0b, 0xfc, + 0x29, 0x9a, 0xd4, 0x77, 0x85, 0x24, 0xe5, 0x4c, 0x57, 0xe0, 0x4c, 0x8b, 0xf7, 0x35, 0x41, 0x95, + 0x2b, 0x1c, 0xa0, 0x59, 0x4b, 0x2b, 0x09, 0xd9, 0xe3, 0xbc, 0xdf, 0x84, 0xf3, 0x56, 0x5e, 0xc9, + 0x0c, 0x35, 0x49, 0xd8, 0x43, 0x13, 0xea, 0xd6, 0x92, 0xfc, 0xd4, 0x72, 0x98, 0x90, 0xcd, 0x12, + 0x96, 0x4f, 0x88, 0xbc, 0xe0, 0x13, 0xa2, 0xf5, 0x95, 0x90, 0x37, 0xe0, 0x13, 0xd2, 0x61, 0x5e, + 0x8e, 0xe3, 0xee, 0x3f, 0x41, 0x95, 0x2b, 0xfc, 0x25, 0x9a, 0x36, 0x1b, 0x5d, 0xc8, 0x2f, 0x1d, + 0x22, 0xd8, 0x96, 0x40, 0x72, 0xd6, 0x93, 0x54, 0xbb, 0xc6, 0x5f, 0xa0, 0x29, 0xe3, 0x03, 0x23, + 0xf9, 0x12, 0xcc, 0xfb, 0x2e, 0x47, 0x6e, 0x08, 0xa0, 0x37, 0x19, 0x68, 0xd7, 0xf8, 0x0e, 0x3a, + 0xc6, 0x77, 0xe0, 0x24, 0xe4, 0x1c, 0x2f, 0x43, 0xb4, 0xcd, 0xef, 0xf7, 0x0a, 0x18, 0xfe, 0x15, + 0x9a, 0x36, 0xf7, 0xe2, 0x04, 0x73, 0x56, 0xd7, 0x00, 0xac, 0xd6, 0x0b, 0xe8, 0x96, 0x40, 0x7a, + 
0x53, 0xdb, 0x3a, 0x01, 0x7f, 0x82, 0x4e, 0x68, 0x5d, 0x36, 0xe4, 0x15, 0xf0, 0xdb, 0x5a, 0x2f, + 0x70, 0x5b, 0x39, 0xcc, 0x9b, 0xd8, 0x56, 0xae, 0xf2, 0x09, 0x35, 0x76, 0xee, 0x64, 0x16, 0x3c, + 0xa1, 0x1b, 0x02, 0x29, 0x96, 0xf4, 0x49, 0x5f, 0xbb, 0xc6, 0x0f, 0xd0, 0x58, 0x49, 0xe1, 0x99, + 0xcc, 0xf8, 0xb5, 0x57, 0x1d, 0x98, 0x7a, 0x12, 0x8c, 0x7b, 0xe8, 0xa4, 0xbd, 0xef, 0x87, 0xdc, + 0xe3, 0x6c, 0xdf, 0x71, 0x60, 0xab, 0x2d, 0x5c, 0x73, 0xbe, 0x85, 0x8a, 0xbf, 0x46, 0xf3, 0xd6, + 0x9a, 0x08, 0x59, 0x04, 0xaf, 0x92, 0x72, 0x66, 0x94, 0xa5, 0x6b, 0xd6, 0xaf, 0x13, 0xb1, 0x8f, + 0x70, 0xbd, 0xf2, 0x41, 0xe6, 0xc0, 0xd1, 0xa3, 0x14, 0x54, 0x2d, 0x5e, 0x33, 0xbe, 0x49, 0xc2, + 0x3f, 0xb5, 0xd0, 0xd9, 0xa1, 0x4d, 0x44, 0x24, 0xe6, 0x02, 0xef, 0x1c, 0x46, 0xa0, 0x12, 0xb8, + 0x4e, 0xfb, 0xcd, 0x83, 0xf8, 0x00, 0x2d, 0x36, 0x17, 0x68, 0xc8, 0x13, 0xae, 0xc0, 0x4d, 0x07, + 0x05, 0x6a, 0xab, 0x09, 0xf1, 0x1b, 0x46, 0x72, 0xd7, 0xd1, 0x0a, 0x3a, 0xe4, 0x3d, 0xb0, 0xeb, + 0x94, 0xd2, 0xf8, 0x7a, 0x32, 0xe1, 0x2b, 0x57, 0x79, 0x2e, 0xac, 0x17, 0x7a, 0x48, 0x17, 0x9c, + 0x0b, 0x97, 0x6c, 0x8b, 0x15, 0x45, 0x6a, 0x27, 0x97, 0x14, 0x4b, 0x25, 0x87, 0x9c, 0x73, 0x36, + 0x8a, 0x2d, 0x0e, 0xde, 0x62, 0x59, 0x65, 0x14, 0x92, 0x94, 0x87, 0x2b, 0xb3, 0x43, 0x8a, 0xac, + 0x83, 0xc3, 0xd5, 0x46, 0x01, 0xdd, 0x10, 0x48, 0x6f, 0xca, 0xd7, 0x09, 0x7c, 0xce, 0xd5, 0x36, + 0x2a, 0xf2, 0x22, 0x7c, 0xce, 0x39, 0x6e, 0x8b, 0xc3, 0xbc, 0x09, 0x5f, 0xb9, 0xc2, 0xbf, 0x41, + 0x33, 0xb5, 0x02, 0x15, 0xe9, 0x80, 0x33, 0xbd, 0x0d, 0x89, 0x5d, 0xe3, 0x50, 0x6f, 0xda, 0x37, + 0x28, 0xf8, 0x23, 0x84, 0xaa, 0x3e, 0x2e, 0xf2, 0x0d, 0x38, 0x9d, 0xdf, 0xc8, 0x41, 0x3c, 0xa8, + 0x1c, 0xf7, 0xcb, 0x9f, 0x3c, 0xba, 0xea, 0x5d, 0x5a, 0x64, 0x17, 0x1e, 0x5d, 0x39, 0xf2, 0x43, + 0x01, 0xf4, 0x26, 0x7d, 0xed, 0x9a, 0x47, 0x57, 0xb1, 0x33, 0x26, 0x2d, 0x78, 0x74, 0x15, 0x10, + 0x4f, 0x82, 0x79, 0xb8, 0xb3, 0x75, 0x2d, 0x91, 0xdb, 0xf0, 0x70, 0x27, 0xf0, 0x8f, 0x0a, 0xf8, + 0xc3, 0x30, 0xda, 0xf3, 0x66, 0xfd, 0x3a, 0x11, 0x77, 0xd0, 0x9c, 0xad, 0x25, 0x8c, 0xdc, 0x01, + 0xef, 0x09, 0x4a, 0x51, 0x1b, 0x1c, 0xcd, 0x25, 0x61, 0xbf, 0x46, 0xab, 0x66, 0x5e, 0x0a, 0x22, + 0xf7, 0x1d, 0x67, 0xbe, 0xe4, 0x57, 0xce, 0x7c, 0x79, 0xcd, 0x23, 0x59, 0x63, 0x8d, 0x93, 0x24, + 0xf0, 0x48, 0x26, 0x98, 0x58, 0x22, 0x59, 0xc3, 0x08, 0xf7, 0x2a, 0xb5, 0x26, 0x4a, 0xde, 0x87, + 0x7b, 0x95, 0xc0, 0x89, 0x48, 0xa6, 0x5c, 0xf1, 0x48, 0xa6, 0xd5, 0x4a, 0xc9, 0x6f, 0xe1, 0x91, + 0x4c, 0x00, 0xcb, 0x48, 0xa6, 0x5e, 0xe2, 0xdf, 0xa2, 0xd3, 0x43, 0xaa, 0xa8, 0xe4, 0x73, 0x2e, + 0xe5, 0x96, 0x83, 0x94, 0x5f, 0x08, 0x26, 0xd5, 0x72, 0x77, 0xca, 0x6f, 0x1a, 0xca, 0x0d, 0xdb, + 0xda, 0x0a, 0x47, 0xf6, 0xc1, 0x86, 0x7d, 0x97, 0xe3, 0x37, 0x4b, 0x78, 0xb1, 0xdb, 0x0d, 0xea, + 0xc4, 0xdc, 0xb0, 0x6d, 0x7d, 0x73, 0x64, 0x05, 0x6c, 0xd8, 0x77, 0x0b, 0xf8, 0x47, 0x05, 0x9a, + 0x4b, 0xc2, 0x41, 0x8d, 0x86, 0x7f, 0xdf, 0x42, 0x17, 0x01, 0xad, 0x72, 0xe4, 0x6b, 0x2e, 0x78, + 0x0d, 0x22, 0xb8, 0xe0, 0x56, 0x14, 0xb8, 0xd6, 0x82, 0xb4, 0xf8, 0xf1, 0x09, 0x4b, 0x7a, 0x5c, + 0x89, 0x73, 0xc1, 0xd0, 0x71, 0xfc, 0x0c, 0x2d, 0x36, 0x77, 0xdb, 0x91, 0x1f, 0x5a, 0xe0, 0x5d, + 0xe2, 0x3d, 0xc1, 0xe5, 0x61, 0xc1, 0x64, 0x93, 0x8a, 0xed, 0xc5, 0x02, 0xb3, 0x0f, 0xe4, 0xeb, + 0xa4, 0xa5, 0x3c, 0x4e, 0x3e, 0x05, 0xaf, 0x93, 0xd2, 0xbd, 0x72, 0x2f, 0xf8, 0x30, 0x63, 0x3d, + 0x6f, 0x86, 0x99, 0x24, 0x7c, 0x13, 0xfd, 0x8c, 0x7b, 0xda, 0x32, 0x67, 0xfb, 0x32, 0x80, 0x2d, + 0xf7, 0x30, 0x0e, 0xc2, 0x1f, 0xa0, 0xe3, 0x95, 0x62, 0xd7, 0xc0, 0x51, 0x5a, 0xea, 0x33, 0xb6, + 0x53, 0xaa, 0xf1, 0x25, 
0x9a, 0x36, 0xab, 0xf1, 0x24, 0x03, 0x47, 0xb4, 0x92, 0xe1, 0x27, 0x1c, + 0xe8, 0x4d, 0xee, 0x68, 0xd7, 0xf8, 0xcf, 0xd1, 0x84, 0x5a, 0xa9, 0x27, 0xab, 0xe0, 0xdd, 0x65, + 0xce, 0xf8, 0x51, 0x81, 0xf2, 0xc6, 0x77, 0xaa, 0x8b, 0xdc, 0xf9, 0xac, 0x2d, 0x83, 0x84, 0x82, + 0x9d, 0x2f, 0xe7, 0xbd, 0x59, 0xc1, 0x0b, 0xe7, 0xdb, 0xa9, 0x13, 0xf1, 0x2f, 0xd0, 0xb8, 0xd2, + 0x5f, 0x48, 0x2e, 0x73, 0x09, 0xaf, 0x03, 0x24, 0x3c, 0xe0, 0x28, 0xce, 0x18, 0x75, 0xe4, 0xef, + 0xdc, 0xb0, 0x2c, 0xad, 0x87, 0x64, 0x01, 0x6c, 0x58, 0x0f, 0x58, 0x5c, 0xcc, 0xac, 0xcc, 0x8f, + 0x66, 0x3a, 0x26, 0x29, 0x5f, 0xa2, 0x8c, 0x1e, 0x45, 0xf2, 0x1d, 0xf8, 0x85, 0x3e, 0x90, 0xc8, + 0x62, 0x9f, 0xdc, 0xd1, 0xae, 0xf3, 0x70, 0x64, 0x6b, 0x63, 0x24, 0x4f, 0xc1, 0xe1, 0x88, 0x57, + 0x20, 0xf4, 0xc0, 0x87, 0x3b, 0x35, 0x5a, 0x6e, 0x96, 0x66, 0xc7, 0x23, 0xb9, 0x0e, 0x7e, 0x8a, + 0x0f, 0x72, 0x28, 0x97, 0x54, 0x3c, 0xc5, 0xae, 0x76, 0x9d, 0xef, 0xfb, 0xec, 0x4d, 0x91, 0xe4, + 0x01, 0x78, 0xdf, 0xc7, 0x45, 0x6c, 0x56, 0xf8, 0x62, 0xdf, 0xb7, 0x6b, 0xa1, 0xe6, 0x5e, 0xa0, + 0xc5, 0xee, 0x79, 0xb0, 0x17, 0xa8, 0x41, 0x7b, 0x7c, 0x4f, 0x89, 0xd6, 0x0a, 0xcb, 0x7e, 0x97, + 0x46, 0xe4, 0x82, 0x2b, 0xcb, 0xcd, 0x2e, 0x8d, 0x24, 0xcb, 0xfc, 0x22, 0x77, 0x2c, 0x6b, 0xd3, + 0x25, 0x79, 0x01, 0xec, 0x58, 0x0a, 0x6f, 0xb9, 0xdb, 0x9e, 0xdd, 0xab, 0x13, 0x79, 0x0d, 0x72, + 0x68, 0x93, 0x25, 0x59, 0x02, 0xd7, 0x20, 0x15, 0xa1, 0xe5, 0x52, 0x2d, 0x48, 0xde, 0xe2, 0x5e, + 0xe3, 0x18, 0xde, 0x35, 0x1e, 0x57, 0xd6, 0xec, 0x2f, 0x82, 0x4d, 0x59, 0x91, 0x5c, 0x96, 0xef, + 0xf1, 0x5e, 0x8d, 0x96, 0xfb, 0x8c, 0xad, 0x1d, 0x95, 0x5c, 0x3a, 0x8c, 0xa0, 0xf2, 0xd1, 0x54, + 0x41, 0xe5, 0x23, 0xdd, 0x41, 0xc7, 0x8a, 0x2c, 0xeb, 0x4d, 0x70, 0xa1, 0xa9, 0xc8, 0xae, 0x0a, + 0x18, 0xfe, 0x35, 0x9a, 0xa9, 0x2f, 0xb4, 0xdf, 0x83, 0xb7, 0x6e, 0xe6, 0xfa, 0x3a, 0xd5, 0x35, + 0xd6, 0xd5, 0xdf, 0x70, 0xfe, 0x7a, 0xcb, 0x29, 0x79, 0x07, 0xbc, 0xc9, 0x7a, 0x28, 0xb0, 0x32, + 0xf6, 0x4d, 0x77, 0x0d, 0x4a, 0x9e, 0xc6, 0x6a, 0x9d, 0xa9, 0xe4, 0x5b, 0x70, 0x1a, 0xfb, 0x50, + 0xe0, 0x8a, 0xca, 0x63, 0x57, 0xb9, 0xca, 0xc3, 0x85, 0xbd, 0x71, 0x95, 0xbc, 0x05, 0x0e, 0x17, + 0xc5, 0x66, 0x25, 0xd0, 0x03, 0xdf, 0x5c, 0xcf, 0x42, 0xcd, 0x77, 0x8a, 0xd5, 0x67, 0x63, 0xf2, + 0x05, 0x78, 0xa7, 0xf8, 0x28, 0x07, 0xdd, 0x0f, 0xbb, 0xcc, 0x3b, 0xde, 0x2b, 0x7f, 0xe2, 0xef, + 0xd0, 0x99, 0x61, 0x0d, 0xb0, 0xe4, 0x33, 0x70, 0xaa, 0xfc, 0x88, 0xb3, 0x59, 0xeb, 0xf7, 0xcb, + 0xc2, 0xa6, 0x7c, 0x0f, 0xa7, 0x7a, 0x4d, 0x43, 0x7c, 0xea, 0xac, 0x9d, 0xb3, 0x64, 0x1b, 0x3e, + 0x75, 0x9c, 0xc1, 0x5d, 0x8e, 0x97, 0x32, 0xe7, 0x7a, 0x16, 0x2a, 0x9f, 0x3a, 0xd9, 0x68, 0x4b, + 0x7e, 0x07, 0x9f, 0x3a, 0x0e, 0xfa, 0x79, 0xbc, 0xed, 0x1d, 0xef, 0x95, 0x3f, 0xf1, 0xdf, 0xb6, + 0xd0, 0x0b, 0x23, 0x5b, 0x70, 0xc9, 0x63, 0x70, 0x3e, 0xfc, 0x71, 0xc9, 0x6b, 0x8b, 0xb3, 0x7a, + 0x5c, 0x70, 0x92, 0x4f, 0x74, 0x2e, 0x1e, 0x3a, 0x8e, 0x7f, 0x87, 0xce, 0x0e, 0xed, 0xe2, 0x25, + 0x3f, 0xb6, 0xc0, 0xef, 0x72, 0x93, 0x86, 0xc1, 0xc7, 0x05, 0x1f, 0x23, 0x2b, 0x3f, 0xd5, 0x6f, + 0x1a, 0xca, 0xe3, 0x98, 0xad, 0xff, 0x97, 0xbc, 0x0e, 0x8e, 0x63, 0x9b, 0x02, 0x5e, 0x14, 0x5d, + 0x8a, 0xb5, 0xbf, 0x5f, 0xa3, 0xf1, 0x89, 0x1f, 0xd9, 0x2b, 0x4c, 0x7a, 0xe0, 0x89, 0xdf, 0x2c, + 0x78, 0x89, 0x4a, 0x78, 0xcd, 0x7c, 0xcf, 0xf5, 0x87, 0x8e, 0x63, 0x8a, 0x70, 0xbd, 0xdd, 0x98, + 0xbc, 0x0d, 0x8e, 0x5b, 0x42, 0x7c, 0x95, 0x8e, 0x4c, 0xf7, 0x0d, 0x0a, 0xfe, 0x25, 0x9a, 0xd4, + 0xbb, 0x92, 0xc9, 0x49, 0x70, 0xae, 0xe3, 0x69, 0x40, 0xcf, 0x60, 0x94, 0x27, 0x52, 0x35, 0x4b, + 0xb9, 0x0b, 0x66, 0x6e, 0x58, 0xc7, 0x64, 0xaa, 
0x9b, 0xc4, 0xaf, 0xd0, 0xb4, 0xd9, 0x63, 0x42, + 0xce, 0x82, 0x17, 0x8c, 0xa2, 0x66, 0x58, 0x6d, 0xb9, 0xa7, 0x52, 0x9d, 0x90, 0xbb, 0xb3, 0x52, + 0xa7, 0x3c, 0x0d, 0x76, 0xe7, 0xaa, 0x3e, 0x79, 0x3c, 0x95, 0x75, 0xc9, 0x7d, 0x74, 0xaa, 0xb1, + 0xbb, 0x99, 0x0c, 0xc0, 0xbb, 0xc9, 0x2d, 0xc1, 0xc3, 0x4c, 0xfd, 0x16, 0x52, 0xfb, 0x40, 0xfe, + 0x10, 0x55, 0x6b, 0x33, 0x79, 0x0d, 0xfc, 0x10, 0x9f, 0xe4, 0xa0, 0xa2, 0xf0, 0x97, 0x95, 0x3f, + 0xf3, 0x05, 0x4e, 0xeb, 0x79, 0x26, 0x37, 0xc1, 0x0b, 0xdc, 0xa7, 0x29, 0x4b, 0x64, 0xd1, 0x6f, + 0x62, 0xa0, 0x5c, 0xe5, 0xbb, 0x49, 0xd9, 0xae, 0x43, 0x5e, 0x02, 0xef, 0x26, 0x73, 0x8e, 0x0f, + 0xc3, 0x34, 0xf3, 0xc6, 0x06, 0xe2, 0x17, 0xf6, 0x11, 0xae, 0xf7, 0x2d, 0x91, 0x5b, 0xe0, 0x0d, + 0x8e, 0x57, 0x81, 0x45, 0x25, 0x75, 0x26, 0x31, 0x49, 0xf8, 0x33, 0x34, 0xa9, 0x37, 0x99, 0x93, + 0xf3, 0xe0, 0xb2, 0x12, 0x9f, 0x55, 0xe9, 0xfa, 0x27, 0x32, 0xf5, 0x32, 0x4f, 0xa0, 0x78, 0x9f, + 0x37, 0x79, 0x19, 0x9c, 0x40, 0x3d, 0xce, 0xef, 0xf7, 0x0a, 0x18, 0x5e, 0x43, 0xcf, 0x8b, 0x3f, + 0xb8, 0xc1, 0xfb, 0x81, 0x86, 0xed, 0xea, 0x8b, 0xbf, 0xb8, 0xb1, 0xfc, 0xa8, 0xb8, 0xdd, 0x2b, + 0x71, 0xf8, 0x2e, 0x1a, 0x2b, 0xff, 0x0a, 0x07, 0xd9, 0x19, 0xa1, 0x85, 0xe0, 0xb1, 0x25, 0xee, + 0xf7, 0x24, 0x72, 0xe9, 0xbf, 0x5b, 0xe8, 0x64, 0xb1, 0xa6, 0xb9, 0xf7, 0x42, 0xfd, 0x1a, 0xcd, + 0x88, 0x35, 0x54, 0x2c, 0x48, 0x71, 0x94, 0x92, 0x23, 0xbc, 0x05, 0xe9, 0xea, 0xe8, 0x16, 0xa4, + 0x42, 0xea, 0xc7, 0x25, 0xd2, 0x9b, 0xee, 0xe9, 0x84, 0x14, 0xbf, 0x8c, 0xa6, 0xfa, 0x34, 0xc9, + 0x42, 0xda, 0x6d, 0xef, 0xd0, 0xb0, 0x3b, 0x48, 0x18, 0x6f, 0x93, 0x19, 0xf3, 0x26, 0x05, 0xf9, + 0x7e, 0x41, 0xad, 0xb7, 0x50, 0xfd, 0xcc, 0xd2, 0x42, 0xf5, 0x1f, 0x2d, 0xb4, 0x50, 0x7b, 0x52, + 0xd1, 0x42, 0x75, 0x1f, 0xcd, 0x1b, 0x92, 0xda, 0x2c, 0x49, 0xe2, 0x44, 0xb4, 0xe5, 0xe0, 0xf2, + 0x69, 0x92, 0xbe, 0xbf, 0x2c, 0xbe, 0x0a, 0xcc, 0xea, 0x3a, 0xdc, 0xcb, 0x6f, 0xc7, 0x4f, 0xd1, + 0xa2, 0x39, 0x23, 0xed, 0x44, 0x08, 0x29, 0xbb, 0xb3, 0x6e, 0xb8, 0x4f, 0x8d, 0xe0, 0xe0, 0x91, + 0x9e, 0x7d, 0x20, 0x5d, 0xfa, 0xf1, 0x22, 0x9a, 0x32, 0x50, 0xf8, 0x00, 0x9d, 0xae, 0x9f, 0x2e, + 0x90, 0x7a, 0x89, 0xee, 0x14, 0x80, 0x36, 0x7a, 0x4b, 0x8f, 0xe4, 0xff, 0xc1, 0x9f, 0xc9, 0xee, + 0x09, 0x73, 0x08, 0x87, 0x4a, 0xe7, 0x0d, 0x0d, 0x14, 0xa1, 0xad, 0x11, 0xfe, 0x5d, 0x17, 0xaa, + 0xca, 0xc3, 0xb4, 0x46, 0xc5, 0x7f, 0xd9, 0x42, 0xe7, 0xed, 0xc7, 0x18, 0x2a, 0xa9, 0x47, 0x46, + 0x6c, 0x11, 0x4d, 0xa9, 0xca, 0xe7, 0x51, 0x55, 0xfc, 0x69, 0xda, 0x3c, 0x8c, 0x7f, 0xdf, 0x42, + 0x4b, 0x8d, 0x07, 0x21, 0x2a, 0x55, 0x8a, 0xcf, 0xf1, 0xef, 0x83, 0x55, 0xd1, 0x5b, 0x72, 0x54, + 0x6d, 0xce, 0xd1, 0xa1, 0x77, 0xe0, 0xef, 0xd1, 0x19, 0x8b, 0x3e, 0x95, 0x26, 0x47, 0x47, 0x7c, + 0x25, 0x68, 0xd4, 0x44, 0x55, 0xe2, 0x14, 0x6d, 0x1a, 0xc4, 0x7f, 0x6c, 0xa1, 0x4b, 0x43, 0x4e, + 0x63, 0x54, 0x8a, 0x14, 0x5f, 0xfb, 0xd7, 0xc1, 0x8a, 0x98, 0x9f, 0x24, 0x54, 0x7d, 0x2e, 0xd0, + 0x11, 0xf7, 0xe0, 0x18, 0x2d, 0xe8, 0x07, 0x3b, 0x2a, 0x45, 0xe6, 0x46, 0x94, 0x2f, 0x4c, 0x45, + 0xee, 0x33, 0xa6, 0x99, 0xe7, 0x1c, 0xb5, 0xd0, 0x71, 0xa2, 0x34, 0x6e, 0x99, 0xd6, 0x30, 0x3f, + 0x62, 0x6b, 0x63, 0x4a, 0xac, 0x19, 0xc1, 0x3c, 0xb5, 0x0d, 0xe0, 0x6d, 0xa5, 0x1f, 0xad, 0x92, + 0x76, 0x6c, 0x44, 0xbe, 0x65, 0x4a, 0x53, 0x05, 0x95, 0x0d, 0x69, 0x95, 0x8c, 0x88, 0xb7, 0xf2, + 0x55, 0x67, 0x4d, 0x2a, 0x39, 0x27, 0xe1, 0xf3, 0x28, 0x7b, 0x81, 0x8c, 0x79, 0xac, 0xd3, 0xf1, + 0x97, 0x68, 0xaa, 0x38, 0x9e, 0x52, 0x09, 0x5a, 0x18, 0xb1, 0x98, 0x57, 0x82, 0x72, 0xa0, 0x2a, + 0x62, 0x92, 0x6a, 0x14, 0xfc, 0x0d, 0x5a, 0xac, 0x9d, 0x62, 0xa9, 0xe4, 
0x3c, 0x37, 0x22, 0xe7, + 0x93, 0x72, 0x8c, 0x16, 0x1a, 0x55, 0x22, 0xd9, 0x6e, 0x18, 0xc3, 0x7f, 0xd5, 0x42, 0xe7, 0xed, + 0x27, 0x60, 0x2a, 0x0d, 0x9e, 0x1f, 0xd1, 0x1d, 0x21, 0x35, 0xb0, 0xb4, 0x7d, 0xa8, 0x5a, 0x9c, + 0xf1, 0x87, 0x8c, 0xe3, 0xa7, 0xe8, 0x94, 0x79, 0x88, 0xa6, 0x52, 0x61, 0x0c, 0xba, 0x5e, 0xe8, + 0x3d, 0x39, 0xda, 0x7a, 0xe1, 0xdb, 0x87, 0xf0, 0x0f, 0x2d, 0x74, 0xc6, 0xd2, 0x1e, 0x52, 0x09, + 0x3f, 0x31, 0x62, 0xc3, 0x5a, 0x13, 0x6e, 0x8d, 0x56, 0x8b, 0x7e, 0xe3, 0x28, 0xfe, 0x87, 0x16, + 0xba, 0x34, 0xe4, 0xf8, 0x4e, 0xa5, 0x4a, 0xd1, 0x82, 0xb3, 0x01, 0x57, 0x65, 0x58, 0xbc, 0x7a, + 0xc1, 0x1f, 0x75, 0x13, 0x7e, 0x82, 0x16, 0xf4, 0xb3, 0x40, 0x95, 0x2a, 0xa7, 0xa1, 0xe1, 0x43, + 0xed, 0x21, 0xd1, 0xc2, 0x87, 0x6f, 0x1b, 0xc0, 0x19, 0x22, 0xc6, 0x01, 0xa2, 0x4a, 0xe6, 0x99, + 0x11, 0xbd, 0xa6, 0x35, 0x99, 0xb5, 0x98, 0x75, 0xd2, 0xb7, 0x8e, 0xe0, 0x40, 0x69, 0x44, 0xaa, + 0xe4, 0xa1, 0x11, 0xbb, 0xe7, 0x9a, 0x3c, 0x55, 0x94, 0x6c, 0x3a, 0xa9, 0xa4, 0xfc, 0xa8, 0x76, + 0x22, 0x29, 0xa7, 0x16, 0x2a, 0x89, 0xe3, 0xae, 0xb6, 0x26, 0x37, 0x91, 0x56, 0x5b, 0xab, 0x8f, + 0xe2, 0xef, 0xd0, 0xe9, 0xfa, 0x21, 0xa7, 0x4a, 0x81, 0x09, 0xe8, 0xca, 0x6c, 0x36, 0x93, 0x68, + 0x2b, 0xb3, 0xdf, 0x34, 0x58, 0x98, 0x7a, 0xf3, 0x11, 0xa9, 0x4a, 0x91, 0x73, 0x60, 0x53, 0x6f, + 0xe8, 0x16, 0xd0, 0x4d, 0x7d, 0xd4, 0x4d, 0xdc, 0xd4, 0xb5, 0xf3, 0x56, 0x95, 0x2a, 0xe7, 0xc1, + 0xa6, 0xae, 0xb4, 0x15, 0xe8, 0xa6, 0x6e, 0x1b, 0xe0, 0xa6, 0xae, 0x1f, 0xd2, 0xaa, 0x64, 0x5e, + 0x00, 0x9b, 0xba, 0xda, 0x72, 0xa0, 0x9b, 0xba, 0x75, 0x04, 0xff, 0x63, 0x0b, 0xbd, 0x38, 0xec, + 0x70, 0x57, 0xa5, 0x43, 0xf1, 0x75, 0xe3, 0x2e, 0x5c, 0x87, 0x5a, 0x13, 0x82, 0xaa, 0xcf, 0x92, + 0x3f, 0xf2, 0x2e, 0xee, 0x86, 0xa5, 0x6a, 0x95, 0x1e, 0x17, 0xc1, 0x6e, 0x28, 0xb0, 0xba, 0x1b, + 0x9a, 0x44, 0xee, 0x86, 0xb6, 0x93, 0x65, 0x95, 0xc4, 0x4b, 0x50, 0x37, 0xac, 0x7d, 0x43, 0xd7, + 0xdc, 0x90, 0x35, 0x8e, 0x62, 0x86, 0x66, 0x6d, 0x82, 0x5f, 0x84, 0x3e, 0xaa, 0x4d, 0xde, 0xcc, + 0x4e, 0x4d, 0xcc, 0x33, 0xb4, 0x58, 0x3b, 0xc5, 0x56, 0x49, 0x7b, 0x09, 0xba, 0xac, 0xea, 0x1f, + 0xd0, 0xb5, 0x65, 0x75, 0xc7, 0x3e, 0x94, 0xa7, 0x68, 0xda, 0xf1, 0xb7, 0x4a, 0xea, 0xcb, 0xd0, + 0x14, 0x4d, 0xf9, 0xba, 0xae, 0xa5, 0x68, 0x3b, 0x16, 0x3a, 0xfe, 0x1c, 0x4d, 0x1a, 0x6e, 0x7b, + 0x79, 0x44, 0xd1, 0x49, 0x93, 0xa3, 0x0a, 0x38, 0xb1, 0xa3, 0xb9, 0xe9, 0x97, 0x68, 0xca, 0xf4, + 0xce, 0x57, 0xa0, 0xc9, 0x5f, 0xcd, 0x2b, 0x27, 0xbb, 0xba, 0x37, 0xee, 0xa2, 0x39, 0xe5, 0xf4, + 0x5d, 0x25, 0xe1, 0x0a, 0x74, 0xb3, 0x2a, 0xbf, 0xa7, 0x68, 0x9b, 0xd5, 0x5e, 0x8d, 0xca, 0xf3, + 0x1c, 0xcb, 0x69, 0xbd, 0x4a, 0xe4, 0xab, 0x50, 0xa3, 0xaf, 0x95, 0xbf, 0x34, 0xa3, 0x4f, 0x1a, + 0x47, 0xf3, 0x4c, 0xb7, 0x76, 0xd8, 0xaf, 0x92, 0x3f, 0x09, 0xcd, 0x74, 0x8d, 0x8a, 0xac, 0x96, + 0xe9, 0xa6, 0x0d, 0x63, 0xf9, 0x44, 0x5b, 0x57, 0xdc, 0x29, 0xe8, 0x44, 0x5b, 0x57, 0x5a, 0x9c, + 0xd6, 0x57, 0x58, 0x86, 0x66, 0xab, 0x63, 0x85, 0x95, 0xa0, 0x69, 0xa8, 0x6b, 0x97, 0x05, 0x4b, + 0xcd, 0xb5, 0x07, 0x26, 0x71, 0x7d, 0x1c, 0x1d, 0x97, 0xcc, 0x97, 0xfe, 0xb8, 0x54, 0x16, 0x98, + 0x6a, 0x05, 0x1a, 0xbc, 0xaf, 0x6c, 0x02, 0x65, 0x2d, 0xa6, 0x38, 0x3d, 0x27, 0x0a, 0x31, 0xb7, + 0xa0, 0x65, 0x21, 0xe3, 0x84, 0x15, 0xe7, 0xa1, 0x6c, 0x3e, 0x35, 0x3a, 0xde, 0x51, 0x36, 0x82, + 0x34, 0x28, 0x25, 0xb6, 0xa0, 0x8b, 0xa9, 0x21, 0x51, 0x0a, 0x9b, 0xa2, 0x3a, 0x09, 0xff, 0xd4, + 0x52, 0xaa, 0x0d, 0xda, 0x1e, 0x46, 0x88, 0x3c, 0x02, 0xad, 0x7b, 0x68, 0x22, 0xd5, 0xe6, 0xf5, + 0x52, 0x36, 0xa1, 0x0d, 0x63, 0xf8, 0x6f, 0xd4, 0x52, 0x90, 0x59, 0x82, 0x11, 0x7a, 0x60, 0x68, + 
0x4a, 0xa3, 0xe9, 0x61, 0x9c, 0x97, 0x2a, 0x55, 0x39, 0x4d, 0x9b, 0x87, 0xf1, 0xb7, 0xe8, 0x94, + 0x45, 0x19, 0xa1, 0xc6, 0x51, 0xe8, 0x7e, 0xce, 0xae, 0x86, 0xd4, 0xe0, 0x24, 0xb5, 0x8e, 0xe0, + 0xbf, 0x6b, 0xa1, 0x17, 0x86, 0x14, 0x5f, 0x84, 0x16, 0x45, 0xe5, 0xe5, 0x9e, 0xa3, 0x16, 0xb5, + 0x66, 0xd1, 0x52, 0x99, 0xb3, 0x74, 0xd8, 0x0d, 0xb8, 0x8b, 0xe6, 0xf4, 0xca, 0x8b, 0xd0, 0x62, + 0x0e, 0xba, 0x02, 0x6a, 0x5a, 0xf0, 0x16, 0xb7, 0x52, 0xf2, 0x0c, 0x35, 0x89, 0x38, 0x56, 0x4a, + 0x90, 0x9a, 0x05, 0xcc, 0x43, 0x43, 0x9c, 0x26, 0x4e, 0x7f, 0xf1, 0x98, 0xd6, 0xa8, 0xb8, 0x8d, + 0xa6, 0xa4, 0x40, 0x21, 0xea, 0xd8, 0x88, 0xef, 0x9d, 0x76, 0x51, 0x52, 0xca, 0x09, 0xaa, 0x12, + 0xf0, 0x1e, 0x3f, 0xe7, 0x55, 0x15, 0x5c, 0x84, 0x90, 0x93, 0xae, 0xd3, 0x57, 0x1d, 0xc6, 0x52, + 0xa6, 0xcf, 0x20, 0xe2, 0xcf, 0xd1, 0x44, 0x51, 0x6d, 0x11, 0x52, 0x16, 0xa0, 0x91, 0x53, 0x48, + 0xe1, 0xc7, 0x92, 0x4a, 0xfe, 0xe3, 0xb4, 0xba, 0xc4, 0x4f, 0xd1, 0x42, 0xad, 0xd4, 0x22, 0x84, + 0x3c, 0x07, 0xad, 0xd3, 0x16, 0x42, 0xcc, 0x03, 0x4b, 0xa5, 0xb8, 0xf9, 0x6d, 0xdb, 0x00, 0xfe, + 0x0b, 0xb5, 0xc8, 0x60, 0x8b, 0x51, 0xcf, 0x8f, 0xf8, 0x4e, 0x6c, 0x88, 0xb7, 0x9d, 0xb0, 0x29, + 0x55, 0x38, 0xe5, 0x37, 0x0d, 0xe2, 0x4c, 0x39, 0x4a, 0x24, 0x8a, 0x2c, 0x42, 0xfe, 0x98, 0xdb, + 0x42, 0x60, 0x9c, 0x7d, 0x92, 0x0b, 0x81, 0x6f, 0xa1, 0xe3, 0xef, 0x94, 0xd2, 0x4e, 0x2d, 0x1a, + 0x15, 0xd5, 0x95, 0xf7, 0x5c, 0x05, 0xd7, 0xc3, 0xd1, 0x82, 0x6f, 0x1f, 0xc2, 0x7f, 0x68, 0xa1, + 0x17, 0x86, 0x54, 0x57, 0x84, 0x1e, 0x45, 0x69, 0xe5, 0xbe, 0xab, 0x1e, 0x8d, 0x01, 0xe9, 0x9c, + 0x3f, 0xf4, 0x0e, 0x1c, 0xa1, 0x39, 0xbd, 0xb4, 0x22, 0xf4, 0x38, 0xed, 0x16, 0x22, 0xb4, 0x13, + 0x3a, 0x32, 0x44, 0xf8, 0x35, 0x2a, 0x7e, 0xa2, 0x1c, 0xeb, 0xd2, 0x62, 0xd2, 0x19, 0xe8, 0x8e, + 0x5f, 0x17, 0xa8, 0x07, 0xa5, 0x59, 0xbf, 0x4e, 0xc6, 0x54, 0x39, 0x5d, 0x27, 0x84, 0x21, 0x68, + 0xee, 0xaf, 0x0b, 0x93, 0x72, 0xe4, 0x99, 0x23, 0x21, 0xe2, 0x7b, 0xe5, 0x58, 0x95, 0x92, 0xde, + 0x09, 0x69, 0xe3, 0x87, 0xb3, 0xad, 0xea, 0xc3, 0x7c, 0xcd, 0xb6, 0x8c, 0x21, 0xfc, 0x0d, 0x22, + 0xf5, 0x6a, 0x8a, 0x90, 0x3e, 0xe1, 0xb6, 0xce, 0xd6, 0x4e, 0xe7, 0xc8, 0x75, 0xd6, 0xb7, 0x8e, + 0x14, 0x76, 0xdd, 0x5c, 0x4a, 0x11, 0x5a, 0x9c, 0x73, 0xb4, 0xeb, 0xa6, 0x53, 0x19, 0x95, 0x5d, + 0x0f, 0xbd, 0x83, 0xdb, 0xb5, 0x56, 0x47, 0x11, 0x7a, 0x9c, 0x77, 0xb4, 0x6b, 0xf5, 0xbc, 0x46, + 0x65, 0xd7, 0x35, 0x2a, 0xb7, 0x6b, 0xbd, 0x88, 0x22, 0x04, 0x5e, 0x70, 0xb4, 0x6b, 0xed, 0x24, + 0x47, 0x65, 0xd7, 0x75, 0x32, 0xfe, 0xfb, 0x16, 0x5a, 0x1a, 0x56, 0x41, 0x11, 0x0a, 0x2c, 0x1d, + 0x6e, 0xe6, 0x8d, 0xf2, 0x08, 0x95, 0xba, 0x9c, 0x6f, 0x2c, 0xa0, 0x28, 0xfe, 0x56, 0xaa, 0x25, + 0x74, 0xb8, 0xe8, 0xe8, 0x6f, 0xe5, 0xf9, 0xa8, 0xca, 0xdf, 0x34, 0x4a, 0xee, 0x6f, 0xb6, 0xca, + 0x89, 0x90, 0x76, 0xc9, 0xcd, 0xdf, 0xea, 0x07, 0x10, 0xa4, 0xbf, 0x31, 0xfb, 0x10, 0xde, 0x56, + 0xcf, 0x01, 0x08, 0xa9, 0x2f, 0xba, 0x3d, 0x63, 0x4d, 0x98, 0x3c, 0x0e, 0x20, 0x64, 0x0c, 0xd0, + 0x42, 0xad, 0x66, 0x22, 0x44, 0xbd, 0xe4, 0xb6, 0x4a, 0x1a, 0xe7, 0x0e, 0xe4, 0x2a, 0xb9, 0x63, + 0xa1, 0xe7, 0x29, 0x96, 0x56, 0x30, 0x11, 0x22, 0x5f, 0x76, 0x4b, 0xb1, 0xd4, 0x13, 0x09, 0x32, + 0xc5, 0xda, 0x31, 0x89, 0xf8, 0x53, 0x34, 0xae, 0x3a, 0xe7, 0x65, 0xe8, 0xd7, 0xb9, 0x4a, 0x88, + 0xe4, 0x8e, 0x76, 0x2a, 0x67, 0xfc, 0x1c, 0x4d, 0x68, 0x3e, 0xf8, 0x8a, 0x5b, 0xe6, 0xa6, 0xfb, + 0xde, 0x78, 0x57, 0xf1, 0x39, 0x86, 0x66, 0x94, 0x3a, 0x89, 0x60, 0x7f, 0xc5, 0x6d, 0x2f, 0x59, + 0xb5, 0x9e, 0xca, 0xbd, 0x64, 0x4f, 0x27, 0xe5, 0xa9, 0x8a, 0xa5, 0x46, 0x22, 0xc4, 0xbd, 0xea, + 0x66, 0xde, 0xf5, 0x2e, 
0x21, 0x69, 0xde, 0x89, 0x7d, 0x28, 0xcf, 0x4f, 0x6b, 0x05, 0x12, 0x21, + 0x7c, 0xd2, 0x2d, 0x3f, 0x35, 0xbb, 0xd6, 0x64, 0x7e, 0x9a, 0xda, 0x06, 0xf2, 0xe9, 0xad, 0x2f, + 0x9f, 0x53, 0x6e, 0xd3, 0x5b, 0x5f, 0x36, 0xa7, 0x52, 0x63, 0xb9, 0xdc, 0x46, 0xd3, 0x55, 0x69, + 0x44, 0x48, 0x99, 0x76, 0x73, 0x5f, 0xd9, 0xce, 0x25, 0xdd, 0x77, 0xa0, 0x51, 0xd6, 0x11, 0x1a, + 0x2b, 0xdb, 0x5e, 0xae, 0xfd, 0xf5, 0x51, 0x34, 0x2d, 0x3b, 0x6e, 0xb6, 0x0a, 0x46, 0xf8, 0x5f, + 0x5b, 0xe8, 0xb9, 0xa2, 0x63, 0x10, 0x03, 0xea, 0xeb, 0xf6, 0x3f, 0xd6, 0xb4, 0x78, 0xe3, 0x10, + 0xc8, 0x42, 0xa9, 0xa5, 0x1b, 0x3f, 0xfd, 0xfb, 0x7f, 0xfd, 0xe1, 0xc8, 0xf5, 0xa5, 0x65, 0xfe, + 0x3f, 0x8d, 0x44, 0x7c, 0x4d, 0x57, 0xbe, 0x55, 0x1a, 0x9e, 0x6e, 0x5f, 0xf9, 0x4e, 0xfc, 0xc1, + 0xaa, 0xb5, 0x20, 0x5d, 0x2d, 0xda, 0x19, 0x57, 0x5b, 0x57, 0xb8, 0xea, 0xc5, 0x34, 0x40, 0x54, + 0xb7, 0xf7, 0x56, 0x2d, 0xde, 0x38, 0x04, 0xf2, 0xb0, 0xaa, 0x17, 0x4d, 0x43, 0xab, 0xad, 0x2b, + 0xeb, 0xff, 0xd7, 0x42, 0x97, 0xfc, 0xb8, 0x37, 0x52, 0xf6, 0xfa, 0xbc, 0xf9, 0xc2, 0x36, 0x93, + 0x38, 0x8b, 0x37, 0x5b, 0x5f, 0x7c, 0x20, 0xa0, 0x9d, 0xb8, 0x4b, 0xa3, 0xce, 0x72, 0x9c, 0x74, + 0x56, 0x3a, 0x2c, 0xe2, 0x7f, 0xc2, 0xa9, 0xfc, 0xeb, 0x5e, 0xfd, 0x30, 0x6d, 0xfe, 0x63, 0x5f, + 0x37, 0xcb, 0x1f, 0xff, 0x74, 0xe4, 0xe8, 0x83, 0xb5, 0xb5, 0x7f, 0x3e, 0x72, 0xa1, 0x90, 0xb4, + 0xbc, 0x16, 0x28, 0x7f, 0x94, 0x6a, 0xf9, 0xf1, 0xd5, 0x65, 0x21, 0x38, 0xfd, 0xb7, 0xf2, 0x96, + 0xaf, 0xd6, 0x82, 0xf4, 0x2b, 0x79, 0xcb, 0x57, 0x8f, 0xaf, 0x7e, 0x55, 0xde, 0xf2, 0x3f, 0x47, + 0x2e, 0x15, 0xf4, 0xd5, 0xd5, 0xfc, 0x79, 0x57, 0xe5, 0x4d, 0xab, 0xab, 0x8f, 0xaf, 0xae, 0xae, + 0x96, 0xb7, 0x6d, 0x3f, 0xc7, 0xf5, 0xbc, 0xfe, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x62, 0xac, + 0x7f, 0x75, 0x68, 0x6b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/group_placement_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/group_placement_view_service.pb.go new file mode 100644 index 0000000..1004178 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/group_placement_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/group_placement_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [GroupPlacementViewService.GetGroupPlacementView][google.ads.googleads.v1.services.GroupPlacementViewService.GetGroupPlacementView]. +type GetGroupPlacementViewRequest struct { + // The resource name of the Group Placement view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGroupPlacementViewRequest) Reset() { *m = GetGroupPlacementViewRequest{} } +func (m *GetGroupPlacementViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetGroupPlacementViewRequest) ProtoMessage() {} +func (*GetGroupPlacementViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_placement_view_service_0ce1a68201b7e811, []int{0} +} +func (m *GetGroupPlacementViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGroupPlacementViewRequest.Unmarshal(m, b) +} +func (m *GetGroupPlacementViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGroupPlacementViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetGroupPlacementViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGroupPlacementViewRequest.Merge(dst, src) +} +func (m *GetGroupPlacementViewRequest) XXX_Size() int { + return xxx_messageInfo_GetGroupPlacementViewRequest.Size(m) +} +func (m *GetGroupPlacementViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGroupPlacementViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGroupPlacementViewRequest proto.InternalMessageInfo + +func (m *GetGroupPlacementViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetGroupPlacementViewRequest)(nil), "google.ads.googleads.v1.services.GetGroupPlacementViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GroupPlacementViewServiceClient is the client API for GroupPlacementViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GroupPlacementViewServiceClient interface { + // Returns the requested Group Placement view in full detail. + GetGroupPlacementView(ctx context.Context, in *GetGroupPlacementViewRequest, opts ...grpc.CallOption) (*resources.GroupPlacementView, error) +} + +type groupPlacementViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewGroupPlacementViewServiceClient(cc *grpc.ClientConn) GroupPlacementViewServiceClient { + return &groupPlacementViewServiceClient{cc} +} + +func (c *groupPlacementViewServiceClient) GetGroupPlacementView(ctx context.Context, in *GetGroupPlacementViewRequest, opts ...grpc.CallOption) (*resources.GroupPlacementView, error) { + out := new(resources.GroupPlacementView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.GroupPlacementViewService/GetGroupPlacementView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GroupPlacementViewServiceServer is the server API for GroupPlacementViewService service. +type GroupPlacementViewServiceServer interface { + // Returns the requested Group Placement view in full detail. 
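For the client stubs generated here, each method is a plain unary gRPC invoke. A minimal usage sketch follows, assuming a locally dialed connection with an insecure transport purely for illustration (a real Google Ads connection needs TLS and OAuth, which are outside this file), and a dummy resource name whose real format is defined by the API, not by this sketch:

package main

import (
	"context"
	"fmt"
	"log"

	services "google.golang.org/genproto/googleapis/ads/googleads/v1/services"
	"google.golang.org/grpc"
)

func main() {
	// Placeholder endpoint and insecure transport; production code supplies
	// proper credentials and the real API endpoint instead.
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := services.NewGroupPlacementViewServiceClient(conn)

	// Dummy resource name for illustration only.
	view, err := client.GetGroupPlacementView(context.Background(), &services.GetGroupPlacementViewRequest{
		ResourceName: "customers/CUSTOMER_ID/groupPlacementViews/PLACEHOLDER",
	})
	if err != nil {
		log.Fatalf("GetGroupPlacementView: %v", err)
	}
	fmt.Println(view)
}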
+ GetGroupPlacementView(context.Context, *GetGroupPlacementViewRequest) (*resources.GroupPlacementView, error) +} + +func RegisterGroupPlacementViewServiceServer(s *grpc.Server, srv GroupPlacementViewServiceServer) { + s.RegisterService(&_GroupPlacementViewService_serviceDesc, srv) +} + +func _GroupPlacementViewService_GetGroupPlacementView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGroupPlacementViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupPlacementViewServiceServer).GetGroupPlacementView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.GroupPlacementViewService/GetGroupPlacementView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupPlacementViewServiceServer).GetGroupPlacementView(ctx, req.(*GetGroupPlacementViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GroupPlacementViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.GroupPlacementViewService", + HandlerType: (*GroupPlacementViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGroupPlacementView", + Handler: _GroupPlacementViewService_GetGroupPlacementView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/group_placement_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/group_placement_view_service.proto", fileDescriptor_group_placement_view_service_0ce1a68201b7e811) +} + +var fileDescriptor_group_placement_view_service_0ce1a68201b7e811 = []byte{ + // 375 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcf, 0x4a, 0xe3, 0x40, + 0x18, 0x27, 0x59, 0x58, 0xd8, 0xb0, 0x7b, 0x09, 0x2c, 0xec, 0x96, 0xb2, 0x94, 0x6e, 0x0f, 0xd2, + 0xc3, 0x0c, 0x51, 0x8a, 0x30, 0x5a, 0x21, 0xed, 0x21, 0x9e, 0xa4, 0x54, 0xc8, 0x41, 0x02, 0x61, + 0x4c, 0x3e, 0x42, 0xa0, 0xc9, 0xc4, 0x7c, 0x49, 0x7a, 0x10, 0x2f, 0x82, 0x4f, 0xe0, 0x1b, 0x78, + 0xf4, 0x51, 0x04, 0x4f, 0xbe, 0x82, 0x27, 0x7d, 0x09, 0x49, 0xa7, 0x13, 0x28, 0x35, 0xf6, 0xf6, + 0x63, 0xe6, 0xf7, 0xe7, 0x9b, 0xdf, 0x37, 0xc6, 0x34, 0x12, 0x22, 0x5a, 0x00, 0xe5, 0x21, 0x52, + 0x09, 0x6b, 0x54, 0x59, 0x14, 0x21, 0xaf, 0xe2, 0x00, 0x90, 0x46, 0xb9, 0x28, 0x33, 0x3f, 0x5b, + 0xf0, 0x00, 0x12, 0x48, 0x0b, 0xbf, 0x8a, 0x61, 0xe9, 0xaf, 0x6f, 0x49, 0x96, 0x8b, 0x42, 0x98, + 0x3d, 0xa9, 0x24, 0x3c, 0x44, 0xd2, 0x98, 0x90, 0xca, 0x22, 0xca, 0xa4, 0x73, 0xdc, 0x16, 0x93, + 0x03, 0x8a, 0x32, 0x6f, 0xcb, 0x91, 0xfe, 0x9d, 0xae, 0x52, 0x67, 0x31, 0xe5, 0x69, 0x2a, 0x0a, + 0x5e, 0xc4, 0x22, 0x45, 0x79, 0xdb, 0x9f, 0x1a, 0x5d, 0x07, 0x0a, 0xa7, 0x96, 0xcf, 0x94, 0xda, + 0x8d, 0x61, 0x39, 0x87, 0xab, 0x12, 0xb0, 0x30, 0xff, 0x1b, 0xbf, 0x54, 0x8a, 0x9f, 0xf2, 0x04, + 0xfe, 0x68, 0x3d, 0x6d, 0xef, 0xc7, 0xfc, 0xa7, 0x3a, 0x3c, 0xe3, 0x09, 0xec, 0xbf, 0x6b, 0xc6, + 0xdf, 0x6d, 0x8b, 0x73, 0x39, 0xbf, 0xf9, 0xac, 0x19, 0xbf, 0x3f, 0xcd, 0x30, 0x4f, 0xc8, 0xae, + 0xb7, 0x93, 0xaf, 0x86, 0xeb, 0x8c, 0x5a, 0xf5, 0x4d, 0x33, 0x64, 0x5b, 0xdd, 0x1f, 0xdf, 0xbe, + 0xbc, 0xde, 0xeb, 0x87, 0xe6, 0xa8, 0xee, 0xf0, 0x7a, 0xe3, 0x79, 0xe3, 0xa0, 0xc4, 0x42, 0x24, + 0x90, 0x23, 0x1d, 0xca, 0x52, 0x37, 0xa4, 0x48, 0x87, 0x37, 0x93, 0x3b, 0xdd, 0x18, 0x04, 0x22, + 0xd9, 0x39, 0xfb, 0xe4, 0x5f, 0x6b, 
0x27, 0xb3, 0xba, 0xfb, 0x99, 0x76, 0x71, 0xba, 0xf6, 0x88, + 0xc4, 0x82, 0xa7, 0x11, 0x11, 0x79, 0x44, 0x23, 0x48, 0x57, 0x9b, 0x51, 0x9b, 0xce, 0x62, 0x6c, + 0xff, 0x5f, 0x47, 0x0a, 0x3c, 0xe8, 0xdf, 0x1c, 0xdb, 0x7e, 0xd4, 0x7b, 0x8e, 0x34, 0xb4, 0x43, + 0x24, 0x12, 0xd6, 0xc8, 0xb5, 0xc8, 0x3a, 0x18, 0x9f, 0x14, 0xc5, 0xb3, 0x43, 0xf4, 0x1a, 0x8a, + 0xe7, 0x5a, 0x9e, 0xa2, 0xbc, 0xe9, 0x03, 0x79, 0xce, 0x98, 0x1d, 0x22, 0x63, 0x0d, 0x89, 0x31, + 0xd7, 0x62, 0x4c, 0xd1, 0x2e, 0xbf, 0xaf, 0xe6, 0x3c, 0xf8, 0x08, 0x00, 0x00, 0xff, 0xff, 0x20, + 0x98, 0xd3, 0x44, 0x06, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_group_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_group_view_service.pb.go new file mode 100644 index 0000000..ec9efd5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_group_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/hotel_group_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [HotelGroupViewService.GetHotelGroupView][google.ads.googleads.v1.services.HotelGroupViewService.GetHotelGroupView]. +type GetHotelGroupViewRequest struct { + // Resource name of the Hotel Group View to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetHotelGroupViewRequest) Reset() { *m = GetHotelGroupViewRequest{} } +func (m *GetHotelGroupViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetHotelGroupViewRequest) ProtoMessage() {} +func (*GetHotelGroupViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_group_view_service_1b795ba7f86fa296, []int{0} +} +func (m *GetHotelGroupViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetHotelGroupViewRequest.Unmarshal(m, b) +} +func (m *GetHotelGroupViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetHotelGroupViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetHotelGroupViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetHotelGroupViewRequest.Merge(dst, src) +} +func (m *GetHotelGroupViewRequest) XXX_Size() int { + return xxx_messageInfo_GetHotelGroupViewRequest.Size(m) +} +func (m *GetHotelGroupViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetHotelGroupViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetHotelGroupViewRequest proto.InternalMessageInfo + +func (m *GetHotelGroupViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetHotelGroupViewRequest)(nil), "google.ads.googleads.v1.services.GetHotelGroupViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// HotelGroupViewServiceClient is the client API for HotelGroupViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type HotelGroupViewServiceClient interface { + // Returns the requested Hotel Group View in full detail. + GetHotelGroupView(ctx context.Context, in *GetHotelGroupViewRequest, opts ...grpc.CallOption) (*resources.HotelGroupView, error) +} + +type hotelGroupViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewHotelGroupViewServiceClient(cc *grpc.ClientConn) HotelGroupViewServiceClient { + return &hotelGroupViewServiceClient{cc} +} + +func (c *hotelGroupViewServiceClient) GetHotelGroupView(ctx context.Context, in *GetHotelGroupViewRequest, opts ...grpc.CallOption) (*resources.HotelGroupView, error) { + out := new(resources.HotelGroupView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.HotelGroupViewService/GetHotelGroupView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// HotelGroupViewServiceServer is the server API for HotelGroupViewService service. +type HotelGroupViewServiceServer interface { + // Returns the requested Hotel Group View in full detail. 
+ GetHotelGroupView(context.Context, *GetHotelGroupViewRequest) (*resources.HotelGroupView, error) +} + +func RegisterHotelGroupViewServiceServer(s *grpc.Server, srv HotelGroupViewServiceServer) { + s.RegisterService(&_HotelGroupViewService_serviceDesc, srv) +} + +func _HotelGroupViewService_GetHotelGroupView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetHotelGroupViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HotelGroupViewServiceServer).GetHotelGroupView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.HotelGroupViewService/GetHotelGroupView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HotelGroupViewServiceServer).GetHotelGroupView(ctx, req.(*GetHotelGroupViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _HotelGroupViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.HotelGroupViewService", + HandlerType: (*HotelGroupViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetHotelGroupView", + Handler: _HotelGroupViewService_GetHotelGroupView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/hotel_group_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/hotel_group_view_service.proto", fileDescriptor_hotel_group_view_service_1b795ba7f86fa296) +} + +var fileDescriptor_hotel_group_view_service_1b795ba7f86fa296 = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x4a, 0xeb, 0x40, + 0x14, 0x86, 0x49, 0x2e, 0x5c, 0xb8, 0xe1, 0xba, 0x30, 0x20, 0x94, 0xe0, 0xa2, 0xd4, 0x2e, 0xa4, + 0x8b, 0x19, 0x62, 0x37, 0x3a, 0x22, 0x25, 0xdd, 0xa4, 0x2b, 0x29, 0x15, 0xb2, 0x90, 0x40, 0x88, + 0xcd, 0x21, 0x06, 0x9a, 0x4c, 0xcc, 0x99, 0xa4, 0x0b, 0x71, 0xa1, 0xaf, 0xe0, 0x1b, 0xb8, 0xf4, + 0x1d, 0x7c, 0x01, 0xb7, 0x2e, 0x7c, 0x01, 0x57, 0x3e, 0x85, 0xa4, 0xd3, 0x09, 0x14, 0x1b, 0xba, + 0xfb, 0x99, 0xf9, 0xbf, 0x7f, 0xce, 0xf9, 0x13, 0x63, 0x14, 0x73, 0x1e, 0x2f, 0x80, 0x86, 0x11, + 0x52, 0x29, 0x6b, 0x55, 0xd9, 0x14, 0xa1, 0xa8, 0x92, 0x39, 0x20, 0xbd, 0xe5, 0x02, 0x16, 0x41, + 0x5c, 0xf0, 0x32, 0x0f, 0xaa, 0x04, 0x96, 0xc1, 0xfa, 0x86, 0xe4, 0x05, 0x17, 0xdc, 0xec, 0x4a, + 0x8a, 0x84, 0x11, 0x92, 0x26, 0x80, 0x54, 0x36, 0x51, 0x01, 0xd6, 0x69, 0xdb, 0x13, 0x05, 0x20, + 0x2f, 0x8b, 0x6d, 0x6f, 0xc8, 0x6c, 0xeb, 0x50, 0x91, 0x79, 0x42, 0xc3, 0x2c, 0xe3, 0x22, 0x14, + 0x09, 0xcf, 0x50, 0xde, 0xf6, 0x46, 0x46, 0xc7, 0x05, 0x31, 0xa9, 0x51, 0xb7, 0x26, 0xbd, 0x04, + 0x96, 0x33, 0xb8, 0x2b, 0x01, 0x85, 0x79, 0x64, 0xec, 0xa9, 0xf4, 0x20, 0x0b, 0x53, 0xe8, 0x68, + 0x5d, 0xed, 0xf8, 0xdf, 0xec, 0xbf, 0x3a, 0xbc, 0x0c, 0x53, 0x38, 0xf9, 0xd4, 0x8c, 0x83, 0x4d, + 0xfc, 0x4a, 0xce, 0x6c, 0xbe, 0x69, 0xc6, 0xfe, 0xaf, 0x6c, 0x93, 0x91, 0x5d, 0xbb, 0x92, 0xb6, + 0x81, 0x2c, 0xbb, 0x95, 0x6d, 0x5a, 0x20, 0x9b, 0x64, 0xef, 0xec, 0xe9, 0xe3, 0xeb, 0x59, 0x1f, + 0x9a, 0x76, 0xdd, 0xd5, 0xfd, 0xc6, 0x3a, 0x17, 0xf3, 0x12, 0x05, 0x4f, 0xa1, 0x40, 0x3a, 0x90, + 0xe5, 0x35, 0x18, 0xd2, 0xc1, 0xc3, 0xf8, 0x51, 0x37, 0xfa, 0x73, 0x9e, 0xee, 0x9c, 0x77, 0x6c, + 0x6d, 0xdd, 0x7f, 0x5a, 0xf7, 0x3b, 0xd5, 0xae, 0x27, 0x6b, 0x3e, 0xe6, 0x8b, 0x30, 0x8b, 0x09, + 0x2f, 0x62, 0x1a, 0x43, 0xb6, 0x6a, 0x5f, 
0x7d, 0xc9, 0x3c, 0xc1, 0xf6, 0x7f, 0xe7, 0x5c, 0x89, + 0x17, 0xfd, 0x8f, 0xeb, 0x38, 0xaf, 0x7a, 0xd7, 0x95, 0x81, 0x4e, 0x84, 0x44, 0xca, 0x5a, 0x79, + 0x36, 0x59, 0x3f, 0x8c, 0xef, 0xca, 0xe2, 0x3b, 0x11, 0xfa, 0x8d, 0xc5, 0xf7, 0x6c, 0x5f, 0x59, + 0xbe, 0xf5, 0xbe, 0x3c, 0x67, 0xcc, 0x89, 0x90, 0xb1, 0xc6, 0xc4, 0x98, 0x67, 0x33, 0xa6, 0x6c, + 0x37, 0x7f, 0x57, 0x73, 0x0e, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x34, 0xd6, 0x3e, 0x66, 0xe2, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_performance_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_performance_view_service.pb.go new file mode 100644 index 0000000..0915d8a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/hotel_performance_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/hotel_performance_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [HotelPerformanceViewService.GetHotelPerformanceView][google.ads.googleads.v1.services.HotelPerformanceViewService.GetHotelPerformanceView]. +type GetHotelPerformanceViewRequest struct { + // Resource name of the Hotel Performance View to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetHotelPerformanceViewRequest) Reset() { *m = GetHotelPerformanceViewRequest{} } +func (m *GetHotelPerformanceViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetHotelPerformanceViewRequest) ProtoMessage() {} +func (*GetHotelPerformanceViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_hotel_performance_view_service_31c85b21d7ab4aff, []int{0} +} +func (m *GetHotelPerformanceViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetHotelPerformanceViewRequest.Unmarshal(m, b) +} +func (m *GetHotelPerformanceViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetHotelPerformanceViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetHotelPerformanceViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetHotelPerformanceViewRequest.Merge(dst, src) +} +func (m *GetHotelPerformanceViewRequest) XXX_Size() int { + return xxx_messageInfo_GetHotelPerformanceViewRequest.Size(m) +} +func (m *GetHotelPerformanceViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetHotelPerformanceViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetHotelPerformanceViewRequest proto.InternalMessageInfo + +func (m *GetHotelPerformanceViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetHotelPerformanceViewRequest)(nil), "google.ads.googleads.v1.services.GetHotelPerformanceViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// HotelPerformanceViewServiceClient is the client API for HotelPerformanceViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type HotelPerformanceViewServiceClient interface { + // Returns the requested Hotel Performance View in full detail. + GetHotelPerformanceView(ctx context.Context, in *GetHotelPerformanceViewRequest, opts ...grpc.CallOption) (*resources.HotelPerformanceView, error) +} + +type hotelPerformanceViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewHotelPerformanceViewServiceClient(cc *grpc.ClientConn) HotelPerformanceViewServiceClient { + return &hotelPerformanceViewServiceClient{cc} +} + +func (c *hotelPerformanceViewServiceClient) GetHotelPerformanceView(ctx context.Context, in *GetHotelPerformanceViewRequest, opts ...grpc.CallOption) (*resources.HotelPerformanceView, error) { + out := new(resources.HotelPerformanceView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.HotelPerformanceViewService/GetHotelPerformanceView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// HotelPerformanceViewServiceServer is the server API for HotelPerformanceViewService service. +type HotelPerformanceViewServiceServer interface { + // Returns the requested Hotel Performance View in full detail. 
+ GetHotelPerformanceView(context.Context, *GetHotelPerformanceViewRequest) (*resources.HotelPerformanceView, error) +} + +func RegisterHotelPerformanceViewServiceServer(s *grpc.Server, srv HotelPerformanceViewServiceServer) { + s.RegisterService(&_HotelPerformanceViewService_serviceDesc, srv) +} + +func _HotelPerformanceViewService_GetHotelPerformanceView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetHotelPerformanceViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HotelPerformanceViewServiceServer).GetHotelPerformanceView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.HotelPerformanceViewService/GetHotelPerformanceView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HotelPerformanceViewServiceServer).GetHotelPerformanceView(ctx, req.(*GetHotelPerformanceViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _HotelPerformanceViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.HotelPerformanceViewService", + HandlerType: (*HotelPerformanceViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetHotelPerformanceView", + Handler: _HotelPerformanceViewService_GetHotelPerformanceView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/hotel_performance_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/hotel_performance_view_service.proto", fileDescriptor_hotel_performance_view_service_31c85b21d7ab4aff) +} + +var fileDescriptor_hotel_performance_view_service_31c85b21d7ab4aff = []byte{ + // 374 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcd, 0x4a, 0xf3, 0x40, + 0x14, 0x25, 0xf9, 0xe0, 0x03, 0x83, 0x6e, 0xb2, 0x51, 0xaa, 0x48, 0xa8, 0x5d, 0x88, 0x8b, 0x09, + 0x51, 0x51, 0x18, 0x7f, 0x30, 0x85, 0xd2, 0xae, 0xa4, 0x54, 0xc8, 0x42, 0x02, 0x61, 0x4c, 0xae, + 0x31, 0xd0, 0x64, 0xe2, 0xcc, 0x34, 0x5d, 0x88, 0x1b, 0x37, 0x3e, 0x80, 0x6f, 0xe0, 0xd2, 0x47, + 0x71, 0xa9, 0xaf, 0xe0, 0xca, 0xa5, 0x4f, 0x20, 0xe9, 0x74, 0x22, 0x4a, 0xd3, 0xee, 0x0e, 0x33, + 0xe7, 0xe7, 0xce, 0xb9, 0x63, 0x74, 0x62, 0x4a, 0xe3, 0x21, 0xd8, 0x24, 0xe2, 0xb6, 0x84, 0x25, + 0x2a, 0x1c, 0x9b, 0x03, 0x2b, 0x92, 0x10, 0xb8, 0x7d, 0x43, 0x05, 0x0c, 0x83, 0x1c, 0xd8, 0x35, + 0x65, 0x29, 0xc9, 0x42, 0x08, 0x8a, 0x04, 0xc6, 0xc1, 0xf4, 0x1e, 0xe5, 0x8c, 0x0a, 0x6a, 0x5a, + 0x52, 0x8b, 0x48, 0xc4, 0x51, 0x65, 0x83, 0x0a, 0x07, 0x29, 0x9b, 0xc6, 0x69, 0x5d, 0x10, 0x03, + 0x4e, 0x47, 0xac, 0x3e, 0x49, 0x26, 0x34, 0x36, 0x94, 0x3e, 0x4f, 0x6c, 0x92, 0x65, 0x54, 0x10, + 0x91, 0xd0, 0x8c, 0xcb, 0xdb, 0x66, 0xc7, 0xd8, 0xec, 0x82, 0xe8, 0x95, 0x06, 0xfd, 0x1f, 0xbd, + 0x97, 0xc0, 0x78, 0x00, 0xb7, 0x23, 0xe0, 0xc2, 0xdc, 0x32, 0x56, 0x54, 0x52, 0x90, 0x91, 0x14, + 0xd6, 0x34, 0x4b, 0xdb, 0x5e, 0x1a, 0x2c, 0xab, 0xc3, 0x73, 0x92, 0xc2, 0xee, 0x97, 0x66, 0xac, + 0xcf, 0x32, 0xb9, 0x90, 0xaf, 0x30, 0xdf, 0x34, 0x63, 0xb5, 0x26, 0xc7, 0x3c, 0x43, 0x8b, 0x3a, + 0x40, 0xf3, 0x47, 0x6c, 0x1c, 0xd6, 0x3a, 0x54, 0x1d, 0xa1, 0x59, 0xfa, 0xe6, 0xf1, 0xc3, 0xfb, + 0xc7, 0x93, 0x7e, 0x60, 0xee, 0x97, 0x7d, 0xde, 0xfd, 0x7a, 0xe6, 0x49, 0x38, 0xe2, 0x82, 0xa6, + 0xc0, 0xb8, 0xbd, 0x23, 0x0b, 0xfe, 0x23, 0xbe, 0x6f, 0x3f, 0xea, 0x46, 0x2b, 
0xa4, 0xe9, 0xc2, + 0xf1, 0xdb, 0xd6, 0x9c, 0x6a, 0xfa, 0xe5, 0x1a, 0xfa, 0xda, 0x65, 0x6f, 0xea, 0x12, 0xd3, 0x21, + 0xc9, 0x62, 0x44, 0x59, 0x6c, 0xc7, 0x90, 0x4d, 0x96, 0xa4, 0xd6, 0x9e, 0x27, 0xbc, 0xfe, 0xbb, + 0x1d, 0x29, 0xf0, 0xac, 0xff, 0xeb, 0xba, 0xee, 0x8b, 0x6e, 0x75, 0xa5, 0xa1, 0x1b, 0x71, 0x24, + 0x61, 0x89, 0x3c, 0x07, 0x4d, 0x83, 0xf9, 0xab, 0xa2, 0xf8, 0x6e, 0xc4, 0xfd, 0x8a, 0xe2, 0x7b, + 0x8e, 0xaf, 0x28, 0x9f, 0x7a, 0x4b, 0x9e, 0x63, 0xec, 0x46, 0x1c, 0xe3, 0x8a, 0x84, 0xb1, 0xe7, + 0x60, 0xac, 0x68, 0x57, 0xff, 0x27, 0x73, 0xee, 0x7d, 0x07, 0x00, 0x00, 0xff, 0xff, 0xe6, 0x4e, + 0xc0, 0xba, 0x15, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_ad_group_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_ad_group_service.pb.go new file mode 100644 index 0000000..51cc63a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_ad_group_service.pb.go @@ -0,0 +1,592 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_ad_group_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordPlanAdGroupService.GetKeywordPlanAdGroup][google.ads.googleads.v1.services.KeywordPlanAdGroupService.GetKeywordPlanAdGroup]. +type GetKeywordPlanAdGroupRequest struct { + // The resource name of the Keyword Plan ad group to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordPlanAdGroupRequest) Reset() { *m = GetKeywordPlanAdGroupRequest{} } +func (m *GetKeywordPlanAdGroupRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordPlanAdGroupRequest) ProtoMessage() {} +func (*GetKeywordPlanAdGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e, []int{0} +} +func (m *GetKeywordPlanAdGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordPlanAdGroupRequest.Unmarshal(m, b) +} +func (m *GetKeywordPlanAdGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordPlanAdGroupRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordPlanAdGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordPlanAdGroupRequest.Merge(dst, src) +} +func (m *GetKeywordPlanAdGroupRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordPlanAdGroupRequest.Size(m) +} +func (m *GetKeywordPlanAdGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordPlanAdGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordPlanAdGroupRequest proto.InternalMessageInfo + +func (m *GetKeywordPlanAdGroupRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [KeywordPlanAdGroupService.MutateKeywordPlanAdGroups][google.ads.googleads.v1.services.KeywordPlanAdGroupService.MutateKeywordPlanAdGroups]. +type MutateKeywordPlanAdGroupsRequest struct { + // The ID of the customer whose Keyword Plan ad groups are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual Keyword Plan ad groups. + Operations []*KeywordPlanAdGroupOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanAdGroupsRequest) Reset() { *m = MutateKeywordPlanAdGroupsRequest{} } +func (m *MutateKeywordPlanAdGroupsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanAdGroupsRequest) ProtoMessage() {} +func (*MutateKeywordPlanAdGroupsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e, []int{1} +} +func (m *MutateKeywordPlanAdGroupsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanAdGroupsRequest.Unmarshal(m, b) +} +func (m *MutateKeywordPlanAdGroupsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanAdGroupsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanAdGroupsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanAdGroupsRequest.Merge(dst, src) +} +func (m *MutateKeywordPlanAdGroupsRequest) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanAdGroupsRequest.Size(m) +} +func (m *MutateKeywordPlanAdGroupsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanAdGroupsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanAdGroupsRequest proto.InternalMessageInfo + +func (m *MutateKeywordPlanAdGroupsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateKeywordPlanAdGroupsRequest) GetOperations() []*KeywordPlanAdGroupOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateKeywordPlanAdGroupsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateKeywordPlanAdGroupsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a Keyword Plan ad group. +type KeywordPlanAdGroupOperation struct { + // The FieldMask that determines which resource fields are modified in an + // update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *KeywordPlanAdGroupOperation_Create + // *KeywordPlanAdGroupOperation_Update + // *KeywordPlanAdGroupOperation_Remove + Operation isKeywordPlanAdGroupOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanAdGroupOperation) Reset() { *m = KeywordPlanAdGroupOperation{} } +func (m *KeywordPlanAdGroupOperation) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanAdGroupOperation) ProtoMessage() {} +func (*KeywordPlanAdGroupOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e, []int{2} +} +func (m *KeywordPlanAdGroupOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanAdGroupOperation.Unmarshal(m, b) +} +func (m *KeywordPlanAdGroupOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanAdGroupOperation.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanAdGroupOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanAdGroupOperation.Merge(dst, src) +} +func (m *KeywordPlanAdGroupOperation) XXX_Size() int { + return xxx_messageInfo_KeywordPlanAdGroupOperation.Size(m) +} +func (m *KeywordPlanAdGroupOperation) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanAdGroupOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanAdGroupOperation proto.InternalMessageInfo + +func (m *KeywordPlanAdGroupOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isKeywordPlanAdGroupOperation_Operation interface { + isKeywordPlanAdGroupOperation_Operation() +} + +type KeywordPlanAdGroupOperation_Create struct { + Create *resources.KeywordPlanAdGroup `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type KeywordPlanAdGroupOperation_Update struct { + Update *resources.KeywordPlanAdGroup `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type KeywordPlanAdGroupOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*KeywordPlanAdGroupOperation_Create) isKeywordPlanAdGroupOperation_Operation() {} + +func (*KeywordPlanAdGroupOperation_Update) isKeywordPlanAdGroupOperation_Operation() {} + +func (*KeywordPlanAdGroupOperation_Remove) isKeywordPlanAdGroupOperation_Operation() {} + +func (m *KeywordPlanAdGroupOperation) GetOperation() isKeywordPlanAdGroupOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *KeywordPlanAdGroupOperation) GetCreate() *resources.KeywordPlanAdGroup { + if x, ok := m.GetOperation().(*KeywordPlanAdGroupOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *KeywordPlanAdGroupOperation) GetUpdate() *resources.KeywordPlanAdGroup { + if x, ok := m.GetOperation().(*KeywordPlanAdGroupOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *KeywordPlanAdGroupOperation) GetRemove() string { + if x, ok := m.GetOperation().(*KeywordPlanAdGroupOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeywordPlanAdGroupOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanAdGroupOperation_OneofMarshaler, _KeywordPlanAdGroupOperation_OneofUnmarshaler, _KeywordPlanAdGroupOperation_OneofSizer, []interface{}{ + (*KeywordPlanAdGroupOperation_Create)(nil), + (*KeywordPlanAdGroupOperation_Update)(nil), + (*KeywordPlanAdGroupOperation_Remove)(nil), + } +} + +func _KeywordPlanAdGroupOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanAdGroupOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanAdGroupOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *KeywordPlanAdGroupOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *KeywordPlanAdGroupOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("KeywordPlanAdGroupOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanAdGroupOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanAdGroupOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanAdGroup) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanAdGroupOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanAdGroup) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanAdGroupOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &KeywordPlanAdGroupOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanAdGroupOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanAdGroupOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanAdGroupOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanAdGroupOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanAdGroupOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a Keyword Plan ad group mutate. +type MutateKeywordPlanAdGroupsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateKeywordPlanAdGroupResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanAdGroupsResponse) Reset() { *m = MutateKeywordPlanAdGroupsResponse{} } +func (m *MutateKeywordPlanAdGroupsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanAdGroupsResponse) ProtoMessage() {} +func (*MutateKeywordPlanAdGroupsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e, []int{3} +} +func (m *MutateKeywordPlanAdGroupsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanAdGroupsResponse.Unmarshal(m, b) +} +func (m *MutateKeywordPlanAdGroupsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanAdGroupsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanAdGroupsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanAdGroupsResponse.Merge(dst, src) +} +func (m *MutateKeywordPlanAdGroupsResponse) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanAdGroupsResponse.Size(m) +} +func (m *MutateKeywordPlanAdGroupsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanAdGroupsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanAdGroupsResponse proto.InternalMessageInfo + +func (m *MutateKeywordPlanAdGroupsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateKeywordPlanAdGroupsResponse) GetResults() []*MutateKeywordPlanAdGroupResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the Keyword Plan ad group mutate. +type MutateKeywordPlanAdGroupResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanAdGroupResult) Reset() { *m = MutateKeywordPlanAdGroupResult{} } +func (m *MutateKeywordPlanAdGroupResult) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanAdGroupResult) ProtoMessage() {} +func (*MutateKeywordPlanAdGroupResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e, []int{4} +} +func (m *MutateKeywordPlanAdGroupResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanAdGroupResult.Unmarshal(m, b) +} +func (m *MutateKeywordPlanAdGroupResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanAdGroupResult.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanAdGroupResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanAdGroupResult.Merge(dst, src) +} +func (m *MutateKeywordPlanAdGroupResult) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanAdGroupResult.Size(m) +} +func (m *MutateKeywordPlanAdGroupResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanAdGroupResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanAdGroupResult proto.InternalMessageInfo + +func (m *MutateKeywordPlanAdGroupResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetKeywordPlanAdGroupRequest)(nil), "google.ads.googleads.v1.services.GetKeywordPlanAdGroupRequest") + proto.RegisterType((*MutateKeywordPlanAdGroupsRequest)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanAdGroupsRequest") + proto.RegisterType((*KeywordPlanAdGroupOperation)(nil), "google.ads.googleads.v1.services.KeywordPlanAdGroupOperation") + proto.RegisterType((*MutateKeywordPlanAdGroupsResponse)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanAdGroupsResponse") + proto.RegisterType((*MutateKeywordPlanAdGroupResult)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanAdGroupResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanAdGroupServiceClient is the client API for KeywordPlanAdGroupService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanAdGroupServiceClient interface { + // Returns the requested Keyword Plan ad group in full detail. + GetKeywordPlanAdGroup(ctx context.Context, in *GetKeywordPlanAdGroupRequest, opts ...grpc.CallOption) (*resources.KeywordPlanAdGroup, error) + // Creates, updates, or removes Keyword Plan ad groups. Operation statuses are + // returned. 
+ MutateKeywordPlanAdGroups(ctx context.Context, in *MutateKeywordPlanAdGroupsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanAdGroupsResponse, error) +} + +type keywordPlanAdGroupServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanAdGroupServiceClient(cc *grpc.ClientConn) KeywordPlanAdGroupServiceClient { + return &keywordPlanAdGroupServiceClient{cc} +} + +func (c *keywordPlanAdGroupServiceClient) GetKeywordPlanAdGroup(ctx context.Context, in *GetKeywordPlanAdGroupRequest, opts ...grpc.CallOption) (*resources.KeywordPlanAdGroup, error) { + out := new(resources.KeywordPlanAdGroup) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanAdGroupService/GetKeywordPlanAdGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanAdGroupServiceClient) MutateKeywordPlanAdGroups(ctx context.Context, in *MutateKeywordPlanAdGroupsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanAdGroupsResponse, error) { + out := new(MutateKeywordPlanAdGroupsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanAdGroupService/MutateKeywordPlanAdGroups", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanAdGroupServiceServer is the server API for KeywordPlanAdGroupService service. +type KeywordPlanAdGroupServiceServer interface { + // Returns the requested Keyword Plan ad group in full detail. + GetKeywordPlanAdGroup(context.Context, *GetKeywordPlanAdGroupRequest) (*resources.KeywordPlanAdGroup, error) + // Creates, updates, or removes Keyword Plan ad groups. Operation statuses are + // returned. + MutateKeywordPlanAdGroups(context.Context, *MutateKeywordPlanAdGroupsRequest) (*MutateKeywordPlanAdGroupsResponse, error) +} + +func RegisterKeywordPlanAdGroupServiceServer(s *grpc.Server, srv KeywordPlanAdGroupServiceServer) { + s.RegisterService(&_KeywordPlanAdGroupService_serviceDesc, srv) +} + +func _KeywordPlanAdGroupService_GetKeywordPlanAdGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordPlanAdGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanAdGroupServiceServer).GetKeywordPlanAdGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanAdGroupService/GetKeywordPlanAdGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanAdGroupServiceServer).GetKeywordPlanAdGroup(ctx, req.(*GetKeywordPlanAdGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanAdGroupService_MutateKeywordPlanAdGroups_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateKeywordPlanAdGroupsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanAdGroupServiceServer).MutateKeywordPlanAdGroups(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanAdGroupService/MutateKeywordPlanAdGroups", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanAdGroupServiceServer).MutateKeywordPlanAdGroups(ctx, req.(*MutateKeywordPlanAdGroupsRequest)) + } + return interceptor(ctx, 
in, info, handler) +} + +var _KeywordPlanAdGroupService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanAdGroupService", + HandlerType: (*KeywordPlanAdGroupServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordPlanAdGroup", + Handler: _KeywordPlanAdGroupService_GetKeywordPlanAdGroup_Handler, + }, + { + MethodName: "MutateKeywordPlanAdGroups", + Handler: _KeywordPlanAdGroupService_MutateKeywordPlanAdGroups_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_ad_group_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_ad_group_service.proto", fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e) +} + +var fileDescriptor_keyword_plan_ad_group_service_b366b1d5e0114e4e = []byte{ + // 730 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xdd, 0x6a, 0xd4, 0x4e, + 0x14, 0xc0, 0xff, 0xc9, 0xfe, 0xa9, 0x76, 0xb6, 0x2a, 0x8c, 0x14, 0xb7, 0x6b, 0xa9, 0x6b, 0x2c, + 0x58, 0xf6, 0x22, 0x61, 0x57, 0x8a, 0x92, 0xb2, 0xe2, 0x6e, 0x6d, 0xb7, 0x22, 0xb5, 0x25, 0x85, + 0x5e, 0x94, 0x95, 0x30, 0xdd, 0x4c, 0x43, 0x68, 0x92, 0x89, 0x33, 0x93, 0x2d, 0xa5, 0xf4, 0x46, + 0xf0, 0x09, 0x7c, 0x03, 0xbd, 0xf3, 0x45, 0x04, 0xc1, 0x2b, 0x2f, 0x7c, 0x01, 0x6f, 0xf4, 0xca, + 0x47, 0x90, 0xc9, 0x64, 0xd6, 0x7e, 0x65, 0x57, 0xda, 0xbb, 0x93, 0x33, 0x27, 0xbf, 0xf3, 0x39, + 0x67, 0xc0, 0x73, 0x9f, 0x10, 0x3f, 0xc4, 0x16, 0xf2, 0x98, 0x25, 0x45, 0x21, 0x0d, 0x1a, 0x16, + 0xc3, 0x74, 0x10, 0xf4, 0x31, 0xb3, 0xf6, 0xf1, 0xe1, 0x01, 0xa1, 0x9e, 0x9b, 0x84, 0x28, 0x76, + 0x91, 0xe7, 0xfa, 0x94, 0xa4, 0x89, 0x9b, 0x1f, 0x9b, 0x09, 0x25, 0x9c, 0xc0, 0x9a, 0xfc, 0xd5, + 0x44, 0x1e, 0x33, 0x87, 0x14, 0x73, 0xd0, 0x30, 0x15, 0xa5, 0xda, 0x2a, 0xf2, 0x43, 0x31, 0x23, + 0x29, 0x2d, 0x74, 0x24, 0x1d, 0x54, 0x67, 0xd5, 0xef, 0x49, 0x60, 0xa1, 0x38, 0x26, 0x1c, 0xf1, + 0x80, 0xc4, 0x2c, 0x3f, 0xcd, 0xdd, 0x5b, 0xd9, 0xd7, 0x6e, 0xba, 0x67, 0xed, 0x05, 0x38, 0xf4, + 0xdc, 0x08, 0xb1, 0xfd, 0xdc, 0x62, 0xee, 0xac, 0xc5, 0x01, 0x45, 0x49, 0x82, 0xa9, 0x22, 0xdc, + 0xc9, 0xcf, 0x69, 0xd2, 0xb7, 0x18, 0x47, 0x3c, 0xcd, 0x0f, 0x8c, 0x65, 0x30, 0xdb, 0xc5, 0xfc, + 0xa5, 0x0c, 0x6d, 0x33, 0x44, 0x71, 0xdb, 0xeb, 0x8a, 0xb8, 0x1c, 0xfc, 0x26, 0xc5, 0x8c, 0xc3, + 0x07, 0xe0, 0x86, 0xca, 0xc0, 0x8d, 0x51, 0x84, 0x2b, 0x5a, 0x4d, 0x5b, 0x98, 0x74, 0xa6, 0x94, + 0xf2, 0x15, 0x8a, 0xb0, 0xf1, 0x5b, 0x03, 0xb5, 0xf5, 0x94, 0x23, 0x8e, 0xcf, 0x83, 0x98, 0x22, + 0xdd, 0x03, 0xe5, 0x7e, 0xca, 0x38, 0x89, 0x30, 0x75, 0x03, 0x2f, 0xe7, 0x00, 0xa5, 0x7a, 0xe1, + 0xc1, 0xd7, 0x00, 0x90, 0x04, 0x53, 0x99, 0x79, 0x45, 0xaf, 0x95, 0x16, 0xca, 0xcd, 0x96, 0x39, + 0xae, 0xf2, 0xe6, 0x79, 0x97, 0x1b, 0x8a, 0xe2, 0x9c, 0x00, 0xc2, 0x87, 0xe0, 0x56, 0x82, 0x28, + 0x0f, 0x50, 0xe8, 0xee, 0xa1, 0x20, 0x4c, 0x29, 0xae, 0x94, 0x6a, 0xda, 0xc2, 0x75, 0xe7, 0x66, + 0xae, 0x5e, 0x95, 0x5a, 0x91, 0xf2, 0x00, 0x85, 0x81, 0x87, 0x38, 0x76, 0x49, 0x1c, 0x1e, 0x56, + 0xfe, 0xcf, 0xcc, 0xa6, 0x94, 0x72, 0x23, 0x0e, 0x0f, 0x8d, 0x8f, 0x3a, 0xb8, 0x3b, 0xc2, 0x33, + 0x5c, 0x02, 0xe5, 0x34, 0xc9, 0x10, 0xa2, 0x4b, 0x19, 0xa2, 0xdc, 0xac, 0xaa, 0x6c, 0x54, 0x9b, + 0xcc, 0x55, 0xd1, 0xc8, 0x75, 0xc4, 0xf6, 0x1d, 0x20, 0xcd, 0x85, 0x0c, 0x37, 0xc0, 0x44, 0x9f, + 0x62, 0xc4, 0x65, 0xb5, 0xcb, 0xcd, 0xc5, 0xc2, 0x2a, 0x0c, 0xa7, 0xeb, 0x82, 0x32, 0xac, 0xfd, + 0xe7, 0xe4, 0x18, 0x01, 0x94, 0xf8, 0x8a, 0x7e, 0x45, 0xa0, 0xc4, 0xc0, 0x0a, 0x98, 
0xa0, 0x38, + 0x22, 0x03, 0x59, 0xc3, 0x49, 0x71, 0x22, 0xbf, 0x3b, 0x65, 0x30, 0x39, 0x2c, 0xba, 0xf1, 0x59, + 0x03, 0xf7, 0x47, 0x0c, 0x06, 0x4b, 0x48, 0xcc, 0x30, 0x5c, 0x05, 0xd3, 0x67, 0x3a, 0xe3, 0x62, + 0x4a, 0x09, 0xcd, 0xd8, 0xe5, 0x26, 0x54, 0xc1, 0xd2, 0xa4, 0x6f, 0x6e, 0x65, 0xc3, 0xeb, 0xdc, + 0x3e, 0xdd, 0xb3, 0x15, 0x61, 0x0e, 0x77, 0xc0, 0x35, 0x8a, 0x59, 0x1a, 0x72, 0x35, 0x3d, 0xcf, + 0xc6, 0x4f, 0x4f, 0x51, 0x74, 0x4e, 0x06, 0x72, 0x14, 0xd0, 0x58, 0x01, 0x73, 0xa3, 0x4d, 0xff, + 0xe9, 0xa6, 0x34, 0xbf, 0x97, 0xc0, 0xcc, 0x79, 0xc2, 0x96, 0x8c, 0x06, 0x7e, 0xd5, 0xc0, 0xf4, + 0x85, 0xb7, 0x11, 0x3e, 0x1d, 0x9f, 0xc9, 0xa8, 0x6b, 0x5c, 0xbd, 0x5c, 0xc3, 0x8d, 0xd6, 0xdb, + 0x6f, 0x3f, 0xde, 0xeb, 0x8f, 0xe1, 0xa2, 0xd8, 0x64, 0x47, 0xa7, 0xd2, 0x6b, 0xa9, 0x9b, 0xcb, + 0xac, 0xba, 0x5a, 0x6d, 0x27, 0xbb, 0x6b, 0xd5, 0x8f, 0xe1, 0x4f, 0x0d, 0xcc, 0x14, 0xb6, 0x1f, + 0x76, 0x2e, 0xdf, 0x1d, 0xb5, 0x54, 0xaa, 0xcb, 0x57, 0x62, 0xc8, 0xf9, 0x33, 0x96, 0xb3, 0x2c, + 0x5b, 0xc6, 0x13, 0x91, 0xe5, 0xdf, 0xb4, 0x8e, 0x4e, 0xac, 0xab, 0x56, 0xfd, 0xf8, 0xa2, 0x24, + 0xed, 0x28, 0x83, 0xdb, 0x5a, 0xbd, 0xf3, 0x4e, 0x07, 0xf3, 0x7d, 0x12, 0x8d, 0x8d, 0xa7, 0x33, + 0x57, 0xd8, 0xff, 0x4d, 0xb1, 0x15, 0x36, 0xb5, 0x9d, 0xb5, 0x9c, 0xe1, 0x93, 0x10, 0xc5, 0xbe, + 0x49, 0xa8, 0x6f, 0xf9, 0x38, 0xce, 0x76, 0x86, 0x7a, 0x5b, 0x92, 0x80, 0x15, 0x3f, 0x69, 0x4b, + 0x4a, 0xf8, 0xa0, 0x97, 0xba, 0xed, 0xf6, 0x27, 0xbd, 0xd6, 0x95, 0xc0, 0xb6, 0xc7, 0x4c, 0x29, + 0x0a, 0x69, 0xbb, 0x61, 0xe6, 0x8e, 0xd9, 0x17, 0x65, 0xd2, 0x6b, 0x7b, 0xac, 0x37, 0x34, 0xe9, + 0x6d, 0x37, 0x7a, 0xca, 0xe4, 0x97, 0x3e, 0x2f, 0xf5, 0xb6, 0xdd, 0xf6, 0x98, 0x6d, 0x0f, 0x8d, + 0x6c, 0x7b, 0xbb, 0x61, 0xdb, 0xca, 0x6c, 0x77, 0x22, 0x8b, 0xf3, 0xd1, 0x9f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xc6, 0x4d, 0x17, 0xa6, 0x79, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_campaign_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_campaign_service.pb.go new file mode 100644 index 0000000..0866d3b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_campaign_service.pb.go @@ -0,0 +1,593 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_campaign_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordPlanCampaignService.GetKeywordPlanCampaign][google.ads.googleads.v1.services.KeywordPlanCampaignService.GetKeywordPlanCampaign]. +type GetKeywordPlanCampaignRequest struct { + // The resource name of the Keyword Plan campaign to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordPlanCampaignRequest) Reset() { *m = GetKeywordPlanCampaignRequest{} } +func (m *GetKeywordPlanCampaignRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordPlanCampaignRequest) ProtoMessage() {} +func (*GetKeywordPlanCampaignRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2, []int{0} +} +func (m *GetKeywordPlanCampaignRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordPlanCampaignRequest.Unmarshal(m, b) +} +func (m *GetKeywordPlanCampaignRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordPlanCampaignRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordPlanCampaignRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordPlanCampaignRequest.Merge(dst, src) +} +func (m *GetKeywordPlanCampaignRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordPlanCampaignRequest.Size(m) +} +func (m *GetKeywordPlanCampaignRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordPlanCampaignRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordPlanCampaignRequest proto.InternalMessageInfo + +func (m *GetKeywordPlanCampaignRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [KeywordPlanCampaignService.MutateKeywordPlanCampaigns][google.ads.googleads.v1.services.KeywordPlanCampaignService.MutateKeywordPlanCampaigns]. +type MutateKeywordPlanCampaignsRequest struct { + // The ID of the customer whose Keyword Plan campaigns are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual Keyword Plan campaigns. + Operations []*KeywordPlanCampaignOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanCampaignsRequest) Reset() { *m = MutateKeywordPlanCampaignsRequest{} } +func (m *MutateKeywordPlanCampaignsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanCampaignsRequest) ProtoMessage() {} +func (*MutateKeywordPlanCampaignsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2, []int{1} +} +func (m *MutateKeywordPlanCampaignsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanCampaignsRequest.Unmarshal(m, b) +} +func (m *MutateKeywordPlanCampaignsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanCampaignsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanCampaignsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanCampaignsRequest.Merge(dst, src) +} +func (m *MutateKeywordPlanCampaignsRequest) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanCampaignsRequest.Size(m) +} +func (m *MutateKeywordPlanCampaignsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanCampaignsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanCampaignsRequest proto.InternalMessageInfo + +func (m *MutateKeywordPlanCampaignsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateKeywordPlanCampaignsRequest) GetOperations() []*KeywordPlanCampaignOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateKeywordPlanCampaignsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateKeywordPlanCampaignsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a Keyword Plan campaign. +type KeywordPlanCampaignOperation struct { + // The FieldMask that determines which resource fields are modified in an + // update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *KeywordPlanCampaignOperation_Create + // *KeywordPlanCampaignOperation_Update + // *KeywordPlanCampaignOperation_Remove + Operation isKeywordPlanCampaignOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanCampaignOperation) Reset() { *m = KeywordPlanCampaignOperation{} } +func (m *KeywordPlanCampaignOperation) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanCampaignOperation) ProtoMessage() {} +func (*KeywordPlanCampaignOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2, []int{2} +} +func (m *KeywordPlanCampaignOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanCampaignOperation.Unmarshal(m, b) +} +func (m *KeywordPlanCampaignOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanCampaignOperation.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanCampaignOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanCampaignOperation.Merge(dst, src) +} +func (m *KeywordPlanCampaignOperation) XXX_Size() int { + return xxx_messageInfo_KeywordPlanCampaignOperation.Size(m) +} +func (m *KeywordPlanCampaignOperation) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanCampaignOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanCampaignOperation proto.InternalMessageInfo + +func (m *KeywordPlanCampaignOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isKeywordPlanCampaignOperation_Operation interface { + isKeywordPlanCampaignOperation_Operation() +} + +type KeywordPlanCampaignOperation_Create struct { + Create *resources.KeywordPlanCampaign `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type KeywordPlanCampaignOperation_Update struct { + Update *resources.KeywordPlanCampaign `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type KeywordPlanCampaignOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*KeywordPlanCampaignOperation_Create) isKeywordPlanCampaignOperation_Operation() {} + +func (*KeywordPlanCampaignOperation_Update) isKeywordPlanCampaignOperation_Operation() {} + +func (*KeywordPlanCampaignOperation_Remove) isKeywordPlanCampaignOperation_Operation() {} + +func (m *KeywordPlanCampaignOperation) GetOperation() isKeywordPlanCampaignOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *KeywordPlanCampaignOperation) GetCreate() *resources.KeywordPlanCampaign { + if x, ok := m.GetOperation().(*KeywordPlanCampaignOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *KeywordPlanCampaignOperation) GetUpdate() *resources.KeywordPlanCampaign { + if x, ok := m.GetOperation().(*KeywordPlanCampaignOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *KeywordPlanCampaignOperation) GetRemove() string { + if x, ok := m.GetOperation().(*KeywordPlanCampaignOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeywordPlanCampaignOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanCampaignOperation_OneofMarshaler, _KeywordPlanCampaignOperation_OneofUnmarshaler, _KeywordPlanCampaignOperation_OneofSizer, []interface{}{ + (*KeywordPlanCampaignOperation_Create)(nil), + (*KeywordPlanCampaignOperation_Update)(nil), + (*KeywordPlanCampaignOperation_Remove)(nil), + } +} + +func _KeywordPlanCampaignOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanCampaignOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanCampaignOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *KeywordPlanCampaignOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *KeywordPlanCampaignOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("KeywordPlanCampaignOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanCampaignOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanCampaignOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanCampaign) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanCampaignOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanCampaign) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanCampaignOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &KeywordPlanCampaignOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanCampaignOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanCampaignOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanCampaignOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanCampaignOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanCampaignOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a Keyword Plan campaign mutate. +type MutateKeywordPlanCampaignsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateKeywordPlanCampaignResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanCampaignsResponse) Reset() { *m = MutateKeywordPlanCampaignsResponse{} } +func (m *MutateKeywordPlanCampaignsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanCampaignsResponse) ProtoMessage() {} +func (*MutateKeywordPlanCampaignsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2, []int{3} +} +func (m *MutateKeywordPlanCampaignsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanCampaignsResponse.Unmarshal(m, b) +} +func (m *MutateKeywordPlanCampaignsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanCampaignsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanCampaignsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanCampaignsResponse.Merge(dst, src) +} +func (m *MutateKeywordPlanCampaignsResponse) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanCampaignsResponse.Size(m) +} +func (m *MutateKeywordPlanCampaignsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanCampaignsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanCampaignsResponse proto.InternalMessageInfo + +func (m *MutateKeywordPlanCampaignsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateKeywordPlanCampaignsResponse) GetResults() []*MutateKeywordPlanCampaignResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the Keyword Plan campaign mutate. +type MutateKeywordPlanCampaignResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanCampaignResult) Reset() { *m = MutateKeywordPlanCampaignResult{} } +func (m *MutateKeywordPlanCampaignResult) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanCampaignResult) ProtoMessage() {} +func (*MutateKeywordPlanCampaignResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2, []int{4} +} +func (m *MutateKeywordPlanCampaignResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanCampaignResult.Unmarshal(m, b) +} +func (m *MutateKeywordPlanCampaignResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanCampaignResult.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanCampaignResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanCampaignResult.Merge(dst, src) +} +func (m *MutateKeywordPlanCampaignResult) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanCampaignResult.Size(m) +} +func (m *MutateKeywordPlanCampaignResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanCampaignResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanCampaignResult proto.InternalMessageInfo + +func (m *MutateKeywordPlanCampaignResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetKeywordPlanCampaignRequest)(nil), "google.ads.googleads.v1.services.GetKeywordPlanCampaignRequest") + proto.RegisterType((*MutateKeywordPlanCampaignsRequest)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanCampaignsRequest") + proto.RegisterType((*KeywordPlanCampaignOperation)(nil), "google.ads.googleads.v1.services.KeywordPlanCampaignOperation") + proto.RegisterType((*MutateKeywordPlanCampaignsResponse)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanCampaignsResponse") + proto.RegisterType((*MutateKeywordPlanCampaignResult)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanCampaignResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanCampaignServiceClient is the client API for KeywordPlanCampaignService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanCampaignServiceClient interface { + // Returns the requested Keyword Plan campaign in full detail. + GetKeywordPlanCampaign(ctx context.Context, in *GetKeywordPlanCampaignRequest, opts ...grpc.CallOption) (*resources.KeywordPlanCampaign, error) + // Creates, updates, or removes Keyword Plan campaigns. Operation statuses are + // returned. 
+ MutateKeywordPlanCampaigns(ctx context.Context, in *MutateKeywordPlanCampaignsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanCampaignsResponse, error) +} + +type keywordPlanCampaignServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanCampaignServiceClient(cc *grpc.ClientConn) KeywordPlanCampaignServiceClient { + return &keywordPlanCampaignServiceClient{cc} +} + +func (c *keywordPlanCampaignServiceClient) GetKeywordPlanCampaign(ctx context.Context, in *GetKeywordPlanCampaignRequest, opts ...grpc.CallOption) (*resources.KeywordPlanCampaign, error) { + out := new(resources.KeywordPlanCampaign) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanCampaignService/GetKeywordPlanCampaign", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanCampaignServiceClient) MutateKeywordPlanCampaigns(ctx context.Context, in *MutateKeywordPlanCampaignsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanCampaignsResponse, error) { + out := new(MutateKeywordPlanCampaignsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanCampaignService/MutateKeywordPlanCampaigns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanCampaignServiceServer is the server API for KeywordPlanCampaignService service. +type KeywordPlanCampaignServiceServer interface { + // Returns the requested Keyword Plan campaign in full detail. + GetKeywordPlanCampaign(context.Context, *GetKeywordPlanCampaignRequest) (*resources.KeywordPlanCampaign, error) + // Creates, updates, or removes Keyword Plan campaigns. Operation statuses are + // returned. + MutateKeywordPlanCampaigns(context.Context, *MutateKeywordPlanCampaignsRequest) (*MutateKeywordPlanCampaignsResponse, error) +} + +func RegisterKeywordPlanCampaignServiceServer(s *grpc.Server, srv KeywordPlanCampaignServiceServer) { + s.RegisterService(&_KeywordPlanCampaignService_serviceDesc, srv) +} + +func _KeywordPlanCampaignService_GetKeywordPlanCampaign_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordPlanCampaignRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanCampaignServiceServer).GetKeywordPlanCampaign(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanCampaignService/GetKeywordPlanCampaign", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanCampaignServiceServer).GetKeywordPlanCampaign(ctx, req.(*GetKeywordPlanCampaignRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanCampaignService_MutateKeywordPlanCampaigns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateKeywordPlanCampaignsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanCampaignServiceServer).MutateKeywordPlanCampaigns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanCampaignService/MutateKeywordPlanCampaigns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanCampaignServiceServer).MutateKeywordPlanCampaigns(ctx, 
req.(*MutateKeywordPlanCampaignsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KeywordPlanCampaignService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanCampaignService", + HandlerType: (*KeywordPlanCampaignServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordPlanCampaign", + Handler: _KeywordPlanCampaignService_GetKeywordPlanCampaign_Handler, + }, + { + MethodName: "MutateKeywordPlanCampaigns", + Handler: _KeywordPlanCampaignService_MutateKeywordPlanCampaigns_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_campaign_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_campaign_service.proto", fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2) +} + +var fileDescriptor_keyword_plan_campaign_service_3a49b026094afda2 = []byte{ + // 728 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x6b, 0xd4, 0x4c, + 0x18, 0xc0, 0xdf, 0x64, 0x5f, 0xaa, 0x9d, 0x54, 0x85, 0x11, 0x75, 0x09, 0xd5, 0xae, 0xb1, 0x60, + 0xd9, 0x43, 0xc2, 0xae, 0x50, 0x34, 0xa5, 0x95, 0x6d, 0xd7, 0x6d, 0x45, 0x6a, 0x97, 0x14, 0x7a, + 0xd0, 0xc5, 0x30, 0x4d, 0xa6, 0x21, 0x34, 0xc9, 0xc4, 0x99, 0xc9, 0x96, 0x52, 0x7a, 0xf1, 0xe0, + 0x17, 0xf0, 0x1b, 0x78, 0xf0, 0xe0, 0x37, 0xd1, 0x9b, 0xf8, 0x05, 0x3c, 0x78, 0x12, 0xbc, 0xf8, + 0x09, 0x24, 0x99, 0xcc, 0xda, 0x96, 0x4d, 0x57, 0xec, 0xed, 0xc9, 0x33, 0x4f, 0x7e, 0xcf, 0xdf, + 0x79, 0x06, 0x74, 0x03, 0x42, 0x82, 0x08, 0x5b, 0xc8, 0x67, 0x96, 0x10, 0x73, 0x69, 0xd8, 0xb2, + 0x18, 0xa6, 0xc3, 0xd0, 0xc3, 0xcc, 0xda, 0xc7, 0x87, 0x07, 0x84, 0xfa, 0x6e, 0x1a, 0xa1, 0xc4, + 0xf5, 0x50, 0x9c, 0xa2, 0x30, 0x48, 0xdc, 0xf2, 0xd8, 0x4c, 0x29, 0xe1, 0x04, 0x36, 0xc4, 0xaf, + 0x26, 0xf2, 0x99, 0x39, 0xa2, 0x98, 0xc3, 0x96, 0x29, 0x29, 0xfa, 0x72, 0x95, 0x1f, 0x8a, 0x19, + 0xc9, 0x68, 0xa5, 0x23, 0xe1, 0x40, 0x9f, 0x95, 0xbf, 0xa7, 0xa1, 0x85, 0x92, 0x84, 0x70, 0xc4, + 0x43, 0x92, 0xb0, 0xf2, 0xb4, 0x74, 0x6f, 0x15, 0x5f, 0xbb, 0xd9, 0x9e, 0xb5, 0x17, 0xe2, 0xc8, + 0x77, 0x63, 0xc4, 0xf6, 0x4b, 0x8b, 0x3b, 0x67, 0x2d, 0x0e, 0x28, 0x4a, 0x53, 0x4c, 0x25, 0xe1, + 0x56, 0x79, 0x4e, 0x53, 0xcf, 0x62, 0x1c, 0xf1, 0xac, 0x3c, 0x30, 0xba, 0xe0, 0xf6, 0x3a, 0xe6, + 0xcf, 0x44, 0x68, 0xfd, 0x08, 0x25, 0x6b, 0x65, 0x60, 0x0e, 0x7e, 0x9d, 0x61, 0xc6, 0xe1, 0x3d, + 0x70, 0x45, 0xa6, 0xe0, 0x26, 0x28, 0xc6, 0x75, 0xa5, 0xa1, 0x2c, 0x4c, 0x3b, 0x33, 0x52, 0xf9, + 0x1c, 0xc5, 0xd8, 0xf8, 0xa5, 0x80, 0xbb, 0x9b, 0x19, 0x47, 0x1c, 0x8f, 0x21, 0x31, 0x89, 0x9a, + 0x03, 0x9a, 0x97, 0x31, 0x4e, 0x62, 0x4c, 0xdd, 0xd0, 0x2f, 0x41, 0x40, 0xaa, 0x9e, 0xfa, 0xf0, + 0x15, 0x00, 0x24, 0xc5, 0x54, 0xe4, 0x5e, 0x57, 0x1b, 0xb5, 0x05, 0xad, 0xbd, 0x62, 0x4e, 0xaa, + 0xbd, 0x39, 0xc6, 0xe7, 0x96, 0xc4, 0x38, 0x27, 0x88, 0xf0, 0x3e, 0xb8, 0x96, 0x22, 0xca, 0x43, + 0x14, 0xb9, 0x7b, 0x28, 0x8c, 0x32, 0x8a, 0xeb, 0xb5, 0x86, 0xb2, 0x70, 0xd9, 0xb9, 0x5a, 0xaa, + 0x7b, 0x42, 0x9b, 0x27, 0x3d, 0x44, 0x51, 0xe8, 0x23, 0x8e, 0x5d, 0x92, 0x44, 0x87, 0xf5, 0xff, + 0x0b, 0xb3, 0x19, 0xa9, 0xdc, 0x4a, 0xa2, 0x43, 0xe3, 0x83, 0x0a, 0x66, 0xcf, 0x73, 0x0d, 0x97, + 0x80, 0x96, 0xa5, 0x05, 0x23, 0xef, 0x54, 0xc1, 0xd0, 0xda, 0xba, 0xcc, 0x47, 0xb6, 0xca, 0xec, + 0xe5, 0xcd, 0xdc, 0x44, 0x6c, 0xdf, 0x01, 0xc2, 0x3c, 0x97, 0x61, 0x1f, 0x4c, 0x79, 0x14, 0x23, + 0x2e, 0x0a, 0xae, 0xb5, 0x17, 0x2b, 0xeb, 0x30, 0x9a, 0xb0, 0x71, 0x85, 0xd8, 0xf8, 0xcf, 0x29, + 
0x39, 0x39, 0x51, 0xf0, 0xeb, 0xea, 0x45, 0x89, 0x82, 0x03, 0xeb, 0x60, 0x8a, 0xe2, 0x98, 0x0c, + 0x45, 0x19, 0xa7, 0xf3, 0x13, 0xf1, 0xbd, 0xaa, 0x81, 0xe9, 0x51, 0xdd, 0x8d, 0x4f, 0x0a, 0x30, + 0xce, 0x9b, 0x0e, 0x96, 0x92, 0x84, 0x61, 0xd8, 0x03, 0x37, 0xce, 0x74, 0xc7, 0xc5, 0x94, 0x12, + 0x5a, 0xc0, 0xb5, 0x36, 0x94, 0xe1, 0xd2, 0xd4, 0x33, 0xb7, 0x8b, 0x19, 0x76, 0xae, 0x9f, 0xee, + 0xdb, 0x93, 0xdc, 0x1c, 0xbe, 0x04, 0x97, 0x28, 0x66, 0x59, 0xc4, 0xe5, 0x08, 0x75, 0x26, 0x8f, + 0x50, 0x65, 0x78, 0x4e, 0x41, 0x72, 0x24, 0xd1, 0xe8, 0x81, 0xb9, 0x09, 0xb6, 0x7f, 0x75, 0x63, + 0xda, 0xdf, 0x6a, 0x40, 0x1f, 0x83, 0xd8, 0x16, 0x01, 0xc1, 0x2f, 0x0a, 0xb8, 0x39, 0xfe, 0x5e, + 0xc2, 0xc7, 0x93, 0xb3, 0x39, 0xf7, 0x46, 0xeb, 0xff, 0xd8, 0x77, 0x63, 0xe5, 0xcd, 0xd7, 0xef, + 0xef, 0xd4, 0x87, 0x70, 0x31, 0x5f, 0x6b, 0x47, 0xa7, 0x52, 0x5c, 0x96, 0x97, 0x98, 0x59, 0x4d, + 0xb9, 0xe7, 0x4e, 0x35, 0xd9, 0x6a, 0x1e, 0xc3, 0x9f, 0x0a, 0xd0, 0xab, 0xc7, 0x00, 0xae, 0x5d, + 0xa0, 0x4b, 0x72, 0xc5, 0xe8, 0xdd, 0x8b, 0x41, 0xc4, 0x24, 0x1a, 0xdd, 0x22, 0xd3, 0x15, 0xe3, + 0x51, 0x9e, 0xe9, 0x9f, 0xd4, 0x8e, 0x4e, 0x6c, 0xaf, 0xe5, 0xe6, 0xf1, 0xd8, 0x44, 0xed, 0xb8, + 0xc0, 0xdb, 0x4a, 0x73, 0xf5, 0xad, 0x0a, 0xe6, 0x3d, 0x12, 0x4f, 0x8c, 0x68, 0x75, 0xae, 0x7a, + 0x10, 0xfa, 0xf9, 0x92, 0xe8, 0x2b, 0x2f, 0x36, 0x4a, 0x48, 0x40, 0x22, 0x94, 0x04, 0x26, 0xa1, + 0x81, 0x15, 0xe0, 0xa4, 0x58, 0x21, 0xf2, 0xb9, 0x49, 0x43, 0x56, 0xfd, 0xca, 0x2d, 0x49, 0xe1, + 0xbd, 0x5a, 0x5b, 0xef, 0x74, 0x3e, 0xaa, 0x8d, 0x75, 0x01, 0xec, 0xf8, 0xcc, 0x14, 0x62, 0x2e, + 0xed, 0xb4, 0xcc, 0xd2, 0x31, 0xfb, 0x2c, 0x4d, 0x06, 0x1d, 0x9f, 0x0d, 0x46, 0x26, 0x83, 0x9d, + 0xd6, 0x40, 0x9a, 0xfc, 0x50, 0xe7, 0x85, 0xde, 0xb6, 0x3b, 0x3e, 0xb3, 0xed, 0x91, 0x91, 0x6d, + 0xef, 0xb4, 0x6c, 0x5b, 0x9a, 0xed, 0x4e, 0x15, 0x71, 0x3e, 0xf8, 0x1d, 0x00, 0x00, 0xff, 0xff, + 0x79, 0x2e, 0x5d, 0x8a, 0x8c, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_idea_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_idea_service.pb.go new file mode 100644 index 0000000..3227046 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_idea_service.pb.go @@ -0,0 +1,605 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_idea_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordIdeaService.GenerateKeywordIdeas][]. 
+type GenerateKeywordIdeasRequest struct { + // The ID of the customer with the recommendation. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The resource name of the language to target. + // Required + Language *wrappers.StringValue `protobuf:"bytes,7,opt,name=language,proto3" json:"language,omitempty"` + // The resource names of the location to target. + // Max 10 + GeoTargetConstants []*wrappers.StringValue `protobuf:"bytes,8,rep,name=geo_target_constants,json=geoTargetConstants,proto3" json:"geo_target_constants,omitempty"` + // Targeting network. + KeywordPlanNetwork enums.KeywordPlanNetworkEnum_KeywordPlanNetwork `protobuf:"varint,9,opt,name=keyword_plan_network,json=keywordPlanNetwork,proto3,enum=google.ads.googleads.v1.enums.KeywordPlanNetworkEnum_KeywordPlanNetwork" json:"keyword_plan_network,omitempty"` + // The type of seed to generate keyword ideas. + // + // Types that are valid to be assigned to Seed: + // *GenerateKeywordIdeasRequest_KeywordAndUrlSeed + // *GenerateKeywordIdeasRequest_KeywordSeed + // *GenerateKeywordIdeasRequest_UrlSeed + Seed isGenerateKeywordIdeasRequest_Seed `protobuf_oneof:"seed"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateKeywordIdeasRequest) Reset() { *m = GenerateKeywordIdeasRequest{} } +func (m *GenerateKeywordIdeasRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateKeywordIdeasRequest) ProtoMessage() {} +func (*GenerateKeywordIdeasRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{0} +} +func (m *GenerateKeywordIdeasRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateKeywordIdeasRequest.Unmarshal(m, b) +} +func (m *GenerateKeywordIdeasRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateKeywordIdeasRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateKeywordIdeasRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateKeywordIdeasRequest.Merge(dst, src) +} +func (m *GenerateKeywordIdeasRequest) XXX_Size() int { + return xxx_messageInfo_GenerateKeywordIdeasRequest.Size(m) +} +func (m *GenerateKeywordIdeasRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateKeywordIdeasRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateKeywordIdeasRequest proto.InternalMessageInfo + +func (m *GenerateKeywordIdeasRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *GenerateKeywordIdeasRequest) GetLanguage() *wrappers.StringValue { + if m != nil { + return m.Language + } + return nil +} + +func (m *GenerateKeywordIdeasRequest) GetGeoTargetConstants() []*wrappers.StringValue { + if m != nil { + return m.GeoTargetConstants + } + return nil +} + +func (m *GenerateKeywordIdeasRequest) GetKeywordPlanNetwork() enums.KeywordPlanNetworkEnum_KeywordPlanNetwork { + if m != nil { + return m.KeywordPlanNetwork + } + return enums.KeywordPlanNetworkEnum_UNSPECIFIED +} + +type isGenerateKeywordIdeasRequest_Seed interface { + isGenerateKeywordIdeasRequest_Seed() +} + +type GenerateKeywordIdeasRequest_KeywordAndUrlSeed struct { + KeywordAndUrlSeed *KeywordAndUrlSeed `protobuf:"bytes,2,opt,name=keyword_and_url_seed,json=keywordAndUrlSeed,proto3,oneof"` +} + +type GenerateKeywordIdeasRequest_KeywordSeed struct { + KeywordSeed *KeywordSeed 
`protobuf:"bytes,3,opt,name=keyword_seed,json=keywordSeed,proto3,oneof"` +} + +type GenerateKeywordIdeasRequest_UrlSeed struct { + UrlSeed *UrlSeed `protobuf:"bytes,5,opt,name=url_seed,json=urlSeed,proto3,oneof"` +} + +func (*GenerateKeywordIdeasRequest_KeywordAndUrlSeed) isGenerateKeywordIdeasRequest_Seed() {} + +func (*GenerateKeywordIdeasRequest_KeywordSeed) isGenerateKeywordIdeasRequest_Seed() {} + +func (*GenerateKeywordIdeasRequest_UrlSeed) isGenerateKeywordIdeasRequest_Seed() {} + +func (m *GenerateKeywordIdeasRequest) GetSeed() isGenerateKeywordIdeasRequest_Seed { + if m != nil { + return m.Seed + } + return nil +} + +func (m *GenerateKeywordIdeasRequest) GetKeywordAndUrlSeed() *KeywordAndUrlSeed { + if x, ok := m.GetSeed().(*GenerateKeywordIdeasRequest_KeywordAndUrlSeed); ok { + return x.KeywordAndUrlSeed + } + return nil +} + +func (m *GenerateKeywordIdeasRequest) GetKeywordSeed() *KeywordSeed { + if x, ok := m.GetSeed().(*GenerateKeywordIdeasRequest_KeywordSeed); ok { + return x.KeywordSeed + } + return nil +} + +func (m *GenerateKeywordIdeasRequest) GetUrlSeed() *UrlSeed { + if x, ok := m.GetSeed().(*GenerateKeywordIdeasRequest_UrlSeed); ok { + return x.UrlSeed + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GenerateKeywordIdeasRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GenerateKeywordIdeasRequest_OneofMarshaler, _GenerateKeywordIdeasRequest_OneofUnmarshaler, _GenerateKeywordIdeasRequest_OneofSizer, []interface{}{ + (*GenerateKeywordIdeasRequest_KeywordAndUrlSeed)(nil), + (*GenerateKeywordIdeasRequest_KeywordSeed)(nil), + (*GenerateKeywordIdeasRequest_UrlSeed)(nil), + } +} + +func _GenerateKeywordIdeasRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GenerateKeywordIdeasRequest) + // seed + switch x := m.Seed.(type) { + case *GenerateKeywordIdeasRequest_KeywordAndUrlSeed: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KeywordAndUrlSeed); err != nil { + return err + } + case *GenerateKeywordIdeasRequest_KeywordSeed: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KeywordSeed); err != nil { + return err + } + case *GenerateKeywordIdeasRequest_UrlSeed: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UrlSeed); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GenerateKeywordIdeasRequest.Seed has unexpected type %T", x) + } + return nil +} + +func _GenerateKeywordIdeasRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GenerateKeywordIdeasRequest) + switch tag { + case 2: // seed.keyword_and_url_seed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(KeywordAndUrlSeed) + err := b.DecodeMessage(msg) + m.Seed = &GenerateKeywordIdeasRequest_KeywordAndUrlSeed{msg} + return true, err + case 3: // seed.keyword_seed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(KeywordSeed) + err := b.DecodeMessage(msg) + m.Seed = &GenerateKeywordIdeasRequest_KeywordSeed{msg} + return true, err + case 5: // seed.url_seed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UrlSeed) + err := b.DecodeMessage(msg) + m.Seed = &GenerateKeywordIdeasRequest_UrlSeed{msg} + return true, err + default: + return 
false, nil + } +} + +func _GenerateKeywordIdeasRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GenerateKeywordIdeasRequest) + // seed + switch x := m.Seed.(type) { + case *GenerateKeywordIdeasRequest_KeywordAndUrlSeed: + s := proto.Size(x.KeywordAndUrlSeed) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GenerateKeywordIdeasRequest_KeywordSeed: + s := proto.Size(x.KeywordSeed) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GenerateKeywordIdeasRequest_UrlSeed: + s := proto.Size(x.UrlSeed) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Keyword And Url Seed +type KeywordAndUrlSeed struct { + // The URL to crawl in order to generate keyword ideas. + Url *wrappers.StringValue `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Requires at least one keyword. + Keywords []*wrappers.StringValue `protobuf:"bytes,2,rep,name=keywords,proto3" json:"keywords,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordAndUrlSeed) Reset() { *m = KeywordAndUrlSeed{} } +func (m *KeywordAndUrlSeed) String() string { return proto.CompactTextString(m) } +func (*KeywordAndUrlSeed) ProtoMessage() {} +func (*KeywordAndUrlSeed) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{1} +} +func (m *KeywordAndUrlSeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordAndUrlSeed.Unmarshal(m, b) +} +func (m *KeywordAndUrlSeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordAndUrlSeed.Marshal(b, m, deterministic) +} +func (dst *KeywordAndUrlSeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordAndUrlSeed.Merge(dst, src) +} +func (m *KeywordAndUrlSeed) XXX_Size() int { + return xxx_messageInfo_KeywordAndUrlSeed.Size(m) +} +func (m *KeywordAndUrlSeed) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordAndUrlSeed.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordAndUrlSeed proto.InternalMessageInfo + +func (m *KeywordAndUrlSeed) GetUrl() *wrappers.StringValue { + if m != nil { + return m.Url + } + return nil +} + +func (m *KeywordAndUrlSeed) GetKeywords() []*wrappers.StringValue { + if m != nil { + return m.Keywords + } + return nil +} + +// Keyword Seed +type KeywordSeed struct { + // Requires at least one keyword. 
+ Keywords []*wrappers.StringValue `protobuf:"bytes,1,rep,name=keywords,proto3" json:"keywords,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordSeed) Reset() { *m = KeywordSeed{} } +func (m *KeywordSeed) String() string { return proto.CompactTextString(m) } +func (*KeywordSeed) ProtoMessage() {} +func (*KeywordSeed) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{2} +} +func (m *KeywordSeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordSeed.Unmarshal(m, b) +} +func (m *KeywordSeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordSeed.Marshal(b, m, deterministic) +} +func (dst *KeywordSeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordSeed.Merge(dst, src) +} +func (m *KeywordSeed) XXX_Size() int { + return xxx_messageInfo_KeywordSeed.Size(m) +} +func (m *KeywordSeed) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordSeed.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordSeed proto.InternalMessageInfo + +func (m *KeywordSeed) GetKeywords() []*wrappers.StringValue { + if m != nil { + return m.Keywords + } + return nil +} + +// Url Seed +type UrlSeed struct { + // The URL to crawl in order to generate keyword ideas. + Url *wrappers.StringValue `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UrlSeed) Reset() { *m = UrlSeed{} } +func (m *UrlSeed) String() string { return proto.CompactTextString(m) } +func (*UrlSeed) ProtoMessage() {} +func (*UrlSeed) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{3} +} +func (m *UrlSeed) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UrlSeed.Unmarshal(m, b) +} +func (m *UrlSeed) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UrlSeed.Marshal(b, m, deterministic) +} +func (dst *UrlSeed) XXX_Merge(src proto.Message) { + xxx_messageInfo_UrlSeed.Merge(dst, src) +} +func (m *UrlSeed) XXX_Size() int { + return xxx_messageInfo_UrlSeed.Size(m) +} +func (m *UrlSeed) XXX_DiscardUnknown() { + xxx_messageInfo_UrlSeed.DiscardUnknown(m) +} + +var xxx_messageInfo_UrlSeed proto.InternalMessageInfo + +func (m *UrlSeed) GetUrl() *wrappers.StringValue { + if m != nil { + return m.Url + } + return nil +} + +// Response message for [KeywordIdeaService.GenerateKeywordIdeas][]. +type GenerateKeywordIdeaResponse struct { + // Results of generating keyword ideas. 
+ Results []*GenerateKeywordIdeaResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateKeywordIdeaResponse) Reset() { *m = GenerateKeywordIdeaResponse{} } +func (m *GenerateKeywordIdeaResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateKeywordIdeaResponse) ProtoMessage() {} +func (*GenerateKeywordIdeaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{4} +} +func (m *GenerateKeywordIdeaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateKeywordIdeaResponse.Unmarshal(m, b) +} +func (m *GenerateKeywordIdeaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateKeywordIdeaResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateKeywordIdeaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateKeywordIdeaResponse.Merge(dst, src) +} +func (m *GenerateKeywordIdeaResponse) XXX_Size() int { + return xxx_messageInfo_GenerateKeywordIdeaResponse.Size(m) +} +func (m *GenerateKeywordIdeaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateKeywordIdeaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateKeywordIdeaResponse proto.InternalMessageInfo + +func (m *GenerateKeywordIdeaResponse) GetResults() []*GenerateKeywordIdeaResult { + if m != nil { + return m.Results + } + return nil +} + +// The result of generating keyword ideas. +type GenerateKeywordIdeaResult struct { + // Text of the keyword idea. + // As in Keyword Plan historical metrics, this text may not be an actual + // keyword, but the canonical form of multiple keywords. + // See KeywordPlanKeywordHistoricalMetrics message in KeywordPlanService. 
+ Text *wrappers.StringValue `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + // The historical metrics for the keyword + KeywordIdeaMetrics *common.KeywordPlanHistoricalMetrics `protobuf:"bytes,3,opt,name=keyword_idea_metrics,json=keywordIdeaMetrics,proto3" json:"keyword_idea_metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateKeywordIdeaResult) Reset() { *m = GenerateKeywordIdeaResult{} } +func (m *GenerateKeywordIdeaResult) String() string { return proto.CompactTextString(m) } +func (*GenerateKeywordIdeaResult) ProtoMessage() {} +func (*GenerateKeywordIdeaResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9, []int{5} +} +func (m *GenerateKeywordIdeaResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateKeywordIdeaResult.Unmarshal(m, b) +} +func (m *GenerateKeywordIdeaResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateKeywordIdeaResult.Marshal(b, m, deterministic) +} +func (dst *GenerateKeywordIdeaResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateKeywordIdeaResult.Merge(dst, src) +} +func (m *GenerateKeywordIdeaResult) XXX_Size() int { + return xxx_messageInfo_GenerateKeywordIdeaResult.Size(m) +} +func (m *GenerateKeywordIdeaResult) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateKeywordIdeaResult.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateKeywordIdeaResult proto.InternalMessageInfo + +func (m *GenerateKeywordIdeaResult) GetText() *wrappers.StringValue { + if m != nil { + return m.Text + } + return nil +} + +func (m *GenerateKeywordIdeaResult) GetKeywordIdeaMetrics() *common.KeywordPlanHistoricalMetrics { + if m != nil { + return m.KeywordIdeaMetrics + } + return nil +} + +func init() { + proto.RegisterType((*GenerateKeywordIdeasRequest)(nil), "google.ads.googleads.v1.services.GenerateKeywordIdeasRequest") + proto.RegisterType((*KeywordAndUrlSeed)(nil), "google.ads.googleads.v1.services.KeywordAndUrlSeed") + proto.RegisterType((*KeywordSeed)(nil), "google.ads.googleads.v1.services.KeywordSeed") + proto.RegisterType((*UrlSeed)(nil), "google.ads.googleads.v1.services.UrlSeed") + proto.RegisterType((*GenerateKeywordIdeaResponse)(nil), "google.ads.googleads.v1.services.GenerateKeywordIdeaResponse") + proto.RegisterType((*GenerateKeywordIdeaResult)(nil), "google.ads.googleads.v1.services.GenerateKeywordIdeaResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanIdeaServiceClient is the client API for KeywordPlanIdeaService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanIdeaServiceClient interface { + // Returns a list of keyword ideas. 
+ GenerateKeywordIdeas(ctx context.Context, in *GenerateKeywordIdeasRequest, opts ...grpc.CallOption) (*GenerateKeywordIdeaResponse, error) +} + +type keywordPlanIdeaServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanIdeaServiceClient(cc *grpc.ClientConn) KeywordPlanIdeaServiceClient { + return &keywordPlanIdeaServiceClient{cc} +} + +func (c *keywordPlanIdeaServiceClient) GenerateKeywordIdeas(ctx context.Context, in *GenerateKeywordIdeasRequest, opts ...grpc.CallOption) (*GenerateKeywordIdeaResponse, error) { + out := new(GenerateKeywordIdeaResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanIdeaService/GenerateKeywordIdeas", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanIdeaServiceServer is the server API for KeywordPlanIdeaService service. +type KeywordPlanIdeaServiceServer interface { + // Returns a list of keyword ideas. + GenerateKeywordIdeas(context.Context, *GenerateKeywordIdeasRequest) (*GenerateKeywordIdeaResponse, error) +} + +func RegisterKeywordPlanIdeaServiceServer(s *grpc.Server, srv KeywordPlanIdeaServiceServer) { + s.RegisterService(&_KeywordPlanIdeaService_serviceDesc, srv) +} + +func _KeywordPlanIdeaService_GenerateKeywordIdeas_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateKeywordIdeasRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanIdeaServiceServer).GenerateKeywordIdeas(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanIdeaService/GenerateKeywordIdeas", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanIdeaServiceServer).GenerateKeywordIdeas(ctx, req.(*GenerateKeywordIdeasRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KeywordPlanIdeaService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanIdeaService", + HandlerType: (*KeywordPlanIdeaServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GenerateKeywordIdeas", + Handler: _KeywordPlanIdeaService_GenerateKeywordIdeas_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_idea_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_idea_service.proto", fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9) +} + +var fileDescriptor_keyword_plan_idea_service_2549e6cddf55a7a9 = []byte{ + // 727 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcf, 0x6e, 0xd3, 0x4e, + 0x10, 0xfe, 0x39, 0xe9, 0xaf, 0x69, 0x37, 0x08, 0xa9, 0xab, 0x0a, 0x85, 0xb6, 0x82, 0x28, 0xea, + 0xa1, 0x54, 0xc2, 0x26, 0xe9, 0x81, 0xe2, 0x12, 0x89, 0x14, 0x41, 0x52, 0x21, 0xaa, 0xca, 0xa5, + 0x39, 0xa0, 0x48, 0xd6, 0x36, 0x9e, 0x5a, 0x56, 0x9c, 0xdd, 0xb0, 0xbb, 0x6e, 0xf9, 0xa3, 0x5e, + 0xfa, 0x0a, 0xbc, 0x01, 0x47, 0xde, 0x81, 0x17, 0xe0, 0x84, 0xd4, 0x57, 0xe0, 0xc4, 0x81, 0x67, + 0x40, 0xb6, 0x77, 0x93, 0xb4, 0x75, 0x48, 0xe9, 0x6d, 0x3c, 0xfb, 0xcd, 0x37, 0xdf, 0xce, 0xcc, + 0x8e, 0xd1, 0x33, 0x9f, 0x31, 0x3f, 0x04, 0x8b, 0x78, 0xc2, 0x4a, 0xcd, 0xd8, 0x3a, 0xae, 0x5a, + 0x02, 0xf8, 0x71, 0xd0, 0x05, 0x61, 0xf5, 0xe0, 0xc3, 0x09, 0xe3, 0x9e, 0x3b, 0x08, 0x09, 0x75, + 0x03, 0x0f, 0x88, 0xab, 0x8e, 0xcc, 
0x01, 0x67, 0x92, 0xe1, 0x72, 0x1a, 0x66, 0x12, 0x4f, 0x98, + 0x43, 0x06, 0xf3, 0xb8, 0x6a, 0x6a, 0x86, 0xa5, 0xcd, 0x49, 0x39, 0xba, 0xac, 0xdf, 0x67, 0xf4, + 0x62, 0x86, 0xd4, 0x97, 0x72, 0x4f, 0x8e, 0x04, 0x1a, 0xf5, 0x2f, 0x49, 0xa3, 0x20, 0x4f, 0x18, + 0xef, 0xa9, 0xc8, 0x15, 0x1d, 0x39, 0x08, 0x2c, 0x42, 0x29, 0x93, 0x44, 0x06, 0x8c, 0x0a, 0x75, + 0x7a, 0x4f, 0x9d, 0x26, 0x5f, 0x87, 0xd1, 0x91, 0x75, 0xc2, 0xc9, 0x60, 0x00, 0x5c, 0x9d, 0x57, + 0x7e, 0xcc, 0xa0, 0xe5, 0x26, 0x50, 0xe0, 0x44, 0xc2, 0xab, 0x34, 0xc9, 0x8e, 0x07, 0x44, 0x38, + 0xf0, 0x2e, 0x02, 0x21, 0xf1, 0x7d, 0x54, 0xec, 0x46, 0x42, 0xb2, 0x3e, 0x70, 0x37, 0xf0, 0x4a, + 0x46, 0xd9, 0x58, 0x9b, 0x77, 0x90, 0x76, 0xed, 0x78, 0x78, 0x13, 0xcd, 0x85, 0x84, 0xfa, 0x11, + 0xf1, 0xa1, 0x54, 0x28, 0x1b, 0x6b, 0xc5, 0xda, 0x8a, 0x2a, 0x8e, 0xa9, 0x73, 0x9a, 0xfb, 0x92, + 0x07, 0xd4, 0x6f, 0x93, 0x30, 0x02, 0x67, 0x88, 0xc6, 0xbb, 0x68, 0xd1, 0x07, 0xe6, 0x4a, 0xc2, + 0x7d, 0x90, 0x6e, 0x97, 0x51, 0x21, 0x09, 0x95, 0xa2, 0x34, 0x57, 0xce, 0x4f, 0x65, 0xc1, 0x3e, + 0xb0, 0x37, 0x49, 0xe0, 0x73, 0x1d, 0x87, 0x3f, 0xa2, 0xc5, 0xac, 0x32, 0x95, 0xe6, 0xcb, 0xc6, + 0xda, 0xed, 0x5a, 0xcb, 0x9c, 0xd4, 0xbd, 0xa4, 0xc2, 0xa6, 0xba, 0xfc, 0x5e, 0x48, 0xe8, 0x6e, + 0x1a, 0xf8, 0x82, 0x46, 0xfd, 0x0c, 0xb7, 0x83, 0x7b, 0x57, 0x7c, 0xf8, 0x68, 0x94, 0x9b, 0x50, + 0xcf, 0x8d, 0x78, 0xe8, 0x0a, 0x00, 0xaf, 0x94, 0x4b, 0x2a, 0xb2, 0x61, 0x4e, 0x9b, 0x1c, 0x9d, + 0xa7, 0x41, 0xbd, 0x03, 0x1e, 0xee, 0x03, 0x78, 0xad, 0xff, 0x9c, 0x85, 0xde, 0x65, 0x27, 0x76, + 0xd0, 0x2d, 0x9d, 0x27, 0xe1, 0xcf, 0x27, 0xfc, 0x0f, 0xaf, 0xcd, 0xaf, 0x98, 0x8b, 0xbd, 0xd1, + 0x27, 0x7e, 0x89, 0xe6, 0x86, 0x7a, 0xff, 0x4f, 0xf8, 0x1e, 0x4c, 0xe7, 0x1b, 0xa9, 0x2c, 0x44, + 0xa9, 0xb9, 0x3d, 0x8b, 0x66, 0x62, 0x8e, 0xca, 0x29, 0x5a, 0xb8, 0x72, 0x1b, 0x6c, 0xa2, 0x7c, + 0xc4, 0xc3, 0x64, 0x7e, 0xa6, 0xf5, 0x36, 0x06, 0xc6, 0x63, 0xa5, 0x34, 0x8a, 0x52, 0xee, 0x1a, + 0x03, 0x31, 0x44, 0x57, 0x9a, 0xa8, 0x38, 0x76, 0xd9, 0x0b, 0x44, 0xc6, 0x3f, 0x11, 0x3d, 0x41, + 0x85, 0x1b, 0xaa, 0xaf, 0xc8, 0xcc, 0x47, 0xe5, 0x80, 0x18, 0x30, 0x2a, 0x00, 0x1f, 0xa0, 0x02, + 0x07, 0x11, 0x85, 0x52, 0x4b, 0xda, 0x9a, 0x5e, 0xf0, 0x6c, 0xbe, 0x28, 0x94, 0x8e, 0xe6, 0xaa, + 0x7c, 0x33, 0xd0, 0xdd, 0x89, 0x30, 0xfc, 0x08, 0xcd, 0x48, 0x78, 0x2f, 0xd5, 0x48, 0xfe, 0xfd, + 0x12, 0x09, 0x12, 0xd3, 0xd1, 0x50, 0x27, 0xdb, 0xb0, 0x0f, 0x92, 0x07, 0x5d, 0xa1, 0x86, 0xee, + 0xe9, 0x44, 0xcd, 0x6a, 0xb1, 0x8d, 0x3d, 0x9d, 0x56, 0x20, 0x24, 0xe3, 0x41, 0x97, 0x84, 0xaf, + 0x53, 0x8e, 0xe1, 0x23, 0x8a, 0x05, 0x2a, 0x5f, 0xed, 0xb7, 0x81, 0xee, 0x8c, 0x05, 0xc5, 0x47, + 0xfb, 0xe9, 0xf5, 0xf1, 0xb9, 0x81, 0x16, 0xb3, 0xd6, 0x14, 0xae, 0xdf, 0xa8, 0x72, 0x7a, 0xbd, + 0x2d, 0xd5, 0x6f, 0x5a, 0xf8, 0xa4, 0x91, 0x95, 0xfa, 0xd9, 0xf9, 0xcf, 0xcf, 0xb9, 0xc7, 0x95, + 0x5a, 0xb2, 0xe3, 0xd5, 0x52, 0x14, 0xd6, 0xa7, 0xb1, 0x95, 0x59, 0x5f, 0x3f, 0xb5, 0xfd, 0x0c, + 0x05, 0xb6, 0xb1, 0xbe, 0x7d, 0x96, 0x43, 0xab, 0x5d, 0xd6, 0x9f, 0xaa, 0x61, 0x7b, 0x39, 0xbb, + 0x2c, 0x7b, 0x71, 0xef, 0xf6, 0x8c, 0xb7, 0x2d, 0x45, 0xe0, 0xb3, 0x78, 0xb9, 0x9a, 0x8c, 0xfb, + 0x96, 0x0f, 0x34, 0xe9, 0xac, 0xfe, 0x99, 0x0c, 0x02, 0x31, 0xf9, 0xcf, 0xb7, 0xa5, 0x8d, 0x2f, + 0xb9, 0x7c, 0xb3, 0xd1, 0xf8, 0x9a, 0x2b, 0x37, 0x53, 0xc2, 0x86, 0x27, 0xcc, 0xd4, 0x8c, 0xad, + 0x76, 0xd5, 0x54, 0x89, 0xc5, 0x77, 0x0d, 0xe9, 0x34, 0x3c, 0xd1, 0x19, 0x42, 0x3a, 0xed, 0x6a, + 0x47, 0x43, 0x7e, 0xe5, 0x56, 0x53, 0xbf, 0x6d, 0x37, 0x3c, 0x61, 0xdb, 0x43, 0x90, 0x6d, 0xb7, + 0xab, 0xb6, 0xad, 0x61, 0x87, 0xb3, 0x89, 0xce, 0x8d, 0x3f, 
0x01, 0x00, 0x00, 0xff, 0xff, 0xfd, + 0x13, 0x9c, 0xd6, 0xa0, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_keyword_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_keyword_service.pb.go new file mode 100644 index 0000000..b514421 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_keyword_service.pb.go @@ -0,0 +1,592 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_keyword_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordPlanKeywordService.GetKeywordPlanKeyword][google.ads.googleads.v1.services.KeywordPlanKeywordService.GetKeywordPlanKeyword]. +type GetKeywordPlanKeywordRequest struct { + // The resource name of the ad group keyword to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordPlanKeywordRequest) Reset() { *m = GetKeywordPlanKeywordRequest{} } +func (m *GetKeywordPlanKeywordRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordPlanKeywordRequest) ProtoMessage() {} +func (*GetKeywordPlanKeywordRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e, []int{0} +} +func (m *GetKeywordPlanKeywordRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordPlanKeywordRequest.Unmarshal(m, b) +} +func (m *GetKeywordPlanKeywordRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordPlanKeywordRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordPlanKeywordRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordPlanKeywordRequest.Merge(dst, src) +} +func (m *GetKeywordPlanKeywordRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordPlanKeywordRequest.Size(m) +} +func (m *GetKeywordPlanKeywordRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordPlanKeywordRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordPlanKeywordRequest proto.InternalMessageInfo + +func (m *GetKeywordPlanKeywordRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [KeywordPlanKeywordService.MutateKeywordPlanKeywords][google.ads.googleads.v1.services.KeywordPlanKeywordService.MutateKeywordPlanKeywords]. +type MutateKeywordPlanKeywordsRequest struct { + // The ID of the customer whose Keyword Plan keywords are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual Keyword Plan keywords. + Operations []*KeywordPlanKeywordOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanKeywordsRequest) Reset() { *m = MutateKeywordPlanKeywordsRequest{} } +func (m *MutateKeywordPlanKeywordsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanKeywordsRequest) ProtoMessage() {} +func (*MutateKeywordPlanKeywordsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e, []int{1} +} +func (m *MutateKeywordPlanKeywordsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanKeywordsRequest.Unmarshal(m, b) +} +func (m *MutateKeywordPlanKeywordsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanKeywordsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanKeywordsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanKeywordsRequest.Merge(dst, src) +} +func (m *MutateKeywordPlanKeywordsRequest) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanKeywordsRequest.Size(m) +} +func (m *MutateKeywordPlanKeywordsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanKeywordsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanKeywordsRequest proto.InternalMessageInfo + +func (m *MutateKeywordPlanKeywordsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateKeywordPlanKeywordsRequest) GetOperations() []*KeywordPlanKeywordOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateKeywordPlanKeywordsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateKeywordPlanKeywordsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a Keyword Plan keyword. +type KeywordPlanKeywordOperation struct { + // The FieldMask that determines which resource fields are modified in an + // update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *KeywordPlanKeywordOperation_Create + // *KeywordPlanKeywordOperation_Update + // *KeywordPlanKeywordOperation_Remove + Operation isKeywordPlanKeywordOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanKeywordOperation) Reset() { *m = KeywordPlanKeywordOperation{} } +func (m *KeywordPlanKeywordOperation) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanKeywordOperation) ProtoMessage() {} +func (*KeywordPlanKeywordOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e, []int{2} +} +func (m *KeywordPlanKeywordOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanKeywordOperation.Unmarshal(m, b) +} +func (m *KeywordPlanKeywordOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanKeywordOperation.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanKeywordOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanKeywordOperation.Merge(dst, src) +} +func (m *KeywordPlanKeywordOperation) XXX_Size() int { + return xxx_messageInfo_KeywordPlanKeywordOperation.Size(m) +} +func (m *KeywordPlanKeywordOperation) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanKeywordOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanKeywordOperation proto.InternalMessageInfo + +func (m *KeywordPlanKeywordOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isKeywordPlanKeywordOperation_Operation interface { + isKeywordPlanKeywordOperation_Operation() +} + +type KeywordPlanKeywordOperation_Create struct { + Create *resources.KeywordPlanKeyword `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type KeywordPlanKeywordOperation_Update struct { + Update *resources.KeywordPlanKeyword `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type KeywordPlanKeywordOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*KeywordPlanKeywordOperation_Create) isKeywordPlanKeywordOperation_Operation() {} + +func (*KeywordPlanKeywordOperation_Update) isKeywordPlanKeywordOperation_Operation() {} + +func (*KeywordPlanKeywordOperation_Remove) isKeywordPlanKeywordOperation_Operation() {} + +func (m *KeywordPlanKeywordOperation) GetOperation() isKeywordPlanKeywordOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *KeywordPlanKeywordOperation) GetCreate() *resources.KeywordPlanKeyword { + if x, ok := m.GetOperation().(*KeywordPlanKeywordOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *KeywordPlanKeywordOperation) GetUpdate() *resources.KeywordPlanKeyword { + if x, ok := m.GetOperation().(*KeywordPlanKeywordOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *KeywordPlanKeywordOperation) GetRemove() string { + if x, ok := m.GetOperation().(*KeywordPlanKeywordOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeywordPlanKeywordOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanKeywordOperation_OneofMarshaler, _KeywordPlanKeywordOperation_OneofUnmarshaler, _KeywordPlanKeywordOperation_OneofSizer, []interface{}{ + (*KeywordPlanKeywordOperation_Create)(nil), + (*KeywordPlanKeywordOperation_Update)(nil), + (*KeywordPlanKeywordOperation_Remove)(nil), + } +} + +func _KeywordPlanKeywordOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanKeywordOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanKeywordOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *KeywordPlanKeywordOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *KeywordPlanKeywordOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("KeywordPlanKeywordOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanKeywordOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanKeywordOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanKeyword) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanKeywordOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanKeyword) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanKeywordOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &KeywordPlanKeywordOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanKeywordOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanKeywordOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanKeywordOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanKeywordOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanKeywordOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a Keyword Plan keyword mutate. +type MutateKeywordPlanKeywordsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateKeywordPlanKeywordResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanKeywordsResponse) Reset() { *m = MutateKeywordPlanKeywordsResponse{} } +func (m *MutateKeywordPlanKeywordsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanKeywordsResponse) ProtoMessage() {} +func (*MutateKeywordPlanKeywordsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e, []int{3} +} +func (m *MutateKeywordPlanKeywordsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanKeywordsResponse.Unmarshal(m, b) +} +func (m *MutateKeywordPlanKeywordsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanKeywordsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanKeywordsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanKeywordsResponse.Merge(dst, src) +} +func (m *MutateKeywordPlanKeywordsResponse) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanKeywordsResponse.Size(m) +} +func (m *MutateKeywordPlanKeywordsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanKeywordsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanKeywordsResponse proto.InternalMessageInfo + +func (m *MutateKeywordPlanKeywordsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateKeywordPlanKeywordsResponse) GetResults() []*MutateKeywordPlanKeywordResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the Keyword Plan keyword mutate. +type MutateKeywordPlanKeywordResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanKeywordResult) Reset() { *m = MutateKeywordPlanKeywordResult{} } +func (m *MutateKeywordPlanKeywordResult) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanKeywordResult) ProtoMessage() {} +func (*MutateKeywordPlanKeywordResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e, []int{4} +} +func (m *MutateKeywordPlanKeywordResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanKeywordResult.Unmarshal(m, b) +} +func (m *MutateKeywordPlanKeywordResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanKeywordResult.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanKeywordResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanKeywordResult.Merge(dst, src) +} +func (m *MutateKeywordPlanKeywordResult) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanKeywordResult.Size(m) +} +func (m *MutateKeywordPlanKeywordResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanKeywordResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanKeywordResult proto.InternalMessageInfo + +func (m *MutateKeywordPlanKeywordResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetKeywordPlanKeywordRequest)(nil), "google.ads.googleads.v1.services.GetKeywordPlanKeywordRequest") + proto.RegisterType((*MutateKeywordPlanKeywordsRequest)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanKeywordsRequest") + proto.RegisterType((*KeywordPlanKeywordOperation)(nil), "google.ads.googleads.v1.services.KeywordPlanKeywordOperation") + proto.RegisterType((*MutateKeywordPlanKeywordsResponse)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanKeywordsResponse") + proto.RegisterType((*MutateKeywordPlanKeywordResult)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanKeywordResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanKeywordServiceClient is the client API for KeywordPlanKeywordService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanKeywordServiceClient interface { + // Returns the requested Keyword Plan keyword in full detail. + GetKeywordPlanKeyword(ctx context.Context, in *GetKeywordPlanKeywordRequest, opts ...grpc.CallOption) (*resources.KeywordPlanKeyword, error) + // Creates, updates, or removes Keyword Plan keywords. Operation statuses are + // returned. 
+ MutateKeywordPlanKeywords(ctx context.Context, in *MutateKeywordPlanKeywordsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanKeywordsResponse, error) +} + +type keywordPlanKeywordServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanKeywordServiceClient(cc *grpc.ClientConn) KeywordPlanKeywordServiceClient { + return &keywordPlanKeywordServiceClient{cc} +} + +func (c *keywordPlanKeywordServiceClient) GetKeywordPlanKeyword(ctx context.Context, in *GetKeywordPlanKeywordRequest, opts ...grpc.CallOption) (*resources.KeywordPlanKeyword, error) { + out := new(resources.KeywordPlanKeyword) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanKeywordService/GetKeywordPlanKeyword", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanKeywordServiceClient) MutateKeywordPlanKeywords(ctx context.Context, in *MutateKeywordPlanKeywordsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanKeywordsResponse, error) { + out := new(MutateKeywordPlanKeywordsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanKeywordService/MutateKeywordPlanKeywords", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanKeywordServiceServer is the server API for KeywordPlanKeywordService service. +type KeywordPlanKeywordServiceServer interface { + // Returns the requested Keyword Plan keyword in full detail. + GetKeywordPlanKeyword(context.Context, *GetKeywordPlanKeywordRequest) (*resources.KeywordPlanKeyword, error) + // Creates, updates, or removes Keyword Plan keywords. Operation statuses are + // returned. + MutateKeywordPlanKeywords(context.Context, *MutateKeywordPlanKeywordsRequest) (*MutateKeywordPlanKeywordsResponse, error) +} + +func RegisterKeywordPlanKeywordServiceServer(s *grpc.Server, srv KeywordPlanKeywordServiceServer) { + s.RegisterService(&_KeywordPlanKeywordService_serviceDesc, srv) +} + +func _KeywordPlanKeywordService_GetKeywordPlanKeyword_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordPlanKeywordRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanKeywordServiceServer).GetKeywordPlanKeyword(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanKeywordService/GetKeywordPlanKeyword", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanKeywordServiceServer).GetKeywordPlanKeyword(ctx, req.(*GetKeywordPlanKeywordRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanKeywordService_MutateKeywordPlanKeywords_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateKeywordPlanKeywordsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanKeywordServiceServer).MutateKeywordPlanKeywords(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanKeywordService/MutateKeywordPlanKeywords", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanKeywordServiceServer).MutateKeywordPlanKeywords(ctx, req.(*MutateKeywordPlanKeywordsRequest)) + } + return interceptor(ctx, in, 
info, handler) +} + +var _KeywordPlanKeywordService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanKeywordService", + HandlerType: (*KeywordPlanKeywordServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordPlanKeyword", + Handler: _KeywordPlanKeywordService_GetKeywordPlanKeyword_Handler, + }, + { + MethodName: "MutateKeywordPlanKeywords", + Handler: _KeywordPlanKeywordService_MutateKeywordPlanKeywords_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_keyword_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_keyword_service.proto", fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e) +} + +var fileDescriptor_keyword_plan_keyword_service_6de2825a1b31e85e = []byte{ + // 721 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xdd, 0x6a, 0xd4, 0x4e, + 0x14, 0xc0, 0xff, 0xc9, 0xfe, 0xa9, 0x76, 0x52, 0x15, 0x46, 0x8a, 0xdb, 0xb5, 0xd4, 0x35, 0x16, + 0x2c, 0x7b, 0x91, 0xb0, 0x2b, 0x45, 0x49, 0x5d, 0x71, 0x77, 0xe9, 0x87, 0x48, 0x6d, 0x49, 0xa1, + 0x17, 0x65, 0x25, 0x4c, 0x37, 0xd3, 0x25, 0x34, 0xc9, 0xc4, 0x99, 0xc9, 0x96, 0x52, 0x7a, 0x23, + 0xf8, 0x04, 0xbe, 0x81, 0xde, 0xf9, 0x22, 0x82, 0xe0, 0x95, 0x17, 0xbe, 0x80, 0x37, 0x7a, 0xe5, + 0x23, 0x48, 0x32, 0x33, 0x6b, 0xbf, 0xb2, 0x2b, 0xed, 0xdd, 0x99, 0x33, 0x27, 0xbf, 0xf3, 0x39, + 0x27, 0xa0, 0xd3, 0x27, 0xa4, 0x1f, 0x62, 0x1b, 0xf9, 0xcc, 0x16, 0x62, 0x26, 0x0d, 0xea, 0x36, + 0xc3, 0x74, 0x10, 0xf4, 0x30, 0xb3, 0xf7, 0xf1, 0xe1, 0x01, 0xa1, 0xbe, 0x97, 0x84, 0x28, 0xf6, + 0xd4, 0x41, 0xde, 0x5a, 0x09, 0x25, 0x9c, 0xc0, 0xaa, 0xf8, 0xd2, 0x42, 0x3e, 0xb3, 0x86, 0x10, + 0x6b, 0x50, 0xb7, 0x14, 0xa4, 0xf2, 0xb4, 0xc8, 0x0d, 0xc5, 0x8c, 0xa4, 0xb4, 0xc8, 0x8f, 0xe0, + 0x57, 0x66, 0xd5, 0xd7, 0x49, 0x60, 0xa3, 0x38, 0x26, 0x1c, 0xf1, 0x80, 0xc4, 0x4c, 0xde, 0x4a, + 0xef, 0x76, 0x7e, 0xda, 0x4d, 0xf7, 0xec, 0xbd, 0x00, 0x87, 0xbe, 0x17, 0x21, 0xb6, 0x2f, 0x2d, + 0xe6, 0xce, 0x5a, 0x1c, 0x50, 0x94, 0x24, 0x98, 0x2a, 0xc2, 0x1d, 0x79, 0x4f, 0x93, 0x9e, 0xcd, + 0x38, 0xe2, 0xa9, 0xbc, 0x30, 0x3b, 0x60, 0x76, 0x15, 0xf3, 0x97, 0x22, 0x98, 0xcd, 0x10, 0xc5, + 0x52, 0x74, 0xf1, 0x9b, 0x14, 0x33, 0x0e, 0x1f, 0x80, 0x1b, 0x2a, 0x01, 0x2f, 0x46, 0x11, 0x2e, + 0x6b, 0x55, 0x6d, 0x61, 0xd2, 0x9d, 0x52, 0xca, 0x57, 0x28, 0xc2, 0xe6, 0x6f, 0x0d, 0x54, 0xd7, + 0x53, 0x8e, 0x38, 0x3e, 0x0f, 0x62, 0x8a, 0x74, 0x0f, 0x18, 0xbd, 0x94, 0x71, 0x12, 0x61, 0xea, + 0x05, 0xbe, 0xe4, 0x00, 0xa5, 0x7a, 0xe1, 0xc3, 0xd7, 0x00, 0x90, 0x04, 0x53, 0x91, 0x79, 0x59, + 0xaf, 0x96, 0x16, 0x8c, 0x46, 0xd3, 0x1a, 0x57, 0x78, 0xeb, 0xbc, 0xcb, 0x0d, 0x45, 0x71, 0x4f, + 0x00, 0xe1, 0x43, 0x70, 0x2b, 0x41, 0x94, 0x07, 0x28, 0xf4, 0xf6, 0x50, 0x10, 0xa6, 0x14, 0x97, + 0x4b, 0x55, 0x6d, 0xe1, 0xba, 0x7b, 0x53, 0xaa, 0x57, 0x84, 0x36, 0x4b, 0x79, 0x80, 0xc2, 0xc0, + 0x47, 0x1c, 0x7b, 0x24, 0x0e, 0x0f, 0xcb, 0xff, 0xe7, 0x66, 0x53, 0x4a, 0xb9, 0x11, 0x87, 0x87, + 0xe6, 0x47, 0x1d, 0xdc, 0x1d, 0xe1, 0x19, 0x2e, 0x01, 0x23, 0x4d, 0x72, 0x44, 0xd6, 0xa5, 0x1c, + 0x61, 0x34, 0x2a, 0x2a, 0x1b, 0xd5, 0x26, 0x6b, 0x25, 0x6b, 0xe4, 0x3a, 0x62, 0xfb, 0x2e, 0x10, + 0xe6, 0x99, 0x0c, 0x37, 0xc0, 0x44, 0x8f, 0x62, 0xc4, 0x45, 0xb5, 0x8d, 0xc6, 0x62, 0x61, 0x15, + 0x86, 0xc3, 0x75, 0x41, 0x19, 0xd6, 0xfe, 0x73, 0x25, 0x26, 0x03, 0x0a, 0x7c, 0x59, 0xbf, 0x22, + 0x50, 0x60, 0x60, 0x19, 0x4c, 0x50, 0x1c, 0x91, 0x81, 0xa8, 0xe1, 0x64, 0x76, 0x23, 0xce, 
0x6d, + 0x03, 0x4c, 0x0e, 0x8b, 0x6e, 0x7e, 0xd6, 0xc0, 0xfd, 0x11, 0x83, 0xc1, 0x12, 0x12, 0x33, 0x0c, + 0x57, 0xc0, 0xf4, 0x99, 0xce, 0x78, 0x98, 0x52, 0x42, 0x73, 0xb6, 0xd1, 0x80, 0x2a, 0x58, 0x9a, + 0xf4, 0xac, 0xad, 0x7c, 0x78, 0xdd, 0xdb, 0xa7, 0x7b, 0xb6, 0x9c, 0x99, 0xc3, 0x1d, 0x70, 0x8d, + 0x62, 0x96, 0x86, 0x5c, 0x4d, 0xcf, 0xf3, 0xf1, 0xd3, 0x53, 0x14, 0x9d, 0x9b, 0x83, 0x5c, 0x05, + 0x34, 0x97, 0xc1, 0xdc, 0x68, 0xd3, 0x7f, 0x7a, 0x29, 0x8d, 0xef, 0x25, 0x30, 0x73, 0x9e, 0xb0, + 0x25, 0xa2, 0x81, 0x5f, 0x35, 0x30, 0x7d, 0xe1, 0x6b, 0x84, 0xcf, 0xc6, 0x67, 0x32, 0xea, 0x19, + 0x57, 0x2e, 0xd7, 0x70, 0xb3, 0xf9, 0xf6, 0xdb, 0x8f, 0xf7, 0xfa, 0x63, 0xb8, 0x98, 0x2d, 0xb2, + 0xa3, 0x53, 0xe9, 0x35, 0xd5, 0xcb, 0x65, 0x76, 0x4d, 0x6d, 0xb6, 0x93, 0xdd, 0xb5, 0x6b, 0xc7, + 0xf0, 0xa7, 0x06, 0x66, 0x0a, 0xdb, 0x0f, 0xdb, 0x97, 0xef, 0x8e, 0x5a, 0x2a, 0x95, 0xce, 0x95, + 0x18, 0x62, 0xfe, 0xcc, 0x4e, 0x9e, 0x65, 0xd3, 0x7c, 0x92, 0x65, 0xf9, 0x37, 0xad, 0xa3, 0x13, + 0xeb, 0xaa, 0x59, 0x3b, 0xbe, 0x28, 0x49, 0x27, 0xca, 0xe1, 0x8e, 0x56, 0x6b, 0xbf, 0xd3, 0xc1, + 0x7c, 0x8f, 0x44, 0x63, 0xe3, 0x69, 0xcf, 0x15, 0xf6, 0x7f, 0x33, 0xdb, 0x0a, 0x9b, 0xda, 0xce, + 0x9a, 0x64, 0xf4, 0x49, 0x88, 0xe2, 0xbe, 0x45, 0x68, 0xdf, 0xee, 0xe3, 0x38, 0xdf, 0x19, 0xea, + 0xd7, 0x92, 0x04, 0xac, 0xf8, 0x87, 0xb6, 0xa4, 0x84, 0x0f, 0x7a, 0x69, 0xb5, 0xd5, 0xfa, 0xa4, + 0x57, 0x57, 0x05, 0xb0, 0xe5, 0x33, 0x4b, 0x88, 0x99, 0xb4, 0x5d, 0xb7, 0xa4, 0x63, 0xf6, 0x45, + 0x99, 0x74, 0x5b, 0x3e, 0xeb, 0x0e, 0x4d, 0xba, 0xdb, 0xf5, 0xae, 0x32, 0xf9, 0xa5, 0xcf, 0x0b, + 0xbd, 0xe3, 0xb4, 0x7c, 0xe6, 0x38, 0x43, 0x23, 0xc7, 0xd9, 0xae, 0x3b, 0x8e, 0x32, 0xdb, 0x9d, + 0xc8, 0xe3, 0x7c, 0xf4, 0x27, 0x00, 0x00, 0xff, 0xff, 0x26, 0x9e, 0xc6, 0xcd, 0x77, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_negative_keyword_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_negative_keyword_service.pb.go new file mode 100644 index 0000000..f4c97f5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_negative_keyword_service.pb.go @@ -0,0 +1,602 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_negative_keyword_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [KeywordPlanNegativeKeywordService.GetKeywordPlanNegativeKeyword][google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService.GetKeywordPlanNegativeKeyword]. +type GetKeywordPlanNegativeKeywordRequest struct { + // The resource name of the plan to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordPlanNegativeKeywordRequest) Reset() { *m = GetKeywordPlanNegativeKeywordRequest{} } +func (m *GetKeywordPlanNegativeKeywordRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordPlanNegativeKeywordRequest) ProtoMessage() {} +func (*GetKeywordPlanNegativeKeywordRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0, []int{0} +} +func (m *GetKeywordPlanNegativeKeywordRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest.Unmarshal(m, b) +} +func (m *GetKeywordPlanNegativeKeywordRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordPlanNegativeKeywordRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest.Merge(dst, src) +} +func (m *GetKeywordPlanNegativeKeywordRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest.Size(m) +} +func (m *GetKeywordPlanNegativeKeywordRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordPlanNegativeKeywordRequest proto.InternalMessageInfo + +func (m *GetKeywordPlanNegativeKeywordRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for +// [KeywordPlanNegativeKeywordService.MutateKeywordPlanNegativeKeywords][google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService.MutateKeywordPlanNegativeKeywords]. +type MutateKeywordPlanNegativeKeywordsRequest struct { + // The ID of the customer whose negative keywords are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual Keyword Plan negative + // keywords. + Operations []*KeywordPlanNegativeKeywordOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanNegativeKeywordsRequest) Reset() { + *m = MutateKeywordPlanNegativeKeywordsRequest{} +} +func (m *MutateKeywordPlanNegativeKeywordsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanNegativeKeywordsRequest) ProtoMessage() {} +func (*MutateKeywordPlanNegativeKeywordsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0, []int{1} +} +func (m *MutateKeywordPlanNegativeKeywordsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest.Unmarshal(m, b) +} +func (m *MutateKeywordPlanNegativeKeywordsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanNegativeKeywordsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest.Merge(dst, src) +} +func (m *MutateKeywordPlanNegativeKeywordsRequest) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest.Size(m) +} +func (m *MutateKeywordPlanNegativeKeywordsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanNegativeKeywordsRequest proto.InternalMessageInfo + +func (m *MutateKeywordPlanNegativeKeywordsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateKeywordPlanNegativeKeywordsRequest) GetOperations() []*KeywordPlanNegativeKeywordOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateKeywordPlanNegativeKeywordsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateKeywordPlanNegativeKeywordsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a Keyword Plan negative +// keyword. +type KeywordPlanNegativeKeywordOperation struct { + // The FieldMask that determines which resource fields are modified in an + // update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *KeywordPlanNegativeKeywordOperation_Create + // *KeywordPlanNegativeKeywordOperation_Update + // *KeywordPlanNegativeKeywordOperation_Remove + Operation isKeywordPlanNegativeKeywordOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanNegativeKeywordOperation) Reset() { *m = KeywordPlanNegativeKeywordOperation{} } +func (m *KeywordPlanNegativeKeywordOperation) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanNegativeKeywordOperation) ProtoMessage() {} +func (*KeywordPlanNegativeKeywordOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0, []int{2} +} +func (m *KeywordPlanNegativeKeywordOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanNegativeKeywordOperation.Unmarshal(m, b) +} +func (m *KeywordPlanNegativeKeywordOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanNegativeKeywordOperation.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanNegativeKeywordOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanNegativeKeywordOperation.Merge(dst, src) +} +func (m *KeywordPlanNegativeKeywordOperation) XXX_Size() int { + return xxx_messageInfo_KeywordPlanNegativeKeywordOperation.Size(m) +} +func (m *KeywordPlanNegativeKeywordOperation) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanNegativeKeywordOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanNegativeKeywordOperation proto.InternalMessageInfo + +func (m *KeywordPlanNegativeKeywordOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isKeywordPlanNegativeKeywordOperation_Operation interface { + isKeywordPlanNegativeKeywordOperation_Operation() +} + +type KeywordPlanNegativeKeywordOperation_Create struct { + Create *resources.KeywordPlanNegativeKeyword `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type KeywordPlanNegativeKeywordOperation_Update struct { + Update *resources.KeywordPlanNegativeKeyword `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type KeywordPlanNegativeKeywordOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*KeywordPlanNegativeKeywordOperation_Create) isKeywordPlanNegativeKeywordOperation_Operation() {} + +func (*KeywordPlanNegativeKeywordOperation_Update) isKeywordPlanNegativeKeywordOperation_Operation() {} + +func (*KeywordPlanNegativeKeywordOperation_Remove) isKeywordPlanNegativeKeywordOperation_Operation() {} + +func (m *KeywordPlanNegativeKeywordOperation) GetOperation() isKeywordPlanNegativeKeywordOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *KeywordPlanNegativeKeywordOperation) GetCreate() *resources.KeywordPlanNegativeKeyword { + if x, ok := m.GetOperation().(*KeywordPlanNegativeKeywordOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *KeywordPlanNegativeKeywordOperation) GetUpdate() *resources.KeywordPlanNegativeKeyword { + if x, ok := m.GetOperation().(*KeywordPlanNegativeKeywordOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *KeywordPlanNegativeKeywordOperation) GetRemove() string { + if x, ok := m.GetOperation().(*KeywordPlanNegativeKeywordOperation_Remove); ok { + return x.Remove + 
} + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*KeywordPlanNegativeKeywordOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanNegativeKeywordOperation_OneofMarshaler, _KeywordPlanNegativeKeywordOperation_OneofUnmarshaler, _KeywordPlanNegativeKeywordOperation_OneofSizer, []interface{}{ + (*KeywordPlanNegativeKeywordOperation_Create)(nil), + (*KeywordPlanNegativeKeywordOperation_Update)(nil), + (*KeywordPlanNegativeKeywordOperation_Remove)(nil), + } +} + +func _KeywordPlanNegativeKeywordOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanNegativeKeywordOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanNegativeKeywordOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *KeywordPlanNegativeKeywordOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *KeywordPlanNegativeKeywordOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("KeywordPlanNegativeKeywordOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanNegativeKeywordOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanNegativeKeywordOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanNegativeKeyword) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanNegativeKeywordOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlanNegativeKeyword) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanNegativeKeywordOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &KeywordPlanNegativeKeywordOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanNegativeKeywordOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanNegativeKeywordOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanNegativeKeywordOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanNegativeKeywordOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanNegativeKeywordOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a Keyword Plan negative keyword mutate. +type MutateKeywordPlanNegativeKeywordsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. 
auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. + Results []*MutateKeywordPlanNegativeKeywordResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanNegativeKeywordsResponse) Reset() { + *m = MutateKeywordPlanNegativeKeywordsResponse{} +} +func (m *MutateKeywordPlanNegativeKeywordsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanNegativeKeywordsResponse) ProtoMessage() {} +func (*MutateKeywordPlanNegativeKeywordsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0, []int{3} +} +func (m *MutateKeywordPlanNegativeKeywordsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse.Unmarshal(m, b) +} +func (m *MutateKeywordPlanNegativeKeywordsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanNegativeKeywordsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse.Merge(dst, src) +} +func (m *MutateKeywordPlanNegativeKeywordsResponse) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse.Size(m) +} +func (m *MutateKeywordPlanNegativeKeywordsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanNegativeKeywordsResponse proto.InternalMessageInfo + +func (m *MutateKeywordPlanNegativeKeywordsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateKeywordPlanNegativeKeywordsResponse) GetResults() []*MutateKeywordPlanNegativeKeywordResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the Keyword Plan negative keyword mutate. +type MutateKeywordPlanNegativeKeywordResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlanNegativeKeywordResult) Reset() { + *m = MutateKeywordPlanNegativeKeywordResult{} +} +func (m *MutateKeywordPlanNegativeKeywordResult) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlanNegativeKeywordResult) ProtoMessage() {} +func (*MutateKeywordPlanNegativeKeywordResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0, []int{4} +} +func (m *MutateKeywordPlanNegativeKeywordResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult.Unmarshal(m, b) +} +func (m *MutateKeywordPlanNegativeKeywordResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlanNegativeKeywordResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult.Merge(dst, src) +} +func (m *MutateKeywordPlanNegativeKeywordResult) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult.Size(m) +} +func (m *MutateKeywordPlanNegativeKeywordResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlanNegativeKeywordResult proto.InternalMessageInfo + +func (m *MutateKeywordPlanNegativeKeywordResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetKeywordPlanNegativeKeywordRequest)(nil), "google.ads.googleads.v1.services.GetKeywordPlanNegativeKeywordRequest") + proto.RegisterType((*MutateKeywordPlanNegativeKeywordsRequest)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanNegativeKeywordsRequest") + proto.RegisterType((*KeywordPlanNegativeKeywordOperation)(nil), "google.ads.googleads.v1.services.KeywordPlanNegativeKeywordOperation") + proto.RegisterType((*MutateKeywordPlanNegativeKeywordsResponse)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanNegativeKeywordsResponse") + proto.RegisterType((*MutateKeywordPlanNegativeKeywordResult)(nil), "google.ads.googleads.v1.services.MutateKeywordPlanNegativeKeywordResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanNegativeKeywordServiceClient is the client API for KeywordPlanNegativeKeywordService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanNegativeKeywordServiceClient interface { + // Returns the requested plan in full detail. + GetKeywordPlanNegativeKeyword(ctx context.Context, in *GetKeywordPlanNegativeKeywordRequest, opts ...grpc.CallOption) (*resources.KeywordPlanNegativeKeyword, error) + // Creates, updates, or removes Keyword Plan negative keywords. Operation + // statuses are returned. 
+ MutateKeywordPlanNegativeKeywords(ctx context.Context, in *MutateKeywordPlanNegativeKeywordsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanNegativeKeywordsResponse, error) +} + +type keywordPlanNegativeKeywordServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanNegativeKeywordServiceClient(cc *grpc.ClientConn) KeywordPlanNegativeKeywordServiceClient { + return &keywordPlanNegativeKeywordServiceClient{cc} +} + +func (c *keywordPlanNegativeKeywordServiceClient) GetKeywordPlanNegativeKeyword(ctx context.Context, in *GetKeywordPlanNegativeKeywordRequest, opts ...grpc.CallOption) (*resources.KeywordPlanNegativeKeyword, error) { + out := new(resources.KeywordPlanNegativeKeyword) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService/GetKeywordPlanNegativeKeyword", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanNegativeKeywordServiceClient) MutateKeywordPlanNegativeKeywords(ctx context.Context, in *MutateKeywordPlanNegativeKeywordsRequest, opts ...grpc.CallOption) (*MutateKeywordPlanNegativeKeywordsResponse, error) { + out := new(MutateKeywordPlanNegativeKeywordsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService/MutateKeywordPlanNegativeKeywords", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanNegativeKeywordServiceServer is the server API for KeywordPlanNegativeKeywordService service. +type KeywordPlanNegativeKeywordServiceServer interface { + // Returns the requested plan in full detail. + GetKeywordPlanNegativeKeyword(context.Context, *GetKeywordPlanNegativeKeywordRequest) (*resources.KeywordPlanNegativeKeyword, error) + // Creates, updates, or removes Keyword Plan negative keywords. Operation + // statuses are returned. 
+ MutateKeywordPlanNegativeKeywords(context.Context, *MutateKeywordPlanNegativeKeywordsRequest) (*MutateKeywordPlanNegativeKeywordsResponse, error) +} + +func RegisterKeywordPlanNegativeKeywordServiceServer(s *grpc.Server, srv KeywordPlanNegativeKeywordServiceServer) { + s.RegisterService(&_KeywordPlanNegativeKeywordService_serviceDesc, srv) +} + +func _KeywordPlanNegativeKeywordService_GetKeywordPlanNegativeKeyword_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordPlanNegativeKeywordRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanNegativeKeywordServiceServer).GetKeywordPlanNegativeKeyword(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService/GetKeywordPlanNegativeKeyword", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanNegativeKeywordServiceServer).GetKeywordPlanNegativeKeyword(ctx, req.(*GetKeywordPlanNegativeKeywordRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanNegativeKeywordService_MutateKeywordPlanNegativeKeywords_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateKeywordPlanNegativeKeywordsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanNegativeKeywordServiceServer).MutateKeywordPlanNegativeKeywords(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService/MutateKeywordPlanNegativeKeywords", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanNegativeKeywordServiceServer).MutateKeywordPlanNegativeKeywords(ctx, req.(*MutateKeywordPlanNegativeKeywordsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KeywordPlanNegativeKeywordService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanNegativeKeywordService", + HandlerType: (*KeywordPlanNegativeKeywordServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordPlanNegativeKeyword", + Handler: _KeywordPlanNegativeKeywordService_GetKeywordPlanNegativeKeyword_Handler, + }, + { + MethodName: "MutateKeywordPlanNegativeKeywords", + Handler: _KeywordPlanNegativeKeywordService_MutateKeywordPlanNegativeKeywords_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_negative_keyword_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_negative_keyword_service.proto", fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0) +} + +var fileDescriptor_keyword_plan_negative_keyword_service_aebb6835f57b72b0 = []byte{ + // 736 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0xcd, 0x6a, 0xdb, 0x4a, + 0x14, 0xc7, 0xaf, 0x94, 0x90, 0x7b, 0x33, 0xca, 0x6d, 0x61, 0x4a, 0xa9, 0x31, 0xfd, 0x70, 0x94, + 0x90, 0xba, 0x5e, 0x48, 0xd8, 0xdd, 0x29, 0x98, 0xd6, 0x01, 0x3b, 0x69, 0xdd, 0x7c, 0xa0, 0x40, + 0x0a, 0xc5, 0x20, 0x26, 0xd6, 0x44, 0x88, 0x48, 0x1a, 0x75, 0x66, 0xe4, 0x10, 0x42, 0xa0, 0x74, + 0xd7, 0x45, 0x57, 0x7d, 0x80, 0x42, 
0x97, 0x7d, 0x85, 0xbe, 0x41, 0xb7, 0x5d, 0xf4, 0x05, 0xba, + 0x2a, 0xf4, 0x1d, 0x8a, 0x34, 0x33, 0x6e, 0x12, 0xea, 0xc8, 0x90, 0xec, 0x8e, 0xce, 0xfc, 0xf5, + 0x3b, 0x67, 0xce, 0x39, 0x3a, 0x02, 0x2f, 0x02, 0x42, 0x82, 0x08, 0xdb, 0xc8, 0x67, 0xb6, 0x30, + 0x73, 0x6b, 0xd4, 0xb4, 0x19, 0xa6, 0xa3, 0x70, 0x88, 0x99, 0x7d, 0x88, 0x8f, 0x8f, 0x08, 0xf5, + 0xbd, 0x34, 0x42, 0x89, 0x97, 0xe0, 0x00, 0xf1, 0x70, 0x84, 0x3d, 0xe5, 0x95, 0x32, 0x2b, 0xa5, + 0x84, 0x13, 0x58, 0x13, 0x08, 0x0b, 0xf9, 0xcc, 0x1a, 0xd3, 0xac, 0x51, 0xd3, 0x52, 0xb4, 0x6a, + 0x77, 0x52, 0x3c, 0x8a, 0x19, 0xc9, 0x68, 0x69, 0x40, 0x11, 0xa8, 0x7a, 0x57, 0x61, 0xd2, 0xd0, + 0x46, 0x49, 0x42, 0x38, 0xe2, 0x21, 0x49, 0x98, 0x3c, 0x95, 0x69, 0xd8, 0xc5, 0xd3, 0x7e, 0x76, + 0x60, 0x1f, 0x84, 0x38, 0xf2, 0xbd, 0x18, 0xb1, 0x43, 0xa9, 0xb8, 0x7f, 0x51, 0x71, 0x44, 0x51, + 0x9a, 0x62, 0xaa, 0x08, 0x77, 0xe4, 0x39, 0x4d, 0x87, 0x36, 0xe3, 0x88, 0x67, 0xf2, 0xc0, 0xec, + 0x83, 0xe5, 0x75, 0xcc, 0xfb, 0x22, 0x99, 0x9d, 0x08, 0x25, 0x5b, 0x32, 0x41, 0xe9, 0x72, 0xf1, + 0xeb, 0x0c, 0x33, 0x0e, 0x97, 0xc0, 0xff, 0xea, 0x46, 0x5e, 0x82, 0x62, 0x5c, 0xd1, 0x6a, 0x5a, + 0x7d, 0xde, 0x5d, 0x50, 0xce, 0x2d, 0x14, 0x63, 0xf3, 0x8d, 0x0e, 0xea, 0x9b, 0x19, 0x47, 0x1c, + 0x4f, 0x06, 0x32, 0x45, 0x7c, 0x00, 0x8c, 0x61, 0xc6, 0x38, 0x89, 0x31, 0xf5, 0x42, 0x5f, 0xf2, + 0x80, 0x72, 0x3d, 0xf3, 0x21, 0x06, 0x80, 0xa4, 0x98, 0x8a, 0x4a, 0x54, 0xf4, 0xda, 0x4c, 0xdd, + 0x68, 0x75, 0xad, 0xb2, 0x8e, 0x58, 0x93, 0x43, 0x6f, 0x2b, 0x9a, 0x7b, 0x06, 0x0c, 0x1f, 0x82, + 0x9b, 0x29, 0xa2, 0x3c, 0x44, 0x91, 0x77, 0x80, 0xc2, 0x28, 0xa3, 0xb8, 0x32, 0x53, 0xd3, 0xea, + 0xff, 0xb9, 0x37, 0xa4, 0xbb, 0x27, 0xbc, 0x79, 0x09, 0x46, 0x28, 0x0a, 0x7d, 0xc4, 0xb1, 0x47, + 0x92, 0xe8, 0xb8, 0x32, 0x5b, 0xc8, 0x16, 0x94, 0x73, 0x3b, 0x89, 0x8e, 0xcd, 0x2f, 0x3a, 0x58, + 0x9a, 0x22, 0x03, 0xb8, 0x0a, 0x8c, 0x2c, 0x2d, 0x50, 0x79, 0x17, 0x0b, 0x94, 0xd1, 0xaa, 0xaa, + 0xdb, 0xa9, 0x36, 0x5a, 0xbd, 0xbc, 0xd1, 0x9b, 0x88, 0x1d, 0xba, 0x40, 0xc8, 0x73, 0x1b, 0xbe, + 0x04, 0x73, 0x43, 0x8a, 0x11, 0x17, 0x5d, 0x30, 0x5a, 0xed, 0x89, 0x55, 0x19, 0x4f, 0xe1, 0x25, + 0x65, 0xd9, 0xf8, 0xc7, 0x95, 0xb8, 0x1c, 0x2c, 0xc2, 0x54, 0xf4, 0x6b, 0x02, 0x0b, 0x1c, 0xac, + 0x80, 0x39, 0x8a, 0x63, 0x32, 0x12, 0xb5, 0x9d, 0xcf, 0x4f, 0xc4, 0xf3, 0x9a, 0x01, 0xe6, 0xc7, + 0xcd, 0x30, 0xbf, 0x6b, 0xe0, 0xd1, 0x14, 0x03, 0xc4, 0x52, 0x92, 0x30, 0x0c, 0x7b, 0xe0, 0xf6, + 0x85, 0xce, 0x79, 0x98, 0x52, 0x42, 0x8b, 0x18, 0x46, 0x0b, 0xaa, 0xe4, 0x69, 0x3a, 0xb4, 0x76, + 0x8b, 0xa1, 0x77, 0x6f, 0x9d, 0xef, 0x69, 0x37, 0x97, 0xc3, 0x7d, 0xf0, 0x2f, 0xc5, 0x2c, 0x8b, + 0xb8, 0x9a, 0xb2, 0x8d, 0xf2, 0x29, 0x2b, 0xcb, 0xd2, 0x2d, 0x80, 0xae, 0x02, 0x9b, 0x9b, 0x60, + 0x65, 0xba, 0x57, 0xa6, 0xfa, 0xd2, 0x5a, 0x1f, 0x67, 0xc1, 0xe2, 0x64, 0xd2, 0xae, 0xc8, 0x12, + 0xfe, 0xd2, 0xc0, 0xbd, 0x4b, 0xbf, 0x6e, 0xd8, 0x2b, 0xbf, 0xe9, 0x34, 0xeb, 0xa1, 0x7a, 0xb5, + 0x41, 0x31, 0xbb, 0x6f, 0xbf, 0xfd, 0xf8, 0xa0, 0x3f, 0x81, 0xed, 0x7c, 0x73, 0x9e, 0x9c, 0xbb, + 0x7e, 0x5b, 0x6d, 0x04, 0x66, 0x37, 0xd4, 0x2a, 0xfd, 0xdb, 0x54, 0xd8, 0x8d, 0x53, 0xf8, 0x4e, + 0x07, 0x8b, 0xa5, 0xe3, 0x03, 0x9f, 0x5f, 0xbd, 0xbb, 0x6a, 0x89, 0x55, 0xfb, 0xd7, 0xc2, 0x12, + 0xf3, 0x6c, 0xf6, 0x8b, 0x2a, 0x74, 0xcd, 0xa7, 0x79, 0x15, 0xfe, 0x5c, 0xfb, 0xe4, 0xcc, 0x9a, + 0x6c, 0x37, 0x4e, 0x2f, 0x2b, 0x82, 0x13, 0x17, 0xc1, 0x1c, 0xad, 0xb1, 0xf6, 0x5e, 0x07, 0xcb, + 0x43, 0x12, 0x97, 0xe6, 0xb7, 0xb6, 0x52, 0x3a, 0x47, 0x3b, 0xf9, 0x36, 0xda, 0xd1, 0x5e, 0x6d, + 0x48, 0x56, 0x40, 0x22, 0x94, 0x04, 0x16, 0xa1, 0x81, 0x1d, 
0xe0, 0xa4, 0xd8, 0x55, 0xea, 0xdf, + 0x97, 0x86, 0x6c, 0xf2, 0xaf, 0x77, 0x55, 0x19, 0x9f, 0xf4, 0x99, 0xf5, 0x4e, 0xe7, 0xb3, 0x5e, + 0x5b, 0x17, 0xc0, 0x8e, 0xcf, 0x2c, 0x61, 0xe6, 0xd6, 0x5e, 0xd3, 0x92, 0x81, 0xd9, 0x57, 0x25, + 0x19, 0x74, 0x7c, 0x36, 0x18, 0x4b, 0x06, 0x7b, 0xcd, 0x81, 0x92, 0xfc, 0xd4, 0x97, 0x85, 0xdf, + 0x71, 0x3a, 0x3e, 0x73, 0x9c, 0xb1, 0xc8, 0x71, 0xf6, 0x9a, 0x8e, 0xa3, 0x64, 0xfb, 0x73, 0x45, + 0x9e, 0x8f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x21, 0xab, 0x07, 0x2a, 0x21, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_service.pb.go new file mode 100644 index 0000000..466f891 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_plan_service.pb.go @@ -0,0 +1,1170 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_plan_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordPlanService.GetKeywordPlan][google.ads.googleads.v1.services.KeywordPlanService.GetKeywordPlan]. +type GetKeywordPlanRequest struct { + // The resource name of the plan to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordPlanRequest) Reset() { *m = GetKeywordPlanRequest{} } +func (m *GetKeywordPlanRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordPlanRequest) ProtoMessage() {} +func (*GetKeywordPlanRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{0} +} +func (m *GetKeywordPlanRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordPlanRequest.Unmarshal(m, b) +} +func (m *GetKeywordPlanRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordPlanRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordPlanRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordPlanRequest.Merge(dst, src) +} +func (m *GetKeywordPlanRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordPlanRequest.Size(m) +} +func (m *GetKeywordPlanRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordPlanRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordPlanRequest proto.InternalMessageInfo + +func (m *GetKeywordPlanRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [KeywordPlanService.MutateKeywordPlans][google.ads.googleads.v1.services.KeywordPlanService.MutateKeywordPlans]. +type MutateKeywordPlansRequest struct { + // The ID of the customer whose keyword plans are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual keyword plans. + Operations []*KeywordPlanOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlansRequest) Reset() { *m = MutateKeywordPlansRequest{} } +func (m *MutateKeywordPlansRequest) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlansRequest) ProtoMessage() {} +func (*MutateKeywordPlansRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{1} +} +func (m *MutateKeywordPlansRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlansRequest.Unmarshal(m, b) +} +func (m *MutateKeywordPlansRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlansRequest.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlansRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlansRequest.Merge(dst, src) +} +func (m *MutateKeywordPlansRequest) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlansRequest.Size(m) +} +func (m *MutateKeywordPlansRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlansRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlansRequest proto.InternalMessageInfo + +func (m *MutateKeywordPlansRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateKeywordPlansRequest) GetOperations() []*KeywordPlanOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateKeywordPlansRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateKeywordPlansRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on a keyword plan. +type KeywordPlanOperation struct { + // The FieldMask that determines which resource fields are modified in an + // update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *KeywordPlanOperation_Create + // *KeywordPlanOperation_Update + // *KeywordPlanOperation_Remove + Operation isKeywordPlanOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanOperation) Reset() { *m = KeywordPlanOperation{} } +func (m *KeywordPlanOperation) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanOperation) ProtoMessage() {} +func (*KeywordPlanOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{2} +} +func (m *KeywordPlanOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanOperation.Unmarshal(m, b) +} +func (m *KeywordPlanOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanOperation.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanOperation.Merge(dst, src) +} +func (m *KeywordPlanOperation) XXX_Size() int { + return xxx_messageInfo_KeywordPlanOperation.Size(m) +} +func (m *KeywordPlanOperation) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanOperation proto.InternalMessageInfo + +func (m *KeywordPlanOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isKeywordPlanOperation_Operation interface { + isKeywordPlanOperation_Operation() +} + +type KeywordPlanOperation_Create struct { + Create *resources.KeywordPlan `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type KeywordPlanOperation_Update struct { + Update *resources.KeywordPlan `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type KeywordPlanOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*KeywordPlanOperation_Create) isKeywordPlanOperation_Operation() {} + +func (*KeywordPlanOperation_Update) isKeywordPlanOperation_Operation() {} + +func (*KeywordPlanOperation_Remove) isKeywordPlanOperation_Operation() {} + +func (m *KeywordPlanOperation) GetOperation() isKeywordPlanOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *KeywordPlanOperation) GetCreate() *resources.KeywordPlan { + if x, ok := m.GetOperation().(*KeywordPlanOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *KeywordPlanOperation) GetUpdate() *resources.KeywordPlan { + if x, ok := m.GetOperation().(*KeywordPlanOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *KeywordPlanOperation) GetRemove() string { + if x, ok := m.GetOperation().(*KeywordPlanOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeywordPlanOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeywordPlanOperation_OneofMarshaler, _KeywordPlanOperation_OneofUnmarshaler, _KeywordPlanOperation_OneofSizer, []interface{}{ + (*KeywordPlanOperation_Create)(nil), + (*KeywordPlanOperation_Update)(nil), + (*KeywordPlanOperation_Remove)(nil), + } +} + +func _KeywordPlanOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeywordPlanOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *KeywordPlanOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *KeywordPlanOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("KeywordPlanOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _KeywordPlanOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeywordPlanOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlan) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.KeywordPlan) + err := b.DecodeMessage(msg) + m.Operation = &KeywordPlanOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &KeywordPlanOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _KeywordPlanOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeywordPlanOperation) + // operation + switch x := m.Operation.(type) { + case *KeywordPlanOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeywordPlanOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a keyword plan mutate. +type MutateKeywordPlansResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateKeywordPlansResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlansResponse) Reset() { *m = MutateKeywordPlansResponse{} } +func (m *MutateKeywordPlansResponse) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlansResponse) ProtoMessage() {} +func (*MutateKeywordPlansResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{3} +} +func (m *MutateKeywordPlansResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlansResponse.Unmarshal(m, b) +} +func (m *MutateKeywordPlansResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlansResponse.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlansResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlansResponse.Merge(dst, src) +} +func (m *MutateKeywordPlansResponse) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlansResponse.Size(m) +} +func (m *MutateKeywordPlansResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlansResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlansResponse proto.InternalMessageInfo + +func (m *MutateKeywordPlansResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateKeywordPlansResponse) GetResults() []*MutateKeywordPlansResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the keyword plan mutate. +type MutateKeywordPlansResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateKeywordPlansResult) Reset() { *m = MutateKeywordPlansResult{} } +func (m *MutateKeywordPlansResult) String() string { return proto.CompactTextString(m) } +func (*MutateKeywordPlansResult) ProtoMessage() {} +func (*MutateKeywordPlansResult) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{4} +} +func (m *MutateKeywordPlansResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateKeywordPlansResult.Unmarshal(m, b) +} +func (m *MutateKeywordPlansResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateKeywordPlansResult.Marshal(b, m, deterministic) +} +func (dst *MutateKeywordPlansResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateKeywordPlansResult.Merge(dst, src) +} +func (m *MutateKeywordPlansResult) XXX_Size() int { + return xxx_messageInfo_MutateKeywordPlansResult.Size(m) +} +func (m *MutateKeywordPlansResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateKeywordPlansResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateKeywordPlansResult proto.InternalMessageInfo + +func (m *MutateKeywordPlansResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [KeywordPlanService.GenerateForecastMetrics][google.ads.googleads.v1.services.KeywordPlanService.GenerateForecastMetrics]. +type GenerateForecastMetricsRequest struct { + // The resource name of the keyword plan to be forecasted. 
+ KeywordPlan string `protobuf:"bytes,1,opt,name=keyword_plan,json=keywordPlan,proto3" json:"keyword_plan,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateForecastMetricsRequest) Reset() { *m = GenerateForecastMetricsRequest{} } +func (m *GenerateForecastMetricsRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateForecastMetricsRequest) ProtoMessage() {} +func (*GenerateForecastMetricsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{5} +} +func (m *GenerateForecastMetricsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateForecastMetricsRequest.Unmarshal(m, b) +} +func (m *GenerateForecastMetricsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateForecastMetricsRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateForecastMetricsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateForecastMetricsRequest.Merge(dst, src) +} +func (m *GenerateForecastMetricsRequest) XXX_Size() int { + return xxx_messageInfo_GenerateForecastMetricsRequest.Size(m) +} +func (m *GenerateForecastMetricsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateForecastMetricsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateForecastMetricsRequest proto.InternalMessageInfo + +func (m *GenerateForecastMetricsRequest) GetKeywordPlan() string { + if m != nil { + return m.KeywordPlan + } + return "" +} + +// Response message for [KeywordPlanService.GenerateForecastMetrics][google.ads.googleads.v1.services.KeywordPlanService.GenerateForecastMetrics]. +type GenerateForecastMetricsResponse struct { + // List of campaign forecasts. + // One maximum. + CampaignForecasts []*KeywordPlanCampaignForecast `protobuf:"bytes,1,rep,name=campaign_forecasts,json=campaignForecasts,proto3" json:"campaign_forecasts,omitempty"` + // List of ad group forecasts. + AdGroupForecasts []*KeywordPlanAdGroupForecast `protobuf:"bytes,2,rep,name=ad_group_forecasts,json=adGroupForecasts,proto3" json:"ad_group_forecasts,omitempty"` + // List of keyword forecasts. 
+ KeywordForecasts []*KeywordPlanKeywordForecast `protobuf:"bytes,3,rep,name=keyword_forecasts,json=keywordForecasts,proto3" json:"keyword_forecasts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateForecastMetricsResponse) Reset() { *m = GenerateForecastMetricsResponse{} } +func (m *GenerateForecastMetricsResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateForecastMetricsResponse) ProtoMessage() {} +func (*GenerateForecastMetricsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{6} +} +func (m *GenerateForecastMetricsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateForecastMetricsResponse.Unmarshal(m, b) +} +func (m *GenerateForecastMetricsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateForecastMetricsResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateForecastMetricsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateForecastMetricsResponse.Merge(dst, src) +} +func (m *GenerateForecastMetricsResponse) XXX_Size() int { + return xxx_messageInfo_GenerateForecastMetricsResponse.Size(m) +} +func (m *GenerateForecastMetricsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateForecastMetricsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateForecastMetricsResponse proto.InternalMessageInfo + +func (m *GenerateForecastMetricsResponse) GetCampaignForecasts() []*KeywordPlanCampaignForecast { + if m != nil { + return m.CampaignForecasts + } + return nil +} + +func (m *GenerateForecastMetricsResponse) GetAdGroupForecasts() []*KeywordPlanAdGroupForecast { + if m != nil { + return m.AdGroupForecasts + } + return nil +} + +func (m *GenerateForecastMetricsResponse) GetKeywordForecasts() []*KeywordPlanKeywordForecast { + if m != nil { + return m.KeywordForecasts + } + return nil +} + +// A campaign forecast. +type KeywordPlanCampaignForecast struct { + // The resource name of the Keyword Plan campaign related to the forecast. + // + // `customers/{customer_id}/keywordPlanCampaigns/{keyword+plan_campaign_id}` + KeywordPlanCampaign *wrappers.StringValue `protobuf:"bytes,1,opt,name=keyword_plan_campaign,json=keywordPlanCampaign,proto3" json:"keyword_plan_campaign,omitempty"` + // The forecast for the Keyword Plan campaign. 
+ CampaignForecast *ForecastMetrics `protobuf:"bytes,2,opt,name=campaign_forecast,json=campaignForecast,proto3" json:"campaign_forecast,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanCampaignForecast) Reset() { *m = KeywordPlanCampaignForecast{} } +func (m *KeywordPlanCampaignForecast) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanCampaignForecast) ProtoMessage() {} +func (*KeywordPlanCampaignForecast) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{7} +} +func (m *KeywordPlanCampaignForecast) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanCampaignForecast.Unmarshal(m, b) +} +func (m *KeywordPlanCampaignForecast) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanCampaignForecast.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanCampaignForecast) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanCampaignForecast.Merge(dst, src) +} +func (m *KeywordPlanCampaignForecast) XXX_Size() int { + return xxx_messageInfo_KeywordPlanCampaignForecast.Size(m) +} +func (m *KeywordPlanCampaignForecast) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanCampaignForecast.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanCampaignForecast proto.InternalMessageInfo + +func (m *KeywordPlanCampaignForecast) GetKeywordPlanCampaign() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanCampaign + } + return nil +} + +func (m *KeywordPlanCampaignForecast) GetCampaignForecast() *ForecastMetrics { + if m != nil { + return m.CampaignForecast + } + return nil +} + +// An ad group forecast. +type KeywordPlanAdGroupForecast struct { + // The resource name of the Keyword Plan ad group related to the forecast. + // + // `customers/{customer_id}/keywordPlanAdGroups/{keyword_plan_ad_group_id}` + KeywordPlanAdGroup *wrappers.StringValue `protobuf:"bytes,1,opt,name=keyword_plan_ad_group,json=keywordPlanAdGroup,proto3" json:"keyword_plan_ad_group,omitempty"` + // The forecast for the Keyword Plan ad group. 
+ AdGroupForecast *ForecastMetrics `protobuf:"bytes,2,opt,name=ad_group_forecast,json=adGroupForecast,proto3" json:"ad_group_forecast,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanAdGroupForecast) Reset() { *m = KeywordPlanAdGroupForecast{} } +func (m *KeywordPlanAdGroupForecast) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanAdGroupForecast) ProtoMessage() {} +func (*KeywordPlanAdGroupForecast) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{8} +} +func (m *KeywordPlanAdGroupForecast) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanAdGroupForecast.Unmarshal(m, b) +} +func (m *KeywordPlanAdGroupForecast) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanAdGroupForecast.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanAdGroupForecast) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanAdGroupForecast.Merge(dst, src) +} +func (m *KeywordPlanAdGroupForecast) XXX_Size() int { + return xxx_messageInfo_KeywordPlanAdGroupForecast.Size(m) +} +func (m *KeywordPlanAdGroupForecast) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanAdGroupForecast.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanAdGroupForecast proto.InternalMessageInfo + +func (m *KeywordPlanAdGroupForecast) GetKeywordPlanAdGroup() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanAdGroup + } + return nil +} + +func (m *KeywordPlanAdGroupForecast) GetAdGroupForecast() *ForecastMetrics { + if m != nil { + return m.AdGroupForecast + } + return nil +} + +// A keyword forecast. +type KeywordPlanKeywordForecast struct { + // The resource name of the Keyword Plan keyword related to the forecast. + // + // + // `customers/{customer_id}/keywordPlanAdGroupKeywords/{keyword_plan_ad_group_keyword_id}` + KeywordPlanAdGroupKeyword *wrappers.StringValue `protobuf:"bytes,1,opt,name=keyword_plan_ad_group_keyword,json=keywordPlanAdGroupKeyword,proto3" json:"keyword_plan_ad_group_keyword,omitempty"` + // The forecast for the Keyword Plan keyword. 
+ KeywordForecast *ForecastMetrics `protobuf:"bytes,2,opt,name=keyword_forecast,json=keywordForecast,proto3" json:"keyword_forecast,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanKeywordForecast) Reset() { *m = KeywordPlanKeywordForecast{} } +func (m *KeywordPlanKeywordForecast) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanKeywordForecast) ProtoMessage() {} +func (*KeywordPlanKeywordForecast) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{9} +} +func (m *KeywordPlanKeywordForecast) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanKeywordForecast.Unmarshal(m, b) +} +func (m *KeywordPlanKeywordForecast) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanKeywordForecast.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanKeywordForecast) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanKeywordForecast.Merge(dst, src) +} +func (m *KeywordPlanKeywordForecast) XXX_Size() int { + return xxx_messageInfo_KeywordPlanKeywordForecast.Size(m) +} +func (m *KeywordPlanKeywordForecast) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanKeywordForecast.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanKeywordForecast proto.InternalMessageInfo + +func (m *KeywordPlanKeywordForecast) GetKeywordPlanAdGroupKeyword() *wrappers.StringValue { + if m != nil { + return m.KeywordPlanAdGroupKeyword + } + return nil +} + +func (m *KeywordPlanKeywordForecast) GetKeywordForecast() *ForecastMetrics { + if m != nil { + return m.KeywordForecast + } + return nil +} + +// Forecast metrics. +type ForecastMetrics struct { + // Impressions + Impressions *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=impressions,proto3" json:"impressions,omitempty"` + // Ctr + Ctr *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=ctr,proto3" json:"ctr,omitempty"` + // AVG cpc + AverageCpc *wrappers.Int64Value `protobuf:"bytes,3,opt,name=average_cpc,json=averageCpc,proto3" json:"average_cpc,omitempty"` + // Clicks + Clicks *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=clicks,proto3" json:"clicks,omitempty"` + // Cost + CostMicros *wrappers.Int64Value `protobuf:"bytes,6,opt,name=cost_micros,json=costMicros,proto3" json:"cost_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ForecastMetrics) Reset() { *m = ForecastMetrics{} } +func (m *ForecastMetrics) String() string { return proto.CompactTextString(m) } +func (*ForecastMetrics) ProtoMessage() {} +func (*ForecastMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{10} +} +func (m *ForecastMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ForecastMetrics.Unmarshal(m, b) +} +func (m *ForecastMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ForecastMetrics.Marshal(b, m, deterministic) +} +func (dst *ForecastMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_ForecastMetrics.Merge(dst, src) +} +func (m *ForecastMetrics) XXX_Size() int { + return xxx_messageInfo_ForecastMetrics.Size(m) +} +func (m *ForecastMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_ForecastMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_ForecastMetrics proto.InternalMessageInfo + +func (m *ForecastMetrics) GetImpressions() 
*wrappers.DoubleValue { + if m != nil { + return m.Impressions + } + return nil +} + +func (m *ForecastMetrics) GetCtr() *wrappers.DoubleValue { + if m != nil { + return m.Ctr + } + return nil +} + +func (m *ForecastMetrics) GetAverageCpc() *wrappers.Int64Value { + if m != nil { + return m.AverageCpc + } + return nil +} + +func (m *ForecastMetrics) GetClicks() *wrappers.DoubleValue { + if m != nil { + return m.Clicks + } + return nil +} + +func (m *ForecastMetrics) GetCostMicros() *wrappers.Int64Value { + if m != nil { + return m.CostMicros + } + return nil +} + +// Request message for [KeywordPlanService.GenerateHistoricalMetrics][google.ads.googleads.v1.services.KeywordPlanService.GenerateHistoricalMetrics]. +type GenerateHistoricalMetricsRequest struct { + // The resource name of the keyword plan of which historical metrics are + // requested. + KeywordPlan string `protobuf:"bytes,1,opt,name=keyword_plan,json=keywordPlan,proto3" json:"keyword_plan,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateHistoricalMetricsRequest) Reset() { *m = GenerateHistoricalMetricsRequest{} } +func (m *GenerateHistoricalMetricsRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateHistoricalMetricsRequest) ProtoMessage() {} +func (*GenerateHistoricalMetricsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{11} +} +func (m *GenerateHistoricalMetricsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateHistoricalMetricsRequest.Unmarshal(m, b) +} +func (m *GenerateHistoricalMetricsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateHistoricalMetricsRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateHistoricalMetricsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateHistoricalMetricsRequest.Merge(dst, src) +} +func (m *GenerateHistoricalMetricsRequest) XXX_Size() int { + return xxx_messageInfo_GenerateHistoricalMetricsRequest.Size(m) +} +func (m *GenerateHistoricalMetricsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateHistoricalMetricsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateHistoricalMetricsRequest proto.InternalMessageInfo + +func (m *GenerateHistoricalMetricsRequest) GetKeywordPlan() string { + if m != nil { + return m.KeywordPlan + } + return "" +} + +// Response message for [KeywordPlanService.GenerateHistoricalMetrics][google.ads.googleads.v1.services.KeywordPlanService.GenerateHistoricalMetrics]. +type GenerateHistoricalMetricsResponse struct { + // List of keyword historical metrics. 
+ Metrics []*KeywordPlanKeywordHistoricalMetrics `protobuf:"bytes,1,rep,name=metrics,proto3" json:"metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateHistoricalMetricsResponse) Reset() { *m = GenerateHistoricalMetricsResponse{} } +func (m *GenerateHistoricalMetricsResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateHistoricalMetricsResponse) ProtoMessage() {} +func (*GenerateHistoricalMetricsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{12} +} +func (m *GenerateHistoricalMetricsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateHistoricalMetricsResponse.Unmarshal(m, b) +} +func (m *GenerateHistoricalMetricsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateHistoricalMetricsResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateHistoricalMetricsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateHistoricalMetricsResponse.Merge(dst, src) +} +func (m *GenerateHistoricalMetricsResponse) XXX_Size() int { + return xxx_messageInfo_GenerateHistoricalMetricsResponse.Size(m) +} +func (m *GenerateHistoricalMetricsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateHistoricalMetricsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateHistoricalMetricsResponse proto.InternalMessageInfo + +func (m *GenerateHistoricalMetricsResponse) GetMetrics() []*KeywordPlanKeywordHistoricalMetrics { + if m != nil { + return m.Metrics + } + return nil +} + +// A keyword historical metrics. +type KeywordPlanKeywordHistoricalMetrics struct { + // The text of the query associated with one or more ad_group_keywords in the + // plan. + // + // Note that we de-dupe your keywords list, eliminating close variants before + // returning the plan's keywords as text. For example, if your plan originally + // contained the keywords 'car' and 'cars', the returned search query will + // only contain 'car'. + SearchQuery *wrappers.StringValue `protobuf:"bytes,1,opt,name=search_query,json=searchQuery,proto3" json:"search_query,omitempty"` + // The historical metrics for the query associated with one or more + // ad_group_keywords in the plan. 
+ KeywordMetrics *common.KeywordPlanHistoricalMetrics `protobuf:"bytes,2,opt,name=keyword_metrics,json=keywordMetrics,proto3" json:"keyword_metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeywordPlanKeywordHistoricalMetrics) Reset() { *m = KeywordPlanKeywordHistoricalMetrics{} } +func (m *KeywordPlanKeywordHistoricalMetrics) String() string { return proto.CompactTextString(m) } +func (*KeywordPlanKeywordHistoricalMetrics) ProtoMessage() {} +func (*KeywordPlanKeywordHistoricalMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_plan_service_b419730041d0dd6d, []int{13} +} +func (m *KeywordPlanKeywordHistoricalMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics.Unmarshal(m, b) +} +func (m *KeywordPlanKeywordHistoricalMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics.Marshal(b, m, deterministic) +} +func (dst *KeywordPlanKeywordHistoricalMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics.Merge(dst, src) +} +func (m *KeywordPlanKeywordHistoricalMetrics) XXX_Size() int { + return xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics.Size(m) +} +func (m *KeywordPlanKeywordHistoricalMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_KeywordPlanKeywordHistoricalMetrics proto.InternalMessageInfo + +func (m *KeywordPlanKeywordHistoricalMetrics) GetSearchQuery() *wrappers.StringValue { + if m != nil { + return m.SearchQuery + } + return nil +} + +func (m *KeywordPlanKeywordHistoricalMetrics) GetKeywordMetrics() *common.KeywordPlanHistoricalMetrics { + if m != nil { + return m.KeywordMetrics + } + return nil +} + +func init() { + proto.RegisterType((*GetKeywordPlanRequest)(nil), "google.ads.googleads.v1.services.GetKeywordPlanRequest") + proto.RegisterType((*MutateKeywordPlansRequest)(nil), "google.ads.googleads.v1.services.MutateKeywordPlansRequest") + proto.RegisterType((*KeywordPlanOperation)(nil), "google.ads.googleads.v1.services.KeywordPlanOperation") + proto.RegisterType((*MutateKeywordPlansResponse)(nil), "google.ads.googleads.v1.services.MutateKeywordPlansResponse") + proto.RegisterType((*MutateKeywordPlansResult)(nil), "google.ads.googleads.v1.services.MutateKeywordPlansResult") + proto.RegisterType((*GenerateForecastMetricsRequest)(nil), "google.ads.googleads.v1.services.GenerateForecastMetricsRequest") + proto.RegisterType((*GenerateForecastMetricsResponse)(nil), "google.ads.googleads.v1.services.GenerateForecastMetricsResponse") + proto.RegisterType((*KeywordPlanCampaignForecast)(nil), "google.ads.googleads.v1.services.KeywordPlanCampaignForecast") + proto.RegisterType((*KeywordPlanAdGroupForecast)(nil), "google.ads.googleads.v1.services.KeywordPlanAdGroupForecast") + proto.RegisterType((*KeywordPlanKeywordForecast)(nil), "google.ads.googleads.v1.services.KeywordPlanKeywordForecast") + proto.RegisterType((*ForecastMetrics)(nil), "google.ads.googleads.v1.services.ForecastMetrics") + proto.RegisterType((*GenerateHistoricalMetricsRequest)(nil), "google.ads.googleads.v1.services.GenerateHistoricalMetricsRequest") + proto.RegisterType((*GenerateHistoricalMetricsResponse)(nil), "google.ads.googleads.v1.services.GenerateHistoricalMetricsResponse") + proto.RegisterType((*KeywordPlanKeywordHistoricalMetrics)(nil), 
"google.ads.googleads.v1.services.KeywordPlanKeywordHistoricalMetrics") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordPlanServiceClient is the client API for KeywordPlanService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordPlanServiceClient interface { + // Returns the requested plan in full detail. + GetKeywordPlan(ctx context.Context, in *GetKeywordPlanRequest, opts ...grpc.CallOption) (*resources.KeywordPlan, error) + // Creates, updates, or removes keyword plans. Operation statuses are + // returned. + MutateKeywordPlans(ctx context.Context, in *MutateKeywordPlansRequest, opts ...grpc.CallOption) (*MutateKeywordPlansResponse, error) + // Returns the requested Keyword Plan forecasts. + GenerateForecastMetrics(ctx context.Context, in *GenerateForecastMetricsRequest, opts ...grpc.CallOption) (*GenerateForecastMetricsResponse, error) + // Returns the requested Keyword Plan historical metrics. + GenerateHistoricalMetrics(ctx context.Context, in *GenerateHistoricalMetricsRequest, opts ...grpc.CallOption) (*GenerateHistoricalMetricsResponse, error) +} + +type keywordPlanServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordPlanServiceClient(cc *grpc.ClientConn) KeywordPlanServiceClient { + return &keywordPlanServiceClient{cc} +} + +func (c *keywordPlanServiceClient) GetKeywordPlan(ctx context.Context, in *GetKeywordPlanRequest, opts ...grpc.CallOption) (*resources.KeywordPlan, error) { + out := new(resources.KeywordPlan) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanService/GetKeywordPlan", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanServiceClient) MutateKeywordPlans(ctx context.Context, in *MutateKeywordPlansRequest, opts ...grpc.CallOption) (*MutateKeywordPlansResponse, error) { + out := new(MutateKeywordPlansResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanService/MutateKeywordPlans", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanServiceClient) GenerateForecastMetrics(ctx context.Context, in *GenerateForecastMetricsRequest, opts ...grpc.CallOption) (*GenerateForecastMetricsResponse, error) { + out := new(GenerateForecastMetricsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanService/GenerateForecastMetrics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *keywordPlanServiceClient) GenerateHistoricalMetrics(ctx context.Context, in *GenerateHistoricalMetricsRequest, opts ...grpc.CallOption) (*GenerateHistoricalMetricsResponse, error) { + out := new(GenerateHistoricalMetricsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordPlanService/GenerateHistoricalMetrics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordPlanServiceServer is the server API for KeywordPlanService service. +type KeywordPlanServiceServer interface { + // Returns the requested plan in full detail. 
+ GetKeywordPlan(context.Context, *GetKeywordPlanRequest) (*resources.KeywordPlan, error) + // Creates, updates, or removes keyword plans. Operation statuses are + // returned. + MutateKeywordPlans(context.Context, *MutateKeywordPlansRequest) (*MutateKeywordPlansResponse, error) + // Returns the requested Keyword Plan forecasts. + GenerateForecastMetrics(context.Context, *GenerateForecastMetricsRequest) (*GenerateForecastMetricsResponse, error) + // Returns the requested Keyword Plan historical metrics. + GenerateHistoricalMetrics(context.Context, *GenerateHistoricalMetricsRequest) (*GenerateHistoricalMetricsResponse, error) +} + +func RegisterKeywordPlanServiceServer(s *grpc.Server, srv KeywordPlanServiceServer) { + s.RegisterService(&_KeywordPlanService_serviceDesc, srv) +} + +func _KeywordPlanService_GetKeywordPlan_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordPlanRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanServiceServer).GetKeywordPlan(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanService/GetKeywordPlan", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanServiceServer).GetKeywordPlan(ctx, req.(*GetKeywordPlanRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanService_MutateKeywordPlans_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateKeywordPlansRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanServiceServer).MutateKeywordPlans(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanService/MutateKeywordPlans", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanServiceServer).MutateKeywordPlans(ctx, req.(*MutateKeywordPlansRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanService_GenerateForecastMetrics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateForecastMetricsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanServiceServer).GenerateForecastMetrics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordPlanService/GenerateForecastMetrics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanServiceServer).GenerateForecastMetrics(ctx, req.(*GenerateForecastMetricsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KeywordPlanService_GenerateHistoricalMetrics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateHistoricalMetricsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordPlanServiceServer).GenerateHistoricalMetrics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.ads.googleads.v1.services.KeywordPlanService/GenerateHistoricalMetrics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordPlanServiceServer).GenerateHistoricalMetrics(ctx, req.(*GenerateHistoricalMetricsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KeywordPlanService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordPlanService", + HandlerType: (*KeywordPlanServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordPlan", + Handler: _KeywordPlanService_GetKeywordPlan_Handler, + }, + { + MethodName: "MutateKeywordPlans", + Handler: _KeywordPlanService_MutateKeywordPlans_Handler, + }, + { + MethodName: "GenerateForecastMetrics", + Handler: _KeywordPlanService_GenerateForecastMetrics_Handler, + }, + { + MethodName: "GenerateHistoricalMetrics", + Handler: _KeywordPlanService_GenerateHistoricalMetrics_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_plan_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_plan_service.proto", fileDescriptor_keyword_plan_service_b419730041d0dd6d) +} + +var fileDescriptor_keyword_plan_service_b419730041d0dd6d = []byte{ + // 1234 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x41, 0x6f, 0xdc, 0x44, + 0x14, 0xc6, 0x1b, 0x48, 0xe9, 0xdb, 0xd0, 0x36, 0x53, 0xaa, 0x6e, 0xb7, 0xa5, 0x4d, 0xdd, 0x4a, + 0x54, 0x39, 0xd8, 0x6c, 0xa8, 0x02, 0x72, 0x52, 0xca, 0x26, 0x24, 0x9b, 0x02, 0xa1, 0x61, 0x8b, + 0x72, 0x40, 0xa1, 0xd6, 0xd4, 0x9e, 0x2c, 0x66, 0x6d, 0x8f, 0x3b, 0x63, 0x6f, 0x15, 0x55, 0xbd, + 0x20, 0xc4, 0x91, 0x0b, 0xff, 0x80, 0x23, 0x37, 0xc4, 0x8d, 0x9f, 0x80, 0xa8, 0x38, 0x70, 0xe3, + 0x88, 0x10, 0x07, 0x4e, 0x1c, 0x39, 0x21, 0x64, 0xcf, 0xcc, 0xd6, 0x6b, 0xaf, 0x93, 0xcd, 0xf6, + 0x36, 0x7e, 0x7e, 0xef, 0x7b, 0xef, 0x7b, 0xef, 0xcd, 0xf3, 0x33, 0xac, 0xf4, 0x28, 0xed, 0xf9, + 0xc4, 0xc4, 0x2e, 0x37, 0xc5, 0x31, 0x3d, 0x0d, 0x5a, 0x26, 0x27, 0x6c, 0xe0, 0x39, 0x84, 0x9b, + 0x7d, 0x72, 0xf0, 0x88, 0x32, 0xd7, 0x8e, 0x7c, 0x1c, 0xda, 0x52, 0x6a, 0x44, 0x8c, 0xc6, 0x14, + 0x2d, 0x08, 0x0b, 0x03, 0xbb, 0xdc, 0x18, 0x1a, 0x1b, 0x83, 0x96, 0xa1, 0x8c, 0x9b, 0x6f, 0x57, + 0xc1, 0x3b, 0x34, 0x08, 0x68, 0x38, 0x0a, 0x2e, 0x64, 0x02, 0xbb, 0x79, 0xb3, 0xca, 0x92, 0x11, + 0x4e, 0x13, 0x56, 0x8c, 0x4c, 0x5a, 0x5d, 0x52, 0x56, 0x91, 0x67, 0xe2, 0x30, 0xa4, 0x31, 0x8e, + 0x3d, 0x1a, 0x72, 0xf9, 0x56, 0xc6, 0x6b, 0x66, 0x4f, 0x0f, 0x92, 0x7d, 0x73, 0xdf, 0x23, 0xbe, + 0x6b, 0x07, 0x98, 0xf7, 0xa5, 0xc6, 0xe5, 0xa2, 0xc6, 0x23, 0x86, 0xa3, 0x88, 0x30, 0x85, 0x70, + 0x5e, 0xbe, 0x67, 0x91, 0x63, 0xf2, 0x18, 0xc7, 0x89, 0x7c, 0xa1, 0xaf, 0xc2, 0xb9, 0x0e, 0x89, + 0x3f, 0x10, 0x11, 0xed, 0xf8, 0x38, 0xec, 0x92, 0x87, 0x09, 0xe1, 0x31, 0xba, 0x06, 0xaf, 0xa8, + 0x88, 0xed, 0x10, 0x07, 0xa4, 0xa1, 0x2d, 0x68, 0x37, 0x4e, 0x76, 0xe7, 0x94, 0xf0, 0x23, 0x1c, + 0x10, 0xfd, 0x0f, 0x0d, 0x2e, 0x6c, 0x27, 0x31, 0x8e, 0x49, 0x0e, 0x81, 0x2b, 0x88, 0x2b, 0x50, + 0x77, 0x12, 0x1e, 0xd3, 0x80, 0x30, 0xdb, 0x73, 0x25, 0x00, 0x28, 0xd1, 0x1d, 0x17, 0xed, 0x02, + 0xd0, 0x88, 0x30, 0xc1, 0xb5, 0x51, 0x5b, 0x98, 0xb9, 0x51, 0x5f, 0x5a, 0x36, 0x8e, 0x2a, 0x8e, + 0x91, 0xf3, 0x75, 0x57, 0x99, 0x77, 0x73, 0x48, 0xe8, 0x75, 0x38, 0x1d, 0x61, 0x16, 0x7b, 0xd8, + 0xb7, 0xf7, 0xb1, 0xe7, 0x27, 0x8c, 0x34, 0x66, 0x16, 0xb4, 0x1b, 0x2f, 0x77, 0x4f, 0x49, 0xf1, + 0xa6, 0x90, 0xa6, 0x24, 0x07, 0xd8, 
0xf7, 0x5c, 0x1c, 0x13, 0x9b, 0x86, 0xfe, 0x41, 0xe3, 0xc5, + 0x4c, 0x6d, 0x4e, 0x09, 0xef, 0x86, 0xfe, 0x81, 0xfe, 0x4d, 0x0d, 0x5e, 0x1d, 0xe7, 0x12, 0xad, + 0x40, 0x3d, 0x89, 0x32, 0xdb, 0xb4, 0x12, 0x99, 0x6d, 0x7d, 0xa9, 0xa9, 0xe2, 0x57, 0xa5, 0x30, + 0x36, 0xd3, 0x62, 0x6d, 0x63, 0xde, 0xef, 0x82, 0x50, 0x4f, 0xcf, 0x68, 0x0b, 0x66, 0x1d, 0x46, + 0x70, 0x2c, 0x12, 0x5b, 0x5f, 0x32, 0x2a, 0x79, 0x0f, 0x1b, 0x27, 0x4f, 0x7c, 0xeb, 0x85, 0xae, + 0xb4, 0x4f, 0x91, 0x04, 0x6e, 0xa3, 0x36, 0x2d, 0x92, 0xb0, 0x47, 0x0d, 0x98, 0x65, 0x24, 0xa0, + 0x03, 0x91, 0xae, 0x93, 0xe9, 0x1b, 0xf1, 0xbc, 0x56, 0x87, 0x93, 0xc3, 0xfc, 0xea, 0x3f, 0x69, + 0xd0, 0x1c, 0x57, 0x75, 0x1e, 0xd1, 0x90, 0x13, 0xb4, 0x09, 0xe7, 0x0a, 0xd9, 0xb7, 0x09, 0x63, + 0x94, 0x65, 0xa0, 0xf5, 0x25, 0xa4, 0xc2, 0x63, 0x91, 0x63, 0xdc, 0xcb, 0x7a, 0xb1, 0x7b, 0x76, + 0xb4, 0x2e, 0x1b, 0xa9, 0x3a, 0xfa, 0x04, 0x4e, 0x30, 0xc2, 0x13, 0x3f, 0x56, 0xad, 0x61, 0x1d, + 0xdd, 0x1a, 0x63, 0xc3, 0x4a, 0xfc, 0xb8, 0xab, 0xa0, 0xf4, 0xdb, 0xd0, 0xa8, 0x52, 0x9a, 0xac, + 0xe7, 0xd7, 0xe1, 0x72, 0x87, 0x84, 0x69, 0x2e, 0xc8, 0x26, 0x65, 0xc4, 0xc1, 0x3c, 0xde, 0x26, + 0x31, 0xf3, 0x9c, 0x61, 0xdf, 0x5f, 0x85, 0xb9, 0xfc, 0x15, 0x97, 0x28, 0xf5, 0xfe, 0x33, 0x87, + 0xfa, 0x5f, 0x35, 0xb8, 0x52, 0x89, 0x22, 0xf3, 0xe8, 0x03, 0x72, 0x70, 0x10, 0x61, 0xaf, 0x17, + 0xda, 0xfb, 0x52, 0x87, 0x37, 0xb4, 0x2c, 0x15, 0xb7, 0x8e, 0x75, 0x4b, 0xd6, 0x25, 0x8c, 0xf2, + 0xd4, 0x9d, 0x77, 0x0a, 0x12, 0x8e, 0xbe, 0x00, 0x84, 0x5d, 0xbb, 0xc7, 0x68, 0x12, 0xe5, 0xbc, + 0x89, 0xc4, 0xaf, 0x1e, 0xcb, 0x5b, 0xdb, 0xed, 0xa4, 0x28, 0x43, 0x67, 0x67, 0xf0, 0xa8, 0x80, + 0x23, 0x0f, 0xe6, 0x55, 0x82, 0x9e, 0xb9, 0x9a, 0x99, 0xc2, 0x95, 0x3c, 0x3e, 0x73, 0xd5, 0x1f, + 0x15, 0x70, 0xfd, 0x57, 0x0d, 0x2e, 0x1e, 0x92, 0x09, 0xb4, 0x03, 0xe7, 0x46, 0x67, 0xb9, 0x54, + 0x90, 0xb7, 0xf2, 0x52, 0xe9, 0x36, 0xdf, 0x8b, 0x99, 0x17, 0xf6, 0x76, 0xb1, 0x9f, 0x90, 0xee, + 0xd9, 0x7e, 0x19, 0x19, 0xdd, 0x87, 0xf9, 0x52, 0xd9, 0xe4, 0xcd, 0x6c, 0x1d, 0x4d, 0xae, 0xd8, + 0x0c, 0x67, 0x8a, 0x95, 0xd2, 0x7f, 0xd1, 0xa0, 0x59, 0x9d, 0x6d, 0x74, 0xb7, 0x40, 0x48, 0x15, + 0x75, 0x22, 0x42, 0xa8, 0x5f, 0x02, 0x46, 0x9f, 0xc1, 0x7c, 0xa9, 0x31, 0xa6, 0xe7, 0x73, 0xba, + 0xd0, 0x0c, 0xfa, 0xef, 0xa3, 0x74, 0x0a, 0x15, 0x45, 0xf7, 0xe1, 0xb5, 0xb1, 0x74, 0x6c, 0x29, + 0x9d, 0x88, 0xd6, 0x85, 0x32, 0x2d, 0xe9, 0x07, 0xed, 0xc1, 0x99, 0x62, 0x2b, 0x3e, 0x07, 0xb9, + 0x42, 0xfb, 0xe9, 0x3f, 0xd6, 0xe0, 0x74, 0x41, 0x09, 0xbd, 0x03, 0x75, 0x2f, 0x88, 0x18, 0xe1, + 0x3c, 0xfb, 0xea, 0x55, 0xc5, 0xff, 0x1e, 0x4d, 0x1e, 0xf8, 0x44, 0xc4, 0x9f, 0x37, 0x40, 0x06, + 0xcc, 0x38, 0x31, 0x93, 0x41, 0x1e, 0x6e, 0x97, 0x2a, 0xa2, 0x55, 0xa8, 0xe3, 0x01, 0x61, 0xb8, + 0x47, 0x6c, 0x27, 0x72, 0xe4, 0x10, 0xbe, 0x58, 0xb2, 0xbb, 0x13, 0xc6, 0xcb, 0x37, 0x85, 0x19, + 0x48, 0xfd, 0xf5, 0xc8, 0x41, 0x37, 0x61, 0xd6, 0xf1, 0x3d, 0xa7, 0xcf, 0x1b, 0x2f, 0x4d, 0xe0, + 0x50, 0xea, 0xa6, 0x3e, 0x1d, 0xca, 0x63, 0x3b, 0xf0, 0x1c, 0x46, 0x79, 0x63, 0x76, 0x02, 0x9f, + 0xa9, 0xfe, 0x76, 0xa6, 0xae, 0x6f, 0xc0, 0x82, 0x9a, 0x8d, 0x5b, 0x1e, 0x8f, 0x29, 0xf3, 0x1c, + 0xec, 0x1f, 0x7f, 0xc6, 0x7e, 0xa5, 0xc1, 0xd5, 0x43, 0x70, 0xe4, 0x94, 0xb5, 0xe1, 0x44, 0x20, + 0x44, 0x72, 0xb4, 0x6e, 0x4c, 0x33, 0x81, 0xca, 0xf8, 0x0a, 0x55, 0x7f, 0xaa, 0xc1, 0xb5, 0x09, + 0x0c, 0xd0, 0x6d, 0x98, 0xe3, 0x04, 0x33, 0xe7, 0x73, 0xfb, 0x61, 0x42, 0xd8, 0xc1, 0x44, 0x8d, + 0x5d, 0x17, 0x16, 0x1f, 0xa7, 0x06, 0x88, 0x80, 0xea, 0x3f, 0x5b, 0x31, 0x12, 0x4d, 0x52, 0x3d, + 0x53, 0xe5, 0xe6, 0x9a, 0xdf, 0x06, 0x4a, 0x44, 0x4e, 0x49, 
0x50, 0xf9, 0xbc, 0xf4, 0xf5, 0x09, + 0x40, 0x39, 0x83, 0x7b, 0x22, 0x29, 0xe8, 0x07, 0x0d, 0x4e, 0x8d, 0x6e, 0x92, 0xe8, 0xad, 0xa3, + 0x33, 0x39, 0x76, 0xf7, 0x6c, 0x1e, 0x73, 0x83, 0xd1, 0x97, 0xbf, 0xfc, 0xed, 0xcf, 0x6f, 0x6b, + 0x6f, 0x20, 0x23, 0xdd, 0xb3, 0x1f, 0x8f, 0x7c, 0xc2, 0x6f, 0xa9, 0x75, 0x93, 0x9b, 0x8b, 0x66, + 0xae, 0x3b, 0xb8, 0xb9, 0xf8, 0x04, 0x3d, 0xd5, 0x00, 0x95, 0x97, 0x01, 0xb4, 0x32, 0xdd, 0x9e, + 0x21, 0x62, 0x5f, 0x9d, 0x72, 0x49, 0xc9, 0xba, 0x51, 0x5f, 0xcd, 0x98, 0x2c, 0xeb, 0xad, 0xec, + 0x5f, 0x63, 0x18, 0xfa, 0xe3, 0xdc, 0x1e, 0x7d, 0x6b, 0xf1, 0xc9, 0x08, 0x11, 0x2b, 0xc8, 0xe0, + 0x2c, 0x6d, 0x11, 0xfd, 0xa3, 0xc1, 0xf9, 0x8a, 0xad, 0x02, 0xbd, 0x3b, 0x49, 0x31, 0x0e, 0x5b, + 0x6b, 0x9a, 0xed, 0xe7, 0x40, 0x90, 0xf4, 0x76, 0x32, 0x7a, 0xef, 0xeb, 0x1b, 0x59, 0xa1, 0xf2, + 0x17, 0xf8, 0xb0, 0x3a, 0x59, 0xbd, 0xf1, 0xb0, 0x29, 0xe5, 0x7f, 0x35, 0xb8, 0x50, 0x79, 0xc9, + 0xd1, 0xda, 0xe4, 0x21, 0x57, 0x4d, 0x9a, 0xe6, 0xfa, 0x73, 0x61, 0x48, 0xe2, 0xdd, 0x8c, 0xf8, + 0x87, 0x7a, 0x67, 0x3a, 0xe2, 0x25, 0x60, 0x4b, 0x5b, 0x5c, 0xfb, 0x4f, 0x83, 0xeb, 0x0e, 0x0d, + 0x8e, 0x0c, 0x6f, 0xed, 0x7c, 0xf9, 0xba, 0xee, 0xa4, 0xd3, 0x64, 0x47, 0xfb, 0x74, 0x4b, 0x1a, + 0xf7, 0xa8, 0x8f, 0xc3, 0x9e, 0x41, 0x59, 0xcf, 0xec, 0x91, 0x30, 0x9b, 0x35, 0xea, 0xef, 0x35, + 0xf2, 0x78, 0xf5, 0x5f, 0xf6, 0x8a, 0x3a, 0x7c, 0x57, 0x9b, 0xe9, 0xb4, 0xdb, 0xdf, 0xd7, 0x16, + 0x3a, 0x02, 0xb0, 0xed, 0x72, 0x43, 0x1c, 0xd3, 0xd3, 0x6e, 0xcb, 0x90, 0x8e, 0xf9, 0xcf, 0x4a, + 0x65, 0xaf, 0xed, 0xf2, 0xbd, 0xa1, 0xca, 0xde, 0x6e, 0x6b, 0x4f, 0xa9, 0xfc, 0x5d, 0xbb, 0x2e, + 0xe4, 0x96, 0xd5, 0x76, 0xb9, 0x65, 0x0d, 0x95, 0x2c, 0x6b, 0xb7, 0x65, 0x59, 0x4a, 0xed, 0xc1, + 0x6c, 0x16, 0xe7, 0x9b, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0xff, 0xb9, 0x04, 0x88, 0x0c, 0x10, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_view_service.pb.go new file mode 100644 index 0000000..0b36709 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/keyword_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/keyword_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [KeywordViewService.GetKeywordView][google.ads.googleads.v1.services.KeywordViewService.GetKeywordView]. +type GetKeywordViewRequest struct { + // The resource name of the keyword view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKeywordViewRequest) Reset() { *m = GetKeywordViewRequest{} } +func (m *GetKeywordViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetKeywordViewRequest) ProtoMessage() {} +func (*GetKeywordViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_keyword_view_service_e831bad7780f5509, []int{0} +} +func (m *GetKeywordViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKeywordViewRequest.Unmarshal(m, b) +} +func (m *GetKeywordViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKeywordViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetKeywordViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKeywordViewRequest.Merge(dst, src) +} +func (m *GetKeywordViewRequest) XXX_Size() int { + return xxx_messageInfo_GetKeywordViewRequest.Size(m) +} +func (m *GetKeywordViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKeywordViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKeywordViewRequest proto.InternalMessageInfo + +func (m *GetKeywordViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetKeywordViewRequest)(nil), "google.ads.googleads.v1.services.GetKeywordViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KeywordViewServiceClient is the client API for KeywordViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KeywordViewServiceClient interface { + // Returns the requested keyword view in full detail. + GetKeywordView(ctx context.Context, in *GetKeywordViewRequest, opts ...grpc.CallOption) (*resources.KeywordView, error) +} + +type keywordViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewKeywordViewServiceClient(cc *grpc.ClientConn) KeywordViewServiceClient { + return &keywordViewServiceClient{cc} +} + +func (c *keywordViewServiceClient) GetKeywordView(ctx context.Context, in *GetKeywordViewRequest, opts ...grpc.CallOption) (*resources.KeywordView, error) { + out := new(resources.KeywordView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.KeywordViewService/GetKeywordView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KeywordViewServiceServer is the server API for KeywordViewService service. +type KeywordViewServiceServer interface { + // Returns the requested keyword view in full detail. 
+ GetKeywordView(context.Context, *GetKeywordViewRequest) (*resources.KeywordView, error) +} + +func RegisterKeywordViewServiceServer(s *grpc.Server, srv KeywordViewServiceServer) { + s.RegisterService(&_KeywordViewService_serviceDesc, srv) +} + +func _KeywordViewService_GetKeywordView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKeywordViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KeywordViewServiceServer).GetKeywordView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.KeywordViewService/GetKeywordView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KeywordViewServiceServer).GetKeywordView(ctx, req.(*GetKeywordViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KeywordViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.KeywordViewService", + HandlerType: (*KeywordViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetKeywordView", + Handler: _KeywordViewService_GetKeywordView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/keyword_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/keyword_view_service.proto", fileDescriptor_keyword_view_service_e831bad7780f5509) +} + +var fileDescriptor_keyword_view_service_e831bad7780f5509 = []byte{ + // 363 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xb1, 0x4a, 0xc3, 0x50, + 0x14, 0x86, 0x49, 0x04, 0xc1, 0xa0, 0x0e, 0x01, 0x51, 0x8a, 0x43, 0xa9, 0x1d, 0xa4, 0xc3, 0xbd, + 0x46, 0x45, 0xe1, 0x56, 0x87, 0x74, 0xa9, 0x20, 0x48, 0xa9, 0x90, 0x41, 0x02, 0xe5, 0xda, 0x1c, + 0x42, 0xb0, 0xc9, 0xad, 0xf7, 0xa4, 0x29, 0x22, 0x2e, 0xbe, 0x82, 0x6f, 0xe0, 0xe8, 0xe6, 0x5b, + 0x88, 0xab, 0xaf, 0xe0, 0xe4, 0x43, 0x88, 0xa4, 0xb7, 0x37, 0xb4, 0x6a, 0xe8, 0xf6, 0x73, 0xf2, + 0x7f, 0x7f, 0xce, 0xf9, 0x13, 0xab, 0x19, 0x0a, 0x11, 0x0e, 0x80, 0xf2, 0x00, 0xa9, 0x92, 0xb9, + 0xca, 0x1c, 0x8a, 0x20, 0xb3, 0xa8, 0x0f, 0x48, 0x6f, 0xe0, 0x6e, 0x2c, 0x64, 0xd0, 0xcb, 0x22, + 0x18, 0xf7, 0xa6, 0x53, 0x32, 0x94, 0x22, 0x15, 0x76, 0x55, 0x11, 0x84, 0x07, 0x48, 0x0a, 0x98, + 0x64, 0x0e, 0xd1, 0x70, 0xe5, 0xb0, 0x2c, 0x5e, 0x02, 0x8a, 0x91, 0xfc, 0x9d, 0xaf, 0x72, 0x2b, + 0xdb, 0x9a, 0x1a, 0x46, 0x94, 0x27, 0x89, 0x48, 0x79, 0x1a, 0x89, 0x04, 0xd5, 0xd3, 0xda, 0x89, + 0xb5, 0xd1, 0x86, 0xf4, 0x5c, 0x61, 0x5e, 0x04, 0xe3, 0x2e, 0xdc, 0x8e, 0x00, 0x53, 0x7b, 0xc7, + 0x5a, 0xd3, 0xb1, 0xbd, 0x84, 0xc7, 0xb0, 0x65, 0x54, 0x8d, 0xdd, 0x95, 0xee, 0xaa, 0x1e, 0x5e, + 0xf0, 0x18, 0xf6, 0xdf, 0x0c, 0xcb, 0x9e, 0x61, 0x2f, 0xd5, 0xa6, 0xf6, 0xab, 0x61, 0xad, 0xcf, + 0xa7, 0xda, 0xc7, 0x64, 0xd1, 0x79, 0xe4, 0xdf, 0x3d, 0x2a, 0xa4, 0x14, 0x2c, 0xae, 0x26, 0x33, + 0x58, 0xed, 0xe8, 0xf1, 0xe3, 0xf3, 0xc9, 0xdc, 0xb3, 0x49, 0x5e, 0xcc, 0xfd, 0xdc, 0x09, 0xa7, + 0xfd, 0x11, 0xa6, 0x22, 0x06, 0x89, 0xb4, 0xa1, 0x9b, 0xca, 0x19, 0xa4, 0x8d, 0x87, 0xd6, 0xb7, + 0x61, 0xd5, 0xfb, 0x22, 0x5e, 0xb8, 0x66, 0x6b, 0xf3, 0xef, 0xc1, 0x9d, 0xbc, 0xca, 0x8e, 0x71, + 0x75, 0x36, 0x85, 0x43, 0x31, 0xe0, 0x49, 0x48, 0x84, 0x0c, 0x69, 0x08, 0xc9, 0xa4, 0x68, 0xfd, + 0xc1, 0x86, 0x11, 0x96, 0xff, 0x1e, 0x4d, 0x2d, 0x9e, 0xcd, 0xa5, 0xb6, 0xeb, 0xbe, 0x98, 0xd5, + 0xb6, 0x0a, 0x74, 0x03, 
0x24, 0x4a, 0xe6, 0xca, 0x73, 0xc8, 0xf4, 0xc5, 0xf8, 0xae, 0x2d, 0xbe, + 0x1b, 0xa0, 0x5f, 0x58, 0x7c, 0xcf, 0xf1, 0xb5, 0xe5, 0xcb, 0xac, 0xab, 0x39, 0x63, 0x6e, 0x80, + 0x8c, 0x15, 0x26, 0xc6, 0x3c, 0x87, 0x31, 0x6d, 0xbb, 0x5e, 0x9e, 0xec, 0x79, 0xf0, 0x13, 0x00, + 0x00, 0xff, 0xff, 0x0d, 0x72, 0x33, 0x52, 0xc5, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/label_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/label_service.pb.go new file mode 100644 index 0000000..89bd159 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/label_service.pb.go @@ -0,0 +1,587 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/label_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [LabelService.GetLabel][google.ads.googleads.v1.services.LabelService.GetLabel]. +type GetLabelRequest struct { + // The resource name of the label to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLabelRequest) Reset() { *m = GetLabelRequest{} } +func (m *GetLabelRequest) String() string { return proto.CompactTextString(m) } +func (*GetLabelRequest) ProtoMessage() {} +func (*GetLabelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_label_service_11259d6fcb5427fb, []int{0} +} +func (m *GetLabelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLabelRequest.Unmarshal(m, b) +} +func (m *GetLabelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLabelRequest.Marshal(b, m, deterministic) +} +func (dst *GetLabelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLabelRequest.Merge(dst, src) +} +func (m *GetLabelRequest) XXX_Size() int { + return xxx_messageInfo_GetLabelRequest.Size(m) +} +func (m *GetLabelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLabelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLabelRequest proto.InternalMessageInfo + +func (m *GetLabelRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [LabelService.MutateLabels][google.ads.googleads.v1.services.LabelService.MutateLabels]. 
+type MutateLabelsRequest struct { + // ID of the customer whose labels are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on labels. + Operations []*LabelOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. + ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateLabelsRequest) Reset() { *m = MutateLabelsRequest{} } +func (m *MutateLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateLabelsRequest) ProtoMessage() {} +func (*MutateLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_label_service_11259d6fcb5427fb, []int{1} +} +func (m *MutateLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateLabelsRequest.Unmarshal(m, b) +} +func (m *MutateLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateLabelsRequest.Merge(dst, src) +} +func (m *MutateLabelsRequest) XXX_Size() int { + return xxx_messageInfo_MutateLabelsRequest.Size(m) +} +func (m *MutateLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateLabelsRequest proto.InternalMessageInfo + +func (m *MutateLabelsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateLabelsRequest) GetOperations() []*LabelOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateLabelsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateLabelsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, remove, update) on a label. +type LabelOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *LabelOperation_Create + // *LabelOperation_Update + // *LabelOperation_Remove + Operation isLabelOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelOperation) Reset() { *m = LabelOperation{} } +func (m *LabelOperation) String() string { return proto.CompactTextString(m) } +func (*LabelOperation) ProtoMessage() {} +func (*LabelOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_label_service_11259d6fcb5427fb, []int{2} +} +func (m *LabelOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelOperation.Unmarshal(m, b) +} +func (m *LabelOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelOperation.Marshal(b, m, deterministic) +} +func (dst *LabelOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelOperation.Merge(dst, src) +} +func (m *LabelOperation) XXX_Size() int { + return xxx_messageInfo_LabelOperation.Size(m) +} +func (m *LabelOperation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelOperation proto.InternalMessageInfo + +func (m *LabelOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isLabelOperation_Operation interface { + isLabelOperation_Operation() +} + +type LabelOperation_Create struct { + Create *resources.Label `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type LabelOperation_Update struct { + Update *resources.Label `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type LabelOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*LabelOperation_Create) isLabelOperation_Operation() {} + +func (*LabelOperation_Update) isLabelOperation_Operation() {} + +func (*LabelOperation_Remove) isLabelOperation_Operation() {} + +func (m *LabelOperation) GetOperation() isLabelOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *LabelOperation) GetCreate() *resources.Label { + if x, ok := m.GetOperation().(*LabelOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *LabelOperation) GetUpdate() *resources.Label { + if x, ok := m.GetOperation().(*LabelOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *LabelOperation) GetRemove() string { + if x, ok := m.GetOperation().(*LabelOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LabelOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LabelOperation_OneofMarshaler, _LabelOperation_OneofUnmarshaler, _LabelOperation_OneofSizer, []interface{}{ + (*LabelOperation_Create)(nil), + (*LabelOperation_Update)(nil), + (*LabelOperation_Remove)(nil), + } +} + +func _LabelOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LabelOperation) + // operation + switch x := m.Operation.(type) { + case *LabelOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *LabelOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *LabelOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("LabelOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _LabelOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LabelOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Label) + err := b.DecodeMessage(msg) + m.Operation = &LabelOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.Label) + err := b.DecodeMessage(msg) + m.Operation = &LabelOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &LabelOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _LabelOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LabelOperation) + // operation + switch x := m.Operation.(type) { + case *LabelOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a labels mutate. +type MutateLabelsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateLabelResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateLabelsResponse) Reset() { *m = MutateLabelsResponse{} } +func (m *MutateLabelsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateLabelsResponse) ProtoMessage() {} +func (*MutateLabelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_label_service_11259d6fcb5427fb, []int{3} +} +func (m *MutateLabelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateLabelsResponse.Unmarshal(m, b) +} +func (m *MutateLabelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateLabelsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateLabelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateLabelsResponse.Merge(dst, src) +} +func (m *MutateLabelsResponse) XXX_Size() int { + return xxx_messageInfo_MutateLabelsResponse.Size(m) +} +func (m *MutateLabelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateLabelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateLabelsResponse proto.InternalMessageInfo + +func (m *MutateLabelsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateLabelsResponse) GetResults() []*MutateLabelResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for a label mutate. +type MutateLabelResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateLabelResult) Reset() { *m = MutateLabelResult{} } +func (m *MutateLabelResult) String() string { return proto.CompactTextString(m) } +func (*MutateLabelResult) ProtoMessage() {} +func (*MutateLabelResult) Descriptor() ([]byte, []int) { + return fileDescriptor_label_service_11259d6fcb5427fb, []int{4} +} +func (m *MutateLabelResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateLabelResult.Unmarshal(m, b) +} +func (m *MutateLabelResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateLabelResult.Marshal(b, m, deterministic) +} +func (dst *MutateLabelResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateLabelResult.Merge(dst, src) +} +func (m *MutateLabelResult) XXX_Size() int { + return xxx_messageInfo_MutateLabelResult.Size(m) +} +func (m *MutateLabelResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateLabelResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateLabelResult proto.InternalMessageInfo + +func (m *MutateLabelResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetLabelRequest)(nil), "google.ads.googleads.v1.services.GetLabelRequest") + proto.RegisterType((*MutateLabelsRequest)(nil), "google.ads.googleads.v1.services.MutateLabelsRequest") + proto.RegisterType((*LabelOperation)(nil), "google.ads.googleads.v1.services.LabelOperation") + proto.RegisterType((*MutateLabelsResponse)(nil), "google.ads.googleads.v1.services.MutateLabelsResponse") + proto.RegisterType((*MutateLabelResult)(nil), "google.ads.googleads.v1.services.MutateLabelResult") +} + +// Reference 
imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LabelServiceClient is the client API for LabelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LabelServiceClient interface { + // Returns the requested label in full detail. + GetLabel(ctx context.Context, in *GetLabelRequest, opts ...grpc.CallOption) (*resources.Label, error) + // Creates, updates, or removes labels. Operation statuses are returned. + MutateLabels(ctx context.Context, in *MutateLabelsRequest, opts ...grpc.CallOption) (*MutateLabelsResponse, error) +} + +type labelServiceClient struct { + cc *grpc.ClientConn +} + +func NewLabelServiceClient(cc *grpc.ClientConn) LabelServiceClient { + return &labelServiceClient{cc} +} + +func (c *labelServiceClient) GetLabel(ctx context.Context, in *GetLabelRequest, opts ...grpc.CallOption) (*resources.Label, error) { + out := new(resources.Label) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.LabelService/GetLabel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *labelServiceClient) MutateLabels(ctx context.Context, in *MutateLabelsRequest, opts ...grpc.CallOption) (*MutateLabelsResponse, error) { + out := new(MutateLabelsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.LabelService/MutateLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LabelServiceServer is the server API for LabelService service. +type LabelServiceServer interface { + // Returns the requested label in full detail. + GetLabel(context.Context, *GetLabelRequest) (*resources.Label, error) + // Creates, updates, or removes labels. Operation statuses are returned. 
+ MutateLabels(context.Context, *MutateLabelsRequest) (*MutateLabelsResponse, error) +} + +func RegisterLabelServiceServer(s *grpc.Server, srv LabelServiceServer) { + s.RegisterService(&_LabelService_serviceDesc, srv) +} + +func _LabelService_GetLabel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLabelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LabelServiceServer).GetLabel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.LabelService/GetLabel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LabelServiceServer).GetLabel(ctx, req.(*GetLabelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LabelService_MutateLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LabelServiceServer).MutateLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.LabelService/MutateLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LabelServiceServer).MutateLabels(ctx, req.(*MutateLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LabelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.LabelService", + HandlerType: (*LabelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetLabel", + Handler: _LabelService_GetLabel_Handler, + }, + { + MethodName: "MutateLabels", + Handler: _LabelService_MutateLabels_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/label_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/label_service.proto", fileDescriptor_label_service_11259d6fcb5427fb) +} + +var fileDescriptor_label_service_11259d6fcb5427fb = []byte{ + // 699 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x41, 0x6b, 0xd4, 0x40, + 0x14, 0x36, 0xa9, 0xd4, 0x76, 0xb2, 0xb6, 0x74, 0xaa, 0xb8, 0x2c, 0xa2, 0x4b, 0x2c, 0xb8, 0xac, + 0x98, 0x74, 0xb7, 0x5a, 0x24, 0xa5, 0x87, 0x5d, 0xb0, 0xad, 0x60, 0x6d, 0x49, 0xa1, 0x07, 0x59, + 0x08, 0xd3, 0xcd, 0x74, 0x09, 0x4d, 0x32, 0x71, 0x66, 0xb2, 0x52, 0x4a, 0x2f, 0xfe, 0x05, 0x4f, + 0x5e, 0x3d, 0x0a, 0x1e, 0xfd, 0x13, 0x5e, 0x05, 0x7f, 0x80, 0x78, 0xf2, 0x17, 0x78, 0xf0, 0x20, + 0x33, 0x93, 0x89, 0xbb, 0x95, 0xb2, 0xdd, 0xdb, 0xcb, 0x9b, 0xef, 0xfb, 0xde, 0x37, 0xef, 0xe5, + 0x0d, 0x78, 0x32, 0x20, 0x64, 0x10, 0x63, 0x17, 0x85, 0xcc, 0x55, 0xa1, 0x88, 0x86, 0x2d, 0x97, + 0x61, 0x3a, 0x8c, 0xfa, 0x98, 0xb9, 0x31, 0x3a, 0xc2, 0x71, 0x50, 0x7c, 0x3a, 0x19, 0x25, 0x9c, + 0xc0, 0xba, 0x82, 0x3a, 0x28, 0x64, 0x4e, 0xc9, 0x72, 0x86, 0x2d, 0x47, 0xb3, 0x6a, 0x8f, 0x2f, + 0xd3, 0xa5, 0x98, 0x91, 0x9c, 0x96, 0xc2, 0x4a, 0xb0, 0x76, 0x57, 0xc3, 0xb3, 0xc8, 0x45, 0x69, + 0x4a, 0x38, 0xe2, 0x11, 0x49, 0x59, 0x71, 0x5a, 0x94, 0x73, 0xe5, 0xd7, 0x51, 0x7e, 0xec, 0x1e, + 0x47, 0x38, 0x0e, 0x83, 0x04, 0xb1, 0x93, 0x02, 0x71, 0xef, 0x22, 0xe2, 0x2d, 0x45, 0x59, 0x86, + 0xa9, 0x56, 0xb8, 0x53, 0x9c, 0xd3, 0xac, 0xef, 0x32, 0x8e, 0x78, 0x5e, 0x1c, 0xd8, 
0xeb, 0x60, + 0x71, 0x1b, 0xf3, 0x97, 0xc2, 0x8a, 0x8f, 0xdf, 0xe4, 0x98, 0x71, 0xf8, 0x00, 0xdc, 0xd4, 0x26, + 0x83, 0x14, 0x25, 0xb8, 0x6a, 0xd4, 0x8d, 0xc6, 0xbc, 0x5f, 0xd1, 0xc9, 0x57, 0x28, 0xc1, 0xf6, + 0x77, 0x03, 0x2c, 0xef, 0xe6, 0x1c, 0x71, 0x2c, 0xb9, 0x4c, 0x93, 0xef, 0x03, 0xab, 0x9f, 0x33, + 0x4e, 0x12, 0x4c, 0x83, 0x28, 0x2c, 0xa8, 0x40, 0xa7, 0x5e, 0x84, 0x70, 0x1f, 0x00, 0x92, 0x61, + 0xaa, 0xee, 0x57, 0x35, 0xeb, 0x33, 0x0d, 0xab, 0xbd, 0xea, 0x4c, 0xea, 0xa7, 0x23, 0xab, 0xec, + 0x69, 0xa2, 0x3f, 0xa2, 0x01, 0x1f, 0x82, 0xc5, 0x0c, 0x51, 0x1e, 0xa1, 0x38, 0x38, 0x46, 0x51, + 0x9c, 0x53, 0x5c, 0x9d, 0xa9, 0x1b, 0x8d, 0x39, 0x7f, 0xa1, 0x48, 0x6f, 0xa9, 0xac, 0xb8, 0xd8, + 0x10, 0xc5, 0x51, 0x88, 0x38, 0x0e, 0x48, 0x1a, 0x9f, 0x56, 0xaf, 0x4b, 0x58, 0x45, 0x27, 0xf7, + 0xd2, 0xf8, 0xd4, 0xfe, 0x63, 0x80, 0x85, 0xf1, 0x62, 0x70, 0x03, 0x58, 0x79, 0x26, 0x59, 0xa2, + 0xe3, 0x92, 0x65, 0xb5, 0x6b, 0xda, 0xb3, 0x6e, 0xb9, 0xb3, 0x25, 0x86, 0xb2, 0x8b, 0xd8, 0x89, + 0x0f, 0x14, 0x5c, 0xc4, 0xb0, 0x0b, 0x66, 0xfb, 0x14, 0x23, 0xae, 0xda, 0x68, 0xb5, 0x1b, 0x97, + 0xde, 0xb5, 0xfc, 0x33, 0xd4, 0x65, 0x77, 0xae, 0xf9, 0x05, 0x53, 0x68, 0x28, 0xc5, 0xaa, 0x39, + 0xbd, 0x86, 0x62, 0xc2, 0x2a, 0x98, 0xa5, 0x38, 0x21, 0x43, 0xd5, 0x9c, 0x79, 0x71, 0xa2, 0xbe, + 0xbb, 0x16, 0x98, 0x2f, 0xbb, 0x69, 0x7f, 0x36, 0xc0, 0xad, 0xf1, 0xb9, 0xb2, 0x8c, 0xa4, 0x0c, + 0xc3, 0x2d, 0x70, 0xfb, 0x42, 0x97, 0x03, 0x4c, 0x29, 0xa1, 0x52, 0xce, 0x6a, 0x43, 0x6d, 0x89, + 0x66, 0x7d, 0xe7, 0x40, 0xfe, 0x61, 0xfe, 0xf2, 0x78, 0xff, 0x9f, 0x0b, 0x38, 0xdc, 0x05, 0x37, + 0x28, 0x66, 0x79, 0xcc, 0xf5, 0xf0, 0xd7, 0x26, 0x0f, 0x7f, 0xc4, 0x90, 0x2f, 0xb9, 0xbe, 0xd6, + 0xb0, 0x9f, 0x81, 0xa5, 0xff, 0x4e, 0xaf, 0xf4, 0x07, 0xb7, 0x7f, 0x98, 0xa0, 0x22, 0x49, 0x07, + 0xaa, 0x0c, 0xfc, 0x60, 0x80, 0x39, 0xbd, 0x0b, 0xb0, 0x35, 0xd9, 0xd5, 0x85, 0xbd, 0xa9, 0x5d, + 0x79, 0x2a, 0xf6, 0xea, 0xbb, 0x6f, 0x3f, 0xdf, 0x9b, 0x4d, 0xd8, 0x10, 0x0f, 0xc2, 0xd9, 0x98, + 0xd5, 0x4d, 0xbd, 0x2a, 0xcc, 0x6d, 0xaa, 0x17, 0x82, 0xb9, 0xcd, 0x73, 0xf8, 0xc5, 0x00, 0x95, + 0xd1, 0xb1, 0xc0, 0xa7, 0x53, 0x75, 0x4d, 0xaf, 0x67, 0x6d, 0x7d, 0x5a, 0x9a, 0x9a, 0xbe, 0xbd, + 0x2e, 0x1d, 0xaf, 0xda, 0x8f, 0x84, 0xe3, 0x7f, 0x16, 0xcf, 0x46, 0x76, 0x7d, 0xb3, 0x79, 0x5e, + 0x18, 0xf6, 0x12, 0x29, 0xe1, 0x19, 0xcd, 0xee, 0x6f, 0x03, 0xac, 0xf4, 0x49, 0x32, 0xb1, 0x6a, + 0x77, 0x69, 0x74, 0x14, 0xfb, 0x62, 0xa5, 0xf6, 0x8d, 0xd7, 0x3b, 0x05, 0x6d, 0x40, 0x62, 0x94, + 0x0e, 0x1c, 0x42, 0x07, 0xee, 0x00, 0xa7, 0x72, 0xe1, 0xf4, 0xa3, 0x9a, 0x45, 0xec, 0xf2, 0xb7, + 0x7b, 0x43, 0x07, 0x1f, 0xcd, 0x99, 0xed, 0x4e, 0xe7, 0x93, 0x59, 0xdf, 0x56, 0x82, 0x9d, 0x90, + 0x39, 0x2a, 0x14, 0xd1, 0x61, 0xcb, 0x29, 0x0a, 0xb3, 0xaf, 0x1a, 0xd2, 0xeb, 0x84, 0xac, 0x57, + 0x42, 0x7a, 0x87, 0xad, 0x9e, 0x86, 0xfc, 0x32, 0x57, 0x54, 0xde, 0xf3, 0x3a, 0x21, 0xf3, 0xbc, + 0x12, 0xe4, 0x79, 0x87, 0x2d, 0xcf, 0xd3, 0xb0, 0xa3, 0x59, 0xe9, 0x73, 0xed, 0x6f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x38, 0xc2, 0xb6, 0xf8, 0x62, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/landing_page_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/landing_page_view_service.pb.go new file mode 100644 index 0000000..8063d7b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/landing_page_view_service.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/ads/googleads/v1/services/landing_page_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [LandingPageViewService.GetLandingPageView][google.ads.googleads.v1.services.LandingPageViewService.GetLandingPageView]. +type GetLandingPageViewRequest struct { + // The resource name of the landing page view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLandingPageViewRequest) Reset() { *m = GetLandingPageViewRequest{} } +func (m *GetLandingPageViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetLandingPageViewRequest) ProtoMessage() {} +func (*GetLandingPageViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_landing_page_view_service_969504292394555a, []int{0} +} +func (m *GetLandingPageViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLandingPageViewRequest.Unmarshal(m, b) +} +func (m *GetLandingPageViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLandingPageViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetLandingPageViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLandingPageViewRequest.Merge(dst, src) +} +func (m *GetLandingPageViewRequest) XXX_Size() int { + return xxx_messageInfo_GetLandingPageViewRequest.Size(m) +} +func (m *GetLandingPageViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLandingPageViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLandingPageViewRequest proto.InternalMessageInfo + +func (m *GetLandingPageViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetLandingPageViewRequest)(nil), "google.ads.googleads.v1.services.GetLandingPageViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LandingPageViewServiceClient is the client API for LandingPageViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type LandingPageViewServiceClient interface { + // Returns the requested landing page view in full detail. + GetLandingPageView(ctx context.Context, in *GetLandingPageViewRequest, opts ...grpc.CallOption) (*resources.LandingPageView, error) +} + +type landingPageViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewLandingPageViewServiceClient(cc *grpc.ClientConn) LandingPageViewServiceClient { + return &landingPageViewServiceClient{cc} +} + +func (c *landingPageViewServiceClient) GetLandingPageView(ctx context.Context, in *GetLandingPageViewRequest, opts ...grpc.CallOption) (*resources.LandingPageView, error) { + out := new(resources.LandingPageView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.LandingPageViewService/GetLandingPageView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LandingPageViewServiceServer is the server API for LandingPageViewService service. +type LandingPageViewServiceServer interface { + // Returns the requested landing page view in full detail. + GetLandingPageView(context.Context, *GetLandingPageViewRequest) (*resources.LandingPageView, error) +} + +func RegisterLandingPageViewServiceServer(s *grpc.Server, srv LandingPageViewServiceServer) { + s.RegisterService(&_LandingPageViewService_serviceDesc, srv) +} + +func _LandingPageViewService_GetLandingPageView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLandingPageViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LandingPageViewServiceServer).GetLandingPageView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.LandingPageViewService/GetLandingPageView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LandingPageViewServiceServer).GetLandingPageView(ctx, req.(*GetLandingPageViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LandingPageViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.LandingPageViewService", + HandlerType: (*LandingPageViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetLandingPageView", + Handler: _LandingPageViewService_GetLandingPageView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/landing_page_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/landing_page_view_service.proto", fileDescriptor_landing_page_view_service_969504292394555a) +} + +var fileDescriptor_landing_page_view_service_969504292394555a = []byte{ + // 387 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xbf, 0x6a, 0xdb, 0x40, + 0x18, 0x47, 0x2a, 0x14, 0x2a, 0xda, 0x45, 0x43, 0x69, 0xdd, 0x52, 0x8c, 0xeb, 0xa1, 0x78, 0xb8, + 0x43, 0x6e, 0x97, 0x9e, 0x09, 0x58, 0x5e, 0x9c, 0x21, 0x04, 0xe3, 0x80, 0x86, 0x20, 0x10, 0x67, + 0xeb, 0xcb, 0x21, 0xb0, 0xee, 0x94, 0x3b, 0x49, 0x1e, 0x42, 0x16, 0xbf, 0x42, 0xde, 0x20, 0x63, + 0xde, 0x22, 0x6b, 0xd6, 0xac, 0x19, 0x33, 0xe5, 0x29, 0x82, 0x7c, 0x3a, 0x41, 0x1c, 0x0b, 0x6f, + 0x3f, 0xdd, 0xf7, 0xfb, 0xf3, 0xdd, 0xef, 0xe4, 0x8c, 0x99, 0x10, 0x6c, 0x05, 0x98, 0xc6, 0x0a, + 0x6b, 0x58, 0xa1, 0xd2, 0xc3, 0x0a, 0x64, 0x99, 0x2c, 0x41, 0xe1, 0x15, 0xe5, 0x71, 0xc2, 0x59, + 0x94, 0x51, 0x06, 0x51, 0x99, 0xc0, 0x3a, 
0xaa, 0x47, 0x28, 0x93, 0x22, 0x17, 0x6e, 0x57, 0xcb, + 0x10, 0x8d, 0x15, 0x6a, 0x1c, 0x50, 0xe9, 0x21, 0xe3, 0xd0, 0xf9, 0xdf, 0x96, 0x21, 0x41, 0x89, + 0x42, 0xee, 0x0d, 0xd1, 0xe6, 0x9d, 0x9f, 0x46, 0x9a, 0x25, 0x98, 0x72, 0x2e, 0x72, 0x9a, 0x27, + 0x82, 0xab, 0x7a, 0xfa, 0xab, 0x9e, 0x6e, 0xbf, 0x16, 0xc5, 0x05, 0x5e, 0x4b, 0x9a, 0x65, 0x20, + 0xeb, 0x79, 0x6f, 0xec, 0x7c, 0x9f, 0x42, 0x7e, 0xa2, 0xbd, 0x67, 0x94, 0x41, 0x90, 0xc0, 0x7a, + 0x0e, 0x97, 0x05, 0xa8, 0xdc, 0xfd, 0xed, 0x7c, 0x31, 0xf9, 0x11, 0xa7, 0x29, 0x7c, 0xb3, 0xba, + 0xd6, 0x9f, 0x4f, 0xf3, 0xcf, 0xe6, 0xf0, 0x94, 0xa6, 0x30, 0x7c, 0xb2, 0x9c, 0xaf, 0x3b, 0xfa, + 0x33, 0x7d, 0x2d, 0xf7, 0xde, 0x72, 0xdc, 0xf7, 0xee, 0xee, 0x08, 0x1d, 0xea, 0x03, 0xb5, 0xee, + 0xd4, 0x19, 0xb6, 0x8a, 0x9b, 0xaa, 0xd0, 0x8e, 0xb4, 0x47, 0x36, 0x8f, 0xcf, 0x37, 0xf6, 0x3f, + 0x77, 0x58, 0x35, 0x7a, 0xf5, 0xe6, 0x4a, 0x47, 0xcb, 0x42, 0xe5, 0x22, 0x05, 0xa9, 0xf0, 0xc0, + 0x54, 0x6c, 0x74, 0x0a, 0x0f, 0xae, 0x27, 0x1b, 0xdb, 0xe9, 0x2f, 0x45, 0x7a, 0x70, 0xe5, 0xc9, + 0x8f, 0xfd, 0x25, 0xcc, 0xaa, 0x9a, 0x67, 0xd6, 0xf9, 0x71, 0x6d, 0xc0, 0xc4, 0x8a, 0x72, 0x86, + 0x84, 0x64, 0x98, 0x01, 0xdf, 0x3e, 0x82, 0x79, 0xf1, 0x2c, 0x51, 0xed, 0x3f, 0xd9, 0xc8, 0x80, + 0x5b, 0xfb, 0xc3, 0xd4, 0xf7, 0xef, 0xec, 0xee, 0x54, 0x1b, 0xfa, 0xb1, 0x42, 0x1a, 0x56, 0x28, + 0xf0, 0x50, 0x1d, 0xac, 0x1e, 0x0c, 0x25, 0xf4, 0x63, 0x15, 0x36, 0x94, 0x30, 0xf0, 0x42, 0x43, + 0x79, 0xb1, 0xfb, 0xfa, 0x9c, 0x10, 0x3f, 0x56, 0x84, 0x34, 0x24, 0x42, 0x02, 0x8f, 0x10, 0x43, + 0x5b, 0x7c, 0xdc, 0xee, 0xf9, 0xf7, 0x35, 0x00, 0x00, 0xff, 0xff, 0x3e, 0xa1, 0x45, 0x50, 0x0b, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/language_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/language_constant_service.pb.go new file mode 100644 index 0000000..f4ee977 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/language_constant_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/language_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [LanguageConstantService.GetLanguageConstant][google.ads.googleads.v1.services.LanguageConstantService.GetLanguageConstant]. +type GetLanguageConstantRequest struct { + // Resource name of the language constant to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLanguageConstantRequest) Reset() { *m = GetLanguageConstantRequest{} } +func (m *GetLanguageConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetLanguageConstantRequest) ProtoMessage() {} +func (*GetLanguageConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_constant_service_a8b709dbef4a243c, []int{0} +} +func (m *GetLanguageConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLanguageConstantRequest.Unmarshal(m, b) +} +func (m *GetLanguageConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLanguageConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetLanguageConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLanguageConstantRequest.Merge(dst, src) +} +func (m *GetLanguageConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetLanguageConstantRequest.Size(m) +} +func (m *GetLanguageConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLanguageConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLanguageConstantRequest proto.InternalMessageInfo + +func (m *GetLanguageConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetLanguageConstantRequest)(nil), "google.ads.googleads.v1.services.GetLanguageConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LanguageConstantServiceClient is the client API for LanguageConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LanguageConstantServiceClient interface { + // Returns the requested language constant. + GetLanguageConstant(ctx context.Context, in *GetLanguageConstantRequest, opts ...grpc.CallOption) (*resources.LanguageConstant, error) +} + +type languageConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewLanguageConstantServiceClient(cc *grpc.ClientConn) LanguageConstantServiceClient { + return &languageConstantServiceClient{cc} +} + +func (c *languageConstantServiceClient) GetLanguageConstant(ctx context.Context, in *GetLanguageConstantRequest, opts ...grpc.CallOption) (*resources.LanguageConstant, error) { + out := new(resources.LanguageConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.LanguageConstantService/GetLanguageConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LanguageConstantServiceServer is the server API for LanguageConstantService service. +type LanguageConstantServiceServer interface { + // Returns the requested language constant. 
+ GetLanguageConstant(context.Context, *GetLanguageConstantRequest) (*resources.LanguageConstant, error) +} + +func RegisterLanguageConstantServiceServer(s *grpc.Server, srv LanguageConstantServiceServer) { + s.RegisterService(&_LanguageConstantService_serviceDesc, srv) +} + +func _LanguageConstantService_GetLanguageConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLanguageConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageConstantServiceServer).GetLanguageConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.LanguageConstantService/GetLanguageConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageConstantServiceServer).GetLanguageConstant(ctx, req.(*GetLanguageConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LanguageConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.LanguageConstantService", + HandlerType: (*LanguageConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetLanguageConstant", + Handler: _LanguageConstantService_GetLanguageConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/language_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/language_constant_service.proto", fileDescriptor_language_constant_service_a8b709dbef4a243c) +} + +var fileDescriptor_language_constant_service_a8b709dbef4a243c = []byte{ + // 355 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x4d, 0x4a, 0xc3, 0x40, + 0x14, 0x26, 0x11, 0x04, 0x07, 0xdd, 0xc4, 0x85, 0x12, 0xba, 0x28, 0xb5, 0xe0, 0xcf, 0x62, 0x86, + 0xd8, 0x95, 0xa3, 0x82, 0x53, 0x17, 0x75, 0x21, 0x52, 0x2a, 0x74, 0x21, 0x81, 0x32, 0x36, 0xc3, + 0x10, 0x68, 0x67, 0x6a, 0xde, 0xb4, 0x1b, 0x71, 0xa3, 0x47, 0xf0, 0x06, 0x2e, 0xbd, 0x81, 0x57, + 0x70, 0x2b, 0xde, 0xc0, 0x95, 0xa7, 0x90, 0x74, 0x32, 0x01, 0x4b, 0x43, 0x77, 0x1f, 0xf3, 0xbe, + 0x9f, 0xf7, 0xbe, 0x04, 0x5d, 0x48, 0xad, 0xe5, 0x48, 0x10, 0x9e, 0x00, 0xb1, 0x30, 0x47, 0xb3, + 0x88, 0x80, 0xc8, 0x66, 0xe9, 0x50, 0x00, 0x19, 0x71, 0x25, 0xa7, 0x5c, 0x8a, 0xc1, 0x50, 0x2b, + 0x30, 0x5c, 0x99, 0x41, 0x31, 0xc2, 0x93, 0x4c, 0x1b, 0x1d, 0xd4, 0xad, 0x0c, 0xf3, 0x04, 0x70, + 0xe9, 0x80, 0x67, 0x11, 0x76, 0x0e, 0xe1, 0x49, 0x55, 0x46, 0x26, 0x40, 0x4f, 0xb3, 0xa5, 0x21, + 0xd6, 0x3c, 0xac, 0x39, 0xe9, 0x24, 0x25, 0x5c, 0x29, 0x6d, 0xb8, 0x49, 0xb5, 0x02, 0x3b, 0x6d, + 0x30, 0x14, 0x76, 0x84, 0xb9, 0x2e, 0xb4, 0x97, 0x85, 0xb4, 0x27, 0x1e, 0xa6, 0x02, 0x4c, 0xb0, + 0x87, 0xb6, 0x5c, 0xc0, 0x40, 0xf1, 0xb1, 0xd8, 0xf5, 0xea, 0xde, 0xc1, 0x46, 0x6f, 0xd3, 0x3d, + 0xde, 0xf0, 0xb1, 0x38, 0xfe, 0xf6, 0xd0, 0xce, 0xa2, 0xc1, 0xad, 0x5d, 0x3c, 0xf8, 0xf0, 0xd0, + 0xf6, 0x12, 0xff, 0xe0, 0x0c, 0xaf, 0x3a, 0x19, 0x57, 0xaf, 0x15, 0xb6, 0x2a, 0xd5, 0x65, 0x1d, + 0x78, 0x51, 0xdb, 0x20, 0xcf, 0x5f, 0x3f, 0xaf, 0xfe, 0x61, 0xb0, 0x9f, 0xd7, 0xf6, 0xf8, 0xef, + 0xac, 0xf3, 0xd1, 0x02, 0x19, 0xc8, 0xd1, 0x53, 0xfb, 0xc5, 0x47, 0xcd, 0xa1, 0x1e, 0xaf, 0xdc, + 0xb4, 0x5d, 0xab, 0xb8, 0xbe, 0x9b, 0x37, 0xdc, 0xf5, 0xee, 0xae, 0x0a, 0x07, 0xa9, 0xf3, 0x1c, + 0xac, 0x33, 0x49, 0xa4, 0x50, 0xf3, 0xfe, 0xdd, 0xc7, 0x9c, 0xa4, 0x50, 0xfd, 0xff, 0x9c, 
0x3a, + 0xf0, 0xe6, 0xaf, 0x75, 0x18, 0x7b, 0xf7, 0xeb, 0x1d, 0x6b, 0xc8, 0x12, 0xc0, 0x16, 0xe6, 0xa8, + 0x1f, 0xe1, 0x22, 0x18, 0x3e, 0x1d, 0x25, 0x66, 0x09, 0xc4, 0x25, 0x25, 0xee, 0x47, 0xb1, 0xa3, + 0xfc, 0xfa, 0x4d, 0xfb, 0x4e, 0x29, 0x4b, 0x80, 0xd2, 0x92, 0x44, 0x69, 0x3f, 0xa2, 0xd4, 0xd1, + 0xee, 0xd7, 0xe7, 0x7b, 0xb6, 0xfe, 0x02, 0x00, 0x00, 0xff, 0xff, 0xd6, 0xf6, 0xfe, 0xfa, 0xe6, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/location_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/location_view_service.pb.go new file mode 100644 index 0000000..476216f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/location_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/location_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [LocationViewService.GetLocationView][google.ads.googleads.v1.services.LocationViewService.GetLocationView]. +type GetLocationViewRequest struct { + // The resource name of the location view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLocationViewRequest) Reset() { *m = GetLocationViewRequest{} } +func (m *GetLocationViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetLocationViewRequest) ProtoMessage() {} +func (*GetLocationViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_location_view_service_3a92c03e961aee1c, []int{0} +} +func (m *GetLocationViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLocationViewRequest.Unmarshal(m, b) +} +func (m *GetLocationViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLocationViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetLocationViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLocationViewRequest.Merge(dst, src) +} +func (m *GetLocationViewRequest) XXX_Size() int { + return xxx_messageInfo_GetLocationViewRequest.Size(m) +} +func (m *GetLocationViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLocationViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLocationViewRequest proto.InternalMessageInfo + +func (m *GetLocationViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetLocationViewRequest)(nil), "google.ads.googleads.v1.services.GetLocationViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LocationViewServiceClient is the client API for LocationViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LocationViewServiceClient interface { + // Returns the requested location view in full detail. + GetLocationView(ctx context.Context, in *GetLocationViewRequest, opts ...grpc.CallOption) (*resources.LocationView, error) +} + +type locationViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewLocationViewServiceClient(cc *grpc.ClientConn) LocationViewServiceClient { + return &locationViewServiceClient{cc} +} + +func (c *locationViewServiceClient) GetLocationView(ctx context.Context, in *GetLocationViewRequest, opts ...grpc.CallOption) (*resources.LocationView, error) { + out := new(resources.LocationView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.LocationViewService/GetLocationView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LocationViewServiceServer is the server API for LocationViewService service. +type LocationViewServiceServer interface { + // Returns the requested location view in full detail. 
+ GetLocationView(context.Context, *GetLocationViewRequest) (*resources.LocationView, error) +} + +func RegisterLocationViewServiceServer(s *grpc.Server, srv LocationViewServiceServer) { + s.RegisterService(&_LocationViewService_serviceDesc, srv) +} + +func _LocationViewService_GetLocationView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLocationViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LocationViewServiceServer).GetLocationView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.LocationViewService/GetLocationView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LocationViewServiceServer).GetLocationView(ctx, req.(*GetLocationViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LocationViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.LocationViewService", + HandlerType: (*LocationViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetLocationView", + Handler: _LocationViewService_GetLocationView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/location_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/location_view_service.proto", fileDescriptor_location_view_service_3a92c03e961aee1c) +} + +var fileDescriptor_location_view_service_3a92c03e961aee1c = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x3f, 0x4b, 0xc3, 0x40, + 0x18, 0xc6, 0x49, 0x04, 0xc1, 0xa0, 0x08, 0x11, 0xa4, 0x14, 0x87, 0x52, 0x3b, 0x48, 0x87, 0x3b, + 0xa2, 0x88, 0x72, 0xda, 0x21, 0x5d, 0xea, 0x20, 0x52, 0x2a, 0x64, 0x90, 0x40, 0x39, 0x93, 0x97, + 0x10, 0x68, 0x72, 0x35, 0xef, 0x35, 0x1d, 0xc4, 0xc5, 0xaf, 0xe0, 0x37, 0x70, 0x74, 0xf7, 0x4b, + 0x08, 0x4e, 0x7e, 0x05, 0x27, 0xbf, 0x84, 0x92, 0x5e, 0x2e, 0x54, 0x6d, 0xe9, 0xf6, 0xf0, 0xe6, + 0xf9, 0x3d, 0xef, 0x9f, 0x9c, 0x75, 0x1e, 0x09, 0x11, 0x8d, 0x80, 0xf2, 0x10, 0xa9, 0x92, 0x85, + 0xca, 0x1d, 0x8a, 0x90, 0xe5, 0x71, 0x00, 0x48, 0x47, 0x22, 0xe0, 0x32, 0x16, 0xe9, 0x30, 0x8f, + 0x61, 0x3a, 0x2c, 0xcb, 0x64, 0x9c, 0x09, 0x29, 0xec, 0x86, 0x42, 0x08, 0x0f, 0x91, 0x54, 0x34, + 0xc9, 0x1d, 0xa2, 0xe9, 0xfa, 0xf1, 0xb2, 0xfc, 0x0c, 0x50, 0x4c, 0xb2, 0x7f, 0x0d, 0x54, 0x70, + 0x7d, 0x4f, 0x63, 0xe3, 0x98, 0xf2, 0x34, 0x15, 0x72, 0xe6, 0x40, 0xf5, 0xb5, 0xd9, 0xb1, 0x76, + 0x7b, 0x20, 0x2f, 0x4b, 0xce, 0x8b, 0x61, 0x3a, 0x80, 0xbb, 0x09, 0xa0, 0xb4, 0xf7, 0xad, 0x2d, + 0x1d, 0x3c, 0x4c, 0x79, 0x02, 0x35, 0xa3, 0x61, 0x1c, 0x6c, 0x0c, 0x36, 0x75, 0xf1, 0x8a, 0x27, + 0x70, 0xf8, 0x6e, 0x58, 0x3b, 0xf3, 0xf0, 0xb5, 0x1a, 0xd6, 0x7e, 0x35, 0xac, 0xed, 0x3f, 0xb9, + 0xf6, 0x29, 0x59, 0xb5, 0x22, 0x59, 0x3c, 0x4a, 0x9d, 0x2e, 0x25, 0xab, 0xd5, 0xc9, 0x3c, 0xd7, + 0x3c, 0x79, 0xfc, 0xf8, 0x7c, 0x32, 0x1d, 0x9b, 0x16, 0xe7, 0xb9, 0xff, 0xb5, 0x46, 0x27, 0x98, + 0xa0, 0x14, 0x09, 0x64, 0x48, 0xdb, 0xd5, 0xbd, 0x0a, 0x08, 0x69, 0xfb, 0xa1, 0xfb, 0x6d, 0x58, + 0xad, 0x40, 0x24, 0x2b, 0x27, 0xed, 0xd6, 0x16, 0x6c, 0xdd, 0x2f, 0x2e, 0xda, 0x37, 0x6e, 0x2e, + 0x4a, 0x3a, 0x12, 0x23, 0x9e, 0x46, 0x44, 0x64, 0x11, 0x8d, 0x20, 0x9d, 0xdd, 0x5b, 0xff, 0xb8, + 0x71, 0x8c, 0xcb, 0xdf, 0xc9, 0x99, 0x16, 0xcf, 0xe6, 0x5a, 0xcf, 0x75, 0x5f, 0xcc, 0x46, 0x4f, + 
0x05, 0xba, 0x21, 0x12, 0x25, 0x0b, 0xe5, 0x39, 0xa4, 0x6c, 0x8c, 0x6f, 0xda, 0xe2, 0xbb, 0x21, + 0xfa, 0x95, 0xc5, 0xf7, 0x1c, 0x5f, 0x5b, 0xbe, 0xcc, 0x96, 0xaa, 0x33, 0xe6, 0x86, 0xc8, 0x58, + 0x65, 0x62, 0xcc, 0x73, 0x18, 0xd3, 0xb6, 0xdb, 0xf5, 0xd9, 0x9c, 0x47, 0x3f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x91, 0x8a, 0x03, 0x21, 0xce, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/managed_placement_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/managed_placement_view_service.pb.go new file mode 100644 index 0000000..fad0474 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/managed_placement_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/managed_placement_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ManagedPlacementViewService.GetManagedPlacementView][google.ads.googleads.v1.services.ManagedPlacementViewService.GetManagedPlacementView]. +type GetManagedPlacementViewRequest struct { + // The resource name of the Managed Placement View to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetManagedPlacementViewRequest) Reset() { *m = GetManagedPlacementViewRequest{} } +func (m *GetManagedPlacementViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetManagedPlacementViewRequest) ProtoMessage() {} +func (*GetManagedPlacementViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_managed_placement_view_service_836fe3ce69526adc, []int{0} +} +func (m *GetManagedPlacementViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetManagedPlacementViewRequest.Unmarshal(m, b) +} +func (m *GetManagedPlacementViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetManagedPlacementViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetManagedPlacementViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetManagedPlacementViewRequest.Merge(dst, src) +} +func (m *GetManagedPlacementViewRequest) XXX_Size() int { + return xxx_messageInfo_GetManagedPlacementViewRequest.Size(m) +} +func (m *GetManagedPlacementViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetManagedPlacementViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetManagedPlacementViewRequest proto.InternalMessageInfo + +func (m *GetManagedPlacementViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetManagedPlacementViewRequest)(nil), "google.ads.googleads.v1.services.GetManagedPlacementViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ManagedPlacementViewServiceClient is the client API for ManagedPlacementViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ManagedPlacementViewServiceClient interface { + // Returns the requested Managed Placement view in full detail. + GetManagedPlacementView(ctx context.Context, in *GetManagedPlacementViewRequest, opts ...grpc.CallOption) (*resources.ManagedPlacementView, error) +} + +type managedPlacementViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewManagedPlacementViewServiceClient(cc *grpc.ClientConn) ManagedPlacementViewServiceClient { + return &managedPlacementViewServiceClient{cc} +} + +func (c *managedPlacementViewServiceClient) GetManagedPlacementView(ctx context.Context, in *GetManagedPlacementViewRequest, opts ...grpc.CallOption) (*resources.ManagedPlacementView, error) { + out := new(resources.ManagedPlacementView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ManagedPlacementViewService/GetManagedPlacementView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ManagedPlacementViewServiceServer is the server API for ManagedPlacementViewService service. +type ManagedPlacementViewServiceServer interface { + // Returns the requested Managed Placement view in full detail. 
+ GetManagedPlacementView(context.Context, *GetManagedPlacementViewRequest) (*resources.ManagedPlacementView, error) +} + +func RegisterManagedPlacementViewServiceServer(s *grpc.Server, srv ManagedPlacementViewServiceServer) { + s.RegisterService(&_ManagedPlacementViewService_serviceDesc, srv) +} + +func _ManagedPlacementViewService_GetManagedPlacementView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetManagedPlacementViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ManagedPlacementViewServiceServer).GetManagedPlacementView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ManagedPlacementViewService/GetManagedPlacementView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ManagedPlacementViewServiceServer).GetManagedPlacementView(ctx, req.(*GetManagedPlacementViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ManagedPlacementViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ManagedPlacementViewService", + HandlerType: (*ManagedPlacementViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetManagedPlacementView", + Handler: _ManagedPlacementViewService_GetManagedPlacementView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/managed_placement_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/managed_placement_view_service.proto", fileDescriptor_managed_placement_view_service_836fe3ce69526adc) +} + +var fileDescriptor_managed_placement_view_service_836fe3ce69526adc = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x3d, 0x6b, 0xdb, 0x40, + 0x18, 0x46, 0x2a, 0x14, 0x2a, 0xda, 0x45, 0x4b, 0x8b, 0x5b, 0x8a, 0x70, 0x3d, 0x14, 0x0f, 0x77, + 0xa8, 0x1d, 0x4c, 0x2e, 0xe4, 0x43, 0x06, 0xe3, 0x2c, 0x09, 0xc6, 0x01, 0x0d, 0x41, 0x20, 0x2e, + 0xd2, 0x8b, 0x10, 0x58, 0x77, 0x8a, 0x5e, 0x59, 0x1e, 0x42, 0x96, 0x2c, 0xf9, 0x01, 0xf9, 0x07, + 0x19, 0xf3, 0x53, 0xb2, 0x86, 0xfc, 0x83, 0x4c, 0xd9, 0xb3, 0x07, 0xf9, 0x7c, 0x02, 0x83, 0x65, + 0x6f, 0x0f, 0x77, 0xcf, 0xc7, 0xbd, 0xcf, 0x7b, 0xd6, 0x28, 0x91, 0x32, 0x99, 0x01, 0xe5, 0x31, + 0x52, 0x05, 0x6b, 0x54, 0xb9, 0x14, 0xa1, 0xa8, 0xd2, 0x08, 0x90, 0x66, 0x5c, 0xf0, 0x04, 0xe2, + 0x30, 0x9f, 0xf1, 0x08, 0x32, 0x10, 0x65, 0x58, 0xa5, 0xb0, 0x08, 0x57, 0xf7, 0x24, 0x2f, 0x64, + 0x29, 0x6d, 0x47, 0x69, 0x09, 0x8f, 0x91, 0x34, 0x36, 0xa4, 0x72, 0x89, 0xb6, 0xe9, 0x1c, 0xb6, + 0x05, 0x15, 0x80, 0x72, 0x5e, 0xb4, 0x27, 0xa9, 0x84, 0xce, 0x2f, 0xad, 0xcf, 0x53, 0xca, 0x85, + 0x90, 0x25, 0x2f, 0x53, 0x29, 0x50, 0xdd, 0x76, 0x47, 0xd6, 0xef, 0x31, 0x94, 0xa7, 0xca, 0x60, + 0xa2, 0xf5, 0x7e, 0x0a, 0x8b, 0x29, 0x5c, 0xcd, 0x01, 0x4b, 0xfb, 0x8f, 0xf5, 0x4d, 0x27, 0x85, + 0x82, 0x67, 0xf0, 0xc3, 0x70, 0x8c, 0xbf, 0x5f, 0xa6, 0x5f, 0xf5, 0xe1, 0x19, 0xcf, 0xe0, 0xdf, + 0xbb, 0x61, 0xfd, 0xdc, 0x64, 0x72, 0xae, 0xa6, 0xb0, 0x5f, 0x0c, 0xeb, 0x7b, 0x4b, 0x8e, 0x7d, + 0x4c, 0x76, 0x75, 0x40, 0xb6, 0x3f, 0xb1, 0x33, 0x68, 0x75, 0x68, 0x3a, 0x22, 0x9b, 0xf4, 0xdd, + 0xa3, 0xdb, 0xe7, 0xd7, 0x7b, 0x73, 0xcf, 0x1e, 0xd4, 0x7d, 0x5e, 0xaf, 0x8d, 0x79, 0x10, 0xcd, + 0xb1, 0x94, 0x19, 0x14, 0x48, 0xfb, 0xba, 0xe0, 0x35, 0x31, 0xd2, 0xfe, 0xcd, 
0xf0, 0xce, 0xb4, + 0x7a, 0x91, 0xcc, 0x76, 0x4e, 0x30, 0x74, 0xb6, 0xb4, 0x33, 0xa9, 0x37, 0x31, 0x31, 0x2e, 0x4e, + 0x56, 0x2e, 0x89, 0x9c, 0x71, 0x91, 0x10, 0x59, 0x24, 0x34, 0x01, 0xb1, 0xdc, 0x93, 0xde, 0x7c, + 0x9e, 0x62, 0xfb, 0x8f, 0xdb, 0xd7, 0xe0, 0xc1, 0xfc, 0x34, 0xf6, 0xbc, 0x47, 0xd3, 0x19, 0x2b, + 0x43, 0x2f, 0x46, 0xa2, 0x60, 0x8d, 0x7c, 0x97, 0xac, 0x82, 0xf1, 0x49, 0x53, 0x02, 0x2f, 0xc6, + 0xa0, 0xa1, 0x04, 0xbe, 0x1b, 0x68, 0xca, 0x9b, 0xd9, 0x53, 0xe7, 0x8c, 0x79, 0x31, 0x32, 0xd6, + 0x90, 0x18, 0xf3, 0x5d, 0xc6, 0x34, 0xed, 0xf2, 0xf3, 0xf2, 0x9d, 0xff, 0x3f, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xc9, 0xd9, 0x16, 0xaa, 0x18, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/media_file_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/media_file_service.pb.go new file mode 100644 index 0000000..178759c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/media_file_service.pb.go @@ -0,0 +1,512 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/media_file_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [MediaFileService.GetMediaFile][google.ads.googleads.v1.services.MediaFileService.GetMediaFile] +type GetMediaFileRequest struct { + // The resource name of the media file to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMediaFileRequest) Reset() { *m = GetMediaFileRequest{} } +func (m *GetMediaFileRequest) String() string { return proto.CompactTextString(m) } +func (*GetMediaFileRequest) ProtoMessage() {} +func (*GetMediaFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_service_a9f3cbbb085ac9a6, []int{0} +} +func (m *GetMediaFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMediaFileRequest.Unmarshal(m, b) +} +func (m *GetMediaFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMediaFileRequest.Marshal(b, m, deterministic) +} +func (dst *GetMediaFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMediaFileRequest.Merge(dst, src) +} +func (m *GetMediaFileRequest) XXX_Size() int { + return xxx_messageInfo_GetMediaFileRequest.Size(m) +} +func (m *GetMediaFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMediaFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMediaFileRequest proto.InternalMessageInfo + +func (m *GetMediaFileRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [MediaFileService.MutateMediaFiles][google.ads.googleads.v1.services.MediaFileService.MutateMediaFiles] +type MutateMediaFilesRequest struct { + // The ID of the customer whose media files are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual media file. + Operations []*MediaFileOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMediaFilesRequest) Reset() { *m = MutateMediaFilesRequest{} } +func (m *MutateMediaFilesRequest) String() string { return proto.CompactTextString(m) } +func (*MutateMediaFilesRequest) ProtoMessage() {} +func (*MutateMediaFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_service_a9f3cbbb085ac9a6, []int{1} +} +func (m *MutateMediaFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMediaFilesRequest.Unmarshal(m, b) +} +func (m *MutateMediaFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMediaFilesRequest.Marshal(b, m, deterministic) +} +func (dst *MutateMediaFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMediaFilesRequest.Merge(dst, src) +} +func (m *MutateMediaFilesRequest) XXX_Size() int { + return xxx_messageInfo_MutateMediaFilesRequest.Size(m) +} +func (m *MutateMediaFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMediaFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMediaFilesRequest proto.InternalMessageInfo + +func (m *MutateMediaFilesRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateMediaFilesRequest) GetOperations() []*MediaFileOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateMediaFilesRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateMediaFilesRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation to create media file. +type MediaFileOperation struct { + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *MediaFileOperation_Create + Operation isMediaFileOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MediaFileOperation) Reset() { *m = MediaFileOperation{} } +func (m *MediaFileOperation) String() string { return proto.CompactTextString(m) } +func (*MediaFileOperation) ProtoMessage() {} +func (*MediaFileOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_service_a9f3cbbb085ac9a6, []int{2} +} +func (m *MediaFileOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MediaFileOperation.Unmarshal(m, b) +} +func (m *MediaFileOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MediaFileOperation.Marshal(b, m, deterministic) +} +func (dst *MediaFileOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_MediaFileOperation.Merge(dst, src) +} +func (m *MediaFileOperation) XXX_Size() int { + return xxx_messageInfo_MediaFileOperation.Size(m) +} +func (m *MediaFileOperation) XXX_DiscardUnknown() { + xxx_messageInfo_MediaFileOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_MediaFileOperation proto.InternalMessageInfo + +type isMediaFileOperation_Operation interface { + isMediaFileOperation_Operation() +} + +type MediaFileOperation_Create struct { + Create *resources.MediaFile `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +func (*MediaFileOperation_Create) isMediaFileOperation_Operation() {} + +func (m *MediaFileOperation) GetOperation() isMediaFileOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *MediaFileOperation) GetCreate() *resources.MediaFile { + if x, ok := m.GetOperation().(*MediaFileOperation_Create); ok { + return x.Create + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*MediaFileOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MediaFileOperation_OneofMarshaler, _MediaFileOperation_OneofUnmarshaler, _MediaFileOperation_OneofSizer, []interface{}{ + (*MediaFileOperation_Create)(nil), + } +} + +func _MediaFileOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MediaFileOperation) + // operation + switch x := m.Operation.(type) { + case *MediaFileOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MediaFileOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _MediaFileOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MediaFileOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.MediaFile) + err := b.DecodeMessage(msg) + m.Operation = &MediaFileOperation_Create{msg} + return true, err + default: + return false, nil + } +} + +func _MediaFileOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MediaFileOperation) + // operation + switch x := m.Operation.(type) { + case *MediaFileOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a media file mutate. +type MutateMediaFilesResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateMediaFileResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMediaFilesResponse) Reset() { *m = MutateMediaFilesResponse{} } +func (m *MutateMediaFilesResponse) String() string { return proto.CompactTextString(m) } +func (*MutateMediaFilesResponse) ProtoMessage() {} +func (*MutateMediaFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_service_a9f3cbbb085ac9a6, []int{3} +} +func (m *MutateMediaFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMediaFilesResponse.Unmarshal(m, b) +} +func (m *MutateMediaFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMediaFilesResponse.Marshal(b, m, deterministic) +} +func (dst *MutateMediaFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMediaFilesResponse.Merge(dst, src) +} +func (m *MutateMediaFilesResponse) XXX_Size() int { + return xxx_messageInfo_MutateMediaFilesResponse.Size(m) +} +func (m *MutateMediaFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMediaFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMediaFilesResponse proto.InternalMessageInfo + +func (m *MutateMediaFilesResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateMediaFilesResponse) GetResults() []*MutateMediaFileResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the media file mutate. +type MutateMediaFileResult struct { + // The resource name returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMediaFileResult) Reset() { *m = MutateMediaFileResult{} } +func (m *MutateMediaFileResult) String() string { return proto.CompactTextString(m) } +func (*MutateMediaFileResult) ProtoMessage() {} +func (*MutateMediaFileResult) Descriptor() ([]byte, []int) { + return fileDescriptor_media_file_service_a9f3cbbb085ac9a6, []int{4} +} +func (m *MutateMediaFileResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMediaFileResult.Unmarshal(m, b) +} +func (m *MutateMediaFileResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMediaFileResult.Marshal(b, m, deterministic) +} +func (dst *MutateMediaFileResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMediaFileResult.Merge(dst, src) +} +func (m *MutateMediaFileResult) XXX_Size() int { + return xxx_messageInfo_MutateMediaFileResult.Size(m) +} +func (m *MutateMediaFileResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMediaFileResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMediaFileResult proto.InternalMessageInfo + +func (m *MutateMediaFileResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetMediaFileRequest)(nil), "google.ads.googleads.v1.services.GetMediaFileRequest") + proto.RegisterType((*MutateMediaFilesRequest)(nil), "google.ads.googleads.v1.services.MutateMediaFilesRequest") + proto.RegisterType((*MediaFileOperation)(nil), "google.ads.googleads.v1.services.MediaFileOperation") + 
proto.RegisterType((*MutateMediaFilesResponse)(nil), "google.ads.googleads.v1.services.MutateMediaFilesResponse") + proto.RegisterType((*MutateMediaFileResult)(nil), "google.ads.googleads.v1.services.MutateMediaFileResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MediaFileServiceClient is the client API for MediaFileService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MediaFileServiceClient interface { + // Returns the requested media file in full detail. + GetMediaFile(ctx context.Context, in *GetMediaFileRequest, opts ...grpc.CallOption) (*resources.MediaFile, error) + // Creates media files. Operation statuses are returned. + MutateMediaFiles(ctx context.Context, in *MutateMediaFilesRequest, opts ...grpc.CallOption) (*MutateMediaFilesResponse, error) +} + +type mediaFileServiceClient struct { + cc *grpc.ClientConn +} + +func NewMediaFileServiceClient(cc *grpc.ClientConn) MediaFileServiceClient { + return &mediaFileServiceClient{cc} +} + +func (c *mediaFileServiceClient) GetMediaFile(ctx context.Context, in *GetMediaFileRequest, opts ...grpc.CallOption) (*resources.MediaFile, error) { + out := new(resources.MediaFile) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MediaFileService/GetMediaFile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *mediaFileServiceClient) MutateMediaFiles(ctx context.Context, in *MutateMediaFilesRequest, opts ...grpc.CallOption) (*MutateMediaFilesResponse, error) { + out := new(MutateMediaFilesResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MediaFileService/MutateMediaFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MediaFileServiceServer is the server API for MediaFileService service. +type MediaFileServiceServer interface { + // Returns the requested media file in full detail. + GetMediaFile(context.Context, *GetMediaFileRequest) (*resources.MediaFile, error) + // Creates media files. Operation statuses are returned. 
+ MutateMediaFiles(context.Context, *MutateMediaFilesRequest) (*MutateMediaFilesResponse, error) +} + +func RegisterMediaFileServiceServer(s *grpc.Server, srv MediaFileServiceServer) { + s.RegisterService(&_MediaFileService_serviceDesc, srv) +} + +func _MediaFileService_GetMediaFile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMediaFileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MediaFileServiceServer).GetMediaFile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MediaFileService/GetMediaFile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MediaFileServiceServer).GetMediaFile(ctx, req.(*GetMediaFileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MediaFileService_MutateMediaFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateMediaFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MediaFileServiceServer).MutateMediaFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MediaFileService/MutateMediaFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MediaFileServiceServer).MutateMediaFiles(ctx, req.(*MutateMediaFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MediaFileService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.MediaFileService", + HandlerType: (*MediaFileServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetMediaFile", + Handler: _MediaFileService_GetMediaFile_Handler, + }, + { + MethodName: "MutateMediaFiles", + Handler: _MediaFileService_MutateMediaFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/media_file_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/media_file_service.proto", fileDescriptor_media_file_service_a9f3cbbb085ac9a6) +} + +var fileDescriptor_media_file_service_a9f3cbbb085ac9a6 = []byte{ + // 646 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0xfd, 0x9c, 0x7c, 0x2a, 0x74, 0x52, 0xa0, 0x9a, 0xaa, 0x6a, 0x14, 0x21, 0x88, 0x4c, 0x25, + 0xaa, 0xa8, 0x1a, 0x2b, 0xa1, 0x08, 0x75, 0xa0, 0x8b, 0x54, 0x22, 0x2d, 0x8b, 0xd2, 0xe2, 0xa2, + 0x2e, 0x50, 0x24, 0x6b, 0x6a, 0x4f, 0xad, 0x91, 0x6c, 0x8f, 0x99, 0x19, 0x07, 0x55, 0x55, 0x37, + 0x2c, 0x78, 0x01, 0xde, 0x80, 0x1d, 0xec, 0x79, 0x04, 0x36, 0x6c, 0xd9, 0xb3, 0x62, 0xc5, 0x33, + 0xb0, 0x40, 0xf6, 0x78, 0xdc, 0xa4, 0x3f, 0x6a, 0xcb, 0xee, 0xfa, 0xde, 0x73, 0xee, 0x3d, 0xf7, + 0xc7, 0x03, 0x56, 0x43, 0xce, 0xc3, 0x88, 0x3a, 0x24, 0x90, 0x8e, 0x36, 0x73, 0x6b, 0xd4, 0x75, + 0x24, 0x15, 0x23, 0xe6, 0x53, 0xe9, 0xc4, 0x34, 0x60, 0xc4, 0x3b, 0x60, 0x11, 0xf5, 0x4a, 0x1f, + 0x4a, 0x05, 0x57, 0x1c, 0xb6, 0x35, 0x1e, 0x91, 0x40, 0xa2, 0x8a, 0x8a, 0x46, 0x5d, 0x64, 0xa8, + 0xad, 0xde, 0x45, 0xc9, 0x05, 0x95, 0x3c, 0x13, 0x93, 0xd9, 0x75, 0xd6, 0xd6, 0x5d, 0xc3, 0x49, + 0x99, 0x43, 0x92, 0x84, 0x2b, 0xa2, 0x18, 0x4f, 0x64, 0x19, 0xbd, 0x57, 0x46, 0x8b, 0xaf, 0xfd, + 0xec, 0xc0, 0x79, 
0x27, 0x48, 0x9a, 0x52, 0x61, 0xe2, 0x0b, 0x65, 0x5c, 0xa4, 0xbe, 0x23, 0x15, + 0x51, 0x59, 0x19, 0xb0, 0x31, 0x98, 0xdb, 0xa0, 0x6a, 0x2b, 0xaf, 0x36, 0x60, 0x11, 0x75, 0xe9, + 0xdb, 0x8c, 0x4a, 0x05, 0x1f, 0x80, 0x5b, 0x46, 0x8b, 0x97, 0x90, 0x98, 0x36, 0xad, 0xb6, 0xb5, + 0x34, 0xed, 0xce, 0x18, 0xe7, 0x4b, 0x12, 0x53, 0xfb, 0xa7, 0x05, 0x16, 0xb6, 0x32, 0x45, 0x14, + 0xad, 0xf8, 0xd2, 0x24, 0xb8, 0x0f, 0x1a, 0x7e, 0x26, 0x15, 0x8f, 0xa9, 0xf0, 0x58, 0x50, 0xd2, + 0x81, 0x71, 0xbd, 0x08, 0xe0, 0x6b, 0x00, 0x78, 0x4a, 0x85, 0xee, 0xa2, 0x59, 0x6b, 0xd7, 0x97, + 0x1a, 0xbd, 0x15, 0x74, 0xd9, 0xe8, 0x50, 0x55, 0x69, 0xdb, 0x90, 0xdd, 0xb1, 0x3c, 0xf0, 0x21, + 0xb8, 0x93, 0x12, 0xa1, 0x18, 0x89, 0xbc, 0x03, 0xc2, 0xa2, 0x4c, 0xd0, 0x66, 0xbd, 0x6d, 0x2d, + 0xdd, 0x74, 0x6f, 0x97, 0xee, 0x81, 0xf6, 0xe6, 0x0d, 0x8e, 0x48, 0xc4, 0x02, 0xa2, 0xa8, 0xc7, + 0x93, 0xe8, 0xb0, 0xf9, 0x7f, 0x01, 0x9b, 0x31, 0xce, 0xed, 0x24, 0x3a, 0xb4, 0x19, 0x80, 0x67, + 0xeb, 0xc1, 0x01, 0x98, 0xf2, 0x05, 0x25, 0x4a, 0x0f, 0xa5, 0xd1, 0x5b, 0xbe, 0x50, 0x75, 0xb5, + 0xce, 0x13, 0xd9, 0x9b, 0xff, 0xb9, 0x25, 0x7b, 0xbd, 0x01, 0xa6, 0x2b, 0xe5, 0xf6, 0x57, 0x0b, + 0x34, 0xcf, 0xce, 0x52, 0xa6, 0x3c, 0x91, 0x14, 0x0e, 0xc0, 0xfc, 0xa9, 0xae, 0x3c, 0x2a, 0x04, + 0x17, 0x45, 0x6f, 0x8d, 0x1e, 0x34, 0x02, 0x44, 0xea, 0xa3, 0xdd, 0x62, 0xbb, 0xee, 0xdc, 0x64, + 0xbf, 0xcf, 0x73, 0x38, 0x7c, 0x05, 0x6e, 0x08, 0x2a, 0xb3, 0x48, 0x99, 0x81, 0x3f, 0xb9, 0xc2, + 0xc0, 0x27, 0x45, 0xb9, 0x05, 0xdf, 0x35, 0x79, 0xec, 0x67, 0x60, 0xfe, 0x5c, 0xc4, 0x95, 0x2e, + 0xa8, 0xf7, 0xa1, 0x0e, 0x66, 0x2b, 0xe2, 0xae, 0x2e, 0x09, 0x3f, 0x5b, 0x60, 0x66, 0xfc, 0x26, + 0xe1, 0xe3, 0xcb, 0x55, 0x9e, 0x73, 0xc3, 0xad, 0x6b, 0xed, 0xc5, 0x5e, 0x79, 0xff, 0xe3, 0xd7, + 0xc7, 0x1a, 0x82, 0xcb, 0xf9, 0x7f, 0x78, 0x34, 0x21, 0x7d, 0xcd, 0x9c, 0xad, 0x74, 0x3a, 0xfa, + 0xc7, 0x2c, 0xd6, 0xe3, 0x74, 0x8e, 0xe1, 0x37, 0x0b, 0xcc, 0x9e, 0x5e, 0x1b, 0x5c, 0xbd, 0xf6, + 0x54, 0xcd, 0x6f, 0xd3, 0xc2, 0xff, 0x42, 0xd5, 0x57, 0x62, 0xe3, 0xa2, 0x83, 0x15, 0xdb, 0xc9, + 0x3b, 0x38, 0x91, 0x7c, 0x34, 0xf6, 0x1f, 0xae, 0x75, 0x8e, 0xc7, 0x1a, 0xc0, 0x71, 0x91, 0x0a, + 0x5b, 0x9d, 0xf5, 0x3f, 0x16, 0x58, 0xf4, 0x79, 0x7c, 0x69, 0xf5, 0xf5, 0xf9, 0xd3, 0xeb, 0xda, + 0xc9, 0x9f, 0x91, 0x1d, 0xeb, 0xcd, 0x66, 0x49, 0x0d, 0x79, 0x44, 0x92, 0x10, 0x71, 0x11, 0x3a, + 0x21, 0x4d, 0x8a, 0x47, 0xc6, 0xbc, 0x71, 0x29, 0x93, 0x17, 0xbf, 0xa7, 0x4f, 0x8d, 0xf1, 0xa9, + 0x56, 0xdf, 0xe8, 0xf7, 0xbf, 0xd4, 0xda, 0x1b, 0x3a, 0x61, 0x3f, 0x90, 0x48, 0x9b, 0xb9, 0xb5, + 0xd7, 0x45, 0x65, 0x61, 0xf9, 0xdd, 0x40, 0x86, 0xfd, 0x40, 0x0e, 0x2b, 0xc8, 0x70, 0xaf, 0x3b, + 0x34, 0x90, 0xdf, 0xb5, 0x45, 0xed, 0xc7, 0xb8, 0x1f, 0x48, 0x8c, 0x2b, 0x10, 0xc6, 0x7b, 0x5d, + 0x8c, 0x0d, 0x6c, 0x7f, 0xaa, 0xd0, 0xf9, 0xe8, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf1, 0x84, + 0xc8, 0x18, 0xf6, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/merchant_center_link_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/merchant_center_link_service.pb.go new file mode 100644 index 0000000..8701e5f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/merchant_center_link_service.pb.go @@ -0,0 +1,635 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
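The media_file_service.pb.go stubs vendored above are only generated types and wiring; as a rough usage sketch (not part of the vendored file), a caller would drive the generated client roughly as below. The dial target, customer ID, and media file contents are placeholders, real Google Ads calls also need OAuth credentials and a developer-token header, and the CustomerId/Operations field names on MutateMediaFilesRequest come from the part of the generated file that precedes this excerpt.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/genproto/googleapis/ads/googleads/v1/resources"
	"google.golang.org/genproto/googleapis/ads/googleads/v1/services"
	"google.golang.org/grpc"
)

func main() {
	// Credentials, the developer-token header, and the real Google Ads endpoint are
	// elided; an insecure placeholder dial stands in for an authenticated connection.
	conn, err := grpc.Dial("localhost:9999", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := services.NewMediaFileServiceClient(conn)

	// The generated oneof wrapper: Create carries the *resources.MediaFile to upload.
	// The MediaFile payload itself (image or audio data) is left empty in this sketch.
	op := &services.MediaFileOperation{
		Operation: &services.MediaFileOperation_Create{Create: &resources.MediaFile{}},
	}

	resp, err := client.MutateMediaFiles(context.Background(), &services.MutateMediaFilesRequest{
		CustomerId: "1234567890", // placeholder customer ID
		Operations: []*services.MediaFileOperation{op},
	})
	if err != nil {
		log.Fatalf("MutateMediaFiles: %v", err)
	}
	for _, r := range resp.GetResults() {
		fmt.Println("created:", r.GetResourceName())
	}
}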
+// source: google/ads/googleads/v1/services/merchant_center_link_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [MerchantCenterLinkService.ListMerchantCenterLinks][google.ads.googleads.v1.services.MerchantCenterLinkService.ListMerchantCenterLinks]. +type ListMerchantCenterLinksRequest struct { + // The ID of the customer onto which to apply the Merchant Center link list + // operation. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMerchantCenterLinksRequest) Reset() { *m = ListMerchantCenterLinksRequest{} } +func (m *ListMerchantCenterLinksRequest) String() string { return proto.CompactTextString(m) } +func (*ListMerchantCenterLinksRequest) ProtoMessage() {} +func (*ListMerchantCenterLinksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{0} +} +func (m *ListMerchantCenterLinksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMerchantCenterLinksRequest.Unmarshal(m, b) +} +func (m *ListMerchantCenterLinksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMerchantCenterLinksRequest.Marshal(b, m, deterministic) +} +func (dst *ListMerchantCenterLinksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMerchantCenterLinksRequest.Merge(dst, src) +} +func (m *ListMerchantCenterLinksRequest) XXX_Size() int { + return xxx_messageInfo_ListMerchantCenterLinksRequest.Size(m) +} +func (m *ListMerchantCenterLinksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListMerchantCenterLinksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMerchantCenterLinksRequest proto.InternalMessageInfo + +func (m *ListMerchantCenterLinksRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +// Response message for [MerchantCenterLinkService.ListMerchantCenterLinks][google.ads.googleads.v1.services.MerchantCenterLinkService.ListMerchantCenterLinks]. 
+type ListMerchantCenterLinksResponse struct { + // Merchant Center links available for the requested customer + MerchantCenterLinks []*resources.MerchantCenterLink `protobuf:"bytes,1,rep,name=merchant_center_links,json=merchantCenterLinks,proto3" json:"merchant_center_links,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMerchantCenterLinksResponse) Reset() { *m = ListMerchantCenterLinksResponse{} } +func (m *ListMerchantCenterLinksResponse) String() string { return proto.CompactTextString(m) } +func (*ListMerchantCenterLinksResponse) ProtoMessage() {} +func (*ListMerchantCenterLinksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{1} +} +func (m *ListMerchantCenterLinksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMerchantCenterLinksResponse.Unmarshal(m, b) +} +func (m *ListMerchantCenterLinksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMerchantCenterLinksResponse.Marshal(b, m, deterministic) +} +func (dst *ListMerchantCenterLinksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMerchantCenterLinksResponse.Merge(dst, src) +} +func (m *ListMerchantCenterLinksResponse) XXX_Size() int { + return xxx_messageInfo_ListMerchantCenterLinksResponse.Size(m) +} +func (m *ListMerchantCenterLinksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListMerchantCenterLinksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMerchantCenterLinksResponse proto.InternalMessageInfo + +func (m *ListMerchantCenterLinksResponse) GetMerchantCenterLinks() []*resources.MerchantCenterLink { + if m != nil { + return m.MerchantCenterLinks + } + return nil +} + +// Request message for [MerchantCenterLinkService.GetMerchantCenterLink][google.ads.googleads.v1.services.MerchantCenterLinkService.GetMerchantCenterLink]. +type GetMerchantCenterLinkRequest struct { + // Resource name of the Merchant Center link. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMerchantCenterLinkRequest) Reset() { *m = GetMerchantCenterLinkRequest{} } +func (m *GetMerchantCenterLinkRequest) String() string { return proto.CompactTextString(m) } +func (*GetMerchantCenterLinkRequest) ProtoMessage() {} +func (*GetMerchantCenterLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{2} +} +func (m *GetMerchantCenterLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMerchantCenterLinkRequest.Unmarshal(m, b) +} +func (m *GetMerchantCenterLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMerchantCenterLinkRequest.Marshal(b, m, deterministic) +} +func (dst *GetMerchantCenterLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMerchantCenterLinkRequest.Merge(dst, src) +} +func (m *GetMerchantCenterLinkRequest) XXX_Size() int { + return xxx_messageInfo_GetMerchantCenterLinkRequest.Size(m) +} +func (m *GetMerchantCenterLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMerchantCenterLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMerchantCenterLinkRequest proto.InternalMessageInfo + +func (m *GetMerchantCenterLinkRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [MerchantCenterLinkService.MutateMerchantCenterLink][google.ads.googleads.v1.services.MerchantCenterLinkService.MutateMerchantCenterLink]. +type MutateMerchantCenterLinkRequest struct { + // The ID of the customer being modified. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The operation to perform on the link + Operation *MerchantCenterLinkOperation `protobuf:"bytes,2,opt,name=operation,proto3" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMerchantCenterLinkRequest) Reset() { *m = MutateMerchantCenterLinkRequest{} } +func (m *MutateMerchantCenterLinkRequest) String() string { return proto.CompactTextString(m) } +func (*MutateMerchantCenterLinkRequest) ProtoMessage() {} +func (*MutateMerchantCenterLinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{3} +} +func (m *MutateMerchantCenterLinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMerchantCenterLinkRequest.Unmarshal(m, b) +} +func (m *MutateMerchantCenterLinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMerchantCenterLinkRequest.Marshal(b, m, deterministic) +} +func (dst *MutateMerchantCenterLinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMerchantCenterLinkRequest.Merge(dst, src) +} +func (m *MutateMerchantCenterLinkRequest) XXX_Size() int { + return xxx_messageInfo_MutateMerchantCenterLinkRequest.Size(m) +} +func (m *MutateMerchantCenterLinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMerchantCenterLinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMerchantCenterLinkRequest proto.InternalMessageInfo + +func (m *MutateMerchantCenterLinkRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateMerchantCenterLinkRequest) GetOperation() *MerchantCenterLinkOperation { + if m != nil { + return m.Operation + } + return nil +} + +// A single update on a Merchant Center link. +type MerchantCenterLinkOperation struct { + // FieldMask that determines which resource fields are modified in an update. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The operation to perform + // + // Types that are valid to be assigned to Operation: + // *MerchantCenterLinkOperation_Update + // *MerchantCenterLinkOperation_Remove + Operation isMerchantCenterLinkOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MerchantCenterLinkOperation) Reset() { *m = MerchantCenterLinkOperation{} } +func (m *MerchantCenterLinkOperation) String() string { return proto.CompactTextString(m) } +func (*MerchantCenterLinkOperation) ProtoMessage() {} +func (*MerchantCenterLinkOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{4} +} +func (m *MerchantCenterLinkOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MerchantCenterLinkOperation.Unmarshal(m, b) +} +func (m *MerchantCenterLinkOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MerchantCenterLinkOperation.Marshal(b, m, deterministic) +} +func (dst *MerchantCenterLinkOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_MerchantCenterLinkOperation.Merge(dst, src) +} +func (m *MerchantCenterLinkOperation) XXX_Size() int { + return xxx_messageInfo_MerchantCenterLinkOperation.Size(m) +} +func (m *MerchantCenterLinkOperation) XXX_DiscardUnknown() { + xxx_messageInfo_MerchantCenterLinkOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_MerchantCenterLinkOperation proto.InternalMessageInfo + +func (m *MerchantCenterLinkOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isMerchantCenterLinkOperation_Operation interface { + isMerchantCenterLinkOperation_Operation() +} + +type MerchantCenterLinkOperation_Update struct { + Update *resources.MerchantCenterLink `protobuf:"bytes,1,opt,name=update,proto3,oneof"` +} + +type MerchantCenterLinkOperation_Remove struct { + Remove string `protobuf:"bytes,2,opt,name=remove,proto3,oneof"` +} + +func (*MerchantCenterLinkOperation_Update) isMerchantCenterLinkOperation_Operation() {} + +func (*MerchantCenterLinkOperation_Remove) isMerchantCenterLinkOperation_Operation() {} + +func (m *MerchantCenterLinkOperation) GetOperation() isMerchantCenterLinkOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *MerchantCenterLinkOperation) GetUpdate() *resources.MerchantCenterLink { + if x, ok := m.GetOperation().(*MerchantCenterLinkOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *MerchantCenterLinkOperation) GetRemove() string { + if x, ok := m.GetOperation().(*MerchantCenterLinkOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*MerchantCenterLinkOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MerchantCenterLinkOperation_OneofMarshaler, _MerchantCenterLinkOperation_OneofUnmarshaler, _MerchantCenterLinkOperation_OneofSizer, []interface{}{ + (*MerchantCenterLinkOperation_Update)(nil), + (*MerchantCenterLinkOperation_Remove)(nil), + } +} + +func _MerchantCenterLinkOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MerchantCenterLinkOperation) + // operation + switch x := m.Operation.(type) { + case *MerchantCenterLinkOperation_Update: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *MerchantCenterLinkOperation_Remove: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("MerchantCenterLinkOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _MerchantCenterLinkOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MerchantCenterLinkOperation) + switch tag { + case 1: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.MerchantCenterLink) + err := b.DecodeMessage(msg) + m.Operation = &MerchantCenterLinkOperation_Update{msg} + return true, err + case 2: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &MerchantCenterLinkOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _MerchantCenterLinkOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MerchantCenterLinkOperation) + // operation + switch x := m.Operation.(type) { + case *MerchantCenterLinkOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *MerchantCenterLinkOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for Merchant Center link mutate. +type MutateMerchantCenterLinkResponse struct { + // Result for the mutate. 
+ Result *MutateMerchantCenterLinkResult `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMerchantCenterLinkResponse) Reset() { *m = MutateMerchantCenterLinkResponse{} } +func (m *MutateMerchantCenterLinkResponse) String() string { return proto.CompactTextString(m) } +func (*MutateMerchantCenterLinkResponse) ProtoMessage() {} +func (*MutateMerchantCenterLinkResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{5} +} +func (m *MutateMerchantCenterLinkResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMerchantCenterLinkResponse.Unmarshal(m, b) +} +func (m *MutateMerchantCenterLinkResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMerchantCenterLinkResponse.Marshal(b, m, deterministic) +} +func (dst *MutateMerchantCenterLinkResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMerchantCenterLinkResponse.Merge(dst, src) +} +func (m *MutateMerchantCenterLinkResponse) XXX_Size() int { + return xxx_messageInfo_MutateMerchantCenterLinkResponse.Size(m) +} +func (m *MutateMerchantCenterLinkResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMerchantCenterLinkResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMerchantCenterLinkResponse proto.InternalMessageInfo + +func (m *MutateMerchantCenterLinkResponse) GetResult() *MutateMerchantCenterLinkResult { + if m != nil { + return m.Result + } + return nil +} + +// The result for the Merchant Center link mutate. +type MutateMerchantCenterLinkResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateMerchantCenterLinkResult) Reset() { *m = MutateMerchantCenterLinkResult{} } +func (m *MutateMerchantCenterLinkResult) String() string { return proto.CompactTextString(m) } +func (*MutateMerchantCenterLinkResult) ProtoMessage() {} +func (*MutateMerchantCenterLinkResult) Descriptor() ([]byte, []int) { + return fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc, []int{6} +} +func (m *MutateMerchantCenterLinkResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateMerchantCenterLinkResult.Unmarshal(m, b) +} +func (m *MutateMerchantCenterLinkResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateMerchantCenterLinkResult.Marshal(b, m, deterministic) +} +func (dst *MutateMerchantCenterLinkResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateMerchantCenterLinkResult.Merge(dst, src) +} +func (m *MutateMerchantCenterLinkResult) XXX_Size() int { + return xxx_messageInfo_MutateMerchantCenterLinkResult.Size(m) +} +func (m *MutateMerchantCenterLinkResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateMerchantCenterLinkResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateMerchantCenterLinkResult proto.InternalMessageInfo + +func (m *MutateMerchantCenterLinkResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*ListMerchantCenterLinksRequest)(nil), "google.ads.googleads.v1.services.ListMerchantCenterLinksRequest") + 
proto.RegisterType((*ListMerchantCenterLinksResponse)(nil), "google.ads.googleads.v1.services.ListMerchantCenterLinksResponse") + proto.RegisterType((*GetMerchantCenterLinkRequest)(nil), "google.ads.googleads.v1.services.GetMerchantCenterLinkRequest") + proto.RegisterType((*MutateMerchantCenterLinkRequest)(nil), "google.ads.googleads.v1.services.MutateMerchantCenterLinkRequest") + proto.RegisterType((*MerchantCenterLinkOperation)(nil), "google.ads.googleads.v1.services.MerchantCenterLinkOperation") + proto.RegisterType((*MutateMerchantCenterLinkResponse)(nil), "google.ads.googleads.v1.services.MutateMerchantCenterLinkResponse") + proto.RegisterType((*MutateMerchantCenterLinkResult)(nil), "google.ads.googleads.v1.services.MutateMerchantCenterLinkResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MerchantCenterLinkServiceClient is the client API for MerchantCenterLinkService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MerchantCenterLinkServiceClient interface { + // Returns Merchant Center links available tor this customer. + ListMerchantCenterLinks(ctx context.Context, in *ListMerchantCenterLinksRequest, opts ...grpc.CallOption) (*ListMerchantCenterLinksResponse, error) + // Returns the Merchant Center link in full detail. + GetMerchantCenterLink(ctx context.Context, in *GetMerchantCenterLinkRequest, opts ...grpc.CallOption) (*resources.MerchantCenterLink, error) + // Updates status or removes a Merchant Center link. + MutateMerchantCenterLink(ctx context.Context, in *MutateMerchantCenterLinkRequest, opts ...grpc.CallOption) (*MutateMerchantCenterLinkResponse, error) +} + +type merchantCenterLinkServiceClient struct { + cc *grpc.ClientConn +} + +func NewMerchantCenterLinkServiceClient(cc *grpc.ClientConn) MerchantCenterLinkServiceClient { + return &merchantCenterLinkServiceClient{cc} +} + +func (c *merchantCenterLinkServiceClient) ListMerchantCenterLinks(ctx context.Context, in *ListMerchantCenterLinksRequest, opts ...grpc.CallOption) (*ListMerchantCenterLinksResponse, error) { + out := new(ListMerchantCenterLinksResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MerchantCenterLinkService/ListMerchantCenterLinks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *merchantCenterLinkServiceClient) GetMerchantCenterLink(ctx context.Context, in *GetMerchantCenterLinkRequest, opts ...grpc.CallOption) (*resources.MerchantCenterLink, error) { + out := new(resources.MerchantCenterLink) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MerchantCenterLinkService/GetMerchantCenterLink", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *merchantCenterLinkServiceClient) MutateMerchantCenterLink(ctx context.Context, in *MutateMerchantCenterLinkRequest, opts ...grpc.CallOption) (*MutateMerchantCenterLinkResponse, error) { + out := new(MutateMerchantCenterLinkResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MerchantCenterLinkService/MutateMerchantCenterLink", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// MerchantCenterLinkServiceServer is the server API for MerchantCenterLinkService service. +type MerchantCenterLinkServiceServer interface { + // Returns Merchant Center links available tor this customer. + ListMerchantCenterLinks(context.Context, *ListMerchantCenterLinksRequest) (*ListMerchantCenterLinksResponse, error) + // Returns the Merchant Center link in full detail. + GetMerchantCenterLink(context.Context, *GetMerchantCenterLinkRequest) (*resources.MerchantCenterLink, error) + // Updates status or removes a Merchant Center link. + MutateMerchantCenterLink(context.Context, *MutateMerchantCenterLinkRequest) (*MutateMerchantCenterLinkResponse, error) +} + +func RegisterMerchantCenterLinkServiceServer(s *grpc.Server, srv MerchantCenterLinkServiceServer) { + s.RegisterService(&_MerchantCenterLinkService_serviceDesc, srv) +} + +func _MerchantCenterLinkService_ListMerchantCenterLinks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListMerchantCenterLinksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MerchantCenterLinkServiceServer).ListMerchantCenterLinks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MerchantCenterLinkService/ListMerchantCenterLinks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MerchantCenterLinkServiceServer).ListMerchantCenterLinks(ctx, req.(*ListMerchantCenterLinksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MerchantCenterLinkService_GetMerchantCenterLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMerchantCenterLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MerchantCenterLinkServiceServer).GetMerchantCenterLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MerchantCenterLinkService/GetMerchantCenterLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MerchantCenterLinkServiceServer).GetMerchantCenterLink(ctx, req.(*GetMerchantCenterLinkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MerchantCenterLinkService_MutateMerchantCenterLink_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateMerchantCenterLinkRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MerchantCenterLinkServiceServer).MutateMerchantCenterLink(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MerchantCenterLinkService/MutateMerchantCenterLink", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MerchantCenterLinkServiceServer).MutateMerchantCenterLink(ctx, req.(*MutateMerchantCenterLinkRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MerchantCenterLinkService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.MerchantCenterLinkService", + HandlerType: (*MerchantCenterLinkServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + 
MethodName: "ListMerchantCenterLinks", + Handler: _MerchantCenterLinkService_ListMerchantCenterLinks_Handler, + }, + { + MethodName: "GetMerchantCenterLink", + Handler: _MerchantCenterLinkService_GetMerchantCenterLink_Handler, + }, + { + MethodName: "MutateMerchantCenterLink", + Handler: _MerchantCenterLinkService_MutateMerchantCenterLink_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/merchant_center_link_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/merchant_center_link_service.proto", fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc) +} + +var fileDescriptor_merchant_center_link_service_b9987cf1a2e346cc = []byte{ + // 679 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcf, 0x6b, 0x13, 0x4f, + 0x14, 0xff, 0x4e, 0xca, 0xb7, 0xd0, 0x89, 0x5e, 0x46, 0x8a, 0x31, 0x96, 0x36, 0xac, 0x3d, 0x94, + 0x1c, 0x66, 0x49, 0x4b, 0x51, 0xb7, 0x46, 0xdc, 0x04, 0x6d, 0x85, 0xd6, 0x96, 0x15, 0x8a, 0x68, + 0x20, 0x4c, 0xb3, 0xd3, 0xb8, 0x34, 0xbb, 0xb3, 0xee, 0xcc, 0xe6, 0x52, 0x7b, 0x11, 0xbc, 0x79, + 0xf3, 0x0f, 0x10, 0x3c, 0x7a, 0xf2, 0xef, 0x10, 0xbc, 0xe8, 0xc9, 0xbb, 0x78, 0xf0, 0xaf, 0x90, + 0x9d, 0x1f, 0x69, 0xa5, 0xbb, 0x49, 0x6d, 0x6e, 0x6f, 0x66, 0xde, 0x7c, 0xde, 0xfb, 0xbc, 0xf7, + 0x79, 0x33, 0xb0, 0xdd, 0x67, 0xac, 0x3f, 0xa0, 0x36, 0xf1, 0xb9, 0xad, 0xcc, 0xcc, 0x1a, 0x36, + 0x6c, 0x4e, 0x93, 0x61, 0xd0, 0xa3, 0xdc, 0x0e, 0x69, 0xd2, 0x7b, 0x49, 0x22, 0xd1, 0xed, 0xd1, + 0x48, 0xd0, 0xa4, 0x3b, 0x08, 0xa2, 0xa3, 0xae, 0x3e, 0xc5, 0x71, 0xc2, 0x04, 0x43, 0x35, 0x75, + 0x13, 0x13, 0x9f, 0xe3, 0x11, 0x08, 0x1e, 0x36, 0xb0, 0x01, 0xa9, 0xde, 0x2b, 0x0a, 0x93, 0x50, + 0xce, 0xd2, 0xa4, 0x28, 0x8e, 0xc2, 0xaf, 0x2e, 0x98, 0xdb, 0x71, 0x60, 0x93, 0x28, 0x62, 0x82, + 0x88, 0x80, 0x45, 0x5c, 0x9f, 0xea, 0xe8, 0xb6, 0x5c, 0x1d, 0xa4, 0x87, 0xf6, 0x61, 0x40, 0x07, + 0x7e, 0x37, 0x24, 0x5c, 0xdf, 0xb7, 0x5c, 0xb8, 0xb8, 0x1d, 0x70, 0xb1, 0xa3, 0x23, 0xb4, 0x65, + 0x80, 0xed, 0x20, 0x3a, 0xe2, 0x1e, 0x7d, 0x95, 0x52, 0x2e, 0xd0, 0x12, 0x2c, 0xf7, 0x52, 0x2e, + 0x58, 0x48, 0x93, 0x6e, 0xe0, 0x57, 0x40, 0x0d, 0xac, 0xcc, 0x79, 0xd0, 0x6c, 0x3d, 0xf6, 0xad, + 0x77, 0x00, 0x2e, 0x15, 0x62, 0xf0, 0x98, 0x45, 0x9c, 0xa2, 0x00, 0xce, 0xe7, 0x91, 0xe0, 0x15, + 0x50, 0x9b, 0x59, 0x29, 0xaf, 0xae, 0xe3, 0xa2, 0x32, 0x8d, 0x8a, 0x80, 0xcf, 0xc3, 0x7b, 0xd7, + 0xc2, 0xf3, 0x21, 0xad, 0x36, 0x5c, 0xd8, 0xa4, 0x39, 0xc9, 0x18, 0x3e, 0xb7, 0xe0, 0x55, 0x03, + 0xda, 0x8d, 0x48, 0x48, 0x35, 0xa3, 0x2b, 0x66, 0xf3, 0x09, 0x09, 0xa9, 0xf5, 0x01, 0xc0, 0xa5, + 0x9d, 0x54, 0x10, 0x41, 0x8b, 0x81, 0x26, 0x15, 0x06, 0xbd, 0x80, 0x73, 0x2c, 0xa6, 0x89, 0xec, + 0x48, 0xa5, 0x54, 0x03, 0x2b, 0xe5, 0xd5, 0x26, 0x9e, 0xa4, 0x87, 0x1c, 0x9e, 0xbb, 0x06, 0xc4, + 0x3b, 0xc5, 0xb3, 0xbe, 0x01, 0x78, 0x73, 0x8c, 0x2b, 0xda, 0x80, 0xe5, 0x34, 0xf6, 0x89, 0xa0, + 0xb2, 0xdb, 0x95, 0x19, 0x19, 0xbe, 0x6a, 0xc2, 0x1b, 0x41, 0xe0, 0x47, 0x99, 0x20, 0x76, 0x08, + 0x3f, 0xf2, 0xa0, 0x72, 0xcf, 0x6c, 0xb4, 0x0b, 0x67, 0xd5, 0x4a, 0xb2, 0xba, 0x6c, 0x7f, 0xb6, + 0xfe, 0xf3, 0x34, 0x0c, 0xaa, 0xc0, 0xd9, 0x84, 0x86, 0x6c, 0x48, 0x65, 0x1d, 0xe6, 0xb2, 0x13, + 0xb5, 0x6e, 0x95, 0xcf, 0x14, 0xc9, 0x7a, 0x0d, 0x6b, 0xc5, 0x55, 0xd7, 0x52, 0x7a, 0x96, 0x41, + 0xf1, 0x74, 0x20, 0x74, 0x49, 0x1f, 0x5c, 0xa0, 0xa4, 0xc5, 0x98, 0xe9, 0x40, 0x78, 0x1a, 0xcf, + 0x7a, 0x08, 0x17, 0xc7, 0x7b, 0x5e, 0x48, 0x3b, 0xab, 0x9f, 0xff, 0x87, 0x37, 0xce, 0x23, 0x3c, + 0x55, 0xc9, 0xa0, 0x1f, 0x00, 
0x5e, 0x2f, 0x98, 0x16, 0x74, 0x01, 0x2a, 0xe3, 0x87, 0xb5, 0xea, + 0x4e, 0x81, 0xa0, 0xea, 0x6b, 0xdd, 0x7d, 0xf3, 0xfd, 0xe7, 0xfb, 0xd2, 0x1a, 0x6a, 0x64, 0x6f, + 0x90, 0x51, 0x33, 0xb7, 0x8f, 0xcf, 0x68, 0xbd, 0x59, 0x3f, 0xb1, 0x73, 0x46, 0x0f, 0x7d, 0x05, + 0x70, 0x3e, 0x77, 0xf6, 0xd0, 0xfd, 0xc9, 0x79, 0x8d, 0x1b, 0xda, 0xea, 0xe5, 0x04, 0x68, 0x35, + 0x25, 0x97, 0xdb, 0x68, 0x3d, 0xe3, 0x72, 0xfc, 0x57, 0xeb, 0x9a, 0xa7, 0xd4, 0xea, 0x79, 0x64, + 0xec, 0xfa, 0x09, 0xfa, 0x05, 0x60, 0xa5, 0x48, 0x11, 0xc8, 0x9d, 0x46, 0x77, 0x8a, 0x55, 0x6b, + 0x2a, 0xe9, 0xaa, 0x76, 0xb5, 0x25, 0xc5, 0xa6, 0x75, 0xe7, 0x9f, 0xdb, 0xe5, 0x84, 0x12, 0xdb, + 0x01, 0xf5, 0xd6, 0xdb, 0x12, 0x5c, 0xee, 0xb1, 0x70, 0x62, 0x3a, 0xad, 0xc5, 0x42, 0x61, 0xef, + 0x65, 0x2f, 0xca, 0x1e, 0x78, 0xbe, 0xa5, 0x31, 0xfa, 0x6c, 0x40, 0xa2, 0x3e, 0x66, 0x49, 0xdf, + 0xee, 0xd3, 0x48, 0xbe, 0x37, 0xe6, 0x7b, 0x8b, 0x03, 0x5e, 0xfc, 0xa9, 0x6e, 0x18, 0xe3, 0x63, + 0x69, 0x66, 0xd3, 0x75, 0x3f, 0x95, 0x6a, 0x9b, 0x0a, 0xd0, 0xf5, 0x39, 0x56, 0x66, 0x66, 0xed, + 0x37, 0xb0, 0x0e, 0xcc, 0xbf, 0x18, 0x97, 0x8e, 0xeb, 0xf3, 0xce, 0xc8, 0xa5, 0xb3, 0xdf, 0xe8, + 0x18, 0x97, 0xdf, 0xa5, 0x65, 0xb5, 0xef, 0x38, 0xae, 0xcf, 0x1d, 0x67, 0xe4, 0xe4, 0x38, 0xfb, + 0x0d, 0xc7, 0x31, 0x6e, 0x07, 0xb3, 0x32, 0xcf, 0xb5, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7e, + 0xe0, 0x79, 0xc5, 0xfb, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_app_category_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_app_category_constant_service.pb.go new file mode 100644 index 0000000..4f98cb9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_app_category_constant_service.pb.go @@ -0,0 +1,177 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/mobile_app_category_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [MobileAppCategoryConstantService.GetMobileAppCategoryConstant][google.ads.googleads.v1.services.MobileAppCategoryConstantService.GetMobileAppCategoryConstant]. +type GetMobileAppCategoryConstantRequest struct { + // Resource name of the mobile app category constant to fetch. 
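The merchant_center_link_service.pb.go stubs above add a mutate RPC whose MerchantCenterLinkOperation oneof carries either an Update (a *resources.MerchantCenterLink plus an update_mask) or a Remove (a resource name). A rough sketch of listing links and removing one, with the dial target, customer ID, and resource name as placeholders and authentication elided as before:

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/genproto/googleapis/ads/googleads/v1/services"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:9999", grpc.WithInsecure()) // auth elided
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := services.NewMerchantCenterLinkServiceClient(conn)
	ctx := context.Background()

	// List the Merchant Center links available for a placeholder customer.
	list, err := client.ListMerchantCenterLinks(ctx, &services.ListMerchantCenterLinksRequest{
		CustomerId: "1234567890",
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, l := range list.GetMerchantCenterLinks() {
		fmt.Println(l)
	}

	// Remove one link via the Remove arm of the operation oneof.
	resp, err := client.MutateMerchantCenterLink(ctx, &services.MutateMerchantCenterLinkRequest{
		CustomerId: "1234567890",
		Operation: &services.MerchantCenterLinkOperation{
			Operation: &services.MerchantCenterLinkOperation_Remove{
				Remove: "customers/1234567890/merchantCenterLinks/987654", // placeholder resource name
			},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("mutated:", resp.GetResult().GetResourceName())
}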
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMobileAppCategoryConstantRequest) Reset() { *m = GetMobileAppCategoryConstantRequest{} } +func (m *GetMobileAppCategoryConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetMobileAppCategoryConstantRequest) ProtoMessage() {} +func (*GetMobileAppCategoryConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mobile_app_category_constant_service_8ab2dafc094f53e2, []int{0} +} +func (m *GetMobileAppCategoryConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMobileAppCategoryConstantRequest.Unmarshal(m, b) +} +func (m *GetMobileAppCategoryConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMobileAppCategoryConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetMobileAppCategoryConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMobileAppCategoryConstantRequest.Merge(dst, src) +} +func (m *GetMobileAppCategoryConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetMobileAppCategoryConstantRequest.Size(m) +} +func (m *GetMobileAppCategoryConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMobileAppCategoryConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMobileAppCategoryConstantRequest proto.InternalMessageInfo + +func (m *GetMobileAppCategoryConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetMobileAppCategoryConstantRequest)(nil), "google.ads.googleads.v1.services.GetMobileAppCategoryConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MobileAppCategoryConstantServiceClient is the client API for MobileAppCategoryConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MobileAppCategoryConstantServiceClient interface { + // Returns the requested mobile app category constant. + GetMobileAppCategoryConstant(ctx context.Context, in *GetMobileAppCategoryConstantRequest, opts ...grpc.CallOption) (*resources.MobileAppCategoryConstant, error) +} + +type mobileAppCategoryConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewMobileAppCategoryConstantServiceClient(cc *grpc.ClientConn) MobileAppCategoryConstantServiceClient { + return &mobileAppCategoryConstantServiceClient{cc} +} + +func (c *mobileAppCategoryConstantServiceClient) GetMobileAppCategoryConstant(ctx context.Context, in *GetMobileAppCategoryConstantRequest, opts ...grpc.CallOption) (*resources.MobileAppCategoryConstant, error) { + out := new(resources.MobileAppCategoryConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MobileAppCategoryConstantService/GetMobileAppCategoryConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MobileAppCategoryConstantServiceServer is the server API for MobileAppCategoryConstantService service. 
+type MobileAppCategoryConstantServiceServer interface { + // Returns the requested mobile app category constant. + GetMobileAppCategoryConstant(context.Context, *GetMobileAppCategoryConstantRequest) (*resources.MobileAppCategoryConstant, error) +} + +func RegisterMobileAppCategoryConstantServiceServer(s *grpc.Server, srv MobileAppCategoryConstantServiceServer) { + s.RegisterService(&_MobileAppCategoryConstantService_serviceDesc, srv) +} + +func _MobileAppCategoryConstantService_GetMobileAppCategoryConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMobileAppCategoryConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MobileAppCategoryConstantServiceServer).GetMobileAppCategoryConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MobileAppCategoryConstantService/GetMobileAppCategoryConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MobileAppCategoryConstantServiceServer).GetMobileAppCategoryConstant(ctx, req.(*GetMobileAppCategoryConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MobileAppCategoryConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.MobileAppCategoryConstantService", + HandlerType: (*MobileAppCategoryConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetMobileAppCategoryConstant", + Handler: _MobileAppCategoryConstantService_GetMobileAppCategoryConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/mobile_app_category_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/mobile_app_category_constant_service.proto", fileDescriptor_mobile_app_category_constant_service_8ab2dafc094f53e2) +} + +var fileDescriptor_mobile_app_category_constant_service_8ab2dafc094f53e2 = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x31, 0x4b, 0xfb, 0x40, + 0x18, 0xc6, 0x49, 0xfe, 0xf0, 0x07, 0x83, 0x2e, 0x99, 0xa4, 0x74, 0x08, 0x6d, 0x05, 0x71, 0xb8, + 0x98, 0xba, 0xc8, 0xa9, 0x43, 0x5a, 0xa5, 0xa2, 0x28, 0xa5, 0x42, 0x07, 0x09, 0x84, 0x6b, 0x72, + 0x84, 0x40, 0x73, 0x77, 0xe6, 0xbd, 0x16, 0x44, 0x5c, 0x3a, 0xbb, 0xf9, 0x0d, 0x1c, 0xfd, 0x28, + 0xae, 0xce, 0x6e, 0x4e, 0x7e, 0x0a, 0x49, 0x2f, 0x17, 0x70, 0x88, 0x71, 0x7b, 0xb8, 0x3c, 0xf9, + 0x3d, 0xf7, 0x3e, 0xf7, 0x5a, 0x97, 0x09, 0xe7, 0xc9, 0x9c, 0xba, 0x24, 0x06, 0x57, 0xc9, 0x42, + 0x2d, 0x3d, 0x17, 0x68, 0xbe, 0x4c, 0x23, 0x0a, 0x6e, 0xc6, 0x67, 0xe9, 0x9c, 0x86, 0x44, 0x88, + 0x30, 0x22, 0x92, 0x26, 0x3c, 0xbf, 0x0f, 0x23, 0xce, 0x40, 0x12, 0x26, 0xc3, 0xd2, 0x85, 0x44, + 0xce, 0x25, 0xb7, 0x1d, 0x45, 0x40, 0x24, 0x06, 0x54, 0xc1, 0xd0, 0xd2, 0x43, 0x1a, 0xd6, 0x3a, + 0xad, 0x8b, 0xcb, 0x29, 0xf0, 0x45, 0xde, 0x94, 0xa7, 0x72, 0x5a, 0x6d, 0x4d, 0x11, 0xa9, 0x4b, + 0x18, 0xe3, 0x92, 0xc8, 0x94, 0x33, 0x50, 0x5f, 0x3b, 0x17, 0x56, 0x77, 0x44, 0xe5, 0xd5, 0x1a, + 0xe3, 0x0b, 0x31, 0x2c, 0x21, 0xc3, 0x92, 0x31, 0xa1, 0x77, 0x0b, 0x0a, 0xd2, 0xee, 0x5a, 0x5b, + 0x3a, 0x34, 0x64, 0x24, 0xa3, 0xdb, 0x86, 0x63, 0xec, 0x6e, 0x4c, 0x36, 0xf5, 0xe1, 0x35, 0xc9, + 0x68, 0x7f, 0x65, 0x5a, 0x4e, 0x2d, 0xe9, 0x46, 0x4d, 0x65, 0x7f, 0x18, 0x56, 0xfb, 0xb7, 0x44, + 0xfb, 0x0c, 0x35, 0x15, 0x83, 
0xfe, 0x70, 0xe3, 0xd6, 0x71, 0x2d, 0xa6, 0x6a, 0x0f, 0xd5, 0x42, + 0x3a, 0x87, 0xab, 0xf7, 0xcf, 0x67, 0xb3, 0x6f, 0xef, 0x17, 0x75, 0x3f, 0xfc, 0x18, 0xfd, 0x24, + 0xab, 0xfb, 0x0b, 0xdc, 0xbd, 0xc7, 0xc1, 0x93, 0x69, 0xf5, 0x22, 0x9e, 0x35, 0x0e, 0x31, 0xd8, + 0x69, 0xaa, 0x6a, 0x5c, 0x3c, 0xd0, 0xd8, 0xb8, 0x3d, 0x2f, 0x51, 0x09, 0x9f, 0x13, 0x96, 0x20, + 0x9e, 0x27, 0x6e, 0x42, 0xd9, 0xfa, 0xf9, 0xf4, 0x5a, 0x88, 0x14, 0xea, 0x97, 0xf2, 0x48, 0x8b, + 0x17, 0xf3, 0xdf, 0xc8, 0xf7, 0x5f, 0x4d, 0x67, 0xa4, 0x80, 0x7e, 0x0c, 0x48, 0xc9, 0x42, 0x4d, + 0x3d, 0x54, 0x06, 0xc3, 0x9b, 0xb6, 0x04, 0x7e, 0x0c, 0x41, 0x65, 0x09, 0xa6, 0x5e, 0xa0, 0x2d, + 0x5f, 0x66, 0x4f, 0x9d, 0x63, 0xec, 0xc7, 0x80, 0x71, 0x65, 0xc2, 0x78, 0xea, 0x61, 0xac, 0x6d, + 0xb3, 0xff, 0xeb, 0x7b, 0x1e, 0x7c, 0x07, 0x00, 0x00, 0xff, 0xff, 0xfd, 0xfe, 0xdd, 0xd9, 0x3b, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_device_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_device_constant_service.pb.go new file mode 100644 index 0000000..743ec61 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mobile_device_constant_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/mobile_device_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [MobileDeviceConstantService.GetMobileDeviceConstant][google.ads.googleads.v1.services.MobileDeviceConstantService.GetMobileDeviceConstant]. +type GetMobileDeviceConstantRequest struct { + // Resource name of the mobile device to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMobileDeviceConstantRequest) Reset() { *m = GetMobileDeviceConstantRequest{} } +func (m *GetMobileDeviceConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetMobileDeviceConstantRequest) ProtoMessage() {} +func (*GetMobileDeviceConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mobile_device_constant_service_eac6f777d9ebb18e, []int{0} +} +func (m *GetMobileDeviceConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMobileDeviceConstantRequest.Unmarshal(m, b) +} +func (m *GetMobileDeviceConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMobileDeviceConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetMobileDeviceConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMobileDeviceConstantRequest.Merge(dst, src) +} +func (m *GetMobileDeviceConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetMobileDeviceConstantRequest.Size(m) +} +func (m *GetMobileDeviceConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMobileDeviceConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMobileDeviceConstantRequest proto.InternalMessageInfo + +func (m *GetMobileDeviceConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetMobileDeviceConstantRequest)(nil), "google.ads.googleads.v1.services.GetMobileDeviceConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MobileDeviceConstantServiceClient is the client API for MobileDeviceConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MobileDeviceConstantServiceClient interface { + // Returns the requested mobile device constant in full detail. + GetMobileDeviceConstant(ctx context.Context, in *GetMobileDeviceConstantRequest, opts ...grpc.CallOption) (*resources.MobileDeviceConstant, error) +} + +type mobileDeviceConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewMobileDeviceConstantServiceClient(cc *grpc.ClientConn) MobileDeviceConstantServiceClient { + return &mobileDeviceConstantServiceClient{cc} +} + +func (c *mobileDeviceConstantServiceClient) GetMobileDeviceConstant(ctx context.Context, in *GetMobileDeviceConstantRequest, opts ...grpc.CallOption) (*resources.MobileDeviceConstant, error) { + out := new(resources.MobileDeviceConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MobileDeviceConstantService/GetMobileDeviceConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MobileDeviceConstantServiceServer is the server API for MobileDeviceConstantService service. +type MobileDeviceConstantServiceServer interface { + // Returns the requested mobile device constant in full detail. 
+ GetMobileDeviceConstant(context.Context, *GetMobileDeviceConstantRequest) (*resources.MobileDeviceConstant, error) +} + +func RegisterMobileDeviceConstantServiceServer(s *grpc.Server, srv MobileDeviceConstantServiceServer) { + s.RegisterService(&_MobileDeviceConstantService_serviceDesc, srv) +} + +func _MobileDeviceConstantService_GetMobileDeviceConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMobileDeviceConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MobileDeviceConstantServiceServer).GetMobileDeviceConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MobileDeviceConstantService/GetMobileDeviceConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MobileDeviceConstantServiceServer).GetMobileDeviceConstant(ctx, req.(*GetMobileDeviceConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MobileDeviceConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.MobileDeviceConstantService", + HandlerType: (*MobileDeviceConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetMobileDeviceConstant", + Handler: _MobileDeviceConstantService_GetMobileDeviceConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/mobile_device_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/mobile_device_constant_service.proto", fileDescriptor_mobile_device_constant_service_eac6f777d9ebb18e) +} + +var fileDescriptor_mobile_device_constant_service_eac6f777d9ebb18e = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xbf, 0x4a, 0xc3, 0x40, + 0x18, 0x27, 0x11, 0x04, 0x83, 0x2e, 0x59, 0x94, 0x2a, 0x12, 0x6a, 0x07, 0x51, 0xbc, 0x23, 0x76, + 0x10, 0x4e, 0x14, 0x53, 0x2d, 0x75, 0x51, 0x4a, 0x85, 0x0e, 0x12, 0x08, 0xd7, 0xe6, 0x08, 0x81, + 0xe6, 0xae, 0xe6, 0xbb, 0x76, 0x11, 0x17, 0x17, 0x1f, 0xc0, 0x37, 0x70, 0xf4, 0x4d, 0x74, 0xf5, + 0x15, 0x5c, 0xf4, 0x29, 0x24, 0xb9, 0x5c, 0x40, 0x48, 0xda, 0xed, 0x97, 0xbb, 0xdf, 0x9f, 0xef, + 0x7e, 0x5f, 0xac, 0x6e, 0x24, 0x44, 0x34, 0x61, 0x98, 0x86, 0x80, 0x15, 0xcc, 0xd0, 0xdc, 0xc5, + 0xc0, 0xd2, 0x79, 0x3c, 0x66, 0x80, 0x13, 0x31, 0x8a, 0x27, 0x2c, 0x08, 0x59, 0xf6, 0x19, 0x8c, + 0x05, 0x07, 0x49, 0xb9, 0x0c, 0x8a, 0x7b, 0x34, 0x4d, 0x85, 0x14, 0xb6, 0xa3, 0xb4, 0x88, 0x86, + 0x80, 0x4a, 0x1b, 0x34, 0x77, 0x91, 0xb6, 0x69, 0x9c, 0xd7, 0x05, 0xa5, 0x0c, 0xc4, 0x2c, 0xad, + 0x4f, 0x52, 0x09, 0x8d, 0x1d, 0xad, 0x9f, 0xc6, 0x98, 0x72, 0x2e, 0x24, 0x95, 0xb1, 0xe0, 0xa0, + 0x6e, 0x9b, 0x5d, 0x6b, 0xb7, 0xc7, 0xe4, 0x4d, 0x6e, 0x70, 0x95, 0xeb, 0x2f, 0x0b, 0xf9, 0x80, + 0x3d, 0xcc, 0x18, 0x48, 0x7b, 0xcf, 0xda, 0xd0, 0x49, 0x01, 0xa7, 0x09, 0xdb, 0x32, 0x1c, 0x63, + 0x7f, 0x6d, 0xb0, 0xae, 0x0f, 0x6f, 0x69, 0xc2, 0x8e, 0x7f, 0x0c, 0x6b, 0xbb, 0xca, 0xe4, 0x4e, + 0xbd, 0xc2, 0xfe, 0x30, 0xac, 0xcd, 0x9a, 0x1c, 0xfb, 0x02, 0x2d, 0xeb, 0x00, 0x2d, 0x1e, 0xb1, + 0x71, 0x52, 0xeb, 0x50, 0x76, 0x84, 0xaa, 0xf4, 0xcd, 0xf6, 0xf3, 0xd7, 0xf7, 0xab, 0x79, 0x64, + 0x1f, 0x66, 0x7d, 0x3e, 0xfe, 0x7b, 0xe6, 0x59, 0x52, 0x21, 0x00, 0x7c, 0xf0, 0xd4, 0x79, 0x31, + 0xad, 0xd6, 0x58, 0x24, 0x4b, 0xa7, 0xee, 0x38, 0x0b, 0x1a, 0xe9, 0x67, 0xed, 
0xf7, 0x8d, 0xfb, + 0xeb, 0xc2, 0x25, 0x12, 0x13, 0xca, 0x23, 0x24, 0xd2, 0x08, 0x47, 0x8c, 0xe7, 0xbb, 0xd1, 0xdb, + 0x9e, 0xc6, 0x50, 0xff, 0x97, 0x9d, 0x6a, 0xf0, 0x66, 0xae, 0xf4, 0x3c, 0xef, 0xdd, 0x74, 0x7a, + 0xca, 0xd0, 0x0b, 0x01, 0x29, 0x98, 0xa1, 0xa1, 0x8b, 0x8a, 0x60, 0xf8, 0xd4, 0x14, 0xdf, 0x0b, + 0xc1, 0x2f, 0x29, 0xfe, 0xd0, 0xf5, 0x35, 0xe5, 0xd7, 0x6c, 0xa9, 0x73, 0x42, 0xbc, 0x10, 0x08, + 0x29, 0x49, 0x84, 0x0c, 0x5d, 0x42, 0x34, 0x6d, 0xb4, 0x9a, 0xcf, 0xd9, 0xfe, 0x0b, 0x00, 0x00, + 0xff, 0xff, 0xc5, 0x86, 0x86, 0x6e, 0x0c, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mutate_job_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mutate_job_service.pb.go new file mode 100644 index 0000000..db3e3e6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/mutate_job_service.pb.go @@ -0,0 +1,783 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/mutate_job_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [MutateJobService.CreateMutateJobRequest][] +type CreateMutateJobRequest struct { + // The ID of the customer for which to create a mutate job. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateMutateJobRequest) Reset() { *m = CreateMutateJobRequest{} } +func (m *CreateMutateJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateMutateJobRequest) ProtoMessage() {} +func (*CreateMutateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{0} +} +func (m *CreateMutateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateMutateJobRequest.Unmarshal(m, b) +} +func (m *CreateMutateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateMutateJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateMutateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateMutateJobRequest.Merge(dst, src) +} +func (m *CreateMutateJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateMutateJobRequest.Size(m) +} +func (m *CreateMutateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateMutateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateMutateJobRequest proto.InternalMessageInfo + +func (m *CreateMutateJobRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +// Response message for [MutateJobService.CreateMutateJobResponse][] +type CreateMutateJobResponse struct { + // The resource name of the MutateJob. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateMutateJobResponse) Reset() { *m = CreateMutateJobResponse{} } +func (m *CreateMutateJobResponse) String() string { return proto.CompactTextString(m) } +func (*CreateMutateJobResponse) ProtoMessage() {} +func (*CreateMutateJobResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{1} +} +func (m *CreateMutateJobResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateMutateJobResponse.Unmarshal(m, b) +} +func (m *CreateMutateJobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateMutateJobResponse.Marshal(b, m, deterministic) +} +func (dst *CreateMutateJobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateMutateJobResponse.Merge(dst, src) +} +func (m *CreateMutateJobResponse) XXX_Size() int { + return xxx_messageInfo_CreateMutateJobResponse.Size(m) +} +func (m *CreateMutateJobResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateMutateJobResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateMutateJobResponse proto.InternalMessageInfo + +func (m *CreateMutateJobResponse) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [MutateJobService.GetMutateJob][google.ads.googleads.v1.services.MutateJobService.GetMutateJob] +type GetMutateJobRequest struct { + // The resource name of the MutateJob to get. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMutateJobRequest) Reset() { *m = GetMutateJobRequest{} } +func (m *GetMutateJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetMutateJobRequest) ProtoMessage() {} +func (*GetMutateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{2} +} +func (m *GetMutateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMutateJobRequest.Unmarshal(m, b) +} +func (m *GetMutateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMutateJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetMutateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMutateJobRequest.Merge(dst, src) +} +func (m *GetMutateJobRequest) XXX_Size() int { + return xxx_messageInfo_GetMutateJobRequest.Size(m) +} +func (m *GetMutateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMutateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMutateJobRequest proto.InternalMessageInfo + +func (m *GetMutateJobRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [MutateJobService.RunMutateJob][google.ads.googleads.v1.services.MutateJobService.RunMutateJob] +type RunMutateJobRequest struct { + // The resource name of the MutateJob to run. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunMutateJobRequest) Reset() { *m = RunMutateJobRequest{} } +func (m *RunMutateJobRequest) String() string { return proto.CompactTextString(m) } +func (*RunMutateJobRequest) ProtoMessage() {} +func (*RunMutateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{3} +} +func (m *RunMutateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunMutateJobRequest.Unmarshal(m, b) +} +func (m *RunMutateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunMutateJobRequest.Marshal(b, m, deterministic) +} +func (dst *RunMutateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunMutateJobRequest.Merge(dst, src) +} +func (m *RunMutateJobRequest) XXX_Size() int { + return xxx_messageInfo_RunMutateJobRequest.Size(m) +} +func (m *RunMutateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunMutateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunMutateJobRequest proto.InternalMessageInfo + +func (m *RunMutateJobRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [MutateJobService.AddMutateJobOperations][google.ads.googleads.v1.services.MutateJobService.AddMutateJobOperations] +type AddMutateJobOperationsRequest struct { + // The resource name of the MutateJob. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // A token used to enforce sequencing. + // + // The first AddMutateJobOperations request for a MutateJob should not set + // sequence_token. 
Subsequent requests must set sequence_token to the value of + // next_sequence_token received in the previous AddMutateJobOperations + // response. + SequenceToken string `protobuf:"bytes,2,opt,name=sequence_token,json=sequenceToken,proto3" json:"sequence_token,omitempty"` + // The list of mutates being added. + // + // Operations can use negative integers as temp ids to signify dependencies + // between entities created in this MutateJob. For example, a customer with + // id = 1234 can create a campaign and an ad group in that same campaign by + // creating a campaign in the first operation with the resource name + // explicitly set to "customers/1234/campaigns/-1", and creating an ad group + // in the second operation with the campaign field also set to + // "customers/1234/campaigns/-1". + MutateOperations []*MutateOperation `protobuf:"bytes,3,rep,name=mutate_operations,json=mutateOperations,proto3" json:"mutate_operations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddMutateJobOperationsRequest) Reset() { *m = AddMutateJobOperationsRequest{} } +func (m *AddMutateJobOperationsRequest) String() string { return proto.CompactTextString(m) } +func (*AddMutateJobOperationsRequest) ProtoMessage() {} +func (*AddMutateJobOperationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{4} +} +func (m *AddMutateJobOperationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddMutateJobOperationsRequest.Unmarshal(m, b) +} +func (m *AddMutateJobOperationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddMutateJobOperationsRequest.Marshal(b, m, deterministic) +} +func (dst *AddMutateJobOperationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddMutateJobOperationsRequest.Merge(dst, src) +} +func (m *AddMutateJobOperationsRequest) XXX_Size() int { + return xxx_messageInfo_AddMutateJobOperationsRequest.Size(m) +} +func (m *AddMutateJobOperationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddMutateJobOperationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddMutateJobOperationsRequest proto.InternalMessageInfo + +func (m *AddMutateJobOperationsRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AddMutateJobOperationsRequest) GetSequenceToken() string { + if m != nil { + return m.SequenceToken + } + return "" +} + +func (m *AddMutateJobOperationsRequest) GetMutateOperations() []*MutateOperation { + if m != nil { + return m.MutateOperations + } + return nil +} + +// Response message for [MutateJobService.AddMutateJobOperations][google.ads.googleads.v1.services.MutateJobService.AddMutateJobOperations] +type AddMutateJobOperationsResponse struct { + // The total number of operations added so far for this job. + TotalOperations int64 `protobuf:"varint,1,opt,name=total_operations,json=totalOperations,proto3" json:"total_operations,omitempty"` + // The sequence token to be used when calling AddMutateJobOperations again if + // more operations need to be added. The next AddMutateJobOperations request + // must set the sequence_token field to the value of this field. 
+ NextSequenceToken string `protobuf:"bytes,2,opt,name=next_sequence_token,json=nextSequenceToken,proto3" json:"next_sequence_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddMutateJobOperationsResponse) Reset() { *m = AddMutateJobOperationsResponse{} } +func (m *AddMutateJobOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*AddMutateJobOperationsResponse) ProtoMessage() {} +func (*AddMutateJobOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{5} +} +func (m *AddMutateJobOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddMutateJobOperationsResponse.Unmarshal(m, b) +} +func (m *AddMutateJobOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddMutateJobOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *AddMutateJobOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddMutateJobOperationsResponse.Merge(dst, src) +} +func (m *AddMutateJobOperationsResponse) XXX_Size() int { + return xxx_messageInfo_AddMutateJobOperationsResponse.Size(m) +} +func (m *AddMutateJobOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddMutateJobOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddMutateJobOperationsResponse proto.InternalMessageInfo + +func (m *AddMutateJobOperationsResponse) GetTotalOperations() int64 { + if m != nil { + return m.TotalOperations + } + return 0 +} + +func (m *AddMutateJobOperationsResponse) GetNextSequenceToken() string { + if m != nil { + return m.NextSequenceToken + } + return "" +} + +// Request message for [MutateJobService.ListMutateJobResults][google.ads.googleads.v1.services.MutateJobService.ListMutateJobResults]. +type ListMutateJobResultsRequest struct { + // The resource name of the MutateJob whose results are being listed. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Token of the page to retrieve. If not specified, the first + // page of results will be returned. Use the value obtained from + // `next_page_token` in the previous response in order to request + // the next page of results. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Number of elements to retrieve in a single page. + // When a page request is too large, the server may decide to + // further limit the number of returned resources. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMutateJobResultsRequest) Reset() { *m = ListMutateJobResultsRequest{} } +func (m *ListMutateJobResultsRequest) String() string { return proto.CompactTextString(m) } +func (*ListMutateJobResultsRequest) ProtoMessage() {} +func (*ListMutateJobResultsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{6} +} +func (m *ListMutateJobResultsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMutateJobResultsRequest.Unmarshal(m, b) +} +func (m *ListMutateJobResultsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMutateJobResultsRequest.Marshal(b, m, deterministic) +} +func (dst *ListMutateJobResultsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMutateJobResultsRequest.Merge(dst, src) +} +func (m *ListMutateJobResultsRequest) XXX_Size() int { + return xxx_messageInfo_ListMutateJobResultsRequest.Size(m) +} +func (m *ListMutateJobResultsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListMutateJobResultsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMutateJobResultsRequest proto.InternalMessageInfo + +func (m *ListMutateJobResultsRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ListMutateJobResultsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListMutateJobResultsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for [MutateJobService.ListMutateJobResults][google.ads.googleads.v1.services.MutateJobService.ListMutateJobResults]. +type ListMutateJobResultsResponse struct { + // The list of rows that matched the query. + Results []*MutateJobResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Pagination token used to retrieve the next page of results. + // Pass the content of this string as the `page_token` attribute of + // the next request. `next_page_token` is not returned for the last + // page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMutateJobResultsResponse) Reset() { *m = ListMutateJobResultsResponse{} } +func (m *ListMutateJobResultsResponse) String() string { return proto.CompactTextString(m) } +func (*ListMutateJobResultsResponse) ProtoMessage() {} +func (*ListMutateJobResultsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{7} +} +func (m *ListMutateJobResultsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMutateJobResultsResponse.Unmarshal(m, b) +} +func (m *ListMutateJobResultsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMutateJobResultsResponse.Marshal(b, m, deterministic) +} +func (dst *ListMutateJobResultsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMutateJobResultsResponse.Merge(dst, src) +} +func (m *ListMutateJobResultsResponse) XXX_Size() int { + return xxx_messageInfo_ListMutateJobResultsResponse.Size(m) +} +func (m *ListMutateJobResultsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListMutateJobResultsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMutateJobResultsResponse proto.InternalMessageInfo + +func (m *ListMutateJobResultsResponse) GetResults() []*MutateJobResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *ListMutateJobResultsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// MutateJob result. +type MutateJobResult struct { + // Index of the mutate operation. + OperationIndex int64 `protobuf:"varint,1,opt,name=operation_index,json=operationIndex,proto3" json:"operation_index,omitempty"` + // Response for the mutate. + // May be empty if errors occurred. + MutateOperationResponse *MutateOperationResponse `protobuf:"bytes,2,opt,name=mutate_operation_response,json=mutateOperationResponse,proto3" json:"mutate_operation_response,omitempty"` + // Details of the errors when processing the operation. 
+ Status *status.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateJobResult) Reset() { *m = MutateJobResult{} } +func (m *MutateJobResult) String() string { return proto.CompactTextString(m) } +func (*MutateJobResult) ProtoMessage() {} +func (*MutateJobResult) Descriptor() ([]byte, []int) { + return fileDescriptor_mutate_job_service_ffc52ee77e0c9b27, []int{8} +} +func (m *MutateJobResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateJobResult.Unmarshal(m, b) +} +func (m *MutateJobResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateJobResult.Marshal(b, m, deterministic) +} +func (dst *MutateJobResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateJobResult.Merge(dst, src) +} +func (m *MutateJobResult) XXX_Size() int { + return xxx_messageInfo_MutateJobResult.Size(m) +} +func (m *MutateJobResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateJobResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateJobResult proto.InternalMessageInfo + +func (m *MutateJobResult) GetOperationIndex() int64 { + if m != nil { + return m.OperationIndex + } + return 0 +} + +func (m *MutateJobResult) GetMutateOperationResponse() *MutateOperationResponse { + if m != nil { + return m.MutateOperationResponse + } + return nil +} + +func (m *MutateJobResult) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func init() { + proto.RegisterType((*CreateMutateJobRequest)(nil), "google.ads.googleads.v1.services.CreateMutateJobRequest") + proto.RegisterType((*CreateMutateJobResponse)(nil), "google.ads.googleads.v1.services.CreateMutateJobResponse") + proto.RegisterType((*GetMutateJobRequest)(nil), "google.ads.googleads.v1.services.GetMutateJobRequest") + proto.RegisterType((*RunMutateJobRequest)(nil), "google.ads.googleads.v1.services.RunMutateJobRequest") + proto.RegisterType((*AddMutateJobOperationsRequest)(nil), "google.ads.googleads.v1.services.AddMutateJobOperationsRequest") + proto.RegisterType((*AddMutateJobOperationsResponse)(nil), "google.ads.googleads.v1.services.AddMutateJobOperationsResponse") + proto.RegisterType((*ListMutateJobResultsRequest)(nil), "google.ads.googleads.v1.services.ListMutateJobResultsRequest") + proto.RegisterType((*ListMutateJobResultsResponse)(nil), "google.ads.googleads.v1.services.ListMutateJobResultsResponse") + proto.RegisterType((*MutateJobResult)(nil), "google.ads.googleads.v1.services.MutateJobResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MutateJobServiceClient is the client API for MutateJobService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MutateJobServiceClient interface { + // Creates a mutate job. + CreateMutateJob(ctx context.Context, in *CreateMutateJobRequest, opts ...grpc.CallOption) (*CreateMutateJobResponse, error) + // Returns the mutate job. + GetMutateJob(ctx context.Context, in *GetMutateJobRequest, opts ...grpc.CallOption) (*resources.MutateJob, error) + // Returns the results of the mutate job. 
The job must be done. + // Supports standard list paging. + ListMutateJobResults(ctx context.Context, in *ListMutateJobResultsRequest, opts ...grpc.CallOption) (*ListMutateJobResultsResponse, error) + // Runs the mutate job. + // + // The Operation.metadata field type is MutateJobMetadata. When finished, the + // long running operation will not contain errors or a response. Instead, use + // ListMutateJobResults to get the results of the job. + RunMutateJob(ctx context.Context, in *RunMutateJobRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Add operations to the mutate job. + AddMutateJobOperations(ctx context.Context, in *AddMutateJobOperationsRequest, opts ...grpc.CallOption) (*AddMutateJobOperationsResponse, error) +} + +type mutateJobServiceClient struct { + cc *grpc.ClientConn +} + +func NewMutateJobServiceClient(cc *grpc.ClientConn) MutateJobServiceClient { + return &mutateJobServiceClient{cc} +} + +func (c *mutateJobServiceClient) CreateMutateJob(ctx context.Context, in *CreateMutateJobRequest, opts ...grpc.CallOption) (*CreateMutateJobResponse, error) { + out := new(CreateMutateJobResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MutateJobService/CreateMutateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *mutateJobServiceClient) GetMutateJob(ctx context.Context, in *GetMutateJobRequest, opts ...grpc.CallOption) (*resources.MutateJob, error) { + out := new(resources.MutateJob) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MutateJobService/GetMutateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *mutateJobServiceClient) ListMutateJobResults(ctx context.Context, in *ListMutateJobResultsRequest, opts ...grpc.CallOption) (*ListMutateJobResultsResponse, error) { + out := new(ListMutateJobResultsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MutateJobService/ListMutateJobResults", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *mutateJobServiceClient) RunMutateJob(ctx context.Context, in *RunMutateJobRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MutateJobService/RunMutateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *mutateJobServiceClient) AddMutateJobOperations(ctx context.Context, in *AddMutateJobOperationsRequest, opts ...grpc.CallOption) (*AddMutateJobOperationsResponse, error) { + out := new(AddMutateJobOperationsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.MutateJobService/AddMutateJobOperations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MutateJobServiceServer is the server API for MutateJobService service. +type MutateJobServiceServer interface { + // Creates a mutate job. + CreateMutateJob(context.Context, *CreateMutateJobRequest) (*CreateMutateJobResponse, error) + // Returns the mutate job. + GetMutateJob(context.Context, *GetMutateJobRequest) (*resources.MutateJob, error) + // Returns the results of the mutate job. The job must be done. + // Supports standard list paging. + ListMutateJobResults(context.Context, *ListMutateJobResultsRequest) (*ListMutateJobResultsResponse, error) + // Runs the mutate job. + // + // The Operation.metadata field type is MutateJobMetadata. 
When finished, the + // long running operation will not contain errors or a response. Instead, use + // ListMutateJobResults to get the results of the job. + RunMutateJob(context.Context, *RunMutateJobRequest) (*longrunning.Operation, error) + // Add operations to the mutate job. + AddMutateJobOperations(context.Context, *AddMutateJobOperationsRequest) (*AddMutateJobOperationsResponse, error) +} + +func RegisterMutateJobServiceServer(s *grpc.Server, srv MutateJobServiceServer) { + s.RegisterService(&_MutateJobService_serviceDesc, srv) +} + +func _MutateJobService_CreateMutateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateMutateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MutateJobServiceServer).CreateMutateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MutateJobService/CreateMutateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MutateJobServiceServer).CreateMutateJob(ctx, req.(*CreateMutateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MutateJobService_GetMutateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMutateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MutateJobServiceServer).GetMutateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MutateJobService/GetMutateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MutateJobServiceServer).GetMutateJob(ctx, req.(*GetMutateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MutateJobService_ListMutateJobResults_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListMutateJobResultsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MutateJobServiceServer).ListMutateJobResults(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MutateJobService/ListMutateJobResults", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MutateJobServiceServer).ListMutateJobResults(ctx, req.(*ListMutateJobResultsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MutateJobService_RunMutateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunMutateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MutateJobServiceServer).RunMutateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MutateJobService/RunMutateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MutateJobServiceServer).RunMutateJob(ctx, req.(*RunMutateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MutateJobService_AddMutateJobOperations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, 
interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddMutateJobOperationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MutateJobServiceServer).AddMutateJobOperations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.MutateJobService/AddMutateJobOperations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MutateJobServiceServer).AddMutateJobOperations(ctx, req.(*AddMutateJobOperationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MutateJobService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.MutateJobService", + HandlerType: (*MutateJobServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateMutateJob", + Handler: _MutateJobService_CreateMutateJob_Handler, + }, + { + MethodName: "GetMutateJob", + Handler: _MutateJobService_GetMutateJob_Handler, + }, + { + MethodName: "ListMutateJobResults", + Handler: _MutateJobService_ListMutateJobResults_Handler, + }, + { + MethodName: "RunMutateJob", + Handler: _MutateJobService_RunMutateJob_Handler, + }, + { + MethodName: "AddMutateJobOperations", + Handler: _MutateJobService_AddMutateJobOperations_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/mutate_job_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/mutate_job_service.proto", fileDescriptor_mutate_job_service_ffc52ee77e0c9b27) +} + +var fileDescriptor_mutate_job_service_ffc52ee77e0c9b27 = []byte{ + // 867 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0x4f, 0x6f, 0xe3, 0x44, + 0x14, 0xd7, 0x24, 0x62, 0x97, 0x9d, 0xb4, 0x9b, 0xee, 0x2c, 0x6c, 0x83, 0x77, 0x0b, 0x91, 0x77, + 0x81, 0x12, 0xad, 0x6c, 0x12, 0x76, 0xa5, 0xcd, 0xac, 0xba, 0x6d, 0x8a, 0x50, 0x69, 0xf9, 0x57, + 0x39, 0xa8, 0x07, 0x14, 0x61, 0x39, 0xf1, 0xc8, 0x32, 0x24, 0x33, 0xc6, 0x33, 0xae, 0xaa, 0x56, + 0xe5, 0xc0, 0x57, 0xe8, 0x27, 0x80, 0x1b, 0x7c, 0x0c, 0x8e, 0x70, 0xe4, 0xc0, 0x89, 0x0b, 0x42, + 0x1c, 0xf8, 0x0c, 0x1c, 0x56, 0xb6, 0x67, 0x1c, 0x27, 0x75, 0x9a, 0x26, 0xb7, 0xc9, 0x6f, 0xde, + 0xef, 0xcd, 0xef, 0xf7, 0xe6, 0xe5, 0x79, 0x60, 0xdb, 0x63, 0xcc, 0x1b, 0x12, 0xd3, 0x71, 0xb9, + 0x99, 0x2e, 0xe3, 0xd5, 0x71, 0xd3, 0xe4, 0x24, 0x3c, 0xf6, 0x07, 0x84, 0x9b, 0xa3, 0x48, 0x38, + 0x82, 0xd8, 0xdf, 0xb0, 0xbe, 0x2d, 0x31, 0x23, 0x08, 0x99, 0x60, 0xa8, 0x9e, 0xc6, 0x1b, 0x8e, + 0xcb, 0x8d, 0x8c, 0x6a, 0x1c, 0x37, 0x0d, 0x45, 0xd5, 0x5a, 0xb3, 0x92, 0x87, 0x84, 0xb3, 0x28, + 0x9c, 0xcc, 0x9e, 0x66, 0xd5, 0xe6, 0x0b, 0x4a, 0x51, 0xdb, 0x71, 0xf9, 0xa4, 0x20, 0xed, 0x81, + 0xa2, 0x06, 0xbe, 0xe9, 0x50, 0xca, 0x84, 0x23, 0x7c, 0x46, 0xb9, 0xdc, 0x7d, 0x28, 0x77, 0x87, + 0x8c, 0x7a, 0x61, 0x44, 0xa9, 0x4f, 0x3d, 0x93, 0x05, 0x24, 0x9c, 0x08, 0x5a, 0x97, 0x41, 0x61, + 0x30, 0x30, 0xb9, 0x70, 0x44, 0x24, 0x37, 0xf4, 0x36, 0xbc, 0xf7, 0x61, 0x48, 0x1c, 0x41, 0x3e, + 0x4b, 0x04, 0x1f, 0xb0, 0xbe, 0x45, 0xbe, 0x8b, 0x08, 0x17, 0xe8, 0x2d, 0x58, 0x19, 0x44, 0x5c, + 0xb0, 0x11, 0x09, 0x6d, 0xdf, 0xad, 0x81, 0x3a, 0xd8, 0xbc, 0x65, 0x41, 0x05, 0xed, 0xbb, 0xfa, + 0x0b, 0xb8, 0x7e, 0x89, 0xca, 0x03, 0x46, 0x39, 0x41, 0x0f, 0xe1, 0xaa, 0x2a, 0x85, 0x4d, 0x9d, + 0x11, 0x91, 0xec, 0x15, 0x05, 0x7e, 0xee, 0x8c, 0x88, 0x8e, 0xe1, 0xdd, 0x3d, 0x22, 0x2e, 0x9d, + 0x7b, 0x5d, 0xae, 0x15, 0xd1, 0xe5, 0xb8, 0xbf, 0x03, 0xb8, 0xd1, 
0x71, 0xdd, 0x8c, 0xfc, 0x45, + 0x56, 0xac, 0x45, 0xd2, 0xa0, 0xb7, 0xe1, 0x6d, 0x1e, 0xc7, 0xd3, 0x01, 0xb1, 0x05, 0xfb, 0x96, + 0xd0, 0x5a, 0x29, 0x89, 0x5a, 0x55, 0xe8, 0x97, 0x31, 0x88, 0xbe, 0x86, 0x77, 0x64, 0x2f, 0x8c, + 0x2f, 0xa5, 0x56, 0xae, 0x97, 0x37, 0x2b, 0xad, 0xa6, 0x31, 0xaf, 0xd3, 0x8c, 0x54, 0x64, 0xa6, + 0xd0, 0x5a, 0x1b, 0x4d, 0x02, 0x5c, 0x3f, 0x83, 0x6f, 0xce, 0x32, 0x23, 0x2f, 0xe3, 0x3d, 0xb8, + 0x26, 0x98, 0x70, 0x86, 0x79, 0x01, 0xb1, 0xa1, 0xb2, 0x55, 0x4d, 0xf0, 0x31, 0x05, 0x19, 0xf0, + 0x2e, 0x25, 0x27, 0xc2, 0x2e, 0x34, 0x76, 0x27, 0xde, 0xea, 0xe6, 0xcd, 0xe9, 0xdf, 0xc3, 0xfb, + 0x9f, 0xfa, 0x3c, 0x7f, 0x87, 0x3c, 0x1a, 0x8a, 0xc5, 0xea, 0xb8, 0x01, 0x61, 0xe0, 0x78, 0x93, + 0x47, 0xdd, 0x8a, 0x91, 0xb4, 0x7e, 0xf7, 0x61, 0xf2, 0xc3, 0xe6, 0xfe, 0x29, 0xa9, 0x95, 0xeb, + 0x60, 0xf3, 0x15, 0xeb, 0xd5, 0x18, 0xe8, 0xfa, 0xa7, 0x44, 0xbf, 0x00, 0xf0, 0x41, 0xb1, 0x00, + 0xe9, 0xfd, 0x13, 0x78, 0x33, 0x4c, 0xa1, 0x1a, 0x58, 0xac, 0xe6, 0x59, 0x32, 0x4b, 0x65, 0x40, + 0xef, 0xc0, 0x6a, 0x52, 0x9d, 0x4b, 0x72, 0x57, 0x63, 0xf8, 0x50, 0x49, 0xd6, 0xff, 0x02, 0xb0, + 0x3a, 0x95, 0x04, 0xbd, 0x0b, 0xab, 0x59, 0xf9, 0x6d, 0x9f, 0xba, 0xe4, 0x44, 0xde, 0xc1, 0xed, + 0x0c, 0xde, 0x8f, 0x51, 0x14, 0xc1, 0x37, 0xa6, 0xfb, 0xc5, 0x0e, 0xa5, 0x9d, 0xe4, 0xb8, 0x4a, + 0xab, 0xbd, 0x78, 0xdf, 0xc8, 0x04, 0xd6, 0xfa, 0xa8, 0x78, 0x03, 0x35, 0xe0, 0x8d, 0x74, 0x2e, + 0x24, 0x35, 0xae, 0xb4, 0x90, 0x3a, 0x23, 0x0c, 0x06, 0x46, 0x37, 0xd9, 0xb1, 0x64, 0x44, 0xeb, + 0xdf, 0x9b, 0x70, 0x2d, 0xf3, 0xd7, 0x4d, 0x8f, 0x44, 0xbf, 0x02, 0x58, 0x9d, 0x1a, 0x07, 0xe8, + 0xd9, 0x7c, 0xa1, 0xc5, 0xc3, 0x47, 0x6b, 0x2f, 0xc1, 0x4c, 0x9d, 0xe8, 0xf8, 0x87, 0x3f, 0xfe, + 0xb9, 0x28, 0x3d, 0xd1, 0xcd, 0x78, 0xb8, 0xaa, 0x71, 0xc5, 0xcd, 0xb3, 0xdc, 0x30, 0xdb, 0x6a, + 0x9c, 0xcb, 0x01, 0x7d, 0xc0, 0xfa, 0x1c, 0x0f, 0x92, 0x4c, 0x18, 0x34, 0xd0, 0xcf, 0x00, 0xae, + 0xe4, 0x67, 0x12, 0x7a, 0x3a, 0x5f, 0x47, 0xc1, 0x0c, 0xd3, 0x1e, 0xcf, 0xa4, 0x65, 0x5f, 0x88, + 0x71, 0x9b, 0xe9, 0x4f, 0x12, 0xc5, 0x06, 0x7a, 0x1c, 0x2b, 0x3e, 0x9b, 0xf8, 0xc7, 0x6c, 0x8d, + 0x0d, 0x34, 0x72, 0x92, 0xcd, 0xc6, 0x39, 0xfa, 0x13, 0xc0, 0xd7, 0x8a, 0x7a, 0x1f, 0x6d, 0xcd, + 0xd7, 0x7c, 0xc5, 0x9f, 0x56, 0x7b, 0xb1, 0x2c, 0x5d, 0xd6, 0x7f, 0x27, 0x71, 0x83, 0xd1, 0xb3, + 0x45, 0xdc, 0xe0, 0xa1, 0xcf, 0x85, 0x32, 0xf0, 0x23, 0x80, 0x2b, 0xf9, 0xe9, 0x7e, 0x9d, 0x5b, + 0x28, 0xf8, 0x1a, 0x68, 0x1b, 0x8a, 0x96, 0xfb, 0x34, 0x1a, 0x59, 0xeb, 0xeb, 0xcf, 0x13, 0xa1, + 0x4f, 0xf5, 0xf7, 0x17, 0x12, 0x1a, 0x46, 0x34, 0xee, 0x94, 0xbf, 0x01, 0xbc, 0x57, 0x3c, 0x77, + 0xd1, 0xf6, 0x7c, 0xb5, 0x57, 0x7e, 0x7e, 0xb4, 0x9d, 0xe5, 0x13, 0xc8, 0x3b, 0xf8, 0x28, 0xb1, + 0xb6, 0xad, 0xe3, 0x85, 0xac, 0x39, 0xae, 0x3b, 0xce, 0x85, 0x41, 0x63, 0xf7, 0x7f, 0x00, 0x1f, + 0x0d, 0xd8, 0x68, 0xae, 0x9c, 0xdd, 0xd7, 0xa7, 0xc7, 0xc1, 0x61, 0xfc, 0xb8, 0x38, 0x04, 0x5f, + 0x7d, 0x2c, 0xa9, 0x1e, 0x1b, 0x3a, 0xd4, 0x33, 0x58, 0xe8, 0x99, 0x1e, 0xa1, 0xc9, 0xd3, 0x43, + 0xbd, 0x82, 0x02, 0x9f, 0xcf, 0x7e, 0x14, 0x3d, 0x57, 0x8b, 0x9f, 0x4a, 0xe5, 0xbd, 0x4e, 0xe7, + 0x97, 0x52, 0x7d, 0x2f, 0x4d, 0xd8, 0x71, 0xb9, 0x91, 0x2e, 0xe3, 0xd5, 0x51, 0xd3, 0x90, 0x07, + 0xf3, 0xdf, 0x54, 0x48, 0xaf, 0xe3, 0xf2, 0x5e, 0x16, 0xd2, 0x3b, 0x6a, 0xf6, 0x54, 0xc8, 0x7f, + 0xa5, 0x47, 0x29, 0x8e, 0x71, 0xc7, 0xe5, 0x18, 0x67, 0x41, 0x18, 0x1f, 0x35, 0x31, 0x56, 0x61, + 0xfd, 0x1b, 0x89, 0xce, 0x0f, 0x5e, 0x06, 0x00, 0x00, 0xff, 0xff, 0xa4, 0x3d, 0xcf, 0xf4, 0x4c, + 0x0a, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/operating_system_version_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/operating_system_version_constant_service.pb.go new file mode 100644 index 0000000..a90cab4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/operating_system_version_constant_service.pb.go @@ -0,0 +1,179 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/operating_system_version_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [OperatingSystemVersionConstantService.GetOperatingSystemVersionConstant][google.ads.googleads.v1.services.OperatingSystemVersionConstantService.GetOperatingSystemVersionConstant]. +type GetOperatingSystemVersionConstantRequest struct { + // Resource name of the OS version to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOperatingSystemVersionConstantRequest) Reset() { + *m = GetOperatingSystemVersionConstantRequest{} +} +func (m *GetOperatingSystemVersionConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetOperatingSystemVersionConstantRequest) ProtoMessage() {} +func (*GetOperatingSystemVersionConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operating_system_version_constant_service_2677ec2344d0da1f, []int{0} +} +func (m *GetOperatingSystemVersionConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOperatingSystemVersionConstantRequest.Unmarshal(m, b) +} +func (m *GetOperatingSystemVersionConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOperatingSystemVersionConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetOperatingSystemVersionConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOperatingSystemVersionConstantRequest.Merge(dst, src) +} +func (m *GetOperatingSystemVersionConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetOperatingSystemVersionConstantRequest.Size(m) +} +func (m *GetOperatingSystemVersionConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOperatingSystemVersionConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOperatingSystemVersionConstantRequest proto.InternalMessageInfo + +func (m *GetOperatingSystemVersionConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetOperatingSystemVersionConstantRequest)(nil), "google.ads.googleads.v1.services.GetOperatingSystemVersionConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// OperatingSystemVersionConstantServiceClient is the client API for OperatingSystemVersionConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OperatingSystemVersionConstantServiceClient interface { + // Returns the requested OS version constant in full detail. + GetOperatingSystemVersionConstant(ctx context.Context, in *GetOperatingSystemVersionConstantRequest, opts ...grpc.CallOption) (*resources.OperatingSystemVersionConstant, error) +} + +type operatingSystemVersionConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewOperatingSystemVersionConstantServiceClient(cc *grpc.ClientConn) OperatingSystemVersionConstantServiceClient { + return &operatingSystemVersionConstantServiceClient{cc} +} + +func (c *operatingSystemVersionConstantServiceClient) GetOperatingSystemVersionConstant(ctx context.Context, in *GetOperatingSystemVersionConstantRequest, opts ...grpc.CallOption) (*resources.OperatingSystemVersionConstant, error) { + out := new(resources.OperatingSystemVersionConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.OperatingSystemVersionConstantService/GetOperatingSystemVersionConstant", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// OperatingSystemVersionConstantServiceServer is the server API for OperatingSystemVersionConstantService service. +type OperatingSystemVersionConstantServiceServer interface { + // Returns the requested OS version constant in full detail. + GetOperatingSystemVersionConstant(context.Context, *GetOperatingSystemVersionConstantRequest) (*resources.OperatingSystemVersionConstant, error) +} + +func RegisterOperatingSystemVersionConstantServiceServer(s *grpc.Server, srv OperatingSystemVersionConstantServiceServer) { + s.RegisterService(&_OperatingSystemVersionConstantService_serviceDesc, srv) +} + +func _OperatingSystemVersionConstantService_GetOperatingSystemVersionConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOperatingSystemVersionConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperatingSystemVersionConstantServiceServer).GetOperatingSystemVersionConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.OperatingSystemVersionConstantService/GetOperatingSystemVersionConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperatingSystemVersionConstantServiceServer).GetOperatingSystemVersionConstant(ctx, req.(*GetOperatingSystemVersionConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _OperatingSystemVersionConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.OperatingSystemVersionConstantService", + HandlerType: (*OperatingSystemVersionConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetOperatingSystemVersionConstant", + Handler: _OperatingSystemVersionConstantService_GetOperatingSystemVersionConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/operating_system_version_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/operating_system_version_constant_service.proto", fileDescriptor_operating_system_version_constant_service_2677ec2344d0da1f) +} + +var fileDescriptor_operating_system_version_constant_service_2677ec2344d0da1f = []byte{ + // 376 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x4a, 0xf3, 0x40, + 0x14, 0xc5, 0x49, 0x3e, 0xf8, 0xc0, 0xa0, 0x9b, 0xac, 0xa4, 0xb8, 0xa8, 0xb5, 0x42, 0xe9, 0x62, + 0x42, 0x14, 0x11, 0x46, 0xba, 0x48, 0x5d, 0x54, 0x5d, 0xd8, 0xd2, 0x42, 0x16, 0x12, 0x08, 0x63, + 0x33, 0x84, 0x40, 0x33, 0x37, 0xe6, 0x4e, 0x03, 0x22, 0x6e, 0x7c, 0x03, 0x71, 0xe9, 0xce, 0xa5, + 0x8f, 0xe2, 0xd6, 0x57, 0x70, 0xe5, 0xce, 0x37, 0x90, 0x74, 0x32, 0x01, 0x17, 0xfd, 0xb3, 0x3b, + 0x4c, 0x4e, 0x7e, 0x67, 0xee, 0xb9, 0x63, 0x8d, 0x62, 0x80, 0x78, 0xc6, 0x1d, 0x16, 0xa1, 0xa3, + 0x64, 0xa9, 0x0a, 0xd7, 0x41, 0x9e, 0x17, 0xc9, 0x94, 0xa3, 0x03, 0x19, 0xcf, 0x99, 0x4c, 0x44, + 0x1c, 0xe2, 0x3d, 0x4a, 0x9e, 0x86, 0x05, 0xcf, 0x31, 0x01, 0x11, 0x4e, 0x41, 0xa0, 0x64, 0x42, + 0x86, 0x95, 0x95, 0x64, 0x39, 0x48, 0xb0, 0x9b, 0x0a, 0x43, 0x58, 0x84, 0xa4, 0x26, 0x92, 0xc2, + 0x25, 0x9a, 0xd8, 0xb8, 0x5c, 0x96, 0x99, 0x73, 0x84, 0x79, 0xbe, 0x51, 0xa8, 0x0a, 0x6b, 0xec, + 0x69, 0x54, 0x96, 0x38, 0x4c, 0x08, 0x90, 0x4c, 0x26, 0x20, 0x50, 0x7d, 0x6d, 0x0d, 0xad, 0xce, + 
0x80, 0xcb, 0xa1, 0x66, 0x4d, 0x16, 0x28, 0x5f, 0x91, 0xce, 0x2b, 0xd0, 0x98, 0xdf, 0xcd, 0x39, + 0x4a, 0xfb, 0xc0, 0xda, 0xd1, 0xf1, 0xa1, 0x60, 0x29, 0xdf, 0x35, 0x9a, 0x46, 0x67, 0x6b, 0xbc, + 0xad, 0x0f, 0xaf, 0x59, 0xca, 0x8f, 0x5e, 0x4d, 0xeb, 0x70, 0x35, 0x6e, 0xa2, 0x86, 0xb4, 0x7f, + 0x0c, 0x6b, 0x7f, 0x6d, 0xb6, 0x7d, 0x45, 0xd6, 0x95, 0x45, 0x36, 0x1d, 0xa0, 0xe1, 0x2d, 0x65, + 0xd5, 0xb5, 0x92, 0xd5, 0xa4, 0x56, 0xef, 0xe9, 0xf3, 0xeb, 0xc5, 0x3c, 0xb5, 0x4f, 0xca, 0x65, + 0x3c, 0xfc, 0xa9, 0xa3, 0x07, 0x2b, 0x7f, 0x45, 0xa7, 0xfb, 0xd8, 0x7f, 0x36, 0xad, 0xf6, 0x14, + 0xd2, 0xb5, 0x33, 0xf5, 0xbb, 0x1b, 0x75, 0x38, 0x2a, 0x77, 0x38, 0x32, 0x6e, 0x2e, 0x2a, 0x5e, + 0x0c, 0x33, 0x26, 0x62, 0x02, 0x79, 0xec, 0xc4, 0x5c, 0x2c, 0x36, 0xac, 0x9f, 0x4f, 0x96, 0xe0, + 0xf2, 0x17, 0x7c, 0xa6, 0xc5, 0x9b, 0xf9, 0x6f, 0xe0, 0x79, 0xef, 0x66, 0x73, 0xa0, 0x80, 0x5e, + 0x84, 0x44, 0xc9, 0x52, 0xf9, 0x2e, 0xa9, 0x82, 0xf1, 0x43, 0x5b, 0x02, 0x2f, 0xc2, 0xa0, 0xb6, + 0x04, 0xbe, 0x1b, 0x68, 0xcb, 0xb7, 0xd9, 0x56, 0xe7, 0x94, 0x7a, 0x11, 0x52, 0x5a, 0x9b, 0x28, + 0xf5, 0x5d, 0x4a, 0xb5, 0xed, 0xf6, 0xff, 0xe2, 0x9e, 0xc7, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, + 0xcc, 0x3c, 0xee, 0x41, 0x68, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/paid_organic_search_term_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/paid_organic_search_term_view_service.pb.go new file mode 100644 index 0000000..9a74a88 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/paid_organic_search_term_view_service.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/paid_organic_search_term_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [PaidOrganicSearchTermViewService.GetPaidOrganicSearchTermView][google.ads.googleads.v1.services.PaidOrganicSearchTermViewService.GetPaidOrganicSearchTermView]. +type GetPaidOrganicSearchTermViewRequest struct { + // The resource name of the paid organic search term view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPaidOrganicSearchTermViewRequest) Reset() { *m = GetPaidOrganicSearchTermViewRequest{} } +func (m *GetPaidOrganicSearchTermViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetPaidOrganicSearchTermViewRequest) ProtoMessage() {} +func (*GetPaidOrganicSearchTermViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_paid_organic_search_term_view_service_30d5905997917b5b, []int{0} +} +func (m *GetPaidOrganicSearchTermViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPaidOrganicSearchTermViewRequest.Unmarshal(m, b) +} +func (m *GetPaidOrganicSearchTermViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPaidOrganicSearchTermViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetPaidOrganicSearchTermViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPaidOrganicSearchTermViewRequest.Merge(dst, src) +} +func (m *GetPaidOrganicSearchTermViewRequest) XXX_Size() int { + return xxx_messageInfo_GetPaidOrganicSearchTermViewRequest.Size(m) +} +func (m *GetPaidOrganicSearchTermViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPaidOrganicSearchTermViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPaidOrganicSearchTermViewRequest proto.InternalMessageInfo + +func (m *GetPaidOrganicSearchTermViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetPaidOrganicSearchTermViewRequest)(nil), "google.ads.googleads.v1.services.GetPaidOrganicSearchTermViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PaidOrganicSearchTermViewServiceClient is the client API for PaidOrganicSearchTermViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PaidOrganicSearchTermViewServiceClient interface { + // Returns the requested paid organic search term view in full detail. + GetPaidOrganicSearchTermView(ctx context.Context, in *GetPaidOrganicSearchTermViewRequest, opts ...grpc.CallOption) (*resources.PaidOrganicSearchTermView, error) +} + +type paidOrganicSearchTermViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewPaidOrganicSearchTermViewServiceClient(cc *grpc.ClientConn) PaidOrganicSearchTermViewServiceClient { + return &paidOrganicSearchTermViewServiceClient{cc} +} + +func (c *paidOrganicSearchTermViewServiceClient) GetPaidOrganicSearchTermView(ctx context.Context, in *GetPaidOrganicSearchTermViewRequest, opts ...grpc.CallOption) (*resources.PaidOrganicSearchTermView, error) { + out := new(resources.PaidOrganicSearchTermView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.PaidOrganicSearchTermViewService/GetPaidOrganicSearchTermView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PaidOrganicSearchTermViewServiceServer is the server API for PaidOrganicSearchTermViewService service. 
+type PaidOrganicSearchTermViewServiceServer interface { + // Returns the requested paid organic search term view in full detail. + GetPaidOrganicSearchTermView(context.Context, *GetPaidOrganicSearchTermViewRequest) (*resources.PaidOrganicSearchTermView, error) +} + +func RegisterPaidOrganicSearchTermViewServiceServer(s *grpc.Server, srv PaidOrganicSearchTermViewServiceServer) { + s.RegisterService(&_PaidOrganicSearchTermViewService_serviceDesc, srv) +} + +func _PaidOrganicSearchTermViewService_GetPaidOrganicSearchTermView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPaidOrganicSearchTermViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PaidOrganicSearchTermViewServiceServer).GetPaidOrganicSearchTermView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.PaidOrganicSearchTermViewService/GetPaidOrganicSearchTermView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PaidOrganicSearchTermViewServiceServer).GetPaidOrganicSearchTermView(ctx, req.(*GetPaidOrganicSearchTermViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PaidOrganicSearchTermViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.PaidOrganicSearchTermViewService", + HandlerType: (*PaidOrganicSearchTermViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetPaidOrganicSearchTermView", + Handler: _PaidOrganicSearchTermViewService_GetPaidOrganicSearchTermView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/paid_organic_search_term_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/paid_organic_search_term_view_service.proto", fileDescriptor_paid_organic_search_term_view_service_30d5905997917b5b) +} + +var fileDescriptor_paid_organic_search_term_view_service_30d5905997917b5b = []byte{ + // 389 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x41, 0x8b, 0xd3, 0x40, + 0x18, 0x86, 0x49, 0x04, 0xc1, 0xa0, 0x97, 0x9c, 0xa4, 0xf4, 0x10, 0xda, 0x0a, 0xd2, 0xc3, 0x0c, + 0xd1, 0xdb, 0x58, 0x85, 0x14, 0x4b, 0x45, 0x44, 0x4b, 0x2b, 0x39, 0x48, 0x20, 0x8c, 0xc9, 0x47, + 0x1c, 0x68, 0x32, 0x71, 0xbe, 0x34, 0x3d, 0x88, 0x17, 0xcf, 0xe2, 0xc5, 0x7f, 0xe0, 0xd1, 0x9f, + 0xb2, 0xd7, 0xfd, 0x0b, 0x7b, 0xd9, 0xfd, 0x15, 0x4b, 0x32, 0x9d, 0xc0, 0xc2, 0x66, 0xb3, 0xb7, + 0x97, 0x99, 0x97, 0xe7, 0x9d, 0xef, 0xfd, 0xc6, 0xf9, 0x90, 0x49, 0x99, 0xed, 0x81, 0xf2, 0x14, + 0xa9, 0x96, 0x8d, 0xaa, 0x7d, 0x8a, 0xa0, 0x6a, 0x91, 0x00, 0xd2, 0x92, 0x8b, 0x34, 0x96, 0x2a, + 0xe3, 0x85, 0x48, 0x62, 0x04, 0xae, 0x92, 0x6f, 0x71, 0x05, 0x2a, 0x8f, 0x6b, 0x01, 0xc7, 0xf8, + 0x64, 0x23, 0xa5, 0x92, 0x95, 0x74, 0x3d, 0x8d, 0x20, 0x3c, 0x45, 0xd2, 0xd1, 0x48, 0xed, 0x13, + 0x43, 0x1b, 0xad, 0xfa, 0xf2, 0x14, 0xa0, 0x3c, 0xa8, 0xc1, 0x40, 0x1d, 0x34, 0x1a, 0x1b, 0x4c, + 0x29, 0x28, 0x2f, 0x0a, 0x59, 0xf1, 0x4a, 0xc8, 0x02, 0xf5, 0xed, 0xe4, 0xbd, 0x33, 0x5d, 0x43, + 0xb5, 0xe1, 0x22, 0xfd, 0xa4, 0x31, 0xbb, 0x96, 0xf2, 0x19, 0x54, 0x1e, 0x0a, 0x38, 0x6e, 0xe1, + 0xfb, 0x01, 0xb0, 0x72, 0xa7, 0xce, 0x13, 0x93, 0x1a, 0x17, 0x3c, 0x87, 0xa7, 0x96, 0x67, 0x3d, + 0x7f, 0xb4, 0x7d, 0x6c, 0x0e, 0x3f, 0xf2, 0x1c, 0x5e, 0xfc, 0xb1, 0x1d, 0xaf, 0x97, 0xb4, 0xd3, + 0x63, 
0xb9, 0x97, 0x96, 0x33, 0xbe, 0x2b, 0xd1, 0x5d, 0x91, 0xa1, 0x66, 0xc8, 0x3d, 0x5e, 0x3c, + 0x5a, 0xf4, 0x62, 0xba, 0xfa, 0x48, 0x2f, 0x64, 0xf2, 0xf6, 0xd7, 0xf9, 0xc5, 0x5f, 0xfb, 0x8d, + 0xbb, 0x68, 0xfa, 0xfe, 0x71, 0x63, 0xf4, 0xd7, 0xc9, 0x01, 0x2b, 0x99, 0x83, 0x42, 0x3a, 0x6f, + 0x17, 0x70, 0x2b, 0x01, 0xe9, 0xfc, 0xe7, 0xf2, 0xb7, 0xed, 0xcc, 0x12, 0x99, 0x0f, 0x0e, 0xb4, + 0x7c, 0x36, 0x54, 0xdb, 0xa6, 0x59, 0xd6, 0xc6, 0xfa, 0xf2, 0xee, 0x84, 0xca, 0xe4, 0x9e, 0x17, + 0x19, 0x91, 0x2a, 0xa3, 0x19, 0x14, 0xed, 0x2a, 0xcd, 0x1f, 0x29, 0x05, 0xf6, 0x7f, 0xd1, 0x57, + 0x46, 0xfc, 0xb3, 0x1f, 0xac, 0x83, 0xe0, 0xbf, 0xed, 0xad, 0x35, 0x30, 0x48, 0x91, 0x68, 0xd9, + 0xa8, 0xd0, 0x27, 0xa7, 0x60, 0x3c, 0x33, 0x96, 0x28, 0x48, 0x31, 0xea, 0x2c, 0x51, 0xe8, 0x47, + 0xc6, 0x72, 0x65, 0xcf, 0xf4, 0x39, 0x63, 0x41, 0x8a, 0x8c, 0x75, 0x26, 0xc6, 0x42, 0x9f, 0x31, + 0x63, 0xfb, 0xfa, 0xb0, 0x7d, 0xe7, 0xcb, 0xeb, 0x00, 0x00, 0x00, 0xff, 0xff, 0x55, 0x18, 0xd6, + 0x92, 0x49, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/parental_status_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/parental_status_view_service.pb.go new file mode 100644 index 0000000..6421139 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/parental_status_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/parental_status_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ParentalStatusViewService.GetParentalStatusView][google.ads.googleads.v1.services.ParentalStatusViewService.GetParentalStatusView]. +type GetParentalStatusViewRequest struct { + // The resource name of the parental status view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetParentalStatusViewRequest) Reset() { *m = GetParentalStatusViewRequest{} } +func (m *GetParentalStatusViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetParentalStatusViewRequest) ProtoMessage() {} +func (*GetParentalStatusViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_parental_status_view_service_d789c0781624ed91, []int{0} +} +func (m *GetParentalStatusViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetParentalStatusViewRequest.Unmarshal(m, b) +} +func (m *GetParentalStatusViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetParentalStatusViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetParentalStatusViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetParentalStatusViewRequest.Merge(dst, src) +} +func (m *GetParentalStatusViewRequest) XXX_Size() int { + return xxx_messageInfo_GetParentalStatusViewRequest.Size(m) +} +func (m *GetParentalStatusViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetParentalStatusViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetParentalStatusViewRequest proto.InternalMessageInfo + +func (m *GetParentalStatusViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetParentalStatusViewRequest)(nil), "google.ads.googleads.v1.services.GetParentalStatusViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ParentalStatusViewServiceClient is the client API for ParentalStatusViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ParentalStatusViewServiceClient interface { + // Returns the requested parental status view in full detail. + GetParentalStatusView(ctx context.Context, in *GetParentalStatusViewRequest, opts ...grpc.CallOption) (*resources.ParentalStatusView, error) +} + +type parentalStatusViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewParentalStatusViewServiceClient(cc *grpc.ClientConn) ParentalStatusViewServiceClient { + return &parentalStatusViewServiceClient{cc} +} + +func (c *parentalStatusViewServiceClient) GetParentalStatusView(ctx context.Context, in *GetParentalStatusViewRequest, opts ...grpc.CallOption) (*resources.ParentalStatusView, error) { + out := new(resources.ParentalStatusView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ParentalStatusViewService/GetParentalStatusView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ParentalStatusViewServiceServer is the server API for ParentalStatusViewService service. +type ParentalStatusViewServiceServer interface { + // Returns the requested parental status view in full detail. 
+ GetParentalStatusView(context.Context, *GetParentalStatusViewRequest) (*resources.ParentalStatusView, error) +} + +func RegisterParentalStatusViewServiceServer(s *grpc.Server, srv ParentalStatusViewServiceServer) { + s.RegisterService(&_ParentalStatusViewService_serviceDesc, srv) +} + +func _ParentalStatusViewService_GetParentalStatusView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetParentalStatusViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ParentalStatusViewServiceServer).GetParentalStatusView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ParentalStatusViewService/GetParentalStatusView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ParentalStatusViewServiceServer).GetParentalStatusView(ctx, req.(*GetParentalStatusViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ParentalStatusViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ParentalStatusViewService", + HandlerType: (*ParentalStatusViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetParentalStatusView", + Handler: _ParentalStatusViewService_GetParentalStatusView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/parental_status_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/parental_status_view_service.proto", fileDescriptor_parental_status_view_service_d789c0781624ed91) +} + +var fileDescriptor_parental_status_view_service_d789c0781624ed91 = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcf, 0x4a, 0xc3, 0x30, + 0x18, 0xa7, 0x15, 0x04, 0x8b, 0x5e, 0x0a, 0x82, 0x8e, 0x21, 0x63, 0xee, 0x20, 0x3b, 0x24, 0x54, + 0x19, 0x42, 0x74, 0x42, 0xb7, 0xc3, 0x3c, 0xc9, 0xd8, 0xa0, 0x07, 0x29, 0x94, 0xb8, 0x86, 0x52, + 0x58, 0x9b, 0xda, 0x2f, 0xed, 0x0e, 0xe2, 0x45, 0xf0, 0x09, 0x7c, 0x03, 0x8f, 0x3e, 0x8a, 0xe0, + 0xc9, 0x57, 0xf0, 0xa4, 0x2f, 0x21, 0x5d, 0x9a, 0x8a, 0xcc, 0xba, 0xdb, 0x8f, 0xe4, 0xf7, 0xe7, + 0xcb, 0xef, 0x8b, 0x31, 0x0c, 0x38, 0x0f, 0xe6, 0x0c, 0x53, 0x1f, 0xb0, 0x84, 0x05, 0xca, 0x2d, + 0x0c, 0x2c, 0xcd, 0xc3, 0x19, 0x03, 0x9c, 0xd0, 0x94, 0xc5, 0x82, 0xce, 0x3d, 0x10, 0x54, 0x64, + 0xe0, 0xe5, 0x21, 0x5b, 0x78, 0xe5, 0x2d, 0x4a, 0x52, 0x2e, 0xb8, 0xd9, 0x92, 0x4a, 0x44, 0x7d, + 0x40, 0x95, 0x09, 0xca, 0x2d, 0xa4, 0x4c, 0x1a, 0xe7, 0x75, 0x31, 0x29, 0x03, 0x9e, 0xa5, 0x75, + 0x39, 0xd2, 0xbf, 0xd1, 0x54, 0xea, 0x24, 0xc4, 0x34, 0x8e, 0xb9, 0xa0, 0x22, 0xe4, 0x31, 0xc8, + 0xdb, 0xf6, 0xd0, 0x68, 0x8e, 0x98, 0x18, 0x97, 0xf2, 0xe9, 0x52, 0xed, 0x84, 0x6c, 0x31, 0x61, + 0xb7, 0x19, 0x03, 0x61, 0x1e, 0x1a, 0x3b, 0x2a, 0xc5, 0x8b, 0x69, 0xc4, 0xf6, 0xb4, 0x96, 0x76, + 0xb4, 0x35, 0xd9, 0x56, 0x87, 0x57, 0x34, 0x62, 0xc7, 0x5f, 0x9a, 0xb1, 0xbf, 0x6a, 0x31, 0x95, + 0xf3, 0x9b, 0x6f, 0x9a, 0xb1, 0xfb, 0x67, 0x86, 0x79, 0x81, 0xd6, 0xbd, 0x1d, 0xfd, 0x37, 0x5c, + 0xa3, 0x57, 0xab, 0xaf, 0x9a, 0x41, 0xab, 0xea, 0x76, 0xff, 0xe1, 0xfd, 0xe3, 0x49, 0x3f, 0x35, + 0x7b, 0x45, 0x87, 0x77, 0xbf, 0x9e, 0xd7, 0x9f, 0x65, 0x20, 0x78, 0xc4, 0x52, 0xc0, 0xdd, 0xaa, + 0xd4, 0x1f, 0x29, 0xe0, 0xee, 0xfd, 0xe0, 0x51, 0x37, 0x3a, 0x33, 0x1e, 0xad, 0x9d, 0x7d, 0x70, + 0x50, 0xdb, 0xc9, 0xb8, 0xe8, 0x7e, 
0xac, 0x5d, 0x5f, 0x96, 0x1e, 0x01, 0x9f, 0xd3, 0x38, 0x40, + 0x3c, 0x0d, 0x70, 0xc0, 0xe2, 0xe5, 0x66, 0xd4, 0xa6, 0x93, 0x10, 0xea, 0xff, 0xd7, 0x99, 0x02, + 0xcf, 0xfa, 0xc6, 0xc8, 0xb6, 0x5f, 0xf4, 0xd6, 0x48, 0x1a, 0xda, 0x3e, 0x20, 0x09, 0x0b, 0xe4, + 0x58, 0xa8, 0x0c, 0x86, 0x57, 0x45, 0x71, 0x6d, 0x1f, 0xdc, 0x8a, 0xe2, 0x3a, 0x96, 0xab, 0x28, + 0x9f, 0x7a, 0x47, 0x9e, 0x13, 0x62, 0xfb, 0x40, 0x48, 0x45, 0x22, 0xc4, 0xb1, 0x08, 0x51, 0xb4, + 0x9b, 0xcd, 0xe5, 0x9c, 0x27, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x08, 0x89, 0x70, 0x7e, 0x06, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/payments_account_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/payments_account_service.pb.go new file mode 100644 index 0000000..b9aae9e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/payments_account_service.pb.go @@ -0,0 +1,222 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/payments_account_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for fetching all accessible Payments accounts. +type ListPaymentsAccountsRequest struct { + // The ID of the customer to apply the PaymentsAccount list operation to. 
+ CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPaymentsAccountsRequest) Reset() { *m = ListPaymentsAccountsRequest{} } +func (m *ListPaymentsAccountsRequest) String() string { return proto.CompactTextString(m) } +func (*ListPaymentsAccountsRequest) ProtoMessage() {} +func (*ListPaymentsAccountsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_payments_account_service_4cd5864e195abdc3, []int{0} +} +func (m *ListPaymentsAccountsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPaymentsAccountsRequest.Unmarshal(m, b) +} +func (m *ListPaymentsAccountsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPaymentsAccountsRequest.Marshal(b, m, deterministic) +} +func (dst *ListPaymentsAccountsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPaymentsAccountsRequest.Merge(dst, src) +} +func (m *ListPaymentsAccountsRequest) XXX_Size() int { + return xxx_messageInfo_ListPaymentsAccountsRequest.Size(m) +} +func (m *ListPaymentsAccountsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListPaymentsAccountsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPaymentsAccountsRequest proto.InternalMessageInfo + +func (m *ListPaymentsAccountsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +// Response message for [PaymentsAccountService.ListPaymentsAccounts][google.ads.googleads.v1.services.PaymentsAccountService.ListPaymentsAccounts]. +type ListPaymentsAccountsResponse struct { + // The list of accessible Payments accounts. + PaymentsAccounts []*resources.PaymentsAccount `protobuf:"bytes,1,rep,name=payments_accounts,json=paymentsAccounts,proto3" json:"payments_accounts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPaymentsAccountsResponse) Reset() { *m = ListPaymentsAccountsResponse{} } +func (m *ListPaymentsAccountsResponse) String() string { return proto.CompactTextString(m) } +func (*ListPaymentsAccountsResponse) ProtoMessage() {} +func (*ListPaymentsAccountsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_payments_account_service_4cd5864e195abdc3, []int{1} +} +func (m *ListPaymentsAccountsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPaymentsAccountsResponse.Unmarshal(m, b) +} +func (m *ListPaymentsAccountsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPaymentsAccountsResponse.Marshal(b, m, deterministic) +} +func (dst *ListPaymentsAccountsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPaymentsAccountsResponse.Merge(dst, src) +} +func (m *ListPaymentsAccountsResponse) XXX_Size() int { + return xxx_messageInfo_ListPaymentsAccountsResponse.Size(m) +} +func (m *ListPaymentsAccountsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListPaymentsAccountsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPaymentsAccountsResponse proto.InternalMessageInfo + +func (m *ListPaymentsAccountsResponse) GetPaymentsAccounts() []*resources.PaymentsAccount { + if m != nil { + return m.PaymentsAccounts + } + return nil +} + +func init() { + proto.RegisterType((*ListPaymentsAccountsRequest)(nil), "google.ads.googleads.v1.services.ListPaymentsAccountsRequest") + 
proto.RegisterType((*ListPaymentsAccountsResponse)(nil), "google.ads.googleads.v1.services.ListPaymentsAccountsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PaymentsAccountServiceClient is the client API for PaymentsAccountService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PaymentsAccountServiceClient interface { + // Returns all Payments accounts associated with all managers + // between the login customer ID and specified serving customer in the + // hierarchy, inclusive. + ListPaymentsAccounts(ctx context.Context, in *ListPaymentsAccountsRequest, opts ...grpc.CallOption) (*ListPaymentsAccountsResponse, error) +} + +type paymentsAccountServiceClient struct { + cc *grpc.ClientConn +} + +func NewPaymentsAccountServiceClient(cc *grpc.ClientConn) PaymentsAccountServiceClient { + return &paymentsAccountServiceClient{cc} +} + +func (c *paymentsAccountServiceClient) ListPaymentsAccounts(ctx context.Context, in *ListPaymentsAccountsRequest, opts ...grpc.CallOption) (*ListPaymentsAccountsResponse, error) { + out := new(ListPaymentsAccountsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.PaymentsAccountService/ListPaymentsAccounts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PaymentsAccountServiceServer is the server API for PaymentsAccountService service. +type PaymentsAccountServiceServer interface { + // Returns all Payments accounts associated with all managers + // between the login customer ID and specified serving customer in the + // hierarchy, inclusive. 
+ ListPaymentsAccounts(context.Context, *ListPaymentsAccountsRequest) (*ListPaymentsAccountsResponse, error) +} + +func RegisterPaymentsAccountServiceServer(s *grpc.Server, srv PaymentsAccountServiceServer) { + s.RegisterService(&_PaymentsAccountService_serviceDesc, srv) +} + +func _PaymentsAccountService_ListPaymentsAccounts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPaymentsAccountsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PaymentsAccountServiceServer).ListPaymentsAccounts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.PaymentsAccountService/ListPaymentsAccounts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PaymentsAccountServiceServer).ListPaymentsAccounts(ctx, req.(*ListPaymentsAccountsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PaymentsAccountService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.PaymentsAccountService", + HandlerType: (*PaymentsAccountServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListPaymentsAccounts", + Handler: _PaymentsAccountService_ListPaymentsAccounts_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/payments_account_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/payments_account_service.proto", fileDescriptor_payments_account_service_4cd5864e195abdc3) +} + +var fileDescriptor_payments_account_service_4cd5864e195abdc3 = []byte{ + // 399 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x52, 0xcf, 0x6a, 0xdb, 0x30, + 0x18, 0xc7, 0x0e, 0x0c, 0xa6, 0x5c, 0x36, 0x33, 0x46, 0x48, 0x02, 0x33, 0x21, 0x87, 0xb0, 0x83, + 0x34, 0x67, 0x30, 0x86, 0x46, 0x32, 0x9c, 0x4b, 0x36, 0xd8, 0x21, 0x64, 0x90, 0xc3, 0x30, 0x18, + 0xcf, 0x16, 0xc6, 0x90, 0x48, 0xae, 0x3f, 0x39, 0x50, 0x4a, 0x29, 0xe4, 0x15, 0xfa, 0x06, 0x3d, + 0xf6, 0x51, 0x0a, 0x3d, 0xf5, 0x15, 0x7a, 0x28, 0x7d, 0x8a, 0xe2, 0xc8, 0x32, 0xc5, 0x38, 0x0d, + 0xf4, 0xf6, 0x43, 0xfa, 0xfd, 0xd1, 0xf7, 0xd3, 0x87, 0x7e, 0xc6, 0x42, 0xc4, 0x6b, 0x46, 0x82, + 0x08, 0x88, 0x82, 0x05, 0xda, 0x3a, 0x04, 0x58, 0xb6, 0x4d, 0x42, 0x06, 0x24, 0x0d, 0x4e, 0x37, + 0x8c, 0x4b, 0xf0, 0x83, 0x30, 0x14, 0x39, 0x97, 0x7e, 0x79, 0x83, 0xd3, 0x4c, 0x48, 0x61, 0xd9, + 0x4a, 0x85, 0x83, 0x08, 0x70, 0x65, 0x80, 0xb7, 0x0e, 0xd6, 0x06, 0xdd, 0xef, 0x87, 0x22, 0x32, + 0x06, 0x22, 0xcf, 0x9a, 0x32, 0x94, 0x77, 0xb7, 0xaf, 0x95, 0x69, 0x42, 0x02, 0xce, 0x85, 0x0c, + 0x64, 0x22, 0x38, 0xa8, 0xdb, 0xc1, 0x14, 0xf5, 0xfe, 0x24, 0x20, 0x17, 0xa5, 0xd6, 0x55, 0x52, + 0x58, 0xb2, 0x93, 0x9c, 0x81, 0xb4, 0x3e, 0xa1, 0x76, 0x98, 0x83, 0x14, 0x1b, 0x96, 0xf9, 0x49, + 0xd4, 0x31, 0x6c, 0x63, 0xf4, 0x76, 0x89, 0xf4, 0xd1, 0xef, 0x68, 0x70, 0x81, 0xfa, 0xcd, 0x7a, + 0x48, 0x05, 0x07, 0x66, 0xf9, 0xe8, 0x7d, 0xfd, 0x5d, 0xd0, 0x31, 0xec, 0xd6, 0xa8, 0x3d, 0x1e, + 0xe3, 0x43, 0x53, 0x57, 0x33, 0xe1, 0x9a, 0xef, 0xf2, 0x5d, 0x5a, 0x0b, 0x1a, 0x3f, 0x18, 0xe8, + 0x63, 0x8d, 0xf5, 0x57, 0x95, 0x66, 0xdd, 0x1a, 0xe8, 0x43, 0xd3, 0xe3, 0xac, 0x09, 0x3e, 0xd6, + 0x37, 0x7e, 0xa1, 0x94, 0xee, 0xf4, 0xb5, 0x72, 0xd5, 0xc9, 0xe0, 0xdb, 0xee, 0xee, 0xfe, 0xd2, + 0xfc, 0x62, 0xe1, 0xe2, 0xff, 0x74, 0x97, 0x40, 0xce, 0x9e, 0x35, 0x3d, 0xf9, 0x7c, 0x4e, 0xea, 
+ 0xa3, 0xce, 0x76, 0x26, 0x1a, 0x86, 0x62, 0x73, 0x34, 0x7d, 0xd6, 0x6b, 0x2e, 0x64, 0x51, 0xfc, + 0xf8, 0xc2, 0xf8, 0xf7, 0xab, 0x34, 0x88, 0xc5, 0x3a, 0xe0, 0x31, 0x16, 0x59, 0x4c, 0x62, 0xc6, + 0xf7, 0xfb, 0xa0, 0x77, 0x2b, 0x4d, 0xe0, 0xf0, 0x36, 0xff, 0xd0, 0xe0, 0xca, 0x6c, 0xcd, 0x5d, + 0xf7, 0xda, 0xb4, 0xe7, 0xca, 0xd0, 0x8d, 0x00, 0x2b, 0x58, 0xa0, 0x95, 0x83, 0xcb, 0x60, 0xb8, + 0xd1, 0x14, 0xcf, 0x8d, 0xc0, 0xab, 0x28, 0xde, 0xca, 0xf1, 0x34, 0xe5, 0xd1, 0x1c, 0xaa, 0x73, + 0x4a, 0xdd, 0x08, 0x28, 0xad, 0x48, 0x94, 0xae, 0x1c, 0x4a, 0x35, 0xed, 0xff, 0x9b, 0xfd, 0x3b, + 0xbf, 0x3e, 0x05, 0x00, 0x00, 0xff, 0xff, 0xdc, 0xe9, 0x84, 0xbb, 0x74, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_bidding_category_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_bidding_category_constant_service.pb.go new file mode 100644 index 0000000..87b9f2b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_bidding_category_constant_service.pb.go @@ -0,0 +1,179 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/product_bidding_category_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [ProductBiddingCategoryService.GetProductBiddingCategory][]. +type GetProductBiddingCategoryConstantRequest struct { + // Resource name of the Product Bidding Category to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductBiddingCategoryConstantRequest) Reset() { + *m = GetProductBiddingCategoryConstantRequest{} +} +func (m *GetProductBiddingCategoryConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductBiddingCategoryConstantRequest) ProtoMessage() {} +func (*GetProductBiddingCategoryConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_bidding_category_constant_service_6978b02f4af5cdee, []int{0} +} +func (m *GetProductBiddingCategoryConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductBiddingCategoryConstantRequest.Unmarshal(m, b) +} +func (m *GetProductBiddingCategoryConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductBiddingCategoryConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductBiddingCategoryConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductBiddingCategoryConstantRequest.Merge(dst, src) +} +func (m *GetProductBiddingCategoryConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetProductBiddingCategoryConstantRequest.Size(m) +} +func (m *GetProductBiddingCategoryConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductBiddingCategoryConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductBiddingCategoryConstantRequest proto.InternalMessageInfo + +func (m *GetProductBiddingCategoryConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetProductBiddingCategoryConstantRequest)(nil), "google.ads.googleads.v1.services.GetProductBiddingCategoryConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProductBiddingCategoryConstantServiceClient is the client API for ProductBiddingCategoryConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProductBiddingCategoryConstantServiceClient interface { + // Returns the requested Product Bidding Category in full detail. + GetProductBiddingCategoryConstant(ctx context.Context, in *GetProductBiddingCategoryConstantRequest, opts ...grpc.CallOption) (*resources.ProductBiddingCategoryConstant, error) +} + +type productBiddingCategoryConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewProductBiddingCategoryConstantServiceClient(cc *grpc.ClientConn) ProductBiddingCategoryConstantServiceClient { + return &productBiddingCategoryConstantServiceClient{cc} +} + +func (c *productBiddingCategoryConstantServiceClient) GetProductBiddingCategoryConstant(ctx context.Context, in *GetProductBiddingCategoryConstantRequest, opts ...grpc.CallOption) (*resources.ProductBiddingCategoryConstant, error) { + out := new(resources.ProductBiddingCategoryConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ProductBiddingCategoryConstantService/GetProductBiddingCategoryConstant", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// ProductBiddingCategoryConstantServiceServer is the server API for ProductBiddingCategoryConstantService service. +type ProductBiddingCategoryConstantServiceServer interface { + // Returns the requested Product Bidding Category in full detail. + GetProductBiddingCategoryConstant(context.Context, *GetProductBiddingCategoryConstantRequest) (*resources.ProductBiddingCategoryConstant, error) +} + +func RegisterProductBiddingCategoryConstantServiceServer(s *grpc.Server, srv ProductBiddingCategoryConstantServiceServer) { + s.RegisterService(&_ProductBiddingCategoryConstantService_serviceDesc, srv) +} + +func _ProductBiddingCategoryConstantService_GetProductBiddingCategoryConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductBiddingCategoryConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductBiddingCategoryConstantServiceServer).GetProductBiddingCategoryConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ProductBiddingCategoryConstantService/GetProductBiddingCategoryConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductBiddingCategoryConstantServiceServer).GetProductBiddingCategoryConstant(ctx, req.(*GetProductBiddingCategoryConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProductBiddingCategoryConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ProductBiddingCategoryConstantService", + HandlerType: (*ProductBiddingCategoryConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetProductBiddingCategoryConstant", + Handler: _ProductBiddingCategoryConstantService_GetProductBiddingCategoryConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/product_bidding_category_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/product_bidding_category_constant_service.proto", fileDescriptor_product_bidding_category_constant_service_6978b02f4af5cdee) +} + +var fileDescriptor_product_bidding_category_constant_service_6978b02f4af5cdee = []byte{ + // 379 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcf, 0x4a, 0xeb, 0x40, + 0x18, 0xc5, 0x49, 0x2e, 0x5c, 0xb8, 0xe1, 0xde, 0x4d, 0x56, 0x97, 0xe2, 0xa2, 0xd6, 0x0a, 0xa5, + 0x8b, 0x09, 0x51, 0x44, 0x18, 0xe9, 0x22, 0xed, 0xa2, 0xea, 0x42, 0x43, 0x85, 0x2e, 0x24, 0x10, + 0xa6, 0x99, 0x61, 0x08, 0xb4, 0x33, 0x71, 0xbe, 0x69, 0x41, 0xc4, 0x8d, 0x6f, 0x20, 0x2e, 0xdd, + 0xb9, 0xf4, 0x51, 0xdc, 0xfa, 0x0a, 0xae, 0xdc, 0xf9, 0x06, 0x92, 0x4e, 0x26, 0xe0, 0xa2, 0x7f, + 0x76, 0x87, 0xc9, 0xc9, 0xef, 0xcc, 0x77, 0xbe, 0xf1, 0x62, 0x2e, 0x25, 0x9f, 0xb2, 0x80, 0x50, + 0x08, 0x8c, 0x2c, 0xd5, 0x22, 0x0c, 0x80, 0xa9, 0x45, 0x9e, 0x31, 0x08, 0x0a, 0x25, 0xe9, 0x3c, + 0xd3, 0xe9, 0x24, 0xa7, 0x34, 0x17, 0x3c, 0xcd, 0x88, 0x66, 0x5c, 0xaa, 0xdb, 0x34, 0x93, 0x02, + 0x34, 0x11, 0x3a, 0xad, 0xac, 0xa8, 0x50, 0x52, 0x4b, 0xbf, 0x69, 0x30, 0x88, 0x50, 0x40, 0x35, + 0x11, 0x2d, 0x42, 0x64, 0x89, 0x8d, 0xb3, 0x55, 0x99, 0x8a, 0x81, 0x9c, 0xab, 0xad, 0x42, 0x4d, + 0x58, 0x63, 0xc7, 0xa2, 0x8a, 0x3c, 0x20, 0x42, 0x48, 0x4d, 0x74, 0x2e, 0x05, 0x98, 0xaf, 0xad, 
+ 0x4b, 0xaf, 0x33, 0x64, 0x3a, 0x36, 0xac, 0xbe, 0x41, 0x0d, 0x2a, 0xd2, 0xa0, 0x02, 0x8d, 0xd8, + 0xcd, 0x9c, 0x81, 0xf6, 0xf7, 0xbc, 0x7f, 0x36, 0x3e, 0x15, 0x64, 0xc6, 0xfe, 0x3b, 0x4d, 0xa7, + 0xf3, 0x67, 0xf4, 0xd7, 0x1e, 0x5e, 0x90, 0x19, 0x3b, 0x78, 0x76, 0xbd, 0xfd, 0xf5, 0xb8, 0x2b, + 0x33, 0xa4, 0xff, 0xe5, 0x78, 0xbb, 0x1b, 0xb3, 0xfd, 0x73, 0xb4, 0xa9, 0x2c, 0xb4, 0xed, 0x00, + 0x8d, 0x68, 0x25, 0xab, 0xae, 0x15, 0xad, 0x27, 0xb5, 0x7a, 0x0f, 0xef, 0x1f, 0x4f, 0xee, 0xb1, + 0x7f, 0x54, 0x2e, 0xe3, 0xee, 0x47, 0x1d, 0xbd, 0x62, 0xed, 0xaf, 0x10, 0x74, 0xef, 0xfb, 0x8f, + 0xae, 0xd7, 0xce, 0xe4, 0x6c, 0xe3, 0x4c, 0xfd, 0xee, 0x56, 0x1d, 0xc6, 0xe5, 0x0e, 0x63, 0xe7, + 0xfa, 0xb4, 0xe2, 0x71, 0x39, 0x25, 0x82, 0x23, 0xa9, 0x78, 0xc0, 0x99, 0x58, 0x6e, 0xd8, 0x3e, + 0x9f, 0x22, 0x87, 0xd5, 0x2f, 0xf8, 0xc4, 0x8a, 0x17, 0xf7, 0xd7, 0x30, 0x8a, 0x5e, 0xdd, 0xe6, + 0xd0, 0x00, 0x23, 0x0a, 0xc8, 0xc8, 0x52, 0x8d, 0x43, 0x54, 0x05, 0xc3, 0x9b, 0xb5, 0x24, 0x11, + 0x85, 0xa4, 0xb6, 0x24, 0xe3, 0x30, 0xb1, 0x96, 0x4f, 0xb7, 0x6d, 0xce, 0x31, 0x8e, 0x28, 0x60, + 0x5c, 0x9b, 0x30, 0x1e, 0x87, 0x18, 0x5b, 0xdb, 0xe4, 0xf7, 0xf2, 0x9e, 0x87, 0xdf, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x6e, 0x93, 0x85, 0xef, 0x68, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_group_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_group_view_service.pb.go new file mode 100644 index 0000000..72428b3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/product_group_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/product_group_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ProductGroupViewService.GetProductGroupView][google.ads.googleads.v1.services.ProductGroupViewService.GetProductGroupView]. +type GetProductGroupViewRequest struct { + // The resource name of the product group view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductGroupViewRequest) Reset() { *m = GetProductGroupViewRequest{} } +func (m *GetProductGroupViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductGroupViewRequest) ProtoMessage() {} +func (*GetProductGroupViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_group_view_service_b224563427f47e03, []int{0} +} +func (m *GetProductGroupViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductGroupViewRequest.Unmarshal(m, b) +} +func (m *GetProductGroupViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductGroupViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductGroupViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductGroupViewRequest.Merge(dst, src) +} +func (m *GetProductGroupViewRequest) XXX_Size() int { + return xxx_messageInfo_GetProductGroupViewRequest.Size(m) +} +func (m *GetProductGroupViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductGroupViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductGroupViewRequest proto.InternalMessageInfo + +func (m *GetProductGroupViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetProductGroupViewRequest)(nil), "google.ads.googleads.v1.services.GetProductGroupViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProductGroupViewServiceClient is the client API for ProductGroupViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProductGroupViewServiceClient interface { + // Returns the requested product group view in full detail. + GetProductGroupView(ctx context.Context, in *GetProductGroupViewRequest, opts ...grpc.CallOption) (*resources.ProductGroupView, error) +} + +type productGroupViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewProductGroupViewServiceClient(cc *grpc.ClientConn) ProductGroupViewServiceClient { + return &productGroupViewServiceClient{cc} +} + +func (c *productGroupViewServiceClient) GetProductGroupView(ctx context.Context, in *GetProductGroupViewRequest, opts ...grpc.CallOption) (*resources.ProductGroupView, error) { + out := new(resources.ProductGroupView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ProductGroupViewService/GetProductGroupView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProductGroupViewServiceServer is the server API for ProductGroupViewService service. +type ProductGroupViewServiceServer interface { + // Returns the requested product group view in full detail. 
+ GetProductGroupView(context.Context, *GetProductGroupViewRequest) (*resources.ProductGroupView, error) +} + +func RegisterProductGroupViewServiceServer(s *grpc.Server, srv ProductGroupViewServiceServer) { + s.RegisterService(&_ProductGroupViewService_serviceDesc, srv) +} + +func _ProductGroupViewService_GetProductGroupView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductGroupViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductGroupViewServiceServer).GetProductGroupView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ProductGroupViewService/GetProductGroupView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductGroupViewServiceServer).GetProductGroupView(ctx, req.(*GetProductGroupViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProductGroupViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ProductGroupViewService", + HandlerType: (*ProductGroupViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetProductGroupView", + Handler: _ProductGroupViewService_GetProductGroupView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/product_group_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/product_group_view_service.proto", fileDescriptor_product_group_view_service_b224563427f47e03) +} + +var fileDescriptor_product_group_view_service_b224563427f47e03 = []byte{ + // 369 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x31, 0x4b, 0xfb, 0x40, + 0x1c, 0x25, 0xf9, 0xc3, 0x1f, 0x0c, 0xba, 0xc4, 0x41, 0x09, 0x1d, 0x4a, 0xed, 0x20, 0x1d, 0xee, + 0x88, 0xc5, 0xe5, 0xaa, 0x43, 0xba, 0xc4, 0x49, 0x4a, 0x85, 0x0c, 0x12, 0x08, 0x31, 0x39, 0x8e, + 0x40, 0x93, 0x8b, 0xf7, 0xbb, 0xa4, 0x83, 0xb8, 0xe8, 0x47, 0xf0, 0x1b, 0x38, 0xfa, 0x3d, 0x5c, + 0x5c, 0xfd, 0x04, 0x82, 0x93, 0x9f, 0x42, 0xd2, 0xeb, 0x05, 0x2c, 0x0d, 0xdd, 0x1e, 0x77, 0xef, + 0xbd, 0xdf, 0xbb, 0xf7, 0x3b, 0xcb, 0x63, 0x9c, 0xb3, 0x05, 0xc5, 0x71, 0x0a, 0x58, 0xc1, 0x06, + 0xd5, 0x2e, 0x06, 0x2a, 0xea, 0x2c, 0xa1, 0x80, 0x4b, 0xc1, 0xd3, 0x2a, 0x91, 0x11, 0x13, 0xbc, + 0x2a, 0xa3, 0x3a, 0xa3, 0xcb, 0x68, 0x7d, 0x87, 0x4a, 0xc1, 0x25, 0xb7, 0xfb, 0x4a, 0x87, 0xe2, + 0x14, 0x50, 0x6b, 0x81, 0x6a, 0x17, 0x69, 0x0b, 0x87, 0x74, 0x0d, 0x11, 0x14, 0x78, 0x25, 0xb6, + 0x4f, 0x51, 0xee, 0x4e, 0x4f, 0x6b, 0xcb, 0x0c, 0xc7, 0x45, 0xc1, 0x65, 0x2c, 0x33, 0x5e, 0x80, + 0xba, 0x1d, 0x78, 0x96, 0xe3, 0x53, 0x39, 0x53, 0x62, 0xbf, 0xd1, 0x06, 0x19, 0x5d, 0xce, 0xe9, + 0x7d, 0x45, 0x41, 0xda, 0x27, 0xd6, 0x81, 0x9e, 0x10, 0x15, 0x71, 0x4e, 0x8f, 0x8d, 0xbe, 0x71, + 0xba, 0x37, 0xdf, 0xd7, 0x87, 0xd7, 0x71, 0x4e, 0xcf, 0xbe, 0x0c, 0xeb, 0x68, 0xd3, 0xe0, 0x46, + 0x25, 0xb7, 0xdf, 0x0d, 0xeb, 0x70, 0x8b, 0xbf, 0x7d, 0x81, 0x76, 0xbd, 0x19, 0x75, 0xc7, 0x72, + 0xc6, 0x9d, 0xea, 0xb6, 0x0f, 0xb4, 0xa9, 0x1d, 0x4c, 0x9e, 0x3e, 0xbf, 0x5f, 0xcc, 0x73, 0x7b, + 0xdc, 0xf4, 0xf6, 0xf0, 0xe7, 0x59, 0x97, 0x49, 0x05, 0x92, 0xe7, 0x54, 0x00, 0x1e, 0xe9, 0x22, + 0x5b, 0x21, 0xe0, 0xd1, 0xe3, 0xf4, 0xd9, 0xb4, 0x86, 0x09, 0xcf, 0x77, 0xa6, 0x9e, 0xf6, 0x3a, + 0x9a, 0x98, 0x35, 0x6d, 0xcf, 0x8c, 0xdb, 0xab, 0xb5, 0x03, 0xe3, 0x8b, 0xb8, 0x60, 
0x88, 0x0b, + 0x86, 0x19, 0x2d, 0x56, 0xbb, 0xd0, 0x9b, 0x2d, 0x33, 0xe8, 0xfe, 0x4d, 0x13, 0x0d, 0x5e, 0xcd, + 0x7f, 0xbe, 0xe7, 0xbd, 0x99, 0x7d, 0x5f, 0x19, 0x7a, 0x29, 0x20, 0x05, 0x1b, 0x14, 0xb8, 0x68, + 0x3d, 0x18, 0x3e, 0x34, 0x25, 0xf4, 0x52, 0x08, 0x5b, 0x4a, 0x18, 0xb8, 0xa1, 0xa6, 0xfc, 0x98, + 0x43, 0x75, 0x4e, 0x88, 0x97, 0x02, 0x21, 0x2d, 0x89, 0x90, 0xc0, 0x25, 0x44, 0xd3, 0xee, 0xfe, + 0xaf, 0x72, 0x8e, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xea, 0xc2, 0x72, 0x20, 0xf4, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/recommendation_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/recommendation_service.pb.go new file mode 100644 index 0000000..0e14a08 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/recommendation_service.pb.go @@ -0,0 +1,1427 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/recommendation_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import common "google.golang.org/genproto/googleapis/ads/googleads/v1/common" +import enums "google.golang.org/genproto/googleapis/ads/googleads/v1/enums" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [RecommendationService.GetRecommendation][google.ads.googleads.v1.services.RecommendationService.GetRecommendation]. +type GetRecommendationRequest struct { + // The resource name of the recommendation to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRecommendationRequest) Reset() { *m = GetRecommendationRequest{} } +func (m *GetRecommendationRequest) String() string { return proto.CompactTextString(m) } +func (*GetRecommendationRequest) ProtoMessage() {} +func (*GetRecommendationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{0} +} +func (m *GetRecommendationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRecommendationRequest.Unmarshal(m, b) +} +func (m *GetRecommendationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRecommendationRequest.Marshal(b, m, deterministic) +} +func (dst *GetRecommendationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRecommendationRequest.Merge(dst, src) +} +func (m *GetRecommendationRequest) XXX_Size() int { + return xxx_messageInfo_GetRecommendationRequest.Size(m) +} +func (m *GetRecommendationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRecommendationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRecommendationRequest proto.InternalMessageInfo + +func (m *GetRecommendationRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [RecommendationService.ApplyRecommendation][google.ads.googleads.v1.services.RecommendationService.ApplyRecommendation]. +type ApplyRecommendationRequest struct { + // The ID of the customer with the recommendation. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to apply recommendations. + // If partial_failure=false all recommendations should be of the same type + // There is a limit of 100 operations per request. + Operations []*ApplyRecommendationOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, operations will be carried + // out as a transaction if and only if they are all valid. + // Default is false. 
+ PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationRequest) Reset() { *m = ApplyRecommendationRequest{} } +func (m *ApplyRecommendationRequest) String() string { return proto.CompactTextString(m) } +func (*ApplyRecommendationRequest) ProtoMessage() {} +func (*ApplyRecommendationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{1} +} +func (m *ApplyRecommendationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationRequest.Unmarshal(m, b) +} +func (m *ApplyRecommendationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationRequest.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationRequest.Merge(dst, src) +} +func (m *ApplyRecommendationRequest) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationRequest.Size(m) +} +func (m *ApplyRecommendationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationRequest proto.InternalMessageInfo + +func (m *ApplyRecommendationRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *ApplyRecommendationRequest) GetOperations() []*ApplyRecommendationOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ApplyRecommendationRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +// Information about the operation to apply a recommendation and any parameters +// to customize it. +type ApplyRecommendationOperation struct { + // The resource name of the recommendation to apply. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // Parameters to use when applying the recommendation. 
+ // + // Types that are valid to be assigned to ApplyParameters: + // *ApplyRecommendationOperation_CampaignBudget + // *ApplyRecommendationOperation_TextAd + // *ApplyRecommendationOperation_Keyword + // *ApplyRecommendationOperation_TargetCpaOptIn + // *ApplyRecommendationOperation_CalloutExtension + // *ApplyRecommendationOperation_CallExtension + // *ApplyRecommendationOperation_SitelinkExtension + // *ApplyRecommendationOperation_MoveUnusedBudget + ApplyParameters isApplyRecommendationOperation_ApplyParameters `protobuf_oneof:"apply_parameters"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation) Reset() { *m = ApplyRecommendationOperation{} } +func (m *ApplyRecommendationOperation) String() string { return proto.CompactTextString(m) } +func (*ApplyRecommendationOperation) ProtoMessage() {} +func (*ApplyRecommendationOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2} +} +func (m *ApplyRecommendationOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation.Merge(dst, src) +} +func (m *ApplyRecommendationOperation) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation.Size(m) +} +func (m *ApplyRecommendationOperation) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +type isApplyRecommendationOperation_ApplyParameters interface { + isApplyRecommendationOperation_ApplyParameters() +} + +type ApplyRecommendationOperation_CampaignBudget struct { + CampaignBudget *ApplyRecommendationOperation_CampaignBudgetParameters `protobuf:"bytes,2,opt,name=campaign_budget,json=campaignBudget,proto3,oneof"` +} + +type ApplyRecommendationOperation_TextAd struct { + TextAd *ApplyRecommendationOperation_TextAdParameters `protobuf:"bytes,3,opt,name=text_ad,json=textAd,proto3,oneof"` +} + +type ApplyRecommendationOperation_Keyword struct { + Keyword *ApplyRecommendationOperation_KeywordParameters `protobuf:"bytes,4,opt,name=keyword,proto3,oneof"` +} + +type ApplyRecommendationOperation_TargetCpaOptIn struct { + TargetCpaOptIn *ApplyRecommendationOperation_TargetCpaOptInParameters `protobuf:"bytes,5,opt,name=target_cpa_opt_in,json=targetCpaOptIn,proto3,oneof"` +} + +type ApplyRecommendationOperation_CalloutExtension struct { + CalloutExtension *ApplyRecommendationOperation_CalloutExtensionParameters `protobuf:"bytes,6,opt,name=callout_extension,json=calloutExtension,proto3,oneof"` +} + +type ApplyRecommendationOperation_CallExtension struct { + CallExtension *ApplyRecommendationOperation_CallExtensionParameters `protobuf:"bytes,7,opt,name=call_extension,json=callExtension,proto3,oneof"` +} + +type ApplyRecommendationOperation_SitelinkExtension struct { + SitelinkExtension *ApplyRecommendationOperation_SitelinkExtensionParameters `protobuf:"bytes,8,opt,name=sitelink_extension,json=sitelinkExtension,proto3,oneof"` +} + 
+type ApplyRecommendationOperation_MoveUnusedBudget struct { + MoveUnusedBudget *ApplyRecommendationOperation_MoveUnusedBudgetParameters `protobuf:"bytes,9,opt,name=move_unused_budget,json=moveUnusedBudget,proto3,oneof"` +} + +func (*ApplyRecommendationOperation_CampaignBudget) isApplyRecommendationOperation_ApplyParameters() {} + +func (*ApplyRecommendationOperation_TextAd) isApplyRecommendationOperation_ApplyParameters() {} + +func (*ApplyRecommendationOperation_Keyword) isApplyRecommendationOperation_ApplyParameters() {} + +func (*ApplyRecommendationOperation_TargetCpaOptIn) isApplyRecommendationOperation_ApplyParameters() {} + +func (*ApplyRecommendationOperation_CalloutExtension) isApplyRecommendationOperation_ApplyParameters() { +} + +func (*ApplyRecommendationOperation_CallExtension) isApplyRecommendationOperation_ApplyParameters() {} + +func (*ApplyRecommendationOperation_SitelinkExtension) isApplyRecommendationOperation_ApplyParameters() { +} + +func (*ApplyRecommendationOperation_MoveUnusedBudget) isApplyRecommendationOperation_ApplyParameters() { +} + +func (m *ApplyRecommendationOperation) GetApplyParameters() isApplyRecommendationOperation_ApplyParameters { + if m != nil { + return m.ApplyParameters + } + return nil +} + +func (m *ApplyRecommendationOperation) GetCampaignBudget() *ApplyRecommendationOperation_CampaignBudgetParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_CampaignBudget); ok { + return x.CampaignBudget + } + return nil +} + +func (m *ApplyRecommendationOperation) GetTextAd() *ApplyRecommendationOperation_TextAdParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_TextAd); ok { + return x.TextAd + } + return nil +} + +func (m *ApplyRecommendationOperation) GetKeyword() *ApplyRecommendationOperation_KeywordParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_Keyword); ok { + return x.Keyword + } + return nil +} + +func (m *ApplyRecommendationOperation) GetTargetCpaOptIn() *ApplyRecommendationOperation_TargetCpaOptInParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_TargetCpaOptIn); ok { + return x.TargetCpaOptIn + } + return nil +} + +func (m *ApplyRecommendationOperation) GetCalloutExtension() *ApplyRecommendationOperation_CalloutExtensionParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_CalloutExtension); ok { + return x.CalloutExtension + } + return nil +} + +func (m *ApplyRecommendationOperation) GetCallExtension() *ApplyRecommendationOperation_CallExtensionParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_CallExtension); ok { + return x.CallExtension + } + return nil +} + +func (m *ApplyRecommendationOperation) GetSitelinkExtension() *ApplyRecommendationOperation_SitelinkExtensionParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_SitelinkExtension); ok { + return x.SitelinkExtension + } + return nil +} + +func (m *ApplyRecommendationOperation) GetMoveUnusedBudget() *ApplyRecommendationOperation_MoveUnusedBudgetParameters { + if x, ok := m.GetApplyParameters().(*ApplyRecommendationOperation_MoveUnusedBudget); ok { + return x.MoveUnusedBudget + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ApplyRecommendationOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ApplyRecommendationOperation_OneofMarshaler, _ApplyRecommendationOperation_OneofUnmarshaler, _ApplyRecommendationOperation_OneofSizer, []interface{}{ + (*ApplyRecommendationOperation_CampaignBudget)(nil), + (*ApplyRecommendationOperation_TextAd)(nil), + (*ApplyRecommendationOperation_Keyword)(nil), + (*ApplyRecommendationOperation_TargetCpaOptIn)(nil), + (*ApplyRecommendationOperation_CalloutExtension)(nil), + (*ApplyRecommendationOperation_CallExtension)(nil), + (*ApplyRecommendationOperation_SitelinkExtension)(nil), + (*ApplyRecommendationOperation_MoveUnusedBudget)(nil), + } +} + +func _ApplyRecommendationOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ApplyRecommendationOperation) + // apply_parameters + switch x := m.ApplyParameters.(type) { + case *ApplyRecommendationOperation_CampaignBudget: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CampaignBudget); err != nil { + return err + } + case *ApplyRecommendationOperation_TextAd: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextAd); err != nil { + return err + } + case *ApplyRecommendationOperation_Keyword: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Keyword); err != nil { + return err + } + case *ApplyRecommendationOperation_TargetCpaOptIn: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetCpaOptIn); err != nil { + return err + } + case *ApplyRecommendationOperation_CalloutExtension: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CalloutExtension); err != nil { + return err + } + case *ApplyRecommendationOperation_CallExtension: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CallExtension); err != nil { + return err + } + case *ApplyRecommendationOperation_SitelinkExtension: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SitelinkExtension); err != nil { + return err + } + case *ApplyRecommendationOperation_MoveUnusedBudget: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MoveUnusedBudget); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ApplyRecommendationOperation.ApplyParameters has unexpected type %T", x) + } + return nil +} + +func _ApplyRecommendationOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ApplyRecommendationOperation) + switch tag { + case 2: // apply_parameters.campaign_budget + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_CampaignBudgetParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_CampaignBudget{msg} + return true, err + case 3: // apply_parameters.text_ad + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_TextAdParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_TextAd{msg} + return true, err + case 4: // apply_parameters.keyword + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_KeywordParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = 
&ApplyRecommendationOperation_Keyword{msg} + return true, err + case 5: // apply_parameters.target_cpa_opt_in + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_TargetCpaOptInParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_TargetCpaOptIn{msg} + return true, err + case 6: // apply_parameters.callout_extension + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_CalloutExtensionParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_CalloutExtension{msg} + return true, err + case 7: // apply_parameters.call_extension + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_CallExtensionParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_CallExtension{msg} + return true, err + case 8: // apply_parameters.sitelink_extension + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_SitelinkExtensionParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_SitelinkExtension{msg} + return true, err + case 9: // apply_parameters.move_unused_budget + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApplyRecommendationOperation_MoveUnusedBudgetParameters) + err := b.DecodeMessage(msg) + m.ApplyParameters = &ApplyRecommendationOperation_MoveUnusedBudget{msg} + return true, err + default: + return false, nil + } +} + +func _ApplyRecommendationOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ApplyRecommendationOperation) + // apply_parameters + switch x := m.ApplyParameters.(type) { + case *ApplyRecommendationOperation_CampaignBudget: + s := proto.Size(x.CampaignBudget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_TextAd: + s := proto.Size(x.TextAd) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_Keyword: + s := proto.Size(x.Keyword) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_TargetCpaOptIn: + s := proto.Size(x.TargetCpaOptIn) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_CalloutExtension: + s := proto.Size(x.CalloutExtension) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_CallExtension: + s := proto.Size(x.CallExtension) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_SitelinkExtension: + s := proto.Size(x.SitelinkExtension) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ApplyRecommendationOperation_MoveUnusedBudget: + s := proto.Size(x.MoveUnusedBudget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Parameters to use when applying a campaign budget recommendation. +type ApplyRecommendationOperation_CampaignBudgetParameters struct { + // New budget amount to set for target budget resource. This is a required + // field. 
+ NewBudgetAmountMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=new_budget_amount_micros,json=newBudgetAmountMicros,proto3" json:"new_budget_amount_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) Reset() { + *m = ApplyRecommendationOperation_CampaignBudgetParameters{} +} +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_CampaignBudgetParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_CampaignBudgetParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 0} +} +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_CampaignBudgetParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters.Size(m) +} +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_CampaignBudgetParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_CampaignBudgetParameters) GetNewBudgetAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.NewBudgetAmountMicros + } + return nil +} + +// Parameters to use when applying a text ad recommendation. +type ApplyRecommendationOperation_TextAdParameters struct { + // New ad to add to recommended ad group. All necessary fields need to be + // set in this message. This is a required field. 
+ Ad *resources.Ad `protobuf:"bytes,1,opt,name=ad,proto3" json:"ad,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_TextAdParameters) Reset() { + *m = ApplyRecommendationOperation_TextAdParameters{} +} +func (m *ApplyRecommendationOperation_TextAdParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_TextAdParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_TextAdParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 1} +} +func (m *ApplyRecommendationOperation_TextAdParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_TextAdParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_TextAdParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_TextAdParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters.Size(m) +} +func (m *ApplyRecommendationOperation_TextAdParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_TextAdParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_TextAdParameters) GetAd() *resources.Ad { + if m != nil { + return m.Ad + } + return nil +} + +// Parameters to use when applying keyword recommendation. +type ApplyRecommendationOperation_KeywordParameters struct { + // The ad group resource to add keyword to. This is a required field. + AdGroup *wrappers.StringValue `protobuf:"bytes,1,opt,name=ad_group,json=adGroup,proto3" json:"ad_group,omitempty"` + // The match type of the keyword. This is a required field. + MatchType enums.KeywordMatchTypeEnum_KeywordMatchType `protobuf:"varint,2,opt,name=match_type,json=matchType,proto3,enum=google.ads.googleads.v1.enums.KeywordMatchTypeEnum_KeywordMatchType" json:"match_type,omitempty"` + // Optional, CPC bid to set for the keyword. If not set, keyword will use + // bid based on bidding strategy used by target ad group. 
+ CpcBidMicros *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cpc_bid_micros,json=cpcBidMicros,proto3" json:"cpc_bid_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_KeywordParameters) Reset() { + *m = ApplyRecommendationOperation_KeywordParameters{} +} +func (m *ApplyRecommendationOperation_KeywordParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_KeywordParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_KeywordParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 2} +} +func (m *ApplyRecommendationOperation_KeywordParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_KeywordParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_KeywordParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_KeywordParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters.Size(m) +} +func (m *ApplyRecommendationOperation_KeywordParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_KeywordParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_KeywordParameters) GetAdGroup() *wrappers.StringValue { + if m != nil { + return m.AdGroup + } + return nil +} + +func (m *ApplyRecommendationOperation_KeywordParameters) GetMatchType() enums.KeywordMatchTypeEnum_KeywordMatchType { + if m != nil { + return m.MatchType + } + return enums.KeywordMatchTypeEnum_UNSPECIFIED +} + +func (m *ApplyRecommendationOperation_KeywordParameters) GetCpcBidMicros() *wrappers.Int64Value { + if m != nil { + return m.CpcBidMicros + } + return nil +} + +// Parameters to use when applying Target CPA recommendation. +type ApplyRecommendationOperation_TargetCpaOptInParameters struct { + // Average CPA to use for Target CPA bidding strategy. This is a required + // field. + TargetCpaMicros *wrappers.Int64Value `protobuf:"bytes,1,opt,name=target_cpa_micros,json=targetCpaMicros,proto3" json:"target_cpa_micros,omitempty"` + // Optional, budget amount to set for the campaign. 
+ NewCampaignBudgetAmountMicros *wrappers.Int64Value `protobuf:"bytes,2,opt,name=new_campaign_budget_amount_micros,json=newCampaignBudgetAmountMicros,proto3" json:"new_campaign_budget_amount_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) Reset() { + *m = ApplyRecommendationOperation_TargetCpaOptInParameters{} +} +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_TargetCpaOptInParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_TargetCpaOptInParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 3} +} +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_TargetCpaOptInParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters.Size(m) +} +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_TargetCpaOptInParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) GetTargetCpaMicros() *wrappers.Int64Value { + if m != nil { + return m.TargetCpaMicros + } + return nil +} + +func (m *ApplyRecommendationOperation_TargetCpaOptInParameters) GetNewCampaignBudgetAmountMicros() *wrappers.Int64Value { + if m != nil { + return m.NewCampaignBudgetAmountMicros + } + return nil +} + +// Parameters to use when applying callout extension recommendation. +type ApplyRecommendationOperation_CalloutExtensionParameters struct { + // Callout extensions to be added. This is a required field. 
+ CalloutExtensions []*common.CalloutFeedItem `protobuf:"bytes,1,rep,name=callout_extensions,json=calloutExtensions,proto3" json:"callout_extensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) Reset() { + *m = ApplyRecommendationOperation_CalloutExtensionParameters{} +} +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_CalloutExtensionParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_CalloutExtensionParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 4} +} +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_CalloutExtensionParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters.Size(m) +} +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_CalloutExtensionParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_CalloutExtensionParameters) GetCalloutExtensions() []*common.CalloutFeedItem { + if m != nil { + return m.CalloutExtensions + } + return nil +} + +// Parameters to use when applying call extension recommendation. +type ApplyRecommendationOperation_CallExtensionParameters struct { + // Call extensions to be added. This is a required field. 
+ CallExtensions []*common.CallFeedItem `protobuf:"bytes,1,rep,name=call_extensions,json=callExtensions,proto3" json:"call_extensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_CallExtensionParameters) Reset() { + *m = ApplyRecommendationOperation_CallExtensionParameters{} +} +func (m *ApplyRecommendationOperation_CallExtensionParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_CallExtensionParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_CallExtensionParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 5} +} +func (m *ApplyRecommendationOperation_CallExtensionParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_CallExtensionParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_CallExtensionParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_CallExtensionParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters.Size(m) +} +func (m *ApplyRecommendationOperation_CallExtensionParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_CallExtensionParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_CallExtensionParameters) GetCallExtensions() []*common.CallFeedItem { + if m != nil { + return m.CallExtensions + } + return nil +} + +// Parameters to use when applying sitelink extension recommendation. +type ApplyRecommendationOperation_SitelinkExtensionParameters struct { + // Sitelink extensions to be added. This is a required field.
+ SitelinkExtensions []*common.SitelinkFeedItem `protobuf:"bytes,1,rep,name=sitelink_extensions,json=sitelinkExtensions,proto3" json:"sitelink_extensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) Reset() { + *m = ApplyRecommendationOperation_SitelinkExtensionParameters{} +} +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_SitelinkExtensionParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_SitelinkExtensionParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 6} +} +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_SitelinkExtensionParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters.Size(m) +} +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_SitelinkExtensionParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_SitelinkExtensionParameters) GetSitelinkExtensions() []*common.SitelinkFeedItem { + if m != nil { + return m.SitelinkExtensions + } + return nil +} + +// Parameters to use when applying move unused budget recommendation. +type ApplyRecommendationOperation_MoveUnusedBudgetParameters struct { + // Budget amount to move from excess budget to constrained budget. This is + // a required field. 
+ BudgetMicrosToMove *wrappers.Int64Value `protobuf:"bytes,1,opt,name=budget_micros_to_move,json=budgetMicrosToMove,proto3" json:"budget_micros_to_move,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) Reset() { + *m = ApplyRecommendationOperation_MoveUnusedBudgetParameters{} +} +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) String() string { + return proto.CompactTextString(m) +} +func (*ApplyRecommendationOperation_MoveUnusedBudgetParameters) ProtoMessage() {} +func (*ApplyRecommendationOperation_MoveUnusedBudgetParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{2, 7} +} +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters.Unmarshal(m, b) +} +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationOperation_MoveUnusedBudgetParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters.Merge(dst, src) +} +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters.Size(m) +} +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationOperation_MoveUnusedBudgetParameters proto.InternalMessageInfo + +func (m *ApplyRecommendationOperation_MoveUnusedBudgetParameters) GetBudgetMicrosToMove() *wrappers.Int64Value { + if m != nil { + return m.BudgetMicrosToMove + } + return nil +} + +// Response message for [RecommendationService.ApplyRecommendation][google.ads.googleads.v1.services.RecommendationService.ApplyRecommendation]. +type ApplyRecommendationResponse struct { + // Results of operations to apply recommendations. + Results []*ApplyRecommendationResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors) + // we return the RPC level error. 
+ PartialFailureError *status.Status `protobuf:"bytes,2,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationResponse) Reset() { *m = ApplyRecommendationResponse{} } +func (m *ApplyRecommendationResponse) String() string { return proto.CompactTextString(m) } +func (*ApplyRecommendationResponse) ProtoMessage() {} +func (*ApplyRecommendationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{3} +} +func (m *ApplyRecommendationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationResponse.Unmarshal(m, b) +} +func (m *ApplyRecommendationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationResponse.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationResponse.Merge(dst, src) +} +func (m *ApplyRecommendationResponse) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationResponse.Size(m) +} +func (m *ApplyRecommendationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationResponse proto.InternalMessageInfo + +func (m *ApplyRecommendationResponse) GetResults() []*ApplyRecommendationResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *ApplyRecommendationResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +// The result of applying a recommendation. +type ApplyRecommendationResult struct { + // Returned for successful applies. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplyRecommendationResult) Reset() { *m = ApplyRecommendationResult{} } +func (m *ApplyRecommendationResult) String() string { return proto.CompactTextString(m) } +func (*ApplyRecommendationResult) ProtoMessage() {} +func (*ApplyRecommendationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{4} +} +func (m *ApplyRecommendationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplyRecommendationResult.Unmarshal(m, b) +} +func (m *ApplyRecommendationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplyRecommendationResult.Marshal(b, m, deterministic) +} +func (dst *ApplyRecommendationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplyRecommendationResult.Merge(dst, src) +} +func (m *ApplyRecommendationResult) XXX_Size() int { + return xxx_messageInfo_ApplyRecommendationResult.Size(m) +} +func (m *ApplyRecommendationResult) XXX_DiscardUnknown() { + xxx_messageInfo_ApplyRecommendationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplyRecommendationResult proto.InternalMessageInfo + +func (m *ApplyRecommendationResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [RecommendationService.DismissRecommendation][google.ads.googleads.v1.services.RecommendationService.DismissRecommendation]. 
+type DismissRecommendationRequest struct { + // The ID of the customer with the recommendation. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to dismiss recommendations. + // If partial_failure=false all recommendations should be of the same type. + // There is a limit of 100 operations per request. + Operations []*DismissRecommendationRequest_DismissRecommendationOperation `protobuf:"bytes,3,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, operations will be carried out in + // a single transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,2,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DismissRecommendationRequest) Reset() { *m = DismissRecommendationRequest{} } +func (m *DismissRecommendationRequest) String() string { return proto.CompactTextString(m) } +func (*DismissRecommendationRequest) ProtoMessage() {} +func (*DismissRecommendationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{5} +} +func (m *DismissRecommendationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DismissRecommendationRequest.Unmarshal(m, b) +} +func (m *DismissRecommendationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DismissRecommendationRequest.Marshal(b, m, deterministic) +} +func (dst *DismissRecommendationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DismissRecommendationRequest.Merge(dst, src) +} +func (m *DismissRecommendationRequest) XXX_Size() int { + return xxx_messageInfo_DismissRecommendationRequest.Size(m) +} +func (m *DismissRecommendationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DismissRecommendationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DismissRecommendationRequest proto.InternalMessageInfo + +func (m *DismissRecommendationRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *DismissRecommendationRequest) GetOperations() []*DismissRecommendationRequest_DismissRecommendationOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *DismissRecommendationRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +// Operation to dismiss a single recommendation identified by resource_name. +type DismissRecommendationRequest_DismissRecommendationOperation struct { + // The resource name of the recommendation to dismiss.
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DismissRecommendationRequest_DismissRecommendationOperation) Reset() { + *m = DismissRecommendationRequest_DismissRecommendationOperation{} +} +func (m *DismissRecommendationRequest_DismissRecommendationOperation) String() string { + return proto.CompactTextString(m) +} +func (*DismissRecommendationRequest_DismissRecommendationOperation) ProtoMessage() {} +func (*DismissRecommendationRequest_DismissRecommendationOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{5, 0} +} +func (m *DismissRecommendationRequest_DismissRecommendationOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation.Unmarshal(m, b) +} +func (m *DismissRecommendationRequest_DismissRecommendationOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation.Marshal(b, m, deterministic) +} +func (dst *DismissRecommendationRequest_DismissRecommendationOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation.Merge(dst, src) +} +func (m *DismissRecommendationRequest_DismissRecommendationOperation) XXX_Size() int { + return xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation.Size(m) +} +func (m *DismissRecommendationRequest_DismissRecommendationOperation) XXX_DiscardUnknown() { + xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_DismissRecommendationRequest_DismissRecommendationOperation proto.InternalMessageInfo + +func (m *DismissRecommendationRequest_DismissRecommendationOperation) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Response message for [RecommendationService.DismissRecommendation][google.ads.googleads.v1.services.RecommendationService.DismissRecommendation]. +type DismissRecommendationResponse struct { + // Results of operations to dismiss recommendations. + Results []*DismissRecommendationResponse_DismissRecommendationResult `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors) + // we return the RPC level error. 
+ PartialFailureError *status.Status `protobuf:"bytes,2,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DismissRecommendationResponse) Reset() { *m = DismissRecommendationResponse{} } +func (m *DismissRecommendationResponse) String() string { return proto.CompactTextString(m) } +func (*DismissRecommendationResponse) ProtoMessage() {} +func (*DismissRecommendationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{6} +} +func (m *DismissRecommendationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DismissRecommendationResponse.Unmarshal(m, b) +} +func (m *DismissRecommendationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DismissRecommendationResponse.Marshal(b, m, deterministic) +} +func (dst *DismissRecommendationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DismissRecommendationResponse.Merge(dst, src) +} +func (m *DismissRecommendationResponse) XXX_Size() int { + return xxx_messageInfo_DismissRecommendationResponse.Size(m) +} +func (m *DismissRecommendationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DismissRecommendationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DismissRecommendationResponse proto.InternalMessageInfo + +func (m *DismissRecommendationResponse) GetResults() []*DismissRecommendationResponse_DismissRecommendationResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *DismissRecommendationResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +// The result of dismissing a recommendation. +type DismissRecommendationResponse_DismissRecommendationResult struct { + // Returned for successful dismissals. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DismissRecommendationResponse_DismissRecommendationResult) Reset() { + *m = DismissRecommendationResponse_DismissRecommendationResult{} +} +func (m *DismissRecommendationResponse_DismissRecommendationResult) String() string { + return proto.CompactTextString(m) +} +func (*DismissRecommendationResponse_DismissRecommendationResult) ProtoMessage() {} +func (*DismissRecommendationResponse_DismissRecommendationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_recommendation_service_6b12c247ec5a1022, []int{6, 0} +} +func (m *DismissRecommendationResponse_DismissRecommendationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult.Unmarshal(m, b) +} +func (m *DismissRecommendationResponse_DismissRecommendationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult.Marshal(b, m, deterministic) +} +func (dst *DismissRecommendationResponse_DismissRecommendationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult.Merge(dst, src) +} +func (m *DismissRecommendationResponse_DismissRecommendationResult) XXX_Size() int { + return xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult.Size(m) +} +func (m *DismissRecommendationResponse_DismissRecommendationResult) XXX_DiscardUnknown() { + xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_DismissRecommendationResponse_DismissRecommendationResult proto.InternalMessageInfo + +func (m *DismissRecommendationResponse_DismissRecommendationResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetRecommendationRequest)(nil), "google.ads.googleads.v1.services.GetRecommendationRequest") + proto.RegisterType((*ApplyRecommendationRequest)(nil), "google.ads.googleads.v1.services.ApplyRecommendationRequest") + proto.RegisterType((*ApplyRecommendationOperation)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation") + proto.RegisterType((*ApplyRecommendationOperation_CampaignBudgetParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.CampaignBudgetParameters") + proto.RegisterType((*ApplyRecommendationOperation_TextAdParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.TextAdParameters") + proto.RegisterType((*ApplyRecommendationOperation_KeywordParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.KeywordParameters") + proto.RegisterType((*ApplyRecommendationOperation_TargetCpaOptInParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.TargetCpaOptInParameters") + proto.RegisterType((*ApplyRecommendationOperation_CalloutExtensionParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.CalloutExtensionParameters") + proto.RegisterType((*ApplyRecommendationOperation_CallExtensionParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.CallExtensionParameters") + proto.RegisterType((*ApplyRecommendationOperation_SitelinkExtensionParameters)(nil), 
"google.ads.googleads.v1.services.ApplyRecommendationOperation.SitelinkExtensionParameters") + proto.RegisterType((*ApplyRecommendationOperation_MoveUnusedBudgetParameters)(nil), "google.ads.googleads.v1.services.ApplyRecommendationOperation.MoveUnusedBudgetParameters") + proto.RegisterType((*ApplyRecommendationResponse)(nil), "google.ads.googleads.v1.services.ApplyRecommendationResponse") + proto.RegisterType((*ApplyRecommendationResult)(nil), "google.ads.googleads.v1.services.ApplyRecommendationResult") + proto.RegisterType((*DismissRecommendationRequest)(nil), "google.ads.googleads.v1.services.DismissRecommendationRequest") + proto.RegisterType((*DismissRecommendationRequest_DismissRecommendationOperation)(nil), "google.ads.googleads.v1.services.DismissRecommendationRequest.DismissRecommendationOperation") + proto.RegisterType((*DismissRecommendationResponse)(nil), "google.ads.googleads.v1.services.DismissRecommendationResponse") + proto.RegisterType((*DismissRecommendationResponse_DismissRecommendationResult)(nil), "google.ads.googleads.v1.services.DismissRecommendationResponse.DismissRecommendationResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// RecommendationServiceClient is the client API for RecommendationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type RecommendationServiceClient interface { + // Returns the requested recommendation in full detail. + GetRecommendation(ctx context.Context, in *GetRecommendationRequest, opts ...grpc.CallOption) (*resources.Recommendation, error) + // Applies given recommendations with corresponding apply parameters. + ApplyRecommendation(ctx context.Context, in *ApplyRecommendationRequest, opts ...grpc.CallOption) (*ApplyRecommendationResponse, error) + // Dismisses given recommendations. + DismissRecommendation(ctx context.Context, in *DismissRecommendationRequest, opts ...grpc.CallOption) (*DismissRecommendationResponse, error) +} + +type recommendationServiceClient struct { + cc *grpc.ClientConn +} + +func NewRecommendationServiceClient(cc *grpc.ClientConn) RecommendationServiceClient { + return &recommendationServiceClient{cc} +} + +func (c *recommendationServiceClient) GetRecommendation(ctx context.Context, in *GetRecommendationRequest, opts ...grpc.CallOption) (*resources.Recommendation, error) { + out := new(resources.Recommendation) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.RecommendationService/GetRecommendation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *recommendationServiceClient) ApplyRecommendation(ctx context.Context, in *ApplyRecommendationRequest, opts ...grpc.CallOption) (*ApplyRecommendationResponse, error) { + out := new(ApplyRecommendationResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.RecommendationService/ApplyRecommendation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *recommendationServiceClient) DismissRecommendation(ctx context.Context, in *DismissRecommendationRequest, opts ...grpc.CallOption) (*DismissRecommendationResponse, error) { + out := new(DismissRecommendationResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.RecommendationService/DismissRecommendation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RecommendationServiceServer is the server API for RecommendationService service. +type RecommendationServiceServer interface { + // Returns the requested recommendation in full detail. + GetRecommendation(context.Context, *GetRecommendationRequest) (*resources.Recommendation, error) + // Applies given recommendations with corresponding apply parameters. + ApplyRecommendation(context.Context, *ApplyRecommendationRequest) (*ApplyRecommendationResponse, error) + // Dismisses given recommendations. + DismissRecommendation(context.Context, *DismissRecommendationRequest) (*DismissRecommendationResponse, error) +} + +func RegisterRecommendationServiceServer(s *grpc.Server, srv RecommendationServiceServer) { + s.RegisterService(&_RecommendationService_serviceDesc, srv) +} + +func _RecommendationService_GetRecommendation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRecommendationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecommendationServiceServer).GetRecommendation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.RecommendationService/GetRecommendation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecommendationServiceServer).GetRecommendation(ctx, req.(*GetRecommendationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecommendationService_ApplyRecommendation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ApplyRecommendationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecommendationServiceServer).ApplyRecommendation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.RecommendationService/ApplyRecommendation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecommendationServiceServer).ApplyRecommendation(ctx, req.(*ApplyRecommendationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecommendationService_DismissRecommendation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DismissRecommendationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecommendationServiceServer).DismissRecommendation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.RecommendationService/DismissRecommendation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecommendationServiceServer).DismissRecommendation(ctx, req.(*DismissRecommendationRequest)) + } + return interceptor(ctx, in, info, handler) 
+} + +var _RecommendationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.RecommendationService", + HandlerType: (*RecommendationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetRecommendation", + Handler: _RecommendationService_GetRecommendation_Handler, + }, + { + MethodName: "ApplyRecommendation", + Handler: _RecommendationService_ApplyRecommendation_Handler, + }, + { + MethodName: "DismissRecommendation", + Handler: _RecommendationService_DismissRecommendation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/recommendation_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/recommendation_service.proto", fileDescriptor_recommendation_service_6b12c247ec5a1022) +} + +var fileDescriptor_recommendation_service_6b12c247ec5a1022 = []byte{ + // 1333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xc1, 0x73, 0x14, 0x45, + 0x17, 0xff, 0x66, 0xf2, 0x41, 0xe0, 0x05, 0x42, 0xb6, 0xa9, 0x14, 0xfb, 0x4d, 0x02, 0x5f, 0x5c, + 0xb1, 0x4c, 0xa5, 0xac, 0x19, 0x37, 0x08, 0xea, 0x60, 0x80, 0x0d, 0x84, 0x90, 0xb2, 0x80, 0xd4, + 0x24, 0xc4, 0x12, 0x23, 0x53, 0x9d, 0x99, 0x66, 0x1d, 0xd9, 0x99, 0x1e, 0xbb, 0x7b, 0x12, 0x52, + 0x48, 0xa9, 0x94, 0x07, 0x29, 0x8f, 0xfe, 0x07, 0x1c, 0x3d, 0xf8, 0x17, 0xe8, 0x51, 0x0f, 0x1c, + 0xf5, 0xe6, 0xd9, 0x93, 0x67, 0xff, 0x00, 0x6b, 0x7a, 0xa6, 0x97, 0xcc, 0xee, 0x4e, 0x76, 0xcd, + 0xe2, 0xad, 0xb7, 0x5f, 0xbf, 0xdf, 0xef, 0xbd, 0xd7, 0xaf, 0x7f, 0xdd, 0xb3, 0xb0, 0xd0, 0xa4, + 0xb4, 0xd9, 0x22, 0x16, 0xf6, 0xb9, 0x95, 0x0d, 0xd3, 0xd1, 0x76, 0xdd, 0xe2, 0x84, 0x6d, 0x07, + 0x1e, 0xe1, 0x16, 0x23, 0x1e, 0x0d, 0x43, 0x12, 0xf9, 0x58, 0x04, 0x34, 0x72, 0xf3, 0x79, 0x33, + 0x66, 0x54, 0x50, 0x34, 0x93, 0xf9, 0x98, 0xd8, 0xe7, 0x66, 0xdb, 0xdd, 0xdc, 0xae, 0x9b, 0xca, + 0xdd, 0xb0, 0xca, 0x08, 0x52, 0x54, 0x1a, 0x59, 0xe4, 0xa1, 0x20, 0x11, 0x0f, 0x68, 0xc4, 0x33, + 0x48, 0xe3, 0x42, 0x99, 0x03, 0x89, 0x92, 0x90, 0x5b, 0x0f, 0xc8, 0xee, 0x0e, 0x65, 0xbe, 0x1b, + 0x62, 0xe1, 0x7d, 0xe2, 0x8a, 0xdd, 0x38, 0x0f, 0xc5, 0x98, 0x2b, 0xf3, 0x63, 0x84, 0xd3, 0x84, + 0xa5, 0xa9, 0x60, 0xbf, 0x1f, 0xc7, 0x8b, 0xb5, 0xc5, 0xb4, 0x73, 0xbf, 0x69, 0xe5, 0x17, 0x07, + 0x16, 0x8e, 0x22, 0x2a, 0xa4, 0x51, 0x45, 0x7e, 0x26, 0xb7, 0xca, 0x5f, 0x5b, 0xc9, 0x7d, 0x6b, + 0x87, 0xe1, 0x38, 0x26, 0x4c, 0xd9, 0x4f, 0xe5, 0x76, 0x16, 0x7b, 0x16, 0x17, 0x58, 0x24, 0xb9, + 0xa1, 0x76, 0x19, 0xaa, 0xcb, 0x44, 0x38, 0x05, 0x46, 0x87, 0x7c, 0x96, 0x10, 0x2e, 0xd0, 0xab, + 0x70, 0x5c, 0x05, 0xe5, 0x46, 0x38, 0x24, 0x55, 0x6d, 0x46, 0x9b, 0x3d, 0xea, 0x1c, 0x53, 0x93, + 0xb7, 0x70, 0x48, 0x6a, 0xbf, 0x68, 0x60, 0x34, 0xe2, 0xb8, 0xb5, 0xdb, 0x1b, 0xe3, 0xff, 0x30, + 0xe6, 0x25, 0x5c, 0xd0, 0x90, 0x30, 0x37, 0xf0, 0x73, 0x04, 0x50, 0x53, 0x2b, 0x3e, 0xba, 0x07, + 0x40, 0x63, 0xc2, 0xb2, 0x6c, 0xaa, 0xfa, 0xcc, 0xc8, 0xec, 0xd8, 0xfc, 0x25, 0xb3, 0xdf, 0xde, + 0x9a, 0x3d, 0x28, 0x6f, 0x2b, 0x18, 0x67, 0x0f, 0x22, 0x7a, 0x1d, 0x4e, 0xc4, 0x98, 0x89, 0x00, + 0xb7, 0xdc, 0xfb, 0x38, 0x68, 0x25, 0x8c, 0x54, 0x47, 0x66, 0xb4, 0xd9, 0x23, 0xce, 0x78, 0x3e, + 0x7d, 0x3d, 0x9b, 0xad, 0xfd, 0x5c, 0x81, 0xe9, 0xfd, 0x50, 0x07, 0x2a, 0x07, 0x7a, 0xa2, 0xc1, + 0x09, 0x0f, 0x87, 0x31, 0x0e, 0x9a, 0x91, 0xbb, 0x95, 0xf8, 0x4d, 0x22, 0xaa, 0xfa, 0x8c, 0x36, + 0x3b, 0x36, 0xff, 0xc1, 0x70, 0x49, 0x99, 0x57, 0x73, 0xd4, 0x45, 0x09, 0xba, 0x8a, 0x19, 0x0e, + 0x89, 0x20, 0x8c, 0xdf, 0xf8, 0x8f, 0x33, 0xee, 0x15, 
0x6c, 0xe8, 0x53, 0x18, 0x15, 0xe4, 0xa1, + 0x70, 0xb1, 0x2f, 0x73, 0x1d, 0x9b, 0xbf, 0x3d, 0x24, 0xf7, 0x3a, 0x79, 0x28, 0x1a, 0x7e, 0x81, + 0xf3, 0xb0, 0x90, 0x73, 0xa8, 0x05, 0xa3, 0xf9, 0xb9, 0xa8, 0xfe, 0x57, 0x72, 0xad, 0x0e, 0xc9, + 0xf5, 0x7e, 0x86, 0x56, 0x20, 0x53, 0x14, 0xe8, 0x6b, 0x0d, 0x2a, 0x02, 0xb3, 0x26, 0x11, 0xae, + 0x17, 0x63, 0x97, 0xc6, 0xc2, 0x0d, 0xa2, 0xea, 0xa1, 0x97, 0x52, 0xe0, 0x75, 0x89, 0x7b, 0x35, + 0xc6, 0xb7, 0x63, 0xb1, 0x12, 0x15, 0x0b, 0x2c, 0x0a, 0x36, 0xf4, 0x8d, 0x06, 0x15, 0x0f, 0xb7, + 0x5a, 0x34, 0x11, 0x6e, 0x5b, 0x45, 0xaa, 0x87, 0x65, 0x18, 0x1f, 0x0e, 0xbd, 0xcf, 0x12, 0x77, + 0x49, 0xc1, 0x16, 0x02, 0x99, 0xf0, 0x3a, 0xac, 0xe8, 0x0b, 0x18, 0x4f, 0xe7, 0xf6, 0x84, 0x31, + 0x2a, 0xc3, 0xd8, 0x78, 0x09, 0x61, 0xf4, 0x8e, 0xe1, 0xb8, 0xb7, 0xd7, 0x84, 0xbe, 0xd5, 0x00, + 0xf1, 0x40, 0x90, 0x56, 0x10, 0x3d, 0xd8, 0x13, 0xc5, 0x11, 0x19, 0xc5, 0xdd, 0x21, 0xa3, 0x58, + 0xcb, 0x81, 0x7b, 0x47, 0x52, 0xe1, 0x9d, 0x66, 0xf4, 0x54, 0x03, 0x14, 0xd2, 0x6d, 0xe2, 0x26, + 0x51, 0xc2, 0x89, 0xaf, 0x8e, 0xe0, 0xd1, 0x97, 0xb2, 0x35, 0x37, 0xe9, 0x36, 0xb9, 0x23, 0x71, + 0x7b, 0x1c, 0xc2, 0x89, 0xb0, 0xc3, 0x6a, 0xc4, 0x50, 0x2d, 0x3b, 0xb4, 0x68, 0x1d, 0xaa, 0x11, + 0xd9, 0xc9, 0xc3, 0x73, 0x71, 0x48, 0x93, 0x48, 0xb8, 0x61, 0xe0, 0x31, 0xca, 0xa5, 0xae, 0x8c, + 0xcd, 0x4f, 0xa9, 0x60, 0x95, 0xa6, 0x9b, 0x2b, 0x91, 0xb8, 0xf0, 0xd6, 0x06, 0x6e, 0x25, 0xc4, + 0x99, 0x8c, 0xc8, 0x4e, 0x86, 0xd9, 0x90, 0xae, 0x37, 0xa5, 0xa7, 0xb1, 0x02, 0x13, 0x9d, 0x47, + 0x15, 0x9d, 0x07, 0x1d, 0xfb, 0x39, 0xe6, 0x6b, 0xa5, 0x05, 0x68, 0xdf, 0x3e, 0x66, 0xc3, 0x77, + 0x74, 0xec, 0x1b, 0x7f, 0x69, 0x50, 0xe9, 0x3a, 0x8a, 0xe8, 0x6d, 0x38, 0x82, 0x7d, 0xb7, 0xc9, + 0x68, 0x12, 0xe7, 0x90, 0xd3, 0x5d, 0x61, 0xae, 0x09, 0x16, 0x44, 0xcd, 0x2c, 0xce, 0x51, 0xec, + 0x2f, 0xa7, 0x8b, 0x91, 0x07, 0xf0, 0xe2, 0xda, 0x94, 0x8a, 0x38, 0x3e, 0x7f, 0xad, 0x34, 0x1a, + 0x79, 0xdf, 0x2a, 0x25, 0xb8, 0x99, 0xfa, 0xad, 0xef, 0xc6, 0x64, 0x29, 0x4a, 0xc2, 0xae, 0x49, + 0xe7, 0x68, 0xa8, 0x86, 0xa8, 0x01, 0xe3, 0x5e, 0xec, 0xb9, 0x5b, 0x81, 0xaf, 0x4a, 0x39, 0xd2, + 0xbf, 0x94, 0xc7, 0xbc, 0xd8, 0x5b, 0x0c, 0xfc, 0xbc, 0x82, 0xcf, 0x35, 0xa8, 0x96, 0x09, 0x01, + 0x5a, 0x2e, 0x88, 0xcf, 0xe0, 0xbb, 0x75, 0xa2, 0x2d, 0x1f, 0x19, 0x0b, 0x22, 0xf0, 0x4a, 0xba, + 0xfb, 0x1d, 0x17, 0x45, 0x47, 0x1b, 0xe8, 0xfd, 0x81, 0x4f, 0x47, 0x64, 0xa7, 0xd8, 0x62, 0x85, + 0x76, 0xf8, 0x1c, 0x8c, 0x72, 0x35, 0x41, 0xf7, 0x00, 0x75, 0x69, 0x58, 0x9a, 0x4e, 0x7a, 0x03, + 0x5b, 0xa5, 0x5b, 0x93, 0xbd, 0x9d, 0x94, 0x4a, 0x5d, 0x27, 0xc4, 0x5f, 0x11, 0x24, 0x74, 0x2a, + 0x9d, 0xc2, 0xc4, 0x8d, 0x18, 0x4e, 0x95, 0x88, 0x08, 0xba, 0x93, 0x5e, 0x92, 0x7b, 0x45, 0x4b, + 0xf1, 0xbe, 0x31, 0x08, 0x6f, 0x9b, 0x74, 0xbc, 0xa0, 0x44, 0xdc, 0xf8, 0x52, 0x83, 0xa9, 0x7d, + 0x14, 0x03, 0x61, 0x38, 0xd9, 0xad, 0x54, 0x8a, 0xfa, 0xcd, 0x7e, 0xd4, 0x0a, 0xb9, 0x4d, 0x8f, + 0xba, 0xe4, 0x87, 0x1b, 0x2d, 0x30, 0xca, 0x55, 0x02, 0xdd, 0x82, 0xc9, 0x7c, 0xab, 0xb3, 0x3d, + 0x76, 0x05, 0x75, 0x53, 0xd9, 0x18, 0xa4, 0x89, 0x50, 0xe6, 0x99, 0x6d, 0xed, 0x3a, 0x4d, 0x59, + 0x16, 0x11, 0x4c, 0xe0, 0x54, 0xb0, 0xdc, 0xb8, 0xcd, 0x51, 0xfb, 0x51, 0x83, 0xa9, 0x9e, 0x0f, + 0x32, 0x1e, 0xd3, 0x88, 0x13, 0x74, 0x07, 0x46, 0x19, 0xe1, 0x49, 0x4b, 0xa8, 0xc4, 0x2f, 0x1e, + 0x48, 0x15, 0x1d, 0x89, 0xe1, 0x28, 0x2c, 0x74, 0x1d, 0x26, 0x3b, 0xde, 0x59, 0x2e, 0x61, 0x8c, + 0xb2, 0xbc, 0x8d, 0x91, 0x22, 0x61, 0xb1, 0x67, 0xae, 0xc9, 0x17, 0xa8, 0x73, 0xb2, 0xf8, 0x02, + 0x5b, 0x4a, 0x97, 0xd7, 0xae, 0xc0, 0xff, 0x4a, 0xd9, 0x06, 0x7b, 0x91, 0xfe, 
0xa0, 0xc3, 0xf4, + 0xb5, 0x80, 0x87, 0x01, 0xe7, 0x07, 0x7c, 0x93, 0x3e, 0x2e, 0xbc, 0x49, 0x47, 0x64, 0x95, 0x3e, + 0xee, 0x5f, 0xa5, 0xfd, 0x48, 0x7b, 0x1b, 0x07, 0x7e, 0xb2, 0xea, 0xbd, 0x9e, 0xac, 0xc6, 0x12, + 0x9c, 0xd9, 0x1f, 0x76, 0xb0, 0x82, 0x3d, 0xd3, 0xe1, 0x74, 0x49, 0xec, 0x79, 0xcf, 0x24, 0x9d, + 0x3d, 0xf3, 0xd1, 0x81, 0xab, 0x91, 0x21, 0x96, 0x5a, 0xff, 0x8d, 0x9e, 0x32, 0x16, 0x61, 0x6a, + 0x1f, 0xbe, 0x81, 0x8a, 0x34, 0xff, 0xf4, 0x10, 0x4c, 0x16, 0xbd, 0xd7, 0xb2, 0x4c, 0xd1, 0x4f, + 0x1a, 0x54, 0xba, 0xbe, 0xa1, 0x90, 0xdd, 0xbf, 0x42, 0x65, 0x1f, 0x5e, 0x46, 0x7d, 0x80, 0x6b, + 0xba, 0xe8, 0x59, 0x7b, 0xf7, 0xc9, 0x6f, 0x7f, 0x7c, 0xa7, 0x9f, 0x43, 0xf5, 0xf4, 0x53, 0xf2, + 0x51, 0x21, 0x9d, 0x05, 0xd5, 0xd9, 0xdc, 0x9a, 0xeb, 0xf8, 0xb6, 0xe4, 0xd6, 0xdc, 0x63, 0xf4, + 0xab, 0x06, 0x27, 0x7b, 0x1c, 0x39, 0xf4, 0xde, 0x01, 0x75, 0x21, 0xcb, 0x61, 0xe1, 0xa0, 0xaa, + 0x22, 0xfb, 0xa3, 0x76, 0x49, 0xe6, 0xf3, 0x4e, 0xed, 0x9c, 0xfc, 0x5e, 0x6f, 0x27, 0xf0, 0x68, + 0xcf, 0xc1, 0x5d, 0x98, 0x7b, 0xdc, 0x99, 0x8e, 0x2d, 0xd5, 0xd0, 0xd6, 0xe6, 0xd0, 0xef, 0x1a, + 0x4c, 0xf6, 0xdc, 0x73, 0x74, 0x69, 0xb8, 0x83, 0x6c, 0x5c, 0x1e, 0xb2, 0xf5, 0x6b, 0x57, 0x64, + 0x6a, 0x76, 0xed, 0xfc, 0x3f, 0x4b, 0xcd, 0xcf, 0x40, 0x6d, 0x6d, 0x6e, 0xf1, 0x2b, 0x1d, 0xce, + 0x7a, 0x34, 0xec, 0x1b, 0xc8, 0xa2, 0xd1, 0xb3, 0x63, 0x57, 0xd3, 0xdb, 0x65, 0x55, 0xbb, 0x7b, + 0x23, 0xf7, 0x6f, 0xd2, 0x16, 0x8e, 0x9a, 0x26, 0x65, 0x4d, 0xab, 0x49, 0x22, 0x79, 0xf7, 0xa8, + 0xbf, 0x26, 0xe2, 0x80, 0x97, 0xff, 0x3f, 0x73, 0x51, 0x0d, 0x9e, 0xe9, 0x23, 0xcb, 0x8d, 0xc6, + 0xf7, 0xfa, 0xcc, 0x72, 0x06, 0xd8, 0xf0, 0xb9, 0x99, 0x0d, 0xd3, 0xd1, 0x46, 0xdd, 0xcc, 0x89, + 0xf9, 0x73, 0xb5, 0x64, 0xb3, 0xe1, 0xf3, 0xcd, 0xf6, 0x92, 0xcd, 0x8d, 0xfa, 0xa6, 0x5a, 0xf2, + 0xa7, 0x7e, 0x36, 0x9b, 0xb7, 0xed, 0x86, 0xcf, 0x6d, 0xbb, 0xbd, 0xc8, 0xb6, 0x37, 0xea, 0xb6, + 0xad, 0x96, 0x6d, 0x1d, 0x96, 0x71, 0x9e, 0xfb, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xb3, 0x73, 0x91, + 0x69, 0x46, 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/remarketing_action_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/remarketing_action_service.pb.go new file mode 100644 index 0000000..1ecdcda --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/remarketing_action_service.pb.go @@ -0,0 +1,559 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/remarketing_action_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [RemarketingActionService.GetRemarketingAction][google.ads.googleads.v1.services.RemarketingActionService.GetRemarketingAction]. +type GetRemarketingActionRequest struct { + // The resource name of the remarketing action to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRemarketingActionRequest) Reset() { *m = GetRemarketingActionRequest{} } +func (m *GetRemarketingActionRequest) String() string { return proto.CompactTextString(m) } +func (*GetRemarketingActionRequest) ProtoMessage() {} +func (*GetRemarketingActionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remarketing_action_service_3bbcd94af93bf94e, []int{0} +} +func (m *GetRemarketingActionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRemarketingActionRequest.Unmarshal(m, b) +} +func (m *GetRemarketingActionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRemarketingActionRequest.Marshal(b, m, deterministic) +} +func (dst *GetRemarketingActionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRemarketingActionRequest.Merge(dst, src) +} +func (m *GetRemarketingActionRequest) XXX_Size() int { + return xxx_messageInfo_GetRemarketingActionRequest.Size(m) +} +func (m *GetRemarketingActionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRemarketingActionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRemarketingActionRequest proto.InternalMessageInfo + +func (m *GetRemarketingActionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [RemarketingActionService.MutateRemarketingActions][google.ads.googleads.v1.services.RemarketingActionService.MutateRemarketingActions]. +type MutateRemarketingActionsRequest struct { + // The ID of the customer whose remarketing actions are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual remarketing actions. + Operations []*RemarketingActionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRemarketingActionsRequest) Reset() { *m = MutateRemarketingActionsRequest{} } +func (m *MutateRemarketingActionsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateRemarketingActionsRequest) ProtoMessage() {} +func (*MutateRemarketingActionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remarketing_action_service_3bbcd94af93bf94e, []int{1} +} +func (m *MutateRemarketingActionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRemarketingActionsRequest.Unmarshal(m, b) +} +func (m *MutateRemarketingActionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRemarketingActionsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateRemarketingActionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRemarketingActionsRequest.Merge(dst, src) +} +func (m *MutateRemarketingActionsRequest) XXX_Size() int { + return xxx_messageInfo_MutateRemarketingActionsRequest.Size(m) +} +func (m *MutateRemarketingActionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRemarketingActionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRemarketingActionsRequest proto.InternalMessageInfo + +func (m *MutateRemarketingActionsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateRemarketingActionsRequest) GetOperations() []*RemarketingActionOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateRemarketingActionsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateRemarketingActionsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update) on a remarketing action. +type RemarketingActionOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *RemarketingActionOperation_Create + // *RemarketingActionOperation_Update + Operation isRemarketingActionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemarketingActionOperation) Reset() { *m = RemarketingActionOperation{} } +func (m *RemarketingActionOperation) String() string { return proto.CompactTextString(m) } +func (*RemarketingActionOperation) ProtoMessage() {} +func (*RemarketingActionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_remarketing_action_service_3bbcd94af93bf94e, []int{2} +} +func (m *RemarketingActionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemarketingActionOperation.Unmarshal(m, b) +} +func (m *RemarketingActionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemarketingActionOperation.Marshal(b, m, deterministic) +} +func (dst *RemarketingActionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemarketingActionOperation.Merge(dst, src) +} +func (m *RemarketingActionOperation) XXX_Size() int { + return xxx_messageInfo_RemarketingActionOperation.Size(m) +} +func (m *RemarketingActionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_RemarketingActionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_RemarketingActionOperation proto.InternalMessageInfo + +func (m *RemarketingActionOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isRemarketingActionOperation_Operation interface { + isRemarketingActionOperation_Operation() +} + +type RemarketingActionOperation_Create struct { + Create *resources.RemarketingAction `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type RemarketingActionOperation_Update struct { + Update *resources.RemarketingAction `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +func (*RemarketingActionOperation_Create) isRemarketingActionOperation_Operation() {} + +func (*RemarketingActionOperation_Update) isRemarketingActionOperation_Operation() {} + +func (m *RemarketingActionOperation) GetOperation() isRemarketingActionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *RemarketingActionOperation) GetCreate() *resources.RemarketingAction { + if x, ok := m.GetOperation().(*RemarketingActionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *RemarketingActionOperation) GetUpdate() *resources.RemarketingAction { + if x, ok := m.GetOperation().(*RemarketingActionOperation_Update); ok { + return x.Update + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RemarketingActionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RemarketingActionOperation_OneofMarshaler, _RemarketingActionOperation_OneofUnmarshaler, _RemarketingActionOperation_OneofSizer, []interface{}{ + (*RemarketingActionOperation_Create)(nil), + (*RemarketingActionOperation_Update)(nil), + } +} + +func _RemarketingActionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RemarketingActionOperation) + // operation + switch x := m.Operation.(type) { + case *RemarketingActionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *RemarketingActionOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RemarketingActionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _RemarketingActionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RemarketingActionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.RemarketingAction) + err := b.DecodeMessage(msg) + m.Operation = &RemarketingActionOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.RemarketingAction) + err := b.DecodeMessage(msg) + m.Operation = &RemarketingActionOperation_Update{msg} + return true, err + default: + return false, nil + } +} + +func _RemarketingActionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RemarketingActionOperation) + // operation + switch x := m.Operation.(type) { + case *RemarketingActionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RemarketingActionOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for remarketing action mutate. +type MutateRemarketingActionsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateRemarketingActionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRemarketingActionsResponse) Reset() { *m = MutateRemarketingActionsResponse{} } +func (m *MutateRemarketingActionsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateRemarketingActionsResponse) ProtoMessage() {} +func (*MutateRemarketingActionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_remarketing_action_service_3bbcd94af93bf94e, []int{3} +} +func (m *MutateRemarketingActionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRemarketingActionsResponse.Unmarshal(m, b) +} +func (m *MutateRemarketingActionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRemarketingActionsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateRemarketingActionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRemarketingActionsResponse.Merge(dst, src) +} +func (m *MutateRemarketingActionsResponse) XXX_Size() int { + return xxx_messageInfo_MutateRemarketingActionsResponse.Size(m) +} +func (m *MutateRemarketingActionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRemarketingActionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRemarketingActionsResponse proto.InternalMessageInfo + +func (m *MutateRemarketingActionsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateRemarketingActionsResponse) GetResults() []*MutateRemarketingActionResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the remarketing action mutate. +type MutateRemarketingActionResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRemarketingActionResult) Reset() { *m = MutateRemarketingActionResult{} } +func (m *MutateRemarketingActionResult) String() string { return proto.CompactTextString(m) } +func (*MutateRemarketingActionResult) ProtoMessage() {} +func (*MutateRemarketingActionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_remarketing_action_service_3bbcd94af93bf94e, []int{4} +} +func (m *MutateRemarketingActionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRemarketingActionResult.Unmarshal(m, b) +} +func (m *MutateRemarketingActionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRemarketingActionResult.Marshal(b, m, deterministic) +} +func (dst *MutateRemarketingActionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRemarketingActionResult.Merge(dst, src) +} +func (m *MutateRemarketingActionResult) XXX_Size() int { + return xxx_messageInfo_MutateRemarketingActionResult.Size(m) +} +func (m *MutateRemarketingActionResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRemarketingActionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRemarketingActionResult proto.InternalMessageInfo + +func (m *MutateRemarketingActionResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetRemarketingActionRequest)(nil), "google.ads.googleads.v1.services.GetRemarketingActionRequest") + proto.RegisterType((*MutateRemarketingActionsRequest)(nil), "google.ads.googleads.v1.services.MutateRemarketingActionsRequest") + proto.RegisterType((*RemarketingActionOperation)(nil), "google.ads.googleads.v1.services.RemarketingActionOperation") + proto.RegisterType((*MutateRemarketingActionsResponse)(nil), "google.ads.googleads.v1.services.MutateRemarketingActionsResponse") + proto.RegisterType((*MutateRemarketingActionResult)(nil), "google.ads.googleads.v1.services.MutateRemarketingActionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// RemarketingActionServiceClient is the client API for RemarketingActionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type RemarketingActionServiceClient interface { + // Returns the requested remarketing action in full detail. + GetRemarketingAction(ctx context.Context, in *GetRemarketingActionRequest, opts ...grpc.CallOption) (*resources.RemarketingAction, error) + // Creates or updates remarketing actions. Operation statuses are returned. 
+ MutateRemarketingActions(ctx context.Context, in *MutateRemarketingActionsRequest, opts ...grpc.CallOption) (*MutateRemarketingActionsResponse, error) +} + +type remarketingActionServiceClient struct { + cc *grpc.ClientConn +} + +func NewRemarketingActionServiceClient(cc *grpc.ClientConn) RemarketingActionServiceClient { + return &remarketingActionServiceClient{cc} +} + +func (c *remarketingActionServiceClient) GetRemarketingAction(ctx context.Context, in *GetRemarketingActionRequest, opts ...grpc.CallOption) (*resources.RemarketingAction, error) { + out := new(resources.RemarketingAction) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.RemarketingActionService/GetRemarketingAction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *remarketingActionServiceClient) MutateRemarketingActions(ctx context.Context, in *MutateRemarketingActionsRequest, opts ...grpc.CallOption) (*MutateRemarketingActionsResponse, error) { + out := new(MutateRemarketingActionsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.RemarketingActionService/MutateRemarketingActions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RemarketingActionServiceServer is the server API for RemarketingActionService service. +type RemarketingActionServiceServer interface { + // Returns the requested remarketing action in full detail. + GetRemarketingAction(context.Context, *GetRemarketingActionRequest) (*resources.RemarketingAction, error) + // Creates or updates remarketing actions. Operation statuses are returned. + MutateRemarketingActions(context.Context, *MutateRemarketingActionsRequest) (*MutateRemarketingActionsResponse, error) +} + +func RegisterRemarketingActionServiceServer(s *grpc.Server, srv RemarketingActionServiceServer) { + s.RegisterService(&_RemarketingActionService_serviceDesc, srv) +} + +func _RemarketingActionService_GetRemarketingAction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRemarketingActionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RemarketingActionServiceServer).GetRemarketingAction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.RemarketingActionService/GetRemarketingAction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RemarketingActionServiceServer).GetRemarketingAction(ctx, req.(*GetRemarketingActionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RemarketingActionService_MutateRemarketingActions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateRemarketingActionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RemarketingActionServiceServer).MutateRemarketingActions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.RemarketingActionService/MutateRemarketingActions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RemarketingActionServiceServer).MutateRemarketingActions(ctx, req.(*MutateRemarketingActionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _RemarketingActionService_serviceDesc = 
grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.RemarketingActionService", + HandlerType: (*RemarketingActionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetRemarketingAction", + Handler: _RemarketingActionService_GetRemarketingAction_Handler, + }, + { + MethodName: "MutateRemarketingActions", + Handler: _RemarketingActionService_MutateRemarketingActions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/remarketing_action_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/remarketing_action_service.proto", fileDescriptor_remarketing_action_service_3bbcd94af93bf94e) +} + +var fileDescriptor_remarketing_action_service_3bbcd94af93bf94e = []byte{ + // 705 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0xcf, 0x6a, 0xd4, 0x40, + 0x1c, 0xc7, 0x4d, 0x56, 0xaa, 0x9d, 0x54, 0x85, 0x51, 0x31, 0xac, 0xd6, 0x2e, 0xb1, 0x60, 0xd9, + 0x43, 0xc2, 0xae, 0x45, 0x21, 0x6d, 0x91, 0x2c, 0xda, 0xd6, 0x43, 0xff, 0x90, 0x42, 0x41, 0x59, + 0x08, 0xd3, 0x64, 0x1a, 0x42, 0x93, 0x4c, 0x9c, 0x99, 0xac, 0x94, 0xd2, 0x8b, 0x88, 0x2f, 0xe0, + 0x1b, 0x78, 0xf4, 0x3d, 0x3c, 0xe8, 0xc1, 0x8b, 0xaf, 0xa0, 0x17, 0x0f, 0x3e, 0x83, 0x24, 0x93, + 0xd9, 0xfe, 0xd9, 0xa6, 0x2b, 0xed, 0x6d, 0xf6, 0x37, 0xdf, 0xfd, 0xfc, 0xfe, 0x7c, 0x67, 0x26, + 0xc0, 0x09, 0x09, 0x09, 0x63, 0x6c, 0xa1, 0x80, 0x59, 0x62, 0x59, 0xac, 0x06, 0x1d, 0x8b, 0x61, + 0x3a, 0x88, 0x7c, 0xcc, 0x2c, 0x8a, 0x13, 0x44, 0xf7, 0x30, 0x8f, 0xd2, 0xd0, 0x43, 0x3e, 0x8f, + 0x48, 0xea, 0x55, 0x7b, 0x66, 0x46, 0x09, 0x27, 0xb0, 0x25, 0xfe, 0x67, 0xa2, 0x80, 0x99, 0x43, + 0x84, 0x39, 0xe8, 0x98, 0x12, 0xd1, 0xb4, 0xeb, 0x92, 0x50, 0xcc, 0x48, 0x4e, 0xcf, 0xce, 0x22, + 0xe8, 0xcd, 0x07, 0xf2, 0xbf, 0x59, 0x64, 0xa1, 0x34, 0x25, 0x1c, 0x15, 0x9b, 0xac, 0xda, 0xad, + 0x72, 0x5b, 0xe5, 0xaf, 0x9d, 0x7c, 0xd7, 0xda, 0x8d, 0x70, 0x1c, 0x78, 0x09, 0x62, 0x7b, 0x95, + 0xe2, 0xe1, 0x69, 0xc5, 0x3b, 0x8a, 0xb2, 0x0c, 0x53, 0x49, 0xb8, 0x57, 0xed, 0xd3, 0xcc, 0xb7, + 0x18, 0x47, 0x3c, 0xaf, 0x36, 0x8c, 0x1e, 0xb8, 0xbf, 0x82, 0xb9, 0x7b, 0x54, 0x97, 0x53, 0x96, + 0xe5, 0xe2, 0xb7, 0x39, 0x66, 0x1c, 0x3e, 0x02, 0x37, 0x64, 0xf5, 0x5e, 0x8a, 0x12, 0xac, 0x2b, + 0x2d, 0x65, 0x6e, 0xd2, 0x9d, 0x92, 0xc1, 0x75, 0x94, 0x60, 0xe3, 0xaf, 0x02, 0x66, 0xd6, 0x72, + 0x8e, 0x38, 0x1e, 0xe1, 0x30, 0x09, 0x9a, 0x01, 0x9a, 0x9f, 0x33, 0x4e, 0x12, 0x4c, 0xbd, 0x28, + 0xa8, 0x30, 0x40, 0x86, 0x5e, 0x05, 0xb0, 0x0f, 0x00, 0xc9, 0x30, 0x15, 0x7d, 0xeb, 0x6a, 0xab, + 0x31, 0xa7, 0x75, 0x17, 0xcd, 0x71, 0x43, 0x37, 0x47, 0x32, 0x6e, 0x48, 0x88, 0x7b, 0x8c, 0x07, + 0x1f, 0x83, 0x5b, 0x19, 0xa2, 0x3c, 0x42, 0xb1, 0xb7, 0x8b, 0xa2, 0x38, 0xa7, 0x58, 0x6f, 0xb4, + 0x94, 0xb9, 0xeb, 0xee, 0xcd, 0x2a, 0xbc, 0x2c, 0xa2, 0x45, 0xc3, 0x03, 0x14, 0x47, 0x01, 0xe2, + 0xd8, 0x23, 0x69, 0xbc, 0xaf, 0x5f, 0x2d, 0x65, 0x53, 0x32, 0xb8, 0x91, 0xc6, 0xfb, 0xc6, 0x47, + 0x15, 0x34, 0xeb, 0x13, 0xc3, 0x05, 0xa0, 0xe5, 0x59, 0x49, 0x28, 0x1c, 0x2a, 0x09, 0x5a, 0xb7, + 0x29, 0x7b, 0x91, 0x16, 0x99, 0xcb, 0x85, 0x89, 0x6b, 0x88, 0xed, 0xb9, 0x40, 0xc8, 0x8b, 0x35, + 0x5c, 0x07, 0x13, 0x3e, 0xc5, 0x88, 0x8b, 0x51, 0x6b, 0xdd, 0xf9, 0xda, 0x19, 0x0c, 0x8f, 0xd5, + 0xe8, 0x10, 0x56, 0xaf, 0xb8, 0x15, 0xa5, 0xe0, 0x09, 0xba, 0xae, 0x5e, 0x8e, 0x27, 0x28, 0x3d, + 0x0d, 0x4c, 0x0e, 0xe7, 0x6a, 0x7c, 0x55, 0x40, 0xab, 0xde, 0x79, 0x96, 0x91, 0x94, 0x61, 0xb8, + 0x0c, 0xee, 0x9e, 0x9a, 0xbd, 0x87, 0x29, 0x25, 0xb4, 0x74, 0x40, 0xeb, 
0x42, 0x59, 0x10, 0xcd, + 0x7c, 0x73, 0xab, 0x3c, 0x9b, 0xee, 0xed, 0x93, 0xae, 0xbc, 0x2c, 0xe4, 0xf0, 0x35, 0xb8, 0x46, + 0x31, 0xcb, 0x63, 0x2e, 0x8f, 0xc7, 0xf3, 0xf1, 0xc7, 0xa3, 0xa6, 0x38, 0xb7, 0xe4, 0xb8, 0x92, + 0x67, 0xbc, 0x00, 0xd3, 0xe7, 0x2a, 0xff, 0xeb, 0x1e, 0x74, 0x7f, 0x34, 0x80, 0x3e, 0x02, 0xd8, + 0x12, 0xa5, 0xc0, 0x6f, 0x0a, 0xb8, 0x73, 0xd6, 0x4d, 0x83, 0x4b, 0xe3, 0xbb, 0x38, 0xe7, 0x86, + 0x36, 0x2f, 0xe4, 0xa7, 0xb1, 0xf8, 0xfe, 0xe7, 0xaf, 0x4f, 0xea, 0x53, 0x38, 0x5f, 0xbc, 0x4f, + 0x07, 0x27, 0x5a, 0x5b, 0x92, 0x97, 0x92, 0x59, 0xed, 0xe3, 0x0f, 0x56, 0x65, 0xab, 0xd5, 0x3e, + 0x84, 0xbf, 0x15, 0xa0, 0xd7, 0xd9, 0x0e, 0x9d, 0x0b, 0xbb, 0x22, 0x1f, 0x8b, 0x66, 0xef, 0x32, + 0x08, 0x71, 0xea, 0x8c, 0x5e, 0xd9, 0xe1, 0xa2, 0xf1, 0xac, 0xe8, 0xf0, 0xa8, 0xa5, 0x83, 0x63, + 0xaf, 0xd0, 0x52, 0xfb, 0xf0, 0x8c, 0x06, 0xed, 0xa4, 0x44, 0xdb, 0x4a, 0xbb, 0xf7, 0x41, 0x05, + 0xb3, 0x3e, 0x49, 0xc6, 0x56, 0xd3, 0x9b, 0xae, 0xb3, 0x7d, 0xb3, 0xb8, 0xec, 0x9b, 0xca, 0x9b, + 0xd5, 0x0a, 0x11, 0x92, 0x18, 0xa5, 0xa1, 0x49, 0x68, 0x68, 0x85, 0x38, 0x2d, 0x9f, 0x02, 0xf9, + 0xad, 0xc8, 0x22, 0x56, 0xff, 0x7d, 0x5a, 0x90, 0x8b, 0xcf, 0x6a, 0x63, 0xc5, 0x71, 0xbe, 0xa8, + 0xad, 0x15, 0x01, 0x74, 0x02, 0x66, 0x8a, 0x65, 0xb1, 0xda, 0xee, 0x98, 0x55, 0x62, 0xf6, 0x5d, + 0x4a, 0xfa, 0x4e, 0xc0, 0xfa, 0x43, 0x49, 0x7f, 0xbb, 0xd3, 0x97, 0x92, 0x3f, 0xea, 0xac, 0x88, + 0xdb, 0xb6, 0x13, 0x30, 0xdb, 0x1e, 0x8a, 0x6c, 0x7b, 0xbb, 0x63, 0xdb, 0x52, 0xb6, 0x33, 0x51, + 0xd6, 0xf9, 0xe4, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf4, 0x49, 0x1e, 0x99, 0x46, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/search_term_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/search_term_view_service.pb.go new file mode 100644 index 0000000..5f24163 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/search_term_view_service.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/search_term_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [SearchTermViewService.GetSearchTermView][google.ads.googleads.v1.services.SearchTermViewService.GetSearchTermView]. +type GetSearchTermViewRequest struct { + // The resource name of the search term view to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSearchTermViewRequest) Reset() { *m = GetSearchTermViewRequest{} } +func (m *GetSearchTermViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetSearchTermViewRequest) ProtoMessage() {} +func (*GetSearchTermViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_search_term_view_service_4af6bf4cca847c0e, []int{0} +} +func (m *GetSearchTermViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSearchTermViewRequest.Unmarshal(m, b) +} +func (m *GetSearchTermViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSearchTermViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetSearchTermViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSearchTermViewRequest.Merge(dst, src) +} +func (m *GetSearchTermViewRequest) XXX_Size() int { + return xxx_messageInfo_GetSearchTermViewRequest.Size(m) +} +func (m *GetSearchTermViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSearchTermViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSearchTermViewRequest proto.InternalMessageInfo + +func (m *GetSearchTermViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetSearchTermViewRequest)(nil), "google.ads.googleads.v1.services.GetSearchTermViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SearchTermViewServiceClient is the client API for SearchTermViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SearchTermViewServiceClient interface { + // Returns the attributes of the requested search term view. + GetSearchTermView(ctx context.Context, in *GetSearchTermViewRequest, opts ...grpc.CallOption) (*resources.SearchTermView, error) +} + +type searchTermViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewSearchTermViewServiceClient(cc *grpc.ClientConn) SearchTermViewServiceClient { + return &searchTermViewServiceClient{cc} +} + +func (c *searchTermViewServiceClient) GetSearchTermView(ctx context.Context, in *GetSearchTermViewRequest, opts ...grpc.CallOption) (*resources.SearchTermView, error) { + out := new(resources.SearchTermView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.SearchTermViewService/GetSearchTermView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SearchTermViewServiceServer is the server API for SearchTermViewService service. +type SearchTermViewServiceServer interface { + // Returns the attributes of the requested search term view. 
+ GetSearchTermView(context.Context, *GetSearchTermViewRequest) (*resources.SearchTermView, error) +} + +func RegisterSearchTermViewServiceServer(s *grpc.Server, srv SearchTermViewServiceServer) { + s.RegisterService(&_SearchTermViewService_serviceDesc, srv) +} + +func _SearchTermViewService_GetSearchTermView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSearchTermViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SearchTermViewServiceServer).GetSearchTermView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.SearchTermViewService/GetSearchTermView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SearchTermViewServiceServer).GetSearchTermView(ctx, req.(*GetSearchTermViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SearchTermViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.SearchTermViewService", + HandlerType: (*SearchTermViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetSearchTermView", + Handler: _SearchTermViewService_GetSearchTermView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/search_term_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/search_term_view_service.proto", fileDescriptor_search_term_view_service_4af6bf4cca847c0e) +} + +var fileDescriptor_search_term_view_service_4af6bf4cca847c0e = []byte{ + // 372 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0x31, 0x6a, 0xe3, 0x40, + 0x14, 0x45, 0x5a, 0x58, 0x58, 0xb1, 0x5b, 0xac, 0x60, 0xc1, 0x88, 0x2d, 0x8c, 0xd7, 0xc5, 0xe2, + 0x62, 0x06, 0xc5, 0x4d, 0x32, 0x21, 0x18, 0xb9, 0x71, 0xaa, 0x60, 0xec, 0xa0, 0x22, 0x08, 0xc4, + 0x44, 0xfa, 0x28, 0x02, 0x4b, 0xe3, 0xcc, 0x97, 0xe5, 0x22, 0xa4, 0x48, 0xae, 0x90, 0x1b, 0xa4, + 0xcc, 0x1d, 0x72, 0x81, 0xb4, 0x29, 0x72, 0x81, 0x54, 0x39, 0x45, 0x90, 0x47, 0x23, 0x30, 0xb1, + 0x70, 0xf7, 0x98, 0xff, 0xde, 0xfb, 0xef, 0x3f, 0xc9, 0x1a, 0x25, 0x42, 0x24, 0x0b, 0xa0, 0x3c, + 0x46, 0xaa, 0x60, 0x85, 0x4a, 0x97, 0x22, 0xc8, 0x32, 0x8d, 0x00, 0x29, 0x02, 0x97, 0xd1, 0x55, + 0x58, 0x80, 0xcc, 0xc2, 0x32, 0x85, 0x75, 0x58, 0x4f, 0xc8, 0x52, 0x8a, 0x42, 0xd8, 0x5d, 0xa5, + 0x22, 0x3c, 0x46, 0xd2, 0x18, 0x90, 0xd2, 0x25, 0xda, 0xc0, 0x39, 0x6c, 0x5b, 0x21, 0x01, 0xc5, + 0x4a, 0xee, 0xda, 0xa1, 0xbc, 0x9d, 0xbf, 0x5a, 0xb9, 0x4c, 0x29, 0xcf, 0x73, 0x51, 0xf0, 0x22, + 0x15, 0x39, 0xaa, 0x69, 0x6f, 0x64, 0x75, 0x26, 0x50, 0xcc, 0x37, 0xd2, 0x73, 0x90, 0x99, 0x9f, + 0xc2, 0x7a, 0x06, 0xd7, 0x2b, 0xc0, 0xc2, 0xfe, 0x67, 0xfd, 0xd2, 0xee, 0x61, 0xce, 0x33, 0xe8, + 0x18, 0x5d, 0xe3, 0xff, 0x8f, 0xd9, 0x4f, 0xfd, 0x78, 0xc6, 0x33, 0x38, 0x78, 0x33, 0xac, 0x3f, + 0xdb, 0xf2, 0xb9, 0xca, 0x6c, 0x3f, 0x1b, 0xd6, 0xef, 0x2f, 0xde, 0x36, 0x23, 0xfb, 0x6e, 0x25, + 0x6d, 0x81, 0x1c, 0xb7, 0x55, 0xdb, 0xb4, 0x40, 0xb6, 0x95, 0xbd, 0xa3, 0xfb, 0xd7, 0xf7, 0x07, + 0x73, 0x68, 0xbb, 0x55, 0x57, 0x37, 0x5b, 0xe7, 0x9c, 0x44, 0x2b, 0x2c, 0x44, 0x06, 0x12, 0xe9, + 0xa0, 0x2e, 0x4f, 0xcb, 0x90, 0x0e, 0x6e, 0xc7, 0x77, 0xa6, 0xd5, 0x8f, 0x44, 0xb6, 0x37, 0xef, + 0xd8, 0xd9, 0x79, 0xff, 0xb4, 0xea, 0x77, 0x6a, 0x5c, 0x9c, 0xd6, 0xfa, 0x44, 0x2c, 0x78, 0x9e, + 0x10, 0x21, 0x13, 0x9a, 0x40, 0xbe, 0x69, 
0x5f, 0x7f, 0xc9, 0x65, 0x8a, 0xed, 0xff, 0xce, 0xb1, + 0x06, 0x8f, 0xe6, 0xb7, 0x89, 0xe7, 0x3d, 0x99, 0xdd, 0x89, 0x32, 0xf4, 0x62, 0x24, 0x0a, 0x56, + 0xc8, 0x77, 0x49, 0xbd, 0x18, 0x5f, 0x34, 0x25, 0xf0, 0x62, 0x0c, 0x1a, 0x4a, 0xe0, 0xbb, 0x81, + 0xa6, 0x7c, 0x98, 0x7d, 0xf5, 0xce, 0x98, 0x17, 0x23, 0x63, 0x0d, 0x89, 0x31, 0xdf, 0x65, 0x4c, + 0xd3, 0x2e, 0xbf, 0x6f, 0x72, 0x0e, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xbf, 0x3e, 0x6e, 0xc9, + 0xe2, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_criterion_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_criterion_service.pb.go new file mode 100644 index 0000000..70d9a39 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_criterion_service.pb.go @@ -0,0 +1,542 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/shared_criterion_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [SharedCriterionService.GetSharedCriterion][google.ads.googleads.v1.services.SharedCriterionService.GetSharedCriterion]. +type GetSharedCriterionRequest struct { + // The resource name of the shared criterion to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSharedCriterionRequest) Reset() { *m = GetSharedCriterionRequest{} } +func (m *GetSharedCriterionRequest) String() string { return proto.CompactTextString(m) } +func (*GetSharedCriterionRequest) ProtoMessage() {} +func (*GetSharedCriterionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_service_f3244070d9ea4ad5, []int{0} +} +func (m *GetSharedCriterionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSharedCriterionRequest.Unmarshal(m, b) +} +func (m *GetSharedCriterionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSharedCriterionRequest.Marshal(b, m, deterministic) +} +func (dst *GetSharedCriterionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSharedCriterionRequest.Merge(dst, src) +} +func (m *GetSharedCriterionRequest) XXX_Size() int { + return xxx_messageInfo_GetSharedCriterionRequest.Size(m) +} +func (m *GetSharedCriterionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSharedCriterionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSharedCriterionRequest proto.InternalMessageInfo + +func (m *GetSharedCriterionRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [SharedCriterionService.MutateSharedCriteria][google.ads.googleads.v1.services.SharedCriterionService.MutateSharedCriteria]. +type MutateSharedCriteriaRequest struct { + // The ID of the customer whose shared criteria are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual shared criteria. + Operations []*SharedCriterionOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+	ValidateOnly         bool     `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MutateSharedCriteriaRequest) Reset()         { *m = MutateSharedCriteriaRequest{} }
+func (m *MutateSharedCriteriaRequest) String() string { return proto.CompactTextString(m) }
+func (*MutateSharedCriteriaRequest) ProtoMessage()    {}
+func (*MutateSharedCriteriaRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_shared_criterion_service_f3244070d9ea4ad5, []int{1}
+}
+func (m *MutateSharedCriteriaRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MutateSharedCriteriaRequest.Unmarshal(m, b)
+}
+func (m *MutateSharedCriteriaRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MutateSharedCriteriaRequest.Marshal(b, m, deterministic)
+}
+func (dst *MutateSharedCriteriaRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MutateSharedCriteriaRequest.Merge(dst, src)
+}
+func (m *MutateSharedCriteriaRequest) XXX_Size() int {
+	return xxx_messageInfo_MutateSharedCriteriaRequest.Size(m)
+}
+func (m *MutateSharedCriteriaRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MutateSharedCriteriaRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MutateSharedCriteriaRequest proto.InternalMessageInfo
+
+func (m *MutateSharedCriteriaRequest) GetCustomerId() string {
+	if m != nil {
+		return m.CustomerId
+	}
+	return ""
+}
+
+func (m *MutateSharedCriteriaRequest) GetOperations() []*SharedCriterionOperation {
+	if m != nil {
+		return m.Operations
+	}
+	return nil
+}
+
+func (m *MutateSharedCriteriaRequest) GetPartialFailure() bool {
+	if m != nil {
+		return m.PartialFailure
+	}
+	return false
+}
+
+func (m *MutateSharedCriteriaRequest) GetValidateOnly() bool {
+	if m != nil {
+		return m.ValidateOnly
+	}
+	return false
+}
+
+// A single operation (create, remove) on a shared criterion.
+type SharedCriterionOperation struct {
+	// The mutate operation.
+ // + // Types that are valid to be assigned to Operation: + // *SharedCriterionOperation_Create + // *SharedCriterionOperation_Remove + Operation isSharedCriterionOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedCriterionOperation) Reset() { *m = SharedCriterionOperation{} } +func (m *SharedCriterionOperation) String() string { return proto.CompactTextString(m) } +func (*SharedCriterionOperation) ProtoMessage() {} +func (*SharedCriterionOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_service_f3244070d9ea4ad5, []int{2} +} +func (m *SharedCriterionOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedCriterionOperation.Unmarshal(m, b) +} +func (m *SharedCriterionOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedCriterionOperation.Marshal(b, m, deterministic) +} +func (dst *SharedCriterionOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedCriterionOperation.Merge(dst, src) +} +func (m *SharedCriterionOperation) XXX_Size() int { + return xxx_messageInfo_SharedCriterionOperation.Size(m) +} +func (m *SharedCriterionOperation) XXX_DiscardUnknown() { + xxx_messageInfo_SharedCriterionOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedCriterionOperation proto.InternalMessageInfo + +type isSharedCriterionOperation_Operation interface { + isSharedCriterionOperation_Operation() +} + +type SharedCriterionOperation_Create struct { + Create *resources.SharedCriterion `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type SharedCriterionOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*SharedCriterionOperation_Create) isSharedCriterionOperation_Operation() {} + +func (*SharedCriterionOperation_Remove) isSharedCriterionOperation_Operation() {} + +func (m *SharedCriterionOperation) GetOperation() isSharedCriterionOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *SharedCriterionOperation) GetCreate() *resources.SharedCriterion { + if x, ok := m.GetOperation().(*SharedCriterionOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *SharedCriterionOperation) GetRemove() string { + if x, ok := m.GetOperation().(*SharedCriterionOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SharedCriterionOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SharedCriterionOperation_OneofMarshaler, _SharedCriterionOperation_OneofUnmarshaler, _SharedCriterionOperation_OneofSizer, []interface{}{ + (*SharedCriterionOperation_Create)(nil), + (*SharedCriterionOperation_Remove)(nil), + } +} + +func _SharedCriterionOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SharedCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *SharedCriterionOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *SharedCriterionOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("SharedCriterionOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _SharedCriterionOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SharedCriterionOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.SharedCriterion) + err := b.DecodeMessage(msg) + m.Operation = &SharedCriterionOperation_Create{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &SharedCriterionOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _SharedCriterionOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SharedCriterionOperation) + // operation + switch x := m.Operation.(type) { + case *SharedCriterionOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SharedCriterionOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a shared criterion mutate. +type MutateSharedCriteriaResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateSharedCriterionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateSharedCriteriaResponse) Reset() { *m = MutateSharedCriteriaResponse{} } +func (m *MutateSharedCriteriaResponse) String() string { return proto.CompactTextString(m) } +func (*MutateSharedCriteriaResponse) ProtoMessage() {} +func (*MutateSharedCriteriaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_service_f3244070d9ea4ad5, []int{3} +} +func (m *MutateSharedCriteriaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateSharedCriteriaResponse.Unmarshal(m, b) +} +func (m *MutateSharedCriteriaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateSharedCriteriaResponse.Marshal(b, m, deterministic) +} +func (dst *MutateSharedCriteriaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateSharedCriteriaResponse.Merge(dst, src) +} +func (m *MutateSharedCriteriaResponse) XXX_Size() int { + return xxx_messageInfo_MutateSharedCriteriaResponse.Size(m) +} +func (m *MutateSharedCriteriaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateSharedCriteriaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateSharedCriteriaResponse proto.InternalMessageInfo + +func (m *MutateSharedCriteriaResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateSharedCriteriaResponse) GetResults() []*MutateSharedCriterionResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the shared criterion mutate. +type MutateSharedCriterionResult struct { + // Returned for successful operations. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateSharedCriterionResult) Reset() { *m = MutateSharedCriterionResult{} } +func (m *MutateSharedCriterionResult) String() string { return proto.CompactTextString(m) } +func (*MutateSharedCriterionResult) ProtoMessage() {} +func (*MutateSharedCriterionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_criterion_service_f3244070d9ea4ad5, []int{4} +} +func (m *MutateSharedCriterionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateSharedCriterionResult.Unmarshal(m, b) +} +func (m *MutateSharedCriterionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateSharedCriterionResult.Marshal(b, m, deterministic) +} +func (dst *MutateSharedCriterionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateSharedCriterionResult.Merge(dst, src) +} +func (m *MutateSharedCriterionResult) XXX_Size() int { + return xxx_messageInfo_MutateSharedCriterionResult.Size(m) +} +func (m *MutateSharedCriterionResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateSharedCriterionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateSharedCriterionResult proto.InternalMessageInfo + +func (m *MutateSharedCriterionResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetSharedCriterionRequest)(nil), "google.ads.googleads.v1.services.GetSharedCriterionRequest") + proto.RegisterType((*MutateSharedCriteriaRequest)(nil), "google.ads.googleads.v1.services.MutateSharedCriteriaRequest") + proto.RegisterType((*SharedCriterionOperation)(nil), "google.ads.googleads.v1.services.SharedCriterionOperation") + proto.RegisterType((*MutateSharedCriteriaResponse)(nil), "google.ads.googleads.v1.services.MutateSharedCriteriaResponse") + proto.RegisterType((*MutateSharedCriterionResult)(nil), "google.ads.googleads.v1.services.MutateSharedCriterionResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SharedCriterionServiceClient is the client API for SharedCriterionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SharedCriterionServiceClient interface { + // Returns the requested shared criterion in full detail. + GetSharedCriterion(ctx context.Context, in *GetSharedCriterionRequest, opts ...grpc.CallOption) (*resources.SharedCriterion, error) + // Creates or removes shared criteria. Operation statuses are returned. 
+ MutateSharedCriteria(ctx context.Context, in *MutateSharedCriteriaRequest, opts ...grpc.CallOption) (*MutateSharedCriteriaResponse, error) +} + +type sharedCriterionServiceClient struct { + cc *grpc.ClientConn +} + +func NewSharedCriterionServiceClient(cc *grpc.ClientConn) SharedCriterionServiceClient { + return &sharedCriterionServiceClient{cc} +} + +func (c *sharedCriterionServiceClient) GetSharedCriterion(ctx context.Context, in *GetSharedCriterionRequest, opts ...grpc.CallOption) (*resources.SharedCriterion, error) { + out := new(resources.SharedCriterion) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.SharedCriterionService/GetSharedCriterion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sharedCriterionServiceClient) MutateSharedCriteria(ctx context.Context, in *MutateSharedCriteriaRequest, opts ...grpc.CallOption) (*MutateSharedCriteriaResponse, error) { + out := new(MutateSharedCriteriaResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.SharedCriterionService/MutateSharedCriteria", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SharedCriterionServiceServer is the server API for SharedCriterionService service. +type SharedCriterionServiceServer interface { + // Returns the requested shared criterion in full detail. + GetSharedCriterion(context.Context, *GetSharedCriterionRequest) (*resources.SharedCriterion, error) + // Creates or removes shared criteria. Operation statuses are returned. + MutateSharedCriteria(context.Context, *MutateSharedCriteriaRequest) (*MutateSharedCriteriaResponse, error) +} + +func RegisterSharedCriterionServiceServer(s *grpc.Server, srv SharedCriterionServiceServer) { + s.RegisterService(&_SharedCriterionService_serviceDesc, srv) +} + +func _SharedCriterionService_GetSharedCriterion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSharedCriterionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SharedCriterionServiceServer).GetSharedCriterion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.SharedCriterionService/GetSharedCriterion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SharedCriterionServiceServer).GetSharedCriterion(ctx, req.(*GetSharedCriterionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SharedCriterionService_MutateSharedCriteria_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateSharedCriteriaRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SharedCriterionServiceServer).MutateSharedCriteria(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.SharedCriterionService/MutateSharedCriteria", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SharedCriterionServiceServer).MutateSharedCriteria(ctx, req.(*MutateSharedCriteriaRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SharedCriterionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.SharedCriterionService", + HandlerType: (*SharedCriterionServiceServer)(nil), + Methods: 
[]grpc.MethodDesc{ + { + MethodName: "GetSharedCriterion", + Handler: _SharedCriterionService_GetSharedCriterion_Handler, + }, + { + MethodName: "MutateSharedCriteria", + Handler: _SharedCriterionService_MutateSharedCriteria_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/shared_criterion_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/shared_criterion_service.proto", fileDescriptor_shared_criterion_service_f3244070d9ea4ad5) +} + +var fileDescriptor_shared_criterion_service_f3244070d9ea4ad5 = []byte{ + // 666 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0xfd, 0xec, 0x7c, 0x2a, 0x74, 0x52, 0x40, 0x1a, 0xfe, 0x4c, 0x5a, 0x41, 0x64, 0x2a, 0x51, + 0x65, 0x61, 0x13, 0x77, 0x53, 0xb9, 0x6a, 0x21, 0x41, 0xb4, 0x45, 0x02, 0x5a, 0x39, 0x52, 0x90, + 0xaa, 0x48, 0xd6, 0xd4, 0x9e, 0x1a, 0x4b, 0xb6, 0xc7, 0xcc, 0x8c, 0x83, 0xaa, 0xaa, 0x9b, 0xbe, + 0x00, 0x0b, 0xde, 0x80, 0x25, 0x4b, 0x5e, 0xa0, 0x7b, 0xb6, 0x88, 0x37, 0x60, 0x81, 0x78, 0x0a, + 0x64, 0x8f, 0x27, 0x34, 0xc1, 0x51, 0x50, 0x76, 0x33, 0xf7, 0xde, 0x39, 0xf7, 0x9c, 0xfb, 0x33, + 0xe0, 0x49, 0x40, 0x48, 0x10, 0x61, 0x13, 0xf9, 0xcc, 0x14, 0xc7, 0xfc, 0x34, 0x6c, 0x9b, 0x0c, + 0xd3, 0x61, 0xe8, 0x61, 0x66, 0xb2, 0xb7, 0x88, 0x62, 0xdf, 0xf5, 0x68, 0xc8, 0x31, 0x0d, 0x49, + 0xe2, 0x96, 0x1e, 0x23, 0xa5, 0x84, 0x13, 0xd8, 0x14, 0xaf, 0x0c, 0xe4, 0x33, 0x63, 0x04, 0x60, + 0x0c, 0xdb, 0x86, 0x04, 0x68, 0x6c, 0x4c, 0x4b, 0x41, 0x31, 0x23, 0x19, 0xad, 0xca, 0x21, 0xb0, + 0x1b, 0x2b, 0xf2, 0x65, 0x1a, 0x9a, 0x28, 0x49, 0x08, 0x47, 0x3c, 0x24, 0x09, 0x2b, 0xbd, 0xf7, + 0x4b, 0x6f, 0x71, 0x3b, 0xca, 0x8e, 0xcd, 0xf7, 0x14, 0xa5, 0x29, 0xa6, 0xd2, 0x7f, 0xb7, 0xf4, + 0xd3, 0xd4, 0x33, 0x19, 0x47, 0x3c, 0x2b, 0x1d, 0xfa, 0x53, 0x70, 0x6f, 0x17, 0xf3, 0x5e, 0x91, + 0xf3, 0x99, 0x4c, 0xe9, 0xe0, 0x77, 0x19, 0x66, 0x1c, 0x3e, 0x04, 0xd7, 0x24, 0x2f, 0x37, 0x41, + 0x31, 0xd6, 0x94, 0xa6, 0xb2, 0xb6, 0xe8, 0x2c, 0x49, 0xe3, 0x6b, 0x14, 0x63, 0xfd, 0xa7, 0x02, + 0x96, 0x5f, 0x65, 0x1c, 0x71, 0x3c, 0x86, 0x82, 0x24, 0xc8, 0x03, 0x50, 0xf7, 0x32, 0xc6, 0x49, + 0x8c, 0xa9, 0x1b, 0xfa, 0x25, 0x04, 0x90, 0xa6, 0x17, 0x3e, 0x3c, 0x04, 0x80, 0xa4, 0x98, 0x0a, + 0x3d, 0x9a, 0xda, 0xac, 0xad, 0xd5, 0x2d, 0xdb, 0x98, 0x55, 0x4a, 0x63, 0x82, 0xf3, 0xbe, 0x84, + 0x70, 0x2e, 0xa1, 0xc1, 0x47, 0xe0, 0x46, 0x8a, 0x28, 0x0f, 0x51, 0xe4, 0x1e, 0xa3, 0x30, 0xca, + 0x28, 0xd6, 0x6a, 0x4d, 0x65, 0xed, 0xaa, 0x73, 0xbd, 0x34, 0xef, 0x08, 0x6b, 0x2e, 0x75, 0x88, + 0xa2, 0xd0, 0x47, 0x1c, 0xbb, 0x24, 0x89, 0x4e, 0xb4, 0xff, 0x8b, 0xb0, 0x25, 0x69, 0xdc, 0x4f, + 0xa2, 0x13, 0xfd, 0x83, 0x02, 0xb4, 0x69, 0x69, 0xe1, 0x4b, 0xb0, 0xe0, 0x51, 0x8c, 0xb8, 0xa8, + 0x52, 0xdd, 0xb2, 0xa6, 0x4a, 0x18, 0xf5, 0x7a, 0x52, 0xc3, 0xde, 0x7f, 0x4e, 0x89, 0x01, 0x35, + 0xb0, 0x40, 0x71, 0x4c, 0x86, 0x82, 0xef, 0x62, 0xee, 0x11, 0xf7, 0x6e, 0x1d, 0x2c, 0x8e, 0x04, + 0xea, 0x17, 0x0a, 0x58, 0xa9, 0x2e, 0x3e, 0x4b, 0x49, 0xc2, 0x30, 0xdc, 0x01, 0xb7, 0x27, 0x0a, + 0xe0, 0x62, 0x4a, 0x09, 0x2d, 0x60, 0xeb, 0x16, 0x94, 0x24, 0x69, 0xea, 0x19, 0xbd, 0x62, 0x30, + 0x9c, 0x9b, 0xe3, 0xa5, 0x79, 0x9e, 0x87, 0xc3, 0x37, 0xe0, 0x0a, 0xc5, 0x2c, 0x8b, 0xb8, 0xec, + 0xd0, 0xd6, 0xec, 0x0e, 0x55, 0x10, 0xcb, 0x67, 0x2b, 0x47, 0x71, 0x24, 0x9a, 0xde, 0xad, 0x9c, + 0x1e, 0x19, 0xf7, 0x4f, 0x23, 0x68, 0x7d, 0xa9, 0x81, 0x3b, 0x13, 0xcf, 0x7b, 0x82, 0x04, 0xbc, + 0x50, 0x00, 0xfc, 0x7b, 0xc0, 0xe1, 0xe6, 0x6c, 0xf6, 0x53, 
0xd7, 0xa2, 0x31, 0x47, 0x67, 0xf5, + 0x8d, 0xf3, 0x6f, 0x3f, 0x3e, 0xaa, 0x16, 0x7c, 0x9c, 0x2f, 0xfb, 0xe9, 0x98, 0xa4, 0x2d, 0xb9, + 0x0b, 0xcc, 0x6c, 0x95, 0xdb, 0x2f, 0xdb, 0x68, 0xb6, 0xce, 0xe0, 0x77, 0x05, 0xdc, 0xaa, 0x6a, + 0x31, 0x9c, 0xaf, 0x03, 0x72, 0x2f, 0x1b, 0xdb, 0xf3, 0x3e, 0x17, 0x93, 0xa5, 0x6f, 0x17, 0x8a, + 0x36, 0xf4, 0xf5, 0x5c, 0xd1, 0x1f, 0x09, 0xa7, 0x97, 0x96, 0x7d, 0xab, 0x75, 0x36, 0x21, 0xc8, + 0x8e, 0x0b, 0x48, 0x5b, 0x69, 0x75, 0xcf, 0x55, 0xb0, 0xea, 0x91, 0x78, 0x26, 0x8b, 0xee, 0x72, + 0x75, 0x6b, 0x0f, 0xf2, 0xff, 0xeb, 0x40, 0x39, 0xdc, 0x2b, 0x01, 0x02, 0x12, 0xa1, 0x24, 0x30, + 0x08, 0x0d, 0xcc, 0x00, 0x27, 0xc5, 0xef, 0x26, 0xbf, 0xd8, 0x34, 0x64, 0xd3, 0x3f, 0xf5, 0x4d, + 0x79, 0xf8, 0xa4, 0xd6, 0x76, 0x3b, 0x9d, 0xcf, 0x6a, 0x73, 0x57, 0x00, 0x76, 0x7c, 0x66, 0x88, + 0x63, 0x7e, 0xea, 0xb7, 0x8d, 0x32, 0x31, 0xfb, 0x2a, 0x43, 0x06, 0x1d, 0x9f, 0x0d, 0x46, 0x21, + 0x83, 0x7e, 0x7b, 0x20, 0x43, 0x7e, 0xa9, 0xab, 0xc2, 0x6e, 0xdb, 0x1d, 0x9f, 0xd9, 0xf6, 0x28, + 0xc8, 0xb6, 0xfb, 0x6d, 0xdb, 0x96, 0x61, 0x47, 0x0b, 0x05, 0xcf, 0xf5, 0xdf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0xbe, 0x16, 0x12, 0x8a, 0x7b, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_set_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_set_service.pb.go new file mode 100644 index 0000000..6b2ad6d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shared_set_service.pb.go @@ -0,0 +1,588 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/shared_set_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [SharedSetService.GetSharedSet][google.ads.googleads.v1.services.SharedSetService.GetSharedSet]. +type GetSharedSetRequest struct { + // The resource name of the shared set to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSharedSetRequest) Reset() { *m = GetSharedSetRequest{} } +func (m *GetSharedSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetSharedSetRequest) ProtoMessage() {} +func (*GetSharedSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_service_62e398adbb29dcc8, []int{0} +} +func (m *GetSharedSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSharedSetRequest.Unmarshal(m, b) +} +func (m *GetSharedSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSharedSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetSharedSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSharedSetRequest.Merge(dst, src) +} +func (m *GetSharedSetRequest) XXX_Size() int { + return xxx_messageInfo_GetSharedSetRequest.Size(m) +} +func (m *GetSharedSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSharedSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSharedSetRequest proto.InternalMessageInfo + +func (m *GetSharedSetRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [SharedSetService.MutateSharedSets][google.ads.googleads.v1.services.SharedSetService.MutateSharedSets]. +type MutateSharedSetsRequest struct { + // The ID of the customer whose shared sets are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual shared sets. + Operations []*SharedSetOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateSharedSetsRequest) Reset() { *m = MutateSharedSetsRequest{} } +func (m *MutateSharedSetsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateSharedSetsRequest) ProtoMessage() {} +func (*MutateSharedSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_service_62e398adbb29dcc8, []int{1} +} +func (m *MutateSharedSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateSharedSetsRequest.Unmarshal(m, b) +} +func (m *MutateSharedSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateSharedSetsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateSharedSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateSharedSetsRequest.Merge(dst, src) +} +func (m *MutateSharedSetsRequest) XXX_Size() int { + return xxx_messageInfo_MutateSharedSetsRequest.Size(m) +} +func (m *MutateSharedSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateSharedSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateSharedSetsRequest proto.InternalMessageInfo + +func (m *MutateSharedSetsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateSharedSetsRequest) GetOperations() []*SharedSetOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateSharedSetsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateSharedSetsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update, remove) on an shared set. +type SharedSetOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *SharedSetOperation_Create + // *SharedSetOperation_Update + // *SharedSetOperation_Remove + Operation isSharedSetOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SharedSetOperation) Reset() { *m = SharedSetOperation{} } +func (m *SharedSetOperation) String() string { return proto.CompactTextString(m) } +func (*SharedSetOperation) ProtoMessage() {} +func (*SharedSetOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_service_62e398adbb29dcc8, []int{2} +} +func (m *SharedSetOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SharedSetOperation.Unmarshal(m, b) +} +func (m *SharedSetOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SharedSetOperation.Marshal(b, m, deterministic) +} +func (dst *SharedSetOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SharedSetOperation.Merge(dst, src) +} +func (m *SharedSetOperation) XXX_Size() int { + return xxx_messageInfo_SharedSetOperation.Size(m) +} +func (m *SharedSetOperation) XXX_DiscardUnknown() { + xxx_messageInfo_SharedSetOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_SharedSetOperation proto.InternalMessageInfo + +func (m *SharedSetOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isSharedSetOperation_Operation interface { + isSharedSetOperation_Operation() +} + +type SharedSetOperation_Create struct { + Create *resources.SharedSet `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type SharedSetOperation_Update struct { + Update *resources.SharedSet `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type SharedSetOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*SharedSetOperation_Create) isSharedSetOperation_Operation() {} + +func (*SharedSetOperation_Update) isSharedSetOperation_Operation() {} + +func (*SharedSetOperation_Remove) isSharedSetOperation_Operation() {} + +func (m *SharedSetOperation) GetOperation() isSharedSetOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *SharedSetOperation) GetCreate() *resources.SharedSet { + if x, ok := m.GetOperation().(*SharedSetOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *SharedSetOperation) GetUpdate() *resources.SharedSet { + if x, ok := m.GetOperation().(*SharedSetOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *SharedSetOperation) GetRemove() string { + if x, ok := m.GetOperation().(*SharedSetOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SharedSetOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SharedSetOperation_OneofMarshaler, _SharedSetOperation_OneofUnmarshaler, _SharedSetOperation_OneofSizer, []interface{}{ + (*SharedSetOperation_Create)(nil), + (*SharedSetOperation_Update)(nil), + (*SharedSetOperation_Remove)(nil), + } +} + +func _SharedSetOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SharedSetOperation) + // operation + switch x := m.Operation.(type) { + case *SharedSetOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *SharedSetOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *SharedSetOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("SharedSetOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _SharedSetOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SharedSetOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.SharedSet) + err := b.DecodeMessage(msg) + m.Operation = &SharedSetOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.SharedSet) + err := b.DecodeMessage(msg) + m.Operation = &SharedSetOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &SharedSetOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _SharedSetOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SharedSetOperation) + // operation + switch x := m.Operation.(type) { + case *SharedSetOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SharedSetOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SharedSetOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for a shared set mutate. +type MutateSharedSetsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateSharedSetResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateSharedSetsResponse) Reset() { *m = MutateSharedSetsResponse{} } +func (m *MutateSharedSetsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateSharedSetsResponse) ProtoMessage() {} +func (*MutateSharedSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_service_62e398adbb29dcc8, []int{3} +} +func (m *MutateSharedSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateSharedSetsResponse.Unmarshal(m, b) +} +func (m *MutateSharedSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateSharedSetsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateSharedSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateSharedSetsResponse.Merge(dst, src) +} +func (m *MutateSharedSetsResponse) XXX_Size() int { + return xxx_messageInfo_MutateSharedSetsResponse.Size(m) +} +func (m *MutateSharedSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateSharedSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateSharedSetsResponse proto.InternalMessageInfo + +func (m *MutateSharedSetsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateSharedSetsResponse) GetResults() []*MutateSharedSetResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the shared set mutate. +type MutateSharedSetResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateSharedSetResult) Reset() { *m = MutateSharedSetResult{} } +func (m *MutateSharedSetResult) String() string { return proto.CompactTextString(m) } +func (*MutateSharedSetResult) ProtoMessage() {} +func (*MutateSharedSetResult) Descriptor() ([]byte, []int) { + return fileDescriptor_shared_set_service_62e398adbb29dcc8, []int{4} +} +func (m *MutateSharedSetResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateSharedSetResult.Unmarshal(m, b) +} +func (m *MutateSharedSetResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateSharedSetResult.Marshal(b, m, deterministic) +} +func (dst *MutateSharedSetResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateSharedSetResult.Merge(dst, src) +} +func (m *MutateSharedSetResult) XXX_Size() int { + return xxx_messageInfo_MutateSharedSetResult.Size(m) +} +func (m *MutateSharedSetResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateSharedSetResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateSharedSetResult proto.InternalMessageInfo + +func (m *MutateSharedSetResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetSharedSetRequest)(nil), "google.ads.googleads.v1.services.GetSharedSetRequest") + proto.RegisterType((*MutateSharedSetsRequest)(nil), "google.ads.googleads.v1.services.MutateSharedSetsRequest") + proto.RegisterType((*SharedSetOperation)(nil), "google.ads.googleads.v1.services.SharedSetOperation") + 
proto.RegisterType((*MutateSharedSetsResponse)(nil), "google.ads.googleads.v1.services.MutateSharedSetsResponse") + proto.RegisterType((*MutateSharedSetResult)(nil), "google.ads.googleads.v1.services.MutateSharedSetResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SharedSetServiceClient is the client API for SharedSetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SharedSetServiceClient interface { + // Returns the requested shared set in full detail. + GetSharedSet(ctx context.Context, in *GetSharedSetRequest, opts ...grpc.CallOption) (*resources.SharedSet, error) + // Creates, updates, or removes shared sets. Operation statuses are returned. + MutateSharedSets(ctx context.Context, in *MutateSharedSetsRequest, opts ...grpc.CallOption) (*MutateSharedSetsResponse, error) +} + +type sharedSetServiceClient struct { + cc *grpc.ClientConn +} + +func NewSharedSetServiceClient(cc *grpc.ClientConn) SharedSetServiceClient { + return &sharedSetServiceClient{cc} +} + +func (c *sharedSetServiceClient) GetSharedSet(ctx context.Context, in *GetSharedSetRequest, opts ...grpc.CallOption) (*resources.SharedSet, error) { + out := new(resources.SharedSet) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.SharedSetService/GetSharedSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sharedSetServiceClient) MutateSharedSets(ctx context.Context, in *MutateSharedSetsRequest, opts ...grpc.CallOption) (*MutateSharedSetsResponse, error) { + out := new(MutateSharedSetsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.SharedSetService/MutateSharedSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SharedSetServiceServer is the server API for SharedSetService service. +type SharedSetServiceServer interface { + // Returns the requested shared set in full detail. + GetSharedSet(context.Context, *GetSharedSetRequest) (*resources.SharedSet, error) + // Creates, updates, or removes shared sets. Operation statuses are returned. 
+ MutateSharedSets(context.Context, *MutateSharedSetsRequest) (*MutateSharedSetsResponse, error) +} + +func RegisterSharedSetServiceServer(s *grpc.Server, srv SharedSetServiceServer) { + s.RegisterService(&_SharedSetService_serviceDesc, srv) +} + +func _SharedSetService_GetSharedSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSharedSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SharedSetServiceServer).GetSharedSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.SharedSetService/GetSharedSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SharedSetServiceServer).GetSharedSet(ctx, req.(*GetSharedSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SharedSetService_MutateSharedSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateSharedSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SharedSetServiceServer).MutateSharedSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.SharedSetService/MutateSharedSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SharedSetServiceServer).MutateSharedSets(ctx, req.(*MutateSharedSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SharedSetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.SharedSetService", + HandlerType: (*SharedSetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetSharedSet", + Handler: _SharedSetService_GetSharedSet_Handler, + }, + { + MethodName: "MutateSharedSets", + Handler: _SharedSetService_MutateSharedSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/shared_set_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/shared_set_service.proto", fileDescriptor_shared_set_service_62e398adbb29dcc8) +} + +var fileDescriptor_shared_set_service_62e398adbb29dcc8 = []byte{ + // 705 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcf, 0x4e, 0xd4, 0x40, + 0x1c, 0xb6, 0xbb, 0x06, 0x65, 0x8a, 0x4a, 0x86, 0x10, 0x9a, 0x8d, 0xd1, 0x4d, 0x25, 0x91, 0x6c, + 0x48, 0x9b, 0x5d, 0x31, 0x86, 0x41, 0x0e, 0x4b, 0x22, 0xe0, 0x01, 0xc1, 0xae, 0xe1, 0x60, 0x36, + 0x69, 0x86, 0xed, 0xb0, 0x36, 0xb4, 0x9d, 0x3a, 0x33, 0x5d, 0x43, 0x08, 0x17, 0x0f, 0xc6, 0xbb, + 0x6f, 0xe0, 0x4d, 0xef, 0x3e, 0x82, 0x17, 0xaf, 0xde, 0x3d, 0x79, 0xf2, 0x19, 0x3c, 0x98, 0xe9, + 0x74, 0xca, 0xb2, 0x40, 0x56, 0xb8, 0x4d, 0x7f, 0xf3, 0x7d, 0xdf, 0xef, 0xff, 0x14, 0x2c, 0xf7, + 0x29, 0xed, 0x47, 0xc4, 0xc5, 0x01, 0x77, 0xd5, 0x51, 0x9e, 0x06, 0x4d, 0x97, 0x13, 0x36, 0x08, + 0x7b, 0x84, 0xbb, 0xfc, 0x0d, 0x66, 0x24, 0xf0, 0x39, 0x11, 0x7e, 0x61, 0x73, 0x52, 0x46, 0x05, + 0x85, 0x75, 0x85, 0x77, 0x70, 0xc0, 0x9d, 0x92, 0xea, 0x0c, 0x9a, 0x8e, 0xa6, 0xd6, 0x5a, 0x17, + 0x89, 0x33, 0xc2, 0x69, 0xc6, 0x4e, 0xab, 0x2b, 0xd5, 0xda, 0x5d, 0xcd, 0x49, 0x43, 0x17, 0x27, + 0x09, 0x15, 0x58, 0x84, 0x34, 0xe1, 0xc5, 0x6d, 0xe1, 0xd3, 0xcd, 0xbf, 0xf6, 0xb2, 0x7d, 0x77, + 0x3f, 0x24, 0x51, 
0xe0, 0xc7, 0x98, 0x1f, 0x14, 0x88, 0x7b, 0xa3, 0x88, 0x77, 0x0c, 0xa7, 0x29, + 0x61, 0x5a, 0x61, 0xae, 0xb8, 0x67, 0x69, 0xcf, 0xe5, 0x02, 0x8b, 0xac, 0xb8, 0xb0, 0x11, 0x98, + 0xd9, 0x20, 0xa2, 0x93, 0xc7, 0xd3, 0x21, 0xc2, 0x23, 0x6f, 0x33, 0xc2, 0x05, 0x7c, 0x00, 0x6e, + 0xe9, 0x68, 0xfd, 0x04, 0xc7, 0xc4, 0x32, 0xea, 0xc6, 0xc2, 0xa4, 0x37, 0xa5, 0x8d, 0x2f, 0x70, + 0x4c, 0xec, 0x5f, 0x06, 0x98, 0xdb, 0xca, 0x04, 0x16, 0xa4, 0xe4, 0x73, 0x2d, 0x70, 0x1f, 0x98, + 0xbd, 0x8c, 0x0b, 0x1a, 0x13, 0xe6, 0x87, 0x41, 0x41, 0x07, 0xda, 0xf4, 0x3c, 0x80, 0xaf, 0x00, + 0xa0, 0x29, 0x61, 0x2a, 0x4f, 0xab, 0x52, 0xaf, 0x2e, 0x98, 0xad, 0x25, 0x67, 0x5c, 0x71, 0x9d, + 0xd2, 0xd3, 0xb6, 0x26, 0x7b, 0x43, 0x3a, 0xf0, 0x21, 0xb8, 0x93, 0x62, 0x26, 0x42, 0x1c, 0xf9, + 0xfb, 0x38, 0x8c, 0x32, 0x46, 0xac, 0x6a, 0xdd, 0x58, 0xb8, 0xe9, 0xdd, 0x2e, 0xcc, 0xeb, 0xca, + 0x2a, 0x13, 0x1c, 0xe0, 0x28, 0x0c, 0xb0, 0x20, 0x3e, 0x4d, 0xa2, 0x43, 0xeb, 0x7a, 0x0e, 0x9b, + 0xd2, 0xc6, 0xed, 0x24, 0x3a, 0xb4, 0x3f, 0x56, 0x00, 0x3c, 0xeb, 0x10, 0xae, 0x00, 0x33, 0x4b, + 0x73, 0xa6, 0xec, 0x40, 0xce, 0x34, 0x5b, 0x35, 0x1d, 0xbb, 0x6e, 0x81, 0xb3, 0x2e, 0x9b, 0xb4, + 0x85, 0xf9, 0x81, 0x07, 0x14, 0x5c, 0x9e, 0xe1, 0x3a, 0x98, 0xe8, 0x31, 0x82, 0x85, 0x2a, 0xa9, + 0xd9, 0x5a, 0xbc, 0x30, 0xe7, 0x72, 0x5c, 0x4e, 0x92, 0xde, 0xbc, 0xe6, 0x15, 0x6c, 0xa9, 0xa3, + 0x54, 0xad, 0xca, 0xd5, 0x74, 0x14, 0x1b, 0x5a, 0x60, 0x82, 0x91, 0x98, 0x0e, 0x54, 0xa1, 0x26, + 0xe5, 0x8d, 0xfa, 0x5e, 0x33, 0xc1, 0x64, 0x59, 0x59, 0xfb, 0x9b, 0x01, 0xac, 0xb3, 0xbd, 0xe6, + 0x29, 0x4d, 0xb8, 0x8c, 0x65, 0x76, 0xa4, 0xea, 0x3e, 0x61, 0x8c, 0xb2, 0x5c, 0xd2, 0x6c, 0x41, + 0x1d, 0x1a, 0x4b, 0x7b, 0x4e, 0x27, 0x9f, 0x3e, 0x6f, 0xe6, 0x74, 0x3f, 0x9e, 0x49, 0x38, 0x7c, + 0x09, 0x6e, 0x30, 0xc2, 0xb3, 0x48, 0xe8, 0x81, 0x78, 0x32, 0x7e, 0x20, 0x46, 0x82, 0xf2, 0x72, + 0xbe, 0xa7, 0x75, 0xec, 0xa7, 0x60, 0xf6, 0x5c, 0xc4, 0x7f, 0x4d, 0x78, 0xeb, 0x43, 0x15, 0x4c, + 0x97, 0xc4, 0x8e, 0x72, 0x09, 0xbf, 0x18, 0x60, 0x6a, 0x78, 0x67, 0xe0, 0xe3, 0xf1, 0x51, 0x9e, + 0xb3, 0x63, 0xb5, 0x4b, 0x75, 0xcc, 0x5e, 0x7a, 0xff, 0xf3, 0xf7, 0xa7, 0x8a, 0x03, 0x17, 0xe5, + 0x4b, 0x72, 0x74, 0x2a, 0xf4, 0x55, 0xbd, 0x56, 0xdc, 0x6d, 0x14, 0x4f, 0x8b, 0x6c, 0x8f, 0xdb, + 0x38, 0x86, 0xdf, 0x0d, 0x30, 0x3d, 0xda, 0x36, 0xb8, 0x7c, 0xe9, 0xaa, 0xea, 0xb5, 0xae, 0xa1, + 0xab, 0x50, 0xd5, 0x94, 0xd8, 0x28, 0xcf, 0x60, 0xc9, 0x76, 0x65, 0x06, 0x27, 0x21, 0x1f, 0x0d, + 0xbd, 0x13, 0xab, 0x8d, 0xe3, 0xa1, 0x04, 0x50, 0x9c, 0x4b, 0x21, 0xa3, 0xb1, 0xf6, 0xd7, 0x00, + 0xf3, 0x3d, 0x1a, 0x8f, 0xf5, 0xbe, 0x36, 0x3b, 0xda, 0xae, 0x1d, 0xb9, 0x8e, 0x3b, 0xc6, 0xeb, + 0xcd, 0x82, 0xda, 0xa7, 0x11, 0x4e, 0xfa, 0x0e, 0x65, 0x7d, 0xb7, 0x4f, 0x92, 0x7c, 0x59, 0xf5, + 0x2b, 0x9d, 0x86, 0xfc, 0xe2, 0x3f, 0xc2, 0x8a, 0x3e, 0x7c, 0xae, 0x54, 0x37, 0xda, 0xed, 0xaf, + 0x95, 0xfa, 0x86, 0x12, 0x6c, 0x07, 0xdc, 0x51, 0x47, 0x79, 0xda, 0x6d, 0x3a, 0x85, 0x63, 0xfe, + 0x43, 0x43, 0xba, 0xed, 0x80, 0x77, 0x4b, 0x48, 0x77, 0xb7, 0xd9, 0xd5, 0x90, 0x3f, 0x95, 0x79, + 0x65, 0x47, 0xa8, 0x1d, 0x70, 0x84, 0x4a, 0x10, 0x42, 0xbb, 0x4d, 0x84, 0x34, 0x6c, 0x6f, 0x22, + 0x8f, 0xf3, 0xd1, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x23, 0x58, 0x23, 0xb8, 0x06, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shopping_performance_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shopping_performance_view_service.pb.go new file mode 100644 index 0000000..bbb8340 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/shopping_performance_view_service.pb.go @@ -0,0 +1,177 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/shopping_performance_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [ShoppingPerformanceViewService.GetShoppingPerformanceView][google.ads.googleads.v1.services.ShoppingPerformanceViewService.GetShoppingPerformanceView]. +type GetShoppingPerformanceViewRequest struct { + // The resource name of the Shopping performance view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetShoppingPerformanceViewRequest) Reset() { *m = GetShoppingPerformanceViewRequest{} } +func (m *GetShoppingPerformanceViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetShoppingPerformanceViewRequest) ProtoMessage() {} +func (*GetShoppingPerformanceViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_shopping_performance_view_service_5aa39331bdf25925, []int{0} +} +func (m *GetShoppingPerformanceViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetShoppingPerformanceViewRequest.Unmarshal(m, b) +} +func (m *GetShoppingPerformanceViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetShoppingPerformanceViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetShoppingPerformanceViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetShoppingPerformanceViewRequest.Merge(dst, src) +} +func (m *GetShoppingPerformanceViewRequest) XXX_Size() int { + return xxx_messageInfo_GetShoppingPerformanceViewRequest.Size(m) +} +func (m *GetShoppingPerformanceViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetShoppingPerformanceViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetShoppingPerformanceViewRequest proto.InternalMessageInfo + +func (m *GetShoppingPerformanceViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetShoppingPerformanceViewRequest)(nil), "google.ads.googleads.v1.services.GetShoppingPerformanceViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// ShoppingPerformanceViewServiceClient is the client API for ShoppingPerformanceViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ShoppingPerformanceViewServiceClient interface { + // Returns the requested Shopping performance view in full detail. + GetShoppingPerformanceView(ctx context.Context, in *GetShoppingPerformanceViewRequest, opts ...grpc.CallOption) (*resources.ShoppingPerformanceView, error) +} + +type shoppingPerformanceViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewShoppingPerformanceViewServiceClient(cc *grpc.ClientConn) ShoppingPerformanceViewServiceClient { + return &shoppingPerformanceViewServiceClient{cc} +} + +func (c *shoppingPerformanceViewServiceClient) GetShoppingPerformanceView(ctx context.Context, in *GetShoppingPerformanceViewRequest, opts ...grpc.CallOption) (*resources.ShoppingPerformanceView, error) { + out := new(resources.ShoppingPerformanceView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.ShoppingPerformanceViewService/GetShoppingPerformanceView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ShoppingPerformanceViewServiceServer is the server API for ShoppingPerformanceViewService service. +type ShoppingPerformanceViewServiceServer interface { + // Returns the requested Shopping performance view in full detail. + GetShoppingPerformanceView(context.Context, *GetShoppingPerformanceViewRequest) (*resources.ShoppingPerformanceView, error) +} + +func RegisterShoppingPerformanceViewServiceServer(s *grpc.Server, srv ShoppingPerformanceViewServiceServer) { + s.RegisterService(&_ShoppingPerformanceViewService_serviceDesc, srv) +} + +func _ShoppingPerformanceViewService_GetShoppingPerformanceView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetShoppingPerformanceViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ShoppingPerformanceViewServiceServer).GetShoppingPerformanceView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.ShoppingPerformanceViewService/GetShoppingPerformanceView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ShoppingPerformanceViewServiceServer).GetShoppingPerformanceView(ctx, req.(*GetShoppingPerformanceViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ShoppingPerformanceViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.ShoppingPerformanceViewService", + HandlerType: (*ShoppingPerformanceViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetShoppingPerformanceView", + Handler: _ShoppingPerformanceViewService_GetShoppingPerformanceView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/shopping_performance_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/shopping_performance_view_service.proto", fileDescriptor_shopping_performance_view_service_5aa39331bdf25925) +} + +var fileDescriptor_shopping_performance_view_service_5aa39331bdf25925 = []byte{ + // 378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 
0x84, 0x92, 0xb1, 0x4b, 0xe4, 0x40, + 0x14, 0xc6, 0x49, 0x0e, 0x0e, 0x2e, 0xdc, 0x35, 0xa9, 0x8e, 0x70, 0x1c, 0x7b, 0xbb, 0x5b, 0x1c, + 0x57, 0x4c, 0xc8, 0x59, 0x88, 0x23, 0x22, 0x59, 0x8b, 0xdd, 0x4a, 0x96, 0x5d, 0x48, 0x21, 0x81, + 0x30, 0x26, 0xcf, 0x18, 0xd8, 0xcc, 0xc4, 0x79, 0xd9, 0x6c, 0x21, 0x16, 0x5a, 0xda, 0xfa, 0x1f, + 0x58, 0xfa, 0xa7, 0xd8, 0xda, 0x5b, 0x59, 0xf9, 0x57, 0x48, 0x76, 0x32, 0x11, 0x85, 0xec, 0x76, + 0x1f, 0xc9, 0x37, 0xbf, 0x6f, 0xde, 0x37, 0xcf, 0x9a, 0xa4, 0x42, 0xa4, 0x0b, 0x70, 0x59, 0x82, + 0xae, 0x92, 0xb5, 0xaa, 0x3c, 0x17, 0x41, 0x56, 0x59, 0x0c, 0xe8, 0xe2, 0xb9, 0x28, 0x8a, 0x8c, + 0xa7, 0x51, 0x01, 0xf2, 0x4c, 0xc8, 0x9c, 0xf1, 0x18, 0xa2, 0x2a, 0x83, 0x55, 0xd4, 0x58, 0x48, + 0x21, 0x45, 0x29, 0xec, 0x9e, 0x3a, 0x4e, 0x58, 0x82, 0xa4, 0x25, 0x91, 0xca, 0x23, 0x9a, 0xe4, + 0xf8, 0x5d, 0x59, 0x12, 0x50, 0x2c, 0xe5, 0xc6, 0x30, 0x15, 0xe2, 0xfc, 0xd2, 0x88, 0x22, 0x73, + 0x19, 0xe7, 0xa2, 0x64, 0x65, 0x26, 0x38, 0xaa, 0xbf, 0xfd, 0x89, 0xf5, 0x67, 0x0c, 0xe5, 0xbc, + 0x61, 0x4c, 0xdf, 0x11, 0x41, 0x06, 0xab, 0x19, 0x5c, 0x2c, 0x01, 0x4b, 0x7b, 0x60, 0xfd, 0xd0, + 0x79, 0x11, 0x67, 0x39, 0xfc, 0x34, 0x7a, 0xc6, 0xdf, 0x6f, 0xb3, 0xef, 0xfa, 0xe3, 0x31, 0xcb, + 0xe1, 0xff, 0xb5, 0x69, 0xfd, 0xee, 0xe0, 0xcc, 0xd5, 0x38, 0xf6, 0xb3, 0x61, 0x39, 0xdd, 0x69, + 0xf6, 0x11, 0xd9, 0xd6, 0x07, 0xd9, 0x7a, 0x57, 0x87, 0x76, 0x42, 0xda, 0xca, 0x48, 0x07, 0xa2, + 0x7f, 0x78, 0xf3, 0xf4, 0x72, 0x67, 0xee, 0xd9, 0xbb, 0x75, 0xc3, 0x97, 0x1f, 0x46, 0x3e, 0x88, + 0x97, 0x58, 0x8a, 0x1c, 0x24, 0xba, 0xff, 0xda, 0xca, 0x3f, 0x9d, 0xbf, 0x1a, 0xdd, 0x9a, 0xd6, + 0x30, 0x16, 0xf9, 0xd6, 0x39, 0x46, 0x83, 0xcd, 0x4d, 0x4d, 0xeb, 0xb7, 0x99, 0x1a, 0x27, 0xcd, + 0xaa, 0x91, 0x54, 0x2c, 0x18, 0x4f, 0x89, 0x90, 0xa9, 0x9b, 0x02, 0x5f, 0xbf, 0x9c, 0x5e, 0x87, + 0x22, 0xc3, 0xee, 0x4d, 0xdc, 0xd7, 0xe2, 0xde, 0xfc, 0x32, 0xf6, 0xfd, 0x07, 0xb3, 0x37, 0x56, + 0x40, 0x3f, 0x41, 0xa2, 0x64, 0xad, 0x02, 0x8f, 0x34, 0xc1, 0xf8, 0xa8, 0x2d, 0xa1, 0x9f, 0x60, + 0xd8, 0x5a, 0xc2, 0xc0, 0x0b, 0xb5, 0xe5, 0xd5, 0x1c, 0xaa, 0xef, 0x94, 0xfa, 0x09, 0x52, 0xda, + 0x9a, 0x28, 0x0d, 0x3c, 0x4a, 0xb5, 0xed, 0xf4, 0xeb, 0xfa, 0x9e, 0x3b, 0x6f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x6b, 0x59, 0x13, 0x0f, 0x30, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_constant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_constant_service.pb.go new file mode 100644 index 0000000..ab8462e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_constant_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/topic_constant_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [TopicConstantService.GetTopicConstant][google.ads.googleads.v1.services.TopicConstantService.GetTopicConstant]. +type GetTopicConstantRequest struct { + // Resource name of the Topic to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopicConstantRequest) Reset() { *m = GetTopicConstantRequest{} } +func (m *GetTopicConstantRequest) String() string { return proto.CompactTextString(m) } +func (*GetTopicConstantRequest) ProtoMessage() {} +func (*GetTopicConstantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_topic_constant_service_a951bc1267bf7762, []int{0} +} +func (m *GetTopicConstantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopicConstantRequest.Unmarshal(m, b) +} +func (m *GetTopicConstantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopicConstantRequest.Marshal(b, m, deterministic) +} +func (dst *GetTopicConstantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopicConstantRequest.Merge(dst, src) +} +func (m *GetTopicConstantRequest) XXX_Size() int { + return xxx_messageInfo_GetTopicConstantRequest.Size(m) +} +func (m *GetTopicConstantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopicConstantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopicConstantRequest proto.InternalMessageInfo + +func (m *GetTopicConstantRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetTopicConstantRequest)(nil), "google.ads.googleads.v1.services.GetTopicConstantRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TopicConstantServiceClient is the client API for TopicConstantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TopicConstantServiceClient interface { + // Returns the requested topic constant in full detail. + GetTopicConstant(ctx context.Context, in *GetTopicConstantRequest, opts ...grpc.CallOption) (*resources.TopicConstant, error) +} + +type topicConstantServiceClient struct { + cc *grpc.ClientConn +} + +func NewTopicConstantServiceClient(cc *grpc.ClientConn) TopicConstantServiceClient { + return &topicConstantServiceClient{cc} +} + +func (c *topicConstantServiceClient) GetTopicConstant(ctx context.Context, in *GetTopicConstantRequest, opts ...grpc.CallOption) (*resources.TopicConstant, error) { + out := new(resources.TopicConstant) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.TopicConstantService/GetTopicConstant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TopicConstantServiceServer is the server API for TopicConstantService service. +type TopicConstantServiceServer interface { + // Returns the requested topic constant in full detail. 
+ GetTopicConstant(context.Context, *GetTopicConstantRequest) (*resources.TopicConstant, error) +} + +func RegisterTopicConstantServiceServer(s *grpc.Server, srv TopicConstantServiceServer) { + s.RegisterService(&_TopicConstantService_serviceDesc, srv) +} + +func _TopicConstantService_GetTopicConstant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTopicConstantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TopicConstantServiceServer).GetTopicConstant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.TopicConstantService/GetTopicConstant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TopicConstantServiceServer).GetTopicConstant(ctx, req.(*GetTopicConstantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TopicConstantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.TopicConstantService", + HandlerType: (*TopicConstantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetTopicConstant", + Handler: _TopicConstantService_GetTopicConstant_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/topic_constant_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/topic_constant_service.proto", fileDescriptor_topic_constant_service_a951bc1267bf7762) +} + +var fileDescriptor_topic_constant_service_a951bc1267bf7762 = []byte{ + // 354 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4d, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xe2, 0xd4, + 0xa2, 0xb2, 0xcc, 0xe4, 0xd4, 0x62, 0xfd, 0x92, 0xfc, 0x82, 0xcc, 0xe4, 0xf8, 0xe4, 0xfc, 0xbc, + 0xe2, 0x92, 0xc4, 0xbc, 0x92, 0x78, 0xa8, 0xb8, 0x5e, 0x41, 0x51, 0x7e, 0x49, 0xbe, 0x90, 0x02, + 0x44, 0x8f, 0x5e, 0x62, 0x4a, 0xb1, 0x1e, 0x5c, 0xbb, 0x5e, 0x99, 0xa1, 0x1e, 0x4c, 0xbb, 0x94, + 0x19, 0x2e, 0x0b, 0x8a, 0x52, 0x8b, 0xf3, 0x4b, 0x8b, 0x30, 0x6d, 0x80, 0x98, 0x2c, 0x25, 0x03, + 0xd3, 0x57, 0x90, 0xa9, 0x9f, 0x98, 0x97, 0x97, 0x5f, 0x92, 0x58, 0x92, 0x99, 0x9f, 0x57, 0x0c, + 0x91, 0x55, 0xb2, 0xe3, 0x12, 0x77, 0x4f, 0x2d, 0x09, 0x01, 0x69, 0x74, 0x86, 0xea, 0x0b, 0x4a, + 0x2d, 0x2c, 0x4d, 0x2d, 0x2e, 0x11, 0x52, 0xe6, 0xe2, 0x85, 0x19, 0x1d, 0x9f, 0x97, 0x98, 0x9b, + 0x2a, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0xc4, 0x03, 0x13, 0xf4, 0x4b, 0xcc, 0x4d, 0x35, 0x3a, + 0xc6, 0xc8, 0x25, 0x82, 0xa2, 0x3b, 0x18, 0xe2, 0x5e, 0xa1, 0xb5, 0x8c, 0x5c, 0x02, 0xe8, 0x26, + 0x0b, 0x59, 0xea, 0x11, 0xf2, 0xa6, 0x1e, 0x0e, 0xd7, 0x48, 0x19, 0xe0, 0xd4, 0x0a, 0xf7, 0xbf, + 0x1e, 0x8a, 0x46, 0x25, 0x9d, 0xa6, 0xcb, 0x4f, 0x26, 0x33, 0xa9, 0x09, 0xa9, 0x80, 0x02, 0xa9, + 0x1a, 0xc5, 0x2b, 0xb6, 0x25, 0xc8, 0x2a, 0x8b, 0xf5, 0xb5, 0x6a, 0x9d, 0x1a, 0x98, 0xb8, 0x54, + 0x92, 0xf3, 0x73, 0x09, 0x3a, 0xd0, 0x49, 0x12, 0x9b, 0x77, 0x03, 0x40, 0x81, 0x19, 0xc0, 0x18, + 0xe5, 0x01, 0xd5, 0x9e, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, 0x9f, 0x9e, + 0x9a, 0x07, 0x0e, 0x6a, 0x58, 0xa4, 0x15, 0x64, 0x16, 0xe3, 0x4e, 0x24, 0xd6, 0x30, 0xc6, 0x22, + 0x26, 0x66, 0x77, 0x47, 0xc7, 0x55, 0x4c, 0x0a, 0xee, 0x10, 0x03, 0x1d, 0x53, 0x8a, 0xf5, 0x20, + 0x4c, 0x10, 0x2b, 0xcc, 0x50, 0x0f, 0x6a, 0x71, 0xf1, 0x29, 0x98, 0x92, 
0x18, 0xc7, 0x94, 0xe2, + 0x18, 0xb8, 0x92, 0x98, 0x30, 0xc3, 0x18, 0x98, 0x92, 0x57, 0x4c, 0x2a, 0x10, 0x71, 0x2b, 0x2b, + 0xc7, 0x94, 0x62, 0x2b, 0x2b, 0xb8, 0x22, 0x2b, 0xab, 0x30, 0x43, 0x2b, 0x2b, 0x98, 0xb2, 0x24, + 0x36, 0xb0, 0x3b, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x19, 0x59, 0x7b, 0xd1, 0xcb, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_view_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_view_service.pb.go new file mode 100644 index 0000000..d6f3b3b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/topic_view_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/topic_view_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [TopicViewService.GetTopicView][google.ads.googleads.v1.services.TopicViewService.GetTopicView]. +type GetTopicViewRequest struct { + // The resource name of the topic view to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopicViewRequest) Reset() { *m = GetTopicViewRequest{} } +func (m *GetTopicViewRequest) String() string { return proto.CompactTextString(m) } +func (*GetTopicViewRequest) ProtoMessage() {} +func (*GetTopicViewRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_topic_view_service_42250129c59eb567, []int{0} +} +func (m *GetTopicViewRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopicViewRequest.Unmarshal(m, b) +} +func (m *GetTopicViewRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopicViewRequest.Marshal(b, m, deterministic) +} +func (dst *GetTopicViewRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopicViewRequest.Merge(dst, src) +} +func (m *GetTopicViewRequest) XXX_Size() int { + return xxx_messageInfo_GetTopicViewRequest.Size(m) +} +func (m *GetTopicViewRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopicViewRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopicViewRequest proto.InternalMessageInfo + +func (m *GetTopicViewRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetTopicViewRequest)(nil), "google.ads.googleads.v1.services.GetTopicViewRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TopicViewServiceClient is the client API for TopicViewService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TopicViewServiceClient interface { + // Returns the requested topic view in full detail. + GetTopicView(ctx context.Context, in *GetTopicViewRequest, opts ...grpc.CallOption) (*resources.TopicView, error) +} + +type topicViewServiceClient struct { + cc *grpc.ClientConn +} + +func NewTopicViewServiceClient(cc *grpc.ClientConn) TopicViewServiceClient { + return &topicViewServiceClient{cc} +} + +func (c *topicViewServiceClient) GetTopicView(ctx context.Context, in *GetTopicViewRequest, opts ...grpc.CallOption) (*resources.TopicView, error) { + out := new(resources.TopicView) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.TopicViewService/GetTopicView", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TopicViewServiceServer is the server API for TopicViewService service. +type TopicViewServiceServer interface { + // Returns the requested topic view in full detail. + GetTopicView(context.Context, *GetTopicViewRequest) (*resources.TopicView, error) +} + +func RegisterTopicViewServiceServer(s *grpc.Server, srv TopicViewServiceServer) { + s.RegisterService(&_TopicViewService_serviceDesc, srv) +} + +func _TopicViewService_GetTopicView_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTopicViewRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TopicViewServiceServer).GetTopicView(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.TopicViewService/GetTopicView", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TopicViewServiceServer).GetTopicView(ctx, req.(*GetTopicViewRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TopicViewService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.TopicViewService", + HandlerType: (*TopicViewServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetTopicView", + Handler: _TopicViewService_GetTopicView_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/topic_view_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/topic_view_service.proto", fileDescriptor_topic_view_service_42250129c59eb567) +} + +var fileDescriptor_topic_view_service_42250129c59eb567 = []byte{ + // 363 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xb2, 0x4c, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x4c, 0x29, 0xd6, 0x87, 0x30, 0x41, 0xac, 0x32, 0x43, 0xfd, 0xe2, 0xd4, + 0xa2, 0xb2, 0xcc, 0xe4, 0xd4, 0x62, 0xfd, 0x92, 0xfc, 0x82, 0xcc, 0xe4, 0xf8, 0xb2, 0xcc, 0xd4, + 0xf2, 0x78, 0xa8, 0x98, 0x5e, 0x41, 0x51, 0x7e, 0x49, 0xbe, 0x90, 0x02, 0x44, 0xbd, 0x5e, 0x62, + 0x4a, 0xb1, 0x1e, 0x5c, 0xab, 0x5e, 0x99, 0xa1, 0x1e, 0x4c, 0xab, 0x94, 0x11, 0x2e, 0xc3, 0x8b, + 0x52, 0x8b, 0xf3, 0x4b, 0x8b, 0x50, 0x4d, 0x87, 0x98, 0x2a, 
0x25, 0x03, 0xd3, 0x53, 0x90, 0xa9, + 0x9f, 0x98, 0x97, 0x97, 0x5f, 0x92, 0x58, 0x92, 0x99, 0x9f, 0x57, 0x0c, 0x91, 0x55, 0xb2, 0xe2, + 0x12, 0x76, 0x4f, 0x2d, 0x09, 0x01, 0x69, 0x0a, 0xcb, 0x4c, 0x2d, 0x0f, 0x4a, 0x2d, 0x2c, 0x4d, + 0x2d, 0x2e, 0x11, 0x52, 0xe6, 0xe2, 0x85, 0x19, 0x19, 0x9f, 0x97, 0x98, 0x9b, 0x2a, 0xc1, 0xa8, + 0xc0, 0xa8, 0xc1, 0x19, 0xc4, 0x03, 0x13, 0xf4, 0x4b, 0xcc, 0x4d, 0x35, 0xda, 0xc7, 0xc8, 0x25, + 0x00, 0xd7, 0x19, 0x0c, 0x71, 0xa3, 0xd0, 0x4a, 0x46, 0x2e, 0x1e, 0x64, 0x13, 0x85, 0x4c, 0xf5, + 0x08, 0x79, 0x4b, 0x0f, 0x8b, 0x0b, 0xa4, 0x74, 0x70, 0x6a, 0x83, 0xfb, 0x55, 0x0f, 0xae, 0x49, + 0xc9, 0xa4, 0xe9, 0xf2, 0x93, 0xc9, 0x4c, 0x7a, 0x42, 0x3a, 0xa0, 0xc0, 0xa8, 0x46, 0x71, 0xba, + 0x6d, 0x72, 0x69, 0x71, 0x49, 0x7e, 0x6e, 0x6a, 0x51, 0xb1, 0xbe, 0x16, 0x24, 0x74, 0x40, 0x3a, + 0x8a, 0xf5, 0xb5, 0x6a, 0x9d, 0xfe, 0x30, 0x72, 0xa9, 0x24, 0xe7, 0xe7, 0x12, 0x74, 0xa0, 0x93, + 0x28, 0xba, 0x37, 0x03, 0x40, 0x81, 0x17, 0xc0, 0x18, 0xe5, 0x01, 0xd5, 0x9a, 0x9e, 0x9f, 0x93, + 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, 0x9f, 0x9e, 0x9a, 0x07, 0x0e, 0x5a, 0x58, 0x04, 0x15, + 0x64, 0x16, 0xe3, 0x4e, 0x0c, 0xd6, 0x30, 0xc6, 0x22, 0x26, 0x66, 0x77, 0x47, 0xc7, 0x55, 0x4c, + 0x0a, 0xee, 0x10, 0x03, 0x1d, 0x53, 0x8a, 0xf5, 0x20, 0x4c, 0x10, 0x2b, 0xcc, 0x50, 0x0f, 0x6a, + 0x71, 0xf1, 0x29, 0x98, 0x92, 0x18, 0xc7, 0x94, 0xe2, 0x18, 0xb8, 0x92, 0x98, 0x30, 0xc3, 0x18, + 0x98, 0x92, 0x57, 0x4c, 0x2a, 0x10, 0x71, 0x2b, 0x2b, 0xc7, 0x94, 0x62, 0x2b, 0x2b, 0xb8, 0x22, + 0x2b, 0xab, 0x30, 0x43, 0x2b, 0x2b, 0x98, 0xb2, 0x24, 0x36, 0xb0, 0x3b, 0x8d, 0x01, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x5f, 0xc0, 0x75, 0x26, 0xb3, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_interest_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_interest_service.pb.go new file mode 100644 index 0000000..e8f8c2a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_interest_service.pb.go @@ -0,0 +1,175 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/user_interest_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [UserInterestService.GetUserInterest][google.ads.googleads.v1.services.UserInterestService.GetUserInterest]. +type GetUserInterestRequest struct { + // Resource name of the UserInterest to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetUserInterestRequest) Reset() { *m = GetUserInterestRequest{} } +func (m *GetUserInterestRequest) String() string { return proto.CompactTextString(m) } +func (*GetUserInterestRequest) ProtoMessage() {} +func (*GetUserInterestRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_user_interest_service_76561fc7778b0898, []int{0} +} +func (m *GetUserInterestRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetUserInterestRequest.Unmarshal(m, b) +} +func (m *GetUserInterestRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetUserInterestRequest.Marshal(b, m, deterministic) +} +func (dst *GetUserInterestRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetUserInterestRequest.Merge(dst, src) +} +func (m *GetUserInterestRequest) XXX_Size() int { + return xxx_messageInfo_GetUserInterestRequest.Size(m) +} +func (m *GetUserInterestRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetUserInterestRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetUserInterestRequest proto.InternalMessageInfo + +func (m *GetUserInterestRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetUserInterestRequest)(nil), "google.ads.googleads.v1.services.GetUserInterestRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// UserInterestServiceClient is the client API for UserInterestService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type UserInterestServiceClient interface { + // Returns the requested user interest in full detail + GetUserInterest(ctx context.Context, in *GetUserInterestRequest, opts ...grpc.CallOption) (*resources.UserInterest, error) +} + +type userInterestServiceClient struct { + cc *grpc.ClientConn +} + +func NewUserInterestServiceClient(cc *grpc.ClientConn) UserInterestServiceClient { + return &userInterestServiceClient{cc} +} + +func (c *userInterestServiceClient) GetUserInterest(ctx context.Context, in *GetUserInterestRequest, opts ...grpc.CallOption) (*resources.UserInterest, error) { + out := new(resources.UserInterest) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.UserInterestService/GetUserInterest", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// UserInterestServiceServer is the server API for UserInterestService service. 
+type UserInterestServiceServer interface { + // Returns the requested user interest in full detail + GetUserInterest(context.Context, *GetUserInterestRequest) (*resources.UserInterest, error) +} + +func RegisterUserInterestServiceServer(s *grpc.Server, srv UserInterestServiceServer) { + s.RegisterService(&_UserInterestService_serviceDesc, srv) +} + +func _UserInterestService_GetUserInterest_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUserInterestRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UserInterestServiceServer).GetUserInterest(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.UserInterestService/GetUserInterest", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UserInterestServiceServer).GetUserInterest(ctx, req.(*GetUserInterestRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _UserInterestService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.UserInterestService", + HandlerType: (*UserInterestServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetUserInterest", + Handler: _UserInterestService_GetUserInterest_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/user_interest_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/user_interest_service.proto", fileDescriptor_user_interest_service_76561fc7778b0898) +} + +var fileDescriptor_user_interest_service_76561fc7778b0898 = []byte{ + // 364 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xcf, 0x4a, 0xeb, 0x40, + 0x14, 0xc6, 0x49, 0x2e, 0x5c, 0xb8, 0xe1, 0x5e, 0x2e, 0x44, 0x90, 0x52, 0x5c, 0x94, 0xda, 0x85, + 0x74, 0x31, 0x43, 0x14, 0x51, 0x46, 0xbb, 0x48, 0x37, 0xd5, 0x8d, 0x94, 0x8a, 0x5d, 0x48, 0xa0, + 0x8c, 0xcd, 0x21, 0x04, 0x9a, 0x99, 0x3a, 0x67, 0xd2, 0x8d, 0xb8, 0xf1, 0x15, 0x7c, 0x03, 0x97, + 0xee, 0x7d, 0x09, 0xc1, 0x95, 0xaf, 0xe0, 0xca, 0x97, 0x50, 0xd2, 0xc9, 0x84, 0xaa, 0x2d, 0xdd, + 0x7d, 0x9c, 0x7c, 0xbf, 0xef, 0xfc, 0xc9, 0x78, 0xc7, 0x89, 0x94, 0xc9, 0x04, 0x28, 0x8f, 0x91, + 0x1a, 0x59, 0xa8, 0x59, 0x40, 0x11, 0xd4, 0x2c, 0x1d, 0x03, 0xd2, 0x1c, 0x41, 0x8d, 0x52, 0xa1, + 0x41, 0x01, 0xea, 0x51, 0x59, 0x26, 0x53, 0x25, 0xb5, 0xf4, 0x1b, 0x06, 0x21, 0x3c, 0x46, 0x52, + 0xd1, 0x64, 0x16, 0x10, 0x4b, 0xd7, 0xf7, 0x57, 0xe5, 0x2b, 0x40, 0x99, 0xab, 0x1f, 0x0d, 0x4c, + 0x70, 0x7d, 0xcb, 0x62, 0xd3, 0x94, 0x72, 0x21, 0xa4, 0xe6, 0x3a, 0x95, 0x02, 0xcd, 0xd7, 0x66, + 0xc7, 0xdb, 0xec, 0x81, 0xbe, 0x40, 0x50, 0xa7, 0x25, 0x36, 0x80, 0xeb, 0x1c, 0x50, 0xfb, 0xdb, + 0xde, 0x3f, 0x1b, 0x3c, 0x12, 0x3c, 0x83, 0x9a, 0xd3, 0x70, 0x76, 0xfe, 0x0c, 0xfe, 0xda, 0xe2, + 0x19, 0xcf, 0x60, 0xf7, 0xc5, 0xf1, 0x36, 0x16, 0xe1, 0x73, 0x33, 0xac, 0xff, 0xe4, 0x78, 0xff, + 0xbf, 0xe5, 0xfa, 0x87, 0x64, 0xdd, 0x8a, 0x64, 0xf9, 0x28, 0x75, 0xba, 0x92, 0xac, 0x56, 0x27, + 0x8b, 0x5c, 0xf3, 0xe0, 0xee, 0xf5, 0xed, 0xde, 0x0d, 0x7c, 0x5a, 0x9c, 0xe7, 0xe6, 0xcb, 0x1a, + 0x9d, 0x71, 0x8e, 0x5a, 0x66, 0xa0, 0x90, 0xb6, 0xe7, 0xf7, 0xb2, 0x10, 0xd2, 0xf6, 0x6d, 0xf7, + 0xc3, 0xf1, 0x5a, 0x63, 0x99, 0xad, 0x9d, 0xb4, 0x5b, 0x5b, 0xb2, 0x75, 0xbf, 0xb8, 0x68, 0xdf, + 0xb9, 0x3c, 0x29, 0xe9, 0x44, 0x4e, 0xb8, 0x48, 0x88, 0x54, 0x09, 0x4d, 0x40, 0xcc, 0xef, 0x6d, 
+ 0x7f, 0xdc, 0x34, 0xc5, 0xd5, 0xef, 0xe4, 0xc8, 0x8a, 0x07, 0xf7, 0x57, 0x2f, 0x0c, 0x1f, 0xdd, + 0x46, 0xcf, 0x04, 0x86, 0x31, 0x12, 0x23, 0x0b, 0x35, 0x0c, 0x48, 0xd9, 0x18, 0x9f, 0xad, 0x25, + 0x0a, 0x63, 0x8c, 0x2a, 0x4b, 0x34, 0x0c, 0x22, 0x6b, 0x79, 0x77, 0x5b, 0xa6, 0xce, 0x58, 0x18, + 0x23, 0x63, 0x95, 0x89, 0xb1, 0x61, 0xc0, 0x98, 0xb5, 0x5d, 0xfd, 0x9e, 0xcf, 0xb9, 0xf7, 0x19, + 0x00, 0x00, 0xff, 0xff, 0x92, 0xe6, 0xf7, 0x12, 0xce, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_list_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_list_service.pb.go new file mode 100644 index 0000000..f5c2951 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/user_list_service.pb.go @@ -0,0 +1,588 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/user_list_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [UserListService.GetUserList][google.ads.googleads.v1.services.UserListService.GetUserList]. +type GetUserListRequest struct { + // The resource name of the user list to fetch. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetUserListRequest) Reset() { *m = GetUserListRequest{} } +func (m *GetUserListRequest) String() string { return proto.CompactTextString(m) } +func (*GetUserListRequest) ProtoMessage() {} +func (*GetUserListRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_service_a2edc6a3997d8389, []int{0} +} +func (m *GetUserListRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetUserListRequest.Unmarshal(m, b) +} +func (m *GetUserListRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetUserListRequest.Marshal(b, m, deterministic) +} +func (dst *GetUserListRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetUserListRequest.Merge(dst, src) +} +func (m *GetUserListRequest) XXX_Size() int { + return xxx_messageInfo_GetUserListRequest.Size(m) +} +func (m *GetUserListRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetUserListRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetUserListRequest proto.InternalMessageInfo + +func (m *GetUserListRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Request message for [UserListService.MutateUserLists][google.ads.googleads.v1.services.UserListService.MutateUserLists]. +type MutateUserListsRequest struct { + // The ID of the customer whose user lists are being modified. + CustomerId string `protobuf:"bytes,1,opt,name=customer_id,json=customerId,proto3" json:"customer_id,omitempty"` + // The list of operations to perform on individual user lists. + Operations []*UserListOperation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // If true, successful operations will be carried out and invalid + // operations will return errors. If false, all operations will be carried + // out in one transaction if and only if they are all valid. + // Default is false. + PartialFailure bool `protobuf:"varint,3,opt,name=partial_failure,json=partialFailure,proto3" json:"partial_failure,omitempty"` + // If true, the request is validated but not executed. Only errors are + // returned, not results. 
+ ValidateOnly bool `protobuf:"varint,4,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateUserListsRequest) Reset() { *m = MutateUserListsRequest{} } +func (m *MutateUserListsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateUserListsRequest) ProtoMessage() {} +func (*MutateUserListsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_service_a2edc6a3997d8389, []int{1} +} +func (m *MutateUserListsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateUserListsRequest.Unmarshal(m, b) +} +func (m *MutateUserListsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateUserListsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateUserListsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateUserListsRequest.Merge(dst, src) +} +func (m *MutateUserListsRequest) XXX_Size() int { + return xxx_messageInfo_MutateUserListsRequest.Size(m) +} +func (m *MutateUserListsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateUserListsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateUserListsRequest proto.InternalMessageInfo + +func (m *MutateUserListsRequest) GetCustomerId() string { + if m != nil { + return m.CustomerId + } + return "" +} + +func (m *MutateUserListsRequest) GetOperations() []*UserListOperation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *MutateUserListsRequest) GetPartialFailure() bool { + if m != nil { + return m.PartialFailure + } + return false +} + +func (m *MutateUserListsRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// A single operation (create, update) on a user list. +type UserListOperation struct { + // FieldMask that determines which resource fields are modified in an update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The mutate operation. 
+ // + // Types that are valid to be assigned to Operation: + // *UserListOperation_Create + // *UserListOperation_Update + // *UserListOperation_Remove + Operation isUserListOperation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserListOperation) Reset() { *m = UserListOperation{} } +func (m *UserListOperation) String() string { return proto.CompactTextString(m) } +func (*UserListOperation) ProtoMessage() {} +func (*UserListOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_service_a2edc6a3997d8389, []int{2} +} +func (m *UserListOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserListOperation.Unmarshal(m, b) +} +func (m *UserListOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserListOperation.Marshal(b, m, deterministic) +} +func (dst *UserListOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserListOperation.Merge(dst, src) +} +func (m *UserListOperation) XXX_Size() int { + return xxx_messageInfo_UserListOperation.Size(m) +} +func (m *UserListOperation) XXX_DiscardUnknown() { + xxx_messageInfo_UserListOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_UserListOperation proto.InternalMessageInfo + +func (m *UserListOperation) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type isUserListOperation_Operation interface { + isUserListOperation_Operation() +} + +type UserListOperation_Create struct { + Create *resources.UserList `protobuf:"bytes,1,opt,name=create,proto3,oneof"` +} + +type UserListOperation_Update struct { + Update *resources.UserList `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type UserListOperation_Remove struct { + Remove string `protobuf:"bytes,3,opt,name=remove,proto3,oneof"` +} + +func (*UserListOperation_Create) isUserListOperation_Operation() {} + +func (*UserListOperation_Update) isUserListOperation_Operation() {} + +func (*UserListOperation_Remove) isUserListOperation_Operation() {} + +func (m *UserListOperation) GetOperation() isUserListOperation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *UserListOperation) GetCreate() *resources.UserList { + if x, ok := m.GetOperation().(*UserListOperation_Create); ok { + return x.Create + } + return nil +} + +func (m *UserListOperation) GetUpdate() *resources.UserList { + if x, ok := m.GetOperation().(*UserListOperation_Update); ok { + return x.Update + } + return nil +} + +func (m *UserListOperation) GetRemove() string { + if x, ok := m.GetOperation().(*UserListOperation_Remove); ok { + return x.Remove + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*UserListOperation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UserListOperation_OneofMarshaler, _UserListOperation_OneofUnmarshaler, _UserListOperation_OneofSizer, []interface{}{ + (*UserListOperation_Create)(nil), + (*UserListOperation_Update)(nil), + (*UserListOperation_Remove)(nil), + } +} + +func _UserListOperation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UserListOperation) + // operation + switch x := m.Operation.(type) { + case *UserListOperation_Create: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *UserListOperation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *UserListOperation_Remove: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Remove) + case nil: + default: + return fmt.Errorf("UserListOperation.Operation has unexpected type %T", x) + } + return nil +} + +func _UserListOperation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UserListOperation) + switch tag { + case 1: // operation.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.UserList) + err := b.DecodeMessage(msg) + m.Operation = &UserListOperation_Create{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(resources.UserList) + err := b.DecodeMessage(msg) + m.Operation = &UserListOperation_Update{msg} + return true, err + case 3: // operation.remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &UserListOperation_Remove{x} + return true, err + default: + return false, nil + } +} + +func _UserListOperation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UserListOperation) + // operation + switch x := m.Operation.(type) { + case *UserListOperation_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserListOperation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UserListOperation_Remove: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Remove))) + n += len(x.Remove) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for user list mutate. +type MutateUserListsResponse struct { + // Errors that pertain to operation failures in the partial failure mode. + // Returned only when partial_failure = true and all errors occur inside the + // operations. If any errors occur outside the operations (e.g. auth errors), + // we return an RPC level error. + PartialFailureError *status.Status `protobuf:"bytes,3,opt,name=partial_failure_error,json=partialFailureError,proto3" json:"partial_failure_error,omitempty"` + // All results for the mutate. 
+ Results []*MutateUserListResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateUserListsResponse) Reset() { *m = MutateUserListsResponse{} } +func (m *MutateUserListsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateUserListsResponse) ProtoMessage() {} +func (*MutateUserListsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_service_a2edc6a3997d8389, []int{3} +} +func (m *MutateUserListsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateUserListsResponse.Unmarshal(m, b) +} +func (m *MutateUserListsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateUserListsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateUserListsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateUserListsResponse.Merge(dst, src) +} +func (m *MutateUserListsResponse) XXX_Size() int { + return xxx_messageInfo_MutateUserListsResponse.Size(m) +} +func (m *MutateUserListsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateUserListsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateUserListsResponse proto.InternalMessageInfo + +func (m *MutateUserListsResponse) GetPartialFailureError() *status.Status { + if m != nil { + return m.PartialFailureError + } + return nil +} + +func (m *MutateUserListsResponse) GetResults() []*MutateUserListResult { + if m != nil { + return m.Results + } + return nil +} + +// The result for the user list mutate. +type MutateUserListResult struct { + // Returned for successful operations. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateUserListResult) Reset() { *m = MutateUserListResult{} } +func (m *MutateUserListResult) String() string { return proto.CompactTextString(m) } +func (*MutateUserListResult) ProtoMessage() {} +func (*MutateUserListResult) Descriptor() ([]byte, []int) { + return fileDescriptor_user_list_service_a2edc6a3997d8389, []int{4} +} +func (m *MutateUserListResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateUserListResult.Unmarshal(m, b) +} +func (m *MutateUserListResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateUserListResult.Marshal(b, m, deterministic) +} +func (dst *MutateUserListResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateUserListResult.Merge(dst, src) +} +func (m *MutateUserListResult) XXX_Size() int { + return xxx_messageInfo_MutateUserListResult.Size(m) +} +func (m *MutateUserListResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutateUserListResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateUserListResult proto.InternalMessageInfo + +func (m *MutateUserListResult) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetUserListRequest)(nil), "google.ads.googleads.v1.services.GetUserListRequest") + proto.RegisterType((*MutateUserListsRequest)(nil), "google.ads.googleads.v1.services.MutateUserListsRequest") + proto.RegisterType((*UserListOperation)(nil), "google.ads.googleads.v1.services.UserListOperation") + proto.RegisterType((*MutateUserListsResponse)(nil), 
"google.ads.googleads.v1.services.MutateUserListsResponse") + proto.RegisterType((*MutateUserListResult)(nil), "google.ads.googleads.v1.services.MutateUserListResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// UserListServiceClient is the client API for UserListService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type UserListServiceClient interface { + // Returns the requested user list. + GetUserList(ctx context.Context, in *GetUserListRequest, opts ...grpc.CallOption) (*resources.UserList, error) + // Creates or updates user lists. Operation statuses are returned. + MutateUserLists(ctx context.Context, in *MutateUserListsRequest, opts ...grpc.CallOption) (*MutateUserListsResponse, error) +} + +type userListServiceClient struct { + cc *grpc.ClientConn +} + +func NewUserListServiceClient(cc *grpc.ClientConn) UserListServiceClient { + return &userListServiceClient{cc} +} + +func (c *userListServiceClient) GetUserList(ctx context.Context, in *GetUserListRequest, opts ...grpc.CallOption) (*resources.UserList, error) { + out := new(resources.UserList) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.UserListService/GetUserList", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *userListServiceClient) MutateUserLists(ctx context.Context, in *MutateUserListsRequest, opts ...grpc.CallOption) (*MutateUserListsResponse, error) { + out := new(MutateUserListsResponse) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.UserListService/MutateUserLists", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// UserListServiceServer is the server API for UserListService service. +type UserListServiceServer interface { + // Returns the requested user list. + GetUserList(context.Context, *GetUserListRequest) (*resources.UserList, error) + // Creates or updates user lists. Operation statuses are returned. 
+ MutateUserLists(context.Context, *MutateUserListsRequest) (*MutateUserListsResponse, error) +} + +func RegisterUserListServiceServer(s *grpc.Server, srv UserListServiceServer) { + s.RegisterService(&_UserListService_serviceDesc, srv) +} + +func _UserListService_GetUserList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUserListRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UserListServiceServer).GetUserList(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.UserListService/GetUserList", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UserListServiceServer).GetUserList(ctx, req.(*GetUserListRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UserListService_MutateUserLists_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateUserListsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UserListServiceServer).MutateUserLists(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.UserListService/MutateUserLists", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UserListServiceServer).MutateUserLists(ctx, req.(*MutateUserListsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _UserListService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.UserListService", + HandlerType: (*UserListServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetUserList", + Handler: _UserListService_GetUserList_Handler, + }, + { + MethodName: "MutateUserLists", + Handler: _UserListService_MutateUserLists_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/user_list_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/user_list_service.proto", fileDescriptor_user_list_service_a2edc6a3997d8389) +} + +var fileDescriptor_user_list_service_a2edc6a3997d8389 = []byte{ + // 710 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x4f, 0x6b, 0xd4, 0x4e, + 0x18, 0xfe, 0x25, 0xfd, 0x51, 0xed, 0xa4, 0x5a, 0x1c, 0xab, 0x5d, 0x16, 0xd1, 0x25, 0x16, 0x2c, + 0x5b, 0x9c, 0xb0, 0xbb, 0x22, 0x6d, 0x4a, 0x0f, 0x5b, 0xe8, 0x1f, 0xc1, 0xda, 0x92, 0x62, 0x0f, + 0xb2, 0x10, 0xa6, 0x9b, 0xe9, 0x12, 0x9a, 0x64, 0xe2, 0xcc, 0x64, 0xa5, 0x94, 0x5e, 0x04, 0x3f, + 0x81, 0xdf, 0x40, 0xf0, 0xe2, 0xd5, 0x4f, 0xe0, 0xd5, 0xab, 0x57, 0x8f, 0x9e, 0xfc, 0x0a, 0x22, + 0x48, 0x66, 0x32, 0x69, 0x77, 0xeb, 0xb2, 0xb6, 0xb7, 0x77, 0xde, 0x79, 0x9e, 0xe7, 0x7d, 0xe7, + 0xfd, 0x33, 0x60, 0xa9, 0x47, 0x69, 0x2f, 0x22, 0x0e, 0x0e, 0xb8, 0xa3, 0xcc, 0xdc, 0xea, 0x37, + 0x1c, 0x4e, 0x58, 0x3f, 0xec, 0x12, 0xee, 0x64, 0x9c, 0x30, 0x3f, 0x0a, 0xb9, 0xf0, 0x0b, 0x17, + 0x4a, 0x19, 0x15, 0x14, 0xd6, 0x14, 0x1c, 0xe1, 0x80, 0xa3, 0x92, 0x89, 0xfa, 0x0d, 0xa4, 0x99, + 0xd5, 0xc6, 0x28, 0x6d, 0x46, 0x38, 0xcd, 0xd8, 0x80, 0xb8, 0x12, 0xad, 0xde, 0xd3, 0x94, 0x34, + 0x74, 0x70, 0x92, 0x50, 0x81, 0x45, 0x48, 0x13, 0x5e, 0xdc, 0x16, 0x21, 0x1d, 0x79, 0x3a, 0xc8, + 0x0e, 0x9d, 0xc3, 0x90, 0x44, 0x81, 0x1f, 0x63, 0x7e, 0x54, 
0x20, 0xee, 0x0f, 0x23, 0xde, 0x30, + 0x9c, 0xa6, 0x84, 0x69, 0x85, 0xb9, 0xe2, 0x9e, 0xa5, 0x5d, 0x87, 0x0b, 0x2c, 0xb2, 0xe2, 0xc2, + 0x5e, 0x06, 0x70, 0x93, 0x88, 0x97, 0x9c, 0xb0, 0xe7, 0x21, 0x17, 0x1e, 0x79, 0x9d, 0x11, 0x2e, + 0xe0, 0x43, 0x70, 0x43, 0xe7, 0xea, 0x27, 0x38, 0x26, 0x15, 0xa3, 0x66, 0x2c, 0x4c, 0x79, 0xd3, + 0xda, 0xf9, 0x02, 0xc7, 0xc4, 0xfe, 0x6e, 0x80, 0xbb, 0xdb, 0x99, 0xc0, 0x82, 0x68, 0x3a, 0xd7, + 0xfc, 0x07, 0xc0, 0xea, 0x66, 0x5c, 0xd0, 0x98, 0x30, 0x3f, 0x0c, 0x0a, 0x36, 0xd0, 0xae, 0x67, + 0x01, 0xdc, 0x03, 0x80, 0xa6, 0x84, 0xa9, 0x57, 0x56, 0xcc, 0xda, 0xc4, 0x82, 0xd5, 0x6c, 0xa1, + 0x71, 0x95, 0x45, 0x3a, 0xd0, 0x8e, 0xe6, 0x7a, 0xe7, 0x64, 0xe0, 0x23, 0x30, 0x93, 0x62, 0x26, + 0x42, 0x1c, 0xf9, 0x87, 0x38, 0x8c, 0x32, 0x46, 0x2a, 0x13, 0x35, 0x63, 0xe1, 0xba, 0x77, 0xb3, + 0x70, 0x6f, 0x28, 0x6f, 0xfe, 0xbc, 0x3e, 0x8e, 0xc2, 0x00, 0x0b, 0xe2, 0xd3, 0x24, 0x3a, 0xae, + 0xfc, 0x2f, 0x61, 0xd3, 0xda, 0xb9, 0x93, 0x44, 0xc7, 0xf6, 0x3b, 0x13, 0xdc, 0xba, 0x10, 0x0f, + 0xae, 0x00, 0x2b, 0x4b, 0x25, 0x31, 0xaf, 0xbe, 0x24, 0x5a, 0xcd, 0xaa, 0xce, 0x5c, 0x97, 0x1f, + 0x6d, 0xe4, 0x0d, 0xda, 0xc6, 0xfc, 0xc8, 0x03, 0x0a, 0x9e, 0xdb, 0x70, 0x1d, 0x4c, 0x76, 0x19, + 0xc1, 0x42, 0xd5, 0xd3, 0x6a, 0x2e, 0x8e, 0x7c, 0x71, 0x39, 0x29, 0xe5, 0x93, 0xb7, 0xfe, 0xf3, + 0x0a, 0x72, 0x2e, 0xa3, 0x44, 0x2b, 0xe6, 0x95, 0x64, 0x14, 0x19, 0x56, 0xc0, 0x24, 0x23, 0x31, + 0xed, 0xab, 0x2a, 0x4d, 0xe5, 0x37, 0xea, 0xbc, 0x66, 0x81, 0xa9, 0xb2, 0xac, 0xf6, 0x67, 0x03, + 0xcc, 0x5d, 0x68, 0x33, 0x4f, 0x69, 0xc2, 0x09, 0xdc, 0x00, 0x77, 0x86, 0x2a, 0xee, 0x13, 0xc6, + 0x28, 0x93, 0x8a, 0x56, 0x13, 0xea, 0xc4, 0x58, 0xda, 0x45, 0x7b, 0x72, 0xec, 0xbc, 0xdb, 0x83, + 0xbd, 0x58, 0xcf, 0xe1, 0x70, 0x17, 0x5c, 0x63, 0x84, 0x67, 0x91, 0xd0, 0xb3, 0xf0, 0x74, 0xfc, + 0x2c, 0x0c, 0xe6, 0xe4, 0x49, 0xba, 0xa7, 0x65, 0xec, 0x15, 0x30, 0xfb, 0x37, 0xc0, 0x3f, 0x4d, + 0x76, 0xf3, 0xb7, 0x09, 0x66, 0x34, 0x6f, 0x4f, 0xc5, 0x83, 0x1f, 0x0d, 0x60, 0x9d, 0xdb, 0x14, + 0xf8, 0x64, 0x7c, 0x86, 0x17, 0x17, 0xab, 0x7a, 0x99, 0x56, 0xd9, 0xad, 0xb7, 0xdf, 0x7e, 0xbc, + 0x37, 0x1f, 0xc3, 0xc5, 0xfc, 0xef, 0x38, 0x19, 0x48, 0x7b, 0x55, 0xef, 0x12, 0x77, 0xea, 0xf2, + 0x33, 0x91, 0x7d, 0x71, 0xea, 0xa7, 0xf0, 0x8b, 0x01, 0x66, 0x86, 0xda, 0x05, 0x97, 0x2e, 0x5b, + 0x4d, 0xbd, 0xc8, 0xd5, 0xe5, 0x2b, 0x30, 0xd5, 0x6c, 0xd8, 0xcb, 0x32, 0xfb, 0x96, 0x8d, 0xf2, + 0xec, 0xcf, 0xd2, 0x3d, 0x39, 0xf7, 0x31, 0xac, 0xd6, 0x4f, 0xcf, 0x92, 0x77, 0x63, 0x29, 0xe4, + 0x1a, 0xf5, 0xb5, 0x5f, 0x06, 0x98, 0xef, 0xd2, 0x78, 0x6c, 0xec, 0xb5, 0xd9, 0xa1, 0x2e, 0xed, + 0xe6, 0xfb, 0xb7, 0x6b, 0xbc, 0xda, 0x2a, 0x98, 0x3d, 0x1a, 0xe1, 0xa4, 0x87, 0x28, 0xeb, 0x39, + 0x3d, 0x92, 0xc8, 0xed, 0xd4, 0x3f, 0x72, 0x1a, 0xf2, 0xd1, 0x9f, 0xff, 0x8a, 0x36, 0x3e, 0x98, + 0x13, 0x9b, 0xed, 0xf6, 0x27, 0xb3, 0xb6, 0xa9, 0x04, 0xdb, 0x01, 0x47, 0xca, 0xcc, 0xad, 0xfd, + 0x06, 0x2a, 0x02, 0xf3, 0xaf, 0x1a, 0xd2, 0x69, 0x07, 0xbc, 0x53, 0x42, 0x3a, 0xfb, 0x8d, 0x8e, + 0x86, 0xfc, 0x34, 0xe7, 0x95, 0xdf, 0x75, 0xdb, 0x01, 0x77, 0xdd, 0x12, 0xe4, 0xba, 0xfb, 0x0d, + 0xd7, 0xd5, 0xb0, 0x83, 0x49, 0x99, 0x67, 0xeb, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x0d, + 0xd1, 0x85, 0xa3, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/video_service.pb.go b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/video_service.pb.go new file mode 100644 index 0000000..28af455 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/ads/googleads/v1/services/video_service.pb.go @@ -0,0 +1,174 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/ads/googleads/v1/services/video_service.proto + +package services // import "google.golang.org/genproto/googleapis/ads/googleads/v1/services" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import resources "google.golang.org/genproto/googleapis/ads/googleads/v1/resources" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [VideoService.GetVideo][google.ads.googleads.v1.services.VideoService.GetVideo]. +type GetVideoRequest struct { + // The resource name of the video to fetch. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVideoRequest) Reset() { *m = GetVideoRequest{} } +func (m *GetVideoRequest) String() string { return proto.CompactTextString(m) } +func (*GetVideoRequest) ProtoMessage() {} +func (*GetVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_service_4f246e1a4123a787, []int{0} +} +func (m *GetVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVideoRequest.Unmarshal(m, b) +} +func (m *GetVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVideoRequest.Marshal(b, m, deterministic) +} +func (dst *GetVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVideoRequest.Merge(dst, src) +} +func (m *GetVideoRequest) XXX_Size() int { + return xxx_messageInfo_GetVideoRequest.Size(m) +} +func (m *GetVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVideoRequest proto.InternalMessageInfo + +func (m *GetVideoRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func init() { + proto.RegisterType((*GetVideoRequest)(nil), "google.ads.googleads.v1.services.GetVideoRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoServiceClient is the client API for VideoService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoServiceClient interface { + // Returns the requested video in full detail. 
+ GetVideo(ctx context.Context, in *GetVideoRequest, opts ...grpc.CallOption) (*resources.Video, error) +} + +type videoServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoServiceClient(cc *grpc.ClientConn) VideoServiceClient { + return &videoServiceClient{cc} +} + +func (c *videoServiceClient) GetVideo(ctx context.Context, in *GetVideoRequest, opts ...grpc.CallOption) (*resources.Video, error) { + out := new(resources.Video) + err := c.cc.Invoke(ctx, "/google.ads.googleads.v1.services.VideoService/GetVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoServiceServer is the server API for VideoService service. +type VideoServiceServer interface { + // Returns the requested video in full detail. + GetVideo(context.Context, *GetVideoRequest) (*resources.Video, error) +} + +func RegisterVideoServiceServer(s *grpc.Server, srv VideoServiceServer) { + s.RegisterService(&_VideoService_serviceDesc, srv) +} + +func _VideoService_GetVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoServiceServer).GetVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.ads.googleads.v1.services.VideoService/GetVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoServiceServer).GetVideo(ctx, req.(*GetVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.ads.googleads.v1.services.VideoService", + HandlerType: (*VideoServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetVideo", + Handler: _VideoService_GetVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/ads/googleads/v1/services/video_service.proto", +} + +func init() { + proto.RegisterFile("google/ads/googleads/v1/services/video_service.proto", fileDescriptor_video_service_4f246e1a4123a787) +} + +var fileDescriptor_video_service_4f246e1a4123a787 = []byte{ + // 349 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x31, 0x4b, 0xfb, 0x40, + 0x18, 0xc6, 0x49, 0xfe, 0xf0, 0x47, 0x43, 0x45, 0xcc, 0x24, 0xc5, 0xa1, 0xd4, 0x0e, 0xa5, 0xe0, + 0x9d, 0x51, 0x71, 0x38, 0x71, 0x48, 0x97, 0x3a, 0x49, 0xa9, 0x90, 0x41, 0x02, 0xe5, 0x6c, 0x5e, + 0x42, 0xa0, 0xc9, 0x5b, 0xf3, 0x5e, 0xb3, 0x88, 0x8b, 0x5f, 0xc1, 0xc9, 0xd5, 0xb1, 0x1f, 0xc5, + 0xd5, 0xaf, 0xe0, 0xe4, 0x27, 0x70, 0x94, 0xe4, 0x7a, 0x41, 0x85, 0xd0, 0xed, 0xc9, 0xe5, 0xf7, + 0x3c, 0xf7, 0xbe, 0xcf, 0x39, 0x67, 0x31, 0x62, 0x3c, 0x07, 0x2e, 0x23, 0xe2, 0x5a, 0x96, 0xaa, + 0xf0, 0x38, 0x41, 0x5e, 0x24, 0x33, 0x20, 0x5e, 0x24, 0x11, 0xe0, 0x74, 0xfd, 0xc9, 0x16, 0x39, + 0x2a, 0x74, 0x3b, 0x1a, 0x65, 0x32, 0x22, 0x56, 0xbb, 0x58, 0xe1, 0x31, 0xe3, 0x6a, 0x1f, 0x35, + 0xe5, 0xe6, 0x40, 0xb8, 0xcc, 0xeb, 0x60, 0x1d, 0xd8, 0x3e, 0x30, 0xf8, 0x22, 0xe1, 0x32, 0xcb, + 0x50, 0x49, 0x95, 0x60, 0x46, 0xfa, 0x6f, 0xf7, 0xdc, 0xd9, 0x1d, 0x81, 0x0a, 0x4a, 0x7e, 0x02, + 0xf7, 0x4b, 0x20, 0xe5, 0x1e, 0x3a, 0x3b, 0x26, 0x69, 0x9a, 0xc9, 0x14, 0xf6, 0xad, 0x8e, 0xd5, + 0xdf, 0x9e, 0xb4, 0xcc, 0xe1, 0xb5, 0x4c, 0xe1, 0x64, 0x65, 0x39, 0xad, 0xca, 0x75, 0xa3, 0xc7, + 0x72, 0x5f, 0x2c, 0x67, 0xcb, 0x24, 0xb9, 0x1e, 0xdb, 0xb4, 0x05, 0xfb, 0x73, 0x6b, 0xbb, 0xdf, + 0x68, 0xa9, 
0xd7, 0x62, 0x95, 0xa1, 0x7b, 0xfc, 0xf4, 0xfe, 0xf1, 0x6c, 0x0f, 0xdc, 0x7e, 0xb9, + 0xf3, 0xc3, 0xaf, 0x51, 0x2f, 0x67, 0x4b, 0x52, 0x98, 0x42, 0x4e, 0x7c, 0xa0, 0x4b, 0x20, 0x3e, + 0x78, 0x1c, 0x7e, 0x59, 0x4e, 0x6f, 0x86, 0xe9, 0xc6, 0xa1, 0x86, 0x7b, 0x3f, 0x57, 0x1a, 0x97, + 0x05, 0x8d, 0xad, 0xdb, 0xab, 0xb5, 0x2d, 0xc6, 0xb9, 0xcc, 0x62, 0x86, 0x79, 0xcc, 0x63, 0xc8, + 0xaa, 0xfa, 0x4c, 0xff, 0x8b, 0x84, 0x9a, 0x9f, 0xf9, 0xc2, 0x88, 0x57, 0xfb, 0xdf, 0xc8, 0xf7, + 0x57, 0x76, 0x67, 0xa4, 0x03, 0xfd, 0x88, 0x98, 0x96, 0xa5, 0x0a, 0x3c, 0xb6, 0xbe, 0x98, 0xde, + 0x0c, 0x12, 0xfa, 0x11, 0x85, 0x35, 0x12, 0x06, 0x5e, 0x68, 0x90, 0x4f, 0xbb, 0xa7, 0xcf, 0x85, + 0xf0, 0x23, 0x12, 0xa2, 0x86, 0x84, 0x08, 0x3c, 0x21, 0x0c, 0x76, 0xf7, 0xbf, 0x9a, 0xf3, 0xf4, + 0x3b, 0x00, 0x00, 0xff, 0xff, 0x24, 0x5c, 0xd6, 0x52, 0x8d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go new file mode 100644 index 0000000..c7bb539 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/annotations/annotations.pb.go @@ -0,0 +1,54 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/annotations.proto + +package annotations // import "google.golang.org/genproto/googleapis/api/annotations" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +var E_Http = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.MethodOptions)(nil), + ExtensionType: (*HttpRule)(nil), + Field: 72295728, + Name: "google.api.http", + Tag: "bytes,72295728,opt,name=http", + Filename: "google/api/annotations.proto", +} + +func init() { + proto.RegisterExtension(E_Http) +} + +func init() { + proto.RegisterFile("google/api/annotations.proto", fileDescriptor_annotations_e7ebfecbdd9698dc) +} + +var fileDescriptor_annotations_e7ebfecbdd9698dc = []byte{ + // 208 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xcc, 0xcb, 0xcb, 0x2f, 0x49, 0x2c, 0xc9, 0xcc, + 0xcf, 0x2b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0xc8, 0xea, 0x25, 0x16, 0x64, + 0x4a, 0x89, 0x22, 0xa9, 0xcc, 0x28, 0x29, 0x29, 0x80, 0x28, 0x91, 0x52, 0x80, 0x0a, 0x83, 0x79, + 0x49, 0xa5, 0x69, 0xfa, 0x29, 0xa9, 0xc5, 0xc9, 0x45, 0x99, 0x05, 0x25, 0xf9, 0x45, 0x10, 0x15, + 0x56, 0xde, 0x5c, 0x2c, 0x20, 0xf5, 0x42, 0x72, 0x7a, 0x50, 0xd3, 0x60, 0x4a, 0xf5, 0x7c, 0x53, + 0x4b, 0x32, 0xf2, 0x53, 0xfc, 0x0b, 0xc0, 0x56, 0x4a, 0x6c, 0x38, 0xb5, 0x47, 0x49, 0x81, 0x51, + 0x83, 0xdb, 0x48, 0x44, 0x0f, 0x61, 0xad, 0x9e, 0x47, 0x49, 0x49, 0x41, 0x50, 0x69, 0x4e, 0x6a, + 0x10, 0xd8, 0x10, 0xa7, 0x3c, 0x2e, 0xbe, 0xe4, 0xfc, 0x5c, 0x24, 0x05, 0x4e, 0x02, 0x8e, 0x08, + 0x67, 0x07, 0x80, 0x4c, 0x0e, 0x60, 0x8c, 0x72, 0x84, 0xca, 0xa7, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, + 0xeb, 0xe5, 0x17, 0xa5, 0xeb, 0xa7, 0xa7, 0xe6, 0x81, 0xed, 0xd5, 0x87, 0x48, 0x25, 0x16, 0x64, + 0x16, 0xa3, 0x7b, 0xda, 0x1a, 0x89, 0xbd, 0x88, 0x89, 0xc5, 0xdd, 0x31, 0xc0, 0x33, 0x89, 0x0d, + 0xac, 0xc9, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xe3, 0x29, 0x19, 0x62, 0x28, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/client.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/client.pb.go new file mode 100644 index 0000000..589e00d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/annotations/client.pb.go @@ -0,0 +1,76 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/client.proto + +package annotations // import "google.golang.org/genproto/googleapis/api/annotations" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +var E_MethodSignature = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.MethodOptions)(nil), + ExtensionType: ([]string)(nil), + Field: 1051, + Name: "google.api.method_signature", + Tag: "bytes,1051,rep,name=method_signature,json=methodSignature", + Filename: "google/api/client.proto", +} + +var E_DefaultHost = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.ServiceOptions)(nil), + ExtensionType: (*string)(nil), + Field: 1049, + Name: "google.api.default_host", + Tag: "bytes,1049,opt,name=default_host,json=defaultHost", + Filename: "google/api/client.proto", +} + +var E_OauthScopes = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.ServiceOptions)(nil), + ExtensionType: (*string)(nil), + Field: 1050, + Name: "google.api.oauth_scopes", + Tag: "bytes,1050,opt,name=oauth_scopes,json=oauthScopes", + Filename: "google/api/client.proto", +} + +func init() { + proto.RegisterExtension(E_MethodSignature) + proto.RegisterExtension(E_DefaultHost) + proto.RegisterExtension(E_OauthScopes) +} + +func init() { proto.RegisterFile("google/api/client.proto", fileDescriptor_client_66b9a733374708c1) } + +var fileDescriptor_client_66b9a733374708c1 = []byte{ + // 262 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0x3f, 0x4f, 0xc3, 0x30, + 0x10, 0xc5, 0x55, 0x40, 0xa8, 0x75, 0x11, 0xa0, 0x2c, 0x20, 0x06, 0xc8, 0xd8, 0xc9, 0x1e, 0xd8, + 0xca, 0xd4, 0x76, 0xe0, 0x8f, 0x84, 0x88, 0x9a, 0x8d, 0x25, 0x72, 0x9d, 0xab, 0x63, 0x29, 0xf5, + 0x59, 0xf6, 0x85, 0xef, 0x02, 0x6c, 0x7c, 0x52, 0x54, 0xc7, 0x11, 0x48, 0x0c, 0x6c, 0x27, 0xbd, + 0xf7, 0xfb, 0x9d, 0xf4, 0xd8, 0x85, 0x46, 0xd4, 0x2d, 0x08, 0xe9, 0x8c, 0x50, 0xad, 0x01, 0x4b, + 0xdc, 0x79, 0x24, 0xcc, 0x58, 0x1f, 0x70, 0xe9, 0xcc, 0x55, 0x9e, 0x4a, 0x31, 0xd9, 0x74, 0x5b, + 0x51, 0x43, 0x50, 0xde, 0x38, 0x42, 0xdf, 0xb7, 0xe7, 0x4f, 0xec, 0x7c, 0x07, 0xd4, 0x60, 0x5d, + 0x05, 0xa3, 0xad, 0xa4, 0xce, 0x43, 0x76, 0xcd, 0x93, 0x62, 0xc0, 0xf8, 0x73, 0xac, 0xbc, 0x38, + 0x32, 0x68, 0xc3, 0xe5, 0xe7, 0x38, 0x3f, 0x9c, 0x4d, 0xd6, 0x67, 0x3d, 0x58, 0x0e, 0xdc, 0x7c, + 0xc5, 0x4e, 0x6a, 0xd8, 0xca, 0xae, 0xa5, 0xaa, 0xc1, 0x40, 0xd9, 0xcd, 0x1f, 0x4f, 0x09, 0xfe, + 0xcd, 0x28, 0x18, 0x44, 0xef, 0xe3, 0x7c, 0x34, 0x9b, 0xac, 0xa7, 0x89, 0x7a, 0xc0, 0x40, 0x7b, + 0x09, 0xca, 0x8e, 0x9a, 0x2a, 0x28, 0x74, 0x10, 0xfe, 0x97, 0x7c, 0x24, 0x49, 0xa4, 0xca, 0x08, + 0x2d, 0x0d, 0x3b, 0x55, 0xb8, 0xe3, 0x3f, 0x4b, 0x2c, 0xa7, 0xab, 0xb8, 0x51, 0xb1, 0x97, 0x14, + 0xa3, 0xd7, 0x45, 0x8a, 0x34, 0xb6, 0xd2, 0x6a, 0x8e, 0x5e, 0x0b, 0x0d, 0x36, 0xbe, 0x10, 0x7d, + 0x24, 0x9d, 0x09, 0x71, 0x5c, 0x69, 0x2d, 0x92, 0x8c, 0xbf, 0xee, 0x7e, 0xdd, 0x5f, 0x07, 0x47, + 0xf7, 0x8b, 0xe2, 0x71, 0x73, 0x1c, 0xa1, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xcc, 0xc2, + 0xcf, 0x71, 0x90, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/field_behavior.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/field_behavior.pb.go new file mode 100644 index 0000000..4fc5c40 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/annotations/field_behavior.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/field_behavior.proto + +package annotations // import "google.golang.org/genproto/googleapis/api/annotations" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An indicator of the behavior of a given field (for example, that a field +// is required in requests, or given as output but ignored as input). +// This **does not** change the behavior in protocol buffers itself; it only +// denotes the behavior and may affect how API tooling handles the field. +// +// Note: This enum **may** receive new values in the future. +type FieldBehavior int32 + +const ( + // Conventional default for enums. Do not use this. + FieldBehavior_FIELD_BEHAVIOR_UNSPECIFIED FieldBehavior = 0 + // Specifically denotes a field as optional. + // While all fields in protocol buffers are optional, this may be specified + // for emphasis if appropriate. + FieldBehavior_OPTIONAL FieldBehavior = 1 + // Denotes a field as required. + // This indicates that the field **must** be provided as part of the request, + // and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + FieldBehavior_REQUIRED FieldBehavior = 2 + // Denotes a field as output only. + // This indicates that the field is provided in responses, but including the + // field in a request does nothing (the server *must* ignore it and + // *must not* throw an error as a result of the field's presence). + FieldBehavior_OUTPUT_ONLY FieldBehavior = 3 + // Denotes a field as input only. + // This indicates that the field is provided in requests, and the + // corresponding field is not included in output. + FieldBehavior_INPUT_ONLY FieldBehavior = 4 + // Denotes a field as immutable. + // This indicates that the field may be set once in a request to create a + // resource, but may not be changed thereafter. 
+ FieldBehavior_IMMUTABLE FieldBehavior = 5 +) + +var FieldBehavior_name = map[int32]string{ + 0: "FIELD_BEHAVIOR_UNSPECIFIED", + 1: "OPTIONAL", + 2: "REQUIRED", + 3: "OUTPUT_ONLY", + 4: "INPUT_ONLY", + 5: "IMMUTABLE", +} +var FieldBehavior_value = map[string]int32{ + "FIELD_BEHAVIOR_UNSPECIFIED": 0, + "OPTIONAL": 1, + "REQUIRED": 2, + "OUTPUT_ONLY": 3, + "INPUT_ONLY": 4, + "IMMUTABLE": 5, +} + +func (x FieldBehavior) String() string { + return proto.EnumName(FieldBehavior_name, int32(x)) +} +func (FieldBehavior) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_field_behavior_540d8be7c570e386, []int{0} +} + +var E_FieldBehavior = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.FieldOptions)(nil), + ExtensionType: ([]FieldBehavior)(nil), + Field: 1052, + Name: "google.api.field_behavior", + Tag: "varint,1052,rep,name=field_behavior,json=fieldBehavior,enum=google.api.FieldBehavior", + Filename: "google/api/field_behavior.proto", +} + +func init() { + proto.RegisterEnum("google.api.FieldBehavior", FieldBehavior_name, FieldBehavior_value) + proto.RegisterExtension(E_FieldBehavior) +} + +func init() { + proto.RegisterFile("google/api/field_behavior.proto", fileDescriptor_field_behavior_540d8be7c570e386) +} + +var fileDescriptor_field_behavior_540d8be7c570e386 = []byte{ + // 303 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0x4f, 0x4f, 0xb3, 0x30, + 0x1c, 0xc7, 0x9f, 0xfd, 0x79, 0xcc, 0xac, 0x0e, 0x49, 0x4f, 0xba, 0x44, 0xdd, 0xd1, 0x78, 0x28, + 0x89, 0xde, 0xf4, 0x04, 0xae, 0xd3, 0x26, 0x8c, 0x56, 0x04, 0x13, 0xbd, 0x60, 0xb7, 0xb1, 0xda, + 0x64, 0xd2, 0x06, 0xd0, 0x8b, 0x6f, 0xc5, 0x93, 0xaf, 0xd4, 0xd0, 0x31, 0x85, 0x5b, 0xbf, 0xf9, + 0x7d, 0xfa, 0xeb, 0xe7, 0x5b, 0x70, 0x2a, 0x94, 0x12, 0xeb, 0xd4, 0xe1, 0x5a, 0x3a, 0x2b, 0x99, + 0xae, 0x97, 0xc9, 0x3c, 0x7d, 0xe5, 0x1f, 0x52, 0xe5, 0x48, 0xe7, 0xaa, 0x54, 0x10, 0x6c, 0x00, + 0xc4, 0xb5, 0x1c, 0x8d, 0x6b, 0xd8, 0x4c, 0xe6, 0xef, 0x2b, 0x67, 0x99, 0x16, 0x8b, 0x5c, 0xea, + 0x72, 0x4b, 0x9f, 0x7f, 0x82, 0xe1, 0xb4, 0xda, 0xe2, 0xd5, 0x4b, 0xe0, 0x09, 0x18, 0x4d, 0x09, + 0xf6, 0x27, 0x89, 0x87, 0xef, 0xdc, 0x47, 0x42, 0xc3, 0x24, 0x0e, 0x1e, 0x18, 0xbe, 0x21, 0x53, + 0x82, 0x27, 0xf6, 0x3f, 0xb8, 0x0f, 0x06, 0x94, 0x45, 0x84, 0x06, 0xae, 0x6f, 0x77, 0xaa, 0x14, + 0xe2, 0xfb, 0x98, 0x84, 0x78, 0x62, 0x77, 0xe1, 0x01, 0xd8, 0xa3, 0x71, 0xc4, 0xe2, 0x28, 0xa1, + 0x81, 0xff, 0x64, 0xf7, 0xa0, 0x05, 0x00, 0x09, 0x7e, 0x73, 0x1f, 0x0e, 0xc1, 0x2e, 0x99, 0xcd, + 0xe2, 0xc8, 0xf5, 0x7c, 0x6c, 0xff, 0xbf, 0x7a, 0x01, 0x56, 0xbb, 0x02, 0x3c, 0x46, 0xb5, 0xfd, + 0xd6, 0x18, 0x19, 0x3b, 0xaa, 0x4b, 0xa9, 0xb2, 0xe2, 0xf0, 0x6b, 0x30, 0xee, 0x9d, 0x59, 0x17, + 0x47, 0xe8, 0xaf, 0x23, 0x6a, 0xe9, 0x87, 0xc3, 0x55, 0x33, 0x7a, 0x1a, 0x58, 0x0b, 0xf5, 0xd6, + 0xc0, 0x3d, 0xd8, 0xe2, 0x59, 0xf5, 0x0c, 0xeb, 0x3c, 0xbb, 0x35, 0x21, 0xd4, 0x9a, 0x67, 0x02, + 0xa9, 0x5c, 0x38, 0x22, 0xcd, 0x8c, 0x84, 0xb3, 0x19, 0x71, 0x2d, 0x0b, 0xf3, 0xe9, 0x3c, 0xcb, + 0x54, 0xc9, 0x8d, 0xcf, 0x75, 0xe3, 0xfc, 0xdd, 0xed, 0xdf, 0xba, 0x8c, 0xcc, 0x77, 0xcc, 0xa5, + 0xcb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfc, 0x94, 0x57, 0x94, 0xa8, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go new file mode 100644 index 0000000..5ee365a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/annotations/http.pb.go @@ -0,0 +1,745 @@ +// Code generated by protoc-gen-go. 
DO NOT EDIT. +// source: google/api/http.proto + +package annotations // import "google.golang.org/genproto/googleapis/api/annotations" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +type Http struct { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*HttpRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + FullyDecodeReservedExpansion bool `protobuf:"varint,2,opt,name=fully_decode_reserved_expansion,json=fullyDecodeReservedExpansion,proto3" json:"fully_decode_reserved_expansion,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Http) Reset() { *m = Http{} } +func (m *Http) String() string { return proto.CompactTextString(m) } +func (*Http) ProtoMessage() {} +func (*Http) Descriptor() ([]byte, []int) { + return fileDescriptor_http_1e78ce0f8b702f4c, []int{0} +} +func (m *Http) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Http.Unmarshal(m, b) +} +func (m *Http) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Http.Marshal(b, m, deterministic) +} +func (dst *Http) XXX_Merge(src proto.Message) { + xxx_messageInfo_Http.Merge(dst, src) +} +func (m *Http) XXX_Size() int { + return xxx_messageInfo_Http.Size(m) +} +func (m *Http) XXX_DiscardUnknown() { + xxx_messageInfo_Http.DiscardUnknown(m) +} + +var xxx_messageInfo_Http proto.InternalMessageInfo + +func (m *Http) GetRules() []*HttpRule { + if m != nil { + return m.Rules + } + return nil +} + +func (m *Http) GetFullyDecodeReservedExpansion() bool { + if m != nil { + return m.FullyDecodeReservedExpansion + } + return false +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. 
The mapping specifies
+// how different portions of the gRPC request message are mapped to the URL
+// path, URL query parameters, and HTTP request body. It also controls how the
+// gRPC response message is mapped to the HTTP response body. `HttpRule` is
+// typically specified as an `google.api.http` annotation on the gRPC method.
+//
+// Each mapping specifies a URL path template and an HTTP method. The path
+// template may refer to one or more fields in the gRPC request message, as long
+// as each field is a non-repeated field with a primitive (non-message) type.
+// The path template controls how fields of the request message are mapped to
+// the URL path.
+//
+// Example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get: "/v1/{name=messages/*}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// string name = 1; // Mapped to URL path.
+// }
+// message Message {
+// string text = 1; // The resource content.
+// }
+//
+// This enables an HTTP REST to gRPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")`
+//
+// Any fields in the request message which are not bound by the path template
+// automatically become HTTP query parameters if there is no HTTP request body.
+// For example:
+//
+// service Messaging {
+// rpc GetMessage(GetMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// get:"/v1/messages/{message_id}"
+// };
+// }
+// }
+// message GetMessageRequest {
+// message SubMessage {
+// string subfield = 1;
+// }
+// string message_id = 1; // Mapped to URL path.
+// int64 revision = 2; // Mapped to URL query parameter `revision`.
+// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`.
+// }
+//
+// This enables a HTTP JSON to RPC mapping as below:
+//
+// HTTP | gRPC
+// -----|-----
+// `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
+// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
+// "foo"))`
+//
+// Note that fields which are mapped to URL query parameters must have a
+// primitive type or a repeated primitive type or a non-repeated message type.
+// In the case of a repeated type, the parameter can be repeated in the URL
+// as `...?param=A&param=B`. In the case of a message type, each field of the
+// message is mapped to a separate parameter, such as
+// `...?foo.a=A&foo.b=B&foo.c=C`.
+//
+// For HTTP methods that allow a request body, the `body` field
+// specifies the mapping. Consider a REST update method on the
+// message resource collection:
+//
+// service Messaging {
+// rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
+// option (google.api.http) = {
+// patch: "/v1/messages/{message_id}"
+// body: "message"
+// };
+// }
+// }
+// message UpdateMessageRequest {
+// string message_id = 1; // mapped to the URL
+// Message message = 2; // mapped to the body
+// }
+//
+// The following HTTP JSON to RPC mapping is enabled, where the
+// representation of the JSON in the request body is determined by
+// protos JSON encoding:
+//
+// HTTP | gRPC
+// -----|-----
+// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
+// "123456" message { text: "Hi!" })`
+//
+// The special name `*` can be used in the body mapping to define that
+// every field not bound by the path template should be mapped to the
+// request body. 
This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. 
+// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +type HttpRule struct { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. 
+ Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + // + // Types that are valid to be assigned to Pattern: + // *HttpRule_Get + // *HttpRule_Put + // *HttpRule_Post + // *HttpRule_Delete + // *HttpRule_Patch + // *HttpRule_Custom + Pattern isHttpRule_Pattern `protobuf_oneof:"pattern"` + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + Body string `protobuf:"bytes,7,opt,name=body,proto3" json:"body,omitempty"` + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + ResponseBody string `protobuf:"bytes,12,opt,name=response_body,json=responseBody,proto3" json:"response_body,omitempty"` + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + AdditionalBindings []*HttpRule `protobuf:"bytes,11,rep,name=additional_bindings,json=additionalBindings,proto3" json:"additional_bindings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpRule) Reset() { *m = HttpRule{} } +func (m *HttpRule) String() string { return proto.CompactTextString(m) } +func (*HttpRule) ProtoMessage() {} +func (*HttpRule) Descriptor() ([]byte, []int) { + return fileDescriptor_http_1e78ce0f8b702f4c, []int{1} +} +func (m *HttpRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpRule.Unmarshal(m, b) +} +func (m *HttpRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpRule.Marshal(b, m, deterministic) +} +func (dst *HttpRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpRule.Merge(dst, src) +} +func (m *HttpRule) XXX_Size() int { + return xxx_messageInfo_HttpRule.Size(m) +} +func (m *HttpRule) XXX_DiscardUnknown() { + xxx_messageInfo_HttpRule.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpRule proto.InternalMessageInfo + +func (m *HttpRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +type isHttpRule_Pattern interface { + isHttpRule_Pattern() +} + +type HttpRule_Get struct { + Get string `protobuf:"bytes,2,opt,name=get,proto3,oneof"` +} + +type HttpRule_Put struct { + Put string `protobuf:"bytes,3,opt,name=put,proto3,oneof"` +} + +type HttpRule_Post struct { + Post string `protobuf:"bytes,4,opt,name=post,proto3,oneof"` +} + +type HttpRule_Delete struct { + Delete string `protobuf:"bytes,5,opt,name=delete,proto3,oneof"` +} + +type HttpRule_Patch struct { + Patch string `protobuf:"bytes,6,opt,name=patch,proto3,oneof"` +} + +type HttpRule_Custom struct { + Custom *CustomHttpPattern `protobuf:"bytes,8,opt,name=custom,proto3,oneof"` +} + +func (*HttpRule_Get) isHttpRule_Pattern() {} + +func (*HttpRule_Put) isHttpRule_Pattern() {} + +func (*HttpRule_Post) 
isHttpRule_Pattern() {} + +func (*HttpRule_Delete) isHttpRule_Pattern() {} + +func (*HttpRule_Patch) isHttpRule_Pattern() {} + +func (*HttpRule_Custom) isHttpRule_Pattern() {} + +func (m *HttpRule) GetPattern() isHttpRule_Pattern { + if m != nil { + return m.Pattern + } + return nil +} + +func (m *HttpRule) GetGet() string { + if x, ok := m.GetPattern().(*HttpRule_Get); ok { + return x.Get + } + return "" +} + +func (m *HttpRule) GetPut() string { + if x, ok := m.GetPattern().(*HttpRule_Put); ok { + return x.Put + } + return "" +} + +func (m *HttpRule) GetPost() string { + if x, ok := m.GetPattern().(*HttpRule_Post); ok { + return x.Post + } + return "" +} + +func (m *HttpRule) GetDelete() string { + if x, ok := m.GetPattern().(*HttpRule_Delete); ok { + return x.Delete + } + return "" +} + +func (m *HttpRule) GetPatch() string { + if x, ok := m.GetPattern().(*HttpRule_Patch); ok { + return x.Patch + } + return "" +} + +func (m *HttpRule) GetCustom() *CustomHttpPattern { + if x, ok := m.GetPattern().(*HttpRule_Custom); ok { + return x.Custom + } + return nil +} + +func (m *HttpRule) GetBody() string { + if m != nil { + return m.Body + } + return "" +} + +func (m *HttpRule) GetResponseBody() string { + if m != nil { + return m.ResponseBody + } + return "" +} + +func (m *HttpRule) GetAdditionalBindings() []*HttpRule { + if m != nil { + return m.AdditionalBindings + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*HttpRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HttpRule_OneofMarshaler, _HttpRule_OneofUnmarshaler, _HttpRule_OneofSizer, []interface{}{ + (*HttpRule_Get)(nil), + (*HttpRule_Put)(nil), + (*HttpRule_Post)(nil), + (*HttpRule_Delete)(nil), + (*HttpRule_Patch)(nil), + (*HttpRule_Custom)(nil), + } +} + +func _HttpRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HttpRule) + // pattern + switch x := m.Pattern.(type) { + case *HttpRule_Get: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Get) + case *HttpRule_Put: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Put) + case *HttpRule_Post: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Post) + case *HttpRule_Delete: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Delete) + case *HttpRule_Patch: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Patch) + case *HttpRule_Custom: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Custom); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HttpRule.Pattern has unexpected type %T", x) + } + return nil +} + +func _HttpRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HttpRule) + switch tag { + case 2: // pattern.get + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pattern = &HttpRule_Get{x} + return true, err + case 3: // pattern.put + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pattern = &HttpRule_Put{x} + return true, err + case 4: // pattern.post + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pattern = &HttpRule_Post{x} + return true, err + case 5: // pattern.delete + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pattern = &HttpRule_Delete{x} + return true, err + case 6: // pattern.patch + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pattern = &HttpRule_Patch{x} + return true, err + case 8: // pattern.custom + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomHttpPattern) + err := b.DecodeMessage(msg) + m.Pattern = &HttpRule_Custom{msg} + return true, err + default: + return false, nil + } +} + +func _HttpRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HttpRule) + // pattern + switch x := m.Pattern.(type) { + case *HttpRule_Get: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Get))) + n += len(x.Get) + case *HttpRule_Put: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Put))) + n += len(x.Put) + case *HttpRule_Post: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Post))) + n += len(x.Post) + case *HttpRule_Delete: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Delete))) + n += len(x.Delete) + case *HttpRule_Patch: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Patch))) + n += len(x.Patch) + case *HttpRule_Custom: + s := proto.Size(x.Custom) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A custom pattern is used for defining custom HTTP verb. +type CustomHttpPattern struct { + // The name of this custom HTTP verb. + Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` + // The path matched by this custom verb. + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomHttpPattern) Reset() { *m = CustomHttpPattern{} } +func (m *CustomHttpPattern) String() string { return proto.CompactTextString(m) } +func (*CustomHttpPattern) ProtoMessage() {} +func (*CustomHttpPattern) Descriptor() ([]byte, []int) { + return fileDescriptor_http_1e78ce0f8b702f4c, []int{2} +} +func (m *CustomHttpPattern) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomHttpPattern.Unmarshal(m, b) +} +func (m *CustomHttpPattern) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomHttpPattern.Marshal(b, m, deterministic) +} +func (dst *CustomHttpPattern) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomHttpPattern.Merge(dst, src) +} +func (m *CustomHttpPattern) XXX_Size() int { + return xxx_messageInfo_CustomHttpPattern.Size(m) +} +func (m *CustomHttpPattern) XXX_DiscardUnknown() { + xxx_messageInfo_CustomHttpPattern.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomHttpPattern proto.InternalMessageInfo + +func (m *CustomHttpPattern) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +func (m *CustomHttpPattern) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func init() { + proto.RegisterType((*Http)(nil), "google.api.Http") + proto.RegisterType((*HttpRule)(nil), "google.api.HttpRule") + proto.RegisterType((*CustomHttpPattern)(nil), "google.api.CustomHttpPattern") +} + +func init() { proto.RegisterFile("google/api/http.proto", fileDescriptor_http_1e78ce0f8b702f4c) } + +var fileDescriptor_http_1e78ce0f8b702f4c = []byte{ + // 419 bytes 
of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xc1, 0x8e, 0xd3, 0x30, + 0x10, 0x86, 0x49, 0x9b, 0x76, 0xdb, 0xe9, 0x82, 0x84, 0x59, 0x90, 0x85, 0x40, 0x54, 0xe5, 0x52, + 0x71, 0x48, 0xa5, 0xe5, 0xc0, 0x61, 0x4f, 0x1b, 0xa8, 0x58, 0x6e, 0x55, 0x8e, 0x5c, 0x22, 0x37, + 0x1e, 0x52, 0x83, 0xd7, 0xb6, 0xe2, 0x09, 0xa2, 0xaf, 0xc3, 0x63, 0xf1, 0x24, 0x1c, 0x91, 0x9d, + 0x84, 0x56, 0x42, 0xe2, 0x36, 0xf3, 0xff, 0x9f, 0xa7, 0x7f, 0x27, 0x03, 0x4f, 0x6b, 0x6b, 0x6b, + 0x8d, 0x1b, 0xe1, 0xd4, 0xe6, 0x40, 0xe4, 0x32, 0xd7, 0x58, 0xb2, 0x0c, 0x3a, 0x39, 0x13, 0x4e, + 0xad, 0x8e, 0x90, 0xde, 0x11, 0x39, 0xf6, 0x06, 0x26, 0x4d, 0xab, 0xd1, 0xf3, 0x64, 0x39, 0x5e, + 0x2f, 0xae, 0xaf, 0xb2, 0x13, 0x93, 0x05, 0xa0, 0x68, 0x35, 0x16, 0x1d, 0xc2, 0xb6, 0xf0, 0xea, + 0x4b, 0xab, 0xf5, 0xb1, 0x94, 0x58, 0x59, 0x89, 0x65, 0x83, 0x1e, 0x9b, 0xef, 0x28, 0x4b, 0xfc, + 0xe1, 0x84, 0xf1, 0xca, 0x1a, 0x3e, 0x5a, 0x26, 0xeb, 0x59, 0xf1, 0x22, 0x62, 0x1f, 0x22, 0x55, + 0xf4, 0xd0, 0x76, 0x60, 0x56, 0xbf, 0x46, 0x30, 0x1b, 0x46, 0xb3, 0xe7, 0x30, 0xf3, 0xa8, 0xb1, + 0x22, 0xdb, 0xf0, 0x64, 0x99, 0xac, 0xe7, 0xc5, 0xdf, 0x9e, 0x31, 0x18, 0xd7, 0x48, 0x71, 0xe6, + 0xfc, 0xee, 0x41, 0x11, 0x9a, 0xa0, 0xb9, 0x96, 0xf8, 0x78, 0xd0, 0x5c, 0x4b, 0xec, 0x0a, 0x52, + 0x67, 0x3d, 0xf1, 0xb4, 0x17, 0x63, 0xc7, 0x38, 0x4c, 0x25, 0x6a, 0x24, 0xe4, 0x93, 0x5e, 0xef, + 0x7b, 0xf6, 0x0c, 0x26, 0x4e, 0x50, 0x75, 0xe0, 0xd3, 0xde, 0xe8, 0x5a, 0xf6, 0x0e, 0xa6, 0x55, + 0xeb, 0xc9, 0xde, 0xf3, 0xd9, 0x32, 0x59, 0x2f, 0xae, 0x5f, 0x9e, 0x2f, 0xe3, 0x7d, 0x74, 0x42, + 0xee, 0x9d, 0x20, 0xc2, 0xc6, 0x84, 0x81, 0x1d, 0xce, 0x18, 0xa4, 0x7b, 0x2b, 0x8f, 0xfc, 0x22, + 0xfe, 0x81, 0x58, 0xb3, 0xd7, 0xf0, 0xb0, 0x41, 0xef, 0xac, 0xf1, 0x58, 0x46, 0xf3, 0x32, 0x9a, + 0x97, 0x83, 0x98, 0x07, 0x68, 0x0b, 0x4f, 0x84, 0x94, 0x8a, 0x94, 0x35, 0x42, 0x97, 0x7b, 0x65, + 0xa4, 0x32, 0xb5, 0xe7, 0x8b, 0xff, 0x7c, 0x0b, 0x76, 0x7a, 0x90, 0xf7, 0x7c, 0x3e, 0x87, 0x0b, + 0xd7, 0x85, 0x5a, 0xdd, 0xc0, 0xe3, 0x7f, 0x92, 0x86, 0x7c, 0xdf, 0x94, 0x91, 0xfd, 0x82, 0x63, + 0x1d, 0x34, 0x27, 0xe8, 0xd0, 0x6d, 0xb7, 0x88, 0x75, 0xfe, 0x15, 0x1e, 0x55, 0xf6, 0xfe, 0xec, + 0x67, 0xf3, 0x79, 0x1c, 0x13, 0xae, 0x67, 0x97, 0x7c, 0xbe, 0xed, 0x8d, 0xda, 0x6a, 0x61, 0xea, + 0xcc, 0x36, 0xf5, 0xa6, 0x46, 0x13, 0x6f, 0x6b, 0xd3, 0x59, 0xc2, 0x29, 0x1f, 0xaf, 0x4e, 0x18, + 0x63, 0x49, 0x84, 0x98, 0xfe, 0xe6, 0xac, 0xfe, 0x9d, 0x24, 0x3f, 0x47, 0xe9, 0xc7, 0xdb, 0xdd, + 0xa7, 0xfd, 0x34, 0xbe, 0x7b, 0xfb, 0x27, 0x00, 0x00, 0xff, 0xff, 0xae, 0xde, 0xa1, 0xd0, 0xac, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/annotations/resource.pb.go b/vendor/google.golang.org/genproto/googleapis/api/annotations/resource.pb.go new file mode 100644 index 0000000..3622e03 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/annotations/resource.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/resource.proto + +package annotations // import "google.golang.org/genproto/googleapis/api/annotations" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A description of the historical or future-looking state of the +// resource pattern. +type ResourceDescriptor_History int32 + +const ( + // The "unset" value. + ResourceDescriptor_HISTORY_UNSPECIFIED ResourceDescriptor_History = 0 + // The resource originally had one pattern and launched as such, and + // additional patterns were added later. + ResourceDescriptor_ORIGINALLY_SINGLE_PATTERN ResourceDescriptor_History = 1 + // The resource has one pattern, but the API owner expects to add more + // later. (This is the inverse of ORIGINALLY_SINGLE_PATTERN, and prevents + // that from being necessary once there are multiple patterns.) + ResourceDescriptor_FUTURE_MULTI_PATTERN ResourceDescriptor_History = 2 +) + +var ResourceDescriptor_History_name = map[int32]string{ + 0: "HISTORY_UNSPECIFIED", + 1: "ORIGINALLY_SINGLE_PATTERN", + 2: "FUTURE_MULTI_PATTERN", +} +var ResourceDescriptor_History_value = map[string]int32{ + "HISTORY_UNSPECIFIED": 0, + "ORIGINALLY_SINGLE_PATTERN": 1, + "FUTURE_MULTI_PATTERN": 2, +} + +func (x ResourceDescriptor_History) String() string { + return proto.EnumName(ResourceDescriptor_History_name, int32(x)) +} +func (ResourceDescriptor_History) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resource_8ecf418b4524f3f1, []int{0, 0} +} + +// A simple descriptor of a resource type. +// +// ResourceDescriptor annotates a resource message (either by means of a +// protobuf annotation or use in the service config), and associates the +// resource's schema, the resource type, and the pattern of the resource name. +// +// Example: +// +// message Topic { +// // Indicates this message defines a resource schema. +// // Declares the resource type in the format of {service}/{kind}. +// // For Kubernetes resources, the format is {api group}/{kind}. +// option (google.api.resource) = { +// type: "pubsub.googleapis.com/Topic" +// pattern: "projects/{project}/topics/{topic}" +// }; +// } +// +// Sometimes, resources have multiple patterns, typically because they can +// live under multiple parents. +// +// Example: +// +// message LogEntry { +// option (google.api.resource) = { +// type: "logging.googleapis.com/LogEntry" +// pattern: "projects/{project}/logs/{log}" +// pattern: "organizations/{organization}/logs/{log}" +// pattern: "folders/{folder}/logs/{log}" +// pattern: "billingAccounts/{billing_account}/logs/{log}" +// }; +// } +type ResourceDescriptor struct { + // The full name of the resource type. It must be in the format of + // {service_name}/{resource_type_kind}. The resource type names are + // singular and do not contain version numbers. + // + // For example: `storage.googleapis.com/Bucket` + // + // The value of the resource_type_kind must follow the regular expression + // /[A-Z][a-zA-Z0-9]+/. It must start with upper case character and + // recommended to use PascalCase (UpperCamelCase). The maximum number of + // characters allowed for the resource_type_kind is 100. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Required. The valid pattern or patterns for this resource's names. + // + // Examples: + // - "projects/{project}/topics/{topic}" + // - "projects/{project}/knowledgeBases/{knowledge_base}" + // + // The components in braces correspond to the IDs for each resource in the + // hierarchy. 
It is expected that, if multiple patterns are provided, + // the same component name (e.g. "project") refers to IDs of the same + // type of resource. + Pattern []string `protobuf:"bytes,2,rep,name=pattern,proto3" json:"pattern,omitempty"` + // Optional. The field on the resource that designates the resource name + // field. If omitted, this is assumed to be "name". + NameField string `protobuf:"bytes,3,opt,name=name_field,json=nameField,proto3" json:"name_field,omitempty"` + // Optional. The historical or future-looking state of the resource pattern. + // + // Example: + // // The InspectTemplate message originally only supported resource + // // names with organization, and project was added later. + // message InspectTemplate { + // option (google.api.resource) = { + // type: "dlp.googleapis.com/InspectTemplate" + // pattern: "organizations/{organization}/inspectTemplates/{inspect_template}" + // pattern: "projects/{project}/inspectTemplates/{inspect_template}" + // history: ORIGINALLY_SINGLE_PATTERN + // }; + // } + History ResourceDescriptor_History `protobuf:"varint,4,opt,name=history,proto3,enum=google.api.ResourceDescriptor_History" json:"history,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceDescriptor) Reset() { *m = ResourceDescriptor{} } +func (m *ResourceDescriptor) String() string { return proto.CompactTextString(m) } +func (*ResourceDescriptor) ProtoMessage() {} +func (*ResourceDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_resource_8ecf418b4524f3f1, []int{0} +} +func (m *ResourceDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceDescriptor.Unmarshal(m, b) +} +func (m *ResourceDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceDescriptor.Marshal(b, m, deterministic) +} +func (dst *ResourceDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceDescriptor.Merge(dst, src) +} +func (m *ResourceDescriptor) XXX_Size() int { + return xxx_messageInfo_ResourceDescriptor.Size(m) +} +func (m *ResourceDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceDescriptor proto.InternalMessageInfo + +func (m *ResourceDescriptor) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *ResourceDescriptor) GetPattern() []string { + if m != nil { + return m.Pattern + } + return nil +} + +func (m *ResourceDescriptor) GetNameField() string { + if m != nil { + return m.NameField + } + return "" +} + +func (m *ResourceDescriptor) GetHistory() ResourceDescriptor_History { + if m != nil { + return m.History + } + return ResourceDescriptor_HISTORY_UNSPECIFIED +} + +// An annotation designating that this field is a reference to a resource +// defined by another message. +type ResourceReference struct { + // The unified resource type name of the type that this field references. + // Marks this as a field referring to a resource in another message. + // + // Example: + // + // message Subscription { + // string topic = 2 [(google.api.resource_reference) = { + // type = "pubsub.googleapis.com/Topic" + // }]; + // } + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The fully-qualified message name of a child of the type that this field + // references. + // + // This is useful for `parent` fields where a resource has more than one + // possible type of parent. 
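A minimal sketch of the ResourceDescriptor accessors defined above (illustrative only, not part of the vendored file; in practice the descriptor comes from the proto annotation rather than being built by hand, and the Topic values mirror the example above):

    package main

    import (
        "fmt"

        "google.golang.org/genproto/googleapis/api/annotations"
    )

    func main() {
        d := &annotations.ResourceDescriptor{
            Type:    "pubsub.googleapis.com/Topic",
            Pattern: []string{"projects/{project}/topics/{topic}"},
        }
        // History was never set, so the getter falls back to HISTORY_UNSPECIFIED.
        fmt.Println(d.GetType(), d.GetPattern(), d.GetHistory())
    }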
+ // + // Example: + // + // message ListLogEntriesRequest { + // string parent = 1 [(google.api.resource_reference) = { + // child_type: "logging.googleapis.com/LogEntry" + // }; + // } + // + // If the referenced message is in the same proto package, the service name + // may be omitted: + // + // message ListLogEntriesRequest { + // string parent = 1 + // [(google.api.resource_reference).child_type = "LogEntry"]; + // } + ChildType string `protobuf:"bytes,2,opt,name=child_type,json=childType,proto3" json:"child_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceReference) Reset() { *m = ResourceReference{} } +func (m *ResourceReference) String() string { return proto.CompactTextString(m) } +func (*ResourceReference) ProtoMessage() {} +func (*ResourceReference) Descriptor() ([]byte, []int) { + return fileDescriptor_resource_8ecf418b4524f3f1, []int{1} +} +func (m *ResourceReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceReference.Unmarshal(m, b) +} +func (m *ResourceReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceReference.Marshal(b, m, deterministic) +} +func (dst *ResourceReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceReference.Merge(dst, src) +} +func (m *ResourceReference) XXX_Size() int { + return xxx_messageInfo_ResourceReference.Size(m) +} +func (m *ResourceReference) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceReference.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceReference proto.InternalMessageInfo + +func (m *ResourceReference) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *ResourceReference) GetChildType() string { + if m != nil { + return m.ChildType + } + return "" +} + +var E_ResourceReference = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.FieldOptions)(nil), + ExtensionType: (*ResourceReference)(nil), + Field: 1055, + Name: "google.api.resource_reference", + Tag: "bytes,1055,opt,name=resource_reference,json=resourceReference", + Filename: "google/api/resource.proto", +} + +var E_Resource = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.MessageOptions)(nil), + ExtensionType: (*ResourceDescriptor)(nil), + Field: 1053, + Name: "google.api.resource", + Tag: "bytes,1053,opt,name=resource", + Filename: "google/api/resource.proto", +} + +func init() { + proto.RegisterType((*ResourceDescriptor)(nil), "google.api.ResourceDescriptor") + proto.RegisterType((*ResourceReference)(nil), "google.api.ResourceReference") + proto.RegisterEnum("google.api.ResourceDescriptor_History", ResourceDescriptor_History_name, ResourceDescriptor_History_value) + proto.RegisterExtension(E_ResourceReference) + proto.RegisterExtension(E_Resource) +} + +func init() { proto.RegisterFile("google/api/resource.proto", fileDescriptor_resource_8ecf418b4524f3f1) } + +var fileDescriptor_resource_8ecf418b4524f3f1 = []byte{ + // 430 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x52, 0x41, 0x6f, 0xd3, 0x30, + 0x18, 0x25, 0x59, 0x45, 0xd7, 0x0f, 0x31, 0x6d, 0x06, 0x89, 0x0c, 0x29, 0x10, 0xf5, 0x80, 0x7a, + 0x4a, 0xa4, 0x71, 0x1b, 0x17, 0x3a, 0x96, 0x76, 0x91, 0xba, 0x36, 0x72, 0xd3, 0xc3, 0x00, 0x29, + 0xf2, 0xd2, 0xaf, 0x59, 0xa4, 0xcc, 0xb6, 0x9c, 0xec, 0xd0, 0x1b, 0x7f, 0x04, 0x21, 0xf1, 0x2b, + 0x39, 0xa2, 0x3a, 0x71, 0x98, 0xd8, 0xb4, 0x9b, 0xf3, 0xde, 0xfb, 0xbe, 0xf7, 0xfc, 0x1c, 0x38, + 0xce, 
0x85, 0xc8, 0x4b, 0x0c, 0x98, 0x2c, 0x02, 0x85, 0x95, 0xb8, 0x53, 0x19, 0xfa, 0x52, 0x89, + 0x5a, 0x10, 0x68, 0x28, 0x9f, 0xc9, 0xe2, 0xad, 0xd7, 0xca, 0x34, 0x73, 0x7d, 0xb7, 0x09, 0xd6, + 0x58, 0x65, 0xaa, 0x90, 0xb5, 0x50, 0x8d, 0x7a, 0xf8, 0xc3, 0x06, 0x42, 0xdb, 0x05, 0xe7, 0x1d, + 0x49, 0x08, 0xf4, 0xea, 0xad, 0x44, 0xc7, 0xf2, 0xac, 0xd1, 0x80, 0xea, 0x33, 0x71, 0xa0, 0x2f, + 0x59, 0x5d, 0xa3, 0xe2, 0x8e, 0xed, 0xed, 0x8d, 0x06, 0xd4, 0x7c, 0x12, 0x17, 0x80, 0xb3, 0x5b, + 0x4c, 0x37, 0x05, 0x96, 0x6b, 0x67, 0x4f, 0xcf, 0x0c, 0x76, 0xc8, 0x64, 0x07, 0x90, 0xcf, 0xd0, + 0xbf, 0x29, 0xaa, 0x5a, 0xa8, 0xad, 0xd3, 0xf3, 0xac, 0xd1, 0xc1, 0xc9, 0x07, 0xff, 0x5f, 0x46, + 0xff, 0xa1, 0xbb, 0x7f, 0xd1, 0xa8, 0xa9, 0x19, 0x1b, 0x7e, 0x83, 0x7e, 0x8b, 0x91, 0x37, 0xf0, + 0xea, 0x22, 0x5a, 0x26, 0x0b, 0x7a, 0x95, 0xae, 0xe6, 0xcb, 0x38, 0xfc, 0x12, 0x4d, 0xa2, 0xf0, + 0xfc, 0xf0, 0x19, 0x71, 0xe1, 0x78, 0x41, 0xa3, 0x69, 0x34, 0x1f, 0xcf, 0x66, 0x57, 0xe9, 0x32, + 0x9a, 0x4f, 0x67, 0x61, 0x1a, 0x8f, 0x93, 0x24, 0xa4, 0xf3, 0x43, 0x8b, 0x38, 0xf0, 0x7a, 0xb2, + 0x4a, 0x56, 0x34, 0x4c, 0x2f, 0x57, 0xb3, 0x24, 0xea, 0x18, 0x7b, 0x38, 0x81, 0x23, 0x93, 0x81, + 0xe2, 0x06, 0x15, 0xf2, 0x0c, 0x1f, 0x2d, 0xc0, 0x05, 0xc8, 0x6e, 0x8a, 0x72, 0x9d, 0x6a, 0xc6, + 0x6e, 0xae, 0xa9, 0x91, 0x64, 0x2b, 0xf1, 0xb4, 0x04, 0x62, 0x9e, 0x22, 0x55, 0xdd, 0x22, 0xd7, + 0xdc, 0xd5, 0xbc, 0x81, 0xaf, 0x4b, 0x59, 0xc8, 0xba, 0x10, 0xbc, 0x72, 0x7e, 0xed, 0x7b, 0xd6, + 0xe8, 0xc5, 0x89, 0xfb, 0x58, 0x23, 0x5d, 0x1a, 0x7a, 0xa4, 0xfe, 0x87, 0x4e, 0xbf, 0xc3, 0xbe, + 0x01, 0xc9, 0xfb, 0x07, 0x1e, 0x97, 0x58, 0x55, 0x2c, 0x47, 0xe3, 0xf2, 0xb3, 0x71, 0x79, 0xf7, + 0x74, 0xef, 0xb4, 0xdb, 0x78, 0xc6, 0xe1, 0x20, 0x13, 0xb7, 0xf7, 0xe4, 0x67, 0x2f, 0x8d, 0x3e, + 0xde, 0x79, 0xc4, 0xd6, 0xd7, 0x71, 0x4b, 0xe6, 0xa2, 0x64, 0x3c, 0xf7, 0x85, 0xca, 0x83, 0x1c, + 0xb9, 0x4e, 0x10, 0x34, 0x14, 0x93, 0x45, 0xa5, 0xff, 0x50, 0xc6, 0xb9, 0xa8, 0x99, 0x8e, 0xf2, + 0xe9, 0xde, 0xf9, 0x8f, 0x65, 0xfd, 0xb6, 0x7b, 0xd3, 0x71, 0x1c, 0x5d, 0x3f, 0xd7, 0x73, 0x1f, + 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xb5, 0x1e, 0x07, 0x80, 0xd8, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/authorization_config.pb.go b/vendor/google.golang.org/genproto/googleapis/api/authorization_config.pb.go new file mode 100644 index 0000000..caddc13 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/authorization_config.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/experimental/authorization_config.proto + +package api // import "google.golang.org/genproto/googleapis/api" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Configuration of authorization. +// +// This section determines the authorization provider, if unspecified, then no +// authorization check will be done. 
+// +// Example: +// +// experimental: +// authorization: +// provider: firebaserules.googleapis.com +type AuthorizationConfig struct { + // The name of the authorization provider, such as + // firebaserules.googleapis.com. + Provider string `protobuf:"bytes,1,opt,name=provider,proto3" json:"provider,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthorizationConfig) Reset() { *m = AuthorizationConfig{} } +func (m *AuthorizationConfig) String() string { return proto.CompactTextString(m) } +func (*AuthorizationConfig) ProtoMessage() {} +func (*AuthorizationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_authorization_config_4cffac9b7afd314b, []int{0} +} +func (m *AuthorizationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthorizationConfig.Unmarshal(m, b) +} +func (m *AuthorizationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthorizationConfig.Marshal(b, m, deterministic) +} +func (dst *AuthorizationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthorizationConfig.Merge(dst, src) +} +func (m *AuthorizationConfig) XXX_Size() int { + return xxx_messageInfo_AuthorizationConfig.Size(m) +} +func (m *AuthorizationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AuthorizationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthorizationConfig proto.InternalMessageInfo + +func (m *AuthorizationConfig) GetProvider() string { + if m != nil { + return m.Provider + } + return "" +} + +func init() { + proto.RegisterType((*AuthorizationConfig)(nil), "google.api.AuthorizationConfig") +} + +func init() { + proto.RegisterFile("google/api/experimental/authorization_config.proto", fileDescriptor_authorization_config_4cffac9b7afd314b) +} + +var fileDescriptor_authorization_config_4cffac9b7afd314b = []byte{ + // 180 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xad, 0x28, 0x48, 0x2d, 0xca, 0xcc, 0x4d, 0xcd, + 0x2b, 0x49, 0xcc, 0xd1, 0x4f, 0x2c, 0x2d, 0xc9, 0xc8, 0x2f, 0xca, 0xac, 0x4a, 0x2c, 0xc9, 0xcc, + 0xcf, 0x8b, 0x4f, 0xce, 0xcf, 0x4b, 0xcb, 0x4c, 0xd7, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, + 0x82, 0xe8, 0xd1, 0x4b, 0x2c, 0xc8, 0x54, 0x32, 0xe4, 0x12, 0x76, 0x44, 0x56, 0xe9, 0x0c, 0x56, + 0x28, 0x24, 0xc5, 0xc5, 0x51, 0x50, 0x94, 0x5f, 0x96, 0x99, 0x92, 0x5a, 0x24, 0xc1, 0xa8, 0xc0, + 0xa8, 0xc1, 0x19, 0x04, 0xe7, 0x3b, 0x25, 0x71, 0xf1, 0x25, 0xe7, 0xe7, 0xea, 0x21, 0x0c, 0x71, + 0x92, 0xc0, 0x62, 0x44, 0x00, 0xc8, 0xaa, 0x00, 0xc6, 0x28, 0x5d, 0xa8, 0xba, 0xf4, 0xfc, 0x9c, + 0xc4, 0xbc, 0x74, 0xbd, 0xfc, 0xa2, 0x74, 0xfd, 0xf4, 0xd4, 0x3c, 0xb0, 0x43, 0xf4, 0x21, 0x52, + 0x89, 0x05, 0x99, 0xc5, 0x20, 0xf7, 0x5b, 0x27, 0x16, 0x64, 0x2e, 0x62, 0x62, 0x71, 0x77, 0x0c, + 0xf0, 0x4c, 0x62, 0x03, 0x2b, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x52, 0x27, 0x0c, 0xba, + 0xdf, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/configchange/config_change.pb.go b/vendor/google.golang.org/genproto/googleapis/api/configchange/config_change.pb.go new file mode 100644 index 0000000..09d036a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/configchange/config_change.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/config_change.proto + +package configchange // import "google.golang.org/genproto/googleapis/api/configchange" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Classifies set of possible modifications to an object in the service +// configuration. +type ChangeType int32 + +const ( + // No value was provided. + ChangeType_CHANGE_TYPE_UNSPECIFIED ChangeType = 0 + // The changed object exists in the 'new' service configuration, but not + // in the 'old' service configuration. + ChangeType_ADDED ChangeType = 1 + // The changed object exists in the 'old' service configuration, but not + // in the 'new' service configuration. + ChangeType_REMOVED ChangeType = 2 + // The changed object exists in both service configurations, but its value + // is different. + ChangeType_MODIFIED ChangeType = 3 +) + +var ChangeType_name = map[int32]string{ + 0: "CHANGE_TYPE_UNSPECIFIED", + 1: "ADDED", + 2: "REMOVED", + 3: "MODIFIED", +} +var ChangeType_value = map[string]int32{ + "CHANGE_TYPE_UNSPECIFIED": 0, + "ADDED": 1, + "REMOVED": 2, + "MODIFIED": 3, +} + +func (x ChangeType) String() string { + return proto.EnumName(ChangeType_name, int32(x)) +} +func (ChangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_config_change_22274c34b306f4cb, []int{0} +} + +// Output generated from semantically comparing two versions of a service +// configuration. +// +// Includes detailed information about a field that have changed with +// applicable advice about potential consequences for the change, such as +// backwards-incompatibility. +type ConfigChange struct { + // Object hierarchy path to the change, with levels separated by a '.' + // character. For repeated fields, an applicable unique identifier field is + // used for the index (usually selector, name, or id). For maps, the term + // 'key' is used. If the field has no unique identifier, the numeric index + // is used. + // Examples: + // - visibility.rules[selector=="google.LibraryService.ListBooks"].restriction + // - quota.metric_rules[selector=="google"].metric_costs[key=="reads"].value + // - logging.producer_destinations[0] + Element string `protobuf:"bytes,1,opt,name=element,proto3" json:"element,omitempty"` + // Value of the changed object in the old Service configuration, + // in JSON format. This field will not be populated if ChangeType == ADDED. + OldValue string `protobuf:"bytes,2,opt,name=old_value,json=oldValue,proto3" json:"old_value,omitempty"` + // Value of the changed object in the new Service configuration, + // in JSON format. This field will not be populated if ChangeType == REMOVED. + NewValue string `protobuf:"bytes,3,opt,name=new_value,json=newValue,proto3" json:"new_value,omitempty"` + // The type for this change, either ADDED, REMOVED, or MODIFIED. 
+ ChangeType ChangeType `protobuf:"varint,4,opt,name=change_type,json=changeType,proto3,enum=google.api.ChangeType" json:"change_type,omitempty"` + // Collection of advice provided for this change, useful for determining the + // possible impact of this change. + Advices []*Advice `protobuf:"bytes,5,rep,name=advices,proto3" json:"advices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigChange) Reset() { *m = ConfigChange{} } +func (m *ConfigChange) String() string { return proto.CompactTextString(m) } +func (*ConfigChange) ProtoMessage() {} +func (*ConfigChange) Descriptor() ([]byte, []int) { + return fileDescriptor_config_change_22274c34b306f4cb, []int{0} +} +func (m *ConfigChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigChange.Unmarshal(m, b) +} +func (m *ConfigChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigChange.Marshal(b, m, deterministic) +} +func (dst *ConfigChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigChange.Merge(dst, src) +} +func (m *ConfigChange) XXX_Size() int { + return xxx_messageInfo_ConfigChange.Size(m) +} +func (m *ConfigChange) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigChange.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigChange proto.InternalMessageInfo + +func (m *ConfigChange) GetElement() string { + if m != nil { + return m.Element + } + return "" +} + +func (m *ConfigChange) GetOldValue() string { + if m != nil { + return m.OldValue + } + return "" +} + +func (m *ConfigChange) GetNewValue() string { + if m != nil { + return m.NewValue + } + return "" +} + +func (m *ConfigChange) GetChangeType() ChangeType { + if m != nil { + return m.ChangeType + } + return ChangeType_CHANGE_TYPE_UNSPECIFIED +} + +func (m *ConfigChange) GetAdvices() []*Advice { + if m != nil { + return m.Advices + } + return nil +} + +// Generated advice about this change, used for providing more +// information about how a change will affect the existing service. +type Advice struct { + // Useful description for why this advice was applied and what actions should + // be taken to mitigate any implied risks. 
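As a minimal usage sketch of the ConfigChange and Advice accessors defined in this file (illustrative only; the element path comes from the examples above, the values are hypothetical):

    package main

    import (
        "fmt"

        "google.golang.org/genproto/googleapis/api/configchange"
    )

    func main() {
        cc := &configchange.ConfigChange{
            Element:    "logging.producer_destinations[0]",
            OldValue:   `"old-destination"`, // old value in JSON form
            NewValue:   `"new-destination"`,
            ChangeType: configchange.ChangeType_MODIFIED,
            Advices:    []*configchange.Advice{{Description: "verify downstream consumers"}},
        }
        if cc.GetChangeType() == configchange.ChangeType_MODIFIED {
            fmt.Printf("%s: %s -> %s\n", cc.GetElement(), cc.GetOldValue(), cc.GetNewValue())
        }
        for _, a := range cc.GetAdvices() {
            fmt.Println("advice:", a.GetDescription())
        }
    }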
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Advice) Reset() { *m = Advice{} } +func (m *Advice) String() string { return proto.CompactTextString(m) } +func (*Advice) ProtoMessage() {} +func (*Advice) Descriptor() ([]byte, []int) { + return fileDescriptor_config_change_22274c34b306f4cb, []int{1} +} +func (m *Advice) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Advice.Unmarshal(m, b) +} +func (m *Advice) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Advice.Marshal(b, m, deterministic) +} +func (dst *Advice) XXX_Merge(src proto.Message) { + xxx_messageInfo_Advice.Merge(dst, src) +} +func (m *Advice) XXX_Size() int { + return xxx_messageInfo_Advice.Size(m) +} +func (m *Advice) XXX_DiscardUnknown() { + xxx_messageInfo_Advice.DiscardUnknown(m) +} + +var xxx_messageInfo_Advice proto.InternalMessageInfo + +func (m *Advice) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*ConfigChange)(nil), "google.api.ConfigChange") + proto.RegisterType((*Advice)(nil), "google.api.Advice") + proto.RegisterEnum("google.api.ChangeType", ChangeType_name, ChangeType_value) +} + +func init() { + proto.RegisterFile("google/api/config_change.proto", fileDescriptor_config_change_22274c34b306f4cb) +} + +var fileDescriptor_config_change_22274c34b306f4cb = []byte{ + // 338 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x91, 0xcd, 0x4e, 0xc2, 0x40, + 0x14, 0x85, 0x2d, 0xff, 0xdc, 0x12, 0x82, 0xb3, 0xd0, 0x26, 0x24, 0xa6, 0x61, 0x45, 0x88, 0x69, + 0x13, 0x5c, 0xb8, 0x70, 0x55, 0xda, 0x8a, 0x2c, 0x80, 0xa6, 0x22, 0x89, 0x6e, 0x9a, 0xb1, 0x1d, + 0xc7, 0x49, 0xca, 0xcc, 0x08, 0x15, 0xc2, 0xeb, 0xf8, 0x36, 0xbe, 0x95, 0xa1, 0x03, 0xd2, 0xdd, + 0x9c, 0xf9, 0xce, 0xcd, 0x3d, 0x39, 0x17, 0x6e, 0xa8, 0x10, 0x34, 0x25, 0x36, 0x96, 0xcc, 0x8e, + 0x05, 0xff, 0x60, 0x34, 0x8a, 0x3f, 0x31, 0xa7, 0xc4, 0x92, 0x6b, 0x91, 0x09, 0x04, 0x8a, 0x5b, + 0x58, 0xb2, 0xde, 0xaf, 0x06, 0x2d, 0x37, 0xf7, 0xb8, 0xb9, 0x05, 0x19, 0x50, 0x27, 0x29, 0x59, + 0x11, 0x9e, 0x19, 0x9a, 0xa9, 0xf5, 0x9b, 0xe1, 0x49, 0xa2, 0x2e, 0x34, 0x45, 0x9a, 0x44, 0x5b, + 0x9c, 0x7e, 0x13, 0xa3, 0x94, 0xb3, 0x86, 0x48, 0x93, 0xe5, 0x41, 0x1f, 0x20, 0x27, 0xbb, 0x23, + 0x2c, 0x2b, 0xc8, 0xc9, 0x4e, 0xc1, 0x7b, 0xd0, 0x55, 0x80, 0x28, 0xdb, 0x4b, 0x62, 0x54, 0x4c, + 0xad, 0xdf, 0x1e, 0x5e, 0x59, 0xe7, 0x18, 0x96, 0x5a, 0xbe, 0xd8, 0x4b, 0x12, 0x42, 0xfc, 0xff, + 0x46, 0xb7, 0x50, 0xc7, 0xc9, 0x96, 0xc5, 0x64, 0x63, 0x54, 0xcd, 0x72, 0x5f, 0x1f, 0xa2, 0xe2, + 0x90, 0x93, 0xa3, 0xf0, 0x64, 0xe9, 0x0d, 0xa0, 0xa6, 0xbe, 0x90, 0x09, 0x7a, 0x42, 0x36, 0xf1, + 0x9a, 0xc9, 0x8c, 0x09, 0x7e, 0x0c, 0x5b, 0xfc, 0x1a, 0xcc, 0x01, 0xce, 0x3b, 0x51, 0x17, 0xae, + 0xdd, 0x27, 0x67, 0x36, 0xf6, 0xa3, 0xc5, 0x6b, 0xe0, 0x47, 0x2f, 0xb3, 0xe7, 0xc0, 0x77, 0x27, + 0x8f, 0x13, 0xdf, 0xeb, 0x5c, 0xa0, 0x26, 0x54, 0x1d, 0xcf, 0xf3, 0xbd, 0x8e, 0x86, 0x74, 0xa8, + 0x87, 0xfe, 0x74, 0xbe, 0xf4, 0xbd, 0x4e, 0x09, 0xb5, 0xa0, 0x31, 0x9d, 0x7b, 0xca, 0x55, 0x1e, + 0x7d, 0x41, 0x3b, 0x16, 0xab, 0x42, 0xbc, 0xd1, 0x65, 0xb1, 0xd7, 0xe0, 0xd0, 0x7c, 0xa0, 0xbd, + 0xb9, 0x47, 0x03, 0x15, 0x29, 0xe6, 0xd4, 0x12, 0x6b, 0x6a, 0x53, 0xc2, 0xf3, 0xbb, 0xd8, 0x0a, + 0x61, 0xc9, 0x36, 0x85, 0xd3, 0xa9, 0x36, 0x1e, 0x8a, 0xe2, 0xa7, 0x54, 0x19, 0x3b, 
0xc1, 0xe4, + 0xbd, 0x96, 0x8f, 0xdd, 0xfd, 0x05, 0x00, 0x00, 0xff, 0xff, 0x46, 0x8b, 0xd3, 0xf5, 0xf0, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/distribution/distribution.pb.go b/vendor/google.golang.org/genproto/googleapis/api/distribution/distribution.pb.go new file mode 100644 index 0000000..7e30f53 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/distribution/distribution.pb.go @@ -0,0 +1,714 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/distribution.proto + +package distribution // import "google.golang.org/genproto/googleapis/api/distribution" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Distribution` contains summary statistics for a population of values. It +// optionally contains a histogram representing the distribution of those values +// across a set of buckets. +// +// The summary statistics are the count, mean, sum of the squared deviation from +// the mean, the minimum, and the maximum of the set of population of values. +// The histogram is based on a sequence of buckets and gives a count of values +// that fall into each bucket. The boundaries of the buckets are given either +// explicitly or by formulas for buckets of fixed or exponentially increasing +// widths. +// +// Although it is not forbidden, it is generally a bad idea to include +// non-finite values (infinities or NaNs) in the population of values, as this +// will render the `mean` and `sum_of_squared_deviation` fields meaningless. +type Distribution struct { + // The number of values in the population. Must be non-negative. This value + // must equal the sum of the values in `bucket_counts` if a histogram is + // provided. + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + // The arithmetic mean of the values in the population. If `count` is zero + // then this field must be zero. + Mean float64 `protobuf:"fixed64,2,opt,name=mean,proto3" json:"mean,omitempty"` + // The sum of squared deviations from the mean of the values in the + // population. For values x_i this is: + // + // Sum[i=1..n]((x_i - mean)^2) + // + // Knuth, "The Art of Computer Programming", Vol. 2, page 323, 3rd edition + // describes Welford's method for accumulating this sum in one pass. + // + // If `count` is zero then this field must be zero. + SumOfSquaredDeviation float64 `protobuf:"fixed64,3,opt,name=sum_of_squared_deviation,json=sumOfSquaredDeviation,proto3" json:"sum_of_squared_deviation,omitempty"` + // If specified, contains the range of the population values. The field + // must not be present if the `count` is zero. + Range *Distribution_Range `protobuf:"bytes,4,opt,name=range,proto3" json:"range,omitempty"` + // Defines the histogram bucket boundaries. If the distribution does not + // contain a histogram, then omit this field. 
+ BucketOptions *Distribution_BucketOptions `protobuf:"bytes,6,opt,name=bucket_options,json=bucketOptions,proto3" json:"bucket_options,omitempty"` + // The number of values in each bucket of the histogram, as described in + // `bucket_options`. If the distribution does not have a histogram, then omit + // this field. If there is a histogram, then the sum of the values in + // `bucket_counts` must equal the value in the `count` field of the + // distribution. + // + // If present, `bucket_counts` should contain N values, where N is the number + // of buckets specified in `bucket_options`. If you supply fewer than N + // values, the remaining values are assumed to be 0. + // + // The order of the values in `bucket_counts` follows the bucket numbering + // schemes described for the three bucket types. The first value must be the + // count for the underflow bucket (number 0). The next N-2 values are the + // counts for the finite buckets (number 1 through N-2). The N'th value in + // `bucket_counts` is the count for the overflow bucket (number N-1). + BucketCounts []int64 `protobuf:"varint,7,rep,packed,name=bucket_counts,json=bucketCounts,proto3" json:"bucket_counts,omitempty"` + // Must be in increasing order of `value` field. + Exemplars []*Distribution_Exemplar `protobuf:"bytes,10,rep,name=exemplars,proto3" json:"exemplars,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution) Reset() { *m = Distribution{} } +func (m *Distribution) String() string { return proto.CompactTextString(m) } +func (*Distribution) ProtoMessage() {} +func (*Distribution) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0} +} +func (m *Distribution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution.Unmarshal(m, b) +} +func (m *Distribution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution.Marshal(b, m, deterministic) +} +func (dst *Distribution) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution.Merge(dst, src) +} +func (m *Distribution) XXX_Size() int { + return xxx_messageInfo_Distribution.Size(m) +} +func (m *Distribution) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution proto.InternalMessageInfo + +func (m *Distribution) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *Distribution) GetMean() float64 { + if m != nil { + return m.Mean + } + return 0 +} + +func (m *Distribution) GetSumOfSquaredDeviation() float64 { + if m != nil { + return m.SumOfSquaredDeviation + } + return 0 +} + +func (m *Distribution) GetRange() *Distribution_Range { + if m != nil { + return m.Range + } + return nil +} + +func (m *Distribution) GetBucketOptions() *Distribution_BucketOptions { + if m != nil { + return m.BucketOptions + } + return nil +} + +func (m *Distribution) GetBucketCounts() []int64 { + if m != nil { + return m.BucketCounts + } + return nil +} + +func (m *Distribution) GetExemplars() []*Distribution_Exemplar { + if m != nil { + return m.Exemplars + } + return nil +} + +// The range of the population values. +type Distribution_Range struct { + // The minimum of the population values. + Min float64 `protobuf:"fixed64,1,opt,name=min,proto3" json:"min,omitempty"` + // The maximum of the population values. 
+ Max float64 `protobuf:"fixed64,2,opt,name=max,proto3" json:"max,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_Range) Reset() { *m = Distribution_Range{} } +func (m *Distribution_Range) String() string { return proto.CompactTextString(m) } +func (*Distribution_Range) ProtoMessage() {} +func (*Distribution_Range) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 0} +} +func (m *Distribution_Range) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_Range.Unmarshal(m, b) +} +func (m *Distribution_Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_Range.Marshal(b, m, deterministic) +} +func (dst *Distribution_Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_Range.Merge(dst, src) +} +func (m *Distribution_Range) XXX_Size() int { + return xxx_messageInfo_Distribution_Range.Size(m) +} +func (m *Distribution_Range) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_Range proto.InternalMessageInfo + +func (m *Distribution_Range) GetMin() float64 { + if m != nil { + return m.Min + } + return 0 +} + +func (m *Distribution_Range) GetMax() float64 { + if m != nil { + return m.Max + } + return 0 +} + +// `BucketOptions` describes the bucket boundaries used to create a histogram +// for the distribution. The buckets can be in a linear sequence, an +// exponential sequence, or each bucket can be specified explicitly. +// `BucketOptions` does not include the number of values in each bucket. +// +// A bucket has an inclusive lower bound and exclusive upper bound for the +// values that are counted for that bucket. The upper bound of a bucket must +// be strictly greater than the lower bound. The sequence of N buckets for a +// distribution consists of an underflow bucket (number 0), zero or more +// finite buckets (number 1 through N - 2) and an overflow bucket (number N - +// 1). The buckets are contiguous: the lower bound of bucket i (i > 0) is the +// same as the upper bound of bucket i - 1. The buckets span the whole range +// of finite values: lower bound of the underflow bucket is -infinity and the +// upper bound of the overflow bucket is +infinity. The finite buckets are +// so-called because both bounds are finite. +type Distribution_BucketOptions struct { + // Exactly one of these three fields must be set. 
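A minimal sketch of how a Distribution with linear buckets might be populated using the types defined in this file (illustrative only; the concrete numbers are hypothetical, chosen so that Count equals the sum of BucketCounts as the comments require):

    package main

    import (
        "fmt"

        "google.golang.org/genproto/googleapis/api/distribution"
    )

    func main() {
        // Five hypothetical samples: three in [0,10), one in [10,20), one >= 30 (overflow).
        d := &distribution.Distribution{
            Count:        5,
            Mean:         12.4, // arithmetic mean of the five hypothetical samples
            BucketCounts: []int64{0, 3, 1, 0, 1}, // underflow, 3 finite buckets, overflow
            BucketOptions: &distribution.Distribution_BucketOptions{
                Options: &distribution.Distribution_BucketOptions_LinearBuckets{
                    LinearBuckets: &distribution.Distribution_BucketOptions_Linear{
                        NumFiniteBuckets: 3,
                        Width:            10,
                        Offset:           0,
                    },
                },
            },
        }
        fmt.Println(d.GetCount(), d.GetBucketOptions().GetLinearBuckets().GetWidth())
    }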
+ // + // Types that are valid to be assigned to Options: + // *Distribution_BucketOptions_LinearBuckets + // *Distribution_BucketOptions_ExponentialBuckets + // *Distribution_BucketOptions_ExplicitBuckets + Options isDistribution_BucketOptions_Options `protobuf_oneof:"options"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_BucketOptions) Reset() { *m = Distribution_BucketOptions{} } +func (m *Distribution_BucketOptions) String() string { return proto.CompactTextString(m) } +func (*Distribution_BucketOptions) ProtoMessage() {} +func (*Distribution_BucketOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 1} +} +func (m *Distribution_BucketOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_BucketOptions.Unmarshal(m, b) +} +func (m *Distribution_BucketOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_BucketOptions.Marshal(b, m, deterministic) +} +func (dst *Distribution_BucketOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_BucketOptions.Merge(dst, src) +} +func (m *Distribution_BucketOptions) XXX_Size() int { + return xxx_messageInfo_Distribution_BucketOptions.Size(m) +} +func (m *Distribution_BucketOptions) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_BucketOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_BucketOptions proto.InternalMessageInfo + +type isDistribution_BucketOptions_Options interface { + isDistribution_BucketOptions_Options() +} + +type Distribution_BucketOptions_LinearBuckets struct { + LinearBuckets *Distribution_BucketOptions_Linear `protobuf:"bytes,1,opt,name=linear_buckets,json=linearBuckets,proto3,oneof"` +} + +type Distribution_BucketOptions_ExponentialBuckets struct { + ExponentialBuckets *Distribution_BucketOptions_Exponential `protobuf:"bytes,2,opt,name=exponential_buckets,json=exponentialBuckets,proto3,oneof"` +} + +type Distribution_BucketOptions_ExplicitBuckets struct { + ExplicitBuckets *Distribution_BucketOptions_Explicit `protobuf:"bytes,3,opt,name=explicit_buckets,json=explicitBuckets,proto3,oneof"` +} + +func (*Distribution_BucketOptions_LinearBuckets) isDistribution_BucketOptions_Options() {} + +func (*Distribution_BucketOptions_ExponentialBuckets) isDistribution_BucketOptions_Options() {} + +func (*Distribution_BucketOptions_ExplicitBuckets) isDistribution_BucketOptions_Options() {} + +func (m *Distribution_BucketOptions) GetOptions() isDistribution_BucketOptions_Options { + if m != nil { + return m.Options + } + return nil +} + +func (m *Distribution_BucketOptions) GetLinearBuckets() *Distribution_BucketOptions_Linear { + if x, ok := m.GetOptions().(*Distribution_BucketOptions_LinearBuckets); ok { + return x.LinearBuckets + } + return nil +} + +func (m *Distribution_BucketOptions) GetExponentialBuckets() *Distribution_BucketOptions_Exponential { + if x, ok := m.GetOptions().(*Distribution_BucketOptions_ExponentialBuckets); ok { + return x.ExponentialBuckets + } + return nil +} + +func (m *Distribution_BucketOptions) GetExplicitBuckets() *Distribution_BucketOptions_Explicit { + if x, ok := m.GetOptions().(*Distribution_BucketOptions_ExplicitBuckets); ok { + return x.ExplicitBuckets + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Distribution_BucketOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Distribution_BucketOptions_OneofMarshaler, _Distribution_BucketOptions_OneofUnmarshaler, _Distribution_BucketOptions_OneofSizer, []interface{}{ + (*Distribution_BucketOptions_LinearBuckets)(nil), + (*Distribution_BucketOptions_ExponentialBuckets)(nil), + (*Distribution_BucketOptions_ExplicitBuckets)(nil), + } +} + +func _Distribution_BucketOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Distribution_BucketOptions) + // options + switch x := m.Options.(type) { + case *Distribution_BucketOptions_LinearBuckets: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LinearBuckets); err != nil { + return err + } + case *Distribution_BucketOptions_ExponentialBuckets: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExponentialBuckets); err != nil { + return err + } + case *Distribution_BucketOptions_ExplicitBuckets: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExplicitBuckets); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Distribution_BucketOptions.Options has unexpected type %T", x) + } + return nil +} + +func _Distribution_BucketOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Distribution_BucketOptions) + switch tag { + case 1: // options.linear_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_BucketOptions_Linear) + err := b.DecodeMessage(msg) + m.Options = &Distribution_BucketOptions_LinearBuckets{msg} + return true, err + case 2: // options.exponential_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_BucketOptions_Exponential) + err := b.DecodeMessage(msg) + m.Options = &Distribution_BucketOptions_ExponentialBuckets{msg} + return true, err + case 3: // options.explicit_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_BucketOptions_Explicit) + err := b.DecodeMessage(msg) + m.Options = &Distribution_BucketOptions_ExplicitBuckets{msg} + return true, err + default: + return false, nil + } +} + +func _Distribution_BucketOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Distribution_BucketOptions) + // options + switch x := m.Options.(type) { + case *Distribution_BucketOptions_LinearBuckets: + s := proto.Size(x.LinearBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Distribution_BucketOptions_ExponentialBuckets: + s := proto.Size(x.ExponentialBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Distribution_BucketOptions_ExplicitBuckets: + s := proto.Size(x.ExplicitBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specifies a linear sequence of buckets that all have the same width +// (except overflow and underflow). Each bucket represents a constant +// absolute uncertainty on the specific value in the bucket. +// +// There are `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the +// following boundaries: +// +// Upper bound (0 <= i < N-1): offset + (width * i). 
+// Lower bound (1 <= i < N): offset + (width * (i - 1)). +type Distribution_BucketOptions_Linear struct { + // Must be greater than 0. + NumFiniteBuckets int32 `protobuf:"varint,1,opt,name=num_finite_buckets,json=numFiniteBuckets,proto3" json:"num_finite_buckets,omitempty"` + // Must be greater than 0. + Width float64 `protobuf:"fixed64,2,opt,name=width,proto3" json:"width,omitempty"` + // Lower bound of the first bucket. + Offset float64 `protobuf:"fixed64,3,opt,name=offset,proto3" json:"offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_BucketOptions_Linear) Reset() { *m = Distribution_BucketOptions_Linear{} } +func (m *Distribution_BucketOptions_Linear) String() string { return proto.CompactTextString(m) } +func (*Distribution_BucketOptions_Linear) ProtoMessage() {} +func (*Distribution_BucketOptions_Linear) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 1, 0} +} +func (m *Distribution_BucketOptions_Linear) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_BucketOptions_Linear.Unmarshal(m, b) +} +func (m *Distribution_BucketOptions_Linear) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_BucketOptions_Linear.Marshal(b, m, deterministic) +} +func (dst *Distribution_BucketOptions_Linear) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_BucketOptions_Linear.Merge(dst, src) +} +func (m *Distribution_BucketOptions_Linear) XXX_Size() int { + return xxx_messageInfo_Distribution_BucketOptions_Linear.Size(m) +} +func (m *Distribution_BucketOptions_Linear) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_BucketOptions_Linear.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_BucketOptions_Linear proto.InternalMessageInfo + +func (m *Distribution_BucketOptions_Linear) GetNumFiniteBuckets() int32 { + if m != nil { + return m.NumFiniteBuckets + } + return 0 +} + +func (m *Distribution_BucketOptions_Linear) GetWidth() float64 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Distribution_BucketOptions_Linear) GetOffset() float64 { + if m != nil { + return m.Offset + } + return 0 +} + +// Specifies an exponential sequence of buckets that have a width that is +// proportional to the value of the lower bound. Each bucket represents a +// constant relative uncertainty on a specific value in the bucket. +// +// There are `num_finite_buckets + 2` (= N) buckets. Bucket `i` has the +// following boundaries: +// +// Upper bound (0 <= i < N-1): scale * (growth_factor ^ i). +// Lower bound (1 <= i < N): scale * (growth_factor ^ (i - 1)). +type Distribution_BucketOptions_Exponential struct { + // Must be greater than 0. + NumFiniteBuckets int32 `protobuf:"varint,1,opt,name=num_finite_buckets,json=numFiniteBuckets,proto3" json:"num_finite_buckets,omitempty"` + // Must be greater than 1. + GrowthFactor float64 `protobuf:"fixed64,2,opt,name=growth_factor,json=growthFactor,proto3" json:"growth_factor,omitempty"` + // Must be greater than 0. 
+ Scale float64 `protobuf:"fixed64,3,opt,name=scale,proto3" json:"scale,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_BucketOptions_Exponential) Reset() { + *m = Distribution_BucketOptions_Exponential{} +} +func (m *Distribution_BucketOptions_Exponential) String() string { return proto.CompactTextString(m) } +func (*Distribution_BucketOptions_Exponential) ProtoMessage() {} +func (*Distribution_BucketOptions_Exponential) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 1, 1} +} +func (m *Distribution_BucketOptions_Exponential) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_BucketOptions_Exponential.Unmarshal(m, b) +} +func (m *Distribution_BucketOptions_Exponential) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_BucketOptions_Exponential.Marshal(b, m, deterministic) +} +func (dst *Distribution_BucketOptions_Exponential) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_BucketOptions_Exponential.Merge(dst, src) +} +func (m *Distribution_BucketOptions_Exponential) XXX_Size() int { + return xxx_messageInfo_Distribution_BucketOptions_Exponential.Size(m) +} +func (m *Distribution_BucketOptions_Exponential) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_BucketOptions_Exponential.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_BucketOptions_Exponential proto.InternalMessageInfo + +func (m *Distribution_BucketOptions_Exponential) GetNumFiniteBuckets() int32 { + if m != nil { + return m.NumFiniteBuckets + } + return 0 +} + +func (m *Distribution_BucketOptions_Exponential) GetGrowthFactor() float64 { + if m != nil { + return m.GrowthFactor + } + return 0 +} + +func (m *Distribution_BucketOptions_Exponential) GetScale() float64 { + if m != nil { + return m.Scale + } + return 0 +} + +// Specifies a set of buckets with arbitrary widths. +// +// There are `size(bounds) + 1` (= N) buckets. Bucket `i` has the following +// boundaries: +// +// Upper bound (0 <= i < N-1): bounds[i] +// Lower bound (1 <= i < N); bounds[i - 1] +// +// The `bounds` field must contain at least one element. If `bounds` has +// only one element, then there are no finite buckets, and that single +// element is the common boundary of the overflow and underflow buckets. +type Distribution_BucketOptions_Explicit struct { + // The values must be monotonically increasing. 
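The boundary formulas quoted in the comments map directly onto the generated getters. An illustrative sketch, assuming it sits next to the generated types; only the standard-library math import is needed, and the helper names are ours:

// Upper bound of finite bucket i (0 <= i < N-1) for linear buckets:
// offset + width * i.
func linearUpperBound(l *Distribution_BucketOptions_Linear, i int32) float64 {
	return l.GetOffset() + l.GetWidth()*float64(i)
}

// Upper bound of finite bucket i (0 <= i < N-1) for exponential buckets:
// scale * growth_factor^i.
func exponentialUpperBound(e *Distribution_BucketOptions_Exponential, i int32) float64 {
	return e.GetScale() * math.Pow(e.GetGrowthFactor(), float64(i))
}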
+ Bounds []float64 `protobuf:"fixed64,1,rep,packed,name=bounds,proto3" json:"bounds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_BucketOptions_Explicit) Reset() { *m = Distribution_BucketOptions_Explicit{} } +func (m *Distribution_BucketOptions_Explicit) String() string { return proto.CompactTextString(m) } +func (*Distribution_BucketOptions_Explicit) ProtoMessage() {} +func (*Distribution_BucketOptions_Explicit) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 1, 2} +} +func (m *Distribution_BucketOptions_Explicit) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_BucketOptions_Explicit.Unmarshal(m, b) +} +func (m *Distribution_BucketOptions_Explicit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_BucketOptions_Explicit.Marshal(b, m, deterministic) +} +func (dst *Distribution_BucketOptions_Explicit) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_BucketOptions_Explicit.Merge(dst, src) +} +func (m *Distribution_BucketOptions_Explicit) XXX_Size() int { + return xxx_messageInfo_Distribution_BucketOptions_Explicit.Size(m) +} +func (m *Distribution_BucketOptions_Explicit) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_BucketOptions_Explicit.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_BucketOptions_Explicit proto.InternalMessageInfo + +func (m *Distribution_BucketOptions_Explicit) GetBounds() []float64 { + if m != nil { + return m.Bounds + } + return nil +} + +// Exemplars are example points that may be used to annotate aggregated +// distribution values. They are metadata that gives information about a +// particular value added to a Distribution bucket, such as a trace ID that +// was active when a value was added. They may contain further information, +// such as a example values and timestamps, origin, etc. +type Distribution_Exemplar struct { + // Value of the exemplar point. This value determines to which bucket the + // exemplar belongs. + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + // The observation (sampling) time of the above value. + Timestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // Contextual information about the example value. Examples are: + // + // Trace: type.googleapis.com/google.monitoring.v3.SpanContext + // + // Literal string: type.googleapis.com/google.protobuf.StringValue + // + // Labels dropped during aggregation: + // type.googleapis.com/google.monitoring.v3.DroppedLabels + // + // There may be only a single attachment of any given message type in a + // single exemplar, and this is enforced by the system. 
+ Attachments []*any.Any `protobuf:"bytes,3,rep,name=attachments,proto3" json:"attachments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_Exemplar) Reset() { *m = Distribution_Exemplar{} } +func (m *Distribution_Exemplar) String() string { return proto.CompactTextString(m) } +func (*Distribution_Exemplar) ProtoMessage() {} +func (*Distribution_Exemplar) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_f8b8947deb60fb1d, []int{0, 2} +} +func (m *Distribution_Exemplar) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_Exemplar.Unmarshal(m, b) +} +func (m *Distribution_Exemplar) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_Exemplar.Marshal(b, m, deterministic) +} +func (dst *Distribution_Exemplar) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_Exemplar.Merge(dst, src) +} +func (m *Distribution_Exemplar) XXX_Size() int { + return xxx_messageInfo_Distribution_Exemplar.Size(m) +} +func (m *Distribution_Exemplar) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_Exemplar.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_Exemplar proto.InternalMessageInfo + +func (m *Distribution_Exemplar) GetValue() float64 { + if m != nil { + return m.Value + } + return 0 +} + +func (m *Distribution_Exemplar) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +func (m *Distribution_Exemplar) GetAttachments() []*any.Any { + if m != nil { + return m.Attachments + } + return nil +} + +func init() { + proto.RegisterType((*Distribution)(nil), "google.api.Distribution") + proto.RegisterType((*Distribution_Range)(nil), "google.api.Distribution.Range") + proto.RegisterType((*Distribution_BucketOptions)(nil), "google.api.Distribution.BucketOptions") + proto.RegisterType((*Distribution_BucketOptions_Linear)(nil), "google.api.Distribution.BucketOptions.Linear") + proto.RegisterType((*Distribution_BucketOptions_Exponential)(nil), "google.api.Distribution.BucketOptions.Exponential") + proto.RegisterType((*Distribution_BucketOptions_Explicit)(nil), "google.api.Distribution.BucketOptions.Explicit") + proto.RegisterType((*Distribution_Exemplar)(nil), "google.api.Distribution.Exemplar") +} + +func init() { + proto.RegisterFile("google/api/distribution.proto", fileDescriptor_distribution_f8b8947deb60fb1d) +} + +var fileDescriptor_distribution_f8b8947deb60fb1d = []byte{ + // 631 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xed, 0x6a, 0xd4, 0x40, + 0x14, 0x6d, 0x9a, 0xdd, 0x6d, 0x7b, 0xb7, 0x5b, 0xeb, 0x58, 0x25, 0x06, 0xd4, 0xb5, 0x05, 0x59, + 0x50, 0xb3, 0xb0, 0x8a, 0x0a, 0xfe, 0x90, 0x6e, 0x3f, 0xac, 0xa0, 0xb4, 0x8c, 0xe2, 0x0f, 0x11, + 0xc2, 0x6c, 0x76, 0x92, 0x0e, 0x26, 0x33, 0x69, 0x32, 0x69, 0xb7, 0xaf, 0xe1, 0x23, 0xf8, 0x16, + 0xbe, 0x8a, 0x4f, 0x23, 0xf3, 0x91, 0x6e, 0x6a, 0x29, 0xd4, 0x7f, 0xb9, 0xf7, 0x9c, 0x7b, 0xce, + 0xbd, 0x73, 0x67, 0x02, 0x0f, 0x12, 0x21, 0x92, 0x94, 0x0e, 0x49, 0xce, 0x86, 0x53, 0x56, 0xca, + 0x82, 0x4d, 0x2a, 0xc9, 0x04, 0x0f, 0xf2, 0x42, 0x48, 0x81, 0xc0, 0xc0, 0x01, 0xc9, 0x99, 0x7f, + 0xdf, 0x52, 0x35, 0x32, 0xa9, 0xe2, 0x21, 0xe1, 0xe7, 0x86, 0xe6, 0x3f, 0xfa, 0x17, 0x92, 0x2c, + 0xa3, 0xa5, 0x24, 0x59, 0x6e, 0x08, 0x9b, 0x7f, 0x96, 0x61, 0x75, 0xb7, 0x21, 0x8f, 0x36, 0xa0, + 0x1d, 0x89, 0x8a, 0x4b, 0xcf, 0xe9, 0x3b, 0x03, 0x17, 0x9b, 0x00, 0x21, 0x68, 0x65, 
0x94, 0x70, + 0x6f, 0xb1, 0xef, 0x0c, 0x1c, 0xac, 0xbf, 0xd1, 0x6b, 0xf0, 0xca, 0x2a, 0x0b, 0x45, 0x1c, 0x96, + 0x27, 0x15, 0x29, 0xe8, 0x34, 0x9c, 0xd2, 0x53, 0x46, 0x94, 0x8a, 0xe7, 0x6a, 0xde, 0xdd, 0xb2, + 0xca, 0x0e, 0xe3, 0xcf, 0x06, 0xdd, 0xad, 0x41, 0xf4, 0x12, 0xda, 0x05, 0xe1, 0x09, 0xf5, 0x5a, + 0x7d, 0x67, 0xd0, 0x1d, 0x3d, 0x0c, 0xe6, 0xb3, 0x04, 0xcd, 0x5e, 0x02, 0xac, 0x58, 0xd8, 0x90, + 0xd1, 0x27, 0x58, 0x9b, 0x54, 0xd1, 0x0f, 0x2a, 0x43, 0x91, 0x2b, 0xb4, 0xf4, 0x3a, 0xba, 0xfc, + 0xc9, 0xb5, 0xe5, 0x63, 0x4d, 0x3f, 0x34, 0x6c, 0xdc, 0x9b, 0x34, 0x43, 0xb4, 0x05, 0x36, 0x11, + 0xea, 0x09, 0x4b, 0x6f, 0xa9, 0xef, 0x0e, 0x5c, 0xbc, 0x6a, 0x92, 0x3b, 0x3a, 0x87, 0xde, 0xc1, + 0x0a, 0x9d, 0xd1, 0x2c, 0x4f, 0x49, 0x51, 0x7a, 0xd0, 0x77, 0x07, 0xdd, 0xd1, 0xe3, 0x6b, 0xed, + 0xf6, 0x2c, 0x13, 0xcf, 0x6b, 0xfc, 0xa7, 0xd0, 0xd6, 0x43, 0xa0, 0x75, 0x70, 0x33, 0xc6, 0xf5, + 0xa1, 0x3a, 0x58, 0x7d, 0xea, 0x0c, 0x99, 0xd9, 0x13, 0x55, 0x9f, 0xfe, 0xef, 0x16, 0xf4, 0x2e, + 0xf5, 0x8c, 0xbe, 0xc2, 0x5a, 0xca, 0x38, 0x25, 0x45, 0x68, 0xda, 0x2a, 0xb5, 0x40, 0x77, 0xf4, + 0xfc, 0x66, 0x33, 0x07, 0x1f, 0x75, 0xf1, 0xc1, 0x02, 0xee, 0x19, 0x19, 0x83, 0x96, 0x88, 0xc2, + 0x1d, 0x3a, 0xcb, 0x05, 0xa7, 0x5c, 0x32, 0x92, 0x5e, 0x88, 0x2f, 0x6a, 0xf1, 0xd1, 0x0d, 0xc5, + 0xf7, 0xe6, 0x0a, 0x07, 0x0b, 0x18, 0x35, 0x04, 0x6b, 0x9b, 0xef, 0xb0, 0x4e, 0x67, 0x79, 0xca, + 0x22, 0x26, 0x2f, 0x3c, 0x5c, 0xed, 0x31, 0xbc, 0xb9, 0x87, 0x2e, 0x3f, 0x58, 0xc0, 0xb7, 0x6a, + 0x29, 0xab, 0xee, 0x4f, 0xa1, 0x63, 0xe6, 0x43, 0xcf, 0x00, 0xf1, 0x2a, 0x0b, 0x63, 0xc6, 0x99, + 0xa4, 0x97, 0x8e, 0xaa, 0x8d, 0xd7, 0x79, 0x95, 0xed, 0x6b, 0xa0, 0xee, 0x6a, 0x03, 0xda, 0x67, + 0x6c, 0x2a, 0x8f, 0xed, 0xd1, 0x9b, 0x00, 0xdd, 0x83, 0x8e, 0x88, 0xe3, 0x92, 0x4a, 0x7b, 0x77, + 0x6d, 0xe4, 0x9f, 0x42, 0xb7, 0x31, 0xe8, 0x7f, 0x5a, 0x6d, 0x41, 0x2f, 0x29, 0xc4, 0x99, 0x3c, + 0x0e, 0x63, 0x12, 0x49, 0x51, 0x58, 0xcb, 0x55, 0x93, 0xdc, 0xd7, 0x39, 0xd5, 0x4f, 0x19, 0x91, + 0x94, 0x5a, 0x63, 0x13, 0xf8, 0x9b, 0xb0, 0x5c, 0x0f, 0xaf, 0x7a, 0x9b, 0x88, 0x8a, 0x4f, 0x95, + 0x91, 0xab, 0x7a, 0x33, 0xd1, 0x78, 0x05, 0x96, 0xec, 0x5b, 0xf0, 0x7f, 0x3a, 0x8a, 0x6f, 0xae, + 0x9d, 0x52, 0x3c, 0x25, 0x69, 0x45, 0xed, 0x75, 0x33, 0x01, 0x7a, 0x03, 0x2b, 0x17, 0xaf, 0xdf, + 0xae, 0xda, 0xaf, 0xd7, 0x50, 0xff, 0x1f, 0x82, 0x2f, 0x35, 0x03, 0xcf, 0xc9, 0xe8, 0x15, 0x74, + 0x89, 0x94, 0x24, 0x3a, 0xce, 0x28, 0xd7, 0x2b, 0x54, 0x0f, 0x61, 0xe3, 0x4a, 0xed, 0x36, 0x3f, + 0xc7, 0x4d, 0xe2, 0xf8, 0x04, 0xd6, 0x22, 0x91, 0x35, 0x56, 0x3d, 0xbe, 0xdd, 0xdc, 0xf5, 0x91, + 0x2a, 0x3c, 0x72, 0xbe, 0xed, 0x58, 0x42, 0x22, 0x52, 0xc2, 0x93, 0x40, 0x14, 0xc9, 0x30, 0xa1, + 0x5c, 0xcb, 0x0e, 0x0d, 0x44, 0x72, 0x56, 0x5e, 0xf9, 0x13, 0xbe, 0x6d, 0x06, 0xbf, 0x16, 0x5b, + 0xef, 0xb7, 0x8f, 0x3e, 0x4c, 0x3a, 0xba, 0xec, 0xc5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x89, + 0xf1, 0xc2, 0x23, 0x3f, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/experimental.pb.go b/vendor/google.golang.org/genproto/googleapis/api/experimental.pb.go new file mode 100644 index 0000000..0864b43 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/experimental.pb.go @@ -0,0 +1,86 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/experimental/experimental.proto + +package api // import "google.golang.org/genproto/googleapis/api" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Experimental service configuration. These configuration options can +// only be used by whitelisted users. +type Experimental struct { + // Authorization configuration. + Authorization *AuthorizationConfig `protobuf:"bytes,8,opt,name=authorization,proto3" json:"authorization,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Experimental) Reset() { *m = Experimental{} } +func (m *Experimental) String() string { return proto.CompactTextString(m) } +func (*Experimental) ProtoMessage() {} +func (*Experimental) Descriptor() ([]byte, []int) { + return fileDescriptor_experimental_fa165d72ce8e6da4, []int{0} +} +func (m *Experimental) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Experimental.Unmarshal(m, b) +} +func (m *Experimental) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Experimental.Marshal(b, m, deterministic) +} +func (dst *Experimental) XXX_Merge(src proto.Message) { + xxx_messageInfo_Experimental.Merge(dst, src) +} +func (m *Experimental) XXX_Size() int { + return xxx_messageInfo_Experimental.Size(m) +} +func (m *Experimental) XXX_DiscardUnknown() { + xxx_messageInfo_Experimental.DiscardUnknown(m) +} + +var xxx_messageInfo_Experimental proto.InternalMessageInfo + +func (m *Experimental) GetAuthorization() *AuthorizationConfig { + if m != nil { + return m.Authorization + } + return nil +} + +func init() { + proto.RegisterType((*Experimental)(nil), "google.api.Experimental") +} + +func init() { + proto.RegisterFile("google/api/experimental/experimental.proto", fileDescriptor_experimental_fa165d72ce8e6da4) +} + +var fileDescriptor_experimental_fa165d72ce8e6da4 = []byte{ + // 204 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xad, 0x28, 0x48, 0x2d, 0xca, 0xcc, 0x4d, 0xcd, + 0x2b, 0x49, 0xcc, 0x41, 0xe1, 0xe8, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0x71, 0x41, 0xd4, 0xea, + 0x25, 0x16, 0x64, 0x4a, 0xc9, 0x20, 0xe9, 0x4b, 0xcc, 0xcb, 0xcb, 0x2f, 0x49, 0x2c, 0xc9, 0xcc, + 0xcf, 0x2b, 0x86, 0xa8, 0x94, 0x32, 0xc2, 0x65, 0x6a, 0x62, 0x69, 0x49, 0x46, 0x7e, 0x51, 0x66, + 0x15, 0x58, 0x75, 0x7c, 0x72, 0x7e, 0x5e, 0x5a, 0x66, 0x3a, 0x44, 0x8f, 0x52, 0x28, 0x17, 0x8f, + 0x2b, 0x92, 0x52, 0x21, 0x57, 0x2e, 0x5e, 0x14, 0xd5, 0x12, 0x1c, 0x0a, 0x8c, 0x1a, 0xdc, 0x46, + 0xf2, 0x7a, 0x08, 0x57, 0xe8, 0x39, 0x22, 0x2b, 0x70, 0x06, 0x9b, 0x16, 0x84, 0xaa, 0xcb, 0x29, + 0x9a, 0x8b, 0x2f, 0x39, 0x3f, 0x17, 0x49, 0x93, 0x93, 0x20, 0xb2, 0x35, 0x01, 0x20, 0xbb, 0x03, + 0x18, 0xa3, 0x74, 0xa1, 0x0a, 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, + 0xd3, 0x53, 0xf3, 0xc0, 0x2e, 0xd3, 
0x87, 0x48, 0x25, 0x16, 0x64, 0x16, 0x83, 0x3c, 0x64, 0x9d, + 0x58, 0x90, 0xb9, 0x88, 0x89, 0xc5, 0xdd, 0x31, 0xc0, 0x33, 0x89, 0x0d, 0xac, 0xc0, 0x18, 0x10, + 0x00, 0x00, 0xff, 0xff, 0xa0, 0x95, 0x20, 0xe5, 0x46, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/cel_service.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/cel_service.pb.go new file mode 100644 index 0000000..c8cdd64 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/cel_service.pb.go @@ -0,0 +1,195 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/cel_service.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CelServiceClient is the client API for CelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CelServiceClient interface { + // Transforms CEL source text into a parsed representation. + Parse(ctx context.Context, in *ParseRequest, opts ...grpc.CallOption) (*ParseResponse, error) + // Runs static checks on a parsed CEL representation and return + // an annotated representation, or a set of issues. + Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) + // Evaluates a parsed or annotation CEL representation given + // values of external bindings. + Eval(ctx context.Context, in *EvalRequest, opts ...grpc.CallOption) (*EvalResponse, error) +} + +type celServiceClient struct { + cc *grpc.ClientConn +} + +func NewCelServiceClient(cc *grpc.ClientConn) CelServiceClient { + return &celServiceClient{cc} +} + +func (c *celServiceClient) Parse(ctx context.Context, in *ParseRequest, opts ...grpc.CallOption) (*ParseResponse, error) { + out := new(ParseResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.CelService/Parse", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *celServiceClient) Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) { + out := new(CheckResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.CelService/Check", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *celServiceClient) Eval(ctx context.Context, in *EvalRequest, opts ...grpc.CallOption) (*EvalResponse, error) { + out := new(EvalResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.CelService/Eval", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// CelServiceServer is the server API for CelService service. +type CelServiceServer interface { + // Transforms CEL source text into a parsed representation. + Parse(context.Context, *ParseRequest) (*ParseResponse, error) + // Runs static checks on a parsed CEL representation and return + // an annotated representation, or a set of issues. + Check(context.Context, *CheckRequest) (*CheckResponse, error) + // Evaluates a parsed or annotation CEL representation given + // values of external bindings. + Eval(context.Context, *EvalRequest) (*EvalResponse, error) +} + +func RegisterCelServiceServer(s *grpc.Server, srv CelServiceServer) { + s.RegisterService(&_CelService_serviceDesc, srv) +} + +func _CelService_Parse_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ParseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CelServiceServer).Parse(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.CelService/Parse", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CelServiceServer).Parse(ctx, req.(*ParseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CelService_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CelServiceServer).Check(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.CelService/Check", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CelServiceServer).Check(ctx, req.(*CheckRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CelService_Eval_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EvalRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CelServiceServer).Eval(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.CelService/Eval", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CelServiceServer).Eval(ctx, req.(*EvalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.api.expr.v1alpha1.CelService", + HandlerType: (*CelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Parse", + Handler: _CelService_Parse_Handler, + }, + { + MethodName: "Check", + Handler: _CelService_Check_Handler, + }, + { + MethodName: "Eval", + Handler: _CelService_Eval_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/api/expr/v1alpha1/cel_service.proto", +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/cel_service.proto", fileDescriptor_cel_service_35d601a3638fea4b) +} + +var fileDescriptor_cel_service_35d601a3638fea4b = []byte{ + // 240 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd1, 0x31, 0x4b, 0xc4, 0x30, + 0x14, 0xc0, 0x71, 0x2b, 0xea, 0x90, 0x45, 0xc8, 0x24, 0x87, 0x93, 0xe0, 0x09, 0x0e, 
0x09, 0x77, + 0x8e, 0x3a, 0xdd, 0xe1, 0x5e, 0x74, 0x10, 0x6e, 0x91, 0x67, 0x78, 0xe6, 0x82, 0x69, 0x5e, 0x4c, + 0x6a, 0xf1, 0xcb, 0xf8, 0x3d, 0x1d, 0x25, 0x69, 0xab, 0x88, 0xc4, 0xde, 0xd8, 0xbe, 0x5f, 0xfe, + 0x81, 0x17, 0x76, 0xa9, 0x89, 0xb4, 0x45, 0x09, 0xde, 0x48, 0x7c, 0xf7, 0x41, 0x76, 0x0b, 0xb0, + 0x7e, 0x0b, 0x0b, 0xa9, 0xd0, 0x3e, 0x46, 0x0c, 0x9d, 0x51, 0x28, 0x7c, 0xa0, 0x96, 0xf8, 0x49, + 0x6f, 0x05, 0x78, 0x23, 0x92, 0x15, 0xa3, 0x9d, 0x2d, 0xcb, 0x15, 0x72, 0xcf, 0x14, 0x1a, 0x70, + 0x0a, 0x7f, 0xd7, 0x96, 0x1f, 0xfb, 0x8c, 0xad, 0xd1, 0xde, 0xf7, 0x3f, 0xf9, 0x86, 0x1d, 0xd6, + 0x10, 0x22, 0xf2, 0xb9, 0x28, 0x5d, 0x23, 0x32, 0xb8, 0xc3, 0xd7, 0x37, 0x8c, 0xed, 0xec, 0x62, + 0xd2, 0x45, 0x4f, 0x2e, 0xe2, 0xd9, 0x5e, 0x6a, 0xaf, 0xb7, 0xa8, 0x5e, 0xfe, 0x6b, 0x67, 0xb0, + 0x43, 0x7b, 0x70, 0xdf, 0xed, 0x07, 0x76, 0x70, 0xdb, 0x81, 0xe5, 0xe7, 0xe5, 0x23, 0x69, 0x3e, + 0x96, 0xe7, 0x53, 0x6c, 0x0c, 0xaf, 0x02, 0x3b, 0x55, 0xd4, 0x14, 0xf9, 0xea, 0xf8, 0x67, 0x79, + 0x75, 0x5a, 0x68, 0x5d, 0x6d, 0x6e, 0x06, 0xac, 0xc9, 0x82, 0xd3, 0x82, 0x82, 0x96, 0x1a, 0x5d, + 0x5e, 0xb7, 0xec, 0x47, 0xe0, 0x4d, 0xfc, 0xfb, 0x4a, 0xd7, 0xe9, 0xeb, 0xb3, 0xaa, 0x9e, 0x8e, + 0xb2, 0xbd, 0xfa, 0x0a, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x97, 0x50, 0xb8, 0x16, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/checked.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/checked.pb.go new file mode 100644 index 0000000..b76f7db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/checked.pb.go @@ -0,0 +1,1428 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/checked.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// CEL primitive types. +type Type_PrimitiveType int32 + +const ( + // Unspecified type. + Type_PRIMITIVE_TYPE_UNSPECIFIED Type_PrimitiveType = 0 + // Boolean type. + Type_BOOL Type_PrimitiveType = 1 + // Int64 type. + // + // Proto-based integer values are widened to int64. + Type_INT64 Type_PrimitiveType = 2 + // Uint64 type. + // + // Proto-based unsigned integer values are widened to uint64. + Type_UINT64 Type_PrimitiveType = 3 + // Double type. + // + // Proto-based float values are widened to double values. + Type_DOUBLE Type_PrimitiveType = 4 + // String type. + Type_STRING Type_PrimitiveType = 5 + // Bytes type. 
+ Type_BYTES Type_PrimitiveType = 6 +) + +var Type_PrimitiveType_name = map[int32]string{ + 0: "PRIMITIVE_TYPE_UNSPECIFIED", + 1: "BOOL", + 2: "INT64", + 3: "UINT64", + 4: "DOUBLE", + 5: "STRING", + 6: "BYTES", +} +var Type_PrimitiveType_value = map[string]int32{ + "PRIMITIVE_TYPE_UNSPECIFIED": 0, + "BOOL": 1, + "INT64": 2, + "UINT64": 3, + "DOUBLE": 4, + "STRING": 5, + "BYTES": 6, +} + +func (x Type_PrimitiveType) String() string { + return proto.EnumName(Type_PrimitiveType_name, int32(x)) +} +func (Type_PrimitiveType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 0} +} + +// Well-known protobuf types treated with first-class support in CEL. +type Type_WellKnownType int32 + +const ( + // Unspecified type. + Type_WELL_KNOWN_TYPE_UNSPECIFIED Type_WellKnownType = 0 + // Well-known protobuf.Any type. + // + // Any types are a polymorphic message type. During type-checking they are + // treated like `DYN` types, but at runtime they are resolved to a specific + // message type specified at evaluation time. + Type_ANY Type_WellKnownType = 1 + // Well-known protobuf.Timestamp type, internally referenced as `timestamp`. + Type_TIMESTAMP Type_WellKnownType = 2 + // Well-known protobuf.Duration type, internally referenced as `duration`. + Type_DURATION Type_WellKnownType = 3 +) + +var Type_WellKnownType_name = map[int32]string{ + 0: "WELL_KNOWN_TYPE_UNSPECIFIED", + 1: "ANY", + 2: "TIMESTAMP", + 3: "DURATION", +} +var Type_WellKnownType_value = map[string]int32{ + "WELL_KNOWN_TYPE_UNSPECIFIED": 0, + "ANY": 1, + "TIMESTAMP": 2, + "DURATION": 3, +} + +func (x Type_WellKnownType) String() string { + return proto.EnumName(Type_WellKnownType_name, int32(x)) +} +func (Type_WellKnownType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 1} +} + +// A CEL expression which has been successfully type checked. +type CheckedExpr struct { + // A map from expression ids to resolved references. + // + // The following entries are in this table: + // + // - An Ident or Select expression is represented here if it resolves to a + // declaration. For instance, if `a.b.c` is represented by + // `select(select(id(a), b), c)`, and `a.b` resolves to a declaration, + // while `c` is a field selection, then the reference is attached to the + // nested select expression (but not to the id or or the outer select). + // In turn, if `a` resolves to a declaration and `b.c` are field selections, + // the reference is attached to the ident expression. + // - Every Call expression has an entry here, identifying the function being + // called. + // - Every CreateStruct expression for a message has an entry, identifying + // the message. + ReferenceMap map[int64]*Reference `protobuf:"bytes,2,rep,name=reference_map,json=referenceMap,proto3" json:"reference_map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // A map from expression ids to types. + // + // Every expression node which has a type different than DYN has a mapping + // here. If an expression has type DYN, it is omitted from this map to save + // space. + TypeMap map[int64]*Type `protobuf:"bytes,3,rep,name=type_map,json=typeMap,proto3" json:"type_map,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The source info derived from input that generated the parsed `expr` and + // any optimizations made during the type-checking pass. 
+ SourceInfo *SourceInfo `protobuf:"bytes,5,opt,name=source_info,json=sourceInfo,proto3" json:"source_info,omitempty"` + // The checked expression. Semantically equivalent to the parsed `expr`, but + // may have structural differences. + Expr *Expr `protobuf:"bytes,4,opt,name=expr,proto3" json:"expr,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckedExpr) Reset() { *m = CheckedExpr{} } +func (m *CheckedExpr) String() string { return proto.CompactTextString(m) } +func (*CheckedExpr) ProtoMessage() {} +func (*CheckedExpr) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{0} +} +func (m *CheckedExpr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckedExpr.Unmarshal(m, b) +} +func (m *CheckedExpr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckedExpr.Marshal(b, m, deterministic) +} +func (dst *CheckedExpr) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckedExpr.Merge(dst, src) +} +func (m *CheckedExpr) XXX_Size() int { + return xxx_messageInfo_CheckedExpr.Size(m) +} +func (m *CheckedExpr) XXX_DiscardUnknown() { + xxx_messageInfo_CheckedExpr.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckedExpr proto.InternalMessageInfo + +func (m *CheckedExpr) GetReferenceMap() map[int64]*Reference { + if m != nil { + return m.ReferenceMap + } + return nil +} + +func (m *CheckedExpr) GetTypeMap() map[int64]*Type { + if m != nil { + return m.TypeMap + } + return nil +} + +func (m *CheckedExpr) GetSourceInfo() *SourceInfo { + if m != nil { + return m.SourceInfo + } + return nil +} + +func (m *CheckedExpr) GetExpr() *Expr { + if m != nil { + return m.Expr + } + return nil +} + +// Represents a CEL type. +type Type struct { + // The kind of type. 
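Because DYN-typed nodes are omitted from type_map, a lookup usually falls back to DYN when an id is absent. A small sketch using the generated getters (the helper name is ours; it assumes an import of github.com/golang/protobuf/ptypes/empty, the same package the generated file uses):

func checkedTypeOf(ce *CheckedExpr, exprID int64) *Type {
	if t, ok := ce.GetTypeMap()[exprID]; ok {
		return t
	}
	// Absent from type_map means the checker assigned the node type DYN.
	return &Type{TypeKind: &Type_Dyn{Dyn: &empty.Empty{}}}
}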
+ // + // Types that are valid to be assigned to TypeKind: + // *Type_Dyn + // *Type_Null + // *Type_Primitive + // *Type_Wrapper + // *Type_WellKnown + // *Type_ListType_ + // *Type_MapType_ + // *Type_Function + // *Type_MessageType + // *Type_TypeParam + // *Type_Type + // *Type_Error + // *Type_AbstractType_ + TypeKind isType_TypeKind `protobuf_oneof:"type_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type) Reset() { *m = Type{} } +func (m *Type) String() string { return proto.CompactTextString(m) } +func (*Type) ProtoMessage() {} +func (*Type) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1} +} +func (m *Type) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type.Unmarshal(m, b) +} +func (m *Type) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type.Marshal(b, m, deterministic) +} +func (dst *Type) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type.Merge(dst, src) +} +func (m *Type) XXX_Size() int { + return xxx_messageInfo_Type.Size(m) +} +func (m *Type) XXX_DiscardUnknown() { + xxx_messageInfo_Type.DiscardUnknown(m) +} + +var xxx_messageInfo_Type proto.InternalMessageInfo + +type isType_TypeKind interface { + isType_TypeKind() +} + +type Type_Dyn struct { + Dyn *empty.Empty `protobuf:"bytes,1,opt,name=dyn,proto3,oneof"` +} + +type Type_Null struct { + Null _struct.NullValue `protobuf:"varint,2,opt,name=null,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Type_Primitive struct { + Primitive Type_PrimitiveType `protobuf:"varint,3,opt,name=primitive,proto3,enum=google.api.expr.v1alpha1.Type_PrimitiveType,oneof"` +} + +type Type_Wrapper struct { + Wrapper Type_PrimitiveType `protobuf:"varint,4,opt,name=wrapper,proto3,enum=google.api.expr.v1alpha1.Type_PrimitiveType,oneof"` +} + +type Type_WellKnown struct { + WellKnown Type_WellKnownType `protobuf:"varint,5,opt,name=well_known,json=wellKnown,proto3,enum=google.api.expr.v1alpha1.Type_WellKnownType,oneof"` +} + +type Type_ListType_ struct { + ListType *Type_ListType `protobuf:"bytes,6,opt,name=list_type,json=listType,proto3,oneof"` +} + +type Type_MapType_ struct { + MapType *Type_MapType `protobuf:"bytes,7,opt,name=map_type,json=mapType,proto3,oneof"` +} + +type Type_Function struct { + Function *Type_FunctionType `protobuf:"bytes,8,opt,name=function,proto3,oneof"` +} + +type Type_MessageType struct { + MessageType string `protobuf:"bytes,9,opt,name=message_type,json=messageType,proto3,oneof"` +} + +type Type_TypeParam struct { + TypeParam string `protobuf:"bytes,10,opt,name=type_param,json=typeParam,proto3,oneof"` +} + +type Type_Type struct { + Type *Type `protobuf:"bytes,11,opt,name=type,proto3,oneof"` +} + +type Type_Error struct { + Error *empty.Empty `protobuf:"bytes,12,opt,name=error,proto3,oneof"` +} + +type Type_AbstractType_ struct { + AbstractType *Type_AbstractType `protobuf:"bytes,14,opt,name=abstract_type,json=abstractType,proto3,oneof"` +} + +func (*Type_Dyn) isType_TypeKind() {} + +func (*Type_Null) isType_TypeKind() {} + +func (*Type_Primitive) isType_TypeKind() {} + +func (*Type_Wrapper) isType_TypeKind() {} + +func (*Type_WellKnown) isType_TypeKind() {} + +func (*Type_ListType_) isType_TypeKind() {} + +func (*Type_MapType_) isType_TypeKind() {} + +func (*Type_Function) isType_TypeKind() {} + +func (*Type_MessageType) isType_TypeKind() {} + +func (*Type_TypeParam) isType_TypeKind() {} + +func (*Type_Type) isType_TypeKind() {} + +func 
(*Type_Error) isType_TypeKind() {} + +func (*Type_AbstractType_) isType_TypeKind() {} + +func (m *Type) GetTypeKind() isType_TypeKind { + if m != nil { + return m.TypeKind + } + return nil +} + +func (m *Type) GetDyn() *empty.Empty { + if x, ok := m.GetTypeKind().(*Type_Dyn); ok { + return x.Dyn + } + return nil +} + +func (m *Type) GetNull() _struct.NullValue { + if x, ok := m.GetTypeKind().(*Type_Null); ok { + return x.Null + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Type) GetPrimitive() Type_PrimitiveType { + if x, ok := m.GetTypeKind().(*Type_Primitive); ok { + return x.Primitive + } + return Type_PRIMITIVE_TYPE_UNSPECIFIED +} + +func (m *Type) GetWrapper() Type_PrimitiveType { + if x, ok := m.GetTypeKind().(*Type_Wrapper); ok { + return x.Wrapper + } + return Type_PRIMITIVE_TYPE_UNSPECIFIED +} + +func (m *Type) GetWellKnown() Type_WellKnownType { + if x, ok := m.GetTypeKind().(*Type_WellKnown); ok { + return x.WellKnown + } + return Type_WELL_KNOWN_TYPE_UNSPECIFIED +} + +func (m *Type) GetListType() *Type_ListType { + if x, ok := m.GetTypeKind().(*Type_ListType_); ok { + return x.ListType + } + return nil +} + +func (m *Type) GetMapType() *Type_MapType { + if x, ok := m.GetTypeKind().(*Type_MapType_); ok { + return x.MapType + } + return nil +} + +func (m *Type) GetFunction() *Type_FunctionType { + if x, ok := m.GetTypeKind().(*Type_Function); ok { + return x.Function + } + return nil +} + +func (m *Type) GetMessageType() string { + if x, ok := m.GetTypeKind().(*Type_MessageType); ok { + return x.MessageType + } + return "" +} + +func (m *Type) GetTypeParam() string { + if x, ok := m.GetTypeKind().(*Type_TypeParam); ok { + return x.TypeParam + } + return "" +} + +func (m *Type) GetType() *Type { + if x, ok := m.GetTypeKind().(*Type_Type); ok { + return x.Type + } + return nil +} + +func (m *Type) GetError() *empty.Empty { + if x, ok := m.GetTypeKind().(*Type_Error); ok { + return x.Error + } + return nil +} + +func (m *Type) GetAbstractType() *Type_AbstractType { + if x, ok := m.GetTypeKind().(*Type_AbstractType_); ok { + return x.AbstractType + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
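Construction goes through the same wrapper types, one per type_kind. Illustrative values only, assuming the generated declarations above and an import of github.com/golang/protobuf/ptypes/empty:

var (
	dynType    = &Type{TypeKind: &Type_Dyn{Dyn: &empty.Empty{}}}
	stringType = &Type{TypeKind: &Type_Primitive{Primitive: Type_STRING}}
	// The wrapper variant denotes the corresponding google.protobuf wrapper
	// message (e.g. StringValue) rather than the bare primitive.
	stringWrapperType = &Type{TypeKind: &Type_Wrapper{Wrapper: Type_STRING}}
	timestampType     = &Type{TypeKind: &Type_WellKnown{WellKnown: Type_TIMESTAMP}}
)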
+func (*Type) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Type_OneofMarshaler, _Type_OneofUnmarshaler, _Type_OneofSizer, []interface{}{ + (*Type_Dyn)(nil), + (*Type_Null)(nil), + (*Type_Primitive)(nil), + (*Type_Wrapper)(nil), + (*Type_WellKnown)(nil), + (*Type_ListType_)(nil), + (*Type_MapType_)(nil), + (*Type_Function)(nil), + (*Type_MessageType)(nil), + (*Type_TypeParam)(nil), + (*Type_Type)(nil), + (*Type_Error)(nil), + (*Type_AbstractType_)(nil), + } +} + +func _Type_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Type) + // type_kind + switch x := m.TypeKind.(type) { + case *Type_Dyn: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Dyn); err != nil { + return err + } + case *Type_Null: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Null)) + case *Type_Primitive: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Primitive)) + case *Type_Wrapper: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Wrapper)) + case *Type_WellKnown: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.WellKnown)) + case *Type_ListType_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListType); err != nil { + return err + } + case *Type_MapType_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapType); err != nil { + return err + } + case *Type_Function: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Function); err != nil { + return err + } + case *Type_MessageType: + b.EncodeVarint(9<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MessageType) + case *Type_TypeParam: + b.EncodeVarint(10<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TypeParam) + case *Type_Type: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Type); err != nil { + return err + } + case *Type_Error: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Error); err != nil { + return err + } + case *Type_AbstractType_: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AbstractType); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Type.TypeKind has unexpected type %T", x) + } + return nil +} + +func _Type_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Type) + switch tag { + case 1: // type_kind.dyn + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_Dyn{msg} + return true, err + case 2: // type_kind.null + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TypeKind = &Type_Null{_struct.NullValue(x)} + return true, err + case 3: // type_kind.primitive + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TypeKind = &Type_Primitive{Type_PrimitiveType(x)} + return true, err + case 4: // type_kind.wrapper + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TypeKind = &Type_Wrapper{Type_PrimitiveType(x)} + return true, err + case 5: // type_kind.well_known + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TypeKind = 
&Type_WellKnown{Type_WellKnownType(x)} + return true, err + case 6: // type_kind.list_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Type_ListType) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_ListType_{msg} + return true, err + case 7: // type_kind.map_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Type_MapType) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_MapType_{msg} + return true, err + case 8: // type_kind.function + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Type_FunctionType) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_Function{msg} + return true, err + case 9: // type_kind.message_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.TypeKind = &Type_MessageType{x} + return true, err + case 10: // type_kind.type_param + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.TypeKind = &Type_TypeParam{x} + return true, err + case 11: // type_kind.type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Type) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_Type{msg} + return true, err + case 12: // type_kind.error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_Error{msg} + return true, err + case 14: // type_kind.abstract_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Type_AbstractType) + err := b.DecodeMessage(msg) + m.TypeKind = &Type_AbstractType_{msg} + return true, err + default: + return false, nil + } +} + +func _Type_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Type) + // type_kind + switch x := m.TypeKind.(type) { + case *Type_Dyn: + s := proto.Size(x.Dyn) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_Null: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Null)) + case *Type_Primitive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Primitive)) + case *Type_Wrapper: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Wrapper)) + case *Type_WellKnown: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.WellKnown)) + case *Type_ListType_: + s := proto.Size(x.ListType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_MapType_: + s := proto.Size(x.MapType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_Function: + s := proto.Size(x.Function) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_MessageType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MessageType))) + n += len(x.MessageType) + case *Type_TypeParam: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TypeParam))) + n += len(x.TypeParam) + case *Type_Type: + s := proto.Size(x.Type) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_Error: + s := proto.Size(x.Error) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Type_AbstractType_: + s := proto.Size(x.AbstractType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// List type with typed elements, 
e.g. `list`. +type Type_ListType struct { + // The element type. + ElemType *Type `protobuf:"bytes,1,opt,name=elem_type,json=elemType,proto3" json:"elem_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type_ListType) Reset() { *m = Type_ListType{} } +func (m *Type_ListType) String() string { return proto.CompactTextString(m) } +func (*Type_ListType) ProtoMessage() {} +func (*Type_ListType) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 0} +} +func (m *Type_ListType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type_ListType.Unmarshal(m, b) +} +func (m *Type_ListType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type_ListType.Marshal(b, m, deterministic) +} +func (dst *Type_ListType) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type_ListType.Merge(dst, src) +} +func (m *Type_ListType) XXX_Size() int { + return xxx_messageInfo_Type_ListType.Size(m) +} +func (m *Type_ListType) XXX_DiscardUnknown() { + xxx_messageInfo_Type_ListType.DiscardUnknown(m) +} + +var xxx_messageInfo_Type_ListType proto.InternalMessageInfo + +func (m *Type_ListType) GetElemType() *Type { + if m != nil { + return m.ElemType + } + return nil +} + +// Map type with parameterized key and value types, e.g. `map`. +type Type_MapType struct { + // The type of the key. + KeyType *Type `protobuf:"bytes,1,opt,name=key_type,json=keyType,proto3" json:"key_type,omitempty"` + // The type of the value. + ValueType *Type `protobuf:"bytes,2,opt,name=value_type,json=valueType,proto3" json:"value_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type_MapType) Reset() { *m = Type_MapType{} } +func (m *Type_MapType) String() string { return proto.CompactTextString(m) } +func (*Type_MapType) ProtoMessage() {} +func (*Type_MapType) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 1} +} +func (m *Type_MapType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type_MapType.Unmarshal(m, b) +} +func (m *Type_MapType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type_MapType.Marshal(b, m, deterministic) +} +func (dst *Type_MapType) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type_MapType.Merge(dst, src) +} +func (m *Type_MapType) XXX_Size() int { + return xxx_messageInfo_Type_MapType.Size(m) +} +func (m *Type_MapType) XXX_DiscardUnknown() { + xxx_messageInfo_Type_MapType.DiscardUnknown(m) +} + +var xxx_messageInfo_Type_MapType proto.InternalMessageInfo + +func (m *Type_MapType) GetKeyType() *Type { + if m != nil { + return m.KeyType + } + return nil +} + +func (m *Type_MapType) GetValueType() *Type { + if m != nil { + return m.ValueType + } + return nil +} + +// Function type with result and arg types. +type Type_FunctionType struct { + // Result type of the function. + ResultType *Type `protobuf:"bytes,1,opt,name=result_type,json=resultType,proto3" json:"result_type,omitempty"` + // Argument types of the function. 
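Parameterized types are built by nesting these messages inside the corresponding type_kind wrappers. An illustrative sketch of the CEL types list<string> and map<string, int64> (variable names are ours):

var (
	listOfString = &Type{TypeKind: &Type_ListType_{ListType: &Type_ListType{
		ElemType: &Type{TypeKind: &Type_Primitive{Primitive: Type_STRING}},
	}}}
	mapStringInt64 = &Type{TypeKind: &Type_MapType_{MapType: &Type_MapType{
		KeyType:   &Type{TypeKind: &Type_Primitive{Primitive: Type_STRING}},
		ValueType: &Type{TypeKind: &Type_Primitive{Primitive: Type_INT64}},
	}}}
)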
+ ArgTypes []*Type `protobuf:"bytes,2,rep,name=arg_types,json=argTypes,proto3" json:"arg_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type_FunctionType) Reset() { *m = Type_FunctionType{} } +func (m *Type_FunctionType) String() string { return proto.CompactTextString(m) } +func (*Type_FunctionType) ProtoMessage() {} +func (*Type_FunctionType) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 2} +} +func (m *Type_FunctionType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type_FunctionType.Unmarshal(m, b) +} +func (m *Type_FunctionType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type_FunctionType.Marshal(b, m, deterministic) +} +func (dst *Type_FunctionType) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type_FunctionType.Merge(dst, src) +} +func (m *Type_FunctionType) XXX_Size() int { + return xxx_messageInfo_Type_FunctionType.Size(m) +} +func (m *Type_FunctionType) XXX_DiscardUnknown() { + xxx_messageInfo_Type_FunctionType.DiscardUnknown(m) +} + +var xxx_messageInfo_Type_FunctionType proto.InternalMessageInfo + +func (m *Type_FunctionType) GetResultType() *Type { + if m != nil { + return m.ResultType + } + return nil +} + +func (m *Type_FunctionType) GetArgTypes() []*Type { + if m != nil { + return m.ArgTypes + } + return nil +} + +// Application defined abstract type. +type Type_AbstractType struct { + // The fully qualified name of this abstract type. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Parameter types for this abstract type. + ParameterTypes []*Type `protobuf:"bytes,2,rep,name=parameter_types,json=parameterTypes,proto3" json:"parameter_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type_AbstractType) Reset() { *m = Type_AbstractType{} } +func (m *Type_AbstractType) String() string { return proto.CompactTextString(m) } +func (*Type_AbstractType) ProtoMessage() {} +func (*Type_AbstractType) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{1, 3} +} +func (m *Type_AbstractType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type_AbstractType.Unmarshal(m, b) +} +func (m *Type_AbstractType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type_AbstractType.Marshal(b, m, deterministic) +} +func (dst *Type_AbstractType) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type_AbstractType.Merge(dst, src) +} +func (m *Type_AbstractType) XXX_Size() int { + return xxx_messageInfo_Type_AbstractType.Size(m) +} +func (m *Type_AbstractType) XXX_DiscardUnknown() { + xxx_messageInfo_Type_AbstractType.DiscardUnknown(m) +} + +var xxx_messageInfo_Type_AbstractType proto.InternalMessageInfo + +func (m *Type_AbstractType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Type_AbstractType) GetParameterTypes() []*Type { + if m != nil { + return m.ParameterTypes + } + return nil +} + +// Represents a declaration of a named value or function. +// +// A declaration is part of the contract between the expression, the agent +// evaluating that expression, and the caller requesting evaluation. +type Decl struct { + // The fully qualified name of the declaration. 
+ // + // Declarations are organized in containers and this represents the full path + // to the declaration in its container, as in `google.api.expr.Decl`. + // + // Declarations used as + // [FunctionDecl.Overload][google.api.expr.v1alpha1.Decl.FunctionDecl.Overload] + // parameters may or may not have a name depending on whether the overload is + // function declaration or a function definition containing a result + // [Expr][google.api.expr.v1alpha1.Expr]. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The declaration kind. + // + // Types that are valid to be assigned to DeclKind: + // *Decl_Ident + // *Decl_Function + DeclKind isDecl_DeclKind `protobuf_oneof:"decl_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Decl) Reset() { *m = Decl{} } +func (m *Decl) String() string { return proto.CompactTextString(m) } +func (*Decl) ProtoMessage() {} +func (*Decl) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{2} +} +func (m *Decl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Decl.Unmarshal(m, b) +} +func (m *Decl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Decl.Marshal(b, m, deterministic) +} +func (dst *Decl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Decl.Merge(dst, src) +} +func (m *Decl) XXX_Size() int { + return xxx_messageInfo_Decl.Size(m) +} +func (m *Decl) XXX_DiscardUnknown() { + xxx_messageInfo_Decl.DiscardUnknown(m) +} + +var xxx_messageInfo_Decl proto.InternalMessageInfo + +func (m *Decl) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isDecl_DeclKind interface { + isDecl_DeclKind() +} + +type Decl_Ident struct { + Ident *Decl_IdentDecl `protobuf:"bytes,2,opt,name=ident,proto3,oneof"` +} + +type Decl_Function struct { + Function *Decl_FunctionDecl `protobuf:"bytes,3,opt,name=function,proto3,oneof"` +} + +func (*Decl_Ident) isDecl_DeclKind() {} + +func (*Decl_Function) isDecl_DeclKind() {} + +func (m *Decl) GetDeclKind() isDecl_DeclKind { + if m != nil { + return m.DeclKind + } + return nil +} + +func (m *Decl) GetIdent() *Decl_IdentDecl { + if x, ok := m.GetDeclKind().(*Decl_Ident); ok { + return x.Ident + } + return nil +} + +func (m *Decl) GetFunction() *Decl_FunctionDecl { + if x, ok := m.GetDeclKind().(*Decl_Function); ok { + return x.Function + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
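A Decl carries either an identifier or a function declaration in its decl_kind oneof. A sketch of an identifier declaration as a checker environment might supply it (the name and doc string are illustrative):

var requestSizeDecl = &Decl{
	Name: "request_size",
	DeclKind: &Decl_Ident{Ident: &Decl_IdentDecl{
		Type: &Type{TypeKind: &Type_Primitive{Primitive: Type_INT64}},
		Doc:  "Size of the request payload in bytes.",
	}},
}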
+func (*Decl) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Decl_OneofMarshaler, _Decl_OneofUnmarshaler, _Decl_OneofSizer, []interface{}{ + (*Decl_Ident)(nil), + (*Decl_Function)(nil), + } +} + +func _Decl_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Decl) + // decl_kind + switch x := m.DeclKind.(type) { + case *Decl_Ident: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Ident); err != nil { + return err + } + case *Decl_Function: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Function); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Decl.DeclKind has unexpected type %T", x) + } + return nil +} + +func _Decl_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Decl) + switch tag { + case 2: // decl_kind.ident + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Decl_IdentDecl) + err := b.DecodeMessage(msg) + m.DeclKind = &Decl_Ident{msg} + return true, err + case 3: // decl_kind.function + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Decl_FunctionDecl) + err := b.DecodeMessage(msg) + m.DeclKind = &Decl_Function{msg} + return true, err + default: + return false, nil + } +} + +func _Decl_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Decl) + // decl_kind + switch x := m.DeclKind.(type) { + case *Decl_Ident: + s := proto.Size(x.Ident) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Decl_Function: + s := proto.Size(x.Function) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Identifier declaration which specifies its type and optional `Expr` value. +// +// An identifier without a value is a declaration that must be provided at +// evaluation time. An identifier with a value should resolve to a constant, +// but may be used in conjunction with other identifiers bound at evaluation +// time. +type Decl_IdentDecl struct { + // Required. The type of the identifier. + Type *Type `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The constant value of the identifier. If not specified, the identifier + // must be supplied at evaluation time. + Value *Constant `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Documentation string for the identifier. 
+ Doc string `protobuf:"bytes,3,opt,name=doc,proto3" json:"doc,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Decl_IdentDecl) Reset() { *m = Decl_IdentDecl{} } +func (m *Decl_IdentDecl) String() string { return proto.CompactTextString(m) } +func (*Decl_IdentDecl) ProtoMessage() {} +func (*Decl_IdentDecl) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{2, 0} +} +func (m *Decl_IdentDecl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Decl_IdentDecl.Unmarshal(m, b) +} +func (m *Decl_IdentDecl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Decl_IdentDecl.Marshal(b, m, deterministic) +} +func (dst *Decl_IdentDecl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Decl_IdentDecl.Merge(dst, src) +} +func (m *Decl_IdentDecl) XXX_Size() int { + return xxx_messageInfo_Decl_IdentDecl.Size(m) +} +func (m *Decl_IdentDecl) XXX_DiscardUnknown() { + xxx_messageInfo_Decl_IdentDecl.DiscardUnknown(m) +} + +var xxx_messageInfo_Decl_IdentDecl proto.InternalMessageInfo + +func (m *Decl_IdentDecl) GetType() *Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *Decl_IdentDecl) GetValue() *Constant { + if m != nil { + return m.Value + } + return nil +} + +func (m *Decl_IdentDecl) GetDoc() string { + if m != nil { + return m.Doc + } + return "" +} + +// Function declaration specifies one or more overloads which indicate the +// function's parameter types and return type, and may optionally specify a +// function definition in terms of CEL expressions. +// +// Functions have no observable side-effects (there may be side-effects like +// logging which are not observable from CEL). +type Decl_FunctionDecl struct { + // Required. List of function overloads, must contain at least one overload. + Overloads []*Decl_FunctionDecl_Overload `protobuf:"bytes,1,rep,name=overloads,proto3" json:"overloads,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Decl_FunctionDecl) Reset() { *m = Decl_FunctionDecl{} } +func (m *Decl_FunctionDecl) String() string { return proto.CompactTextString(m) } +func (*Decl_FunctionDecl) ProtoMessage() {} +func (*Decl_FunctionDecl) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{2, 1} +} +func (m *Decl_FunctionDecl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Decl_FunctionDecl.Unmarshal(m, b) +} +func (m *Decl_FunctionDecl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Decl_FunctionDecl.Marshal(b, m, deterministic) +} +func (dst *Decl_FunctionDecl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Decl_FunctionDecl.Merge(dst, src) +} +func (m *Decl_FunctionDecl) XXX_Size() int { + return xxx_messageInfo_Decl_FunctionDecl.Size(m) +} +func (m *Decl_FunctionDecl) XXX_DiscardUnknown() { + xxx_messageInfo_Decl_FunctionDecl.DiscardUnknown(m) +} + +var xxx_messageInfo_Decl_FunctionDecl proto.InternalMessageInfo + +func (m *Decl_FunctionDecl) GetOverloads() []*Decl_FunctionDecl_Overload { + if m != nil { + return m.Overloads + } + return nil +} + +// An overload indicates a function's parameter types and return type, and +// may optionally include a function body described in terms of +// [Expr][google.api.expr.v1alpha1.Expr] values. +// +// Functions overloads are declared in either a function or method +// call-style. 
For methods, the `params[0]` is the expected type of the
+// target receiver.
+//
+// Overloads must have non-overlapping argument types after erasure of all
+// parameterized type variables (similar to type erasure in Java).
+type Decl_FunctionDecl_Overload struct {
+	// Required. Globally unique overload name of the function which reflects
+	// the function name and argument types.
+	//
+	// This will be used by a [Reference][google.api.expr.v1alpha1.Reference]
+	// to indicate the `overload_id` that was resolved for the function
+	// `name`.
+	OverloadId string `protobuf:"bytes,1,opt,name=overload_id,json=overloadId,proto3" json:"overload_id,omitempty"`
+	// List of function parameter [Type][google.api.expr.v1alpha1.Type]
+	// values.
+	//
+	// Param types are disjoint after generic type parameters have been
+	// replaced with the type `DYN`. Since the `DYN` type is compatible with
+	// any other type, this means that if `A` is a type parameter, the
+	// function types `int<A>` and `int<Foo>` are not disjoint. Likewise,
+	// `map<string, string>` is not disjoint from `map<K, V>`.
+	//
+	// When the `result_type` of a function is a generic type param, the
+	// type param name also appears as the `type` of at least one of the params.
+	Params []*Type `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty"`
+	// The type param names associated with the function declaration.
+	//
+	// For example, `function ex<K,V>(K key, map<K, V> map) : V` would yield
+	// the type params of `K, V`.
+	TypeParams []string `protobuf:"bytes,3,rep,name=type_params,json=typeParams,proto3" json:"type_params,omitempty"`
+	// Required. The result type of the function. For example, the operator
+	// `string.isEmpty()` would have `result_type` of `kind: BOOL`.
+	ResultType *Type `protobuf:"bytes,4,opt,name=result_type,json=resultType,proto3" json:"result_type,omitempty"`
+	// Whether the function is to be used in a method call-style `x.f(...)`
+	// or a function call-style `f(x, ...)`.
+	//
+	// For methods, the first parameter declaration, `params[0]`, is the
+	// expected type of the target receiver.
+	IsInstanceFunction bool `protobuf:"varint,5,opt,name=is_instance_function,json=isInstanceFunction,proto3" json:"is_instance_function,omitempty"`
+	// Documentation string for the overload.
+ Doc string `protobuf:"bytes,6,opt,name=doc,proto3" json:"doc,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Decl_FunctionDecl_Overload) Reset() { *m = Decl_FunctionDecl_Overload{} } +func (m *Decl_FunctionDecl_Overload) String() string { return proto.CompactTextString(m) } +func (*Decl_FunctionDecl_Overload) ProtoMessage() {} +func (*Decl_FunctionDecl_Overload) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{2, 1, 0} +} +func (m *Decl_FunctionDecl_Overload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Decl_FunctionDecl_Overload.Unmarshal(m, b) +} +func (m *Decl_FunctionDecl_Overload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Decl_FunctionDecl_Overload.Marshal(b, m, deterministic) +} +func (dst *Decl_FunctionDecl_Overload) XXX_Merge(src proto.Message) { + xxx_messageInfo_Decl_FunctionDecl_Overload.Merge(dst, src) +} +func (m *Decl_FunctionDecl_Overload) XXX_Size() int { + return xxx_messageInfo_Decl_FunctionDecl_Overload.Size(m) +} +func (m *Decl_FunctionDecl_Overload) XXX_DiscardUnknown() { + xxx_messageInfo_Decl_FunctionDecl_Overload.DiscardUnknown(m) +} + +var xxx_messageInfo_Decl_FunctionDecl_Overload proto.InternalMessageInfo + +func (m *Decl_FunctionDecl_Overload) GetOverloadId() string { + if m != nil { + return m.OverloadId + } + return "" +} + +func (m *Decl_FunctionDecl_Overload) GetParams() []*Type { + if m != nil { + return m.Params + } + return nil +} + +func (m *Decl_FunctionDecl_Overload) GetTypeParams() []string { + if m != nil { + return m.TypeParams + } + return nil +} + +func (m *Decl_FunctionDecl_Overload) GetResultType() *Type { + if m != nil { + return m.ResultType + } + return nil +} + +func (m *Decl_FunctionDecl_Overload) GetIsInstanceFunction() bool { + if m != nil { + return m.IsInstanceFunction + } + return false +} + +func (m *Decl_FunctionDecl_Overload) GetDoc() string { + if m != nil { + return m.Doc + } + return "" +} + +// Describes a resolved reference to a declaration. +type Reference struct { + // The fully qualified name of the declaration. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // For references to functions, this is a list of `Overload.overload_id` + // values which match according to typing rules. + // + // If the list has more than one element, overload resolution among the + // presented candidates must happen at runtime because of dynamic types. The + // type checker attempts to narrow down this list as much as possible. + // + // Empty if this is not a reference to a + // [Decl.FunctionDecl][google.api.expr.v1alpha1.Decl.FunctionDecl]. + OverloadId []string `protobuf:"bytes,3,rep,name=overload_id,json=overloadId,proto3" json:"overload_id,omitempty"` + // For references to constants, this may contain the value of the + // constant if known at compile time. 
+ Value *Constant `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Reference) Reset() { *m = Reference{} } +func (m *Reference) String() string { return proto.CompactTextString(m) } +func (*Reference) ProtoMessage() {} +func (*Reference) Descriptor() ([]byte, []int) { + return fileDescriptor_checked_c2e2f3dfa4cfeebf, []int{3} +} +func (m *Reference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reference.Unmarshal(m, b) +} +func (m *Reference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reference.Marshal(b, m, deterministic) +} +func (dst *Reference) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reference.Merge(dst, src) +} +func (m *Reference) XXX_Size() int { + return xxx_messageInfo_Reference.Size(m) +} +func (m *Reference) XXX_DiscardUnknown() { + xxx_messageInfo_Reference.DiscardUnknown(m) +} + +var xxx_messageInfo_Reference proto.InternalMessageInfo + +func (m *Reference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Reference) GetOverloadId() []string { + if m != nil { + return m.OverloadId + } + return nil +} + +func (m *Reference) GetValue() *Constant { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*CheckedExpr)(nil), "google.api.expr.v1alpha1.CheckedExpr") + proto.RegisterMapType((map[int64]*Reference)(nil), "google.api.expr.v1alpha1.CheckedExpr.ReferenceMapEntry") + proto.RegisterMapType((map[int64]*Type)(nil), "google.api.expr.v1alpha1.CheckedExpr.TypeMapEntry") + proto.RegisterType((*Type)(nil), "google.api.expr.v1alpha1.Type") + proto.RegisterType((*Type_ListType)(nil), "google.api.expr.v1alpha1.Type.ListType") + proto.RegisterType((*Type_MapType)(nil), "google.api.expr.v1alpha1.Type.MapType") + proto.RegisterType((*Type_FunctionType)(nil), "google.api.expr.v1alpha1.Type.FunctionType") + proto.RegisterType((*Type_AbstractType)(nil), "google.api.expr.v1alpha1.Type.AbstractType") + proto.RegisterType((*Decl)(nil), "google.api.expr.v1alpha1.Decl") + proto.RegisterType((*Decl_IdentDecl)(nil), "google.api.expr.v1alpha1.Decl.IdentDecl") + proto.RegisterType((*Decl_FunctionDecl)(nil), "google.api.expr.v1alpha1.Decl.FunctionDecl") + proto.RegisterType((*Decl_FunctionDecl_Overload)(nil), "google.api.expr.v1alpha1.Decl.FunctionDecl.Overload") + proto.RegisterType((*Reference)(nil), "google.api.expr.v1alpha1.Reference") + proto.RegisterEnum("google.api.expr.v1alpha1.Type_PrimitiveType", Type_PrimitiveType_name, Type_PrimitiveType_value) + proto.RegisterEnum("google.api.expr.v1alpha1.Type_WellKnownType", Type_WellKnownType_name, Type_WellKnownType_value) +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/checked.proto", fileDescriptor_checked_c2e2f3dfa4cfeebf) +} + +var fileDescriptor_checked_c2e2f3dfa4cfeebf = []byte{ + // 1144 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x56, 0x5f, 0x6f, 0xdb, 0xb6, + 0x17, 0x8d, 0x6c, 0xd9, 0x96, 0xae, 0x9c, 0xfe, 0xfc, 0x23, 0x86, 0x41, 0x50, 0x8b, 0x36, 0x70, + 0xb7, 0x2e, 0xd8, 0x06, 0xb9, 0xf5, 0x82, 0xae, 0x5d, 0x37, 0x6c, 0x71, 0xa2, 0x24, 0x42, 0xfd, + 0x0f, 0x8a, 0x93, 0x20, 0xc5, 0x00, 0x81, 0x91, 0x19, 0x57, 0xb0, 0x2c, 0x09, 0x94, 0x9c, 0xc4, + 0x7b, 0xdd, 0xd3, 0xb0, 0x7d, 0x9b, 0x7d, 0x89, 0x7d, 0x9a, 0xbd, 0x6e, 0x8f, 0x03, 0x29, 0xc9, + 0x71, 0xfe, 0x38, 0xb6, 0xdf, 0xae, 0xc8, 
0x73, 0x0e, 0x2f, 0x2f, 0xcf, 0x25, 0x05, 0x2f, 0x06, + 0x41, 0x30, 0xf0, 0x48, 0x0d, 0x87, 0x6e, 0x8d, 0x5c, 0x85, 0xb4, 0x76, 0xf1, 0x0a, 0x7b, 0xe1, + 0x47, 0xfc, 0xaa, 0xe6, 0x7c, 0x24, 0xce, 0x90, 0xf4, 0xf5, 0x90, 0x06, 0x71, 0x80, 0xd4, 0x04, + 0xa7, 0xe3, 0xd0, 0xd5, 0x19, 0x4e, 0xcf, 0x70, 0xda, 0xe7, 0x73, 0x15, 0xa2, 0x89, 0x1f, 0xe3, + 0xab, 0x44, 0x40, 0x7b, 0x9c, 0xc2, 0xf8, 0xd7, 0xd9, 0xf8, 0xbc, 0x46, 0x46, 0x61, 0x3c, 0x49, + 0x27, 0x9f, 0xdc, 0x9e, 0x8c, 0x62, 0x3a, 0x76, 0xe2, 0x64, 0xb6, 0xfa, 0x4f, 0x1e, 0x94, 0x9d, + 0x24, 0x1b, 0xe3, 0x2a, 0xa4, 0xe8, 0x67, 0x58, 0xa7, 0xe4, 0x9c, 0x50, 0xe2, 0x3b, 0xc4, 0x1e, + 0xe1, 0x50, 0xcd, 0x6d, 0xe4, 0x37, 0x95, 0xfa, 0xb7, 0xfa, 0xbc, 0x1c, 0xf5, 0x19, 0xb6, 0x6e, + 0x65, 0xd4, 0x16, 0x0e, 0x0d, 0x3f, 0xa6, 0x13, 0xab, 0x4c, 0x67, 0x86, 0x50, 0x0b, 0xa4, 0x78, + 0x12, 0x26, 0xc2, 0x79, 0x2e, 0x5c, 0x5f, 0x4e, 0xb8, 0x37, 0x09, 0xaf, 0x35, 0x4b, 0x71, 0xf2, + 0x85, 0x0c, 0x50, 0xa2, 0x60, 0x4c, 0x1d, 0x62, 0xbb, 0xfe, 0x79, 0xa0, 0x16, 0x36, 0x84, 0x4d, + 0xa5, 0xfe, 0xd9, 0x7c, 0xc5, 0x43, 0x0e, 0x36, 0xfd, 0xf3, 0xc0, 0x82, 0x68, 0x1a, 0xa3, 0x3a, + 0x88, 0x0c, 0xa7, 0x8a, 0x9c, 0xff, 0x74, 0x3e, 0x9f, 0xa5, 0x62, 0x71, 0xac, 0xd6, 0x87, 0xff, + 0xdf, 0xd9, 0x2c, 0xaa, 0x40, 0x7e, 0x48, 0x26, 0xaa, 0xb0, 0x21, 0x6c, 0xe6, 0x2d, 0x16, 0xa2, + 0xb7, 0x50, 0xb8, 0xc0, 0xde, 0x98, 0xa8, 0x39, 0xae, 0xfd, 0x7c, 0xbe, 0xf6, 0x54, 0xcd, 0x4a, + 0x18, 0xdf, 0xe5, 0xde, 0x08, 0xda, 0x07, 0x28, 0xcf, 0xee, 0xfc, 0x9e, 0x05, 0xb6, 0x6e, 0x2e, + 0xf0, 0x40, 0xf2, 0x4c, 0x68, 0x46, 0xbb, 0xfa, 0x97, 0x02, 0x22, 0x1b, 0x43, 0x5f, 0x42, 0xbe, + 0x3f, 0xf1, 0xb9, 0xa8, 0x52, 0xff, 0x34, 0x13, 0xc8, 0xec, 0xa2, 0x1b, 0xcc, 0x4b, 0x07, 0x6b, + 0x16, 0x03, 0xa1, 0x97, 0x20, 0xfa, 0x63, 0xcf, 0xe3, 0xab, 0x3d, 0xaa, 0x6b, 0x77, 0xc0, 0xed, + 0xb1, 0xe7, 0x1d, 0xb3, 0x25, 0x0e, 0xd6, 0x2c, 0x8e, 0x44, 0x4d, 0x90, 0x43, 0xea, 0x8e, 0xdc, + 0xd8, 0xbd, 0x20, 0x6a, 0x9e, 0xd3, 0xbe, 0x7e, 0x38, 0x49, 0xbd, 0x9b, 0xe1, 0xd9, 0xd7, 0xc1, + 0x9a, 0x75, 0x2d, 0x80, 0x0e, 0xa0, 0x74, 0x49, 0x71, 0x18, 0x92, 0xe4, 0xb4, 0x56, 0xd7, 0xca, + 0xe8, 0xa8, 0x05, 0x70, 0x49, 0x3c, 0xcf, 0x1e, 0xfa, 0xc1, 0xa5, 0xcf, 0xad, 0xb3, 0x58, 0xec, + 0x84, 0x78, 0xde, 0x7b, 0x86, 0xcf, 0x12, 0xbb, 0xcc, 0x06, 0xd0, 0x1e, 0xc8, 0x9e, 0x1b, 0xc5, + 0x36, 0xb3, 0xa6, 0x5a, 0xe4, 0xa5, 0xfc, 0x62, 0x81, 0x5a, 0xd3, 0x8d, 0xe2, 0x54, 0x48, 0xf2, + 0xd2, 0x18, 0xed, 0x80, 0x34, 0xc2, 0x61, 0x22, 0x53, 0xe2, 0x32, 0x2f, 0x16, 0xc8, 0xb4, 0x70, + 0x98, 0xed, 0x6d, 0x94, 0x84, 0xc8, 0x04, 0xe9, 0x7c, 0xec, 0x3b, 0xb1, 0x1b, 0xf8, 0xaa, 0xc4, + 0x45, 0xbe, 0x5a, 0x20, 0xb2, 0x97, 0xc2, 0xb3, 0x7c, 0x32, 0x3a, 0x7a, 0x0e, 0xe5, 0x11, 0x89, + 0x22, 0x3c, 0x20, 0x49, 0x4e, 0xf2, 0x86, 0xb0, 0x29, 0x1f, 0xac, 0x59, 0x4a, 0x3a, 0xca, 0xd7, + 0x7b, 0x06, 0xc0, 0xdb, 0x3a, 0xc4, 0x14, 0x8f, 0x54, 0x48, 0x21, 0x32, 0x1b, 0xeb, 0xb2, 0x21, + 0xb4, 0x05, 0x22, 0x67, 0x2b, 0xcb, 0x98, 0x94, 0x59, 0x87, 0xa1, 0x91, 0x0e, 0x05, 0x42, 0x69, + 0x40, 0xd5, 0xf2, 0x02, 0x6b, 0x26, 0x30, 0x64, 0xc1, 0x3a, 0x3e, 0x8b, 0x62, 0x8a, 0x9d, 0xf4, + 0x1c, 0x1e, 0x2d, 0xb5, 0xf7, 0xed, 0x94, 0x93, 0xae, 0x5d, 0xc6, 0x33, 0xdf, 0xda, 0x3e, 0x48, + 0xd9, 0x39, 0xa1, 0x77, 0x20, 0x13, 0x8f, 0x8c, 0x12, 0x6d, 0x61, 0xa9, 0x7e, 0x93, 0x18, 0x81, + 0x0b, 0xfd, 0x2a, 0x40, 0x29, 0x3d, 0x2a, 0xf4, 0x16, 0xa4, 0x21, 0x99, 0xac, 0xa2, 0x53, 0x1a, + 0x92, 0x09, 0xa7, 0xfe, 0x00, 0xc0, 0x5b, 0x38, 0x21, 0x2f, 0xd7, 0xf4, 0x32, 0x67, 0xf0, 0x2c, + 0xfe, 0x10, 0xa0, 0x3c, 0x7b, 0xd6, 0xe8, 0x47, 0x50, 0x28, 0x89, 
0xc6, 0x5e, 0xbc, 0x4a, 0x36, + 0x90, 0x50, 0xb2, 0xa2, 0x60, 0x3a, 0xe0, 0xec, 0x28, 0x7d, 0x2c, 0x16, 0x16, 0x05, 0xd3, 0x01, + 0x0b, 0x22, 0x6d, 0x08, 0xe5, 0xd9, 0xea, 0x23, 0x04, 0xa2, 0x8f, 0x47, 0x49, 0x1a, 0xb2, 0xc5, + 0x63, 0xb4, 0x0f, 0xff, 0xe3, 0xbe, 0x22, 0x31, 0xa1, 0x2b, 0x2d, 0xf3, 0x68, 0x4a, 0xe3, 0x8b, + 0x55, 0x23, 0x58, 0xbf, 0x71, 0x1b, 0xa0, 0xa7, 0xa0, 0x75, 0x2d, 0xb3, 0x65, 0xf6, 0xcc, 0x63, + 0xc3, 0xee, 0x9d, 0x76, 0x0d, 0xfb, 0xa8, 0x7d, 0xd8, 0x35, 0x76, 0xcc, 0x3d, 0xd3, 0xd8, 0xad, + 0xac, 0x21, 0x09, 0xc4, 0x46, 0xa7, 0xd3, 0xac, 0x08, 0x48, 0x86, 0x82, 0xd9, 0xee, 0xbd, 0xde, + 0xaa, 0xe4, 0x10, 0x40, 0xf1, 0x28, 0x89, 0xf3, 0x2c, 0xde, 0xed, 0x1c, 0x35, 0x9a, 0x46, 0x45, + 0x64, 0xf1, 0x61, 0xcf, 0x32, 0xdb, 0xfb, 0x95, 0x02, 0x83, 0x37, 0x4e, 0x7b, 0xc6, 0x61, 0xa5, + 0x58, 0x3d, 0x86, 0xf5, 0x1b, 0xb7, 0x06, 0x7a, 0x06, 0x8f, 0x4f, 0x8c, 0x66, 0xd3, 0x7e, 0xdf, + 0xee, 0x9c, 0xb4, 0xef, 0x5b, 0xb5, 0x04, 0xf9, 0xed, 0xf6, 0x69, 0x45, 0x40, 0xeb, 0x20, 0xf7, + 0xcc, 0x96, 0x71, 0xd8, 0xdb, 0x6e, 0x75, 0x2b, 0x39, 0x54, 0x06, 0x69, 0xf7, 0xc8, 0xda, 0xee, + 0x99, 0x9d, 0x76, 0x25, 0xdf, 0x50, 0x80, 0xb7, 0x97, 0x3d, 0x74, 0xfd, 0x7e, 0xf5, 0xcf, 0x02, + 0x88, 0xbb, 0xc4, 0xf1, 0xee, 0xad, 0xdf, 0x4f, 0x50, 0x70, 0xfb, 0xc4, 0x8f, 0x53, 0xb3, 0x6c, + 0xce, 0xaf, 0x1a, 0x93, 0xd0, 0x4d, 0x86, 0x65, 0x11, 0xeb, 0x2b, 0x4e, 0xbc, 0x71, 0x9d, 0xe4, + 0x17, 0xb5, 0x14, 0x17, 0xc9, 0x2c, 0x96, 0xea, 0x4c, 0xe9, 0xda, 0xef, 0x02, 0xc8, 0xd3, 0x15, + 0xd8, 0xc3, 0xbb, 0x82, 0xeb, 0x92, 0x4b, 0xe1, 0xcd, 0xcd, 0x07, 0xaf, 0xfa, 0xc0, 0xff, 0x43, + 0xe0, 0x47, 0x31, 0xf6, 0xe3, 0xf4, 0xd1, 0x63, 0x8f, 0x67, 0x3f, 0x70, 0xf8, 0x0e, 0x64, 0x8b, + 0x85, 0xda, 0xdf, 0xb9, 0xeb, 0x6e, 0xe0, 0x09, 0x59, 0x20, 0x07, 0x17, 0x84, 0x7a, 0x01, 0xee, + 0x47, 0xaa, 0xc0, 0x5d, 0xb6, 0xb5, 0xc2, 0x56, 0xf5, 0x4e, 0x4a, 0xb6, 0xae, 0x65, 0xb4, 0xdf, + 0x72, 0x20, 0x65, 0xe3, 0xe8, 0x19, 0x28, 0xd9, 0x8c, 0xed, 0xf6, 0xd3, 0x73, 0x82, 0x6c, 0xc8, + 0xec, 0xa3, 0xd7, 0x50, 0xe4, 0xb6, 0x5d, 0xd6, 0xe4, 0x29, 0x9a, 0x09, 0x5f, 0x5f, 0xc1, 0x11, + 0xff, 0xb9, 0x92, 0x2d, 0x98, 0xde, 0xc0, 0xd1, 0xed, 0x46, 0x17, 0x57, 0x6e, 0xf4, 0x97, 0xf0, + 0x89, 0x1b, 0xd9, 0x2e, 0xaf, 0xa9, 0x43, 0xec, 0xa9, 0x23, 0xd8, 0xd3, 0x29, 0x59, 0xc8, 0x8d, + 0xcc, 0x74, 0x2a, 0xab, 0x48, 0x56, 0xf0, 0xe2, 0xb4, 0xe0, 0xcc, 0xb5, 0x7d, 0xe2, 0x78, 0x89, + 0x6b, 0x7f, 0x01, 0x79, 0xfa, 0xd3, 0x73, 0xaf, 0x73, 0x6f, 0x15, 0x2b, 0xdd, 0xd3, 0x4c, 0xb1, + 0xa6, 0x5e, 0x10, 0x57, 0xf4, 0x42, 0xc3, 0x83, 0x27, 0x4e, 0x30, 0x9a, 0x8b, 0x6f, 0xc8, 0xec, + 0x38, 0xbb, 0xec, 0x9d, 0xe9, 0x0a, 0x1f, 0xbe, 0x4f, 0x61, 0x83, 0xc0, 0xc3, 0xfe, 0x40, 0x0f, + 0xe8, 0xa0, 0x36, 0x20, 0x3e, 0x7f, 0x85, 0x6a, 0xc9, 0x14, 0x0e, 0xdd, 0xe8, 0xee, 0x4f, 0xfa, + 0x3b, 0xf6, 0xf5, 0xaf, 0x20, 0x9c, 0x15, 0x39, 0xf6, 0x9b, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, + 0xee, 0x02, 0xe6, 0x8f, 0x11, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/conformance_service.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/conformance_service.pb.go new file mode 100644 index 0000000..63577b0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/conformance_service.pb.go @@ -0,0 +1,799 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/expr/v1alpha1/conformance_service.proto
+
+package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import status "google.golang.org/genproto/googleapis/rpc/status"
+
+import (
+	context "golang.org/x/net/context"
+	grpc "google.golang.org/grpc"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// Severities of issues.
+type IssueDetails_Severity int32
+
+const (
+	// An unspecified severity.
+	IssueDetails_SEVERITY_UNSPECIFIED IssueDetails_Severity = 0
+	// Deprecation issue for statements and methods that may no longer be
+	// supported or maintained.
+	IssueDetails_DEPRECATION IssueDetails_Severity = 1
+	// Warnings such as: unused variables.
+	IssueDetails_WARNING IssueDetails_Severity = 2
+	// Errors such as: unmatched curly braces or variable redefinition.
+	IssueDetails_ERROR IssueDetails_Severity = 3
+)
+
+var IssueDetails_Severity_name = map[int32]string{
+	0: "SEVERITY_UNSPECIFIED",
+	1: "DEPRECATION",
+	2: "WARNING",
+	3: "ERROR",
+}
+var IssueDetails_Severity_value = map[string]int32{
+	"SEVERITY_UNSPECIFIED": 0,
+	"DEPRECATION":          1,
+	"WARNING":              2,
+	"ERROR":                3,
+}
+
+func (x IssueDetails_Severity) String() string {
+	return proto.EnumName(IssueDetails_Severity_name, int32(x))
+}
+func (IssueDetails_Severity) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_conformance_service_35e6de9bb7108510, []int{6, 0}
+}
+
+// Request message for the Parse method.
+type ParseRequest struct {
+	// Required. Source text in CEL syntax.
+	CelSource string `protobuf:"bytes,1,opt,name=cel_source,json=celSource,proto3" json:"cel_source,omitempty"`
+	// Tag for version of CEL syntax, for future use.
+	SyntaxVersion string `protobuf:"bytes,2,opt,name=syntax_version,json=syntaxVersion,proto3" json:"syntax_version,omitempty"`
+	// File or resource for source text, used in
+	// [SourceInfo][google.api.expr.v1alpha1.SourceInfo].
+	SourceLocation string `protobuf:"bytes,3,opt,name=source_location,json=sourceLocation,proto3" json:"source_location,omitempty"`
+	// Prevent macro expansion. See "Macros" in the Language Definition.
+ DisableMacros bool `protobuf:"varint,4,opt,name=disable_macros,json=disableMacros,proto3" json:"disable_macros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParseRequest) Reset() { *m = ParseRequest{} } +func (m *ParseRequest) String() string { return proto.CompactTextString(m) } +func (*ParseRequest) ProtoMessage() {} +func (*ParseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{0} +} +func (m *ParseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParseRequest.Unmarshal(m, b) +} +func (m *ParseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParseRequest.Marshal(b, m, deterministic) +} +func (dst *ParseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParseRequest.Merge(dst, src) +} +func (m *ParseRequest) XXX_Size() int { + return xxx_messageInfo_ParseRequest.Size(m) +} +func (m *ParseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ParseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ParseRequest proto.InternalMessageInfo + +func (m *ParseRequest) GetCelSource() string { + if m != nil { + return m.CelSource + } + return "" +} + +func (m *ParseRequest) GetSyntaxVersion() string { + if m != nil { + return m.SyntaxVersion + } + return "" +} + +func (m *ParseRequest) GetSourceLocation() string { + if m != nil { + return m.SourceLocation + } + return "" +} + +func (m *ParseRequest) GetDisableMacros() bool { + if m != nil { + return m.DisableMacros + } + return false +} + +// Response message for the Parse method. +type ParseResponse struct { + // The parsed representation, or unset if parsing failed. + ParsedExpr *ParsedExpr `protobuf:"bytes,1,opt,name=parsed_expr,json=parsedExpr,proto3" json:"parsed_expr,omitempty"` + // Any number of issues with [StatusDetails][] as the details. + Issues []*status.Status `protobuf:"bytes,2,rep,name=issues,proto3" json:"issues,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParseResponse) Reset() { *m = ParseResponse{} } +func (m *ParseResponse) String() string { return proto.CompactTextString(m) } +func (*ParseResponse) ProtoMessage() {} +func (*ParseResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{1} +} +func (m *ParseResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParseResponse.Unmarshal(m, b) +} +func (m *ParseResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParseResponse.Marshal(b, m, deterministic) +} +func (dst *ParseResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParseResponse.Merge(dst, src) +} +func (m *ParseResponse) XXX_Size() int { + return xxx_messageInfo_ParseResponse.Size(m) +} +func (m *ParseResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ParseResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ParseResponse proto.InternalMessageInfo + +func (m *ParseResponse) GetParsedExpr() *ParsedExpr { + if m != nil { + return m.ParsedExpr + } + return nil +} + +func (m *ParseResponse) GetIssues() []*status.Status { + if m != nil { + return m.Issues + } + return nil +} + +// Request message for the Check method. +type CheckRequest struct { + // Required. The parsed representation of the CEL program. 
+ ParsedExpr *ParsedExpr `protobuf:"bytes,1,opt,name=parsed_expr,json=parsedExpr,proto3" json:"parsed_expr,omitempty"` + // Declarations of types for external variables and functions. + // Required if program uses external variables or functions + // not in the default environment. + TypeEnv []*Decl `protobuf:"bytes,2,rep,name=type_env,json=typeEnv,proto3" json:"type_env,omitempty"` + // The protocol buffer context. See "Name Resolution" in the + // Language Definition. + Container string `protobuf:"bytes,3,opt,name=container,proto3" json:"container,omitempty"` + // If true, use only the declarations in + // [type_env][google.api.expr.v1alpha1.CheckRequest.type_env]. If false + // (default), add declarations for the standard definitions to the type + // environment. See "Standard Definitions" in the Language Definition. + NoStdEnv bool `protobuf:"varint,4,opt,name=no_std_env,json=noStdEnv,proto3" json:"no_std_env,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckRequest) Reset() { *m = CheckRequest{} } +func (m *CheckRequest) String() string { return proto.CompactTextString(m) } +func (*CheckRequest) ProtoMessage() {} +func (*CheckRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{2} +} +func (m *CheckRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckRequest.Unmarshal(m, b) +} +func (m *CheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckRequest.Marshal(b, m, deterministic) +} +func (dst *CheckRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckRequest.Merge(dst, src) +} +func (m *CheckRequest) XXX_Size() int { + return xxx_messageInfo_CheckRequest.Size(m) +} +func (m *CheckRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckRequest proto.InternalMessageInfo + +func (m *CheckRequest) GetParsedExpr() *ParsedExpr { + if m != nil { + return m.ParsedExpr + } + return nil +} + +func (m *CheckRequest) GetTypeEnv() []*Decl { + if m != nil { + return m.TypeEnv + } + return nil +} + +func (m *CheckRequest) GetContainer() string { + if m != nil { + return m.Container + } + return "" +} + +func (m *CheckRequest) GetNoStdEnv() bool { + if m != nil { + return m.NoStdEnv + } + return false +} + +// Response message for the Check method. +type CheckResponse struct { + // The annotated representation, or unset if checking failed. + CheckedExpr *CheckedExpr `protobuf:"bytes,1,opt,name=checked_expr,json=checkedExpr,proto3" json:"checked_expr,omitempty"` + // Any number of issues with [StatusDetails][] as the details. 
+ Issues []*status.Status `protobuf:"bytes,2,rep,name=issues,proto3" json:"issues,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckResponse) Reset() { *m = CheckResponse{} } +func (m *CheckResponse) String() string { return proto.CompactTextString(m) } +func (*CheckResponse) ProtoMessage() {} +func (*CheckResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{3} +} +func (m *CheckResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckResponse.Unmarshal(m, b) +} +func (m *CheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckResponse.Marshal(b, m, deterministic) +} +func (dst *CheckResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckResponse.Merge(dst, src) +} +func (m *CheckResponse) XXX_Size() int { + return xxx_messageInfo_CheckResponse.Size(m) +} +func (m *CheckResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckResponse proto.InternalMessageInfo + +func (m *CheckResponse) GetCheckedExpr() *CheckedExpr { + if m != nil { + return m.CheckedExpr + } + return nil +} + +func (m *CheckResponse) GetIssues() []*status.Status { + if m != nil { + return m.Issues + } + return nil +} + +// Request message for the Eval method. +type EvalRequest struct { + // Required. Either the parsed or annotated representation of the CEL program. + // + // Types that are valid to be assigned to ExprKind: + // *EvalRequest_ParsedExpr + // *EvalRequest_CheckedExpr + ExprKind isEvalRequest_ExprKind `protobuf_oneof:"expr_kind"` + // Bindings for the external variables. The types SHOULD be compatible + // with the type environment in + // [CheckRequest][google.api.expr.v1alpha1.CheckRequest], if checked. + Bindings map[string]*ExprValue `protobuf:"bytes,3,rep,name=bindings,proto3" json:"bindings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // SHOULD be the same container as used in + // [CheckRequest][google.api.expr.v1alpha1.CheckRequest], if checked. 
+ Container string `protobuf:"bytes,4,opt,name=container,proto3" json:"container,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EvalRequest) Reset() { *m = EvalRequest{} } +func (m *EvalRequest) String() string { return proto.CompactTextString(m) } +func (*EvalRequest) ProtoMessage() {} +func (*EvalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{4} +} +func (m *EvalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EvalRequest.Unmarshal(m, b) +} +func (m *EvalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EvalRequest.Marshal(b, m, deterministic) +} +func (dst *EvalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_EvalRequest.Merge(dst, src) +} +func (m *EvalRequest) XXX_Size() int { + return xxx_messageInfo_EvalRequest.Size(m) +} +func (m *EvalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_EvalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_EvalRequest proto.InternalMessageInfo + +type isEvalRequest_ExprKind interface { + isEvalRequest_ExprKind() +} + +type EvalRequest_ParsedExpr struct { + ParsedExpr *ParsedExpr `protobuf:"bytes,1,opt,name=parsed_expr,json=parsedExpr,proto3,oneof"` +} + +type EvalRequest_CheckedExpr struct { + CheckedExpr *CheckedExpr `protobuf:"bytes,2,opt,name=checked_expr,json=checkedExpr,proto3,oneof"` +} + +func (*EvalRequest_ParsedExpr) isEvalRequest_ExprKind() {} + +func (*EvalRequest_CheckedExpr) isEvalRequest_ExprKind() {} + +func (m *EvalRequest) GetExprKind() isEvalRequest_ExprKind { + if m != nil { + return m.ExprKind + } + return nil +} + +func (m *EvalRequest) GetParsedExpr() *ParsedExpr { + if x, ok := m.GetExprKind().(*EvalRequest_ParsedExpr); ok { + return x.ParsedExpr + } + return nil +} + +func (m *EvalRequest) GetCheckedExpr() *CheckedExpr { + if x, ok := m.GetExprKind().(*EvalRequest_CheckedExpr); ok { + return x.CheckedExpr + } + return nil +} + +func (m *EvalRequest) GetBindings() map[string]*ExprValue { + if m != nil { + return m.Bindings + } + return nil +} + +func (m *EvalRequest) GetContainer() string { + if m != nil { + return m.Container + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*EvalRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _EvalRequest_OneofMarshaler, _EvalRequest_OneofUnmarshaler, _EvalRequest_OneofSizer, []interface{}{ + (*EvalRequest_ParsedExpr)(nil), + (*EvalRequest_CheckedExpr)(nil), + } +} + +func _EvalRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*EvalRequest) + // expr_kind + switch x := m.ExprKind.(type) { + case *EvalRequest_ParsedExpr: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ParsedExpr); err != nil { + return err + } + case *EvalRequest_CheckedExpr: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CheckedExpr); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("EvalRequest.ExprKind has unexpected type %T", x) + } + return nil +} + +func _EvalRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*EvalRequest) + switch tag { + case 1: // expr_kind.parsed_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ParsedExpr) + err := b.DecodeMessage(msg) + m.ExprKind = &EvalRequest_ParsedExpr{msg} + return true, err + case 2: // expr_kind.checked_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CheckedExpr) + err := b.DecodeMessage(msg) + m.ExprKind = &EvalRequest_CheckedExpr{msg} + return true, err + default: + return false, nil + } +} + +func _EvalRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*EvalRequest) + // expr_kind + switch x := m.ExprKind.(type) { + case *EvalRequest_ParsedExpr: + s := proto.Size(x.ParsedExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *EvalRequest_CheckedExpr: + s := proto.Size(x.CheckedExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for the Eval method. +type EvalResponse struct { + // The execution result, or unset if execution couldn't start. + Result *ExprValue `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + // Any number of issues with [StatusDetails][] as the details. + // Note that CEL execution errors are reified into + // [ExprValue][google.api.expr.v1alpha1.ExprValue]. Nevertheless, we'll allow + // out-of-band issues to be raised, which also makes the replies more regular. 
+ Issues []*status.Status `protobuf:"bytes,2,rep,name=issues,proto3" json:"issues,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EvalResponse) Reset() { *m = EvalResponse{} } +func (m *EvalResponse) String() string { return proto.CompactTextString(m) } +func (*EvalResponse) ProtoMessage() {} +func (*EvalResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{5} +} +func (m *EvalResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EvalResponse.Unmarshal(m, b) +} +func (m *EvalResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EvalResponse.Marshal(b, m, deterministic) +} +func (dst *EvalResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_EvalResponse.Merge(dst, src) +} +func (m *EvalResponse) XXX_Size() int { + return xxx_messageInfo_EvalResponse.Size(m) +} +func (m *EvalResponse) XXX_DiscardUnknown() { + xxx_messageInfo_EvalResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_EvalResponse proto.InternalMessageInfo + +func (m *EvalResponse) GetResult() *ExprValue { + if m != nil { + return m.Result + } + return nil +} + +func (m *EvalResponse) GetIssues() []*status.Status { + if m != nil { + return m.Issues + } + return nil +} + +// Warnings or errors in service execution are represented by +// [google.rpc.Status][google.rpc.Status] messages, with the following message +// in the details field. +type IssueDetails struct { + // The severity of the issue. + Severity IssueDetails_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=google.api.expr.v1alpha1.IssueDetails_Severity" json:"severity,omitempty"` + // Position in the source, if known. + Position *SourcePosition `protobuf:"bytes,2,opt,name=position,proto3" json:"position,omitempty"` + // Expression ID from [Expr][google.api.expr.v1alpha1.Expr], 0 if unknown. 
+ Id int64 `protobuf:"varint,3,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IssueDetails) Reset() { *m = IssueDetails{} } +func (m *IssueDetails) String() string { return proto.CompactTextString(m) } +func (*IssueDetails) ProtoMessage() {} +func (*IssueDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_conformance_service_35e6de9bb7108510, []int{6} +} +func (m *IssueDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IssueDetails.Unmarshal(m, b) +} +func (m *IssueDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IssueDetails.Marshal(b, m, deterministic) +} +func (dst *IssueDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_IssueDetails.Merge(dst, src) +} +func (m *IssueDetails) XXX_Size() int { + return xxx_messageInfo_IssueDetails.Size(m) +} +func (m *IssueDetails) XXX_DiscardUnknown() { + xxx_messageInfo_IssueDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_IssueDetails proto.InternalMessageInfo + +func (m *IssueDetails) GetSeverity() IssueDetails_Severity { + if m != nil { + return m.Severity + } + return IssueDetails_SEVERITY_UNSPECIFIED +} + +func (m *IssueDetails) GetPosition() *SourcePosition { + if m != nil { + return m.Position + } + return nil +} + +func (m *IssueDetails) GetId() int64 { + if m != nil { + return m.Id + } + return 0 +} + +func init() { + proto.RegisterType((*ParseRequest)(nil), "google.api.expr.v1alpha1.ParseRequest") + proto.RegisterType((*ParseResponse)(nil), "google.api.expr.v1alpha1.ParseResponse") + proto.RegisterType((*CheckRequest)(nil), "google.api.expr.v1alpha1.CheckRequest") + proto.RegisterType((*CheckResponse)(nil), "google.api.expr.v1alpha1.CheckResponse") + proto.RegisterType((*EvalRequest)(nil), "google.api.expr.v1alpha1.EvalRequest") + proto.RegisterMapType((map[string]*ExprValue)(nil), "google.api.expr.v1alpha1.EvalRequest.BindingsEntry") + proto.RegisterType((*EvalResponse)(nil), "google.api.expr.v1alpha1.EvalResponse") + proto.RegisterType((*IssueDetails)(nil), "google.api.expr.v1alpha1.IssueDetails") + proto.RegisterEnum("google.api.expr.v1alpha1.IssueDetails_Severity", IssueDetails_Severity_name, IssueDetails_Severity_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ConformanceServiceClient is the client API for ConformanceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ConformanceServiceClient interface { + // Transforms CEL source text into a parsed representation. + Parse(ctx context.Context, in *ParseRequest, opts ...grpc.CallOption) (*ParseResponse, error) + // Runs static checks on a parsed CEL representation and return + // an annotated representation, or a set of issues. + Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) + // Evaluates a parsed or annotation CEL representation given + // values of external bindings. 
+ Eval(ctx context.Context, in *EvalRequest, opts ...grpc.CallOption) (*EvalResponse, error) +} + +type conformanceServiceClient struct { + cc *grpc.ClientConn +} + +func NewConformanceServiceClient(cc *grpc.ClientConn) ConformanceServiceClient { + return &conformanceServiceClient{cc} +} + +func (c *conformanceServiceClient) Parse(ctx context.Context, in *ParseRequest, opts ...grpc.CallOption) (*ParseResponse, error) { + out := new(ParseResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.ConformanceService/Parse", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conformanceServiceClient) Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) { + out := new(CheckResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.ConformanceService/Check", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *conformanceServiceClient) Eval(ctx context.Context, in *EvalRequest, opts ...grpc.CallOption) (*EvalResponse, error) { + out := new(EvalResponse) + err := c.cc.Invoke(ctx, "/google.api.expr.v1alpha1.ConformanceService/Eval", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ConformanceServiceServer is the server API for ConformanceService service. +type ConformanceServiceServer interface { + // Transforms CEL source text into a parsed representation. + Parse(context.Context, *ParseRequest) (*ParseResponse, error) + // Runs static checks on a parsed CEL representation and return + // an annotated representation, or a set of issues. + Check(context.Context, *CheckRequest) (*CheckResponse, error) + // Evaluates a parsed or annotation CEL representation given + // values of external bindings. + Eval(context.Context, *EvalRequest) (*EvalResponse, error) +} + +func RegisterConformanceServiceServer(s *grpc.Server, srv ConformanceServiceServer) { + s.RegisterService(&_ConformanceService_serviceDesc, srv) +} + +func _ConformanceService_Parse_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ParseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConformanceServiceServer).Parse(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.ConformanceService/Parse", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConformanceServiceServer).Parse(ctx, req.(*ParseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConformanceService_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConformanceServiceServer).Check(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.ConformanceService/Check", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConformanceServiceServer).Check(ctx, req.(*CheckRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ConformanceService_Eval_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EvalRequest) + if err := dec(in); err != nil { + 
return nil, err + } + if interceptor == nil { + return srv.(ConformanceServiceServer).Eval(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.expr.v1alpha1.ConformanceService/Eval", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConformanceServiceServer).Eval(ctx, req.(*EvalRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ConformanceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.api.expr.v1alpha1.ConformanceService", + HandlerType: (*ConformanceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Parse", + Handler: _ConformanceService_Parse_Handler, + }, + { + MethodName: "Check", + Handler: _ConformanceService_Check_Handler, + }, + { + MethodName: "Eval", + Handler: _ConformanceService_Eval_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/api/expr/v1alpha1/conformance_service.proto", +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/conformance_service.proto", fileDescriptor_conformance_service_35e6de9bb7108510) +} + +var fileDescriptor_conformance_service_35e6de9bb7108510 = []byte{ + // 807 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x41, 0x6f, 0xdb, 0x36, + 0x18, 0xb5, 0xe4, 0x24, 0xb5, 0x3f, 0xd9, 0xa9, 0x41, 0x0c, 0xa8, 0x61, 0x64, 0x43, 0xa0, 0x2e, + 0x69, 0xb0, 0x83, 0x84, 0xba, 0x97, 0x75, 0xdd, 0xa5, 0xb1, 0xb5, 0xc6, 0xdb, 0x9a, 0x18, 0x74, + 0x97, 0x62, 0xbd, 0x68, 0x8c, 0xc4, 0xb9, 0x44, 0x14, 0x52, 0x23, 0x65, 0xcd, 0xde, 0x69, 0x18, + 0xb0, 0x7f, 0xb2, 0xfd, 0x9b, 0xfd, 0xa0, 0x1d, 0x07, 0x89, 0xb4, 0x63, 0xb7, 0x50, 0xd2, 0x0c, + 0xbd, 0x49, 0x9f, 0xde, 0x7b, 0xfa, 0xde, 0xe3, 0x47, 0x12, 0xfa, 0x53, 0x21, 0xa6, 0x09, 0xf5, + 0x49, 0xca, 0x7c, 0x3a, 0x4f, 0xa5, 0x9f, 0x3f, 0x26, 0x49, 0xfa, 0x96, 0x3c, 0xf6, 0x23, 0xc1, + 0x7f, 0x16, 0xf2, 0x8a, 0xf0, 0x88, 0x86, 0x8a, 0xca, 0x9c, 0x45, 0xd4, 0x4b, 0xa5, 0xc8, 0x04, + 0xea, 0x6a, 0x8e, 0x47, 0x52, 0xe6, 0x15, 0x1c, 0x6f, 0xc9, 0xe9, 0x1d, 0x56, 0xab, 0xbd, 0xa5, + 0xd1, 0x25, 0x8d, 0xb5, 0x42, 0xef, 0x61, 0x25, 0x8e, 0xe6, 0x24, 0x31, 0xa0, 0x83, 0x4a, 0x90, + 0x5a, 0xf0, 0x8c, 0xcc, 0x0d, 0xec, 0x81, 0x81, 0xc9, 0x34, 0xf2, 0x55, 0x46, 0xb2, 0x99, 0xd2, + 0x1f, 0xdc, 0xbf, 0x2c, 0x68, 0x8d, 0x89, 0x54, 0x14, 0xd3, 0x5f, 0x66, 0x54, 0x65, 0xe8, 0x53, + 0x80, 0x88, 0x26, 0xa1, 0x12, 0x33, 0x19, 0xd1, 0xae, 0xb5, 0x6f, 0x1d, 0x35, 0x71, 0x33, 0xa2, + 0xc9, 0xa4, 0x2c, 0xa0, 0x03, 0xd8, 0xd5, 0xc2, 0x61, 0x4e, 0xa5, 0x62, 0x82, 0x77, 0xed, 0x12, + 0xd2, 0xd6, 0xd5, 0x73, 0x5d, 0x44, 0x8f, 0xe0, 0xbe, 0x56, 0x08, 0x13, 0x11, 0x91, 0xac, 0xc0, + 0xd5, 0x4b, 0xdc, 0xae, 0x2e, 0x7f, 0x6f, 0xaa, 0x85, 0x5e, 0xcc, 0x14, 0xb9, 0x48, 0x68, 0x78, + 0x45, 0x22, 0x29, 0x54, 0x77, 0x6b, 0xdf, 0x3a, 0x6a, 0xe0, 0xb6, 0xa9, 0xbe, 0x2c, 0x8b, 0xee, + 0x1f, 0x16, 0xb4, 0x4d, 0x9b, 0x2a, 0x15, 0x5c, 0x51, 0x14, 0x80, 0x93, 0x16, 0x85, 0x38, 0x2c, + 0x6c, 0x97, 0x8d, 0x3a, 0xfd, 0xcf, 0xbd, 0xaa, 0xd4, 0xbd, 0x92, 0x1d, 0x07, 0xf3, 0x54, 0x62, + 0x48, 0x57, 0xcf, 0xe8, 0x0b, 0xd8, 0x61, 0x4a, 0xcd, 0xa8, 0xea, 0xda, 0xfb, 0xf5, 0x23, 0xa7, + 0x8f, 0x96, 0x0a, 0x32, 0x8d, 0xbc, 0x49, 0x99, 0x14, 0x36, 0x08, 0xf7, 0x1f, 0x0b, 0x5a, 0x83, + 0x62, 0x89, 0x96, 0x59, 0x7d, 0xa4, 0x1e, 0x9e, 0x42, 0x23, 0x5b, 0xa4, 0x34, 0xa4, 0x3c, 0x37, + 0x5d, 0x7c, 0x56, 0xad, 0x31, 0xa4, 0x51, 0x82, 0xef, 0x15, 0xf8, 0x80, 0xe7, 0x68, 0x0f, 0x9a, + 0x91, 0xe0, 0x19, 0x61, 0x9c, 0x4a, 0x93, 0xf0, 0x75, 0x01, 0xed, 0x01, 
0x70, 0x11, 0xaa, 0x2c, + 0x2e, 0xa5, 0x75, 0xb0, 0x0d, 0x2e, 0x26, 0x59, 0x1c, 0xf0, 0xdc, 0xfd, 0xd3, 0x82, 0xb6, 0xb1, + 0x63, 0x32, 0x3d, 0x81, 0x96, 0x19, 0xc1, 0x75, 0x43, 0x07, 0xd5, 0xcd, 0x0c, 0x34, 0xba, 0x74, + 0xe4, 0x44, 0xd7, 0x2f, 0x77, 0x8a, 0xf5, 0xf7, 0x3a, 0x38, 0x41, 0x4e, 0x92, 0x65, 0xaa, 0x2f, + 0xfe, 0x77, 0xaa, 0x27, 0xb5, 0x8d, 0x5c, 0xbf, 0x7d, 0xc7, 0x8e, 0x7d, 0x07, 0x3b, 0x27, 0xb5, + 0x4d, 0x43, 0x67, 0xd0, 0xb8, 0x60, 0x3c, 0x66, 0x7c, 0xaa, 0xba, 0xf5, 0xd2, 0xd2, 0x93, 0x6a, + 0x9d, 0x35, 0x37, 0xde, 0xb1, 0x61, 0x05, 0x3c, 0x93, 0x0b, 0xbc, 0x12, 0xd9, 0x5c, 0xb9, 0xad, + 0x77, 0x56, 0xae, 0xf7, 0x13, 0xb4, 0x37, 0x88, 0xa8, 0x03, 0xf5, 0x4b, 0xba, 0x30, 0xfb, 0xb1, + 0x78, 0x44, 0x4f, 0x61, 0x3b, 0x27, 0xc9, 0x8c, 0x1a, 0x5b, 0x0f, 0x6f, 0x68, 0x67, 0x9e, 0xca, + 0xf3, 0x02, 0x8a, 0x35, 0xe3, 0x2b, 0xfb, 0x4b, 0xeb, 0xd8, 0x81, 0x66, 0x81, 0x0a, 0x2f, 0x19, + 0x8f, 0xdd, 0x5f, 0xa1, 0xa5, 0x7b, 0x36, 0x83, 0xf0, 0x0c, 0x76, 0x24, 0x55, 0xb3, 0x24, 0x33, + 0xe9, 0x7f, 0x90, 0xb8, 0xa1, 0xdc, 0x6d, 0xed, 0x6d, 0x68, 0x8d, 0x8a, 0xc7, 0x21, 0xcd, 0x08, + 0x4b, 0x14, 0xfa, 0x0e, 0x1a, 0x8a, 0xe6, 0x54, 0xb2, 0x4c, 0x9b, 0xdd, 0xed, 0xfb, 0xd5, 0xff, + 0x5e, 0x67, 0x7a, 0x13, 0x43, 0xc3, 0x2b, 0x01, 0x34, 0x84, 0x46, 0x2a, 0x14, 0xcb, 0x96, 0xc7, + 0x94, 0xd3, 0x3f, 0xaa, 0x16, 0xd3, 0x07, 0xdc, 0xd8, 0xe0, 0xf1, 0x8a, 0x89, 0x76, 0xc1, 0x66, + 0x71, 0xb9, 0xb9, 0xea, 0xd8, 0x66, 0xb1, 0xfb, 0x12, 0x1a, 0xcb, 0x7f, 0xa1, 0x2e, 0x7c, 0x32, + 0x09, 0xce, 0x03, 0x3c, 0x7a, 0xf5, 0x63, 0xf8, 0xc3, 0xe9, 0x64, 0x1c, 0x0c, 0x46, 0xdf, 0x8c, + 0x82, 0x61, 0xa7, 0x86, 0xee, 0x83, 0x33, 0x0c, 0xc6, 0x38, 0x18, 0x3c, 0x7f, 0x35, 0x3a, 0x3b, + 0xed, 0x58, 0xc8, 0x81, 0x7b, 0xaf, 0x9f, 0xe3, 0xd3, 0xd1, 0xe9, 0x8b, 0x8e, 0x8d, 0x9a, 0xb0, + 0x1d, 0x60, 0x7c, 0x86, 0x3b, 0xf5, 0xfe, 0xdf, 0x36, 0xa0, 0xc1, 0xf5, 0x35, 0x32, 0xd1, 0xb7, + 0x08, 0x7a, 0x03, 0xdb, 0xe5, 0x60, 0xa3, 0xc3, 0x5b, 0x26, 0xdf, 0x0c, 0x5a, 0xef, 0xd1, 0xad, + 0x38, 0xbd, 0xb8, 0x6e, 0xad, 0xd0, 0x2e, 0x47, 0xfd, 0x26, 0xed, 0xf5, 0x83, 0xee, 0x26, 0xed, + 0x8d, 0x13, 0xc4, 0xad, 0xa1, 0xd7, 0xb0, 0x55, 0x8c, 0x12, 0x3a, 0xf8, 0xa0, 0xed, 0xd1, 0x3b, + 0xbc, 0x0d, 0xb6, 0x14, 0x3e, 0xfe, 0x0d, 0xf6, 0x22, 0x71, 0x55, 0x09, 0x3f, 0x7e, 0xf0, 0x7e, + 0x88, 0xe3, 0xe2, 0x8a, 0x1b, 0x5b, 0x6f, 0xbe, 0x36, 0xa4, 0xa9, 0x48, 0x08, 0x9f, 0x7a, 0x42, + 0x4e, 0xfd, 0x29, 0xe5, 0xe5, 0x05, 0xe8, 0xeb, 0x4f, 0x24, 0x65, 0xea, 0xfd, 0x3b, 0xf4, 0x59, + 0xf1, 0xf6, 0xaf, 0x65, 0x5d, 0xec, 0x94, 0xd8, 0x27, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xf9, + 0x66, 0xbb, 0xae, 0x09, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/eval.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/eval.pb.go new file mode 100644 index 0000000..580f397 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/eval.pb.go @@ -0,0 +1,427 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/eval.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// The state of an evaluation.
+//
+// Can represent an initial, partial, or completed state of evaluation.
+type EvalState struct {
+	// The unique values referenced in this message.
+	Values []*ExprValue `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"`
+	// An ordered list of results.
+	//
+	// Tracks the flow of evaluation through the expression.
+	// May be sparse.
+	Results []*EvalState_Result `protobuf:"bytes,3,rep,name=results,proto3" json:"results,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
+}
+
+func (m *EvalState) Reset() { *m = EvalState{} }
+func (m *EvalState) String() string { return proto.CompactTextString(m) }
+func (*EvalState) ProtoMessage() {}
+func (*EvalState) Descriptor() ([]byte, []int) {
+	return fileDescriptor_eval_f3602fc52254f2fe, []int{0}
+}
+func (m *EvalState) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_EvalState.Unmarshal(m, b)
+}
+func (m *EvalState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_EvalState.Marshal(b, m, deterministic)
+}
+func (dst *EvalState) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_EvalState.Merge(dst, src)
+}
+func (m *EvalState) XXX_Size() int {
+	return xxx_messageInfo_EvalState.Size(m)
+}
+func (m *EvalState) XXX_DiscardUnknown() {
+	xxx_messageInfo_EvalState.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_EvalState proto.InternalMessageInfo
+
+func (m *EvalState) GetValues() []*ExprValue {
+	if m != nil {
+		return m.Values
+	}
+	return nil
+}
+
+func (m *EvalState) GetResults() []*EvalState_Result {
+	if m != nil {
+		return m.Results
+	}
+	return nil
+}
+
+// A single evaluation result.
+type EvalState_Result struct {
+	// The id of the expression this result is for.
+	Expr int64 `protobuf:"varint,1,opt,name=expr,proto3" json:"expr,omitempty"`
+	// The index in `values` of the resulting value.
+ Value int64 `protobuf:"varint,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EvalState_Result) Reset() { *m = EvalState_Result{} } +func (m *EvalState_Result) String() string { return proto.CompactTextString(m) } +func (*EvalState_Result) ProtoMessage() {} +func (*EvalState_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_f3602fc52254f2fe, []int{0, 0} +} +func (m *EvalState_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EvalState_Result.Unmarshal(m, b) +} +func (m *EvalState_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EvalState_Result.Marshal(b, m, deterministic) +} +func (dst *EvalState_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_EvalState_Result.Merge(dst, src) +} +func (m *EvalState_Result) XXX_Size() int { + return xxx_messageInfo_EvalState_Result.Size(m) +} +func (m *EvalState_Result) XXX_DiscardUnknown() { + xxx_messageInfo_EvalState_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_EvalState_Result proto.InternalMessageInfo + +func (m *EvalState_Result) GetExpr() int64 { + if m != nil { + return m.Expr + } + return 0 +} + +func (m *EvalState_Result) GetValue() int64 { + if m != nil { + return m.Value + } + return 0 +} + +// The value of an evaluated expression. +type ExprValue struct { + // An expression can resolve to a value, error or unknown. + // + // Types that are valid to be assigned to Kind: + // *ExprValue_Value + // *ExprValue_Error + // *ExprValue_Unknown + Kind isExprValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExprValue) Reset() { *m = ExprValue{} } +func (m *ExprValue) String() string { return proto.CompactTextString(m) } +func (*ExprValue) ProtoMessage() {} +func (*ExprValue) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_f3602fc52254f2fe, []int{1} +} +func (m *ExprValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExprValue.Unmarshal(m, b) +} +func (m *ExprValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExprValue.Marshal(b, m, deterministic) +} +func (dst *ExprValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExprValue.Merge(dst, src) +} +func (m *ExprValue) XXX_Size() int { + return xxx_messageInfo_ExprValue.Size(m) +} +func (m *ExprValue) XXX_DiscardUnknown() { + xxx_messageInfo_ExprValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ExprValue proto.InternalMessageInfo + +type isExprValue_Kind interface { + isExprValue_Kind() +} + +type ExprValue_Value struct { + Value *Value `protobuf:"bytes,1,opt,name=value,proto3,oneof"` +} + +type ExprValue_Error struct { + Error *ErrorSet `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +type ExprValue_Unknown struct { + Unknown *UnknownSet `protobuf:"bytes,3,opt,name=unknown,proto3,oneof"` +} + +func (*ExprValue_Value) isExprValue_Kind() {} + +func (*ExprValue_Error) isExprValue_Kind() {} + +func (*ExprValue_Unknown) isExprValue_Kind() {} + +func (m *ExprValue) GetKind() isExprValue_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *ExprValue) GetValue() *Value { + if x, ok := m.GetKind().(*ExprValue_Value); ok { + return x.Value + } + return nil +} + +func (m *ExprValue) GetError() *ErrorSet { + if x, ok := m.GetKind().(*ExprValue_Error); ok { + return x.Error + } + return nil +} + 
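+// Editorial note: the sketch below is illustrative only and is not part of
+// the protoc-gen-go output. It shows one way a caller might branch on the
+// ExprValue oneof through the generated getters; the function name
+// describeExprValue is hypothetical.
+func describeExprValue(v *ExprValue) string {
+	switch {
+	case v.GetValue() != nil:
+		// Successful evaluation; Value is defined in value.pb.go of this package.
+		return "value: " + v.GetValue().String()
+	case v.GetError() != nil:
+		// One or more google.rpc.Status errors.
+		return fmt.Sprintf("errors: %d", len(v.GetError().GetErrors()))
+	case v.GetUnknown() != nil:
+		// IDs of the expressions whose values are unknown.
+		return fmt.Sprintf("unknown exprs: %v", v.GetUnknown().GetExprs())
+	default:
+		return "unset"
+	}
+}
+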
+func (m *ExprValue) GetUnknown() *UnknownSet { + if x, ok := m.GetKind().(*ExprValue_Unknown); ok { + return x.Unknown + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExprValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExprValue_OneofMarshaler, _ExprValue_OneofUnmarshaler, _ExprValue_OneofSizer, []interface{}{ + (*ExprValue_Value)(nil), + (*ExprValue_Error)(nil), + (*ExprValue_Unknown)(nil), + } +} + +func _ExprValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExprValue) + // kind + switch x := m.Kind.(type) { + case *ExprValue_Value: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Value); err != nil { + return err + } + case *ExprValue_Error: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Error); err != nil { + return err + } + case *ExprValue_Unknown: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Unknown); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExprValue.Kind has unexpected type %T", x) + } + return nil +} + +func _ExprValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExprValue) + switch tag { + case 1: // kind.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Value{msg} + return true, err + case 2: // kind.error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ErrorSet) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Error{msg} + return true, err + case 3: // kind.unknown + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UnknownSet) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Unknown{msg} + return true, err + default: + return false, nil + } +} + +func _ExprValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExprValue) + // kind + switch x := m.Kind.(type) { + case *ExprValue_Value: + s := proto.Size(x.Value) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExprValue_Error: + s := proto.Size(x.Error) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExprValue_Unknown: + s := proto.Size(x.Unknown) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A set of errors. +// +// The errors included depend on the context. See `ExprValue.error`. +type ErrorSet struct { + // The errors in the set. 
+ Errors []*status.Status `protobuf:"bytes,1,rep,name=errors,proto3" json:"errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorSet) Reset() { *m = ErrorSet{} } +func (m *ErrorSet) String() string { return proto.CompactTextString(m) } +func (*ErrorSet) ProtoMessage() {} +func (*ErrorSet) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_f3602fc52254f2fe, []int{2} +} +func (m *ErrorSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorSet.Unmarshal(m, b) +} +func (m *ErrorSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorSet.Marshal(b, m, deterministic) +} +func (dst *ErrorSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorSet.Merge(dst, src) +} +func (m *ErrorSet) XXX_Size() int { + return xxx_messageInfo_ErrorSet.Size(m) +} +func (m *ErrorSet) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorSet proto.InternalMessageInfo + +func (m *ErrorSet) GetErrors() []*status.Status { + if m != nil { + return m.Errors + } + return nil +} + +// A set of expressions for which the value is unknown. +// +// The unknowns included depend on the context. See `ExprValue.unknown`. +type UnknownSet struct { + // The ids of the expressions with unknown values. + Exprs []int64 `protobuf:"varint,1,rep,packed,name=exprs,proto3" json:"exprs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnknownSet) Reset() { *m = UnknownSet{} } +func (m *UnknownSet) String() string { return proto.CompactTextString(m) } +func (*UnknownSet) ProtoMessage() {} +func (*UnknownSet) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_f3602fc52254f2fe, []int{3} +} +func (m *UnknownSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnknownSet.Unmarshal(m, b) +} +func (m *UnknownSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnknownSet.Marshal(b, m, deterministic) +} +func (dst *UnknownSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnknownSet.Merge(dst, src) +} +func (m *UnknownSet) XXX_Size() int { + return xxx_messageInfo_UnknownSet.Size(m) +} +func (m *UnknownSet) XXX_DiscardUnknown() { + xxx_messageInfo_UnknownSet.DiscardUnknown(m) +} + +var xxx_messageInfo_UnknownSet proto.InternalMessageInfo + +func (m *UnknownSet) GetExprs() []int64 { + if m != nil { + return m.Exprs + } + return nil +} + +func init() { + proto.RegisterType((*EvalState)(nil), "google.api.expr.v1alpha1.EvalState") + proto.RegisterType((*EvalState_Result)(nil), "google.api.expr.v1alpha1.EvalState.Result") + proto.RegisterType((*ExprValue)(nil), "google.api.expr.v1alpha1.ExprValue") + proto.RegisterType((*ErrorSet)(nil), "google.api.expr.v1alpha1.ErrorSet") + proto.RegisterType((*UnknownSet)(nil), "google.api.expr.v1alpha1.UnknownSet") +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/eval.proto", fileDescriptor_eval_f3602fc52254f2fe) +} + +var fileDescriptor_eval_f3602fc52254f2fe = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xcf, 0x4b, 0xeb, 0x40, + 0x10, 0xc7, 0x5f, 0x5e, 0xda, 0xf4, 0xbd, 0xe9, 0x6d, 0x11, 0x0c, 0x45, 0xb0, 0xa4, 0x3d, 0x94, + 0x1e, 0x36, 0x34, 0x82, 0x82, 0xf5, 0x20, 0xc5, 0x82, 0xc7, 0x92, 0xa2, 0x07, 0x6f, 0x6b, 0x5d, + 0x62, 0xe8, 0x9a, 0x5d, 0x36, 0x3f, 0xec, 0xdf, 
0xe7, 0xd1, 0xbf, 0xc8, 0xa3, 0xec, 0x6c, 0x16, + 0x0f, 0x92, 0xde, 0x3a, 0xbb, 0x9f, 0xcf, 0x77, 0xa6, 0xd9, 0x81, 0x49, 0x26, 0x65, 0x26, 0x78, + 0xcc, 0x54, 0x1e, 0xf3, 0x83, 0xd2, 0x71, 0xb3, 0x60, 0x42, 0xbd, 0xb2, 0x45, 0xcc, 0x1b, 0x26, + 0xa8, 0xd2, 0xb2, 0x92, 0x24, 0xb4, 0x10, 0x65, 0x2a, 0xa7, 0x06, 0xa2, 0x0e, 0x1a, 0x4d, 0x3b, + 0xf5, 0x86, 0x89, 0x9a, 0x5b, 0x7f, 0x74, 0xda, 0x52, 0x5a, 0xed, 0xe2, 0xb2, 0x62, 0x55, 0x5d, + 0xda, 0x8b, 0xe8, 0xc3, 0x83, 0xff, 0xeb, 0x86, 0x89, 0x6d, 0xc5, 0x2a, 0x4e, 0x96, 0x10, 0xa0, + 0x55, 0x86, 0xde, 0xd8, 0x9f, 0x0d, 0x93, 0x09, 0xed, 0xea, 0x4b, 0xd7, 0x07, 0xa5, 0x1f, 0x0d, + 0x9b, 0xb6, 0x0a, 0xb9, 0x83, 0x81, 0xe6, 0x65, 0x2d, 0xaa, 0x32, 0xf4, 0xd1, 0x9e, 0x1f, 0xb1, + 0x5d, 0x4b, 0x9a, 0xa2, 0x92, 0x3a, 0x75, 0x94, 0x40, 0x60, 0x8f, 0x08, 0x81, 0x9e, 0x91, 0x42, + 0x6f, 0xec, 0xcd, 0xfc, 0x14, 0x7f, 0x93, 0x13, 0xe8, 0x63, 0xb7, 0xf0, 0x2f, 0x1e, 0xda, 0x22, + 0xfa, 0x34, 0x7f, 0xc2, 0xcd, 0x43, 0xae, 0x1c, 0x63, 0xc4, 0x61, 0x72, 0xde, 0x3d, 0x05, 0xf2, + 0xf7, 0x7f, 0xda, 0x18, 0x72, 0x0d, 0x7d, 0xae, 0xb5, 0xd4, 0x18, 0x3e, 0x4c, 0xa2, 0x23, 0xe3, + 0x1b, 0x6c, 0xcb, 0x2b, 0xe3, 0xa2, 0x42, 0x6e, 0x61, 0x50, 0x17, 0xfb, 0x42, 0xbe, 0x17, 0xa1, + 0x8f, 0xf6, 0xb4, 0xdb, 0x7e, 0xb0, 0xa0, 0xf5, 0x9d, 0xb6, 0x0a, 0xa0, 0xb7, 0xcf, 0x8b, 0x97, + 0xe8, 0x12, 0xfe, 0xb9, 0x78, 0x32, 0x87, 0x00, 0xe3, 0xdd, 0x7b, 0x10, 0x17, 0xaa, 0xd5, 0x8e, + 0x6e, 0xf1, 0x1d, 0xd3, 0x96, 0x88, 0x22, 0x80, 0x9f, 0x60, 0xf3, 0xa1, 0x4c, 0x53, 0x2b, 0xfa, + 0xa9, 0x2d, 0x56, 0x02, 0xce, 0x76, 0xf2, 0xad, 0x73, 0xb2, 0x15, 0xae, 0xc2, 0xc6, 0x2c, 0xc6, + 0xc6, 0x7b, 0xba, 0x69, 0xb1, 0x4c, 0x0a, 0x56, 0x64, 0x54, 0xea, 0x2c, 0xce, 0x78, 0x81, 0x6b, + 0x13, 0xdb, 0x2b, 0xa6, 0xf2, 0xf2, 0xf7, 0xe2, 0x2d, 0x4d, 0xf5, 0xe5, 0x79, 0xcf, 0x01, 0xb2, + 0x17, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x9d, 0x62, 0xde, 0x1d, 0xe2, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/explain.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/explain.pb.go new file mode 100644 index 0000000..e720868 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/explain.pb.go @@ -0,0 +1,157 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/explain.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Values of intermediate expressions produced when evaluating expression. +// Deprecated, use `EvalState` instead. +// +// Deprecated: Do not use. +type Explain struct { + // All of the observed values. + // + // The field value_index is an index in the values list. + // Separating values from steps is needed to remove redundant values. + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + // List of steps. + // + // Repeated evaluations of the same expression generate new ExprStep + // instances. 
The order of such ExprStep instances matches the order of + // elements returned by Comprehension.iter_range. + ExprSteps []*Explain_ExprStep `protobuf:"bytes,2,rep,name=expr_steps,json=exprSteps,proto3" json:"expr_steps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Explain) Reset() { *m = Explain{} } +func (m *Explain) String() string { return proto.CompactTextString(m) } +func (*Explain) ProtoMessage() {} +func (*Explain) Descriptor() ([]byte, []int) { + return fileDescriptor_explain_9d6f7c319e1e26a4, []int{0} +} +func (m *Explain) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Explain.Unmarshal(m, b) +} +func (m *Explain) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Explain.Marshal(b, m, deterministic) +} +func (dst *Explain) XXX_Merge(src proto.Message) { + xxx_messageInfo_Explain.Merge(dst, src) +} +func (m *Explain) XXX_Size() int { + return xxx_messageInfo_Explain.Size(m) +} +func (m *Explain) XXX_DiscardUnknown() { + xxx_messageInfo_Explain.DiscardUnknown(m) +} + +var xxx_messageInfo_Explain proto.InternalMessageInfo + +func (m *Explain) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +func (m *Explain) GetExprSteps() []*Explain_ExprStep { + if m != nil { + return m.ExprSteps + } + return nil +} + +// ID and value index of one step. +type Explain_ExprStep struct { + // ID of corresponding Expr node. + Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // Index of the value in the values list. + ValueIndex int32 `protobuf:"varint,2,opt,name=value_index,json=valueIndex,proto3" json:"value_index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Explain_ExprStep) Reset() { *m = Explain_ExprStep{} } +func (m *Explain_ExprStep) String() string { return proto.CompactTextString(m) } +func (*Explain_ExprStep) ProtoMessage() {} +func (*Explain_ExprStep) Descriptor() ([]byte, []int) { + return fileDescriptor_explain_9d6f7c319e1e26a4, []int{0, 0} +} +func (m *Explain_ExprStep) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Explain_ExprStep.Unmarshal(m, b) +} +func (m *Explain_ExprStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Explain_ExprStep.Marshal(b, m, deterministic) +} +func (dst *Explain_ExprStep) XXX_Merge(src proto.Message) { + xxx_messageInfo_Explain_ExprStep.Merge(dst, src) +} +func (m *Explain_ExprStep) XXX_Size() int { + return xxx_messageInfo_Explain_ExprStep.Size(m) +} +func (m *Explain_ExprStep) XXX_DiscardUnknown() { + xxx_messageInfo_Explain_ExprStep.DiscardUnknown(m) +} + +var xxx_messageInfo_Explain_ExprStep proto.InternalMessageInfo + +func (m *Explain_ExprStep) GetId() int64 { + if m != nil { + return m.Id + } + return 0 +} + +func (m *Explain_ExprStep) GetValueIndex() int32 { + if m != nil { + return m.ValueIndex + } + return 0 +} + +func init() { + proto.RegisterType((*Explain)(nil), "google.api.expr.v1alpha1.Explain") + proto.RegisterType((*Explain_ExprStep)(nil), "google.api.expr.v1alpha1.Explain.ExprStep") +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/explain.proto", fileDescriptor_explain_9d6f7c319e1e26a4) +} + +var fileDescriptor_explain_9d6f7c319e1e26a4 = []byte{ + // 261 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x90, 0xb1, 0x4b, 0x03, 0x31, + 
0x14, 0xc6, 0x79, 0x29, 0x56, 0x7d, 0x15, 0x87, 0x4c, 0xa1, 0x08, 0x3d, 0x44, 0xe4, 0x70, 0x48, + 0xa8, 0x0e, 0x82, 0x75, 0x2a, 0x38, 0x74, 0x2b, 0x27, 0x38, 0xb8, 0x94, 0xe8, 0x85, 0x18, 0x88, + 0x97, 0x70, 0x39, 0x4b, 0xff, 0x4a, 0xff, 0x1e, 0x47, 0x49, 0x2e, 0x37, 0x95, 0x9b, 0xee, 0xde, + 0xfb, 0x7e, 0xdf, 0xf7, 0x91, 0x87, 0xb7, 0xda, 0x39, 0x6d, 0x95, 0x90, 0xde, 0x08, 0x75, 0xf0, + 0xad, 0xd8, 0x2f, 0xa5, 0xf5, 0x5f, 0x72, 0x19, 0x27, 0x2b, 0x4d, 0xc3, 0x7d, 0xeb, 0x3a, 0x47, + 0x59, 0xcf, 0x71, 0xe9, 0x0d, 0x8f, 0x1c, 0x1f, 0xb8, 0xf9, 0xcd, 0x68, 0xc2, 0x5e, 0xda, 0x1f, + 0xd5, 0xfb, 0xaf, 0x7f, 0x01, 0x4f, 0x5f, 0xfa, 0x44, 0xfa, 0x88, 0xd3, 0x24, 0x05, 0x06, 0xc5, + 0xa4, 0x9c, 0xdd, 0x2f, 0xf8, 0x58, 0x38, 0x7f, 0x8b, 0x5c, 0x95, 0x71, 0xba, 0x41, 0x8c, 0xf2, + 0x2e, 0x74, 0xca, 0x07, 0x46, 0x92, 0xf9, 0x6e, 0xdc, 0x9c, 0xfb, 0xe2, 0xb7, 0x7d, 0xed, 0x94, + 0xaf, 0xce, 0x55, 0xfe, 0x0b, 0xf3, 0x15, 0x9e, 0x0d, 0x6b, 0x7a, 0x89, 0xc4, 0xd4, 0x0c, 0x0a, + 0x28, 0x27, 0x15, 0x31, 0x35, 0x5d, 0xe0, 0x2c, 0x15, 0xee, 0x4c, 0x53, 0xab, 0x03, 0x23, 0x05, + 0x94, 0x27, 0x15, 0xa6, 0xd5, 0x26, 0x6e, 0x9e, 0x08, 0x83, 0xb5, 0xc3, 0xab, 0x4f, 0xf7, 0x3d, + 0x5a, 0xbe, 0xbe, 0xc8, 0xed, 0xdb, 0xf8, 0xfc, 0x2d, 0xbc, 0x3f, 0x67, 0x52, 0x3b, 0x2b, 0x1b, + 0xcd, 0x5d, 0xab, 0x85, 0x56, 0x4d, 0x3a, 0x8e, 0xe8, 0x25, 0xe9, 0x4d, 0x38, 0xbe, 0xe2, 0x2a, + 0x4e, 0x7f, 0x00, 0x1f, 0xd3, 0xc4, 0x3e, 0xfc, 0x07, 0x00, 0x00, 0xff, 0xff, 0x34, 0xf2, 0xb9, + 0x9e, 0xb2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/syntax.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/syntax.pb.go new file mode 100644 index 0000000..c26f0de --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/syntax.pb.go @@ -0,0 +1,1572 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/syntax.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An expression together with source information as returned by the parser. +type ParsedExpr struct { + // The parsed expression. + Expr *Expr `protobuf:"bytes,2,opt,name=expr,proto3" json:"expr,omitempty"` + // The source info derived from input that generated the parsed `expr`. 
+ SourceInfo *SourceInfo `protobuf:"bytes,3,opt,name=source_info,json=sourceInfo,proto3" json:"source_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParsedExpr) Reset() { *m = ParsedExpr{} } +func (m *ParsedExpr) String() string { return proto.CompactTextString(m) } +func (*ParsedExpr) ProtoMessage() {} +func (*ParsedExpr) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{0} +} +func (m *ParsedExpr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParsedExpr.Unmarshal(m, b) +} +func (m *ParsedExpr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParsedExpr.Marshal(b, m, deterministic) +} +func (dst *ParsedExpr) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParsedExpr.Merge(dst, src) +} +func (m *ParsedExpr) XXX_Size() int { + return xxx_messageInfo_ParsedExpr.Size(m) +} +func (m *ParsedExpr) XXX_DiscardUnknown() { + xxx_messageInfo_ParsedExpr.DiscardUnknown(m) +} + +var xxx_messageInfo_ParsedExpr proto.InternalMessageInfo + +func (m *ParsedExpr) GetExpr() *Expr { + if m != nil { + return m.Expr + } + return nil +} + +func (m *ParsedExpr) GetSourceInfo() *SourceInfo { + if m != nil { + return m.SourceInfo + } + return nil +} + +// An abstract representation of a common expression. +// +// Expressions are abstractly represented as a collection of identifiers, +// select statements, function calls, literals, and comprehensions. All +// operators with the exception of the '.' operator are modelled as function +// calls. This makes it easy to represent new operators into the existing AST. +// +// All references within expressions must resolve to a +// [Decl][google.api.expr.v1alpha1.Decl] provided at type-check for an +// expression to be valid. A reference may either be a bare identifier `name` or +// a qualified identifier `google.api.name`. References may either refer to a +// value or a function declaration. +// +// For example, the expression `google.api.name.startsWith('expr')` references +// the declaration `google.api.name` within a +// [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression, and the +// function declaration `startsWith`. +type Expr struct { + // Required. An id assigned to this node by the parser which is unique in a + // given expression tree. This is used to associate type information and other + // attributes to a node in the parse tree. + Id int64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // Required. Variants of expressions. 
+ // + // Types that are valid to be assigned to ExprKind: + // *Expr_ConstExpr + // *Expr_IdentExpr + // *Expr_SelectExpr + // *Expr_CallExpr + // *Expr_ListExpr + // *Expr_StructExpr + // *Expr_ComprehensionExpr + ExprKind isExpr_ExprKind `protobuf_oneof:"expr_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr) Reset() { *m = Expr{} } +func (m *Expr) String() string { return proto.CompactTextString(m) } +func (*Expr) ProtoMessage() {} +func (*Expr) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1} +} +func (m *Expr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr.Unmarshal(m, b) +} +func (m *Expr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr.Marshal(b, m, deterministic) +} +func (dst *Expr) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr.Merge(dst, src) +} +func (m *Expr) XXX_Size() int { + return xxx_messageInfo_Expr.Size(m) +} +func (m *Expr) XXX_DiscardUnknown() { + xxx_messageInfo_Expr.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr proto.InternalMessageInfo + +func (m *Expr) GetId() int64 { + if m != nil { + return m.Id + } + return 0 +} + +type isExpr_ExprKind interface { + isExpr_ExprKind() +} + +type Expr_ConstExpr struct { + ConstExpr *Constant `protobuf:"bytes,3,opt,name=const_expr,json=constExpr,proto3,oneof"` +} + +type Expr_IdentExpr struct { + IdentExpr *Expr_Ident `protobuf:"bytes,4,opt,name=ident_expr,json=identExpr,proto3,oneof"` +} + +type Expr_SelectExpr struct { + SelectExpr *Expr_Select `protobuf:"bytes,5,opt,name=select_expr,json=selectExpr,proto3,oneof"` +} + +type Expr_CallExpr struct { + CallExpr *Expr_Call `protobuf:"bytes,6,opt,name=call_expr,json=callExpr,proto3,oneof"` +} + +type Expr_ListExpr struct { + ListExpr *Expr_CreateList `protobuf:"bytes,7,opt,name=list_expr,json=listExpr,proto3,oneof"` +} + +type Expr_StructExpr struct { + StructExpr *Expr_CreateStruct `protobuf:"bytes,8,opt,name=struct_expr,json=structExpr,proto3,oneof"` +} + +type Expr_ComprehensionExpr struct { + ComprehensionExpr *Expr_Comprehension `protobuf:"bytes,9,opt,name=comprehension_expr,json=comprehensionExpr,proto3,oneof"` +} + +func (*Expr_ConstExpr) isExpr_ExprKind() {} + +func (*Expr_IdentExpr) isExpr_ExprKind() {} + +func (*Expr_SelectExpr) isExpr_ExprKind() {} + +func (*Expr_CallExpr) isExpr_ExprKind() {} + +func (*Expr_ListExpr) isExpr_ExprKind() {} + +func (*Expr_StructExpr) isExpr_ExprKind() {} + +func (*Expr_ComprehensionExpr) isExpr_ExprKind() {} + +func (m *Expr) GetExprKind() isExpr_ExprKind { + if m != nil { + return m.ExprKind + } + return nil +} + +func (m *Expr) GetConstExpr() *Constant { + if x, ok := m.GetExprKind().(*Expr_ConstExpr); ok { + return x.ConstExpr + } + return nil +} + +func (m *Expr) GetIdentExpr() *Expr_Ident { + if x, ok := m.GetExprKind().(*Expr_IdentExpr); ok { + return x.IdentExpr + } + return nil +} + +func (m *Expr) GetSelectExpr() *Expr_Select { + if x, ok := m.GetExprKind().(*Expr_SelectExpr); ok { + return x.SelectExpr + } + return nil +} + +func (m *Expr) GetCallExpr() *Expr_Call { + if x, ok := m.GetExprKind().(*Expr_CallExpr); ok { + return x.CallExpr + } + return nil +} + +func (m *Expr) GetListExpr() *Expr_CreateList { + if x, ok := m.GetExprKind().(*Expr_ListExpr); ok { + return x.ListExpr + } + return nil +} + +func (m *Expr) GetStructExpr() *Expr_CreateStruct { + if x, ok := m.GetExprKind().(*Expr_StructExpr); ok { + return x.StructExpr + } + 
return nil +} + +func (m *Expr) GetComprehensionExpr() *Expr_Comprehension { + if x, ok := m.GetExprKind().(*Expr_ComprehensionExpr); ok { + return x.ComprehensionExpr + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Expr) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Expr_OneofMarshaler, _Expr_OneofUnmarshaler, _Expr_OneofSizer, []interface{}{ + (*Expr_ConstExpr)(nil), + (*Expr_IdentExpr)(nil), + (*Expr_SelectExpr)(nil), + (*Expr_CallExpr)(nil), + (*Expr_ListExpr)(nil), + (*Expr_StructExpr)(nil), + (*Expr_ComprehensionExpr)(nil), + } +} + +func _Expr_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Expr) + // expr_kind + switch x := m.ExprKind.(type) { + case *Expr_ConstExpr: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConstExpr); err != nil { + return err + } + case *Expr_IdentExpr: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IdentExpr); err != nil { + return err + } + case *Expr_SelectExpr: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SelectExpr); err != nil { + return err + } + case *Expr_CallExpr: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CallExpr); err != nil { + return err + } + case *Expr_ListExpr: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListExpr); err != nil { + return err + } + case *Expr_StructExpr: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructExpr); err != nil { + return err + } + case *Expr_ComprehensionExpr: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ComprehensionExpr); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Expr.ExprKind has unexpected type %T", x) + } + return nil +} + +func _Expr_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Expr) + switch tag { + case 3: // expr_kind.const_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Constant) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_ConstExpr{msg} + return true, err + case 4: // expr_kind.ident_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Ident) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_IdentExpr{msg} + return true, err + case 5: // expr_kind.select_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Select) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_SelectExpr{msg} + return true, err + case 6: // expr_kind.call_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Call) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_CallExpr{msg} + return true, err + case 7: // expr_kind.list_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_CreateList) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_ListExpr{msg} + return true, err + case 8: // expr_kind.struct_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_CreateStruct) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_StructExpr{msg} + return true, err + case 9: // expr_kind.comprehension_expr + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + msg := new(Expr_Comprehension) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_ComprehensionExpr{msg} + return true, err + default: + return false, nil + } +} + +func _Expr_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Expr) + // expr_kind + switch x := m.ExprKind.(type) { + case *Expr_ConstExpr: + s := proto.Size(x.ConstExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_IdentExpr: + s := proto.Size(x.IdentExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_SelectExpr: + s := proto.Size(x.SelectExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_CallExpr: + s := proto.Size(x.CallExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_ListExpr: + s := proto.Size(x.ListExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_StructExpr: + s := proto.Size(x.StructExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_ComprehensionExpr: + s := proto.Size(x.ComprehensionExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An identifier expression. e.g. `request`. +type Expr_Ident struct { + // Required. Holds a single, unqualified identifier, possibly preceded by a + // '.'. + // + // Qualified names are represented by the + // [Expr.Select][google.api.expr.v1alpha1.Expr.Select] expression. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Ident) Reset() { *m = Expr_Ident{} } +func (m *Expr_Ident) String() string { return proto.CompactTextString(m) } +func (*Expr_Ident) ProtoMessage() {} +func (*Expr_Ident) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 0} +} +func (m *Expr_Ident) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Ident.Unmarshal(m, b) +} +func (m *Expr_Ident) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Ident.Marshal(b, m, deterministic) +} +func (dst *Expr_Ident) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Ident.Merge(dst, src) +} +func (m *Expr_Ident) XXX_Size() int { + return xxx_messageInfo_Expr_Ident.Size(m) +} +func (m *Expr_Ident) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Ident.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Ident proto.InternalMessageInfo + +func (m *Expr_Ident) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A field selection expression. e.g. `request.auth`. +type Expr_Select struct { + // Required. The target of the selection expression. + // + // For example, in the select expression `request.auth`, the `request` + // portion of the expression is the `operand`. + Operand *Expr `protobuf:"bytes,1,opt,name=operand,proto3" json:"operand,omitempty"` + // Required. The name of the field to select. + // + // For example, in the select expression `request.auth`, the `auth` portion + // of the expression would be the `field`. + Field string `protobuf:"bytes,2,opt,name=field,proto3" json:"field,omitempty"` + // Whether the select is to be interpreted as a field presence test. + // + // This results from the macro `has(request.auth)`. 
+ TestOnly bool `protobuf:"varint,3,opt,name=test_only,json=testOnly,proto3" json:"test_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Select) Reset() { *m = Expr_Select{} } +func (m *Expr_Select) String() string { return proto.CompactTextString(m) } +func (*Expr_Select) ProtoMessage() {} +func (*Expr_Select) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 1} +} +func (m *Expr_Select) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Select.Unmarshal(m, b) +} +func (m *Expr_Select) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Select.Marshal(b, m, deterministic) +} +func (dst *Expr_Select) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Select.Merge(dst, src) +} +func (m *Expr_Select) XXX_Size() int { + return xxx_messageInfo_Expr_Select.Size(m) +} +func (m *Expr_Select) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Select.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Select proto.InternalMessageInfo + +func (m *Expr_Select) GetOperand() *Expr { + if m != nil { + return m.Operand + } + return nil +} + +func (m *Expr_Select) GetField() string { + if m != nil { + return m.Field + } + return "" +} + +func (m *Expr_Select) GetTestOnly() bool { + if m != nil { + return m.TestOnly + } + return false +} + +// A call expression, including calls to predefined functions and operators. +// +// For example, `value == 10`, `size(map_value)`. +type Expr_Call struct { + // The target of an method call-style expression. For example, `x` in + // `x.f()`. + Target *Expr `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + // Required. The name of the function or method being called. + Function string `protobuf:"bytes,2,opt,name=function,proto3" json:"function,omitempty"` + // The arguments. + Args []*Expr `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Call) Reset() { *m = Expr_Call{} } +func (m *Expr_Call) String() string { return proto.CompactTextString(m) } +func (*Expr_Call) ProtoMessage() {} +func (*Expr_Call) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 2} +} +func (m *Expr_Call) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Call.Unmarshal(m, b) +} +func (m *Expr_Call) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Call.Marshal(b, m, deterministic) +} +func (dst *Expr_Call) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Call.Merge(dst, src) +} +func (m *Expr_Call) XXX_Size() int { + return xxx_messageInfo_Expr_Call.Size(m) +} +func (m *Expr_Call) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Call.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Call proto.InternalMessageInfo + +func (m *Expr_Call) GetTarget() *Expr { + if m != nil { + return m.Target + } + return nil +} + +func (m *Expr_Call) GetFunction() string { + if m != nil { + return m.Function + } + return "" +} + +func (m *Expr_Call) GetArgs() []*Expr { + if m != nil { + return m.Args + } + return nil +} + +// A list creation expression. +// +// Lists may either be homogenous, e.g. `[1, 2, 3]`, or heterogenous, e.g. +// `dyn([1, 'hello', 2.0])` +type Expr_CreateList struct { + // The elements part of the list. 
+ Elements []*Expr `protobuf:"bytes,1,rep,name=elements,proto3" json:"elements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateList) Reset() { *m = Expr_CreateList{} } +func (m *Expr_CreateList) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateList) ProtoMessage() {} +func (*Expr_CreateList) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 3} +} +func (m *Expr_CreateList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateList.Unmarshal(m, b) +} +func (m *Expr_CreateList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateList.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateList) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateList.Merge(dst, src) +} +func (m *Expr_CreateList) XXX_Size() int { + return xxx_messageInfo_Expr_CreateList.Size(m) +} +func (m *Expr_CreateList) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateList.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateList proto.InternalMessageInfo + +func (m *Expr_CreateList) GetElements() []*Expr { + if m != nil { + return m.Elements + } + return nil +} + +// A map or message creation expression. +// +// Maps are constructed as `{'key_name': 'value'}`. Message construction is +// similar, but prefixed with a type name and composed of field ids: +// `types.MyType{field_id: 'value'}`. +type Expr_CreateStruct struct { + // The type name of the message to be created, empty when creating map + // literals. + MessageName string `protobuf:"bytes,1,opt,name=message_name,json=messageName,proto3" json:"message_name,omitempty"` + // The entries in the creation expression. + Entries []*Expr_CreateStruct_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateStruct) Reset() { *m = Expr_CreateStruct{} } +func (m *Expr_CreateStruct) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateStruct) ProtoMessage() {} +func (*Expr_CreateStruct) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 4} +} +func (m *Expr_CreateStruct) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateStruct.Unmarshal(m, b) +} +func (m *Expr_CreateStruct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateStruct.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateStruct) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateStruct.Merge(dst, src) +} +func (m *Expr_CreateStruct) XXX_Size() int { + return xxx_messageInfo_Expr_CreateStruct.Size(m) +} +func (m *Expr_CreateStruct) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateStruct.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateStruct proto.InternalMessageInfo + +func (m *Expr_CreateStruct) GetMessageName() string { + if m != nil { + return m.MessageName + } + return "" +} + +func (m *Expr_CreateStruct) GetEntries() []*Expr_CreateStruct_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// Represents an entry. +type Expr_CreateStruct_Entry struct { + // Required. An id assigned to this node by the parser which is unique + // in a given expression tree. This is used to associate type + // information and other attributes to the node. 
+ Id int64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The `Entry` key kinds. + // + // Types that are valid to be assigned to KeyKind: + // *Expr_CreateStruct_Entry_FieldKey + // *Expr_CreateStruct_Entry_MapKey + KeyKind isExpr_CreateStruct_Entry_KeyKind `protobuf_oneof:"key_kind"` + // Required. The value assigned to the key. + Value *Expr `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateStruct_Entry) Reset() { *m = Expr_CreateStruct_Entry{} } +func (m *Expr_CreateStruct_Entry) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateStruct_Entry) ProtoMessage() {} +func (*Expr_CreateStruct_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 4, 0} +} +func (m *Expr_CreateStruct_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateStruct_Entry.Unmarshal(m, b) +} +func (m *Expr_CreateStruct_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateStruct_Entry.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateStruct_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateStruct_Entry.Merge(dst, src) +} +func (m *Expr_CreateStruct_Entry) XXX_Size() int { + return xxx_messageInfo_Expr_CreateStruct_Entry.Size(m) +} +func (m *Expr_CreateStruct_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateStruct_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateStruct_Entry proto.InternalMessageInfo + +func (m *Expr_CreateStruct_Entry) GetId() int64 { + if m != nil { + return m.Id + } + return 0 +} + +type isExpr_CreateStruct_Entry_KeyKind interface { + isExpr_CreateStruct_Entry_KeyKind() +} + +type Expr_CreateStruct_Entry_FieldKey struct { + FieldKey string `protobuf:"bytes,2,opt,name=field_key,json=fieldKey,proto3,oneof"` +} + +type Expr_CreateStruct_Entry_MapKey struct { + MapKey *Expr `protobuf:"bytes,3,opt,name=map_key,json=mapKey,proto3,oneof"` +} + +func (*Expr_CreateStruct_Entry_FieldKey) isExpr_CreateStruct_Entry_KeyKind() {} + +func (*Expr_CreateStruct_Entry_MapKey) isExpr_CreateStruct_Entry_KeyKind() {} + +func (m *Expr_CreateStruct_Entry) GetKeyKind() isExpr_CreateStruct_Entry_KeyKind { + if m != nil { + return m.KeyKind + } + return nil +} + +func (m *Expr_CreateStruct_Entry) GetFieldKey() string { + if x, ok := m.GetKeyKind().(*Expr_CreateStruct_Entry_FieldKey); ok { + return x.FieldKey + } + return "" +} + +func (m *Expr_CreateStruct_Entry) GetMapKey() *Expr { + if x, ok := m.GetKeyKind().(*Expr_CreateStruct_Entry_MapKey); ok { + return x.MapKey + } + return nil +} + +func (m *Expr_CreateStruct_Entry) GetValue() *Expr { + if m != nil { + return m.Value + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Expr_CreateStruct_Entry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Expr_CreateStruct_Entry_OneofMarshaler, _Expr_CreateStruct_Entry_OneofUnmarshaler, _Expr_CreateStruct_Entry_OneofSizer, []interface{}{ + (*Expr_CreateStruct_Entry_FieldKey)(nil), + (*Expr_CreateStruct_Entry_MapKey)(nil), + } +} + +func _Expr_CreateStruct_Entry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Expr_CreateStruct_Entry) + // key_kind + switch x := m.KeyKind.(type) { + case *Expr_CreateStruct_Entry_FieldKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FieldKey) + case *Expr_CreateStruct_Entry_MapKey: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Expr_CreateStruct_Entry.KeyKind has unexpected type %T", x) + } + return nil +} + +func _Expr_CreateStruct_Entry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Expr_CreateStruct_Entry) + switch tag { + case 2: // key_kind.field_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.KeyKind = &Expr_CreateStruct_Entry_FieldKey{x} + return true, err + case 3: // key_kind.map_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr) + err := b.DecodeMessage(msg) + m.KeyKind = &Expr_CreateStruct_Entry_MapKey{msg} + return true, err + default: + return false, nil + } +} + +func _Expr_CreateStruct_Entry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Expr_CreateStruct_Entry) + // key_kind + switch x := m.KeyKind.(type) { + case *Expr_CreateStruct_Entry_FieldKey: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FieldKey))) + n += len(x.FieldKey) + case *Expr_CreateStruct_Entry_MapKey: + s := proto.Size(x.MapKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A comprehension expression applied to a list or map. +// +// Comprehensions are not part of the core syntax, but enabled with macros. +// A macro matches a specific call signature within a parsed AST and replaces +// the call with an alternate AST block. Macro expansion happens at parse +// time. +// +// The following macros are supported within CEL: +// +// Aggregate type macros may be applied to all elements in a list or all keys +// in a map: +// +// * `all`, `exists`, `exists_one` - test a predicate expression against +// the inputs and return `true` if the predicate is satisfied for all, +// any, or only one value `list.all(x, x < 10)`. +// * `filter` - test a predicate expression against the inputs and return +// the subset of elements which satisfy the predicate: +// `payments.filter(p, p > 1000)`. +// * `map` - apply an expression to all elements in the input and return the +// output aggregate type: `[1, 2, 3].map(i, i * i)`. +// +// The `has(m.x)` macro tests whether the property `x` is present in struct +// `m`. The semantics of this macro depend on the type of `m`. For proto2 +// messages `has(m.x)` is defined as 'defined, but not set`. For proto3, the +// macro tests whether the property is set to its default. For map and struct +// types, the macro tests whether the property `x` is defined on `m`. 
+type Expr_Comprehension struct { + // The name of the iteration variable. + IterVar string `protobuf:"bytes,1,opt,name=iter_var,json=iterVar,proto3" json:"iter_var,omitempty"` + // The range over which var iterates. + IterRange *Expr `protobuf:"bytes,2,opt,name=iter_range,json=iterRange,proto3" json:"iter_range,omitempty"` + // The name of the variable used for accumulation of the result. + AccuVar string `protobuf:"bytes,3,opt,name=accu_var,json=accuVar,proto3" json:"accu_var,omitempty"` + // The initial value of the accumulator. + AccuInit *Expr `protobuf:"bytes,4,opt,name=accu_init,json=accuInit,proto3" json:"accu_init,omitempty"` + // An expression which can contain iter_var and accu_var. + // + // Returns false when the result has been computed and may be used as + // a hint to short-circuit the remainder of the comprehension. + LoopCondition *Expr `protobuf:"bytes,5,opt,name=loop_condition,json=loopCondition,proto3" json:"loop_condition,omitempty"` + // An expression which can contain iter_var and accu_var. + // + // Computes the next value of accu_var. + LoopStep *Expr `protobuf:"bytes,6,opt,name=loop_step,json=loopStep,proto3" json:"loop_step,omitempty"` + // An expression which can contain accu_var. + // + // Computes the result. + Result *Expr `protobuf:"bytes,7,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Comprehension) Reset() { *m = Expr_Comprehension{} } +func (m *Expr_Comprehension) String() string { return proto.CompactTextString(m) } +func (*Expr_Comprehension) ProtoMessage() {} +func (*Expr_Comprehension) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{1, 5} +} +func (m *Expr_Comprehension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Comprehension.Unmarshal(m, b) +} +func (m *Expr_Comprehension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Comprehension.Marshal(b, m, deterministic) +} +func (dst *Expr_Comprehension) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Comprehension.Merge(dst, src) +} +func (m *Expr_Comprehension) XXX_Size() int { + return xxx_messageInfo_Expr_Comprehension.Size(m) +} +func (m *Expr_Comprehension) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Comprehension.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Comprehension proto.InternalMessageInfo + +func (m *Expr_Comprehension) GetIterVar() string { + if m != nil { + return m.IterVar + } + return "" +} + +func (m *Expr_Comprehension) GetIterRange() *Expr { + if m != nil { + return m.IterRange + } + return nil +} + +func (m *Expr_Comprehension) GetAccuVar() string { + if m != nil { + return m.AccuVar + } + return "" +} + +func (m *Expr_Comprehension) GetAccuInit() *Expr { + if m != nil { + return m.AccuInit + } + return nil +} + +func (m *Expr_Comprehension) GetLoopCondition() *Expr { + if m != nil { + return m.LoopCondition + } + return nil +} + +func (m *Expr_Comprehension) GetLoopStep() *Expr { + if m != nil { + return m.LoopStep + } + return nil +} + +func (m *Expr_Comprehension) GetResult() *Expr { + if m != nil { + return m.Result + } + return nil +} + +// Represents a primitive literal. +// +// Named 'Constant' here for backwards compatibility. +// +// This is similar as the primitives supported in the well-known type +// `google.protobuf.Value`, but richer so it can represent CEL's full range of +// primitives. 
+// +// Lists and structs are not included as constants as these aggregate types may +// contain [Expr][google.api.expr.v1alpha1.Expr] elements which require +// evaluation and are thus not constant. +// +// Examples of literals include: `"hello"`, `b'bytes'`, `1u`, `4.2`, `-2`, +// `true`, `null`. +type Constant struct { + // Required. The valid constant kinds. + // + // Types that are valid to be assigned to ConstantKind: + // *Constant_NullValue + // *Constant_BoolValue + // *Constant_Int64Value + // *Constant_Uint64Value + // *Constant_DoubleValue + // *Constant_StringValue + // *Constant_BytesValue + // *Constant_DurationValue + // *Constant_TimestampValue + ConstantKind isConstant_ConstantKind `protobuf_oneof:"constant_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Constant) Reset() { *m = Constant{} } +func (m *Constant) String() string { return proto.CompactTextString(m) } +func (*Constant) ProtoMessage() {} +func (*Constant) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{2} +} +func (m *Constant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Constant.Unmarshal(m, b) +} +func (m *Constant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Constant.Marshal(b, m, deterministic) +} +func (dst *Constant) XXX_Merge(src proto.Message) { + xxx_messageInfo_Constant.Merge(dst, src) +} +func (m *Constant) XXX_Size() int { + return xxx_messageInfo_Constant.Size(m) +} +func (m *Constant) XXX_DiscardUnknown() { + xxx_messageInfo_Constant.DiscardUnknown(m) +} + +var xxx_messageInfo_Constant proto.InternalMessageInfo + +type isConstant_ConstantKind interface { + isConstant_ConstantKind() +} + +type Constant_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Constant_BoolValue struct { + BoolValue bool `protobuf:"varint,2,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type Constant_Int64Value struct { + Int64Value int64 `protobuf:"varint,3,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type Constant_Uint64Value struct { + Uint64Value uint64 `protobuf:"varint,4,opt,name=uint64_value,json=uint64Value,proto3,oneof"` +} + +type Constant_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,5,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Constant_StringValue struct { + StringValue string `protobuf:"bytes,6,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Constant_BytesValue struct { + BytesValue []byte `protobuf:"bytes,7,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +type Constant_DurationValue struct { + DurationValue *duration.Duration `protobuf:"bytes,8,opt,name=duration_value,json=durationValue,proto3,oneof"` +} + +type Constant_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,9,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +func (*Constant_NullValue) isConstant_ConstantKind() {} + +func (*Constant_BoolValue) isConstant_ConstantKind() {} + +func (*Constant_Int64Value) isConstant_ConstantKind() {} + +func (*Constant_Uint64Value) isConstant_ConstantKind() {} + +func (*Constant_DoubleValue) isConstant_ConstantKind() {} + +func (*Constant_StringValue) isConstant_ConstantKind() {} + +func (*Constant_BytesValue) isConstant_ConstantKind() {} + +func (*Constant_DurationValue) isConstant_ConstantKind() {} + 
+func (*Constant_TimestampValue) isConstant_ConstantKind() {} + +func (m *Constant) GetConstantKind() isConstant_ConstantKind { + if m != nil { + return m.ConstantKind + } + return nil +} + +func (m *Constant) GetNullValue() _struct.NullValue { + if x, ok := m.GetConstantKind().(*Constant_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Constant) GetBoolValue() bool { + if x, ok := m.GetConstantKind().(*Constant_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *Constant) GetInt64Value() int64 { + if x, ok := m.GetConstantKind().(*Constant_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *Constant) GetUint64Value() uint64 { + if x, ok := m.GetConstantKind().(*Constant_Uint64Value); ok { + return x.Uint64Value + } + return 0 +} + +func (m *Constant) GetDoubleValue() float64 { + if x, ok := m.GetConstantKind().(*Constant_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Constant) GetStringValue() string { + if x, ok := m.GetConstantKind().(*Constant_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Constant) GetBytesValue() []byte { + if x, ok := m.GetConstantKind().(*Constant_BytesValue); ok { + return x.BytesValue + } + return nil +} + +func (m *Constant) GetDurationValue() *duration.Duration { + if x, ok := m.GetConstantKind().(*Constant_DurationValue); ok { + return x.DurationValue + } + return nil +} + +func (m *Constant) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetConstantKind().(*Constant_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Constant) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Constant_OneofMarshaler, _Constant_OneofUnmarshaler, _Constant_OneofSizer, []interface{}{ + (*Constant_NullValue)(nil), + (*Constant_BoolValue)(nil), + (*Constant_Int64Value)(nil), + (*Constant_Uint64Value)(nil), + (*Constant_DoubleValue)(nil), + (*Constant_StringValue)(nil), + (*Constant_BytesValue)(nil), + (*Constant_DurationValue)(nil), + (*Constant_TimestampValue)(nil), + } +} + +func _Constant_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Constant) + // constant_kind + switch x := m.ConstantKind.(type) { + case *Constant_NullValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Constant_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Constant_Int64Value: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *Constant_Uint64Value: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Uint64Value)) + case *Constant_DoubleValue: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Constant_StringValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Constant_BytesValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case *Constant_DurationValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DurationValue); err != nil { + return err + } + case *Constant_TimestampValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return 
err + } + case nil: + default: + return fmt.Errorf("Constant.ConstantKind has unexpected type %T", x) + } + return nil +} + +func _Constant_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Constant) + switch tag { + case 1: // constant_kind.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Constant_NullValue{_struct.NullValue(x)} + return true, err + case 2: // constant_kind.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Constant_BoolValue{x != 0} + return true, err + case 3: // constant_kind.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Constant_Int64Value{int64(x)} + return true, err + case 4: // constant_kind.uint64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Constant_Uint64Value{x} + return true, err + case 5: // constant_kind.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ConstantKind = &Constant_DoubleValue{math.Float64frombits(x)} + return true, err + case 6: // constant_kind.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ConstantKind = &Constant_StringValue{x} + return true, err + case 7: // constant_kind.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConstantKind = &Constant_BytesValue{x} + return true, err + case 8: // constant_kind.duration_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.ConstantKind = &Constant_DurationValue{msg} + return true, err + case 9: // constant_kind.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConstantKind = &Constant_TimestampValue{msg} + return true, err + default: + return false, nil + } +} + +func _Constant_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Constant) + // constant_kind + switch x := m.ConstantKind.(type) { + case *Constant_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Constant_BoolValue: + n += 1 // tag and wire + n += 1 + case *Constant_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *Constant_Uint64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Uint64Value)) + case *Constant_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Constant_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Constant_BytesValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case *Constant_DurationValue: + s := proto.Size(x.DurationValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Constant_TimestampValue: + s := proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// 
Source information collected at parse time. +type SourceInfo struct { + // The syntax version of the source, e.g. `cel1`. + SyntaxVersion string `protobuf:"bytes,1,opt,name=syntax_version,json=syntaxVersion,proto3" json:"syntax_version,omitempty"` + // The location name. All position information attached to an expression is + // relative to this location. + // + // The location could be a file, UI element, or similar. For example, + // `acme/app/AnvilPolicy.cel`. + Location string `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // Monotonically increasing list of character offsets where newlines appear. + // + // The line number of a given position is the index `i` where for a given + // `id` the `line_offsets[i] < id_positions[id] < line_offsets[i+1]`. The + // column may be derivd from `id_positions[id] - line_offsets[i]`. + LineOffsets []int32 `protobuf:"varint,3,rep,packed,name=line_offsets,json=lineOffsets,proto3" json:"line_offsets,omitempty"` + // A map from the parse node id (e.g. `Expr.id`) to the character offset + // within source. + Positions map[int64]int32 `protobuf:"bytes,4,rep,name=positions,proto3" json:"positions,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceInfo) Reset() { *m = SourceInfo{} } +func (m *SourceInfo) String() string { return proto.CompactTextString(m) } +func (*SourceInfo) ProtoMessage() {} +func (*SourceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{3} +} +func (m *SourceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceInfo.Unmarshal(m, b) +} +func (m *SourceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceInfo.Marshal(b, m, deterministic) +} +func (dst *SourceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceInfo.Merge(dst, src) +} +func (m *SourceInfo) XXX_Size() int { + return xxx_messageInfo_SourceInfo.Size(m) +} +func (m *SourceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceInfo proto.InternalMessageInfo + +func (m *SourceInfo) GetSyntaxVersion() string { + if m != nil { + return m.SyntaxVersion + } + return "" +} + +func (m *SourceInfo) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *SourceInfo) GetLineOffsets() []int32 { + if m != nil { + return m.LineOffsets + } + return nil +} + +func (m *SourceInfo) GetPositions() map[int64]int32 { + if m != nil { + return m.Positions + } + return nil +} + +// A specific position in source. +type SourcePosition struct { + // The soucre location name (e.g. file name). + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // The character offset. + Offset int32 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` + // The 1-based index of the starting line in the source text + // where the issue occurs, or 0 if unknown. + Line int32 `protobuf:"varint,3,opt,name=line,proto3" json:"line,omitempty"` + // The 0-based index of the starting position within the line of source text + // where the issue occurs. Only meaningful if line is nonzero. 
+ Column int32 `protobuf:"varint,4,opt,name=column,proto3" json:"column,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourcePosition) Reset() { *m = SourcePosition{} } +func (m *SourcePosition) String() string { return proto.CompactTextString(m) } +func (*SourcePosition) ProtoMessage() {} +func (*SourcePosition) Descriptor() ([]byte, []int) { + return fileDescriptor_syntax_467031a7c35e4eba, []int{4} +} +func (m *SourcePosition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourcePosition.Unmarshal(m, b) +} +func (m *SourcePosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourcePosition.Marshal(b, m, deterministic) +} +func (dst *SourcePosition) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourcePosition.Merge(dst, src) +} +func (m *SourcePosition) XXX_Size() int { + return xxx_messageInfo_SourcePosition.Size(m) +} +func (m *SourcePosition) XXX_DiscardUnknown() { + xxx_messageInfo_SourcePosition.DiscardUnknown(m) +} + +var xxx_messageInfo_SourcePosition proto.InternalMessageInfo + +func (m *SourcePosition) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *SourcePosition) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *SourcePosition) GetLine() int32 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *SourcePosition) GetColumn() int32 { + if m != nil { + return m.Column + } + return 0 +} + +func init() { + proto.RegisterType((*ParsedExpr)(nil), "google.api.expr.v1alpha1.ParsedExpr") + proto.RegisterType((*Expr)(nil), "google.api.expr.v1alpha1.Expr") + proto.RegisterType((*Expr_Ident)(nil), "google.api.expr.v1alpha1.Expr.Ident") + proto.RegisterType((*Expr_Select)(nil), "google.api.expr.v1alpha1.Expr.Select") + proto.RegisterType((*Expr_Call)(nil), "google.api.expr.v1alpha1.Expr.Call") + proto.RegisterType((*Expr_CreateList)(nil), "google.api.expr.v1alpha1.Expr.CreateList") + proto.RegisterType((*Expr_CreateStruct)(nil), "google.api.expr.v1alpha1.Expr.CreateStruct") + proto.RegisterType((*Expr_CreateStruct_Entry)(nil), "google.api.expr.v1alpha1.Expr.CreateStruct.Entry") + proto.RegisterType((*Expr_Comprehension)(nil), "google.api.expr.v1alpha1.Expr.Comprehension") + proto.RegisterType((*Constant)(nil), "google.api.expr.v1alpha1.Constant") + proto.RegisterType((*SourceInfo)(nil), "google.api.expr.v1alpha1.SourceInfo") + proto.RegisterMapType((map[int64]int32)(nil), "google.api.expr.v1alpha1.SourceInfo.PositionsEntry") + proto.RegisterType((*SourcePosition)(nil), "google.api.expr.v1alpha1.SourcePosition") +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/syntax.proto", fileDescriptor_syntax_467031a7c35e4eba) +} + +var fileDescriptor_syntax_467031a7c35e4eba = []byte{ + // 1134 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x56, 0xcf, 0x6e, 0x1b, 0xb7, + 0x13, 0xd6, 0xea, 0x9f, 0xb5, 0x23, 0x5b, 0xf9, 0xfd, 0x88, 0xa2, 0x50, 0x36, 0x69, 0xe2, 0x38, + 0x35, 0x90, 0xa2, 0x85, 0x04, 0x3b, 0x41, 0x90, 0xc6, 0xe9, 0x45, 0xae, 0x0b, 0x19, 0x29, 0x1c, + 0x77, 0x5d, 0xf8, 0x50, 0xa0, 0x10, 0xe8, 0x15, 0xa5, 0x2c, 0x4c, 0x91, 0x8b, 0x25, 0xd7, 0xb0, + 0xce, 0x3d, 0xf4, 0xd6, 0x97, 0x69, 0x5f, 0xa0, 0xef, 0xd1, 0x07, 0xe9, 0xa5, 0x40, 0x31, 0x43, + 0xae, 0xfc, 0x0f, 0x86, 0xd4, 0x1b, 0x39, 0xfc, 0xbe, 0x8f, 0xc3, 0x99, 0xe1, 0x90, 0xb0, 0x3d, + 0xd5, 0x7a, 0x2a, 0x45, 0x9f, 0x67, 0x69, 0x5f, 
0x5c, 0x66, 0x79, 0xff, 0x62, 0x87, 0xcb, 0xec, + 0x23, 0xdf, 0xe9, 0x9b, 0xb9, 0xb2, 0xfc, 0xb2, 0x97, 0xe5, 0xda, 0x6a, 0xd6, 0x75, 0xb0, 0x1e, + 0xcf, 0xd2, 0x1e, 0xc2, 0x7a, 0x25, 0x2c, 0x7a, 0xe2, 0x05, 0x08, 0x77, 0x56, 0x4c, 0xfa, 0xe3, + 0x22, 0xe7, 0x36, 0xd5, 0xca, 0x31, 0xa3, 0xc7, 0xb7, 0xd7, 0x8d, 0xcd, 0x8b, 0xc4, 0xfa, 0xd5, + 0xa7, 0xb7, 0x57, 0x6d, 0x3a, 0x13, 0xc6, 0xf2, 0x59, 0xe6, 0x00, 0x5b, 0xbf, 0x06, 0x00, 0xc7, + 0x3c, 0x37, 0x62, 0x7c, 0x70, 0x99, 0xe5, 0x6c, 0x17, 0xea, 0xb8, 0x7d, 0xb7, 0xba, 0x19, 0xbc, + 0x68, 0xef, 0x3e, 0xe9, 0xdd, 0xe7, 0x56, 0x0f, 0xd1, 0x31, 0x61, 0xd9, 0x01, 0xb4, 0x8d, 0x2e, + 0xf2, 0x44, 0x8c, 0x52, 0x35, 0xd1, 0xdd, 0x1a, 0x51, 0x3f, 0xbf, 0x9f, 0x7a, 0x42, 0xe0, 0x43, + 0x35, 0xd1, 0x31, 0x98, 0xc5, 0x78, 0xeb, 0xaf, 0x75, 0xa8, 0x93, 0x0f, 0x1d, 0xa8, 0xa6, 0x63, + 0xf2, 0xa0, 0x16, 0x57, 0xd3, 0x31, 0xdb, 0x07, 0x48, 0xb4, 0x32, 0x76, 0x44, 0x9e, 0x39, 0xf9, + 0xad, 0xfb, 0xe5, 0xf7, 0x11, 0xcb, 0x95, 0x1d, 0x56, 0xe2, 0x90, 0x78, 0x07, 0xce, 0x49, 0x48, + 0xc7, 0x42, 0x79, 0x91, 0xfa, 0x32, 0x1f, 0x91, 0xd3, 0x3b, 0x44, 0x02, 0xca, 0x10, 0x93, 0x64, + 0x86, 0xd0, 0x36, 0x42, 0x8a, 0xc4, 0xeb, 0x34, 0x48, 0x67, 0x7b, 0x89, 0xce, 0x09, 0x31, 0x86, + 0x95, 0x18, 0x1c, 0x97, 0x94, 0x06, 0x10, 0x26, 0x5c, 0x4a, 0xa7, 0xd3, 0x24, 0x9d, 0xe7, 0x4b, + 0x74, 0xf6, 0xb9, 0x94, 0xc3, 0x4a, 0xdc, 0x42, 0x9e, 0xf7, 0x26, 0x94, 0x69, 0x19, 0x98, 0x35, + 0xd2, 0xf8, 0x62, 0x99, 0x46, 0x2e, 0xb8, 0x15, 0xdf, 0xa7, 0x06, 0xfd, 0x69, 0x21, 0x9b, 0x94, + 0x8e, 0xa0, 0xed, 0xea, 0xc6, 0x69, 0xb5, 0x48, 0xeb, 0xcb, 0x95, 0xb4, 0x4e, 0x88, 0x47, 0xa7, + 0xa3, 0x11, 0xe9, 0xfd, 0x0c, 0x2c, 0xd1, 0xb3, 0x2c, 0x17, 0x1f, 0x85, 0x32, 0xa9, 0x56, 0x4e, + 0x36, 0x24, 0xd9, 0xaf, 0x96, 0xc9, 0x5e, 0x27, 0x0e, 0x2b, 0xf1, 0xff, 0x6f, 0x28, 0x21, 0x24, + 0x7a, 0x04, 0x0d, 0x4a, 0x0e, 0x63, 0x50, 0x57, 0x7c, 0x26, 0xba, 0xc1, 0x66, 0xf0, 0x22, 0x8c, + 0x69, 0x1c, 0x15, 0xd0, 0x74, 0x11, 0x67, 0x6f, 0x60, 0x4d, 0x67, 0x22, 0xe7, 0x6a, 0x4c, 0x80, + 0xe5, 0x05, 0x5d, 0xc2, 0xd9, 0x27, 0xd0, 0x98, 0xa4, 0x42, 0xba, 0x32, 0x0c, 0x63, 0x37, 0x61, + 0x8f, 0x20, 0xb4, 0xc2, 0xd8, 0x91, 0x56, 0x72, 0x4e, 0x85, 0xd8, 0x8a, 0x5b, 0x68, 0xf8, 0xa0, + 0xe4, 0x3c, 0xfa, 0x2d, 0x80, 0x3a, 0x66, 0x88, 0xbd, 0x86, 0xa6, 0xe5, 0xf9, 0x54, 0xd8, 0x15, + 0x37, 0xf5, 0x68, 0x16, 0x41, 0x6b, 0x52, 0xa8, 0x04, 0xef, 0xb6, 0xdf, 0x76, 0x31, 0xc7, 0x7b, + 0xc9, 0xf3, 0xa9, 0xe9, 0xd6, 0x36, 0x6b, 0xab, 0xdc, 0x4b, 0xc4, 0x46, 0x43, 0x80, 0xab, 0x6c, + 0xb3, 0xb7, 0xd0, 0x12, 0x52, 0xcc, 0x84, 0xb2, 0xa6, 0x1b, 0xac, 0xa4, 0xb2, 0xc0, 0x47, 0x7f, + 0x54, 0x61, 0xfd, 0x7a, 0xb2, 0xd9, 0x33, 0x58, 0x9f, 0x09, 0x63, 0xf8, 0x54, 0x8c, 0xae, 0x85, + 0xbf, 0xed, 0x6d, 0x47, 0x7c, 0x26, 0xd8, 0x7b, 0x58, 0x13, 0xca, 0xe6, 0xa9, 0x30, 0xdd, 0x2a, + 0x6d, 0xb7, 0xf3, 0x1f, 0xaa, 0xa9, 0x77, 0xa0, 0x6c, 0x3e, 0x8f, 0x4b, 0x85, 0xe8, 0xf7, 0x00, + 0x1a, 0x64, 0xf2, 0xcd, 0x21, 0x58, 0x34, 0x87, 0xcf, 0x20, 0xa4, 0xdc, 0x8c, 0xce, 0xc5, 0xdc, + 0x45, 0x0d, 0xeb, 0x9a, 0x4c, 0xef, 0xc5, 0x9c, 0x7d, 0x0d, 0x6b, 0x33, 0x9e, 0xd1, 0x62, 0x6d, + 0x95, 0x64, 0x0c, 0x2b, 0x71, 0x73, 0xc6, 0x33, 0xa4, 0xbe, 0x82, 0xc6, 0x05, 0x97, 0x85, 0xf0, + 0xcd, 0x62, 0x59, 0xb4, 0x1c, 0x78, 0x00, 0xd0, 0x3a, 0x17, 0xf3, 0xd1, 0x79, 0xaa, 0xc6, 0xd1, + 0x3f, 0x55, 0xd8, 0xb8, 0x51, 0xcc, 0xec, 0x21, 0xb4, 0x52, 0x2b, 0xf2, 0xd1, 0x05, 0xcf, 0x7d, + 0xcc, 0xd6, 0x70, 0x7e, 0xca, 0x73, 0xf6, 0x0d, 0x00, 0x2d, 0xe5, 0x5c, 0x4d, 0xc5, 0x8a, 0xfd, + 0x37, 0x44, 0x46, 0x8c, 0x04, 0x54, 0xe6, 0x49, 0x52, 0x90, 0x72, 0xcd, 
0x29, 0xe3, 0x1c, 0x95, + 0xf7, 0x20, 0xa4, 0xa5, 0x54, 0xa5, 0x76, 0xc5, 0xc3, 0x90, 0xd6, 0xa1, 0x4a, 0x2d, 0x3b, 0x80, + 0x8e, 0xd4, 0x3a, 0x1b, 0x25, 0x5a, 0x8d, 0x53, 0x2a, 0xcd, 0xc6, 0x4a, 0x0a, 0x1b, 0xc8, 0xda, + 0x2f, 0x49, 0xe8, 0x03, 0xc9, 0x18, 0x2b, 0x32, 0xdf, 0xed, 0x96, 0xfa, 0x80, 0x84, 0x13, 0x2b, + 0x32, 0xbc, 0x50, 0xb9, 0x30, 0x85, 0xb4, 0xbe, 0xc7, 0x2d, 0xbd, 0x50, 0x0e, 0x3d, 0x68, 0x43, + 0x88, 0xab, 0x94, 0x8c, 0xad, 0x3f, 0x6b, 0xd0, 0x2a, 0x9f, 0x06, 0xb6, 0x07, 0xa0, 0x0a, 0x29, + 0x47, 0x2e, 0xc1, 0x98, 0x89, 0xce, 0x6e, 0x54, 0xaa, 0x96, 0x6f, 0x65, 0xef, 0xa8, 0x90, 0xf2, + 0x14, 0x11, 0xf8, 0x06, 0xa8, 0x72, 0xc2, 0x9e, 0x02, 0x9c, 0x69, 0x5d, 0x92, 0x31, 0x53, 0x2d, + 0x04, 0xa0, 0xcd, 0x01, 0x9e, 0x41, 0x3b, 0x55, 0xf6, 0xf5, 0x2b, 0x8f, 0xc0, 0x74, 0xd4, 0xb0, + 0x3f, 0x92, 0xd1, 0x41, 0x9e, 0xc3, 0x7a, 0x71, 0x1d, 0x83, 0x69, 0xa9, 0x0f, 0x2b, 0x71, 0xbb, + 0xb8, 0x09, 0x1a, 0xeb, 0xe2, 0x4c, 0x0a, 0x0f, 0xc2, 0xc8, 0x07, 0x08, 0x72, 0xd6, 0x05, 0xc8, + 0xd8, 0x3c, 0x55, 0x53, 0x0f, 0x6a, 0xfa, 0x3b, 0xd0, 0x76, 0xd6, 0x85, 0x47, 0x67, 0x73, 0x2b, + 0x8c, 0xc7, 0x60, 0x18, 0xd7, 0xd1, 0x23, 0x32, 0x3a, 0xc8, 0x77, 0xd0, 0x29, 0x7f, 0x16, 0x1e, + 0xe5, 0x1e, 0x81, 0x87, 0x77, 0xc2, 0xf2, 0xad, 0x87, 0x0d, 0xaa, 0x5d, 0xf4, 0x66, 0xa3, 0xa4, + 0x39, 0x9d, 0x43, 0x78, 0xb0, 0xf8, 0x63, 0x78, 0x21, 0xd7, 0xf6, 0xef, 0xc6, 0xf7, 0xc7, 0x12, + 0xe7, 0x95, 0x3a, 0x0b, 0x22, 0x49, 0x0d, 0x1e, 0xc0, 0x46, 0xe2, 0x33, 0xe6, 0x72, 0xf8, 0x4b, + 0x15, 0xe0, 0xea, 0xf7, 0xc0, 0xb6, 0xa1, 0xe3, 0x3e, 0x51, 0xa3, 0x0b, 0x91, 0xe3, 0xfd, 0xf2, + 0x77, 0x6a, 0xc3, 0x59, 0x4f, 0x9d, 0x11, 0xfb, 0xaa, 0xd4, 0x09, 0xbf, 0xde, 0x57, 0xcb, 0x39, + 0x36, 0x32, 0x99, 0x2a, 0x31, 0xd2, 0x93, 0x89, 0x11, 0xd6, 0xf5, 0xd7, 0x46, 0xdc, 0x46, 0xdb, + 0x07, 0x67, 0x62, 0x3f, 0x40, 0x98, 0x69, 0x43, 0x65, 0x6c, 0xba, 0x75, 0x6a, 0x65, 0x2f, 0x57, + 0xf9, 0xdc, 0xf4, 0x8e, 0x4b, 0x96, 0x6b, 0x66, 0x57, 0x2a, 0xd1, 0x3b, 0xe8, 0xdc, 0x5c, 0x64, + 0xff, 0x83, 0x1a, 0xf6, 0x28, 0xd7, 0xd7, 0x70, 0x88, 0x2f, 0xd0, 0x55, 0x81, 0x35, 0x7c, 0x7b, + 0x79, 0x5b, 0x7d, 0x13, 0x6c, 0x65, 0xd0, 0x71, 0xbb, 0x94, 0x1a, 0x37, 0x4e, 0x18, 0xdc, 0x3a, + 0xe1, 0xa7, 0xd0, 0x74, 0x87, 0xf3, 0x42, 0x7e, 0x86, 0x2f, 0x27, 0x9e, 0x92, 0xaa, 0xb3, 0x11, + 0xd3, 0x18, 0xb1, 0x89, 0x96, 0xc5, 0x4c, 0x51, 0x3d, 0x36, 0x62, 0x3f, 0x1b, 0x28, 0x78, 0x9c, + 0xe8, 0xd9, 0xbd, 0x87, 0x1e, 0xb4, 0x4f, 0x28, 0xe0, 0xc7, 0x98, 0xd8, 0xe3, 0xe0, 0xa7, 0x77, + 0x1e, 0x38, 0xd5, 0x92, 0xab, 0x69, 0x4f, 0xe7, 0xd3, 0xfe, 0x54, 0x28, 0x4a, 0x7b, 0xdf, 0x2d, + 0xf1, 0x2c, 0x35, 0x77, 0xbf, 0xc4, 0x7b, 0x38, 0xfb, 0x3b, 0x08, 0xce, 0x9a, 0x84, 0x7d, 0xf9, + 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x25, 0xe3, 0xe8, 0x3d, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/value.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/value.pb.go new file mode 100644 index 0000000..c79bf02 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/value.pb.go @@ -0,0 +1,708 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1alpha1/value.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a CEL value. +// +// This is similar to `google.protobuf.Value`, but can represent CEL's full +// range of values. +type Value struct { + // Required. The valid kinds of values. + // + // Types that are valid to be assigned to Kind: + // *Value_NullValue + // *Value_BoolValue + // *Value_Int64Value + // *Value_Uint64Value + // *Value_DoubleValue + // *Value_StringValue + // *Value_BytesValue + // *Value_EnumValue + // *Value_ObjectValue + // *Value_MapValue + // *Value_ListValue + // *Value_TypeValue + Kind isValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_value_cbcd1b7bb7a24782, []int{0} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_Kind interface { + isValue_Kind() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BoolValue struct { + BoolValue bool `protobuf:"varint,2,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type Value_Int64Value struct { + Int64Value int64 `protobuf:"varint,3,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type Value_Uint64Value struct { + Uint64Value uint64 `protobuf:"varint,4,opt,name=uint64_value,json=uint64Value,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,5,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,6,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BytesValue struct { + BytesValue []byte `protobuf:"bytes,7,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +type Value_EnumValue struct { + EnumValue *EnumValue `protobuf:"bytes,9,opt,name=enum_value,json=enumValue,proto3,oneof"` +} + +type Value_ObjectValue struct { + ObjectValue *any.Any `protobuf:"bytes,10,opt,name=object_value,json=objectValue,proto3,oneof"` +} + +type Value_MapValue struct { + MapValue *MapValue `protobuf:"bytes,11,opt,name=map_value,json=mapValue,proto3,oneof"` +} + +type Value_ListValue struct { + ListValue *ListValue `protobuf:"bytes,12,opt,name=list_value,json=listValue,proto3,oneof"` +} + +type Value_TypeValue struct { + TypeValue string `protobuf:"bytes,15,opt,name=type_value,json=typeValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_Kind() 
{} + +func (*Value_BoolValue) isValue_Kind() {} + +func (*Value_Int64Value) isValue_Kind() {} + +func (*Value_Uint64Value) isValue_Kind() {} + +func (*Value_DoubleValue) isValue_Kind() {} + +func (*Value_StringValue) isValue_Kind() {} + +func (*Value_BytesValue) isValue_Kind() {} + +func (*Value_EnumValue) isValue_Kind() {} + +func (*Value_ObjectValue) isValue_Kind() {} + +func (*Value_MapValue) isValue_Kind() {} + +func (*Value_ListValue) isValue_Kind() {} + +func (*Value_TypeValue) isValue_Kind() {} + +func (m *Value) GetKind() isValue_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetKind().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBoolValue() bool { + if x, ok := m.GetKind().(*Value_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *Value) GetInt64Value() int64 { + if x, ok := m.GetKind().(*Value_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *Value) GetUint64Value() uint64 { + if x, ok := m.GetKind().(*Value_Uint64Value); ok { + return x.Uint64Value + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetKind().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetKind().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBytesValue() []byte { + if x, ok := m.GetKind().(*Value_BytesValue); ok { + return x.BytesValue + } + return nil +} + +func (m *Value) GetEnumValue() *EnumValue { + if x, ok := m.GetKind().(*Value_EnumValue); ok { + return x.EnumValue + } + return nil +} + +func (m *Value) GetObjectValue() *any.Any { + if x, ok := m.GetKind().(*Value_ObjectValue); ok { + return x.ObjectValue + } + return nil +} + +func (m *Value) GetMapValue() *MapValue { + if x, ok := m.GetKind().(*Value_MapValue); ok { + return x.MapValue + } + return nil +} + +func (m *Value) GetListValue() *ListValue { + if x, ok := m.GetKind().(*Value_ListValue); ok { + return x.ListValue + } + return nil +} + +func (m *Value) GetTypeValue() string { + if x, ok := m.GetKind().(*Value_TypeValue); ok { + return x.TypeValue + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BoolValue)(nil), + (*Value_Int64Value)(nil), + (*Value_Uint64Value)(nil), + (*Value_DoubleValue)(nil), + (*Value_StringValue)(nil), + (*Value_BytesValue)(nil), + (*Value_EnumValue)(nil), + (*Value_ObjectValue)(nil), + (*Value_MapValue)(nil), + (*Value_ListValue)(nil), + (*Value_TypeValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_Int64Value: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *Value_Uint64Value: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Uint64Value)) + case *Value_DoubleValue: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_StringValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BytesValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case *Value_EnumValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EnumValue); err != nil { + return err + } + case *Value_ObjectValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectValue); err != nil { + return err + } + case *Value_MapValue: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapValue); err != nil { + return err + } + case *Value_ListValue: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListValue); err != nil { + return err + } + case *Value_TypeValue: + b.EncodeVarint(15<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TypeValue) + case nil: + default: + return fmt.Errorf("Value.Kind has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 1: // kind.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 2: // kind.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_BoolValue{x != 0} + return true, err + case 3: // kind.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_Int64Value{int64(x)} + return true, err + case 4: // kind.uint64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_Uint64Value{x} + return true, err + case 5: // kind.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Kind = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 6: // kind.string_value + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Value_StringValue{x} + return true, err + case 7: // kind.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Kind = &Value_BytesValue{x} + return true, err + case 9: // kind.enum_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EnumValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_EnumValue{msg} + return true, err + case 10: // kind.object_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Kind = &Value_ObjectValue{msg} + return true, err + case 11: // kind.map_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MapValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_MapValue{msg} + return true, err + case 12: // kind.list_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_ListValue{msg} + return true, err + case 15: // kind.type_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Value_TypeValue{x} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BoolValue: + n += 1 // tag and wire + n += 1 + case *Value_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *Value_Uint64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Uint64Value)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BytesValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case *Value_EnumValue: + s := proto.Size(x.EnumValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ObjectValue: + s := proto.Size(x.ObjectValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_MapValue: + s := proto.Size(x.MapValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ListValue: + s := proto.Size(x.ListValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_TypeValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TypeValue))) + n += len(x.TypeValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An enum value. +type EnumValue struct { + // The fully qualified name of the enum type. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The value of the enum. 
+ Value int32 `protobuf:"varint,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumValue) Reset() { *m = EnumValue{} } +func (m *EnumValue) String() string { return proto.CompactTextString(m) } +func (*EnumValue) ProtoMessage() {} +func (*EnumValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_cbcd1b7bb7a24782, []int{1} +} +func (m *EnumValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValue.Unmarshal(m, b) +} +func (m *EnumValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValue.Marshal(b, m, deterministic) +} +func (dst *EnumValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValue.Merge(dst, src) +} +func (m *EnumValue) XXX_Size() int { + return xxx_messageInfo_EnumValue.Size(m) +} +func (m *EnumValue) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValue.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValue proto.InternalMessageInfo + +func (m *EnumValue) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *EnumValue) GetValue() int32 { + if m != nil { + return m.Value + } + return 0 +} + +// A list. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +type ListValue struct { + // The ordered values in the list. + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListValue) Reset() { *m = ListValue{} } +func (m *ListValue) String() string { return proto.CompactTextString(m) } +func (*ListValue) ProtoMessage() {} +func (*ListValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_cbcd1b7bb7a24782, []int{2} +} +func (m *ListValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListValue.Unmarshal(m, b) +} +func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListValue.Marshal(b, m, deterministic) +} +func (dst *ListValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListValue.Merge(dst, src) +} +func (m *ListValue) XXX_Size() int { + return xxx_messageInfo_ListValue.Size(m) +} +func (m *ListValue) XXX_DiscardUnknown() { + xxx_messageInfo_ListValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ListValue proto.InternalMessageInfo + +func (m *ListValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A map. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +type MapValue struct { + // The set of map entries. + // + // CEL has fewer restrictions on keys, so a protobuf map represenation + // cannot be used. 
+ Entries []*MapValue_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue) Reset() { *m = MapValue{} } +func (m *MapValue) String() string { return proto.CompactTextString(m) } +func (*MapValue) ProtoMessage() {} +func (*MapValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_cbcd1b7bb7a24782, []int{3} +} +func (m *MapValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue.Unmarshal(m, b) +} +func (m *MapValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue.Marshal(b, m, deterministic) +} +func (dst *MapValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue.Merge(dst, src) +} +func (m *MapValue) XXX_Size() int { + return xxx_messageInfo_MapValue.Size(m) +} +func (m *MapValue) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue proto.InternalMessageInfo + +func (m *MapValue) GetEntries() []*MapValue_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// An entry in the map. +type MapValue_Entry struct { + // The key. + // + // Must be unique with in the map. + // Currently only boolean, int, uint, and string values can be keys. + Key *Value `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value. + Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue_Entry) Reset() { *m = MapValue_Entry{} } +func (m *MapValue_Entry) String() string { return proto.CompactTextString(m) } +func (*MapValue_Entry) ProtoMessage() {} +func (*MapValue_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_value_cbcd1b7bb7a24782, []int{3, 0} +} +func (m *MapValue_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue_Entry.Unmarshal(m, b) +} +func (m *MapValue_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue_Entry.Marshal(b, m, deterministic) +} +func (dst *MapValue_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue_Entry.Merge(dst, src) +} +func (m *MapValue_Entry) XXX_Size() int { + return xxx_messageInfo_MapValue_Entry.Size(m) +} +func (m *MapValue_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue_Entry proto.InternalMessageInfo + +func (m *MapValue_Entry) GetKey() *Value { + if m != nil { + return m.Key + } + return nil +} + +func (m *MapValue_Entry) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Value)(nil), "google.api.expr.v1alpha1.Value") + proto.RegisterType((*EnumValue)(nil), "google.api.expr.v1alpha1.EnumValue") + proto.RegisterType((*ListValue)(nil), "google.api.expr.v1alpha1.ListValue") + proto.RegisterType((*MapValue)(nil), "google.api.expr.v1alpha1.MapValue") + proto.RegisterType((*MapValue_Entry)(nil), "google.api.expr.v1alpha1.MapValue.Entry") +} + +func init() { + proto.RegisterFile("google/api/expr/v1alpha1/value.proto", fileDescriptor_value_cbcd1b7bb7a24782) +} + +var fileDescriptor_value_cbcd1b7bb7a24782 = []byte{ + // 518 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xcb, 0x6e, 0xd4, 0x30, + 0x14, 0x86, 0x6b, 0xe6, 
0xd2, 0xc9, 0x99, 0x11, 0x48, 0x56, 0x17, 0xc3, 0xa8, 0x52, 0x43, 0xca, + 0x22, 0xab, 0x44, 0x33, 0x50, 0x10, 0x2a, 0x9b, 0x8e, 0x5a, 0x69, 0x16, 0x80, 0xaa, 0x2c, 0x58, + 0xb0, 0x41, 0xce, 0xd4, 0x84, 0x50, 0xc7, 0x0e, 0x89, 0x5d, 0x91, 0xc7, 0xe3, 0x01, 0x78, 0x1f, + 0x96, 0xc8, 0xb7, 0x50, 0xa8, 0x46, 0xed, 0x2e, 0xe7, 0xf7, 0xf7, 0xfb, 0x5c, 0x7c, 0x14, 0x78, + 0x5e, 0x08, 0x51, 0x30, 0x9a, 0x92, 0xba, 0x4c, 0xe9, 0x8f, 0xba, 0x49, 0x6f, 0x96, 0x84, 0xd5, + 0x5f, 0xc9, 0x32, 0xbd, 0x21, 0x4c, 0xd1, 0xa4, 0x6e, 0x84, 0x14, 0x78, 0x6e, 0xa9, 0x84, 0xd4, + 0x65, 0xa2, 0xa9, 0xc4, 0x53, 0x8b, 0xa7, 0xce, 0x6f, 0xb8, 0x5c, 0x7d, 0x49, 0x09, 0xef, 0xac, + 0x69, 0x71, 0xf8, 0xff, 0x51, 0x2b, 0x1b, 0xb5, 0x95, 0xf6, 0x34, 0xfa, 0x35, 0x84, 0xd1, 0x47, + 0x9d, 0x02, 0x9f, 0x02, 0x70, 0xc5, 0xd8, 0x67, 0x93, 0x70, 0x8e, 0x42, 0x14, 0x3f, 0x5e, 0x2d, + 0x12, 0x97, 0xd1, 0x9b, 0x93, 0x0f, 0x8a, 0x31, 0xc3, 0x6f, 0xf6, 0xb2, 0x80, 0xfb, 0x00, 0x1f, + 0x01, 0xe4, 0x42, 0x78, 0xf3, 0xa3, 0x10, 0xc5, 0x13, 0x0d, 0x68, 0xcd, 0x02, 0xcf, 0x60, 0x5a, + 0x72, 0xf9, 0xea, 0xa5, 0x23, 0x06, 0x21, 0x8a, 0x07, 0x9b, 0xbd, 0x0c, 0x8c, 0x68, 0x91, 0x63, + 0x98, 0xa9, 0xdb, 0xcc, 0x30, 0x44, 0xf1, 0x70, 0xb3, 0x97, 0x4d, 0xd5, 0xbf, 0xd0, 0x95, 0x50, + 0x39, 0xa3, 0x0e, 0x1a, 0x85, 0x28, 0x46, 0x1a, 0xb2, 0x6a, 0x0f, 0xb5, 0xb2, 0x29, 0x79, 0xe1, + 0xa0, 0x71, 0x88, 0xe2, 0x40, 0x43, 0x56, 0xed, 0x2b, 0xca, 0x3b, 0x49, 0x5b, 0xc7, 0xec, 0x87, + 0x28, 0x9e, 0xe9, 0x8a, 0x8c, 0x68, 0x91, 0x73, 0x00, 0xca, 0x55, 0xe5, 0x88, 0x20, 0x44, 0xf1, + 0x74, 0x75, 0x9c, 0xec, 0x7a, 0x84, 0xe4, 0x82, 0xab, 0xaa, 0x9f, 0x0d, 0xf5, 0x01, 0x7e, 0x03, + 0x33, 0x91, 0x7f, 0xa3, 0x5b, 0xe9, 0xee, 0x01, 0x73, 0xcf, 0xc1, 0x9d, 0xd1, 0x9e, 0xf1, 0x4e, + 0xd7, 0x68, 0x59, 0x6b, 0x3d, 0x83, 0xa0, 0x22, 0xb5, 0xf3, 0x4d, 0x8d, 0x2f, 0xda, 0x9d, 0xff, + 0x3d, 0xa9, 0x7d, 0xfa, 0x49, 0xe5, 0xbe, 0x75, 0x0f, 0xac, 0x6c, 0x7d, 0xee, 0xd9, 0x7d, 0x3d, + 0xbc, 0x2b, 0x5b, 0xd9, 0xf7, 0xc0, 0x7c, 0xa0, 0xdf, 0x57, 0x76, 0xb5, 0x1f, 0xfa, 0x13, 0x37, + 0xcf, 0x40, 0x6b, 0x06, 0x58, 0x8f, 0x61, 0x78, 0x5d, 0xf2, 0xab, 0xe8, 0x04, 0x82, 0x7e, 0x0c, + 0x18, 0xc3, 0x50, 0x13, 0x66, 0x99, 0x82, 0xcc, 0x7c, 0xe3, 0x03, 0x18, 0xfd, 0x5d, 0x92, 0x51, + 0x66, 0x83, 0xe8, 0x1c, 0x82, 0x3e, 0x33, 0x7e, 0x0d, 0x63, 0xa3, 0xb6, 0x73, 0x14, 0x0e, 0xe2, + 0xe9, 0xea, 0x68, 0x77, 0xb9, 0xc6, 0x90, 0x39, 0x3c, 0xfa, 0x89, 0x60, 0xe2, 0x87, 0x80, 0xd7, + 0xb0, 0x4f, 0xb9, 0x6c, 0xca, 0xfe, 0x9a, 0xf8, 0xfe, 0xc9, 0x25, 0x17, 0x5c, 0x36, 0x5d, 0xe6, + 0x8d, 0x8b, 0xef, 0x30, 0x32, 0x0a, 0x5e, 0xc2, 0xe0, 0x9a, 0x76, 0xa6, 0x91, 0x07, 0xd4, 0xa3, + 0x59, 0x7c, 0x72, 0xbb, 0xd1, 0x07, 0x98, 0x2c, 0xbd, 0xae, 0xe0, 0x70, 0x2b, 0xaa, 0x9d, 0xf0, + 0x1a, 0x0c, 0x7d, 0xa9, 0x97, 0xe6, 0x12, 0x7d, 0x7a, 0xeb, 0xb8, 0x42, 0x30, 0xc2, 0x8b, 0x44, + 0x34, 0x45, 0x5a, 0x50, 0x6e, 0x56, 0x2a, 0xb5, 0x47, 0xa4, 0x2e, 0xdb, 0xbb, 0xbf, 0x95, 0x53, + 0x1d, 0xfd, 0x46, 0x28, 0x1f, 0x1b, 0xf6, 0xc5, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf9, 0x53, + 0x8e, 0x99, 0x81, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/decl.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/decl.pb.go new file mode 100644 index 0000000..12fa1dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/decl.pb.go @@ -0,0 +1,403 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/expr/v1beta1/decl.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A declaration. +type Decl struct { + // The id of the declaration. + Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The name of the declaration. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The documentation string for the declaration. + Doc string `protobuf:"bytes,3,opt,name=doc,proto3" json:"doc,omitempty"` + // The kind of declaration. + // + // Types that are valid to be assigned to Kind: + // *Decl_Ident + // *Decl_Function + Kind isDecl_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Decl) Reset() { *m = Decl{} } +func (m *Decl) String() string { return proto.CompactTextString(m) } +func (*Decl) ProtoMessage() {} +func (*Decl) Descriptor() ([]byte, []int) { + return fileDescriptor_decl_1e9fea2f12eba89c, []int{0} +} +func (m *Decl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Decl.Unmarshal(m, b) +} +func (m *Decl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Decl.Marshal(b, m, deterministic) +} +func (dst *Decl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Decl.Merge(dst, src) +} +func (m *Decl) XXX_Size() int { + return xxx_messageInfo_Decl.Size(m) +} +func (m *Decl) XXX_DiscardUnknown() { + xxx_messageInfo_Decl.DiscardUnknown(m) +} + +var xxx_messageInfo_Decl proto.InternalMessageInfo + +func (m *Decl) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +func (m *Decl) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Decl) GetDoc() string { + if m != nil { + return m.Doc + } + return "" +} + +type isDecl_Kind interface { + isDecl_Kind() +} + +type Decl_Ident struct { + Ident *IdentDecl `protobuf:"bytes,4,opt,name=ident,proto3,oneof"` +} + +type Decl_Function struct { + Function *FunctionDecl `protobuf:"bytes,5,opt,name=function,proto3,oneof"` +} + +func (*Decl_Ident) isDecl_Kind() {} + +func (*Decl_Function) isDecl_Kind() {} + +func (m *Decl) GetKind() isDecl_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Decl) GetIdent() *IdentDecl { + if x, ok := m.GetKind().(*Decl_Ident); ok { + return x.Ident + } + return nil +} + +func (m *Decl) GetFunction() *FunctionDecl { + if x, ok := m.GetKind().(*Decl_Function); ok { + return x.Function + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Decl) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Decl_OneofMarshaler, _Decl_OneofUnmarshaler, _Decl_OneofSizer, []interface{}{ + (*Decl_Ident)(nil), + (*Decl_Function)(nil), + } +} + +func _Decl_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Decl) + // kind + switch x := m.Kind.(type) { + case *Decl_Ident: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Ident); err != nil { + return err + } + case *Decl_Function: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Function); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Decl.Kind has unexpected type %T", x) + } + return nil +} + +func _Decl_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Decl) + switch tag { + case 4: // kind.ident + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(IdentDecl) + err := b.DecodeMessage(msg) + m.Kind = &Decl_Ident{msg} + return true, err + case 5: // kind.function + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FunctionDecl) + err := b.DecodeMessage(msg) + m.Kind = &Decl_Function{msg} + return true, err + default: + return false, nil + } +} + +func _Decl_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Decl) + // kind + switch x := m.Kind.(type) { + case *Decl_Ident: + s := proto.Size(x.Ident) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Decl_Function: + s := proto.Size(x.Function) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The declared type of a variable. +// +// Extends runtime type values with extra information used for type checking +// and dispatching. +type DeclType struct { + // The expression id of the declared type, if applicable. + Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The type name, e.g. 'int', 'my.type.Type' or 'T' + Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + // An ordered list of type parameters, e.g. ``. + // Only applies to a subset of types, e.g. `map`, `list`. 
+ TypeParams []*DeclType `protobuf:"bytes,4,rep,name=type_params,json=typeParams,proto3" json:"type_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeclType) Reset() { *m = DeclType{} } +func (m *DeclType) String() string { return proto.CompactTextString(m) } +func (*DeclType) ProtoMessage() {} +func (*DeclType) Descriptor() ([]byte, []int) { + return fileDescriptor_decl_1e9fea2f12eba89c, []int{1} +} +func (m *DeclType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeclType.Unmarshal(m, b) +} +func (m *DeclType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeclType.Marshal(b, m, deterministic) +} +func (dst *DeclType) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeclType.Merge(dst, src) +} +func (m *DeclType) XXX_Size() int { + return xxx_messageInfo_DeclType.Size(m) +} +func (m *DeclType) XXX_DiscardUnknown() { + xxx_messageInfo_DeclType.DiscardUnknown(m) +} + +var xxx_messageInfo_DeclType proto.InternalMessageInfo + +func (m *DeclType) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +func (m *DeclType) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *DeclType) GetTypeParams() []*DeclType { + if m != nil { + return m.TypeParams + } + return nil +} + +// An identifier declaration. +type IdentDecl struct { + // Optional type of the identifier. + Type *DeclType `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"` + // Optional value of the identifier. + Value *Expr `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IdentDecl) Reset() { *m = IdentDecl{} } +func (m *IdentDecl) String() string { return proto.CompactTextString(m) } +func (*IdentDecl) ProtoMessage() {} +func (*IdentDecl) Descriptor() ([]byte, []int) { + return fileDescriptor_decl_1e9fea2f12eba89c, []int{2} +} +func (m *IdentDecl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IdentDecl.Unmarshal(m, b) +} +func (m *IdentDecl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IdentDecl.Marshal(b, m, deterministic) +} +func (dst *IdentDecl) XXX_Merge(src proto.Message) { + xxx_messageInfo_IdentDecl.Merge(dst, src) +} +func (m *IdentDecl) XXX_Size() int { + return xxx_messageInfo_IdentDecl.Size(m) +} +func (m *IdentDecl) XXX_DiscardUnknown() { + xxx_messageInfo_IdentDecl.DiscardUnknown(m) +} + +var xxx_messageInfo_IdentDecl proto.InternalMessageInfo + +func (m *IdentDecl) GetType() *DeclType { + if m != nil { + return m.Type + } + return nil +} + +func (m *IdentDecl) GetValue() *Expr { + if m != nil { + return m.Value + } + return nil +} + +// A function declaration. +type FunctionDecl struct { + // The function arguments. + Args []*IdentDecl `protobuf:"bytes,1,rep,name=args,proto3" json:"args,omitempty"` + // Optional declared return type. + ReturnType *DeclType `protobuf:"bytes,2,opt,name=return_type,json=returnType,proto3" json:"return_type,omitempty"` + // If the first argument of the function is the receiver. 
+ ReceiverFunction bool `protobuf:"varint,3,opt,name=receiver_function,json=receiverFunction,proto3" json:"receiver_function,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FunctionDecl) Reset() { *m = FunctionDecl{} } +func (m *FunctionDecl) String() string { return proto.CompactTextString(m) } +func (*FunctionDecl) ProtoMessage() {} +func (*FunctionDecl) Descriptor() ([]byte, []int) { + return fileDescriptor_decl_1e9fea2f12eba89c, []int{3} +} +func (m *FunctionDecl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FunctionDecl.Unmarshal(m, b) +} +func (m *FunctionDecl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FunctionDecl.Marshal(b, m, deterministic) +} +func (dst *FunctionDecl) XXX_Merge(src proto.Message) { + xxx_messageInfo_FunctionDecl.Merge(dst, src) +} +func (m *FunctionDecl) XXX_Size() int { + return xxx_messageInfo_FunctionDecl.Size(m) +} +func (m *FunctionDecl) XXX_DiscardUnknown() { + xxx_messageInfo_FunctionDecl.DiscardUnknown(m) +} + +var xxx_messageInfo_FunctionDecl proto.InternalMessageInfo + +func (m *FunctionDecl) GetArgs() []*IdentDecl { + if m != nil { + return m.Args + } + return nil +} + +func (m *FunctionDecl) GetReturnType() *DeclType { + if m != nil { + return m.ReturnType + } + return nil +} + +func (m *FunctionDecl) GetReceiverFunction() bool { + if m != nil { + return m.ReceiverFunction + } + return false +} + +func init() { + proto.RegisterType((*Decl)(nil), "google.api.expr.v1beta1.Decl") + proto.RegisterType((*DeclType)(nil), "google.api.expr.v1beta1.DeclType") + proto.RegisterType((*IdentDecl)(nil), "google.api.expr.v1beta1.IdentDecl") + proto.RegisterType((*FunctionDecl)(nil), "google.api.expr.v1beta1.FunctionDecl") +} + +func init() { + proto.RegisterFile("google/api/expr/v1beta1/decl.proto", fileDescriptor_decl_1e9fea2f12eba89c) +} + +var fileDescriptor_decl_1e9fea2f12eba89c = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xcd, 0x4a, 0xeb, 0x40, + 0x14, 0xc7, 0xef, 0x34, 0x49, 0x69, 0x4f, 0x2f, 0x97, 0xde, 0xd9, 0xdc, 0x70, 0x45, 0x88, 0x01, + 0x21, 0x20, 0x24, 0xb4, 0x45, 0x17, 0x76, 0x17, 0x3f, 0xd0, 0x5d, 0x09, 0xae, 0xdc, 0x94, 0x69, + 0x32, 0x86, 0xd1, 0x74, 0x66, 0x98, 0xa6, 0xb5, 0x7d, 0x32, 0x9f, 0xc0, 0x77, 0x72, 0x29, 0x33, + 0x49, 0x83, 0xa0, 0x81, 0xae, 0x7a, 0x9a, 0xf3, 0xff, 0x9d, 0x8f, 0xff, 0x1c, 0xf0, 0x73, 0x21, + 0xf2, 0x82, 0x46, 0x44, 0xb2, 0x88, 0x6e, 0xa5, 0x8a, 0x36, 0xa3, 0x05, 0x2d, 0xc9, 0x28, 0xca, + 0x68, 0x5a, 0x84, 0x52, 0x89, 0x52, 0xe0, 0x7f, 0x95, 0x26, 0x24, 0x92, 0x85, 0x5a, 0x13, 0xd6, + 0x9a, 0xff, 0xad, 0xb0, 0x51, 0x19, 0xd8, 0x7f, 0x47, 0x60, 0x5f, 0xd3, 0xb4, 0xc0, 0x7f, 0xa0, + 0xc3, 0x32, 0x17, 0x79, 0x28, 0x70, 0x92, 0x0e, 0xcb, 0x30, 0x06, 0x9b, 0x93, 0x25, 0x75, 0x3b, + 0x1e, 0x0a, 0xfa, 0x89, 0x89, 0xf1, 0x10, 0xac, 0x4c, 0xa4, 0xae, 0x65, 0x3e, 0xe9, 0x10, 0x5f, + 0x82, 0xc3, 0x32, 0xca, 0x4b, 0xd7, 0xf6, 0x50, 0x30, 0x18, 0xfb, 0x61, 0xcb, 0x2c, 0xe1, 0xbd, + 0x56, 0xe9, 0x46, 0x77, 0xbf, 0x92, 0x0a, 0xc1, 0x57, 0xd0, 0x7b, 0x5a, 0xf3, 0xb4, 0x64, 0x82, + 0xbb, 0x8e, 0xc1, 0x4f, 0x5b, 0xf1, 0xdb, 0x5a, 0x58, 0x57, 0x68, 0xc0, 0xb8, 0x0b, 0xf6, 0x0b, + 0xe3, 0x99, 0xaf, 0xa0, 0xa7, 0x73, 0x0f, 0x3b, 0x49, 0x7f, 0x5a, 0xa5, 0xdc, 0xc9, 0x66, 0x15, + 0x1d, 0xe3, 0x18, 0x06, 0xfa, 0x77, 0x2e, 0x89, 0x22, 0xcb, 0x95, 0x6b, 0x7b, 0x56, 0x30, 0x18, + 0x9f, 0xb4, 0xf6, 0xdf, 0xd7, 0x4e, 
0x40, 0x53, 0x33, 0x03, 0xf9, 0xaf, 0xd0, 0x6f, 0xd6, 0xc2, + 0xe7, 0x75, 0x13, 0xcb, 0x6c, 0x72, 0x40, 0xa5, 0x6a, 0x8e, 0x09, 0x38, 0x1b, 0x52, 0xac, 0x69, + 0x6d, 0xe0, 0x71, 0x2b, 0x77, 0xb3, 0x95, 0x2a, 0xa9, 0xb4, 0xfe, 0x1b, 0x82, 0xdf, 0x5f, 0x1d, + 0xc1, 0x17, 0x60, 0x13, 0x95, 0xaf, 0x5c, 0x64, 0xd6, 0x38, 0xe0, 0x15, 0x12, 0xa3, 0xd7, 0x2e, + 0x28, 0x5a, 0xae, 0x15, 0x9f, 0x37, 0x06, 0x1d, 0xe6, 0x42, 0x45, 0x19, 0xb7, 0xcf, 0xe0, 0xaf, + 0xa2, 0x29, 0x65, 0x1b, 0xaa, 0xe6, 0xcd, 0x7b, 0x6a, 0x17, 0x7a, 0xc9, 0x70, 0x9f, 0xd8, 0x0f, + 0x1b, 0x3f, 0xc3, 0x51, 0x2a, 0x96, 0x6d, 0x0d, 0xe2, 0xbe, 0xee, 0x30, 0xd3, 0x87, 0x39, 0x43, + 0x8f, 0xd3, 0x5a, 0x95, 0x8b, 0x82, 0xf0, 0x3c, 0x14, 0x2a, 0x8f, 0x72, 0xca, 0xcd, 0xd9, 0x46, + 0x55, 0x8a, 0x48, 0xb6, 0xfa, 0x76, 0xdd, 0x53, 0xfd, 0xe7, 0x03, 0xa1, 0x45, 0xd7, 0x48, 0x27, + 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x10, 0x20, 0xb6, 0xbc, 0x44, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/eval.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/eval.pb.go new file mode 100644 index 0000000..a1fb7f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/eval.pb.go @@ -0,0 +1,470 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1beta1/eval.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The state of an evaluation. +// +// Can represent an initial, partial, or completed state of evaluation. +type EvalState struct { + // The unique values referenced in this message. + Values []*ExprValue `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + // An ordered list of results. + // + // Tracks the flow of evaluation through the expression. + // May be sparse. 
+ Results []*EvalState_Result `protobuf:"bytes,3,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EvalState) Reset() { *m = EvalState{} } +func (m *EvalState) String() string { return proto.CompactTextString(m) } +func (*EvalState) ProtoMessage() {} +func (*EvalState) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{0} +} +func (m *EvalState) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EvalState.Unmarshal(m, b) +} +func (m *EvalState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EvalState.Marshal(b, m, deterministic) +} +func (dst *EvalState) XXX_Merge(src proto.Message) { + xxx_messageInfo_EvalState.Merge(dst, src) +} +func (m *EvalState) XXX_Size() int { + return xxx_messageInfo_EvalState.Size(m) +} +func (m *EvalState) XXX_DiscardUnknown() { + xxx_messageInfo_EvalState.DiscardUnknown(m) +} + +var xxx_messageInfo_EvalState proto.InternalMessageInfo + +func (m *EvalState) GetValues() []*ExprValue { + if m != nil { + return m.Values + } + return nil +} + +func (m *EvalState) GetResults() []*EvalState_Result { + if m != nil { + return m.Results + } + return nil +} + +// A single evaluation result. +type EvalState_Result struct { + // The expression this result is for. + Expr *IdRef `protobuf:"bytes,1,opt,name=expr,proto3" json:"expr,omitempty"` + // The index in `values` of the resulting value. + Value int32 `protobuf:"varint,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EvalState_Result) Reset() { *m = EvalState_Result{} } +func (m *EvalState_Result) String() string { return proto.CompactTextString(m) } +func (*EvalState_Result) ProtoMessage() {} +func (*EvalState_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{0, 0} +} +func (m *EvalState_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EvalState_Result.Unmarshal(m, b) +} +func (m *EvalState_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EvalState_Result.Marshal(b, m, deterministic) +} +func (dst *EvalState_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_EvalState_Result.Merge(dst, src) +} +func (m *EvalState_Result) XXX_Size() int { + return xxx_messageInfo_EvalState_Result.Size(m) +} +func (m *EvalState_Result) XXX_DiscardUnknown() { + xxx_messageInfo_EvalState_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_EvalState_Result proto.InternalMessageInfo + +func (m *EvalState_Result) GetExpr() *IdRef { + if m != nil { + return m.Expr + } + return nil +} + +func (m *EvalState_Result) GetValue() int32 { + if m != nil { + return m.Value + } + return 0 +} + +// The value of an evaluated expression. +type ExprValue struct { + // An expression can resolve to a value, error or unknown. 
+ // + // Types that are valid to be assigned to Kind: + // *ExprValue_Value + // *ExprValue_Error + // *ExprValue_Unknown + Kind isExprValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExprValue) Reset() { *m = ExprValue{} } +func (m *ExprValue) String() string { return proto.CompactTextString(m) } +func (*ExprValue) ProtoMessage() {} +func (*ExprValue) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{1} +} +func (m *ExprValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExprValue.Unmarshal(m, b) +} +func (m *ExprValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExprValue.Marshal(b, m, deterministic) +} +func (dst *ExprValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExprValue.Merge(dst, src) +} +func (m *ExprValue) XXX_Size() int { + return xxx_messageInfo_ExprValue.Size(m) +} +func (m *ExprValue) XXX_DiscardUnknown() { + xxx_messageInfo_ExprValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ExprValue proto.InternalMessageInfo + +type isExprValue_Kind interface { + isExprValue_Kind() +} + +type ExprValue_Value struct { + Value *Value `protobuf:"bytes,1,opt,name=value,proto3,oneof"` +} + +type ExprValue_Error struct { + Error *ErrorSet `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +type ExprValue_Unknown struct { + Unknown *UnknownSet `protobuf:"bytes,3,opt,name=unknown,proto3,oneof"` +} + +func (*ExprValue_Value) isExprValue_Kind() {} + +func (*ExprValue_Error) isExprValue_Kind() {} + +func (*ExprValue_Unknown) isExprValue_Kind() {} + +func (m *ExprValue) GetKind() isExprValue_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *ExprValue) GetValue() *Value { + if x, ok := m.GetKind().(*ExprValue_Value); ok { + return x.Value + } + return nil +} + +func (m *ExprValue) GetError() *ErrorSet { + if x, ok := m.GetKind().(*ExprValue_Error); ok { + return x.Error + } + return nil +} + +func (m *ExprValue) GetUnknown() *UnknownSet { + if x, ok := m.GetKind().(*ExprValue_Unknown); ok { + return x.Unknown + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ExprValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExprValue_OneofMarshaler, _ExprValue_OneofUnmarshaler, _ExprValue_OneofSizer, []interface{}{ + (*ExprValue_Value)(nil), + (*ExprValue_Error)(nil), + (*ExprValue_Unknown)(nil), + } +} + +func _ExprValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExprValue) + // kind + switch x := m.Kind.(type) { + case *ExprValue_Value: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Value); err != nil { + return err + } + case *ExprValue_Error: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Error); err != nil { + return err + } + case *ExprValue_Unknown: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Unknown); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExprValue.Kind has unexpected type %T", x) + } + return nil +} + +func _ExprValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExprValue) + switch tag { + case 1: // kind.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Value{msg} + return true, err + case 2: // kind.error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ErrorSet) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Error{msg} + return true, err + case 3: // kind.unknown + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UnknownSet) + err := b.DecodeMessage(msg) + m.Kind = &ExprValue_Unknown{msg} + return true, err + default: + return false, nil + } +} + +func _ExprValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExprValue) + // kind + switch x := m.Kind.(type) { + case *ExprValue_Value: + s := proto.Size(x.Value) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExprValue_Error: + s := proto.Size(x.Error) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExprValue_Unknown: + s := proto.Size(x.Unknown) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A set of errors. +// +// The errors included depend on the context. See `ExprValue.error`. +type ErrorSet struct { + // The errors in the set. 
+ Errors []*status.Status `protobuf:"bytes,1,rep,name=errors,proto3" json:"errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorSet) Reset() { *m = ErrorSet{} } +func (m *ErrorSet) String() string { return proto.CompactTextString(m) } +func (*ErrorSet) ProtoMessage() {} +func (*ErrorSet) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{2} +} +func (m *ErrorSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorSet.Unmarshal(m, b) +} +func (m *ErrorSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorSet.Marshal(b, m, deterministic) +} +func (dst *ErrorSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorSet.Merge(dst, src) +} +func (m *ErrorSet) XXX_Size() int { + return xxx_messageInfo_ErrorSet.Size(m) +} +func (m *ErrorSet) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorSet proto.InternalMessageInfo + +func (m *ErrorSet) GetErrors() []*status.Status { + if m != nil { + return m.Errors + } + return nil +} + +// A set of expressions for which the value is unknown. +// +// The unknowns included depend on the context. See `ExprValue.unknown`. +type UnknownSet struct { + // The ids of the expressions with unknown values. + Exprs []*IdRef `protobuf:"bytes,1,rep,name=exprs,proto3" json:"exprs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnknownSet) Reset() { *m = UnknownSet{} } +func (m *UnknownSet) String() string { return proto.CompactTextString(m) } +func (*UnknownSet) ProtoMessage() {} +func (*UnknownSet) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{3} +} +func (m *UnknownSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnknownSet.Unmarshal(m, b) +} +func (m *UnknownSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnknownSet.Marshal(b, m, deterministic) +} +func (dst *UnknownSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnknownSet.Merge(dst, src) +} +func (m *UnknownSet) XXX_Size() int { + return xxx_messageInfo_UnknownSet.Size(m) +} +func (m *UnknownSet) XXX_DiscardUnknown() { + xxx_messageInfo_UnknownSet.DiscardUnknown(m) +} + +var xxx_messageInfo_UnknownSet proto.InternalMessageInfo + +func (m *UnknownSet) GetExprs() []*IdRef { + if m != nil { + return m.Exprs + } + return nil +} + +// A reference to an expression id. +type IdRef struct { + // The expression id. 
+ Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IdRef) Reset() { *m = IdRef{} } +func (m *IdRef) String() string { return proto.CompactTextString(m) } +func (*IdRef) ProtoMessage() {} +func (*IdRef) Descriptor() ([]byte, []int) { + return fileDescriptor_eval_1837ac708dd48de9, []int{4} +} +func (m *IdRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IdRef.Unmarshal(m, b) +} +func (m *IdRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IdRef.Marshal(b, m, deterministic) +} +func (dst *IdRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_IdRef.Merge(dst, src) +} +func (m *IdRef) XXX_Size() int { + return xxx_messageInfo_IdRef.Size(m) +} +func (m *IdRef) XXX_DiscardUnknown() { + xxx_messageInfo_IdRef.DiscardUnknown(m) +} + +var xxx_messageInfo_IdRef proto.InternalMessageInfo + +func (m *IdRef) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +func init() { + proto.RegisterType((*EvalState)(nil), "google.api.expr.v1beta1.EvalState") + proto.RegisterType((*EvalState_Result)(nil), "google.api.expr.v1beta1.EvalState.Result") + proto.RegisterType((*ExprValue)(nil), "google.api.expr.v1beta1.ExprValue") + proto.RegisterType((*ErrorSet)(nil), "google.api.expr.v1beta1.ErrorSet") + proto.RegisterType((*UnknownSet)(nil), "google.api.expr.v1beta1.UnknownSet") + proto.RegisterType((*IdRef)(nil), "google.api.expr.v1beta1.IdRef") +} + +func init() { + proto.RegisterFile("google/api/expr/v1beta1/eval.proto", fileDescriptor_eval_1837ac708dd48de9) +} + +var fileDescriptor_eval_1837ac708dd48de9 = []byte{ + // 392 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xcf, 0x8b, 0xd4, 0x30, + 0x14, 0xc7, 0xcd, 0xcc, 0xb6, 0xab, 0x6f, 0xc0, 0x43, 0x10, 0xb6, 0x8c, 0x20, 0x63, 0xf7, 0x32, + 0x7a, 0x48, 0xd8, 0x2a, 0x0b, 0xba, 0x07, 0xa1, 0xb2, 0xa0, 0xb7, 0x25, 0x83, 0x1e, 0xbc, 0x65, + 0xa7, 0xb1, 0xd4, 0xa9, 0x4d, 0x48, 0xd3, 0x3a, 0xff, 0xa1, 0xff, 0x86, 0x7f, 0x86, 0x47, 0xc9, + 0x4b, 0x83, 0x87, 0xa5, 0xcc, 0xf1, 0x25, 0x9f, 0xcf, 0xf7, 0xe5, 0xc7, 0x83, 0xbc, 0xd6, 0xba, + 0x6e, 0x15, 0x97, 0xa6, 0xe1, 0xea, 0x68, 0x2c, 0x1f, 0xaf, 0xee, 0x95, 0x93, 0x57, 0x5c, 0x8d, + 0xb2, 0x65, 0xc6, 0x6a, 0xa7, 0xe9, 0x45, 0x60, 0x98, 0x34, 0x0d, 0xf3, 0x0c, 0x9b, 0x98, 0xf5, + 0xe5, 0x9c, 0x3c, 0xca, 0x76, 0x50, 0xc1, 0x5e, 0x4f, 0x36, 0xb7, 0x66, 0xcf, 0x7b, 0x27, 0xdd, + 0xd0, 0x87, 0x8d, 0xfc, 0x0f, 0x81, 0x27, 0xb7, 0xa3, 0x6c, 0x77, 0x4e, 0x3a, 0x45, 0xdf, 0x43, + 0x8a, 0x56, 0x9f, 0x91, 0xcd, 0x72, 0xbb, 0x2a, 0x72, 0x36, 0xd3, 0x95, 0xdd, 0x1e, 0x8d, 0xfd, + 0xea, 0x51, 0x31, 0x19, 0xf4, 0x23, 0x9c, 0x5b, 0xd5, 0x0f, 0xad, 0xeb, 0xb3, 0x25, 0xca, 0xaf, + 0xe6, 0xe5, 0xd8, 0x90, 0x09, 0x34, 0x44, 0x34, 0xd7, 0x02, 0xd2, 0xb0, 0x44, 0x0b, 0x38, 0xf3, + 0x4e, 0x46, 0x36, 0x64, 0xbb, 0x2a, 0x5e, 0xcc, 0x66, 0x7d, 0xae, 0x84, 0xfa, 0x2e, 0x90, 0xa5, + 0xcf, 0x20, 0xc1, 0xc3, 0x64, 0x8b, 0x0d, 0xd9, 0x26, 0x22, 0x14, 0xf9, 0x6f, 0x7f, 0xc5, 0x78, + 0x5c, 0x7a, 0x1d, 0x99, 0x53, 0xc1, 0x88, 0x7f, 0x7a, 0x34, 0xa5, 0xd0, 0x77, 0x90, 0x28, 0x6b, + 0xb5, 0xc5, 0xec, 0x55, 0xf1, 0x72, 0xfe, 0x72, 0x9e, 0xda, 0x29, 0xe7, 0x55, 0x34, 0xe8, 0x07, + 0x38, 0x1f, 0xba, 0x43, 0xa7, 0x7f, 0x75, 0xd9, 0x12, 0xe5, 0xcb, 0x59, 0xf9, 0x4b, 0xe0, 0x82, + 0x1e, 0xad, 0x32, 0x85, 0xb3, 0x43, 0xd3, 0x55, 0xf9, 0x35, 0x3c, 0x8e, 0xe9, 0xf4, 0x35, 0xa4, + 0x98, 
0x1e, 0xbf, 0x8a, 0xc6, 0x4c, 0x6b, 0xf6, 0x6c, 0x87, 0x5f, 0x2c, 0x26, 0x22, 0x2f, 0x01, + 0xfe, 0x07, 0xd3, 0xb7, 0x90, 0xf8, 0x9e, 0x51, 0x3c, 0xf5, 0xb4, 0x01, 0xce, 0x2f, 0x20, 0xc1, + 0x9a, 0x3e, 0x85, 0x45, 0x53, 0xe1, 0xeb, 0x25, 0x62, 0xd1, 0x54, 0xe5, 0x0f, 0x78, 0xbe, 0xd7, + 0x3f, 0xe7, 0x42, 0x4a, 0x9c, 0xae, 0x3b, 0x3f, 0x6b, 0x77, 0xe4, 0xdb, 0xcd, 0x44, 0xd5, 0xba, + 0x95, 0x5d, 0xcd, 0xb4, 0xad, 0x79, 0xad, 0x3a, 0x9c, 0x44, 0x1e, 0xb6, 0xa4, 0x69, 0xfa, 0x07, + 0xa3, 0x7c, 0xe3, 0x8b, 0xbf, 0x84, 0xdc, 0xa7, 0x88, 0xbe, 0xf9, 0x17, 0x00, 0x00, 0xff, 0xff, + 0x33, 0xb5, 0xd5, 0x2b, 0x31, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/expr.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/expr.pb.go new file mode 100644 index 0000000..e3d9d47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/expr.pb.go @@ -0,0 +1,1351 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1beta1/expr.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An expression together with source information as returned by the parser. +type ParsedExpr struct { + // The parsed expression. + Expr *Expr `protobuf:"bytes,2,opt,name=expr,proto3" json:"expr,omitempty"` + // The source info derived from input that generated the parsed `expr`. + SourceInfo *SourceInfo `protobuf:"bytes,3,opt,name=source_info,json=sourceInfo,proto3" json:"source_info,omitempty"` + // The syntax version of the source, e.g. `cel1`. 
+ SyntaxVersion string `protobuf:"bytes,4,opt,name=syntax_version,json=syntaxVersion,proto3" json:"syntax_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParsedExpr) Reset() { *m = ParsedExpr{} } +func (m *ParsedExpr) String() string { return proto.CompactTextString(m) } +func (*ParsedExpr) ProtoMessage() {} +func (*ParsedExpr) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{0} +} +func (m *ParsedExpr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParsedExpr.Unmarshal(m, b) +} +func (m *ParsedExpr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParsedExpr.Marshal(b, m, deterministic) +} +func (dst *ParsedExpr) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParsedExpr.Merge(dst, src) +} +func (m *ParsedExpr) XXX_Size() int { + return xxx_messageInfo_ParsedExpr.Size(m) +} +func (m *ParsedExpr) XXX_DiscardUnknown() { + xxx_messageInfo_ParsedExpr.DiscardUnknown(m) +} + +var xxx_messageInfo_ParsedExpr proto.InternalMessageInfo + +func (m *ParsedExpr) GetExpr() *Expr { + if m != nil { + return m.Expr + } + return nil +} + +func (m *ParsedExpr) GetSourceInfo() *SourceInfo { + if m != nil { + return m.SourceInfo + } + return nil +} + +func (m *ParsedExpr) GetSyntaxVersion() string { + if m != nil { + return m.SyntaxVersion + } + return "" +} + +// An abstract representation of a common expression. +// +// Expressions are abstractly represented as a collection of identifiers, +// select statements, function calls, literals, and comprehensions. All +// operators with the exception of the '.' operator are modelled as function +// calls. This makes it easy to represent new operators into the existing AST. +// +// All references within expressions must resolve to a +// [Decl][google.api.expr.v1beta1.Decl] provided at type-check for an expression +// to be valid. A reference may either be a bare identifier `name` or a +// qualified identifier `google.api.name`. References may either refer to a +// value or a function declaration. +// +// For example, the expression `google.api.name.startsWith('expr')` references +// the declaration `google.api.name` within a +// [Expr.Select][google.api.expr.v1beta1.Expr.Select] expression, and the +// function declaration `startsWith`. +type Expr struct { + // Required. An id assigned to this node by the parser which is unique in a + // given expression tree. This is used to associate type information and other + // attributes to a node in the parse tree. + Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // Required. Variants of expressions. 
+ // + // Types that are valid to be assigned to ExprKind: + // *Expr_LiteralExpr + // *Expr_IdentExpr + // *Expr_SelectExpr + // *Expr_CallExpr + // *Expr_ListExpr + // *Expr_StructExpr + // *Expr_ComprehensionExpr + ExprKind isExpr_ExprKind `protobuf_oneof:"expr_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr) Reset() { *m = Expr{} } +func (m *Expr) String() string { return proto.CompactTextString(m) } +func (*Expr) ProtoMessage() {} +func (*Expr) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1} +} +func (m *Expr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr.Unmarshal(m, b) +} +func (m *Expr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr.Marshal(b, m, deterministic) +} +func (dst *Expr) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr.Merge(dst, src) +} +func (m *Expr) XXX_Size() int { + return xxx_messageInfo_Expr.Size(m) +} +func (m *Expr) XXX_DiscardUnknown() { + xxx_messageInfo_Expr.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr proto.InternalMessageInfo + +func (m *Expr) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +type isExpr_ExprKind interface { + isExpr_ExprKind() +} + +type Expr_LiteralExpr struct { + LiteralExpr *Literal `protobuf:"bytes,3,opt,name=literal_expr,json=literalExpr,proto3,oneof"` +} + +type Expr_IdentExpr struct { + IdentExpr *Expr_Ident `protobuf:"bytes,4,opt,name=ident_expr,json=identExpr,proto3,oneof"` +} + +type Expr_SelectExpr struct { + SelectExpr *Expr_Select `protobuf:"bytes,5,opt,name=select_expr,json=selectExpr,proto3,oneof"` +} + +type Expr_CallExpr struct { + CallExpr *Expr_Call `protobuf:"bytes,6,opt,name=call_expr,json=callExpr,proto3,oneof"` +} + +type Expr_ListExpr struct { + ListExpr *Expr_CreateList `protobuf:"bytes,7,opt,name=list_expr,json=listExpr,proto3,oneof"` +} + +type Expr_StructExpr struct { + StructExpr *Expr_CreateStruct `protobuf:"bytes,8,opt,name=struct_expr,json=structExpr,proto3,oneof"` +} + +type Expr_ComprehensionExpr struct { + ComprehensionExpr *Expr_Comprehension `protobuf:"bytes,9,opt,name=comprehension_expr,json=comprehensionExpr,proto3,oneof"` +} + +func (*Expr_LiteralExpr) isExpr_ExprKind() {} + +func (*Expr_IdentExpr) isExpr_ExprKind() {} + +func (*Expr_SelectExpr) isExpr_ExprKind() {} + +func (*Expr_CallExpr) isExpr_ExprKind() {} + +func (*Expr_ListExpr) isExpr_ExprKind() {} + +func (*Expr_StructExpr) isExpr_ExprKind() {} + +func (*Expr_ComprehensionExpr) isExpr_ExprKind() {} + +func (m *Expr) GetExprKind() isExpr_ExprKind { + if m != nil { + return m.ExprKind + } + return nil +} + +func (m *Expr) GetLiteralExpr() *Literal { + if x, ok := m.GetExprKind().(*Expr_LiteralExpr); ok { + return x.LiteralExpr + } + return nil +} + +func (m *Expr) GetIdentExpr() *Expr_Ident { + if x, ok := m.GetExprKind().(*Expr_IdentExpr); ok { + return x.IdentExpr + } + return nil +} + +func (m *Expr) GetSelectExpr() *Expr_Select { + if x, ok := m.GetExprKind().(*Expr_SelectExpr); ok { + return x.SelectExpr + } + return nil +} + +func (m *Expr) GetCallExpr() *Expr_Call { + if x, ok := m.GetExprKind().(*Expr_CallExpr); ok { + return x.CallExpr + } + return nil +} + +func (m *Expr) GetListExpr() *Expr_CreateList { + if x, ok := m.GetExprKind().(*Expr_ListExpr); ok { + return x.ListExpr + } + return nil +} + +func (m *Expr) GetStructExpr() *Expr_CreateStruct { + if x, ok := m.GetExprKind().(*Expr_StructExpr); ok { + return 
x.StructExpr + } + return nil +} + +func (m *Expr) GetComprehensionExpr() *Expr_Comprehension { + if x, ok := m.GetExprKind().(*Expr_ComprehensionExpr); ok { + return x.ComprehensionExpr + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Expr) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Expr_OneofMarshaler, _Expr_OneofUnmarshaler, _Expr_OneofSizer, []interface{}{ + (*Expr_LiteralExpr)(nil), + (*Expr_IdentExpr)(nil), + (*Expr_SelectExpr)(nil), + (*Expr_CallExpr)(nil), + (*Expr_ListExpr)(nil), + (*Expr_StructExpr)(nil), + (*Expr_ComprehensionExpr)(nil), + } +} + +func _Expr_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Expr) + // expr_kind + switch x := m.ExprKind.(type) { + case *Expr_LiteralExpr: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LiteralExpr); err != nil { + return err + } + case *Expr_IdentExpr: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IdentExpr); err != nil { + return err + } + case *Expr_SelectExpr: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SelectExpr); err != nil { + return err + } + case *Expr_CallExpr: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CallExpr); err != nil { + return err + } + case *Expr_ListExpr: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListExpr); err != nil { + return err + } + case *Expr_StructExpr: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructExpr); err != nil { + return err + } + case *Expr_ComprehensionExpr: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ComprehensionExpr); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Expr.ExprKind has unexpected type %T", x) + } + return nil +} + +func _Expr_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Expr) + switch tag { + case 3: // expr_kind.literal_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Literal) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_LiteralExpr{msg} + return true, err + case 4: // expr_kind.ident_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Ident) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_IdentExpr{msg} + return true, err + case 5: // expr_kind.select_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Select) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_SelectExpr{msg} + return true, err + case 6: // expr_kind.call_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_Call) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_CallExpr{msg} + return true, err + case 7: // expr_kind.list_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_CreateList) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_ListExpr{msg} + return true, err + case 8: // expr_kind.struct_expr + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr_CreateStruct) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_StructExpr{msg} + return true, err + case 9: // expr_kind.comprehension_expr + if wire != proto.WireBytes { + return 
true, proto.ErrInternalBadWireType + } + msg := new(Expr_Comprehension) + err := b.DecodeMessage(msg) + m.ExprKind = &Expr_ComprehensionExpr{msg} + return true, err + default: + return false, nil + } +} + +func _Expr_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Expr) + // expr_kind + switch x := m.ExprKind.(type) { + case *Expr_LiteralExpr: + s := proto.Size(x.LiteralExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_IdentExpr: + s := proto.Size(x.IdentExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_SelectExpr: + s := proto.Size(x.SelectExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_CallExpr: + s := proto.Size(x.CallExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_ListExpr: + s := proto.Size(x.ListExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_StructExpr: + s := proto.Size(x.StructExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Expr_ComprehensionExpr: + s := proto.Size(x.ComprehensionExpr) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An identifier expression. e.g. `request`. +type Expr_Ident struct { + // Required. Holds a single, unqualified identifier, possibly preceded by a + // '.'. + // + // Qualified names are represented by the + // [Expr.Select][google.api.expr.v1beta1.Expr.Select] expression. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Ident) Reset() { *m = Expr_Ident{} } +func (m *Expr_Ident) String() string { return proto.CompactTextString(m) } +func (*Expr_Ident) ProtoMessage() {} +func (*Expr_Ident) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 0} +} +func (m *Expr_Ident) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Ident.Unmarshal(m, b) +} +func (m *Expr_Ident) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Ident.Marshal(b, m, deterministic) +} +func (dst *Expr_Ident) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Ident.Merge(dst, src) +} +func (m *Expr_Ident) XXX_Size() int { + return xxx_messageInfo_Expr_Ident.Size(m) +} +func (m *Expr_Ident) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Ident.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Ident proto.InternalMessageInfo + +func (m *Expr_Ident) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A field selection expression. e.g. `request.auth`. +type Expr_Select struct { + // Required. The target of the selection expression. + // + // For example, in the select expression `request.auth`, the `request` + // portion of the expression is the `operand`. + Operand *Expr `protobuf:"bytes,1,opt,name=operand,proto3" json:"operand,omitempty"` + // Required. The name of the field to select. + // + // For example, in the select expression `request.auth`, the `auth` portion + // of the expression would be the `field`. + Field string `protobuf:"bytes,2,opt,name=field,proto3" json:"field,omitempty"` + // Whether the select is to be interpreted as a field presence test. + // + // This results from the macro `has(request.auth)`. 
+ TestOnly bool `protobuf:"varint,3,opt,name=test_only,json=testOnly,proto3" json:"test_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Select) Reset() { *m = Expr_Select{} } +func (m *Expr_Select) String() string { return proto.CompactTextString(m) } +func (*Expr_Select) ProtoMessage() {} +func (*Expr_Select) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 1} +} +func (m *Expr_Select) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Select.Unmarshal(m, b) +} +func (m *Expr_Select) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Select.Marshal(b, m, deterministic) +} +func (dst *Expr_Select) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Select.Merge(dst, src) +} +func (m *Expr_Select) XXX_Size() int { + return xxx_messageInfo_Expr_Select.Size(m) +} +func (m *Expr_Select) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Select.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Select proto.InternalMessageInfo + +func (m *Expr_Select) GetOperand() *Expr { + if m != nil { + return m.Operand + } + return nil +} + +func (m *Expr_Select) GetField() string { + if m != nil { + return m.Field + } + return "" +} + +func (m *Expr_Select) GetTestOnly() bool { + if m != nil { + return m.TestOnly + } + return false +} + +// A call expression, including calls to predefined functions and operators. +// +// For example, `value == 10`, `size(map_value)`. +type Expr_Call struct { + // The target of an method call-style expression. For example, `x` in + // `x.f()`. + Target *Expr `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + // Required. The name of the function or method being called. + Function string `protobuf:"bytes,2,opt,name=function,proto3" json:"function,omitempty"` + // The arguments. + Args []*Expr `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Call) Reset() { *m = Expr_Call{} } +func (m *Expr_Call) String() string { return proto.CompactTextString(m) } +func (*Expr_Call) ProtoMessage() {} +func (*Expr_Call) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 2} +} +func (m *Expr_Call) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Call.Unmarshal(m, b) +} +func (m *Expr_Call) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Call.Marshal(b, m, deterministic) +} +func (dst *Expr_Call) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Call.Merge(dst, src) +} +func (m *Expr_Call) XXX_Size() int { + return xxx_messageInfo_Expr_Call.Size(m) +} +func (m *Expr_Call) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Call.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Call proto.InternalMessageInfo + +func (m *Expr_Call) GetTarget() *Expr { + if m != nil { + return m.Target + } + return nil +} + +func (m *Expr_Call) GetFunction() string { + if m != nil { + return m.Function + } + return "" +} + +func (m *Expr_Call) GetArgs() []*Expr { + if m != nil { + return m.Args + } + return nil +} + +// A list creation expression. +// +// Lists may either be homogenous, e.g. `[1, 2, 3]`, or heterogenous, e.g. +// `dyn([1, 'hello', 2.0])` +type Expr_CreateList struct { + // The elements part of the list. 
+ Elements []*Expr `protobuf:"bytes,1,rep,name=elements,proto3" json:"elements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateList) Reset() { *m = Expr_CreateList{} } +func (m *Expr_CreateList) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateList) ProtoMessage() {} +func (*Expr_CreateList) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 3} +} +func (m *Expr_CreateList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateList.Unmarshal(m, b) +} +func (m *Expr_CreateList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateList.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateList) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateList.Merge(dst, src) +} +func (m *Expr_CreateList) XXX_Size() int { + return xxx_messageInfo_Expr_CreateList.Size(m) +} +func (m *Expr_CreateList) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateList.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateList proto.InternalMessageInfo + +func (m *Expr_CreateList) GetElements() []*Expr { + if m != nil { + return m.Elements + } + return nil +} + +// A map or message creation expression. +// +// Maps are constructed as `{'key_name': 'value'}`. Message construction is +// similar, but prefixed with a type name and composed of field ids: +// `types.MyType{field_id: 'value'}`. +type Expr_CreateStruct struct { + // The type name of the message to be created, empty when creating map + // literals. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The entries in the creation expression. + Entries []*Expr_CreateStruct_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateStruct) Reset() { *m = Expr_CreateStruct{} } +func (m *Expr_CreateStruct) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateStruct) ProtoMessage() {} +func (*Expr_CreateStruct) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 4} +} +func (m *Expr_CreateStruct) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateStruct.Unmarshal(m, b) +} +func (m *Expr_CreateStruct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateStruct.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateStruct) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateStruct.Merge(dst, src) +} +func (m *Expr_CreateStruct) XXX_Size() int { + return xxx_messageInfo_Expr_CreateStruct.Size(m) +} +func (m *Expr_CreateStruct) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateStruct.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateStruct proto.InternalMessageInfo + +func (m *Expr_CreateStruct) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Expr_CreateStruct) GetEntries() []*Expr_CreateStruct_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// Represents an entry. +type Expr_CreateStruct_Entry struct { + // Required. An id assigned to this node by the parser which is unique + // in a given expression tree. This is used to associate type + // information and other attributes to the node. 
+ Id int32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` + // The `Entry` key kinds. + // + // Types that are valid to be assigned to KeyKind: + // *Expr_CreateStruct_Entry_FieldKey + // *Expr_CreateStruct_Entry_MapKey + KeyKind isExpr_CreateStruct_Entry_KeyKind `protobuf_oneof:"key_kind"` + // Required. The value assigned to the key. + Value *Expr `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_CreateStruct_Entry) Reset() { *m = Expr_CreateStruct_Entry{} } +func (m *Expr_CreateStruct_Entry) String() string { return proto.CompactTextString(m) } +func (*Expr_CreateStruct_Entry) ProtoMessage() {} +func (*Expr_CreateStruct_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 4, 0} +} +func (m *Expr_CreateStruct_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_CreateStruct_Entry.Unmarshal(m, b) +} +func (m *Expr_CreateStruct_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_CreateStruct_Entry.Marshal(b, m, deterministic) +} +func (dst *Expr_CreateStruct_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_CreateStruct_Entry.Merge(dst, src) +} +func (m *Expr_CreateStruct_Entry) XXX_Size() int { + return xxx_messageInfo_Expr_CreateStruct_Entry.Size(m) +} +func (m *Expr_CreateStruct_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_CreateStruct_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_CreateStruct_Entry proto.InternalMessageInfo + +func (m *Expr_CreateStruct_Entry) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +type isExpr_CreateStruct_Entry_KeyKind interface { + isExpr_CreateStruct_Entry_KeyKind() +} + +type Expr_CreateStruct_Entry_FieldKey struct { + FieldKey string `protobuf:"bytes,2,opt,name=field_key,json=fieldKey,proto3,oneof"` +} + +type Expr_CreateStruct_Entry_MapKey struct { + MapKey *Expr `protobuf:"bytes,3,opt,name=map_key,json=mapKey,proto3,oneof"` +} + +func (*Expr_CreateStruct_Entry_FieldKey) isExpr_CreateStruct_Entry_KeyKind() {} + +func (*Expr_CreateStruct_Entry_MapKey) isExpr_CreateStruct_Entry_KeyKind() {} + +func (m *Expr_CreateStruct_Entry) GetKeyKind() isExpr_CreateStruct_Entry_KeyKind { + if m != nil { + return m.KeyKind + } + return nil +} + +func (m *Expr_CreateStruct_Entry) GetFieldKey() string { + if x, ok := m.GetKeyKind().(*Expr_CreateStruct_Entry_FieldKey); ok { + return x.FieldKey + } + return "" +} + +func (m *Expr_CreateStruct_Entry) GetMapKey() *Expr { + if x, ok := m.GetKeyKind().(*Expr_CreateStruct_Entry_MapKey); ok { + return x.MapKey + } + return nil +} + +func (m *Expr_CreateStruct_Entry) GetValue() *Expr { + if m != nil { + return m.Value + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Expr_CreateStruct_Entry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Expr_CreateStruct_Entry_OneofMarshaler, _Expr_CreateStruct_Entry_OneofUnmarshaler, _Expr_CreateStruct_Entry_OneofSizer, []interface{}{ + (*Expr_CreateStruct_Entry_FieldKey)(nil), + (*Expr_CreateStruct_Entry_MapKey)(nil), + } +} + +func _Expr_CreateStruct_Entry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Expr_CreateStruct_Entry) + // key_kind + switch x := m.KeyKind.(type) { + case *Expr_CreateStruct_Entry_FieldKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FieldKey) + case *Expr_CreateStruct_Entry_MapKey: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Expr_CreateStruct_Entry.KeyKind has unexpected type %T", x) + } + return nil +} + +func _Expr_CreateStruct_Entry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Expr_CreateStruct_Entry) + switch tag { + case 2: // key_kind.field_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.KeyKind = &Expr_CreateStruct_Entry_FieldKey{x} + return true, err + case 3: // key_kind.map_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Expr) + err := b.DecodeMessage(msg) + m.KeyKind = &Expr_CreateStruct_Entry_MapKey{msg} + return true, err + default: + return false, nil + } +} + +func _Expr_CreateStruct_Entry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Expr_CreateStruct_Entry) + // key_kind + switch x := m.KeyKind.(type) { + case *Expr_CreateStruct_Entry_FieldKey: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FieldKey))) + n += len(x.FieldKey) + case *Expr_CreateStruct_Entry_MapKey: + s := proto.Size(x.MapKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A comprehension expression applied to a list or map. +// +// Comprehensions are not part of the core syntax, but enabled with macros. +// A macro matches a specific call signature within a parsed AST and replaces +// the call with an alternate AST block. Macro expansion happens at parse +// time. +// +// The following macros are supported within CEL: +// +// Aggregate type macros may be applied to all elements in a list or all keys +// in a map: +// +// * `all`, `exists`, `exists_one` - test a predicate expression against +// the inputs and return `true` if the predicate is satisfied for all, +// any, or only one value `list.all(x, x < 10)`. +// * `filter` - test a predicate expression against the inputs and return +// the subset of elements which satisfy the predicate: +// `payments.filter(p, p > 1000)`. +// * `map` - apply an expression to all elements in the input and return the +// output aggregate type: `[1, 2, 3].map(i, i * i)`. +// +// The `has(m.x)` macro tests whether the property `x` is present in struct +// `m`. The semantics of this macro depend on the type of `m`. For proto2 +// messages `has(m.x)` is defined as 'defined, but not set`. For proto3, the +// macro tests whether the property is set to its default. For map and struct +// types, the macro tests whether the property `x` is defined on `m`. 
+type Expr_Comprehension struct { + // The name of the iteration variable. + IterVar string `protobuf:"bytes,1,opt,name=iter_var,json=iterVar,proto3" json:"iter_var,omitempty"` + // The range over which var iterates. + IterRange *Expr `protobuf:"bytes,2,opt,name=iter_range,json=iterRange,proto3" json:"iter_range,omitempty"` + // The name of the variable used for accumulation of the result. + AccuVar string `protobuf:"bytes,3,opt,name=accu_var,json=accuVar,proto3" json:"accu_var,omitempty"` + // The initial value of the accumulator. + AccuInit *Expr `protobuf:"bytes,4,opt,name=accu_init,json=accuInit,proto3" json:"accu_init,omitempty"` + // An expression which can contain iter_var and accu_var. + // + // Returns false when the result has been computed and may be used as + // a hint to short-circuit the remainder of the comprehension. + LoopCondition *Expr `protobuf:"bytes,5,opt,name=loop_condition,json=loopCondition,proto3" json:"loop_condition,omitempty"` + // An expression which can contain iter_var and accu_var. + // + // Computes the next value of accu_var. + LoopStep *Expr `protobuf:"bytes,6,opt,name=loop_step,json=loopStep,proto3" json:"loop_step,omitempty"` + // An expression which can contain accu_var. + // + // Computes the result. + Result *Expr `protobuf:"bytes,7,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr_Comprehension) Reset() { *m = Expr_Comprehension{} } +func (m *Expr_Comprehension) String() string { return proto.CompactTextString(m) } +func (*Expr_Comprehension) ProtoMessage() {} +func (*Expr_Comprehension) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{1, 5} +} +func (m *Expr_Comprehension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr_Comprehension.Unmarshal(m, b) +} +func (m *Expr_Comprehension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr_Comprehension.Marshal(b, m, deterministic) +} +func (dst *Expr_Comprehension) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr_Comprehension.Merge(dst, src) +} +func (m *Expr_Comprehension) XXX_Size() int { + return xxx_messageInfo_Expr_Comprehension.Size(m) +} +func (m *Expr_Comprehension) XXX_DiscardUnknown() { + xxx_messageInfo_Expr_Comprehension.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr_Comprehension proto.InternalMessageInfo + +func (m *Expr_Comprehension) GetIterVar() string { + if m != nil { + return m.IterVar + } + return "" +} + +func (m *Expr_Comprehension) GetIterRange() *Expr { + if m != nil { + return m.IterRange + } + return nil +} + +func (m *Expr_Comprehension) GetAccuVar() string { + if m != nil { + return m.AccuVar + } + return "" +} + +func (m *Expr_Comprehension) GetAccuInit() *Expr { + if m != nil { + return m.AccuInit + } + return nil +} + +func (m *Expr_Comprehension) GetLoopCondition() *Expr { + if m != nil { + return m.LoopCondition + } + return nil +} + +func (m *Expr_Comprehension) GetLoopStep() *Expr { + if m != nil { + return m.LoopStep + } + return nil +} + +func (m *Expr_Comprehension) GetResult() *Expr { + if m != nil { + return m.Result + } + return nil +} + +// Represents a primitive literal. +// +// This is similar to the primitives supported in the well-known type +// `google.protobuf.Value`, but richer so it can represent CEL's full range of +// primitives. 
+// +// Lists and structs are not included as constants as these aggregate types may +// contain [Expr][google.api.expr.v1beta1.Expr] elements which require +// evaluation and are thus not constant. +// +// Examples of literals include: `"hello"`, `b'bytes'`, `1u`, `4.2`, `-2`, +// `true`, `null`. +type Literal struct { + // Required. The valid constant kinds. + // + // Types that are valid to be assigned to ConstantKind: + // *Literal_NullValue + // *Literal_BoolValue + // *Literal_Int64Value + // *Literal_Uint64Value + // *Literal_DoubleValue + // *Literal_StringValue + // *Literal_BytesValue + ConstantKind isLiteral_ConstantKind `protobuf_oneof:"constant_kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Literal) Reset() { *m = Literal{} } +func (m *Literal) String() string { return proto.CompactTextString(m) } +func (*Literal) ProtoMessage() {} +func (*Literal) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_53c57acc058f7369, []int{2} +} +func (m *Literal) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Literal.Unmarshal(m, b) +} +func (m *Literal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Literal.Marshal(b, m, deterministic) +} +func (dst *Literal) XXX_Merge(src proto.Message) { + xxx_messageInfo_Literal.Merge(dst, src) +} +func (m *Literal) XXX_Size() int { + return xxx_messageInfo_Literal.Size(m) +} +func (m *Literal) XXX_DiscardUnknown() { + xxx_messageInfo_Literal.DiscardUnknown(m) +} + +var xxx_messageInfo_Literal proto.InternalMessageInfo + +type isLiteral_ConstantKind interface { + isLiteral_ConstantKind() +} + +type Literal_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Literal_BoolValue struct { + BoolValue bool `protobuf:"varint,2,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type Literal_Int64Value struct { + Int64Value int64 `protobuf:"varint,3,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type Literal_Uint64Value struct { + Uint64Value uint64 `protobuf:"varint,4,opt,name=uint64_value,json=uint64Value,proto3,oneof"` +} + +type Literal_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,5,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Literal_StringValue struct { + StringValue string `protobuf:"bytes,6,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Literal_BytesValue struct { + BytesValue []byte `protobuf:"bytes,7,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +func (*Literal_NullValue) isLiteral_ConstantKind() {} + +func (*Literal_BoolValue) isLiteral_ConstantKind() {} + +func (*Literal_Int64Value) isLiteral_ConstantKind() {} + +func (*Literal_Uint64Value) isLiteral_ConstantKind() {} + +func (*Literal_DoubleValue) isLiteral_ConstantKind() {} + +func (*Literal_StringValue) isLiteral_ConstantKind() {} + +func (*Literal_BytesValue) isLiteral_ConstantKind() {} + +func (m *Literal) GetConstantKind() isLiteral_ConstantKind { + if m != nil { + return m.ConstantKind + } + return nil +} + +func (m *Literal) GetNullValue() _struct.NullValue { + if x, ok := m.GetConstantKind().(*Literal_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Literal) GetBoolValue() bool { + if x, ok := m.GetConstantKind().(*Literal_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *Literal) GetInt64Value() 
int64 { + if x, ok := m.GetConstantKind().(*Literal_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *Literal) GetUint64Value() uint64 { + if x, ok := m.GetConstantKind().(*Literal_Uint64Value); ok { + return x.Uint64Value + } + return 0 +} + +func (m *Literal) GetDoubleValue() float64 { + if x, ok := m.GetConstantKind().(*Literal_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Literal) GetStringValue() string { + if x, ok := m.GetConstantKind().(*Literal_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Literal) GetBytesValue() []byte { + if x, ok := m.GetConstantKind().(*Literal_BytesValue); ok { + return x.BytesValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Literal) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Literal_OneofMarshaler, _Literal_OneofUnmarshaler, _Literal_OneofSizer, []interface{}{ + (*Literal_NullValue)(nil), + (*Literal_BoolValue)(nil), + (*Literal_Int64Value)(nil), + (*Literal_Uint64Value)(nil), + (*Literal_DoubleValue)(nil), + (*Literal_StringValue)(nil), + (*Literal_BytesValue)(nil), + } +} + +func _Literal_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Literal) + // constant_kind + switch x := m.ConstantKind.(type) { + case *Literal_NullValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Literal_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Literal_Int64Value: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *Literal_Uint64Value: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Uint64Value)) + case *Literal_DoubleValue: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Literal_StringValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Literal_BytesValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case nil: + default: + return fmt.Errorf("Literal.ConstantKind has unexpected type %T", x) + } + return nil +} + +func _Literal_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Literal) + switch tag { + case 1: // constant_kind.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Literal_NullValue{_struct.NullValue(x)} + return true, err + case 2: // constant_kind.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Literal_BoolValue{x != 0} + return true, err + case 3: // constant_kind.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Literal_Int64Value{int64(x)} + return true, err + case 4: // constant_kind.uint64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConstantKind = &Literal_Uint64Value{x} + return true, err + case 5: // constant_kind.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ConstantKind = 
&Literal_DoubleValue{math.Float64frombits(x)} + return true, err + case 6: // constant_kind.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ConstantKind = &Literal_StringValue{x} + return true, err + case 7: // constant_kind.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConstantKind = &Literal_BytesValue{x} + return true, err + default: + return false, nil + } +} + +func _Literal_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Literal) + // constant_kind + switch x := m.ConstantKind.(type) { + case *Literal_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Literal_BoolValue: + n += 1 // tag and wire + n += 1 + case *Literal_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *Literal_Uint64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Uint64Value)) + case *Literal_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Literal_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Literal_BytesValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ParsedExpr)(nil), "google.api.expr.v1beta1.ParsedExpr") + proto.RegisterType((*Expr)(nil), "google.api.expr.v1beta1.Expr") + proto.RegisterType((*Expr_Ident)(nil), "google.api.expr.v1beta1.Expr.Ident") + proto.RegisterType((*Expr_Select)(nil), "google.api.expr.v1beta1.Expr.Select") + proto.RegisterType((*Expr_Call)(nil), "google.api.expr.v1beta1.Expr.Call") + proto.RegisterType((*Expr_CreateList)(nil), "google.api.expr.v1beta1.Expr.CreateList") + proto.RegisterType((*Expr_CreateStruct)(nil), "google.api.expr.v1beta1.Expr.CreateStruct") + proto.RegisterType((*Expr_CreateStruct_Entry)(nil), "google.api.expr.v1beta1.Expr.CreateStruct.Entry") + proto.RegisterType((*Expr_Comprehension)(nil), "google.api.expr.v1beta1.Expr.Comprehension") + proto.RegisterType((*Literal)(nil), "google.api.expr.v1beta1.Literal") +} + +func init() { + proto.RegisterFile("google/api/expr/v1beta1/expr.proto", fileDescriptor_expr_53c57acc058f7369) +} + +var fileDescriptor_expr_53c57acc058f7369 = []byte{ + // 936 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x96, 0x6f, 0x6f, 0x23, 0xb5, + 0x13, 0xc7, 0xbb, 0xf9, 0xbb, 0x3b, 0x69, 0xfb, 0xd3, 0xcf, 0x42, 0x22, 0x6c, 0x39, 0x51, 0x7a, + 0x87, 0x54, 0x81, 0x94, 0xd0, 0x3b, 0xfe, 0x97, 0x27, 0xf4, 0xae, 0xba, 0x14, 0x0e, 0xa8, 0xb6, + 0x52, 0x1f, 0x20, 0xa4, 0xc8, 0xd9, 0x38, 0x8b, 0xa9, 0x63, 0xaf, 0xbc, 0xde, 0xaa, 0x79, 0x0f, + 0xbc, 0x0a, 0x1e, 0xc3, 0x03, 0x5e, 0x00, 0xef, 0xeb, 0x1e, 0xa2, 0x19, 0x3b, 0xa1, 0x15, 0xca, + 0x25, 0xcf, 0xd6, 0xe3, 0xcf, 0x7c, 0x3d, 0x9e, 0x19, 0x4f, 0x02, 0x47, 0x85, 0x31, 0x85, 0x12, + 0x43, 0x5e, 0xca, 0xa1, 0xb8, 0x2b, 0xed, 0xf0, 0xf6, 0x64, 0x22, 0x1c, 0x3f, 0xa1, 0xc5, 0xa0, + 0xb4, 0xc6, 0x19, 0xf6, 0xb6, 0x67, 0x06, 0xbc, 0x94, 0x03, 0x32, 0x07, 0x26, 0x7d, 0xb2, 0xce, + 0xb9, 0x32, 0xb5, 0xcd, 0x85, 0x77, 0x4f, 0xdf, 0x0d, 0x14, 0xad, 0x26, 0xf5, 0x6c, 0x58, 0x39, + 0x5b, 0xe7, 0xce, 0xef, 0x1e, 0xfd, 0x11, 0x01, 0x5c, 0x72, 0x5b, 0x89, 0xe9, 0xf9, 0x5d, 0x69, + 0xd9, 0x09, 0xb4, 0x50, 0xa9, 0xdf, 0x38, 
0x8c, 0x8e, 0x7b, 0x4f, 0x1f, 0x0d, 0xd6, 0x1c, 0x3d, + 0x40, 0x38, 0x23, 0x94, 0xbd, 0x80, 0x9e, 0x3f, 0x6f, 0x2c, 0xf5, 0xcc, 0xf4, 0x9b, 0xe4, 0xf9, + 0x78, 0xad, 0xe7, 0x15, 0xb1, 0x17, 0x7a, 0x66, 0x32, 0xa8, 0x56, 0xdf, 0xec, 0x03, 0xd8, 0xaf, + 0x16, 0xda, 0xf1, 0xbb, 0xf1, 0xad, 0xb0, 0x95, 0x34, 0xba, 0xdf, 0x3a, 0x8c, 0x8e, 0x93, 0x6c, + 0xcf, 0x5b, 0xaf, 0xbd, 0xf1, 0xe8, 0xef, 0x5d, 0x68, 0x51, 0xa0, 0xfb, 0xd0, 0x90, 0x53, 0x0a, + 0xb3, 0x9d, 0x35, 0xe4, 0x94, 0x9d, 0xc3, 0xae, 0x92, 0x4e, 0x58, 0xae, 0xc6, 0x74, 0x01, 0x1f, + 0xc6, 0xe1, 0xda, 0x30, 0x5e, 0x79, 0x78, 0xb4, 0x93, 0xf5, 0x82, 0xdf, 0xb9, 0xbf, 0x0c, 0xc8, + 0xa9, 0xd0, 0xce, 0x8b, 0xb4, 0x36, 0xdc, 0x05, 0x5d, 0x06, 0x17, 0xc8, 0x8f, 0x76, 0xb2, 0x84, + 0x1c, 0x49, 0xe5, 0x25, 0xf4, 0x2a, 0xa1, 0x44, 0x1e, 0x64, 0xda, 0x24, 0xf3, 0xe4, 0xcd, 0x32, + 0x57, 0xe4, 0x30, 0xda, 0xc9, 0xc0, 0xbb, 0x92, 0xd0, 0x37, 0x90, 0xe4, 0x5c, 0x85, 0x2b, 0x75, + 0x48, 0xe6, 0xe8, 0xcd, 0x32, 0xcf, 0xb9, 0xc2, 0x4b, 0xc5, 0xe8, 0x16, 0x62, 0x49, 0x94, 0xac, + 0x42, 0x24, 0x5d, 0x92, 0x38, 0xde, 0x20, 0x61, 0x05, 0x77, 0xe2, 0x95, 0xac, 0x30, 0x9a, 0x18, + 0x9d, 0x49, 0xe8, 0x7b, 0xe8, 0xf9, 0xce, 0xf1, 0x52, 0x31, 0x49, 0x7d, 0xb8, 0x8d, 0xd4, 0x15, + 0xb9, 0xd1, 0xd5, 0xe8, 0x8b, 0xe4, 0x7e, 0x06, 0x96, 0x9b, 0x79, 0x69, 0xc5, 0x2f, 0x42, 0x63, + 0x69, 0xbd, 0x6a, 0x42, 0xaa, 0x1f, 0x6d, 0x50, 0xbd, 0xef, 0x37, 0xda, 0xc9, 0xfe, 0xff, 0x40, + 0x08, 0x91, 0xf4, 0x00, 0xda, 0x54, 0x17, 0xc6, 0xa0, 0xa5, 0xf9, 0x5c, 0xf4, 0x23, 0xea, 0x26, + 0xfa, 0x4e, 0x1d, 0x74, 0x7c, 0xb6, 0xd9, 0xe7, 0xd0, 0x35, 0xa5, 0xb0, 0x5c, 0x4f, 0x09, 0xd8, + 0xd8, 0xf1, 0x4b, 0x9a, 0xbd, 0x05, 0xed, 0x99, 0x14, 0xca, 0x77, 0x60, 0x92, 0xf9, 0x05, 0x3b, + 0x80, 0xc4, 0x89, 0xca, 0x8d, 0x8d, 0x56, 0x0b, 0xea, 0xc0, 0x38, 0x8b, 0xd1, 0xf0, 0xa3, 0x56, + 0x8b, 0xf4, 0xb7, 0x08, 0x5a, 0x58, 0x1d, 0xf6, 0x29, 0x74, 0x1c, 0xb7, 0x85, 0x70, 0xdb, 0x9d, + 0x19, 0x60, 0x96, 0x42, 0x3c, 0xab, 0x75, 0xee, 0xf0, 0x6d, 0xf8, 0x53, 0x57, 0x6b, 0x7c, 0xb6, + 0xdc, 0x16, 0x55, 0xbf, 0x79, 0xd8, 0xdc, 0xe2, 0xd9, 0x22, 0x9a, 0xbe, 0x04, 0xf8, 0xb7, 0xd0, + 0xec, 0x4b, 0x88, 0x85, 0x12, 0x73, 0xa1, 0x5d, 0xd5, 0x8f, 0xb6, 0x11, 0x59, 0xe1, 0xe9, 0xef, + 0x0d, 0xd8, 0xbd, 0x5f, 0x67, 0x4c, 0xb9, 0x5b, 0x94, 0xab, 0x94, 0xe3, 0x37, 0xfb, 0x16, 0xba, + 0x42, 0x3b, 0x2b, 0x45, 0xd5, 0x6f, 0x90, 0xfc, 0xc7, 0xdb, 0x37, 0xce, 0xe0, 0x5c, 0x3b, 0xbb, + 0xc8, 0x96, 0x02, 0xe9, 0x5f, 0x11, 0xb4, 0xc9, 0x14, 0x86, 0x40, 0xb4, 0x1a, 0x02, 0x8f, 0x20, + 0xa1, 0x42, 0x8c, 0x6f, 0xc4, 0xc2, 0xe7, 0x08, 0x3b, 0x98, 0x4c, 0xdf, 0x89, 0x05, 0xfb, 0x02, + 0xba, 0x73, 0x5e, 0xd2, 0x66, 0x73, 0x8b, 0xcc, 0x8f, 0x76, 0xb2, 0xce, 0x9c, 0x97, 0xe8, 0xf9, + 0x0c, 0xda, 0xb7, 0x5c, 0xd5, 0x22, 0x4c, 0x84, 0x0d, 0xb9, 0xf1, 0xec, 0x19, 0x40, 0x7c, 0x23, + 0x16, 0xe3, 0x1b, 0xa9, 0xa7, 0xe9, 0xeb, 0x06, 0xec, 0x3d, 0x68, 0x5b, 0xf6, 0x0e, 0xc4, 0x38, + 0x77, 0xc6, 0xb7, 0xdc, 0x86, 0x4c, 0x75, 0x71, 0x7d, 0xcd, 0x2d, 0xfb, 0x1a, 0x80, 0xb6, 0x2c, + 0xd7, 0x85, 0xd8, 0x6e, 0x14, 0x27, 0xe8, 0x90, 0x21, 0x8f, 0xc2, 0x3c, 0xcf, 0x6b, 0x12, 0x6e, + 0x7a, 0x61, 0x5c, 0xa3, 0xf0, 0x57, 0x90, 0xd0, 0x96, 0xd4, 0xd2, 0x6d, 0x77, 0x15, 0x92, 0xba, + 0xd0, 0xd2, 0xb1, 0x17, 0xb0, 0xaf, 0x8c, 0x29, 0xc7, 0xb9, 0xd1, 0x53, 0x49, 0x4d, 0xd8, 0xde, + 0x46, 0x60, 0x0f, 0x9d, 0x9e, 0x2f, 0x7d, 0x30, 0x02, 0x52, 0xa9, 0x9c, 0x28, 0xc3, 0x40, 0xdb, + 0x14, 0x01, 0xf2, 0x57, 0x4e, 0x94, 0xf8, 0x6e, 0xac, 0xa8, 0x6a, 0xe5, 0xc2, 0x18, 0xdb, 0xf4, + 0x6e, 0x3c, 0x7c, 0xd6, 0x83, 0x04, 0x37, 0xa9, 0x0e, 0x47, 0x7f, 
0x36, 0xa0, 0x1b, 0x46, 0x3f, + 0x3b, 0x05, 0xd0, 0xb5, 0x52, 0x63, 0x5f, 0x59, 0xac, 0xc1, 0xfe, 0xd3, 0x74, 0xa9, 0xb9, 0xfc, + 0xb5, 0x1c, 0xfc, 0x50, 0x2b, 0x75, 0x8d, 0x04, 0x8e, 0x78, 0xbd, 0x5c, 0xb0, 0xf7, 0x00, 0x26, + 0xc6, 0x2c, 0x9d, 0xb1, 0x46, 0x31, 0x02, 0x68, 0xf3, 0xc0, 0xfb, 0xd0, 0x93, 0xda, 0x7d, 0xf6, + 0x49, 0x20, 0xb0, 0x12, 0x4d, 0x1c, 0x81, 0x64, 0xf4, 0xc8, 0x63, 0xd8, 0xad, 0xef, 0x33, 0x58, + 0x91, 0x16, 0xfe, 0x22, 0xd5, 0x0f, 0xa1, 0xa9, 0xa9, 0x27, 0x4a, 0x04, 0x08, 0xb3, 0x1e, 0x21, + 0xe4, 0xad, 0x2b, 0xa8, 0x72, 0x56, 0xea, 0x22, 0x40, 0x9d, 0xd0, 0xfb, 0x3d, 0x6f, 0x5d, 0x45, + 0x34, 0x59, 0x38, 0x51, 0x05, 0x06, 0x93, 0xb8, 0x8b, 0x11, 0x91, 0x91, 0x90, 0xb3, 0xff, 0xc1, + 0x5e, 0x6e, 0x74, 0xe5, 0xb8, 0x76, 0x94, 0xaf, 0xb3, 0x5f, 0xe1, 0x20, 0x37, 0xf3, 0x75, 0x89, + 0x3e, 0x4b, 0x30, 0xd3, 0x97, 0x98, 0xaa, 0xcb, 0xe8, 0xa7, 0xd3, 0x40, 0x15, 0x46, 0x71, 0x5d, + 0x0c, 0x8c, 0x2d, 0x86, 0x85, 0xd0, 0x94, 0xc8, 0xa1, 0xdf, 0xe2, 0xa5, 0xac, 0xfe, 0xf3, 0x6f, + 0xe5, 0x14, 0x17, 0xaf, 0xa3, 0x68, 0xd2, 0x21, 0xf4, 0xd9, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0xcb, 0xa2, 0x13, 0x9a, 0x14, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/source.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/source.pb.go new file mode 100644 index 0000000..8d25220 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/source.pb.go @@ -0,0 +1,189 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1beta1/source.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Source information collected at parse time. +type SourceInfo struct { + // The location name. All position information attached to an expression is + // relative to this location. + // + // The location could be a file, UI element, or similar. For example, + // `acme/app/AnvilPolicy.cel`. + Location string `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // Monotonically increasing list of character offsets where newlines appear. + // + // The line number of a given position is the index `i` where for a given + // `id` the `line_offsets[i] < id_positions[id] < line_offsets[i+1]`. The + // column may be derivd from `id_positions[id] - line_offsets[i]`. + LineOffsets []int32 `protobuf:"varint,3,rep,packed,name=line_offsets,json=lineOffsets,proto3" json:"line_offsets,omitempty"` + // A map from the parse node id (e.g. `Expr.id`) to the character offset + // within source. 
+ Positions map[int32]int32 `protobuf:"bytes,4,rep,name=positions,proto3" json:"positions,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceInfo) Reset() { *m = SourceInfo{} } +func (m *SourceInfo) String() string { return proto.CompactTextString(m) } +func (*SourceInfo) ProtoMessage() {} +func (*SourceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_source_9a101adf77e26b45, []int{0} +} +func (m *SourceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceInfo.Unmarshal(m, b) +} +func (m *SourceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceInfo.Marshal(b, m, deterministic) +} +func (dst *SourceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceInfo.Merge(dst, src) +} +func (m *SourceInfo) XXX_Size() int { + return xxx_messageInfo_SourceInfo.Size(m) +} +func (m *SourceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceInfo proto.InternalMessageInfo + +func (m *SourceInfo) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *SourceInfo) GetLineOffsets() []int32 { + if m != nil { + return m.LineOffsets + } + return nil +} + +func (m *SourceInfo) GetPositions() map[int32]int32 { + if m != nil { + return m.Positions + } + return nil +} + +// A specific position in source. +type SourcePosition struct { + // The soucre location name (e.g. file name). + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // The character offset. + Offset int32 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` + // The 1-based index of the starting line in the source text + // where the issue occurs, or 0 if unknown. + Line int32 `protobuf:"varint,3,opt,name=line,proto3" json:"line,omitempty"` + // The 0-based index of the starting position within the line of source text + // where the issue occurs. Only meaningful if line is nonzer.. 
+ Column int32 `protobuf:"varint,4,opt,name=column,proto3" json:"column,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourcePosition) Reset() { *m = SourcePosition{} } +func (m *SourcePosition) String() string { return proto.CompactTextString(m) } +func (*SourcePosition) ProtoMessage() {} +func (*SourcePosition) Descriptor() ([]byte, []int) { + return fileDescriptor_source_9a101adf77e26b45, []int{1} +} +func (m *SourcePosition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourcePosition.Unmarshal(m, b) +} +func (m *SourcePosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourcePosition.Marshal(b, m, deterministic) +} +func (dst *SourcePosition) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourcePosition.Merge(dst, src) +} +func (m *SourcePosition) XXX_Size() int { + return xxx_messageInfo_SourcePosition.Size(m) +} +func (m *SourcePosition) XXX_DiscardUnknown() { + xxx_messageInfo_SourcePosition.DiscardUnknown(m) +} + +var xxx_messageInfo_SourcePosition proto.InternalMessageInfo + +func (m *SourcePosition) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *SourcePosition) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *SourcePosition) GetLine() int32 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *SourcePosition) GetColumn() int32 { + if m != nil { + return m.Column + } + return 0 +} + +func init() { + proto.RegisterType((*SourceInfo)(nil), "google.api.expr.v1beta1.SourceInfo") + proto.RegisterMapType((map[int32]int32)(nil), "google.api.expr.v1beta1.SourceInfo.PositionsEntry") + proto.RegisterType((*SourcePosition)(nil), "google.api.expr.v1beta1.SourcePosition") +} + +func init() { + proto.RegisterFile("google/api/expr/v1beta1/source.proto", fileDescriptor_source_9a101adf77e26b45) +} + +var fileDescriptor_source_9a101adf77e26b45 = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x41, 0x4b, 0x3b, 0x31, + 0x10, 0xc5, 0x49, 0xb7, 0x5b, 0xfe, 0x9d, 0xfe, 0x29, 0x12, 0x44, 0x97, 0x7a, 0x59, 0x8b, 0x87, + 0x3d, 0x65, 0x69, 0xbd, 0x88, 0xf5, 0x54, 0xf0, 0xe0, 0xc9, 0xb2, 0xde, 0xbc, 0x48, 0xba, 0xa4, + 0x4b, 0x30, 0xcd, 0x84, 0xcd, 0xb6, 0xd8, 0xcf, 0xea, 0x17, 0xf1, 0x28, 0x49, 0xb6, 0x96, 0x2a, + 0xbd, 0xe5, 0xcd, 0xfc, 0x32, 0x33, 0x8f, 0x07, 0x37, 0x15, 0x62, 0xa5, 0x44, 0xce, 0x8d, 0xcc, + 0xc5, 0x87, 0xa9, 0xf3, 0xed, 0x64, 0x29, 0x1a, 0x3e, 0xc9, 0x2d, 0x6e, 0xea, 0x52, 0x30, 0x53, + 0x63, 0x83, 0xf4, 0x32, 0x50, 0x8c, 0x1b, 0xc9, 0x1c, 0xc5, 0x5a, 0x6a, 0xfc, 0x49, 0x00, 0x5e, + 0x3c, 0xf9, 0xa4, 0x57, 0x48, 0x47, 0xf0, 0x4f, 0x61, 0xc9, 0x1b, 0x89, 0x3a, 0xe9, 0xa4, 0x24, + 0xeb, 0x17, 0x3f, 0x9a, 0x5e, 0xc3, 0x7f, 0x25, 0xb5, 0x78, 0xc3, 0xd5, 0xca, 0x8a, 0xc6, 0x26, + 0x51, 0x1a, 0x65, 0x71, 0x31, 0x70, 0xb5, 0xe7, 0x50, 0xa2, 0x0b, 0xe8, 0x1b, 0xb4, 0xd2, 0xe1, + 0x36, 0xe9, 0xa6, 0x51, 0x36, 0x98, 0x4e, 0xd9, 0x89, 0xd5, 0xec, 0xb0, 0x96, 0x2d, 0xf6, 0x9f, + 0x1e, 0x75, 0x53, 0xef, 0x8a, 0xc3, 0x90, 0xd1, 0x03, 0x0c, 0x8f, 0x9b, 0xf4, 0x0c, 0xa2, 0x77, + 0xb1, 0x4b, 0x48, 0x4a, 0xb2, 0xb8, 0x70, 0x4f, 0x7a, 0x0e, 0xf1, 0x96, 0xab, 0x8d, 0xf0, 0x17, + 0xc7, 0x45, 0x10, 0xf7, 0x9d, 0x3b, 0x32, 0x36, 0x30, 0x0c, 0x5b, 0xf6, 0x33, 0x8e, 0x0c, 0x92, + 0x5f, 0x06, 0x2f, 0xa0, 0x17, 0xbc, 0xb5, 0x83, 0x5a, 0x45, 0x29, 0x74, 0x9d, 0xc9, 0x24, 0xf2, + 0x55, 0xff, 0x76, 
0x6c, 0x89, 0x6a, 0xb3, 0xd6, 0x49, 0x37, 0xb0, 0x41, 0xcd, 0x15, 0x5c, 0x95, + 0xb8, 0x3e, 0xe5, 0x79, 0x3e, 0x68, 0xcf, 0x71, 0xa1, 0x2c, 0xc8, 0xeb, 0xac, 0xe5, 0x2a, 0x54, + 0x5c, 0x57, 0x0c, 0xeb, 0x2a, 0xaf, 0x84, 0xf6, 0x91, 0xe5, 0xa1, 0xc5, 0x8d, 0xb4, 0x7f, 0xb2, + 0x9d, 0x39, 0xf1, 0x45, 0xc8, 0xb2, 0xe7, 0xd1, 0xdb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdf, + 0x8a, 0x1a, 0x6c, 0x05, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/value.pb.go b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/value.pb.go new file mode 100644 index 0000000..902e1fb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/expr/v1beta1/value.pb.go @@ -0,0 +1,708 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/expr/v1beta1/value.proto + +package expr // import "google.golang.org/genproto/googleapis/api/expr/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a CEL value. +// +// This is similar to `google.protobuf.Value`, but can represent CEL's full +// range of values. +type Value struct { + // Required. The valid kinds of values. + // + // Types that are valid to be assigned to Kind: + // *Value_NullValue + // *Value_BoolValue + // *Value_Int64Value + // *Value_Uint64Value + // *Value_DoubleValue + // *Value_StringValue + // *Value_BytesValue + // *Value_EnumValue + // *Value_ObjectValue + // *Value_MapValue + // *Value_ListValue + // *Value_TypeValue + Kind isValue_Kind `protobuf_oneof:"kind"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_value_20f19fe52ddbfe7f, []int{0} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_Kind interface { + isValue_Kind() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,1,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BoolValue struct { + BoolValue bool `protobuf:"varint,2,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type Value_Int64Value struct { + Int64Value int64 `protobuf:"varint,3,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type 
Value_Uint64Value struct { + Uint64Value uint64 `protobuf:"varint,4,opt,name=uint64_value,json=uint64Value,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,5,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,6,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BytesValue struct { + BytesValue []byte `protobuf:"bytes,7,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +type Value_EnumValue struct { + EnumValue *EnumValue `protobuf:"bytes,9,opt,name=enum_value,json=enumValue,proto3,oneof"` +} + +type Value_ObjectValue struct { + ObjectValue *any.Any `protobuf:"bytes,10,opt,name=object_value,json=objectValue,proto3,oneof"` +} + +type Value_MapValue struct { + MapValue *MapValue `protobuf:"bytes,11,opt,name=map_value,json=mapValue,proto3,oneof"` +} + +type Value_ListValue struct { + ListValue *ListValue `protobuf:"bytes,12,opt,name=list_value,json=listValue,proto3,oneof"` +} + +type Value_TypeValue struct { + TypeValue string `protobuf:"bytes,15,opt,name=type_value,json=typeValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_Kind() {} + +func (*Value_BoolValue) isValue_Kind() {} + +func (*Value_Int64Value) isValue_Kind() {} + +func (*Value_Uint64Value) isValue_Kind() {} + +func (*Value_DoubleValue) isValue_Kind() {} + +func (*Value_StringValue) isValue_Kind() {} + +func (*Value_BytesValue) isValue_Kind() {} + +func (*Value_EnumValue) isValue_Kind() {} + +func (*Value_ObjectValue) isValue_Kind() {} + +func (*Value_MapValue) isValue_Kind() {} + +func (*Value_ListValue) isValue_Kind() {} + +func (*Value_TypeValue) isValue_Kind() {} + +func (m *Value) GetKind() isValue_Kind { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetKind().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBoolValue() bool { + if x, ok := m.GetKind().(*Value_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *Value) GetInt64Value() int64 { + if x, ok := m.GetKind().(*Value_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *Value) GetUint64Value() uint64 { + if x, ok := m.GetKind().(*Value_Uint64Value); ok { + return x.Uint64Value + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetKind().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetKind().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBytesValue() []byte { + if x, ok := m.GetKind().(*Value_BytesValue); ok { + return x.BytesValue + } + return nil +} + +func (m *Value) GetEnumValue() *EnumValue { + if x, ok := m.GetKind().(*Value_EnumValue); ok { + return x.EnumValue + } + return nil +} + +func (m *Value) GetObjectValue() *any.Any { + if x, ok := m.GetKind().(*Value_ObjectValue); ok { + return x.ObjectValue + } + return nil +} + +func (m *Value) GetMapValue() *MapValue { + if x, ok := m.GetKind().(*Value_MapValue); ok { + return x.MapValue + } + return nil +} + +func (m *Value) GetListValue() *ListValue { + if x, ok := m.GetKind().(*Value_ListValue); ok { + return x.ListValue + } + return nil +} + +func (m *Value) GetTypeValue() string { + if x, ok := m.GetKind().(*Value_TypeValue); ok { + return x.TypeValue + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto 
package. +func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BoolValue)(nil), + (*Value_Int64Value)(nil), + (*Value_Uint64Value)(nil), + (*Value_DoubleValue)(nil), + (*Value_StringValue)(nil), + (*Value_BytesValue)(nil), + (*Value_EnumValue)(nil), + (*Value_ObjectValue)(nil), + (*Value_MapValue)(nil), + (*Value_ListValue)(nil), + (*Value_TypeValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_Int64Value: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *Value_Uint64Value: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Uint64Value)) + case *Value_DoubleValue: + b.EncodeVarint(5<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_StringValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BytesValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case *Value_EnumValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EnumValue); err != nil { + return err + } + case *Value_ObjectValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectValue); err != nil { + return err + } + case *Value_MapValue: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapValue); err != nil { + return err + } + case *Value_ListValue: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListValue); err != nil { + return err + } + case *Value_TypeValue: + b.EncodeVarint(15<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TypeValue) + case nil: + default: + return fmt.Errorf("Value.Kind has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 1: // kind.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 2: // kind.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_BoolValue{x != 0} + return true, err + case 3: // kind.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_Int64Value{int64(x)} + return true, err + case 4: // kind.uint64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Kind = &Value_Uint64Value{x} + return true, err + case 5: // kind.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Kind = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 6: // kind.string_value + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Value_StringValue{x} + return true, err + case 7: // kind.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Kind = &Value_BytesValue{x} + return true, err + case 9: // kind.enum_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EnumValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_EnumValue{msg} + return true, err + case 10: // kind.object_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Kind = &Value_ObjectValue{msg} + return true, err + case 11: // kind.map_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MapValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_MapValue{msg} + return true, err + case 12: // kind.list_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListValue) + err := b.DecodeMessage(msg) + m.Kind = &Value_ListValue{msg} + return true, err + case 15: // kind.type_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Kind = &Value_TypeValue{x} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // kind + switch x := m.Kind.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BoolValue: + n += 1 // tag and wire + n += 1 + case *Value_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *Value_Uint64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Uint64Value)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BytesValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case *Value_EnumValue: + s := proto.Size(x.EnumValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ObjectValue: + s := proto.Size(x.ObjectValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_MapValue: + s := proto.Size(x.MapValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ListValue: + s := proto.Size(x.ListValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_TypeValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TypeValue))) + n += len(x.TypeValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An enum value. +type EnumValue struct { + // The fully qualified name of the enum type. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The value of the enum. 
+ Value int32 `protobuf:"varint,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumValue) Reset() { *m = EnumValue{} } +func (m *EnumValue) String() string { return proto.CompactTextString(m) } +func (*EnumValue) ProtoMessage() {} +func (*EnumValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_20f19fe52ddbfe7f, []int{1} +} +func (m *EnumValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValue.Unmarshal(m, b) +} +func (m *EnumValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValue.Marshal(b, m, deterministic) +} +func (dst *EnumValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValue.Merge(dst, src) +} +func (m *EnumValue) XXX_Size() int { + return xxx_messageInfo_EnumValue.Size(m) +} +func (m *EnumValue) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValue.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValue proto.InternalMessageInfo + +func (m *EnumValue) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *EnumValue) GetValue() int32 { + if m != nil { + return m.Value + } + return 0 +} + +// A list. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +type ListValue struct { + // The ordered values in the list. + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListValue) Reset() { *m = ListValue{} } +func (m *ListValue) String() string { return proto.CompactTextString(m) } +func (*ListValue) ProtoMessage() {} +func (*ListValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_20f19fe52ddbfe7f, []int{2} +} +func (m *ListValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListValue.Unmarshal(m, b) +} +func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListValue.Marshal(b, m, deterministic) +} +func (dst *ListValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListValue.Merge(dst, src) +} +func (m *ListValue) XXX_Size() int { + return xxx_messageInfo_ListValue.Size(m) +} +func (m *ListValue) XXX_DiscardUnknown() { + xxx_messageInfo_ListValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ListValue proto.InternalMessageInfo + +func (m *ListValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A map. +// +// Wrapped in a message so 'not set' and empty can be differentiated, which is +// required for use in a 'oneof'. +type MapValue struct { + // The set of map entries. + // + // CEL has fewer restrictions on keys, so a protobuf map represenation + // cannot be used. 
+ Entries []*MapValue_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue) Reset() { *m = MapValue{} } +func (m *MapValue) String() string { return proto.CompactTextString(m) } +func (*MapValue) ProtoMessage() {} +func (*MapValue) Descriptor() ([]byte, []int) { + return fileDescriptor_value_20f19fe52ddbfe7f, []int{3} +} +func (m *MapValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue.Unmarshal(m, b) +} +func (m *MapValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue.Marshal(b, m, deterministic) +} +func (dst *MapValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue.Merge(dst, src) +} +func (m *MapValue) XXX_Size() int { + return xxx_messageInfo_MapValue.Size(m) +} +func (m *MapValue) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue proto.InternalMessageInfo + +func (m *MapValue) GetEntries() []*MapValue_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// An entry in the map. +type MapValue_Entry struct { + // The key. + // + // Must be unique with in the map. + // Currently only boolean, int, uint, and string values can be keys. + Key *Value `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value. + Value *Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue_Entry) Reset() { *m = MapValue_Entry{} } +func (m *MapValue_Entry) String() string { return proto.CompactTextString(m) } +func (*MapValue_Entry) ProtoMessage() {} +func (*MapValue_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_value_20f19fe52ddbfe7f, []int{3, 0} +} +func (m *MapValue_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue_Entry.Unmarshal(m, b) +} +func (m *MapValue_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue_Entry.Marshal(b, m, deterministic) +} +func (dst *MapValue_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue_Entry.Merge(dst, src) +} +func (m *MapValue_Entry) XXX_Size() int { + return xxx_messageInfo_MapValue_Entry.Size(m) +} +func (m *MapValue_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue_Entry proto.InternalMessageInfo + +func (m *MapValue_Entry) GetKey() *Value { + if m != nil { + return m.Key + } + return nil +} + +func (m *MapValue_Entry) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Value)(nil), "google.api.expr.v1beta1.Value") + proto.RegisterType((*EnumValue)(nil), "google.api.expr.v1beta1.EnumValue") + proto.RegisterType((*ListValue)(nil), "google.api.expr.v1beta1.ListValue") + proto.RegisterType((*MapValue)(nil), "google.api.expr.v1beta1.MapValue") + proto.RegisterType((*MapValue_Entry)(nil), "google.api.expr.v1beta1.MapValue.Entry") +} + +func init() { + proto.RegisterFile("google/api/expr/v1beta1/value.proto", fileDescriptor_value_20f19fe52ddbfe7f) +} + +var fileDescriptor_value_20f19fe52ddbfe7f = []byte{ + // 516 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0x4d, 0x6f, 0xd3, 0x30, + 0x18, 0xc7, 0x6b, 0xfa, 0xb2, 
0xe6, 0x69, 0x05, 0x92, 0x35, 0x89, 0x51, 0x10, 0x64, 0xdd, 0x81, + 0x9c, 0x1c, 0x56, 0xc6, 0x24, 0xd4, 0x0b, 0xeb, 0x34, 0xa9, 0x07, 0x40, 0x53, 0x0e, 0x1c, 0xb8, + 0xa0, 0xa4, 0x33, 0x51, 0xa8, 0x63, 0x47, 0x89, 0x3d, 0x91, 0x2f, 0xc7, 0x07, 0xe0, 0x13, 0x71, + 0x44, 0x7e, 0x0b, 0x63, 0x53, 0xd5, 0x5b, 0x9e, 0xbf, 0x7f, 0x7f, 0x3f, 0x2f, 0x7e, 0x14, 0x38, + 0xc9, 0x85, 0xc8, 0x19, 0x8d, 0xd3, 0xaa, 0x88, 0xe9, 0xcf, 0xaa, 0x8e, 0x6f, 0x4f, 0x33, 0x2a, + 0xd3, 0xd3, 0xf8, 0x36, 0x65, 0x8a, 0x92, 0xaa, 0x16, 0x52, 0xe0, 0xa7, 0x16, 0x22, 0x69, 0x55, + 0x10, 0x0d, 0x11, 0x07, 0xcd, 0x9e, 0x39, 0xb7, 0xc1, 0x32, 0xf5, 0x3d, 0x4e, 0x79, 0x6b, 0x3d, + 0xb3, 0x17, 0xf7, 0x8f, 0x1a, 0x59, 0xab, 0x8d, 0xb4, 0xa7, 0xf3, 0xdf, 0x03, 0x18, 0x7e, 0xd1, + 0x19, 0xf0, 0x12, 0x80, 0x2b, 0xc6, 0xbe, 0x99, 0x7c, 0x47, 0x28, 0x44, 0xd1, 0xe3, 0xc5, 0x8c, + 0xb8, 0x84, 0xde, 0x4c, 0x3e, 0x2b, 0xc6, 0x0c, 0xbf, 0xee, 0x25, 0x01, 0xf7, 0x01, 0x7e, 0x05, + 0x90, 0x09, 0xe1, 0xcd, 0x8f, 0x42, 0x14, 0x8d, 0x35, 0xa0, 0x35, 0x0b, 0x1c, 0xc3, 0xa4, 0xe0, + 0xf2, 0xfc, 0xcc, 0x11, 0xfd, 0x10, 0x45, 0xfd, 0x75, 0x2f, 0x01, 0x23, 0x5a, 0xe4, 0x04, 0xa6, + 0xea, 0x2e, 0x33, 0x08, 0x51, 0x34, 0x58, 0xf7, 0x92, 0x89, 0xfa, 0x1f, 0xba, 0x11, 0x2a, 0x63, + 0xd4, 0x41, 0xc3, 0x10, 0x45, 0x48, 0x43, 0x56, 0xed, 0xa0, 0x46, 0xd6, 0x05, 0xcf, 0x1d, 0x34, + 0x0a, 0x51, 0x14, 0x68, 0xc8, 0xaa, 0x5d, 0x45, 0x59, 0x2b, 0x69, 0xe3, 0x98, 0x83, 0x10, 0x45, + 0x53, 0x5d, 0x91, 0x11, 0x2d, 0x72, 0x09, 0x40, 0xb9, 0x2a, 0x1d, 0x11, 0x84, 0x28, 0x9a, 0x2c, + 0xe6, 0x64, 0xc7, 0x1b, 0x90, 0x2b, 0xae, 0xca, 0x6e, 0x34, 0xd4, 0x07, 0xf8, 0x3d, 0x4c, 0x45, + 0xf6, 0x83, 0x6e, 0xa4, 0xbb, 0x06, 0xcc, 0x35, 0x87, 0x0f, 0x26, 0x7b, 0xc1, 0x5b, 0x5d, 0xa2, + 0x65, 0xad, 0xf5, 0x03, 0x04, 0x65, 0x5a, 0x39, 0xdf, 0xc4, 0xf8, 0x8e, 0x77, 0xa6, 0xff, 0x94, + 0x56, 0x3e, 0xfb, 0xb8, 0x74, 0xdf, 0xba, 0x03, 0x56, 0x34, 0x3e, 0xf5, 0x74, 0x4f, 0x07, 0x1f, + 0x8b, 0x46, 0x76, 0x1d, 0x30, 0x1f, 0xe8, 0xc7, 0x95, 0x6d, 0xe5, 0x27, 0xfe, 0xc4, 0x0d, 0x33, + 0xd0, 0x9a, 0x01, 0x56, 0x23, 0x18, 0x6c, 0x0b, 0x7e, 0x33, 0x7f, 0x07, 0x41, 0x37, 0x04, 0x8c, + 0x61, 0xa0, 0x09, 0xb3, 0x49, 0x41, 0x62, 0xbe, 0xf1, 0x21, 0x0c, 0xff, 0x6d, 0xc8, 0x30, 0xb1, + 0xc1, 0xfc, 0x12, 0x82, 0x2e, 0x33, 0x3e, 0x87, 0x91, 0x51, 0x9b, 0x23, 0x14, 0xf6, 0xa3, 0xc9, + 0xe2, 0xe5, 0xce, 0x6a, 0x0d, 0x9f, 0x38, 0x7a, 0xfe, 0x0b, 0xc1, 0xd8, 0x8f, 0x00, 0x5f, 0xc0, + 0x01, 0xe5, 0xb2, 0x2e, 0xba, 0x5b, 0x5e, 0xef, 0x1d, 0x1b, 0xb9, 0xe2, 0xb2, 0x6e, 0x13, 0xef, + 0x9b, 0x09, 0x18, 0x1a, 0x05, 0xbf, 0x81, 0xfe, 0x96, 0xb6, 0xa6, 0x8d, 0xfd, 0xd5, 0x68, 0x14, + 0x9f, 0xdd, 0xed, 0x72, 0xbf, 0xc7, 0xc2, 0xab, 0x2d, 0x3c, 0xdf, 0x88, 0x72, 0x17, 0xbb, 0x02, + 0x03, 0x5f, 0xeb, 0x6d, 0xb9, 0x46, 0x5f, 0x97, 0x0e, 0xcb, 0x05, 0x4b, 0x79, 0x4e, 0x44, 0x9d, + 0xc7, 0x39, 0xe5, 0x66, 0x97, 0x62, 0x7b, 0x94, 0x56, 0x45, 0xf3, 0xe0, 0x67, 0xb2, 0xd4, 0xc1, + 0x1f, 0x84, 0xb2, 0x91, 0x41, 0xdf, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xb1, 0x11, 0xf9, 0xd9, + 0x76, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go b/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go new file mode 100644 index 0000000..08f5b8d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/httpbody/httpbody.pb.go @@ -0,0 +1,142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/httpbody.proto + +package httpbody // import "google.golang.org/genproto/googleapis/api/httpbody" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Message that represents an arbitrary HTTP body. It should only be used for +// payload formats that can't be represented as JSON, such as raw binary or +// an HTML page. +// +// +// This message can be used both in streaming and non-streaming API methods in +// the request as well as the response. +// +// It can be used as a top-level request field, which is convenient if one +// wants to extract parameters from either the URL or HTTP template into the +// request fields and also want access to the raw HTTP body. +// +// Example: +// +// message GetResourceRequest { +// // A unique request id. +// string request_id = 1; +// +// // The raw HTTP body is bound to this field. +// google.api.HttpBody http_body = 2; +// } +// +// service ResourceService { +// rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); +// rpc UpdateResource(google.api.HttpBody) returns +// (google.protobuf.Empty); +// } +// +// Example with streaming methods: +// +// service CaldavService { +// rpc GetCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// rpc UpdateCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// } +// +// Use of this type only changes how the request and response bodies are +// handled, all other features will continue to work unchanged. +type HttpBody struct { + // The HTTP Content-Type header value specifying the content type of the body. + ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // The HTTP request/response body as raw binary. + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + // Application specific response metadata. Must be set in the first response + // for streaming APIs. 
+ Extensions []*any.Any `protobuf:"bytes,3,rep,name=extensions,proto3" json:"extensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpBody) Reset() { *m = HttpBody{} } +func (m *HttpBody) String() string { return proto.CompactTextString(m) } +func (*HttpBody) ProtoMessage() {} +func (*HttpBody) Descriptor() ([]byte, []int) { + return fileDescriptor_httpbody_b1715ad6d52ffa1f, []int{0} +} +func (m *HttpBody) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpBody.Unmarshal(m, b) +} +func (m *HttpBody) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpBody.Marshal(b, m, deterministic) +} +func (dst *HttpBody) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpBody.Merge(dst, src) +} +func (m *HttpBody) XXX_Size() int { + return xxx_messageInfo_HttpBody.Size(m) +} +func (m *HttpBody) XXX_DiscardUnknown() { + xxx_messageInfo_HttpBody.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpBody proto.InternalMessageInfo + +func (m *HttpBody) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *HttpBody) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *HttpBody) GetExtensions() []*any.Any { + if m != nil { + return m.Extensions + } + return nil +} + +func init() { + proto.RegisterType((*HttpBody)(nil), "google.api.HttpBody") +} + +func init() { proto.RegisterFile("google/api/httpbody.proto", fileDescriptor_httpbody_b1715ad6d52ffa1f) } + +var fileDescriptor_httpbody_b1715ad6d52ffa1f = []byte{ + // 229 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0x31, 0x4f, 0xc3, 0x30, + 0x10, 0x85, 0xe5, 0xb6, 0x42, 0x70, 0x2d, 0x0c, 0x16, 0x43, 0x60, 0x0a, 0x4c, 0x99, 0x6c, 0x09, + 0xd8, 0x3a, 0x35, 0x0b, 0xb0, 0x45, 0x11, 0x13, 0x0b, 0x72, 0x1a, 0xe3, 0x46, 0x2a, 0x77, 0xa7, + 0xe6, 0x10, 0xf8, 0xef, 0xf0, 0x2b, 0x19, 0x11, 0x69, 0x2c, 0xe8, 0xf6, 0xe4, 0xef, 0x3d, 0xbf, + 0x77, 0x70, 0x11, 0x88, 0xc2, 0xd6, 0x5b, 0xc7, 0x9d, 0xdd, 0x88, 0x70, 0x43, 0x6d, 0x34, 0xbc, + 0x23, 0x21, 0x0d, 0x7b, 0x64, 0x1c, 0x77, 0x97, 0xc9, 0x36, 0x90, 0xe6, 0xfd, 0xd5, 0x3a, 0x1c, + 0x6d, 0xd7, 0x1f, 0x70, 0xfc, 0x20, 0xc2, 0x25, 0xb5, 0x51, 0x5f, 0xc1, 0x62, 0x4d, 0x28, 0x1e, + 0xe5, 0x45, 0x22, 0xfb, 0x4c, 0xe5, 0xaa, 0x38, 0xa9, 0xe7, 0xe3, 0xdb, 0x53, 0x64, 0xaf, 0x35, + 0xcc, 0x5a, 0x27, 0x2e, 0x9b, 0xe4, 0xaa, 0x58, 0xd4, 0x83, 0xd6, 0x77, 0x00, 0xfe, 0x53, 0x3c, + 0xf6, 0x1d, 0x61, 0x9f, 0x4d, 0xf3, 0x69, 0x31, 0xbf, 0x39, 0x37, 0x63, 0x7d, 0xaa, 0x34, 0x2b, + 0x8c, 0xf5, 0x3f, 0x5f, 0xb9, 0x81, 0xb3, 0x35, 0xbd, 0x99, 0xbf, 0x95, 0xe5, 0x69, 0x1a, 0x52, + 0xfd, 0x66, 0x2a, 0xf5, 0xbc, 0x1c, 0x61, 0xa0, 0xad, 0xc3, 0x60, 0x68, 0x17, 0x6c, 0xf0, 0x38, + 0xfc, 0x68, 0xf7, 0xc8, 0x71, 0xd7, 0x1f, 0x1c, 0xbf, 0x4c, 0xe2, 0x5b, 0xa9, 0xaf, 0xc9, 0xec, + 0x7e, 0x55, 0x3d, 0x36, 0x47, 0x43, 0xe2, 0xf6, 0x27, 0x00, 0x00, 0xff, 0xff, 0x78, 0xb9, 0x16, + 0x2b, 0x2d, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/label/label.pb.go b/vendor/google.golang.org/genproto/googleapis/api/label/label.pb.go new file mode 100644 index 0000000..5824c0d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/label/label.pb.go @@ -0,0 +1,134 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/label.proto + +package label // import "google.golang.org/genproto/googleapis/api/label" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Value types that can be used as label values. +type LabelDescriptor_ValueType int32 + +const ( + // A variable-length string. This is the default. + LabelDescriptor_STRING LabelDescriptor_ValueType = 0 + // Boolean; true or false. + LabelDescriptor_BOOL LabelDescriptor_ValueType = 1 + // A 64-bit signed integer. + LabelDescriptor_INT64 LabelDescriptor_ValueType = 2 +) + +var LabelDescriptor_ValueType_name = map[int32]string{ + 0: "STRING", + 1: "BOOL", + 2: "INT64", +} +var LabelDescriptor_ValueType_value = map[string]int32{ + "STRING": 0, + "BOOL": 1, + "INT64": 2, +} + +func (x LabelDescriptor_ValueType) String() string { + return proto.EnumName(LabelDescriptor_ValueType_name, int32(x)) +} +func (LabelDescriptor_ValueType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_label_4cc356b80657a87b, []int{0, 0} +} + +// A description of a label. +type LabelDescriptor struct { + // The label key. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The type of data that can be assigned to the label. + ValueType LabelDescriptor_ValueType `protobuf:"varint,2,opt,name=value_type,json=valueType,proto3,enum=google.api.LabelDescriptor_ValueType" json:"value_type,omitempty"` + // A human-readable description for the label. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDescriptor) Reset() { *m = LabelDescriptor{} } +func (m *LabelDescriptor) String() string { return proto.CompactTextString(m) } +func (*LabelDescriptor) ProtoMessage() {} +func (*LabelDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_label_4cc356b80657a87b, []int{0} +} +func (m *LabelDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDescriptor.Unmarshal(m, b) +} +func (m *LabelDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDescriptor.Marshal(b, m, deterministic) +} +func (dst *LabelDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDescriptor.Merge(dst, src) +} +func (m *LabelDescriptor) XXX_Size() int { + return xxx_messageInfo_LabelDescriptor.Size(m) +} +func (m *LabelDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDescriptor proto.InternalMessageInfo + +func (m *LabelDescriptor) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *LabelDescriptor) GetValueType() LabelDescriptor_ValueType { + if m != nil { + return m.ValueType + } + return LabelDescriptor_STRING +} + +func (m *LabelDescriptor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*LabelDescriptor)(nil), "google.api.LabelDescriptor") + proto.RegisterEnum("google.api.LabelDescriptor_ValueType", LabelDescriptor_ValueType_name, LabelDescriptor_ValueType_value) +} + +func init() { proto.RegisterFile("google/api/label.proto", fileDescriptor_label_4cc356b80657a87b) } + +var fileDescriptor_label_4cc356b80657a87b = []byte{ + // 252 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0xcf, 0x49, 0x4c, 0x4a, 0xcd, 0xd1, 0x2b, 0x28, 0xca, + 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0x88, 0xeb, 0x25, 0x16, 0x64, 0x2a, 0xed, 0x64, 0xe4, 0xe2, 0xf7, + 0x01, 0xc9, 0xb9, 0xa4, 0x16, 0x27, 0x17, 0x65, 0x16, 0x94, 0xe4, 0x17, 0x09, 0x09, 0x70, 0x31, + 0x67, 0xa7, 0x56, 0x4a, 0x30, 0x2a, 0x30, 0x6a, 0x70, 0x06, 0x81, 0x98, 0x42, 0x2e, 0x5c, 0x5c, + 0x65, 0x89, 0x39, 0xa5, 0xa9, 0xf1, 0x25, 0x95, 0x05, 0xa9, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0x7c, + 0x46, 0xaa, 0x7a, 0x08, 0x63, 0xf4, 0xd0, 0x8c, 0xd0, 0x0b, 0x03, 0xa9, 0x0e, 0xa9, 0x2c, 0x48, + 0x0d, 0xe2, 0x2c, 0x83, 0x31, 0x85, 0x14, 0xb8, 0xb8, 0x53, 0xa0, 0x4a, 0x32, 0xf3, 0xf3, 0x24, + 0x98, 0xc1, 0xe6, 0x23, 0x0b, 0x29, 0xe9, 0x70, 0x71, 0xc2, 0x75, 0x0a, 0x71, 0x71, 0xb1, 0x05, + 0x87, 0x04, 0x79, 0xfa, 0xb9, 0x0b, 0x30, 0x08, 0x71, 0x70, 0xb1, 0x38, 0xf9, 0xfb, 0xfb, 0x08, + 0x30, 0x0a, 0x71, 0x72, 0xb1, 0x7a, 0xfa, 0x85, 0x98, 0x99, 0x08, 0x30, 0x39, 0xc5, 0x73, 0xf1, + 0x25, 0xe7, 0xe7, 0x22, 0x39, 0xc3, 0x89, 0x0b, 0xec, 0x8e, 0x00, 0x90, 0x2f, 0x03, 0x18, 0xa3, + 0x4c, 0xa1, 0x32, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa, 0xe9, 0xa9, + 0x79, 0xe0, 0x30, 0xd0, 0x87, 0x48, 0x25, 0x16, 0x64, 0x16, 0x23, 0x82, 0xc7, 0x1a, 0x4c, 0xfe, + 0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xe2, 0xee, 0x18, 0xe0, 0x99, 0xc4, 0x06, 0x56, 0x6b, 0x0c, 0x08, + 0x00, 0x00, 0xff, 0xff, 0x57, 0x04, 0xaa, 0x1f, 0x49, 0x01, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/api/launch_stage.pb.go b/vendor/google.golang.org/genproto/googleapis/api/launch_stage.pb.go new file mode 100644 index 0000000..9246e55 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/launch_stage.pb.go @@ -0,0 +1,110 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/launch_stage.proto + +package api // import "google.golang.org/genproto/googleapis/api" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The launch stage as defined by [Google Cloud Platform +// Launch Stages](http://cloud.google.com/terms/launch-stages). +type LaunchStage int32 + +const ( + // Do not use this default value. + LaunchStage_LAUNCH_STAGE_UNSPECIFIED LaunchStage = 0 + // Early Access features are limited to a closed group of testers. To use + // these features, you must sign up in advance and sign a Trusted Tester + // agreement (which includes confidentiality provisions). These features may + // be unstable, changed in backward-incompatible ways, and are not + // guaranteed to be released. + LaunchStage_EARLY_ACCESS LaunchStage = 1 + // Alpha is a limited availability test for releases before they are cleared + // for widespread use. By Alpha, all significant design issues are resolved + // and we are in the process of verifying functionality. Alpha customers + // need to apply for access, agree to applicable terms, and have their + // projects whitelisted. Alpha releases don’t have to be feature complete, + // no SLAs are provided, and there are no technical support obligations, but + // they will be far enough along that customers can actually use them in + // test environments or for limited-use tests -- just like they would in + // normal production cases. + LaunchStage_ALPHA LaunchStage = 2 + // Beta is the point at which we are ready to open a release for any + // customer to use. There are no SLA or technical support obligations in a + // Beta release. Products will be complete from a feature perspective, but + // may have some open outstanding issues. Beta releases are suitable for + // limited production use cases. + LaunchStage_BETA LaunchStage = 3 + // GA features are open to all developers and are considered stable and + // fully qualified for production use. + LaunchStage_GA LaunchStage = 4 + // Deprecated features are scheduled to be shut down and removed. For more + // information, see the “Deprecation Policy” section of our [Terms of + // Service](https://cloud.google.com/terms/) + // and the [Google Cloud Platform Subject to the Deprecation + // Policy](https://cloud.google.com/terms/deprecation) documentation. 
+ LaunchStage_DEPRECATED LaunchStage = 5 +) + +var LaunchStage_name = map[int32]string{ + 0: "LAUNCH_STAGE_UNSPECIFIED", + 1: "EARLY_ACCESS", + 2: "ALPHA", + 3: "BETA", + 4: "GA", + 5: "DEPRECATED", +} +var LaunchStage_value = map[string]int32{ + "LAUNCH_STAGE_UNSPECIFIED": 0, + "EARLY_ACCESS": 1, + "ALPHA": 2, + "BETA": 3, + "GA": 4, + "DEPRECATED": 5, +} + +func (x LaunchStage) String() string { + return proto.EnumName(LaunchStage_name, int32(x)) +} +func (LaunchStage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_launch_stage_9684e9c225ce12c7, []int{0} +} + +func init() { + proto.RegisterEnum("google.api.LaunchStage", LaunchStage_name, LaunchStage_value) +} + +func init() { + proto.RegisterFile("google/api/launch_stage.proto", fileDescriptor_launch_stage_9684e9c225ce12c7) +} + +var fileDescriptor_launch_stage_9684e9c225ce12c7 = []byte{ + // 225 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0xc1, 0x4a, 0xc3, 0x40, + 0x14, 0x45, 0x6d, 0x4c, 0x8b, 0x3e, 0xa5, 0x3c, 0x66, 0xe5, 0x42, 0x7f, 0x40, 0x30, 0x59, 0xb8, + 0x74, 0xf5, 0x32, 0x79, 0xa6, 0x81, 0x50, 0x86, 0x4e, 0xba, 0xb0, 0x9b, 0x30, 0x96, 0x30, 0x8e, + 0xc4, 0xcc, 0xd0, 0xd6, 0x1f, 0xf2, 0x4b, 0x25, 0x89, 0x60, 0xd7, 0xe7, 0xc0, 0x3d, 0x17, 0x1e, + 0xac, 0xf7, 0xb6, 0x6b, 0x53, 0x13, 0x5c, 0xda, 0x99, 0xef, 0x7e, 0xff, 0xd1, 0x1c, 0x4f, 0xc6, + 0xb6, 0x49, 0x38, 0xf8, 0x93, 0x17, 0x30, 0xe1, 0xc4, 0x04, 0xf7, 0xf8, 0x09, 0x37, 0xd5, 0x68, + 0xe8, 0x41, 0x10, 0xf7, 0x70, 0x57, 0xd1, 0x76, 0x2d, 0x57, 0x8d, 0xae, 0xa9, 0xe0, 0x66, 0xbb, + 0xd6, 0x8a, 0x65, 0xf9, 0x5a, 0x72, 0x8e, 0x17, 0x02, 0xe1, 0x96, 0x69, 0x53, 0xbd, 0x35, 0x24, + 0x25, 0x6b, 0x8d, 0x33, 0x71, 0x0d, 0x73, 0xaa, 0xd4, 0x8a, 0x30, 0x12, 0x57, 0x10, 0x67, 0x5c, + 0x13, 0x5e, 0x8a, 0x05, 0x44, 0x05, 0x61, 0x2c, 0x96, 0x00, 0x39, 0xab, 0x0d, 0x4b, 0xaa, 0x39, + 0xc7, 0x79, 0xb6, 0x83, 0xe5, 0xde, 0x7f, 0x25, 0xff, 0xeb, 0x19, 0x9e, 0x6d, 0xab, 0xa1, 0x4d, + 0xcd, 0x76, 0x4f, 0x7f, 0xdc, 0xfa, 0xce, 0xf4, 0x36, 0xf1, 0x07, 0x9b, 0xda, 0xb6, 0x1f, 0xcb, + 0xd3, 0x09, 0x99, 0xe0, 0x8e, 0xc3, 0xb7, 0x17, 0x13, 0xdc, 0x4f, 0x14, 0x17, 0xa4, 0xca, 0xf7, + 0xc5, 0x28, 0x3c, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x8e, 0xd5, 0x39, 0x1a, 0xfb, 0x00, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/metric/metric.pb.go b/vendor/google.golang.org/genproto/googleapis/api/metric/metric.pb.go new file mode 100644 index 0000000..ad046ef --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/metric/metric.pb.go @@ -0,0 +1,476 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/metric.proto + +package metric // import "google.golang.org/genproto/googleapis/api/metric" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import api "google.golang.org/genproto/googleapis/api" +import label "google.golang.org/genproto/googleapis/api/label" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The kind of measurement. 
It describes how the data is reported. +type MetricDescriptor_MetricKind int32 + +const ( + // Do not use this default value. + MetricDescriptor_METRIC_KIND_UNSPECIFIED MetricDescriptor_MetricKind = 0 + // An instantaneous measurement of a value. + MetricDescriptor_GAUGE MetricDescriptor_MetricKind = 1 + // The change in a value during a time interval. + MetricDescriptor_DELTA MetricDescriptor_MetricKind = 2 + // A value accumulated over a time interval. Cumulative + // measurements in a time series should have the same start time + // and increasing end times, until an event resets the cumulative + // value to zero and sets a new start time for the following + // points. + MetricDescriptor_CUMULATIVE MetricDescriptor_MetricKind = 3 +) + +var MetricDescriptor_MetricKind_name = map[int32]string{ + 0: "METRIC_KIND_UNSPECIFIED", + 1: "GAUGE", + 2: "DELTA", + 3: "CUMULATIVE", +} +var MetricDescriptor_MetricKind_value = map[string]int32{ + "METRIC_KIND_UNSPECIFIED": 0, + "GAUGE": 1, + "DELTA": 2, + "CUMULATIVE": 3, +} + +func (x MetricDescriptor_MetricKind) String() string { + return proto.EnumName(MetricDescriptor_MetricKind_name, int32(x)) +} +func (MetricDescriptor_MetricKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_metric_55f130a603acc212, []int{0, 0} +} + +// The value type of a metric. +type MetricDescriptor_ValueType int32 + +const ( + // Do not use this default value. + MetricDescriptor_VALUE_TYPE_UNSPECIFIED MetricDescriptor_ValueType = 0 + // The value is a boolean. + // This value type can be used only if the metric kind is `GAUGE`. + MetricDescriptor_BOOL MetricDescriptor_ValueType = 1 + // The value is a signed 64-bit integer. + MetricDescriptor_INT64 MetricDescriptor_ValueType = 2 + // The value is a double precision floating point number. + MetricDescriptor_DOUBLE MetricDescriptor_ValueType = 3 + // The value is a text string. + // This value type can be used only if the metric kind is `GAUGE`. + MetricDescriptor_STRING MetricDescriptor_ValueType = 4 + // The value is a [`Distribution`][google.api.Distribution]. + MetricDescriptor_DISTRIBUTION MetricDescriptor_ValueType = 5 + // The value is money. + MetricDescriptor_MONEY MetricDescriptor_ValueType = 6 +) + +var MetricDescriptor_ValueType_name = map[int32]string{ + 0: "VALUE_TYPE_UNSPECIFIED", + 1: "BOOL", + 2: "INT64", + 3: "DOUBLE", + 4: "STRING", + 5: "DISTRIBUTION", + 6: "MONEY", +} +var MetricDescriptor_ValueType_value = map[string]int32{ + "VALUE_TYPE_UNSPECIFIED": 0, + "BOOL": 1, + "INT64": 2, + "DOUBLE": 3, + "STRING": 4, + "DISTRIBUTION": 5, + "MONEY": 6, +} + +func (x MetricDescriptor_ValueType) String() string { + return proto.EnumName(MetricDescriptor_ValueType_name, int32(x)) +} +func (MetricDescriptor_ValueType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_metric_55f130a603acc212, []int{0, 1} +} + +// Defines a metric type and its schema. Once a metric descriptor is created, +// deleting or altering it stops data collection and makes the metric type's +// existing data unusable. +type MetricDescriptor struct { + // The resource name of the metric descriptor. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The metric type, including its DNS name prefix. The type is not + // URL-encoded. All user-defined metric types have the DNS name + // `custom.googleapis.com` or `external.googleapis.com`. Metric types should + // use a natural hierarchical grouping. 
For example: + // + // "custom.googleapis.com/invoice/paid/amount" + // "external.googleapis.com/prometheus/up" + // "appengine.googleapis.com/http/server/response_latencies" + Type string `protobuf:"bytes,8,opt,name=type,proto3" json:"type,omitempty"` + // The set of labels that can be used to describe a specific + // instance of this metric type. For example, the + // `appengine.googleapis.com/http/server/response_latencies` metric + // type has a label for the HTTP response code, `response_code`, so + // you can look at latencies for successful responses or just + // for responses that failed. + Labels []*label.LabelDescriptor `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty"` + // Whether the metric records instantaneous values, changes to a value, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + MetricKind MetricDescriptor_MetricKind `protobuf:"varint,3,opt,name=metric_kind,json=metricKind,proto3,enum=google.api.MetricDescriptor_MetricKind" json:"metric_kind,omitempty"` + // Whether the measurement is an integer, a floating-point number, etc. + // Some combinations of `metric_kind` and `value_type` might not be supported. + ValueType MetricDescriptor_ValueType `protobuf:"varint,4,opt,name=value_type,json=valueType,proto3,enum=google.api.MetricDescriptor_ValueType" json:"value_type,omitempty"` + // The unit in which the metric value is reported. It is only applicable + // if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The + // supported units are a subset of [The Unified Code for Units of + // Measure](http://unitsofmeasure.org/ucum.html) standard: + // + // **Basic units (UNIT)** + // + // * `bit` bit + // * `By` byte + // * `s` second + // * `min` minute + // * `h` hour + // * `d` day + // + // **Prefixes (PREFIX)** + // + // * `k` kilo (10**3) + // * `M` mega (10**6) + // * `G` giga (10**9) + // * `T` tera (10**12) + // * `P` peta (10**15) + // * `E` exa (10**18) + // * `Z` zetta (10**21) + // * `Y` yotta (10**24) + // * `m` milli (10**-3) + // * `u` micro (10**-6) + // * `n` nano (10**-9) + // * `p` pico (10**-12) + // * `f` femto (10**-15) + // * `a` atto (10**-18) + // * `z` zepto (10**-21) + // * `y` yocto (10**-24) + // * `Ki` kibi (2**10) + // * `Mi` mebi (2**20) + // * `Gi` gibi (2**30) + // * `Ti` tebi (2**40) + // + // **Grammar** + // + // The grammar also includes these connectors: + // + // * `/` division (as an infix operator, e.g. `1/s`). + // * `.` multiplication (as an infix operator, e.g. `GBy.d`) + // + // The grammar for a unit is as follows: + // + // Expression = Component { "." Component } { "/" Component } ; + // + // Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ] + // | Annotation + // | "1" + // ; + // + // Annotation = "{" NAME "}" ; + // + // Notes: + // + // * `Annotation` is just a comment if it follows a `UNIT` and is + // equivalent to `1` if it is used alone. For examples, + // `{requests}/s == 1/s`, `By{transmitted}/s == By/s`. + // * `NAME` is a sequence of non-blank printable ASCII characters not + // containing '{' or '}'. + // * `1` represents dimensionless value 1, such as in `1/s`. + // * `%` represents dimensionless value 1/100, and annotates values giving + // a percentage. + Unit string `protobuf:"bytes,5,opt,name=unit,proto3" json:"unit,omitempty"` + // A detailed description of the metric, which can be used in documentation. 
+ Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // A concise name for the metric, which can be displayed in user interfaces. + // Use sentence case without an ending period, for example "Request count". + // This field is optional but it is recommended to be set for any metrics + // associated with user-visible concepts, such as Quota. + DisplayName string `protobuf:"bytes,7,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. Metadata which can be used to guide usage of the metric. + Metadata *MetricDescriptor_MetricDescriptorMetadata `protobuf:"bytes,10,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetricDescriptor) Reset() { *m = MetricDescriptor{} } +func (m *MetricDescriptor) String() string { return proto.CompactTextString(m) } +func (*MetricDescriptor) ProtoMessage() {} +func (*MetricDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_55f130a603acc212, []int{0} +} +func (m *MetricDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetricDescriptor.Unmarshal(m, b) +} +func (m *MetricDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetricDescriptor.Marshal(b, m, deterministic) +} +func (dst *MetricDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetricDescriptor.Merge(dst, src) +} +func (m *MetricDescriptor) XXX_Size() int { + return xxx_messageInfo_MetricDescriptor.Size(m) +} +func (m *MetricDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_MetricDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_MetricDescriptor proto.InternalMessageInfo + +func (m *MetricDescriptor) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *MetricDescriptor) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *MetricDescriptor) GetLabels() []*label.LabelDescriptor { + if m != nil { + return m.Labels + } + return nil +} + +func (m *MetricDescriptor) GetMetricKind() MetricDescriptor_MetricKind { + if m != nil { + return m.MetricKind + } + return MetricDescriptor_METRIC_KIND_UNSPECIFIED +} + +func (m *MetricDescriptor) GetValueType() MetricDescriptor_ValueType { + if m != nil { + return m.ValueType + } + return MetricDescriptor_VALUE_TYPE_UNSPECIFIED +} + +func (m *MetricDescriptor) GetUnit() string { + if m != nil { + return m.Unit + } + return "" +} + +func (m *MetricDescriptor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *MetricDescriptor) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *MetricDescriptor) GetMetadata() *MetricDescriptor_MetricDescriptorMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +// Additional annotations that can be used to guide the usage of a metric. +type MetricDescriptor_MetricDescriptorMetadata struct { + // The launch stage of the metric definition. + LaunchStage api.LaunchStage `protobuf:"varint,1,opt,name=launch_stage,json=launchStage,proto3,enum=google.api.LaunchStage" json:"launch_stage,omitempty"` + // The sampling period of metric data points. For metrics which are written + // periodically, consecutive data points are stored at this time interval, + // excluding data loss due to errors. Metrics with a higher granularity have + // a smaller sampling period. 
+ SamplePeriod *duration.Duration `protobuf:"bytes,2,opt,name=sample_period,json=samplePeriod,proto3" json:"sample_period,omitempty"` + // The delay of data points caused by ingestion. Data points older than this + // age are guaranteed to be ingested and available to be read, excluding + // data loss due to errors. + IngestDelay *duration.Duration `protobuf:"bytes,3,opt,name=ingest_delay,json=ingestDelay,proto3" json:"ingest_delay,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetricDescriptor_MetricDescriptorMetadata) Reset() { + *m = MetricDescriptor_MetricDescriptorMetadata{} +} +func (m *MetricDescriptor_MetricDescriptorMetadata) String() string { return proto.CompactTextString(m) } +func (*MetricDescriptor_MetricDescriptorMetadata) ProtoMessage() {} +func (*MetricDescriptor_MetricDescriptorMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_55f130a603acc212, []int{0, 0} +} +func (m *MetricDescriptor_MetricDescriptorMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata.Unmarshal(m, b) +} +func (m *MetricDescriptor_MetricDescriptorMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata.Marshal(b, m, deterministic) +} +func (dst *MetricDescriptor_MetricDescriptorMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata.Merge(dst, src) +} +func (m *MetricDescriptor_MetricDescriptorMetadata) XXX_Size() int { + return xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata.Size(m) +} +func (m *MetricDescriptor_MetricDescriptorMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_MetricDescriptor_MetricDescriptorMetadata proto.InternalMessageInfo + +func (m *MetricDescriptor_MetricDescriptorMetadata) GetLaunchStage() api.LaunchStage { + if m != nil { + return m.LaunchStage + } + return api.LaunchStage_LAUNCH_STAGE_UNSPECIFIED +} + +func (m *MetricDescriptor_MetricDescriptorMetadata) GetSamplePeriod() *duration.Duration { + if m != nil { + return m.SamplePeriod + } + return nil +} + +func (m *MetricDescriptor_MetricDescriptorMetadata) GetIngestDelay() *duration.Duration { + if m != nil { + return m.IngestDelay + } + return nil +} + +// A specific metric, identified by specifying values for all of the +// labels of a [`MetricDescriptor`][google.api.MetricDescriptor]. +type Metric struct { + // An existing metric type, see [google.api.MetricDescriptor][google.api.MetricDescriptor]. + // For example, `custom.googleapis.com/invoice/paid/amount`. + Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"` + // The set of label values that uniquely identify this metric. All + // labels listed in the `MetricDescriptor` must be assigned values. 
+ Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Metric) Reset() { *m = Metric{} } +func (m *Metric) String() string { return proto.CompactTextString(m) } +func (*Metric) ProtoMessage() {} +func (*Metric) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_55f130a603acc212, []int{1} +} +func (m *Metric) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Metric.Unmarshal(m, b) +} +func (m *Metric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Metric.Marshal(b, m, deterministic) +} +func (dst *Metric) XXX_Merge(src proto.Message) { + xxx_messageInfo_Metric.Merge(dst, src) +} +func (m *Metric) XXX_Size() int { + return xxx_messageInfo_Metric.Size(m) +} +func (m *Metric) XXX_DiscardUnknown() { + xxx_messageInfo_Metric.DiscardUnknown(m) +} + +var xxx_messageInfo_Metric proto.InternalMessageInfo + +func (m *Metric) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Metric) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func init() { + proto.RegisterType((*MetricDescriptor)(nil), "google.api.MetricDescriptor") + proto.RegisterType((*MetricDescriptor_MetricDescriptorMetadata)(nil), "google.api.MetricDescriptor.MetricDescriptorMetadata") + proto.RegisterType((*Metric)(nil), "google.api.Metric") + proto.RegisterMapType((map[string]string)(nil), "google.api.Metric.LabelsEntry") + proto.RegisterEnum("google.api.MetricDescriptor_MetricKind", MetricDescriptor_MetricKind_name, MetricDescriptor_MetricKind_value) + proto.RegisterEnum("google.api.MetricDescriptor_ValueType", MetricDescriptor_ValueType_name, MetricDescriptor_ValueType_value) +} + +func init() { proto.RegisterFile("google/api/metric.proto", fileDescriptor_metric_55f130a603acc212) } + +var fileDescriptor_metric_55f130a603acc212 = []byte{ + // 648 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0x5d, 0x6e, 0xd3, 0x40, + 0x10, 0xc6, 0xf9, 0x31, 0xcd, 0x38, 0x44, 0xd6, 0x0a, 0xb5, 0x26, 0x15, 0x55, 0xc8, 0x03, 0xe4, + 0x29, 0x91, 0x52, 0x28, 0x50, 0x10, 0x52, 0x52, 0x9b, 0x60, 0x35, 0x71, 0x82, 0x6b, 0x57, 0x2a, + 0x2f, 0xd6, 0x36, 0x5e, 0x8c, 0x55, 0xc7, 0x36, 0xb6, 0x53, 0x29, 0xa7, 0xe0, 0x0e, 0x5c, 0x86, + 0x0b, 0x70, 0x20, 0xb4, 0xbb, 0x4e, 0xe2, 0x06, 0x51, 0xf1, 0x94, 0x99, 0xf9, 0xbe, 0x99, 0x9d, + 0xf1, 0x37, 0x13, 0x38, 0xf0, 0xa2, 0xc8, 0x0b, 0x48, 0x0f, 0xc7, 0x7e, 0x6f, 0x41, 0xb2, 0xc4, + 0x9f, 0x77, 0xe3, 0x24, 0xca, 0x22, 0x04, 0x1c, 0xe8, 0xe2, 0xd8, 0x6f, 0xee, 0x17, 0x48, 0x01, + 0xbe, 0x26, 0x01, 0xe7, 0x34, 0x9f, 0xde, 0x89, 0x2f, 0xc3, 0xf9, 0x37, 0x27, 0xcd, 0xb0, 0x47, + 0x72, 0xf8, 0x28, 0x87, 0x99, 0x77, 0xbd, 0xfc, 0xda, 0x73, 0x97, 0x09, 0xce, 0xfc, 0x28, 0xe4, + 0x78, 0xfb, 0x97, 0x08, 0xf2, 0x84, 0xbd, 0xa9, 0x92, 0x74, 0x9e, 0xf8, 0x71, 0x16, 0x25, 0x08, + 0x41, 0x25, 0xc4, 0x0b, 0xa2, 0x08, 0x2d, 0xa1, 0x53, 0x33, 0x99, 0x4d, 0x63, 0xd9, 0x2a, 0x26, + 0xca, 0x1e, 0x8f, 0x51, 0x1b, 0x1d, 0x83, 0xc8, 0x5a, 0x49, 0x95, 0x52, 0xab, 0xdc, 0x91, 0xfa, + 0x87, 0xdd, 0x6d, 0xc3, 0xdd, 0x31, 0x45, 0xb6, 0x45, 0xcd, 0x9c, 0x8a, 0x3e, 0x81, 0xc4, 0x87, + 0x74, 0x6e, 0xfc, 0xd0, 0x55, 0xca, 0x2d, 0xa1, 0xd3, 0xe8, 0xbf, 0x28, 0x66, 0xee, 0xf6, 0x93, + 0x07, 0xce, 0xfd, 0xd0, 0x35, 
0x61, 0xb1, 0xb1, 0x91, 0x06, 0x70, 0x8b, 0x83, 0x25, 0x71, 0x58, + 0x63, 0x15, 0x56, 0xe8, 0xf9, 0xbd, 0x85, 0x2e, 0x29, 0xdd, 0x5a, 0xc5, 0xc4, 0xac, 0xdd, 0xae, + 0x4d, 0x3a, 0xd9, 0x32, 0xf4, 0x33, 0xa5, 0xca, 0x27, 0xa3, 0x36, 0x6a, 0x81, 0xe4, 0xe6, 0x69, + 0x7e, 0x14, 0x2a, 0x22, 0x83, 0x8a, 0x21, 0xf4, 0x0c, 0xea, 0xae, 0x9f, 0xc6, 0x01, 0x5e, 0x39, + 0xec, 0x5b, 0x3d, 0xcc, 0x29, 0x3c, 0x66, 0xd0, 0x4f, 0xf6, 0x19, 0xf6, 0x16, 0x24, 0xc3, 0x2e, + 0xce, 0xb0, 0x02, 0x2d, 0xa1, 0x23, 0xf5, 0x5f, 0xfd, 0xc7, 0x98, 0xdb, 0xc0, 0x24, 0x4f, 0x36, + 0x37, 0x65, 0x9a, 0xbf, 0x05, 0x50, 0xfe, 0x45, 0x43, 0xa7, 0x50, 0x2f, 0x6e, 0x00, 0x93, 0xaf, + 0xd1, 0x3f, 0xb8, 0x2b, 0x0a, 0xc5, 0x2f, 0x28, 0x6c, 0x4a, 0xc1, 0xd6, 0x41, 0x1f, 0xe0, 0x51, + 0x8a, 0x17, 0x71, 0x40, 0x9c, 0x98, 0x24, 0x7e, 0xe4, 0x2a, 0x25, 0xd6, 0xf0, 0x93, 0x75, 0xf2, + 0x7a, 0x7f, 0xba, 0x6a, 0xbe, 0x3f, 0x66, 0x9d, 0xf3, 0x67, 0x8c, 0x8e, 0xde, 0x43, 0xdd, 0x0f, + 0x3d, 0x92, 0x66, 0x8e, 0x4b, 0x02, 0xbc, 0x62, 0xb2, 0xde, 0x9b, 0x2e, 0x71, 0xba, 0x4a, 0xd9, + 0xed, 0x29, 0xc0, 0x56, 0x63, 0x74, 0x08, 0x07, 0x13, 0xcd, 0x32, 0xf5, 0x33, 0xe7, 0x5c, 0x37, + 0x54, 0xc7, 0x36, 0x2e, 0x66, 0xda, 0x99, 0xfe, 0x51, 0xd7, 0x54, 0xf9, 0x01, 0xaa, 0x41, 0x75, + 0x34, 0xb0, 0x47, 0x9a, 0x2c, 0x50, 0x53, 0xd5, 0xc6, 0xd6, 0x40, 0x2e, 0xa1, 0x06, 0xc0, 0x99, + 0x3d, 0xb1, 0xc7, 0x03, 0x4b, 0xbf, 0xd4, 0xe4, 0x72, 0xfb, 0x3b, 0xd4, 0x36, 0x5a, 0xa3, 0x26, + 0xec, 0x5f, 0x0e, 0xc6, 0xb6, 0xe6, 0x58, 0x57, 0x33, 0x6d, 0xa7, 0xdc, 0x1e, 0x54, 0x86, 0xd3, + 0xe9, 0x98, 0x57, 0xd3, 0x0d, 0xeb, 0xe4, 0xa5, 0x5c, 0x42, 0x00, 0xa2, 0x3a, 0xb5, 0x87, 0x63, + 0x4d, 0x2e, 0x53, 0xfb, 0xc2, 0x32, 0x75, 0x63, 0x24, 0x57, 0x90, 0x0c, 0x75, 0x55, 0xa7, 0xde, + 0xd0, 0xb6, 0xf4, 0xa9, 0x21, 0x57, 0x69, 0xd2, 0x64, 0x6a, 0x68, 0x57, 0xb2, 0xd8, 0xfe, 0x21, + 0x80, 0xc8, 0x87, 0xd8, 0xdc, 0x4a, 0xb9, 0x70, 0x2b, 0x27, 0x3b, 0xb7, 0x72, 0xf4, 0xf7, 0x2a, + 0xf0, 0x93, 0x49, 0xb5, 0x30, 0x4b, 0x56, 0xeb, 0x73, 0x69, 0xbe, 0x05, 0xa9, 0x10, 0x46, 0x32, + 0x94, 0x6f, 0xc8, 0x2a, 0xbf, 0x4c, 0x6a, 0xa2, 0xc7, 0x50, 0x65, 0xbb, 0xcc, 0x14, 0xab, 0x99, + 0xdc, 0x39, 0x2d, 0xbd, 0x11, 0x86, 0x0e, 0x34, 0xe6, 0xd1, 0xa2, 0xf0, 0xce, 0x50, 0xe2, 0x0f, + 0xcd, 0xa8, 0x1a, 0x33, 0xe1, 0xcb, 0xeb, 0x1c, 0xf2, 0xa2, 0x00, 0x87, 0x5e, 0x37, 0x4a, 0xbc, + 0x9e, 0x47, 0x42, 0xa6, 0x55, 0x8f, 0x43, 0x38, 0xf6, 0xd3, 0xc2, 0xff, 0xd2, 0x3b, 0xfe, 0xf3, + 0xb3, 0x54, 0x19, 0x0d, 0x66, 0xfa, 0xb5, 0xc8, 0xa8, 0xc7, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, + 0xee, 0x00, 0xfa, 0x56, 0xc1, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/monitoredres/monitored_resource.pb.go b/vendor/google.golang.org/genproto/googleapis/api/monitoredres/monitored_resource.pb.go new file mode 100644 index 0000000..8423d37 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/monitoredres/monitored_resource.pb.go @@ -0,0 +1,288 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/monitored_resource.proto + +package monitoredres // import "google.golang.org/genproto/googleapis/api/monitoredres" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import label "google.golang.org/genproto/googleapis/api/label" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An object that describes the schema of a [MonitoredResource][google.api.MonitoredResource] object using a +// type name and a set of labels. For example, the monitored resource +// descriptor for Google Compute Engine VM instances has a type of +// `"gce_instance"` and specifies the use of the labels `"instance_id"` and +// `"zone"` to identify particular VM instances. +// +// Different APIs can support different monitored resource types. APIs generally +// provide a `list` method that returns the monitored resource descriptors used +// by the API. +type MonitoredResourceDescriptor struct { + // Optional. The resource name of the monitored resource descriptor: + // `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where + // {type} is the value of the `type` field in this object and + // {project_id} is a project ID that provides API-specific context for + // accessing the type. APIs that do not use project information can use the + // resource name format `"monitoredResourceDescriptors/{type}"`. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + // Required. The monitored resource type. For example, the type + // `"cloudsql_database"` represents databases in Google Cloud SQL. + // The maximum length of this value is 256 characters. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Optional. A concise name for the monitored resource type that might be + // displayed in user interfaces. It should be a Title Cased Noun Phrase, + // without any article or other determiners. For example, + // `"Google Cloud SQL Database"`. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. A detailed description of the monitored resource type that might + // be used in documentation. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Required. A set of labels used to describe instances of this monitored + // resource type. For example, an individual Google Cloud SQL database is + // identified by values for the labels `"database_id"` and `"zone"`. 
+ Labels []*label.LabelDescriptor `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MonitoredResourceDescriptor) Reset() { *m = MonitoredResourceDescriptor{} } +func (m *MonitoredResourceDescriptor) String() string { return proto.CompactTextString(m) } +func (*MonitoredResourceDescriptor) ProtoMessage() {} +func (*MonitoredResourceDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_monitored_resource_d0f80e27b3474083, []int{0} +} +func (m *MonitoredResourceDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MonitoredResourceDescriptor.Unmarshal(m, b) +} +func (m *MonitoredResourceDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MonitoredResourceDescriptor.Marshal(b, m, deterministic) +} +func (dst *MonitoredResourceDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_MonitoredResourceDescriptor.Merge(dst, src) +} +func (m *MonitoredResourceDescriptor) XXX_Size() int { + return xxx_messageInfo_MonitoredResourceDescriptor.Size(m) +} +func (m *MonitoredResourceDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_MonitoredResourceDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_MonitoredResourceDescriptor proto.InternalMessageInfo + +func (m *MonitoredResourceDescriptor) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *MonitoredResourceDescriptor) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *MonitoredResourceDescriptor) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *MonitoredResourceDescriptor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *MonitoredResourceDescriptor) GetLabels() []*label.LabelDescriptor { + if m != nil { + return m.Labels + } + return nil +} + +// An object representing a resource that can be used for monitoring, logging, +// billing, or other purposes. Examples include virtual machine instances, +// databases, and storage devices such as disks. The `type` field identifies a +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object that describes the resource's +// schema. Information in the `labels` field identifies the actual resource and +// its attributes according to the schema. For example, a particular Compute +// Engine VM instance could be represented by the following object, because the +// [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] for `"gce_instance"` has labels +// `"instance_id"` and `"zone"`: +// +// { "type": "gce_instance", +// "labels": { "instance_id": "12345678901234", +// "zone": "us-central1-a" }} +type MonitoredResource struct { + // Required. The monitored resource type. This field must match + // the `type` field of a [MonitoredResourceDescriptor][google.api.MonitoredResourceDescriptor] object. For + // example, the type of a Compute Engine VM instance is `gce_instance`. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Required. Values for all of the labels listed in the associated monitored + // resource descriptor. For example, Compute Engine VM instances use the + // labels `"project_id"`, `"instance_id"`, and `"zone"`. 
+ Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MonitoredResource) Reset() { *m = MonitoredResource{} } +func (m *MonitoredResource) String() string { return proto.CompactTextString(m) } +func (*MonitoredResource) ProtoMessage() {} +func (*MonitoredResource) Descriptor() ([]byte, []int) { + return fileDescriptor_monitored_resource_d0f80e27b3474083, []int{1} +} +func (m *MonitoredResource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MonitoredResource.Unmarshal(m, b) +} +func (m *MonitoredResource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MonitoredResource.Marshal(b, m, deterministic) +} +func (dst *MonitoredResource) XXX_Merge(src proto.Message) { + xxx_messageInfo_MonitoredResource.Merge(dst, src) +} +func (m *MonitoredResource) XXX_Size() int { + return xxx_messageInfo_MonitoredResource.Size(m) +} +func (m *MonitoredResource) XXX_DiscardUnknown() { + xxx_messageInfo_MonitoredResource.DiscardUnknown(m) +} + +var xxx_messageInfo_MonitoredResource proto.InternalMessageInfo + +func (m *MonitoredResource) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *MonitoredResource) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// Auxiliary metadata for a [MonitoredResource][google.api.MonitoredResource] object. +// [MonitoredResource][google.api.MonitoredResource] objects contain the minimum set of information to +// uniquely identify a monitored resource instance. There is some other useful +// auxiliary metadata. Monitoring and Logging use an ingestion +// pipeline to extract metadata for cloud resources of all types, and store +// the metadata in this message. +type MonitoredResourceMetadata struct { + // Output only. Values for predefined system metadata labels. + // System labels are a kind of metadata extracted by Google, including + // "machine_image", "vpc", "subnet_id", + // "security_group", "name", etc. + // System label values can be only strings, Boolean values, or a list of + // strings. For example: + // + // { "name": "my-test-instance", + // "security_group": ["a", "b", "c"], + // "spot_instance": false } + SystemLabels *_struct.Struct `protobuf:"bytes,1,opt,name=system_labels,json=systemLabels,proto3" json:"system_labels,omitempty"` + // Output only. A map of user-defined metadata labels. 
+ UserLabels map[string]string `protobuf:"bytes,2,rep,name=user_labels,json=userLabels,proto3" json:"user_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MonitoredResourceMetadata) Reset() { *m = MonitoredResourceMetadata{} } +func (m *MonitoredResourceMetadata) String() string { return proto.CompactTextString(m) } +func (*MonitoredResourceMetadata) ProtoMessage() {} +func (*MonitoredResourceMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_monitored_resource_d0f80e27b3474083, []int{2} +} +func (m *MonitoredResourceMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MonitoredResourceMetadata.Unmarshal(m, b) +} +func (m *MonitoredResourceMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MonitoredResourceMetadata.Marshal(b, m, deterministic) +} +func (dst *MonitoredResourceMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_MonitoredResourceMetadata.Merge(dst, src) +} +func (m *MonitoredResourceMetadata) XXX_Size() int { + return xxx_messageInfo_MonitoredResourceMetadata.Size(m) +} +func (m *MonitoredResourceMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_MonitoredResourceMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_MonitoredResourceMetadata proto.InternalMessageInfo + +func (m *MonitoredResourceMetadata) GetSystemLabels() *_struct.Struct { + if m != nil { + return m.SystemLabels + } + return nil +} + +func (m *MonitoredResourceMetadata) GetUserLabels() map[string]string { + if m != nil { + return m.UserLabels + } + return nil +} + +func init() { + proto.RegisterType((*MonitoredResourceDescriptor)(nil), "google.api.MonitoredResourceDescriptor") + proto.RegisterType((*MonitoredResource)(nil), "google.api.MonitoredResource") + proto.RegisterMapType((map[string]string)(nil), "google.api.MonitoredResource.LabelsEntry") + proto.RegisterType((*MonitoredResourceMetadata)(nil), "google.api.MonitoredResourceMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.api.MonitoredResourceMetadata.UserLabelsEntry") +} + +func init() { + proto.RegisterFile("google/api/monitored_resource.proto", fileDescriptor_monitored_resource_d0f80e27b3474083) +} + +var fileDescriptor_monitored_resource_d0f80e27b3474083 = []byte{ + // 415 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x4d, 0xab, 0xd3, 0x40, + 0x14, 0x65, 0xd2, 0x0f, 0xf0, 0xa6, 0x7e, 0x0d, 0x52, 0x63, 0xea, 0xa2, 0xd6, 0x4d, 0xdd, 0x24, + 0xd0, 0x22, 0xf8, 0xb9, 0x68, 0x55, 0x44, 0xb0, 0x52, 0x22, 0xba, 0x70, 0x13, 0xa6, 0xc9, 0x18, + 0x82, 0x49, 0x26, 0xcc, 0x4c, 0x84, 0xfc, 0x1d, 0xc1, 0xdf, 0xe1, 0x5f, 0x72, 0xe9, 0x52, 0x32, + 0x33, 0x69, 0xd3, 0x97, 0xc7, 0x83, 0xb7, 0xbb, 0xf7, 0xdc, 0x73, 0xcf, 0x3d, 0x27, 0x43, 0xe0, + 0x71, 0xc2, 0x58, 0x92, 0x51, 0x9f, 0x94, 0xa9, 0x9f, 0xb3, 0x22, 0x95, 0x8c, 0xd3, 0x38, 0xe4, + 0x54, 0xb0, 0x8a, 0x47, 0xd4, 0x2b, 0x39, 0x93, 0x0c, 0x83, 0x26, 0x79, 0xa4, 0x4c, 0xdd, 0x69, + 0x67, 0x21, 0x23, 0x07, 0x9a, 0x69, 0x8e, 0xfb, 0xd0, 0xe0, 0xaa, 0x3b, 0x54, 0xdf, 0x7d, 0x21, + 0x79, 0x15, 0x49, 0x3d, 0x5d, 0xfc, 0x41, 0x30, 0xdb, 0xb5, 0xf2, 0x81, 0x51, 0x7f, 0x4b, 0x45, + 0xc4, 0xd3, 0x52, 0x32, 0x8e, 0x31, 0x0c, 0x0b, 0x92, 0x53, 0x67, 0x34, 0x47, 0xcb, 0x1b, 0x81, + 0xaa, 0x1b, 0x4c, 0xd6, 0x25, 0x75, 0x90, 0xc6, 0x9a, 0x1a, 0x3f, 0x82, 0x49, 0x9c, 0x8a, 0x32, 
+ 0x23, 0x75, 0xa8, 0xf8, 0x96, 0x9a, 0xd9, 0x06, 0xfb, 0xd4, 0xac, 0xcd, 0xc1, 0x8e, 0x8d, 0x70, + 0xca, 0x0a, 0x67, 0x60, 0x18, 0x27, 0x08, 0xaf, 0x61, 0xac, 0x9c, 0x0b, 0x67, 0x38, 0x1f, 0x2c, + 0xed, 0xd5, 0xcc, 0x3b, 0xe5, 0xf3, 0x3e, 0x36, 0x93, 0x93, 0xb3, 0xc0, 0x50, 0x17, 0xbf, 0x11, + 0xdc, 0xed, 0x25, 0xb8, 0xd4, 0xe3, 0xe6, 0x28, 0x6f, 0x29, 0xf9, 0x27, 0x5d, 0xf9, 0x9e, 0x84, + 0x3e, 0x28, 0xde, 0x15, 0x92, 0xd7, 0xed, 0x31, 0xf7, 0x39, 0xd8, 0x1d, 0x18, 0xdf, 0x81, 0xc1, + 0x0f, 0x5a, 0x9b, 0x23, 0x4d, 0x89, 0xef, 0xc1, 0xe8, 0x27, 0xc9, 0xaa, 0xf6, 0x03, 0xe8, 0xe6, + 0x85, 0xf5, 0x0c, 0x2d, 0xfe, 0x22, 0x78, 0xd0, 0x3b, 0xb2, 0xa3, 0x92, 0xc4, 0x44, 0x12, 0xfc, + 0x0a, 0x6e, 0x8a, 0x5a, 0x48, 0x9a, 0x87, 0xc6, 0x62, 0xa3, 0x69, 0xaf, 0xee, 0xb7, 0x16, 0xdb, + 0xd7, 0xf3, 0x3e, 0xab, 0xd7, 0x0b, 0x26, 0x9a, 0xad, 0xcd, 0xe0, 0xaf, 0x60, 0x57, 0x82, 0xf2, + 0xf0, 0x2c, 0xde, 0xd3, 0x2b, 0xe3, 0xb5, 0x97, 0xbd, 0x2f, 0x82, 0xf2, 0x6e, 0x54, 0xa8, 0x8e, + 0x80, 0xfb, 0x1a, 0x6e, 0x5f, 0x18, 0x5f, 0x27, 0xf2, 0xb6, 0x86, 0x5b, 0x11, 0xcb, 0x3b, 0x36, + 0xb6, 0xd3, 0x9e, 0x8f, 0x7d, 0x13, 0x6c, 0x8f, 0xbe, 0xbd, 0x31, 0xac, 0x84, 0x65, 0xa4, 0x48, + 0x3c, 0xc6, 0x13, 0x3f, 0xa1, 0x85, 0x8a, 0xed, 0xeb, 0x11, 0x29, 0x53, 0x71, 0xfe, 0x3b, 0x70, + 0x2a, 0x5e, 0x76, 0x9b, 0x7f, 0x08, 0xfd, 0xb2, 0x86, 0xef, 0x37, 0xfb, 0x0f, 0x87, 0xb1, 0xda, + 0x5c, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x10, 0x16, 0x7c, 0xe9, 0x47, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/auth.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/auth.pb.go new file mode 100644 index 0000000..4cac698 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/auth.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/auth.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Authentication` defines the authentication configuration for an API. +// +// Example for an API targeted for external use: +// +// name: calendar.googleapis.com +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +type Authentication struct { + // A list of authentication rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*AuthenticationRule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules,omitempty"` + // Defines a set of authentication providers that a service supports. 
+ Providers []*AuthProvider `protobuf:"bytes,4,rep,name=providers,proto3" json:"providers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Authentication) Reset() { *m = Authentication{} } +func (m *Authentication) String() string { return proto.CompactTextString(m) } +func (*Authentication) ProtoMessage() {} +func (*Authentication) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_20a218c05ef5a30e, []int{0} +} +func (m *Authentication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Authentication.Unmarshal(m, b) +} +func (m *Authentication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Authentication.Marshal(b, m, deterministic) +} +func (dst *Authentication) XXX_Merge(src proto.Message) { + xxx_messageInfo_Authentication.Merge(dst, src) +} +func (m *Authentication) XXX_Size() int { + return xxx_messageInfo_Authentication.Size(m) +} +func (m *Authentication) XXX_DiscardUnknown() { + xxx_messageInfo_Authentication.DiscardUnknown(m) +} + +var xxx_messageInfo_Authentication proto.InternalMessageInfo + +func (m *Authentication) GetRules() []*AuthenticationRule { + if m != nil { + return m.Rules + } + return nil +} + +func (m *Authentication) GetProviders() []*AuthProvider { + if m != nil { + return m.Providers + } + return nil +} + +// Authentication rules for the service. +// +// By default, if a method has any authentication requirements, every request +// must include a valid credential matching one of the requirements. +// It's an error to include more than one kind of credential in a single +// request. +// +// If a method doesn't have any auth requirements, request credentials will be +// ignored. +type AuthenticationRule struct { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // The requirements for OAuth credentials. + Oauth *OAuthRequirements `protobuf:"bytes,2,opt,name=oauth,proto3" json:"oauth,omitempty"` + // If true, the service accepts API keys without any other credential. + AllowWithoutCredential bool `protobuf:"varint,5,opt,name=allow_without_credential,json=allowWithoutCredential,proto3" json:"allow_without_credential,omitempty"` + // Requirements for additional authentication providers. 
+ Requirements []*AuthRequirement `protobuf:"bytes,7,rep,name=requirements,proto3" json:"requirements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthenticationRule) Reset() { *m = AuthenticationRule{} } +func (m *AuthenticationRule) String() string { return proto.CompactTextString(m) } +func (*AuthenticationRule) ProtoMessage() {} +func (*AuthenticationRule) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_20a218c05ef5a30e, []int{1} +} +func (m *AuthenticationRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthenticationRule.Unmarshal(m, b) +} +func (m *AuthenticationRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthenticationRule.Marshal(b, m, deterministic) +} +func (dst *AuthenticationRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthenticationRule.Merge(dst, src) +} +func (m *AuthenticationRule) XXX_Size() int { + return xxx_messageInfo_AuthenticationRule.Size(m) +} +func (m *AuthenticationRule) XXX_DiscardUnknown() { + xxx_messageInfo_AuthenticationRule.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthenticationRule proto.InternalMessageInfo + +func (m *AuthenticationRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *AuthenticationRule) GetOauth() *OAuthRequirements { + if m != nil { + return m.Oauth + } + return nil +} + +func (m *AuthenticationRule) GetAllowWithoutCredential() bool { + if m != nil { + return m.AllowWithoutCredential + } + return false +} + +func (m *AuthenticationRule) GetRequirements() []*AuthRequirement { + if m != nil { + return m.Requirements + } + return nil +} + +// Configuration for an authentication provider, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +type AuthProvider struct { + // The unique identifier of the auth provider. It will be referred to by + // `AuthRequirement.provider_id`. + // + // Example: "bookstore_auth". + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Identifies the principal that issued the JWT. See + // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.1 + // Usually a URL or an email address. + // + // Example: https://securetoken.google.com + // Example: 1234567-compute@developer.gserviceaccount.com + Issuer string `protobuf:"bytes,2,opt,name=issuer,proto3" json:"issuer,omitempty"` + // URL of the provider's public key set to validate signature of the JWT. See + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). + // Optional if the key set document: + // - can be retrieved from + // [OpenID + // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html of + // the issuer. + // - can be inferred from the email domain of the issuer (e.g. a Google + // service account). + // + // Example: https://www.googleapis.com/oauth2/v1/certs + JwksUri string `protobuf:"bytes,3,opt,name=jwks_uri,json=jwksUri,proto3" json:"jwks_uri,omitempty"` + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, only JWTs with audience + // "https://[Service_name][google.api.Service.name]/[API_name][google.protobuf.Api.name]" + // will be accepted. 
For example, if no audiences are in the setting, + // LibraryService API will only accept JWTs with the following audience + // "https://library-example.googleapis.com/google.example.library.v1.LibraryService". + // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + Audiences string `protobuf:"bytes,4,opt,name=audiences,proto3" json:"audiences,omitempty"` + // Redirect URL if JWT token is required but not present or is expired. + // Implement authorizationUrl of securityDefinitions in OpenAPI spec. + AuthorizationUrl string `protobuf:"bytes,5,opt,name=authorization_url,json=authorizationUrl,proto3" json:"authorization_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthProvider) Reset() { *m = AuthProvider{} } +func (m *AuthProvider) String() string { return proto.CompactTextString(m) } +func (*AuthProvider) ProtoMessage() {} +func (*AuthProvider) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_20a218c05ef5a30e, []int{2} +} +func (m *AuthProvider) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthProvider.Unmarshal(m, b) +} +func (m *AuthProvider) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthProvider.Marshal(b, m, deterministic) +} +func (dst *AuthProvider) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthProvider.Merge(dst, src) +} +func (m *AuthProvider) XXX_Size() int { + return xxx_messageInfo_AuthProvider.Size(m) +} +func (m *AuthProvider) XXX_DiscardUnknown() { + xxx_messageInfo_AuthProvider.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthProvider proto.InternalMessageInfo + +func (m *AuthProvider) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *AuthProvider) GetIssuer() string { + if m != nil { + return m.Issuer + } + return "" +} + +func (m *AuthProvider) GetJwksUri() string { + if m != nil { + return m.JwksUri + } + return "" +} + +func (m *AuthProvider) GetAudiences() string { + if m != nil { + return m.Audiences + } + return "" +} + +func (m *AuthProvider) GetAuthorizationUrl() string { + if m != nil { + return m.AuthorizationUrl + } + return "" +} + +// OAuth scopes are a way to define data and permissions on data. For example, +// there are scopes defined for "Read-only access to Google Calendar" and +// "Access to Cloud Platform". Users can consent to a scope for an application, +// giving it permission to access that data on their behalf. +// +// OAuth scope specifications should be fairly coarse grained; a user will need +// to see and understand the text description of what your scope means. +// +// In most cases: use one or at most two OAuth scopes for an entire family of +// products. If your product has multiple APIs, you should probably be sharing +// the OAuth scope across all of those APIs. +// +// When you need finer grained OAuth consent screens: talk with your product +// management about how developers will use them in practice. +// +// Please note that even though each of the canonical scopes is enough for a +// request to be accepted and passed to the backend, a request can still fail +// due to the backend requiring additional scopes or permissions. +type OAuthRequirements struct { + // The list of publicly documented OAuth scopes that are allowed access. An + // OAuth token containing any of these scopes will be accepted. 
+ // + // Example: + // + // canonical_scopes: https://www.googleapis.com/auth/calendar, + // https://www.googleapis.com/auth/calendar.read + CanonicalScopes string `protobuf:"bytes,1,opt,name=canonical_scopes,json=canonicalScopes,proto3" json:"canonical_scopes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OAuthRequirements) Reset() { *m = OAuthRequirements{} } +func (m *OAuthRequirements) String() string { return proto.CompactTextString(m) } +func (*OAuthRequirements) ProtoMessage() {} +func (*OAuthRequirements) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_20a218c05ef5a30e, []int{3} +} +func (m *OAuthRequirements) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OAuthRequirements.Unmarshal(m, b) +} +func (m *OAuthRequirements) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OAuthRequirements.Marshal(b, m, deterministic) +} +func (dst *OAuthRequirements) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuthRequirements.Merge(dst, src) +} +func (m *OAuthRequirements) XXX_Size() int { + return xxx_messageInfo_OAuthRequirements.Size(m) +} +func (m *OAuthRequirements) XXX_DiscardUnknown() { + xxx_messageInfo_OAuthRequirements.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuthRequirements proto.InternalMessageInfo + +func (m *OAuthRequirements) GetCanonicalScopes() string { + if m != nil { + return m.CanonicalScopes + } + return "" +} + +// User-defined authentication requirements, including support for +// [JSON Web Token +// (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32). +type AuthRequirement struct { + // [id][google.api.AuthProvider.id] from authentication provider. + // + // Example: + // + // provider_id: bookstore_auth + ProviderId string `protobuf:"bytes,1,opt,name=provider_id,json=providerId,proto3" json:"provider_id,omitempty"` + // NOTE: This will be deprecated soon, once AuthProvider.audiences is + // implemented and accepted in all the runtime components. + // + // The list of JWT + // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-4.1.3). + // that are allowed to access. A JWT containing any of these audiences will + // be accepted. When this setting is absent, only JWTs with audience + // "https://[Service_name][google.api.Service.name]/[API_name][google.protobuf.Api.name]" + // will be accepted. For example, if no audiences are in the setting, + // LibraryService API will only accept JWTs with the following audience + // "https://library-example.googleapis.com/google.example.library.v1.LibraryService". 
+ // + // Example: + // + // audiences: bookstore_android.apps.googleusercontent.com, + // bookstore_web.apps.googleusercontent.com + Audiences string `protobuf:"bytes,2,opt,name=audiences,proto3" json:"audiences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthRequirement) Reset() { *m = AuthRequirement{} } +func (m *AuthRequirement) String() string { return proto.CompactTextString(m) } +func (*AuthRequirement) ProtoMessage() {} +func (*AuthRequirement) Descriptor() ([]byte, []int) { + return fileDescriptor_auth_20a218c05ef5a30e, []int{4} +} +func (m *AuthRequirement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthRequirement.Unmarshal(m, b) +} +func (m *AuthRequirement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthRequirement.Marshal(b, m, deterministic) +} +func (dst *AuthRequirement) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthRequirement.Merge(dst, src) +} +func (m *AuthRequirement) XXX_Size() int { + return xxx_messageInfo_AuthRequirement.Size(m) +} +func (m *AuthRequirement) XXX_DiscardUnknown() { + xxx_messageInfo_AuthRequirement.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthRequirement proto.InternalMessageInfo + +func (m *AuthRequirement) GetProviderId() string { + if m != nil { + return m.ProviderId + } + return "" +} + +func (m *AuthRequirement) GetAudiences() string { + if m != nil { + return m.Audiences + } + return "" +} + +func init() { + proto.RegisterType((*Authentication)(nil), "google.api.Authentication") + proto.RegisterType((*AuthenticationRule)(nil), "google.api.AuthenticationRule") + proto.RegisterType((*AuthProvider)(nil), "google.api.AuthProvider") + proto.RegisterType((*OAuthRequirements)(nil), "google.api.OAuthRequirements") + proto.RegisterType((*AuthRequirement)(nil), "google.api.AuthRequirement") +} + +func init() { proto.RegisterFile("google/api/auth.proto", fileDescriptor_auth_20a218c05ef5a30e) } + +var fileDescriptor_auth_20a218c05ef5a30e = []byte{ + // 452 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x52, 0x4d, 0x6b, 0xdb, 0x40, + 0x10, 0x45, 0x72, 0x9c, 0x58, 0xe3, 0xe0, 0x24, 0x0b, 0x0d, 0xea, 0xb7, 0xd1, 0xc9, 0xa5, 0x20, + 0x43, 0x52, 0x4a, 0xa1, 0xd0, 0x92, 0x94, 0x52, 0x72, 0xaa, 0xd9, 0x12, 0x0a, 0xbd, 0x88, 0xed, + 0x6a, 0x2b, 0x6f, 0xb3, 0xd1, 0xa8, 0xfb, 0x11, 0x43, 0x0f, 0xfd, 0x25, 0x3d, 0xf5, 0x97, 0xf5, + 0xa7, 0x04, 0xad, 0x14, 0x5b, 0x72, 0x8e, 0x33, 0xef, 0xbd, 0x99, 0x79, 0x6f, 0x17, 0x1e, 0x14, + 0x88, 0x85, 0x12, 0x73, 0x56, 0xc9, 0x39, 0x73, 0x76, 0x99, 0x56, 0x1a, 0x2d, 0x12, 0x68, 0xda, + 0x29, 0xab, 0x64, 0xf2, 0x07, 0x26, 0x67, 0xce, 0x2e, 0x45, 0x69, 0x25, 0x67, 0x56, 0x62, 0x49, + 0x5e, 0xc1, 0x50, 0x3b, 0x25, 0x4c, 0x3c, 0x98, 0x0e, 0x66, 0xe3, 0x93, 0x67, 0xe9, 0x86, 0x9d, + 0xf6, 0xa9, 0xd4, 0x29, 0x41, 0x1b, 0x32, 0x79, 0x0d, 0x51, 0xa5, 0xf1, 0x46, 0xe6, 0x42, 0x9b, + 0x78, 0xc7, 0x2b, 0xe3, 0x6d, 0xe5, 0xa2, 0x25, 0xd0, 0x0d, 0x35, 0xf9, 0x1f, 0x00, 0xb9, 0x3f, + 0x95, 0x3c, 0x82, 0x91, 0x11, 0x4a, 0x70, 0x8b, 0x3a, 0x0e, 0xa6, 0xc1, 0x2c, 0xa2, 0xeb, 0x9a, + 0x9c, 0xc2, 0x10, 0x6b, 0x37, 0x71, 0x38, 0x0d, 0x66, 0xe3, 0x93, 0xa7, 0xdd, 0x35, 0x9f, 0xeb, + 0x59, 0x54, 0xfc, 0x72, 0x52, 0x8b, 0x6b, 0x51, 0x5a, 0x43, 0x1b, 0x2e, 0x79, 0x03, 0x31, 0x53, + 0x0a, 0x57, 0xd9, 0x4a, 0xda, 0x25, 0x3a, 0x9b, 0x71, 0x2d, 0xf2, 0x7a, 0x29, 0x53, 0xf1, 0x70, + 0x1a, 0xcc, 0x46, 0xf4, 0xd8, 0xe3, 0x5f, 0x1b, 0xf8, 
0xc3, 0x1a, 0x25, 0xef, 0x61, 0x5f, 0x77, + 0x06, 0xc6, 0x7b, 0xde, 0xdc, 0xe3, 0x6d, 0x73, 0x9d, 0xa5, 0xb4, 0x27, 0x48, 0xfe, 0x06, 0xb0, + 0xdf, 0xb5, 0x4f, 0x26, 0x10, 0xca, 0xbc, 0xb5, 0x15, 0xca, 0x9c, 0x1c, 0xc3, 0xae, 0x34, 0xc6, + 0x09, 0xed, 0x1d, 0x45, 0xb4, 0xad, 0xc8, 0x43, 0x18, 0xfd, 0x5c, 0x5d, 0x99, 0xcc, 0x69, 0x19, + 0x0f, 0x3c, 0xb2, 0x57, 0xd7, 0x97, 0x5a, 0x92, 0x27, 0x10, 0x31, 0x97, 0x4b, 0x51, 0x72, 0x51, + 0xc7, 0x5d, 0x63, 0x9b, 0x06, 0x79, 0x09, 0x47, 0xb5, 0x69, 0xd4, 0xf2, 0xb7, 0x8f, 0x34, 0x73, + 0xba, 0x71, 0x19, 0xd1, 0xc3, 0x1e, 0x70, 0xa9, 0x55, 0xf2, 0x0e, 0x8e, 0xee, 0xa5, 0x46, 0x5e, + 0xc0, 0x21, 0x67, 0x25, 0x96, 0x92, 0x33, 0x95, 0x19, 0x8e, 0x95, 0x30, 0xed, 0xc1, 0x07, 0xeb, + 0xfe, 0x17, 0xdf, 0x4e, 0x16, 0x70, 0xb0, 0x25, 0x27, 0xcf, 0x61, 0x7c, 0xf7, 0xc2, 0xd9, 0xda, + 0x29, 0xdc, 0xb5, 0x2e, 0xf2, 0xfe, 0xf9, 0xe1, 0xd6, 0xf9, 0xe7, 0x57, 0x30, 0xe1, 0x78, 0xdd, + 0x09, 0xf8, 0x3c, 0x6a, 0xf3, 0xb3, 0xb8, 0x08, 0xbe, 0x7d, 0x6c, 0x81, 0x02, 0x15, 0x2b, 0x8b, + 0x14, 0x75, 0x31, 0x2f, 0x44, 0xe9, 0xbf, 0xf6, 0xbc, 0x81, 0x58, 0x25, 0x8d, 0xff, 0xf4, 0x46, + 0xe8, 0x1b, 0xc9, 0x05, 0xc7, 0xf2, 0x87, 0x2c, 0xde, 0xf6, 0xaa, 0x7f, 0xe1, 0xce, 0xa7, 0xb3, + 0xc5, 0xc5, 0xf7, 0x5d, 0x2f, 0x3c, 0xbd, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x54, 0x91, 0x94, 0x96, + 0x2c, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/backend.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/backend.pb.go new file mode 100644 index 0000000..1d24457 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/backend.pb.go @@ -0,0 +1,350 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/backend.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Path Translation specifies how to combine the backend address with the +// request path in order to produce the appropriate forwarding URL for the +// request. +// +// Path Translation is applicable only to HTTP-based backends. Backends which +// do not accept requests over HTTP/HTTPS should leave `path_translation` +// unspecified. +type BackendRule_PathTranslation int32 + +const ( + BackendRule_PATH_TRANSLATION_UNSPECIFIED BackendRule_PathTranslation = 0 + // Use the backend address as-is, with no modification to the path. If the + // URL pattern contains variables, the variable names and values will be + // appended to the query string. If a query string parameter and a URL + // pattern variable have the same name, this may result in duplicate keys in + // the query string. 
+ // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.cloudfunctions.net/getUser + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.cloudfunctions.net/getUser?cid=widgetworks&uid=johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.cloudfunctions.net/getUser?timezone=EST&cid=widgetworks&uid=johndoe + BackendRule_CONSTANT_ADDRESS BackendRule_PathTranslation = 1 + // The request path will be appended to the backend address. + // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.appspot.com + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe?timezone=EST + BackendRule_APPEND_PATH_TO_ADDRESS BackendRule_PathTranslation = 2 +) + +var BackendRule_PathTranslation_name = map[int32]string{ + 0: "PATH_TRANSLATION_UNSPECIFIED", + 1: "CONSTANT_ADDRESS", + 2: "APPEND_PATH_TO_ADDRESS", +} +var BackendRule_PathTranslation_value = map[string]int32{ + "PATH_TRANSLATION_UNSPECIFIED": 0, + "CONSTANT_ADDRESS": 1, + "APPEND_PATH_TO_ADDRESS": 2, +} + +func (x BackendRule_PathTranslation) String() string { + return proto.EnumName(BackendRule_PathTranslation_name, int32(x)) +} +func (BackendRule_PathTranslation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_backend_151dd2805952955b, []int{1, 0} +} + +// `Backend` defines the backend configuration for a service. +type Backend struct { + // A list of API backend rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*BackendRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Backend) Reset() { *m = Backend{} } +func (m *Backend) String() string { return proto.CompactTextString(m) } +func (*Backend) ProtoMessage() {} +func (*Backend) Descriptor() ([]byte, []int) { + return fileDescriptor_backend_151dd2805952955b, []int{0} +} +func (m *Backend) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Backend.Unmarshal(m, b) +} +func (m *Backend) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Backend.Marshal(b, m, deterministic) +} +func (dst *Backend) XXX_Merge(src proto.Message) { + xxx_messageInfo_Backend.Merge(dst, src) +} +func (m *Backend) XXX_Size() int { + return xxx_messageInfo_Backend.Size(m) +} +func (m *Backend) XXX_DiscardUnknown() { + xxx_messageInfo_Backend.DiscardUnknown(m) +} + +var xxx_messageInfo_Backend proto.InternalMessageInfo + +func (m *Backend) GetRules() []*BackendRule { + if m != nil { + return m.Rules + } + return nil +} + +// A backend rule provides configuration for an individual API element. +type BackendRule struct { + // Selects the methods to which this rule applies. 
+ // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // The address of the API backend. + Address string `protobuf:"bytes,2,opt,name=address,proto3" json:"address,omitempty"` + // The number of seconds to wait for a response from a request. The default + // deadline for gRPC is infinite (no deadline) and HTTP requests is 5 seconds. + Deadline float64 `protobuf:"fixed64,3,opt,name=deadline,proto3" json:"deadline,omitempty"` + // Minimum deadline in seconds needed for this method. Calls having deadline + // value lower than this will be rejected. + MinDeadline float64 `protobuf:"fixed64,4,opt,name=min_deadline,json=minDeadline,proto3" json:"min_deadline,omitempty"` + // The number of seconds to wait for the completion of a long running + // operation. The default is no deadline. + OperationDeadline float64 `protobuf:"fixed64,5,opt,name=operation_deadline,json=operationDeadline,proto3" json:"operation_deadline,omitempty"` + PathTranslation BackendRule_PathTranslation `protobuf:"varint,6,opt,name=path_translation,json=pathTranslation,proto3,enum=google.api.BackendRule_PathTranslation" json:"path_translation,omitempty"` + // Authentication settings used by the backend. + // + // These are typically used to provide service management functionality to + // a backend served on a publicly-routable URL. The `authentication` + // details should match the authentication behavior used by the backend. + // + // For example, specifying `jwt_audience` implies that the backend expects + // authentication via a JWT. + // + // Types that are valid to be assigned to Authentication: + // *BackendRule_JwtAudience + Authentication isBackendRule_Authentication `protobuf_oneof:"authentication"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BackendRule) Reset() { *m = BackendRule{} } +func (m *BackendRule) String() string { return proto.CompactTextString(m) } +func (*BackendRule) ProtoMessage() {} +func (*BackendRule) Descriptor() ([]byte, []int) { + return fileDescriptor_backend_151dd2805952955b, []int{1} +} +func (m *BackendRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BackendRule.Unmarshal(m, b) +} +func (m *BackendRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BackendRule.Marshal(b, m, deterministic) +} +func (dst *BackendRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_BackendRule.Merge(dst, src) +} +func (m *BackendRule) XXX_Size() int { + return xxx_messageInfo_BackendRule.Size(m) +} +func (m *BackendRule) XXX_DiscardUnknown() { + xxx_messageInfo_BackendRule.DiscardUnknown(m) +} + +var xxx_messageInfo_BackendRule proto.InternalMessageInfo + +func (m *BackendRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *BackendRule) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *BackendRule) GetDeadline() float64 { + if m != nil { + return m.Deadline + } + return 0 +} + +func (m *BackendRule) GetMinDeadline() float64 { + if m != nil { + return m.MinDeadline + } + return 0 +} + +func (m *BackendRule) GetOperationDeadline() float64 { + if m != nil { + return m.OperationDeadline + } + return 0 +} + +func (m *BackendRule) GetPathTranslation() BackendRule_PathTranslation { + if m != nil { + return m.PathTranslation + } + return BackendRule_PATH_TRANSLATION_UNSPECIFIED 
+} + +type isBackendRule_Authentication interface { + isBackendRule_Authentication() +} + +type BackendRule_JwtAudience struct { + JwtAudience string `protobuf:"bytes,7,opt,name=jwt_audience,json=jwtAudience,proto3,oneof"` +} + +func (*BackendRule_JwtAudience) isBackendRule_Authentication() {} + +func (m *BackendRule) GetAuthentication() isBackendRule_Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *BackendRule) GetJwtAudience() string { + if x, ok := m.GetAuthentication().(*BackendRule_JwtAudience); ok { + return x.JwtAudience + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BackendRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BackendRule_OneofMarshaler, _BackendRule_OneofUnmarshaler, _BackendRule_OneofSizer, []interface{}{ + (*BackendRule_JwtAudience)(nil), + } +} + +func _BackendRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BackendRule) + // authentication + switch x := m.Authentication.(type) { + case *BackendRule_JwtAudience: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeStringBytes(x.JwtAudience) + case nil: + default: + return fmt.Errorf("BackendRule.Authentication has unexpected type %T", x) + } + return nil +} + +func _BackendRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BackendRule) + switch tag { + case 7: // authentication.jwt_audience + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Authentication = &BackendRule_JwtAudience{x} + return true, err + default: + return false, nil + } +} + +func _BackendRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BackendRule) + // authentication + switch x := m.Authentication.(type) { + case *BackendRule_JwtAudience: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.JwtAudience))) + n += len(x.JwtAudience) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Backend)(nil), "google.api.Backend") + proto.RegisterType((*BackendRule)(nil), "google.api.BackendRule") + proto.RegisterEnum("google.api.BackendRule_PathTranslation", BackendRule_PathTranslation_name, BackendRule_PathTranslation_value) +} + +func init() { proto.RegisterFile("google/api/backend.proto", fileDescriptor_backend_151dd2805952955b) } + +var fileDescriptor_backend_151dd2805952955b = []byte{ + // 408 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0x97, 0x76, 0x5b, 0xe1, 0xb5, 0xea, 0x82, 0x85, 0x20, 0x9a, 0x38, 0x84, 0x72, 0xa0, + 0x97, 0xa5, 0xd2, 0xb8, 0x20, 0x71, 0x4a, 0x97, 0xc0, 0x2a, 0xa1, 0x34, 0x72, 0xc2, 0x85, 0x4b, + 0xe4, 0x25, 0x8f, 0xd4, 0x23, 0xb5, 0x23, 0xc7, 0x65, 0xdf, 0x07, 0xbe, 0x28, 0xaa, 0x93, 0x65, + 0x2d, 0x12, 0xc7, 0xff, 0xfb, 0xfd, 0x9e, 0xad, 0xbf, 0xf4, 0xc0, 0x29, 0xa5, 0x2c, 0x2b, 0x5c, + 0xb0, 0x9a, 0x2f, 0xee, 0x58, 0xfe, 0x13, 0x45, 0xe1, 0xd5, 0x4a, 0x6a, 0x49, 0xa0, 0x25, 0x1e, + 0xab, 0xf9, 0xec, 0x23, 0x8c, 0x96, 0x2d, 0x24, 0x57, 0x70, 0xa6, 0x76, 0x15, 0x36, 0x8e, 0xe5, + 0x0e, 0xe7, 0xe3, 0xeb, 0xd7, 0xde, 0x93, 0xe6, 0x75, 0x0e, 0xdd, 0x55, 0x48, 0x5b, 0x6b, 0xf6, + 0x67, 0x08, 0xe3, 0x83, 0x31, 0xb9, 0x84, 0x67, 0x0d, 0x56, 0x98, 
0x6b, 0xa9, 0x1c, 0xcb, 0xb5, + 0xe6, 0xcf, 0x69, 0x9f, 0x89, 0x03, 0x23, 0x56, 0x14, 0x0a, 0x9b, 0xc6, 0x19, 0x18, 0xf4, 0x18, + 0xf7, 0x5b, 0x05, 0xb2, 0xa2, 0xe2, 0x02, 0x9d, 0xa1, 0x6b, 0xcd, 0x2d, 0xda, 0x67, 0xf2, 0x16, + 0x26, 0x5b, 0x2e, 0xb2, 0x9e, 0x9f, 0x1a, 0x3e, 0xde, 0x72, 0x11, 0x3c, 0x2a, 0x57, 0x40, 0x64, + 0x8d, 0x8a, 0x69, 0x2e, 0x0f, 0xc4, 0x33, 0x23, 0xbe, 0xe8, 0x49, 0xaf, 0x53, 0xb0, 0x6b, 0xa6, + 0x37, 0x99, 0x56, 0x4c, 0x34, 0x95, 0x61, 0xce, 0xb9, 0x6b, 0xcd, 0xa7, 0xd7, 0xef, 0xff, 0xd3, + 0xd6, 0x8b, 0x99, 0xde, 0xa4, 0x4f, 0x3a, 0xbd, 0xa8, 0x8f, 0x07, 0xe4, 0x1d, 0x4c, 0xee, 0x1f, + 0x74, 0xc6, 0x76, 0x05, 0x47, 0x91, 0xa3, 0x33, 0xda, 0x17, 0xbc, 0x3d, 0xa1, 0xe3, 0xfb, 0x07, + 0xed, 0x77, 0xc3, 0x19, 0xc2, 0xc5, 0x3f, 0x0f, 0x11, 0x17, 0xde, 0xc4, 0x7e, 0x7a, 0x9b, 0xa5, + 0xd4, 0x8f, 0x92, 0xaf, 0x7e, 0xba, 0x5a, 0x47, 0xd9, 0xb7, 0x28, 0x89, 0xc3, 0x9b, 0xd5, 0xe7, + 0x55, 0x18, 0xd8, 0x27, 0xe4, 0x25, 0xd8, 0x37, 0xeb, 0x28, 0x49, 0xfd, 0x28, 0xcd, 0xfc, 0x20, + 0xa0, 0x61, 0x92, 0xd8, 0x16, 0xb9, 0x84, 0x57, 0x7e, 0x1c, 0x87, 0x51, 0x90, 0xb5, 0xeb, 0xeb, + 0x9e, 0x0d, 0x96, 0x36, 0x4c, 0xd9, 0x4e, 0x6f, 0x50, 0x68, 0x9e, 0x9b, 0x5f, 0x96, 0x02, 0xa6, + 0xb9, 0xdc, 0x1e, 0x94, 0x5b, 0x4e, 0xba, 0x76, 0xf1, 0xfe, 0x16, 0x62, 0xeb, 0x7b, 0xd8, 0xb1, + 0x52, 0x56, 0x4c, 0x94, 0x9e, 0x54, 0xe5, 0xa2, 0x44, 0x61, 0x2e, 0x65, 0xd1, 0x22, 0x56, 0xf3, + 0xc6, 0x9c, 0x51, 0x83, 0xea, 0x17, 0xcf, 0x31, 0x97, 0xe2, 0x07, 0x2f, 0x3f, 0x1d, 0xa5, 0xdf, + 0x83, 0xd3, 0x2f, 0x7e, 0xbc, 0xba, 0x3b, 0x37, 0x8b, 0x1f, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, + 0x2b, 0x64, 0x62, 0xc8, 0x7e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/billing.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/billing.pb.go new file mode 100644 index 0000000..82b0f68 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/billing.pb.go @@ -0,0 +1,162 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/billing.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/metric" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Billing related configuration of the service. +// +// The following example shows how to configure monitored resources and metrics +// for billing: +// +// monitored_resources: +// - type: library.googleapis.com/branch +// labels: +// - key: /city +// description: The city where the library branch is located in. +// - key: /name +// description: The name of the branch. +// metrics: +// - name: library.googleapis.com/book/borrowed_count +// metric_kind: DELTA +// value_type: INT64 +// billing: +// consumer_destinations: +// - monitored_resource: library.googleapis.com/branch +// metrics: +// - library.googleapis.com/book/borrowed_count +type Billing struct { + // Billing configurations for sending metrics to the consumer project. 
+ // There can be multiple consumer destinations per service, each one must have + // a different monitored resource type. A metric can be used in at most + // one consumer destination. + ConsumerDestinations []*Billing_BillingDestination `protobuf:"bytes,8,rep,name=consumer_destinations,json=consumerDestinations,proto3" json:"consumer_destinations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Billing) Reset() { *m = Billing{} } +func (m *Billing) String() string { return proto.CompactTextString(m) } +func (*Billing) ProtoMessage() {} +func (*Billing) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_7a62f78f020eadae, []int{0} +} +func (m *Billing) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Billing.Unmarshal(m, b) +} +func (m *Billing) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Billing.Marshal(b, m, deterministic) +} +func (dst *Billing) XXX_Merge(src proto.Message) { + xxx_messageInfo_Billing.Merge(dst, src) +} +func (m *Billing) XXX_Size() int { + return xxx_messageInfo_Billing.Size(m) +} +func (m *Billing) XXX_DiscardUnknown() { + xxx_messageInfo_Billing.DiscardUnknown(m) +} + +var xxx_messageInfo_Billing proto.InternalMessageInfo + +func (m *Billing) GetConsumerDestinations() []*Billing_BillingDestination { + if m != nil { + return m.ConsumerDestinations + } + return nil +} + +// Configuration of a specific billing destination (Currently only support +// bill against consumer project). +type Billing_BillingDestination struct { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + MonitoredResource string `protobuf:"bytes,1,opt,name=monitored_resource,json=monitoredResource,proto3" json:"monitored_resource,omitempty"` + // Names of the metrics to report to this billing destination. + // Each name must be defined in [Service.metrics][google.api.Service.metrics] section. 
+ Metrics []string `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Billing_BillingDestination) Reset() { *m = Billing_BillingDestination{} } +func (m *Billing_BillingDestination) String() string { return proto.CompactTextString(m) } +func (*Billing_BillingDestination) ProtoMessage() {} +func (*Billing_BillingDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_billing_7a62f78f020eadae, []int{0, 0} +} +func (m *Billing_BillingDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Billing_BillingDestination.Unmarshal(m, b) +} +func (m *Billing_BillingDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Billing_BillingDestination.Marshal(b, m, deterministic) +} +func (dst *Billing_BillingDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_Billing_BillingDestination.Merge(dst, src) +} +func (m *Billing_BillingDestination) XXX_Size() int { + return xxx_messageInfo_Billing_BillingDestination.Size(m) +} +func (m *Billing_BillingDestination) XXX_DiscardUnknown() { + xxx_messageInfo_Billing_BillingDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_Billing_BillingDestination proto.InternalMessageInfo + +func (m *Billing_BillingDestination) GetMonitoredResource() string { + if m != nil { + return m.MonitoredResource + } + return "" +} + +func (m *Billing_BillingDestination) GetMetrics() []string { + if m != nil { + return m.Metrics + } + return nil +} + +func init() { + proto.RegisterType((*Billing)(nil), "google.api.Billing") + proto.RegisterType((*Billing_BillingDestination)(nil), "google.api.Billing.BillingDestination") +} + +func init() { proto.RegisterFile("google/api/billing.proto", fileDescriptor_billing_7a62f78f020eadae) } + +var fileDescriptor_billing_7a62f78f020eadae = []byte{ + // 255 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0xe9, 0xae, 0xb8, 0x6e, 0x14, 0xc1, 0xa0, 0x18, 0xf6, 0x54, 0x3c, 0x48, 0x2f, 0xa6, + 0xa0, 0x47, 0x4f, 0x16, 0x45, 0xbc, 0x95, 0x1e, 0x15, 0x59, 0xb2, 0xd9, 0x31, 0x0c, 0xb4, 0x99, + 0x92, 0x64, 0x7d, 0x20, 0xdf, 0xc5, 0xf7, 0x12, 0x9b, 0xd6, 0x56, 0xf6, 0x14, 0x26, 0xdf, 0xf7, + 0x4f, 0x26, 0xc3, 0x84, 0x21, 0x32, 0x35, 0xe4, 0xaa, 0xc5, 0x7c, 0x83, 0x75, 0x8d, 0xd6, 0xc8, + 0xd6, 0x51, 0x20, 0xce, 0x22, 0x91, 0xaa, 0xc5, 0xd5, 0xe5, 0xc4, 0x6a, 0x20, 0x38, 0xd4, 0x51, + 0xba, 0xfa, 0x4e, 0xd8, 0xa2, 0x88, 0x31, 0xfe, 0xc6, 0x2e, 0x34, 0x59, 0xbf, 0x6b, 0xc0, 0xad, + 0xb7, 0xe0, 0x03, 0x5a, 0x15, 0x90, 0xac, 0x17, 0x47, 0xe9, 0x3c, 0x3b, 0xbe, 0xbd, 0x96, 0x63, + 0x43, 0xd9, 0x67, 0x86, 0xf3, 0x71, 0xd4, 0xab, 0xf3, 0xa1, 0xc9, 0xe4, 0xd2, 0xaf, 0xde, 0x19, + 0xdf, 0x77, 0xf9, 0x0d, 0xe3, 0x0d, 0x59, 0x0c, 0xe4, 0x60, 0xbb, 0x76, 0xe0, 0x69, 0xe7, 0x34, + 0x88, 0x24, 0x4d, 0xb2, 0x65, 0x75, 0xf6, 0x47, 0xaa, 0x1e, 0x70, 0xc1, 0x16, 0x71, 0x7a, 0x2f, + 0x66, 0xe9, 0x3c, 0x5b, 0x56, 0x43, 0x59, 0x58, 0x76, 0xaa, 0xa9, 0x99, 0x4c, 0x58, 0x9c, 0xf4, + 0xcf, 0x95, 0xbf, 0xff, 0x2c, 0x93, 0xd7, 0xa7, 0x9e, 0x19, 0xaa, 0x95, 0x35, 0x92, 0x9c, 0xc9, + 0x0d, 0xd8, 0x6e, 0x0b, 0x79, 0x44, 0xaa, 0x45, 0xdf, 0x6d, 0xc8, 0x83, 0xfb, 0x44, 0x0d, 0x9a, + 0xec, 0x07, 0x9a, 0xfb, 0x7f, 0xd5, 0xd7, 0xec, 0xe0, 0xf9, 0xa1, 0x7c, 0xd9, 0x1c, 0x76, 0xc1, + 0xbb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x27, 0xa0, 0xf0, 0x6b, 0x7f, 0x01, 0x00, 0x00, 
+} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/consumer.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/consumer.pb.go new file mode 100644 index 0000000..9d8ddeb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/consumer.pb.go @@ -0,0 +1,210 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/consumer.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Supported data type of the property values +type Property_PropertyType int32 + +const ( + // The type is unspecified, and will result in an error. + Property_UNSPECIFIED Property_PropertyType = 0 + // The type is `int64`. + Property_INT64 Property_PropertyType = 1 + // The type is `bool`. + Property_BOOL Property_PropertyType = 2 + // The type is `string`. + Property_STRING Property_PropertyType = 3 + // The type is 'double'. + Property_DOUBLE Property_PropertyType = 4 +) + +var Property_PropertyType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "INT64", + 2: "BOOL", + 3: "STRING", + 4: "DOUBLE", +} +var Property_PropertyType_value = map[string]int32{ + "UNSPECIFIED": 0, + "INT64": 1, + "BOOL": 2, + "STRING": 3, + "DOUBLE": 4, +} + +func (x Property_PropertyType) String() string { + return proto.EnumName(Property_PropertyType_name, int32(x)) +} +func (Property_PropertyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_consumer_bc35b863b20645b4, []int{1, 0} +} + +// A descriptor for defining project properties for a service. One service may +// have many consumer projects, and the service may want to behave differently +// depending on some properties on the project. For example, a project may be +// associated with a school, or a business, or a government agency, a business +// type property on the project may affect how a service responds to the client. +// This descriptor defines which properties are allowed to be set on a project. +// +// Example: +// +// project_properties: +// properties: +// - name: NO_WATERMARK +// type: BOOL +// description: Allows usage of the API without watermarks. +// - name: EXTENDED_TILE_CACHE_PERIOD +// type: INT64 +type ProjectProperties struct { + // List of per consumer project-specific properties. 
+ Properties []*Property `protobuf:"bytes,1,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProjectProperties) Reset() { *m = ProjectProperties{} } +func (m *ProjectProperties) String() string { return proto.CompactTextString(m) } +func (*ProjectProperties) ProtoMessage() {} +func (*ProjectProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_consumer_bc35b863b20645b4, []int{0} +} +func (m *ProjectProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProjectProperties.Unmarshal(m, b) +} +func (m *ProjectProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProjectProperties.Marshal(b, m, deterministic) +} +func (dst *ProjectProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProjectProperties.Merge(dst, src) +} +func (m *ProjectProperties) XXX_Size() int { + return xxx_messageInfo_ProjectProperties.Size(m) +} +func (m *ProjectProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ProjectProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ProjectProperties proto.InternalMessageInfo + +func (m *ProjectProperties) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Defines project properties. +// +// API services can define properties that can be assigned to consumer projects +// so that backends can perform response customization without having to make +// additional calls or maintain additional storage. For example, Maps API +// defines properties that controls map tile cache period, or whether to embed a +// watermark in a result. +// +// These values can be set via API producer console. Only API providers can +// define and set these properties. +type Property struct { + // The name of the property (a.k.a key). + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The type of this property. 
+ Type Property_PropertyType `protobuf:"varint,2,opt,name=type,proto3,enum=google.api.Property_PropertyType" json:"type,omitempty"` + // The description of the property + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_consumer_bc35b863b20645b4, []int{1} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetType() Property_PropertyType { + if m != nil { + return m.Type + } + return Property_UNSPECIFIED +} + +func (m *Property) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*ProjectProperties)(nil), "google.api.ProjectProperties") + proto.RegisterType((*Property)(nil), "google.api.Property") + proto.RegisterEnum("google.api.Property_PropertyType", Property_PropertyType_name, Property_PropertyType_value) +} + +func init() { proto.RegisterFile("google/api/consumer.proto", fileDescriptor_consumer_bc35b863b20645b4) } + +var fileDescriptor_consumer_bc35b863b20645b4 = []byte{ + // 299 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0x4f, 0x4f, 0xf2, 0x40, + 0x10, 0xc6, 0xdf, 0x85, 0xbe, 0x04, 0x06, 0xc5, 0xba, 0xf1, 0x50, 0x6f, 0x95, 0x13, 0xa7, 0x36, + 0x41, 0xf4, 0xe2, 0xad, 0x50, 0x4d, 0x13, 0x02, 0x4d, 0x81, 0x8b, 0xb7, 0x5a, 0xc7, 0x75, 0x0d, + 0xec, 0x6c, 0xb6, 0xd5, 0x84, 0x0f, 0xe8, 0xf7, 0x32, 0x2c, 0x88, 0x35, 0xf1, 0xf6, 0xcc, 0x3e, + 0x7f, 0xb2, 0xf9, 0xc1, 0xa5, 0x20, 0x12, 0x6b, 0x0c, 0x73, 0x2d, 0xc3, 0x82, 0x54, 0xf9, 0xbe, + 0x41, 0x13, 0x68, 0x43, 0x15, 0x71, 0xd8, 0x5b, 0x41, 0xae, 0x65, 0x3f, 0x81, 0xf3, 0xd4, 0xd0, + 0x1b, 0x16, 0x55, 0x6a, 0x48, 0xa3, 0xa9, 0x24, 0x96, 0x7c, 0x04, 0xa0, 0x8f, 0x97, 0xc7, 0xfc, + 0xe6, 0xa0, 0x3b, 0xbc, 0x08, 0x7e, 0x5a, 0xc1, 0x21, 0xbb, 0xcd, 0x6a, 0xb9, 0xfe, 0x27, 0x83, + 0xf6, 0xb7, 0xc1, 0x39, 0x38, 0x2a, 0xdf, 0xa0, 0xc7, 0x7c, 0x36, 0xe8, 0x64, 0x56, 0xf3, 0x1b, + 0x70, 0xaa, 0xad, 0x46, 0xaf, 0xe1, 0xb3, 0x41, 0x6f, 0x78, 0xf5, 0xd7, 0xe0, 0x51, 0x2c, 0xb7, + 0x1a, 0x33, 0x1b, 0xe7, 0x3e, 0x74, 0x9f, 0xb1, 0x2c, 0x8c, 0xd4, 0x95, 0x24, 0xe5, 0x35, 0xed, + 0x62, 0xfd, 0xa9, 0x3f, 0x85, 0x93, 0x7a, 0x8f, 0x9f, 0x41, 0x77, 0x35, 0x5b, 0xa4, 0xf1, 0x38, + 0xb9, 0x4f, 0xe2, 0x89, 0xfb, 0x8f, 0x77, 0xe0, 0x7f, 0x32, 0x5b, 0xde, 0x8e, 0x5c, 0xc6, 0xdb, + 0xe0, 0x44, 0xf3, 0xf9, 0xd4, 0x6d, 0x70, 0x80, 0xd6, 0x62, 0x99, 0x25, 0xb3, 0x07, 0xb7, 0xb9, + 0xd3, 0x93, 0xf9, 0x2a, 0x9a, 0xc6, 0xae, 0x13, 0xbd, 0x42, 0xaf, 0xa0, 0x4d, 0xed, 0x77, 0xd1, + 0xe9, 0xf8, 0x00, 0x30, 0xdd, 
0xf1, 0x4b, 0xd9, 0x63, 0x7c, 0x30, 0x05, 0xad, 0x73, 0x25, 0x02, + 0x32, 0x22, 0x14, 0xa8, 0x2c, 0xdd, 0x70, 0x6f, 0xe5, 0x5a, 0x96, 0x96, 0x7d, 0x89, 0xe6, 0x43, + 0x16, 0x58, 0x90, 0x7a, 0x91, 0xe2, 0xee, 0xd7, 0xf5, 0xd4, 0xb2, 0x8d, 0xeb, 0xaf, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xb7, 0xa4, 0x04, 0x2c, 0xac, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/context.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/context.pb.go new file mode 100644 index 0000000..d5f20ff --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/context.pb.go @@ -0,0 +1,206 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/context.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Context` defines which contexts an API requests. +// +// Example: +// +// context: +// rules: +// - selector: "*" +// requested: +// - google.rpc.context.ProjectContext +// - google.rpc.context.OriginContext +// +// The above specifies that all methods in the API request +// `google.rpc.context.ProjectContext` and +// `google.rpc.context.OriginContext`. +// +// Available context types are defined in package +// `google.rpc.context`. +// +// This also provides mechanism to whitelist any protobuf message extension that +// can be sent in grpc metadata using “x-goog-ext--bin” and +// “x-goog-ext--jspb” format. For example, list any service +// specific protobuf types that can appear in grpc metadata as follows in your +// yaml file: +// +// Example: +// +// context: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allowed_request_extensions: +// - google.foo.v1.NewExtension +// allowed_response_extensions: +// - google.foo.v1.NewExtension +// +// You can also specify extension ID instead of fully qualified extension name +// here. +type Context struct { + // A list of RPC context rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. 
+ Rules []*ContextRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Context) Reset() { *m = Context{} } +func (m *Context) String() string { return proto.CompactTextString(m) } +func (*Context) ProtoMessage() {} +func (*Context) Descriptor() ([]byte, []int) { + return fileDescriptor_context_51b0254c3aab83a4, []int{0} +} +func (m *Context) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Context.Unmarshal(m, b) +} +func (m *Context) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Context.Marshal(b, m, deterministic) +} +func (dst *Context) XXX_Merge(src proto.Message) { + xxx_messageInfo_Context.Merge(dst, src) +} +func (m *Context) XXX_Size() int { + return xxx_messageInfo_Context.Size(m) +} +func (m *Context) XXX_DiscardUnknown() { + xxx_messageInfo_Context.DiscardUnknown(m) +} + +var xxx_messageInfo_Context proto.InternalMessageInfo + +func (m *Context) GetRules() []*ContextRule { + if m != nil { + return m.Rules + } + return nil +} + +// A context rule provides information about the context for an individual API +// element. +type ContextRule struct { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // A list of full type names of requested contexts. + Requested []string `protobuf:"bytes,2,rep,name=requested,proto3" json:"requested,omitempty"` + // A list of full type names of provided contexts. + Provided []string `protobuf:"bytes,3,rep,name=provided,proto3" json:"provided,omitempty"` + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from client to backend. + AllowedRequestExtensions []string `protobuf:"bytes,4,rep,name=allowed_request_extensions,json=allowedRequestExtensions,proto3" json:"allowed_request_extensions,omitempty"` + // A list of full type names or extension IDs of extensions allowed in grpc + // side channel from backend to client. 
+ AllowedResponseExtensions []string `protobuf:"bytes,5,rep,name=allowed_response_extensions,json=allowedResponseExtensions,proto3" json:"allowed_response_extensions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContextRule) Reset() { *m = ContextRule{} } +func (m *ContextRule) String() string { return proto.CompactTextString(m) } +func (*ContextRule) ProtoMessage() {} +func (*ContextRule) Descriptor() ([]byte, []int) { + return fileDescriptor_context_51b0254c3aab83a4, []int{1} +} +func (m *ContextRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContextRule.Unmarshal(m, b) +} +func (m *ContextRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContextRule.Marshal(b, m, deterministic) +} +func (dst *ContextRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContextRule.Merge(dst, src) +} +func (m *ContextRule) XXX_Size() int { + return xxx_messageInfo_ContextRule.Size(m) +} +func (m *ContextRule) XXX_DiscardUnknown() { + xxx_messageInfo_ContextRule.DiscardUnknown(m) +} + +var xxx_messageInfo_ContextRule proto.InternalMessageInfo + +func (m *ContextRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *ContextRule) GetRequested() []string { + if m != nil { + return m.Requested + } + return nil +} + +func (m *ContextRule) GetProvided() []string { + if m != nil { + return m.Provided + } + return nil +} + +func (m *ContextRule) GetAllowedRequestExtensions() []string { + if m != nil { + return m.AllowedRequestExtensions + } + return nil +} + +func (m *ContextRule) GetAllowedResponseExtensions() []string { + if m != nil { + return m.AllowedResponseExtensions + } + return nil +} + +func init() { + proto.RegisterType((*Context)(nil), "google.api.Context") + proto.RegisterType((*ContextRule)(nil), "google.api.ContextRule") +} + +func init() { proto.RegisterFile("google/api/context.proto", fileDescriptor_context_51b0254c3aab83a4) } + +var fileDescriptor_context_51b0254c3aab83a4 = []byte{ + // 281 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0xbd, 0x4e, 0xc3, 0x30, + 0x10, 0x80, 0x95, 0xfe, 0x00, 0x71, 0x11, 0x83, 0x17, 0x4c, 0x61, 0x88, 0x3a, 0x65, 0x21, 0x91, + 0x60, 0x41, 0x02, 0x21, 0x51, 0x54, 0x21, 0xb6, 0x28, 0x23, 0x4b, 0x15, 0x92, 0xc3, 0xb2, 0x64, + 0x7c, 0xc6, 0x76, 0x4a, 0x9f, 0x87, 0x37, 0xe3, 0x4d, 0x50, 0xed, 0x28, 0x09, 0xe3, 0xdd, 0xf7, + 0x7d, 0x83, 0xcf, 0x84, 0x71, 0x44, 0x2e, 0x21, 0xaf, 0xb4, 0xc8, 0x6b, 0x54, 0x0e, 0xf6, 0x2e, + 0xd3, 0x06, 0x1d, 0x52, 0x12, 0x48, 0x56, 0x69, 0xb1, 0xba, 0x23, 0xc7, 0xcf, 0x01, 0xd2, 0x6b, + 0x32, 0x37, 0xad, 0x04, 0xcb, 0xa2, 0x64, 0x9a, 0x2e, 0x6e, 0xce, 0xb3, 0x41, 0xcb, 0x3a, 0xa7, + 0x6c, 0x25, 0x94, 0xc1, 0x5a, 0xfd, 0x46, 0x64, 0x31, 0x5a, 0xd3, 0x25, 0x39, 0xb1, 0x20, 0xa1, + 0x76, 0x68, 0x58, 0x94, 0x44, 0x69, 0x5c, 0xf6, 0x33, 0xbd, 0x22, 0xb1, 0x81, 0xaf, 0x16, 0xac, + 0x83, 0x86, 0x4d, 0x92, 0x69, 0x1a, 0x97, 0xc3, 0xe2, 0x50, 0x6a, 0x83, 0x3b, 0xd1, 0x40, 0xc3, + 0xa6, 0x1e, 0xf6, 0x33, 0x7d, 0x20, 0xcb, 0x4a, 0x4a, 0xfc, 0x86, 0x66, 0xdb, 0x05, 0x5b, 0xd8, + 0x3b, 0x50, 0x56, 0xa0, 0xb2, 0x6c, 0xe6, 0x6d, 0xd6, 0x19, 0x65, 0x10, 0x36, 0x3d, 0xa7, 0x8f, + 0xe4, 0x72, 0xa8, 0xad, 0x46, 0x65, 0x61, 0x9c, 0xcf, 0x7d, 0x7e, 0xd1, 0xe7, 0xc1, 0x18, 0xfa, + 0xb5, 0x22, 0x67, 0x35, 0x7e, 0x8e, 0x0e, 0xb1, 0x3e, 0xed, 0x9e, 0x5c, 0x1c, 0x2e, 0x59, 0x44, + 0x6f, 0x9b, 0x8e, 0x71, 0x94, 
0x95, 0xe2, 0x19, 0x1a, 0x9e, 0x73, 0x50, 0xfe, 0xce, 0x79, 0x40, + 0x95, 0x16, 0xd6, 0x7f, 0x82, 0x05, 0xb3, 0x13, 0x35, 0xd4, 0xa8, 0x3e, 0x04, 0xbf, 0xff, 0x37, + 0xfd, 0x4c, 0x66, 0x2f, 0x4f, 0xc5, 0xeb, 0xfb, 0x91, 0x0f, 0x6f, 0xff, 0x02, 0x00, 0x00, 0xff, + 0xff, 0x00, 0x40, 0x95, 0xa9, 0xbc, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/control.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/control.pb.go new file mode 100644 index 0000000..fc9ddd8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/control.pb.go @@ -0,0 +1,83 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/control.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Selects and configures the service controller used by the service. The +// service controller handles features like abuse, quota, billing, logging, +// monitoring, etc. +type Control struct { + // The service control environment to use. If empty, no control plane + // feature (like quota and billing) will be enabled. + Environment string `protobuf:"bytes,1,opt,name=environment,proto3" json:"environment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Control) Reset() { *m = Control{} } +func (m *Control) String() string { return proto.CompactTextString(m) } +func (*Control) ProtoMessage() {} +func (*Control) Descriptor() ([]byte, []int) { + return fileDescriptor_control_6bbb3be1ecc58ed7, []int{0} +} +func (m *Control) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Control.Unmarshal(m, b) +} +func (m *Control) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Control.Marshal(b, m, deterministic) +} +func (dst *Control) XXX_Merge(src proto.Message) { + xxx_messageInfo_Control.Merge(dst, src) +} +func (m *Control) XXX_Size() int { + return xxx_messageInfo_Control.Size(m) +} +func (m *Control) XXX_DiscardUnknown() { + xxx_messageInfo_Control.DiscardUnknown(m) +} + +var xxx_messageInfo_Control proto.InternalMessageInfo + +func (m *Control) GetEnvironment() string { + if m != nil { + return m.Environment + } + return "" +} + +func init() { + proto.RegisterType((*Control)(nil), "google.api.Control") +} + +func init() { proto.RegisterFile("google/api/control.proto", fileDescriptor_control_6bbb3be1ecc58ed7) } + +var fileDescriptor_control_6bbb3be1ecc58ed7 = []byte{ + // 165 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x4f, 0xce, 0xcf, 0x2b, 0x29, 0xca, 0xcf, 0xd1, 0x2b, + 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0xc8, 0xe8, 0x25, 0x16, 0x64, 0x2a, 0x69, 0x73, 0xb1, + 0x3b, 0x43, 0x24, 0x85, 0x14, 0xb8, 0xb8, 0x53, 0xf3, 0xca, 0x32, 0x8b, 0xf2, 0xf3, 0x72, 0x53, + 0xf3, 
0x4a, 0x24, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0x90, 0x85, 0x9c, 0xf2, 0xb8, 0xf8, 0x92, + 0xf3, 0x73, 0xf5, 0x10, 0xda, 0x9d, 0x78, 0xa0, 0x9a, 0x03, 0x40, 0x06, 0x07, 0x30, 0x46, 0xb9, + 0x42, 0xe5, 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, + 0xc0, 0xd6, 0xea, 0x43, 0xa4, 0x12, 0x0b, 0x32, 0x8b, 0xc1, 0x6e, 0x2a, 0x4e, 0x2d, 0x2a, 0xcb, + 0x4c, 0x4e, 0x4d, 0xce, 0xcf, 0x4b, 0xcb, 0x4c, 0xb7, 0x46, 0xe1, 0x2d, 0x62, 0x62, 0x71, 0x77, + 0x0c, 0xf0, 0x4c, 0x62, 0x03, 0x6b, 0x34, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x44, 0x6e, 0x78, + 0xbd, 0xcb, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/documentation.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/documentation.pb.go new file mode 100644 index 0000000..7ee6156 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/documentation.pb.go @@ -0,0 +1,339 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/documentation.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Documentation` provides the information for describing a service. +// +// Example: +//
+// documentation:
+//   summary: >
+//     The Google Calendar API gives access
+//     to most calendar features.
+//   pages:
+//   - name: Overview
+//     content: (== include google/foo/overview.md ==)
+//   - name: Tutorial
+//     content: (== include google/foo/tutorial.md ==)
+//     subpages:
+//     - name: Java
+//       content: (== include google/foo/tutorial_java.md ==)
+//   rules:
+//   - selector: google.calendar.Calendar.Get
+//     description: >
+//       ...
+//   - selector: google.calendar.Calendar.Put
+//     description: >
+//       ...
+// 
+// Documentation is provided in markdown syntax. In addition to +// standard markdown features, definition lists, tables and fenced +// code blocks are supported. Section headers can be provided and are +// interpreted relative to the section nesting of the context where +// a documentation fragment is embedded. +// +// Documentation from the IDL is merged with documentation defined +// via the config at normalization time, where documentation provided +// by config rules overrides IDL provided. +// +// A number of constructs specific to the API platform are supported +// in documentation text. +// +// In order to reference a proto element, the following +// notation can be used: +//
+//     [fully.qualified.proto.name][]
+// To override the display text used for the link, this can be used: +//
+//     [display text][fully.qualified.proto.name]
+// Text can be excluded from the documentation using the following notation: +//
+//     (-- internal comment --)
+// +// A few directives are available in documentation. Note that +// directives must appear on a single line to be properly +// identified. The `include` directive includes a markdown file from +// an external source: +//
+//     (== include path/to/file ==)
+// The `resource_for` directive marks a message to be the resource of +// a collection in REST view. If it is not specified, tools attempt +// to infer the resource from the operations in a collection: +//
+//     (== resource_for v1.shelves.books ==)
+// The directive `suppress_warning` does not directly affect documentation +// and is documented together with service config validation. +type Documentation struct { + // A short summary of what the service does. Can only be provided by + // plain text. + Summary string `protobuf:"bytes,1,opt,name=summary,proto3" json:"summary,omitempty"` + // The top level pages for the documentation set. + Pages []*Page `protobuf:"bytes,5,rep,name=pages,proto3" json:"pages,omitempty"` + // A list of documentation rules that apply to individual API elements. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*DocumentationRule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules,omitempty"` + // The URL to the root of documentation. + DocumentationRootUrl string `protobuf:"bytes,4,opt,name=documentation_root_url,json=documentationRootUrl,proto3" json:"documentation_root_url,omitempty"` + // Declares a single overview page. For example: + //
+	// documentation:
+	//   summary: ...
+	//   overview: (== include overview.md ==)
+	// 
+ // This is a shortcut for the following declaration (using pages style): + //
+	// documentation:
+	//   summary: ...
+	//   pages:
+	//   - name: Overview
+	//     content: (== include overview.md ==)
+	// 
+ // Note: you cannot specify both `overview` field and `pages` field. + Overview string `protobuf:"bytes,2,opt,name=overview,proto3" json:"overview,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Documentation) Reset() { *m = Documentation{} } +func (m *Documentation) String() string { return proto.CompactTextString(m) } +func (*Documentation) ProtoMessage() {} +func (*Documentation) Descriptor() ([]byte, []int) { + return fileDescriptor_documentation_c22025712f3af31e, []int{0} +} +func (m *Documentation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Documentation.Unmarshal(m, b) +} +func (m *Documentation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Documentation.Marshal(b, m, deterministic) +} +func (dst *Documentation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Documentation.Merge(dst, src) +} +func (m *Documentation) XXX_Size() int { + return xxx_messageInfo_Documentation.Size(m) +} +func (m *Documentation) XXX_DiscardUnknown() { + xxx_messageInfo_Documentation.DiscardUnknown(m) +} + +var xxx_messageInfo_Documentation proto.InternalMessageInfo + +func (m *Documentation) GetSummary() string { + if m != nil { + return m.Summary + } + return "" +} + +func (m *Documentation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *Documentation) GetRules() []*DocumentationRule { + if m != nil { + return m.Rules + } + return nil +} + +func (m *Documentation) GetDocumentationRootUrl() string { + if m != nil { + return m.DocumentationRootUrl + } + return "" +} + +func (m *Documentation) GetOverview() string { + if m != nil { + return m.Overview + } + return "" +} + +// A documentation rule provides information about individual API elements. +type DocumentationRule struct { + // The selector is a comma-separated list of patterns. Each pattern is a + // qualified name of the element which may end in "*", indicating a wildcard. + // Wildcards are only allowed at the end and for a whole component of the + // qualified name, i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". A + // wildcard will match one or more components. To specify a default for all + // applicable elements, the whole pattern "*" is used. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // Description of the selected API(s). + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Deprecation description of the selected element(s). It can be provided if + // an element is marked as `deprecated`. 
+ DeprecationDescription string `protobuf:"bytes,3,opt,name=deprecation_description,json=deprecationDescription,proto3" json:"deprecation_description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentationRule) Reset() { *m = DocumentationRule{} } +func (m *DocumentationRule) String() string { return proto.CompactTextString(m) } +func (*DocumentationRule) ProtoMessage() {} +func (*DocumentationRule) Descriptor() ([]byte, []int) { + return fileDescriptor_documentation_c22025712f3af31e, []int{1} +} +func (m *DocumentationRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentationRule.Unmarshal(m, b) +} +func (m *DocumentationRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentationRule.Marshal(b, m, deterministic) +} +func (dst *DocumentationRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentationRule.Merge(dst, src) +} +func (m *DocumentationRule) XXX_Size() int { + return xxx_messageInfo_DocumentationRule.Size(m) +} +func (m *DocumentationRule) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentationRule.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentationRule proto.InternalMessageInfo + +func (m *DocumentationRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *DocumentationRule) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *DocumentationRule) GetDeprecationDescription() string { + if m != nil { + return m.DeprecationDescription + } + return "" +} + +// Represents a documentation page. A page can contain subpages to represent +// nested documentation set structure. +type Page struct { + // The name of the page. It will be used as an identity of the page to + // generate URI of the page, text of the link to this page in navigation, + // etc. The full page name (start from the root page name to this page + // concatenated with `.`) can be used as reference to the page in your + // documentation. For example: + //
+	// pages:
+	// - name: Tutorial
+	//   content: (== include tutorial.md ==)
+	//   subpages:
+	//   - name: Java
+	//     content: (== include tutorial_java.md ==)
+	// 
+ // You can reference `Java` page using Markdown reference link syntax: + // `[Java][Tutorial.Java]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The Markdown content of the page. You can use (== include {path} + // ==) to include content from a Markdown file. + Content string `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"` + // Subpages of this page. The order of subpages specified here will be + // honored in the generated docset. + Subpages []*Page `protobuf:"bytes,3,rep,name=subpages,proto3" json:"subpages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_documentation_c22025712f3af31e, []int{2} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Page) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *Page) GetSubpages() []*Page { + if m != nil { + return m.Subpages + } + return nil +} + +func init() { + proto.RegisterType((*Documentation)(nil), "google.api.Documentation") + proto.RegisterType((*DocumentationRule)(nil), "google.api.DocumentationRule") + proto.RegisterType((*Page)(nil), "google.api.Page") +} + +func init() { + proto.RegisterFile("google/api/documentation.proto", fileDescriptor_documentation_c22025712f3af31e) +} + +var fileDescriptor_documentation_c22025712f3af31e = []byte{ + // 356 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xc1, 0x6a, 0xe3, 0x30, + 0x14, 0x45, 0x71, 0xec, 0xcc, 0x64, 0x5e, 0x98, 0x61, 0x46, 0x0c, 0x19, 0x33, 0xd0, 0x12, 0xb2, + 0x28, 0x59, 0x14, 0x1b, 0x9a, 0x42, 0x17, 0x5d, 0x35, 0xa4, 0x94, 0xee, 0x8c, 0xa1, 0x9b, 0x6e, + 0x82, 0xa2, 0xbc, 0x0a, 0x83, 0xad, 0x67, 0x24, 0x39, 0xa5, 0xbf, 0xd0, 0xcf, 0xe8, 0x57, 0xf5, + 0x73, 0x8a, 0x65, 0x27, 0xb1, 0x29, 0xdd, 0xf9, 0xfa, 0x1e, 0xe9, 0x3e, 0x5d, 0x09, 0x4e, 0x25, + 0x91, 0xcc, 0x31, 0xe6, 0x65, 0x16, 0x6f, 0x49, 0x54, 0x05, 0x2a, 0xcb, 0x6d, 0x46, 0x2a, 0x2a, + 0x35, 0x59, 0x62, 0xd0, 0xf8, 0x11, 0x2f, 0xb3, 0xd9, 0xbb, 0x07, 0x3f, 0x57, 0x5d, 0x86, 0x85, + 0xf0, 0xdd, 0x54, 0x45, 0xc1, 0xf5, 0x4b, 0xe8, 0x4d, 0xbd, 0xf9, 0x8f, 0x74, 0x2f, 0xd9, 0x19, + 0x0c, 0x4b, 0x2e, 0xd1, 0x84, 0xc3, 0xa9, 0x3f, 0x1f, 0x5f, 0xfc, 0x8e, 0x8e, 0xfb, 0x44, 0x09, + 0x97, 0x98, 0x36, 0x36, 0x5b, 0xc0, 0x50, 0x57, 0x39, 0x9a, 0xd0, 0x77, 0xdc, 0x49, 0x97, 0xeb, + 0x65, 0xa5, 0x55, 0x8e, 0x69, 0xc3, 0xb2, 0x4b, 0x98, 0xf4, 0x66, 0x5d, 0x6b, 0x22, 0xbb, 0xae, + 0x74, 0x1e, 0x06, 0x6e, 0x8a, 0xbf, 0x3d, 0x37, 0x25, 0xb2, 0x0f, 0x3a, 0x67, 0xff, 0x61, 0x44, + 0x3b, 0xd4, 0xbb, 0x0c, 0x9f, 0xc3, 0x81, 0xe3, 0x0e, 0x7a, 0xf6, 0xea, 0xc1, 0x9f, 0x4f, 0x71, + 0xf5, 0x0a, 0x83, 0x39, 0x0a, 0x4b, 
0xba, 0x3d, 0xdf, 0x41, 0xb3, 0x29, 0x8c, 0xb7, 0x68, 0x84, + 0xce, 0xca, 0x1a, 0x6f, 0x37, 0xec, 0xfe, 0x62, 0x57, 0xf0, 0x6f, 0x8b, 0xa5, 0x46, 0xd1, 0xcc, + 0xd8, 0xa5, 0x7d, 0x47, 0x4f, 0x3a, 0xf6, 0xea, 0xe8, 0xce, 0x36, 0x10, 0xd4, 0x15, 0x31, 0x06, + 0x81, 0xe2, 0x05, 0xb6, 0xd1, 0xee, 0xbb, 0x6e, 0x5c, 0x90, 0xb2, 0xa8, 0x6c, 0x1b, 0xb9, 0x97, + 0xec, 0x1c, 0x46, 0xa6, 0xda, 0x34, 0xa5, 0xfb, 0x5f, 0x94, 0x7e, 0x20, 0x96, 0x16, 0x7e, 0x09, + 0x2a, 0x3a, 0xc0, 0x92, 0xf5, 0xce, 0x9f, 0xd4, 0xb7, 0x9f, 0x78, 0x8f, 0xb7, 0x2d, 0x21, 0x29, + 0xe7, 0x4a, 0x46, 0xa4, 0x65, 0x2c, 0x51, 0xb9, 0xb7, 0x11, 0x37, 0x16, 0x2f, 0x33, 0xe3, 0x9e, + 0x8f, 0xa9, 0xbb, 0x14, 0x28, 0x48, 0x3d, 0x65, 0xf2, 0xba, 0xa7, 0xde, 0x06, 0xc1, 0xdd, 0x4d, + 0x72, 0xbf, 0xf9, 0xe6, 0x16, 0x2e, 0x3e, 0x02, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x04, 0x32, 0xbf, + 0x76, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/endpoint.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/endpoint.pb.go new file mode 100644 index 0000000..230b2e0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/endpoint.pb.go @@ -0,0 +1,149 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/endpoint.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Endpoint` describes a network endpoint that serves a set of APIs. +// A service may expose any number of endpoints, and all endpoints share the +// same service configuration, such as quota configuration and monitoring +// configuration. +// +// Example service configuration: +// +// name: library-example.googleapis.com +// endpoints: +// # Below entry makes 'google.example.library.v1.Library' +// # API be served from endpoint address library-example.googleapis.com. +// # It also allows HTTP OPTIONS calls to be passed to the backend, for +// # it to decide whether the subsequent cross-origin request is +// # allowed to proceed. +// - name: library-example.googleapis.com +// allow_cors: true +type Endpoint struct { + // The canonical name of this endpoint. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // DEPRECATED: This field is no longer supported. Instead of using aliases, + // please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended + // aliases. + // + // Additional names that this endpoint will be hosted on. + Aliases []string `protobuf:"bytes,2,rep,name=aliases,proto3" json:"aliases,omitempty"` // Deprecated: Do not use. + // The list of features enabled on this endpoint. + Features []string `protobuf:"bytes,4,rep,name=features,proto3" json:"features,omitempty"` + // The specification of an Internet routable address of API frontend that will + // handle requests to this [API + // Endpoint](https://cloud.google.com/apis/design/glossary). 
It should be + // either a valid IPv4 address or a fully-qualified domain name. For example, + // "8.8.8.8" or "myservice.appspot.com". + Target string `protobuf:"bytes,101,opt,name=target,proto3" json:"target,omitempty"` + // Allowing + // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka + // cross-domain traffic, would allow the backends served from this endpoint to + // receive and respond to HTTP OPTIONS requests. The response will be used by + // the browser to determine whether the subsequent cross-origin request is + // allowed to proceed. + AllowCors bool `protobuf:"varint,5,opt,name=allow_cors,json=allowCors,proto3" json:"allow_cors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Endpoint) Reset() { *m = Endpoint{} } +func (m *Endpoint) String() string { return proto.CompactTextString(m) } +func (*Endpoint) ProtoMessage() {} +func (*Endpoint) Descriptor() ([]byte, []int) { + return fileDescriptor_endpoint_134b6a0db7c4f443, []int{0} +} +func (m *Endpoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Endpoint.Unmarshal(m, b) +} +func (m *Endpoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Endpoint.Marshal(b, m, deterministic) +} +func (dst *Endpoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Endpoint.Merge(dst, src) +} +func (m *Endpoint) XXX_Size() int { + return xxx_messageInfo_Endpoint.Size(m) +} +func (m *Endpoint) XXX_DiscardUnknown() { + xxx_messageInfo_Endpoint.DiscardUnknown(m) +} + +var xxx_messageInfo_Endpoint proto.InternalMessageInfo + +func (m *Endpoint) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Deprecated: Do not use. +func (m *Endpoint) GetAliases() []string { + if m != nil { + return m.Aliases + } + return nil +} + +func (m *Endpoint) GetFeatures() []string { + if m != nil { + return m.Features + } + return nil +} + +func (m *Endpoint) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *Endpoint) GetAllowCors() bool { + if m != nil { + return m.AllowCors + } + return false +} + +func init() { + proto.RegisterType((*Endpoint)(nil), "google.api.Endpoint") +} + +func init() { proto.RegisterFile("google/api/endpoint.proto", fileDescriptor_endpoint_134b6a0db7c4f443) } + +var fileDescriptor_endpoint_134b6a0db7c4f443 = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0x49, 0xad, 0x6b, 0x3b, 0xa0, 0x87, 0x1c, 0x24, 0x8a, 0x42, 0xf1, 0xd4, 0x53, 0x7b, + 0xf0, 0xe8, 0xc9, 0xca, 0x22, 0xde, 0x4a, 0x8f, 0x5e, 0x64, 0xac, 0xb3, 0x21, 0x90, 0xcd, 0x84, + 0x24, 0xea, 0x63, 0xf8, 0x0e, 0x3e, 0xa9, 0x6c, 0xb6, 0xab, 0x78, 0xcb, 0xff, 0x7f, 0x64, 0xf8, + 0x7e, 0xb8, 0xd0, 0xcc, 0xda, 0x52, 0x8f, 0xde, 0xf4, 0xe4, 0xde, 0x3c, 0x1b, 0x97, 0x3a, 0x1f, + 0x38, 0xb1, 0x84, 0x3d, 0xea, 0xd0, 0x9b, 0x9b, 0x2f, 0x01, 0xd5, 0x7a, 0xc1, 0x52, 0x42, 0xe9, + 0x70, 0x4b, 0x4a, 0x34, 0xa2, 0xad, 0xa7, 0xfc, 0x96, 0x57, 0x70, 0x82, 0xd6, 0x60, 0xa4, 0xa8, + 0x8a, 0xe6, 0xa8, 0xad, 0x87, 0x42, 0x89, 0xe9, 0x50, 0xc9, 0x4b, 0xa8, 0x36, 0x84, 0xe9, 0x3d, + 0x50, 0x54, 0xe5, 0x0e, 0x4f, 0xbf, 0x59, 0x9e, 0xc3, 0x2a, 0x61, 0xd0, 0x94, 0x14, 0xe5, 0x7b, + 0x4b, 0x92, 0xd7, 0x00, 0x68, 0x2d, 0x7f, 0xbe, 0xcc, 0x1c, 0xa2, 0x3a, 0x6e, 0x44, 0x5b, 0x4d, + 0x75, 0x6e, 0x1e, 0x38, 0xc4, 0x81, 0xe1, 0x6c, 0xe6, 0x6d, 0xf7, 0xe7, 0x38, 0x9c, 0x1e, 0x04, + 
0xc7, 0x9d, 0xfe, 0x28, 0x9e, 0xd7, 0x0b, 0xd4, 0x6c, 0xd1, 0xe9, 0x8e, 0x83, 0xee, 0x35, 0xb9, + 0x3c, 0xae, 0xdf, 0x23, 0xf4, 0x26, 0xe6, 0xe9, 0x91, 0xc2, 0x87, 0x99, 0x69, 0x66, 0xb7, 0x31, + 0xfa, 0xee, 0x5f, 0xfa, 0x2e, 0xca, 0xc7, 0xfb, 0xf1, 0xe9, 0x75, 0x95, 0x3f, 0xde, 0xfe, 0x04, + 0x00, 0x00, 0xff, 0xff, 0xa5, 0x38, 0x4b, 0xb3, 0x32, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/log.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/log.pb.go new file mode 100644 index 0000000..c19f6cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/log.pb.go @@ -0,0 +1,126 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/log.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import label "google.golang.org/genproto/googleapis/api/label" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A description of a log type. Example in YAML format: +// +// - name: library.googleapis.com/activity_history +// description: The history of borrowing and returning library items. +// display_name: Activity +// labels: +// - key: /customer_id +// description: Identifier of a library customer +type LogDescriptor struct { + // The name of the log. It must be less than 512 characters long and can + // include the following characters: upper- and lower-case alphanumeric + // characters [A-Za-z0-9], and punctuation characters including + // slash, underscore, hyphen, period [/_-.]. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The set of labels that are available to describe a specific log entry. + // Runtime requests that contain labels not specified here are + // considered invalid. + Labels []*label.LabelDescriptor `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty"` + // A human-readable description of this log. This information appears in + // the documentation and can contain details. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The human-readable name for this log. This information appears on + // the user interface and should be concise. 
+ DisplayName string `protobuf:"bytes,4,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogDescriptor) Reset() { *m = LogDescriptor{} } +func (m *LogDescriptor) String() string { return proto.CompactTextString(m) } +func (*LogDescriptor) ProtoMessage() {} +func (*LogDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_log_0e93d42992290c09, []int{0} +} +func (m *LogDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogDescriptor.Unmarshal(m, b) +} +func (m *LogDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogDescriptor.Marshal(b, m, deterministic) +} +func (dst *LogDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogDescriptor.Merge(dst, src) +} +func (m *LogDescriptor) XXX_Size() int { + return xxx_messageInfo_LogDescriptor.Size(m) +} +func (m *LogDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_LogDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_LogDescriptor proto.InternalMessageInfo + +func (m *LogDescriptor) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LogDescriptor) GetLabels() []*label.LabelDescriptor { + if m != nil { + return m.Labels + } + return nil +} + +func (m *LogDescriptor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *LogDescriptor) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func init() { + proto.RegisterType((*LogDescriptor)(nil), "google.api.LogDescriptor") +} + +func init() { proto.RegisterFile("google/api/log.proto", fileDescriptor_log_0e93d42992290c09) } + +var fileDescriptor_log_0e93d42992290c09 = []byte{ + // 238 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0xc1, 0x4a, 0xc3, 0x40, + 0x10, 0x86, 0x49, 0x1b, 0x8a, 0x6e, 0xd5, 0xc3, 0x22, 0x12, 0xf4, 0x12, 0x3d, 0xf5, 0xb4, 0x01, + 0x7b, 0xf4, 0x64, 0x51, 0x44, 0x08, 0x12, 0x7a, 0xf4, 0x22, 0xd3, 0x74, 0x1c, 0x46, 0x36, 0x3b, + 0xcb, 0x6e, 0x11, 0x7c, 0x18, 0x2f, 0x3e, 0xa9, 0x74, 0x13, 0x68, 0x7a, 0xdb, 0xfd, 0xe6, 0x9b, + 0x7f, 0x66, 0xd4, 0x25, 0x89, 0x90, 0xc5, 0x0a, 0x3c, 0x57, 0x56, 0xc8, 0xf8, 0x20, 0x3b, 0xd1, + 0xaa, 0xa7, 0x06, 0x3c, 0x5f, 0x5f, 0x8d, 0x0d, 0xd8, 0xa0, 0xed, 0x9d, 0xbb, 0xdf, 0x4c, 0x9d, + 0xd7, 0x42, 0x4f, 0x18, 0xdb, 0xc0, 0x7e, 0x27, 0x41, 0x6b, 0x95, 0x3b, 0xe8, 0xb0, 0xc8, 0xca, + 0x6c, 0x71, 0xba, 0x4e, 0x6f, 0xbd, 0x54, 0xb3, 0xd4, 0x14, 0x8b, 0x49, 0x39, 0x5d, 0xcc, 0xef, + 0x6f, 0xcc, 0x21, 0xda, 0xd4, 0xfb, 0xca, 0x21, 0x60, 0x3d, 0xa8, 0xba, 0x54, 0xf3, 0xed, 0x40, + 0x59, 0x5c, 0x31, 0x4d, 0x79, 0x63, 0xa4, 0x6f, 0xd5, 0xd9, 0x96, 0xa3, 0xb7, 0xf0, 0xf3, 0x91, + 0x46, 0xe6, 0x83, 0xd2, 0xb3, 0x37, 0xe8, 0x70, 0xf5, 0xa5, 0x2e, 0x5a, 0xe9, 0x46, 0xe3, 0x56, + 0x27, 0xb5, 0x50, 0xb3, 0xdf, 0xbd, 0xc9, 0xde, 0x9f, 0x07, 0x4e, 0x62, 0xc1, 0x91, 0x91, 0x40, + 0x15, 0xa1, 0x4b, 0x97, 0x55, 0x7d, 0x09, 0x3c, 0xc7, 0x74, 0x74, 0xc4, 0xf0, 0xcd, 0x2d, 0xb6, + 0xe2, 0x3e, 0x99, 0x1e, 0x8e, 0x7e, 0x7f, 0x93, 0xfc, 0xe5, 0xb1, 0x79, 0xdd, 0xcc, 0x52, 0xe3, + 0xf2, 0x3f, 0x00, 0x00, 0xff, 0xff, 0x25, 0x6c, 0x32, 0xff, 0x4e, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/logging.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/logging.pb.go new file mode 100644 index 
0000000..88bd892 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/logging.pb.go @@ -0,0 +1,184 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/logging.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Logging configuration of the service. +// +// The following example shows how to configure logs to be sent to the +// producer and consumer projects. In the example, the `activity_history` +// log is sent to both the producer and consumer projects, whereas the +// `purchase_history` log is only sent to the producer project. +// +// monitored_resources: +// - type: library.googleapis.com/branch +// labels: +// - key: /city +// description: The city where the library branch is located in. +// - key: /name +// description: The name of the branch. +// logs: +// - name: activity_history +// labels: +// - key: /customer_id +// - name: purchase_history +// logging: +// producer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +// - purchase_history +// consumer_destinations: +// - monitored_resource: library.googleapis.com/branch +// logs: +// - activity_history +type Logging struct { + // Logging configurations for sending logs to the producer project. + // There can be multiple producer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one producer destination. + ProducerDestinations []*Logging_LoggingDestination `protobuf:"bytes,1,rep,name=producer_destinations,json=producerDestinations,proto3" json:"producer_destinations,omitempty"` + // Logging configurations for sending logs to the consumer project. + // There can be multiple consumer destinations, each one must have a + // different monitored resource type. A log can be used in at most + // one consumer destination. 
+ ConsumerDestinations []*Logging_LoggingDestination `protobuf:"bytes,2,rep,name=consumer_destinations,json=consumerDestinations,proto3" json:"consumer_destinations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Logging) Reset() { *m = Logging{} } +func (m *Logging) String() string { return proto.CompactTextString(m) } +func (*Logging) ProtoMessage() {} +func (*Logging) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_72dc980851656a75, []int{0} +} +func (m *Logging) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Logging.Unmarshal(m, b) +} +func (m *Logging) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Logging.Marshal(b, m, deterministic) +} +func (dst *Logging) XXX_Merge(src proto.Message) { + xxx_messageInfo_Logging.Merge(dst, src) +} +func (m *Logging) XXX_Size() int { + return xxx_messageInfo_Logging.Size(m) +} +func (m *Logging) XXX_DiscardUnknown() { + xxx_messageInfo_Logging.DiscardUnknown(m) +} + +var xxx_messageInfo_Logging proto.InternalMessageInfo + +func (m *Logging) GetProducerDestinations() []*Logging_LoggingDestination { + if m != nil { + return m.ProducerDestinations + } + return nil +} + +func (m *Logging) GetConsumerDestinations() []*Logging_LoggingDestination { + if m != nil { + return m.ConsumerDestinations + } + return nil +} + +// Configuration of a specific logging destination (the producer project +// or the consumer project). +type Logging_LoggingDestination struct { + // The monitored resource type. The type must be defined in the + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + MonitoredResource string `protobuf:"bytes,3,opt,name=monitored_resource,json=monitoredResource,proto3" json:"monitored_resource,omitempty"` + // Names of the logs to be sent to this destination. Each name must + // be defined in the [Service.logs][google.api.Service.logs] section. If the log name is + // not a domain scoped name, it will be automatically prefixed with + // the service name followed by "/". 
+ Logs []string `protobuf:"bytes,1,rep,name=logs,proto3" json:"logs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Logging_LoggingDestination) Reset() { *m = Logging_LoggingDestination{} } +func (m *Logging_LoggingDestination) String() string { return proto.CompactTextString(m) } +func (*Logging_LoggingDestination) ProtoMessage() {} +func (*Logging_LoggingDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_72dc980851656a75, []int{0, 0} +} +func (m *Logging_LoggingDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Logging_LoggingDestination.Unmarshal(m, b) +} +func (m *Logging_LoggingDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Logging_LoggingDestination.Marshal(b, m, deterministic) +} +func (dst *Logging_LoggingDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_Logging_LoggingDestination.Merge(dst, src) +} +func (m *Logging_LoggingDestination) XXX_Size() int { + return xxx_messageInfo_Logging_LoggingDestination.Size(m) +} +func (m *Logging_LoggingDestination) XXX_DiscardUnknown() { + xxx_messageInfo_Logging_LoggingDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_Logging_LoggingDestination proto.InternalMessageInfo + +func (m *Logging_LoggingDestination) GetMonitoredResource() string { + if m != nil { + return m.MonitoredResource + } + return "" +} + +func (m *Logging_LoggingDestination) GetLogs() []string { + if m != nil { + return m.Logs + } + return nil +} + +func init() { + proto.RegisterType((*Logging)(nil), "google.api.Logging") + proto.RegisterType((*Logging_LoggingDestination)(nil), "google.api.Logging.LoggingDestination") +} + +func init() { proto.RegisterFile("google/api/logging.proto", fileDescriptor_logging_72dc980851656a75) } + +var fileDescriptor_logging_72dc980851656a75 = []byte{ + // 260 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x4d, 0x4b, 0xc4, 0x30, + 0x10, 0x86, 0x69, 0x77, 0x51, 0x36, 0x8a, 0x60, 0x50, 0x28, 0x9e, 0x16, 0x0f, 0xb2, 0x17, 0x53, + 0xd0, 0xa3, 0x27, 0x17, 0x45, 0x04, 0x0f, 0xa5, 0x17, 0x41, 0x0f, 0x4b, 0x4c, 0xe3, 0x30, 0xd0, + 0xce, 0x84, 0xa4, 0xf5, 0xd7, 0x78, 0xf2, 0x97, 0xca, 0xf6, 0xc3, 0x56, 0x3d, 0xed, 0x29, 0x1f, + 0xcf, 0xbc, 0x4f, 0x92, 0x89, 0x48, 0x80, 0x19, 0x4a, 0x9b, 0x6a, 0x87, 0x69, 0xc9, 0x00, 0x48, + 0xa0, 0x9c, 0xe7, 0x9a, 0xa5, 0xe8, 0x88, 0xd2, 0x0e, 0xcf, 0x3f, 0x63, 0xb1, 0xff, 0xd4, 0x51, + 0xf9, 0x2a, 0x4e, 0x9d, 0xe7, 0xa2, 0x31, 0xd6, 0x6f, 0x0a, 0x1b, 0x6a, 0x24, 0x5d, 0x23, 0x53, + 0x48, 0xa2, 0xe5, 0x6c, 0x75, 0x70, 0x75, 0xa1, 0xc6, 0x9c, 0xea, 0x33, 0xc3, 0x78, 0x37, 0x96, + 0xe7, 0x27, 0x83, 0x64, 0xb2, 0x19, 0xb6, 0x72, 0xc3, 0x14, 0x9a, 0xea, 0xaf, 0x3c, 0xde, 0x4d, + 0x3e, 0x48, 0xa6, 0xf2, 0xb3, 0x67, 0x21, 0xff, 0xd7, 0xca, 0x4b, 0x21, 0x2b, 0x26, 0xac, 0xd9, + 0xdb, 0x62, 0xe3, 0x6d, 0xe0, 0xc6, 0x1b, 0x9b, 0xcc, 0x96, 0xd1, 0x6a, 0x91, 0x1f, 0xff, 0x90, + 0xbc, 0x07, 0x52, 0x8a, 0x79, 0xc9, 0xd0, 0xbd, 0x76, 0x91, 0xb7, 0xf3, 0x35, 0x89, 0x23, 0xc3, + 0xd5, 0xe4, 0x6e, 0xeb, 0xc3, 0xfe, 0xa0, 0x6c, 0xdb, 0xca, 0x2c, 0x7a, 0xb9, 0xef, 0x19, 0x70, + 0xa9, 0x09, 0x14, 0x7b, 0x48, 0xc1, 0x52, 0xdb, 0xe8, 0xb4, 0x43, 0xda, 0x61, 0x68, 0x7f, 0x21, + 0x58, 0xff, 0x81, 0xc6, 0x1a, 0xa6, 0x77, 0x84, 0x9b, 0x5f, 0xab, 0xaf, 0x78, 0xfe, 0x70, 0x9b, + 0x3d, 0xbe, 0xed, 0xb5, 0xc1, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x84, 0x9b, 0x80, + 0xbd, 
0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/monitoring.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/monitoring.pb.go new file mode 100644 index 0000000..4baad4a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/monitoring.pb.go @@ -0,0 +1,196 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/monitoring.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Monitoring configuration of the service. +// +// The example below shows how to configure monitored resources and metrics +// for monitoring. In the example, a monitored resource and two metrics are +// defined. The `library.googleapis.com/book/returned_count` metric is sent +// to both producer and consumer projects, whereas the +// `library.googleapis.com/book/overdue_count` metric is only sent to the +// consumer project. +// +// monitored_resources: +// - type: library.googleapis.com/branch +// labels: +// - key: /city +// description: The city where the library branch is located in. +// - key: /name +// description: The name of the branch. +// metrics: +// - name: library.googleapis.com/book/returned_count +// metric_kind: DELTA +// value_type: INT64 +// labels: +// - key: /customer_id +// - name: library.googleapis.com/book/overdue_count +// metric_kind: GAUGE +// value_type: INT64 +// labels: +// - key: /customer_id +// monitoring: +// producer_destinations: +// - monitored_resource: library.googleapis.com/branch +// metrics: +// - library.googleapis.com/book/returned_count +// consumer_destinations: +// - monitored_resource: library.googleapis.com/branch +// metrics: +// - library.googleapis.com/book/returned_count +// - library.googleapis.com/book/overdue_count +type Monitoring struct { + // Monitoring configurations for sending metrics to the producer project. + // There can be multiple producer destinations. A monitored resouce type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. + ProducerDestinations []*Monitoring_MonitoringDestination `protobuf:"bytes,1,rep,name=producer_destinations,json=producerDestinations,proto3" json:"producer_destinations,omitempty"` + // Monitoring configurations for sending metrics to the consumer project. + // There can be multiple consumer destinations. A monitored resouce type may + // appear in multiple monitoring destinations if different aggregations are + // needed for different sets of metrics associated with that monitored + // resource type. A monitored resource and metric pair may only be used once + // in the Monitoring configuration. 
+ ConsumerDestinations []*Monitoring_MonitoringDestination `protobuf:"bytes,2,rep,name=consumer_destinations,json=consumerDestinations,proto3" json:"consumer_destinations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Monitoring) Reset() { *m = Monitoring{} } +func (m *Monitoring) String() string { return proto.CompactTextString(m) } +func (*Monitoring) ProtoMessage() {} +func (*Monitoring) Descriptor() ([]byte, []int) { + return fileDescriptor_monitoring_bee01dc8a7135913, []int{0} +} +func (m *Monitoring) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Monitoring.Unmarshal(m, b) +} +func (m *Monitoring) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Monitoring.Marshal(b, m, deterministic) +} +func (dst *Monitoring) XXX_Merge(src proto.Message) { + xxx_messageInfo_Monitoring.Merge(dst, src) +} +func (m *Monitoring) XXX_Size() int { + return xxx_messageInfo_Monitoring.Size(m) +} +func (m *Monitoring) XXX_DiscardUnknown() { + xxx_messageInfo_Monitoring.DiscardUnknown(m) +} + +var xxx_messageInfo_Monitoring proto.InternalMessageInfo + +func (m *Monitoring) GetProducerDestinations() []*Monitoring_MonitoringDestination { + if m != nil { + return m.ProducerDestinations + } + return nil +} + +func (m *Monitoring) GetConsumerDestinations() []*Monitoring_MonitoringDestination { + if m != nil { + return m.ConsumerDestinations + } + return nil +} + +// Configuration of a specific monitoring destination (the producer project +// or the consumer project). +type Monitoring_MonitoringDestination struct { + // The monitored resource type. The type must be defined in + // [Service.monitored_resources][google.api.Service.monitored_resources] section. + MonitoredResource string `protobuf:"bytes,1,opt,name=monitored_resource,json=monitoredResource,proto3" json:"monitored_resource,omitempty"` + // Types of the metrics to report to this monitoring destination. + // Each type must be defined in [Service.metrics][google.api.Service.metrics] section. 
+ Metrics []string `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Monitoring_MonitoringDestination) Reset() { *m = Monitoring_MonitoringDestination{} } +func (m *Monitoring_MonitoringDestination) String() string { return proto.CompactTextString(m) } +func (*Monitoring_MonitoringDestination) ProtoMessage() {} +func (*Monitoring_MonitoringDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_monitoring_bee01dc8a7135913, []int{0, 0} +} +func (m *Monitoring_MonitoringDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Monitoring_MonitoringDestination.Unmarshal(m, b) +} +func (m *Monitoring_MonitoringDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Monitoring_MonitoringDestination.Marshal(b, m, deterministic) +} +func (dst *Monitoring_MonitoringDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_Monitoring_MonitoringDestination.Merge(dst, src) +} +func (m *Monitoring_MonitoringDestination) XXX_Size() int { + return xxx_messageInfo_Monitoring_MonitoringDestination.Size(m) +} +func (m *Monitoring_MonitoringDestination) XXX_DiscardUnknown() { + xxx_messageInfo_Monitoring_MonitoringDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_Monitoring_MonitoringDestination proto.InternalMessageInfo + +func (m *Monitoring_MonitoringDestination) GetMonitoredResource() string { + if m != nil { + return m.MonitoredResource + } + return "" +} + +func (m *Monitoring_MonitoringDestination) GetMetrics() []string { + if m != nil { + return m.Metrics + } + return nil +} + +func init() { + proto.RegisterType((*Monitoring)(nil), "google.api.Monitoring") + proto.RegisterType((*Monitoring_MonitoringDestination)(nil), "google.api.Monitoring.MonitoringDestination") +} + +func init() { + proto.RegisterFile("google/api/monitoring.proto", fileDescriptor_monitoring_bee01dc8a7135913) +} + +var fileDescriptor_monitoring_bee01dc8a7135913 = []byte{ + // 263 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x91, 0xcf, 0x4a, 0xc4, 0x30, + 0x10, 0xc6, 0xe9, 0x2a, 0xca, 0x46, 0x50, 0x0c, 0x2e, 0x14, 0xbd, 0x2c, 0x9e, 0xf6, 0xa0, 0x29, + 0xe8, 0xd1, 0x93, 0x8b, 0x22, 0x1e, 0x84, 0xd2, 0xa3, 0x97, 0x35, 0xa6, 0x63, 0x18, 0xd8, 0x66, + 0xe2, 0x24, 0xf5, 0x81, 0x7c, 0x06, 0x1f, 0x50, 0xb6, 0x7f, 0xb6, 0x55, 0x3c, 0x79, 0xcb, 0xe4, + 0x9b, 0x6f, 0x7e, 0xc3, 0x7c, 0xe2, 0xcc, 0x12, 0xd9, 0x35, 0x64, 0xda, 0x63, 0x56, 0x91, 0xc3, + 0x48, 0x8c, 0xce, 0x2a, 0xcf, 0x14, 0x49, 0x8a, 0x56, 0x54, 0xda, 0xe3, 0xf9, 0xd7, 0x44, 0x88, + 0xa7, 0x6d, 0x83, 0xd4, 0x62, 0xe6, 0x99, 0xca, 0xda, 0x00, 0xaf, 0x4a, 0x08, 0x11, 0x9d, 0x8e, + 0x48, 0x2e, 0xa4, 0xc9, 0x7c, 0x67, 0x71, 0x70, 0x75, 0xa1, 0x06, 0xab, 0x1a, 0x6c, 0xa3, 0xe7, + 0xdd, 0x60, 0x2a, 0x4e, 0xfa, 0x51, 0xa3, 0xcf, 0xb0, 0x41, 0x18, 0x72, 0xa1, 0xae, 0x7e, 0x23, + 0x26, 0xff, 0x41, 0xf4, 0xa3, 0xc6, 0x88, 0xd3, 0x17, 0x31, 0xfb, 0xb3, 0x5d, 0x5e, 0x0a, 0xd9, + 0x5d, 0x03, 0xca, 0x15, 0x43, 0xa0, 0x9a, 0x0d, 0xa4, 0xc9, 0x3c, 0x59, 0x4c, 0x8b, 0xe3, 0xad, + 0x52, 0x74, 0x82, 0x4c, 0xc5, 0x7e, 0x05, 0x91, 0xd1, 0xb4, 0xcb, 0x4d, 0x8b, 0xbe, 0x5c, 0xbe, + 0x8b, 0x43, 0x43, 0xd5, 0x68, 0xd5, 0xe5, 0xd1, 0x40, 0xcc, 0x37, 0x57, 0xce, 0x93, 0xe7, 0xfb, + 0x4e, 0xb6, 0xb4, 0xd6, 0xce, 0x2a, 0x62, 0x9b, 0x59, 0x70, 0x4d, 0x06, 0x59, 0x2b, 0x69, 0x8f, + 0xa1, 0xc9, 0x28, 0x00, 0x7f, 0xa0, 0x01, 
0x43, 0xee, 0x0d, 0xed, 0xcd, 0x8f, 0xea, 0x73, 0xb2, + 0xfb, 0x70, 0x9b, 0x3f, 0xbe, 0xee, 0x35, 0xc6, 0xeb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb1, + 0x9a, 0xd5, 0x79, 0xdb, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/quota.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/quota.pb.go new file mode 100644 index 0000000..945801d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/quota.pb.go @@ -0,0 +1,393 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/quota.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Quota configuration helps to achieve fairness and budgeting in service +// usage. +// +// The metric based quota configuration works this way: +// - The service configuration defines a set of metrics. +// - For API calls, the quota.metric_rules maps methods to metrics with +// corresponding costs. +// - The quota.limits defines limits on the metrics, which will be used for +// quota checks at runtime. +// +// An example quota configuration in yaml format: +// +// quota: +// limits: +// +// - name: apiWriteQpsPerProject +// metric: library.googleapis.com/write_calls +// unit: "1/min/{project}" # rate limit for consumer projects +// values: +// STANDARD: 10000 +// +// +// # The metric rules bind all methods to the read_calls metric, +// # except for the UpdateBook and DeleteBook methods. These two methods +// # are mapped to the write_calls metric, with the UpdateBook method +// # consuming at twice rate as the DeleteBook method. +// metric_rules: +// - selector: "*" +// metric_costs: +// library.googleapis.com/read_calls: 1 +// - selector: google.example.library.v1.LibraryService.UpdateBook +// metric_costs: +// library.googleapis.com/write_calls: 2 +// - selector: google.example.library.v1.LibraryService.DeleteBook +// metric_costs: +// library.googleapis.com/write_calls: 1 +// +// Corresponding Metric definition: +// +// metrics: +// - name: library.googleapis.com/read_calls +// display_name: Read requests +// metric_kind: DELTA +// value_type: INT64 +// +// - name: library.googleapis.com/write_calls +// display_name: Write requests +// metric_kind: DELTA +// value_type: INT64 +// +// +type Quota struct { + // List of `QuotaLimit` definitions for the service. + Limits []*QuotaLimit `protobuf:"bytes,3,rep,name=limits,proto3" json:"limits,omitempty"` + // List of `MetricRule` definitions, each one mapping a selected method to one + // or more metrics. 
+ MetricRules []*MetricRule `protobuf:"bytes,4,rep,name=metric_rules,json=metricRules,proto3" json:"metric_rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Quota) Reset() { *m = Quota{} } +func (m *Quota) String() string { return proto.CompactTextString(m) } +func (*Quota) ProtoMessage() {} +func (*Quota) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_129273d1f869e90d, []int{0} +} +func (m *Quota) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Quota.Unmarshal(m, b) +} +func (m *Quota) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Quota.Marshal(b, m, deterministic) +} +func (dst *Quota) XXX_Merge(src proto.Message) { + xxx_messageInfo_Quota.Merge(dst, src) +} +func (m *Quota) XXX_Size() int { + return xxx_messageInfo_Quota.Size(m) +} +func (m *Quota) XXX_DiscardUnknown() { + xxx_messageInfo_Quota.DiscardUnknown(m) +} + +var xxx_messageInfo_Quota proto.InternalMessageInfo + +func (m *Quota) GetLimits() []*QuotaLimit { + if m != nil { + return m.Limits + } + return nil +} + +func (m *Quota) GetMetricRules() []*MetricRule { + if m != nil { + return m.MetricRules + } + return nil +} + +// Bind API methods to metrics. Binding a method to a metric causes that +// metric's configured quota behaviors to apply to the method call. +type MetricRule struct { + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // Metrics to update when the selected methods are called, and the associated + // cost applied to each metric. + // + // The key of the map is the metric name, and the values are the amount + // increased for the metric against which the quota limits are defined. + // The value must not be negative. + MetricCosts map[string]int64 `protobuf:"bytes,2,rep,name=metric_costs,json=metricCosts,proto3" json:"metric_costs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetricRule) Reset() { *m = MetricRule{} } +func (m *MetricRule) String() string { return proto.CompactTextString(m) } +func (*MetricRule) ProtoMessage() {} +func (*MetricRule) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_129273d1f869e90d, []int{1} +} +func (m *MetricRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetricRule.Unmarshal(m, b) +} +func (m *MetricRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetricRule.Marshal(b, m, deterministic) +} +func (dst *MetricRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetricRule.Merge(dst, src) +} +func (m *MetricRule) XXX_Size() int { + return xxx_messageInfo_MetricRule.Size(m) +} +func (m *MetricRule) XXX_DiscardUnknown() { + xxx_messageInfo_MetricRule.DiscardUnknown(m) +} + +var xxx_messageInfo_MetricRule proto.InternalMessageInfo + +func (m *MetricRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *MetricRule) GetMetricCosts() map[string]int64 { + if m != nil { + return m.MetricCosts + } + return nil +} + +// `QuotaLimit` defines a specific limit that applies over a specified duration +// for a limit type. 
There can be at most one limit for a duration and limit +// type combination defined within a `QuotaGroup`. +type QuotaLimit struct { + // Name of the quota limit. + // + // The name must be provided, and it must be unique within the service. The + // name can only include alphanumeric characters as well as '-'. + // + // The maximum length of the limit name is 64 characters. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + // Optional. User-visible, extended description for this quota limit. + // Should be used only when more context is needed to understand this limit + // than provided by the limit's display name (see: `display_name`). + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Default number of tokens that can be consumed during the specified + // duration. This is the number of tokens assigned when a client + // application developer activates the service for his/her project. + // + // Specifying a value of 0 will block all requests. This can be used if you + // are provisioning quota to selected consumers and blocking others. + // Similarly, a value of -1 will indicate an unlimited quota. No other + // negative values are allowed. + // + // Used by group-based quotas only. + DefaultLimit int64 `protobuf:"varint,3,opt,name=default_limit,json=defaultLimit,proto3" json:"default_limit,omitempty"` + // Maximum number of tokens that can be consumed during the specified + // duration. Client application developers can override the default limit up + // to this maximum. If specified, this value cannot be set to a value less + // than the default limit. If not specified, it is set to the default limit. + // + // To allow clients to apply overrides with no upper bound, set this to -1, + // indicating unlimited maximum quota. + // + // Used by group-based quotas only. + MaxLimit int64 `protobuf:"varint,4,opt,name=max_limit,json=maxLimit,proto3" json:"max_limit,omitempty"` + // Free tier value displayed in the Developers Console for this limit. + // The free tier is the number of tokens that will be subtracted from the + // billed amount when billing is enabled. + // This field can only be set on a limit with duration "1d", in a billable + // group; it is invalid on any other limit. If this field is not set, it + // defaults to 0, indicating that there is no free tier for this service. + // + // Used by group-based quotas only. + FreeTier int64 `protobuf:"varint,7,opt,name=free_tier,json=freeTier,proto3" json:"free_tier,omitempty"` + // Duration of this limit in textual notation. Example: "100s", "24h", "1d". + // For duration longer than a day, only multiple of days is supported. We + // support only "100s" and "1d" for now. Additional support will be added in + // the future. "0" indicates indefinite duration. + // + // Used by group-based quotas only. + Duration string `protobuf:"bytes,5,opt,name=duration,proto3" json:"duration,omitempty"` + // The name of the metric this quota limit applies to. The quota limits with + // the same metric will be checked together during runtime. The metric must be + // defined within the service config. + Metric string `protobuf:"bytes,8,opt,name=metric,proto3" json:"metric,omitempty"` + // Specify the unit of the quota limit. It uses the same syntax as + // [Metric.unit][]. The supported unit kinds are determined by the quota + // backend system. + // + // Here are some examples: + // * "1/min/{project}" for quota per minute per project. 
+ // + // Note: the order of unit components is insignificant. + // The "1" at the beginning is required to follow the metric unit syntax. + Unit string `protobuf:"bytes,9,opt,name=unit,proto3" json:"unit,omitempty"` + // Tiered limit values. You must specify this as a key:value pair, with an + // integer value that is the maximum number of requests allowed for the + // specified unit. Currently only STANDARD is supported. + Values map[string]int64 `protobuf:"bytes,10,rep,name=values,proto3" json:"values,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + // User-visible display name for this limit. + // Optional. If not set, the UI will provide a default display name based on + // the quota configuration. This field can be used to override the default + // display name generated from the configuration. + DisplayName string `protobuf:"bytes,12,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaLimit) Reset() { *m = QuotaLimit{} } +func (m *QuotaLimit) String() string { return proto.CompactTextString(m) } +func (*QuotaLimit) ProtoMessage() {} +func (*QuotaLimit) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_129273d1f869e90d, []int{2} +} +func (m *QuotaLimit) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaLimit.Unmarshal(m, b) +} +func (m *QuotaLimit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaLimit.Marshal(b, m, deterministic) +} +func (dst *QuotaLimit) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaLimit.Merge(dst, src) +} +func (m *QuotaLimit) XXX_Size() int { + return xxx_messageInfo_QuotaLimit.Size(m) +} +func (m *QuotaLimit) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaLimit.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaLimit proto.InternalMessageInfo + +func (m *QuotaLimit) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *QuotaLimit) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *QuotaLimit) GetDefaultLimit() int64 { + if m != nil { + return m.DefaultLimit + } + return 0 +} + +func (m *QuotaLimit) GetMaxLimit() int64 { + if m != nil { + return m.MaxLimit + } + return 0 +} + +func (m *QuotaLimit) GetFreeTier() int64 { + if m != nil { + return m.FreeTier + } + return 0 +} + +func (m *QuotaLimit) GetDuration() string { + if m != nil { + return m.Duration + } + return "" +} + +func (m *QuotaLimit) GetMetric() string { + if m != nil { + return m.Metric + } + return "" +} + +func (m *QuotaLimit) GetUnit() string { + if m != nil { + return m.Unit + } + return "" +} + +func (m *QuotaLimit) GetValues() map[string]int64 { + if m != nil { + return m.Values + } + return nil +} + +func (m *QuotaLimit) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func init() { + proto.RegisterType((*Quota)(nil), "google.api.Quota") + proto.RegisterType((*MetricRule)(nil), "google.api.MetricRule") + proto.RegisterMapType((map[string]int64)(nil), "google.api.MetricRule.MetricCostsEntry") + proto.RegisterType((*QuotaLimit)(nil), "google.api.QuotaLimit") + proto.RegisterMapType((map[string]int64)(nil), "google.api.QuotaLimit.ValuesEntry") +} + +func init() { proto.RegisterFile("google/api/quota.proto", fileDescriptor_quota_129273d1f869e90d) } + +var fileDescriptor_quota_129273d1f869e90d = []byte{ + // 
453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x8e, 0xd3, 0x30, + 0x10, 0x55, 0x9a, 0x6e, 0x68, 0xa7, 0x05, 0xad, 0x2c, 0x54, 0x59, 0xe5, 0x52, 0xca, 0x81, 0x9e, + 0x52, 0x09, 0x2e, 0xec, 0x22, 0x21, 0xb1, 0x68, 0x85, 0x40, 0x80, 0x4a, 0x84, 0x38, 0x70, 0xa9, + 0x4c, 0x3a, 0x8d, 0x2c, 0x9c, 0x38, 0xd8, 0xce, 0x6a, 0x7b, 0xe6, 0x4f, 0xf8, 0x06, 0x3e, 0x10, + 0x79, 0xec, 0x6d, 0x0b, 0xec, 0x85, 0xdb, 0xcc, 0x7b, 0x6f, 0xfc, 0xe2, 0xe7, 0x09, 0x4c, 0x2a, + 0xad, 0x2b, 0x85, 0x4b, 0xd1, 0xca, 0xe5, 0xf7, 0x4e, 0x3b, 0x91, 0xb7, 0x46, 0x3b, 0xcd, 0x20, + 0xe0, 0xb9, 0x68, 0xe5, 0xdc, 0xc0, 0xc9, 0x47, 0x4f, 0xb1, 0x1c, 0x32, 0x25, 0x6b, 0xe9, 0x2c, + 0x4f, 0x67, 0xe9, 0x62, 0xf4, 0x64, 0x92, 0x1f, 0x54, 0x39, 0x49, 0xde, 0x79, 0xba, 0x88, 0x2a, + 0x76, 0x06, 0xe3, 0x1a, 0x9d, 0x91, 0xe5, 0xda, 0x74, 0x0a, 0x2d, 0xef, 0xff, 0x3b, 0xf5, 0x9e, + 0xf8, 0xa2, 0x53, 0x58, 0x8c, 0xea, 0x7d, 0x6d, 0xe7, 0xbf, 0x12, 0x80, 0x03, 0xc7, 0xa6, 0x30, + 0xb0, 0xa8, 0xb0, 0x74, 0xda, 0xf0, 0x64, 0x96, 0x2c, 0x86, 0xc5, 0xbe, 0x67, 0x6f, 0xf7, 0x2e, + 0xa5, 0xb6, 0xce, 0xf2, 0x1e, 0xb9, 0x3c, 0xbe, 0xdd, 0x25, 0x96, 0xaf, 0xbc, 0xf2, 0xb2, 0x71, + 0x66, 0x77, 0x63, 0x4b, 0xc8, 0xf4, 0x05, 0x9c, 0xfe, 0x2d, 0x60, 0xa7, 0x90, 0x7e, 0xc3, 0x5d, + 0xb4, 0xf5, 0x25, 0xbb, 0x0f, 0x27, 0x57, 0x42, 0x75, 0xc8, 0x7b, 0xb3, 0x64, 0x91, 0x16, 0xa1, + 0x39, 0xef, 0x3d, 0x4b, 0xe6, 0x3f, 0x52, 0x80, 0x43, 0x10, 0x8c, 0x41, 0xbf, 0x11, 0x35, 0xf2, + 0x8c, 0x66, 0xa9, 0x66, 0x33, 0x18, 0x6d, 0xd0, 0x96, 0x46, 0xb6, 0x4e, 0xea, 0x86, 0x8e, 0x18, + 0x16, 0xc7, 0x10, 0x7b, 0x04, 0x77, 0x37, 0xb8, 0x15, 0x9d, 0x72, 0x6b, 0x0a, 0x92, 0xa7, 0x64, + 0x33, 0x8e, 0x60, 0x38, 0xfa, 0x01, 0x0c, 0x6b, 0x71, 0x1d, 0x05, 0x7d, 0x12, 0x0c, 0x6a, 0x71, + 0xbd, 0x27, 0xb7, 0x06, 0x71, 0xed, 0x24, 0x1a, 0x7e, 0x27, 0x90, 0x1e, 0xf8, 0x24, 0xd1, 0xf8, + 0x2c, 0x37, 0x9d, 0x11, 0xe4, 0x7e, 0x12, 0xb2, 0xbc, 0xe9, 0xd9, 0x04, 0xb2, 0x10, 0x07, 0x1f, + 0x10, 0x13, 0x3b, 0x7f, 0x91, 0xae, 0x91, 0x8e, 0x0f, 0xc3, 0x45, 0x7c, 0xcd, 0xce, 0x21, 0xa3, + 0x8b, 0x5b, 0x0e, 0x94, 0xf8, 0xfc, 0xf6, 0x6d, 0xc8, 0x3f, 0x93, 0x28, 0x84, 0x1d, 0x27, 0xd8, + 0x43, 0x18, 0x6f, 0xa4, 0x6d, 0x95, 0xd8, 0xad, 0x29, 0xa0, 0x71, 0x4c, 0x21, 0x60, 0x1f, 0x44, + 0x8d, 0xd3, 0x33, 0x18, 0x1d, 0x4d, 0xfe, 0xcf, 0x2b, 0x5c, 0x28, 0xb8, 0x57, 0xea, 0xfa, 0xe8, + 0x73, 0x2e, 0xc2, 0xa3, 0xac, 0xfc, 0x6a, 0xaf, 0x92, 0x2f, 0x97, 0x91, 0xa9, 0xb4, 0x12, 0x4d, + 0x95, 0x6b, 0x53, 0x2d, 0x2b, 0x6c, 0x68, 0xf1, 0x97, 0x81, 0x12, 0xad, 0xb4, 0xf4, 0x4f, 0x58, + 0x34, 0x57, 0xb2, 0xc4, 0x52, 0x37, 0x5b, 0x59, 0x3d, 0xff, 0xa3, 0xfb, 0xd9, 0xeb, 0xbf, 0x7e, + 0xb9, 0x7a, 0xf3, 0x35, 0xa3, 0xc1, 0xa7, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x00, 0x7a, 0xae, + 0xf6, 0x4b, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/service.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/service.pb.go new file mode 100644 index 0000000..66e49ce --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/service.pb.go @@ -0,0 +1,405 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/service.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import api1 "google.golang.org/genproto/googleapis/api" +import annotations "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/label" +import metric "google.golang.org/genproto/googleapis/api/metric" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" +import api "google.golang.org/genproto/protobuf/api" +import ptype "google.golang.org/genproto/protobuf/ptype" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `Service` is the root object of Google service configuration schema. It +// describes basic information about a service, such as the name and the +// title, and delegates other aspects to sub-sections. Each sub-section is +// either a proto message or a repeated proto message that configures a +// specific aspect, such as auth. See each proto message definition for details. +// +// Example: +// +// type: google.api.Service +// config_version: 3 +// name: calendar.googleapis.com +// title: Google Calendar API +// apis: +// - name: google.calendar.v3.Calendar +// authentication: +// providers: +// - id: google_calendar_auth +// jwks_uri: https://www.googleapis.com/oauth2/v1/certs +// issuer: https://securetoken.google.com +// rules: +// - selector: "*" +// requirements: +// provider_id: google_calendar_auth +type Service struct { + // The semantic version of the service configuration. The config version + // affects the interpretation of the service configuration. For example, + // certain features are enabled by default for certain config versions. + // The latest config version is `3`. + ConfigVersion *wrappers.UInt32Value `protobuf:"bytes,20,opt,name=config_version,json=configVersion,proto3" json:"config_version,omitempty"` + // The service name, which is a DNS-like logical identifier for the + // service, such as `calendar.googleapis.com`. The service name + // typically goes through DNS verification to make sure the owner + // of the service also owns the DNS name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A unique ID for a specific instance of this message, typically assigned + // by the client for tracking purpose. If empty, the server may choose to + // generate one instead. Must be no longer than 60 characters. + Id string `protobuf:"bytes,33,opt,name=id,proto3" json:"id,omitempty"` + // The product title for this service. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // The Google project that owns this service. + ProducerProjectId string `protobuf:"bytes,22,opt,name=producer_project_id,json=producerProjectId,proto3" json:"producer_project_id,omitempty"` + // A list of API interfaces exported by this service. 
Only the `name` field + // of the [google.protobuf.Api][google.protobuf.Api] needs to be provided by the configuration + // author, as the remaining fields will be derived from the IDL during the + // normalization process. It is an error to specify an API interface here + // which cannot be resolved against the associated IDL files. + Apis []*api.Api `protobuf:"bytes,3,rep,name=apis,proto3" json:"apis,omitempty"` + // A list of all proto message types included in this API service. + // Types referenced directly or indirectly by the `apis` are + // automatically included. Messages which are not referenced but + // shall be included, such as types used by the `google.protobuf.Any` type, + // should be listed here by name. Example: + // + // types: + // - name: google.protobuf.Int32 + Types []*ptype.Type `protobuf:"bytes,4,rep,name=types,proto3" json:"types,omitempty"` + // A list of all enum types included in this API service. Enums + // referenced directly or indirectly by the `apis` are automatically + // included. Enums which are not referenced but shall be included + // should be listed here by name. Example: + // + // enums: + // - name: google.someapi.v1.SomeEnum + Enums []*ptype.Enum `protobuf:"bytes,5,rep,name=enums,proto3" json:"enums,omitempty"` + // Additional API documentation. + Documentation *Documentation `protobuf:"bytes,6,opt,name=documentation,proto3" json:"documentation,omitempty"` + // API backend configuration. + Backend *Backend `protobuf:"bytes,8,opt,name=backend,proto3" json:"backend,omitempty"` + // HTTP configuration. + Http *annotations.Http `protobuf:"bytes,9,opt,name=http,proto3" json:"http,omitempty"` + // Quota configuration. + Quota *Quota `protobuf:"bytes,10,opt,name=quota,proto3" json:"quota,omitempty"` + // Auth configuration. + Authentication *Authentication `protobuf:"bytes,11,opt,name=authentication,proto3" json:"authentication,omitempty"` + // Context configuration. + Context *Context `protobuf:"bytes,12,opt,name=context,proto3" json:"context,omitempty"` + // Configuration controlling usage of this service. + Usage *Usage `protobuf:"bytes,15,opt,name=usage,proto3" json:"usage,omitempty"` + // Configuration for network endpoints. If this is empty, then an endpoint + // with the same name as the service is automatically generated to service all + // defined APIs. + Endpoints []*Endpoint `protobuf:"bytes,18,rep,name=endpoints,proto3" json:"endpoints,omitempty"` + // Configuration for the service control plane. + Control *Control `protobuf:"bytes,21,opt,name=control,proto3" json:"control,omitempty"` + // Defines the logs used by this service. + Logs []*LogDescriptor `protobuf:"bytes,23,rep,name=logs,proto3" json:"logs,omitempty"` + // Defines the metrics used by this service. + Metrics []*metric.MetricDescriptor `protobuf:"bytes,24,rep,name=metrics,proto3" json:"metrics,omitempty"` + // Defines the monitored resources used by this service. This is required + // by the [Service.monitoring][google.api.Service.monitoring] and [Service.logging][google.api.Service.logging] configurations. + MonitoredResources []*monitoredres.MonitoredResourceDescriptor `protobuf:"bytes,25,rep,name=monitored_resources,json=monitoredResources,proto3" json:"monitored_resources,omitempty"` + // Billing configuration. + Billing *Billing `protobuf:"bytes,26,opt,name=billing,proto3" json:"billing,omitempty"` + // Logging configuration. + Logging *Logging `protobuf:"bytes,27,opt,name=logging,proto3" json:"logging,omitempty"` + // Monitoring configuration. 
+ Monitoring *Monitoring `protobuf:"bytes,28,opt,name=monitoring,proto3" json:"monitoring,omitempty"` + // System parameter configuration. + SystemParameters *SystemParameters `protobuf:"bytes,29,opt,name=system_parameters,json=systemParameters,proto3" json:"system_parameters,omitempty"` + // Output only. The source information for this configuration if available. + SourceInfo *SourceInfo `protobuf:"bytes,37,opt,name=source_info,json=sourceInfo,proto3" json:"source_info,omitempty"` + // Experimental configuration. + Experimental *api1.Experimental `protobuf:"bytes,101,opt,name=experimental,proto3" json:"experimental,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Service) Reset() { *m = Service{} } +func (m *Service) String() string { return proto.CompactTextString(m) } +func (*Service) ProtoMessage() {} +func (*Service) Descriptor() ([]byte, []int) { + return fileDescriptor_service_e2baf83097b3e099, []int{0} +} +func (m *Service) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Service.Unmarshal(m, b) +} +func (m *Service) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Service.Marshal(b, m, deterministic) +} +func (dst *Service) XXX_Merge(src proto.Message) { + xxx_messageInfo_Service.Merge(dst, src) +} +func (m *Service) XXX_Size() int { + return xxx_messageInfo_Service.Size(m) +} +func (m *Service) XXX_DiscardUnknown() { + xxx_messageInfo_Service.DiscardUnknown(m) +} + +var xxx_messageInfo_Service proto.InternalMessageInfo + +func (m *Service) GetConfigVersion() *wrappers.UInt32Value { + if m != nil { + return m.ConfigVersion + } + return nil +} + +func (m *Service) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Service) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Service) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Service) GetProducerProjectId() string { + if m != nil { + return m.ProducerProjectId + } + return "" +} + +func (m *Service) GetApis() []*api.Api { + if m != nil { + return m.Apis + } + return nil +} + +func (m *Service) GetTypes() []*ptype.Type { + if m != nil { + return m.Types + } + return nil +} + +func (m *Service) GetEnums() []*ptype.Enum { + if m != nil { + return m.Enums + } + return nil +} + +func (m *Service) GetDocumentation() *Documentation { + if m != nil { + return m.Documentation + } + return nil +} + +func (m *Service) GetBackend() *Backend { + if m != nil { + return m.Backend + } + return nil +} + +func (m *Service) GetHttp() *annotations.Http { + if m != nil { + return m.Http + } + return nil +} + +func (m *Service) GetQuota() *Quota { + if m != nil { + return m.Quota + } + return nil +} + +func (m *Service) GetAuthentication() *Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *Service) GetContext() *Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *Service) GetUsage() *Usage { + if m != nil { + return m.Usage + } + return nil +} + +func (m *Service) GetEndpoints() []*Endpoint { + if m != nil { + return m.Endpoints + } + return nil +} + +func (m *Service) GetControl() *Control { + if m != nil { + return m.Control + } + return nil +} + +func (m *Service) GetLogs() []*LogDescriptor { + if m != nil { + return m.Logs + } + return nil +} + +func (m *Service) GetMetrics() []*metric.MetricDescriptor { + if m != nil { + return m.Metrics + } + return nil 
+} + +func (m *Service) GetMonitoredResources() []*monitoredres.MonitoredResourceDescriptor { + if m != nil { + return m.MonitoredResources + } + return nil +} + +func (m *Service) GetBilling() *Billing { + if m != nil { + return m.Billing + } + return nil +} + +func (m *Service) GetLogging() *Logging { + if m != nil { + return m.Logging + } + return nil +} + +func (m *Service) GetMonitoring() *Monitoring { + if m != nil { + return m.Monitoring + } + return nil +} + +func (m *Service) GetSystemParameters() *SystemParameters { + if m != nil { + return m.SystemParameters + } + return nil +} + +func (m *Service) GetSourceInfo() *SourceInfo { + if m != nil { + return m.SourceInfo + } + return nil +} + +func (m *Service) GetExperimental() *api1.Experimental { + if m != nil { + return m.Experimental + } + return nil +} + +func init() { + proto.RegisterType((*Service)(nil), "google.api.Service") +} + +func init() { proto.RegisterFile("google/api/service.proto", fileDescriptor_service_e2baf83097b3e099) } + +var fileDescriptor_service_e2baf83097b3e099 = []byte{ + // 825 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x96, 0xdf, 0x6e, 0xdb, 0x36, + 0x14, 0x87, 0x61, 0xd7, 0x6e, 0x16, 0x3a, 0xcd, 0x1a, 0xc6, 0x49, 0x19, 0xd7, 0x1b, 0xd2, 0xfd, + 0x41, 0x8d, 0x0d, 0x95, 0x01, 0x17, 0xe8, 0x2e, 0x36, 0x60, 0x88, 0xdb, 0x60, 0x33, 0xd0, 0x01, + 0x1e, 0xb3, 0x16, 0xc3, 0x6e, 0x0c, 0x5a, 0xa2, 0x55, 0x6e, 0x12, 0xc9, 0x91, 0x54, 0x17, 0x3f, + 0xc7, 0xde, 0x60, 0x4f, 0x3a, 0x88, 0xa4, 0x62, 0xca, 0x52, 0xee, 0x22, 0x7e, 0xdf, 0x39, 0x38, + 0x14, 0xa9, 0x9f, 0x03, 0x50, 0x2a, 0x44, 0x9a, 0xd1, 0x29, 0x91, 0x6c, 0xaa, 0xa9, 0xfa, 0xc8, + 0x62, 0x1a, 0x49, 0x25, 0x8c, 0x80, 0xc0, 0x91, 0x88, 0x48, 0x36, 0x1a, 0x07, 0x16, 0xe1, 0x5c, + 0x18, 0x62, 0x98, 0xe0, 0xda, 0x99, 0xa3, 0xb3, 0x90, 0x16, 0xe6, 0x83, 0x5f, 0x0e, 0x5b, 0xaf, + 0x49, 0xfc, 0x17, 0xe5, 0x49, 0x1b, 0x61, 0x59, 0xc6, 0x78, 0xda, 0x42, 0x62, 0xc1, 0x0d, 0xbd, + 0x35, 0xf7, 0x10, 0x25, 0x32, 0x4f, 0x3e, 0x0f, 0x48, 0x22, 0xe2, 0x22, 0xa7, 0xdc, 0xcd, 0xe7, + 0xf9, 0x45, 0xc0, 0x29, 0x4f, 0xa4, 0x60, 0xbc, 0x6a, 0xfa, 0x4d, 0x88, 0x6e, 0x25, 0x55, 0xcc, + 0x16, 0x67, 0xb5, 0x87, 0x96, 0x5d, 0x7e, 0x30, 0x46, 0xfa, 0xe5, 0xf3, 0x60, 0x39, 0x23, 0x6b, + 0x5a, 0xe9, 0xc3, 0x70, 0x5d, 0xb4, 0xed, 0x2f, 0x13, 0x69, 0xba, 0xdb, 0xf9, 0x93, 0x80, 0xe4, + 0xd4, 0x28, 0x16, 0x7b, 0xf0, 0x65, 0x08, 0x04, 0x67, 0x46, 0x28, 0x9a, 0xac, 0x14, 0xd5, 0xa2, + 0x50, 0xd5, 0x61, 0x8d, 0x9e, 0x36, 0xa5, 0x5d, 0xeb, 0x70, 0xc4, 0xbf, 0x0b, 0x61, 0x88, 0x5f, + 0x0f, 0x4f, 0xd5, 0x75, 0x5b, 0x31, 0xbe, 0x11, 0x9e, 0x3e, 0x0b, 0xe9, 0x56, 0x1b, 0x9a, 0xaf, + 0x24, 0x51, 0x24, 0xa7, 0x86, 0xaa, 0x96, 0xc6, 0x85, 0x26, 0x29, 0xdd, 0x7b, 0xe3, 0xf6, 0x69, + 0x5d, 0x6c, 0xa6, 0x84, 0x6f, 0xef, 0x45, 0x92, 0x79, 0x34, 0xda, 0x47, 0x66, 0x2b, 0xe9, 0xde, + 0x19, 0xdf, 0xb1, 0x7f, 0x14, 0x91, 0x92, 0x2a, 0x7f, 0x05, 0xbf, 0xf8, 0x17, 0x80, 0x83, 0x1b, + 0x77, 0x7d, 0xe1, 0x6b, 0x70, 0x1c, 0x0b, 0xbe, 0x61, 0xe9, 0xea, 0x23, 0x55, 0x9a, 0x09, 0x8e, + 0x86, 0x97, 0x9d, 0xc9, 0x60, 0x36, 0x8e, 0xfc, 0x8d, 0xae, 0x9a, 0x44, 0xef, 0x16, 0xdc, 0xbc, + 0x9c, 0xbd, 0x27, 0x59, 0x41, 0xf1, 0x23, 0x57, 0xf3, 0xde, 0x95, 0x40, 0x08, 0x7a, 0x9c, 0xe4, + 0x14, 0x75, 0x2e, 0x3b, 0x93, 0x43, 0x6c, 0xff, 0x86, 0xc7, 0xa0, 0xcb, 0x12, 0xf4, 0xcc, 0xae, + 0x74, 0x59, 0x02, 0x87, 0xa0, 0x6f, 0x98, 0xc9, 0x28, 0xea, 0xda, 0x25, 0xf7, 0x00, 0x23, 0x70, + 0x2a, 0x95, 0x48, 0x8a, 0x98, 0xaa, 0x95, 0x54, 0xe2, 0x4f, 0x1a, 0x9b, 0x15, 0x4b, 
0xd0, 0xb9, + 0x75, 0x4e, 0x2a, 0xb4, 0x74, 0x64, 0x91, 0xc0, 0x09, 0xe8, 0x11, 0xc9, 0x34, 0x7a, 0x70, 0xf9, + 0x60, 0x32, 0x98, 0x0d, 0x1b, 0x43, 0x5e, 0x49, 0x86, 0xad, 0x01, 0xbf, 0x05, 0xfd, 0xf2, 0x95, + 0x68, 0xd4, 0xb3, 0xea, 0x59, 0x43, 0xfd, 0x6d, 0x2b, 0x29, 0x76, 0x4e, 0x29, 0x53, 0x5e, 0xe4, + 0x1a, 0xf5, 0xef, 0x91, 0xaf, 0x79, 0x91, 0x63, 0xe7, 0xc0, 0x1f, 0xc1, 0xa3, 0xda, 0x97, 0x83, + 0x1e, 0xda, 0x37, 0x76, 0x11, 0xed, 0x32, 0x20, 0x7a, 0x13, 0x0a, 0xb8, 0xee, 0xc3, 0x17, 0xe0, + 0xc0, 0x7f, 0xe2, 0xe8, 0x13, 0x5b, 0x7a, 0x1a, 0x96, 0xce, 0x1d, 0xc2, 0x95, 0x03, 0xbf, 0x02, + 0xbd, 0xf2, 0x13, 0x42, 0x87, 0xd6, 0x7d, 0x1c, 0xba, 0x3f, 0x1b, 0x23, 0xb1, 0xa5, 0xf0, 0x39, + 0xe8, 0xdb, 0xeb, 0x8a, 0x80, 0xd5, 0x4e, 0x42, 0xed, 0xd7, 0x12, 0x60, 0xc7, 0xe1, 0x1c, 0x1c, + 0x97, 0xb9, 0x43, 0xb9, 0x61, 0xb1, 0x9b, 0x7f, 0x60, 0x2b, 0x46, 0x61, 0xc5, 0x55, 0xcd, 0xc0, + 0x7b, 0x15, 0xe5, 0x0e, 0x7c, 0xe0, 0xa0, 0xa3, 0xe6, 0x0e, 0x5e, 0x3b, 0x84, 0x2b, 0xa7, 0x9c, + 0xcd, 0xde, 0x78, 0xf4, 0x69, 0x73, 0xb6, 0x77, 0x25, 0xc0, 0x8e, 0xc3, 0x19, 0x38, 0xac, 0x42, + 0x47, 0x23, 0x58, 0x3f, 0xe3, 0x52, 0xbe, 0xf6, 0x10, 0xef, 0xb4, 0x6a, 0x16, 0x25, 0x32, 0x74, + 0xd6, 0x3e, 0x8b, 0x12, 0x19, 0xae, 0x1c, 0xf8, 0x02, 0xf4, 0x32, 0x91, 0x6a, 0xf4, 0xc4, 0x76, + 0xaf, 0x1d, 0xda, 0x5b, 0x91, 0xbe, 0xa1, 0x3a, 0x56, 0x4c, 0x1a, 0xa1, 0xb0, 0xd5, 0xe0, 0x2b, + 0x70, 0xe0, 0x02, 0x46, 0x23, 0x64, 0x2b, 0xc6, 0x61, 0xc5, 0x2f, 0x16, 0x05, 0x45, 0x95, 0x0c, + 0x7f, 0x07, 0xa7, 0xcd, 0xfc, 0xd1, 0xe8, 0xc2, 0xf6, 0x78, 0x5e, 0xeb, 0x51, 0x69, 0xd8, 0x5b, + 0x41, 0x3b, 0x98, 0xef, 0x43, 0xbb, 0x5f, 0xff, 0x33, 0x80, 0x46, 0x2d, 0xb7, 0xc7, 0x21, 0x5c, + 0x39, 0xa5, 0xee, 0xb3, 0x13, 0x3d, 0x6d, 0xea, 0x6f, 0x1d, 0xc2, 0x95, 0x03, 0x5f, 0x01, 0xb0, + 0x8b, 0x44, 0x34, 0xb6, 0x15, 0xe7, 0x2d, 0xe3, 0x96, 0x45, 0x81, 0x09, 0x17, 0xe0, 0x64, 0x3f, + 0xf7, 0x34, 0xfa, 0xac, 0x1e, 0x25, 0x65, 0xf9, 0x8d, 0x95, 0x96, 0x77, 0x0e, 0x7e, 0xac, 0xf7, + 0x56, 0xe0, 0x77, 0x60, 0x10, 0x04, 0x2c, 0xfa, 0xba, 0x39, 0xc3, 0x8d, 0xc5, 0x0b, 0xbe, 0x11, + 0x18, 0xe8, 0xbb, 0xbf, 0xe1, 0x0f, 0xe0, 0x28, 0xfc, 0x29, 0x42, 0xd4, 0x56, 0xa2, 0xda, 0x05, + 0x0a, 0x38, 0xae, 0xd9, 0x73, 0x5e, 0x26, 0x61, 0x1e, 0xc8, 0xf3, 0x23, 0x1f, 0x92, 0xcb, 0x32, + 0x05, 0x96, 0x9d, 0x3f, 0xae, 0x3d, 0x4b, 0x45, 0x46, 0x78, 0x1a, 0x09, 0x95, 0x4e, 0x53, 0xca, + 0x6d, 0x46, 0x4c, 0x1d, 0x2a, 0x93, 0x27, 0xfc, 0xef, 0xc0, 0xc5, 0xe4, 0xf7, 0xb5, 0xa7, 0xff, + 0xba, 0xbd, 0x9f, 0xae, 0x96, 0x8b, 0xf5, 0x43, 0x5b, 0xf8, 0xf2, 0xff, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xfe, 0x6c, 0x4b, 0xf7, 0x55, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/source_info.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/source_info.pb.go new file mode 100644 index 0000000..0b40ddc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/source_info.pb.go @@ -0,0 +1,85 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/source_info.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Source information used to create a Service Config +type SourceInfo struct { + // All files used during config generation. + SourceFiles []*any.Any `protobuf:"bytes,1,rep,name=source_files,json=sourceFiles,proto3" json:"source_files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceInfo) Reset() { *m = SourceInfo{} } +func (m *SourceInfo) String() string { return proto.CompactTextString(m) } +func (*SourceInfo) ProtoMessage() {} +func (*SourceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_source_info_17996d8c91d1c33a, []int{0} +} +func (m *SourceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceInfo.Unmarshal(m, b) +} +func (m *SourceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceInfo.Marshal(b, m, deterministic) +} +func (dst *SourceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceInfo.Merge(dst, src) +} +func (m *SourceInfo) XXX_Size() int { + return xxx_messageInfo_SourceInfo.Size(m) +} +func (m *SourceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SourceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceInfo proto.InternalMessageInfo + +func (m *SourceInfo) GetSourceFiles() []*any.Any { + if m != nil { + return m.SourceFiles + } + return nil +} + +func init() { + proto.RegisterType((*SourceInfo)(nil), "google.api.SourceInfo") +} + +func init() { + proto.RegisterFile("google/api/source_info.proto", fileDescriptor_source_info_17996d8c91d1c33a) +} + +var fileDescriptor_source_info_17996d8c91d1c33a = []byte{ + // 198 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0x2c, 0xc8, 0xd4, 0x2f, 0xce, 0x2f, 0x2d, 0x4a, 0x4e, 0x8d, 0xcf, 0xcc, + 0x4b, 0xcb, 0xd7, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0xc8, 0xea, 0x25, 0x16, 0x64, + 0x4a, 0x49, 0x42, 0x55, 0x82, 0x65, 0x92, 0x4a, 0xd3, 0xf4, 0x13, 0xf3, 0x2a, 0x21, 0xca, 0x94, + 0x5c, 0xb9, 0xb8, 0x82, 0xc1, 0x7a, 0x3d, 0xf3, 0xd2, 0xf2, 0x85, 0xcc, 0xb9, 0x78, 0xa0, 0x26, + 0xa5, 0x65, 0xe6, 0xa4, 0x16, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x1b, 0x89, 0xe8, 0x41, 0xcd, + 0x82, 0xe9, 0xd7, 0x73, 0xcc, 0xab, 0x0c, 0xe2, 0x86, 0xa8, 0x74, 0x03, 0x29, 0x74, 0x2a, 0xe4, + 0xe2, 0x4b, 0xce, 0xcf, 0xd5, 0x43, 0xd8, 0xe9, 0xc4, 0x8f, 0x30, 0x36, 0x00, 0xa4, 0x2d, 0x80, + 0x31, 0xca, 0x15, 0x2a, 0x9d, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, 0x9f, + 0x9e, 0x9a, 0x07, 0x36, 0x54, 0x1f, 0x22, 0x95, 0x58, 0x90, 0x59, 0x0c, 0xf1, 0x4f, 0x6a, 0x51, + 0x59, 0x66, 0x72, 0x6a, 0x72, 0x7e, 0x5e, 0x5a, 0x66, 0xba, 0x35, 0x0a, 0x6f, 0x11, 0x13, 0x8b, + 0xbb, 0x63, 0x80, 0x67, 0x12, 0x1b, 0x58, 0xa3, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0xd4, 0x78, + 0x5d, 0xab, 0x07, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/system_parameter.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/system_parameter.pb.go new file mode 100644 index 0000000..9cd02df --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/system_parameter.pb.go @@ -0,0 +1,243 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/system_parameter.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// ### System parameter configuration +// +// A system parameter is a special kind of parameter defined by the API +// system, not by an individual API. It is typically mapped to an HTTP header +// and/or a URL query parameter. This configuration specifies which methods +// change the names of the system parameters. +type SystemParameters struct { + // Define system parameters. + // + // The parameters defined here will override the default parameters + // implemented by the system. If this field is missing from the service + // config, default system parameters will be used. Default system parameters + // and names is implementation-dependent. + // + // Example: define api key for all methods + // + // system_parameters + // rules: + // - selector: "*" + // parameters: + // - name: api_key + // url_query_parameter: api_key + // + // + // Example: define 2 api key names for a specific method. + // + // system_parameters + // rules: + // - selector: "/ListShelves" + // parameters: + // - name: api_key + // http_header: Api-Key1 + // - name: api_key + // http_header: Api-Key2 + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*SystemParameterRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SystemParameters) Reset() { *m = SystemParameters{} } +func (m *SystemParameters) String() string { return proto.CompactTextString(m) } +func (*SystemParameters) ProtoMessage() {} +func (*SystemParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_system_parameter_2f7efca4f2242165, []int{0} +} +func (m *SystemParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SystemParameters.Unmarshal(m, b) +} +func (m *SystemParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SystemParameters.Marshal(b, m, deterministic) +} +func (dst *SystemParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_SystemParameters.Merge(dst, src) +} +func (m *SystemParameters) XXX_Size() int { + return xxx_messageInfo_SystemParameters.Size(m) +} +func (m *SystemParameters) XXX_DiscardUnknown() { + xxx_messageInfo_SystemParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_SystemParameters proto.InternalMessageInfo + +func (m *SystemParameters) GetRules() []*SystemParameterRule { + if m != nil { + return m.Rules + } + return nil +} + +// Define a system parameter rule mapping system parameter definitions to +// methods. +type SystemParameterRule struct { + // Selects the methods to which this rule applies. 
Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // Define parameters. Multiple names may be defined for a parameter. + // For a given method call, only one of them should be used. If multiple + // names are used the behavior is implementation-dependent. + // If none of the specified names are present the behavior is + // parameter-dependent. + Parameters []*SystemParameter `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SystemParameterRule) Reset() { *m = SystemParameterRule{} } +func (m *SystemParameterRule) String() string { return proto.CompactTextString(m) } +func (*SystemParameterRule) ProtoMessage() {} +func (*SystemParameterRule) Descriptor() ([]byte, []int) { + return fileDescriptor_system_parameter_2f7efca4f2242165, []int{1} +} +func (m *SystemParameterRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SystemParameterRule.Unmarshal(m, b) +} +func (m *SystemParameterRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SystemParameterRule.Marshal(b, m, deterministic) +} +func (dst *SystemParameterRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_SystemParameterRule.Merge(dst, src) +} +func (m *SystemParameterRule) XXX_Size() int { + return xxx_messageInfo_SystemParameterRule.Size(m) +} +func (m *SystemParameterRule) XXX_DiscardUnknown() { + xxx_messageInfo_SystemParameterRule.DiscardUnknown(m) +} + +var xxx_messageInfo_SystemParameterRule proto.InternalMessageInfo + +func (m *SystemParameterRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *SystemParameterRule) GetParameters() []*SystemParameter { + if m != nil { + return m.Parameters + } + return nil +} + +// Define a parameter's name and location. The parameter may be passed as either +// an HTTP header or a URL query parameter, and if both are passed the behavior +// is implementation-dependent. +type SystemParameter struct { + // Define the name of the parameter, such as "api_key" . It is case sensitive. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Define the HTTP header name to use for the parameter. It is case + // insensitive. + HttpHeader string `protobuf:"bytes,2,opt,name=http_header,json=httpHeader,proto3" json:"http_header,omitempty"` + // Define the URL query parameter name to use for the parameter. It is case + // sensitive. 
+ UrlQueryParameter string `protobuf:"bytes,3,opt,name=url_query_parameter,json=urlQueryParameter,proto3" json:"url_query_parameter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SystemParameter) Reset() { *m = SystemParameter{} } +func (m *SystemParameter) String() string { return proto.CompactTextString(m) } +func (*SystemParameter) ProtoMessage() {} +func (*SystemParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_system_parameter_2f7efca4f2242165, []int{2} +} +func (m *SystemParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SystemParameter.Unmarshal(m, b) +} +func (m *SystemParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SystemParameter.Marshal(b, m, deterministic) +} +func (dst *SystemParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_SystemParameter.Merge(dst, src) +} +func (m *SystemParameter) XXX_Size() int { + return xxx_messageInfo_SystemParameter.Size(m) +} +func (m *SystemParameter) XXX_DiscardUnknown() { + xxx_messageInfo_SystemParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_SystemParameter proto.InternalMessageInfo + +func (m *SystemParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SystemParameter) GetHttpHeader() string { + if m != nil { + return m.HttpHeader + } + return "" +} + +func (m *SystemParameter) GetUrlQueryParameter() string { + if m != nil { + return m.UrlQueryParameter + } + return "" +} + +func init() { + proto.RegisterType((*SystemParameters)(nil), "google.api.SystemParameters") + proto.RegisterType((*SystemParameterRule)(nil), "google.api.SystemParameterRule") + proto.RegisterType((*SystemParameter)(nil), "google.api.SystemParameter") +} + +func init() { + proto.RegisterFile("google/api/system_parameter.proto", fileDescriptor_system_parameter_2f7efca4f2242165) +} + +var fileDescriptor_system_parameter_2f7efca4f2242165 = []byte{ + // 286 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xbf, 0x4e, 0xc3, 0x30, + 0x10, 0x87, 0x95, 0xb6, 0x20, 0xb8, 0x4a, 0xfc, 0x71, 0x19, 0x22, 0x18, 0x5a, 0x3a, 0x75, 0x72, + 0x24, 0x10, 0x53, 0x27, 0x2a, 0x21, 0xe8, 0x16, 0xca, 0xc6, 0x12, 0x99, 0x70, 0xb8, 0x91, 0x9c, + 0xd8, 0x9c, 0x9d, 0x48, 0x7d, 0x1d, 0x9e, 0x14, 0xc5, 0x29, 0x69, 0x89, 0x10, 0x9b, 0xef, 0xbe, + 0xcf, 0xfa, 0x9d, 0xee, 0xe0, 0x5a, 0x6a, 0x2d, 0x15, 0x46, 0xc2, 0x64, 0x91, 0xdd, 0x58, 0x87, + 0x79, 0x62, 0x04, 0x89, 0x1c, 0x1d, 0x12, 0x37, 0xa4, 0x9d, 0x66, 0xd0, 0x28, 0x5c, 0x98, 0x6c, + 0xba, 0x84, 0xb3, 0x17, 0x6f, 0xc5, 0x3f, 0x92, 0x65, 0x77, 0x70, 0x40, 0xa5, 0x42, 0x1b, 0x06, + 0x93, 0xfe, 0x6c, 0x78, 0x33, 0xe6, 0x3b, 0x9f, 0x77, 0xe4, 0x55, 0xa9, 0x70, 0xd5, 0xd8, 0xd3, + 0x02, 0x46, 0x7f, 0x50, 0x76, 0x09, 0x47, 0x16, 0x15, 0xa6, 0x4e, 0x53, 0x18, 0x4c, 0x82, 0xd9, + 0xf1, 0xaa, 0xad, 0xd9, 0x1c, 0xa0, 0x1d, 0xce, 0x86, 0x3d, 0x1f, 0x77, 0xf5, 0x5f, 0xdc, 0x9e, + 0x3e, 0xad, 0xe0, 0xb4, 0x83, 0x19, 0x83, 0x41, 0x21, 0x72, 0xdc, 0xe6, 0xf8, 0x37, 0x1b, 0xc3, + 0x70, 0xed, 0x9c, 0x49, 0xd6, 0x28, 0xde, 0x91, 0xc2, 0x9e, 0x47, 0x50, 0xb7, 0x9e, 0x7c, 0x87, + 0x71, 0x18, 0x95, 0xa4, 0x92, 0xcf, 0x12, 0x69, 0xb3, 0xdb, 0x55, 0xd8, 0xf7, 0xe2, 0x79, 0x49, + 0xea, 0xb9, 0x26, 0x6d, 0xc8, 0xa2, 0x82, 0x93, 0x54, 0xe7, 0x7b, 0x53, 0x2e, 0x2e, 0x3a, 0x73, + 0xc4, 0xf5, 0x9a, 0xe3, 0xe0, 0xf5, 0x61, 0xeb, 0x48, 0xad, 0x44, 0x21, 0xb9, 0x26, 0x19, 0x49, + 0x2c, 0xfc, 
0x11, 0xa2, 0x06, 0x09, 0x93, 0xd9, 0xe6, 0x54, 0x48, 0x55, 0x96, 0x62, 0xaa, 0x8b, + 0x8f, 0x4c, 0xce, 0x7f, 0x55, 0x5f, 0xbd, 0xc1, 0xe3, 0x7d, 0xbc, 0x7c, 0x3b, 0xf4, 0x1f, 0x6f, + 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x5e, 0xdf, 0x2e, 0x09, 0xe2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/usage.pb.go b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/usage.pb.go new file mode 100644 index 0000000..9ab776e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/serviceconfig/usage.pb.go @@ -0,0 +1,208 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/usage.proto + +package serviceconfig // import "google.golang.org/genproto/googleapis/api/serviceconfig" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Configuration controlling usage of a service. +type Usage struct { + // Requirements that must be satisfied before a consumer project can use the + // service. Each requirement is of the form /; + // for example 'serviceusage.googleapis.com/billing-enabled'. + Requirements []string `protobuf:"bytes,1,rep,name=requirements,proto3" json:"requirements,omitempty"` + // A list of usage rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + Rules []*UsageRule `protobuf:"bytes,6,rep,name=rules,proto3" json:"rules,omitempty"` + // The full resource name of a channel used for sending notifications to the + // service producer. + // + // Google Service Management currently only supports + // [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) as a notification + // channel. To use Google Cloud Pub/Sub as the channel, this must be the name + // of a Cloud Pub/Sub topic that uses the Cloud Pub/Sub topic name format + // documented in https://cloud.google.com/pubsub/docs/overview. 
+ ProducerNotificationChannel string `protobuf:"bytes,7,opt,name=producer_notification_channel,json=producerNotificationChannel,proto3" json:"producer_notification_channel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Usage) Reset() { *m = Usage{} } +func (m *Usage) String() string { return proto.CompactTextString(m) } +func (*Usage) ProtoMessage() {} +func (*Usage) Descriptor() ([]byte, []int) { + return fileDescriptor_usage_5ff3c0173739d6b5, []int{0} +} +func (m *Usage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Usage.Unmarshal(m, b) +} +func (m *Usage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Usage.Marshal(b, m, deterministic) +} +func (dst *Usage) XXX_Merge(src proto.Message) { + xxx_messageInfo_Usage.Merge(dst, src) +} +func (m *Usage) XXX_Size() int { + return xxx_messageInfo_Usage.Size(m) +} +func (m *Usage) XXX_DiscardUnknown() { + xxx_messageInfo_Usage.DiscardUnknown(m) +} + +var xxx_messageInfo_Usage proto.InternalMessageInfo + +func (m *Usage) GetRequirements() []string { + if m != nil { + return m.Requirements + } + return nil +} + +func (m *Usage) GetRules() []*UsageRule { + if m != nil { + return m.Rules + } + return nil +} + +func (m *Usage) GetProducerNotificationChannel() string { + if m != nil { + return m.ProducerNotificationChannel + } + return "" +} + +// Usage configuration rules for the service. +// +// NOTE: Under development. +// +// +// Use this rule to configure unregistered calls for the service. Unregistered +// calls are calls that do not contain consumer project identity. +// (Example: calls that do not contain an API key). +// By default, API methods do not allow unregistered calls, and each method call +// must be identified by a consumer project identity. Use this rule to +// allow/disallow unregistered calls. +// +// Example of an API that wants to allow unregistered calls for entire service. +// +// usage: +// rules: +// - selector: "*" +// allow_unregistered_calls: true +// +// Example of a method that wants to allow unregistered calls. +// +// usage: +// rules: +// - selector: "google.example.library.v1.LibraryService.CreateBook" +// allow_unregistered_calls: true +type UsageRule struct { + // Selects the methods to which this rule applies. Use '*' to indicate all + // methods in all APIs. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + Selector string `protobuf:"bytes,1,opt,name=selector,proto3" json:"selector,omitempty"` + // If true, the selected method allows unregistered calls, e.g. calls + // that don't identify any user or application. + AllowUnregisteredCalls bool `protobuf:"varint,2,opt,name=allow_unregistered_calls,json=allowUnregisteredCalls,proto3" json:"allow_unregistered_calls,omitempty"` + // If true, the selected method should skip service control and the control + // plane features, such as quota and billing, will not be available. + // This flag is used by Google Cloud Endpoints to bypass checks for internal + // methods, such as service health check methods. 
+ SkipServiceControl bool `protobuf:"varint,3,opt,name=skip_service_control,json=skipServiceControl,proto3" json:"skip_service_control,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UsageRule) Reset() { *m = UsageRule{} } +func (m *UsageRule) String() string { return proto.CompactTextString(m) } +func (*UsageRule) ProtoMessage() {} +func (*UsageRule) Descriptor() ([]byte, []int) { + return fileDescriptor_usage_5ff3c0173739d6b5, []int{1} +} +func (m *UsageRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UsageRule.Unmarshal(m, b) +} +func (m *UsageRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UsageRule.Marshal(b, m, deterministic) +} +func (dst *UsageRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_UsageRule.Merge(dst, src) +} +func (m *UsageRule) XXX_Size() int { + return xxx_messageInfo_UsageRule.Size(m) +} +func (m *UsageRule) XXX_DiscardUnknown() { + xxx_messageInfo_UsageRule.DiscardUnknown(m) +} + +var xxx_messageInfo_UsageRule proto.InternalMessageInfo + +func (m *UsageRule) GetSelector() string { + if m != nil { + return m.Selector + } + return "" +} + +func (m *UsageRule) GetAllowUnregisteredCalls() bool { + if m != nil { + return m.AllowUnregisteredCalls + } + return false +} + +func (m *UsageRule) GetSkipServiceControl() bool { + if m != nil { + return m.SkipServiceControl + } + return false +} + +func init() { + proto.RegisterType((*Usage)(nil), "google.api.Usage") + proto.RegisterType((*UsageRule)(nil), "google.api.UsageRule") +} + +func init() { proto.RegisterFile("google/api/usage.proto", fileDescriptor_usage_5ff3c0173739d6b5) } + +var fileDescriptor_usage_5ff3c0173739d6b5 = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x91, 0xc1, 0x4a, 0x33, 0x31, + 0x14, 0x85, 0x49, 0xfb, 0xb7, 0x7f, 0x1b, 0xc5, 0x45, 0xd0, 0x12, 0x14, 0x61, 0xe8, 0x6a, 0x40, + 0x98, 0x11, 0xdd, 0x08, 0xae, 0x6c, 0x11, 0x71, 0x23, 0x65, 0xa4, 0x1b, 0x37, 0x43, 0x4c, 0x6f, + 0x63, 0x30, 0xcd, 0x1d, 0x93, 0x8c, 0x3e, 0x88, 0x5b, 0x57, 0x3e, 0xa9, 0x4c, 0xa6, 0xd6, 0x76, + 0x79, 0xef, 0x77, 0xce, 0x49, 0x72, 0x42, 0x47, 0x0a, 0x51, 0x19, 0xc8, 0x45, 0xa5, 0xf3, 0xda, + 0x0b, 0x05, 0x59, 0xe5, 0x30, 0x20, 0xa3, 0xed, 0x3e, 0x13, 0x95, 0x1e, 0x7f, 0x11, 0xda, 0x9b, + 0x37, 0x8c, 0x8d, 0xe9, 0xbe, 0x83, 0xb7, 0x5a, 0x3b, 0x58, 0x81, 0x0d, 0x9e, 0x93, 0xa4, 0x9b, + 0x0e, 0x8b, 0x9d, 0x1d, 0x3b, 0xa3, 0x3d, 0x57, 0x1b, 0xf0, 0xbc, 0x9f, 0x74, 0xd3, 0xbd, 0x8b, + 0xa3, 0xec, 0x2f, 0x29, 0x8b, 0x29, 0x45, 0x6d, 0xa0, 0x68, 0x35, 0x6c, 0x42, 0x4f, 0x2b, 0x87, + 0x8b, 0x5a, 0x82, 0x2b, 0x2d, 0x06, 0xbd, 0xd4, 0x52, 0x04, 0x8d, 0xb6, 0x94, 0x2f, 0xc2, 0x5a, + 0x30, 0xfc, 0x7f, 0x42, 0xd2, 0x61, 0x71, 0xf2, 0x2b, 0x7a, 0xd8, 0xd2, 0x4c, 0x5b, 0xc9, 0xf8, + 0x93, 0xd0, 0xe1, 0x26, 0x98, 0x1d, 0xd3, 0x81, 0x07, 0x03, 0x32, 0xa0, 0xe3, 0x24, 0x9a, 0x37, + 0x33, 0xbb, 0xa2, 0x5c, 0x18, 0x83, 0x1f, 0x65, 0x6d, 0x1d, 0x28, 0xed, 0x03, 0x38, 0x58, 0x94, + 0x52, 0x18, 0xe3, 0x79, 0x27, 0x21, 0xe9, 0xa0, 0x18, 0x45, 0x3e, 0xdf, 0xc2, 0xd3, 0x86, 0xb2, + 0x73, 0x7a, 0xe8, 0x5f, 0x75, 0x55, 0x7a, 0x70, 0xef, 0x5a, 0x42, 0x29, 0xd1, 0x06, 0x87, 0x86, + 0x77, 0xa3, 0x8b, 0x35, 0xec, 0xb1, 0x45, 0xd3, 0x96, 0x4c, 0x0c, 0x3d, 0x90, 0xb8, 0xda, 0x7a, + 0xfc, 0x84, 0xc6, 0x4b, 0xce, 0x9a, 0x7a, 0x67, 0xe4, 0xe9, 0x76, 0x4d, 0x14, 0x1a, 0x61, 0x55, + 0x86, 0x4e, 0xe5, 0x0a, 0x6c, 0x2c, 0x3f, 0x6f, 0x91, 
0xa8, 0xb4, 0x8f, 0xff, 0xb2, 0x3e, 0x54, + 0xa2, 0x5d, 0x6a, 0x75, 0xbd, 0x33, 0x7d, 0x77, 0xfe, 0xdd, 0xdd, 0xcc, 0xee, 0x9f, 0xfb, 0xd1, + 0x78, 0xf9, 0x13, 0x00, 0x00, 0xff, 0xff, 0xd7, 0x64, 0x56, 0x3e, 0xcf, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/check_error.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/check_error.pb.go new file mode 100644 index 0000000..b023f26 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/check_error.pb.go @@ -0,0 +1,210 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicecontrol/v1/check_error.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Error codes for Check responses. +type CheckError_Code int32 + +const ( + // This is never used in `CheckResponse`. + CheckError_ERROR_CODE_UNSPECIFIED CheckError_Code = 0 + // The consumer's project id was not found. + // Same as [google.rpc.Code.NOT_FOUND][]. + CheckError_NOT_FOUND CheckError_Code = 5 + // The consumer doesn't have access to the specified resource. + // Same as [google.rpc.Code.PERMISSION_DENIED][]. + CheckError_PERMISSION_DENIED CheckError_Code = 7 + // Quota check failed. Same as [google.rpc.Code.RESOURCE_EXHAUSTED][]. + CheckError_RESOURCE_EXHAUSTED CheckError_Code = 8 + // The consumer hasn't activated the service. + CheckError_SERVICE_NOT_ACTIVATED CheckError_Code = 104 + // The consumer cannot access the service because billing is disabled. + CheckError_BILLING_DISABLED CheckError_Code = 107 + // The consumer's project has been marked as deleted (soft deletion). + CheckError_PROJECT_DELETED CheckError_Code = 108 + // The consumer's project number or id does not represent a valid project. + CheckError_PROJECT_INVALID CheckError_Code = 114 + // The IP address of the consumer is invalid for the specific consumer + // project. + CheckError_IP_ADDRESS_BLOCKED CheckError_Code = 109 + // The referer address of the consumer request is invalid for the specific + // consumer project. + CheckError_REFERER_BLOCKED CheckError_Code = 110 + // The client application of the consumer request is invalid for the + // specific consumer project. + CheckError_CLIENT_APP_BLOCKED CheckError_Code = 111 + // The API targeted by this request is invalid for the specified consumer + // project. + CheckError_API_TARGET_BLOCKED CheckError_Code = 122 + // The consumer's API key is invalid. + CheckError_API_KEY_INVALID CheckError_Code = 105 + // The consumer's API Key has expired. + CheckError_API_KEY_EXPIRED CheckError_Code = 112 + // The consumer's API Key was not found in config record. + CheckError_API_KEY_NOT_FOUND CheckError_Code = 113 + // The backend server for looking up project id/number is unavailable. 
+ CheckError_NAMESPACE_LOOKUP_UNAVAILABLE CheckError_Code = 300 + // The backend server for checking service status is unavailable. + CheckError_SERVICE_STATUS_UNAVAILABLE CheckError_Code = 301 + // The backend server for checking billing status is unavailable. + CheckError_BILLING_STATUS_UNAVAILABLE CheckError_Code = 302 +) + +var CheckError_Code_name = map[int32]string{ + 0: "ERROR_CODE_UNSPECIFIED", + 5: "NOT_FOUND", + 7: "PERMISSION_DENIED", + 8: "RESOURCE_EXHAUSTED", + 104: "SERVICE_NOT_ACTIVATED", + 107: "BILLING_DISABLED", + 108: "PROJECT_DELETED", + 114: "PROJECT_INVALID", + 109: "IP_ADDRESS_BLOCKED", + 110: "REFERER_BLOCKED", + 111: "CLIENT_APP_BLOCKED", + 122: "API_TARGET_BLOCKED", + 105: "API_KEY_INVALID", + 112: "API_KEY_EXPIRED", + 113: "API_KEY_NOT_FOUND", + 300: "NAMESPACE_LOOKUP_UNAVAILABLE", + 301: "SERVICE_STATUS_UNAVAILABLE", + 302: "BILLING_STATUS_UNAVAILABLE", +} +var CheckError_Code_value = map[string]int32{ + "ERROR_CODE_UNSPECIFIED": 0, + "NOT_FOUND": 5, + "PERMISSION_DENIED": 7, + "RESOURCE_EXHAUSTED": 8, + "SERVICE_NOT_ACTIVATED": 104, + "BILLING_DISABLED": 107, + "PROJECT_DELETED": 108, + "PROJECT_INVALID": 114, + "IP_ADDRESS_BLOCKED": 109, + "REFERER_BLOCKED": 110, + "CLIENT_APP_BLOCKED": 111, + "API_TARGET_BLOCKED": 122, + "API_KEY_INVALID": 105, + "API_KEY_EXPIRED": 112, + "API_KEY_NOT_FOUND": 113, + "NAMESPACE_LOOKUP_UNAVAILABLE": 300, + "SERVICE_STATUS_UNAVAILABLE": 301, + "BILLING_STATUS_UNAVAILABLE": 302, +} + +func (x CheckError_Code) String() string { + return proto.EnumName(CheckError_Code_name, int32(x)) +} +func (CheckError_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_check_error_95aa382d11b62e53, []int{0, 0} +} + +// Defines the errors to be returned in +// [google.api.servicecontrol.v1.CheckResponse.check_errors][google.api.servicecontrol.v1.CheckResponse.check_errors]. +type CheckError struct { + // The error code. + Code CheckError_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.api.servicecontrol.v1.CheckError_Code" json:"code,omitempty"` + // Free-form text providing details on the error cause of the error. 
+ Detail string `protobuf:"bytes,2,opt,name=detail,proto3" json:"detail,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckError) Reset() { *m = CheckError{} } +func (m *CheckError) String() string { return proto.CompactTextString(m) } +func (*CheckError) ProtoMessage() {} +func (*CheckError) Descriptor() ([]byte, []int) { + return fileDescriptor_check_error_95aa382d11b62e53, []int{0} +} +func (m *CheckError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckError.Unmarshal(m, b) +} +func (m *CheckError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckError.Marshal(b, m, deterministic) +} +func (dst *CheckError) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckError.Merge(dst, src) +} +func (m *CheckError) XXX_Size() int { + return xxx_messageInfo_CheckError.Size(m) +} +func (m *CheckError) XXX_DiscardUnknown() { + xxx_messageInfo_CheckError.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckError proto.InternalMessageInfo + +func (m *CheckError) GetCode() CheckError_Code { + if m != nil { + return m.Code + } + return CheckError_ERROR_CODE_UNSPECIFIED +} + +func (m *CheckError) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func init() { + proto.RegisterType((*CheckError)(nil), "google.api.servicecontrol.v1.CheckError") + proto.RegisterEnum("google.api.servicecontrol.v1.CheckError_Code", CheckError_Code_name, CheckError_Code_value) +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/check_error.proto", fileDescriptor_check_error_95aa382d11b62e53) +} + +var fileDescriptor_check_error_95aa382d11b62e53 = []byte{ + // 493 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x49, 0x29, 0x83, 0x59, 0x82, 0x05, 0xc3, 0xaa, 0x51, 0x55, 0xa2, 0xec, 0xb4, 0x0b, + 0x89, 0x06, 0x47, 0x4e, 0xae, 0xfd, 0x76, 0x78, 0xcd, 0x12, 0xcb, 0x4e, 0xaa, 0xc1, 0xc5, 0x0a, + 0x69, 0x94, 0x45, 0xeb, 0xe2, 0x92, 0x56, 0x3d, 0x70, 0xe6, 0xc3, 0x70, 0x00, 0x3e, 0x02, 0x9f, + 0x8b, 0xe3, 0xe4, 0x76, 0xfd, 0x27, 0x4d, 0x3b, 0xe6, 0xe7, 0xdf, 0xfb, 0xbc, 0xca, 0xab, 0x07, + 0x79, 0x85, 0x31, 0xc5, 0x38, 0xf7, 0xd3, 0x49, 0xe9, 0x4f, 0xf3, 0x7a, 0x5e, 0x66, 0x79, 0x66, + 0xaa, 0x59, 0x6d, 0xc6, 0xfe, 0xfc, 0xd4, 0xcf, 0xae, 0xf2, 0xec, 0x5a, 0xe7, 0x75, 0x6d, 0x6a, + 0x6f, 0x52, 0x9b, 0x99, 0xc1, 0x9d, 0xa5, 0xef, 0xa5, 0x93, 0xd2, 0xdb, 0xf5, 0xbd, 0xf9, 0x69, + 0xbb, 0xb3, 0x95, 0x96, 0x56, 0x95, 0x99, 0xa5, 0xb3, 0xd2, 0x54, 0xd3, 0xe5, 0xec, 0xf1, 0xaf, + 0x26, 0x42, 0xd4, 0x26, 0x82, 0x0d, 0xc4, 0x04, 0x35, 0x33, 0x33, 0xca, 0x8f, 0x9c, 0xae, 0x73, + 0xf2, 0xe2, 0xc3, 0x7b, 0xef, 0xa1, 0x64, 0x6f, 0x33, 0xe7, 0x51, 0x33, 0xca, 0xe5, 0x62, 0x14, + 0xb7, 0xd0, 0xde, 0x28, 0x9f, 0xa5, 0xe5, 0xf8, 0xa8, 0xd1, 0x75, 0x4e, 0xf6, 0xe5, 0xdd, 0xd7, + 0xf1, 0xbf, 0xc7, 0xa8, 0x69, 0x35, 0xdc, 0x46, 0x2d, 0x90, 0x32, 0x92, 0x9a, 0x46, 0x0c, 0x74, + 0x12, 0x2a, 0x01, 0x94, 0xf7, 0x39, 0x30, 0xf7, 0x11, 0x7e, 0x8e, 0xf6, 0xc3, 0x28, 0xd6, 0xfd, + 0x28, 0x09, 0x99, 0xfb, 0x04, 0x1f, 0xa2, 0x97, 0x02, 0xe4, 0x05, 0x57, 0x8a, 0x47, 0xa1, 0x66, + 0x10, 0x5a, 0xeb, 0x29, 0x6e, 0x21, 0x2c, 0x41, 0x45, 0x89, 0xa4, 0xa0, 0xe1, 0xf2, 0x33, 0x49, + 0x54, 0x0c, 0xcc, 0x7d, 0x86, 0xdf, 0xa0, 0x43, 0x05, 0x72, 0xc8, 0x29, 0x68, 0x9b, 0x42, 0x68, + 0xcc, 0x87, 0xc4, 0x3e, 0x5d, 0xe1, 0xd7, 0xc8, 0xed, 0xf1, 0x20, 0xe0, 0xe1, 0x99, 0x66, 0x5c, + 0x91, 
0x5e, 0x00, 0xcc, 0xbd, 0xc6, 0xaf, 0xd0, 0x81, 0x90, 0xd1, 0x39, 0xd0, 0x58, 0x33, 0x08, + 0xc0, 0xaa, 0xe3, 0x6d, 0xc8, 0xc3, 0x21, 0x09, 0x38, 0x73, 0x6b, 0xbb, 0x92, 0x0b, 0x4d, 0x18, + 0x93, 0xa0, 0x94, 0xee, 0x05, 0x11, 0x1d, 0x00, 0x73, 0x6f, 0xac, 0x2c, 0xa1, 0x0f, 0x12, 0xe4, + 0x1a, 0x56, 0x56, 0xa6, 0x01, 0x87, 0x30, 0xd6, 0x44, 0x88, 0x35, 0x37, 0x96, 0x13, 0xc1, 0x75, + 0x4c, 0xe4, 0x19, 0xc4, 0x6b, 0xfe, 0xc3, 0x86, 0x58, 0x3e, 0x80, 0x2f, 0xeb, 0x8d, 0xe5, 0x36, + 0x84, 0x4b, 0xc1, 0x25, 0x30, 0x77, 0x62, 0x0f, 0xb2, 0x82, 0x9b, 0x3b, 0x7d, 0xc7, 0xef, 0x50, + 0x27, 0x24, 0x17, 0xa0, 0x04, 0xa1, 0xa0, 0x83, 0x28, 0x1a, 0x24, 0x42, 0x27, 0x21, 0x19, 0x12, + 0x1e, 0xd8, 0x5f, 0x75, 0x7f, 0x37, 0xf0, 0x5b, 0xd4, 0x5e, 0xdd, 0x46, 0xc5, 0x24, 0x4e, 0xd4, + 0x8e, 0xf0, 0x67, 0x21, 0xac, 0x2e, 0x74, 0x8f, 0xf0, 0xb7, 0xd1, 0xfb, 0xe9, 0xa0, 0x6e, 0x66, + 0x6e, 0x1e, 0xec, 0x44, 0xef, 0x60, 0x53, 0x0a, 0x61, 0x0b, 0x26, 0x9c, 0xaf, 0xe7, 0x77, 0x03, + 0x85, 0x19, 0xa7, 0x55, 0xe1, 0x99, 0xba, 0xf0, 0x8b, 0xbc, 0x5a, 0xd4, 0xcf, 0x5f, 0x3e, 0xa5, + 0x93, 0x72, 0x7a, 0x7f, 0xdb, 0x3f, 0xed, 0x92, 0xff, 0x8e, 0xf3, 0x6d, 0x6f, 0x31, 0xf9, 0xf1, + 0x36, 0x00, 0x00, 0xff, 0xff, 0x40, 0xf4, 0xc8, 0x44, 0x26, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/distribution.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/distribution.pb.go new file mode 100644 index 0000000..35139f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/distribution.pb.go @@ -0,0 +1,528 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicecontrol/v1/distribution.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Distribution represents a frequency distribution of double-valued sample +// points. It contains the size of the population of sample points plus +// additional optional information: +// +// - the arithmetic mean of the samples +// - the minimum and maximum of the samples +// - the sum-squared-deviation of the samples, used to compute variance +// - a histogram of the values of the sample points +type Distribution struct { + // The total number of samples in the distribution. Must be >= 0. + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + // The arithmetic mean of the samples in the distribution. If `count` is + // zero then this field must be zero. + Mean float64 `protobuf:"fixed64,2,opt,name=mean,proto3" json:"mean,omitempty"` + // The minimum of the population of values. Ignored if `count` is zero. + Minimum float64 `protobuf:"fixed64,3,opt,name=minimum,proto3" json:"minimum,omitempty"` + // The maximum of the population of values. Ignored if `count` is zero. 
+ Maximum float64 `protobuf:"fixed64,4,opt,name=maximum,proto3" json:"maximum,omitempty"` + // The sum of squared deviations from the mean: + // Sum[i=1..count]((x_i - mean)^2) + // where each x_i is a sample values. If `count` is zero then this field + // must be zero, otherwise validation of the request fails. + SumOfSquaredDeviation float64 `protobuf:"fixed64,5,opt,name=sum_of_squared_deviation,json=sumOfSquaredDeviation,proto3" json:"sum_of_squared_deviation,omitempty"` + // The number of samples in each histogram bucket. `bucket_counts` are + // optional. If present, they must sum to the `count` value. + // + // The buckets are defined below in `bucket_option`. There are N buckets. + // `bucket_counts[0]` is the number of samples in the underflow bucket. + // `bucket_counts[1]` to `bucket_counts[N-1]` are the numbers of samples + // in each of the finite buckets. And `bucket_counts[N] is the number + // of samples in the overflow bucket. See the comments of `bucket_option` + // below for more details. + // + // Any suffix of trailing zeros may be omitted. + BucketCounts []int64 `protobuf:"varint,6,rep,packed,name=bucket_counts,json=bucketCounts,proto3" json:"bucket_counts,omitempty"` + // Defines the buckets in the histogram. `bucket_option` and `bucket_counts` + // must be both set, or both unset. + // + // Buckets are numbered in the range of [0, N], with a total of N+1 buckets. + // There must be at least two buckets (a single-bucket histogram gives + // no information that isn't already provided by `count`). + // + // The first bucket is the underflow bucket which has a lower bound + // of -inf. The last bucket is the overflow bucket which has an + // upper bound of +inf. All other buckets (if any) are called "finite" + // buckets because they have finite lower and upper bounds. As described + // below, there are three ways to define the finite buckets. + // + // (1) Buckets with constant width. + // (2) Buckets with exponentially growing widths. + // (3) Buckets with arbitrary user-provided widths. + // + // In all cases, the buckets cover the entire real number line (-inf, + // +inf). Bucket upper bounds are exclusive and lower bounds are + // inclusive. The upper bound of the underflow bucket is equal to the + // lower bound of the smallest finite bucket; the lower bound of the + // overflow bucket is equal to the upper bound of the largest finite + // bucket. 
+ // + // Types that are valid to be assigned to BucketOption: + // *Distribution_LinearBuckets_ + // *Distribution_ExponentialBuckets_ + // *Distribution_ExplicitBuckets_ + BucketOption isDistribution_BucketOption `protobuf_oneof:"bucket_option"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution) Reset() { *m = Distribution{} } +func (m *Distribution) String() string { return proto.CompactTextString(m) } +func (*Distribution) ProtoMessage() {} +func (*Distribution) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_807f43a240f7aec0, []int{0} +} +func (m *Distribution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution.Unmarshal(m, b) +} +func (m *Distribution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution.Marshal(b, m, deterministic) +} +func (dst *Distribution) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution.Merge(dst, src) +} +func (m *Distribution) XXX_Size() int { + return xxx_messageInfo_Distribution.Size(m) +} +func (m *Distribution) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution proto.InternalMessageInfo + +func (m *Distribution) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *Distribution) GetMean() float64 { + if m != nil { + return m.Mean + } + return 0 +} + +func (m *Distribution) GetMinimum() float64 { + if m != nil { + return m.Minimum + } + return 0 +} + +func (m *Distribution) GetMaximum() float64 { + if m != nil { + return m.Maximum + } + return 0 +} + +func (m *Distribution) GetSumOfSquaredDeviation() float64 { + if m != nil { + return m.SumOfSquaredDeviation + } + return 0 +} + +func (m *Distribution) GetBucketCounts() []int64 { + if m != nil { + return m.BucketCounts + } + return nil +} + +type isDistribution_BucketOption interface { + isDistribution_BucketOption() +} + +type Distribution_LinearBuckets_ struct { + LinearBuckets *Distribution_LinearBuckets `protobuf:"bytes,7,opt,name=linear_buckets,json=linearBuckets,proto3,oneof"` +} + +type Distribution_ExponentialBuckets_ struct { + ExponentialBuckets *Distribution_ExponentialBuckets `protobuf:"bytes,8,opt,name=exponential_buckets,json=exponentialBuckets,proto3,oneof"` +} + +type Distribution_ExplicitBuckets_ struct { + ExplicitBuckets *Distribution_ExplicitBuckets `protobuf:"bytes,9,opt,name=explicit_buckets,json=explicitBuckets,proto3,oneof"` +} + +func (*Distribution_LinearBuckets_) isDistribution_BucketOption() {} + +func (*Distribution_ExponentialBuckets_) isDistribution_BucketOption() {} + +func (*Distribution_ExplicitBuckets_) isDistribution_BucketOption() {} + +func (m *Distribution) GetBucketOption() isDistribution_BucketOption { + if m != nil { + return m.BucketOption + } + return nil +} + +func (m *Distribution) GetLinearBuckets() *Distribution_LinearBuckets { + if x, ok := m.GetBucketOption().(*Distribution_LinearBuckets_); ok { + return x.LinearBuckets + } + return nil +} + +func (m *Distribution) GetExponentialBuckets() *Distribution_ExponentialBuckets { + if x, ok := m.GetBucketOption().(*Distribution_ExponentialBuckets_); ok { + return x.ExponentialBuckets + } + return nil +} + +func (m *Distribution) GetExplicitBuckets() *Distribution_ExplicitBuckets { + if x, ok := m.GetBucketOption().(*Distribution_ExplicitBuckets_); ok { + return x.ExplicitBuckets + } + return nil +} + +// XXX_OneofFuncs is for the internal use 
of the proto package. +func (*Distribution) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Distribution_OneofMarshaler, _Distribution_OneofUnmarshaler, _Distribution_OneofSizer, []interface{}{ + (*Distribution_LinearBuckets_)(nil), + (*Distribution_ExponentialBuckets_)(nil), + (*Distribution_ExplicitBuckets_)(nil), + } +} + +func _Distribution_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Distribution) + // bucket_option + switch x := m.BucketOption.(type) { + case *Distribution_LinearBuckets_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LinearBuckets); err != nil { + return err + } + case *Distribution_ExponentialBuckets_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExponentialBuckets); err != nil { + return err + } + case *Distribution_ExplicitBuckets_: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExplicitBuckets); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Distribution.BucketOption has unexpected type %T", x) + } + return nil +} + +func _Distribution_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Distribution) + switch tag { + case 7: // bucket_option.linear_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_LinearBuckets) + err := b.DecodeMessage(msg) + m.BucketOption = &Distribution_LinearBuckets_{msg} + return true, err + case 8: // bucket_option.exponential_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_ExponentialBuckets) + err := b.DecodeMessage(msg) + m.BucketOption = &Distribution_ExponentialBuckets_{msg} + return true, err + case 9: // bucket_option.explicit_buckets + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution_ExplicitBuckets) + err := b.DecodeMessage(msg) + m.BucketOption = &Distribution_ExplicitBuckets_{msg} + return true, err + default: + return false, nil + } +} + +func _Distribution_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Distribution) + // bucket_option + switch x := m.BucketOption.(type) { + case *Distribution_LinearBuckets_: + s := proto.Size(x.LinearBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Distribution_ExponentialBuckets_: + s := proto.Size(x.ExponentialBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Distribution_ExplicitBuckets_: + s := proto.Size(x.ExplicitBuckets) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describing buckets with constant width. +type Distribution_LinearBuckets struct { + // The number of finite buckets. With the underflow and overflow buckets, + // the total number of buckets is `num_finite_buckets` + 2. + // See comments on `bucket_options` for details. + NumFiniteBuckets int32 `protobuf:"varint,1,opt,name=num_finite_buckets,json=numFiniteBuckets,proto3" json:"num_finite_buckets,omitempty"` + // The i'th linear bucket covers the interval + // [offset + (i-1) * width, offset + i * width) + // where i ranges from 1 to num_finite_buckets, inclusive. + // Must be strictly positive. 
+ Width float64 `protobuf:"fixed64,2,opt,name=width,proto3" json:"width,omitempty"` + // The i'th linear bucket covers the interval + // [offset + (i-1) * width, offset + i * width) + // where i ranges from 1 to num_finite_buckets, inclusive. + Offset float64 `protobuf:"fixed64,3,opt,name=offset,proto3" json:"offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_LinearBuckets) Reset() { *m = Distribution_LinearBuckets{} } +func (m *Distribution_LinearBuckets) String() string { return proto.CompactTextString(m) } +func (*Distribution_LinearBuckets) ProtoMessage() {} +func (*Distribution_LinearBuckets) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_807f43a240f7aec0, []int{0, 0} +} +func (m *Distribution_LinearBuckets) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_LinearBuckets.Unmarshal(m, b) +} +func (m *Distribution_LinearBuckets) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_LinearBuckets.Marshal(b, m, deterministic) +} +func (dst *Distribution_LinearBuckets) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_LinearBuckets.Merge(dst, src) +} +func (m *Distribution_LinearBuckets) XXX_Size() int { + return xxx_messageInfo_Distribution_LinearBuckets.Size(m) +} +func (m *Distribution_LinearBuckets) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_LinearBuckets.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_LinearBuckets proto.InternalMessageInfo + +func (m *Distribution_LinearBuckets) GetNumFiniteBuckets() int32 { + if m != nil { + return m.NumFiniteBuckets + } + return 0 +} + +func (m *Distribution_LinearBuckets) GetWidth() float64 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Distribution_LinearBuckets) GetOffset() float64 { + if m != nil { + return m.Offset + } + return 0 +} + +// Describing buckets with exponentially growing width. +type Distribution_ExponentialBuckets struct { + // The number of finite buckets. With the underflow and overflow buckets, + // the total number of buckets is `num_finite_buckets` + 2. + // See comments on `bucket_options` for details. + NumFiniteBuckets int32 `protobuf:"varint,1,opt,name=num_finite_buckets,json=numFiniteBuckets,proto3" json:"num_finite_buckets,omitempty"` + // The i'th exponential bucket covers the interval + // [scale * growth_factor^(i-1), scale * growth_factor^i) + // where i ranges from 1 to num_finite_buckets inclusive. + // Must be larger than 1.0. + GrowthFactor float64 `protobuf:"fixed64,2,opt,name=growth_factor,json=growthFactor,proto3" json:"growth_factor,omitempty"` + // The i'th exponential bucket covers the interval + // [scale * growth_factor^(i-1), scale * growth_factor^i) + // where i ranges from 1 to num_finite_buckets inclusive. + // Must be > 0. 
+ Scale float64 `protobuf:"fixed64,3,opt,name=scale,proto3" json:"scale,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_ExponentialBuckets) Reset() { *m = Distribution_ExponentialBuckets{} } +func (m *Distribution_ExponentialBuckets) String() string { return proto.CompactTextString(m) } +func (*Distribution_ExponentialBuckets) ProtoMessage() {} +func (*Distribution_ExponentialBuckets) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_807f43a240f7aec0, []int{0, 1} +} +func (m *Distribution_ExponentialBuckets) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_ExponentialBuckets.Unmarshal(m, b) +} +func (m *Distribution_ExponentialBuckets) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_ExponentialBuckets.Marshal(b, m, deterministic) +} +func (dst *Distribution_ExponentialBuckets) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_ExponentialBuckets.Merge(dst, src) +} +func (m *Distribution_ExponentialBuckets) XXX_Size() int { + return xxx_messageInfo_Distribution_ExponentialBuckets.Size(m) +} +func (m *Distribution_ExponentialBuckets) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_ExponentialBuckets.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_ExponentialBuckets proto.InternalMessageInfo + +func (m *Distribution_ExponentialBuckets) GetNumFiniteBuckets() int32 { + if m != nil { + return m.NumFiniteBuckets + } + return 0 +} + +func (m *Distribution_ExponentialBuckets) GetGrowthFactor() float64 { + if m != nil { + return m.GrowthFactor + } + return 0 +} + +func (m *Distribution_ExponentialBuckets) GetScale() float64 { + if m != nil { + return m.Scale + } + return 0 +} + +// Describing buckets with arbitrary user-provided width. +type Distribution_ExplicitBuckets struct { + // 'bound' is a list of strictly increasing boundaries between + // buckets. Note that a list of length N-1 defines N buckets because + // of fenceposting. See comments on `bucket_options` for details. + // + // The i'th finite bucket covers the interval + // [bound[i-1], bound[i]) + // where i ranges from 1 to bound_size() - 1. Note that there are no + // finite buckets at all if 'bound' only contains a single element; in + // that special case the single bound defines the boundary between the + // underflow and overflow buckets. 
+ // + // bucket number lower bound upper bound + // i == 0 (underflow) -inf bound[i] + // 0 < i < bound_size() bound[i-1] bound[i] + // i == bound_size() (overflow) bound[i-1] +inf + Bounds []float64 `protobuf:"fixed64,1,rep,packed,name=bounds,proto3" json:"bounds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution_ExplicitBuckets) Reset() { *m = Distribution_ExplicitBuckets{} } +func (m *Distribution_ExplicitBuckets) String() string { return proto.CompactTextString(m) } +func (*Distribution_ExplicitBuckets) ProtoMessage() {} +func (*Distribution_ExplicitBuckets) Descriptor() ([]byte, []int) { + return fileDescriptor_distribution_807f43a240f7aec0, []int{0, 2} +} +func (m *Distribution_ExplicitBuckets) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution_ExplicitBuckets.Unmarshal(m, b) +} +func (m *Distribution_ExplicitBuckets) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution_ExplicitBuckets.Marshal(b, m, deterministic) +} +func (dst *Distribution_ExplicitBuckets) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution_ExplicitBuckets.Merge(dst, src) +} +func (m *Distribution_ExplicitBuckets) XXX_Size() int { + return xxx_messageInfo_Distribution_ExplicitBuckets.Size(m) +} +func (m *Distribution_ExplicitBuckets) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution_ExplicitBuckets.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution_ExplicitBuckets proto.InternalMessageInfo + +func (m *Distribution_ExplicitBuckets) GetBounds() []float64 { + if m != nil { + return m.Bounds + } + return nil +} + +func init() { + proto.RegisterType((*Distribution)(nil), "google.api.servicecontrol.v1.Distribution") + proto.RegisterType((*Distribution_LinearBuckets)(nil), "google.api.servicecontrol.v1.Distribution.LinearBuckets") + proto.RegisterType((*Distribution_ExponentialBuckets)(nil), "google.api.servicecontrol.v1.Distribution.ExponentialBuckets") + proto.RegisterType((*Distribution_ExplicitBuckets)(nil), "google.api.servicecontrol.v1.Distribution.ExplicitBuckets") +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/distribution.proto", fileDescriptor_distribution_807f43a240f7aec0) +} + +var fileDescriptor_distribution_807f43a240f7aec0 = []byte{ + // 486 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0x86, 0x31, 0x6e, 0x52, 0x18, 0x12, 0x52, 0x96, 0x82, 0xac, 0x88, 0x83, 0x45, 0x2f, 0x41, + 0x42, 0xb6, 0x0a, 0x07, 0x10, 0x88, 0x4b, 0x28, 0x15, 0x42, 0x48, 0x54, 0xe6, 0xc6, 0xc5, 0xda, + 0xd8, 0x6b, 0x77, 0x55, 0x7b, 0xc7, 0xec, 0x47, 0x9a, 0x0b, 0x57, 0x7e, 0x0f, 0x3f, 0x8f, 0x23, + 0xf2, 0xae, 0xf3, 0x45, 0xab, 0x48, 0xb9, 0xf9, 0x7d, 0xc7, 0x33, 0xcf, 0xec, 0x68, 0x06, 0xe2, + 0x12, 0xb1, 0xac, 0x58, 0x4c, 0x1b, 0x1e, 0x2b, 0x26, 0xe7, 0x3c, 0x63, 0x19, 0x0a, 0x2d, 0xb1, + 0x8a, 0xe7, 0xa7, 0x71, 0xce, 0x95, 0x96, 0x7c, 0x66, 0x34, 0x47, 0x11, 0x35, 0x12, 0x35, 0x92, + 0x67, 0x2e, 0x21, 0xa2, 0x0d, 0x8f, 0xb6, 0x13, 0xa2, 0xf9, 0xe9, 0xf3, 0x3f, 0x7d, 0x18, 0x9c, + 0x6d, 0x24, 0x91, 0x63, 0xe8, 0x65, 0x68, 0x84, 0x0e, 0xbc, 0xd0, 0x9b, 0xf8, 0x89, 0x13, 0x84, + 0xc0, 0x41, 0xcd, 0xa8, 0x08, 0xee, 0x86, 0xde, 0xc4, 0x4b, 0xec, 0x37, 0x09, 0xe0, 0xb0, 0xe6, + 0x82, 0xd7, 0xa6, 0x0e, 0x7c, 0x6b, 0x2f, 0xa5, 0x8d, 0xd0, 0x85, 0x8d, 0x1c, 0x74, 0x11, 0x27, + 0xc9, 0x1b, 0x08, 0x94, 0xa9, 0x53, 0x2c, 0x52, 0xf5, 0xd3, 
0x50, 0xc9, 0xf2, 0x34, 0x67, 0x73, + 0x4e, 0x5b, 0x72, 0xd0, 0xb3, 0xbf, 0x3e, 0x51, 0xa6, 0xfe, 0x56, 0x7c, 0x77, 0xd1, 0xb3, 0x65, + 0x90, 0x9c, 0xc0, 0x70, 0x66, 0xb2, 0x2b, 0xa6, 0x53, 0xdb, 0x90, 0x0a, 0xfa, 0xa1, 0x3f, 0xf1, + 0x93, 0x81, 0x33, 0x3f, 0x5a, 0x8f, 0x50, 0x78, 0x58, 0x71, 0xc1, 0xa8, 0x4c, 0x9d, 0xad, 0x82, + 0xc3, 0xd0, 0x9b, 0x3c, 0x78, 0xf5, 0x36, 0xda, 0x35, 0x83, 0x68, 0xf3, 0xfd, 0xd1, 0x57, 0x5b, + 0x60, 0xea, 0xf2, 0x3f, 0xdf, 0x49, 0x86, 0xd5, 0xa6, 0x41, 0x1a, 0x78, 0xcc, 0x16, 0x0d, 0x0a, + 0x26, 0x34, 0xa7, 0xd5, 0x8a, 0x73, 0xcf, 0x72, 0x3e, 0xec, 0xc1, 0xf9, 0xb4, 0xae, 0xb2, 0x86, + 0x11, 0x76, 0xc3, 0x25, 0x25, 0x1c, 0xb1, 0x45, 0x53, 0xf1, 0x8c, 0xeb, 0x15, 0xee, 0xbe, 0xc5, + 0xbd, 0xdb, 0x0f, 0x67, 0x4b, 0xac, 0x59, 0x23, 0xb6, 0x6d, 0x8d, 0xaf, 0x60, 0xb8, 0xf5, 0x78, + 0xf2, 0x12, 0x88, 0x30, 0x75, 0x5a, 0x70, 0xc1, 0x35, 0x5b, 0xb1, 0xdb, 0xbd, 0xe8, 0x25, 0x47, + 0xc2, 0xd4, 0xe7, 0x36, 0xb0, 0xfc, 0xfb, 0x18, 0x7a, 0xd7, 0x3c, 0xd7, 0x97, 0xdd, 0x8e, 0x38, + 0x41, 0x9e, 0x42, 0x1f, 0x8b, 0x42, 0x31, 0xdd, 0xed, 0x48, 0xa7, 0xc6, 0xbf, 0x80, 0xdc, 0x9c, + 0xc0, 0x9e, 0xc4, 0x13, 0x18, 0x96, 0x12, 0xaf, 0xf5, 0x65, 0x5a, 0xd0, 0x4c, 0xa3, 0xec, 0xc8, + 0x03, 0x67, 0x9e, 0x5b, 0xaf, 0x6d, 0x4b, 0x65, 0xb4, 0x62, 0x1d, 0xdf, 0x89, 0xf1, 0x0b, 0x18, + 0xfd, 0x37, 0x91, 0xb6, 0xd3, 0x19, 0x1a, 0x91, 0xb7, 0x3c, 0xbf, 0xed, 0xd4, 0xa9, 0xe9, 0x68, + 0xb5, 0x79, 0xd8, 0xb4, 0xa3, 0x9c, 0xfe, 0xf6, 0x20, 0xcc, 0xb0, 0xde, 0x39, 0xfc, 0xe9, 0xa3, + 0xcd, 0xe9, 0x5f, 0xb4, 0x87, 0x78, 0xe1, 0xfd, 0xf8, 0xd2, 0xa5, 0x94, 0x58, 0x51, 0x51, 0x46, + 0x28, 0xcb, 0xb8, 0x64, 0xc2, 0x9e, 0x69, 0x77, 0xd6, 0xb4, 0xe1, 0xea, 0xf6, 0xd3, 0x7e, 0xbf, + 0xed, 0xfc, 0xf5, 0xbc, 0x59, 0xdf, 0x66, 0xbe, 0xfe, 0x17, 0x00, 0x00, 0xff, 0xff, 0x14, 0x52, + 0xaf, 0xef, 0x13, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/log_entry.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/log_entry.pb.go new file mode 100644 index 0000000..33f6ffc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/log_entry.pb.go @@ -0,0 +1,293 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicecontrol/v1/log_entry.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _type "google.golang.org/genproto/googleapis/logging/type" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An individual log entry. +type LogEntry struct { + // Required. The log to which this log entry belongs. Examples: `"syslog"`, + // `"book_log"`. + Name string `protobuf:"bytes,10,opt,name=name,proto3" json:"name,omitempty"` + // The time the event described by the log entry occurred. 
If + // omitted, defaults to operation start time. + Timestamp *timestamp.Timestamp `protobuf:"bytes,11,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // The severity of the log entry. The default value is + // `LogSeverity.DEFAULT`. + Severity _type.LogSeverity `protobuf:"varint,12,opt,name=severity,proto3,enum=google.logging.type.LogSeverity" json:"severity,omitempty"` + // A unique ID for the log entry used for deduplication. If omitted, + // the implementation will generate one based on operation_id. + InsertId string `protobuf:"bytes,4,opt,name=insert_id,json=insertId,proto3" json:"insert_id,omitempty"` + // A set of user-defined (key, value) data that provides additional + // information about the log entry. + Labels map[string]string `protobuf:"bytes,13,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The log entry payload, which can be one of multiple types. + // + // Types that are valid to be assigned to Payload: + // *LogEntry_ProtoPayload + // *LogEntry_TextPayload + // *LogEntry_StructPayload + Payload isLogEntry_Payload `protobuf_oneof:"payload"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogEntry) Reset() { *m = LogEntry{} } +func (m *LogEntry) String() string { return proto.CompactTextString(m) } +func (*LogEntry) ProtoMessage() {} +func (*LogEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_log_entry_ab78733b8d6a30bb, []int{0} +} +func (m *LogEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogEntry.Unmarshal(m, b) +} +func (m *LogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogEntry.Marshal(b, m, deterministic) +} +func (dst *LogEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogEntry.Merge(dst, src) +} +func (m *LogEntry) XXX_Size() int { + return xxx_messageInfo_LogEntry.Size(m) +} +func (m *LogEntry) XXX_DiscardUnknown() { + xxx_messageInfo_LogEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_LogEntry proto.InternalMessageInfo + +func (m *LogEntry) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LogEntry) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +func (m *LogEntry) GetSeverity() _type.LogSeverity { + if m != nil { + return m.Severity + } + return _type.LogSeverity_DEFAULT +} + +func (m *LogEntry) GetInsertId() string { + if m != nil { + return m.InsertId + } + return "" +} + +func (m *LogEntry) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +type isLogEntry_Payload interface { + isLogEntry_Payload() +} + +type LogEntry_ProtoPayload struct { + ProtoPayload *any.Any `protobuf:"bytes,2,opt,name=proto_payload,json=protoPayload,proto3,oneof"` +} + +type LogEntry_TextPayload struct { + TextPayload string `protobuf:"bytes,3,opt,name=text_payload,json=textPayload,proto3,oneof"` +} + +type LogEntry_StructPayload struct { + StructPayload *_struct.Struct `protobuf:"bytes,6,opt,name=struct_payload,json=structPayload,proto3,oneof"` +} + +func (*LogEntry_ProtoPayload) isLogEntry_Payload() {} + +func (*LogEntry_TextPayload) isLogEntry_Payload() {} + +func (*LogEntry_StructPayload) isLogEntry_Payload() {} + +func (m *LogEntry) GetPayload() isLogEntry_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *LogEntry) GetProtoPayload() *any.Any { + if x, ok := 
m.GetPayload().(*LogEntry_ProtoPayload); ok { + return x.ProtoPayload + } + return nil +} + +func (m *LogEntry) GetTextPayload() string { + if x, ok := m.GetPayload().(*LogEntry_TextPayload); ok { + return x.TextPayload + } + return "" +} + +func (m *LogEntry) GetStructPayload() *_struct.Struct { + if x, ok := m.GetPayload().(*LogEntry_StructPayload); ok { + return x.StructPayload + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LogEntry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LogEntry_OneofMarshaler, _LogEntry_OneofUnmarshaler, _LogEntry_OneofSizer, []interface{}{ + (*LogEntry_ProtoPayload)(nil), + (*LogEntry_TextPayload)(nil), + (*LogEntry_StructPayload)(nil), + } +} + +func _LogEntry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LogEntry) + // payload + switch x := m.Payload.(type) { + case *LogEntry_ProtoPayload: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProtoPayload); err != nil { + return err + } + case *LogEntry_TextPayload: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TextPayload) + case *LogEntry_StructPayload: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructPayload); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LogEntry.Payload has unexpected type %T", x) + } + return nil +} + +func _LogEntry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LogEntry) + switch tag { + case 2: // payload.proto_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Payload = &LogEntry_ProtoPayload{msg} + return true, err + case 3: // payload.text_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Payload = &LogEntry_TextPayload{x} + return true, err + case 6: // payload.struct_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.Payload = &LogEntry_StructPayload{msg} + return true, err + default: + return false, nil + } +} + +func _LogEntry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LogEntry) + // payload + switch x := m.Payload.(type) { + case *LogEntry_ProtoPayload: + s := proto.Size(x.ProtoPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LogEntry_TextPayload: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TextPayload))) + n += len(x.TextPayload) + case *LogEntry_StructPayload: + s := proto.Size(x.StructPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*LogEntry)(nil), "google.api.servicecontrol.v1.LogEntry") + proto.RegisterMapType((map[string]string)(nil), "google.api.servicecontrol.v1.LogEntry.LabelsEntry") +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/log_entry.proto", fileDescriptor_log_entry_ab78733b8d6a30bb) +} + +var fileDescriptor_log_entry_ab78733b8d6a30bb = []byte{ + // 454 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0x4f, 0x8f, 0xd3, 
0x30, + 0x10, 0xc5, 0x9b, 0xed, 0x52, 0x1a, 0xa7, 0x5d, 0x21, 0x6b, 0x25, 0x42, 0xa8, 0x44, 0x04, 0x12, + 0xea, 0x01, 0x39, 0xda, 0x72, 0x59, 0x58, 0x0e, 0x50, 0x09, 0xa9, 0xac, 0x7a, 0x58, 0x65, 0x39, + 0x71, 0xa9, 0xdc, 0xd6, 0x58, 0x16, 0xae, 0x27, 0x4a, 0xdc, 0x88, 0x9c, 0xf8, 0xbc, 0x7c, 0x0b, + 0x94, 0xb1, 0x53, 0xf6, 0x9f, 0xf6, 0xe6, 0xf1, 0xfb, 0xbd, 0x99, 0x79, 0x8e, 0x42, 0xde, 0x49, + 0x00, 0xa9, 0x45, 0xc6, 0x0b, 0x95, 0x55, 0xa2, 0xac, 0xd5, 0x46, 0x6c, 0xc0, 0xd8, 0x12, 0x74, + 0x56, 0x9f, 0x65, 0x1a, 0xe4, 0x4a, 0x18, 0x5b, 0x36, 0xac, 0x28, 0xc1, 0x02, 0x9d, 0x38, 0x9a, + 0xf1, 0x42, 0xb1, 0xdb, 0x34, 0xab, 0xcf, 0x92, 0xc9, 0x8d, 0x5e, 0xdc, 0x18, 0xb0, 0xdc, 0x2a, + 0x30, 0x95, 0xf3, 0x26, 0x6f, 0xbd, 0xaa, 0x41, 0x4a, 0x65, 0x64, 0x66, 0x9b, 0x02, 0x8b, 0x55, + 0x25, 0x6a, 0x51, 0x2a, 0xeb, 0x67, 0x24, 0x2f, 0x3c, 0x87, 0xd5, 0x7a, 0xff, 0x33, 0xe3, 0xa6, + 0x93, 0x26, 0x77, 0xa5, 0xca, 0x96, 0xfb, 0x8d, 0xf5, 0xea, 0xab, 0xbb, 0xaa, 0x55, 0x3b, 0x51, + 0x59, 0xbe, 0x2b, 0x1c, 0xf0, 0xfa, 0x6f, 0x9f, 0x0c, 0x97, 0x20, 0xbf, 0xb6, 0x81, 0x28, 0x25, + 0xc7, 0x86, 0xef, 0x44, 0x4c, 0xd2, 0x60, 0x1a, 0xe6, 0x78, 0xa6, 0xe7, 0x24, 0x3c, 0x78, 0xe2, + 0x28, 0x0d, 0xa6, 0xd1, 0x2c, 0x61, 0x3e, 0x72, 0xd7, 0x95, 0x7d, 0xef, 0x88, 0xfc, 0x3f, 0x4c, + 0x3f, 0x91, 0x61, 0x17, 0x23, 0x1e, 0xa5, 0xc1, 0xf4, 0x64, 0x96, 0x76, 0x46, 0x9f, 0x97, 0xb5, + 0x79, 0xd9, 0x12, 0xe4, 0xb5, 0xe7, 0xf2, 0x83, 0x83, 0xbe, 0x24, 0xa1, 0x32, 0x95, 0x28, 0xed, + 0x4a, 0x6d, 0xe3, 0x63, 0x5c, 0x68, 0xe8, 0x2e, 0xbe, 0x6d, 0xe9, 0x25, 0x19, 0x68, 0xbe, 0x16, + 0xba, 0x8a, 0xc7, 0x69, 0x7f, 0x1a, 0xcd, 0x66, 0xec, 0xb1, 0x8f, 0xc0, 0xba, 0x80, 0x6c, 0x89, + 0x26, 0x3c, 0xe7, 0xbe, 0x03, 0xbd, 0x20, 0x63, 0xcc, 0xb1, 0x2a, 0x78, 0xa3, 0x81, 0x6f, 0xe3, + 0x23, 0x0c, 0x79, 0x7a, 0x2f, 0xe4, 0x17, 0xd3, 0x2c, 0x7a, 0xf9, 0x08, 0xeb, 0x2b, 0xc7, 0xd2, + 0x37, 0x64, 0x64, 0xc5, 0x6f, 0x7b, 0xf0, 0xf6, 0xdb, 0x45, 0x17, 0xbd, 0x3c, 0x6a, 0x6f, 0x3b, + 0xe8, 0x33, 0x39, 0x71, 0x1f, 0xe5, 0x80, 0x0d, 0x70, 0xc4, 0xf3, 0x7b, 0x23, 0xae, 0x11, 0x5b, + 0xf4, 0xf2, 0xb1, 0x33, 0xf8, 0x0e, 0xc9, 0x07, 0x12, 0xdd, 0x58, 0x9d, 0x3e, 0x23, 0xfd, 0x5f, + 0xa2, 0x89, 0x03, 0x7c, 0x95, 0xf6, 0x48, 0x4f, 0xc9, 0x93, 0x9a, 0xeb, 0xbd, 0xc0, 0xe5, 0xc3, + 0xdc, 0x15, 0x1f, 0x8f, 0xce, 0x83, 0x79, 0x48, 0x9e, 0xfa, 0xa9, 0xf3, 0x3f, 0x24, 0xdd, 0xc0, + 0xee, 0xd1, 0xa7, 0x9a, 0x8f, 0xbb, 0xb7, 0xba, 0xc2, 0x98, 0xc1, 0x8f, 0x4b, 0x8f, 0x4b, 0xd0, + 0xdc, 0x48, 0x06, 0xa5, 0xcc, 0xa4, 0x30, 0xb8, 0x71, 0xe6, 0x24, 0x5e, 0xa8, 0xea, 0xe1, 0x7f, + 0xe5, 0xe2, 0xf6, 0xcd, 0x7a, 0x80, 0xb6, 0xf7, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x67, 0x50, + 0x6e, 0x13, 0x61, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/metric_value.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/metric_value.pb.go new file mode 100644 index 0000000..fee5b4a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/metric_value.pb.go @@ -0,0 +1,386 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/servicecontrol/v1/metric_value.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/money" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a single metric value. +type MetricValue struct { + // The labels describing the metric value. + // See comments on + // [google.api.servicecontrol.v1.Operation.labels][google.api.servicecontrol.v1.Operation.labels] + // for the overriding relationship. + Labels map[string]string `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The start of the time period over which this metric value's measurement + // applies. The time period has different semantics for different metric + // types (cumulative, delta, and gauge). See the metric definition + // documentation in the service configuration for details. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The end of the time period over which this metric value's measurement + // applies. + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The value. The type of value used in the request must + // agree with the metric definition in the service configuration, otherwise + // the MetricValue is rejected. 
+ // + // Types that are valid to be assigned to Value: + // *MetricValue_BoolValue + // *MetricValue_Int64Value + // *MetricValue_DoubleValue + // *MetricValue_StringValue + // *MetricValue_DistributionValue + Value isMetricValue_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetricValue) Reset() { *m = MetricValue{} } +func (m *MetricValue) String() string { return proto.CompactTextString(m) } +func (*MetricValue) ProtoMessage() {} +func (*MetricValue) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_value_b13910fcfcfb831a, []int{0} +} +func (m *MetricValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetricValue.Unmarshal(m, b) +} +func (m *MetricValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetricValue.Marshal(b, m, deterministic) +} +func (dst *MetricValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetricValue.Merge(dst, src) +} +func (m *MetricValue) XXX_Size() int { + return xxx_messageInfo_MetricValue.Size(m) +} +func (m *MetricValue) XXX_DiscardUnknown() { + xxx_messageInfo_MetricValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MetricValue proto.InternalMessageInfo + +func (m *MetricValue) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *MetricValue) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *MetricValue) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +type isMetricValue_Value interface { + isMetricValue_Value() +} + +type MetricValue_BoolValue struct { + BoolValue bool `protobuf:"varint,4,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type MetricValue_Int64Value struct { + Int64Value int64 `protobuf:"varint,5,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type MetricValue_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,6,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type MetricValue_StringValue struct { + StringValue string `protobuf:"bytes,7,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type MetricValue_DistributionValue struct { + DistributionValue *Distribution `protobuf:"bytes,8,opt,name=distribution_value,json=distributionValue,proto3,oneof"` +} + +func (*MetricValue_BoolValue) isMetricValue_Value() {} + +func (*MetricValue_Int64Value) isMetricValue_Value() {} + +func (*MetricValue_DoubleValue) isMetricValue_Value() {} + +func (*MetricValue_StringValue) isMetricValue_Value() {} + +func (*MetricValue_DistributionValue) isMetricValue_Value() {} + +func (m *MetricValue) GetValue() isMetricValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *MetricValue) GetBoolValue() bool { + if x, ok := m.GetValue().(*MetricValue_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *MetricValue) GetInt64Value() int64 { + if x, ok := m.GetValue().(*MetricValue_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *MetricValue) GetDoubleValue() float64 { + if x, ok := m.GetValue().(*MetricValue_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *MetricValue) GetStringValue() string { + if x, ok := m.GetValue().(*MetricValue_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *MetricValue) GetDistributionValue() *Distribution { + if x, ok := m.GetValue().(*MetricValue_DistributionValue); ok { 
+ return x.DistributionValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*MetricValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MetricValue_OneofMarshaler, _MetricValue_OneofUnmarshaler, _MetricValue_OneofSizer, []interface{}{ + (*MetricValue_BoolValue)(nil), + (*MetricValue_Int64Value)(nil), + (*MetricValue_DoubleValue)(nil), + (*MetricValue_StringValue)(nil), + (*MetricValue_DistributionValue)(nil), + } +} + +func _MetricValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MetricValue) + // value + switch x := m.Value.(type) { + case *MetricValue_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *MetricValue_Int64Value: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *MetricValue_DoubleValue: + b.EncodeVarint(6<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *MetricValue_StringValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *MetricValue_DistributionValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DistributionValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MetricValue.Value has unexpected type %T", x) + } + return nil +} + +func _MetricValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MetricValue) + switch tag { + case 4: // value.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &MetricValue_BoolValue{x != 0} + return true, err + case 5: // value.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &MetricValue_Int64Value{int64(x)} + return true, err + case 6: // value.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Value = &MetricValue_DoubleValue{math.Float64frombits(x)} + return true, err + case 7: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &MetricValue_StringValue{x} + return true, err + case 8: // value.distribution_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Distribution) + err := b.DecodeMessage(msg) + m.Value = &MetricValue_DistributionValue{msg} + return true, err + default: + return false, nil + } +} + +func _MetricValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MetricValue) + // value + switch x := m.Value.(type) { + case *MetricValue_BoolValue: + n += 1 // tag and wire + n += 1 + case *MetricValue_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *MetricValue_DoubleValue: + n += 1 // tag and wire + n += 8 + case *MetricValue_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *MetricValue_DistributionValue: + s := proto.Size(x.DistributionValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents 
a set of metric values in the same metric. +// Each metric value in the set should have a unique combination of start time, +// end time, and label values. +type MetricValueSet struct { + // The metric name defined in the service configuration. + MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"` + // The values in this metric. + MetricValues []*MetricValue `protobuf:"bytes,2,rep,name=metric_values,json=metricValues,proto3" json:"metric_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetricValueSet) Reset() { *m = MetricValueSet{} } +func (m *MetricValueSet) String() string { return proto.CompactTextString(m) } +func (*MetricValueSet) ProtoMessage() {} +func (*MetricValueSet) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_value_b13910fcfcfb831a, []int{1} +} +func (m *MetricValueSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetricValueSet.Unmarshal(m, b) +} +func (m *MetricValueSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetricValueSet.Marshal(b, m, deterministic) +} +func (dst *MetricValueSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetricValueSet.Merge(dst, src) +} +func (m *MetricValueSet) XXX_Size() int { + return xxx_messageInfo_MetricValueSet.Size(m) +} +func (m *MetricValueSet) XXX_DiscardUnknown() { + xxx_messageInfo_MetricValueSet.DiscardUnknown(m) +} + +var xxx_messageInfo_MetricValueSet proto.InternalMessageInfo + +func (m *MetricValueSet) GetMetricName() string { + if m != nil { + return m.MetricName + } + return "" +} + +func (m *MetricValueSet) GetMetricValues() []*MetricValue { + if m != nil { + return m.MetricValues + } + return nil +} + +func init() { + proto.RegisterType((*MetricValue)(nil), "google.api.servicecontrol.v1.MetricValue") + proto.RegisterMapType((map[string]string)(nil), "google.api.servicecontrol.v1.MetricValue.LabelsEntry") + proto.RegisterType((*MetricValueSet)(nil), "google.api.servicecontrol.v1.MetricValueSet") +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/metric_value.proto", fileDescriptor_metric_value_b13910fcfcfb831a) +} + +var fileDescriptor_metric_value_b13910fcfcfb831a = []byte{ + // 482 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x8b, 0xd3, 0x40, + 0x14, 0xc7, 0x3b, 0x8d, 0xdb, 0x1f, 0x2f, 0xab, 0x68, 0x14, 0x0c, 0x65, 0xa1, 0x71, 0xbd, 0x44, + 0x0f, 0x13, 0x76, 0x75, 0xc5, 0xd5, 0x5b, 0x51, 0x28, 0xe2, 0x2e, 0x4b, 0x14, 0x0f, 0x7a, 0x58, + 0x26, 0xed, 0x33, 0x0c, 0x26, 0x33, 0x21, 0x33, 0x2d, 0xf4, 0xe8, 0xcd, 0x3f, 0xd9, 0x8b, 0x20, + 0xf3, 0xa3, 0x9a, 0x82, 0xd4, 0xbd, 0xe5, 0x7d, 0xf3, 0xfd, 0xbc, 0x7c, 0x27, 0xef, 0x0d, 0x64, + 0xa5, 0x94, 0x65, 0x85, 0x19, 0x6b, 0x78, 0xa6, 0xb0, 0x5d, 0xf3, 0x05, 0x2e, 0xa4, 0xd0, 0xad, + 0xac, 0xb2, 0xf5, 0x49, 0x56, 0xa3, 0x6e, 0xf9, 0xe2, 0x7a, 0xcd, 0xaa, 0x15, 0xd2, 0xa6, 0x95, + 0x5a, 0x46, 0x47, 0x0e, 0xa0, 0xac, 0xe1, 0x74, 0x17, 0xa0, 0xeb, 0x93, 0xc9, 0x51, 0xa7, 0x1d, + 0x13, 0x42, 0x6a, 0xa6, 0xb9, 0x14, 0xca, 0xb1, 0x93, 0xfd, 0x1f, 0x5b, 0x72, 0xa5, 0x5b, 0x5e, + 0xac, 0x0c, 0xe1, 0x81, 0xa9, 0x07, 0x6c, 0x55, 0xac, 0xbe, 0x66, 0x9a, 0xd7, 0xa8, 0x34, 0xab, + 0x1b, 0x6f, 0x78, 0xe8, 0x0d, 0x7a, 0xd3, 0x60, 0x56, 0x4b, 0x81, 0x1b, 0xf7, 0xe2, 0xf8, 0x57, + 0x00, 0xe1, 0x85, 0x4d, 0xff, 0xc9, 0x84, 0x8f, 0x2e, 0x60, 0x50, 0xb1, 0x02, 0x2b, 0x15, 0x93, + 
0x24, 0x48, 0xc3, 0xd3, 0x33, 0xba, 0xef, 0x1c, 0xb4, 0x83, 0xd2, 0xf7, 0x96, 0x7b, 0x2b, 0x74, + 0xbb, 0xc9, 0x7d, 0x93, 0xe8, 0x1c, 0x40, 0x69, 0xd6, 0xea, 0x6b, 0x13, 0x28, 0xee, 0x27, 0x24, + 0x0d, 0x4f, 0x27, 0xdb, 0x96, 0xdb, 0xb4, 0xf4, 0xe3, 0x36, 0x6d, 0x3e, 0xb6, 0x6e, 0x53, 0x47, + 0x67, 0x30, 0x42, 0xb1, 0x74, 0x60, 0xf0, 0x5f, 0x70, 0x88, 0x62, 0x69, 0xb1, 0x29, 0x40, 0x21, + 0x65, 0xe5, 0x66, 0x11, 0xdf, 0x4a, 0x48, 0x3a, 0x9a, 0xf7, 0xf2, 0xb1, 0xd1, 0xdc, 0x09, 0x1f, + 0x41, 0xc8, 0x85, 0x7e, 0xf1, 0xdc, 0x3b, 0x0e, 0x12, 0x92, 0x06, 0xf3, 0x5e, 0x0e, 0x56, 0x74, + 0x96, 0xc7, 0x70, 0xb8, 0x94, 0xab, 0xa2, 0x42, 0xef, 0x19, 0x24, 0x24, 0x25, 0xf3, 0x5e, 0x1e, + 0x3a, 0xf5, 0x8f, 0xc9, 0xcc, 0x41, 0x94, 0xde, 0x34, 0x4c, 0x48, 0x3a, 0x36, 0x26, 0xa7, 0x3a, + 0xd3, 0x17, 0x88, 0xba, 0xe3, 0xf2, 0xd6, 0x91, 0x3d, 0xce, 0xd3, 0xfd, 0xbf, 0xf6, 0x4d, 0x87, + 0x9b, 0xf7, 0xf2, 0x7b, 0xdd, 0x3e, 0xb6, 0xf9, 0xe4, 0x1c, 0xc2, 0xce, 0x3f, 0x8f, 0xee, 0x42, + 0xf0, 0x0d, 0x37, 0x31, 0x31, 0x39, 0x72, 0xf3, 0x18, 0x3d, 0x80, 0x03, 0xf7, 0xc1, 0xbe, 0xd5, + 0x5c, 0xf1, 0xaa, 0xff, 0x92, 0xcc, 0x86, 0xfe, 0xcd, 0xf1, 0x77, 0x02, 0x77, 0x3a, 0x43, 0xfc, + 0x80, 0x3a, 0x9a, 0x42, 0xe8, 0xf7, 0x59, 0xb0, 0x1a, 0x7d, 0x3f, 0x70, 0xd2, 0x25, 0xab, 0x31, + 0xba, 0x84, 0xdb, 0xdd, 0x85, 0x57, 0x71, 0xdf, 0xae, 0xca, 0x93, 0x1b, 0xaf, 0x4a, 0x7e, 0x58, + 0xff, 0x2d, 0xd4, 0xec, 0x07, 0x81, 0x64, 0x21, 0xeb, 0xbd, 0xf8, 0xec, 0xfe, 0x6e, 0xca, 0x2b, + 0xb3, 0x02, 0x57, 0xe4, 0xf3, 0x3b, 0x0f, 0x95, 0xb2, 0x62, 0xa2, 0xa4, 0xb2, 0x2d, 0xb3, 0x12, + 0x85, 0x5d, 0x10, 0x7f, 0x8b, 0x58, 0xc3, 0xd5, 0xbf, 0x6f, 0xd2, 0xeb, 0x5d, 0xe5, 0x27, 0x21, + 0xc5, 0xc0, 0x92, 0xcf, 0x7e, 0x07, 0x00, 0x00, 0xff, 0xff, 0xdb, 0x00, 0x1a, 0xde, 0xef, 0x03, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/operation.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/operation.pb.go new file mode 100644 index 0000000..c3d887e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/operation.pb.go @@ -0,0 +1,250 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicecontrol/v1/operation.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Defines the importance of the data contained in the operation. +type Operation_Importance int32 + +const ( + // The API implementation may cache and aggregate the data. + // The data may be lost when rare and unexpected system failures occur. + Operation_LOW Operation_Importance = 0 + // The API implementation doesn't cache and aggregate the data. + // If the method returns successfully, it's guaranteed that the data has + // been persisted in durable storage. 
+ Operation_HIGH Operation_Importance = 1 +) + +var Operation_Importance_name = map[int32]string{ + 0: "LOW", + 1: "HIGH", +} +var Operation_Importance_value = map[string]int32{ + "LOW": 0, + "HIGH": 1, +} + +func (x Operation_Importance) String() string { + return proto.EnumName(Operation_Importance_name, int32(x)) +} +func (Operation_Importance) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_3aad26a8020e2c86, []int{0, 0} +} + +// Represents information regarding an operation. +type Operation struct { + // Identity of the operation. This must be unique within the scope of the + // service that generated the operation. If the service calls + // Check() and Report() on the same operation, the two calls should carry + // the same id. + // + // UUID version 4 is recommended, though not required. + // In scenarios where an operation is computed from existing information + // and an idempotent id is desirable for deduplication purpose, UUID version 5 + // is recommended. See RFC 4122 for details. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Fully qualified name of the operation. Reserved for future use. + OperationName string `protobuf:"bytes,2,opt,name=operation_name,json=operationName,proto3" json:"operation_name,omitempty"` + // Identity of the consumer who is using the service. + // This field should be filled in for the operations initiated by a + // consumer, but not for service-initiated operations that are + // not related to a specific consumer. + // + // This can be in one of the following formats: + // project:, + // project_number:, + // api_key:. + ConsumerId string `protobuf:"bytes,3,opt,name=consumer_id,json=consumerId,proto3" json:"consumer_id,omitempty"` + // Required. Start time of the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of the operation. + // Required when the operation is used in + // [ServiceController.Report][google.api.servicecontrol.v1.ServiceController.Report], + // but optional when the operation is used in + // [ServiceController.Check][google.api.servicecontrol.v1.ServiceController.Check]. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Labels describing the operation. Only the following labels are allowed: + // + // - Labels describing monitored resources as defined in + // the service configuration. + // - Default labels of metric values. When specified, labels defined in the + // metric value override these default. + // - The following labels defined by Google Cloud Platform: + // - `cloud.googleapis.com/location` describing the location where the + // operation happened, + // - `servicecontrol.googleapis.com/user_agent` describing the user agent + // of the API request, + // - `servicecontrol.googleapis.com/service_agent` describing the service + // used to handle the API request (e.g. ESP), + // - `servicecontrol.googleapis.com/platform` describing the platform + // where the API is served (e.g. GAE, GCE, GKE). + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Represents information about this operation. Each MetricValueSet + // corresponds to a metric defined in the service configuration. 
+ // The data type used in the MetricValueSet must agree with + // the data type specified in the metric definition. + // + // Within a single operation, it is not allowed to have more than one + // MetricValue instances that have the same metric names and identical + // label value combinations. If a request has such duplicated MetricValue + // instances, the entire request is rejected with + // an invalid argument error. + MetricValueSets []*MetricValueSet `protobuf:"bytes,7,rep,name=metric_value_sets,json=metricValueSets,proto3" json:"metric_value_sets,omitempty"` + // Represents information to be logged. + LogEntries []*LogEntry `protobuf:"bytes,8,rep,name=log_entries,json=logEntries,proto3" json:"log_entries,omitempty"` + // DO NOT USE. This is an experimental field. + Importance Operation_Importance `protobuf:"varint,11,opt,name=importance,proto3,enum=google.api.servicecontrol.v1.Operation_Importance" json:"importance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_3aad26a8020e2c86, []int{0} +} +func (m *Operation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operation.Unmarshal(m, b) +} +func (m *Operation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operation.Marshal(b, m, deterministic) +} +func (dst *Operation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operation.Merge(dst, src) +} +func (m *Operation) XXX_Size() int { + return xxx_messageInfo_Operation.Size(m) +} +func (m *Operation) XXX_DiscardUnknown() { + xxx_messageInfo_Operation.DiscardUnknown(m) +} + +var xxx_messageInfo_Operation proto.InternalMessageInfo + +func (m *Operation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *Operation) GetOperationName() string { + if m != nil { + return m.OperationName + } + return "" +} + +func (m *Operation) GetConsumerId() string { + if m != nil { + return m.ConsumerId + } + return "" +} + +func (m *Operation) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Operation) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *Operation) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Operation) GetMetricValueSets() []*MetricValueSet { + if m != nil { + return m.MetricValueSets + } + return nil +} + +func (m *Operation) GetLogEntries() []*LogEntry { + if m != nil { + return m.LogEntries + } + return nil +} + +func (m *Operation) GetImportance() Operation_Importance { + if m != nil { + return m.Importance + } + return Operation_LOW +} + +func init() { + proto.RegisterType((*Operation)(nil), "google.api.servicecontrol.v1.Operation") + proto.RegisterMapType((map[string]string)(nil), "google.api.servicecontrol.v1.Operation.LabelsEntry") + proto.RegisterEnum("google.api.servicecontrol.v1.Operation_Importance", Operation_Importance_name, Operation_Importance_value) +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/operation.proto", fileDescriptor_operation_3aad26a8020e2c86) +} + +var fileDescriptor_operation_3aad26a8020e2c86 = []byte{ + // 483 bytes of a gzipped FileDescriptorProto + 0x1f, 
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x6b, 0x13, 0x41, + 0x14, 0xc7, 0x9d, 0xa6, 0xf9, 0xf5, 0x56, 0x63, 0x1c, 0x3c, 0x2c, 0xa1, 0x90, 0x58, 0x50, 0x72, + 0x28, 0xb3, 0x34, 0x45, 0xb0, 0x7a, 0x2b, 0x48, 0x1b, 0x8d, 0xb6, 0xac, 0xa2, 0xe2, 0x25, 0x4c, + 0x36, 0xcf, 0x65, 0x70, 0x77, 0x66, 0x99, 0x99, 0x04, 0x7a, 0xf6, 0xe2, 0x9f, 0xec, 0x51, 0x76, + 0xf6, 0x47, 0x13, 0x90, 0xb5, 0xb7, 0x7d, 0x8f, 0xef, 0xe7, 0xbb, 0xdf, 0x79, 0x6f, 0x06, 0x4e, + 0x62, 0xa5, 0xe2, 0x04, 0x03, 0x9e, 0x89, 0xc0, 0xa0, 0xde, 0x8a, 0x08, 0x23, 0x25, 0xad, 0x56, + 0x49, 0xb0, 0x3d, 0x0d, 0x54, 0x86, 0x9a, 0x5b, 0xa1, 0x24, 0xcb, 0xb4, 0xb2, 0x8a, 0x1e, 0x15, + 0x6a, 0xc6, 0x33, 0xc1, 0xf6, 0xd5, 0x6c, 0x7b, 0x3a, 0x3a, 0xda, 0xf1, 0xe2, 0x52, 0x2a, 0xeb, + 0x50, 0x53, 0xb0, 0xa3, 0xe6, 0x3f, 0x25, 0x2a, 0x5e, 0xa2, 0xb4, 0xfa, 0xb6, 0x54, 0x07, 0x8d, + 0xea, 0x14, 0xad, 0x16, 0xd1, 0x72, 0xcb, 0x93, 0x0d, 0x96, 0xc0, 0xb8, 0x04, 0x5c, 0xb5, 0xda, + 0xfc, 0x08, 0xac, 0x48, 0xd1, 0x58, 0x9e, 0x66, 0x85, 0xe0, 0xf8, 0x77, 0x1b, 0xfa, 0xd7, 0xd5, + 0x79, 0xe8, 0x33, 0x78, 0x58, 0x1f, 0x6e, 0x29, 0xd6, 0x3e, 0x99, 0x90, 0x69, 0x3f, 0xf4, 0xea, + 0xde, 0x7c, 0x4d, 0x9f, 0xc3, 0xe0, 0x4e, 0x22, 0x79, 0x8a, 0xfe, 0x81, 0x13, 0x3d, 0xaa, 0xbb, + 0x1f, 0x79, 0x8a, 0x74, 0x0c, 0x5e, 0xa4, 0xa4, 0xd9, 0xa4, 0xa8, 0x73, 0xa3, 0x96, 0xd3, 0x40, + 0xd5, 0x9a, 0xaf, 0xe9, 0x39, 0x80, 0xb1, 0x5c, 0xdb, 0x65, 0x9e, 0xc8, 0x3f, 0x9c, 0x90, 0xa9, + 0x37, 0x1b, 0xb1, 0x72, 0x92, 0x55, 0x5c, 0xf6, 0xb9, 0x8a, 0x1b, 0xf6, 0x9d, 0x3a, 0xaf, 0xe9, + 0x4b, 0xe8, 0xa1, 0x5c, 0x17, 0x60, 0xfb, 0xbf, 0x60, 0x17, 0xe5, 0xda, 0x61, 0xef, 0xa1, 0x93, + 0xf0, 0x15, 0x26, 0xc6, 0xef, 0x4c, 0x5a, 0x53, 0x6f, 0x76, 0xc6, 0x9a, 0xf6, 0xc6, 0xea, 0xa9, + 0xb0, 0x85, 0xa3, 0xde, 0xe6, 0x7b, 0x08, 0x4b, 0x0b, 0xfa, 0x0d, 0x9e, 0xec, 0x8e, 0x7b, 0x69, + 0xd0, 0x1a, 0xbf, 0xeb, 0x7c, 0x4f, 0x9a, 0x7d, 0x3f, 0x38, 0xec, 0x4b, 0x4e, 0x7d, 0x42, 0x1b, + 0x3e, 0x4e, 0xf7, 0x6a, 0x43, 0x2f, 0xc1, 0xab, 0xd6, 0x2e, 0xd0, 0xf8, 0x3d, 0xe7, 0xf9, 0xa2, + 0xd9, 0x73, 0xa1, 0xe2, 0x22, 0x1e, 0x24, 0xc5, 0x97, 0x40, 0x43, 0x43, 0x00, 0x91, 0x66, 0x4a, + 0x5b, 0x2e, 0x23, 0xf4, 0xbd, 0x09, 0x99, 0x0e, 0x66, 0xb3, 0xfb, 0x9e, 0x79, 0x5e, 0x93, 0xe1, + 0x8e, 0xcb, 0xe8, 0x1c, 0xbc, 0x9d, 0x69, 0xd0, 0x21, 0xb4, 0x7e, 0xe2, 0x6d, 0x79, 0x4d, 0xf2, + 0x4f, 0xfa, 0x14, 0xda, 0x6e, 0x20, 0xe5, 0xad, 0x28, 0x8a, 0xd7, 0x07, 0xaf, 0xc8, 0xf1, 0x18, + 0xe0, 0xce, 0x94, 0x76, 0xa1, 0xb5, 0xb8, 0xfe, 0x3a, 0x7c, 0x40, 0x7b, 0x70, 0x78, 0x35, 0xbf, + 0xbc, 0x1a, 0x92, 0x8b, 0x5f, 0x04, 0x26, 0x91, 0x4a, 0x1b, 0x13, 0x5e, 0x0c, 0xea, 0x88, 0x37, + 0xf9, 0xaa, 0x6f, 0xc8, 0xf7, 0x77, 0xa5, 0x3e, 0x56, 0x09, 0x97, 0x31, 0x53, 0x3a, 0x0e, 0x62, + 0x94, 0xee, 0x22, 0x94, 0xcf, 0x85, 0x67, 0xc2, 0xfc, 0xfb, 0xc9, 0xbc, 0xd9, 0xef, 0xfc, 0x21, + 0x64, 0xd5, 0x71, 0xe4, 0xd9, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x9c, 0xc2, 0x5d, 0x03, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/quota_controller.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/quota_controller.pb.go new file mode 100644 index 0000000..43984a5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/quota_controller.pb.go @@ -0,0 +1,590 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/servicecontrol/v1/quota_controller.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Supported quota modes. +type QuotaOperation_QuotaMode int32 + +const ( + // Guard against implicit default. Must not be used. + QuotaOperation_UNSPECIFIED QuotaOperation_QuotaMode = 0 + // For AllocateQuota request, allocates quota for the amount specified in + // the service configuration or specified using the quota metrics. If the + // amount is higher than the available quota, allocation error will be + // returned and no quota will be allocated. + QuotaOperation_NORMAL QuotaOperation_QuotaMode = 1 + // The operation allocates quota for the amount specified in the service + // configuration or specified using the quota metrics. If the amount is + // higher than the available quota, request does not fail but all available + // quota will be allocated. + QuotaOperation_BEST_EFFORT QuotaOperation_QuotaMode = 2 + // For AllocateQuota request, only checks if there is enough quota + // available and does not change the available quota. No lock is placed on + // the available quota either. + QuotaOperation_CHECK_ONLY QuotaOperation_QuotaMode = 3 +) + +var QuotaOperation_QuotaMode_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "NORMAL", + 2: "BEST_EFFORT", + 3: "CHECK_ONLY", +} +var QuotaOperation_QuotaMode_value = map[string]int32{ + "UNSPECIFIED": 0, + "NORMAL": 1, + "BEST_EFFORT": 2, + "CHECK_ONLY": 3, +} + +func (x QuotaOperation_QuotaMode) String() string { + return proto.EnumName(QuotaOperation_QuotaMode_name, int32(x)) +} +func (QuotaOperation_QuotaMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{1, 0} +} + +// Error codes related to project config validations are deprecated since the +// quota controller methods do not perform these validations. Instead services +// have to call the Check method, without quota_properties field, to perform +// these validations before calling the quota controller methods. These +// methods check only for project deletion to be wipe out compliant. +type QuotaError_Code int32 + +const ( + // This is never used. + QuotaError_UNSPECIFIED QuotaError_Code = 0 + // Quota allocation failed. + // Same as [google.rpc.Code.RESOURCE_EXHAUSTED][]. + QuotaError_RESOURCE_EXHAUSTED QuotaError_Code = 8 + // Consumer cannot access the service because the service requires active + // billing. + QuotaError_BILLING_NOT_ACTIVE QuotaError_Code = 107 + // Consumer's project has been marked as deleted (soft deletion). + QuotaError_PROJECT_DELETED QuotaError_Code = 108 + // Specified API key is invalid. + QuotaError_API_KEY_INVALID QuotaError_Code = 105 + // Specified API Key has expired. 
+ QuotaError_API_KEY_EXPIRED QuotaError_Code = 112 +) + +var QuotaError_Code_name = map[int32]string{ + 0: "UNSPECIFIED", + 8: "RESOURCE_EXHAUSTED", + 107: "BILLING_NOT_ACTIVE", + 108: "PROJECT_DELETED", + 105: "API_KEY_INVALID", + 112: "API_KEY_EXPIRED", +} +var QuotaError_Code_value = map[string]int32{ + "UNSPECIFIED": 0, + "RESOURCE_EXHAUSTED": 8, + "BILLING_NOT_ACTIVE": 107, + "PROJECT_DELETED": 108, + "API_KEY_INVALID": 105, + "API_KEY_EXPIRED": 112, +} + +func (x QuotaError_Code) String() string { + return proto.EnumName(QuotaError_Code_name, int32(x)) +} +func (QuotaError_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{3, 0} +} + +// Request message for the AllocateQuota method. +type AllocateQuotaRequest struct { + // Name of the service as specified in the service configuration. For example, + // `"pubsub.googleapis.com"`. + // + // See [google.api.Service][google.api.Service] for the definition of a + // service name. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // Operation that describes the quota allocation. + AllocateOperation *QuotaOperation `protobuf:"bytes,2,opt,name=allocate_operation,json=allocateOperation,proto3" json:"allocate_operation,omitempty"` + // Specifies which version of service configuration should be used to process + // the request. If unspecified or no matching version can be found, the latest + // one will be used. + ServiceConfigId string `protobuf:"bytes,4,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateQuotaRequest) Reset() { *m = AllocateQuotaRequest{} } +func (m *AllocateQuotaRequest) String() string { return proto.CompactTextString(m) } +func (*AllocateQuotaRequest) ProtoMessage() {} +func (*AllocateQuotaRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{0} +} +func (m *AllocateQuotaRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateQuotaRequest.Unmarshal(m, b) +} +func (m *AllocateQuotaRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateQuotaRequest.Marshal(b, m, deterministic) +} +func (dst *AllocateQuotaRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateQuotaRequest.Merge(dst, src) +} +func (m *AllocateQuotaRequest) XXX_Size() int { + return xxx_messageInfo_AllocateQuotaRequest.Size(m) +} +func (m *AllocateQuotaRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateQuotaRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateQuotaRequest proto.InternalMessageInfo + +func (m *AllocateQuotaRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *AllocateQuotaRequest) GetAllocateOperation() *QuotaOperation { + if m != nil { + return m.AllocateOperation + } + return nil +} + +func (m *AllocateQuotaRequest) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +// Represents information regarding a quota operation. +type QuotaOperation struct { + // Identity of the operation. This is expected to be unique within the scope + // of the service that generated the operation, and guarantees idempotency in + // case of retries. + // + // UUID version 4 is recommended, though not required. 
In scenarios where an + // operation is computed from existing information and an idempotent id is + // desirable for deduplication purpose, UUID version 5 is recommended. See + // RFC 4122 for details. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Fully qualified name of the API method for which this quota operation is + // requested. This name is used for matching quota rules or metric rules and + // billing status rules defined in service configuration. This field is not + // required if the quota operation is performed on non-API resources. + // + // Example of an RPC method name: + // google.example.library.v1.LibraryService.CreateShelf + MethodName string `protobuf:"bytes,2,opt,name=method_name,json=methodName,proto3" json:"method_name,omitempty"` + // Identity of the consumer for whom this quota operation is being performed. + // + // This can be in one of the following formats: + // project:, + // project_number:, + // api_key:. + ConsumerId string `protobuf:"bytes,3,opt,name=consumer_id,json=consumerId,proto3" json:"consumer_id,omitempty"` + // Labels describing the operation. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Represents information about this operation. Each MetricValueSet + // corresponds to a metric defined in the service configuration. + // The data type used in the MetricValueSet must agree with + // the data type specified in the metric definition. + // + // Within a single operation, it is not allowed to have more than one + // MetricValue instances that have the same metric names and identical + // label value combinations. If a request has such duplicated MetricValue + // instances, the entire request is rejected with + // an invalid argument error. + QuotaMetrics []*MetricValueSet `protobuf:"bytes,5,rep,name=quota_metrics,json=quotaMetrics,proto3" json:"quota_metrics,omitempty"` + // Quota mode for this operation. 
+ QuotaMode QuotaOperation_QuotaMode `protobuf:"varint,6,opt,name=quota_mode,json=quotaMode,proto3,enum=google.api.servicecontrol.v1.QuotaOperation_QuotaMode" json:"quota_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaOperation) Reset() { *m = QuotaOperation{} } +func (m *QuotaOperation) String() string { return proto.CompactTextString(m) } +func (*QuotaOperation) ProtoMessage() {} +func (*QuotaOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{1} +} +func (m *QuotaOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaOperation.Unmarshal(m, b) +} +func (m *QuotaOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaOperation.Marshal(b, m, deterministic) +} +func (dst *QuotaOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaOperation.Merge(dst, src) +} +func (m *QuotaOperation) XXX_Size() int { + return xxx_messageInfo_QuotaOperation.Size(m) +} +func (m *QuotaOperation) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaOperation proto.InternalMessageInfo + +func (m *QuotaOperation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *QuotaOperation) GetMethodName() string { + if m != nil { + return m.MethodName + } + return "" +} + +func (m *QuotaOperation) GetConsumerId() string { + if m != nil { + return m.ConsumerId + } + return "" +} + +func (m *QuotaOperation) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *QuotaOperation) GetQuotaMetrics() []*MetricValueSet { + if m != nil { + return m.QuotaMetrics + } + return nil +} + +func (m *QuotaOperation) GetQuotaMode() QuotaOperation_QuotaMode { + if m != nil { + return m.QuotaMode + } + return QuotaOperation_UNSPECIFIED +} + +// Response message for the AllocateQuota method. +type AllocateQuotaResponse struct { + // The same operation_id value used in the AllocateQuotaRequest. Used for + // logging and diagnostics purposes. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Indicates the decision of the allocate. + AllocateErrors []*QuotaError `protobuf:"bytes,2,rep,name=allocate_errors,json=allocateErrors,proto3" json:"allocate_errors,omitempty"` + // Quota metrics to indicate the result of allocation. Depending on the + // request, one or more of the following metrics will be included: + // + // 1. Per quota group or per quota metric incremental usage will be specified + // using the following delta metric : + // "serviceruntime.googleapis.com/api/consumer/quota_used_count" + // + // 2. The quota limit reached condition will be specified using the following + // boolean metric : + // "serviceruntime.googleapis.com/quota/exceeded" + QuotaMetrics []*MetricValueSet `protobuf:"bytes,3,rep,name=quota_metrics,json=quotaMetrics,proto3" json:"quota_metrics,omitempty"` + // ID of the actual config used to process the request. 
+ ServiceConfigId string `protobuf:"bytes,4,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateQuotaResponse) Reset() { *m = AllocateQuotaResponse{} } +func (m *AllocateQuotaResponse) String() string { return proto.CompactTextString(m) } +func (*AllocateQuotaResponse) ProtoMessage() {} +func (*AllocateQuotaResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{2} +} +func (m *AllocateQuotaResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateQuotaResponse.Unmarshal(m, b) +} +func (m *AllocateQuotaResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateQuotaResponse.Marshal(b, m, deterministic) +} +func (dst *AllocateQuotaResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateQuotaResponse.Merge(dst, src) +} +func (m *AllocateQuotaResponse) XXX_Size() int { + return xxx_messageInfo_AllocateQuotaResponse.Size(m) +} +func (m *AllocateQuotaResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateQuotaResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateQuotaResponse proto.InternalMessageInfo + +func (m *AllocateQuotaResponse) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *AllocateQuotaResponse) GetAllocateErrors() []*QuotaError { + if m != nil { + return m.AllocateErrors + } + return nil +} + +func (m *AllocateQuotaResponse) GetQuotaMetrics() []*MetricValueSet { + if m != nil { + return m.QuotaMetrics + } + return nil +} + +func (m *AllocateQuotaResponse) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +// Represents error information for +// [QuotaOperation][google.api.servicecontrol.v1.QuotaOperation]. +type QuotaError struct { + // Error code. + Code QuotaError_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.api.servicecontrol.v1.QuotaError_Code" json:"code,omitempty"` + // Subject to whom this error applies. See the specific enum for more details + // on this field. For example, "clientip:" or + // "project:". + Subject string `protobuf:"bytes,2,opt,name=subject,proto3" json:"subject,omitempty"` + // Free-form text that provides details on the cause of the error. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaError) Reset() { *m = QuotaError{} } +func (m *QuotaError) String() string { return proto.CompactTextString(m) } +func (*QuotaError) ProtoMessage() {} +func (*QuotaError) Descriptor() ([]byte, []int) { + return fileDescriptor_quota_controller_635a8e629d18a12a, []int{3} +} +func (m *QuotaError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaError.Unmarshal(m, b) +} +func (m *QuotaError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaError.Marshal(b, m, deterministic) +} +func (dst *QuotaError) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaError.Merge(dst, src) +} +func (m *QuotaError) XXX_Size() int { + return xxx_messageInfo_QuotaError.Size(m) +} +func (m *QuotaError) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaError.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaError proto.InternalMessageInfo + +func (m *QuotaError) GetCode() QuotaError_Code { + if m != nil { + return m.Code + } + return QuotaError_UNSPECIFIED +} + +func (m *QuotaError) GetSubject() string { + if m != nil { + return m.Subject + } + return "" +} + +func (m *QuotaError) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*AllocateQuotaRequest)(nil), "google.api.servicecontrol.v1.AllocateQuotaRequest") + proto.RegisterType((*QuotaOperation)(nil), "google.api.servicecontrol.v1.QuotaOperation") + proto.RegisterMapType((map[string]string)(nil), "google.api.servicecontrol.v1.QuotaOperation.LabelsEntry") + proto.RegisterType((*AllocateQuotaResponse)(nil), "google.api.servicecontrol.v1.AllocateQuotaResponse") + proto.RegisterType((*QuotaError)(nil), "google.api.servicecontrol.v1.QuotaError") + proto.RegisterEnum("google.api.servicecontrol.v1.QuotaOperation_QuotaMode", QuotaOperation_QuotaMode_name, QuotaOperation_QuotaMode_value) + proto.RegisterEnum("google.api.servicecontrol.v1.QuotaError_Code", QuotaError_Code_name, QuotaError_Code_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// QuotaControllerClient is the client API for QuotaController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type QuotaControllerClient interface { + // Attempts to allocate quota for the specified consumer. It should be called + // before the operation is executed. + // + // This method requires the `servicemanagement.services.quota` + // permission on the specified service. For more information, see + // [Cloud IAM](https://cloud.google.com/iam). + // + // **NOTE:** The client **must** fail-open on server errors `INTERNAL`, + // `UNKNOWN`, `DEADLINE_EXCEEDED`, and `UNAVAILABLE`. To ensure system + // reliability, the server may inject these errors to prohibit any hard + // dependency on the quota functionality. 
+ AllocateQuota(ctx context.Context, in *AllocateQuotaRequest, opts ...grpc.CallOption) (*AllocateQuotaResponse, error) +} + +type quotaControllerClient struct { + cc *grpc.ClientConn +} + +func NewQuotaControllerClient(cc *grpc.ClientConn) QuotaControllerClient { + return &quotaControllerClient{cc} +} + +func (c *quotaControllerClient) AllocateQuota(ctx context.Context, in *AllocateQuotaRequest, opts ...grpc.CallOption) (*AllocateQuotaResponse, error) { + out := new(AllocateQuotaResponse) + err := c.cc.Invoke(ctx, "/google.api.servicecontrol.v1.QuotaController/AllocateQuota", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// QuotaControllerServer is the server API for QuotaController service. +type QuotaControllerServer interface { + // Attempts to allocate quota for the specified consumer. It should be called + // before the operation is executed. + // + // This method requires the `servicemanagement.services.quota` + // permission on the specified service. For more information, see + // [Cloud IAM](https://cloud.google.com/iam). + // + // **NOTE:** The client **must** fail-open on server errors `INTERNAL`, + // `UNKNOWN`, `DEADLINE_EXCEEDED`, and `UNAVAILABLE`. To ensure system + // reliability, the server may inject these errors to prohibit any hard + // dependency on the quota functionality. + AllocateQuota(context.Context, *AllocateQuotaRequest) (*AllocateQuotaResponse, error) +} + +func RegisterQuotaControllerServer(s *grpc.Server, srv QuotaControllerServer) { + s.RegisterService(&_QuotaController_serviceDesc, srv) +} + +func _QuotaController_AllocateQuota_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AllocateQuotaRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QuotaControllerServer).AllocateQuota(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicecontrol.v1.QuotaController/AllocateQuota", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QuotaControllerServer).AllocateQuota(ctx, req.(*AllocateQuotaRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _QuotaController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.api.servicecontrol.v1.QuotaController", + HandlerType: (*QuotaControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AllocateQuota", + Handler: _QuotaController_AllocateQuota_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/api/servicecontrol/v1/quota_controller.proto", +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/quota_controller.proto", fileDescriptor_quota_controller_635a8e629d18a12a) +} + +var fileDescriptor_quota_controller_635a8e629d18a12a = []byte{ + // 775 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xc1, 0x6e, 0xea, 0x46, + 0x14, 0xed, 0x18, 0x42, 0x9b, 0xeb, 0x04, 0x9c, 0x69, 0x5a, 0x59, 0x28, 0x52, 0x28, 0x2b, 0x1a, + 0xb5, 0x46, 0x21, 0x55, 0x95, 0xa6, 0x2b, 0x30, 0x93, 0xc6, 0x09, 0x01, 0x62, 0x20, 0x4a, 0xda, + 0x85, 0xe5, 0xd8, 0x53, 0xea, 0xc6, 0x78, 0x1c, 0xdb, 0x20, 0x45, 0x55, 0x37, 0x5d, 0x54, 0xaa, + 0xd4, 0x5d, 0xfb, 0x1d, 0xfd, 0x88, 0xfc, 0x42, 0x7f, 0xe1, 0xfd, 0xc3, 0x7b, 0xcb, 0x27, 0x8f, + 0x0d, 0x0f, 0x22, 0xc4, 0x0b, 0x7a, 0x3b, 0xcf, 0xf1, 0x9c, 0x33, 0xf7, 0xde, 0x73, 0xe7, 0x0e, + 0x1c, 0x0d, 0x19, 0x1b, 
0xba, 0xb4, 0x6a, 0xfa, 0x4e, 0x35, 0xa4, 0xc1, 0xc4, 0xb1, 0xa8, 0xc5, + 0xbc, 0x28, 0x60, 0x6e, 0x75, 0x72, 0x58, 0x7d, 0x18, 0xb3, 0xc8, 0x34, 0x52, 0xc0, 0xa5, 0x81, + 0xe2, 0x07, 0x2c, 0x62, 0x78, 0x2f, 0x21, 0x29, 0xa6, 0xef, 0x28, 0x8b, 0x24, 0x65, 0x72, 0x58, + 0xdc, 0x9b, 0x93, 0x34, 0x3d, 0x8f, 0x45, 0x66, 0xe4, 0x30, 0x2f, 0x4c, 0xb8, 0xc5, 0xea, 0xca, + 0x03, 0x47, 0x34, 0x0a, 0x1c, 0xcb, 0x98, 0x98, 0xee, 0x98, 0x26, 0x84, 0xf2, 0x13, 0x82, 0xdd, + 0xba, 0xeb, 0x32, 0xcb, 0x8c, 0xe8, 0x55, 0x1c, 0x8f, 0x4e, 0x1f, 0xc6, 0x34, 0x8c, 0xf0, 0x17, + 0xb0, 0x95, 0x0a, 0x18, 0x9e, 0x39, 0xa2, 0x32, 0x2a, 0xa1, 0xca, 0xa6, 0x2e, 0xa6, 0x58, 0xdb, + 0x1c, 0x51, 0xfc, 0x13, 0x60, 0x33, 0xa5, 0x1a, 0xcc, 0xa7, 0x01, 0x8f, 0x44, 0x16, 0x4a, 0xa8, + 0x22, 0xd6, 0xbe, 0x52, 0x56, 0x65, 0xa1, 0xf0, 0xa3, 0x3a, 0x53, 0x8e, 0xbe, 0x33, 0xd5, 0x99, + 0x41, 0xf8, 0x00, 0x76, 0xa6, 0xe7, 0x5b, 0xcc, 0xfb, 0xd9, 0x19, 0x1a, 0x8e, 0x2d, 0x67, 0x79, + 0x10, 0x85, 0xf4, 0x87, 0xca, 0x71, 0xcd, 0x2e, 0xbf, 0xce, 0x40, 0x7e, 0x51, 0x31, 0x0e, 0x7f, + 0x16, 0x52, 0xcc, 0x4c, 0xc3, 0x9f, 0x61, 0x9a, 0x8d, 0xf7, 0x41, 0x1c, 0xd1, 0xe8, 0x17, 0x66, + 0x27, 0x09, 0x0a, 0x7c, 0x07, 0x24, 0x10, 0xcf, 0x6f, 0x1f, 0x44, 0x8b, 0x79, 0xe1, 0x78, 0x44, + 0x83, 0x58, 0x22, 0x93, 0x6c, 0x98, 0x42, 0x9a, 0x8d, 0xbb, 0x90, 0x73, 0xcd, 0x3b, 0xea, 0x86, + 0x72, 0xb6, 0x94, 0xa9, 0x88, 0xb5, 0xe3, 0x75, 0x92, 0x56, 0x5a, 0x9c, 0x4a, 0xbc, 0x28, 0x78, + 0xd4, 0x53, 0x1d, 0x7c, 0x05, 0xdb, 0x49, 0x57, 0x24, 0x56, 0x85, 0xf2, 0x06, 0x17, 0x7e, 0x4f, + 0x35, 0x2f, 0xf9, 0xe6, 0xeb, 0xd8, 0xd6, 0x1e, 0x8d, 0xf4, 0x2d, 0x2e, 0x91, 0x80, 0x21, 0x1e, + 0x00, 0xa4, 0x92, 0xcc, 0xa6, 0x72, 0xae, 0x84, 0x2a, 0xf9, 0xda, 0xb7, 0x6b, 0x05, 0xca, 0x97, + 0x97, 0xcc, 0xa6, 0xfa, 0xe6, 0xc3, 0xf4, 0xb3, 0xf8, 0x1d, 0x88, 0x73, 0x09, 0x60, 0x09, 0x32, + 0xf7, 0xf4, 0x31, 0x2d, 0x73, 0xfc, 0x89, 0x77, 0x61, 0x83, 0x37, 0x5a, 0x5a, 0xd8, 0x64, 0x71, + 0x22, 0x1c, 0xa3, 0xb2, 0x06, 0x9b, 0x33, 0x49, 0x5c, 0x00, 0x71, 0xd0, 0xee, 0x75, 0x89, 0xaa, + 0x9d, 0x6a, 0xa4, 0x29, 0x7d, 0x84, 0x01, 0x72, 0xed, 0x8e, 0x7e, 0x59, 0x6f, 0x49, 0x28, 0xfe, + 0xd9, 0x20, 0xbd, 0xbe, 0x41, 0x4e, 0x4f, 0x3b, 0x7a, 0x5f, 0x12, 0x70, 0x1e, 0x40, 0x3d, 0x23, + 0xea, 0x85, 0xd1, 0x69, 0xb7, 0x6e, 0xa5, 0x4c, 0xf9, 0x6f, 0x01, 0x3e, 0x7b, 0xd6, 0xbe, 0xa1, + 0xcf, 0xbc, 0x90, 0xbe, 0xa4, 0x01, 0xae, 0xa0, 0x30, 0xeb, 0x5f, 0x1a, 0x04, 0x2c, 0x08, 0x65, + 0x81, 0x97, 0xbb, 0xf2, 0x82, 0xf2, 0x90, 0x98, 0xa0, 0xe7, 0xa7, 0x02, 0x7c, 0xb9, 0xc4, 0xbf, + 0xcc, 0x07, 0xfb, 0xb7, 0xce, 0x45, 0xf8, 0x57, 0x00, 0x78, 0x17, 0x1d, 0xae, 0x43, 0xd6, 0x8a, + 0x4d, 0x47, 0xdc, 0xf4, 0xaf, 0x5f, 0x9a, 0x95, 0xa2, 0xc6, 0x5e, 0x73, 0x2a, 0x96, 0xe1, 0xe3, + 0x70, 0x7c, 0xf7, 0x2b, 0xb5, 0xa2, 0xd4, 0xc7, 0xe9, 0x12, 0x97, 0x40, 0xb4, 0x69, 0x68, 0x05, + 0x8e, 0xcf, 0xaf, 0x7d, 0x72, 0x3b, 0xe6, 0xa1, 0xf2, 0x9f, 0x08, 0xb2, 0xea, 0x52, 0x8f, 0x3f, + 0x07, 0xac, 0x93, 0x5e, 0x67, 0xa0, 0xab, 0xc4, 0x20, 0x37, 0x67, 0xf5, 0x41, 0xaf, 0x4f, 0x9a, + 0xd2, 0x27, 0x31, 0xde, 0xd0, 0x5a, 0x2d, 0xad, 0xfd, 0x83, 0xd1, 0xee, 0xf4, 0x8d, 0xba, 0xda, + 0xd7, 0xae, 0x89, 0x74, 0x8f, 0x3f, 0x85, 0x42, 0x57, 0xef, 0x9c, 0x13, 0xb5, 0x6f, 0x34, 0x49, + 0x8b, 0xc4, 0x9b, 0xdd, 0x18, 0xac, 0x77, 0x35, 0xe3, 0x82, 0xdc, 0x1a, 0x5a, 0xfb, 0xba, 0xde, + 0xd2, 0x9a, 0x92, 0x33, 0x0f, 0x92, 0x9b, 0xae, 0xa6, 0x93, 0xa6, 0xe4, 0xd7, 0x9e, 0x10, 0x14, + 0x78, 0x7a, 0xea, 0x6c, 0xd6, 0xe2, 0xff, 0x10, 0x6c, 0x2f, 0x74, 0x0e, 0xae, 0xad, 0xae, 0xcf, + 0xb2, 0x29, 0x59, 0x3c, 0x5a, 0x8b, 0x93, 0xb4, 
0x66, 0xf9, 0x9b, 0x3f, 0xfe, 0x7f, 0xf5, 0x8f, + 0xa0, 0x94, 0xbf, 0x8c, 0x67, 0x72, 0x4a, 0x0a, 0xab, 0xbf, 0xcd, 0x8f, 0xdb, 0xdf, 0x4f, 0xcc, + 0x79, 0xea, 0x09, 0x3a, 0x68, 0xfc, 0x85, 0xa0, 0x64, 0xb1, 0xd1, 0xca, 0x03, 0x1b, 0xbb, 0xcf, + 0xd2, 0xec, 0xc6, 0x43, 0xbe, 0x8b, 0x7e, 0x3c, 0x4f, 0x59, 0x43, 0xe6, 0x9a, 0xde, 0x50, 0x61, + 0xc1, 0xb0, 0x3a, 0xa4, 0x1e, 0x7f, 0x02, 0xd2, 0x27, 0xc3, 0xf4, 0x9d, 0x70, 0xf9, 0xb3, 0xf1, + 0xfd, 0x22, 0xf2, 0x06, 0xa1, 0xbb, 0x1c, 0x67, 0x1e, 0xbd, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xbb, + 0x98, 0x03, 0x4f, 0xe0, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/service_controller.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/service_controller.pb.go new file mode 100644 index 0000000..4034ca3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicecontrol/v1/service_controller.pb.go @@ -0,0 +1,658 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicecontrol/v1/service_controller.proto + +package servicecontrol // import "google.golang.org/genproto/googleapis/api/servicecontrol/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for the Check method. +type CheckRequest struct { + // The service name as specified in its service configuration. For example, + // `"pubsub.googleapis.com"`. + // + // See + // [google.api.Service](https://cloud.google.com/service-management/reference/rpc/google.api#google.api.Service) + // for the definition of a service name. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The operation to be checked. + Operation *Operation `protobuf:"bytes,2,opt,name=operation,proto3" json:"operation,omitempty"` + // Specifies which version of service configuration should be used to process + // the request. + // + // If unspecified or no matching version can be found, the + // latest one will be used. 
+ ServiceConfigId string `protobuf:"bytes,4,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckRequest) Reset() { *m = CheckRequest{} } +func (m *CheckRequest) String() string { return proto.CompactTextString(m) } +func (*CheckRequest) ProtoMessage() {} +func (*CheckRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{0} +} +func (m *CheckRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckRequest.Unmarshal(m, b) +} +func (m *CheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckRequest.Marshal(b, m, deterministic) +} +func (dst *CheckRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckRequest.Merge(dst, src) +} +func (m *CheckRequest) XXX_Size() int { + return xxx_messageInfo_CheckRequest.Size(m) +} +func (m *CheckRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckRequest proto.InternalMessageInfo + +func (m *CheckRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *CheckRequest) GetOperation() *Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *CheckRequest) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +// Response message for the Check method. +type CheckResponse struct { + // The same operation_id value used in the + // [CheckRequest][google.api.servicecontrol.v1.CheckRequest]. Used for logging + // and diagnostics purposes. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Indicate the decision of the check. + // + // If no check errors are present, the service should process the operation. + // Otherwise the service should use the list of errors to determine the + // appropriate action. + CheckErrors []*CheckError `protobuf:"bytes,2,rep,name=check_errors,json=checkErrors,proto3" json:"check_errors,omitempty"` + // The actual config id used to process the request. + ServiceConfigId string `protobuf:"bytes,5,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + // Feedback data returned from the server during processing a Check request. 
+ CheckInfo *CheckResponse_CheckInfo `protobuf:"bytes,6,opt,name=check_info,json=checkInfo,proto3" json:"check_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckResponse) Reset() { *m = CheckResponse{} } +func (m *CheckResponse) String() string { return proto.CompactTextString(m) } +func (*CheckResponse) ProtoMessage() {} +func (*CheckResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{1} +} +func (m *CheckResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckResponse.Unmarshal(m, b) +} +func (m *CheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckResponse.Marshal(b, m, deterministic) +} +func (dst *CheckResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckResponse.Merge(dst, src) +} +func (m *CheckResponse) XXX_Size() int { + return xxx_messageInfo_CheckResponse.Size(m) +} +func (m *CheckResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckResponse proto.InternalMessageInfo + +func (m *CheckResponse) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CheckResponse) GetCheckErrors() []*CheckError { + if m != nil { + return m.CheckErrors + } + return nil +} + +func (m *CheckResponse) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +func (m *CheckResponse) GetCheckInfo() *CheckResponse_CheckInfo { + if m != nil { + return m.CheckInfo + } + return nil +} + +type CheckResponse_CheckInfo struct { + // Consumer info of this check. + ConsumerInfo *CheckResponse_ConsumerInfo `protobuf:"bytes,2,opt,name=consumer_info,json=consumerInfo,proto3" json:"consumer_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckResponse_CheckInfo) Reset() { *m = CheckResponse_CheckInfo{} } +func (m *CheckResponse_CheckInfo) String() string { return proto.CompactTextString(m) } +func (*CheckResponse_CheckInfo) ProtoMessage() {} +func (*CheckResponse_CheckInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{1, 0} +} +func (m *CheckResponse_CheckInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckResponse_CheckInfo.Unmarshal(m, b) +} +func (m *CheckResponse_CheckInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckResponse_CheckInfo.Marshal(b, m, deterministic) +} +func (dst *CheckResponse_CheckInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckResponse_CheckInfo.Merge(dst, src) +} +func (m *CheckResponse_CheckInfo) XXX_Size() int { + return xxx_messageInfo_CheckResponse_CheckInfo.Size(m) +} +func (m *CheckResponse_CheckInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CheckResponse_CheckInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckResponse_CheckInfo proto.InternalMessageInfo + +func (m *CheckResponse_CheckInfo) GetConsumerInfo() *CheckResponse_ConsumerInfo { + if m != nil { + return m.ConsumerInfo + } + return nil +} + +// `ConsumerInfo` provides information about the consumer project. +type CheckResponse_ConsumerInfo struct { + // The Google cloud project number, e.g. 1234567890. A value of 0 indicates + // no project number is found. 
+ ProjectNumber int64 `protobuf:"varint,1,opt,name=project_number,json=projectNumber,proto3" json:"project_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckResponse_ConsumerInfo) Reset() { *m = CheckResponse_ConsumerInfo{} } +func (m *CheckResponse_ConsumerInfo) String() string { return proto.CompactTextString(m) } +func (*CheckResponse_ConsumerInfo) ProtoMessage() {} +func (*CheckResponse_ConsumerInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{1, 1} +} +func (m *CheckResponse_ConsumerInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckResponse_ConsumerInfo.Unmarshal(m, b) +} +func (m *CheckResponse_ConsumerInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckResponse_ConsumerInfo.Marshal(b, m, deterministic) +} +func (dst *CheckResponse_ConsumerInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckResponse_ConsumerInfo.Merge(dst, src) +} +func (m *CheckResponse_ConsumerInfo) XXX_Size() int { + return xxx_messageInfo_CheckResponse_ConsumerInfo.Size(m) +} +func (m *CheckResponse_ConsumerInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CheckResponse_ConsumerInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckResponse_ConsumerInfo proto.InternalMessageInfo + +func (m *CheckResponse_ConsumerInfo) GetProjectNumber() int64 { + if m != nil { + return m.ProjectNumber + } + return 0 +} + +// Request message for the Report method. +type ReportRequest struct { + // The service name as specified in its service configuration. For example, + // `"pubsub.googleapis.com"`. + // + // See + // [google.api.Service](https://cloud.google.com/service-management/reference/rpc/google.api#google.api.Service) + // for the definition of a service name. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // Operations to be reported. + // + // Typically the service should report one operation per request. + // Putting multiple operations into a single request is allowed, but should + // be used only when multiple operations are natually available at the time + // of the report. + // + // If multiple operations are in a single request, the total request size + // should be no larger than 1MB. See + // [ReportResponse.report_errors][google.api.servicecontrol.v1.ReportResponse.report_errors] + // for partial failure behavior. + Operations []*Operation `protobuf:"bytes,2,rep,name=operations,proto3" json:"operations,omitempty"` + // Specifies which version of service config should be used to process the + // request. + // + // If unspecified or no matching version can be found, the + // latest one will be used. 
+ ServiceConfigId string `protobuf:"bytes,3,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportRequest) Reset() { *m = ReportRequest{} } +func (m *ReportRequest) String() string { return proto.CompactTextString(m) } +func (*ReportRequest) ProtoMessage() {} +func (*ReportRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{2} +} +func (m *ReportRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportRequest.Unmarshal(m, b) +} +func (m *ReportRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportRequest.Marshal(b, m, deterministic) +} +func (dst *ReportRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportRequest.Merge(dst, src) +} +func (m *ReportRequest) XXX_Size() int { + return xxx_messageInfo_ReportRequest.Size(m) +} +func (m *ReportRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReportRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportRequest proto.InternalMessageInfo + +func (m *ReportRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *ReportRequest) GetOperations() []*Operation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ReportRequest) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +// Response message for the Report method. +type ReportResponse struct { + // Partial failures, one for each `Operation` in the request that failed + // processing. There are three possible combinations of the RPC status: + // + // 1. The combination of a successful RPC status and an empty `report_errors` + // list indicates a complete success where all `Operations` in the + // request are processed successfully. + // 2. The combination of a successful RPC status and a non-empty + // `report_errors` list indicates a partial success where some + // `Operations` in the request succeeded. Each + // `Operation` that failed processing has a corresponding item + // in this list. + // 3. A failed RPC status indicates a general non-deterministic failure. + // When this happens, it's impossible to know which of the + // 'Operations' in the request succeeded or failed. + ReportErrors []*ReportResponse_ReportError `protobuf:"bytes,1,rep,name=report_errors,json=reportErrors,proto3" json:"report_errors,omitempty"` + // The actual config id used to process the request. 
+ ServiceConfigId string `protobuf:"bytes,2,opt,name=service_config_id,json=serviceConfigId,proto3" json:"service_config_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportResponse) Reset() { *m = ReportResponse{} } +func (m *ReportResponse) String() string { return proto.CompactTextString(m) } +func (*ReportResponse) ProtoMessage() {} +func (*ReportResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{3} +} +func (m *ReportResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportResponse.Unmarshal(m, b) +} +func (m *ReportResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportResponse.Marshal(b, m, deterministic) +} +func (dst *ReportResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportResponse.Merge(dst, src) +} +func (m *ReportResponse) XXX_Size() int { + return xxx_messageInfo_ReportResponse.Size(m) +} +func (m *ReportResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReportResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportResponse proto.InternalMessageInfo + +func (m *ReportResponse) GetReportErrors() []*ReportResponse_ReportError { + if m != nil { + return m.ReportErrors + } + return nil +} + +func (m *ReportResponse) GetServiceConfigId() string { + if m != nil { + return m.ServiceConfigId + } + return "" +} + +// Represents the processing error of one +// [Operation][google.api.servicecontrol.v1.Operation] in the request. +type ReportResponse_ReportError struct { + // The + // [Operation.operation_id][google.api.servicecontrol.v1.Operation.operation_id] + // value from the request. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Details of the error when processing the + // [Operation][google.api.servicecontrol.v1.Operation]. 
+ Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportResponse_ReportError) Reset() { *m = ReportResponse_ReportError{} } +func (m *ReportResponse_ReportError) String() string { return proto.CompactTextString(m) } +func (*ReportResponse_ReportError) ProtoMessage() {} +func (*ReportResponse_ReportError) Descriptor() ([]byte, []int) { + return fileDescriptor_service_controller_fa10647c90d68c28, []int{3, 0} +} +func (m *ReportResponse_ReportError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportResponse_ReportError.Unmarshal(m, b) +} +func (m *ReportResponse_ReportError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportResponse_ReportError.Marshal(b, m, deterministic) +} +func (dst *ReportResponse_ReportError) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportResponse_ReportError.Merge(dst, src) +} +func (m *ReportResponse_ReportError) XXX_Size() int { + return xxx_messageInfo_ReportResponse_ReportError.Size(m) +} +func (m *ReportResponse_ReportError) XXX_DiscardUnknown() { + xxx_messageInfo_ReportResponse_ReportError.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportResponse_ReportError proto.InternalMessageInfo + +func (m *ReportResponse_ReportError) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *ReportResponse_ReportError) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func init() { + proto.RegisterType((*CheckRequest)(nil), "google.api.servicecontrol.v1.CheckRequest") + proto.RegisterType((*CheckResponse)(nil), "google.api.servicecontrol.v1.CheckResponse") + proto.RegisterType((*CheckResponse_CheckInfo)(nil), "google.api.servicecontrol.v1.CheckResponse.CheckInfo") + proto.RegisterType((*CheckResponse_ConsumerInfo)(nil), "google.api.servicecontrol.v1.CheckResponse.ConsumerInfo") + proto.RegisterType((*ReportRequest)(nil), "google.api.servicecontrol.v1.ReportRequest") + proto.RegisterType((*ReportResponse)(nil), "google.api.servicecontrol.v1.ReportResponse") + proto.RegisterType((*ReportResponse_ReportError)(nil), "google.api.servicecontrol.v1.ReportResponse.ReportError") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ServiceControllerClient is the client API for ServiceController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ServiceControllerClient interface { + // Checks an operation with Google Service Control to decide whether + // the given operation should proceed. It should be called before the + // operation is executed. + // + // If feasible, the client should cache the check results and reuse them for + // 60 seconds. In case of server errors, the client can rely on the cached + // results for longer time. + // + // NOTE: the [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has the + // size limit of 64KB. + // + // This method requires the `servicemanagement.services.check` permission + // on the specified service. 
For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) + // Reports operation results to Google Service Control, such as logs and + // metrics. It should be called after an operation is completed. + // + // If feasible, the client should aggregate reporting data for up to 5 + // seconds to reduce API traffic. Limiting aggregation to 5 seconds is to + // reduce data loss during client crashes. Clients should carefully choose + // the aggregation time window to avoid data loss risk more than 0.01% + // for business and compliance reasons. + // + // NOTE: the [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + // the size limit of 1MB. + // + // This method requires the `servicemanagement.services.report` permission + // on the specified service. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + Report(ctx context.Context, in *ReportRequest, opts ...grpc.CallOption) (*ReportResponse, error) +} + +type serviceControllerClient struct { + cc *grpc.ClientConn +} + +func NewServiceControllerClient(cc *grpc.ClientConn) ServiceControllerClient { + return &serviceControllerClient{cc} +} + +func (c *serviceControllerClient) Check(ctx context.Context, in *CheckRequest, opts ...grpc.CallOption) (*CheckResponse, error) { + out := new(CheckResponse) + err := c.cc.Invoke(ctx, "/google.api.servicecontrol.v1.ServiceController/Check", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceControllerClient) Report(ctx context.Context, in *ReportRequest, opts ...grpc.CallOption) (*ReportResponse, error) { + out := new(ReportResponse) + err := c.cc.Invoke(ctx, "/google.api.servicecontrol.v1.ServiceController/Report", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServiceControllerServer is the server API for ServiceController service. +type ServiceControllerServer interface { + // Checks an operation with Google Service Control to decide whether + // the given operation should proceed. It should be called before the + // operation is executed. + // + // If feasible, the client should cache the check results and reuse them for + // 60 seconds. In case of server errors, the client can rely on the cached + // results for longer time. + // + // NOTE: the [CheckRequest][google.api.servicecontrol.v1.CheckRequest] has the + // size limit of 64KB. + // + // This method requires the `servicemanagement.services.check` permission + // on the specified service. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + Check(context.Context, *CheckRequest) (*CheckResponse, error) + // Reports operation results to Google Service Control, such as logs and + // metrics. It should be called after an operation is completed. + // + // If feasible, the client should aggregate reporting data for up to 5 + // seconds to reduce API traffic. Limiting aggregation to 5 seconds is to + // reduce data loss during client crashes. Clients should carefully choose + // the aggregation time window to avoid data loss risk more than 0.01% + // for business and compliance reasons. + // + // NOTE: the [ReportRequest][google.api.servicecontrol.v1.ReportRequest] has + // the size limit of 1MB. + // + // This method requires the `servicemanagement.services.report` permission + // on the specified service. 
For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + Report(context.Context, *ReportRequest) (*ReportResponse, error) +} + +func RegisterServiceControllerServer(s *grpc.Server, srv ServiceControllerServer) { + s.RegisterService(&_ServiceController_serviceDesc, srv) +} + +func _ServiceController_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceControllerServer).Check(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicecontrol.v1.ServiceController/Check", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceControllerServer).Check(ctx, req.(*CheckRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceController_Report_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceControllerServer).Report(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicecontrol.v1.ServiceController/Report", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceControllerServer).Report(ctx, req.(*ReportRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ServiceController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.api.servicecontrol.v1.ServiceController", + HandlerType: (*ServiceControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Check", + Handler: _ServiceController_Check_Handler, + }, + { + MethodName: "Report", + Handler: _ServiceController_Report_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/api/servicecontrol/v1/service_controller.proto", +} + +func init() { + proto.RegisterFile("google/api/servicecontrol/v1/service_controller.proto", fileDescriptor_service_controller_fa10647c90d68c28) +} + +var fileDescriptor_service_controller_fa10647c90d68c28 = []byte{ + // 619 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0xd6, 0x3a, 0x6d, 0xa4, 0x4c, 0x9c, 0xfe, 0xea, 0x1e, 0x7e, 0x22, 0xab, 0x87, 0xd4, 0x12, + 0x34, 0x4a, 0x8b, 0xad, 0x16, 0x55, 0x42, 0xe1, 0x44, 0xa3, 0xaa, 0x0a, 0x48, 0xa5, 0x72, 0x38, + 0x21, 0xaa, 0xc8, 0xdd, 0x6c, 0x8c, 0x4b, 0xb2, 0x6b, 0xd6, 0x4e, 0x2e, 0x88, 0x0b, 0x0f, 0xc0, + 0xa1, 0xbc, 0x01, 0xaa, 0xc4, 0x33, 0xf0, 0x1c, 0xbc, 0x02, 0x0f, 0x01, 0x37, 0x94, 0xdd, 0xb5, + 0xeb, 0x08, 0x63, 0x92, 0x9b, 0xf7, 0xdb, 0x99, 0xf9, 0xbe, 0x9d, 0xf9, 0x3c, 0x70, 0x1c, 0x70, + 0x1e, 0x4c, 0xa8, 0xeb, 0x47, 0xa1, 0x1b, 0x53, 0x31, 0x0f, 0x09, 0x25, 0x9c, 0x25, 0x82, 0x4f, + 0xdc, 0xf9, 0x61, 0x8a, 0x0c, 0x35, 0x34, 0xa1, 0xc2, 0x89, 0x04, 0x4f, 0x38, 0xde, 0x51, 0x69, + 0x8e, 0x1f, 0x85, 0xce, 0x72, 0x9a, 0x33, 0x3f, 0xb4, 0x76, 0x72, 0x45, 0x7d, 0xc6, 0x78, 0xe2, + 0x27, 0x21, 0x67, 0xb1, 0xca, 0xb5, 0x9c, 0x52, 0x4a, 0xf2, 0x86, 0x92, 0xb7, 0x43, 0x2a, 0x04, + 0xd7, 0x5c, 0xd6, 0x41, 0x69, 0x3c, 0x8f, 0xa8, 0x90, 0xe5, 0x75, 0xf4, 0x3d, 0x1d, 0x2d, 0x22, + 0xe2, 0xc6, 0x89, 0x9f, 0xcc, 0x34, 0xad, 0x7d, 0x8b, 0xc0, 0xec, 0x2d, 0x8a, 0x7b, 0xf4, 0xdd, + 0x8c, 0xc6, 
0x09, 0xde, 0x05, 0x33, 0x7d, 0x1f, 0xf3, 0xa7, 0xb4, 0x89, 0x5a, 0xa8, 0x5d, 0xf3, + 0xea, 0x1a, 0x3b, 0xf7, 0xa7, 0x14, 0x9f, 0x42, 0x2d, 0xab, 0xdf, 0x34, 0x5a, 0xa8, 0x5d, 0x3f, + 0xda, 0x73, 0xca, 0x9e, 0xee, 0xbc, 0x48, 0xc3, 0xbd, 0xbb, 0x4c, 0xdc, 0x81, 0xed, 0x5c, 0x27, + 0xc7, 0x61, 0x30, 0x0c, 0x47, 0xcd, 0x0d, 0x49, 0xf7, 0x9f, 0xbe, 0xe8, 0x49, 0xbc, 0x3f, 0xb2, + 0x6f, 0x2b, 0xd0, 0xd0, 0x32, 0xe3, 0x88, 0xb3, 0x98, 0x2e, 0x74, 0x66, 0xa5, 0x16, 0x89, 0x5a, + 0x67, 0x86, 0xf5, 0x47, 0xf8, 0x39, 0x98, 0xb9, 0xbe, 0xc5, 0x4d, 0xa3, 0x55, 0x69, 0xd7, 0x8f, + 0xda, 0xe5, 0x52, 0x25, 0xcb, 0xe9, 0x22, 0xc1, 0xab, 0x93, 0xec, 0x3b, 0x2e, 0x56, 0xbb, 0x59, + 0xa8, 0x16, 0xbf, 0x04, 0x50, 0xc4, 0x21, 0x1b, 0xf3, 0x66, 0x55, 0x76, 0xe8, 0x78, 0x05, 0xda, + 0xf4, 0x71, 0xea, 0xd4, 0x67, 0x63, 0xee, 0xd5, 0x48, 0xfa, 0x69, 0x5d, 0x43, 0x2d, 0xc3, 0xf1, + 0x25, 0x34, 0x08, 0x67, 0xf1, 0x6c, 0x4a, 0x85, 0x62, 0x51, 0x73, 0x78, 0xbc, 0x16, 0x8b, 0x2e, + 0x20, 0x89, 0x4c, 0x92, 0x3b, 0x59, 0xc7, 0x60, 0xe6, 0x6f, 0xf1, 0x7d, 0xd8, 0x8a, 0x04, 0xbf, + 0xa6, 0x24, 0x19, 0xb2, 0xd9, 0xf4, 0x8a, 0x0a, 0xd9, 0xef, 0x8a, 0xd7, 0xd0, 0xe8, 0xb9, 0x04, + 0xed, 0xaf, 0x08, 0x1a, 0x1e, 0x8d, 0xb8, 0x48, 0xd6, 0xb0, 0xd3, 0x19, 0x40, 0x36, 0xb5, 0x74, + 0x48, 0x2b, 0xfb, 0x29, 0x97, 0x5a, 0x3c, 0xa2, 0x4a, 0xb1, 0xa1, 0x7e, 0x21, 0xd8, 0x4a, 0x95, + 0x6a, 0x47, 0x5d, 0x42, 0x43, 0x48, 0x24, 0xf5, 0x0b, 0x92, 0x52, 0xfe, 0xd1, 0xd2, 0xe5, 0x22, + 0xfa, 0xa8, 0xfc, 0x63, 0x8a, 0xbb, 0xc3, 0x5f, 0xd4, 0x19, 0x85, 0xea, 0xac, 0xd7, 0x50, 0xcf, + 0x15, 0x5a, 0xc5, 0xeb, 0x1d, 0xa8, 0xaa, 0xff, 0x5a, 0x1b, 0x01, 0xa7, 0xaa, 0x45, 0x44, 0x9c, + 0x81, 0xbc, 0xf1, 0x74, 0xc4, 0xd1, 0x37, 0x03, 0xb6, 0x07, 0x19, 0xa3, 0x5e, 0x61, 0xf8, 0x13, + 0x82, 0x4d, 0xe9, 0x0f, 0xdc, 0x59, 0xc9, 0x44, 0x72, 0xbe, 0xd6, 0xfe, 0x1a, 0x86, 0xb3, 0x0f, + 0x3e, 0x7e, 0xff, 0xf1, 0xd9, 0x78, 0x60, 0xef, 0xe6, 0xb6, 0x68, 0xec, 0xbe, 0xcf, 0x1b, 0xe4, + 0x43, 0x57, 0x1a, 0xbe, 0x8b, 0x3a, 0xf8, 0x06, 0x41, 0x55, 0x75, 0x01, 0xef, 0xaf, 0x36, 0x03, + 0x25, 0xe9, 0x60, 0x9d, 0x81, 0xd9, 0x0f, 0xa5, 0xa6, 0x3d, 0xdb, 0x2e, 0xd3, 0xa4, 0x06, 0xd9, + 0x45, 0x9d, 0x93, 0x1b, 0x04, 0x2d, 0xc2, 0xa7, 0xa5, 0x14, 0x27, 0xff, 0xff, 0xd1, 0xdd, 0x8b, + 0xc5, 0xb2, 0xbd, 0x40, 0xaf, 0x9e, 0xe9, 0xbc, 0x80, 0x4f, 0x7c, 0x16, 0x38, 0x5c, 0x04, 0x6e, + 0x40, 0x99, 0x5c, 0xc5, 0xae, 0xba, 0xf2, 0xa3, 0x30, 0x2e, 0x5e, 0xea, 0x4f, 0x96, 0x91, 0x9f, + 0x08, 0x7d, 0x31, 0x36, 0xce, 0x9e, 0x0e, 0x7a, 0x57, 0x55, 0x59, 0xe0, 0xd1, 0xef, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x5e, 0x28, 0x7b, 0xe6, 0xb7, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/resources.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/resources.pb.go new file mode 100644 index 0000000..d8b8b44 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/resources.pb.go @@ -0,0 +1,1036 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/api/servicemanagement/v1/resources.proto + +package servicemanagement // import "google.golang.org/genproto/googleapis/api/servicemanagement/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import configchange "google.golang.org/genproto/googleapis/api/configchange" +import _ "google.golang.org/genproto/googleapis/api/metric" +import _ "google.golang.org/genproto/googleapis/api/serviceconfig" +import _ "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/googleapis/rpc/status" +import _ "google.golang.org/genproto/protobuf/field_mask" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Code describes the status of the operation (or one of its steps). +type OperationMetadata_Status int32 + +const ( + // Unspecifed code. + OperationMetadata_STATUS_UNSPECIFIED OperationMetadata_Status = 0 + // The operation or step has completed without errors. + OperationMetadata_DONE OperationMetadata_Status = 1 + // The operation or step has not started yet. + OperationMetadata_NOT_STARTED OperationMetadata_Status = 2 + // The operation or step is in progress. + OperationMetadata_IN_PROGRESS OperationMetadata_Status = 3 + // The operation or step has completed with errors. If the operation is + // rollbackable, the rollback completed with errors too. + OperationMetadata_FAILED OperationMetadata_Status = 4 + // The operation or step has completed with cancellation. + OperationMetadata_CANCELLED OperationMetadata_Status = 5 +) + +var OperationMetadata_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "DONE", + 2: "NOT_STARTED", + 3: "IN_PROGRESS", + 4: "FAILED", + 5: "CANCELLED", +} +var OperationMetadata_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "DONE": 1, + "NOT_STARTED": 2, + "IN_PROGRESS": 3, + "FAILED": 4, + "CANCELLED": 5, +} + +func (x OperationMetadata_Status) String() string { + return proto.EnumName(OperationMetadata_Status_name, int32(x)) +} +func (OperationMetadata_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{1, 0} +} + +// The kind of diagnostic information possible. +type Diagnostic_Kind int32 + +const ( + // Warnings and errors + Diagnostic_WARNING Diagnostic_Kind = 0 + // Only errors + Diagnostic_ERROR Diagnostic_Kind = 1 +) + +var Diagnostic_Kind_name = map[int32]string{ + 0: "WARNING", + 1: "ERROR", +} +var Diagnostic_Kind_value = map[string]int32{ + "WARNING": 0, + "ERROR": 1, +} + +func (x Diagnostic_Kind) String() string { + return proto.EnumName(Diagnostic_Kind_name, int32(x)) +} +func (Diagnostic_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{2, 0} +} + +type ConfigFile_FileType int32 + +const ( + // Unknown file type. 
+ ConfigFile_FILE_TYPE_UNSPECIFIED ConfigFile_FileType = 0 + // YAML-specification of service. + ConfigFile_SERVICE_CONFIG_YAML ConfigFile_FileType = 1 + // OpenAPI specification, serialized in JSON. + ConfigFile_OPEN_API_JSON ConfigFile_FileType = 2 + // OpenAPI specification, serialized in YAML. + ConfigFile_OPEN_API_YAML ConfigFile_FileType = 3 + // FileDescriptorSet, generated by protoc. + // + // To generate, use protoc with imports and source info included. + // For an example test.proto file, the following command would put the value + // in a new file named out.pb. + // + // $protoc --include_imports --include_source_info test.proto -o out.pb + ConfigFile_FILE_DESCRIPTOR_SET_PROTO ConfigFile_FileType = 4 + // Uncompiled Proto file. Used for storage and display purposes only, + // currently server-side compilation is not supported. Should match the + // inputs to 'protoc' command used to generated FILE_DESCRIPTOR_SET_PROTO. A + // file of this type can only be included if at least one file of type + // FILE_DESCRIPTOR_SET_PROTO is included. + ConfigFile_PROTO_FILE ConfigFile_FileType = 6 +) + +var ConfigFile_FileType_name = map[int32]string{ + 0: "FILE_TYPE_UNSPECIFIED", + 1: "SERVICE_CONFIG_YAML", + 2: "OPEN_API_JSON", + 3: "OPEN_API_YAML", + 4: "FILE_DESCRIPTOR_SET_PROTO", + 6: "PROTO_FILE", +} +var ConfigFile_FileType_value = map[string]int32{ + "FILE_TYPE_UNSPECIFIED": 0, + "SERVICE_CONFIG_YAML": 1, + "OPEN_API_JSON": 2, + "OPEN_API_YAML": 3, + "FILE_DESCRIPTOR_SET_PROTO": 4, + "PROTO_FILE": 6, +} + +func (x ConfigFile_FileType) String() string { + return proto.EnumName(ConfigFile_FileType_name, int32(x)) +} +func (ConfigFile_FileType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{4, 0} +} + +// Status of a Rollout. +type Rollout_RolloutStatus int32 + +const ( + // No status specified. + Rollout_ROLLOUT_STATUS_UNSPECIFIED Rollout_RolloutStatus = 0 + // The Rollout is in progress. + Rollout_IN_PROGRESS Rollout_RolloutStatus = 1 + // The Rollout has completed successfully. + Rollout_SUCCESS Rollout_RolloutStatus = 2 + // The Rollout has been cancelled. This can happen if you have overlapping + // Rollout pushes, and the previous ones will be cancelled. + Rollout_CANCELLED Rollout_RolloutStatus = 3 + // The Rollout has failed and the rollback attempt has failed too. + Rollout_FAILED Rollout_RolloutStatus = 4 + // The Rollout has not started yet and is pending for execution. + Rollout_PENDING Rollout_RolloutStatus = 5 + // The Rollout has failed and rolled back to the previous successful + // Rollout. + Rollout_FAILED_ROLLED_BACK Rollout_RolloutStatus = 6 +) + +var Rollout_RolloutStatus_name = map[int32]string{ + 0: "ROLLOUT_STATUS_UNSPECIFIED", + 1: "IN_PROGRESS", + 2: "SUCCESS", + 3: "CANCELLED", + 4: "FAILED", + 5: "PENDING", + 6: "FAILED_ROLLED_BACK", +} +var Rollout_RolloutStatus_value = map[string]int32{ + "ROLLOUT_STATUS_UNSPECIFIED": 0, + "IN_PROGRESS": 1, + "SUCCESS": 2, + "CANCELLED": 3, + "FAILED": 4, + "PENDING": 5, + "FAILED_ROLLED_BACK": 6, +} + +func (x Rollout_RolloutStatus) String() string { + return proto.EnumName(Rollout_RolloutStatus_name, int32(x)) +} +func (Rollout_RolloutStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{7, 0} +} + +// The full representation of a Service that is managed by +// Google Service Management. +type ManagedService struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. 
+ ServiceName string `protobuf:"bytes,2,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // ID of the project that produces and owns this service. + ProducerProjectId string `protobuf:"bytes,3,opt,name=producer_project_id,json=producerProjectId,proto3" json:"producer_project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedService) Reset() { *m = ManagedService{} } +func (m *ManagedService) String() string { return proto.CompactTextString(m) } +func (*ManagedService) ProtoMessage() {} +func (*ManagedService) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{0} +} +func (m *ManagedService) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedService.Unmarshal(m, b) +} +func (m *ManagedService) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedService.Marshal(b, m, deterministic) +} +func (dst *ManagedService) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedService.Merge(dst, src) +} +func (m *ManagedService) XXX_Size() int { + return xxx_messageInfo_ManagedService.Size(m) +} +func (m *ManagedService) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedService.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedService proto.InternalMessageInfo + +func (m *ManagedService) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *ManagedService) GetProducerProjectId() string { + if m != nil { + return m.ProducerProjectId + } + return "" +} + +// The metadata associated with a long running operation resource. +type OperationMetadata struct { + // The full name of the resources that this operation is directly + // associated with. + ResourceNames []string `protobuf:"bytes,1,rep,name=resource_names,json=resourceNames,proto3" json:"resource_names,omitempty"` + // Detailed status information for each step. The order is undetermined. + Steps []*OperationMetadata_Step `protobuf:"bytes,2,rep,name=steps,proto3" json:"steps,omitempty"` + // Percentage of completion of this operation, ranging from 0 to 100. + ProgressPercentage int32 `protobuf:"varint,3,opt,name=progress_percentage,json=progressPercentage,proto3" json:"progress_percentage,omitempty"` + // The start time of the operation. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{1} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetResourceNames() []string { + if m != nil { + return m.ResourceNames + } + return nil +} + +func (m *OperationMetadata) GetSteps() []*OperationMetadata_Step { + if m != nil { + return m.Steps + } + return nil +} + +func (m *OperationMetadata) GetProgressPercentage() int32 { + if m != nil { + return m.ProgressPercentage + } + return 0 +} + +func (m *OperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +// Represents the status of one operation step. +type OperationMetadata_Step struct { + // The short description of the step. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The status code. 
+ Status OperationMetadata_Status `protobuf:"varint,4,opt,name=status,proto3,enum=google.api.servicemanagement.v1.OperationMetadata_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata_Step) Reset() { *m = OperationMetadata_Step{} } +func (m *OperationMetadata_Step) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata_Step) ProtoMessage() {} +func (*OperationMetadata_Step) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{1, 0} +} +func (m *OperationMetadata_Step) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata_Step.Unmarshal(m, b) +} +func (m *OperationMetadata_Step) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata_Step.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata_Step) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata_Step.Merge(dst, src) +} +func (m *OperationMetadata_Step) XXX_Size() int { + return xxx_messageInfo_OperationMetadata_Step.Size(m) +} +func (m *OperationMetadata_Step) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata_Step.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata_Step proto.InternalMessageInfo + +func (m *OperationMetadata_Step) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *OperationMetadata_Step) GetStatus() OperationMetadata_Status { + if m != nil { + return m.Status + } + return OperationMetadata_STATUS_UNSPECIFIED +} + +// Represents a diagnostic message (error or warning) +type Diagnostic struct { + // File name and line number of the error or warning. + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // The kind of diagnostic information provided. + Kind Diagnostic_Kind `protobuf:"varint,2,opt,name=kind,proto3,enum=google.api.servicemanagement.v1.Diagnostic_Kind" json:"kind,omitempty"` + // Message describing the error or warning. 
+ Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Diagnostic) Reset() { *m = Diagnostic{} } +func (m *Diagnostic) String() string { return proto.CompactTextString(m) } +func (*Diagnostic) ProtoMessage() {} +func (*Diagnostic) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{2} +} +func (m *Diagnostic) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Diagnostic.Unmarshal(m, b) +} +func (m *Diagnostic) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Diagnostic.Marshal(b, m, deterministic) +} +func (dst *Diagnostic) XXX_Merge(src proto.Message) { + xxx_messageInfo_Diagnostic.Merge(dst, src) +} +func (m *Diagnostic) XXX_Size() int { + return xxx_messageInfo_Diagnostic.Size(m) +} +func (m *Diagnostic) XXX_DiscardUnknown() { + xxx_messageInfo_Diagnostic.DiscardUnknown(m) +} + +var xxx_messageInfo_Diagnostic proto.InternalMessageInfo + +func (m *Diagnostic) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Diagnostic) GetKind() Diagnostic_Kind { + if m != nil { + return m.Kind + } + return Diagnostic_WARNING +} + +func (m *Diagnostic) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// Represents a source file which is used to generate the service configuration +// defined by `google.api.Service`. +type ConfigSource struct { + // A unique ID for a specific instance of this message, typically assigned + // by the client for tracking purpose. If empty, the server may choose to + // generate one instead. + Id string `protobuf:"bytes,5,opt,name=id,proto3" json:"id,omitempty"` + // Set of source configuration files that are used to generate a service + // configuration (`google.api.Service`). + Files []*ConfigFile `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigSource) Reset() { *m = ConfigSource{} } +func (m *ConfigSource) String() string { return proto.CompactTextString(m) } +func (*ConfigSource) ProtoMessage() {} +func (*ConfigSource) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{3} +} +func (m *ConfigSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigSource.Unmarshal(m, b) +} +func (m *ConfigSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigSource.Marshal(b, m, deterministic) +} +func (dst *ConfigSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigSource.Merge(dst, src) +} +func (m *ConfigSource) XXX_Size() int { + return xxx_messageInfo_ConfigSource.Size(m) +} +func (m *ConfigSource) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigSource proto.InternalMessageInfo + +func (m *ConfigSource) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *ConfigSource) GetFiles() []*ConfigFile { + if m != nil { + return m.Files + } + return nil +} + +// Generic specification of a source configuration file +type ConfigFile struct { + // The file name of the configuration file (full or relative path). + FilePath string `protobuf:"bytes,1,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` + // The bytes that constitute the file. 
+ FileContents []byte `protobuf:"bytes,3,opt,name=file_contents,json=fileContents,proto3" json:"file_contents,omitempty"` + // The type of configuration file this represents. + FileType ConfigFile_FileType `protobuf:"varint,4,opt,name=file_type,json=fileType,proto3,enum=google.api.servicemanagement.v1.ConfigFile_FileType" json:"file_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigFile) Reset() { *m = ConfigFile{} } +func (m *ConfigFile) String() string { return proto.CompactTextString(m) } +func (*ConfigFile) ProtoMessage() {} +func (*ConfigFile) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{4} +} +func (m *ConfigFile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigFile.Unmarshal(m, b) +} +func (m *ConfigFile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigFile.Marshal(b, m, deterministic) +} +func (dst *ConfigFile) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigFile.Merge(dst, src) +} +func (m *ConfigFile) XXX_Size() int { + return xxx_messageInfo_ConfigFile.Size(m) +} +func (m *ConfigFile) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigFile.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigFile proto.InternalMessageInfo + +func (m *ConfigFile) GetFilePath() string { + if m != nil { + return m.FilePath + } + return "" +} + +func (m *ConfigFile) GetFileContents() []byte { + if m != nil { + return m.FileContents + } + return nil +} + +func (m *ConfigFile) GetFileType() ConfigFile_FileType { + if m != nil { + return m.FileType + } + return ConfigFile_FILE_TYPE_UNSPECIFIED +} + +// Represents a service configuration with its name and id. +type ConfigRef struct { + // Resource name of a service config. It must have the following + // format: "services/{service name}/configs/{config id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigRef) Reset() { *m = ConfigRef{} } +func (m *ConfigRef) String() string { return proto.CompactTextString(m) } +func (*ConfigRef) ProtoMessage() {} +func (*ConfigRef) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{5} +} +func (m *ConfigRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigRef.Unmarshal(m, b) +} +func (m *ConfigRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigRef.Marshal(b, m, deterministic) +} +func (dst *ConfigRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigRef.Merge(dst, src) +} +func (m *ConfigRef) XXX_Size() int { + return xxx_messageInfo_ConfigRef.Size(m) +} +func (m *ConfigRef) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigRef.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigRef proto.InternalMessageInfo + +func (m *ConfigRef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Change report associated with a particular service configuration. +// +// It contains a list of ConfigChanges based on the comparison between +// two service configurations. +type ChangeReport struct { + // List of changes between two service configurations. + // The changes will be alphabetically sorted based on the identifier + // of each change. + // A ConfigChange identifier is a dot separated path to the configuration. 
+ // Example: visibility.rules[selector='LibraryService.CreateBook'].restriction + ConfigChanges []*configchange.ConfigChange `protobuf:"bytes,1,rep,name=config_changes,json=configChanges,proto3" json:"config_changes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeReport) Reset() { *m = ChangeReport{} } +func (m *ChangeReport) String() string { return proto.CompactTextString(m) } +func (*ChangeReport) ProtoMessage() {} +func (*ChangeReport) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{6} +} +func (m *ChangeReport) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeReport.Unmarshal(m, b) +} +func (m *ChangeReport) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeReport.Marshal(b, m, deterministic) +} +func (dst *ChangeReport) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeReport.Merge(dst, src) +} +func (m *ChangeReport) XXX_Size() int { + return xxx_messageInfo_ChangeReport.Size(m) +} +func (m *ChangeReport) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeReport.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeReport proto.InternalMessageInfo + +func (m *ChangeReport) GetConfigChanges() []*configchange.ConfigChange { + if m != nil { + return m.ConfigChanges + } + return nil +} + +// A rollout resource that defines how service configuration versions are pushed +// to control plane systems. Typically, you create a new version of the +// service config, and then create a Rollout to push the service config. +type Rollout struct { + // Optional unique identifier of this Rollout. Only lower case letters, digits + // and '-' are allowed. + // + // If not specified by client, the server will generate one. The generated id + // will have the form of , where "date" is the create + // date in ISO 8601 format. "revision number" is a monotonically increasing + // positive number that is reset every day for each service. + // An example of the generated rollout_id is '2016-02-16r1' + RolloutId string `protobuf:"bytes,1,opt,name=rollout_id,json=rolloutId,proto3" json:"rollout_id,omitempty"` + // Creation time of the rollout. Readonly. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The user who created the Rollout. Readonly. + CreatedBy string `protobuf:"bytes,3,opt,name=created_by,json=createdBy,proto3" json:"created_by,omitempty"` + // The status of this rollout. Readonly. In case of a failed rollout, + // the system will automatically rollback to the current Rollout + // version. Readonly. + Status Rollout_RolloutStatus `protobuf:"varint,4,opt,name=status,proto3,enum=google.api.servicemanagement.v1.Rollout_RolloutStatus" json:"status,omitempty"` + // Strategy that defines which versions of service configurations should be + // pushed + // and how they should be used at runtime. + // + // Types that are valid to be assigned to Strategy: + // *Rollout_TrafficPercentStrategy_ + // *Rollout_DeleteServiceStrategy_ + Strategy isRollout_Strategy `protobuf_oneof:"strategy"` + // The name of the service associated with this Rollout. 
+ ServiceName string `protobuf:"bytes,8,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Rollout) Reset() { *m = Rollout{} } +func (m *Rollout) String() string { return proto.CompactTextString(m) } +func (*Rollout) ProtoMessage() {} +func (*Rollout) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{7} +} +func (m *Rollout) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Rollout.Unmarshal(m, b) +} +func (m *Rollout) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Rollout.Marshal(b, m, deterministic) +} +func (dst *Rollout) XXX_Merge(src proto.Message) { + xxx_messageInfo_Rollout.Merge(dst, src) +} +func (m *Rollout) XXX_Size() int { + return xxx_messageInfo_Rollout.Size(m) +} +func (m *Rollout) XXX_DiscardUnknown() { + xxx_messageInfo_Rollout.DiscardUnknown(m) +} + +var xxx_messageInfo_Rollout proto.InternalMessageInfo + +func (m *Rollout) GetRolloutId() string { + if m != nil { + return m.RolloutId + } + return "" +} + +func (m *Rollout) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Rollout) GetCreatedBy() string { + if m != nil { + return m.CreatedBy + } + return "" +} + +func (m *Rollout) GetStatus() Rollout_RolloutStatus { + if m != nil { + return m.Status + } + return Rollout_ROLLOUT_STATUS_UNSPECIFIED +} + +type isRollout_Strategy interface { + isRollout_Strategy() +} + +type Rollout_TrafficPercentStrategy_ struct { + TrafficPercentStrategy *Rollout_TrafficPercentStrategy `protobuf:"bytes,5,opt,name=traffic_percent_strategy,json=trafficPercentStrategy,proto3,oneof"` +} + +type Rollout_DeleteServiceStrategy_ struct { + DeleteServiceStrategy *Rollout_DeleteServiceStrategy `protobuf:"bytes,200,opt,name=delete_service_strategy,json=deleteServiceStrategy,proto3,oneof"` +} + +func (*Rollout_TrafficPercentStrategy_) isRollout_Strategy() {} + +func (*Rollout_DeleteServiceStrategy_) isRollout_Strategy() {} + +func (m *Rollout) GetStrategy() isRollout_Strategy { + if m != nil { + return m.Strategy + } + return nil +} + +func (m *Rollout) GetTrafficPercentStrategy() *Rollout_TrafficPercentStrategy { + if x, ok := m.GetStrategy().(*Rollout_TrafficPercentStrategy_); ok { + return x.TrafficPercentStrategy + } + return nil +} + +func (m *Rollout) GetDeleteServiceStrategy() *Rollout_DeleteServiceStrategy { + if x, ok := m.GetStrategy().(*Rollout_DeleteServiceStrategy_); ok { + return x.DeleteServiceStrategy + } + return nil +} + +func (m *Rollout) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Rollout) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Rollout_OneofMarshaler, _Rollout_OneofUnmarshaler, _Rollout_OneofSizer, []interface{}{ + (*Rollout_TrafficPercentStrategy_)(nil), + (*Rollout_DeleteServiceStrategy_)(nil), + } +} + +func _Rollout_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Rollout) + // strategy + switch x := m.Strategy.(type) { + case *Rollout_TrafficPercentStrategy_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TrafficPercentStrategy); err != nil { + return err + } + case *Rollout_DeleteServiceStrategy_: + b.EncodeVarint(200<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteServiceStrategy); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Rollout.Strategy has unexpected type %T", x) + } + return nil +} + +func _Rollout_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Rollout) + switch tag { + case 5: // strategy.traffic_percent_strategy + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Rollout_TrafficPercentStrategy) + err := b.DecodeMessage(msg) + m.Strategy = &Rollout_TrafficPercentStrategy_{msg} + return true, err + case 200: // strategy.delete_service_strategy + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Rollout_DeleteServiceStrategy) + err := b.DecodeMessage(msg) + m.Strategy = &Rollout_DeleteServiceStrategy_{msg} + return true, err + default: + return false, nil + } +} + +func _Rollout_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Rollout) + // strategy + switch x := m.Strategy.(type) { + case *Rollout_TrafficPercentStrategy_: + s := proto.Size(x.TrafficPercentStrategy) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Rollout_DeleteServiceStrategy_: + s := proto.Size(x.DeleteServiceStrategy) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Strategy that specifies how clients of Google Service Controller want to +// send traffic to use different config versions. This is generally +// used by API proxy to split traffic based on your configured precentage for +// each config version. +// +// One example of how to gradually rollout a new service configuration using +// this +// strategy: +// Day 1 +// +// Rollout { +// id: "example.googleapis.com/rollout_20160206" +// traffic_percent_strategy { +// percentages: { +// "example.googleapis.com/20160201": 70.00 +// "example.googleapis.com/20160206": 30.00 +// } +// } +// } +// +// Day 2 +// +// Rollout { +// id: "example.googleapis.com/rollout_20160207" +// traffic_percent_strategy: { +// percentages: { +// "example.googleapis.com/20160206": 100.00 +// } +// } +// } +type Rollout_TrafficPercentStrategy struct { + // Maps service configuration IDs to their corresponding traffic percentage. + // Key is the service configuration ID, Value is the traffic percentage + // which must be greater than 0.0 and the sum must equal to 100.0. 
+ Percentages map[string]float64 `protobuf:"bytes,1,rep,name=percentages,proto3" json:"percentages,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Rollout_TrafficPercentStrategy) Reset() { *m = Rollout_TrafficPercentStrategy{} } +func (m *Rollout_TrafficPercentStrategy) String() string { return proto.CompactTextString(m) } +func (*Rollout_TrafficPercentStrategy) ProtoMessage() {} +func (*Rollout_TrafficPercentStrategy) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{7, 0} +} +func (m *Rollout_TrafficPercentStrategy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Rollout_TrafficPercentStrategy.Unmarshal(m, b) +} +func (m *Rollout_TrafficPercentStrategy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Rollout_TrafficPercentStrategy.Marshal(b, m, deterministic) +} +func (dst *Rollout_TrafficPercentStrategy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Rollout_TrafficPercentStrategy.Merge(dst, src) +} +func (m *Rollout_TrafficPercentStrategy) XXX_Size() int { + return xxx_messageInfo_Rollout_TrafficPercentStrategy.Size(m) +} +func (m *Rollout_TrafficPercentStrategy) XXX_DiscardUnknown() { + xxx_messageInfo_Rollout_TrafficPercentStrategy.DiscardUnknown(m) +} + +var xxx_messageInfo_Rollout_TrafficPercentStrategy proto.InternalMessageInfo + +func (m *Rollout_TrafficPercentStrategy) GetPercentages() map[string]float64 { + if m != nil { + return m.Percentages + } + return nil +} + +// Strategy used to delete a service. This strategy is a placeholder only +// used by the system generated rollout to delete a service. 
+type Rollout_DeleteServiceStrategy struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Rollout_DeleteServiceStrategy) Reset() { *m = Rollout_DeleteServiceStrategy{} } +func (m *Rollout_DeleteServiceStrategy) String() string { return proto.CompactTextString(m) } +func (*Rollout_DeleteServiceStrategy) ProtoMessage() {} +func (*Rollout_DeleteServiceStrategy) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_7973d4fc0911200f, []int{7, 1} +} +func (m *Rollout_DeleteServiceStrategy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Rollout_DeleteServiceStrategy.Unmarshal(m, b) +} +func (m *Rollout_DeleteServiceStrategy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Rollout_DeleteServiceStrategy.Marshal(b, m, deterministic) +} +func (dst *Rollout_DeleteServiceStrategy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Rollout_DeleteServiceStrategy.Merge(dst, src) +} +func (m *Rollout_DeleteServiceStrategy) XXX_Size() int { + return xxx_messageInfo_Rollout_DeleteServiceStrategy.Size(m) +} +func (m *Rollout_DeleteServiceStrategy) XXX_DiscardUnknown() { + xxx_messageInfo_Rollout_DeleteServiceStrategy.DiscardUnknown(m) +} + +var xxx_messageInfo_Rollout_DeleteServiceStrategy proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ManagedService)(nil), "google.api.servicemanagement.v1.ManagedService") + proto.RegisterType((*OperationMetadata)(nil), "google.api.servicemanagement.v1.OperationMetadata") + proto.RegisterType((*OperationMetadata_Step)(nil), "google.api.servicemanagement.v1.OperationMetadata.Step") + proto.RegisterType((*Diagnostic)(nil), "google.api.servicemanagement.v1.Diagnostic") + proto.RegisterType((*ConfigSource)(nil), "google.api.servicemanagement.v1.ConfigSource") + proto.RegisterType((*ConfigFile)(nil), "google.api.servicemanagement.v1.ConfigFile") + proto.RegisterType((*ConfigRef)(nil), "google.api.servicemanagement.v1.ConfigRef") + proto.RegisterType((*ChangeReport)(nil), "google.api.servicemanagement.v1.ChangeReport") + proto.RegisterType((*Rollout)(nil), "google.api.servicemanagement.v1.Rollout") + proto.RegisterType((*Rollout_TrafficPercentStrategy)(nil), "google.api.servicemanagement.v1.Rollout.TrafficPercentStrategy") + proto.RegisterMapType((map[string]float64)(nil), "google.api.servicemanagement.v1.Rollout.TrafficPercentStrategy.PercentagesEntry") + proto.RegisterType((*Rollout_DeleteServiceStrategy)(nil), "google.api.servicemanagement.v1.Rollout.DeleteServiceStrategy") + proto.RegisterEnum("google.api.servicemanagement.v1.OperationMetadata_Status", OperationMetadata_Status_name, OperationMetadata_Status_value) + proto.RegisterEnum("google.api.servicemanagement.v1.Diagnostic_Kind", Diagnostic_Kind_name, Diagnostic_Kind_value) + proto.RegisterEnum("google.api.servicemanagement.v1.ConfigFile_FileType", ConfigFile_FileType_name, ConfigFile_FileType_value) + proto.RegisterEnum("google.api.servicemanagement.v1.Rollout_RolloutStatus", Rollout_RolloutStatus_name, Rollout_RolloutStatus_value) +} + +func init() { + proto.RegisterFile("google/api/servicemanagement/v1/resources.proto", fileDescriptor_resources_7973d4fc0911200f) +} + +var fileDescriptor_resources_7973d4fc0911200f = []byte{ + // 1234 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xef, 0x8e, 0xdb, 0x44, + 0x10, 0xaf, 0xf3, 0xef, 0x2e, 0x93, 0xbb, 0xe0, 0x6e, 0x69, 0x2f, 0x0d, 0xfd, 0x73, 0x4d, 0x85, + 
0x74, 0x12, 0x92, 0xc3, 0x1d, 0x08, 0x28, 0x95, 0x5a, 0xe5, 0x1c, 0xdf, 0x11, 0x7a, 0x67, 0xbb, + 0xeb, 0x5c, 0x51, 0x51, 0x25, 0x6b, 0x6b, 0x6f, 0x5c, 0xd3, 0xc4, 0xb6, 0xec, 0xcd, 0x49, 0x51, + 0x9f, 0x81, 0x4f, 0xbc, 0x01, 0x9f, 0x10, 0x2f, 0xc0, 0x2b, 0x20, 0xc4, 0x03, 0x20, 0xf1, 0x18, + 0xbc, 0x00, 0xda, 0xf5, 0xba, 0x97, 0x3f, 0x87, 0x52, 0xe0, 0x4b, 0xb2, 0xf3, 0xfb, 0xcd, 0xce, + 0xcc, 0xce, 0xce, 0xce, 0x18, 0xba, 0x41, 0x1c, 0x07, 0x63, 0xda, 0x25, 0x49, 0xd8, 0xcd, 0x68, + 0x7a, 0x1e, 0x7a, 0x74, 0x42, 0x22, 0x12, 0xd0, 0x09, 0x8d, 0x58, 0xf7, 0x7c, 0xbf, 0x9b, 0xd2, + 0x2c, 0x9e, 0xa6, 0x1e, 0xcd, 0xb4, 0x24, 0x8d, 0x59, 0x8c, 0xee, 0xe6, 0x1b, 0x34, 0x92, 0x84, + 0xda, 0xca, 0x06, 0xed, 0x7c, 0xbf, 0x7d, 0x6b, 0xce, 0x22, 0x89, 0xa2, 0x98, 0x11, 0x16, 0xc6, + 0x91, 0xdc, 0xde, 0xbe, 0x33, 0xc7, 0x7a, 0x71, 0x34, 0x0a, 0x03, 0xd7, 0x7b, 0x45, 0xa2, 0x80, + 0x4a, 0x7e, 0x67, 0x8e, 0x9f, 0x50, 0x96, 0x86, 0x9e, 0x24, 0x5a, 0xab, 0x81, 0x4a, 0xe6, 0xbe, + 0x64, 0xc6, 0x71, 0x14, 0xa4, 0xd3, 0x28, 0x0a, 0xa3, 0xa0, 0x1b, 0x27, 0x34, 0x5d, 0xf0, 0x7b, + 0x53, 0x2a, 0x09, 0xe9, 0xe5, 0x74, 0xd4, 0x25, 0xd1, 0x4c, 0x52, 0xbb, 0xcb, 0xd4, 0x28, 0xa4, + 0x63, 0xdf, 0x9d, 0x90, 0xec, 0xb5, 0xd4, 0xb8, 0xb5, 0xac, 0x91, 0xb1, 0x74, 0xea, 0x31, 0xc9, + 0xde, 0x5d, 0x66, 0x59, 0x38, 0xa1, 0x19, 0x23, 0x93, 0x64, 0xe9, 0x4c, 0x69, 0xe2, 0x75, 0x33, + 0x46, 0xd8, 0x54, 0x06, 0xd5, 0xf1, 0xa0, 0x79, 0x2a, 0x72, 0xe7, 0x3b, 0xf9, 0x89, 0xd0, 0x3d, + 0xd8, 0x92, 0x87, 0x73, 0x23, 0x32, 0xa1, 0xad, 0xd2, 0xae, 0xb2, 0x57, 0xc7, 0x0d, 0x89, 0x99, + 0x64, 0x42, 0x91, 0x06, 0xd7, 0x92, 0x34, 0xf6, 0xa7, 0x1e, 0x4d, 0xdd, 0x24, 0x8d, 0xbf, 0xa3, + 0x1e, 0x73, 0x43, 0xbf, 0x55, 0x16, 0x9a, 0x57, 0x0b, 0xca, 0xce, 0x99, 0x81, 0xdf, 0xf9, 0xb3, + 0x0c, 0x57, 0xad, 0x22, 0x1d, 0xa7, 0x94, 0x11, 0x9f, 0x30, 0x82, 0x3e, 0x84, 0x66, 0x71, 0xb3, + 0xc2, 0x53, 0xd6, 0x52, 0x76, 0xcb, 0x7b, 0x75, 0xbc, 0x5d, 0xa0, 0xdc, 0x57, 0x86, 0x4e, 0xa1, + 0x9a, 0x31, 0x9a, 0x64, 0xad, 0xd2, 0x6e, 0x79, 0xaf, 0x71, 0xf0, 0xb9, 0xb6, 0xe6, 0xf6, 0xb5, + 0x15, 0x4f, 0x9a, 0xc3, 0x68, 0x82, 0x73, 0x2b, 0xa8, 0x2b, 0x62, 0x0f, 0x52, 0x9a, 0x65, 0x6e, + 0x42, 0x53, 0x8f, 0x46, 0x8c, 0x04, 0x54, 0xc4, 0x5e, 0xc5, 0xa8, 0xa0, 0xec, 0xb7, 0x0c, 0x7a, + 0x00, 0x90, 0x31, 0x92, 0x32, 0x97, 0xe7, 0xb4, 0x55, 0xd9, 0x55, 0xf6, 0x1a, 0x07, 0xed, 0x22, + 0x88, 0x22, 0xe1, 0xda, 0xb0, 0x48, 0x38, 0xae, 0x0b, 0x6d, 0x2e, 0xb7, 0xdf, 0x40, 0x85, 0xbb, + 0x46, 0xbb, 0xd0, 0xf0, 0x69, 0xe6, 0xa5, 0x61, 0xc2, 0xc3, 0x2a, 0x32, 0x3a, 0x07, 0xa1, 0xa7, + 0x50, 0xcb, 0xaf, 0x45, 0x38, 0x68, 0x1e, 0x3c, 0xf8, 0x4f, 0xa7, 0xe4, 0x06, 0xb0, 0x34, 0xd4, + 0x09, 0xa0, 0x96, 0x23, 0xe8, 0x06, 0x20, 0x67, 0xd8, 0x1b, 0x9e, 0x39, 0xee, 0x99, 0xe9, 0xd8, + 0x86, 0x3e, 0x38, 0x1a, 0x18, 0x7d, 0xf5, 0x0a, 0xda, 0x84, 0x4a, 0xdf, 0x32, 0x0d, 0x55, 0x41, + 0xef, 0x41, 0xc3, 0xb4, 0x86, 0xae, 0x33, 0xec, 0xe1, 0xa1, 0xd1, 0x57, 0x4b, 0x1c, 0x18, 0x98, + 0xae, 0x8d, 0xad, 0x63, 0x6c, 0x38, 0x8e, 0x5a, 0x46, 0x00, 0xb5, 0xa3, 0xde, 0xe0, 0xc4, 0xe8, + 0xab, 0x15, 0xb4, 0x0d, 0x75, 0xbd, 0x67, 0xea, 0xc6, 0x09, 0x17, 0xab, 0x9d, 0x9f, 0x14, 0x80, + 0x7e, 0x48, 0x82, 0x28, 0xce, 0x58, 0xe8, 0xa1, 0x36, 0x6c, 0x8e, 0x63, 0x4f, 0x84, 0xd6, 0x52, + 0xc4, 0x49, 0xdf, 0xca, 0xa8, 0x0f, 0x95, 0xd7, 0x61, 0xe4, 0x8b, 0x0c, 0x34, 0x0f, 0x3e, 0x5e, + 0x7b, 0xc8, 0x0b, 0xb3, 0xda, 0x93, 0x30, 0xf2, 0xb1, 0xd8, 0x8d, 0x5a, 0xb0, 0x31, 0xa1, 0x59, + 0x56, 0x5c, 0x5b, 0x1d, 0x17, 0x62, 0xe7, 0x0e, 0x54, 0xb8, 0x1e, 0x6a, 0xc0, 0xc6, 0x37, 0x3d, + 0x6c, 0x0e, 0xcc, 0x63, 
0xf5, 0x0a, 0xaa, 0x43, 0xd5, 0xc0, 0xd8, 0xc2, 0xaa, 0xd2, 0x21, 0xb0, + 0xa5, 0x8b, 0x17, 0xef, 0x88, 0x02, 0x43, 0x4d, 0x28, 0x85, 0x7e, 0xab, 0x2a, 0x8c, 0x94, 0x42, + 0x1f, 0xf5, 0xa0, 0x3a, 0x0a, 0xc7, 0xb4, 0xa8, 0xb5, 0x8f, 0xd6, 0x06, 0x98, 0x5b, 0x3b, 0x0a, + 0xc7, 0x14, 0xe7, 0x3b, 0x3b, 0xbf, 0x94, 0x00, 0x2e, 0x50, 0xf4, 0x01, 0xd4, 0x39, 0xee, 0x26, + 0x84, 0xbd, 0x2a, 0xd2, 0xc1, 0x01, 0x9b, 0xb0, 0x57, 0xe8, 0x3e, 0x6c, 0x0b, 0xd2, 0x8b, 0x23, + 0x46, 0x23, 0x96, 0x89, 0xe3, 0x6c, 0xe1, 0x2d, 0x0e, 0xea, 0x12, 0x43, 0x4f, 0xa5, 0x05, 0x36, + 0x4b, 0xa8, 0xac, 0x8e, 0x4f, 0xff, 0x45, 0x5c, 0x1a, 0xff, 0x19, 0xce, 0x12, 0x9a, 0xfb, 0xe5, + 0xab, 0xce, 0x0f, 0x0a, 0x6c, 0x16, 0x30, 0xba, 0x09, 0xd7, 0x8f, 0x06, 0x27, 0x86, 0x3b, 0x7c, + 0x6e, 0x1b, 0x4b, 0x05, 0xb2, 0x03, 0xd7, 0x1c, 0x03, 0x3f, 0x1b, 0xe8, 0x86, 0xab, 0x5b, 0xe6, + 0xd1, 0xe0, 0xd8, 0x7d, 0xde, 0x3b, 0x3d, 0x51, 0x15, 0x74, 0x15, 0xb6, 0x2d, 0xdb, 0x30, 0xdd, + 0x9e, 0x3d, 0x70, 0xbf, 0x76, 0x2c, 0x53, 0x2d, 0x2d, 0x40, 0x42, 0xab, 0x8c, 0x6e, 0xc3, 0x4d, + 0x61, 0xb9, 0x6f, 0x38, 0x3a, 0x1e, 0xd8, 0x43, 0x0b, 0xbb, 0x8e, 0x31, 0xe4, 0x55, 0x35, 0xb4, + 0xd4, 0x0a, 0x6a, 0x02, 0x88, 0xa5, 0xcb, 0x95, 0xd4, 0x5a, 0xe7, 0x2e, 0xd4, 0xf3, 0xb0, 0x31, + 0x1d, 0x21, 0x04, 0x15, 0xd1, 0x7d, 0xf2, 0x94, 0x89, 0x75, 0xc7, 0x82, 0x2d, 0x5d, 0x34, 0x6a, + 0x4c, 0x93, 0x38, 0x65, 0xe8, 0x31, 0x34, 0x17, 0xfa, 0x77, 0xde, 0x40, 0x1a, 0x07, 0xad, 0xf9, + 0xf4, 0xe4, 0x26, 0xe5, 0xbe, 0x6d, 0x6f, 0x4e, 0xca, 0x3a, 0x7f, 0xd5, 0x60, 0x03, 0xc7, 0xe3, + 0x71, 0x3c, 0x65, 0xe8, 0x36, 0x40, 0x9a, 0x2f, 0x79, 0x2b, 0xcb, 0xdd, 0xd6, 0x25, 0x32, 0xf0, + 0xd1, 0x43, 0x68, 0x78, 0x29, 0x25, 0x8c, 0xe6, 0x6d, 0xa0, 0xb4, 0xb6, 0x0d, 0x40, 0xae, 0xce, + 0x01, 0x6e, 0x3b, 0x97, 0x7c, 0xf7, 0xe5, 0x4c, 0xd6, 0x6c, 0x5d, 0x22, 0x87, 0x33, 0x64, 0x2e, + 0x3d, 0xfe, 0xcf, 0xd6, 0x5e, 0xaf, 0x0c, 0xba, 0xf8, 0x5f, 0x7c, 0xf9, 0xe8, 0x0d, 0xb4, 0x58, + 0x4a, 0x46, 0xa3, 0xd0, 0x2b, 0x3a, 0x9c, 0x9b, 0xb1, 0x94, 0x30, 0x1a, 0xcc, 0x44, 0xad, 0x37, + 0x0e, 0x1e, 0xbf, 0xb3, 0x87, 0x61, 0x6e, 0x48, 0xf6, 0x43, 0x47, 0x9a, 0xf9, 0xea, 0x0a, 0xbe, + 0xc1, 0x2e, 0x65, 0xd0, 0x0c, 0x76, 0x7c, 0x3a, 0xa6, 0x8c, 0xba, 0xc5, 0x14, 0x79, 0xeb, 0xfb, + 0x57, 0x45, 0x38, 0x7f, 0xf4, 0xce, 0xce, 0xfb, 0xc2, 0x90, 0x1c, 0x4c, 0x73, 0xbe, 0xaf, 0xfb, + 0x97, 0x11, 0x2b, 0x93, 0x6b, 0x73, 0x65, 0x72, 0xb5, 0x7f, 0x57, 0xe0, 0xc6, 0xe5, 0x47, 0x42, + 0x29, 0x34, 0x2e, 0xe6, 0x41, 0x51, 0x4a, 0xf6, 0xff, 0x4c, 0x94, 0x76, 0x31, 0x48, 0x32, 0x23, + 0x62, 0xe9, 0x0c, 0xcf, 0x3b, 0x69, 0x3f, 0x02, 0x75, 0x59, 0x01, 0xa9, 0x50, 0x7e, 0x4d, 0x67, + 0xb2, 0x02, 0xf9, 0x12, 0xbd, 0x0f, 0xd5, 0x73, 0x32, 0x9e, 0xe6, 0x55, 0xa7, 0xe0, 0x5c, 0xf8, + 0xb2, 0xf4, 0x85, 0xd2, 0xde, 0x81, 0xeb, 0x97, 0xe6, 0xa8, 0xf3, 0xbd, 0x02, 0xdb, 0x0b, 0xc5, + 0x81, 0xee, 0x40, 0x1b, 0x5b, 0x27, 0x27, 0xd6, 0x99, 0x68, 0xf3, 0xab, 0xc3, 0x60, 0xa9, 0xe3, + 0x2b, 0xbc, 0x87, 0x3a, 0x67, 0xba, 0xce, 0x85, 0xd2, 0x62, 0xcb, 0x5f, 0x9c, 0x06, 0x0d, 0xd8, + 0xb0, 0x0d, 0xb3, 0xcf, 0x7b, 0x6d, 0x95, 0x8f, 0x9a, 0x9c, 0x70, 0xb9, 0x33, 0xa3, 0xef, 0x1e, + 0xf6, 0xf4, 0x27, 0x6a, 0xed, 0x10, 0x60, 0xb3, 0x28, 0x83, 0xc3, 0x3f, 0x14, 0xb8, 0xef, 0xc5, + 0x93, 0x75, 0x99, 0x3d, 0x6c, 0xe2, 0xe2, 0xbb, 0xcf, 0xe6, 0xcf, 0xcb, 0x56, 0xbe, 0xb5, 0xe5, + 0x96, 0x20, 0x1e, 0x93, 0x28, 0xd0, 0xe2, 0x34, 0xe8, 0x06, 0x34, 0x12, 0x8f, 0x4f, 0x7e, 0x44, + 0x92, 0x24, 0xcc, 0xfe, 0xf1, 0x43, 0xf2, 0xe1, 0x0a, 0xf8, 0x63, 0xa9, 0x72, 0xdc, 0x73, 0x4e, + 0x7f, 0x2e, 0xdd, 0x3b, 0xce, 0x2d, 0xeb, 0xe3, 
0x78, 0xea, 0x6b, 0x32, 0x9b, 0xa7, 0x17, 0xe1, + 0x3c, 0xdb, 0xff, 0xad, 0xd0, 0x79, 0x21, 0x74, 0x5e, 0xac, 0xe8, 0xbc, 0x78, 0xb6, 0xff, 0xb2, + 0x26, 0x62, 0xf9, 0xe4, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x67, 0x13, 0xc5, 0x22, 0xd3, 0x0a, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/servicemanager.pb.go b/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/servicemanager.pb.go new file mode 100644 index 0000000..45729df --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/api/servicemanagement/v1/servicemanager.pb.go @@ -0,0 +1,2015 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/api/servicemanagement/v1/servicemanager.proto + +package servicemanagement // import "google.golang.org/genproto/googleapis/api/servicemanagement/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import serviceconfig "google.golang.org/genproto/googleapis/api/serviceconfig" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/googleapis/rpc/status" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type GetServiceConfigRequest_ConfigView int32 + +const ( + // Server response includes all fields except SourceInfo. + GetServiceConfigRequest_BASIC GetServiceConfigRequest_ConfigView = 0 + // Server response includes all fields including SourceInfo. + // SourceFiles are of type 'google.api.servicemanagement.v1.ConfigFile' + // and are only available for configs created using the + // SubmitConfigSource method. + GetServiceConfigRequest_FULL GetServiceConfigRequest_ConfigView = 1 +) + +var GetServiceConfigRequest_ConfigView_name = map[int32]string{ + 0: "BASIC", + 1: "FULL", +} +var GetServiceConfigRequest_ConfigView_value = map[string]int32{ + "BASIC": 0, + "FULL": 1, +} + +func (x GetServiceConfigRequest_ConfigView) String() string { + return proto.EnumName(GetServiceConfigRequest_ConfigView_name, int32(x)) +} +func (GetServiceConfigRequest_ConfigView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{7, 0} +} + +// Request message for `ListServices` method. +type ListServicesRequest struct { + // Include services produced by the specified project. + ProducerProjectId string `protobuf:"bytes,1,opt,name=producer_project_id,json=producerProjectId,proto3" json:"producer_project_id,omitempty"` + // Requested size of the next page of data. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token identifying which result to start with; returned by a previous list + // call. 
+ PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Include services consumed by the specified consumer. + // + // The Google Service Management implementation accepts the following + // forms: + // - project: + ConsumerId string `protobuf:"bytes,7,opt,name=consumer_id,json=consumerId,proto3" json:"consumer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServicesRequest) Reset() { *m = ListServicesRequest{} } +func (m *ListServicesRequest) String() string { return proto.CompactTextString(m) } +func (*ListServicesRequest) ProtoMessage() {} +func (*ListServicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{0} +} +func (m *ListServicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServicesRequest.Unmarshal(m, b) +} +func (m *ListServicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServicesRequest.Marshal(b, m, deterministic) +} +func (dst *ListServicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServicesRequest.Merge(dst, src) +} +func (m *ListServicesRequest) XXX_Size() int { + return xxx_messageInfo_ListServicesRequest.Size(m) +} +func (m *ListServicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServicesRequest proto.InternalMessageInfo + +func (m *ListServicesRequest) GetProducerProjectId() string { + if m != nil { + return m.ProducerProjectId + } + return "" +} + +func (m *ListServicesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListServicesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListServicesRequest) GetConsumerId() string { + if m != nil { + return m.ConsumerId + } + return "" +} + +// Response message for `ListServices` method. +type ListServicesResponse struct { + // The returned services will only have the name field set. + Services []*ManagedService `protobuf:"bytes,1,rep,name=services,proto3" json:"services,omitempty"` + // Token that can be passed to `ListServices` to resume a paginated query. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServicesResponse) Reset() { *m = ListServicesResponse{} } +func (m *ListServicesResponse) String() string { return proto.CompactTextString(m) } +func (*ListServicesResponse) ProtoMessage() {} +func (*ListServicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{1} +} +func (m *ListServicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServicesResponse.Unmarshal(m, b) +} +func (m *ListServicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServicesResponse.Marshal(b, m, deterministic) +} +func (dst *ListServicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServicesResponse.Merge(dst, src) +} +func (m *ListServicesResponse) XXX_Size() int { + return xxx_messageInfo_ListServicesResponse.Size(m) +} +func (m *ListServicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServicesResponse proto.InternalMessageInfo + +func (m *ListServicesResponse) GetServices() []*ManagedService { + if m != nil { + return m.Services + } + return nil +} + +func (m *ListServicesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `GetService` method. +type GetServiceRequest struct { + // The name of the service. See the `ServiceManager` overview for naming + // requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceRequest) Reset() { *m = GetServiceRequest{} } +func (m *GetServiceRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceRequest) ProtoMessage() {} +func (*GetServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{2} +} +func (m *GetServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceRequest.Unmarshal(m, b) +} +func (m *GetServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceRequest.Merge(dst, src) +} +func (m *GetServiceRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceRequest.Size(m) +} +func (m *GetServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceRequest proto.InternalMessageInfo + +func (m *GetServiceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +// Request message for CreateService method. +type CreateServiceRequest struct { + // Initial values for the service resource. 
+ Service *ManagedService `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateServiceRequest) Reset() { *m = CreateServiceRequest{} } +func (m *CreateServiceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateServiceRequest) ProtoMessage() {} +func (*CreateServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{3} +} +func (m *CreateServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateServiceRequest.Unmarshal(m, b) +} +func (m *CreateServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateServiceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateServiceRequest.Merge(dst, src) +} +func (m *CreateServiceRequest) XXX_Size() int { + return xxx_messageInfo_CreateServiceRequest.Size(m) +} +func (m *CreateServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateServiceRequest proto.InternalMessageInfo + +func (m *CreateServiceRequest) GetService() *ManagedService { + if m != nil { + return m.Service + } + return nil +} + +// Request message for DeleteService method. +type DeleteServiceRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteServiceRequest) Reset() { *m = DeleteServiceRequest{} } +func (m *DeleteServiceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteServiceRequest) ProtoMessage() {} +func (*DeleteServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{4} +} +func (m *DeleteServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteServiceRequest.Unmarshal(m, b) +} +func (m *DeleteServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteServiceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteServiceRequest.Merge(dst, src) +} +func (m *DeleteServiceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteServiceRequest.Size(m) +} +func (m *DeleteServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteServiceRequest proto.InternalMessageInfo + +func (m *DeleteServiceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +// Request message for UndeleteService method. +type UndeleteServiceRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. 
+ ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteServiceRequest) Reset() { *m = UndeleteServiceRequest{} } +func (m *UndeleteServiceRequest) String() string { return proto.CompactTextString(m) } +func (*UndeleteServiceRequest) ProtoMessage() {} +func (*UndeleteServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{5} +} +func (m *UndeleteServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteServiceRequest.Unmarshal(m, b) +} +func (m *UndeleteServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteServiceRequest.Marshal(b, m, deterministic) +} +func (dst *UndeleteServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteServiceRequest.Merge(dst, src) +} +func (m *UndeleteServiceRequest) XXX_Size() int { + return xxx_messageInfo_UndeleteServiceRequest.Size(m) +} +func (m *UndeleteServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteServiceRequest proto.InternalMessageInfo + +func (m *UndeleteServiceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +// Response message for UndeleteService method. +type UndeleteServiceResponse struct { + // Revived service resource. + Service *ManagedService `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteServiceResponse) Reset() { *m = UndeleteServiceResponse{} } +func (m *UndeleteServiceResponse) String() string { return proto.CompactTextString(m) } +func (*UndeleteServiceResponse) ProtoMessage() {} +func (*UndeleteServiceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{6} +} +func (m *UndeleteServiceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteServiceResponse.Unmarshal(m, b) +} +func (m *UndeleteServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteServiceResponse.Marshal(b, m, deterministic) +} +func (dst *UndeleteServiceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteServiceResponse.Merge(dst, src) +} +func (m *UndeleteServiceResponse) XXX_Size() int { + return xxx_messageInfo_UndeleteServiceResponse.Size(m) +} +func (m *UndeleteServiceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteServiceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteServiceResponse proto.InternalMessageInfo + +func (m *UndeleteServiceResponse) GetService() *ManagedService { + if m != nil { + return m.Service + } + return nil +} + +// Request message for GetServiceConfig method. +type GetServiceConfigRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The id of the service configuration resource. 
+ ConfigId string `protobuf:"bytes,2,opt,name=config_id,json=configId,proto3" json:"config_id,omitempty"` + // Specifies which parts of the Service Config should be returned in the + // response. + View GetServiceConfigRequest_ConfigView `protobuf:"varint,3,opt,name=view,proto3,enum=google.api.servicemanagement.v1.GetServiceConfigRequest_ConfigView" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceConfigRequest) Reset() { *m = GetServiceConfigRequest{} } +func (m *GetServiceConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceConfigRequest) ProtoMessage() {} +func (*GetServiceConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{7} +} +func (m *GetServiceConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceConfigRequest.Unmarshal(m, b) +} +func (m *GetServiceConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceConfigRequest.Merge(dst, src) +} +func (m *GetServiceConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceConfigRequest.Size(m) +} +func (m *GetServiceConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceConfigRequest proto.InternalMessageInfo + +func (m *GetServiceConfigRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *GetServiceConfigRequest) GetConfigId() string { + if m != nil { + return m.ConfigId + } + return "" +} + +func (m *GetServiceConfigRequest) GetView() GetServiceConfigRequest_ConfigView { + if m != nil { + return m.View + } + return GetServiceConfigRequest_BASIC +} + +// Request message for ListServiceConfigs method. +type ListServiceConfigsRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The token of the page to retrieve. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The max number of items to include in the response list. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceConfigsRequest) Reset() { *m = ListServiceConfigsRequest{} } +func (m *ListServiceConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListServiceConfigsRequest) ProtoMessage() {} +func (*ListServiceConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{8} +} +func (m *ListServiceConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceConfigsRequest.Unmarshal(m, b) +} +func (m *ListServiceConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListServiceConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceConfigsRequest.Merge(dst, src) +} +func (m *ListServiceConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListServiceConfigsRequest.Size(m) +} +func (m *ListServiceConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceConfigsRequest proto.InternalMessageInfo + +func (m *ListServiceConfigsRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *ListServiceConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListServiceConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for ListServiceConfigs method. +type ListServiceConfigsResponse struct { + // The list of service configuration resources. + ServiceConfigs []*serviceconfig.Service `protobuf:"bytes,1,rep,name=service_configs,json=serviceConfigs,proto3" json:"service_configs,omitempty"` + // The token of the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceConfigsResponse) Reset() { *m = ListServiceConfigsResponse{} } +func (m *ListServiceConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListServiceConfigsResponse) ProtoMessage() {} +func (*ListServiceConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{9} +} +func (m *ListServiceConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceConfigsResponse.Unmarshal(m, b) +} +func (m *ListServiceConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListServiceConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceConfigsResponse.Merge(dst, src) +} +func (m *ListServiceConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListServiceConfigsResponse.Size(m) +} +func (m *ListServiceConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceConfigsResponse proto.InternalMessageInfo + +func (m *ListServiceConfigsResponse) GetServiceConfigs() []*serviceconfig.Service { + if m != nil { + return m.ServiceConfigs + } + return nil +} + +func (m *ListServiceConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for CreateServiceConfig method. +type CreateServiceConfigRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The service configuration resource. 
+ ServiceConfig *serviceconfig.Service `protobuf:"bytes,2,opt,name=service_config,json=serviceConfig,proto3" json:"service_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateServiceConfigRequest) Reset() { *m = CreateServiceConfigRequest{} } +func (m *CreateServiceConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateServiceConfigRequest) ProtoMessage() {} +func (*CreateServiceConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{10} +} +func (m *CreateServiceConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateServiceConfigRequest.Unmarshal(m, b) +} +func (m *CreateServiceConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateServiceConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateServiceConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateServiceConfigRequest.Merge(dst, src) +} +func (m *CreateServiceConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateServiceConfigRequest.Size(m) +} +func (m *CreateServiceConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateServiceConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateServiceConfigRequest proto.InternalMessageInfo + +func (m *CreateServiceConfigRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *CreateServiceConfigRequest) GetServiceConfig() *serviceconfig.Service { + if m != nil { + return m.ServiceConfig + } + return nil +} + +// Request message for SubmitConfigSource method. +type SubmitConfigSourceRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The source configuration for the service. + ConfigSource *ConfigSource `protobuf:"bytes,2,opt,name=config_source,json=configSource,proto3" json:"config_source,omitempty"` + // Optional. If set, this will result in the generation of a + // `google.api.Service` configuration based on the `ConfigSource` provided, + // but the generated config and the sources will NOT be persisted. 
+ ValidateOnly bool `protobuf:"varint,3,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubmitConfigSourceRequest) Reset() { *m = SubmitConfigSourceRequest{} } +func (m *SubmitConfigSourceRequest) String() string { return proto.CompactTextString(m) } +func (*SubmitConfigSourceRequest) ProtoMessage() {} +func (*SubmitConfigSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{11} +} +func (m *SubmitConfigSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubmitConfigSourceRequest.Unmarshal(m, b) +} +func (m *SubmitConfigSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubmitConfigSourceRequest.Marshal(b, m, deterministic) +} +func (dst *SubmitConfigSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubmitConfigSourceRequest.Merge(dst, src) +} +func (m *SubmitConfigSourceRequest) XXX_Size() int { + return xxx_messageInfo_SubmitConfigSourceRequest.Size(m) +} +func (m *SubmitConfigSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SubmitConfigSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SubmitConfigSourceRequest proto.InternalMessageInfo + +func (m *SubmitConfigSourceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *SubmitConfigSourceRequest) GetConfigSource() *ConfigSource { + if m != nil { + return m.ConfigSource + } + return nil +} + +func (m *SubmitConfigSourceRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// Response message for SubmitConfigSource method. +type SubmitConfigSourceResponse struct { + // The generated service configuration. 
+ ServiceConfig *serviceconfig.Service `protobuf:"bytes,1,opt,name=service_config,json=serviceConfig,proto3" json:"service_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubmitConfigSourceResponse) Reset() { *m = SubmitConfigSourceResponse{} } +func (m *SubmitConfigSourceResponse) String() string { return proto.CompactTextString(m) } +func (*SubmitConfigSourceResponse) ProtoMessage() {} +func (*SubmitConfigSourceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{12} +} +func (m *SubmitConfigSourceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubmitConfigSourceResponse.Unmarshal(m, b) +} +func (m *SubmitConfigSourceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubmitConfigSourceResponse.Marshal(b, m, deterministic) +} +func (dst *SubmitConfigSourceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubmitConfigSourceResponse.Merge(dst, src) +} +func (m *SubmitConfigSourceResponse) XXX_Size() int { + return xxx_messageInfo_SubmitConfigSourceResponse.Size(m) +} +func (m *SubmitConfigSourceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SubmitConfigSourceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SubmitConfigSourceResponse proto.InternalMessageInfo + +func (m *SubmitConfigSourceResponse) GetServiceConfig() *serviceconfig.Service { + if m != nil { + return m.ServiceConfig + } + return nil +} + +// Request message for 'CreateServiceRollout' +type CreateServiceRolloutRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The rollout resource. The `service_name` field is output only. 
+ Rollout *Rollout `protobuf:"bytes,2,opt,name=rollout,proto3" json:"rollout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateServiceRolloutRequest) Reset() { *m = CreateServiceRolloutRequest{} } +func (m *CreateServiceRolloutRequest) String() string { return proto.CompactTextString(m) } +func (*CreateServiceRolloutRequest) ProtoMessage() {} +func (*CreateServiceRolloutRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{13} +} +func (m *CreateServiceRolloutRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateServiceRolloutRequest.Unmarshal(m, b) +} +func (m *CreateServiceRolloutRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateServiceRolloutRequest.Marshal(b, m, deterministic) +} +func (dst *CreateServiceRolloutRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateServiceRolloutRequest.Merge(dst, src) +} +func (m *CreateServiceRolloutRequest) XXX_Size() int { + return xxx_messageInfo_CreateServiceRolloutRequest.Size(m) +} +func (m *CreateServiceRolloutRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateServiceRolloutRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateServiceRolloutRequest proto.InternalMessageInfo + +func (m *CreateServiceRolloutRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *CreateServiceRolloutRequest) GetRollout() *Rollout { + if m != nil { + return m.Rollout + } + return nil +} + +// Request message for 'ListServiceRollouts' +type ListServiceRolloutsRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The token of the page to retrieve. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The max number of items to include in the response list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Use `filter` to return subset of rollouts. 
+ // The following filters are supported: + // -- To limit the results to only those in + // [status](google.api.servicemanagement.v1.RolloutStatus) 'SUCCESS', + // use filter='status=SUCCESS' + // -- To limit the results to those in + // [status](google.api.servicemanagement.v1.RolloutStatus) 'CANCELLED' + // or 'FAILED', use filter='status=CANCELLED OR status=FAILED' + Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceRolloutsRequest) Reset() { *m = ListServiceRolloutsRequest{} } +func (m *ListServiceRolloutsRequest) String() string { return proto.CompactTextString(m) } +func (*ListServiceRolloutsRequest) ProtoMessage() {} +func (*ListServiceRolloutsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{14} +} +func (m *ListServiceRolloutsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceRolloutsRequest.Unmarshal(m, b) +} +func (m *ListServiceRolloutsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceRolloutsRequest.Marshal(b, m, deterministic) +} +func (dst *ListServiceRolloutsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceRolloutsRequest.Merge(dst, src) +} +func (m *ListServiceRolloutsRequest) XXX_Size() int { + return xxx_messageInfo_ListServiceRolloutsRequest.Size(m) +} +func (m *ListServiceRolloutsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceRolloutsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceRolloutsRequest proto.InternalMessageInfo + +func (m *ListServiceRolloutsRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *ListServiceRolloutsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListServiceRolloutsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListServiceRolloutsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response message for ListServiceRollouts method. +type ListServiceRolloutsResponse struct { + // The list of rollout resources. + Rollouts []*Rollout `protobuf:"bytes,1,rep,name=rollouts,proto3" json:"rollouts,omitempty"` + // The token of the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceRolloutsResponse) Reset() { *m = ListServiceRolloutsResponse{} } +func (m *ListServiceRolloutsResponse) String() string { return proto.CompactTextString(m) } +func (*ListServiceRolloutsResponse) ProtoMessage() {} +func (*ListServiceRolloutsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{15} +} +func (m *ListServiceRolloutsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceRolloutsResponse.Unmarshal(m, b) +} +func (m *ListServiceRolloutsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceRolloutsResponse.Marshal(b, m, deterministic) +} +func (dst *ListServiceRolloutsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceRolloutsResponse.Merge(dst, src) +} +func (m *ListServiceRolloutsResponse) XXX_Size() int { + return xxx_messageInfo_ListServiceRolloutsResponse.Size(m) +} +func (m *ListServiceRolloutsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceRolloutsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceRolloutsResponse proto.InternalMessageInfo + +func (m *ListServiceRolloutsResponse) GetRollouts() []*Rollout { + if m != nil { + return m.Rollouts + } + return nil +} + +func (m *ListServiceRolloutsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for GetServiceRollout method. +type GetServiceRolloutRequest struct { + // The name of the service. See the [overview](/service-management/overview) + // for naming requirements. For example: `example.googleapis.com`. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The id of the rollout resource. 
+ RolloutId string `protobuf:"bytes,2,opt,name=rollout_id,json=rolloutId,proto3" json:"rollout_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceRolloutRequest) Reset() { *m = GetServiceRolloutRequest{} } +func (m *GetServiceRolloutRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceRolloutRequest) ProtoMessage() {} +func (*GetServiceRolloutRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{16} +} +func (m *GetServiceRolloutRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceRolloutRequest.Unmarshal(m, b) +} +func (m *GetServiceRolloutRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceRolloutRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceRolloutRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceRolloutRequest.Merge(dst, src) +} +func (m *GetServiceRolloutRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceRolloutRequest.Size(m) +} +func (m *GetServiceRolloutRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceRolloutRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceRolloutRequest proto.InternalMessageInfo + +func (m *GetServiceRolloutRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *GetServiceRolloutRequest) GetRolloutId() string { + if m != nil { + return m.RolloutId + } + return "" +} + +// Request message for EnableService method. +type EnableServiceRequest struct { + // Name of the service to enable. Specifying an unknown service name will + // cause the request to fail. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The identity of consumer resource which service enablement will be + // applied to. + // + // The Google Service Management implementation accepts the following + // forms: + // - "project:" + // + // Note: this is made compatible with + // google.api.servicecontrol.v1.Operation.consumer_id. 
+ ConsumerId string `protobuf:"bytes,2,opt,name=consumer_id,json=consumerId,proto3" json:"consumer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnableServiceRequest) Reset() { *m = EnableServiceRequest{} } +func (m *EnableServiceRequest) String() string { return proto.CompactTextString(m) } +func (*EnableServiceRequest) ProtoMessage() {} +func (*EnableServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{17} +} +func (m *EnableServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnableServiceRequest.Unmarshal(m, b) +} +func (m *EnableServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnableServiceRequest.Marshal(b, m, deterministic) +} +func (dst *EnableServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnableServiceRequest.Merge(dst, src) +} +func (m *EnableServiceRequest) XXX_Size() int { + return xxx_messageInfo_EnableServiceRequest.Size(m) +} +func (m *EnableServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_EnableServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_EnableServiceRequest proto.InternalMessageInfo + +func (m *EnableServiceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *EnableServiceRequest) GetConsumerId() string { + if m != nil { + return m.ConsumerId + } + return "" +} + +// Request message for DisableService method. +type DisableServiceRequest struct { + // Name of the service to disable. Specifying an unknown service name + // will cause the request to fail. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The identity of consumer resource which service disablement will be + // applied to. + // + // The Google Service Management implementation accepts the following + // forms: + // - "project:" + // + // Note: this is made compatible with + // google.api.servicecontrol.v1.Operation.consumer_id. 
+ ConsumerId string `protobuf:"bytes,2,opt,name=consumer_id,json=consumerId,proto3" json:"consumer_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DisableServiceRequest) Reset() { *m = DisableServiceRequest{} } +func (m *DisableServiceRequest) String() string { return proto.CompactTextString(m) } +func (*DisableServiceRequest) ProtoMessage() {} +func (*DisableServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{18} +} +func (m *DisableServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DisableServiceRequest.Unmarshal(m, b) +} +func (m *DisableServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DisableServiceRequest.Marshal(b, m, deterministic) +} +func (dst *DisableServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DisableServiceRequest.Merge(dst, src) +} +func (m *DisableServiceRequest) XXX_Size() int { + return xxx_messageInfo_DisableServiceRequest.Size(m) +} +func (m *DisableServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DisableServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DisableServiceRequest proto.InternalMessageInfo + +func (m *DisableServiceRequest) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *DisableServiceRequest) GetConsumerId() string { + if m != nil { + return m.ConsumerId + } + return "" +} + +// Request message for GenerateConfigReport method. +type GenerateConfigReportRequest struct { + // Service configuration for which we want to generate the report. + // For this version of API, the supported types are + // [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + // [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + // and [google.api.Service][google.api.Service] + NewConfig *any.Any `protobuf:"bytes,1,opt,name=new_config,json=newConfig,proto3" json:"new_config,omitempty"` + // Service configuration against which the comparison will be done. 
+ // For this version of API, the supported types are + // [google.api.servicemanagement.v1.ConfigRef][google.api.servicemanagement.v1.ConfigRef], + // [google.api.servicemanagement.v1.ConfigSource][google.api.servicemanagement.v1.ConfigSource], + // and [google.api.Service][google.api.Service] + OldConfig *any.Any `protobuf:"bytes,2,opt,name=old_config,json=oldConfig,proto3" json:"old_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateConfigReportRequest) Reset() { *m = GenerateConfigReportRequest{} } +func (m *GenerateConfigReportRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateConfigReportRequest) ProtoMessage() {} +func (*GenerateConfigReportRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{19} +} +func (m *GenerateConfigReportRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateConfigReportRequest.Unmarshal(m, b) +} +func (m *GenerateConfigReportRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateConfigReportRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateConfigReportRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateConfigReportRequest.Merge(dst, src) +} +func (m *GenerateConfigReportRequest) XXX_Size() int { + return xxx_messageInfo_GenerateConfigReportRequest.Size(m) +} +func (m *GenerateConfigReportRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateConfigReportRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateConfigReportRequest proto.InternalMessageInfo + +func (m *GenerateConfigReportRequest) GetNewConfig() *any.Any { + if m != nil { + return m.NewConfig + } + return nil +} + +func (m *GenerateConfigReportRequest) GetOldConfig() *any.Any { + if m != nil { + return m.OldConfig + } + return nil +} + +// Response message for GenerateConfigReport method. +type GenerateConfigReportResponse struct { + // Name of the service this report belongs to. + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // ID of the service configuration this report belongs to. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // list of ChangeReport, each corresponding to comparison between two + // service configurations. + ChangeReports []*ChangeReport `protobuf:"bytes,3,rep,name=change_reports,json=changeReports,proto3" json:"change_reports,omitempty"` + // Errors / Linter warnings associated with the service definition this + // report + // belongs to. 
+ Diagnostics []*Diagnostic `protobuf:"bytes,4,rep,name=diagnostics,proto3" json:"diagnostics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateConfigReportResponse) Reset() { *m = GenerateConfigReportResponse{} } +func (m *GenerateConfigReportResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateConfigReportResponse) ProtoMessage() {} +func (*GenerateConfigReportResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_servicemanager_f197734cdf6c7769, []int{20} +} +func (m *GenerateConfigReportResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateConfigReportResponse.Unmarshal(m, b) +} +func (m *GenerateConfigReportResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateConfigReportResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateConfigReportResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateConfigReportResponse.Merge(dst, src) +} +func (m *GenerateConfigReportResponse) XXX_Size() int { + return xxx_messageInfo_GenerateConfigReportResponse.Size(m) +} +func (m *GenerateConfigReportResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateConfigReportResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateConfigReportResponse proto.InternalMessageInfo + +func (m *GenerateConfigReportResponse) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *GenerateConfigReportResponse) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *GenerateConfigReportResponse) GetChangeReports() []*ChangeReport { + if m != nil { + return m.ChangeReports + } + return nil +} + +func (m *GenerateConfigReportResponse) GetDiagnostics() []*Diagnostic { + if m != nil { + return m.Diagnostics + } + return nil +} + +func init() { + proto.RegisterType((*ListServicesRequest)(nil), "google.api.servicemanagement.v1.ListServicesRequest") + proto.RegisterType((*ListServicesResponse)(nil), "google.api.servicemanagement.v1.ListServicesResponse") + proto.RegisterType((*GetServiceRequest)(nil), "google.api.servicemanagement.v1.GetServiceRequest") + proto.RegisterType((*CreateServiceRequest)(nil), "google.api.servicemanagement.v1.CreateServiceRequest") + proto.RegisterType((*DeleteServiceRequest)(nil), "google.api.servicemanagement.v1.DeleteServiceRequest") + proto.RegisterType((*UndeleteServiceRequest)(nil), "google.api.servicemanagement.v1.UndeleteServiceRequest") + proto.RegisterType((*UndeleteServiceResponse)(nil), "google.api.servicemanagement.v1.UndeleteServiceResponse") + proto.RegisterType((*GetServiceConfigRequest)(nil), "google.api.servicemanagement.v1.GetServiceConfigRequest") + proto.RegisterType((*ListServiceConfigsRequest)(nil), "google.api.servicemanagement.v1.ListServiceConfigsRequest") + proto.RegisterType((*ListServiceConfigsResponse)(nil), "google.api.servicemanagement.v1.ListServiceConfigsResponse") + proto.RegisterType((*CreateServiceConfigRequest)(nil), "google.api.servicemanagement.v1.CreateServiceConfigRequest") + proto.RegisterType((*SubmitConfigSourceRequest)(nil), "google.api.servicemanagement.v1.SubmitConfigSourceRequest") + proto.RegisterType((*SubmitConfigSourceResponse)(nil), "google.api.servicemanagement.v1.SubmitConfigSourceResponse") + proto.RegisterType((*CreateServiceRolloutRequest)(nil), "google.api.servicemanagement.v1.CreateServiceRolloutRequest") + 
proto.RegisterType((*ListServiceRolloutsRequest)(nil), "google.api.servicemanagement.v1.ListServiceRolloutsRequest") + proto.RegisterType((*ListServiceRolloutsResponse)(nil), "google.api.servicemanagement.v1.ListServiceRolloutsResponse") + proto.RegisterType((*GetServiceRolloutRequest)(nil), "google.api.servicemanagement.v1.GetServiceRolloutRequest") + proto.RegisterType((*EnableServiceRequest)(nil), "google.api.servicemanagement.v1.EnableServiceRequest") + proto.RegisterType((*DisableServiceRequest)(nil), "google.api.servicemanagement.v1.DisableServiceRequest") + proto.RegisterType((*GenerateConfigReportRequest)(nil), "google.api.servicemanagement.v1.GenerateConfigReportRequest") + proto.RegisterType((*GenerateConfigReportResponse)(nil), "google.api.servicemanagement.v1.GenerateConfigReportResponse") + proto.RegisterEnum("google.api.servicemanagement.v1.GetServiceConfigRequest_ConfigView", GetServiceConfigRequest_ConfigView_name, GetServiceConfigRequest_ConfigView_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ServiceManagerClient is the client API for ServiceManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ServiceManagerClient interface { + // Lists managed services. + // + // Returns all public services. For authenticated users, also returns all + // services the calling user has "servicemanagement.services.get" permission + // for. + // + // **BETA:** If the caller specifies the `consumer_id`, it returns only the + // services enabled on the consumer. The `consumer_id` must have the format + // of "project:{PROJECT-ID}". + ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error) + // Gets a managed service. Authentication is required unless the service is + // public. + GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*ManagedService, error) + // Creates a new managed service. + // Please note one producer project can own no more than 20 services. + // + // Operation + CreateService(ctx context.Context, in *CreateServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a managed service. This method will change the service to the + // `Soft-Delete` state for 30 days. Within this period, service producers may + // call + // [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + // to restore the service. After 30 days, the service will be permanently + // deleted. + // + // Operation + DeleteService(ctx context.Context, in *DeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Revives a previously deleted managed service. The method restores the + // service using the configuration at the time the service was deleted. + // The target service must exist and must have been deleted within the + // last 30 days. + // + // Operation + UndeleteService(ctx context.Context, in *UndeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists the history of the service configuration for a managed service, + // from the newest to the oldest. 
+ ListServiceConfigs(ctx context.Context, in *ListServiceConfigsRequest, opts ...grpc.CallOption) (*ListServiceConfigsResponse, error) + // Gets a service configuration (version) for a managed service. + GetServiceConfig(ctx context.Context, in *GetServiceConfigRequest, opts ...grpc.CallOption) (*serviceconfig.Service, error) + // Creates a new service configuration (version) for a managed service. + // This method only stores the service configuration. To roll out the service + // configuration to backend systems please call + // [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + // + // Only the 100 most recent service configurations and ones referenced by + // existing rollouts are kept for each service. The rest will be deleted + // eventually. + CreateServiceConfig(ctx context.Context, in *CreateServiceConfigRequest, opts ...grpc.CallOption) (*serviceconfig.Service, error) + // Creates a new service configuration (version) for a managed service based + // on + // user-supplied configuration source files (for example: OpenAPI + // Specification). This method stores the source configurations as well as the + // generated service configuration. To rollout the service configuration to + // other services, + // please call + // [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + // + // Only the 100 most recent configuration sources and ones referenced by + // existing service configurtions are kept for each service. The rest will be + // deleted eventually. + // + // Operation + SubmitConfigSource(ctx context.Context, in *SubmitConfigSourceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists the history of the service configuration rollouts for a managed + // service, from the newest to the oldest. + ListServiceRollouts(ctx context.Context, in *ListServiceRolloutsRequest, opts ...grpc.CallOption) (*ListServiceRolloutsResponse, error) + // Gets a service configuration + // [rollout][google.api.servicemanagement.v1.Rollout]. + GetServiceRollout(ctx context.Context, in *GetServiceRolloutRequest, opts ...grpc.CallOption) (*Rollout, error) + // Creates a new service configuration rollout. Based on rollout, the + // Google Service Management will roll out the service configurations to + // different backend services. For example, the logging configuration will be + // pushed to Google Cloud Logging. + // + // Please note that any previous pending and running Rollouts and associated + // Operations will be automatically cancelled so that the latest Rollout will + // not be blocked by previous Rollouts. + // + // Only the 100 most recent (in any state) and the last 10 successful (if not + // already part of the set of 100 most recent) rollouts are kept for each + // service. The rest will be deleted eventually. + // + // Operation + CreateServiceRollout(ctx context.Context, in *CreateServiceRolloutRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Generates and returns a report (errors, warnings and changes from + // existing configurations) associated with + // GenerateConfigReportRequest.new_value + // + // If GenerateConfigReportRequest.old_value is specified, + // GenerateConfigReportRequest will contain a single ChangeReport based on the + // comparison between GenerateConfigReportRequest.new_value and + // GenerateConfigReportRequest.old_value. 
+ // If GenerateConfigReportRequest.old_value is not specified, this method + // will compare GenerateConfigReportRequest.new_value with the last pushed + // service configuration. + GenerateConfigReport(ctx context.Context, in *GenerateConfigReportRequest, opts ...grpc.CallOption) (*GenerateConfigReportResponse, error) + // Enables a [service][google.api.servicemanagement.v1.ManagedService] for a + // project, so it can be used for the project. See [Cloud Auth + // Guide](https://cloud.google.com/docs/authentication) for more information. + // + // Operation + EnableService(ctx context.Context, in *EnableServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Disables a [service][google.api.servicemanagement.v1.ManagedService] for a + // project, so it can no longer be be used for the project. It prevents + // accidental usage that may cause unexpected billing charges or security + // leaks. + // + // Operation + DisableService(ctx context.Context, in *DisableServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type serviceManagerClient struct { + cc *grpc.ClientConn +} + +func NewServiceManagerClient(cc *grpc.ClientConn) ServiceManagerClient { + return &serviceManagerClient{cc} +} + +func (c *serviceManagerClient) ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error) { + out := new(ListServicesResponse) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/ListServices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*ManagedService, error) { + out := new(ManagedService) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/GetService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) CreateService(ctx context.Context, in *CreateServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/CreateService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) DeleteService(ctx context.Context, in *DeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/DeleteService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) UndeleteService(ctx context.Context, in *UndeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/UndeleteService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) ListServiceConfigs(ctx context.Context, in *ListServiceConfigsRequest, opts ...grpc.CallOption) (*ListServiceConfigsResponse, error) { + out := new(ListServiceConfigsResponse) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/ListServiceConfigs", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) GetServiceConfig(ctx context.Context, in *GetServiceConfigRequest, opts ...grpc.CallOption) (*serviceconfig.Service, error) { + out := new(serviceconfig.Service) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/GetServiceConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) CreateServiceConfig(ctx context.Context, in *CreateServiceConfigRequest, opts ...grpc.CallOption) (*serviceconfig.Service, error) { + out := new(serviceconfig.Service) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/CreateServiceConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) SubmitConfigSource(ctx context.Context, in *SubmitConfigSourceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/SubmitConfigSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) ListServiceRollouts(ctx context.Context, in *ListServiceRolloutsRequest, opts ...grpc.CallOption) (*ListServiceRolloutsResponse, error) { + out := new(ListServiceRolloutsResponse) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/ListServiceRollouts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) GetServiceRollout(ctx context.Context, in *GetServiceRolloutRequest, opts ...grpc.CallOption) (*Rollout, error) { + out := new(Rollout) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/GetServiceRollout", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) CreateServiceRollout(ctx context.Context, in *CreateServiceRolloutRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/CreateServiceRollout", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) GenerateConfigReport(ctx context.Context, in *GenerateConfigReportRequest, opts ...grpc.CallOption) (*GenerateConfigReportResponse, error) { + out := new(GenerateConfigReportResponse) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/GenerateConfigReport", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) EnableService(ctx context.Context, in *EnableServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/EnableService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *serviceManagerClient) DisableService(ctx context.Context, in *DisableServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.api.servicemanagement.v1.ServiceManager/DisableService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServiceManagerServer is the server API for ServiceManager service. 
+type ServiceManagerServer interface { + // Lists managed services. + // + // Returns all public services. For authenticated users, also returns all + // services the calling user has "servicemanagement.services.get" permission + // for. + // + // **BETA:** If the caller specifies the `consumer_id`, it returns only the + // services enabled on the consumer. The `consumer_id` must have the format + // of "project:{PROJECT-ID}". + ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error) + // Gets a managed service. Authentication is required unless the service is + // public. + GetService(context.Context, *GetServiceRequest) (*ManagedService, error) + // Creates a new managed service. + // Please note one producer project can own no more than 20 services. + // + // Operation + CreateService(context.Context, *CreateServiceRequest) (*longrunning.Operation, error) + // Deletes a managed service. This method will change the service to the + // `Soft-Delete` state for 30 days. Within this period, service producers may + // call + // [UndeleteService][google.api.servicemanagement.v1.ServiceManager.UndeleteService] + // to restore the service. After 30 days, the service will be permanently + // deleted. + // + // Operation + DeleteService(context.Context, *DeleteServiceRequest) (*longrunning.Operation, error) + // Revives a previously deleted managed service. The method restores the + // service using the configuration at the time the service was deleted. + // The target service must exist and must have been deleted within the + // last 30 days. + // + // Operation + UndeleteService(context.Context, *UndeleteServiceRequest) (*longrunning.Operation, error) + // Lists the history of the service configuration for a managed service, + // from the newest to the oldest. + ListServiceConfigs(context.Context, *ListServiceConfigsRequest) (*ListServiceConfigsResponse, error) + // Gets a service configuration (version) for a managed service. + GetServiceConfig(context.Context, *GetServiceConfigRequest) (*serviceconfig.Service, error) + // Creates a new service configuration (version) for a managed service. + // This method only stores the service configuration. To roll out the service + // configuration to backend systems please call + // [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + // + // Only the 100 most recent service configurations and ones referenced by + // existing rollouts are kept for each service. The rest will be deleted + // eventually. + CreateServiceConfig(context.Context, *CreateServiceConfigRequest) (*serviceconfig.Service, error) + // Creates a new service configuration (version) for a managed service based + // on + // user-supplied configuration source files (for example: OpenAPI + // Specification). This method stores the source configurations as well as the + // generated service configuration. To roll out the service configuration to + // other services, + // please call + // [CreateServiceRollout][google.api.servicemanagement.v1.ServiceManager.CreateServiceRollout]. + // + // Only the 100 most recent configuration sources and ones referenced by + // existing service configurations are kept for each service. The rest will be + // deleted eventually. + // + // Operation + SubmitConfigSource(context.Context, *SubmitConfigSourceRequest) (*longrunning.Operation, error) + // Lists the history of the service configuration rollouts for a managed + // service, from the newest to the oldest.
+ ListServiceRollouts(context.Context, *ListServiceRolloutsRequest) (*ListServiceRolloutsResponse, error) + // Gets a service configuration + // [rollout][google.api.servicemanagement.v1.Rollout]. + GetServiceRollout(context.Context, *GetServiceRolloutRequest) (*Rollout, error) + // Creates a new service configuration rollout. Based on rollout, the + // Google Service Management will roll out the service configurations to + // different backend services. For example, the logging configuration will be + // pushed to Google Cloud Logging. + // + // Please note that any previous pending and running Rollouts and associated + // Operations will be automatically cancelled so that the latest Rollout will + // not be blocked by previous Rollouts. + // + // Only the 100 most recent (in any state) and the last 10 successful (if not + // already part of the set of 100 most recent) rollouts are kept for each + // service. The rest will be deleted eventually. + // + // Operation + CreateServiceRollout(context.Context, *CreateServiceRolloutRequest) (*longrunning.Operation, error) + // Generates and returns a report (errors, warnings and changes from + // existing configurations) associated with + // GenerateConfigReportRequest.new_value + // + // If GenerateConfigReportRequest.old_value is specified, + // GenerateConfigReportRequest will contain a single ChangeReport based on the + // comparison between GenerateConfigReportRequest.new_value and + // GenerateConfigReportRequest.old_value. + // If GenerateConfigReportRequest.old_value is not specified, this method + // will compare GenerateConfigReportRequest.new_value with the last pushed + // service configuration. + GenerateConfigReport(context.Context, *GenerateConfigReportRequest) (*GenerateConfigReportResponse, error) + // Enables a [service][google.api.servicemanagement.v1.ManagedService] for a + // project, so it can be used for the project. See [Cloud Auth + // Guide](https://cloud.google.com/docs/authentication) for more information. + // + // Operation + EnableService(context.Context, *EnableServiceRequest) (*longrunning.Operation, error) + // Disables a [service][google.api.servicemanagement.v1.ManagedService] for a + // project, so it can no longer be used for the project. It prevents + // accidental usage that may cause unexpected billing charges or security + // leaks.
+ // + // Operation + DisableService(context.Context, *DisableServiceRequest) (*longrunning.Operation, error) +} + +func RegisterServiceManagerServer(s *grpc.Server, srv ServiceManagerServer) { + s.RegisterService(&_ServiceManager_serviceDesc, srv) +} + +func _ServiceManager_ListServices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).ListServices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/ListServices", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).ListServices(ctx, req.(*ListServicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_GetService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).GetService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/GetService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).GetService(ctx, req.(*GetServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_CreateService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).CreateService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/CreateService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).CreateService(ctx, req.(*CreateServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_DeleteService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).DeleteService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/DeleteService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).DeleteService(ctx, req.(*DeleteServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_UndeleteService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeleteServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).UndeleteService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/UndeleteService", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).UndeleteService(ctx, req.(*UndeleteServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_ListServiceConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServiceConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).ListServiceConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/ListServiceConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).ListServiceConfigs(ctx, req.(*ListServiceConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_GetServiceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).GetServiceConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/GetServiceConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).GetServiceConfig(ctx, req.(*GetServiceConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_CreateServiceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).CreateServiceConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/CreateServiceConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).CreateServiceConfig(ctx, req.(*CreateServiceConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_SubmitConfigSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SubmitConfigSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).SubmitConfigSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/SubmitConfigSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).SubmitConfigSource(ctx, req.(*SubmitConfigSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_ListServiceRollouts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServiceRolloutsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).ListServiceRollouts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.api.servicemanagement.v1.ServiceManager/ListServiceRollouts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).ListServiceRollouts(ctx, req.(*ListServiceRolloutsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_GetServiceRollout_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceRolloutRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).GetServiceRollout(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/GetServiceRollout", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).GetServiceRollout(ctx, req.(*GetServiceRolloutRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_CreateServiceRollout_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceRolloutRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).CreateServiceRollout(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/CreateServiceRollout", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).CreateServiceRollout(ctx, req.(*CreateServiceRolloutRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_GenerateConfigReport_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateConfigReportRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).GenerateConfigReport(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/GenerateConfigReport", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).GenerateConfigReport(ctx, req.(*GenerateConfigReportRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_EnableService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EnableServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServiceManagerServer).EnableService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/EnableService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).EnableService(ctx, req.(*EnableServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ServiceManager_DisableService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DisableServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ServiceManagerServer).DisableService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.api.servicemanagement.v1.ServiceManager/DisableService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServiceManagerServer).DisableService(ctx, req.(*DisableServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ServiceManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.api.servicemanagement.v1.ServiceManager", + HandlerType: (*ServiceManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListServices", + Handler: _ServiceManager_ListServices_Handler, + }, + { + MethodName: "GetService", + Handler: _ServiceManager_GetService_Handler, + }, + { + MethodName: "CreateService", + Handler: _ServiceManager_CreateService_Handler, + }, + { + MethodName: "DeleteService", + Handler: _ServiceManager_DeleteService_Handler, + }, + { + MethodName: "UndeleteService", + Handler: _ServiceManager_UndeleteService_Handler, + }, + { + MethodName: "ListServiceConfigs", + Handler: _ServiceManager_ListServiceConfigs_Handler, + }, + { + MethodName: "GetServiceConfig", + Handler: _ServiceManager_GetServiceConfig_Handler, + }, + { + MethodName: "CreateServiceConfig", + Handler: _ServiceManager_CreateServiceConfig_Handler, + }, + { + MethodName: "SubmitConfigSource", + Handler: _ServiceManager_SubmitConfigSource_Handler, + }, + { + MethodName: "ListServiceRollouts", + Handler: _ServiceManager_ListServiceRollouts_Handler, + }, + { + MethodName: "GetServiceRollout", + Handler: _ServiceManager_GetServiceRollout_Handler, + }, + { + MethodName: "CreateServiceRollout", + Handler: _ServiceManager_CreateServiceRollout_Handler, + }, + { + MethodName: "GenerateConfigReport", + Handler: _ServiceManager_GenerateConfigReport_Handler, + }, + { + MethodName: "EnableService", + Handler: _ServiceManager_EnableService_Handler, + }, + { + MethodName: "DisableService", + Handler: _ServiceManager_DisableService_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/api/servicemanagement/v1/servicemanager.proto", +} + +func init() { + proto.RegisterFile("google/api/servicemanagement/v1/servicemanager.proto", fileDescriptor_servicemanager_f197734cdf6c7769) +} + +var fileDescriptor_servicemanager_f197734cdf6c7769 = []byte{ + // 1447 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xcf, 0x6f, 0xdc, 0x44, + 0x14, 0x66, 0x36, 0x69, 0x9b, 0xbc, 0x64, 0xb7, 0xed, 0x64, 0x69, 0xb6, 0x9b, 0x46, 0x4d, 0x9c, + 0x52, 0xa2, 0x94, 0xda, 0x4a, 0xfa, 0x03, 0xba, 0x29, 0x87, 0x26, 0x81, 0x28, 0xa2, 0xa5, 0xd1, + 0xa6, 0x3f, 0x50, 0x09, 0x5a, 0x39, 0xf6, 0xc4, 0x98, 0x7a, 0x67, 0x16, 0xdb, 0x9b, 0x90, 0x46, + 0xbd, 0x54, 0x95, 0x90, 0xe0, 0x84, 0x8a, 0xe8, 0xbd, 0x42, 0x1c, 0x7a, 0xe5, 0x80, 0xc4, 0x01, + 0xc1, 0x95, 0x2b, 0x70, 0xe4, 0xc6, 0xdf, 0xc0, 0x19, 0xed, 0x78, 0xbc, 0xf1, 0xec, 0x3a, 0x6b, + 0x7b, 0x05, 0x1c, 0xe7, 0xcd, 0x7c, 0xef, 0x7d, 0xf3, 0xe6, 0xbd, 0x99, 0xcf, 0x86, 0xcb, 0x16, + 0x63, 0x96, 0x43, 0x34, 0xbd, 0x61, 0x6b, 0x1e, 0x71, 0x77, 0x6c, 0x83, 0xd4, 0x75, 0xaa, 0x5b, + 0xa4, 0x4e, 0xa8, 0xaf, 0xed, 0xcc, 0xcb, 0x46, 0x57, 0x6d, 0xb8, 0xcc, 0x67, 0xf8, 0x6c, 0x80, + 0x52, 0xf5, 0x86, 0xad, 0x76, 0xa1, 0xd4, 0x9d, 0xf9, 0xf2, 0x99, 0x88, 0x5b, 0x9d, 0x52, 0xe6, + 0xeb, 0xbe, 0xcd, 0xa8, 0x17, 0xc0, 0xcb, 0xa5, 0xee, 0xa0, 0x62, 0x46, 0x4b, 0xa2, 0xe3, 0x12, + 0x8f, 0x35, 0x5d, 0x83, 0x84, 0xae, 0x66, 0x04, 0xc0, 0x61, 0xd4, 0x72, 
0x9b, 0x94, 0xda, 0xd4, + 0xd2, 0x58, 0x83, 0xb8, 0x52, 0xbc, 0xd3, 0x62, 0x11, 0x1f, 0x6d, 0x35, 0xb7, 0x35, 0x9d, 0xee, + 0x89, 0xa9, 0xa9, 0xce, 0xa9, 0x6d, 0x9b, 0x38, 0x66, 0xad, 0xae, 0x7b, 0x0f, 0xc5, 0x8a, 0x33, + 0x9d, 0x2b, 0x3c, 0xdf, 0x6d, 0x1a, 0xbe, 0x98, 0x1d, 0x17, 0xb3, 0x6e, 0xc3, 0xd0, 0x3c, 0x5f, + 0xf7, 0x9b, 0x22, 0xa6, 0xf2, 0x02, 0xc1, 0xd8, 0x4d, 0xdb, 0xf3, 0x37, 0x82, 0x5d, 0x78, 0x55, + 0xf2, 0x69, 0x93, 0x78, 0x3e, 0x56, 0x61, 0xac, 0xe1, 0x32, 0xb3, 0x69, 0x10, 0xb7, 0xd6, 0x70, + 0xd9, 0x27, 0xc4, 0xf0, 0x6b, 0xb6, 0x59, 0x42, 0x53, 0x68, 0x76, 0xb8, 0x7a, 0x32, 0x9c, 0x5a, + 0x0f, 0x66, 0xd6, 0x4c, 0x3c, 0x01, 0xc3, 0x0d, 0xdd, 0x22, 0x35, 0xcf, 0x7e, 0x44, 0x4a, 0x47, + 0xa6, 0xd0, 0xec, 0x91, 0xea, 0x50, 0xcb, 0xb0, 0x61, 0x3f, 0x22, 0x78, 0x12, 0x80, 0x4f, 0xfa, + 0xec, 0x21, 0xa1, 0xa5, 0xa3, 0xdc, 0x07, 0x5f, 0x7e, 0xa7, 0x65, 0xc0, 0x67, 0x61, 0xc4, 0x60, + 0xd4, 0x6b, 0xd6, 0x89, 0xdb, 0x8a, 0x71, 0x8c, 0xcf, 0x43, 0x68, 0x5a, 0x33, 0x95, 0x2f, 0x11, + 0x14, 0x65, 0x92, 0x5e, 0x83, 0x51, 0x8f, 0xe0, 0xf7, 0x60, 0x48, 0xa4, 0xdf, 0x2b, 0xa1, 0xa9, + 0x81, 0xd9, 0x91, 0x05, 0x4d, 0x4d, 0x38, 0x73, 0xf5, 0x16, 0x1f, 0x99, 0xc2, 0x57, 0xb5, 0xed, + 0x00, 0x9f, 0x87, 0xe3, 0x94, 0x7c, 0xe6, 0xd7, 0x22, 0x54, 0x73, 0x9c, 0x4a, 0xbe, 0x65, 0x5e, + 0x0f, 0xe9, 0x2a, 0x57, 0xe1, 0xe4, 0x2a, 0x09, 0xb9, 0x84, 0xf9, 0x9a, 0x86, 0x51, 0xe1, 0xa8, + 0x46, 0xf5, 0x3a, 0x11, 0x89, 0x1a, 0x11, 0xb6, 0xf7, 0xf5, 0x3a, 0x51, 0x74, 0x28, 0x2e, 0xbb, + 0x44, 0xf7, 0x49, 0x07, 0x74, 0x0d, 0x8e, 0x89, 0x65, 0x1c, 0xd5, 0xc7, 0x1e, 0x42, 0xbc, 0x72, + 0x0d, 0x8a, 0x2b, 0xc4, 0x21, 0x5d, 0x21, 0x52, 0xb0, 0x5b, 0x84, 0x53, 0x77, 0xa9, 0xd9, 0x27, + 0xd8, 0x84, 0xf1, 0x2e, 0xb0, 0x38, 0xa2, 0x7f, 0x71, 0x77, 0x7f, 0x20, 0x18, 0x3f, 0xc8, 0xfc, + 0x32, 0xa3, 0xdb, 0xb6, 0x95, 0x9e, 0x64, 0xab, 0x44, 0x0d, 0x8e, 0x69, 0x15, 0x59, 0x70, 0xb2, + 0x43, 0x81, 0x61, 0xcd, 0xc4, 0xf7, 0x61, 0x70, 0xc7, 0x26, 0xbb, 0xa5, 0x81, 0x29, 0x34, 0x5b, + 0x58, 0x58, 0x4e, 0xe4, 0x78, 0x08, 0x0f, 0x35, 0x18, 0xdd, 0xb3, 0xc9, 0x6e, 0x95, 0x3b, 0x54, + 0xa6, 0x01, 0x0e, 0x6c, 0x78, 0x18, 0x8e, 0x2c, 0xdd, 0xd8, 0x58, 0x5b, 0x3e, 0xf1, 0x0a, 0x1e, + 0x82, 0xc1, 0x77, 0xef, 0xde, 0xbc, 0x79, 0x02, 0x29, 0x8f, 0xe0, 0x74, 0xa4, 0xba, 0x83, 0xd5, + 0x5e, 0x86, 0x8d, 0xc9, 0xed, 0x95, 0xeb, 0x6c, 0x2f, 0xa9, 0x35, 0x07, 0xe4, 0xd6, 0x54, 0x9e, + 0x20, 0x28, 0xc7, 0x05, 0x17, 0xa7, 0x77, 0x1d, 0x8e, 0x87, 0xd1, 0x83, 0x54, 0x85, 0x7d, 0x36, + 0x16, 0xcd, 0x50, 0x78, 0x52, 0x05, 0x4f, 0xf2, 0x92, 0xba, 0xa3, 0xf6, 0xa1, 0x2c, 0x75, 0x46, + 0xe6, 0xa3, 0xad, 0x40, 0x41, 0xa6, 0xc9, 0xe3, 0x1c, 0xc2, 0x32, 0x2f, 0xb1, 0x54, 0x7e, 0x40, + 0x70, 0x7a, 0xa3, 0xb9, 0x55, 0xb7, 0xfd, 0xc0, 0xb0, 0xc1, 0x2f, 0xee, 0x0c, 0xc1, 0xab, 0x90, + 0x17, 0x75, 0x15, 0xdc, 0xf9, 0x22, 0xf6, 0xc5, 0xc4, 0x1a, 0x92, 0xe2, 0x8d, 0x1a, 0x91, 0x11, + 0x9e, 0x81, 0xfc, 0x8e, 0xee, 0xd8, 0xa6, 0xee, 0x93, 0x1a, 0xa3, 0xce, 0x1e, 0x3f, 0xb7, 0xa1, + 0xea, 0x68, 0x68, 0xbc, 0x4d, 0x9d, 0x3d, 0xe5, 0x03, 0x28, 0xc7, 0x11, 0x17, 0x47, 0xd7, 0x9d, + 0x13, 0x94, 0x3a, 0x27, 0x4f, 0x11, 0x4c, 0xc8, 0x77, 0x15, 0x73, 0x1c, 0xd6, 0xf4, 0x33, 0x64, + 0x65, 0x09, 0x8e, 0xb9, 0x01, 0x48, 0xe4, 0x63, 0x36, 0x31, 0x1f, 0x61, 0x90, 0x10, 0xa8, 0x3c, + 0x93, 0x8b, 0x53, 0xcc, 0xff, 0x4f, 0xad, 0x81, 0x4f, 0xc1, 0xd1, 0x6d, 0xdb, 0xf1, 0x89, 0x5b, + 0x1a, 0xe4, 0x38, 0x31, 0x6a, 0xbd, 0x46, 0x13, 0xb1, 0xac, 0x44, 0xe2, 0x57, 0x60, 0x48, 0x6c, + 0x20, 0x6c, 0x96, 0xf4, 0x5b, 0x6f, 0x23, 0x53, 0xf7, 0xce, 0x26, 0x94, 0x22, 0xaf, 0x51, 0xe6, + 
0x63, 0x9a, 0x04, 0x10, 0x21, 0x0f, 0x6e, 0xc5, 0x61, 0x61, 0x59, 0x33, 0x95, 0x07, 0x50, 0x7c, + 0x87, 0xea, 0x5b, 0x4e, 0xf6, 0x37, 0xa1, 0xf3, 0x55, 0xcf, 0x75, 0xbd, 0xea, 0x1f, 0xc2, 0xab, + 0x2b, 0xb6, 0xf7, 0x1f, 0x39, 0xff, 0x1c, 0xc1, 0xc4, 0x2a, 0xa1, 0x2d, 0x85, 0xd5, 0xbe, 0x4e, + 0x1a, 0xcc, 0x6d, 0xa7, 0xe6, 0x12, 0x00, 0x25, 0xbb, 0x72, 0x67, 0x14, 0xc3, 0x63, 0x0a, 0x35, + 0x94, 0x7a, 0x83, 0xee, 0x55, 0x87, 0x29, 0xd9, 0x0d, 0x3c, 0xb4, 0x40, 0xcc, 0x31, 0xe5, 0x2b, + 0xe6, 0x10, 0x10, 0x73, 0x4c, 0xd1, 0x4b, 0x7f, 0x23, 0x38, 0x13, 0xcf, 0x44, 0xd4, 0x4b, 0x8a, + 0xed, 0x16, 0x20, 0xd7, 0xde, 0x65, 0xce, 0x36, 0xf1, 0x1d, 0x28, 0x18, 0x1f, 0xeb, 0xd4, 0x22, + 0x35, 0x97, 0xfb, 0xf2, 0x4a, 0x03, 0xbc, 0xd0, 0x52, 0xdc, 0x39, 0x1c, 0x26, 0x18, 0xe4, 0x8d, + 0xc8, 0xc8, 0xc3, 0xb7, 0x60, 0xc4, 0xb4, 0x75, 0x8b, 0x32, 0xcf, 0xb7, 0x0d, 0xaf, 0x34, 0xc8, + 0x5d, 0x5e, 0x48, 0x74, 0xb9, 0xd2, 0xc6, 0x54, 0xa3, 0xf8, 0x85, 0xdf, 0xc7, 0xa0, 0x20, 0x4e, + 0x36, 0x78, 0xd1, 0x5d, 0xfc, 0x15, 0x82, 0xd1, 0xa8, 0x90, 0xc3, 0x97, 0x13, 0xbd, 0xc7, 0x88, + 0xd3, 0xf2, 0x95, 0x8c, 0xa8, 0x20, 0xd1, 0x4a, 0xf1, 0xc9, 0x6f, 0x7f, 0x3d, 0xcb, 0x15, 0xf0, + 0x68, 0xe4, 0x83, 0xc1, 0xc3, 0xdf, 0x20, 0x80, 0x83, 0x0e, 0xc2, 0x0b, 0x19, 0x9e, 0xfe, 0x90, + 0x4f, 0x56, 0x49, 0xa3, 0xcc, 0x70, 0x26, 0x93, 0x78, 0x22, 0xca, 0x44, 0xdb, 0x8f, 0x96, 0xc1, + 0x63, 0xfc, 0x14, 0x41, 0x5e, 0xba, 0x84, 0x71, 0xf2, 0xbe, 0xe3, 0x04, 0x66, 0x79, 0x32, 0x84, + 0x45, 0xbe, 0x3e, 0xd4, 0xdb, 0xe1, 0xd7, 0x87, 0x32, 0xc9, 0xc9, 0x8c, 0x2b, 0x52, 0x5a, 0x2a, + 0xa1, 0xea, 0xc2, 0x5f, 0x20, 0xc8, 0x4b, 0xa2, 0x32, 0x05, 0x8d, 0x38, 0x11, 0x9a, 0x44, 0x43, + 0xe4, 0x64, 0xae, 0x67, 0x4e, 0x9e, 0x23, 0x38, 0xde, 0xa1, 0x34, 0xf1, 0x9b, 0x89, 0x74, 0xe2, + 0x85, 0x6d, 0x12, 0xa1, 0x37, 0x38, 0xa1, 0xf3, 0xca, 0xb9, 0x1e, 0x84, 0x2a, 0x4d, 0xe1, 0x1a, + 0xff, 0x88, 0x00, 0x77, 0x0b, 0x29, 0x5c, 0xc9, 0x52, 0xaa, 0xb2, 0xf4, 0x2b, 0x2f, 0xf6, 0x85, + 0x15, 0xc5, 0x7e, 0x81, 0xb3, 0x7f, 0x0d, 0xcf, 0xf4, 0x60, 0xaf, 0x09, 0x4d, 0x87, 0x7f, 0x46, + 0x70, 0xa2, 0x53, 0xd1, 0xe2, 0xb7, 0xfa, 0x15, 0xc1, 0xe5, 0x38, 0x89, 0xa1, 0x7c, 0xc4, 0x09, + 0xdd, 0xc7, 0x5a, 0x0a, 0x42, 0xda, 0x7e, 0x5b, 0xa9, 0x3f, 0x7e, 0x70, 0x0e, 0x2b, 0xc9, 0x10, + 0xfc, 0x2d, 0x82, 0xb1, 0x18, 0x11, 0x89, 0x17, 0xb3, 0xf5, 0x4c, 0x8a, 0x8d, 0x2c, 0xf2, 0x8d, + 0x5c, 0x51, 0xd2, 0x64, 0xb6, 0xd2, 0xa1, 0xc1, 0xf0, 0x77, 0x08, 0x70, 0xb7, 0x68, 0x4b, 0x51, + 0x26, 0x87, 0x4a, 0xd4, 0xa4, 0x32, 0xbe, 0xc2, 0xe9, 0x6a, 0xca, 0x5c, 0x1a, 0xba, 0x1e, 0x8f, + 0x52, 0x41, 0x73, 0xf8, 0x27, 0xf9, 0xc7, 0x40, 0xa8, 0x72, 0x70, 0xa6, 0xa2, 0xec, 0x50, 0x6c, + 0xe5, 0xeb, 0xfd, 0x81, 0x45, 0x49, 0x8b, 0x86, 0xc4, 0xbd, 0x1a, 0x52, 0x6b, 0x0b, 0xa8, 0xef, + 0x91, 0xf4, 0x9d, 0x1e, 0x98, 0xf1, 0xb5, 0x2c, 0xd7, 0xbb, 0xa4, 0xa6, 0xca, 0xa9, 0x55, 0x9c, + 0x72, 0x8d, 0x13, 0xbd, 0x84, 0xe7, 0xd3, 0x10, 0xd5, 0xf6, 0x0f, 0x04, 0xd8, 0x63, 0xfc, 0x12, + 0x75, 0xfe, 0x25, 0x10, 0xc4, 0xaf, 0x67, 0xbc, 0xfb, 0x65, 0xee, 0x29, 0x6b, 0x24, 0x55, 0x66, + 0x2b, 0xa1, 0x3e, 0xc7, 0xbf, 0x20, 0x28, 0xc6, 0x49, 0x9b, 0x14, 0x64, 0x7b, 0x68, 0xb3, 0xf2, + 0xdb, 0x7d, 0xa2, 0xe5, 0x32, 0x51, 0xa6, 0xa5, 0xf7, 0xcc, 0x8a, 0x81, 0xb4, 0xea, 0xfc, 0x6b, + 0x04, 0x79, 0x49, 0xe2, 0xa6, 0x78, 0xde, 0xe2, 0x24, 0x71, 0x52, 0x8a, 0x2f, 0x72, 0x56, 0xaf, + 0x2b, 0xbd, 0xee, 0xb2, 0x0a, 0xe1, 0x8e, 0x5b, 0xb4, 0x9e, 0x23, 0x28, 0xc8, 0xea, 0x18, 0x5f, + 0x4d, 0xa1, 0xc4, 0xbc, 0xec, 0xc4, 0x54, 0x4e, 0x6c, 0xb6, 0xe7, 0x75, 0x56, 0x31, 0x03, 0xcf, + 0x15, 0x34, 0xb7, 0xf4, 
0x27, 0x82, 0x19, 0x83, 0xd5, 0x93, 0xc8, 0x2c, 0x8d, 0xc9, 0xda, 0x6f, + 0xbd, 0x25, 0x8f, 0xd7, 0xd1, 0x83, 0x75, 0x81, 0xb3, 0x98, 0xa3, 0x53, 0x4b, 0x65, 0xae, 0xa5, + 0x59, 0x84, 0x72, 0xf1, 0x2c, 0xfe, 0xaa, 0xea, 0x0d, 0xdb, 0x3b, 0xf4, 0xcf, 0xea, 0x62, 0x97, + 0xf1, 0x45, 0x6e, 0x70, 0xf5, 0xc6, 0xc6, 0xad, 0x97, 0xb9, 0xe9, 0xd5, 0xc0, 0xf3, 0xb2, 0xc3, + 0x9a, 0xa6, 0x2a, 0x45, 0xe7, 0x9c, 0xee, 0xcd, 0xff, 0x1a, 0xae, 0xd9, 0xe4, 0x6b, 0x36, 0xbb, + 0xd6, 0x6c, 0xde, 0x9b, 0xdf, 0x3a, 0xca, 0xb9, 0x5c, 0xfa, 0x27, 0x00, 0x00, 0xff, 0xff, 0x0e, + 0x92, 0xfd, 0x90, 0x73, 0x16, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/legacy/audit_data.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/legacy/audit_data.pb.go new file mode 100644 index 0000000..35eb040 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/legacy/audit_data.pb.go @@ -0,0 +1,99 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/legacy/audit_data.proto + +package legacy // import "google.golang.org/genproto/googleapis/appengine/legacy" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Admin Console legacy audit log. +type AuditData struct { + // Text description of the admin event. + // This is the "Event" column in Admin Console's Admin Logs. + EventMessage string `protobuf:"bytes,1,opt,name=event_message,json=eventMessage,proto3" json:"event_message,omitempty"` + // Arbitrary event data. + // This is the "Result" column in Admin Console's Admin Logs. 
+ EventData map[string]string `protobuf:"bytes,2,rep,name=event_data,json=eventData,proto3" json:"event_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditData) Reset() { *m = AuditData{} } +func (m *AuditData) String() string { return proto.CompactTextString(m) } +func (*AuditData) ProtoMessage() {} +func (*AuditData) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_37c3913c9f749e98, []int{0} +} +func (m *AuditData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditData.Unmarshal(m, b) +} +func (m *AuditData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditData.Marshal(b, m, deterministic) +} +func (dst *AuditData) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditData.Merge(dst, src) +} +func (m *AuditData) XXX_Size() int { + return xxx_messageInfo_AuditData.Size(m) +} +func (m *AuditData) XXX_DiscardUnknown() { + xxx_messageInfo_AuditData.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditData proto.InternalMessageInfo + +func (m *AuditData) GetEventMessage() string { + if m != nil { + return m.EventMessage + } + return "" +} + +func (m *AuditData) GetEventData() map[string]string { + if m != nil { + return m.EventData + } + return nil +} + +func init() { + proto.RegisterType((*AuditData)(nil), "google.appengine.legacy.AuditData") + proto.RegisterMapType((map[string]string)(nil), "google.appengine.legacy.AuditData.EventDataEntry") +} + +func init() { + proto.RegisterFile("google/appengine/legacy/audit_data.proto", fileDescriptor_audit_data_37c3913c9f749e98) +} + +var fileDescriptor_audit_data_37c3913c9f749e98 = []byte{ + // 247 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x90, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0xc9, 0x16, 0x85, 0x1d, 0xb5, 0x48, 0x10, 0x5c, 0xf4, 0x52, 0xf4, 0xb2, 0xa7, 0x04, + 0xf5, 0x22, 0xfe, 0x39, 0x58, 0xec, 0x51, 0x58, 0x7a, 0xf4, 0x52, 0xc6, 0x76, 0x18, 0x16, 0xb7, + 0x49, 0xd8, 0x4d, 0x0b, 0xfb, 0xed, 0xfc, 0x68, 0x92, 0xa4, 0x2e, 0x48, 0xe9, 0x29, 0x33, 0x6f, + 0x7e, 0x99, 0x79, 0x3c, 0x28, 0xd9, 0x5a, 0x6e, 0x48, 0xa3, 0x73, 0x64, 0xb8, 0x36, 0xa4, 0x1b, + 0x62, 0x5c, 0xf6, 0x1a, 0x37, 0xab, 0xda, 0x2f, 0x56, 0xe8, 0x51, 0xb9, 0xd6, 0x7a, 0x2b, 0x2f, + 0x13, 0xa9, 0x06, 0x52, 0x25, 0xf2, 0xe6, 0x47, 0x40, 0xfe, 0x16, 0xe8, 0x77, 0xf4, 0x28, 0x6f, + 0xe1, 0x8c, 0xb6, 0x64, 0xfc, 0x62, 0x4d, 0x5d, 0x87, 0x4c, 0x85, 0x98, 0x88, 0x32, 0x9f, 0x9f, + 0x46, 0xf1, 0x23, 0x69, 0xb2, 0x02, 0x48, 0x50, 0xd8, 0x5f, 0x64, 0x93, 0x51, 0x79, 0x72, 0x7f, + 0xa7, 0x0e, 0x1c, 0x50, 0xc3, 0x72, 0x35, 0x0b, 0x9f, 0x42, 0x35, 0x33, 0xbe, 0xed, 0xe7, 0x39, + 0xfd, 0xf5, 0x57, 0x2f, 0x30, 0xfe, 0x3f, 0x94, 0xe7, 0x30, 0xfa, 0xa6, 0x7e, 0x77, 0x3e, 0x94, + 0xf2, 0x02, 0x8e, 0xb6, 0xd8, 0x6c, 0xa8, 0xc8, 0xa2, 0x96, 0x9a, 0xa7, 0xec, 0x51, 0x4c, 0x0d, + 0x5c, 0x2f, 0xed, 0xfa, 0x90, 0x81, 0xe9, 0x78, 0x70, 0x50, 0x85, 0x28, 0x2a, 0xf1, 0xf9, 0xba, + 0x43, 0xd9, 0x36, 0x68, 0x58, 0xd9, 0x96, 0x35, 0x93, 0x89, 0x41, 0xe9, 0x34, 0x42, 0x57, 0x77, + 0x7b, 0xa9, 0x3e, 0xa7, 0xe7, 0xeb, 0x38, 0x92, 0x0f, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4e, + 0x5d, 0x14, 0xaa, 0x7e, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/logging/v1/request_log.pb.go 
b/vendor/google.golang.org/genproto/googleapis/appengine/logging/v1/request_log.pb.go new file mode 100644 index 0000000..ebf475b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/logging/v1/request_log.pb.go @@ -0,0 +1,616 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/logging/v1/request_log.proto + +package logging // import "google.golang.org/genproto/googleapis/appengine/logging/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _type "google.golang.org/genproto/googleapis/logging/type" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Application log line emitted while processing a request. +type LogLine struct { + // Approximate time when this log entry was made. + Time *timestamp.Timestamp `protobuf:"bytes,1,opt,name=time,proto3" json:"time,omitempty"` + // Severity of this log entry. + Severity _type.LogSeverity `protobuf:"varint,2,opt,name=severity,proto3,enum=google.logging.type.LogSeverity" json:"severity,omitempty"` + // App-provided log message. + LogMessage string `protobuf:"bytes,3,opt,name=log_message,json=logMessage,proto3" json:"log_message,omitempty"` + // Where in the source code this log message was written. + SourceLocation *SourceLocation `protobuf:"bytes,4,opt,name=source_location,json=sourceLocation,proto3" json:"source_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogLine) Reset() { *m = LogLine{} } +func (m *LogLine) String() string { return proto.CompactTextString(m) } +func (*LogLine) ProtoMessage() {} +func (*LogLine) Descriptor() ([]byte, []int) { + return fileDescriptor_request_log_d8583300719546b7, []int{0} +} +func (m *LogLine) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogLine.Unmarshal(m, b) +} +func (m *LogLine) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogLine.Marshal(b, m, deterministic) +} +func (dst *LogLine) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogLine.Merge(dst, src) +} +func (m *LogLine) XXX_Size() int { + return xxx_messageInfo_LogLine.Size(m) +} +func (m *LogLine) XXX_DiscardUnknown() { + xxx_messageInfo_LogLine.DiscardUnknown(m) +} + +var xxx_messageInfo_LogLine proto.InternalMessageInfo + +func (m *LogLine) GetTime() *timestamp.Timestamp { + if m != nil { + return m.Time + } + return nil +} + +func (m *LogLine) GetSeverity() _type.LogSeverity { + if m != nil { + return m.Severity + } + return _type.LogSeverity_DEFAULT +} + +func (m *LogLine) GetLogMessage() string { + if m != nil { + return m.LogMessage + } + return "" +} + +func (m *LogLine) GetSourceLocation() *SourceLocation { + if m != nil { + return m.SourceLocation + } + return nil +} + +// Specifies a location in a source code file. +type SourceLocation struct { + // Source file name. 
Depending on the runtime environment, this might be a + // simple name or a fully-qualified name. + File string `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` + // Line within the source file. + Line int64 `protobuf:"varint,2,opt,name=line,proto3" json:"line,omitempty"` + // Human-readable name of the function or method being invoked, with optional + // context such as the class or package name. This information is used in + // contexts such as the logs viewer, where a file and line number are less + // meaningful. The format can vary by language. For example: + // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` + // (Python). + FunctionName string `protobuf:"bytes,3,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceLocation) Reset() { *m = SourceLocation{} } +func (m *SourceLocation) String() string { return proto.CompactTextString(m) } +func (*SourceLocation) ProtoMessage() {} +func (*SourceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_request_log_d8583300719546b7, []int{1} +} +func (m *SourceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceLocation.Unmarshal(m, b) +} +func (m *SourceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceLocation.Marshal(b, m, deterministic) +} +func (dst *SourceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceLocation.Merge(dst, src) +} +func (m *SourceLocation) XXX_Size() int { + return xxx_messageInfo_SourceLocation.Size(m) +} +func (m *SourceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_SourceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceLocation proto.InternalMessageInfo + +func (m *SourceLocation) GetFile() string { + if m != nil { + return m.File + } + return "" +} + +func (m *SourceLocation) GetLine() int64 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *SourceLocation) GetFunctionName() string { + if m != nil { + return m.FunctionName + } + return "" +} + +// A reference to a particular snapshot of the source tree used to build and +// deploy an application. +type SourceReference struct { + // Optional. A URI string identifying the repository. + // Example: "https://github.com/GoogleCloudPlatform/kubernetes.git" + Repository string `protobuf:"bytes,1,opt,name=repository,proto3" json:"repository,omitempty"` + // The canonical and persistent identifier of the deployed revision. 
+ // Example (git): "0035781c50ec7aa23385dc841529ce8a4b70db1b" + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3" json:"revision_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceReference) Reset() { *m = SourceReference{} } +func (m *SourceReference) String() string { return proto.CompactTextString(m) } +func (*SourceReference) ProtoMessage() {} +func (*SourceReference) Descriptor() ([]byte, []int) { + return fileDescriptor_request_log_d8583300719546b7, []int{2} +} +func (m *SourceReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceReference.Unmarshal(m, b) +} +func (m *SourceReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceReference.Marshal(b, m, deterministic) +} +func (dst *SourceReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceReference.Merge(dst, src) +} +func (m *SourceReference) XXX_Size() int { + return xxx_messageInfo_SourceReference.Size(m) +} +func (m *SourceReference) XXX_DiscardUnknown() { + xxx_messageInfo_SourceReference.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceReference proto.InternalMessageInfo + +func (m *SourceReference) GetRepository() string { + if m != nil { + return m.Repository + } + return "" +} + +func (m *SourceReference) GetRevisionId() string { + if m != nil { + return m.RevisionId + } + return "" +} + +// Complete log information about a single HTTP request to an App Engine +// application. +type RequestLog struct { + // Application that handled this request. + AppId string `protobuf:"bytes,1,opt,name=app_id,json=appId,proto3" json:"app_id,omitempty"` + // Module of the application that handled this request. + ModuleId string `protobuf:"bytes,37,opt,name=module_id,json=moduleId,proto3" json:"module_id,omitempty"` + // Version of the application that handled this request. + VersionId string `protobuf:"bytes,2,opt,name=version_id,json=versionId,proto3" json:"version_id,omitempty"` + // Globally unique identifier for a request, which is based on the request + // start time. Request IDs for requests which started later will compare + // greater as strings than those for requests which started earlier. + RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Origin IP address. + Ip string `protobuf:"bytes,4,opt,name=ip,proto3" json:"ip,omitempty"` + // Time when the request started. + StartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time when the request finished. + EndTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Latency of the request. + Latency *duration.Duration `protobuf:"bytes,8,opt,name=latency,proto3" json:"latency,omitempty"` + // Number of CPU megacycles used to process request. + MegaCycles int64 `protobuf:"varint,9,opt,name=mega_cycles,json=megaCycles,proto3" json:"mega_cycles,omitempty"` + // Request method. Example: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`, `"DELETE"`. + Method string `protobuf:"bytes,10,opt,name=method,proto3" json:"method,omitempty"` + // Contains the path and query portion of the URL that was requested. For + // example, if the URL was "http://example.com/app?name=val", the resource + // would be "/app?name=val". The fragment identifier, which is identified by + // the `#` character, is not included. 
+ Resource string `protobuf:"bytes,11,opt,name=resource,proto3" json:"resource,omitempty"` + // HTTP version of request. Example: `"HTTP/1.1"`. + HttpVersion string `protobuf:"bytes,12,opt,name=http_version,json=httpVersion,proto3" json:"http_version,omitempty"` + // HTTP response status code. Example: 200, 404. + Status int32 `protobuf:"varint,13,opt,name=status,proto3" json:"status,omitempty"` + // Size in bytes sent back to client by request. + ResponseSize int64 `protobuf:"varint,14,opt,name=response_size,json=responseSize,proto3" json:"response_size,omitempty"` + // Referrer URL of request. + Referrer string `protobuf:"bytes,15,opt,name=referrer,proto3" json:"referrer,omitempty"` + // User agent that made the request. + UserAgent string `protobuf:"bytes,16,opt,name=user_agent,json=userAgent,proto3" json:"user_agent,omitempty"` + // The logged-in user who made the request. + // + // Most likely, this is the part of the user's email before the `@` sign. The + // field value is the same for different requests from the same user, but + // different users can have similar names. This information is also + // available to the application via the App Engine Users API. + // + // This field will be populated starting with App Engine 1.9.21. + Nickname string `protobuf:"bytes,40,opt,name=nickname,proto3" json:"nickname,omitempty"` + // File or class that handled the request. + UrlMapEntry string `protobuf:"bytes,17,opt,name=url_map_entry,json=urlMapEntry,proto3" json:"url_map_entry,omitempty"` + // Internet host and port number of the resource being requested. + Host string `protobuf:"bytes,20,opt,name=host,proto3" json:"host,omitempty"` + // An indication of the relative cost of serving this request. + Cost float64 `protobuf:"fixed64,21,opt,name=cost,proto3" json:"cost,omitempty"` + // Queue name of the request, in the case of an offline request. + TaskQueueName string `protobuf:"bytes,22,opt,name=task_queue_name,json=taskQueueName,proto3" json:"task_queue_name,omitempty"` + // Task name of the request, in the case of an offline request. + TaskName string `protobuf:"bytes,23,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` + // Whether this was a loading request for the instance. + WasLoadingRequest bool `protobuf:"varint,24,opt,name=was_loading_request,json=wasLoadingRequest,proto3" json:"was_loading_request,omitempty"` + // Time this request spent in the pending request queue. + PendingTime *duration.Duration `protobuf:"bytes,25,opt,name=pending_time,json=pendingTime,proto3" json:"pending_time,omitempty"` + // If the instance processing this request belongs to a manually scaled + // module, then this is the 0-based index of the instance. Otherwise, this + // value is -1. + InstanceIndex int32 `protobuf:"varint,26,opt,name=instance_index,json=instanceIndex,proto3" json:"instance_index,omitempty"` + // Whether this request is finished or active. + Finished bool `protobuf:"varint,27,opt,name=finished,proto3" json:"finished,omitempty"` + // Whether this is the first `RequestLog` entry for this request. If an + // active request has several `RequestLog` entries written to Stackdriver + // Logging, then this field will be set for one of them. + First bool `protobuf:"varint,42,opt,name=first,proto3" json:"first,omitempty"` + // An identifier for the instance that handled the request. + InstanceId string `protobuf:"bytes,28,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // A list of log lines emitted by the application while serving this request. 
+ Line []*LogLine `protobuf:"bytes,29,rep,name=line,proto3" json:"line,omitempty"` + // App Engine release version. + AppEngineRelease string `protobuf:"bytes,38,opt,name=app_engine_release,json=appEngineRelease,proto3" json:"app_engine_release,omitempty"` + // Stackdriver Trace identifier for this request. + TraceId string `protobuf:"bytes,39,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"` + // Source code for the application that handled this request. There can be + // more than one source reference per deployed application if source code is + // distributed among multiple repositories. + SourceReference []*SourceReference `protobuf:"bytes,41,rep,name=source_reference,json=sourceReference,proto3" json:"source_reference,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestLog) Reset() { *m = RequestLog{} } +func (m *RequestLog) String() string { return proto.CompactTextString(m) } +func (*RequestLog) ProtoMessage() {} +func (*RequestLog) Descriptor() ([]byte, []int) { + return fileDescriptor_request_log_d8583300719546b7, []int{3} +} +func (m *RequestLog) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestLog.Unmarshal(m, b) +} +func (m *RequestLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestLog.Marshal(b, m, deterministic) +} +func (dst *RequestLog) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestLog.Merge(dst, src) +} +func (m *RequestLog) XXX_Size() int { + return xxx_messageInfo_RequestLog.Size(m) +} +func (m *RequestLog) XXX_DiscardUnknown() { + xxx_messageInfo_RequestLog.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestLog proto.InternalMessageInfo + +func (m *RequestLog) GetAppId() string { + if m != nil { + return m.AppId + } + return "" +} + +func (m *RequestLog) GetModuleId() string { + if m != nil { + return m.ModuleId + } + return "" +} + +func (m *RequestLog) GetVersionId() string { + if m != nil { + return m.VersionId + } + return "" +} + +func (m *RequestLog) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *RequestLog) GetIp() string { + if m != nil { + return m.Ip + } + return "" +} + +func (m *RequestLog) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *RequestLog) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *RequestLog) GetLatency() *duration.Duration { + if m != nil { + return m.Latency + } + return nil +} + +func (m *RequestLog) GetMegaCycles() int64 { + if m != nil { + return m.MegaCycles + } + return 0 +} + +func (m *RequestLog) GetMethod() string { + if m != nil { + return m.Method + } + return "" +} + +func (m *RequestLog) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +func (m *RequestLog) GetHttpVersion() string { + if m != nil { + return m.HttpVersion + } + return "" +} + +func (m *RequestLog) GetStatus() int32 { + if m != nil { + return m.Status + } + return 0 +} + +func (m *RequestLog) GetResponseSize() int64 { + if m != nil { + return m.ResponseSize + } + return 0 +} + +func (m *RequestLog) GetReferrer() string { + if m != nil { + return m.Referrer + } + return "" +} + +func (m *RequestLog) GetUserAgent() string { + if m != nil { + return m.UserAgent + } + return "" +} + +func (m *RequestLog) GetNickname() string { + if m != nil { + return m.Nickname + } + return "" +} + +func (m 
*RequestLog) GetUrlMapEntry() string { + if m != nil { + return m.UrlMapEntry + } + return "" +} + +func (m *RequestLog) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *RequestLog) GetCost() float64 { + if m != nil { + return m.Cost + } + return 0 +} + +func (m *RequestLog) GetTaskQueueName() string { + if m != nil { + return m.TaskQueueName + } + return "" +} + +func (m *RequestLog) GetTaskName() string { + if m != nil { + return m.TaskName + } + return "" +} + +func (m *RequestLog) GetWasLoadingRequest() bool { + if m != nil { + return m.WasLoadingRequest + } + return false +} + +func (m *RequestLog) GetPendingTime() *duration.Duration { + if m != nil { + return m.PendingTime + } + return nil +} + +func (m *RequestLog) GetInstanceIndex() int32 { + if m != nil { + return m.InstanceIndex + } + return 0 +} + +func (m *RequestLog) GetFinished() bool { + if m != nil { + return m.Finished + } + return false +} + +func (m *RequestLog) GetFirst() bool { + if m != nil { + return m.First + } + return false +} + +func (m *RequestLog) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *RequestLog) GetLine() []*LogLine { + if m != nil { + return m.Line + } + return nil +} + +func (m *RequestLog) GetAppEngineRelease() string { + if m != nil { + return m.AppEngineRelease + } + return "" +} + +func (m *RequestLog) GetTraceId() string { + if m != nil { + return m.TraceId + } + return "" +} + +func (m *RequestLog) GetSourceReference() []*SourceReference { + if m != nil { + return m.SourceReference + } + return nil +} + +func init() { + proto.RegisterType((*LogLine)(nil), "google.appengine.logging.v1.LogLine") + proto.RegisterType((*SourceLocation)(nil), "google.appengine.logging.v1.SourceLocation") + proto.RegisterType((*SourceReference)(nil), "google.appengine.logging.v1.SourceReference") + proto.RegisterType((*RequestLog)(nil), "google.appengine.logging.v1.RequestLog") +} + +func init() { + proto.RegisterFile("google/appengine/logging/v1/request_log.proto", fileDescriptor_request_log_d8583300719546b7) +} + +var fileDescriptor_request_log_d8583300719546b7 = []byte{ + // 921 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x7f, 0x6f, 0x1c, 0x35, + 0x10, 0xd5, 0xe6, 0xe7, 0x9d, 0x2f, 0xb9, 0xa4, 0xa6, 0x2d, 0x4e, 0x42, 0x9b, 0x23, 0xd0, 0x70, + 0x40, 0xd9, 0x53, 0x53, 0x21, 0x81, 0xe8, 0x3f, 0x04, 0xfa, 0xc7, 0x49, 0x57, 0x54, 0x36, 0x15, + 0x48, 0x48, 0x68, 0xe5, 0xee, 0xce, 0x6d, 0xac, 0xee, 0xda, 0xae, 0xed, 0xbd, 0xf6, 0xfa, 0x41, + 0xf8, 0x98, 0x7c, 0x06, 0xe4, 0xb1, 0xf7, 0x4a, 0x00, 0x25, 0xfc, 0xb7, 0xf3, 0xe6, 0xbd, 0xf1, + 0xd8, 0x7e, 0x9e, 0x25, 0x5f, 0x55, 0x4a, 0x55, 0x35, 0x4c, 0xb8, 0xd6, 0x20, 0x2b, 0x21, 0x61, + 0x52, 0xab, 0xaa, 0x12, 0xb2, 0x9a, 0x2c, 0x1e, 0x4d, 0x0c, 0xbc, 0x6e, 0xc1, 0xba, 0xbc, 0x56, + 0x55, 0xaa, 0x8d, 0x72, 0x8a, 0x1e, 0x05, 0x7a, 0xba, 0xa2, 0xa7, 0x91, 0x9e, 0x2e, 0x1e, 0x1d, + 0x9e, 0xc6, 0x5a, 0x5d, 0x05, 0xb7, 0xd4, 0x18, 0xe4, 0x16, 0x16, 0x60, 0x84, 0x5b, 0x86, 0x22, + 0x87, 0xf7, 0x23, 0x0f, 0xa3, 0x97, 0xed, 0x7c, 0x52, 0xb6, 0x86, 0x3b, 0xa1, 0x64, 0xcc, 0x1f, + 0xff, 0x33, 0xef, 0x44, 0x03, 0xd6, 0xf1, 0x46, 0x07, 0xc2, 0xc9, 0x9f, 0x09, 0xd9, 0x9e, 0xa9, + 0x6a, 0x26, 0x24, 0xd0, 0x94, 0x6c, 0xf8, 0x34, 0x4b, 0x46, 0xc9, 0x78, 0x70, 0x76, 0x98, 0xc6, + 0x06, 0x3b, 0x6d, 0xfa, 0xa2, 0xd3, 0x66, 0xc8, 0xa3, 0x4f, 0x48, 0xaf, 0x6b, 0x87, 0xad, 0x8d, + 0x92, 0xf1, 0xf0, 0x6c, 0xd4, 0x69, 0xba, 0xad, 0xf8, 0xbe, 0xd3, 
0x99, 0xaa, 0x2e, 0x22, 0x2f, + 0x5b, 0x29, 0xe8, 0x31, 0x19, 0xf8, 0x0d, 0x35, 0x60, 0x2d, 0xaf, 0x80, 0xad, 0x8f, 0x92, 0x71, + 0x3f, 0x23, 0xb5, 0xaa, 0x9e, 0x05, 0x84, 0xbe, 0x20, 0x7b, 0x56, 0xb5, 0xa6, 0x80, 0xbc, 0x56, + 0x05, 0x6e, 0x8a, 0x6d, 0x60, 0x67, 0x5f, 0xa6, 0xd7, 0x1c, 0x5d, 0x7a, 0x81, 0x9a, 0x59, 0x94, + 0x64, 0x43, 0x7b, 0x25, 0x3e, 0xf9, 0x9d, 0x0c, 0xaf, 0x32, 0x28, 0x25, 0x1b, 0x73, 0x51, 0x87, + 0x6d, 0xf7, 0x33, 0xfc, 0xf6, 0x58, 0x2d, 0x24, 0xe0, 0xb6, 0xd6, 0x33, 0xfc, 0xa6, 0x9f, 0x90, + 0xdd, 0x79, 0x2b, 0x0b, 0xaf, 0xc9, 0x25, 0x6f, 0xba, 0x96, 0x77, 0x3a, 0xf0, 0x27, 0xde, 0xc0, + 0x49, 0x46, 0xf6, 0x42, 0xf9, 0x0c, 0xe6, 0x60, 0x40, 0x16, 0x40, 0xef, 0x13, 0x62, 0x40, 0x2b, + 0x2b, 0x9c, 0x32, 0xcb, 0xb8, 0xca, 0xdf, 0x10, 0x7f, 0x10, 0x06, 0x16, 0xc2, 0xfa, 0xba, 0xa2, + 0xc4, 0x25, 0x91, 0x10, 0xa0, 0x69, 0x79, 0xf2, 0x47, 0x9f, 0x90, 0x2c, 0xf8, 0x67, 0xa6, 0x2a, + 0x7a, 0x87, 0x6c, 0x71, 0xad, 0x3d, 0x35, 0xd4, 0xda, 0xe4, 0x5a, 0x4f, 0x4b, 0x7a, 0x44, 0xfa, + 0x8d, 0x2a, 0xdb, 0x1a, 0x7c, 0xe6, 0x01, 0x66, 0x7a, 0x01, 0x98, 0x96, 0xf4, 0x1e, 0x21, 0x0b, + 0x30, 0x57, 0x97, 0xe8, 0x47, 0x24, 0xa4, 0x3b, 0x83, 0x8a, 0x32, 0xee, 0xab, 0x1f, 0x91, 0x69, + 0x49, 0x87, 0x64, 0x4d, 0x68, 0x3c, 0xfc, 0x7e, 0xb6, 0x26, 0x34, 0xfd, 0x96, 0x10, 0xeb, 0xb8, + 0x71, 0x39, 0xda, 0x65, 0xeb, 0x46, 0xbb, 0xf4, 0x91, 0xed, 0x63, 0xfa, 0x35, 0xe9, 0x81, 0x2c, + 0x83, 0x70, 0xfb, 0x46, 0xe1, 0x36, 0xc8, 0x12, 0x65, 0x8f, 0xc9, 0x76, 0xcd, 0x1d, 0xc8, 0x62, + 0xc9, 0x7a, 0xa8, 0x3a, 0xf8, 0x97, 0xea, 0xc7, 0xe8, 0xfc, 0xac, 0x63, 0xfa, 0x83, 0x6d, 0xa0, + 0xe2, 0x79, 0xb1, 0x2c, 0x6a, 0xb0, 0xac, 0x8f, 0x77, 0x49, 0x3c, 0xf4, 0x03, 0x22, 0xf4, 0x2e, + 0xd9, 0x6a, 0xc0, 0x5d, 0xaa, 0x92, 0x11, 0xdc, 0x5b, 0x8c, 0xe8, 0x21, 0xe9, 0x19, 0x08, 0xbe, + 0x61, 0x83, 0x70, 0x92, 0x5d, 0x4c, 0x3f, 0x26, 0x3b, 0x97, 0xce, 0xe9, 0x3c, 0x1e, 0x1e, 0xdb, + 0xc1, 0xfc, 0xc0, 0x63, 0xbf, 0x04, 0xc8, 0x97, 0xb5, 0x8e, 0xbb, 0xd6, 0xb2, 0xdd, 0x51, 0x32, + 0xde, 0xcc, 0x62, 0xe4, 0x0d, 0x64, 0xc0, 0x6a, 0x25, 0x2d, 0xe4, 0x56, 0xbc, 0x03, 0x36, 0xc4, + 0x8e, 0x76, 0x3a, 0xf0, 0x42, 0xbc, 0x83, 0xb0, 0xf6, 0x1c, 0x8c, 0x01, 0xc3, 0xf6, 0xba, 0xb5, + 0x43, 0xec, 0xaf, 0xa9, 0xb5, 0x60, 0x72, 0x5e, 0x81, 0x74, 0x6c, 0x3f, 0x5c, 0x93, 0x47, 0xbe, + 0xf7, 0x80, 0x97, 0x4a, 0x51, 0xbc, 0x42, 0x6f, 0x8e, 0x83, 0xb4, 0x8b, 0xe9, 0x09, 0xd9, 0x6d, + 0x4d, 0x9d, 0x37, 0x5c, 0xe7, 0x20, 0x9d, 0x59, 0xb2, 0x5b, 0xa1, 0xef, 0xd6, 0xd4, 0xcf, 0xb8, + 0x7e, 0xea, 0x21, 0x6f, 0xfa, 0x4b, 0x65, 0x1d, 0xbb, 0x1d, 0x1e, 0x82, 0xff, 0xf6, 0x58, 0xe1, + 0xb1, 0x3b, 0xa3, 0x64, 0x9c, 0x64, 0xf8, 0x4d, 0x4f, 0xc9, 0x9e, 0xe3, 0xf6, 0x55, 0xfe, 0xba, + 0x85, 0x16, 0xc2, 0x53, 0xb8, 0x8b, 0x92, 0x5d, 0x0f, 0xff, 0xec, 0x51, 0xff, 0x16, 0xbc, 0x23, + 0x91, 0x87, 0x8c, 0x0f, 0x43, 0x43, 0x1e, 0xc0, 0x64, 0x4a, 0x3e, 0x78, 0xc3, 0x6d, 0x5e, 0x2b, + 0x5e, 0x0a, 0x59, 0xe5, 0xd1, 0x6c, 0x8c, 0x8d, 0x92, 0x71, 0x2f, 0xbb, 0xf5, 0x86, 0xdb, 0x59, + 0xc8, 0x44, 0xe3, 0xd3, 0x27, 0x64, 0x47, 0x83, 0x44, 0x2e, 0x9a, 0xe7, 0xe0, 0x26, 0x1b, 0x0c, + 0x22, 0x1d, 0xfd, 0xf3, 0x80, 0x0c, 0x85, 0xb4, 0x8e, 0xcb, 0x02, 0x72, 0x21, 0x4b, 0x78, 0xcb, + 0x0e, 0xf1, 0x6a, 0x76, 0x3b, 0x74, 0xea, 0x41, 0x7f, 0x82, 0x73, 0x21, 0x85, 0xbd, 0x84, 0x92, + 0x1d, 0x61, 0x27, 0xab, 0x98, 0xde, 0x26, 0x9b, 0x73, 0x61, 0xac, 0x63, 0x5f, 0x60, 0x22, 0x04, + 0xde, 0x63, 0xef, 0x0b, 0x97, 0xec, 0xa3, 0xf0, 0x78, 0x57, 0x55, 0x4b, 0xfa, 0x4d, 0x9c, 0x24, + 0xf7, 0x46, 0xeb, 0xe3, 0xc1, 0xd9, 0xa7, 0xd7, 0x8e, 0xae, 0x38, 0x88, 0xe3, 0xbc, 0x79, 
0x48, + 0xa8, 0x7f, 0xe7, 0x81, 0x96, 0x1b, 0xa8, 0x81, 0x5b, 0x60, 0xa7, 0xb8, 0xc2, 0x3e, 0xd7, 0xfa, + 0x29, 0x26, 0xb2, 0x80, 0xd3, 0x03, 0xd2, 0x73, 0x86, 0x87, 0x2e, 0x3e, 0x43, 0xce, 0x36, 0xc6, + 0xd3, 0x92, 0xfe, 0x4a, 0xf6, 0xe3, 0x20, 0x35, 0xdd, 0x50, 0x62, 0x9f, 0x63, 0x3b, 0x0f, 0xff, + 0xc7, 0x24, 0x5d, 0x0d, 0xb2, 0x2c, 0x8e, 0xe3, 0x15, 0x70, 0xfe, 0x96, 0x1c, 0x17, 0xaa, 0xb9, + 0xae, 0xc6, 0xf9, 0xde, 0xfb, 0xc1, 0xf5, 0xdc, 0x5f, 0xd1, 0xf3, 0xe4, 0xb7, 0xf3, 0xc8, 0xaf, + 0x54, 0xcd, 0x65, 0x95, 0x2a, 0x53, 0x4d, 0x2a, 0x90, 0x78, 0x81, 0x93, 0x90, 0xe2, 0x5a, 0xd8, + 0xff, 0xfc, 0x8d, 0x7e, 0x17, 0x3f, 0x5f, 0x6e, 0x21, 0xfd, 0xf1, 0x5f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x05, 0xf7, 0x68, 0xa8, 0x74, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/app_yaml.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/app_yaml.pb.go new file mode 100644 index 0000000..dc395d5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/app_yaml.pb.go @@ -0,0 +1,1025 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/app_yaml.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Actions to take when the user is not logged in. +type AuthFailAction int32 + +const ( + // Not specified. `AUTH_FAIL_ACTION_REDIRECT` is assumed. + AuthFailAction_AUTH_FAIL_ACTION_UNSPECIFIED AuthFailAction = 0 + // Redirects user to "accounts.google.com". The user is redirected back to the + // application URL after signing in or creating an account. + AuthFailAction_AUTH_FAIL_ACTION_REDIRECT AuthFailAction = 1 + // Rejects request with a `401` HTTP status code and an error + // message. + AuthFailAction_AUTH_FAIL_ACTION_UNAUTHORIZED AuthFailAction = 2 +) + +var AuthFailAction_name = map[int32]string{ + 0: "AUTH_FAIL_ACTION_UNSPECIFIED", + 1: "AUTH_FAIL_ACTION_REDIRECT", + 2: "AUTH_FAIL_ACTION_UNAUTHORIZED", +} +var AuthFailAction_value = map[string]int32{ + "AUTH_FAIL_ACTION_UNSPECIFIED": 0, + "AUTH_FAIL_ACTION_REDIRECT": 1, + "AUTH_FAIL_ACTION_UNAUTHORIZED": 2, +} + +func (x AuthFailAction) String() string { + return proto.EnumName(AuthFailAction_name, int32(x)) +} +func (AuthFailAction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{0} +} + +// Methods to restrict access to a URL based on login status. +type LoginRequirement int32 + +const ( + // Not specified. `LOGIN_OPTIONAL` is assumed. + LoginRequirement_LOGIN_UNSPECIFIED LoginRequirement = 0 + // Does not require that the user is signed in. + LoginRequirement_LOGIN_OPTIONAL LoginRequirement = 1 + // If the user is not signed in, the `auth_fail_action` is taken. 
+ // In addition, if the user is not an administrator for the + // application, they are given an error message regardless of + // `auth_fail_action`. If the user is an administrator, the handler + // proceeds. + LoginRequirement_LOGIN_ADMIN LoginRequirement = 2 + // If the user has signed in, the handler proceeds normally. Otherwise, the + // auth_fail_action is taken. + LoginRequirement_LOGIN_REQUIRED LoginRequirement = 3 +) + +var LoginRequirement_name = map[int32]string{ + 0: "LOGIN_UNSPECIFIED", + 1: "LOGIN_OPTIONAL", + 2: "LOGIN_ADMIN", + 3: "LOGIN_REQUIRED", +} +var LoginRequirement_value = map[string]int32{ + "LOGIN_UNSPECIFIED": 0, + "LOGIN_OPTIONAL": 1, + "LOGIN_ADMIN": 2, + "LOGIN_REQUIRED": 3, +} + +func (x LoginRequirement) String() string { + return proto.EnumName(LoginRequirement_name, int32(x)) +} +func (LoginRequirement) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{1} +} + +// Methods to enforce security (HTTPS) on a URL. +type SecurityLevel int32 + +const ( + // Not specified. + SecurityLevel_SECURE_UNSPECIFIED SecurityLevel = 0 + // Both HTTP and HTTPS requests with URLs that match the handler succeed + // without redirects. The application can examine the request to determine + // which protocol was used, and respond accordingly. + SecurityLevel_SECURE_DEFAULT SecurityLevel = 0 + // Requests for a URL that match this handler that use HTTPS are automatically + // redirected to the HTTP equivalent URL. + SecurityLevel_SECURE_NEVER SecurityLevel = 1 + // Both HTTP and HTTPS requests with URLs that match the handler succeed + // without redirects. The application can examine the request to determine + // which protocol was used and respond accordingly. + SecurityLevel_SECURE_OPTIONAL SecurityLevel = 2 + // Requests for a URL that match this handler that do not use HTTPS are + // automatically redirected to the HTTPS URL with the same path. Query + // parameters are reserved for the redirect. + SecurityLevel_SECURE_ALWAYS SecurityLevel = 3 +) + +var SecurityLevel_name = map[int32]string{ + 0: "SECURE_UNSPECIFIED", + // Duplicate value: 0: "SECURE_DEFAULT", + 1: "SECURE_NEVER", + 2: "SECURE_OPTIONAL", + 3: "SECURE_ALWAYS", +} +var SecurityLevel_value = map[string]int32{ + "SECURE_UNSPECIFIED": 0, + "SECURE_DEFAULT": 0, + "SECURE_NEVER": 1, + "SECURE_OPTIONAL": 2, + "SECURE_ALWAYS": 3, +} + +func (x SecurityLevel) String() string { + return proto.EnumName(SecurityLevel_name, int32(x)) +} +func (SecurityLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{2} +} + +// Error codes. +type ErrorHandler_ErrorCode int32 + +const ( + // Not specified. ERROR_CODE_DEFAULT is assumed. + ErrorHandler_ERROR_CODE_UNSPECIFIED ErrorHandler_ErrorCode = 0 + // All other error types. + ErrorHandler_ERROR_CODE_DEFAULT ErrorHandler_ErrorCode = 0 + // Application has exceeded a resource quota. + ErrorHandler_ERROR_CODE_OVER_QUOTA ErrorHandler_ErrorCode = 1 + // Client blocked by the application's Denial of Service protection + // configuration. + ErrorHandler_ERROR_CODE_DOS_API_DENIAL ErrorHandler_ErrorCode = 2 + // Deadline reached before the application responds. 
+ ErrorHandler_ERROR_CODE_TIMEOUT ErrorHandler_ErrorCode = 3 +) + +var ErrorHandler_ErrorCode_name = map[int32]string{ + 0: "ERROR_CODE_UNSPECIFIED", + // Duplicate value: 0: "ERROR_CODE_DEFAULT", + 1: "ERROR_CODE_OVER_QUOTA", + 2: "ERROR_CODE_DOS_API_DENIAL", + 3: "ERROR_CODE_TIMEOUT", +} +var ErrorHandler_ErrorCode_value = map[string]int32{ + "ERROR_CODE_UNSPECIFIED": 0, + "ERROR_CODE_DEFAULT": 0, + "ERROR_CODE_OVER_QUOTA": 1, + "ERROR_CODE_DOS_API_DENIAL": 2, + "ERROR_CODE_TIMEOUT": 3, +} + +func (x ErrorHandler_ErrorCode) String() string { + return proto.EnumName(ErrorHandler_ErrorCode_name, int32(x)) +} +func (ErrorHandler_ErrorCode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{1, 0} +} + +// Redirect codes. +type UrlMap_RedirectHttpResponseCode int32 + +const ( + // Not specified. `302` is assumed. + UrlMap_REDIRECT_HTTP_RESPONSE_CODE_UNSPECIFIED UrlMap_RedirectHttpResponseCode = 0 + // `301 Moved Permanently` code. + UrlMap_REDIRECT_HTTP_RESPONSE_CODE_301 UrlMap_RedirectHttpResponseCode = 1 + // `302 Moved Temporarily` code. + UrlMap_REDIRECT_HTTP_RESPONSE_CODE_302 UrlMap_RedirectHttpResponseCode = 2 + // `303 See Other` code. + UrlMap_REDIRECT_HTTP_RESPONSE_CODE_303 UrlMap_RedirectHttpResponseCode = 3 + // `307 Temporary Redirect` code. + UrlMap_REDIRECT_HTTP_RESPONSE_CODE_307 UrlMap_RedirectHttpResponseCode = 4 +) + +var UrlMap_RedirectHttpResponseCode_name = map[int32]string{ + 0: "REDIRECT_HTTP_RESPONSE_CODE_UNSPECIFIED", + 1: "REDIRECT_HTTP_RESPONSE_CODE_301", + 2: "REDIRECT_HTTP_RESPONSE_CODE_302", + 3: "REDIRECT_HTTP_RESPONSE_CODE_303", + 4: "REDIRECT_HTTP_RESPONSE_CODE_307", +} +var UrlMap_RedirectHttpResponseCode_value = map[string]int32{ + "REDIRECT_HTTP_RESPONSE_CODE_UNSPECIFIED": 0, + "REDIRECT_HTTP_RESPONSE_CODE_301": 1, + "REDIRECT_HTTP_RESPONSE_CODE_302": 2, + "REDIRECT_HTTP_RESPONSE_CODE_303": 3, + "REDIRECT_HTTP_RESPONSE_CODE_307": 4, +} + +func (x UrlMap_RedirectHttpResponseCode) String() string { + return proto.EnumName(UrlMap_RedirectHttpResponseCode_name, int32(x)) +} +func (UrlMap_RedirectHttpResponseCode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{2, 0} +} + +// [Google Cloud +// Endpoints](https://cloud.google.com/appengine/docs/python/endpoints/) +// configuration for API handlers. +type ApiConfigHandler struct { + // Action to take when users access resources that require + // authentication. Defaults to `redirect`. + AuthFailAction AuthFailAction `protobuf:"varint,1,opt,name=auth_fail_action,json=authFailAction,proto3,enum=google.appengine.v1.AuthFailAction" json:"auth_fail_action,omitempty"` + // Level of login required to access this resource. Defaults to + // `optional`. + Login LoginRequirement `protobuf:"varint,2,opt,name=login,proto3,enum=google.appengine.v1.LoginRequirement" json:"login,omitempty"` + // Path to the script from the application root directory. + Script string `protobuf:"bytes,3,opt,name=script,proto3" json:"script,omitempty"` + // Security (HTTPS) enforcement for this URL. + SecurityLevel SecurityLevel `protobuf:"varint,4,opt,name=security_level,json=securityLevel,proto3,enum=google.appengine.v1.SecurityLevel" json:"security_level,omitempty"` + // URL to serve the endpoint at. 
+ Url string `protobuf:"bytes,5,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApiConfigHandler) Reset() { *m = ApiConfigHandler{} } +func (m *ApiConfigHandler) String() string { return proto.CompactTextString(m) } +func (*ApiConfigHandler) ProtoMessage() {} +func (*ApiConfigHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{0} +} +func (m *ApiConfigHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApiConfigHandler.Unmarshal(m, b) +} +func (m *ApiConfigHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApiConfigHandler.Marshal(b, m, deterministic) +} +func (dst *ApiConfigHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApiConfigHandler.Merge(dst, src) +} +func (m *ApiConfigHandler) XXX_Size() int { + return xxx_messageInfo_ApiConfigHandler.Size(m) +} +func (m *ApiConfigHandler) XXX_DiscardUnknown() { + xxx_messageInfo_ApiConfigHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_ApiConfigHandler proto.InternalMessageInfo + +func (m *ApiConfigHandler) GetAuthFailAction() AuthFailAction { + if m != nil { + return m.AuthFailAction + } + return AuthFailAction_AUTH_FAIL_ACTION_UNSPECIFIED +} + +func (m *ApiConfigHandler) GetLogin() LoginRequirement { + if m != nil { + return m.Login + } + return LoginRequirement_LOGIN_UNSPECIFIED +} + +func (m *ApiConfigHandler) GetScript() string { + if m != nil { + return m.Script + } + return "" +} + +func (m *ApiConfigHandler) GetSecurityLevel() SecurityLevel { + if m != nil { + return m.SecurityLevel + } + return SecurityLevel_SECURE_UNSPECIFIED +} + +func (m *ApiConfigHandler) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// Custom static error page to be served when an error occurs. +type ErrorHandler struct { + // Error condition this handler applies to. + ErrorCode ErrorHandler_ErrorCode `protobuf:"varint,1,opt,name=error_code,json=errorCode,proto3,enum=google.appengine.v1.ErrorHandler_ErrorCode" json:"error_code,omitempty"` + // Static file content to be served for this error. + StaticFile string `protobuf:"bytes,2,opt,name=static_file,json=staticFile,proto3" json:"static_file,omitempty"` + // MIME type of file. Defaults to `text/html`. 
+ MimeType string `protobuf:"bytes,3,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorHandler) Reset() { *m = ErrorHandler{} } +func (m *ErrorHandler) String() string { return proto.CompactTextString(m) } +func (*ErrorHandler) ProtoMessage() {} +func (*ErrorHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{1} +} +func (m *ErrorHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorHandler.Unmarshal(m, b) +} +func (m *ErrorHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorHandler.Marshal(b, m, deterministic) +} +func (dst *ErrorHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorHandler.Merge(dst, src) +} +func (m *ErrorHandler) XXX_Size() int { + return xxx_messageInfo_ErrorHandler.Size(m) +} +func (m *ErrorHandler) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorHandler proto.InternalMessageInfo + +func (m *ErrorHandler) GetErrorCode() ErrorHandler_ErrorCode { + if m != nil { + return m.ErrorCode + } + return ErrorHandler_ERROR_CODE_UNSPECIFIED +} + +func (m *ErrorHandler) GetStaticFile() string { + if m != nil { + return m.StaticFile + } + return "" +} + +func (m *ErrorHandler) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// URL pattern and description of how the URL should be handled. App Engine can +// handle URLs by executing application code or by serving static files +// uploaded with the version, such as images, CSS, or JavaScript. +type UrlMap struct { + // URL prefix. Uses regular expression syntax, which means regexp + // special characters must be escaped, but should not contain groupings. + // All URLs that begin with this prefix are handled by this handler, using the + // portion of the URL after the prefix as part of the file path. + UrlRegex string `protobuf:"bytes,1,opt,name=url_regex,json=urlRegex,proto3" json:"url_regex,omitempty"` + // Type of handler for this URL pattern. + // + // Types that are valid to be assigned to HandlerType: + // *UrlMap_StaticFiles + // *UrlMap_Script + // *UrlMap_ApiEndpoint + HandlerType isUrlMap_HandlerType `protobuf_oneof:"handler_type"` + // Security (HTTPS) enforcement for this URL. + SecurityLevel SecurityLevel `protobuf:"varint,5,opt,name=security_level,json=securityLevel,proto3,enum=google.appengine.v1.SecurityLevel" json:"security_level,omitempty"` + // Level of login required to access this resource. + Login LoginRequirement `protobuf:"varint,6,opt,name=login,proto3,enum=google.appengine.v1.LoginRequirement" json:"login,omitempty"` + // Action to take when users access resources that require + // authentication. Defaults to `redirect`. + AuthFailAction AuthFailAction `protobuf:"varint,7,opt,name=auth_fail_action,json=authFailAction,proto3,enum=google.appengine.v1.AuthFailAction" json:"auth_fail_action,omitempty"` + // `30x` code to use when performing redirects for the `secure` field. + // Defaults to `302`. 
+ RedirectHttpResponseCode UrlMap_RedirectHttpResponseCode `protobuf:"varint,8,opt,name=redirect_http_response_code,json=redirectHttpResponseCode,proto3,enum=google.appengine.v1.UrlMap_RedirectHttpResponseCode" json:"redirect_http_response_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UrlMap) Reset() { *m = UrlMap{} } +func (m *UrlMap) String() string { return proto.CompactTextString(m) } +func (*UrlMap) ProtoMessage() {} +func (*UrlMap) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{2} +} +func (m *UrlMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UrlMap.Unmarshal(m, b) +} +func (m *UrlMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UrlMap.Marshal(b, m, deterministic) +} +func (dst *UrlMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_UrlMap.Merge(dst, src) +} +func (m *UrlMap) XXX_Size() int { + return xxx_messageInfo_UrlMap.Size(m) +} +func (m *UrlMap) XXX_DiscardUnknown() { + xxx_messageInfo_UrlMap.DiscardUnknown(m) +} + +var xxx_messageInfo_UrlMap proto.InternalMessageInfo + +func (m *UrlMap) GetUrlRegex() string { + if m != nil { + return m.UrlRegex + } + return "" +} + +type isUrlMap_HandlerType interface { + isUrlMap_HandlerType() +} + +type UrlMap_StaticFiles struct { + StaticFiles *StaticFilesHandler `protobuf:"bytes,2,opt,name=static_files,json=staticFiles,proto3,oneof"` +} + +type UrlMap_Script struct { + Script *ScriptHandler `protobuf:"bytes,3,opt,name=script,proto3,oneof"` +} + +type UrlMap_ApiEndpoint struct { + ApiEndpoint *ApiEndpointHandler `protobuf:"bytes,4,opt,name=api_endpoint,json=apiEndpoint,proto3,oneof"` +} + +func (*UrlMap_StaticFiles) isUrlMap_HandlerType() {} + +func (*UrlMap_Script) isUrlMap_HandlerType() {} + +func (*UrlMap_ApiEndpoint) isUrlMap_HandlerType() {} + +func (m *UrlMap) GetHandlerType() isUrlMap_HandlerType { + if m != nil { + return m.HandlerType + } + return nil +} + +func (m *UrlMap) GetStaticFiles() *StaticFilesHandler { + if x, ok := m.GetHandlerType().(*UrlMap_StaticFiles); ok { + return x.StaticFiles + } + return nil +} + +func (m *UrlMap) GetScript() *ScriptHandler { + if x, ok := m.GetHandlerType().(*UrlMap_Script); ok { + return x.Script + } + return nil +} + +func (m *UrlMap) GetApiEndpoint() *ApiEndpointHandler { + if x, ok := m.GetHandlerType().(*UrlMap_ApiEndpoint); ok { + return x.ApiEndpoint + } + return nil +} + +func (m *UrlMap) GetSecurityLevel() SecurityLevel { + if m != nil { + return m.SecurityLevel + } + return SecurityLevel_SECURE_UNSPECIFIED +} + +func (m *UrlMap) GetLogin() LoginRequirement { + if m != nil { + return m.Login + } + return LoginRequirement_LOGIN_UNSPECIFIED +} + +func (m *UrlMap) GetAuthFailAction() AuthFailAction { + if m != nil { + return m.AuthFailAction + } + return AuthFailAction_AUTH_FAIL_ACTION_UNSPECIFIED +} + +func (m *UrlMap) GetRedirectHttpResponseCode() UrlMap_RedirectHttpResponseCode { + if m != nil { + return m.RedirectHttpResponseCode + } + return UrlMap_REDIRECT_HTTP_RESPONSE_CODE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*UrlMap) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UrlMap_OneofMarshaler, _UrlMap_OneofUnmarshaler, _UrlMap_OneofSizer, []interface{}{ + (*UrlMap_StaticFiles)(nil), + (*UrlMap_Script)(nil), + (*UrlMap_ApiEndpoint)(nil), + } +} + +func _UrlMap_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UrlMap) + // handler_type + switch x := m.HandlerType.(type) { + case *UrlMap_StaticFiles: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StaticFiles); err != nil { + return err + } + case *UrlMap_Script: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Script); err != nil { + return err + } + case *UrlMap_ApiEndpoint: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApiEndpoint); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UrlMap.HandlerType has unexpected type %T", x) + } + return nil +} + +func _UrlMap_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UrlMap) + switch tag { + case 2: // handler_type.static_files + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StaticFilesHandler) + err := b.DecodeMessage(msg) + m.HandlerType = &UrlMap_StaticFiles{msg} + return true, err + case 3: // handler_type.script + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ScriptHandler) + err := b.DecodeMessage(msg) + m.HandlerType = &UrlMap_Script{msg} + return true, err + case 4: // handler_type.api_endpoint + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApiEndpointHandler) + err := b.DecodeMessage(msg) + m.HandlerType = &UrlMap_ApiEndpoint{msg} + return true, err + default: + return false, nil + } +} + +func _UrlMap_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UrlMap) + // handler_type + switch x := m.HandlerType.(type) { + case *UrlMap_StaticFiles: + s := proto.Size(x.StaticFiles) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UrlMap_Script: + s := proto.Size(x.Script) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UrlMap_ApiEndpoint: + s := proto.Size(x.ApiEndpoint) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Files served directly to the user for a given URL, such as images, CSS +// stylesheets, or JavaScript source files. Static file handlers describe which +// files in the application directory are static files, and which URLs serve +// them. +type StaticFilesHandler struct { + // Path to the static files matched by the URL pattern, from the + // application root directory. The path can refer to text matched in groupings + // in the URL pattern. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // Regular expression that matches the file paths for all files that should be + // referenced by this handler. + UploadPathRegex string `protobuf:"bytes,2,opt,name=upload_path_regex,json=uploadPathRegex,proto3" json:"upload_path_regex,omitempty"` + // HTTP headers to use for all responses from these URLs. 
+ HttpHeaders map[string]string `protobuf:"bytes,3,rep,name=http_headers,json=httpHeaders,proto3" json:"http_headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // MIME type used to serve all files served by this handler. + // + // Defaults to file-specific MIME types, which are derived from each file's + // filename extension. + MimeType string `protobuf:"bytes,4,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Time a static file served by this handler should be cached + // by web proxies and browsers. + Expiration *duration.Duration `protobuf:"bytes,5,opt,name=expiration,proto3" json:"expiration,omitempty"` + // Whether this handler should match the request if the file + // referenced by the handler does not exist. + RequireMatchingFile bool `protobuf:"varint,6,opt,name=require_matching_file,json=requireMatchingFile,proto3" json:"require_matching_file,omitempty"` + // Whether files should also be uploaded as code data. By default, files + // declared in static file handlers are uploaded as static + // data and are only served to end users; they cannot be read by the + // application. If enabled, uploads are charged against both your code and + // static data storage resource quotas. + ApplicationReadable bool `protobuf:"varint,7,opt,name=application_readable,json=applicationReadable,proto3" json:"application_readable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StaticFilesHandler) Reset() { *m = StaticFilesHandler{} } +func (m *StaticFilesHandler) String() string { return proto.CompactTextString(m) } +func (*StaticFilesHandler) ProtoMessage() {} +func (*StaticFilesHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{3} +} +func (m *StaticFilesHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StaticFilesHandler.Unmarshal(m, b) +} +func (m *StaticFilesHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StaticFilesHandler.Marshal(b, m, deterministic) +} +func (dst *StaticFilesHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_StaticFilesHandler.Merge(dst, src) +} +func (m *StaticFilesHandler) XXX_Size() int { + return xxx_messageInfo_StaticFilesHandler.Size(m) +} +func (m *StaticFilesHandler) XXX_DiscardUnknown() { + xxx_messageInfo_StaticFilesHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_StaticFilesHandler proto.InternalMessageInfo + +func (m *StaticFilesHandler) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *StaticFilesHandler) GetUploadPathRegex() string { + if m != nil { + return m.UploadPathRegex + } + return "" +} + +func (m *StaticFilesHandler) GetHttpHeaders() map[string]string { + if m != nil { + return m.HttpHeaders + } + return nil +} + +func (m *StaticFilesHandler) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *StaticFilesHandler) GetExpiration() *duration.Duration { + if m != nil { + return m.Expiration + } + return nil +} + +func (m *StaticFilesHandler) GetRequireMatchingFile() bool { + if m != nil { + return m.RequireMatchingFile + } + return false +} + +func (m *StaticFilesHandler) GetApplicationReadable() bool { + if m != nil { + return m.ApplicationReadable + } + return false +} + +// Executes a script to handle the request that matches the URL pattern. 
+type ScriptHandler struct { + // Path to the script from the application root directory. + ScriptPath string `protobuf:"bytes,1,opt,name=script_path,json=scriptPath,proto3" json:"script_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScriptHandler) Reset() { *m = ScriptHandler{} } +func (m *ScriptHandler) String() string { return proto.CompactTextString(m) } +func (*ScriptHandler) ProtoMessage() {} +func (*ScriptHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{4} +} +func (m *ScriptHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScriptHandler.Unmarshal(m, b) +} +func (m *ScriptHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScriptHandler.Marshal(b, m, deterministic) +} +func (dst *ScriptHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScriptHandler.Merge(dst, src) +} +func (m *ScriptHandler) XXX_Size() int { + return xxx_messageInfo_ScriptHandler.Size(m) +} +func (m *ScriptHandler) XXX_DiscardUnknown() { + xxx_messageInfo_ScriptHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_ScriptHandler proto.InternalMessageInfo + +func (m *ScriptHandler) GetScriptPath() string { + if m != nil { + return m.ScriptPath + } + return "" +} + +// Uses Google Cloud Endpoints to handle requests. +type ApiEndpointHandler struct { + // Path to the script from the application root directory. + ScriptPath string `protobuf:"bytes,1,opt,name=script_path,json=scriptPath,proto3" json:"script_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApiEndpointHandler) Reset() { *m = ApiEndpointHandler{} } +func (m *ApiEndpointHandler) String() string { return proto.CompactTextString(m) } +func (*ApiEndpointHandler) ProtoMessage() {} +func (*ApiEndpointHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{5} +} +func (m *ApiEndpointHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApiEndpointHandler.Unmarshal(m, b) +} +func (m *ApiEndpointHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApiEndpointHandler.Marshal(b, m, deterministic) +} +func (dst *ApiEndpointHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApiEndpointHandler.Merge(dst, src) +} +func (m *ApiEndpointHandler) XXX_Size() int { + return xxx_messageInfo_ApiEndpointHandler.Size(m) +} +func (m *ApiEndpointHandler) XXX_DiscardUnknown() { + xxx_messageInfo_ApiEndpointHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_ApiEndpointHandler proto.InternalMessageInfo + +func (m *ApiEndpointHandler) GetScriptPath() string { + if m != nil { + return m.ScriptPath + } + return "" +} + +// Health checking configuration for VM instances. Unhealthy instances +// are killed and replaced with new instances. Only applicable for +// instances in App Engine flexible environment. +type HealthCheck struct { + // Whether to explicitly disable health checks for this instance. + DisableHealthCheck bool `protobuf:"varint,1,opt,name=disable_health_check,json=disableHealthCheck,proto3" json:"disable_health_check,omitempty"` + // Host header to send when performing an HTTP health check. 
+ // Example: "myapp.appspot.com" + Host string `protobuf:"bytes,2,opt,name=host,proto3" json:"host,omitempty"` + // Number of consecutive successful health checks required before receiving + // traffic. + HealthyThreshold uint32 `protobuf:"varint,3,opt,name=healthy_threshold,json=healthyThreshold,proto3" json:"healthy_threshold,omitempty"` + // Number of consecutive failed health checks required before removing + // traffic. + UnhealthyThreshold uint32 `protobuf:"varint,4,opt,name=unhealthy_threshold,json=unhealthyThreshold,proto3" json:"unhealthy_threshold,omitempty"` + // Number of consecutive failed health checks required before an instance is + // restarted. + RestartThreshold uint32 `protobuf:"varint,5,opt,name=restart_threshold,json=restartThreshold,proto3" json:"restart_threshold,omitempty"` + // Interval between health checks. + CheckInterval *duration.Duration `protobuf:"bytes,6,opt,name=check_interval,json=checkInterval,proto3" json:"check_interval,omitempty"` + // Time before the health check is considered failed. + Timeout *duration.Duration `protobuf:"bytes,7,opt,name=timeout,proto3" json:"timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck) Reset() { *m = HealthCheck{} } +func (m *HealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck) ProtoMessage() {} +func (*HealthCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{6} +} +func (m *HealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck.Merge(dst, src) +} +func (m *HealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck.Size(m) +} +func (m *HealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck proto.InternalMessageInfo + +func (m *HealthCheck) GetDisableHealthCheck() bool { + if m != nil { + return m.DisableHealthCheck + } + return false +} + +func (m *HealthCheck) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *HealthCheck) GetHealthyThreshold() uint32 { + if m != nil { + return m.HealthyThreshold + } + return 0 +} + +func (m *HealthCheck) GetUnhealthyThreshold() uint32 { + if m != nil { + return m.UnhealthyThreshold + } + return 0 +} + +func (m *HealthCheck) GetRestartThreshold() uint32 { + if m != nil { + return m.RestartThreshold + } + return 0 +} + +func (m *HealthCheck) GetCheckInterval() *duration.Duration { + if m != nil { + return m.CheckInterval + } + return nil +} + +func (m *HealthCheck) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +// Third-party Python runtime library that is required by the application. +type Library struct { + // Name of the library. Example: "django". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Version of the library to select, or "latest". 
+ Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Library) Reset() { *m = Library{} } +func (m *Library) String() string { return proto.CompactTextString(m) } +func (*Library) ProtoMessage() {} +func (*Library) Descriptor() ([]byte, []int) { + return fileDescriptor_app_yaml_a3518fc0a16627c5, []int{7} +} +func (m *Library) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Library.Unmarshal(m, b) +} +func (m *Library) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Library.Marshal(b, m, deterministic) +} +func (dst *Library) XXX_Merge(src proto.Message) { + xxx_messageInfo_Library.Merge(dst, src) +} +func (m *Library) XXX_Size() int { + return xxx_messageInfo_Library.Size(m) +} +func (m *Library) XXX_DiscardUnknown() { + xxx_messageInfo_Library.DiscardUnknown(m) +} + +var xxx_messageInfo_Library proto.InternalMessageInfo + +func (m *Library) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Library) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func init() { + proto.RegisterType((*ApiConfigHandler)(nil), "google.appengine.v1.ApiConfigHandler") + proto.RegisterType((*ErrorHandler)(nil), "google.appengine.v1.ErrorHandler") + proto.RegisterType((*UrlMap)(nil), "google.appengine.v1.UrlMap") + proto.RegisterType((*StaticFilesHandler)(nil), "google.appengine.v1.StaticFilesHandler") + proto.RegisterMapType((map[string]string)(nil), "google.appengine.v1.StaticFilesHandler.HttpHeadersEntry") + proto.RegisterType((*ScriptHandler)(nil), "google.appengine.v1.ScriptHandler") + proto.RegisterType((*ApiEndpointHandler)(nil), "google.appengine.v1.ApiEndpointHandler") + proto.RegisterType((*HealthCheck)(nil), "google.appengine.v1.HealthCheck") + proto.RegisterType((*Library)(nil), "google.appengine.v1.Library") + proto.RegisterEnum("google.appengine.v1.AuthFailAction", AuthFailAction_name, AuthFailAction_value) + proto.RegisterEnum("google.appengine.v1.LoginRequirement", LoginRequirement_name, LoginRequirement_value) + proto.RegisterEnum("google.appengine.v1.SecurityLevel", SecurityLevel_name, SecurityLevel_value) + proto.RegisterEnum("google.appengine.v1.ErrorHandler_ErrorCode", ErrorHandler_ErrorCode_name, ErrorHandler_ErrorCode_value) + proto.RegisterEnum("google.appengine.v1.UrlMap_RedirectHttpResponseCode", UrlMap_RedirectHttpResponseCode_name, UrlMap_RedirectHttpResponseCode_value) +} + +func init() { + proto.RegisterFile("google/appengine/v1/app_yaml.proto", fileDescriptor_app_yaml_a3518fc0a16627c5) +} + +var fileDescriptor_app_yaml_a3518fc0a16627c5 = []byte{ + // 1232 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x6e, 0x13, 0x47, + 0x14, 0xc6, 0x76, 0x7e, 0x8f, 0x1d, 0xb3, 0x99, 0x00, 0x75, 0x02, 0x94, 0xd4, 0xa8, 0x02, 0x25, + 0x92, 0x4d, 0x92, 0x56, 0xd0, 0x82, 0xaa, 0x2e, 0xf6, 0xa6, 0xde, 0xca, 0x89, 0xcd, 0xd8, 0xa6, + 0x82, 0x5e, 0x8c, 0x26, 0xf6, 0xc4, 0x3b, 0x62, 0xbd, 0xbb, 0x9d, 0x1d, 0x47, 0xf8, 0x39, 0xaa, + 0xbe, 0x07, 0xb7, 0x7d, 0x90, 0x5e, 0xf5, 0x65, 0xaa, 0x99, 0x1d, 0xff, 0x25, 0x0e, 0xa9, 0xb8, + 0x9b, 0x73, 0xce, 0xf7, 0x9d, 0x9d, 0xf3, 0x3b, 0x0b, 0xc5, 0x7e, 0x18, 0xf6, 0x7d, 0x56, 0xa6, + 0x51, 0xc4, 0x82, 0x3e, 0x0f, 0x58, 0xf9, 0xe2, 0x40, 0x09, 0x64, 0x44, 0x07, 0x7e, 0x29, 0x12, + 0xa1, 0x0c, 0xd1, 0x56, 0x82, 0x29, 0x4d, 0x30, 
0xa5, 0x8b, 0x83, 0x9d, 0x07, 0x13, 0x22, 0x2f, + 0xd3, 0x20, 0x08, 0x25, 0x95, 0x3c, 0x0c, 0xe2, 0x84, 0xb2, 0xf3, 0xb5, 0xb1, 0x6a, 0xe9, 0x6c, + 0x78, 0x5e, 0xee, 0x0d, 0x85, 0x06, 0x24, 0xf6, 0xe2, 0x9f, 0x69, 0xb0, 0xec, 0x88, 0x57, 0xc2, + 0xe0, 0x9c, 0xf7, 0x6b, 0x34, 0xe8, 0xf9, 0x4c, 0xa0, 0x13, 0xb0, 0xe8, 0x50, 0x7a, 0xe4, 0x9c, + 0x72, 0x9f, 0xd0, 0xae, 0x82, 0x17, 0x52, 0xbb, 0xa9, 0xa7, 0xf9, 0xc3, 0xc7, 0xa5, 0x05, 0x57, + 0x28, 0xd9, 0x43, 0xe9, 0x1d, 0x53, 0xee, 0xdb, 0x1a, 0x8a, 0xf3, 0x74, 0x4e, 0x46, 0x2f, 0x61, + 0xd9, 0x0f, 0xfb, 0x3c, 0x28, 0xa4, 0xb5, 0x8f, 0x6f, 0x17, 0xfa, 0xa8, 0x2b, 0x04, 0x66, 0x7f, + 0x0c, 0xb9, 0x60, 0x03, 0x16, 0x48, 0x9c, 0x70, 0xd0, 0x3d, 0x58, 0x89, 0xbb, 0x82, 0x47, 0xb2, + 0x90, 0xd9, 0x4d, 0x3d, 0x5d, 0xc7, 0x46, 0x42, 0x2e, 0xe4, 0x63, 0xd6, 0x1d, 0x0a, 0x2e, 0x47, + 0xc4, 0x67, 0x17, 0xcc, 0x2f, 0x2c, 0x69, 0xef, 0xc5, 0x85, 0xde, 0x5b, 0x06, 0x5a, 0x57, 0x48, + 0xbc, 0x11, 0xcf, 0x8a, 0xc8, 0x82, 0xcc, 0x50, 0xf8, 0x85, 0x65, 0xed, 0x5f, 0x1d, 0x8b, 0x9f, + 0xd2, 0x90, 0x73, 0x84, 0x08, 0xc5, 0x38, 0x23, 0xbf, 0x02, 0x30, 0x25, 0x93, 0x6e, 0xd8, 0x63, + 0x26, 0x17, 0xfb, 0x0b, 0xbf, 0x34, 0x4b, 0x4b, 0x84, 0x4a, 0xd8, 0x63, 0x78, 0x9d, 0x8d, 0x8f, + 0xe8, 0x11, 0x64, 0x63, 0x55, 0xa4, 0x2e, 0x39, 0xe7, 0x3e, 0xd3, 0x49, 0x59, 0xc7, 0x90, 0xa8, + 0x8e, 0xb9, 0xcf, 0xd0, 0x7d, 0x58, 0x1f, 0xf0, 0x01, 0x23, 0x72, 0x14, 0x31, 0x13, 0xf5, 0x9a, + 0x52, 0xb4, 0x47, 0x11, 0x2b, 0xfe, 0x95, 0x82, 0xf5, 0x89, 0x5b, 0xb4, 0x03, 0xf7, 0x1c, 0x8c, + 0x1b, 0x98, 0x54, 0x1a, 0x55, 0x87, 0x74, 0x4e, 0x5b, 0x4d, 0xa7, 0xe2, 0x1e, 0xbb, 0x4e, 0xd5, + 0xba, 0x85, 0xee, 0x01, 0x9a, 0xb1, 0x55, 0x9d, 0x63, 0xbb, 0x53, 0x6f, 0x5b, 0xb7, 0xd0, 0x36, + 0xdc, 0x9d, 0xd1, 0x37, 0xde, 0x3a, 0x98, 0xbc, 0xe9, 0x34, 0xda, 0xb6, 0x95, 0x42, 0x0f, 0x61, + 0x7b, 0x96, 0xd2, 0x68, 0x11, 0xbb, 0xe9, 0x92, 0xaa, 0x73, 0xea, 0xda, 0x75, 0x2b, 0x7d, 0xc9, + 0x63, 0xdb, 0x3d, 0x71, 0x1a, 0x9d, 0xb6, 0x95, 0xd9, 0x49, 0x5b, 0xa9, 0xe2, 0xdf, 0x2b, 0xb0, + 0xd2, 0x11, 0xfe, 0x09, 0x8d, 0xd4, 0xfd, 0x87, 0xc2, 0x27, 0x82, 0xf5, 0xd9, 0x47, 0x9d, 0xab, + 0x75, 0xbc, 0x36, 0x14, 0x3e, 0x56, 0x32, 0xaa, 0x43, 0x6e, 0x26, 0xfa, 0x58, 0x87, 0x9f, 0x3d, + 0x7c, 0xb2, 0xb8, 0x6a, 0x93, 0x9c, 0xc4, 0x26, 0xa3, 0xb5, 0x5b, 0x38, 0x3b, 0xcd, 0x54, 0x8c, + 0x5e, 0xcd, 0x75, 0x47, 0xf6, 0xba, 0xea, 0x6b, 0xc8, 0xd4, 0xc5, 0xb8, 0x87, 0xea, 0x90, 0xa3, + 0x11, 0x27, 0x2c, 0xe8, 0x45, 0x21, 0x0f, 0xa4, 0xee, 0xa0, 0xeb, 0xee, 0x62, 0x47, 0xdc, 0x31, + 0xb8, 0x99, 0xbb, 0xd0, 0xa9, 0x76, 0x41, 0x47, 0x2e, 0x7f, 0x69, 0x47, 0x4e, 0x26, 0x66, 0xe5, + 0x0b, 0x26, 0x66, 0xd1, 0xf4, 0xae, 0x7e, 0xf9, 0xf4, 0xc6, 0x70, 0x5f, 0xb0, 0x1e, 0x17, 0xac, + 0x2b, 0x89, 0x27, 0x65, 0x44, 0x04, 0x8b, 0xa3, 0x30, 0x88, 0x59, 0x32, 0x0b, 0x6b, 0xda, 0xf3, + 0x77, 0x0b, 0x3d, 0x27, 0xfd, 0x50, 0xc2, 0x86, 0x5e, 0x93, 0x32, 0xc2, 0x86, 0xac, 0x87, 0xa2, + 0x20, 0xae, 0xb1, 0x14, 0xff, 0x4d, 0x41, 0xe1, 0x3a, 0x1a, 0xda, 0x87, 0x27, 0xd8, 0xa9, 0xba, + 0xd8, 0xa9, 0xb4, 0x49, 0xad, 0xdd, 0x6e, 0x12, 0xec, 0xb4, 0x9a, 0x8d, 0xd3, 0x96, 0xb3, 0x68, + 0x0a, 0x1e, 0xc3, 0xa3, 0xcf, 0x81, 0x8f, 0x9e, 0x1d, 0x58, 0xa9, 0x9b, 0x41, 0x87, 0x56, 0xfa, + 0x66, 0xd0, 0x91, 0x95, 0xb9, 0x19, 0xf4, 0xdc, 0x5a, 0x7a, 0x9d, 0x87, 0x9c, 0x97, 0xf4, 0x90, + 0x9e, 0xf1, 0xe2, 0xa7, 0x0c, 0xa0, 0xab, 0xbd, 0x8e, 0x10, 0x2c, 0x45, 0x54, 0x7a, 0x66, 0x84, + 0xf4, 0x19, 0xed, 0xc1, 0xe6, 0x30, 0xf2, 0x43, 0xda, 0x23, 0x4a, 0x34, 0x33, 0x96, 0xac, 0x90, + 0xdb, 0x89, 0xa1, 0x49, 0xa5, 0x97, 0x8c, 0xda, 0xef, 0x90, 0xd3, 0x05, 
0xf3, 0x18, 0xed, 0x31, + 0x11, 0x17, 0x32, 0xbb, 0x99, 0xa7, 0xd9, 0xc3, 0x17, 0xff, 0x73, 0xd4, 0x4a, 0x2a, 0xef, 0xb5, + 0x84, 0xea, 0x04, 0x52, 0x8c, 0x70, 0xd6, 0x9b, 0x6a, 0xe6, 0x97, 0xd4, 0xd2, 0xfc, 0x92, 0x42, + 0x3f, 0x00, 0xb0, 0x8f, 0x11, 0x4f, 0x5e, 0x1a, 0x3d, 0x06, 0xd9, 0xc3, 0xed, 0xf1, 0x77, 0xc7, + 0x4f, 0x51, 0xa9, 0x6a, 0x9e, 0x22, 0x3c, 0x03, 0x46, 0x87, 0x70, 0x57, 0x24, 0x3d, 0x4d, 0x06, + 0x54, 0x76, 0x3d, 0x1e, 0xf4, 0x93, 0x3d, 0xa9, 0x46, 0x61, 0x0d, 0x6f, 0x19, 0xe3, 0x89, 0xb1, + 0xe9, 0x85, 0x79, 0x00, 0x77, 0x68, 0x14, 0xf9, 0xbc, 0xab, 0x5d, 0x10, 0xc1, 0x68, 0x8f, 0x9e, + 0xf9, 0x4c, 0x77, 0xfd, 0x1a, 0xde, 0x9a, 0xb1, 0x61, 0x63, 0xda, 0xf9, 0x09, 0xac, 0xcb, 0xf1, + 0xa9, 0x77, 0xe0, 0x03, 0x1b, 0x99, 0x74, 0xab, 0x23, 0xba, 0x03, 0xcb, 0x17, 0xd4, 0x1f, 0x8e, + 0x97, 0x74, 0x22, 0xfc, 0x98, 0x7e, 0x91, 0x2a, 0x3e, 0x83, 0x8d, 0xb9, 0xad, 0xa2, 0xb7, 0xba, + 0x56, 0x90, 0x99, 0x9a, 0x41, 0xa2, 0x52, 0x25, 0x29, 0x7e, 0x0f, 0xe8, 0xea, 0x0e, 0xb9, 0x99, + 0xf6, 0x4f, 0x1a, 0xb2, 0x35, 0x46, 0x7d, 0xe9, 0x55, 0x3c, 0xd6, 0xfd, 0x80, 0x9e, 0xc1, 0x9d, + 0x1e, 0x8f, 0x55, 0x0c, 0xaa, 0xae, 0xbe, 0xf4, 0x48, 0x57, 0xe9, 0x35, 0x73, 0x0d, 0x23, 0x63, + 0x9b, 0x65, 0x20, 0x58, 0xf2, 0xc2, 0x58, 0x9a, 0x18, 0xf4, 0x19, 0xed, 0xc3, 0x66, 0xc2, 0x1e, + 0x11, 0xe9, 0x09, 0x16, 0x7b, 0xa1, 0xdf, 0xd3, 0x2b, 0x74, 0x03, 0x5b, 0xc6, 0xd0, 0x1e, 0xeb, + 0x51, 0x19, 0xb6, 0x86, 0xc1, 0x55, 0xf8, 0x92, 0x86, 0xa3, 0x89, 0x69, 0x4a, 0xd8, 0x87, 0x4d, + 0xc1, 0x62, 0x49, 0x85, 0x9c, 0x81, 0x2f, 0x27, 0xde, 0x8d, 0x61, 0x0a, 0xfe, 0x19, 0xf2, 0x3a, + 0x02, 0xc2, 0x03, 0xc9, 0xc4, 0x05, 0xf5, 0x75, 0xa5, 0x3f, 0xdb, 0x2f, 0x1b, 0x9a, 0xe0, 0x1a, + 0x3c, 0x3a, 0x82, 0x55, 0xc9, 0x07, 0x2c, 0x1c, 0x4a, 0x5d, 0xf1, 0xcf, 0x52, 0xc7, 0xc8, 0xe2, + 0x73, 0x58, 0xad, 0xf3, 0x33, 0x41, 0xc5, 0x48, 0x25, 0x28, 0xa0, 0x03, 0x36, 0x9e, 0x33, 0x75, + 0x46, 0x05, 0x58, 0xbd, 0x60, 0x22, 0x56, 0xed, 0x9b, 0xe4, 0x6d, 0x2c, 0xee, 0x49, 0xc8, 0xcf, + 0x6f, 0x4c, 0xb4, 0x0b, 0x0f, 0xec, 0x4e, 0xbb, 0x46, 0x8e, 0x6d, 0xb7, 0x4e, 0xec, 0x4a, 0xdb, + 0x6d, 0x9c, 0x5e, 0x5a, 0x42, 0x0f, 0x61, 0xfb, 0x0a, 0x62, 0xbc, 0x26, 0xac, 0x14, 0xfa, 0x06, + 0x1e, 0x2e, 0x70, 0xa0, 0x54, 0x0d, 0xec, 0xbe, 0x77, 0xaa, 0x56, 0x7a, 0xef, 0x0c, 0xac, 0xcb, + 0xfb, 0x1e, 0xdd, 0x85, 0xcd, 0x7a, 0xe3, 0x17, 0xf7, 0xf2, 0xc7, 0x10, 0xe4, 0x13, 0x75, 0xa3, + 0xa9, 0x3c, 0xd9, 0x75, 0x2b, 0x85, 0x6e, 0x43, 0x36, 0xd1, 0xd9, 0xd5, 0x13, 0xf7, 0xd4, 0x4a, + 0x4f, 0x41, 0xd8, 0x79, 0xd3, 0x71, 0xb1, 0x53, 0xb5, 0x32, 0x7b, 0x23, 0xd8, 0x98, 0x7b, 0x95, + 0xd4, 0x7b, 0xdf, 0x72, 0x2a, 0x1d, 0xec, 0x5c, 0xfd, 0x82, 0xd1, 0x4f, 0xff, 0x2a, 0x2c, 0xc8, + 0x19, 0xdd, 0xa9, 0xf3, 0xd6, 0xc1, 0x56, 0x0a, 0x6d, 0xc1, 0x6d, 0xa3, 0x99, 0x5c, 0x24, 0x8d, + 0x36, 0x61, 0xc3, 0x28, 0xed, 0xfa, 0x6f, 0xf6, 0xbb, 0x56, 0xf2, 0xf7, 0xf0, 0xba, 0x0f, 0x5f, + 0x75, 0xc3, 0xc1, 0xa2, 0xcd, 0xf4, 0x3a, 0x67, 0x47, 0xd1, 0x3b, 0x3a, 0xf0, 0x9b, 0xaa, 0x96, + 0xcd, 0xd4, 0xfb, 0x57, 0x06, 0xd4, 0x0f, 0x7d, 0x1a, 0xf4, 0x4b, 0xa1, 0xe8, 0x97, 0xfb, 0x2c, + 0xd0, 0x95, 0x2e, 0x27, 0x26, 0x1a, 0xf1, 0x78, 0xee, 0x3f, 0xfa, 0xe5, 0x44, 0x38, 0x5b, 0xd1, + 0xc0, 0xa3, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x96, 0x5d, 0x26, 0x6c, 0x6f, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/appengine.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/appengine.pb.go new file mode 100644 index 0000000..d5fb026 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/appengine/v1/appengine.pb.go @@ -0,0 +1,1767 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/appengine.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Fields that should be returned when [Version][google.appengine.v1.Version] +// resources are retreived. +type VersionView int32 + +const ( + // Basic version information including scaling and inbound services, + // but not detailed deployment information. + VersionView_BASIC VersionView = 0 + // The information from `BASIC`, plus detailed information about the + // deployment. This format is required when creating resources, but + // is not returned in `Get` or `List` by default. + VersionView_FULL VersionView = 1 +) + +var VersionView_name = map[int32]string{ + 0: "BASIC", + 1: "FULL", +} +var VersionView_value = map[string]int32{ + "BASIC": 0, + "FULL": 1, +} + +func (x VersionView) String() string { + return proto.EnumName(VersionView_name, int32(x)) +} +func (VersionView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{0} +} + +// Request message for `Applications.GetApplication`. +type GetApplicationRequest struct { + // Name of the Application resource to get. Example: `apps/myapp`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetApplicationRequest) Reset() { *m = GetApplicationRequest{} } +func (m *GetApplicationRequest) String() string { return proto.CompactTextString(m) } +func (*GetApplicationRequest) ProtoMessage() {} +func (*GetApplicationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{0} +} +func (m *GetApplicationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetApplicationRequest.Unmarshal(m, b) +} +func (m *GetApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetApplicationRequest.Marshal(b, m, deterministic) +} +func (dst *GetApplicationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetApplicationRequest.Merge(dst, src) +} +func (m *GetApplicationRequest) XXX_Size() int { + return xxx_messageInfo_GetApplicationRequest.Size(m) +} +func (m *GetApplicationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetApplicationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetApplicationRequest proto.InternalMessageInfo + +func (m *GetApplicationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for 'Applications.RepairApplication'. +type RepairApplicationRequest struct { + // Name of the application to repair. Example: `apps/myapp` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepairApplicationRequest) Reset() { *m = RepairApplicationRequest{} } +func (m *RepairApplicationRequest) String() string { return proto.CompactTextString(m) } +func (*RepairApplicationRequest) ProtoMessage() {} +func (*RepairApplicationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{1} +} +func (m *RepairApplicationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepairApplicationRequest.Unmarshal(m, b) +} +func (m *RepairApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepairApplicationRequest.Marshal(b, m, deterministic) +} +func (dst *RepairApplicationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepairApplicationRequest.Merge(dst, src) +} +func (m *RepairApplicationRequest) XXX_Size() int { + return xxx_messageInfo_RepairApplicationRequest.Size(m) +} +func (m *RepairApplicationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RepairApplicationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RepairApplicationRequest proto.InternalMessageInfo + +func (m *RepairApplicationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Services.ListServices`. +type ListServicesRequest struct { + // Name of the parent Application resource. Example: `apps/myapp`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Maximum results to return per page. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Continuation token for fetching the next page of results. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServicesRequest) Reset() { *m = ListServicesRequest{} } +func (m *ListServicesRequest) String() string { return proto.CompactTextString(m) } +func (*ListServicesRequest) ProtoMessage() {} +func (*ListServicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{2} +} +func (m *ListServicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServicesRequest.Unmarshal(m, b) +} +func (m *ListServicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServicesRequest.Marshal(b, m, deterministic) +} +func (dst *ListServicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServicesRequest.Merge(dst, src) +} +func (m *ListServicesRequest) XXX_Size() int { + return xxx_messageInfo_ListServicesRequest.Size(m) +} +func (m *ListServicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServicesRequest proto.InternalMessageInfo + +func (m *ListServicesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListServicesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListServicesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for `Services.ListServices`. +type ListServicesResponse struct { + // The services belonging to the requested application. + Services []*Service `protobuf:"bytes,1,rep,name=services,proto3" json:"services,omitempty"` + // Continuation token for fetching the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServicesResponse) Reset() { *m = ListServicesResponse{} } +func (m *ListServicesResponse) String() string { return proto.CompactTextString(m) } +func (*ListServicesResponse) ProtoMessage() {} +func (*ListServicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{3} +} +func (m *ListServicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServicesResponse.Unmarshal(m, b) +} +func (m *ListServicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServicesResponse.Marshal(b, m, deterministic) +} +func (dst *ListServicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServicesResponse.Merge(dst, src) +} +func (m *ListServicesResponse) XXX_Size() int { + return xxx_messageInfo_ListServicesResponse.Size(m) +} +func (m *ListServicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServicesResponse proto.InternalMessageInfo + +func (m *ListServicesResponse) GetServices() []*Service { + if m != nil { + return m.Services + } + return nil +} + +func (m *ListServicesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `Services.GetService`. +type GetServiceRequest struct { + // Name of the resource requested. 
Example: `apps/myapp/services/default`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceRequest) Reset() { *m = GetServiceRequest{} } +func (m *GetServiceRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceRequest) ProtoMessage() {} +func (*GetServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{4} +} +func (m *GetServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceRequest.Unmarshal(m, b) +} +func (m *GetServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceRequest.Merge(dst, src) +} +func (m *GetServiceRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceRequest.Size(m) +} +func (m *GetServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceRequest proto.InternalMessageInfo + +func (m *GetServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Services.UpdateService`. +type UpdateServiceRequest struct { + // Name of the resource to update. Example: `apps/myapp/services/default`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A Service resource containing the updated service. Only fields set in the + // field mask will be updated. + Service *Service `protobuf:"bytes,2,opt,name=service,proto3" json:"service,omitempty"` + // Standard field mask for the set of fields to be updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Set to `true` to gradually shift traffic from one version to another + // single version. By default, traffic is shifted immediately. + // For gradual traffic migration, the target version + // must be located within instances that are configured for both + // [warmup + // requests](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#inboundservicetype) + // and + // [automatic + // scaling](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#automaticscaling). + // You must specify the + // [`shardBy`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services#shardby) + // field in the Service resource. Gradual traffic migration is not + // supported in the App Engine flexible environment. For examples, see + // [Migrating and Splitting + // Traffic](https://cloud.google.com/appengine/docs/admin-api/migrating-splitting-traffic). 
+ MigrateTraffic bool `protobuf:"varint,4,opt,name=migrate_traffic,json=migrateTraffic,proto3" json:"migrate_traffic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateServiceRequest) Reset() { *m = UpdateServiceRequest{} } +func (m *UpdateServiceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateServiceRequest) ProtoMessage() {} +func (*UpdateServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{5} +} +func (m *UpdateServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateServiceRequest.Unmarshal(m, b) +} +func (m *UpdateServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateServiceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateServiceRequest.Merge(dst, src) +} +func (m *UpdateServiceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateServiceRequest.Size(m) +} +func (m *UpdateServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateServiceRequest proto.InternalMessageInfo + +func (m *UpdateServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateServiceRequest) GetService() *Service { + if m != nil { + return m.Service + } + return nil +} + +func (m *UpdateServiceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateServiceRequest) GetMigrateTraffic() bool { + if m != nil { + return m.MigrateTraffic + } + return false +} + +// Request message for `Services.DeleteService`. +type DeleteServiceRequest struct { + // Name of the resource requested. Example: `apps/myapp/services/default`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteServiceRequest) Reset() { *m = DeleteServiceRequest{} } +func (m *DeleteServiceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteServiceRequest) ProtoMessage() {} +func (*DeleteServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{6} +} +func (m *DeleteServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteServiceRequest.Unmarshal(m, b) +} +func (m *DeleteServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteServiceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteServiceRequest.Merge(dst, src) +} +func (m *DeleteServiceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteServiceRequest.Size(m) +} +func (m *DeleteServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteServiceRequest proto.InternalMessageInfo + +func (m *DeleteServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Versions.ListVersions`. +type ListVersionsRequest struct { + // Name of the parent Service resource. Example: + // `apps/myapp/services/default`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Controls the set of fields returned in the `List` response. + View VersionView `protobuf:"varint,2,opt,name=view,proto3,enum=google.appengine.v1.VersionView" json:"view,omitempty"` + // Maximum results to return per page. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Continuation token for fetching the next page of results. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVersionsRequest) Reset() { *m = ListVersionsRequest{} } +func (m *ListVersionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListVersionsRequest) ProtoMessage() {} +func (*ListVersionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{7} +} +func (m *ListVersionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVersionsRequest.Unmarshal(m, b) +} +func (m *ListVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVersionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListVersionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVersionsRequest.Merge(dst, src) +} +func (m *ListVersionsRequest) XXX_Size() int { + return xxx_messageInfo_ListVersionsRequest.Size(m) +} +func (m *ListVersionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListVersionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVersionsRequest proto.InternalMessageInfo + +func (m *ListVersionsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListVersionsRequest) GetView() VersionView { + if m != nil { + return m.View + } + return VersionView_BASIC +} + +func (m *ListVersionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListVersionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for `Versions.ListVersions`. +type ListVersionsResponse struct { + // The versions belonging to the requested service. + Versions []*Version `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` + // Continuation token for fetching the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVersionsResponse) Reset() { *m = ListVersionsResponse{} } +func (m *ListVersionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListVersionsResponse) ProtoMessage() {} +func (*ListVersionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{8} +} +func (m *ListVersionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVersionsResponse.Unmarshal(m, b) +} +func (m *ListVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVersionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListVersionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVersionsResponse.Merge(dst, src) +} +func (m *ListVersionsResponse) XXX_Size() int { + return xxx_messageInfo_ListVersionsResponse.Size(m) +} +func (m *ListVersionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListVersionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVersionsResponse proto.InternalMessageInfo + +func (m *ListVersionsResponse) GetVersions() []*Version { + if m != nil { + return m.Versions + } + return nil +} + +func (m *ListVersionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `Versions.GetVersion`. +type GetVersionRequest struct { + // Name of the resource requested. Example: + // `apps/myapp/services/default/versions/v1`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Controls the set of fields returned in the `Get` response. + View VersionView `protobuf:"varint,2,opt,name=view,proto3,enum=google.appengine.v1.VersionView" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVersionRequest) Reset() { *m = GetVersionRequest{} } +func (m *GetVersionRequest) String() string { return proto.CompactTextString(m) } +func (*GetVersionRequest) ProtoMessage() {} +func (*GetVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{9} +} +func (m *GetVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVersionRequest.Unmarshal(m, b) +} +func (m *GetVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVersionRequest.Marshal(b, m, deterministic) +} +func (dst *GetVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVersionRequest.Merge(dst, src) +} +func (m *GetVersionRequest) XXX_Size() int { + return xxx_messageInfo_GetVersionRequest.Size(m) +} +func (m *GetVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVersionRequest proto.InternalMessageInfo + +func (m *GetVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetVersionRequest) GetView() VersionView { + if m != nil { + return m.View + } + return VersionView_BASIC +} + +// Request message for `Versions.CreateVersion`. +type CreateVersionRequest struct { + // Name of the parent resource to create this version under. Example: + // `apps/myapp/services/default`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Application deployment configuration. + Version *Version `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVersionRequest) Reset() { *m = CreateVersionRequest{} } +func (m *CreateVersionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateVersionRequest) ProtoMessage() {} +func (*CreateVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{10} +} +func (m *CreateVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVersionRequest.Unmarshal(m, b) +} +func (m *CreateVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVersionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVersionRequest.Merge(dst, src) +} +func (m *CreateVersionRequest) XXX_Size() int { + return xxx_messageInfo_CreateVersionRequest.Size(m) +} +func (m *CreateVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVersionRequest proto.InternalMessageInfo + +func (m *CreateVersionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateVersionRequest) GetVersion() *Version { + if m != nil { + return m.Version + } + return nil +} + +// Request message for `Versions.UpdateVersion`. +type UpdateVersionRequest struct { + // Name of the resource to update. Example: + // `apps/myapp/services/default/versions/1`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A Version containing the updated resource. Only fields set in the field + // mask will be updated. + Version *Version `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // Standard field mask for the set of fields to be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateVersionRequest) Reset() { *m = UpdateVersionRequest{} } +func (m *UpdateVersionRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateVersionRequest) ProtoMessage() {} +func (*UpdateVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{11} +} +func (m *UpdateVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateVersionRequest.Unmarshal(m, b) +} +func (m *UpdateVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateVersionRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateVersionRequest.Merge(dst, src) +} +func (m *UpdateVersionRequest) XXX_Size() int { + return xxx_messageInfo_UpdateVersionRequest.Size(m) +} +func (m *UpdateVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateVersionRequest proto.InternalMessageInfo + +func (m *UpdateVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateVersionRequest) GetVersion() *Version { + if m != nil { + return m.Version + } + return nil +} + +func (m *UpdateVersionRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for `Versions.DeleteVersion`. +type DeleteVersionRequest struct { + // Name of the resource requested. Example: + // `apps/myapp/services/default/versions/v1`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteVersionRequest) Reset() { *m = DeleteVersionRequest{} } +func (m *DeleteVersionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteVersionRequest) ProtoMessage() {} +func (*DeleteVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{12} +} +func (m *DeleteVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteVersionRequest.Unmarshal(m, b) +} +func (m *DeleteVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteVersionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteVersionRequest.Merge(dst, src) +} +func (m *DeleteVersionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteVersionRequest.Size(m) +} +func (m *DeleteVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteVersionRequest proto.InternalMessageInfo + +func (m *DeleteVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Instances.ListInstances`. +type ListInstancesRequest struct { + // Name of the parent Version resource. Example: + // `apps/myapp/services/default/versions/v1`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Maximum results to return per page. 
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Continuation token for fetching the next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesRequest) Reset() { *m = ListInstancesRequest{} } +func (m *ListInstancesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstancesRequest) ProtoMessage() {} +func (*ListInstancesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{13} +} +func (m *ListInstancesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesRequest.Unmarshal(m, b) +} +func (m *ListInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstancesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesRequest.Merge(dst, src) +} +func (m *ListInstancesRequest) XXX_Size() int { + return xxx_messageInfo_ListInstancesRequest.Size(m) +} +func (m *ListInstancesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesRequest proto.InternalMessageInfo + +func (m *ListInstancesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstancesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstancesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for `Instances.ListInstances`. +type ListInstancesResponse struct { + // The instances belonging to the requested version. + Instances []*Instance `protobuf:"bytes,1,rep,name=instances,proto3" json:"instances,omitempty"` + // Continuation token for fetching the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesResponse) Reset() { *m = ListInstancesResponse{} } +func (m *ListInstancesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstancesResponse) ProtoMessage() {} +func (*ListInstancesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{14} +} +func (m *ListInstancesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesResponse.Unmarshal(m, b) +} +func (m *ListInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstancesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesResponse.Merge(dst, src) +} +func (m *ListInstancesResponse) XXX_Size() int { + return xxx_messageInfo_ListInstancesResponse.Size(m) +} +func (m *ListInstancesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesResponse proto.InternalMessageInfo + +func (m *ListInstancesResponse) GetInstances() []*Instance { + if m != nil { + return m.Instances + } + return nil +} + +func (m *ListInstancesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `Instances.GetInstance`. +type GetInstanceRequest struct { + // Name of the resource requested. Example: + // `apps/myapp/services/default/versions/v1/instances/instance-1`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceRequest) Reset() { *m = GetInstanceRequest{} } +func (m *GetInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceRequest) ProtoMessage() {} +func (*GetInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{15} +} +func (m *GetInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceRequest.Unmarshal(m, b) +} +func (m *GetInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceRequest.Merge(dst, src) +} +func (m *GetInstanceRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceRequest.Size(m) +} +func (m *GetInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceRequest proto.InternalMessageInfo + +func (m *GetInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Instances.DeleteInstance`. +type DeleteInstanceRequest struct { + // Name of the resource requested. Example: + // `apps/myapp/services/default/versions/v1/instances/instance-1`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstanceRequest) Reset() { *m = DeleteInstanceRequest{} } +func (m *DeleteInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstanceRequest) ProtoMessage() {} +func (*DeleteInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{16} +} +func (m *DeleteInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstanceRequest.Unmarshal(m, b) +} +func (m *DeleteInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstanceRequest.Merge(dst, src) +} +func (m *DeleteInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstanceRequest.Size(m) +} +func (m *DeleteInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstanceRequest proto.InternalMessageInfo + +func (m *DeleteInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `Instances.DebugInstance`. +type DebugInstanceRequest struct { + // Name of the resource requested. Example: + // `apps/myapp/services/default/versions/v1/instances/instance-1`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DebugInstanceRequest) Reset() { *m = DebugInstanceRequest{} } +func (m *DebugInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DebugInstanceRequest) ProtoMessage() {} +func (*DebugInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_appengine_4ea48fc042b3846d, []int{17} +} +func (m *DebugInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DebugInstanceRequest.Unmarshal(m, b) +} +func (m *DebugInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DebugInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DebugInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DebugInstanceRequest.Merge(dst, src) +} +func (m *DebugInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DebugInstanceRequest.Size(m) +} +func (m *DebugInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DebugInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DebugInstanceRequest proto.InternalMessageInfo + +func (m *DebugInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*GetApplicationRequest)(nil), "google.appengine.v1.GetApplicationRequest") + proto.RegisterType((*RepairApplicationRequest)(nil), "google.appengine.v1.RepairApplicationRequest") + proto.RegisterType((*ListServicesRequest)(nil), "google.appengine.v1.ListServicesRequest") + proto.RegisterType((*ListServicesResponse)(nil), "google.appengine.v1.ListServicesResponse") + proto.RegisterType((*GetServiceRequest)(nil), "google.appengine.v1.GetServiceRequest") + proto.RegisterType((*UpdateServiceRequest)(nil), "google.appengine.v1.UpdateServiceRequest") + proto.RegisterType((*DeleteServiceRequest)(nil), 
"google.appengine.v1.DeleteServiceRequest") + proto.RegisterType((*ListVersionsRequest)(nil), "google.appengine.v1.ListVersionsRequest") + proto.RegisterType((*ListVersionsResponse)(nil), "google.appengine.v1.ListVersionsResponse") + proto.RegisterType((*GetVersionRequest)(nil), "google.appengine.v1.GetVersionRequest") + proto.RegisterType((*CreateVersionRequest)(nil), "google.appengine.v1.CreateVersionRequest") + proto.RegisterType((*UpdateVersionRequest)(nil), "google.appengine.v1.UpdateVersionRequest") + proto.RegisterType((*DeleteVersionRequest)(nil), "google.appengine.v1.DeleteVersionRequest") + proto.RegisterType((*ListInstancesRequest)(nil), "google.appengine.v1.ListInstancesRequest") + proto.RegisterType((*ListInstancesResponse)(nil), "google.appengine.v1.ListInstancesResponse") + proto.RegisterType((*GetInstanceRequest)(nil), "google.appengine.v1.GetInstanceRequest") + proto.RegisterType((*DeleteInstanceRequest)(nil), "google.appengine.v1.DeleteInstanceRequest") + proto.RegisterType((*DebugInstanceRequest)(nil), "google.appengine.v1.DebugInstanceRequest") + proto.RegisterEnum("google.appengine.v1.VersionView", VersionView_name, VersionView_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// InstancesClient is the client API for Instances service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type InstancesClient interface { + // Lists the instances of a version. + ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) + // Gets instance information. + GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) + // Stops a running instance. + DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Enables debugging on a VM instance. This allows you to use the SSH + // command to connect to the virtual machine where the instance lives. + // While in "debug mode", the instance continues to serve live traffic. + // You should delete the instance when you are done debugging and then + // allow the system to take over and determine if another instance + // should be started. + // + // Only applicable for instances in App Engine flexible environment. + DebugInstance(ctx context.Context, in *DebugInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type instancesClient struct { + cc *grpc.ClientConn +} + +func NewInstancesClient(cc *grpc.ClientConn) InstancesClient { + return &instancesClient{cc} +} + +func (c *instancesClient) ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) { + out := new(ListInstancesResponse) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Instances/ListInstances", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instancesClient) GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Instances/GetInstance", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *instancesClient) DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Instances/DeleteInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instancesClient) DebugInstance(ctx context.Context, in *DebugInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Instances/DebugInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// InstancesServer is the server API for Instances service. +type InstancesServer interface { + // Lists the instances of a version. + ListInstances(context.Context, *ListInstancesRequest) (*ListInstancesResponse, error) + // Gets instance information. + GetInstance(context.Context, *GetInstanceRequest) (*Instance, error) + // Stops a running instance. + DeleteInstance(context.Context, *DeleteInstanceRequest) (*longrunning.Operation, error) + // Enables debugging on a VM instance. This allows you to use the SSH + // command to connect to the virtual machine where the instance lives. + // While in "debug mode", the instance continues to serve live traffic. + // You should delete the instance when you are done debugging and then + // allow the system to take over and determine if another instance + // should be started. + // + // Only applicable for instances in App Engine flexible environment. + DebugInstance(context.Context, *DebugInstanceRequest) (*longrunning.Operation, error) +} + +func RegisterInstancesServer(s *grpc.Server, srv InstancesServer) { + s.RegisterService(&_Instances_serviceDesc, srv) +} + +func _Instances_ListInstances_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstancesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstancesServer).ListInstances(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Instances/ListInstances", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstancesServer).ListInstances(ctx, req.(*ListInstancesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Instances_GetInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstancesServer).GetInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Instances/GetInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstancesServer).GetInstance(ctx, req.(*GetInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Instances_DeleteInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstancesServer).DeleteInstance(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Instances/DeleteInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstancesServer).DeleteInstance(ctx, req.(*DeleteInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Instances_DebugInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DebugInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstancesServer).DebugInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Instances/DebugInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstancesServer).DebugInstance(ctx, req.(*DebugInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Instances_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.appengine.v1.Instances", + HandlerType: (*InstancesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListInstances", + Handler: _Instances_ListInstances_Handler, + }, + { + MethodName: "GetInstance", + Handler: _Instances_GetInstance_Handler, + }, + { + MethodName: "DeleteInstance", + Handler: _Instances_DeleteInstance_Handler, + }, + { + MethodName: "DebugInstance", + Handler: _Instances_DebugInstance_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/appengine/v1/appengine.proto", +} + +// VersionsClient is the client API for Versions service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VersionsClient interface { + // Lists the versions of a service. + ListVersions(ctx context.Context, in *ListVersionsRequest, opts ...grpc.CallOption) (*ListVersionsResponse, error) + // Gets the specified Version resource. + // By default, only a `BASIC_VIEW` will be returned. + // Specify the `FULL_VIEW` parameter to get the full resource. + GetVersion(ctx context.Context, in *GetVersionRequest, opts ...grpc.CallOption) (*Version, error) + // Deploys code and resource files to a new version. + CreateVersion(ctx context.Context, in *CreateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the specified Version resource. + // You can specify the following fields depending on the App Engine + // environment and type of scaling that the version resource uses: + // + // * [`serving_status`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.serving_status): + // For Version resources that use basic scaling, manual scaling, or run in + // the App Engine flexible environment. + // * [`instance_class`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.instance_class): + // For Version resources that run in the App Engine standard environment. + // * [`automatic_scaling.min_idle_instances`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.automatic_scaling): + // For Version resources that use automatic scaling and run in the App + // Engine standard environment. 
+ // * [`automatic_scaling.max_idle_instances`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.automatic_scaling): + // For Version resources that use automatic scaling and run in the App + // Engine standard environment. + UpdateVersion(ctx context.Context, in *UpdateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes an existing Version resource. + DeleteVersion(ctx context.Context, in *DeleteVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type versionsClient struct { + cc *grpc.ClientConn +} + +func NewVersionsClient(cc *grpc.ClientConn) VersionsClient { + return &versionsClient{cc} +} + +func (c *versionsClient) ListVersions(ctx context.Context, in *ListVersionsRequest, opts ...grpc.CallOption) (*ListVersionsResponse, error) { + out := new(ListVersionsResponse) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Versions/ListVersions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *versionsClient) GetVersion(ctx context.Context, in *GetVersionRequest, opts ...grpc.CallOption) (*Version, error) { + out := new(Version) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Versions/GetVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *versionsClient) CreateVersion(ctx context.Context, in *CreateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Versions/CreateVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *versionsClient) UpdateVersion(ctx context.Context, in *UpdateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Versions/UpdateVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *versionsClient) DeleteVersion(ctx context.Context, in *DeleteVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Versions/DeleteVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VersionsServer is the server API for Versions service. +type VersionsServer interface { + // Lists the versions of a service. + ListVersions(context.Context, *ListVersionsRequest) (*ListVersionsResponse, error) + // Gets the specified Version resource. + // By default, only a `BASIC_VIEW` will be returned. + // Specify the `FULL_VIEW` parameter to get the full resource. + GetVersion(context.Context, *GetVersionRequest) (*Version, error) + // Deploys code and resource files to a new version. + CreateVersion(context.Context, *CreateVersionRequest) (*longrunning.Operation, error) + // Updates the specified Version resource. + // You can specify the following fields depending on the App Engine + // environment and type of scaling that the version resource uses: + // + // * [`serving_status`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.serving_status): + // For Version resources that use basic scaling, manual scaling, or run in + // the App Engine flexible environment. 
+ // * [`instance_class`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.instance_class): + // For Version resources that run in the App Engine standard environment. + // * [`automatic_scaling.min_idle_instances`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.automatic_scaling): + // For Version resources that use automatic scaling and run in the App + // Engine standard environment. + // * [`automatic_scaling.max_idle_instances`](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#Version.FIELDS.automatic_scaling): + // For Version resources that use automatic scaling and run in the App + // Engine standard environment. + UpdateVersion(context.Context, *UpdateVersionRequest) (*longrunning.Operation, error) + // Deletes an existing Version resource. + DeleteVersion(context.Context, *DeleteVersionRequest) (*longrunning.Operation, error) +} + +func RegisterVersionsServer(s *grpc.Server, srv VersionsServer) { + s.RegisterService(&_Versions_serviceDesc, srv) +} + +func _Versions_ListVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VersionsServer).ListVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Versions/ListVersions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VersionsServer).ListVersions(ctx, req.(*ListVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Versions_GetVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VersionsServer).GetVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Versions/GetVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VersionsServer).GetVersion(ctx, req.(*GetVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Versions_CreateVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VersionsServer).CreateVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Versions/CreateVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VersionsServer).CreateVersion(ctx, req.(*CreateVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Versions_UpdateVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VersionsServer).UpdateVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.appengine.v1.Versions/UpdateVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VersionsServer).UpdateVersion(ctx, req.(*UpdateVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Versions_DeleteVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VersionsServer).DeleteVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Versions/DeleteVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VersionsServer).DeleteVersion(ctx, req.(*DeleteVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Versions_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.appengine.v1.Versions", + HandlerType: (*VersionsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListVersions", + Handler: _Versions_ListVersions_Handler, + }, + { + MethodName: "GetVersion", + Handler: _Versions_GetVersion_Handler, + }, + { + MethodName: "CreateVersion", + Handler: _Versions_CreateVersion_Handler, + }, + { + MethodName: "UpdateVersion", + Handler: _Versions_UpdateVersion_Handler, + }, + { + MethodName: "DeleteVersion", + Handler: _Versions_DeleteVersion_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/appengine/v1/appengine.proto", +} + +// ServicesClient is the client API for Services service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ServicesClient interface { + // Lists all the services in the application. + ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error) + // Gets the current configuration of the specified service. + GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*Service, error) + // Updates the configuration of the specified service. + UpdateService(ctx context.Context, in *UpdateServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes the specified service and all enclosed versions. + DeleteService(ctx context.Context, in *DeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type servicesClient struct { + cc *grpc.ClientConn +} + +func NewServicesClient(cc *grpc.ClientConn) ServicesClient { + return &servicesClient{cc} +} + +func (c *servicesClient) ListServices(ctx context.Context, in *ListServicesRequest, opts ...grpc.CallOption) (*ListServicesResponse, error) { + out := new(ListServicesResponse) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Services/ListServices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *servicesClient) GetService(ctx context.Context, in *GetServiceRequest, opts ...grpc.CallOption) (*Service, error) { + out := new(Service) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Services/GetService", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *servicesClient) UpdateService(ctx context.Context, in *UpdateServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Services/UpdateService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *servicesClient) DeleteService(ctx context.Context, in *DeleteServiceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Services/DeleteService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ServicesServer is the server API for Services service. +type ServicesServer interface { + // Lists all the services in the application. + ListServices(context.Context, *ListServicesRequest) (*ListServicesResponse, error) + // Gets the current configuration of the specified service. + GetService(context.Context, *GetServiceRequest) (*Service, error) + // Updates the configuration of the specified service. + UpdateService(context.Context, *UpdateServiceRequest) (*longrunning.Operation, error) + // Deletes the specified service and all enclosed versions. + DeleteService(context.Context, *DeleteServiceRequest) (*longrunning.Operation, error) +} + +func RegisterServicesServer(s *grpc.Server, srv ServicesServer) { + s.RegisterService(&_Services_serviceDesc, srv) +} + +func _Services_ListServices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServicesServer).ListServices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Services/ListServices", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServicesServer).ListServices(ctx, req.(*ListServicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Services_GetService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServicesServer).GetService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Services/GetService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServicesServer).GetService(ctx, req.(*GetServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Services_UpdateService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServicesServer).UpdateService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Services/UpdateService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServicesServer).UpdateService(ctx, req.(*UpdateServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Services_DeleteService_Handler(srv interface{}, ctx 
context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ServicesServer).DeleteService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Services/DeleteService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ServicesServer).DeleteService(ctx, req.(*DeleteServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Services_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.appengine.v1.Services", + HandlerType: (*ServicesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListServices", + Handler: _Services_ListServices_Handler, + }, + { + MethodName: "GetService", + Handler: _Services_GetService_Handler, + }, + { + MethodName: "UpdateService", + Handler: _Services_UpdateService_Handler, + }, + { + MethodName: "DeleteService", + Handler: _Services_DeleteService_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/appengine/v1/appengine.proto", +} + +// ApplicationsClient is the client API for Applications service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ApplicationsClient interface { + // Gets information about an application. + GetApplication(ctx context.Context, in *GetApplicationRequest, opts ...grpc.CallOption) (*Application, error) + // Recreates the required App Engine features for the application in your + // project, for example a Cloud Storage bucket or App Engine service account. + // Use this method if you receive an error message about a missing feature, + // for example "*Error retrieving the App Engine service account*". + RepairApplication(ctx context.Context, in *RepairApplicationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type applicationsClient struct { + cc *grpc.ClientConn +} + +func NewApplicationsClient(cc *grpc.ClientConn) ApplicationsClient { + return &applicationsClient{cc} +} + +func (c *applicationsClient) GetApplication(ctx context.Context, in *GetApplicationRequest, opts ...grpc.CallOption) (*Application, error) { + out := new(Application) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Applications/GetApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *applicationsClient) RepairApplication(ctx context.Context, in *RepairApplicationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.appengine.v1.Applications/RepairApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ApplicationsServer is the server API for Applications service. +type ApplicationsServer interface { + // Gets information about an application. + GetApplication(context.Context, *GetApplicationRequest) (*Application, error) + // Recreates the required App Engine features for the application in your + // project, for example a Cloud Storage bucket or App Engine service account. + // Use this method if you receive an error message about a missing feature, + // for example "*Error retrieving the App Engine service account*". 
+ RepairApplication(context.Context, *RepairApplicationRequest) (*longrunning.Operation, error) +} + +func RegisterApplicationsServer(s *grpc.Server, srv ApplicationsServer) { + s.RegisterService(&_Applications_serviceDesc, srv) +} + +func _Applications_GetApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationsServer).GetApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Applications/GetApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationsServer).GetApplication(ctx, req.(*GetApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Applications_RepairApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RepairApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationsServer).RepairApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.appengine.v1.Applications/RepairApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationsServer).RepairApplication(ctx, req.(*RepairApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Applications_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.appengine.v1.Applications", + HandlerType: (*ApplicationsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetApplication", + Handler: _Applications_GetApplication_Handler, + }, + { + MethodName: "RepairApplication", + Handler: _Applications_RepairApplication_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/appengine/v1/appengine.proto", +} + +func init() { + proto.RegisterFile("google/appengine/v1/appengine.proto", fileDescriptor_appengine_4ea48fc042b3846d) +} + +var fileDescriptor_appengine_4ea48fc042b3846d = []byte{ + // 1134 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xdf, 0x6f, 0xdb, 0x54, + 0x14, 0xc6, 0x6d, 0xba, 0x25, 0x27, 0x6b, 0xb6, 0xde, 0xb6, 0x10, 0xbc, 0x76, 0x0a, 0x1e, 0x2c, + 0xa9, 0xa7, 0xc5, 0x9a, 0x07, 0xd3, 0x48, 0x01, 0xa9, 0xdd, 0xb4, 0x6a, 0x52, 0x11, 0x95, 0xbb, + 0xed, 0x01, 0x09, 0x55, 0x6e, 0x7a, 0x63, 0x5d, 0x9a, 0xd8, 0xc6, 0x76, 0xb2, 0x6e, 0x10, 0x21, + 0x31, 0x09, 0x01, 0x8f, 0x1b, 0x0f, 0x3c, 0x20, 0xed, 0x81, 0x7f, 0x86, 0x77, 0xfe, 0x01, 0x1e, + 0xf8, 0x43, 0x90, 0xaf, 0xef, 0xb5, 0x63, 0xc7, 0xbf, 0x54, 0xc4, 0x5b, 0x7c, 0xef, 0x77, 0xef, + 0xf9, 0xee, 0x77, 0x3e, 0xfb, 0x9c, 0x1b, 0xb8, 0x6e, 0x58, 0x96, 0x31, 0xc4, 0x8a, 0x6e, 0xdb, + 0xd8, 0x34, 0x88, 0x89, 0x95, 0xc9, 0xed, 0xe8, 0xa1, 0x6b, 0x3b, 0x96, 0x67, 0xa1, 0xd5, 0x00, + 0xd4, 0x8d, 0xc6, 0x27, 0xb7, 0xc5, 0x8d, 0x70, 0x25, 0x51, 0x74, 0xd3, 0xb4, 0x3c, 0xdd, 0x23, + 0x96, 0xe9, 0x06, 0x4b, 0xc4, 0x0f, 0x32, 0xf6, 0x1d, 0x92, 0x3e, 0xc5, 0x31, 0x98, 0x94, 0x06, + 0x23, 0xa6, 0xeb, 0xe9, 0x66, 0x9f, 0x45, 0x17, 0xdf, 0x4b, 0xc3, 0xb8, 0xd8, 0x99, 0x90, 0x7c, + 0xc8, 0x04, 0x3b, 0x6e, 0x14, 0xe9, 0x1a, 0x83, 0x10, 0x7d, 0x44, 0x63, 0xe8, 0xa3, 0x23, 0xdb, + 0x1a, 0x92, 0xfe, 0x73, 0x36, 0x2f, 0xc6, 0xe7, 0x63, 0x73, 0x5c, 
0xa4, 0xa1, 0x65, 0x1a, 0xce, + 0xd8, 0x34, 0x89, 0x69, 0x28, 0x96, 0x8d, 0x9d, 0xd8, 0x89, 0xaf, 0x32, 0x10, 0x7d, 0x3a, 0x1e, + 0x0f, 0x14, 0x3c, 0xb2, 0x3d, 0xbe, 0x43, 0x2b, 0x39, 0x39, 0x20, 0x78, 0x78, 0x72, 0x34, 0xd2, + 0xdd, 0xd3, 0x00, 0x21, 0xdd, 0x84, 0xf5, 0x3d, 0xec, 0xed, 0x44, 0x0a, 0x69, 0xf8, 0x9b, 0x31, + 0x76, 0x3d, 0x84, 0xa0, 0x62, 0xea, 0x23, 0xdc, 0x14, 0x5a, 0x42, 0xa7, 0xa6, 0xd1, 0xdf, 0x52, + 0x17, 0x9a, 0x1a, 0xb6, 0x75, 0xe2, 0x94, 0xc4, 0x13, 0x58, 0xdd, 0x27, 0xae, 0x77, 0x18, 0x88, + 0xe6, 0x72, 0xe8, 0xdb, 0x70, 0xc1, 0xd6, 0x1d, 0x6c, 0x7a, 0x0c, 0xcc, 0x9e, 0xd0, 0x55, 0xa8, + 0xd9, 0xba, 0x81, 0x8f, 0x5c, 0xf2, 0x02, 0x37, 0x17, 0x5a, 0x42, 0x67, 0x49, 0xab, 0xfa, 0x03, + 0x87, 0xe4, 0x05, 0x46, 0x9b, 0x00, 0x74, 0xd2, 0xb3, 0x4e, 0xb1, 0xd9, 0x5c, 0xa4, 0x0b, 0x29, + 0xfc, 0xb1, 0x3f, 0x20, 0x9d, 0xc1, 0x5a, 0x3c, 0x94, 0x6b, 0x5b, 0xa6, 0x8b, 0xd1, 0x3d, 0xa8, + 0xb2, 0x9c, 0xb9, 0x4d, 0xa1, 0xb5, 0xd8, 0xa9, 0xab, 0x1b, 0xdd, 0x14, 0x5b, 0x75, 0xd9, 0x42, + 0x2d, 0x44, 0xa3, 0x1b, 0x70, 0xd9, 0xc4, 0x67, 0xde, 0xd1, 0x4c, 0xd4, 0x05, 0x1a, 0x75, 0xd9, + 0x1f, 0x3e, 0x08, 0x23, 0xb7, 0x61, 0x65, 0x0f, 0xf3, 0xc0, 0x79, 0x6a, 0xfc, 0x29, 0xc0, 0xda, + 0x13, 0xfb, 0x44, 0xf7, 0x70, 0x31, 0x18, 0xdd, 0x85, 0x8b, 0x8c, 0x09, 0x8d, 0x5a, 0x44, 0x9b, + 0x83, 0xd1, 0x36, 0xd4, 0xc7, 0x34, 0x06, 0x4d, 0x32, 0xd5, 0xa9, 0xae, 0x8a, 0x7c, 0x2d, 0xf7, + 0x41, 0xf7, 0xa1, 0xef, 0x83, 0xcf, 0x75, 0xf7, 0x54, 0x83, 0x00, 0xee, 0xff, 0x46, 0x6d, 0xb8, + 0x3c, 0x22, 0x86, 0xe3, 0xaf, 0xf6, 0x1c, 0x7d, 0x30, 0x20, 0xfd, 0x66, 0xa5, 0x25, 0x74, 0xaa, + 0x5a, 0x83, 0x0d, 0x3f, 0x0e, 0x46, 0x25, 0x19, 0xd6, 0x1e, 0xe0, 0x21, 0x2e, 0x73, 0x12, 0xe9, + 0x8d, 0x10, 0xb8, 0xe0, 0x69, 0xf0, 0x5e, 0x14, 0xba, 0xe0, 0x43, 0xa8, 0x4c, 0x08, 0x7e, 0x46, + 0x8f, 0xdd, 0x50, 0x5b, 0xa9, 0xc7, 0x66, 0x7b, 0x3d, 0x25, 0xf8, 0x99, 0x46, 0xd1, 0x71, 0xef, + 0x2c, 0xe6, 0x7a, 0xa7, 0x92, 0xe1, 0x9d, 0x88, 0x60, 0xe4, 0x1d, 0xf6, 0x32, 0xe7, 0x7b, 0x87, + 0x2d, 0xd4, 0x42, 0x74, 0x69, 0xef, 0x7c, 0x45, 0xbd, 0xc3, 0xd7, 0xe7, 0xd8, 0xe1, 0x5c, 0xa2, + 0x48, 0x03, 0x58, 0xbb, 0xef, 0x60, 0xdd, 0xc3, 0x89, 0x08, 0x59, 0xd2, 0xdf, 0x85, 0x8b, 0xec, + 0x08, 0xb9, 0xa6, 0xe3, 0xbb, 0x71, 0xb0, 0x9f, 0x62, 0xe6, 0xec, 0x12, 0x47, 0x39, 0x67, 0x90, + 0xff, 0xe4, 0xec, 0xc8, 0xb0, 0xc5, 0x04, 0xa5, 0xaf, 0x03, 0x3b, 0x3c, 0x62, 0xe5, 0xe0, 0x7f, + 0xfd, 0x6c, 0x7d, 0x07, 0xeb, 0x89, 0x58, 0xcc, 0x7b, 0xdb, 0x50, 0xe3, 0xf5, 0x88, 0x9b, 0x6f, + 0x33, 0x55, 0x27, 0xbe, 0x54, 0x8b, 0xf0, 0xa5, 0xed, 0xd7, 0x01, 0xb4, 0x87, 0xc3, 0xe0, 0x79, + 0x9a, 0xdc, 0x84, 0xf5, 0x40, 0xbf, 0x32, 0x60, 0x2a, 0xf6, 0xf1, 0xd8, 0x28, 0x81, 0x95, 0x25, + 0xa8, 0xcf, 0xf8, 0x16, 0xd5, 0x60, 0x69, 0x77, 0xe7, 0xf0, 0xd1, 0xfd, 0x2b, 0x6f, 0xa1, 0x2a, + 0x54, 0x1e, 0x3e, 0xd9, 0xdf, 0xbf, 0x22, 0xa8, 0x2f, 0x97, 0xa0, 0x16, 0x2a, 0x84, 0xfe, 0x10, + 0x60, 0x39, 0xa6, 0x19, 0xda, 0x4a, 0x15, 0x26, 0x2d, 0x87, 0xa2, 0x5c, 0x06, 0x1a, 0xa4, 0x40, + 0xda, 0xfe, 0xe1, 0xaf, 0x7f, 0x5e, 0x2f, 0x7c, 0x84, 0xee, 0xf8, 0x85, 0xf9, 0xdb, 0x20, 0xd9, + 0x9f, 0xea, 0xb6, 0xed, 0x2a, 0x32, 0xef, 0x03, 0xfc, 0x9f, 0xfc, 0xa5, 0x57, 0xe4, 0xa9, 0x12, + 0xa5, 0xe0, 0x95, 0x00, 0xf5, 0x19, 0x6d, 0x51, 0x3b, 0x35, 0xf0, 0xbc, 0xfa, 0x62, 0x7e, 0x96, + 0x13, 0xa4, 0x7c, 0x09, 0x73, 0x29, 0x45, 0x8c, 0x14, 0x79, 0x8a, 0x7e, 0x13, 0xa0, 0x11, 0x4f, + 0x23, 0x4a, 0x17, 0x24, 0x35, 0xd7, 0x11, 0xb5, 0x99, 0x86, 0xa4, 0xfb, 0x05, 0x6f, 0x48, 0x38, + 0x35, 0xf9, 0x5c, 0xd4, 0xde, 0x08, 0xb0, 0x1c, 0x33, 0x4d, 0x46, 0x56, 0xd3, 0x8c, 0x55, 
0x44, + 0xec, 0x01, 0x25, 0xf6, 0x99, 0xf4, 0xf1, 0x39, 0x88, 0xf5, 0x4e, 0xfc, 0x80, 0x3d, 0x41, 0x56, + 0xff, 0x5e, 0x82, 0x2a, 0x2f, 0x11, 0xe8, 0x57, 0x01, 0x2e, 0xcd, 0xd6, 0x0c, 0xd4, 0xc9, 0x34, + 0x56, 0xa2, 0xee, 0x89, 0x5b, 0x25, 0x90, 0xcc, 0x81, 0x0a, 0x25, 0xbe, 0x85, 0xda, 0xb9, 0x0e, + 0x9c, 0x86, 0xdc, 0xd1, 0x4b, 0x01, 0x20, 0x2a, 0x28, 0xe8, 0x46, 0x96, 0xe9, 0xe2, 0x5f, 0x41, + 0x31, 0xf7, 0x0b, 0x9c, 0x60, 0x51, 0x28, 0xdf, 0x14, 0xbd, 0x16, 0x60, 0x39, 0x56, 0x77, 0x32, + 0x72, 0x99, 0x56, 0x9b, 0x8a, 0x72, 0x79, 0x8f, 0x92, 0x51, 0xa5, 0xb2, 0x92, 0xf4, 0xc2, 0xfa, + 0xe1, 0xb3, 0x8a, 0x15, 0xa9, 0x0c, 0x56, 0x69, 0x85, 0xac, 0x24, 0x2b, 0xb5, 0xac, 0x44, 0x11, + 0xab, 0x5f, 0xa8, 0xef, 0x67, 0x2a, 0x53, 0xa6, 0xef, 0xe7, 0xab, 0x57, 0x11, 0x2b, 0x96, 0x38, + 0xb9, 0x2c, 0x2b, 0xf5, 0xf7, 0x0a, 0x54, 0x79, 0x07, 0x8d, 0x7e, 0x66, 0x16, 0x0f, 0x07, 0xb2, + 0x2d, 0x9e, 0x68, 0xf0, 0x73, 0x2c, 0x9e, 0xec, 0xcf, 0xa5, 0xf7, 0x29, 0xc7, 0x6b, 0x68, 0x63, + 0x3e, 0x9f, 0xd3, 0x90, 0x26, 0x3a, 0xa3, 0xb6, 0x66, 0x8b, 0xb3, 0x6d, 0x1d, 0xef, 0x46, 0xc5, + 0xdc, 0x96, 0x39, 0x11, 0x39, 0x5d, 0x9d, 0x29, 0xfa, 0x29, 0x74, 0x0d, 0x8f, 0x9e, 0xe7, 0x9a, + 0x04, 0x81, 0x82, 0xfc, 0xdc, 0xa2, 0x0c, 0xda, 0x6a, 0x2e, 0x83, 0x5e, 0xd8, 0xda, 0x7f, 0xcf, + 0x9d, 0x92, 0xcf, 0x24, 0xad, 0x31, 0x2f, 0x62, 0xc2, 0xb4, 0x90, 0x73, 0x99, 0xa8, 0xaf, 0x16, + 0xe0, 0xd2, 0xcc, 0xcd, 0xcf, 0x45, 0xcf, 0xa1, 0x11, 0xbf, 0x3c, 0x66, 0x94, 0x93, 0xd4, 0x1b, + 0xa6, 0x98, 0xde, 0xc5, 0xce, 0x00, 0xa5, 0x77, 0x29, 0xad, 0x55, 0xb4, 0x92, 0xa4, 0x35, 0x45, + 0x3f, 0x0a, 0xb0, 0x32, 0x77, 0x17, 0x45, 0xb7, 0x52, 0xb7, 0xcc, 0xba, 0xb3, 0x16, 0xa9, 0x72, + 0x9d, 0x86, 0xdf, 0x94, 0x9a, 0x73, 0xe1, 0x7b, 0x0e, 0xdd, 0xb2, 0x27, 0xc8, 0xbb, 0x04, 0xde, + 0xe9, 0x5b, 0xa3, 0xb4, 0xb8, 0xbb, 0x8d, 0x1d, 0xfe, 0x74, 0xe0, 0x77, 0xa7, 0x07, 0xc2, 0x97, + 0x9f, 0x30, 0x98, 0x61, 0x0d, 0x75, 0xd3, 0xe8, 0x5a, 0x8e, 0xa1, 0x18, 0xd8, 0xa4, 0xbd, 0xab, + 0x12, 0x4c, 0xe9, 0x36, 0x71, 0x63, 0xff, 0x27, 0x6c, 0x87, 0x0f, 0xc7, 0x17, 0x28, 0xf0, 0xce, + 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x89, 0x36, 0x22, 0x8f, 0x3d, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/application.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/application.pb.go new file mode 100644 index 0000000..e54c2cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/application.pb.go @@ -0,0 +1,274 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/application.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An Application resource contains the top-level configuration of an App +// Engine application. +type Application struct { + // Full path to the Application resource in the API. + // Example: `apps/myapp`. 
+ // + // @OutputOnly + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Identifier of the Application resource. This identifier is equivalent + // to the project ID of the Google Cloud Platform project where you want to + // deploy your application. + // Example: `myapp`. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // HTTP path dispatch rules for requests to the application that do not + // explicitly target a service or version. Rules are order-dependent. + // + // @OutputOnly + DispatchRules []*UrlDispatchRule `protobuf:"bytes,3,rep,name=dispatch_rules,json=dispatchRules,proto3" json:"dispatch_rules,omitempty"` + // Google Apps authentication domain that controls which users can access + // this application. + // + // Defaults to open access for any Google Account. + AuthDomain string `protobuf:"bytes,6,opt,name=auth_domain,json=authDomain,proto3" json:"auth_domain,omitempty"` + // Location from which this application will be run. Application instances + // will run out of data centers in the chosen location, which is also where + // all of the application's end user content is stored. + // + // Defaults to `us-central`. + // + // Options are: + // + // `us-central` - Central US + // + // `europe-west` - Western Europe + // + // `us-east1` - Eastern US + LocationId string `protobuf:"bytes,7,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + // Google Cloud Storage bucket that can be used for storing files + // associated with this application. This bucket is associated with the + // application and can be used by the gcloud deployment commands. + // + // @OutputOnly + CodeBucket string `protobuf:"bytes,8,opt,name=code_bucket,json=codeBucket,proto3" json:"code_bucket,omitempty"` + // Cookie expiration policy for this application. + // + // @OutputOnly + DefaultCookieExpiration *duration.Duration `protobuf:"bytes,9,opt,name=default_cookie_expiration,json=defaultCookieExpiration,proto3" json:"default_cookie_expiration,omitempty"` + // Hostname used to reach this application, as resolved by App Engine. + // + // @OutputOnly + DefaultHostname string `protobuf:"bytes,11,opt,name=default_hostname,json=defaultHostname,proto3" json:"default_hostname,omitempty"` + // Google Cloud Storage bucket that can be used by this application to store + // content. 
+ // + // @OutputOnly + DefaultBucket string `protobuf:"bytes,12,opt,name=default_bucket,json=defaultBucket,proto3" json:"default_bucket,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Application) Reset() { *m = Application{} } +func (m *Application) String() string { return proto.CompactTextString(m) } +func (*Application) ProtoMessage() {} +func (*Application) Descriptor() ([]byte, []int) { + return fileDescriptor_application_ac9d8695b16963cf, []int{0} +} +func (m *Application) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Application.Unmarshal(m, b) +} +func (m *Application) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Application.Marshal(b, m, deterministic) +} +func (dst *Application) XXX_Merge(src proto.Message) { + xxx_messageInfo_Application.Merge(dst, src) +} +func (m *Application) XXX_Size() int { + return xxx_messageInfo_Application.Size(m) +} +func (m *Application) XXX_DiscardUnknown() { + xxx_messageInfo_Application.DiscardUnknown(m) +} + +var xxx_messageInfo_Application proto.InternalMessageInfo + +func (m *Application) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Application) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Application) GetDispatchRules() []*UrlDispatchRule { + if m != nil { + return m.DispatchRules + } + return nil +} + +func (m *Application) GetAuthDomain() string { + if m != nil { + return m.AuthDomain + } + return "" +} + +func (m *Application) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +func (m *Application) GetCodeBucket() string { + if m != nil { + return m.CodeBucket + } + return "" +} + +func (m *Application) GetDefaultCookieExpiration() *duration.Duration { + if m != nil { + return m.DefaultCookieExpiration + } + return nil +} + +func (m *Application) GetDefaultHostname() string { + if m != nil { + return m.DefaultHostname + } + return "" +} + +func (m *Application) GetDefaultBucket() string { + if m != nil { + return m.DefaultBucket + } + return "" +} + +// Rules to match an HTTP request and dispatch that request to a service. +type UrlDispatchRule struct { + // Domain name to match against. The wildcard "`*`" is supported if + // specified before a period: "`*.`". + // + // Defaults to matching all domains: "`*`". + Domain string `protobuf:"bytes,1,opt,name=domain,proto3" json:"domain,omitempty"` + // Pathname within the host. Must start with a "`/`". A + // single "`*`" can be included at the end of the path. The sum + // of the lengths of the domain and path may not exceed 100 + // characters. + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + // Resource ID of a service in this application that should + // serve the matched request. The service must already + // exist. Example: `default`. 
+ Service string `protobuf:"bytes,3,opt,name=service,proto3" json:"service,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UrlDispatchRule) Reset() { *m = UrlDispatchRule{} } +func (m *UrlDispatchRule) String() string { return proto.CompactTextString(m) } +func (*UrlDispatchRule) ProtoMessage() {} +func (*UrlDispatchRule) Descriptor() ([]byte, []int) { + return fileDescriptor_application_ac9d8695b16963cf, []int{1} +} +func (m *UrlDispatchRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UrlDispatchRule.Unmarshal(m, b) +} +func (m *UrlDispatchRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UrlDispatchRule.Marshal(b, m, deterministic) +} +func (dst *UrlDispatchRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_UrlDispatchRule.Merge(dst, src) +} +func (m *UrlDispatchRule) XXX_Size() int { + return xxx_messageInfo_UrlDispatchRule.Size(m) +} +func (m *UrlDispatchRule) XXX_DiscardUnknown() { + xxx_messageInfo_UrlDispatchRule.DiscardUnknown(m) +} + +var xxx_messageInfo_UrlDispatchRule proto.InternalMessageInfo + +func (m *UrlDispatchRule) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *UrlDispatchRule) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *UrlDispatchRule) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func init() { + proto.RegisterType((*Application)(nil), "google.appengine.v1.Application") + proto.RegisterType((*UrlDispatchRule)(nil), "google.appengine.v1.UrlDispatchRule") +} + +func init() { + proto.RegisterFile("google/appengine/v1/application.proto", fileDescriptor_application_ac9d8695b16963cf) +} + +var fileDescriptor_application_ac9d8695b16963cf = []byte{ + // 409 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0x5f, 0x6b, 0xdb, 0x30, + 0x14, 0xc5, 0x71, 0x3c, 0x92, 0x45, 0x5e, 0xfe, 0xa0, 0xc1, 0xa2, 0x84, 0xb1, 0x85, 0xb0, 0x40, + 0xf6, 0x62, 0x93, 0xec, 0x71, 0x7b, 0x59, 0x9a, 0x42, 0x4b, 0x5f, 0x82, 0x21, 0x14, 0xfa, 0x62, + 0x14, 0x4b, 0xb1, 0x45, 0x14, 0xc9, 0xd8, 0x72, 0xe8, 0x67, 0xe8, 0xa7, 0x2e, 0x96, 0x64, 0x37, + 0x2d, 0x79, 0xd3, 0x3d, 0xfa, 0x1d, 0xdd, 0xeb, 0x73, 0x0d, 0xe6, 0x89, 0x94, 0x09, 0xa7, 0x01, + 0xce, 0x32, 0x2a, 0x12, 0x26, 0x68, 0x70, 0x5e, 0x56, 0x05, 0x67, 0x31, 0x56, 0x4c, 0x0a, 0x3f, + 0xcb, 0xa5, 0x92, 0xf0, 0xab, 0xc1, 0xfc, 0x06, 0xf3, 0xcf, 0xcb, 0xc9, 0xf7, 0xc6, 0xcb, 0x02, + 0x2c, 0x84, 0x54, 0xda, 0x51, 0x18, 0xcb, 0xe4, 0x87, 0xbd, 0xd5, 0xd5, 0xbe, 0x3c, 0x04, 0xa4, + 0xcc, 0x2f, 0x9e, 0x9c, 0xbd, 0xb8, 0xc0, 0xfb, 0xff, 0xd6, 0x08, 0x42, 0xf0, 0x49, 0xe0, 0x13, + 0x45, 0xce, 0xd4, 0x59, 0x74, 0x43, 0x7d, 0x86, 0x7d, 0xd0, 0x62, 0x04, 0xb5, 0xb4, 0xd2, 0x62, + 0x04, 0x3e, 0x80, 0x3e, 0x61, 0x45, 0x86, 0x55, 0x9c, 0x46, 0x79, 0xc9, 0x69, 0x81, 0xdc, 0xa9, + 0xbb, 0xf0, 0x56, 0xbf, 0xfc, 0x2b, 0xf3, 0xf9, 0xbb, 0x9c, 0x6f, 0x2c, 0x1d, 0x96, 0x9c, 0x86, + 0x3d, 0x72, 0x51, 0x15, 0xf0, 0x27, 0xf0, 0x70, 0xa9, 0xd2, 0x88, 0xc8, 0x13, 0x66, 0x02, 0xb5, + 0x75, 0x17, 0x50, 0x49, 0x1b, 0xad, 0x54, 0x00, 0x97, 0x66, 0xba, 0x88, 0x11, 0xd4, 0x31, 0x40, + 0x2d, 0xdd, 0x93, 0x0a, 0x88, 0x25, 0xa1, 0xd1, 0xbe, 0x8c, 0x8f, 0x54, 0xa1, 0xcf, 0x06, 0xa8, + 0xa4, 0xb5, 0x56, 0xe0, 0x0e, 0x8c, 0x09, 0x3d, 0xe0, 0x92, 0xab, 0x28, 0x96, 0xf2, 0xc8, 0x68, + 0x44, 0x9f, 0x33, 0x66, 0x62, 0x40, 0xdd, 0xa9, 0xb3, 0xf0, 0x56, 0xe3, 0x7a, 0xf4, 
0x3a, 0x27, + 0x7f, 0x63, 0x73, 0x0a, 0x47, 0xd6, 0x7b, 0xa3, 0xad, 0xb7, 0x8d, 0x13, 0xfe, 0x06, 0xc3, 0xfa, + 0xd9, 0x54, 0x16, 0x4a, 0xc7, 0xe6, 0xe9, 0xe6, 0x03, 0xab, 0xdf, 0x59, 0x19, 0xce, 0x41, 0xbf, + 0x46, 0xed, 0x94, 0x5f, 0x34, 0xd8, 0xb3, 0xaa, 0x19, 0x74, 0xf6, 0x08, 0x06, 0x1f, 0xd2, 0x82, + 0xdf, 0x40, 0xdb, 0x26, 0x63, 0x36, 0x62, 0xab, 0x6a, 0x4f, 0x19, 0x56, 0xa9, 0xdd, 0x8a, 0x3e, + 0x43, 0x04, 0x3a, 0x05, 0xcd, 0xcf, 0x2c, 0xa6, 0xc8, 0xd5, 0x72, 0x5d, 0xae, 0x8f, 0x60, 0x14, + 0xcb, 0xd3, 0xb5, 0xf5, 0xac, 0x87, 0x17, 0xdb, 0xdf, 0x56, 0x1f, 0xbf, 0x75, 0x9e, 0xfe, 0x59, + 0x30, 0x91, 0x1c, 0x8b, 0xc4, 0x97, 0x79, 0x12, 0x24, 0x54, 0xe8, 0x68, 0x02, 0x73, 0x85, 0x33, + 0x56, 0xbc, 0xfb, 0x5b, 0xff, 0x36, 0xc5, 0xbe, 0xad, 0xc1, 0x3f, 0xaf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x7a, 0x51, 0x2e, 0x3c, 0xd5, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/audit_data.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/audit_data.pb.go new file mode 100644 index 0000000..8e4c4b2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/audit_data.pb.go @@ -0,0 +1,284 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/audit_data.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/iam/v1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// App Engine admin service audit log. +type AuditData struct { + // Detailed information about methods that require it. Does not include + // simple Get, List or Delete methods because all significant information + // (resource name, number of returned elements for List operations) is already + // included in parent audit log message. 
+ // + // Types that are valid to be assigned to Method: + // *AuditData_UpdateService + // *AuditData_CreateVersion + Method isAuditData_Method `protobuf_oneof:"method"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditData) Reset() { *m = AuditData{} } +func (m *AuditData) String() string { return proto.CompactTextString(m) } +func (*AuditData) ProtoMessage() {} +func (*AuditData) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_60458bc78538b81a, []int{0} +} +func (m *AuditData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditData.Unmarshal(m, b) +} +func (m *AuditData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditData.Marshal(b, m, deterministic) +} +func (dst *AuditData) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditData.Merge(dst, src) +} +func (m *AuditData) XXX_Size() int { + return xxx_messageInfo_AuditData.Size(m) +} +func (m *AuditData) XXX_DiscardUnknown() { + xxx_messageInfo_AuditData.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditData proto.InternalMessageInfo + +type isAuditData_Method interface { + isAuditData_Method() +} + +type AuditData_UpdateService struct { + UpdateService *UpdateServiceMethod `protobuf:"bytes,1,opt,name=update_service,json=updateService,proto3,oneof"` +} + +type AuditData_CreateVersion struct { + CreateVersion *CreateVersionMethod `protobuf:"bytes,2,opt,name=create_version,json=createVersion,proto3,oneof"` +} + +func (*AuditData_UpdateService) isAuditData_Method() {} + +func (*AuditData_CreateVersion) isAuditData_Method() {} + +func (m *AuditData) GetMethod() isAuditData_Method { + if m != nil { + return m.Method + } + return nil +} + +func (m *AuditData) GetUpdateService() *UpdateServiceMethod { + if x, ok := m.GetMethod().(*AuditData_UpdateService); ok { + return x.UpdateService + } + return nil +} + +func (m *AuditData) GetCreateVersion() *CreateVersionMethod { + if x, ok := m.GetMethod().(*AuditData_CreateVersion); ok { + return x.CreateVersion + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AuditData) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AuditData_OneofMarshaler, _AuditData_OneofUnmarshaler, _AuditData_OneofSizer, []interface{}{ + (*AuditData_UpdateService)(nil), + (*AuditData_CreateVersion)(nil), + } +} + +func _AuditData_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AuditData) + // method + switch x := m.Method.(type) { + case *AuditData_UpdateService: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UpdateService); err != nil { + return err + } + case *AuditData_CreateVersion: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CreateVersion); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AuditData.Method has unexpected type %T", x) + } + return nil +} + +func _AuditData_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AuditData) + switch tag { + case 1: // method.update_service + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UpdateServiceMethod) + err := b.DecodeMessage(msg) + m.Method = &AuditData_UpdateService{msg} + return true, err + case 2: // method.create_version + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CreateVersionMethod) + err := b.DecodeMessage(msg) + m.Method = &AuditData_CreateVersion{msg} + return true, err + default: + return false, nil + } +} + +func _AuditData_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AuditData) + // method + switch x := m.Method.(type) { + case *AuditData_UpdateService: + s := proto.Size(x.UpdateService) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_CreateVersion: + s := proto.Size(x.CreateVersion) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Detailed information about UpdateService call. +type UpdateServiceMethod struct { + // Update service request. 
+ Request *UpdateServiceRequest `protobuf:"bytes,1,opt,name=request,proto3" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateServiceMethod) Reset() { *m = UpdateServiceMethod{} } +func (m *UpdateServiceMethod) String() string { return proto.CompactTextString(m) } +func (*UpdateServiceMethod) ProtoMessage() {} +func (*UpdateServiceMethod) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_60458bc78538b81a, []int{1} +} +func (m *UpdateServiceMethod) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateServiceMethod.Unmarshal(m, b) +} +func (m *UpdateServiceMethod) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateServiceMethod.Marshal(b, m, deterministic) +} +func (dst *UpdateServiceMethod) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateServiceMethod.Merge(dst, src) +} +func (m *UpdateServiceMethod) XXX_Size() int { + return xxx_messageInfo_UpdateServiceMethod.Size(m) +} +func (m *UpdateServiceMethod) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateServiceMethod.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateServiceMethod proto.InternalMessageInfo + +func (m *UpdateServiceMethod) GetRequest() *UpdateServiceRequest { + if m != nil { + return m.Request + } + return nil +} + +// Detailed information about CreateVersion call. +type CreateVersionMethod struct { + // Create version request. + Request *CreateVersionRequest `protobuf:"bytes,1,opt,name=request,proto3" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVersionMethod) Reset() { *m = CreateVersionMethod{} } +func (m *CreateVersionMethod) String() string { return proto.CompactTextString(m) } +func (*CreateVersionMethod) ProtoMessage() {} +func (*CreateVersionMethod) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_60458bc78538b81a, []int{2} +} +func (m *CreateVersionMethod) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVersionMethod.Unmarshal(m, b) +} +func (m *CreateVersionMethod) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVersionMethod.Marshal(b, m, deterministic) +} +func (dst *CreateVersionMethod) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVersionMethod.Merge(dst, src) +} +func (m *CreateVersionMethod) XXX_Size() int { + return xxx_messageInfo_CreateVersionMethod.Size(m) +} +func (m *CreateVersionMethod) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVersionMethod.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVersionMethod proto.InternalMessageInfo + +func (m *CreateVersionMethod) GetRequest() *CreateVersionRequest { + if m != nil { + return m.Request + } + return nil +} + +func init() { + proto.RegisterType((*AuditData)(nil), "google.appengine.v1.AuditData") + proto.RegisterType((*UpdateServiceMethod)(nil), "google.appengine.v1.UpdateServiceMethod") + proto.RegisterType((*CreateVersionMethod)(nil), "google.appengine.v1.CreateVersionMethod") +} + +func init() { + proto.RegisterFile("google/appengine/v1/audit_data.proto", fileDescriptor_audit_data_60458bc78538b81a) +} + +var fileDescriptor_audit_data_60458bc78538b81a = []byte{ + // 290 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x86, 0x09, 0x43, 0x01, 0x23, 0x3a, 0xa4, 0x03, 0x55, 0x07, 0x84, 0x0a, 
0x43, 0x59, 0x1c, + 0x15, 0x46, 0x58, 0x48, 0x19, 0x58, 0x90, 0x4a, 0x10, 0x0c, 0x5d, 0xa2, 0x23, 0x39, 0x19, 0x4b, + 0x49, 0x6c, 0x1c, 0x27, 0x12, 0xcf, 0xc6, 0xcb, 0x21, 0xdb, 0x21, 0xb4, 0xc8, 0x82, 0x8e, 0xb9, + 0xfb, 0xfe, 0x2f, 0xbf, 0x74, 0x26, 0xe7, 0x4c, 0x08, 0x56, 0x60, 0x04, 0x52, 0x62, 0xc5, 0x78, + 0x85, 0x51, 0x3b, 0x8f, 0xa0, 0xc9, 0xb9, 0x4e, 0x73, 0xd0, 0x40, 0xa5, 0x12, 0x5a, 0x84, 0x23, + 0x47, 0xd1, 0x9e, 0xa2, 0xed, 0x7c, 0x72, 0xe6, 0x8d, 0xf6, 0x84, 0x4d, 0x4e, 0x4e, 0x3a, 0x88, + 0x43, 0x69, 0xd6, 0x1c, 0xca, 0x54, 0x8a, 0x82, 0x67, 0x1f, 0x6e, 0x3f, 0xfd, 0x0c, 0xc8, 0xc1, + 0xad, 0xf9, 0xdd, 0x1d, 0x68, 0x08, 0x1f, 0xc9, 0xb0, 0x91, 0x39, 0x68, 0x4c, 0x6b, 0x54, 0x2d, + 0xcf, 0x70, 0x1c, 0x9c, 0x06, 0xb3, 0xc3, 0xcb, 0x19, 0xf5, 0x14, 0xa0, 0xcf, 0x16, 0x7d, 0x72, + 0xe4, 0x03, 0xea, 0x37, 0x91, 0xdf, 0xef, 0x24, 0x47, 0xcd, 0xfa, 0xd8, 0x28, 0x33, 0x85, 0x46, + 0xd9, 0xa2, 0xaa, 0xb9, 0xa8, 0xc6, 0xbb, 0x7f, 0x28, 0x17, 0x16, 0x7d, 0x71, 0xe4, 0x8f, 0x32, + 0x5b, 0x1f, 0xc7, 0xfb, 0x64, 0x50, 0xda, 0xd5, 0x74, 0x45, 0x46, 0x9e, 0x12, 0xe1, 0x82, 0xec, + 0x29, 0x7c, 0x6f, 0xb0, 0xd6, 0x5d, 0xff, 0x8b, 0xff, 0xfb, 0x27, 0x2e, 0x90, 0x7c, 0x27, 0x8d, + 0xdb, 0xd3, 0x66, 0x5b, 0xf7, 0x46, 0xf4, 0xb7, 0x3b, 0xe6, 0xe4, 0x38, 0x13, 0xa5, 0x2f, 0x18, + 0x0f, 0xfb, 0x6b, 0x2c, 0xcd, 0x81, 0x96, 0xc1, 0xea, 0xa6, 0xc3, 0x98, 0x28, 0xa0, 0x62, 0x54, + 0x28, 0x16, 0x31, 0xac, 0xec, 0xf9, 0x22, 0xb7, 0x02, 0xc9, 0xeb, 0x8d, 0x67, 0x70, 0xdd, 0x7f, + 0xbc, 0x0e, 0x2c, 0x78, 0xf5, 0x15, 0x00, 0x00, 0xff, 0xff, 0xdb, 0x56, 0x80, 0x49, 0x69, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/deploy.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/deploy.pb.go new file mode 100644 index 0000000..8003902 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/deploy.pb.go @@ -0,0 +1,280 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/deploy.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Code and application artifacts used to deploy a version to App Engine. +type Deployment struct { + // Manifest of the files stored in Google Cloud Storage that are included + // as part of this version. All files must be readable using the + // credentials supplied with this call. + Files map[string]*FileInfo `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // A Docker image that App Engine uses to run the version. + // Only applicable for instances in App Engine flexible environment. + Container *ContainerInfo `protobuf:"bytes,2,opt,name=container,proto3" json:"container,omitempty"` + // The zip file for this deployment, if this is a zip deployment. 
+ Zip *ZipInfo `protobuf:"bytes,3,opt,name=zip,proto3" json:"zip,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployment) Reset() { *m = Deployment{} } +func (m *Deployment) String() string { return proto.CompactTextString(m) } +func (*Deployment) ProtoMessage() {} +func (*Deployment) Descriptor() ([]byte, []int) { + return fileDescriptor_deploy_c2d1a3b963500eae, []int{0} +} +func (m *Deployment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployment.Unmarshal(m, b) +} +func (m *Deployment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployment.Marshal(b, m, deterministic) +} +func (dst *Deployment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployment.Merge(dst, src) +} +func (m *Deployment) XXX_Size() int { + return xxx_messageInfo_Deployment.Size(m) +} +func (m *Deployment) XXX_DiscardUnknown() { + xxx_messageInfo_Deployment.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployment proto.InternalMessageInfo + +func (m *Deployment) GetFiles() map[string]*FileInfo { + if m != nil { + return m.Files + } + return nil +} + +func (m *Deployment) GetContainer() *ContainerInfo { + if m != nil { + return m.Container + } + return nil +} + +func (m *Deployment) GetZip() *ZipInfo { + if m != nil { + return m.Zip + } + return nil +} + +// Single source file that is part of the version to be deployed. Each source +// file that is deployed must be specified separately. +type FileInfo struct { + // URL source to use to fetch this file. Must be a URL to a resource in + // Google Cloud Storage in the form + // 'http(s)://storage.googleapis.com/\/\'. + SourceUrl string `protobuf:"bytes,1,opt,name=source_url,json=sourceUrl,proto3" json:"source_url,omitempty"` + // The SHA1 hash of the file, in hex. + Sha1Sum string `protobuf:"bytes,2,opt,name=sha1_sum,json=sha1Sum,proto3" json:"sha1_sum,omitempty"` + // The MIME type of the file. + // + // Defaults to the value from Google Cloud Storage. + MimeType string `protobuf:"bytes,3,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileInfo) Reset() { *m = FileInfo{} } +func (m *FileInfo) String() string { return proto.CompactTextString(m) } +func (*FileInfo) ProtoMessage() {} +func (*FileInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_deploy_c2d1a3b963500eae, []int{1} +} +func (m *FileInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileInfo.Unmarshal(m, b) +} +func (m *FileInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileInfo.Marshal(b, m, deterministic) +} +func (dst *FileInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileInfo.Merge(dst, src) +} +func (m *FileInfo) XXX_Size() int { + return xxx_messageInfo_FileInfo.Size(m) +} +func (m *FileInfo) XXX_DiscardUnknown() { + xxx_messageInfo_FileInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_FileInfo proto.InternalMessageInfo + +func (m *FileInfo) GetSourceUrl() string { + if m != nil { + return m.SourceUrl + } + return "" +} + +func (m *FileInfo) GetSha1Sum() string { + if m != nil { + return m.Sha1Sum + } + return "" +} + +func (m *FileInfo) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// Docker image that is used to start a VM container for the version you +// deploy. 
+type ContainerInfo struct { + // URI to the hosted container image in a Docker repository. The URI must be + // fully qualified and include a tag or digest. + // Examples: "gcr.io/my-project/image:tag" or "gcr.io/my-project/image@digest" + Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContainerInfo) Reset() { *m = ContainerInfo{} } +func (m *ContainerInfo) String() string { return proto.CompactTextString(m) } +func (*ContainerInfo) ProtoMessage() {} +func (*ContainerInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_deploy_c2d1a3b963500eae, []int{2} +} +func (m *ContainerInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContainerInfo.Unmarshal(m, b) +} +func (m *ContainerInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContainerInfo.Marshal(b, m, deterministic) +} +func (dst *ContainerInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContainerInfo.Merge(dst, src) +} +func (m *ContainerInfo) XXX_Size() int { + return xxx_messageInfo_ContainerInfo.Size(m) +} +func (m *ContainerInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ContainerInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ContainerInfo proto.InternalMessageInfo + +func (m *ContainerInfo) GetImage() string { + if m != nil { + return m.Image + } + return "" +} + +type ZipInfo struct { + // URL of the zip file to deploy from. Must be a URL to a resource in + // Google Cloud Storage in the form + // 'http(s)://storage.googleapis.com/\/\'. + SourceUrl string `protobuf:"bytes,3,opt,name=source_url,json=sourceUrl,proto3" json:"source_url,omitempty"` + // An estimate of the number of files in a zip for a zip deployment. + // If set, must be greater than or equal to the actual number of files. + // Used for optimizing performance; if not provided, deployment may be slow. 
+ FilesCount int32 `protobuf:"varint,4,opt,name=files_count,json=filesCount,proto3" json:"files_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ZipInfo) Reset() { *m = ZipInfo{} } +func (m *ZipInfo) String() string { return proto.CompactTextString(m) } +func (*ZipInfo) ProtoMessage() {} +func (*ZipInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_deploy_c2d1a3b963500eae, []int{3} +} +func (m *ZipInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ZipInfo.Unmarshal(m, b) +} +func (m *ZipInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ZipInfo.Marshal(b, m, deterministic) +} +func (dst *ZipInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ZipInfo.Merge(dst, src) +} +func (m *ZipInfo) XXX_Size() int { + return xxx_messageInfo_ZipInfo.Size(m) +} +func (m *ZipInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ZipInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ZipInfo proto.InternalMessageInfo + +func (m *ZipInfo) GetSourceUrl() string { + if m != nil { + return m.SourceUrl + } + return "" +} + +func (m *ZipInfo) GetFilesCount() int32 { + if m != nil { + return m.FilesCount + } + return 0 +} + +func init() { + proto.RegisterType((*Deployment)(nil), "google.appengine.v1.Deployment") + proto.RegisterMapType((map[string]*FileInfo)(nil), "google.appengine.v1.Deployment.FilesEntry") + proto.RegisterType((*FileInfo)(nil), "google.appengine.v1.FileInfo") + proto.RegisterType((*ContainerInfo)(nil), "google.appengine.v1.ContainerInfo") + proto.RegisterType((*ZipInfo)(nil), "google.appengine.v1.ZipInfo") +} + +func init() { + proto.RegisterFile("google/appengine/v1/deploy.proto", fileDescriptor_deploy_c2d1a3b963500eae) +} + +var fileDescriptor_deploy_c2d1a3b963500eae = []byte{ + // 394 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0xd1, 0xab, 0xd3, 0x30, + 0x14, 0xc6, 0xe9, 0x6a, 0xbd, 0xeb, 0x29, 0x82, 0x44, 0xc1, 0x7a, 0xbd, 0x17, 0x4b, 0x41, 0x28, + 0x3e, 0xa4, 0xec, 0xde, 0x17, 0x51, 0x1f, 0x2e, 0x9b, 0x0a, 0x7b, 0x1b, 0x55, 0x11, 0xf6, 0x52, + 0x62, 0xcd, 0x62, 0xb0, 0x4d, 0x42, 0x9b, 0x0e, 0xea, 0x7f, 0xe2, 0x7f, 0x2b, 0x49, 0xba, 0x8d, + 0x8d, 0xbe, 0xf5, 0x7c, 0xfd, 0x7d, 0x5f, 0x4e, 0x72, 0x0e, 0x24, 0x4c, 0x4a, 0x56, 0xd3, 0x9c, + 0x28, 0x45, 0x05, 0xe3, 0x82, 0xe6, 0xfb, 0x45, 0xfe, 0x8b, 0xaa, 0x5a, 0x0e, 0x58, 0xb5, 0x52, + 0x4b, 0xf4, 0xcc, 0x11, 0xf8, 0x48, 0xe0, 0xfd, 0xe2, 0xfa, 0xe6, 0x68, 0xe3, 0x39, 0x11, 0x42, + 0x6a, 0xa2, 0xb9, 0x14, 0x9d, 0xb3, 0xa4, 0xff, 0x66, 0x00, 0x9f, 0x6c, 0x46, 0x43, 0x85, 0x46, + 0x0f, 0x10, 0xec, 0x78, 0x4d, 0xbb, 0xd8, 0x4b, 0xfc, 0x2c, 0xba, 0x7b, 0x8b, 0x27, 0x12, 0xf1, + 0x89, 0xc7, 0x5f, 0x0c, 0xfc, 0x59, 0xe8, 0x76, 0x28, 0x9c, 0x11, 0x3d, 0x40, 0x58, 0x49, 0xa1, + 0x09, 0x17, 0xb4, 0x8d, 0x67, 0x89, 0x97, 0x45, 0x77, 0xe9, 0x64, 0xca, 0xea, 0x40, 0xad, 0xc5, + 0x4e, 0x16, 0x27, 0x13, 0xc2, 0xe0, 0xff, 0xe5, 0x2a, 0xf6, 0xad, 0xf7, 0x66, 0xd2, 0xbb, 0xe5, + 0xca, 0xba, 0x0c, 0x78, 0xfd, 0x03, 0xe0, 0xd4, 0x06, 0x7a, 0x0a, 0xfe, 0x1f, 0x3a, 0xc4, 0x5e, + 0xe2, 0x65, 0x61, 0x61, 0x3e, 0xd1, 0x3d, 0x04, 0x7b, 0x52, 0xf7, 0x74, 0xec, 0xe6, 0x76, 0x32, + 0xd1, 0x24, 0xd8, 0x48, 0xc7, 0xbe, 0x9f, 0xbd, 0xf3, 0x52, 0x02, 0xf3, 0x83, 0x8c, 0x6e, 0x01, + 0x3a, 0xd9, 0xb7, 0x15, 0x2d, 0xfb, 0xb6, 0x1e, 0xd3, 0x43, 0xa7, 0x7c, 0x6f, 0x6b, 0xf4, 0x12, + 0xe6, 0xdd, 0x6f, 0xb2, 0x28, 0xbb, 0xbe, 0xb1, 0xc7, 0x84, 0xc5, 0x95, 0xa9, 
0xbf, 0xf6, 0x0d, + 0x7a, 0x05, 0x61, 0xc3, 0x1b, 0x5a, 0xea, 0x41, 0x51, 0x7b, 0xa9, 0xb0, 0x98, 0x1b, 0xe1, 0xdb, + 0xa0, 0x68, 0xfa, 0x06, 0x9e, 0x9c, 0xbd, 0x03, 0x7a, 0x0e, 0x01, 0x6f, 0x08, 0xa3, 0xe3, 0x11, + 0xae, 0x48, 0xd7, 0x70, 0x35, 0x5e, 0xf9, 0xa2, 0x11, 0xff, 0xb2, 0x91, 0xd7, 0x10, 0xd9, 0x39, + 0x94, 0x95, 0xec, 0x85, 0x8e, 0x1f, 0x25, 0x5e, 0x16, 0x14, 0x60, 0xa5, 0x95, 0x51, 0x96, 0x3b, + 0x78, 0x51, 0xc9, 0x66, 0xea, 0x0d, 0x96, 0x91, 0x1b, 0xec, 0xc6, 0x2c, 0xc6, 0xc6, 0xdb, 0x7e, + 0x1c, 0x19, 0x26, 0x6b, 0x22, 0x18, 0x96, 0x2d, 0xcb, 0x19, 0x15, 0x76, 0x6d, 0x72, 0xf7, 0x8b, + 0x28, 0xde, 0x9d, 0xad, 0xe3, 0x87, 0x63, 0xf1, 0xf3, 0xb1, 0x05, 0xef, 0xff, 0x07, 0x00, 0x00, + 0xff, 0xff, 0x6e, 0xeb, 0x52, 0x5a, 0xb6, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/instance.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/instance.pb.go new file mode 100644 index 0000000..0adf2e6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/instance.pb.go @@ -0,0 +1,298 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/instance.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Availability of the instance. +type Instance_Availability int32 + +const ( + Instance_UNSPECIFIED Instance_Availability = 0 + Instance_RESIDENT Instance_Availability = 1 + Instance_DYNAMIC Instance_Availability = 2 +) + +var Instance_Availability_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "RESIDENT", + 2: "DYNAMIC", +} +var Instance_Availability_value = map[string]int32{ + "UNSPECIFIED": 0, + "RESIDENT": 1, + "DYNAMIC": 2, +} + +func (x Instance_Availability) String() string { + return proto.EnumName(Instance_Availability_name, int32(x)) +} +func (Instance_Availability) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_instance_db2e4a2fffd7a808, []int{0, 0} +} + +// An Instance resource is the computing unit that App Engine uses to +// automatically scale an application. +type Instance struct { + // Full path to the Instance resource in the API. + // Example: `apps/myapp/services/default/versions/v1/instances/instance-1`. + // + // @OutputOnly + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Relative name of the instance within the version. + // Example: `instance-1`. + // + // @OutputOnly + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // App Engine release this instance is running on. + // + // @OutputOnly + AppEngineRelease string `protobuf:"bytes,3,opt,name=app_engine_release,json=appEngineRelease,proto3" json:"app_engine_release,omitempty"` + // Availability of the instance. 
+ // + // @OutputOnly + Availability Instance_Availability `protobuf:"varint,4,opt,name=availability,proto3,enum=google.appengine.v1.Instance_Availability" json:"availability,omitempty"` + // Name of the virtual machine where this instance lives. Only applicable + // for instances in App Engine flexible environment. + // + // @OutputOnly + VmName string `protobuf:"bytes,5,opt,name=vm_name,json=vmName,proto3" json:"vm_name,omitempty"` + // Zone where the virtual machine is located. Only applicable for instances + // in App Engine flexible environment. + // + // @OutputOnly + VmZoneName string `protobuf:"bytes,6,opt,name=vm_zone_name,json=vmZoneName,proto3" json:"vm_zone_name,omitempty"` + // Virtual machine ID of this instance. Only applicable for instances in + // App Engine flexible environment. + // + // @OutputOnly + VmId string `protobuf:"bytes,7,opt,name=vm_id,json=vmId,proto3" json:"vm_id,omitempty"` + // Time that this instance was started. + // + // @OutputOnly + StartTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Number of requests since this instance was started. + // + // @OutputOnly + Requests int32 `protobuf:"varint,9,opt,name=requests,proto3" json:"requests,omitempty"` + // Number of errors since this instance was started. + // + // @OutputOnly + Errors int32 `protobuf:"varint,10,opt,name=errors,proto3" json:"errors,omitempty"` + // Average queries per second (QPS) over the last minute. + // + // @OutputOnly + Qps float32 `protobuf:"fixed32,11,opt,name=qps,proto3" json:"qps,omitempty"` + // Average latency (ms) over the last minute. + // + // @OutputOnly + AverageLatency int32 `protobuf:"varint,12,opt,name=average_latency,json=averageLatency,proto3" json:"average_latency,omitempty"` + // Total memory in use (bytes). + // + // @OutputOnly + MemoryUsage int64 `protobuf:"varint,13,opt,name=memory_usage,json=memoryUsage,proto3" json:"memory_usage,omitempty"` + // Status of the virtual machine where this instance lives. Only applicable + // for instances in App Engine flexible environment. + // + // @OutputOnly + VmStatus string `protobuf:"bytes,14,opt,name=vm_status,json=vmStatus,proto3" json:"vm_status,omitempty"` + // Whether this instance is in debug mode. Only applicable for instances in + // App Engine flexible environment. 
+ // + // @OutputOnly + VmDebugEnabled bool `protobuf:"varint,15,opt,name=vm_debug_enabled,json=vmDebugEnabled,proto3" json:"vm_debug_enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instance) Reset() { *m = Instance{} } +func (m *Instance) String() string { return proto.CompactTextString(m) } +func (*Instance) ProtoMessage() {} +func (*Instance) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_db2e4a2fffd7a808, []int{0} +} +func (m *Instance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instance.Unmarshal(m, b) +} +func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instance.Marshal(b, m, deterministic) +} +func (dst *Instance) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instance.Merge(dst, src) +} +func (m *Instance) XXX_Size() int { + return xxx_messageInfo_Instance.Size(m) +} +func (m *Instance) XXX_DiscardUnknown() { + xxx_messageInfo_Instance.DiscardUnknown(m) +} + +var xxx_messageInfo_Instance proto.InternalMessageInfo + +func (m *Instance) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instance) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Instance) GetAppEngineRelease() string { + if m != nil { + return m.AppEngineRelease + } + return "" +} + +func (m *Instance) GetAvailability() Instance_Availability { + if m != nil { + return m.Availability + } + return Instance_UNSPECIFIED +} + +func (m *Instance) GetVmName() string { + if m != nil { + return m.VmName + } + return "" +} + +func (m *Instance) GetVmZoneName() string { + if m != nil { + return m.VmZoneName + } + return "" +} + +func (m *Instance) GetVmId() string { + if m != nil { + return m.VmId + } + return "" +} + +func (m *Instance) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Instance) GetRequests() int32 { + if m != nil { + return m.Requests + } + return 0 +} + +func (m *Instance) GetErrors() int32 { + if m != nil { + return m.Errors + } + return 0 +} + +func (m *Instance) GetQps() float32 { + if m != nil { + return m.Qps + } + return 0 +} + +func (m *Instance) GetAverageLatency() int32 { + if m != nil { + return m.AverageLatency + } + return 0 +} + +func (m *Instance) GetMemoryUsage() int64 { + if m != nil { + return m.MemoryUsage + } + return 0 +} + +func (m *Instance) GetVmStatus() string { + if m != nil { + return m.VmStatus + } + return "" +} + +func (m *Instance) GetVmDebugEnabled() bool { + if m != nil { + return m.VmDebugEnabled + } + return false +} + +func init() { + proto.RegisterType((*Instance)(nil), "google.appengine.v1.Instance") + proto.RegisterEnum("google.appengine.v1.Instance_Availability", Instance_Availability_name, Instance_Availability_value) +} + +func init() { + proto.RegisterFile("google/appengine/v1/instance.proto", fileDescriptor_instance_db2e4a2fffd7a808) +} + +var fileDescriptor_instance_db2e4a2fffd7a808 = []byte{ + // 521 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x53, 0x5d, 0x6b, 0xdb, 0x3c, + 0x14, 0x7e, 0x9d, 0xb6, 0xa9, 0x73, 0xe2, 0x26, 0x46, 0x85, 0xb7, 0x22, 0x1b, 0xcc, 0xcb, 0xcd, + 0xcc, 0x18, 0x36, 0xed, 0xae, 0xf6, 0x71, 0xd3, 0x36, 0x1e, 0x18, 0xb6, 0x10, 0x9c, 0xf6, 0x62, + 0xbd, 0x31, 0x4a, 0xac, 0x79, 0x02, 0x4b, 0x72, 0x2d, 0x45, 0x90, 0xfd, 0xc6, 0xfd, 0xa8, 0x61, + 0x39, 0x09, 0x2d, 0xf4, 0xce, 0xcf, 0xc7, 
0x41, 0xcf, 0x79, 0x0e, 0x86, 0x69, 0x29, 0x65, 0x59, + 0xd1, 0x98, 0xd4, 0x35, 0x15, 0x25, 0x13, 0x34, 0x36, 0x97, 0x31, 0x13, 0x4a, 0x13, 0xb1, 0xa6, + 0x51, 0xdd, 0x48, 0x2d, 0xd1, 0x79, 0xe7, 0x89, 0x0e, 0x9e, 0xc8, 0x5c, 0x4e, 0x5e, 0x1f, 0x06, + 0x59, 0x4c, 0x84, 0x90, 0x9a, 0x68, 0x26, 0x85, 0xea, 0x46, 0x26, 0x6f, 0x76, 0xaa, 0x45, 0xab, + 0xcd, 0xaf, 0x58, 0x33, 0x4e, 0x95, 0x26, 0xbc, 0xee, 0x0c, 0xd3, 0xbf, 0xc7, 0xe0, 0xa6, 0xbb, + 0x67, 0x10, 0x82, 0x63, 0x41, 0x38, 0xc5, 0x4e, 0xe0, 0x84, 0x83, 0xcc, 0x7e, 0xa3, 0x11, 0xf4, + 0x58, 0x81, 0x7b, 0x96, 0xe9, 0xb1, 0x02, 0x7d, 0x00, 0x44, 0xea, 0x3a, 0xef, 0x02, 0xe4, 0x0d, + 0xad, 0x28, 0x51, 0x14, 0x1f, 0x59, 0xdd, 0x27, 0x75, 0x9d, 0x58, 0x21, 0xeb, 0x78, 0x34, 0x07, + 0x8f, 0x18, 0xc2, 0x2a, 0xb2, 0x62, 0x15, 0xd3, 0x5b, 0x7c, 0x1c, 0x38, 0xe1, 0xe8, 0xea, 0x7d, + 0xf4, 0xc2, 0x26, 0xd1, 0x3e, 0x46, 0x74, 0xfd, 0x64, 0x22, 0x7b, 0x36, 0x8f, 0x2e, 0xe0, 0xd4, + 0xf0, 0xdc, 0x86, 0x3c, 0xb1, 0x4f, 0xf6, 0x0d, 0x9f, 0xb7, 0x31, 0x03, 0xf0, 0x0c, 0xcf, 0xff, + 0x48, 0x41, 0x3b, 0xb5, 0x6f, 0x55, 0x30, 0xfc, 0x41, 0x0a, 0x6a, 0x1d, 0xe7, 0x70, 0x62, 0x78, + 0xce, 0x0a, 0x7c, 0xda, 0x6d, 0x67, 0x78, 0x5a, 0xa0, 0x4f, 0x00, 0x4a, 0x93, 0x46, 0xe7, 0x6d, + 0x2f, 0xd8, 0x0d, 0x9c, 0x70, 0x78, 0x35, 0xd9, 0xa7, 0xdb, 0x97, 0x16, 0xdd, 0xed, 0x4b, 0xcb, + 0x06, 0xd6, 0xdd, 0x62, 0x34, 0x01, 0xb7, 0xa1, 0x8f, 0x1b, 0xaa, 0xb4, 0xc2, 0x83, 0xc0, 0x09, + 0x4f, 0xb2, 0x03, 0x46, 0xff, 0x43, 0x9f, 0x36, 0x8d, 0x6c, 0x14, 0x06, 0xab, 0xec, 0x10, 0xf2, + 0xe1, 0xe8, 0xb1, 0x56, 0x78, 0x18, 0x38, 0x61, 0x2f, 0x6b, 0x3f, 0xd1, 0x3b, 0x18, 0x13, 0x43, + 0x1b, 0x52, 0xd2, 0xbc, 0x22, 0x9a, 0x8a, 0xf5, 0x16, 0x7b, 0x76, 0x64, 0xb4, 0xa3, 0xbf, 0x77, + 0x2c, 0x7a, 0x0b, 0x1e, 0xa7, 0x5c, 0x36, 0xdb, 0x7c, 0xa3, 0x48, 0x49, 0xf1, 0x59, 0xe0, 0x84, + 0x47, 0xd9, 0xb0, 0xe3, 0xee, 0x5b, 0x0a, 0xbd, 0x82, 0x81, 0xe1, 0xb9, 0xd2, 0x44, 0x6f, 0x14, + 0x1e, 0xd9, 0x2d, 0x5d, 0xc3, 0x97, 0x16, 0xa3, 0x10, 0x7c, 0xc3, 0xf3, 0x82, 0xae, 0x36, 0x65, + 0x4e, 0x05, 0x59, 0x55, 0xb4, 0xc0, 0xe3, 0xc0, 0x09, 0xdd, 0x6c, 0x64, 0xf8, 0xac, 0xa5, 0x93, + 0x8e, 0x9d, 0x7e, 0x06, 0xef, 0xe9, 0x05, 0xd0, 0x18, 0x86, 0xf7, 0xf3, 0xe5, 0x22, 0xb9, 0x4d, + 0xbf, 0xa5, 0xc9, 0xcc, 0xff, 0x0f, 0x79, 0xe0, 0x66, 0xc9, 0x32, 0x9d, 0x25, 0xf3, 0x3b, 0xdf, + 0x41, 0x43, 0x38, 0x9d, 0xfd, 0x9c, 0x5f, 0xff, 0x48, 0x6f, 0xfd, 0xde, 0xcd, 0x6f, 0xb8, 0x58, + 0x4b, 0xfe, 0xd2, 0x79, 0x6f, 0xce, 0xf6, 0xf7, 0x5d, 0xb4, 0xb5, 0x2e, 0x9c, 0x87, 0xaf, 0x3b, + 0x57, 0x29, 0x2b, 0x22, 0xca, 0x48, 0x36, 0x65, 0x5c, 0x52, 0x61, 0x4b, 0x8f, 0x3b, 0x89, 0xd4, + 0x4c, 0x3d, 0xfb, 0x23, 0xbe, 0x1c, 0xc0, 0xaa, 0x6f, 0x8d, 0x1f, 0xff, 0x05, 0x00, 0x00, 0xff, + 0xff, 0x97, 0xe7, 0x7d, 0x88, 0x39, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/location.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/location.pb.go new file mode 100644 index 0000000..1a05129 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/location.pb.go @@ -0,0 +1,102 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/location.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metadata for the given +// [google.cloud.location.Location][google.cloud.location.Location]. +type LocationMetadata struct { + // App Engine Standard Environment is available in the given location. + // + // @OutputOnly + StandardEnvironmentAvailable bool `protobuf:"varint,2,opt,name=standard_environment_available,json=standardEnvironmentAvailable,proto3" json:"standard_environment_available,omitempty"` + // App Engine Flexible Environment is available in the given location. + // + // @OutputOnly + FlexibleEnvironmentAvailable bool `protobuf:"varint,4,opt,name=flexible_environment_available,json=flexibleEnvironmentAvailable,proto3" json:"flexible_environment_available,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationMetadata) Reset() { *m = LocationMetadata{} } +func (m *LocationMetadata) String() string { return proto.CompactTextString(m) } +func (*LocationMetadata) ProtoMessage() {} +func (*LocationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_location_d54799c7cf43fb8a, []int{0} +} +func (m *LocationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationMetadata.Unmarshal(m, b) +} +func (m *LocationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationMetadata.Marshal(b, m, deterministic) +} +func (dst *LocationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationMetadata.Merge(dst, src) +} +func (m *LocationMetadata) XXX_Size() int { + return xxx_messageInfo_LocationMetadata.Size(m) +} +func (m *LocationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LocationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationMetadata proto.InternalMessageInfo + +func (m *LocationMetadata) GetStandardEnvironmentAvailable() bool { + if m != nil { + return m.StandardEnvironmentAvailable + } + return false +} + +func (m *LocationMetadata) GetFlexibleEnvironmentAvailable() bool { + if m != nil { + return m.FlexibleEnvironmentAvailable + } + return false +} + +func init() { + proto.RegisterType((*LocationMetadata)(nil), "google.appengine.v1.LocationMetadata") +} + +func init() { + proto.RegisterFile("google/appengine/v1/location.proto", fileDescriptor_location_d54799c7cf43fb8a) +} + +var fileDescriptor_location_d54799c7cf43fb8a = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x90, 0x41, 0x4b, 0xc3, 0x40, + 0x10, 0x85, 0x89, 0x88, 0x48, 0x40, 0x90, 0x7a, 0xb0, 0x94, 0x22, 0xd2, 0x93, 0xa7, 0x5d, 0x8a, + 0x47, 0xbd, 0x58, 0xf4, 0xa6, 0x50, 0x3c, 0x7a, 0x29, 0x93, 0x66, 0x1c, 0x17, 0xa6, 0x33, 0x4b, + 0x32, 0x04, 0xfd, 0x33, 0xfe, 0x56, 0x69, 0x36, 0x1b, 0x11, 0xea, 0x71, 0x79, 0xdf, 0xfb, 0xd8, + 0x37, 0xe5, 0x82, 0x54, 0x89, 0xd1, 0x43, 0x8c, 0x28, 0x14, 0x04, 0x7d, 0xb7, 0xf4, 0xac, 0x5b, + 0xb0, 0xa0, 0xe2, 0x62, 0xa3, 0xa6, 0x93, 0x8b, 0xc4, 0xb8, 0x91, 0x71, 0xdd, 0x72, 0x36, 0x1f, + 0x8b, 0xc1, 0x83, 0x88, 0x5a, 0xdf, 0x68, 0x53, 0x65, 0x36, 0x1d, 0x52, 0xfb, 0x8a, 0xe8, 0x19, + 0x8c, 0x85, 0x52, 0xb2, 0xf8, 0x2e, 0xca, 0xf3, 0xe7, 
0xc1, 0xff, 0x82, 0x06, 0x35, 0x18, 0x4c, + 0x1e, 0xcb, 0xab, 0xd6, 0x40, 0x6a, 0x68, 0xea, 0x0d, 0x4a, 0x17, 0x1a, 0x95, 0x1d, 0x8a, 0x6d, + 0xa0, 0x83, 0xc0, 0x50, 0x31, 0x4e, 0x8f, 0xae, 0x8b, 0x9b, 0xd3, 0xd7, 0x79, 0xa6, 0x9e, 0x7e, + 0xa1, 0x87, 0xcc, 0xec, 0x2d, 0xef, 0x8c, 0x9f, 0xa1, 0x62, 0xfc, 0xc7, 0x72, 0x9c, 0x2c, 0x99, + 0x3a, 0x64, 0x59, 0x7d, 0x94, 0x97, 0x5b, 0xdd, 0xb9, 0x03, 0x9b, 0x57, 0x67, 0xf9, 0xe3, 0xeb, + 0xfd, 0x94, 0x75, 0xf1, 0x76, 0x3f, 0x50, 0xa4, 0x0c, 0x42, 0x4e, 0x1b, 0xf2, 0x84, 0xd2, 0x0f, + 0xf5, 0x29, 0x82, 0x18, 0xda, 0x3f, 0xc7, 0xbd, 0x1b, 0x1f, 0xd5, 0x49, 0x0f, 0xde, 0xfe, 0x04, + 0x00, 0x00, 0xff, 0xff, 0x93, 0x9b, 0x7c, 0xf8, 0x84, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/operation.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/operation.pb.go new file mode 100644 index 0000000..7c5b0db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/operation.pb.go @@ -0,0 +1,139 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/operation.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metadata for the given +// [google.longrunning.Operation][google.longrunning.Operation]. +type OperationMetadataV1 struct { + // API method that initiated this operation. Example: + // `google.appengine.v1.Versions.CreateVersion`. + // + // @OutputOnly + Method string `protobuf:"bytes,1,opt,name=method,proto3" json:"method,omitempty"` + // Time that this operation was created. + // + // @OutputOnly + InsertTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=insert_time,json=insertTime,proto3" json:"insert_time,omitempty"` + // Time that this operation completed. + // + // @OutputOnly + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // User who requested this operation. + // + // @OutputOnly + User string `protobuf:"bytes,4,opt,name=user,proto3" json:"user,omitempty"` + // Name of the resource that this operation is acting on. Example: + // `apps/myapp/services/default`. 
+ // + // @OutputOnly + Target string `protobuf:"bytes,5,opt,name=target,proto3" json:"target,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadataV1) Reset() { *m = OperationMetadataV1{} } +func (m *OperationMetadataV1) String() string { return proto.CompactTextString(m) } +func (*OperationMetadataV1) ProtoMessage() {} +func (*OperationMetadataV1) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_326b57a977d1c44b, []int{0} +} +func (m *OperationMetadataV1) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadataV1.Unmarshal(m, b) +} +func (m *OperationMetadataV1) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadataV1.Marshal(b, m, deterministic) +} +func (dst *OperationMetadataV1) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadataV1.Merge(dst, src) +} +func (m *OperationMetadataV1) XXX_Size() int { + return xxx_messageInfo_OperationMetadataV1.Size(m) +} +func (m *OperationMetadataV1) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadataV1.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadataV1 proto.InternalMessageInfo + +func (m *OperationMetadataV1) GetMethod() string { + if m != nil { + return m.Method + } + return "" +} + +func (m *OperationMetadataV1) GetInsertTime() *timestamp.Timestamp { + if m != nil { + return m.InsertTime + } + return nil +} + +func (m *OperationMetadataV1) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *OperationMetadataV1) GetUser() string { + if m != nil { + return m.User + } + return "" +} + +func (m *OperationMetadataV1) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func init() { + proto.RegisterType((*OperationMetadataV1)(nil), "google.appengine.v1.OperationMetadataV1") +} + +func init() { + proto.RegisterFile("google/appengine/v1/operation.proto", fileDescriptor_operation_326b57a977d1c44b) +} + +var fileDescriptor_operation_326b57a977d1c44b = []byte{ + // 271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x41, 0x4b, 0x03, 0x31, + 0x10, 0x85, 0x59, 0xad, 0x55, 0x53, 0xf0, 0xb0, 0x05, 0x5d, 0x16, 0xc1, 0xa2, 0x97, 0x9e, 0x12, + 0x56, 0xf1, 0x54, 0x4f, 0xbd, 0x8b, 0xa5, 0x88, 0x07, 0x2f, 0x92, 0xba, 0x63, 0x0c, 0x74, 0x67, + 0x42, 0x32, 0xed, 0xbf, 0xf4, 0x3f, 0xc9, 0x26, 0xbb, 0x0b, 0x82, 0xd0, 0x5b, 0x5e, 0xe6, 0x7d, + 0x79, 0x2f, 0x89, 0xb8, 0x33, 0x44, 0x66, 0x0b, 0x4a, 0x3b, 0x07, 0x68, 0x2c, 0x82, 0xda, 0x57, + 0x8a, 0x1c, 0x78, 0xcd, 0x96, 0x50, 0x3a, 0x4f, 0x4c, 0xf9, 0x34, 0x99, 0xe4, 0x60, 0x92, 0xfb, + 0xaa, 0xbc, 0x1e, 0x48, 0xab, 0x34, 0x22, 0x71, 0x24, 0x42, 0x42, 0xca, 0x9b, 0x6e, 0x1a, 0xd5, + 0x66, 0xf7, 0xa5, 0xd8, 0x36, 0x10, 0x58, 0x37, 0x2e, 0x19, 0x6e, 0x7f, 0x32, 0x31, 0x7d, 0xe9, + 0x73, 0x9e, 0x81, 0x75, 0xad, 0x59, 0xbf, 0x55, 0xf9, 0xa5, 0x18, 0x37, 0xc0, 0xdf, 0x54, 0x17, + 0xd9, 0x2c, 0x9b, 0x9f, 0xaf, 0x3b, 0x95, 0x2f, 0xc4, 0xc4, 0x62, 0x00, 0xcf, 0x1f, 0xed, 0x49, + 0xc5, 0xd1, 0x2c, 0x9b, 0x4f, 0xee, 0x4b, 0xd9, 0x35, 0xeb, 0x63, 0xe4, 0x6b, 0x1f, 0xb3, 0x16, + 0xc9, 0xde, 0x6e, 0xe4, 0x8f, 0xe2, 0x0c, 0xb0, 0x4e, 0xe4, 0xf1, 0x41, 0xf2, 0x14, 0xb0, 0x8e, + 0x58, 0x2e, 0x46, 0xbb, 0x00, 0xbe, 0x18, 0xc5, 0x26, 0x71, 0xdd, 0xf6, 0x63, 0xed, 0x0d, 0x70, + 0x71, 0x92, 0xfa, 0x25, 0xb5, 0xb4, 0xe2, 0xea, 0x93, 0x1a, 0xf9, 0xcf, 0x4b, 0x2d, 0x2f, 0x86, + 0x7b, 0xae, 
0xda, 0xb0, 0x55, 0xf6, 0xfe, 0xd4, 0xd9, 0x0c, 0x6d, 0x35, 0x1a, 0x49, 0xde, 0x28, + 0x03, 0x18, 0xab, 0xa8, 0x34, 0xd2, 0xce, 0x86, 0x3f, 0x9f, 0xb2, 0x18, 0xc4, 0x66, 0x1c, 0x8d, + 0x0f, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x8e, 0xb2, 0x00, 0xbc, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/service.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/service.pb.go new file mode 100644 index 0000000..d275e39 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/service.pb.go @@ -0,0 +1,220 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/service.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Available sharding mechanisms. +type TrafficSplit_ShardBy int32 + +const ( + // Diversion method unspecified. + TrafficSplit_UNSPECIFIED TrafficSplit_ShardBy = 0 + // Diversion based on a specially named cookie, "GOOGAPPUID." The cookie + // must be set by the application itself or no diversion will occur. + TrafficSplit_COOKIE TrafficSplit_ShardBy = 1 + // Diversion based on applying the modulus operation to a fingerprint + // of the IP address. + TrafficSplit_IP TrafficSplit_ShardBy = 2 +) + +var TrafficSplit_ShardBy_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "COOKIE", + 2: "IP", +} +var TrafficSplit_ShardBy_value = map[string]int32{ + "UNSPECIFIED": 0, + "COOKIE": 1, + "IP": 2, +} + +func (x TrafficSplit_ShardBy) String() string { + return proto.EnumName(TrafficSplit_ShardBy_name, int32(x)) +} +func (TrafficSplit_ShardBy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_service_5cc1e3a69330c73e, []int{1, 0} +} + +// A Service resource is a logical component of an application that can share +// state and communicate in a secure fashion with other services. +// For example, an application that handles customer requests might +// include separate services to handle tasks such as backend data +// analysis or API requests from mobile devices. Each service has a +// collection of versions that define a specific set of code used to +// implement the functionality of that service. +type Service struct { + // Full path to the Service resource in the API. + // Example: `apps/myapp/services/default`. + // + // @OutputOnly + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Relative name of the service within the application. + // Example: `default`. + // + // @OutputOnly + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Mapping that defines fractional HTTP traffic diversion to + // different versions within the service. 
+ Split *TrafficSplit `protobuf:"bytes,3,opt,name=split,proto3" json:"split,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Service) Reset() { *m = Service{} } +func (m *Service) String() string { return proto.CompactTextString(m) } +func (*Service) ProtoMessage() {} +func (*Service) Descriptor() ([]byte, []int) { + return fileDescriptor_service_5cc1e3a69330c73e, []int{0} +} +func (m *Service) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Service.Unmarshal(m, b) +} +func (m *Service) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Service.Marshal(b, m, deterministic) +} +func (dst *Service) XXX_Merge(src proto.Message) { + xxx_messageInfo_Service.Merge(dst, src) +} +func (m *Service) XXX_Size() int { + return xxx_messageInfo_Service.Size(m) +} +func (m *Service) XXX_DiscardUnknown() { + xxx_messageInfo_Service.DiscardUnknown(m) +} + +var xxx_messageInfo_Service proto.InternalMessageInfo + +func (m *Service) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Service) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Service) GetSplit() *TrafficSplit { + if m != nil { + return m.Split + } + return nil +} + +// Traffic routing configuration for versions within a single service. Traffic +// splits define how traffic directed to the service is assigned to versions. +type TrafficSplit struct { + // Mechanism used to determine which version a request is sent to. + // The traffic selection algorithm will + // be stable for either type until allocations are changed. + ShardBy TrafficSplit_ShardBy `protobuf:"varint,1,opt,name=shard_by,json=shardBy,proto3,enum=google.appengine.v1.TrafficSplit_ShardBy" json:"shard_by,omitempty"` + // Mapping from version IDs within the service to fractional + // (0.000, 1] allocations of traffic for that version. Each version can + // be specified only once, but some versions in the service may not + // have any traffic allocation. Services that have traffic allocated + // cannot be deleted until either the service is deleted or + // their traffic allocation is removed. Allocations must sum to 1. + // Up to two decimal place precision is supported for IP-based splits and + // up to three decimal places is supported for cookie-based splits. 
+ Allocations map[string]float64 `protobuf:"bytes,2,rep,name=allocations,proto3" json:"allocations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrafficSplit) Reset() { *m = TrafficSplit{} } +func (m *TrafficSplit) String() string { return proto.CompactTextString(m) } +func (*TrafficSplit) ProtoMessage() {} +func (*TrafficSplit) Descriptor() ([]byte, []int) { + return fileDescriptor_service_5cc1e3a69330c73e, []int{1} +} +func (m *TrafficSplit) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrafficSplit.Unmarshal(m, b) +} +func (m *TrafficSplit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrafficSplit.Marshal(b, m, deterministic) +} +func (dst *TrafficSplit) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrafficSplit.Merge(dst, src) +} +func (m *TrafficSplit) XXX_Size() int { + return xxx_messageInfo_TrafficSplit.Size(m) +} +func (m *TrafficSplit) XXX_DiscardUnknown() { + xxx_messageInfo_TrafficSplit.DiscardUnknown(m) +} + +var xxx_messageInfo_TrafficSplit proto.InternalMessageInfo + +func (m *TrafficSplit) GetShardBy() TrafficSplit_ShardBy { + if m != nil { + return m.ShardBy + } + return TrafficSplit_UNSPECIFIED +} + +func (m *TrafficSplit) GetAllocations() map[string]float64 { + if m != nil { + return m.Allocations + } + return nil +} + +func init() { + proto.RegisterType((*Service)(nil), "google.appengine.v1.Service") + proto.RegisterType((*TrafficSplit)(nil), "google.appengine.v1.TrafficSplit") + proto.RegisterMapType((map[string]float64)(nil), "google.appengine.v1.TrafficSplit.AllocationsEntry") + proto.RegisterEnum("google.appengine.v1.TrafficSplit_ShardBy", TrafficSplit_ShardBy_name, TrafficSplit_ShardBy_value) +} + +func init() { + proto.RegisterFile("google/appengine/v1/service.proto", fileDescriptor_service_5cc1e3a69330c73e) +} + +var fileDescriptor_service_5cc1e3a69330c73e = []byte{ + // 356 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x51, 0x8b, 0xda, 0x40, + 0x10, 0xc7, 0x9b, 0xb5, 0x6a, 0x3b, 0x11, 0x1b, 0xb6, 0x85, 0x06, 0xe9, 0x83, 0xfa, 0x64, 0x5f, + 0x36, 0x98, 0x3e, 0xb4, 0xb4, 0xa5, 0x50, 0x35, 0x85, 0x50, 0x38, 0x43, 0xe2, 0xbd, 0xdc, 0xcb, + 0xb1, 0xc6, 0x75, 0x6f, 0xb9, 0xb8, 0x1b, 0x92, 0x5c, 0x20, 0x5f, 0xe5, 0x3e, 0xed, 0x91, 0x6c, + 0xf0, 0xbc, 0x43, 0xf0, 0x6d, 0x66, 0xf2, 0xfb, 0xff, 0x67, 0x32, 0xb3, 0x30, 0xe1, 0x4a, 0xf1, + 0x84, 0x39, 0x34, 0x4d, 0x99, 0xe4, 0x42, 0x32, 0xa7, 0x9c, 0x3b, 0x39, 0xcb, 0x4a, 0x11, 0x33, + 0x92, 0x66, 0xaa, 0x50, 0xf8, 0xa3, 0x46, 0xc8, 0x11, 0x21, 0xe5, 0x7c, 0xf4, 0xe5, 0xa8, 0x13, + 0x0e, 0x95, 0x52, 0x15, 0xb4, 0x10, 0x4a, 0xe6, 0x5a, 0x32, 0xdd, 0x43, 0x3f, 0xd2, 0x1e, 0x18, + 0xc3, 0x5b, 0x49, 0x0f, 0xcc, 0x36, 0xc6, 0xc6, 0xec, 0x7d, 0xd8, 0xc4, 0x78, 0x08, 0x48, 0xec, + 0x6c, 0xd4, 0x54, 0x90, 0xd8, 0xe1, 0xef, 0xd0, 0xcd, 0xd3, 0x44, 0x14, 0x76, 0x67, 0x6c, 0xcc, + 0x4c, 0x77, 0x42, 0xce, 0x74, 0x24, 0x9b, 0x8c, 0xee, 0xf7, 0x22, 0x8e, 0x6a, 0x30, 0xd4, 0xfc, + 0xf4, 0x11, 0xc1, 0xe0, 0xb4, 0x8e, 0x57, 0xf0, 0x2e, 0xbf, 0xa3, 0xd9, 0xee, 0x76, 0x5b, 0x35, + 0x1d, 0x87, 0xee, 0xd7, 0x8b, 0x66, 0x24, 0xaa, 0x15, 0x8b, 0x2a, 0xec, 0xe7, 0x3a, 0xc0, 0x1b, + 0x30, 0x69, 0x92, 0xa8, 0x58, 0xff, 0x93, 0x8d, 0xc6, 0x9d, 0x99, 0xe9, 0xba, 0x97, 0x8d, 0xfe, + 0x3e, 0x8b, 0x3c, 0x59, 0x64, 0x55, 0x78, 0x6a, 0x33, 0xfa, 0x03, 
0xd6, 0x6b, 0x00, 0x5b, 0xd0, + 0xb9, 0x67, 0x55, 0xbb, 0x9c, 0x3a, 0xc4, 0x9f, 0xa0, 0x5b, 0xd2, 0xe4, 0x81, 0x35, 0xeb, 0x31, + 0x42, 0x9d, 0xfc, 0x44, 0x3f, 0x8c, 0x29, 0x81, 0x7e, 0x3b, 0x29, 0xfe, 0x00, 0xe6, 0xf5, 0x55, + 0x14, 0x78, 0x4b, 0xff, 0x9f, 0xef, 0xad, 0xac, 0x37, 0x18, 0xa0, 0xb7, 0x5c, 0xaf, 0xff, 0xfb, + 0x9e, 0x65, 0xe0, 0x1e, 0x20, 0x3f, 0xb0, 0xd0, 0x82, 0xc3, 0xe7, 0x58, 0x1d, 0xce, 0x4d, 0xbd, + 0x18, 0xb4, 0xd7, 0x09, 0xea, 0x6b, 0x05, 0xc6, 0xcd, 0xef, 0x16, 0xe2, 0x2a, 0xa1, 0x92, 0x13, + 0x95, 0x71, 0x87, 0x33, 0xd9, 0xdc, 0xd2, 0xd1, 0x9f, 0x68, 0x2a, 0xf2, 0x17, 0x8f, 0xe4, 0xd7, + 0x31, 0xd9, 0xf6, 0x1a, 0xf0, 0xdb, 0x53, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5f, 0xbc, 0x13, 0xf4, + 0x4c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/appengine/v1/version.pb.go b/vendor/google.golang.org/genproto/googleapis/appengine/v1/version.pb.go new file mode 100644 index 0000000..a581910 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/appengine/v1/version.pb.go @@ -0,0 +1,1312 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/appengine/v1/version.proto + +package appengine // import "google.golang.org/genproto/googleapis/appengine/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Available inbound services. +type InboundServiceType int32 + +const ( + // Not specified. + InboundServiceType_INBOUND_SERVICE_UNSPECIFIED InboundServiceType = 0 + // Allows an application to receive mail. + InboundServiceType_INBOUND_SERVICE_MAIL InboundServiceType = 1 + // Allows an application to receive email-bound notifications. + InboundServiceType_INBOUND_SERVICE_MAIL_BOUNCE InboundServiceType = 2 + // Allows an application to receive error stanzas. + InboundServiceType_INBOUND_SERVICE_XMPP_ERROR InboundServiceType = 3 + // Allows an application to receive instant messages. + InboundServiceType_INBOUND_SERVICE_XMPP_MESSAGE InboundServiceType = 4 + // Allows an application to receive user subscription POSTs. + InboundServiceType_INBOUND_SERVICE_XMPP_SUBSCRIBE InboundServiceType = 5 + // Allows an application to receive a user's chat presence. + InboundServiceType_INBOUND_SERVICE_XMPP_PRESENCE InboundServiceType = 6 + // Registers an application for notifications when a client connects or + // disconnects from a channel. + InboundServiceType_INBOUND_SERVICE_CHANNEL_PRESENCE InboundServiceType = 7 + // Enables warmup requests. 
+ InboundServiceType_INBOUND_SERVICE_WARMUP InboundServiceType = 9 +) + +var InboundServiceType_name = map[int32]string{ + 0: "INBOUND_SERVICE_UNSPECIFIED", + 1: "INBOUND_SERVICE_MAIL", + 2: "INBOUND_SERVICE_MAIL_BOUNCE", + 3: "INBOUND_SERVICE_XMPP_ERROR", + 4: "INBOUND_SERVICE_XMPP_MESSAGE", + 5: "INBOUND_SERVICE_XMPP_SUBSCRIBE", + 6: "INBOUND_SERVICE_XMPP_PRESENCE", + 7: "INBOUND_SERVICE_CHANNEL_PRESENCE", + 9: "INBOUND_SERVICE_WARMUP", +} +var InboundServiceType_value = map[string]int32{ + "INBOUND_SERVICE_UNSPECIFIED": 0, + "INBOUND_SERVICE_MAIL": 1, + "INBOUND_SERVICE_MAIL_BOUNCE": 2, + "INBOUND_SERVICE_XMPP_ERROR": 3, + "INBOUND_SERVICE_XMPP_MESSAGE": 4, + "INBOUND_SERVICE_XMPP_SUBSCRIBE": 5, + "INBOUND_SERVICE_XMPP_PRESENCE": 6, + "INBOUND_SERVICE_CHANNEL_PRESENCE": 7, + "INBOUND_SERVICE_WARMUP": 9, +} + +func (x InboundServiceType) String() string { + return proto.EnumName(InboundServiceType_name, int32(x)) +} +func (InboundServiceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{0} +} + +// Run states of a version. +type ServingStatus int32 + +const ( + // Not specified. + ServingStatus_SERVING_STATUS_UNSPECIFIED ServingStatus = 0 + // Currently serving. Instances are created according to the + // scaling settings of the version. + ServingStatus_SERVING ServingStatus = 1 + // Disabled. No instances will be created and the scaling + // settings are ignored until the state of the version changes + // to `SERVING`. + ServingStatus_STOPPED ServingStatus = 2 +) + +var ServingStatus_name = map[int32]string{ + 0: "SERVING_STATUS_UNSPECIFIED", + 1: "SERVING", + 2: "STOPPED", +} +var ServingStatus_value = map[string]int32{ + "SERVING_STATUS_UNSPECIFIED": 0, + "SERVING": 1, + "STOPPED": 2, +} + +func (x ServingStatus) String() string { + return proto.EnumName(ServingStatus_name, int32(x)) +} +func (ServingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{1} +} + +// A Version resource is a specific set of source code and configuration files +// that are deployed into a service. +type Version struct { + // Full path to the Version resource in the API. Example: + // `apps/myapp/services/default/versions/v1`. + // + // @OutputOnly + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Relative name of the version within the service. Example: `v1`. + // Version names can contain only lowercase letters, numbers, or hyphens. + // Reserved names: "default", "latest", and any name with the prefix "ah-". + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Controls how instances are created. + // + // Defaults to `AutomaticScaling`. + // + // Types that are valid to be assigned to Scaling: + // *Version_AutomaticScaling + // *Version_BasicScaling + // *Version_ManualScaling + Scaling isVersion_Scaling `protobuf_oneof:"scaling"` + // Before an application can receive email or XMPP messages, the application + // must be configured to enable the service. + InboundServices []InboundServiceType `protobuf:"varint,6,rep,packed,name=inbound_services,json=inboundServices,proto3,enum=google.appengine.v1.InboundServiceType" json:"inbound_services,omitempty"` + // Instance class that is used to run this version. Valid values are: + // * AutomaticScaling: `F1`, `F2`, `F4`, `F4_1G` + // * ManualScaling or BasicScaling: `B1`, `B2`, `B4`, `B8`, `B4_1G` + // + // Defaults to `F1` for AutomaticScaling and `B1` for ManualScaling or + // BasicScaling. 
+ InstanceClass string `protobuf:"bytes,7,opt,name=instance_class,json=instanceClass,proto3" json:"instance_class,omitempty"` + // Extra network settings. Only applicable for VM runtimes. + Network *Network `protobuf:"bytes,8,opt,name=network,proto3" json:"network,omitempty"` + // Machine resources for this version. Only applicable for VM runtimes. + Resources *Resources `protobuf:"bytes,9,opt,name=resources,proto3" json:"resources,omitempty"` + // Desired runtime. Example: `python27`. + Runtime string `protobuf:"bytes,10,opt,name=runtime,proto3" json:"runtime,omitempty"` + // Whether multiple requests can be dispatched to this version at once. + Threadsafe bool `protobuf:"varint,11,opt,name=threadsafe,proto3" json:"threadsafe,omitempty"` + // Whether to deploy this version in a container on a virtual machine. + Vm bool `protobuf:"varint,12,opt,name=vm,proto3" json:"vm,omitempty"` + // Metadata settings that are supplied to this version to enable + // beta runtime features. + BetaSettings map[string]string `protobuf:"bytes,13,rep,name=beta_settings,json=betaSettings,proto3" json:"beta_settings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // App Engine execution environment for this version. + // + // Defaults to `standard`. + Env string `protobuf:"bytes,14,opt,name=env,proto3" json:"env,omitempty"` + // Current serving status of this version. Only the versions with a + // `SERVING` status create instances and can be billed. + // + // `SERVING_STATUS_UNSPECIFIED` is an invalid value. Defaults to `SERVING`. + ServingStatus ServingStatus `protobuf:"varint,15,opt,name=serving_status,json=servingStatus,proto3,enum=google.appengine.v1.ServingStatus" json:"serving_status,omitempty"` + // Email address of the user who created this version. + // + // @OutputOnly + CreatedBy string `protobuf:"bytes,16,opt,name=created_by,json=createdBy,proto3" json:"created_by,omitempty"` + // Time that this version was created. + // + // @OutputOnly + CreateTime *timestamp.Timestamp `protobuf:"bytes,17,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Total size in bytes of all the files that are included in this version + // and curerntly hosted on the App Engine disk. + // + // @OutputOnly + DiskUsageBytes int64 `protobuf:"varint,18,opt,name=disk_usage_bytes,json=diskUsageBytes,proto3" json:"disk_usage_bytes,omitempty"` + // An ordered list of URL-matching patterns that should be applied to incoming + // requests. The first matching URL handles the request and other request + // handlers are not attempted. + // + // Only returned in `GET` requests if `view=FULL` is set. + Handlers []*UrlMap `protobuf:"bytes,100,rep,name=handlers,proto3" json:"handlers,omitempty"` + // Custom static error pages. Limited to 10KB per page. + // + // Only returned in `GET` requests if `view=FULL` is set. + ErrorHandlers []*ErrorHandler `protobuf:"bytes,101,rep,name=error_handlers,json=errorHandlers,proto3" json:"error_handlers,omitempty"` + // Configuration for third-party Python runtime libraries that are required + // by the application. + // + // Only returned in `GET` requests if `view=FULL` is set. + Libraries []*Library `protobuf:"bytes,102,rep,name=libraries,proto3" json:"libraries,omitempty"` + // Serving configuration for + // [Google Cloud + // Endpoints](https://cloud.google.com/appengine/docs/python/endpoints/). + // + // Only returned in `GET` requests if `view=FULL` is set. 
+ ApiConfig *ApiConfigHandler `protobuf:"bytes,103,opt,name=api_config,json=apiConfig,proto3" json:"api_config,omitempty"` + // Environment variables available to the application. + // + // Only returned in `GET` requests if `view=FULL` is set. + EnvVariables map[string]string `protobuf:"bytes,104,rep,name=env_variables,json=envVariables,proto3" json:"env_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Duration that static files should be cached by web proxies and browsers. + // Only applicable if the corresponding + // [StaticFilesHandler](https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions#staticfileshandler) + // does not specify its own expiration time. + // + // Only returned in `GET` requests if `view=FULL` is set. + DefaultExpiration *duration.Duration `protobuf:"bytes,105,opt,name=default_expiration,json=defaultExpiration,proto3" json:"default_expiration,omitempty"` + // Configures health checking for VM instances. Unhealthy instances are + // stopped and replaced with new instances. Only applicable for VM + // runtimes. + // + // Only returned in `GET` requests if `view=FULL` is set. + HealthCheck *HealthCheck `protobuf:"bytes,106,opt,name=health_check,json=healthCheck,proto3" json:"health_check,omitempty"` + // Files that match this pattern will not be built into this version. + // Only applicable for Go runtimes. + // + // Only returned in `GET` requests if `view=FULL` is set. + NobuildFilesRegex string `protobuf:"bytes,107,opt,name=nobuild_files_regex,json=nobuildFilesRegex,proto3" json:"nobuild_files_regex,omitempty"` + // Code and application artifacts that make up this version. + // + // Only returned in `GET` requests if `view=FULL` is set. + Deployment *Deployment `protobuf:"bytes,108,opt,name=deployment,proto3" json:"deployment,omitempty"` + // Serving URL for this version. 
Example: + // "https://myversion-dot-myservice-dot-myapp.appspot.com" + // + // @OutputOnly + VersionUrl string `protobuf:"bytes,109,opt,name=version_url,json=versionUrl,proto3" json:"version_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{0} +} +func (m *Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Version.Unmarshal(m, b) +} +func (m *Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Version.Marshal(b, m, deterministic) +} +func (dst *Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_Version.Merge(dst, src) +} +func (m *Version) XXX_Size() int { + return xxx_messageInfo_Version.Size(m) +} +func (m *Version) XXX_DiscardUnknown() { + xxx_messageInfo_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_Version proto.InternalMessageInfo + +func (m *Version) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Version) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +type isVersion_Scaling interface { + isVersion_Scaling() +} + +type Version_AutomaticScaling struct { + AutomaticScaling *AutomaticScaling `protobuf:"bytes,3,opt,name=automatic_scaling,json=automaticScaling,proto3,oneof"` +} + +type Version_BasicScaling struct { + BasicScaling *BasicScaling `protobuf:"bytes,4,opt,name=basic_scaling,json=basicScaling,proto3,oneof"` +} + +type Version_ManualScaling struct { + ManualScaling *ManualScaling `protobuf:"bytes,5,opt,name=manual_scaling,json=manualScaling,proto3,oneof"` +} + +func (*Version_AutomaticScaling) isVersion_Scaling() {} + +func (*Version_BasicScaling) isVersion_Scaling() {} + +func (*Version_ManualScaling) isVersion_Scaling() {} + +func (m *Version) GetScaling() isVersion_Scaling { + if m != nil { + return m.Scaling + } + return nil +} + +func (m *Version) GetAutomaticScaling() *AutomaticScaling { + if x, ok := m.GetScaling().(*Version_AutomaticScaling); ok { + return x.AutomaticScaling + } + return nil +} + +func (m *Version) GetBasicScaling() *BasicScaling { + if x, ok := m.GetScaling().(*Version_BasicScaling); ok { + return x.BasicScaling + } + return nil +} + +func (m *Version) GetManualScaling() *ManualScaling { + if x, ok := m.GetScaling().(*Version_ManualScaling); ok { + return x.ManualScaling + } + return nil +} + +func (m *Version) GetInboundServices() []InboundServiceType { + if m != nil { + return m.InboundServices + } + return nil +} + +func (m *Version) GetInstanceClass() string { + if m != nil { + return m.InstanceClass + } + return "" +} + +func (m *Version) GetNetwork() *Network { + if m != nil { + return m.Network + } + return nil +} + +func (m *Version) GetResources() *Resources { + if m != nil { + return m.Resources + } + return nil +} + +func (m *Version) GetRuntime() string { + if m != nil { + return m.Runtime + } + return "" +} + +func (m *Version) GetThreadsafe() bool { + if m != nil { + return m.Threadsafe + } + return false +} + +func (m *Version) GetVm() bool { + if m != nil { + return m.Vm + } + return false +} + +func (m *Version) GetBetaSettings() map[string]string { + if m != nil { + return m.BetaSettings + } + return nil +} + +func (m *Version) GetEnv() string { + if m != nil { + return m.Env 
+ } + return "" +} + +func (m *Version) GetServingStatus() ServingStatus { + if m != nil { + return m.ServingStatus + } + return ServingStatus_SERVING_STATUS_UNSPECIFIED +} + +func (m *Version) GetCreatedBy() string { + if m != nil { + return m.CreatedBy + } + return "" +} + +func (m *Version) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Version) GetDiskUsageBytes() int64 { + if m != nil { + return m.DiskUsageBytes + } + return 0 +} + +func (m *Version) GetHandlers() []*UrlMap { + if m != nil { + return m.Handlers + } + return nil +} + +func (m *Version) GetErrorHandlers() []*ErrorHandler { + if m != nil { + return m.ErrorHandlers + } + return nil +} + +func (m *Version) GetLibraries() []*Library { + if m != nil { + return m.Libraries + } + return nil +} + +func (m *Version) GetApiConfig() *ApiConfigHandler { + if m != nil { + return m.ApiConfig + } + return nil +} + +func (m *Version) GetEnvVariables() map[string]string { + if m != nil { + return m.EnvVariables + } + return nil +} + +func (m *Version) GetDefaultExpiration() *duration.Duration { + if m != nil { + return m.DefaultExpiration + } + return nil +} + +func (m *Version) GetHealthCheck() *HealthCheck { + if m != nil { + return m.HealthCheck + } + return nil +} + +func (m *Version) GetNobuildFilesRegex() string { + if m != nil { + return m.NobuildFilesRegex + } + return "" +} + +func (m *Version) GetDeployment() *Deployment { + if m != nil { + return m.Deployment + } + return nil +} + +func (m *Version) GetVersionUrl() string { + if m != nil { + return m.VersionUrl + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Version) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Version_OneofMarshaler, _Version_OneofUnmarshaler, _Version_OneofSizer, []interface{}{ + (*Version_AutomaticScaling)(nil), + (*Version_BasicScaling)(nil), + (*Version_ManualScaling)(nil), + } +} + +func _Version_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Version) + // scaling + switch x := m.Scaling.(type) { + case *Version_AutomaticScaling: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AutomaticScaling); err != nil { + return err + } + case *Version_BasicScaling: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BasicScaling); err != nil { + return err + } + case *Version_ManualScaling: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManualScaling); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Version.Scaling has unexpected type %T", x) + } + return nil +} + +func _Version_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Version) + switch tag { + case 3: // scaling.automatic_scaling + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AutomaticScaling) + err := b.DecodeMessage(msg) + m.Scaling = &Version_AutomaticScaling{msg} + return true, err + case 4: // scaling.basic_scaling + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BasicScaling) + err := b.DecodeMessage(msg) + m.Scaling = &Version_BasicScaling{msg} + return true, err + case 5: // scaling.manual_scaling + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(ManualScaling) + err := b.DecodeMessage(msg) + m.Scaling = &Version_ManualScaling{msg} + return true, err + default: + return false, nil + } +} + +func _Version_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Version) + // scaling + switch x := m.Scaling.(type) { + case *Version_AutomaticScaling: + s := proto.Size(x.AutomaticScaling) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Version_BasicScaling: + s := proto.Size(x.BasicScaling) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Version_ManualScaling: + s := proto.Size(x.ManualScaling) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Automatic scaling is based on request rate, response latencies, and other +// application metrics. +type AutomaticScaling struct { + // Amount of time that the + // [Autoscaler](https://cloud.google.com/compute/docs/autoscaler/) + // should wait between changes to the number of virtual machines. + // Only applicable for VM runtimes. + CoolDownPeriod *duration.Duration `protobuf:"bytes,1,opt,name=cool_down_period,json=coolDownPeriod,proto3" json:"cool_down_period,omitempty"` + // Target scaling by CPU usage. + CpuUtilization *CpuUtilization `protobuf:"bytes,2,opt,name=cpu_utilization,json=cpuUtilization,proto3" json:"cpu_utilization,omitempty"` + // Number of concurrent requests an automatic scaling instance can accept + // before the scheduler spawns a new instance. + // + // Defaults to a runtime-specific value. + MaxConcurrentRequests int32 `protobuf:"varint,3,opt,name=max_concurrent_requests,json=maxConcurrentRequests,proto3" json:"max_concurrent_requests,omitempty"` + // Maximum number of idle instances that should be maintained for this + // version. + MaxIdleInstances int32 `protobuf:"varint,4,opt,name=max_idle_instances,json=maxIdleInstances,proto3" json:"max_idle_instances,omitempty"` + // Maximum number of instances that should be started to handle requests. + MaxTotalInstances int32 `protobuf:"varint,5,opt,name=max_total_instances,json=maxTotalInstances,proto3" json:"max_total_instances,omitempty"` + // Maximum amount of time that a request should wait in the pending queue + // before starting a new instance to handle it. + MaxPendingLatency *duration.Duration `protobuf:"bytes,6,opt,name=max_pending_latency,json=maxPendingLatency,proto3" json:"max_pending_latency,omitempty"` + // Minimum number of idle instances that should be maintained for + // this version. Only applicable for the default version of a service. + MinIdleInstances int32 `protobuf:"varint,7,opt,name=min_idle_instances,json=minIdleInstances,proto3" json:"min_idle_instances,omitempty"` + // Minimum number of instances that should be maintained for this version. + MinTotalInstances int32 `protobuf:"varint,8,opt,name=min_total_instances,json=minTotalInstances,proto3" json:"min_total_instances,omitempty"` + // Minimum amount of time a request should wait in the pending queue before + // starting a new instance to handle it. + MinPendingLatency *duration.Duration `protobuf:"bytes,9,opt,name=min_pending_latency,json=minPendingLatency,proto3" json:"min_pending_latency,omitempty"` + // Target scaling by request utilization. + RequestUtilization *RequestUtilization `protobuf:"bytes,10,opt,name=request_utilization,json=requestUtilization,proto3" json:"request_utilization,omitempty"` + // Target scaling by disk usage. 
+ DiskUtilization *DiskUtilization `protobuf:"bytes,11,opt,name=disk_utilization,json=diskUtilization,proto3" json:"disk_utilization,omitempty"` + // Target scaling by network usage. + NetworkUtilization *NetworkUtilization `protobuf:"bytes,12,opt,name=network_utilization,json=networkUtilization,proto3" json:"network_utilization,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AutomaticScaling) Reset() { *m = AutomaticScaling{} } +func (m *AutomaticScaling) String() string { return proto.CompactTextString(m) } +func (*AutomaticScaling) ProtoMessage() {} +func (*AutomaticScaling) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{1} +} +func (m *AutomaticScaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AutomaticScaling.Unmarshal(m, b) +} +func (m *AutomaticScaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AutomaticScaling.Marshal(b, m, deterministic) +} +func (dst *AutomaticScaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_AutomaticScaling.Merge(dst, src) +} +func (m *AutomaticScaling) XXX_Size() int { + return xxx_messageInfo_AutomaticScaling.Size(m) +} +func (m *AutomaticScaling) XXX_DiscardUnknown() { + xxx_messageInfo_AutomaticScaling.DiscardUnknown(m) +} + +var xxx_messageInfo_AutomaticScaling proto.InternalMessageInfo + +func (m *AutomaticScaling) GetCoolDownPeriod() *duration.Duration { + if m != nil { + return m.CoolDownPeriod + } + return nil +} + +func (m *AutomaticScaling) GetCpuUtilization() *CpuUtilization { + if m != nil { + return m.CpuUtilization + } + return nil +} + +func (m *AutomaticScaling) GetMaxConcurrentRequests() int32 { + if m != nil { + return m.MaxConcurrentRequests + } + return 0 +} + +func (m *AutomaticScaling) GetMaxIdleInstances() int32 { + if m != nil { + return m.MaxIdleInstances + } + return 0 +} + +func (m *AutomaticScaling) GetMaxTotalInstances() int32 { + if m != nil { + return m.MaxTotalInstances + } + return 0 +} + +func (m *AutomaticScaling) GetMaxPendingLatency() *duration.Duration { + if m != nil { + return m.MaxPendingLatency + } + return nil +} + +func (m *AutomaticScaling) GetMinIdleInstances() int32 { + if m != nil { + return m.MinIdleInstances + } + return 0 +} + +func (m *AutomaticScaling) GetMinTotalInstances() int32 { + if m != nil { + return m.MinTotalInstances + } + return 0 +} + +func (m *AutomaticScaling) GetMinPendingLatency() *duration.Duration { + if m != nil { + return m.MinPendingLatency + } + return nil +} + +func (m *AutomaticScaling) GetRequestUtilization() *RequestUtilization { + if m != nil { + return m.RequestUtilization + } + return nil +} + +func (m *AutomaticScaling) GetDiskUtilization() *DiskUtilization { + if m != nil { + return m.DiskUtilization + } + return nil +} + +func (m *AutomaticScaling) GetNetworkUtilization() *NetworkUtilization { + if m != nil { + return m.NetworkUtilization + } + return nil +} + +// A service with basic scaling will create an instance when the application +// receives a request. The instance will be turned down when the app becomes +// idle. Basic scaling is ideal for work that is intermittent or driven by +// user activity. +type BasicScaling struct { + // Duration of time after the last request that an instance must wait before + // the instance is shut down. 
+ IdleTimeout *duration.Duration `protobuf:"bytes,1,opt,name=idle_timeout,json=idleTimeout,proto3" json:"idle_timeout,omitempty"` + // Maximum number of instances to create for this version. + MaxInstances int32 `protobuf:"varint,2,opt,name=max_instances,json=maxInstances,proto3" json:"max_instances,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BasicScaling) Reset() { *m = BasicScaling{} } +func (m *BasicScaling) String() string { return proto.CompactTextString(m) } +func (*BasicScaling) ProtoMessage() {} +func (*BasicScaling) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{2} +} +func (m *BasicScaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BasicScaling.Unmarshal(m, b) +} +func (m *BasicScaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BasicScaling.Marshal(b, m, deterministic) +} +func (dst *BasicScaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_BasicScaling.Merge(dst, src) +} +func (m *BasicScaling) XXX_Size() int { + return xxx_messageInfo_BasicScaling.Size(m) +} +func (m *BasicScaling) XXX_DiscardUnknown() { + xxx_messageInfo_BasicScaling.DiscardUnknown(m) +} + +var xxx_messageInfo_BasicScaling proto.InternalMessageInfo + +func (m *BasicScaling) GetIdleTimeout() *duration.Duration { + if m != nil { + return m.IdleTimeout + } + return nil +} + +func (m *BasicScaling) GetMaxInstances() int32 { + if m != nil { + return m.MaxInstances + } + return 0 +} + +// A service with manual scaling runs continuously, allowing you to perform +// complex initialization and rely on the state of its memory over time. +type ManualScaling struct { + // Number of instances to assign to the service at the start. This number + // can later be altered by using the + // [Modules + // API](https://cloud.google.com/appengine/docs/python/modules/functions) + // `set_num_instances()` function. + Instances int32 `protobuf:"varint,1,opt,name=instances,proto3" json:"instances,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManualScaling) Reset() { *m = ManualScaling{} } +func (m *ManualScaling) String() string { return proto.CompactTextString(m) } +func (*ManualScaling) ProtoMessage() {} +func (*ManualScaling) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{3} +} +func (m *ManualScaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManualScaling.Unmarshal(m, b) +} +func (m *ManualScaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManualScaling.Marshal(b, m, deterministic) +} +func (dst *ManualScaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManualScaling.Merge(dst, src) +} +func (m *ManualScaling) XXX_Size() int { + return xxx_messageInfo_ManualScaling.Size(m) +} +func (m *ManualScaling) XXX_DiscardUnknown() { + xxx_messageInfo_ManualScaling.DiscardUnknown(m) +} + +var xxx_messageInfo_ManualScaling proto.InternalMessageInfo + +func (m *ManualScaling) GetInstances() int32 { + if m != nil { + return m.Instances + } + return 0 +} + +// Target scaling by CPU usage. +type CpuUtilization struct { + // Period of time over which CPU utilization is calculated. 
+ AggregationWindowLength *duration.Duration `protobuf:"bytes,1,opt,name=aggregation_window_length,json=aggregationWindowLength,proto3" json:"aggregation_window_length,omitempty"` + // Target CPU utilization ratio to maintain when scaling. Must be between 0 + // and 1. + TargetUtilization float64 `protobuf:"fixed64,2,opt,name=target_utilization,json=targetUtilization,proto3" json:"target_utilization,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CpuUtilization) Reset() { *m = CpuUtilization{} } +func (m *CpuUtilization) String() string { return proto.CompactTextString(m) } +func (*CpuUtilization) ProtoMessage() {} +func (*CpuUtilization) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{4} +} +func (m *CpuUtilization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CpuUtilization.Unmarshal(m, b) +} +func (m *CpuUtilization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CpuUtilization.Marshal(b, m, deterministic) +} +func (dst *CpuUtilization) XXX_Merge(src proto.Message) { + xxx_messageInfo_CpuUtilization.Merge(dst, src) +} +func (m *CpuUtilization) XXX_Size() int { + return xxx_messageInfo_CpuUtilization.Size(m) +} +func (m *CpuUtilization) XXX_DiscardUnknown() { + xxx_messageInfo_CpuUtilization.DiscardUnknown(m) +} + +var xxx_messageInfo_CpuUtilization proto.InternalMessageInfo + +func (m *CpuUtilization) GetAggregationWindowLength() *duration.Duration { + if m != nil { + return m.AggregationWindowLength + } + return nil +} + +func (m *CpuUtilization) GetTargetUtilization() float64 { + if m != nil { + return m.TargetUtilization + } + return 0 +} + +// Target scaling by request utilization. Only applicable for VM runtimes. +type RequestUtilization struct { + // Target requests per second. + TargetRequestCountPerSecond int32 `protobuf:"varint,1,opt,name=target_request_count_per_second,json=targetRequestCountPerSecond,proto3" json:"target_request_count_per_second,omitempty"` + // Target number of concurrent requests. 
+ TargetConcurrentRequests int32 `protobuf:"varint,2,opt,name=target_concurrent_requests,json=targetConcurrentRequests,proto3" json:"target_concurrent_requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestUtilization) Reset() { *m = RequestUtilization{} } +func (m *RequestUtilization) String() string { return proto.CompactTextString(m) } +func (*RequestUtilization) ProtoMessage() {} +func (*RequestUtilization) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{5} +} +func (m *RequestUtilization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestUtilization.Unmarshal(m, b) +} +func (m *RequestUtilization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestUtilization.Marshal(b, m, deterministic) +} +func (dst *RequestUtilization) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestUtilization.Merge(dst, src) +} +func (m *RequestUtilization) XXX_Size() int { + return xxx_messageInfo_RequestUtilization.Size(m) +} +func (m *RequestUtilization) XXX_DiscardUnknown() { + xxx_messageInfo_RequestUtilization.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestUtilization proto.InternalMessageInfo + +func (m *RequestUtilization) GetTargetRequestCountPerSecond() int32 { + if m != nil { + return m.TargetRequestCountPerSecond + } + return 0 +} + +func (m *RequestUtilization) GetTargetConcurrentRequests() int32 { + if m != nil { + return m.TargetConcurrentRequests + } + return 0 +} + +// Target scaling by disk usage. Only applicable for VM runtimes. +type DiskUtilization struct { + // Target bytes written per second. + TargetWriteBytesPerSecond int32 `protobuf:"varint,14,opt,name=target_write_bytes_per_second,json=targetWriteBytesPerSecond,proto3" json:"target_write_bytes_per_second,omitempty"` + // Target ops written per second. + TargetWriteOpsPerSecond int32 `protobuf:"varint,15,opt,name=target_write_ops_per_second,json=targetWriteOpsPerSecond,proto3" json:"target_write_ops_per_second,omitempty"` + // Target bytes read per second. + TargetReadBytesPerSecond int32 `protobuf:"varint,16,opt,name=target_read_bytes_per_second,json=targetReadBytesPerSecond,proto3" json:"target_read_bytes_per_second,omitempty"` + // Target ops read per seconds. 
+ TargetReadOpsPerSecond int32 `protobuf:"varint,17,opt,name=target_read_ops_per_second,json=targetReadOpsPerSecond,proto3" json:"target_read_ops_per_second,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiskUtilization) Reset() { *m = DiskUtilization{} } +func (m *DiskUtilization) String() string { return proto.CompactTextString(m) } +func (*DiskUtilization) ProtoMessage() {} +func (*DiskUtilization) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{6} +} +func (m *DiskUtilization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiskUtilization.Unmarshal(m, b) +} +func (m *DiskUtilization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiskUtilization.Marshal(b, m, deterministic) +} +func (dst *DiskUtilization) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiskUtilization.Merge(dst, src) +} +func (m *DiskUtilization) XXX_Size() int { + return xxx_messageInfo_DiskUtilization.Size(m) +} +func (m *DiskUtilization) XXX_DiscardUnknown() { + xxx_messageInfo_DiskUtilization.DiscardUnknown(m) +} + +var xxx_messageInfo_DiskUtilization proto.InternalMessageInfo + +func (m *DiskUtilization) GetTargetWriteBytesPerSecond() int32 { + if m != nil { + return m.TargetWriteBytesPerSecond + } + return 0 +} + +func (m *DiskUtilization) GetTargetWriteOpsPerSecond() int32 { + if m != nil { + return m.TargetWriteOpsPerSecond + } + return 0 +} + +func (m *DiskUtilization) GetTargetReadBytesPerSecond() int32 { + if m != nil { + return m.TargetReadBytesPerSecond + } + return 0 +} + +func (m *DiskUtilization) GetTargetReadOpsPerSecond() int32 { + if m != nil { + return m.TargetReadOpsPerSecond + } + return 0 +} + +// Target scaling by network usage. Only applicable for VM runtimes. +type NetworkUtilization struct { + // Target bytes sent per second. + TargetSentBytesPerSecond int32 `protobuf:"varint,1,opt,name=target_sent_bytes_per_second,json=targetSentBytesPerSecond,proto3" json:"target_sent_bytes_per_second,omitempty"` + // Target packets sent per second. + TargetSentPacketsPerSecond int32 `protobuf:"varint,11,opt,name=target_sent_packets_per_second,json=targetSentPacketsPerSecond,proto3" json:"target_sent_packets_per_second,omitempty"` + // Target bytes received per second. + TargetReceivedBytesPerSecond int32 `protobuf:"varint,12,opt,name=target_received_bytes_per_second,json=targetReceivedBytesPerSecond,proto3" json:"target_received_bytes_per_second,omitempty"` + // Target packets received per second. 
+ TargetReceivedPacketsPerSecond int32 `protobuf:"varint,13,opt,name=target_received_packets_per_second,json=targetReceivedPacketsPerSecond,proto3" json:"target_received_packets_per_second,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkUtilization) Reset() { *m = NetworkUtilization{} } +func (m *NetworkUtilization) String() string { return proto.CompactTextString(m) } +func (*NetworkUtilization) ProtoMessage() {} +func (*NetworkUtilization) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{7} +} +func (m *NetworkUtilization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkUtilization.Unmarshal(m, b) +} +func (m *NetworkUtilization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkUtilization.Marshal(b, m, deterministic) +} +func (dst *NetworkUtilization) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkUtilization.Merge(dst, src) +} +func (m *NetworkUtilization) XXX_Size() int { + return xxx_messageInfo_NetworkUtilization.Size(m) +} +func (m *NetworkUtilization) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkUtilization.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkUtilization proto.InternalMessageInfo + +func (m *NetworkUtilization) GetTargetSentBytesPerSecond() int32 { + if m != nil { + return m.TargetSentBytesPerSecond + } + return 0 +} + +func (m *NetworkUtilization) GetTargetSentPacketsPerSecond() int32 { + if m != nil { + return m.TargetSentPacketsPerSecond + } + return 0 +} + +func (m *NetworkUtilization) GetTargetReceivedBytesPerSecond() int32 { + if m != nil { + return m.TargetReceivedBytesPerSecond + } + return 0 +} + +func (m *NetworkUtilization) GetTargetReceivedPacketsPerSecond() int32 { + if m != nil { + return m.TargetReceivedPacketsPerSecond + } + return 0 +} + +// Extra network settings. Only applicable for VM runtimes. +type Network struct { + // List of ports, or port pairs, to forward from the virtual machine to the + // application container. + ForwardedPorts []string `protobuf:"bytes,1,rep,name=forwarded_ports,json=forwardedPorts,proto3" json:"forwarded_ports,omitempty"` + // Tag to apply to the VM instance during creation. + InstanceTag string `protobuf:"bytes,2,opt,name=instance_tag,json=instanceTag,proto3" json:"instance_tag,omitempty"` + // Google Cloud Platform network where the virtual machines are created. + // Specify the short name, not the resource path. + // + // Defaults to `default`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Network) Reset() { *m = Network{} } +func (m *Network) String() string { return proto.CompactTextString(m) } +func (*Network) ProtoMessage() {} +func (*Network) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{8} +} +func (m *Network) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Network.Unmarshal(m, b) +} +func (m *Network) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Network.Marshal(b, m, deterministic) +} +func (dst *Network) XXX_Merge(src proto.Message) { + xxx_messageInfo_Network.Merge(dst, src) +} +func (m *Network) XXX_Size() int { + return xxx_messageInfo_Network.Size(m) +} +func (m *Network) XXX_DiscardUnknown() { + xxx_messageInfo_Network.DiscardUnknown(m) +} + +var xxx_messageInfo_Network proto.InternalMessageInfo + +func (m *Network) GetForwardedPorts() []string { + if m != nil { + return m.ForwardedPorts + } + return nil +} + +func (m *Network) GetInstanceTag() string { + if m != nil { + return m.InstanceTag + } + return "" +} + +func (m *Network) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Machine resources for a version. +type Resources struct { + // Number of CPU cores needed. + Cpu float64 `protobuf:"fixed64,1,opt,name=cpu,proto3" json:"cpu,omitempty"` + // Disk size (GB) needed. + DiskGb float64 `protobuf:"fixed64,2,opt,name=disk_gb,json=diskGb,proto3" json:"disk_gb,omitempty"` + // Memory (GB) needed. + MemoryGb float64 `protobuf:"fixed64,3,opt,name=memory_gb,json=memoryGb,proto3" json:"memory_gb,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resources) Reset() { *m = Resources{} } +func (m *Resources) String() string { return proto.CompactTextString(m) } +func (*Resources) ProtoMessage() {} +func (*Resources) Descriptor() ([]byte, []int) { + return fileDescriptor_version_af454e22c8704a00, []int{9} +} +func (m *Resources) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resources.Unmarshal(m, b) +} +func (m *Resources) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resources.Marshal(b, m, deterministic) +} +func (dst *Resources) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resources.Merge(dst, src) +} +func (m *Resources) XXX_Size() int { + return xxx_messageInfo_Resources.Size(m) +} +func (m *Resources) XXX_DiscardUnknown() { + xxx_messageInfo_Resources.DiscardUnknown(m) +} + +var xxx_messageInfo_Resources proto.InternalMessageInfo + +func (m *Resources) GetCpu() float64 { + if m != nil { + return m.Cpu + } + return 0 +} + +func (m *Resources) GetDiskGb() float64 { + if m != nil { + return m.DiskGb + } + return 0 +} + +func (m *Resources) GetMemoryGb() float64 { + if m != nil { + return m.MemoryGb + } + return 0 +} + +func init() { + proto.RegisterType((*Version)(nil), "google.appengine.v1.Version") + proto.RegisterMapType((map[string]string)(nil), "google.appengine.v1.Version.BetaSettingsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.appengine.v1.Version.EnvVariablesEntry") + proto.RegisterType((*AutomaticScaling)(nil), "google.appengine.v1.AutomaticScaling") + proto.RegisterType((*BasicScaling)(nil), "google.appengine.v1.BasicScaling") + proto.RegisterType((*ManualScaling)(nil), 
"google.appengine.v1.ManualScaling") + proto.RegisterType((*CpuUtilization)(nil), "google.appengine.v1.CpuUtilization") + proto.RegisterType((*RequestUtilization)(nil), "google.appengine.v1.RequestUtilization") + proto.RegisterType((*DiskUtilization)(nil), "google.appengine.v1.DiskUtilization") + proto.RegisterType((*NetworkUtilization)(nil), "google.appengine.v1.NetworkUtilization") + proto.RegisterType((*Network)(nil), "google.appengine.v1.Network") + proto.RegisterType((*Resources)(nil), "google.appengine.v1.Resources") + proto.RegisterEnum("google.appengine.v1.InboundServiceType", InboundServiceType_name, InboundServiceType_value) + proto.RegisterEnum("google.appengine.v1.ServingStatus", ServingStatus_name, ServingStatus_value) +} + +func init() { + proto.RegisterFile("google/appengine/v1/version.proto", fileDescriptor_version_af454e22c8704a00) +} + +var fileDescriptor_version_af454e22c8704a00 = []byte{ + // 1767 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x58, 0x5d, 0x73, 0xdb, 0xc6, + 0x15, 0x0d, 0x29, 0x4b, 0x14, 0x2f, 0x3f, 0x04, 0xad, 0xd3, 0x08, 0x96, 0x64, 0x89, 0x66, 0x92, + 0xb1, 0x26, 0xd3, 0x50, 0x13, 0x77, 0x26, 0xed, 0x38, 0x9e, 0xa6, 0x22, 0x45, 0x5b, 0x6c, 0xf5, + 0xc1, 0x01, 0x48, 0x3b, 0xd3, 0x17, 0xcc, 0x12, 0x58, 0x41, 0x5b, 0x01, 0x0b, 0x64, 0xb1, 0xa0, + 0xc4, 0xfe, 0x87, 0x3e, 0xb6, 0xd3, 0x1f, 0xd1, 0xc7, 0xbe, 0xf5, 0xcf, 0x75, 0x76, 0x01, 0x90, + 0x20, 0x89, 0xd8, 0xed, 0x1b, 0xf7, 0xee, 0x39, 0x67, 0x0f, 0xee, 0xde, 0xbd, 0x0b, 0x10, 0x5e, + 0xb8, 0x41, 0xe0, 0x7a, 0xe4, 0x14, 0x87, 0x21, 0x61, 0x2e, 0x65, 0xe4, 0x74, 0xfa, 0xdd, 0xe9, + 0x94, 0xf0, 0x88, 0x06, 0xac, 0x13, 0xf2, 0x40, 0x04, 0xe8, 0x69, 0x02, 0xe9, 0xcc, 0x21, 0x9d, + 0xe9, 0x77, 0xfb, 0x87, 0x73, 0x1e, 0x3d, 0xc5, 0x8c, 0x05, 0x02, 0x0b, 0x1a, 0xb0, 0x28, 0xa1, + 0xec, 0xb7, 0x8b, 0x54, 0x71, 0x18, 0x5a, 0x33, 0xec, 0x7b, 0x29, 0xa6, 0x55, 0x84, 0x71, 0x48, + 0xe8, 0x05, 0xb3, 0x14, 0x71, 0x94, 0x22, 0xd4, 0x68, 0x12, 0xdf, 0x9e, 0x3a, 0x31, 0x57, 0xcb, + 0xa4, 0xf3, 0xc7, 0xab, 0xf3, 0x82, 0xfa, 0x24, 0x12, 0xd8, 0x0f, 0x13, 0x40, 0xfb, 0xef, 0x0d, + 0xa8, 0xbc, 0x4f, 0x9e, 0x05, 0x21, 0x78, 0xc2, 0xb0, 0x4f, 0xf4, 0x52, 0xab, 0x74, 0x52, 0x35, + 0xd4, 0x6f, 0xd4, 0x84, 0x32, 0x75, 0xf4, 0xb2, 0x8a, 0x94, 0xa9, 0x83, 0x46, 0xb0, 0x8b, 0x63, + 0x11, 0xf8, 0x58, 0x50, 0xdb, 0x8a, 0x6c, 0xec, 0x51, 0xe6, 0xea, 0x1b, 0xad, 0xd2, 0x49, 0xed, + 0xd5, 0xd7, 0x9d, 0x82, 0x2c, 0x74, 0xce, 0x32, 0xb4, 0x99, 0x80, 0x2f, 0x3e, 0x33, 0x34, 0xbc, + 0x12, 0x43, 0x17, 0xd0, 0x98, 0xe0, 0x28, 0xa7, 0xf8, 0x44, 0x29, 0xbe, 0x28, 0x54, 0xec, 0x4a, + 0xe4, 0x42, 0xad, 0x3e, 0xc9, 0x8d, 0xd1, 0x9f, 0xa0, 0xe9, 0x63, 0x16, 0x63, 0x6f, 0x2e, 0xb5, + 0xa9, 0xa4, 0xda, 0x85, 0x52, 0x57, 0x0a, 0xba, 0xd0, 0x6a, 0xf8, 0xf9, 0x00, 0x32, 0x40, 0xa3, + 0x6c, 0x12, 0xc4, 0xcc, 0xb1, 0x22, 0xc2, 0xa7, 0xd4, 0x26, 0x91, 0xbe, 0xd5, 0xda, 0x38, 0x69, + 0xbe, 0x7a, 0x59, 0x28, 0x37, 0x48, 0xc0, 0x66, 0x82, 0x1d, 0xcd, 0x42, 0x62, 0xec, 0xd0, 0xa5, + 0x58, 0x84, 0xbe, 0x86, 0x26, 0x65, 0x91, 0xc0, 0xcc, 0x26, 0x96, 0xed, 0xe1, 0x28, 0xd2, 0x2b, + 0x2a, 0xb9, 0x8d, 0x2c, 0xda, 0x93, 0x41, 0xf4, 0x3d, 0x54, 0x18, 0x11, 0x0f, 0x01, 0xbf, 0xd7, + 0xb7, 0xd5, 0x03, 0x1c, 0x16, 0xae, 0x78, 0x9d, 0x60, 0x8c, 0x0c, 0x8c, 0xde, 0x40, 0x95, 0x93, + 0x28, 0x88, 0xb9, 0xf4, 0x5a, 0x55, 0xcc, 0xa3, 0x42, 0xa6, 0x91, 0xa1, 0x8c, 0x05, 0x01, 0xe9, + 0x50, 0xe1, 0x31, 0x93, 0x35, 0xa2, 0x83, 0x72, 0x95, 0x0d, 0xd1, 0x11, 0x80, 0xb8, 0xe3, 0x04, + 0x3b, 0x11, 0xbe, 0x25, 0x7a, 
0xad, 0x55, 0x3a, 0xd9, 0x36, 0x72, 0x11, 0x59, 0x27, 0x53, 0x5f, + 0xaf, 0xab, 0x78, 0x79, 0xea, 0x23, 0x13, 0x1a, 0x13, 0x22, 0xb0, 0x15, 0x11, 0x21, 0x28, 0x73, + 0x23, 0xbd, 0xd1, 0xda, 0x38, 0xa9, 0xbd, 0xea, 0x14, 0x7a, 0x49, 0x0b, 0xb0, 0xd3, 0x25, 0x02, + 0x9b, 0x29, 0xa1, 0xcf, 0x04, 0x9f, 0x19, 0xf5, 0x49, 0x2e, 0x84, 0x34, 0xd8, 0x20, 0x6c, 0xaa, + 0x37, 0x95, 0x35, 0xf9, 0x13, 0x0d, 0xa0, 0xa9, 0x76, 0x86, 0xb9, 0x56, 0x24, 0xb0, 0x88, 0x23, + 0x7d, 0xa7, 0x55, 0x3a, 0x69, 0xfe, 0xc2, 0x76, 0x9b, 0x09, 0xd4, 0x54, 0x48, 0xa3, 0x11, 0xe5, + 0x87, 0xe8, 0x39, 0x80, 0xcd, 0x09, 0x16, 0xc4, 0xb1, 0x26, 0x33, 0x5d, 0x53, 0x6b, 0x54, 0xd3, + 0x48, 0x77, 0x86, 0x7e, 0x80, 0x5a, 0x32, 0xb0, 0x54, 0x7a, 0x76, 0x55, 0x6a, 0xf7, 0xb3, 0x65, + 0xb2, 0xf3, 0xd5, 0x19, 0x65, 0xe7, 0xcb, 0x48, 0xd5, 0x64, 0x00, 0x9d, 0x80, 0xe6, 0xd0, 0xe8, + 0xde, 0x8a, 0x23, 0xec, 0x12, 0x6b, 0x32, 0x13, 0x24, 0xd2, 0x51, 0xab, 0x74, 0xb2, 0x61, 0x34, + 0x65, 0x7c, 0x2c, 0xc3, 0x5d, 0x19, 0x45, 0xbf, 0x85, 0xed, 0x3b, 0xcc, 0x1c, 0x8f, 0xf0, 0x48, + 0x77, 0x54, 0xca, 0x0e, 0x0a, 0x1f, 0x65, 0xcc, 0xbd, 0x2b, 0x1c, 0x1a, 0x73, 0x30, 0xba, 0x80, + 0x26, 0xe1, 0x3c, 0xe0, 0xd6, 0x9c, 0x4e, 0x14, 0xbd, 0xf8, 0x0c, 0xf5, 0x25, 0xf4, 0x22, 0x41, + 0x1a, 0x0d, 0x92, 0x1b, 0x45, 0xe8, 0x35, 0x54, 0x3d, 0x3a, 0xe1, 0x98, 0x53, 0x12, 0xe9, 0xb7, + 0x4a, 0xa4, 0xb8, 0xf8, 0x2e, 0x15, 0x6a, 0x66, 0x2c, 0xe0, 0xe8, 0x1c, 0x00, 0x87, 0xd4, 0xb2, + 0x03, 0x76, 0x4b, 0x5d, 0xdd, 0xfd, 0x58, 0x5f, 0x08, 0x69, 0x4f, 0xa1, 0x32, 0x17, 0x55, 0x9c, + 0x45, 0x64, 0xf1, 0x10, 0x36, 0xb5, 0xa6, 0x98, 0x53, 0x3c, 0xf1, 0x48, 0xa4, 0xdf, 0xfd, 0x0f, + 0xc5, 0xd3, 0x67, 0xd3, 0xf7, 0x19, 0x21, 0x2d, 0x1e, 0x92, 0x0b, 0xa1, 0x0b, 0x40, 0x0e, 0xb9, + 0xc5, 0xb1, 0x27, 0x2c, 0xf2, 0x18, 0xd2, 0xa4, 0x4d, 0xea, 0x54, 0x59, 0x7c, 0xb6, 0xb6, 0x8f, + 0xe7, 0x69, 0x1f, 0x35, 0x76, 0x53, 0x52, 0x7f, 0xce, 0x41, 0x3d, 0xa8, 0xdf, 0x11, 0xec, 0x89, + 0x3b, 0xcb, 0xbe, 0x23, 0xf6, 0xbd, 0xfe, 0x17, 0xa5, 0xd1, 0x2a, 0x74, 0x77, 0xa1, 0x80, 0x3d, + 0x89, 0x33, 0x6a, 0x77, 0x8b, 0x01, 0xea, 0xc0, 0x53, 0x16, 0x4c, 0x62, 0xea, 0x39, 0xd6, 0x2d, + 0xf5, 0x48, 0x64, 0x71, 0xe2, 0x92, 0x47, 0xfd, 0x5e, 0xd5, 0xdd, 0x6e, 0x3a, 0xf5, 0x56, 0xce, + 0x18, 0x72, 0x02, 0xfd, 0x08, 0x90, 0x74, 0x7e, 0x9f, 0x30, 0xa1, 0x7b, 0x6a, 0xc9, 0xe3, 0xc2, + 0x25, 0xcf, 0xe7, 0x30, 0x23, 0x47, 0x41, 0xc7, 0x50, 0x4b, 0x2f, 0x2d, 0x2b, 0xe6, 0x9e, 0xee, + 0xab, 0x85, 0x20, 0x0d, 0x8d, 0xb9, 0xb7, 0xff, 0x23, 0xec, 0xae, 0x1d, 0x40, 0x79, 0xe4, 0xee, + 0xc9, 0x2c, 0xbd, 0x12, 0xe4, 0x4f, 0xf4, 0x39, 0x6c, 0x4e, 0xb1, 0x17, 0x93, 0xf4, 0x52, 0x48, + 0x06, 0xaf, 0xcb, 0xbf, 0x2b, 0x49, 0x81, 0xb5, 0x4d, 0xf8, 0x7f, 0x04, 0xba, 0x55, 0xa8, 0xa4, + 0x5d, 0xbb, 0xfd, 0xef, 0x2d, 0xd0, 0x56, 0xaf, 0x0e, 0xd4, 0x03, 0xcd, 0x0e, 0x02, 0xcf, 0x72, + 0x82, 0x07, 0x66, 0x85, 0x84, 0xd3, 0xc0, 0x51, 0xc2, 0x1f, 0xdd, 0xc0, 0xa6, 0xa4, 0x9c, 0x07, + 0x0f, 0x6c, 0xa8, 0x08, 0xe8, 0x12, 0x76, 0xec, 0x30, 0xb6, 0x62, 0x41, 0x3d, 0xfa, 0xd7, 0xa4, + 0x08, 0xca, 0x4a, 0xe3, 0xcb, 0xc2, 0x6c, 0xf6, 0xc2, 0x78, 0xbc, 0x80, 0x1a, 0x4d, 0x7b, 0x69, + 0x8c, 0xbe, 0x87, 0x3d, 0x1f, 0x3f, 0xca, 0x82, 0xb7, 0x63, 0xce, 0x09, 0x13, 0x16, 0x27, 0x3f, + 0xc7, 0x24, 0x12, 0x91, 0xba, 0x15, 0x37, 0x8d, 0x5f, 0xf9, 0xf8, 0xb1, 0x37, 0x9f, 0x35, 0xd2, + 0x49, 0xf4, 0x6b, 0x40, 0x92, 0x47, 0x1d, 0x8f, 0x58, 0x59, 0xe7, 0x8f, 0xd4, 0xb5, 0xb7, 0x69, + 0x68, 0x3e, 0x7e, 0x1c, 0x38, 0x1e, 0x19, 0x64, 0x71, 0x59, 0x2c, 0x12, 0x2d, 0x02, 0x81, 0xbd, + 0x1c, 0x7c, 0x53, 0xc1, 0x77, 0x7d, 0xfc, 0x38, 0x92, 
0x33, 0x0b, 0xfc, 0x20, 0xc1, 0x87, 0x84, + 0x39, 0xb2, 0x35, 0x7a, 0x58, 0x10, 0x66, 0xcf, 0xf4, 0xad, 0x4f, 0x16, 0xbb, 0x8f, 0x1f, 0x87, + 0x09, 0xe9, 0x32, 0xe1, 0x28, 0xa3, 0x94, 0xad, 0x1a, 0xad, 0xa4, 0x46, 0x29, 0x5b, 0x37, 0x4a, + 0xd9, 0x9a, 0xd1, 0xed, 0xd4, 0x28, 0x65, 0x05, 0x46, 0x29, 0x5b, 0x33, 0x5a, 0xfd, 0xb4, 0x51, + 0xca, 0x56, 0x8c, 0xfe, 0x04, 0x4f, 0xd3, 0xd4, 0x2f, 0xed, 0x2d, 0x28, 0xa9, 0x97, 0xbf, 0x70, + 0x07, 0x2a, 0x7c, 0x7e, 0x7f, 0x11, 0x5f, 0x8b, 0xa1, 0x9b, 0xac, 0x7b, 0xe7, 0x64, 0x6b, 0x4a, + 0xf6, 0xab, 0xe2, 0x03, 0x28, 0x5b, 0x7a, 0x4e, 0x73, 0xc7, 0x59, 0x0e, 0x48, 0xab, 0xe9, 0x7d, + 0xbd, 0xa4, 0x59, 0xff, 0x88, 0xd5, 0xf4, 0xa2, 0x5f, 0xb2, 0xca, 0xd6, 0x62, 0xed, 0x9f, 0xa1, + 0x9e, 0x7f, 0x3d, 0x42, 0x6f, 0xa0, 0xae, 0x76, 0x4e, 0xde, 0x59, 0x41, 0x2c, 0x3e, 0x7d, 0x5a, + 0x6a, 0x12, 0x3e, 0x4a, 0xd0, 0xe8, 0x4b, 0x68, 0xa8, 0x22, 0x9d, 0xef, 0x63, 0x59, 0xed, 0x63, + 0x5d, 0xd6, 0x67, 0x16, 0x6b, 0x7f, 0x0b, 0x8d, 0xa5, 0xd7, 0x28, 0x74, 0x08, 0xd5, 0x05, 0xa3, + 0xa4, 0x18, 0x8b, 0x40, 0xfb, 0x1f, 0x25, 0x68, 0x2e, 0x9f, 0x29, 0x34, 0x86, 0x67, 0xd8, 0x75, + 0x39, 0x71, 0xd5, 0xd0, 0x7a, 0xa0, 0xcc, 0x09, 0x1e, 0x2c, 0x8f, 0x30, 0x57, 0xdc, 0x7d, 0xda, + 0xf1, 0x5e, 0x8e, 0xfb, 0x41, 0x51, 0x2f, 0x15, 0x13, 0x7d, 0x0b, 0x48, 0x60, 0xee, 0x12, 0xb1, + 0x76, 0xd6, 0x4b, 0xc6, 0x6e, 0x32, 0x93, 0x4f, 0xdd, 0x3f, 0x4b, 0x80, 0xd6, 0x0b, 0x02, 0x9d, + 0xc3, 0x71, 0xaa, 0x92, 0x55, 0x97, 0x1d, 0xc4, 0x4c, 0xc8, 0xf6, 0x63, 0x45, 0xc4, 0x0e, 0x98, + 0x93, 0x3e, 0xe3, 0x41, 0x02, 0x4b, 0x25, 0x7a, 0x12, 0x34, 0x24, 0xdc, 0x54, 0x10, 0xf4, 0x06, + 0xf6, 0x53, 0x95, 0xa2, 0x4e, 0x91, 0xa4, 0x55, 0x4f, 0x10, 0xeb, 0xcd, 0xa2, 0xfd, 0xb7, 0x32, + 0xec, 0xac, 0x14, 0x15, 0xfa, 0x03, 0x3c, 0x4f, 0x15, 0x1f, 0x38, 0x15, 0xe9, 0x4b, 0x45, 0xde, + 0x55, 0x53, 0x89, 0x3e, 0x4b, 0x40, 0x1f, 0x24, 0x46, 0xbd, 0x61, 0xe4, 0x3d, 0x1d, 0x2c, 0x29, + 0x04, 0xe1, 0x12, 0x7f, 0x47, 0xf1, 0xf7, 0x72, 0xfc, 0x9b, 0x30, 0xc7, 0xfe, 0x3d, 0x1c, 0xce, + 0xf3, 0x82, 0x9d, 0xf5, 0xe5, 0xb5, 0xfc, 0x33, 0x19, 0x04, 0x3b, 0x2b, 0xab, 0xbf, 0x9e, 0x67, + 0x44, 0xf1, 0x57, 0x16, 0xdf, 0x55, 0xec, 0x2f, 0x16, 0xec, 0xfc, 0xda, 0xed, 0x7f, 0x95, 0x01, + 0xad, 0x1f, 0x88, 0x9c, 0xa5, 0x48, 0xa6, 0x77, 0xcd, 0x52, 0x29, 0x6f, 0xc9, 0x24, 0x4c, 0xac, + 0x58, 0xea, 0xc2, 0x51, 0x9e, 0x1f, 0x62, 0xfb, 0x9e, 0x88, 0x25, 0x85, 0x9a, 0x52, 0xd8, 0x5f, + 0x28, 0x0c, 0x13, 0xcc, 0x42, 0xe3, 0x2d, 0xb4, 0xe6, 0x8f, 0x65, 0x13, 0x3a, 0x25, 0x05, 0xa9, + 0xa9, 0x2b, 0x95, 0xc3, 0xec, 0xe1, 0x12, 0xd8, 0x8a, 0x97, 0x3f, 0x42, 0x7b, 0x55, 0xa7, 0xc0, + 0x4f, 0x43, 0x29, 0x1d, 0x2d, 0x2b, 0xad, 0x7a, 0x6a, 0x53, 0xa8, 0xa4, 0xd9, 0x42, 0x2f, 0x61, + 0xe7, 0x36, 0xe0, 0x0f, 0x98, 0x3b, 0x52, 0x30, 0xe0, 0x42, 0x9e, 0xd0, 0x8d, 0x93, 0xaa, 0xd1, + 0x9c, 0x87, 0x87, 0x32, 0x8a, 0x5e, 0x40, 0x7d, 0xfe, 0x99, 0x22, 0xb0, 0x9b, 0xde, 0xd5, 0xb5, + 0x2c, 0x36, 0xc2, 0xee, 0xfc, 0x73, 0x71, 0x63, 0xf1, 0xb9, 0xd8, 0x36, 0xa1, 0x3a, 0xff, 0xb0, + 0x90, 0x57, 0xbf, 0x1d, 0xc6, 0x2a, 0xed, 0x25, 0x43, 0xfe, 0x44, 0x7b, 0x50, 0x51, 0x9d, 0xd4, + 0x9d, 0xa4, 0xe7, 0x70, 0x4b, 0x0e, 0xdf, 0x4d, 0xd0, 0x01, 0x54, 0x7d, 0xe2, 0x07, 0x7c, 0x26, + 0xa7, 0x36, 0xd4, 0xd4, 0x76, 0x12, 0x78, 0x37, 0xf9, 0xe6, 0x3f, 0x65, 0x40, 0xeb, 0x9f, 0x56, + 0xe8, 0x18, 0x0e, 0x06, 0xd7, 0xdd, 0x9b, 0xf1, 0xf5, 0xb9, 0x65, 0xf6, 0x8d, 0xf7, 0x83, 0x5e, + 0xdf, 0x1a, 0x5f, 0x9b, 0xc3, 0x7e, 0x6f, 0xf0, 0x76, 0xd0, 0x3f, 0xd7, 0x3e, 0x43, 0x3a, 0x7c, + 0xbe, 0x0a, 0xb8, 0x3a, 0x1b, 0x5c, 0x6a, 0xa5, 0x22, 0xaa, 0x9c, 0xb1, 0x64, 
0xa8, 0xd7, 0xd7, + 0xca, 0xe8, 0x08, 0xf6, 0x57, 0x01, 0x3f, 0x5d, 0x0d, 0x87, 0x56, 0xdf, 0x30, 0x6e, 0x0c, 0x6d, + 0x03, 0xb5, 0xe0, 0xb0, 0x70, 0xfe, 0xaa, 0x6f, 0x9a, 0x67, 0xef, 0xfa, 0xda, 0x13, 0xd4, 0x86, + 0xa3, 0x42, 0x84, 0x39, 0xee, 0x9a, 0x3d, 0x63, 0xd0, 0xed, 0x6b, 0x9b, 0xe8, 0x05, 0x3c, 0x2f, + 0xc4, 0x0c, 0x8d, 0xbe, 0xd9, 0x97, 0x46, 0xb6, 0xd0, 0x57, 0xd0, 0x5a, 0x85, 0xf4, 0x2e, 0xce, + 0xae, 0xaf, 0xfb, 0x97, 0x0b, 0x54, 0x05, 0xed, 0xc3, 0x17, 0xab, 0xa8, 0x0f, 0x67, 0xc6, 0xd5, + 0x78, 0xa8, 0x55, 0xbf, 0x19, 0x40, 0x63, 0xe9, 0xbb, 0x47, 0x3e, 0x9b, 0x02, 0x5d, 0xbf, 0xb3, + 0xcc, 0xd1, 0xd9, 0x68, 0x6c, 0xae, 0xa4, 0xad, 0x06, 0x95, 0x74, 0x5e, 0x2b, 0xa9, 0xc1, 0xe8, + 0x66, 0x38, 0xec, 0x9f, 0x6b, 0xe5, 0xae, 0x0b, 0x7b, 0x76, 0xe0, 0x17, 0xdd, 0x4f, 0xdd, 0x7a, + 0xfa, 0x1a, 0x3e, 0x94, 0xfd, 0x79, 0x58, 0xfa, 0xf3, 0x9b, 0x14, 0xe4, 0x06, 0x1e, 0x66, 0x6e, + 0x27, 0xe0, 0xee, 0xa9, 0x4b, 0x98, 0xea, 0xde, 0xa7, 0xc9, 0x14, 0x0e, 0x69, 0xb4, 0xf4, 0xcf, + 0xc6, 0x0f, 0xf3, 0xc1, 0x64, 0x4b, 0x01, 0x7f, 0xf3, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2f, + 0xe1, 0xd6, 0xfa, 0x7b, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1/embedded_assistant.pb.go b/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1/embedded_assistant.pb.go new file mode 100644 index 0000000..ebd7170 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1/embedded_assistant.pb.go @@ -0,0 +1,1142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/assistant/embedded/v1alpha1/embedded_assistant.proto + +package embedded // import "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Audio encoding of the data sent in the audio message. +// Audio must be one-channel (mono). The only language supported is "en-US". +type AudioInConfig_Encoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioInConfig_ENCODING_UNSPECIFIED AudioInConfig_Encoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // This encoding includes no header, only the raw audio bytes. + AudioInConfig_LINEAR16 AudioInConfig_Encoding = 1 + // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio + // Codec) is the recommended encoding because it is + // lossless--therefore recognition is not compromised--and + // requires only about half the bandwidth of `LINEAR16`. This encoding + // includes the `FLAC` stream header followed by audio data. It supports + // 16-bit and 24-bit samples, however, not all fields in `STREAMINFO` are + // supported. 
+ AudioInConfig_FLAC AudioInConfig_Encoding = 2 +) + +var AudioInConfig_Encoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "FLAC", +} +var AudioInConfig_Encoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "FLAC": 2, +} + +func (x AudioInConfig_Encoding) String() string { + return proto.EnumName(AudioInConfig_Encoding_name, int32(x)) +} +func (AudioInConfig_Encoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{1, 0} +} + +// Audio encoding of the data returned in the audio message. All encodings are +// raw audio bytes with no header, except as indicated below. +type AudioOutConfig_Encoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioOutConfig_ENCODING_UNSPECIFIED AudioOutConfig_Encoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + AudioOutConfig_LINEAR16 AudioOutConfig_Encoding = 1 + // MP3 audio encoding. The sample rate is encoded in the payload. + AudioOutConfig_MP3 AudioOutConfig_Encoding = 2 + // Opus-encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android and in some browsers (such + // as Chrome). The quality of the encoding is considerably higher than MP3 + // while using the same bitrate. The sample rate is encoded in the payload. + AudioOutConfig_OPUS_IN_OGG AudioOutConfig_Encoding = 3 +) + +var AudioOutConfig_Encoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "MP3", + 3: "OPUS_IN_OGG", +} +var AudioOutConfig_Encoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "MP3": 2, + "OPUS_IN_OGG": 3, +} + +func (x AudioOutConfig_Encoding) String() string { + return proto.EnumName(AudioOutConfig_Encoding_name, int32(x)) +} +func (AudioOutConfig_Encoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{2, 0} +} + +// Possible states of the microphone after a `Converse` RPC completes. +type ConverseResult_MicrophoneMode int32 + +const ( + // No mode specified. + ConverseResult_MICROPHONE_MODE_UNSPECIFIED ConverseResult_MicrophoneMode = 0 + // The service is not expecting a follow-on question from the user. + // The microphone should remain off until the user re-activates it. + ConverseResult_CLOSE_MICROPHONE ConverseResult_MicrophoneMode = 1 + // The service is expecting a follow-on question from the user. The + // microphone should be re-opened when the `AudioOut` playback completes + // (by starting a new `Converse` RPC call to send the new audio). + ConverseResult_DIALOG_FOLLOW_ON ConverseResult_MicrophoneMode = 2 +) + +var ConverseResult_MicrophoneMode_name = map[int32]string{ + 0: "MICROPHONE_MODE_UNSPECIFIED", + 1: "CLOSE_MICROPHONE", + 2: "DIALOG_FOLLOW_ON", +} +var ConverseResult_MicrophoneMode_value = map[string]int32{ + "MICROPHONE_MODE_UNSPECIFIED": 0, + "CLOSE_MICROPHONE": 1, + "DIALOG_FOLLOW_ON": 2, +} + +func (x ConverseResult_MicrophoneMode) String() string { + return proto.EnumName(ConverseResult_MicrophoneMode_name, int32(x)) +} +func (ConverseResult_MicrophoneMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{5, 0} +} + +// Indicates the type of event. +type ConverseResponse_EventType int32 + +const ( + // No event specified. 
+ ConverseResponse_EVENT_TYPE_UNSPECIFIED ConverseResponse_EventType = 0 + // This event indicates that the server has detected the end of the user's + // speech utterance and expects no additional speech. Therefore, the server + // will not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. + ConverseResponse_END_OF_UTTERANCE ConverseResponse_EventType = 1 +) + +var ConverseResponse_EventType_name = map[int32]string{ + 0: "EVENT_TYPE_UNSPECIFIED", + 1: "END_OF_UTTERANCE", +} +var ConverseResponse_EventType_value = map[string]int32{ + "EVENT_TYPE_UNSPECIFIED": 0, + "END_OF_UTTERANCE": 1, +} + +func (x ConverseResponse_EventType) String() string { + return proto.EnumName(ConverseResponse_EventType_name, int32(x)) +} +func (ConverseResponse_EventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{7, 0} +} + +// Specifies how to process the `ConverseRequest` messages. +type ConverseConfig struct { + // *Required* Specifies how to process the subsequent incoming audio. + AudioInConfig *AudioInConfig `protobuf:"bytes,1,opt,name=audio_in_config,json=audioInConfig,proto3" json:"audio_in_config,omitempty"` + // *Required* Specifies how to format the audio that will be returned. + AudioOutConfig *AudioOutConfig `protobuf:"bytes,2,opt,name=audio_out_config,json=audioOutConfig,proto3" json:"audio_out_config,omitempty"` + // *Required* Represents the current dialog state. + ConverseState *ConverseState `protobuf:"bytes,3,opt,name=converse_state,json=converseState,proto3" json:"converse_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConverseConfig) Reset() { *m = ConverseConfig{} } +func (m *ConverseConfig) String() string { return proto.CompactTextString(m) } +func (*ConverseConfig) ProtoMessage() {} +func (*ConverseConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{0} +} +func (m *ConverseConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConverseConfig.Unmarshal(m, b) +} +func (m *ConverseConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConverseConfig.Marshal(b, m, deterministic) +} +func (dst *ConverseConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConverseConfig.Merge(dst, src) +} +func (m *ConverseConfig) XXX_Size() int { + return xxx_messageInfo_ConverseConfig.Size(m) +} +func (m *ConverseConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ConverseConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ConverseConfig proto.InternalMessageInfo + +func (m *ConverseConfig) GetAudioInConfig() *AudioInConfig { + if m != nil { + return m.AudioInConfig + } + return nil +} + +func (m *ConverseConfig) GetAudioOutConfig() *AudioOutConfig { + if m != nil { + return m.AudioOutConfig + } + return nil +} + +func (m *ConverseConfig) GetConverseState() *ConverseState { + if m != nil { + return m.ConverseState + } + return nil +} + +// Specifies how to process the `audio_in` data that will be provided in +// subsequent requests. For recommended settings, see the Google Assistant SDK +// [best +// practices](https://developers.google.com/assistant/sdk/develop/grpc/best-practices/audio). 
+type AudioInConfig struct { + // *Required* Encoding of audio data sent in all `audio_in` messages. + Encoding AudioInConfig_Encoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.assistant.embedded.v1alpha1.AudioInConfig_Encoding" json:"encoding,omitempty"` + // *Required* Sample rate (in Hertz) of the audio data sent in all `audio_in` + // messages. Valid values are from 16000-24000, but 16000 is optimal. + // For best results, set the sampling rate of the audio source to 16000 Hz. + // If that's not possible, use the native sample rate of the audio source + // (instead of re-sampling). + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioInConfig) Reset() { *m = AudioInConfig{} } +func (m *AudioInConfig) String() string { return proto.CompactTextString(m) } +func (*AudioInConfig) ProtoMessage() {} +func (*AudioInConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{1} +} +func (m *AudioInConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioInConfig.Unmarshal(m, b) +} +func (m *AudioInConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioInConfig.Marshal(b, m, deterministic) +} +func (dst *AudioInConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioInConfig.Merge(dst, src) +} +func (m *AudioInConfig) XXX_Size() int { + return xxx_messageInfo_AudioInConfig.Size(m) +} +func (m *AudioInConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioInConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioInConfig proto.InternalMessageInfo + +func (m *AudioInConfig) GetEncoding() AudioInConfig_Encoding { + if m != nil { + return m.Encoding + } + return AudioInConfig_ENCODING_UNSPECIFIED +} + +func (m *AudioInConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +// Specifies the desired format for the server to use when it returns +// `audio_out` messages. +type AudioOutConfig struct { + // *Required* The encoding of audio data to be returned in all `audio_out` + // messages. + Encoding AudioOutConfig_Encoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.assistant.embedded.v1alpha1.AudioOutConfig_Encoding" json:"encoding,omitempty"` + // *Required* The sample rate in Hertz of the audio data returned in + // `audio_out` messages. Valid values are: 16000-24000. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // *Required* Current volume setting of the device's audio output. + // Valid values are 1 to 100 (corresponding to 1% to 100%). 
+ VolumePercentage int32 `protobuf:"varint,3,opt,name=volume_percentage,json=volumePercentage,proto3" json:"volume_percentage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioOutConfig) Reset() { *m = AudioOutConfig{} } +func (m *AudioOutConfig) String() string { return proto.CompactTextString(m) } +func (*AudioOutConfig) ProtoMessage() {} +func (*AudioOutConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{2} +} +func (m *AudioOutConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioOutConfig.Unmarshal(m, b) +} +func (m *AudioOutConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioOutConfig.Marshal(b, m, deterministic) +} +func (dst *AudioOutConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioOutConfig.Merge(dst, src) +} +func (m *AudioOutConfig) XXX_Size() int { + return xxx_messageInfo_AudioOutConfig.Size(m) +} +func (m *AudioOutConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioOutConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioOutConfig proto.InternalMessageInfo + +func (m *AudioOutConfig) GetEncoding() AudioOutConfig_Encoding { + if m != nil { + return m.Encoding + } + return AudioOutConfig_ENCODING_UNSPECIFIED +} + +func (m *AudioOutConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *AudioOutConfig) GetVolumePercentage() int32 { + if m != nil { + return m.VolumePercentage + } + return 0 +} + +// Provides information about the current dialog state. +type ConverseState struct { + // *Required* The `conversation_state` value returned in the prior + // `ConverseResponse`. Omit (do not set the field) if there was no prior + // `ConverseResponse`. If there was a prior `ConverseResponse`, do not omit + // this field; doing so will end that conversation (and this new request will + // start a new conversation). + ConversationState []byte `protobuf:"bytes,1,opt,name=conversation_state,json=conversationState,proto3" json:"conversation_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConverseState) Reset() { *m = ConverseState{} } +func (m *ConverseState) String() string { return proto.CompactTextString(m) } +func (*ConverseState) ProtoMessage() {} +func (*ConverseState) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{3} +} +func (m *ConverseState) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConverseState.Unmarshal(m, b) +} +func (m *ConverseState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConverseState.Marshal(b, m, deterministic) +} +func (dst *ConverseState) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConverseState.Merge(dst, src) +} +func (m *ConverseState) XXX_Size() int { + return xxx_messageInfo_ConverseState.Size(m) +} +func (m *ConverseState) XXX_DiscardUnknown() { + xxx_messageInfo_ConverseState.DiscardUnknown(m) +} + +var xxx_messageInfo_ConverseState proto.InternalMessageInfo + +func (m *ConverseState) GetConversationState() []byte { + if m != nil { + return m.ConversationState + } + return nil +} + +// The audio containing the assistant's response to the query. Sequential chunks +// of audio data are received in sequential `ConverseResponse` messages. 
+type AudioOut struct { + // *Output-only* The audio data containing the assistant's response to the + // query. Sequential chunks of audio data are received in sequential + // `ConverseResponse` messages. + AudioData []byte `protobuf:"bytes,1,opt,name=audio_data,json=audioData,proto3" json:"audio_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioOut) Reset() { *m = AudioOut{} } +func (m *AudioOut) String() string { return proto.CompactTextString(m) } +func (*AudioOut) ProtoMessage() {} +func (*AudioOut) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{4} +} +func (m *AudioOut) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioOut.Unmarshal(m, b) +} +func (m *AudioOut) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioOut.Marshal(b, m, deterministic) +} +func (dst *AudioOut) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioOut.Merge(dst, src) +} +func (m *AudioOut) XXX_Size() int { + return xxx_messageInfo_AudioOut.Size(m) +} +func (m *AudioOut) XXX_DiscardUnknown() { + xxx_messageInfo_AudioOut.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioOut proto.InternalMessageInfo + +func (m *AudioOut) GetAudioData() []byte { + if m != nil { + return m.AudioData + } + return nil +} + +// The semantic result for the user's spoken query. +type ConverseResult struct { + // *Output-only* The recognized transcript of what the user said. + SpokenRequestText string `protobuf:"bytes,1,opt,name=spoken_request_text,json=spokenRequestText,proto3" json:"spoken_request_text,omitempty"` + // *Output-only* The text of the assistant's spoken response. This is only + // returned for an IFTTT action. + SpokenResponseText string `protobuf:"bytes,2,opt,name=spoken_response_text,json=spokenResponseText,proto3" json:"spoken_response_text,omitempty"` + // *Output-only* State information for subsequent `ConverseRequest`. This + // value should be saved in the client and returned in the + // `conversation_state` with the next `ConverseRequest`. (The client does not + // need to interpret or otherwise use this value.) There is no need to save + // this information across device restarts. + ConversationState []byte `protobuf:"bytes,3,opt,name=conversation_state,json=conversationState,proto3" json:"conversation_state,omitempty"` + // *Output-only* Specifies the mode of the microphone after this `Converse` + // RPC is processed. + MicrophoneMode ConverseResult_MicrophoneMode `protobuf:"varint,4,opt,name=microphone_mode,json=microphoneMode,proto3,enum=google.assistant.embedded.v1alpha1.ConverseResult_MicrophoneMode" json:"microphone_mode,omitempty"` + // *Output-only* Updated volume level. The value will be 0 or omitted + // (indicating no change) unless a voice command such as "Increase the volume" + // or "Set volume level 4" was recognized, in which case the value will be + // between 1 and 100 (corresponding to the new volume level of 1% to 100%). + // Typically, a client should use this volume level when playing the + // `audio_out` data, and retain this value as the current volume level and + // supply it in the `AudioOutConfig` of the next `ConverseRequest`. (Some + // clients may also implement other ways to allow the current volume level to + // be changed, for example, by providing a knob that the user can turn.) 
+ VolumePercentage int32 `protobuf:"varint,5,opt,name=volume_percentage,json=volumePercentage,proto3" json:"volume_percentage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConverseResult) Reset() { *m = ConverseResult{} } +func (m *ConverseResult) String() string { return proto.CompactTextString(m) } +func (*ConverseResult) ProtoMessage() {} +func (*ConverseResult) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{5} +} +func (m *ConverseResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConverseResult.Unmarshal(m, b) +} +func (m *ConverseResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConverseResult.Marshal(b, m, deterministic) +} +func (dst *ConverseResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConverseResult.Merge(dst, src) +} +func (m *ConverseResult) XXX_Size() int { + return xxx_messageInfo_ConverseResult.Size(m) +} +func (m *ConverseResult) XXX_DiscardUnknown() { + xxx_messageInfo_ConverseResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ConverseResult proto.InternalMessageInfo + +func (m *ConverseResult) GetSpokenRequestText() string { + if m != nil { + return m.SpokenRequestText + } + return "" +} + +func (m *ConverseResult) GetSpokenResponseText() string { + if m != nil { + return m.SpokenResponseText + } + return "" +} + +func (m *ConverseResult) GetConversationState() []byte { + if m != nil { + return m.ConversationState + } + return nil +} + +func (m *ConverseResult) GetMicrophoneMode() ConverseResult_MicrophoneMode { + if m != nil { + return m.MicrophoneMode + } + return ConverseResult_MICROPHONE_MODE_UNSPECIFIED +} + +func (m *ConverseResult) GetVolumePercentage() int32 { + if m != nil { + return m.VolumePercentage + } + return 0 +} + +// The top-level message sent by the client. Clients must send at least two, and +// typically numerous `ConverseRequest` messages. The first message must +// contain a `config` message and must not contain `audio_in` data. All +// subsequent messages must contain `audio_in` data and must not contain a +// `config` message. +type ConverseRequest struct { + // Exactly one of these fields must be specified in each `ConverseRequest`. 
+ // + // Types that are valid to be assigned to ConverseRequest: + // *ConverseRequest_Config + // *ConverseRequest_AudioIn + ConverseRequest isConverseRequest_ConverseRequest `protobuf_oneof:"converse_request"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConverseRequest) Reset() { *m = ConverseRequest{} } +func (m *ConverseRequest) String() string { return proto.CompactTextString(m) } +func (*ConverseRequest) ProtoMessage() {} +func (*ConverseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{6} +} +func (m *ConverseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConverseRequest.Unmarshal(m, b) +} +func (m *ConverseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConverseRequest.Marshal(b, m, deterministic) +} +func (dst *ConverseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConverseRequest.Merge(dst, src) +} +func (m *ConverseRequest) XXX_Size() int { + return xxx_messageInfo_ConverseRequest.Size(m) +} +func (m *ConverseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ConverseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ConverseRequest proto.InternalMessageInfo + +type isConverseRequest_ConverseRequest interface { + isConverseRequest_ConverseRequest() +} + +type ConverseRequest_Config struct { + Config *ConverseConfig `protobuf:"bytes,1,opt,name=config,proto3,oneof"` +} + +type ConverseRequest_AudioIn struct { + AudioIn []byte `protobuf:"bytes,2,opt,name=audio_in,json=audioIn,proto3,oneof"` +} + +func (*ConverseRequest_Config) isConverseRequest_ConverseRequest() {} + +func (*ConverseRequest_AudioIn) isConverseRequest_ConverseRequest() {} + +func (m *ConverseRequest) GetConverseRequest() isConverseRequest_ConverseRequest { + if m != nil { + return m.ConverseRequest + } + return nil +} + +func (m *ConverseRequest) GetConfig() *ConverseConfig { + if x, ok := m.GetConverseRequest().(*ConverseRequest_Config); ok { + return x.Config + } + return nil +} + +func (m *ConverseRequest) GetAudioIn() []byte { + if x, ok := m.GetConverseRequest().(*ConverseRequest_AudioIn); ok { + return x.AudioIn + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConverseRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConverseRequest_OneofMarshaler, _ConverseRequest_OneofUnmarshaler, _ConverseRequest_OneofSizer, []interface{}{ + (*ConverseRequest_Config)(nil), + (*ConverseRequest_AudioIn)(nil), + } +} + +func _ConverseRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConverseRequest) + // converse_request + switch x := m.ConverseRequest.(type) { + case *ConverseRequest_Config: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Config); err != nil { + return err + } + case *ConverseRequest_AudioIn: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AudioIn) + case nil: + default: + return fmt.Errorf("ConverseRequest.ConverseRequest has unexpected type %T", x) + } + return nil +} + +func _ConverseRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConverseRequest) + switch tag { + case 1: // converse_request.config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ConverseConfig) + err := b.DecodeMessage(msg) + m.ConverseRequest = &ConverseRequest_Config{msg} + return true, err + case 2: // converse_request.audio_in + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConverseRequest = &ConverseRequest_AudioIn{x} + return true, err + default: + return false, nil + } +} + +func _ConverseRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConverseRequest) + // converse_request + switch x := m.ConverseRequest.(type) { + case *ConverseRequest_Config: + s := proto.Size(x.Config) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConverseRequest_AudioIn: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AudioIn))) + n += len(x.AudioIn) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The top-level message received by the client. A series of one or more +// `ConverseResponse` messages are streamed back to the client. +type ConverseResponse struct { + // Exactly one of these fields will be populated in each `ConverseResponse`. 
+ // + // Types that are valid to be assigned to ConverseResponse: + // *ConverseResponse_Error + // *ConverseResponse_EventType_ + // *ConverseResponse_AudioOut + // *ConverseResponse_Result + ConverseResponse isConverseResponse_ConverseResponse `protobuf_oneof:"converse_response"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConverseResponse) Reset() { *m = ConverseResponse{} } +func (m *ConverseResponse) String() string { return proto.CompactTextString(m) } +func (*ConverseResponse) ProtoMessage() {} +func (*ConverseResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_ed9e8ba6964a09be, []int{7} +} +func (m *ConverseResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConverseResponse.Unmarshal(m, b) +} +func (m *ConverseResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConverseResponse.Marshal(b, m, deterministic) +} +func (dst *ConverseResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConverseResponse.Merge(dst, src) +} +func (m *ConverseResponse) XXX_Size() int { + return xxx_messageInfo_ConverseResponse.Size(m) +} +func (m *ConverseResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ConverseResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ConverseResponse proto.InternalMessageInfo + +type isConverseResponse_ConverseResponse interface { + isConverseResponse_ConverseResponse() +} + +type ConverseResponse_Error struct { + Error *status.Status `protobuf:"bytes,1,opt,name=error,proto3,oneof"` +} + +type ConverseResponse_EventType_ struct { + EventType ConverseResponse_EventType `protobuf:"varint,2,opt,name=event_type,json=eventType,proto3,enum=google.assistant.embedded.v1alpha1.ConverseResponse_EventType,oneof"` +} + +type ConverseResponse_AudioOut struct { + AudioOut *AudioOut `protobuf:"bytes,3,opt,name=audio_out,json=audioOut,proto3,oneof"` +} + +type ConverseResponse_Result struct { + Result *ConverseResult `protobuf:"bytes,5,opt,name=result,proto3,oneof"` +} + +func (*ConverseResponse_Error) isConverseResponse_ConverseResponse() {} + +func (*ConverseResponse_EventType_) isConverseResponse_ConverseResponse() {} + +func (*ConverseResponse_AudioOut) isConverseResponse_ConverseResponse() {} + +func (*ConverseResponse_Result) isConverseResponse_ConverseResponse() {} + +func (m *ConverseResponse) GetConverseResponse() isConverseResponse_ConverseResponse { + if m != nil { + return m.ConverseResponse + } + return nil +} + +func (m *ConverseResponse) GetError() *status.Status { + if x, ok := m.GetConverseResponse().(*ConverseResponse_Error); ok { + return x.Error + } + return nil +} + +func (m *ConverseResponse) GetEventType() ConverseResponse_EventType { + if x, ok := m.GetConverseResponse().(*ConverseResponse_EventType_); ok { + return x.EventType + } + return ConverseResponse_EVENT_TYPE_UNSPECIFIED +} + +func (m *ConverseResponse) GetAudioOut() *AudioOut { + if x, ok := m.GetConverseResponse().(*ConverseResponse_AudioOut); ok { + return x.AudioOut + } + return nil +} + +func (m *ConverseResponse) GetResult() *ConverseResult { + if x, ok := m.GetConverseResponse().(*ConverseResponse_Result); ok { + return x.Result + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConverseResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConverseResponse_OneofMarshaler, _ConverseResponse_OneofUnmarshaler, _ConverseResponse_OneofSizer, []interface{}{ + (*ConverseResponse_Error)(nil), + (*ConverseResponse_EventType_)(nil), + (*ConverseResponse_AudioOut)(nil), + (*ConverseResponse_Result)(nil), + } +} + +func _ConverseResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConverseResponse) + // converse_response + switch x := m.ConverseResponse.(type) { + case *ConverseResponse_Error: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Error); err != nil { + return err + } + case *ConverseResponse_EventType_: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.EventType)) + case *ConverseResponse_AudioOut: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioOut); err != nil { + return err + } + case *ConverseResponse_Result: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Result); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ConverseResponse.ConverseResponse has unexpected type %T", x) + } + return nil +} + +func _ConverseResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConverseResponse) + switch tag { + case 1: // converse_response.error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(status.Status) + err := b.DecodeMessage(msg) + m.ConverseResponse = &ConverseResponse_Error{msg} + return true, err + case 2: // converse_response.event_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConverseResponse = &ConverseResponse_EventType_{ConverseResponse_EventType(x)} + return true, err + case 3: // converse_response.audio_out + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AudioOut) + err := b.DecodeMessage(msg) + m.ConverseResponse = &ConverseResponse_AudioOut{msg} + return true, err + case 5: // converse_response.result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ConverseResult) + err := b.DecodeMessage(msg) + m.ConverseResponse = &ConverseResponse_Result{msg} + return true, err + default: + return false, nil + } +} + +func _ConverseResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConverseResponse) + // converse_response + switch x := m.ConverseResponse.(type) { + case *ConverseResponse_Error: + s := proto.Size(x.Error) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConverseResponse_EventType_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.EventType)) + case *ConverseResponse_AudioOut: + s := proto.Size(x.AudioOut) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConverseResponse_Result: + s := proto.Size(x.Result) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ConverseConfig)(nil), "google.assistant.embedded.v1alpha1.ConverseConfig") + proto.RegisterType((*AudioInConfig)(nil), "google.assistant.embedded.v1alpha1.AudioInConfig") + proto.RegisterType((*AudioOutConfig)(nil), 
"google.assistant.embedded.v1alpha1.AudioOutConfig") + proto.RegisterType((*ConverseState)(nil), "google.assistant.embedded.v1alpha1.ConverseState") + proto.RegisterType((*AudioOut)(nil), "google.assistant.embedded.v1alpha1.AudioOut") + proto.RegisterType((*ConverseResult)(nil), "google.assistant.embedded.v1alpha1.ConverseResult") + proto.RegisterType((*ConverseRequest)(nil), "google.assistant.embedded.v1alpha1.ConverseRequest") + proto.RegisterType((*ConverseResponse)(nil), "google.assistant.embedded.v1alpha1.ConverseResponse") + proto.RegisterEnum("google.assistant.embedded.v1alpha1.AudioInConfig_Encoding", AudioInConfig_Encoding_name, AudioInConfig_Encoding_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha1.AudioOutConfig_Encoding", AudioOutConfig_Encoding_name, AudioOutConfig_Encoding_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha1.ConverseResult_MicrophoneMode", ConverseResult_MicrophoneMode_name, ConverseResult_MicrophoneMode_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha1.ConverseResponse_EventType", ConverseResponse_EventType_name, ConverseResponse_EventType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EmbeddedAssistantClient is the client API for EmbeddedAssistant service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EmbeddedAssistantClient interface { + // Initiates or continues a conversation with the embedded assistant service. + // Each call performs one round-trip, sending an audio request to the service + // and receiving the audio response. Uses bidirectional streaming to receive + // results, such as the `END_OF_UTTERANCE` event, while sending audio. + // + // A conversation is one or more gRPC connections, each consisting of several + // streamed requests and responses. + // For example, the user says *Add to my shopping list* and the assistant + // responds *What do you want to add?*. The sequence of streamed requests and + // responses in the first gRPC message could be: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // The user then says *bagels* and the assistant responds + // *OK, I've added bagels to your shopping list*. This is sent as another gRPC + // connection call to the `Converse` method, again with streamed requests and + // responses, such as: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // Although the precise order of responses is not guaranteed, sequential + // ConverseResponse.audio_out messages will always contain sequential portions + // of audio. 
+ Converse(ctx context.Context, opts ...grpc.CallOption) (EmbeddedAssistant_ConverseClient, error) +} + +type embeddedAssistantClient struct { + cc *grpc.ClientConn +} + +func NewEmbeddedAssistantClient(cc *grpc.ClientConn) EmbeddedAssistantClient { + return &embeddedAssistantClient{cc} +} + +func (c *embeddedAssistantClient) Converse(ctx context.Context, opts ...grpc.CallOption) (EmbeddedAssistant_ConverseClient, error) { + stream, err := c.cc.NewStream(ctx, &_EmbeddedAssistant_serviceDesc.Streams[0], "/google.assistant.embedded.v1alpha1.EmbeddedAssistant/Converse", opts...) + if err != nil { + return nil, err + } + x := &embeddedAssistantConverseClient{stream} + return x, nil +} + +type EmbeddedAssistant_ConverseClient interface { + Send(*ConverseRequest) error + Recv() (*ConverseResponse, error) + grpc.ClientStream +} + +type embeddedAssistantConverseClient struct { + grpc.ClientStream +} + +func (x *embeddedAssistantConverseClient) Send(m *ConverseRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *embeddedAssistantConverseClient) Recv() (*ConverseResponse, error) { + m := new(ConverseResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// EmbeddedAssistantServer is the server API for EmbeddedAssistant service. +type EmbeddedAssistantServer interface { + // Initiates or continues a conversation with the embedded assistant service. + // Each call performs one round-trip, sending an audio request to the service + // and receiving the audio response. Uses bidirectional streaming to receive + // results, such as the `END_OF_UTTERANCE` event, while sending audio. + // + // A conversation is one or more gRPC connections, each consisting of several + // streamed requests and responses. + // For example, the user says *Add to my shopping list* and the assistant + // responds *What do you want to add?*. The sequence of streamed requests and + // responses in the first gRPC message could be: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.DIALOG_FOLLOW_ON + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // The user then says *bagels* and the assistant responds + // *OK, I've added bagels to your shopping list*. This is sent as another gRPC + // connection call to the `Converse` method, again with streamed requests and + // responses, such as: + // + // * ConverseRequest.config + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseRequest.audio_in + // * ConverseResponse.event_type.END_OF_UTTERANCE + // * ConverseResponse.result.microphone_mode.CLOSE_MICROPHONE + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // * ConverseResponse.audio_out + // + // Although the precise order of responses is not guaranteed, sequential + // ConverseResponse.audio_out messages will always contain sequential portions + // of audio. 
+ Converse(EmbeddedAssistant_ConverseServer) error +} + +func RegisterEmbeddedAssistantServer(s *grpc.Server, srv EmbeddedAssistantServer) { + s.RegisterService(&_EmbeddedAssistant_serviceDesc, srv) +} + +func _EmbeddedAssistant_Converse_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(EmbeddedAssistantServer).Converse(&embeddedAssistantConverseServer{stream}) +} + +type EmbeddedAssistant_ConverseServer interface { + Send(*ConverseResponse) error + Recv() (*ConverseRequest, error) + grpc.ServerStream +} + +type embeddedAssistantConverseServer struct { + grpc.ServerStream +} + +func (x *embeddedAssistantConverseServer) Send(m *ConverseResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *embeddedAssistantConverseServer) Recv() (*ConverseRequest, error) { + m := new(ConverseRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _EmbeddedAssistant_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.assistant.embedded.v1alpha1.EmbeddedAssistant", + HandlerType: (*EmbeddedAssistantServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Converse", + Handler: _EmbeddedAssistant_Converse_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/assistant/embedded/v1alpha1/embedded_assistant.proto", +} + +func init() { + proto.RegisterFile("google/assistant/embedded/v1alpha1/embedded_assistant.proto", fileDescriptor_embedded_assistant_ed9e8ba6964a09be) +} + +var fileDescriptor_embedded_assistant_ed9e8ba6964a09be = []byte{ + // 892 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x51, 0x73, 0xdb, 0x44, + 0x10, 0xb6, 0xec, 0xa6, 0xb5, 0xb7, 0x89, 0x2c, 0x5f, 0x33, 0x90, 0x49, 0x61, 0x60, 0xf4, 0xc0, + 0x94, 0x02, 0x72, 0xe3, 0x30, 0x3c, 0x10, 0xe8, 0x8c, 0x63, 0x2b, 0xb1, 0xc1, 0x96, 0x3c, 0x67, + 0xa7, 0xa5, 0x0c, 0xcc, 0xcd, 0x55, 0x3e, 0x1c, 0x81, 0x7d, 0x27, 0xa4, 0x73, 0xa6, 0xe1, 0x07, + 0xf0, 0xd8, 0xe1, 0x95, 0x67, 0x7e, 0x11, 0xff, 0x88, 0xd1, 0x9d, 0xa4, 0xd8, 0x90, 0x42, 0x1c, + 0x1e, 0x6f, 0xf7, 0xbe, 0x4f, 0xbb, 0xdf, 0x7e, 0xb7, 0x23, 0x38, 0x9a, 0x09, 0x31, 0x9b, 0xb3, + 0x26, 0x4d, 0x92, 0x30, 0x91, 0x94, 0xcb, 0x26, 0x5b, 0xbc, 0x64, 0xd3, 0x29, 0x9b, 0x36, 0x2f, + 0x0e, 0xe8, 0x3c, 0x3a, 0xa7, 0x07, 0x45, 0x84, 0x14, 0x97, 0x9c, 0x28, 0x16, 0x52, 0x20, 0x5b, + 0x83, 0x9d, 0xab, 0x78, 0x7e, 0xd5, 0xc9, 0xc1, 0xfb, 0xef, 0xe4, 0x1f, 0x88, 0xc2, 0x26, 0xe5, + 0x5c, 0x48, 0x2a, 0x43, 0xc1, 0x13, 0xcd, 0xb0, 0xff, 0x76, 0x96, 0x8d, 0xa3, 0xa0, 0x99, 0x48, + 0x2a, 0x97, 0x59, 0xc2, 0xfe, 0xa3, 0x0c, 0x66, 0x47, 0xf0, 0x0b, 0x16, 0x27, 0xac, 0x23, 0xf8, + 0x0f, 0xe1, 0x0c, 0xbd, 0x80, 0x3a, 0x5d, 0x4e, 0x43, 0x41, 0x42, 0x4e, 0x02, 0x15, 0xda, 0x33, + 0xde, 0x37, 0x1e, 0xdd, 0x6f, 0x1d, 0x38, 0xff, 0x5d, 0x87, 0xd3, 0x4e, 0xa1, 0x7d, 0xae, 0xb9, + 0xf0, 0x0e, 0x5d, 0x3d, 0xa2, 0xef, 0xc0, 0xd2, 0xd4, 0x62, 0x29, 0x73, 0xee, 0xb2, 0xe2, 0x6e, + 0xdd, 0x98, 0xdb, 0x5f, 0xca, 0x8c, 0xdc, 0xa4, 0x6b, 0x67, 0xf4, 0x0d, 0x98, 0x41, 0xd6, 0x0a, + 0x49, 0x9b, 0x64, 0x7b, 0x95, 0x9b, 0xd7, 0x9d, 0x8b, 0x30, 0x4e, 0x81, 0x78, 0x27, 0x58, 0x3d, + 0xda, 0x7f, 0x1a, 0xb0, 0xb3, 0xd6, 0x18, 0x7a, 0x06, 0x55, 0xc6, 0x03, 0x31, 0x0d, 0xb9, 0x56, + 0xc7, 0x6c, 0x7d, 0xbe, 0xb1, 0x3a, 0x8e, 0x9b, 0x31, 0xe0, 0x82, 0x0b, 0x3d, 0x86, 0x46, 0x42, + 0x17, 0xd1, 0x9c, 0x91, 0x98, 0x4a, 0x46, 0xce, 0x59, 0x2c, 0x7f, 0x51, 0x12, 0x6d, 0xe1, 0xba, + 0x4e, 0x60, 0x2a, 0x59, 0x2f, 0x0d, 0xdb, 
0x5f, 0x40, 0x35, 0x67, 0x40, 0x7b, 0xb0, 0xeb, 0x7a, + 0x1d, 0xbf, 0xdb, 0xf7, 0x4e, 0xc9, 0x99, 0x37, 0x1e, 0xb9, 0x9d, 0xfe, 0x49, 0xdf, 0xed, 0x5a, + 0x25, 0xb4, 0x0d, 0xd5, 0x41, 0xdf, 0x73, 0xdb, 0xf8, 0xe0, 0x33, 0xcb, 0x40, 0x55, 0xb8, 0x73, + 0x32, 0x68, 0x77, 0xac, 0xb2, 0xfd, 0x5b, 0x19, 0xcc, 0x75, 0x41, 0xd1, 0xf3, 0x7f, 0x34, 0x75, + 0xb4, 0xf9, 0x58, 0xfe, 0x67, 0x57, 0xe8, 0x23, 0x68, 0x5c, 0x88, 0xf9, 0x72, 0xc1, 0x48, 0xc4, + 0xe2, 0x80, 0x71, 0x49, 0x67, 0x7a, 0x90, 0x5b, 0xd8, 0xd2, 0x89, 0x51, 0x11, 0xb7, 0x07, 0xb7, + 0x90, 0xe0, 0x1e, 0x54, 0x86, 0xa3, 0x43, 0xab, 0x8c, 0xea, 0x70, 0xdf, 0x1f, 0x9d, 0x8d, 0x49, + 0xdf, 0x23, 0xfe, 0xe9, 0xa9, 0x55, 0xb1, 0x9f, 0xc2, 0xce, 0x9a, 0x0d, 0xd0, 0x27, 0x80, 0x32, + 0x23, 0xa8, 0xd7, 0x94, 0xb9, 0x2a, 0x95, 0x66, 0x1b, 0x37, 0x56, 0x33, 0xda, 0x26, 0x1f, 0x42, + 0x35, 0xd7, 0x02, 0xbd, 0x0b, 0xa0, 0xad, 0x3e, 0xa5, 0x92, 0x66, 0x90, 0x9a, 0x8a, 0x74, 0xa9, + 0xa4, 0xf6, 0xef, 0x95, 0xab, 0x77, 0x87, 0x59, 0xb2, 0x9c, 0x4b, 0xe4, 0xc0, 0x83, 0x24, 0x12, + 0x3f, 0x31, 0x4e, 0x62, 0xf6, 0xf3, 0x92, 0x25, 0x92, 0x48, 0xf6, 0x4a, 0x2a, 0x68, 0x0d, 0x37, + 0x74, 0x0a, 0xeb, 0xcc, 0x84, 0xbd, 0x92, 0xe8, 0x09, 0xec, 0x16, 0xf7, 0x93, 0x48, 0xf0, 0x84, + 0x69, 0x40, 0x59, 0x01, 0x50, 0x0e, 0xd0, 0x29, 0x85, 0xb8, 0xbe, 0x9d, 0xca, 0x1b, 0xda, 0x41, + 0x3f, 0x42, 0x7d, 0x11, 0x06, 0xb1, 0x88, 0xce, 0x05, 0x67, 0x64, 0x21, 0xa6, 0x6c, 0xef, 0x8e, + 0x72, 0x45, 0x7b, 0x93, 0x07, 0xa5, 0xbb, 0x73, 0x86, 0x05, 0xd3, 0x50, 0x4c, 0x19, 0x36, 0x17, + 0x6b, 0xe7, 0xeb, 0xa7, 0xbe, 0xf5, 0x86, 0xa9, 0x7f, 0x0f, 0xe6, 0x3a, 0x1d, 0x7a, 0x0f, 0x1e, + 0x0e, 0xfb, 0x1d, 0xec, 0x8f, 0x7a, 0xbe, 0xe7, 0x92, 0xa1, 0xdf, 0x75, 0xff, 0x66, 0x81, 0x5d, + 0xb0, 0x3a, 0x03, 0x7f, 0xec, 0x92, 0xab, 0x6b, 0x96, 0x91, 0x46, 0xbb, 0xfd, 0xf6, 0xc0, 0x3f, + 0x25, 0x27, 0xfe, 0x60, 0xe0, 0x3f, 0x27, 0xbe, 0x97, 0xbe, 0x0c, 0x03, 0xea, 0x57, 0xd5, 0x2b, + 0xc1, 0xd1, 0x00, 0xee, 0xae, 0xed, 0xc2, 0xd6, 0x26, 0x12, 0xe8, 0x87, 0xd1, 0x2b, 0xe1, 0x8c, + 0x03, 0x3d, 0x84, 0x6a, 0xbe, 0x62, 0xd5, 0xb8, 0xb6, 0x7b, 0x25, 0x7c, 0x2f, 0x5b, 0x95, 0xc7, + 0x08, 0xac, 0x62, 0x8d, 0x65, 0x4e, 0xb0, 0x5f, 0x57, 0xc0, 0x5a, 0x11, 0x54, 0x8d, 0x14, 0x3d, + 0x86, 0x2d, 0x16, 0xc7, 0x22, 0xce, 0x4a, 0x42, 0x79, 0x49, 0x71, 0x14, 0x38, 0x63, 0xb5, 0xe4, + 0x7b, 0x25, 0xac, 0xaf, 0x20, 0x02, 0xc0, 0x2e, 0x18, 0x97, 0x44, 0x5e, 0x46, 0x4c, 0x7d, 0xd3, + 0x6c, 0x3d, 0xdd, 0x70, 0x8c, 0xea, 0xab, 0x8e, 0x9b, 0xd2, 0x4c, 0x2e, 0x23, 0xd6, 0x2b, 0xe1, + 0x1a, 0xcb, 0x0f, 0xe8, 0x6b, 0xa8, 0x15, 0xab, 0x3d, 0xdb, 0xbb, 0x1f, 0x6f, 0xb2, 0x3c, 0x7a, + 0x25, 0x5c, 0xcd, 0xf7, 0x79, 0xaa, 0x76, 0xac, 0x6c, 0xa3, 0x2c, 0xb0, 0xa1, 0xda, 0xda, 0x70, + 0xa9, 0xda, 0x9a, 0xc3, 0xfe, 0x12, 0x6a, 0x45, 0xd1, 0x68, 0x1f, 0xde, 0x72, 0x9f, 0xb9, 0xde, + 0x84, 0x4c, 0x5e, 0x8c, 0xae, 0x31, 0x89, 0xeb, 0x75, 0x89, 0x7f, 0x42, 0xce, 0x26, 0x13, 0x17, + 0xb7, 0xbd, 0x8e, 0x6b, 0x19, 0xc7, 0x0f, 0xa0, 0xb1, 0x32, 0x0f, 0xad, 0x42, 0xeb, 0xb5, 0x01, + 0x0d, 0x37, 0x2b, 0xa1, 0x9d, 0x17, 0x85, 0x2e, 0xa1, 0x9a, 0x57, 0x81, 0x0e, 0x37, 0xab, 0x59, + 0xcd, 0x79, 0xff, 0xd3, 0xdb, 0x8c, 0xe4, 0x91, 0xf1, 0xc4, 0x38, 0xfe, 0xd5, 0x80, 0x0f, 0x02, + 0xb1, 0xb8, 0x01, 0xfe, 0xd8, 0x2c, 0x0a, 0x1e, 0xa5, 0xff, 0x00, 0x23, 0xe3, 0xdb, 0xaf, 0x32, + 0xd4, 0x4c, 0xcc, 0x29, 0x9f, 0x39, 0x22, 0x9e, 0x35, 0x67, 0x8c, 0xab, 0x3f, 0x84, 0xa6, 0x4e, + 0xd1, 0x28, 0x4c, 0xfe, 0xed, 0xe7, 0xe5, 0x28, 0x8f, 0xbc, 0xbc, 0xab, 0x60, 0x87, 0x7f, 0x05, + 0x00, 0x00, 0xff, 0xff, 0xec, 0x7a, 0x68, 0xfa, 0xf2, 0x08, 0x00, 
0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2/embedded_assistant.pb.go b/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2/embedded_assistant.pb.go new file mode 100644 index 0000000..2c376cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2/embedded_assistant.pb.go @@ -0,0 +1,1775 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/assistant/embedded/v1alpha2/embedded_assistant.proto + +package embedded // import "google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates the type of event. +type AssistResponse_EventType int32 + +const ( + // No event specified. + AssistResponse_EVENT_TYPE_UNSPECIFIED AssistResponse_EventType = 0 + // This event indicates that the server has detected the end of the user's + // speech utterance and expects no additional speech. Therefore, the server + // will not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. + AssistResponse_END_OF_UTTERANCE AssistResponse_EventType = 1 +) + +var AssistResponse_EventType_name = map[int32]string{ + 0: "EVENT_TYPE_UNSPECIFIED", + 1: "END_OF_UTTERANCE", +} +var AssistResponse_EventType_value = map[string]int32{ + "EVENT_TYPE_UNSPECIFIED": 0, + "END_OF_UTTERANCE": 1, +} + +func (x AssistResponse_EventType) String() string { + return proto.EnumName(AssistResponse_EventType_name, int32(x)) +} +func (AssistResponse_EventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{1, 0} +} + +// Audio encoding of the data sent in the audio message. +// Audio must be one-channel (mono). +type AudioInConfig_Encoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioInConfig_ENCODING_UNSPECIFIED AudioInConfig_Encoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // This encoding includes no header, only the raw audio bytes. + AudioInConfig_LINEAR16 AudioInConfig_Encoding = 1 + // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio + // Codec) is the recommended encoding because it is + // lossless--therefore recognition is not compromised--and + // requires only about half the bandwidth of `LINEAR16`. This encoding + // includes the `FLAC` stream header followed by audio data. It supports + // 16-bit and 24-bit samples, however, not all fields in `STREAMINFO` are + // supported. 
+ AudioInConfig_FLAC AudioInConfig_Encoding = 2 +) + +var AudioInConfig_Encoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "FLAC", +} +var AudioInConfig_Encoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "FLAC": 2, +} + +func (x AudioInConfig_Encoding) String() string { + return proto.EnumName(AudioInConfig_Encoding_name, int32(x)) +} +func (AudioInConfig_Encoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{4, 0} +} + +// Audio encoding of the data returned in the audio message. All encodings are +// raw audio bytes with no header, except as indicated below. +type AudioOutConfig_Encoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioOutConfig_ENCODING_UNSPECIFIED AudioOutConfig_Encoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + AudioOutConfig_LINEAR16 AudioOutConfig_Encoding = 1 + // MP3 audio encoding. The sample rate is encoded in the payload. + AudioOutConfig_MP3 AudioOutConfig_Encoding = 2 + // Opus-encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android and in some browsers (such + // as Chrome). The quality of the encoding is considerably higher than MP3 + // while using the same bitrate. The sample rate is encoded in the payload. + AudioOutConfig_OPUS_IN_OGG AudioOutConfig_Encoding = 3 +) + +var AudioOutConfig_Encoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "MP3", + 3: "OPUS_IN_OGG", +} +var AudioOutConfig_Encoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "MP3": 2, + "OPUS_IN_OGG": 3, +} + +func (x AudioOutConfig_Encoding) String() string { + return proto.EnumName(AudioOutConfig_Encoding_name, int32(x)) +} +func (AudioOutConfig_Encoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{5, 0} +} + +// Possible modes for visual screen-output on the device. +type ScreenOutConfig_ScreenMode int32 + +const ( + // No video mode specified. + // The Assistant may respond as if in `OFF` mode. + ScreenOutConfig_SCREEN_MODE_UNSPECIFIED ScreenOutConfig_ScreenMode = 0 + // Screen is off (or has brightness or other settings set so low it is + // not visible). The Assistant will typically not return a screen response + // in this mode. + ScreenOutConfig_OFF ScreenOutConfig_ScreenMode = 1 + // The Assistant will typically return a partial-screen response in this + // mode. + ScreenOutConfig_PLAYING ScreenOutConfig_ScreenMode = 3 +) + +var ScreenOutConfig_ScreenMode_name = map[int32]string{ + 0: "SCREEN_MODE_UNSPECIFIED", + 1: "OFF", + 3: "PLAYING", +} +var ScreenOutConfig_ScreenMode_value = map[string]int32{ + "SCREEN_MODE_UNSPECIFIED": 0, + "OFF": 1, + "PLAYING": 3, +} + +func (x ScreenOutConfig_ScreenMode) String() string { + return proto.EnumName(ScreenOutConfig_ScreenMode_name, int32(x)) +} +func (ScreenOutConfig_ScreenMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{6, 0} +} + +// Possible formats of the screen data. +type ScreenOut_Format int32 + +const ( + // No format specified. + ScreenOut_FORMAT_UNSPECIFIED ScreenOut_Format = 0 + // Data will contain a fully-formed HTML5 layout encoded in UTF-8, e.g. + // `
<html><body><div>...</div></body></html>
`. It is intended to be rendered + // along with the audio response. Note that HTML5 doctype should be included + // in the actual HTML data. + ScreenOut_HTML ScreenOut_Format = 1 +) + +var ScreenOut_Format_name = map[int32]string{ + 0: "FORMAT_UNSPECIFIED", + 1: "HTML", +} +var ScreenOut_Format_value = map[string]int32{ + "FORMAT_UNSPECIFIED": 0, + "HTML": 1, +} + +func (x ScreenOut_Format) String() string { + return proto.EnumName(ScreenOut_Format_name, int32(x)) +} +func (ScreenOut_Format) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{10, 0} +} + +// Possible states of the microphone after a `Assist` RPC completes. +type DialogStateOut_MicrophoneMode int32 + +const ( + // No mode specified. + DialogStateOut_MICROPHONE_MODE_UNSPECIFIED DialogStateOut_MicrophoneMode = 0 + // The service is not expecting a follow-on question from the user. + // The microphone should remain off until the user re-activates it. + DialogStateOut_CLOSE_MICROPHONE DialogStateOut_MicrophoneMode = 1 + // The service is expecting a follow-on question from the user. The + // microphone should be re-opened when the `AudioOut` playback completes + // (by starting a new `Assist` RPC call to send the new audio). + DialogStateOut_DIALOG_FOLLOW_ON DialogStateOut_MicrophoneMode = 2 +) + +var DialogStateOut_MicrophoneMode_name = map[int32]string{ + 0: "MICROPHONE_MODE_UNSPECIFIED", + 1: "CLOSE_MICROPHONE", + 2: "DIALOG_FOLLOW_ON", +} +var DialogStateOut_MicrophoneMode_value = map[string]int32{ + "MICROPHONE_MODE_UNSPECIFIED": 0, + "CLOSE_MICROPHONE": 1, + "DIALOG_FOLLOW_ON": 2, +} + +func (x DialogStateOut_MicrophoneMode) String() string { + return proto.EnumName(DialogStateOut_MicrophoneMode_name, int32(x)) +} +func (DialogStateOut_MicrophoneMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{13, 0} +} + +// The top-level message sent by the client. Clients must send at least two, and +// typically numerous `AssistRequest` messages. The first message must +// contain a `config` message and must not contain `audio_in` data. All +// subsequent messages must contain `audio_in` data and must not contain a +// `config` message. +type AssistRequest struct { + // Exactly one of these fields must be specified in each `AssistRequest`. 
+ // + // Types that are valid to be assigned to Type: + // *AssistRequest_Config + // *AssistRequest_AudioIn + Type isAssistRequest_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssistRequest) Reset() { *m = AssistRequest{} } +func (m *AssistRequest) String() string { return proto.CompactTextString(m) } +func (*AssistRequest) ProtoMessage() {} +func (*AssistRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{0} +} +func (m *AssistRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssistRequest.Unmarshal(m, b) +} +func (m *AssistRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssistRequest.Marshal(b, m, deterministic) +} +func (dst *AssistRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssistRequest.Merge(dst, src) +} +func (m *AssistRequest) XXX_Size() int { + return xxx_messageInfo_AssistRequest.Size(m) +} +func (m *AssistRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AssistRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AssistRequest proto.InternalMessageInfo + +type isAssistRequest_Type interface { + isAssistRequest_Type() +} + +type AssistRequest_Config struct { + Config *AssistConfig `protobuf:"bytes,1,opt,name=config,proto3,oneof"` +} + +type AssistRequest_AudioIn struct { + AudioIn []byte `protobuf:"bytes,2,opt,name=audio_in,json=audioIn,proto3,oneof"` +} + +func (*AssistRequest_Config) isAssistRequest_Type() {} + +func (*AssistRequest_AudioIn) isAssistRequest_Type() {} + +func (m *AssistRequest) GetType() isAssistRequest_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *AssistRequest) GetConfig() *AssistConfig { + if x, ok := m.GetType().(*AssistRequest_Config); ok { + return x.Config + } + return nil +} + +func (m *AssistRequest) GetAudioIn() []byte { + if x, ok := m.GetType().(*AssistRequest_AudioIn); ok { + return x.AudioIn + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AssistRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AssistRequest_OneofMarshaler, _AssistRequest_OneofUnmarshaler, _AssistRequest_OneofSizer, []interface{}{ + (*AssistRequest_Config)(nil), + (*AssistRequest_AudioIn)(nil), + } +} + +func _AssistRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AssistRequest) + // type + switch x := m.Type.(type) { + case *AssistRequest_Config: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Config); err != nil { + return err + } + case *AssistRequest_AudioIn: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AudioIn) + case nil: + default: + return fmt.Errorf("AssistRequest.Type has unexpected type %T", x) + } + return nil +} + +func _AssistRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AssistRequest) + switch tag { + case 1: // type.config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AssistConfig) + err := b.DecodeMessage(msg) + m.Type = &AssistRequest_Config{msg} + return true, err + case 2: // type.audio_in + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Type = &AssistRequest_AudioIn{x} + return true, err + default: + return false, nil + } +} + +func _AssistRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AssistRequest) + // type + switch x := m.Type.(type) { + case *AssistRequest_Config: + s := proto.Size(x.Config) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AssistRequest_AudioIn: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AudioIn))) + n += len(x.AudioIn) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The top-level message received by the client. A series of one or more +// `AssistResponse` messages are streamed back to the client. +type AssistResponse struct { + // *Output-only* Indicates the type of event. + EventType AssistResponse_EventType `protobuf:"varint,1,opt,name=event_type,json=eventType,proto3,enum=google.assistant.embedded.v1alpha2.AssistResponse_EventType" json:"event_type,omitempty"` + // *Output-only* The audio containing the Assistant's response to the query. + AudioOut *AudioOut `protobuf:"bytes,3,opt,name=audio_out,json=audioOut,proto3" json:"audio_out,omitempty"` + // *Output-only* Contains the Assistant's visual response to the query. + ScreenOut *ScreenOut `protobuf:"bytes,4,opt,name=screen_out,json=screenOut,proto3" json:"screen_out,omitempty"` + // *Output-only* Contains the action triggered by the query with the + // appropriate payloads and semantic parsing. + DeviceAction *DeviceAction `protobuf:"bytes,6,opt,name=device_action,json=deviceAction,proto3" json:"device_action,omitempty"` + // *Output-only* This repeated list contains zero or more speech recognition + // results that correspond to consecutive portions of the audio currently + // being processed, starting with the portion corresponding to the earliest + // audio (and most stable portion) to the portion corresponding to the most + // recent audio. The strings can be concatenated to view the full + // in-progress response. When the speech recognition completes, this list + // will contain one item with `stability` of `1.0`. 
+ SpeechResults []*SpeechRecognitionResult `protobuf:"bytes,2,rep,name=speech_results,json=speechResults,proto3" json:"speech_results,omitempty"` + // *Output-only* Contains output related to the user's query. + DialogStateOut *DialogStateOut `protobuf:"bytes,5,opt,name=dialog_state_out,json=dialogStateOut,proto3" json:"dialog_state_out,omitempty"` + // *Output-only* Debugging info for developer. Only returned if request set + // `return_debug_info` to true. + DebugInfo *DebugInfo `protobuf:"bytes,8,opt,name=debug_info,json=debugInfo,proto3" json:"debug_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssistResponse) Reset() { *m = AssistResponse{} } +func (m *AssistResponse) String() string { return proto.CompactTextString(m) } +func (*AssistResponse) ProtoMessage() {} +func (*AssistResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{1} +} +func (m *AssistResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssistResponse.Unmarshal(m, b) +} +func (m *AssistResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssistResponse.Marshal(b, m, deterministic) +} +func (dst *AssistResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssistResponse.Merge(dst, src) +} +func (m *AssistResponse) XXX_Size() int { + return xxx_messageInfo_AssistResponse.Size(m) +} +func (m *AssistResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AssistResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AssistResponse proto.InternalMessageInfo + +func (m *AssistResponse) GetEventType() AssistResponse_EventType { + if m != nil { + return m.EventType + } + return AssistResponse_EVENT_TYPE_UNSPECIFIED +} + +func (m *AssistResponse) GetAudioOut() *AudioOut { + if m != nil { + return m.AudioOut + } + return nil +} + +func (m *AssistResponse) GetScreenOut() *ScreenOut { + if m != nil { + return m.ScreenOut + } + return nil +} + +func (m *AssistResponse) GetDeviceAction() *DeviceAction { + if m != nil { + return m.DeviceAction + } + return nil +} + +func (m *AssistResponse) GetSpeechResults() []*SpeechRecognitionResult { + if m != nil { + return m.SpeechResults + } + return nil +} + +func (m *AssistResponse) GetDialogStateOut() *DialogStateOut { + if m != nil { + return m.DialogStateOut + } + return nil +} + +func (m *AssistResponse) GetDebugInfo() *DebugInfo { + if m != nil { + return m.DebugInfo + } + return nil +} + +// Debug info for developer. Only returned if request set `return_debug_info` +// to true. +type DebugInfo struct { + // The original JSON response from an Action-on-Google agent to Google server. + // See + // https://developers.google.com/actions/reference/rest/Shared.Types/AppResponse. + // It will only be populated if the request maker owns the AoG project and the + // AoG project is in preview mode. 
+ AogAgentToAssistantJson string `protobuf:"bytes,1,opt,name=aog_agent_to_assistant_json,json=aogAgentToAssistantJson,proto3" json:"aog_agent_to_assistant_json,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DebugInfo) Reset() { *m = DebugInfo{} } +func (m *DebugInfo) String() string { return proto.CompactTextString(m) } +func (*DebugInfo) ProtoMessage() {} +func (*DebugInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{2} +} +func (m *DebugInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DebugInfo.Unmarshal(m, b) +} +func (m *DebugInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DebugInfo.Marshal(b, m, deterministic) +} +func (dst *DebugInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DebugInfo.Merge(dst, src) +} +func (m *DebugInfo) XXX_Size() int { + return xxx_messageInfo_DebugInfo.Size(m) +} +func (m *DebugInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DebugInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DebugInfo proto.InternalMessageInfo + +func (m *DebugInfo) GetAogAgentToAssistantJson() string { + if m != nil { + return m.AogAgentToAssistantJson + } + return "" +} + +// Specifies how to process the `AssistRequest` messages. +type AssistConfig struct { + // Types that are valid to be assigned to Type: + // *AssistConfig_AudioInConfig + // *AssistConfig_TextQuery + Type isAssistConfig_Type `protobuf_oneof:"type"` + // *Required* Specifies how to format the audio that will be returned. + AudioOutConfig *AudioOutConfig `protobuf:"bytes,2,opt,name=audio_out_config,json=audioOutConfig,proto3" json:"audio_out_config,omitempty"` + // *Optional* Specifies the desired format to use when server returns a + // visual screen response. + ScreenOutConfig *ScreenOutConfig `protobuf:"bytes,8,opt,name=screen_out_config,json=screenOutConfig,proto3" json:"screen_out_config,omitempty"` + // *Required* Represents the current dialog state. + DialogStateIn *DialogStateIn `protobuf:"bytes,3,opt,name=dialog_state_in,json=dialogStateIn,proto3" json:"dialog_state_in,omitempty"` + // Device configuration that uniquely identifies a specific device. + DeviceConfig *DeviceConfig `protobuf:"bytes,4,opt,name=device_config,json=deviceConfig,proto3" json:"device_config,omitempty"` + // *Optional* Debugging parameters for the whole `Assist` RPC. 
+ DebugConfig *DebugConfig `protobuf:"bytes,5,opt,name=debug_config,json=debugConfig,proto3" json:"debug_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AssistConfig) Reset() { *m = AssistConfig{} } +func (m *AssistConfig) String() string { return proto.CompactTextString(m) } +func (*AssistConfig) ProtoMessage() {} +func (*AssistConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{3} +} +func (m *AssistConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AssistConfig.Unmarshal(m, b) +} +func (m *AssistConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AssistConfig.Marshal(b, m, deterministic) +} +func (dst *AssistConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AssistConfig.Merge(dst, src) +} +func (m *AssistConfig) XXX_Size() int { + return xxx_messageInfo_AssistConfig.Size(m) +} +func (m *AssistConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AssistConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AssistConfig proto.InternalMessageInfo + +type isAssistConfig_Type interface { + isAssistConfig_Type() +} + +type AssistConfig_AudioInConfig struct { + AudioInConfig *AudioInConfig `protobuf:"bytes,1,opt,name=audio_in_config,json=audioInConfig,proto3,oneof"` +} + +type AssistConfig_TextQuery struct { + TextQuery string `protobuf:"bytes,6,opt,name=text_query,json=textQuery,proto3,oneof"` +} + +func (*AssistConfig_AudioInConfig) isAssistConfig_Type() {} + +func (*AssistConfig_TextQuery) isAssistConfig_Type() {} + +func (m *AssistConfig) GetType() isAssistConfig_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *AssistConfig) GetAudioInConfig() *AudioInConfig { + if x, ok := m.GetType().(*AssistConfig_AudioInConfig); ok { + return x.AudioInConfig + } + return nil +} + +func (m *AssistConfig) GetTextQuery() string { + if x, ok := m.GetType().(*AssistConfig_TextQuery); ok { + return x.TextQuery + } + return "" +} + +func (m *AssistConfig) GetAudioOutConfig() *AudioOutConfig { + if m != nil { + return m.AudioOutConfig + } + return nil +} + +func (m *AssistConfig) GetScreenOutConfig() *ScreenOutConfig { + if m != nil { + return m.ScreenOutConfig + } + return nil +} + +func (m *AssistConfig) GetDialogStateIn() *DialogStateIn { + if m != nil { + return m.DialogStateIn + } + return nil +} + +func (m *AssistConfig) GetDeviceConfig() *DeviceConfig { + if m != nil { + return m.DeviceConfig + } + return nil +} + +func (m *AssistConfig) GetDebugConfig() *DebugConfig { + if m != nil { + return m.DebugConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AssistConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AssistConfig_OneofMarshaler, _AssistConfig_OneofUnmarshaler, _AssistConfig_OneofSizer, []interface{}{ + (*AssistConfig_AudioInConfig)(nil), + (*AssistConfig_TextQuery)(nil), + } +} + +func _AssistConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AssistConfig) + // type + switch x := m.Type.(type) { + case *AssistConfig_AudioInConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioInConfig); err != nil { + return err + } + case *AssistConfig_TextQuery: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TextQuery) + case nil: + default: + return fmt.Errorf("AssistConfig.Type has unexpected type %T", x) + } + return nil +} + +func _AssistConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AssistConfig) + switch tag { + case 1: // type.audio_in_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AudioInConfig) + err := b.DecodeMessage(msg) + m.Type = &AssistConfig_AudioInConfig{msg} + return true, err + case 6: // type.text_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Type = &AssistConfig_TextQuery{x} + return true, err + default: + return false, nil + } +} + +func _AssistConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AssistConfig) + // type + switch x := m.Type.(type) { + case *AssistConfig_AudioInConfig: + s := proto.Size(x.AudioInConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AssistConfig_TextQuery: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TextQuery))) + n += len(x.TextQuery) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specifies how to process the `audio_in` data that will be provided in +// subsequent requests. For recommended settings, see the Google Assistant SDK +// [best +// practices](https://developers.google.com/assistant/sdk/guides/service/python/best-practices/audio). +type AudioInConfig struct { + // *Required* Encoding of audio data sent in all `audio_in` messages. + Encoding AudioInConfig_Encoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.assistant.embedded.v1alpha2.AudioInConfig_Encoding" json:"encoding,omitempty"` + // *Required* Sample rate (in Hertz) of the audio data sent in all `audio_in` + // messages. Valid values are from 16000-24000, but 16000 is optimal. + // For best results, set the sampling rate of the audio source to 16000 Hz. + // If that's not possible, use the native sample rate of the audio source + // (instead of re-sampling). 
+ SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioInConfig) Reset() { *m = AudioInConfig{} } +func (m *AudioInConfig) String() string { return proto.CompactTextString(m) } +func (*AudioInConfig) ProtoMessage() {} +func (*AudioInConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{4} +} +func (m *AudioInConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioInConfig.Unmarshal(m, b) +} +func (m *AudioInConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioInConfig.Marshal(b, m, deterministic) +} +func (dst *AudioInConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioInConfig.Merge(dst, src) +} +func (m *AudioInConfig) XXX_Size() int { + return xxx_messageInfo_AudioInConfig.Size(m) +} +func (m *AudioInConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioInConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioInConfig proto.InternalMessageInfo + +func (m *AudioInConfig) GetEncoding() AudioInConfig_Encoding { + if m != nil { + return m.Encoding + } + return AudioInConfig_ENCODING_UNSPECIFIED +} + +func (m *AudioInConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +// Specifies the desired format for the server to use when it returns +// `audio_out` messages. +type AudioOutConfig struct { + // *Required* The encoding of audio data to be returned in all `audio_out` + // messages. + Encoding AudioOutConfig_Encoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.assistant.embedded.v1alpha2.AudioOutConfig_Encoding" json:"encoding,omitempty"` + // *Required* The sample rate in Hertz of the audio data returned in + // `audio_out` messages. Valid values are: 16000-24000. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // *Required* Current volume setting of the device's audio output. + // Valid values are 1 to 100 (corresponding to 1% to 100%). 
+ VolumePercentage int32 `protobuf:"varint,3,opt,name=volume_percentage,json=volumePercentage,proto3" json:"volume_percentage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioOutConfig) Reset() { *m = AudioOutConfig{} } +func (m *AudioOutConfig) String() string { return proto.CompactTextString(m) } +func (*AudioOutConfig) ProtoMessage() {} +func (*AudioOutConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{5} +} +func (m *AudioOutConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioOutConfig.Unmarshal(m, b) +} +func (m *AudioOutConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioOutConfig.Marshal(b, m, deterministic) +} +func (dst *AudioOutConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioOutConfig.Merge(dst, src) +} +func (m *AudioOutConfig) XXX_Size() int { + return xxx_messageInfo_AudioOutConfig.Size(m) +} +func (m *AudioOutConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioOutConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioOutConfig proto.InternalMessageInfo + +func (m *AudioOutConfig) GetEncoding() AudioOutConfig_Encoding { + if m != nil { + return m.Encoding + } + return AudioOutConfig_ENCODING_UNSPECIFIED +} + +func (m *AudioOutConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *AudioOutConfig) GetVolumePercentage() int32 { + if m != nil { + return m.VolumePercentage + } + return 0 +} + +// Specifies the desired format for the server to use when it returns +// `screen_out` response. +type ScreenOutConfig struct { + // Current visual screen-mode for the device while issuing the query. + ScreenMode ScreenOutConfig_ScreenMode `protobuf:"varint,1,opt,name=screen_mode,json=screenMode,proto3,enum=google.assistant.embedded.v1alpha2.ScreenOutConfig_ScreenMode" json:"screen_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScreenOutConfig) Reset() { *m = ScreenOutConfig{} } +func (m *ScreenOutConfig) String() string { return proto.CompactTextString(m) } +func (*ScreenOutConfig) ProtoMessage() {} +func (*ScreenOutConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{6} +} +func (m *ScreenOutConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScreenOutConfig.Unmarshal(m, b) +} +func (m *ScreenOutConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScreenOutConfig.Marshal(b, m, deterministic) +} +func (dst *ScreenOutConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScreenOutConfig.Merge(dst, src) +} +func (m *ScreenOutConfig) XXX_Size() int { + return xxx_messageInfo_ScreenOutConfig.Size(m) +} +func (m *ScreenOutConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ScreenOutConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ScreenOutConfig proto.InternalMessageInfo + +func (m *ScreenOutConfig) GetScreenMode() ScreenOutConfig_ScreenMode { + if m != nil { + return m.ScreenMode + } + return ScreenOutConfig_SCREEN_MODE_UNSPECIFIED +} + +// Provides information about the current dialog state. 
+type DialogStateIn struct { + // *Required* This field must always be set to the + // [DialogStateOut.conversation_state][google.assistant.embedded.v1alpha2.DialogStateOut.conversation_state] + // value that was returned in the prior `Assist` RPC. It should only be + // omitted (field not set) if there was no prior `Assist` RPC because this is + // the first `Assist` RPC made by this device after it was first setup and/or + // a factory-default reset. + ConversationState []byte `protobuf:"bytes,1,opt,name=conversation_state,json=conversationState,proto3" json:"conversation_state,omitempty"` + // *Required* Language of the request in + // [IETF BCP 47 syntax](https://tools.ietf.org/html/bcp47) (for example, + // "en-US"). See [Language + // Support](https://developers.google.com/assistant/sdk/reference/rpc/languages) + // for more information. If you have selected a language for this `device_id` + // using the + // [Settings](https://developers.google.com/assistant/sdk/reference/assistant-app/assistant-settings) + // menu in your phone's Google Assistant app, that selection will override + // this value. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // *Optional* Location of the device where the query originated. + DeviceLocation *DeviceLocation `protobuf:"bytes,5,opt,name=device_location,json=deviceLocation,proto3" json:"device_location,omitempty"` + // *Optional* If true, the server will treat the request as a new conversation + // and not use state from the prior request. Set this field to true when the + // conversation should be restarted, such as after a device reboot, or after a + // significant lapse of time since the prior query. + IsNewConversation bool `protobuf:"varint,7,opt,name=is_new_conversation,json=isNewConversation,proto3" json:"is_new_conversation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DialogStateIn) Reset() { *m = DialogStateIn{} } +func (m *DialogStateIn) String() string { return proto.CompactTextString(m) } +func (*DialogStateIn) ProtoMessage() {} +func (*DialogStateIn) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{7} +} +func (m *DialogStateIn) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DialogStateIn.Unmarshal(m, b) +} +func (m *DialogStateIn) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DialogStateIn.Marshal(b, m, deterministic) +} +func (dst *DialogStateIn) XXX_Merge(src proto.Message) { + xxx_messageInfo_DialogStateIn.Merge(dst, src) +} +func (m *DialogStateIn) XXX_Size() int { + return xxx_messageInfo_DialogStateIn.Size(m) +} +func (m *DialogStateIn) XXX_DiscardUnknown() { + xxx_messageInfo_DialogStateIn.DiscardUnknown(m) +} + +var xxx_messageInfo_DialogStateIn proto.InternalMessageInfo + +func (m *DialogStateIn) GetConversationState() []byte { + if m != nil { + return m.ConversationState + } + return nil +} + +func (m *DialogStateIn) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *DialogStateIn) GetDeviceLocation() *DeviceLocation { + if m != nil { + return m.DeviceLocation + } + return nil +} + +func (m *DialogStateIn) GetIsNewConversation() bool { + if m != nil { + return m.IsNewConversation + } + return false +} + +// *Required* Fields that identify the device to the Assistant. 
+// +// See also: +// +// * [Register a Device - REST +// API](https://developers.google.com/assistant/sdk/reference/device-registration/register-device-manual) +// * [Device Model and Instance +// Schemas](https://developers.google.com/assistant/sdk/reference/device-registration/model-and-instance-schemas) +// * [Device +// Proto](https://developers.google.com/assistant/sdk/reference/rpc/google.assistant.devices.v1alpha2#device) +type DeviceConfig struct { + // *Required* Unique identifier for the device. The id length must be 128 + // characters or less. Example: DBCDW098234. This MUST match the device_id + // returned from device registration. This device_id is used to match against + // the user's registered devices to lookup the supported traits and + // capabilities of this device. This information should not change across + // device reboots. However, it should not be saved across + // factory-default resets. + DeviceId string `protobuf:"bytes,1,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` + // *Required* Unique identifier for the device model. The combination of + // device_model_id and device_id must have been previously associated through + // device registration. + DeviceModelId string `protobuf:"bytes,3,opt,name=device_model_id,json=deviceModelId,proto3" json:"device_model_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceConfig) Reset() { *m = DeviceConfig{} } +func (m *DeviceConfig) String() string { return proto.CompactTextString(m) } +func (*DeviceConfig) ProtoMessage() {} +func (*DeviceConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{8} +} +func (m *DeviceConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceConfig.Unmarshal(m, b) +} +func (m *DeviceConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceConfig.Marshal(b, m, deterministic) +} +func (dst *DeviceConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceConfig.Merge(dst, src) +} +func (m *DeviceConfig) XXX_Size() int { + return xxx_messageInfo_DeviceConfig.Size(m) +} +func (m *DeviceConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceConfig proto.InternalMessageInfo + +func (m *DeviceConfig) GetDeviceId() string { + if m != nil { + return m.DeviceId + } + return "" +} + +func (m *DeviceConfig) GetDeviceModelId() string { + if m != nil { + return m.DeviceModelId + } + return "" +} + +// The audio containing the Assistant's response to the query. Sequential chunks +// of audio data are received in sequential `AssistResponse` messages. +type AudioOut struct { + // *Output-only* The audio data containing the Assistant's response to the + // query. Sequential chunks of audio data are received in sequential + // `AssistResponse` messages. 
+ AudioData []byte `protobuf:"bytes,1,opt,name=audio_data,json=audioData,proto3" json:"audio_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioOut) Reset() { *m = AudioOut{} } +func (m *AudioOut) String() string { return proto.CompactTextString(m) } +func (*AudioOut) ProtoMessage() {} +func (*AudioOut) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{9} +} +func (m *AudioOut) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioOut.Unmarshal(m, b) +} +func (m *AudioOut) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioOut.Marshal(b, m, deterministic) +} +func (dst *AudioOut) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioOut.Merge(dst, src) +} +func (m *AudioOut) XXX_Size() int { + return xxx_messageInfo_AudioOut.Size(m) +} +func (m *AudioOut) XXX_DiscardUnknown() { + xxx_messageInfo_AudioOut.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioOut proto.InternalMessageInfo + +func (m *AudioOut) GetAudioData() []byte { + if m != nil { + return m.AudioData + } + return nil +} + +// The Assistant's visual output response to query. Enabled by +// `screen_out_config`. +type ScreenOut struct { + // *Output-only* The format of the provided screen data. + Format ScreenOut_Format `protobuf:"varint,1,opt,name=format,proto3,enum=google.assistant.embedded.v1alpha2.ScreenOut_Format" json:"format,omitempty"` + // *Output-only* The raw screen data to be displayed as the result of the + // Assistant query. + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScreenOut) Reset() { *m = ScreenOut{} } +func (m *ScreenOut) String() string { return proto.CompactTextString(m) } +func (*ScreenOut) ProtoMessage() {} +func (*ScreenOut) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{10} +} +func (m *ScreenOut) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScreenOut.Unmarshal(m, b) +} +func (m *ScreenOut) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScreenOut.Marshal(b, m, deterministic) +} +func (dst *ScreenOut) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScreenOut.Merge(dst, src) +} +func (m *ScreenOut) XXX_Size() int { + return xxx_messageInfo_ScreenOut.Size(m) +} +func (m *ScreenOut) XXX_DiscardUnknown() { + xxx_messageInfo_ScreenOut.DiscardUnknown(m) +} + +var xxx_messageInfo_ScreenOut proto.InternalMessageInfo + +func (m *ScreenOut) GetFormat() ScreenOut_Format { + if m != nil { + return m.Format + } + return ScreenOut_FORMAT_UNSPECIFIED +} + +func (m *ScreenOut) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// The response returned to the device if the user has triggered a Device +// Action. For example, a device which supports the query *Turn on the light* +// would receive a `DeviceAction` with a JSON payload containing the semantics +// of the request. +type DeviceAction struct { + // JSON containing the device command response generated from the triggered + // Device Action grammar. The format is given by the + // `action.devices.EXECUTE` intent for a given + // [trait](https://developers.google.com/assistant/sdk/reference/traits/). 
+ DeviceRequestJson string `protobuf:"bytes,1,opt,name=device_request_json,json=deviceRequestJson,proto3" json:"device_request_json,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceAction) Reset() { *m = DeviceAction{} } +func (m *DeviceAction) String() string { return proto.CompactTextString(m) } +func (*DeviceAction) ProtoMessage() {} +func (*DeviceAction) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{11} +} +func (m *DeviceAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceAction.Unmarshal(m, b) +} +func (m *DeviceAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceAction.Marshal(b, m, deterministic) +} +func (dst *DeviceAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceAction.Merge(dst, src) +} +func (m *DeviceAction) XXX_Size() int { + return xxx_messageInfo_DeviceAction.Size(m) +} +func (m *DeviceAction) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceAction.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceAction proto.InternalMessageInfo + +func (m *DeviceAction) GetDeviceRequestJson() string { + if m != nil { + return m.DeviceRequestJson + } + return "" +} + +// The estimated transcription of a phrase the user has spoken. This could be +// a single segment or the full guess of the user's spoken query. +type SpeechRecognitionResult struct { + // *Output-only* Transcript text representing the words that the user spoke. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // *Output-only* An estimate of the likelihood that the Assistant will not + // change its guess about this result. Values range from 0.0 (completely + // unstable) to 1.0 (completely stable and final). The default of 0.0 is a + // sentinel value indicating `stability` was not set. 
+ Stability float32 `protobuf:"fixed32,2,opt,name=stability,proto3" json:"stability,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionResult) Reset() { *m = SpeechRecognitionResult{} } +func (m *SpeechRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionResult) ProtoMessage() {} +func (*SpeechRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{12} +} +func (m *SpeechRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionResult.Unmarshal(m, b) +} +func (m *SpeechRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionResult.Merge(dst, src) +} +func (m *SpeechRecognitionResult) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionResult.Size(m) +} +func (m *SpeechRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionResult proto.InternalMessageInfo + +func (m *SpeechRecognitionResult) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *SpeechRecognitionResult) GetStability() float32 { + if m != nil { + return m.Stability + } + return 0 +} + +// The dialog state resulting from the user's query. Multiple of these messages +// may be received. +type DialogStateOut struct { + // *Output-only* Supplemental display text from the Assistant. This could be + // the same as the speech spoken in `AssistResponse.audio_out` or it could + // be some additional information which aids the user's understanding. + SupplementalDisplayText string `protobuf:"bytes,1,opt,name=supplemental_display_text,json=supplementalDisplayText,proto3" json:"supplemental_display_text,omitempty"` + // *Output-only* State information for the subsequent `Assist` RPC. This + // value should be saved in the client and returned in the + // [`DialogStateIn.conversation_state`](#dialogstatein) field with the next + // `Assist` RPC. (The client does not need to interpret or otherwise use this + // value.) This information should be saved across device reboots. However, + // this value should be cleared (not saved in the client) during a + // factory-default reset. + ConversationState []byte `protobuf:"bytes,2,opt,name=conversation_state,json=conversationState,proto3" json:"conversation_state,omitempty"` + // *Output-only* Specifies the mode of the microphone after this `Assist` + // RPC is processed. + MicrophoneMode DialogStateOut_MicrophoneMode `protobuf:"varint,3,opt,name=microphone_mode,json=microphoneMode,proto3,enum=google.assistant.embedded.v1alpha2.DialogStateOut_MicrophoneMode" json:"microphone_mode,omitempty"` + // *Output-only* Updated volume level. The value will be 0 or omitted + // (indicating no change) unless a voice command such as *Increase the volume* + // or *Set volume level 4* was recognized, in which case the value will be + // between 1 and 100 (corresponding to the new volume level of 1% to 100%). + // Typically, a client should use this volume level when playing the + // `audio_out` data, and retain this value as the current volume level and + // supply it in the `AudioOutConfig` of the next `AssistRequest`. 
(Some + // clients may also implement other ways to allow the current volume level to + // be changed, for example, by providing a knob that the user can turn.) + VolumePercentage int32 `protobuf:"varint,4,opt,name=volume_percentage,json=volumePercentage,proto3" json:"volume_percentage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DialogStateOut) Reset() { *m = DialogStateOut{} } +func (m *DialogStateOut) String() string { return proto.CompactTextString(m) } +func (*DialogStateOut) ProtoMessage() {} +func (*DialogStateOut) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{13} +} +func (m *DialogStateOut) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DialogStateOut.Unmarshal(m, b) +} +func (m *DialogStateOut) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DialogStateOut.Marshal(b, m, deterministic) +} +func (dst *DialogStateOut) XXX_Merge(src proto.Message) { + xxx_messageInfo_DialogStateOut.Merge(dst, src) +} +func (m *DialogStateOut) XXX_Size() int { + return xxx_messageInfo_DialogStateOut.Size(m) +} +func (m *DialogStateOut) XXX_DiscardUnknown() { + xxx_messageInfo_DialogStateOut.DiscardUnknown(m) +} + +var xxx_messageInfo_DialogStateOut proto.InternalMessageInfo + +func (m *DialogStateOut) GetSupplementalDisplayText() string { + if m != nil { + return m.SupplementalDisplayText + } + return "" +} + +func (m *DialogStateOut) GetConversationState() []byte { + if m != nil { + return m.ConversationState + } + return nil +} + +func (m *DialogStateOut) GetMicrophoneMode() DialogStateOut_MicrophoneMode { + if m != nil { + return m.MicrophoneMode + } + return DialogStateOut_MICROPHONE_MODE_UNSPECIFIED +} + +func (m *DialogStateOut) GetVolumePercentage() int32 { + if m != nil { + return m.VolumePercentage + } + return 0 +} + +// Debugging parameters for the current request. +type DebugConfig struct { + // When this field is set to true, the `debug_info` field in `AssistResponse` + // may be populated. However it will significantly increase latency of + // responses. Do not set this field true in production code. 
+ ReturnDebugInfo bool `protobuf:"varint,6,opt,name=return_debug_info,json=returnDebugInfo,proto3" json:"return_debug_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DebugConfig) Reset() { *m = DebugConfig{} } +func (m *DebugConfig) String() string { return proto.CompactTextString(m) } +func (*DebugConfig) ProtoMessage() {} +func (*DebugConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{14} +} +func (m *DebugConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DebugConfig.Unmarshal(m, b) +} +func (m *DebugConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DebugConfig.Marshal(b, m, deterministic) +} +func (dst *DebugConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DebugConfig.Merge(dst, src) +} +func (m *DebugConfig) XXX_Size() int { + return xxx_messageInfo_DebugConfig.Size(m) +} +func (m *DebugConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DebugConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DebugConfig proto.InternalMessageInfo + +func (m *DebugConfig) GetReturnDebugInfo() bool { + if m != nil { + return m.ReturnDebugInfo + } + return false +} + +// There are three sources of locations. They are used with this precedence: +// +// 1. This `DeviceLocation`, which is primarily used for mobile devices with +// GPS . +// 2. Location specified by the user during device setup; this is per-user, per +// device. This location is used if `DeviceLocation` is not specified. +// 3. Inferred location based on IP address. This is used only if neither of the +// above are specified. +type DeviceLocation struct { + // Types that are valid to be assigned to Type: + // *DeviceLocation_Coordinates + Type isDeviceLocation_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceLocation) Reset() { *m = DeviceLocation{} } +func (m *DeviceLocation) String() string { return proto.CompactTextString(m) } +func (*DeviceLocation) ProtoMessage() {} +func (*DeviceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_embedded_assistant_0b559f064bf1f88e, []int{15} +} +func (m *DeviceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceLocation.Unmarshal(m, b) +} +func (m *DeviceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceLocation.Marshal(b, m, deterministic) +} +func (dst *DeviceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceLocation.Merge(dst, src) +} +func (m *DeviceLocation) XXX_Size() int { + return xxx_messageInfo_DeviceLocation.Size(m) +} +func (m *DeviceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceLocation proto.InternalMessageInfo + +type isDeviceLocation_Type interface { + isDeviceLocation_Type() +} + +type DeviceLocation_Coordinates struct { + Coordinates *latlng.LatLng `protobuf:"bytes,1,opt,name=coordinates,proto3,oneof"` +} + +func (*DeviceLocation_Coordinates) isDeviceLocation_Type() {} + +func (m *DeviceLocation) GetType() isDeviceLocation_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *DeviceLocation) GetCoordinates() *latlng.LatLng { + if x, ok := m.GetType().(*DeviceLocation_Coordinates); ok { + return x.Coordinates + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the 
proto package. +func (*DeviceLocation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DeviceLocation_OneofMarshaler, _DeviceLocation_OneofUnmarshaler, _DeviceLocation_OneofSizer, []interface{}{ + (*DeviceLocation_Coordinates)(nil), + } +} + +func _DeviceLocation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DeviceLocation) + // type + switch x := m.Type.(type) { + case *DeviceLocation_Coordinates: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Coordinates); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DeviceLocation.Type has unexpected type %T", x) + } + return nil +} + +func _DeviceLocation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DeviceLocation) + switch tag { + case 1: // type.coordinates + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(latlng.LatLng) + err := b.DecodeMessage(msg) + m.Type = &DeviceLocation_Coordinates{msg} + return true, err + default: + return false, nil + } +} + +func _DeviceLocation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DeviceLocation) + // type + switch x := m.Type.(type) { + case *DeviceLocation_Coordinates: + s := proto.Size(x.Coordinates) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AssistRequest)(nil), "google.assistant.embedded.v1alpha2.AssistRequest") + proto.RegisterType((*AssistResponse)(nil), "google.assistant.embedded.v1alpha2.AssistResponse") + proto.RegisterType((*DebugInfo)(nil), "google.assistant.embedded.v1alpha2.DebugInfo") + proto.RegisterType((*AssistConfig)(nil), "google.assistant.embedded.v1alpha2.AssistConfig") + proto.RegisterType((*AudioInConfig)(nil), "google.assistant.embedded.v1alpha2.AudioInConfig") + proto.RegisterType((*AudioOutConfig)(nil), "google.assistant.embedded.v1alpha2.AudioOutConfig") + proto.RegisterType((*ScreenOutConfig)(nil), "google.assistant.embedded.v1alpha2.ScreenOutConfig") + proto.RegisterType((*DialogStateIn)(nil), "google.assistant.embedded.v1alpha2.DialogStateIn") + proto.RegisterType((*DeviceConfig)(nil), "google.assistant.embedded.v1alpha2.DeviceConfig") + proto.RegisterType((*AudioOut)(nil), "google.assistant.embedded.v1alpha2.AudioOut") + proto.RegisterType((*ScreenOut)(nil), "google.assistant.embedded.v1alpha2.ScreenOut") + proto.RegisterType((*DeviceAction)(nil), "google.assistant.embedded.v1alpha2.DeviceAction") + proto.RegisterType((*SpeechRecognitionResult)(nil), "google.assistant.embedded.v1alpha2.SpeechRecognitionResult") + proto.RegisterType((*DialogStateOut)(nil), "google.assistant.embedded.v1alpha2.DialogStateOut") + proto.RegisterType((*DebugConfig)(nil), "google.assistant.embedded.v1alpha2.DebugConfig") + proto.RegisterType((*DeviceLocation)(nil), "google.assistant.embedded.v1alpha2.DeviceLocation") + proto.RegisterEnum("google.assistant.embedded.v1alpha2.AssistResponse_EventType", AssistResponse_EventType_name, AssistResponse_EventType_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha2.AudioInConfig_Encoding", AudioInConfig_Encoding_name, AudioInConfig_Encoding_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha2.AudioOutConfig_Encoding", AudioOutConfig_Encoding_name, 
AudioOutConfig_Encoding_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha2.ScreenOutConfig_ScreenMode", ScreenOutConfig_ScreenMode_name, ScreenOutConfig_ScreenMode_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha2.ScreenOut_Format", ScreenOut_Format_name, ScreenOut_Format_value) + proto.RegisterEnum("google.assistant.embedded.v1alpha2.DialogStateOut_MicrophoneMode", DialogStateOut_MicrophoneMode_name, DialogStateOut_MicrophoneMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EmbeddedAssistantClient is the client API for EmbeddedAssistant service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EmbeddedAssistantClient interface { + // Initiates or continues a conversation with the embedded Assistant Service. + // Each call performs one round-trip, sending an audio request to the service + // and receiving the audio response. Uses bidirectional streaming to receive + // results, such as the `END_OF_UTTERANCE` event, while sending audio. + // + // A conversation is one or more gRPC connections, each consisting of several + // streamed requests and responses. + // For example, the user says *Add to my shopping list* and the Assistant + // responds *What do you want to add?*. The sequence of streamed requests and + // responses in the first gRPC message could be: + // + // * AssistRequest.config + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistResponse.event_type.END_OF_UTTERANCE + // * AssistResponse.speech_results.transcript "add to my shopping list" + // * AssistResponse.dialog_state_out.microphone_mode.DIALOG_FOLLOW_ON + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // + // + // The user then says *bagels* and the Assistant responds + // *OK, I've added bagels to your shopping list*. This is sent as another gRPC + // connection call to the `Assist` method, again with streamed requests and + // responses, such as: + // + // * AssistRequest.config + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistResponse.event_type.END_OF_UTTERANCE + // * AssistResponse.dialog_state_out.microphone_mode.CLOSE_MICROPHONE + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // + // Although the precise order of responses is not guaranteed, sequential + // `AssistResponse.audio_out` messages will always contain sequential portions + // of audio. + Assist(ctx context.Context, opts ...grpc.CallOption) (EmbeddedAssistant_AssistClient, error) +} + +type embeddedAssistantClient struct { + cc *grpc.ClientConn +} + +func NewEmbeddedAssistantClient(cc *grpc.ClientConn) EmbeddedAssistantClient { + return &embeddedAssistantClient{cc} +} + +func (c *embeddedAssistantClient) Assist(ctx context.Context, opts ...grpc.CallOption) (EmbeddedAssistant_AssistClient, error) { + stream, err := c.cc.NewStream(ctx, &_EmbeddedAssistant_serviceDesc.Streams[0], "/google.assistant.embedded.v1alpha2.EmbeddedAssistant/Assist", opts...) 
+ if err != nil { + return nil, err + } + x := &embeddedAssistantAssistClient{stream} + return x, nil +} + +type EmbeddedAssistant_AssistClient interface { + Send(*AssistRequest) error + Recv() (*AssistResponse, error) + grpc.ClientStream +} + +type embeddedAssistantAssistClient struct { + grpc.ClientStream +} + +func (x *embeddedAssistantAssistClient) Send(m *AssistRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *embeddedAssistantAssistClient) Recv() (*AssistResponse, error) { + m := new(AssistResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// EmbeddedAssistantServer is the server API for EmbeddedAssistant service. +type EmbeddedAssistantServer interface { + // Initiates or continues a conversation with the embedded Assistant Service. + // Each call performs one round-trip, sending an audio request to the service + // and receiving the audio response. Uses bidirectional streaming to receive + // results, such as the `END_OF_UTTERANCE` event, while sending audio. + // + // A conversation is one or more gRPC connections, each consisting of several + // streamed requests and responses. + // For example, the user says *Add to my shopping list* and the Assistant + // responds *What do you want to add?*. The sequence of streamed requests and + // responses in the first gRPC message could be: + // + // * AssistRequest.config + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistResponse.event_type.END_OF_UTTERANCE + // * AssistResponse.speech_results.transcript "add to my shopping list" + // * AssistResponse.dialog_state_out.microphone_mode.DIALOG_FOLLOW_ON + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // + // + // The user then says *bagels* and the Assistant responds + // *OK, I've added bagels to your shopping list*. This is sent as another gRPC + // connection call to the `Assist` method, again with streamed requests and + // responses, such as: + // + // * AssistRequest.config + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistRequest.audio_in + // * AssistResponse.event_type.END_OF_UTTERANCE + // * AssistResponse.dialog_state_out.microphone_mode.CLOSE_MICROPHONE + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // * AssistResponse.audio_out + // + // Although the precise order of responses is not guaranteed, sequential + // `AssistResponse.audio_out` messages will always contain sequential portions + // of audio. 
+ Assist(EmbeddedAssistant_AssistServer) error +} + +func RegisterEmbeddedAssistantServer(s *grpc.Server, srv EmbeddedAssistantServer) { + s.RegisterService(&_EmbeddedAssistant_serviceDesc, srv) +} + +func _EmbeddedAssistant_Assist_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(EmbeddedAssistantServer).Assist(&embeddedAssistantAssistServer{stream}) +} + +type EmbeddedAssistant_AssistServer interface { + Send(*AssistResponse) error + Recv() (*AssistRequest, error) + grpc.ServerStream +} + +type embeddedAssistantAssistServer struct { + grpc.ServerStream +} + +func (x *embeddedAssistantAssistServer) Send(m *AssistResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *embeddedAssistantAssistServer) Recv() (*AssistRequest, error) { + m := new(AssistRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _EmbeddedAssistant_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.assistant.embedded.v1alpha2.EmbeddedAssistant", + HandlerType: (*EmbeddedAssistantServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Assist", + Handler: _EmbeddedAssistant_Assist_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/assistant/embedded/v1alpha2/embedded_assistant.proto", +} + +func init() { + proto.RegisterFile("google/assistant/embedded/v1alpha2/embedded_assistant.proto", fileDescriptor_embedded_assistant_0b559f064bf1f88e) +} + +var fileDescriptor_embedded_assistant_0b559f064bf1f88e = []byte{ + // 1449 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x41, 0x73, 0xdb, 0xc6, + 0x15, 0x16, 0x28, 0x8a, 0x22, 0x1f, 0x29, 0x92, 0x5a, 0x7b, 0x6c, 0x56, 0x72, 0x6b, 0x0d, 0x3a, + 0xe3, 0x51, 0xdd, 0x9a, 0xb4, 0xe8, 0x4e, 0x3b, 0xb5, 0x5c, 0x77, 0x68, 0x12, 0x94, 0xe0, 0x52, + 0x04, 0x0d, 0x52, 0xf6, 0xb8, 0x6e, 0x07, 0xb3, 0x22, 0x56, 0x10, 0x3c, 0xe0, 0x2e, 0x0d, 0x2c, + 0x65, 0xab, 0xa7, 0x1c, 0x73, 0x4b, 0xae, 0xb9, 0xe6, 0x9c, 0x5b, 0x7e, 0x49, 0x8e, 0xf9, 0x17, + 0x99, 0xc9, 0x1f, 0xc8, 0xec, 0x62, 0x41, 0x91, 0x8a, 0x95, 0x90, 0xce, 0x0d, 0xfb, 0xde, 0xbe, + 0x0f, 0x0f, 0xbb, 0xdf, 0xf7, 0xde, 0x03, 0xec, 0x7b, 0x8c, 0x79, 0x01, 0xa9, 0xe1, 0x28, 0xf2, + 0x23, 0x8e, 0x29, 0xaf, 0x91, 0xd1, 0x09, 0x71, 0x5d, 0xe2, 0xd6, 0xce, 0xf7, 0x70, 0x30, 0x3e, + 0xc3, 0xf5, 0xa9, 0xc5, 0x99, 0x6e, 0xaa, 0x8e, 0x43, 0xc6, 0x19, 0xd2, 0xe3, 0xe0, 0xea, 0xa5, + 0x3d, 0xd9, 0x5a, 0x4d, 0x82, 0xb7, 0xee, 0x24, 0x2f, 0x18, 0xfb, 0x35, 0x4c, 0x29, 0xe3, 0x98, + 0xfb, 0x8c, 0x46, 0x31, 0xc2, 0x56, 0x45, 0x79, 0xf9, 0xc5, 0x98, 0xd4, 0x02, 0xcc, 0x03, 0xea, + 0xc5, 0x1e, 0xfd, 0x33, 0x0d, 0x36, 0x1a, 0x12, 0xd7, 0x26, 0xef, 0x26, 0x24, 0xe2, 0xe8, 0x39, + 0x64, 0x86, 0x8c, 0x9e, 0xfa, 0x5e, 0x45, 0xdb, 0xd1, 0x76, 0xf3, 0xf5, 0x87, 0xd5, 0x5f, 0x7f, + 0x7d, 0x35, 0x86, 0x68, 0xca, 0xb8, 0xc3, 0x15, 0x5b, 0x21, 0xa0, 0x6d, 0xc8, 0xe2, 0x89, 0xeb, + 0x33, 0xc7, 0xa7, 0x95, 0xd4, 0x8e, 0xb6, 0x5b, 0x38, 0x5c, 0xb1, 0xd7, 0xa5, 0xc5, 0xa4, 0xcf, + 0x32, 0x90, 0x16, 0xf9, 0xe8, 0xdf, 0xac, 0x41, 0x31, 0x49, 0x21, 0x1a, 0x33, 0x1a, 0x11, 0xf4, + 0x06, 0x80, 0x9c, 0x13, 0xca, 0x1d, 0xb1, 0x41, 0xe6, 0x51, 0xac, 0x3f, 0x59, 0x3c, 0x8f, 0x04, + 0xa7, 0x6a, 0x08, 0x90, 0xc1, 0xc5, 0x98, 0xd8, 0x39, 0x92, 0x3c, 0x22, 0x13, 0x72, 0x71, 0x52, + 0x6c, 0xc2, 0x2b, 0xab, 0xf2, 0x1b, 0xff, 0xb2, 0x10, 0xb6, 0x08, 0xb2, 0x26, 0xdc, 0x8e, 0xbf, + 0xc9, 0x9a, 0x70, 0xd4, 0x01, 0x88, 0x86, 0x21, 0x21, 0x54, 0x62, 0xa5, 
0x25, 0xd6, 0x83, 0x45, + 0xb0, 0xfa, 0x32, 0x4a, 0x80, 0xe5, 0xa2, 0xe4, 0x11, 0x1d, 0xc3, 0x86, 0x4b, 0xce, 0xfd, 0x21, + 0x71, 0xf0, 0x50, 0xdc, 0x5e, 0x25, 0xb3, 0xf8, 0x05, 0xb4, 0x64, 0x60, 0x43, 0xc6, 0xd9, 0x05, + 0x77, 0x66, 0x85, 0x4e, 0xa0, 0x18, 0x8d, 0x09, 0x19, 0x9e, 0x39, 0x21, 0x89, 0x26, 0x01, 0x8f, + 0x2a, 0xa9, 0x9d, 0xd5, 0xdd, 0x7c, 0x7d, 0x7f, 0xa1, 0x44, 0x65, 0xa4, 0x4d, 0x86, 0xcc, 0xa3, + 0xbe, 0x04, 0x97, 0x18, 0xf6, 0x46, 0xa4, 0x1c, 0x12, 0x11, 0xfd, 0x17, 0xca, 0xae, 0x8f, 0x03, + 0xe6, 0x39, 0x11, 0xc7, 0x9c, 0xc8, 0xe3, 0x58, 0x93, 0xd9, 0xd7, 0x17, 0xca, 0x5e, 0xc6, 0xf6, + 0x45, 0xa8, 0x38, 0x93, 0xa2, 0x3b, 0xb7, 0x16, 0xc7, 0xec, 0x92, 0x93, 0x89, 0xe7, 0xf8, 0xf4, + 0x94, 0x55, 0xb2, 0x8b, 0x1f, 0x73, 0x4b, 0x44, 0x99, 0xf4, 0x94, 0xd9, 0x39, 0x37, 0x79, 0xd4, + 0xff, 0x09, 0xb9, 0x29, 0x2f, 0xd0, 0x16, 0xdc, 0x32, 0x5e, 0x1a, 0xdd, 0x81, 0x33, 0x78, 0xdd, + 0x33, 0x9c, 0xe3, 0x6e, 0xbf, 0x67, 0x34, 0xcd, 0xb6, 0x69, 0xb4, 0xca, 0x2b, 0xe8, 0x26, 0x94, + 0x8d, 0x6e, 0xcb, 0xb1, 0xda, 0xce, 0xf1, 0x60, 0x60, 0xd8, 0x8d, 0x6e, 0xd3, 0x28, 0x6b, 0xba, + 0x09, 0xb9, 0x29, 0x2c, 0x7a, 0x02, 0xdb, 0x98, 0x79, 0x0e, 0xf6, 0x24, 0x59, 0xd9, 0xa5, 0x74, + 0x9d, 0xb7, 0x11, 0xa3, 0x92, 0xb9, 0x39, 0xfb, 0x36, 0x66, 0x5e, 0x43, 0xec, 0x18, 0xb0, 0x46, + 0xe2, 0x7f, 0x1e, 0x31, 0xaa, 0x7f, 0x9f, 0x86, 0xc2, 0xac, 0x72, 0xd0, 0x1b, 0x28, 0x25, 0x7a, + 0x71, 0xe6, 0x44, 0xb8, 0xb7, 0x30, 0x41, 0x4d, 0x3a, 0x55, 0xe1, 0x06, 0x9e, 0x35, 0xa0, 0xbb, + 0x00, 0x9c, 0x7c, 0xe0, 0xce, 0xbb, 0x09, 0x09, 0x2f, 0x24, 0xb7, 0x72, 0x87, 0x2b, 0x76, 0x4e, + 0xd8, 0x5e, 0x08, 0x93, 0xb8, 0xc4, 0xa9, 0x30, 0x92, 0xd7, 0xa7, 0x16, 0xbf, 0xc4, 0x44, 0x1f, + 0xf1, 0xeb, 0xec, 0x22, 0x9e, 0x5b, 0x23, 0x07, 0x36, 0x2f, 0xb5, 0x92, 0xc0, 0xc7, 0x77, 0xf9, + 0x68, 0x29, 0xc9, 0x28, 0xfc, 0x52, 0x34, 0x6f, 0x40, 0xaf, 0xa1, 0x34, 0xc7, 0x41, 0x9f, 0x2a, + 0x75, 0xef, 0x2d, 0x49, 0x41, 0x93, 0xda, 0x1b, 0xee, 0xec, 0x72, 0x46, 0x99, 0x2a, 0xef, 0xf4, + 0xb2, 0xca, 0x54, 0x49, 0x2b, 0x65, 0xaa, 0x8c, 0x6d, 0x28, 0xc4, 0xbc, 0x56, 0xa8, 0xb1, 0x62, + 0x6a, 0x0b, 0x33, 0x5b, 0x81, 0xe6, 0xdd, 0xcb, 0xc5, 0xb4, 0xaa, 0x7e, 0x27, 0x0a, 0xfb, 0xdc, + 0xfd, 0xbf, 0x84, 0x2c, 0xa1, 0x43, 0xe6, 0xfa, 0xd4, 0x53, 0x25, 0xf5, 0xf1, 0xd2, 0xac, 0xaa, + 0x1a, 0x0a, 0xc1, 0x9e, 0x62, 0xa1, 0xfb, 0xb0, 0x19, 0xe1, 0xd1, 0x38, 0x20, 0x4e, 0x28, 0x8e, + 0xfd, 0x8c, 0x84, 0xfc, 0xff, 0x92, 0x37, 0x6b, 0x76, 0x29, 0x76, 0xd8, 0x98, 0x93, 0x43, 0x61, + 0xd6, 0x9f, 0x40, 0x36, 0x41, 0x40, 0x15, 0xb8, 0x69, 0x74, 0x9b, 0x56, 0xcb, 0xec, 0x1e, 0x5c, + 0x11, 0x5e, 0x01, 0xb2, 0x1d, 0xb3, 0x6b, 0x34, 0xec, 0xbd, 0xbf, 0x95, 0x35, 0x94, 0x85, 0x74, + 0xbb, 0xd3, 0x68, 0x96, 0x53, 0xfa, 0x97, 0x29, 0x28, 0xce, 0xb3, 0x0c, 0xbd, 0xfa, 0xd9, 0x47, + 0xed, 0x2f, 0xcf, 0xd5, 0xdf, 0xf8, 0x55, 0xe8, 0xcf, 0xb0, 0x79, 0xce, 0x82, 0xc9, 0x88, 0x38, + 0x63, 0x12, 0x0e, 0x09, 0xe5, 0xd8, 0x23, 0x92, 0x7b, 0x6b, 0x76, 0x39, 0x76, 0xf4, 0xa6, 0x76, + 0xbd, 0xf3, 0x09, 0x47, 0xb0, 0x0e, 0xab, 0x47, 0xbd, 0x47, 0xe5, 0x14, 0x2a, 0x41, 0xde, 0xea, + 0x1d, 0xf7, 0x1d, 0xb3, 0xeb, 0x58, 0x07, 0x07, 0xe5, 0x55, 0xfd, 0x5b, 0x0d, 0x4a, 0x57, 0x94, + 0x81, 0x1c, 0xc8, 0x2b, 0xa5, 0x8d, 0x98, 0x9b, 0xb4, 0xcf, 0xa7, 0x9f, 0xa0, 0x31, 0xb5, 0x3e, + 0x62, 0x2e, 0xb1, 0x55, 0xa3, 0x13, 0xcf, 0xfa, 0xbf, 0x00, 0x2e, 0x3d, 0x68, 0x1b, 0x6e, 0xf7, + 0x9b, 0xb6, 0x61, 0x74, 0x9d, 0x23, 0xab, 0x75, 0xb5, 0x86, 0xae, 0xc3, 0xaa, 0xd5, 0x6e, 0x97, + 0x35, 0x94, 0x87, 0xf5, 0x5e, 0xa7, 0xf1, 0xda, 0xec, 0x8a, 0xac, 0x7f, 0xd0, 0x60, 0x63, 0x4e, + 
0x70, 0xe8, 0x01, 0xa0, 0x21, 0xa3, 0xe7, 0x24, 0x8c, 0xe4, 0xe0, 0x12, 0x4b, 0x58, 0xa6, 0x5e, + 0xb0, 0x37, 0x67, 0x3d, 0x32, 0x00, 0xfd, 0x11, 0x36, 0x02, 0x4c, 0xbd, 0x09, 0xf6, 0x84, 0x24, + 0x5d, 0x22, 0x6f, 0x26, 0x67, 0x17, 0x12, 0x63, 0x53, 0x24, 0xf6, 0x06, 0x4a, 0x4a, 0xb5, 0x01, + 0x1b, 0xca, 0xe0, 0xa5, 0x7a, 0x92, 0x0c, 0xed, 0xa8, 0x48, 0xbb, 0xe8, 0xce, 0xad, 0x51, 0x15, + 0x6e, 0xf8, 0x91, 0x43, 0xc9, 0x7b, 0x67, 0x36, 0xbb, 0xca, 0xfa, 0x8e, 0xb6, 0x9b, 0xb5, 0x37, + 0xfd, 0xa8, 0x4b, 0xde, 0x37, 0x67, 0x1c, 0x7a, 0x1f, 0x0a, 0xb3, 0x95, 0x00, 0x6d, 0x43, 0x4e, + 0x25, 0xe7, 0xbb, 0xaa, 0x4f, 0x64, 0x63, 0x83, 0xe9, 0xa2, 0x7b, 0xd3, 0xcc, 0xc5, 0x0d, 0x06, + 0x62, 0xcb, 0xaa, 0xdc, 0xa2, 0xca, 0x90, 0x38, 0xf7, 0xc0, 0x74, 0xf5, 0x3f, 0x41, 0x36, 0x61, + 0x32, 0xfa, 0x3d, 0x40, 0x5c, 0xbd, 0x5d, 0xcc, 0xb1, 0x3a, 0xb9, 0x78, 0xd0, 0x69, 0x61, 0x8e, + 0xf5, 0xaf, 0x34, 0xc8, 0x4d, 0xaf, 0x17, 0x75, 0x20, 0x73, 0xca, 0xc2, 0x11, 0xe6, 0x8a, 0x1d, + 0x7f, 0x5d, 0x8a, 0x1d, 0xd5, 0xb6, 0x8c, 0xb5, 0x15, 0x06, 0x42, 0x90, 0x96, 0x2f, 0x95, 0x23, + 0x9e, 0x2d, 0x9f, 0xf5, 0xfb, 0x90, 0x89, 0x77, 0xa1, 0x5b, 0x80, 0xda, 0x96, 0x7d, 0xd4, 0x18, + 0x5c, 0xa1, 0x46, 0x16, 0xd2, 0x87, 0x83, 0xa3, 0x4e, 0x59, 0xd3, 0x9f, 0x26, 0x67, 0xa3, 0x26, + 0x96, 0x2a, 0xdc, 0x50, 0x9f, 0x1f, 0xc6, 0x43, 0xe9, 0x6c, 0x37, 0xdd, 0x8c, 0x5d, 0x6a, 0x5c, + 0x95, 0x7d, 0xf4, 0x15, 0xdc, 0xbe, 0x66, 0x4e, 0x41, 0x7f, 0x00, 0xe0, 0x21, 0xa6, 0xd1, 0x30, + 0xf4, 0xc7, 0x5c, 0x21, 0xcc, 0x58, 0xd0, 0x1d, 0xc8, 0x45, 0x1c, 0x9f, 0xf8, 0x81, 0xcf, 0x2f, + 0x64, 0xfe, 0x29, 0xfb, 0xd2, 0xa0, 0xff, 0x98, 0x82, 0xe2, 0xfc, 0x6c, 0x82, 0x1e, 0xc3, 0xef, + 0xa2, 0xc9, 0x78, 0x1c, 0x90, 0x91, 0xd0, 0x73, 0xe0, 0xb8, 0x7e, 0x34, 0x0e, 0xf0, 0x85, 0x23, + 0xda, 0x68, 0xd2, 0xef, 0x67, 0x37, 0xb4, 0x62, 0xff, 0x80, 0x7c, 0xe0, 0xd7, 0x90, 0x3c, 0x75, + 0x1d, 0xc9, 0xdf, 0x42, 0x69, 0xe4, 0x0f, 0x43, 0x36, 0x3e, 0x63, 0x34, 0x66, 0x82, 0x64, 0x41, + 0xb1, 0xde, 0x58, 0x7e, 0xa6, 0xaa, 0x1e, 0x4d, 0x91, 0xa4, 0x9c, 0x8b, 0xa3, 0xb9, 0xf5, 0xc7, + 0x4b, 0x58, 0xfa, 0x9a, 0x12, 0xf6, 0x3f, 0x28, 0xce, 0xc3, 0xa1, 0xbb, 0xb0, 0x7d, 0x64, 0x36, + 0x6d, 0xab, 0x77, 0x68, 0x75, 0x8d, 0x8f, 0xd5, 0x81, 0x9b, 0x50, 0x6e, 0x76, 0xac, 0xbe, 0xe1, + 0x5c, 0x6e, 0x2b, 0x6b, 0xc2, 0xda, 0x32, 0x1b, 0x1d, 0xeb, 0xc0, 0x69, 0x5b, 0x9d, 0x8e, 0xf5, + 0xca, 0xb1, 0xba, 0xe5, 0x94, 0xfe, 0x0f, 0xc8, 0xcf, 0xb4, 0x37, 0x51, 0x89, 0x43, 0xc2, 0x27, + 0x21, 0x75, 0x66, 0x86, 0xc0, 0x8c, 0xd4, 0x59, 0x29, 0x76, 0x4c, 0xe7, 0x31, 0xfd, 0x05, 0x14, + 0xe7, 0x75, 0x8b, 0xfe, 0x0e, 0xf9, 0x21, 0x63, 0xa1, 0xeb, 0x53, 0xcc, 0x49, 0xa4, 0xc6, 0xa9, + 0x1b, 0xc9, 0x01, 0x8a, 0x56, 0x59, 0xed, 0x60, 0xde, 0xa1, 0x62, 0x60, 0x9a, 0xdd, 0x99, 0x34, + 0xd2, 0xfa, 0xe7, 0x1a, 0x6c, 0x1a, 0xea, 0x74, 0xa7, 0xe3, 0x1b, 0x8a, 0x20, 0x13, 0x2f, 0xd0, + 0xde, 0x32, 0xff, 0x25, 0x92, 0xb3, 0x5b, 0xf5, 0xe5, 0x7f, 0x65, 0x76, 0xb5, 0x87, 0xda, 0xb3, + 0x2f, 0x34, 0xb8, 0x37, 0x64, 0xa3, 0x05, 0xa2, 0x9f, 0x15, 0xa7, 0xa9, 0xf6, 0xc4, 0x7f, 0x5e, + 0x4f, 0xfb, 0xcf, 0x73, 0x15, 0xe5, 0x31, 0x51, 0x24, 0xab, 0x2c, 0xf4, 0x6a, 0x1e, 0xa1, 0xf2, + 0x2f, 0xb0, 0x16, 0xbb, 0xf0, 0xd8, 0x8f, 0x7e, 0xe9, 0x0f, 0x75, 0x3f, 0xb1, 0x7c, 0x9d, 0xca, + 0x34, 0xfa, 0x83, 0x7e, 0xeb, 0xdf, 0x27, 0x19, 0x19, 0xff, 0xe8, 0xa7, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xb9, 0x08, 0x29, 0x0f, 0xe0, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_data.pb.go 
b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_data.pb.go new file mode 100644 index 0000000..393b802 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_data.pb.go @@ -0,0 +1,276 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto + +package cluster // import "google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StorageType int32 + +const ( + // The storage type used is unspecified. + StorageType_STORAGE_UNSPECIFIED StorageType = 0 + // Data will be stored in SSD, providing low and consistent latencies. + StorageType_STORAGE_SSD StorageType = 1 + // Data will be stored in HDD, providing high and less predictable + // latencies. + StorageType_STORAGE_HDD StorageType = 2 +) + +var StorageType_name = map[int32]string{ + 0: "STORAGE_UNSPECIFIED", + 1: "STORAGE_SSD", + 2: "STORAGE_HDD", +} +var StorageType_value = map[string]int32{ + "STORAGE_UNSPECIFIED": 0, + "STORAGE_SSD": 1, + "STORAGE_HDD": 2, +} + +func (x StorageType) String() string { + return proto.EnumName(StorageType_name, int32(x)) +} +func (StorageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_data_ec90ed011929fd01, []int{0} +} + +// Possible states of a zone. +type Zone_Status int32 + +const ( + // The state of the zone is unknown or unspecified. + Zone_UNKNOWN Zone_Status = 0 + // The zone is in a good state. + Zone_OK Zone_Status = 1 + // The zone is down for planned maintenance. + Zone_PLANNED_MAINTENANCE Zone_Status = 2 + // The zone is down for emergency or unplanned maintenance. + Zone_EMERGENCY_MAINENANCE Zone_Status = 3 +) + +var Zone_Status_name = map[int32]string{ + 0: "UNKNOWN", + 1: "OK", + 2: "PLANNED_MAINTENANCE", + 3: "EMERGENCY_MAINENANCE", +} +var Zone_Status_value = map[string]int32{ + "UNKNOWN": 0, + "OK": 1, + "PLANNED_MAINTENANCE": 2, + "EMERGENCY_MAINENANCE": 3, +} + +func (x Zone_Status) String() string { + return proto.EnumName(Zone_Status_name, int32(x)) +} +func (Zone_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_data_ec90ed011929fd01, []int{0, 0} +} + +// A physical location in which a particular project can allocate Cloud BigTable +// resources. +type Zone struct { + // A permanent unique identifier for the zone. + // Values are of the form projects//zones/[a-z][-a-z0-9]* + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of this zone as it appears in UIs. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The current state of this zone. 
+ Status Zone_Status `protobuf:"varint,3,opt,name=status,proto3,enum=google.bigtable.admin.cluster.v1.Zone_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Zone) Reset() { *m = Zone{} } +func (m *Zone) String() string { return proto.CompactTextString(m) } +func (*Zone) ProtoMessage() {} +func (*Zone) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_data_ec90ed011929fd01, []int{0} +} +func (m *Zone) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Zone.Unmarshal(m, b) +} +func (m *Zone) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Zone.Marshal(b, m, deterministic) +} +func (dst *Zone) XXX_Merge(src proto.Message) { + xxx_messageInfo_Zone.Merge(dst, src) +} +func (m *Zone) XXX_Size() int { + return xxx_messageInfo_Zone.Size(m) +} +func (m *Zone) XXX_DiscardUnknown() { + xxx_messageInfo_Zone.DiscardUnknown(m) +} + +var xxx_messageInfo_Zone proto.InternalMessageInfo + +func (m *Zone) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Zone) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Zone) GetStatus() Zone_Status { + if m != nil { + return m.Status + } + return Zone_UNKNOWN +} + +// An isolated set of Cloud BigTable resources on which tables can be hosted. +type Cluster struct { + // A permanent unique identifier for the cluster. For technical reasons, the + // zone in which the cluster resides is included here. + // Values are of the form + // projects//zones//clusters/[a-z][-a-z0-9]* + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The operation currently running on the cluster, if any. + // This cannot be set directly, only through CreateCluster, UpdateCluster, + // or UndeleteCluster. Calls to these methods will be rejected if + // "current_operation" is already set. + CurrentOperation *longrunning.Operation `protobuf:"bytes,3,opt,name=current_operation,json=currentOperation,proto3" json:"current_operation,omitempty"` + // The descriptive name for this cluster as it appears in UIs. + // Must be unique per zone. + DisplayName string `protobuf:"bytes,4,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The number of serve nodes allocated to this cluster. + ServeNodes int32 `protobuf:"varint,5,opt,name=serve_nodes,json=serveNodes,proto3" json:"serve_nodes,omitempty"` + // What storage type to use for tables in this cluster. Only configurable at + // cluster creation time. If unspecified, STORAGE_SSD will be used. 
+ DefaultStorageType StorageType `protobuf:"varint,8,opt,name=default_storage_type,json=defaultStorageType,proto3,enum=google.bigtable.admin.cluster.v1.StorageType" json:"default_storage_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_data_ec90ed011929fd01, []int{1} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetCurrentOperation() *longrunning.Operation { + if m != nil { + return m.CurrentOperation + } + return nil +} + +func (m *Cluster) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Cluster) GetServeNodes() int32 { + if m != nil { + return m.ServeNodes + } + return 0 +} + +func (m *Cluster) GetDefaultStorageType() StorageType { + if m != nil { + return m.DefaultStorageType + } + return StorageType_STORAGE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Zone)(nil), "google.bigtable.admin.cluster.v1.Zone") + proto.RegisterType((*Cluster)(nil), "google.bigtable.admin.cluster.v1.Cluster") + proto.RegisterEnum("google.bigtable.admin.cluster.v1.StorageType", StorageType_name, StorageType_value) + proto.RegisterEnum("google.bigtable.admin.cluster.v1.Zone_Status", Zone_Status_name, Zone_Status_value) +} + +func init() { + proto.RegisterFile("google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto", fileDescriptor_bigtable_cluster_data_ec90ed011929fd01) +} + +var fileDescriptor_bigtable_cluster_data_ec90ed011929fd01 = []byte{ + // 493 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xd1, 0x6e, 0xd3, 0x3c, + 0x1c, 0xc5, 0x97, 0xae, 0xeb, 0xbe, 0xcf, 0x41, 0x10, 0xcc, 0x24, 0xa2, 0x09, 0xb4, 0x52, 0xb8, + 0xa8, 0x90, 0x70, 0xb4, 0x71, 0x09, 0x37, 0x6d, 0x63, 0xba, 0x32, 0xe6, 0x56, 0x49, 0x27, 0xc4, + 0x6e, 0x2c, 0xb7, 0xf5, 0xac, 0x48, 0xa9, 0x1d, 0xc5, 0x4e, 0xa5, 0x3e, 0x03, 0x12, 0x8f, 0xc7, + 0xf3, 0xa0, 0x3a, 0x6e, 0x55, 0x34, 0xd0, 0xb8, 0xb3, 0xcf, 0x39, 0x3f, 0xbb, 0xff, 0x53, 0x07, + 0x7c, 0x14, 0x4a, 0x89, 0x9c, 0x47, 0xb3, 0x4c, 0x18, 0x36, 0xcb, 0x79, 0xc4, 0x16, 0xcb, 0x4c, + 0x46, 0xf3, 0xbc, 0xd2, 0x86, 0x97, 0xd1, 0xea, 0x7c, 0xe7, 0x50, 0xa7, 0xd1, 0x05, 0x33, 0x0c, + 0x15, 0xa5, 0x32, 0x0a, 0xb6, 0x6b, 0x1a, 0x6d, 0x33, 0xc8, 0xd2, 0xc8, 0x25, 0xd1, 0xea, 0xfc, + 0xf4, 0x85, 0x3b, 0x9f, 0x15, 0x59, 0xc4, 0xa4, 0x54, 0x86, 0x99, 0x4c, 0x49, 0x5d, 0xf3, 0xa7, + 0xaf, 0x9d, 0x9b, 0x2b, 0x29, 0xca, 0x4a, 0xca, 0x4c, 0x8a, 0x48, 0x15, 0xbc, 0xfc, 0x2d, 0x74, + 0xe6, 0x42, 0x76, 0x37, 0xab, 0xee, 0x22, 0x93, 0x2d, 0xb9, 0x36, 0x6c, 0x59, 0xd4, 0x81, 0xce, + 0x4f, 0x0f, 0x34, 0x6f, 0x95, 0xe4, 0x10, 0x82, 0xa6, 
0x64, 0x4b, 0x1e, 0x7a, 0x6d, 0xaf, 0xfb, + 0x7f, 0x62, 0xd7, 0xf0, 0x15, 0x78, 0xb4, 0xc8, 0x74, 0x91, 0xb3, 0x35, 0xb5, 0x5e, 0xc3, 0x7a, + 0xbe, 0xd3, 0xc8, 0x26, 0x82, 0x41, 0x4b, 0x1b, 0x66, 0x2a, 0x1d, 0x1e, 0xb6, 0xbd, 0xee, 0xe3, + 0x8b, 0x77, 0xe8, 0xa1, 0xb1, 0xd0, 0xe6, 0x3a, 0x94, 0x5a, 0x28, 0x71, 0x70, 0x67, 0x02, 0x5a, + 0xb5, 0x02, 0x7d, 0x70, 0x7c, 0x43, 0xae, 0xc8, 0xf8, 0x2b, 0x09, 0x0e, 0x60, 0x0b, 0x34, 0xc6, + 0x57, 0x81, 0x07, 0x9f, 0x83, 0x67, 0x93, 0x2f, 0x3d, 0x42, 0x70, 0x4c, 0xaf, 0x7b, 0x23, 0x32, + 0xc5, 0xa4, 0x47, 0x06, 0x38, 0x68, 0xc0, 0x10, 0x9c, 0xe0, 0x6b, 0x9c, 0x0c, 0x31, 0x19, 0x7c, + 0xb3, 0x96, 0x73, 0x0e, 0x3b, 0x3f, 0x1a, 0xe0, 0x78, 0x50, 0x5f, 0xfa, 0xc7, 0xd9, 0x3e, 0x83, + 0xa7, 0xf3, 0xaa, 0x2c, 0xb9, 0x34, 0x74, 0xd7, 0x9a, 0x9d, 0xc1, 0xbf, 0x78, 0xb9, 0x9d, 0x61, + 0xaf, 0x5a, 0x34, 0xde, 0x86, 0x92, 0xc0, 0x71, 0x3b, 0xe5, 0x5e, 0x4f, 0xcd, 0xfb, 0x3d, 0x9d, + 0x01, 0x5f, 0xf3, 0x72, 0xc5, 0xa9, 0x54, 0x0b, 0xae, 0xc3, 0xa3, 0xb6, 0xd7, 0x3d, 0x4a, 0x80, + 0x95, 0xc8, 0x46, 0x81, 0x14, 0x9c, 0x2c, 0xf8, 0x1d, 0xab, 0x72, 0x43, 0xb5, 0x51, 0x25, 0x13, + 0x9c, 0x9a, 0x75, 0xc1, 0xc3, 0xff, 0xfe, 0xb5, 0xd6, 0xb4, 0xa6, 0xa6, 0xeb, 0x82, 0x27, 0xd0, + 0x1d, 0xb5, 0xa7, 0xbd, 0xbd, 0x04, 0xfe, 0xde, 0x76, 0x53, 0x69, 0x3a, 0x1d, 0x27, 0xbd, 0x21, + 0xa6, 0x37, 0x24, 0x9d, 0xe0, 0xc1, 0xe8, 0xd3, 0x08, 0xc7, 0xc1, 0x01, 0x7c, 0x02, 0xfc, 0xad, + 0x91, 0xa6, 0x71, 0xe0, 0xed, 0x0b, 0x97, 0x71, 0x1c, 0x34, 0xfa, 0xdf, 0x3d, 0xf0, 0x66, 0xae, + 0x96, 0x0f, 0xfe, 0xa4, 0x7e, 0xd8, 0x77, 0x96, 0xfb, 0x23, 0x62, 0x66, 0xd8, 0x64, 0xf3, 0xec, + 0x26, 0xde, 0xed, 0xd0, 0xd1, 0x42, 0xe5, 0x4c, 0x0a, 0xa4, 0x4a, 0x11, 0x09, 0x2e, 0xed, 0xa3, + 0x8c, 0x6a, 0x8b, 0x15, 0x99, 0xfe, 0xfb, 0xb7, 0xf5, 0xc1, 0x2d, 0x67, 0x2d, 0xcb, 0xbc, 0xff, + 0x15, 0x00, 0x00, 0xff, 0xff, 0xc9, 0x27, 0x25, 0xa6, 0x8e, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service.pb.go new file mode 100644 index 0000000..df9a9fd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service.pb.go @@ -0,0 +1,484 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto + +package cluster // import "google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableClusterServiceClient is the client API for BigtableClusterService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BigtableClusterServiceClient interface { + // Lists the supported zones for the given project. + ListZones(ctx context.Context, in *ListZonesRequest, opts ...grpc.CallOption) (*ListZonesResponse, error) + // Gets information about a particular cluster. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Lists all clusters in the given project, along with any zones for which + // cluster information could not be retrieved. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Creates a cluster and begins preparing it to begin serving. The returned + // cluster embeds as its "current_operation" a long-running operation which + // can be used to track the progress of turning up the new cluster. + // Immediately upon completion of this request: + // * The cluster will be readable via the API, with all requested attributes + // but no allocated resources. + // Until completion of the embedded operation: + // * Cancelling the operation will render the cluster immediately unreadable + // via the API. + // * All other attempts to modify or delete the cluster will be rejected. + // Upon completion of the embedded operation: + // * Billing for all successfully-allocated resources will begin (some types + // may have lower than the requested levels). + // * New tables can be created in the cluster. + // * The cluster's allocated resource levels will be readable via the API. + // The embedded operation's "metadata" field type is + // [CreateClusterMetadata][google.bigtable.admin.cluster.v1.CreateClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Updates a cluster, and begins allocating or releasing resources as + // requested. The returned cluster embeds as its "current_operation" a + // long-running operation which can be used to track the progress of updating + // the cluster. + // Immediately upon completion of this request: + // * For resource types where a decrease in the cluster's allocation has been + // requested, billing will be based on the newly-requested level. + // Until completion of the embedded operation: + // * Cancelling the operation will set its metadata's "cancelled_at_time", + // and begin restoring resources to their pre-request values. The operation + // is guaranteed to succeed at undoing all resource changes, after which + // point it will terminate with a CANCELLED status. + // * All other attempts to modify or delete the cluster will be rejected. + // * Reading the cluster via the API will continue to give the pre-request + // resource levels. + // Upon completion of the embedded operation: + // * Billing will begin for all successfully-allocated resources (some types + // may have lower than the requested levels). 
+ // * All newly-reserved resources will be available for serving the cluster's + // tables. + // * The cluster's new resource levels will be readable via the API. + // [UpdateClusterMetadata][google.bigtable.admin.cluster.v1.UpdateClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + UpdateCluster(ctx context.Context, in *Cluster, opts ...grpc.CallOption) (*Cluster, error) + // Marks a cluster and all of its tables for permanent deletion in 7 days. + // Immediately upon completion of the request: + // * Billing will cease for all of the cluster's reserved resources. + // * The cluster's "delete_time" field will be set 7 days in the future. + // Soon afterward: + // * All tables within the cluster will become unavailable. + // Prior to the cluster's "delete_time": + // * The cluster can be recovered with a call to UndeleteCluster. + // * All other attempts to modify or delete the cluster will be rejected. + // At the cluster's "delete_time": + // * The cluster and *all of its tables* will immediately and irrevocably + // disappear from the API, and their data will be permanently deleted. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Cancels the scheduled deletion of an cluster and begins preparing it to + // resume serving. The returned operation will also be embedded as the + // cluster's "current_operation". + // Immediately upon completion of this request: + // * The cluster's "delete_time" field will be unset, protecting it from + // automatic deletion. + // Until completion of the returned operation: + // * The operation cannot be cancelled. + // Upon completion of the returned operation: + // * Billing for the cluster's resources will resume. + // * All tables within the cluster will be available. + // [UndeleteClusterMetadata][google.bigtable.admin.cluster.v1.UndeleteClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + UndeleteCluster(ctx context.Context, in *UndeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type bigtableClusterServiceClient struct { + cc *grpc.ClientConn +} + +func NewBigtableClusterServiceClient(cc *grpc.ClientConn) BigtableClusterServiceClient { + return &bigtableClusterServiceClient{cc} +} + +func (c *bigtableClusterServiceClient) ListZones(ctx context.Context, in *ListZonesRequest, opts ...grpc.CallOption) (*ListZonesResponse, error) { + out := new(ListZonesResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/ListZones", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/ListClusters", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) UpdateCluster(ctx context.Context, in *Cluster, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClusterServiceClient) UndeleteCluster(ctx context.Context, in *UndeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.cluster.v1.BigtableClusterService/UndeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableClusterServiceServer is the server API for BigtableClusterService service. +type BigtableClusterServiceServer interface { + // Lists the supported zones for the given project. + ListZones(context.Context, *ListZonesRequest) (*ListZonesResponse, error) + // Gets information about a particular cluster. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Lists all clusters in the given project, along with any zones for which + // cluster information could not be retrieved. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Creates a cluster and begins preparing it to begin serving. The returned + // cluster embeds as its "current_operation" a long-running operation which + // can be used to track the progress of turning up the new cluster. + // Immediately upon completion of this request: + // * The cluster will be readable via the API, with all requested attributes + // but no allocated resources. + // Until completion of the embedded operation: + // * Cancelling the operation will render the cluster immediately unreadable + // via the API. + // * All other attempts to modify or delete the cluster will be rejected. + // Upon completion of the embedded operation: + // * Billing for all successfully-allocated resources will begin (some types + // may have lower than the requested levels). + // * New tables can be created in the cluster. + // * The cluster's allocated resource levels will be readable via the API. + // The embedded operation's "metadata" field type is + // [CreateClusterMetadata][google.bigtable.admin.cluster.v1.CreateClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + CreateCluster(context.Context, *CreateClusterRequest) (*Cluster, error) + // Updates a cluster, and begins allocating or releasing resources as + // requested. 
The returned cluster embeds as its "current_operation" a + // long-running operation which can be used to track the progress of updating + // the cluster. + // Immediately upon completion of this request: + // * For resource types where a decrease in the cluster's allocation has been + // requested, billing will be based on the newly-requested level. + // Until completion of the embedded operation: + // * Cancelling the operation will set its metadata's "cancelled_at_time", + // and begin restoring resources to their pre-request values. The operation + // is guaranteed to succeed at undoing all resource changes, after which + // point it will terminate with a CANCELLED status. + // * All other attempts to modify or delete the cluster will be rejected. + // * Reading the cluster via the API will continue to give the pre-request + // resource levels. + // Upon completion of the embedded operation: + // * Billing will begin for all successfully-allocated resources (some types + // may have lower than the requested levels). + // * All newly-reserved resources will be available for serving the cluster's + // tables. + // * The cluster's new resource levels will be readable via the API. + // [UpdateClusterMetadata][google.bigtable.admin.cluster.v1.UpdateClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. + UpdateCluster(context.Context, *Cluster) (*Cluster, error) + // Marks a cluster and all of its tables for permanent deletion in 7 days. + // Immediately upon completion of the request: + // * Billing will cease for all of the cluster's reserved resources. + // * The cluster's "delete_time" field will be set 7 days in the future. + // Soon afterward: + // * All tables within the cluster will become unavailable. + // Prior to the cluster's "delete_time": + // * The cluster can be recovered with a call to UndeleteCluster. + // * All other attempts to modify or delete the cluster will be rejected. + // At the cluster's "delete_time": + // * The cluster and *all of its tables* will immediately and irrevocably + // disappear from the API, and their data will be permanently deleted. + DeleteCluster(context.Context, *DeleteClusterRequest) (*empty.Empty, error) + // Cancels the scheduled deletion of an cluster and begins preparing it to + // resume serving. The returned operation will also be embedded as the + // cluster's "current_operation". + // Immediately upon completion of this request: + // * The cluster's "delete_time" field will be unset, protecting it from + // automatic deletion. + // Until completion of the returned operation: + // * The operation cannot be cancelled. + // Upon completion of the returned operation: + // * Billing for the cluster's resources will resume. + // * All tables within the cluster will be available. + // [UndeleteClusterMetadata][google.bigtable.admin.cluster.v1.UndeleteClusterMetadata] + // The embedded operation's "response" field type is + // [Cluster][google.bigtable.admin.cluster.v1.Cluster], if successful. 
+ UndeleteCluster(context.Context, *UndeleteClusterRequest) (*longrunning.Operation, error) +} + +func RegisterBigtableClusterServiceServer(s *grpc.Server, srv BigtableClusterServiceServer) { + s.RegisterService(&_BigtableClusterService_serviceDesc, srv) +} + +func _BigtableClusterService_ListZones_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListZonesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).ListZones(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/ListZones", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).ListZones(ctx, req.(*ListZonesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Cluster) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: 
srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).UpdateCluster(ctx, req.(*Cluster)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableClusterService_UndeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableClusterServiceServer).UndeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.cluster.v1.BigtableClusterService/UndeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableClusterServiceServer).UndeleteCluster(ctx, req.(*UndeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigtableClusterService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.admin.cluster.v1.BigtableClusterService", + HandlerType: (*BigtableClusterServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListZones", + Handler: _BigtableClusterService_ListZones_Handler, + }, + { + MethodName: "GetCluster", + Handler: _BigtableClusterService_GetCluster_Handler, + }, + { + MethodName: "ListClusters", + Handler: _BigtableClusterService_ListClusters_Handler, + }, + { + MethodName: "CreateCluster", + Handler: _BigtableClusterService_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _BigtableClusterService_UpdateCluster_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _BigtableClusterService_DeleteCluster_Handler, + }, + { + MethodName: "UndeleteCluster", + Handler: _BigtableClusterService_UndeleteCluster_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto", +} + +func init() { + proto.RegisterFile("google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto", fileDescriptor_bigtable_cluster_service_b6e5a4d5a86f8b34) +} + +var fileDescriptor_bigtable_cluster_service_b6e5a4d5a86f8b34 = []byte{ + // 515 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x94, 0x4f, 0x6b, 0x14, 0x31, + 0x18, 0xc6, 0x89, 0x07, 0xa1, 0xc1, 0x45, 0xc8, 0xa1, 0x87, 0x6d, 0x0b, 0x32, 0x15, 0xb1, 0x23, + 0x26, 0x6e, 0x17, 0xc5, 0xbf, 0x08, 0x5b, 0xa5, 0x1e, 0x04, 0x8b, 0xd2, 0x4b, 0x2f, 0x4b, 0x76, + 0xe7, 0x35, 0x8c, 0xcc, 0x24, 0x31, 0xc9, 0x2c, 0xa8, 0xf4, 0xe2, 0xcd, 0x93, 0x88, 0x27, 0x3d, + 0x78, 0xeb, 0xdd, 0xef, 0xe2, 0x57, 0xf0, 0x83, 0xc8, 0x64, 0x92, 
0xb5, 0x2b, 0x6b, 0x77, 0xa6, + 0xb7, 0x99, 0xc9, 0xfb, 0xbc, 0xcf, 0x6f, 0x9e, 0x24, 0x2f, 0x7e, 0x2c, 0x94, 0x12, 0x05, 0xb0, + 0x49, 0x2e, 0x1c, 0x9f, 0x14, 0xc0, 0x78, 0x56, 0xe6, 0x92, 0x4d, 0x8b, 0xca, 0x3a, 0x30, 0x6c, + 0x36, 0x98, 0xaf, 0x8c, 0xc3, 0xb7, 0xb1, 0x05, 0x33, 0xcb, 0xa7, 0x40, 0xb5, 0x51, 0x4e, 0x91, + 0x2b, 0x4d, 0x03, 0x1a, 0xcb, 0xa8, 0x6f, 0x40, 0x43, 0x31, 0x9d, 0x0d, 0xfa, 0x9b, 0xc1, 0x82, + 0xeb, 0x9c, 0x71, 0x29, 0x95, 0xe3, 0x2e, 0x57, 0xd2, 0x36, 0xfa, 0xfe, 0xc3, 0xee, 0x00, 0x19, + 0x77, 0x3c, 0xa8, 0x9f, 0x9d, 0x1b, 0x7f, 0x5c, 0x82, 0xb5, 0x5c, 0x40, 0xe4, 0xd8, 0x0e, 0x9d, + 0x0a, 0x25, 0x85, 0xa9, 0xa4, 0xcc, 0xa5, 0x60, 0x4a, 0x83, 0x59, 0x80, 0xdd, 0x08, 0x45, 0xfe, + 0x6d, 0x52, 0xbd, 0x66, 0x50, 0x6a, 0xf7, 0xae, 0x59, 0xdc, 0xfd, 0xb4, 0x86, 0xd7, 0x47, 0xc1, + 0x6d, 0xaf, 0x31, 0x7b, 0xd5, 0x78, 0x91, 0x6f, 0x08, 0xaf, 0x3d, 0xcf, 0xad, 0x3b, 0x52, 0x12, + 0x2c, 0xd9, 0xa5, 0xab, 0x32, 0xa3, 0xf3, 0xe2, 0x97, 0xf0, 0xb6, 0x02, 0xeb, 0xfa, 0xc3, 0x4e, + 0x1a, 0xab, 0x95, 0xb4, 0x90, 0x6c, 0x7f, 0xfc, 0xf5, 0xfb, 0xeb, 0x85, 0x2d, 0xb2, 0x51, 0x07, + 0xf1, 0x41, 0xf2, 0x12, 0x1e, 0x69, 0xa3, 0xde, 0xc0, 0xd4, 0x59, 0x96, 0x1e, 0xb3, 0xf7, 0x9e, + 0xe6, 0x07, 0xc2, 0x78, 0x1f, 0x5c, 0x20, 0x26, 0x2d, 0x8c, 0xfe, 0x56, 0x47, 0xba, 0x9d, 0xd5, + 0xa2, 0xa0, 0x48, 0x6e, 0x79, 0xa6, 0x94, 0x5c, 0x5f, 0xc6, 0xd4, 0x20, 0xb1, 0x34, 0x6e, 0x60, + 0x8d, 0x49, 0x7e, 0x22, 0x7c, 0xa9, 0xfe, 0xb7, 0xd0, 0xc1, 0x92, 0xdb, 0xed, 0xb2, 0x88, 0xf5, + 0x11, 0xf2, 0x4e, 0x57, 0x59, 0x48, 0x71, 0xe0, 0x89, 0x6f, 0x90, 0x9d, 0xe5, 0x29, 0x72, 0x21, + 0x0c, 0x08, 0xee, 0x20, 0x9b, 0x53, 0x93, 0x13, 0x84, 0x7b, 0x7b, 0x06, 0xb8, 0x8b, 0x07, 0x81, + 0xb4, 0x30, 0x5f, 0x10, 0x9c, 0x23, 0xd9, 0xc0, 0x99, 0x5c, 0x3b, 0x2b, 0xd9, 0xe3, 0x39, 0xe4, + 0x7d, 0x94, 0x92, 0xef, 0x08, 0xf7, 0x0e, 0x75, 0x76, 0x8a, 0xb3, 0xbd, 0x5f, 0x17, 0xb4, 0xa1, + 0x47, 0xbb, 0xd9, 0x6f, 0xbd, 0xe9, 0x35, 0xdc, 0x17, 0x84, 0x7b, 0x4f, 0xa0, 0x80, 0x4e, 0x21, + 0x2e, 0x08, 0x62, 0x88, 0xeb, 0x51, 0x17, 0xef, 0x2d, 0x7d, 0x5a, 0xdf, 0xdb, 0x78, 0x16, 0xd3, + 0xf6, 0x67, 0xf1, 0x04, 0xe1, 0xcb, 0x87, 0x32, 0x5b, 0xa0, 0xba, 0xbb, 0x9a, 0xea, 0x1f, 0x49, + 0xe4, 0xda, 0x8a, 0xca, 0x53, 0x43, 0x87, 0xbe, 0x88, 0x43, 0x27, 0xb9, 0xe7, 0xf1, 0x86, 0xc9, + 0xa0, 0x75, 0x6a, 0x55, 0xf0, 0x19, 0x7d, 0x46, 0xf8, 0xea, 0x54, 0x95, 0x2b, 0xc9, 0x46, 0x9b, + 0xcb, 0x27, 0x96, 0x3d, 0xa8, 0x93, 0x3a, 0x40, 0x47, 0xfb, 0xa1, 0x83, 0x50, 0x05, 0x97, 0x82, + 0x2a, 0x23, 0x98, 0x00, 0xe9, 0x73, 0x64, 0xcd, 0x12, 0xd7, 0xb9, 0xfd, 0xff, 0xfc, 0x7d, 0x10, + 0x1e, 0x27, 0x17, 0xbd, 0x66, 0xf8, 0x27, 0x00, 0x00, 0xff, 0xff, 0x50, 0x92, 0x91, 0x86, 0x71, + 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.pb.go new file mode 100644 index 0000000..ef17edc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.pb.go @@ -0,0 +1,646 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto + +package cluster // import "google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for BigtableClusterService.ListZones. +type ListZonesRequest struct { + // The unique name of the project for which a list of supported zones is + // requested. + // Values are of the form projects/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListZonesRequest) Reset() { *m = ListZonesRequest{} } +func (m *ListZonesRequest) String() string { return proto.CompactTextString(m) } +func (*ListZonesRequest) ProtoMessage() {} +func (*ListZonesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{0} +} +func (m *ListZonesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListZonesRequest.Unmarshal(m, b) +} +func (m *ListZonesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListZonesRequest.Marshal(b, m, deterministic) +} +func (dst *ListZonesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListZonesRequest.Merge(dst, src) +} +func (m *ListZonesRequest) XXX_Size() int { + return xxx_messageInfo_ListZonesRequest.Size(m) +} +func (m *ListZonesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListZonesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListZonesRequest proto.InternalMessageInfo + +func (m *ListZonesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Response message for BigtableClusterService.ListZones. +type ListZonesResponse struct { + // The list of requested zones. 
+ Zones []*Zone `protobuf:"bytes,1,rep,name=zones,proto3" json:"zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListZonesResponse) Reset() { *m = ListZonesResponse{} } +func (m *ListZonesResponse) String() string { return proto.CompactTextString(m) } +func (*ListZonesResponse) ProtoMessage() {} +func (*ListZonesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{1} +} +func (m *ListZonesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListZonesResponse.Unmarshal(m, b) +} +func (m *ListZonesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListZonesResponse.Marshal(b, m, deterministic) +} +func (dst *ListZonesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListZonesResponse.Merge(dst, src) +} +func (m *ListZonesResponse) XXX_Size() int { + return xxx_messageInfo_ListZonesResponse.Size(m) +} +func (m *ListZonesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListZonesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListZonesResponse proto.InternalMessageInfo + +func (m *ListZonesResponse) GetZones() []*Zone { + if m != nil { + return m.Zones + } + return nil +} + +// Request message for BigtableClusterService.GetCluster. +type GetClusterRequest struct { + // The unique name of the requested cluster. + // Values are of the form projects//zones//clusters/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{2} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +func (m *GetClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableClusterService.ListClusters. +type ListClustersRequest struct { + // The unique name of the project for which a list of clusters is requested. 
+ // Values are of the form projects/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{3} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +func (m *ListClustersRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Response message for BigtableClusterService.ListClusters. +type ListClustersResponse struct { + // The list of requested Clusters. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // The zones for which clusters could not be retrieved. + FailedZones []*Zone `protobuf:"bytes,2,rep,name=failed_zones,json=failedZones,proto3" json:"failed_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{4} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetFailedZones() []*Zone { + if m != nil { + return m.FailedZones + } + return nil +} + +// Request message for BigtableClusterService.CreateCluster. +type CreateClusterRequest struct { + // The unique name of the zone in which to create the cluster. 
+ // Values are of the form projects//zones/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The id to be used when referring to the new cluster within its zone, + // e.g. just the "test-cluster" section of the full name + // "projects//zones//clusters/test-cluster". + ClusterId string `protobuf:"bytes,2,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The cluster to create. + // The "name", "delete_time", and "current_operation" fields must be left + // blank. + Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{5} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +func (m *CreateClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +// Metadata type for the operation returned by +// BigtableClusterService.CreateCluster. +type CreateClusterMetadata struct { + // The request which prompted the creation of this operation. + OriginalRequest *CreateClusterRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which original_request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which this operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterMetadata) Reset() { *m = CreateClusterMetadata{} } +func (m *CreateClusterMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateClusterMetadata) ProtoMessage() {} +func (*CreateClusterMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{6} +} +func (m *CreateClusterMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterMetadata.Unmarshal(m, b) +} +func (m *CreateClusterMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateClusterMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterMetadata.Merge(dst, src) +} +func (m *CreateClusterMetadata) XXX_Size() int { + return xxx_messageInfo_CreateClusterMetadata.Size(m) +} +func (m *CreateClusterMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterMetadata proto.InternalMessageInfo + +func (m *CreateClusterMetadata) GetOriginalRequest() *CreateClusterRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *CreateClusterMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *CreateClusterMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// Metadata type for the operation returned by +// BigtableClusterService.UpdateCluster. +type UpdateClusterMetadata struct { + // The request which prompted the creation of this operation. + OriginalRequest *Cluster `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which original_request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which this operation was cancelled. If set, this operation is + // in the process of undoing itself (which is guaranteed to succeed) and + // cannot be cancelled again. + CancelTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=cancel_time,json=cancelTime,proto3" json:"cancel_time,omitempty"` + // The time at which this operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterMetadata) Reset() { *m = UpdateClusterMetadata{} } +func (m *UpdateClusterMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterMetadata) ProtoMessage() {} +func (*UpdateClusterMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{7} +} +func (m *UpdateClusterMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterMetadata.Unmarshal(m, b) +} +func (m *UpdateClusterMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterMetadata.Merge(dst, src) +} +func (m *UpdateClusterMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateClusterMetadata.Size(m) +} +func (m *UpdateClusterMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterMetadata proto.InternalMessageInfo + +func (m *UpdateClusterMetadata) GetOriginalRequest() *Cluster { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *UpdateClusterMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *UpdateClusterMetadata) GetCancelTime() *timestamp.Timestamp { + if m != nil { + return m.CancelTime + } + return nil +} + +func (m *UpdateClusterMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// Request message for BigtableClusterService.DeleteCluster. +type DeleteClusterRequest struct { + // The unique name of the cluster to be deleted. + // Values are of the form projects//zones//clusters/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{8} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +func (m *DeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableClusterService.UndeleteCluster. 
+type UndeleteClusterRequest struct { + // The unique name of the cluster to be un-deleted. + // Values are of the form projects//zones//clusters/ + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteClusterRequest) Reset() { *m = UndeleteClusterRequest{} } +func (m *UndeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UndeleteClusterRequest) ProtoMessage() {} +func (*UndeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{9} +} +func (m *UndeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteClusterRequest.Unmarshal(m, b) +} +func (m *UndeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UndeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteClusterRequest.Merge(dst, src) +} +func (m *UndeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_UndeleteClusterRequest.Size(m) +} +func (m *UndeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteClusterRequest proto.InternalMessageInfo + +func (m *UndeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Metadata type for the operation returned by +// BigtableClusterService.UndeleteCluster. +type UndeleteClusterMetadata struct { + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which this operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteClusterMetadata) Reset() { *m = UndeleteClusterMetadata{} } +func (m *UndeleteClusterMetadata) String() string { return proto.CompactTextString(m) } +func (*UndeleteClusterMetadata) ProtoMessage() {} +func (*UndeleteClusterMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{10} +} +func (m *UndeleteClusterMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteClusterMetadata.Unmarshal(m, b) +} +func (m *UndeleteClusterMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteClusterMetadata.Marshal(b, m, deterministic) +} +func (dst *UndeleteClusterMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteClusterMetadata.Merge(dst, src) +} +func (m *UndeleteClusterMetadata) XXX_Size() int { + return xxx_messageInfo_UndeleteClusterMetadata.Size(m) +} +func (m *UndeleteClusterMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteClusterMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteClusterMetadata proto.InternalMessageInfo + +func (m *UndeleteClusterMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *UndeleteClusterMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// Metadata type for operations initiated by the V2 BigtableAdmin service. +// More complete information for such operations is available via the V2 API. 
+type V2OperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *V2OperationMetadata) Reset() { *m = V2OperationMetadata{} } +func (m *V2OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*V2OperationMetadata) ProtoMessage() {} +func (*V2OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0, []int{11} +} +func (m *V2OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_V2OperationMetadata.Unmarshal(m, b) +} +func (m *V2OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_V2OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *V2OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_V2OperationMetadata.Merge(dst, src) +} +func (m *V2OperationMetadata) XXX_Size() int { + return xxx_messageInfo_V2OperationMetadata.Size(m) +} +func (m *V2OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_V2OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_V2OperationMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ListZonesRequest)(nil), "google.bigtable.admin.cluster.v1.ListZonesRequest") + proto.RegisterType((*ListZonesResponse)(nil), "google.bigtable.admin.cluster.v1.ListZonesResponse") + proto.RegisterType((*GetClusterRequest)(nil), "google.bigtable.admin.cluster.v1.GetClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.bigtable.admin.cluster.v1.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.bigtable.admin.cluster.v1.ListClustersResponse") + proto.RegisterType((*CreateClusterRequest)(nil), "google.bigtable.admin.cluster.v1.CreateClusterRequest") + proto.RegisterType((*CreateClusterMetadata)(nil), "google.bigtable.admin.cluster.v1.CreateClusterMetadata") + proto.RegisterType((*UpdateClusterMetadata)(nil), "google.bigtable.admin.cluster.v1.UpdateClusterMetadata") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.bigtable.admin.cluster.v1.DeleteClusterRequest") + proto.RegisterType((*UndeleteClusterRequest)(nil), "google.bigtable.admin.cluster.v1.UndeleteClusterRequest") + proto.RegisterType((*UndeleteClusterMetadata)(nil), "google.bigtable.admin.cluster.v1.UndeleteClusterMetadata") + proto.RegisterType((*V2OperationMetadata)(nil), "google.bigtable.admin.cluster.v1.V2OperationMetadata") +} + +func init() { + proto.RegisterFile("google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto", fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0) +} + +var fileDescriptor_bigtable_cluster_service_messages_b1fb635d2f4efcd0 = []byte{ + // 541 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0xd5, 0x26, 0xe5, 0xa3, 0xe3, 0x4a, 0xb4, 0x6e, 0x02, 0x51, 0x24, 0x44, 0x64, 0x50, 0x69, + 0x11, 0xb2, 0xd5, 0x20, 0x71, 0x69, 0xb9, 0x24, 0xa0, 0x52, 0x89, 0x8a, 0x12, 0x5a, 0x0e, 0xbd, + 0x58, 0x9b, 0x78, 0x62, 0x56, 0xb2, 0x77, 0x8d, 0x77, 0x93, 0x03, 0x3f, 0x82, 0x1b, 0xfc, 0x04, + 0xc4, 0x2f, 0xe4, 0x8c, 0xec, 0xdd, 0x8d, 0x68, 0x95, 0xd6, 0xb1, 0x10, 0xb7, 0xdd, 0x99, 0xf7, + 0x66, 0xde, 0x9b, 0x1d, 0x69, 0xe1, 0x6d, 0x2c, 0x44, 0x9c, 0x60, 0x30, 0x66, 0xb1, 0xa2, 0xe3, + 0x04, 0x03, 0x1a, 0xa5, 0x8c, 0x07, 0x93, 0x64, 0x26, 0x15, 0xe6, 0xc1, 0x7c, 0x7f, 0x91, 0x09, + 0x4d, 0x2c, 0x94, 0x98, 0xcf, 0xd9, 
0x04, 0xc3, 0x14, 0xa5, 0xa4, 0x31, 0x4a, 0x3f, 0xcb, 0x85, + 0x12, 0x6e, 0x4f, 0x57, 0xf2, 0x2d, 0xde, 0x2f, 0x2b, 0xf9, 0x86, 0xe5, 0xcf, 0xf7, 0xbb, 0x87, + 0xf5, 0x7b, 0x45, 0x54, 0x51, 0x5d, 0xbf, 0xfb, 0xc8, 0xb0, 0xcb, 0xdb, 0x78, 0x36, 0x0d, 0x14, + 0x4b, 0x51, 0x2a, 0x9a, 0x66, 0x1a, 0xe0, 0xed, 0xc0, 0xe6, 0x3b, 0x26, 0xd5, 0x85, 0xe0, 0x28, + 0x47, 0xf8, 0x65, 0x86, 0x52, 0xb9, 0x2e, 0xac, 0x71, 0x9a, 0x62, 0x87, 0xf4, 0xc8, 0xee, 0xfa, + 0xa8, 0x3c, 0x7b, 0x1f, 0x60, 0xeb, 0x2f, 0x9c, 0xcc, 0x04, 0x97, 0xe8, 0x1e, 0xc2, 0xad, 0xaf, + 0x45, 0xa0, 0x43, 0x7a, 0xcd, 0x5d, 0xa7, 0xbf, 0xe3, 0x57, 0xb9, 0xf1, 0x0b, 0xfe, 0x48, 0x93, + 0xbc, 0xa7, 0xb0, 0x75, 0x84, 0x6a, 0xa8, 0x93, 0x37, 0xf5, 0xde, 0x83, 0xed, 0xa2, 0xb7, 0x41, + 0xde, 0x28, 0xf3, 0x17, 0x81, 0xd6, 0x65, 0xac, 0x91, 0xfa, 0x06, 0xee, 0x1a, 0x19, 0x56, 0xed, + 0x5e, 0xb5, 0x5a, 0xab, 0x6d, 0x41, 0x75, 0x8f, 0x61, 0x63, 0x4a, 0x59, 0x82, 0x51, 0xa8, 0x8d, + 0x37, 0x6a, 0x19, 0x77, 0x34, 0xb7, 0x1c, 0xa2, 0xf7, 0x8d, 0x40, 0x6b, 0x98, 0x23, 0x55, 0x58, + 0x3d, 0x02, 0xf7, 0x21, 0x80, 0x7d, 0x5d, 0x16, 0x75, 0x1a, 0x65, 0x66, 0xdd, 0x44, 0x8e, 0x23, + 0x77, 0x08, 0x77, 0xcc, 0xa5, 0xd3, 0xec, 0x91, 0x7a, 0xe6, 0x2c, 0xd3, 0xfb, 0x4d, 0xa0, 0x7d, + 0x49, 0xd0, 0x09, 0x2a, 0x5a, 0xec, 0x92, 0x4b, 0x61, 0x53, 0xe4, 0x2c, 0x66, 0x9c, 0x26, 0x61, + 0xae, 0x55, 0x96, 0xea, 0x9c, 0xfe, 0xcb, 0x15, 0xfa, 0x2c, 0xf1, 0x38, 0xba, 0x67, 0xeb, 0x59, + 0xd3, 0xaf, 0x60, 0xc3, 0x54, 0x0e, 0x8b, 0x15, 0x2d, 0x2d, 0x3a, 0xfd, 0xae, 0x2d, 0x6f, 0xf7, + 0xd7, 0x3f, 0xb3, 0xfb, 0x3b, 0x72, 0x0c, 0xbe, 0x88, 0xb8, 0x07, 0xe0, 0x4c, 0x19, 0x67, 0xf2, + 0xb3, 0x66, 0x37, 0x2b, 0xd9, 0xa0, 0xe1, 0x45, 0xc0, 0xfb, 0xd9, 0x80, 0xf6, 0x79, 0x16, 0x2d, + 0x31, 0x7e, 0x76, 0xad, 0xf1, 0x1a, 0x03, 0xfe, 0x0f, 0x5e, 0x27, 0x94, 0x4f, 0x30, 0x59, 0xd9, + 0xab, 0x86, 0x2f, 0x1b, 0xd4, 0x5a, 0xad, 0x41, 0x3d, 0x83, 0xd6, 0x6b, 0x4c, 0x70, 0x95, 0x8d, + 0xf5, 0x9e, 0xc3, 0xfd, 0x73, 0x1e, 0xad, 0x8a, 0xfe, 0x4e, 0xe0, 0xc1, 0x15, 0xf8, 0xe2, 0x11, + 0xae, 0x8e, 0x8b, 0xfc, 0xd3, 0x6a, 0x34, 0x6a, 0x39, 0x6e, 0xc3, 0xf6, 0xa7, 0xfe, 0xfb, 0x0c, + 0x73, 0xaa, 0x98, 0xe0, 0x56, 0xd2, 0xe0, 0x07, 0x81, 0x27, 0x13, 0x91, 0x56, 0xee, 0xc0, 0xe0, + 0xf1, 0xc0, 0xa4, 0x8c, 0xa9, 0x8f, 0xfa, 0x1b, 0x38, 0x31, 0xbf, 0xc0, 0x69, 0xd1, 0xfd, 0x94, + 0x5c, 0x1c, 0x99, 0x42, 0xb1, 0x48, 0x28, 0x8f, 0x7d, 0x91, 0xc7, 0x41, 0x8c, 0xbc, 0xd4, 0x16, + 0xe8, 0x14, 0xcd, 0x98, 0xbc, 0xfe, 0x0f, 0x38, 0x30, 0xc7, 0xf1, 0xed, 0x92, 0xf3, 0xe2, 0x4f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xc1, 0x75, 0x68, 0x13, 0xa2, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_data.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_data.pb.go new file mode 100644 index 0000000..6c1057b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_data.pb.go @@ -0,0 +1,546 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/table/v1/bigtable_table_data.proto + +package table // import "google.golang.org/genproto/googleapis/bigtable/admin/table/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Table_TimestampGranularity int32 + +const ( + Table_MILLIS Table_TimestampGranularity = 0 +) + +var Table_TimestampGranularity_name = map[int32]string{ + 0: "MILLIS", +} +var Table_TimestampGranularity_value = map[string]int32{ + "MILLIS": 0, +} + +func (x Table_TimestampGranularity) String() string { + return proto.EnumName(Table_TimestampGranularity_name, int32(x)) +} +func (Table_TimestampGranularity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{0, 0} +} + +// A collection of user data indexed by row, column, and timestamp. +// Each table is served using the resources of its parent cluster. +type Table struct { + // A unique identifier of the form + // /tables/[_a-zA-Z0-9][-_.a-zA-Z0-9]* + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // If this Table is in the process of being created, the Operation used to + // track its progress. As long as this operation is present, the Table will + // not accept any Table Admin or Read/Write requests. + CurrentOperation *longrunning.Operation `protobuf:"bytes,2,opt,name=current_operation,json=currentOperation,proto3" json:"current_operation,omitempty"` + // The column families configured for this table, mapped by column family id. + ColumnFamilies map[string]*ColumnFamily `protobuf:"bytes,3,rep,name=column_families,json=columnFamilies,proto3" json:"column_families,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The granularity (e.g. MILLIS, MICROS) at which timestamps are stored in + // this table. Timestamps not matching the granularity will be rejected. + // Cannot be changed once the table is created. 
+ Granularity Table_TimestampGranularity `protobuf:"varint,4,opt,name=granularity,proto3,enum=google.bigtable.admin.table.v1.Table_TimestampGranularity" json:"granularity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table) Reset() { *m = Table{} } +func (m *Table) String() string { return proto.CompactTextString(m) } +func (*Table) ProtoMessage() {} +func (*Table) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{0} +} +func (m *Table) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table.Unmarshal(m, b) +} +func (m *Table) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table.Marshal(b, m, deterministic) +} +func (dst *Table) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table.Merge(dst, src) +} +func (m *Table) XXX_Size() int { + return xxx_messageInfo_Table.Size(m) +} +func (m *Table) XXX_DiscardUnknown() { + xxx_messageInfo_Table.DiscardUnknown(m) +} + +var xxx_messageInfo_Table proto.InternalMessageInfo + +func (m *Table) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Table) GetCurrentOperation() *longrunning.Operation { + if m != nil { + return m.CurrentOperation + } + return nil +} + +func (m *Table) GetColumnFamilies() map[string]*ColumnFamily { + if m != nil { + return m.ColumnFamilies + } + return nil +} + +func (m *Table) GetGranularity() Table_TimestampGranularity { + if m != nil { + return m.Granularity + } + return Table_MILLIS +} + +// A set of columns within a table which share a common configuration. +type ColumnFamily struct { + // A unique identifier of the form /columnFamilies/[-_.a-zA-Z0-9]+ + // The last segment is the same as the "name" field in + // google.bigtable.v1.Family. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Garbage collection expression specified by the following grammar: + // GC = EXPR + // | "" ; + // EXPR = EXPR, "||", EXPR (* lowest precedence *) + // | EXPR, "&&", EXPR + // | "(", EXPR, ")" (* highest precedence *) + // | PROP ; + // PROP = "version() >", NUM32 + // | "age() >", NUM64, [ UNIT ] ; + // NUM32 = non-zero-digit { digit } ; (* # NUM32 <= 2^32 - 1 *) + // NUM64 = non-zero-digit { digit } ; (* # NUM64 <= 2^63 - 1 *) + // UNIT = "d" | "h" | "m" (* d=days, h=hours, m=minutes, else micros *) + // GC expressions can be up to 500 characters in length + // + // The different types of PROP are defined as follows: + // version() - cell index, counting from most recent and starting at 1 + // age() - age of the cell (current time minus cell timestamp) + // + // Example: "version() > 3 || (age() > 3d && version() > 1)" + // drop cells beyond the most recent three, and drop cells older than three + // days unless they're the most recent cell in the row/column + // + // Garbage collection executes opportunistically in the background, and so + // it's possible for reads to return a cell even if it matches the active GC + // expression for its family. + GcExpression string `protobuf:"bytes,2,opt,name=gc_expression,json=gcExpression,proto3" json:"gc_expression,omitempty"` + // Garbage collection rule specified as a protobuf. + // Supersedes `gc_expression`. + // Must serialize to at most 500 bytes. + // + // NOTE: Garbage collection executes opportunistically in the background, and + // so it's possible for reads to return a cell even if it matches the active + // GC expression for its family. 
+ GcRule *GcRule `protobuf:"bytes,3,opt,name=gc_rule,json=gcRule,proto3" json:"gc_rule,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnFamily) Reset() { *m = ColumnFamily{} } +func (m *ColumnFamily) String() string { return proto.CompactTextString(m) } +func (*ColumnFamily) ProtoMessage() {} +func (*ColumnFamily) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{1} +} +func (m *ColumnFamily) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnFamily.Unmarshal(m, b) +} +func (m *ColumnFamily) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnFamily.Marshal(b, m, deterministic) +} +func (dst *ColumnFamily) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnFamily.Merge(dst, src) +} +func (m *ColumnFamily) XXX_Size() int { + return xxx_messageInfo_ColumnFamily.Size(m) +} +func (m *ColumnFamily) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnFamily.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnFamily proto.InternalMessageInfo + +func (m *ColumnFamily) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ColumnFamily) GetGcExpression() string { + if m != nil { + return m.GcExpression + } + return "" +} + +func (m *ColumnFamily) GetGcRule() *GcRule { + if m != nil { + return m.GcRule + } + return nil +} + +// Rule for determining which cells to delete during garbage collection. +type GcRule struct { + // Types that are valid to be assigned to Rule: + // *GcRule_MaxNumVersions + // *GcRule_MaxAge + // *GcRule_Intersection_ + // *GcRule_Union_ + Rule isGcRule_Rule `protobuf_oneof:"rule"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule) Reset() { *m = GcRule{} } +func (m *GcRule) String() string { return proto.CompactTextString(m) } +func (*GcRule) ProtoMessage() {} +func (*GcRule) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{2} +} +func (m *GcRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule.Unmarshal(m, b) +} +func (m *GcRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule.Marshal(b, m, deterministic) +} +func (dst *GcRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule.Merge(dst, src) +} +func (m *GcRule) XXX_Size() int { + return xxx_messageInfo_GcRule.Size(m) +} +func (m *GcRule) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule proto.InternalMessageInfo + +type isGcRule_Rule interface { + isGcRule_Rule() +} + +type GcRule_MaxNumVersions struct { + MaxNumVersions int32 `protobuf:"varint,1,opt,name=max_num_versions,json=maxNumVersions,proto3,oneof"` +} + +type GcRule_MaxAge struct { + MaxAge *duration.Duration `protobuf:"bytes,2,opt,name=max_age,json=maxAge,proto3,oneof"` +} + +type GcRule_Intersection_ struct { + Intersection *GcRule_Intersection `protobuf:"bytes,3,opt,name=intersection,proto3,oneof"` +} + +type GcRule_Union_ struct { + Union *GcRule_Union `protobuf:"bytes,4,opt,name=union,proto3,oneof"` +} + +func (*GcRule_MaxNumVersions) isGcRule_Rule() {} + +func (*GcRule_MaxAge) isGcRule_Rule() {} + +func (*GcRule_Intersection_) isGcRule_Rule() {} + +func (*GcRule_Union_) isGcRule_Rule() {} + +func (m *GcRule) GetRule() isGcRule_Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m 
*GcRule) GetMaxNumVersions() int32 { + if x, ok := m.GetRule().(*GcRule_MaxNumVersions); ok { + return x.MaxNumVersions + } + return 0 +} + +func (m *GcRule) GetMaxAge() *duration.Duration { + if x, ok := m.GetRule().(*GcRule_MaxAge); ok { + return x.MaxAge + } + return nil +} + +func (m *GcRule) GetIntersection() *GcRule_Intersection { + if x, ok := m.GetRule().(*GcRule_Intersection_); ok { + return x.Intersection + } + return nil +} + +func (m *GcRule) GetUnion() *GcRule_Union { + if x, ok := m.GetRule().(*GcRule_Union_); ok { + return x.Union + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GcRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GcRule_OneofMarshaler, _GcRule_OneofUnmarshaler, _GcRule_OneofSizer, []interface{}{ + (*GcRule_MaxNumVersions)(nil), + (*GcRule_MaxAge)(nil), + (*GcRule_Intersection_)(nil), + (*GcRule_Union_)(nil), + } +} + +func _GcRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GcRule) + // rule + switch x := m.Rule.(type) { + case *GcRule_MaxNumVersions: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MaxNumVersions)) + case *GcRule_MaxAge: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MaxAge); err != nil { + return err + } + case *GcRule_Intersection_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Intersection); err != nil { + return err + } + case *GcRule_Union_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Union); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GcRule.Rule has unexpected type %T", x) + } + return nil +} + +func _GcRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GcRule) + switch tag { + case 1: // rule.max_num_versions + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Rule = &GcRule_MaxNumVersions{int32(x)} + return true, err + case 2: // rule.max_age + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_MaxAge{msg} + return true, err + case 3: // rule.intersection + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcRule_Intersection) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_Intersection_{msg} + return true, err + case 4: // rule.union + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcRule_Union) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_Union_{msg} + return true, err + default: + return false, nil + } +} + +func _GcRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GcRule) + // rule + switch x := m.Rule.(type) { + case *GcRule_MaxNumVersions: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.MaxNumVersions)) + case *GcRule_MaxAge: + s := proto.Size(x.MaxAge) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GcRule_Intersection_: + s := proto.Size(x.Intersection) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GcRule_Union_: + s := proto.Size(x.Union) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + 
return n +} + +// A GcRule which deletes cells matching all of the given rules. +type GcRule_Intersection struct { + // Only delete cells which would be deleted by every element of `rules`. + Rules []*GcRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule_Intersection) Reset() { *m = GcRule_Intersection{} } +func (m *GcRule_Intersection) String() string { return proto.CompactTextString(m) } +func (*GcRule_Intersection) ProtoMessage() {} +func (*GcRule_Intersection) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{2, 0} +} +func (m *GcRule_Intersection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule_Intersection.Unmarshal(m, b) +} +func (m *GcRule_Intersection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule_Intersection.Marshal(b, m, deterministic) +} +func (dst *GcRule_Intersection) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule_Intersection.Merge(dst, src) +} +func (m *GcRule_Intersection) XXX_Size() int { + return xxx_messageInfo_GcRule_Intersection.Size(m) +} +func (m *GcRule_Intersection) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule_Intersection.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule_Intersection proto.InternalMessageInfo + +func (m *GcRule_Intersection) GetRules() []*GcRule { + if m != nil { + return m.Rules + } + return nil +} + +// A GcRule which deletes cells matching any of the given rules. +type GcRule_Union struct { + // Delete cells which would be deleted by any element of `rules`. + Rules []*GcRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule_Union) Reset() { *m = GcRule_Union{} } +func (m *GcRule_Union) String() string { return proto.CompactTextString(m) } +func (*GcRule_Union) ProtoMessage() {} +func (*GcRule_Union) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_data_1213651e7f65c4a9, []int{2, 1} +} +func (m *GcRule_Union) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule_Union.Unmarshal(m, b) +} +func (m *GcRule_Union) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule_Union.Marshal(b, m, deterministic) +} +func (dst *GcRule_Union) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule_Union.Merge(dst, src) +} +func (m *GcRule_Union) XXX_Size() int { + return xxx_messageInfo_GcRule_Union.Size(m) +} +func (m *GcRule_Union) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule_Union.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule_Union proto.InternalMessageInfo + +func (m *GcRule_Union) GetRules() []*GcRule { + if m != nil { + return m.Rules + } + return nil +} + +func init() { + proto.RegisterType((*Table)(nil), "google.bigtable.admin.table.v1.Table") + proto.RegisterMapType((map[string]*ColumnFamily)(nil), "google.bigtable.admin.table.v1.Table.ColumnFamiliesEntry") + proto.RegisterType((*ColumnFamily)(nil), "google.bigtable.admin.table.v1.ColumnFamily") + proto.RegisterType((*GcRule)(nil), "google.bigtable.admin.table.v1.GcRule") + proto.RegisterType((*GcRule_Intersection)(nil), "google.bigtable.admin.table.v1.GcRule.Intersection") + proto.RegisterType((*GcRule_Union)(nil), "google.bigtable.admin.table.v1.GcRule.Union") + 
proto.RegisterEnum("google.bigtable.admin.table.v1.Table_TimestampGranularity", Table_TimestampGranularity_name, Table_TimestampGranularity_value) +} + +func init() { + proto.RegisterFile("google/bigtable/admin/table/v1/bigtable_table_data.proto", fileDescriptor_bigtable_table_data_1213651e7f65c4a9) +} + +var fileDescriptor_bigtable_table_data_1213651e7f65c4a9 = []byte{ + // 579 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0x61, 0x6b, 0xd3, 0x40, + 0x18, 0xc7, 0x9b, 0xa5, 0xed, 0xd8, 0xb3, 0x3a, 0xeb, 0x29, 0x52, 0x0b, 0x4a, 0xc9, 0x40, 0x8a, + 0xc8, 0x85, 0x6d, 0xbe, 0x98, 0x53, 0x10, 0xbb, 0xcd, 0x6d, 0x32, 0x75, 0xc4, 0x29, 0x28, 0x42, + 0xb8, 0x66, 0xb7, 0x23, 0x98, 0xbb, 0x2b, 0x97, 0x5c, 0x69, 0x5f, 0xfb, 0xc6, 0x8f, 0xe2, 0xa7, + 0xf0, 0xb3, 0x49, 0xee, 0x2e, 0x35, 0x83, 0xe9, 0x26, 0xbe, 0x49, 0x9e, 0x3c, 0xf7, 0xfc, 0x7f, + 0xf7, 0xcf, 0xf3, 0x5c, 0x02, 0xdb, 0x4c, 0x4a, 0x96, 0xd1, 0x70, 0x9c, 0xb2, 0x82, 0x8c, 0x33, + 0x1a, 0x92, 0x33, 0x9e, 0x8a, 0xd0, 0xc6, 0xd3, 0x8d, 0x45, 0x3e, 0xb6, 0xd7, 0x33, 0x52, 0x10, + 0x3c, 0x51, 0xb2, 0x90, 0xe8, 0x81, 0x55, 0xe2, 0xaa, 0x02, 0x1b, 0x25, 0xb6, 0xf1, 0x74, 0xa3, + 0xbf, 0xee, 0xc8, 0x99, 0x14, 0x4c, 0x69, 0x21, 0x52, 0xc1, 0x42, 0x39, 0xa1, 0x8a, 0x14, 0xa9, + 0x14, 0xb9, 0x85, 0xf4, 0x1d, 0x24, 0x34, 0x4f, 0x63, 0x7d, 0x1e, 0x9e, 0x69, 0x5b, 0x60, 0xd7, + 0x83, 0x9f, 0x3e, 0xb4, 0x4e, 0x4b, 0x22, 0x42, 0xd0, 0x14, 0x84, 0xd3, 0x9e, 0x37, 0xf0, 0x86, + 0x2b, 0x91, 0x89, 0xd1, 0x6b, 0xb8, 0x95, 0x68, 0xa5, 0xa8, 0x28, 0xe2, 0x05, 0xb9, 0xb7, 0x34, + 0xf0, 0x86, 0xab, 0x9b, 0xf7, 0xb1, 0xb3, 0x57, 0xdb, 0x1e, 0xbf, 0xab, 0x8a, 0xa2, 0xae, 0xd3, + 0x2d, 0x32, 0x68, 0x0c, 0x37, 0x13, 0x99, 0x69, 0x2e, 0xe2, 0x73, 0xc2, 0xd3, 0x2c, 0xa5, 0x79, + 0xcf, 0x1f, 0xf8, 0xc3, 0xd5, 0xcd, 0xa7, 0xf8, 0xef, 0x2f, 0x8a, 0x8d, 0x3f, 0xbc, 0x6b, 0xc4, + 0xaf, 0x9c, 0x76, 0x5f, 0x14, 0x6a, 0x1e, 0xad, 0x25, 0x17, 0x92, 0xe8, 0x0b, 0xac, 0x32, 0x45, + 0x84, 0xce, 0x88, 0x4a, 0x8b, 0x79, 0xaf, 0x39, 0xf0, 0x86, 0x6b, 0x9b, 0x3b, 0xd7, 0xe3, 0x9f, + 0xa6, 0x9c, 0xe6, 0x05, 0xe1, 0x93, 0x83, 0xdf, 0x84, 0xa8, 0x8e, 0xeb, 0x4b, 0xb8, 0x7d, 0x89, + 0x09, 0xd4, 0x05, 0xff, 0x2b, 0x9d, 0xbb, 0xbe, 0x95, 0x21, 0x1a, 0x41, 0x6b, 0x4a, 0x32, 0x4d, + 0x5d, 0xab, 0x1e, 0x5f, 0x65, 0xa0, 0x46, 0x9d, 0x47, 0x56, 0xba, 0xb3, 0xb4, 0xed, 0x05, 0x01, + 0xdc, 0xb9, 0xcc, 0x15, 0x02, 0x68, 0xbf, 0x39, 0x3a, 0x3e, 0x3e, 0x7a, 0xdf, 0x6d, 0x04, 0xdf, + 0x3d, 0xe8, 0xd4, 0xf5, 0x97, 0xce, 0x71, 0x1d, 0x6e, 0xb0, 0x24, 0xa6, 0xb3, 0x89, 0xa2, 0x79, + 0x5e, 0xcd, 0x70, 0x25, 0xea, 0xb0, 0x64, 0x7f, 0x91, 0x43, 0x2f, 0x60, 0x99, 0x25, 0xb1, 0xd2, + 0x19, 0xed, 0xf9, 0xc6, 0xf7, 0xc3, 0xab, 0x7c, 0x1f, 0x24, 0x91, 0xce, 0x68, 0xd4, 0x66, 0xe6, + 0x1e, 0xfc, 0xf0, 0xa1, 0x6d, 0x53, 0xe8, 0x11, 0x74, 0x39, 0x99, 0xc5, 0x42, 0xf3, 0x78, 0x4a, + 0x55, 0x89, 0xcf, 0x8d, 0xa1, 0xd6, 0x61, 0x23, 0x5a, 0xe3, 0x64, 0xf6, 0x56, 0xf3, 0x8f, 0x2e, + 0x8f, 0x9e, 0xc0, 0x72, 0x59, 0x4b, 0x58, 0xd5, 0xaf, 0x7b, 0xd5, 0xbe, 0xd5, 0xa1, 0xc5, 0x7b, + 0xee, 0xd0, 0x1e, 0x36, 0xa2, 0x36, 0x27, 0xb3, 0x97, 0x8c, 0xa2, 0x4f, 0xd0, 0x49, 0x45, 0x41, + 0x55, 0x4e, 0x13, 0x73, 0x2a, 0xad, 0xe5, 0xad, 0xeb, 0x59, 0xc6, 0x47, 0x35, 0xe9, 0x61, 0x23, + 0xba, 0x80, 0x42, 0x7b, 0xd0, 0xd2, 0xa2, 0x64, 0x36, 0xaf, 0x37, 0x3e, 0xc7, 0xfc, 0x20, 0x2c, + 0xcc, 0x8a, 0xfb, 0xc7, 0xd0, 0xa9, 0xef, 0x82, 0x9e, 0x43, 0xab, 0xec, 0x6d, 0xd9, 0x07, 0xff, + 0x1f, 0x9a, 0x6b, 0x45, 0xfd, 0x7d, 0x68, 0x19, 0xfe, 0xff, 0x61, 0x46, 0x6d, 0x68, 0x96, 
0xc1, + 0xe8, 0x9b, 0x07, 0x41, 0x22, 0xf9, 0x15, 0xe2, 0xd1, 0xdd, 0x91, 0x5b, 0x30, 0x9f, 0xc8, 0x1e, + 0x29, 0xc8, 0x49, 0x39, 0x92, 0x13, 0xef, 0xf3, 0xae, 0x53, 0x32, 0x99, 0x11, 0xc1, 0xb0, 0x54, + 0x2c, 0x64, 0x54, 0x98, 0x81, 0x85, 0x76, 0x89, 0x4c, 0xd2, 0xfc, 0x4f, 0x7f, 0xbd, 0x67, 0x26, + 0x18, 0xb7, 0x4d, 0xfd, 0xd6, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd7, 0x80, 0x76, 0xdc, 0x24, + 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service.pb.go new file mode 100644 index 0000000..f2c91fc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service.pb.go @@ -0,0 +1,429 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/table/v1/bigtable_table_service.proto + +package table // import "google.golang.org/genproto/googleapis/bigtable/admin/table/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableTableServiceClient is the client API for BigtableTableService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BigtableTableServiceClient interface { + // Creates a new table, to be served from a specified cluster. + // The table can be created with a full set of initial column families, + // specified in the request. + CreateTable(ctx context.Context, in *CreateTableRequest, opts ...grpc.CallOption) (*Table, error) + // Lists the names of all tables served from a specified cluster. + ListTables(ctx context.Context, in *ListTablesRequest, opts ...grpc.CallOption) (*ListTablesResponse, error) + // Gets the schema of the specified table, including its column families. + GetTable(ctx context.Context, in *GetTableRequest, opts ...grpc.CallOption) (*Table, error) + // Permanently deletes a specified table and all of its data. + DeleteTable(ctx context.Context, in *DeleteTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Changes the name of a specified table. + // Cannot be used to move tables between clusters, zones, or projects. + RenameTable(ctx context.Context, in *RenameTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new column family within a specified table. 
+ CreateColumnFamily(ctx context.Context, in *CreateColumnFamilyRequest, opts ...grpc.CallOption) (*ColumnFamily, error) + // Changes the configuration of a specified column family. + UpdateColumnFamily(ctx context.Context, in *ColumnFamily, opts ...grpc.CallOption) (*ColumnFamily, error) + // Permanently deletes a specified column family and all of its data. + DeleteColumnFamily(ctx context.Context, in *DeleteColumnFamilyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Delete all rows in a table corresponding to a particular prefix + BulkDeleteRows(ctx context.Context, in *BulkDeleteRowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type bigtableTableServiceClient struct { + cc *grpc.ClientConn +} + +func NewBigtableTableServiceClient(cc *grpc.ClientConn) BigtableTableServiceClient { + return &bigtableTableServiceClient{cc} +} + +func (c *bigtableTableServiceClient) CreateTable(ctx context.Context, in *CreateTableRequest, opts ...grpc.CallOption) (*Table, error) { + out := new(Table) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/CreateTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) ListTables(ctx context.Context, in *ListTablesRequest, opts ...grpc.CallOption) (*ListTablesResponse, error) { + out := new(ListTablesResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/ListTables", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) GetTable(ctx context.Context, in *GetTableRequest, opts ...grpc.CallOption) (*Table, error) { + out := new(Table) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/GetTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) DeleteTable(ctx context.Context, in *DeleteTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/DeleteTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) RenameTable(ctx context.Context, in *RenameTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/RenameTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) CreateColumnFamily(ctx context.Context, in *CreateColumnFamilyRequest, opts ...grpc.CallOption) (*ColumnFamily, error) { + out := new(ColumnFamily) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/CreateColumnFamily", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) UpdateColumnFamily(ctx context.Context, in *ColumnFamily, opts ...grpc.CallOption) (*ColumnFamily, error) { + out := new(ColumnFamily) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/UpdateColumnFamily", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) DeleteColumnFamily(ctx context.Context, in *DeleteColumnFamilyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/DeleteColumnFamily", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableServiceClient) BulkDeleteRows(ctx context.Context, in *BulkDeleteRowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.table.v1.BigtableTableService/BulkDeleteRows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableTableServiceServer is the server API for BigtableTableService service. +type BigtableTableServiceServer interface { + // Creates a new table, to be served from a specified cluster. + // The table can be created with a full set of initial column families, + // specified in the request. + CreateTable(context.Context, *CreateTableRequest) (*Table, error) + // Lists the names of all tables served from a specified cluster. + ListTables(context.Context, *ListTablesRequest) (*ListTablesResponse, error) + // Gets the schema of the specified table, including its column families. + GetTable(context.Context, *GetTableRequest) (*Table, error) + // Permanently deletes a specified table and all of its data. + DeleteTable(context.Context, *DeleteTableRequest) (*empty.Empty, error) + // Changes the name of a specified table. + // Cannot be used to move tables between clusters, zones, or projects. + RenameTable(context.Context, *RenameTableRequest) (*empty.Empty, error) + // Creates a new column family within a specified table. + CreateColumnFamily(context.Context, *CreateColumnFamilyRequest) (*ColumnFamily, error) + // Changes the configuration of a specified column family. + UpdateColumnFamily(context.Context, *ColumnFamily) (*ColumnFamily, error) + // Permanently deletes a specified column family and all of its data. 
+ DeleteColumnFamily(context.Context, *DeleteColumnFamilyRequest) (*empty.Empty, error) + // Delete all rows in a table corresponding to a particular prefix + BulkDeleteRows(context.Context, *BulkDeleteRowsRequest) (*empty.Empty, error) +} + +func RegisterBigtableTableServiceServer(s *grpc.Server, srv BigtableTableServiceServer) { + s.RegisterService(&_BigtableTableService_serviceDesc, srv) +} + +func _BigtableTableService_CreateTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).CreateTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/CreateTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).CreateTable(ctx, req.(*CreateTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_ListTables_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTablesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).ListTables(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/ListTables", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).ListTables(ctx, req.(*ListTablesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_GetTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).GetTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/GetTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).GetTable(ctx, req.(*GetTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_DeleteTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).DeleteTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/DeleteTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).DeleteTable(ctx, req.(*DeleteTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_RenameTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RenameTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(BigtableTableServiceServer).RenameTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/RenameTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).RenameTable(ctx, req.(*RenameTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_CreateColumnFamily_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateColumnFamilyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).CreateColumnFamily(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/CreateColumnFamily", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).CreateColumnFamily(ctx, req.(*CreateColumnFamilyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_UpdateColumnFamily_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ColumnFamily) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).UpdateColumnFamily(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/UpdateColumnFamily", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).UpdateColumnFamily(ctx, req.(*ColumnFamily)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_DeleteColumnFamily_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteColumnFamilyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).DeleteColumnFamily(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/DeleteColumnFamily", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).DeleteColumnFamily(ctx, req.(*DeleteColumnFamilyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableService_BulkDeleteRows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BulkDeleteRowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableServiceServer).BulkDeleteRows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.table.v1.BigtableTableService/BulkDeleteRows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableServiceServer).BulkDeleteRows(ctx, req.(*BulkDeleteRowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigtableTableService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.admin.table.v1.BigtableTableService", + HandlerType: 
(*BigtableTableServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTable", + Handler: _BigtableTableService_CreateTable_Handler, + }, + { + MethodName: "ListTables", + Handler: _BigtableTableService_ListTables_Handler, + }, + { + MethodName: "GetTable", + Handler: _BigtableTableService_GetTable_Handler, + }, + { + MethodName: "DeleteTable", + Handler: _BigtableTableService_DeleteTable_Handler, + }, + { + MethodName: "RenameTable", + Handler: _BigtableTableService_RenameTable_Handler, + }, + { + MethodName: "CreateColumnFamily", + Handler: _BigtableTableService_CreateColumnFamily_Handler, + }, + { + MethodName: "UpdateColumnFamily", + Handler: _BigtableTableService_UpdateColumnFamily_Handler, + }, + { + MethodName: "DeleteColumnFamily", + Handler: _BigtableTableService_DeleteColumnFamily_Handler, + }, + { + MethodName: "BulkDeleteRows", + Handler: _BigtableTableService_BulkDeleteRows_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/bigtable/admin/table/v1/bigtable_table_service.proto", +} + +func init() { + proto.RegisterFile("google/bigtable/admin/table/v1/bigtable_table_service.proto", fileDescriptor_bigtable_table_service_be80dba644cb3823) +} + +var fileDescriptor_bigtable_table_service_be80dba644cb3823 = []byte{ + // 560 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0xbf, 0x6f, 0xd4, 0x30, + 0x14, 0xc7, 0x65, 0x06, 0x84, 0x7c, 0x88, 0xc1, 0x42, 0x0c, 0x07, 0x62, 0x88, 0xc4, 0x12, 0xa1, + 0x58, 0xb9, 0x82, 0x68, 0xaf, 0xaa, 0x84, 0x72, 0xd7, 0x56, 0xa2, 0x20, 0x95, 0x03, 0x16, 0x96, + 0xca, 0xc9, 0x3d, 0xa2, 0x40, 0x62, 0x87, 0xd8, 0x39, 0x54, 0x50, 0x17, 0x16, 0xfe, 0x00, 0x58, + 0x61, 0x62, 0x64, 0x82, 0x1d, 0x76, 0x56, 0xfe, 0x05, 0xfe, 0x10, 0x14, 0x3b, 0xa1, 0x69, 0xf9, + 0xe1, 0x73, 0xd5, 0x25, 0xe7, 0xb3, 0xbf, 0xdf, 0xf7, 0x3e, 0xcf, 0x7e, 0x96, 0xf1, 0x7a, 0x2a, + 0x44, 0x9a, 0x03, 0x8d, 0xb3, 0x54, 0xb1, 0x38, 0x07, 0xca, 0xe6, 0x45, 0xc6, 0xa9, 0x19, 0x2f, + 0xc2, 0xdf, 0xf3, 0x7b, 0xe6, 0x2b, 0xa1, 0x5a, 0x64, 0x09, 0x04, 0x65, 0x25, 0x94, 0x20, 0x57, + 0x8d, 0x39, 0xe8, 0x44, 0x81, 0x36, 0x07, 0x66, 0xbc, 0x08, 0x87, 0x57, 0xda, 0xe0, 0xac, 0xcc, + 0x28, 0xe3, 0x5c, 0x28, 0xa6, 0x32, 0xc1, 0xa5, 0x71, 0x0f, 0x57, 0xdd, 0x52, 0xcf, 0x99, 0x62, + 0xad, 0x73, 0x7a, 0x22, 0xe8, 0xbd, 0x02, 0xa4, 0x64, 0x29, 0x74, 0xf9, 0x2f, 0xb7, 0x51, 0xf4, + 0xbf, 0xb8, 0x7e, 0x42, 0xa1, 0x28, 0xd5, 0xbe, 0x59, 0x1c, 0x7d, 0x3d, 0x8f, 0x2f, 0x46, 0x6d, + 0x98, 0x87, 0xcd, 0xe7, 0x81, 0x09, 0x42, 0x3e, 0x22, 0x3c, 0x98, 0x54, 0xc0, 0x94, 0x99, 0x26, + 0xa3, 0xe0, 0xff, 0x9b, 0x10, 0xf4, 0xc4, 0x33, 0x78, 0x5e, 0x83, 0x54, 0xc3, 0x6b, 0x36, 0x8f, + 0x56, 0x7b, 0xe3, 0xd7, 0x3f, 0x7e, 0xbe, 0x3d, 0x73, 0xc3, 0xa3, 0x4d, 0x4d, 0xaf, 0x38, 0x2b, + 0x60, 0xa3, 0xac, 0xc4, 0x53, 0x48, 0x94, 0xa4, 0x3e, 0x7d, 0x29, 0x38, 0x34, 0xbf, 0x49, 0x5e, + 0x4b, 0x05, 0x95, 0xa4, 0xfe, 0x81, 0xd9, 0x01, 0x39, 0x46, 0x3e, 0xf9, 0x84, 0x30, 0xbe, 0x9b, + 0x49, 0xa5, 0x23, 0x49, 0x12, 0xda, 0x32, 0x1e, 0x6a, 0x3b, 0xc8, 0x91, 0x8b, 0x45, 0x96, 0x82, + 0x4b, 0xf0, 0x6e, 0x69, 0xe2, 0x90, 0xb8, 0x12, 0x93, 0xf7, 0x08, 0x9f, 0xdb, 0x06, 0x13, 0x8e, + 0x50, 0x5b, 0xe6, 0x4e, 0xe9, 0xb8, 0x9f, 0x6b, 0x9a, 0x6e, 0x85, 0x84, 0x4b, 0xd2, 0xb5, 0x70, + 0xd4, 0x3f, 0x20, 0xef, 0x10, 0x1e, 0x4c, 0x21, 0x87, 0xa5, 0x4f, 0xbd, 0x27, 0xee, 0x28, 0x2f, + 0x75, 0x9e, 0xae, 0xe1, 0x82, 0xcd, 0xa6, 0xe1, 0x3a, 0x2c, 0xff, 0x04, 0x58, 0x1f, 0x10, 0x1e, + 0xcc, 0xa0, 0xb1, 0x2c, 0x89, 0xd5, 0x13, 0xdb, 0xb0, 0x26, 0x1a, 
0x6b, 0xc3, 0x5b, 0x75, 0xc6, + 0x1a, 0x57, 0x3a, 0x4b, 0xd3, 0x86, 0xdf, 0x11, 0x26, 0xe6, 0x02, 0x4c, 0x44, 0x5e, 0x17, 0x7c, + 0x8b, 0x15, 0x59, 0xbe, 0x4f, 0xd6, 0x96, 0xbb, 0x34, 0x7d, 0x4f, 0x87, 0x7b, 0xdd, 0x6a, 0xed, + 0x99, 0xbc, 0x1d, 0x5d, 0xc4, 0xa6, 0x77, 0xdb, 0xb9, 0x08, 0x9a, 0x1c, 0xc6, 0xc9, 0xcc, 0x9d, + 0xfa, 0x86, 0x30, 0x79, 0x54, 0xce, 0x8f, 0x17, 0xe3, 0x44, 0xe4, 0xc8, 0x7f, 0x4f, 0xf3, 0x6f, + 0x0f, 0x23, 0x57, 0xfe, 0x63, 0xf8, 0xcd, 0xa9, 0x20, 0x9f, 0x7c, 0x41, 0x98, 0x98, 0xce, 0x74, + 0x3b, 0x8e, 0x3f, 0x3d, 0xb6, 0xee, 0xb9, 0xa3, 0xc1, 0xa7, 0xfe, 0x29, 0x80, 0x93, 0xcf, 0x08, + 0x5f, 0x88, 0xea, 0xfc, 0x99, 0xa1, 0x98, 0x89, 0x17, 0x92, 0xdc, 0xb4, 0x11, 0x1f, 0xd5, 0xdb, + 0x68, 0xef, 0x6b, 0xda, 0x1d, 0x6f, 0x4b, 0xd3, 0x9a, 0x57, 0xc3, 0xa9, 0xe3, 0xe3, 0x23, 0xe9, + 0xc6, 0xc8, 0x8f, 0xde, 0x20, 0xec, 0x25, 0xa2, 0xb0, 0x70, 0x46, 0xc3, 0xbf, 0x3d, 0x32, 0x72, + 0xb7, 0xc1, 0xdb, 0x45, 0x8f, 0x27, 0xad, 0x3b, 0x15, 0x39, 0xe3, 0x69, 0x20, 0xaa, 0x94, 0xa6, + 0xc0, 0x35, 0x3c, 0x35, 0x4b, 0xac, 0xcc, 0xe4, 0xbf, 0xde, 0xc1, 0x75, 0x3d, 0x88, 0xcf, 0x6a, + 0xfd, 0xca, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x03, 0x65, 0xb4, 0xe0, 0xeb, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service_messages.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service_messages.pb.go new file mode 100644 index 0000000..7bf2278 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/table/v1/bigtable_table_service_messages.pb.go @@ -0,0 +1,606 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/table/v1/bigtable_table_service_messages.proto + +package table // import "google.golang.org/genproto/googleapis/bigtable/admin/table/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type CreateTableRequest struct { + // The unique name of the cluster in which to create the new table. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name by which the new table should be referred to within the cluster, + // e.g. "foobar" rather than "/tables/foobar". + TableId string `protobuf:"bytes,2,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + // The Table to create. The `name` field of the Table and all of its + // ColumnFamilies must be left blank, and will be populated in the response. + Table *Table `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // The optional list of row keys that will be used to initially split the + // table into several tablets (Tablets are similar to HBase regions). + // Given two split keys, "s1" and "s2", three tablets will be created, + // spanning the key ranges: [, s1), [s1, s2), [s2, ). 
+ // + // Example: + // * Row keys := ["a", "apple", "custom", "customer_1", "customer_2", + // "other", "zz"] + // * initial_split_keys := ["apple", "customer_1", "customer_2", "other"] + // * Key assignment: + // - Tablet 1 [, apple) => {"a"}. + // - Tablet 2 [apple, customer_1) => {"apple", "custom"}. + // - Tablet 3 [customer_1, customer_2) => {"customer_1"}. + // - Tablet 4 [customer_2, other) => {"customer_2"}. + // - Tablet 5 [other, ) => {"other", "zz"}. + InitialSplitKeys []string `protobuf:"bytes,4,rep,name=initial_split_keys,json=initialSplitKeys,proto3" json:"initial_split_keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTableRequest) Reset() { *m = CreateTableRequest{} } +func (m *CreateTableRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTableRequest) ProtoMessage() {} +func (*CreateTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{0} +} +func (m *CreateTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTableRequest.Unmarshal(m, b) +} +func (m *CreateTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTableRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTableRequest.Merge(dst, src) +} +func (m *CreateTableRequest) XXX_Size() int { + return xxx_messageInfo_CreateTableRequest.Size(m) +} +func (m *CreateTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTableRequest proto.InternalMessageInfo + +func (m *CreateTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateTableRequest) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +func (m *CreateTableRequest) GetTable() *Table { + if m != nil { + return m.Table + } + return nil +} + +func (m *CreateTableRequest) GetInitialSplitKeys() []string { + if m != nil { + return m.InitialSplitKeys + } + return nil +} + +type ListTablesRequest struct { + // The unique name of the cluster for which tables should be listed. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTablesRequest) Reset() { *m = ListTablesRequest{} } +func (m *ListTablesRequest) String() string { return proto.CompactTextString(m) } +func (*ListTablesRequest) ProtoMessage() {} +func (*ListTablesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{1} +} +func (m *ListTablesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTablesRequest.Unmarshal(m, b) +} +func (m *ListTablesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTablesRequest.Marshal(b, m, deterministic) +} +func (dst *ListTablesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTablesRequest.Merge(dst, src) +} +func (m *ListTablesRequest) XXX_Size() int { + return xxx_messageInfo_ListTablesRequest.Size(m) +} +func (m *ListTablesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTablesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTablesRequest proto.InternalMessageInfo + +func (m *ListTablesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type ListTablesResponse struct { + // The tables present in the requested cluster. + // At present, only the names of the tables are populated. + Tables []*Table `protobuf:"bytes,1,rep,name=tables,proto3" json:"tables,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTablesResponse) Reset() { *m = ListTablesResponse{} } +func (m *ListTablesResponse) String() string { return proto.CompactTextString(m) } +func (*ListTablesResponse) ProtoMessage() {} +func (*ListTablesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{2} +} +func (m *ListTablesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTablesResponse.Unmarshal(m, b) +} +func (m *ListTablesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTablesResponse.Marshal(b, m, deterministic) +} +func (dst *ListTablesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTablesResponse.Merge(dst, src) +} +func (m *ListTablesResponse) XXX_Size() int { + return xxx_messageInfo_ListTablesResponse.Size(m) +} +func (m *ListTablesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTablesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTablesResponse proto.InternalMessageInfo + +func (m *ListTablesResponse) GetTables() []*Table { + if m != nil { + return m.Tables + } + return nil +} + +type GetTableRequest struct { + // The unique name of the requested table. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTableRequest) Reset() { *m = GetTableRequest{} } +func (m *GetTableRequest) String() string { return proto.CompactTextString(m) } +func (*GetTableRequest) ProtoMessage() {} +func (*GetTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{3} +} +func (m *GetTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTableRequest.Unmarshal(m, b) +} +func (m *GetTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTableRequest.Marshal(b, m, deterministic) +} +func (dst *GetTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTableRequest.Merge(dst, src) +} +func (m *GetTableRequest) XXX_Size() int { + return xxx_messageInfo_GetTableRequest.Size(m) +} +func (m *GetTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTableRequest proto.InternalMessageInfo + +func (m *GetTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type DeleteTableRequest struct { + // The unique name of the table to be deleted. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTableRequest) Reset() { *m = DeleteTableRequest{} } +func (m *DeleteTableRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTableRequest) ProtoMessage() {} +func (*DeleteTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{4} +} +func (m *DeleteTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTableRequest.Unmarshal(m, b) +} +func (m *DeleteTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTableRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTableRequest.Merge(dst, src) +} +func (m *DeleteTableRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTableRequest.Size(m) +} +func (m *DeleteTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTableRequest proto.InternalMessageInfo + +func (m *DeleteTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type RenameTableRequest struct { + // The current unique name of the table. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The new name by which the table should be referred to within its containing + // cluster, e.g. "foobar" rather than "/tables/foobar". 
+ NewId string `protobuf:"bytes,2,opt,name=new_id,json=newId,proto3" json:"new_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RenameTableRequest) Reset() { *m = RenameTableRequest{} } +func (m *RenameTableRequest) String() string { return proto.CompactTextString(m) } +func (*RenameTableRequest) ProtoMessage() {} +func (*RenameTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{5} +} +func (m *RenameTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RenameTableRequest.Unmarshal(m, b) +} +func (m *RenameTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RenameTableRequest.Marshal(b, m, deterministic) +} +func (dst *RenameTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RenameTableRequest.Merge(dst, src) +} +func (m *RenameTableRequest) XXX_Size() int { + return xxx_messageInfo_RenameTableRequest.Size(m) +} +func (m *RenameTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RenameTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RenameTableRequest proto.InternalMessageInfo + +func (m *RenameTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RenameTableRequest) GetNewId() string { + if m != nil { + return m.NewId + } + return "" +} + +type CreateColumnFamilyRequest struct { + // The unique name of the table in which to create the new column family. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name by which the new column family should be referred to within the + // table, e.g. "foobar" rather than "/columnFamilies/foobar". + ColumnFamilyId string `protobuf:"bytes,2,opt,name=column_family_id,json=columnFamilyId,proto3" json:"column_family_id,omitempty"` + // The column family to create. The `name` field must be left blank. 
+ ColumnFamily *ColumnFamily `protobuf:"bytes,3,opt,name=column_family,json=columnFamily,proto3" json:"column_family,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateColumnFamilyRequest) Reset() { *m = CreateColumnFamilyRequest{} } +func (m *CreateColumnFamilyRequest) String() string { return proto.CompactTextString(m) } +func (*CreateColumnFamilyRequest) ProtoMessage() {} +func (*CreateColumnFamilyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{6} +} +func (m *CreateColumnFamilyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateColumnFamilyRequest.Unmarshal(m, b) +} +func (m *CreateColumnFamilyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateColumnFamilyRequest.Marshal(b, m, deterministic) +} +func (dst *CreateColumnFamilyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateColumnFamilyRequest.Merge(dst, src) +} +func (m *CreateColumnFamilyRequest) XXX_Size() int { + return xxx_messageInfo_CreateColumnFamilyRequest.Size(m) +} +func (m *CreateColumnFamilyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateColumnFamilyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateColumnFamilyRequest proto.InternalMessageInfo + +func (m *CreateColumnFamilyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateColumnFamilyRequest) GetColumnFamilyId() string { + if m != nil { + return m.ColumnFamilyId + } + return "" +} + +func (m *CreateColumnFamilyRequest) GetColumnFamily() *ColumnFamily { + if m != nil { + return m.ColumnFamily + } + return nil +} + +type DeleteColumnFamilyRequest struct { + // The unique name of the column family to be deleted. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteColumnFamilyRequest) Reset() { *m = DeleteColumnFamilyRequest{} } +func (m *DeleteColumnFamilyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteColumnFamilyRequest) ProtoMessage() {} +func (*DeleteColumnFamilyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{7} +} +func (m *DeleteColumnFamilyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteColumnFamilyRequest.Unmarshal(m, b) +} +func (m *DeleteColumnFamilyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteColumnFamilyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteColumnFamilyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteColumnFamilyRequest.Merge(dst, src) +} +func (m *DeleteColumnFamilyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteColumnFamilyRequest.Size(m) +} +func (m *DeleteColumnFamilyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteColumnFamilyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteColumnFamilyRequest proto.InternalMessageInfo + +func (m *DeleteColumnFamilyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type BulkDeleteRowsRequest struct { + // The unique name of the table on which to perform the bulk delete + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // Types that are valid to be assigned to Target: + // *BulkDeleteRowsRequest_RowKeyPrefix + // *BulkDeleteRowsRequest_DeleteAllDataFromTable + Target isBulkDeleteRowsRequest_Target `protobuf_oneof:"target"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BulkDeleteRowsRequest) Reset() { *m = BulkDeleteRowsRequest{} } +func (m *BulkDeleteRowsRequest) String() string { return proto.CompactTextString(m) } +func (*BulkDeleteRowsRequest) ProtoMessage() {} +func (*BulkDeleteRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_service_messages_b4971580659b019f, []int{8} +} +func (m *BulkDeleteRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BulkDeleteRowsRequest.Unmarshal(m, b) +} +func (m *BulkDeleteRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BulkDeleteRowsRequest.Marshal(b, m, deterministic) +} +func (dst *BulkDeleteRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BulkDeleteRowsRequest.Merge(dst, src) +} +func (m *BulkDeleteRowsRequest) XXX_Size() int { + return xxx_messageInfo_BulkDeleteRowsRequest.Size(m) +} +func (m *BulkDeleteRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BulkDeleteRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BulkDeleteRowsRequest proto.InternalMessageInfo + +func (m *BulkDeleteRowsRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +type isBulkDeleteRowsRequest_Target interface { + isBulkDeleteRowsRequest_Target() +} + +type BulkDeleteRowsRequest_RowKeyPrefix struct { + RowKeyPrefix []byte `protobuf:"bytes,2,opt,name=row_key_prefix,json=rowKeyPrefix,proto3,oneof"` +} + +type BulkDeleteRowsRequest_DeleteAllDataFromTable struct { + DeleteAllDataFromTable bool 
`protobuf:"varint,3,opt,name=delete_all_data_from_table,json=deleteAllDataFromTable,proto3,oneof"` +} + +func (*BulkDeleteRowsRequest_RowKeyPrefix) isBulkDeleteRowsRequest_Target() {} + +func (*BulkDeleteRowsRequest_DeleteAllDataFromTable) isBulkDeleteRowsRequest_Target() {} + +func (m *BulkDeleteRowsRequest) GetTarget() isBulkDeleteRowsRequest_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *BulkDeleteRowsRequest) GetRowKeyPrefix() []byte { + if x, ok := m.GetTarget().(*BulkDeleteRowsRequest_RowKeyPrefix); ok { + return x.RowKeyPrefix + } + return nil +} + +func (m *BulkDeleteRowsRequest) GetDeleteAllDataFromTable() bool { + if x, ok := m.GetTarget().(*BulkDeleteRowsRequest_DeleteAllDataFromTable); ok { + return x.DeleteAllDataFromTable + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BulkDeleteRowsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BulkDeleteRowsRequest_OneofMarshaler, _BulkDeleteRowsRequest_OneofUnmarshaler, _BulkDeleteRowsRequest_OneofSizer, []interface{}{ + (*BulkDeleteRowsRequest_RowKeyPrefix)(nil), + (*BulkDeleteRowsRequest_DeleteAllDataFromTable)(nil), + } +} + +func _BulkDeleteRowsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BulkDeleteRowsRequest) + // target + switch x := m.Target.(type) { + case *BulkDeleteRowsRequest_RowKeyPrefix: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RowKeyPrefix) + case *BulkDeleteRowsRequest_DeleteAllDataFromTable: + t := uint64(0) + if x.DeleteAllDataFromTable { + t = 1 + } + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("BulkDeleteRowsRequest.Target has unexpected type %T", x) + } + return nil +} + +func _BulkDeleteRowsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BulkDeleteRowsRequest) + switch tag { + case 2: // target.row_key_prefix + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Target = &BulkDeleteRowsRequest_RowKeyPrefix{x} + return true, err + case 3: // target.delete_all_data_from_table + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &BulkDeleteRowsRequest_DeleteAllDataFromTable{x != 0} + return true, err + default: + return false, nil + } +} + +func _BulkDeleteRowsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BulkDeleteRowsRequest) + // target + switch x := m.Target.(type) { + case *BulkDeleteRowsRequest_RowKeyPrefix: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RowKeyPrefix))) + n += len(x.RowKeyPrefix) + case *BulkDeleteRowsRequest_DeleteAllDataFromTable: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*CreateTableRequest)(nil), "google.bigtable.admin.table.v1.CreateTableRequest") + proto.RegisterType((*ListTablesRequest)(nil), "google.bigtable.admin.table.v1.ListTablesRequest") + proto.RegisterType((*ListTablesResponse)(nil), "google.bigtable.admin.table.v1.ListTablesResponse") + proto.RegisterType((*GetTableRequest)(nil), "google.bigtable.admin.table.v1.GetTableRequest") + proto.RegisterType((*DeleteTableRequest)(nil), 
"google.bigtable.admin.table.v1.DeleteTableRequest") + proto.RegisterType((*RenameTableRequest)(nil), "google.bigtable.admin.table.v1.RenameTableRequest") + proto.RegisterType((*CreateColumnFamilyRequest)(nil), "google.bigtable.admin.table.v1.CreateColumnFamilyRequest") + proto.RegisterType((*DeleteColumnFamilyRequest)(nil), "google.bigtable.admin.table.v1.DeleteColumnFamilyRequest") + proto.RegisterType((*BulkDeleteRowsRequest)(nil), "google.bigtable.admin.table.v1.BulkDeleteRowsRequest") +} + +func init() { + proto.RegisterFile("google/bigtable/admin/table/v1/bigtable_table_service_messages.proto", fileDescriptor_bigtable_table_service_messages_b4971580659b019f) +} + +var fileDescriptor_bigtable_table_service_messages_b4971580659b019f = []byte{ + // 514 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0xad, 0x49, 0x1b, 0x92, 0x21, 0x94, 0xb2, 0x52, 0x51, 0x52, 0x09, 0x14, 0x56, 0x2a, 0xe4, + 0x50, 0xd9, 0x2a, 0x5c, 0x90, 0x0a, 0x42, 0x24, 0x51, 0x69, 0x54, 0x40, 0xc1, 0xe1, 0xc4, 0xc5, + 0xda, 0xc4, 0x13, 0x6b, 0xd5, 0xb5, 0x37, 0xec, 0x6e, 0x12, 0xf2, 0x13, 0x7c, 0x06, 0x27, 0xc4, + 0x37, 0x22, 0xef, 0x9a, 0x26, 0x3d, 0x10, 0x97, 0x8b, 0x35, 0x9e, 0x79, 0xf3, 0x66, 0xf6, 0xcd, + 0x0c, 0xf4, 0x13, 0x29, 0x13, 0x81, 0xc1, 0x98, 0x27, 0x86, 0x8d, 0x05, 0x06, 0x2c, 0x4e, 0x79, + 0x16, 0x38, 0x7b, 0x71, 0x7a, 0xed, 0x8f, 0xdc, 0x57, 0xa3, 0x5a, 0xf0, 0x09, 0x46, 0x29, 0x6a, + 0xcd, 0x12, 0xd4, 0xfe, 0x4c, 0x49, 0x23, 0xc9, 0x13, 0xc7, 0xe2, 0xff, 0x45, 0xfb, 0x96, 0xc5, + 0x77, 0xf6, 0xe2, 0xf4, 0xe8, 0xd5, 0xff, 0x55, 0x89, 0x99, 0x61, 0x8e, 0x99, 0xfe, 0xf6, 0x80, + 0xf4, 0x14, 0x32, 0x83, 0x5f, 0xf2, 0x50, 0x88, 0xdf, 0xe6, 0xa8, 0x0d, 0x21, 0xb0, 0x9b, 0xb1, + 0x14, 0x9b, 0x5e, 0xdb, 0xeb, 0xd4, 0x43, 0x6b, 0x93, 0x16, 0xd4, 0x5c, 0x3a, 0x8f, 0x9b, 0x77, + 0xac, 0xff, 0xae, 0xfd, 0x1f, 0xc4, 0xe4, 0x0c, 0xf6, 0xac, 0xd9, 0xac, 0xb4, 0xbd, 0xce, 0xbd, + 0x17, 0xc7, 0xfe, 0xf6, 0x7e, 0x7d, 0x57, 0xcb, 0xe5, 0x90, 0x13, 0x20, 0x3c, 0xe3, 0x86, 0x33, + 0x11, 0xe9, 0x99, 0xe0, 0x26, 0xba, 0xc2, 0x95, 0x6e, 0xee, 0xb6, 0x2b, 0x9d, 0x7a, 0x78, 0x50, + 0x44, 0x46, 0x79, 0xe0, 0x12, 0x57, 0x9a, 0x3e, 0x87, 0x87, 0x1f, 0xb8, 0x36, 0x96, 0x41, 0x6f, + 0x69, 0x97, 0x8e, 0x80, 0x6c, 0x02, 0xf5, 0x4c, 0x66, 0x1a, 0xc9, 0x1b, 0xa8, 0xda, 0xaa, 0xba, + 0xe9, 0xb5, 0x2b, 0xb7, 0x6f, 0xb5, 0x48, 0xa2, 0xc7, 0xf0, 0xe0, 0x3d, 0x9a, 0x32, 0xa9, 0x68, + 0x07, 0x48, 0x1f, 0x05, 0x96, 0x8b, 0x4a, 0xdf, 0x02, 0x09, 0x31, 0xb7, 0x4a, 0xe5, 0x3f, 0x84, + 0x6a, 0x86, 0xcb, 0xb5, 0xf8, 0x7b, 0x19, 0x2e, 0x07, 0x31, 0xfd, 0xe5, 0x41, 0xcb, 0x0d, 0xb0, + 0x27, 0xc5, 0x3c, 0xcd, 0xce, 0x59, 0xca, 0xc5, 0x6a, 0x1b, 0x51, 0x07, 0x0e, 0x26, 0x16, 0x1a, + 0x4d, 0x2d, 0x76, 0x4d, 0xb9, 0x3f, 0xd9, 0xa0, 0x18, 0xc4, 0xe4, 0x33, 0xdc, 0xbf, 0x81, 0x2c, + 0xc6, 0x7b, 0x52, 0xa6, 0xd9, 0x8d, 0x4e, 0x1a, 0x9b, 0xa4, 0x34, 0x80, 0x96, 0x53, 0xe6, 0x96, + 0xdd, 0xd2, 0x9f, 0x1e, 0x1c, 0x76, 0xe7, 0xe2, 0xca, 0x65, 0x85, 0x72, 0x79, 0x3d, 0xf4, 0xc7, + 0x00, 0x6e, 0x1f, 0x37, 0x72, 0xea, 0xd6, 0xf3, 0x29, 0x7f, 0xe6, 0x33, 0xd8, 0x57, 0x72, 0x99, + 0x2f, 0x53, 0x34, 0x53, 0x38, 0xe5, 0xdf, 0xed, 0x23, 0x1b, 0x17, 0x3b, 0x61, 0x43, 0xc9, 0xe5, + 0x25, 0xae, 0x86, 0xd6, 0x4b, 0x5e, 0xc3, 0x51, 0x6c, 0xb9, 0x23, 0x26, 0x84, 0x3d, 0x8d, 0x68, + 0xaa, 0x64, 0x1a, 0xad, 0x17, 0xba, 0x76, 0xb1, 0x13, 0x3e, 0x72, 0x98, 0x77, 0x42, 0xf4, 0x99, + 0x61, 0xe7, 0x4a, 0xa6, 0x76, 0x60, 0xdd, 0x5a, 0xbe, 0x4f, 0x2a, 0x41, 0xd3, 0xfd, 0xe1, 0x01, + 
0x9d, 0xc8, 0xb4, 0x44, 0x9b, 0xee, 0xd3, 0x6e, 0x11, 0xb0, 0xf9, 0x23, 0x77, 0xef, 0x1f, 0x8b, + 0x73, 0x1f, 0xe6, 0x37, 0x39, 0xf4, 0xbe, 0xf6, 0x0a, 0x92, 0x44, 0x0a, 0x96, 0x25, 0xbe, 0x54, + 0x49, 0x90, 0x60, 0x66, 0x2f, 0x36, 0x70, 0x21, 0x36, 0xe3, 0xfa, 0x5f, 0xe7, 0x7e, 0x66, 0x8d, + 0x71, 0xd5, 0xe2, 0x5f, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0xf7, 0x08, 0x29, 0x16, 0x83, 0x04, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_instance_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_instance_admin.pb.go new file mode 100644 index 0000000..4fdfd83 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_instance_admin.pb.go @@ -0,0 +1,2062 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/v2/bigtable_instance_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/bigtable/admin/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for BigtableInstanceAdmin.CreateInstance. +type CreateInstanceRequest struct { + // The unique name of the project in which to create the new instance. + // Values are of the form `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to be used when referring to the new instance within its project, + // e.g., just `myinstance` rather than + // `projects/myproject/instances/myinstance`. + InstanceId string `protobuf:"bytes,2,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // The instance to create. + // Fields marked `OutputOnly` must be left blank. + Instance *Instance `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // The clusters to be created within the instance, mapped by desired + // cluster ID, e.g., just `mycluster` rather than + // `projects/myproject/instances/myinstance/clusters/mycluster`. + // Fields marked `OutputOnly` must be left blank. + // Currently, at most two clusters can be specified. 
+ Clusters map[string]*Cluster `protobuf:"bytes,4,rep,name=clusters,proto3" json:"clusters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceRequest) Reset() { *m = CreateInstanceRequest{} } +func (m *CreateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceRequest) ProtoMessage() {} +func (*CreateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{0} +} +func (m *CreateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceRequest.Unmarshal(m, b) +} +func (m *CreateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceRequest.Merge(dst, src) +} +func (m *CreateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_CreateInstanceRequest.Size(m) +} +func (m *CreateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceRequest proto.InternalMessageInfo + +func (m *CreateInstanceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInstanceRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *CreateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +func (m *CreateInstanceRequest) GetClusters() map[string]*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +// Request message for BigtableInstanceAdmin.GetInstance. +type GetInstanceRequest struct { + // The unique name of the requested instance. Values are of the form + // `projects//instances/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceRequest) Reset() { *m = GetInstanceRequest{} } +func (m *GetInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceRequest) ProtoMessage() {} +func (*GetInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{1} +} +func (m *GetInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceRequest.Unmarshal(m, b) +} +func (m *GetInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceRequest.Merge(dst, src) +} +func (m *GetInstanceRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceRequest.Size(m) +} +func (m *GetInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceRequest proto.InternalMessageInfo + +func (m *GetInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableInstanceAdmin.ListInstances. +type ListInstancesRequest struct { + // The unique name of the project for which a list of instances is requested. 
+ // Values are of the form `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // DEPRECATED: This field is unused and ignored. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesRequest) Reset() { *m = ListInstancesRequest{} } +func (m *ListInstancesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstancesRequest) ProtoMessage() {} +func (*ListInstancesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{2} +} +func (m *ListInstancesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesRequest.Unmarshal(m, b) +} +func (m *ListInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstancesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesRequest.Merge(dst, src) +} +func (m *ListInstancesRequest) XXX_Size() int { + return xxx_messageInfo_ListInstancesRequest.Size(m) +} +func (m *ListInstancesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesRequest proto.InternalMessageInfo + +func (m *ListInstancesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstancesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for BigtableInstanceAdmin.ListInstances. +type ListInstancesResponse struct { + // The list of requested instances. + Instances []*Instance `protobuf:"bytes,1,rep,name=instances,proto3" json:"instances,omitempty"` + // Locations from which Instance information could not be retrieved, + // due to an outage or some other transient condition. + // Instances whose Clusters are all in one of the failed locations + // may be missing from `instances`, and Instances with at least one + // Cluster in a failed location may only have partial information returned. + // Values are of the form `projects//locations/` + FailedLocations []string `protobuf:"bytes,2,rep,name=failed_locations,json=failedLocations,proto3" json:"failed_locations,omitempty"` + // DEPRECATED: This field is unused and ignored. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesResponse) Reset() { *m = ListInstancesResponse{} } +func (m *ListInstancesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstancesResponse) ProtoMessage() {} +func (*ListInstancesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{3} +} +func (m *ListInstancesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesResponse.Unmarshal(m, b) +} +func (m *ListInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstancesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesResponse.Merge(dst, src) +} +func (m *ListInstancesResponse) XXX_Size() int { + return xxx_messageInfo_ListInstancesResponse.Size(m) +} +func (m *ListInstancesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesResponse proto.InternalMessageInfo + +func (m *ListInstancesResponse) GetInstances() []*Instance { + if m != nil { + return m.Instances + } + return nil +} + +func (m *ListInstancesResponse) GetFailedLocations() []string { + if m != nil { + return m.FailedLocations + } + return nil +} + +func (m *ListInstancesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for BigtableInstanceAdmin.PartialUpdateInstance. +type PartialUpdateInstanceRequest struct { + // The Instance which will (partially) replace the current value. + Instance *Instance `protobuf:"bytes,1,opt,name=instance,proto3" json:"instance,omitempty"` + // The subset of Instance fields which should be replaced. + // Must be explicitly set. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartialUpdateInstanceRequest) Reset() { *m = PartialUpdateInstanceRequest{} } +func (m *PartialUpdateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*PartialUpdateInstanceRequest) ProtoMessage() {} +func (*PartialUpdateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{4} +} +func (m *PartialUpdateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartialUpdateInstanceRequest.Unmarshal(m, b) +} +func (m *PartialUpdateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartialUpdateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *PartialUpdateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartialUpdateInstanceRequest.Merge(dst, src) +} +func (m *PartialUpdateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_PartialUpdateInstanceRequest.Size(m) +} +func (m *PartialUpdateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PartialUpdateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PartialUpdateInstanceRequest proto.InternalMessageInfo + +func (m *PartialUpdateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +func (m *PartialUpdateInstanceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for BigtableInstanceAdmin.DeleteInstance. +type DeleteInstanceRequest struct { + // The unique name of the instance to be deleted. + // Values are of the form `projects//instances/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstanceRequest) Reset() { *m = DeleteInstanceRequest{} } +func (m *DeleteInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstanceRequest) ProtoMessage() {} +func (*DeleteInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{5} +} +func (m *DeleteInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstanceRequest.Unmarshal(m, b) +} +func (m *DeleteInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstanceRequest.Merge(dst, src) +} +func (m *DeleteInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstanceRequest.Size(m) +} +func (m *DeleteInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstanceRequest proto.InternalMessageInfo + +func (m *DeleteInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableInstanceAdmin.CreateCluster. +type CreateClusterRequest struct { + // The unique name of the instance in which to create the new cluster. + // Values are of the form + // `projects//instances/`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to be used when referring to the new cluster within its instance, + // e.g., just `mycluster` rather than + // `projects/myproject/instances/myinstance/clusters/mycluster`. + ClusterId string `protobuf:"bytes,2,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The cluster to be created. + // Fields marked `OutputOnly` must be left blank. + Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{6} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +func (m *CreateClusterRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +// Request message for BigtableInstanceAdmin.GetCluster. +type GetClusterRequest struct { + // The unique name of the requested cluster. Values are of the form + // `projects//instances//clusters/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{7} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +func (m *GetClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableInstanceAdmin.ListClusters. +type ListClustersRequest struct { + // The unique name of the instance for which a list of clusters is requested. + // Values are of the form `projects//instances/`. + // Use ` = '-'` to list Clusters for all Instances in a project, + // e.g., `projects/myproject/instances/-`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // DEPRECATED: This field is unused and ignored. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{8} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +func (m *ListClustersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListClustersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for BigtableInstanceAdmin.ListClusters. +type ListClustersResponse struct { + // The list of requested clusters. 
+ Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // Locations from which Cluster information could not be retrieved, + // due to an outage or some other transient condition. + // Clusters from these locations may be missing from `clusters`, + // or may only have partial information returned. + // Values are of the form `projects//locations/` + FailedLocations []string `protobuf:"bytes,2,rep,name=failed_locations,json=failedLocations,proto3" json:"failed_locations,omitempty"` + // DEPRECATED: This field is unused and ignored. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{9} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetFailedLocations() []string { + if m != nil { + return m.FailedLocations + } + return nil +} + +func (m *ListClustersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for BigtableInstanceAdmin.DeleteCluster. +type DeleteClusterRequest struct { + // The unique name of the cluster to be deleted. Values are of the form + // `projects//instances//clusters/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{10} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +func (m *DeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The metadata for the Operation returned by CreateInstance. +type CreateInstanceMetadata struct { + // The request that prompted the initiation of this CreateInstance operation. + OriginalRequest *CreateInstanceRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceMetadata) Reset() { *m = CreateInstanceMetadata{} } +func (m *CreateInstanceMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceMetadata) ProtoMessage() {} +func (*CreateInstanceMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{11} +} +func (m *CreateInstanceMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceMetadata.Unmarshal(m, b) +} +func (m *CreateInstanceMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceMetadata.Merge(dst, src) +} +func (m *CreateInstanceMetadata) XXX_Size() int { + return xxx_messageInfo_CreateInstanceMetadata.Size(m) +} +func (m *CreateInstanceMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceMetadata proto.InternalMessageInfo + +func (m *CreateInstanceMetadata) GetOriginalRequest() *CreateInstanceRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *CreateInstanceMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *CreateInstanceMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// The metadata for the Operation returned by UpdateInstance. +type UpdateInstanceMetadata struct { + // The request that prompted the initiation of this UpdateInstance operation. + OriginalRequest *PartialUpdateInstanceRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInstanceMetadata) Reset() { *m = UpdateInstanceMetadata{} } +func (m *UpdateInstanceMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateInstanceMetadata) ProtoMessage() {} +func (*UpdateInstanceMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{12} +} +func (m *UpdateInstanceMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInstanceMetadata.Unmarshal(m, b) +} +func (m *UpdateInstanceMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInstanceMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateInstanceMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInstanceMetadata.Merge(dst, src) +} +func (m *UpdateInstanceMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateInstanceMetadata.Size(m) +} +func (m *UpdateInstanceMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInstanceMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInstanceMetadata proto.InternalMessageInfo + +func (m *UpdateInstanceMetadata) GetOriginalRequest() *PartialUpdateInstanceRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *UpdateInstanceMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *UpdateInstanceMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// The metadata for the Operation returned by CreateCluster. +type CreateClusterMetadata struct { + // The request that prompted the initiation of this CreateCluster operation. + OriginalRequest *CreateClusterRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterMetadata) Reset() { *m = CreateClusterMetadata{} } +func (m *CreateClusterMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateClusterMetadata) ProtoMessage() {} +func (*CreateClusterMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{13} +} +func (m *CreateClusterMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterMetadata.Unmarshal(m, b) +} +func (m *CreateClusterMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateClusterMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterMetadata.Merge(dst, src) +} +func (m *CreateClusterMetadata) XXX_Size() int { + return xxx_messageInfo_CreateClusterMetadata.Size(m) +} +func (m *CreateClusterMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterMetadata proto.InternalMessageInfo + +func (m *CreateClusterMetadata) GetOriginalRequest() *CreateClusterRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *CreateClusterMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *CreateClusterMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// The metadata for the Operation returned by UpdateCluster. +type UpdateClusterMetadata struct { + // The request that prompted the initiation of this UpdateCluster operation. + OriginalRequest *Cluster `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterMetadata) Reset() { *m = UpdateClusterMetadata{} } +func (m *UpdateClusterMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterMetadata) ProtoMessage() {} +func (*UpdateClusterMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{14} +} +func (m *UpdateClusterMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterMetadata.Unmarshal(m, b) +} +func (m *UpdateClusterMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterMetadata.Merge(dst, src) +} +func (m *UpdateClusterMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateClusterMetadata.Size(m) +} +func (m *UpdateClusterMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterMetadata proto.InternalMessageInfo + +func (m *UpdateClusterMetadata) GetOriginalRequest() *Cluster { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *UpdateClusterMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *UpdateClusterMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// Request message for BigtableInstanceAdmin.CreateAppProfile. +type CreateAppProfileRequest struct { + // The unique name of the instance in which to create the new app profile. + // Values are of the form + // `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to be used when referring to the new app profile within its + // instance, e.g., just `myprofile` rather than + // `projects/myproject/instances/myinstance/appProfiles/myprofile`. + AppProfileId string `protobuf:"bytes,2,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The app profile to be created. + // Fields marked `OutputOnly` will be ignored. + AppProfile *AppProfile `protobuf:"bytes,3,opt,name=app_profile,json=appProfile,proto3" json:"app_profile,omitempty"` + // If true, ignore safety checks when creating the app profile. 
+ IgnoreWarnings bool `protobuf:"varint,4,opt,name=ignore_warnings,json=ignoreWarnings,proto3" json:"ignore_warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAppProfileRequest) Reset() { *m = CreateAppProfileRequest{} } +func (m *CreateAppProfileRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAppProfileRequest) ProtoMessage() {} +func (*CreateAppProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{15} +} +func (m *CreateAppProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAppProfileRequest.Unmarshal(m, b) +} +func (m *CreateAppProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAppProfileRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAppProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAppProfileRequest.Merge(dst, src) +} +func (m *CreateAppProfileRequest) XXX_Size() int { + return xxx_messageInfo_CreateAppProfileRequest.Size(m) +} +func (m *CreateAppProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAppProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAppProfileRequest proto.InternalMessageInfo + +func (m *CreateAppProfileRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateAppProfileRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *CreateAppProfileRequest) GetAppProfile() *AppProfile { + if m != nil { + return m.AppProfile + } + return nil +} + +func (m *CreateAppProfileRequest) GetIgnoreWarnings() bool { + if m != nil { + return m.IgnoreWarnings + } + return false +} + +// Request message for BigtableInstanceAdmin.GetAppProfile. +type GetAppProfileRequest struct { + // The unique name of the requested app profile. Values are of the form + // `projects//instances//appProfiles/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAppProfileRequest) Reset() { *m = GetAppProfileRequest{} } +func (m *GetAppProfileRequest) String() string { return proto.CompactTextString(m) } +func (*GetAppProfileRequest) ProtoMessage() {} +func (*GetAppProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{16} +} +func (m *GetAppProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAppProfileRequest.Unmarshal(m, b) +} +func (m *GetAppProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAppProfileRequest.Marshal(b, m, deterministic) +} +func (dst *GetAppProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAppProfileRequest.Merge(dst, src) +} +func (m *GetAppProfileRequest) XXX_Size() int { + return xxx_messageInfo_GetAppProfileRequest.Size(m) +} +func (m *GetAppProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAppProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAppProfileRequest proto.InternalMessageInfo + +func (m *GetAppProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for BigtableInstanceAdmin.ListAppProfiles. 
+type ListAppProfilesRequest struct { + // The unique name of the instance for which a list of app profiles is + // requested. Values are of the form + // `projects//instances/`. + // Use ` = '-'` to list AppProfiles for all Instances in a project, + // e.g., `projects/myproject/instances/-`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Maximum number of results per page. + // CURRENTLY UNIMPLEMENTED AND IGNORED. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value of `next_page_token` returned by a previous call. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAppProfilesRequest) Reset() { *m = ListAppProfilesRequest{} } +func (m *ListAppProfilesRequest) String() string { return proto.CompactTextString(m) } +func (*ListAppProfilesRequest) ProtoMessage() {} +func (*ListAppProfilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{17} +} +func (m *ListAppProfilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAppProfilesRequest.Unmarshal(m, b) +} +func (m *ListAppProfilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAppProfilesRequest.Marshal(b, m, deterministic) +} +func (dst *ListAppProfilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAppProfilesRequest.Merge(dst, src) +} +func (m *ListAppProfilesRequest) XXX_Size() int { + return xxx_messageInfo_ListAppProfilesRequest.Size(m) +} +func (m *ListAppProfilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAppProfilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAppProfilesRequest proto.InternalMessageInfo + +func (m *ListAppProfilesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAppProfilesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAppProfilesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for BigtableInstanceAdmin.ListAppProfiles. +type ListAppProfilesResponse struct { + // The list of requested app profiles. + AppProfiles []*AppProfile `protobuf:"bytes,1,rep,name=app_profiles,json=appProfiles,proto3" json:"app_profiles,omitempty"` + // Set if not all app profiles could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Locations from which AppProfile information could not be retrieved, + // due to an outage or some other transient condition. + // AppProfiles from these locations may be missing from `app_profiles`. 
+ // Values are of the form `projects//locations/` + FailedLocations []string `protobuf:"bytes,3,rep,name=failed_locations,json=failedLocations,proto3" json:"failed_locations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAppProfilesResponse) Reset() { *m = ListAppProfilesResponse{} } +func (m *ListAppProfilesResponse) String() string { return proto.CompactTextString(m) } +func (*ListAppProfilesResponse) ProtoMessage() {} +func (*ListAppProfilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{18} +} +func (m *ListAppProfilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAppProfilesResponse.Unmarshal(m, b) +} +func (m *ListAppProfilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAppProfilesResponse.Marshal(b, m, deterministic) +} +func (dst *ListAppProfilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAppProfilesResponse.Merge(dst, src) +} +func (m *ListAppProfilesResponse) XXX_Size() int { + return xxx_messageInfo_ListAppProfilesResponse.Size(m) +} +func (m *ListAppProfilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAppProfilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAppProfilesResponse proto.InternalMessageInfo + +func (m *ListAppProfilesResponse) GetAppProfiles() []*AppProfile { + if m != nil { + return m.AppProfiles + } + return nil +} + +func (m *ListAppProfilesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListAppProfilesResponse) GetFailedLocations() []string { + if m != nil { + return m.FailedLocations + } + return nil +} + +// Request message for BigtableInstanceAdmin.UpdateAppProfile. +type UpdateAppProfileRequest struct { + // The app profile which will (partially) replace the current value. + AppProfile *AppProfile `protobuf:"bytes,1,opt,name=app_profile,json=appProfile,proto3" json:"app_profile,omitempty"` + // The subset of app profile fields which should be replaced. + // If unset, all fields will be replaced. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // If true, ignore safety checks when updating the app profile. 
+ IgnoreWarnings bool `protobuf:"varint,3,opt,name=ignore_warnings,json=ignoreWarnings,proto3" json:"ignore_warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAppProfileRequest) Reset() { *m = UpdateAppProfileRequest{} } +func (m *UpdateAppProfileRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateAppProfileRequest) ProtoMessage() {} +func (*UpdateAppProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{19} +} +func (m *UpdateAppProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAppProfileRequest.Unmarshal(m, b) +} +func (m *UpdateAppProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAppProfileRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateAppProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAppProfileRequest.Merge(dst, src) +} +func (m *UpdateAppProfileRequest) XXX_Size() int { + return xxx_messageInfo_UpdateAppProfileRequest.Size(m) +} +func (m *UpdateAppProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAppProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAppProfileRequest proto.InternalMessageInfo + +func (m *UpdateAppProfileRequest) GetAppProfile() *AppProfile { + if m != nil { + return m.AppProfile + } + return nil +} + +func (m *UpdateAppProfileRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateAppProfileRequest) GetIgnoreWarnings() bool { + if m != nil { + return m.IgnoreWarnings + } + return false +} + +// Request message for BigtableInstanceAdmin.DeleteAppProfile. +type DeleteAppProfileRequest struct { + // The unique name of the app profile to be deleted. Values are of the form + // `projects//instances//appProfiles/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // If true, ignore safety checks when deleting the app profile. 
+ IgnoreWarnings bool `protobuf:"varint,2,opt,name=ignore_warnings,json=ignoreWarnings,proto3" json:"ignore_warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAppProfileRequest) Reset() { *m = DeleteAppProfileRequest{} } +func (m *DeleteAppProfileRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAppProfileRequest) ProtoMessage() {} +func (*DeleteAppProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{20} +} +func (m *DeleteAppProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAppProfileRequest.Unmarshal(m, b) +} +func (m *DeleteAppProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAppProfileRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAppProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAppProfileRequest.Merge(dst, src) +} +func (m *DeleteAppProfileRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAppProfileRequest.Size(m) +} +func (m *DeleteAppProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAppProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAppProfileRequest proto.InternalMessageInfo + +func (m *DeleteAppProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteAppProfileRequest) GetIgnoreWarnings() bool { + if m != nil { + return m.IgnoreWarnings + } + return false +} + +// The metadata for the Operation returned by UpdateAppProfile. +type UpdateAppProfileMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAppProfileMetadata) Reset() { *m = UpdateAppProfileMetadata{} } +func (m *UpdateAppProfileMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateAppProfileMetadata) ProtoMessage() {} +func (*UpdateAppProfileMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5, []int{21} +} +func (m *UpdateAppProfileMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAppProfileMetadata.Unmarshal(m, b) +} +func (m *UpdateAppProfileMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAppProfileMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateAppProfileMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAppProfileMetadata.Merge(dst, src) +} +func (m *UpdateAppProfileMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateAppProfileMetadata.Size(m) +} +func (m *UpdateAppProfileMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAppProfileMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAppProfileMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CreateInstanceRequest)(nil), "google.bigtable.admin.v2.CreateInstanceRequest") + proto.RegisterMapType((map[string]*Cluster)(nil), "google.bigtable.admin.v2.CreateInstanceRequest.ClustersEntry") + proto.RegisterType((*GetInstanceRequest)(nil), "google.bigtable.admin.v2.GetInstanceRequest") + proto.RegisterType((*ListInstancesRequest)(nil), "google.bigtable.admin.v2.ListInstancesRequest") + proto.RegisterType((*ListInstancesResponse)(nil), "google.bigtable.admin.v2.ListInstancesResponse") + proto.RegisterType((*PartialUpdateInstanceRequest)(nil), 
"google.bigtable.admin.v2.PartialUpdateInstanceRequest") + proto.RegisterType((*DeleteInstanceRequest)(nil), "google.bigtable.admin.v2.DeleteInstanceRequest") + proto.RegisterType((*CreateClusterRequest)(nil), "google.bigtable.admin.v2.CreateClusterRequest") + proto.RegisterType((*GetClusterRequest)(nil), "google.bigtable.admin.v2.GetClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.bigtable.admin.v2.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.bigtable.admin.v2.ListClustersResponse") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.bigtable.admin.v2.DeleteClusterRequest") + proto.RegisterType((*CreateInstanceMetadata)(nil), "google.bigtable.admin.v2.CreateInstanceMetadata") + proto.RegisterType((*UpdateInstanceMetadata)(nil), "google.bigtable.admin.v2.UpdateInstanceMetadata") + proto.RegisterType((*CreateClusterMetadata)(nil), "google.bigtable.admin.v2.CreateClusterMetadata") + proto.RegisterType((*UpdateClusterMetadata)(nil), "google.bigtable.admin.v2.UpdateClusterMetadata") + proto.RegisterType((*CreateAppProfileRequest)(nil), "google.bigtable.admin.v2.CreateAppProfileRequest") + proto.RegisterType((*GetAppProfileRequest)(nil), "google.bigtable.admin.v2.GetAppProfileRequest") + proto.RegisterType((*ListAppProfilesRequest)(nil), "google.bigtable.admin.v2.ListAppProfilesRequest") + proto.RegisterType((*ListAppProfilesResponse)(nil), "google.bigtable.admin.v2.ListAppProfilesResponse") + proto.RegisterType((*UpdateAppProfileRequest)(nil), "google.bigtable.admin.v2.UpdateAppProfileRequest") + proto.RegisterType((*DeleteAppProfileRequest)(nil), "google.bigtable.admin.v2.DeleteAppProfileRequest") + proto.RegisterType((*UpdateAppProfileMetadata)(nil), "google.bigtable.admin.v2.UpdateAppProfileMetadata") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableInstanceAdminClient is the client API for BigtableInstanceAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BigtableInstanceAdminClient interface { + // Create an instance within a project. + CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets information about an instance. + GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) + // Lists information about instances in a project. + ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) + // Updates an instance within a project. + UpdateInstance(ctx context.Context, in *Instance, opts ...grpc.CallOption) (*Instance, error) + // Partially updates an instance within a project. + PartialUpdateInstance(ctx context.Context, in *PartialUpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Delete an instance from a project. + DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a cluster within an instance. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets information about a cluster. 
+ GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Lists information about clusters in an instance. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Updates a cluster within an instance. + UpdateCluster(ctx context.Context, in *Cluster, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a cluster from an instance. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates an app profile within an instance. + CreateAppProfile(ctx context.Context, in *CreateAppProfileRequest, opts ...grpc.CallOption) (*AppProfile, error) + // Gets information about an app profile. + GetAppProfile(ctx context.Context, in *GetAppProfileRequest, opts ...grpc.CallOption) (*AppProfile, error) + // Lists information about app profiles in an instance. + ListAppProfiles(ctx context.Context, in *ListAppProfilesRequest, opts ...grpc.CallOption) (*ListAppProfilesResponse, error) + // Updates an app profile within an instance. + UpdateAppProfile(ctx context.Context, in *UpdateAppProfileRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes an app profile from an instance. + DeleteAppProfile(ctx context.Context, in *DeleteAppProfileRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets the access control policy for an instance resource. Returns an empty + // policy if an instance exists but does not have a policy set. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the access control policy on an instance resource. Replaces any + // existing policy. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that the caller has on the specified instance resource. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type bigtableInstanceAdminClient struct { + cc *grpc.ClientConn +} + +func NewBigtableInstanceAdminClient(cc *grpc.ClientConn) BigtableInstanceAdminClient { + return &bigtableInstanceAdminClient{cc} +} + +func (c *bigtableInstanceAdminClient) CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) { + out := new(ListInstancesResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListInstances", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) UpdateInstance(ctx context.Context, in *Instance, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) PartialUpdateInstance(ctx context.Context, in *PartialUpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/PartialUpdateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) UpdateCluster(ctx context.Context, in *Cluster, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) CreateAppProfile(ctx context.Context, in *CreateAppProfileRequest, opts ...grpc.CallOption) (*AppProfile, error) { + out := new(AppProfile) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateAppProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) GetAppProfile(ctx context.Context, in *GetAppProfileRequest, opts ...grpc.CallOption) (*AppProfile, error) { + out := new(AppProfile) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetAppProfile", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) ListAppProfiles(ctx context.Context, in *ListAppProfilesRequest, opts ...grpc.CallOption) (*ListAppProfilesResponse, error) { + out := new(ListAppProfilesResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListAppProfiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) UpdateAppProfile(ctx context.Context, in *UpdateAppProfileRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateAppProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) DeleteAppProfile(ctx context.Context, in *DeleteAppProfileRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteAppProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableInstanceAdminClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableInstanceAdmin/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableInstanceAdminServer is the server API for BigtableInstanceAdmin service. +type BigtableInstanceAdminServer interface { + // Create an instance within a project. + CreateInstance(context.Context, *CreateInstanceRequest) (*longrunning.Operation, error) + // Gets information about an instance. + GetInstance(context.Context, *GetInstanceRequest) (*Instance, error) + // Lists information about instances in a project. + ListInstances(context.Context, *ListInstancesRequest) (*ListInstancesResponse, error) + // Updates an instance within a project. + UpdateInstance(context.Context, *Instance) (*Instance, error) + // Partially updates an instance within a project. + PartialUpdateInstance(context.Context, *PartialUpdateInstanceRequest) (*longrunning.Operation, error) + // Delete an instance from a project. + DeleteInstance(context.Context, *DeleteInstanceRequest) (*empty.Empty, error) + // Creates a cluster within an instance. + CreateCluster(context.Context, *CreateClusterRequest) (*longrunning.Operation, error) + // Gets information about a cluster. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Lists information about clusters in an instance. 
+ ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Updates a cluster within an instance. + UpdateCluster(context.Context, *Cluster) (*longrunning.Operation, error) + // Deletes a cluster from an instance. + DeleteCluster(context.Context, *DeleteClusterRequest) (*empty.Empty, error) + // Creates an app profile within an instance. + CreateAppProfile(context.Context, *CreateAppProfileRequest) (*AppProfile, error) + // Gets information about an app profile. + GetAppProfile(context.Context, *GetAppProfileRequest) (*AppProfile, error) + // Lists information about app profiles in an instance. + ListAppProfiles(context.Context, *ListAppProfilesRequest) (*ListAppProfilesResponse, error) + // Updates an app profile within an instance. + UpdateAppProfile(context.Context, *UpdateAppProfileRequest) (*longrunning.Operation, error) + // Deletes an app profile from an instance. + DeleteAppProfile(context.Context, *DeleteAppProfileRequest) (*empty.Empty, error) + // Gets the access control policy for an instance resource. Returns an empty + // policy if an instance exists but does not have a policy set. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the access control policy on an instance resource. Replaces any + // existing policy. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that the caller has on the specified instance resource. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterBigtableInstanceAdminServer(s *grpc.Server, srv BigtableInstanceAdminServer) { + s.RegisterService(&_BigtableInstanceAdmin_serviceDesc, srv) +} + +func _BigtableInstanceAdmin_CreateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).CreateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).CreateInstance(ctx, req.(*CreateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_GetInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).GetInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).GetInstance(ctx, req.(*GetInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_ListInstances_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstancesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).ListInstances(ctx, in) + 
} + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListInstances", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).ListInstances(ctx, req.(*ListInstancesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_UpdateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Instance) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).UpdateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).UpdateInstance(ctx, req.(*Instance)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_PartialUpdateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PartialUpdateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).PartialUpdateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/PartialUpdateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).PartialUpdateInstance(ctx, req.(*PartialUpdateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_DeleteInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).DeleteInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).DeleteInstance(ctx, req.(*DeleteInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(BigtableInstanceAdminServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Cluster) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).UpdateCluster(ctx, req.(*Cluster)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_CreateAppProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAppProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).CreateAppProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/CreateAppProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).CreateAppProfile(ctx, req.(*CreateAppProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_GetAppProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAppProfileRequest) + if err := dec(in); err 
!= nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).GetAppProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetAppProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).GetAppProfile(ctx, req.(*GetAppProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_ListAppProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAppProfilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).ListAppProfiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/ListAppProfiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).ListAppProfiles(ctx, req.(*ListAppProfilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_UpdateAppProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAppProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).UpdateAppProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/UpdateAppProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).UpdateAppProfile(ctx, req.(*UpdateAppProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_DeleteAppProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAppProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).DeleteAppProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/DeleteAppProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).DeleteAppProfile(ctx, req.(*DeleteAppProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableInstanceAdmin_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableInstanceAdminServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableInstanceAdmin/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableInstanceAdminServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigtableInstanceAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.admin.v2.BigtableInstanceAdmin", + HandlerType: (*BigtableInstanceAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateInstance", + Handler: _BigtableInstanceAdmin_CreateInstance_Handler, + }, + { + MethodName: "GetInstance", + Handler: _BigtableInstanceAdmin_GetInstance_Handler, + }, + { + MethodName: "ListInstances", + Handler: _BigtableInstanceAdmin_ListInstances_Handler, + }, + { + MethodName: "UpdateInstance", + Handler: _BigtableInstanceAdmin_UpdateInstance_Handler, + }, + { + MethodName: "PartialUpdateInstance", + Handler: _BigtableInstanceAdmin_PartialUpdateInstance_Handler, + }, + { + MethodName: "DeleteInstance", + Handler: _BigtableInstanceAdmin_DeleteInstance_Handler, + }, + { + MethodName: "CreateCluster", + Handler: _BigtableInstanceAdmin_CreateCluster_Handler, + }, + { + MethodName: "GetCluster", + Handler: _BigtableInstanceAdmin_GetCluster_Handler, + }, + { + MethodName: "ListClusters", + Handler: _BigtableInstanceAdmin_ListClusters_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _BigtableInstanceAdmin_UpdateCluster_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _BigtableInstanceAdmin_DeleteCluster_Handler, + }, + { + MethodName: "CreateAppProfile", + Handler: _BigtableInstanceAdmin_CreateAppProfile_Handler, + }, + { + MethodName: "GetAppProfile", + Handler: _BigtableInstanceAdmin_GetAppProfile_Handler, + }, + { + MethodName: "ListAppProfiles", + Handler: _BigtableInstanceAdmin_ListAppProfiles_Handler, + }, + { + MethodName: "UpdateAppProfile", + Handler: _BigtableInstanceAdmin_UpdateAppProfile_Handler, + }, + { + MethodName: "DeleteAppProfile", + Handler: _BigtableInstanceAdmin_DeleteAppProfile_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _BigtableInstanceAdmin_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _BigtableInstanceAdmin_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _BigtableInstanceAdmin_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/bigtable/admin/v2/bigtable_instance_admin.proto", +} + 
+func init() { + proto.RegisterFile("google/bigtable/admin/v2/bigtable_instance_admin.proto", fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5) +} + +var fileDescriptor_bigtable_instance_admin_7a6cd7ed1a550de5 = []byte{ + // 1592 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x59, 0xcd, 0x6f, 0xdc, 0x44, + 0x14, 0xd7, 0xec, 0xb6, 0x25, 0x79, 0x9b, 0x2f, 0x86, 0x26, 0x59, 0xb9, 0x5f, 0xa9, 0x5b, 0xb5, + 0xe9, 0x36, 0xd8, 0x64, 0x41, 0x6d, 0x95, 0x90, 0x8a, 0x36, 0x2d, 0x51, 0x50, 0x2a, 0xa2, 0x6d, + 0x29, 0x6a, 0x15, 0xb1, 0x9a, 0x64, 0x27, 0x8b, 0x89, 0xd7, 0x36, 0xb6, 0x37, 0xd0, 0xa2, 0x5e, + 0x10, 0x42, 0xa8, 0x12, 0x1c, 0x40, 0xe2, 0x52, 0xc1, 0x85, 0x0b, 0xaa, 0x10, 0x88, 0x03, 0xdc, + 0xb8, 0x82, 0x04, 0x47, 0xfe, 0x02, 0x24, 0xce, 0x88, 0x1b, 0x57, 0x34, 0xe3, 0x19, 0xaf, 0xed, + 0xb5, 0xd7, 0x4e, 0xab, 0x4a, 0x3d, 0x75, 0x3d, 0xf3, 0xe6, 0xcd, 0xef, 0xbd, 0xf7, 0x7b, 0x6f, + 0xde, 0x4b, 0xe1, 0x5c, 0xdb, 0xb6, 0xdb, 0x26, 0xd5, 0x37, 0x8d, 0xb6, 0x4f, 0x36, 0x4d, 0xaa, + 0x93, 0x56, 0xc7, 0xb0, 0xf4, 0xdd, 0x7a, 0xb8, 0xd2, 0x34, 0x2c, 0xcf, 0x27, 0xd6, 0x16, 0x6d, + 0xf2, 0x2d, 0xcd, 0x71, 0x6d, 0xdf, 0xc6, 0xd5, 0xe0, 0x9c, 0x26, 0xa5, 0xb4, 0x60, 0x73, 0xb7, + 0xae, 0x1c, 0x16, 0x1a, 0x89, 0x63, 0xe8, 0xc4, 0xb2, 0x6c, 0x9f, 0xf8, 0x86, 0x6d, 0x79, 0xc1, + 0x39, 0xe5, 0x74, 0xe6, 0x7d, 0xf2, 0x1a, 0x21, 0x78, 0x54, 0x08, 0x1a, 0xa4, 0xa3, 0xef, 0xce, + 0xb3, 0x7f, 0x9a, 0x8e, 0x6d, 0x1a, 0x5b, 0x77, 0xc4, 0xbe, 0x12, 0xdf, 0x8f, 0xed, 0x9d, 0x10, + 0x7b, 0xa6, 0x6d, 0xb5, 0xdd, 0xae, 0x65, 0x19, 0x56, 0x5b, 0xb7, 0x1d, 0xea, 0xc6, 0x90, 0x1c, + 0x12, 0x42, 0xfc, 0x6b, 0xb3, 0xbb, 0xad, 0xd3, 0x8e, 0xe3, 0x4b, 0x0d, 0x33, 0xc9, 0xcd, 0x6d, + 0x83, 0x9a, 0xad, 0x66, 0x87, 0x78, 0x3b, 0x42, 0xe2, 0x58, 0x52, 0xc2, 0x37, 0x3a, 0xd4, 0xf3, + 0x49, 0xc7, 0x09, 0x04, 0xd4, 0x3f, 0x4a, 0x30, 0xb9, 0xec, 0x52, 0xe2, 0xd3, 0x55, 0x61, 0x59, + 0x83, 0xbe, 0xdb, 0xa5, 0x9e, 0x8f, 0xa7, 0xe0, 0x80, 0x43, 0x5c, 0x6a, 0xf9, 0x55, 0x34, 0x83, + 0x66, 0x87, 0x1b, 0xe2, 0x0b, 0x1f, 0x83, 0x4a, 0xe8, 0x6b, 0xa3, 0x55, 0x2d, 0xf1, 0x4d, 0x90, + 0x4b, 0xab, 0x2d, 0x7c, 0x11, 0x86, 0xe4, 0x57, 0xb5, 0x3c, 0x83, 0x66, 0x2b, 0x75, 0x55, 0xcb, + 0x8a, 0x83, 0x16, 0xde, 0x1a, 0x9e, 0xc1, 0xb7, 0x60, 0x68, 0xcb, 0xec, 0x7a, 0x3e, 0x75, 0xbd, + 0xea, 0xbe, 0x99, 0xf2, 0x6c, 0xa5, 0xbe, 0x94, 0x7d, 0x3e, 0x15, 0xbb, 0xb6, 0x2c, 0xce, 0x5f, + 0xb5, 0x7c, 0xf7, 0x4e, 0x23, 0x54, 0xa7, 0xbc, 0x05, 0xa3, 0xb1, 0x2d, 0x3c, 0x01, 0xe5, 0x1d, + 0x7a, 0x47, 0x58, 0xc8, 0x7e, 0xe2, 0xf3, 0xb0, 0x7f, 0x97, 0x98, 0x5d, 0xca, 0x0d, 0xab, 0xd4, + 0x8f, 0x0f, 0xb8, 0x3a, 0xd0, 0xd4, 0x08, 0xe4, 0x17, 0x4a, 0x17, 0x90, 0x3a, 0x0b, 0x78, 0x85, + 0xfa, 0x49, 0x4f, 0x62, 0xd8, 0x67, 0x91, 0x0e, 0x15, 0xb7, 0xf0, 0xdf, 0xea, 0x35, 0x38, 0xb8, + 0x66, 0x78, 0xa1, 0xa8, 0x97, 0xe7, 0xf5, 0x23, 0x00, 0x0e, 0x69, 0xd3, 0xa6, 0x6f, 0xef, 0x50, + 0x4b, 0x38, 0x7d, 0x98, 0xad, 0xdc, 0x60, 0x0b, 0xea, 0x77, 0x08, 0x26, 0x13, 0xfa, 0x3c, 0xc7, + 0xb6, 0x3c, 0x8a, 0x5f, 0x81, 0x61, 0xe9, 0x59, 0xaf, 0x8a, 0xb8, 0x3b, 0x8b, 0x84, 0xa3, 0x77, + 0x08, 0x9f, 0x81, 0x89, 0x6d, 0x62, 0x98, 0xb4, 0xd5, 0x34, 0xed, 0xad, 0x80, 0x9c, 0xd5, 0xd2, + 0x4c, 0x79, 0x76, 0xb8, 0x31, 0x1e, 0xac, 0xaf, 0xc9, 0x65, 0x7c, 0x0a, 0xc6, 0x2d, 0xfa, 0xbe, + 0xdf, 0x8c, 0x40, 0x2d, 0x73, 0xa8, 0xa3, 0x6c, 0x79, 0x3d, 0x84, 0xfb, 0x00, 0xc1, 0xe1, 0x75, + 0xe2, 0xfa, 0x06, 0x31, 0xdf, 0x70, 0x5a, 0x29, 0xe4, 0x8b, 0x72, 0x08, 0x3d, 0x02, 0x87, 0x16, + 0xa1, 0xd2, 0xe5, 0x8a, 0x79, 0x32, 0x88, 
0x58, 0x2a, 0x52, 0x85, 0xcc, 0x06, 0xed, 0x55, 0x96, + 0x2f, 0xd7, 0x88, 0xb7, 0xd3, 0x80, 0x40, 0x9c, 0xfd, 0x56, 0xcf, 0xc2, 0xe4, 0x15, 0x6a, 0xd2, + 0x7e, 0x54, 0x69, 0x81, 0xbc, 0x8f, 0xe0, 0x60, 0x40, 0x42, 0xc9, 0x87, 0xfc, 0x48, 0x0a, 0x3e, + 0xf6, 0xd2, 0x67, 0x58, 0xac, 0xac, 0xb6, 0xf0, 0x22, 0x3c, 0x23, 0x3e, 0x44, 0xf2, 0x14, 0x60, + 0xa0, 0x3c, 0xa1, 0x9e, 0x86, 0x67, 0x57, 0xa8, 0x9f, 0x00, 0x92, 0x86, 0x7a, 0x0d, 0x9e, 0x63, + 0x74, 0x91, 0xc9, 0xf0, 0x98, 0xec, 0xfb, 0x16, 0x05, 0x6c, 0xee, 0xa9, 0x13, 0xe4, 0x5b, 0x8a, + 0xa4, 0x72, 0xc0, 0xbd, 0x02, 0xd6, 0x84, 0x47, 0x9e, 0x04, 0xf3, 0x6a, 0x70, 0x30, 0x88, 0x6d, + 0x01, 0x27, 0xfd, 0x8b, 0x60, 0x2a, 0x5e, 0x5f, 0xae, 0x51, 0x9f, 0xb4, 0x88, 0x4f, 0xf0, 0x6d, + 0x98, 0xb0, 0x5d, 0xa3, 0x6d, 0x58, 0xc4, 0x6c, 0xba, 0x81, 0x0a, 0xc1, 0x53, 0x7d, 0x8f, 0xb5, + 0xaa, 0x31, 0x2e, 0x15, 0x49, 0x28, 0x4b, 0x30, 0x22, 0x54, 0x36, 0x59, 0xb5, 0xce, 0x24, 0xef, + 0x0d, 0x59, 0xca, 0x1b, 0x15, 0x21, 0xcf, 0x56, 0x18, 0xf5, 0xb7, 0x0d, 0xcb, 0xf0, 0xde, 0x0e, + 0x4e, 0x97, 0x73, 0x4f, 0x43, 0x20, 0xce, 0x16, 0xd4, 0xff, 0x10, 0x4c, 0xc5, 0x33, 0x32, 0x34, + 0x99, 0x64, 0x9a, 0x7c, 0x2e, 0xdb, 0xe4, 0x41, 0x49, 0xfe, 0x74, 0x59, 0xfe, 0x0f, 0x92, 0x0f, + 0xa1, 0x60, 0x46, 0x68, 0xf8, 0xad, 0x4c, 0xc3, 0xb5, 0xbc, 0x58, 0xc7, 0x49, 0xf6, 0x74, 0x19, + 0xfc, 0x17, 0x82, 0xc9, 0x20, 0x2e, 0x49, 0x83, 0xd7, 0x32, 0x0d, 0x2e, 0x90, 0xbd, 0x4f, 0x95, + 0x8d, 0xbf, 0x21, 0x98, 0x0e, 0x22, 0x71, 0xc9, 0x71, 0xd6, 0x5d, 0x7b, 0xdb, 0x30, 0x73, 0xfb, + 0x9b, 0x93, 0x30, 0x46, 0x1c, 0xa7, 0xe9, 0x04, 0xd2, 0xbd, 0x1a, 0x3d, 0x42, 0x42, 0x15, 0xab, + 0x2d, 0x7c, 0x15, 0x2a, 0x11, 0x29, 0x01, 0xeb, 0x64, 0xb6, 0x7b, 0x22, 0xf7, 0x43, 0x4f, 0x11, + 0x3e, 0x0d, 0xe3, 0x46, 0xdb, 0xb2, 0x5d, 0xda, 0x7c, 0x8f, 0xb8, 0xac, 0x03, 0x64, 0x2d, 0x0f, + 0x9a, 0x1d, 0x6a, 0x8c, 0x05, 0xcb, 0x6f, 0x8a, 0x55, 0x56, 0xb7, 0x56, 0xa8, 0xdf, 0x6f, 0x45, + 0x5a, 0xdd, 0x32, 0x61, 0x8a, 0x55, 0xe3, 0x9e, 0x70, 0x6e, 0x7d, 0x3f, 0x04, 0xbc, 0x9a, 0x37, + 0x3d, 0xe3, 0x6e, 0x60, 0xcb, 0xfe, 0xc6, 0x10, 0x5b, 0xb8, 0x6e, 0xdc, 0xa5, 0x79, 0xc5, 0xff, + 0x27, 0x04, 0xd3, 0x7d, 0xd7, 0x89, 0xfa, 0xbf, 0x02, 0x23, 0x11, 0x2f, 0xc9, 0x37, 0xa0, 0x98, + 0x9b, 0x2a, 0x3d, 0x37, 0xa5, 0x96, 0xf7, 0x52, 0x4a, 0x79, 0x4f, 0x7d, 0x31, 0xca, 0xa9, 0x2f, + 0x86, 0xfa, 0x2b, 0x82, 0xe9, 0x80, 0xff, 0xfd, 0x5e, 0x4d, 0x44, 0x17, 0x3d, 0x62, 0x74, 0x1f, + 0xa7, 0x0b, 0x49, 0xa3, 0x46, 0x39, 0x95, 0x1a, 0x37, 0x61, 0x3a, 0x78, 0xd2, 0x0a, 0xb1, 0x23, + 0x4d, 0x6f, 0x29, 0x55, 0xaf, 0x02, 0xd5, 0xa4, 0x7f, 0x64, 0x89, 0xa8, 0x3f, 0x9c, 0x86, 0xc9, + 0xcb, 0xc2, 0x0d, 0xb2, 0xac, 0x5f, 0x62, 0xde, 0xc0, 0x9f, 0x21, 0x18, 0x8b, 0x3f, 0x74, 0x78, + 0xaf, 0x4f, 0xa2, 0x72, 0x44, 0x1e, 0x88, 0x8c, 0x46, 0xda, 0xeb, 0x72, 0x34, 0x52, 0xe7, 0x3e, + 0xfc, 0xf3, 0xef, 0x2f, 0x4a, 0xa7, 0xd4, 0xe3, 0x6c, 0x20, 0xfb, 0x20, 0xa0, 0xf0, 0x92, 0xe3, + 0xda, 0xef, 0xd0, 0x2d, 0xdf, 0xd3, 0x6b, 0xf7, 0xc2, 0x21, 0xcd, 0x5b, 0x40, 0x35, 0x7c, 0x1f, + 0x41, 0x25, 0xd2, 0x94, 0xe3, 0xb9, 0x6c, 0x34, 0xfd, 0xbd, 0xbb, 0x52, 0xa0, 0xed, 0x54, 0xcf, + 0x70, 0x3c, 0x27, 0x70, 0x80, 0x87, 0x39, 0x39, 0x82, 0xa6, 0x07, 0x46, 0xaf, 0xdd, 0xc3, 0x0f, + 0x10, 0x8c, 0xc6, 0xfa, 0x74, 0x3c, 0xe0, 0x0d, 0x49, 0x1b, 0x10, 0x14, 0xbd, 0xb0, 0x7c, 0x90, + 0x83, 0x09, 0x74, 0x83, 0xbc, 0x85, 0x3f, 0x46, 0x30, 0x16, 0x7f, 0xaa, 0x71, 0x01, 0xfb, 0x0b, + 0xf9, 0x48, 0xc4, 0x4c, 0xc9, 0xf7, 0x11, 0x8b, 0x19, 0x1b, 0x67, 0x52, 0x5b, 0x07, 0xfc, 0x88, + 0xbd, 0x46, 0x1e, 0xa5, 0x5e, 0xe6, 0xf0, 0xce, 0xd5, 0x6b, 0x1c, 
0x5e, 0x38, 0xe4, 0x0f, 0xc4, + 0xd9, 0x9b, 0x36, 0x3e, 0x42, 0x30, 0x16, 0x9f, 0x18, 0x06, 0x71, 0x3e, 0x75, 0xb6, 0x50, 0xa6, + 0xfa, 0xca, 0xc2, 0x55, 0x36, 0xe9, 0xcb, 0xf0, 0xd5, 0x0a, 0x90, 0xeb, 0x2b, 0x04, 0xa3, 0xb1, + 0xbe, 0x03, 0xef, 0xb1, 0x41, 0xc9, 0xf3, 0xd2, 0x12, 0xc7, 0x72, 0x5e, 0x9d, 0x4b, 0xa7, 0x52, + 0x0c, 0x8d, 0x2e, 0xbb, 0xf8, 0x05, 0x39, 0x9d, 0xe0, 0xcf, 0x11, 0x40, 0x6f, 0x3c, 0xc1, 0x67, + 0x07, 0x26, 0x62, 0x02, 0x59, 0x7e, 0xe7, 0xa1, 0xbe, 0xc4, 0xd1, 0x69, 0x78, 0x2e, 0xcf, 0x53, + 0x21, 0x34, 0xe6, 0xb4, 0x6f, 0x10, 0x8c, 0x44, 0x67, 0x17, 0xfc, 0xfc, 0xe0, 0x04, 0x4b, 0x8c, + 0x4c, 0x8a, 0x56, 0x54, 0x5c, 0xa4, 0x63, 0x1c, 0x65, 0x41, 0x1f, 0xb2, 0x22, 0x36, 0x1a, 0x6b, + 0xd6, 0x70, 0xbe, 0x43, 0xf2, 0xa2, 0x79, 0x9e, 0x23, 0x99, 0x57, 0xf6, 0xe4, 0x2f, 0x96, 0x9d, + 0x9f, 0x22, 0x18, 0x8d, 0x0d, 0x51, 0x83, 0x78, 0x96, 0x36, 0x6d, 0x65, 0x92, 0x5d, 0x38, 0xa7, + 0xb6, 0xb7, 0x10, 0xfe, 0x88, 0x60, 0x22, 0xd9, 0xe5, 0xe1, 0xf9, 0x3c, 0xea, 0xf7, 0xbd, 0x96, + 0x4a, 0xa1, 0x07, 0x5e, 0xbd, 0xc2, 0x31, 0x5e, 0x54, 0xf5, 0x22, 0x01, 0x8c, 0xf4, 0x30, 0x0b, + 0xd1, 0x96, 0x02, 0x7f, 0x8d, 0x60, 0x34, 0xd6, 0xd0, 0x0d, 0xf2, 0x61, 0x5a, 0xe7, 0x57, 0x10, + 0xad, 0x08, 0x32, 0xd6, 0x73, 0x3d, 0x1a, 0x81, 0xca, 0x9c, 0xfa, 0x03, 0x82, 0xf1, 0x44, 0x5b, + 0x87, 0x5f, 0x18, 0xcc, 0xf5, 0xfe, 0x86, 0x53, 0x99, 0xdf, 0xc3, 0x09, 0x91, 0x20, 0x71, 0xc4, + 0xc5, 0xfd, 0x8b, 0x7f, 0x46, 0x30, 0x91, 0x6c, 0x58, 0x06, 0xd1, 0x20, 0xa3, 0xf9, 0xcb, 0x4b, + 0x9b, 0x75, 0x8e, 0xef, 0xb5, 0xfa, 0x22, 0xc7, 0x17, 0x89, 0xa9, 0x56, 0xdc, 0xbb, 0x71, 0x2e, + 0x7c, 0x89, 0x60, 0x22, 0xd9, 0xc1, 0x0d, 0x02, 0x9e, 0xd1, 0xed, 0x65, 0x66, 0x95, 0xf0, 0x68, + 0x6d, 0xcf, 0x1c, 0xf8, 0x04, 0xc1, 0x08, 0xeb, 0x89, 0x48, 0x67, 0x9d, 0xff, 0xe1, 0xba, 0xd7, + 0x0d, 0x18, 0xa4, 0xa3, 0xed, 0xce, 0x6b, 0xd1, 0x4d, 0x89, 0x62, 0x32, 0x21, 0x13, 0xec, 0x86, + 0x6f, 0x47, 0x9d, 0x83, 0x70, 0xa9, 0x67, 0x77, 0xdd, 0xad, 0xec, 0xc7, 0xb5, 0x1d, 0xd1, 0xcc, + 0x6a, 0x0e, 0x83, 0x72, 0x7d, 0x10, 0x94, 0xeb, 0x4f, 0x0c, 0x8a, 0x97, 0x80, 0xf2, 0x3d, 0x02, + 0x7c, 0x83, 0x7a, 0x7c, 0x91, 0xba, 0x1d, 0xc3, 0xf3, 0xf8, 0x5f, 0xa0, 0x66, 0x13, 0x97, 0xf5, + 0x8b, 0x48, 0x58, 0x67, 0x0a, 0x48, 0x8a, 0x64, 0x58, 0xe6, 0x50, 0x97, 0xd4, 0x0b, 0xc5, 0xa0, + 0xfa, 0x7d, 0x9a, 0x16, 0x50, 0xed, 0xf2, 0x2f, 0x08, 0x0e, 0x6f, 0xd9, 0x9d, 0x4c, 0x42, 0x5d, + 0x56, 0x52, 0x5b, 0xf9, 0x75, 0xc6, 0xa2, 0x75, 0x74, 0x7b, 0x49, 0x9c, 0x6b, 0xdb, 0x26, 0xb1, + 0xda, 0x9a, 0xed, 0xb6, 0xf5, 0x36, 0xb5, 0x38, 0xc7, 0xf4, 0x60, 0x8b, 0x38, 0x86, 0xd7, 0xff, + 0x7f, 0x25, 0x8b, 0xfc, 0xc7, 0xc3, 0xd2, 0xd1, 0x95, 0xe0, 0xfc, 0xb2, 0x69, 0x77, 0x5b, 0x9a, + 0xbc, 0x4a, 0xe3, 0x77, 0x68, 0x37, 0xeb, 0xbf, 0x4b, 0x81, 0x0d, 0x2e, 0xb0, 0x21, 0x05, 0x36, + 0xb8, 0xc0, 0xc6, 0xcd, 0xfa, 0xe6, 0x01, 0x7e, 0xd7, 0x8b, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, + 0x5c, 0x29, 0x0c, 0x23, 0x06, 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_table_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_table_admin.pb.go new file mode 100644 index 0000000..6b01ce3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/bigtable_table_admin.pb.go @@ -0,0 +1,2174 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/bigtable/admin/v2/bigtable_table_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/bigtable/admin/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.CreateTable][google.bigtable.admin.v2.BigtableTableAdmin.CreateTable] +type CreateTableRequest struct { + // The unique name of the instance in which to create the table. + // Values are of the form `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The name by which the new table should be referred to within the parent + // instance, e.g., `foobar` rather than `/tables/foobar`. + TableId string `protobuf:"bytes,2,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + // The Table to create. + Table *Table `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // The optional list of row keys that will be used to initially split the + // table into several tablets (tablets are similar to HBase regions). + // Given two split keys, `s1` and `s2`, three tablets will be created, + // spanning the key ranges: `[, s1), [s1, s2), [s2, )`. 
+ // + // Example: + // + // * Row keys := `["a", "apple", "custom", "customer_1", "customer_2",` + // `"other", "zz"]` + // * initial_split_keys := `["apple", "customer_1", "customer_2", "other"]` + // * Key assignment: + // - Tablet 1 `[, apple) => {"a"}.` + // - Tablet 2 `[apple, customer_1) => {"apple", "custom"}.` + // - Tablet 3 `[customer_1, customer_2) => {"customer_1"}.` + // - Tablet 4 `[customer_2, other) => {"customer_2"}.` + // - Tablet 5 `[other, ) => {"other", "zz"}.` + InitialSplits []*CreateTableRequest_Split `protobuf:"bytes,4,rep,name=initial_splits,json=initialSplits,proto3" json:"initial_splits,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTableRequest) Reset() { *m = CreateTableRequest{} } +func (m *CreateTableRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTableRequest) ProtoMessage() {} +func (*CreateTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{0} +} +func (m *CreateTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTableRequest.Unmarshal(m, b) +} +func (m *CreateTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTableRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTableRequest.Merge(dst, src) +} +func (m *CreateTableRequest) XXX_Size() int { + return xxx_messageInfo_CreateTableRequest.Size(m) +} +func (m *CreateTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTableRequest proto.InternalMessageInfo + +func (m *CreateTableRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTableRequest) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +func (m *CreateTableRequest) GetTable() *Table { + if m != nil { + return m.Table + } + return nil +} + +func (m *CreateTableRequest) GetInitialSplits() []*CreateTableRequest_Split { + if m != nil { + return m.InitialSplits + } + return nil +} + +// An initial split point for a newly created table. +type CreateTableRequest_Split struct { + // Row key to use as an initial tablet boundary. 
+ Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTableRequest_Split) Reset() { *m = CreateTableRequest_Split{} } +func (m *CreateTableRequest_Split) String() string { return proto.CompactTextString(m) } +func (*CreateTableRequest_Split) ProtoMessage() {} +func (*CreateTableRequest_Split) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{0, 0} +} +func (m *CreateTableRequest_Split) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTableRequest_Split.Unmarshal(m, b) +} +func (m *CreateTableRequest_Split) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTableRequest_Split.Marshal(b, m, deterministic) +} +func (dst *CreateTableRequest_Split) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTableRequest_Split.Merge(dst, src) +} +func (m *CreateTableRequest_Split) XXX_Size() int { + return xxx_messageInfo_CreateTableRequest_Split.Size(m) +} +func (m *CreateTableRequest_Split) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTableRequest_Split.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTableRequest_Split proto.InternalMessageInfo + +func (m *CreateTableRequest_Split) GetKey() []byte { + if m != nil { + return m.Key + } + return nil +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.CreateTableFromSnapshot][google.bigtable.admin.v2.BigtableTableAdmin.CreateTableFromSnapshot] +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type CreateTableFromSnapshotRequest struct { + // The unique name of the instance in which to create the table. + // Values are of the form `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The name by which the new table should be referred to within the parent + // instance, e.g., `foobar` rather than `/tables/foobar`. + TableId string `protobuf:"bytes,2,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + // The unique name of the snapshot from which to restore the table. The + // snapshot and the table must be in the same instance. + // Values are of the form + // `projects//instances//clusters//snapshots/`. 
+ SourceSnapshot string `protobuf:"bytes,3,opt,name=source_snapshot,json=sourceSnapshot,proto3" json:"source_snapshot,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTableFromSnapshotRequest) Reset() { *m = CreateTableFromSnapshotRequest{} } +func (m *CreateTableFromSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTableFromSnapshotRequest) ProtoMessage() {} +func (*CreateTableFromSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{1} +} +func (m *CreateTableFromSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTableFromSnapshotRequest.Unmarshal(m, b) +} +func (m *CreateTableFromSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTableFromSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTableFromSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTableFromSnapshotRequest.Merge(dst, src) +} +func (m *CreateTableFromSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_CreateTableFromSnapshotRequest.Size(m) +} +func (m *CreateTableFromSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTableFromSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTableFromSnapshotRequest proto.InternalMessageInfo + +func (m *CreateTableFromSnapshotRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTableFromSnapshotRequest) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +func (m *CreateTableFromSnapshotRequest) GetSourceSnapshot() string { + if m != nil { + return m.SourceSnapshot + } + return "" +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.DropRowRange][google.bigtable.admin.v2.BigtableTableAdmin.DropRowRange] +type DropRowRangeRequest struct { + // The unique name of the table on which to drop a range of rows. + // Values are of the form + // `projects//instances//tables/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Delete all rows or by prefix. 
+ // + // Types that are valid to be assigned to Target: + // *DropRowRangeRequest_RowKeyPrefix + // *DropRowRangeRequest_DeleteAllDataFromTable + Target isDropRowRangeRequest_Target `protobuf_oneof:"target"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DropRowRangeRequest) Reset() { *m = DropRowRangeRequest{} } +func (m *DropRowRangeRequest) String() string { return proto.CompactTextString(m) } +func (*DropRowRangeRequest) ProtoMessage() {} +func (*DropRowRangeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{2} +} +func (m *DropRowRangeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DropRowRangeRequest.Unmarshal(m, b) +} +func (m *DropRowRangeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DropRowRangeRequest.Marshal(b, m, deterministic) +} +func (dst *DropRowRangeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DropRowRangeRequest.Merge(dst, src) +} +func (m *DropRowRangeRequest) XXX_Size() int { + return xxx_messageInfo_DropRowRangeRequest.Size(m) +} +func (m *DropRowRangeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DropRowRangeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DropRowRangeRequest proto.InternalMessageInfo + +func (m *DropRowRangeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isDropRowRangeRequest_Target interface { + isDropRowRangeRequest_Target() +} + +type DropRowRangeRequest_RowKeyPrefix struct { + RowKeyPrefix []byte `protobuf:"bytes,2,opt,name=row_key_prefix,json=rowKeyPrefix,proto3,oneof"` +} + +type DropRowRangeRequest_DeleteAllDataFromTable struct { + DeleteAllDataFromTable bool `protobuf:"varint,3,opt,name=delete_all_data_from_table,json=deleteAllDataFromTable,proto3,oneof"` +} + +func (*DropRowRangeRequest_RowKeyPrefix) isDropRowRangeRequest_Target() {} + +func (*DropRowRangeRequest_DeleteAllDataFromTable) isDropRowRangeRequest_Target() {} + +func (m *DropRowRangeRequest) GetTarget() isDropRowRangeRequest_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *DropRowRangeRequest) GetRowKeyPrefix() []byte { + if x, ok := m.GetTarget().(*DropRowRangeRequest_RowKeyPrefix); ok { + return x.RowKeyPrefix + } + return nil +} + +func (m *DropRowRangeRequest) GetDeleteAllDataFromTable() bool { + if x, ok := m.GetTarget().(*DropRowRangeRequest_DeleteAllDataFromTable); ok { + return x.DeleteAllDataFromTable + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DropRowRangeRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DropRowRangeRequest_OneofMarshaler, _DropRowRangeRequest_OneofUnmarshaler, _DropRowRangeRequest_OneofSizer, []interface{}{ + (*DropRowRangeRequest_RowKeyPrefix)(nil), + (*DropRowRangeRequest_DeleteAllDataFromTable)(nil), + } +} + +func _DropRowRangeRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DropRowRangeRequest) + // target + switch x := m.Target.(type) { + case *DropRowRangeRequest_RowKeyPrefix: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RowKeyPrefix) + case *DropRowRangeRequest_DeleteAllDataFromTable: + t := uint64(0) + if x.DeleteAllDataFromTable { + t = 1 + } + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("DropRowRangeRequest.Target has unexpected type %T", x) + } + return nil +} + +func _DropRowRangeRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DropRowRangeRequest) + switch tag { + case 2: // target.row_key_prefix + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Target = &DropRowRangeRequest_RowKeyPrefix{x} + return true, err + case 3: // target.delete_all_data_from_table + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &DropRowRangeRequest_DeleteAllDataFromTable{x != 0} + return true, err + default: + return false, nil + } +} + +func _DropRowRangeRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DropRowRangeRequest) + // target + switch x := m.Target.(type) { + case *DropRowRangeRequest_RowKeyPrefix: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RowKeyPrefix))) + n += len(x.RowKeyPrefix) + case *DropRowRangeRequest_DeleteAllDataFromTable: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.ListTables][google.bigtable.admin.v2.BigtableTableAdmin.ListTables] +type ListTablesRequest struct { + // The unique name of the instance for which tables should be listed. + // Values are of the form `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The view to be applied to the returned tables' fields. + // Defaults to `NAME_ONLY` if unspecified; no others are currently supported. + View Table_View `protobuf:"varint,2,opt,name=view,proto3,enum=google.bigtable.admin.v2.Table_View" json:"view,omitempty"` + // Maximum number of results per page. + // CURRENTLY UNIMPLEMENTED AND IGNORED. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value of `next_page_token` returned by a previous call. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTablesRequest) Reset() { *m = ListTablesRequest{} } +func (m *ListTablesRequest) String() string { return proto.CompactTextString(m) } +func (*ListTablesRequest) ProtoMessage() {} +func (*ListTablesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{3} +} +func (m *ListTablesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTablesRequest.Unmarshal(m, b) +} +func (m *ListTablesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTablesRequest.Marshal(b, m, deterministic) +} +func (dst *ListTablesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTablesRequest.Merge(dst, src) +} +func (m *ListTablesRequest) XXX_Size() int { + return xxx_messageInfo_ListTablesRequest.Size(m) +} +func (m *ListTablesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTablesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTablesRequest proto.InternalMessageInfo + +func (m *ListTablesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTablesRequest) GetView() Table_View { + if m != nil { + return m.View + } + return Table_VIEW_UNSPECIFIED +} + +func (m *ListTablesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTablesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for +// [google.bigtable.admin.v2.BigtableTableAdmin.ListTables][google.bigtable.admin.v2.BigtableTableAdmin.ListTables] +type ListTablesResponse struct { + // The tables present in the requested instance. + Tables []*Table `protobuf:"bytes,1,rep,name=tables,proto3" json:"tables,omitempty"` + // Set if not all tables could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTablesResponse) Reset() { *m = ListTablesResponse{} } +func (m *ListTablesResponse) String() string { return proto.CompactTextString(m) } +func (*ListTablesResponse) ProtoMessage() {} +func (*ListTablesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{4} +} +func (m *ListTablesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTablesResponse.Unmarshal(m, b) +} +func (m *ListTablesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTablesResponse.Marshal(b, m, deterministic) +} +func (dst *ListTablesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTablesResponse.Merge(dst, src) +} +func (m *ListTablesResponse) XXX_Size() int { + return xxx_messageInfo_ListTablesResponse.Size(m) +} +func (m *ListTablesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTablesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTablesResponse proto.InternalMessageInfo + +func (m *ListTablesResponse) GetTables() []*Table { + if m != nil { + return m.Tables + } + return nil +} + +func (m *ListTablesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.GetTable][google.bigtable.admin.v2.BigtableTableAdmin.GetTable] +type GetTableRequest struct { + // The unique name of the requested table. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The view to be applied to the returned table's fields. + // Defaults to `SCHEMA_VIEW` if unspecified. + View Table_View `protobuf:"varint,2,opt,name=view,proto3,enum=google.bigtable.admin.v2.Table_View" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTableRequest) Reset() { *m = GetTableRequest{} } +func (m *GetTableRequest) String() string { return proto.CompactTextString(m) } +func (*GetTableRequest) ProtoMessage() {} +func (*GetTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{5} +} +func (m *GetTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTableRequest.Unmarshal(m, b) +} +func (m *GetTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTableRequest.Marshal(b, m, deterministic) +} +func (dst *GetTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTableRequest.Merge(dst, src) +} +func (m *GetTableRequest) XXX_Size() int { + return xxx_messageInfo_GetTableRequest.Size(m) +} +func (m *GetTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTableRequest proto.InternalMessageInfo + +func (m *GetTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetTableRequest) GetView() Table_View { + if m != nil { + return m.View + } + return Table_VIEW_UNSPECIFIED +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.DeleteTable][google.bigtable.admin.v2.BigtableTableAdmin.DeleteTable] +type DeleteTableRequest struct { + // The unique name of the table to be deleted. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTableRequest) Reset() { *m = DeleteTableRequest{} } +func (m *DeleteTableRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTableRequest) ProtoMessage() {} +func (*DeleteTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{6} +} +func (m *DeleteTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTableRequest.Unmarshal(m, b) +} +func (m *DeleteTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTableRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTableRequest.Merge(dst, src) +} +func (m *DeleteTableRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTableRequest.Size(m) +} +func (m *DeleteTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTableRequest proto.InternalMessageInfo + +func (m *DeleteTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.ModifyColumnFamilies][google.bigtable.admin.v2.BigtableTableAdmin.ModifyColumnFamilies] +type ModifyColumnFamiliesRequest struct { + // The unique name of the table whose families should be modified. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Modifications to be atomically applied to the specified table's families. + // Entries are applied in order, meaning that earlier modifications can be + // masked by later ones (in the case of repeated updates to the same family, + // for example). + Modifications []*ModifyColumnFamiliesRequest_Modification `protobuf:"bytes,2,rep,name=modifications,proto3" json:"modifications,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyColumnFamiliesRequest) Reset() { *m = ModifyColumnFamiliesRequest{} } +func (m *ModifyColumnFamiliesRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyColumnFamiliesRequest) ProtoMessage() {} +func (*ModifyColumnFamiliesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{7} +} +func (m *ModifyColumnFamiliesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyColumnFamiliesRequest.Unmarshal(m, b) +} +func (m *ModifyColumnFamiliesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyColumnFamiliesRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyColumnFamiliesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyColumnFamiliesRequest.Merge(dst, src) +} +func (m *ModifyColumnFamiliesRequest) XXX_Size() int { + return xxx_messageInfo_ModifyColumnFamiliesRequest.Size(m) +} +func (m *ModifyColumnFamiliesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyColumnFamiliesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyColumnFamiliesRequest proto.InternalMessageInfo + +func (m *ModifyColumnFamiliesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ModifyColumnFamiliesRequest) GetModifications() []*ModifyColumnFamiliesRequest_Modification { + if m != nil { + return m.Modifications + } + return nil +} + +// A create, update, or delete of a particular column family. +type ModifyColumnFamiliesRequest_Modification struct { + // The ID of the column family to be modified. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Column familiy modifications. 
+ // + // Types that are valid to be assigned to Mod: + // *ModifyColumnFamiliesRequest_Modification_Create + // *ModifyColumnFamiliesRequest_Modification_Update + // *ModifyColumnFamiliesRequest_Modification_Drop + Mod isModifyColumnFamiliesRequest_Modification_Mod `protobuf_oneof:"mod"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyColumnFamiliesRequest_Modification) Reset() { + *m = ModifyColumnFamiliesRequest_Modification{} +} +func (m *ModifyColumnFamiliesRequest_Modification) String() string { return proto.CompactTextString(m) } +func (*ModifyColumnFamiliesRequest_Modification) ProtoMessage() {} +func (*ModifyColumnFamiliesRequest_Modification) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{7, 0} +} +func (m *ModifyColumnFamiliesRequest_Modification) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyColumnFamiliesRequest_Modification.Unmarshal(m, b) +} +func (m *ModifyColumnFamiliesRequest_Modification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyColumnFamiliesRequest_Modification.Marshal(b, m, deterministic) +} +func (dst *ModifyColumnFamiliesRequest_Modification) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyColumnFamiliesRequest_Modification.Merge(dst, src) +} +func (m *ModifyColumnFamiliesRequest_Modification) XXX_Size() int { + return xxx_messageInfo_ModifyColumnFamiliesRequest_Modification.Size(m) +} +func (m *ModifyColumnFamiliesRequest_Modification) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyColumnFamiliesRequest_Modification.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyColumnFamiliesRequest_Modification proto.InternalMessageInfo + +func (m *ModifyColumnFamiliesRequest_Modification) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +type isModifyColumnFamiliesRequest_Modification_Mod interface { + isModifyColumnFamiliesRequest_Modification_Mod() +} + +type ModifyColumnFamiliesRequest_Modification_Create struct { + Create *ColumnFamily `protobuf:"bytes,2,opt,name=create,proto3,oneof"` +} + +type ModifyColumnFamiliesRequest_Modification_Update struct { + Update *ColumnFamily `protobuf:"bytes,3,opt,name=update,proto3,oneof"` +} + +type ModifyColumnFamiliesRequest_Modification_Drop struct { + Drop bool `protobuf:"varint,4,opt,name=drop,proto3,oneof"` +} + +func (*ModifyColumnFamiliesRequest_Modification_Create) isModifyColumnFamiliesRequest_Modification_Mod() { +} + +func (*ModifyColumnFamiliesRequest_Modification_Update) isModifyColumnFamiliesRequest_Modification_Mod() { +} + +func (*ModifyColumnFamiliesRequest_Modification_Drop) isModifyColumnFamiliesRequest_Modification_Mod() { +} + +func (m *ModifyColumnFamiliesRequest_Modification) GetMod() isModifyColumnFamiliesRequest_Modification_Mod { + if m != nil { + return m.Mod + } + return nil +} + +func (m *ModifyColumnFamiliesRequest_Modification) GetCreate() *ColumnFamily { + if x, ok := m.GetMod().(*ModifyColumnFamiliesRequest_Modification_Create); ok { + return x.Create + } + return nil +} + +func (m *ModifyColumnFamiliesRequest_Modification) GetUpdate() *ColumnFamily { + if x, ok := m.GetMod().(*ModifyColumnFamiliesRequest_Modification_Update); ok { + return x.Update + } + return nil +} + +func (m *ModifyColumnFamiliesRequest_Modification) GetDrop() bool { + if x, ok := m.GetMod().(*ModifyColumnFamiliesRequest_Modification_Drop); ok { + return x.Drop + } + return false +} + +// XXX_OneofFuncs 
is for the internal use of the proto package. +func (*ModifyColumnFamiliesRequest_Modification) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ModifyColumnFamiliesRequest_Modification_OneofMarshaler, _ModifyColumnFamiliesRequest_Modification_OneofUnmarshaler, _ModifyColumnFamiliesRequest_Modification_OneofSizer, []interface{}{ + (*ModifyColumnFamiliesRequest_Modification_Create)(nil), + (*ModifyColumnFamiliesRequest_Modification_Update)(nil), + (*ModifyColumnFamiliesRequest_Modification_Drop)(nil), + } +} + +func _ModifyColumnFamiliesRequest_Modification_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ModifyColumnFamiliesRequest_Modification) + // mod + switch x := m.Mod.(type) { + case *ModifyColumnFamiliesRequest_Modification_Create: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Create); err != nil { + return err + } + case *ModifyColumnFamiliesRequest_Modification_Update: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *ModifyColumnFamiliesRequest_Modification_Drop: + t := uint64(0) + if x.Drop { + t = 1 + } + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("ModifyColumnFamiliesRequest_Modification.Mod has unexpected type %T", x) + } + return nil +} + +func _ModifyColumnFamiliesRequest_Modification_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ModifyColumnFamiliesRequest_Modification) + switch tag { + case 2: // mod.create + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ColumnFamily) + err := b.DecodeMessage(msg) + m.Mod = &ModifyColumnFamiliesRequest_Modification_Create{msg} + return true, err + case 3: // mod.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ColumnFamily) + err := b.DecodeMessage(msg) + m.Mod = &ModifyColumnFamiliesRequest_Modification_Update{msg} + return true, err + case 4: // mod.drop + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Mod = &ModifyColumnFamiliesRequest_Modification_Drop{x != 0} + return true, err + default: + return false, nil + } +} + +func _ModifyColumnFamiliesRequest_Modification_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ModifyColumnFamiliesRequest_Modification) + // mod + switch x := m.Mod.(type) { + case *ModifyColumnFamiliesRequest_Modification_Create: + s := proto.Size(x.Create) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModifyColumnFamiliesRequest_Modification_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModifyColumnFamiliesRequest_Modification_Drop: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] +type GenerateConsistencyTokenRequest struct { + // The unique name of the Table for which to create a consistency token. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateConsistencyTokenRequest) Reset() { *m = GenerateConsistencyTokenRequest{} } +func (m *GenerateConsistencyTokenRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateConsistencyTokenRequest) ProtoMessage() {} +func (*GenerateConsistencyTokenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{8} +} +func (m *GenerateConsistencyTokenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateConsistencyTokenRequest.Unmarshal(m, b) +} +func (m *GenerateConsistencyTokenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateConsistencyTokenRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateConsistencyTokenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateConsistencyTokenRequest.Merge(dst, src) +} +func (m *GenerateConsistencyTokenRequest) XXX_Size() int { + return xxx_messageInfo_GenerateConsistencyTokenRequest.Size(m) +} +func (m *GenerateConsistencyTokenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateConsistencyTokenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateConsistencyTokenRequest proto.InternalMessageInfo + +func (m *GenerateConsistencyTokenRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Response message for +// [google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken][google.bigtable.admin.v2.BigtableTableAdmin.GenerateConsistencyToken] +type GenerateConsistencyTokenResponse struct { + // The generated consistency token. 
+ ConsistencyToken string `protobuf:"bytes,1,opt,name=consistency_token,json=consistencyToken,proto3" json:"consistency_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateConsistencyTokenResponse) Reset() { *m = GenerateConsistencyTokenResponse{} } +func (m *GenerateConsistencyTokenResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateConsistencyTokenResponse) ProtoMessage() {} +func (*GenerateConsistencyTokenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{9} +} +func (m *GenerateConsistencyTokenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateConsistencyTokenResponse.Unmarshal(m, b) +} +func (m *GenerateConsistencyTokenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateConsistencyTokenResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateConsistencyTokenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateConsistencyTokenResponse.Merge(dst, src) +} +func (m *GenerateConsistencyTokenResponse) XXX_Size() int { + return xxx_messageInfo_GenerateConsistencyTokenResponse.Size(m) +} +func (m *GenerateConsistencyTokenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateConsistencyTokenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateConsistencyTokenResponse proto.InternalMessageInfo + +func (m *GenerateConsistencyTokenResponse) GetConsistencyToken() string { + if m != nil { + return m.ConsistencyToken + } + return "" +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency][google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency] +type CheckConsistencyRequest struct { + // The unique name of the Table for which to check replication consistency. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The token created using GenerateConsistencyToken for the Table. + ConsistencyToken string `protobuf:"bytes,2,opt,name=consistency_token,json=consistencyToken,proto3" json:"consistency_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckConsistencyRequest) Reset() { *m = CheckConsistencyRequest{} } +func (m *CheckConsistencyRequest) String() string { return proto.CompactTextString(m) } +func (*CheckConsistencyRequest) ProtoMessage() {} +func (*CheckConsistencyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{10} +} +func (m *CheckConsistencyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckConsistencyRequest.Unmarshal(m, b) +} +func (m *CheckConsistencyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckConsistencyRequest.Marshal(b, m, deterministic) +} +func (dst *CheckConsistencyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckConsistencyRequest.Merge(dst, src) +} +func (m *CheckConsistencyRequest) XXX_Size() int { + return xxx_messageInfo_CheckConsistencyRequest.Size(m) +} +func (m *CheckConsistencyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckConsistencyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckConsistencyRequest proto.InternalMessageInfo + +func (m *CheckConsistencyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CheckConsistencyRequest) GetConsistencyToken() string { + if m != nil { + return m.ConsistencyToken + } + return "" +} + +// Response message for +// [google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency][google.bigtable.admin.v2.BigtableTableAdmin.CheckConsistency] +type CheckConsistencyResponse struct { + // True only if the token is consistent. A token is consistent if replication + // has caught up with the restrictions specified in the request. 
+ Consistent bool `protobuf:"varint,1,opt,name=consistent,proto3" json:"consistent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckConsistencyResponse) Reset() { *m = CheckConsistencyResponse{} } +func (m *CheckConsistencyResponse) String() string { return proto.CompactTextString(m) } +func (*CheckConsistencyResponse) ProtoMessage() {} +func (*CheckConsistencyResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{11} +} +func (m *CheckConsistencyResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckConsistencyResponse.Unmarshal(m, b) +} +func (m *CheckConsistencyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckConsistencyResponse.Marshal(b, m, deterministic) +} +func (dst *CheckConsistencyResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckConsistencyResponse.Merge(dst, src) +} +func (m *CheckConsistencyResponse) XXX_Size() int { + return xxx_messageInfo_CheckConsistencyResponse.Size(m) +} +func (m *CheckConsistencyResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckConsistencyResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckConsistencyResponse proto.InternalMessageInfo + +func (m *CheckConsistencyResponse) GetConsistent() bool { + if m != nil { + return m.Consistent + } + return false +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.SnapshotTable][google.bigtable.admin.v2.BigtableTableAdmin.SnapshotTable] +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type SnapshotTableRequest struct { + // The unique name of the table to have the snapshot taken. + // Values are of the form + // `projects//instances//tables/
`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the cluster where the snapshot will be created in. + // Values are of the form + // `projects//instances//clusters/`. + Cluster string `protobuf:"bytes,2,opt,name=cluster,proto3" json:"cluster,omitempty"` + // The ID by which the new snapshot should be referred to within the parent + // cluster, e.g., `mysnapshot` of the form: `[_a-zA-Z0-9][-_.a-zA-Z0-9]*` + // rather than + // `projects//instances//clusters//snapshots/mysnapshot`. + SnapshotId string `protobuf:"bytes,3,opt,name=snapshot_id,json=snapshotId,proto3" json:"snapshot_id,omitempty"` + // The amount of time that the new snapshot can stay active after it is + // created. Once 'ttl' expires, the snapshot will get deleted. The maximum + // amount of time a snapshot can stay active is 7 days. If 'ttl' is not + // specified, the default value of 24 hours will be used. + Ttl *duration.Duration `protobuf:"bytes,4,opt,name=ttl,proto3" json:"ttl,omitempty"` + // Description of the snapshot. + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SnapshotTableRequest) Reset() { *m = SnapshotTableRequest{} } +func (m *SnapshotTableRequest) String() string { return proto.CompactTextString(m) } +func (*SnapshotTableRequest) ProtoMessage() {} +func (*SnapshotTableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{12} +} +func (m *SnapshotTableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SnapshotTableRequest.Unmarshal(m, b) +} +func (m *SnapshotTableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SnapshotTableRequest.Marshal(b, m, deterministic) +} +func (dst *SnapshotTableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SnapshotTableRequest.Merge(dst, src) +} +func (m *SnapshotTableRequest) XXX_Size() int { + return xxx_messageInfo_SnapshotTableRequest.Size(m) +} +func (m *SnapshotTableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SnapshotTableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SnapshotTableRequest proto.InternalMessageInfo + +func (m *SnapshotTableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SnapshotTableRequest) GetCluster() string { + if m != nil { + return m.Cluster + } + return "" +} + +func (m *SnapshotTableRequest) GetSnapshotId() string { + if m != nil { + return m.SnapshotId + } + return "" +} + +func (m *SnapshotTableRequest) GetTtl() *duration.Duration { + if m != nil { + return m.Ttl + } + return nil +} + +func (m *SnapshotTableRequest) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Request message for +// [google.bigtable.admin.v2.BigtableTableAdmin.GetSnapshot][google.bigtable.admin.v2.BigtableTableAdmin.GetSnapshot] +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type GetSnapshotRequest struct { + // The unique name of the requested snapshot. + // Values are of the form + // `projects//instances//clusters//snapshots/`. 
+	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetSnapshotRequest) Reset() { *m = GetSnapshotRequest{} }
+func (m *GetSnapshotRequest) String() string { return proto.CompactTextString(m) }
+func (*GetSnapshotRequest) ProtoMessage() {}
+func (*GetSnapshotRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{13}
+}
+func (m *GetSnapshotRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetSnapshotRequest.Unmarshal(m, b)
+}
+func (m *GetSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetSnapshotRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetSnapshotRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetSnapshotRequest.Merge(dst, src)
+}
+func (m *GetSnapshotRequest) XXX_Size() int {
+	return xxx_messageInfo_GetSnapshotRequest.Size(m)
+}
+func (m *GetSnapshotRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetSnapshotRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetSnapshotRequest proto.InternalMessageInfo
+
+func (m *GetSnapshotRequest) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+// Request message for
+// [google.bigtable.admin.v2.BigtableTableAdmin.ListSnapshots][google.bigtable.admin.v2.BigtableTableAdmin.ListSnapshots]
+//
+// Note: This is a private alpha release of Cloud Bigtable snapshots. This
+// feature is not currently available to most Cloud Bigtable customers. This
+// feature might be changed in backward-incompatible ways and is not recommended
+// for production use. It is not subject to any SLA or deprecation policy.
+type ListSnapshotsRequest struct {
+	// The unique name of the cluster for which snapshots should be listed.
+	// Values are of the form
+	// `projects/<project>/instances/<instance>/clusters/<cluster>`.
+	// Use `<cluster> = '-'` to list snapshots for all clusters in an instance,
+	// e.g., `projects/<project>/instances/<instance>/clusters/-`.
+	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
+	// The maximum number of snapshots to return per page.
+	// CURRENTLY UNIMPLEMENTED AND IGNORED.
+	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
+	// The value of `next_page_token` returned by a previous call.
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSnapshotsRequest) Reset() { *m = ListSnapshotsRequest{} } +func (m *ListSnapshotsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSnapshotsRequest) ProtoMessage() {} +func (*ListSnapshotsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{14} +} +func (m *ListSnapshotsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSnapshotsRequest.Unmarshal(m, b) +} +func (m *ListSnapshotsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSnapshotsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSnapshotsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSnapshotsRequest.Merge(dst, src) +} +func (m *ListSnapshotsRequest) XXX_Size() int { + return xxx_messageInfo_ListSnapshotsRequest.Size(m) +} +func (m *ListSnapshotsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSnapshotsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSnapshotsRequest proto.InternalMessageInfo + +func (m *ListSnapshotsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSnapshotsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSnapshotsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for +// [google.bigtable.admin.v2.BigtableTableAdmin.ListSnapshots][google.bigtable.admin.v2.BigtableTableAdmin.ListSnapshots] +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type ListSnapshotsResponse struct { + // The snapshots present in the requested cluster. + Snapshots []*Snapshot `protobuf:"bytes,1,rep,name=snapshots,proto3" json:"snapshots,omitempty"` + // Set if not all snapshots could be returned in a single response. + // Pass this value to `page_token` in another request to get the next + // page of results. 
+	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ListSnapshotsResponse) Reset() { *m = ListSnapshotsResponse{} }
+func (m *ListSnapshotsResponse) String() string { return proto.CompactTextString(m) }
+func (*ListSnapshotsResponse) ProtoMessage() {}
+func (*ListSnapshotsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{15}
+}
+func (m *ListSnapshotsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ListSnapshotsResponse.Unmarshal(m, b)
+}
+func (m *ListSnapshotsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ListSnapshotsResponse.Marshal(b, m, deterministic)
+}
+func (dst *ListSnapshotsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ListSnapshotsResponse.Merge(dst, src)
+}
+func (m *ListSnapshotsResponse) XXX_Size() int {
+	return xxx_messageInfo_ListSnapshotsResponse.Size(m)
+}
+func (m *ListSnapshotsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ListSnapshotsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ListSnapshotsResponse proto.InternalMessageInfo
+
+func (m *ListSnapshotsResponse) GetSnapshots() []*Snapshot {
+	if m != nil {
+		return m.Snapshots
+	}
+	return nil
+}
+
+func (m *ListSnapshotsResponse) GetNextPageToken() string {
+	if m != nil {
+		return m.NextPageToken
+	}
+	return ""
+}
+
+// Request message for
+// [google.bigtable.admin.v2.BigtableTableAdmin.DeleteSnapshot][google.bigtable.admin.v2.BigtableTableAdmin.DeleteSnapshot]
+//
+// Note: This is a private alpha release of Cloud Bigtable snapshots. This
+// feature is not currently available to most Cloud Bigtable customers. This
+// feature might be changed in backward-incompatible ways and is not recommended
+// for production use. It is not subject to any SLA or deprecation policy.
+type DeleteSnapshotRequest struct {
+	// The unique name of the snapshot to be deleted.
+	// Values are of the form
+	// `projects/<project>/instances/<instance>/clusters/<cluster>/snapshots/<snapshot>`.
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSnapshotRequest) Reset() { *m = DeleteSnapshotRequest{} } +func (m *DeleteSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSnapshotRequest) ProtoMessage() {} +func (*DeleteSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{16} +} +func (m *DeleteSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSnapshotRequest.Unmarshal(m, b) +} +func (m *DeleteSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSnapshotRequest.Merge(dst, src) +} +func (m *DeleteSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSnapshotRequest.Size(m) +} +func (m *DeleteSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSnapshotRequest proto.InternalMessageInfo + +func (m *DeleteSnapshotRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The metadata for the Operation returned by SnapshotTable. +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type SnapshotTableMetadata struct { + // The request that prompted the initiation of this SnapshotTable operation. + OriginalRequest *SnapshotTableRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SnapshotTableMetadata) Reset() { *m = SnapshotTableMetadata{} } +func (m *SnapshotTableMetadata) String() string { return proto.CompactTextString(m) } +func (*SnapshotTableMetadata) ProtoMessage() {} +func (*SnapshotTableMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{17} +} +func (m *SnapshotTableMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SnapshotTableMetadata.Unmarshal(m, b) +} +func (m *SnapshotTableMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SnapshotTableMetadata.Marshal(b, m, deterministic) +} +func (dst *SnapshotTableMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_SnapshotTableMetadata.Merge(dst, src) +} +func (m *SnapshotTableMetadata) XXX_Size() int { + return xxx_messageInfo_SnapshotTableMetadata.Size(m) +} +func (m *SnapshotTableMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_SnapshotTableMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_SnapshotTableMetadata proto.InternalMessageInfo + +func (m *SnapshotTableMetadata) GetOriginalRequest() *SnapshotTableRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *SnapshotTableMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *SnapshotTableMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +// The metadata for the Operation returned by CreateTableFromSnapshot. +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type CreateTableFromSnapshotMetadata struct { + // The request that prompted the initiation of this CreateTableFromSnapshot + // operation. + OriginalRequest *CreateTableFromSnapshotRequest `protobuf:"bytes,1,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The time at which the original request was received. + RequestTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=request_time,json=requestTime,proto3" json:"request_time,omitempty"` + // The time at which the operation failed or was completed successfully. 
+ FinishTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTableFromSnapshotMetadata) Reset() { *m = CreateTableFromSnapshotMetadata{} } +func (m *CreateTableFromSnapshotMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateTableFromSnapshotMetadata) ProtoMessage() {} +func (*CreateTableFromSnapshotMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_table_admin_4c164c60709448e2, []int{18} +} +func (m *CreateTableFromSnapshotMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTableFromSnapshotMetadata.Unmarshal(m, b) +} +func (m *CreateTableFromSnapshotMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTableFromSnapshotMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateTableFromSnapshotMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTableFromSnapshotMetadata.Merge(dst, src) +} +func (m *CreateTableFromSnapshotMetadata) XXX_Size() int { + return xxx_messageInfo_CreateTableFromSnapshotMetadata.Size(m) +} +func (m *CreateTableFromSnapshotMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTableFromSnapshotMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTableFromSnapshotMetadata proto.InternalMessageInfo + +func (m *CreateTableFromSnapshotMetadata) GetOriginalRequest() *CreateTableFromSnapshotRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +func (m *CreateTableFromSnapshotMetadata) GetRequestTime() *timestamp.Timestamp { + if m != nil { + return m.RequestTime + } + return nil +} + +func (m *CreateTableFromSnapshotMetadata) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +func init() { + proto.RegisterType((*CreateTableRequest)(nil), "google.bigtable.admin.v2.CreateTableRequest") + proto.RegisterType((*CreateTableRequest_Split)(nil), "google.bigtable.admin.v2.CreateTableRequest.Split") + proto.RegisterType((*CreateTableFromSnapshotRequest)(nil), "google.bigtable.admin.v2.CreateTableFromSnapshotRequest") + proto.RegisterType((*DropRowRangeRequest)(nil), "google.bigtable.admin.v2.DropRowRangeRequest") + proto.RegisterType((*ListTablesRequest)(nil), "google.bigtable.admin.v2.ListTablesRequest") + proto.RegisterType((*ListTablesResponse)(nil), "google.bigtable.admin.v2.ListTablesResponse") + proto.RegisterType((*GetTableRequest)(nil), "google.bigtable.admin.v2.GetTableRequest") + proto.RegisterType((*DeleteTableRequest)(nil), "google.bigtable.admin.v2.DeleteTableRequest") + proto.RegisterType((*ModifyColumnFamiliesRequest)(nil), "google.bigtable.admin.v2.ModifyColumnFamiliesRequest") + proto.RegisterType((*ModifyColumnFamiliesRequest_Modification)(nil), "google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification") + proto.RegisterType((*GenerateConsistencyTokenRequest)(nil), "google.bigtable.admin.v2.GenerateConsistencyTokenRequest") + proto.RegisterType((*GenerateConsistencyTokenResponse)(nil), "google.bigtable.admin.v2.GenerateConsistencyTokenResponse") + proto.RegisterType((*CheckConsistencyRequest)(nil), "google.bigtable.admin.v2.CheckConsistencyRequest") + proto.RegisterType((*CheckConsistencyResponse)(nil), "google.bigtable.admin.v2.CheckConsistencyResponse") + proto.RegisterType((*SnapshotTableRequest)(nil), 
"google.bigtable.admin.v2.SnapshotTableRequest") + proto.RegisterType((*GetSnapshotRequest)(nil), "google.bigtable.admin.v2.GetSnapshotRequest") + proto.RegisterType((*ListSnapshotsRequest)(nil), "google.bigtable.admin.v2.ListSnapshotsRequest") + proto.RegisterType((*ListSnapshotsResponse)(nil), "google.bigtable.admin.v2.ListSnapshotsResponse") + proto.RegisterType((*DeleteSnapshotRequest)(nil), "google.bigtable.admin.v2.DeleteSnapshotRequest") + proto.RegisterType((*SnapshotTableMetadata)(nil), "google.bigtable.admin.v2.SnapshotTableMetadata") + proto.RegisterType((*CreateTableFromSnapshotMetadata)(nil), "google.bigtable.admin.v2.CreateTableFromSnapshotMetadata") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableTableAdminClient is the client API for BigtableTableAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BigtableTableAdminClient interface { + // Creates a new table in the specified instance. + // The table can be created with a full set of initial column families, + // specified in the request. + CreateTable(ctx context.Context, in *CreateTableRequest, opts ...grpc.CallOption) (*Table, error) + // Creates a new table from the specified snapshot. The target table must + // not exist. The snapshot and the table must be in the same instance. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + CreateTableFromSnapshot(ctx context.Context, in *CreateTableFromSnapshotRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists all tables served from a specified instance. + ListTables(ctx context.Context, in *ListTablesRequest, opts ...grpc.CallOption) (*ListTablesResponse, error) + // Gets metadata information about the specified table. + GetTable(ctx context.Context, in *GetTableRequest, opts ...grpc.CallOption) (*Table, error) + // Permanently deletes a specified table and all of its data. + DeleteTable(ctx context.Context, in *DeleteTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Performs a series of column family modifications on the specified table. + // Either all or none of the modifications will occur before this method + // returns, but data requests received prior to that point may see a table + // where only some modifications have taken effect. + ModifyColumnFamilies(ctx context.Context, in *ModifyColumnFamiliesRequest, opts ...grpc.CallOption) (*Table, error) + // Permanently drop/delete a row range from a specified table. The request can + // specify whether to delete all rows in a table, or only those that match a + // particular prefix. + DropRowRange(ctx context.Context, in *DropRowRangeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Generates a consistency token for a Table, which can be used in + // CheckConsistency to check whether mutations to the table that finished + // before this call started have been replicated. 
The tokens will be available + // for 90 days. + GenerateConsistencyToken(ctx context.Context, in *GenerateConsistencyTokenRequest, opts ...grpc.CallOption) (*GenerateConsistencyTokenResponse, error) + // Checks replication consistency based on a consistency token, that is, if + // replication has caught up based on the conditions specified in the token + // and the check request. + CheckConsistency(ctx context.Context, in *CheckConsistencyRequest, opts ...grpc.CallOption) (*CheckConsistencyResponse, error) + // Creates a new snapshot in the specified cluster from the specified + // source table. The cluster and the table must be in the same instance. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + SnapshotTable(ctx context.Context, in *SnapshotTableRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets metadata information about the specified snapshot. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + GetSnapshot(ctx context.Context, in *GetSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) + // Lists all snapshots associated with the specified cluster. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + ListSnapshots(ctx context.Context, in *ListSnapshotsRequest, opts ...grpc.CallOption) (*ListSnapshotsResponse, error) + // Permanently deletes the specified snapshot. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + DeleteSnapshot(ctx context.Context, in *DeleteSnapshotRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type bigtableTableAdminClient struct { + cc *grpc.ClientConn +} + +func NewBigtableTableAdminClient(cc *grpc.ClientConn) BigtableTableAdminClient { + return &bigtableTableAdminClient{cc} +} + +func (c *bigtableTableAdminClient) CreateTable(ctx context.Context, in *CreateTableRequest, opts ...grpc.CallOption) (*Table, error) { + out := new(Table) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) CreateTableFromSnapshot(ctx context.Context, in *CreateTableFromSnapshotRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) ListTables(ctx context.Context, in *ListTablesRequest, opts ...grpc.CallOption) (*ListTablesResponse, error) { + out := new(ListTablesResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/ListTables", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) GetTable(ctx context.Context, in *GetTableRequest, opts ...grpc.CallOption) (*Table, error) { + out := new(Table) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/GetTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) DeleteTable(ctx context.Context, in *DeleteTableRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) ModifyColumnFamilies(ctx context.Context, in *ModifyColumnFamiliesRequest, opts ...grpc.CallOption) (*Table, error) { + out := new(Table) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) DropRowRange(ctx context.Context, in *DropRowRangeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) GenerateConsistencyToken(ctx context.Context, in *GenerateConsistencyTokenRequest, opts ...grpc.CallOption) (*GenerateConsistencyTokenResponse, error) { + out := new(GenerateConsistencyTokenResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) CheckConsistency(ctx context.Context, in *CheckConsistencyRequest, opts ...grpc.CallOption) (*CheckConsistencyResponse, error) { + out := new(CheckConsistencyResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) SnapshotTable(ctx context.Context, in *SnapshotTableRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) GetSnapshot(ctx context.Context, in *GetSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) { + out := new(Snapshot) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) ListSnapshots(ctx context.Context, in *ListSnapshotsRequest, opts ...grpc.CallOption) (*ListSnapshotsResponse, error) { + out := new(ListSnapshotsResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableTableAdminClient) DeleteSnapshot(ctx context.Context, in *DeleteSnapshotRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableTableAdminServer is the server API for BigtableTableAdmin service. +type BigtableTableAdminServer interface { + // Creates a new table in the specified instance. + // The table can be created with a full set of initial column families, + // specified in the request. + CreateTable(context.Context, *CreateTableRequest) (*Table, error) + // Creates a new table from the specified snapshot. The target table must + // not exist. The snapshot and the table must be in the same instance. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + CreateTableFromSnapshot(context.Context, *CreateTableFromSnapshotRequest) (*longrunning.Operation, error) + // Lists all tables served from a specified instance. + ListTables(context.Context, *ListTablesRequest) (*ListTablesResponse, error) + // Gets metadata information about the specified table. + GetTable(context.Context, *GetTableRequest) (*Table, error) + // Permanently deletes a specified table and all of its data. + DeleteTable(context.Context, *DeleteTableRequest) (*empty.Empty, error) + // Performs a series of column family modifications on the specified table. + // Either all or none of the modifications will occur before this method + // returns, but data requests received prior to that point may see a table + // where only some modifications have taken effect. + ModifyColumnFamilies(context.Context, *ModifyColumnFamiliesRequest) (*Table, error) + // Permanently drop/delete a row range from a specified table. The request can + // specify whether to delete all rows in a table, or only those that match a + // particular prefix. + DropRowRange(context.Context, *DropRowRangeRequest) (*empty.Empty, error) + // Generates a consistency token for a Table, which can be used in + // CheckConsistency to check whether mutations to the table that finished + // before this call started have been replicated. The tokens will be available + // for 90 days. + GenerateConsistencyToken(context.Context, *GenerateConsistencyTokenRequest) (*GenerateConsistencyTokenResponse, error) + // Checks replication consistency based on a consistency token, that is, if + // replication has caught up based on the conditions specified in the token + // and the check request. + CheckConsistency(context.Context, *CheckConsistencyRequest) (*CheckConsistencyResponse, error) + // Creates a new snapshot in the specified cluster from the specified + // source table. The cluster and the table must be in the same instance. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. 
+ SnapshotTable(context.Context, *SnapshotTableRequest) (*longrunning.Operation, error) + // Gets metadata information about the specified snapshot. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + GetSnapshot(context.Context, *GetSnapshotRequest) (*Snapshot, error) + // Lists all snapshots associated with the specified cluster. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + ListSnapshots(context.Context, *ListSnapshotsRequest) (*ListSnapshotsResponse, error) + // Permanently deletes the specified snapshot. + // + // Note: This is a private alpha release of Cloud Bigtable snapshots. This + // feature is not currently available to most Cloud Bigtable customers. This + // feature might be changed in backward-incompatible ways and is not + // recommended for production use. It is not subject to any SLA or deprecation + // policy. + DeleteSnapshot(context.Context, *DeleteSnapshotRequest) (*empty.Empty, error) +} + +func RegisterBigtableTableAdminServer(s *grpc.Server, srv BigtableTableAdminServer) { + s.RegisterService(&_BigtableTableAdmin_serviceDesc, srv) +} + +func _BigtableTableAdmin_CreateTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).CreateTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).CreateTable(ctx, req.(*CreateTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_CreateTableFromSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTableFromSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).CreateTableFromSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/CreateTableFromSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).CreateTableFromSnapshot(ctx, req.(*CreateTableFromSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_ListTables_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTablesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).ListTables(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.bigtable.admin.v2.BigtableTableAdmin/ListTables", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).ListTables(ctx, req.(*ListTablesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_GetTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).GetTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/GetTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).GetTable(ctx, req.(*GetTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_DeleteTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).DeleteTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).DeleteTable(ctx, req.(*DeleteTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_ModifyColumnFamilies_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyColumnFamiliesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).ModifyColumnFamilies(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/ModifyColumnFamilies", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).ModifyColumnFamilies(ctx, req.(*ModifyColumnFamiliesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_DropRowRange_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DropRowRangeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).DropRowRange(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/DropRowRange", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).DropRowRange(ctx, req.(*DropRowRangeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_GenerateConsistencyToken_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateConsistencyTokenRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).GenerateConsistencyToken(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: 
srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/GenerateConsistencyToken", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).GenerateConsistencyToken(ctx, req.(*GenerateConsistencyTokenRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_CheckConsistency_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckConsistencyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).CheckConsistency(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/CheckConsistency", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).CheckConsistency(ctx, req.(*CheckConsistencyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_SnapshotTable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SnapshotTableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).SnapshotTable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/SnapshotTable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).SnapshotTable(ctx, req.(*SnapshotTableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_GetSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).GetSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/GetSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).GetSnapshot(ctx, req.(*GetSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_ListSnapshots_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSnapshotsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).ListSnapshots(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/ListSnapshots", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).ListSnapshots(ctx, req.(*ListSnapshotsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableTableAdmin_DeleteSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableTableAdminServer).DeleteSnapshot(ctx, in) + } 
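For orientation, a minimal sketch of how caller code might use the generated BigtableTableAdmin client; the connection, context, and resource name below are illustrative assumptions, not taken from this vendored file:

    // Assumes: btadmin is an import alias for this package,
    //   btadmin "google.golang.org/genproto/googleapis/bigtable/admin/v2"
    // and conn is an already-dialed, authenticated *grpc.ClientConn.
    client := btadmin.NewBigtableTableAdminClient(conn)
    tbl, err := client.GetTable(ctx, &btadmin.GetTableRequest{
        Name: "projects/my-project/instances/my-instance/tables/my-table", // hypothetical name
    })
    if err != nil {
        return err
    }
    log.Println("found table", tbl.GetName())

The server side mirrors this: a type implementing BigtableTableAdminServer is passed to RegisterBigtableTableAdminServer, which wires each RPC through the corresponding _BigtableTableAdmin_*_Handler via the generated service descriptor.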
+ info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.admin.v2.BigtableTableAdmin/DeleteSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableTableAdminServer).DeleteSnapshot(ctx, req.(*DeleteSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigtableTableAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.admin.v2.BigtableTableAdmin", + HandlerType: (*BigtableTableAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTable", + Handler: _BigtableTableAdmin_CreateTable_Handler, + }, + { + MethodName: "CreateTableFromSnapshot", + Handler: _BigtableTableAdmin_CreateTableFromSnapshot_Handler, + }, + { + MethodName: "ListTables", + Handler: _BigtableTableAdmin_ListTables_Handler, + }, + { + MethodName: "GetTable", + Handler: _BigtableTableAdmin_GetTable_Handler, + }, + { + MethodName: "DeleteTable", + Handler: _BigtableTableAdmin_DeleteTable_Handler, + }, + { + MethodName: "ModifyColumnFamilies", + Handler: _BigtableTableAdmin_ModifyColumnFamilies_Handler, + }, + { + MethodName: "DropRowRange", + Handler: _BigtableTableAdmin_DropRowRange_Handler, + }, + { + MethodName: "GenerateConsistencyToken", + Handler: _BigtableTableAdmin_GenerateConsistencyToken_Handler, + }, + { + MethodName: "CheckConsistency", + Handler: _BigtableTableAdmin_CheckConsistency_Handler, + }, + { + MethodName: "SnapshotTable", + Handler: _BigtableTableAdmin_SnapshotTable_Handler, + }, + { + MethodName: "GetSnapshot", + Handler: _BigtableTableAdmin_GetSnapshot_Handler, + }, + { + MethodName: "ListSnapshots", + Handler: _BigtableTableAdmin_ListSnapshots_Handler, + }, + { + MethodName: "DeleteSnapshot", + Handler: _BigtableTableAdmin_DeleteSnapshot_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/bigtable/admin/v2/bigtable_table_admin.proto", +} + +func init() { + proto.RegisterFile("google/bigtable/admin/v2/bigtable_table_admin.proto", fileDescriptor_bigtable_table_admin_4c164c60709448e2) +} + +var fileDescriptor_bigtable_table_admin_4c164c60709448e2 = []byte{ + // 1520 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x58, 0xcb, 0x6f, 0xdc, 0x54, + 0x17, 0xaf, 0x27, 0x8f, 0x26, 0x67, 0xf2, 0xea, 0xfd, 0xd2, 0x76, 0x3a, 0x6d, 0x93, 0xc8, 0x5f, + 0x55, 0x42, 0x52, 0xc6, 0x62, 0xaa, 0xa8, 0x21, 0x6d, 0x20, 0x9d, 0x84, 0x26, 0x05, 0x4a, 0x23, + 0xb7, 0xaa, 0xd4, 0x2a, 0xd2, 0xc8, 0xb1, 0x6f, 0x9c, 0xdb, 0x78, 0x7c, 0x8d, 0x7d, 0xa7, 0x69, + 0x0a, 0x5d, 0xd0, 0x0d, 0x12, 0xdb, 0xae, 0x2a, 0x24, 0xa4, 0x6e, 0x59, 0x22, 0x24, 0xa4, 0x0a, + 0x09, 0xb1, 0x65, 0xcb, 0x9a, 0x05, 0x12, 0x6b, 0x56, 0xfc, 0x01, 0xe8, 0x3e, 0x9c, 0x78, 0x1e, + 0x1e, 0xcf, 0x84, 0x0d, 0x9b, 0xc8, 0xf7, 0xdc, 0xf3, 0xf8, 0x9d, 0x73, 0xee, 0x3d, 0xf7, 0x37, + 0x81, 0xab, 0x2e, 0xa5, 0xae, 0x87, 0x8d, 0x6d, 0xe2, 0x32, 0x6b, 0xdb, 0xc3, 0x86, 0xe5, 0xd4, + 0x88, 0x6f, 0x3c, 0x29, 0x1f, 0x4a, 0xaa, 0xf2, 0xaf, 0x90, 0x97, 0x82, 0x90, 0x32, 0x8a, 0x0a, + 0xd2, 0xa8, 0x14, 0xab, 0x94, 0xe4, 0xe6, 0x93, 0x72, 0xf1, 0x82, 0x72, 0x67, 0x05, 0xc4, 0xb0, + 0x7c, 0x9f, 0x32, 0x8b, 0x11, 0xea, 0x47, 0xd2, 0xae, 0x78, 0x29, 0x35, 0x98, 0x74, 0x23, 0xb5, + 0xfe, 0xaf, 0xb4, 0x3c, 0xea, 0xbb, 0x61, 0xdd, 0xf7, 0x89, 0xef, 0x1a, 0x34, 0xc0, 0x61, 0x83, + 0xab, 0x29, 0xa5, 0x24, 0x56, 0xdb, 0xf5, 0x1d, 0xc3, 0xa9, 0x4b, 0x05, 0xb5, 0x7f, 0xbe, 0x79, + 0x1f, 0xd7, 0x02, 0x76, 0xa0, 0x36, 0xa7, 0x9b, 0x37, 0x19, 0xa9, 0xe1, 0x88, 0x59, 0xb5, 0x40, 
+ 0x2a, 0xe8, 0x7f, 0x6b, 0x80, 0x56, 0x43, 0x6c, 0x31, 0x7c, 0x9f, 0x03, 0x33, 0xf1, 0x67, 0x75, + 0x1c, 0x31, 0x74, 0x06, 0x06, 0x03, 0x2b, 0xc4, 0x3e, 0x2b, 0x68, 0x33, 0xda, 0xec, 0xb0, 0xa9, + 0x56, 0xe8, 0x1c, 0x0c, 0xc9, 0x22, 0x11, 0xa7, 0x90, 0x13, 0x3b, 0x27, 0xc5, 0xfa, 0xb6, 0x83, + 0x16, 0x60, 0x40, 0x7c, 0x16, 0xfa, 0x66, 0xb4, 0xd9, 0x7c, 0x79, 0xba, 0x94, 0x56, 0xba, 0x92, + 0x8c, 0x24, 0xb5, 0xd1, 0x43, 0x18, 0x23, 0x3e, 0x61, 0xc4, 0xf2, 0xaa, 0x51, 0xe0, 0x11, 0x16, + 0x15, 0xfa, 0x67, 0xfa, 0x66, 0xf3, 0xe5, 0x72, 0xba, 0x7d, 0x2b, 0xde, 0xd2, 0x3d, 0x6e, 0x6a, + 0x8e, 0x2a, 0x4f, 0x62, 0x15, 0x15, 0xcf, 0xc1, 0x80, 0xf8, 0x42, 0x13, 0xd0, 0xb7, 0x87, 0x0f, + 0x44, 0x2a, 0x23, 0x26, 0xff, 0xd4, 0xbf, 0x80, 0xa9, 0x84, 0x97, 0x5b, 0x21, 0xad, 0xdd, 0xf3, + 0xad, 0x20, 0xda, 0xa5, 0xec, 0x5f, 0x54, 0xe0, 0x2d, 0x18, 0x8f, 0x68, 0x3d, 0xb4, 0x71, 0x35, + 0x52, 0xce, 0x44, 0x2d, 0x86, 0xcd, 0x31, 0x29, 0x8e, 0x43, 0xe8, 0xaf, 0x34, 0xf8, 0xdf, 0x5a, + 0x48, 0x03, 0x93, 0xee, 0x9b, 0x96, 0xef, 0x1e, 0x56, 0x1d, 0x41, 0xbf, 0x6f, 0xd5, 0xb0, 0x8a, + 0x28, 0xbe, 0xd1, 0x65, 0x18, 0x0b, 0xe9, 0x7e, 0x75, 0x0f, 0x1f, 0x54, 0x83, 0x10, 0xef, 0x90, + 0xa7, 0x22, 0xea, 0xc8, 0xc6, 0x09, 0x73, 0x24, 0xa4, 0xfb, 0x1f, 0xe3, 0x83, 0x4d, 0x21, 0x45, + 0x37, 0xa0, 0xe8, 0x60, 0x0f, 0x33, 0x5c, 0xb5, 0x3c, 0xaf, 0xea, 0x58, 0xcc, 0xaa, 0xee, 0x84, + 0xb4, 0x56, 0x3d, 0xea, 0xc9, 0xd0, 0xc6, 0x09, 0xf3, 0x8c, 0xd4, 0xb9, 0xe9, 0x79, 0x6b, 0x16, + 0xb3, 0x78, 0xde, 0xa2, 0x00, 0x95, 0x21, 0x18, 0x64, 0x56, 0xe8, 0x62, 0xa6, 0xbf, 0xd6, 0xe0, + 0xd4, 0x27, 0x24, 0x62, 0x42, 0x1e, 0x65, 0x55, 0x63, 0x11, 0xfa, 0x9f, 0x10, 0xbc, 0x2f, 0x30, + 0x8d, 0x95, 0x2f, 0x65, 0xf4, 0xbc, 0xf4, 0x80, 0xe0, 0x7d, 0x53, 0x58, 0xa0, 0xf3, 0x30, 0x1c, + 0x58, 0x2e, 0xae, 0x46, 0xe4, 0x19, 0x2e, 0xf4, 0xcf, 0x68, 0xb3, 0x03, 0xe6, 0x10, 0x17, 0xdc, + 0x23, 0xcf, 0x30, 0xba, 0x08, 0x20, 0x36, 0x19, 0xdd, 0xc3, 0xbe, 0x2a, 0xa2, 0x50, 0xbf, 0xcf, + 0x05, 0x7a, 0x1d, 0x50, 0x12, 0x62, 0x14, 0x50, 0x3f, 0xc2, 0xe8, 0x1a, 0xcf, 0x81, 0x4b, 0x0a, + 0x9a, 0x38, 0x41, 0x99, 0x27, 0x50, 0xa9, 0xa3, 0xcb, 0x30, 0xee, 0xe3, 0xa7, 0xac, 0x9a, 0x08, + 0x29, 0x3b, 0x3b, 0xca, 0xc5, 0x9b, 0x87, 0x61, 0xab, 0x30, 0xbe, 0x8e, 0x59, 0xc3, 0x3d, 0x69, + 0xd7, 0xb1, 0x63, 0xd7, 0x44, 0x9f, 0x05, 0xb4, 0x26, 0xfa, 0x93, 0x15, 0x43, 0xff, 0x3d, 0x07, + 0xe7, 0xef, 0x50, 0x87, 0xec, 0x1c, 0xac, 0x52, 0xaf, 0x5e, 0xf3, 0x6f, 0x59, 0x35, 0xe2, 0x91, + 0xa3, 0x7e, 0xb5, 0xc3, 0xb5, 0x0b, 0xa3, 0x35, 0x6e, 0x42, 0x6c, 0x39, 0x5f, 0x0a, 0x39, 0x51, + 0xa6, 0x4a, 0x3a, 0xc0, 0x0e, 0x11, 0xe4, 0x9e, 0x72, 0x65, 0x36, 0x3a, 0x2e, 0xfe, 0xac, 0xc1, + 0x48, 0x72, 0x1f, 0x8d, 0x41, 0x8e, 0x38, 0x0a, 0x4c, 0x8e, 0x38, 0x68, 0x05, 0x06, 0x6d, 0x71, + 0xfd, 0x44, 0x91, 0xf2, 0xe5, 0xcb, 0x1d, 0x2e, 0xfb, 0x51, 0xf4, 0x83, 0x8d, 0x13, 0xa6, 0xb2, + 0xe3, 0x1e, 0xea, 0x81, 0xc3, 0x3d, 0xf4, 0xf5, 0xea, 0x41, 0xda, 0xa1, 0x49, 0xe8, 0x77, 0x42, + 0x1a, 0x88, 0xb3, 0xc7, 0xaf, 0x86, 0x58, 0x55, 0x06, 0xa0, 0xaf, 0x46, 0x1d, 0x7d, 0x01, 0xa6, + 0xd7, 0xb1, 0xcf, 0x27, 0x31, 0x5e, 0xa5, 0x7e, 0x44, 0x22, 0x86, 0x7d, 0xfb, 0x40, 0x1c, 0x83, + 0x4e, 0x6d, 0xb9, 0x0b, 0x33, 0xe9, 0x66, 0xea, 0x98, 0xce, 0xc3, 0x29, 0xfb, 0x68, 0x4f, 0x9d, + 0x37, 0xe9, 0x64, 0xc2, 0x6e, 0x32, 0xd2, 0x1f, 0xc1, 0xd9, 0xd5, 0x5d, 0x6c, 0xef, 0x25, 0xbc, + 0x75, 0x6a, 0x71, 0x5b, 0xdf, 0xb9, 0x14, 0xdf, 0x4b, 0x50, 0x68, 0xf5, 0xad, 0x40, 0x4e, 0x01, + 0x1c, 0xea, 0xcb, 0x3b, 0x3f, 0x64, 0x26, 0x24, 0xfa, 0x0f, 0x1a, 0x4c, 0xc6, 0xe3, 0x2c, 0xf3, + 0x42, 0x14, 0xe0, 0xa4, 
0xed, 0xd5, 0x23, 0x86, 0xc3, 0x78, 0x62, 0xaa, 0x25, 0x9a, 0x86, 0x7c, + 0x3c, 0x2a, 0xf9, 0x3c, 0x95, 0x17, 0x1d, 0x62, 0xd1, 0x6d, 0x07, 0xcd, 0x43, 0x1f, 0x63, 0x9e, + 0xe8, 0x51, 0xbe, 0x7c, 0x2e, 0xee, 0x71, 0xfc, 0x9a, 0x95, 0xd6, 0xd4, 0x53, 0x68, 0x72, 0x2d, + 0x34, 0x03, 0x79, 0x07, 0x47, 0x76, 0x48, 0x02, 0x2e, 0x2b, 0x0c, 0x08, 0x6f, 0x49, 0x11, 0xbf, + 0x60, 0xeb, 0x98, 0x35, 0x8f, 0xfa, 0x76, 0x9d, 0x7c, 0x0c, 0x93, 0x7c, 0xc4, 0xc4, 0xaa, 0x99, + 0x83, 0xb0, 0x61, 0x9c, 0xe5, 0x7a, 0x1b, 0x67, 0x5f, 0x6a, 0x70, 0xba, 0x29, 0x98, 0x6a, 0xc3, + 0x0a, 0x0c, 0xc7, 0xc5, 0x88, 0xa7, 0x9a, 0x9e, 0x7e, 0xd0, 0x0f, 0xf3, 0x3a, 0x32, 0xea, 0x7a, + 0xb6, 0xcd, 0xc3, 0x69, 0x39, 0x7a, 0xba, 0x29, 0xce, 0x5f, 0x1a, 0x9c, 0x6e, 0xe8, 0xfe, 0x1d, + 0xcc, 0x2c, 0xfe, 0xe4, 0xa0, 0x87, 0x30, 0x41, 0x43, 0xe2, 0x12, 0xdf, 0xf2, 0xaa, 0xa1, 0xf4, + 0x20, 0x2c, 0xf3, 0xe5, 0x52, 0x36, 0xee, 0xe4, 0x41, 0x32, 0xc7, 0x63, 0x3f, 0x31, 0x90, 0x65, + 0x18, 0x51, 0x1e, 0xab, 0x9c, 0xc4, 0xa8, 0xc9, 0x51, 0x6c, 0x39, 0x13, 0xf7, 0x63, 0x86, 0x63, + 0xe6, 0x95, 0x3e, 0x97, 0xa0, 0xeb, 0x90, 0xdf, 0x21, 0x3e, 0x89, 0x76, 0xa5, 0x75, 0x5f, 0xa6, + 0x35, 0x48, 0x75, 0x2e, 0xd0, 0x5f, 0xe4, 0x60, 0x3a, 0x85, 0x2f, 0x1c, 0xa6, 0x6e, 0xa7, 0xa6, + 0xbe, 0xd8, 0x15, 0x95, 0x69, 0x43, 0x42, 0xfe, 0x53, 0x45, 0x28, 0xff, 0x78, 0x0a, 0x50, 0x45, + 0x65, 0x20, 0x10, 0xdf, 0xe4, 0x59, 0xa0, 0x97, 0x1a, 0xe4, 0x13, 0x69, 0xa0, 0x2b, 0xbd, 0x10, + 0xb7, 0x62, 0xd6, 0x23, 0xad, 0x2f, 0xbc, 0xf8, 0xed, 0xcf, 0x97, 0x39, 0x43, 0x9f, 0xe3, 0xe4, + 0xf9, 0x73, 0x79, 0xdb, 0x96, 0x83, 0x90, 0x3e, 0xc6, 0x36, 0x8b, 0x8c, 0x39, 0x83, 0xf8, 0x11, + 0xb3, 0x7c, 0x1b, 0x47, 0xc6, 0xdc, 0x73, 0x49, 0xae, 0xa3, 0x25, 0x6d, 0x0e, 0xfd, 0xa4, 0xc1, + 0xd9, 0x94, 0xe2, 0xa2, 0x63, 0xf7, 0xa3, 0x78, 0x31, 0xb6, 0x4c, 0x30, 0xf6, 0xd2, 0xdd, 0x98, + 0xb1, 0xeb, 0x1b, 0x02, 0x6b, 0x45, 0x5f, 0xee, 0x01, 0xab, 0x7c, 0xcf, 0x92, 0xc1, 0x38, 0xfc, + 0x6f, 0x34, 0x80, 0x23, 0x8a, 0x83, 0xe6, 0xd3, 0x11, 0xb7, 0x70, 0xb5, 0xe2, 0x95, 0xee, 0x94, + 0xe5, 0x88, 0xd1, 0xcb, 0x02, 0xf3, 0x15, 0xd4, 0x43, 0x7d, 0xd1, 0xd7, 0x1a, 0x0c, 0xc5, 0x4c, + 0x08, 0xbd, 0x9d, 0x1e, 0xae, 0x89, 0x2d, 0x65, 0x37, 0xbb, 0x11, 0x0c, 0x9f, 0x35, 0x29, 0x50, + 0x14, 0x12, 0x63, 0xee, 0x39, 0xfa, 0x4a, 0x83, 0x7c, 0x82, 0x35, 0x75, 0x3a, 0x7f, 0xad, 0xe4, + 0xaa, 0x78, 0xa6, 0xe5, 0xf0, 0x7f, 0xc8, 0x7f, 0x3e, 0xc5, 0x48, 0xe6, 0x7a, 0x41, 0xf2, 0x46, + 0x83, 0xc9, 0x76, 0x94, 0x09, 0x2d, 0x1c, 0x8b, 0x62, 0x65, 0x97, 0xeb, 0x23, 0x01, 0x72, 0x4d, + 0xff, 0xa0, 0x7b, 0x90, 0x4b, 0xb5, 0x36, 0x01, 0xf9, 0x89, 0x7b, 0xa5, 0xc1, 0x48, 0xf2, 0x37, + 0x09, 0x7a, 0xa7, 0x43, 0x1d, 0x5b, 0x7f, 0xbb, 0xa4, 0x16, 0xb2, 0x22, 0x30, 0xde, 0xd0, 0xaf, + 0xf5, 0x80, 0xd1, 0x49, 0xf8, 0xe7, 0xd8, 0xfe, 0xd0, 0xa0, 0x90, 0xc6, 0xab, 0xd0, 0x7b, 0x9d, + 0xce, 0x5f, 0x47, 0x0a, 0x57, 0x5c, 0x3a, 0x8e, 0xa9, 0xba, 0x37, 0x9f, 0x8a, 0xbc, 0x36, 0xf4, + 0xd5, 0x1e, 0xf2, 0x72, 0x53, 0x9c, 0xf2, 0x1c, 0x7f, 0xd1, 0x60, 0xa2, 0x99, 0x8e, 0xa1, 0x77, + 0x3b, 0x4c, 0xaa, 0xf6, 0xb4, 0xb0, 0x58, 0xee, 0xc5, 0x44, 0xe5, 0x72, 0x4b, 0xe4, 0xb2, 0xa2, + 0x5f, 0xef, 0x21, 0x17, 0xbb, 0xc9, 0x19, 0xcf, 0xe1, 0x5b, 0x0d, 0x46, 0x1b, 0x1e, 0x73, 0xd4, + 0xe3, 0xab, 0x9f, 0x35, 0x60, 0xdf, 0x17, 0x40, 0x17, 0xf5, 0xab, 0x3d, 0x00, 0x8d, 0x12, 0x63, + 0xf5, 0xb5, 0x06, 0xf9, 0x04, 0x01, 0xec, 0x34, 0x2b, 0x5a, 0x79, 0x62, 0xb1, 0x0b, 0xea, 0xa5, + 0xaf, 0x08, 0x84, 0x4b, 0x68, 0x31, 0x13, 0xa1, 0xe2, 0xc0, 0xfc, 0xf3, 0x90, 0xae, 0xf1, 0x29, + 0xf2, 0xbd, 0x06, 0xa3, 0x0d, 0x6c, 0xb0, 0x53, 
0x11, 0xdb, 0x71, 0xd4, 0xa2, 0xd1, 0xb5, 0xbe, + 0xea, 0x7f, 0x23, 0xe8, 0x8e, 0x6f, 0x40, 0x02, 0xf6, 0xf3, 0x23, 0xdc, 0xfc, 0xbd, 0x1a, 0x6b, + 0xe4, 0x8f, 0xc8, 0xc8, 0x9a, 0xc3, 0xcd, 0xe5, 0x4d, 0x9b, 0x20, 0x0a, 0xdd, 0xdc, 0xb1, 0x4b, + 0x5a, 0x79, 0xa3, 0xc1, 0x05, 0x9b, 0xd6, 0x52, 0x01, 0x55, 0xce, 0xb6, 0xf2, 0x9a, 0x4d, 0x0e, + 0x62, 0x53, 0x7b, 0xb4, 0xac, 0x8c, 0x5c, 0xea, 0x59, 0xbe, 0x5b, 0xa2, 0xa1, 0x6b, 0xb8, 0xd8, + 0x17, 0x10, 0x0d, 0xb9, 0x65, 0x05, 0x24, 0x6a, 0xfd, 0x47, 0xdf, 0x75, 0xf1, 0xf1, 0x5d, 0x6e, + 0x6a, 0x5d, 0xda, 0xaf, 0x7a, 0xb4, 0xee, 0x94, 0xe2, 0x38, 0x25, 0x11, 0xa3, 0xf4, 0xa0, 0xfc, + 0x6b, 0xac, 0xb0, 0x25, 0x14, 0xb6, 0x62, 0x85, 0x2d, 0xa1, 0xb0, 0xf5, 0xa0, 0xbc, 0x3d, 0x28, + 0x62, 0x5d, 0xfd, 0x27, 0x00, 0x00, 0xff, 0xff, 0x1f, 0x17, 0x6a, 0x6f, 0xc0, 0x14, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/common.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/common.pb.go new file mode 100644 index 0000000..72fe729 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/common.pb.go @@ -0,0 +1,80 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/v2/common.proto + +package admin // import "google.golang.org/genproto/googleapis/bigtable/admin/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Storage media types for persisting Bigtable data. +type StorageType int32 + +const ( + // The user did not specify a storage type. + StorageType_STORAGE_TYPE_UNSPECIFIED StorageType = 0 + // Flash (SSD) storage should be used. + StorageType_SSD StorageType = 1 + // Magnetic drive (HDD) storage should be used. 
+ StorageType_HDD StorageType = 2 +) + +var StorageType_name = map[int32]string{ + 0: "STORAGE_TYPE_UNSPECIFIED", + 1: "SSD", + 2: "HDD", +} +var StorageType_value = map[string]int32{ + "STORAGE_TYPE_UNSPECIFIED": 0, + "SSD": 1, + "HDD": 2, +} + +func (x StorageType) String() string { + return proto.EnumName(StorageType_name, int32(x)) +} +func (StorageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_0e01de0fdbb848ad, []int{0} +} + +func init() { + proto.RegisterEnum("google.bigtable.admin.v2.StorageType", StorageType_name, StorageType_value) +} + +func init() { + proto.RegisterFile("google/bigtable/admin/v2/common.proto", fileDescriptor_common_0e01de0fdbb848ad) +} + +var fileDescriptor_common_0e01de0fdbb848ad = []byte{ + // 270 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0xd0, 0xcf, 0x4b, 0xc3, 0x30, + 0x14, 0x07, 0x70, 0x3b, 0x41, 0x21, 0xbb, 0x94, 0x9e, 0xc6, 0x28, 0x7a, 0xf2, 0xe2, 0x21, 0x81, + 0x7a, 0x94, 0x1d, 0xd6, 0x1f, 0xce, 0x5d, 0xb4, 0x98, 0x3a, 0x50, 0x0a, 0x23, 0xdd, 0x62, 0x08, + 0x34, 0x79, 0xa1, 0xcd, 0x06, 0xfe, 0x4b, 0x1e, 0xfc, 0x43, 0xfc, 0xab, 0x64, 0x49, 0x7b, 0x12, + 0x6f, 0x2f, 0xbc, 0xcf, 0xcb, 0xf7, 0x25, 0xe8, 0x46, 0x00, 0x88, 0x96, 0x93, 0x46, 0x0a, 0xcb, + 0x9a, 0x96, 0x13, 0xb6, 0x57, 0x52, 0x93, 0x63, 0x42, 0x76, 0xa0, 0x14, 0x68, 0x6c, 0x3a, 0xb0, + 0x10, 0xcd, 0x3c, 0xc3, 0x23, 0xc3, 0x8e, 0xe1, 0x63, 0x32, 0x8f, 0x87, 0x0b, 0x98, 0x91, 0x84, + 0x69, 0x0d, 0x96, 0x59, 0x09, 0xba, 0xf7, 0x73, 0xf3, 0xeb, 0xa1, 0xeb, 0x4e, 0xcd, 0xe1, 0x83, + 0x58, 0xa9, 0x78, 0x6f, 0x99, 0x32, 0x1e, 0xdc, 0x2e, 0xd0, 0x94, 0x5a, 0xe8, 0x98, 0xe0, 0xd5, + 0xa7, 0xe1, 0x51, 0x8c, 0x66, 0xb4, 0x7a, 0x7e, 0x59, 0xae, 0x8a, 0x6d, 0xf5, 0x56, 0x16, 0xdb, + 0xd7, 0x27, 0x5a, 0x16, 0xd9, 0xfa, 0x61, 0x5d, 0xe4, 0xe1, 0x59, 0x74, 0x89, 0xce, 0x29, 0xcd, + 0xc3, 0xe0, 0x54, 0x3c, 0xe6, 0x79, 0x38, 0x49, 0xbf, 0x03, 0x14, 0xef, 0x40, 0xe1, 0xff, 0xd6, + 0x4b, 0xa7, 0x99, 0x7b, 0x46, 0x79, 0x0a, 0x2b, 0x83, 0xf7, 0xc5, 0x00, 0x05, 0xb4, 0x4c, 0x0b, + 0x0c, 0x9d, 0x20, 0x82, 0x6b, 0xb7, 0x0a, 0xf1, 0x2d, 0x66, 0x64, 0xff, 0xf7, 0x37, 0xee, 0x5d, + 0xf1, 0x35, 0xb9, 0x5a, 0xf9, 0xf9, 0xac, 0x85, 0xc3, 0x1e, 0xa7, 0x63, 0xdc, 0xd2, 0xc5, 0x6d, + 0x92, 0x9f, 0x11, 0xd4, 0x0e, 0xd4, 0x23, 0xa8, 0x1d, 0xa8, 0x37, 0x49, 0x73, 0xe1, 0xb2, 0xee, + 0x7e, 0x03, 0x00, 0x00, 0xff, 0xff, 0xaf, 0x9e, 0x61, 0x6a, 0x78, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/instance.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/instance.pb.go new file mode 100644 index 0000000..b5e936c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/instance.pb.go @@ -0,0 +1,659 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/v2/instance.proto + +package admin // import "google.golang.org/genproto/googleapis/bigtable/admin/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible states of an instance. +type Instance_State int32 + +const ( + // The state of the instance could not be determined. + Instance_STATE_NOT_KNOWN Instance_State = 0 + // The instance has been successfully created and can serve requests + // to its tables. + Instance_READY Instance_State = 1 + // The instance is currently being created, and may be destroyed + // if the creation process encounters an error. + Instance_CREATING Instance_State = 2 +) + +var Instance_State_name = map[int32]string{ + 0: "STATE_NOT_KNOWN", + 1: "READY", + 2: "CREATING", +} +var Instance_State_value = map[string]int32{ + "STATE_NOT_KNOWN": 0, + "READY": 1, + "CREATING": 2, +} + +func (x Instance_State) String() string { + return proto.EnumName(Instance_State_name, int32(x)) +} +func (Instance_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{0, 0} +} + +// The type of the instance. +type Instance_Type int32 + +const ( + // The type of the instance is unspecified. If set when creating an + // instance, a `PRODUCTION` instance will be created. If set when updating + // an instance, the type will be left unchanged. + Instance_TYPE_UNSPECIFIED Instance_Type = 0 + // An instance meant for production use. `serve_nodes` must be set + // on the cluster. + Instance_PRODUCTION Instance_Type = 1 + // The instance is meant for development and testing purposes only; it has + // no performance or uptime guarantees and is not covered by SLA. + // After a development instance is created, it can be upgraded by + // updating the instance to type `PRODUCTION`. An instance created + // as a production instance cannot be changed to a development instance. + // When creating a development instance, `serve_nodes` on the cluster must + // not be set. + Instance_DEVELOPMENT Instance_Type = 2 +) + +var Instance_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "PRODUCTION", + 2: "DEVELOPMENT", +} +var Instance_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "PRODUCTION": 1, + "DEVELOPMENT": 2, +} + +func (x Instance_Type) String() string { + return proto.EnumName(Instance_Type_name, int32(x)) +} +func (Instance_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{0, 1} +} + +// Possible states of a cluster. +type Cluster_State int32 + +const ( + // The state of the cluster could not be determined. + Cluster_STATE_NOT_KNOWN Cluster_State = 0 + // The cluster has been successfully created and is ready to serve requests. + Cluster_READY Cluster_State = 1 + // The cluster is currently being created, and may be destroyed + // if the creation process encounters an error. + // A cluster may not be able to serve requests while being created. + Cluster_CREATING Cluster_State = 2 + // The cluster is currently being resized, and may revert to its previous + // node count if the process encounters an error. + // A cluster is still capable of serving requests while being resized, + // but may exhibit performance as if its number of allocated nodes is + // between the starting and requested states. + Cluster_RESIZING Cluster_State = 3 + // The cluster has no backing nodes. The data (tables) still + // exist, but no operations can be performed on the cluster. 
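As a side note, each generated enum carries name/value maps and a String() method backed by proto.EnumName, which is handy when logging state; a small sketch (btadmin again stands for the assumed import alias of this package):

    fmt.Println(btadmin.Instance_READY)           // prints "READY"
    fmt.Println(btadmin.StorageType_SSD)          // prints "SSD"
    v := btadmin.Instance_State_value["CREATING"] // int32(2)
    n := btadmin.Instance_State_name[v]           // "CREATING"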
+ Cluster_DISABLED Cluster_State = 4 +) + +var Cluster_State_name = map[int32]string{ + 0: "STATE_NOT_KNOWN", + 1: "READY", + 2: "CREATING", + 3: "RESIZING", + 4: "DISABLED", +} +var Cluster_State_value = map[string]int32{ + "STATE_NOT_KNOWN": 0, + "READY": 1, + "CREATING": 2, + "RESIZING": 3, + "DISABLED": 4, +} + +func (x Cluster_State) String() string { + return proto.EnumName(Cluster_State_name, int32(x)) +} +func (Cluster_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{1, 0} +} + +// A collection of Bigtable [Tables][google.bigtable.admin.v2.Table] and +// the resources that serve them. +// All tables in an instance are served from a single +// [Cluster][google.bigtable.admin.v2.Cluster]. +type Instance struct { + // (`OutputOnly`) + // The unique name of the instance. Values are of the form + // `projects//instances/[a-z][a-z0-9\\-]+[a-z0-9]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The descriptive name for this instance as it appears in UIs. + // Can be changed at any time, but should be kept globally unique + // to avoid confusion. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // (`OutputOnly`) + // The current state of the instance. + State Instance_State `protobuf:"varint,3,opt,name=state,proto3,enum=google.bigtable.admin.v2.Instance_State" json:"state,omitempty"` + // The type of the instance. Defaults to `PRODUCTION`. + Type Instance_Type `protobuf:"varint,4,opt,name=type,proto3,enum=google.bigtable.admin.v2.Instance_Type" json:"type,omitempty"` + // Labels are a flexible and lightweight mechanism for organizing cloud + // resources into groups that reflect a customer's organizational needs and + // deployment strategies. They can be used to filter resources and aggregate + // metrics. + // + // * Label keys must be between 1 and 63 characters long and must conform to + // the regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`. + // * Label values must be between 0 and 63 characters long and must conform to + // the regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`. + // * No more than 64 labels can be associated with a given resource. + // * Keys and values must both be under 128 bytes. 
+ Labels map[string]string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instance) Reset() { *m = Instance{} } +func (m *Instance) String() string { return proto.CompactTextString(m) } +func (*Instance) ProtoMessage() {} +func (*Instance) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{0} +} +func (m *Instance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instance.Unmarshal(m, b) +} +func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instance.Marshal(b, m, deterministic) +} +func (dst *Instance) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instance.Merge(dst, src) +} +func (m *Instance) XXX_Size() int { + return xxx_messageInfo_Instance.Size(m) +} +func (m *Instance) XXX_DiscardUnknown() { + xxx_messageInfo_Instance.DiscardUnknown(m) +} + +var xxx_messageInfo_Instance proto.InternalMessageInfo + +func (m *Instance) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instance) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Instance) GetState() Instance_State { + if m != nil { + return m.State + } + return Instance_STATE_NOT_KNOWN +} + +func (m *Instance) GetType() Instance_Type { + if m != nil { + return m.Type + } + return Instance_TYPE_UNSPECIFIED +} + +func (m *Instance) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// A resizable group of nodes in a particular cloud location, capable +// of serving all [Tables][google.bigtable.admin.v2.Table] in the parent +// [Instance][google.bigtable.admin.v2.Instance]. +type Cluster struct { + // (`OutputOnly`) + // The unique name of the cluster. Values are of the form + // `projects//instances//clusters/[a-z][-a-z0-9]*`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // (`CreationOnly`) + // The location where this cluster's nodes and storage reside. For best + // performance, clients should be located as close as possible to this + // cluster. Currently only zones are supported, so values should be of the + // form `projects//locations/`. + Location string `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // (`OutputOnly`) + // The current state of the cluster. + State Cluster_State `protobuf:"varint,3,opt,name=state,proto3,enum=google.bigtable.admin.v2.Cluster_State" json:"state,omitempty"` + // The number of nodes allocated to this cluster. More nodes enable higher + // throughput and more consistent performance. + ServeNodes int32 `protobuf:"varint,4,opt,name=serve_nodes,json=serveNodes,proto3" json:"serve_nodes,omitempty"` + // (`CreationOnly`) + // The type of storage used by this cluster to serve its + // parent instance's tables, unless explicitly overridden. 
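The generated getters are nil-safe (each checks m != nil), so callers can read fields of a possibly-nil message without panicking; a brief illustration under the same btadmin alias assumption:

    var inst *btadmin.Instance        // deliberately nil
    _ = inst.GetDisplayName()         // "" — no panic
    _ = inst.GetLabels()              // nil map
    state := inst.GetState()          // Instance_STATE_NOT_KNOWN, the zero value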
+ DefaultStorageType StorageType `protobuf:"varint,5,opt,name=default_storage_type,json=defaultStorageType,proto3,enum=google.bigtable.admin.v2.StorageType" json:"default_storage_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{1} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Cluster) GetState() Cluster_State { + if m != nil { + return m.State + } + return Cluster_STATE_NOT_KNOWN +} + +func (m *Cluster) GetServeNodes() int32 { + if m != nil { + return m.ServeNodes + } + return 0 +} + +func (m *Cluster) GetDefaultStorageType() StorageType { + if m != nil { + return m.DefaultStorageType + } + return StorageType_STORAGE_TYPE_UNSPECIFIED +} + +// A configuration object describing how Cloud Bigtable should treat traffic +// from a particular end user application. +type AppProfile struct { + // (`OutputOnly`) + // The unique name of the app profile. Values are of the form + // `projects//instances//appProfiles/[_a-zA-Z0-9][-_.a-zA-Z0-9]*`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Strongly validated etag for optimistic concurrency control. Preserve the + // value returned from `GetAppProfile` when calling `UpdateAppProfile` to + // fail the request if there has been a modification in the mean time. The + // `update_mask` of the request need not include `etag` for this protection + // to apply. + // See [Wikipedia](https://en.wikipedia.org/wiki/HTTP_ETag) and + // [RFC 7232](https://tools.ietf.org/html/rfc7232#section-2.3) for more + // details. + Etag string `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + // Optional long form description of the use case for this AppProfile. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The routing policy for all read/write requests which use this app profile. + // A value must be explicitly set. 
+ // + // Types that are valid to be assigned to RoutingPolicy: + // *AppProfile_MultiClusterRoutingUseAny_ + // *AppProfile_SingleClusterRouting_ + RoutingPolicy isAppProfile_RoutingPolicy `protobuf_oneof:"routing_policy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppProfile) Reset() { *m = AppProfile{} } +func (m *AppProfile) String() string { return proto.CompactTextString(m) } +func (*AppProfile) ProtoMessage() {} +func (*AppProfile) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{2} +} +func (m *AppProfile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppProfile.Unmarshal(m, b) +} +func (m *AppProfile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppProfile.Marshal(b, m, deterministic) +} +func (dst *AppProfile) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppProfile.Merge(dst, src) +} +func (m *AppProfile) XXX_Size() int { + return xxx_messageInfo_AppProfile.Size(m) +} +func (m *AppProfile) XXX_DiscardUnknown() { + xxx_messageInfo_AppProfile.DiscardUnknown(m) +} + +var xxx_messageInfo_AppProfile proto.InternalMessageInfo + +func (m *AppProfile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AppProfile) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *AppProfile) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isAppProfile_RoutingPolicy interface { + isAppProfile_RoutingPolicy() +} + +type AppProfile_MultiClusterRoutingUseAny_ struct { + MultiClusterRoutingUseAny *AppProfile_MultiClusterRoutingUseAny `protobuf:"bytes,5,opt,name=multi_cluster_routing_use_any,json=multiClusterRoutingUseAny,proto3,oneof"` +} + +type AppProfile_SingleClusterRouting_ struct { + SingleClusterRouting *AppProfile_SingleClusterRouting `protobuf:"bytes,6,opt,name=single_cluster_routing,json=singleClusterRouting,proto3,oneof"` +} + +func (*AppProfile_MultiClusterRoutingUseAny_) isAppProfile_RoutingPolicy() {} + +func (*AppProfile_SingleClusterRouting_) isAppProfile_RoutingPolicy() {} + +func (m *AppProfile) GetRoutingPolicy() isAppProfile_RoutingPolicy { + if m != nil { + return m.RoutingPolicy + } + return nil +} + +func (m *AppProfile) GetMultiClusterRoutingUseAny() *AppProfile_MultiClusterRoutingUseAny { + if x, ok := m.GetRoutingPolicy().(*AppProfile_MultiClusterRoutingUseAny_); ok { + return x.MultiClusterRoutingUseAny + } + return nil +} + +func (m *AppProfile) GetSingleClusterRouting() *AppProfile_SingleClusterRouting { + if x, ok := m.GetRoutingPolicy().(*AppProfile_SingleClusterRouting_); ok { + return x.SingleClusterRouting + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AppProfile) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AppProfile_OneofMarshaler, _AppProfile_OneofUnmarshaler, _AppProfile_OneofSizer, []interface{}{ + (*AppProfile_MultiClusterRoutingUseAny_)(nil), + (*AppProfile_SingleClusterRouting_)(nil), + } +} + +func _AppProfile_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AppProfile) + // routing_policy + switch x := m.RoutingPolicy.(type) { + case *AppProfile_MultiClusterRoutingUseAny_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MultiClusterRoutingUseAny); err != nil { + return err + } + case *AppProfile_SingleClusterRouting_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SingleClusterRouting); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AppProfile.RoutingPolicy has unexpected type %T", x) + } + return nil +} + +func _AppProfile_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AppProfile) + switch tag { + case 5: // routing_policy.multi_cluster_routing_use_any + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppProfile_MultiClusterRoutingUseAny) + err := b.DecodeMessage(msg) + m.RoutingPolicy = &AppProfile_MultiClusterRoutingUseAny_{msg} + return true, err + case 6: // routing_policy.single_cluster_routing + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppProfile_SingleClusterRouting) + err := b.DecodeMessage(msg) + m.RoutingPolicy = &AppProfile_SingleClusterRouting_{msg} + return true, err + default: + return false, nil + } +} + +func _AppProfile_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AppProfile) + // routing_policy + switch x := m.RoutingPolicy.(type) { + case *AppProfile_MultiClusterRoutingUseAny_: + s := proto.Size(x.MultiClusterRoutingUseAny) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AppProfile_SingleClusterRouting_: + s := proto.Size(x.SingleClusterRouting) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Read/write requests may be routed to any cluster in the instance, and will +// fail over to another cluster in the event of transient errors or delays. +// Choosing this option sacrifices read-your-writes consistency to improve +// availability. 
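To make the routing_policy oneof concrete: a caller selects a policy by assigning one of the generated wrapper types to RoutingPolicy. A sketch assuming single-cluster routing is wanted (identifiers are illustrative):

    ap := &btadmin.AppProfile{
        Description: "pin traffic to one cluster", // hypothetical description
        RoutingPolicy: &btadmin.AppProfile_SingleClusterRouting_{
            SingleClusterRouting: &btadmin.AppProfile_SingleClusterRouting{
                ClusterId:                "my-cluster",
                AllowTransactionalWrites: true,
            },
        },
    }
    // GetSingleClusterRouting returns nil unless that branch of the oneof is set.
    if scr := ap.GetSingleClusterRouting(); scr != nil {
        fmt.Println("routing to", scr.GetClusterId())
    }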
+type AppProfile_MultiClusterRoutingUseAny struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppProfile_MultiClusterRoutingUseAny) Reset() { *m = AppProfile_MultiClusterRoutingUseAny{} } +func (m *AppProfile_MultiClusterRoutingUseAny) String() string { return proto.CompactTextString(m) } +func (*AppProfile_MultiClusterRoutingUseAny) ProtoMessage() {} +func (*AppProfile_MultiClusterRoutingUseAny) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{2, 0} +} +func (m *AppProfile_MultiClusterRoutingUseAny) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny.Unmarshal(m, b) +} +func (m *AppProfile_MultiClusterRoutingUseAny) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny.Marshal(b, m, deterministic) +} +func (dst *AppProfile_MultiClusterRoutingUseAny) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny.Merge(dst, src) +} +func (m *AppProfile_MultiClusterRoutingUseAny) XXX_Size() int { + return xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny.Size(m) +} +func (m *AppProfile_MultiClusterRoutingUseAny) XXX_DiscardUnknown() { + xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny.DiscardUnknown(m) +} + +var xxx_messageInfo_AppProfile_MultiClusterRoutingUseAny proto.InternalMessageInfo + +// Unconditionally routes all read/write requests to a specific cluster. +// This option preserves read-your-writes consistency, but does not improve +// availability. +type AppProfile_SingleClusterRouting struct { + // The cluster to which read/write requests should be routed. + ClusterId string `protobuf:"bytes,1,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // Whether or not `CheckAndMutateRow` and `ReadModifyWriteRow` requests are + // allowed by this app profile. It is unsafe to send these requests to + // the same table/row/column in multiple clusters. 
+ AllowTransactionalWrites bool `protobuf:"varint,2,opt,name=allow_transactional_writes,json=allowTransactionalWrites,proto3" json:"allow_transactional_writes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppProfile_SingleClusterRouting) Reset() { *m = AppProfile_SingleClusterRouting{} } +func (m *AppProfile_SingleClusterRouting) String() string { return proto.CompactTextString(m) } +func (*AppProfile_SingleClusterRouting) ProtoMessage() {} +func (*AppProfile_SingleClusterRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_instance_e9d126e00e9552ae, []int{2, 1} +} +func (m *AppProfile_SingleClusterRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppProfile_SingleClusterRouting.Unmarshal(m, b) +} +func (m *AppProfile_SingleClusterRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppProfile_SingleClusterRouting.Marshal(b, m, deterministic) +} +func (dst *AppProfile_SingleClusterRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppProfile_SingleClusterRouting.Merge(dst, src) +} +func (m *AppProfile_SingleClusterRouting) XXX_Size() int { + return xxx_messageInfo_AppProfile_SingleClusterRouting.Size(m) +} +func (m *AppProfile_SingleClusterRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppProfile_SingleClusterRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppProfile_SingleClusterRouting proto.InternalMessageInfo + +func (m *AppProfile_SingleClusterRouting) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *AppProfile_SingleClusterRouting) GetAllowTransactionalWrites() bool { + if m != nil { + return m.AllowTransactionalWrites + } + return false +} + +func init() { + proto.RegisterType((*Instance)(nil), "google.bigtable.admin.v2.Instance") + proto.RegisterMapType((map[string]string)(nil), "google.bigtable.admin.v2.Instance.LabelsEntry") + proto.RegisterType((*Cluster)(nil), "google.bigtable.admin.v2.Cluster") + proto.RegisterType((*AppProfile)(nil), "google.bigtable.admin.v2.AppProfile") + proto.RegisterType((*AppProfile_MultiClusterRoutingUseAny)(nil), "google.bigtable.admin.v2.AppProfile.MultiClusterRoutingUseAny") + proto.RegisterType((*AppProfile_SingleClusterRouting)(nil), "google.bigtable.admin.v2.AppProfile.SingleClusterRouting") + proto.RegisterEnum("google.bigtable.admin.v2.Instance_State", Instance_State_name, Instance_State_value) + proto.RegisterEnum("google.bigtable.admin.v2.Instance_Type", Instance_Type_name, Instance_Type_value) + proto.RegisterEnum("google.bigtable.admin.v2.Cluster_State", Cluster_State_name, Cluster_State_value) +} + +func init() { + proto.RegisterFile("google/bigtable/admin/v2/instance.proto", fileDescriptor_instance_e9d126e00e9552ae) +} + +var fileDescriptor_instance_e9d126e00e9552ae = []byte{ + // 765 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0x8e, 0xf3, 0xb3, 0x64, 0x4f, 0xca, 0xd6, 0x1a, 0x22, 0x94, 0x86, 0x16, 0x42, 0xa4, 0xaa, + 0xb9, 0x72, 0xa4, 0x20, 0x24, 0x4a, 0xd9, 0x4a, 0xf9, 0x71, 0x5b, 0x8b, 0xad, 0x13, 0x1c, 0xef, + 0xae, 0xba, 0x8a, 0x64, 0x4d, 0xec, 0x59, 0xcb, 0x62, 0x32, 0x63, 0x3c, 0x93, 0xac, 0x7c, 0xcb, + 0xe3, 0x70, 0xc5, 0x1d, 0x4f, 0xc0, 0x0d, 0x8f, 0xc3, 0x13, 0x20, 0x8f, 0x6d, 0x76, 0x17, 0x12, + 0xb4, 0xe2, 0x6e, 0xce, 0x39, 0xdf, 0x77, 0xbe, 0x33, 0xdf, 0xb1, 0x07, 0x5e, 0x84, 0x9c, 0x87, + 0x94, 0x0c, 0xd7, 
0x51, 0x28, 0xf1, 0x9a, 0x92, 0x21, 0x0e, 0x36, 0x11, 0x1b, 0xee, 0x46, 0xc3, + 0x88, 0x09, 0x89, 0x99, 0x4f, 0x8c, 0x38, 0xe1, 0x92, 0xa3, 0x4e, 0x0e, 0x34, 0x4a, 0xa0, 0xa1, + 0x80, 0xc6, 0x6e, 0xd4, 0x7d, 0x5a, 0xb4, 0xc0, 0x71, 0x34, 0xc4, 0x8c, 0x71, 0x89, 0x65, 0xc4, + 0x99, 0xc8, 0x79, 0xdd, 0xe7, 0x07, 0x05, 0x7c, 0xbe, 0xd9, 0x70, 0x96, 0xc3, 0xfa, 0xbf, 0xd5, + 0xa0, 0x69, 0x15, 0x8a, 0x08, 0x41, 0x9d, 0xe1, 0x0d, 0xe9, 0x68, 0x3d, 0x6d, 0x70, 0xec, 0xa8, + 0x33, 0xfa, 0x12, 0x1e, 0x05, 0x91, 0x88, 0x29, 0x4e, 0x3d, 0x55, 0xab, 0xaa, 0x5a, 0xab, 0xc8, + 0xd9, 0x19, 0xe4, 0x35, 0x34, 0x84, 0xc4, 0x92, 0x74, 0x6a, 0x3d, 0x6d, 0x70, 0x32, 0x1a, 0x18, + 0x87, 0x46, 0x36, 0x4a, 0x25, 0x63, 0x99, 0xe1, 0x9d, 0x9c, 0x86, 0x5e, 0x41, 0x5d, 0xa6, 0x31, + 0xe9, 0xd4, 0x15, 0xfd, 0xc5, 0x03, 0xe8, 0x6e, 0x1a, 0x13, 0x47, 0x91, 0xd0, 0x1b, 0x38, 0xa2, + 0x78, 0x4d, 0xa8, 0xe8, 0x34, 0x7a, 0xb5, 0x41, 0x6b, 0x64, 0x3c, 0x80, 0x7e, 0xa6, 0x08, 0x26, + 0x93, 0x49, 0xea, 0x14, 0xec, 0xee, 0x4b, 0x68, 0xdd, 0x49, 0x23, 0x1d, 0x6a, 0x3f, 0x92, 0xb4, + 0x70, 0x22, 0x3b, 0xa2, 0x36, 0x34, 0x76, 0x98, 0x6e, 0x4b, 0x07, 0xf2, 0xe0, 0xdb, 0xea, 0x37, + 0x5a, 0xff, 0x6b, 0x68, 0xa8, 0xfb, 0xa0, 0x4f, 0xe0, 0xf1, 0xd2, 0x1d, 0xbb, 0xa6, 0x67, 0xcf, + 0x5d, 0xef, 0x7b, 0x7b, 0x7e, 0x69, 0xeb, 0x15, 0x74, 0x0c, 0x0d, 0xc7, 0x1c, 0xcf, 0x3e, 0xe8, + 0x1a, 0x7a, 0x04, 0xcd, 0xa9, 0x63, 0x8e, 0x5d, 0xcb, 0x7e, 0xab, 0x57, 0xfb, 0xa7, 0x50, 0xcf, + 0xee, 0x81, 0xda, 0xa0, 0xbb, 0x1f, 0x16, 0xa6, 0x77, 0x6e, 0x2f, 0x17, 0xe6, 0xd4, 0x7a, 0x63, + 0x99, 0x33, 0xbd, 0x82, 0x4e, 0x00, 0x16, 0xce, 0x7c, 0x76, 0x3e, 0x75, 0xad, 0xb9, 0xad, 0x6b, + 0xe8, 0x31, 0xb4, 0x66, 0xe6, 0x85, 0x79, 0x36, 0x5f, 0xbc, 0x37, 0x6d, 0x57, 0xaf, 0xf6, 0x7f, + 0xaf, 0xc2, 0x47, 0x53, 0xba, 0x15, 0x92, 0x24, 0x7b, 0x17, 0xd7, 0x85, 0x26, 0xe5, 0xbe, 0xfa, + 0x26, 0x8a, 0x91, 0xff, 0x8e, 0xd1, 0xe9, 0xfd, 0x8d, 0xfd, 0x87, 0xe5, 0x85, 0xc2, 0xfd, 0x85, + 0x7d, 0x01, 0x2d, 0x41, 0x92, 0x1d, 0xf1, 0x18, 0x0f, 0x88, 0x50, 0x7b, 0x6b, 0x38, 0xa0, 0x52, + 0x76, 0x96, 0x41, 0x97, 0xd0, 0x0e, 0xc8, 0x35, 0xde, 0x52, 0xe9, 0x09, 0xc9, 0x13, 0x1c, 0x12, + 0x4f, 0x6d, 0xb8, 0xa1, 0xe4, 0x9e, 0x1f, 0x96, 0x5b, 0xe6, 0x68, 0xb5, 0x5f, 0x54, 0xb4, 0xb8, + 0x93, 0xeb, 0xff, 0xf0, 0xbf, 0xac, 0xce, 0x22, 0xc7, 0x5c, 0x5a, 0x57, 0x59, 0x54, 0xcb, 0xa2, + 0x99, 0xb5, 0x1c, 0x4f, 0xce, 0xcc, 0x99, 0x5e, 0xef, 0xff, 0x59, 0x03, 0x18, 0xc7, 0xf1, 0x22, + 0xe1, 0xd7, 0x11, 0xdd, 0xff, 0x0f, 0x20, 0xa8, 0x13, 0x89, 0xc3, 0xc2, 0x46, 0x75, 0x46, 0x3d, + 0x68, 0x05, 0x44, 0xf8, 0x49, 0x14, 0x2b, 0x87, 0x6b, 0xc5, 0x6f, 0x71, 0x9b, 0x42, 0x3f, 0x6b, + 0xf0, 0x6c, 0xb3, 0xa5, 0x32, 0xf2, 0xfc, 0xdc, 0x44, 0x2f, 0xe1, 0x5b, 0x19, 0xb1, 0xd0, 0xdb, + 0x0a, 0xe2, 0x61, 0x96, 0x2a, 0x3b, 0x5a, 0xa3, 0xd7, 0x87, 0xed, 0xb8, 0x9d, 0xcb, 0x78, 0x9f, + 0x75, 0x2a, 0xb6, 0xe1, 0xe4, 0x7d, 0xce, 0x05, 0x19, 0xb3, 0xf4, 0x5d, 0xc5, 0x79, 0xb2, 0x39, + 0x54, 0x44, 0x3f, 0xc1, 0xa7, 0x22, 0x62, 0x21, 0x25, 0xff, 0x1c, 0xa2, 0x73, 0xa4, 0xc4, 0x5f, + 0x3e, 0x48, 0x7c, 0xa9, 0x5a, 0xdc, 0x17, 0x78, 0x57, 0x71, 0xda, 0x62, 0x4f, 0xbe, 0xfb, 0x19, + 0x3c, 0x39, 0x38, 0x6c, 0x57, 0x40, 0x7b, 0x5f, 0x33, 0xf4, 0x0c, 0xa0, 0x1c, 0x30, 0x0a, 0x0a, + 0xf3, 0x8f, 0x8b, 0x8c, 0x15, 0xa0, 0xef, 0xa0, 0x8b, 0x29, 0xe5, 0x37, 0x9e, 0x4c, 0x30, 0x13, + 0xd8, 0xcf, 0x0c, 0xc6, 0xd4, 0xbb, 0x49, 0x22, 0x49, 0x84, 0xda, 0x4b, 0xd3, 0xe9, 0x28, 0x84, + 0x7b, 0x17, 0x70, 0xa9, 0xea, 0x13, 0x1d, 0x4e, 0x4a, 0xeb, 0x63, 0x4e, 0x23, 0x3f, 0x9d, 0xfc, + 0xaa, 0xc1, 0x53, 0x9f, 0x6f, 0x0e, 0x5e, 
0x7e, 0xf2, 0x71, 0xf9, 0x58, 0x2c, 0xb2, 0x67, 0x72, + 0xa1, 0x5d, 0x9d, 0x16, 0xd0, 0x90, 0x53, 0xcc, 0x42, 0x83, 0x27, 0xe1, 0x30, 0x24, 0x4c, 0x3d, + 0xa2, 0xc3, 0xbc, 0x84, 0xe3, 0x48, 0xfc, 0xfb, 0xb9, 0x7d, 0xa5, 0x0e, 0xbf, 0x54, 0x3f, 0x7f, + 0x9b, 0xf3, 0xa7, 0x94, 0x6f, 0x03, 0x63, 0x52, 0x0a, 0x8e, 0x95, 0xe0, 0xc5, 0xe8, 0x8f, 0x12, + 0xb0, 0x52, 0x80, 0x55, 0x09, 0x58, 0x29, 0xc0, 0xea, 0x62, 0xb4, 0x3e, 0x52, 0x5a, 0x5f, 0xfd, + 0x15, 0x00, 0x00, 0xff, 0xff, 0xd9, 0x04, 0x3d, 0xfc, 0x3a, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/table.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/table.pb.go new file mode 100644 index 0000000..6dd38e1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/admin/v2/table.pb.go @@ -0,0 +1,820 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/admin/v2/table.proto + +package admin // import "google.golang.org/genproto/googleapis/bigtable/admin/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible timestamp granularities to use when keeping multiple versions +// of data in a table. +type Table_TimestampGranularity int32 + +const ( + // The user did not specify a granularity. Should not be returned. + // When specified during table creation, MILLIS will be used. + Table_TIMESTAMP_GRANULARITY_UNSPECIFIED Table_TimestampGranularity = 0 + // The table keeps data versioned at a granularity of 1ms. + Table_MILLIS Table_TimestampGranularity = 1 +) + +var Table_TimestampGranularity_name = map[int32]string{ + 0: "TIMESTAMP_GRANULARITY_UNSPECIFIED", + 1: "MILLIS", +} +var Table_TimestampGranularity_value = map[string]int32{ + "TIMESTAMP_GRANULARITY_UNSPECIFIED": 0, + "MILLIS": 1, +} + +func (x Table_TimestampGranularity) String() string { + return proto.EnumName(Table_TimestampGranularity_name, int32(x)) +} +func (Table_TimestampGranularity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{0, 0} +} + +// Defines a view over a table's fields. +type Table_View int32 + +const ( + // Uses the default view for each method as documented in its request. + Table_VIEW_UNSPECIFIED Table_View = 0 + // Only populates `name`. + Table_NAME_ONLY Table_View = 1 + // Only populates `name` and fields related to the table's schema. + Table_SCHEMA_VIEW Table_View = 2 + // Only populates `name` and fields related to the table's + // replication state. + Table_REPLICATION_VIEW Table_View = 3 + // Populates all fields. 
+ Table_FULL Table_View = 4 +) + +var Table_View_name = map[int32]string{ + 0: "VIEW_UNSPECIFIED", + 1: "NAME_ONLY", + 2: "SCHEMA_VIEW", + 3: "REPLICATION_VIEW", + 4: "FULL", +} +var Table_View_value = map[string]int32{ + "VIEW_UNSPECIFIED": 0, + "NAME_ONLY": 1, + "SCHEMA_VIEW": 2, + "REPLICATION_VIEW": 3, + "FULL": 4, +} + +func (x Table_View) String() string { + return proto.EnumName(Table_View_name, int32(x)) +} +func (Table_View) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{0, 1} +} + +// Table replication states. +type Table_ClusterState_ReplicationState int32 + +const ( + // The replication state of the table is unknown in this cluster. + Table_ClusterState_STATE_NOT_KNOWN Table_ClusterState_ReplicationState = 0 + // The cluster was recently created, and the table must finish copying + // over pre-existing data from other clusters before it can begin + // receiving live replication updates and serving Data API requests. + Table_ClusterState_INITIALIZING Table_ClusterState_ReplicationState = 1 + // The table is temporarily unable to serve Data API requests from this + // cluster due to planned internal maintenance. + Table_ClusterState_PLANNED_MAINTENANCE Table_ClusterState_ReplicationState = 2 + // The table is temporarily unable to serve Data API requests from this + // cluster due to unplanned or emergency maintenance. + Table_ClusterState_UNPLANNED_MAINTENANCE Table_ClusterState_ReplicationState = 3 + // The table can serve Data API requests from this cluster. Depending on + // replication delay, reads may not immediately reflect the state of the + // table in other clusters. + Table_ClusterState_READY Table_ClusterState_ReplicationState = 4 +) + +var Table_ClusterState_ReplicationState_name = map[int32]string{ + 0: "STATE_NOT_KNOWN", + 1: "INITIALIZING", + 2: "PLANNED_MAINTENANCE", + 3: "UNPLANNED_MAINTENANCE", + 4: "READY", +} +var Table_ClusterState_ReplicationState_value = map[string]int32{ + "STATE_NOT_KNOWN": 0, + "INITIALIZING": 1, + "PLANNED_MAINTENANCE": 2, + "UNPLANNED_MAINTENANCE": 3, + "READY": 4, +} + +func (x Table_ClusterState_ReplicationState) String() string { + return proto.EnumName(Table_ClusterState_ReplicationState_name, int32(x)) +} +func (Table_ClusterState_ReplicationState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{0, 0, 0} +} + +// Possible states of a snapshot. +type Snapshot_State int32 + +const ( + // The state of the snapshot could not be determined. + Snapshot_STATE_NOT_KNOWN Snapshot_State = 0 + // The snapshot has been successfully created and can serve all requests. + Snapshot_READY Snapshot_State = 1 + // The snapshot is currently being created, and may be destroyed if the + // creation process encounters an error. A snapshot may not be restored to a + // table while it is being created. + Snapshot_CREATING Snapshot_State = 2 +) + +var Snapshot_State_name = map[int32]string{ + 0: "STATE_NOT_KNOWN", + 1: "READY", + 2: "CREATING", +} +var Snapshot_State_value = map[string]int32{ + "STATE_NOT_KNOWN": 0, + "READY": 1, + "CREATING": 2, +} + +func (x Snapshot_State) String() string { + return proto.EnumName(Snapshot_State_name, int32(x)) +} +func (Snapshot_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{3, 0} +} + +// A collection of user data indexed by row, column, and timestamp. +// Each table is served using the resources of its parent cluster. 
+type Table struct { + // (`OutputOnly`) + // The unique name of the table. Values are of the form + // `projects//instances//tables/[_a-zA-Z0-9][-_.a-zA-Z0-9]*`. + // Views: `NAME_ONLY`, `SCHEMA_VIEW`, `REPLICATION_VIEW`, `FULL` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // (`OutputOnly`) + // Map from cluster ID to per-cluster table state. + // If it could not be determined whether or not the table has data in a + // particular cluster (for example, if its zone is unavailable), then + // there will be an entry for the cluster with UNKNOWN `replication_status`. + // Views: `REPLICATION_VIEW`, `FULL` + ClusterStates map[string]*Table_ClusterState `protobuf:"bytes,2,rep,name=cluster_states,json=clusterStates,proto3" json:"cluster_states,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // (`CreationOnly`) + // The column families configured for this table, mapped by column family ID. + // Views: `SCHEMA_VIEW`, `FULL` + ColumnFamilies map[string]*ColumnFamily `protobuf:"bytes,3,rep,name=column_families,json=columnFamilies,proto3" json:"column_families,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // (`CreationOnly`) + // The granularity (i.e. `MILLIS`) at which timestamps are stored in + // this table. Timestamps not matching the granularity will be rejected. + // If unspecified at creation time, the value will be set to `MILLIS`. + // Views: `SCHEMA_VIEW`, `FULL` + Granularity Table_TimestampGranularity `protobuf:"varint,4,opt,name=granularity,proto3,enum=google.bigtable.admin.v2.Table_TimestampGranularity" json:"granularity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table) Reset() { *m = Table{} } +func (m *Table) String() string { return proto.CompactTextString(m) } +func (*Table) ProtoMessage() {} +func (*Table) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{0} +} +func (m *Table) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table.Unmarshal(m, b) +} +func (m *Table) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table.Marshal(b, m, deterministic) +} +func (dst *Table) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table.Merge(dst, src) +} +func (m *Table) XXX_Size() int { + return xxx_messageInfo_Table.Size(m) +} +func (m *Table) XXX_DiscardUnknown() { + xxx_messageInfo_Table.DiscardUnknown(m) +} + +var xxx_messageInfo_Table proto.InternalMessageInfo + +func (m *Table) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Table) GetClusterStates() map[string]*Table_ClusterState { + if m != nil { + return m.ClusterStates + } + return nil +} + +func (m *Table) GetColumnFamilies() map[string]*ColumnFamily { + if m != nil { + return m.ColumnFamilies + } + return nil +} + +func (m *Table) GetGranularity() Table_TimestampGranularity { + if m != nil { + return m.Granularity + } + return Table_TIMESTAMP_GRANULARITY_UNSPECIFIED +} + +// The state of a table's data in a particular cluster. +type Table_ClusterState struct { + // (`OutputOnly`) + // The state of replication for the table in this cluster. 
+ ReplicationState Table_ClusterState_ReplicationState `protobuf:"varint,1,opt,name=replication_state,json=replicationState,proto3,enum=google.bigtable.admin.v2.Table_ClusterState_ReplicationState" json:"replication_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table_ClusterState) Reset() { *m = Table_ClusterState{} } +func (m *Table_ClusterState) String() string { return proto.CompactTextString(m) } +func (*Table_ClusterState) ProtoMessage() {} +func (*Table_ClusterState) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{0, 0} +} +func (m *Table_ClusterState) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table_ClusterState.Unmarshal(m, b) +} +func (m *Table_ClusterState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table_ClusterState.Marshal(b, m, deterministic) +} +func (dst *Table_ClusterState) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table_ClusterState.Merge(dst, src) +} +func (m *Table_ClusterState) XXX_Size() int { + return xxx_messageInfo_Table_ClusterState.Size(m) +} +func (m *Table_ClusterState) XXX_DiscardUnknown() { + xxx_messageInfo_Table_ClusterState.DiscardUnknown(m) +} + +var xxx_messageInfo_Table_ClusterState proto.InternalMessageInfo + +func (m *Table_ClusterState) GetReplicationState() Table_ClusterState_ReplicationState { + if m != nil { + return m.ReplicationState + } + return Table_ClusterState_STATE_NOT_KNOWN +} + +// A set of columns within a table which share a common configuration. +type ColumnFamily struct { + // Garbage collection rule specified as a protobuf. + // Must serialize to at most 500 bytes. + // + // NOTE: Garbage collection executes opportunistically in the background, and + // so it's possible for reads to return a cell even if it matches the active + // GC expression for its family. + GcRule *GcRule `protobuf:"bytes,1,opt,name=gc_rule,json=gcRule,proto3" json:"gc_rule,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnFamily) Reset() { *m = ColumnFamily{} } +func (m *ColumnFamily) String() string { return proto.CompactTextString(m) } +func (*ColumnFamily) ProtoMessage() {} +func (*ColumnFamily) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{1} +} +func (m *ColumnFamily) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnFamily.Unmarshal(m, b) +} +func (m *ColumnFamily) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnFamily.Marshal(b, m, deterministic) +} +func (dst *ColumnFamily) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnFamily.Merge(dst, src) +} +func (m *ColumnFamily) XXX_Size() int { + return xxx_messageInfo_ColumnFamily.Size(m) +} +func (m *ColumnFamily) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnFamily.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnFamily proto.InternalMessageInfo + +func (m *ColumnFamily) GetGcRule() *GcRule { + if m != nil { + return m.GcRule + } + return nil +} + +// Rule for determining which cells to delete during garbage collection. +type GcRule struct { + // Garbage collection rules. 
+ // + // Types that are valid to be assigned to Rule: + // *GcRule_MaxNumVersions + // *GcRule_MaxAge + // *GcRule_Intersection_ + // *GcRule_Union_ + Rule isGcRule_Rule `protobuf_oneof:"rule"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule) Reset() { *m = GcRule{} } +func (m *GcRule) String() string { return proto.CompactTextString(m) } +func (*GcRule) ProtoMessage() {} +func (*GcRule) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{2} +} +func (m *GcRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule.Unmarshal(m, b) +} +func (m *GcRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule.Marshal(b, m, deterministic) +} +func (dst *GcRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule.Merge(dst, src) +} +func (m *GcRule) XXX_Size() int { + return xxx_messageInfo_GcRule.Size(m) +} +func (m *GcRule) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule proto.InternalMessageInfo + +type isGcRule_Rule interface { + isGcRule_Rule() +} + +type GcRule_MaxNumVersions struct { + MaxNumVersions int32 `protobuf:"varint,1,opt,name=max_num_versions,json=maxNumVersions,proto3,oneof"` +} + +type GcRule_MaxAge struct { + MaxAge *duration.Duration `protobuf:"bytes,2,opt,name=max_age,json=maxAge,proto3,oneof"` +} + +type GcRule_Intersection_ struct { + Intersection *GcRule_Intersection `protobuf:"bytes,3,opt,name=intersection,proto3,oneof"` +} + +type GcRule_Union_ struct { + Union *GcRule_Union `protobuf:"bytes,4,opt,name=union,proto3,oneof"` +} + +func (*GcRule_MaxNumVersions) isGcRule_Rule() {} + +func (*GcRule_MaxAge) isGcRule_Rule() {} + +func (*GcRule_Intersection_) isGcRule_Rule() {} + +func (*GcRule_Union_) isGcRule_Rule() {} + +func (m *GcRule) GetRule() isGcRule_Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m *GcRule) GetMaxNumVersions() int32 { + if x, ok := m.GetRule().(*GcRule_MaxNumVersions); ok { + return x.MaxNumVersions + } + return 0 +} + +func (m *GcRule) GetMaxAge() *duration.Duration { + if x, ok := m.GetRule().(*GcRule_MaxAge); ok { + return x.MaxAge + } + return nil +} + +func (m *GcRule) GetIntersection() *GcRule_Intersection { + if x, ok := m.GetRule().(*GcRule_Intersection_); ok { + return x.Intersection + } + return nil +} + +func (m *GcRule) GetUnion() *GcRule_Union { + if x, ok := m.GetRule().(*GcRule_Union_); ok { + return x.Union + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GcRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GcRule_OneofMarshaler, _GcRule_OneofUnmarshaler, _GcRule_OneofSizer, []interface{}{ + (*GcRule_MaxNumVersions)(nil), + (*GcRule_MaxAge)(nil), + (*GcRule_Intersection_)(nil), + (*GcRule_Union_)(nil), + } +} + +func _GcRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GcRule) + // rule + switch x := m.Rule.(type) { + case *GcRule_MaxNumVersions: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MaxNumVersions)) + case *GcRule_MaxAge: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MaxAge); err != nil { + return err + } + case *GcRule_Intersection_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Intersection); err != nil { + return err + } + case *GcRule_Union_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Union); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GcRule.Rule has unexpected type %T", x) + } + return nil +} + +func _GcRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GcRule) + switch tag { + case 1: // rule.max_num_versions + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Rule = &GcRule_MaxNumVersions{int32(x)} + return true, err + case 2: // rule.max_age + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_MaxAge{msg} + return true, err + case 3: // rule.intersection + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcRule_Intersection) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_Intersection_{msg} + return true, err + case 4: // rule.union + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcRule_Union) + err := b.DecodeMessage(msg) + m.Rule = &GcRule_Union_{msg} + return true, err + default: + return false, nil + } +} + +func _GcRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GcRule) + // rule + switch x := m.Rule.(type) { + case *GcRule_MaxNumVersions: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.MaxNumVersions)) + case *GcRule_MaxAge: + s := proto.Size(x.MaxAge) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GcRule_Intersection_: + s := proto.Size(x.Intersection) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GcRule_Union_: + s := proto.Size(x.Union) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A GcRule which deletes cells matching all of the given rules. +type GcRule_Intersection struct { + // Only delete cells which would be deleted by every element of `rules`. 
+ Rules []*GcRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule_Intersection) Reset() { *m = GcRule_Intersection{} } +func (m *GcRule_Intersection) String() string { return proto.CompactTextString(m) } +func (*GcRule_Intersection) ProtoMessage() {} +func (*GcRule_Intersection) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{2, 0} +} +func (m *GcRule_Intersection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule_Intersection.Unmarshal(m, b) +} +func (m *GcRule_Intersection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule_Intersection.Marshal(b, m, deterministic) +} +func (dst *GcRule_Intersection) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule_Intersection.Merge(dst, src) +} +func (m *GcRule_Intersection) XXX_Size() int { + return xxx_messageInfo_GcRule_Intersection.Size(m) +} +func (m *GcRule_Intersection) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule_Intersection.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule_Intersection proto.InternalMessageInfo + +func (m *GcRule_Intersection) GetRules() []*GcRule { + if m != nil { + return m.Rules + } + return nil +} + +// A GcRule which deletes cells matching any of the given rules. +type GcRule_Union struct { + // Delete cells which would be deleted by any element of `rules`. + Rules []*GcRule `protobuf:"bytes,1,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcRule_Union) Reset() { *m = GcRule_Union{} } +func (m *GcRule_Union) String() string { return proto.CompactTextString(m) } +func (*GcRule_Union) ProtoMessage() {} +func (*GcRule_Union) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{2, 1} +} +func (m *GcRule_Union) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcRule_Union.Unmarshal(m, b) +} +func (m *GcRule_Union) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcRule_Union.Marshal(b, m, deterministic) +} +func (dst *GcRule_Union) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcRule_Union.Merge(dst, src) +} +func (m *GcRule_Union) XXX_Size() int { + return xxx_messageInfo_GcRule_Union.Size(m) +} +func (m *GcRule_Union) XXX_DiscardUnknown() { + xxx_messageInfo_GcRule_Union.DiscardUnknown(m) +} + +var xxx_messageInfo_GcRule_Union proto.InternalMessageInfo + +func (m *GcRule_Union) GetRules() []*GcRule { + if m != nil { + return m.Rules + } + return nil +} + +// A snapshot of a table at a particular time. A snapshot can be used as a +// checkpoint for data restoration or a data source for a new table. +// +// Note: This is a private alpha release of Cloud Bigtable snapshots. This +// feature is not currently available to most Cloud Bigtable customers. This +// feature might be changed in backward-incompatible ways and is not recommended +// for production use. It is not subject to any SLA or deprecation policy. +type Snapshot struct { + // (`OutputOnly`) + // The unique name of the snapshot. + // Values are of the form + // `projects//instances//clusters//snapshots/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // (`OutputOnly`) + // The source table at the time the snapshot was taken. 
+ SourceTable *Table `protobuf:"bytes,2,opt,name=source_table,json=sourceTable,proto3" json:"source_table,omitempty"` + // (`OutputOnly`) + // The size of the data in the source table at the time the snapshot was + // taken. In some cases, this value may be computed asynchronously via a + // background process and a placeholder of 0 will be used in the meantime. + DataSizeBytes int64 `protobuf:"varint,3,opt,name=data_size_bytes,json=dataSizeBytes,proto3" json:"data_size_bytes,omitempty"` + // (`OutputOnly`) + // The time when the snapshot is created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // (`OutputOnly`) + // The time when the snapshot will be deleted. The maximum amount of time a + // snapshot can stay active is 365 days. If 'ttl' is not specified, + // the default maximum of 365 days will be used. + DeleteTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=delete_time,json=deleteTime,proto3" json:"delete_time,omitempty"` + // (`OutputOnly`) + // The current state of the snapshot. + State Snapshot_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.bigtable.admin.v2.Snapshot_State" json:"state,omitempty"` + // (`OutputOnly`) + // Description of the snapshot. + Description string `protobuf:"bytes,7,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Snapshot) Reset() { *m = Snapshot{} } +func (m *Snapshot) String() string { return proto.CompactTextString(m) } +func (*Snapshot) ProtoMessage() {} +func (*Snapshot) Descriptor() ([]byte, []int) { + return fileDescriptor_table_e7caf794c322cd95, []int{3} +} +func (m *Snapshot) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Snapshot.Unmarshal(m, b) +} +func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic) +} +func (dst *Snapshot) XXX_Merge(src proto.Message) { + xxx_messageInfo_Snapshot.Merge(dst, src) +} +func (m *Snapshot) XXX_Size() int { + return xxx_messageInfo_Snapshot.Size(m) +} +func (m *Snapshot) XXX_DiscardUnknown() { + xxx_messageInfo_Snapshot.DiscardUnknown(m) +} + +var xxx_messageInfo_Snapshot proto.InternalMessageInfo + +func (m *Snapshot) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Snapshot) GetSourceTable() *Table { + if m != nil { + return m.SourceTable + } + return nil +} + +func (m *Snapshot) GetDataSizeBytes() int64 { + if m != nil { + return m.DataSizeBytes + } + return 0 +} + +func (m *Snapshot) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Snapshot) GetDeleteTime() *timestamp.Timestamp { + if m != nil { + return m.DeleteTime + } + return nil +} + +func (m *Snapshot) GetState() Snapshot_State { + if m != nil { + return m.State + } + return Snapshot_STATE_NOT_KNOWN +} + +func (m *Snapshot) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*Table)(nil), "google.bigtable.admin.v2.Table") + proto.RegisterMapType((map[string]*Table_ClusterState)(nil), "google.bigtable.admin.v2.Table.ClusterStatesEntry") + proto.RegisterMapType((map[string]*ColumnFamily)(nil), "google.bigtable.admin.v2.Table.ColumnFamiliesEntry") + proto.RegisterType((*Table_ClusterState)(nil), "google.bigtable.admin.v2.Table.ClusterState") + 
proto.RegisterType((*ColumnFamily)(nil), "google.bigtable.admin.v2.ColumnFamily") + proto.RegisterType((*GcRule)(nil), "google.bigtable.admin.v2.GcRule") + proto.RegisterType((*GcRule_Intersection)(nil), "google.bigtable.admin.v2.GcRule.Intersection") + proto.RegisterType((*GcRule_Union)(nil), "google.bigtable.admin.v2.GcRule.Union") + proto.RegisterType((*Snapshot)(nil), "google.bigtable.admin.v2.Snapshot") + proto.RegisterEnum("google.bigtable.admin.v2.Table_TimestampGranularity", Table_TimestampGranularity_name, Table_TimestampGranularity_value) + proto.RegisterEnum("google.bigtable.admin.v2.Table_View", Table_View_name, Table_View_value) + proto.RegisterEnum("google.bigtable.admin.v2.Table_ClusterState_ReplicationState", Table_ClusterState_ReplicationState_name, Table_ClusterState_ReplicationState_value) + proto.RegisterEnum("google.bigtable.admin.v2.Snapshot_State", Snapshot_State_name, Snapshot_State_value) +} + +func init() { + proto.RegisterFile("google/bigtable/admin/v2/table.proto", fileDescriptor_table_e7caf794c322cd95) +} + +var fileDescriptor_table_e7caf794c322cd95 = []byte{ + // 965 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xff, 0x6e, 0xdb, 0x54, + 0x18, 0xad, 0xe3, 0x38, 0x6d, 0xbf, 0xa4, 0xad, 0xb9, 0x1d, 0x22, 0x8b, 0xa6, 0x2d, 0x44, 0x30, + 0x45, 0x08, 0x1c, 0xc9, 0x1b, 0x08, 0x18, 0x1b, 0x72, 0x52, 0xb7, 0xb5, 0x48, 0xdc, 0xc8, 0x71, + 0x32, 0x75, 0x8a, 0x64, 0xdd, 0x38, 0x77, 0xc6, 0xe0, 0x1f, 0x91, 0x7f, 0x94, 0x66, 0x4f, 0xc1, + 0x0b, 0xf0, 0x37, 0x12, 0x12, 0x2f, 0xc2, 0xf3, 0xf0, 0x00, 0xc8, 0xf7, 0xda, 0x5b, 0xda, 0x26, + 0x84, 0xf1, 0x57, 0xaf, 0xbf, 0x7b, 0xce, 0xf9, 0xfc, 0x9d, 0x7b, 0x7d, 0x1a, 0xf8, 0xc4, 0x09, + 0x43, 0xc7, 0x23, 0x9d, 0x99, 0xeb, 0x24, 0x78, 0xe6, 0x91, 0x0e, 0x9e, 0xfb, 0x6e, 0xd0, 0xb9, + 0x92, 0x3b, 0xf4, 0x51, 0x5a, 0x44, 0x61, 0x12, 0xa2, 0x3a, 0x43, 0x49, 0x05, 0x4a, 0xa2, 0x28, + 0xe9, 0x4a, 0x6e, 0x3c, 0xc8, 0xf9, 0x78, 0xe1, 0x76, 0x70, 0x10, 0x84, 0x09, 0x4e, 0xdc, 0x30, + 0x88, 0x19, 0xaf, 0xf1, 0x30, 0xdf, 0xa5, 0x4f, 0xb3, 0xf4, 0x75, 0x67, 0x9e, 0x46, 0x14, 0x90, + 0xef, 0x3f, 0xba, 0xbd, 0x9f, 0xb8, 0x3e, 0x89, 0x13, 0xec, 0x2f, 0x18, 0xa0, 0xf5, 0xfb, 0x2e, + 0x08, 0x66, 0xd6, 0x11, 0x21, 0x28, 0x07, 0xd8, 0x27, 0x75, 0xae, 0xc9, 0xb5, 0xf7, 0x0d, 0xba, + 0x46, 0x97, 0x70, 0x68, 0x7b, 0x69, 0x9c, 0x90, 0xc8, 0x8a, 0x13, 0x9c, 0x90, 0xb8, 0x5e, 0x6a, + 0xf2, 0xed, 0xaa, 0x2c, 0x4b, 0x9b, 0xde, 0x57, 0xa2, 0x62, 0x52, 0x8f, 0xb1, 0x46, 0x94, 0xa4, + 0x06, 0x49, 0xb4, 0x34, 0x0e, 0xec, 0xd5, 0x1a, 0x9a, 0xc2, 0x91, 0x1d, 0x7a, 0xa9, 0x1f, 0x58, + 0xaf, 0xb1, 0xef, 0x7a, 0x2e, 0x89, 0xeb, 0x3c, 0xd5, 0x7e, 0xb2, 0x55, 0x9b, 0xd2, 0x4e, 0x73, + 0x16, 0x13, 0x3f, 0xb4, 0x6f, 0x14, 0xd1, 0x04, 0xaa, 0x4e, 0x84, 0x83, 0xd4, 0xc3, 0x91, 0x9b, + 0x2c, 0xeb, 0xe5, 0x26, 0xd7, 0x3e, 0x94, 0x9f, 0x6e, 0x53, 0x36, 0x0b, 0x73, 0xce, 0xde, 0x71, + 0x8d, 0x55, 0xa1, 0xc6, 0xdf, 0x1c, 0xd4, 0x56, 0x67, 0x43, 0x3f, 0xc1, 0x07, 0x11, 0x59, 0x78, + 0xae, 0x4d, 0x5d, 0x67, 0x2e, 0x51, 0x0b, 0x0f, 0xe5, 0xe7, 0xef, 0x63, 0x92, 0x64, 0xbc, 0x53, + 0xa1, 0x05, 0x43, 0x8c, 0x6e, 0x55, 0x5a, 0xd7, 0x20, 0xde, 0x46, 0xa1, 0x63, 0x38, 0x1a, 0x99, + 0x8a, 0xa9, 0x5a, 0xfa, 0x85, 0x69, 0xfd, 0xa0, 0x5f, 0xbc, 0xd4, 0xc5, 0x1d, 0x24, 0x42, 0x4d, + 0xd3, 0x35, 0x53, 0x53, 0xfa, 0xda, 0x2b, 0x4d, 0x3f, 0x13, 0x39, 0xf4, 0x11, 0x1c, 0x0f, 0xfb, + 0x8a, 0xae, 0xab, 0x27, 0xd6, 0x40, 0xd1, 0x74, 0x53, 0xd5, 0x15, 0xbd, 0xa7, 0x8a, 0x25, 0x74, + 0x1f, 0x3e, 0x1c, 0xeb, 0xeb, 0xb6, 0x78, 0xb4, 
0x0f, 0x82, 0xa1, 0x2a, 0x27, 0x97, 0x62, 0xb9, + 0x11, 0x00, 0xba, 0x7b, 0xa2, 0x48, 0x04, 0xfe, 0x67, 0xb2, 0xcc, 0x2f, 0x4c, 0xb6, 0x44, 0x5d, + 0x10, 0xae, 0xb0, 0x97, 0x92, 0x7a, 0xa9, 0xc9, 0xb5, 0xab, 0xf2, 0xe7, 0xef, 0xe3, 0x80, 0xc1, + 0xa8, 0xdf, 0x96, 0xbe, 0xe6, 0x1a, 0x2e, 0x1c, 0xaf, 0x39, 0xe5, 0x35, 0x0d, 0xbf, 0xbb, 0xd9, + 0xf0, 0xf1, 0xe6, 0x86, 0x2b, 0x7a, 0xcb, 0x95, 0x56, 0x2d, 0x0d, 0xee, 0xad, 0x3b, 0x76, 0xf4, + 0x29, 0x7c, 0x6c, 0x6a, 0x03, 0x75, 0x64, 0x2a, 0x83, 0xa1, 0x75, 0x66, 0x28, 0xfa, 0xb8, 0xaf, + 0x18, 0x9a, 0x79, 0x69, 0x8d, 0xf5, 0xd1, 0x50, 0xed, 0x69, 0xa7, 0x9a, 0x7a, 0x22, 0xee, 0x20, + 0x80, 0xca, 0x40, 0xeb, 0xf7, 0xb5, 0x91, 0xc8, 0xb5, 0xa6, 0x50, 0x9e, 0xb8, 0xe4, 0x17, 0x74, + 0x0f, 0xc4, 0x89, 0xa6, 0xbe, 0xbc, 0x85, 0x3c, 0x80, 0x7d, 0x5d, 0x19, 0xa8, 0xd6, 0x85, 0xde, + 0xbf, 0x14, 0x39, 0x74, 0x04, 0xd5, 0x51, 0xef, 0x5c, 0x1d, 0x28, 0x56, 0x86, 0x15, 0x4b, 0x19, + 0xcb, 0x50, 0x87, 0x7d, 0xad, 0xa7, 0x98, 0xda, 0x85, 0xce, 0xaa, 0x3c, 0xda, 0x83, 0xf2, 0xe9, + 0xb8, 0xdf, 0x17, 0xcb, 0x2d, 0x0d, 0x6a, 0xab, 0x33, 0xa0, 0x6f, 0x60, 0xd7, 0xb1, 0xad, 0x28, + 0xf5, 0xd8, 0x7d, 0xab, 0xca, 0xcd, 0xcd, 0xc3, 0x9f, 0xd9, 0x46, 0xea, 0x11, 0xa3, 0xe2, 0xd0, + 0xbf, 0xad, 0x5f, 0x79, 0xa8, 0xb0, 0x12, 0xfa, 0x0c, 0x44, 0x1f, 0x5f, 0x5b, 0x41, 0xea, 0x5b, + 0x57, 0x24, 0x8a, 0xb3, 0x68, 0xa1, 0x72, 0xc2, 0xf9, 0x8e, 0x71, 0xe8, 0xe3, 0x6b, 0x3d, 0xf5, + 0x27, 0x79, 0x1d, 0x3d, 0x85, 0xdd, 0x0c, 0x8b, 0x9d, 0xc2, 0xee, 0xfb, 0x45, 0xc7, 0x22, 0x5e, + 0xa4, 0x93, 0x3c, 0x7e, 0xce, 0x77, 0x8c, 0x8a, 0x8f, 0xaf, 0x15, 0x87, 0xa0, 0x11, 0xd4, 0xdc, + 0x20, 0x21, 0x51, 0x4c, 0xec, 0x6c, 0xa7, 0xce, 0x53, 0xea, 0x17, 0xdb, 0x5e, 0x56, 0xd2, 0x56, + 0x48, 0xe7, 0x3b, 0xc6, 0x0d, 0x11, 0xf4, 0x02, 0x84, 0x34, 0xc8, 0xd4, 0xca, 0xdb, 0xce, 0x3d, + 0x57, 0x1b, 0x07, 0x4c, 0x86, 0xd1, 0x1a, 0xa7, 0x50, 0x5b, 0xd5, 0x47, 0x5f, 0x81, 0x90, 0x39, + 0x99, 0xcd, 0xce, 0xff, 0x27, 0x2b, 0x19, 0xbc, 0xf1, 0x3d, 0x08, 0x54, 0xf9, 0xff, 0x0a, 0x74, + 0x2b, 0x50, 0xce, 0x16, 0xad, 0xdf, 0x78, 0xd8, 0x1b, 0x05, 0x78, 0x11, 0xff, 0x18, 0x26, 0x6b, + 0xa3, 0xb8, 0x0b, 0xb5, 0x38, 0x4c, 0x23, 0x9b, 0x58, 0x54, 0x2f, 0x3f, 0x81, 0x47, 0x5b, 0xbe, + 0x30, 0xa3, 0xca, 0x48, 0x2c, 0xe2, 0x1f, 0xc3, 0xd1, 0x1c, 0x27, 0xd8, 0x8a, 0xdd, 0x37, 0xc4, + 0x9a, 0x2d, 0x13, 0x9a, 0xb9, 0x5c, 0x9b, 0x37, 0x0e, 0xb2, 0xf2, 0xc8, 0x7d, 0x43, 0xba, 0x59, + 0x11, 0x3d, 0x83, 0xaa, 0x1d, 0x11, 0x9c, 0x10, 0x2b, 0xfb, 0x77, 0x91, 0x7b, 0xdc, 0xb8, 0x73, + 0xd8, 0x6f, 0xbf, 0x1b, 0x03, 0x18, 0x3c, 0x2b, 0x64, 0xe4, 0x39, 0xf1, 0x48, 0x41, 0x16, 0xb6, + 0x93, 0x19, 0x9c, 0x92, 0x5f, 0x80, 0xc0, 0x22, 0xb4, 0x42, 0x23, 0xb4, 0xbd, 0x79, 0xbc, 0xc2, + 0x2c, 0x29, 0x0f, 0x0f, 0x4a, 0x43, 0xcd, 0xac, 0x79, 0x6c, 0x47, 0xee, 0x82, 0xde, 0xb5, 0x5d, + 0x6a, 0xe0, 0x6a, 0xa9, 0xf5, 0x25, 0x08, 0xff, 0x92, 0x9c, 0x6f, 0x33, 0x8f, 0x43, 0x35, 0xd8, + 0xeb, 0x19, 0xaa, 0x62, 0x66, 0x01, 0x5a, 0xea, 0xfe, 0xc9, 0xc1, 0x03, 0x3b, 0xf4, 0x37, 0xbe, + 0x4f, 0x17, 0xa8, 0xc5, 0xc3, 0x6c, 0xbc, 0x21, 0xf7, 0xea, 0x79, 0x8e, 0x73, 0x42, 0x0f, 0x07, + 0x8e, 0x14, 0x46, 0x4e, 0xc7, 0x21, 0x01, 0x1d, 0xbe, 0xc3, 0xb6, 0xf0, 0xc2, 0x8d, 0xef, 0xfe, + 0x28, 0x78, 0x46, 0x17, 0x7f, 0x94, 0x1e, 0x9e, 0x31, 0x7e, 0xcf, 0x0b, 0xd3, 0xb9, 0xd4, 0x2d, + 0xba, 0x29, 0xb4, 0xdb, 0x44, 0xfe, 0xab, 0x00, 0x4c, 0x29, 0x60, 0x5a, 0x00, 0xa6, 0x14, 0x30, + 0x9d, 0xc8, 0xb3, 0x0a, 0xed, 0xf5, 0xe4, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x56, 0x59, 0xa7, + 0xc1, 0x7f, 0x08, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_data.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_data.pb.go new file mode 100644 index 0000000..e9f6b00 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_data.pb.go @@ -0,0 +1,2428 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/v1/bigtable_data.proto + +package bigtable // import "google.golang.org/genproto/googleapis/bigtable/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies the complete (requested) contents of a single row of a table. +// Rows which exceed 256MiB in size cannot be read in full. +type Row struct { + // The unique key which identifies this row within its table. This is the same + // key that's used to identify the row in, for example, a MutateRowRequest. + // May contain any non-empty byte string up to 4KiB in length. + Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // May be empty, but only if the entire row is empty. + // The mutual ordering of column families is not specified. + Families []*Family `protobuf:"bytes,2,rep,name=families,proto3" json:"families,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Row) Reset() { *m = Row{} } +func (m *Row) String() string { return proto.CompactTextString(m) } +func (*Row) ProtoMessage() {} +func (*Row) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{0} +} +func (m *Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Row.Unmarshal(m, b) +} +func (m *Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Row.Marshal(b, m, deterministic) +} +func (dst *Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_Row.Merge(dst, src) +} +func (m *Row) XXX_Size() int { + return xxx_messageInfo_Row.Size(m) +} +func (m *Row) XXX_DiscardUnknown() { + xxx_messageInfo_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_Row proto.InternalMessageInfo + +func (m *Row) GetKey() []byte { + if m != nil { + return m.Key + } + return nil +} + +func (m *Row) GetFamilies() []*Family { + if m != nil { + return m.Families + } + return nil +} + +// Specifies (some of) the contents of a single row/column family of a table. +type Family struct { + // The unique key which identifies this family within its row. This is the + // same key that's used to identify the family in, for example, a RowFilter + // which sets its "family_name_regex_filter" field. + // Must match [-_.a-zA-Z0-9]+, except that AggregatingRowProcessors may + // produce cells in a sentinel family with an empty name. + // Must be no greater than 64 characters in length. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Must not be empty. Sorted in order of increasing "qualifier". 
+ Columns []*Column `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Family) Reset() { *m = Family{} } +func (m *Family) String() string { return proto.CompactTextString(m) } +func (*Family) ProtoMessage() {} +func (*Family) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{1} +} +func (m *Family) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Family.Unmarshal(m, b) +} +func (m *Family) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Family.Marshal(b, m, deterministic) +} +func (dst *Family) XXX_Merge(src proto.Message) { + xxx_messageInfo_Family.Merge(dst, src) +} +func (m *Family) XXX_Size() int { + return xxx_messageInfo_Family.Size(m) +} +func (m *Family) XXX_DiscardUnknown() { + xxx_messageInfo_Family.DiscardUnknown(m) +} + +var xxx_messageInfo_Family proto.InternalMessageInfo + +func (m *Family) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Family) GetColumns() []*Column { + if m != nil { + return m.Columns + } + return nil +} + +// Specifies (some of) the contents of a single row/column of a table. +type Column struct { + // The unique key which identifies this column within its family. This is the + // same key that's used to identify the column in, for example, a RowFilter + // which sets its "column_qualifier_regex_filter" field. + // May contain any byte string, including the empty string, up to 16kiB in + // length. + Qualifier []byte `protobuf:"bytes,1,opt,name=qualifier,proto3" json:"qualifier,omitempty"` + // Must not be empty. Sorted in order of decreasing "timestamp_micros". + Cells []*Cell `protobuf:"bytes,2,rep,name=cells,proto3" json:"cells,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Column) Reset() { *m = Column{} } +func (m *Column) String() string { return proto.CompactTextString(m) } +func (*Column) ProtoMessage() {} +func (*Column) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{2} +} +func (m *Column) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Column.Unmarshal(m, b) +} +func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Column.Marshal(b, m, deterministic) +} +func (dst *Column) XXX_Merge(src proto.Message) { + xxx_messageInfo_Column.Merge(dst, src) +} +func (m *Column) XXX_Size() int { + return xxx_messageInfo_Column.Size(m) +} +func (m *Column) XXX_DiscardUnknown() { + xxx_messageInfo_Column.DiscardUnknown(m) +} + +var xxx_messageInfo_Column proto.InternalMessageInfo + +func (m *Column) GetQualifier() []byte { + if m != nil { + return m.Qualifier + } + return nil +} + +func (m *Column) GetCells() []*Cell { + if m != nil { + return m.Cells + } + return nil +} + +// Specifies (some of) the contents of a single row/column/timestamp of a table. +type Cell struct { + // The cell's stored timestamp, which also uniquely identifies it within + // its column. + // Values are always expressed in microseconds, but individual tables may set + // a coarser "granularity" to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will only allow + // values of "timestamp_micros" which are multiples of 1000. 
+ TimestampMicros int64 `protobuf:"varint,1,opt,name=timestamp_micros,json=timestampMicros,proto3" json:"timestamp_micros,omitempty"` + // The value stored in the cell. + // May contain any byte string, including the empty string, up to 100MiB in + // length. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Labels applied to the cell by a [RowFilter][google.bigtable.v1.RowFilter]. + Labels []string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cell) Reset() { *m = Cell{} } +func (m *Cell) String() string { return proto.CompactTextString(m) } +func (*Cell) ProtoMessage() {} +func (*Cell) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{3} +} +func (m *Cell) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cell.Unmarshal(m, b) +} +func (m *Cell) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cell.Marshal(b, m, deterministic) +} +func (dst *Cell) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cell.Merge(dst, src) +} +func (m *Cell) XXX_Size() int { + return xxx_messageInfo_Cell.Size(m) +} +func (m *Cell) XXX_DiscardUnknown() { + xxx_messageInfo_Cell.DiscardUnknown(m) +} + +var xxx_messageInfo_Cell proto.InternalMessageInfo + +func (m *Cell) GetTimestampMicros() int64 { + if m != nil { + return m.TimestampMicros + } + return 0 +} + +func (m *Cell) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *Cell) GetLabels() []string { + if m != nil { + return m.Labels + } + return nil +} + +// Specifies a contiguous range of rows. +type RowRange struct { + // Inclusive lower bound. If left empty, interpreted as the empty string. + StartKey []byte `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + // Exclusive upper bound. If left empty, interpreted as infinity. + EndKey []byte `protobuf:"bytes,3,opt,name=end_key,json=endKey,proto3" json:"end_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowRange) Reset() { *m = RowRange{} } +func (m *RowRange) String() string { return proto.CompactTextString(m) } +func (*RowRange) ProtoMessage() {} +func (*RowRange) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{4} +} +func (m *RowRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowRange.Unmarshal(m, b) +} +func (m *RowRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowRange.Marshal(b, m, deterministic) +} +func (dst *RowRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowRange.Merge(dst, src) +} +func (m *RowRange) XXX_Size() int { + return xxx_messageInfo_RowRange.Size(m) +} +func (m *RowRange) XXX_DiscardUnknown() { + xxx_messageInfo_RowRange.DiscardUnknown(m) +} + +var xxx_messageInfo_RowRange proto.InternalMessageInfo + +func (m *RowRange) GetStartKey() []byte { + if m != nil { + return m.StartKey + } + return nil +} + +func (m *RowRange) GetEndKey() []byte { + if m != nil { + return m.EndKey + } + return nil +} + +// Specifies a non-contiguous set of rows. +type RowSet struct { + // Single rows included in the set. 
+ RowKeys [][]byte `protobuf:"bytes,1,rep,name=row_keys,json=rowKeys,proto3" json:"row_keys,omitempty"` + // Contiguous row ranges included in the set. + RowRanges []*RowRange `protobuf:"bytes,2,rep,name=row_ranges,json=rowRanges,proto3" json:"row_ranges,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowSet) Reset() { *m = RowSet{} } +func (m *RowSet) String() string { return proto.CompactTextString(m) } +func (*RowSet) ProtoMessage() {} +func (*RowSet) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{5} +} +func (m *RowSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowSet.Unmarshal(m, b) +} +func (m *RowSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowSet.Marshal(b, m, deterministic) +} +func (dst *RowSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowSet.Merge(dst, src) +} +func (m *RowSet) XXX_Size() int { + return xxx_messageInfo_RowSet.Size(m) +} +func (m *RowSet) XXX_DiscardUnknown() { + xxx_messageInfo_RowSet.DiscardUnknown(m) +} + +var xxx_messageInfo_RowSet proto.InternalMessageInfo + +func (m *RowSet) GetRowKeys() [][]byte { + if m != nil { + return m.RowKeys + } + return nil +} + +func (m *RowSet) GetRowRanges() []*RowRange { + if m != nil { + return m.RowRanges + } + return nil +} + +// Specifies a contiguous range of columns within a single column family. +// The range spans from : to +// :, where both bounds can be either inclusive or +// exclusive. +type ColumnRange struct { + // The name of the column family within which this range falls. + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The column qualifier at which to start the range (within 'column_family'). + // If neither field is set, interpreted as the empty string, inclusive. + // + // Types that are valid to be assigned to StartQualifier: + // *ColumnRange_StartQualifierInclusive + // *ColumnRange_StartQualifierExclusive + StartQualifier isColumnRange_StartQualifier `protobuf_oneof:"start_qualifier"` + // The column qualifier at which to end the range (within 'column_family'). + // If neither field is set, interpreted as the infinite string, exclusive. 
+ // + // Types that are valid to be assigned to EndQualifier: + // *ColumnRange_EndQualifierInclusive + // *ColumnRange_EndQualifierExclusive + EndQualifier isColumnRange_EndQualifier `protobuf_oneof:"end_qualifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnRange) Reset() { *m = ColumnRange{} } +func (m *ColumnRange) String() string { return proto.CompactTextString(m) } +func (*ColumnRange) ProtoMessage() {} +func (*ColumnRange) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{6} +} +func (m *ColumnRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnRange.Unmarshal(m, b) +} +func (m *ColumnRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnRange.Marshal(b, m, deterministic) +} +func (dst *ColumnRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnRange.Merge(dst, src) +} +func (m *ColumnRange) XXX_Size() int { + return xxx_messageInfo_ColumnRange.Size(m) +} +func (m *ColumnRange) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnRange.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnRange proto.InternalMessageInfo + +func (m *ColumnRange) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +type isColumnRange_StartQualifier interface { + isColumnRange_StartQualifier() +} + +type ColumnRange_StartQualifierInclusive struct { + StartQualifierInclusive []byte `protobuf:"bytes,2,opt,name=start_qualifier_inclusive,json=startQualifierInclusive,proto3,oneof"` +} + +type ColumnRange_StartQualifierExclusive struct { + StartQualifierExclusive []byte `protobuf:"bytes,3,opt,name=start_qualifier_exclusive,json=startQualifierExclusive,proto3,oneof"` +} + +func (*ColumnRange_StartQualifierInclusive) isColumnRange_StartQualifier() {} + +func (*ColumnRange_StartQualifierExclusive) isColumnRange_StartQualifier() {} + +func (m *ColumnRange) GetStartQualifier() isColumnRange_StartQualifier { + if m != nil { + return m.StartQualifier + } + return nil +} + +func (m *ColumnRange) GetStartQualifierInclusive() []byte { + if x, ok := m.GetStartQualifier().(*ColumnRange_StartQualifierInclusive); ok { + return x.StartQualifierInclusive + } + return nil +} + +func (m *ColumnRange) GetStartQualifierExclusive() []byte { + if x, ok := m.GetStartQualifier().(*ColumnRange_StartQualifierExclusive); ok { + return x.StartQualifierExclusive + } + return nil +} + +type isColumnRange_EndQualifier interface { + isColumnRange_EndQualifier() +} + +type ColumnRange_EndQualifierInclusive struct { + EndQualifierInclusive []byte `protobuf:"bytes,4,opt,name=end_qualifier_inclusive,json=endQualifierInclusive,proto3,oneof"` +} + +type ColumnRange_EndQualifierExclusive struct { + EndQualifierExclusive []byte `protobuf:"bytes,5,opt,name=end_qualifier_exclusive,json=endQualifierExclusive,proto3,oneof"` +} + +func (*ColumnRange_EndQualifierInclusive) isColumnRange_EndQualifier() {} + +func (*ColumnRange_EndQualifierExclusive) isColumnRange_EndQualifier() {} + +func (m *ColumnRange) GetEndQualifier() isColumnRange_EndQualifier { + if m != nil { + return m.EndQualifier + } + return nil +} + +func (m *ColumnRange) GetEndQualifierInclusive() []byte { + if x, ok := m.GetEndQualifier().(*ColumnRange_EndQualifierInclusive); ok { + return x.EndQualifierInclusive + } + return nil +} + +func (m *ColumnRange) GetEndQualifierExclusive() []byte { + if x, ok := m.GetEndQualifier().(*ColumnRange_EndQualifierExclusive); 
ok { + return x.EndQualifierExclusive + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ColumnRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ColumnRange_OneofMarshaler, _ColumnRange_OneofUnmarshaler, _ColumnRange_OneofSizer, []interface{}{ + (*ColumnRange_StartQualifierInclusive)(nil), + (*ColumnRange_StartQualifierExclusive)(nil), + (*ColumnRange_EndQualifierInclusive)(nil), + (*ColumnRange_EndQualifierExclusive)(nil), + } +} + +func _ColumnRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ColumnRange) + // start_qualifier + switch x := m.StartQualifier.(type) { + case *ColumnRange_StartQualifierInclusive: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartQualifierInclusive) + case *ColumnRange_StartQualifierExclusive: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartQualifierExclusive) + case nil: + default: + return fmt.Errorf("ColumnRange.StartQualifier has unexpected type %T", x) + } + // end_qualifier + switch x := m.EndQualifier.(type) { + case *ColumnRange_EndQualifierInclusive: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndQualifierInclusive) + case *ColumnRange_EndQualifierExclusive: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndQualifierExclusive) + case nil: + default: + return fmt.Errorf("ColumnRange.EndQualifier has unexpected type %T", x) + } + return nil +} + +func _ColumnRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ColumnRange) + switch tag { + case 2: // start_qualifier.start_qualifier_inclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartQualifier = &ColumnRange_StartQualifierInclusive{x} + return true, err + case 3: // start_qualifier.start_qualifier_exclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartQualifier = &ColumnRange_StartQualifierExclusive{x} + return true, err + case 4: // end_qualifier.end_qualifier_inclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndQualifier = &ColumnRange_EndQualifierInclusive{x} + return true, err + case 5: // end_qualifier.end_qualifier_exclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndQualifier = &ColumnRange_EndQualifierExclusive{x} + return true, err + default: + return false, nil + } +} + +func _ColumnRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ColumnRange) + // start_qualifier + switch x := m.StartQualifier.(type) { + case *ColumnRange_StartQualifierInclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartQualifierInclusive))) + n += len(x.StartQualifierInclusive) + case *ColumnRange_StartQualifierExclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartQualifierExclusive))) + n += len(x.StartQualifierExclusive) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_qualifier + switch x := m.EndQualifier.(type) { + case *ColumnRange_EndQualifierInclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndQualifierInclusive))) + n += 
len(x.EndQualifierInclusive) + case *ColumnRange_EndQualifierExclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndQualifierExclusive))) + n += len(x.EndQualifierExclusive) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specified a contiguous range of microsecond timestamps. +type TimestampRange struct { + // Inclusive lower bound. If left empty, interpreted as 0. + StartTimestampMicros int64 `protobuf:"varint,1,opt,name=start_timestamp_micros,json=startTimestampMicros,proto3" json:"start_timestamp_micros,omitempty"` + // Exclusive upper bound. If left empty, interpreted as infinity. + EndTimestampMicros int64 `protobuf:"varint,2,opt,name=end_timestamp_micros,json=endTimestampMicros,proto3" json:"end_timestamp_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampRange) Reset() { *m = TimestampRange{} } +func (m *TimestampRange) String() string { return proto.CompactTextString(m) } +func (*TimestampRange) ProtoMessage() {} +func (*TimestampRange) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{7} +} +func (m *TimestampRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampRange.Unmarshal(m, b) +} +func (m *TimestampRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampRange.Marshal(b, m, deterministic) +} +func (dst *TimestampRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampRange.Merge(dst, src) +} +func (m *TimestampRange) XXX_Size() int { + return xxx_messageInfo_TimestampRange.Size(m) +} +func (m *TimestampRange) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampRange.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampRange proto.InternalMessageInfo + +func (m *TimestampRange) GetStartTimestampMicros() int64 { + if m != nil { + return m.StartTimestampMicros + } + return 0 +} + +func (m *TimestampRange) GetEndTimestampMicros() int64 { + if m != nil { + return m.EndTimestampMicros + } + return 0 +} + +// Specifies a contiguous range of raw byte values. +type ValueRange struct { + // The value at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + // + // Types that are valid to be assigned to StartValue: + // *ValueRange_StartValueInclusive + // *ValueRange_StartValueExclusive + StartValue isValueRange_StartValue `protobuf_oneof:"start_value"` + // The value at which to end the range. + // If neither field is set, interpreted as the infinite string, exclusive. 
+ // + // Types that are valid to be assigned to EndValue: + // *ValueRange_EndValueInclusive + // *ValueRange_EndValueExclusive + EndValue isValueRange_EndValue `protobuf_oneof:"end_value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueRange) Reset() { *m = ValueRange{} } +func (m *ValueRange) String() string { return proto.CompactTextString(m) } +func (*ValueRange) ProtoMessage() {} +func (*ValueRange) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{8} +} +func (m *ValueRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueRange.Unmarshal(m, b) +} +func (m *ValueRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueRange.Marshal(b, m, deterministic) +} +func (dst *ValueRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueRange.Merge(dst, src) +} +func (m *ValueRange) XXX_Size() int { + return xxx_messageInfo_ValueRange.Size(m) +} +func (m *ValueRange) XXX_DiscardUnknown() { + xxx_messageInfo_ValueRange.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueRange proto.InternalMessageInfo + +type isValueRange_StartValue interface { + isValueRange_StartValue() +} + +type ValueRange_StartValueInclusive struct { + StartValueInclusive []byte `protobuf:"bytes,1,opt,name=start_value_inclusive,json=startValueInclusive,proto3,oneof"` +} + +type ValueRange_StartValueExclusive struct { + StartValueExclusive []byte `protobuf:"bytes,2,opt,name=start_value_exclusive,json=startValueExclusive,proto3,oneof"` +} + +func (*ValueRange_StartValueInclusive) isValueRange_StartValue() {} + +func (*ValueRange_StartValueExclusive) isValueRange_StartValue() {} + +func (m *ValueRange) GetStartValue() isValueRange_StartValue { + if m != nil { + return m.StartValue + } + return nil +} + +func (m *ValueRange) GetStartValueInclusive() []byte { + if x, ok := m.GetStartValue().(*ValueRange_StartValueInclusive); ok { + return x.StartValueInclusive + } + return nil +} + +func (m *ValueRange) GetStartValueExclusive() []byte { + if x, ok := m.GetStartValue().(*ValueRange_StartValueExclusive); ok { + return x.StartValueExclusive + } + return nil +} + +type isValueRange_EndValue interface { + isValueRange_EndValue() +} + +type ValueRange_EndValueInclusive struct { + EndValueInclusive []byte `protobuf:"bytes,3,opt,name=end_value_inclusive,json=endValueInclusive,proto3,oneof"` +} + +type ValueRange_EndValueExclusive struct { + EndValueExclusive []byte `protobuf:"bytes,4,opt,name=end_value_exclusive,json=endValueExclusive,proto3,oneof"` +} + +func (*ValueRange_EndValueInclusive) isValueRange_EndValue() {} + +func (*ValueRange_EndValueExclusive) isValueRange_EndValue() {} + +func (m *ValueRange) GetEndValue() isValueRange_EndValue { + if m != nil { + return m.EndValue + } + return nil +} + +func (m *ValueRange) GetEndValueInclusive() []byte { + if x, ok := m.GetEndValue().(*ValueRange_EndValueInclusive); ok { + return x.EndValueInclusive + } + return nil +} + +func (m *ValueRange) GetEndValueExclusive() []byte { + if x, ok := m.GetEndValue().(*ValueRange_EndValueExclusive); ok { + return x.EndValueExclusive + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ValueRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ValueRange_OneofMarshaler, _ValueRange_OneofUnmarshaler, _ValueRange_OneofSizer, []interface{}{ + (*ValueRange_StartValueInclusive)(nil), + (*ValueRange_StartValueExclusive)(nil), + (*ValueRange_EndValueInclusive)(nil), + (*ValueRange_EndValueExclusive)(nil), + } +} + +func _ValueRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ValueRange) + // start_value + switch x := m.StartValue.(type) { + case *ValueRange_StartValueInclusive: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartValueInclusive) + case *ValueRange_StartValueExclusive: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartValueExclusive) + case nil: + default: + return fmt.Errorf("ValueRange.StartValue has unexpected type %T", x) + } + // end_value + switch x := m.EndValue.(type) { + case *ValueRange_EndValueInclusive: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndValueInclusive) + case *ValueRange_EndValueExclusive: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndValueExclusive) + case nil: + default: + return fmt.Errorf("ValueRange.EndValue has unexpected type %T", x) + } + return nil +} + +func _ValueRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ValueRange) + switch tag { + case 1: // start_value.start_value_inclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartValue = &ValueRange_StartValueInclusive{x} + return true, err + case 2: // start_value.start_value_exclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartValue = &ValueRange_StartValueExclusive{x} + return true, err + case 3: // end_value.end_value_inclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndValue = &ValueRange_EndValueInclusive{x} + return true, err + case 4: // end_value.end_value_exclusive + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndValue = &ValueRange_EndValueExclusive{x} + return true, err + default: + return false, nil + } +} + +func _ValueRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ValueRange) + // start_value + switch x := m.StartValue.(type) { + case *ValueRange_StartValueInclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartValueInclusive))) + n += len(x.StartValueInclusive) + case *ValueRange_StartValueExclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartValueExclusive))) + n += len(x.StartValueExclusive) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_value + switch x := m.EndValue.(type) { + case *ValueRange_EndValueInclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndValueInclusive))) + n += len(x.EndValueInclusive) + case *ValueRange_EndValueExclusive: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndValueExclusive))) + n += len(x.EndValueExclusive) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Takes a row as input and produces an alternate view of the row 
based on +// specified rules. For example, a RowFilter might trim down a row to include +// just the cells from columns matching a given regular expression, or might +// return all the cells of a row but not their values. More complicated filters +// can be composed out of these components to express requests such as, "within +// every column of a particular family, give just the two most recent cells +// which are older than timestamp X." +// +// There are two broad categories of RowFilters (true filters and transformers), +// as well as two ways to compose simple filters into more complex ones +// (chains and interleaves). They work as follows: +// +// * True filters alter the input row by excluding some of its cells wholesale +// from the output row. An example of a true filter is the "value_regex_filter", +// which excludes cells whose values don't match the specified pattern. All +// regex true filters use RE2 syntax (https://github.com/google/re2/wiki/Syntax) +// in raw byte mode (RE2::Latin1), and are evaluated as full matches. An +// important point to keep in mind is that RE2(.) is equivalent by default to +// RE2([^\n]), meaning that it does not match newlines. When attempting to match +// an arbitrary byte, you should therefore use the escape sequence '\C', which +// may need to be further escaped as '\\C' in your client language. +// +// * Transformers alter the input row by changing the values of some of its +// cells in the output, without excluding them completely. Currently, the only +// supported transformer is the "strip_value_transformer", which replaces every +// cell's value with the empty string. +// +// * Chains and interleaves are described in more detail in the +// RowFilter.Chain and RowFilter.Interleave documentation. +// +// The total serialized size of a RowFilter message must not +// exceed 4096 bytes, and RowFilters may not be nested within each other +// (in Chains or Interleaves) to a depth of more than 20. +type RowFilter struct { + // Which of the possible RowFilter types to apply. If none are set, this + // RowFilter returns all cells in the input row. 
+ // + // Types that are valid to be assigned to Filter: + // *RowFilter_Chain_ + // *RowFilter_Interleave_ + // *RowFilter_Condition_ + // *RowFilter_Sink + // *RowFilter_PassAllFilter + // *RowFilter_BlockAllFilter + // *RowFilter_RowKeyRegexFilter + // *RowFilter_RowSampleFilter + // *RowFilter_FamilyNameRegexFilter + // *RowFilter_ColumnQualifierRegexFilter + // *RowFilter_ColumnRangeFilter + // *RowFilter_TimestampRangeFilter + // *RowFilter_ValueRegexFilter + // *RowFilter_ValueRangeFilter + // *RowFilter_CellsPerRowOffsetFilter + // *RowFilter_CellsPerRowLimitFilter + // *RowFilter_CellsPerColumnLimitFilter + // *RowFilter_StripValueTransformer + // *RowFilter_ApplyLabelTransformer + Filter isRowFilter_Filter `protobuf_oneof:"filter"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter) Reset() { *m = RowFilter{} } +func (m *RowFilter) String() string { return proto.CompactTextString(m) } +func (*RowFilter) ProtoMessage() {} +func (*RowFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{9} +} +func (m *RowFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter.Unmarshal(m, b) +} +func (m *RowFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter.Marshal(b, m, deterministic) +} +func (dst *RowFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter.Merge(dst, src) +} +func (m *RowFilter) XXX_Size() int { + return xxx_messageInfo_RowFilter.Size(m) +} +func (m *RowFilter) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter proto.InternalMessageInfo + +type isRowFilter_Filter interface { + isRowFilter_Filter() +} + +type RowFilter_Chain_ struct { + Chain *RowFilter_Chain `protobuf:"bytes,1,opt,name=chain,proto3,oneof"` +} + +type RowFilter_Interleave_ struct { + Interleave *RowFilter_Interleave `protobuf:"bytes,2,opt,name=interleave,proto3,oneof"` +} + +type RowFilter_Condition_ struct { + Condition *RowFilter_Condition `protobuf:"bytes,3,opt,name=condition,proto3,oneof"` +} + +type RowFilter_Sink struct { + Sink bool `protobuf:"varint,16,opt,name=sink,proto3,oneof"` +} + +type RowFilter_PassAllFilter struct { + PassAllFilter bool `protobuf:"varint,17,opt,name=pass_all_filter,json=passAllFilter,proto3,oneof"` +} + +type RowFilter_BlockAllFilter struct { + BlockAllFilter bool `protobuf:"varint,18,opt,name=block_all_filter,json=blockAllFilter,proto3,oneof"` +} + +type RowFilter_RowKeyRegexFilter struct { + RowKeyRegexFilter []byte `protobuf:"bytes,4,opt,name=row_key_regex_filter,json=rowKeyRegexFilter,proto3,oneof"` +} + +type RowFilter_RowSampleFilter struct { + RowSampleFilter float64 `protobuf:"fixed64,14,opt,name=row_sample_filter,json=rowSampleFilter,proto3,oneof"` +} + +type RowFilter_FamilyNameRegexFilter struct { + FamilyNameRegexFilter string `protobuf:"bytes,5,opt,name=family_name_regex_filter,json=familyNameRegexFilter,proto3,oneof"` +} + +type RowFilter_ColumnQualifierRegexFilter struct { + ColumnQualifierRegexFilter []byte `protobuf:"bytes,6,opt,name=column_qualifier_regex_filter,json=columnQualifierRegexFilter,proto3,oneof"` +} + +type RowFilter_ColumnRangeFilter struct { + ColumnRangeFilter *ColumnRange `protobuf:"bytes,7,opt,name=column_range_filter,json=columnRangeFilter,proto3,oneof"` +} + +type RowFilter_TimestampRangeFilter struct { + TimestampRangeFilter *TimestampRange 
`protobuf:"bytes,8,opt,name=timestamp_range_filter,json=timestampRangeFilter,proto3,oneof"` +} + +type RowFilter_ValueRegexFilter struct { + ValueRegexFilter []byte `protobuf:"bytes,9,opt,name=value_regex_filter,json=valueRegexFilter,proto3,oneof"` +} + +type RowFilter_ValueRangeFilter struct { + ValueRangeFilter *ValueRange `protobuf:"bytes,15,opt,name=value_range_filter,json=valueRangeFilter,proto3,oneof"` +} + +type RowFilter_CellsPerRowOffsetFilter struct { + CellsPerRowOffsetFilter int32 `protobuf:"varint,10,opt,name=cells_per_row_offset_filter,json=cellsPerRowOffsetFilter,proto3,oneof"` +} + +type RowFilter_CellsPerRowLimitFilter struct { + CellsPerRowLimitFilter int32 `protobuf:"varint,11,opt,name=cells_per_row_limit_filter,json=cellsPerRowLimitFilter,proto3,oneof"` +} + +type RowFilter_CellsPerColumnLimitFilter struct { + CellsPerColumnLimitFilter int32 `protobuf:"varint,12,opt,name=cells_per_column_limit_filter,json=cellsPerColumnLimitFilter,proto3,oneof"` +} + +type RowFilter_StripValueTransformer struct { + StripValueTransformer bool `protobuf:"varint,13,opt,name=strip_value_transformer,json=stripValueTransformer,proto3,oneof"` +} + +type RowFilter_ApplyLabelTransformer struct { + ApplyLabelTransformer string `protobuf:"bytes,19,opt,name=apply_label_transformer,json=applyLabelTransformer,proto3,oneof"` +} + +func (*RowFilter_Chain_) isRowFilter_Filter() {} + +func (*RowFilter_Interleave_) isRowFilter_Filter() {} + +func (*RowFilter_Condition_) isRowFilter_Filter() {} + +func (*RowFilter_Sink) isRowFilter_Filter() {} + +func (*RowFilter_PassAllFilter) isRowFilter_Filter() {} + +func (*RowFilter_BlockAllFilter) isRowFilter_Filter() {} + +func (*RowFilter_RowKeyRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_RowSampleFilter) isRowFilter_Filter() {} + +func (*RowFilter_FamilyNameRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ColumnQualifierRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ColumnRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_TimestampRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_ValueRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ValueRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerRowOffsetFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerRowLimitFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerColumnLimitFilter) isRowFilter_Filter() {} + +func (*RowFilter_StripValueTransformer) isRowFilter_Filter() {} + +func (*RowFilter_ApplyLabelTransformer) isRowFilter_Filter() {} + +func (m *RowFilter) GetFilter() isRowFilter_Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *RowFilter) GetChain() *RowFilter_Chain { + if x, ok := m.GetFilter().(*RowFilter_Chain_); ok { + return x.Chain + } + return nil +} + +func (m *RowFilter) GetInterleave() *RowFilter_Interleave { + if x, ok := m.GetFilter().(*RowFilter_Interleave_); ok { + return x.Interleave + } + return nil +} + +func (m *RowFilter) GetCondition() *RowFilter_Condition { + if x, ok := m.GetFilter().(*RowFilter_Condition_); ok { + return x.Condition + } + return nil +} + +func (m *RowFilter) GetSink() bool { + if x, ok := m.GetFilter().(*RowFilter_Sink); ok { + return x.Sink + } + return false +} + +func (m *RowFilter) GetPassAllFilter() bool { + if x, ok := m.GetFilter().(*RowFilter_PassAllFilter); ok { + return x.PassAllFilter + } + return false +} + +func (m *RowFilter) GetBlockAllFilter() bool { + if x, ok := m.GetFilter().(*RowFilter_BlockAllFilter); ok { + return x.BlockAllFilter + } + return false +} 
+ +func (m *RowFilter) GetRowKeyRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_RowKeyRegexFilter); ok { + return x.RowKeyRegexFilter + } + return nil +} + +func (m *RowFilter) GetRowSampleFilter() float64 { + if x, ok := m.GetFilter().(*RowFilter_RowSampleFilter); ok { + return x.RowSampleFilter + } + return 0 +} + +func (m *RowFilter) GetFamilyNameRegexFilter() string { + if x, ok := m.GetFilter().(*RowFilter_FamilyNameRegexFilter); ok { + return x.FamilyNameRegexFilter + } + return "" +} + +func (m *RowFilter) GetColumnQualifierRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_ColumnQualifierRegexFilter); ok { + return x.ColumnQualifierRegexFilter + } + return nil +} + +func (m *RowFilter) GetColumnRangeFilter() *ColumnRange { + if x, ok := m.GetFilter().(*RowFilter_ColumnRangeFilter); ok { + return x.ColumnRangeFilter + } + return nil +} + +func (m *RowFilter) GetTimestampRangeFilter() *TimestampRange { + if x, ok := m.GetFilter().(*RowFilter_TimestampRangeFilter); ok { + return x.TimestampRangeFilter + } + return nil +} + +func (m *RowFilter) GetValueRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_ValueRegexFilter); ok { + return x.ValueRegexFilter + } + return nil +} + +func (m *RowFilter) GetValueRangeFilter() *ValueRange { + if x, ok := m.GetFilter().(*RowFilter_ValueRangeFilter); ok { + return x.ValueRangeFilter + } + return nil +} + +func (m *RowFilter) GetCellsPerRowOffsetFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerRowOffsetFilter); ok { + return x.CellsPerRowOffsetFilter + } + return 0 +} + +func (m *RowFilter) GetCellsPerRowLimitFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerRowLimitFilter); ok { + return x.CellsPerRowLimitFilter + } + return 0 +} + +func (m *RowFilter) GetCellsPerColumnLimitFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerColumnLimitFilter); ok { + return x.CellsPerColumnLimitFilter + } + return 0 +} + +func (m *RowFilter) GetStripValueTransformer() bool { + if x, ok := m.GetFilter().(*RowFilter_StripValueTransformer); ok { + return x.StripValueTransformer + } + return false +} + +func (m *RowFilter) GetApplyLabelTransformer() string { + if x, ok := m.GetFilter().(*RowFilter_ApplyLabelTransformer); ok { + return x.ApplyLabelTransformer + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RowFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RowFilter_OneofMarshaler, _RowFilter_OneofUnmarshaler, _RowFilter_OneofSizer, []interface{}{ + (*RowFilter_Chain_)(nil), + (*RowFilter_Interleave_)(nil), + (*RowFilter_Condition_)(nil), + (*RowFilter_Sink)(nil), + (*RowFilter_PassAllFilter)(nil), + (*RowFilter_BlockAllFilter)(nil), + (*RowFilter_RowKeyRegexFilter)(nil), + (*RowFilter_RowSampleFilter)(nil), + (*RowFilter_FamilyNameRegexFilter)(nil), + (*RowFilter_ColumnQualifierRegexFilter)(nil), + (*RowFilter_ColumnRangeFilter)(nil), + (*RowFilter_TimestampRangeFilter)(nil), + (*RowFilter_ValueRegexFilter)(nil), + (*RowFilter_ValueRangeFilter)(nil), + (*RowFilter_CellsPerRowOffsetFilter)(nil), + (*RowFilter_CellsPerRowLimitFilter)(nil), + (*RowFilter_CellsPerColumnLimitFilter)(nil), + (*RowFilter_StripValueTransformer)(nil), + (*RowFilter_ApplyLabelTransformer)(nil), + } +} + +func _RowFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RowFilter) + // filter + switch x := m.Filter.(type) { + case *RowFilter_Chain_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Chain); err != nil { + return err + } + case *RowFilter_Interleave_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Interleave); err != nil { + return err + } + case *RowFilter_Condition_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Condition); err != nil { + return err + } + case *RowFilter_Sink: + t := uint64(0) + if x.Sink { + t = 1 + } + b.EncodeVarint(16<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_PassAllFilter: + t := uint64(0) + if x.PassAllFilter { + t = 1 + } + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_BlockAllFilter: + t := uint64(0) + if x.BlockAllFilter { + t = 1 + } + b.EncodeVarint(18<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_RowKeyRegexFilter: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RowKeyRegexFilter) + case *RowFilter_RowSampleFilter: + b.EncodeVarint(14<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.RowSampleFilter)) + case *RowFilter_FamilyNameRegexFilter: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FamilyNameRegexFilter) + case *RowFilter_ColumnQualifierRegexFilter: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ColumnQualifierRegexFilter) + case *RowFilter_ColumnRangeFilter: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ColumnRangeFilter); err != nil { + return err + } + case *RowFilter_TimestampRangeFilter: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampRangeFilter); err != nil { + return err + } + case *RowFilter_ValueRegexFilter: + b.EncodeVarint(9<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ValueRegexFilter) + case *RowFilter_ValueRangeFilter: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ValueRangeFilter); err != nil { + return err + } + case *RowFilter_CellsPerRowOffsetFilter: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CellsPerRowOffsetFilter)) + case *RowFilter_CellsPerRowLimitFilter: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CellsPerRowLimitFilter)) + case *RowFilter_CellsPerColumnLimitFilter: + b.EncodeVarint(12<<3 | proto.WireVarint) + 
b.EncodeVarint(uint64(x.CellsPerColumnLimitFilter)) + case *RowFilter_StripValueTransformer: + t := uint64(0) + if x.StripValueTransformer { + t = 1 + } + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_ApplyLabelTransformer: + b.EncodeVarint(19<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ApplyLabelTransformer) + case nil: + default: + return fmt.Errorf("RowFilter.Filter has unexpected type %T", x) + } + return nil +} + +func _RowFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RowFilter) + switch tag { + case 1: // filter.chain + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Chain) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Chain_{msg} + return true, err + case 2: // filter.interleave + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Interleave) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Interleave_{msg} + return true, err + case 3: // filter.condition + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Condition) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Condition_{msg} + return true, err + case 16: // filter.sink + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_Sink{x != 0} + return true, err + case 17: // filter.pass_all_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_PassAllFilter{x != 0} + return true, err + case 18: // filter.block_all_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_BlockAllFilter{x != 0} + return true, err + case 4: // filter.row_key_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_RowKeyRegexFilter{x} + return true, err + case 14: // filter.row_sample_filter + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Filter = &RowFilter_RowSampleFilter{math.Float64frombits(x)} + return true, err + case 5: // filter.family_name_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &RowFilter_FamilyNameRegexFilter{x} + return true, err + case 6: // filter.column_qualifier_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_ColumnQualifierRegexFilter{x} + return true, err + case 7: // filter.column_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ColumnRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_ColumnRangeFilter{msg} + return true, err + case 8: // filter.timestamp_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TimestampRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_TimestampRangeFilter{msg} + return true, err + case 9: // filter.value_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_ValueRegexFilter{x} + return true, err + case 15: // 
filter.value_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ValueRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_ValueRangeFilter{msg} + return true, err + case 10: // filter.cells_per_row_offset_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerRowOffsetFilter{int32(x)} + return true, err + case 11: // filter.cells_per_row_limit_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerRowLimitFilter{int32(x)} + return true, err + case 12: // filter.cells_per_column_limit_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerColumnLimitFilter{int32(x)} + return true, err + case 13: // filter.strip_value_transformer + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_StripValueTransformer{x != 0} + return true, err + case 19: // filter.apply_label_transformer + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &RowFilter_ApplyLabelTransformer{x} + return true, err + default: + return false, nil + } +} + +func _RowFilter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RowFilter) + // filter + switch x := m.Filter.(type) { + case *RowFilter_Chain_: + s := proto.Size(x.Chain) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Interleave_: + s := proto.Size(x.Interleave) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Condition_: + s := proto.Size(x.Condition) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Sink: + n += 2 // tag and wire + n += 1 + case *RowFilter_PassAllFilter: + n += 2 // tag and wire + n += 1 + case *RowFilter_BlockAllFilter: + n += 2 // tag and wire + n += 1 + case *RowFilter_RowKeyRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RowKeyRegexFilter))) + n += len(x.RowKeyRegexFilter) + case *RowFilter_RowSampleFilter: + n += 1 // tag and wire + n += 8 + case *RowFilter_FamilyNameRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FamilyNameRegexFilter))) + n += len(x.FamilyNameRegexFilter) + case *RowFilter_ColumnQualifierRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ColumnQualifierRegexFilter))) + n += len(x.ColumnQualifierRegexFilter) + case *RowFilter_ColumnRangeFilter: + s := proto.Size(x.ColumnRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_TimestampRangeFilter: + s := proto.Size(x.TimestampRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_ValueRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ValueRegexFilter))) + n += len(x.ValueRegexFilter) + case *RowFilter_ValueRangeFilter: + s := proto.Size(x.ValueRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_CellsPerRowOffsetFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerRowOffsetFilter)) + case *RowFilter_CellsPerRowLimitFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerRowLimitFilter)) + case 
*RowFilter_CellsPerColumnLimitFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerColumnLimitFilter)) + case *RowFilter_StripValueTransformer: + n += 1 // tag and wire + n += 1 + case *RowFilter_ApplyLabelTransformer: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.ApplyLabelTransformer))) + n += len(x.ApplyLabelTransformer) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A RowFilter which sends rows through several RowFilters in sequence. +type RowFilter_Chain struct { + // The elements of "filters" are chained together to process the input row: + // in row -> f(0) -> intermediate row -> f(1) -> ... -> f(N) -> out row + // The full chain is executed atomically. + Filters []*RowFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Chain) Reset() { *m = RowFilter_Chain{} } +func (m *RowFilter_Chain) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Chain) ProtoMessage() {} +func (*RowFilter_Chain) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{9, 0} +} +func (m *RowFilter_Chain) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Chain.Unmarshal(m, b) +} +func (m *RowFilter_Chain) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Chain.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Chain) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Chain.Merge(dst, src) +} +func (m *RowFilter_Chain) XXX_Size() int { + return xxx_messageInfo_RowFilter_Chain.Size(m) +} +func (m *RowFilter_Chain) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Chain.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Chain proto.InternalMessageInfo + +func (m *RowFilter_Chain) GetFilters() []*RowFilter { + if m != nil { + return m.Filters + } + return nil +} + +// A RowFilter which sends each row to each of several component +// RowFilters and interleaves the results. +type RowFilter_Interleave struct { + // The elements of "filters" all process a copy of the input row, and the + // results are pooled, sorted, and combined into a single output row. + // If multiple cells are produced with the same column and timestamp, + // they will all appear in the output row in an unspecified mutual order. + // Consider the following example, with three filters: + // + // input row + // | + // ----------------------------------------------------- + // | | | + // f(0) f(1) f(2) + // | | | + // 1: foo,bar,10,x foo,bar,10,z far,bar,7,a + // 2: foo,blah,11,z far,blah,5,x far,blah,5,x + // | | | + // ----------------------------------------------------- + // | + // 1: foo,bar,10,z // could have switched with #2 + // 2: foo,bar,10,x // could have switched with #1 + // 3: foo,blah,11,z + // 4: far,bar,7,a + // 5: far,blah,5,x // identical to #6 + // 6: far,blah,5,x // identical to #5 + // All interleaved filters are executed atomically. 
+ Filters []*RowFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Interleave) Reset() { *m = RowFilter_Interleave{} } +func (m *RowFilter_Interleave) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Interleave) ProtoMessage() {} +func (*RowFilter_Interleave) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{9, 1} +} +func (m *RowFilter_Interleave) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Interleave.Unmarshal(m, b) +} +func (m *RowFilter_Interleave) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Interleave.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Interleave) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Interleave.Merge(dst, src) +} +func (m *RowFilter_Interleave) XXX_Size() int { + return xxx_messageInfo_RowFilter_Interleave.Size(m) +} +func (m *RowFilter_Interleave) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Interleave.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Interleave proto.InternalMessageInfo + +func (m *RowFilter_Interleave) GetFilters() []*RowFilter { + if m != nil { + return m.Filters + } + return nil +} + +// A RowFilter which evaluates one of two possible RowFilters, depending on +// whether or not a predicate RowFilter outputs any cells from the input row. +// +// IMPORTANT NOTE: The predicate filter does not execute atomically with the +// true and false filters, which may lead to inconsistent or unexpected +// results. Additionally, Condition filters have poor performance, especially +// when filters are set for the false condition. +type RowFilter_Condition struct { + // If "predicate_filter" outputs any cells, then "true_filter" will be + // evaluated on the input row. Otherwise, "false_filter" will be evaluated. + PredicateFilter *RowFilter `protobuf:"bytes,1,opt,name=predicate_filter,json=predicateFilter,proto3" json:"predicate_filter,omitempty"` + // The filter to apply to the input row if "predicate_filter" returns any + // results. If not provided, no results will be returned in the true case. + TrueFilter *RowFilter `protobuf:"bytes,2,opt,name=true_filter,json=trueFilter,proto3" json:"true_filter,omitempty"` + // The filter to apply to the input row if "predicate_filter" does not + // return any results. If not provided, no results will be returned in the + // false case. 
+ FalseFilter *RowFilter `protobuf:"bytes,3,opt,name=false_filter,json=falseFilter,proto3" json:"false_filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Condition) Reset() { *m = RowFilter_Condition{} } +func (m *RowFilter_Condition) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Condition) ProtoMessage() {} +func (*RowFilter_Condition) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{9, 2} +} +func (m *RowFilter_Condition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Condition.Unmarshal(m, b) +} +func (m *RowFilter_Condition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Condition.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Condition) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Condition.Merge(dst, src) +} +func (m *RowFilter_Condition) XXX_Size() int { + return xxx_messageInfo_RowFilter_Condition.Size(m) +} +func (m *RowFilter_Condition) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Condition.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Condition proto.InternalMessageInfo + +func (m *RowFilter_Condition) GetPredicateFilter() *RowFilter { + if m != nil { + return m.PredicateFilter + } + return nil +} + +func (m *RowFilter_Condition) GetTrueFilter() *RowFilter { + if m != nil { + return m.TrueFilter + } + return nil +} + +func (m *RowFilter_Condition) GetFalseFilter() *RowFilter { + if m != nil { + return m.FalseFilter + } + return nil +} + +// Specifies a particular change to be made to the contents of a row. +type Mutation struct { + // Which of the possible Mutation types to apply. 
+ // + // Types that are valid to be assigned to Mutation: + // *Mutation_SetCell_ + // *Mutation_DeleteFromColumn_ + // *Mutation_DeleteFromFamily_ + // *Mutation_DeleteFromRow_ + Mutation isMutation_Mutation `protobuf_oneof:"mutation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation) Reset() { *m = Mutation{} } +func (m *Mutation) String() string { return proto.CompactTextString(m) } +func (*Mutation) ProtoMessage() {} +func (*Mutation) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{10} +} +func (m *Mutation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation.Unmarshal(m, b) +} +func (m *Mutation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation.Marshal(b, m, deterministic) +} +func (dst *Mutation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation.Merge(dst, src) +} +func (m *Mutation) XXX_Size() int { + return xxx_messageInfo_Mutation.Size(m) +} +func (m *Mutation) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation proto.InternalMessageInfo + +type isMutation_Mutation interface { + isMutation_Mutation() +} + +type Mutation_SetCell_ struct { + SetCell *Mutation_SetCell `protobuf:"bytes,1,opt,name=set_cell,json=setCell,proto3,oneof"` +} + +type Mutation_DeleteFromColumn_ struct { + DeleteFromColumn *Mutation_DeleteFromColumn `protobuf:"bytes,2,opt,name=delete_from_column,json=deleteFromColumn,proto3,oneof"` +} + +type Mutation_DeleteFromFamily_ struct { + DeleteFromFamily *Mutation_DeleteFromFamily `protobuf:"bytes,3,opt,name=delete_from_family,json=deleteFromFamily,proto3,oneof"` +} + +type Mutation_DeleteFromRow_ struct { + DeleteFromRow *Mutation_DeleteFromRow `protobuf:"bytes,4,opt,name=delete_from_row,json=deleteFromRow,proto3,oneof"` +} + +func (*Mutation_SetCell_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromColumn_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromFamily_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromRow_) isMutation_Mutation() {} + +func (m *Mutation) GetMutation() isMutation_Mutation { + if m != nil { + return m.Mutation + } + return nil +} + +func (m *Mutation) GetSetCell() *Mutation_SetCell { + if x, ok := m.GetMutation().(*Mutation_SetCell_); ok { + return x.SetCell + } + return nil +} + +func (m *Mutation) GetDeleteFromColumn() *Mutation_DeleteFromColumn { + if x, ok := m.GetMutation().(*Mutation_DeleteFromColumn_); ok { + return x.DeleteFromColumn + } + return nil +} + +func (m *Mutation) GetDeleteFromFamily() *Mutation_DeleteFromFamily { + if x, ok := m.GetMutation().(*Mutation_DeleteFromFamily_); ok { + return x.DeleteFromFamily + } + return nil +} + +func (m *Mutation) GetDeleteFromRow() *Mutation_DeleteFromRow { + if x, ok := m.GetMutation().(*Mutation_DeleteFromRow_); ok { + return x.DeleteFromRow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Mutation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Mutation_OneofMarshaler, _Mutation_OneofUnmarshaler, _Mutation_OneofSizer, []interface{}{ + (*Mutation_SetCell_)(nil), + (*Mutation_DeleteFromColumn_)(nil), + (*Mutation_DeleteFromFamily_)(nil), + (*Mutation_DeleteFromRow_)(nil), + } +} + +func _Mutation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Mutation) + // mutation + switch x := m.Mutation.(type) { + case *Mutation_SetCell_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SetCell); err != nil { + return err + } + case *Mutation_DeleteFromColumn_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromColumn); err != nil { + return err + } + case *Mutation_DeleteFromFamily_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromFamily); err != nil { + return err + } + case *Mutation_DeleteFromRow_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromRow); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Mutation.Mutation has unexpected type %T", x) + } + return nil +} + +func _Mutation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Mutation) + switch tag { + case 1: // mutation.set_cell + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_SetCell) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_SetCell_{msg} + return true, err + case 2: // mutation.delete_from_column + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromColumn) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromColumn_{msg} + return true, err + case 3: // mutation.delete_from_family + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromFamily) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromFamily_{msg} + return true, err + case 4: // mutation.delete_from_row + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromRow) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromRow_{msg} + return true, err + default: + return false, nil + } +} + +func _Mutation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Mutation) + // mutation + switch x := m.Mutation.(type) { + case *Mutation_SetCell_: + s := proto.Size(x.SetCell) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromColumn_: + s := proto.Size(x.DeleteFromColumn) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromFamily_: + s := proto.Size(x.DeleteFromFamily) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromRow_: + s := proto.Size(x.DeleteFromRow) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Mutation which sets the value of the specified cell. +type Mutation_SetCell struct { + // The name of the family into which new data should be written. 
+ // Must match [-_.a-zA-Z0-9]+ + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column into which new data should be written. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The timestamp of the cell into which new data should be written. + // Use -1 for current Bigtable server time. + // Otherwise, the client should set this value itself, noting that the + // default value is a timestamp of zero if the field is left unspecified. + // Values must match the "granularity" of the table (e.g. micros, millis). + TimestampMicros int64 `protobuf:"varint,3,opt,name=timestamp_micros,json=timestampMicros,proto3" json:"timestamp_micros,omitempty"` + // The value to be written into the specified cell. + Value []byte `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_SetCell) Reset() { *m = Mutation_SetCell{} } +func (m *Mutation_SetCell) String() string { return proto.CompactTextString(m) } +func (*Mutation_SetCell) ProtoMessage() {} +func (*Mutation_SetCell) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{10, 0} +} +func (m *Mutation_SetCell) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_SetCell.Unmarshal(m, b) +} +func (m *Mutation_SetCell) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_SetCell.Marshal(b, m, deterministic) +} +func (dst *Mutation_SetCell) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_SetCell.Merge(dst, src) +} +func (m *Mutation_SetCell) XXX_Size() int { + return xxx_messageInfo_Mutation_SetCell.Size(m) +} +func (m *Mutation_SetCell) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_SetCell.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_SetCell proto.InternalMessageInfo + +func (m *Mutation_SetCell) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *Mutation_SetCell) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +func (m *Mutation_SetCell) GetTimestampMicros() int64 { + if m != nil { + return m.TimestampMicros + } + return 0 +} + +func (m *Mutation_SetCell) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// A Mutation which deletes cells from the specified column, optionally +// restricting the deletions to a given timestamp range. +type Mutation_DeleteFromColumn struct { + // The name of the family from which cells should be deleted. + // Must match [-_.a-zA-Z0-9]+ + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column from which cells should be deleted. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The range of timestamps within which cells should be deleted. 
+ TimeRange *TimestampRange `protobuf:"bytes,3,opt,name=time_range,json=timeRange,proto3" json:"time_range,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromColumn) Reset() { *m = Mutation_DeleteFromColumn{} } +func (m *Mutation_DeleteFromColumn) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromColumn) ProtoMessage() {} +func (*Mutation_DeleteFromColumn) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{10, 1} +} +func (m *Mutation_DeleteFromColumn) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromColumn.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromColumn) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromColumn.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromColumn) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromColumn.Merge(dst, src) +} +func (m *Mutation_DeleteFromColumn) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromColumn.Size(m) +} +func (m *Mutation_DeleteFromColumn) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromColumn.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromColumn proto.InternalMessageInfo + +func (m *Mutation_DeleteFromColumn) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *Mutation_DeleteFromColumn) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +func (m *Mutation_DeleteFromColumn) GetTimeRange() *TimestampRange { + if m != nil { + return m.TimeRange + } + return nil +} + +// A Mutation which deletes all cells from the specified column family. +type Mutation_DeleteFromFamily struct { + // The name of the family from which cells should be deleted. + // Must match [-_.a-zA-Z0-9]+ + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromFamily) Reset() { *m = Mutation_DeleteFromFamily{} } +func (m *Mutation_DeleteFromFamily) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromFamily) ProtoMessage() {} +func (*Mutation_DeleteFromFamily) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{10, 2} +} +func (m *Mutation_DeleteFromFamily) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromFamily.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromFamily) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromFamily.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromFamily) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromFamily.Merge(dst, src) +} +func (m *Mutation_DeleteFromFamily) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromFamily.Size(m) +} +func (m *Mutation_DeleteFromFamily) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromFamily.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromFamily proto.InternalMessageInfo + +func (m *Mutation_DeleteFromFamily) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +// A Mutation which deletes all cells from the containing row. 
+type Mutation_DeleteFromRow struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromRow) Reset() { *m = Mutation_DeleteFromRow{} } +func (m *Mutation_DeleteFromRow) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromRow) ProtoMessage() {} +func (*Mutation_DeleteFromRow) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{10, 3} +} +func (m *Mutation_DeleteFromRow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromRow.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromRow.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromRow.Merge(dst, src) +} +func (m *Mutation_DeleteFromRow) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromRow.Size(m) +} +func (m *Mutation_DeleteFromRow) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromRow.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromRow proto.InternalMessageInfo + +// Specifies an atomic read/modify/write operation on the latest value of the +// specified column. +type ReadModifyWriteRule struct { + // The name of the family to which the read/modify/write should be applied. + // Must match [-_.a-zA-Z0-9]+ + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column to which the read/modify/write should be + // applied. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The rule used to determine the column's new latest value from its current + // latest value. 
+ // + // Types that are valid to be assigned to Rule: + // *ReadModifyWriteRule_AppendValue + // *ReadModifyWriteRule_IncrementAmount + Rule isReadModifyWriteRule_Rule `protobuf_oneof:"rule"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadModifyWriteRule) Reset() { *m = ReadModifyWriteRule{} } +func (m *ReadModifyWriteRule) String() string { return proto.CompactTextString(m) } +func (*ReadModifyWriteRule) ProtoMessage() {} +func (*ReadModifyWriteRule) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_data_ec1ed9ad6f99305a, []int{11} +} +func (m *ReadModifyWriteRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadModifyWriteRule.Unmarshal(m, b) +} +func (m *ReadModifyWriteRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadModifyWriteRule.Marshal(b, m, deterministic) +} +func (dst *ReadModifyWriteRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadModifyWriteRule.Merge(dst, src) +} +func (m *ReadModifyWriteRule) XXX_Size() int { + return xxx_messageInfo_ReadModifyWriteRule.Size(m) +} +func (m *ReadModifyWriteRule) XXX_DiscardUnknown() { + xxx_messageInfo_ReadModifyWriteRule.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadModifyWriteRule proto.InternalMessageInfo + +func (m *ReadModifyWriteRule) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *ReadModifyWriteRule) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +type isReadModifyWriteRule_Rule interface { + isReadModifyWriteRule_Rule() +} + +type ReadModifyWriteRule_AppendValue struct { + AppendValue []byte `protobuf:"bytes,3,opt,name=append_value,json=appendValue,proto3,oneof"` +} + +type ReadModifyWriteRule_IncrementAmount struct { + IncrementAmount int64 `protobuf:"varint,4,opt,name=increment_amount,json=incrementAmount,proto3,oneof"` +} + +func (*ReadModifyWriteRule_AppendValue) isReadModifyWriteRule_Rule() {} + +func (*ReadModifyWriteRule_IncrementAmount) isReadModifyWriteRule_Rule() {} + +func (m *ReadModifyWriteRule) GetRule() isReadModifyWriteRule_Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m *ReadModifyWriteRule) GetAppendValue() []byte { + if x, ok := m.GetRule().(*ReadModifyWriteRule_AppendValue); ok { + return x.AppendValue + } + return nil +} + +func (m *ReadModifyWriteRule) GetIncrementAmount() int64 { + if x, ok := m.GetRule().(*ReadModifyWriteRule_IncrementAmount); ok { + return x.IncrementAmount + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadModifyWriteRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadModifyWriteRule_OneofMarshaler, _ReadModifyWriteRule_OneofUnmarshaler, _ReadModifyWriteRule_OneofSizer, []interface{}{ + (*ReadModifyWriteRule_AppendValue)(nil), + (*ReadModifyWriteRule_IncrementAmount)(nil), + } +} + +func _ReadModifyWriteRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadModifyWriteRule) + // rule + switch x := m.Rule.(type) { + case *ReadModifyWriteRule_AppendValue: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AppendValue) + case *ReadModifyWriteRule_IncrementAmount: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IncrementAmount)) + case nil: + default: + return fmt.Errorf("ReadModifyWriteRule.Rule has unexpected type %T", x) + } + return nil +} + +func _ReadModifyWriteRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadModifyWriteRule) + switch tag { + case 3: // rule.append_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Rule = &ReadModifyWriteRule_AppendValue{x} + return true, err + case 4: // rule.increment_amount + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Rule = &ReadModifyWriteRule_IncrementAmount{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ReadModifyWriteRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadModifyWriteRule) + // rule + switch x := m.Rule.(type) { + case *ReadModifyWriteRule_AppendValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AppendValue))) + n += len(x.AppendValue) + case *ReadModifyWriteRule_IncrementAmount: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IncrementAmount)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Row)(nil), "google.bigtable.v1.Row") + proto.RegisterType((*Family)(nil), "google.bigtable.v1.Family") + proto.RegisterType((*Column)(nil), "google.bigtable.v1.Column") + proto.RegisterType((*Cell)(nil), "google.bigtable.v1.Cell") + proto.RegisterType((*RowRange)(nil), "google.bigtable.v1.RowRange") + proto.RegisterType((*RowSet)(nil), "google.bigtable.v1.RowSet") + proto.RegisterType((*ColumnRange)(nil), "google.bigtable.v1.ColumnRange") + proto.RegisterType((*TimestampRange)(nil), "google.bigtable.v1.TimestampRange") + proto.RegisterType((*ValueRange)(nil), "google.bigtable.v1.ValueRange") + proto.RegisterType((*RowFilter)(nil), "google.bigtable.v1.RowFilter") + proto.RegisterType((*RowFilter_Chain)(nil), "google.bigtable.v1.RowFilter.Chain") + proto.RegisterType((*RowFilter_Interleave)(nil), "google.bigtable.v1.RowFilter.Interleave") + proto.RegisterType((*RowFilter_Condition)(nil), "google.bigtable.v1.RowFilter.Condition") + proto.RegisterType((*Mutation)(nil), "google.bigtable.v1.Mutation") + proto.RegisterType((*Mutation_SetCell)(nil), "google.bigtable.v1.Mutation.SetCell") + proto.RegisterType((*Mutation_DeleteFromColumn)(nil), "google.bigtable.v1.Mutation.DeleteFromColumn") + proto.RegisterType((*Mutation_DeleteFromFamily)(nil), "google.bigtable.v1.Mutation.DeleteFromFamily") + proto.RegisterType((*Mutation_DeleteFromRow)(nil), 
"google.bigtable.v1.Mutation.DeleteFromRow") + proto.RegisterType((*ReadModifyWriteRule)(nil), "google.bigtable.v1.ReadModifyWriteRule") +} + +func init() { + proto.RegisterFile("google/bigtable/v1/bigtable_data.proto", fileDescriptor_bigtable_data_ec1ed9ad6f99305a) +} + +var fileDescriptor_bigtable_data_ec1ed9ad6f99305a = []byte{ + // 1378 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdb, 0x6e, 0x1b, 0x37, + 0x13, 0xd6, 0x5a, 0xb2, 0x0e, 0xb3, 0x76, 0x24, 0xd3, 0x27, 0x45, 0x49, 0xfe, 0x18, 0xca, 0x8f, + 0x56, 0x49, 0x5b, 0x39, 0x71, 0x82, 0x36, 0x6d, 0x82, 0x22, 0x56, 0x0e, 0x55, 0x9a, 0x33, 0x63, + 0xa4, 0x40, 0x80, 0x62, 0x4b, 0x6b, 0x29, 0x75, 0x11, 0xee, 0x52, 0xe5, 0xae, 0xac, 0xe8, 0x45, + 0x7a, 0xdf, 0xe7, 0xe8, 0x5d, 0x5f, 0xa2, 0xaf, 0xd1, 0xcb, 0x5e, 0xf4, 0xa2, 0xe0, 0x61, 0x4f, + 0xb2, 0x62, 0x1b, 0x45, 0xee, 0x96, 0x9c, 0xef, 0xfb, 0x66, 0x38, 0x1c, 0x0e, 0xb9, 0xf0, 0xc9, + 0x88, 0xf3, 0x11, 0xa3, 0xbb, 0x87, 0xde, 0x28, 0x22, 0x87, 0x8c, 0xee, 0x1e, 0xdd, 0x48, 0xbe, + 0x1d, 0x97, 0x44, 0xa4, 0x3b, 0x16, 0x3c, 0xe2, 0x08, 0x69, 0x5c, 0x37, 0xb6, 0x75, 0x8f, 0x6e, + 0xb4, 0x5f, 0x40, 0x11, 0xf3, 0x29, 0x6a, 0x40, 0xf1, 0x1d, 0x9d, 0x35, 0xad, 0x1d, 0xab, 0xb3, + 0x82, 0xe5, 0x27, 0xfa, 0x12, 0xaa, 0x43, 0xe2, 0x7b, 0xcc, 0xa3, 0x61, 0x73, 0x69, 0xa7, 0xd8, + 0xb1, 0xf7, 0x5a, 0xdd, 0xe3, 0xfc, 0xee, 0x23, 0x89, 0x99, 0xe1, 0x04, 0xdb, 0xc6, 0x50, 0xd6, + 0x73, 0x08, 0x41, 0x29, 0x20, 0x3e, 0x55, 0xa2, 0x35, 0xac, 0xbe, 0xd1, 0x2d, 0xa8, 0x0c, 0x38, + 0x9b, 0xf8, 0xc1, 0x89, 0xa2, 0xf7, 0x15, 0x04, 0xc7, 0xd0, 0xf6, 0x1b, 0x28, 0xeb, 0x29, 0x74, + 0x11, 0x6a, 0xbf, 0x4c, 0x08, 0xf3, 0x86, 0x1e, 0x15, 0x26, 0xda, 0x74, 0x02, 0x75, 0x61, 0x79, + 0x40, 0x19, 0x8b, 0xb5, 0x9b, 0x0b, 0xb5, 0x29, 0x63, 0x58, 0xc3, 0xda, 0x0e, 0x94, 0xe4, 0x10, + 0x5d, 0x85, 0x46, 0xe4, 0xf9, 0x34, 0x8c, 0x88, 0x3f, 0x76, 0x7c, 0x6f, 0x20, 0x78, 0xa8, 0xc4, + 0x8b, 0xb8, 0x9e, 0xcc, 0x3f, 0x53, 0xd3, 0x68, 0x03, 0x96, 0x8f, 0x08, 0x9b, 0xd0, 0xe6, 0x92, + 0x72, 0xae, 0x07, 0x68, 0x0b, 0xca, 0x8c, 0x1c, 0x52, 0x16, 0x36, 0x8b, 0x3b, 0xc5, 0x4e, 0x0d, + 0x9b, 0x51, 0xfb, 0x1e, 0x54, 0x31, 0x9f, 0x62, 0x12, 0x8c, 0x28, 0xba, 0x00, 0xb5, 0x30, 0x22, + 0x22, 0x72, 0x64, 0xa2, 0x35, 0xbb, 0xaa, 0x26, 0x9e, 0xd0, 0x19, 0xda, 0x86, 0x0a, 0x0d, 0x5c, + 0x65, 0x2a, 0x2a, 0x53, 0x99, 0x06, 0xee, 0x13, 0x3a, 0x6b, 0xff, 0x04, 0x65, 0xcc, 0xa7, 0xaf, + 0x69, 0x84, 0xce, 0x43, 0x55, 0xf0, 0xa9, 0x84, 0xc8, 0xe0, 0x8a, 0x9d, 0x15, 0x5c, 0x11, 0x7c, + 0xfa, 0x84, 0xce, 0x42, 0x74, 0x07, 0x40, 0x9a, 0x84, 0xf4, 0x13, 0x2f, 0xfe, 0xe2, 0xa2, 0xc5, + 0xc7, 0xc1, 0xe0, 0x9a, 0x30, 0x5f, 0x61, 0xfb, 0x8f, 0x25, 0xb0, 0x4d, 0xc2, 0x55, 0x9c, 0x97, + 0xc1, 0x56, 0x9b, 0x39, 0x73, 0x32, 0xbb, 0x07, 0x7a, 0xea, 0xb9, 0xdc, 0xc3, 0xbb, 0x70, 0x5e, + 0x2f, 0x24, 0x49, 0xbc, 0xe3, 0x05, 0x03, 0x36, 0x09, 0xbd, 0x23, 0x93, 0x96, 0x7e, 0x01, 0x6f, + 0x2b, 0xc8, 0xab, 0x18, 0xf1, 0x38, 0x06, 0x2c, 0x62, 0xd3, 0xf7, 0x31, 0xbb, 0xb8, 0x98, 0xfd, + 0x30, 0x06, 0xa0, 0xdb, 0xb0, 0x2d, 0xf3, 0xb4, 0xc8, 0x73, 0x49, 0x71, 0x2d, 0xbc, 0x49, 0x03, + 0x77, 0x81, 0xdf, 0x63, 0xcc, 0xd4, 0xeb, 0xf2, 0x22, 0x66, 0xe2, 0xb3, 0xb7, 0x06, 0xf5, 0xb9, + 0x88, 0x7b, 0x75, 0x58, 0xcd, 0x89, 0xb5, 0xdf, 0xc3, 0xb9, 0x83, 0xb8, 0x52, 0x74, 0x1a, 0x6f, + 0xc1, 0x96, 0x66, 0x7d, 0xa0, 0xb2, 0x36, 0x94, 0xf5, 0x60, 0xae, 0xbc, 0xae, 0xc3, 0x86, 0x14, + 0x3e, 0xc6, 0x59, 0x52, 0x1c, 0x44, 0x03, 0x77, 0x8e, 0xd1, 0xfe, 0xdb, 0x02, 0x78, 0x23, 0x8b, + 0x30, 0x76, 0xbb, 0xa9, 0xdd, 
0xaa, 0xc2, 0xcc, 0xa4, 0xc7, 0x32, 0xa9, 0x5d, 0x57, 0x66, 0xc5, + 0x48, 0x93, 0x33, 0xc7, 0x4a, 0x53, 0xb3, 0x74, 0x9c, 0x95, 0x6e, 0xc6, 0x75, 0x58, 0x97, 0xc1, + 0xce, 0x7b, 0x2a, 0x9a, 0x74, 0xae, 0xd1, 0xc0, 0x9d, 0xf3, 0x93, 0x63, 0xa4, 0x5e, 0x4a, 0xf3, + 0x8c, 0x34, 0xf9, 0xab, 0x60, 0x67, 0x22, 0xeb, 0xd9, 0x50, 0x4b, 0x04, 0xda, 0xff, 0xd8, 0x50, + 0xc3, 0x7c, 0xfa, 0xc8, 0x63, 0x11, 0x15, 0xe8, 0x0e, 0x2c, 0x0f, 0x7e, 0x26, 0x5e, 0xa0, 0x56, + 0x6a, 0xef, 0x5d, 0xf9, 0x40, 0xfd, 0x6b, 0x74, 0xf7, 0xbe, 0x84, 0xf6, 0x0b, 0x58, 0x73, 0xd0, + 0xf7, 0x00, 0x5e, 0x10, 0x51, 0xc1, 0x28, 0x31, 0xab, 0xb6, 0xf7, 0x3a, 0x27, 0x2b, 0x3c, 0x4e, + 0xf0, 0xfd, 0x02, 0xce, 0xb0, 0xd1, 0x77, 0x50, 0x1b, 0xf0, 0xc0, 0xf5, 0x22, 0x8f, 0x07, 0x2a, + 0x19, 0xf6, 0xde, 0xa7, 0xa7, 0x04, 0x13, 0xc3, 0xfb, 0x05, 0x9c, 0x72, 0xd1, 0x06, 0x94, 0x42, + 0x2f, 0x78, 0xd7, 0x6c, 0xec, 0x58, 0x9d, 0x6a, 0xbf, 0x80, 0xd5, 0x08, 0x75, 0xa0, 0x3e, 0x26, + 0x61, 0xe8, 0x10, 0xc6, 0x9c, 0xa1, 0xe2, 0x37, 0xd7, 0x0c, 0x60, 0x55, 0x1a, 0xf6, 0x19, 0x33, + 0x19, 0xb9, 0x06, 0x8d, 0x43, 0xc6, 0x07, 0xef, 0xb2, 0x50, 0x64, 0xa0, 0xe7, 0x94, 0x25, 0xc5, + 0xde, 0x80, 0x0d, 0xd3, 0x5d, 0x1c, 0x41, 0x47, 0xf4, 0x7d, 0x8c, 0x2f, 0x99, 0x02, 0x58, 0xd3, + 0xbd, 0x06, 0x4b, 0x9b, 0xa1, 0x7c, 0x0e, 0x72, 0xd2, 0x09, 0x89, 0x3f, 0x66, 0x34, 0xc6, 0x9f, + 0xdb, 0xb1, 0x3a, 0x56, 0xbf, 0x80, 0xeb, 0x82, 0x4f, 0x5f, 0x2b, 0x8b, 0x41, 0x7f, 0x0d, 0xcd, + 0x4c, 0x5b, 0xc9, 0x3b, 0x91, 0x07, 0xb0, 0xd6, 0x2f, 0xe0, 0xcd, 0xb4, 0xcb, 0x64, 0x1d, 0xdd, + 0x87, 0x4b, 0xfa, 0x26, 0xc8, 0x9c, 0xde, 0x1c, 0xbf, 0x6c, 0x82, 0x6c, 0x69, 0x58, 0x72, 0x86, + 0xb3, 0x22, 0xaf, 0x60, 0xdd, 0x88, 0xa8, 0x36, 0x19, 0x53, 0x2b, 0x6a, 0x7f, 0x2e, 0x9f, 0x70, + 0x0b, 0x49, 0xb4, 0x4c, 0xc0, 0x20, 0x1d, 0x1a, 0xc9, 0xb7, 0xb0, 0x95, 0x1e, 0xd4, 0x9c, 0x6a, + 0x55, 0xa9, 0xb6, 0x17, 0xa9, 0xe6, 0xdb, 0x44, 0xbf, 0x80, 0x37, 0xa2, 0xdc, 0x8c, 0xd1, 0xee, + 0x02, 0xd2, 0xa7, 0x24, 0xb7, 0xd0, 0x9a, 0x59, 0x68, 0x43, 0xd9, 0xb2, 0xcb, 0x7b, 0x9e, 0xe0, + 0xb3, 0x71, 0xd4, 0x55, 0x1c, 0xff, 0x5b, 0x14, 0x47, 0xda, 0x33, 0x52, 0xbd, 0x8c, 0xff, 0x6f, + 0xe1, 0x82, 0xba, 0x23, 0x9d, 0xb1, 0x4c, 0x36, 0x9f, 0x3a, 0x7c, 0x38, 0x0c, 0x69, 0x14, 0x0b, + 0xc3, 0x8e, 0xd5, 0x59, 0x96, 0x8d, 0x5a, 0x81, 0x5e, 0x52, 0x81, 0xf9, 0xf4, 0x85, 0x42, 0x18, + 0xfe, 0x5d, 0x68, 0xe5, 0xf9, 0xcc, 0xf3, 0xbd, 0x84, 0x6e, 0x1b, 0xfa, 0x56, 0x86, 0xfe, 0x54, + 0x02, 0x0c, 0xbb, 0x07, 0x97, 0x52, 0xb6, 0xd9, 0xb6, 0x9c, 0xc0, 0x8a, 0x11, 0x38, 0x1f, 0x0b, + 0xe8, 0xcd, 0xca, 0x6a, 0xdc, 0x86, 0xed, 0x30, 0x12, 0xde, 0xd8, 0x74, 0x9b, 0x48, 0x90, 0x20, + 0x1c, 0x72, 0xe1, 0x53, 0xd1, 0x5c, 0x35, 0x87, 0x60, 0x53, 0x01, 0x54, 0x26, 0x0e, 0x52, 0xb3, + 0x64, 0x92, 0xf1, 0x98, 0xcd, 0x1c, 0x75, 0x8b, 0xe7, 0x98, 0xeb, 0x71, 0xa5, 0x2a, 0xc0, 0x53, + 0x69, 0xcf, 0x30, 0x5b, 0xf7, 0x60, 0x59, 0x35, 0x16, 0xf4, 0x15, 0x54, 0x74, 0xa4, 0xfa, 0xae, + 0xb6, 0xf7, 0x2e, 0x9d, 0xd8, 0x01, 0x70, 0x8c, 0x6e, 0x3d, 0x04, 0x48, 0x1b, 0xcb, 0x7f, 0x97, + 0xf9, 0xd3, 0x82, 0x5a, 0xd2, 0x55, 0x50, 0x1f, 0x1a, 0x63, 0x41, 0x5d, 0x6f, 0x40, 0xa2, 0xa4, + 0x34, 0x74, 0x97, 0x3c, 0x45, 0xaf, 0x9e, 0xd0, 0x92, 0xb2, 0xb0, 0x23, 0x31, 0x49, 0x44, 0x96, + 0xce, 0x22, 0x02, 0x92, 0x61, 0xf8, 0xf7, 0x60, 0x65, 0x48, 0x58, 0x98, 0x08, 0x14, 0xcf, 0x22, + 0x60, 0x2b, 0x8a, 0x1e, 0xf4, 0xaa, 0x50, 0xd6, 0xdc, 0xf6, 0x5f, 0xcb, 0x50, 0x7d, 0x36, 0x89, + 0x88, 0x5a, 0xe2, 0x3e, 0x54, 0x65, 0x79, 0xca, 0x72, 0x30, 0x4b, 0xfb, 0xff, 0x22, 0xd1, 0x18, + 0xdf, 0x7d, 0x4d, 0x23, 0xf9, 0xf4, 0xeb, 0x17, 0x70, 
0x25, 0xd4, 0x9f, 0xe8, 0x47, 0x40, 0x2e, + 0x65, 0x54, 0xa6, 0x48, 0x70, 0xdf, 0x94, 0x9d, 0x59, 0xe2, 0x17, 0x27, 0x8a, 0x3d, 0x50, 0xb4, + 0x47, 0x82, 0xfb, 0xba, 0x0c, 0xe5, 0x89, 0x72, 0xe7, 0xe6, 0xe6, 0xe5, 0x75, 0xab, 0x33, 0x09, + 0x38, 0xab, 0xbc, 0x7e, 0x59, 0xe7, 0xe5, 0xcd, 0x6b, 0xfb, 0x00, 0xea, 0x59, 0x79, 0xc1, 0xa7, + 0xaa, 0x77, 0xdb, 0x7b, 0xd7, 0xce, 0xa8, 0x8d, 0xf9, 0x54, 0x5e, 0x21, 0x6e, 0x76, 0xa2, 0xf5, + 0xab, 0x05, 0x15, 0x93, 0xaa, 0xd3, 0x1f, 0x86, 0x57, 0xa1, 0x31, 0xdf, 0xa7, 0xcd, 0x43, 0xb7, + 0x3e, 0xd7, 0x98, 0x17, 0xbe, 0xb8, 0x8b, 0xa7, 0xbc, 0xb8, 0x4b, 0x99, 0x17, 0x77, 0xeb, 0x37, + 0x0b, 0x1a, 0xf3, 0x69, 0xff, 0xa8, 0x11, 0xee, 0x03, 0xc8, 0x48, 0x74, 0x3f, 0x35, 0xdb, 0x74, + 0x86, 0x86, 0x8e, 0x6b, 0x92, 0xa5, 0x3e, 0x5b, 0x37, 0xb3, 0x21, 0x9a, 0x6d, 0x3a, 0x2d, 0xc4, + 0x56, 0x1d, 0x56, 0x73, 0x7b, 0xd2, 0x03, 0xa8, 0xfa, 0x66, 0xb7, 0xda, 0xbf, 0x5b, 0xb0, 0x8e, + 0x29, 0x71, 0x9f, 0x71, 0xd7, 0x1b, 0xce, 0x7e, 0x10, 0x5e, 0x44, 0xf1, 0x84, 0xd1, 0x8f, 0xba, + 0xf0, 0x2b, 0xb0, 0x42, 0xc6, 0xe3, 0xe4, 0x95, 0x95, 0xbc, 0xc9, 0x6d, 0x3d, 0xab, 0xba, 0x25, + 0xfa, 0x0c, 0x1a, 0x5e, 0x30, 0x10, 0xd4, 0xa7, 0x41, 0xe4, 0x10, 0x9f, 0x4f, 0x82, 0x48, 0xed, + 0x4f, 0x51, 0x5e, 0xfd, 0x89, 0x65, 0x5f, 0x19, 0x7a, 0x65, 0x28, 0x89, 0x09, 0xa3, 0x3d, 0x0f, + 0xb6, 0x06, 0xdc, 0x5f, 0x90, 0xc3, 0xde, 0x5a, 0xcf, 0x0c, 0x1e, 0x90, 0x88, 0xbc, 0x94, 0x3f, + 0xab, 0x2f, 0xad, 0xb7, 0xdf, 0x18, 0xe0, 0x88, 0x33, 0x12, 0x8c, 0xba, 0x5c, 0x8c, 0x76, 0x47, + 0x34, 0x50, 0xbf, 0xb2, 0xbb, 0xda, 0x44, 0xc6, 0x5e, 0x98, 0xfd, 0xeb, 0xbd, 0x13, 0x7f, 0x1f, + 0x96, 0x15, 0xec, 0xe6, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x54, 0x66, 0x7c, 0x1b, 0x0f, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service.pb.go new file mode 100644 index 0000000..e21abb3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service.pb.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/v1/bigtable_service.proto + +package bigtable // import "google.golang.org/genproto/googleapis/bigtable/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableServiceClient is the client API for BigtableService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type BigtableServiceClient interface { + // Streams back the contents of all requested rows, optionally applying + // the same Reader filter to each. Depending on their size, rows may be + // broken up across multiple responses, but atomicity of each row will still + // be preserved. + ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigtableService_ReadRowsClient, error) + // Returns a sample of row keys in the table. The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + SampleRowKeys(ctx context.Context, in *SampleRowKeysRequest, opts ...grpc.CallOption) (BigtableService_SampleRowKeysClient, error) + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by 'mutation'. + MutateRow(ctx context.Context, in *MutateRowRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + MutateRows(ctx context.Context, in *MutateRowsRequest, opts ...grpc.CallOption) (*MutateRowsResponse, error) + // Mutates a row atomically based on the output of a predicate Reader filter. + CheckAndMutateRow(ctx context.Context, in *CheckAndMutateRowRequest, opts ...grpc.CallOption) (*CheckAndMutateRowResponse, error) + // Modifies a row atomically, reading the latest existing timestamp/value from + // the specified columns and writing a new value at + // max(existing timestamp, current server time) based on pre-defined + // read/modify/write rules. Returns the new contents of all modified cells. + ReadModifyWriteRow(ctx context.Context, in *ReadModifyWriteRowRequest, opts ...grpc.CallOption) (*Row, error) +} + +type bigtableServiceClient struct { + cc *grpc.ClientConn +} + +func NewBigtableServiceClient(cc *grpc.ClientConn) BigtableServiceClient { + return &bigtableServiceClient{cc} +} + +func (c *bigtableServiceClient) ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigtableService_ReadRowsClient, error) { + stream, err := c.cc.NewStream(ctx, &_BigtableService_serviceDesc.Streams[0], "/google.bigtable.v1.BigtableService/ReadRows", opts...) + if err != nil { + return nil, err + } + x := &bigtableServiceReadRowsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type BigtableService_ReadRowsClient interface { + Recv() (*ReadRowsResponse, error) + grpc.ClientStream +} + +type bigtableServiceReadRowsClient struct { + grpc.ClientStream +} + +func (x *bigtableServiceReadRowsClient) Recv() (*ReadRowsResponse, error) { + m := new(ReadRowsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigtableServiceClient) SampleRowKeys(ctx context.Context, in *SampleRowKeysRequest, opts ...grpc.CallOption) (BigtableService_SampleRowKeysClient, error) { + stream, err := c.cc.NewStream(ctx, &_BigtableService_serviceDesc.Streams[1], "/google.bigtable.v1.BigtableService/SampleRowKeys", opts...) 
+ if err != nil { + return nil, err + } + x := &bigtableServiceSampleRowKeysClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type BigtableService_SampleRowKeysClient interface { + Recv() (*SampleRowKeysResponse, error) + grpc.ClientStream +} + +type bigtableServiceSampleRowKeysClient struct { + grpc.ClientStream +} + +func (x *bigtableServiceSampleRowKeysClient) Recv() (*SampleRowKeysResponse, error) { + m := new(SampleRowKeysResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigtableServiceClient) MutateRow(ctx context.Context, in *MutateRowRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.bigtable.v1.BigtableService/MutateRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableServiceClient) MutateRows(ctx context.Context, in *MutateRowsRequest, opts ...grpc.CallOption) (*MutateRowsResponse, error) { + out := new(MutateRowsResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.v1.BigtableService/MutateRows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableServiceClient) CheckAndMutateRow(ctx context.Context, in *CheckAndMutateRowRequest, opts ...grpc.CallOption) (*CheckAndMutateRowResponse, error) { + out := new(CheckAndMutateRowResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.v1.BigtableService/CheckAndMutateRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableServiceClient) ReadModifyWriteRow(ctx context.Context, in *ReadModifyWriteRowRequest, opts ...grpc.CallOption) (*Row, error) { + out := new(Row) + err := c.cc.Invoke(ctx, "/google.bigtable.v1.BigtableService/ReadModifyWriteRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableServiceServer is the server API for BigtableService service. +type BigtableServiceServer interface { + // Streams back the contents of all requested rows, optionally applying + // the same Reader filter to each. Depending on their size, rows may be + // broken up across multiple responses, but atomicity of each row will still + // be preserved. + ReadRows(*ReadRowsRequest, BigtableService_ReadRowsServer) error + // Returns a sample of row keys in the table. The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + SampleRowKeys(*SampleRowKeysRequest, BigtableService_SampleRowKeysServer) error + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by 'mutation'. + MutateRow(context.Context, *MutateRowRequest) (*empty.Empty, error) + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + MutateRows(context.Context, *MutateRowsRequest) (*MutateRowsResponse, error) + // Mutates a row atomically based on the output of a predicate Reader filter. 
+ CheckAndMutateRow(context.Context, *CheckAndMutateRowRequest) (*CheckAndMutateRowResponse, error) + // Modifies a row atomically, reading the latest existing timestamp/value from + // the specified columns and writing a new value at + // max(existing timestamp, current server time) based on pre-defined + // read/modify/write rules. Returns the new contents of all modified cells. + ReadModifyWriteRow(context.Context, *ReadModifyWriteRowRequest) (*Row, error) +} + +func RegisterBigtableServiceServer(s *grpc.Server, srv BigtableServiceServer) { + s.RegisterService(&_BigtableService_serviceDesc, srv) +} + +func _BigtableService_ReadRows_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadRowsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigtableServiceServer).ReadRows(m, &bigtableServiceReadRowsServer{stream}) +} + +type BigtableService_ReadRowsServer interface { + Send(*ReadRowsResponse) error + grpc.ServerStream +} + +type bigtableServiceReadRowsServer struct { + grpc.ServerStream +} + +func (x *bigtableServiceReadRowsServer) Send(m *ReadRowsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _BigtableService_SampleRowKeys_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SampleRowKeysRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigtableServiceServer).SampleRowKeys(m, &bigtableServiceSampleRowKeysServer{stream}) +} + +type BigtableService_SampleRowKeysServer interface { + Send(*SampleRowKeysResponse) error + grpc.ServerStream +} + +type bigtableServiceSampleRowKeysServer struct { + grpc.ServerStream +} + +func (x *bigtableServiceSampleRowKeysServer) Send(m *SampleRowKeysResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _BigtableService_MutateRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServiceServer).MutateRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v1.BigtableService/MutateRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServiceServer).MutateRow(ctx, req.(*MutateRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableService_MutateRows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateRowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServiceServer).MutateRows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v1.BigtableService/MutateRows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServiceServer).MutateRows(ctx, req.(*MutateRowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableService_CheckAndMutateRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckAndMutateRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServiceServer).CheckAndMutateRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + 
FullMethod: "/google.bigtable.v1.BigtableService/CheckAndMutateRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServiceServer).CheckAndMutateRow(ctx, req.(*CheckAndMutateRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigtableService_ReadModifyWriteRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadModifyWriteRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServiceServer).ReadModifyWriteRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v1.BigtableService/ReadModifyWriteRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServiceServer).ReadModifyWriteRow(ctx, req.(*ReadModifyWriteRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigtableService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.v1.BigtableService", + HandlerType: (*BigtableServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "MutateRow", + Handler: _BigtableService_MutateRow_Handler, + }, + { + MethodName: "MutateRows", + Handler: _BigtableService_MutateRows_Handler, + }, + { + MethodName: "CheckAndMutateRow", + Handler: _BigtableService_CheckAndMutateRow_Handler, + }, + { + MethodName: "ReadModifyWriteRow", + Handler: _BigtableService_ReadModifyWriteRow_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ReadRows", + Handler: _BigtableService_ReadRows_Handler, + ServerStreams: true, + }, + { + StreamName: "SampleRowKeys", + Handler: _BigtableService_SampleRowKeys_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/bigtable/v1/bigtable_service.proto", +} + +func init() { + proto.RegisterFile("google/bigtable/v1/bigtable_service.proto", fileDescriptor_bigtable_service_77c4f27ba0e27769) +} + +var fileDescriptor_bigtable_service_77c4f27ba0e27769 = []byte{ + // 521 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0xcd, 0x6e, 0xd4, 0x30, + 0x10, 0xc7, 0x65, 0x0e, 0xa8, 0x58, 0x42, 0x08, 0x4b, 0x14, 0x69, 0xe1, 0x14, 0xa0, 0xa2, 0x11, + 0x8d, 0xdb, 0x72, 0x0b, 0xe2, 0xd0, 0x45, 0x50, 0x21, 0x58, 0x51, 0xa5, 0xe2, 0x43, 0xe5, 0xb0, + 0x78, 0x93, 0x69, 0x08, 0x4d, 0xe2, 0x60, 0x7b, 0x37, 0x5a, 0xaa, 0x5e, 0x38, 0x71, 0xe7, 0x11, + 0x10, 0x17, 0x5e, 0x80, 0x23, 0xef, 0x00, 0x67, 0x6e, 0x3c, 0x08, 0xb2, 0x63, 0x2f, 0x2c, 0x0d, + 0xcb, 0x8a, 0xee, 0x29, 0x8e, 0xe6, 0x3f, 0x33, 0xbf, 0xff, 0xf8, 0x03, 0xaf, 0xa6, 0x9c, 0xa7, + 0x39, 0xd0, 0x41, 0x96, 0x2a, 0x36, 0xc8, 0x81, 0x8e, 0x36, 0x26, 0xeb, 0xbe, 0x04, 0x31, 0xca, + 0x62, 0x08, 0x2a, 0xc1, 0x15, 0x27, 0xa4, 0x91, 0x06, 0x2e, 0x1c, 0x8c, 0x36, 0x3a, 0x97, 0x6d, + 0x3a, 0xab, 0x32, 0xca, 0xca, 0x92, 0x2b, 0xa6, 0x32, 0x5e, 0xca, 0x26, 0xa3, 0xb3, 0x32, 0xab, + 0x78, 0xc2, 0x14, 0xb3, 0xba, 0xcd, 0x39, 0x20, 0xfa, 0x05, 0x48, 0xc9, 0x52, 0x70, 0xb5, 0x2f, + 0xd9, 0x1c, 0xf3, 0x37, 0x18, 0xee, 0x53, 0x28, 0x2a, 0x35, 0x6e, 0x82, 0x9b, 0xdf, 0x97, 0xf0, + 0xb9, 0xae, 0x2d, 0xb0, 0xdb, 0xe4, 0x93, 0x8f, 0x08, 0x2f, 0x45, 0xc0, 0x92, 0x88, 0xd7, 0x92, + 0x5c, 0x09, 0x8e, 0x9b, 0x09, 0x5c, 0x34, 0x82, 0xd7, 0x43, 0x90, 0xaa, 0x73, 0x75, 0xb6, 0x48, + 0x56, 0xbc, 0x94, 0xe0, 0x3d, 0x7c, 0xfb, 0xed, 0xc7, 0xfb, 0x53, 0xf7, 0xbc, 0x2d, 0x4d, 0x7d, + 0xd8, 0x30, 0x97, 0xac, 0x80, 0xdb, 
0x95, 0xe0, 0xaf, 0x20, 0x56, 0x92, 0xfa, 0xf4, 0x0d, 0x2f, + 0x41, 0x7f, 0xe3, 0x7c, 0x28, 0x15, 0x08, 0xbd, 0x34, 0x42, 0x49, 0xfd, 0x23, 0x2a, 0x78, 0x2d, + 0x43, 0x01, 0x2c, 0x09, 0x91, 0xbf, 0x8e, 0xc8, 0x67, 0x84, 0xcf, 0xee, 0xb2, 0xa2, 0xca, 0x21, + 0xe2, 0xf5, 0x03, 0x18, 0x4b, 0x72, 0xbd, 0x8d, 0x63, 0x4a, 0xe2, 0x88, 0x57, 0xe7, 0x50, 0x5a, + 0xec, 0x47, 0x06, 0xfb, 0x3e, 0xd9, 0x3e, 0x11, 0xb6, 0x34, 0xb5, 0x75, 0xe1, 0x75, 0x44, 0x3e, + 0x20, 0x7c, 0xa6, 0x37, 0x54, 0x4c, 0xe9, 0x66, 0xa4, 0x75, 0x7a, 0x93, 0xb0, 0x23, 0x5e, 0x76, + 0x2a, 0xb7, 0x8f, 0xc1, 0x5d, 0xbd, 0x8f, 0xde, 0x33, 0x83, 0x17, 0x79, 0xbd, 0x93, 0xe0, 0xd1, + 0x43, 0xc1, 0xeb, 0xfe, 0x01, 0x8c, 0x8f, 0xc2, 0xc2, 0x34, 0x0e, 0x91, 0x4f, 0x3e, 0x21, 0x8c, + 0x27, 0x18, 0x92, 0x5c, 0x9b, 0x89, 0x39, 0x99, 0xec, 0xca, 0xbf, 0x64, 0x76, 0xac, 0x3d, 0xc3, + 0xbd, 0xed, 0x75, 0xff, 0x93, 0xdb, 0x82, 0xea, 0x9a, 0x1a, 0xf6, 0x2b, 0xc2, 0xe7, 0xef, 0xbc, + 0x84, 0xf8, 0x60, 0xab, 0x4c, 0x7e, 0x8d, 0xf6, 0x46, 0x1b, 0xcc, 0x31, 0x99, 0x43, 0x5f, 0x9b, + 0x53, 0x6d, 0x1d, 0xbc, 0x30, 0x0e, 0xf6, 0xbc, 0xc7, 0x0b, 0x9a, 0x7c, 0x3c, 0xd5, 0x49, 0x9b, + 0xfa, 0x82, 0x30, 0xd1, 0xd7, 0xa8, 0xc7, 0x93, 0x6c, 0x7f, 0xfc, 0x54, 0x64, 0x8d, 0xab, 0xb5, + 0xbf, 0x5d, 0xb7, 0x69, 0x9d, 0xb3, 0x75, 0xb1, 0x55, 0xce, 0x6b, 0x8f, 0x19, 0x03, 0xcf, 0xbd, + 0x27, 0x0b, 0x32, 0x20, 0xa6, 0x11, 0x42, 0xe4, 0x77, 0x2b, 0xbc, 0x1c, 0xf3, 0xa2, 0x05, 0xa0, + 0x7b, 0xe1, 0x8f, 0x67, 0x47, 0xee, 0xe8, 0x73, 0xbd, 0x83, 0xf6, 0x42, 0x2b, 0x4e, 0x79, 0xce, + 0xca, 0x34, 0xe0, 0x22, 0xa5, 0x29, 0x94, 0xe6, 0xd4, 0xd3, 0x26, 0xc4, 0xaa, 0x4c, 0xfe, 0xfe, + 0x04, 0xde, 0x72, 0xeb, 0x77, 0x08, 0x0d, 0x4e, 0x1b, 0xe5, 0xcd, 0x9f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x4c, 0x27, 0x6e, 0x9a, 0xb0, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service_messages.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service_messages.pb.go new file mode 100644 index 0000000..d65b29f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/v1/bigtable_service_messages.pb.go @@ -0,0 +1,1039 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/v1/bigtable_service_messages.proto + +package bigtable // import "google.golang.org/genproto/googleapis/bigtable/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for BigtableServer.ReadRows. +type ReadRowsRequest struct { + // The unique name of the table from which to read. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // If neither row_key nor row_range is set, reads from all rows. + // + // Types that are valid to be assigned to Target: + // *ReadRowsRequest_RowKey + // *ReadRowsRequest_RowRange + // *ReadRowsRequest_RowSet + Target isReadRowsRequest_Target `protobuf_oneof:"target"` + // The filter to apply to the contents of the specified row(s). 
If unset, + // reads the entire table. + Filter *RowFilter `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // By default, rows are read sequentially, producing results which are + // guaranteed to arrive in increasing row order. Setting + // "allow_row_interleaving" to true allows multiple rows to be interleaved in + // the response stream, which increases throughput but breaks this guarantee, + // and may force the client to use more memory to buffer partially-received + // rows. Cannot be set to true when specifying "num_rows_limit". + AllowRowInterleaving bool `protobuf:"varint,6,opt,name=allow_row_interleaving,json=allowRowInterleaving,proto3" json:"allow_row_interleaving,omitempty"` + // The read will terminate after committing to N rows' worth of results. The + // default (zero) is to return all results. + // Note that "allow_row_interleaving" cannot be set to true when this is set. + NumRowsLimit int64 `protobuf:"varint,7,opt,name=num_rows_limit,json=numRowsLimit,proto3" json:"num_rows_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsRequest) Reset() { *m = ReadRowsRequest{} } +func (m *ReadRowsRequest) String() string { return proto.CompactTextString(m) } +func (*ReadRowsRequest) ProtoMessage() {} +func (*ReadRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{0} +} +func (m *ReadRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsRequest.Unmarshal(m, b) +} +func (m *ReadRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsRequest.Marshal(b, m, deterministic) +} +func (dst *ReadRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsRequest.Merge(dst, src) +} +func (m *ReadRowsRequest) XXX_Size() int { + return xxx_messageInfo_ReadRowsRequest.Size(m) +} +func (m *ReadRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsRequest proto.InternalMessageInfo + +func (m *ReadRowsRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +type isReadRowsRequest_Target interface { + isReadRowsRequest_Target() +} + +type ReadRowsRequest_RowKey struct { + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3,oneof"` +} + +type ReadRowsRequest_RowRange struct { + RowRange *RowRange `protobuf:"bytes,3,opt,name=row_range,json=rowRange,proto3,oneof"` +} + +type ReadRowsRequest_RowSet struct { + RowSet *RowSet `protobuf:"bytes,8,opt,name=row_set,json=rowSet,proto3,oneof"` +} + +func (*ReadRowsRequest_RowKey) isReadRowsRequest_Target() {} + +func (*ReadRowsRequest_RowRange) isReadRowsRequest_Target() {} + +func (*ReadRowsRequest_RowSet) isReadRowsRequest_Target() {} + +func (m *ReadRowsRequest) GetTarget() isReadRowsRequest_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *ReadRowsRequest) GetRowKey() []byte { + if x, ok := m.GetTarget().(*ReadRowsRequest_RowKey); ok { + return x.RowKey + } + return nil +} + +func (m *ReadRowsRequest) GetRowRange() *RowRange { + if x, ok := m.GetTarget().(*ReadRowsRequest_RowRange); ok { + return x.RowRange + } + return nil +} + +func (m *ReadRowsRequest) GetRowSet() *RowSet { + if x, ok := m.GetTarget().(*ReadRowsRequest_RowSet); ok { + return x.RowSet + } + return nil +} + +func (m *ReadRowsRequest) GetFilter() *RowFilter { + if m != nil { + 
return m.Filter + } + return nil +} + +func (m *ReadRowsRequest) GetAllowRowInterleaving() bool { + if m != nil { + return m.AllowRowInterleaving + } + return false +} + +func (m *ReadRowsRequest) GetNumRowsLimit() int64 { + if m != nil { + return m.NumRowsLimit + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ReadRowsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadRowsRequest_OneofMarshaler, _ReadRowsRequest_OneofUnmarshaler, _ReadRowsRequest_OneofSizer, []interface{}{ + (*ReadRowsRequest_RowKey)(nil), + (*ReadRowsRequest_RowRange)(nil), + (*ReadRowsRequest_RowSet)(nil), + } +} + +func _ReadRowsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadRowsRequest) + // target + switch x := m.Target.(type) { + case *ReadRowsRequest_RowKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RowKey) + case *ReadRowsRequest_RowRange: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RowRange); err != nil { + return err + } + case *ReadRowsRequest_RowSet: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RowSet); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ReadRowsRequest.Target has unexpected type %T", x) + } + return nil +} + +func _ReadRowsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadRowsRequest) + switch tag { + case 2: // target.row_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Target = &ReadRowsRequest_RowKey{x} + return true, err + case 3: // target.row_range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowRange) + err := b.DecodeMessage(msg) + m.Target = &ReadRowsRequest_RowRange{msg} + return true, err + case 8: // target.row_set + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowSet) + err := b.DecodeMessage(msg) + m.Target = &ReadRowsRequest_RowSet{msg} + return true, err + default: + return false, nil + } +} + +func _ReadRowsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadRowsRequest) + // target + switch x := m.Target.(type) { + case *ReadRowsRequest_RowKey: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RowKey))) + n += len(x.RowKey) + case *ReadRowsRequest_RowRange: + s := proto.Size(x.RowRange) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ReadRowsRequest_RowSet: + s := proto.Size(x.RowSet) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response message for BigtableService.ReadRows. +type ReadRowsResponse struct { + // The key of the row for which we're receiving data. + // Results will be received in increasing row key order, unless + // "allow_row_interleaving" was specified in the request. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // One or more chunks of the row specified by "row_key". 
+ Chunks []*ReadRowsResponse_Chunk `protobuf:"bytes,2,rep,name=chunks,proto3" json:"chunks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsResponse) Reset() { *m = ReadRowsResponse{} } +func (m *ReadRowsResponse) String() string { return proto.CompactTextString(m) } +func (*ReadRowsResponse) ProtoMessage() {} +func (*ReadRowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{1} +} +func (m *ReadRowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsResponse.Unmarshal(m, b) +} +func (m *ReadRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsResponse.Marshal(b, m, deterministic) +} +func (dst *ReadRowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsResponse.Merge(dst, src) +} +func (m *ReadRowsResponse) XXX_Size() int { + return xxx_messageInfo_ReadRowsResponse.Size(m) +} +func (m *ReadRowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsResponse proto.InternalMessageInfo + +func (m *ReadRowsResponse) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *ReadRowsResponse) GetChunks() []*ReadRowsResponse_Chunk { + if m != nil { + return m.Chunks + } + return nil +} + +// Specifies a piece of a row's contents returned as part of the read +// response stream. +type ReadRowsResponse_Chunk struct { + // Types that are valid to be assigned to Chunk: + // *ReadRowsResponse_Chunk_RowContents + // *ReadRowsResponse_Chunk_ResetRow + // *ReadRowsResponse_Chunk_CommitRow + Chunk isReadRowsResponse_Chunk_Chunk `protobuf_oneof:"chunk"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsResponse_Chunk) Reset() { *m = ReadRowsResponse_Chunk{} } +func (m *ReadRowsResponse_Chunk) String() string { return proto.CompactTextString(m) } +func (*ReadRowsResponse_Chunk) ProtoMessage() {} +func (*ReadRowsResponse_Chunk) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{1, 0} +} +func (m *ReadRowsResponse_Chunk) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsResponse_Chunk.Unmarshal(m, b) +} +func (m *ReadRowsResponse_Chunk) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsResponse_Chunk.Marshal(b, m, deterministic) +} +func (dst *ReadRowsResponse_Chunk) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsResponse_Chunk.Merge(dst, src) +} +func (m *ReadRowsResponse_Chunk) XXX_Size() int { + return xxx_messageInfo_ReadRowsResponse_Chunk.Size(m) +} +func (m *ReadRowsResponse_Chunk) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsResponse_Chunk.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsResponse_Chunk proto.InternalMessageInfo + +type isReadRowsResponse_Chunk_Chunk interface { + isReadRowsResponse_Chunk_Chunk() +} + +type ReadRowsResponse_Chunk_RowContents struct { + RowContents *Family `protobuf:"bytes,1,opt,name=row_contents,json=rowContents,proto3,oneof"` +} + +type ReadRowsResponse_Chunk_ResetRow struct { + ResetRow bool `protobuf:"varint,2,opt,name=reset_row,json=resetRow,proto3,oneof"` +} + +type ReadRowsResponse_Chunk_CommitRow struct { + CommitRow bool `protobuf:"varint,3,opt,name=commit_row,json=commitRow,proto3,oneof"` +} + +func 
(*ReadRowsResponse_Chunk_RowContents) isReadRowsResponse_Chunk_Chunk() {} + +func (*ReadRowsResponse_Chunk_ResetRow) isReadRowsResponse_Chunk_Chunk() {} + +func (*ReadRowsResponse_Chunk_CommitRow) isReadRowsResponse_Chunk_Chunk() {} + +func (m *ReadRowsResponse_Chunk) GetChunk() isReadRowsResponse_Chunk_Chunk { + if m != nil { + return m.Chunk + } + return nil +} + +func (m *ReadRowsResponse_Chunk) GetRowContents() *Family { + if x, ok := m.GetChunk().(*ReadRowsResponse_Chunk_RowContents); ok { + return x.RowContents + } + return nil +} + +func (m *ReadRowsResponse_Chunk) GetResetRow() bool { + if x, ok := m.GetChunk().(*ReadRowsResponse_Chunk_ResetRow); ok { + return x.ResetRow + } + return false +} + +func (m *ReadRowsResponse_Chunk) GetCommitRow() bool { + if x, ok := m.GetChunk().(*ReadRowsResponse_Chunk_CommitRow); ok { + return x.CommitRow + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ReadRowsResponse_Chunk) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadRowsResponse_Chunk_OneofMarshaler, _ReadRowsResponse_Chunk_OneofUnmarshaler, _ReadRowsResponse_Chunk_OneofSizer, []interface{}{ + (*ReadRowsResponse_Chunk_RowContents)(nil), + (*ReadRowsResponse_Chunk_ResetRow)(nil), + (*ReadRowsResponse_Chunk_CommitRow)(nil), + } +} + +func _ReadRowsResponse_Chunk_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadRowsResponse_Chunk) + // chunk + switch x := m.Chunk.(type) { + case *ReadRowsResponse_Chunk_RowContents: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RowContents); err != nil { + return err + } + case *ReadRowsResponse_Chunk_ResetRow: + t := uint64(0) + if x.ResetRow { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *ReadRowsResponse_Chunk_CommitRow: + t := uint64(0) + if x.CommitRow { + t = 1 + } + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("ReadRowsResponse_Chunk.Chunk has unexpected type %T", x) + } + return nil +} + +func _ReadRowsResponse_Chunk_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadRowsResponse_Chunk) + switch tag { + case 1: // chunk.row_contents + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Family) + err := b.DecodeMessage(msg) + m.Chunk = &ReadRowsResponse_Chunk_RowContents{msg} + return true, err + case 2: // chunk.reset_row + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Chunk = &ReadRowsResponse_Chunk_ResetRow{x != 0} + return true, err + case 3: // chunk.commit_row + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Chunk = &ReadRowsResponse_Chunk_CommitRow{x != 0} + return true, err + default: + return false, nil + } +} + +func _ReadRowsResponse_Chunk_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadRowsResponse_Chunk) + // chunk + switch x := m.Chunk.(type) { + case *ReadRowsResponse_Chunk_RowContents: + s := proto.Size(x.RowContents) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ReadRowsResponse_Chunk_ResetRow: + n += 1 // tag and wire + n += 1 + case *ReadRowsResponse_Chunk_CommitRow: + n += 1 // tag and wire + n += 1 + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for BigtableService.SampleRowKeys. +type SampleRowKeysRequest struct { + // The unique name of the table from which to sample row keys. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SampleRowKeysRequest) Reset() { *m = SampleRowKeysRequest{} } +func (m *SampleRowKeysRequest) String() string { return proto.CompactTextString(m) } +func (*SampleRowKeysRequest) ProtoMessage() {} +func (*SampleRowKeysRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{2} +} +func (m *SampleRowKeysRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SampleRowKeysRequest.Unmarshal(m, b) +} +func (m *SampleRowKeysRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SampleRowKeysRequest.Marshal(b, m, deterministic) +} +func (dst *SampleRowKeysRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SampleRowKeysRequest.Merge(dst, src) +} +func (m *SampleRowKeysRequest) XXX_Size() int { + return xxx_messageInfo_SampleRowKeysRequest.Size(m) +} +func (m *SampleRowKeysRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SampleRowKeysRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SampleRowKeysRequest proto.InternalMessageInfo + +func (m *SampleRowKeysRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +// Response message for BigtableService.SampleRowKeys. +type SampleRowKeysResponse struct { + // Sorted streamed sequence of sample row keys in the table. The table might + // have contents before the first row key in the list and after the last one, + // but a key containing the empty string indicates "end of table" and will be + // the last response given, if present. + // Note that row keys in this list may not have ever been written to or read + // from, and users should therefore not make any assumptions about the row key + // structure that are specific to their use case. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Approximate total storage space used by all rows in the table which precede + // "row_key". Buffering the contents of all rows between two subsequent + // samples would require space roughly equal to the difference in their + // "offset_bytes" fields. 
+ OffsetBytes int64 `protobuf:"varint,2,opt,name=offset_bytes,json=offsetBytes,proto3" json:"offset_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SampleRowKeysResponse) Reset() { *m = SampleRowKeysResponse{} } +func (m *SampleRowKeysResponse) String() string { return proto.CompactTextString(m) } +func (*SampleRowKeysResponse) ProtoMessage() {} +func (*SampleRowKeysResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{3} +} +func (m *SampleRowKeysResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SampleRowKeysResponse.Unmarshal(m, b) +} +func (m *SampleRowKeysResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SampleRowKeysResponse.Marshal(b, m, deterministic) +} +func (dst *SampleRowKeysResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SampleRowKeysResponse.Merge(dst, src) +} +func (m *SampleRowKeysResponse) XXX_Size() int { + return xxx_messageInfo_SampleRowKeysResponse.Size(m) +} +func (m *SampleRowKeysResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SampleRowKeysResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SampleRowKeysResponse proto.InternalMessageInfo + +func (m *SampleRowKeysResponse) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *SampleRowKeysResponse) GetOffsetBytes() int64 { + if m != nil { + return m.OffsetBytes + } + return 0 +} + +// Request message for BigtableService.MutateRow. +type MutateRowRequest struct { + // The unique name of the table to which the mutation should be applied. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // The key of the row to which the mutation should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Changes to be atomically applied to the specified row. Entries are applied + // in order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry and at most 100000. 
+ Mutations []*Mutation `protobuf:"bytes,3,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowRequest) Reset() { *m = MutateRowRequest{} } +func (m *MutateRowRequest) String() string { return proto.CompactTextString(m) } +func (*MutateRowRequest) ProtoMessage() {} +func (*MutateRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{4} +} +func (m *MutateRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowRequest.Unmarshal(m, b) +} +func (m *MutateRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowRequest.Marshal(b, m, deterministic) +} +func (dst *MutateRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowRequest.Merge(dst, src) +} +func (m *MutateRowRequest) XXX_Size() int { + return xxx_messageInfo_MutateRowRequest.Size(m) +} +func (m *MutateRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowRequest proto.InternalMessageInfo + +func (m *MutateRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *MutateRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *MutateRowRequest) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// Request message for BigtableService.MutateRows. +type MutateRowsRequest struct { + // The unique name of the table to which the mutations should be applied. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // The row keys/mutations to be applied in bulk. + // Each entry is applied as an atomic mutation, but the entries may be + // applied in arbitrary order (even between entries for the same row). + // At least one entry must be specified, and in total the entries may + // contain at most 100000 mutations. 
+ Entries []*MutateRowsRequest_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsRequest) Reset() { *m = MutateRowsRequest{} } +func (m *MutateRowsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateRowsRequest) ProtoMessage() {} +func (*MutateRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{5} +} +func (m *MutateRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsRequest.Unmarshal(m, b) +} +func (m *MutateRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsRequest.Merge(dst, src) +} +func (m *MutateRowsRequest) XXX_Size() int { + return xxx_messageInfo_MutateRowsRequest.Size(m) +} +func (m *MutateRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsRequest proto.InternalMessageInfo + +func (m *MutateRowsRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *MutateRowsRequest) GetEntries() []*MutateRowsRequest_Entry { + if m != nil { + return m.Entries + } + return nil +} + +type MutateRowsRequest_Entry struct { + // The key of the row to which the `mutations` should be applied. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Changes to be atomically applied to the specified row. Mutations are + // applied in order, meaning that earlier mutations can be masked by + // later ones. + // At least one mutation must be specified. + Mutations []*Mutation `protobuf:"bytes,2,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsRequest_Entry) Reset() { *m = MutateRowsRequest_Entry{} } +func (m *MutateRowsRequest_Entry) String() string { return proto.CompactTextString(m) } +func (*MutateRowsRequest_Entry) ProtoMessage() {} +func (*MutateRowsRequest_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{5, 0} +} +func (m *MutateRowsRequest_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsRequest_Entry.Unmarshal(m, b) +} +func (m *MutateRowsRequest_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsRequest_Entry.Marshal(b, m, deterministic) +} +func (dst *MutateRowsRequest_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsRequest_Entry.Merge(dst, src) +} +func (m *MutateRowsRequest_Entry) XXX_Size() int { + return xxx_messageInfo_MutateRowsRequest_Entry.Size(m) +} +func (m *MutateRowsRequest_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsRequest_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsRequest_Entry proto.InternalMessageInfo + +func (m *MutateRowsRequest_Entry) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *MutateRowsRequest_Entry) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// Response message for BigtableService.MutateRows. 
+type MutateRowsResponse struct { + // The results for each Entry from the request, presented in the order + // in which the entries were originally given. + // Depending on how requests are batched during execution, it is possible + // for one Entry to fail due to an error with another Entry. In the event + // that this occurs, the same error will be reported for both entries. + Statuses []*status.Status `protobuf:"bytes,1,rep,name=statuses,proto3" json:"statuses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsResponse) Reset() { *m = MutateRowsResponse{} } +func (m *MutateRowsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateRowsResponse) ProtoMessage() {} +func (*MutateRowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{6} +} +func (m *MutateRowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsResponse.Unmarshal(m, b) +} +func (m *MutateRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateRowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsResponse.Merge(dst, src) +} +func (m *MutateRowsResponse) XXX_Size() int { + return xxx_messageInfo_MutateRowsResponse.Size(m) +} +func (m *MutateRowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsResponse proto.InternalMessageInfo + +func (m *MutateRowsResponse) GetStatuses() []*status.Status { + if m != nil { + return m.Statuses + } + return nil +} + +// Request message for BigtableService.CheckAndMutateRowRequest +type CheckAndMutateRowRequest struct { + // The unique name of the table to which the conditional mutation should be + // applied. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // The key of the row to which the conditional mutation should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // The filter to be applied to the contents of the specified row. Depending + // on whether or not any results are yielded, either "true_mutations" or + // "false_mutations" will be executed. If unset, checks that the row contains + // any values at all. + PredicateFilter *RowFilter `protobuf:"bytes,6,opt,name=predicate_filter,json=predicateFilter,proto3" json:"predicate_filter,omitempty"` + // Changes to be atomically applied to the specified row if "predicate_filter" + // yields at least one cell when applied to "row_key". Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if "false_mutations" is empty, and at most + // 100000. + TrueMutations []*Mutation `protobuf:"bytes,4,rep,name=true_mutations,json=trueMutations,proto3" json:"true_mutations,omitempty"` + // Changes to be atomically applied to the specified row if "predicate_filter" + // does not yield any cells when applied to "row_key". Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if "true_mutations" is empty, and at most + // 100000. 
+ FalseMutations []*Mutation `protobuf:"bytes,5,rep,name=false_mutations,json=falseMutations,proto3" json:"false_mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckAndMutateRowRequest) Reset() { *m = CheckAndMutateRowRequest{} } +func (m *CheckAndMutateRowRequest) String() string { return proto.CompactTextString(m) } +func (*CheckAndMutateRowRequest) ProtoMessage() {} +func (*CheckAndMutateRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{7} +} +func (m *CheckAndMutateRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckAndMutateRowRequest.Unmarshal(m, b) +} +func (m *CheckAndMutateRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckAndMutateRowRequest.Marshal(b, m, deterministic) +} +func (dst *CheckAndMutateRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckAndMutateRowRequest.Merge(dst, src) +} +func (m *CheckAndMutateRowRequest) XXX_Size() int { + return xxx_messageInfo_CheckAndMutateRowRequest.Size(m) +} +func (m *CheckAndMutateRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckAndMutateRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckAndMutateRowRequest proto.InternalMessageInfo + +func (m *CheckAndMutateRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *CheckAndMutateRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *CheckAndMutateRowRequest) GetPredicateFilter() *RowFilter { + if m != nil { + return m.PredicateFilter + } + return nil +} + +func (m *CheckAndMutateRowRequest) GetTrueMutations() []*Mutation { + if m != nil { + return m.TrueMutations + } + return nil +} + +func (m *CheckAndMutateRowRequest) GetFalseMutations() []*Mutation { + if m != nil { + return m.FalseMutations + } + return nil +} + +// Response message for BigtableService.CheckAndMutateRowRequest. +type CheckAndMutateRowResponse struct { + // Whether or not the request's "predicate_filter" yielded any results for + // the specified row. 
+ PredicateMatched bool `protobuf:"varint,1,opt,name=predicate_matched,json=predicateMatched,proto3" json:"predicate_matched,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckAndMutateRowResponse) Reset() { *m = CheckAndMutateRowResponse{} } +func (m *CheckAndMutateRowResponse) String() string { return proto.CompactTextString(m) } +func (*CheckAndMutateRowResponse) ProtoMessage() {} +func (*CheckAndMutateRowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{8} +} +func (m *CheckAndMutateRowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckAndMutateRowResponse.Unmarshal(m, b) +} +func (m *CheckAndMutateRowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckAndMutateRowResponse.Marshal(b, m, deterministic) +} +func (dst *CheckAndMutateRowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckAndMutateRowResponse.Merge(dst, src) +} +func (m *CheckAndMutateRowResponse) XXX_Size() int { + return xxx_messageInfo_CheckAndMutateRowResponse.Size(m) +} +func (m *CheckAndMutateRowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckAndMutateRowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckAndMutateRowResponse proto.InternalMessageInfo + +func (m *CheckAndMutateRowResponse) GetPredicateMatched() bool { + if m != nil { + return m.PredicateMatched + } + return false +} + +// Request message for BigtableService.ReadModifyWriteRowRequest. +type ReadModifyWriteRowRequest struct { + // The unique name of the table to which the read/modify/write rules should be + // applied. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // The key of the row to which the read/modify/write rules should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Rules specifying how the specified row's contents are to be transformed + // into writes. Entries are applied in order, meaning that earlier rules will + // affect the results of later ones. 
+ Rules []*ReadModifyWriteRule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadModifyWriteRowRequest) Reset() { *m = ReadModifyWriteRowRequest{} } +func (m *ReadModifyWriteRowRequest) String() string { return proto.CompactTextString(m) } +func (*ReadModifyWriteRowRequest) ProtoMessage() {} +func (*ReadModifyWriteRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84, []int{9} +} +func (m *ReadModifyWriteRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadModifyWriteRowRequest.Unmarshal(m, b) +} +func (m *ReadModifyWriteRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadModifyWriteRowRequest.Marshal(b, m, deterministic) +} +func (dst *ReadModifyWriteRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadModifyWriteRowRequest.Merge(dst, src) +} +func (m *ReadModifyWriteRowRequest) XXX_Size() int { + return xxx_messageInfo_ReadModifyWriteRowRequest.Size(m) +} +func (m *ReadModifyWriteRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadModifyWriteRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadModifyWriteRowRequest proto.InternalMessageInfo + +func (m *ReadModifyWriteRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *ReadModifyWriteRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *ReadModifyWriteRowRequest) GetRules() []*ReadModifyWriteRule { + if m != nil { + return m.Rules + } + return nil +} + +func init() { + proto.RegisterType((*ReadRowsRequest)(nil), "google.bigtable.v1.ReadRowsRequest") + proto.RegisterType((*ReadRowsResponse)(nil), "google.bigtable.v1.ReadRowsResponse") + proto.RegisterType((*ReadRowsResponse_Chunk)(nil), "google.bigtable.v1.ReadRowsResponse.Chunk") + proto.RegisterType((*SampleRowKeysRequest)(nil), "google.bigtable.v1.SampleRowKeysRequest") + proto.RegisterType((*SampleRowKeysResponse)(nil), "google.bigtable.v1.SampleRowKeysResponse") + proto.RegisterType((*MutateRowRequest)(nil), "google.bigtable.v1.MutateRowRequest") + proto.RegisterType((*MutateRowsRequest)(nil), "google.bigtable.v1.MutateRowsRequest") + proto.RegisterType((*MutateRowsRequest_Entry)(nil), "google.bigtable.v1.MutateRowsRequest.Entry") + proto.RegisterType((*MutateRowsResponse)(nil), "google.bigtable.v1.MutateRowsResponse") + proto.RegisterType((*CheckAndMutateRowRequest)(nil), "google.bigtable.v1.CheckAndMutateRowRequest") + proto.RegisterType((*CheckAndMutateRowResponse)(nil), "google.bigtable.v1.CheckAndMutateRowResponse") + proto.RegisterType((*ReadModifyWriteRowRequest)(nil), "google.bigtable.v1.ReadModifyWriteRowRequest") +} + +func init() { + proto.RegisterFile("google/bigtable/v1/bigtable_service_messages.proto", fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84) +} + +var fileDescriptor_bigtable_service_messages_4f7fbb80f2eeca84 = []byte{ + // 788 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5f, 0x8b, 0x23, 0x45, + 0x10, 0xdf, 0x49, 0xcc, 0xbf, 0x4a, 0xdc, 0xdd, 0x6b, 0xce, 0xdb, 0xd9, 0x70, 0x8b, 0x71, 0x10, + 0x0d, 0x1e, 0x4c, 0xb8, 0xd5, 0x7d, 0xb9, 0x43, 0xc4, 0xc4, 0x3d, 0x22, 0x1a, 0x39, 0x3a, 0x0f, + 0x82, 0x08, 0xa1, 0x33, 0xa9, 0xcc, 0x0e, 0x3b, 0xd3, 0x1d, 0xbb, 0x7b, 0x32, 0xe4, 0x59, 0xf0, + 0x5d, 0x3f, 
0x85, 0xdf, 0xc8, 0x17, 0x3f, 0x8c, 0x74, 0xcf, 0xe4, 0x8f, 0x6b, 0xa2, 0x11, 0xf6, + 0xad, 0xa7, 0xaa, 0x7e, 0xbf, 0xaa, 0xfa, 0x75, 0x4d, 0x17, 0x5c, 0x87, 0x42, 0x84, 0x31, 0xf6, + 0xa6, 0x51, 0xa8, 0xd9, 0x34, 0xc6, 0xde, 0xf2, 0xe5, 0xe6, 0x3c, 0x51, 0x28, 0x97, 0x51, 0x80, + 0x93, 0x04, 0x95, 0x62, 0x21, 0x2a, 0x7f, 0x21, 0x85, 0x16, 0x84, 0xe4, 0x18, 0x7f, 0x1d, 0xe7, + 0x2f, 0x5f, 0xb6, 0x3f, 0xfa, 0x37, 0x9e, 0x19, 0xd3, 0x2c, 0xc7, 0xb6, 0x2f, 0x8a, 0x38, 0xb9, + 0x08, 0x7a, 0x4a, 0x33, 0x9d, 0x16, 0xa4, 0xde, 0x9f, 0x25, 0x38, 0xa3, 0xc8, 0x66, 0x54, 0x64, + 0x8a, 0xe2, 0x4f, 0x29, 0x2a, 0x4d, 0xae, 0x00, 0x72, 0x02, 0xce, 0x12, 0x74, 0x9d, 0x8e, 0xd3, + 0x6d, 0xd0, 0x86, 0xb5, 0x7c, 0xc7, 0x12, 0x24, 0x97, 0x50, 0x93, 0x22, 0x9b, 0xdc, 0xe3, 0xca, + 0x2d, 0x75, 0x9c, 0x6e, 0x6b, 0x78, 0x42, 0xab, 0x52, 0x64, 0xdf, 0xe0, 0x8a, 0xbc, 0x86, 0x86, + 0x71, 0x49, 0xc6, 0x43, 0x74, 0xcb, 0x1d, 0xa7, 0xdb, 0xbc, 0x7e, 0xee, 0xff, 0xb3, 0x6c, 0x9f, + 0x8a, 0x8c, 0x9a, 0x98, 0xe1, 0x09, 0xad, 0xcb, 0xe2, 0x4c, 0x6e, 0x72, 0x5e, 0x85, 0xda, 0xad, + 0x5b, 0x68, 0xfb, 0x00, 0x74, 0x8c, 0xba, 0xc8, 0x39, 0x46, 0x4d, 0x6e, 0xa0, 0x3a, 0x8f, 0x62, + 0x8d, 0xd2, 0xad, 0x58, 0xd4, 0xd5, 0x01, 0xd4, 0x1b, 0x1b, 0x44, 0x8b, 0x60, 0xf2, 0x19, 0x3c, + 0x63, 0x71, 0x6c, 0x8a, 0x15, 0xd9, 0x24, 0xe2, 0x1a, 0x65, 0x8c, 0x6c, 0x19, 0xf1, 0xd0, 0xad, + 0x76, 0x9c, 0x6e, 0x9d, 0x3e, 0xb5, 0x5e, 0x2a, 0xb2, 0xaf, 0x77, 0x7c, 0xe4, 0x43, 0x38, 0xe5, + 0x69, 0x62, 0x30, 0x6a, 0x12, 0x47, 0x49, 0xa4, 0xdd, 0x5a, 0xc7, 0xe9, 0x96, 0x69, 0x8b, 0xa7, + 0x89, 0x91, 0xf0, 0x5b, 0x63, 0xeb, 0xd7, 0xa1, 0xaa, 0x99, 0x0c, 0x51, 0x7b, 0x3f, 0x97, 0xe0, + 0x7c, 0x2b, 0xaf, 0x5a, 0x08, 0xae, 0x90, 0x5c, 0x6c, 0x05, 0x34, 0xe2, 0xb6, 0x36, 0xf2, 0xf5, + 0xa1, 0x1a, 0xdc, 0xa5, 0xfc, 0x5e, 0xb9, 0xa5, 0x4e, 0xb9, 0xdb, 0xbc, 0xfe, 0x64, 0x6f, 0x2b, + 0x0f, 0xe8, 0xfc, 0x81, 0x81, 0xd0, 0x02, 0xd9, 0xfe, 0xd5, 0x81, 0x8a, 0xb5, 0x90, 0x2f, 0xa0, + 0x65, 0xd2, 0x04, 0x82, 0x6b, 0xe4, 0x5a, 0xd9, 0x5c, 0x07, 0x44, 0x7d, 0xc3, 0x92, 0x28, 0x5e, + 0x0d, 0x4f, 0x68, 0x53, 0x8a, 0x6c, 0x50, 0x00, 0xc8, 0x15, 0x34, 0x24, 0x2a, 0xd4, 0xa6, 0x5d, + 0x7b, 0xd5, 0x75, 0x7b, 0x5f, 0xc6, 0x44, 0x45, 0x46, 0xde, 0x07, 0x08, 0x44, 0x92, 0x44, 0xb9, + 0xbf, 0x5c, 0xf8, 0x1b, 0xb9, 0x8d, 0x8a, 0xac, 0x5f, 0x83, 0x8a, 0x2d, 0xca, 0xbb, 0x81, 0xa7, + 0x63, 0x96, 0x2c, 0x62, 0xa4, 0xb6, 0xcf, 0x23, 0x07, 0xcd, 0x1b, 0xc3, 0x7b, 0x0f, 0x60, 0xff, + 0x25, 0xe0, 0x07, 0xd0, 0x12, 0xf3, 0xb9, 0x29, 0x79, 0xba, 0xd2, 0xa8, 0x6c, 0xd1, 0x65, 0xda, + 0xcc, 0x6d, 0x7d, 0x63, 0xf2, 0x7e, 0x71, 0xe0, 0x7c, 0x94, 0x6a, 0xa6, 0x0d, 0xeb, 0x91, 0x13, + 0x7f, 0xf1, 0x60, 0xe2, 0x37, 0xf9, 0x5e, 0x41, 0x23, 0x31, 0x5c, 0x91, 0xe0, 0xca, 0x2d, 0xdb, + 0x3b, 0xdb, 0x3b, 0xef, 0xa3, 0x22, 0x88, 0x6e, 0xc3, 0xbd, 0x3f, 0x1c, 0x78, 0xb2, 0x29, 0xe4, + 0xd8, 0x7f, 0xef, 0x16, 0x6a, 0xc8, 0xb5, 0x8c, 0x70, 0x3d, 0x22, 0x2f, 0x0e, 0xa6, 0xdb, 0xa5, + 0xf5, 0x6f, 0xb9, 0x96, 0x2b, 0xba, 0xc6, 0xb6, 0x7f, 0x84, 0x8a, 0xb5, 0x1c, 0x56, 0xf2, 0x6f, + 0x9d, 0x95, 0xfe, 0x5f, 0x67, 0x5f, 0x01, 0xd9, 0xad, 0xa0, 0xb8, 0x34, 0x1f, 0xea, 0xf9, 0xcb, + 0x83, 0x66, 0x14, 0x0d, 0x21, 0x59, 0x13, 0xca, 0x45, 0xe0, 0x8f, 0xad, 0x8f, 0x6e, 0x62, 0xbc, + 0xdf, 0x4b, 0xe0, 0x0e, 0xee, 0x30, 0xb8, 0xff, 0x92, 0xcf, 0x1e, 0xed, 0xc2, 0x86, 0x70, 0xbe, + 0x90, 0x38, 0x8b, 0x02, 0xa6, 0x71, 0x52, 0x3c, 0x1b, 0xd5, 0x63, 0x9e, 0x8d, 0xb3, 0x0d, 0x2c, + 0x37, 0x90, 0x01, 0x9c, 0x6a, 0x99, 0xe2, 0x64, 0xab, 0xd2, 0x3b, 0x47, 0xa8, 0xf4, 0xae, 0xc1, + 0xac, 0xbf, 0x14, 0xb9, 0x85, 0xb3, 
0x39, 0x8b, 0xd5, 0x2e, 0x4b, 0xe5, 0x08, 0x96, 0x53, 0x0b, + 0xda, 0xd0, 0x78, 0x43, 0xb8, 0xdc, 0xa3, 0x54, 0xa1, 0xfb, 0x0b, 0x78, 0xb2, 0x6d, 0x39, 0x61, + 0x3a, 0xb8, 0xc3, 0x99, 0x55, 0xac, 0x4e, 0xb7, 0x5a, 0x8c, 0x72, 0xbb, 0xf7, 0x9b, 0x03, 0x97, + 0xe6, 0x81, 0x19, 0x89, 0x59, 0x34, 0x5f, 0x7d, 0x2f, 0xa3, 0x47, 0x51, 0xfd, 0x73, 0xa8, 0xc8, + 0x34, 0xc6, 0xf5, 0x2f, 0xf2, 0xf1, 0xa1, 0x67, 0x6d, 0x37, 0x6b, 0x1a, 0x23, 0xcd, 0x51, 0x7d, + 0x0d, 0xcf, 0x02, 0x91, 0xec, 0x01, 0xf5, 0x9f, 0xf7, 0x8b, 0x8f, 0x71, 0xbe, 0x32, 0x47, 0xc5, + 0xc6, 0x7c, 0x6b, 0x76, 0xdb, 0x5b, 0xe7, 0x87, 0x57, 0x05, 0x26, 0x14, 0x31, 0xe3, 0xa1, 0x2f, + 0x64, 0xd8, 0x0b, 0x91, 0xdb, 0xcd, 0xd7, 0xcb, 0x5d, 0x6c, 0x11, 0xa9, 0xdd, 0xed, 0xf9, 0x7a, + 0x7d, 0x9e, 0x56, 0x6d, 0xd8, 0xa7, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0x16, 0x55, 0x90, 0x95, + 0xab, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/v2/bigtable.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/v2/bigtable.pb.go new file mode 100644 index 0000000..1d24f09 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/v2/bigtable.pb.go @@ -0,0 +1,1532 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/v2/bigtable.proto + +package bigtable // import "google.golang.org/genproto/googleapis/bigtable/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for Bigtable.ReadRows. +type ReadRowsRequest struct { + // The unique name of the table from which to read. + // Values are of the form + // `projects//instances//tables/
`. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,5,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The row keys and/or ranges to read. If not specified, reads from all rows. + Rows *RowSet `protobuf:"bytes,2,opt,name=rows,proto3" json:"rows,omitempty"` + // The filter to apply to the contents of the specified row(s). If unset, + // reads the entirety of each row. + Filter *RowFilter `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // The read will terminate after committing to N rows' worth of results. The + // default (zero) is to return all results. + RowsLimit int64 `protobuf:"varint,4,opt,name=rows_limit,json=rowsLimit,proto3" json:"rows_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsRequest) Reset() { *m = ReadRowsRequest{} } +func (m *ReadRowsRequest) String() string { return proto.CompactTextString(m) } +func (*ReadRowsRequest) ProtoMessage() {} +func (*ReadRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{0} +} +func (m *ReadRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsRequest.Unmarshal(m, b) +} +func (m *ReadRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsRequest.Marshal(b, m, deterministic) +} +func (dst *ReadRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsRequest.Merge(dst, src) +} +func (m *ReadRowsRequest) XXX_Size() int { + return xxx_messageInfo_ReadRowsRequest.Size(m) +} +func (m *ReadRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsRequest proto.InternalMessageInfo + +func (m *ReadRowsRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *ReadRowsRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *ReadRowsRequest) GetRows() *RowSet { + if m != nil { + return m.Rows + } + return nil +} + +func (m *ReadRowsRequest) GetFilter() *RowFilter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *ReadRowsRequest) GetRowsLimit() int64 { + if m != nil { + return m.RowsLimit + } + return 0 +} + +// Response message for Bigtable.ReadRows. +type ReadRowsResponse struct { + Chunks []*ReadRowsResponse_CellChunk `protobuf:"bytes,1,rep,name=chunks,proto3" json:"chunks,omitempty"` + // Optionally the server might return the row key of the last row it + // has scanned. The client can use this to construct a more + // efficient retry request if needed: any row keys or portions of + // ranges less than this row key can be dropped from the request. + // This is primarily useful for cases where the server has read a + // lot of data that was filtered out since the last committed row + // key, allowing the client to skip that work on a retry. 
+ LastScannedRowKey []byte `protobuf:"bytes,2,opt,name=last_scanned_row_key,json=lastScannedRowKey,proto3" json:"last_scanned_row_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsResponse) Reset() { *m = ReadRowsResponse{} } +func (m *ReadRowsResponse) String() string { return proto.CompactTextString(m) } +func (*ReadRowsResponse) ProtoMessage() {} +func (*ReadRowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{1} +} +func (m *ReadRowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsResponse.Unmarshal(m, b) +} +func (m *ReadRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsResponse.Marshal(b, m, deterministic) +} +func (dst *ReadRowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsResponse.Merge(dst, src) +} +func (m *ReadRowsResponse) XXX_Size() int { + return xxx_messageInfo_ReadRowsResponse.Size(m) +} +func (m *ReadRowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsResponse proto.InternalMessageInfo + +func (m *ReadRowsResponse) GetChunks() []*ReadRowsResponse_CellChunk { + if m != nil { + return m.Chunks + } + return nil +} + +func (m *ReadRowsResponse) GetLastScannedRowKey() []byte { + if m != nil { + return m.LastScannedRowKey + } + return nil +} + +// Specifies a piece of a row's contents returned as part of the read +// response stream. +type ReadRowsResponse_CellChunk struct { + // The row key for this chunk of data. If the row key is empty, + // this CellChunk is a continuation of the same row as the previous + // CellChunk in the response stream, even if that CellChunk was in a + // previous ReadRowsResponse message. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // The column family name for this chunk of data. If this message + // is not present this CellChunk is a continuation of the same column + // family as the previous CellChunk. The empty string can occur as a + // column family name in a response so clients must check + // explicitly for the presence of this message, not just for + // `family_name.value` being non-empty. + FamilyName *wrappers.StringValue `protobuf:"bytes,2,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The column qualifier for this chunk of data. If this message + // is not present, this CellChunk is a continuation of the same column + // as the previous CellChunk. Column qualifiers may be empty so + // clients must check for the presence of this message, not just + // for `qualifier.value` being non-empty. + Qualifier *wrappers.BytesValue `protobuf:"bytes,3,opt,name=qualifier,proto3" json:"qualifier,omitempty"` + // The cell's stored timestamp, which also uniquely identifies it + // within its column. Values are always expressed in + // microseconds, but individual tables may set a coarser + // granularity to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will + // only allow values of `timestamp_micros` which are multiples of + // 1000. Timestamps are only set in the first CellChunk per cell + // (for cells split into multiple chunks). 
+ TimestampMicros int64 `protobuf:"varint,4,opt,name=timestamp_micros,json=timestampMicros,proto3" json:"timestamp_micros,omitempty"` + // Labels applied to the cell by a + // [RowFilter][google.bigtable.v2.RowFilter]. Labels are only set + // on the first CellChunk per cell. + Labels []string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty"` + // The value stored in the cell. Cell values can be split across + // multiple CellChunks. In that case only the value field will be + // set in CellChunks after the first: the timestamp and labels + // will only be present in the first CellChunk, even if the first + // CellChunk came in a previous ReadRowsResponse. + Value []byte `protobuf:"bytes,6,opt,name=value,proto3" json:"value,omitempty"` + // If this CellChunk is part of a chunked cell value and this is + // not the final chunk of that cell, value_size will be set to the + // total length of the cell value. The client can use this size + // to pre-allocate memory to hold the full cell value. + ValueSize int32 `protobuf:"varint,7,opt,name=value_size,json=valueSize,proto3" json:"value_size,omitempty"` + // Types that are valid to be assigned to RowStatus: + // *ReadRowsResponse_CellChunk_ResetRow + // *ReadRowsResponse_CellChunk_CommitRow + RowStatus isReadRowsResponse_CellChunk_RowStatus `protobuf_oneof:"row_status"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsResponse_CellChunk) Reset() { *m = ReadRowsResponse_CellChunk{} } +func (m *ReadRowsResponse_CellChunk) String() string { return proto.CompactTextString(m) } +func (*ReadRowsResponse_CellChunk) ProtoMessage() {} +func (*ReadRowsResponse_CellChunk) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{1, 0} +} +func (m *ReadRowsResponse_CellChunk) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsResponse_CellChunk.Unmarshal(m, b) +} +func (m *ReadRowsResponse_CellChunk) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsResponse_CellChunk.Marshal(b, m, deterministic) +} +func (dst *ReadRowsResponse_CellChunk) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsResponse_CellChunk.Merge(dst, src) +} +func (m *ReadRowsResponse_CellChunk) XXX_Size() int { + return xxx_messageInfo_ReadRowsResponse_CellChunk.Size(m) +} +func (m *ReadRowsResponse_CellChunk) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsResponse_CellChunk.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsResponse_CellChunk proto.InternalMessageInfo + +func (m *ReadRowsResponse_CellChunk) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetFamilyName() *wrappers.StringValue { + if m != nil { + return m.FamilyName + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetQualifier() *wrappers.BytesValue { + if m != nil { + return m.Qualifier + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetTimestampMicros() int64 { + if m != nil { + return m.TimestampMicros + } + return 0 +} + +func (m *ReadRowsResponse_CellChunk) GetLabels() []string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetValueSize() int32 { + if m != nil { + return m.ValueSize + } + return 0 +} + +type isReadRowsResponse_CellChunk_RowStatus interface { + 
isReadRowsResponse_CellChunk_RowStatus() +} + +type ReadRowsResponse_CellChunk_ResetRow struct { + ResetRow bool `protobuf:"varint,8,opt,name=reset_row,json=resetRow,proto3,oneof"` +} + +type ReadRowsResponse_CellChunk_CommitRow struct { + CommitRow bool `protobuf:"varint,9,opt,name=commit_row,json=commitRow,proto3,oneof"` +} + +func (*ReadRowsResponse_CellChunk_ResetRow) isReadRowsResponse_CellChunk_RowStatus() {} + +func (*ReadRowsResponse_CellChunk_CommitRow) isReadRowsResponse_CellChunk_RowStatus() {} + +func (m *ReadRowsResponse_CellChunk) GetRowStatus() isReadRowsResponse_CellChunk_RowStatus { + if m != nil { + return m.RowStatus + } + return nil +} + +func (m *ReadRowsResponse_CellChunk) GetResetRow() bool { + if x, ok := m.GetRowStatus().(*ReadRowsResponse_CellChunk_ResetRow); ok { + return x.ResetRow + } + return false +} + +func (m *ReadRowsResponse_CellChunk) GetCommitRow() bool { + if x, ok := m.GetRowStatus().(*ReadRowsResponse_CellChunk_CommitRow); ok { + return x.CommitRow + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ReadRowsResponse_CellChunk) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadRowsResponse_CellChunk_OneofMarshaler, _ReadRowsResponse_CellChunk_OneofUnmarshaler, _ReadRowsResponse_CellChunk_OneofSizer, []interface{}{ + (*ReadRowsResponse_CellChunk_ResetRow)(nil), + (*ReadRowsResponse_CellChunk_CommitRow)(nil), + } +} + +func _ReadRowsResponse_CellChunk_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadRowsResponse_CellChunk) + // row_status + switch x := m.RowStatus.(type) { + case *ReadRowsResponse_CellChunk_ResetRow: + t := uint64(0) + if x.ResetRow { + t = 1 + } + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *ReadRowsResponse_CellChunk_CommitRow: + t := uint64(0) + if x.CommitRow { + t = 1 + } + b.EncodeVarint(9<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("ReadRowsResponse_CellChunk.RowStatus has unexpected type %T", x) + } + return nil +} + +func _ReadRowsResponse_CellChunk_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadRowsResponse_CellChunk) + switch tag { + case 8: // row_status.reset_row + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.RowStatus = &ReadRowsResponse_CellChunk_ResetRow{x != 0} + return true, err + case 9: // row_status.commit_row + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.RowStatus = &ReadRowsResponse_CellChunk_CommitRow{x != 0} + return true, err + default: + return false, nil + } +} + +func _ReadRowsResponse_CellChunk_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadRowsResponse_CellChunk) + // row_status + switch x := m.RowStatus.(type) { + case *ReadRowsResponse_CellChunk_ResetRow: + n += 1 // tag and wire + n += 1 + case *ReadRowsResponse_CellChunk_CommitRow: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for Bigtable.SampleRowKeys. +type SampleRowKeysRequest struct { + // The unique name of the table from which to sample row keys. + // Values are of the form + // `projects//instances//tables/
`. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,2,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SampleRowKeysRequest) Reset() { *m = SampleRowKeysRequest{} } +func (m *SampleRowKeysRequest) String() string { return proto.CompactTextString(m) } +func (*SampleRowKeysRequest) ProtoMessage() {} +func (*SampleRowKeysRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{2} +} +func (m *SampleRowKeysRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SampleRowKeysRequest.Unmarshal(m, b) +} +func (m *SampleRowKeysRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SampleRowKeysRequest.Marshal(b, m, deterministic) +} +func (dst *SampleRowKeysRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SampleRowKeysRequest.Merge(dst, src) +} +func (m *SampleRowKeysRequest) XXX_Size() int { + return xxx_messageInfo_SampleRowKeysRequest.Size(m) +} +func (m *SampleRowKeysRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SampleRowKeysRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SampleRowKeysRequest proto.InternalMessageInfo + +func (m *SampleRowKeysRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *SampleRowKeysRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +// Response message for Bigtable.SampleRowKeys. +type SampleRowKeysResponse struct { + // Sorted streamed sequence of sample row keys in the table. The table might + // have contents before the first row key in the list and after the last one, + // but a key containing the empty string indicates "end of table" and will be + // the last response given, if present. + // Note that row keys in this list may not have ever been written to or read + // from, and users should therefore not make any assumptions about the row key + // structure that are specific to their use case. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Approximate total storage space used by all rows in the table which precede + // `row_key`. Buffering the contents of all rows between two subsequent + // samples would require space roughly equal to the difference in their + // `offset_bytes` fields. 
+ OffsetBytes int64 `protobuf:"varint,2,opt,name=offset_bytes,json=offsetBytes,proto3" json:"offset_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SampleRowKeysResponse) Reset() { *m = SampleRowKeysResponse{} } +func (m *SampleRowKeysResponse) String() string { return proto.CompactTextString(m) } +func (*SampleRowKeysResponse) ProtoMessage() {} +func (*SampleRowKeysResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{3} +} +func (m *SampleRowKeysResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SampleRowKeysResponse.Unmarshal(m, b) +} +func (m *SampleRowKeysResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SampleRowKeysResponse.Marshal(b, m, deterministic) +} +func (dst *SampleRowKeysResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SampleRowKeysResponse.Merge(dst, src) +} +func (m *SampleRowKeysResponse) XXX_Size() int { + return xxx_messageInfo_SampleRowKeysResponse.Size(m) +} +func (m *SampleRowKeysResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SampleRowKeysResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SampleRowKeysResponse proto.InternalMessageInfo + +func (m *SampleRowKeysResponse) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *SampleRowKeysResponse) GetOffsetBytes() int64 { + if m != nil { + return m.OffsetBytes + } + return 0 +} + +// Request message for Bigtable.MutateRow. +type MutateRowRequest struct { + // The unique name of the table to which the mutation should be applied. + // Values are of the form + // `projects//instances//tables/
`. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,4,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The key of the row to which the mutation should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Changes to be atomically applied to the specified row. Entries are applied + // in order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry and at most 100000. + Mutations []*Mutation `protobuf:"bytes,3,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowRequest) Reset() { *m = MutateRowRequest{} } +func (m *MutateRowRequest) String() string { return proto.CompactTextString(m) } +func (*MutateRowRequest) ProtoMessage() {} +func (*MutateRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{4} +} +func (m *MutateRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowRequest.Unmarshal(m, b) +} +func (m *MutateRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowRequest.Marshal(b, m, deterministic) +} +func (dst *MutateRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowRequest.Merge(dst, src) +} +func (m *MutateRowRequest) XXX_Size() int { + return xxx_messageInfo_MutateRowRequest.Size(m) +} +func (m *MutateRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowRequest proto.InternalMessageInfo + +func (m *MutateRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *MutateRowRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *MutateRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *MutateRowRequest) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// Response message for Bigtable.MutateRow. 
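+
+// mutateOneRow is an illustrative sketch, not part of the protoc output: it shows
+// how a MutateRowRequest pairs one row key with mutations that the server applies
+// in order and atomically. The caller supplies the mutations (the Mutation type is
+// defined elsewhere in this package); the table and key values here are assumed.
+func mutateOneRow(ctx context.Context, client BigtableClient, table string, key []byte, muts []*Mutation) error {
+	_, err := client.MutateRow(ctx, &MutateRowRequest{
+		TableName: table, // full table name, e.g. "projects/.../instances/.../tables/..."
+		RowKey:    key,
+		Mutations: muts, // applied in order; later mutations can mask earlier ones
+	})
+	return err
+}
+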
+type MutateRowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowResponse) Reset() { *m = MutateRowResponse{} } +func (m *MutateRowResponse) String() string { return proto.CompactTextString(m) } +func (*MutateRowResponse) ProtoMessage() {} +func (*MutateRowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{5} +} +func (m *MutateRowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowResponse.Unmarshal(m, b) +} +func (m *MutateRowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowResponse.Marshal(b, m, deterministic) +} +func (dst *MutateRowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowResponse.Merge(dst, src) +} +func (m *MutateRowResponse) XXX_Size() int { + return xxx_messageInfo_MutateRowResponse.Size(m) +} +func (m *MutateRowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowResponse proto.InternalMessageInfo + +// Request message for BigtableService.MutateRows. +type MutateRowsRequest struct { + // The unique name of the table to which the mutations should be applied. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,3,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The row keys and corresponding mutations to be applied in bulk. + // Each entry is applied as an atomic mutation, but the entries may be + // applied in arbitrary order (even between entries for the same row). + // At least one entry must be specified, and in total the entries can + // contain at most 100000 mutations. 
+ Entries []*MutateRowsRequest_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsRequest) Reset() { *m = MutateRowsRequest{} } +func (m *MutateRowsRequest) String() string { return proto.CompactTextString(m) } +func (*MutateRowsRequest) ProtoMessage() {} +func (*MutateRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{6} +} +func (m *MutateRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsRequest.Unmarshal(m, b) +} +func (m *MutateRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsRequest.Marshal(b, m, deterministic) +} +func (dst *MutateRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsRequest.Merge(dst, src) +} +func (m *MutateRowsRequest) XXX_Size() int { + return xxx_messageInfo_MutateRowsRequest.Size(m) +} +func (m *MutateRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsRequest proto.InternalMessageInfo + +func (m *MutateRowsRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *MutateRowsRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *MutateRowsRequest) GetEntries() []*MutateRowsRequest_Entry { + if m != nil { + return m.Entries + } + return nil +} + +type MutateRowsRequest_Entry struct { + // The key of the row to which the `mutations` should be applied. + RowKey []byte `protobuf:"bytes,1,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Changes to be atomically applied to the specified row. Mutations are + // applied in order, meaning that earlier mutations can be masked by + // later ones. + // You must specify at least one mutation. 
+ Mutations []*Mutation `protobuf:"bytes,2,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsRequest_Entry) Reset() { *m = MutateRowsRequest_Entry{} } +func (m *MutateRowsRequest_Entry) String() string { return proto.CompactTextString(m) } +func (*MutateRowsRequest_Entry) ProtoMessage() {} +func (*MutateRowsRequest_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{6, 0} +} +func (m *MutateRowsRequest_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsRequest_Entry.Unmarshal(m, b) +} +func (m *MutateRowsRequest_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsRequest_Entry.Marshal(b, m, deterministic) +} +func (dst *MutateRowsRequest_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsRequest_Entry.Merge(dst, src) +} +func (m *MutateRowsRequest_Entry) XXX_Size() int { + return xxx_messageInfo_MutateRowsRequest_Entry.Size(m) +} +func (m *MutateRowsRequest_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsRequest_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsRequest_Entry proto.InternalMessageInfo + +func (m *MutateRowsRequest_Entry) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *MutateRowsRequest_Entry) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// Response message for BigtableService.MutateRows. +type MutateRowsResponse struct { + // One or more results for Entries from the batch request. + Entries []*MutateRowsResponse_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsResponse) Reset() { *m = MutateRowsResponse{} } +func (m *MutateRowsResponse) String() string { return proto.CompactTextString(m) } +func (*MutateRowsResponse) ProtoMessage() {} +func (*MutateRowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{7} +} +func (m *MutateRowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsResponse.Unmarshal(m, b) +} +func (m *MutateRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsResponse.Marshal(b, m, deterministic) +} +func (dst *MutateRowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsResponse.Merge(dst, src) +} +func (m *MutateRowsResponse) XXX_Size() int { + return xxx_messageInfo_MutateRowsResponse.Size(m) +} +func (m *MutateRowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsResponse proto.InternalMessageInfo + +func (m *MutateRowsResponse) GetEntries() []*MutateRowsResponse_Entry { + if m != nil { + return m.Entries + } + return nil +} + +type MutateRowsResponse_Entry struct { + // The index into the original request's `entries` list of the Entry + // for which a result is being reported. + Index int64 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` + // The result of the request Entry identified by `index`. + // Depending on how requests are batched during execution, it is possible + // for one Entry to fail due to an error with another Entry. 
In the event + // that this occurs, the same error will be reported for both entries. + Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutateRowsResponse_Entry) Reset() { *m = MutateRowsResponse_Entry{} } +func (m *MutateRowsResponse_Entry) String() string { return proto.CompactTextString(m) } +func (*MutateRowsResponse_Entry) ProtoMessage() {} +func (*MutateRowsResponse_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{7, 0} +} +func (m *MutateRowsResponse_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutateRowsResponse_Entry.Unmarshal(m, b) +} +func (m *MutateRowsResponse_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutateRowsResponse_Entry.Marshal(b, m, deterministic) +} +func (dst *MutateRowsResponse_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutateRowsResponse_Entry.Merge(dst, src) +} +func (m *MutateRowsResponse_Entry) XXX_Size() int { + return xxx_messageInfo_MutateRowsResponse_Entry.Size(m) +} +func (m *MutateRowsResponse_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_MutateRowsResponse_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_MutateRowsResponse_Entry proto.InternalMessageInfo + +func (m *MutateRowsResponse_Entry) GetIndex() int64 { + if m != nil { + return m.Index + } + return 0 +} + +func (m *MutateRowsResponse_Entry) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +// Request message for Bigtable.CheckAndMutateRow. +type CheckAndMutateRowRequest struct { + // The unique name of the table to which the conditional mutation should be + // applied. + // Values are of the form + // `projects//instances//tables/
`. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,7,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The key of the row to which the conditional mutation should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // The filter to be applied to the contents of the specified row. Depending + // on whether or not any results are yielded, either `true_mutations` or + // `false_mutations` will be executed. If unset, checks that the row contains + // any values at all. + PredicateFilter *RowFilter `protobuf:"bytes,6,opt,name=predicate_filter,json=predicateFilter,proto3" json:"predicate_filter,omitempty"` + // Changes to be atomically applied to the specified row if `predicate_filter` + // yields at least one cell when applied to `row_key`. Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if `false_mutations` is empty, and at most + // 100000. + TrueMutations []*Mutation `protobuf:"bytes,4,rep,name=true_mutations,json=trueMutations,proto3" json:"true_mutations,omitempty"` + // Changes to be atomically applied to the specified row if `predicate_filter` + // does not yield any cells when applied to `row_key`. Entries are applied in + // order, meaning that earlier mutations can be masked by later ones. + // Must contain at least one entry if `true_mutations` is empty, and at most + // 100000. + FalseMutations []*Mutation `protobuf:"bytes,5,rep,name=false_mutations,json=falseMutations,proto3" json:"false_mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckAndMutateRowRequest) Reset() { *m = CheckAndMutateRowRequest{} } +func (m *CheckAndMutateRowRequest) String() string { return proto.CompactTextString(m) } +func (*CheckAndMutateRowRequest) ProtoMessage() {} +func (*CheckAndMutateRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{8} +} +func (m *CheckAndMutateRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckAndMutateRowRequest.Unmarshal(m, b) +} +func (m *CheckAndMutateRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckAndMutateRowRequest.Marshal(b, m, deterministic) +} +func (dst *CheckAndMutateRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckAndMutateRowRequest.Merge(dst, src) +} +func (m *CheckAndMutateRowRequest) XXX_Size() int { + return xxx_messageInfo_CheckAndMutateRowRequest.Size(m) +} +func (m *CheckAndMutateRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckAndMutateRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckAndMutateRowRequest proto.InternalMessageInfo + +func (m *CheckAndMutateRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *CheckAndMutateRowRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *CheckAndMutateRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *CheckAndMutateRowRequest) GetPredicateFilter() *RowFilter { + if m != nil { + return m.PredicateFilter + } + return 
nil +} + +func (m *CheckAndMutateRowRequest) GetTrueMutations() []*Mutation { + if m != nil { + return m.TrueMutations + } + return nil +} + +func (m *CheckAndMutateRowRequest) GetFalseMutations() []*Mutation { + if m != nil { + return m.FalseMutations + } + return nil +} + +// Response message for Bigtable.CheckAndMutateRow. +type CheckAndMutateRowResponse struct { + // Whether or not the request's `predicate_filter` yielded any results for + // the specified row. + PredicateMatched bool `protobuf:"varint,1,opt,name=predicate_matched,json=predicateMatched,proto3" json:"predicate_matched,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckAndMutateRowResponse) Reset() { *m = CheckAndMutateRowResponse{} } +func (m *CheckAndMutateRowResponse) String() string { return proto.CompactTextString(m) } +func (*CheckAndMutateRowResponse) ProtoMessage() {} +func (*CheckAndMutateRowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{9} +} +func (m *CheckAndMutateRowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckAndMutateRowResponse.Unmarshal(m, b) +} +func (m *CheckAndMutateRowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckAndMutateRowResponse.Marshal(b, m, deterministic) +} +func (dst *CheckAndMutateRowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckAndMutateRowResponse.Merge(dst, src) +} +func (m *CheckAndMutateRowResponse) XXX_Size() int { + return xxx_messageInfo_CheckAndMutateRowResponse.Size(m) +} +func (m *CheckAndMutateRowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckAndMutateRowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckAndMutateRowResponse proto.InternalMessageInfo + +func (m *CheckAndMutateRowResponse) GetPredicateMatched() bool { + if m != nil { + return m.PredicateMatched + } + return false +} + +// Request message for Bigtable.ReadModifyWriteRow. +type ReadModifyWriteRowRequest struct { + // The unique name of the table to which the read/modify/write rules should be + // applied. + // Values are of the form + // `projects//instances//tables/
`. + TableName string `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // This value specifies routing for replication. If not specified, the + // "default" application profile will be used. + AppProfileId string `protobuf:"bytes,4,opt,name=app_profile_id,json=appProfileId,proto3" json:"app_profile_id,omitempty"` + // The key of the row to which the read/modify/write rules should be applied. + RowKey []byte `protobuf:"bytes,2,opt,name=row_key,json=rowKey,proto3" json:"row_key,omitempty"` + // Rules specifying how the specified row's contents are to be transformed + // into writes. Entries are applied in order, meaning that earlier rules will + // affect the results of later ones. + Rules []*ReadModifyWriteRule `protobuf:"bytes,3,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadModifyWriteRowRequest) Reset() { *m = ReadModifyWriteRowRequest{} } +func (m *ReadModifyWriteRowRequest) String() string { return proto.CompactTextString(m) } +func (*ReadModifyWriteRowRequest) ProtoMessage() {} +func (*ReadModifyWriteRowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{10} +} +func (m *ReadModifyWriteRowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadModifyWriteRowRequest.Unmarshal(m, b) +} +func (m *ReadModifyWriteRowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadModifyWriteRowRequest.Marshal(b, m, deterministic) +} +func (dst *ReadModifyWriteRowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadModifyWriteRowRequest.Merge(dst, src) +} +func (m *ReadModifyWriteRowRequest) XXX_Size() int { + return xxx_messageInfo_ReadModifyWriteRowRequest.Size(m) +} +func (m *ReadModifyWriteRowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadModifyWriteRowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadModifyWriteRowRequest proto.InternalMessageInfo + +func (m *ReadModifyWriteRowRequest) GetTableName() string { + if m != nil { + return m.TableName + } + return "" +} + +func (m *ReadModifyWriteRowRequest) GetAppProfileId() string { + if m != nil { + return m.AppProfileId + } + return "" +} + +func (m *ReadModifyWriteRowRequest) GetRowKey() []byte { + if m != nil { + return m.RowKey + } + return nil +} + +func (m *ReadModifyWriteRowRequest) GetRules() []*ReadModifyWriteRule { + if m != nil { + return m.Rules + } + return nil +} + +// Response message for Bigtable.ReadModifyWriteRow. +type ReadModifyWriteRowResponse struct { + // A Row containing the new contents of all cells modified by the request. 
+ Row *Row `protobuf:"bytes,1,opt,name=row,proto3" json:"row,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadModifyWriteRowResponse) Reset() { *m = ReadModifyWriteRowResponse{} } +func (m *ReadModifyWriteRowResponse) String() string { return proto.CompactTextString(m) } +func (*ReadModifyWriteRowResponse) ProtoMessage() {} +func (*ReadModifyWriteRowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bigtable_b796ce6ccdb99cd8, []int{11} +} +func (m *ReadModifyWriteRowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadModifyWriteRowResponse.Unmarshal(m, b) +} +func (m *ReadModifyWriteRowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadModifyWriteRowResponse.Marshal(b, m, deterministic) +} +func (dst *ReadModifyWriteRowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadModifyWriteRowResponse.Merge(dst, src) +} +func (m *ReadModifyWriteRowResponse) XXX_Size() int { + return xxx_messageInfo_ReadModifyWriteRowResponse.Size(m) +} +func (m *ReadModifyWriteRowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReadModifyWriteRowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadModifyWriteRowResponse proto.InternalMessageInfo + +func (m *ReadModifyWriteRowResponse) GetRow() *Row { + if m != nil { + return m.Row + } + return nil +} + +func init() { + proto.RegisterType((*ReadRowsRequest)(nil), "google.bigtable.v2.ReadRowsRequest") + proto.RegisterType((*ReadRowsResponse)(nil), "google.bigtable.v2.ReadRowsResponse") + proto.RegisterType((*ReadRowsResponse_CellChunk)(nil), "google.bigtable.v2.ReadRowsResponse.CellChunk") + proto.RegisterType((*SampleRowKeysRequest)(nil), "google.bigtable.v2.SampleRowKeysRequest") + proto.RegisterType((*SampleRowKeysResponse)(nil), "google.bigtable.v2.SampleRowKeysResponse") + proto.RegisterType((*MutateRowRequest)(nil), "google.bigtable.v2.MutateRowRequest") + proto.RegisterType((*MutateRowResponse)(nil), "google.bigtable.v2.MutateRowResponse") + proto.RegisterType((*MutateRowsRequest)(nil), "google.bigtable.v2.MutateRowsRequest") + proto.RegisterType((*MutateRowsRequest_Entry)(nil), "google.bigtable.v2.MutateRowsRequest.Entry") + proto.RegisterType((*MutateRowsResponse)(nil), "google.bigtable.v2.MutateRowsResponse") + proto.RegisterType((*MutateRowsResponse_Entry)(nil), "google.bigtable.v2.MutateRowsResponse.Entry") + proto.RegisterType((*CheckAndMutateRowRequest)(nil), "google.bigtable.v2.CheckAndMutateRowRequest") + proto.RegisterType((*CheckAndMutateRowResponse)(nil), "google.bigtable.v2.CheckAndMutateRowResponse") + proto.RegisterType((*ReadModifyWriteRowRequest)(nil), "google.bigtable.v2.ReadModifyWriteRowRequest") + proto.RegisterType((*ReadModifyWriteRowResponse)(nil), "google.bigtable.v2.ReadModifyWriteRowResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BigtableClient is the client API for Bigtable service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
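+
+// readSomeRows is an illustrative sketch added alongside the generated client; it
+// is not part of the protoc output. The connection, table name, row limit, and
+// error handling are assumptions made for the example only.
+func readSomeRows(ctx context.Context, cc *grpc.ClientConn, table string) error {
+	client := NewBigtableClient(cc)
+	// RowsLimit caps the read at ten rows' worth of results; zero would mean "all rows".
+	stream, err := client.ReadRows(ctx, &ReadRowsRequest{TableName: table, RowsLimit: 10})
+	if err != nil {
+		return err
+	}
+	for {
+		resp, err := stream.Recv()
+		if err != nil {
+			// Recv reports io.EOF once the server has sent its final ReadRowsResponse;
+			// a real caller would treat that separately from transport errors.
+			return err
+		}
+		for _, chunk := range resp.GetChunks() {
+			// CommitRow marks the last chunk of a row; ResetRow tells the client to
+			// discard everything buffered for that row so far.
+			fmt.Printf("row %q: %d value bytes (commit=%v)\n",
+				chunk.GetRowKey(), len(chunk.GetValue()), chunk.GetCommitRow())
+		}
+	}
+}
+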
+type BigtableClient interface { + // Streams back the contents of all requested rows in key order, optionally + // applying the same Reader filter to each. Depending on their size, + // rows and cells may be broken up across multiple responses, but + // atomicity of each row will still be preserved. See the + // ReadRowsResponse documentation for details. + ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (Bigtable_ReadRowsClient, error) + // Returns a sample of row keys in the table. The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + SampleRowKeys(ctx context.Context, in *SampleRowKeysRequest, opts ...grpc.CallOption) (Bigtable_SampleRowKeysClient, error) + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by `mutation`. + MutateRow(ctx context.Context, in *MutateRowRequest, opts ...grpc.CallOption) (*MutateRowResponse, error) + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + MutateRows(ctx context.Context, in *MutateRowsRequest, opts ...grpc.CallOption) (Bigtable_MutateRowsClient, error) + // Mutates a row atomically based on the output of a predicate Reader filter. + CheckAndMutateRow(ctx context.Context, in *CheckAndMutateRowRequest, opts ...grpc.CallOption) (*CheckAndMutateRowResponse, error) + // Modifies a row atomically on the server. The method reads the latest + // existing timestamp and value from the specified columns and writes a new + // entry based on pre-defined read/modify/write rules. The new value for the + // timestamp is the greater of the existing timestamp or the current server + // time. The method returns the new contents of all modified cells. + ReadModifyWriteRow(ctx context.Context, in *ReadModifyWriteRowRequest, opts ...grpc.CallOption) (*ReadModifyWriteRowResponse, error) +} + +type bigtableClient struct { + cc *grpc.ClientConn +} + +func NewBigtableClient(cc *grpc.ClientConn) BigtableClient { + return &bigtableClient{cc} +} + +func (c *bigtableClient) ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (Bigtable_ReadRowsClient, error) { + stream, err := c.cc.NewStream(ctx, &_Bigtable_serviceDesc.Streams[0], "/google.bigtable.v2.Bigtable/ReadRows", opts...) + if err != nil { + return nil, err + } + x := &bigtableReadRowsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Bigtable_ReadRowsClient interface { + Recv() (*ReadRowsResponse, error) + grpc.ClientStream +} + +type bigtableReadRowsClient struct { + grpc.ClientStream +} + +func (x *bigtableReadRowsClient) Recv() (*ReadRowsResponse, error) { + m := new(ReadRowsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigtableClient) SampleRowKeys(ctx context.Context, in *SampleRowKeysRequest, opts ...grpc.CallOption) (Bigtable_SampleRowKeysClient, error) { + stream, err := c.cc.NewStream(ctx, &_Bigtable_serviceDesc.Streams[1], "/google.bigtable.v2.Bigtable/SampleRowKeys", opts...) 
+ if err != nil { + return nil, err + } + x := &bigtableSampleRowKeysClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Bigtable_SampleRowKeysClient interface { + Recv() (*SampleRowKeysResponse, error) + grpc.ClientStream +} + +type bigtableSampleRowKeysClient struct { + grpc.ClientStream +} + +func (x *bigtableSampleRowKeysClient) Recv() (*SampleRowKeysResponse, error) { + m := new(SampleRowKeysResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigtableClient) MutateRow(ctx context.Context, in *MutateRowRequest, opts ...grpc.CallOption) (*MutateRowResponse, error) { + out := new(MutateRowResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.v2.Bigtable/MutateRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClient) MutateRows(ctx context.Context, in *MutateRowsRequest, opts ...grpc.CallOption) (Bigtable_MutateRowsClient, error) { + stream, err := c.cc.NewStream(ctx, &_Bigtable_serviceDesc.Streams[2], "/google.bigtable.v2.Bigtable/MutateRows", opts...) + if err != nil { + return nil, err + } + x := &bigtableMutateRowsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Bigtable_MutateRowsClient interface { + Recv() (*MutateRowsResponse, error) + grpc.ClientStream +} + +type bigtableMutateRowsClient struct { + grpc.ClientStream +} + +func (x *bigtableMutateRowsClient) Recv() (*MutateRowsResponse, error) { + m := new(MutateRowsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigtableClient) CheckAndMutateRow(ctx context.Context, in *CheckAndMutateRowRequest, opts ...grpc.CallOption) (*CheckAndMutateRowResponse, error) { + out := new(CheckAndMutateRowResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.v2.Bigtable/CheckAndMutateRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigtableClient) ReadModifyWriteRow(ctx context.Context, in *ReadModifyWriteRowRequest, opts ...grpc.CallOption) (*ReadModifyWriteRowResponse, error) { + out := new(ReadModifyWriteRowResponse) + err := c.cc.Invoke(ctx, "/google.bigtable.v2.Bigtable/ReadModifyWriteRow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigtableServer is the server API for Bigtable service. +type BigtableServer interface { + // Streams back the contents of all requested rows in key order, optionally + // applying the same Reader filter to each. Depending on their size, + // rows and cells may be broken up across multiple responses, but + // atomicity of each row will still be preserved. See the + // ReadRowsResponse documentation for details. + ReadRows(*ReadRowsRequest, Bigtable_ReadRowsServer) error + // Returns a sample of row keys in the table. The returned row keys will + // delimit contiguous sections of the table of approximately equal size, + // which can be used to break up the data for distributed tasks like + // mapreduces. + SampleRowKeys(*SampleRowKeysRequest, Bigtable_SampleRowKeysServer) error + // Mutates a row atomically. Cells already present in the row are left + // unchanged unless explicitly changed by `mutation`. 
+ MutateRow(context.Context, *MutateRowRequest) (*MutateRowResponse, error) + // Mutates multiple rows in a batch. Each individual row is mutated + // atomically as in MutateRow, but the entire batch is not executed + // atomically. + MutateRows(*MutateRowsRequest, Bigtable_MutateRowsServer) error + // Mutates a row atomically based on the output of a predicate Reader filter. + CheckAndMutateRow(context.Context, *CheckAndMutateRowRequest) (*CheckAndMutateRowResponse, error) + // Modifies a row atomically on the server. The method reads the latest + // existing timestamp and value from the specified columns and writes a new + // entry based on pre-defined read/modify/write rules. The new value for the + // timestamp is the greater of the existing timestamp or the current server + // time. The method returns the new contents of all modified cells. + ReadModifyWriteRow(context.Context, *ReadModifyWriteRowRequest) (*ReadModifyWriteRowResponse, error) +} + +func RegisterBigtableServer(s *grpc.Server, srv BigtableServer) { + s.RegisterService(&_Bigtable_serviceDesc, srv) +} + +func _Bigtable_ReadRows_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadRowsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigtableServer).ReadRows(m, &bigtableReadRowsServer{stream}) +} + +type Bigtable_ReadRowsServer interface { + Send(*ReadRowsResponse) error + grpc.ServerStream +} + +type bigtableReadRowsServer struct { + grpc.ServerStream +} + +func (x *bigtableReadRowsServer) Send(m *ReadRowsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Bigtable_SampleRowKeys_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SampleRowKeysRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigtableServer).SampleRowKeys(m, &bigtableSampleRowKeysServer{stream}) +} + +type Bigtable_SampleRowKeysServer interface { + Send(*SampleRowKeysResponse) error + grpc.ServerStream +} + +type bigtableSampleRowKeysServer struct { + grpc.ServerStream +} + +func (x *bigtableSampleRowKeysServer) Send(m *SampleRowKeysResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Bigtable_MutateRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MutateRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServer).MutateRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v2.Bigtable/MutateRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServer).MutateRow(ctx, req.(*MutateRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Bigtable_MutateRows_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(MutateRowsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigtableServer).MutateRows(m, &bigtableMutateRowsServer{stream}) +} + +type Bigtable_MutateRowsServer interface { + Send(*MutateRowsResponse) error + grpc.ServerStream +} + +type bigtableMutateRowsServer struct { + grpc.ServerStream +} + +func (x *bigtableMutateRowsServer) Send(m *MutateRowsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Bigtable_CheckAndMutateRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(CheckAndMutateRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServer).CheckAndMutateRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v2.Bigtable/CheckAndMutateRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServer).CheckAndMutateRow(ctx, req.(*CheckAndMutateRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Bigtable_ReadModifyWriteRow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadModifyWriteRowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigtableServer).ReadModifyWriteRow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bigtable.v2.Bigtable/ReadModifyWriteRow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigtableServer).ReadModifyWriteRow(ctx, req.(*ReadModifyWriteRowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Bigtable_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bigtable.v2.Bigtable", + HandlerType: (*BigtableServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "MutateRow", + Handler: _Bigtable_MutateRow_Handler, + }, + { + MethodName: "CheckAndMutateRow", + Handler: _Bigtable_CheckAndMutateRow_Handler, + }, + { + MethodName: "ReadModifyWriteRow", + Handler: _Bigtable_ReadModifyWriteRow_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ReadRows", + Handler: _Bigtable_ReadRows_Handler, + ServerStreams: true, + }, + { + StreamName: "SampleRowKeys", + Handler: _Bigtable_SampleRowKeys_Handler, + ServerStreams: true, + }, + { + StreamName: "MutateRows", + Handler: _Bigtable_MutateRows_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/bigtable/v2/bigtable.proto", +} + +func init() { + proto.RegisterFile("google/bigtable/v2/bigtable.proto", fileDescriptor_bigtable_b796ce6ccdb99cd8) +} + +var fileDescriptor_bigtable_b796ce6ccdb99cd8 = []byte{ + // 1210 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0x41, 0x6f, 0x1b, 0x45, + 0x14, 0x66, 0xec, 0xd8, 0xf1, 0xbe, 0xa4, 0x4d, 0x32, 0x84, 0x76, 0x6b, 0x5a, 0x70, 0x97, 0x16, + 0xdc, 0x94, 0xae, 0x2b, 0x23, 0x0e, 0x75, 0xd5, 0x02, 0x09, 0x69, 0x53, 0x41, 0xaa, 0x6a, 0x2c, + 0x15, 0x09, 0x22, 0xad, 0xc6, 0xeb, 0xb1, 0x3b, 0x74, 0x77, 0x67, 0xbb, 0x3b, 0x5b, 0xe3, 0x22, + 0x2e, 0xfc, 0x05, 0x8e, 0x08, 0x71, 0x42, 0x48, 0x08, 0x38, 0x73, 0xe3, 0xc0, 0x8d, 0x03, 0x17, + 0xae, 0x1c, 0xfb, 0x0b, 0xb8, 0x23, 0xa1, 0x9d, 0x9d, 0xb5, 0x9d, 0xc4, 0x6e, 0x9d, 0x20, 0x71, + 0xdb, 0x7d, 0xef, 0x7d, 0x6f, 0xbf, 0xf7, 0xbd, 0x37, 0x6f, 0x6c, 0x38, 0xdf, 0x17, 0xa2, 0xef, + 0xb1, 0x46, 0x87, 0xf7, 0x25, 0xed, 0x78, 0xac, 0xf1, 0xb8, 0x39, 0x7a, 0xb6, 0xc3, 0x48, 0x48, + 0x81, 0x71, 0x16, 0x62, 0x8f, 0xcc, 0x8f, 0x9b, 0xd5, 0xb3, 0x1a, 0x46, 0x43, 0xde, 0xa0, 0x41, + 0x20, 0x24, 0x95, 0x5c, 0x04, 0x71, 0x86, 0xa8, 0x9e, 0x9b, 0x92, 0xb4, 0x4b, 0x25, 0xd5, 0xee, + 0x57, 0xb4, 0x5b, 0xbd, 0x75, 0x92, 0x5e, 0x63, 0x10, 0xd1, 0x30, 0x64, 0x51, 0x0e, 0x3f, 0xad, + 0xfd, 0x51, 0xe8, 0x36, 0x62, 0x49, 0x65, 0xa2, 0x1d, 0xd6, 0x5f, 0x08, 0x56, 0x08, 0xa3, 0x5d, + 0x22, 0x06, 0x31, 0x61, 0x8f, 0x12, 0x16, 0x4b, 0x7c, 0x0e, 0x40, 0x7d, 0xc3, 0x09, 0xa8, 0xcf, + 0x4c, 0x54, 0x43, 0x75, 
0x83, 0x18, 0xca, 0x72, 0x97, 0xfa, 0x0c, 0x5f, 0x80, 0x93, 0x34, 0x0c, + 0x9d, 0x30, 0x12, 0x3d, 0xee, 0x31, 0x87, 0x77, 0xcd, 0x92, 0x0a, 0x59, 0xa6, 0x61, 0x78, 0x2f, + 0x33, 0xde, 0xe9, 0x62, 0x1b, 0x16, 0x22, 0x31, 0x88, 0xcd, 0x42, 0x0d, 0xd5, 0x97, 0x9a, 0x55, + 0xfb, 0x70, 0xc5, 0x36, 0x11, 0x83, 0x36, 0x93, 0x44, 0xc5, 0xe1, 0xb7, 0xa1, 0xdc, 0xe3, 0x9e, + 0x64, 0x91, 0x59, 0x54, 0x88, 0x73, 0x33, 0x10, 0xb7, 0x54, 0x10, 0xd1, 0xc1, 0x29, 0xd7, 0x14, + 0xee, 0x78, 0xdc, 0xe7, 0xd2, 0x5c, 0xa8, 0xa1, 0x7a, 0x91, 0x18, 0xa9, 0xe5, 0xc3, 0xd4, 0x60, + 0xfd, 0x5d, 0x84, 0xd5, 0x71, 0x79, 0x71, 0x28, 0x82, 0x98, 0xe1, 0x5b, 0x50, 0x76, 0x1f, 0x24, + 0xc1, 0xc3, 0xd8, 0x44, 0xb5, 0x62, 0x7d, 0xa9, 0x69, 0x4f, 0xfd, 0xd4, 0x01, 0x94, 0xbd, 0xc5, + 0x3c, 0x6f, 0x2b, 0x85, 0x11, 0x8d, 0xc6, 0x0d, 0x58, 0xf7, 0x68, 0x2c, 0x9d, 0xd8, 0xa5, 0x41, + 0xc0, 0xba, 0x4e, 0x24, 0x06, 0xce, 0x43, 0x36, 0x54, 0x25, 0x2f, 0x93, 0xb5, 0xd4, 0xd7, 0xce, + 0x5c, 0x44, 0x0c, 0x3e, 0x60, 0xc3, 0xea, 0xd3, 0x02, 0x18, 0xa3, 0x34, 0xf8, 0x34, 0x2c, 0xe6, + 0x08, 0xa4, 0x10, 0xe5, 0x48, 0x85, 0xe1, 0x1b, 0xb0, 0xd4, 0xa3, 0x3e, 0xf7, 0x86, 0x59, 0x03, + 0x32, 0x05, 0xcf, 0xe6, 0x24, 0xf3, 0x16, 0xdb, 0x6d, 0x19, 0xf1, 0xa0, 0x7f, 0x9f, 0x7a, 0x09, + 0x23, 0x90, 0x01, 0x54, 0x7f, 0xae, 0x81, 0xf1, 0x28, 0xa1, 0x1e, 0xef, 0xf1, 0x91, 0x98, 0x2f, + 0x1f, 0x02, 0x6f, 0x0e, 0x25, 0x8b, 0x33, 0xec, 0x38, 0x1a, 0x5f, 0x82, 0x55, 0xc9, 0x7d, 0x16, + 0x4b, 0xea, 0x87, 0x8e, 0xcf, 0xdd, 0x48, 0xc4, 0x5a, 0xd3, 0x95, 0x91, 0x7d, 0x57, 0x99, 0xf1, + 0x29, 0x28, 0x7b, 0xb4, 0xc3, 0xbc, 0xd8, 0x2c, 0xd5, 0x8a, 0x75, 0x83, 0xe8, 0x37, 0xbc, 0x0e, + 0xa5, 0xc7, 0x69, 0x5a, 0xb3, 0xac, 0x6a, 0xca, 0x5e, 0xd2, 0x36, 0xa9, 0x07, 0x27, 0xe6, 0x4f, + 0x98, 0xb9, 0x58, 0x43, 0xf5, 0x12, 0x31, 0x94, 0xa5, 0xcd, 0x9f, 0xa4, 0x6e, 0x23, 0x62, 0x31, + 0x93, 0xa9, 0x84, 0x66, 0xa5, 0x86, 0xea, 0x95, 0x9d, 0x17, 0x48, 0x45, 0x99, 0x88, 0x18, 0xe0, + 0x57, 0x01, 0x5c, 0xe1, 0xfb, 0x3c, 0xf3, 0x1b, 0xda, 0x6f, 0x64, 0x36, 0x22, 0x06, 0x9b, 0xcb, + 0x6a, 0x0a, 0x9c, 0x6c, 0xb2, 0xad, 0x4f, 0x60, 0xbd, 0x4d, 0xfd, 0xd0, 0x63, 0x99, 0xec, 0xc7, + 0x9f, 0xeb, 0xc2, 0xe1, 0xb9, 0xb6, 0xda, 0xf0, 0xd2, 0x81, 0xe4, 0x7a, 0xaa, 0x66, 0xb6, 0xf3, + 0x3c, 0x2c, 0x8b, 0x5e, 0x2f, 0xad, 0xae, 0x93, 0x8a, 0xae, 0xb2, 0x16, 0xc9, 0x52, 0x66, 0x53, + 0x7d, 0xb0, 0x7e, 0x44, 0xb0, 0xba, 0x9b, 0x48, 0x2a, 0xd3, 0xac, 0xc7, 0xa6, 0xbb, 0x30, 0xe5, + 0x18, 0x4e, 0xb0, 0x2a, 0xec, 0x63, 0xd5, 0x02, 0xc3, 0x4f, 0xf4, 0x8e, 0x31, 0x8b, 0xea, 0x1c, + 0x9c, 0x9d, 0x76, 0x0e, 0x76, 0x75, 0x10, 0x19, 0x87, 0x5b, 0x2f, 0xc2, 0xda, 0x04, 0xdb, 0xac, + 0x7e, 0xeb, 0x1f, 0x34, 0x61, 0x3d, 0xbe, 0xe6, 0xc5, 0x29, 0x45, 0x6c, 0xc3, 0x22, 0x0b, 0x64, + 0xc4, 0x95, 0x78, 0x29, 0xd3, 0xcb, 0x33, 0x99, 0x4e, 0x7e, 0xdc, 0xde, 0x0e, 0x64, 0x34, 0x24, + 0x39, 0xb6, 0xba, 0x07, 0x25, 0x65, 0x99, 0xdd, 0xaa, 0x7d, 0xa2, 0x14, 0x8e, 0x26, 0xca, 0xf7, + 0x08, 0xf0, 0x24, 0x85, 0xd1, 0xb2, 0x19, 0x71, 0xcf, 0xb6, 0xcd, 0x9b, 0xcf, 0xe3, 0xae, 0xf7, + 0xcd, 0x01, 0xf2, 0x77, 0x72, 0xf2, 0xeb, 0x50, 0xe2, 0x41, 0x97, 0x7d, 0xa6, 0xa8, 0x17, 0x49, + 0xf6, 0x82, 0x37, 0xa0, 0x9c, 0x4d, 0xbf, 0x5e, 0x17, 0x38, 0xff, 0x4a, 0x14, 0xba, 0x76, 0x5b, + 0x79, 0x88, 0x8e, 0xb0, 0xfe, 0x28, 0x80, 0xb9, 0xf5, 0x80, 0xb9, 0x0f, 0xdf, 0x0b, 0xba, 0xff, + 0x7d, 0xea, 0x16, 0x8f, 0x32, 0x75, 0x3b, 0xb0, 0x1a, 0x46, 0xac, 0xcb, 0x5d, 0x2a, 0x99, 0xa3, + 0xf7, 0x7d, 0x79, 0x9e, 0x7d, 0xbf, 0x32, 0x82, 0x65, 0x06, 0xbc, 0x05, 0x27, 0x65, 0x94, 0x30, + 0x67, 0xdc, 0xaf, 0x85, 0x39, 0xfa, 0x75, 0x22, 
0xc5, 0xe4, 0x6f, 0x31, 0xde, 0x86, 0x95, 0x1e, + 0xf5, 0xe2, 0xc9, 0x2c, 0xa5, 0x39, 0xb2, 0x9c, 0x54, 0xa0, 0x51, 0x1a, 0x6b, 0x07, 0xce, 0x4c, + 0xd1, 0x53, 0x0f, 0xc0, 0x65, 0x58, 0x1b, 0x97, 0xec, 0x53, 0xe9, 0x3e, 0x60, 0x5d, 0xa5, 0x6b, + 0x85, 0x8c, 0xb5, 0xd8, 0xcd, 0xec, 0xd6, 0x2f, 0x08, 0xce, 0xa4, 0x37, 0xcf, 0xae, 0xe8, 0xf2, + 0xde, 0xf0, 0xa3, 0x88, 0xff, 0x8f, 0x1b, 0xe1, 0x06, 0x94, 0xa2, 0xc4, 0x63, 0xf9, 0x36, 0x78, + 0x63, 0xd6, 0xad, 0x38, 0xc9, 0x2d, 0xf1, 0x18, 0xc9, 0x50, 0xd6, 0x6d, 0xa8, 0x4e, 0x63, 0xae, + 0x55, 0xb8, 0x04, 0xc5, 0x74, 0x77, 0x23, 0xd5, 0xeb, 0xd3, 0x33, 0x7a, 0x4d, 0xd2, 0x98, 0xe6, + 0x4f, 0x15, 0xa8, 0x6c, 0x6a, 0x07, 0xfe, 0x06, 0x41, 0x25, 0xbf, 0x8a, 0xf1, 0x6b, 0xcf, 0xbe, + 0xa8, 0x95, 0x48, 0xd5, 0x0b, 0xf3, 0xdc, 0xe6, 0xd6, 0xfb, 0x5f, 0xfe, 0xf9, 0xf4, 0xab, 0xc2, + 0x4d, 0xeb, 0x5a, 0xfa, 0x43, 0xea, 0xf3, 0xb1, 0xaa, 0x37, 0xc2, 0x48, 0x7c, 0xca, 0x5c, 0x19, + 0x37, 0x36, 0x1a, 0x3c, 0x88, 0x25, 0x0d, 0x5c, 0x96, 0x3e, 0xab, 0x88, 0xb8, 0xb1, 0xf1, 0x45, + 0x2b, 0xd2, 0xa9, 0x5a, 0x68, 0xe3, 0x2a, 0xc2, 0x3f, 0x23, 0x38, 0xb1, 0xef, 0x3e, 0xc0, 0xf5, + 0x69, 0xdf, 0x9f, 0x76, 0x1f, 0x55, 0x2f, 0xcd, 0x11, 0xa9, 0xe9, 0xde, 0x52, 0x74, 0xdf, 0xc5, + 0x37, 0x8f, 0x4c, 0x37, 0x9e, 0xcc, 0x77, 0x15, 0xe1, 0x6f, 0x11, 0x18, 0xa3, 0x21, 0xc5, 0x17, + 0x9e, 0xb9, 0x8c, 0x72, 0xa2, 0x17, 0x9f, 0x13, 0xa5, 0x49, 0x6e, 0x2b, 0x92, 0xef, 0x58, 0xad, + 0x23, 0x93, 0xf4, 0xf3, 0x5c, 0x2d, 0xb4, 0x81, 0xbf, 0x43, 0x00, 0xe3, 0x7d, 0x88, 0x2f, 0xce, + 0xb5, 0xeb, 0xab, 0xaf, 0xcf, 0xb7, 0x56, 0x73, 0x25, 0xad, 0xeb, 0xc7, 0x27, 0xa9, 0x5b, 0xff, + 0x2b, 0x82, 0xb5, 0x43, 0xc7, 0x1e, 0x4f, 0x5d, 0xef, 0xb3, 0xb6, 0x6d, 0xf5, 0xca, 0x9c, 0xd1, + 0x9a, 0xfc, 0xae, 0x22, 0x7f, 0xdb, 0xda, 0x3c, 0x32, 0x79, 0xf7, 0x60, 0xce, 0x54, 0xe9, 0xdf, + 0x10, 0xe0, 0xc3, 0x67, 0x16, 0x5f, 0x99, 0xe7, 0xe4, 0x8f, 0x6b, 0xb0, 0xe7, 0x0d, 0xd7, 0x45, + 0xdc, 0x55, 0x45, 0xec, 0x58, 0x5b, 0xc7, 0x3a, 0x7a, 0xfb, 0x93, 0xb6, 0xd0, 0xc6, 0xe6, 0xd7, + 0x08, 0x4e, 0xb9, 0xc2, 0x9f, 0xc2, 0x62, 0xf3, 0x44, 0xbe, 0x47, 0xee, 0xa5, 0xbf, 0x7b, 0xef, + 0xa1, 0x8f, 0x5b, 0x3a, 0xa8, 0x2f, 0x3c, 0x1a, 0xf4, 0x6d, 0x11, 0xf5, 0x1b, 0x7d, 0x16, 0xa8, + 0x5f, 0xc5, 0x8d, 0xcc, 0x45, 0x43, 0x1e, 0x4f, 0xfe, 0xcb, 0xba, 0x9e, 0x3f, 0xff, 0x50, 0x30, + 0x6f, 0x67, 0xe0, 0x2d, 0x4f, 0x24, 0x5d, 0x3b, 0x4f, 0x6d, 0xdf, 0x6f, 0xfe, 0x9e, 0xbb, 0xf6, + 0x94, 0x6b, 0x2f, 0x77, 0xed, 0xdd, 0x6f, 0x76, 0xca, 0x2a, 0xf9, 0x5b, 0xff, 0x06, 0x00, 0x00, + 0xff, 0xff, 0xd6, 0x35, 0xfc, 0x0e, 0x16, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bigtable/v2/data.pb.go b/vendor/google.golang.org/genproto/googleapis/bigtable/v2/data.pb.go new file mode 100644 index 0000000..858db40 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bigtable/v2/data.pb.go @@ -0,0 +1,2611 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bigtable/v2/data.proto + +package bigtable // import "google.golang.org/genproto/googleapis/bigtable/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies the complete (requested) contents of a single row of a table. +// Rows which exceed 256MiB in size cannot be read in full. +type Row struct { + // The unique key which identifies this row within its table. This is the same + // key that's used to identify the row in, for example, a MutateRowRequest. + // May contain any non-empty byte string up to 4KiB in length. + Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // May be empty, but only if the entire row is empty. + // The mutual ordering of column families is not specified. + Families []*Family `protobuf:"bytes,2,rep,name=families,proto3" json:"families,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Row) Reset() { *m = Row{} } +func (m *Row) String() string { return proto.CompactTextString(m) } +func (*Row) ProtoMessage() {} +func (*Row) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{0} +} +func (m *Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Row.Unmarshal(m, b) +} +func (m *Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Row.Marshal(b, m, deterministic) +} +func (dst *Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_Row.Merge(dst, src) +} +func (m *Row) XXX_Size() int { + return xxx_messageInfo_Row.Size(m) +} +func (m *Row) XXX_DiscardUnknown() { + xxx_messageInfo_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_Row proto.InternalMessageInfo + +func (m *Row) GetKey() []byte { + if m != nil { + return m.Key + } + return nil +} + +func (m *Row) GetFamilies() []*Family { + if m != nil { + return m.Families + } + return nil +} + +// Specifies (some of) the contents of a single row/column family intersection +// of a table. +type Family struct { + // The unique key which identifies this family within its row. This is the + // same key that's used to identify the family in, for example, a RowFilter + // which sets its "family_name_regex_filter" field. + // Must match `[-_.a-zA-Z0-9]+`, except that AggregatingRowProcessors may + // produce cells in a sentinel family with an empty name. + // Must be no greater than 64 characters in length. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Must not be empty. Sorted in order of increasing "qualifier". 
+ Columns []*Column `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Family) Reset() { *m = Family{} } +func (m *Family) String() string { return proto.CompactTextString(m) } +func (*Family) ProtoMessage() {} +func (*Family) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{1} +} +func (m *Family) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Family.Unmarshal(m, b) +} +func (m *Family) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Family.Marshal(b, m, deterministic) +} +func (dst *Family) XXX_Merge(src proto.Message) { + xxx_messageInfo_Family.Merge(dst, src) +} +func (m *Family) XXX_Size() int { + return xxx_messageInfo_Family.Size(m) +} +func (m *Family) XXX_DiscardUnknown() { + xxx_messageInfo_Family.DiscardUnknown(m) +} + +var xxx_messageInfo_Family proto.InternalMessageInfo + +func (m *Family) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Family) GetColumns() []*Column { + if m != nil { + return m.Columns + } + return nil +} + +// Specifies (some of) the contents of a single row/column intersection of a +// table. +type Column struct { + // The unique key which identifies this column within its family. This is the + // same key that's used to identify the column in, for example, a RowFilter + // which sets its `column_qualifier_regex_filter` field. + // May contain any byte string, including the empty string, up to 16kiB in + // length. + Qualifier []byte `protobuf:"bytes,1,opt,name=qualifier,proto3" json:"qualifier,omitempty"` + // Must not be empty. Sorted in order of decreasing "timestamp_micros". + Cells []*Cell `protobuf:"bytes,2,rep,name=cells,proto3" json:"cells,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Column) Reset() { *m = Column{} } +func (m *Column) String() string { return proto.CompactTextString(m) } +func (*Column) ProtoMessage() {} +func (*Column) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{2} +} +func (m *Column) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Column.Unmarshal(m, b) +} +func (m *Column) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Column.Marshal(b, m, deterministic) +} +func (dst *Column) XXX_Merge(src proto.Message) { + xxx_messageInfo_Column.Merge(dst, src) +} +func (m *Column) XXX_Size() int { + return xxx_messageInfo_Column.Size(m) +} +func (m *Column) XXX_DiscardUnknown() { + xxx_messageInfo_Column.DiscardUnknown(m) +} + +var xxx_messageInfo_Column proto.InternalMessageInfo + +func (m *Column) GetQualifier() []byte { + if m != nil { + return m.Qualifier + } + return nil +} + +func (m *Column) GetCells() []*Cell { + if m != nil { + return m.Cells + } + return nil +} + +// Specifies (some of) the contents of a single row/column/timestamp of a table. +type Cell struct { + // The cell's stored timestamp, which also uniquely identifies it within + // its column. + // Values are always expressed in microseconds, but individual tables may set + // a coarser granularity to further restrict the allowed values. For + // example, a table which specifies millisecond granularity will only allow + // values of `timestamp_micros` which are multiples of 1000. 
+ TimestampMicros int64 `protobuf:"varint,1,opt,name=timestamp_micros,json=timestampMicros,proto3" json:"timestamp_micros,omitempty"` + // The value stored in the cell. + // May contain any byte string, including the empty string, up to 100MiB in + // length. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Labels applied to the cell by a [RowFilter][google.bigtable.v2.RowFilter]. + Labels []string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cell) Reset() { *m = Cell{} } +func (m *Cell) String() string { return proto.CompactTextString(m) } +func (*Cell) ProtoMessage() {} +func (*Cell) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{3} +} +func (m *Cell) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cell.Unmarshal(m, b) +} +func (m *Cell) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cell.Marshal(b, m, deterministic) +} +func (dst *Cell) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cell.Merge(dst, src) +} +func (m *Cell) XXX_Size() int { + return xxx_messageInfo_Cell.Size(m) +} +func (m *Cell) XXX_DiscardUnknown() { + xxx_messageInfo_Cell.DiscardUnknown(m) +} + +var xxx_messageInfo_Cell proto.InternalMessageInfo + +func (m *Cell) GetTimestampMicros() int64 { + if m != nil { + return m.TimestampMicros + } + return 0 +} + +func (m *Cell) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +func (m *Cell) GetLabels() []string { + if m != nil { + return m.Labels + } + return nil +} + +// Specifies a contiguous range of rows. +type RowRange struct { + // The row key at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + // + // Types that are valid to be assigned to StartKey: + // *RowRange_StartKeyClosed + // *RowRange_StartKeyOpen + StartKey isRowRange_StartKey `protobuf_oneof:"start_key"` + // The row key at which to end the range. + // If neither field is set, interpreted as the infinite row key, exclusive. 
+ // + // Types that are valid to be assigned to EndKey: + // *RowRange_EndKeyOpen + // *RowRange_EndKeyClosed + EndKey isRowRange_EndKey `protobuf_oneof:"end_key"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowRange) Reset() { *m = RowRange{} } +func (m *RowRange) String() string { return proto.CompactTextString(m) } +func (*RowRange) ProtoMessage() {} +func (*RowRange) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{4} +} +func (m *RowRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowRange.Unmarshal(m, b) +} +func (m *RowRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowRange.Marshal(b, m, deterministic) +} +func (dst *RowRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowRange.Merge(dst, src) +} +func (m *RowRange) XXX_Size() int { + return xxx_messageInfo_RowRange.Size(m) +} +func (m *RowRange) XXX_DiscardUnknown() { + xxx_messageInfo_RowRange.DiscardUnknown(m) +} + +var xxx_messageInfo_RowRange proto.InternalMessageInfo + +type isRowRange_StartKey interface { + isRowRange_StartKey() +} + +type RowRange_StartKeyClosed struct { + StartKeyClosed []byte `protobuf:"bytes,1,opt,name=start_key_closed,json=startKeyClosed,proto3,oneof"` +} + +type RowRange_StartKeyOpen struct { + StartKeyOpen []byte `protobuf:"bytes,2,opt,name=start_key_open,json=startKeyOpen,proto3,oneof"` +} + +func (*RowRange_StartKeyClosed) isRowRange_StartKey() {} + +func (*RowRange_StartKeyOpen) isRowRange_StartKey() {} + +func (m *RowRange) GetStartKey() isRowRange_StartKey { + if m != nil { + return m.StartKey + } + return nil +} + +func (m *RowRange) GetStartKeyClosed() []byte { + if x, ok := m.GetStartKey().(*RowRange_StartKeyClosed); ok { + return x.StartKeyClosed + } + return nil +} + +func (m *RowRange) GetStartKeyOpen() []byte { + if x, ok := m.GetStartKey().(*RowRange_StartKeyOpen); ok { + return x.StartKeyOpen + } + return nil +} + +type isRowRange_EndKey interface { + isRowRange_EndKey() +} + +type RowRange_EndKeyOpen struct { + EndKeyOpen []byte `protobuf:"bytes,3,opt,name=end_key_open,json=endKeyOpen,proto3,oneof"` +} + +type RowRange_EndKeyClosed struct { + EndKeyClosed []byte `protobuf:"bytes,4,opt,name=end_key_closed,json=endKeyClosed,proto3,oneof"` +} + +func (*RowRange_EndKeyOpen) isRowRange_EndKey() {} + +func (*RowRange_EndKeyClosed) isRowRange_EndKey() {} + +func (m *RowRange) GetEndKey() isRowRange_EndKey { + if m != nil { + return m.EndKey + } + return nil +} + +func (m *RowRange) GetEndKeyOpen() []byte { + if x, ok := m.GetEndKey().(*RowRange_EndKeyOpen); ok { + return x.EndKeyOpen + } + return nil +} + +func (m *RowRange) GetEndKeyClosed() []byte { + if x, ok := m.GetEndKey().(*RowRange_EndKeyClosed); ok { + return x.EndKeyClosed + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RowRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RowRange_OneofMarshaler, _RowRange_OneofUnmarshaler, _RowRange_OneofSizer, []interface{}{ + (*RowRange_StartKeyClosed)(nil), + (*RowRange_StartKeyOpen)(nil), + (*RowRange_EndKeyOpen)(nil), + (*RowRange_EndKeyClosed)(nil), + } +} + +func _RowRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RowRange) + // start_key + switch x := m.StartKey.(type) { + case *RowRange_StartKeyClosed: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartKeyClosed) + case *RowRange_StartKeyOpen: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartKeyOpen) + case nil: + default: + return fmt.Errorf("RowRange.StartKey has unexpected type %T", x) + } + // end_key + switch x := m.EndKey.(type) { + case *RowRange_EndKeyOpen: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndKeyOpen) + case *RowRange_EndKeyClosed: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndKeyClosed) + case nil: + default: + return fmt.Errorf("RowRange.EndKey has unexpected type %T", x) + } + return nil +} + +func _RowRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RowRange) + switch tag { + case 1: // start_key.start_key_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartKey = &RowRange_StartKeyClosed{x} + return true, err + case 2: // start_key.start_key_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartKey = &RowRange_StartKeyOpen{x} + return true, err + case 3: // end_key.end_key_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndKey = &RowRange_EndKeyOpen{x} + return true, err + case 4: // end_key.end_key_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndKey = &RowRange_EndKeyClosed{x} + return true, err + default: + return false, nil + } +} + +func _RowRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RowRange) + // start_key + switch x := m.StartKey.(type) { + case *RowRange_StartKeyClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartKeyClosed))) + n += len(x.StartKeyClosed) + case *RowRange_StartKeyOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartKeyOpen))) + n += len(x.StartKeyOpen) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_key + switch x := m.EndKey.(type) { + case *RowRange_EndKeyOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndKeyOpen))) + n += len(x.EndKeyOpen) + case *RowRange_EndKeyClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndKeyClosed))) + n += len(x.EndKeyClosed) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specifies a non-contiguous set of rows. +type RowSet struct { + // Single rows included in the set. + RowKeys [][]byte `protobuf:"bytes,1,rep,name=row_keys,json=rowKeys,proto3" json:"row_keys,omitempty"` + // Contiguous row ranges included in the set. 
+ RowRanges []*RowRange `protobuf:"bytes,2,rep,name=row_ranges,json=rowRanges,proto3" json:"row_ranges,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowSet) Reset() { *m = RowSet{} } +func (m *RowSet) String() string { return proto.CompactTextString(m) } +func (*RowSet) ProtoMessage() {} +func (*RowSet) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{5} +} +func (m *RowSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowSet.Unmarshal(m, b) +} +func (m *RowSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowSet.Marshal(b, m, deterministic) +} +func (dst *RowSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowSet.Merge(dst, src) +} +func (m *RowSet) XXX_Size() int { + return xxx_messageInfo_RowSet.Size(m) +} +func (m *RowSet) XXX_DiscardUnknown() { + xxx_messageInfo_RowSet.DiscardUnknown(m) +} + +var xxx_messageInfo_RowSet proto.InternalMessageInfo + +func (m *RowSet) GetRowKeys() [][]byte { + if m != nil { + return m.RowKeys + } + return nil +} + +func (m *RowSet) GetRowRanges() []*RowRange { + if m != nil { + return m.RowRanges + } + return nil +} + +// Specifies a contiguous range of columns within a single column family. +// The range spans from <column_family>:<start_qualifier> to +// <column_family>:<end_qualifier>, where both bounds can be either +// inclusive or exclusive. +type ColumnRange struct { + // The name of the column family within which this range falls. + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The column qualifier at which to start the range (within `column_family`). + // If neither field is set, interpreted as the empty string, inclusive. + // + // Types that are valid to be assigned to StartQualifier: + // *ColumnRange_StartQualifierClosed + // *ColumnRange_StartQualifierOpen + StartQualifier isColumnRange_StartQualifier `protobuf_oneof:"start_qualifier"` + // The column qualifier at which to end the range (within `column_family`). + // If neither field is set, interpreted as the infinite string, exclusive. 
+ // + // Types that are valid to be assigned to EndQualifier: + // *ColumnRange_EndQualifierClosed + // *ColumnRange_EndQualifierOpen + EndQualifier isColumnRange_EndQualifier `protobuf_oneof:"end_qualifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnRange) Reset() { *m = ColumnRange{} } +func (m *ColumnRange) String() string { return proto.CompactTextString(m) } +func (*ColumnRange) ProtoMessage() {} +func (*ColumnRange) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{6} +} +func (m *ColumnRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnRange.Unmarshal(m, b) +} +func (m *ColumnRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnRange.Marshal(b, m, deterministic) +} +func (dst *ColumnRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnRange.Merge(dst, src) +} +func (m *ColumnRange) XXX_Size() int { + return xxx_messageInfo_ColumnRange.Size(m) +} +func (m *ColumnRange) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnRange.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnRange proto.InternalMessageInfo + +func (m *ColumnRange) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +type isColumnRange_StartQualifier interface { + isColumnRange_StartQualifier() +} + +type ColumnRange_StartQualifierClosed struct { + StartQualifierClosed []byte `protobuf:"bytes,2,opt,name=start_qualifier_closed,json=startQualifierClosed,proto3,oneof"` +} + +type ColumnRange_StartQualifierOpen struct { + StartQualifierOpen []byte `protobuf:"bytes,3,opt,name=start_qualifier_open,json=startQualifierOpen,proto3,oneof"` +} + +func (*ColumnRange_StartQualifierClosed) isColumnRange_StartQualifier() {} + +func (*ColumnRange_StartQualifierOpen) isColumnRange_StartQualifier() {} + +func (m *ColumnRange) GetStartQualifier() isColumnRange_StartQualifier { + if m != nil { + return m.StartQualifier + } + return nil +} + +func (m *ColumnRange) GetStartQualifierClosed() []byte { + if x, ok := m.GetStartQualifier().(*ColumnRange_StartQualifierClosed); ok { + return x.StartQualifierClosed + } + return nil +} + +func (m *ColumnRange) GetStartQualifierOpen() []byte { + if x, ok := m.GetStartQualifier().(*ColumnRange_StartQualifierOpen); ok { + return x.StartQualifierOpen + } + return nil +} + +type isColumnRange_EndQualifier interface { + isColumnRange_EndQualifier() +} + +type ColumnRange_EndQualifierClosed struct { + EndQualifierClosed []byte `protobuf:"bytes,4,opt,name=end_qualifier_closed,json=endQualifierClosed,proto3,oneof"` +} + +type ColumnRange_EndQualifierOpen struct { + EndQualifierOpen []byte `protobuf:"bytes,5,opt,name=end_qualifier_open,json=endQualifierOpen,proto3,oneof"` +} + +func (*ColumnRange_EndQualifierClosed) isColumnRange_EndQualifier() {} + +func (*ColumnRange_EndQualifierOpen) isColumnRange_EndQualifier() {} + +func (m *ColumnRange) GetEndQualifier() isColumnRange_EndQualifier { + if m != nil { + return m.EndQualifier + } + return nil +} + +func (m *ColumnRange) GetEndQualifierClosed() []byte { + if x, ok := m.GetEndQualifier().(*ColumnRange_EndQualifierClosed); ok { + return x.EndQualifierClosed + } + return nil +} + +func (m *ColumnRange) GetEndQualifierOpen() []byte { + if x, ok := m.GetEndQualifier().(*ColumnRange_EndQualifierOpen); ok { + return x.EndQualifierOpen + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ColumnRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ColumnRange_OneofMarshaler, _ColumnRange_OneofUnmarshaler, _ColumnRange_OneofSizer, []interface{}{ + (*ColumnRange_StartQualifierClosed)(nil), + (*ColumnRange_StartQualifierOpen)(nil), + (*ColumnRange_EndQualifierClosed)(nil), + (*ColumnRange_EndQualifierOpen)(nil), + } +} + +func _ColumnRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ColumnRange) + // start_qualifier + switch x := m.StartQualifier.(type) { + case *ColumnRange_StartQualifierClosed: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartQualifierClosed) + case *ColumnRange_StartQualifierOpen: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartQualifierOpen) + case nil: + default: + return fmt.Errorf("ColumnRange.StartQualifier has unexpected type %T", x) + } + // end_qualifier + switch x := m.EndQualifier.(type) { + case *ColumnRange_EndQualifierClosed: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndQualifierClosed) + case *ColumnRange_EndQualifierOpen: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndQualifierOpen) + case nil: + default: + return fmt.Errorf("ColumnRange.EndQualifier has unexpected type %T", x) + } + return nil +} + +func _ColumnRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ColumnRange) + switch tag { + case 2: // start_qualifier.start_qualifier_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartQualifier = &ColumnRange_StartQualifierClosed{x} + return true, err + case 3: // start_qualifier.start_qualifier_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartQualifier = &ColumnRange_StartQualifierOpen{x} + return true, err + case 4: // end_qualifier.end_qualifier_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndQualifier = &ColumnRange_EndQualifierClosed{x} + return true, err + case 5: // end_qualifier.end_qualifier_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndQualifier = &ColumnRange_EndQualifierOpen{x} + return true, err + default: + return false, nil + } +} + +func _ColumnRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ColumnRange) + // start_qualifier + switch x := m.StartQualifier.(type) { + case *ColumnRange_StartQualifierClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartQualifierClosed))) + n += len(x.StartQualifierClosed) + case *ColumnRange_StartQualifierOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartQualifierOpen))) + n += len(x.StartQualifierOpen) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_qualifier + switch x := m.EndQualifier.(type) { + case *ColumnRange_EndQualifierClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndQualifierClosed))) + n += len(x.EndQualifierClosed) + case *ColumnRange_EndQualifierOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndQualifierOpen))) + n += len(x.EndQualifierOpen) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in 
oneof", x)) + } + return n +} + +// Specified a contiguous range of microsecond timestamps. +type TimestampRange struct { + // Inclusive lower bound. If left empty, interpreted as 0. + StartTimestampMicros int64 `protobuf:"varint,1,opt,name=start_timestamp_micros,json=startTimestampMicros,proto3" json:"start_timestamp_micros,omitempty"` + // Exclusive upper bound. If left empty, interpreted as infinity. + EndTimestampMicros int64 `protobuf:"varint,2,opt,name=end_timestamp_micros,json=endTimestampMicros,proto3" json:"end_timestamp_micros,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampRange) Reset() { *m = TimestampRange{} } +func (m *TimestampRange) String() string { return proto.CompactTextString(m) } +func (*TimestampRange) ProtoMessage() {} +func (*TimestampRange) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{7} +} +func (m *TimestampRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampRange.Unmarshal(m, b) +} +func (m *TimestampRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampRange.Marshal(b, m, deterministic) +} +func (dst *TimestampRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampRange.Merge(dst, src) +} +func (m *TimestampRange) XXX_Size() int { + return xxx_messageInfo_TimestampRange.Size(m) +} +func (m *TimestampRange) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampRange.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampRange proto.InternalMessageInfo + +func (m *TimestampRange) GetStartTimestampMicros() int64 { + if m != nil { + return m.StartTimestampMicros + } + return 0 +} + +func (m *TimestampRange) GetEndTimestampMicros() int64 { + if m != nil { + return m.EndTimestampMicros + } + return 0 +} + +// Specifies a contiguous range of raw byte values. +type ValueRange struct { + // The value at which to start the range. + // If neither field is set, interpreted as the empty string, inclusive. + // + // Types that are valid to be assigned to StartValue: + // *ValueRange_StartValueClosed + // *ValueRange_StartValueOpen + StartValue isValueRange_StartValue `protobuf_oneof:"start_value"` + // The value at which to end the range. + // If neither field is set, interpreted as the infinite string, exclusive. 
+ // + // Types that are valid to be assigned to EndValue: + // *ValueRange_EndValueClosed + // *ValueRange_EndValueOpen + EndValue isValueRange_EndValue `protobuf_oneof:"end_value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueRange) Reset() { *m = ValueRange{} } +func (m *ValueRange) String() string { return proto.CompactTextString(m) } +func (*ValueRange) ProtoMessage() {} +func (*ValueRange) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{8} +} +func (m *ValueRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueRange.Unmarshal(m, b) +} +func (m *ValueRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueRange.Marshal(b, m, deterministic) +} +func (dst *ValueRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueRange.Merge(dst, src) +} +func (m *ValueRange) XXX_Size() int { + return xxx_messageInfo_ValueRange.Size(m) +} +func (m *ValueRange) XXX_DiscardUnknown() { + xxx_messageInfo_ValueRange.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueRange proto.InternalMessageInfo + +type isValueRange_StartValue interface { + isValueRange_StartValue() +} + +type ValueRange_StartValueClosed struct { + StartValueClosed []byte `protobuf:"bytes,1,opt,name=start_value_closed,json=startValueClosed,proto3,oneof"` +} + +type ValueRange_StartValueOpen struct { + StartValueOpen []byte `protobuf:"bytes,2,opt,name=start_value_open,json=startValueOpen,proto3,oneof"` +} + +func (*ValueRange_StartValueClosed) isValueRange_StartValue() {} + +func (*ValueRange_StartValueOpen) isValueRange_StartValue() {} + +func (m *ValueRange) GetStartValue() isValueRange_StartValue { + if m != nil { + return m.StartValue + } + return nil +} + +func (m *ValueRange) GetStartValueClosed() []byte { + if x, ok := m.GetStartValue().(*ValueRange_StartValueClosed); ok { + return x.StartValueClosed + } + return nil +} + +func (m *ValueRange) GetStartValueOpen() []byte { + if x, ok := m.GetStartValue().(*ValueRange_StartValueOpen); ok { + return x.StartValueOpen + } + return nil +} + +type isValueRange_EndValue interface { + isValueRange_EndValue() +} + +type ValueRange_EndValueClosed struct { + EndValueClosed []byte `protobuf:"bytes,3,opt,name=end_value_closed,json=endValueClosed,proto3,oneof"` +} + +type ValueRange_EndValueOpen struct { + EndValueOpen []byte `protobuf:"bytes,4,opt,name=end_value_open,json=endValueOpen,proto3,oneof"` +} + +func (*ValueRange_EndValueClosed) isValueRange_EndValue() {} + +func (*ValueRange_EndValueOpen) isValueRange_EndValue() {} + +func (m *ValueRange) GetEndValue() isValueRange_EndValue { + if m != nil { + return m.EndValue + } + return nil +} + +func (m *ValueRange) GetEndValueClosed() []byte { + if x, ok := m.GetEndValue().(*ValueRange_EndValueClosed); ok { + return x.EndValueClosed + } + return nil +} + +func (m *ValueRange) GetEndValueOpen() []byte { + if x, ok := m.GetEndValue().(*ValueRange_EndValueOpen); ok { + return x.EndValueOpen + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ValueRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ValueRange_OneofMarshaler, _ValueRange_OneofUnmarshaler, _ValueRange_OneofSizer, []interface{}{ + (*ValueRange_StartValueClosed)(nil), + (*ValueRange_StartValueOpen)(nil), + (*ValueRange_EndValueClosed)(nil), + (*ValueRange_EndValueOpen)(nil), + } +} + +func _ValueRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ValueRange) + // start_value + switch x := m.StartValue.(type) { + case *ValueRange_StartValueClosed: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartValueClosed) + case *ValueRange_StartValueOpen: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.StartValueOpen) + case nil: + default: + return fmt.Errorf("ValueRange.StartValue has unexpected type %T", x) + } + // end_value + switch x := m.EndValue.(type) { + case *ValueRange_EndValueClosed: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndValueClosed) + case *ValueRange_EndValueOpen: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.EndValueOpen) + case nil: + default: + return fmt.Errorf("ValueRange.EndValue has unexpected type %T", x) + } + return nil +} + +func _ValueRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ValueRange) + switch tag { + case 1: // start_value.start_value_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartValue = &ValueRange_StartValueClosed{x} + return true, err + case 2: // start_value.start_value_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StartValue = &ValueRange_StartValueOpen{x} + return true, err + case 3: // end_value.end_value_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndValue = &ValueRange_EndValueClosed{x} + return true, err + case 4: // end_value.end_value_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.EndValue = &ValueRange_EndValueOpen{x} + return true, err + default: + return false, nil + } +} + +func _ValueRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ValueRange) + // start_value + switch x := m.StartValue.(type) { + case *ValueRange_StartValueClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartValueClosed))) + n += len(x.StartValueClosed) + case *ValueRange_StartValueOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StartValueOpen))) + n += len(x.StartValueOpen) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_value + switch x := m.EndValue.(type) { + case *ValueRange_EndValueClosed: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndValueClosed))) + n += len(x.EndValueClosed) + case *ValueRange_EndValueOpen: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndValueOpen))) + n += len(x.EndValueOpen) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Takes a row as input and produces an alternate view of the row based on +// specified rules. 
For example, a RowFilter might trim down a row to include +// just the cells from columns matching a given regular expression, or might +// return all the cells of a row but not their values. More complicated filters +// can be composed out of these components to express requests such as, "within +// every column of a particular family, give just the two most recent cells +// which are older than timestamp X." +// +// There are two broad categories of RowFilters (true filters and transformers), +// as well as two ways to compose simple filters into more complex ones +// (chains and interleaves). They work as follows: +// +// * True filters alter the input row by excluding some of its cells wholesale +// from the output row. An example of a true filter is the `value_regex_filter`, +// which excludes cells whose values don't match the specified pattern. All +// regex true filters use RE2 syntax (https://github.com/google/re2/wiki/Syntax) +// in raw byte mode (RE2::Latin1), and are evaluated as full matches. An +// important point to keep in mind is that `RE2(.)` is equivalent by default to +// `RE2([^\n])`, meaning that it does not match newlines. When attempting to +// match an arbitrary byte, you should therefore use the escape sequence `\C`, +// which may need to be further escaped as `\\C` in your client language. +// +// * Transformers alter the input row by changing the values of some of its +// cells in the output, without excluding them completely. Currently, the only +// supported transformer is the `strip_value_transformer`, which replaces every +// cell's value with the empty string. +// +// * Chains and interleaves are described in more detail in the +// RowFilter.Chain and RowFilter.Interleave documentation. +// +// The total serialized size of a RowFilter message must not +// exceed 4096 bytes, and RowFilters may not be nested within each other +// (in Chains or Interleaves) to a depth of more than 20. +type RowFilter struct { + // Which of the possible RowFilter types to apply. If none are set, this + // RowFilter returns all cells in the input row. 
+ // + // Types that are valid to be assigned to Filter: + // *RowFilter_Chain_ + // *RowFilter_Interleave_ + // *RowFilter_Condition_ + // *RowFilter_Sink + // *RowFilter_PassAllFilter + // *RowFilter_BlockAllFilter + // *RowFilter_RowKeyRegexFilter + // *RowFilter_RowSampleFilter + // *RowFilter_FamilyNameRegexFilter + // *RowFilter_ColumnQualifierRegexFilter + // *RowFilter_ColumnRangeFilter + // *RowFilter_TimestampRangeFilter + // *RowFilter_ValueRegexFilter + // *RowFilter_ValueRangeFilter + // *RowFilter_CellsPerRowOffsetFilter + // *RowFilter_CellsPerRowLimitFilter + // *RowFilter_CellsPerColumnLimitFilter + // *RowFilter_StripValueTransformer + // *RowFilter_ApplyLabelTransformer + Filter isRowFilter_Filter `protobuf_oneof:"filter"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter) Reset() { *m = RowFilter{} } +func (m *RowFilter) String() string { return proto.CompactTextString(m) } +func (*RowFilter) ProtoMessage() {} +func (*RowFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{9} +} +func (m *RowFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter.Unmarshal(m, b) +} +func (m *RowFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter.Marshal(b, m, deterministic) +} +func (dst *RowFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter.Merge(dst, src) +} +func (m *RowFilter) XXX_Size() int { + return xxx_messageInfo_RowFilter.Size(m) +} +func (m *RowFilter) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter proto.InternalMessageInfo + +type isRowFilter_Filter interface { + isRowFilter_Filter() +} + +type RowFilter_Chain_ struct { + Chain *RowFilter_Chain `protobuf:"bytes,1,opt,name=chain,proto3,oneof"` +} + +type RowFilter_Interleave_ struct { + Interleave *RowFilter_Interleave `protobuf:"bytes,2,opt,name=interleave,proto3,oneof"` +} + +type RowFilter_Condition_ struct { + Condition *RowFilter_Condition `protobuf:"bytes,3,opt,name=condition,proto3,oneof"` +} + +type RowFilter_Sink struct { + Sink bool `protobuf:"varint,16,opt,name=sink,proto3,oneof"` +} + +type RowFilter_PassAllFilter struct { + PassAllFilter bool `protobuf:"varint,17,opt,name=pass_all_filter,json=passAllFilter,proto3,oneof"` +} + +type RowFilter_BlockAllFilter struct { + BlockAllFilter bool `protobuf:"varint,18,opt,name=block_all_filter,json=blockAllFilter,proto3,oneof"` +} + +type RowFilter_RowKeyRegexFilter struct { + RowKeyRegexFilter []byte `protobuf:"bytes,4,opt,name=row_key_regex_filter,json=rowKeyRegexFilter,proto3,oneof"` +} + +type RowFilter_RowSampleFilter struct { + RowSampleFilter float64 `protobuf:"fixed64,14,opt,name=row_sample_filter,json=rowSampleFilter,proto3,oneof"` +} + +type RowFilter_FamilyNameRegexFilter struct { + FamilyNameRegexFilter string `protobuf:"bytes,5,opt,name=family_name_regex_filter,json=familyNameRegexFilter,proto3,oneof"` +} + +type RowFilter_ColumnQualifierRegexFilter struct { + ColumnQualifierRegexFilter []byte `protobuf:"bytes,6,opt,name=column_qualifier_regex_filter,json=columnQualifierRegexFilter,proto3,oneof"` +} + +type RowFilter_ColumnRangeFilter struct { + ColumnRangeFilter *ColumnRange `protobuf:"bytes,7,opt,name=column_range_filter,json=columnRangeFilter,proto3,oneof"` +} + +type RowFilter_TimestampRangeFilter struct { + TimestampRangeFilter *TimestampRange 
`protobuf:"bytes,8,opt,name=timestamp_range_filter,json=timestampRangeFilter,proto3,oneof"` +} + +type RowFilter_ValueRegexFilter struct { + ValueRegexFilter []byte `protobuf:"bytes,9,opt,name=value_regex_filter,json=valueRegexFilter,proto3,oneof"` +} + +type RowFilter_ValueRangeFilter struct { + ValueRangeFilter *ValueRange `protobuf:"bytes,15,opt,name=value_range_filter,json=valueRangeFilter,proto3,oneof"` +} + +type RowFilter_CellsPerRowOffsetFilter struct { + CellsPerRowOffsetFilter int32 `protobuf:"varint,10,opt,name=cells_per_row_offset_filter,json=cellsPerRowOffsetFilter,proto3,oneof"` +} + +type RowFilter_CellsPerRowLimitFilter struct { + CellsPerRowLimitFilter int32 `protobuf:"varint,11,opt,name=cells_per_row_limit_filter,json=cellsPerRowLimitFilter,proto3,oneof"` +} + +type RowFilter_CellsPerColumnLimitFilter struct { + CellsPerColumnLimitFilter int32 `protobuf:"varint,12,opt,name=cells_per_column_limit_filter,json=cellsPerColumnLimitFilter,proto3,oneof"` +} + +type RowFilter_StripValueTransformer struct { + StripValueTransformer bool `protobuf:"varint,13,opt,name=strip_value_transformer,json=stripValueTransformer,proto3,oneof"` +} + +type RowFilter_ApplyLabelTransformer struct { + ApplyLabelTransformer string `protobuf:"bytes,19,opt,name=apply_label_transformer,json=applyLabelTransformer,proto3,oneof"` +} + +func (*RowFilter_Chain_) isRowFilter_Filter() {} + +func (*RowFilter_Interleave_) isRowFilter_Filter() {} + +func (*RowFilter_Condition_) isRowFilter_Filter() {} + +func (*RowFilter_Sink) isRowFilter_Filter() {} + +func (*RowFilter_PassAllFilter) isRowFilter_Filter() {} + +func (*RowFilter_BlockAllFilter) isRowFilter_Filter() {} + +func (*RowFilter_RowKeyRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_RowSampleFilter) isRowFilter_Filter() {} + +func (*RowFilter_FamilyNameRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ColumnQualifierRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ColumnRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_TimestampRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_ValueRegexFilter) isRowFilter_Filter() {} + +func (*RowFilter_ValueRangeFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerRowOffsetFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerRowLimitFilter) isRowFilter_Filter() {} + +func (*RowFilter_CellsPerColumnLimitFilter) isRowFilter_Filter() {} + +func (*RowFilter_StripValueTransformer) isRowFilter_Filter() {} + +func (*RowFilter_ApplyLabelTransformer) isRowFilter_Filter() {} + +func (m *RowFilter) GetFilter() isRowFilter_Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *RowFilter) GetChain() *RowFilter_Chain { + if x, ok := m.GetFilter().(*RowFilter_Chain_); ok { + return x.Chain + } + return nil +} + +func (m *RowFilter) GetInterleave() *RowFilter_Interleave { + if x, ok := m.GetFilter().(*RowFilter_Interleave_); ok { + return x.Interleave + } + return nil +} + +func (m *RowFilter) GetCondition() *RowFilter_Condition { + if x, ok := m.GetFilter().(*RowFilter_Condition_); ok { + return x.Condition + } + return nil +} + +func (m *RowFilter) GetSink() bool { + if x, ok := m.GetFilter().(*RowFilter_Sink); ok { + return x.Sink + } + return false +} + +func (m *RowFilter) GetPassAllFilter() bool { + if x, ok := m.GetFilter().(*RowFilter_PassAllFilter); ok { + return x.PassAllFilter + } + return false +} + +func (m *RowFilter) GetBlockAllFilter() bool { + if x, ok := m.GetFilter().(*RowFilter_BlockAllFilter); ok { + return x.BlockAllFilter + } + return false +} 
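Aside (illustrative only, not part of the vendored, generated file): the oneof wrapper structs and typed getters in this file are used by setting exactly one wrapper value on the Filter field and reading it back through the matching accessor; getters for the other cases return their zero value. A small sketch, assuming the package is imported under its declared path:

package main

import (
	"fmt"

	bigtable "google.golang.org/genproto/googleapis/bigtable/v2"
)

func main() {
	// Match a single column family by regex; only this oneof case is set.
	f := &bigtable.RowFilter{
		Filter: &bigtable.RowFilter_FamilyNameRegexFilter{FamilyNameRegexFilter: "cf1"},
	}
	fmt.Println(f.GetFamilyNameRegexFilter())     // "cf1"
	fmt.Println(f.GetCellsPerColumnLimitFilter()) // 0 (a different case is set)

	// Keep only the most recent cell in every column.
	f = &bigtable.RowFilter{
		Filter: &bigtable.RowFilter_CellsPerColumnLimitFilter{CellsPerColumnLimitFilter: 1},
	}
	fmt.Println(f.GetCellsPerColumnLimitFilter()) // 1
}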
+ +func (m *RowFilter) GetRowKeyRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_RowKeyRegexFilter); ok { + return x.RowKeyRegexFilter + } + return nil +} + +func (m *RowFilter) GetRowSampleFilter() float64 { + if x, ok := m.GetFilter().(*RowFilter_RowSampleFilter); ok { + return x.RowSampleFilter + } + return 0 +} + +func (m *RowFilter) GetFamilyNameRegexFilter() string { + if x, ok := m.GetFilter().(*RowFilter_FamilyNameRegexFilter); ok { + return x.FamilyNameRegexFilter + } + return "" +} + +func (m *RowFilter) GetColumnQualifierRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_ColumnQualifierRegexFilter); ok { + return x.ColumnQualifierRegexFilter + } + return nil +} + +func (m *RowFilter) GetColumnRangeFilter() *ColumnRange { + if x, ok := m.GetFilter().(*RowFilter_ColumnRangeFilter); ok { + return x.ColumnRangeFilter + } + return nil +} + +func (m *RowFilter) GetTimestampRangeFilter() *TimestampRange { + if x, ok := m.GetFilter().(*RowFilter_TimestampRangeFilter); ok { + return x.TimestampRangeFilter + } + return nil +} + +func (m *RowFilter) GetValueRegexFilter() []byte { + if x, ok := m.GetFilter().(*RowFilter_ValueRegexFilter); ok { + return x.ValueRegexFilter + } + return nil +} + +func (m *RowFilter) GetValueRangeFilter() *ValueRange { + if x, ok := m.GetFilter().(*RowFilter_ValueRangeFilter); ok { + return x.ValueRangeFilter + } + return nil +} + +func (m *RowFilter) GetCellsPerRowOffsetFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerRowOffsetFilter); ok { + return x.CellsPerRowOffsetFilter + } + return 0 +} + +func (m *RowFilter) GetCellsPerRowLimitFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerRowLimitFilter); ok { + return x.CellsPerRowLimitFilter + } + return 0 +} + +func (m *RowFilter) GetCellsPerColumnLimitFilter() int32 { + if x, ok := m.GetFilter().(*RowFilter_CellsPerColumnLimitFilter); ok { + return x.CellsPerColumnLimitFilter + } + return 0 +} + +func (m *RowFilter) GetStripValueTransformer() bool { + if x, ok := m.GetFilter().(*RowFilter_StripValueTransformer); ok { + return x.StripValueTransformer + } + return false +} + +func (m *RowFilter) GetApplyLabelTransformer() string { + if x, ok := m.GetFilter().(*RowFilter_ApplyLabelTransformer); ok { + return x.ApplyLabelTransformer + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RowFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RowFilter_OneofMarshaler, _RowFilter_OneofUnmarshaler, _RowFilter_OneofSizer, []interface{}{ + (*RowFilter_Chain_)(nil), + (*RowFilter_Interleave_)(nil), + (*RowFilter_Condition_)(nil), + (*RowFilter_Sink)(nil), + (*RowFilter_PassAllFilter)(nil), + (*RowFilter_BlockAllFilter)(nil), + (*RowFilter_RowKeyRegexFilter)(nil), + (*RowFilter_RowSampleFilter)(nil), + (*RowFilter_FamilyNameRegexFilter)(nil), + (*RowFilter_ColumnQualifierRegexFilter)(nil), + (*RowFilter_ColumnRangeFilter)(nil), + (*RowFilter_TimestampRangeFilter)(nil), + (*RowFilter_ValueRegexFilter)(nil), + (*RowFilter_ValueRangeFilter)(nil), + (*RowFilter_CellsPerRowOffsetFilter)(nil), + (*RowFilter_CellsPerRowLimitFilter)(nil), + (*RowFilter_CellsPerColumnLimitFilter)(nil), + (*RowFilter_StripValueTransformer)(nil), + (*RowFilter_ApplyLabelTransformer)(nil), + } +} + +func _RowFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RowFilter) + // filter + switch x := m.Filter.(type) { + case *RowFilter_Chain_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Chain); err != nil { + return err + } + case *RowFilter_Interleave_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Interleave); err != nil { + return err + } + case *RowFilter_Condition_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Condition); err != nil { + return err + } + case *RowFilter_Sink: + t := uint64(0) + if x.Sink { + t = 1 + } + b.EncodeVarint(16<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_PassAllFilter: + t := uint64(0) + if x.PassAllFilter { + t = 1 + } + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_BlockAllFilter: + t := uint64(0) + if x.BlockAllFilter { + t = 1 + } + b.EncodeVarint(18<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_RowKeyRegexFilter: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RowKeyRegexFilter) + case *RowFilter_RowSampleFilter: + b.EncodeVarint(14<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.RowSampleFilter)) + case *RowFilter_FamilyNameRegexFilter: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FamilyNameRegexFilter) + case *RowFilter_ColumnQualifierRegexFilter: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ColumnQualifierRegexFilter) + case *RowFilter_ColumnRangeFilter: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ColumnRangeFilter); err != nil { + return err + } + case *RowFilter_TimestampRangeFilter: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampRangeFilter); err != nil { + return err + } + case *RowFilter_ValueRegexFilter: + b.EncodeVarint(9<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ValueRegexFilter) + case *RowFilter_ValueRangeFilter: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ValueRangeFilter); err != nil { + return err + } + case *RowFilter_CellsPerRowOffsetFilter: + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CellsPerRowOffsetFilter)) + case *RowFilter_CellsPerRowLimitFilter: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CellsPerRowLimitFilter)) + case *RowFilter_CellsPerColumnLimitFilter: + b.EncodeVarint(12<<3 | proto.WireVarint) + 
b.EncodeVarint(uint64(x.CellsPerColumnLimitFilter)) + case *RowFilter_StripValueTransformer: + t := uint64(0) + if x.StripValueTransformer { + t = 1 + } + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *RowFilter_ApplyLabelTransformer: + b.EncodeVarint(19<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ApplyLabelTransformer) + case nil: + default: + return fmt.Errorf("RowFilter.Filter has unexpected type %T", x) + } + return nil +} + +func _RowFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RowFilter) + switch tag { + case 1: // filter.chain + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Chain) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Chain_{msg} + return true, err + case 2: // filter.interleave + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Interleave) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Interleave_{msg} + return true, err + case 3: // filter.condition + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RowFilter_Condition) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_Condition_{msg} + return true, err + case 16: // filter.sink + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_Sink{x != 0} + return true, err + case 17: // filter.pass_all_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_PassAllFilter{x != 0} + return true, err + case 18: // filter.block_all_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_BlockAllFilter{x != 0} + return true, err + case 4: // filter.row_key_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_RowKeyRegexFilter{x} + return true, err + case 14: // filter.row_sample_filter + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Filter = &RowFilter_RowSampleFilter{math.Float64frombits(x)} + return true, err + case 5: // filter.family_name_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &RowFilter_FamilyNameRegexFilter{x} + return true, err + case 6: // filter.column_qualifier_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_ColumnQualifierRegexFilter{x} + return true, err + case 7: // filter.column_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ColumnRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_ColumnRangeFilter{msg} + return true, err + case 8: // filter.timestamp_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TimestampRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_TimestampRangeFilter{msg} + return true, err + case 9: // filter.value_regex_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Filter = &RowFilter_ValueRegexFilter{x} + return true, err + case 15: // 
filter.value_range_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ValueRange) + err := b.DecodeMessage(msg) + m.Filter = &RowFilter_ValueRangeFilter{msg} + return true, err + case 10: // filter.cells_per_row_offset_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerRowOffsetFilter{int32(x)} + return true, err + case 11: // filter.cells_per_row_limit_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerRowLimitFilter{int32(x)} + return true, err + case 12: // filter.cells_per_column_limit_filter + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_CellsPerColumnLimitFilter{int32(x)} + return true, err + case 13: // filter.strip_value_transformer + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &RowFilter_StripValueTransformer{x != 0} + return true, err + case 19: // filter.apply_label_transformer + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &RowFilter_ApplyLabelTransformer{x} + return true, err + default: + return false, nil + } +} + +func _RowFilter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RowFilter) + // filter + switch x := m.Filter.(type) { + case *RowFilter_Chain_: + s := proto.Size(x.Chain) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Interleave_: + s := proto.Size(x.Interleave) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Condition_: + s := proto.Size(x.Condition) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_Sink: + n += 2 // tag and wire + n += 1 + case *RowFilter_PassAllFilter: + n += 2 // tag and wire + n += 1 + case *RowFilter_BlockAllFilter: + n += 2 // tag and wire + n += 1 + case *RowFilter_RowKeyRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RowKeyRegexFilter))) + n += len(x.RowKeyRegexFilter) + case *RowFilter_RowSampleFilter: + n += 1 // tag and wire + n += 8 + case *RowFilter_FamilyNameRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FamilyNameRegexFilter))) + n += len(x.FamilyNameRegexFilter) + case *RowFilter_ColumnQualifierRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ColumnQualifierRegexFilter))) + n += len(x.ColumnQualifierRegexFilter) + case *RowFilter_ColumnRangeFilter: + s := proto.Size(x.ColumnRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_TimestampRangeFilter: + s := proto.Size(x.TimestampRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_ValueRegexFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ValueRegexFilter))) + n += len(x.ValueRegexFilter) + case *RowFilter_ValueRangeFilter: + s := proto.Size(x.ValueRangeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RowFilter_CellsPerRowOffsetFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerRowOffsetFilter)) + case *RowFilter_CellsPerRowLimitFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerRowLimitFilter)) + case 
*RowFilter_CellsPerColumnLimitFilter: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CellsPerColumnLimitFilter)) + case *RowFilter_StripValueTransformer: + n += 1 // tag and wire + n += 1 + case *RowFilter_ApplyLabelTransformer: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.ApplyLabelTransformer))) + n += len(x.ApplyLabelTransformer) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A RowFilter which sends rows through several RowFilters in sequence. +type RowFilter_Chain struct { + // The elements of "filters" are chained together to process the input row: + // in row -> f(0) -> intermediate row -> f(1) -> ... -> f(N) -> out row + // The full chain is executed atomically. + Filters []*RowFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Chain) Reset() { *m = RowFilter_Chain{} } +func (m *RowFilter_Chain) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Chain) ProtoMessage() {} +func (*RowFilter_Chain) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{9, 0} +} +func (m *RowFilter_Chain) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Chain.Unmarshal(m, b) +} +func (m *RowFilter_Chain) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Chain.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Chain) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Chain.Merge(dst, src) +} +func (m *RowFilter_Chain) XXX_Size() int { + return xxx_messageInfo_RowFilter_Chain.Size(m) +} +func (m *RowFilter_Chain) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Chain.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Chain proto.InternalMessageInfo + +func (m *RowFilter_Chain) GetFilters() []*RowFilter { + if m != nil { + return m.Filters + } + return nil +} + +// A RowFilter which sends each row to each of several component +// RowFilters and interleaves the results. +type RowFilter_Interleave struct { + // The elements of "filters" all process a copy of the input row, and the + // results are pooled, sorted, and combined into a single output row. + // If multiple cells are produced with the same column and timestamp, + // they will all appear in the output row in an unspecified mutual order. + // Consider the following example, with three filters: + // + // input row + // | + // ----------------------------------------------------- + // | | | + // f(0) f(1) f(2) + // | | | + // 1: foo,bar,10,x foo,bar,10,z far,bar,7,a + // 2: foo,blah,11,z far,blah,5,x far,blah,5,x + // | | | + // ----------------------------------------------------- + // | + // 1: foo,bar,10,z // could have switched with #2 + // 2: foo,bar,10,x // could have switched with #1 + // 3: foo,blah,11,z + // 4: far,bar,7,a + // 5: far,blah,5,x // identical to #6 + // 6: far,blah,5,x // identical to #5 + // + // All interleaved filters are executed atomically. 
+ Filters []*RowFilter `protobuf:"bytes,1,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Interleave) Reset() { *m = RowFilter_Interleave{} } +func (m *RowFilter_Interleave) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Interleave) ProtoMessage() {} +func (*RowFilter_Interleave) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{9, 1} +} +func (m *RowFilter_Interleave) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Interleave.Unmarshal(m, b) +} +func (m *RowFilter_Interleave) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Interleave.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Interleave) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Interleave.Merge(dst, src) +} +func (m *RowFilter_Interleave) XXX_Size() int { + return xxx_messageInfo_RowFilter_Interleave.Size(m) +} +func (m *RowFilter_Interleave) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Interleave.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Interleave proto.InternalMessageInfo + +func (m *RowFilter_Interleave) GetFilters() []*RowFilter { + if m != nil { + return m.Filters + } + return nil +} + +// A RowFilter which evaluates one of two possible RowFilters, depending on +// whether or not a predicate RowFilter outputs any cells from the input row. +// +// IMPORTANT NOTE: The predicate filter does not execute atomically with the +// true and false filters, which may lead to inconsistent or unexpected +// results. Additionally, Condition filters have poor performance, especially +// when filters are set for the false condition. +type RowFilter_Condition struct { + // If `predicate_filter` outputs any cells, then `true_filter` will be + // evaluated on the input row. Otherwise, `false_filter` will be evaluated. + PredicateFilter *RowFilter `protobuf:"bytes,1,opt,name=predicate_filter,json=predicateFilter,proto3" json:"predicate_filter,omitempty"` + // The filter to apply to the input row if `predicate_filter` returns any + // results. If not provided, no results will be returned in the true case. + TrueFilter *RowFilter `protobuf:"bytes,2,opt,name=true_filter,json=trueFilter,proto3" json:"true_filter,omitempty"` + // The filter to apply to the input row if `predicate_filter` does not + // return any results. If not provided, no results will be returned in the + // false case. 
+ FalseFilter *RowFilter `protobuf:"bytes,3,opt,name=false_filter,json=falseFilter,proto3" json:"false_filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RowFilter_Condition) Reset() { *m = RowFilter_Condition{} } +func (m *RowFilter_Condition) String() string { return proto.CompactTextString(m) } +func (*RowFilter_Condition) ProtoMessage() {} +func (*RowFilter_Condition) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{9, 2} +} +func (m *RowFilter_Condition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RowFilter_Condition.Unmarshal(m, b) +} +func (m *RowFilter_Condition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RowFilter_Condition.Marshal(b, m, deterministic) +} +func (dst *RowFilter_Condition) XXX_Merge(src proto.Message) { + xxx_messageInfo_RowFilter_Condition.Merge(dst, src) +} +func (m *RowFilter_Condition) XXX_Size() int { + return xxx_messageInfo_RowFilter_Condition.Size(m) +} +func (m *RowFilter_Condition) XXX_DiscardUnknown() { + xxx_messageInfo_RowFilter_Condition.DiscardUnknown(m) +} + +var xxx_messageInfo_RowFilter_Condition proto.InternalMessageInfo + +func (m *RowFilter_Condition) GetPredicateFilter() *RowFilter { + if m != nil { + return m.PredicateFilter + } + return nil +} + +func (m *RowFilter_Condition) GetTrueFilter() *RowFilter { + if m != nil { + return m.TrueFilter + } + return nil +} + +func (m *RowFilter_Condition) GetFalseFilter() *RowFilter { + if m != nil { + return m.FalseFilter + } + return nil +} + +// Specifies a particular change to be made to the contents of a row. +type Mutation struct { + // Which of the possible Mutation types to apply. 
+ // + // Types that are valid to be assigned to Mutation: + // *Mutation_SetCell_ + // *Mutation_DeleteFromColumn_ + // *Mutation_DeleteFromFamily_ + // *Mutation_DeleteFromRow_ + Mutation isMutation_Mutation `protobuf_oneof:"mutation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation) Reset() { *m = Mutation{} } +func (m *Mutation) String() string { return proto.CompactTextString(m) } +func (*Mutation) ProtoMessage() {} +func (*Mutation) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{10} +} +func (m *Mutation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation.Unmarshal(m, b) +} +func (m *Mutation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation.Marshal(b, m, deterministic) +} +func (dst *Mutation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation.Merge(dst, src) +} +func (m *Mutation) XXX_Size() int { + return xxx_messageInfo_Mutation.Size(m) +} +func (m *Mutation) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation proto.InternalMessageInfo + +type isMutation_Mutation interface { + isMutation_Mutation() +} + +type Mutation_SetCell_ struct { + SetCell *Mutation_SetCell `protobuf:"bytes,1,opt,name=set_cell,json=setCell,proto3,oneof"` +} + +type Mutation_DeleteFromColumn_ struct { + DeleteFromColumn *Mutation_DeleteFromColumn `protobuf:"bytes,2,opt,name=delete_from_column,json=deleteFromColumn,proto3,oneof"` +} + +type Mutation_DeleteFromFamily_ struct { + DeleteFromFamily *Mutation_DeleteFromFamily `protobuf:"bytes,3,opt,name=delete_from_family,json=deleteFromFamily,proto3,oneof"` +} + +type Mutation_DeleteFromRow_ struct { + DeleteFromRow *Mutation_DeleteFromRow `protobuf:"bytes,4,opt,name=delete_from_row,json=deleteFromRow,proto3,oneof"` +} + +func (*Mutation_SetCell_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromColumn_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromFamily_) isMutation_Mutation() {} + +func (*Mutation_DeleteFromRow_) isMutation_Mutation() {} + +func (m *Mutation) GetMutation() isMutation_Mutation { + if m != nil { + return m.Mutation + } + return nil +} + +func (m *Mutation) GetSetCell() *Mutation_SetCell { + if x, ok := m.GetMutation().(*Mutation_SetCell_); ok { + return x.SetCell + } + return nil +} + +func (m *Mutation) GetDeleteFromColumn() *Mutation_DeleteFromColumn { + if x, ok := m.GetMutation().(*Mutation_DeleteFromColumn_); ok { + return x.DeleteFromColumn + } + return nil +} + +func (m *Mutation) GetDeleteFromFamily() *Mutation_DeleteFromFamily { + if x, ok := m.GetMutation().(*Mutation_DeleteFromFamily_); ok { + return x.DeleteFromFamily + } + return nil +} + +func (m *Mutation) GetDeleteFromRow() *Mutation_DeleteFromRow { + if x, ok := m.GetMutation().(*Mutation_DeleteFromRow_); ok { + return x.DeleteFromRow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Mutation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Mutation_OneofMarshaler, _Mutation_OneofUnmarshaler, _Mutation_OneofSizer, []interface{}{ + (*Mutation_SetCell_)(nil), + (*Mutation_DeleteFromColumn_)(nil), + (*Mutation_DeleteFromFamily_)(nil), + (*Mutation_DeleteFromRow_)(nil), + } +} + +func _Mutation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Mutation) + // mutation + switch x := m.Mutation.(type) { + case *Mutation_SetCell_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SetCell); err != nil { + return err + } + case *Mutation_DeleteFromColumn_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromColumn); err != nil { + return err + } + case *Mutation_DeleteFromFamily_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromFamily); err != nil { + return err + } + case *Mutation_DeleteFromRow_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteFromRow); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Mutation.Mutation has unexpected type %T", x) + } + return nil +} + +func _Mutation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Mutation) + switch tag { + case 1: // mutation.set_cell + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_SetCell) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_SetCell_{msg} + return true, err + case 2: // mutation.delete_from_column + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromColumn) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromColumn_{msg} + return true, err + case 3: // mutation.delete_from_family + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromFamily) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromFamily_{msg} + return true, err + case 4: // mutation.delete_from_row + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_DeleteFromRow) + err := b.DecodeMessage(msg) + m.Mutation = &Mutation_DeleteFromRow_{msg} + return true, err + default: + return false, nil + } +} + +func _Mutation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Mutation) + // mutation + switch x := m.Mutation.(type) { + case *Mutation_SetCell_: + s := proto.Size(x.SetCell) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromColumn_: + s := proto.Size(x.DeleteFromColumn) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromFamily_: + s := proto.Size(x.DeleteFromFamily) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_DeleteFromRow_: + s := proto.Size(x.DeleteFromRow) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Mutation which sets the value of the specified cell. +type Mutation_SetCell struct { + // The name of the family into which new data should be written. 
+ // Must match `[-_.a-zA-Z0-9]+` + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column into which new data should be written. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The timestamp of the cell into which new data should be written. + // Use -1 for current Bigtable server time. + // Otherwise, the client should set this value itself, noting that the + // default value is a timestamp of zero if the field is left unspecified. + // Values must match the granularity of the table (e.g. micros, millis). + TimestampMicros int64 `protobuf:"varint,3,opt,name=timestamp_micros,json=timestampMicros,proto3" json:"timestamp_micros,omitempty"` + // The value to be written into the specified cell. + Value []byte `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_SetCell) Reset() { *m = Mutation_SetCell{} } +func (m *Mutation_SetCell) String() string { return proto.CompactTextString(m) } +func (*Mutation_SetCell) ProtoMessage() {} +func (*Mutation_SetCell) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{10, 0} +} +func (m *Mutation_SetCell) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_SetCell.Unmarshal(m, b) +} +func (m *Mutation_SetCell) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_SetCell.Marshal(b, m, deterministic) +} +func (dst *Mutation_SetCell) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_SetCell.Merge(dst, src) +} +func (m *Mutation_SetCell) XXX_Size() int { + return xxx_messageInfo_Mutation_SetCell.Size(m) +} +func (m *Mutation_SetCell) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_SetCell.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_SetCell proto.InternalMessageInfo + +func (m *Mutation_SetCell) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *Mutation_SetCell) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +func (m *Mutation_SetCell) GetTimestampMicros() int64 { + if m != nil { + return m.TimestampMicros + } + return 0 +} + +func (m *Mutation_SetCell) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// A Mutation which deletes cells from the specified column, optionally +// restricting the deletions to a given timestamp range. +type Mutation_DeleteFromColumn struct { + // The name of the family from which cells should be deleted. + // Must match `[-_.a-zA-Z0-9]+` + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column from which cells should be deleted. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The range of timestamps within which cells should be deleted. 
+ TimeRange *TimestampRange `protobuf:"bytes,3,opt,name=time_range,json=timeRange,proto3" json:"time_range,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromColumn) Reset() { *m = Mutation_DeleteFromColumn{} } +func (m *Mutation_DeleteFromColumn) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromColumn) ProtoMessage() {} +func (*Mutation_DeleteFromColumn) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{10, 1} +} +func (m *Mutation_DeleteFromColumn) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromColumn.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromColumn) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromColumn.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromColumn) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromColumn.Merge(dst, src) +} +func (m *Mutation_DeleteFromColumn) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromColumn.Size(m) +} +func (m *Mutation_DeleteFromColumn) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromColumn.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromColumn proto.InternalMessageInfo + +func (m *Mutation_DeleteFromColumn) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *Mutation_DeleteFromColumn) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +func (m *Mutation_DeleteFromColumn) GetTimeRange() *TimestampRange { + if m != nil { + return m.TimeRange + } + return nil +} + +// A Mutation which deletes all cells from the specified column family. +type Mutation_DeleteFromFamily struct { + // The name of the family from which cells should be deleted. + // Must match `[-_.a-zA-Z0-9]+` + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromFamily) Reset() { *m = Mutation_DeleteFromFamily{} } +func (m *Mutation_DeleteFromFamily) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromFamily) ProtoMessage() {} +func (*Mutation_DeleteFromFamily) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{10, 2} +} +func (m *Mutation_DeleteFromFamily) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromFamily.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromFamily) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromFamily.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromFamily) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromFamily.Merge(dst, src) +} +func (m *Mutation_DeleteFromFamily) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromFamily.Size(m) +} +func (m *Mutation_DeleteFromFamily) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromFamily.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromFamily proto.InternalMessageInfo + +func (m *Mutation_DeleteFromFamily) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +// A Mutation which deletes all cells from the containing row. 
+type Mutation_DeleteFromRow struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_DeleteFromRow) Reset() { *m = Mutation_DeleteFromRow{} } +func (m *Mutation_DeleteFromRow) String() string { return proto.CompactTextString(m) } +func (*Mutation_DeleteFromRow) ProtoMessage() {} +func (*Mutation_DeleteFromRow) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{10, 3} +} +func (m *Mutation_DeleteFromRow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_DeleteFromRow.Unmarshal(m, b) +} +func (m *Mutation_DeleteFromRow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_DeleteFromRow.Marshal(b, m, deterministic) +} +func (dst *Mutation_DeleteFromRow) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_DeleteFromRow.Merge(dst, src) +} +func (m *Mutation_DeleteFromRow) XXX_Size() int { + return xxx_messageInfo_Mutation_DeleteFromRow.Size(m) +} +func (m *Mutation_DeleteFromRow) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_DeleteFromRow.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_DeleteFromRow proto.InternalMessageInfo + +// Specifies an atomic read/modify/write operation on the latest value of the +// specified column. +type ReadModifyWriteRule struct { + // The name of the family to which the read/modify/write should be applied. + // Must match `[-_.a-zA-Z0-9]+` + FamilyName string `protobuf:"bytes,1,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // The qualifier of the column to which the read/modify/write should be + // applied. + // Can be any byte string, including the empty string. + ColumnQualifier []byte `protobuf:"bytes,2,opt,name=column_qualifier,json=columnQualifier,proto3" json:"column_qualifier,omitempty"` + // The rule used to determine the column's new latest value from its current + // latest value. 
+ // + // Types that are valid to be assigned to Rule: + // *ReadModifyWriteRule_AppendValue + // *ReadModifyWriteRule_IncrementAmount + Rule isReadModifyWriteRule_Rule `protobuf_oneof:"rule"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadModifyWriteRule) Reset() { *m = ReadModifyWriteRule{} } +func (m *ReadModifyWriteRule) String() string { return proto.CompactTextString(m) } +func (*ReadModifyWriteRule) ProtoMessage() {} +func (*ReadModifyWriteRule) Descriptor() ([]byte, []int) { + return fileDescriptor_data_617bf329776ab134, []int{11} +} +func (m *ReadModifyWriteRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadModifyWriteRule.Unmarshal(m, b) +} +func (m *ReadModifyWriteRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadModifyWriteRule.Marshal(b, m, deterministic) +} +func (dst *ReadModifyWriteRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadModifyWriteRule.Merge(dst, src) +} +func (m *ReadModifyWriteRule) XXX_Size() int { + return xxx_messageInfo_ReadModifyWriteRule.Size(m) +} +func (m *ReadModifyWriteRule) XXX_DiscardUnknown() { + xxx_messageInfo_ReadModifyWriteRule.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadModifyWriteRule proto.InternalMessageInfo + +func (m *ReadModifyWriteRule) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *ReadModifyWriteRule) GetColumnQualifier() []byte { + if m != nil { + return m.ColumnQualifier + } + return nil +} + +type isReadModifyWriteRule_Rule interface { + isReadModifyWriteRule_Rule() +} + +type ReadModifyWriteRule_AppendValue struct { + AppendValue []byte `protobuf:"bytes,3,opt,name=append_value,json=appendValue,proto3,oneof"` +} + +type ReadModifyWriteRule_IncrementAmount struct { + IncrementAmount int64 `protobuf:"varint,4,opt,name=increment_amount,json=incrementAmount,proto3,oneof"` +} + +func (*ReadModifyWriteRule_AppendValue) isReadModifyWriteRule_Rule() {} + +func (*ReadModifyWriteRule_IncrementAmount) isReadModifyWriteRule_Rule() {} + +func (m *ReadModifyWriteRule) GetRule() isReadModifyWriteRule_Rule { + if m != nil { + return m.Rule + } + return nil +} + +func (m *ReadModifyWriteRule) GetAppendValue() []byte { + if x, ok := m.GetRule().(*ReadModifyWriteRule_AppendValue); ok { + return x.AppendValue + } + return nil +} + +func (m *ReadModifyWriteRule) GetIncrementAmount() int64 { + if x, ok := m.GetRule().(*ReadModifyWriteRule_IncrementAmount); ok { + return x.IncrementAmount + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadModifyWriteRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadModifyWriteRule_OneofMarshaler, _ReadModifyWriteRule_OneofUnmarshaler, _ReadModifyWriteRule_OneofSizer, []interface{}{ + (*ReadModifyWriteRule_AppendValue)(nil), + (*ReadModifyWriteRule_IncrementAmount)(nil), + } +} + +func _ReadModifyWriteRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadModifyWriteRule) + // rule + switch x := m.Rule.(type) { + case *ReadModifyWriteRule_AppendValue: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AppendValue) + case *ReadModifyWriteRule_IncrementAmount: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IncrementAmount)) + case nil: + default: + return fmt.Errorf("ReadModifyWriteRule.Rule has unexpected type %T", x) + } + return nil +} + +func _ReadModifyWriteRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadModifyWriteRule) + switch tag { + case 3: // rule.append_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Rule = &ReadModifyWriteRule_AppendValue{x} + return true, err + case 4: // rule.increment_amount + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Rule = &ReadModifyWriteRule_IncrementAmount{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ReadModifyWriteRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadModifyWriteRule) + // rule + switch x := m.Rule.(type) { + case *ReadModifyWriteRule_AppendValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AppendValue))) + n += len(x.AppendValue) + case *ReadModifyWriteRule_IncrementAmount: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IncrementAmount)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Row)(nil), "google.bigtable.v2.Row") + proto.RegisterType((*Family)(nil), "google.bigtable.v2.Family") + proto.RegisterType((*Column)(nil), "google.bigtable.v2.Column") + proto.RegisterType((*Cell)(nil), "google.bigtable.v2.Cell") + proto.RegisterType((*RowRange)(nil), "google.bigtable.v2.RowRange") + proto.RegisterType((*RowSet)(nil), "google.bigtable.v2.RowSet") + proto.RegisterType((*ColumnRange)(nil), "google.bigtable.v2.ColumnRange") + proto.RegisterType((*TimestampRange)(nil), "google.bigtable.v2.TimestampRange") + proto.RegisterType((*ValueRange)(nil), "google.bigtable.v2.ValueRange") + proto.RegisterType((*RowFilter)(nil), "google.bigtable.v2.RowFilter") + proto.RegisterType((*RowFilter_Chain)(nil), "google.bigtable.v2.RowFilter.Chain") + proto.RegisterType((*RowFilter_Interleave)(nil), "google.bigtable.v2.RowFilter.Interleave") + proto.RegisterType((*RowFilter_Condition)(nil), "google.bigtable.v2.RowFilter.Condition") + proto.RegisterType((*Mutation)(nil), "google.bigtable.v2.Mutation") + proto.RegisterType((*Mutation_SetCell)(nil), "google.bigtable.v2.Mutation.SetCell") + proto.RegisterType((*Mutation_DeleteFromColumn)(nil), "google.bigtable.v2.Mutation.DeleteFromColumn") + proto.RegisterType((*Mutation_DeleteFromFamily)(nil), "google.bigtable.v2.Mutation.DeleteFromFamily") + proto.RegisterType((*Mutation_DeleteFromRow)(nil), 
"google.bigtable.v2.Mutation.DeleteFromRow") + proto.RegisterType((*ReadModifyWriteRule)(nil), "google.bigtable.v2.ReadModifyWriteRule") +} + +func init() { proto.RegisterFile("google/bigtable/v2/data.proto", fileDescriptor_data_617bf329776ab134) } + +var fileDescriptor_data_617bf329776ab134 = []byte{ + // 1444 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdb, 0x72, 0x1b, 0x45, + 0x13, 0xd6, 0x5a, 0xb6, 0x2c, 0xf5, 0xca, 0x96, 0x32, 0x71, 0x1c, 0x45, 0x7f, 0xfc, 0xc7, 0xa5, + 0x50, 0x41, 0x31, 0x20, 0x83, 0x92, 0x0a, 0x87, 0x50, 0x54, 0x2c, 0x87, 0x44, 0x90, 0xf3, 0xc4, + 0x65, 0xaa, 0x52, 0xa1, 0x96, 0xb1, 0x34, 0x12, 0x5b, 0x9e, 0xdd, 0x59, 0x76, 0x57, 0x56, 0xf4, + 0x22, 0x70, 0x0d, 0x97, 0xbc, 0x02, 0x77, 0x5c, 0xf2, 0x02, 0xf0, 0x18, 0x3c, 0x00, 0x17, 0xd4, + 0x9c, 0xf6, 0xa0, 0x38, 0xb6, 0x8b, 0xca, 0xdd, 0x6e, 0xf7, 0xf7, 0x7d, 0xdd, 0xd3, 0xd3, 0xd3, + 0x3b, 0x0b, 0x1b, 0x63, 0xce, 0xc7, 0x8c, 0x6e, 0x1f, 0xb8, 0xe3, 0x98, 0x1c, 0x30, 0xba, 0x7d, + 0xd4, 0xdd, 0x1e, 0x92, 0x98, 0x74, 0x82, 0x90, 0xc7, 0x1c, 0x21, 0xe5, 0xee, 0x18, 0x77, 0xe7, + 0xa8, 0xdb, 0x7a, 0x02, 0x45, 0xcc, 0xa7, 0xa8, 0x0e, 0xc5, 0x43, 0x3a, 0x6b, 0x58, 0x9b, 0x56, + 0xbb, 0x8a, 0xc5, 0x23, 0xba, 0x05, 0xe5, 0x11, 0xf1, 0x5c, 0xe6, 0xd2, 0xa8, 0xb1, 0xb0, 0x59, + 0x6c, 0xdb, 0xdd, 0x66, 0xe7, 0x75, 0x7e, 0xe7, 0x9e, 0xc0, 0xcc, 0x70, 0x82, 0x6d, 0x61, 0x28, + 0x29, 0x1b, 0x42, 0xb0, 0xe8, 0x13, 0x8f, 0x4a, 0xd1, 0x0a, 0x96, 0xcf, 0xe8, 0x26, 0x2c, 0x0f, + 0x38, 0x9b, 0x78, 0xfe, 0x89, 0xa2, 0xbb, 0x12, 0x82, 0x0d, 0xb4, 0xb5, 0x0f, 0x25, 0x65, 0x42, + 0x97, 0xa1, 0xf2, 0xc3, 0x84, 0x30, 0x77, 0xe4, 0xd2, 0x50, 0x67, 0x9b, 0x1a, 0x50, 0x07, 0x96, + 0x06, 0x94, 0x31, 0xa3, 0xdd, 0x38, 0x56, 0x9b, 0x32, 0x86, 0x15, 0xac, 0xe5, 0xc0, 0xa2, 0x78, + 0x45, 0xd7, 0xa1, 0x1e, 0xbb, 0x1e, 0x8d, 0x62, 0xe2, 0x05, 0x8e, 0xe7, 0x0e, 0x42, 0x1e, 0x49, + 0xf1, 0x22, 0xae, 0x25, 0xf6, 0x47, 0xd2, 0x8c, 0xd6, 0x60, 0xe9, 0x88, 0xb0, 0x09, 0x6d, 0x2c, + 0xc8, 0xe0, 0xea, 0x05, 0xad, 0x43, 0x89, 0x91, 0x03, 0xca, 0xa2, 0x46, 0x71, 0xb3, 0xd8, 0xae, + 0x60, 0xfd, 0xd6, 0xfa, 0xdd, 0x82, 0x32, 0xe6, 0x53, 0x4c, 0xfc, 0x31, 0x45, 0x5b, 0x50, 0x8f, + 0x62, 0x12, 0xc6, 0xce, 0x21, 0x9d, 0x39, 0x03, 0xc6, 0x23, 0x3a, 0x54, 0x4b, 0xe8, 0x17, 0xf0, + 0xaa, 0xf4, 0x3c, 0xa0, 0xb3, 0x5d, 0x69, 0x47, 0xd7, 0x60, 0x35, 0xc5, 0xf2, 0x80, 0xfa, 0x2a, + 0x5e, 0xbf, 0x80, 0xab, 0x06, 0xf9, 0x24, 0xa0, 0x3e, 0x6a, 0x41, 0x95, 0xfa, 0xc3, 0x14, 0x55, + 0x94, 0x28, 0x0b, 0x03, 0xf5, 0x87, 0x06, 0x73, 0x0d, 0x56, 0x0d, 0x46, 0x47, 0x5d, 0xd4, 0xa8, + 0xaa, 0x42, 0xa9, 0x98, 0x3d, 0x1b, 0x2a, 0x49, 0xcc, 0x5e, 0x05, 0x96, 0x35, 0xa9, 0xf5, 0x1d, + 0x94, 0x30, 0x9f, 0x3e, 0xa7, 0x31, 0xba, 0x04, 0xe5, 0x90, 0x4f, 0x85, 0x51, 0xd4, 0xa7, 0xd8, + 0xae, 0xe2, 0xe5, 0x90, 0x4f, 0x1f, 0xd0, 0x59, 0x84, 0x6e, 0x03, 0x08, 0x57, 0x28, 0x56, 0x6a, + 0xea, 0x7f, 0xf9, 0xb8, 0xfa, 0x9b, 0x72, 0xe0, 0x4a, 0xa8, 0x9f, 0xa2, 0xd6, 0x2f, 0x0b, 0x60, + 0xeb, 0x3d, 0x97, 0x95, 0xba, 0x02, 0xb6, 0xec, 0xa7, 0x99, 0x93, 0x69, 0x20, 0x50, 0xa6, 0xc7, + 0xa2, 0x8d, 0x6e, 0xc1, 0xba, 0x4a, 0x35, 0xd9, 0x7b, 0xb3, 0x34, 0x53, 0xa6, 0x35, 0xe9, 0x7f, + 0x66, 0xdc, 0xba, 0xac, 0x5d, 0x58, 0x9b, 0xe7, 0x65, 0xca, 0x56, 0xc0, 0x28, 0xcf, 0x92, 0xe5, + 0xeb, 0xc2, 0x9a, 0xa8, 0xc4, 0x6b, 0x91, 0x4c, 0x11, 0x11, 0xf5, 0x87, 0xf3, 0x71, 0x3a, 0x80, + 0xf2, 0x1c, 0x19, 0x65, 0x49, 0x33, 0xea, 0x59, 0x86, 0x88, 0xd1, 0x3b, 0x07, 0xb5, 0xb9, 0xbc, + 0x7a, 0x35, 0x58, 0xc9, 0x49, 0xb4, 0x5e, 0xc1, 0xea, 0x9e, 
0x69, 0x46, 0x55, 0xa6, 0x9b, 0xa6, + 0x0a, 0x6f, 0x68, 0x5e, 0xb5, 0xd6, 0xbd, 0xb9, 0x0e, 0xfe, 0x50, 0xad, 0xe7, 0x35, 0xce, 0x82, + 0xe4, 0x88, 0xbc, 0xe7, 0x18, 0xad, 0xbf, 0x2c, 0x80, 0x7d, 0xd1, 0xe7, 0x2a, 0x6c, 0x07, 0x54, + 0x99, 0x1c, 0xd9, 0xfb, 0xf3, 0x9d, 0xac, 0x7a, 0x5c, 0xc2, 0x75, 0x31, 0x92, 0xbe, 0x57, 0xf8, + 0x5c, 0x37, 0xaf, 0xa6, 0x68, 0x59, 0xec, 0x2d, 0x10, 0xc5, 0xc9, 0x2b, 0x9b, 0x9e, 0x16, 0x5d, + 0x9c, 0xd5, 0xd5, 0x7d, 0x9d, 0x51, 0xcd, 0xf6, 0x75, 0xa2, 0xd9, 0x5b, 0x01, 0x3b, 0x13, 0x5f, + 0xb4, 0x79, 0x42, 0x6b, 0xfd, 0x63, 0x43, 0x05, 0xf3, 0xe9, 0x3d, 0x97, 0xc5, 0x34, 0x44, 0xb7, + 0x61, 0x69, 0xf0, 0x3d, 0x71, 0x7d, 0xb9, 0x18, 0xbb, 0x7b, 0xf5, 0x0d, 0xfd, 0xab, 0xd0, 0x9d, + 0x5d, 0x01, 0xed, 0x17, 0xb0, 0xe2, 0xa0, 0xaf, 0x01, 0x5c, 0x3f, 0xa6, 0x21, 0xa3, 0xe4, 0x48, + 0x8d, 0x07, 0xbb, 0xdb, 0x3e, 0x59, 0xe1, 0xab, 0x04, 0xdf, 0x2f, 0xe0, 0x0c, 0x1b, 0xdd, 0x87, + 0xca, 0x80, 0xfb, 0x43, 0x37, 0x76, 0xb9, 0x6a, 0x4e, 0xbb, 0xfb, 0xee, 0x29, 0xc9, 0x18, 0x78, + 0xbf, 0x80, 0x53, 0x2e, 0x5a, 0x83, 0xc5, 0xc8, 0xf5, 0x0f, 0x1b, 0xf5, 0x4d, 0xab, 0x5d, 0xee, + 0x17, 0xb0, 0x7c, 0x43, 0x6d, 0xa8, 0x05, 0x24, 0x8a, 0x1c, 0xc2, 0x98, 0x33, 0x92, 0xfc, 0xc6, + 0x39, 0x0d, 0x58, 0x11, 0x8e, 0x1d, 0xc6, 0x74, 0x45, 0xb6, 0xa0, 0x7e, 0xc0, 0xf8, 0xe0, 0x30, + 0x0b, 0x45, 0x1a, 0xba, 0x2a, 0x3d, 0x29, 0xf6, 0x23, 0x58, 0xd3, 0xd3, 0xc1, 0x09, 0xe9, 0x98, + 0xbe, 0x32, 0xf8, 0x45, 0xbd, 0xd7, 0xe7, 0xd4, 0xac, 0xc0, 0xc2, 0xa7, 0x29, 0xef, 0x83, 0x30, + 0x3a, 0x11, 0xf1, 0x02, 0x46, 0x0d, 0x7e, 0x75, 0xd3, 0x6a, 0x5b, 0xfd, 0x02, 0xae, 0x85, 0x7c, + 0xfa, 0x5c, 0x7a, 0x34, 0xfa, 0x53, 0x68, 0x64, 0xc6, 0x42, 0x3e, 0x88, 0x38, 0x5b, 0x95, 0x7e, + 0x01, 0x5f, 0x48, 0xa7, 0x44, 0x36, 0xd0, 0x2e, 0x6c, 0xa8, 0x8f, 0x49, 0xe6, 0x4c, 0xe6, 0xf8, + 0x25, 0x9d, 0x64, 0x53, 0xc1, 0x92, 0xe3, 0x99, 0x15, 0x79, 0x06, 0xe7, 0xb5, 0x88, 0x1c, 0x73, + 0x86, 0xba, 0x2c, 0xf7, 0xe7, 0xca, 0x09, 0x1f, 0x32, 0x81, 0x16, 0x05, 0x18, 0xa4, 0xaf, 0x5a, + 0xf2, 0x05, 0xac, 0xa7, 0x07, 0x31, 0xa7, 0x5a, 0x96, 0xaa, 0xad, 0xe3, 0x54, 0xf3, 0x63, 0x40, + 0x0c, 0xbb, 0x38, 0x67, 0xd1, 0xda, 0x1d, 0x40, 0xea, 0x6c, 0xe4, 0x16, 0x5a, 0x31, 0xe7, 0x54, + 0xfa, 0xb2, 0xcb, 0x7b, 0x9c, 0xe0, 0xb3, 0x79, 0xd4, 0x64, 0x1e, 0xff, 0x3f, 0x2e, 0x8f, 0x74, + 0x26, 0xa4, 0x7a, 0x99, 0xf8, 0x5f, 0xc0, 0xff, 0xe4, 0x67, 0xd6, 0x09, 0x44, 0xb1, 0xf9, 0xd4, + 0xe1, 0xa3, 0x51, 0x44, 0x63, 0x23, 0x0c, 0x9b, 0x56, 0x7b, 0xa9, 0x5f, 0xc0, 0x17, 0x25, 0xe8, + 0x29, 0x0d, 0x31, 0x9f, 0x3e, 0x91, 0x08, 0xcd, 0xff, 0x1c, 0x9a, 0x79, 0x3e, 0x73, 0x3d, 0x37, + 0xa1, 0xdb, 0x9a, 0xbe, 0x9e, 0xa1, 0x3f, 0x14, 0x00, 0xcd, 0xee, 0xc1, 0x46, 0xca, 0xd6, 0xdb, + 0x96, 0x13, 0xa8, 0x6a, 0x81, 0x4b, 0x46, 0x40, 0x6d, 0x56, 0x56, 0xe3, 0x13, 0xb8, 0x18, 0xc5, + 0xa1, 0x1b, 0xe8, 0x19, 0x13, 0x87, 0xc4, 0x8f, 0x46, 0x3c, 0xf4, 0x68, 0xd8, 0x58, 0xd1, 0x87, + 0xe0, 0x82, 0x04, 0xc8, 0x4a, 0xec, 0xa5, 0x6e, 0xc1, 0x24, 0x41, 0xc0, 0x66, 0x8e, 0xbc, 0x08, + 0xe4, 0x98, 0xe7, 0x4d, 0xa7, 0x4a, 0xc0, 0x43, 0xe1, 0xcf, 0x30, 0x9b, 0x77, 0x60, 0x49, 0x0e, + 0x16, 0xf4, 0x31, 0x2c, 0xab, 0x4c, 0xd5, 0xb7, 0xd6, 0xee, 0x6e, 0x9c, 0x38, 0x01, 0xb0, 0x41, + 0x37, 0xbf, 0x04, 0x48, 0x07, 0xcb, 0x7f, 0x97, 0xf9, 0xd3, 0x82, 0x4a, 0x32, 0x55, 0x50, 0x1f, + 0xea, 0x41, 0x48, 0x87, 0xee, 0x80, 0xc4, 0x49, 0x6b, 0xa8, 0x29, 0x79, 0x8a, 0x5e, 0x2d, 0xa1, + 0x25, 0x6d, 0x61, 0xc7, 0xe1, 0x24, 0x11, 0x59, 0x38, 0x8b, 0x08, 0x08, 0x86, 0xe6, 0xdf, 0x81, + 0xea, 0x88, 0xb0, 0x28, 0x11, 0x28, 0x9e, 0x45, 0xc0, 0x96, 0x14, 0xf5, 0xd2, 0x2b, 
0x43, 0x49, + 0x71, 0x5b, 0x7f, 0x2f, 0x41, 0xf9, 0xd1, 0x24, 0x26, 0x72, 0x89, 0x3b, 0x50, 0x16, 0xed, 0x29, + 0xda, 0x41, 0x2f, 0xed, 0x9d, 0xe3, 0x44, 0x0d, 0xbe, 0xf3, 0x9c, 0xc6, 0xe2, 0xf6, 0xd8, 0x2f, + 0xe0, 0xe5, 0x48, 0x3d, 0xa2, 0x6f, 0x01, 0x0d, 0x29, 0xa3, 0xa2, 0x44, 0x21, 0xf7, 0x74, 0xdb, + 0xe9, 0x25, 0x7e, 0x70, 0xa2, 0xd8, 0x5d, 0x49, 0xbb, 0x17, 0x72, 0x4f, 0xb5, 0xa1, 0x38, 0x51, + 0xc3, 0x39, 0xdb, 0xbc, 0xbc, 0x1a, 0x75, 0xba, 0x00, 0x67, 0x95, 0x57, 0x97, 0xf3, 0xbc, 0xbc, + 0xbe, 0xb0, 0xef, 0x41, 0x2d, 0x2b, 0x1f, 0xf2, 0xa9, 0x9c, 0xdd, 0x76, 0x77, 0xeb, 0x8c, 0xda, + 0x98, 0x4f, 0xc5, 0x27, 0x64, 0x98, 0x35, 0x34, 0x7f, 0xb4, 0x60, 0x59, 0x97, 0xea, 0xf4, 0x8b, + 0xdd, 0x75, 0xa8, 0xcf, 0xcf, 0x69, 0x7d, 0xd3, 0xae, 0xcd, 0x0d, 0xe6, 0x63, 0x2f, 0xed, 0xc5, + 0x53, 0x2e, 0xed, 0x8b, 0x99, 0x4b, 0x7b, 0xf3, 0x67, 0x0b, 0xea, 0xf3, 0x65, 0x7f, 0xab, 0x19, + 0xee, 0x00, 0x88, 0x4c, 0xd4, 0x3c, 0xd5, 0xdb, 0x74, 0x86, 0x81, 0x8e, 0x2b, 0x82, 0x25, 0x1f, + 0x9b, 0x37, 0xb2, 0x29, 0xea, 0x6d, 0x3a, 0x2d, 0xc5, 0x66, 0x0d, 0x56, 0x72, 0x7b, 0xd2, 0x03, + 0x28, 0x7b, 0x7a, 0xb7, 0x5a, 0xbf, 0x59, 0x70, 0x1e, 0x53, 0x32, 0x7c, 0xc4, 0x87, 0xee, 0x68, + 0xf6, 0x4d, 0xe8, 0xc6, 0x14, 0x4f, 0x18, 0x7d, 0xab, 0x0b, 0xbf, 0x0a, 0x55, 0x12, 0x04, 0xc9, + 0x2d, 0x2b, 0xb9, 0x5e, 0xdb, 0xca, 0x2a, 0xa7, 0x25, 0x7a, 0x0f, 0xea, 0xae, 0x3f, 0x08, 0xa9, + 0x47, 0xfd, 0xd8, 0x21, 0x1e, 0x9f, 0xf8, 0xb1, 0xdc, 0x9f, 0xa2, 0xf8, 0xf4, 0x27, 0x9e, 0x1d, + 0xe9, 0xe8, 0x95, 0x60, 0x31, 0x9c, 0x30, 0xda, 0xfb, 0xc9, 0x82, 0xf5, 0x01, 0xf7, 0x8e, 0x29, + 0x62, 0xaf, 0x72, 0x97, 0xc4, 0xe4, 0xa9, 0xf8, 0xd1, 0x7d, 0x6a, 0xbd, 0xf8, 0x4c, 0x03, 0xc6, + 0x9c, 0x11, 0x7f, 0xdc, 0xe1, 0xe1, 0x78, 0x7b, 0x4c, 0x7d, 0xf9, 0x1b, 0xbc, 0xad, 0x5c, 0x24, + 0x70, 0xa3, 0xec, 0x8f, 0xf2, 0x6d, 0xf3, 0xfc, 0xeb, 0x42, 0xe3, 0xbe, 0x22, 0xef, 0x32, 0x3e, + 0x19, 0x76, 0x7a, 0x26, 0xc6, 0x7e, 0xf7, 0x0f, 0xe3, 0x7a, 0x29, 0x5d, 0x2f, 0x8d, 0xeb, 0xe5, + 0x7e, 0xf7, 0xa0, 0x24, 0xc5, 0x6f, 0xfc, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x05, 0xf7, 0x92, 0x43, + 0x84, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/bytestream/bytestream.pb.go b/vendor/google.golang.org/genproto/googleapis/bytestream/bytestream.pb.go new file mode 100644 index 0000000..bfa0a2d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/bytestream/bytestream.pb.go @@ -0,0 +1,675 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/bytestream/bytestream.proto + +package bytestream // import "google.golang.org/genproto/googleapis/bytestream" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request object for ByteStream.Read. +type ReadRequest struct { + // The name of the resource to read. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The offset for the first byte to return in the read, relative to the start + // of the resource. + // + // A `read_offset` that is negative or greater than the size of the resource + // will cause an `OUT_OF_RANGE` error. + ReadOffset int64 `protobuf:"varint,2,opt,name=read_offset,json=readOffset,proto3" json:"read_offset,omitempty"` + // The maximum number of `data` bytes the server is allowed to return in the + // sum of all `ReadResponse` messages. A `read_limit` of zero indicates that + // there is no limit, and a negative `read_limit` will cause an error. + // + // If the stream returns fewer bytes than allowed by the `read_limit` and no + // error occurred, the stream includes all data from the `read_offset` to the + // end of the resource. + ReadLimit int64 `protobuf:"varint,3,opt,name=read_limit,json=readLimit,proto3" json:"read_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRequest) Reset() { *m = ReadRequest{} } +func (m *ReadRequest) String() string { return proto.CompactTextString(m) } +func (*ReadRequest) ProtoMessage() {} +func (*ReadRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{0} +} +func (m *ReadRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRequest.Unmarshal(m, b) +} +func (m *ReadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRequest.Marshal(b, m, deterministic) +} +func (dst *ReadRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRequest.Merge(dst, src) +} +func (m *ReadRequest) XXX_Size() int { + return xxx_messageInfo_ReadRequest.Size(m) +} +func (m *ReadRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRequest proto.InternalMessageInfo + +func (m *ReadRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ReadRequest) GetReadOffset() int64 { + if m != nil { + return m.ReadOffset + } + return 0 +} + +func (m *ReadRequest) GetReadLimit() int64 { + if m != nil { + return m.ReadLimit + } + return 0 +} + +// Response object for ByteStream.Read. +type ReadResponse struct { + // A portion of the data for the resource. The service **may** leave `data` + // empty for any given `ReadResponse`. This enables the service to inform the + // client that the request is still live while it is running an operation to + // generate more data. 
+ Data []byte `protobuf:"bytes,10,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadResponse) Reset() { *m = ReadResponse{} } +func (m *ReadResponse) String() string { return proto.CompactTextString(m) } +func (*ReadResponse) ProtoMessage() {} +func (*ReadResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{1} +} +func (m *ReadResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadResponse.Unmarshal(m, b) +} +func (m *ReadResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadResponse.Marshal(b, m, deterministic) +} +func (dst *ReadResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadResponse.Merge(dst, src) +} +func (m *ReadResponse) XXX_Size() int { + return xxx_messageInfo_ReadResponse.Size(m) +} +func (m *ReadResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReadResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadResponse proto.InternalMessageInfo + +func (m *ReadResponse) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// Request object for ByteStream.Write. +type WriteRequest struct { + // The name of the resource to write. This **must** be set on the first + // `WriteRequest` of each `Write()` action. If it is set on subsequent calls, + // it **must** match the value of the first request. + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The offset from the beginning of the resource at which the data should be + // written. It is required on all `WriteRequest`s. + // + // In the first `WriteRequest` of a `Write()` action, it indicates + // the initial offset for the `Write()` call. The value **must** be equal to + // the `committed_size` that a call to `QueryWriteStatus()` would return. + // + // On subsequent calls, this value **must** be set and **must** be equal to + // the sum of the first `write_offset` and the sizes of all `data` bundles + // sent previously on this stream. + // + // An incorrect value will cause an error. + WriteOffset int64 `protobuf:"varint,2,opt,name=write_offset,json=writeOffset,proto3" json:"write_offset,omitempty"` + // If `true`, this indicates that the write is complete. Sending any + // `WriteRequest`s subsequent to one in which `finish_write` is `true` will + // cause an error. + FinishWrite bool `protobuf:"varint,3,opt,name=finish_write,json=finishWrite,proto3" json:"finish_write,omitempty"` + // A portion of the data for the resource. The client **may** leave `data` + // empty for any given `WriteRequest`. This enables the client to inform the + // service that the request is still live while it is running an operation to + // generate more data. 
+ Data []byte `protobuf:"bytes,10,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteRequest) Reset() { *m = WriteRequest{} } +func (m *WriteRequest) String() string { return proto.CompactTextString(m) } +func (*WriteRequest) ProtoMessage() {} +func (*WriteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{2} +} +func (m *WriteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteRequest.Unmarshal(m, b) +} +func (m *WriteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteRequest.Marshal(b, m, deterministic) +} +func (dst *WriteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteRequest.Merge(dst, src) +} +func (m *WriteRequest) XXX_Size() int { + return xxx_messageInfo_WriteRequest.Size(m) +} +func (m *WriteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WriteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteRequest proto.InternalMessageInfo + +func (m *WriteRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *WriteRequest) GetWriteOffset() int64 { + if m != nil { + return m.WriteOffset + } + return 0 +} + +func (m *WriteRequest) GetFinishWrite() bool { + if m != nil { + return m.FinishWrite + } + return false +} + +func (m *WriteRequest) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// Response object for ByteStream.Write. +type WriteResponse struct { + // The number of bytes that have been processed for the given resource. + CommittedSize int64 `protobuf:"varint,1,opt,name=committed_size,json=committedSize,proto3" json:"committed_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteResponse) Reset() { *m = WriteResponse{} } +func (m *WriteResponse) String() string { return proto.CompactTextString(m) } +func (*WriteResponse) ProtoMessage() {} +func (*WriteResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{3} +} +func (m *WriteResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteResponse.Unmarshal(m, b) +} +func (m *WriteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteResponse.Marshal(b, m, deterministic) +} +func (dst *WriteResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteResponse.Merge(dst, src) +} +func (m *WriteResponse) XXX_Size() int { + return xxx_messageInfo_WriteResponse.Size(m) +} +func (m *WriteResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WriteResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteResponse proto.InternalMessageInfo + +func (m *WriteResponse) GetCommittedSize() int64 { + if m != nil { + return m.CommittedSize + } + return 0 +} + +// Request object for ByteStream.QueryWriteStatus. +type QueryWriteStatusRequest struct { + // The name of the resource whose write status is being requested. 
+ ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryWriteStatusRequest) Reset() { *m = QueryWriteStatusRequest{} } +func (m *QueryWriteStatusRequest) String() string { return proto.CompactTextString(m) } +func (*QueryWriteStatusRequest) ProtoMessage() {} +func (*QueryWriteStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{4} +} +func (m *QueryWriteStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryWriteStatusRequest.Unmarshal(m, b) +} +func (m *QueryWriteStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryWriteStatusRequest.Marshal(b, m, deterministic) +} +func (dst *QueryWriteStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryWriteStatusRequest.Merge(dst, src) +} +func (m *QueryWriteStatusRequest) XXX_Size() int { + return xxx_messageInfo_QueryWriteStatusRequest.Size(m) +} +func (m *QueryWriteStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueryWriteStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryWriteStatusRequest proto.InternalMessageInfo + +func (m *QueryWriteStatusRequest) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +// Response object for ByteStream.QueryWriteStatus. +type QueryWriteStatusResponse struct { + // The number of bytes that have been processed for the given resource. + CommittedSize int64 `protobuf:"varint,1,opt,name=committed_size,json=committedSize,proto3" json:"committed_size,omitempty"` + // `complete` is `true` only if the client has sent a `WriteRequest` with + // `finish_write` set to true, and the server has processed that request. 
+ Complete bool `protobuf:"varint,2,opt,name=complete,proto3" json:"complete,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryWriteStatusResponse) Reset() { *m = QueryWriteStatusResponse{} } +func (m *QueryWriteStatusResponse) String() string { return proto.CompactTextString(m) } +func (*QueryWriteStatusResponse) ProtoMessage() {} +func (*QueryWriteStatusResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_bytestream_52dd791ecad88fee, []int{5} +} +func (m *QueryWriteStatusResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryWriteStatusResponse.Unmarshal(m, b) +} +func (m *QueryWriteStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryWriteStatusResponse.Marshal(b, m, deterministic) +} +func (dst *QueryWriteStatusResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryWriteStatusResponse.Merge(dst, src) +} +func (m *QueryWriteStatusResponse) XXX_Size() int { + return xxx_messageInfo_QueryWriteStatusResponse.Size(m) +} +func (m *QueryWriteStatusResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueryWriteStatusResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryWriteStatusResponse proto.InternalMessageInfo + +func (m *QueryWriteStatusResponse) GetCommittedSize() int64 { + if m != nil { + return m.CommittedSize + } + return 0 +} + +func (m *QueryWriteStatusResponse) GetComplete() bool { + if m != nil { + return m.Complete + } + return false +} + +func init() { + proto.RegisterType((*ReadRequest)(nil), "google.bytestream.ReadRequest") + proto.RegisterType((*ReadResponse)(nil), "google.bytestream.ReadResponse") + proto.RegisterType((*WriteRequest)(nil), "google.bytestream.WriteRequest") + proto.RegisterType((*WriteResponse)(nil), "google.bytestream.WriteResponse") + proto.RegisterType((*QueryWriteStatusRequest)(nil), "google.bytestream.QueryWriteStatusRequest") + proto.RegisterType((*QueryWriteStatusResponse)(nil), "google.bytestream.QueryWriteStatusResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ByteStreamClient is the client API for ByteStream service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ByteStreamClient interface { + // `Read()` is used to retrieve the contents of a resource as a sequence + // of bytes. The bytes are returned in a sequence of responses, and the + // responses are delivered as the results of a server-side streaming RPC. + Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (ByteStream_ReadClient, error) + // `Write()` is used to send the contents of a resource as a sequence of + // bytes. The bytes are sent in a sequence of request protos of a client-side + // streaming RPC. + // + // A `Write()` action is resumable. If there is an error or the connection is + // broken during the `Write()`, the client should check the status of the + // `Write()` by calling `QueryWriteStatus()` and continue writing from the + // returned `committed_size`. This may be less than the amount of data the + // client previously sent. 
+ // + // Calling `Write()` on a resource name that was previously written and + // finalized could cause an error, depending on whether the underlying service + // allows over-writing of previously written resources. + // + // When the client closes the request channel, the service will respond with + // a `WriteResponse`. The service will not view the resource as `complete` + // until the client has sent a `WriteRequest` with `finish_write` set to + // `true`. Sending any requests on a stream after sending a request with + // `finish_write` set to `true` will cause an error. The client **should** + // check the `WriteResponse` it receives to determine how much data the + // service was able to commit and whether the service views the resource as + // `complete` or not. + Write(ctx context.Context, opts ...grpc.CallOption) (ByteStream_WriteClient, error) + // `QueryWriteStatus()` is used to find the `committed_size` for a resource + // that is being written, which can then be used as the `write_offset` for + // the next `Write()` call. + // + // If the resource does not exist (i.e., the resource has been deleted, or the + // first `Write()` has not yet reached the service), this method returns the + // error `NOT_FOUND`. + // + // The client **may** call `QueryWriteStatus()` at any time to determine how + // much data has been processed for this resource. This is useful if the + // client is buffering data and needs to know which data can be safely + // evicted. For any sequence of `QueryWriteStatus()` calls for a given + // resource name, the sequence of returned `committed_size` values will be + // non-decreasing. + QueryWriteStatus(ctx context.Context, in *QueryWriteStatusRequest, opts ...grpc.CallOption) (*QueryWriteStatusResponse, error) +} + +type byteStreamClient struct { + cc *grpc.ClientConn +} + +func NewByteStreamClient(cc *grpc.ClientConn) ByteStreamClient { + return &byteStreamClient{cc} +} + +func (c *byteStreamClient) Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (ByteStream_ReadClient, error) { + stream, err := c.cc.NewStream(ctx, &_ByteStream_serviceDesc.Streams[0], "/google.bytestream.ByteStream/Read", opts...) + if err != nil { + return nil, err + } + x := &byteStreamReadClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type ByteStream_ReadClient interface { + Recv() (*ReadResponse, error) + grpc.ClientStream +} + +type byteStreamReadClient struct { + grpc.ClientStream +} + +func (x *byteStreamReadClient) Recv() (*ReadResponse, error) { + m := new(ReadResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *byteStreamClient) Write(ctx context.Context, opts ...grpc.CallOption) (ByteStream_WriteClient, error) { + stream, err := c.cc.NewStream(ctx, &_ByteStream_serviceDesc.Streams[1], "/google.bytestream.ByteStream/Write", opts...) 
+ if err != nil { + return nil, err + } + x := &byteStreamWriteClient{stream} + return x, nil +} + +type ByteStream_WriteClient interface { + Send(*WriteRequest) error + CloseAndRecv() (*WriteResponse, error) + grpc.ClientStream +} + +type byteStreamWriteClient struct { + grpc.ClientStream +} + +func (x *byteStreamWriteClient) Send(m *WriteRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *byteStreamWriteClient) CloseAndRecv() (*WriteResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(WriteResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *byteStreamClient) QueryWriteStatus(ctx context.Context, in *QueryWriteStatusRequest, opts ...grpc.CallOption) (*QueryWriteStatusResponse, error) { + out := new(QueryWriteStatusResponse) + err := c.cc.Invoke(ctx, "/google.bytestream.ByteStream/QueryWriteStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ByteStreamServer is the server API for ByteStream service. +type ByteStreamServer interface { + // `Read()` is used to retrieve the contents of a resource as a sequence + // of bytes. The bytes are returned in a sequence of responses, and the + // responses are delivered as the results of a server-side streaming RPC. + Read(*ReadRequest, ByteStream_ReadServer) error + // `Write()` is used to send the contents of a resource as a sequence of + // bytes. The bytes are sent in a sequence of request protos of a client-side + // streaming RPC. + // + // A `Write()` action is resumable. If there is an error or the connection is + // broken during the `Write()`, the client should check the status of the + // `Write()` by calling `QueryWriteStatus()` and continue writing from the + // returned `committed_size`. This may be less than the amount of data the + // client previously sent. + // + // Calling `Write()` on a resource name that was previously written and + // finalized could cause an error, depending on whether the underlying service + // allows over-writing of previously written resources. + // + // When the client closes the request channel, the service will respond with + // a `WriteResponse`. The service will not view the resource as `complete` + // until the client has sent a `WriteRequest` with `finish_write` set to + // `true`. Sending any requests on a stream after sending a request with + // `finish_write` set to `true` will cause an error. The client **should** + // check the `WriteResponse` it receives to determine how much data the + // service was able to commit and whether the service views the resource as + // `complete` or not. + Write(ByteStream_WriteServer) error + // `QueryWriteStatus()` is used to find the `committed_size` for a resource + // that is being written, which can then be used as the `write_offset` for + // the next `Write()` call. + // + // If the resource does not exist (i.e., the resource has been deleted, or the + // first `Write()` has not yet reached the service), this method returns the + // error `NOT_FOUND`. + // + // The client **may** call `QueryWriteStatus()` at any time to determine how + // much data has been processed for this resource. This is useful if the + // client is buffering data and needs to know which data can be safely + // evicted. For any sequence of `QueryWriteStatus()` calls for a given + // resource name, the sequence of returned `committed_size` values will be + // non-decreasing. 
+ QueryWriteStatus(context.Context, *QueryWriteStatusRequest) (*QueryWriteStatusResponse, error) +} + +func RegisterByteStreamServer(s *grpc.Server, srv ByteStreamServer) { + s.RegisterService(&_ByteStream_serviceDesc, srv) +} + +func _ByteStream_Read_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(ByteStreamServer).Read(m, &byteStreamReadServer{stream}) +} + +type ByteStream_ReadServer interface { + Send(*ReadResponse) error + grpc.ServerStream +} + +type byteStreamReadServer struct { + grpc.ServerStream +} + +func (x *byteStreamReadServer) Send(m *ReadResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _ByteStream_Write_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(ByteStreamServer).Write(&byteStreamWriteServer{stream}) +} + +type ByteStream_WriteServer interface { + SendAndClose(*WriteResponse) error + Recv() (*WriteRequest, error) + grpc.ServerStream +} + +type byteStreamWriteServer struct { + grpc.ServerStream +} + +func (x *byteStreamWriteServer) SendAndClose(m *WriteResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *byteStreamWriteServer) Recv() (*WriteRequest, error) { + m := new(WriteRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _ByteStream_QueryWriteStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryWriteStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ByteStreamServer).QueryWriteStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.bytestream.ByteStream/QueryWriteStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ByteStreamServer).QueryWriteStatus(ctx, req.(*QueryWriteStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ByteStream_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.bytestream.ByteStream", + HandlerType: (*ByteStreamServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "QueryWriteStatus", + Handler: _ByteStream_QueryWriteStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Read", + Handler: _ByteStream_Read_Handler, + ServerStreams: true, + }, + { + StreamName: "Write", + Handler: _ByteStream_Write_Handler, + ClientStreams: true, + }, + }, + Metadata: "google/bytestream/bytestream.proto", +} + +func init() { + proto.RegisterFile("google/bytestream/bytestream.proto", fileDescriptor_bytestream_52dd791ecad88fee) +} + +var fileDescriptor_bytestream_52dd791ecad88fee = []byte{ + // 446 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5b, 0x8b, 0x13, 0x31, + 0x14, 0x66, 0xb6, 0xab, 0x74, 0x4f, 0xa7, 0x5e, 0x02, 0xe2, 0x30, 0xe8, 0x6e, 0x77, 0x44, 0x28, + 0x0a, 0x33, 0xa2, 0xe0, 0xcb, 0x82, 0x0f, 0x7d, 0x13, 0x16, 0x2f, 0xd9, 0x07, 0x41, 0x90, 0x21, + 0x6d, 0x4f, 0xc7, 0x60, 0x93, 0x8c, 0x49, 0x86, 0xa5, 0xfb, 0x1f, 0x7c, 0xf1, 0x17, 0x4b, 0x92, + 0xb1, 0x1d, 0x6d, 0x0b, 0xdb, 0xb7, 0xe4, 0xbb, 0xcc, 0xf9, 0xe6, 0xe4, 0x1c, 0xc8, 0x2a, 0xa5, + 0xaa, 0x25, 0x16, 0xd3, 0x95, 0x45, 0x63, 0x35, 0x32, 0xd1, 0x39, 0xe6, 0xb5, 0x56, 0x56, 0x91, + 0x87, 0x41, 0x93, 0x6f, 0x88, 0xf4, 0x49, 0x6b, 0x63, 0x35, 0x2f, 0x98, 0x94, 0xca, 0x32, 0xcb, + 0x95, 0x34, 0xc1, 0x90, 0x9e, 
0xb6, 0xac, 0xbf, 0x4d, 0x9b, 0x45, 0x71, 0xad, 0x59, 0x5d, 0xa3, + 0x6e, 0xf9, 0x4c, 0xc3, 0x80, 0x22, 0x9b, 0x53, 0xfc, 0xd9, 0xa0, 0xb1, 0xe4, 0x19, 0x0c, 0x35, + 0x1a, 0xd5, 0xe8, 0x19, 0x96, 0x92, 0x09, 0x4c, 0xa2, 0x51, 0x34, 0x3e, 0xa1, 0xf1, 0x5f, 0xf0, + 0x03, 0x13, 0x48, 0xce, 0x60, 0xa0, 0x91, 0xcd, 0x4b, 0xb5, 0x58, 0x18, 0xb4, 0xc9, 0xd1, 0x28, + 0x1a, 0xf7, 0x28, 0x38, 0xe8, 0xa3, 0x47, 0xc8, 0x53, 0xf0, 0xb7, 0x72, 0xc9, 0x05, 0xb7, 0x49, + 0xcf, 0xf3, 0x27, 0x0e, 0xb9, 0x74, 0x40, 0x96, 0x41, 0x1c, 0x6a, 0x9a, 0x5a, 0x49, 0x83, 0x84, + 0xc0, 0xf1, 0x9c, 0x59, 0x96, 0xc0, 0x28, 0x1a, 0xc7, 0xd4, 0x9f, 0xb3, 0x5f, 0x11, 0xc4, 0x5f, + 0x34, 0xb7, 0x78, 0x50, 0xb2, 0x73, 0x88, 0xaf, 0x9d, 0xe9, 0xdf, 0x68, 0x03, 0x8f, 0xb5, 0xd9, + 0xce, 0x21, 0x5e, 0x70, 0xc9, 0xcd, 0xf7, 0xd2, 0xa3, 0x3e, 0x5d, 0x9f, 0x0e, 0x02, 0xe6, 0x2b, + 0xee, 0xcc, 0xf3, 0x16, 0x86, 0x6d, 0x9c, 0x36, 0xf4, 0x73, 0xb8, 0x37, 0x53, 0x42, 0x70, 0x6b, + 0x71, 0x5e, 0x1a, 0x7e, 0x13, 0x02, 0xf5, 0xe8, 0x70, 0x8d, 0x5e, 0xf1, 0x1b, 0xcc, 0xde, 0xc1, + 0xe3, 0xcf, 0x0d, 0xea, 0x95, 0x37, 0x5f, 0x59, 0x66, 0x1b, 0x73, 0xc8, 0x1f, 0x65, 0xdf, 0x20, + 0xd9, 0xf6, 0x1f, 0x14, 0x81, 0xa4, 0xd0, 0x9f, 0x29, 0x51, 0x2f, 0xd1, 0xa2, 0x6f, 0x48, 0x9f, + 0xae, 0xef, 0xaf, 0x7f, 0x1f, 0x01, 0x4c, 0x56, 0xee, 0xcb, 0x6e, 0x96, 0xc8, 0x7b, 0x38, 0x76, + 0x2f, 0x43, 0x4e, 0xf3, 0xad, 0x39, 0xcb, 0x3b, 0x63, 0x92, 0x9e, 0xed, 0xe5, 0x43, 0xb4, 0x57, + 0x11, 0xb9, 0x84, 0x3b, 0xa1, 0x9b, 0xbb, 0xb4, 0xdd, 0x97, 0x4d, 0x47, 0xfb, 0x05, 0xe1, 0x6b, + 0xe3, 0x88, 0xfc, 0x80, 0x07, 0xff, 0xb7, 0x81, 0xbc, 0xd8, 0xe1, 0xdb, 0xd3, 0xeb, 0xf4, 0xe5, + 0xad, 0xb4, 0xa1, 0xdc, 0x04, 0xe1, 0xd1, 0x4c, 0x89, 0x6d, 0xc7, 0xe4, 0xfe, 0xa6, 0x55, 0x9f, + 0xdc, 0xf6, 0x7c, 0xbd, 0x68, 0x35, 0x95, 0x5a, 0x32, 0x59, 0xe5, 0x4a, 0x57, 0x45, 0x85, 0xd2, + 0x6f, 0x56, 0x11, 0x28, 0x56, 0x73, 0xd3, 0x59, 0xe3, 0x8b, 0xcd, 0x71, 0x7a, 0xd7, 0xeb, 0xde, + 0xfc, 0x09, 0x00, 0x00, 0xff, 0xff, 0x8f, 0x91, 0x09, 0xd4, 0xf8, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/asset_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/asset_service.pb.go new file mode 100644 index 0000000..43d71c8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/asset_service.pb.go @@ -0,0 +1,738 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/asset/v1/asset_service.proto + +package asset // import "google.golang.org/genproto/googleapis/cloud/asset/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Asset content type. +type ContentType int32 + +const ( + // Unspecified content type. + ContentType_CONTENT_TYPE_UNSPECIFIED ContentType = 0 + // Resource metadata. 
+ ContentType_RESOURCE ContentType = 1 + // The actual IAM policy set on a resource. + ContentType_IAM_POLICY ContentType = 2 +) + +var ContentType_name = map[int32]string{ + 0: "CONTENT_TYPE_UNSPECIFIED", + 1: "RESOURCE", + 2: "IAM_POLICY", +} +var ContentType_value = map[string]int32{ + "CONTENT_TYPE_UNSPECIFIED": 0, + "RESOURCE": 1, + "IAM_POLICY": 2, +} + +func (x ContentType) String() string { + return proto.EnumName(ContentType_name, int32(x)) +} +func (ContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{0} +} + +// Export asset request. +type ExportAssetsRequest struct { + // Required. The relative name of the root asset. This can only be an + // organization number (such as "organizations/123"), a project ID (such as + // "projects/my-project-id"), or a project number (such as "projects/12345"), + // or a folder number (such as "folders/123"). + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Timestamp to take an asset snapshot. This can only be set to a timestamp + // between 2018-10-02 UTC (inclusive) and the current time. If not specified, + // the current time will be used. Due to delays in resource data collection + // and indexing, there is a volatile window during which running the same + // query may get different results. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // A list of asset types of which to take a snapshot for. For example: + // "compute.googleapis.com/Disk". If specified, only matching assets will be returned. + // See [Introduction to Cloud Asset + // Inventory](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview) + // for all supported asset types. + AssetTypes []string `protobuf:"bytes,3,rep,name=asset_types,json=assetTypes,proto3" json:"asset_types,omitempty"` + // Asset content type. If not specified, no content but the asset name will be + // returned. + ContentType ContentType `protobuf:"varint,4,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1.ContentType" json:"content_type,omitempty"` + // Required. Output configuration indicating where the results will be output + // to. All results will be in newline delimited JSON format. 
+ OutputConfig *OutputConfig `protobuf:"bytes,5,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAssetsRequest) Reset() { *m = ExportAssetsRequest{} } +func (m *ExportAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*ExportAssetsRequest) ProtoMessage() {} +func (*ExportAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{0} +} +func (m *ExportAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAssetsRequest.Unmarshal(m, b) +} +func (m *ExportAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *ExportAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAssetsRequest.Merge(dst, src) +} +func (m *ExportAssetsRequest) XXX_Size() int { + return xxx_messageInfo_ExportAssetsRequest.Size(m) +} +func (m *ExportAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAssetsRequest proto.InternalMessageInfo + +func (m *ExportAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ExportAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ExportAssetsRequest) GetAssetTypes() []string { + if m != nil { + return m.AssetTypes + } + return nil +} + +func (m *ExportAssetsRequest) GetContentType() ContentType { + if m != nil { + return m.ContentType + } + return ContentType_CONTENT_TYPE_UNSPECIFIED +} + +func (m *ExportAssetsRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The export asset response. This message is returned by the +// [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] method in the returned +// [google.longrunning.Operation.response][google.longrunning.Operation.response] field. +type ExportAssetsResponse struct { + // Time the snapshot was taken. + ReadTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Output configuration indicating where the results were output to. + // All results are in JSON format. 
+ OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAssetsResponse) Reset() { *m = ExportAssetsResponse{} } +func (m *ExportAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*ExportAssetsResponse) ProtoMessage() {} +func (*ExportAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{1} +} +func (m *ExportAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAssetsResponse.Unmarshal(m, b) +} +func (m *ExportAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *ExportAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAssetsResponse.Merge(dst, src) +} +func (m *ExportAssetsResponse) XXX_Size() int { + return xxx_messageInfo_ExportAssetsResponse.Size(m) +} +func (m *ExportAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAssetsResponse proto.InternalMessageInfo + +func (m *ExportAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ExportAssetsResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Batch get assets history request. +type BatchGetAssetsHistoryRequest struct { + // Required. The relative name of the root asset. It can only be an + // organization number (such as "organizations/123"), a project ID (such as + // "projects/my-project-id")", or a project number (such as "projects/12345"). + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A list of the full names of the assets. For example: + // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. + // See [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // and [Resource Name Format](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/resource-name-format) + // for more info. + // + // The request becomes a no-op if the asset name list is empty, and the max + // size of the asset name list is 100 in one request. + AssetNames []string `protobuf:"bytes,2,rep,name=asset_names,json=assetNames,proto3" json:"asset_names,omitempty"` + // Required. The content type. + ContentType ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1.ContentType" json:"content_type,omitempty"` + // Optional. The time window for the asset history. Both start_time and + // end_time are optional and if set, it must be after 2018-10-02 UTC. If + // end_time is not set, it is default to current timestamp. If start_time is + // not set, the snapshot of the assets at end_time will be returned. The + // returned results contain all temporal assets whose time window overlap with + // read_time_window. 
+ ReadTimeWindow *TimeWindow `protobuf:"bytes,4,opt,name=read_time_window,json=readTimeWindow,proto3" json:"read_time_window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetAssetsHistoryRequest) Reset() { *m = BatchGetAssetsHistoryRequest{} } +func (m *BatchGetAssetsHistoryRequest) String() string { return proto.CompactTextString(m) } +func (*BatchGetAssetsHistoryRequest) ProtoMessage() {} +func (*BatchGetAssetsHistoryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{2} +} +func (m *BatchGetAssetsHistoryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Unmarshal(m, b) +} +func (m *BatchGetAssetsHistoryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Marshal(b, m, deterministic) +} +func (dst *BatchGetAssetsHistoryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetAssetsHistoryRequest.Merge(dst, src) +} +func (m *BatchGetAssetsHistoryRequest) XXX_Size() int { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Size(m) +} +func (m *BatchGetAssetsHistoryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetAssetsHistoryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetAssetsHistoryRequest proto.InternalMessageInfo + +func (m *BatchGetAssetsHistoryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchGetAssetsHistoryRequest) GetAssetNames() []string { + if m != nil { + return m.AssetNames + } + return nil +} + +func (m *BatchGetAssetsHistoryRequest) GetContentType() ContentType { + if m != nil { + return m.ContentType + } + return ContentType_CONTENT_TYPE_UNSPECIFIED +} + +func (m *BatchGetAssetsHistoryRequest) GetReadTimeWindow() *TimeWindow { + if m != nil { + return m.ReadTimeWindow + } + return nil +} + +// Batch get assets history response. +type BatchGetAssetsHistoryResponse struct { + // A list of assets with valid time windows. 
+ Assets []*TemporalAsset `protobuf:"bytes,1,rep,name=assets,proto3" json:"assets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetAssetsHistoryResponse) Reset() { *m = BatchGetAssetsHistoryResponse{} } +func (m *BatchGetAssetsHistoryResponse) String() string { return proto.CompactTextString(m) } +func (*BatchGetAssetsHistoryResponse) ProtoMessage() {} +func (*BatchGetAssetsHistoryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{3} +} +func (m *BatchGetAssetsHistoryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Unmarshal(m, b) +} +func (m *BatchGetAssetsHistoryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Marshal(b, m, deterministic) +} +func (dst *BatchGetAssetsHistoryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetAssetsHistoryResponse.Merge(dst, src) +} +func (m *BatchGetAssetsHistoryResponse) XXX_Size() int { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Size(m) +} +func (m *BatchGetAssetsHistoryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetAssetsHistoryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetAssetsHistoryResponse proto.InternalMessageInfo + +func (m *BatchGetAssetsHistoryResponse) GetAssets() []*TemporalAsset { + if m != nil { + return m.Assets + } + return nil +} + +// Output configuration for export assets destination. +type OutputConfig struct { + // Asset export destination. + // + // Types that are valid to be assigned to Destination: + // *OutputConfig_GcsDestination + Destination isOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{4} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +type isOutputConfig_Destination interface { + isOutputConfig_Destination() +} + +type OutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} + +func (m *OutputConfig) GetDestination() isOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ + (*OutputConfig_GcsDestination)(nil), + } +} + +func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsDestination{msg} + return true, err + default: + return false, nil + } +} + +func _OutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Storage location. +type GcsDestination struct { + // Required. + // + // Types that are valid to be assigned to ObjectUri: + // *GcsDestination_Uri + ObjectUri isGcsDestination_ObjectUri `protobuf_oneof:"object_uri"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_4116b6e08342e761, []int{5} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +type isGcsDestination_ObjectUri interface { + isGcsDestination_ObjectUri() +} + +type GcsDestination_Uri struct { + Uri string `protobuf:"bytes,1,opt,name=uri,proto3,oneof"` +} + +func (*GcsDestination_Uri) isGcsDestination_ObjectUri() {} + +func (m *GcsDestination) GetObjectUri() isGcsDestination_ObjectUri { + if m != nil { + return m.ObjectUri + } + return nil +} + +func (m *GcsDestination) GetUri() string { + if x, ok := m.GetObjectUri().(*GcsDestination_Uri); ok { + return x.Uri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GcsDestination) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GcsDestination_OneofMarshaler, _GcsDestination_OneofUnmarshaler, _GcsDestination_OneofSizer, []interface{}{ + (*GcsDestination_Uri)(nil), + } +} + +func _GcsDestination_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GcsDestination) + // object_uri + switch x := m.ObjectUri.(type) { + case *GcsDestination_Uri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uri) + case nil: + default: + return fmt.Errorf("GcsDestination.ObjectUri has unexpected type %T", x) + } + return nil +} + +func _GcsDestination_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GcsDestination) + switch tag { + case 1: // object_uri.uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ObjectUri = &GcsDestination_Uri{x} + return true, err + default: + return false, nil + } +} + +func _GcsDestination_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GcsDestination) + // object_uri + switch x := m.ObjectUri.(type) { + case *GcsDestination_Uri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uri))) + n += len(x.Uri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ExportAssetsRequest)(nil), "google.cloud.asset.v1.ExportAssetsRequest") + proto.RegisterType((*ExportAssetsResponse)(nil), "google.cloud.asset.v1.ExportAssetsResponse") + proto.RegisterType((*BatchGetAssetsHistoryRequest)(nil), "google.cloud.asset.v1.BatchGetAssetsHistoryRequest") + proto.RegisterType((*BatchGetAssetsHistoryResponse)(nil), "google.cloud.asset.v1.BatchGetAssetsHistoryResponse") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.asset.v1.OutputConfig") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.asset.v1.GcsDestination") + proto.RegisterEnum("google.cloud.asset.v1.ContentType", ContentType_name, ContentType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AssetServiceClient is the client API for AssetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AssetServiceClient interface { + // Exports assets with time and resource types to a given Cloud Storage + // location. The output format is newline-delimited JSON. + // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you + // to keep track of the export. + ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Batch gets the update history of assets that overlap a time window. + // For RESOURCE content, this API outputs history with asset in both + // non-delete or deleted status. + // For IAM_POLICY content, this API outputs history when the asset and its + // attached IAM POLICY both exist. This can create gaps in the output history. 
+ // If a specified asset does not exist, this API returns an INVALID_ARGUMENT + // error. + BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) +} + +type assetServiceClient struct { + cc *grpc.ClientConn +} + +func NewAssetServiceClient(cc *grpc.ClientConn) AssetServiceClient { + return &assetServiceClient{cc} +} + +func (c *assetServiceClient) ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.asset.v1.AssetService/ExportAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *assetServiceClient) BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) { + out := new(BatchGetAssetsHistoryResponse) + err := c.cc.Invoke(ctx, "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AssetServiceServer is the server API for AssetService service. +type AssetServiceServer interface { + // Exports assets with time and resource types to a given Cloud Storage + // location. The output format is newline-delimited JSON. + // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you + // to keep track of the export. + ExportAssets(context.Context, *ExportAssetsRequest) (*longrunning.Operation, error) + // Batch gets the update history of assets that overlap a time window. + // For RESOURCE content, this API outputs history with asset in both + // non-delete or deleted status. + // For IAM_POLICY content, this API outputs history when the asset and its + // attached IAM POLICY both exist. This can create gaps in the output history. + // If a specified asset does not exist, this API returns an INVALID_ARGUMENT + // error. 
+ BatchGetAssetsHistory(context.Context, *BatchGetAssetsHistoryRequest) (*BatchGetAssetsHistoryResponse, error) +} + +func RegisterAssetServiceServer(s *grpc.Server, srv AssetServiceServer) { + s.RegisterService(&_AssetService_serviceDesc, srv) +} + +func _AssetService_ExportAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).ExportAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.asset.v1.AssetService/ExportAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).ExportAssets(ctx, req.(*ExportAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AssetService_BatchGetAssetsHistory_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchGetAssetsHistoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, req.(*BatchGetAssetsHistoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AssetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.asset.v1.AssetService", + HandlerType: (*AssetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ExportAssets", + Handler: _AssetService_ExportAssets_Handler, + }, + { + MethodName: "BatchGetAssetsHistory", + Handler: _AssetService_BatchGetAssetsHistory_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/asset/v1/asset_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/asset/v1/asset_service.proto", fileDescriptor_asset_service_4116b6e08342e761) +} + +var fileDescriptor_asset_service_4116b6e08342e761 = []byte{ + // 739 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xc1, 0x6e, 0xd3, 0x4a, + 0x14, 0xad, 0x9d, 0xf7, 0xaa, 0x76, 0x92, 0xe6, 0xe5, 0xcd, 0x7b, 0x45, 0x26, 0x6a, 0xd5, 0xe0, + 0x02, 0x0a, 0x59, 0xd8, 0x4a, 0x8a, 0x84, 0x54, 0x60, 0xd1, 0xa4, 0xa6, 0x89, 0x80, 0x24, 0x72, + 0xd3, 0xa2, 0xa2, 0x22, 0xcb, 0x71, 0xa6, 0xc6, 0x28, 0x99, 0x31, 0x9e, 0x71, 0x4b, 0x85, 0xd8, + 0xc0, 0x27, 0xb0, 0x01, 0x89, 0x2f, 0x60, 0xc9, 0x5f, 0xc0, 0x96, 0x5f, 0x60, 0xc7, 0x4f, 0x20, + 0xcf, 0x38, 0xad, 0x43, 0xe3, 0xa2, 0x76, 0xe7, 0x7b, 0x7d, 0xee, 0xb9, 0x77, 0xce, 0x99, 0xb9, + 0xe0, 0x96, 0x4b, 0x88, 0x3b, 0x44, 0xba, 0x33, 0x24, 0xe1, 0x40, 0xb7, 0x29, 0x45, 0x4c, 0x3f, + 0xac, 0x8a, 0x0f, 0x8b, 0xa2, 0xe0, 0xd0, 0x73, 0x90, 0xe6, 0x07, 0x84, 0x11, 0xb8, 0x28, 0xa0, + 0x1a, 0x87, 0x6a, 0x1c, 0xa1, 0x1d, 0x56, 0x8b, 0x4b, 0x31, 0x83, 0xed, 0x7b, 0xba, 0x8d, 0x31, + 0x61, 0x36, 0xf3, 0x08, 0xa6, 0xa2, 0xa8, 0xa8, 0x9e, 0xc3, 0x3f, 0xc6, 0xac, 0xc6, 0x98, 0x21, + 0xc1, 0x6e, 0x10, 0x62, 0xec, 0x61, 0x57, 0x27, 0x3e, 0x0a, 0x26, 0x88, 0x56, 0x62, 0x10, 0x8f, + 0xfa, 0xe1, 0x81, 0xce, 0xbc, 0x11, 0xa2, 0xcc, 0x1e, 0xf9, 0x02, 0xa0, 0x7e, 0x90, 0xc1, 0x7f, + 0xc6, 
0x2b, 0x9f, 0x04, 0x6c, 0x83, 0x93, 0x9b, 0xe8, 0x65, 0x88, 0x28, 0x83, 0x57, 0xc0, 0xac, + 0x6f, 0x07, 0x08, 0x33, 0x45, 0x2a, 0x49, 0xe5, 0x79, 0x33, 0x8e, 0xe0, 0x1d, 0x30, 0x1f, 0x20, + 0x7b, 0x60, 0x45, 0x3c, 0x8a, 0x5c, 0x92, 0xca, 0xd9, 0x5a, 0x51, 0x8b, 0x8f, 0x38, 0x6e, 0xa2, + 0xf5, 0xc6, 0x4d, 0xcc, 0xb9, 0x08, 0x1c, 0x85, 0x70, 0x05, 0x64, 0x85, 0x3c, 0xec, 0xd8, 0x47, + 0x54, 0xc9, 0x94, 0x32, 0xe5, 0x79, 0x13, 0xf0, 0x54, 0x2f, 0xca, 0x40, 0x03, 0xe4, 0x1c, 0x82, + 0x19, 0xc2, 0x02, 0xa2, 0xfc, 0x55, 0x92, 0xca, 0xf9, 0x9a, 0xaa, 0x4d, 0xd5, 0x4f, 0x6b, 0x08, + 0x68, 0x54, 0x6a, 0x66, 0x9d, 0xd3, 0x00, 0x36, 0xc1, 0x02, 0x09, 0x99, 0x1f, 0x32, 0xcb, 0x21, + 0xf8, 0xc0, 0x73, 0x95, 0xbf, 0xf9, 0x90, 0xab, 0x29, 0x3c, 0x1d, 0x8e, 0x6d, 0x70, 0xa8, 0x99, + 0x23, 0x89, 0x48, 0xfd, 0x28, 0x81, 0xff, 0x27, 0xa5, 0xa1, 0x3e, 0xc1, 0x14, 0x4d, 0x6a, 0x20, + 0x5d, 0x40, 0x83, 0x33, 0xb3, 0xc9, 0x97, 0x9d, 0xed, 0xa7, 0x04, 0x96, 0xea, 0x36, 0x73, 0x9e, + 0x6f, 0xa1, 0x78, 0xba, 0xa6, 0x47, 0x19, 0x09, 0x8e, 0xff, 0xe4, 0xdf, 0x89, 0x0d, 0xd8, 0x1e, + 0x21, 0xaa, 0xc8, 0x09, 0x1b, 0xda, 0x51, 0xe6, 0x8c, 0x0d, 0x99, 0xcb, 0xd9, 0xf0, 0x10, 0x14, + 0x4e, 0x34, 0xb2, 0x8e, 0x3c, 0x3c, 0x20, 0x47, 0xdc, 0xd1, 0x6c, 0xed, 0x5a, 0x0a, 0x55, 0xa4, + 0xd0, 0x13, 0x0e, 0x34, 0xf3, 0x63, 0xc5, 0x44, 0xac, 0x3e, 0x03, 0xcb, 0x29, 0x87, 0x8d, 0x1d, + 0xb9, 0x07, 0x66, 0xc5, 0xdb, 0x50, 0xa4, 0x52, 0xa6, 0x9c, 0xad, 0x5d, 0x4f, 0xeb, 0x81, 0x46, + 0x3e, 0x09, 0xec, 0x21, 0x67, 0x31, 0xe3, 0x1a, 0x95, 0x80, 0x5c, 0x52, 0x6a, 0xd8, 0x05, 0xff, + 0xb8, 0x0e, 0xb5, 0x06, 0x88, 0x32, 0x0f, 0xf3, 0xe7, 0x14, 0xbb, 0x7c, 0x23, 0x85, 0x76, 0xcb, + 0xa1, 0x9b, 0xa7, 0xe0, 0xe6, 0x8c, 0x99, 0x77, 0x27, 0x32, 0xf5, 0x05, 0x90, 0x4d, 0xb0, 0xa9, + 0x35, 0x90, 0x9f, 0x2c, 0x81, 0x10, 0x64, 0xc2, 0xc0, 0x13, 0x5e, 0x35, 0x67, 0xcc, 0x28, 0xa8, + 0xe7, 0x00, 0x20, 0xfd, 0x17, 0xc8, 0x61, 0x56, 0x18, 0x78, 0x95, 0x16, 0xc8, 0x26, 0xc4, 0x86, + 0x4b, 0x40, 0x69, 0x74, 0xda, 0x3d, 0xa3, 0xdd, 0xb3, 0x7a, 0x7b, 0x5d, 0xc3, 0xda, 0x69, 0x6f, + 0x77, 0x8d, 0x46, 0xeb, 0x41, 0xcb, 0xd8, 0x2c, 0xcc, 0xc0, 0x1c, 0x98, 0x33, 0x8d, 0xed, 0xce, + 0x8e, 0xd9, 0x30, 0x0a, 0x12, 0xcc, 0x03, 0xd0, 0xda, 0x78, 0x6c, 0x75, 0x3b, 0x8f, 0x5a, 0x8d, + 0xbd, 0x82, 0x5c, 0xfb, 0x2a, 0x83, 0x1c, 0x57, 0x60, 0x5b, 0x6c, 0x2a, 0xf8, 0x4e, 0x02, 0xb9, + 0xe4, 0x4d, 0x87, 0x95, 0x94, 0x83, 0x4e, 0xd9, 0x14, 0xc5, 0xe5, 0x31, 0x36, 0xb1, 0x88, 0xb4, + 0xce, 0x78, 0x11, 0xa9, 0xe5, 0xb7, 0xdf, 0x7f, 0xbc, 0x97, 0x55, 0x75, 0x39, 0x5a, 0x60, 0xaf, + 0xc5, 0x2d, 0xbc, 0x5f, 0xd1, 0x2b, 0x6f, 0xd6, 0x51, 0x82, 0x6c, 0x5d, 0xaa, 0xc0, 0x2f, 0x12, + 0x58, 0x9c, 0x6a, 0x33, 0x5c, 0x4b, 0x19, 0xe7, 0xbc, 0x17, 0x50, 0xbc, 0x7d, 0xb1, 0x22, 0x71, + 0x93, 0x54, 0x8d, 0x8f, 0x5b, 0x86, 0x37, 0xcf, 0x8c, 0xdb, 0x9f, 0x56, 0x57, 0xff, 0x24, 0x81, + 0xab, 0x0e, 0x19, 0x4d, 0xef, 0x55, 0xff, 0x37, 0x29, 0x73, 0x37, 0x5a, 0x0d, 0x5d, 0xe9, 0xe9, + 0x7a, 0x8c, 0x75, 0xc9, 0xd0, 0xc6, 0xae, 0x46, 0x02, 0x57, 0x77, 0x11, 0xe6, 0x8b, 0x43, 0x17, + 0xbf, 0x6c, 0xdf, 0xa3, 0xbf, 0xed, 0xfe, 0xbb, 0xfc, 0xe3, 0xb3, 0xbc, 0xb8, 0x25, 0x8a, 0x1b, + 0xbc, 0x11, 0xa7, 0xd7, 0x76, 0xab, 0xdf, 0xc6, 0xf9, 0x7d, 0x9e, 0xdf, 0xe7, 0xf9, 0xfd, 0xdd, + 0x6a, 0x7f, 0x96, 0xd3, 0xae, 0xfd, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xd0, 0xb5, 0xf3, 0x28, 0xb1, + 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/assets.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/assets.pb.go new file mode 100644 index 0000000..8560e0a 
--- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1/assets.pb.go @@ -0,0 +1,361 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/asset/v1/assets.proto + +package asset // import "google.golang.org/genproto/googleapis/cloud/asset/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Temporal asset. In addition to the asset, the temporal asset includes the +// status of the asset and valid from and to time of it. +type TemporalAsset struct { + // The time window when the asset data and state was observed. + Window *TimeWindow `protobuf:"bytes,1,opt,name=window,proto3" json:"window,omitempty"` + // If the asset is deleted or not. + Deleted bool `protobuf:"varint,2,opt,name=deleted,proto3" json:"deleted,omitempty"` + // Asset. + Asset *Asset `protobuf:"bytes,3,opt,name=asset,proto3" json:"asset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TemporalAsset) Reset() { *m = TemporalAsset{} } +func (m *TemporalAsset) String() string { return proto.CompactTextString(m) } +func (*TemporalAsset) ProtoMessage() {} +func (*TemporalAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_6b5b5abd731d0fdc, []int{0} +} +func (m *TemporalAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TemporalAsset.Unmarshal(m, b) +} +func (m *TemporalAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TemporalAsset.Marshal(b, m, deterministic) +} +func (dst *TemporalAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_TemporalAsset.Merge(dst, src) +} +func (m *TemporalAsset) XXX_Size() int { + return xxx_messageInfo_TemporalAsset.Size(m) +} +func (m *TemporalAsset) XXX_DiscardUnknown() { + xxx_messageInfo_TemporalAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_TemporalAsset proto.InternalMessageInfo + +func (m *TemporalAsset) GetWindow() *TimeWindow { + if m != nil { + return m.Window + } + return nil +} + +func (m *TemporalAsset) GetDeleted() bool { + if m != nil { + return m.Deleted + } + return false +} + +func (m *TemporalAsset) GetAsset() *Asset { + if m != nil { + return m.Asset + } + return nil +} + +// A time window of (start_time, end_time]. +type TimeWindow struct { + // Start time of the time window (exclusive). + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of the time window (inclusive). + // Current timestamp if not specified. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeWindow) Reset() { *m = TimeWindow{} } +func (m *TimeWindow) String() string { return proto.CompactTextString(m) } +func (*TimeWindow) ProtoMessage() {} +func (*TimeWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_6b5b5abd731d0fdc, []int{1} +} +func (m *TimeWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeWindow.Unmarshal(m, b) +} +func (m *TimeWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeWindow.Marshal(b, m, deterministic) +} +func (dst *TimeWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeWindow.Merge(dst, src) +} +func (m *TimeWindow) XXX_Size() int { + return xxx_messageInfo_TimeWindow.Size(m) +} +func (m *TimeWindow) XXX_DiscardUnknown() { + xxx_messageInfo_TimeWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeWindow proto.InternalMessageInfo + +func (m *TimeWindow) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimeWindow) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Cloud asset. This includes all Google Cloud Platform resources, +// Cloud IAM policies, and other non-GCP assets. +type Asset struct { + // The full name of the asset. For example: + // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. + // See [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // for more information. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Type of the asset. Example: "compute.googleapis.com/Disk". + AssetType string `protobuf:"bytes,2,opt,name=asset_type,json=assetType,proto3" json:"asset_type,omitempty"` + // Representation of the resource. + Resource *Resource `protobuf:"bytes,3,opt,name=resource,proto3" json:"resource,omitempty"` + // Representation of the actual Cloud IAM policy set on a cloud resource. For + // each resource, there must be at most one Cloud IAM policy set on it. 
+ IamPolicy *v1.Policy `protobuf:"bytes,4,opt,name=iam_policy,json=iamPolicy,proto3" json:"iam_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset) Reset() { *m = Asset{} } +func (m *Asset) String() string { return proto.CompactTextString(m) } +func (*Asset) ProtoMessage() {} +func (*Asset) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_6b5b5abd731d0fdc, []int{2} +} +func (m *Asset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset.Unmarshal(m, b) +} +func (m *Asset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset.Marshal(b, m, deterministic) +} +func (dst *Asset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset.Merge(dst, src) +} +func (m *Asset) XXX_Size() int { + return xxx_messageInfo_Asset.Size(m) +} +func (m *Asset) XXX_DiscardUnknown() { + xxx_messageInfo_Asset.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset proto.InternalMessageInfo + +func (m *Asset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Asset) GetAssetType() string { + if m != nil { + return m.AssetType + } + return "" +} + +func (m *Asset) GetResource() *Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *Asset) GetIamPolicy() *v1.Policy { + if m != nil { + return m.IamPolicy + } + return nil +} + +// Representation of a cloud resource. +type Resource struct { + // The API version. Example: "v1". + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + // The URL of the discovery document containing the resource's JSON schema. + // For example: + // `"https://www.googleapis.com/discovery/v1/apis/compute/v1/rest"`. + // It will be left unspecified for resources without a discovery-based API, + // such as Cloud Bigtable. + DiscoveryDocumentUri string `protobuf:"bytes,2,opt,name=discovery_document_uri,json=discoveryDocumentUri,proto3" json:"discovery_document_uri,omitempty"` + // The JSON schema name listed in the discovery document. + // Example: "Project". It will be left unspecified for resources (such as + // Cloud Bigtable) without a discovery-based API. + DiscoveryName string `protobuf:"bytes,3,opt,name=discovery_name,json=discoveryName,proto3" json:"discovery_name,omitempty"` + // The REST URL for accessing the resource. An HTTP GET operation using this + // URL returns the resource itself. + // Example: + // `https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`. + // It will be left unspecified for resources without a REST API. + ResourceUrl string `protobuf:"bytes,4,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + // The full name of the immediate parent of this resource. See + // [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // for more information. + // + // For GCP assets, it is the parent resource defined in the [Cloud IAM policy + // hierarchy](https://cloud.google.com/iam/docs/overview#policy_hierarchy). + // For example: + // `"//cloudresourcemanager.googleapis.com/projects/my_project_123"`. + // + // For third-party assets, it is up to the users to define. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + // The content of the resource, in which some sensitive fields are scrubbed + // away and may not be present. 
+ Data *_struct.Struct `protobuf:"bytes,6,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_6b5b5abd731d0fdc, []int{3} +} +func (m *Resource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resource.Unmarshal(m, b) +} +func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resource.Marshal(b, m, deterministic) +} +func (dst *Resource) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resource.Merge(dst, src) +} +func (m *Resource) XXX_Size() int { + return xxx_messageInfo_Resource.Size(m) +} +func (m *Resource) XXX_DiscardUnknown() { + xxx_messageInfo_Resource.DiscardUnknown(m) +} + +var xxx_messageInfo_Resource proto.InternalMessageInfo + +func (m *Resource) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Resource) GetDiscoveryDocumentUri() string { + if m != nil { + return m.DiscoveryDocumentUri + } + return "" +} + +func (m *Resource) GetDiscoveryName() string { + if m != nil { + return m.DiscoveryName + } + return "" +} + +func (m *Resource) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *Resource) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Resource) GetData() *_struct.Struct { + if m != nil { + return m.Data + } + return nil +} + +func init() { + proto.RegisterType((*TemporalAsset)(nil), "google.cloud.asset.v1.TemporalAsset") + proto.RegisterType((*TimeWindow)(nil), "google.cloud.asset.v1.TimeWindow") + proto.RegisterType((*Asset)(nil), "google.cloud.asset.v1.Asset") + proto.RegisterType((*Resource)(nil), "google.cloud.asset.v1.Resource") +} + +func init() { + proto.RegisterFile("google/cloud/asset/v1/assets.proto", fileDescriptor_assets_6b5b5abd731d0fdc) +} + +var fileDescriptor_assets_6b5b5abd731d0fdc = []byte{ + // 541 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xdf, 0x6a, 0xdb, 0x3c, + 0x14, 0xc7, 0x69, 0x92, 0xc6, 0xa7, 0x5f, 0xbf, 0x0b, 0xb1, 0x74, 0x6e, 0xc8, 0x68, 0x6b, 0x18, + 0x14, 0x06, 0x36, 0xe9, 0xba, 0x8b, 0xae, 0x57, 0xeb, 0x06, 0xbb, 0x1b, 0x41, 0x4b, 0x3b, 0x18, + 0x01, 0xa3, 0xda, 0x9a, 0x11, 0xd8, 0x92, 0x91, 0xe5, 0x94, 0xdc, 0xec, 0x2d, 0x06, 0x7b, 0x86, + 0xc1, 0x5e, 0x64, 0x6f, 0xb2, 0xb7, 0x18, 0x3e, 0x92, 0x53, 0xc8, 0x5a, 0x76, 0xa7, 0x73, 0x7e, + 0x7f, 0x74, 0x7e, 0x47, 0x36, 0x84, 0xb9, 0x52, 0x79, 0xc1, 0xe3, 0xb4, 0x50, 0x4d, 0x16, 0xb3, + 0xba, 0xe6, 0x26, 0x5e, 0xcd, 0xec, 0xa1, 0x8e, 0x2a, 0xad, 0x8c, 0x22, 0x63, 0xcb, 0x89, 0x90, + 0x13, 0x21, 0x14, 0xad, 0x66, 0x93, 0xa9, 0x93, 0xb2, 0x4a, 0xc4, 0x4c, 0x4a, 0x65, 0x98, 0x11, + 0x4a, 0x3a, 0xd1, 0x64, 0xe2, 0x50, 0xc1, 0xca, 0xd6, 0xb0, 0x52, 0x85, 0x48, 0xd7, 0x0e, 0x3b, + 0x74, 0x18, 0x56, 0xb7, 0xcd, 0x97, 0x98, 0xc9, 0x0e, 0x9a, 0x6e, 0x43, 0xb5, 0xd1, 0x4d, 0x6a, + 0x1c, 0x7a, 0xb4, 0x8d, 0x1a, 0x51, 0xf2, 0xda, 0xb0, 0xb2, 0xb2, 0x84, 0xf0, 0xbb, 0x07, 0xfb, + 0x0b, 0x5e, 0x56, 0x4a, 0xb3, 0xe2, 0x4d, 0x3b, 0x28, 0xb9, 0x80, 0xe1, 0x9d, 0x90, 0x99, 0xba, + 0x0b, 0xbc, 0x63, 0xef, 0x74, 0xef, 0xec, 0x24, 0x7a, 0x30, 0x4d, 0xb4, 0x10, 0x25, 0xff, 0x84, + 0x44, 0xea, 0x04, 0x24, 0x80, 0xdd, 0x8c, 
0x17, 0xdc, 0xf0, 0x2c, 0xe8, 0x1d, 0x7b, 0xa7, 0x23, + 0xda, 0x95, 0xe4, 0x0c, 0x06, 0x28, 0x0c, 0x76, 0xd0, 0x73, 0xfa, 0x88, 0x27, 0x4e, 0x40, 0x2d, + 0x35, 0xfc, 0x0a, 0x70, 0x7f, 0x07, 0xb9, 0x00, 0xa8, 0x0d, 0xd3, 0x26, 0x69, 0x13, 0xb8, 0xd1, + 0x26, 0x9d, 0x4d, 0x17, 0x0f, 0x87, 0xc2, 0x78, 0xd4, 0x47, 0x76, 0x5b, 0x93, 0x57, 0x30, 0xe2, + 0x32, 0xb3, 0xc2, 0xde, 0x3f, 0x85, 0xbb, 0x5c, 0x66, 0x6d, 0x15, 0xfe, 0xf4, 0x60, 0x60, 0x57, + 0x42, 0xa0, 0x2f, 0x99, 0xbb, 0xd5, 0xa7, 0x78, 0x26, 0xcf, 0x00, 0x70, 0xcc, 0xc4, 0xac, 0x2b, + 0x6b, 0xeb, 0x53, 0x1f, 0x3b, 0x8b, 0x75, 0xc5, 0xc9, 0x25, 0x8c, 0x34, 0xaf, 0x55, 0xa3, 0x53, + 0xee, 0x32, 0x1f, 0x3d, 0x92, 0x99, 0x3a, 0x1a, 0xdd, 0x08, 0xc8, 0x39, 0x80, 0x60, 0x65, 0x62, + 0x3f, 0x81, 0xa0, 0x8f, 0xf2, 0x71, 0x27, 0x17, 0xac, 0x6c, 0x65, 0x73, 0x04, 0xa9, 0x2f, 0x58, + 0x69, 0x8f, 0xe1, 0x6f, 0x0f, 0x46, 0x9d, 0x59, 0xfb, 0x14, 0x2b, 0xae, 0x6b, 0xa1, 0xa4, 0x9b, + 0xba, 0x2b, 0xc9, 0x39, 0x1c, 0x64, 0xa2, 0x4e, 0xd5, 0x8a, 0xeb, 0x75, 0x92, 0xa9, 0xb4, 0x29, + 0xb9, 0x34, 0x49, 0xa3, 0x85, 0x0b, 0xf1, 0x64, 0x83, 0xbe, 0x73, 0xe0, 0xb5, 0x16, 0xe4, 0x39, + 0xfc, 0x7f, 0xaf, 0xc2, 0x65, 0xec, 0x20, 0x7b, 0x7f, 0xd3, 0xfd, 0xd0, 0x6e, 0xe5, 0x04, 0xfe, + 0xeb, 0x52, 0x24, 0x8d, 0x2e, 0x70, 0x76, 0x9f, 0xee, 0x75, 0xbd, 0x6b, 0x5d, 0x90, 0x03, 0x18, + 0x56, 0x4c, 0x73, 0x69, 0x82, 0x01, 0x82, 0xae, 0x22, 0x2f, 0xa0, 0x9f, 0x31, 0xc3, 0x82, 0x21, + 0xc6, 0x7d, 0xfa, 0xd7, 0x0b, 0x7d, 0xc4, 0xef, 0x9a, 0x22, 0xe9, 0xea, 0x9b, 0x07, 0x87, 0xa9, + 0x2a, 0x1f, 0x5e, 0xe9, 0x15, 0xe0, 0xb3, 0xcd, 0x5b, 0xe5, 0xdc, 0xfb, 0xfc, 0xda, 0x91, 0x72, + 0x55, 0x30, 0x99, 0x47, 0x4a, 0xe7, 0x71, 0xce, 0x25, 0xfa, 0xc6, 0x16, 0x62, 0x95, 0xa8, 0xb7, + 0x7e, 0xe8, 0x4b, 0x3c, 0xfc, 0xe8, 0x8d, 0xdf, 0x5b, 0xf1, 0x5b, 0xbc, 0x01, 0x7d, 0xa3, 0x9b, + 0xd9, 0xaf, 0xae, 0xbf, 0xc4, 0xfe, 0x12, 0xfb, 0xcb, 0x9b, 0xd9, 0xed, 0x10, 0x6d, 0x5f, 0xfe, + 0x09, 0x00, 0x00, 0xff, 0xff, 0x66, 0xec, 0xa8, 0x3b, 0x26, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/asset_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/asset_service.pb.go new file mode 100644 index 0000000..820127b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/asset_service.pb.go @@ -0,0 +1,740 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/asset/v1beta1/asset_service.proto + +package asset // import "google.golang.org/genproto/googleapis/cloud/asset/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Asset content type. +type ContentType int32 + +const ( + // Unspecified content type. 
+ ContentType_CONTENT_TYPE_UNSPECIFIED ContentType = 0 + // Resource metadata. + ContentType_RESOURCE ContentType = 1 + // The actual IAM policy set on a resource. + ContentType_IAM_POLICY ContentType = 2 +) + +var ContentType_name = map[int32]string{ + 0: "CONTENT_TYPE_UNSPECIFIED", + 1: "RESOURCE", + 2: "IAM_POLICY", +} +var ContentType_value = map[string]int32{ + "CONTENT_TYPE_UNSPECIFIED": 0, + "RESOURCE": 1, + "IAM_POLICY": 2, +} + +func (x ContentType) String() string { + return proto.EnumName(ContentType_name, int32(x)) +} +func (ContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{0} +} + +// Export asset request. +type ExportAssetsRequest struct { + // Required. The relative name of the root asset. This can only be an + // organization number (such as "organizations/123"), a project ID (such as + // "projects/my-project-id"), a project number (such as "projects/12345"), or + // a folder number (such as "folders/123"). + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Timestamp to take an asset snapshot. This can only be set to a timestamp + // between 2018-10-02 UTC (inclusive) and the current time. If not specified, + // the current time will be used. Due to delays in resource data collection + // and indexing, there is a volatile window during which running the same + // query may get different results. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // A list of asset types of which to take a snapshot for. For example: + // "google.compute.Disk". If specified, only matching assets will be returned. + // See [Introduction to Cloud Asset + // Inventory](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview) + // for all supported asset types. + AssetTypes []string `protobuf:"bytes,3,rep,name=asset_types,json=assetTypes,proto3" json:"asset_types,omitempty"` + // Asset content type. If not specified, no content but the asset name will be + // returned. + ContentType ContentType `protobuf:"varint,4,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1beta1.ContentType" json:"content_type,omitempty"` + // Required. Output configuration indicating where the results will be output + // to. All results will be in newline delimited JSON format. 
+ OutputConfig *OutputConfig `protobuf:"bytes,5,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAssetsRequest) Reset() { *m = ExportAssetsRequest{} } +func (m *ExportAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*ExportAssetsRequest) ProtoMessage() {} +func (*ExportAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{0} +} +func (m *ExportAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAssetsRequest.Unmarshal(m, b) +} +func (m *ExportAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *ExportAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAssetsRequest.Merge(dst, src) +} +func (m *ExportAssetsRequest) XXX_Size() int { + return xxx_messageInfo_ExportAssetsRequest.Size(m) +} +func (m *ExportAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAssetsRequest proto.InternalMessageInfo + +func (m *ExportAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ExportAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ExportAssetsRequest) GetAssetTypes() []string { + if m != nil { + return m.AssetTypes + } + return nil +} + +func (m *ExportAssetsRequest) GetContentType() ContentType { + if m != nil { + return m.ContentType + } + return ContentType_CONTENT_TYPE_UNSPECIFIED +} + +func (m *ExportAssetsRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The export asset response. This message is returned by the +// [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] +// method in the returned +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field. +type ExportAssetsResponse struct { + // Time the snapshot was taken. + ReadTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Output configuration indicating where the results were output to. + // All results are in JSON format. 
+ OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAssetsResponse) Reset() { *m = ExportAssetsResponse{} } +func (m *ExportAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*ExportAssetsResponse) ProtoMessage() {} +func (*ExportAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{1} +} +func (m *ExportAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAssetsResponse.Unmarshal(m, b) +} +func (m *ExportAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *ExportAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAssetsResponse.Merge(dst, src) +} +func (m *ExportAssetsResponse) XXX_Size() int { + return xxx_messageInfo_ExportAssetsResponse.Size(m) +} +func (m *ExportAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAssetsResponse proto.InternalMessageInfo + +func (m *ExportAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ExportAssetsResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Batch get assets history request. +type BatchGetAssetsHistoryRequest struct { + // Required. The relative name of the root asset. It can only be an + // organization number (such as "organizations/123"), a project ID (such as + // "projects/my-project-id")", or a project number (such as "projects/12345"). + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A list of the full names of the assets. For example: + // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. + // See [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // for more info. + // + // The request becomes a no-op if the asset name list is empty, and the max + // size of the asset name list is 100 in one request. + AssetNames []string `protobuf:"bytes,2,rep,name=asset_names,json=assetNames,proto3" json:"asset_names,omitempty"` + // Required. The content type. + ContentType ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,proto3,enum=google.cloud.asset.v1beta1.ContentType" json:"content_type,omitempty"` + // Optional. The time window for the asset history. Both start_time and + // end_time are optional and if set, it must be after 2018-10-02 UTC. If + // end_time is not set, it is default to current timestamp. If start_time is + // not set, the snapshot of the assets at end_time will be returned. The + // returned results contain all temporal assets whose time window overlap with + // read_time_window. 
+ ReadTimeWindow *TimeWindow `protobuf:"bytes,4,opt,name=read_time_window,json=readTimeWindow,proto3" json:"read_time_window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetAssetsHistoryRequest) Reset() { *m = BatchGetAssetsHistoryRequest{} } +func (m *BatchGetAssetsHistoryRequest) String() string { return proto.CompactTextString(m) } +func (*BatchGetAssetsHistoryRequest) ProtoMessage() {} +func (*BatchGetAssetsHistoryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{2} +} +func (m *BatchGetAssetsHistoryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Unmarshal(m, b) +} +func (m *BatchGetAssetsHistoryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Marshal(b, m, deterministic) +} +func (dst *BatchGetAssetsHistoryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetAssetsHistoryRequest.Merge(dst, src) +} +func (m *BatchGetAssetsHistoryRequest) XXX_Size() int { + return xxx_messageInfo_BatchGetAssetsHistoryRequest.Size(m) +} +func (m *BatchGetAssetsHistoryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetAssetsHistoryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetAssetsHistoryRequest proto.InternalMessageInfo + +func (m *BatchGetAssetsHistoryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchGetAssetsHistoryRequest) GetAssetNames() []string { + if m != nil { + return m.AssetNames + } + return nil +} + +func (m *BatchGetAssetsHistoryRequest) GetContentType() ContentType { + if m != nil { + return m.ContentType + } + return ContentType_CONTENT_TYPE_UNSPECIFIED +} + +func (m *BatchGetAssetsHistoryRequest) GetReadTimeWindow() *TimeWindow { + if m != nil { + return m.ReadTimeWindow + } + return nil +} + +// Batch get assets history response. +type BatchGetAssetsHistoryResponse struct { + // A list of assets with valid time windows. 
+ Assets []*TemporalAsset `protobuf:"bytes,1,rep,name=assets,proto3" json:"assets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetAssetsHistoryResponse) Reset() { *m = BatchGetAssetsHistoryResponse{} } +func (m *BatchGetAssetsHistoryResponse) String() string { return proto.CompactTextString(m) } +func (*BatchGetAssetsHistoryResponse) ProtoMessage() {} +func (*BatchGetAssetsHistoryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{3} +} +func (m *BatchGetAssetsHistoryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Unmarshal(m, b) +} +func (m *BatchGetAssetsHistoryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Marshal(b, m, deterministic) +} +func (dst *BatchGetAssetsHistoryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetAssetsHistoryResponse.Merge(dst, src) +} +func (m *BatchGetAssetsHistoryResponse) XXX_Size() int { + return xxx_messageInfo_BatchGetAssetsHistoryResponse.Size(m) +} +func (m *BatchGetAssetsHistoryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetAssetsHistoryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetAssetsHistoryResponse proto.InternalMessageInfo + +func (m *BatchGetAssetsHistoryResponse) GetAssets() []*TemporalAsset { + if m != nil { + return m.Assets + } + return nil +} + +// Output configuration for export assets destination. +type OutputConfig struct { + // Asset export destination. + // + // Types that are valid to be assigned to Destination: + // *OutputConfig_GcsDestination + Destination isOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{4} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +type isOutputConfig_Destination interface { + isOutputConfig_Destination() +} + +type OutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} + +func (m *OutputConfig) GetDestination() isOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ + (*OutputConfig_GcsDestination)(nil), + } +} + +func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsDestination{msg} + return true, err + default: + return false, nil + } +} + +func _OutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Storage location. +type GcsDestination struct { + // Required. + // + // Types that are valid to be assigned to ObjectUri: + // *GcsDestination_Uri + ObjectUri isGcsDestination_ObjectUri `protobuf_oneof:"object_uri"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_service_c557f498426d873d, []int{5} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +type isGcsDestination_ObjectUri interface { + isGcsDestination_ObjectUri() +} + +type GcsDestination_Uri struct { + Uri string `protobuf:"bytes,1,opt,name=uri,proto3,oneof"` +} + +func (*GcsDestination_Uri) isGcsDestination_ObjectUri() {} + +func (m *GcsDestination) GetObjectUri() isGcsDestination_ObjectUri { + if m != nil { + return m.ObjectUri + } + return nil +} + +func (m *GcsDestination) GetUri() string { + if x, ok := m.GetObjectUri().(*GcsDestination_Uri); ok { + return x.Uri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GcsDestination) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GcsDestination_OneofMarshaler, _GcsDestination_OneofUnmarshaler, _GcsDestination_OneofSizer, []interface{}{ + (*GcsDestination_Uri)(nil), + } +} + +func _GcsDestination_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GcsDestination) + // object_uri + switch x := m.ObjectUri.(type) { + case *GcsDestination_Uri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uri) + case nil: + default: + return fmt.Errorf("GcsDestination.ObjectUri has unexpected type %T", x) + } + return nil +} + +func _GcsDestination_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GcsDestination) + switch tag { + case 1: // object_uri.uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ObjectUri = &GcsDestination_Uri{x} + return true, err + default: + return false, nil + } +} + +func _GcsDestination_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GcsDestination) + // object_uri + switch x := m.ObjectUri.(type) { + case *GcsDestination_Uri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uri))) + n += len(x.Uri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ExportAssetsRequest)(nil), "google.cloud.asset.v1beta1.ExportAssetsRequest") + proto.RegisterType((*ExportAssetsResponse)(nil), "google.cloud.asset.v1beta1.ExportAssetsResponse") + proto.RegisterType((*BatchGetAssetsHistoryRequest)(nil), "google.cloud.asset.v1beta1.BatchGetAssetsHistoryRequest") + proto.RegisterType((*BatchGetAssetsHistoryResponse)(nil), "google.cloud.asset.v1beta1.BatchGetAssetsHistoryResponse") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.asset.v1beta1.OutputConfig") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.asset.v1beta1.GcsDestination") + proto.RegisterEnum("google.cloud.asset.v1beta1.ContentType", ContentType_name, ContentType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AssetServiceClient is the client API for AssetService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AssetServiceClient interface { + // Exports assets with time and resource types to a given Cloud Storage + // location. The output format is newline-delimited JSON. + // This API implements the + // [google.longrunning.Operation][google.longrunning.Operation] API allowing + // you to keep track of the export. + ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Batch gets the update history of assets that overlap a time window. + // For RESOURCE content, this API outputs history with asset in both + // non-delete or deleted status. + // For IAM_POLICY content, this API outputs history when the asset and its + // attached IAM POLICY both exist. This can create gaps in the output history. 
+ BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) +} + +type assetServiceClient struct { + cc *grpc.ClientConn +} + +func NewAssetServiceClient(cc *grpc.ClientConn) AssetServiceClient { + return &assetServiceClient{cc} +} + +func (c *assetServiceClient) ExportAssets(ctx context.Context, in *ExportAssetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.asset.v1beta1.AssetService/ExportAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *assetServiceClient) BatchGetAssetsHistory(ctx context.Context, in *BatchGetAssetsHistoryRequest, opts ...grpc.CallOption) (*BatchGetAssetsHistoryResponse, error) { + out := new(BatchGetAssetsHistoryResponse) + err := c.cc.Invoke(ctx, "/google.cloud.asset.v1beta1.AssetService/BatchGetAssetsHistory", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AssetServiceServer is the server API for AssetService service. +type AssetServiceServer interface { + // Exports assets with time and resource types to a given Cloud Storage + // location. The output format is newline-delimited JSON. + // This API implements the + // [google.longrunning.Operation][google.longrunning.Operation] API allowing + // you to keep track of the export. + ExportAssets(context.Context, *ExportAssetsRequest) (*longrunning.Operation, error) + // Batch gets the update history of assets that overlap a time window. + // For RESOURCE content, this API outputs history with asset in both + // non-delete or deleted status. + // For IAM_POLICY content, this API outputs history when the asset and its + // attached IAM POLICY both exist. This can create gaps in the output history. 
+ BatchGetAssetsHistory(context.Context, *BatchGetAssetsHistoryRequest) (*BatchGetAssetsHistoryResponse, error) +} + +func RegisterAssetServiceServer(s *grpc.Server, srv AssetServiceServer) { + s.RegisterService(&_AssetService_serviceDesc, srv) +} + +func _AssetService_ExportAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).ExportAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.asset.v1beta1.AssetService/ExportAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).ExportAssets(ctx, req.(*ExportAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AssetService_BatchGetAssetsHistory_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchGetAssetsHistoryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.asset.v1beta1.AssetService/BatchGetAssetsHistory", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AssetServiceServer).BatchGetAssetsHistory(ctx, req.(*BatchGetAssetsHistoryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AssetService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.asset.v1beta1.AssetService", + HandlerType: (*AssetServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ExportAssets", + Handler: _AssetService_ExportAssets_Handler, + }, + { + MethodName: "BatchGetAssetsHistory", + Handler: _AssetService_BatchGetAssetsHistory_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/asset/v1beta1/asset_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/asset/v1beta1/asset_service.proto", fileDescriptor_asset_service_c557f498426d873d) +} + +var fileDescriptor_asset_service_c557f498426d873d = []byte{ + // 793 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcd, 0x6e, 0xd3, 0x4a, + 0x14, 0xae, 0x9d, 0xde, 0xaa, 0x9d, 0xa4, 0xb9, 0xb9, 0x73, 0x7f, 0x64, 0x45, 0xed, 0x6d, 0x64, + 0x24, 0x9a, 0x46, 0xc2, 0x56, 0xd3, 0x45, 0x69, 0x11, 0x42, 0x4d, 0x1a, 0xda, 0x20, 0x9a, 0x44, + 0x6e, 0x5a, 0xd4, 0xaa, 0x92, 0xe5, 0x38, 0x53, 0x63, 0x94, 0xcc, 0x18, 0xcf, 0xb8, 0xa5, 0x20, + 0x36, 0xf0, 0x04, 0xc0, 0x8a, 0x0d, 0x0b, 0x96, 0xac, 0x78, 0x0e, 0xb6, 0xbc, 0x02, 0xaf, 0xc0, + 0x86, 0x15, 0xf2, 0x8c, 0xd3, 0x3a, 0x4a, 0xea, 0x0a, 0xba, 0x3c, 0x33, 0xdf, 0xf7, 0x9d, 0x33, + 0xdf, 0x99, 0x39, 0x03, 0x34, 0x87, 0x10, 0xa7, 0x87, 0x74, 0xbb, 0x47, 0x82, 0xae, 0x6e, 0x51, + 0x8a, 0x98, 0x7e, 0xb2, 0xdc, 0x41, 0xcc, 0x5a, 0x16, 0x91, 0x49, 0x91, 0x7f, 0xe2, 0xda, 0x48, + 0xf3, 0x7c, 0xc2, 0x08, 0xcc, 0x0b, 0xbc, 0xc6, 0xf1, 0x1a, 0x47, 0x68, 0x11, 0x3e, 0x3f, 0x17, + 0x69, 0x59, 0x9e, 0xab, 0x5b, 0x18, 0x13, 0x66, 0x31, 0x97, 0x60, 0x2a, 0x98, 0xf9, 0xc5, 0xab, + 0x32, 0x0d, 0x80, 0x37, 0x22, 0x60, 0x8f, 0x60, 0xc7, 0x0f, 0x30, 0x76, 0xb1, 0xa3, 0x13, 0x0f, + 0xf9, 0x43, 0x6a, 0x0b, 0x11, 0x88, 0x47, 0x9d, 0xe0, 0x58, 0x67, 0x6e, 0x1f, 
0x51, 0x66, 0xf5, + 0x3d, 0x01, 0x50, 0x3f, 0xca, 0xe0, 0xef, 0xda, 0x33, 0x8f, 0xf8, 0x6c, 0x83, 0x8b, 0x1b, 0xe8, + 0x69, 0x80, 0x28, 0x83, 0xff, 0x81, 0x29, 0xcf, 0xf2, 0x11, 0x66, 0x8a, 0x54, 0x90, 0x8a, 0x33, + 0x46, 0x14, 0xc1, 0x55, 0x30, 0xe3, 0x23, 0xab, 0x6b, 0x86, 0x3a, 0x8a, 0x5c, 0x90, 0x8a, 0xe9, + 0x72, 0x3e, 0x32, 0x47, 0x1b, 0x24, 0xd1, 0xda, 0x83, 0x24, 0xc6, 0x74, 0x08, 0x0e, 0x43, 0xb8, + 0x00, 0xd2, 0xc2, 0x28, 0x76, 0xe6, 0x21, 0xaa, 0xa4, 0x0a, 0xa9, 0xe2, 0x8c, 0x01, 0xf8, 0x52, + 0x3b, 0x5c, 0x81, 0x0f, 0x40, 0xc6, 0x26, 0x98, 0x21, 0x2c, 0x20, 0xca, 0x64, 0x41, 0x2a, 0x66, + 0xcb, 0x8b, 0xda, 0xe5, 0x4e, 0x6a, 0x55, 0x81, 0x0f, 0xf9, 0x46, 0xda, 0xbe, 0x08, 0xe0, 0x0e, + 0x98, 0x25, 0x01, 0xf3, 0x02, 0x66, 0xda, 0x04, 0x1f, 0xbb, 0x8e, 0xf2, 0x07, 0xaf, 0xb4, 0x98, + 0x24, 0xd6, 0xe4, 0x84, 0x2a, 0xc7, 0x1b, 0x19, 0x12, 0x8b, 0xd4, 0x0f, 0x12, 0xf8, 0x67, 0xd8, + 0x24, 0xea, 0x11, 0x4c, 0xd1, 0xb0, 0x1b, 0xd2, 0x2f, 0xb8, 0x31, 0x52, 0xa0, 0x7c, 0xad, 0x02, + 0xbf, 0x4b, 0x60, 0xae, 0x62, 0x31, 0xfb, 0xf1, 0x16, 0x8a, 0x4a, 0xdc, 0x76, 0x29, 0x23, 0xfe, + 0xd9, 0x55, 0xed, 0x3c, 0xef, 0x0a, 0xb6, 0xfa, 0x88, 0x2a, 0x72, 0xac, 0x2b, 0x8d, 0x70, 0x65, + 0xa4, 0x2b, 0xa9, 0x6b, 0x74, 0xa5, 0x05, 0x72, 0xe7, 0x6e, 0x99, 0xa7, 0x2e, 0xee, 0x92, 0x53, + 0xde, 0xe5, 0x74, 0xf9, 0x66, 0x92, 0x5e, 0x68, 0xd8, 0x23, 0x8e, 0x36, 0xb2, 0x03, 0x03, 0x45, + 0xac, 0x76, 0xc0, 0xfc, 0x25, 0xc7, 0x8e, 0x1a, 0xb4, 0x01, 0xa6, 0xc4, 0xa3, 0x51, 0xa4, 0x42, + 0xaa, 0x98, 0x2e, 0x2f, 0x25, 0x26, 0x42, 0x7d, 0x8f, 0xf8, 0x56, 0x8f, 0x4b, 0x19, 0x11, 0x51, + 0x65, 0x20, 0x13, 0x77, 0x1e, 0xee, 0x81, 0x3f, 0x1d, 0x9b, 0x9a, 0x5d, 0x44, 0x99, 0x8b, 0xf9, + 0x63, 0x8b, 0x3a, 0x5f, 0x4a, 0xd2, 0xde, 0xb2, 0xe9, 0xe6, 0x05, 0x63, 0x7b, 0xc2, 0xc8, 0x3a, + 0x43, 0x2b, 0x95, 0x59, 0x90, 0x8e, 0x49, 0xaa, 0x65, 0x90, 0x1d, 0xa6, 0x40, 0x08, 0x52, 0x81, + 0xef, 0x8a, 0xfe, 0x6d, 0x4f, 0x18, 0x61, 0x50, 0xc9, 0x00, 0x40, 0x3a, 0x4f, 0x90, 0xcd, 0xcc, + 0xc0, 0x77, 0x4b, 0x75, 0x90, 0x8e, 0x79, 0x0f, 0xe7, 0x80, 0x52, 0x6d, 0x36, 0xda, 0xb5, 0x46, + 0xdb, 0x6c, 0x1f, 0xb4, 0x6a, 0xe6, 0x5e, 0x63, 0xb7, 0x55, 0xab, 0xd6, 0xef, 0xd7, 0x6b, 0x9b, + 0xb9, 0x09, 0x98, 0x01, 0xd3, 0x46, 0x6d, 0xb7, 0xb9, 0x67, 0x54, 0x6b, 0x39, 0x09, 0x66, 0x01, + 0xa8, 0x6f, 0xec, 0x98, 0xad, 0xe6, 0xc3, 0x7a, 0xf5, 0x20, 0x27, 0x97, 0xdf, 0x4c, 0x82, 0x0c, + 0xb7, 0x61, 0x57, 0x8c, 0x35, 0xf8, 0x43, 0x02, 0x99, 0xf8, 0x13, 0x80, 0x7a, 0xd2, 0x69, 0xc7, + 0x4c, 0x94, 0xfc, 0xfc, 0x80, 0x10, 0x1b, 0x58, 0x5a, 0x73, 0x30, 0xb0, 0xd4, 0xf7, 0xd2, 0xab, + 0xaf, 0xdf, 0xde, 0xc9, 0x6f, 0x25, 0x75, 0xe9, 0x7c, 0xde, 0xbd, 0x10, 0xb7, 0xf4, 0xae, 0xe7, + 0x93, 0xf0, 0x90, 0x54, 0x2f, 0xbd, 0x5c, 0x47, 0x31, 0xe9, 0x75, 0xa9, 0x74, 0x78, 0x4b, 0x2d, + 0x8e, 0xe0, 0x8f, 0x49, 0xaf, 0x8b, 0xfc, 0xb1, 0xf0, 0x15, 0x55, 0x1b, 0x81, 0x13, 0xdf, 0xb1, + 0xb0, 0xfb, 0x5c, 0x0c, 0xce, 0x31, 0x24, 0xf8, 0x5a, 0x06, 0xff, 0x8e, 0xbd, 0x67, 0xf0, 0x76, + 0x92, 0x0b, 0x49, 0x2f, 0x32, 0xbf, 0xf6, 0x1b, 0x4c, 0x71, 0xa9, 0xd5, 0x80, 0x3b, 0x45, 0x60, + 0x39, 0xd1, 0xa8, 0xce, 0x38, 0x8d, 0xc3, 0x35, 0xb8, 0x7a, 0xf5, 0xf9, 0xc7, 0x52, 0x2b, 0x9f, + 0x25, 0xf0, 0xbf, 0x4d, 0xfa, 0x09, 0x75, 0x57, 0xfe, 0x8a, 0xdf, 0x99, 0x56, 0x38, 0x00, 0x5b, + 0xd2, 0xe1, 0xbd, 0x88, 0xe0, 0x90, 0x9e, 0x85, 0x1d, 0x8d, 0xf8, 0x8e, 0xee, 0x20, 0xcc, 0xc7, + 0xa3, 0x2e, 0xb6, 0x2c, 0xcf, 0xa5, 0xe3, 0x3e, 0xbc, 0x3b, 0x3c, 0xfa, 0x24, 0xe7, 0xb7, 0x84, + 0x42, 0x95, 0xa7, 0xe4, 0x39, 0xb4, 0xfd, 0xe5, 0x4a, 0x08, 0xf9, 0x32, 0xd8, 0x3c, 0xe2, 0x9b, + 0x47, 
0x7c, 0xf3, 0x68, 0x5f, 0xf0, 0x3b, 0x53, 0x3c, 0xcb, 0xca, 0xcf, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xc4, 0xf2, 0x88, 0x01, 0xbf, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/assets.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/assets.pb.go new file mode 100644 index 0000000..b54022d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/asset/v1beta1/assets.pb.go @@ -0,0 +1,362 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/asset/v1beta1/assets.proto + +package asset // import "google.golang.org/genproto/googleapis/cloud/asset/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Temporal asset. In addition to the asset, the temporal asset includes the +// status of the asset and valid from and to time of it. +type TemporalAsset struct { + // The time window when the asset data and state was observed. + Window *TimeWindow `protobuf:"bytes,1,opt,name=window,proto3" json:"window,omitempty"` + // If the asset is deleted or not. + Deleted bool `protobuf:"varint,2,opt,name=deleted,proto3" json:"deleted,omitempty"` + // Asset. + Asset *Asset `protobuf:"bytes,3,opt,name=asset,proto3" json:"asset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TemporalAsset) Reset() { *m = TemporalAsset{} } +func (m *TemporalAsset) String() string { return proto.CompactTextString(m) } +func (*TemporalAsset) ProtoMessage() {} +func (*TemporalAsset) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_534010dc2a646cb2, []int{0} +} +func (m *TemporalAsset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TemporalAsset.Unmarshal(m, b) +} +func (m *TemporalAsset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TemporalAsset.Marshal(b, m, deterministic) +} +func (dst *TemporalAsset) XXX_Merge(src proto.Message) { + xxx_messageInfo_TemporalAsset.Merge(dst, src) +} +func (m *TemporalAsset) XXX_Size() int { + return xxx_messageInfo_TemporalAsset.Size(m) +} +func (m *TemporalAsset) XXX_DiscardUnknown() { + xxx_messageInfo_TemporalAsset.DiscardUnknown(m) +} + +var xxx_messageInfo_TemporalAsset proto.InternalMessageInfo + +func (m *TemporalAsset) GetWindow() *TimeWindow { + if m != nil { + return m.Window + } + return nil +} + +func (m *TemporalAsset) GetDeleted() bool { + if m != nil { + return m.Deleted + } + return false +} + +func (m *TemporalAsset) GetAsset() *Asset { + if m != nil { + return m.Asset + } + return nil +} + +// A time window of (start_time, end_time]. +type TimeWindow struct { + // Start time of the time window (exclusive). 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of the time window (inclusive). + // Current timestamp if not specified. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeWindow) Reset() { *m = TimeWindow{} } +func (m *TimeWindow) String() string { return proto.CompactTextString(m) } +func (*TimeWindow) ProtoMessage() {} +func (*TimeWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_534010dc2a646cb2, []int{1} +} +func (m *TimeWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeWindow.Unmarshal(m, b) +} +func (m *TimeWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeWindow.Marshal(b, m, deterministic) +} +func (dst *TimeWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeWindow.Merge(dst, src) +} +func (m *TimeWindow) XXX_Size() int { + return xxx_messageInfo_TimeWindow.Size(m) +} +func (m *TimeWindow) XXX_DiscardUnknown() { + xxx_messageInfo_TimeWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeWindow proto.InternalMessageInfo + +func (m *TimeWindow) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimeWindow) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Cloud asset. This includes all Google Cloud Platform resources, +// Cloud IAM policies, and other non-GCP assets. +type Asset struct { + // The full name of the asset. For example: + // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. + // See [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // for more information. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Type of the asset. Example: "google.compute.Disk". + AssetType string `protobuf:"bytes,2,opt,name=asset_type,json=assetType,proto3" json:"asset_type,omitempty"` + // Representation of the resource. + Resource *Resource `protobuf:"bytes,3,opt,name=resource,proto3" json:"resource,omitempty"` + // Representation of the actual Cloud IAM policy set on a cloud resource. For + // each resource, there must be at most one Cloud IAM policy set on it. 
+ IamPolicy *v1.Policy `protobuf:"bytes,4,opt,name=iam_policy,json=iamPolicy,proto3" json:"iam_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset) Reset() { *m = Asset{} } +func (m *Asset) String() string { return proto.CompactTextString(m) } +func (*Asset) ProtoMessage() {} +func (*Asset) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_534010dc2a646cb2, []int{2} +} +func (m *Asset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset.Unmarshal(m, b) +} +func (m *Asset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset.Marshal(b, m, deterministic) +} +func (dst *Asset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset.Merge(dst, src) +} +func (m *Asset) XXX_Size() int { + return xxx_messageInfo_Asset.Size(m) +} +func (m *Asset) XXX_DiscardUnknown() { + xxx_messageInfo_Asset.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset proto.InternalMessageInfo + +func (m *Asset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Asset) GetAssetType() string { + if m != nil { + return m.AssetType + } + return "" +} + +func (m *Asset) GetResource() *Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *Asset) GetIamPolicy() *v1.Policy { + if m != nil { + return m.IamPolicy + } + return nil +} + +// Representation of a cloud resource. +type Resource struct { + // The API version. Example: "v1". + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + // The URL of the discovery document containing the resource's JSON schema. + // For example: + // `"https://www.googleapis.com/discovery/v1/apis/compute/v1/rest"`. + // It will be left unspecified for resources without a discovery-based API, + // such as Cloud Bigtable. + DiscoveryDocumentUri string `protobuf:"bytes,2,opt,name=discovery_document_uri,json=discoveryDocumentUri,proto3" json:"discovery_document_uri,omitempty"` + // The JSON schema name listed in the discovery document. + // Example: "Project". It will be left unspecified for resources (such as + // Cloud Bigtable) without a discovery-based API. + DiscoveryName string `protobuf:"bytes,3,opt,name=discovery_name,json=discoveryName,proto3" json:"discovery_name,omitempty"` + // The REST URL for accessing the resource. An HTTP GET operation using this + // URL returns the resource itself. + // Example: + // `https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`. + // It will be left unspecified for resources without a REST API. + ResourceUrl string `protobuf:"bytes,4,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + // The full name of the immediate parent of this resource. See + // [Resource + // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) + // for more information. + // + // For GCP assets, it is the parent resource defined in the [Cloud IAM policy + // hierarchy](https://cloud.google.com/iam/docs/overview#policy_hierarchy). + // For example: + // `"//cloudresourcemanager.googleapis.com/projects/my_project_123"`. + // + // For third-party assets, it is up to the users to define. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + // The content of the resource, in which some sensitive fields are scrubbed + // away and may not be present. 
+ Data *_struct.Struct `protobuf:"bytes,6,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { + return fileDescriptor_assets_534010dc2a646cb2, []int{3} +} +func (m *Resource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resource.Unmarshal(m, b) +} +func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resource.Marshal(b, m, deterministic) +} +func (dst *Resource) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resource.Merge(dst, src) +} +func (m *Resource) XXX_Size() int { + return xxx_messageInfo_Resource.Size(m) +} +func (m *Resource) XXX_DiscardUnknown() { + xxx_messageInfo_Resource.DiscardUnknown(m) +} + +var xxx_messageInfo_Resource proto.InternalMessageInfo + +func (m *Resource) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Resource) GetDiscoveryDocumentUri() string { + if m != nil { + return m.DiscoveryDocumentUri + } + return "" +} + +func (m *Resource) GetDiscoveryName() string { + if m != nil { + return m.DiscoveryName + } + return "" +} + +func (m *Resource) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *Resource) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Resource) GetData() *_struct.Struct { + if m != nil { + return m.Data + } + return nil +} + +func init() { + proto.RegisterType((*TemporalAsset)(nil), "google.cloud.asset.v1beta1.TemporalAsset") + proto.RegisterType((*TimeWindow)(nil), "google.cloud.asset.v1beta1.TimeWindow") + proto.RegisterType((*Asset)(nil), "google.cloud.asset.v1beta1.Asset") + proto.RegisterType((*Resource)(nil), "google.cloud.asset.v1beta1.Resource") +} + +func init() { + proto.RegisterFile("google/cloud/asset/v1beta1/assets.proto", fileDescriptor_assets_534010dc2a646cb2) +} + +var fileDescriptor_assets_534010dc2a646cb2 = []byte{ + // 551 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xdb, 0x6a, 0xd4, 0x40, + 0x18, 0x26, 0xed, 0xee, 0x76, 0xf3, 0xd7, 0x7a, 0x31, 0x68, 0x8d, 0x4b, 0xd5, 0x76, 0xf1, 0x50, + 0x10, 0x12, 0x56, 0x2b, 0x22, 0x82, 0x87, 0x55, 0xf0, 0x4e, 0x96, 0x71, 0x5b, 0x41, 0x16, 0xc2, + 0x6c, 0x32, 0x86, 0x81, 0x64, 0x26, 0x4c, 0x26, 0x29, 0x7b, 0xe3, 0xc3, 0x78, 0xa7, 0x97, 0x3e, + 0x86, 0x6f, 0xe2, 0x5b, 0x48, 0xfe, 0x99, 0x6c, 0xa1, 0xea, 0x7a, 0x97, 0x6f, 0xbe, 0xc3, 0xfc, + 0xdf, 0xcc, 0x04, 0x1e, 0x64, 0x4a, 0x65, 0x39, 0x8f, 0x92, 0x5c, 0xd5, 0x69, 0xc4, 0xaa, 0x8a, + 0x9b, 0xa8, 0x99, 0x2c, 0xb9, 0x61, 0x13, 0x8b, 0xaa, 0xb0, 0xd4, 0xca, 0x28, 0x32, 0xb2, 0xc2, + 0x10, 0x85, 0x21, 0x52, 0xa1, 0x13, 0x8e, 0x0e, 0x5c, 0x08, 0x2b, 0x45, 0xc4, 0xa4, 0x54, 0x86, + 0x19, 0xa1, 0xa4, 0x73, 0x8e, 0x9c, 0x33, 0x12, 0xac, 0x88, 0x9a, 0x49, 0x54, 0xaa, 0x5c, 0x24, + 0x2b, 0xc7, 0xdd, 0x74, 0x1c, 0xa2, 0x65, 0xfd, 0x39, 0x62, 0xb2, 0xa3, 0x0e, 0x2e, 0x53, 0x95, + 0xd1, 0x75, 0x62, 0x1c, 0x7b, 0xe7, 0x32, 0x6b, 0x44, 0xc1, 0x2b, 0xc3, 0x8a, 0xd2, 0x0a, 0xc6, + 0x5f, 0x3d, 0xd8, 0x9b, 0xf3, 0xa2, 0x54, 0x9a, 0xe5, 0xaf, 0xdb, 0x69, 0xc9, 0x0b, 0x18, 0x9c, + 0x0b, 0x99, 0xaa, 0xf3, 0xc0, 0x3b, 0xf4, 0x8e, 0x77, 0x1f, 0xdd, 0x0f, 0xff, 0x5d, 0x29, 0x9c, + 0x8b, 0x82, 0x7f, 
0x44, 0x35, 0x75, 0x2e, 0x12, 0xc0, 0x4e, 0xca, 0x73, 0x6e, 0x78, 0x1a, 0x6c, + 0x1d, 0x7a, 0xc7, 0x43, 0xda, 0x41, 0xf2, 0x14, 0xfa, 0xe8, 0x0e, 0xb6, 0x31, 0xf8, 0x68, 0x53, + 0x30, 0xce, 0x42, 0xad, 0x7e, 0xfc, 0x05, 0xe0, 0x62, 0x23, 0xf2, 0x0c, 0xa0, 0x32, 0x4c, 0x9b, + 0xb8, 0xed, 0xe2, 0x86, 0x1c, 0x75, 0x59, 0x5d, 0x51, 0x9c, 0x0c, 0x8b, 0x52, 0x1f, 0xd5, 0x2d, + 0x26, 0x4f, 0x60, 0xc8, 0x65, 0x6a, 0x8d, 0x5b, 0xff, 0x35, 0xee, 0x70, 0x99, 0xb6, 0x68, 0xfc, + 0xc3, 0x83, 0xbe, 0x3d, 0x1c, 0x02, 0x3d, 0xc9, 0xdc, 0xae, 0x3e, 0xc5, 0x6f, 0x72, 0x0b, 0x00, + 0xc7, 0x8c, 0xcd, 0xaa, 0xb4, 0xb1, 0x3e, 0xf5, 0x71, 0x65, 0xbe, 0x2a, 0x39, 0x79, 0x05, 0x43, + 0xcd, 0x2b, 0x55, 0xeb, 0x84, 0xbb, 0xe2, 0x77, 0x37, 0x15, 0xa7, 0x4e, 0x4b, 0xd7, 0x2e, 0x72, + 0x02, 0x20, 0x58, 0x11, 0xdb, 0x17, 0x11, 0xf4, 0x30, 0xe3, 0x7a, 0x97, 0x21, 0x58, 0x11, 0x36, + 0x93, 0x70, 0x86, 0x24, 0xf5, 0x05, 0x2b, 0xec, 0xe7, 0xf8, 0x97, 0x07, 0xc3, 0x2e, 0xac, 0xbd, + 0x94, 0x86, 0xeb, 0x4a, 0x28, 0xe9, 0x46, 0xef, 0x20, 0x39, 0x81, 0xfd, 0x54, 0x54, 0x89, 0x6a, + 0xb8, 0x5e, 0xc5, 0xa9, 0x4a, 0xea, 0x82, 0x4b, 0x13, 0xd7, 0x5a, 0xb8, 0x26, 0xd7, 0xd6, 0xec, + 0x5b, 0x47, 0x9e, 0x6a, 0x41, 0xee, 0xc1, 0xd5, 0x0b, 0x17, 0x9e, 0xc8, 0x36, 0xaa, 0xf7, 0xd6, + 0xab, 0xef, 0xdb, 0xa3, 0x39, 0x82, 0x2b, 0x5d, 0x8b, 0xb8, 0xd6, 0x39, 0xce, 0xee, 0xd3, 0xdd, + 0x6e, 0xed, 0x54, 0xe7, 0x64, 0x1f, 0x06, 0x25, 0xd3, 0x5c, 0x9a, 0xa0, 0x8f, 0xa4, 0x43, 0xe4, + 0x21, 0xf4, 0x52, 0x66, 0x58, 0x30, 0xc0, 0xba, 0x37, 0xfe, 0xb8, 0xa6, 0x0f, 0xf8, 0xcc, 0x29, + 0x8a, 0xa6, 0xdf, 0x3c, 0xb8, 0x9d, 0xa8, 0x62, 0xc3, 0xb9, 0x4e, 0x01, 0x2f, 0x70, 0xd6, 0xda, + 0x67, 0xde, 0xa7, 0x97, 0x4e, 0x99, 0xa9, 0x9c, 0xc9, 0x2c, 0x54, 0x3a, 0x8b, 0x32, 0x2e, 0x31, + 0x3c, 0xb2, 0x14, 0x2b, 0x45, 0xf5, 0xb7, 0xdf, 0xfd, 0x39, 0xa2, 0xef, 0x5b, 0xa3, 0x77, 0x36, + 0xe1, 0x0d, 0xee, 0x85, 0xe1, 0xe1, 0xd9, 0x64, 0xda, 0x4a, 0x7e, 0x76, 0xe4, 0x02, 0xc9, 0x05, + 0x92, 0x8b, 0x33, 0xeb, 0x5f, 0x0e, 0x70, 0x97, 0xc7, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x07, + 0x7f, 0x2e, 0x7b, 0x53, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/audit/audit_log.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/audit/audit_log.pb.go new file mode 100644 index 0000000..df6296d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/audit/audit_log.pb.go @@ -0,0 +1,389 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/audit/audit_log.proto + +package audit // import "google.golang.org/genproto/googleapis/cloud/audit" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Common audit log format for Google Cloud Platform API operations. +type AuditLog struct { + // The name of the API service performing the operation. 
For example, + // `"datastore.googleapis.com"`. + ServiceName string `protobuf:"bytes,7,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + // The name of the service method or operation. + // For API calls, this should be the name of the API method. + // For example, + // + // "google.datastore.v1.Datastore.RunQuery" + // "google.logging.v1.LoggingService.DeleteLog" + MethodName string `protobuf:"bytes,8,opt,name=method_name,json=methodName,proto3" json:"method_name,omitempty"` + // The resource or collection that is the target of the operation. + // The name is a scheme-less URI, not including the API service name. + // For example: + // + // "shelves/SHELF_ID/books" + // "shelves/SHELF_ID/books/BOOK_ID" + ResourceName string `protobuf:"bytes,11,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The number of items returned from a List or Query API method, + // if applicable. + NumResponseItems int64 `protobuf:"varint,12,opt,name=num_response_items,json=numResponseItems,proto3" json:"num_response_items,omitempty"` + // The status of the overall operation. + Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + // Authentication information. + AuthenticationInfo *AuthenticationInfo `protobuf:"bytes,3,opt,name=authentication_info,json=authenticationInfo,proto3" json:"authentication_info,omitempty"` + // Authorization information. If there are multiple + // resources or permissions involved, then there is + // one AuthorizationInfo element for each {resource, permission} tuple. + AuthorizationInfo []*AuthorizationInfo `protobuf:"bytes,9,rep,name=authorization_info,json=authorizationInfo,proto3" json:"authorization_info,omitempty"` + // Metadata about the operation. + RequestMetadata *RequestMetadata `protobuf:"bytes,4,opt,name=request_metadata,json=requestMetadata,proto3" json:"request_metadata,omitempty"` + // The operation request. This may not include all request parameters, + // such as those that are too large, privacy-sensitive, or duplicated + // elsewhere in the log record. + // It should never include user-generated data, such as file contents. + // When the JSON object represented here has a proto equivalent, the proto + // name will be indicated in the `@type` property. + Request *_struct.Struct `protobuf:"bytes,16,opt,name=request,proto3" json:"request,omitempty"` + // The operation response. This may not include all response elements, + // such as those that are too large, privacy-sensitive, or duplicated + // elsewhere in the log record. + // It should never include user-generated data, such as file contents. + // When the JSON object represented here has a proto equivalent, the proto + // name will be indicated in the `@type` property. + Response *_struct.Struct `protobuf:"bytes,17,opt,name=response,proto3" json:"response,omitempty"` + // Other service-specific data about the request, response, and other + // activities. 
+ ServiceData *any.Any `protobuf:"bytes,15,opt,name=service_data,json=serviceData,proto3" json:"service_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditLog) Reset() { *m = AuditLog{} } +func (m *AuditLog) String() string { return proto.CompactTextString(m) } +func (*AuditLog) ProtoMessage() {} +func (*AuditLog) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_log_8adb2c14fea76ea3, []int{0} +} +func (m *AuditLog) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditLog.Unmarshal(m, b) +} +func (m *AuditLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditLog.Marshal(b, m, deterministic) +} +func (dst *AuditLog) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditLog.Merge(dst, src) +} +func (m *AuditLog) XXX_Size() int { + return xxx_messageInfo_AuditLog.Size(m) +} +func (m *AuditLog) XXX_DiscardUnknown() { + xxx_messageInfo_AuditLog.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditLog proto.InternalMessageInfo + +func (m *AuditLog) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *AuditLog) GetMethodName() string { + if m != nil { + return m.MethodName + } + return "" +} + +func (m *AuditLog) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *AuditLog) GetNumResponseItems() int64 { + if m != nil { + return m.NumResponseItems + } + return 0 +} + +func (m *AuditLog) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *AuditLog) GetAuthenticationInfo() *AuthenticationInfo { + if m != nil { + return m.AuthenticationInfo + } + return nil +} + +func (m *AuditLog) GetAuthorizationInfo() []*AuthorizationInfo { + if m != nil { + return m.AuthorizationInfo + } + return nil +} + +func (m *AuditLog) GetRequestMetadata() *RequestMetadata { + if m != nil { + return m.RequestMetadata + } + return nil +} + +func (m *AuditLog) GetRequest() *_struct.Struct { + if m != nil { + return m.Request + } + return nil +} + +func (m *AuditLog) GetResponse() *_struct.Struct { + if m != nil { + return m.Response + } + return nil +} + +func (m *AuditLog) GetServiceData() *any.Any { + if m != nil { + return m.ServiceData + } + return nil +} + +// Authentication information for the operation. +type AuthenticationInfo struct { + // The email address of the authenticated user making the request. 
+ PrincipalEmail string `protobuf:"bytes,1,opt,name=principal_email,json=principalEmail,proto3" json:"principal_email,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthenticationInfo) Reset() { *m = AuthenticationInfo{} } +func (m *AuthenticationInfo) String() string { return proto.CompactTextString(m) } +func (*AuthenticationInfo) ProtoMessage() {} +func (*AuthenticationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_log_8adb2c14fea76ea3, []int{1} +} +func (m *AuthenticationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthenticationInfo.Unmarshal(m, b) +} +func (m *AuthenticationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthenticationInfo.Marshal(b, m, deterministic) +} +func (dst *AuthenticationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthenticationInfo.Merge(dst, src) +} +func (m *AuthenticationInfo) XXX_Size() int { + return xxx_messageInfo_AuthenticationInfo.Size(m) +} +func (m *AuthenticationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AuthenticationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthenticationInfo proto.InternalMessageInfo + +func (m *AuthenticationInfo) GetPrincipalEmail() string { + if m != nil { + return m.PrincipalEmail + } + return "" +} + +// Authorization information for the operation. +type AuthorizationInfo struct { + // The resource being accessed, as a REST-style string. For example: + // + // bigquery.googlapis.com/projects/PROJECTID/datasets/DATASETID + Resource string `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + // The required IAM permission. + Permission string `protobuf:"bytes,2,opt,name=permission,proto3" json:"permission,omitempty"` + // Whether or not authorization for `resource` and `permission` + // was granted. + Granted bool `protobuf:"varint,3,opt,name=granted,proto3" json:"granted,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuthorizationInfo) Reset() { *m = AuthorizationInfo{} } +func (m *AuthorizationInfo) String() string { return proto.CompactTextString(m) } +func (*AuthorizationInfo) ProtoMessage() {} +func (*AuthorizationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_log_8adb2c14fea76ea3, []int{2} +} +func (m *AuthorizationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuthorizationInfo.Unmarshal(m, b) +} +func (m *AuthorizationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuthorizationInfo.Marshal(b, m, deterministic) +} +func (dst *AuthorizationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuthorizationInfo.Merge(dst, src) +} +func (m *AuthorizationInfo) XXX_Size() int { + return xxx_messageInfo_AuthorizationInfo.Size(m) +} +func (m *AuthorizationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AuthorizationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AuthorizationInfo proto.InternalMessageInfo + +func (m *AuthorizationInfo) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +func (m *AuthorizationInfo) GetPermission() string { + if m != nil { + return m.Permission + } + return "" +} + +func (m *AuthorizationInfo) GetGranted() bool { + if m != nil { + return m.Granted + } + return false +} + +// Metadata about the request. +type RequestMetadata struct { + // The IP address of the caller. 
+ CallerIp string `protobuf:"bytes,1,opt,name=caller_ip,json=callerIp,proto3" json:"caller_ip,omitempty"` + // The user agent of the caller. + // This information is not authenticated and should be treated accordingly. + // For example: + // + // + `google-api-python-client/1.4.0`: + // The request was made by the Google API client for Python. + // + `Cloud SDK Command Line Tool apitools-client/1.0 gcloud/0.9.62`: + // The request was made by the Google Cloud SDK CLI (gcloud). + // + `AppEngine-Google; (+http://code.google.com/appengine; appid: + // s~my-project`: + // The request was made from the `my-project` App Engine app. + CallerSuppliedUserAgent string `protobuf:"bytes,2,opt,name=caller_supplied_user_agent,json=callerSuppliedUserAgent,proto3" json:"caller_supplied_user_agent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestMetadata) Reset() { *m = RequestMetadata{} } +func (m *RequestMetadata) String() string { return proto.CompactTextString(m) } +func (*RequestMetadata) ProtoMessage() {} +func (*RequestMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_log_8adb2c14fea76ea3, []int{3} +} +func (m *RequestMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestMetadata.Unmarshal(m, b) +} +func (m *RequestMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestMetadata.Marshal(b, m, deterministic) +} +func (dst *RequestMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestMetadata.Merge(dst, src) +} +func (m *RequestMetadata) XXX_Size() int { + return xxx_messageInfo_RequestMetadata.Size(m) +} +func (m *RequestMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_RequestMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestMetadata proto.InternalMessageInfo + +func (m *RequestMetadata) GetCallerIp() string { + if m != nil { + return m.CallerIp + } + return "" +} + +func (m *RequestMetadata) GetCallerSuppliedUserAgent() string { + if m != nil { + return m.CallerSuppliedUserAgent + } + return "" +} + +func init() { + proto.RegisterType((*AuditLog)(nil), "google.cloud.audit.AuditLog") + proto.RegisterType((*AuthenticationInfo)(nil), "google.cloud.audit.AuthenticationInfo") + proto.RegisterType((*AuthorizationInfo)(nil), "google.cloud.audit.AuthorizationInfo") + proto.RegisterType((*RequestMetadata)(nil), "google.cloud.audit.RequestMetadata") +} + +func init() { + proto.RegisterFile("google/cloud/audit/audit_log.proto", fileDescriptor_audit_log_8adb2c14fea76ea3) +} + +var fileDescriptor_audit_log_8adb2c14fea76ea3 = []byte{ + // 576 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0x5f, 0x6f, 0xd3, 0x30, + 0x14, 0xc5, 0x55, 0x36, 0x6d, 0xad, 0xbb, 0xd1, 0xd6, 0x20, 0x1a, 0xca, 0x04, 0xa5, 0x13, 0x50, + 0x21, 0x94, 0x88, 0xed, 0x61, 0x0f, 0x13, 0x0f, 0x9d, 0xe0, 0xa1, 0x12, 0x4c, 0x53, 0x0a, 0x42, + 0xe2, 0x25, 0x72, 0x93, 0xdb, 0xcc, 0x22, 0xb1, 0x8d, 0xff, 0x20, 0x8d, 0xef, 0xcc, 0x77, 0x40, + 0xbd, 0x71, 0x4a, 0xd7, 0x0e, 0x5e, 0x2c, 0xf9, 0x9c, 0xdf, 0xbd, 0x76, 0xaf, 0x4f, 0x43, 0x46, + 0xb9, 0x94, 0x79, 0x01, 0x51, 0x5a, 0x48, 0x97, 0x45, 0xcc, 0x65, 0xdc, 0x56, 0x6b, 0x52, 0xc8, + 0x3c, 0x54, 0x5a, 0x5a, 0x49, 0x69, 0xc5, 0x84, 0xc8, 0x84, 0xe8, 0x0e, 0x8e, 0x7c, 0x1d, 0x53, + 0x3c, 0x62, 0x42, 0x48, 0xcb, 0x2c, 0x97, 0xc2, 0x54, 0x15, 0x83, 0xc7, 0xde, 0xc5, 0xdd, 0xdc, + 0x2d, 0x22, 0x26, 0x6e, 0xbc, 0x75, 0xb4, 0x69, 0x19, 
0xab, 0x5d, 0x6a, 0xbd, 0xdb, 0xf7, 0xae, + 0x56, 0x69, 0x64, 0x2c, 0xb3, 0xce, 0x77, 0x1c, 0xfd, 0xde, 0x25, 0xcd, 0xc9, 0xf2, 0xe4, 0x8f, + 0x32, 0xa7, 0xcf, 0xc9, 0x81, 0x01, 0xfd, 0x93, 0xa7, 0x90, 0x08, 0x56, 0x42, 0xb0, 0x3f, 0x6c, + 0x8c, 0x5b, 0x71, 0xdb, 0x6b, 0x97, 0xac, 0x04, 0xfa, 0x8c, 0xb4, 0x4b, 0xb0, 0xd7, 0x32, 0xab, + 0x88, 0x26, 0x12, 0xa4, 0x92, 0x10, 0x38, 0x26, 0x87, 0x1a, 0x8c, 0x74, 0xba, 0x6e, 0xd2, 0x46, + 0xe4, 0xa0, 0x16, 0x11, 0x7a, 0x43, 0xa8, 0x70, 0x65, 0xa2, 0xc1, 0x28, 0x29, 0x0c, 0x24, 0xdc, + 0x42, 0x69, 0x82, 0x83, 0x61, 0x63, 0xbc, 0x13, 0x77, 0x85, 0x2b, 0x63, 0x6f, 0x4c, 0x97, 0x3a, + 0x7d, 0x4d, 0xf6, 0xaa, 0x3b, 0x07, 0xf7, 0x86, 0x8d, 0x71, 0xfb, 0x84, 0x86, 0x7e, 0x70, 0x5a, + 0xa5, 0xe1, 0x0c, 0x9d, 0xd8, 0x13, 0xf4, 0x2b, 0x79, 0xc0, 0x9c, 0xbd, 0x06, 0x61, 0x79, 0x8a, + 0xa3, 0x4b, 0xb8, 0x58, 0xc8, 0x60, 0x07, 0x0b, 0x5f, 0x86, 0xdb, 0x13, 0x0f, 0x27, 0xb7, 0xf0, + 0xa9, 0x58, 0xc8, 0x98, 0xb2, 0x2d, 0x8d, 0x7e, 0x26, 0xa8, 0x4a, 0xcd, 0x7f, 0xad, 0xf5, 0x6d, + 0x0d, 0x77, 0xc6, 0xed, 0x93, 0x17, 0xff, 0xea, 0xbb, 0xa2, 0xb1, 0x6d, 0x8f, 0x6d, 0x4a, 0xf4, + 0x92, 0x74, 0x35, 0xfc, 0x70, 0x60, 0x6c, 0x52, 0x82, 0x65, 0x19, 0xb3, 0x2c, 0xd8, 0xc5, 0xbb, + 0x1e, 0xdf, 0xd5, 0x33, 0xae, 0xd8, 0x4f, 0x1e, 0x8d, 0x3b, 0xfa, 0xb6, 0x40, 0xdf, 0x92, 0x7d, + 0x2f, 0x05, 0x5d, 0x6c, 0xd3, 0xaf, 0xdb, 0xd4, 0xb9, 0x08, 0x67, 0x98, 0x8b, 0xb8, 0xe6, 0xe8, + 0x29, 0x69, 0xd6, 0xef, 0x10, 0xf4, 0xfe, 0x5f, 0xb3, 0x02, 0xe9, 0xd9, 0xdf, 0xa4, 0xe0, 0x9d, + 0x3b, 0x58, 0xf8, 0x70, 0xab, 0x70, 0x22, 0x6e, 0x56, 0xf9, 0x79, 0xcf, 0x2c, 0x1b, 0xbd, 0x23, + 0x74, 0x7b, 0xe0, 0xf4, 0x15, 0xe9, 0x28, 0xcd, 0x45, 0xca, 0x15, 0x2b, 0x12, 0x28, 0x19, 0x2f, + 0x82, 0x06, 0xc6, 0xe6, 0xfe, 0x4a, 0xfe, 0xb0, 0x54, 0x47, 0x9c, 0xf4, 0xb6, 0xe6, 0x4a, 0x07, + 0xf8, 0x0b, 0x30, 0x5d, 0xbe, 0x6c, 0xb5, 0xa7, 0x4f, 0x09, 0x51, 0xa0, 0x4b, 0x6e, 0x0c, 0x97, + 0x02, 0xf3, 0xd3, 0x8a, 0xd7, 0x14, 0x1a, 0x90, 0xfd, 0x5c, 0x33, 0x61, 0x21, 0xc3, 0x8c, 0x34, + 0xe3, 0x7a, 0x3b, 0xfa, 0x4e, 0x3a, 0x1b, 0xe3, 0xa6, 0x4f, 0x48, 0x2b, 0x65, 0x45, 0x01, 0x3a, + 0xe1, 0xaa, 0x3e, 0xa9, 0x12, 0xa6, 0x8a, 0x9e, 0x93, 0x81, 0x37, 0x8d, 0x53, 0xaa, 0xe0, 0x90, + 0x25, 0xce, 0x80, 0x4e, 0x58, 0x0e, 0xc2, 0xfa, 0x93, 0xfb, 0x15, 0x31, 0xf3, 0xc0, 0x17, 0x03, + 0x7a, 0xb2, 0xb4, 0x2f, 0xe6, 0xe4, 0x51, 0x2a, 0xcb, 0x3b, 0x9e, 0xfc, 0xe2, 0xb0, 0xfe, 0x77, + 0x5e, 0x2d, 0x67, 0x7a, 0xd5, 0xf8, 0x76, 0xe6, 0xa1, 0x5c, 0x16, 0x4c, 0xe4, 0xa1, 0xd4, 0x79, + 0x94, 0x83, 0xc0, 0x89, 0x47, 0x95, 0xc5, 0x14, 0x37, 0xeb, 0x1f, 0x9e, 0x73, 0x5c, 0xe7, 0x7b, + 0xc8, 0x9c, 0xfe, 0x09, 0x00, 0x00, 0xff, 0xff, 0x90, 0xe4, 0x37, 0xbf, 0x9b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/annotation_payload.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/annotation_payload.pb.go new file mode 100644 index 0000000..7510e6a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/annotation_payload.pb.go @@ -0,0 +1,403 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/annotation_payload.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Contains annotation information that is relevant to AutoML. +type AnnotationPayload struct { + // Output only . Additional information about the annotation + // specific to the AutoML domain. + // + // Types that are valid to be assigned to Detail: + // *AnnotationPayload_Translation + // *AnnotationPayload_Classification + // *AnnotationPayload_ImageObjectDetection + // *AnnotationPayload_VideoClassification + // *AnnotationPayload_TextExtraction + // *AnnotationPayload_TextSentiment + // *AnnotationPayload_Tables + Detail isAnnotationPayload_Detail `protobuf_oneof:"detail"` + // Output only . The resource ID of the annotation spec that + // this annotation pertains to. The annotation spec comes from either an + // ancestor dataset, or the dataset that was used to train the model in use. + AnnotationSpecId string `protobuf:"bytes,1,opt,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"` + // Output only. The value of [AnnotationSpec.display_name][google.cloud.automl.v1beta1.AnnotationSpec.display_name] when the model + // was trained. Because this field returns a value at model training time, + // for different models trained using the same dataset, the returned value + // could be different as model owner could update the display_name between + // any two model training. + DisplayName string `protobuf:"bytes,5,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationPayload) Reset() { *m = AnnotationPayload{} } +func (m *AnnotationPayload) String() string { return proto.CompactTextString(m) } +func (*AnnotationPayload) ProtoMessage() {} +func (*AnnotationPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_payload_62fedaea4ad5a830, []int{0} +} +func (m *AnnotationPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationPayload.Unmarshal(m, b) +} +func (m *AnnotationPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationPayload.Marshal(b, m, deterministic) +} +func (dst *AnnotationPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationPayload.Merge(dst, src) +} +func (m *AnnotationPayload) XXX_Size() int { + return xxx_messageInfo_AnnotationPayload.Size(m) +} +func (m *AnnotationPayload) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationPayload proto.InternalMessageInfo + +type isAnnotationPayload_Detail interface { + isAnnotationPayload_Detail() +} + +type AnnotationPayload_Translation struct { + Translation *TranslationAnnotation `protobuf:"bytes,2,opt,name=translation,proto3,oneof"` +} + +type AnnotationPayload_Classification struct { + Classification *ClassificationAnnotation `protobuf:"bytes,3,opt,name=classification,proto3,oneof"` +} + +type AnnotationPayload_ImageObjectDetection struct { + ImageObjectDetection *ImageObjectDetectionAnnotation `protobuf:"bytes,4,opt,name=image_object_detection,json=imageObjectDetection,proto3,oneof"` +} + 
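// ---------------------------------------------------------------------------
// Editorial aside (not part of the vendored, generated file): the Detail oneof
// on AnnotationPayload is emitted by protoc-gen-go as an unexported interface
// plus one wrapper struct per case, and only the getter whose case is actually
// populated returns a non-nil value. The standalone sketch below is
// illustrative only: it assumes the vendored import path shown in this file's
// header, and the spec id, display name, and score are made-up example data.

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

func main() {
	// Populate exactly one oneof case by assigning its wrapper struct to Detail.
	p := &automl.AnnotationPayload{
		AnnotationSpecId: "1234567890", // hypothetical example value
		DisplayName:      "cat",        // hypothetical example value
		Detail: &automl.AnnotationPayload_Classification{
			Classification: &automl.ClassificationAnnotation{Score: 0.87},
		},
	}

	// The getter matching the populated case unwraps it; all other case
	// getters return nil.
	fmt.Println(p.GetClassification().GetScore()) // 0.87
	fmt.Println(p.GetTranslation() == nil)        // true
}

// --- end of editorial aside; vendored generated code continues below --------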
+type AnnotationPayload_VideoClassification struct { + VideoClassification *VideoClassificationAnnotation `protobuf:"bytes,9,opt,name=video_classification,json=videoClassification,proto3,oneof"` +} + +type AnnotationPayload_TextExtraction struct { + TextExtraction *TextExtractionAnnotation `protobuf:"bytes,6,opt,name=text_extraction,json=textExtraction,proto3,oneof"` +} + +type AnnotationPayload_TextSentiment struct { + TextSentiment *TextSentimentAnnotation `protobuf:"bytes,7,opt,name=text_sentiment,json=textSentiment,proto3,oneof"` +} + +type AnnotationPayload_Tables struct { + Tables *TablesAnnotation `protobuf:"bytes,10,opt,name=tables,proto3,oneof"` +} + +func (*AnnotationPayload_Translation) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_Classification) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_ImageObjectDetection) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_VideoClassification) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_TextExtraction) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_TextSentiment) isAnnotationPayload_Detail() {} + +func (*AnnotationPayload_Tables) isAnnotationPayload_Detail() {} + +func (m *AnnotationPayload) GetDetail() isAnnotationPayload_Detail { + if m != nil { + return m.Detail + } + return nil +} + +func (m *AnnotationPayload) GetTranslation() *TranslationAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_Translation); ok { + return x.Translation + } + return nil +} + +func (m *AnnotationPayload) GetClassification() *ClassificationAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_Classification); ok { + return x.Classification + } + return nil +} + +func (m *AnnotationPayload) GetImageObjectDetection() *ImageObjectDetectionAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_ImageObjectDetection); ok { + return x.ImageObjectDetection + } + return nil +} + +func (m *AnnotationPayload) GetVideoClassification() *VideoClassificationAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_VideoClassification); ok { + return x.VideoClassification + } + return nil +} + +func (m *AnnotationPayload) GetTextExtraction() *TextExtractionAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_TextExtraction); ok { + return x.TextExtraction + } + return nil +} + +func (m *AnnotationPayload) GetTextSentiment() *TextSentimentAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_TextSentiment); ok { + return x.TextSentiment + } + return nil +} + +func (m *AnnotationPayload) GetTables() *TablesAnnotation { + if x, ok := m.GetDetail().(*AnnotationPayload_Tables); ok { + return x.Tables + } + return nil +} + +func (m *AnnotationPayload) GetAnnotationSpecId() string { + if m != nil { + return m.AnnotationSpecId + } + return "" +} + +func (m *AnnotationPayload) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AnnotationPayload) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AnnotationPayload_OneofMarshaler, _AnnotationPayload_OneofUnmarshaler, _AnnotationPayload_OneofSizer, []interface{}{ + (*AnnotationPayload_Translation)(nil), + (*AnnotationPayload_Classification)(nil), + (*AnnotationPayload_ImageObjectDetection)(nil), + (*AnnotationPayload_VideoClassification)(nil), + (*AnnotationPayload_TextExtraction)(nil), + (*AnnotationPayload_TextSentiment)(nil), + (*AnnotationPayload_Tables)(nil), + } +} + +func _AnnotationPayload_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AnnotationPayload) + // detail + switch x := m.Detail.(type) { + case *AnnotationPayload_Translation: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Translation); err != nil { + return err + } + case *AnnotationPayload_Classification: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Classification); err != nil { + return err + } + case *AnnotationPayload_ImageObjectDetection: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageObjectDetection); err != nil { + return err + } + case *AnnotationPayload_VideoClassification: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassification); err != nil { + return err + } + case *AnnotationPayload_TextExtraction: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextExtraction); err != nil { + return err + } + case *AnnotationPayload_TextSentiment: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextSentiment); err != nil { + return err + } + case *AnnotationPayload_Tables: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Tables); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AnnotationPayload.Detail has unexpected type %T", x) + } + return nil +} + +func _AnnotationPayload_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AnnotationPayload) + switch tag { + case 2: // detail.translation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TranslationAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_Translation{msg} + return true, err + case 3: // detail.classification + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClassificationAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_Classification{msg} + return true, err + case 4: // detail.image_object_detection + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageObjectDetectionAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_ImageObjectDetection{msg} + return true, err + case 9: // detail.video_classification + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_VideoClassification{msg} + return true, err + case 6: // detail.text_extraction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextExtractionAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_TextExtraction{msg} + return true, err + case 7: // detail.text_sentiment + if wire 
!= proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextSentimentAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_TextSentiment{msg} + return true, err + case 10: // detail.tables + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TablesAnnotation) + err := b.DecodeMessage(msg) + m.Detail = &AnnotationPayload_Tables{msg} + return true, err + default: + return false, nil + } +} + +func _AnnotationPayload_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AnnotationPayload) + // detail + switch x := m.Detail.(type) { + case *AnnotationPayload_Translation: + s := proto.Size(x.Translation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_Classification: + s := proto.Size(x.Classification) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_ImageObjectDetection: + s := proto.Size(x.ImageObjectDetection) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_VideoClassification: + s := proto.Size(x.VideoClassification) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_TextExtraction: + s := proto.Size(x.TextExtraction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_TextSentiment: + s := proto.Size(x.TextSentiment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationPayload_Tables: + s := proto.Size(x.Tables) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AnnotationPayload)(nil), "google.cloud.automl.v1beta1.AnnotationPayload") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/annotation_payload.proto", fileDescriptor_annotation_payload_62fedaea4ad5a830) +} + +var fileDescriptor_annotation_payload_62fedaea4ad5a830 = []byte{ + // 497 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0x41, 0x6f, 0xd3, 0x30, + 0x18, 0x86, 0xc9, 0x80, 0xc0, 0x5c, 0x18, 0x60, 0x26, 0x14, 0x36, 0x24, 0x06, 0xa7, 0x4a, 0xb0, + 0x64, 0x1d, 0xe3, 0xc2, 0x4e, 0xdd, 0x40, 0x63, 0x07, 0x60, 0xda, 0xa6, 0x1e, 0x50, 0x51, 0xf8, + 0x92, 0x7c, 0x8b, 0x8c, 0x1c, 0x3b, 0x6a, 0xdc, 0xaa, 0xbd, 0xf3, 0x1f, 0xf8, 0x4f, 0xfc, 0x2a, + 0x54, 0x3b, 0x6d, 0xe3, 0x32, 0xb9, 0x3b, 0xa6, 0xdf, 0xfb, 0x3c, 0x6f, 0x6c, 0xc7, 0x25, 0x07, + 0xb9, 0x94, 0x39, 0xc7, 0x28, 0xe5, 0x72, 0x98, 0x45, 0x30, 0x54, 0xb2, 0xe0, 0xd1, 0xa8, 0x93, + 0xa0, 0x82, 0x4e, 0x04, 0x42, 0x48, 0x05, 0x8a, 0x49, 0x11, 0x97, 0x30, 0xe1, 0x12, 0xb2, 0xb0, + 0x1c, 0x48, 0x25, 0xe9, 0xb6, 0xa1, 0x42, 0x4d, 0x85, 0x86, 0x0a, 0x6b, 0x6a, 0xeb, 0x45, 0xad, + 0x84, 0x92, 0x35, 0x0c, 0x95, 0x41, 0xb7, 0xf6, 0x5c, 0x85, 0x29, 0x87, 0xaa, 0x62, 0x57, 0x2c, + 0xd5, 0x48, 0x4d, 0xbc, 0x71, 0x11, 0x19, 0x2a, 0x4c, 0x1b, 0xe1, 0xb6, 0x2b, 0xac, 0x20, 0xe1, + 0x38, 0x7b, 0x91, 0x8e, 0x33, 0x89, 0x63, 0x15, 0xe3, 0x58, 0x0d, 0xa0, 0x29, 0xdf, 0x5b, 0x89, + 0x54, 0x28, 0x14, 0x2b, 0x50, 0xa8, 0x9a, 0xd8, 0x75, 0x12, 0x03, 0x10, 0x15, 0x6f, 0x2e, 0xf5, + 0x79, 0x1d, 0xd7, 0x4f, 0xc9, 0xf0, 0x2a, 0x02, 0x31, 0x31, 0xa3, 0xd7, 0x7f, 0x7c, 0xf2, 0xa4, + 0x3b, 0xdf, 0xcd, 0x33, 0x73, 0x1c, 0xb4, 0x47, 0x5a, 0x0d, 0x4b, 0xb0, 0xb6, 0xe3, 0xb5, 0x5b, + 0xfb, 0xfb, 0xa1, 0xe3, 0x78, 0xc2, 0xcb, 0x45, 
0x7e, 0xe1, 0xfb, 0x7c, 0xeb, 0xbc, 0x29, 0xa2, + 0x31, 0xd9, 0xb0, 0xcf, 0x22, 0xb8, 0xad, 0xd5, 0xef, 0x9d, 0xea, 0x63, 0x0b, 0xb1, 0xec, 0x4b, + 0x3a, 0x5a, 0x91, 0x67, 0xac, 0x80, 0x1c, 0x63, 0x99, 0xfc, 0xc2, 0x54, 0xc5, 0xf3, 0x73, 0x0c, + 0xee, 0xe8, 0xa2, 0x43, 0x67, 0xd1, 0xe9, 0x14, 0xfd, 0xa6, 0xc9, 0x8f, 0x33, 0xd0, 0xaa, 0xdb, + 0x64, 0xd7, 0x24, 0xa8, 0x24, 0x9b, 0x23, 0x96, 0xa1, 0x8c, 0x97, 0xd6, 0xb6, 0xae, 0x2b, 0x3f, + 0x38, 0x2b, 0x7b, 0x53, 0xd0, 0xb1, 0xc0, 0xa7, 0xa3, 0xff, 0x03, 0xf4, 0x27, 0x79, 0xb4, 0xf4, + 0x25, 0x05, 0xfe, 0x0d, 0xf6, 0xf1, 0x12, 0xc7, 0xea, 0xd3, 0x1c, 0xb1, 0xf7, 0x51, 0x59, 0x33, + 0xfa, 0x83, 0x6c, 0xd8, 0x1f, 0x5e, 0x70, 0x4f, 0x17, 0x1c, 0xac, 0x2c, 0xb8, 0x98, 0x11, 0x96, + 0xff, 0xa1, 0x6a, 0x8e, 0xe8, 0x09, 0xf1, 0xcd, 0xa5, 0x09, 0x88, 0xd6, 0xee, 0xba, 0xb5, 0x3a, + 0x6a, 0xf9, 0x6a, 0x9c, 0xbe, 0x25, 0xb4, 0xf1, 0x6f, 0x52, 0x95, 0x98, 0xc6, 0x2c, 0x0b, 0xbc, + 0x1d, 0xaf, 0xbd, 0x7e, 0xfe, 0x78, 0x31, 0xb9, 0x28, 0x31, 0x3d, 0xcd, 0xe8, 0x2b, 0xf2, 0x20, + 0x63, 0x55, 0xc9, 0x61, 0x12, 0x0b, 0x28, 0x30, 0xb8, 0xab, 0x73, 0xad, 0xfa, 0xb7, 0xaf, 0x50, + 0xe0, 0xd1, 0x7d, 0xe2, 0x67, 0xa8, 0x80, 0xf1, 0xa3, 0xdf, 0x1e, 0x79, 0x99, 0xca, 0xc2, 0xf5, + 0x66, 0x67, 0xde, 0xf7, 0x6e, 0x3d, 0xce, 0x25, 0x07, 0x91, 0x87, 0x72, 0x90, 0x47, 0x39, 0x0a, + 0x7d, 0xb7, 0x22, 0x33, 0x82, 0x92, 0x55, 0xd7, 0x5e, 0xd4, 0x43, 0xf3, 0xf8, 0x77, 0x6d, 0xfb, + 0x44, 0x07, 0xfb, 0xc7, 0xd3, 0x50, 0xbf, 0x3b, 0x54, 0xf2, 0x0b, 0xef, 0xf7, 0x4c, 0x28, 0xf1, + 0xb5, 0xeb, 0xdd, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x08, 0x95, 0x59, 0xf9, 0x52, 0x05, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/classification.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/classification.pb.go new file mode 100644 index 0000000..1ce2e09 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/classification.pb.go @@ -0,0 +1,633 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/classification.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type of the classification problem. +type ClassificationType int32 + +const ( + // Should not be used, an un-set enum has this value by default. + ClassificationType_CLASSIFICATION_TYPE_UNSPECIFIED ClassificationType = 0 + // At most one label is allowed per example. + ClassificationType_MULTICLASS ClassificationType = 1 + // Multiple labels are allowed for one example. 
+ ClassificationType_MULTILABEL ClassificationType = 2 +) + +var ClassificationType_name = map[int32]string{ + 0: "CLASSIFICATION_TYPE_UNSPECIFIED", + 1: "MULTICLASS", + 2: "MULTILABEL", +} +var ClassificationType_value = map[string]int32{ + "CLASSIFICATION_TYPE_UNSPECIFIED": 0, + "MULTICLASS": 1, + "MULTILABEL": 2, +} + +func (x ClassificationType) String() string { + return proto.EnumName(ClassificationType_name, int32(x)) +} +func (ClassificationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{0} +} + +// Contains annotation details specific to classification. +type ClassificationAnnotation struct { + // Output only. A confidence estimate between 0.0 and 1.0. A higher value + // means greater confidence that the annotation is positive. If a user + // approves an annotation as negative or positive, the score value remains + // unchanged. If a user creates an annotation, the score is 0 for negative or + // 1 for positive. + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationAnnotation) Reset() { *m = ClassificationAnnotation{} } +func (m *ClassificationAnnotation) String() string { return proto.CompactTextString(m) } +func (*ClassificationAnnotation) ProtoMessage() {} +func (*ClassificationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{0} +} +func (m *ClassificationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationAnnotation.Unmarshal(m, b) +} +func (m *ClassificationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationAnnotation.Marshal(b, m, deterministic) +} +func (dst *ClassificationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationAnnotation.Merge(dst, src) +} +func (m *ClassificationAnnotation) XXX_Size() int { + return xxx_messageInfo_ClassificationAnnotation.Size(m) +} +func (m *ClassificationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationAnnotation proto.InternalMessageInfo + +func (m *ClassificationAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Contains annotation details specific to video classification. +type VideoClassificationAnnotation struct { + // Output only. Expresses the type of video classification. Possible values: + // + // * `segment` - Classification done on a specified by user + // time segment of a video. AnnotationSpec is answered to be present + // in that time segment, if it is present in any part of it. The video + // ML model evaluations are done only for this type of classification. + // + // * `shot`- Shot-level classification. + // AutoML Video Intelligence determines the boundaries + // for each camera shot in the entire segment of the video that user + // specified in the request configuration. AutoML Video Intelligence + // then returns labels and their confidence scores for each detected + // shot, along with the start and end time of the shot. + // WARNING: Model evaluation is not done for this classification type, + // the quality of it depends on training data, but there are no + // metrics provided to describe that quality. 
+ // + // * `1s_interval` - AutoML Video Intelligence returns labels and their + // confidence scores for each second of the entire segment of the video + // that user specified in the request configuration. + // WARNING: Model evaluation is not done for this classification type, + // the quality of it depends on training data, but there are no + // metrics provided to describe that quality. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Output only . The classification details of this annotation. + ClassificationAnnotation *ClassificationAnnotation `protobuf:"bytes,2,opt,name=classification_annotation,json=classificationAnnotation,proto3" json:"classification_annotation,omitempty"` + // Output only . The time segment of the video to which the + // annotation applies. + TimeSegment *TimeSegment `protobuf:"bytes,3,opt,name=time_segment,json=timeSegment,proto3" json:"time_segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationAnnotation) Reset() { *m = VideoClassificationAnnotation{} } +func (m *VideoClassificationAnnotation) String() string { return proto.CompactTextString(m) } +func (*VideoClassificationAnnotation) ProtoMessage() {} +func (*VideoClassificationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{1} +} +func (m *VideoClassificationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationAnnotation.Unmarshal(m, b) +} +func (m *VideoClassificationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationAnnotation.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationAnnotation.Merge(dst, src) +} +func (m *VideoClassificationAnnotation) XXX_Size() int { + return xxx_messageInfo_VideoClassificationAnnotation.Size(m) +} +func (m *VideoClassificationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationAnnotation proto.InternalMessageInfo + +func (m *VideoClassificationAnnotation) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *VideoClassificationAnnotation) GetClassificationAnnotation() *ClassificationAnnotation { + if m != nil { + return m.ClassificationAnnotation + } + return nil +} + +func (m *VideoClassificationAnnotation) GetTimeSegment() *TimeSegment { + if m != nil { + return m.TimeSegment + } + return nil +} + +// Model evaluation metrics for classification problems. +// Note: For Video Classification this metrics only describe quality of the +// Video Classification predictions of "segment_classification" type. +type ClassificationEvaluationMetrics struct { + // Output only. The Area Under Precision-Recall Curve metric. Micro-averaged + // for the overall evaluation. + AuPrc float32 `protobuf:"fixed32,1,opt,name=au_prc,json=auPrc,proto3" json:"au_prc,omitempty"` + // Output only. The Area Under Precision-Recall Curve metric based on priors. + // Micro-averaged for the overall evaluation. + // Deprecated. + BaseAuPrc float32 `protobuf:"fixed32,2,opt,name=base_au_prc,json=baseAuPrc,proto3" json:"base_au_prc,omitempty"` // Deprecated: Do not use. + // Output only. The Area Under Receiver Operating Characteristic curve metric. + // Micro-averaged for the overall evaluation. 
+ AuRoc float32 `protobuf:"fixed32,6,opt,name=au_roc,json=auRoc,proto3" json:"au_roc,omitempty"` + // Output only. The Log Loss metric. + LogLoss float32 `protobuf:"fixed32,7,opt,name=log_loss,json=logLoss,proto3" json:"log_loss,omitempty"` + // Output only. Metrics for each confidence_threshold in + // 0.00,0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 and + // position_threshold = INT32_MAX_VALUE. + // Precision-recall curve is derived from them. + // The above metrics may also be supplied for additional values of + // position_threshold. + ConfidenceMetricsEntry []*ClassificationEvaluationMetrics_ConfidenceMetricsEntry `protobuf:"bytes,3,rep,name=confidence_metrics_entry,json=confidenceMetricsEntry,proto3" json:"confidence_metrics_entry,omitempty"` + // Output only. Confusion matrix of the evaluation. + // Only set for MULTICLASS classification problems where number + // of labels is no more than 10. + // Only set for model level evaluation, not for evaluation per label. + ConfusionMatrix *ClassificationEvaluationMetrics_ConfusionMatrix `protobuf:"bytes,4,opt,name=confusion_matrix,json=confusionMatrix,proto3" json:"confusion_matrix,omitempty"` + // Output only. The annotation spec ids used for this evaluation. + AnnotationSpecId []string `protobuf:"bytes,5,rep,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationEvaluationMetrics) Reset() { *m = ClassificationEvaluationMetrics{} } +func (m *ClassificationEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*ClassificationEvaluationMetrics) ProtoMessage() {} +func (*ClassificationEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{2} +} +func (m *ClassificationEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationEvaluationMetrics.Unmarshal(m, b) +} +func (m *ClassificationEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *ClassificationEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationEvaluationMetrics.Merge(dst, src) +} +func (m *ClassificationEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_ClassificationEvaluationMetrics.Size(m) +} +func (m *ClassificationEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationEvaluationMetrics proto.InternalMessageInfo + +func (m *ClassificationEvaluationMetrics) GetAuPrc() float32 { + if m != nil { + return m.AuPrc + } + return 0 +} + +// Deprecated: Do not use. 
+func (m *ClassificationEvaluationMetrics) GetBaseAuPrc() float32 { + if m != nil { + return m.BaseAuPrc + } + return 0 +} + +func (m *ClassificationEvaluationMetrics) GetAuRoc() float32 { + if m != nil { + return m.AuRoc + } + return 0 +} + +func (m *ClassificationEvaluationMetrics) GetLogLoss() float32 { + if m != nil { + return m.LogLoss + } + return 0 +} + +func (m *ClassificationEvaluationMetrics) GetConfidenceMetricsEntry() []*ClassificationEvaluationMetrics_ConfidenceMetricsEntry { + if m != nil { + return m.ConfidenceMetricsEntry + } + return nil +} + +func (m *ClassificationEvaluationMetrics) GetConfusionMatrix() *ClassificationEvaluationMetrics_ConfusionMatrix { + if m != nil { + return m.ConfusionMatrix + } + return nil +} + +func (m *ClassificationEvaluationMetrics) GetAnnotationSpecId() []string { + if m != nil { + return m.AnnotationSpecId + } + return nil +} + +// Metrics for a single confidence threshold. +type ClassificationEvaluationMetrics_ConfidenceMetricsEntry struct { + // Output only. Metrics are computed with an assumption that the model + // never returns predictions with score lower than this value. + ConfidenceThreshold float32 `protobuf:"fixed32,1,opt,name=confidence_threshold,json=confidenceThreshold,proto3" json:"confidence_threshold,omitempty"` + // Output only. Metrics are computed with an assumption that the model + // always returns at most this many predictions (ordered by their score, + // descendingly), but they all still need to meet the confidence_threshold. + PositionThreshold int32 `protobuf:"varint,14,opt,name=position_threshold,json=positionThreshold,proto3" json:"position_threshold,omitempty"` + // Output only. Recall (True Positive Rate) for the given confidence + // threshold. + Recall float32 `protobuf:"fixed32,2,opt,name=recall,proto3" json:"recall,omitempty"` + // Output only. Precision for the given confidence threshold. + Precision float32 `protobuf:"fixed32,3,opt,name=precision,proto3" json:"precision,omitempty"` + // Output only. False Positive Rate for the given confidence threshold. + FalsePositiveRate float32 `protobuf:"fixed32,8,opt,name=false_positive_rate,json=falsePositiveRate,proto3" json:"false_positive_rate,omitempty"` + // Output only. The harmonic mean of recall and precision. + F1Score float32 `protobuf:"fixed32,4,opt,name=f1_score,json=f1Score,proto3" json:"f1_score,omitempty"` + // Output only. The Recall (True Positive Rate) when only considering the + // label that has the highest prediction score and not below the confidence + // threshold for each example. + RecallAt1 float32 `protobuf:"fixed32,5,opt,name=recall_at1,json=recallAt1,proto3" json:"recall_at1,omitempty"` + // Output only. The precision when only considering the label that has the + // highest prediction score and not below the confidence threshold for each + // example. + PrecisionAt1 float32 `protobuf:"fixed32,6,opt,name=precision_at1,json=precisionAt1,proto3" json:"precision_at1,omitempty"` + // Output only. The False Positive Rate when only considering the label that + // has the highest prediction score and not below the confidence threshold + // for each example. + FalsePositiveRateAt1 float32 `protobuf:"fixed32,9,opt,name=false_positive_rate_at1,json=falsePositiveRateAt1,proto3" json:"false_positive_rate_at1,omitempty"` + // Output only. 
The harmonic mean of [recall_at1][google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.recall_at1] and [precision_at1][google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry.precision_at1]. + F1ScoreAt1 float32 `protobuf:"fixed32,7,opt,name=f1_score_at1,json=f1ScoreAt1,proto3" json:"f1_score_at1,omitempty"` + // Output only. The number of model created labels that match a ground truth + // label. + TruePositiveCount int64 `protobuf:"varint,10,opt,name=true_positive_count,json=truePositiveCount,proto3" json:"true_positive_count,omitempty"` + // Output only. The number of model created labels that do not match a + // ground truth label. + FalsePositiveCount int64 `protobuf:"varint,11,opt,name=false_positive_count,json=falsePositiveCount,proto3" json:"false_positive_count,omitempty"` + // Output only. The number of ground truth labels that are not matched + // by a model created label. + FalseNegativeCount int64 `protobuf:"varint,12,opt,name=false_negative_count,json=falseNegativeCount,proto3" json:"false_negative_count,omitempty"` + // Output only. The number of labels that were not created by the model, + // but if they would, they would not match a ground truth label. + TrueNegativeCount int64 `protobuf:"varint,13,opt,name=true_negative_count,json=trueNegativeCount,proto3" json:"true_negative_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) Reset() { + *m = ClassificationEvaluationMetrics_ConfidenceMetricsEntry{} +} +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) String() string { + return proto.CompactTextString(m) +} +func (*ClassificationEvaluationMetrics_ConfidenceMetricsEntry) ProtoMessage() {} +func (*ClassificationEvaluationMetrics_ConfidenceMetricsEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{2, 0} +} +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry.Unmarshal(m, b) +} +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry.Marshal(b, m, deterministic) +} +func (dst *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry.Merge(dst, src) +} +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) XXX_Size() int { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry.Size(m) +} +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationEvaluationMetrics_ConfidenceMetricsEntry proto.InternalMessageInfo + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetConfidenceThreshold() float32 { + if m != nil { + return m.ConfidenceThreshold + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetPositionThreshold() int32 { + if m != nil { + return m.PositionThreshold + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetRecall() float32 { + if m != nil { + return 
m.Recall + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetPrecision() float32 { + if m != nil { + return m.Precision + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetFalsePositiveRate() float32 { + if m != nil { + return m.FalsePositiveRate + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetF1Score() float32 { + if m != nil { + return m.F1Score + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetRecallAt1() float32 { + if m != nil { + return m.RecallAt1 + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetPrecisionAt1() float32 { + if m != nil { + return m.PrecisionAt1 + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetFalsePositiveRateAt1() float32 { + if m != nil { + return m.FalsePositiveRateAt1 + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetF1ScoreAt1() float32 { + if m != nil { + return m.F1ScoreAt1 + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetTruePositiveCount() int64 { + if m != nil { + return m.TruePositiveCount + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetFalsePositiveCount() int64 { + if m != nil { + return m.FalsePositiveCount + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetFalseNegativeCount() int64 { + if m != nil { + return m.FalseNegativeCount + } + return 0 +} + +func (m *ClassificationEvaluationMetrics_ConfidenceMetricsEntry) GetTrueNegativeCount() int64 { + if m != nil { + return m.TrueNegativeCount + } + return 0 +} + +// Confusion matrix of the model running the classification. +type ClassificationEvaluationMetrics_ConfusionMatrix struct { + // Output only. IDs of the annotation specs used in the confusion matrix. + AnnotationSpecId []string `protobuf:"bytes,1,rep,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"` + // Output only. Rows in the confusion matrix. The number of rows is equal to + // the size of `annotation_spec_id`. + // `row[i].value[j]` is the number of examples that have ground truth of the + // `annotation_spec_id[i]` and are predicted as `annotation_spec_id[j]` by + // the model being evaluated. 
+ Row []*ClassificationEvaluationMetrics_ConfusionMatrix_Row `protobuf:"bytes,2,rep,name=row,proto3" json:"row,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) Reset() { + *m = ClassificationEvaluationMetrics_ConfusionMatrix{} +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) String() string { + return proto.CompactTextString(m) +} +func (*ClassificationEvaluationMetrics_ConfusionMatrix) ProtoMessage() {} +func (*ClassificationEvaluationMetrics_ConfusionMatrix) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{2, 1} +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix.Unmarshal(m, b) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix.Marshal(b, m, deterministic) +} +func (dst *ClassificationEvaluationMetrics_ConfusionMatrix) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix.Merge(dst, src) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) XXX_Size() int { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix.Size(m) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix proto.InternalMessageInfo + +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) GetAnnotationSpecId() []string { + if m != nil { + return m.AnnotationSpecId + } + return nil +} + +func (m *ClassificationEvaluationMetrics_ConfusionMatrix) GetRow() []*ClassificationEvaluationMetrics_ConfusionMatrix_Row { + if m != nil { + return m.Row + } + return nil +} + +// Output only. A row in the confusion matrix. +type ClassificationEvaluationMetrics_ConfusionMatrix_Row struct { + // Output only. Value of the specific cell in the confusion matrix. + // The number of values each row has (i.e. the length of the row) is equal + // to the length of the annotation_spec_id field. 
+ ExampleCount []int32 `protobuf:"varint,1,rep,packed,name=example_count,json=exampleCount,proto3" json:"example_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) Reset() { + *m = ClassificationEvaluationMetrics_ConfusionMatrix_Row{} +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) String() string { + return proto.CompactTextString(m) +} +func (*ClassificationEvaluationMetrics_ConfusionMatrix_Row) ProtoMessage() {} +func (*ClassificationEvaluationMetrics_ConfusionMatrix_Row) Descriptor() ([]byte, []int) { + return fileDescriptor_classification_337031297c7f069d, []int{2, 1, 0} +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row.Unmarshal(m, b) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row.Marshal(b, m, deterministic) +} +func (dst *ClassificationEvaluationMetrics_ConfusionMatrix_Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row.Merge(dst, src) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) XXX_Size() int { + return xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row.Size(m) +} +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationEvaluationMetrics_ConfusionMatrix_Row proto.InternalMessageInfo + +func (m *ClassificationEvaluationMetrics_ConfusionMatrix_Row) GetExampleCount() []int32 { + if m != nil { + return m.ExampleCount + } + return nil +} + +func init() { + proto.RegisterType((*ClassificationAnnotation)(nil), "google.cloud.automl.v1beta1.ClassificationAnnotation") + proto.RegisterType((*VideoClassificationAnnotation)(nil), "google.cloud.automl.v1beta1.VideoClassificationAnnotation") + proto.RegisterType((*ClassificationEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.ClassificationEvaluationMetrics") + proto.RegisterType((*ClassificationEvaluationMetrics_ConfidenceMetricsEntry)(nil), "google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfidenceMetricsEntry") + proto.RegisterType((*ClassificationEvaluationMetrics_ConfusionMatrix)(nil), "google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfusionMatrix") + proto.RegisterType((*ClassificationEvaluationMetrics_ConfusionMatrix_Row)(nil), "google.cloud.automl.v1beta1.ClassificationEvaluationMetrics.ConfusionMatrix.Row") + proto.RegisterEnum("google.cloud.automl.v1beta1.ClassificationType", ClassificationType_name, ClassificationType_value) +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/classification.proto", fileDescriptor_classification_337031297c7f069d) +} + +var fileDescriptor_classification_337031297c7f069d = []byte{ + // 841 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xd1, 0x6e, 0xdb, 0x36, + 0x14, 0x9d, 0xec, 0x38, 0xad, 0xaf, 0xdd, 0xd6, 0x61, 0xb2, 0x4c, 0x75, 0x5b, 0xc4, 0x48, 0x5f, + 0x8c, 0x60, 0x93, 0xe3, 0x0e, 0x7d, 0xda, 0x93, 0xe3, 0xb9, 0x83, 0x31, 0x27, 0x33, 0x68, 0xb7, + 0x40, 0x87, 0x00, 0x02, 0x43, 0xd3, 0xaa, 0x00, 0x49, 
0x14, 0x48, 0x2a, 0x69, 0x3f, 0x62, 0xcf, + 0xfb, 0xa7, 0xbd, 0xec, 0x07, 0xf6, 0x19, 0xc3, 0x9e, 0x07, 0x92, 0xb2, 0x15, 0xa5, 0x8e, 0xb1, + 0x61, 0x7b, 0xf3, 0xbd, 0xe7, 0x9c, 0x7b, 0xcf, 0xa5, 0x2e, 0x4d, 0x38, 0x0d, 0x38, 0x0f, 0x22, + 0xd6, 0xa3, 0x11, 0xcf, 0x16, 0x3d, 0x92, 0x29, 0x1e, 0x47, 0xbd, 0xeb, 0xfe, 0x15, 0x53, 0xa4, + 0xdf, 0xa3, 0x11, 0x91, 0x32, 0x5c, 0x86, 0x94, 0xa8, 0x90, 0x27, 0x5e, 0x2a, 0xb8, 0xe2, 0xe8, + 0x99, 0x55, 0x78, 0x46, 0xe1, 0x59, 0x85, 0x97, 0x2b, 0xda, 0xcf, 0xf3, 0x72, 0x24, 0x0d, 0x7b, + 0x24, 0x49, 0xb8, 0x32, 0x4a, 0x69, 0xa5, 0xed, 0x93, 0x6d, 0xcd, 0x14, 0x8b, 0x53, 0x2e, 0x48, + 0x64, 0xb9, 0xc7, 0xa7, 0xe0, 0x0e, 0x4b, 0xed, 0x07, 0xeb, 0x72, 0xe8, 0x00, 0x6a, 0x92, 0x72, + 0xc1, 0x5c, 0xa7, 0xe3, 0x74, 0x2b, 0xd8, 0x06, 0xc7, 0x7f, 0x3a, 0xf0, 0xe2, 0x5d, 0xb8, 0x60, + 0xfc, 0x5e, 0x1d, 0x82, 0x1d, 0xf5, 0x29, 0xb5, 0xb2, 0x3a, 0x36, 0xbf, 0x91, 0x80, 0xa7, 0xe5, + 0x31, 0xfd, 0xc2, 0xb7, 0x5b, 0xe9, 0x38, 0xdd, 0xc6, 0xab, 0xd7, 0xde, 0x96, 0x91, 0xbd, 0xfb, + 0xba, 0x61, 0x97, 0xde, 0xe7, 0xe3, 0x47, 0x68, 0xaa, 0x30, 0x66, 0xbe, 0x64, 0x41, 0xcc, 0x12, + 0xe5, 0x56, 0x4d, 0x9b, 0xee, 0xd6, 0x36, 0xf3, 0x30, 0x66, 0x33, 0xcb, 0xc7, 0x0d, 0x55, 0x04, + 0xc7, 0x7f, 0xd5, 0xe1, 0xa8, 0xec, 0x61, 0x74, 0x4d, 0xa2, 0xcc, 0xfc, 0x3a, 0x67, 0x4a, 0x84, + 0x54, 0xa2, 0x2f, 0x61, 0x97, 0x64, 0x7e, 0x2a, 0xe8, 0xea, 0xc4, 0x48, 0x36, 0x15, 0x14, 0x1d, + 0x43, 0xe3, 0x8a, 0x48, 0xe6, 0xe7, 0x98, 0x9e, 0xb6, 0x72, 0x56, 0x71, 0x1d, 0x5c, 0xd7, 0xe9, + 0x81, 0xe1, 0x58, 0xa9, 0xe0, 0xd4, 0xdd, 0x5d, 0x49, 0x31, 0xa7, 0xe8, 0x29, 0x3c, 0x8c, 0x78, + 0xe0, 0x47, 0x5c, 0x4a, 0xf7, 0x81, 0x01, 0x1e, 0x44, 0x3c, 0x98, 0x70, 0x29, 0xd1, 0x2f, 0x0e, + 0xb8, 0x94, 0x27, 0xcb, 0x70, 0xc1, 0x12, 0xca, 0xfc, 0xd8, 0x7a, 0xf0, 0x59, 0xa2, 0xc4, 0x27, + 0xb7, 0xda, 0xa9, 0x76, 0x1b, 0xaf, 0x66, 0xff, 0xe2, 0x44, 0x3f, 0x9b, 0xc6, 0x1b, 0xae, 0x8b, + 0xe7, 0x99, 0x91, 0x2e, 0x8d, 0x0f, 0xe9, 0xc6, 0x3c, 0xba, 0x81, 0x96, 0x46, 0x32, 0xa9, 0x3f, + 0x6e, 0x4c, 0x94, 0x08, 0x3f, 0xba, 0x3b, 0xe6, 0xc4, 0x27, 0xff, 0xd9, 0x86, 0x29, 0x7a, 0x6e, + 0x6a, 0xe2, 0x27, 0xb4, 0x9c, 0x40, 0x5f, 0x03, 0x2a, 0x76, 0xc9, 0x97, 0x29, 0xa3, 0x7e, 0xb8, + 0x70, 0x6b, 0x9d, 0x6a, 0xb7, 0x8e, 0x5b, 0x05, 0x32, 0x4b, 0x19, 0x1d, 0x2f, 0xda, 0x7f, 0xec, + 0xc0, 0xe1, 0xe6, 0xc9, 0x50, 0x1f, 0x0e, 0x6e, 0x1d, 0xa8, 0xfa, 0x20, 0x98, 0xfc, 0xc0, 0xa3, + 0x45, 0xfe, 0x31, 0xf7, 0x0b, 0x6c, 0xbe, 0x82, 0xd0, 0x37, 0x80, 0x52, 0x2e, 0x43, 0xd3, 0xb9, + 0x10, 0x3c, 0xee, 0x38, 0xdd, 0x1a, 0xde, 0x5b, 0x21, 0x05, 0xfd, 0x10, 0x76, 0x05, 0xa3, 0x24, + 0x8a, 0xec, 0x12, 0xe0, 0x3c, 0x42, 0xcf, 0xa1, 0x9e, 0x0a, 0x46, 0x43, 0x3d, 0x95, 0x59, 0xd3, + 0x0a, 0x2e, 0x12, 0xc8, 0x83, 0xfd, 0x25, 0x89, 0x24, 0xf3, 0x6d, 0xc1, 0x6b, 0xe6, 0x0b, 0xa2, + 0x98, 0xfb, 0xd0, 0xf0, 0xf6, 0x0c, 0x34, 0xcd, 0x11, 0x4c, 0x14, 0xd3, 0x4b, 0xb3, 0xec, 0xfb, + 0xf6, 0xea, 0xee, 0xd8, 0xa5, 0x59, 0xf6, 0x67, 0x3a, 0x44, 0x2f, 0x00, 0x6c, 0x4b, 0x9f, 0xa8, + 0xbe, 0x5b, 0xb3, 0x9d, 0x6c, 0x66, 0xa0, 0xfa, 0xe8, 0x25, 0x3c, 0x5a, 0xb7, 0x35, 0x0c, 0xbb, + 0x8c, 0xcd, 0x75, 0x52, 0x93, 0x5e, 0xc3, 0x57, 0x1b, 0xec, 0x18, 0x7a, 0xdd, 0xd0, 0x0f, 0x3e, + 0xb3, 0xa4, 0x65, 0x1d, 0x68, 0xae, 0x5c, 0x19, 0xae, 0x5d, 0x67, 0xc8, 0x9d, 0x69, 0x86, 0x07, + 0xfb, 0x4a, 0x64, 0xb7, 0xea, 0x52, 0x9e, 0x25, 0xca, 0x85, 0x8e, 0xd3, 0xad, 0xe2, 0x3d, 0x0d, + 0xad, 0x6a, 0x0e, 0x35, 0x80, 0x4e, 0xe1, 0xe0, 0x8e, 0x11, 0x2b, 0x68, 0x18, 0x01, 0x2a, 0xb9, + 0xb8, 0xa3, 0x48, 0x58, 0x40, 0x6e, 0x29, 0x9a, 0xb7, 0x14, 0x17, 0x39, 0x64, 
0x15, 0x2b, 0x4f, + 0x77, 0x04, 0x8f, 0x0a, 0x4f, 0x25, 0x7e, 0xfb, 0x77, 0x07, 0x9e, 0x0c, 0xff, 0xd1, 0x82, 0x3a, + 0x9b, 0x17, 0x14, 0x5d, 0x41, 0x55, 0xf0, 0x1b, 0xb7, 0x62, 0x6e, 0xf0, 0xf4, 0xff, 0xbc, 0x3a, + 0x1e, 0xe6, 0x37, 0x58, 0x17, 0x6f, 0x9f, 0x40, 0x15, 0xf3, 0x1b, 0xfd, 0xb9, 0xd9, 0x47, 0x12, + 0xa7, 0xd1, 0x6a, 0x2c, 0xed, 0xa9, 0x86, 0x9b, 0x79, 0xd2, 0x4c, 0x74, 0xf2, 0x1e, 0x50, 0xb9, + 0xcf, 0x5c, 0xff, 0x9f, 0xbf, 0x84, 0xa3, 0xe1, 0x64, 0x30, 0x9b, 0x8d, 0xdf, 0x8c, 0x87, 0x83, + 0xf9, 0xf8, 0xa7, 0x0b, 0x7f, 0xfe, 0x7e, 0x3a, 0xf2, 0xdf, 0x5e, 0xcc, 0xa6, 0xa3, 0xe1, 0xf8, + 0xcd, 0x78, 0xf4, 0x7d, 0xeb, 0x0b, 0xf4, 0x18, 0xe0, 0xfc, 0xed, 0x64, 0x3e, 0x36, 0xcc, 0x96, + 0xb3, 0x8e, 0x27, 0x83, 0xb3, 0xd1, 0xa4, 0x55, 0x39, 0xfb, 0xd5, 0x81, 0x23, 0xca, 0xe3, 0x6d, + 0x33, 0x9e, 0xed, 0x97, 0x9b, 0x4f, 0xf5, 0xab, 0xf5, 0xf3, 0x20, 0x57, 0x04, 0x3c, 0x22, 0x49, + 0xe0, 0x71, 0x11, 0xf4, 0x02, 0x96, 0x98, 0x17, 0xad, 0x67, 0x21, 0x92, 0x86, 0x72, 0xe3, 0x03, + 0xf8, 0x9d, 0x0d, 0x7f, 0xab, 0x3c, 0xfb, 0xc1, 0x10, 0x2f, 0x87, 0x9a, 0x74, 0x39, 0xc8, 0x14, + 0x3f, 0x8f, 0x2e, 0xdf, 0x59, 0xd2, 0xd5, 0xae, 0xa9, 0xf5, 0xed, 0xdf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x0e, 0xfe, 0xf4, 0x8c, 0xb8, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/column_spec.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/column_spec.pb.go new file mode 100644 index 0000000..30ff709 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/column_spec.pb.go @@ -0,0 +1,212 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/column_spec.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A representation of a column in a relational table. When listing them, column specs are returned in the same order in which they were +// given on import . +// Used by: +// * Tables +type ColumnSpec struct { + // Output only. The resource name of the column specs. + // Form: + // + // `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/tableSpecs/{table_spec_id}/columnSpecs/{column_spec_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The data type of elements stored in the column. + DataType *DataType `protobuf:"bytes,2,opt,name=data_type,json=dataType,proto3" json:"data_type,omitempty"` + // Output only. The name of the column to show in the interface. The name can + // be up to 100 characters long and can consist only of ASCII Latin letters + // A-Z and a-z, ASCII digits 0-9, underscores(_), and forward slashes(/), and + // must start with a letter or a digit. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. Stats of the series of values in the column. 
+ // This field may be stale, see the ancestor's + // Dataset.tables_dataset_metadata.stats_update_time field + // for the timestamp at which these stats were last updated. + DataStats *DataStats `protobuf:"bytes,4,opt,name=data_stats,json=dataStats,proto3" json:"data_stats,omitempty"` + // Output only. Top 10 most correlated with this column columns of the table, + // ordered by + // [cramers_v][google.cloud.automl.v1beta1.CorrelationStats.cramers_v] metric. + // This field may be stale, see the ancestor's + // Dataset.tables_dataset_metadata.stats_update_time field + // for the timestamp at which these stats were last updated. + TopCorrelatedColumns []*ColumnSpec_CorrelatedColumn `protobuf:"bytes,5,rep,name=top_correlated_columns,json=topCorrelatedColumns,proto3" json:"top_correlated_columns,omitempty"` + // Used to perform consistent read-modify-write updates. If not set, a blind + // "overwrite" update happens. + Etag string `protobuf:"bytes,6,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnSpec) Reset() { *m = ColumnSpec{} } +func (m *ColumnSpec) String() string { return proto.CompactTextString(m) } +func (*ColumnSpec) ProtoMessage() {} +func (*ColumnSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_column_spec_5bd50af362cf7442, []int{0} +} +func (m *ColumnSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnSpec.Unmarshal(m, b) +} +func (m *ColumnSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnSpec.Marshal(b, m, deterministic) +} +func (dst *ColumnSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnSpec.Merge(dst, src) +} +func (m *ColumnSpec) XXX_Size() int { + return xxx_messageInfo_ColumnSpec.Size(m) +} +func (m *ColumnSpec) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnSpec proto.InternalMessageInfo + +func (m *ColumnSpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ColumnSpec) GetDataType() *DataType { + if m != nil { + return m.DataType + } + return nil +} + +func (m *ColumnSpec) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ColumnSpec) GetDataStats() *DataStats { + if m != nil { + return m.DataStats + } + return nil +} + +func (m *ColumnSpec) GetTopCorrelatedColumns() []*ColumnSpec_CorrelatedColumn { + if m != nil { + return m.TopCorrelatedColumns + } + return nil +} + +func (m *ColumnSpec) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +// Identifies the table's column, and its correlation with the column this +// ColumnSpec describes. +type ColumnSpec_CorrelatedColumn struct { + // The column_spec_id of the correlated column, which belongs to the same + // table as the in-context column. + ColumnSpecId string `protobuf:"bytes,1,opt,name=column_spec_id,json=columnSpecId,proto3" json:"column_spec_id,omitempty"` + // Correlation between this and the in-context column. 
+ CorrelationStats *CorrelationStats `protobuf:"bytes,2,opt,name=correlation_stats,json=correlationStats,proto3" json:"correlation_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnSpec_CorrelatedColumn) Reset() { *m = ColumnSpec_CorrelatedColumn{} } +func (m *ColumnSpec_CorrelatedColumn) String() string { return proto.CompactTextString(m) } +func (*ColumnSpec_CorrelatedColumn) ProtoMessage() {} +func (*ColumnSpec_CorrelatedColumn) Descriptor() ([]byte, []int) { + return fileDescriptor_column_spec_5bd50af362cf7442, []int{0, 0} +} +func (m *ColumnSpec_CorrelatedColumn) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnSpec_CorrelatedColumn.Unmarshal(m, b) +} +func (m *ColumnSpec_CorrelatedColumn) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnSpec_CorrelatedColumn.Marshal(b, m, deterministic) +} +func (dst *ColumnSpec_CorrelatedColumn) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnSpec_CorrelatedColumn.Merge(dst, src) +} +func (m *ColumnSpec_CorrelatedColumn) XXX_Size() int { + return xxx_messageInfo_ColumnSpec_CorrelatedColumn.Size(m) +} +func (m *ColumnSpec_CorrelatedColumn) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnSpec_CorrelatedColumn.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnSpec_CorrelatedColumn proto.InternalMessageInfo + +func (m *ColumnSpec_CorrelatedColumn) GetColumnSpecId() string { + if m != nil { + return m.ColumnSpecId + } + return "" +} + +func (m *ColumnSpec_CorrelatedColumn) GetCorrelationStats() *CorrelationStats { + if m != nil { + return m.CorrelationStats + } + return nil +} + +func init() { + proto.RegisterType((*ColumnSpec)(nil), "google.cloud.automl.v1beta1.ColumnSpec") + proto.RegisterType((*ColumnSpec_CorrelatedColumn)(nil), "google.cloud.automl.v1beta1.ColumnSpec.CorrelatedColumn") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/column_spec.proto", fileDescriptor_column_spec_5bd50af362cf7442) +} + +var fileDescriptor_column_spec_5bd50af362cf7442 = []byte{ + // 396 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xd1, 0xea, 0xd3, 0x30, + 0x14, 0xc6, 0xe9, 0x7f, 0x7f, 0x87, 0xcb, 0x86, 0xcc, 0x20, 0x52, 0x36, 0xc1, 0x29, 0x2a, 0xbb, + 0x70, 0x29, 0x9b, 0x37, 0x82, 0x57, 0xdb, 0x14, 0xf1, 0x42, 0x91, 0x4e, 0xbc, 0x18, 0x83, 0x72, + 0x96, 0x86, 0x52, 0x48, 0x73, 0x42, 0x9b, 0x0a, 0xbb, 0xf7, 0x11, 0x7c, 0x2a, 0xdf, 0xc4, 0xb7, + 0x90, 0x26, 0xb1, 0x93, 0x21, 0x9d, 0x77, 0xa7, 0x27, 0xbf, 0x7c, 0xdf, 0xc9, 0xd7, 0x43, 0x16, + 0x19, 0x62, 0x26, 0x45, 0xc4, 0x25, 0xd6, 0x69, 0x04, 0xb5, 0xc1, 0x42, 0x46, 0xdf, 0x96, 0x47, + 0x61, 0x60, 0x19, 0x71, 0x94, 0x75, 0xa1, 0x92, 0x4a, 0x0b, 0xce, 0x74, 0x89, 0x06, 0xe9, 0xd4, + 0xe1, 0xcc, 0xe2, 0xcc, 0xe1, 0xcc, 0xe3, 0x93, 0x47, 0x5e, 0x0b, 0x74, 0x1e, 0x81, 0x52, 0x68, + 0xc0, 0xe4, 0xa8, 0x2a, 0x77, 0x75, 0xf2, 0xb2, 0xcb, 0x29, 0x05, 0x03, 0x49, 0x65, 0xc0, 0xfc, + 0x3f, 0x6d, 0x4e, 0x5a, 0x78, 0xfa, 0xe9, 0xaf, 0x1e, 0x21, 0x5b, 0x3b, 0xec, 0x4e, 0x0b, 0x4e, + 0x29, 0xb9, 0x55, 0x50, 0x88, 0x30, 0x98, 0x05, 0xf3, 0x41, 0x6c, 0x6b, 0xba, 0x21, 0x83, 0xf6, + 0x5a, 0x78, 0x33, 0x0b, 0xe6, 0xc3, 0xd5, 0x73, 0xd6, 0xf1, 0x1a, 0xf6, 0x16, 0x0c, 0x7c, 0x39, + 0x69, 0x11, 0xdf, 0x4d, 0x7d, 0x45, 0x9f, 0x90, 0x51, 0x9a, 0x57, 0x5a, 0xc2, 0x29, 0xb1, 0xfa, + 0x3d, 0xab, 0x3f, 0xf4, 0xbd, 0x4f, 0x8d, 0xcd, 0x3b, 0x42, 0xce, 0x6f, 0x09, 0x6f, 0xad, 0xcf, + 0x8b, 0xab, 
0x3e, 0xbb, 0x86, 0x8e, 0xed, 0x80, 0xb6, 0xa4, 0x8a, 0x3c, 0x34, 0xa8, 0x13, 0x8e, + 0x65, 0x29, 0x24, 0x18, 0x91, 0x26, 0xee, 0x5f, 0x54, 0xe1, 0x9d, 0x59, 0x6f, 0x3e, 0x5c, 0xbd, + 0xee, 0x94, 0x3c, 0x47, 0xc1, 0xb6, 0xad, 0x82, 0x6b, 0xc6, 0x0f, 0x0c, 0xea, 0xcb, 0x66, 0xd5, + 0x24, 0x26, 0x0c, 0x64, 0x61, 0xdf, 0x25, 0xd6, 0xd4, 0x93, 0x1f, 0x01, 0x19, 0x5f, 0x92, 0xf4, + 0x19, 0xb9, 0xf7, 0xd7, 0x56, 0x24, 0x79, 0xea, 0x43, 0x1e, 0xf1, 0xd6, 0xf3, 0x43, 0x4a, 0xf7, + 0xe4, 0xfe, 0x9f, 0xd1, 0x73, 0x54, 0x3e, 0x0c, 0x17, 0xfa, 0xe2, 0xca, 0xe4, 0xed, 0x2d, 0x97, + 0xc9, 0x98, 0x5f, 0x74, 0x36, 0xdf, 0x03, 0xf2, 0x98, 0x63, 0xd1, 0x25, 0xf3, 0x39, 0xd8, 0xaf, + 0xfd, 0x71, 0x86, 0x12, 0x54, 0xc6, 0xb0, 0xcc, 0xa2, 0x4c, 0x28, 0xbb, 0x2d, 0x91, 0x3b, 0x02, + 0x9d, 0x57, 0xff, 0x5c, 0xaf, 0x37, 0xee, 0xf3, 0xe7, 0xcd, 0xf4, 0xbd, 0x05, 0x0f, 0xdb, 0x06, + 0x3a, 0xac, 0x6b, 0x83, 0x1f, 0xe5, 0xe1, 0xab, 0x83, 0x8e, 0x7d, 0xab, 0xf5, 0xea, 0x77, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x06, 0x4f, 0x07, 0x5d, 0x41, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_items.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_items.pb.go new file mode 100644 index 0000000..6b19515 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_items.pb.go @@ -0,0 +1,605 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/data_items.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A representation of an image. +// Only images up to 30MB in size are supported. +type Image struct { + // Input only. The data representing the image. + // For Predict calls [image_bytes][] must be set, as other options are not + // currently supported by prediction API. You can read the contents of an + // uploaded image by using the [content_uri][] field. + // + // Types that are valid to be assigned to Data: + // *Image_ImageBytes + // *Image_InputConfig + Data isImage_Data `protobuf_oneof:"data"` + // Output only. HTTP URI to the thumbnail image. 
+ ThumbnailUri string `protobuf:"bytes,4,opt,name=thumbnail_uri,json=thumbnailUri,proto3" json:"thumbnail_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_data_items_5b5af88f35c66f99, []int{0} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +type isImage_Data interface { + isImage_Data() +} + +type Image_ImageBytes struct { + ImageBytes []byte `protobuf:"bytes,1,opt,name=image_bytes,json=imageBytes,proto3,oneof"` +} + +type Image_InputConfig struct { + InputConfig *InputConfig `protobuf:"bytes,6,opt,name=input_config,json=inputConfig,proto3,oneof"` +} + +func (*Image_ImageBytes) isImage_Data() {} + +func (*Image_InputConfig) isImage_Data() {} + +func (m *Image) GetData() isImage_Data { + if m != nil { + return m.Data + } + return nil +} + +func (m *Image) GetImageBytes() []byte { + if x, ok := m.GetData().(*Image_ImageBytes); ok { + return x.ImageBytes + } + return nil +} + +func (m *Image) GetInputConfig() *InputConfig { + if x, ok := m.GetData().(*Image_InputConfig); ok { + return x.InputConfig + } + return nil +} + +func (m *Image) GetThumbnailUri() string { + if m != nil { + return m.ThumbnailUri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Image) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Image_OneofMarshaler, _Image_OneofUnmarshaler, _Image_OneofSizer, []interface{}{ + (*Image_ImageBytes)(nil), + (*Image_InputConfig)(nil), + } +} + +func _Image_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Image) + // data + switch x := m.Data.(type) { + case *Image_ImageBytes: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ImageBytes) + case *Image_InputConfig: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InputConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Image.Data has unexpected type %T", x) + } + return nil +} + +func _Image_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Image) + switch tag { + case 1: // data.image_bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Data = &Image_ImageBytes{x} + return true, err + case 6: // data.input_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InputConfig) + err := b.DecodeMessage(msg) + m.Data = &Image_InputConfig{msg} + return true, err + default: + return false, nil + } +} + +func _Image_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Image) + // data + switch x := m.Data.(type) { + case *Image_ImageBytes: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ImageBytes))) + n += len(x.ImageBytes) + case *Image_InputConfig: + s := proto.Size(x.InputConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A representation of a text snippet. +type TextSnippet struct { + // Required. The content of the text snippet as a string. Up to 250000 + // characters long. + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // The format of the source text. Currently the only two allowed values are + // "text/html" and "text/plain". If left blank the format is automatically + // determined from the type of the uploaded content. + MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Output only. HTTP URI where you can download the content. 
+ ContentUri string `protobuf:"bytes,4,opt,name=content_uri,json=contentUri,proto3" json:"content_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSnippet) Reset() { *m = TextSnippet{} } +func (m *TextSnippet) String() string { return proto.CompactTextString(m) } +func (*TextSnippet) ProtoMessage() {} +func (*TextSnippet) Descriptor() ([]byte, []int) { + return fileDescriptor_data_items_5b5af88f35c66f99, []int{1} +} +func (m *TextSnippet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSnippet.Unmarshal(m, b) +} +func (m *TextSnippet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSnippet.Marshal(b, m, deterministic) +} +func (dst *TextSnippet) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSnippet.Merge(dst, src) +} +func (m *TextSnippet) XXX_Size() int { + return xxx_messageInfo_TextSnippet.Size(m) +} +func (m *TextSnippet) XXX_DiscardUnknown() { + xxx_messageInfo_TextSnippet.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSnippet proto.InternalMessageInfo + +func (m *TextSnippet) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *TextSnippet) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *TextSnippet) GetContentUri() string { + if m != nil { + return m.ContentUri + } + return "" +} + +// A structured text document e.g. a PDF. +type Document struct { + // An input config specifying the content of the document. + InputConfig *DocumentInputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_data_items_5b5af88f35c66f99, []int{2} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetInputConfig() *DocumentInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// A representation of a row in a relational table. +type Row struct { + // The resource IDs of the column specs describing the columns of the row. + // If set must contain, but possibly in a different order, all input feature + // + // [column_spec_ids][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] + // of the Model this row is being passed to. + // Note: The below `values` field must match order of this field, if this + // field is set. + ColumnSpecIds []string `protobuf:"bytes,2,rep,name=column_spec_ids,json=columnSpecIds,proto3" json:"column_spec_ids,omitempty"` + // Required. 
The values of the row cells, given in the same order as the + // column_spec_ids, or, if not set, then in the same order as input feature + // + // [column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] + // of the Model this row is being passed to. + Values []*_struct.Value `protobuf:"bytes,3,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Row) Reset() { *m = Row{} } +func (m *Row) String() string { return proto.CompactTextString(m) } +func (*Row) ProtoMessage() {} +func (*Row) Descriptor() ([]byte, []int) { + return fileDescriptor_data_items_5b5af88f35c66f99, []int{3} +} +func (m *Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Row.Unmarshal(m, b) +} +func (m *Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Row.Marshal(b, m, deterministic) +} +func (dst *Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_Row.Merge(dst, src) +} +func (m *Row) XXX_Size() int { + return xxx_messageInfo_Row.Size(m) +} +func (m *Row) XXX_DiscardUnknown() { + xxx_messageInfo_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_Row proto.InternalMessageInfo + +func (m *Row) GetColumnSpecIds() []string { + if m != nil { + return m.ColumnSpecIds + } + return nil +} + +func (m *Row) GetValues() []*_struct.Value { + if m != nil { + return m.Values + } + return nil +} + +// Example data used for training or prediction. +type ExamplePayload struct { + // Required. Input only. The example data. + // + // Types that are valid to be assigned to Payload: + // *ExamplePayload_Image + // *ExamplePayload_TextSnippet + // *ExamplePayload_Document + // *ExamplePayload_Row + Payload isExamplePayload_Payload `protobuf_oneof:"payload"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExamplePayload) Reset() { *m = ExamplePayload{} } +func (m *ExamplePayload) String() string { return proto.CompactTextString(m) } +func (*ExamplePayload) ProtoMessage() {} +func (*ExamplePayload) Descriptor() ([]byte, []int) { + return fileDescriptor_data_items_5b5af88f35c66f99, []int{4} +} +func (m *ExamplePayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExamplePayload.Unmarshal(m, b) +} +func (m *ExamplePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExamplePayload.Marshal(b, m, deterministic) +} +func (dst *ExamplePayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExamplePayload.Merge(dst, src) +} +func (m *ExamplePayload) XXX_Size() int { + return xxx_messageInfo_ExamplePayload.Size(m) +} +func (m *ExamplePayload) XXX_DiscardUnknown() { + xxx_messageInfo_ExamplePayload.DiscardUnknown(m) +} + +var xxx_messageInfo_ExamplePayload proto.InternalMessageInfo + +type isExamplePayload_Payload interface { + isExamplePayload_Payload() +} + +type ExamplePayload_Image struct { + Image *Image `protobuf:"bytes,1,opt,name=image,proto3,oneof"` +} + +type ExamplePayload_TextSnippet struct { + TextSnippet *TextSnippet `protobuf:"bytes,2,opt,name=text_snippet,json=textSnippet,proto3,oneof"` +} + +type ExamplePayload_Document struct { + Document *Document `protobuf:"bytes,4,opt,name=document,proto3,oneof"` +} + +type ExamplePayload_Row struct { + Row *Row `protobuf:"bytes,3,opt,name=row,proto3,oneof"` +} + +func (*ExamplePayload_Image) isExamplePayload_Payload() {} + +func (*ExamplePayload_TextSnippet) 
isExamplePayload_Payload() {} + +func (*ExamplePayload_Document) isExamplePayload_Payload() {} + +func (*ExamplePayload_Row) isExamplePayload_Payload() {} + +func (m *ExamplePayload) GetPayload() isExamplePayload_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *ExamplePayload) GetImage() *Image { + if x, ok := m.GetPayload().(*ExamplePayload_Image); ok { + return x.Image + } + return nil +} + +func (m *ExamplePayload) GetTextSnippet() *TextSnippet { + if x, ok := m.GetPayload().(*ExamplePayload_TextSnippet); ok { + return x.TextSnippet + } + return nil +} + +func (m *ExamplePayload) GetDocument() *Document { + if x, ok := m.GetPayload().(*ExamplePayload_Document); ok { + return x.Document + } + return nil +} + +func (m *ExamplePayload) GetRow() *Row { + if x, ok := m.GetPayload().(*ExamplePayload_Row); ok { + return x.Row + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExamplePayload) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExamplePayload_OneofMarshaler, _ExamplePayload_OneofUnmarshaler, _ExamplePayload_OneofSizer, []interface{}{ + (*ExamplePayload_Image)(nil), + (*ExamplePayload_TextSnippet)(nil), + (*ExamplePayload_Document)(nil), + (*ExamplePayload_Row)(nil), + } +} + +func _ExamplePayload_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExamplePayload) + // payload + switch x := m.Payload.(type) { + case *ExamplePayload_Image: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Image); err != nil { + return err + } + case *ExamplePayload_TextSnippet: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextSnippet); err != nil { + return err + } + case *ExamplePayload_Document: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Document); err != nil { + return err + } + case *ExamplePayload_Row: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Row); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExamplePayload.Payload has unexpected type %T", x) + } + return nil +} + +func _ExamplePayload_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExamplePayload) + switch tag { + case 1: // payload.image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Image) + err := b.DecodeMessage(msg) + m.Payload = &ExamplePayload_Image{msg} + return true, err + case 2: // payload.text_snippet + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextSnippet) + err := b.DecodeMessage(msg) + m.Payload = &ExamplePayload_TextSnippet{msg} + return true, err + case 4: // payload.document + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Document) + err := b.DecodeMessage(msg) + m.Payload = &ExamplePayload_Document{msg} + return true, err + case 3: // payload.row + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Row) + err := b.DecodeMessage(msg) + m.Payload = &ExamplePayload_Row{msg} + return true, err + default: + return false, nil + } +} + +func _ExamplePayload_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExamplePayload) + // payload + switch x := m.Payload.(type) { + case *ExamplePayload_Image: + s := proto.Size(x.Image) + n += 1 // tag and 
wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExamplePayload_TextSnippet: + s := proto.Size(x.TextSnippet) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExamplePayload_Document: + s := proto.Size(x.Document) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExamplePayload_Row: + s := proto.Size(x.Row) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Image)(nil), "google.cloud.automl.v1beta1.Image") + proto.RegisterType((*TextSnippet)(nil), "google.cloud.automl.v1beta1.TextSnippet") + proto.RegisterType((*Document)(nil), "google.cloud.automl.v1beta1.Document") + proto.RegisterType((*Row)(nil), "google.cloud.automl.v1beta1.Row") + proto.RegisterType((*ExamplePayload)(nil), "google.cloud.automl.v1beta1.ExamplePayload") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/data_items.proto", fileDescriptor_data_items_5b5af88f35c66f99) +} + +var fileDescriptor_data_items_5b5af88f35c66f99 = []byte{ + // 559 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0xcf, 0x6f, 0xd3, 0x30, + 0x14, 0x6e, 0x9a, 0xad, 0x5b, 0x9d, 0x0d, 0xa4, 0x1c, 0x50, 0xd8, 0x26, 0x16, 0xc2, 0x0f, 0xe5, + 0x80, 0x12, 0x56, 0x38, 0xc1, 0x69, 0x2d, 0x88, 0xf6, 0x30, 0x69, 0x72, 0xb7, 0x1d, 0x50, 0x51, + 0xe4, 0x26, 0x5e, 0xb0, 0x94, 0xd8, 0x56, 0x62, 0xaf, 0xed, 0x9d, 0x7f, 0x84, 0x0b, 0x7f, 0x0c, + 0x7f, 0x15, 0xb2, 0xe3, 0x8e, 0x52, 0xa1, 0xc0, 0x2d, 0xef, 0x7d, 0xdf, 0xf7, 0x9e, 0xdf, 0xf7, + 0x29, 0xe0, 0x55, 0xce, 0x58, 0x5e, 0xe0, 0x38, 0x2d, 0x98, 0xcc, 0x62, 0x24, 0x05, 0x2b, 0x8b, + 0xf8, 0xee, 0x6c, 0x8e, 0x05, 0x3a, 0x8b, 0x33, 0x24, 0x50, 0x42, 0x04, 0x2e, 0xeb, 0x88, 0x57, + 0x4c, 0x30, 0xf7, 0xb8, 0x61, 0x47, 0x9a, 0x1d, 0x35, 0xec, 0xc8, 0xb0, 0x8f, 0x4e, 0xcc, 0x28, + 0xc4, 0x49, 0x8c, 0x28, 0x65, 0x02, 0x09, 0xc2, 0xa8, 0x91, 0x1e, 0x3d, 0x6f, 0x5b, 0x44, 0x98, + 0x61, 0x3d, 0x36, 0x2c, 0x5d, 0xcd, 0xe5, 0x6d, 0x8c, 0xe8, 0xca, 0x40, 0x4f, 0xb6, 0xa1, 0x4c, + 0x56, 0x7a, 0x83, 0xc1, 0x4f, 0xb6, 0xf1, 0x5a, 0x54, 0x32, 0x15, 0x0d, 0x1a, 0xfc, 0xb0, 0xc0, + 0xee, 0xa4, 0x44, 0x39, 0x76, 0x9f, 0x02, 0x87, 0xa8, 0x8f, 0x64, 0xbe, 0x12, 0xb8, 0xf6, 0x2c, + 0xdf, 0x0a, 0x0f, 0xc6, 0x1d, 0x08, 0x74, 0x73, 0xa8, 0x7a, 0xee, 0x05, 0x38, 0x20, 0x94, 0x4b, + 0x91, 0xa4, 0x8c, 0xde, 0x92, 0xdc, 0xeb, 0xf9, 0x56, 0xe8, 0x0c, 0xc2, 0xa8, 0xe5, 0xfa, 0x68, + 0xa2, 0x04, 0x23, 0xcd, 0x1f, 0x77, 0xa0, 0x43, 0x7e, 0x97, 0xee, 0x33, 0x70, 0x28, 0xbe, 0xca, + 0x72, 0x4e, 0x11, 0x29, 0x12, 0x59, 0x11, 0x6f, 0xc7, 0xb7, 0xc2, 0x3e, 0x3c, 0xb8, 0x6f, 0x5e, + 0x57, 0x64, 0xd8, 0x03, 0x3b, 0xca, 0xee, 0x00, 0x03, 0xe7, 0x0a, 0x2f, 0xc5, 0x94, 0x12, 0xce, + 0xb1, 0x70, 0x3d, 0xb0, 0x97, 0x32, 0x2a, 0x30, 0x15, 0xfa, 0xa5, 0x7d, 0xb8, 0x2e, 0xdd, 0x63, + 0xd0, 0x2f, 0x49, 0x89, 0x13, 0xb1, 0xe2, 0xd8, 0xeb, 0x6a, 0x6c, 0x5f, 0x35, 0xae, 0x56, 0x1c, + 0xbb, 0xa7, 0xc0, 0x31, 0xbc, 0x8d, 0x85, 0xc0, 0xb4, 0xae, 0x2b, 0x12, 0x24, 0x60, 0xff, 0x03, + 0x4b, 0x65, 0xa9, 0x26, 0x4d, 0xb7, 0xce, 0xb5, 0xf4, 0xb9, 0xaf, 0x5b, 0xcf, 0x5d, 0x8b, 0x37, + 0xce, 0xfe, 0xe3, 0xe8, 0xe0, 0x0b, 0xb0, 0x21, 0x5b, 0xb8, 0x2f, 0xc1, 0xc3, 0x94, 0x15, 0xb2, + 0xa4, 0x49, 0xcd, 0x71, 0x9a, 0x90, 0xac, 0xf6, 0xba, 0xbe, 0x1d, 0xf6, 0xe1, 0x61, 0xd3, 0x9e, + 0x72, 0x9c, 0x4e, 0xb2, 0xda, 0x8d, 0x40, 0xef, 0x0e, 0x15, 0x12, 0xd7, 0x9e, 0xed, 0xdb, 0xa1, 
+ 0x33, 0x78, 0xb4, 0xde, 0xbe, 0x8e, 0x33, 0xba, 0x51, 0x30, 0x34, 0xac, 0xe0, 0x7b, 0x17, 0x3c, + 0xf8, 0xb8, 0x44, 0x25, 0x2f, 0xf0, 0x25, 0x5a, 0x15, 0x0c, 0x65, 0xee, 0x3b, 0xb0, 0xab, 0x33, + 0x34, 0xef, 0x0f, 0xda, 0xe3, 0x52, 0xcc, 0x71, 0x07, 0x36, 0x12, 0x95, 0xb8, 0xc0, 0x4b, 0x91, + 0xd4, 0x8d, 0xed, 0xda, 0xcf, 0x7f, 0x25, 0xbe, 0x11, 0x93, 0x4a, 0x5c, 0x6c, 0xa4, 0x36, 0x02, + 0xfb, 0x99, 0x31, 0x48, 0x7b, 0xef, 0x0c, 0x5e, 0xfc, 0x97, 0x9b, 0xe3, 0x0e, 0xbc, 0x17, 0xba, + 0x6f, 0x81, 0x5d, 0xb1, 0x85, 0x67, 0x6b, 0xbd, 0xdf, 0xaa, 0x87, 0x6c, 0x31, 0xee, 0x40, 0x45, + 0x1f, 0xf6, 0xc1, 0x1e, 0x6f, 0x0c, 0x19, 0x7e, 0xb3, 0xc0, 0x69, 0xca, 0xca, 0x36, 0xe5, 0xa5, + 0xf5, 0xf9, 0xdc, 0xc0, 0x39, 0x2b, 0x10, 0xcd, 0x23, 0x56, 0xe5, 0x71, 0x8e, 0xa9, 0xb6, 0x3d, + 0x6e, 0x20, 0xc4, 0x49, 0xfd, 0xd7, 0xff, 0xf6, 0x7d, 0x53, 0xfe, 0xec, 0x1e, 0x7f, 0xd2, 0xc4, + 0xd9, 0x48, 0x91, 0x66, 0xe7, 0x52, 0xb0, 0x8b, 0x62, 0x76, 0xd3, 0x90, 0xe6, 0x3d, 0x3d, 0xeb, + 0xcd, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x33, 0xc9, 0x61, 0x25, 0x6b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_stats.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_stats.pb.go new file mode 100644 index 0000000..a7c46bb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_stats.pb.go @@ -0,0 +1,920 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/data_stats.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The data statistics of a series of values that share the same DataType. +type DataStats struct { + // The data statistics specific to a DataType. + // + // Types that are valid to be assigned to Stats: + // *DataStats_Float64Stats + // *DataStats_StringStats + // *DataStats_TimestampStats + // *DataStats_ArrayStats + // *DataStats_StructStats + // *DataStats_CategoryStats + Stats isDataStats_Stats `protobuf_oneof:"stats"` + // The number of distinct values. + DistinctValueCount int64 `protobuf:"varint,1,opt,name=distinct_value_count,json=distinctValueCount,proto3" json:"distinct_value_count,omitempty"` + // The number of values that are null. 
+ NullValueCount int64 `protobuf:"varint,2,opt,name=null_value_count,json=nullValueCount,proto3" json:"null_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataStats) Reset() { *m = DataStats{} } +func (m *DataStats) String() string { return proto.CompactTextString(m) } +func (*DataStats) ProtoMessage() {} +func (*DataStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{0} +} +func (m *DataStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataStats.Unmarshal(m, b) +} +func (m *DataStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataStats.Marshal(b, m, deterministic) +} +func (dst *DataStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataStats.Merge(dst, src) +} +func (m *DataStats) XXX_Size() int { + return xxx_messageInfo_DataStats.Size(m) +} +func (m *DataStats) XXX_DiscardUnknown() { + xxx_messageInfo_DataStats.DiscardUnknown(m) +} + +var xxx_messageInfo_DataStats proto.InternalMessageInfo + +type isDataStats_Stats interface { + isDataStats_Stats() +} + +type DataStats_Float64Stats struct { + Float64Stats *Float64Stats `protobuf:"bytes,3,opt,name=float64_stats,json=float64Stats,proto3,oneof"` +} + +type DataStats_StringStats struct { + StringStats *StringStats `protobuf:"bytes,4,opt,name=string_stats,json=stringStats,proto3,oneof"` +} + +type DataStats_TimestampStats struct { + TimestampStats *TimestampStats `protobuf:"bytes,5,opt,name=timestamp_stats,json=timestampStats,proto3,oneof"` +} + +type DataStats_ArrayStats struct { + ArrayStats *ArrayStats `protobuf:"bytes,6,opt,name=array_stats,json=arrayStats,proto3,oneof"` +} + +type DataStats_StructStats struct { + StructStats *StructStats `protobuf:"bytes,7,opt,name=struct_stats,json=structStats,proto3,oneof"` +} + +type DataStats_CategoryStats struct { + CategoryStats *CategoryStats `protobuf:"bytes,8,opt,name=category_stats,json=categoryStats,proto3,oneof"` +} + +func (*DataStats_Float64Stats) isDataStats_Stats() {} + +func (*DataStats_StringStats) isDataStats_Stats() {} + +func (*DataStats_TimestampStats) isDataStats_Stats() {} + +func (*DataStats_ArrayStats) isDataStats_Stats() {} + +func (*DataStats_StructStats) isDataStats_Stats() {} + +func (*DataStats_CategoryStats) isDataStats_Stats() {} + +func (m *DataStats) GetStats() isDataStats_Stats { + if m != nil { + return m.Stats + } + return nil +} + +func (m *DataStats) GetFloat64Stats() *Float64Stats { + if x, ok := m.GetStats().(*DataStats_Float64Stats); ok { + return x.Float64Stats + } + return nil +} + +func (m *DataStats) GetStringStats() *StringStats { + if x, ok := m.GetStats().(*DataStats_StringStats); ok { + return x.StringStats + } + return nil +} + +func (m *DataStats) GetTimestampStats() *TimestampStats { + if x, ok := m.GetStats().(*DataStats_TimestampStats); ok { + return x.TimestampStats + } + return nil +} + +func (m *DataStats) GetArrayStats() *ArrayStats { + if x, ok := m.GetStats().(*DataStats_ArrayStats); ok { + return x.ArrayStats + } + return nil +} + +func (m *DataStats) GetStructStats() *StructStats { + if x, ok := m.GetStats().(*DataStats_StructStats); ok { + return x.StructStats + } + return nil +} + +func (m *DataStats) GetCategoryStats() *CategoryStats { + if x, ok := m.GetStats().(*DataStats_CategoryStats); ok { + return x.CategoryStats + } + return nil +} + +func (m *DataStats) GetDistinctValueCount() int64 { + if m != nil { + return 
m.DistinctValueCount + } + return 0 +} + +func (m *DataStats) GetNullValueCount() int64 { + if m != nil { + return m.NullValueCount + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*DataStats) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DataStats_OneofMarshaler, _DataStats_OneofUnmarshaler, _DataStats_OneofSizer, []interface{}{ + (*DataStats_Float64Stats)(nil), + (*DataStats_StringStats)(nil), + (*DataStats_TimestampStats)(nil), + (*DataStats_ArrayStats)(nil), + (*DataStats_StructStats)(nil), + (*DataStats_CategoryStats)(nil), + } +} + +func _DataStats_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DataStats) + // stats + switch x := m.Stats.(type) { + case *DataStats_Float64Stats: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Float64Stats); err != nil { + return err + } + case *DataStats_StringStats: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StringStats); err != nil { + return err + } + case *DataStats_TimestampStats: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampStats); err != nil { + return err + } + case *DataStats_ArrayStats: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayStats); err != nil { + return err + } + case *DataStats_StructStats: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructStats); err != nil { + return err + } + case *DataStats_CategoryStats: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CategoryStats); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DataStats.Stats has unexpected type %T", x) + } + return nil +} + +func _DataStats_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DataStats) + switch tag { + case 3: // stats.float64_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Float64Stats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_Float64Stats{msg} + return true, err + case 4: // stats.string_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StringStats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_StringStats{msg} + return true, err + case 5: // stats.timestamp_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TimestampStats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_TimestampStats{msg} + return true, err + case 6: // stats.array_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayStats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_ArrayStats{msg} + return true, err + case 7: // stats.struct_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructStats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_StructStats{msg} + return true, err + case 8: // stats.category_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CategoryStats) + err := b.DecodeMessage(msg) + m.Stats = &DataStats_CategoryStats{msg} + return true, err + default: + return false, nil + } +} + +func _DataStats_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DataStats) + // stats + switch x := m.Stats.(type) { + 
case *DataStats_Float64Stats: + s := proto.Size(x.Float64Stats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataStats_StringStats: + s := proto.Size(x.StringStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataStats_TimestampStats: + s := proto.Size(x.TimestampStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataStats_ArrayStats: + s := proto.Size(x.ArrayStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataStats_StructStats: + s := proto.Size(x.StructStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataStats_CategoryStats: + s := proto.Size(x.CategoryStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The data statistics of a series of FLOAT64 values. +type Float64Stats struct { + // The mean of the series. + Mean float64 `protobuf:"fixed64,1,opt,name=mean,proto3" json:"mean,omitempty"` + // The standard deviation of the series. + StandardDeviation float64 `protobuf:"fixed64,2,opt,name=standard_deviation,json=standardDeviation,proto3" json:"standard_deviation,omitempty"` + // Ordered from 0 to k k-quantile values of the data series of n values. + // The value at index i is, approximately, the i*n/k-th smallest value in the + // series; for i = 0 and i = k these are, respectively, the min and max + // values. + Quantiles []float64 `protobuf:"fixed64,3,rep,packed,name=quantiles,proto3" json:"quantiles,omitempty"` + // Histogram buckets of the data series. Sorted by the min value of the + // bucket, ascendingly, and the number of the buckets is dynamically + // generated. The buckets are non-overlapping and completely cover whole + // FLOAT64 range with min of first bucket being `"-Infinity"`, and max of + // the last one being `"Infinity"`. 
+ HistogramBuckets []*Float64Stats_HistogramBucket `protobuf:"bytes,4,rep,name=histogram_buckets,json=histogramBuckets,proto3" json:"histogram_buckets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Float64Stats) Reset() { *m = Float64Stats{} } +func (m *Float64Stats) String() string { return proto.CompactTextString(m) } +func (*Float64Stats) ProtoMessage() {} +func (*Float64Stats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{1} +} +func (m *Float64Stats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Float64Stats.Unmarshal(m, b) +} +func (m *Float64Stats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Float64Stats.Marshal(b, m, deterministic) +} +func (dst *Float64Stats) XXX_Merge(src proto.Message) { + xxx_messageInfo_Float64Stats.Merge(dst, src) +} +func (m *Float64Stats) XXX_Size() int { + return xxx_messageInfo_Float64Stats.Size(m) +} +func (m *Float64Stats) XXX_DiscardUnknown() { + xxx_messageInfo_Float64Stats.DiscardUnknown(m) +} + +var xxx_messageInfo_Float64Stats proto.InternalMessageInfo + +func (m *Float64Stats) GetMean() float64 { + if m != nil { + return m.Mean + } + return 0 +} + +func (m *Float64Stats) GetStandardDeviation() float64 { + if m != nil { + return m.StandardDeviation + } + return 0 +} + +func (m *Float64Stats) GetQuantiles() []float64 { + if m != nil { + return m.Quantiles + } + return nil +} + +func (m *Float64Stats) GetHistogramBuckets() []*Float64Stats_HistogramBucket { + if m != nil { + return m.HistogramBuckets + } + return nil +} + +// A bucket of a histogram. +type Float64Stats_HistogramBucket struct { + // The minimum value of the bucket, inclusive. + Min float64 `protobuf:"fixed64,1,opt,name=min,proto3" json:"min,omitempty"` + // The maximum value of the bucket, exclusive unless max = `"Infinity"`, in + // which case it's inclusive. + Max float64 `protobuf:"fixed64,2,opt,name=max,proto3" json:"max,omitempty"` + // The number of data values that are in the bucket, i.e. are between + // min and max values. 
+ Count int64 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Float64Stats_HistogramBucket) Reset() { *m = Float64Stats_HistogramBucket{} } +func (m *Float64Stats_HistogramBucket) String() string { return proto.CompactTextString(m) } +func (*Float64Stats_HistogramBucket) ProtoMessage() {} +func (*Float64Stats_HistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{1, 0} +} +func (m *Float64Stats_HistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Float64Stats_HistogramBucket.Unmarshal(m, b) +} +func (m *Float64Stats_HistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Float64Stats_HistogramBucket.Marshal(b, m, deterministic) +} +func (dst *Float64Stats_HistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_Float64Stats_HistogramBucket.Merge(dst, src) +} +func (m *Float64Stats_HistogramBucket) XXX_Size() int { + return xxx_messageInfo_Float64Stats_HistogramBucket.Size(m) +} +func (m *Float64Stats_HistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_Float64Stats_HistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_Float64Stats_HistogramBucket proto.InternalMessageInfo + +func (m *Float64Stats_HistogramBucket) GetMin() float64 { + if m != nil { + return m.Min + } + return 0 +} + +func (m *Float64Stats_HistogramBucket) GetMax() float64 { + if m != nil { + return m.Max + } + return 0 +} + +func (m *Float64Stats_HistogramBucket) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// The data statistics of a series of STRING values. +type StringStats struct { + // The statistics of the top 20 unigrams, ordered by + // [count][google.cloud.automl.v1beta1.StringStats.UnigramStats.count]. + TopUnigramStats []*StringStats_UnigramStats `protobuf:"bytes,1,rep,name=top_unigram_stats,json=topUnigramStats,proto3" json:"top_unigram_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringStats) Reset() { *m = StringStats{} } +func (m *StringStats) String() string { return proto.CompactTextString(m) } +func (*StringStats) ProtoMessage() {} +func (*StringStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{2} +} +func (m *StringStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringStats.Unmarshal(m, b) +} +func (m *StringStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringStats.Marshal(b, m, deterministic) +} +func (dst *StringStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringStats.Merge(dst, src) +} +func (m *StringStats) XXX_Size() int { + return xxx_messageInfo_StringStats.Size(m) +} +func (m *StringStats) XXX_DiscardUnknown() { + xxx_messageInfo_StringStats.DiscardUnknown(m) +} + +var xxx_messageInfo_StringStats proto.InternalMessageInfo + +func (m *StringStats) GetTopUnigramStats() []*StringStats_UnigramStats { + if m != nil { + return m.TopUnigramStats + } + return nil +} + +// The statistics of a unigram. +type StringStats_UnigramStats struct { + // The unigram. + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // The number of occurrences of this unigram in the series. 
+ Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringStats_UnigramStats) Reset() { *m = StringStats_UnigramStats{} } +func (m *StringStats_UnigramStats) String() string { return proto.CompactTextString(m) } +func (*StringStats_UnigramStats) ProtoMessage() {} +func (*StringStats_UnigramStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{2, 0} +} +func (m *StringStats_UnigramStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringStats_UnigramStats.Unmarshal(m, b) +} +func (m *StringStats_UnigramStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringStats_UnigramStats.Marshal(b, m, deterministic) +} +func (dst *StringStats_UnigramStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringStats_UnigramStats.Merge(dst, src) +} +func (m *StringStats_UnigramStats) XXX_Size() int { + return xxx_messageInfo_StringStats_UnigramStats.Size(m) +} +func (m *StringStats_UnigramStats) XXX_DiscardUnknown() { + xxx_messageInfo_StringStats_UnigramStats.DiscardUnknown(m) +} + +var xxx_messageInfo_StringStats_UnigramStats proto.InternalMessageInfo + +func (m *StringStats_UnigramStats) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *StringStats_UnigramStats) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// The data statistics of a series of TIMESTAMP values. +type TimestampStats struct { + // The string key is the pre-defined granularity. Currently supported: + // hour_of_day, day_of_week, month_of_year. + // Granularities finer that the granularity of timestamp data are not + // populated (e.g. if timestamps are at day granularity, then hour_of_day + // is not populated). + GranularStats map[string]*TimestampStats_GranularStats `protobuf:"bytes,1,rep,name=granular_stats,json=granularStats,proto3" json:"granular_stats,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampStats) Reset() { *m = TimestampStats{} } +func (m *TimestampStats) String() string { return proto.CompactTextString(m) } +func (*TimestampStats) ProtoMessage() {} +func (*TimestampStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{3} +} +func (m *TimestampStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampStats.Unmarshal(m, b) +} +func (m *TimestampStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampStats.Marshal(b, m, deterministic) +} +func (dst *TimestampStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampStats.Merge(dst, src) +} +func (m *TimestampStats) XXX_Size() int { + return xxx_messageInfo_TimestampStats.Size(m) +} +func (m *TimestampStats) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampStats.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampStats proto.InternalMessageInfo + +func (m *TimestampStats) GetGranularStats() map[string]*TimestampStats_GranularStats { + if m != nil { + return m.GranularStats + } + return nil +} + +// Stats split by a defined in context granularity. +type TimestampStats_GranularStats struct { + // A map from granularity key to example count for that key. + // E.g. 
for hour_of_day `13` means 1pm, or for month_of_year `5` means May). + Buckets map[int32]int64 `protobuf:"bytes,1,rep,name=buckets,proto3" json:"buckets,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampStats_GranularStats) Reset() { *m = TimestampStats_GranularStats{} } +func (m *TimestampStats_GranularStats) String() string { return proto.CompactTextString(m) } +func (*TimestampStats_GranularStats) ProtoMessage() {} +func (*TimestampStats_GranularStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{3, 0} +} +func (m *TimestampStats_GranularStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampStats_GranularStats.Unmarshal(m, b) +} +func (m *TimestampStats_GranularStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampStats_GranularStats.Marshal(b, m, deterministic) +} +func (dst *TimestampStats_GranularStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampStats_GranularStats.Merge(dst, src) +} +func (m *TimestampStats_GranularStats) XXX_Size() int { + return xxx_messageInfo_TimestampStats_GranularStats.Size(m) +} +func (m *TimestampStats_GranularStats) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampStats_GranularStats.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampStats_GranularStats proto.InternalMessageInfo + +func (m *TimestampStats_GranularStats) GetBuckets() map[int32]int64 { + if m != nil { + return m.Buckets + } + return nil +} + +// The data statistics of a series of ARRAY values. +type ArrayStats struct { + // Stats of all the values of all arrays, as if they were a single long + // series of data. The type depends on the element type of the array. + MemberStats *DataStats `protobuf:"bytes,2,opt,name=member_stats,json=memberStats,proto3" json:"member_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArrayStats) Reset() { *m = ArrayStats{} } +func (m *ArrayStats) String() string { return proto.CompactTextString(m) } +func (*ArrayStats) ProtoMessage() {} +func (*ArrayStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{4} +} +func (m *ArrayStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArrayStats.Unmarshal(m, b) +} +func (m *ArrayStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArrayStats.Marshal(b, m, deterministic) +} +func (dst *ArrayStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArrayStats.Merge(dst, src) +} +func (m *ArrayStats) XXX_Size() int { + return xxx_messageInfo_ArrayStats.Size(m) +} +func (m *ArrayStats) XXX_DiscardUnknown() { + xxx_messageInfo_ArrayStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ArrayStats proto.InternalMessageInfo + +func (m *ArrayStats) GetMemberStats() *DataStats { + if m != nil { + return m.MemberStats + } + return nil +} + +// The data statistics of a series of STRUCT values. +type StructStats struct { + // Map from a field name of the struct to data stats aggregated over series + // of all data in that field across all the structs. 
+ FieldStats map[string]*DataStats `protobuf:"bytes,1,rep,name=field_stats,json=fieldStats,proto3" json:"field_stats,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructStats) Reset() { *m = StructStats{} } +func (m *StructStats) String() string { return proto.CompactTextString(m) } +func (*StructStats) ProtoMessage() {} +func (*StructStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{5} +} +func (m *StructStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructStats.Unmarshal(m, b) +} +func (m *StructStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructStats.Marshal(b, m, deterministic) +} +func (dst *StructStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructStats.Merge(dst, src) +} +func (m *StructStats) XXX_Size() int { + return xxx_messageInfo_StructStats.Size(m) +} +func (m *StructStats) XXX_DiscardUnknown() { + xxx_messageInfo_StructStats.DiscardUnknown(m) +} + +var xxx_messageInfo_StructStats proto.InternalMessageInfo + +func (m *StructStats) GetFieldStats() map[string]*DataStats { + if m != nil { + return m.FieldStats + } + return nil +} + +// The data statistics of a series of CATEGORY values. +type CategoryStats struct { + // The statistics of the top 20 CATEGORY values, ordered by + // + // [count][google.cloud.automl.v1beta1.CategoryStats.SingleCategoryStats.count]. + TopCategoryStats []*CategoryStats_SingleCategoryStats `protobuf:"bytes,1,rep,name=top_category_stats,json=topCategoryStats,proto3" json:"top_category_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CategoryStats) Reset() { *m = CategoryStats{} } +func (m *CategoryStats) String() string { return proto.CompactTextString(m) } +func (*CategoryStats) ProtoMessage() {} +func (*CategoryStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{6} +} +func (m *CategoryStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CategoryStats.Unmarshal(m, b) +} +func (m *CategoryStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CategoryStats.Marshal(b, m, deterministic) +} +func (dst *CategoryStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_CategoryStats.Merge(dst, src) +} +func (m *CategoryStats) XXX_Size() int { + return xxx_messageInfo_CategoryStats.Size(m) +} +func (m *CategoryStats) XXX_DiscardUnknown() { + xxx_messageInfo_CategoryStats.DiscardUnknown(m) +} + +var xxx_messageInfo_CategoryStats proto.InternalMessageInfo + +func (m *CategoryStats) GetTopCategoryStats() []*CategoryStats_SingleCategoryStats { + if m != nil { + return m.TopCategoryStats + } + return nil +} + +// The statistics of a single CATEGORY value. +type CategoryStats_SingleCategoryStats struct { + // The CATEGORY value. + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // The number of occurrences of this value in the series. 
+ Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CategoryStats_SingleCategoryStats) Reset() { *m = CategoryStats_SingleCategoryStats{} } +func (m *CategoryStats_SingleCategoryStats) String() string { return proto.CompactTextString(m) } +func (*CategoryStats_SingleCategoryStats) ProtoMessage() {} +func (*CategoryStats_SingleCategoryStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{6, 0} +} +func (m *CategoryStats_SingleCategoryStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CategoryStats_SingleCategoryStats.Unmarshal(m, b) +} +func (m *CategoryStats_SingleCategoryStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CategoryStats_SingleCategoryStats.Marshal(b, m, deterministic) +} +func (dst *CategoryStats_SingleCategoryStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_CategoryStats_SingleCategoryStats.Merge(dst, src) +} +func (m *CategoryStats_SingleCategoryStats) XXX_Size() int { + return xxx_messageInfo_CategoryStats_SingleCategoryStats.Size(m) +} +func (m *CategoryStats_SingleCategoryStats) XXX_DiscardUnknown() { + xxx_messageInfo_CategoryStats_SingleCategoryStats.DiscardUnknown(m) +} + +var xxx_messageInfo_CategoryStats_SingleCategoryStats proto.InternalMessageInfo + +func (m *CategoryStats_SingleCategoryStats) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *CategoryStats_SingleCategoryStats) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// A correlation statistics between two series of DataType values. The series +// may have differing DataType-s, but within a single series the DataType must +// be the same. +type CorrelationStats struct { + // The correlation value using the Cramer's V measure. 
+ CramersV float64 `protobuf:"fixed64,1,opt,name=cramers_v,json=cramersV,proto3" json:"cramers_v,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CorrelationStats) Reset() { *m = CorrelationStats{} } +func (m *CorrelationStats) String() string { return proto.CompactTextString(m) } +func (*CorrelationStats) ProtoMessage() {} +func (*CorrelationStats) Descriptor() ([]byte, []int) { + return fileDescriptor_data_stats_e2571ab261ca3ffb, []int{7} +} +func (m *CorrelationStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CorrelationStats.Unmarshal(m, b) +} +func (m *CorrelationStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CorrelationStats.Marshal(b, m, deterministic) +} +func (dst *CorrelationStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_CorrelationStats.Merge(dst, src) +} +func (m *CorrelationStats) XXX_Size() int { + return xxx_messageInfo_CorrelationStats.Size(m) +} +func (m *CorrelationStats) XXX_DiscardUnknown() { + xxx_messageInfo_CorrelationStats.DiscardUnknown(m) +} + +var xxx_messageInfo_CorrelationStats proto.InternalMessageInfo + +func (m *CorrelationStats) GetCramersV() float64 { + if m != nil { + return m.CramersV + } + return 0 +} + +func init() { + proto.RegisterType((*DataStats)(nil), "google.cloud.automl.v1beta1.DataStats") + proto.RegisterType((*Float64Stats)(nil), "google.cloud.automl.v1beta1.Float64Stats") + proto.RegisterType((*Float64Stats_HistogramBucket)(nil), "google.cloud.automl.v1beta1.Float64Stats.HistogramBucket") + proto.RegisterType((*StringStats)(nil), "google.cloud.automl.v1beta1.StringStats") + proto.RegisterType((*StringStats_UnigramStats)(nil), "google.cloud.automl.v1beta1.StringStats.UnigramStats") + proto.RegisterType((*TimestampStats)(nil), "google.cloud.automl.v1beta1.TimestampStats") + proto.RegisterMapType((map[string]*TimestampStats_GranularStats)(nil), "google.cloud.automl.v1beta1.TimestampStats.GranularStatsEntry") + proto.RegisterType((*TimestampStats_GranularStats)(nil), "google.cloud.automl.v1beta1.TimestampStats.GranularStats") + proto.RegisterMapType((map[int32]int64)(nil), "google.cloud.automl.v1beta1.TimestampStats.GranularStats.BucketsEntry") + proto.RegisterType((*ArrayStats)(nil), "google.cloud.automl.v1beta1.ArrayStats") + proto.RegisterType((*StructStats)(nil), "google.cloud.automl.v1beta1.StructStats") + proto.RegisterMapType((map[string]*DataStats)(nil), "google.cloud.automl.v1beta1.StructStats.FieldStatsEntry") + proto.RegisterType((*CategoryStats)(nil), "google.cloud.automl.v1beta1.CategoryStats") + proto.RegisterType((*CategoryStats_SingleCategoryStats)(nil), "google.cloud.automl.v1beta1.CategoryStats.SingleCategoryStats") + proto.RegisterType((*CorrelationStats)(nil), "google.cloud.automl.v1beta1.CorrelationStats") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/data_stats.proto", fileDescriptor_data_stats_e2571ab261ca3ffb) +} + +var fileDescriptor_data_stats_e2571ab261ca3ffb = []byte{ + // 830 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x96, 0xcb, 0x6f, 0xd3, 0x48, + 0x18, 0xc0, 0xd7, 0x49, 0xd3, 0x36, 0x5f, 0x9e, 0x9d, 0xed, 0xa1, 0x4a, 0x2b, 0x6d, 0x95, 0xc3, + 0x6e, 0xf6, 0x65, 0x6f, 0xbb, 0x80, 0x4a, 0x41, 0x48, 0x69, 0x4a, 0x5b, 0x40, 0x15, 0x95, 0x03, + 0x41, 0xa0, 0x4a, 0x61, 0xe2, 0x4c, 0x5c, 0xab, 0xf6, 0x4c, 0xb0, 0xc7, 0x11, 0x15, 0x57, 0xfe, + 0x94, 0x1e, 0xe1, 0x7f, 0xe0, 0xcc, 
0x85, 0x33, 0xff, 0x0d, 0xf2, 0xcc, 0x38, 0xb1, 0x4b, 0x65, + 0x52, 0x6e, 0xfe, 0x5e, 0xbf, 0x6f, 0xbe, 0xc7, 0x4c, 0x02, 0xff, 0xd8, 0x8c, 0xd9, 0x2e, 0x31, + 0x2c, 0x97, 0x85, 0x43, 0x03, 0x87, 0x9c, 0x79, 0xae, 0x31, 0xd9, 0x1a, 0x10, 0x8e, 0xb7, 0x8c, + 0x21, 0xe6, 0xb8, 0x1f, 0x70, 0xcc, 0x03, 0x7d, 0xec, 0x33, 0xce, 0xd0, 0xba, 0xf4, 0xd6, 0x85, + 0xb7, 0x2e, 0xbd, 0x75, 0xe5, 0xdd, 0xd8, 0x50, 0x28, 0x3c, 0x76, 0x0c, 0x4c, 0x29, 0xe3, 0x98, + 0x3b, 0x8c, 0xaa, 0xd0, 0xe6, 0xd7, 0x05, 0x28, 0xee, 0x63, 0x8e, 0xbb, 0x11, 0x0e, 0x9d, 0x40, + 0x65, 0xe4, 0x32, 0xcc, 0xef, 0xdc, 0x92, 0xfc, 0xb5, 0xfc, 0xa6, 0xd6, 0x2a, 0x6d, 0xff, 0xa9, + 0x67, 0x24, 0xd0, 0x0f, 0x64, 0x84, 0x20, 0x1c, 0xfd, 0x62, 0x96, 0x47, 0x09, 0x19, 0x1d, 0x43, + 0x39, 0xe0, 0xbe, 0x43, 0x6d, 0x05, 0x5c, 0x10, 0xc0, 0x56, 0x26, 0xb0, 0x2b, 0x02, 0x62, 0x5e, + 0x29, 0x98, 0x89, 0xa8, 0x07, 0x35, 0xee, 0x78, 0x24, 0xe0, 0xd8, 0x1b, 0x2b, 0x62, 0x41, 0x10, + 0xff, 0xce, 0x24, 0x3e, 0x8b, 0x63, 0x62, 0x68, 0x95, 0xa7, 0x34, 0xe8, 0x31, 0x94, 0xb0, 0xef, + 0xe3, 0x0b, 0xc5, 0x5c, 0x14, 0xcc, 0x3f, 0x32, 0x99, 0xed, 0xc8, 0x3f, 0xe6, 0x01, 0x9e, 0x4a, + 0xaa, 0xe4, 0xd0, 0xe2, 0x0a, 0xb6, 0x34, 0x5f, 0xc9, 0xa1, 0xc5, 0x93, 0x25, 0xc7, 0x22, 0xea, + 0x42, 0xd5, 0xc2, 0x9c, 0xd8, 0xcc, 0x8f, 0x4f, 0xb7, 0x2c, 0x80, 0x7f, 0x65, 0x02, 0x3b, 0x2a, + 0x24, 0x46, 0x56, 0xac, 0xa4, 0x02, 0xfd, 0x07, 0xab, 0x43, 0x27, 0xe0, 0x0e, 0xb5, 0x78, 0x7f, + 0x82, 0xdd, 0x90, 0xf4, 0x2d, 0x16, 0x52, 0xbe, 0xa6, 0x6d, 0x6a, 0xad, 0xbc, 0x89, 0x62, 0x5b, + 0x2f, 0x32, 0x75, 0x22, 0x0b, 0x6a, 0x41, 0x9d, 0x86, 0xae, 0x9b, 0xf2, 0xce, 0x09, 0xef, 0x6a, + 0xa4, 0x9f, 0x79, 0xee, 0x2d, 0x41, 0x41, 0x9c, 0xb3, 0x79, 0x99, 0x83, 0x72, 0x72, 0x39, 0x10, + 0x82, 0x05, 0x8f, 0x60, 0x2a, 0xb2, 0x68, 0xa6, 0xf8, 0x46, 0xff, 0x02, 0x0a, 0x38, 0xa6, 0x43, + 0xec, 0x0f, 0xfb, 0x43, 0x32, 0x71, 0xc4, 0x76, 0x0a, 0xb2, 0x66, 0xae, 0xc4, 0x96, 0xfd, 0xd8, + 0x80, 0x36, 0xa0, 0xf8, 0x26, 0xc4, 0x94, 0x3b, 0x2e, 0x89, 0xb6, 0x33, 0xdf, 0xd2, 0xcc, 0x99, + 0x02, 0x8d, 0x60, 0xe5, 0xcc, 0x09, 0x38, 0xb3, 0x7d, 0xec, 0xf5, 0x07, 0xa1, 0x75, 0x4e, 0xc4, + 0xca, 0xe5, 0x5b, 0xa5, 0xed, 0xbb, 0x73, 0xef, 0xb0, 0x7e, 0x14, 0x23, 0xf6, 0x04, 0xc1, 0xac, + 0x9f, 0xa5, 0x15, 0x41, 0xe3, 0x09, 0xd4, 0xae, 0x38, 0xa1, 0x3a, 0xe4, 0x3d, 0x27, 0x2e, 0x2d, + 0xfa, 0x14, 0x1a, 0xfc, 0x56, 0x95, 0x12, 0x7d, 0xa2, 0x55, 0x28, 0xc8, 0xc6, 0xe5, 0x45, 0xe3, + 0xa4, 0xd0, 0xfc, 0xa0, 0x41, 0x29, 0xb1, 0xf2, 0x08, 0xc3, 0x0a, 0x67, 0xe3, 0x7e, 0x48, 0x1d, + 0x51, 0x86, 0x9c, 0xb9, 0x26, 0x8a, 0xb8, 0x3d, 0xef, 0xbd, 0xd1, 0x9f, 0xcb, 0x68, 0x21, 0x98, + 0x35, 0xce, 0xc6, 0x49, 0x45, 0x63, 0x17, 0xca, 0x49, 0x39, 0x3a, 0x98, 0x98, 0xab, 0x38, 0x7e, + 0xd1, 0x94, 0xc2, 0xec, 0xb8, 0xb9, 0xe4, 0x71, 0x2f, 0xf3, 0x50, 0x4d, 0xdf, 0x27, 0x44, 0xa0, + 0x6a, 0xfb, 0x98, 0x86, 0x2e, 0xf6, 0x53, 0xc7, 0x7d, 0x70, 0x83, 0x4b, 0xa9, 0x1f, 0x2a, 0x82, + 0x90, 0x1e, 0x52, 0xee, 0x5f, 0x98, 0x15, 0x3b, 0xa9, 0x6b, 0x7c, 0xd4, 0xa0, 0x92, 0xf2, 0x42, + 0xaf, 0x61, 0x29, 0x9e, 0xb2, 0xcc, 0x78, 0xf0, 0xd3, 0x19, 0x75, 0x35, 0x5b, 0x99, 0x39, 0xc6, + 0x46, 0x9d, 0x4a, 0x1a, 0xa2, 0xa1, 0x9e, 0x93, 0x0b, 0xd1, 0xa7, 0x82, 0x19, 0x7d, 0xce, 0x7a, + 0xa7, 0xba, 0x24, 0x84, 0xdd, 0xdc, 0x8e, 0xd6, 0x78, 0x07, 0xe8, 0xfb, 0xa2, 0x92, 0x84, 0xa2, + 0x24, 0x3c, 0x4d, 0x12, 0x7e, 0xb4, 0xa9, 0x59, 0x35, 0x24, 0x92, 0x37, 0x5f, 0x00, 0xcc, 0x5e, + 0x28, 0xf4, 0x08, 0xca, 0x1e, 0xf1, 0x06, 0x24, 0x9e, 0x8f, 0xcc, 0xf4, 0x7b, 0x66, 0xa6, 0xe9, + 0xcf, 0x82, 0x59, 0x92, 0xb1, 0x42, 0x68, 0x7e, 0x91, 0xeb, 
0x3a, 0x7d, 0x9f, 0x5e, 0x42, 0x69, + 0xe4, 0x10, 0x77, 0x98, 0x9a, 0xfc, 0xce, 0xbc, 0xaf, 0x9d, 0x7e, 0x10, 0xc5, 0x26, 0x66, 0x0e, + 0xa3, 0xa9, 0xa2, 0x41, 0xa0, 0x76, 0xc5, 0x7c, 0x4d, 0xf7, 0xee, 0xa7, 0xbb, 0x37, 0x6f, 0x4d, + 0x89, 0x56, 0x7d, 0xd2, 0xa0, 0x92, 0x7a, 0x2f, 0x91, 0x0b, 0x28, 0xba, 0x82, 0x57, 0xde, 0xdd, + 0x79, 0x96, 0x3a, 0xc5, 0xd1, 0xbb, 0x0e, 0xb5, 0x5d, 0x92, 0xd2, 0x99, 0x75, 0xce, 0xc6, 0x29, + 0x4d, 0xa3, 0x0d, 0xbf, 0x5e, 0xe3, 0x78, 0xa3, 0x4b, 0x69, 0x40, 0xbd, 0xc3, 0x7c, 0x9f, 0xb8, + 0xe2, 0x95, 0x94, 0xf1, 0xeb, 0x50, 0xb4, 0x7c, 0xec, 0x11, 0x3f, 0xe8, 0x4f, 0xd4, 0xbb, 0xb4, + 0xac, 0x14, 0xbd, 0xbd, 0xf7, 0x1a, 0xfc, 0x66, 0x31, 0x2f, 0xab, 0x96, 0x13, 0xed, 0x55, 0x5b, + 0x99, 0x6d, 0xe6, 0x62, 0x6a, 0xeb, 0xcc, 0xb7, 0x0d, 0x9b, 0x50, 0xf1, 0xcf, 0xc1, 0x90, 0x26, + 0x3c, 0x76, 0x82, 0x6b, 0xff, 0xa5, 0xdc, 0x93, 0xe2, 0xe7, 0xdc, 0xfa, 0xa1, 0x70, 0x3c, 0xed, + 0x44, 0x4e, 0xa7, 0xed, 0x90, 0xb3, 0x63, 0xf7, 0xb4, 0x27, 0x9d, 0x06, 0x8b, 0x82, 0xf5, 0xff, + 0xb7, 0x00, 0x00, 0x00, 0xff, 0xff, 0x52, 0xf8, 0x53, 0x15, 0xf0, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_types.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_types.pb.go new file mode 100644 index 0000000..bc168e1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/data_types.pb.go @@ -0,0 +1,374 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/data_types.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `TypeCode` is used as a part of +// [DataType][google.cloud.automl.v1beta1.DataType]. +// +// Each legal value of a DataType can be encoded to or decoded from a JSON +// value, using the encodings listed below, and definitions of which can be +// found at +// +// https: +// //developers.google.com/protocol-buffers +// // /docs/reference/google.protobuf#value. +type TypeCode int32 + +const ( + // Not specified. Should not be used. + TypeCode_TYPE_CODE_UNSPECIFIED TypeCode = 0 + // Encoded as `number`, or the strings `"NaN"`, `"Infinity"`, or + // `"-Infinity"`. + TypeCode_FLOAT64 TypeCode = 3 + // Must be between 0AD and 9999AD. Encoded as `string` according to + // [time_format][google.cloud.automl.v1beta1.DataType.time_format], or, if + // that format is not set, then in RFC 3339 `date-time` format, where + // `time-offset` = `"Z"` (e.g. 1985-04-12T23:20:50.52Z). + TypeCode_TIMESTAMP TypeCode = 4 + // Encoded as `string`. + TypeCode_STRING TypeCode = 6 + // Encoded as `list`, where the list elements are represented according to + // + // [list_element_type][google.cloud.automl.v1beta1.DataType.list_element_type]. 
+ TypeCode_ARRAY TypeCode = 8 + // Encoded as `struct`, where field values are represented according to + // [struct_type][google.cloud.automl.v1beta1.DataType.struct_type]. + TypeCode_STRUCT TypeCode = 9 + // Values of this type are not further understood by AutoML, + // e.g. AutoML is unable to tell the order of values (as it could with + // FLOAT64), or is unable to say if one value contains another (as it + // could with STRING). + // Encoded as `string` (bytes should be base64-encoded, as described in RFC + // 4648, section 4). + TypeCode_CATEGORY TypeCode = 10 +) + +var TypeCode_name = map[int32]string{ + 0: "TYPE_CODE_UNSPECIFIED", + 3: "FLOAT64", + 4: "TIMESTAMP", + 6: "STRING", + 8: "ARRAY", + 9: "STRUCT", + 10: "CATEGORY", +} +var TypeCode_value = map[string]int32{ + "TYPE_CODE_UNSPECIFIED": 0, + "FLOAT64": 3, + "TIMESTAMP": 4, + "STRING": 6, + "ARRAY": 8, + "STRUCT": 9, + "CATEGORY": 10, +} + +func (x TypeCode) String() string { + return proto.EnumName(TypeCode_name, int32(x)) +} +func (TypeCode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_types_f857041c6e153cb1, []int{0} +} + +// Indicated the type of data that can be stored in a structured data entity +// (e.g. a table). +type DataType struct { + // Details of DataType-s that need additional specification. + // + // Types that are valid to be assigned to Details: + // *DataType_ListElementType + // *DataType_StructType + // *DataType_TimeFormat + Details isDataType_Details `protobuf_oneof:"details"` + // Required. The [TypeCode][google.cloud.automl.v1beta1.TypeCode] for this type. + TypeCode TypeCode `protobuf:"varint,1,opt,name=type_code,json=typeCode,proto3,enum=google.cloud.automl.v1beta1.TypeCode" json:"type_code,omitempty"` + // If true, this DataType can also be `null`. 
+ Nullable bool `protobuf:"varint,4,opt,name=nullable,proto3" json:"nullable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataType) Reset() { *m = DataType{} } +func (m *DataType) String() string { return proto.CompactTextString(m) } +func (*DataType) ProtoMessage() {} +func (*DataType) Descriptor() ([]byte, []int) { + return fileDescriptor_data_types_f857041c6e153cb1, []int{0} +} +func (m *DataType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataType.Unmarshal(m, b) +} +func (m *DataType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataType.Marshal(b, m, deterministic) +} +func (dst *DataType) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataType.Merge(dst, src) +} +func (m *DataType) XXX_Size() int { + return xxx_messageInfo_DataType.Size(m) +} +func (m *DataType) XXX_DiscardUnknown() { + xxx_messageInfo_DataType.DiscardUnknown(m) +} + +var xxx_messageInfo_DataType proto.InternalMessageInfo + +type isDataType_Details interface { + isDataType_Details() +} + +type DataType_ListElementType struct { + ListElementType *DataType `protobuf:"bytes,2,opt,name=list_element_type,json=listElementType,proto3,oneof"` +} + +type DataType_StructType struct { + StructType *StructType `protobuf:"bytes,3,opt,name=struct_type,json=structType,proto3,oneof"` +} + +type DataType_TimeFormat struct { + TimeFormat string `protobuf:"bytes,5,opt,name=time_format,json=timeFormat,proto3,oneof"` +} + +func (*DataType_ListElementType) isDataType_Details() {} + +func (*DataType_StructType) isDataType_Details() {} + +func (*DataType_TimeFormat) isDataType_Details() {} + +func (m *DataType) GetDetails() isDataType_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *DataType) GetListElementType() *DataType { + if x, ok := m.GetDetails().(*DataType_ListElementType); ok { + return x.ListElementType + } + return nil +} + +func (m *DataType) GetStructType() *StructType { + if x, ok := m.GetDetails().(*DataType_StructType); ok { + return x.StructType + } + return nil +} + +func (m *DataType) GetTimeFormat() string { + if x, ok := m.GetDetails().(*DataType_TimeFormat); ok { + return x.TimeFormat + } + return "" +} + +func (m *DataType) GetTypeCode() TypeCode { + if m != nil { + return m.TypeCode + } + return TypeCode_TYPE_CODE_UNSPECIFIED +} + +func (m *DataType) GetNullable() bool { + if m != nil { + return m.Nullable + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DataType) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DataType_OneofMarshaler, _DataType_OneofUnmarshaler, _DataType_OneofSizer, []interface{}{ + (*DataType_ListElementType)(nil), + (*DataType_StructType)(nil), + (*DataType_TimeFormat)(nil), + } +} + +func _DataType_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DataType) + // details + switch x := m.Details.(type) { + case *DataType_ListElementType: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListElementType); err != nil { + return err + } + case *DataType_StructType: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructType); err != nil { + return err + } + case *DataType_TimeFormat: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TimeFormat) + case nil: + default: + return fmt.Errorf("DataType.Details has unexpected type %T", x) + } + return nil +} + +func _DataType_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DataType) + switch tag { + case 2: // details.list_element_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DataType) + err := b.DecodeMessage(msg) + m.Details = &DataType_ListElementType{msg} + return true, err + case 3: // details.struct_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructType) + err := b.DecodeMessage(msg) + m.Details = &DataType_StructType{msg} + return true, err + case 5: // details.time_format + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Details = &DataType_TimeFormat{x} + return true, err + default: + return false, nil + } +} + +func _DataType_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DataType) + // details + switch x := m.Details.(type) { + case *DataType_ListElementType: + s := proto.Size(x.ListElementType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataType_StructType: + s := proto.Size(x.StructType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataType_TimeFormat: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TimeFormat))) + n += len(x.TimeFormat) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// `StructType` defines the DataType-s of a [STRUCT][google.cloud.automl.v1beta1.TypeCode.STRUCT] type. +type StructType struct { + // Unordered map of struct field names to their data types. + // Fields cannot be added or removed via Update. Their names and + // data types are still mutable. 
+ Fields map[string]*DataType `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructType) Reset() { *m = StructType{} } +func (m *StructType) String() string { return proto.CompactTextString(m) } +func (*StructType) ProtoMessage() {} +func (*StructType) Descriptor() ([]byte, []int) { + return fileDescriptor_data_types_f857041c6e153cb1, []int{1} +} +func (m *StructType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructType.Unmarshal(m, b) +} +func (m *StructType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructType.Marshal(b, m, deterministic) +} +func (dst *StructType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructType.Merge(dst, src) +} +func (m *StructType) XXX_Size() int { + return xxx_messageInfo_StructType.Size(m) +} +func (m *StructType) XXX_DiscardUnknown() { + xxx_messageInfo_StructType.DiscardUnknown(m) +} + +var xxx_messageInfo_StructType proto.InternalMessageInfo + +func (m *StructType) GetFields() map[string]*DataType { + if m != nil { + return m.Fields + } + return nil +} + +func init() { + proto.RegisterType((*DataType)(nil), "google.cloud.automl.v1beta1.DataType") + proto.RegisterType((*StructType)(nil), "google.cloud.automl.v1beta1.StructType") + proto.RegisterMapType((map[string]*DataType)(nil), "google.cloud.automl.v1beta1.StructType.FieldsEntry") + proto.RegisterEnum("google.cloud.automl.v1beta1.TypeCode", TypeCode_name, TypeCode_value) +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/data_types.proto", fileDescriptor_data_types_f857041c6e153cb1) +} + +var fileDescriptor_data_types_f857041c6e153cb1 = []byte{ + // 528 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0x5f, 0x8b, 0xd3, 0x40, + 0x10, 0xc0, 0x2f, 0xed, 0xb5, 0x97, 0x4c, 0xfc, 0x13, 0x17, 0x84, 0x5e, 0x4f, 0xb0, 0x1e, 0x8a, + 0x45, 0x24, 0xa1, 0x77, 0x22, 0xe2, 0x3d, 0xa5, 0x69, 0x5a, 0xab, 0xf6, 0x5a, 0xd2, 0x9c, 0x50, + 0x29, 0xc4, 0x6d, 0xb3, 0x0d, 0xc1, 0x6d, 0xb6, 0x24, 0x9b, 0xe3, 0xfa, 0xee, 0x97, 0x12, 0xbf, + 0x81, 0x9f, 0x4a, 0xb2, 0xd9, 0x3b, 0x7d, 0x90, 0xea, 0xbd, 0xed, 0xcc, 0xfc, 0xe6, 0xb7, 0x33, + 0x24, 0x0b, 0x2f, 0x23, 0xc6, 0x22, 0x4a, 0xac, 0x25, 0x65, 0x79, 0x68, 0xe1, 0x9c, 0xb3, 0x35, + 0xb5, 0x2e, 0x3b, 0x0b, 0xc2, 0x71, 0xc7, 0x0a, 0x31, 0xc7, 0x01, 0xdf, 0x6e, 0x48, 0x66, 0x6e, + 0x52, 0xc6, 0x19, 0x3a, 0x2a, 0x69, 0x53, 0xd0, 0x66, 0x49, 0x9b, 0x92, 0x6e, 0x3e, 0x92, 0x2a, + 0xbc, 0x89, 0x2d, 0x9c, 0x24, 0x8c, 0x63, 0x1e, 0xb3, 0x44, 0xb6, 0x36, 0x9f, 0xee, 0xba, 0x28, + 0x66, 0x92, 0xea, 0xec, 0xa2, 0x38, 0xb9, 0xe2, 0x01, 0xb9, 0xe2, 0x29, 0x5e, 0x16, 0x66, 0xd9, + 0x72, 0x28, 0x5b, 0x44, 0xb4, 0xc8, 0x57, 0x16, 0x4e, 0xb6, 0x65, 0xe9, 0xf8, 0x7b, 0x05, 0xd4, + 0x1e, 0xe6, 0xd8, 0xdf, 0x6e, 0x08, 0x9a, 0xc2, 0x03, 0x1a, 0x67, 0x3c, 0x20, 0x94, 0xac, 0x49, + 0xc2, 0xc5, 0x5e, 0x8d, 0x4a, 0x4b, 0x69, 0xeb, 0x27, 0xcf, 0xcc, 0x1d, 0x7b, 0x99, 0xd7, 0x86, + 0x77, 0x7b, 0xde, 0xfd, 0xc2, 0xe0, 0x96, 0x02, 0x21, 0x7d, 0x0f, 0x7a, 0xc6, 0xd3, 0x7c, 0x29, + 0x75, 0x55, 0xa1, 0x7b, 0xbe, 0x53, 0x37, 0x15, 0xbc, 0x14, 0x42, 0x76, 0x13, 0xa1, 0x27, 0xa0, + 0xf3, 0x78, 0x4d, 0x82, 0x15, 0x4b, 0xd7, 0x98, 0x37, 0x6a, 0x2d, 0xa5, 0xad, 0x15, 0x48, 0x91, + 0xec, 0x8b, 0x1c, 0xea, 0x82, 0x56, 0xdc, 0x13, 0x2c, 0x59, 
0x48, 0x1a, 0x4a, 0x4b, 0x69, 0xdf, + 0xfb, 0xc7, 0xec, 0x85, 0xd8, 0x61, 0x21, 0xf1, 0x54, 0x2e, 0x4f, 0xa8, 0x09, 0x6a, 0x92, 0x53, + 0x8a, 0x17, 0x94, 0x34, 0xf6, 0x5b, 0x4a, 0x5b, 0xf5, 0x6e, 0xe2, 0xae, 0x06, 0x07, 0x21, 0xe1, + 0x38, 0xa6, 0xd9, 0xf1, 0x0f, 0x05, 0xe0, 0xf7, 0xa8, 0xe8, 0x03, 0xd4, 0x57, 0x31, 0xa1, 0x61, + 0xd6, 0x50, 0x5a, 0xd5, 0xb6, 0x7e, 0x72, 0xfa, 0x9f, 0x3b, 0x9a, 0x7d, 0xd1, 0xe5, 0x26, 0x3c, + 0xdd, 0x7a, 0x52, 0xd1, 0xfc, 0x02, 0xfa, 0x1f, 0x69, 0x64, 0x40, 0xf5, 0x2b, 0xd9, 0x8a, 0x7d, + 0x34, 0xaf, 0x38, 0xa2, 0x33, 0xa8, 0x5d, 0x62, 0x9a, 0xdf, 0xee, 0xfb, 0x78, 0x65, 0xcf, 0xdb, + 0xca, 0x1b, 0xe5, 0x45, 0x0a, 0xea, 0xf5, 0xea, 0xe8, 0x10, 0x1e, 0xfa, 0xb3, 0x89, 0x1b, 0x38, + 0xe3, 0x9e, 0x1b, 0x5c, 0x9c, 0x4f, 0x27, 0xae, 0x33, 0xec, 0x0f, 0xdd, 0x9e, 0xb1, 0x87, 0x74, + 0x38, 0xe8, 0x7f, 0x1c, 0xdb, 0xfe, 0xeb, 0x57, 0x46, 0x15, 0xdd, 0x05, 0xcd, 0x1f, 0x8e, 0xdc, + 0xa9, 0x6f, 0x8f, 0x26, 0xc6, 0x3e, 0x02, 0xa8, 0x4f, 0x7d, 0x6f, 0x78, 0x3e, 0x30, 0xea, 0x48, + 0x83, 0x9a, 0xed, 0x79, 0xf6, 0xcc, 0x50, 0x65, 0xfa, 0xc2, 0xf1, 0x0d, 0x0d, 0xdd, 0x01, 0xd5, + 0xb1, 0x7d, 0x77, 0x30, 0xf6, 0x66, 0x06, 0x74, 0xbf, 0x29, 0xf0, 0x78, 0xc9, 0xd6, 0xbb, 0x66, + 0x9d, 0x28, 0x9f, 0x6d, 0x59, 0x8e, 0x18, 0xc5, 0x49, 0x64, 0xb2, 0x34, 0xb2, 0x22, 0x92, 0x88, + 0xff, 0xd5, 0x2a, 0x4b, 0x78, 0x13, 0x67, 0x7f, 0x7d, 0x00, 0x67, 0x65, 0xf8, 0xb3, 0x72, 0x34, + 0x10, 0xe0, 0xdc, 0x29, 0xa0, 0xb9, 0x9d, 0x73, 0x36, 0xa2, 0xf3, 0x4f, 0x25, 0xb4, 0xa8, 0x0b, + 0xd7, 0xe9, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x69, 0x86, 0x13, 0xbf, 0xda, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/dataset.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/dataset.pb.go new file mode 100644 index 0000000..5b962b7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/dataset.pb.go @@ -0,0 +1,549 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/dataset.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A workspace for solving a single, particular machine learning (ML) problem. +// A workspace contains examples that may be annotated. +type Dataset struct { + // Required. + // The dataset metadata that is specific to the problem type. 
+ // + // Types that are valid to be assigned to DatasetMetadata: + // *Dataset_TranslationDatasetMetadata + // *Dataset_ImageClassificationDatasetMetadata + // *Dataset_TextClassificationDatasetMetadata + // *Dataset_ImageObjectDetectionDatasetMetadata + // *Dataset_VideoClassificationDatasetMetadata + // *Dataset_TextExtractionDatasetMetadata + // *Dataset_TextSentimentDatasetMetadata + // *Dataset_TablesDatasetMetadata + DatasetMetadata isDataset_DatasetMetadata `protobuf_oneof:"dataset_metadata"` + // Output only. The resource name of the dataset. + // Form: `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the dataset to show in the interface. The name can be + // up to 32 characters long and can consist only of ASCII Latin letters A-Z + // and a-z, underscores + // (_), and ASCII digits 0-9. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User-provided description of the dataset. The description can be up to + // 25000 characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Output only. The number of examples in the dataset. + ExampleCount int32 `protobuf:"varint,21,opt,name=example_count,json=exampleCount,proto3" json:"example_count,omitempty"` + // Output only. Timestamp when this dataset was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,14,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Used to perform consistent read-modify-write updates. If not set, a blind + // "overwrite" update happens. + Etag string `protobuf:"bytes,17,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Dataset) Reset() { *m = Dataset{} } +func (m *Dataset) String() string { return proto.CompactTextString(m) } +func (*Dataset) ProtoMessage() {} +func (*Dataset) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_954237b3f3904ab8, []int{0} +} +func (m *Dataset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Dataset.Unmarshal(m, b) +} +func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Dataset.Marshal(b, m, deterministic) +} +func (dst *Dataset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Dataset.Merge(dst, src) +} +func (m *Dataset) XXX_Size() int { + return xxx_messageInfo_Dataset.Size(m) +} +func (m *Dataset) XXX_DiscardUnknown() { + xxx_messageInfo_Dataset.DiscardUnknown(m) +} + +var xxx_messageInfo_Dataset proto.InternalMessageInfo + +type isDataset_DatasetMetadata interface { + isDataset_DatasetMetadata() +} + +type Dataset_TranslationDatasetMetadata struct { + TranslationDatasetMetadata *TranslationDatasetMetadata `protobuf:"bytes,23,opt,name=translation_dataset_metadata,json=translationDatasetMetadata,proto3,oneof"` +} + +type Dataset_ImageClassificationDatasetMetadata struct { + ImageClassificationDatasetMetadata *ImageClassificationDatasetMetadata `protobuf:"bytes,24,opt,name=image_classification_dataset_metadata,json=imageClassificationDatasetMetadata,proto3,oneof"` +} + +type Dataset_TextClassificationDatasetMetadata struct { + TextClassificationDatasetMetadata *TextClassificationDatasetMetadata 
`protobuf:"bytes,25,opt,name=text_classification_dataset_metadata,json=textClassificationDatasetMetadata,proto3,oneof"` +} + +type Dataset_ImageObjectDetectionDatasetMetadata struct { + ImageObjectDetectionDatasetMetadata *ImageObjectDetectionDatasetMetadata `protobuf:"bytes,26,opt,name=image_object_detection_dataset_metadata,json=imageObjectDetectionDatasetMetadata,proto3,oneof"` +} + +type Dataset_VideoClassificationDatasetMetadata struct { + VideoClassificationDatasetMetadata *VideoClassificationDatasetMetadata `protobuf:"bytes,31,opt,name=video_classification_dataset_metadata,json=videoClassificationDatasetMetadata,proto3,oneof"` +} + +type Dataset_TextExtractionDatasetMetadata struct { + TextExtractionDatasetMetadata *TextExtractionDatasetMetadata `protobuf:"bytes,28,opt,name=text_extraction_dataset_metadata,json=textExtractionDatasetMetadata,proto3,oneof"` +} + +type Dataset_TextSentimentDatasetMetadata struct { + TextSentimentDatasetMetadata *TextSentimentDatasetMetadata `protobuf:"bytes,30,opt,name=text_sentiment_dataset_metadata,json=textSentimentDatasetMetadata,proto3,oneof"` +} + +type Dataset_TablesDatasetMetadata struct { + TablesDatasetMetadata *TablesDatasetMetadata `protobuf:"bytes,33,opt,name=tables_dataset_metadata,json=tablesDatasetMetadata,proto3,oneof"` +} + +func (*Dataset_TranslationDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_ImageClassificationDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_TextClassificationDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_ImageObjectDetectionDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_VideoClassificationDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_TextExtractionDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_TextSentimentDatasetMetadata) isDataset_DatasetMetadata() {} + +func (*Dataset_TablesDatasetMetadata) isDataset_DatasetMetadata() {} + +func (m *Dataset) GetDatasetMetadata() isDataset_DatasetMetadata { + if m != nil { + return m.DatasetMetadata + } + return nil +} + +func (m *Dataset) GetTranslationDatasetMetadata() *TranslationDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_TranslationDatasetMetadata); ok { + return x.TranslationDatasetMetadata + } + return nil +} + +func (m *Dataset) GetImageClassificationDatasetMetadata() *ImageClassificationDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_ImageClassificationDatasetMetadata); ok { + return x.ImageClassificationDatasetMetadata + } + return nil +} + +func (m *Dataset) GetTextClassificationDatasetMetadata() *TextClassificationDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_TextClassificationDatasetMetadata); ok { + return x.TextClassificationDatasetMetadata + } + return nil +} + +func (m *Dataset) GetImageObjectDetectionDatasetMetadata() *ImageObjectDetectionDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_ImageObjectDetectionDatasetMetadata); ok { + return x.ImageObjectDetectionDatasetMetadata + } + return nil +} + +func (m *Dataset) GetVideoClassificationDatasetMetadata() *VideoClassificationDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_VideoClassificationDatasetMetadata); ok { + return x.VideoClassificationDatasetMetadata + } + return nil +} + +func (m *Dataset) GetTextExtractionDatasetMetadata() *TextExtractionDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_TextExtractionDatasetMetadata); ok { + return x.TextExtractionDatasetMetadata + } + return nil +} + 
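// ---------------------------------------------------------------------------
// Minimal usage sketch (editor-added, not part of the generated file): the
// dataset_metadata oneof above is typically consumed with a type switch on
// GetDatasetMetadata(). The helper name describeDataset and the hand-built
// Dataset value are illustrative assumptions only.
package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

// describeDataset reports which problem type the dataset_metadata oneof carries.
func describeDataset(ds *automl.Dataset) string {
	switch ds.GetDatasetMetadata().(type) {
	case *automl.Dataset_TranslationDatasetMetadata:
		return fmt.Sprintf("%s: translation dataset, %d examples", ds.GetDisplayName(), ds.GetExampleCount())
	case *automl.Dataset_TablesDatasetMetadata:
		return fmt.Sprintf("%s: tables dataset, %d examples", ds.GetDisplayName(), ds.GetExampleCount())
	default:
		return fmt.Sprintf("%s: other or unset dataset_metadata", ds.GetDisplayName())
	}
}

func main() {
	// Construct a Dataset with one of the oneof wrapper types set.
	ds := &automl.Dataset{
		DisplayName:     "my_translation_data",
		ExampleCount:    1200,
		DatasetMetadata: &automl.Dataset_TranslationDatasetMetadata{},
	}
	fmt.Println(describeDataset(ds))
}
// ---------------------------------------------------------------------------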
+func (m *Dataset) GetTextSentimentDatasetMetadata() *TextSentimentDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_TextSentimentDatasetMetadata); ok { + return x.TextSentimentDatasetMetadata + } + return nil +} + +func (m *Dataset) GetTablesDatasetMetadata() *TablesDatasetMetadata { + if x, ok := m.GetDatasetMetadata().(*Dataset_TablesDatasetMetadata); ok { + return x.TablesDatasetMetadata + } + return nil +} + +func (m *Dataset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Dataset) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Dataset) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Dataset) GetExampleCount() int32 { + if m != nil { + return m.ExampleCount + } + return 0 +} + +func (m *Dataset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Dataset) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Dataset) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Dataset_OneofMarshaler, _Dataset_OneofUnmarshaler, _Dataset_OneofSizer, []interface{}{ + (*Dataset_TranslationDatasetMetadata)(nil), + (*Dataset_ImageClassificationDatasetMetadata)(nil), + (*Dataset_TextClassificationDatasetMetadata)(nil), + (*Dataset_ImageObjectDetectionDatasetMetadata)(nil), + (*Dataset_VideoClassificationDatasetMetadata)(nil), + (*Dataset_TextExtractionDatasetMetadata)(nil), + (*Dataset_TextSentimentDatasetMetadata)(nil), + (*Dataset_TablesDatasetMetadata)(nil), + } +} + +func _Dataset_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Dataset) + // dataset_metadata + switch x := m.DatasetMetadata.(type) { + case *Dataset_TranslationDatasetMetadata: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TranslationDatasetMetadata); err != nil { + return err + } + case *Dataset_ImageClassificationDatasetMetadata: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationDatasetMetadata); err != nil { + return err + } + case *Dataset_TextClassificationDatasetMetadata: + b.EncodeVarint(25<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationDatasetMetadata); err != nil { + return err + } + case *Dataset_ImageObjectDetectionDatasetMetadata: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageObjectDetectionDatasetMetadata); err != nil { + return err + } + case *Dataset_VideoClassificationDatasetMetadata: + b.EncodeVarint(31<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationDatasetMetadata); err != nil { + return err + } + case *Dataset_TextExtractionDatasetMetadata: + b.EncodeVarint(28<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextExtractionDatasetMetadata); err != nil { + return err + } + case *Dataset_TextSentimentDatasetMetadata: + b.EncodeVarint(30<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextSentimentDatasetMetadata); err != nil { + return err + } + case *Dataset_TablesDatasetMetadata: + b.EncodeVarint(33<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TablesDatasetMetadata); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Dataset.DatasetMetadata has unexpected type %T", x) + } + return 
nil +} + +func _Dataset_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Dataset) + switch tag { + case 23: // dataset_metadata.translation_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TranslationDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_TranslationDatasetMetadata{msg} + return true, err + case 24: // dataset_metadata.image_classification_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageClassificationDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_ImageClassificationDatasetMetadata{msg} + return true, err + case 25: // dataset_metadata.text_classification_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextClassificationDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_TextClassificationDatasetMetadata{msg} + return true, err + case 26: // dataset_metadata.image_object_detection_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageObjectDetectionDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_ImageObjectDetectionDatasetMetadata{msg} + return true, err + case 31: // dataset_metadata.video_classification_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_VideoClassificationDatasetMetadata{msg} + return true, err + case 28: // dataset_metadata.text_extraction_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextExtractionDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_TextExtractionDatasetMetadata{msg} + return true, err + case 30: // dataset_metadata.text_sentiment_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextSentimentDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_TextSentimentDatasetMetadata{msg} + return true, err + case 33: // dataset_metadata.tables_dataset_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TablesDatasetMetadata) + err := b.DecodeMessage(msg) + m.DatasetMetadata = &Dataset_TablesDatasetMetadata{msg} + return true, err + default: + return false, nil + } +} + +func _Dataset_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Dataset) + // dataset_metadata + switch x := m.DatasetMetadata.(type) { + case *Dataset_TranslationDatasetMetadata: + s := proto.Size(x.TranslationDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_ImageClassificationDatasetMetadata: + s := proto.Size(x.ImageClassificationDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_TextClassificationDatasetMetadata: + s := proto.Size(x.TextClassificationDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_ImageObjectDetectionDatasetMetadata: + s := proto.Size(x.ImageObjectDetectionDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_VideoClassificationDatasetMetadata: + s := 
proto.Size(x.VideoClassificationDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_TextExtractionDatasetMetadata: + s := proto.Size(x.TextExtractionDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_TextSentimentDatasetMetadata: + s := proto.Size(x.TextSentimentDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Dataset_TablesDatasetMetadata: + s := proto.Size(x.TablesDatasetMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A definition of an annotation. +type AnnotationSpec struct { + // Output only. Resource name of the annotation spec. + // Form: + // + // 'projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/annotationSpecs/{annotation_spec_id}' + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // The name of the annotation spec to show in the interface. The name can be + // up to 32 characters long and can consist only of ASCII Latin letters A-Z + // and a-z, underscores + // (_), and ASCII digits 0-9. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The number of examples in the parent dataset + // labeled by the annotation spec. + ExampleCount int32 `protobuf:"varint,9,opt,name=example_count,json=exampleCount,proto3" json:"example_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationSpec) Reset() { *m = AnnotationSpec{} } +func (m *AnnotationSpec) String() string { return proto.CompactTextString(m) } +func (*AnnotationSpec) ProtoMessage() {} +func (*AnnotationSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_954237b3f3904ab8, []int{1} +} +func (m *AnnotationSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationSpec.Unmarshal(m, b) +} +func (m *AnnotationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationSpec.Marshal(b, m, deterministic) +} +func (dst *AnnotationSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationSpec.Merge(dst, src) +} +func (m *AnnotationSpec) XXX_Size() int { + return xxx_messageInfo_AnnotationSpec.Size(m) +} +func (m *AnnotationSpec) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationSpec proto.InternalMessageInfo + +func (m *AnnotationSpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AnnotationSpec) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *AnnotationSpec) GetExampleCount() int32 { + if m != nil { + return m.ExampleCount + } + return 0 +} + +func init() { + proto.RegisterType((*Dataset)(nil), "google.cloud.automl.v1beta1.Dataset") + proto.RegisterType((*AnnotationSpec)(nil), "google.cloud.automl.v1beta1.AnnotationSpec") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/dataset.proto", fileDescriptor_dataset_954237b3f3904ab8) +} + +var fileDescriptor_dataset_954237b3f3904ab8 = []byte{ + // 647 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x95, 0x5d, 0x6b, 0xd4, 0x4c, + 0x14, 0xc7, 0x9f, 0xf4, 0xf1, 0x85, 0xce, 
0xd6, 0xa2, 0x03, 0xa5, 0x71, 0xbb, 0xba, 0xdb, 0x56, + 0xed, 0x0a, 0x9a, 0xd0, 0x2a, 0x88, 0x16, 0xd4, 0xbe, 0x88, 0x7a, 0x51, 0x95, 0x6d, 0xe9, 0x85, + 0x14, 0xc2, 0x6c, 0x72, 0x1a, 0x46, 0x26, 0x99, 0x90, 0x9c, 0x2d, 0x5b, 0xbc, 0x13, 0xfd, 0x00, + 0x82, 0x17, 0x7e, 0x1d, 0x6f, 0xfd, 0x54, 0x32, 0x93, 0x49, 0xab, 0xa6, 0x9b, 0x29, 0xde, 0x25, + 0xe7, 0xfc, 0xe6, 0x7f, 0xfe, 0xe7, 0x9c, 0xdd, 0x09, 0xb9, 0x1b, 0x4b, 0x19, 0x0b, 0xf0, 0x43, + 0x21, 0x47, 0x91, 0xcf, 0x46, 0x28, 0x13, 0xe1, 0x1f, 0xad, 0x0e, 0x01, 0xd9, 0xaa, 0x1f, 0x31, + 0x64, 0x05, 0xa0, 0x97, 0xe5, 0x12, 0x25, 0x5d, 0x28, 0x51, 0x4f, 0xa3, 0x5e, 0x89, 0x7a, 0x06, + 0x6d, 0x77, 0x8c, 0x0e, 0xcb, 0xb8, 0xcf, 0xd2, 0x54, 0x22, 0x43, 0x2e, 0xd3, 0xa2, 0x3c, 0xda, + 0x7e, 0xd8, 0x54, 0xe5, 0x14, 0x0f, 0x32, 0x76, 0x2c, 0x24, 0x8b, 0xcc, 0xa9, 0x7b, 0x36, 0x6f, + 0x01, 0x47, 0x48, 0xaa, 0x1a, 0x2b, 0x4d, 0x34, 0x4f, 0x58, 0x0c, 0x06, 0xec, 0x37, 0x81, 0xc8, + 0x86, 0x02, 0x2a, 0xc9, 0x3b, 0x8d, 0x24, 0x8c, 0xcd, 0x64, 0xda, 0xf7, 0x1b, 0xb9, 0x9c, 0xa5, + 0x85, 0xd0, 0xfd, 0x9d, 0xc7, 0xe9, 0x11, 0x8f, 0x40, 0x1a, 0xb0, 0x6b, 0x40, 0xfd, 0x36, 0x1c, + 0x1d, 0xfa, 0xc8, 0x13, 0x28, 0x90, 0x25, 0x59, 0x09, 0x2c, 0xfd, 0x20, 0xe4, 0xf2, 0x76, 0xb9, + 0x24, 0xfa, 0x91, 0x74, 0x7e, 0x2b, 0x15, 0x98, 0xdd, 0x05, 0x09, 0x20, 0x53, 0xcf, 0xee, 0x7c, + 0xcf, 0xe9, 0xb7, 0xd6, 0x1e, 0x79, 0x0d, 0x5b, 0xf4, 0xf6, 0x4e, 0x05, 0x8c, 0xec, 0x8e, 0x39, + 0xfe, 0xea, 0xbf, 0x41, 0x1b, 0x27, 0x66, 0xe9, 0x37, 0x87, 0xdc, 0xd6, 0x33, 0x0e, 0x42, 0xc1, + 0x8a, 0x82, 0x1f, 0xf2, 0x70, 0x82, 0x0d, 0x57, 0xdb, 0x78, 0xd6, 0x68, 0xe3, 0xb5, 0x52, 0xda, + 0xfa, 0x43, 0xa8, 0x6e, 0x67, 0x89, 0x5b, 0x29, 0xfa, 0xd5, 0x21, 0xb7, 0xd4, 0x9e, 0xac, 0xae, + 0xae, 0x6b, 0x57, 0x4f, 0x9b, 0x87, 0x03, 0x63, 0xb4, 0x99, 0x5a, 0x44, 0x1b, 0x44, 0xbf, 0x3b, + 0x64, 0xa5, 0x1c, 0x95, 0x1c, 0x7e, 0x80, 0x10, 0x83, 0x08, 0x10, 0xc2, 0xb3, 0x6d, 0xb5, 0xb5, + 0xad, 0xe7, 0xf6, 0x61, 0xbd, 0xd5, 0x52, 0xdb, 0x95, 0x52, 0xdd, 0xd8, 0x32, 0xb7, 0x63, 0x7a, + 0x8b, 0xfa, 0xf7, 0x67, 0x9d, 0x57, 0xf7, 0x1c, 0x5b, 0xdc, 0x57, 0x4a, 0xd6, 0x2d, 0x1e, 0x59, + 0x29, 0xfa, 0xc5, 0x21, 0x3d, 0xbd, 0x45, 0x18, 0x63, 0xce, 0x26, 0x8c, 0xaa, 0xa3, 0x1d, 0x3d, + 0xb1, 0x6e, 0xf0, 0xc5, 0x89, 0x46, 0xdd, 0xcc, 0x0d, 0x6c, 0x02, 0xe8, 0x27, 0x87, 0x74, 0xb5, + 0x8f, 0x02, 0x52, 0xf5, 0x4f, 0x4c, 0xb1, 0x6e, 0xe3, 0xa6, 0xb6, 0xf1, 0xd8, 0x6a, 0x63, 0xb7, + 0x92, 0xa8, 0xbb, 0xe8, 0x60, 0x43, 0x9e, 0x0a, 0x32, 0x5f, 0xde, 0x51, 0xf5, 0xda, 0x8b, 0xba, + 0xf6, 0x5a, 0x73, 0x6d, 0x7d, 0xb6, 0x5e, 0x74, 0x0e, 0xcf, 0x4a, 0x50, 0x4a, 0x2e, 0xa4, 0x2c, + 0x01, 0xd7, 0xe9, 0x39, 0xfd, 0xe9, 0x81, 0x7e, 0xa6, 0x8b, 0x64, 0x26, 0xe2, 0x45, 0x26, 0xd8, + 0x71, 0xa0, 0x73, 0x53, 0x3a, 0xd7, 0x32, 0xb1, 0x37, 0x0a, 0xe9, 0x91, 0x56, 0x04, 0x45, 0x98, + 0xf3, 0x4c, 0xcd, 0xd1, 0xfd, 0xdf, 0x10, 0xa7, 0x21, 0xba, 0x4c, 0xae, 0xc0, 0x98, 0x25, 0x99, + 0x80, 0x20, 0x94, 0xa3, 0x14, 0xdd, 0xb9, 0x9e, 0xd3, 0xbf, 0x38, 0x98, 0x31, 0xc1, 0x2d, 0x15, + 0xa3, 0xeb, 0xa4, 0x15, 0xe6, 0xc0, 0x10, 0x02, 0x35, 0x0b, 0x77, 0x56, 0xf7, 0xd7, 0xae, 0xfa, + 0xab, 0x6e, 0x45, 0x6f, 0xaf, 0xba, 0x15, 0x07, 0xa4, 0xc4, 0x55, 0x40, 0x59, 0x07, 0x64, 0xb1, + 0x7b, 0xad, 0xb4, 0xae, 0x9e, 0x37, 0x29, 0xb9, 0xfa, 0xf7, 0xd4, 0x96, 0x04, 0x99, 0xdd, 0x38, + 0xf9, 0x02, 0xed, 0x66, 0x10, 0xfe, 0x6b, 0xd3, 0xb5, 0x96, 0xa6, 0xeb, 0x2d, 0x6d, 0x7e, 0x76, + 0x48, 0x37, 0x94, 0x49, 0xd3, 0x8e, 0xde, 0x39, 0xef, 0x37, 0x4c, 0x3a, 0x96, 0x82, 0xa5, 0xb1, + 0x27, 0xf3, 0xd8, 0x8f, 0x21, 0xd5, 0x0d, 0xfb, 0x65, 0x8a, 0x65, 
0xbc, 0x38, 0xf3, 0x03, 0xb2, + 0x5e, 0xbe, 0xfe, 0x9c, 0x5a, 0x78, 0xa9, 0xc1, 0x83, 0x2d, 0x05, 0x1d, 0x6c, 0x8c, 0x50, 0xee, + 0x88, 0x83, 0xfd, 0x12, 0x1a, 0x5e, 0xd2, 0x5a, 0x0f, 0x7e, 0x05, 0x00, 0x00, 0xff, 0xff, 0xa6, + 0x6b, 0xbf, 0xc3, 0xff, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/detection.pb.go new file mode 100644 index 0000000..a1458b3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/detection.pb.go @@ -0,0 +1,319 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/detection.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Annotation details for image object detection. +type ImageObjectDetectionAnnotation struct { + // Output only. + // The rectangle representing the object location. + BoundingBox *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // Output only. + // The confidence that this annotation is positive for the parent example, + // value in [0, 1], higher means higher positivity confidence. 
+ Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageObjectDetectionAnnotation) Reset() { *m = ImageObjectDetectionAnnotation{} } +func (m *ImageObjectDetectionAnnotation) String() string { return proto.CompactTextString(m) } +func (*ImageObjectDetectionAnnotation) ProtoMessage() {} +func (*ImageObjectDetectionAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_detection_fc1cbf42da607929, []int{0} +} +func (m *ImageObjectDetectionAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageObjectDetectionAnnotation.Unmarshal(m, b) +} +func (m *ImageObjectDetectionAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageObjectDetectionAnnotation.Marshal(b, m, deterministic) +} +func (dst *ImageObjectDetectionAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageObjectDetectionAnnotation.Merge(dst, src) +} +func (m *ImageObjectDetectionAnnotation) XXX_Size() int { + return xxx_messageInfo_ImageObjectDetectionAnnotation.Size(m) +} +func (m *ImageObjectDetectionAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ImageObjectDetectionAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageObjectDetectionAnnotation proto.InternalMessageInfo + +func (m *ImageObjectDetectionAnnotation) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *ImageObjectDetectionAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Bounding box matching model metrics for a single intersection-over-union +// threshold and multiple label match confidence thresholds. +type BoundingBoxMetricsEntry struct { + // Output only. The intersection-over-union threshold value used to compute + // this metrics entry. + IouThreshold float32 `protobuf:"fixed32,1,opt,name=iou_threshold,json=iouThreshold,proto3" json:"iou_threshold,omitempty"` + // Output only. The mean average precision, most often close to au_prc. + MeanAveragePrecision float32 `protobuf:"fixed32,2,opt,name=mean_average_precision,json=meanAveragePrecision,proto3" json:"mean_average_precision,omitempty"` + // Output only. Metrics for each label-match confidence_threshold from + // 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99. Precision-recall curve is + // derived from them. 
+ ConfidenceMetricsEntries []*BoundingBoxMetricsEntry_ConfidenceMetricsEntry `protobuf:"bytes,3,rep,name=confidence_metrics_entries,json=confidenceMetricsEntries,proto3" json:"confidence_metrics_entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingBoxMetricsEntry) Reset() { *m = BoundingBoxMetricsEntry{} } +func (m *BoundingBoxMetricsEntry) String() string { return proto.CompactTextString(m) } +func (*BoundingBoxMetricsEntry) ProtoMessage() {} +func (*BoundingBoxMetricsEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_detection_fc1cbf42da607929, []int{1} +} +func (m *BoundingBoxMetricsEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingBoxMetricsEntry.Unmarshal(m, b) +} +func (m *BoundingBoxMetricsEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingBoxMetricsEntry.Marshal(b, m, deterministic) +} +func (dst *BoundingBoxMetricsEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingBoxMetricsEntry.Merge(dst, src) +} +func (m *BoundingBoxMetricsEntry) XXX_Size() int { + return xxx_messageInfo_BoundingBoxMetricsEntry.Size(m) +} +func (m *BoundingBoxMetricsEntry) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingBoxMetricsEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingBoxMetricsEntry proto.InternalMessageInfo + +func (m *BoundingBoxMetricsEntry) GetIouThreshold() float32 { + if m != nil { + return m.IouThreshold + } + return 0 +} + +func (m *BoundingBoxMetricsEntry) GetMeanAveragePrecision() float32 { + if m != nil { + return m.MeanAveragePrecision + } + return 0 +} + +func (m *BoundingBoxMetricsEntry) GetConfidenceMetricsEntries() []*BoundingBoxMetricsEntry_ConfidenceMetricsEntry { + if m != nil { + return m.ConfidenceMetricsEntries + } + return nil +} + +// Metrics for a single confidence threshold. +type BoundingBoxMetricsEntry_ConfidenceMetricsEntry struct { + // Output only. The confidence threshold value used to compute the metrics. + ConfidenceThreshold float32 `protobuf:"fixed32,1,opt,name=confidence_threshold,json=confidenceThreshold,proto3" json:"confidence_threshold,omitempty"` + // Output only. Recall under the given confidence threshold. + Recall float32 `protobuf:"fixed32,2,opt,name=recall,proto3" json:"recall,omitempty"` + // Output only. Precision under the given confidence threshold. + Precision float32 `protobuf:"fixed32,3,opt,name=precision,proto3" json:"precision,omitempty"` + // Output only. The harmonic mean of recall and precision. 
+ F1Score float32 `protobuf:"fixed32,4,opt,name=f1_score,json=f1Score,proto3" json:"f1_score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) Reset() { + *m = BoundingBoxMetricsEntry_ConfidenceMetricsEntry{} +} +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) String() string { + return proto.CompactTextString(m) +} +func (*BoundingBoxMetricsEntry_ConfidenceMetricsEntry) ProtoMessage() {} +func (*BoundingBoxMetricsEntry_ConfidenceMetricsEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_detection_fc1cbf42da607929, []int{1, 0} +} +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry.Unmarshal(m, b) +} +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry.Marshal(b, m, deterministic) +} +func (dst *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry.Merge(dst, src) +} +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) XXX_Size() int { + return xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry.Size(m) +} +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingBoxMetricsEntry_ConfidenceMetricsEntry proto.InternalMessageInfo + +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) GetConfidenceThreshold() float32 { + if m != nil { + return m.ConfidenceThreshold + } + return 0 +} + +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) GetRecall() float32 { + if m != nil { + return m.Recall + } + return 0 +} + +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) GetPrecision() float32 { + if m != nil { + return m.Precision + } + return 0 +} + +func (m *BoundingBoxMetricsEntry_ConfidenceMetricsEntry) GetF1Score() float32 { + if m != nil { + return m.F1Score + } + return 0 +} + +// Model evaluation metrics for image object detection problems. +// Evaluates prediction quality of labeled bounding boxes. +type ImageObjectDetectionEvaluationMetrics struct { + // Output only. The total number of bounding boxes (i.e. summed over all + // images) the ground truth used to create this evaluation had. + EvaluatedBoundingBoxCount int32 `protobuf:"varint,1,opt,name=evaluated_bounding_box_count,json=evaluatedBoundingBoxCount,proto3" json:"evaluated_bounding_box_count,omitempty"` + // Output only. The bounding boxes match metrics for each + // Intersection-over-union threshold 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 + // and each label confidence threshold 0.05,0.10,...,0.95,0.96,0.97,0.98,0.99 + // pair. + BoundingBoxMetricsEntries []*BoundingBoxMetricsEntry `protobuf:"bytes,2,rep,name=bounding_box_metrics_entries,json=boundingBoxMetricsEntries,proto3" json:"bounding_box_metrics_entries,omitempty"` + // Output only. The single metric for bounding boxes evaluation: + // the mean_average_precision averaged over all bounding_box_metrics_entries. 
+ BoundingBoxMeanAveragePrecision float32 `protobuf:"fixed32,3,opt,name=bounding_box_mean_average_precision,json=boundingBoxMeanAveragePrecision,proto3" json:"bounding_box_mean_average_precision,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageObjectDetectionEvaluationMetrics) Reset() { *m = ImageObjectDetectionEvaluationMetrics{} } +func (m *ImageObjectDetectionEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*ImageObjectDetectionEvaluationMetrics) ProtoMessage() {} +func (*ImageObjectDetectionEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_detection_fc1cbf42da607929, []int{2} +} +func (m *ImageObjectDetectionEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageObjectDetectionEvaluationMetrics.Unmarshal(m, b) +} +func (m *ImageObjectDetectionEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageObjectDetectionEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *ImageObjectDetectionEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageObjectDetectionEvaluationMetrics.Merge(dst, src) +} +func (m *ImageObjectDetectionEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_ImageObjectDetectionEvaluationMetrics.Size(m) +} +func (m *ImageObjectDetectionEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_ImageObjectDetectionEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageObjectDetectionEvaluationMetrics proto.InternalMessageInfo + +func (m *ImageObjectDetectionEvaluationMetrics) GetEvaluatedBoundingBoxCount() int32 { + if m != nil { + return m.EvaluatedBoundingBoxCount + } + return 0 +} + +func (m *ImageObjectDetectionEvaluationMetrics) GetBoundingBoxMetricsEntries() []*BoundingBoxMetricsEntry { + if m != nil { + return m.BoundingBoxMetricsEntries + } + return nil +} + +func (m *ImageObjectDetectionEvaluationMetrics) GetBoundingBoxMeanAveragePrecision() float32 { + if m != nil { + return m.BoundingBoxMeanAveragePrecision + } + return 0 +} + +func init() { + proto.RegisterType((*ImageObjectDetectionAnnotation)(nil), "google.cloud.automl.v1beta1.ImageObjectDetectionAnnotation") + proto.RegisterType((*BoundingBoxMetricsEntry)(nil), "google.cloud.automl.v1beta1.BoundingBoxMetricsEntry") + proto.RegisterType((*BoundingBoxMetricsEntry_ConfidenceMetricsEntry)(nil), "google.cloud.automl.v1beta1.BoundingBoxMetricsEntry.ConfidenceMetricsEntry") + proto.RegisterType((*ImageObjectDetectionEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.ImageObjectDetectionEvaluationMetrics") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/detection.proto", fileDescriptor_detection_fc1cbf42da607929) +} + +var fileDescriptor_detection_fc1cbf42da607929 = []byte{ + // 537 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x53, 0xc1, 0x6e, 0xd3, 0x4c, + 0x10, 0x96, 0x9d, 0xbf, 0xfd, 0x61, 0x5b, 0x2e, 0x4b, 0x55, 0xd2, 0x34, 0x6a, 0xab, 0x56, 0x48, + 0x01, 0x24, 0x5b, 0x29, 0xbd, 0x71, 0x40, 0x49, 0xa8, 0x10, 0xa2, 0x15, 0x91, 0x41, 0x1c, 0x50, + 0x24, 0x6b, 0xbd, 0x9e, 0xb8, 0x8b, 0xec, 0x9d, 0x68, 0xbd, 0x8e, 0x9a, 0x7b, 0x2f, 0xbc, 0x03, + 0x12, 0x4f, 0xc0, 0x8b, 0xf0, 0x54, 0x28, 0xbb, 0x4e, 0x9c, 0x06, 0x2b, 0x42, 0x1c, 0x67, 0xbe, + 0xef, 0x9b, 0xf9, 0x76, 0x66, 0x96, 0xbc, 0x48, 0x10, 0x93, 0x14, 0x7c, 0x9e, 0x62, 0x11, 0xfb, + 0xac, 0xd0, 0x98, 
0xa5, 0xfe, 0xb4, 0x1b, 0x81, 0x66, 0x5d, 0x3f, 0x06, 0x0d, 0x5c, 0x0b, 0x94, + 0xde, 0x44, 0xa1, 0x46, 0x7a, 0x68, 0xc9, 0x9e, 0x21, 0x7b, 0x96, 0xec, 0x95, 0xe4, 0x56, 0xbb, + 0xac, 0xc4, 0x26, 0xc2, 0x67, 0x52, 0xa2, 0x66, 0x73, 0x65, 0x6e, 0xa5, 0xad, 0xe7, 0x9b, 0xfa, + 0x24, 0x80, 0x19, 0x68, 0x35, 0x2b, 0xb9, 0x47, 0x25, 0xd7, 0x44, 0x51, 0x31, 0xf6, 0xe3, 0x42, + 0xb1, 0xca, 0xc6, 0xe9, 0x9d, 0x43, 0x8e, 0xde, 0x65, 0x2c, 0x81, 0x0f, 0xd1, 0x57, 0xe0, 0xfa, + 0xcd, 0xc2, 0x65, 0x6f, 0xd9, 0x95, 0x5e, 0x91, 0xdd, 0x08, 0x0b, 0x19, 0x0b, 0x99, 0x84, 0x11, + 0xde, 0x36, 0x9d, 0x13, 0xa7, 0xb3, 0x73, 0xfe, 0xcc, 0xdb, 0xf0, 0x00, 0xaf, 0x5f, 0x0a, 0x86, + 0x98, 0xce, 0x82, 0x9d, 0x85, 0xbc, 0x8f, 0xb7, 0x74, 0x8f, 0x6c, 0xe5, 0x1c, 0x15, 0x34, 0xdd, + 0x13, 0xa7, 0xe3, 0x06, 0x36, 0x38, 0xfd, 0xd1, 0x20, 0x4f, 0xfa, 0x15, 0xeb, 0x1a, 0xb4, 0x12, + 0x3c, 0xbf, 0x94, 0x5a, 0xcd, 0xe8, 0x19, 0x79, 0x24, 0xb0, 0x08, 0xf5, 0x8d, 0x82, 0xfc, 0x06, + 0xd3, 0xd8, 0x18, 0x70, 0x83, 0x5d, 0x81, 0xc5, 0xa7, 0x45, 0x8e, 0x5e, 0x90, 0xfd, 0x0c, 0x98, + 0x0c, 0xd9, 0x14, 0x14, 0x4b, 0x20, 0x9c, 0x28, 0xe0, 0x22, 0x17, 0x28, 0xcb, 0x3e, 0x7b, 0x73, + 0xb4, 0x67, 0xc1, 0xe1, 0x02, 0xa3, 0xdf, 0x1c, 0xd2, 0xe2, 0x28, 0xc7, 0x22, 0x06, 0xc9, 0x21, + 0xcc, 0x6c, 0xdb, 0x10, 0xa4, 0x56, 0x02, 0xf2, 0x66, 0xe3, 0xa4, 0xd1, 0xd9, 0x39, 0x7f, 0xff, + 0x57, 0x2f, 0x5d, 0x73, 0xed, 0x0d, 0x96, 0x65, 0x57, 0xd3, 0x41, 0x93, 0xd7, 0xe5, 0x05, 0xe4, + 0xad, 0xef, 0x0e, 0xd9, 0xaf, 0x17, 0xd1, 0x2e, 0xd9, 0x5b, 0x71, 0xb9, 0x3e, 0x88, 0xc7, 0x15, + 0x56, 0xcd, 0x63, 0x9f, 0x6c, 0x2b, 0xe0, 0x2c, 0x4d, 0xcb, 0xf7, 0x97, 0x11, 0x6d, 0x93, 0x87, + 0xd5, 0x68, 0x1a, 0x06, 0xaa, 0x12, 0xf4, 0x80, 0x3c, 0x18, 0x77, 0x43, 0xbb, 0x9f, 0xff, 0x0c, + 0xf8, 0xff, 0xb8, 0xfb, 0xd1, 0x6c, 0xe8, 0xa7, 0x4b, 0x9e, 0xd6, 0x1d, 0xca, 0xe5, 0x94, 0xa5, + 0x85, 0x39, 0x94, 0xd2, 0x32, 0x7d, 0x4d, 0xda, 0x60, 0x93, 0x10, 0x87, 0xab, 0x97, 0x13, 0x72, + 0x2c, 0xa4, 0x36, 0xae, 0xb7, 0x82, 0x83, 0x25, 0x67, 0x65, 0x82, 0x83, 0x39, 0x81, 0x16, 0xa4, + 0x7d, 0x4f, 0xb6, 0xbe, 0x16, 0xd7, 0xac, 0xe5, 0xe2, 0x5f, 0xd6, 0x12, 0x1c, 0x44, 0xb5, 0x80, + 0x80, 0x9c, 0x5e, 0x91, 0xb3, 0xb5, 0xb6, 0xb5, 0xf7, 0x64, 0x87, 0x76, 0x7c, 0xaf, 0xce, 0x9f, + 0xa7, 0xd5, 0xbf, 0x73, 0xc8, 0x31, 0xc7, 0x6c, 0x93, 0xc9, 0xa1, 0xf3, 0xa5, 0x57, 0xc2, 0x09, + 0xa6, 0x4c, 0x26, 0x1e, 0xaa, 0xc4, 0x4f, 0x40, 0x9a, 0xaf, 0xe9, 0x5b, 0x88, 0x4d, 0x44, 0x5e, + 0xfb, 0xd3, 0x5f, 0xd9, 0xf0, 0x97, 0x7b, 0xf8, 0xd6, 0x10, 0x47, 0x83, 0x39, 0x69, 0xd4, 0x2b, + 0x34, 0x5e, 0xa7, 0xa3, 0xcf, 0x96, 0x14, 0x6d, 0x9b, 0x5a, 0x2f, 0x7f, 0x07, 0x00, 0x00, 0xff, + 0xff, 0xfe, 0xc6, 0x97, 0x57, 0x9c, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/geometry.pb.go new file mode 100644 index 0000000..41a4791 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/geometry.pb.go @@ -0,0 +1,145 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/geometry.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// The normalized vertex coordinates are between 0 to 1 fractions relative to +// the original plane (image, video). E.g. if the plane (e.g. whole image) would +// have size 10 x 20 then a point with normalized coordinates (0.1, 0.3) would +// be at the position (1, 6) on that plane. +type NormalizedVertex struct { + // Required. Horizontal coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Required. Vertical coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_693f597e33c64c76, []int{0} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon of a detected object on a plane. +// On output both vertices and normalized_vertices are provided. +// The polygon is formed by connecting vertices in the order they are listed. +type BoundingPoly struct { + // Output only . The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,2,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_693f597e33c64c76, []int{1} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +func init() { + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.automl.v1beta1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.automl.v1beta1.BoundingPoly") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/geometry.proto", fileDescriptor_geometry_693f597e33c64c76) +} + +var fileDescriptor_geometry_693f597e33c64c76 = []byte{ + // 260 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0x41, 0x4b, 0xc3, 0x30, + 0x14, 0xc7, 0x49, 0x05, 0x0f, 0x71, 0x07, 0xa9, 0x97, 0xe2, 0x04, 0xc7, 0x4e, 0x43, 0x30, 0x61, + 0x7a, 0xf4, 0xb4, 0x79, 0xf0, 0xa4, 0x8c, 0x1d, 0x76, 0x90, 0xa2, 0x64, 0xed, 0x23, 0x04, 0xd2, + 0xf7, 0x4a, 0xfa, 0x3a, 0x56, 0xcf, 0x7e, 0x3a, 0x3f, 0x95, 0xac, 0x29, 0x82, 0x22, 0x3b, 0xfe, + 0xf3, 0x7e, 0xff, 0x5f, 0x92, 0x27, 0x6f, 0x2c, 0x91, 0xf5, 0xa0, 0x0b, 0x4f, 0x6d, 0xa9, 0x4d, + 0xcb, 0x54, 0x79, 0xbd, 0x9b, 0x6f, 0x81, 0xcd, 0x5c, 0x5b, 0xa0, 0x0a, 0x38, 0x74, 0xaa, 0x0e, + 0xc4, 0x94, 0x8e, 0x23, 0xab, 0x7a, 0x56, 0x45, 0x56, 0x0d, 0xec, 0xe5, 0xd5, 0x20, 0x32, 0xb5, + 0xd3, 0x06, 0x91, 0xd8, 0xb0, 0x23, 0x6c, 0x62, 0x75, 0xaa, 0xe4, 0xf9, 0x0b, 0x85, 0xca, 0x78, + 0xf7, 0x01, 0xe5, 0x06, 0x02, 0xc3, 0x3e, 0x1d, 0x49, 0xb1, 0xcf, 0xc4, 0x44, 0xcc, 0x92, 0xb5, + 0xe8, 0x53, 0x97, 0x25, 0x31, 0x75, 0x53, 0x94, 0xa3, 0x25, 0xb5, 0x58, 0x3a, 0xb4, 0x2b, 0xf2, + 0x5d, 0xfa, 0x26, 0x2f, 0xf0, 0xa7, 0xff, 0xbe, 0x83, 0xc0, 0xae, 0x80, 0x26, 0x4b, 0x26, 0x27, + 0xb3, 0xb3, 0xbb, 0x5b, 0x75, 0xe4, 0x61, 0xea, 0xef, 0xbd, 0xeb, 0x14, 0x7f, 0x9d, 0x1c, 0x44, + 0xcb, 0x4f, 0x21, 0xaf, 0x0b, 0xaa, 0x8e, 0x89, 0x56, 0xe2, 0x75, 0x31, 0x8c, 0x2d, 0x79, 0x83, + 0x56, 0x51, 0xb0, 0xda, 0x02, 0xf6, 0x3f, 0xd4, 0x71, 0x64, 0x6a, 0xd7, 0xfc, 0xbb, 0xcb, 0x87, + 0x18, 0xbf, 0x92, 0xf1, 0x53, 0x0f, 0xe6, 0x8f, 0x07, 0x28, 0x5f, 0xb4, 0x4c, 0xcf, 0x3e, 0xdf, + 0x44, 0x68, 0x7b, 0xda, 0xbb, 0xee, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x39, 0xfe, 0x55, 0x67, + 0x96, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/image.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/image.pb.go new file mode 
100644 index 0000000..18bbfc6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/image.pb.go @@ -0,0 +1,370 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/image.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Dataset metadata that is specific to image classification. +type ImageClassificationDatasetMetadata struct { + // Required. Type of the classification problem. + ClassificationType ClassificationType `protobuf:"varint,1,opt,name=classification_type,json=classificationType,proto3,enum=google.cloud.automl.v1beta1.ClassificationType" json:"classification_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageClassificationDatasetMetadata) Reset() { *m = ImageClassificationDatasetMetadata{} } +func (m *ImageClassificationDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*ImageClassificationDatasetMetadata) ProtoMessage() {} +func (*ImageClassificationDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_b1aa6fd5524822c5, []int{0} +} +func (m *ImageClassificationDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageClassificationDatasetMetadata.Unmarshal(m, b) +} +func (m *ImageClassificationDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageClassificationDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *ImageClassificationDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageClassificationDatasetMetadata.Merge(dst, src) +} +func (m *ImageClassificationDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_ImageClassificationDatasetMetadata.Size(m) +} +func (m *ImageClassificationDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImageClassificationDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageClassificationDatasetMetadata proto.InternalMessageInfo + +func (m *ImageClassificationDatasetMetadata) GetClassificationType() ClassificationType { + if m != nil { + return m.ClassificationType + } + return ClassificationType_CLASSIFICATION_TYPE_UNSPECIFIED +} + +// Dataset metadata specific to image object detection. 
+type ImageObjectDetectionDatasetMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageObjectDetectionDatasetMetadata) Reset() { *m = ImageObjectDetectionDatasetMetadata{} } +func (m *ImageObjectDetectionDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*ImageObjectDetectionDatasetMetadata) ProtoMessage() {} +func (*ImageObjectDetectionDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_b1aa6fd5524822c5, []int{1} +} +func (m *ImageObjectDetectionDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageObjectDetectionDatasetMetadata.Unmarshal(m, b) +} +func (m *ImageObjectDetectionDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageObjectDetectionDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *ImageObjectDetectionDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageObjectDetectionDatasetMetadata.Merge(dst, src) +} +func (m *ImageObjectDetectionDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_ImageObjectDetectionDatasetMetadata.Size(m) +} +func (m *ImageObjectDetectionDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImageObjectDetectionDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageObjectDetectionDatasetMetadata proto.InternalMessageInfo + +// Model metadata for image classification. +type ImageClassificationModelMetadata struct { + // Optional. The ID of the `base` model. If it is specified, the new model + // will be created based on the `base` model. Otherwise, the new model will be + // created from scratch. The `base` model must be in the same + // `project` and `location` as the new model to create, and have the same + // `model_type`. + BaseModelId string `protobuf:"bytes,1,opt,name=base_model_id,json=baseModelId,proto3" json:"base_model_id,omitempty"` + // Required. The train budget of creating this model, expressed in hours. The + // actual `train_cost` will be equal or less than this value. + TrainBudget int64 `protobuf:"varint,2,opt,name=train_budget,json=trainBudget,proto3" json:"train_budget,omitempty"` + // Output only. The actual train cost of creating this model, expressed in + // hours. If this model is created from a `base` model, the train cost used + // to create the `base` model are not included. + TrainCost int64 `protobuf:"varint,3,opt,name=train_cost,json=trainCost,proto3" json:"train_cost,omitempty"` + // Output only. The reason that this create model operation stopped, + // e.g. `BUDGET_REACHED`, `MODEL_CONVERGED`. + StopReason string `protobuf:"bytes,5,opt,name=stop_reason,json=stopReason,proto3" json:"stop_reason,omitempty"` + // Optional. Type of the model. The available values are: + // * `cloud` - Model to be used via prediction calls to AutoML API. + // This is the default value. + // * `mobile-low-latency-1` - A model that, in addition to providing + // prediction via AutoML API, can also be exported (see + // [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile or edge device + // with TensorFlow afterwards. Expected to have low latency, but + // may have lower prediction quality than other models. 
+ // * `mobile-versatile-1` - A model that, in addition to providing + // prediction via AutoML API, can also be exported (see + // [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile or edge device + // with TensorFlow afterwards. + // * `mobile-high-accuracy-1` - A model that, in addition to providing + // prediction via AutoML API, can also be exported (see + // [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile or edge device + // with TensorFlow afterwards. Expected to have a higher + // latency, but should also have a higher prediction quality + // than other models. + // * `mobile-core-ml-low-latency-1` - A model that, in addition to providing + // prediction via AutoML API, can also be exported (see + // [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile device with Core + // ML afterwards. Expected to have low latency, but may have + // lower prediction quality than other models. + // * `mobile-core-ml-versatile-1` - A model that, in addition to providing + // prediction via AutoML API, can also be exported (see + // [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile device with Core + // ML afterwards. + // * `mobile-core-ml-high-accuracy-1` - A model that, in addition to + // providing prediction via AutoML API, can also be exported + // (see [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]) and used on a mobile device with + // Core ML afterwards. Expected to have a higher latency, but + // should also have a higher prediction quality than other + // models. + ModelType string `protobuf:"bytes,7,opt,name=model_type,json=modelType,proto3" json:"model_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageClassificationModelMetadata) Reset() { *m = ImageClassificationModelMetadata{} } +func (m *ImageClassificationModelMetadata) String() string { return proto.CompactTextString(m) } +func (*ImageClassificationModelMetadata) ProtoMessage() {} +func (*ImageClassificationModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_b1aa6fd5524822c5, []int{2} +} +func (m *ImageClassificationModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageClassificationModelMetadata.Unmarshal(m, b) +} +func (m *ImageClassificationModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageClassificationModelMetadata.Marshal(b, m, deterministic) +} +func (dst *ImageClassificationModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageClassificationModelMetadata.Merge(dst, src) +} +func (m *ImageClassificationModelMetadata) XXX_Size() int { + return xxx_messageInfo_ImageClassificationModelMetadata.Size(m) +} +func (m *ImageClassificationModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImageClassificationModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageClassificationModelMetadata proto.InternalMessageInfo + +func (m *ImageClassificationModelMetadata) GetBaseModelId() string { + if m != nil { + return m.BaseModelId + } + return "" +} + +func (m *ImageClassificationModelMetadata) GetTrainBudget() int64 { + if m != nil { + return m.TrainBudget + } + return 0 +} + +func (m *ImageClassificationModelMetadata) GetTrainCost() int64 { + if m != nil { + return m.TrainCost + } + return 0 +} + +func (m 
*ImageClassificationModelMetadata) GetStopReason() string { + if m != nil { + return m.StopReason + } + return "" +} + +func (m *ImageClassificationModelMetadata) GetModelType() string { + if m != nil { + return m.ModelType + } + return "" +} + +// Model metadata specific to image object detection. +type ImageObjectDetectionModelMetadata struct { + // Optional. Type of the model. The available values are: + // * `cloud-high-accuracy-1` - (default) A model to be used via prediction + // calls to AutoML API. Expected to have a higher latency, but + // should also have a higher prediction quality than other + // models. + // * `cloud-low-latency-1` - A model to be used via prediction + // calls to AutoML API. Expected to have low latency, but may + // have lower prediction quality than other models. + ModelType string `protobuf:"bytes,1,opt,name=model_type,json=modelType,proto3" json:"model_type,omitempty"` + // Output only. The number of nodes this model is deployed on. A node is an + // abstraction of a machine resource, which can handle online prediction QPS + // as given in the qps_per_node field. + NodeCount int64 `protobuf:"varint,3,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"` + // Output only. An approximate number of online prediction QPS that can + // be supported by this model per each node on which it is deployed. + NodeQps float64 `protobuf:"fixed64,4,opt,name=node_qps,json=nodeQps,proto3" json:"node_qps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageObjectDetectionModelMetadata) Reset() { *m = ImageObjectDetectionModelMetadata{} } +func (m *ImageObjectDetectionModelMetadata) String() string { return proto.CompactTextString(m) } +func (*ImageObjectDetectionModelMetadata) ProtoMessage() {} +func (*ImageObjectDetectionModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_b1aa6fd5524822c5, []int{3} +} +func (m *ImageObjectDetectionModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageObjectDetectionModelMetadata.Unmarshal(m, b) +} +func (m *ImageObjectDetectionModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageObjectDetectionModelMetadata.Marshal(b, m, deterministic) +} +func (dst *ImageObjectDetectionModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageObjectDetectionModelMetadata.Merge(dst, src) +} +func (m *ImageObjectDetectionModelMetadata) XXX_Size() int { + return xxx_messageInfo_ImageObjectDetectionModelMetadata.Size(m) +} +func (m *ImageObjectDetectionModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImageObjectDetectionModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageObjectDetectionModelMetadata proto.InternalMessageInfo + +func (m *ImageObjectDetectionModelMetadata) GetModelType() string { + if m != nil { + return m.ModelType + } + return "" +} + +func (m *ImageObjectDetectionModelMetadata) GetNodeCount() int64 { + if m != nil { + return m.NodeCount + } + return 0 +} + +func (m *ImageObjectDetectionModelMetadata) GetNodeQps() float64 { + if m != nil { + return m.NodeQps + } + return 0 +} + +// Model deployment metadata specific to Image Object Detection. +type ImageObjectDetectionModelDeploymentMetadata struct { + // Input only. The number of nodes to deploy the model on. 
A node is an + // abstraction of a machine resource, which can handle online prediction QPS + // as given in the model's + // + // [qps_per_node][google.cloud.automl.v1beta1.ImageObjectDetectionModelMetadata.qps_per_node]. + // Must be between 1 and 100, inclusive on both ends. + NodeCount int64 `protobuf:"varint,1,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageObjectDetectionModelDeploymentMetadata) Reset() { + *m = ImageObjectDetectionModelDeploymentMetadata{} +} +func (m *ImageObjectDetectionModelDeploymentMetadata) String() string { + return proto.CompactTextString(m) +} +func (*ImageObjectDetectionModelDeploymentMetadata) ProtoMessage() {} +func (*ImageObjectDetectionModelDeploymentMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_b1aa6fd5524822c5, []int{4} +} +func (m *ImageObjectDetectionModelDeploymentMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata.Unmarshal(m, b) +} +func (m *ImageObjectDetectionModelDeploymentMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata.Marshal(b, m, deterministic) +} +func (dst *ImageObjectDetectionModelDeploymentMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata.Merge(dst, src) +} +func (m *ImageObjectDetectionModelDeploymentMetadata) XXX_Size() int { + return xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata.Size(m) +} +func (m *ImageObjectDetectionModelDeploymentMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageObjectDetectionModelDeploymentMetadata proto.InternalMessageInfo + +func (m *ImageObjectDetectionModelDeploymentMetadata) GetNodeCount() int64 { + if m != nil { + return m.NodeCount + } + return 0 +} + +func init() { + proto.RegisterType((*ImageClassificationDatasetMetadata)(nil), "google.cloud.automl.v1beta1.ImageClassificationDatasetMetadata") + proto.RegisterType((*ImageObjectDetectionDatasetMetadata)(nil), "google.cloud.automl.v1beta1.ImageObjectDetectionDatasetMetadata") + proto.RegisterType((*ImageClassificationModelMetadata)(nil), "google.cloud.automl.v1beta1.ImageClassificationModelMetadata") + proto.RegisterType((*ImageObjectDetectionModelMetadata)(nil), "google.cloud.automl.v1beta1.ImageObjectDetectionModelMetadata") + proto.RegisterType((*ImageObjectDetectionModelDeploymentMetadata)(nil), "google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/image.proto", fileDescriptor_image_b1aa6fd5524822c5) +} + +var fileDescriptor_image_b1aa6fd5524822c5 = []byte{ + // 458 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x6b, 0xd4, 0x40, + 0x10, 0xc6, 0xd9, 0x56, 0xad, 0xdd, 0x53, 0x1f, 0xe2, 0x4b, 0x6c, 0x95, 0xbb, 0x46, 0xc4, 0x03, + 0x21, 0xb1, 0xfa, 0xe8, 0x53, 0xef, 0x0e, 0xa4, 0xe0, 0x61, 0x0d, 0xe2, 0x83, 0x1c, 0xc4, 0x4d, + 0x32, 0x0d, 0x91, 0x64, 0x67, 0xbd, 0x9d, 0x08, 0x07, 0x3e, 0xfb, 0xec, 0x9f, 0x25, 0xfe, 0x55, + 0xb2, 0xb3, 0xa1, 0x34, 0xf5, 0xbc, 0xc7, 0xfd, 0xe6, 0x37, 0xf3, 0x7d, 0xb3, 0x8c, 0x7c, 0x5e, + 0x21, 0x56, 0x0d, 0x24, 0x45, 0x83, 0x5d, 0x99, 0xa8, 0x8e, 0xb0, 0x6d, 0x92, 0xef, 0xa7, 
0x39, + 0x90, 0x3a, 0x4d, 0xea, 0x56, 0x55, 0x10, 0x9b, 0x35, 0x12, 0x06, 0xc7, 0x1e, 0x8c, 0x19, 0x8c, + 0x3d, 0x18, 0xf7, 0xe0, 0xd1, 0xe3, 0x7e, 0x8a, 0x32, 0x75, 0xa2, 0xb4, 0x46, 0x52, 0x54, 0xa3, + 0xb6, 0xbe, 0xf5, 0xe8, 0xe5, 0x2e, 0x8f, 0xa2, 0x51, 0xd6, 0xd6, 0x97, 0x75, 0xc1, 0x2d, 0x7d, + 0xc7, 0xb8, 0xef, 0xe0, 0x57, 0xde, 0x5d, 0x26, 0x54, 0xb7, 0x60, 0x49, 0xb5, 0xc6, 0x03, 0xd1, + 0x4f, 0x21, 0xa3, 0x73, 0x97, 0x6e, 0x3e, 0x68, 0x5f, 0x28, 0x52, 0x16, 0x68, 0x09, 0xa4, 0x4a, + 0x45, 0x2a, 0xf8, 0x22, 0x1f, 0x0e, 0xe7, 0x67, 0xb4, 0x31, 0x10, 0x8a, 0x89, 0x98, 0x3e, 0x78, + 0x95, 0xc4, 0x3b, 0x56, 0x8a, 0x87, 0x83, 0x3f, 0x6e, 0x0c, 0xa4, 0x41, 0xf1, 0x8f, 0x16, 0x3d, + 0x93, 0x4f, 0x39, 0xc7, 0xfb, 0xfc, 0x2b, 0x14, 0xb4, 0x00, 0x82, 0x62, 0x4b, 0x90, 0xe8, 0xb7, + 0x90, 0x93, 0x2d, 0x79, 0x97, 0x58, 0x42, 0x73, 0x95, 0x36, 0x92, 0xf7, 0x73, 0x65, 0x21, 0x6b, + 0x9d, 0x9a, 0xd5, 0x25, 0xe7, 0x3c, 0x4c, 0x47, 0x4e, 0x64, 0xf2, 0xbc, 0x0c, 0x4e, 0xe4, 0x3d, + 0x5a, 0xab, 0x5a, 0x67, 0x79, 0x57, 0x56, 0x40, 0xe1, 0xde, 0x44, 0x4c, 0xf7, 0xd3, 0x11, 0x6b, + 0x33, 0x96, 0x82, 0x27, 0x52, 0x7a, 0xa4, 0x40, 0x4b, 0xe1, 0x3e, 0x03, 0x87, 0xac, 0xcc, 0xd1, + 0x52, 0x30, 0x96, 0x23, 0x4b, 0x68, 0xb2, 0x35, 0x28, 0x8b, 0x3a, 0xbc, 0xcd, 0x1e, 0xd2, 0x49, + 0x29, 0x2b, 0xae, 0xdf, 0x27, 0xe0, 0xbf, 0x3a, 0xe0, 0xfa, 0x21, 0x2b, 0xbc, 0xf1, 0x0f, 0x79, + 0xb2, 0x6d, 0xe3, 0xe1, 0x2a, 0xc3, 0x19, 0xe2, 0xc6, 0x0c, 0x57, 0xd6, 0x58, 0x42, 0x56, 0x60, + 0xa7, 0xaf, 0x22, 0x3a, 0x65, 0xee, 0x84, 0xe0, 0x91, 0xbc, 0xcb, 0xe5, 0x6f, 0xc6, 0x86, 0xb7, + 0x26, 0x62, 0x2a, 0xd2, 0x03, 0xf7, 0xfe, 0x60, 0x6c, 0xf4, 0x4e, 0xbe, 0xf8, 0xaf, 0xfb, 0x02, + 0x4c, 0x83, 0x9b, 0x16, 0x34, 0x5d, 0xcf, 0x71, 0xcd, 0x48, 0xdc, 0x30, 0x9a, 0xfd, 0x12, 0x72, + 0x5c, 0x60, 0xbb, 0xeb, 0x10, 0x66, 0x92, 0xfd, 0x2e, 0xdc, 0xd9, 0x5d, 0x88, 0xcf, 0x67, 0x3d, + 0x5a, 0x61, 0xa3, 0x74, 0x15, 0xe3, 0xba, 0x4a, 0x2a, 0xd0, 0x7c, 0x94, 0x89, 0x2f, 0x29, 0x53, + 0xdb, 0xad, 0xa7, 0xfe, 0xc6, 0x3f, 0xff, 0xec, 0x1d, 0xbf, 0x65, 0x70, 0x35, 0x77, 0xd0, 0xea, + 0xac, 0x23, 0x5c, 0x36, 0xab, 0x4f, 0x1e, 0xca, 0xef, 0xf0, 0xac, 0xd7, 0x7f, 0x03, 0x00, 0x00, + 0xff, 0xff, 0x28, 0xe5, 0x7e, 0xd9, 0x99, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/io.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/io.pb.go new file mode 100644 index 0000000..d0f3971 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/io.pb.go @@ -0,0 +1,1861 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/io.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Input configuration for ImportData Action. +// +// The format of input depends on dataset_metadata the Dataset into which +// the import is happening has. 
As input source the +// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] +// is expected, unless specified otherwise. If a file with identical content +// (even if it had different GCS_FILE_PATH) is mentioned multiple times , then +// its label, bounding boxes etc. are appended. The same file should be always +// provided with the same ML_USE and GCS_FILE_PATH, if it is not then +// these values are nondeterministically selected from the given ones. +// +// The formats are represented in EBNF with commas being literal and with +// non-terminal symbols defined near the end of this comment. The formats are: +// +// * For Image Object Detection: +// CSV file(s) with each line in format: +// ML_USE,GCS_FILE_PATH,LABEL,BOUNDING_BOX +// GCS_FILE_PATH leads to image of up to 30MB in size. Supported +// extensions: .JPEG, .GIF, .PNG. +// Each image is assumed to be exhaustively labeled. The +// minimum allowed BOUNDING_BOX edge length is 0.01, and no more than +// 500 BOUNDING_BOX-es per image are allowed. +// Three sample rows: +// TRAIN,gs://folder/image1.png,car,0.1,0.1,,,0.3,0.3,, +// TRAIN,gs://folder/image1.png,bike,.7,.6,,,.8,.9,, +// TEST,gs://folder/im2.png,car,0.1,0.1,0.2,0.1,0.2,0.3,0.1,0.3 +// +// +// * For Video Classification: +// CSV file(s) with each line in format: +// ML_USE,GCS_FILE_PATH +// where ML_USE VALIDATE value should not be used. The GCS_FILE_PATH +// should lead to another .csv file which describes examples that have +// given ML_USE, using the following row format: +// GCS_FILE_PATH,LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END +// Here GCS_FILE_PATH leads to a video of up to 50GB in size and up +// to 3h duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI. +// TIME_SEGMENT_START and TIME_SEGMENT_END must be within the +// length of the video, and end has to be after the start. Any segment +// of a video which has one or more labels on it, is considered a +// hard negative for all other labels. Any segment with no labels on +// it is considered to be unknown. +// Sample top level CSV file: +// TRAIN,gs://folder/train_videos.csv +// TEST,gs://folder/test_videos.csv +// UNASSIGNED,gs://folder/other_videos.csv +// Three sample rows of a CSV file for a particular ML_USE: +// gs://folder/video1.avi,car,120,180.000021 +// gs://folder/video1.avi,bike,150,180.000021 +// gs://folder/vid2.avi,car,0,60.5 +// * For Text Extraction: +// CSV file(s) with each line in format: +// ML_USE,GCS_FILE_PATH +// GCS_FILE_PATH leads to a .JSONL (i.e. JSON Lines) file which either +// imports text in-line or as documents. +// The in-line .JSONL file contains, per line, a proto that wraps a +// TextSnippet proto (in json representation) followed by one or +// more AnnotationPayload protos (called annotations), which have +// display_name and text_extraction detail populated. +// Given text is expected to be annotated exhaustively, e.g. if you +// look for animals and text contains "dolphin" that is not labeled, +// then "dolphin" will be assumed to not be an animal. +// Any given text snippet content must have 30,000 characters or +// less, and also be UTF-8 NFC encoded (ASCII already is). +// The document .JSONL file contains, per line, a proto that wraps a +// Document proto with input_config set. Only PDF documents are +// supported now, and each document may be up to 2MB large. +// Currently annotations on documents cannot be specified at import. +// Any given .JSONL file must be 100MB or smaller. 
+// Three sample CSV rows: +// TRAIN,gs://folder/file1.jsonl +// VALIDATE,gs://folder/file2.jsonl +// TEST,gs://folder/file3.jsonl +// Sample in-line JSON Lines file (presented here with artificial line +// breaks, but the only actual line break is denoted by \n).: +// { +// "text_snippet": { +// "content": "dog car cat" +// }, +// "annotations": [ +// { +// "display_name": "animal", +// "text_extraction": { +// "text_segment": {"start_offset": 0, "end_offset": 2} +// } +// }, +// { +// "display_name": "vehicle", +// "text_extraction": { +// "text_segment": {"start_offset": 4, "end_offset": 6} +// } +// }, +// { +// "display_name": "animal", +// "text_extraction": { +// "text_segment": {"start_offset": 8, "end_offset": 10} +// } +// } +// ] +// }\n +// { +// "text_snippet": { +// "content": "This dog is good." +// }, +// "annotations": [ +// { +// "display_name": "animal", +// "text_extraction": { +// "text_segment": {"start_offset": 5, "end_offset": 7} +// } +// } +// ] +// } +// Sample document JSON Lines file (presented here with artificial line +// breaks, but the only actual line break is denoted by \n).: +// { +// "document": { +// "input_config": { +// "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ] +// } +// } +// } +// }\n +// { +// "document": { +// "input_config": { +// "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ] +// } +// } +// } +// } +// * For Tables: +// Either +// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or +// +// [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source] +// can be used. All inputs will be concatenated into a single +// +// [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_name] +// For gcs_source: +// CSV file(s), where first file must have a header containing unique +// column names, other files may have such header line too, and all +// other lines contain values for the header columns. Each line must +// have 1,000,000 or fewer characters. +// First three sample rows of a CSV file: +// "Id","First Name","Last Name","Dob","Addresses" +// +// "1","John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]" +// +// "2","Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]} +// For bigquery_source: +// An URI of a BigQuery table. +// An imported table must have between 2 and 1,000 columns, inclusive, +// and between 1,000 and 10,000,000 rows, inclusive. +// +// * For Text Sentiment: +// CSV file(s) with each line in format: +// ML_USE,TEXT_SNIPPET,SENTIMENT +// TEXT_SNIPPET must have up to 500 characters. +// Three sample rows: +// TRAIN,"@freewrytin God is way too good for Claritin",2 +// TRAIN,"I need Claritin so bad",3 +// TEST,"Thank god for Claritin.",4 +// +// Definitions: +// ML_USE = "TRAIN" | "VALIDATE" | "TEST" | "UNASSIGNED" +// Describes how the given example (file) should be used for model +// training. "UNASSIGNED" can be used when user has no preference. +// GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/image1.png". +// LABEL = A display name of an object on an image, video etc., e.g. "dog". 
+// Must be up to 32 characters long and can consist only of ASCII +// Latin letters A-Z and a-z, underscores(_), and ASCII digits 0-9. +// For each label an AnnotationSpec is created which display_name +// becomes the label; AnnotationSpecs are given back in predictions. +// INSTANCE_ID = A positive integer that identifies a specific instance of a +// labeled entity on an example. Used e.g. to track two cars on +// a video while being able to tell apart which one is which. +// BOUNDING_BOX = VERTEX,VERTEX,VERTEX,VERTEX | VERTEX,,,VERTEX,, +// A rectangle parallel to the frame of the example (image, +// video). If 4 vertices are given they are connected by edges +// in the order provided, if 2 are given they are recognized +// as diagonally opposite vertices of the rectangle. +// VERTEX = COORDINATE,COORDINATE +// First coordinate is horizontal (x), the second is vertical (y). +// COORDINATE = A float in 0 to 1 range, relative to total length of +// image or video in given dimension. For fractions the +// leading non-decimal 0 can be omitted (i.e. 0.3 = .3). +// Point 0,0 is in top left. +// TIME_SEGMENT_START = TIME_OFFSET +// Expresses a beginning, inclusive, of a time segment +// within an example that has a time dimension +// (e.g. video). +// TIME_SEGMENT_END = TIME_OFFSET +// Expresses an end, exclusive, of a time segment within +// an example that has a time dimension (e.g. video). +// TIME_OFFSET = A number of seconds as measured from the start of an +// example (e.g. video). Fractions are allowed, up to a +// microsecond precision. "inf" is allowed, and it means the end +// of the example. +// TEXT_SNIPPET = A content of a text snippet, UTF-8 encoded. +// SENTIMENT = An integer between 0 and +// Dataset.text_sentiment_dataset_metadata.sentiment_max +// (inclusive). Describes the ordinal of the sentiment - higher +// value means a more positive sentiment. All the values are +// completely relative, i.e. neither 0 needs to mean a negative or +// neutral sentiment nor sentiment_max needs to mean a positive one +// - it is just required that 0 is the least positive sentiment +// in the data, and sentiment_max is the most positive one. +// The SENTIMENT shouldn't be confused with "score" or "magnitude" +// from the previous Natural Language Sentiment Analysis API. +// All SENTIMENT values between 0 and sentiment_max must be +// represented in the imported data. On prediction the same 0 to +// sentiment_max range will be used. The difference between +// neighboring sentiment values needs not to be uniform, e.g. 1 and +// 2 may be similar whereas the difference between 2 and 3 may be +// huge. +// +// Errors: +// If any of the provided CSV files can't be parsed or if more than certain +// percent of CSV rows cannot be processed then the operation fails and +// nothing is imported. Regardless of overall success or failure the per-row +// failures, up to a certain count cap, will be listed in +// Operation.metadata.partial_failures. +// +type InputConfig struct { + // Required. The source of the input. + // + // Types that are valid to be assigned to Source: + // *InputConfig_GcsSource + // *InputConfig_BigquerySource + Source isInputConfig_Source `protobuf_oneof:"source"` + // Additional domain-specific parameters describing the semantic of the + // imported data, any string must be up to 25000 + // characters long. + // + // * For Tables: + // `schema_inference_version` - (integer) Required. 
The version of the + // algorithm that should be used for the initial inference of the + // schema (columns' DataTypes) of the table the data is being imported + // into. Allowed values: "1". + Params map[string]string `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{0} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +type isInputConfig_Source interface { + isInputConfig_Source() +} + +type InputConfig_GcsSource struct { + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +type InputConfig_BigquerySource struct { + BigquerySource *BigQuerySource `protobuf:"bytes,3,opt,name=bigquery_source,json=bigquerySource,proto3,oneof"` +} + +func (*InputConfig_GcsSource) isInputConfig_Source() {} + +func (*InputConfig_BigquerySource) isInputConfig_Source() {} + +func (m *InputConfig) GetSource() isInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *InputConfig) GetGcsSource() *GcsSource { + if x, ok := m.GetSource().(*InputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +func (m *InputConfig) GetBigquerySource() *BigQuerySource { + if x, ok := m.GetSource().(*InputConfig_BigquerySource); ok { + return x.BigquerySource + } + return nil +} + +func (m *InputConfig) GetParams() map[string]string { + if m != nil { + return m.Params + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
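+// A minimal sketch of populating the Source oneof and Params map above, assuming
+// GcsSource exposes an InputUris field as it does elsewhere in this generated package:
+//
+//	cfg := &InputConfig{
+//		Source: &InputConfig_GcsSource{
+//			GcsSource: &GcsSource{InputUris: []string{"gs://bucket/dataset.csv"}},
+//		},
+//		Params: map[string]string{"schema_inference_version": "1"},
+//	}
+//	if gcs := cfg.GetGcsSource(); gcs != nil {
+//		// the gcs_source branch of the oneof was set
+//	}
+//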
+func (*InputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _InputConfig_OneofMarshaler, _InputConfig_OneofUnmarshaler, _InputConfig_OneofSizer, []interface{}{ + (*InputConfig_GcsSource)(nil), + (*InputConfig_BigquerySource)(nil), + } +} + +func _InputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case *InputConfig_BigquerySource: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigquerySource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("InputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _InputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*InputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsSource) + err := b.DecodeMessage(msg) + m.Source = &InputConfig_GcsSource{msg} + return true, err + case 3: // source.bigquery_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQuerySource) + err := b.DecodeMessage(msg) + m.Source = &InputConfig_BigquerySource{msg} + return true, err + default: + return false, nil + } +} + +func _InputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *InputConfig_BigquerySource: + s := proto.Size(x.BigquerySource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Input configuration for BatchPredict Action. +// +// The format of input depends on the ML problem of the model used for +// prediction. As input source the +// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] +// is expected, unless specified otherwise. +// +// The formats are represented in EBNF with commas being literal and with +// non-terminal symbols defined near the end of this comment. The formats are: +// +// * For Video Classification: +// CSV file(s) with each line in format: +// GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END +// GCS_FILE_PATH leads to video of up to 50GB in size and up to 3h +// duration. Supported extensions: .MOV, .MPEG4, .MP4, .AVI. +// TIME_SEGMENT_START and TIME_SEGMENT_END must be within the +// length of the video, and end has to be after the start. +// Three sample rows: +// gs://folder/video1.mp4,10,40 +// gs://folder/video1.mp4,20,60 +// gs://folder/vid2.mov,0,inf +// +// * For Text Extraction +// .JSONL (i.e. JSON Lines) file(s) which either provide text in-line or +// as documents (for a single BatchPredict call only one of the these +// formats may be used). +// The in-line .JSONL file(s) contain per line a proto that +// wraps a temporary user-assigned TextSnippet ID (string up to 2000 +// characters long) called "id" followed by a TextSnippet proto (in +// json representation). 
Any given text snippet content must have +// 30,000 characters or less, and also be UTF-8 NFC encoded (ASCII +// already is). The IDs provided should be unique. +// The document .JSONL file(s) contain, per line, a proto that wraps a +// Document proto with input_config set. Only PDF documents are +// supported now, and each document must be up to 2MB large. +// Any given .JSONL file must be 100MB or smaller, and no more than 20 +// files may be given. +// Sample in-line JSON Lines file (presented here with artificial line +// breaks, but the only actual line break is denoted by \n): +// { +// "id": "my_first_id", +// "text_snippet": { "content": "dog car cat"} +// }\n +// { +// "id": "2", +// "text_snippet": { +// "content": "An elaborate content", +// "mime_type": "text/plain" +// } +// } +// Sample document JSON Lines file (presented here with artificial line +// breaks, but the only actual line break is denoted by \n).: +// { +// "document": { +// "input_config": { +// "gcs_source": { "input_uris": [ "gs://folder/document1.pdf" ] +// } +// } +// } +// }\n +// { +// "document": { +// "input_config": { +// "gcs_source": { "input_uris": [ "gs://folder/document2.pdf" ] +// } +// } +// } +// } +// +// * For Tables: +// Either +// [gcs_source][google.cloud.automl.v1beta1.InputConfig.gcs_source] or +// +// [bigquery_source][google.cloud.automl.v1beta1.InputConfig.bigquery_source]. +// For gcs_source: +// CSV file(s), where first file must have a header containing +// column names, other files may have such header line too, and all +// other lines contain values for the header columns. The column +// names must be exactly same (order may differ) as the model's +// +// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] +// [display_names][google.cloud.automl.v1beta1.display_name], with +// values compatible with these column specs data types. +// Prediction on all the rows, i.e. the CSV lines, will be +// attempted. +// Each line must have 1,000,000 or fewer characters. +// First three sample rows of a CSV file: +// "First Name","Last Name","Dob","Addresses" +// +// "John","Doe","1968-01-22","[{"status":"current","address":"123_First_Avenue","city":"Seattle","state":"WA","zip":"11111","numberOfYears":"1"},{"status":"previous","address":"456_Main_Street","city":"Portland","state":"OR","zip":"22222","numberOfYears":"5"}]" +// +// "Jane","Doe","1980-10-16","[{"status":"current","address":"789_Any_Avenue","city":"Albany","state":"NY","zip":"33333","numberOfYears":"2"},{"status":"previous","address":"321_Main_Street","city":"Hoboken","state":"NJ","zip":"44444","numberOfYears":"3"}]} +// For bigquery_source: +// An URI of a BigQuery table. The table's columns must be exactly +// same (order may differ) as all model's +// +// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] +// [display_names][google.cloud.automl.v1beta1.display_name], with +// data compatible with these colum specs data types. +// Prediction on all the rows of the table will be attempted. +// +// Definitions: +// GCS_FILE_PATH = A path to file on GCS, e.g. "gs://folder/video.avi". +// TIME_SEGMENT_START = TIME_OFFSET +// Expresses a beginning, inclusive, of a time segment +// within an +// example that has a time dimension (e.g. video). +// TIME_SEGMENT_END = TIME_OFFSET +// Expresses an end, exclusive, of a time segment within +// an example that has a time dimension (e.g. video). 
+// TIME_OFFSET = A number of seconds as measured from the start of an +// example (e.g. video). Fractions are allowed, up to a +// microsecond precision. "inf" is allowed and it means the end +// of the example. +// +// Errors: +// If any of the provided CSV files can't be parsed or if more than certain +// percent of CSV rows cannot be processed then the operation fails and +// prediction does not happen. Regardless of overall success or failure the +// per-row failures, up to a certain count cap, will be listed in +// Operation.metadata.partial_failures. +// +type BatchPredictInputConfig struct { + // Required. The source of the input. + // + // Types that are valid to be assigned to Source: + // *BatchPredictInputConfig_GcsSource + // *BatchPredictInputConfig_BigquerySource + Source isBatchPredictInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictInputConfig) Reset() { *m = BatchPredictInputConfig{} } +func (m *BatchPredictInputConfig) String() string { return proto.CompactTextString(m) } +func (*BatchPredictInputConfig) ProtoMessage() {} +func (*BatchPredictInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{1} +} +func (m *BatchPredictInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictInputConfig.Unmarshal(m, b) +} +func (m *BatchPredictInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictInputConfig.Marshal(b, m, deterministic) +} +func (dst *BatchPredictInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictInputConfig.Merge(dst, src) +} +func (m *BatchPredictInputConfig) XXX_Size() int { + return xxx_messageInfo_BatchPredictInputConfig.Size(m) +} +func (m *BatchPredictInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictInputConfig proto.InternalMessageInfo + +type isBatchPredictInputConfig_Source interface { + isBatchPredictInputConfig_Source() +} + +type BatchPredictInputConfig_GcsSource struct { + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +type BatchPredictInputConfig_BigquerySource struct { + BigquerySource *BigQuerySource `protobuf:"bytes,2,opt,name=bigquery_source,json=bigquerySource,proto3,oneof"` +} + +func (*BatchPredictInputConfig_GcsSource) isBatchPredictInputConfig_Source() {} + +func (*BatchPredictInputConfig_BigquerySource) isBatchPredictInputConfig_Source() {} + +func (m *BatchPredictInputConfig) GetSource() isBatchPredictInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *BatchPredictInputConfig) GetGcsSource() *GcsSource { + if x, ok := m.GetSource().(*BatchPredictInputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +func (m *BatchPredictInputConfig) GetBigquerySource() *BigQuerySource { + if x, ok := m.GetSource().(*BatchPredictInputConfig_BigquerySource); ok { + return x.BigquerySource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
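+// A minimal sketch of reading the Source oneof above with a type switch, given a
+// cfg of type *BatchPredictInputConfig (the wrapper types are generated in this file):
+//
+//	switch src := cfg.Source.(type) {
+//	case *BatchPredictInputConfig_GcsSource:
+//		_ = src.GcsSource // prediction input comes from Cloud Storage
+//	case *BatchPredictInputConfig_BigquerySource:
+//		_ = src.BigquerySource // prediction input comes from a BigQuery table
+//	}
+//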
+func (*BatchPredictInputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchPredictInputConfig_OneofMarshaler, _BatchPredictInputConfig_OneofUnmarshaler, _BatchPredictInputConfig_OneofSizer, []interface{}{ + (*BatchPredictInputConfig_GcsSource)(nil), + (*BatchPredictInputConfig_BigquerySource)(nil), + } +} + +func _BatchPredictInputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchPredictInputConfig) + // source + switch x := m.Source.(type) { + case *BatchPredictInputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case *BatchPredictInputConfig_BigquerySource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigquerySource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchPredictInputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _BatchPredictInputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchPredictInputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsSource) + err := b.DecodeMessage(msg) + m.Source = &BatchPredictInputConfig_GcsSource{msg} + return true, err + case 2: // source.bigquery_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQuerySource) + err := b.DecodeMessage(msg) + m.Source = &BatchPredictInputConfig_BigquerySource{msg} + return true, err + default: + return false, nil + } +} + +func _BatchPredictInputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchPredictInputConfig) + // source + switch x := m.Source.(type) { + case *BatchPredictInputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchPredictInputConfig_BigquerySource: + s := proto.Size(x.BigquerySource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Input configuration of a [Document][google.cloud.automl.v1beta1.Document]. +type DocumentInputConfig struct { + // The Google Cloud Storage location of the document file. Only a single path + // should be given. + // Max supported size: 512MB. + // Supported extensions: .PDF. 
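+ // A minimal construction sketch, assuming GcsSource exposes an InputUris field
+ // as elsewhere in this generated package:
+ //
+ //	doc := &DocumentInputConfig{
+ //		GcsSource: &GcsSource{InputUris: []string{"gs://bucket/contract.pdf"}},
+ //	}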
+ GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentInputConfig) Reset() { *m = DocumentInputConfig{} } +func (m *DocumentInputConfig) String() string { return proto.CompactTextString(m) } +func (*DocumentInputConfig) ProtoMessage() {} +func (*DocumentInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{2} +} +func (m *DocumentInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentInputConfig.Unmarshal(m, b) +} +func (m *DocumentInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentInputConfig.Marshal(b, m, deterministic) +} +func (dst *DocumentInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentInputConfig.Merge(dst, src) +} +func (m *DocumentInputConfig) XXX_Size() int { + return xxx_messageInfo_DocumentInputConfig.Size(m) +} +func (m *DocumentInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentInputConfig proto.InternalMessageInfo + +func (m *DocumentInputConfig) GetGcsSource() *GcsSource { + if m != nil { + return m.GcsSource + } + return nil +} + +// Output configuration for ExportData. +// +// As destination the +// [gcs_destination][google.cloud.automl.v1beta1.OutputConfig.gcs_destination] +// must be set unless specified otherwise for a domain. +// Only ground truth annotations are exported (not approved annotations are +// not exported). +// +// The outputs correspond to how the data was imported, and may be used as +// input to import data. The output formats are represented as EBNF with literal +// commas and same non-terminal symbols definitions are these in import data's +// [InputConfig][google.cloud.automl.v1beta1.InputConfig]: +// +// * For Image Object Detection: +// CSV file(s) `image_object_detection_1.csv`, +// `image_object_detection_2.csv`,...,`image_object_detection_N.csv` +// with each line in format: +// ML_USE,GCS_FILE_PATH,LABEL,BOUNDING_BOX +// where GCS_FILE_PATHs point at the original, source locations of the +// imported images. +// +// * For Video Classification: +// CSV file `video_classification.csv`, with each line in format: +// ML_USE,GCS_FILE_PATH +// (may have muliple lines per a single ML_USE). +// Each GCS_FILE_PATH leads to another .csv file which +// describes examples that have given ML_USE, using the following +// row format: +// GCS_FILE_PATH,LABEL,TIME_SEGMENT_START,TIME_SEGMENT_END +// Here GCS_FILE_PATHs point at the original, source locations of the +// imported videos. +// * For Text Extraction: +// CSV file `text_extraction.csv`, with each line in format: +// ML_USE,GCS_FILE_PATH +// GCS_FILE_PATH leads to a .JSONL (i.e. JSON Lines) file which +// contains, per line, a proto that wraps a TextSnippet proto (in json +// representation) followed by AnnotationPayload protos (called +// annotations). If initially documents had been imported, corresponding +// OCR-ed representation is returned. +// +// * For Tables: +// Output depends on whether the dataset was imported from GCS or +// BigQuery. +// GCS case: +// +// [gcs_destination][google.cloud.automl.v1beta1.OutputConfig.gcs_destination] +// must be set. 
Exported are CSV file(s) `tables_1.csv`, +// `tables_2.csv`,...,`tables_N.csv` with each having as header line +// the table's column names, and all other lines contain values for +// the header columns. +// BigQuery case: +// +// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination] +// pointing to a BigQuery project must be set. In the given project a +// new dataset will be created with name +// +// `export_data__` +// where will be made +// BigQuery-dataset-name compatible (e.g. most special characters will +// become underscores), and timestamp will be in +// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In that +// dataset a new table called `primary_table` will be created, and +// filled with precisely the same data as this obtained on import. +type OutputConfig struct { + // Required. The destination of the output. + // + // Types that are valid to be assigned to Destination: + // *OutputConfig_GcsDestination + // *OutputConfig_BigqueryDestination + Destination isOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{3} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +type isOutputConfig_Destination interface { + isOutputConfig_Destination() +} + +type OutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +type OutputConfig_BigqueryDestination struct { + BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"` +} + +func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} + +func (*OutputConfig_BigqueryDestination) isOutputConfig_Destination() {} + +func (m *OutputConfig) GetDestination() isOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetBigqueryDestination() *BigQueryDestination { + if x, ok := m.GetDestination().(*OutputConfig_BigqueryDestination); ok { + return x.BigqueryDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
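Editor's note: a minimal sketch, not part of the generated file, of how the OutputConfig oneof above might be filled in for an ExportData call; the destination paths and project ID are hypothetical.

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

func main() {
	// Export to a Cloud Storage directory prefix (hypothetical bucket).
	gcsOut := &automl.OutputConfig{
		Destination: &automl.OutputConfig_GcsDestination{
			GcsDestination: &automl.GcsDestination{
				OutputUriPrefix: "gs://my-bucket/exports/",
			},
		},
	}

	// For Tables datasets a BigQuery project may be used instead.
	bqOut := &automl.OutputConfig{
		Destination: &automl.OutputConfig_BigqueryDestination{
			BigqueryDestination: &automl.BigQueryDestination{
				OutputUri: "bq://my-project",
			},
		},
	}

	fmt.Println(gcsOut.GetGcsDestination().GetOutputUriPrefix())
	fmt.Println(bqOut.GetBigqueryDestination().GetOutputUri())
}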
+func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ + (*OutputConfig_GcsDestination)(nil), + (*OutputConfig_BigqueryDestination)(nil), + } +} + +func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case *OutputConfig_BigqueryDestination: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigqueryDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsDestination{msg} + return true, err + case 2: // destination.bigquery_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_BigqueryDestination{msg} + return true, err + default: + return false, nil + } +} + +func _OutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OutputConfig_BigqueryDestination: + s := proto.Size(x.BigqueryDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Output configuration for BatchPredict Action. +// +// As destination the +// +// [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination] +// must be set unless specified otherwise for a domain. If gcs_destination is +// set then in the given directory a new directory will be created. Its name +// will be +// "prediction--", +// where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. The contents +// of it depend on the ML problem the predictions are made for. +// +// * For Video Classification: +// In the created directory a video_classification.csv file, and a .JSON +// file per each video classification requested in the input (i.e. each +// line in given CSV(s)), will be created. +// +// The format of video_classification.csv is: +// +// GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END,JSON_FILE_NAME,STATUS +// where: +// GCS_FILE_PATH,TIME_SEGMENT_START,TIME_SEGMENT_END = matches 1 to 1 +// the prediction input lines (i.e. video_classification.csv has +// precisely the same number of lines as the prediction input had.) +// JSON_FILE_NAME = Name of .JSON file in the output directory, which +// contains prediction responses for the video time segment. 
+// STATUS = "OK" if prediction completed successfully, or an error +// code and,or message otherwise. If STATUS is not "OK" then the +// .JSON file for that line may not exist or be empty. +// +// Each .JSON file, assuming STATUS is "OK", will contain a list of +// AnnotationPayload protos in JSON format, which are the predictions +// for the video time segment the file is assigned to in the +// video_classification.csv. All AnnotationPayload protos will have +// video_classification field set, and will be sorted by +// video_classification.type field (note that the returned types are +// governed by `classifaction_types` parameter in +// [PredictService.BatchPredictRequest.params][]). +// * For Text Extraction: +// In the created directory files `text_extraction_1.jsonl`, +// `text_extraction_2.jsonl`,...,`text_extraction_N.jsonl` +// will be created, where N may be 1, and depends on the +// total number of inputs and annotations found. +// The contents of these .JSONL file(s) depend on whether the input +// used inline text, or documents. +// If input was inline, then each .JSONL file will contain, per line, +// a JSON representation of a proto that wraps given in request text +// snippet's "id" : "" followed by a list of zero or more +// AnnotationPayload protos (called annotations), which have +// text_extraction detail populated. A single text snippet will be +// listed only once with all its annotations, and its annotations will +// never be split across files. +// If input used documents, then each .JSONL file will contain, per +// line, a JSON representation of a proto that wraps given in request +// document proto, followed by its OCR-ed representation in the form +// of a text snippet, finally followed by a list of zero or more +// AnnotationPayload protos (called annotations), which have +// text_extraction detail populated and refer, via their indices, to +// the OCR-ed text snippet. A single document (and its text snippet) +// will be listed only once with all its annotations, and its +// annotations will never be split across files. +// If prediction for any text snippet failed (partially or completely), +// then additional `errors_1.jsonl`, `errors_2.jsonl`,..., +// `errors_N.jsonl` files will be created (N depends on total number of +// failed predictions). These files will have a JSON representation of a +// proto that wraps either the "id" : "" (in case of inline) +// or the document proto (in case of document) but here followed by +// exactly one +// +// [`google.rpc.Status`](https: +// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) +// containing only `code` and `message`. +// +// * For Tables: +// Output depends on whether +// +// [gcs_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.gcs_destination] +// or +// +// [bigquery_destination][google.cloud.automl.v1beta1.BatchPredictOutputConfig.bigquery_destination] +// is set (either is allowed). +// GCS case: +// In the created directory files `tables_1.csv`, `tables_2.csv`,..., +// `tables_N.csv` will be created, where N may be 1, and depends on +// the total number of the successfully predicted rows. 
+// For the classification models: +// Each .csv file will contain a header, listing all model's +// +// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] +// +// [display_names][google.cloud.automl.v1beta1.ColumnSpec.display_name] +// followed by M target column names in the format of +// +// "<[target_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] +// +// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]>__score" where M is the number of distinct target values, +// i.e. number of distinct values in the target column of the table +// used to train the model. Subsequent lines will contain the +// respective values of successfully predicted rows, with the last, +// i.e. the target, columns having the corresponding prediction +// [scores][google.cloud.automl.v1beta1.TablesAnnotation.score]. +// For the regression models: +// Each .csv file will contain a header, listing all model's +// +// [input_feature_column_specs][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] +// [display_names][google.cloud.automl.v1beta1.display_name] +// followed by the target column with name equal to +// +// [target_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] +// +// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]. +// Subsequent lines will contain the respective values of +// successfully predicted rows, with the last, i.e. the target, +// column having the predicted target value. +// If prediction for any rows failed, then an additional +// `errors_1.csv`, `errors_2.csv`,..., `errors_N.csv` will be created +// (N depends on total number of failed rows). These files will have +// analogous format as `tables_*.csv`, but always with a single target +// column having +// +// [`google.rpc.Status`](https: +// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) +// represented as a JSON string, and containing only `code` and +// `message`. +// BigQuery case: +// +// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination] +// pointing to a BigQuery project must be set. In the given project a +// new dataset will be created with name +// `prediction__` +// where will be made +// BigQuery-dataset-name compatible (e.g. most special characters will +// become underscores), and timestamp will be in +// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In the dataset +// two tables will be created, `predictions`, and `errors`. +// The `predictions` table's column names will be the +// +// [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] +// +// [display_names][google.cloud.automl.v1beta1.ColumnSpec.display_name] +// followed by model's +// +// [target_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] +// +// [display_name][google.cloud.automl.v1beta1.ColumnSpec.display_name]. +// The input feature columns will contain the respective values of +// successfully predicted rows, with the target column having an +// ARRAY of +// +// [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload], +// represented as STRUCT-s, containing +// [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation]. 
+// The `errors` table contains rows for which the prediction has +// failed, it has analogous input feature and target columns, but +// here the target column as a value has +// +// [`google.rpc.Status`](https: +// //github.com/googleapis/googleapis/blob/master/google/rpc/status.proto) +// represented as a STRUCT, and containing only `code` and `message`. +type BatchPredictOutputConfig struct { + // Required. The destination of the output. + // + // Types that are valid to be assigned to Destination: + // *BatchPredictOutputConfig_GcsDestination + // *BatchPredictOutputConfig_BigqueryDestination + Destination isBatchPredictOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictOutputConfig) Reset() { *m = BatchPredictOutputConfig{} } +func (m *BatchPredictOutputConfig) String() string { return proto.CompactTextString(m) } +func (*BatchPredictOutputConfig) ProtoMessage() {} +func (*BatchPredictOutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{4} +} +func (m *BatchPredictOutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictOutputConfig.Unmarshal(m, b) +} +func (m *BatchPredictOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictOutputConfig.Marshal(b, m, deterministic) +} +func (dst *BatchPredictOutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictOutputConfig.Merge(dst, src) +} +func (m *BatchPredictOutputConfig) XXX_Size() int { + return xxx_messageInfo_BatchPredictOutputConfig.Size(m) +} +func (m *BatchPredictOutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictOutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictOutputConfig proto.InternalMessageInfo + +type isBatchPredictOutputConfig_Destination interface { + isBatchPredictOutputConfig_Destination() +} + +type BatchPredictOutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +type BatchPredictOutputConfig_BigqueryDestination struct { + BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"` +} + +func (*BatchPredictOutputConfig_GcsDestination) isBatchPredictOutputConfig_Destination() {} + +func (*BatchPredictOutputConfig_BigqueryDestination) isBatchPredictOutputConfig_Destination() {} + +func (m *BatchPredictOutputConfig) GetDestination() isBatchPredictOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *BatchPredictOutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*BatchPredictOutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +func (m *BatchPredictOutputConfig) GetBigqueryDestination() *BigQueryDestination { + if x, ok := m.GetDestination().(*BatchPredictOutputConfig_BigqueryDestination); ok { + return x.BigqueryDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
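Editor's note: to complement the construction sketches, this illustrative-only snippet (not part of the vendored code) shows how client code might inspect which destination was set on a BatchPredictOutputConfig via the generated oneof wrappers; the helper name and project ID are hypothetical.

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

// describeDestination is a hypothetical helper that reports where batch
// prediction results will be written.
func describeDestination(cfg *automl.BatchPredictOutputConfig) string {
	switch d := cfg.GetDestination().(type) {
	case *automl.BatchPredictOutputConfig_GcsDestination:
		return "GCS prefix: " + d.GcsDestination.GetOutputUriPrefix()
	case *automl.BatchPredictOutputConfig_BigqueryDestination:
		return "BigQuery project: " + d.BigqueryDestination.GetOutputUri()
	default:
		return "no destination set"
	}
}

func main() {
	cfg := &automl.BatchPredictOutputConfig{
		Destination: &automl.BatchPredictOutputConfig_BigqueryDestination{
			BigqueryDestination: &automl.BigQueryDestination{OutputUri: "bq://my-project"},
		},
	}
	fmt.Println(describeDestination(cfg))
}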
+func (*BatchPredictOutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchPredictOutputConfig_OneofMarshaler, _BatchPredictOutputConfig_OneofUnmarshaler, _BatchPredictOutputConfig_OneofSizer, []interface{}{ + (*BatchPredictOutputConfig_GcsDestination)(nil), + (*BatchPredictOutputConfig_BigqueryDestination)(nil), + } +} + +func _BatchPredictOutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchPredictOutputConfig) + // destination + switch x := m.Destination.(type) { + case *BatchPredictOutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case *BatchPredictOutputConfig_BigqueryDestination: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigqueryDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchPredictOutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _BatchPredictOutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchPredictOutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &BatchPredictOutputConfig_GcsDestination{msg} + return true, err + case 2: // destination.bigquery_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryDestination) + err := b.DecodeMessage(msg) + m.Destination = &BatchPredictOutputConfig_BigqueryDestination{msg} + return true, err + default: + return false, nil + } +} + +func _BatchPredictOutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchPredictOutputConfig) + // destination + switch x := m.Destination.(type) { + case *BatchPredictOutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchPredictOutputConfig_BigqueryDestination: + s := proto.Size(x.BigqueryDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Output configuration for ModelExport Action. +type ModelExportOutputConfig struct { + // Required. The destination of the output. + // + // Types that are valid to be assigned to Destination: + // *ModelExportOutputConfig_GcsDestination + // *ModelExportOutputConfig_GcrDestination + Destination isModelExportOutputConfig_Destination `protobuf_oneof:"destination"` + // The format in which the model must be exported. The available, and default, + // formats depend on the problem and model type (if given problem and type + // combination doesn't have a format listed, it means its models are not + // exportable): + // + // * For Image Classification mobile-low-latency-1, mobile-versatile-1, + // mobile-high-accuracy-1: + // "tflite" (default), "edgetpu_tflite", "tf_saved_model", "docker". + // + // * For Image Classification mobile-core-ml-low-latency-1, + // mobile-core-ml-versatile-1, mobile-core-ml-high-accuracy-1: + // "core_ml" (default). + // Formats description: + // + // * tflite - Used for Android mobile devices. 
+ // * edgetpu_tflite - Used for [Edge TPU](https://cloud.google.com/edge-tpu/) + // devices. + // * tf_saved_model - A tensorflow model in SavedModel format. + // * docker - Used for Docker containers. Use the params field to customize + // the container. The container is verified to work correctly on + // ubuntu 16.04 operating system. + // * core_ml - Used for iOS mobile devices. + ModelFormat string `protobuf:"bytes,4,opt,name=model_format,json=modelFormat,proto3" json:"model_format,omitempty"` + // Additional model-type and format specific parameters describing the + // requirements for the to be exported model files, any string must be up to + // 25000 characters long. + // + // * For `docker` format: + // `cpu_architecture` - (string) "x86_64" (default). + // `gpu_architecture` - (string) "none" (default), "nvidia". + Params map[string]string `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModelExportOutputConfig) Reset() { *m = ModelExportOutputConfig{} } +func (m *ModelExportOutputConfig) String() string { return proto.CompactTextString(m) } +func (*ModelExportOutputConfig) ProtoMessage() {} +func (*ModelExportOutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{5} +} +func (m *ModelExportOutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModelExportOutputConfig.Unmarshal(m, b) +} +func (m *ModelExportOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModelExportOutputConfig.Marshal(b, m, deterministic) +} +func (dst *ModelExportOutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModelExportOutputConfig.Merge(dst, src) +} +func (m *ModelExportOutputConfig) XXX_Size() int { + return xxx_messageInfo_ModelExportOutputConfig.Size(m) +} +func (m *ModelExportOutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ModelExportOutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ModelExportOutputConfig proto.InternalMessageInfo + +type isModelExportOutputConfig_Destination interface { + isModelExportOutputConfig_Destination() +} + +type ModelExportOutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +type ModelExportOutputConfig_GcrDestination struct { + GcrDestination *GcrDestination `protobuf:"bytes,3,opt,name=gcr_destination,json=gcrDestination,proto3,oneof"` +} + +func (*ModelExportOutputConfig_GcsDestination) isModelExportOutputConfig_Destination() {} + +func (*ModelExportOutputConfig_GcrDestination) isModelExportOutputConfig_Destination() {} + +func (m *ModelExportOutputConfig) GetDestination() isModelExportOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *ModelExportOutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*ModelExportOutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +func (m *ModelExportOutputConfig) GetGcrDestination() *GcrDestination { + if x, ok := m.GetDestination().(*ModelExportOutputConfig_GcrDestination); ok { + return x.GcrDestination + } + return nil +} + +func (m *ModelExportOutputConfig) GetModelFormat() string { + if m != nil { + return m.ModelFormat + } + return "" +} + +func (m 
*ModelExportOutputConfig) GetParams() map[string]string { + if m != nil { + return m.Params + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ModelExportOutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ModelExportOutputConfig_OneofMarshaler, _ModelExportOutputConfig_OneofUnmarshaler, _ModelExportOutputConfig_OneofSizer, []interface{}{ + (*ModelExportOutputConfig_GcsDestination)(nil), + (*ModelExportOutputConfig_GcrDestination)(nil), + } +} + +func _ModelExportOutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ModelExportOutputConfig) + // destination + switch x := m.Destination.(type) { + case *ModelExportOutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case *ModelExportOutputConfig_GcrDestination: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcrDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ModelExportOutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _ModelExportOutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ModelExportOutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &ModelExportOutputConfig_GcsDestination{msg} + return true, err + case 3: // destination.gcr_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcrDestination) + err := b.DecodeMessage(msg) + m.Destination = &ModelExportOutputConfig_GcrDestination{msg} + return true, err + default: + return false, nil + } +} + +func _ModelExportOutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ModelExportOutputConfig) + // destination + switch x := m.Destination.(type) { + case *ModelExportOutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelExportOutputConfig_GcrDestination: + s := proto.Size(x.GcrDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Output configuration for ExportEvaluatedExamples Action. Note that this call +// is available only for 30 days since the moment the model was evaluated. +// The output depends on the domain, as follows (note that only examples from +// the TEST set are exported): +// +// * For Tables: +// +// [bigquery_destination][google.cloud.automl.v1beta1.OutputConfig.bigquery_destination] +// pointing to a BigQuery project must be set. In the given project a +// new dataset will be created with name +// +// `export_evaluated_examples__` +// where will be made BigQuery-dataset-name +// compatible (e.g. most special characters will become underscores), +// and timestamp will be in YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" +// format. In the dataset an `evaluated_examples` table will be +// created. 
It will have all the same columns as the +// [primary +// +// table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_spec_id] +// of the +// [dataset][google.cloud.automl.v1beta1.Model.dataset_id] from which +// the model was created, as they were at the moment of model's +// evaluation (this includes the target column with its ground +// truth), followed by a column called "predicted_". That +// last column will contain the model's prediction result for each +// respective row, given as ARRAY of +// [AnnotationPayloads][google.cloud.automl.v1beta1.AnnotationPayload], +// represented as STRUCT-s, containing +// [TablesAnnotation][google.cloud.automl.v1beta1.TablesAnnotation]. +type ExportEvaluatedExamplesOutputConfig struct { + // Required. The destination of the output. + // + // Types that are valid to be assigned to Destination: + // *ExportEvaluatedExamplesOutputConfig_BigqueryDestination + Destination isExportEvaluatedExamplesOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEvaluatedExamplesOutputConfig) Reset() { *m = ExportEvaluatedExamplesOutputConfig{} } +func (m *ExportEvaluatedExamplesOutputConfig) String() string { return proto.CompactTextString(m) } +func (*ExportEvaluatedExamplesOutputConfig) ProtoMessage() {} +func (*ExportEvaluatedExamplesOutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{6} +} +func (m *ExportEvaluatedExamplesOutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Unmarshal(m, b) +} +func (m *ExportEvaluatedExamplesOutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Marshal(b, m, deterministic) +} +func (dst *ExportEvaluatedExamplesOutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Merge(dst, src) +} +func (m *ExportEvaluatedExamplesOutputConfig) XXX_Size() int { + return xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.Size(m) +} +func (m *ExportEvaluatedExamplesOutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEvaluatedExamplesOutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEvaluatedExamplesOutputConfig proto.InternalMessageInfo + +type isExportEvaluatedExamplesOutputConfig_Destination interface { + isExportEvaluatedExamplesOutputConfig_Destination() +} + +type ExportEvaluatedExamplesOutputConfig_BigqueryDestination struct { + BigqueryDestination *BigQueryDestination `protobuf:"bytes,2,opt,name=bigquery_destination,json=bigqueryDestination,proto3,oneof"` +} + +func (*ExportEvaluatedExamplesOutputConfig_BigqueryDestination) isExportEvaluatedExamplesOutputConfig_Destination() { +} + +func (m *ExportEvaluatedExamplesOutputConfig) GetDestination() isExportEvaluatedExamplesOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *ExportEvaluatedExamplesOutputConfig) GetBigqueryDestination() *BigQueryDestination { + if x, ok := m.GetDestination().(*ExportEvaluatedExamplesOutputConfig_BigqueryDestination); ok { + return x.BigqueryDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
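Editor's note: unlike the other output configs, ExportEvaluatedExamplesOutputConfig accepts only a BigQuery destination, so its oneof has a single member. A brief sketch follows (hypothetical project ID, not part of the generated file).

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

func main() {
	// Evaluated examples can only be exported to BigQuery.
	cfg := &automl.ExportEvaluatedExamplesOutputConfig{
		Destination: &automl.ExportEvaluatedExamplesOutputConfig_BigqueryDestination{
			BigqueryDestination: &automl.BigQueryDestination{OutputUri: "bq://my-project"},
		},
	}
	fmt.Println(cfg.GetBigqueryDestination().GetOutputUri())
}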
+func (*ExportEvaluatedExamplesOutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExportEvaluatedExamplesOutputConfig_OneofMarshaler, _ExportEvaluatedExamplesOutputConfig_OneofUnmarshaler, _ExportEvaluatedExamplesOutputConfig_OneofSizer, []interface{}{ + (*ExportEvaluatedExamplesOutputConfig_BigqueryDestination)(nil), + } +} + +func _ExportEvaluatedExamplesOutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExportEvaluatedExamplesOutputConfig) + // destination + switch x := m.Destination.(type) { + case *ExportEvaluatedExamplesOutputConfig_BigqueryDestination: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigqueryDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExportEvaluatedExamplesOutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _ExportEvaluatedExamplesOutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExportEvaluatedExamplesOutputConfig) + switch tag { + case 2: // destination.bigquery_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryDestination) + err := b.DecodeMessage(msg) + m.Destination = &ExportEvaluatedExamplesOutputConfig_BigqueryDestination{msg} + return true, err + default: + return false, nil + } +} + +func _ExportEvaluatedExamplesOutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExportEvaluatedExamplesOutputConfig) + // destination + switch x := m.Destination.(type) { + case *ExportEvaluatedExamplesOutputConfig_BigqueryDestination: + s := proto.Size(x.BigqueryDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The Google Cloud Storage location for the input content. +type GcsSource struct { + // Required. Google Cloud Storage URIs to input files, up to 2000 characters + // long. Accepted forms: + // * Full object path, e.g. gs://bucket/directory/object.csv + InputUris []string `protobuf:"bytes,1,rep,name=input_uris,json=inputUris,proto3" json:"input_uris,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{7} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetInputUris() []string { + if m != nil { + return m.InputUris + } + return nil +} + +// The BigQuery location for the input content. 
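Editor's note: GcsSource, defined just above, is also used directly (outside a oneof) by DocumentInputConfig earlier in this file. A quick sketch with a hypothetical bucket and object path, not part of the vendored code:

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

func main() {
	// DocumentInputConfig takes a single .PDF object path via GcsSource.
	doc := &automl.DocumentInputConfig{
		GcsSource: &automl.GcsSource{
			InputUris: []string{"gs://my-bucket/documents/contract.pdf"},
		},
	}
	fmt.Println(doc.GetGcsSource().GetInputUris())
}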
+type BigQuerySource struct { + // Required. BigQuery URI to a table, up to 2000 characters long. + // Accepted forms: + // * BigQuery path e.g. bq://projectId.bqDatasetId.bqTableId + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQuerySource) Reset() { *m = BigQuerySource{} } +func (m *BigQuerySource) String() string { return proto.CompactTextString(m) } +func (*BigQuerySource) ProtoMessage() {} +func (*BigQuerySource) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{8} +} +func (m *BigQuerySource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQuerySource.Unmarshal(m, b) +} +func (m *BigQuerySource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQuerySource.Marshal(b, m, deterministic) +} +func (dst *BigQuerySource) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQuerySource.Merge(dst, src) +} +func (m *BigQuerySource) XXX_Size() int { + return xxx_messageInfo_BigQuerySource.Size(m) +} +func (m *BigQuerySource) XXX_DiscardUnknown() { + xxx_messageInfo_BigQuerySource.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQuerySource proto.InternalMessageInfo + +func (m *BigQuerySource) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +// The Google Cloud Storage location where the output is to be written to. +type GcsDestination struct { + // Required. Google Cloud Storage URI to output directory, up to 2000 + // characters long. + // Accepted forms: + // * Prefix path: gs://bucket/directory + // The requesting user must have write permission to the bucket. + // The directory is created if it doesn't exist. + OutputUriPrefix string `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{9} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// The BigQuery location for the output content. +type BigQueryDestination struct { + // Required. BigQuery URI to a project, up to 2000 characters long. + // Accepted forms: + // * BigQuery path e.g. 
bq://projectId + OutputUri string `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryDestination) Reset() { *m = BigQueryDestination{} } +func (m *BigQueryDestination) String() string { return proto.CompactTextString(m) } +func (*BigQueryDestination) ProtoMessage() {} +func (*BigQueryDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{10} +} +func (m *BigQueryDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryDestination.Unmarshal(m, b) +} +func (m *BigQueryDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryDestination.Marshal(b, m, deterministic) +} +func (dst *BigQueryDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryDestination.Merge(dst, src) +} +func (m *BigQueryDestination) XXX_Size() int { + return xxx_messageInfo_BigQueryDestination.Size(m) +} +func (m *BigQueryDestination) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryDestination proto.InternalMessageInfo + +func (m *BigQueryDestination) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +// The GCR location where the image must be pushed to. +type GcrDestination struct { + // Required. Google Contained Registry URI of the new image, up to 2000 + // characters long. See + // + // https: + // //cloud.google.com/container-registry/do + // // cs/pushing-and-pulling#pushing_an_image_to_a_registry + // Accepted forms: + // * [HOSTNAME]/[PROJECT-ID]/[IMAGE] + // * [HOSTNAME]/[PROJECT-ID]/[IMAGE]:[TAG] + // + // The requesting user must have permission to push images the project. 
+ OutputUri string `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcrDestination) Reset() { *m = GcrDestination{} } +func (m *GcrDestination) String() string { return proto.CompactTextString(m) } +func (*GcrDestination) ProtoMessage() {} +func (*GcrDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_io_bc3ab7a921debf0e, []int{11} +} +func (m *GcrDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcrDestination.Unmarshal(m, b) +} +func (m *GcrDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcrDestination.Marshal(b, m, deterministic) +} +func (dst *GcrDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcrDestination.Merge(dst, src) +} +func (m *GcrDestination) XXX_Size() int { + return xxx_messageInfo_GcrDestination.Size(m) +} +func (m *GcrDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcrDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcrDestination proto.InternalMessageInfo + +func (m *GcrDestination) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func init() { + proto.RegisterType((*InputConfig)(nil), "google.cloud.automl.v1beta1.InputConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.InputConfig.ParamsEntry") + proto.RegisterType((*BatchPredictInputConfig)(nil), "google.cloud.automl.v1beta1.BatchPredictInputConfig") + proto.RegisterType((*DocumentInputConfig)(nil), "google.cloud.automl.v1beta1.DocumentInputConfig") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.automl.v1beta1.OutputConfig") + proto.RegisterType((*BatchPredictOutputConfig)(nil), "google.cloud.automl.v1beta1.BatchPredictOutputConfig") + proto.RegisterType((*ModelExportOutputConfig)(nil), "google.cloud.automl.v1beta1.ModelExportOutputConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.ModelExportOutputConfig.ParamsEntry") + proto.RegisterType((*ExportEvaluatedExamplesOutputConfig)(nil), "google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.automl.v1beta1.GcsSource") + proto.RegisterType((*BigQuerySource)(nil), "google.cloud.automl.v1beta1.BigQuerySource") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.automl.v1beta1.GcsDestination") + proto.RegisterType((*BigQueryDestination)(nil), "google.cloud.automl.v1beta1.BigQueryDestination") + proto.RegisterType((*GcrDestination)(nil), "google.cloud.automl.v1beta1.GcrDestination") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/io.proto", fileDescriptor_io_bc3ab7a921debf0e) +} + +var fileDescriptor_io_bc3ab7a921debf0e = []byte{ + // 630 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x96, 0xcf, 0x6e, 0xd3, 0x4c, + 0x14, 0xc5, 0x3f, 0xdb, 0x1f, 0x55, 0x7d, 0x5d, 0x5a, 0x70, 0x2b, 0xd5, 0x6a, 0x41, 0x14, 0x83, + 0x50, 0xd4, 0x0a, 0x9b, 0x96, 0x2e, 0xf8, 0xb7, 0xa0, 0x69, 0x43, 0x40, 0xa2, 0x22, 0x04, 0x35, + 0x42, 0x28, 0x52, 0x34, 0x71, 0x26, 0xc3, 0x08, 0xdb, 0x63, 0xc6, 0xe3, 0x2a, 0xd9, 0xf3, 0x00, + 0xec, 0x79, 0x1a, 0x24, 0x56, 0x3c, 0x03, 0x0f, 0xc0, 0x63, 0x20, 0x8f, 0x9d, 0xc4, 0x2e, 0x25, + 0x44, 0xa8, 0x2a, 0x12, 0xbb, 0xcc, 0x9d, 0x73, 0x7e, 0xe3, 0x73, 0x3d, 0xb9, 0x09, 0xdc, 0x24, + 0x8c, 0x11, 0x1f, 0xbb, 0x9e, 0xcf, 
0x92, 0x9e, 0x8b, 0x12, 0xc1, 0x02, 0xdf, 0x3d, 0xde, 0xee, + 0x62, 0x81, 0xb6, 0x5d, 0xca, 0x9c, 0x88, 0x33, 0xc1, 0xcc, 0xf5, 0x4c, 0xe5, 0x48, 0x95, 0x93, + 0xa9, 0x9c, 0x5c, 0xb5, 0x76, 0x25, 0x47, 0xa0, 0x88, 0xba, 0x28, 0x0c, 0x99, 0x40, 0x82, 0xb2, + 0x30, 0xce, 0xac, 0xf6, 0x67, 0x15, 0x8c, 0x67, 0x61, 0x94, 0x88, 0x7d, 0x16, 0xf6, 0x29, 0x31, + 0xeb, 0x00, 0xc4, 0x8b, 0x3b, 0x31, 0x4b, 0xb8, 0x87, 0x2d, 0x65, 0x43, 0xa9, 0x18, 0x3b, 0xb7, + 0x9c, 0x29, 0x7c, 0xa7, 0xee, 0xc5, 0xaf, 0xa4, 0xfa, 0xe9, 0x7f, 0x4d, 0x9d, 0x8c, 0x16, 0x66, + 0x0b, 0x96, 0xba, 0x94, 0xbc, 0x4f, 0x30, 0x1f, 0x8e, 0x68, 0x9a, 0xa4, 0x6d, 0x4d, 0xa5, 0x55, + 0x29, 0x79, 0x99, 0x7a, 0xc6, 0xc8, 0xc5, 0x11, 0x25, 0xe7, 0x3e, 0x87, 0xb9, 0x08, 0x71, 0x14, + 0xc4, 0x96, 0xba, 0xa1, 0x55, 0x8c, 0x9d, 0xdd, 0xa9, 0xb8, 0x42, 0x34, 0xa7, 0x21, 0x6d, 0xb5, + 0x50, 0xf0, 0x61, 0x33, 0x67, 0xac, 0xdd, 0x07, 0xa3, 0x50, 0x36, 0x2f, 0x81, 0xf6, 0x0e, 0x0f, + 0x65, 0x6c, 0xbd, 0x99, 0x7e, 0x34, 0x57, 0xe0, 0xc2, 0x31, 0xf2, 0x13, 0x6c, 0xa9, 0xb2, 0x96, + 0x2d, 0x1e, 0xa8, 0xf7, 0x94, 0xea, 0x3c, 0xcc, 0x65, 0xb9, 0xec, 0x2f, 0x0a, 0xac, 0x56, 0x91, + 0xf0, 0xde, 0x36, 0x38, 0xee, 0x51, 0x4f, 0x9c, 0x57, 0x3f, 0xd5, 0x33, 0xe8, 0x67, 0x21, 0x46, + 0x1b, 0x96, 0x0f, 0x98, 0x97, 0x04, 0x38, 0x2c, 0x25, 0xa8, 0xfd, 0x79, 0x82, 0xc2, 0xf3, 0xdb, + 0xdf, 0x14, 0x58, 0x78, 0x91, 0x88, 0x09, 0xb7, 0x05, 0x4b, 0x29, 0xb7, 0x87, 0x63, 0x41, 0x43, + 0x79, 0x27, 0x73, 0xf8, 0xd6, 0xef, 0xe0, 0x07, 0x13, 0x4b, 0x1a, 0x88, 0x94, 0x2a, 0x26, 0x86, + 0x95, 0x71, 0xa3, 0x8a, 0xf0, 0xac, 0x5b, 0x77, 0x66, 0xea, 0x56, 0xf9, 0x84, 0xe5, 0x11, 0xaf, + 0x50, 0xae, 0x5e, 0x04, 0xa3, 0x40, 0xb7, 0xbf, 0x2b, 0x60, 0x15, 0xef, 0xc0, 0x3f, 0x1c, 0xf5, + 0xa3, 0x06, 0xab, 0x87, 0xac, 0x87, 0xfd, 0xda, 0x20, 0x62, 0xfc, 0x7c, 0x92, 0x4a, 0x2e, 0x2f, + 0x71, 0xb5, 0x99, 0xb8, 0xfc, 0x27, 0x6e, 0xb1, 0x62, 0x5e, 0x87, 0x85, 0x20, 0x8d, 0xd2, 0xe9, + 0x33, 0x1e, 0x20, 0x61, 0xfd, 0x2f, 0xbf, 0xe5, 0x86, 0xac, 0x3d, 0x91, 0x25, 0xf3, 0xf5, 0x89, + 0x81, 0xf3, 0x78, 0xea, 0x89, 0xbf, 0x68, 0xcc, 0x59, 0x0f, 0x9f, 0x13, 0xaf, 0xe4, 0x93, 0x02, + 0x37, 0xb2, 0x43, 0x6b, 0xa9, 0x06, 0x09, 0xdc, 0xab, 0x0d, 0x50, 0x10, 0xf9, 0x38, 0x2e, 0xbd, + 0x9e, 0xbf, 0x73, 0x61, 0x36, 0x41, 0x1f, 0x8f, 0x04, 0xf3, 0x2a, 0x00, 0x4d, 0xa7, 0x4b, 0x27, + 0xe1, 0x34, 0xb6, 0x94, 0x0d, 0xad, 0xa2, 0x37, 0x75, 0x59, 0x39, 0xe2, 0x34, 0xb6, 0x6f, 0xc3, + 0x62, 0x79, 0x64, 0x99, 0xeb, 0xa0, 0x8f, 0x0d, 0x79, 0x73, 0xe6, 0x47, 0x7a, 0xfb, 0x11, 0x2c, + 0x96, 0xef, 0x8e, 0xb9, 0x09, 0x97, 0x99, 0x8c, 0x9c, 0xea, 0x3b, 0x11, 0xc7, 0x7d, 0x3a, 0xc8, + 0x6d, 0x4b, 0xd9, 0xc6, 0x11, 0xa7, 0x0d, 0x59, 0xb6, 0x77, 0x61, 0xf9, 0x94, 0x54, 0xe9, 0x23, + 0x4e, 0x10, 0xb9, 0x57, 0x1f, 0x7b, 0x6d, 0x37, 0x3d, 0x93, 0xcf, 0x6e, 0xa8, 0x7e, 0x50, 0xe0, + 0x9a, 0xc7, 0x82, 0x69, 0xdd, 0x6d, 0x28, 0x6f, 0xf6, 0xf2, 0x6d, 0xc2, 0x7c, 0x14, 0x12, 0x87, + 0x71, 0xe2, 0x12, 0x1c, 0xca, 0x5f, 0x69, 0x37, 0xdb, 0x42, 0x11, 0x8d, 0x4f, 0xfd, 0x27, 0xf0, + 0x30, 0x5b, 0x7e, 0x55, 0xd7, 0xeb, 0x52, 0xd8, 0xde, 0x4f, 0x45, 0xed, 0xbd, 0x44, 0xb0, 0x43, + 0xbf, 0xdd, 0xca, 0x44, 0xdd, 0x39, 0xc9, 0xba, 0xfb, 0x23, 0x00, 0x00, 0xff, 0xff, 0x7b, 0x21, + 0xf1, 0x3d, 0x54, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model.pb.go new file mode 100644 index 0000000..45b8cd4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model.pb.go @@ 
-0,0 +1,519 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/model.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Deployment state of the model. +type Model_DeploymentState int32 + +const ( + // Should not be used, an un-set enum has this value by default. + Model_DEPLOYMENT_STATE_UNSPECIFIED Model_DeploymentState = 0 + // Model is deployed. + Model_DEPLOYED Model_DeploymentState = 1 + // Model is not deployed. + Model_UNDEPLOYED Model_DeploymentState = 2 +) + +var Model_DeploymentState_name = map[int32]string{ + 0: "DEPLOYMENT_STATE_UNSPECIFIED", + 1: "DEPLOYED", + 2: "UNDEPLOYED", +} +var Model_DeploymentState_value = map[string]int32{ + "DEPLOYMENT_STATE_UNSPECIFIED": 0, + "DEPLOYED": 1, + "UNDEPLOYED": 2, +} + +func (x Model_DeploymentState) String() string { + return proto.EnumName(Model_DeploymentState_name, int32(x)) +} +func (Model_DeploymentState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_9828559028c2521f, []int{0, 0} +} + +// API proto representing a trained machine learning model. +type Model struct { + // Required. + // The model metadata that is specific to the problem type. + // Must match the metadata type of the dataset used to train the model. + // + // Types that are valid to be assigned to ModelMetadata: + // *Model_TranslationModelMetadata + // *Model_ImageClassificationModelMetadata + // *Model_TextClassificationModelMetadata + // *Model_ImageObjectDetectionModelMetadata + // *Model_VideoClassificationModelMetadata + // *Model_TextExtractionModelMetadata + // *Model_TablesModelMetadata + // *Model_TextSentimentModelMetadata + ModelMetadata isModel_ModelMetadata `protobuf_oneof:"model_metadata"` + // Output only. + // Resource name of the model. + // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the model to show in the interface. The name can be + // up to 32 characters long and can consist only of ASCII Latin letters A-Z + // and a-z, underscores + // (_), and ASCII digits 0-9. It must start with a letter. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. + // The resource ID of the dataset used to create the model. The dataset must + // come from the same ancestor project and location. + DatasetId string `protobuf:"bytes,3,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // Output only. + // Timestamp when this model was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. + // Timestamp when this model was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Output only. Deployment state of the model. A model can only serve + // prediction requests after it gets deployed. + DeploymentState Model_DeploymentState `protobuf:"varint,8,opt,name=deployment_state,json=deploymentState,proto3,enum=google.cloud.automl.v1beta1.Model_DeploymentState" json:"deployment_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model) Reset() { *m = Model{} } +func (m *Model) String() string { return proto.CompactTextString(m) } +func (*Model) ProtoMessage() {} +func (*Model) Descriptor() ([]byte, []int) { + return fileDescriptor_model_9828559028c2521f, []int{0} +} +func (m *Model) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model.Unmarshal(m, b) +} +func (m *Model) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model.Marshal(b, m, deterministic) +} +func (dst *Model) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model.Merge(dst, src) +} +func (m *Model) XXX_Size() int { + return xxx_messageInfo_Model.Size(m) +} +func (m *Model) XXX_DiscardUnknown() { + xxx_messageInfo_Model.DiscardUnknown(m) +} + +var xxx_messageInfo_Model proto.InternalMessageInfo + +type isModel_ModelMetadata interface { + isModel_ModelMetadata() +} + +type Model_TranslationModelMetadata struct { + TranslationModelMetadata *TranslationModelMetadata `protobuf:"bytes,15,opt,name=translation_model_metadata,json=translationModelMetadata,proto3,oneof"` +} + +type Model_ImageClassificationModelMetadata struct { + ImageClassificationModelMetadata *ImageClassificationModelMetadata `protobuf:"bytes,13,opt,name=image_classification_model_metadata,json=imageClassificationModelMetadata,proto3,oneof"` +} + +type Model_TextClassificationModelMetadata struct { + TextClassificationModelMetadata *TextClassificationModelMetadata `protobuf:"bytes,14,opt,name=text_classification_model_metadata,json=textClassificationModelMetadata,proto3,oneof"` +} + +type Model_ImageObjectDetectionModelMetadata struct { + ImageObjectDetectionModelMetadata *ImageObjectDetectionModelMetadata `protobuf:"bytes,20,opt,name=image_object_detection_model_metadata,json=imageObjectDetectionModelMetadata,proto3,oneof"` +} + +type Model_VideoClassificationModelMetadata struct { + VideoClassificationModelMetadata *VideoClassificationModelMetadata `protobuf:"bytes,23,opt,name=video_classification_model_metadata,json=videoClassificationModelMetadata,proto3,oneof"` +} + +type Model_TextExtractionModelMetadata struct { + TextExtractionModelMetadata *TextExtractionModelMetadata `protobuf:"bytes,19,opt,name=text_extraction_model_metadata,json=textExtractionModelMetadata,proto3,oneof"` +} + +type Model_TablesModelMetadata struct { + TablesModelMetadata *TablesModelMetadata `protobuf:"bytes,24,opt,name=tables_model_metadata,json=tablesModelMetadata,proto3,oneof"` +} + +type Model_TextSentimentModelMetadata struct { + TextSentimentModelMetadata *TextSentimentModelMetadata `protobuf:"bytes,22,opt,name=text_sentiment_model_metadata,json=textSentimentModelMetadata,proto3,oneof"` +} + +func (*Model_TranslationModelMetadata) isModel_ModelMetadata() {} + +func (*Model_ImageClassificationModelMetadata) isModel_ModelMetadata() {} + +func (*Model_TextClassificationModelMetadata) isModel_ModelMetadata() {} + +func (*Model_ImageObjectDetectionModelMetadata) isModel_ModelMetadata() {} + 
+func (*Model_VideoClassificationModelMetadata) isModel_ModelMetadata() {} + +func (*Model_TextExtractionModelMetadata) isModel_ModelMetadata() {} + +func (*Model_TablesModelMetadata) isModel_ModelMetadata() {} + +func (*Model_TextSentimentModelMetadata) isModel_ModelMetadata() {} + +func (m *Model) GetModelMetadata() isModel_ModelMetadata { + if m != nil { + return m.ModelMetadata + } + return nil +} + +func (m *Model) GetTranslationModelMetadata() *TranslationModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_TranslationModelMetadata); ok { + return x.TranslationModelMetadata + } + return nil +} + +func (m *Model) GetImageClassificationModelMetadata() *ImageClassificationModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_ImageClassificationModelMetadata); ok { + return x.ImageClassificationModelMetadata + } + return nil +} + +func (m *Model) GetTextClassificationModelMetadata() *TextClassificationModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_TextClassificationModelMetadata); ok { + return x.TextClassificationModelMetadata + } + return nil +} + +func (m *Model) GetImageObjectDetectionModelMetadata() *ImageObjectDetectionModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_ImageObjectDetectionModelMetadata); ok { + return x.ImageObjectDetectionModelMetadata + } + return nil +} + +func (m *Model) GetVideoClassificationModelMetadata() *VideoClassificationModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_VideoClassificationModelMetadata); ok { + return x.VideoClassificationModelMetadata + } + return nil +} + +func (m *Model) GetTextExtractionModelMetadata() *TextExtractionModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_TextExtractionModelMetadata); ok { + return x.TextExtractionModelMetadata + } + return nil +} + +func (m *Model) GetTablesModelMetadata() *TablesModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_TablesModelMetadata); ok { + return x.TablesModelMetadata + } + return nil +} + +func (m *Model) GetTextSentimentModelMetadata() *TextSentimentModelMetadata { + if x, ok := m.GetModelMetadata().(*Model_TextSentimentModelMetadata); ok { + return x.TextSentimentModelMetadata + } + return nil +} + +func (m *Model) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Model) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Model) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *Model) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Model) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Model) GetDeploymentState() Model_DeploymentState { + if m != nil { + return m.DeploymentState + } + return Model_DEPLOYMENT_STATE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
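Editor's note: since Model carries its domain-specific metadata in a large oneof, a sketch of how client code might branch on it and on the deployment state may be useful. It is illustrative only; the helper and the model literal are hypothetical, and only a few metadata cases are shown.

package main

import (
	"fmt"

	automl "google.golang.org/genproto/googleapis/cloud/automl/v1beta1"
)

// describeModel is a hypothetical helper that summarizes a Model returned by
// the AutoML API.
func describeModel(m *automl.Model) string {
	kind := "unknown"
	switch m.GetModelMetadata().(type) {
	case *automl.Model_TranslationModelMetadata:
		kind = "translation"
	case *automl.Model_ImageClassificationModelMetadata:
		kind = "image classification"
	case *automl.Model_TablesModelMetadata:
		kind = "tables"
	}
	if m.GetDeploymentState() == automl.Model_DEPLOYED {
		return kind + " model " + m.GetDisplayName() + " is deployed"
	}
	return kind + " model " + m.GetDisplayName() + " is not deployed"
}

func main() {
	fmt.Println(describeModel(&automl.Model{
		DisplayName:     "my_model",
		DeploymentState: automl.Model_DEPLOYED,
		ModelMetadata:   &automl.Model_TablesModelMetadata{},
	}))
}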
+func (*Model) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Model_OneofMarshaler, _Model_OneofUnmarshaler, _Model_OneofSizer, []interface{}{ + (*Model_TranslationModelMetadata)(nil), + (*Model_ImageClassificationModelMetadata)(nil), + (*Model_TextClassificationModelMetadata)(nil), + (*Model_ImageObjectDetectionModelMetadata)(nil), + (*Model_VideoClassificationModelMetadata)(nil), + (*Model_TextExtractionModelMetadata)(nil), + (*Model_TablesModelMetadata)(nil), + (*Model_TextSentimentModelMetadata)(nil), + } +} + +func _Model_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Model) + // model_metadata + switch x := m.ModelMetadata.(type) { + case *Model_TranslationModelMetadata: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TranslationModelMetadata); err != nil { + return err + } + case *Model_ImageClassificationModelMetadata: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationModelMetadata); err != nil { + return err + } + case *Model_TextClassificationModelMetadata: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationModelMetadata); err != nil { + return err + } + case *Model_ImageObjectDetectionModelMetadata: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageObjectDetectionModelMetadata); err != nil { + return err + } + case *Model_VideoClassificationModelMetadata: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationModelMetadata); err != nil { + return err + } + case *Model_TextExtractionModelMetadata: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextExtractionModelMetadata); err != nil { + return err + } + case *Model_TablesModelMetadata: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TablesModelMetadata); err != nil { + return err + } + case *Model_TextSentimentModelMetadata: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextSentimentModelMetadata); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Model.ModelMetadata has unexpected type %T", x) + } + return nil +} + +func _Model_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Model) + switch tag { + case 15: // model_metadata.translation_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TranslationModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_TranslationModelMetadata{msg} + return true, err + case 13: // model_metadata.image_classification_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageClassificationModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_ImageClassificationModelMetadata{msg} + return true, err + case 14: // model_metadata.text_classification_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextClassificationModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_TextClassificationModelMetadata{msg} + return true, err + case 20: // model_metadata.image_object_detection_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(ImageObjectDetectionModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_ImageObjectDetectionModelMetadata{msg} + return true, err + case 23: // model_metadata.video_classification_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_VideoClassificationModelMetadata{msg} + return true, err + case 19: // model_metadata.text_extraction_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextExtractionModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_TextExtractionModelMetadata{msg} + return true, err + case 24: // model_metadata.tables_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TablesModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_TablesModelMetadata{msg} + return true, err + case 22: // model_metadata.text_sentiment_model_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextSentimentModelMetadata) + err := b.DecodeMessage(msg) + m.ModelMetadata = &Model_TextSentimentModelMetadata{msg} + return true, err + default: + return false, nil + } +} + +func _Model_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Model) + // model_metadata + switch x := m.ModelMetadata.(type) { + case *Model_TranslationModelMetadata: + s := proto.Size(x.TranslationModelMetadata) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_ImageClassificationModelMetadata: + s := proto.Size(x.ImageClassificationModelMetadata) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_TextClassificationModelMetadata: + s := proto.Size(x.TextClassificationModelMetadata) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_ImageObjectDetectionModelMetadata: + s := proto.Size(x.ImageObjectDetectionModelMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_VideoClassificationModelMetadata: + s := proto.Size(x.VideoClassificationModelMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_TextExtractionModelMetadata: + s := proto.Size(x.TextExtractionModelMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_TablesModelMetadata: + s := proto.Size(x.TablesModelMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_TextSentimentModelMetadata: + s := proto.Size(x.TextSentimentModelMetadata) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Model)(nil), "google.cloud.automl.v1beta1.Model") + proto.RegisterEnum("google.cloud.automl.v1beta1.Model_DeploymentState", Model_DeploymentState_name, Model_DeploymentState_value) +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/model.proto", fileDescriptor_model_9828559028c2521f) +} + +var fileDescriptor_model_9828559028c2521f = []byte{ + // 673 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0x6f, 0x4f, 0xd3, 0x5e, + 0x14, 0xc7, 0x29, 0xbf, 0x9f, 0x0a, 0x77, 0x38, 0x96, 0x8b, 0x7f, 0x9a, 0x01, 0x32, 0x30, 0xea, + 0x9e, 0xd8, 0x0a, 0xc6, 
0x68, 0x82, 0x9a, 0x00, 0xab, 0xba, 0xc4, 0x0d, 0xdc, 0x06, 0x89, 0x06, + 0xd3, 0xdc, 0xb5, 0x97, 0xa6, 0xa6, 0xed, 0x6d, 0xd6, 0x33, 0x02, 0x89, 0x89, 0x4f, 0x7c, 0x66, + 0xe2, 0x03, 0x5f, 0x90, 0x2f, 0xc0, 0x57, 0x65, 0x7a, 0x6e, 0x37, 0xa5, 0x6e, 0xb7, 0x3c, 0xa3, + 0xf7, 0x7c, 0xce, 0xf7, 0x7c, 0xef, 0xf7, 0xd0, 0x8e, 0x3c, 0xf0, 0x84, 0xf0, 0x02, 0x6e, 0x3a, + 0x81, 0x18, 0xba, 0x26, 0x1b, 0x82, 0x08, 0x03, 0xf3, 0x74, 0xb3, 0xcf, 0x81, 0x6d, 0x9a, 0xa1, + 0x70, 0x79, 0x60, 0xc4, 0x03, 0x01, 0x82, 0x2e, 0x4b, 0xd0, 0x40, 0xd0, 0x90, 0xa0, 0x91, 0x81, + 0xd5, 0x95, 0x4c, 0x85, 0xc5, 0xbe, 0xc9, 0xa2, 0x48, 0x00, 0x03, 0x5f, 0x44, 0x89, 0x6c, 0xad, + 0x2a, 0x67, 0xf8, 0x21, 0xf3, 0x78, 0x06, 0xd6, 0x55, 0x20, 0xb0, 0x7e, 0xc0, 0x47, 0x92, 0xf7, + 0x95, 0x24, 0x3f, 0x83, 0x8c, 0x7b, 0xa8, 0xe4, 0x06, 0x2c, 0x4a, 0x02, 0xb4, 0x7a, 0x19, 0xa7, + 0xa7, 0xbe, 0xcb, 0x45, 0x06, 0xae, 0x65, 0x20, 0x3e, 0xf5, 0x87, 0x27, 0x26, 0xf8, 0x21, 0x4f, + 0x80, 0x85, 0xb1, 0x04, 0x36, 0x7e, 0x96, 0xc8, 0x95, 0x56, 0x1a, 0x1f, 0x1d, 0x92, 0xea, 0x5f, + 0x83, 0x6c, 0xcc, 0xd4, 0x0e, 0x39, 0x30, 0x97, 0x01, 0xd3, 0x17, 0x6b, 0x5a, 0xbd, 0xb4, 0xf5, + 0xc4, 0x50, 0xa4, 0x6b, 0xf4, 0xfe, 0xb4, 0xa3, 0x64, 0x2b, 0x6b, 0x7e, 0x33, 0xd3, 0xd1, 0x61, + 0x4a, 0x8d, 0x7e, 0xd7, 0xc8, 0x5d, 0xcc, 0xd6, 0x76, 0x02, 0x96, 0x24, 0xfe, 0x89, 0xef, 0x4c, + 0x34, 0x70, 0x1d, 0x0d, 0xbc, 0x50, 0x1a, 0x68, 0xa6, 0x3a, 0x7b, 0x17, 0x64, 0xf2, 0x46, 0x6a, + 0x7e, 0x01, 0x43, 0xbf, 0x69, 0x64, 0x23, 0xdd, 0x4c, 0x81, 0x9f, 0x32, 0xfa, 0x79, 0xae, 0x0e, + 0x84, 0x9f, 0x81, 0xda, 0xce, 0x1a, 0xa8, 0x11, 0xfa, 0x43, 0x23, 0xf7, 0x64, 0x3c, 0xa2, 0xff, + 0x89, 0x3b, 0x60, 0xbb, 0x1c, 0xb8, 0x33, 0xc9, 0xd0, 0x0d, 0x34, 0xf4, 0xb2, 0x38, 0xa0, 0x7d, + 0x14, 0x6a, 0x8c, 0x74, 0xf2, 0x96, 0xd6, 0xfd, 0x22, 0x08, 0x77, 0x86, 0xff, 0x65, 0x05, 0x19, + 0xdd, 0xbe, 0xc4, 0xce, 0x8e, 0x52, 0x9d, 0x82, 0x9d, 0x9d, 0x16, 0x30, 0xf4, 0x0b, 0xb9, 0x83, + 0x2b, 0xe3, 0x67, 0x30, 0x60, 0x13, 0xd3, 0x59, 0x42, 0x2b, 0xcf, 0x0a, 0xd7, 0x65, 0x8d, 0x15, + 0xf2, 0x2e, 0x96, 0x61, 0x7a, 0x99, 0x9e, 0x90, 0x9b, 0xf2, 0xbd, 0xcf, 0xcf, 0xd5, 0x71, 0xee, + 0x23, 0xf5, 0x5c, 0xec, 0xcc, 0xcf, 0x5b, 0x82, 0x7f, 0x8f, 0xe9, 0x67, 0xb2, 0x8a, 0x17, 0x4d, + 0x78, 0x94, 0xbe, 0xc9, 0x11, 0xe4, 0xe7, 0xdd, 0xc2, 0x79, 0x4f, 0x0b, 0xef, 0xd9, 0x1d, 0x09, + 0xe4, 0xc7, 0x56, 0x61, 0x6a, 0x95, 0x52, 0xf2, 0x7f, 0xc4, 0x42, 0xae, 0x6b, 0x35, 0xad, 0x3e, + 0xdf, 0xc1, 0xbf, 0xe9, 0x3a, 0x59, 0x70, 0xfd, 0x24, 0x0e, 0xd8, 0xb9, 0x8d, 0xb5, 0x59, 0xac, + 0x95, 0xb2, 0xb3, 0x76, 0x8a, 0xac, 0x12, 0x92, 0xb6, 0x27, 0x1c, 0x6c, 0xdf, 0xd5, 0xff, 0x43, + 0x60, 0x3e, 0x3b, 0x69, 0xba, 0x74, 0x9b, 0x94, 0x9c, 0x01, 0x67, 0xc0, 0xed, 0x74, 0xa6, 0x7e, + 0x0d, 0x6f, 0x50, 0x1d, 0xdd, 0x60, 0xf4, 0xe5, 0x32, 0x7a, 0xa3, 0x2f, 0x57, 0x87, 0x48, 0x3c, + 0x3d, 0x48, 0x9b, 0x87, 0xb1, 0x3b, 0x6e, 0x2e, 0x15, 0x37, 0x4b, 0x1c, 0x9b, 0x3f, 0x92, 0x8a, + 0xcb, 0xe3, 0x40, 0x9c, 0x63, 0x92, 0x09, 0x30, 0xe0, 0xfa, 0x5c, 0x4d, 0xab, 0x97, 0xb7, 0xb6, + 0x94, 0x01, 0x62, 0x2a, 0x46, 0x63, 0xdc, 0xda, 0x4d, 0x3b, 0x3b, 0x8b, 0xee, 0xc5, 0x83, 0x8d, + 0x77, 0x64, 0x31, 0xc7, 0xd0, 0x1a, 0x59, 0x69, 0x58, 0x07, 0x6f, 0xf7, 0xdf, 0xb7, 0xac, 0x76, + 0xcf, 0xee, 0xf6, 0x76, 0x7a, 0x96, 0x7d, 0xd8, 0xee, 0x1e, 0x58, 0x7b, 0xcd, 0x57, 0x4d, 0xab, + 0x51, 0x99, 0xa1, 0x0b, 0x64, 0x4e, 0x12, 0x56, 0xa3, 0xa2, 0xd1, 0x32, 0x21, 0x87, 0xed, 0xf1, + 0xf3, 0xec, 0x6e, 0x85, 0x94, 0x2f, 0x2e, 0x7c, 0xf7, 0xab, 0x46, 0xd6, 0x1c, 0x11, 0xaa, 0xfc, + 0x1e, 0x68, 0x1f, 0x76, 0xb2, 0xb2, 0x27, 0x02, 
0x16, 0x79, 0x86, 0x18, 0x78, 0xa6, 0xc7, 0x23, + 0x8c, 0xc7, 0x94, 0x25, 0x16, 0xfb, 0xc9, 0xc4, 0xdf, 0x93, 0x6d, 0xf9, 0xf8, 0x6b, 0x76, 0xf9, + 0x35, 0x82, 0xc7, 0x7b, 0x29, 0x74, 0xbc, 0x33, 0x04, 0xd1, 0x0a, 0x8e, 0x8f, 0x24, 0xd4, 0xbf, + 0x8a, 0x5a, 0x8f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0xed, 0x1f, 0xbf, 0xcc, 0xa8, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model_evaluation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model_evaluation.pb.go new file mode 100644 index 0000000..b21dadb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/model_evaluation.pb.go @@ -0,0 +1,423 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/model_evaluation.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Evaluation results of a model. +type ModelEvaluation struct { + // Output only. Problem type specific evaluation metrics. + // + // Types that are valid to be assigned to Metrics: + // *ModelEvaluation_ClassificationEvaluationMetrics + // *ModelEvaluation_RegressionEvaluationMetrics + // *ModelEvaluation_TranslationEvaluationMetrics + // *ModelEvaluation_ImageObjectDetectionEvaluationMetrics + // *ModelEvaluation_TextSentimentEvaluationMetrics + // *ModelEvaluation_TextExtractionEvaluationMetrics + Metrics isModelEvaluation_Metrics `protobuf_oneof:"metrics"` + // Output only. + // Resource name of the model evaluation. + // Format: + // + // `projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. + // The ID of the annotation spec that the model evaluation applies to. The + // The ID is empty for the overall model evaluation. + // For Tables classification these are the distinct values of the target + // column at the moment of the evaluation; for this problem annotation specs + // in the dataset do not exist. + // NOTE: Currently there is no way to obtain the display_name of the + // annotation spec from its ID. To see the display_names, review the model + // evaluations in the UI. + AnnotationSpecId string `protobuf:"bytes,2,opt,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"` + // Output only. The value of [AnnotationSpec.display_name][google.cloud.automl.v1beta1.AnnotationSpec.display_name] when the model + // was trained. Because this field returns a value at model training time, + // for different models trained using the same dataset, the returned value + // could be different as model owner could update the display_name between + // any two model training. + // The display_name is empty for the overall model evaluation. 
+ DisplayName string `protobuf:"bytes,15,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. + // Timestamp when this model evaluation was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. + // The number of examples used for model evaluation, i.e. for + // which ground truth from time of model creation is compared against the + // predicted annotations created by the model. + // For overall ModelEvaluation (i.e. with annotation_spec_id not set) this is + // the total number of all examples used for evaluation. + // Otherwise, this is the count of examples that according to the ground + // truth were annotated by the + // + // [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id]. + EvaluatedExampleCount int32 `protobuf:"varint,6,opt,name=evaluated_example_count,json=evaluatedExampleCount,proto3" json:"evaluated_example_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModelEvaluation) Reset() { *m = ModelEvaluation{} } +func (m *ModelEvaluation) String() string { return proto.CompactTextString(m) } +func (*ModelEvaluation) ProtoMessage() {} +func (*ModelEvaluation) Descriptor() ([]byte, []int) { + return fileDescriptor_model_evaluation_c883823b7cd2bdc7, []int{0} +} +func (m *ModelEvaluation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModelEvaluation.Unmarshal(m, b) +} +func (m *ModelEvaluation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModelEvaluation.Marshal(b, m, deterministic) +} +func (dst *ModelEvaluation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModelEvaluation.Merge(dst, src) +} +func (m *ModelEvaluation) XXX_Size() int { + return xxx_messageInfo_ModelEvaluation.Size(m) +} +func (m *ModelEvaluation) XXX_DiscardUnknown() { + xxx_messageInfo_ModelEvaluation.DiscardUnknown(m) +} + +var xxx_messageInfo_ModelEvaluation proto.InternalMessageInfo + +type isModelEvaluation_Metrics interface { + isModelEvaluation_Metrics() +} + +type ModelEvaluation_ClassificationEvaluationMetrics struct { + ClassificationEvaluationMetrics *ClassificationEvaluationMetrics `protobuf:"bytes,8,opt,name=classification_evaluation_metrics,json=classificationEvaluationMetrics,proto3,oneof"` +} + +type ModelEvaluation_RegressionEvaluationMetrics struct { + RegressionEvaluationMetrics *RegressionEvaluationMetrics `protobuf:"bytes,24,opt,name=regression_evaluation_metrics,json=regressionEvaluationMetrics,proto3,oneof"` +} + +type ModelEvaluation_TranslationEvaluationMetrics struct { + TranslationEvaluationMetrics *TranslationEvaluationMetrics `protobuf:"bytes,9,opt,name=translation_evaluation_metrics,json=translationEvaluationMetrics,proto3,oneof"` +} + +type ModelEvaluation_ImageObjectDetectionEvaluationMetrics struct { + ImageObjectDetectionEvaluationMetrics *ImageObjectDetectionEvaluationMetrics `protobuf:"bytes,12,opt,name=image_object_detection_evaluation_metrics,json=imageObjectDetectionEvaluationMetrics,proto3,oneof"` +} + +type ModelEvaluation_TextSentimentEvaluationMetrics struct { + TextSentimentEvaluationMetrics *TextSentimentEvaluationMetrics `protobuf:"bytes,11,opt,name=text_sentiment_evaluation_metrics,json=textSentimentEvaluationMetrics,proto3,oneof"` +} + +type ModelEvaluation_TextExtractionEvaluationMetrics struct { + TextExtractionEvaluationMetrics 
*TextExtractionEvaluationMetrics `protobuf:"bytes,13,opt,name=text_extraction_evaluation_metrics,json=textExtractionEvaluationMetrics,proto3,oneof"` +} + +func (*ModelEvaluation_ClassificationEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (*ModelEvaluation_RegressionEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (*ModelEvaluation_TranslationEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (*ModelEvaluation_ImageObjectDetectionEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (*ModelEvaluation_TextSentimentEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (*ModelEvaluation_TextExtractionEvaluationMetrics) isModelEvaluation_Metrics() {} + +func (m *ModelEvaluation) GetMetrics() isModelEvaluation_Metrics { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *ModelEvaluation) GetClassificationEvaluationMetrics() *ClassificationEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_ClassificationEvaluationMetrics); ok { + return x.ClassificationEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetRegressionEvaluationMetrics() *RegressionEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_RegressionEvaluationMetrics); ok { + return x.RegressionEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetTranslationEvaluationMetrics() *TranslationEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_TranslationEvaluationMetrics); ok { + return x.TranslationEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetImageObjectDetectionEvaluationMetrics() *ImageObjectDetectionEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_ImageObjectDetectionEvaluationMetrics); ok { + return x.ImageObjectDetectionEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetTextSentimentEvaluationMetrics() *TextSentimentEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_TextSentimentEvaluationMetrics); ok { + return x.TextSentimentEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetTextExtractionEvaluationMetrics() *TextExtractionEvaluationMetrics { + if x, ok := m.GetMetrics().(*ModelEvaluation_TextExtractionEvaluationMetrics); ok { + return x.TextExtractionEvaluationMetrics + } + return nil +} + +func (m *ModelEvaluation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ModelEvaluation) GetAnnotationSpecId() string { + if m != nil { + return m.AnnotationSpecId + } + return "" +} + +func (m *ModelEvaluation) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ModelEvaluation) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *ModelEvaluation) GetEvaluatedExampleCount() int32 { + if m != nil { + return m.EvaluatedExampleCount + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
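+//
+// Illustrative sketch of reading the metrics oneof (ev is a hypothetical
+// *ModelEvaluation, e.g. one returned by the AutoML API): a type switch over
+// GetMetrics() selects the problem-type specific metrics message.
+//
+//	switch x := ev.GetMetrics().(type) {
+//	case *ModelEvaluation_ClassificationEvaluationMetrics:
+//		_ = x.ClassificationEvaluationMetrics
+//	case *ModelEvaluation_TranslationEvaluationMetrics:
+//		_ = x.TranslationEvaluationMetrics
+//	case nil:
+//		// metrics not set
+//	default:
+//		// one of the other *EvaluationMetrics cases
+//	}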
+func (*ModelEvaluation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ModelEvaluation_OneofMarshaler, _ModelEvaluation_OneofUnmarshaler, _ModelEvaluation_OneofSizer, []interface{}{ + (*ModelEvaluation_ClassificationEvaluationMetrics)(nil), + (*ModelEvaluation_RegressionEvaluationMetrics)(nil), + (*ModelEvaluation_TranslationEvaluationMetrics)(nil), + (*ModelEvaluation_ImageObjectDetectionEvaluationMetrics)(nil), + (*ModelEvaluation_TextSentimentEvaluationMetrics)(nil), + (*ModelEvaluation_TextExtractionEvaluationMetrics)(nil), + } +} + +func _ModelEvaluation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ModelEvaluation) + // metrics + switch x := m.Metrics.(type) { + case *ModelEvaluation_ClassificationEvaluationMetrics: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClassificationEvaluationMetrics); err != nil { + return err + } + case *ModelEvaluation_RegressionEvaluationMetrics: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RegressionEvaluationMetrics); err != nil { + return err + } + case *ModelEvaluation_TranslationEvaluationMetrics: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TranslationEvaluationMetrics); err != nil { + return err + } + case *ModelEvaluation_ImageObjectDetectionEvaluationMetrics: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageObjectDetectionEvaluationMetrics); err != nil { + return err + } + case *ModelEvaluation_TextSentimentEvaluationMetrics: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextSentimentEvaluationMetrics); err != nil { + return err + } + case *ModelEvaluation_TextExtractionEvaluationMetrics: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextExtractionEvaluationMetrics); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ModelEvaluation.Metrics has unexpected type %T", x) + } + return nil +} + +func _ModelEvaluation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ModelEvaluation) + switch tag { + case 8: // metrics.classification_evaluation_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClassificationEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_ClassificationEvaluationMetrics{msg} + return true, err + case 24: // metrics.regression_evaluation_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RegressionEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_RegressionEvaluationMetrics{msg} + return true, err + case 9: // metrics.translation_evaluation_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TranslationEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_TranslationEvaluationMetrics{msg} + return true, err + case 12: // metrics.image_object_detection_evaluation_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageObjectDetectionEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_ImageObjectDetectionEvaluationMetrics{msg} + return true, err + case 11: // metrics.text_sentiment_evaluation_metrics + if wire != proto.WireBytes { + 
return true, proto.ErrInternalBadWireType + } + msg := new(TextSentimentEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_TextSentimentEvaluationMetrics{msg} + return true, err + case 13: // metrics.text_extraction_evaluation_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextExtractionEvaluationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &ModelEvaluation_TextExtractionEvaluationMetrics{msg} + return true, err + default: + return false, nil + } +} + +func _ModelEvaluation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ModelEvaluation) + // metrics + switch x := m.Metrics.(type) { + case *ModelEvaluation_ClassificationEvaluationMetrics: + s := proto.Size(x.ClassificationEvaluationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelEvaluation_RegressionEvaluationMetrics: + s := proto.Size(x.RegressionEvaluationMetrics) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelEvaluation_TranslationEvaluationMetrics: + s := proto.Size(x.TranslationEvaluationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelEvaluation_ImageObjectDetectionEvaluationMetrics: + s := proto.Size(x.ImageObjectDetectionEvaluationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelEvaluation_TextSentimentEvaluationMetrics: + s := proto.Size(x.TextSentimentEvaluationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ModelEvaluation_TextExtractionEvaluationMetrics: + s := proto.Size(x.TextExtractionEvaluationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ModelEvaluation)(nil), "google.cloud.automl.v1beta1.ModelEvaluation") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/model_evaluation.proto", fileDescriptor_model_evaluation_c883823b7cd2bdc7) +} + +var fileDescriptor_model_evaluation_c883823b7cd2bdc7 = []byte{ + // 595 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0x41, 0x6b, 0xd4, 0x40, + 0x14, 0xc7, 0x4d, 0xb1, 0xb5, 0x9d, 0xad, 0x54, 0x06, 0xc4, 0xb0, 0xad, 0xed, 0x6e, 0x41, 0x58, + 0xb1, 0x26, 0xdd, 0x0a, 0xa2, 0xac, 0x97, 0xee, 0xba, 0x68, 0x0f, 0xab, 0xb2, 0x2d, 0x1e, 0x64, + 0x21, 0xcc, 0x4e, 0x5e, 0x43, 0x64, 0x92, 0x09, 0x99, 0x49, 0x59, 0x2f, 0x82, 0xe0, 0x41, 0xf4, + 0x3b, 0xf8, 0x5d, 0xbc, 0xfa, 0xa9, 0x24, 0x93, 0x64, 0x93, 0x95, 0x71, 0xec, 0x2d, 0xc9, 0xfb, + 0xff, 0xff, 0xf3, 0xcb, 0x7b, 0x33, 0x83, 0x4e, 0x02, 0xce, 0x03, 0x06, 0x2e, 0x65, 0x3c, 0xf3, + 0x5d, 0x92, 0x49, 0x1e, 0x31, 0xf7, 0xaa, 0x3f, 0x07, 0x49, 0xfa, 0x6e, 0xc4, 0x7d, 0x60, 0x1e, + 0x5c, 0x11, 0x96, 0x11, 0x19, 0xf2, 0xd8, 0x49, 0x52, 0x2e, 0x39, 0xde, 0x2d, 0x3c, 0x8e, 0xf2, + 0x38, 0x85, 0xc7, 0x29, 0x3d, 0xed, 0xbd, 0x32, 0x90, 0x24, 0xa1, 0x4b, 0xe2, 0x98, 0x4b, 0xe5, + 0x14, 0x85, 0xb5, 0x7d, 0x6c, 0x5a, 0x8e, 0x32, 0x22, 0x44, 0x78, 0x19, 0xd2, 0xc6, 0x62, 0xed, + 0x47, 0x26, 0x87, 0x0f, 0x12, 0x68, 0x43, 0x7c, 0x64, 0x12, 0xa7, 0x10, 0xa4, 0x20, 0x44, 0xad, + 0xee, 0x99, 0xd4, 0x92, 0xcc, 0x19, 0x54, 0xd8, 0x7d, 0xa3, 0x12, 0x16, 0xd2, 0x83, 0x85, 0x4c, + 0x49, 0x13, 0xe5, 0xf8, 0xbf, 0x16, 0x01, 0xb1, 0x0c, 0x23, 0x88, 0x65, 0xe9, 0x78, 0x6c, 0x74, + 0xa4, 0x24, 0x16, 0xac, 0xd9, 0x98, 0x83, 0x52, 
0xae, 0xde, 0xe6, 0xd9, 0xa5, 0x9b, 0x87, 0x09, + 0x49, 0xa2, 0xa4, 0x10, 0x1c, 0xfe, 0xda, 0x44, 0x3b, 0x93, 0x7c, 0x82, 0xe3, 0xe5, 0x00, 0xf1, + 0x77, 0x0b, 0x75, 0x57, 0xdb, 0xdc, 0x18, 0xaf, 0x17, 0x81, 0x4c, 0x43, 0x2a, 0xec, 0xcd, 0x8e, + 0xd5, 0x6b, 0x9d, 0xbc, 0x70, 0x0c, 0x73, 0x76, 0x46, 0x2b, 0x29, 0xf5, 0x12, 0x93, 0x22, 0xe3, + 0xf5, 0x8d, 0xe9, 0x01, 0x35, 0x4b, 0xf0, 0x67, 0x74, 0xbf, 0x9e, 0x89, 0x8e, 0xc3, 0x56, 0x1c, + 0xcf, 0x8c, 0x1c, 0xd3, 0x65, 0x82, 0x8e, 0x61, 0x37, 0xfd, 0x77, 0x19, 0x7f, 0xb1, 0xd0, 0x7e, + 0xa3, 0xaf, 0x3a, 0x82, 0x2d, 0x45, 0xf0, 0xdc, 0x48, 0x70, 0x51, 0x47, 0xe8, 0x10, 0xf6, 0xa4, + 0xa1, 0x8e, 0x7f, 0x5a, 0xe8, 0x61, 0x18, 0x91, 0x00, 0x3c, 0x3e, 0xff, 0x08, 0x54, 0x7a, 0xcb, + 0x2d, 0xad, 0xc3, 0xd9, 0x56, 0x38, 0x43, 0x23, 0xce, 0x59, 0x9e, 0xf6, 0x56, 0x85, 0xbd, 0xac, + 0xb2, 0x74, 0x5c, 0x0f, 0xc2, 0xeb, 0x08, 0xf1, 0x37, 0x0b, 0x75, 0x57, 0xb7, 0xab, 0x0e, 0xac, + 0xa5, 0xc0, 0x06, 0xe6, 0x3e, 0xc1, 0x42, 0x9e, 0x57, 0x21, 0x3a, 0xa2, 0x7d, 0x69, 0x54, 0xe0, + 0x1f, 0x16, 0x3a, 0xfc, 0xeb, 0xb0, 0xe9, 0x58, 0x6e, 0x5f, 0x63, 0xf7, 0xe6, 0x2c, 0xe3, 0x65, + 0x8a, 0x76, 0xf7, 0x4a, 0xb3, 0x04, 0x63, 0x74, 0x33, 0x26, 0x11, 0xd8, 0x56, 0xc7, 0xea, 0x6d, + 0x4d, 0xd5, 0x33, 0x3e, 0x42, 0xb8, 0xbe, 0xf3, 0x3c, 0x91, 0x00, 0xf5, 0x42, 0xdf, 0x5e, 0x53, + 0x8a, 0x3b, 0x75, 0xe5, 0x3c, 0x01, 0x7a, 0xe6, 0xe3, 0x2e, 0xda, 0xf6, 0x43, 0x91, 0x30, 0xf2, + 0xc9, 0x53, 0x49, 0x3b, 0x4a, 0xd7, 0x2a, 0xbf, 0xbd, 0xc9, 0x03, 0x07, 0xa8, 0x45, 0x53, 0x20, + 0x12, 0xbc, 0xbc, 0x29, 0xf6, 0xba, 0xfa, 0xb5, 0x76, 0xf5, 0x6b, 0xd5, 0xd1, 0x77, 0x2e, 0xaa, + 0xa3, 0x3f, 0x45, 0x85, 0x3c, 0xff, 0x80, 0x9f, 0xa2, 0x7b, 0x65, 0x7b, 0xc0, 0xf7, 0x60, 0x41, + 0xa2, 0x84, 0x81, 0x47, 0x79, 0x16, 0x4b, 0x7b, 0xa3, 0x63, 0xf5, 0xd6, 0xa7, 0x77, 0x97, 0xe5, + 0x71, 0x51, 0x1d, 0xe5, 0xc5, 0xe1, 0x16, 0xba, 0x55, 0xf6, 0x72, 0xf8, 0xd5, 0x42, 0x07, 0x94, + 0x47, 0xa6, 0x5e, 0xbe, 0xb3, 0x3e, 0x9c, 0x96, 0xe5, 0x80, 0x33, 0x12, 0x07, 0x0e, 0x4f, 0x03, + 0x37, 0x80, 0x58, 0xd1, 0xb9, 0x45, 0x89, 0x24, 0xa1, 0xd0, 0x5e, 0x6c, 0x83, 0xe2, 0xf5, 0xf7, + 0xda, 0xee, 0x2b, 0x25, 0x9c, 0x8d, 0x72, 0xd1, 0xec, 0x34, 0x93, 0x7c, 0xc2, 0x66, 0xef, 0x0b, + 0xd1, 0x7c, 0x43, 0x65, 0x3d, 0xf9, 0x13, 0x00, 0x00, 0xff, 0xff, 0x08, 0xb8, 0xf3, 0x9a, 0xae, + 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/operations.pb.go new file mode 100644 index 0000000..5706a90 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/operations.pb.go @@ -0,0 +1,1246 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/operations.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// Metadata used across all long running operations returned by AutoML API.
+type OperationMetadata struct {
+	// Output only. Details of the specific operation. Even if this field is empty,
+	// its presence allows distinguishing different types of operations.
+	//
+	// Types that are valid to be assigned to Details:
+	//	*OperationMetadata_DeleteDetails
+	//	*OperationMetadata_DeployModelDetails
+	//	*OperationMetadata_UndeployModelDetails
+	//	*OperationMetadata_CreateModelDetails
+	//	*OperationMetadata_ImportDataDetails
+	//	*OperationMetadata_BatchPredictDetails
+	//	*OperationMetadata_ExportDataDetails
+	//	*OperationMetadata_ExportModelDetails
+	//	*OperationMetadata_ExportEvaluatedExamplesDetails
+	Details isOperationMetadata_Details `protobuf_oneof:"details"`
+	// Output only. Progress of operation. Range: [0, 100].
+	// Not used currently.
+	ProgressPercent int32 `protobuf:"varint,13,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"`
+	// Output only. Partial failures encountered.
+	// E.g. single files that couldn't be read.
+	// This field should never exceed 20 entries.
+	// Status details field will contain standard GCP error details.
+	PartialFailures []*status.Status `protobuf:"bytes,2,rep,name=partial_failures,json=partialFailures,proto3" json:"partial_failures,omitempty"`
+	// Output only. Time when the operation was created.
+	CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
+	// Output only. Time when the operation was updated for the last time.
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{0} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +type isOperationMetadata_Details interface { + isOperationMetadata_Details() +} + +type OperationMetadata_DeleteDetails struct { + DeleteDetails *DeleteOperationMetadata `protobuf:"bytes,8,opt,name=delete_details,json=deleteDetails,proto3,oneof"` +} + +type OperationMetadata_DeployModelDetails struct { + DeployModelDetails *DeployModelOperationMetadata `protobuf:"bytes,24,opt,name=deploy_model_details,json=deployModelDetails,proto3,oneof"` +} + +type OperationMetadata_UndeployModelDetails struct { + UndeployModelDetails *UndeployModelOperationMetadata `protobuf:"bytes,25,opt,name=undeploy_model_details,json=undeployModelDetails,proto3,oneof"` +} + +type OperationMetadata_CreateModelDetails struct { + CreateModelDetails *CreateModelOperationMetadata `protobuf:"bytes,10,opt,name=create_model_details,json=createModelDetails,proto3,oneof"` +} + +type OperationMetadata_ImportDataDetails struct { + ImportDataDetails *ImportDataOperationMetadata `protobuf:"bytes,15,opt,name=import_data_details,json=importDataDetails,proto3,oneof"` +} + +type OperationMetadata_BatchPredictDetails struct { + BatchPredictDetails *BatchPredictOperationMetadata `protobuf:"bytes,16,opt,name=batch_predict_details,json=batchPredictDetails,proto3,oneof"` +} + +type OperationMetadata_ExportDataDetails struct { + ExportDataDetails *ExportDataOperationMetadata `protobuf:"bytes,21,opt,name=export_data_details,json=exportDataDetails,proto3,oneof"` +} + +type OperationMetadata_ExportModelDetails struct { + ExportModelDetails *ExportModelOperationMetadata `protobuf:"bytes,22,opt,name=export_model_details,json=exportModelDetails,proto3,oneof"` +} + +type OperationMetadata_ExportEvaluatedExamplesDetails struct { + ExportEvaluatedExamplesDetails *ExportEvaluatedExamplesOperationMetadata `protobuf:"bytes,26,opt,name=export_evaluated_examples_details,json=exportEvaluatedExamplesDetails,proto3,oneof"` +} + +func (*OperationMetadata_DeleteDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_DeployModelDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_UndeployModelDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_CreateModelDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_ImportDataDetails) isOperationMetadata_Details() {} + +func 
(*OperationMetadata_BatchPredictDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_ExportDataDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_ExportModelDetails) isOperationMetadata_Details() {} + +func (*OperationMetadata_ExportEvaluatedExamplesDetails) isOperationMetadata_Details() {} + +func (m *OperationMetadata) GetDetails() isOperationMetadata_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *OperationMetadata) GetDeleteDetails() *DeleteOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_DeleteDetails); ok { + return x.DeleteDetails + } + return nil +} + +func (m *OperationMetadata) GetDeployModelDetails() *DeployModelOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_DeployModelDetails); ok { + return x.DeployModelDetails + } + return nil +} + +func (m *OperationMetadata) GetUndeployModelDetails() *UndeployModelOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_UndeployModelDetails); ok { + return x.UndeployModelDetails + } + return nil +} + +func (m *OperationMetadata) GetCreateModelDetails() *CreateModelOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_CreateModelDetails); ok { + return x.CreateModelDetails + } + return nil +} + +func (m *OperationMetadata) GetImportDataDetails() *ImportDataOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_ImportDataDetails); ok { + return x.ImportDataDetails + } + return nil +} + +func (m *OperationMetadata) GetBatchPredictDetails() *BatchPredictOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_BatchPredictDetails); ok { + return x.BatchPredictDetails + } + return nil +} + +func (m *OperationMetadata) GetExportDataDetails() *ExportDataOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_ExportDataDetails); ok { + return x.ExportDataDetails + } + return nil +} + +func (m *OperationMetadata) GetExportModelDetails() *ExportModelOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_ExportModelDetails); ok { + return x.ExportModelDetails + } + return nil +} + +func (m *OperationMetadata) GetExportEvaluatedExamplesDetails() *ExportEvaluatedExamplesOperationMetadata { + if x, ok := m.GetDetails().(*OperationMetadata_ExportEvaluatedExamplesDetails); ok { + return x.ExportEvaluatedExamplesDetails + } + return nil +} + +func (m *OperationMetadata) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *OperationMetadata) GetPartialFailures() []*status.Status { + if m != nil { + return m.PartialFailures + } + return nil +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
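+//
+// Illustrative sketch (md is a hypothetical *OperationMetadata attached to a
+// long-running operation): the details oneof identifies the kind of operation,
+// while progress_percent and partial_failures report its status.
+//
+//	if d := md.GetCreateModelDetails(); d != nil {
+//		// the operation is a CreateModel call
+//	}
+//	for _, st := range md.GetPartialFailures() {
+//		_ = st // a google.rpc.Status, e.g. a file that could not be read
+//	}
+//	pct := md.GetProgressPercent() // range [0, 100]; not populated currently
+//	_ = pct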
+func (*OperationMetadata) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OperationMetadata_OneofMarshaler, _OperationMetadata_OneofUnmarshaler, _OperationMetadata_OneofSizer, []interface{}{ + (*OperationMetadata_DeleteDetails)(nil), + (*OperationMetadata_DeployModelDetails)(nil), + (*OperationMetadata_UndeployModelDetails)(nil), + (*OperationMetadata_CreateModelDetails)(nil), + (*OperationMetadata_ImportDataDetails)(nil), + (*OperationMetadata_BatchPredictDetails)(nil), + (*OperationMetadata_ExportDataDetails)(nil), + (*OperationMetadata_ExportModelDetails)(nil), + (*OperationMetadata_ExportEvaluatedExamplesDetails)(nil), + } +} + +func _OperationMetadata_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OperationMetadata) + // details + switch x := m.Details.(type) { + case *OperationMetadata_DeleteDetails: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeleteDetails); err != nil { + return err + } + case *OperationMetadata_DeployModelDetails: + b.EncodeVarint(24<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeployModelDetails); err != nil { + return err + } + case *OperationMetadata_UndeployModelDetails: + b.EncodeVarint(25<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UndeployModelDetails); err != nil { + return err + } + case *OperationMetadata_CreateModelDetails: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CreateModelDetails); err != nil { + return err + } + case *OperationMetadata_ImportDataDetails: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImportDataDetails); err != nil { + return err + } + case *OperationMetadata_BatchPredictDetails: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BatchPredictDetails); err != nil { + return err + } + case *OperationMetadata_ExportDataDetails: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExportDataDetails); err != nil { + return err + } + case *OperationMetadata_ExportModelDetails: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExportModelDetails); err != nil { + return err + } + case *OperationMetadata_ExportEvaluatedExamplesDetails: + b.EncodeVarint(26<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExportEvaluatedExamplesDetails); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OperationMetadata.Details has unexpected type %T", x) + } + return nil +} + +func _OperationMetadata_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OperationMetadata) + switch tag { + case 8: // details.delete_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DeleteOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_DeleteDetails{msg} + return true, err + case 24: // details.deploy_model_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DeployModelOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_DeployModelDetails{msg} + return true, err + case 25: // details.undeploy_model_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UndeployModelOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_UndeployModelDetails{msg} + 
return true, err + case 10: // details.create_model_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CreateModelOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_CreateModelDetails{msg} + return true, err + case 15: // details.import_data_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImportDataOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_ImportDataDetails{msg} + return true, err + case 16: // details.batch_predict_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BatchPredictOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_BatchPredictDetails{msg} + return true, err + case 21: // details.export_data_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExportDataOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_ExportDataDetails{msg} + return true, err + case 22: // details.export_model_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExportModelOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_ExportModelDetails{msg} + return true, err + case 26: // details.export_evaluated_examples_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExportEvaluatedExamplesOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &OperationMetadata_ExportEvaluatedExamplesDetails{msg} + return true, err + default: + return false, nil + } +} + +func _OperationMetadata_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OperationMetadata) + // details + switch x := m.Details.(type) { + case *OperationMetadata_DeleteDetails: + s := proto.Size(x.DeleteDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_DeployModelDetails: + s := proto.Size(x.DeployModelDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_UndeployModelDetails: + s := proto.Size(x.UndeployModelDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_CreateModelDetails: + s := proto.Size(x.CreateModelDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_ImportDataDetails: + s := proto.Size(x.ImportDataDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_BatchPredictDetails: + s := proto.Size(x.BatchPredictDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_ExportDataDetails: + s := proto.Size(x.ExportDataDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_ExportModelDetails: + s := proto.Size(x.ExportModelDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OperationMetadata_ExportEvaluatedExamplesDetails: + s := proto.Size(x.ExportEvaluatedExamplesDetails) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Details of operations that perform deletes of any entities. 
+type DeleteOperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteOperationMetadata) Reset() { *m = DeleteOperationMetadata{} } +func (m *DeleteOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*DeleteOperationMetadata) ProtoMessage() {} +func (*DeleteOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{1} +} +func (m *DeleteOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteOperationMetadata.Unmarshal(m, b) +} +func (m *DeleteOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *DeleteOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteOperationMetadata.Merge(dst, src) +} +func (m *DeleteOperationMetadata) XXX_Size() int { + return xxx_messageInfo_DeleteOperationMetadata.Size(m) +} +func (m *DeleteOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteOperationMetadata proto.InternalMessageInfo + +// Details of DeployModel operation. +type DeployModelOperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeployModelOperationMetadata) Reset() { *m = DeployModelOperationMetadata{} } +func (m *DeployModelOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*DeployModelOperationMetadata) ProtoMessage() {} +func (*DeployModelOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{2} +} +func (m *DeployModelOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeployModelOperationMetadata.Unmarshal(m, b) +} +func (m *DeployModelOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeployModelOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *DeployModelOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeployModelOperationMetadata.Merge(dst, src) +} +func (m *DeployModelOperationMetadata) XXX_Size() int { + return xxx_messageInfo_DeployModelOperationMetadata.Size(m) +} +func (m *DeployModelOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_DeployModelOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_DeployModelOperationMetadata proto.InternalMessageInfo + +// Details of UndeployModel operation. 
+type UndeployModelOperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeployModelOperationMetadata) Reset() { *m = UndeployModelOperationMetadata{} } +func (m *UndeployModelOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*UndeployModelOperationMetadata) ProtoMessage() {} +func (*UndeployModelOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{3} +} +func (m *UndeployModelOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeployModelOperationMetadata.Unmarshal(m, b) +} +func (m *UndeployModelOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeployModelOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *UndeployModelOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeployModelOperationMetadata.Merge(dst, src) +} +func (m *UndeployModelOperationMetadata) XXX_Size() int { + return xxx_messageInfo_UndeployModelOperationMetadata.Size(m) +} +func (m *UndeployModelOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UndeployModelOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeployModelOperationMetadata proto.InternalMessageInfo + +// Details of CreateModel operation. +type CreateModelOperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateModelOperationMetadata) Reset() { *m = CreateModelOperationMetadata{} } +func (m *CreateModelOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateModelOperationMetadata) ProtoMessage() {} +func (*CreateModelOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{4} +} +func (m *CreateModelOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateModelOperationMetadata.Unmarshal(m, b) +} +func (m *CreateModelOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateModelOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateModelOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateModelOperationMetadata.Merge(dst, src) +} +func (m *CreateModelOperationMetadata) XXX_Size() int { + return xxx_messageInfo_CreateModelOperationMetadata.Size(m) +} +func (m *CreateModelOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateModelOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateModelOperationMetadata proto.InternalMessageInfo + +// Details of ImportData operation. 
+type ImportDataOperationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDataOperationMetadata) Reset() { *m = ImportDataOperationMetadata{} } +func (m *ImportDataOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportDataOperationMetadata) ProtoMessage() {} +func (*ImportDataOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{5} +} +func (m *ImportDataOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDataOperationMetadata.Unmarshal(m, b) +} +func (m *ImportDataOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDataOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportDataOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDataOperationMetadata.Merge(dst, src) +} +func (m *ImportDataOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ImportDataOperationMetadata.Size(m) +} +func (m *ImportDataOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDataOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDataOperationMetadata proto.InternalMessageInfo + +// Details of ExportData operation. +type ExportDataOperationMetadata struct { + // Output only. Information further describing this export data's output. + OutputInfo *ExportDataOperationMetadata_ExportDataOutputInfo `protobuf:"bytes,1,opt,name=output_info,json=outputInfo,proto3" json:"output_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataOperationMetadata) Reset() { *m = ExportDataOperationMetadata{} } +func (m *ExportDataOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportDataOperationMetadata) ProtoMessage() {} +func (*ExportDataOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{6} +} +func (m *ExportDataOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataOperationMetadata.Unmarshal(m, b) +} +func (m *ExportDataOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportDataOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataOperationMetadata.Merge(dst, src) +} +func (m *ExportDataOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ExportDataOperationMetadata.Size(m) +} +func (m *ExportDataOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataOperationMetadata proto.InternalMessageInfo + +func (m *ExportDataOperationMetadata) GetOutputInfo() *ExportDataOperationMetadata_ExportDataOutputInfo { + if m != nil { + return m.OutputInfo + } + return nil +} + +// Further describes this export data's output. +// Supplements +// [OutputConfig][google.cloud.automl.v1beta1.OutputConfig]. +type ExportDataOperationMetadata_ExportDataOutputInfo struct { + // The output location to which the exported data is written. 
+ // + // Types that are valid to be assigned to OutputLocation: + // *ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory + // *ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset + OutputLocation isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation `protobuf_oneof:"output_location"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) Reset() { + *m = ExportDataOperationMetadata_ExportDataOutputInfo{} +} +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) String() string { + return proto.CompactTextString(m) +} +func (*ExportDataOperationMetadata_ExportDataOutputInfo) ProtoMessage() {} +func (*ExportDataOperationMetadata_ExportDataOutputInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{6, 0} +} +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo.Unmarshal(m, b) +} +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo.Marshal(b, m, deterministic) +} +func (dst *ExportDataOperationMetadata_ExportDataOutputInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo.Merge(dst, src) +} +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) XXX_Size() int { + return xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo.Size(m) +} +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataOperationMetadata_ExportDataOutputInfo proto.InternalMessageInfo + +type isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation interface { + isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation() +} + +type ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory struct { + GcsOutputDirectory string `protobuf:"bytes,1,opt,name=gcs_output_directory,json=gcsOutputDirectory,proto3,oneof"` +} + +type ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset struct { + BigqueryOutputDataset string `protobuf:"bytes,2,opt,name=bigquery_output_dataset,json=bigqueryOutputDataset,proto3,oneof"` +} + +func (*ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory) isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation() { +} + +func (*ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset) isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation() { +} + +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) GetOutputLocation() isExportDataOperationMetadata_ExportDataOutputInfo_OutputLocation { + if m != nil { + return m.OutputLocation + } + return nil +} + +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) GetGcsOutputDirectory() string { + if x, ok := m.GetOutputLocation().(*ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory); ok { + return x.GcsOutputDirectory + } + return "" +} + +func (m *ExportDataOperationMetadata_ExportDataOutputInfo) GetBigqueryOutputDataset() string { + if x, ok := m.GetOutputLocation().(*ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset); ok { + return 
x.BigqueryOutputDataset + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExportDataOperationMetadata_ExportDataOutputInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExportDataOperationMetadata_ExportDataOutputInfo_OneofMarshaler, _ExportDataOperationMetadata_ExportDataOutputInfo_OneofUnmarshaler, _ExportDataOperationMetadata_ExportDataOutputInfo_OneofSizer, []interface{}{ + (*ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory)(nil), + (*ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset)(nil), + } +} + +func _ExportDataOperationMetadata_ExportDataOutputInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExportDataOperationMetadata_ExportDataOutputInfo) + // output_location + switch x := m.OutputLocation.(type) { + case *ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GcsOutputDirectory) + case *ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.BigqueryOutputDataset) + case nil: + default: + return fmt.Errorf("ExportDataOperationMetadata_ExportDataOutputInfo.OutputLocation has unexpected type %T", x) + } + return nil +} + +func _ExportDataOperationMetadata_ExportDataOutputInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExportDataOperationMetadata_ExportDataOutputInfo) + switch tag { + case 1: // output_location.gcs_output_directory + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.OutputLocation = &ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory{x} + return true, err + case 2: // output_location.bigquery_output_dataset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.OutputLocation = &ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset{x} + return true, err + default: + return false, nil + } +} + +func _ExportDataOperationMetadata_ExportDataOutputInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExportDataOperationMetadata_ExportDataOutputInfo) + // output_location + switch x := m.OutputLocation.(type) { + case *ExportDataOperationMetadata_ExportDataOutputInfo_GcsOutputDirectory: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GcsOutputDirectory))) + n += len(x.GcsOutputDirectory) + case *ExportDataOperationMetadata_ExportDataOutputInfo_BigqueryOutputDataset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BigqueryOutputDataset))) + n += len(x.BigqueryOutputDataset) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Details of BatchPredict operation. +type BatchPredictOperationMetadata struct { + // Output only. The input config that was given upon starting this + // batch predict operation. + InputConfig *BatchPredictInputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Output only. Information further describing this batch predict's output. 
+ OutputInfo *BatchPredictOperationMetadata_BatchPredictOutputInfo `protobuf:"bytes,2,opt,name=output_info,json=outputInfo,proto3" json:"output_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictOperationMetadata) Reset() { *m = BatchPredictOperationMetadata{} } +func (m *BatchPredictOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*BatchPredictOperationMetadata) ProtoMessage() {} +func (*BatchPredictOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{7} +} +func (m *BatchPredictOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictOperationMetadata.Unmarshal(m, b) +} +func (m *BatchPredictOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *BatchPredictOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictOperationMetadata.Merge(dst, src) +} +func (m *BatchPredictOperationMetadata) XXX_Size() int { + return xxx_messageInfo_BatchPredictOperationMetadata.Size(m) +} +func (m *BatchPredictOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictOperationMetadata proto.InternalMessageInfo + +func (m *BatchPredictOperationMetadata) GetInputConfig() *BatchPredictInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *BatchPredictOperationMetadata) GetOutputInfo() *BatchPredictOperationMetadata_BatchPredictOutputInfo { + if m != nil { + return m.OutputInfo + } + return nil +} + +// Further describes this batch predict's output. +// Supplements +// +// [BatchPredictionOutputConfig][google.cloud.automl.v1beta1.BatchPredictionOutputConfig]. +type BatchPredictOperationMetadata_BatchPredictOutputInfo struct { + // The output location into which prediction output is written. 
+ // + // Types that are valid to be assigned to OutputLocation: + // *BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory + // *BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset + OutputLocation isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation `protobuf_oneof:"output_location"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) Reset() { + *m = BatchPredictOperationMetadata_BatchPredictOutputInfo{} +} +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) String() string { + return proto.CompactTextString(m) +} +func (*BatchPredictOperationMetadata_BatchPredictOutputInfo) ProtoMessage() {} +func (*BatchPredictOperationMetadata_BatchPredictOutputInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{7, 0} +} +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo.Unmarshal(m, b) +} +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo.Marshal(b, m, deterministic) +} +func (dst *BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo.Merge(dst, src) +} +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_Size() int { + return xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo.Size(m) +} +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictOperationMetadata_BatchPredictOutputInfo proto.InternalMessageInfo + +type isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation interface { + isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation() +} + +type BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory struct { + GcsOutputDirectory string `protobuf:"bytes,1,opt,name=gcs_output_directory,json=gcsOutputDirectory,proto3,oneof"` +} + +type BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset struct { + BigqueryOutputDataset string `protobuf:"bytes,2,opt,name=bigquery_output_dataset,json=bigqueryOutputDataset,proto3,oneof"` +} + +func (*BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory) isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation() { +} + +func (*BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset) isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation() { +} + +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) GetOutputLocation() isBatchPredictOperationMetadata_BatchPredictOutputInfo_OutputLocation { + if m != nil { + return m.OutputLocation + } + return nil +} + +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) GetGcsOutputDirectory() string { + if x, ok := m.GetOutputLocation().(*BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory); ok { + return x.GcsOutputDirectory + } + return "" +} + +func (m *BatchPredictOperationMetadata_BatchPredictOutputInfo) GetBigqueryOutputDataset() string { + if x, ok := 
m.GetOutputLocation().(*BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset); ok { + return x.BigqueryOutputDataset + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BatchPredictOperationMetadata_BatchPredictOutputInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofMarshaler, _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofUnmarshaler, _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofSizer, []interface{}{ + (*BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory)(nil), + (*BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset)(nil), + } +} + +func _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchPredictOperationMetadata_BatchPredictOutputInfo) + // output_location + switch x := m.OutputLocation.(type) { + case *BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GcsOutputDirectory) + case *BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.BigqueryOutputDataset) + case nil: + default: + return fmt.Errorf("BatchPredictOperationMetadata_BatchPredictOutputInfo.OutputLocation has unexpected type %T", x) + } + return nil +} + +func _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchPredictOperationMetadata_BatchPredictOutputInfo) + switch tag { + case 1: // output_location.gcs_output_directory + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.OutputLocation = &BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory{x} + return true, err + case 2: // output_location.bigquery_output_dataset + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.OutputLocation = &BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset{x} + return true, err + default: + return false, nil + } +} + +func _BatchPredictOperationMetadata_BatchPredictOutputInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchPredictOperationMetadata_BatchPredictOutputInfo) + // output_location + switch x := m.OutputLocation.(type) { + case *BatchPredictOperationMetadata_BatchPredictOutputInfo_GcsOutputDirectory: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GcsOutputDirectory))) + n += len(x.GcsOutputDirectory) + case *BatchPredictOperationMetadata_BatchPredictOutputInfo_BigqueryOutputDataset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BigqueryOutputDataset))) + n += len(x.BigqueryOutputDataset) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Details of ExportModel operation. +type ExportModelOperationMetadata struct { + // Output only. Information further describing the output of this model + // export. 
+ OutputInfo *ExportModelOperationMetadata_ExportModelOutputInfo `protobuf:"bytes,2,opt,name=output_info,json=outputInfo,proto3" json:"output_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportModelOperationMetadata) Reset() { *m = ExportModelOperationMetadata{} } +func (m *ExportModelOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportModelOperationMetadata) ProtoMessage() {} +func (*ExportModelOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{8} +} +func (m *ExportModelOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportModelOperationMetadata.Unmarshal(m, b) +} +func (m *ExportModelOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportModelOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportModelOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportModelOperationMetadata.Merge(dst, src) +} +func (m *ExportModelOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ExportModelOperationMetadata.Size(m) +} +func (m *ExportModelOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportModelOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportModelOperationMetadata proto.InternalMessageInfo + +func (m *ExportModelOperationMetadata) GetOutputInfo() *ExportModelOperationMetadata_ExportModelOutputInfo { + if m != nil { + return m.OutputInfo + } + return nil +} + +// Further describes the output of model export. +// Supplements +// +// [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig]. +type ExportModelOperationMetadata_ExportModelOutputInfo struct { + // The full path of the Google Cloud Storage directory created, into which + // the model will be exported. 
+ GcsOutputDirectory string `protobuf:"bytes,1,opt,name=gcs_output_directory,json=gcsOutputDirectory,proto3" json:"gcs_output_directory,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) Reset() { + *m = ExportModelOperationMetadata_ExportModelOutputInfo{} +} +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) String() string { + return proto.CompactTextString(m) +} +func (*ExportModelOperationMetadata_ExportModelOutputInfo) ProtoMessage() {} +func (*ExportModelOperationMetadata_ExportModelOutputInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{8, 0} +} +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo.Unmarshal(m, b) +} +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo.Marshal(b, m, deterministic) +} +func (dst *ExportModelOperationMetadata_ExportModelOutputInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo.Merge(dst, src) +} +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) XXX_Size() int { + return xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo.Size(m) +} +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportModelOperationMetadata_ExportModelOutputInfo proto.InternalMessageInfo + +func (m *ExportModelOperationMetadata_ExportModelOutputInfo) GetGcsOutputDirectory() string { + if m != nil { + return m.GcsOutputDirectory + } + return "" +} + +// Details of EvaluatedExamples operation. +type ExportEvaluatedExamplesOperationMetadata struct { + // Output only. Information further describing the output of this evaluated + // examples export. 
+ OutputInfo *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo `protobuf:"bytes,2,opt,name=output_info,json=outputInfo,proto3" json:"output_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEvaluatedExamplesOperationMetadata) Reset() { + *m = ExportEvaluatedExamplesOperationMetadata{} +} +func (m *ExportEvaluatedExamplesOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportEvaluatedExamplesOperationMetadata) ProtoMessage() {} +func (*ExportEvaluatedExamplesOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{9} +} +func (m *ExportEvaluatedExamplesOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata.Unmarshal(m, b) +} +func (m *ExportEvaluatedExamplesOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportEvaluatedExamplesOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata.Merge(dst, src) +} +func (m *ExportEvaluatedExamplesOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata.Size(m) +} +func (m *ExportEvaluatedExamplesOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata proto.InternalMessageInfo + +func (m *ExportEvaluatedExamplesOperationMetadata) GetOutputInfo() *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo { + if m != nil { + return m.OutputInfo + } + return nil +} + +// Further describes the output of the evaluated examples export. +// Supplements +// +// [ExportEvaluatedExamplesOutputConfig][google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig]. +type ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo struct { + // The path of the BigQuery dataset created, in bq://projectId.bqDatasetId + // format, into which the output of export evaluated examples is written. 
+ BigqueryOutputDataset string `protobuf:"bytes,2,opt,name=bigquery_output_dataset,json=bigqueryOutputDataset,proto3" json:"bigquery_output_dataset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) Reset() { + *m = ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo{} +} +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) String() string { + return proto.CompactTextString(m) +} +func (*ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) ProtoMessage() {} +func (*ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_3312906655c3448c, []int{9, 0} +} +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo.Unmarshal(m, b) +} +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo.Marshal(b, m, deterministic) +} +func (dst *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo.Merge(dst, src) +} +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) XXX_Size() int { + return xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo.Size(m) +} +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo proto.InternalMessageInfo + +func (m *ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo) GetBigqueryOutputDataset() string { + if m != nil { + return m.BigqueryOutputDataset + } + return "" +} + +func init() { + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.automl.v1beta1.OperationMetadata") + proto.RegisterType((*DeleteOperationMetadata)(nil), "google.cloud.automl.v1beta1.DeleteOperationMetadata") + proto.RegisterType((*DeployModelOperationMetadata)(nil), "google.cloud.automl.v1beta1.DeployModelOperationMetadata") + proto.RegisterType((*UndeployModelOperationMetadata)(nil), "google.cloud.automl.v1beta1.UndeployModelOperationMetadata") + proto.RegisterType((*CreateModelOperationMetadata)(nil), "google.cloud.automl.v1beta1.CreateModelOperationMetadata") + proto.RegisterType((*ImportDataOperationMetadata)(nil), "google.cloud.automl.v1beta1.ImportDataOperationMetadata") + proto.RegisterType((*ExportDataOperationMetadata)(nil), "google.cloud.automl.v1beta1.ExportDataOperationMetadata") + proto.RegisterType((*ExportDataOperationMetadata_ExportDataOutputInfo)(nil), "google.cloud.automl.v1beta1.ExportDataOperationMetadata.ExportDataOutputInfo") + proto.RegisterType((*BatchPredictOperationMetadata)(nil), "google.cloud.automl.v1beta1.BatchPredictOperationMetadata") + 
proto.RegisterType((*BatchPredictOperationMetadata_BatchPredictOutputInfo)(nil), "google.cloud.automl.v1beta1.BatchPredictOperationMetadata.BatchPredictOutputInfo") + proto.RegisterType((*ExportModelOperationMetadata)(nil), "google.cloud.automl.v1beta1.ExportModelOperationMetadata") + proto.RegisterType((*ExportModelOperationMetadata_ExportModelOutputInfo)(nil), "google.cloud.automl.v1beta1.ExportModelOperationMetadata.ExportModelOutputInfo") + proto.RegisterType((*ExportEvaluatedExamplesOperationMetadata)(nil), "google.cloud.automl.v1beta1.ExportEvaluatedExamplesOperationMetadata") + proto.RegisterType((*ExportEvaluatedExamplesOperationMetadata_ExportEvaluatedExamplesOutputInfo)(nil), "google.cloud.automl.v1beta1.ExportEvaluatedExamplesOperationMetadata.ExportEvaluatedExamplesOutputInfo") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/operations.proto", fileDescriptor_operations_3312906655c3448c) +} + +var fileDescriptor_operations_3312906655c3448c = []byte{ + // 884 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x96, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0xc7, 0x59, 0x87, 0xaf, 0x1e, 0x53, 0x92, 0x4c, 0x9d, 0xc4, 0x75, 0xd2, 0x34, 0x5d, 0x21, + 0x11, 0x24, 0xb4, 0x4b, 0x43, 0x85, 0x0a, 0x11, 0x17, 0x4d, 0x62, 0xc0, 0x17, 0x56, 0x83, 0xf9, + 0x92, 0x20, 0x68, 0x35, 0xde, 0x1d, 0x2f, 0x83, 0x76, 0x77, 0x86, 0xd9, 0xd9, 0xca, 0xbe, 0x45, + 0x5c, 0xf4, 0xba, 0x37, 0x88, 0x27, 0xe0, 0x5d, 0x78, 0x05, 0x5e, 0x06, 0xed, 0xec, 0xac, 0x3f, + 0xd2, 0xf1, 0xd8, 0x8a, 0x90, 0x7a, 0xe7, 0x9d, 0xf3, 0xff, 0x9f, 0xdf, 0x9c, 0x33, 0x33, 0x47, + 0x86, 0x0f, 0x63, 0xc6, 0xe2, 0x84, 0xf8, 0x61, 0xc2, 0x8a, 0xc8, 0xc7, 0x85, 0x64, 0x69, 0xe2, + 0x3f, 0x7b, 0x38, 0x24, 0x12, 0x3f, 0xf4, 0x19, 0x27, 0x02, 0x4b, 0xca, 0xb2, 0xdc, 0xe3, 0x82, + 0x49, 0x86, 0xf6, 0x2b, 0xb5, 0xa7, 0xd4, 0x5e, 0xa5, 0xf6, 0xb4, 0xba, 0x73, 0xa0, 0x53, 0x61, + 0x4e, 0x7d, 0x9c, 0x65, 0x4c, 0xce, 0x5b, 0x3b, 0xef, 0xd9, 0x40, 0x94, 0x69, 0xd5, 0xfb, 0x36, + 0x55, 0xca, 0x22, 0x92, 0x68, 0xe1, 0xc9, 0x4a, 0x61, 0x40, 0x9e, 0xe1, 0xa4, 0x50, 0x7b, 0xd0, + 0x1e, 0xbd, 0x7b, 0x5f, 0x7d, 0x0d, 0x8b, 0x91, 0x4f, 0x52, 0x2e, 0x27, 0x3a, 0x78, 0xff, 0x7a, + 0x50, 0xd2, 0x94, 0xe4, 0x12, 0xa7, 0x5c, 0x0b, 0xf6, 0xb4, 0x40, 0xf0, 0xd0, 0xcf, 0x25, 0x96, + 0x85, 0xae, 0xcc, 0xfd, 0x1d, 0x60, 0xfb, 0x69, 0xdd, 0xa9, 0x3e, 0x91, 0x38, 0xc2, 0x12, 0xa3, + 0x9f, 0xe1, 0xdd, 0x88, 0x24, 0x44, 0x92, 0x20, 0x22, 0x12, 0xd3, 0x24, 0x6f, 0xbf, 0x7d, 0xe4, + 0x1c, 0x37, 0x4f, 0x1e, 0x79, 0x96, 0x1e, 0x7a, 0x17, 0xca, 0xf2, 0x52, 0xb6, 0xaf, 0x5e, 0x1b, + 0xdc, 0xae, 0xb2, 0x5d, 0x54, 0xc9, 0x50, 0x0a, 0xad, 0x88, 0xf0, 0x84, 0x4d, 0x82, 0xaa, 0xd8, + 0x1a, 0xd2, 0x56, 0x90, 0x4f, 0x57, 0x40, 0x4a, 0x63, 0xbf, 0xf4, 0x99, 0x48, 0x28, 0x9a, 0xc5, + 0x6b, 0x5c, 0x0e, 0xbb, 0x45, 0x66, 0x04, 0xde, 0x55, 0xc0, 0x53, 0x2b, 0xf0, 0xbb, 0x2c, 0xb2, + 0x23, 0x5b, 0x45, 0x66, 0x80, 0xa6, 0xd0, 0x0a, 0x05, 0xc1, 0x92, 0x5c, 0x43, 0xc2, 0x1a, 0x35, + 0x9e, 0x2b, 0xe3, 0xf2, 0x1a, 0xc3, 0x59, 0xbc, 0xc6, 0xfd, 0x0a, 0x77, 0x68, 0xca, 0x99, 0x90, + 0x41, 0x29, 0x9a, 0xd2, 0x36, 0x15, 0xed, 0xb1, 0x95, 0xd6, 0x53, 0xbe, 0x0b, 0x2c, 0xb1, 0x09, + 0xb6, 0x4d, 0xa7, 0xe1, 0x9a, 0xc5, 0x61, 0x67, 0x88, 0x65, 0xf8, 0x4b, 0xc0, 0x05, 0x89, 0x68, + 0x28, 0xa7, 0xb4, 0x2d, 0x45, 0xfb, 0xcc, 0x4a, 0x3b, 0x2b, 0x9d, 0x97, 0x95, 0xd1, 0xc4, 0xbb, + 0x33, 0x9c, 0x13, 0xcc, 0x55, 0x47, 0xc6, 0x2f, 0x57, 0xb7, 0xb3, 0x46, 0x75, 0xdd, 0xb1, 0xb5, + 0x3a, 0x32, 0xbe, 0x5e, 0x5d, 0x0a, 0x2d, 0xcd, 0x5a, 
0x3c, 0xb8, 0xdd, 0x35, 0x0e, 0xae, 0x82, + 0x2d, 0x3f, 0x38, 0x32, 0x8b, 0xd7, 0xb8, 0x17, 0x0e, 0x3c, 0xd0, 0x3c, 0xfd, 0xe6, 0x49, 0x14, + 0x90, 0x31, 0x4e, 0x79, 0x42, 0xf2, 0x29, 0xbc, 0xa3, 0xe0, 0xdd, 0x35, 0xe0, 0xdd, 0x3a, 0x49, + 0x57, 0xe7, 0x30, 0x6d, 0xe4, 0x90, 0x98, 0xb5, 0xf5, 0xa6, 0x3e, 0x80, 0x2d, 0x2e, 0x58, 0x2c, + 0x48, 0x9e, 0x07, 0x9c, 0x88, 0x90, 0x64, 0xb2, 0x7d, 0xfb, 0xc8, 0x39, 0x7e, 0x63, 0xb0, 0x59, + 0xaf, 0x5f, 0x56, 0xcb, 0xe8, 0x73, 0xd8, 0xe2, 0x58, 0x48, 0x8a, 0x93, 0x60, 0x84, 0x69, 0x52, + 0x08, 0x92, 0xb7, 0x1b, 0x47, 0x1b, 0xc7, 0xcd, 0x13, 0x54, 0xef, 0x56, 0xf0, 0xd0, 0xfb, 0x46, + 0x0d, 0x9d, 0xc1, 0xa6, 0xd6, 0x7e, 0xa1, 0xa5, 0xe8, 0x14, 0x9a, 0xfa, 0x99, 0x94, 0x23, 0xab, + 0xbd, 0xa1, 0xea, 0xec, 0xd4, 0xce, 0x7a, 0x9e, 0x79, 0xdf, 0xd6, 0xf3, 0x6c, 0x00, 0x95, 0xbc, + 0x5c, 0x28, 0xcd, 0x05, 0x8f, 0xa6, 0xe6, 0xd7, 0x57, 0x9b, 0x2b, 0x79, 0xb9, 0x70, 0x76, 0x0b, + 0xde, 0xd2, 0xdd, 0x75, 0xef, 0xc2, 0xde, 0x92, 0xd9, 0xe5, 0x1e, 0xc2, 0x81, 0x6d, 0xe2, 0xb8, + 0x47, 0x70, 0x68, 0x1f, 0x10, 0x65, 0x06, 0xdb, 0x7b, 0x76, 0xef, 0xc1, 0xbe, 0xe5, 0x05, 0xba, + 0x7f, 0x37, 0x60, 0xdf, 0x72, 0x87, 0x51, 0x06, 0x4d, 0x56, 0x48, 0x5e, 0xc8, 0x80, 0x66, 0x23, + 0xd6, 0x76, 0x54, 0x0f, 0xfa, 0x37, 0x7d, 0x12, 0xf3, 0x31, 0x95, 0xb5, 0x97, 0x8d, 0xd8, 0x00, + 0xd8, 0xf4, 0x77, 0xe7, 0x4f, 0x07, 0x5a, 0x26, 0x11, 0x3a, 0x81, 0x56, 0x1c, 0xe6, 0x81, 0xde, + 0x4c, 0x44, 0x05, 0x09, 0x25, 0x13, 0x13, 0xb5, 0xa3, 0x5b, 0xe5, 0xe5, 0x8f, 0xc3, 0xbc, 0x92, + 0x5f, 0xd4, 0x31, 0xf4, 0x18, 0xf6, 0x86, 0x34, 0xfe, 0xad, 0x20, 0x62, 0x32, 0x35, 0x62, 0x89, + 0x73, 0x22, 0xdb, 0x0d, 0x6d, 0xdb, 0xa9, 0x05, 0xda, 0x5b, 0x85, 0xcf, 0xb6, 0x61, 0x53, 0x1b, + 0x12, 0x16, 0xaa, 0x12, 0xdc, 0xe7, 0x1b, 0x70, 0xcf, 0x3a, 0x5d, 0xd0, 0x0f, 0xf0, 0x0e, 0xcd, + 0x4a, 0x4f, 0xc8, 0xb2, 0x11, 0x8d, 0x75, 0xb3, 0x1e, 0xad, 0x3d, 0xaf, 0x7a, 0xa5, 0xf9, 0x5c, + 0x79, 0x07, 0x4d, 0x3a, 0xfb, 0x40, 0x62, 0xf1, 0x10, 0x1a, 0x2a, 0xef, 0xd7, 0x37, 0x9f, 0x83, + 0x8b, 0x51, 0xf3, 0x41, 0xfc, 0xe5, 0xc0, 0xae, 0x59, 0xf6, 0xea, 0x8f, 0xe2, 0x5f, 0x07, 0x0e, + 0x6c, 0xb3, 0x10, 0x71, 0x53, 0xc3, 0x9e, 0xde, 0x78, 0xb6, 0x2e, 0x04, 0xcd, 0xed, 0xea, 0xc1, + 0x8e, 0x51, 0x84, 0x3e, 0xb2, 0x35, 0xcb, 0xd4, 0x2a, 0xf7, 0x45, 0x03, 0x8e, 0xd7, 0x1d, 0xb6, + 0xe8, 0xb9, 0x63, 0x2a, 0x35, 0xfe, 0x5f, 0x26, 0xf9, 0x52, 0xa1, 0xb9, 0x05, 0x3f, 0xc1, 0x83, + 0x95, 0x06, 0xf4, 0xc9, 0x8a, 0x7b, 0xb0, 0xec, 0x16, 0xfc, 0xe1, 0xc0, 0xfd, 0x90, 0xa5, 0xb6, + 0xba, 0x2e, 0x9d, 0x1f, 0x9f, 0xe8, 0x70, 0xcc, 0x12, 0x9c, 0xc5, 0x1e, 0x13, 0xb1, 0x1f, 0x93, + 0x4c, 0x4d, 0x6a, 0xbf, 0x0a, 0x61, 0x4e, 0x73, 0xe3, 0x1f, 0xe3, 0xd3, 0xea, 0xf3, 0x9f, 0xc6, + 0xfe, 0x97, 0x4a, 0x78, 0x75, 0x5e, 0x8a, 0xae, 0x9e, 0x14, 0x92, 0xf5, 0x93, 0xab, 0xef, 0x2b, + 0xd1, 0xf0, 0x4d, 0x95, 0xeb, 0xe3, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x12, 0x19, 0x35, 0xdb, + 0x1b, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/prediction_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/prediction_service.pb.go new file mode 100644 index 0000000..44cbe35 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/prediction_service.pb.go @@ -0,0 +1,506 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/automl/v1beta1/prediction_service.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. +type PredictRequest struct { + // Name of the model requested to serve the prediction. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // Payload to perform a prediction on. The payload must match the + // problem type that the model was trained to solve. + Payload *ExamplePayload `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` + // Additional domain-specific parameters, any string must be up to 25000 + // characters long. + // + // * For Image Classification: + // + // `score_threshold` - (float) A value from 0.0 to 1.0. When the model + // makes predictions for an image, it will only produce results that have + // at least this confidence score. The default is 0.5. + // + // * For Image Object Detection: + // `score_threshold` - (float) When Model detects objects on the image, + // it will only produce bounding boxes which have at least this + // confidence score. Value in 0 to 1 range, default is 0.5. + // `max_bounding_box_count` - (int64) No more than this number of bounding + // boxes will be returned in the response. Default is 100, the + // requested value may be limited by server. 
+ Params map[string]string `protobuf:"bytes,3,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PredictRequest) Reset() { *m = PredictRequest{} } +func (m *PredictRequest) String() string { return proto.CompactTextString(m) } +func (*PredictRequest) ProtoMessage() {} +func (*PredictRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_prediction_service_4cd94add91ccba6b, []int{0} +} +func (m *PredictRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PredictRequest.Unmarshal(m, b) +} +func (m *PredictRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PredictRequest.Marshal(b, m, deterministic) +} +func (dst *PredictRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PredictRequest.Merge(dst, src) +} +func (m *PredictRequest) XXX_Size() int { + return xxx_messageInfo_PredictRequest.Size(m) +} +func (m *PredictRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PredictRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PredictRequest proto.InternalMessageInfo + +func (m *PredictRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PredictRequest) GetPayload() *ExamplePayload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *PredictRequest) GetParams() map[string]string { + if m != nil { + return m.Params + } + return nil +} + +// Response message for [PredictionService.Predict][google.cloud.automl.v1beta1.PredictionService.Predict]. +type PredictResponse struct { + // Prediction result. + // Translation and Text Sentiment will return precisely one payload. + Payload []*AnnotationPayload `protobuf:"bytes,1,rep,name=payload,proto3" json:"payload,omitempty"` + // Additional domain-specific prediction response metadata. + // + // * For Image Object Detection: + // `max_bounding_box_count` - (int64) At most that many bounding boxes per + // image could have been returned. + // + // * For Text Sentiment: + // `sentiment_score` - (float, deprecated) A value between -1 and 1, + // -1 maps to least positive sentiment, while 1 maps to the most positive + // one and the higher the score, the more positive the sentiment in the + // document is. Yet these values are relative to the training data, so + // e.g. if all data was positive then -1 will be also positive (though + // the least). + // The sentiment_score shouldn't be confused with "score" or "magnitude" + // from the previous Natural Language Sentiment Analysis API. 
+ Metadata map[string]string `protobuf:"bytes,2,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PredictResponse) Reset() { *m = PredictResponse{} } +func (m *PredictResponse) String() string { return proto.CompactTextString(m) } +func (*PredictResponse) ProtoMessage() {} +func (*PredictResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_prediction_service_4cd94add91ccba6b, []int{1} +} +func (m *PredictResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PredictResponse.Unmarshal(m, b) +} +func (m *PredictResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PredictResponse.Marshal(b, m, deterministic) +} +func (dst *PredictResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PredictResponse.Merge(dst, src) +} +func (m *PredictResponse) XXX_Size() int { + return xxx_messageInfo_PredictResponse.Size(m) +} +func (m *PredictResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PredictResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PredictResponse proto.InternalMessageInfo + +func (m *PredictResponse) GetPayload() []*AnnotationPayload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *PredictResponse) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +// Request message for [PredictionService.BatchPredict][google.cloud.automl.v1beta1.PredictionService.BatchPredict]. +type BatchPredictRequest struct { + // Name of the model requested to serve the batch prediction. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The input configuration for batch prediction. + InputConfig *BatchPredictInputConfig `protobuf:"bytes,3,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. The Configuration specifying where output predictions should + // be written. + OutputConfig *BatchPredictOutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + // Additional domain-specific parameters for the predictions, any string must + // be up to 25000 characters long. + // + // * For Video Classification : + // `score_threshold` - (float) A value from 0.0 to 1.0. When the model + // makes predictions for a video, it will only produce results that + // have at least this confidence score. The default is 0.5. + // `segment_classification` - (boolean) Set to true to request + // segment-level classification. AutoML Video Intelligence returns + // labels and their confidence scores for the entire segment of the + // video that user specified in the request configuration. + // The default is "true". + // `shot_classification` - (boolean) Set to true to request shot-level + // classification. AutoML Video Intelligence determines the boundaries + // for each camera shot in the entire segment of the video that user + // specified in the request configuration. AutoML Video Intelligence + // then returns labels and their confidence scores for each detected + // shot, along with the start and end time of the shot. + // WARNING: Model evaluation is not done for this classification type, + // the quality of it depends on training data, but there are no metrics + // provided to describe that quality. The default is "false". 
+ // `1s_interval_classification` - (boolean) Set to true to request + // classification for a video at one-second intervals. AutoML Video + // Intelligence returns labels and their confidence scores for each + // second of the entire segment of the video that user specified in the + // request configuration. + // WARNING: Model evaluation is not done for this classification + // type, the quality of it depends on training data, but there are no + // metrics provided to describe that quality. The default is + // "false". + Params map[string]string `protobuf:"bytes,5,rep,name=params,proto3" json:"params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictRequest) Reset() { *m = BatchPredictRequest{} } +func (m *BatchPredictRequest) String() string { return proto.CompactTextString(m) } +func (*BatchPredictRequest) ProtoMessage() {} +func (*BatchPredictRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_prediction_service_4cd94add91ccba6b, []int{2} +} +func (m *BatchPredictRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictRequest.Unmarshal(m, b) +} +func (m *BatchPredictRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictRequest.Marshal(b, m, deterministic) +} +func (dst *BatchPredictRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictRequest.Merge(dst, src) +} +func (m *BatchPredictRequest) XXX_Size() int { + return xxx_messageInfo_BatchPredictRequest.Size(m) +} +func (m *BatchPredictRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictRequest proto.InternalMessageInfo + +func (m *BatchPredictRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BatchPredictRequest) GetInputConfig() *BatchPredictInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *BatchPredictRequest) GetOutputConfig() *BatchPredictOutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +func (m *BatchPredictRequest) GetParams() map[string]string { + if m != nil { + return m.Params + } + return nil +} + +// Batch predict result. 
+type BatchPredictResult struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPredictResult) Reset() { *m = BatchPredictResult{} } +func (m *BatchPredictResult) String() string { return proto.CompactTextString(m) } +func (*BatchPredictResult) ProtoMessage() {} +func (*BatchPredictResult) Descriptor() ([]byte, []int) { + return fileDescriptor_prediction_service_4cd94add91ccba6b, []int{3} +} +func (m *BatchPredictResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPredictResult.Unmarshal(m, b) +} +func (m *BatchPredictResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPredictResult.Marshal(b, m, deterministic) +} +func (dst *BatchPredictResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPredictResult.Merge(dst, src) +} +func (m *BatchPredictResult) XXX_Size() int { + return xxx_messageInfo_BatchPredictResult.Size(m) +} +func (m *BatchPredictResult) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPredictResult.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPredictResult proto.InternalMessageInfo + +func init() { + proto.RegisterType((*PredictRequest)(nil), "google.cloud.automl.v1beta1.PredictRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.PredictRequest.ParamsEntry") + proto.RegisterType((*PredictResponse)(nil), "google.cloud.automl.v1beta1.PredictResponse") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.PredictResponse.MetadataEntry") + proto.RegisterType((*BatchPredictRequest)(nil), "google.cloud.automl.v1beta1.BatchPredictRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.automl.v1beta1.BatchPredictRequest.ParamsEntry") + proto.RegisterType((*BatchPredictResult)(nil), "google.cloud.automl.v1beta1.BatchPredictResult") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PredictionServiceClient is the client API for PredictionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PredictionServiceClient interface { + // Perform an online prediction. The prediction result will be directly + // returned in the response. + // Available for following ML problems, and their expected request payloads: + // * Image Classification - Image in .JPEG, .GIF or .PNG format, image_bytes + // up to 30MB. + // * Image Object Detection - Image in .JPEG, .GIF or .PNG format, image_bytes + // up to 30MB. + // * Text Classification - TextSnippet, content up to 10,000 characters, + // UTF-8 encoded. + // * Text Extraction - TextSnippet, content up to 30,000 characters, + // UTF-8 NFC encoded. * Translation - TextSnippet, content up to 25,000 characters, UTF-8 + // encoded. + // * Tables - Row, with column values matching the columns of the model, + // up to 5MB. + // * Text Sentiment - TextSnippet, content up 500 characters, UTF-8 encoded. + Predict(ctx context.Context, in *PredictRequest, opts ...grpc.CallOption) (*PredictResponse, error) + // Perform a batch prediction. 
Unlike the online [Predict][google.cloud.automl.v1beta1.PredictionService.Predict], batch + // prediction result won't be immediately available in the response. Instead, + // a long running operation object is returned. User can poll the operation + // result via [GetOperation][google.longrunning.Operations.GetOperation] + // method. Once the operation is done, [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult] is returned in + // the [response][google.longrunning.Operation.response] field. + // Available for following ML problems: + // * Video Classification + // * Text Extraction + // * Tables + BatchPredict(ctx context.Context, in *BatchPredictRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type predictionServiceClient struct { + cc *grpc.ClientConn +} + +func NewPredictionServiceClient(cc *grpc.ClientConn) PredictionServiceClient { + return &predictionServiceClient{cc} +} + +func (c *predictionServiceClient) Predict(ctx context.Context, in *PredictRequest, opts ...grpc.CallOption) (*PredictResponse, error) { + out := new(PredictResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.PredictionService/Predict", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *predictionServiceClient) BatchPredict(ctx context.Context, in *BatchPredictRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.PredictionService/BatchPredict", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PredictionServiceServer is the server API for PredictionService service. +type PredictionServiceServer interface { + // Perform an online prediction. The prediction result will be directly + // returned in the response. + // Available for following ML problems, and their expected request payloads: + // * Image Classification - Image in .JPEG, .GIF or .PNG format, image_bytes + // up to 30MB. + // * Image Object Detection - Image in .JPEG, .GIF or .PNG format, image_bytes + // up to 30MB. + // * Text Classification - TextSnippet, content up to 10,000 characters, + // UTF-8 encoded. + // * Text Extraction - TextSnippet, content up to 30,000 characters, + // UTF-8 NFC encoded. * Translation - TextSnippet, content up to 25,000 characters, UTF-8 + // encoded. + // * Tables - Row, with column values matching the columns of the model, + // up to 5MB. + // * Text Sentiment - TextSnippet, content up 500 characters, UTF-8 encoded. + Predict(context.Context, *PredictRequest) (*PredictResponse, error) + // Perform a batch prediction. Unlike the online [Predict][google.cloud.automl.v1beta1.PredictionService.Predict], batch + // prediction result won't be immediately available in the response. Instead, + // a long running operation object is returned. User can poll the operation + // result via [GetOperation][google.longrunning.Operations.GetOperation] + // method. Once the operation is done, [BatchPredictResult][google.cloud.automl.v1beta1.BatchPredictResult] is returned in + // the [response][google.longrunning.Operation.response] field. 
+ // Available for following ML problems: + // * Video Classification + // * Text Extraction + // * Tables + BatchPredict(context.Context, *BatchPredictRequest) (*longrunning.Operation, error) +} + +func RegisterPredictionServiceServer(s *grpc.Server, srv PredictionServiceServer) { + s.RegisterService(&_PredictionService_serviceDesc, srv) +} + +func _PredictionService_Predict_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PredictRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PredictionServiceServer).Predict(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.PredictionService/Predict", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PredictionServiceServer).Predict(ctx, req.(*PredictRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PredictionService_BatchPredict_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchPredictRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PredictionServiceServer).BatchPredict(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.PredictionService/BatchPredict", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PredictionServiceServer).BatchPredict(ctx, req.(*BatchPredictRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PredictionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.automl.v1beta1.PredictionService", + HandlerType: (*PredictionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Predict", + Handler: _PredictionService_Predict_Handler, + }, + { + MethodName: "BatchPredict", + Handler: _PredictionService_BatchPredict_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/automl/v1beta1/prediction_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/prediction_service.proto", fileDescriptor_prediction_service_4cd94add91ccba6b) +} + +var fileDescriptor_prediction_service_4cd94add91ccba6b = []byte{ + // 634 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x54, 0xcd, 0x6e, 0xd4, 0x3c, + 0x14, 0x95, 0x33, 0xfd, 0xf9, 0xea, 0x69, 0x3f, 0xc0, 0x54, 0x68, 0x94, 0x82, 0xa8, 0x02, 0x8b, + 0x6a, 0x5a, 0xc5, 0xb4, 0x14, 0x15, 0xa6, 0x65, 0x31, 0x53, 0x55, 0x85, 0x45, 0xd5, 0x28, 0xa0, + 0x22, 0x55, 0x95, 0x46, 0x6e, 0xc6, 0x84, 0x80, 0x63, 0x87, 0xc4, 0xa9, 0xa8, 0x10, 0x1b, 0x5e, + 0x81, 0x2d, 0x0f, 0xc0, 0xb3, 0xb0, 0xe5, 0x15, 0xd8, 0xb0, 0x82, 0x1d, 0x5b, 0x14, 0xdb, 0x93, + 0x66, 0x54, 0x14, 0xcd, 0x2c, 0xd8, 0xc5, 0xc9, 0x3d, 0xe7, 0x9e, 0x73, 0xaf, 0x73, 0xe0, 0x66, + 0x28, 0x44, 0xc8, 0x28, 0x0e, 0x98, 0xc8, 0x07, 0x98, 0xe4, 0x52, 0xc4, 0x0c, 0x9f, 0xad, 0x9f, + 0x52, 0x49, 0xd6, 0x71, 0x92, 0xd2, 0x41, 0x14, 0xc8, 0x48, 0xf0, 0x7e, 0x46, 0xd3, 0xb3, 0x28, + 0xa0, 0x6e, 0x92, 0x0a, 0x29, 0xd0, 0x92, 0x46, 0xb9, 0x0a, 0xe5, 0x6a, 0x94, 0x6b, 0x50, 0xf6, + 0x4d, 0x43, 0x49, 0x92, 0x08, 0x13, 0xce, 0x85, 0x24, 0x05, 0x43, 0xa6, 0xa1, 0x76, 0x6d, 0xc3, + 0x8b, 0xf2, 0x7e, 0x42, 0xce, 0x99, 0x20, 0x03, 0x83, 0x5a, 0xab, 0x43, 0x0d, 0x88, 0x24, 0xfd, + 0x48, 
0xd2, 0x78, 0xd8, 0xe3, 0x6e, 0x5d, 0x75, 0x24, 0xc6, 0xe1, 0x14, 0x09, 0x4d, 0x47, 0x74, + 0xdf, 0x31, 0xd5, 0x4c, 0xf0, 0x30, 0xcd, 0x39, 0x8f, 0x78, 0x78, 0xa9, 0xc8, 0xf9, 0x0d, 0xe0, + 0xff, 0x9e, 0x1e, 0x9a, 0x4f, 0xdf, 0xe6, 0x34, 0x93, 0x08, 0xc1, 0x29, 0x4e, 0x62, 0xda, 0x02, + 0xcb, 0x60, 0x65, 0xce, 0x57, 0xcf, 0x68, 0x0f, 0xce, 0x1a, 0x7b, 0x2d, 0x6b, 0x19, 0xac, 0x34, + 0x37, 0x56, 0xdd, 0x9a, 0x81, 0xba, 0x7b, 0xef, 0x48, 0x9c, 0x30, 0xea, 0x69, 0x88, 0x3f, 0xc4, + 0xa2, 0x43, 0x38, 0x93, 0x90, 0x94, 0xc4, 0x59, 0xab, 0xb1, 0xdc, 0x58, 0x69, 0x6e, 0x6c, 0xd5, + 0xb2, 0x8c, 0xea, 0x72, 0x3d, 0x85, 0xdc, 0xe3, 0x32, 0x3d, 0xf7, 0x0d, 0x8d, 0xfd, 0x08, 0x36, + 0x2b, 0xaf, 0xd1, 0x55, 0xd8, 0x78, 0x43, 0xcf, 0x8d, 0xf2, 0xe2, 0x11, 0x2d, 0xc2, 0xe9, 0x33, + 0xc2, 0x72, 0xaa, 0x64, 0xcf, 0xf9, 0xfa, 0xd0, 0xb1, 0x1e, 0x02, 0xe7, 0x17, 0x80, 0x57, 0xca, + 0x0e, 0x59, 0x22, 0x78, 0x46, 0xd1, 0x93, 0x0b, 0x9b, 0x40, 0x09, 0x74, 0x6b, 0x05, 0x76, 0xcb, + 0xe5, 0x5f, 0x72, 0x7a, 0x04, 0xff, 0x8b, 0xa9, 0x24, 0xc5, 0xa2, 0x5b, 0x96, 0xa2, 0xea, 0x8c, + 0xe7, 0x55, 0x2b, 0x71, 0x0f, 0x0c, 0x58, 0xdb, 0x2d, 0xb9, 0xec, 0x6d, 0xb8, 0x30, 0xf2, 0x69, + 0x22, 0xcb, 0x3f, 0x2d, 0x78, 0xbd, 0x47, 0x64, 0xf0, 0x6a, 0x8c, 0x8d, 0xbf, 0x80, 0xf3, 0x11, + 0x4f, 0x72, 0xd9, 0x0f, 0x04, 0x7f, 0x19, 0x85, 0xad, 0x86, 0x5a, 0xfb, 0x66, 0xad, 0x89, 0x2a, + 0xf7, 0xd3, 0x02, 0xbc, 0xab, 0xb0, 0x7e, 0x33, 0xba, 0x38, 0xa0, 0x63, 0xb8, 0x20, 0x72, 0x59, + 0x61, 0x9e, 0x52, 0xcc, 0x0f, 0xc6, 0x66, 0x3e, 0x54, 0x68, 0x43, 0x3d, 0x2f, 0x2a, 0x27, 0xf4, + 0xbc, 0xbc, 0x5f, 0xd3, 0x6a, 0xe6, 0x3b, 0x63, 0x93, 0xfe, 0xa3, 0x4b, 0xb6, 0x08, 0xd1, 0x68, + 0x97, 0x2c, 0x67, 0x72, 0xe3, 0x87, 0x05, 0xaf, 0x79, 0x65, 0x52, 0x3d, 0xd3, 0x41, 0x85, 0xbe, + 0x00, 0x38, 0x6b, 0xde, 0xa2, 0xd5, 0x09, 0x7e, 0x0c, 0x7b, 0x6d, 0x92, 0x9b, 0xe5, 0xf4, 0x3e, + 0x7e, 0xfb, 0xfe, 0xc9, 0xda, 0x71, 0xb6, 0xca, 0xe4, 0x78, 0x5f, 0x2c, 0xfc, 0x71, 0x92, 0x8a, + 0xd7, 0x34, 0x90, 0x19, 0x6e, 0x63, 0x26, 0x02, 0x1d, 0x12, 0xb8, 0x8d, 0x63, 0x31, 0xa0, 0x2c, + 0xc3, 0xed, 0x0f, 0x1d, 0x93, 0xad, 0x1d, 0xd0, 0x2e, 0xa4, 0xce, 0x57, 0x7d, 0xa1, 0x7b, 0x93, + 0x0e, 0xda, 0xbe, 0x35, 0x44, 0x54, 0xe2, 0xc9, 0x3d, 0x1c, 0xc6, 0x93, 0xb3, 0xaf, 0x54, 0x76, + 0x9d, 0x9d, 0x49, 0x55, 0x9e, 0x56, 0x7a, 0x75, 0x40, 0xbb, 0xf7, 0x19, 0xc0, 0xdb, 0x81, 0x88, + 0xeb, 0xf4, 0xf5, 0x6e, 0x5c, 0x5a, 0x86, 0x57, 0x84, 0xa3, 0x07, 0x8e, 0xbb, 0x06, 0x16, 0x0a, + 0x46, 0x78, 0xe8, 0x8a, 0x34, 0xc4, 0x21, 0xe5, 0x2a, 0x3a, 0xb1, 0xfe, 0x44, 0x92, 0x28, 0xfb, + 0x6b, 0x20, 0x6f, 0xeb, 0xe3, 0x57, 0x6b, 0x69, 0x5f, 0x15, 0x9e, 0xec, 0x16, 0x45, 0x27, 0xdd, + 0x5c, 0x8a, 0x03, 0x76, 0x72, 0xa4, 0x8b, 0x4e, 0x67, 0x14, 0xd7, 0xfd, 0x3f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0xda, 0xb7, 0xf9, 0x0c, 0xd6, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/ranges.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/ranges.pb.go new file mode 100644 index 0000000..80a9afc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/ranges.pb.go @@ -0,0 +1,95 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/automl/v1beta1/ranges.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A range between two double numbers. +type DoubleRange struct { + // Start of the range, inclusive. + Start float64 `protobuf:"fixed64,1,opt,name=start,proto3" json:"start,omitempty"` + // End of the range, exclusive. + End float64 `protobuf:"fixed64,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DoubleRange) Reset() { *m = DoubleRange{} } +func (m *DoubleRange) String() string { return proto.CompactTextString(m) } +func (*DoubleRange) ProtoMessage() {} +func (*DoubleRange) Descriptor() ([]byte, []int) { + return fileDescriptor_ranges_d6420aeb5e8fc6d2, []int{0} +} +func (m *DoubleRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleRange.Unmarshal(m, b) +} +func (m *DoubleRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleRange.Marshal(b, m, deterministic) +} +func (dst *DoubleRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleRange.Merge(dst, src) +} +func (m *DoubleRange) XXX_Size() int { + return xxx_messageInfo_DoubleRange.Size(m) +} +func (m *DoubleRange) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleRange proto.InternalMessageInfo + +func (m *DoubleRange) GetStart() float64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *DoubleRange) GetEnd() float64 { + if m != nil { + return m.End + } + return 0 +} + +func init() { + proto.RegisterType((*DoubleRange)(nil), "google.cloud.automl.v1beta1.DoubleRange") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/ranges.proto", fileDescriptor_ranges_d6420aeb5e8fc6d2) +} + +var fileDescriptor_ranges_d6420aeb5e8fc6d2 = []byte{ + // 222 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xcf, 0xbf, 0x4a, 0xc5, 0x30, + 0x14, 0x06, 0x70, 0x72, 0x45, 0x87, 0xdc, 0x45, 0x8a, 0xc3, 0xc5, 0x2b, 0x28, 0x4e, 0x77, 0x4a, + 0x28, 0xe2, 0xe4, 0xd4, 0x2a, 0x38, 0x09, 0xa5, 0x83, 0x83, 0x74, 0x39, 0x6d, 0x43, 0x28, 0xa4, + 0x39, 0x25, 0x39, 0xf1, 0x3d, 0x7c, 0x2d, 0x9f, 0x4a, 0xf2, 0x67, 0x94, 0xbb, 0xe5, 0x24, 0xbf, + 0x7c, 0x5f, 0xc2, 0x4f, 0x1a, 0x51, 0x1b, 0x25, 0x27, 0x83, 0x61, 0x96, 0x10, 0x08, 0x57, 0x23, + 0xbf, 0xeb, 0x51, 0x11, 0xd4, 0xd2, 0x81, 0xd5, 0xca, 0x8b, 0xcd, 0x21, 0x61, 0x75, 0xcc, 0x52, + 0x24, 0x29, 0xb2, 0x14, 0x45, 0xde, 0xde, 0x95, 0x18, 0xd8, 0x16, 0x09, 0xd6, 0x22, 0x01, 0x2d, + 0x68, 0xcb, 0xd5, 0xc7, 0x67, 0xbe, 0x7f, 0xc3, 0x30, 0x1a, 0xd5, 0xc7, 0xc0, 0xea, 0x86, 0x5f, + 0x7a, 0x02, 0x47, 0x07, 0xf6, 0xc0, 0x4e, 0xac, 0xcf, 0x43, 0x75, 0xcd, 0x2f, 0x94, 0x9d, 0x0f, + 0xbb, 0xb4, 0x17, 0x97, 0xed, 0x0f, 0xe3, 0xf7, 0x13, 0xae, 0xe2, 
0x4c, 0x71, 0xbb, 0x4f, 0x91, + 0xbe, 0x8b, 0x3d, 0x1d, 0xfb, 0x6a, 0x8a, 0xd5, 0x68, 0xc0, 0x6a, 0x81, 0x4e, 0x4b, 0xad, 0x6c, + 0x7a, 0x85, 0xcc, 0x47, 0xb0, 0x2d, 0xfe, 0xdf, 0xdf, 0xbe, 0xe4, 0xf1, 0x77, 0x77, 0x7c, 0x4f, + 0x70, 0x78, 0x8d, 0x68, 0x68, 0x02, 0xe1, 0x87, 0x19, 0x3e, 0x33, 0x1a, 0xaf, 0x52, 0xd6, 0xd3, + 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3b, 0x33, 0xfa, 0xe5, 0x38, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/regression.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/regression.pb.go new file mode 100644 index 0000000..b4c565d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/regression.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/regression.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metrics for regression problems. +type RegressionEvaluationMetrics struct { + // Output only. Root Mean Squared Error (RMSE). + RootMeanSquaredError float32 `protobuf:"fixed32,1,opt,name=root_mean_squared_error,json=rootMeanSquaredError,proto3" json:"root_mean_squared_error,omitempty"` + // Output only. Mean Absolute Error (MAE). + MeanAbsoluteError float32 `protobuf:"fixed32,2,opt,name=mean_absolute_error,json=meanAbsoluteError,proto3" json:"mean_absolute_error,omitempty"` + // Output only. Mean absolute percentage error. Only set if all ground truth + // values are are positive. + MeanAbsolutePercentageError float32 `protobuf:"fixed32,3,opt,name=mean_absolute_percentage_error,json=meanAbsolutePercentageError,proto3" json:"mean_absolute_percentage_error,omitempty"` + // Output only. R squared. 
+ RSquared float32 `protobuf:"fixed32,4,opt,name=r_squared,json=rSquared,proto3" json:"r_squared,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegressionEvaluationMetrics) Reset() { *m = RegressionEvaluationMetrics{} } +func (m *RegressionEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*RegressionEvaluationMetrics) ProtoMessage() {} +func (*RegressionEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_regression_ca949e981cd1d01b, []int{0} +} +func (m *RegressionEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegressionEvaluationMetrics.Unmarshal(m, b) +} +func (m *RegressionEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegressionEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *RegressionEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegressionEvaluationMetrics.Merge(dst, src) +} +func (m *RegressionEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_RegressionEvaluationMetrics.Size(m) +} +func (m *RegressionEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_RegressionEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_RegressionEvaluationMetrics proto.InternalMessageInfo + +func (m *RegressionEvaluationMetrics) GetRootMeanSquaredError() float32 { + if m != nil { + return m.RootMeanSquaredError + } + return 0 +} + +func (m *RegressionEvaluationMetrics) GetMeanAbsoluteError() float32 { + if m != nil { + return m.MeanAbsoluteError + } + return 0 +} + +func (m *RegressionEvaluationMetrics) GetMeanAbsolutePercentageError() float32 { + if m != nil { + return m.MeanAbsolutePercentageError + } + return 0 +} + +func (m *RegressionEvaluationMetrics) GetRSquared() float32 { + if m != nil { + return m.RSquared + } + return 0 +} + +func init() { + proto.RegisterType((*RegressionEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.RegressionEvaluationMetrics") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/regression.proto", fileDescriptor_regression_ca949e981cd1d01b) +} + +var fileDescriptor_regression_ca949e981cd1d01b = []byte{ + // 300 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x4a, 0x33, 0x31, + 0x14, 0x85, 0x99, 0xfe, 0x3f, 0xa2, 0xd9, 0x88, 0x55, 0xb0, 0x38, 0xa2, 0xe2, 0xca, 0x85, 0x24, + 0x14, 0x71, 0xe5, 0xaa, 0x2d, 0xc5, 0x55, 0xa1, 0x54, 0x70, 0x21, 0x85, 0x72, 0x3b, 0xbd, 0x84, + 0x81, 0x34, 0x77, 0xbc, 0x49, 0xfa, 0x22, 0xbe, 0x95, 0x0f, 0xe2, 0x73, 0xc8, 0x24, 0x69, 0x55, + 0x10, 0x97, 0xe1, 0x7c, 0xdf, 0xe1, 0x72, 0x22, 0x6e, 0x35, 0x91, 0x36, 0xa8, 0x2a, 0x43, 0x61, + 0xa5, 0x20, 0x78, 0x5a, 0x1b, 0xb5, 0xe9, 0x2f, 0xd1, 0x43, 0x5f, 0x31, 0x6a, 0x46, 0xe7, 0x6a, + 0xb2, 0xb2, 0x61, 0xf2, 0xd4, 0x2d, 0x13, 0x2d, 0x23, 0x2d, 0x13, 0x2d, 0x33, 0x7d, 0x76, 0x9e, + 0xab, 0xa0, 0xa9, 0x15, 0x58, 0x4b, 0x1e, 0x7c, 0x4d, 0xd6, 0x25, 0xf5, 0xfa, 0xa3, 0x10, 0xe5, + 0x6c, 0xd7, 0x37, 0xde, 0x80, 0x09, 0x31, 0x9f, 0xa0, 0xe7, 0xba, 0x72, 0xdd, 0x7b, 0x71, 0xca, + 0x44, 0x7e, 0xb1, 0x46, 0xb0, 0x0b, 0xf7, 0x1a, 0x80, 0x71, 0xb5, 0x40, 0x66, 0xe2, 0x5e, 0x71, + 0x55, 0xdc, 0x74, 0x66, 0x27, 0x6d, 0x3c, 0x41, 0xb0, 0x4f, 0x29, 0x1c, 0xb7, 0x59, 0x57, 0x8a, + 0xe3, 0x68, 0xc0, 0xd2, 0x91, 0x09, 0x1e, 0xb3, 0xd2, 0x89, 0xca, 0x51, 0x1b, 0x0d, 0x72, 0x92, + 0xf8, 0x91, 0xb8, 0xf8, 0xc9, 0x37, 0xc8, 0x15, 0x5a, 
0x0f, 0x7a, 0xab, 0xfe, 0x8b, 0x6a, 0xf9, + 0x5d, 0x9d, 0xee, 0x98, 0x54, 0x52, 0x8a, 0x03, 0xde, 0xde, 0xd8, 0xfb, 0x1f, 0xf9, 0x7d, 0xce, + 0x67, 0x0d, 0xdf, 0x0a, 0x71, 0x59, 0xd1, 0x5a, 0xfe, 0x31, 0xd5, 0xf0, 0xf0, 0x6b, 0x89, 0x69, + 0xbb, 0xce, 0xcb, 0x20, 0xd3, 0x9a, 0x0c, 0x58, 0x2d, 0x89, 0xb5, 0xd2, 0x68, 0xe3, 0x72, 0x2a, + 0x45, 0xd0, 0xd4, 0xee, 0xd7, 0x5f, 0x7a, 0x48, 0xcf, 0xf7, 0x4e, 0xf9, 0x18, 0xc1, 0xf9, 0xa8, + 0x85, 0xe6, 0x83, 0xe0, 0x69, 0x62, 0xe6, 0xcf, 0x09, 0x5a, 0xee, 0xc5, 0xae, 0xbb, 0xcf, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xa6, 0x7b, 0x3d, 0x93, 0xf0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/service.pb.go new file mode 100644 index 0000000..516d7c4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/service.pb.go @@ -0,0 +1,2785 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/service.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [AutoMl.CreateDataset][google.cloud.automl.v1beta1.AutoMl.CreateDataset]. +type CreateDatasetRequest struct { + // The resource name of the project to create the dataset for. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The dataset to create. 
+ Dataset *Dataset `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDatasetRequest) Reset() { *m = CreateDatasetRequest{} } +func (m *CreateDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDatasetRequest) ProtoMessage() {} +func (*CreateDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{0} +} +func (m *CreateDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDatasetRequest.Unmarshal(m, b) +} +func (m *CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDatasetRequest.Merge(dst, src) +} +func (m *CreateDatasetRequest) XXX_Size() int { + return xxx_messageInfo_CreateDatasetRequest.Size(m) +} +func (m *CreateDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDatasetRequest proto.InternalMessageInfo + +func (m *CreateDatasetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDatasetRequest) GetDataset() *Dataset { + if m != nil { + return m.Dataset + } + return nil +} + +// Request message for [AutoMl.GetDataset][google.cloud.automl.v1beta1.AutoMl.GetDataset]. +type GetDatasetRequest struct { + // The resource name of the dataset to retrieve. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatasetRequest) Reset() { *m = GetDatasetRequest{} } +func (m *GetDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*GetDatasetRequest) ProtoMessage() {} +func (*GetDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{1} +} +func (m *GetDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatasetRequest.Unmarshal(m, b) +} +func (m *GetDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *GetDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatasetRequest.Merge(dst, src) +} +func (m *GetDatasetRequest) XXX_Size() int { + return xxx_messageInfo_GetDatasetRequest.Size(m) +} +func (m *GetDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatasetRequest proto.InternalMessageInfo + +func (m *GetDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. +type ListDatasetsRequest struct { + // The resource name of the project from which to list datasets. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // An expression for filtering the results of the request. + // + // * `dataset_metadata` - for existence of the case (e.g. + // image_classification_dataset_metadata:*). Some examples of using the filter are: + // + // * `translation_dataset_metadata:*` --> The dataset has + // translation_dataset_metadata. 
+ Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. Server may return fewer results than requested. + // If unspecified, server will pick a default size. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListDatasetsResponse.next_page_token][google.cloud.automl.v1beta1.ListDatasetsResponse.next_page_token] of the previous + // [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets] call. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsRequest) Reset() { *m = ListDatasetsRequest{} } +func (m *ListDatasetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsRequest) ProtoMessage() {} +func (*ListDatasetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{2} +} +func (m *ListDatasetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsRequest.Unmarshal(m, b) +} +func (m *ListDatasetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsRequest.Merge(dst, src) +} +func (m *ListDatasetsRequest) XXX_Size() int { + return xxx_messageInfo_ListDatasetsRequest.Size(m) +} +func (m *ListDatasetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsRequest proto.InternalMessageInfo + +func (m *ListDatasetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDatasetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListDatasetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDatasetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [AutoMl.ListDatasets][google.cloud.automl.v1beta1.AutoMl.ListDatasets]. +type ListDatasetsResponse struct { + // The datasets read. + Datasets []*Dataset `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"` + // A token to retrieve next page of results. + // Pass to [ListDatasetsRequest.page_token][google.cloud.automl.v1beta1.ListDatasetsRequest.page_token] to obtain that page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsResponse) Reset() { *m = ListDatasetsResponse{} } +func (m *ListDatasetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsResponse) ProtoMessage() {} +func (*ListDatasetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{3} +} +func (m *ListDatasetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsResponse.Unmarshal(m, b) +} +func (m *ListDatasetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsResponse.Merge(dst, src) +} +func (m *ListDatasetsResponse) XXX_Size() int { + return xxx_messageInfo_ListDatasetsResponse.Size(m) +} +func (m *ListDatasetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsResponse proto.InternalMessageInfo + +func (m *ListDatasetsResponse) GetDatasets() []*Dataset { + if m != nil { + return m.Datasets + } + return nil +} + +func (m *ListDatasetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [AutoMl.UpdateDataset][google.cloud.automl.v1beta1.AutoMl.UpdateDataset] +type UpdateDatasetRequest struct { + // The dataset which replaces the resource on the server. + Dataset *Dataset `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + // The update mask applies to the resource. 
For the `FieldMask` definition, + // see + // + // https: + // //developers.google.com/protocol-buffers + // // /docs/reference/google.protobuf#fieldmask + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDatasetRequest) Reset() { *m = UpdateDatasetRequest{} } +func (m *UpdateDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDatasetRequest) ProtoMessage() {} +func (*UpdateDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{4} +} +func (m *UpdateDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDatasetRequest.Unmarshal(m, b) +} +func (m *UpdateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDatasetRequest.Merge(dst, src) +} +func (m *UpdateDatasetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDatasetRequest.Size(m) +} +func (m *UpdateDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDatasetRequest proto.InternalMessageInfo + +func (m *UpdateDatasetRequest) GetDataset() *Dataset { + if m != nil { + return m.Dataset + } + return nil +} + +func (m *UpdateDatasetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for [AutoMl.DeleteDataset][google.cloud.automl.v1beta1.AutoMl.DeleteDataset]. +type DeleteDatasetRequest struct { + // The resource name of the dataset to delete. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDatasetRequest) Reset() { *m = DeleteDatasetRequest{} } +func (m *DeleteDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDatasetRequest) ProtoMessage() {} +func (*DeleteDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{5} +} +func (m *DeleteDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDatasetRequest.Unmarshal(m, b) +} +func (m *DeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDatasetRequest.Merge(dst, src) +} +func (m *DeleteDatasetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDatasetRequest.Size(m) +} +func (m *DeleteDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDatasetRequest proto.InternalMessageInfo + +func (m *DeleteDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData]. +type ImportDataRequest struct { + // Required. Dataset name. Dataset must already exist. All imported + // annotations and examples will be added. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. 
The desired input location and its domain specific semantics, + // if any. + InputConfig *InputConfig `protobuf:"bytes,3,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDataRequest) Reset() { *m = ImportDataRequest{} } +func (m *ImportDataRequest) String() string { return proto.CompactTextString(m) } +func (*ImportDataRequest) ProtoMessage() {} +func (*ImportDataRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{6} +} +func (m *ImportDataRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDataRequest.Unmarshal(m, b) +} +func (m *ImportDataRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDataRequest.Marshal(b, m, deterministic) +} +func (dst *ImportDataRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDataRequest.Merge(dst, src) +} +func (m *ImportDataRequest) XXX_Size() int { + return xxx_messageInfo_ImportDataRequest.Size(m) +} +func (m *ImportDataRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDataRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDataRequest proto.InternalMessageInfo + +func (m *ImportDataRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ImportDataRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// Request message for [AutoMl.ExportData][google.cloud.automl.v1beta1.AutoMl.ExportData]. +type ExportDataRequest struct { + // Required. The resource name of the dataset. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The desired output location. + OutputConfig *OutputConfig `protobuf:"bytes,3,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataRequest) Reset() { *m = ExportDataRequest{} } +func (m *ExportDataRequest) String() string { return proto.CompactTextString(m) } +func (*ExportDataRequest) ProtoMessage() {} +func (*ExportDataRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{7} +} +func (m *ExportDataRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataRequest.Unmarshal(m, b) +} +func (m *ExportDataRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataRequest.Marshal(b, m, deterministic) +} +func (dst *ExportDataRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataRequest.Merge(dst, src) +} +func (m *ExportDataRequest) XXX_Size() int { + return xxx_messageInfo_ExportDataRequest.Size(m) +} +func (m *ExportDataRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataRequest proto.InternalMessageInfo + +func (m *ExportDataRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportDataRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request message for [AutoMl.GetAnnotationSpec][google.cloud.automl.v1beta1.AutoMl.GetAnnotationSpec]. +type GetAnnotationSpecRequest struct { + // The resource name of the annotation spec to retrieve. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAnnotationSpecRequest) Reset() { *m = GetAnnotationSpecRequest{} } +func (m *GetAnnotationSpecRequest) String() string { return proto.CompactTextString(m) } +func (*GetAnnotationSpecRequest) ProtoMessage() {} +func (*GetAnnotationSpecRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{8} +} +func (m *GetAnnotationSpecRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAnnotationSpecRequest.Unmarshal(m, b) +} +func (m *GetAnnotationSpecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAnnotationSpecRequest.Marshal(b, m, deterministic) +} +func (dst *GetAnnotationSpecRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAnnotationSpecRequest.Merge(dst, src) +} +func (m *GetAnnotationSpecRequest) XXX_Size() int { + return xxx_messageInfo_GetAnnotationSpecRequest.Size(m) +} +func (m *GetAnnotationSpecRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAnnotationSpecRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAnnotationSpecRequest proto.InternalMessageInfo + +func (m *GetAnnotationSpecRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.GetTableSpec][google.cloud.automl.v1beta1.AutoMl.GetTableSpec]. +type GetTableSpecRequest struct { + // The resource name of the table spec to retrieve. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mask specifying which fields to read. + FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTableSpecRequest) Reset() { *m = GetTableSpecRequest{} } +func (m *GetTableSpecRequest) String() string { return proto.CompactTextString(m) } +func (*GetTableSpecRequest) ProtoMessage() {} +func (*GetTableSpecRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{9} +} +func (m *GetTableSpecRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTableSpecRequest.Unmarshal(m, b) +} +func (m *GetTableSpecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTableSpecRequest.Marshal(b, m, deterministic) +} +func (dst *GetTableSpecRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTableSpecRequest.Merge(dst, src) +} +func (m *GetTableSpecRequest) XXX_Size() int { + return xxx_messageInfo_GetTableSpecRequest.Size(m) +} +func (m *GetTableSpecRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTableSpecRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTableSpecRequest proto.InternalMessageInfo + +func (m *GetTableSpecRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetTableSpecRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +// Request message for [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. +type ListTableSpecsRequest struct { + // The resource name of the dataset to list table specs from. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Mask specifying which fields to read. 
+ FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // Filter expression, see go/filtering. + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. The server can return fewer results than requested. + // If unspecified, the server will pick a default size. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results for the server to return. + // Typically obtained from the + // [ListTableSpecsResponse.next_page_token][google.cloud.automl.v1beta1.ListTableSpecsResponse.next_page_token] field of the previous + // [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs] call. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTableSpecsRequest) Reset() { *m = ListTableSpecsRequest{} } +func (m *ListTableSpecsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTableSpecsRequest) ProtoMessage() {} +func (*ListTableSpecsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{10} +} +func (m *ListTableSpecsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTableSpecsRequest.Unmarshal(m, b) +} +func (m *ListTableSpecsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTableSpecsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTableSpecsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTableSpecsRequest.Merge(dst, src) +} +func (m *ListTableSpecsRequest) XXX_Size() int { + return xxx_messageInfo_ListTableSpecsRequest.Size(m) +} +func (m *ListTableSpecsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTableSpecsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTableSpecsRequest proto.InternalMessageInfo + +func (m *ListTableSpecsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTableSpecsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListTableSpecsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListTableSpecsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTableSpecsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [AutoMl.ListTableSpecs][google.cloud.automl.v1beta1.AutoMl.ListTableSpecs]. +type ListTableSpecsResponse struct { + // The table specs read. + TableSpecs []*TableSpec `protobuf:"bytes,1,rep,name=table_specs,json=tableSpecs,proto3" json:"table_specs,omitempty"` + // A token to retrieve next page of results. + // Pass to [ListTableSpecsRequest.page_token][google.cloud.automl.v1beta1.ListTableSpecsRequest.page_token] to obtain that page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTableSpecsResponse) Reset() { *m = ListTableSpecsResponse{} } +func (m *ListTableSpecsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTableSpecsResponse) ProtoMessage() {} +func (*ListTableSpecsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{11} +} +func (m *ListTableSpecsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTableSpecsResponse.Unmarshal(m, b) +} +func (m *ListTableSpecsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTableSpecsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTableSpecsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTableSpecsResponse.Merge(dst, src) +} +func (m *ListTableSpecsResponse) XXX_Size() int { + return xxx_messageInfo_ListTableSpecsResponse.Size(m) +} +func (m *ListTableSpecsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTableSpecsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTableSpecsResponse proto.InternalMessageInfo + +func (m *ListTableSpecsResponse) GetTableSpecs() []*TableSpec { + if m != nil { + return m.TableSpecs + } + return nil +} + +func (m *ListTableSpecsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [AutoMl.UpdateTableSpec][google.cloud.automl.v1beta1.AutoMl.UpdateTableSpec] +type UpdateTableSpecRequest struct { + // The table spec which replaces the resource on the server. + TableSpec *TableSpec `protobuf:"bytes,1,opt,name=table_spec,json=tableSpec,proto3" json:"table_spec,omitempty"` + // The update mask applies to the resource. 
For the `FieldMask` definition, + // see + // + // https: + // //developers.google.com/protocol-buffers + // // /docs/reference/google.protobuf#fieldmask + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTableSpecRequest) Reset() { *m = UpdateTableSpecRequest{} } +func (m *UpdateTableSpecRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTableSpecRequest) ProtoMessage() {} +func (*UpdateTableSpecRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{12} +} +func (m *UpdateTableSpecRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTableSpecRequest.Unmarshal(m, b) +} +func (m *UpdateTableSpecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTableSpecRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTableSpecRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTableSpecRequest.Merge(dst, src) +} +func (m *UpdateTableSpecRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTableSpecRequest.Size(m) +} +func (m *UpdateTableSpecRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTableSpecRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTableSpecRequest proto.InternalMessageInfo + +func (m *UpdateTableSpecRequest) GetTableSpec() *TableSpec { + if m != nil { + return m.TableSpec + } + return nil +} + +func (m *UpdateTableSpecRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for [AutoMl.GetColumnSpec][google.cloud.automl.v1beta1.AutoMl.GetColumnSpec]. +type GetColumnSpecRequest struct { + // The resource name of the column spec to retrieve. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mask specifying which fields to read. 
+ FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetColumnSpecRequest) Reset() { *m = GetColumnSpecRequest{} } +func (m *GetColumnSpecRequest) String() string { return proto.CompactTextString(m) } +func (*GetColumnSpecRequest) ProtoMessage() {} +func (*GetColumnSpecRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{13} +} +func (m *GetColumnSpecRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetColumnSpecRequest.Unmarshal(m, b) +} +func (m *GetColumnSpecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetColumnSpecRequest.Marshal(b, m, deterministic) +} +func (dst *GetColumnSpecRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetColumnSpecRequest.Merge(dst, src) +} +func (m *GetColumnSpecRequest) XXX_Size() int { + return xxx_messageInfo_GetColumnSpecRequest.Size(m) +} +func (m *GetColumnSpecRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetColumnSpecRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetColumnSpecRequest proto.InternalMessageInfo + +func (m *GetColumnSpecRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetColumnSpecRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +// Request message for [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. +type ListColumnSpecsRequest struct { + // The resource name of the table spec to list column specs from. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Mask specifying which fields to read. + FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // Filter expression, see go/filtering. + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. The server can return fewer results than requested. + // If unspecified, the server will pick a default size. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results for the server to return. + // Typically obtained from the + // [ListColumnSpecsResponse.next_page_token][google.cloud.automl.v1beta1.ListColumnSpecsResponse.next_page_token] field of the previous + // [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs] call. 
+ PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListColumnSpecsRequest) Reset() { *m = ListColumnSpecsRequest{} } +func (m *ListColumnSpecsRequest) String() string { return proto.CompactTextString(m) } +func (*ListColumnSpecsRequest) ProtoMessage() {} +func (*ListColumnSpecsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{14} +} +func (m *ListColumnSpecsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListColumnSpecsRequest.Unmarshal(m, b) +} +func (m *ListColumnSpecsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListColumnSpecsRequest.Marshal(b, m, deterministic) +} +func (dst *ListColumnSpecsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListColumnSpecsRequest.Merge(dst, src) +} +func (m *ListColumnSpecsRequest) XXX_Size() int { + return xxx_messageInfo_ListColumnSpecsRequest.Size(m) +} +func (m *ListColumnSpecsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListColumnSpecsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListColumnSpecsRequest proto.InternalMessageInfo + +func (m *ListColumnSpecsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListColumnSpecsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListColumnSpecsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListColumnSpecsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListColumnSpecsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [AutoMl.ListColumnSpecs][google.cloud.automl.v1beta1.AutoMl.ListColumnSpecs]. +type ListColumnSpecsResponse struct { + // The column specs read. + ColumnSpecs []*ColumnSpec `protobuf:"bytes,1,rep,name=column_specs,json=columnSpecs,proto3" json:"column_specs,omitempty"` + // A token to retrieve next page of results. + // Pass to [ListColumnSpecsRequest.page_token][google.cloud.automl.v1beta1.ListColumnSpecsRequest.page_token] to obtain that page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListColumnSpecsResponse) Reset() { *m = ListColumnSpecsResponse{} } +func (m *ListColumnSpecsResponse) String() string { return proto.CompactTextString(m) } +func (*ListColumnSpecsResponse) ProtoMessage() {} +func (*ListColumnSpecsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{15} +} +func (m *ListColumnSpecsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListColumnSpecsResponse.Unmarshal(m, b) +} +func (m *ListColumnSpecsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListColumnSpecsResponse.Marshal(b, m, deterministic) +} +func (dst *ListColumnSpecsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListColumnSpecsResponse.Merge(dst, src) +} +func (m *ListColumnSpecsResponse) XXX_Size() int { + return xxx_messageInfo_ListColumnSpecsResponse.Size(m) +} +func (m *ListColumnSpecsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListColumnSpecsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListColumnSpecsResponse proto.InternalMessageInfo + +func (m *ListColumnSpecsResponse) GetColumnSpecs() []*ColumnSpec { + if m != nil { + return m.ColumnSpecs + } + return nil +} + +func (m *ListColumnSpecsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [AutoMl.UpdateColumnSpec][google.cloud.automl.v1beta1.AutoMl.UpdateColumnSpec] +type UpdateColumnSpecRequest struct { + // The column spec which replaces the resource on the server. + ColumnSpec *ColumnSpec `protobuf:"bytes,1,opt,name=column_spec,json=columnSpec,proto3" json:"column_spec,omitempty"` + // The update mask applies to the resource. 
For the `FieldMask` definition, + // see + // + // https: + // //developers.google.com/protocol-buffers + // // /docs/reference/google.protobuf#fieldmask + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateColumnSpecRequest) Reset() { *m = UpdateColumnSpecRequest{} } +func (m *UpdateColumnSpecRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateColumnSpecRequest) ProtoMessage() {} +func (*UpdateColumnSpecRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{16} +} +func (m *UpdateColumnSpecRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateColumnSpecRequest.Unmarshal(m, b) +} +func (m *UpdateColumnSpecRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateColumnSpecRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateColumnSpecRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateColumnSpecRequest.Merge(dst, src) +} +func (m *UpdateColumnSpecRequest) XXX_Size() int { + return xxx_messageInfo_UpdateColumnSpecRequest.Size(m) +} +func (m *UpdateColumnSpecRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateColumnSpecRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateColumnSpecRequest proto.InternalMessageInfo + +func (m *UpdateColumnSpecRequest) GetColumnSpec() *ColumnSpec { + if m != nil { + return m.ColumnSpec + } + return nil +} + +func (m *UpdateColumnSpecRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel]. +type CreateModelRequest struct { + // Resource name of the parent project where the model is being created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The model to create. 
+ Model *Model `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateModelRequest) Reset() { *m = CreateModelRequest{} } +func (m *CreateModelRequest) String() string { return proto.CompactTextString(m) } +func (*CreateModelRequest) ProtoMessage() {} +func (*CreateModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{17} +} +func (m *CreateModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateModelRequest.Unmarshal(m, b) +} +func (m *CreateModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateModelRequest.Marshal(b, m, deterministic) +} +func (dst *CreateModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateModelRequest.Merge(dst, src) +} +func (m *CreateModelRequest) XXX_Size() int { + return xxx_messageInfo_CreateModelRequest.Size(m) +} +func (m *CreateModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateModelRequest proto.InternalMessageInfo + +func (m *CreateModelRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateModelRequest) GetModel() *Model { + if m != nil { + return m.Model + } + return nil +} + +// Request message for [AutoMl.GetModel][google.cloud.automl.v1beta1.AutoMl.GetModel]. +type GetModelRequest struct { + // Resource name of the model. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetModelRequest) Reset() { *m = GetModelRequest{} } +func (m *GetModelRequest) String() string { return proto.CompactTextString(m) } +func (*GetModelRequest) ProtoMessage() {} +func (*GetModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{18} +} +func (m *GetModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetModelRequest.Unmarshal(m, b) +} +func (m *GetModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetModelRequest.Marshal(b, m, deterministic) +} +func (dst *GetModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetModelRequest.Merge(dst, src) +} +func (m *GetModelRequest) XXX_Size() int { + return xxx_messageInfo_GetModelRequest.Size(m) +} +func (m *GetModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetModelRequest proto.InternalMessageInfo + +func (m *GetModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. +type ListModelsRequest struct { + // Resource name of the project, from which to list the models. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // An expression for filtering the results of the request. + // + // * `model_metadata` - for existence of the case (e.g. + // video_classification_model_metadata:*). + // * `dataset_id` - for = or !=. Some examples of using the filter are: + // + // * `image_classification_model_metadata:*` --> The model has + // image_classification_model_metadata. + // * `dataset_id=5` --> The model was created from a dataset with ID 5. 
+ Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListModelsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelsResponse.next_page_token] of the previous + // [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels] call. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsRequest) Reset() { *m = ListModelsRequest{} } +func (m *ListModelsRequest) String() string { return proto.CompactTextString(m) } +func (*ListModelsRequest) ProtoMessage() {} +func (*ListModelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{19} +} +func (m *ListModelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsRequest.Unmarshal(m, b) +} +func (m *ListModelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsRequest.Marshal(b, m, deterministic) +} +func (dst *ListModelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsRequest.Merge(dst, src) +} +func (m *ListModelsRequest) XXX_Size() int { + return xxx_messageInfo_ListModelsRequest.Size(m) +} +func (m *ListModelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsRequest proto.InternalMessageInfo + +func (m *ListModelsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListModelsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListModelsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListModelsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [AutoMl.ListModels][google.cloud.automl.v1beta1.AutoMl.ListModels]. +type ListModelsResponse struct { + // List of models in the requested page. + Model []*Model `protobuf:"bytes,1,rep,name=model,proto3" json:"model,omitempty"` + // A token to retrieve next page of results. + // Pass to [ListModelsRequest.page_token][google.cloud.automl.v1beta1.ListModelsRequest.page_token] to obtain that page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsResponse) Reset() { *m = ListModelsResponse{} } +func (m *ListModelsResponse) String() string { return proto.CompactTextString(m) } +func (*ListModelsResponse) ProtoMessage() {} +func (*ListModelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{20} +} +func (m *ListModelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsResponse.Unmarshal(m, b) +} +func (m *ListModelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsResponse.Marshal(b, m, deterministic) +} +func (dst *ListModelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsResponse.Merge(dst, src) +} +func (m *ListModelsResponse) XXX_Size() int { + return xxx_messageInfo_ListModelsResponse.Size(m) +} +func (m *ListModelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsResponse proto.InternalMessageInfo + +func (m *ListModelsResponse) GetModel() []*Model { + if m != nil { + return m.Model + } + return nil +} + +func (m *ListModelsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [AutoMl.DeleteModel][google.cloud.automl.v1beta1.AutoMl.DeleteModel]. +type DeleteModelRequest struct { + // Resource name of the model being deleted. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteModelRequest) Reset() { *m = DeleteModelRequest{} } +func (m *DeleteModelRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteModelRequest) ProtoMessage() {} +func (*DeleteModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{21} +} +func (m *DeleteModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteModelRequest.Unmarshal(m, b) +} +func (m *DeleteModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteModelRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteModelRequest.Merge(dst, src) +} +func (m *DeleteModelRequest) XXX_Size() int { + return xxx_messageInfo_DeleteModelRequest.Size(m) +} +func (m *DeleteModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteModelRequest proto.InternalMessageInfo + +func (m *DeleteModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.DeployModel][google.cloud.automl.v1beta1.AutoMl.DeployModel]. +type DeployModelRequest struct { + // The per-domain specific deployment parameters. + // + // Types that are valid to be assigned to ModelDeploymentMetadata: + // *DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata + ModelDeploymentMetadata isDeployModelRequest_ModelDeploymentMetadata `protobuf_oneof:"model_deployment_metadata"` + // Resource name of the model to deploy. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeployModelRequest) Reset() { *m = DeployModelRequest{} } +func (m *DeployModelRequest) String() string { return proto.CompactTextString(m) } +func (*DeployModelRequest) ProtoMessage() {} +func (*DeployModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{22} +} +func (m *DeployModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeployModelRequest.Unmarshal(m, b) +} +func (m *DeployModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeployModelRequest.Marshal(b, m, deterministic) +} +func (dst *DeployModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeployModelRequest.Merge(dst, src) +} +func (m *DeployModelRequest) XXX_Size() int { + return xxx_messageInfo_DeployModelRequest.Size(m) +} +func (m *DeployModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeployModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeployModelRequest proto.InternalMessageInfo + +type isDeployModelRequest_ModelDeploymentMetadata interface { + isDeployModelRequest_ModelDeploymentMetadata() +} + +type DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata struct { + ImageObjectDetectionModelDeploymentMetadata *ImageObjectDetectionModelDeploymentMetadata `protobuf:"bytes,2,opt,name=image_object_detection_model_deployment_metadata,json=imageObjectDetectionModelDeploymentMetadata,proto3,oneof"` +} + +func (*DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata) isDeployModelRequest_ModelDeploymentMetadata() { +} + +func (m *DeployModelRequest) GetModelDeploymentMetadata() isDeployModelRequest_ModelDeploymentMetadata { + if m != nil { + return m.ModelDeploymentMetadata + } + return nil +} + +func (m *DeployModelRequest) GetImageObjectDetectionModelDeploymentMetadata() *ImageObjectDetectionModelDeploymentMetadata { + if x, ok := m.GetModelDeploymentMetadata().(*DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata); ok { + return x.ImageObjectDetectionModelDeploymentMetadata + } + return nil +} + +func (m *DeployModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DeployModelRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DeployModelRequest_OneofMarshaler, _DeployModelRequest_OneofUnmarshaler, _DeployModelRequest_OneofSizer, []interface{}{ + (*DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata)(nil), + } +} + +func _DeployModelRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DeployModelRequest) + // model_deployment_metadata + switch x := m.ModelDeploymentMetadata.(type) { + case *DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageObjectDetectionModelDeploymentMetadata); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DeployModelRequest.ModelDeploymentMetadata has unexpected type %T", x) + } + return nil +} + +func _DeployModelRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DeployModelRequest) + switch tag { + case 2: // model_deployment_metadata.image_object_detection_model_deployment_metadata + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageObjectDetectionModelDeploymentMetadata) + err := b.DecodeMessage(msg) + m.ModelDeploymentMetadata = &DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata{msg} + return true, err + default: + return false, nil + } +} + +func _DeployModelRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DeployModelRequest) + // model_deployment_metadata + switch x := m.ModelDeploymentMetadata.(type) { + case *DeployModelRequest_ImageObjectDetectionModelDeploymentMetadata: + s := proto.Size(x.ImageObjectDetectionModelDeploymentMetadata) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for [AutoMl.UndeployModel][google.cloud.automl.v1beta1.AutoMl.UndeployModel]. +type UndeployModelRequest struct { + // Resource name of the model to undeploy. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeployModelRequest) Reset() { *m = UndeployModelRequest{} } +func (m *UndeployModelRequest) String() string { return proto.CompactTextString(m) } +func (*UndeployModelRequest) ProtoMessage() {} +func (*UndeployModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{23} +} +func (m *UndeployModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeployModelRequest.Unmarshal(m, b) +} +func (m *UndeployModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeployModelRequest.Marshal(b, m, deterministic) +} +func (dst *UndeployModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeployModelRequest.Merge(dst, src) +} +func (m *UndeployModelRequest) XXX_Size() int { + return xxx_messageInfo_UndeployModelRequest.Size(m) +} +func (m *UndeployModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeployModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeployModelRequest proto.InternalMessageInfo + +func (m *UndeployModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.ExportModel][google.cloud.automl.v1beta1.AutoMl.ExportModel]. +// Models need to be enabled for exporting, otherwise an error code will be +// returned. +type ExportModelRequest struct { + // Required. The resource name of the model to export. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The desired output location and configuration. + OutputConfig *ModelExportOutputConfig `protobuf:"bytes,3,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportModelRequest) Reset() { *m = ExportModelRequest{} } +func (m *ExportModelRequest) String() string { return proto.CompactTextString(m) } +func (*ExportModelRequest) ProtoMessage() {} +func (*ExportModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{24} +} +func (m *ExportModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportModelRequest.Unmarshal(m, b) +} +func (m *ExportModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportModelRequest.Marshal(b, m, deterministic) +} +func (dst *ExportModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportModelRequest.Merge(dst, src) +} +func (m *ExportModelRequest) XXX_Size() int { + return xxx_messageInfo_ExportModelRequest.Size(m) +} +func (m *ExportModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportModelRequest proto.InternalMessageInfo + +func (m *ExportModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportModelRequest) GetOutputConfig() *ModelExportOutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request message for [AutoMl.ExportEvaluatedExamples][google.cloud.automl.v1beta1.AutoMl.ExportEvaluatedExamples]. +type ExportEvaluatedExamplesRequest struct { + // Required. The resource name of the model whose evaluated examples are to + // be exported. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The desired output location and configuration. + OutputConfig *ExportEvaluatedExamplesOutputConfig `protobuf:"bytes,3,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEvaluatedExamplesRequest) Reset() { *m = ExportEvaluatedExamplesRequest{} } +func (m *ExportEvaluatedExamplesRequest) String() string { return proto.CompactTextString(m) } +func (*ExportEvaluatedExamplesRequest) ProtoMessage() {} +func (*ExportEvaluatedExamplesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{25} +} +func (m *ExportEvaluatedExamplesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEvaluatedExamplesRequest.Unmarshal(m, b) +} +func (m *ExportEvaluatedExamplesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEvaluatedExamplesRequest.Marshal(b, m, deterministic) +} +func (dst *ExportEvaluatedExamplesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEvaluatedExamplesRequest.Merge(dst, src) +} +func (m *ExportEvaluatedExamplesRequest) XXX_Size() int { + return xxx_messageInfo_ExportEvaluatedExamplesRequest.Size(m) +} +func (m *ExportEvaluatedExamplesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEvaluatedExamplesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEvaluatedExamplesRequest proto.InternalMessageInfo + +func (m *ExportEvaluatedExamplesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportEvaluatedExamplesRequest) GetOutputConfig() *ExportEvaluatedExamplesOutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request message for [AutoMl.GetModelEvaluation][google.cloud.automl.v1beta1.AutoMl.GetModelEvaluation]. +type GetModelEvaluationRequest struct { + // Resource name for the model evaluation. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetModelEvaluationRequest) Reset() { *m = GetModelEvaluationRequest{} } +func (m *GetModelEvaluationRequest) String() string { return proto.CompactTextString(m) } +func (*GetModelEvaluationRequest) ProtoMessage() {} +func (*GetModelEvaluationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{26} +} +func (m *GetModelEvaluationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetModelEvaluationRequest.Unmarshal(m, b) +} +func (m *GetModelEvaluationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetModelEvaluationRequest.Marshal(b, m, deterministic) +} +func (dst *GetModelEvaluationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetModelEvaluationRequest.Merge(dst, src) +} +func (m *GetModelEvaluationRequest) XXX_Size() int { + return xxx_messageInfo_GetModelEvaluationRequest.Size(m) +} +func (m *GetModelEvaluationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetModelEvaluationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetModelEvaluationRequest proto.InternalMessageInfo + +func (m *GetModelEvaluationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. +type ListModelEvaluationsRequest struct { + // Resource name of the model to list the model evaluations for. + // If modelId is set as "-", this will list model evaluations from across all + // models of the parent location. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // An expression for filtering the results of the request. + // + // * `annotation_spec_id` - for =, != or existence. See example below for + // the last. + // + // Some examples of using the filter are: + // + // * `annotation_spec_id!=4` --> The model evaluation was done for + // annotation spec with ID different than 4. + // * `NOT annotation_spec_id:*` --> The model evaluation was done for + // aggregate of all annotation specs. + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results for the server to return. + // Typically obtained via + // [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1beta1.ListModelEvaluationsResponse.next_page_token] of the previous + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations] call. 
+ PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelEvaluationsRequest) Reset() { *m = ListModelEvaluationsRequest{} } +func (m *ListModelEvaluationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListModelEvaluationsRequest) ProtoMessage() {} +func (*ListModelEvaluationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{27} +} +func (m *ListModelEvaluationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelEvaluationsRequest.Unmarshal(m, b) +} +func (m *ListModelEvaluationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelEvaluationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListModelEvaluationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelEvaluationsRequest.Merge(dst, src) +} +func (m *ListModelEvaluationsRequest) XXX_Size() int { + return xxx_messageInfo_ListModelEvaluationsRequest.Size(m) +} +func (m *ListModelEvaluationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelEvaluationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelEvaluationsRequest proto.InternalMessageInfo + +func (m *ListModelEvaluationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListModelEvaluationsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListModelEvaluationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListModelEvaluationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations]. +type ListModelEvaluationsResponse struct { + // List of model evaluations in the requested page. + ModelEvaluation []*ModelEvaluation `protobuf:"bytes,1,rep,name=model_evaluation,json=modelEvaluation,proto3" json:"model_evaluation,omitempty"` + // A token to retrieve next page of results. + // Pass to the [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1beta1.ListModelEvaluationsRequest.page_token] field of a new + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1beta1.AutoMl.ListModelEvaluations] request to obtain that page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelEvaluationsResponse) Reset() { *m = ListModelEvaluationsResponse{} } +func (m *ListModelEvaluationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListModelEvaluationsResponse) ProtoMessage() {} +func (*ListModelEvaluationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_49f84df6902cc1f1, []int{28} +} +func (m *ListModelEvaluationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelEvaluationsResponse.Unmarshal(m, b) +} +func (m *ListModelEvaluationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelEvaluationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListModelEvaluationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelEvaluationsResponse.Merge(dst, src) +} +func (m *ListModelEvaluationsResponse) XXX_Size() int { + return xxx_messageInfo_ListModelEvaluationsResponse.Size(m) +} +func (m *ListModelEvaluationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelEvaluationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelEvaluationsResponse proto.InternalMessageInfo + +func (m *ListModelEvaluationsResponse) GetModelEvaluation() []*ModelEvaluation { + if m != nil { + return m.ModelEvaluation + } + return nil +} + +func (m *ListModelEvaluationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*CreateDatasetRequest)(nil), "google.cloud.automl.v1beta1.CreateDatasetRequest") + proto.RegisterType((*GetDatasetRequest)(nil), "google.cloud.automl.v1beta1.GetDatasetRequest") + proto.RegisterType((*ListDatasetsRequest)(nil), "google.cloud.automl.v1beta1.ListDatasetsRequest") + proto.RegisterType((*ListDatasetsResponse)(nil), "google.cloud.automl.v1beta1.ListDatasetsResponse") + proto.RegisterType((*UpdateDatasetRequest)(nil), "google.cloud.automl.v1beta1.UpdateDatasetRequest") + proto.RegisterType((*DeleteDatasetRequest)(nil), "google.cloud.automl.v1beta1.DeleteDatasetRequest") + proto.RegisterType((*ImportDataRequest)(nil), "google.cloud.automl.v1beta1.ImportDataRequest") + proto.RegisterType((*ExportDataRequest)(nil), "google.cloud.automl.v1beta1.ExportDataRequest") + proto.RegisterType((*GetAnnotationSpecRequest)(nil), "google.cloud.automl.v1beta1.GetAnnotationSpecRequest") + proto.RegisterType((*GetTableSpecRequest)(nil), "google.cloud.automl.v1beta1.GetTableSpecRequest") + proto.RegisterType((*ListTableSpecsRequest)(nil), "google.cloud.automl.v1beta1.ListTableSpecsRequest") + proto.RegisterType((*ListTableSpecsResponse)(nil), "google.cloud.automl.v1beta1.ListTableSpecsResponse") + proto.RegisterType((*UpdateTableSpecRequest)(nil), "google.cloud.automl.v1beta1.UpdateTableSpecRequest") + proto.RegisterType((*GetColumnSpecRequest)(nil), "google.cloud.automl.v1beta1.GetColumnSpecRequest") + proto.RegisterType((*ListColumnSpecsRequest)(nil), "google.cloud.automl.v1beta1.ListColumnSpecsRequest") + proto.RegisterType((*ListColumnSpecsResponse)(nil), "google.cloud.automl.v1beta1.ListColumnSpecsResponse") + proto.RegisterType((*UpdateColumnSpecRequest)(nil), "google.cloud.automl.v1beta1.UpdateColumnSpecRequest") + proto.RegisterType((*CreateModelRequest)(nil), 
"google.cloud.automl.v1beta1.CreateModelRequest") + proto.RegisterType((*GetModelRequest)(nil), "google.cloud.automl.v1beta1.GetModelRequest") + proto.RegisterType((*ListModelsRequest)(nil), "google.cloud.automl.v1beta1.ListModelsRequest") + proto.RegisterType((*ListModelsResponse)(nil), "google.cloud.automl.v1beta1.ListModelsResponse") + proto.RegisterType((*DeleteModelRequest)(nil), "google.cloud.automl.v1beta1.DeleteModelRequest") + proto.RegisterType((*DeployModelRequest)(nil), "google.cloud.automl.v1beta1.DeployModelRequest") + proto.RegisterType((*UndeployModelRequest)(nil), "google.cloud.automl.v1beta1.UndeployModelRequest") + proto.RegisterType((*ExportModelRequest)(nil), "google.cloud.automl.v1beta1.ExportModelRequest") + proto.RegisterType((*ExportEvaluatedExamplesRequest)(nil), "google.cloud.automl.v1beta1.ExportEvaluatedExamplesRequest") + proto.RegisterType((*GetModelEvaluationRequest)(nil), "google.cloud.automl.v1beta1.GetModelEvaluationRequest") + proto.RegisterType((*ListModelEvaluationsRequest)(nil), "google.cloud.automl.v1beta1.ListModelEvaluationsRequest") + proto.RegisterType((*ListModelEvaluationsResponse)(nil), "google.cloud.automl.v1beta1.ListModelEvaluationsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AutoMlClient is the client API for AutoMl service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AutoMlClient interface { + // Creates a dataset. + CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Gets a dataset. + GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Lists datasets in a project. + ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) + // Updates a dataset. + UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Deletes a dataset and all of its contents. + // Returns empty response in the + // [response][google.longrunning.Operation.response] field when it completes, + // and `delete_details` in the + // [metadata][google.longrunning.Operation.metadata] field. + DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports data into a dataset. For Tables this method can only be called on an empty Dataset. + // + // For Tables: + // * A + // [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params] + // parameter must be explicitly set. + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ImportData(ctx context.Context, in *ImportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports dataset's data to the provided output location. + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportData(ctx context.Context, in *ExportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets an annotation spec. 
+ GetAnnotationSpec(ctx context.Context, in *GetAnnotationSpecRequest, opts ...grpc.CallOption) (*AnnotationSpec, error) + // Gets a table spec. + GetTableSpec(ctx context.Context, in *GetTableSpecRequest, opts ...grpc.CallOption) (*TableSpec, error) + // Lists table specs in a dataset. + ListTableSpecs(ctx context.Context, in *ListTableSpecsRequest, opts ...grpc.CallOption) (*ListTableSpecsResponse, error) + // Updates a table spec. + UpdateTableSpec(ctx context.Context, in *UpdateTableSpecRequest, opts ...grpc.CallOption) (*TableSpec, error) + // Gets a column spec. + GetColumnSpec(ctx context.Context, in *GetColumnSpecRequest, opts ...grpc.CallOption) (*ColumnSpec, error) + // Lists column specs in a table spec. + ListColumnSpecs(ctx context.Context, in *ListColumnSpecsRequest, opts ...grpc.CallOption) (*ListColumnSpecsResponse, error) + // Updates a column spec. + UpdateColumnSpec(ctx context.Context, in *UpdateColumnSpecRequest, opts ...grpc.CallOption) (*ColumnSpec, error) + // Creates a model. + // Returns a Model in the [response][google.longrunning.Operation.response] + // field when it completes. + // When you create a model, several model evaluations are created for it: + // a global evaluation, and one evaluation for each annotation spec. + CreateModel(ctx context.Context, in *CreateModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets a model. + GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) + // Lists models. + ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) + // Deletes a model. + // Returns `google.protobuf.Empty` in the + // [response][google.longrunning.Operation.response] field when it completes, + // and `delete_details` in the + // [metadata][google.longrunning.Operation.metadata] field. + DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deploys a model. If a model is already deployed, deploying it with the + // same parameters has no effect. Deploying with different parametrs + // (as e.g. changing + // + // [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number] + // ) will update the deployment without pausing the model's availability. + // + // Only applicable for Text Classification, Image Object Detection and Tables; + // all other domains manage deployment automatically. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + DeployModel(ctx context.Context, in *DeployModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Undeploys a model. If the model is not deployed this method has no effect. + // + // Only applicable for Text Classification, Image Object Detection and Tables; + // all other domains manage deployment automatically. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + UndeployModel(ctx context.Context, in *UndeployModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports a trained, "export-able", model to a user specified Google Cloud + // Storage location. A model is considered export-able if and only if it has + // an export format defined for it in + // + // [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig]. 
+ // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportModel(ctx context.Context, in *ExportModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports examples on which the model was evaluated (i.e. which were in the + // TEST set of the dataset the model was created from), together with their + // ground truth annotations and the annotations created (predicted) by the + // model. + // The examples, ground truth and predictions are exported in the state + // they were at the moment the model was evaluated. + // + // This export is available only for 30 days since the model evaluation is + // created. + // + // Currently only available for Tables. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportEvaluatedExamples(ctx context.Context, in *ExportEvaluatedExamplesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets a model evaluation. + GetModelEvaluation(ctx context.Context, in *GetModelEvaluationRequest, opts ...grpc.CallOption) (*ModelEvaluation, error) + // Lists model evaluations. + ListModelEvaluations(ctx context.Context, in *ListModelEvaluationsRequest, opts ...grpc.CallOption) (*ListModelEvaluationsResponse, error) +} + +type autoMlClient struct { + cc *grpc.ClientConn +} + +func NewAutoMlClient(cc *grpc.ClientConn) AutoMlClient { + return &autoMlClient{cc} +} + +func (c *autoMlClient) CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/CreateDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) { + out := new(ListDatasetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ListDatasets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/UpdateDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/DeleteDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ImportData(ctx context.Context, in *ImportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ImportData", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ExportData(ctx context.Context, in *ExportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ExportData", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetAnnotationSpec(ctx context.Context, in *GetAnnotationSpecRequest, opts ...grpc.CallOption) (*AnnotationSpec, error) { + out := new(AnnotationSpec) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetAnnotationSpec", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetTableSpec(ctx context.Context, in *GetTableSpecRequest, opts ...grpc.CallOption) (*TableSpec, error) { + out := new(TableSpec) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetTableSpec", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ListTableSpecs(ctx context.Context, in *ListTableSpecsRequest, opts ...grpc.CallOption) (*ListTableSpecsResponse, error) { + out := new(ListTableSpecsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ListTableSpecs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) UpdateTableSpec(ctx context.Context, in *UpdateTableSpecRequest, opts ...grpc.CallOption) (*TableSpec, error) { + out := new(TableSpec) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/UpdateTableSpec", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetColumnSpec(ctx context.Context, in *GetColumnSpecRequest, opts ...grpc.CallOption) (*ColumnSpec, error) { + out := new(ColumnSpec) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetColumnSpec", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ListColumnSpecs(ctx context.Context, in *ListColumnSpecsRequest, opts ...grpc.CallOption) (*ListColumnSpecsResponse, error) { + out := new(ListColumnSpecsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ListColumnSpecs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) UpdateColumnSpec(ctx context.Context, in *UpdateColumnSpecRequest, opts ...grpc.CallOption) (*ColumnSpec, error) { + out := new(ColumnSpec) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/UpdateColumnSpec", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) CreateModel(ctx context.Context, in *CreateModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/CreateModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) { + out := new(Model) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetModel", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) { + out := new(ListModelsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ListModels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/DeleteModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) DeployModel(ctx context.Context, in *DeployModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/DeployModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) UndeployModel(ctx context.Context, in *UndeployModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/UndeployModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ExportModel(ctx context.Context, in *ExportModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ExportModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ExportEvaluatedExamples(ctx context.Context, in *ExportEvaluatedExamplesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ExportEvaluatedExamples", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) GetModelEvaluation(ctx context.Context, in *GetModelEvaluationRequest, opts ...grpc.CallOption) (*ModelEvaluation, error) { + out := new(ModelEvaluation) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/GetModelEvaluation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *autoMlClient) ListModelEvaluations(ctx context.Context, in *ListModelEvaluationsRequest, opts ...grpc.CallOption) (*ListModelEvaluationsResponse, error) { + out := new(ListModelEvaluationsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.automl.v1beta1.AutoMl/ListModelEvaluations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AutoMlServer is the server API for AutoMl service. +type AutoMlServer interface { + // Creates a dataset. + CreateDataset(context.Context, *CreateDatasetRequest) (*Dataset, error) + // Gets a dataset. + GetDataset(context.Context, *GetDatasetRequest) (*Dataset, error) + // Lists datasets in a project. + ListDatasets(context.Context, *ListDatasetsRequest) (*ListDatasetsResponse, error) + // Updates a dataset. + UpdateDataset(context.Context, *UpdateDatasetRequest) (*Dataset, error) + // Deletes a dataset and all of its contents. 
+ // Returns empty response in the + // [response][google.longrunning.Operation.response] field when it completes, + // and `delete_details` in the + // [metadata][google.longrunning.Operation.metadata] field. + DeleteDataset(context.Context, *DeleteDatasetRequest) (*longrunning.Operation, error) + // Imports data into a dataset. For Tables this method can only be called on an empty Dataset. + // + // For Tables: + // * A + // [schema_inference_version][google.cloud.automl.v1beta1.InputConfig.params] + // parameter must be explicitly set. + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ImportData(context.Context, *ImportDataRequest) (*longrunning.Operation, error) + // Exports dataset's data to the provided output location. + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportData(context.Context, *ExportDataRequest) (*longrunning.Operation, error) + // Gets an annotation spec. + GetAnnotationSpec(context.Context, *GetAnnotationSpecRequest) (*AnnotationSpec, error) + // Gets a table spec. + GetTableSpec(context.Context, *GetTableSpecRequest) (*TableSpec, error) + // Lists table specs in a dataset. + ListTableSpecs(context.Context, *ListTableSpecsRequest) (*ListTableSpecsResponse, error) + // Updates a table spec. + UpdateTableSpec(context.Context, *UpdateTableSpecRequest) (*TableSpec, error) + // Gets a column spec. + GetColumnSpec(context.Context, *GetColumnSpecRequest) (*ColumnSpec, error) + // Lists column specs in a table spec. + ListColumnSpecs(context.Context, *ListColumnSpecsRequest) (*ListColumnSpecsResponse, error) + // Updates a column spec. + UpdateColumnSpec(context.Context, *UpdateColumnSpecRequest) (*ColumnSpec, error) + // Creates a model. + // Returns a Model in the [response][google.longrunning.Operation.response] + // field when it completes. + // When you create a model, several model evaluations are created for it: + // a global evaluation, and one evaluation for each annotation spec. + CreateModel(context.Context, *CreateModelRequest) (*longrunning.Operation, error) + // Gets a model. + GetModel(context.Context, *GetModelRequest) (*Model, error) + // Lists models. + ListModels(context.Context, *ListModelsRequest) (*ListModelsResponse, error) + // Deletes a model. + // Returns `google.protobuf.Empty` in the + // [response][google.longrunning.Operation.response] field when it completes, + // and `delete_details` in the + // [metadata][google.longrunning.Operation.metadata] field. + DeleteModel(context.Context, *DeleteModelRequest) (*longrunning.Operation, error) + // Deploys a model. If a model is already deployed, deploying it with the + // same parameters has no effect. Deploying with different parametrs + // (as e.g. changing + // + // [node_number][google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadata.node_number] + // ) will update the deployment without pausing the model's availability. + // + // Only applicable for Text Classification, Image Object Detection and Tables; + // all other domains manage deployment automatically. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + DeployModel(context.Context, *DeployModelRequest) (*longrunning.Operation, error) + // Undeploys a model. If the model is not deployed this method has no effect. 
+ // + // Only applicable for Text Classification, Image Object Detection and Tables; + // all other domains manage deployment automatically. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + UndeployModel(context.Context, *UndeployModelRequest) (*longrunning.Operation, error) + // Exports a trained, "export-able", model to a user specified Google Cloud + // Storage location. A model is considered export-able if and only if it has + // an export format defined for it in + // + // [ModelExportOutputConfig][google.cloud.automl.v1beta1.ModelExportOutputConfig]. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportModel(context.Context, *ExportModelRequest) (*longrunning.Operation, error) + // Exports examples on which the model was evaluated (i.e. which were in the + // TEST set of the dataset the model was created from), together with their + // ground truth annotations and the annotations created (predicted) by the + // model. + // The examples, ground truth and predictions are exported in the state + // they were at the moment the model was evaluated. + // + // This export is available only for 30 days since the model evaluation is + // created. + // + // Currently only available for Tables. + // + // Returns an empty response in the + // [response][google.longrunning.Operation.response] field when it completes. + ExportEvaluatedExamples(context.Context, *ExportEvaluatedExamplesRequest) (*longrunning.Operation, error) + // Gets a model evaluation. + GetModelEvaluation(context.Context, *GetModelEvaluationRequest) (*ModelEvaluation, error) + // Lists model evaluations. + ListModelEvaluations(context.Context, *ListModelEvaluationsRequest) (*ListModelEvaluationsResponse, error) +} + +func RegisterAutoMlServer(s *grpc.Server, srv AutoMlServer) { + s.RegisterService(&_AutoMl_serviceDesc, srv) +} + +func _AutoMl_CreateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).CreateDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/CreateDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).CreateDataset(ctx, req.(*CreateDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetDataset(ctx, req.(*GetDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ListDatasets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDatasetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(AutoMlServer).ListDatasets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ListDatasets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ListDatasets(ctx, req.(*ListDatasetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_UpdateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).UpdateDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/UpdateDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).UpdateDataset(ctx, req.(*UpdateDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_DeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).DeleteDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/DeleteDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).DeleteDataset(ctx, req.(*DeleteDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ImportData_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportDataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ImportData(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ImportData", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ImportData(ctx, req.(*ImportDataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ExportData_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportDataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ExportData(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ExportData", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ExportData(ctx, req.(*ExportDataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetAnnotationSpec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAnnotationSpecRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetAnnotationSpec(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetAnnotationSpec", + } + handler := func(ctx context.Context, req 
interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetAnnotationSpec(ctx, req.(*GetAnnotationSpecRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetTableSpec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTableSpecRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetTableSpec(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetTableSpec", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetTableSpec(ctx, req.(*GetTableSpecRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ListTableSpecs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTableSpecsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ListTableSpecs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ListTableSpecs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ListTableSpecs(ctx, req.(*ListTableSpecsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_UpdateTableSpec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTableSpecRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).UpdateTableSpec(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/UpdateTableSpec", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).UpdateTableSpec(ctx, req.(*UpdateTableSpecRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetColumnSpec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetColumnSpecRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetColumnSpec(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetColumnSpec", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetColumnSpec(ctx, req.(*GetColumnSpecRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ListColumnSpecs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListColumnSpecsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ListColumnSpecs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ListColumnSpecs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ListColumnSpecs(ctx, req.(*ListColumnSpecsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_AutoMl_UpdateColumnSpec_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateColumnSpecRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).UpdateColumnSpec(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/UpdateColumnSpec", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).UpdateColumnSpec(ctx, req.(*UpdateColumnSpecRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_CreateModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).CreateModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/CreateModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).CreateModel(ctx, req.(*CreateModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetModel(ctx, req.(*GetModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ListModels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListModelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ListModels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ListModels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ListModels(ctx, req.(*ListModelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_DeleteModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).DeleteModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/DeleteModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).DeleteModel(ctx, req.(*DeleteModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_DeployModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeployModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + 
return srv.(AutoMlServer).DeployModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/DeployModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).DeployModel(ctx, req.(*DeployModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_UndeployModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeployModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).UndeployModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/UndeployModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).UndeployModel(ctx, req.(*UndeployModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ExportModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ExportModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ExportModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ExportModel(ctx, req.(*ExportModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ExportEvaluatedExamples_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportEvaluatedExamplesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ExportEvaluatedExamples(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/ExportEvaluatedExamples", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ExportEvaluatedExamples(ctx, req.(*ExportEvaluatedExamplesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_GetModelEvaluation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetModelEvaluationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).GetModelEvaluation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.automl.v1beta1.AutoMl/GetModelEvaluation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).GetModelEvaluation(ctx, req.(*GetModelEvaluationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AutoMl_ListModelEvaluations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListModelEvaluationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AutoMlServer).ListModelEvaluations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.automl.v1beta1.AutoMl/ListModelEvaluations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AutoMlServer).ListModelEvaluations(ctx, req.(*ListModelEvaluationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AutoMl_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.automl.v1beta1.AutoMl", + HandlerType: (*AutoMlServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateDataset", + Handler: _AutoMl_CreateDataset_Handler, + }, + { + MethodName: "GetDataset", + Handler: _AutoMl_GetDataset_Handler, + }, + { + MethodName: "ListDatasets", + Handler: _AutoMl_ListDatasets_Handler, + }, + { + MethodName: "UpdateDataset", + Handler: _AutoMl_UpdateDataset_Handler, + }, + { + MethodName: "DeleteDataset", + Handler: _AutoMl_DeleteDataset_Handler, + }, + { + MethodName: "ImportData", + Handler: _AutoMl_ImportData_Handler, + }, + { + MethodName: "ExportData", + Handler: _AutoMl_ExportData_Handler, + }, + { + MethodName: "GetAnnotationSpec", + Handler: _AutoMl_GetAnnotationSpec_Handler, + }, + { + MethodName: "GetTableSpec", + Handler: _AutoMl_GetTableSpec_Handler, + }, + { + MethodName: "ListTableSpecs", + Handler: _AutoMl_ListTableSpecs_Handler, + }, + { + MethodName: "UpdateTableSpec", + Handler: _AutoMl_UpdateTableSpec_Handler, + }, + { + MethodName: "GetColumnSpec", + Handler: _AutoMl_GetColumnSpec_Handler, + }, + { + MethodName: "ListColumnSpecs", + Handler: _AutoMl_ListColumnSpecs_Handler, + }, + { + MethodName: "UpdateColumnSpec", + Handler: _AutoMl_UpdateColumnSpec_Handler, + }, + { + MethodName: "CreateModel", + Handler: _AutoMl_CreateModel_Handler, + }, + { + MethodName: "GetModel", + Handler: _AutoMl_GetModel_Handler, + }, + { + MethodName: "ListModels", + Handler: _AutoMl_ListModels_Handler, + }, + { + MethodName: "DeleteModel", + Handler: _AutoMl_DeleteModel_Handler, + }, + { + MethodName: "DeployModel", + Handler: _AutoMl_DeployModel_Handler, + }, + { + MethodName: "UndeployModel", + Handler: _AutoMl_UndeployModel_Handler, + }, + { + MethodName: "ExportModel", + Handler: _AutoMl_ExportModel_Handler, + }, + { + MethodName: "ExportEvaluatedExamples", + Handler: _AutoMl_ExportEvaluatedExamples_Handler, + }, + { + MethodName: "GetModelEvaluation", + Handler: _AutoMl_GetModelEvaluation_Handler, + }, + { + MethodName: "ListModelEvaluations", + Handler: _AutoMl_ListModelEvaluations_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/automl/v1beta1/service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/service.proto", fileDescriptor_service_49f84df6902cc1f1) +} + +var fileDescriptor_service_49f84df6902cc1f1 = []byte{ + // 1776 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x59, 0xcd, 0x6f, 0x14, 0x47, + 0x16, 0xdf, 0x32, 0xe0, 0xc5, 0x6f, 0xec, 0x35, 0x2e, 0xbc, 0xc6, 0xb4, 0x61, 0xd7, 0xea, 0x65, + 0xc1, 0x18, 0x76, 0x1a, 0x8f, 0xbd, 0xac, 0x3f, 0x00, 0xe1, 0x2f, 0x86, 0x2f, 0x03, 0x32, 0xb0, + 0xec, 0x46, 0x44, 0xa3, 0xf6, 0x4c, 0x79, 0xd4, 0xa1, 0xa7, 0xab, 0x33, 0xdd, 0xe3, 0x18, 0x22, + 0x94, 0xaf, 0x13, 0x87, 0x44, 0x8a, 0x50, 0x72, 0x09, 0x51, 0x38, 0x44, 0x9c, 0xa2, 0x28, 0x91, + 0x72, 0x4e, 0x14, 0x29, 0x8a, 0x94, 0x48, 0xb9, 0x24, 0x87, 0x5c, 0x73, 0xc8, 0x3f, 0x91, 0x5b, + 0xd4, 0x55, 0xd5, 0x1f, 0x33, 0xdd, 0xee, 0xae, 0x19, 0xcb, 0x42, 0xb9, 0xb9, 0xdb, 0xef, 0xd5, + 0xfb, 0xbd, 0xaf, 0xea, 0xf7, 0x7b, 0x03, 0xc7, 0xab, 0x94, 0x56, 0x4d, 0xa2, 0x95, 0x4d, 0xda, + 
0xa8, 0x68, 0x7a, 0xc3, 0xa5, 0x35, 0x53, 0xdb, 0x98, 0x58, 0x23, 0xae, 0x3e, 0xa1, 0x39, 0xa4, + 0xbe, 0x61, 0x94, 0x49, 0xde, 0xae, 0x53, 0x97, 0xe2, 0x11, 0x2e, 0x9a, 0x67, 0xa2, 0x79, 0x2e, + 0x9a, 0x17, 0xa2, 0xca, 0x21, 0x71, 0x8e, 0x6e, 0x1b, 0x9a, 0x6e, 0x59, 0xd4, 0xd5, 0x5d, 0x83, + 0x5a, 0x0e, 0x57, 0x55, 0xa6, 0xd2, 0xac, 0x84, 0xe2, 0x25, 0x5b, 0xbf, 0x6f, 0x52, 0xbd, 0x22, + 0xb4, 0xfe, 0x95, 0xa6, 0x55, 0xa6, 0x66, 0xa3, 0x66, 0x95, 0x1c, 0x9b, 0x94, 0x85, 0x78, 0xaa, + 0x2b, 0x15, 0xdd, 0xd5, 0x1d, 0xe2, 0x0a, 0xd1, 0x63, 0x69, 0xa2, 0x46, 0x4d, 0xaf, 0x0a, 0x9f, + 0x95, 0x23, 0xa9, 0x82, 0x54, 0xe6, 0xb8, 0x1a, 0xad, 0x10, 0x53, 0x08, 0x16, 0x32, 0x05, 0x4b, + 0x64, 0x43, 0x37, 0x1b, 0x2c, 0x1a, 0x42, 0xe7, 0x64, 0x9a, 0x0e, 0xb5, 0x49, 0xbd, 0x29, 0xd2, + 0xa9, 0xd2, 0xae, 0xbe, 0x66, 0x92, 0x68, 0xc8, 0xfe, 0x21, 0xa4, 0x4d, 0x6a, 0x55, 0xeb, 0x0d, + 0xcb, 0x32, 0xac, 0x6a, 0xfc, 0xc8, 0x51, 0x21, 0xc4, 0x9e, 0xd6, 0x1a, 0xeb, 0xda, 0xba, 0x41, + 0xcc, 0x4a, 0xa9, 0xa6, 0x3b, 0xf7, 0xb8, 0x84, 0x6a, 0xc1, 0xe0, 0x62, 0x9d, 0xe8, 0x2e, 0x59, + 0xe2, 0x51, 0x5e, 0x25, 0x2f, 0x37, 0x88, 0xe3, 0xe2, 0x21, 0xe8, 0xb6, 0xf5, 0x3a, 0xb1, 0xdc, + 0x61, 0x34, 0x8a, 0xc6, 0x7a, 0x56, 0xc5, 0x13, 0x3e, 0x07, 0x7f, 0x16, 0xf9, 0x18, 0xee, 0x1a, + 0x45, 0x63, 0xb9, 0xc2, 0x91, 0x7c, 0x4a, 0x6d, 0xe5, 0xfd, 0x53, 0x7d, 0x25, 0xf5, 0x18, 0x0c, + 0x14, 0x89, 0xdb, 0x62, 0x0c, 0xc3, 0x6e, 0x4b, 0xaf, 0x11, 0x61, 0x8a, 0xfd, 0xad, 0xbe, 0x81, + 0x60, 0xff, 0x55, 0xc3, 0xf1, 0x45, 0x9d, 0x2c, 0x60, 0x43, 0xd0, 0xbd, 0x6e, 0x98, 0x2e, 0xa9, + 0x0f, 0xef, 0xe2, 0xef, 0xf9, 0x13, 0x1e, 0x81, 0x1e, 0x5b, 0xaf, 0x92, 0x92, 0x63, 0x3c, 0x20, + 0xc3, 0xbb, 0x47, 0xd1, 0xd8, 0x9e, 0xd5, 0xbd, 0xde, 0x8b, 0x9b, 0xc6, 0x03, 0x82, 0x0f, 0x03, + 0xb0, 0x7f, 0xba, 0xf4, 0x1e, 0xb1, 0x86, 0xbb, 0x99, 0x22, 0x13, 0xbf, 0xe5, 0xbd, 0x50, 0x5f, + 0x47, 0x30, 0xd8, 0x8c, 0xc1, 0xb1, 0xa9, 0xe5, 0x10, 0x7c, 0x1e, 0xf6, 0x0a, 0x87, 0x9c, 0x61, + 0x34, 0xba, 0x4b, 0x3a, 0x0c, 0x81, 0x16, 0x3e, 0x0a, 0xfd, 0x16, 0xd9, 0x74, 0x4b, 0x11, 0xf3, + 0x5d, 0xcc, 0x7c, 0x9f, 0xf7, 0xfa, 0x46, 0x00, 0xe1, 0x31, 0x82, 0xc1, 0xdb, 0x76, 0x25, 0x9e, + 0xa0, 0x48, 0x22, 0x50, 0x07, 0x89, 0xc0, 0x73, 0x90, 0x6b, 0xb0, 0x73, 0x59, 0x35, 0x88, 0x64, + 0x2a, 0xfe, 0x19, 0x7e, 0xc1, 0xe4, 0x2f, 0x78, 0x05, 0xb3, 0xa2, 0x3b, 0xf7, 0x56, 0x81, 0x8b, + 0x7b, 0x7f, 0xab, 0xe3, 0x30, 0xb8, 0x44, 0x4c, 0x12, 0x03, 0x95, 0x94, 0x48, 0x17, 0x06, 0x2e, + 0xd5, 0x6c, 0x5a, 0x67, 0x51, 0x4c, 0x11, 0xc4, 0x57, 0xa0, 0xd7, 0xb0, 0xec, 0x86, 0x5b, 0x2a, + 0x53, 0x6b, 0xdd, 0xa8, 0xb2, 0x3c, 0xe6, 0x0a, 0x63, 0xa9, 0x6e, 0x5d, 0xf2, 0x14, 0x16, 0x99, + 0xfc, 0x6a, 0xce, 0x08, 0x1f, 0xd4, 0x57, 0x60, 0x60, 0x79, 0x53, 0xc6, 0xea, 0x35, 0xe8, 0xa3, + 0x0d, 0x37, 0x66, 0xf6, 0x78, 0xaa, 0xd9, 0xeb, 0x4c, 0x43, 0xd8, 0xed, 0xa5, 0x91, 0x27, 0x35, + 0x0f, 0xc3, 0x45, 0xe2, 0xce, 0x07, 0x17, 0xe3, 0x4d, 0x9b, 0x94, 0xd3, 0xc2, 0x53, 0x81, 0xfd, + 0x45, 0xe2, 0xde, 0xf2, 0xda, 0x3b, 0x43, 0x14, 0xcf, 0x00, 0x84, 0xfd, 0x2b, 0x91, 0xb1, 0x9e, + 0x75, 0xff, 0x4f, 0xf5, 0x4b, 0x04, 0x7f, 0xf5, 0x2a, 0x39, 0xb0, 0x93, 0xd9, 0x4f, 0x9d, 0x1b, + 0xdb, 0x91, 0x56, 0x7c, 0x84, 0x60, 0xa8, 0xd5, 0x01, 0xd1, 0x8c, 0x45, 0xc8, 0x85, 0xb7, 0xa3, + 0xdf, 0x8f, 0x47, 0x53, 0xf3, 0x17, 0x86, 0x1b, 0xdc, 0xe0, 0x40, 0xe9, 0x9e, 0x7c, 0x82, 0x60, + 0x88, 0xf7, 0x64, 0x2c, 0x6d, 0xcb, 0x00, 0x21, 0x16, 0xd1, 0x98, 0xb2, 0x50, 0x7a, 0x02, 0x28, + 0xdb, 0x6b, 0x4e, 0x02, 0x83, 0x45, 0xe2, 0x2e, 0xb2, 0x8f, 0xec, 0x0e, 0x96, 0xd4, 0x57, 0x22, + 0x23, 0xa1, 0xa1, 0x3f, 
0x5a, 0x4d, 0xbd, 0x8d, 0xe0, 0x40, 0xcc, 0x03, 0x51, 0x54, 0x97, 0xa1, + 0x37, 0x32, 0xa6, 0xf8, 0x55, 0x75, 0x2c, 0x35, 0x95, 0x91, 0x90, 0xe7, 0xca, 0xe1, 0x99, 0xd2, + 0x75, 0xf5, 0x14, 0xc1, 0x01, 0x5e, 0x57, 0xf1, 0xe4, 0x5d, 0x84, 0x5c, 0x04, 0x8f, 0xa8, 0x2c, + 0x69, 0x38, 0x10, 0xc2, 0xd9, 0x5e, 0x6d, 0xad, 0x03, 0xe6, 0xe3, 0xc2, 0x8a, 0x37, 0xf1, 0x64, + 0xe5, 0x7b, 0x1a, 0xf6, 0xb0, 0xc9, 0x88, 0x25, 0x26, 0x57, 0x50, 0x53, 0xe1, 0xf2, 0x13, 0xb9, + 0x82, 0xfa, 0x4f, 0xe8, 0x2f, 0x12, 0xb7, 0xc9, 0x48, 0xd2, 0xe5, 0xf9, 0x1a, 0x0c, 0x78, 0x09, + 0x64, 0x72, 0xcf, 0x65, 0x42, 0xd8, 0x00, 0x1c, 0x05, 0x20, 0x8a, 0x27, 0xf0, 0x9b, 0x57, 0x8d, + 0xbc, 0xdf, 0xd2, 0xa5, 0x32, 0x06, 0x98, 0x7f, 0x80, 0x33, 0x43, 0xf4, 0x1b, 0xf2, 0x44, 0x6d, + 0x93, 0xde, 0x6f, 0x12, 0xfd, 0x1c, 0xc1, 0x29, 0x36, 0x2d, 0x97, 0xe8, 0xda, 0x4b, 0xa4, 0xec, + 0x96, 0x2a, 0xc4, 0x25, 0x65, 0x36, 0xc9, 0xf3, 0x61, 0xb6, 0xc2, 0x74, 0x6a, 0xc4, 0x72, 0x4b, + 0x35, 0xe2, 0xea, 0xde, 0xd8, 0x20, 0x6a, 0xe5, 0x62, 0xfa, 0x17, 0xd9, 0x3b, 0xf4, 0x3a, 0x3b, + 0x73, 0xc9, 0x3f, 0x92, 0x59, 0x5e, 0x0a, 0x0e, 0x5c, 0x11, 0xe7, 0x5d, 0xfc, 0xd3, 0xea, 0x09, + 0x43, 0x5e, 0x3c, 0xc9, 0xbd, 0x85, 0x11, 0x38, 0xb8, 0x25, 0x5e, 0x6f, 0x4c, 0xb9, 0x6d, 0x55, + 0xe2, 0xce, 0x27, 0xc5, 0xe9, 0x2d, 0x04, 0x98, 0x4f, 0x0c, 0x59, 0xa2, 0xf8, 0xff, 0xc9, 0x23, + 0xc3, 0x54, 0x76, 0x9a, 0xb9, 0x81, 0x94, 0xe9, 0xe1, 0x03, 0x04, 0x7f, 0xe3, 0x42, 0xcb, 0x9c, + 0x4c, 0x90, 0xca, 0xf2, 0xa6, 0x5e, 0xb3, 0x4d, 0xe2, 0xa4, 0x21, 0x22, 0xc9, 0x88, 0xce, 0xa7, + 0x22, 0xda, 0xc2, 0x4e, 0x0a, 0x3a, 0x0d, 0x0e, 0xfa, 0x5d, 0xb9, 0x1c, 0x70, 0x9d, 0xb4, 0xa0, + 0x3e, 0x42, 0x30, 0x12, 0xf4, 0x47, 0xa8, 0xf2, 0x5c, 0x5a, 0xf5, 0x23, 0x04, 0x87, 0x92, 0xb1, + 0x88, 0xae, 0xbd, 0x03, 0xfb, 0x5a, 0x79, 0x9c, 0x68, 0xe0, 0x93, 0x12, 0x99, 0x0d, 0xe3, 0xd1, + 0x5f, 0x6b, 0x7e, 0x21, 0xdb, 0xd4, 0x85, 0x77, 0x54, 0xe8, 0x9e, 0x6f, 0xb8, 0x74, 0xc5, 0xc4, + 0x9f, 0x20, 0xe8, 0x6b, 0xe2, 0x65, 0x78, 0x22, 0xfd, 0xae, 0x4f, 0xe0, 0x70, 0x8a, 0x14, 0x23, + 0x50, 0x17, 0xde, 0xfc, 0xe9, 0xd7, 0xc7, 0x5d, 0x67, 0xd4, 0x89, 0x80, 0x6b, 0xbe, 0xca, 0xb3, + 0x70, 0xd6, 0xae, 0x53, 0xaf, 0x2f, 0x1d, 0x6d, 0x5c, 0x33, 0x69, 0x99, 0x47, 0x4b, 0x1b, 0x7f, + 0xe8, 0x13, 0x72, 0x67, 0x36, 0x20, 0x13, 0x1f, 0x22, 0x80, 0x90, 0xd6, 0xe1, 0x7c, 0xaa, 0xe1, + 0x18, 0xff, 0x93, 0x04, 0x3a, 0xc3, 0x80, 0x4e, 0xe2, 0x08, 0x50, 0xaf, 0xc8, 0xb6, 0x80, 0x19, + 0xa0, 0xd4, 0xc6, 0x1f, 0xe2, 0x4f, 0x11, 0xf4, 0x46, 0x89, 0x1c, 0x3e, 0x95, 0x6a, 0x31, 0x81, + 0x77, 0x2a, 0x13, 0x6d, 0x68, 0xf0, 0x82, 0x4a, 0x02, 0x2c, 0x19, 0x59, 0xfc, 0x05, 0x82, 0xbe, + 0x26, 0xda, 0x97, 0x91, 0xff, 0x24, 0x8a, 0x28, 0x19, 0xd6, 0xcb, 0x0c, 0xe5, 0x52, 0x61, 0x26, + 0x44, 0xe9, 0x6f, 0x5c, 0x24, 0xc3, 0x1b, 0xd6, 0xc1, 0x13, 0x04, 0x7d, 0x4d, 0xc4, 0x30, 0x03, + 0x76, 0x12, 0x89, 0x54, 0x0e, 0xfb, 0x2a, 0x91, 0xd5, 0x46, 0xfe, 0xba, 0xbf, 0xda, 0xf0, 0xa3, + 0x3a, 0xde, 0x41, 0x19, 0x7c, 0x8c, 0x00, 0x42, 0x2e, 0x9a, 0x51, 0xa6, 0x31, 0xd2, 0x9a, 0x05, + 0xac, 0xc8, 0x80, 0xcd, 0xab, 0x67, 0xda, 0x06, 0x36, 0x6b, 0x04, 0xb6, 0x66, 0xd1, 0x38, 0x83, + 0x19, 0x92, 0xd7, 0x0c, 0x98, 0x31, 0x96, 0xbb, 0x83, 0x30, 0xc9, 0x66, 0x14, 0xe6, 0x37, 0x88, + 0xed, 0x72, 0x9a, 0xa9, 0x2e, 0xfe, 0x77, 0x56, 0xef, 0x27, 0x52, 0x63, 0xe5, 0x44, 0xaa, 0x5a, + 0xb3, 0x8e, 0x7a, 0x85, 0xb9, 0xb0, 0x8c, 0x17, 0xdb, 0x75, 0x21, 0xb2, 0xb0, 0x64, 0xa3, 0xb9, + 0x57, 0x14, 0x9f, 0x21, 0xe8, 0x8d, 0x32, 0xf0, 0x8c, 0xbb, 0x21, 0x81, 0xac, 0x2b, 0x92, 0x0c, + 0x4f, 0xbd, 0xc0, 0x70, 0x9f, 0xc7, 0xe7, 0xda, 
0xc6, 0x1d, 0xb2, 0x54, 0x0f, 0xf2, 0xd7, 0x08, + 0xfe, 0xd2, 0x4c, 0x86, 0x71, 0x21, 0xf3, 0x7a, 0x8a, 0x51, 0x7f, 0x65, 0xb2, 0x2d, 0x1d, 0x71, + 0xa9, 0x25, 0xf8, 0x90, 0x7a, 0xa9, 0x45, 0x0b, 0x28, 0xe2, 0x06, 0xfe, 0x01, 0x41, 0x7f, 0x0b, + 0x89, 0xc6, 0x93, 0x12, 0x77, 0x5c, 0xc7, 0xc1, 0x7f, 0x91, 0x01, 0xbf, 0x53, 0xb8, 0x14, 0x02, + 0x8f, 0x2c, 0x55, 0x3b, 0xc9, 0xc3, 0x6c, 0x84, 0xeb, 0x7b, 0x39, 0xe9, 0x6b, 0xa2, 0xdd, 0x19, + 0x57, 0x5f, 0x12, 0x45, 0x57, 0x64, 0x09, 0x9d, 0x7a, 0x8b, 0x39, 0x73, 0x0d, 0x5f, 0xdd, 0x56, + 0x25, 0x69, 0x11, 0x96, 0xea, 0xd5, 0xd5, 0x8f, 0x08, 0xfa, 0x5b, 0x08, 0x31, 0xce, 0x2e, 0x92, + 0xf8, 0x02, 0x40, 0x99, 0x6a, 0x4f, 0x49, 0x94, 0x56, 0x82, 0x53, 0xb2, 0xa5, 0xd5, 0x9c, 0x98, + 0xa8, 0x5f, 0xf8, 0x17, 0x04, 0xfb, 0x5a, 0x59, 0x35, 0x9e, 0x92, 0xa8, 0xb4, 0x6d, 0xa4, 0xa7, + 0xc6, 0x3c, 0xa9, 0x16, 0xfe, 0x17, 0x7a, 0x12, 0xfd, 0xd1, 0x63, 0xfb, 0xa9, 0x9a, 0x8d, 0x6e, + 0x03, 0xbc, 0xf1, 0x2b, 0x17, 0xa1, 0xe5, 0x58, 0x93, 0x98, 0x15, 0xa3, 0x2c, 0x27, 0xeb, 0x93, + 0x71, 0x8e, 0xb9, 0x33, 0xad, 0x6a, 0xd2, 0x83, 0x0c, 0x1b, 0x81, 0x9d, 0x59, 0xc1, 0x6a, 0xdf, + 0x43, 0xb0, 0xd7, 0x27, 0x0e, 0xf8, 0x64, 0x56, 0x5b, 0x34, 0x21, 0x93, 0xe0, 0xce, 0xea, 0x7f, + 0x18, 0xbc, 0x09, 0xac, 0xc9, 0x35, 0x03, 0xc7, 0xe6, 0xd5, 0xfb, 0x33, 0x04, 0x10, 0xd2, 0xf7, + 0x8c, 0x0f, 0x6d, 0x6c, 0xd1, 0xa0, 0x68, 0xd2, 0xf2, 0xa2, 0xc0, 0x13, 0x80, 0x4a, 0xc5, 0x11, + 0xbf, 0x8f, 0x20, 0x17, 0xe1, 0xfb, 0x19, 0x09, 0x8e, 0x6f, 0x06, 0xb2, 0x12, 0x2c, 0x80, 0x8d, + 0xb7, 0x1d, 0xc1, 0xa7, 0x0c, 0x58, 0x40, 0xb0, 0x33, 0x81, 0xb5, 0x52, 0xf1, 0x2c, 0x60, 0xf3, + 0x0c, 0xd8, 0x9c, 0x7a, 0xba, 0x4d, 0x60, 0xb3, 0x9c, 0xf5, 0x7b, 0x63, 0xca, 0x33, 0x6f, 0x94, + 0x8e, 0x6e, 0x01, 0xb2, 0x46, 0xe9, 0x84, 0x8d, 0x41, 0x16, 0xcc, 0x45, 0x06, 0xf3, 0xac, 0x3a, + 0xdd, 0x2e, 0xcc, 0x86, 0x15, 0x02, 0xf5, 0x62, 0x19, 0xd9, 0x40, 0x64, 0xc4, 0x32, 0xbe, 0xab, + 0xd8, 0xb1, 0x58, 0xf2, 0xb1, 0xcf, 0x83, 0xf8, 0x1d, 0x82, 0x03, 0x5b, 0xac, 0x0d, 0xf0, 0x5c, + 0x27, 0xcb, 0x06, 0x49, 0xe8, 0xab, 0x0c, 0xfa, 0x55, 0xb5, 0xd8, 0x19, 0xf4, 0x98, 0x59, 0xcf, + 0x97, 0x6f, 0x11, 0xe0, 0xf8, 0x36, 0x03, 0x9f, 0x96, 0xba, 0x9e, 0x62, 0xeb, 0x0f, 0xa5, 0xad, + 0x1d, 0x81, 0x4f, 0xba, 0xf0, 0x42, 0x7b, 0x0e, 0x69, 0x2d, 0xab, 0x05, 0xd6, 0x83, 0x3f, 0x8b, + 0x5f, 0x29, 0x5b, 0x17, 0x1b, 0x78, 0x5a, 0xee, 0x7e, 0x8a, 0xef, 0x65, 0x94, 0x99, 0x0e, 0x34, + 0xc5, 0x1d, 0x97, 0xe0, 0x59, 0xfa, 0x47, 0x3c, 0x48, 0x56, 0xcc, 0xb9, 0x85, 0x77, 0x11, 0xfc, + 0xbd, 0x4c, 0x6b, 0x69, 0x60, 0x16, 0x72, 0x7c, 0x63, 0x72, 0xa3, 0x4e, 0x5d, 0x7a, 0x03, 0xbd, + 0x30, 0x2f, 0x64, 0xab, 0xd4, 0xd4, 0xad, 0x6a, 0x9e, 0xd6, 0xab, 0x5a, 0x95, 0x58, 0x6c, 0xab, + 0xad, 0xf1, 0x7f, 0xe9, 0xb6, 0xe1, 0x24, 0xfe, 0xc6, 0x3e, 0xc7, 0x1f, 0xbf, 0xef, 0x1a, 0x29, + 0x32, 0xc1, 0xbb, 0x8b, 0x9e, 0xd0, 0x5d, 0x7e, 0xfe, 0xdd, 0xff, 0x72, 0xa1, 0xb5, 0x6e, 0x76, + 0xd6, 0xe4, 0xef, 0x01, 0x00, 0x00, 0xff, 0xff, 0x0c, 0xe1, 0xf9, 0x9a, 0x7e, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/table_spec.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/table_spec.pb.go new file mode 100644 index 0000000..40af9a2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/table_spec.pb.go @@ -0,0 +1,159 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/automl/v1beta1/table_spec.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A specification of a relational table. +// The table's schema is represented via its child column specs. It is +// pre-populated as part of ImportData by schema inference algorithm, the +// version of which is a required parameter of ImportData InputConfig. +// Note: While working with a table, at times the schema may be +// inconsistent with the data in the table (e.g. string in a FLOAT64 column). +// The consistency validation is done upon creation of a model. +// Used by: +// * Tables +type TableSpec struct { + // Output only. The resource name of the table spec. + // Form: + // + // `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/tableSpecs/{table_spec_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // column_spec_id of the time column. Only used if the parent dataset's + // ml_use_column_spec_id is not set. Used to split rows into TRAIN, VALIDATE + // and TEST sets such that oldest rows go to TRAIN set, newest to TEST, and + // those in between to VALIDATE. + // Required type: TIMESTAMP. + // If both this column and ml_use_column are not set, then ML use of all rows + // will be assigned by AutoML. NOTE: Updates of this field will instantly + // affect any other users concurrently working with the dataset. + TimeColumnSpecId string `protobuf:"bytes,2,opt,name=time_column_spec_id,json=timeColumnSpecId,proto3" json:"time_column_spec_id,omitempty"` + // Output only. The number of rows (i.e. examples) in the table. + RowCount int64 `protobuf:"varint,3,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"` + // Output only. The number of columns of the table. That is, the number of + // child ColumnSpec-s. + ColumnCount int64 `protobuf:"varint,7,opt,name=column_count,json=columnCount,proto3" json:"column_count,omitempty"` + // Output only. Input configs via which data currently residing in the table + // had been imported. + InputConfigs []*InputConfig `protobuf:"bytes,5,rep,name=input_configs,json=inputConfigs,proto3" json:"input_configs,omitempty"` + // Used to perform consistent read-modify-write updates. If not set, a blind + // "overwrite" update happens. 
+ Etag string `protobuf:"bytes,6,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableSpec) Reset() { *m = TableSpec{} } +func (m *TableSpec) String() string { return proto.CompactTextString(m) } +func (*TableSpec) ProtoMessage() {} +func (*TableSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_table_spec_f59803a440a81f32, []int{0} +} +func (m *TableSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableSpec.Unmarshal(m, b) +} +func (m *TableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableSpec.Marshal(b, m, deterministic) +} +func (dst *TableSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableSpec.Merge(dst, src) +} +func (m *TableSpec) XXX_Size() int { + return xxx_messageInfo_TableSpec.Size(m) +} +func (m *TableSpec) XXX_DiscardUnknown() { + xxx_messageInfo_TableSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_TableSpec proto.InternalMessageInfo + +func (m *TableSpec) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TableSpec) GetTimeColumnSpecId() string { + if m != nil { + return m.TimeColumnSpecId + } + return "" +} + +func (m *TableSpec) GetRowCount() int64 { + if m != nil { + return m.RowCount + } + return 0 +} + +func (m *TableSpec) GetColumnCount() int64 { + if m != nil { + return m.ColumnCount + } + return 0 +} + +func (m *TableSpec) GetInputConfigs() []*InputConfig { + if m != nil { + return m.InputConfigs + } + return nil +} + +func (m *TableSpec) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func init() { + proto.RegisterType((*TableSpec)(nil), "google.cloud.automl.v1beta1.TableSpec") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/table_spec.proto", fileDescriptor_table_spec_f59803a440a81f32) +} + +var fileDescriptor_table_spec_f59803a440a81f32 = []byte{ + // 322 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0x86, 0xd9, 0x56, 0xab, 0x4d, 0x2b, 0xc8, 0x7a, 0x59, 0x5a, 0xc1, 0x2a, 0x1e, 0xf6, 0xa0, + 0x09, 0xd5, 0xa3, 0xa7, 0xba, 0x07, 0xe9, 0xa1, 0x20, 0x55, 0x3c, 0x48, 0x61, 0x49, 0xd3, 0x18, + 0x02, 0xd9, 0x4c, 0xd8, 0x4d, 0xec, 0x0b, 0xf8, 0x74, 0xbe, 0x8d, 0x6f, 0x20, 0x49, 0x16, 0xbc, + 0xc8, 0xde, 0x66, 0xf2, 0x7f, 0xf3, 0x4f, 0xe6, 0x47, 0x37, 0x02, 0x40, 0x28, 0x4e, 0x98, 0x02, + 0xb7, 0x23, 0xd4, 0x59, 0xa8, 0x14, 0xf9, 0x9c, 0x6f, 0xb9, 0xa5, 0x73, 0x62, 0xe9, 0x56, 0xf1, + 0xb2, 0x31, 0x9c, 0x61, 0x53, 0x83, 0x85, 0x74, 0x1a, 0x69, 0x1c, 0x68, 0x1c, 0x69, 0xdc, 0xd2, + 0x93, 0xf3, 0xd6, 0x8a, 0x1a, 0x49, 0xa8, 0xd6, 0x60, 0xa9, 0x95, 0xa0, 0x9b, 0x38, 0x3a, 0xb9, + 0xee, 0x5a, 0x24, 0x21, 0x52, 0x57, 0x3f, 0x09, 0x1a, 0xbe, 0xfa, 0xad, 0x2f, 0x86, 0xb3, 0x34, + 0x45, 0x07, 0x9a, 0x56, 0x3c, 0x4b, 0x66, 0x49, 0x3e, 0x5c, 0x87, 0x3a, 0xbd, 0x45, 0x67, 0x56, + 0x56, 0xbc, 0x64, 0xa0, 0x5c, 0xa5, 0xc3, 0xe7, 0x4a, 0xb9, 0xcb, 0x7a, 0x01, 0x39, 0xf5, 0x52, + 0x11, 0x14, 0x6f, 0xb0, 0xdc, 0xa5, 0x53, 0x34, 0xac, 0x61, 0x5f, 0x32, 0x70, 0xda, 0x66, 0xfd, + 0x59, 0x92, 0xf7, 0xd7, 0xc7, 0x35, 0xec, 0x0b, 0xdf, 0xa7, 0x97, 0x68, 0xdc, 0xda, 0x44, 0xfd, + 0x28, 0xe8, 0xa3, 0xf8, 0x16, 0x91, 0x15, 0x3a, 0x91, 0xda, 0x38, 0x5b, 0x32, 0xd0, 0x1f, 0x52, + 0x34, 0xd9, 0xe1, 0xac, 0x9f, 0x8f, 0xee, 0x72, 0xdc, 0x91, 0x04, 0x5e, 0xfa, 0x89, 0x22, 0x0c, + 0xac, 0xc7, 0xf2, 0xaf, 0x69, 0xfc, 0x45, 0xdc, 
0x52, 0x91, 0x0d, 0xe2, 0x45, 0xbe, 0x7e, 0xfc, + 0x4a, 0xd0, 0x05, 0x83, 0xaa, 0xcb, 0xf1, 0x39, 0x79, 0x5f, 0xb4, 0xb2, 0x00, 0x45, 0xb5, 0xc0, + 0x50, 0x0b, 0x22, 0xb8, 0x0e, 0xa9, 0x91, 0x28, 0x51, 0x23, 0x9b, 0x7f, 0xe3, 0x7d, 0x88, 0xed, + 0x77, 0x6f, 0xfa, 0x14, 0xc0, 0x4d, 0xe1, 0xa1, 0xcd, 0xc2, 0x59, 0x58, 0xa9, 0xcd, 0x5b, 0x84, + 0xb6, 0x83, 0xe0, 0x75, 0xff, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xc3, 0x81, 0x3f, 0x46, 0x12, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/tables.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/tables.pb.go new file mode 100644 index 0000000..89a6ea5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/tables.pb.go @@ -0,0 +1,534 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/tables.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metadata for a dataset used for AutoML Tables. +type TablesDatasetMetadata struct { + // Output only. The table_spec_id of the primary table of this dataset. + PrimaryTableSpecId string `protobuf:"bytes,1,opt,name=primary_table_spec_id,json=primaryTableSpecId,proto3" json:"primary_table_spec_id,omitempty"` + // column_spec_id of the primary table's column that should be used as the + // training & prediction target. + // This column must be non-nullable and have one of following data types + // (otherwise model creation will error): + // * CATEGORY + // * ARRAY(CATEGORY) + // * FLOAT64 + // Furthermore, if the type is CATEGORY or ARRAY(CATEGORY), then only up to + // 40 unique values may exist in that column across all rows, but for + // ARRAY(CATEGORY) unique values are counted as elements of the ARRAY (i.e. + // following 3 ARRAY-s: [A, B], [A], [B] are counted as having 2 unique + // values). + // + // NOTE: Updates of this field will instantly affect any other users + // concurrently working with the dataset. + TargetColumnSpecId string `protobuf:"bytes,2,opt,name=target_column_spec_id,json=targetColumnSpecId,proto3" json:"target_column_spec_id,omitempty"` + // column_spec_id of the primary table's column that should be used as the + // weight column, i.e. the higher the value the more important the row will be + // during model training. + // Required type: FLOAT64. + // Allowed values: 0 to 10000, inclusive on both ends; 0 means the row is + // ignored for training. + // If not set all rows are assumed to have equal weight of 1. + // NOTE: Updates of this field will instantly affect any other users + // concurrently working with the dataset. 
+ WeightColumnSpecId string `protobuf:"bytes,3,opt,name=weight_column_spec_id,json=weightColumnSpecId,proto3" json:"weight_column_spec_id,omitempty"` + // column_spec_id of the primary table column which specifies a possible ML + // use of the row, i.e. the column will be used to split the rows into TRAIN, + // VALIDATE and TEST sets. + // Required type: STRING. + // This column, if set, must either have all of `TRAIN`, `VALIDATE`, `TEST` + // among its values, or only have `TEST`, `UNASSIGNED` values. In the latter + // case the rows with `UNASSIGNED` value will be assigned by AutoML. Note + // that if a given ml use distribution makes it impossible to create a "good" + // model, that call will error describing the issue. + // If both this column_spec_id and primary table's time_column_spec_id are not + // set, then all rows are treated as `UNASSIGNED`. + // NOTE: Updates of this field will instantly affect any other users + // concurrently working with the dataset. + MlUseColumnSpecId string `protobuf:"bytes,4,opt,name=ml_use_column_spec_id,json=mlUseColumnSpecId,proto3" json:"ml_use_column_spec_id,omitempty"` + // Output only. Correlations between + // + // [target_column][google.cloud.automl.v1beta1.TablesDatasetMetadata.target_column], + // and other columns of the + // + // [primary_table][google.cloud.automl.v1beta1.TablesDatasetMetadata.primary_table_spec_id]. + // Only set if the target column is set. Mapping from other column spec id to + // its CorrelationStats with the target column. + // This field may be stale, see the stats_update_time field for + // for the timestamp at which these stats were last updated. + TargetColumnCorrelations map[string]*CorrelationStats `protobuf:"bytes,6,rep,name=target_column_correlations,json=targetColumnCorrelations,proto3" json:"target_column_correlations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The most recent timestamp when target_column_correlations field and all + // descendant ColumnSpec.data_stats and ColumnSpec.top_correlated_columns + // fields were last (re-)generated. Any changes that happened to the dataset + // afterwards are not reflected in these fields values. The regeneration + // happens in the background on a best effort basis. 
+ StatsUpdateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=stats_update_time,json=statsUpdateTime,proto3" json:"stats_update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TablesDatasetMetadata) Reset() { *m = TablesDatasetMetadata{} } +func (m *TablesDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*TablesDatasetMetadata) ProtoMessage() {} +func (*TablesDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_tables_2ffb928805ee705e, []int{0} +} +func (m *TablesDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TablesDatasetMetadata.Unmarshal(m, b) +} +func (m *TablesDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TablesDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *TablesDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TablesDatasetMetadata.Merge(dst, src) +} +func (m *TablesDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_TablesDatasetMetadata.Size(m) +} +func (m *TablesDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TablesDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TablesDatasetMetadata proto.InternalMessageInfo + +func (m *TablesDatasetMetadata) GetPrimaryTableSpecId() string { + if m != nil { + return m.PrimaryTableSpecId + } + return "" +} + +func (m *TablesDatasetMetadata) GetTargetColumnSpecId() string { + if m != nil { + return m.TargetColumnSpecId + } + return "" +} + +func (m *TablesDatasetMetadata) GetWeightColumnSpecId() string { + if m != nil { + return m.WeightColumnSpecId + } + return "" +} + +func (m *TablesDatasetMetadata) GetMlUseColumnSpecId() string { + if m != nil { + return m.MlUseColumnSpecId + } + return "" +} + +func (m *TablesDatasetMetadata) GetTargetColumnCorrelations() map[string]*CorrelationStats { + if m != nil { + return m.TargetColumnCorrelations + } + return nil +} + +func (m *TablesDatasetMetadata) GetStatsUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.StatsUpdateTime + } + return nil +} + +// Model metadata specific to AutoML Tables. +type TablesModelMetadata struct { + // Column spec of the dataset's primary table's column the model is + // predicting. Snapshotted when model creation started. + // Only 3 fields are used: + // name - May be set on CreateModel, if it's not then the ColumnSpec + // corresponding to the current target_column_spec_id of the dataset + // the model is trained from is used. + // If neither is set, CreateModel will error. + // display_name - Output only. + // data_type - Output only. + TargetColumnSpec *ColumnSpec `protobuf:"bytes,2,opt,name=target_column_spec,json=targetColumnSpec,proto3" json:"target_column_spec,omitempty"` + // Column specs of the dataset's primary table's columns, on which + // the model is trained and which are used as the input for predictions. + // The + // + // [target_column][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] + // as well as, according to dataset's state upon model creation, + // + // [weight_column][google.cloud.automl.v1beta1.TablesDatasetMetadata.weight_column_spec_id], + // and + // + // [ml_use_column][google.cloud.automl.v1beta1.TablesDatasetMetadata.ml_use_column_spec_id] + // must never be included here. 
+ // Only 3 fields are used: + // name - May be set on CreateModel, if set only the columns specified are + // used, otherwise all primary table's columns (except the ones listed + // above) are used for the training and prediction input. + // display_name - Output only. + // data_type - Output only. + InputFeatureColumnSpecs []*ColumnSpec `protobuf:"bytes,3,rep,name=input_feature_column_specs,json=inputFeatureColumnSpecs,proto3" json:"input_feature_column_specs,omitempty"` + // Objective function the model is optimizing towards. The training process + // creates a model that maximizes/minimizes the value of the objective + // function over the validation set. + // + // The supported optimization objectives depend on the prediction_type. + // If the field is not set, a default objective function is used. + // + // CLASSIFICATION_BINARY: + // "MAXIMIZE_AU_ROC" (default) - Maximize the area under the receiver + // operating characteristic (ROC) curve. + // "MINIMIZE_LOG_LOSS" - Minimize log loss. + // "MAXIMIZE_AU_PRC" - Maximize the area under the precision-recall curve. + // + // CLASSIFICATION_MULTI_CLASS : + // "MINIMIZE_LOG_LOSS" (default) - Minimize log loss. + // + // CLASSIFICATION_MULTI_LABEL: + // "MINIMIZE_LOG_LOSS" (default) - Minimize log loss. + // + // REGRESSION: + // "MINIMIZE_RMSE" (default) - Minimize root-mean-squared error (RMSE). + // "MINIMIZE_MAE" - Minimize mean-absolute error (MAE). + // "MINIMIZE_RMSLE" - Minimize root-mean-squared log error (RMSLE). + // + // FORECASTING: + // "MINIMIZE_RMSE" (default) - Minimize root-mean-squared error (RMSE). + // "MINIMIZE_MAE" - Minimize mean-absolute error (MAE). + OptimizationObjective string `protobuf:"bytes,4,opt,name=optimization_objective,json=optimizationObjective,proto3" json:"optimization_objective,omitempty"` + // Output only. Auxiliary information for each of the + // input_feature_column_specs, with respect to this particular model. + TablesModelColumnInfo []*TablesModelColumnInfo `protobuf:"bytes,5,rep,name=tables_model_column_info,json=tablesModelColumnInfo,proto3" json:"tables_model_column_info,omitempty"` + // The train budget of creating this model, expressed in milli node hours + // i.e. 1,000 value in this field means 1 node hour. + // + // The training cost of the model will not exceed this budget. The final cost + // will be attempted to be close to the budget, though may end up being (even) + // noticeably smaller - at the backend's discretion. This especially may + // happen when further model training ceases to provide any improvements. + // + // If the budget is set to a value known to be insufficient to train a + // model for the given dataset, the training won't be attempted and + // will error. + TrainBudgetMilliNodeHours int64 `protobuf:"varint,6,opt,name=train_budget_milli_node_hours,json=trainBudgetMilliNodeHours,proto3" json:"train_budget_milli_node_hours,omitempty"` + // Output only. The actual training cost of the model, expressed in milli + // node hours, i.e. 1,000 value in this field means 1 node hour. Guaranteed + // to not exceed the train budget. 
+ TrainCostMilliNodeHours int64 `protobuf:"varint,7,opt,name=train_cost_milli_node_hours,json=trainCostMilliNodeHours,proto3" json:"train_cost_milli_node_hours,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TablesModelMetadata) Reset() { *m = TablesModelMetadata{} } +func (m *TablesModelMetadata) String() string { return proto.CompactTextString(m) } +func (*TablesModelMetadata) ProtoMessage() {} +func (*TablesModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_tables_2ffb928805ee705e, []int{1} +} +func (m *TablesModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TablesModelMetadata.Unmarshal(m, b) +} +func (m *TablesModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TablesModelMetadata.Marshal(b, m, deterministic) +} +func (dst *TablesModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TablesModelMetadata.Merge(dst, src) +} +func (m *TablesModelMetadata) XXX_Size() int { + return xxx_messageInfo_TablesModelMetadata.Size(m) +} +func (m *TablesModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TablesModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TablesModelMetadata proto.InternalMessageInfo + +func (m *TablesModelMetadata) GetTargetColumnSpec() *ColumnSpec { + if m != nil { + return m.TargetColumnSpec + } + return nil +} + +func (m *TablesModelMetadata) GetInputFeatureColumnSpecs() []*ColumnSpec { + if m != nil { + return m.InputFeatureColumnSpecs + } + return nil +} + +func (m *TablesModelMetadata) GetOptimizationObjective() string { + if m != nil { + return m.OptimizationObjective + } + return "" +} + +func (m *TablesModelMetadata) GetTablesModelColumnInfo() []*TablesModelColumnInfo { + if m != nil { + return m.TablesModelColumnInfo + } + return nil +} + +func (m *TablesModelMetadata) GetTrainBudgetMilliNodeHours() int64 { + if m != nil { + return m.TrainBudgetMilliNodeHours + } + return 0 +} + +func (m *TablesModelMetadata) GetTrainCostMilliNodeHours() int64 { + if m != nil { + return m.TrainCostMilliNodeHours + } + return 0 +} + +// Contains annotation details specific to Tables. +type TablesAnnotation struct { + // Output only. A confidence estimate between 0.0 and 1.0, inclusive. A higher + // value means greater confidence in the returned value. + // For + // + // [target_column_spec][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] + // of ARRAY(CATEGORY) data type, this is a confidence that one of the values + // in the ARRAY would be the provided value. + // For + // + // [target_column_spec][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] + // of FLOAT64 data type the score is not populated. + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + // Output only. Only populated when + // + // [target_column_spec][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec] + // has FLOAT64 data type (i.e. for regression predictions). An interval in + // which the exactly correct target value has 95% chance to be in. + PredictionInterval *DoubleRange `protobuf:"bytes,4,opt,name=prediction_interval,json=predictionInterval,proto3" json:"prediction_interval,omitempty"` + // The predicted value of the row's + // + // [target_column][google.cloud.automl.v1beta1.TablesModelMetadata.target_column_spec]. 
+ // The value depends on the column's DataType: + // CATEGORY - the predicted (with the above confidence `score`) CATEGORY + // value. + // FLOAT64 - the predicted (with the above confidence `score`) FLOAT64 value. + // ARRAY(CATEGORY) - CATEGORY value meaning that this value would be in the + // ARRAY in that column (with the above confidence `score`). + Value *_struct.Value `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Output only. Auxiliary information for each of the model's + // + // [input_feature_column_specs'][google.cloud.automl.v1beta1.TablesModelMetadata.input_feature_column_specs] + // with respect to this particular prediction. + TablesModelColumnInfo []*TablesModelColumnInfo `protobuf:"bytes,3,rep,name=tables_model_column_info,json=tablesModelColumnInfo,proto3" json:"tables_model_column_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TablesAnnotation) Reset() { *m = TablesAnnotation{} } +func (m *TablesAnnotation) String() string { return proto.CompactTextString(m) } +func (*TablesAnnotation) ProtoMessage() {} +func (*TablesAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_tables_2ffb928805ee705e, []int{2} +} +func (m *TablesAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TablesAnnotation.Unmarshal(m, b) +} +func (m *TablesAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TablesAnnotation.Marshal(b, m, deterministic) +} +func (dst *TablesAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TablesAnnotation.Merge(dst, src) +} +func (m *TablesAnnotation) XXX_Size() int { + return xxx_messageInfo_TablesAnnotation.Size(m) +} +func (m *TablesAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TablesAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TablesAnnotation proto.InternalMessageInfo + +func (m *TablesAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *TablesAnnotation) GetPredictionInterval() *DoubleRange { + if m != nil { + return m.PredictionInterval + } + return nil +} + +func (m *TablesAnnotation) GetValue() *_struct.Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *TablesAnnotation) GetTablesModelColumnInfo() []*TablesModelColumnInfo { + if m != nil { + return m.TablesModelColumnInfo + } + return nil +} + +// An information specific to given column and Tables Model, in context +// of the Model and the predictions created by it. +type TablesModelColumnInfo struct { + // Output only. The name of the ColumnSpec describing the column. Not + // populated when this proto is outputted to BigQuery. + ColumnSpecName string `protobuf:"bytes,1,opt,name=column_spec_name,json=columnSpecName,proto3" json:"column_spec_name,omitempty"` + // Output only. The display name of the column (same as the display_name of + // its ColumnSpec). + ColumnDisplayName string `protobuf:"bytes,2,opt,name=column_display_name,json=columnDisplayName,proto3" json:"column_display_name,omitempty"` + // Output only. + // + // When given as part of a Model: + // Measurement of how much model predictions correctness on the TEST data + // depend on values in this column. A value between 0 and 1, higher means + // higher influence. These values are normalized - for all input feature + // columns of a given model they add to 1. 
+ // + // When given back by Predict or Batch Predict: + // Measurement of how impactful for the prediction returned for the given row + // the value in this column was. A value between 0 and 1, higher means larger + // impact. These values are normalized - for all input feature columns of a + // single predicted row they add to 1. + FeatureImportance float32 `protobuf:"fixed32,3,opt,name=feature_importance,json=featureImportance,proto3" json:"feature_importance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TablesModelColumnInfo) Reset() { *m = TablesModelColumnInfo{} } +func (m *TablesModelColumnInfo) String() string { return proto.CompactTextString(m) } +func (*TablesModelColumnInfo) ProtoMessage() {} +func (*TablesModelColumnInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_tables_2ffb928805ee705e, []int{3} +} +func (m *TablesModelColumnInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TablesModelColumnInfo.Unmarshal(m, b) +} +func (m *TablesModelColumnInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TablesModelColumnInfo.Marshal(b, m, deterministic) +} +func (dst *TablesModelColumnInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_TablesModelColumnInfo.Merge(dst, src) +} +func (m *TablesModelColumnInfo) XXX_Size() int { + return xxx_messageInfo_TablesModelColumnInfo.Size(m) +} +func (m *TablesModelColumnInfo) XXX_DiscardUnknown() { + xxx_messageInfo_TablesModelColumnInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_TablesModelColumnInfo proto.InternalMessageInfo + +func (m *TablesModelColumnInfo) GetColumnSpecName() string { + if m != nil { + return m.ColumnSpecName + } + return "" +} + +func (m *TablesModelColumnInfo) GetColumnDisplayName() string { + if m != nil { + return m.ColumnDisplayName + } + return "" +} + +func (m *TablesModelColumnInfo) GetFeatureImportance() float32 { + if m != nil { + return m.FeatureImportance + } + return 0 +} + +func init() { + proto.RegisterType((*TablesDatasetMetadata)(nil), "google.cloud.automl.v1beta1.TablesDatasetMetadata") + proto.RegisterMapType((map[string]*CorrelationStats)(nil), "google.cloud.automl.v1beta1.TablesDatasetMetadata.TargetColumnCorrelationsEntry") + proto.RegisterType((*TablesModelMetadata)(nil), "google.cloud.automl.v1beta1.TablesModelMetadata") + proto.RegisterType((*TablesAnnotation)(nil), "google.cloud.automl.v1beta1.TablesAnnotation") + proto.RegisterType((*TablesModelColumnInfo)(nil), "google.cloud.automl.v1beta1.TablesModelColumnInfo") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/tables.proto", fileDescriptor_tables_2ffb928805ee705e) +} + +var fileDescriptor_tables_2ffb928805ee705e = []byte{ + // 797 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xcd, 0x6e, 0xdb, 0x38, + 0x10, 0x86, 0xec, 0xfc, 0x60, 0x69, 0x60, 0xd7, 0x66, 0xd6, 0x89, 0xd7, 0xd9, 0x20, 0x81, 0x2f, + 0xeb, 0x43, 0x22, 0x6d, 0x5c, 0x14, 0x28, 0xda, 0x1e, 0x9a, 0x38, 0x4d, 0xeb, 0x83, 0xd3, 0x40, + 0xf9, 0x01, 0x5a, 0x04, 0x10, 0x68, 0x89, 0x56, 0xd8, 0x50, 0xa4, 0x40, 0x51, 0x2e, 0x9c, 0x73, + 0xcf, 0x7d, 0x84, 0xa2, 0x6f, 0xd1, 0x77, 0xe8, 0x03, 0xf5, 0x5c, 0x90, 0x94, 0x1c, 0xdb, 0x71, + 0xd5, 0x1c, 0x7a, 0xb3, 0xf8, 0x7d, 0xdf, 0x70, 0x66, 0xbe, 0xe1, 0x18, 0xb4, 0x43, 0xce, 0x43, + 0x8a, 0x1d, 0x9f, 0xf2, 0x34, 0x70, 0x50, 0x2a, 0x79, 0x44, 0x9d, 0xd1, 0xfe, 0x00, 0x4b, 0xb4, + 0xef, 0x48, 0x34, 0xa0, 
0x38, 0xb1, 0x63, 0xc1, 0x25, 0x87, 0x9b, 0x86, 0x69, 0x6b, 0xa6, 0x6d, + 0x98, 0x76, 0xc6, 0x6c, 0xfe, 0x9b, 0x85, 0x41, 0x31, 0x71, 0x10, 0x63, 0x5c, 0x22, 0x49, 0x38, + 0xcb, 0xa4, 0xcd, 0xbd, 0xa2, 0x4b, 0x7c, 0x4e, 0xd3, 0x88, 0x79, 0x49, 0x8c, 0xfd, 0x8c, 0xbe, + 0x5b, 0x44, 0x0f, 0x90, 0x44, 0x5e, 0x22, 0x91, 0xcc, 0x83, 0x17, 0x56, 0x20, 0x10, 0x0b, 0xf3, + 0x0a, 0x26, 0x49, 0xea, 0xaf, 0x41, 0x3a, 0x74, 0x12, 0x29, 0x52, 0x5f, 0x66, 0xe8, 0xf6, 0x3c, + 0x2a, 0x49, 0x84, 0x13, 0x89, 0xa2, 0xd8, 0x10, 0x5a, 0x5f, 0x97, 0x40, 0xfd, 0x5c, 0x77, 0xe4, + 0x08, 0x49, 0x94, 0x60, 0xd9, 0xc7, 0x12, 0xa9, 0x74, 0xe0, 0x3e, 0xa8, 0xc7, 0x82, 0x44, 0x48, + 0x8c, 0x3d, 0xdd, 0x32, 0x5d, 0x8c, 0x47, 0x82, 0x86, 0xb5, 0x63, 0xb5, 0xff, 0x70, 0x61, 0x06, + 0x6a, 0xf1, 0x59, 0x8c, 0xfd, 0x5e, 0xa0, 0x24, 0x12, 0x89, 0x10, 0x4b, 0x6f, 0xaa, 0x7e, 0x25, + 0x29, 0x19, 0x89, 0x01, 0xbb, 0x1a, 0xbb, 0x93, 0x7c, 0xc0, 0x24, 0xbc, 0xbe, 0x27, 0x29, 0x1b, + 0x89, 0x01, 0x67, 0x24, 0xff, 0x83, 0x7a, 0x44, 0xbd, 0x34, 0xc1, 0xf3, 0x92, 0x25, 0x2d, 0xa9, + 0x45, 0xf4, 0x22, 0xc1, 0x33, 0x8a, 0x4f, 0x16, 0x68, 0xce, 0x26, 0xe6, 0x73, 0x21, 0x30, 0x35, + 0x7e, 0x36, 0x56, 0x76, 0xca, 0xed, 0x4a, 0xe7, 0xd4, 0x2e, 0x98, 0x05, 0x7b, 0x61, 0x8f, 0xec, + 0xf3, 0xa9, 0x82, 0xba, 0x53, 0x21, 0x5f, 0x32, 0x29, 0xc6, 0x6e, 0x43, 0xfe, 0x04, 0x86, 0xc7, + 0xa0, 0xa6, 0xdd, 0xf6, 0xd2, 0x38, 0x40, 0x12, 0x7b, 0xca, 0x95, 0xc6, 0xea, 0x8e, 0xd5, 0xae, + 0x74, 0x9a, 0x79, 0x1a, 0xb9, 0x65, 0xf6, 0x79, 0x6e, 0x99, 0xfb, 0x97, 0x16, 0x5d, 0x68, 0x8d, + 0x3a, 0x6d, 0xde, 0x82, 0xad, 0xc2, 0x14, 0x60, 0x15, 0x94, 0x6f, 0xf0, 0x38, 0xb3, 0x4c, 0xfd, + 0x84, 0x5d, 0xb0, 0x3c, 0x42, 0x34, 0xc5, 0xda, 0x93, 0x4a, 0x67, 0xaf, 0xb0, 0xea, 0xa9, 0x80, + 0x67, 0xea, 0x6a, 0xd7, 0x68, 0x9f, 0x96, 0x9e, 0x58, 0xad, 0xef, 0x65, 0xb0, 0x66, 0xba, 0xd2, + 0xe7, 0x01, 0xa6, 0x93, 0xb9, 0xb9, 0x00, 0xf0, 0xfe, 0x10, 0x64, 0xb7, 0xfd, 0xf7, 0x8b, 0xdb, + 0x72, 0xcf, 0xdc, 0xea, 0xfc, 0xa8, 0xc0, 0x00, 0x34, 0x09, 0x8b, 0x53, 0xe9, 0x0d, 0x31, 0x92, + 0xa9, 0x98, 0x31, 0x3f, 0x69, 0x94, 0xb5, 0x85, 0x0f, 0x0e, 0xbf, 0xa1, 0x43, 0x1d, 0x9b, 0x48, + 0x77, 0xe7, 0x09, 0x7c, 0x0c, 0xd6, 0x79, 0x2c, 0x49, 0x44, 0x6e, 0x75, 0xd1, 0x1e, 0x1f, 0xbc, + 0xc7, 0xbe, 0x24, 0x23, 0x9c, 0x0d, 0x57, 0x7d, 0x1a, 0x7d, 0x93, 0x83, 0xf0, 0x06, 0x34, 0xcc, + 0x5a, 0xf1, 0x22, 0xd5, 0x8b, 0x3c, 0x37, 0xc2, 0x86, 0xbc, 0xb1, 0xac, 0x53, 0xeb, 0x3c, 0x60, + 0xba, 0x74, 0x1f, 0x4d, 0x36, 0x3d, 0x36, 0xe4, 0x6e, 0x5d, 0x2e, 0x3a, 0x86, 0x2f, 0xc0, 0x96, + 0x14, 0x88, 0x30, 0x6f, 0x90, 0x06, 0xaa, 0xcd, 0x11, 0xa1, 0x94, 0x78, 0x8c, 0x07, 0xd8, 0xbb, + 0xe6, 0xa9, 0x50, 0xf3, 0x6c, 0xb5, 0xcb, 0xee, 0x3f, 0x9a, 0x74, 0xa8, 0x39, 0x7d, 0x45, 0x39, + 0xe1, 0x01, 0x7e, 0xad, 0x08, 0xf0, 0x39, 0xd8, 0x34, 0x11, 0x7c, 0x9e, 0x2c, 0xd0, 0xaf, 0x6a, + 0xfd, 0x86, 0xa6, 0x74, 0x79, 0x32, 0xa7, 0x6e, 0x7d, 0x2e, 0x81, 0xaa, 0x49, 0xf8, 0x60, 0xb2, + 0x14, 0xe1, 0xdf, 0x60, 0x39, 0xf1, 0xb9, 0xc0, 0x7a, 0xd4, 0x4a, 0xae, 0xf9, 0x80, 0x6f, 0xc1, + 0x5a, 0x2c, 0x70, 0x40, 0x7c, 0xdd, 0x4c, 0xc2, 0x24, 0x16, 0x23, 0x44, 0x75, 0x2f, 0x2b, 0x9d, + 0x76, 0x61, 0x4b, 0x8e, 0x78, 0x3a, 0xa0, 0xd8, 0x55, 0xab, 0x4e, 0xed, 0x9a, 0x3c, 0x48, 0x2f, + 0x8b, 0x01, 0x77, 0x67, 0xe7, 0x78, 0xfd, 0xde, 0xb3, 0xb9, 0x54, 0x68, 0x36, 0xb0, 0x85, 0x06, + 0x95, 0x7f, 0xb3, 0x41, 0xad, 0x2f, 0x56, 0xbe, 0x53, 0xe7, 0xad, 0x6b, 0x83, 0xea, 0xf4, 0xce, + 0x62, 0x28, 0xc2, 0xd9, 0xdb, 0xfc, 0xd3, 0x9f, 0x4c, 0xe1, 0x09, 0x8a, 0x30, 0xb4, 0xc1, 0x5a, + 0xc6, 0x0c, 0x48, 0x12, 0x53, 0x34, 0x36, 0x64, 
0xb3, 0x48, 0x6b, 0x06, 0x3a, 0x32, 0x88, 0xe6, + 0xef, 0x01, 0x98, 0x3f, 0x0c, 0x12, 0xc5, 0x5c, 0x48, 0xc4, 0x7c, 0xac, 0x97, 0x68, 0xc9, 0xad, + 0x65, 0x48, 0x6f, 0x02, 0x1c, 0x7e, 0xb4, 0xc0, 0xb6, 0xcf, 0xa3, 0xa2, 0x9a, 0x4f, 0xad, 0x77, + 0x07, 0x19, 0x1c, 0x72, 0x8a, 0x58, 0x68, 0x73, 0x11, 0x3a, 0x21, 0x66, 0xba, 0xc3, 0x8e, 0x81, + 0x50, 0x4c, 0x92, 0x85, 0x7f, 0x52, 0xcf, 0xcc, 0xe7, 0xb7, 0xd2, 0xe6, 0x2b, 0x4d, 0xbc, 0xea, + 0x2a, 0xd2, 0xd5, 0x41, 0x2a, 0x79, 0x9f, 0x5e, 0x5d, 0x1a, 0xd2, 0x60, 0x45, 0xc7, 0x7a, 0xf4, + 0x23, 0x00, 0x00, 0xff, 0xff, 0x13, 0x68, 0xa2, 0x3d, 0xb1, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/temporal.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/temporal.pb.go new file mode 100644 index 0000000..cbd8a4a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/temporal.pb.go @@ -0,0 +1,101 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/temporal.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A time period inside of an example that has a time dimension (e.g. video). +type TimeSegment struct { + // Start of the time segment (inclusive), represented as the duration since + // the example start. + StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // End of the time segment (exclusive), represented as the duration since the + // example start. 
+ EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeSegment) Reset() { *m = TimeSegment{} } +func (m *TimeSegment) String() string { return proto.CompactTextString(m) } +func (*TimeSegment) ProtoMessage() {} +func (*TimeSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_temporal_20cabaedddc21db8, []int{0} +} +func (m *TimeSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeSegment.Unmarshal(m, b) +} +func (m *TimeSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeSegment.Marshal(b, m, deterministic) +} +func (dst *TimeSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeSegment.Merge(dst, src) +} +func (m *TimeSegment) XXX_Size() int { + return xxx_messageInfo_TimeSegment.Size(m) +} +func (m *TimeSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TimeSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeSegment proto.InternalMessageInfo + +func (m *TimeSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *TimeSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +func init() { + proto.RegisterType((*TimeSegment)(nil), "google.cloud.automl.v1beta1.TimeSegment") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/temporal.proto", fileDescriptor_temporal_20cabaedddc21db8) +} + +var fileDescriptor_temporal_20cabaedddc21db8 = []byte{ + // 264 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x90, 0xbf, 0x4a, 0x34, 0x31, + 0x14, 0xc5, 0xc9, 0x16, 0x5f, 0x91, 0xe5, 0x63, 0x71, 0x2a, 0xdd, 0x15, 0x15, 0x2b, 0xb1, 0x48, + 0x58, 0x2d, 0xad, 0xc6, 0x3f, 0x58, 0x89, 0xa2, 0x62, 0x21, 0x03, 0x4b, 0x66, 0xe7, 0x4e, 0x08, + 0x24, 0xb9, 0x43, 0xe6, 0xc6, 0x27, 0xf0, 0x1d, 0x7c, 0x27, 0x9f, 0x4a, 0x36, 0x89, 0x88, 0x20, + 0x5a, 0x1e, 0xce, 0xef, 0xfc, 0x48, 0x2e, 0x3f, 0xd6, 0x88, 0xda, 0x82, 0x5c, 0x5b, 0x8c, 0x9d, + 0x54, 0x91, 0xd0, 0x59, 0xf9, 0xb2, 0x6c, 0x81, 0xd4, 0x52, 0x12, 0xb8, 0x01, 0x83, 0xb2, 0x62, + 0x08, 0x48, 0x58, 0x2d, 0x32, 0x2b, 0x12, 0x2b, 0x32, 0x2b, 0x0a, 0x3b, 0xdf, 0x2d, 0x22, 0x35, + 0x18, 0xa9, 0xbc, 0x47, 0x52, 0x64, 0xd0, 0x8f, 0x79, 0x3a, 0xdf, 0x2b, 0x6d, 0x4a, 0x6d, 0xec, + 0x65, 0x17, 0x43, 0x02, 0x72, 0x7f, 0xf8, 0xc6, 0xf8, 0xf4, 0xd1, 0x38, 0x78, 0x00, 0xed, 0xc0, + 0x53, 0x75, 0xc5, 0xb7, 0x46, 0x52, 0x81, 0x56, 0x64, 0x1c, 0xac, 0xb0, 0xef, 0x47, 0xa0, 0x6d, + 0x76, 0xc0, 0x8e, 0xa6, 0x27, 0x3b, 0xa2, 0x3c, 0xe3, 0xd3, 0x25, 0x2e, 0x8b, 0xeb, 0x7e, 0x96, + 0x36, 0x1b, 0xcf, 0x6d, 0x5a, 0x54, 0x35, 0x9f, 0x81, 0xef, 0xbe, 0x49, 0x26, 0x7f, 0x49, 0xfe, + 0x83, 0xef, 0xbe, 0x14, 0xe7, 0xaf, 0x8c, 0xef, 0xaf, 0xd1, 0x89, 0x5f, 0xfe, 0x7e, 0xc7, 0x9e, + 0xeb, 0x52, 0x6b, 0xb4, 0xca, 0x6b, 0x81, 0x41, 0x4b, 0x0d, 0x3e, 0xc9, 0x65, 0xae, 0xd4, 0x60, + 0xc6, 0x1f, 0xaf, 0x7c, 0x96, 0xe3, 0xfb, 0x64, 0x71, 0x9d, 0xc0, 0xe6, 0x62, 0x03, 0x35, 0x75, + 0x24, 0xbc, 0xb1, 0xcd, 0x53, 0x86, 0xda, 0x7f, 0xc9, 0x75, 0xfa, 0x11, 0x00, 0x00, 0xff, 0xff, + 0x60, 0x83, 0x10, 0xd5, 0xb0, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text.pb.go new file 
mode 100644 index 0000000..febfeb1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text.pb.go @@ -0,0 +1,269 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/text.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Dataset metadata for classification. +type TextClassificationDatasetMetadata struct { + // Required. + // Type of the classification problem. + ClassificationType ClassificationType `protobuf:"varint,1,opt,name=classification_type,json=classificationType,proto3,enum=google.cloud.automl.v1beta1.ClassificationType" json:"classification_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextClassificationDatasetMetadata) Reset() { *m = TextClassificationDatasetMetadata{} } +func (m *TextClassificationDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*TextClassificationDatasetMetadata) ProtoMessage() {} +func (*TextClassificationDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{0} +} +func (m *TextClassificationDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextClassificationDatasetMetadata.Unmarshal(m, b) +} +func (m *TextClassificationDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextClassificationDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *TextClassificationDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextClassificationDatasetMetadata.Merge(dst, src) +} +func (m *TextClassificationDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_TextClassificationDatasetMetadata.Size(m) +} +func (m *TextClassificationDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextClassificationDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextClassificationDatasetMetadata proto.InternalMessageInfo + +func (m *TextClassificationDatasetMetadata) GetClassificationType() ClassificationType { + if m != nil { + return m.ClassificationType + } + return ClassificationType_CLASSIFICATION_TYPE_UNSPECIFIED +} + +// Model metadata that is specific to text classification. 
+type TextClassificationModelMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextClassificationModelMetadata) Reset() { *m = TextClassificationModelMetadata{} } +func (m *TextClassificationModelMetadata) String() string { return proto.CompactTextString(m) } +func (*TextClassificationModelMetadata) ProtoMessage() {} +func (*TextClassificationModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{1} +} +func (m *TextClassificationModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextClassificationModelMetadata.Unmarshal(m, b) +} +func (m *TextClassificationModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextClassificationModelMetadata.Marshal(b, m, deterministic) +} +func (dst *TextClassificationModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextClassificationModelMetadata.Merge(dst, src) +} +func (m *TextClassificationModelMetadata) XXX_Size() int { + return xxx_messageInfo_TextClassificationModelMetadata.Size(m) +} +func (m *TextClassificationModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextClassificationModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextClassificationModelMetadata proto.InternalMessageInfo + +// Dataset metadata that is specific to text extraction +type TextExtractionDatasetMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextExtractionDatasetMetadata) Reset() { *m = TextExtractionDatasetMetadata{} } +func (m *TextExtractionDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*TextExtractionDatasetMetadata) ProtoMessage() {} +func (*TextExtractionDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{2} +} +func (m *TextExtractionDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextExtractionDatasetMetadata.Unmarshal(m, b) +} +func (m *TextExtractionDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextExtractionDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *TextExtractionDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextExtractionDatasetMetadata.Merge(dst, src) +} +func (m *TextExtractionDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_TextExtractionDatasetMetadata.Size(m) +} +func (m *TextExtractionDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextExtractionDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextExtractionDatasetMetadata proto.InternalMessageInfo + +// Model metadata that is specific to text extraction. 
+type TextExtractionModelMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextExtractionModelMetadata) Reset() { *m = TextExtractionModelMetadata{} } +func (m *TextExtractionModelMetadata) String() string { return proto.CompactTextString(m) } +func (*TextExtractionModelMetadata) ProtoMessage() {} +func (*TextExtractionModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{3} +} +func (m *TextExtractionModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextExtractionModelMetadata.Unmarshal(m, b) +} +func (m *TextExtractionModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextExtractionModelMetadata.Marshal(b, m, deterministic) +} +func (dst *TextExtractionModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextExtractionModelMetadata.Merge(dst, src) +} +func (m *TextExtractionModelMetadata) XXX_Size() int { + return xxx_messageInfo_TextExtractionModelMetadata.Size(m) +} +func (m *TextExtractionModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextExtractionModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextExtractionModelMetadata proto.InternalMessageInfo + +// Dataset metadata for text sentiment. +type TextSentimentDatasetMetadata struct { + // Required. + // A sentiment is expressed as an integer ordinal, where higher value + // means a more positive sentiment. The range of sentiments that will be used + // is between 0 and sentiment_max (inclusive on both ends), and all the values + // in the range must be represented in the dataset before a model can be + // created. + // sentiment_max value must be between 1 and 10 (inclusive). + SentimentMax int32 `protobuf:"varint,1,opt,name=sentiment_max,json=sentimentMax,proto3" json:"sentiment_max,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSentimentDatasetMetadata) Reset() { *m = TextSentimentDatasetMetadata{} } +func (m *TextSentimentDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*TextSentimentDatasetMetadata) ProtoMessage() {} +func (*TextSentimentDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{4} +} +func (m *TextSentimentDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSentimentDatasetMetadata.Unmarshal(m, b) +} +func (m *TextSentimentDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSentimentDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *TextSentimentDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSentimentDatasetMetadata.Merge(dst, src) +} +func (m *TextSentimentDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_TextSentimentDatasetMetadata.Size(m) +} +func (m *TextSentimentDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextSentimentDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSentimentDatasetMetadata proto.InternalMessageInfo + +func (m *TextSentimentDatasetMetadata) GetSentimentMax() int32 { + if m != nil { + return m.SentimentMax + } + return 0 +} + +// Model metadata that is specific to text classification. 
+type TextSentimentModelMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSentimentModelMetadata) Reset() { *m = TextSentimentModelMetadata{} } +func (m *TextSentimentModelMetadata) String() string { return proto.CompactTextString(m) } +func (*TextSentimentModelMetadata) ProtoMessage() {} +func (*TextSentimentModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_text_15189e358b88704e, []int{5} +} +func (m *TextSentimentModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSentimentModelMetadata.Unmarshal(m, b) +} +func (m *TextSentimentModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSentimentModelMetadata.Marshal(b, m, deterministic) +} +func (dst *TextSentimentModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSentimentModelMetadata.Merge(dst, src) +} +func (m *TextSentimentModelMetadata) XXX_Size() int { + return xxx_messageInfo_TextSentimentModelMetadata.Size(m) +} +func (m *TextSentimentModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TextSentimentModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSentimentModelMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*TextClassificationDatasetMetadata)(nil), "google.cloud.automl.v1beta1.TextClassificationDatasetMetadata") + proto.RegisterType((*TextClassificationModelMetadata)(nil), "google.cloud.automl.v1beta1.TextClassificationModelMetadata") + proto.RegisterType((*TextExtractionDatasetMetadata)(nil), "google.cloud.automl.v1beta1.TextExtractionDatasetMetadata") + proto.RegisterType((*TextExtractionModelMetadata)(nil), "google.cloud.automl.v1beta1.TextExtractionModelMetadata") + proto.RegisterType((*TextSentimentDatasetMetadata)(nil), "google.cloud.automl.v1beta1.TextSentimentDatasetMetadata") + proto.RegisterType((*TextSentimentModelMetadata)(nil), "google.cloud.automl.v1beta1.TextSentimentModelMetadata") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/text.proto", fileDescriptor_text_15189e358b88704e) +} + +var fileDescriptor_text_15189e358b88704e = []byte{ + // 322 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0x59, 0x41, 0xc1, 0xa0, 0x1e, 0xea, 0x45, 0xfa, 0x87, 0xda, 0x15, 0xc4, 0x53, 0x62, + 0xf5, 0xe8, 0xa9, 0xad, 0xe2, 0x69, 0xa1, 0xd4, 0xe2, 0x41, 0x0a, 0x75, 0xba, 0x1d, 0x97, 0x85, + 0x6c, 0x66, 0xe9, 0x4e, 0x65, 0xfb, 0x01, 0x3c, 0xfb, 0xbd, 0xfc, 0x54, 0xb2, 0x49, 0x2b, 0xc4, + 0x96, 0x1e, 0x93, 0xf7, 0x7b, 0x2f, 0x2f, 0x33, 0xe2, 0x3a, 0x21, 0x4a, 0x34, 0xaa, 0x58, 0xd3, + 0x72, 0xae, 0x60, 0xc9, 0x94, 0x69, 0xf5, 0xd9, 0x9d, 0x21, 0x43, 0x57, 0x31, 0x96, 0x2c, 0xf3, + 0x05, 0x31, 0xd5, 0x1a, 0x8e, 0x93, 0x96, 0x93, 0x8e, 0x93, 0x6b, 0xae, 0xde, 0x5c, 0x87, 0x40, + 0x9e, 0x2a, 0x30, 0x86, 0x18, 0x38, 0x25, 0x53, 0x38, 0x6b, 0xfd, 0x76, 0xdf, 0x13, 0xb1, 0x86, + 0xa2, 0x48, 0x3f, 0xd2, 0xd8, 0x5a, 0x9c, 0x23, 0xfc, 0x0a, 0x44, 0x67, 0x8c, 0x25, 0x0f, 0x3c, + 0xf1, 0x11, 0x18, 0x0a, 0xe4, 0x08, 0x19, 0xe6, 0xc0, 0x50, 0x7b, 0x17, 0xe7, 0xbe, 0x7b, 0xca, + 0xab, 0x1c, 0x2f, 0x82, 0xcb, 0xe0, 0xe6, 0xec, 0x4e, 0xc9, 0x3d, 0x85, 0xa5, 0x1f, 0x3c, 0x5e, + 0xe5, 0x38, 0xaa, 0xc5, 0x5b, 0x77, 0x61, 0x47, 0xb4, 0xb7, 0x6b, 0x44, 0x34, 0x47, 0xbd, 0x29, + 0x11, 0xb6, 0x45, 0xab, 0x42, 0x9e, 0x4a, 0x5e, 0x40, 0xbc, 0xa3, 0x65, 0xd8, 0x12, 0x0d, 0x1f, + 0xf0, 0xfd, 
0x03, 0xd1, 0xac, 0xe4, 0x17, 0x34, 0x9c, 0x66, 0x68, 0xf8, 0xff, 0x27, 0xaf, 0xc4, + 0x69, 0xb1, 0xd1, 0xa6, 0x19, 0x94, 0xf6, 0x7b, 0x87, 0xa3, 0x93, 0xbf, 0xcb, 0x08, 0xca, 0xb0, + 0x29, 0xea, 0x5e, 0x88, 0xf7, 0x44, 0xff, 0x3b, 0x10, 0xed, 0x98, 0xb2, 0x7d, 0x03, 0xe9, 0x1f, + 0x57, 0xfe, 0x61, 0x35, 0xfc, 0x61, 0xf0, 0xd6, 0x5b, 0x93, 0x09, 0x69, 0x30, 0x89, 0xa4, 0x45, + 0xa2, 0x12, 0x34, 0x76, 0x35, 0xca, 0x49, 0x90, 0xa7, 0xc5, 0xce, 0x7d, 0x3e, 0xb8, 0xe3, 0xcf, + 0x41, 0xe3, 0xd9, 0x82, 0x93, 0x41, 0x05, 0x4d, 0x7a, 0x4b, 0xa6, 0x48, 0x4f, 0x5e, 0x1d, 0x34, + 0x3b, 0xb2, 0x59, 0xf7, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x91, 0x55, 0x3e, 0x63, 0x7d, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_extraction.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_extraction.pb.go new file mode 100644 index 0000000..e22dc62 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_extraction.pb.go @@ -0,0 +1,233 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/text_extraction.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Annotation for identifying spans of text. +type TextExtractionAnnotation struct { + // Output only. A confidence estimate between 0.0 and 1.0. A higher value + // means greater confidence in correctness of the annotation. + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + // Required. The part of the original text to which this annotation pertains. 
+ TextSegment *TextSegment `protobuf:"bytes,3,opt,name=text_segment,json=textSegment,proto3" json:"text_segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextExtractionAnnotation) Reset() { *m = TextExtractionAnnotation{} } +func (m *TextExtractionAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextExtractionAnnotation) ProtoMessage() {} +func (*TextExtractionAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_extraction_819b64ab635b378e, []int{0} +} +func (m *TextExtractionAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextExtractionAnnotation.Unmarshal(m, b) +} +func (m *TextExtractionAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextExtractionAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextExtractionAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextExtractionAnnotation.Merge(dst, src) +} +func (m *TextExtractionAnnotation) XXX_Size() int { + return xxx_messageInfo_TextExtractionAnnotation.Size(m) +} +func (m *TextExtractionAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextExtractionAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextExtractionAnnotation proto.InternalMessageInfo + +func (m *TextExtractionAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *TextExtractionAnnotation) GetTextSegment() *TextSegment { + if m != nil { + return m.TextSegment + } + return nil +} + +// Model evaluation metrics for text extraction problems. +type TextExtractionEvaluationMetrics struct { + // Output only. The Area under precision recall curve metric. + AuPrc float32 `protobuf:"fixed32,1,opt,name=au_prc,json=auPrc,proto3" json:"au_prc,omitempty"` + // Output only. Metrics that have confidence thresholds. + // Precision-recall curve can be derived from it. 
+ ConfidenceMetricsEntries []*TextExtractionEvaluationMetrics_ConfidenceMetricsEntry `protobuf:"bytes,2,rep,name=confidence_metrics_entries,json=confidenceMetricsEntries,proto3" json:"confidence_metrics_entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextExtractionEvaluationMetrics) Reset() { *m = TextExtractionEvaluationMetrics{} } +func (m *TextExtractionEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*TextExtractionEvaluationMetrics) ProtoMessage() {} +func (*TextExtractionEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_text_extraction_819b64ab635b378e, []int{1} +} +func (m *TextExtractionEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextExtractionEvaluationMetrics.Unmarshal(m, b) +} +func (m *TextExtractionEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextExtractionEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *TextExtractionEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextExtractionEvaluationMetrics.Merge(dst, src) +} +func (m *TextExtractionEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_TextExtractionEvaluationMetrics.Size(m) +} +func (m *TextExtractionEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_TextExtractionEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_TextExtractionEvaluationMetrics proto.InternalMessageInfo + +func (m *TextExtractionEvaluationMetrics) GetAuPrc() float32 { + if m != nil { + return m.AuPrc + } + return 0 +} + +func (m *TextExtractionEvaluationMetrics) GetConfidenceMetricsEntries() []*TextExtractionEvaluationMetrics_ConfidenceMetricsEntry { + if m != nil { + return m.ConfidenceMetricsEntries + } + return nil +} + +// Metrics for a single confidence threshold. +type TextExtractionEvaluationMetrics_ConfidenceMetricsEntry struct { + // Output only. The confidence threshold value used to compute the metrics. + // Only annotations with score of at least this threshold are considered to + // be ones the model would return. + ConfidenceThreshold float32 `protobuf:"fixed32,1,opt,name=confidence_threshold,json=confidenceThreshold,proto3" json:"confidence_threshold,omitempty"` + // Output only. Recall under the given confidence threshold. + Recall float32 `protobuf:"fixed32,3,opt,name=recall,proto3" json:"recall,omitempty"` + // Output only. Precision under the given confidence threshold. + Precision float32 `protobuf:"fixed32,4,opt,name=precision,proto3" json:"precision,omitempty"` + // Output only. The harmonic mean of recall and precision. 
+ F1Score float32 `protobuf:"fixed32,5,opt,name=f1_score,json=f1Score,proto3" json:"f1_score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) Reset() { + *m = TextExtractionEvaluationMetrics_ConfidenceMetricsEntry{} +} +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) String() string { + return proto.CompactTextString(m) +} +func (*TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) ProtoMessage() {} +func (*TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_text_extraction_819b64ab635b378e, []int{1, 0} +} +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry.Unmarshal(m, b) +} +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry.Marshal(b, m, deterministic) +} +func (dst *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry.Merge(dst, src) +} +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) XXX_Size() int { + return xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry.Size(m) +} +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) XXX_DiscardUnknown() { + xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_TextExtractionEvaluationMetrics_ConfidenceMetricsEntry proto.InternalMessageInfo + +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) GetConfidenceThreshold() float32 { + if m != nil { + return m.ConfidenceThreshold + } + return 0 +} + +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) GetRecall() float32 { + if m != nil { + return m.Recall + } + return 0 +} + +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) GetPrecision() float32 { + if m != nil { + return m.Precision + } + return 0 +} + +func (m *TextExtractionEvaluationMetrics_ConfidenceMetricsEntry) GetF1Score() float32 { + if m != nil { + return m.F1Score + } + return 0 +} + +func init() { + proto.RegisterType((*TextExtractionAnnotation)(nil), "google.cloud.automl.v1beta1.TextExtractionAnnotation") + proto.RegisterType((*TextExtractionEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.TextExtractionEvaluationMetrics") + proto.RegisterType((*TextExtractionEvaluationMetrics_ConfidenceMetricsEntry)(nil), "google.cloud.automl.v1beta1.TextExtractionEvaluationMetrics.ConfidenceMetricsEntry") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/text_extraction.proto", fileDescriptor_text_extraction_819b64ab635b378e) +} + +var fileDescriptor_text_extraction_819b64ab635b378e = []byte{ + // 406 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x52, 0xcf, 0x6a, 0xdb, 0x30, + 0x18, 0xc7, 0xee, 0x9a, 0x6d, 0xca, 0x4e, 0x5a, 0x57, 0xbc, 0xb4, 0xd0, 0xd0, 0x53, 0x4e, 0x32, + 0xee, 0x8e, 0x3b, 0x65, 0x25, 0xec, 0x30, 0x0a, 0x25, 0x29, 0x3b, 0x8c, 0x80, 0x51, 0x95, 0x2f, + 0xae, 0x40, 0xd1, 0x67, 0xa4, 0xcf, 0x25, 0x3b, 0xec, 0xb6, 0x17, 0xd8, 0x7d, 0x4f, 0xb4, 0x57, + 0xd9, 0x4b, 0x0c, 0x4b, 0x6e, 0xd2, 0x41, 0x48, 
0x8e, 0x3f, 0xfd, 0xfe, 0x5a, 0x16, 0x2b, 0x2a, + 0xc4, 0xca, 0x40, 0xae, 0x0c, 0x36, 0x8b, 0x5c, 0x36, 0x84, 0x2b, 0x93, 0x3f, 0x16, 0xf7, 0x40, + 0xb2, 0xc8, 0x09, 0xd6, 0x54, 0xc2, 0x9a, 0x9c, 0x54, 0xa4, 0xd1, 0x8a, 0xda, 0x21, 0x21, 0x3f, + 0x8b, 0x16, 0x11, 0x2c, 0x22, 0x5a, 0x44, 0x67, 0x19, 0x9c, 0x77, 0x79, 0xb2, 0xd6, 0xb9, 0xb4, + 0x16, 0x49, 0xb6, 0x4e, 0x1f, 0xad, 0x03, 0x71, 0xb0, 0xcd, 0x43, 0xb5, 0x02, 0x4b, 0x51, 0x7f, + 0xf9, 0x83, 0x65, 0x77, 0xb0, 0xa6, 0xc9, 0x66, 0xc2, 0x78, 0x13, 0xc9, 0x4f, 0xd8, 0xb1, 0x57, + 0xe8, 0x20, 0x4b, 0x86, 0xc9, 0x28, 0x9d, 0x46, 0xc0, 0xbf, 0xb0, 0x37, 0xcf, 0x73, 0xb2, 0xa3, + 0x61, 0x32, 0xea, 0x5f, 0x8d, 0xc4, 0x9e, 0xcd, 0xa2, 0xad, 0x98, 0x45, 0xfd, 0xb4, 0x4f, 0x5b, + 0x70, 0xf9, 0x37, 0x65, 0x17, 0xff, 0xf7, 0x4f, 0x1e, 0xa5, 0x69, 0x42, 0xff, 0x0d, 0x90, 0xd3, + 0xca, 0xf3, 0x77, 0xac, 0x27, 0x9b, 0xb2, 0x76, 0xea, 0x69, 0x87, 0x6c, 0x6e, 0x9d, 0xe2, 0xbf, + 0x12, 0x36, 0x50, 0x68, 0x97, 0x7a, 0x01, 0x56, 0x41, 0xb9, 0x8a, 0xea, 0x12, 0x2c, 0x39, 0x0d, + 0x3e, 0x4b, 0x87, 0x47, 0xa3, 0xfe, 0xd5, 0xec, 0xe0, 0xac, 0x3d, 0xcd, 0xe2, 0x7a, 0x13, 0xdf, + 0x9d, 0x4c, 0x2c, 0xb9, 0xef, 0xd3, 0x4c, 0xed, 0x3a, 0xd7, 0xe0, 0x07, 0xbf, 0x13, 0x76, 0xba, + 0xdb, 0xc4, 0x0b, 0x76, 0xf2, 0x6c, 0x2d, 0x3d, 0x38, 0xf0, 0x0f, 0x68, 0x16, 0xdd, 0x37, 0xbd, + 0xdd, 0x72, 0x77, 0x4f, 0x14, 0x3f, 0x65, 0x3d, 0x07, 0x4a, 0x1a, 0x13, 0xee, 0x38, 0x9d, 0x76, + 0x88, 0x9f, 0xb3, 0xd7, 0xb5, 0x03, 0xa5, 0xbd, 0x46, 0x9b, 0xbd, 0x08, 0xd4, 0xf6, 0x80, 0xbf, + 0x67, 0xaf, 0x96, 0x45, 0x19, 0x7f, 0xdc, 0x71, 0x20, 0x5f, 0x2e, 0x8b, 0x59, 0x0b, 0x3f, 0xfd, + 0x4c, 0xd8, 0x85, 0xc2, 0xd5, 0xbe, 0x3b, 0xb9, 0x4d, 0xbe, 0x8d, 0x3b, 0xba, 0x42, 0x23, 0x6d, + 0x25, 0xd0, 0x55, 0x79, 0x05, 0x36, 0x3c, 0x97, 0x3c, 0x52, 0xb2, 0xd6, 0x7e, 0xe7, 0x0b, 0xfb, + 0x18, 0xe1, 0x9f, 0xf4, 0xec, 0x73, 0x10, 0xce, 0xaf, 0x5b, 0xd1, 0x7c, 0xdc, 0x10, 0xde, 0x98, + 0xf9, 0xd7, 0x28, 0xba, 0xef, 0x85, 0xac, 0x0f, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x68, 0x3e, + 0x66, 0x0a, 0x1a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_segment.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_segment.pb.go new file mode 100644 index 0000000..18aaf87 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_segment.pb.go @@ -0,0 +1,110 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/text_segment.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A contiguous part of a text (string), assuming it has an UTF-8 NFC encoding. +// . +type TextSegment struct { + // Output only. The content of the TextSegment. + Content string `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + // Required. 
Zero-based character index of the first character of the text + // segment (counting characters from the beginning of the text). + StartOffset int64 `protobuf:"varint,1,opt,name=start_offset,json=startOffset,proto3" json:"start_offset,omitempty"` + // Required. Zero-based character index of the first character past the end of + // the text segment (counting character from the beginning of the text). + // The character at the end_offset is NOT included in the text segment. + EndOffset int64 `protobuf:"varint,2,opt,name=end_offset,json=endOffset,proto3" json:"end_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSegment) Reset() { *m = TextSegment{} } +func (m *TextSegment) String() string { return proto.CompactTextString(m) } +func (*TextSegment) ProtoMessage() {} +func (*TextSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_text_segment_7b666583e5e1b1ed, []int{0} +} +func (m *TextSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSegment.Unmarshal(m, b) +} +func (m *TextSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSegment.Marshal(b, m, deterministic) +} +func (dst *TextSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSegment.Merge(dst, src) +} +func (m *TextSegment) XXX_Size() int { + return xxx_messageInfo_TextSegment.Size(m) +} +func (m *TextSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TextSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSegment proto.InternalMessageInfo + +func (m *TextSegment) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *TextSegment) GetStartOffset() int64 { + if m != nil { + return m.StartOffset + } + return 0 +} + +func (m *TextSegment) GetEndOffset() int64 { + if m != nil { + return m.EndOffset + } + return 0 +} + +func init() { + proto.RegisterType((*TextSegment)(nil), "google.cloud.automl.v1beta1.TextSegment") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/text_segment.proto", fileDescriptor_text_segment_7b666583e5e1b1ed) +} + +var fileDescriptor_text_segment_7b666583e5e1b1ed = []byte{ + // 255 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xd0, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0x06, 0x60, 0xd2, 0x82, 0xd2, 0xd4, 0x83, 0xec, 0x69, 0xb1, 0x8a, 0xd5, 0x53, 0x4f, 0x09, + 0xc5, 0xa3, 0xa7, 0xd6, 0x83, 0x27, 0xb1, 0x54, 0xf1, 0x20, 0x0b, 0x25, 0xdd, 0x9d, 0x86, 0x40, + 0x76, 0x66, 0xd9, 0xcc, 0x4a, 0x9f, 0xc4, 0x87, 0xf2, 0xa9, 0xa4, 0xc9, 0x16, 0x3c, 0x48, 0x8f, + 0x99, 0xf9, 0xe6, 0xe7, 0x27, 0x52, 0x59, 0x22, 0xeb, 0x41, 0x97, 0x9e, 0xba, 0x4a, 0x9b, 0x8e, + 0xa9, 0xf6, 0xfa, 0x6b, 0xbe, 0x05, 0x36, 0x73, 0xcd, 0xb0, 0xe7, 0x4d, 0x00, 0x5b, 0x03, 0xb2, + 0x6a, 0x5a, 0x62, 0xca, 0x26, 0xc9, 0xab, 0xe8, 0x55, 0xf2, 0xaa, 0xf7, 0x57, 0xd7, 0x7d, 0x98, + 0x69, 0x9c, 0x36, 0x88, 0xc4, 0x86, 0x1d, 0x61, 0x48, 0xa7, 0xf7, 0x4e, 0x8e, 0xdf, 0x61, 0xcf, + 0x6f, 0x29, 0x2f, 0xcb, 0xe5, 0x79, 0x49, 0xc8, 0x80, 0x9c, 0x0f, 0xa7, 0x62, 0x36, 0x5a, 0x1f, + 0x9f, 0xd9, 0x9d, 0xbc, 0x08, 0x6c, 0x5a, 0xde, 0xd0, 0x6e, 0x17, 0x80, 0x73, 0x31, 0x15, 0xb3, + 0xe1, 0x7a, 0x1c, 0x67, 0xaf, 0x71, 0x94, 0xdd, 0x48, 0x09, 0x58, 0x1d, 0xc1, 0x20, 0x82, 0x11, + 0x60, 0x95, 0xd6, 0xcb, 0x6f, 0x21, 0x6f, 0x4b, 0xaa, 0xd5, 0x89, 0xb2, 0xcb, 0xcb, 0x3f, 0x65, + 0x56, 0x87, 0x82, 0x2b, 0xf1, 0xb9, 0xe8, 0x0f, 0x2c, 0x79, 0x83, 0x56, 0x51, 0x6b, 0xb5, 0x05, + 0x8c, 
0xf5, 0x75, 0x5a, 0x99, 0xc6, 0x85, 0x7f, 0x3f, 0xeb, 0x31, 0x3d, 0x7f, 0x06, 0x93, 0xe7, + 0x08, 0x8b, 0xa7, 0x03, 0x2a, 0x16, 0x1d, 0xd3, 0x8b, 0x2f, 0x3e, 0x12, 0xda, 0x9e, 0xc5, 0xac, + 0x87, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7b, 0x39, 0x59, 0x73, 0x77, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_sentiment.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_sentiment.pb.go new file mode 100644 index 0000000..28c057e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/text_sentiment.pb.go @@ -0,0 +1,235 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/text_sentiment.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Contains annotation details specific to text sentiment. +type TextSentimentAnnotation struct { + // Output only. The sentiment with the semantic, as given to the + // [AutoMl.ImportData][google.cloud.automl.v1beta1.AutoMl.ImportData] when populating the dataset from which the model used + // for the prediction had been trained. + // The sentiment values are between 0 and + // Dataset.text_sentiment_dataset_metadata.sentiment_max (inclusive), + // with higher value meaning more positive sentiment. They are completely + // relative, i.e. 0 means least positive sentiment and sentiment_max means + // the most positive from the sentiments present in the train data. Therefore + // e.g. if train data had only negative sentiment, then sentiment_max, would + // be still negative (although least negative). + // The sentiment shouldn't be confused with "score" or "magnitude" + // from the previous Natural Language Sentiment Analysis API. 
+ Sentiment int32 `protobuf:"varint,1,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSentimentAnnotation) Reset() { *m = TextSentimentAnnotation{} } +func (m *TextSentimentAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextSentimentAnnotation) ProtoMessage() {} +func (*TextSentimentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_sentiment_5175e400ab562620, []int{0} +} +func (m *TextSentimentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSentimentAnnotation.Unmarshal(m, b) +} +func (m *TextSentimentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSentimentAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextSentimentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSentimentAnnotation.Merge(dst, src) +} +func (m *TextSentimentAnnotation) XXX_Size() int { + return xxx_messageInfo_TextSentimentAnnotation.Size(m) +} +func (m *TextSentimentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextSentimentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSentimentAnnotation proto.InternalMessageInfo + +func (m *TextSentimentAnnotation) GetSentiment() int32 { + if m != nil { + return m.Sentiment + } + return 0 +} + +// Model evaluation metrics for text sentiment problems. +type TextSentimentEvaluationMetrics struct { + // Output only. Precision. + Precision float32 `protobuf:"fixed32,1,opt,name=precision,proto3" json:"precision,omitempty"` + // Output only. Recall. + Recall float32 `protobuf:"fixed32,2,opt,name=recall,proto3" json:"recall,omitempty"` + // Output only. The harmonic mean of recall and precision. + F1Score float32 `protobuf:"fixed32,3,opt,name=f1_score,json=f1Score,proto3" json:"f1_score,omitempty"` + // Output only. Mean absolute error. Only set for the overall model + // evaluation, not for evaluation of a single annotation spec. + MeanAbsoluteError float32 `protobuf:"fixed32,4,opt,name=mean_absolute_error,json=meanAbsoluteError,proto3" json:"mean_absolute_error,omitempty"` + // Output only. Mean squared error. Only set for the overall model + // evaluation, not for evaluation of a single annotation spec. + MeanSquaredError float32 `protobuf:"fixed32,5,opt,name=mean_squared_error,json=meanSquaredError,proto3" json:"mean_squared_error,omitempty"` + // Output only. Linear weighted kappa. Only set for the overall model + // evaluation, not for evaluation of a single annotation spec. + LinearKappa float32 `protobuf:"fixed32,6,opt,name=linear_kappa,json=linearKappa,proto3" json:"linear_kappa,omitempty"` + // Output only. Quadratic weighted kappa. Only set for the overall model + // evaluation, not for evaluation of a single annotation spec. + QuadraticKappa float32 `protobuf:"fixed32,7,opt,name=quadratic_kappa,json=quadraticKappa,proto3" json:"quadratic_kappa,omitempty"` + // Output only. Confusion matrix of the evaluation. + // Only set for the overall model evaluation, not for evaluation of a single + // annotation spec. + ConfusionMatrix *ClassificationEvaluationMetrics_ConfusionMatrix `protobuf:"bytes,8,opt,name=confusion_matrix,json=confusionMatrix,proto3" json:"confusion_matrix,omitempty"` + // Output only. The annotation spec ids used for this evaluation. 
+ // Deprecated, remove after Boq Migration and use then + // TextSentimentModelMetadata.annotation_spec_count for count, and list + // all model evaluations to see the exact annotation_spec_ids that were + // used. + AnnotationSpecId []string `protobuf:"bytes,9,rep,name=annotation_spec_id,json=annotationSpecId,proto3" json:"annotation_spec_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSentimentEvaluationMetrics) Reset() { *m = TextSentimentEvaluationMetrics{} } +func (m *TextSentimentEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*TextSentimentEvaluationMetrics) ProtoMessage() {} +func (*TextSentimentEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_text_sentiment_5175e400ab562620, []int{1} +} +func (m *TextSentimentEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSentimentEvaluationMetrics.Unmarshal(m, b) +} +func (m *TextSentimentEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSentimentEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *TextSentimentEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSentimentEvaluationMetrics.Merge(dst, src) +} +func (m *TextSentimentEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_TextSentimentEvaluationMetrics.Size(m) +} +func (m *TextSentimentEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_TextSentimentEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSentimentEvaluationMetrics proto.InternalMessageInfo + +func (m *TextSentimentEvaluationMetrics) GetPrecision() float32 { + if m != nil { + return m.Precision + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetRecall() float32 { + if m != nil { + return m.Recall + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetF1Score() float32 { + if m != nil { + return m.F1Score + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetMeanAbsoluteError() float32 { + if m != nil { + return m.MeanAbsoluteError + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetMeanSquaredError() float32 { + if m != nil { + return m.MeanSquaredError + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetLinearKappa() float32 { + if m != nil { + return m.LinearKappa + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetQuadraticKappa() float32 { + if m != nil { + return m.QuadraticKappa + } + return 0 +} + +func (m *TextSentimentEvaluationMetrics) GetConfusionMatrix() *ClassificationEvaluationMetrics_ConfusionMatrix { + if m != nil { + return m.ConfusionMatrix + } + return nil +} + +func (m *TextSentimentEvaluationMetrics) GetAnnotationSpecId() []string { + if m != nil { + return m.AnnotationSpecId + } + return nil +} + +func init() { + proto.RegisterType((*TextSentimentAnnotation)(nil), "google.cloud.automl.v1beta1.TextSentimentAnnotation") + proto.RegisterType((*TextSentimentEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.TextSentimentEvaluationMetrics") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/text_sentiment.proto", fileDescriptor_text_sentiment_5175e400ab562620) +} + +var fileDescriptor_text_sentiment_5175e400ab562620 = []byte{ + // 452 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x6f, 0x13, 0x31, + 0x10, 0x85, 0x95, 0x84, 0xa6, 0x8d, 0x8b, 
0x68, 0x30, 0x12, 0x2c, 0x6d, 0x05, 0xa1, 0x17, 0x72, + 0x40, 0x5e, 0x02, 0x07, 0x0e, 0x9c, 0xd2, 0xa8, 0x42, 0x08, 0x22, 0xa1, 0x04, 0x71, 0x40, 0x91, + 0x56, 0x13, 0xef, 0x64, 0x65, 0xe1, 0xb5, 0xb7, 0xb6, 0xb7, 0xe4, 0x97, 0xf0, 0x83, 0x38, 0xf2, + 0xab, 0x90, 0xed, 0x4d, 0xa2, 0x08, 0x94, 0xa3, 0xdf, 0xfb, 0xde, 0x5b, 0x7b, 0x67, 0xc8, 0xeb, + 0x42, 0xeb, 0x42, 0x62, 0xca, 0xa5, 0xae, 0xf3, 0x14, 0x6a, 0xa7, 0x4b, 0x99, 0xde, 0x8d, 0x96, + 0xe8, 0x60, 0x94, 0x3a, 0x5c, 0xbb, 0xcc, 0xa2, 0x72, 0xa2, 0x44, 0xe5, 0x58, 0x65, 0xb4, 0xd3, + 0xf4, 0x22, 0x26, 0x58, 0x48, 0xb0, 0x98, 0x60, 0x4d, 0xe2, 0xfc, 0xb2, 0xa9, 0x83, 0x4a, 0xa4, + 0xa0, 0x94, 0x76, 0xe0, 0x84, 0x56, 0x36, 0x46, 0xcf, 0x0f, 0x7e, 0x8c, 0x4b, 0xb0, 0x56, 0xac, + 0x04, 0x0f, 0x91, 0x98, 0xb8, 0x7a, 0x47, 0x9e, 0x7c, 0xc5, 0xb5, 0x9b, 0x6f, 0xee, 0x30, 0xde, + 0x76, 0xd2, 0x4b, 0xd2, 0xdb, 0x5e, 0x2d, 0x69, 0x0d, 0x5a, 0xc3, 0xa3, 0xd9, 0x4e, 0xb8, 0xfa, + 0xdd, 0x21, 0xcf, 0xf6, 0x92, 0x37, 0x77, 0x20, 0xeb, 0x90, 0x9c, 0xa2, 0x33, 0x82, 0x5b, 0x5f, + 0x50, 0x19, 0xe4, 0xc2, 0x0a, 0xad, 0x42, 0x41, 0x7b, 0xb6, 0x13, 0xe8, 0x63, 0xd2, 0x35, 0xc8, + 0x41, 0xca, 0xa4, 0x1d, 0xac, 0xe6, 0x44, 0x9f, 0x92, 0x93, 0xd5, 0x28, 0xb3, 0x5c, 0x1b, 0x4c, + 0x3a, 0xc1, 0x39, 0x5e, 0x8d, 0xe6, 0xfe, 0x48, 0x19, 0x79, 0x54, 0x22, 0xa8, 0x0c, 0x96, 0x56, + 0xcb, 0xda, 0x61, 0x86, 0xc6, 0x68, 0x93, 0xdc, 0x0b, 0xd4, 0x43, 0x6f, 0x8d, 0x1b, 0xe7, 0xc6, + 0x1b, 0xf4, 0x15, 0xa1, 0x81, 0xb7, 0xb7, 0x35, 0x18, 0xcc, 0x1b, 0xfc, 0x28, 0xe0, 0x7d, 0xef, + 0xcc, 0xa3, 0x11, 0xe9, 0x17, 0xe4, 0xbe, 0x14, 0x0a, 0xc1, 0x64, 0x3f, 0xa0, 0xaa, 0x20, 0xe9, + 0x06, 0xee, 0x34, 0x6a, 0x9f, 0xbc, 0x44, 0x5f, 0x92, 0xb3, 0xdb, 0x1a, 0x72, 0x03, 0x4e, 0xf0, + 0x86, 0x3a, 0x0e, 0xd4, 0x83, 0xad, 0x1c, 0xc1, 0x9f, 0xa4, 0xcf, 0xb5, 0x5a, 0xd5, 0xfe, 0xa5, + 0x59, 0x09, 0xce, 0x88, 0x75, 0x72, 0x32, 0x68, 0x0d, 0x4f, 0xdf, 0x7c, 0x66, 0x07, 0xc6, 0xcb, + 0x26, 0x7b, 0x33, 0xfa, 0xe7, 0x97, 0xb2, 0xc9, 0xa6, 0x74, 0x1a, 0x3a, 0x67, 0x67, 0x7c, 0x5f, + 0xf0, 0x4f, 0xde, 0xad, 0x45, 0x66, 0x2b, 0xe4, 0x99, 0xc8, 0x93, 0xde, 0xa0, 0x33, 0xec, 0xcd, + 0xfa, 0x3b, 0x67, 0x5e, 0x21, 0xff, 0x98, 0x5f, 0xff, 0x6a, 0x91, 0xe7, 0x5c, 0x97, 0x87, 0xae, + 0x74, 0x4d, 0xf7, 0xa6, 0xfc, 0xc5, 0x6f, 0xcd, 0xf7, 0x71, 0x13, 0x28, 0xb4, 0x04, 0x55, 0x30, + 0x6d, 0x8a, 0xb4, 0x40, 0x15, 0x36, 0x2a, 0x8d, 0x16, 0x54, 0xc2, 0xfe, 0x77, 0x0d, 0xdf, 0xc7, + 0xe3, 0x9f, 0xf6, 0xc5, 0x87, 0x00, 0x2e, 0x26, 0x1e, 0x5a, 0x8c, 0x6b, 0xa7, 0xa7, 0x72, 0xf1, + 0x2d, 0x42, 0xcb, 0x6e, 0xe8, 0x7a, 0xfb, 0x37, 0x00, 0x00, 0xff, 0xff, 0xa7, 0xb8, 0xf3, 0x06, + 0x3e, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/translation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/translation.pb.go new file mode 100644 index 0000000..e899235 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/translation.pb.go @@ -0,0 +1,261 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/translation.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Dataset metadata that is specific to translation. +type TranslationDatasetMetadata struct { + // Required. The BCP-47 language code of the source language. + SourceLanguageCode string `protobuf:"bytes,1,opt,name=source_language_code,json=sourceLanguageCode,proto3" json:"source_language_code,omitempty"` + // Required. The BCP-47 language code of the target language. + TargetLanguageCode string `protobuf:"bytes,2,opt,name=target_language_code,json=targetLanguageCode,proto3" json:"target_language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslationDatasetMetadata) Reset() { *m = TranslationDatasetMetadata{} } +func (m *TranslationDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*TranslationDatasetMetadata) ProtoMessage() {} +func (*TranslationDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_a3798f334391f423, []int{0} +} +func (m *TranslationDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslationDatasetMetadata.Unmarshal(m, b) +} +func (m *TranslationDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslationDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *TranslationDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslationDatasetMetadata.Merge(dst, src) +} +func (m *TranslationDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_TranslationDatasetMetadata.Size(m) +} +func (m *TranslationDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TranslationDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslationDatasetMetadata proto.InternalMessageInfo + +func (m *TranslationDatasetMetadata) GetSourceLanguageCode() string { + if m != nil { + return m.SourceLanguageCode + } + return "" +} + +func (m *TranslationDatasetMetadata) GetTargetLanguageCode() string { + if m != nil { + return m.TargetLanguageCode + } + return "" +} + +// Evaluation metrics for the dataset. +type TranslationEvaluationMetrics struct { + // Output only. BLEU score. + BleuScore float64 `protobuf:"fixed64,1,opt,name=bleu_score,json=bleuScore,proto3" json:"bleu_score,omitempty"` + // Output only. BLEU score for base model. 
+ BaseBleuScore float64 `protobuf:"fixed64,2,opt,name=base_bleu_score,json=baseBleuScore,proto3" json:"base_bleu_score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslationEvaluationMetrics) Reset() { *m = TranslationEvaluationMetrics{} } +func (m *TranslationEvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*TranslationEvaluationMetrics) ProtoMessage() {} +func (*TranslationEvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_a3798f334391f423, []int{1} +} +func (m *TranslationEvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslationEvaluationMetrics.Unmarshal(m, b) +} +func (m *TranslationEvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslationEvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *TranslationEvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslationEvaluationMetrics.Merge(dst, src) +} +func (m *TranslationEvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_TranslationEvaluationMetrics.Size(m) +} +func (m *TranslationEvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_TranslationEvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslationEvaluationMetrics proto.InternalMessageInfo + +func (m *TranslationEvaluationMetrics) GetBleuScore() float64 { + if m != nil { + return m.BleuScore + } + return 0 +} + +func (m *TranslationEvaluationMetrics) GetBaseBleuScore() float64 { + if m != nil { + return m.BaseBleuScore + } + return 0 +} + +// Model metadata that is specific to translation. +type TranslationModelMetadata struct { + // The resource name of the model to use as a baseline to train the custom + // model. If unset, we use the default base model provided by Google + // Translate. Format: + // `projects/{project_id}/locations/{location_id}/models/{model_id}` + BaseModel string `protobuf:"bytes,1,opt,name=base_model,json=baseModel,proto3" json:"base_model,omitempty"` + // Output only. Inferred from the dataset. + // The source languge (The BCP-47 language code) that is used for training. + SourceLanguageCode string `protobuf:"bytes,2,opt,name=source_language_code,json=sourceLanguageCode,proto3" json:"source_language_code,omitempty"` + // Output only. The target languge (The BCP-47 language code) that is used for + // training. 
+ TargetLanguageCode string `protobuf:"bytes,3,opt,name=target_language_code,json=targetLanguageCode,proto3" json:"target_language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslationModelMetadata) Reset() { *m = TranslationModelMetadata{} } +func (m *TranslationModelMetadata) String() string { return proto.CompactTextString(m) } +func (*TranslationModelMetadata) ProtoMessage() {} +func (*TranslationModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_a3798f334391f423, []int{2} +} +func (m *TranslationModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslationModelMetadata.Unmarshal(m, b) +} +func (m *TranslationModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslationModelMetadata.Marshal(b, m, deterministic) +} +func (dst *TranslationModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslationModelMetadata.Merge(dst, src) +} +func (m *TranslationModelMetadata) XXX_Size() int { + return xxx_messageInfo_TranslationModelMetadata.Size(m) +} +func (m *TranslationModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_TranslationModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslationModelMetadata proto.InternalMessageInfo + +func (m *TranslationModelMetadata) GetBaseModel() string { + if m != nil { + return m.BaseModel + } + return "" +} + +func (m *TranslationModelMetadata) GetSourceLanguageCode() string { + if m != nil { + return m.SourceLanguageCode + } + return "" +} + +func (m *TranslationModelMetadata) GetTargetLanguageCode() string { + if m != nil { + return m.TargetLanguageCode + } + return "" +} + +// Annotation details specific to translation. +type TranslationAnnotation struct { + // Output only . The translated content. 
+ TranslatedContent *TextSnippet `protobuf:"bytes,1,opt,name=translated_content,json=translatedContent,proto3" json:"translated_content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslationAnnotation) Reset() { *m = TranslationAnnotation{} } +func (m *TranslationAnnotation) String() string { return proto.CompactTextString(m) } +func (*TranslationAnnotation) ProtoMessage() {} +func (*TranslationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_a3798f334391f423, []int{3} +} +func (m *TranslationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslationAnnotation.Unmarshal(m, b) +} +func (m *TranslationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslationAnnotation.Marshal(b, m, deterministic) +} +func (dst *TranslationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslationAnnotation.Merge(dst, src) +} +func (m *TranslationAnnotation) XXX_Size() int { + return xxx_messageInfo_TranslationAnnotation.Size(m) +} +func (m *TranslationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TranslationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslationAnnotation proto.InternalMessageInfo + +func (m *TranslationAnnotation) GetTranslatedContent() *TextSnippet { + if m != nil { + return m.TranslatedContent + } + return nil +} + +func init() { + proto.RegisterType((*TranslationDatasetMetadata)(nil), "google.cloud.automl.v1beta1.TranslationDatasetMetadata") + proto.RegisterType((*TranslationEvaluationMetrics)(nil), "google.cloud.automl.v1beta1.TranslationEvaluationMetrics") + proto.RegisterType((*TranslationModelMetadata)(nil), "google.cloud.automl.v1beta1.TranslationModelMetadata") + proto.RegisterType((*TranslationAnnotation)(nil), "google.cloud.automl.v1beta1.TranslationAnnotation") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/translation.proto", fileDescriptor_translation_a3798f334391f423) +} + +var fileDescriptor_translation_a3798f334391f423 = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0xcb, 0xd3, 0x40, + 0x10, 0xc6, 0x49, 0x04, 0xa1, 0x2b, 0xa2, 0x06, 0x85, 0x97, 0xbe, 0x15, 0xa5, 0x07, 0xe9, 0x41, + 0x13, 0xab, 0x47, 0x4f, 0x6d, 0x15, 0x2f, 0x06, 0x4a, 0x5b, 0x14, 0xa4, 0x10, 0x26, 0xc9, 0xb0, + 0x04, 0x36, 0x3b, 0x21, 0x3b, 0x29, 0x1e, 0xfd, 0x14, 0xde, 0xfc, 0x42, 0x7e, 0x2a, 0xd9, 0xdd, + 0xb4, 0x0d, 0x62, 0x0b, 0xef, 0x2d, 0x99, 0xf9, 0x3d, 0xcf, 0xfc, 0xd9, 0x11, 0x6f, 0x24, 0x91, + 0x54, 0x98, 0x14, 0x8a, 0xba, 0x32, 0x81, 0x8e, 0xa9, 0x56, 0xc9, 0x61, 0x9e, 0x23, 0xc3, 0x3c, + 0xe1, 0x16, 0xb4, 0x51, 0xc0, 0x15, 0xe9, 0xb8, 0x69, 0x89, 0x29, 0xba, 0xf5, 0x78, 0xec, 0xf0, + 0xd8, 0xe3, 0x71, 0x8f, 0x8f, 0x27, 0xbd, 0x17, 0x34, 0x55, 0x02, 0x5a, 0x13, 0x3b, 0xa5, 0xf1, + 0xd2, 0xf1, 0xeb, 0x6b, 0x95, 0x4a, 0x60, 0xc8, 0x2a, 0xc6, 0xba, 0xa7, 0xa7, 0x3f, 0x03, 0x31, + 0xde, 0x9d, 0xcb, 0x7f, 0x04, 0x06, 0x83, 0x9c, 0x22, 0x83, 0x45, 0xa3, 0xb7, 0xe2, 0xa9, 0xa1, + 0xae, 0x2d, 0x30, 0x53, 0xa0, 0x65, 0x07, 0x12, 0xb3, 0x82, 0x4a, 0xbc, 0x09, 0x5e, 0x06, 0xb3, + 0xd1, 0x26, 0xf2, 0xb9, 0x2f, 0x7d, 0x6a, 0x45, 0x25, 0x5a, 0x05, 0x43, 0x2b, 0x91, 0xff, 0x51, + 0x84, 0x5e, 0xe1, 0x73, 0x43, 0xc5, 0x14, 0xc5, 0x64, 0xd0, 0xc1, 0xa7, 0x03, 0xa8, 0xce, 0x7d, + 0xa5, 0xc8, 0x6d, 0x55, 0x98, 0xe8, 0xb9, 0x10, 0xb9, 0xc2, 0x2e, 0x33, 0x05, 0xb5, 0xbe, 0x72, + 
0xb0, 0x19, 0xd9, 0xc8, 0xd6, 0x06, 0xa2, 0x57, 0xe2, 0x51, 0x0e, 0x06, 0xb3, 0x01, 0x13, 0x3a, + 0xe6, 0xa1, 0x0d, 0x2f, 0x8f, 0xdc, 0xf4, 0x77, 0x20, 0x6e, 0x06, 0x75, 0x52, 0x2a, 0x51, 0x9d, + 0xe6, 0xb4, 0x35, 0xac, 0x49, 0x6d, 0xa3, 0xfd, 0x74, 0x23, 0x1b, 0x71, 0xd8, 0xc5, 0x35, 0x84, + 0x77, 0x5e, 0xc3, 0xbd, 0x8b, 0x6b, 0x68, 0xc4, 0xb3, 0x41, 0x7b, 0x8b, 0xd3, 0xbb, 0x46, 0xdf, + 0x44, 0x74, 0x3c, 0x10, 0x2c, 0xb3, 0x82, 0x34, 0xa3, 0x66, 0xd7, 0xe3, 0x83, 0x77, 0xb3, 0xf8, + 0xca, 0xa1, 0xc4, 0x3b, 0xfc, 0xc1, 0x5b, 0x5d, 0x35, 0x0d, 0xf2, 0xe6, 0xc9, 0xd9, 0x63, 0xe5, + 0x2d, 0x96, 0xbf, 0x02, 0xf1, 0xa2, 0xa0, 0xfa, 0x9a, 0xc5, 0xf2, 0xf1, 0xa0, 0xa7, 0xb5, 0xbd, + 0x98, 0x75, 0xf0, 0x7d, 0xd1, 0x0b, 0x24, 0xd9, 0xd9, 0x62, 0x6a, 0x65, 0x22, 0x51, 0xbb, 0x7b, + 0x4a, 0x7c, 0x0a, 0x9a, 0xca, 0xfc, 0xf7, 0x00, 0x3f, 0xf8, 0xdf, 0x3f, 0xe1, 0xed, 0x67, 0x07, + 0xee, 0x57, 0x16, 0xda, 0x2f, 0x3a, 0xa6, 0x54, 0xed, 0xbf, 0x7a, 0x28, 0xbf, 0xef, 0xbc, 0xde, + 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x5e, 0x30, 0x1b, 0xfc, 0x35, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/video.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/video.pb.go new file mode 100644 index 0000000..75587e9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/automl/v1beta1/video.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/automl/v1beta1/video.proto + +package automl // import "google.golang.org/genproto/googleapis/cloud/automl/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Dataset metadata specific to video classification. +// All Video Classification datasets are treated as multi label. 
+type VideoClassificationDatasetMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationDatasetMetadata) Reset() { *m = VideoClassificationDatasetMetadata{} } +func (m *VideoClassificationDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*VideoClassificationDatasetMetadata) ProtoMessage() {} +func (*VideoClassificationDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_video_1ed0bfbdbbce0c1a, []int{0} +} +func (m *VideoClassificationDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationDatasetMetadata.Unmarshal(m, b) +} +func (m *VideoClassificationDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationDatasetMetadata.Merge(dst, src) +} +func (m *VideoClassificationDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_VideoClassificationDatasetMetadata.Size(m) +} +func (m *VideoClassificationDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationDatasetMetadata proto.InternalMessageInfo + +// Model metadata specific to video classification. +type VideoClassificationModelMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationModelMetadata) Reset() { *m = VideoClassificationModelMetadata{} } +func (m *VideoClassificationModelMetadata) String() string { return proto.CompactTextString(m) } +func (*VideoClassificationModelMetadata) ProtoMessage() {} +func (*VideoClassificationModelMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_video_1ed0bfbdbbce0c1a, []int{1} +} +func (m *VideoClassificationModelMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationModelMetadata.Unmarshal(m, b) +} +func (m *VideoClassificationModelMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationModelMetadata.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationModelMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationModelMetadata.Merge(dst, src) +} +func (m *VideoClassificationModelMetadata) XXX_Size() int { + return xxx_messageInfo_VideoClassificationModelMetadata.Size(m) +} +func (m *VideoClassificationModelMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationModelMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationModelMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*VideoClassificationDatasetMetadata)(nil), "google.cloud.automl.v1beta1.VideoClassificationDatasetMetadata") + proto.RegisterType((*VideoClassificationModelMetadata)(nil), "google.cloud.automl.v1beta1.VideoClassificationModelMetadata") +} + +func init() { + proto.RegisterFile("google/cloud/automl/v1beta1/video.proto", fileDescriptor_video_1ed0bfbdbbce0c1a) +} + +var fileDescriptor_video_1ed0bfbdbbce0c1a = []byte{ + // 225 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xcf, 0x31, 0x4b, 0xc5, 0x30, + 0x10, 0x07, 0x70, 0x9e, 0x83, 0x43, 0x46, 0xc7, 0xf7, 0x04, 0x25, 
0x08, 0x6e, 0x89, 0x0f, 0x47, + 0xa7, 0xb6, 0x82, 0x53, 0xa1, 0x53, 0x07, 0xe9, 0x72, 0x4d, 0x62, 0x08, 0xa4, 0xb9, 0xd2, 0x5c, + 0xfb, 0x39, 0xfc, 0x5c, 0x7e, 0x2a, 0x69, 0x52, 0x07, 0xa1, 0x74, 0x0c, 0xf7, 0xbb, 0xff, 0xff, + 0xc2, 0x9e, 0x2d, 0xa2, 0xf5, 0x46, 0x2a, 0x8f, 0xb3, 0x96, 0x30, 0x13, 0x0e, 0x5e, 0x2e, 0xd7, + 0xde, 0x10, 0x5c, 0xe5, 0xe2, 0xb4, 0x41, 0x31, 0x4e, 0x48, 0x78, 0x77, 0xc9, 0x50, 0x24, 0x28, + 0x32, 0x14, 0x1b, 0x3c, 0xdf, 0x6f, 0x29, 0x30, 0x3a, 0x09, 0x21, 0x20, 0x01, 0x39, 0x0c, 0x31, + 0xaf, 0x9e, 0x5f, 0x8e, 0x3a, 0x94, 0x87, 0x18, 0xdd, 0x97, 0x53, 0x69, 0x25, 0x6f, 0xf0, 0x27, + 0xc6, 0xdb, 0xb5, 0xbb, 0xfa, 0x37, 0x7c, 0x07, 0x82, 0x68, 0xa8, 0x36, 0x04, 0x1a, 0x08, 0x38, + 0x67, 0x8f, 0x3b, 0xaa, 0x46, 0x6d, 0xfc, 0x9f, 0x29, 0xbf, 0x4f, 0xec, 0x41, 0xe1, 0x20, 0x0e, + 0xae, 0x2f, 0x59, 0x4a, 0x69, 0xd6, 0xe6, 0xe6, 0xf4, 0x59, 0x6c, 0xd4, 0xa2, 0x87, 0x60, 0x05, + 0x4e, 0x56, 0x5a, 0x13, 0xd2, 0x5d, 0x32, 0x8f, 0x60, 0x74, 0x71, 0xf7, 0x33, 0x6f, 0xf9, 0xf9, + 0x73, 0x73, 0xf9, 0x48, 0xb0, 0xab, 0x56, 0xd4, 0x15, 0x33, 0x61, 0xed, 0xbb, 0x36, 0xa3, 0xfe, + 0x36, 0x65, 0xbd, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0x34, 0x41, 0x62, 0x89, 0x7b, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/datatransfer.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/datatransfer.pb.go new file mode 100644 index 0000000..0ceb689 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/datatransfer.pb.go @@ -0,0 +1,2302 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/datatransfer/v1/datatransfer.proto + +package datatransfer // import "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Parameter type. +type DataSourceParameter_Type int32 + +const ( + // Type unspecified. + DataSourceParameter_TYPE_UNSPECIFIED DataSourceParameter_Type = 0 + // String parameter. + DataSourceParameter_STRING DataSourceParameter_Type = 1 + // Integer parameter (64-bits). + // Will be serialized to json as string. + DataSourceParameter_INTEGER DataSourceParameter_Type = 2 + // Double precision floating point parameter. + DataSourceParameter_DOUBLE DataSourceParameter_Type = 3 + // Boolean parameter. + DataSourceParameter_BOOLEAN DataSourceParameter_Type = 4 + // Record parameter. + DataSourceParameter_RECORD DataSourceParameter_Type = 5 + // Page ID for a Google+ Page. 
+ DataSourceParameter_PLUS_PAGE DataSourceParameter_Type = 6 +) + +var DataSourceParameter_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "STRING", + 2: "INTEGER", + 3: "DOUBLE", + 4: "BOOLEAN", + 5: "RECORD", + 6: "PLUS_PAGE", +} +var DataSourceParameter_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "STRING": 1, + "INTEGER": 2, + "DOUBLE": 3, + "BOOLEAN": 4, + "RECORD": 5, + "PLUS_PAGE": 6, +} + +func (x DataSourceParameter_Type) String() string { + return proto.EnumName(DataSourceParameter_Type_name, int32(x)) +} +func (DataSourceParameter_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{0, 0} +} + +// The type of authorization needed for this data source. +type DataSource_AuthorizationType int32 + +const ( + // Type unspecified. + DataSource_AUTHORIZATION_TYPE_UNSPECIFIED DataSource_AuthorizationType = 0 + // Use OAuth 2 authorization codes that can be exchanged + // for a refresh token on the backend. + DataSource_AUTHORIZATION_CODE DataSource_AuthorizationType = 1 + // Return an authorization code for a given Google+ page that can then be + // exchanged for a refresh token on the backend. + DataSource_GOOGLE_PLUS_AUTHORIZATION_CODE DataSource_AuthorizationType = 2 +) + +var DataSource_AuthorizationType_name = map[int32]string{ + 0: "AUTHORIZATION_TYPE_UNSPECIFIED", + 1: "AUTHORIZATION_CODE", + 2: "GOOGLE_PLUS_AUTHORIZATION_CODE", +} +var DataSource_AuthorizationType_value = map[string]int32{ + "AUTHORIZATION_TYPE_UNSPECIFIED": 0, + "AUTHORIZATION_CODE": 1, + "GOOGLE_PLUS_AUTHORIZATION_CODE": 2, +} + +func (x DataSource_AuthorizationType) String() string { + return proto.EnumName(DataSource_AuthorizationType_name, int32(x)) +} +func (DataSource_AuthorizationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{1, 0} +} + +// Represents how the data source supports data auto refresh. +type DataSource_DataRefreshType int32 + +const ( + // The data source won't support data auto refresh, which is default value. + DataSource_DATA_REFRESH_TYPE_UNSPECIFIED DataSource_DataRefreshType = 0 + // The data source supports data auto refresh, and runs will be scheduled + // for the past few days. Does not allow custom values to be set for each + // transfer config. + DataSource_SLIDING_WINDOW DataSource_DataRefreshType = 1 + // The data source supports data auto refresh, and runs will be scheduled + // for the past few days. Allows custom values to be set for each transfer + // config. + DataSource_CUSTOM_SLIDING_WINDOW DataSource_DataRefreshType = 2 +) + +var DataSource_DataRefreshType_name = map[int32]string{ + 0: "DATA_REFRESH_TYPE_UNSPECIFIED", + 1: "SLIDING_WINDOW", + 2: "CUSTOM_SLIDING_WINDOW", +} +var DataSource_DataRefreshType_value = map[string]int32{ + "DATA_REFRESH_TYPE_UNSPECIFIED": 0, + "SLIDING_WINDOW": 1, + "CUSTOM_SLIDING_WINDOW": 2, +} + +func (x DataSource_DataRefreshType) String() string { + return proto.EnumName(DataSource_DataRefreshType_name, int32(x)) +} +func (DataSource_DataRefreshType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{1, 1} +} + +// Represents which runs should be pulled. +type ListTransferRunsRequest_RunAttempt int32 + +const ( + // All runs should be returned. + ListTransferRunsRequest_RUN_ATTEMPT_UNSPECIFIED ListTransferRunsRequest_RunAttempt = 0 + // Only latest run per day should be returned. 
+ ListTransferRunsRequest_LATEST ListTransferRunsRequest_RunAttempt = 1 +) + +var ListTransferRunsRequest_RunAttempt_name = map[int32]string{ + 0: "RUN_ATTEMPT_UNSPECIFIED", + 1: "LATEST", +} +var ListTransferRunsRequest_RunAttempt_value = map[string]int32{ + "RUN_ATTEMPT_UNSPECIFIED": 0, + "LATEST": 1, +} + +func (x ListTransferRunsRequest_RunAttempt) String() string { + return proto.EnumName(ListTransferRunsRequest_RunAttempt_name, int32(x)) +} +func (ListTransferRunsRequest_RunAttempt) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{13, 0} +} + +// Represents a data source parameter with validation rules, so that +// parameters can be rendered in the UI. These parameters are given to us by +// supported data sources, and include all needed information for rendering +// and validation. +// Thus, whoever uses this api can decide to generate either generic ui, +// or custom data source specific forms. +type DataSourceParameter struct { + // Parameter identifier. + ParamId string `protobuf:"bytes,1,opt,name=param_id,json=paramId,proto3" json:"param_id,omitempty"` + // Parameter display name in the user interface. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Parameter description. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Parameter type. + Type DataSourceParameter_Type `protobuf:"varint,4,opt,name=type,proto3,enum=google.cloud.bigquery.datatransfer.v1.DataSourceParameter_Type" json:"type,omitempty"` + // Is parameter required. + Required bool `protobuf:"varint,5,opt,name=required,proto3" json:"required,omitempty"` + // Can parameter have multiple values. + Repeated bool `protobuf:"varint,6,opt,name=repeated,proto3" json:"repeated,omitempty"` + // Regular expression which can be used for parameter validation. + ValidationRegex string `protobuf:"bytes,7,opt,name=validation_regex,json=validationRegex,proto3" json:"validation_regex,omitempty"` + // All possible values for the parameter. + AllowedValues []string `protobuf:"bytes,8,rep,name=allowed_values,json=allowedValues,proto3" json:"allowed_values,omitempty"` + // For integer and double values specifies minimum allowed value. + MinValue *wrappers.DoubleValue `protobuf:"bytes,9,opt,name=min_value,json=minValue,proto3" json:"min_value,omitempty"` + // For integer and double values specifies maxminum allowed value. + MaxValue *wrappers.DoubleValue `protobuf:"bytes,10,opt,name=max_value,json=maxValue,proto3" json:"max_value,omitempty"` + // When parameter is a record, describes child fields. + Fields []*DataSourceParameter `protobuf:"bytes,11,rep,name=fields,proto3" json:"fields,omitempty"` + // Description of the requirements for this field, in case the user input does + // not fulfill the regex pattern or min/max values. + ValidationDescription string `protobuf:"bytes,12,opt,name=validation_description,json=validationDescription,proto3" json:"validation_description,omitempty"` + // URL to a help document to further explain the naming requirements. + ValidationHelpUrl string `protobuf:"bytes,13,opt,name=validation_help_url,json=validationHelpUrl,proto3" json:"validation_help_url,omitempty"` + // Cannot be changed after initial creation. + Immutable bool `protobuf:"varint,14,opt,name=immutable,proto3" json:"immutable,omitempty"` + // If set to true, schema should be taken from the parent with the same + // parameter_id. 
Only applicable when parameter type is RECORD. + Recurse bool `protobuf:"varint,15,opt,name=recurse,proto3" json:"recurse,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataSourceParameter) Reset() { *m = DataSourceParameter{} } +func (m *DataSourceParameter) String() string { return proto.CompactTextString(m) } +func (*DataSourceParameter) ProtoMessage() {} +func (*DataSourceParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{0} +} +func (m *DataSourceParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataSourceParameter.Unmarshal(m, b) +} +func (m *DataSourceParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataSourceParameter.Marshal(b, m, deterministic) +} +func (dst *DataSourceParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataSourceParameter.Merge(dst, src) +} +func (m *DataSourceParameter) XXX_Size() int { + return xxx_messageInfo_DataSourceParameter.Size(m) +} +func (m *DataSourceParameter) XXX_DiscardUnknown() { + xxx_messageInfo_DataSourceParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_DataSourceParameter proto.InternalMessageInfo + +func (m *DataSourceParameter) GetParamId() string { + if m != nil { + return m.ParamId + } + return "" +} + +func (m *DataSourceParameter) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *DataSourceParameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *DataSourceParameter) GetType() DataSourceParameter_Type { + if m != nil { + return m.Type + } + return DataSourceParameter_TYPE_UNSPECIFIED +} + +func (m *DataSourceParameter) GetRequired() bool { + if m != nil { + return m.Required + } + return false +} + +func (m *DataSourceParameter) GetRepeated() bool { + if m != nil { + return m.Repeated + } + return false +} + +func (m *DataSourceParameter) GetValidationRegex() string { + if m != nil { + return m.ValidationRegex + } + return "" +} + +func (m *DataSourceParameter) GetAllowedValues() []string { + if m != nil { + return m.AllowedValues + } + return nil +} + +func (m *DataSourceParameter) GetMinValue() *wrappers.DoubleValue { + if m != nil { + return m.MinValue + } + return nil +} + +func (m *DataSourceParameter) GetMaxValue() *wrappers.DoubleValue { + if m != nil { + return m.MaxValue + } + return nil +} + +func (m *DataSourceParameter) GetFields() []*DataSourceParameter { + if m != nil { + return m.Fields + } + return nil +} + +func (m *DataSourceParameter) GetValidationDescription() string { + if m != nil { + return m.ValidationDescription + } + return "" +} + +func (m *DataSourceParameter) GetValidationHelpUrl() string { + if m != nil { + return m.ValidationHelpUrl + } + return "" +} + +func (m *DataSourceParameter) GetImmutable() bool { + if m != nil { + return m.Immutable + } + return false +} + +func (m *DataSourceParameter) GetRecurse() bool { + if m != nil { + return m.Recurse + } + return false +} + +// Represents data source metadata. Metadata is sufficient to +// render UI and request proper OAuth tokens. +type DataSource struct { + // Output only. Data source resource name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Data source id. 
+ DataSourceId string `protobuf:"bytes,2,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"` + // User friendly data source name. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User friendly data source description string. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // Data source client id which should be used to receive refresh token. + // When not supplied, no offline credentials are populated for data transfer. + ClientId string `protobuf:"bytes,5,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + // Api auth scopes for which refresh token needs to be obtained. Only valid + // when `client_id` is specified. Ignored otherwise. These are scopes needed + // by a data source to prepare data and ingest them into BigQuery, + // e.g., https://www.googleapis.com/auth/bigquery + Scopes []string `protobuf:"bytes,6,rep,name=scopes,proto3" json:"scopes,omitempty"` + // Deprecated. This field has no effect. + TransferType TransferType `protobuf:"varint,7,opt,name=transfer_type,json=transferType,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferType" json:"transfer_type,omitempty"` + // Indicates whether the data source supports multiple transfers + // to different BigQuery targets. + SupportsMultipleTransfers bool `protobuf:"varint,8,opt,name=supports_multiple_transfers,json=supportsMultipleTransfers,proto3" json:"supports_multiple_transfers,omitempty"` + // The number of seconds to wait for an update from the data source + // before BigQuery marks the transfer as failed. + UpdateDeadlineSeconds int32 `protobuf:"varint,9,opt,name=update_deadline_seconds,json=updateDeadlineSeconds,proto3" json:"update_deadline_seconds,omitempty"` + // Default data transfer schedule. + // Examples of valid schedules include: + // `1st,3rd monday of month 15:30`, + // `every wed,fri of jan,jun 13:15`, and + // `first sunday of quarter 00:00`. + DefaultSchedule string `protobuf:"bytes,10,opt,name=default_schedule,json=defaultSchedule,proto3" json:"default_schedule,omitempty"` + // Specifies whether the data source supports a user defined schedule, or + // operates on the default schedule. + // When set to `true`, user can override default schedule. + SupportsCustomSchedule bool `protobuf:"varint,11,opt,name=supports_custom_schedule,json=supportsCustomSchedule,proto3" json:"supports_custom_schedule,omitempty"` + // Data source parameters. + Parameters []*DataSourceParameter `protobuf:"bytes,12,rep,name=parameters,proto3" json:"parameters,omitempty"` + // Url for the help document for this data source. + HelpUrl string `protobuf:"bytes,13,opt,name=help_url,json=helpUrl,proto3" json:"help_url,omitempty"` + // Indicates the type of authorization. + AuthorizationType DataSource_AuthorizationType `protobuf:"varint,14,opt,name=authorization_type,json=authorizationType,proto3,enum=google.cloud.bigquery.datatransfer.v1.DataSource_AuthorizationType" json:"authorization_type,omitempty"` + // Specifies whether the data source supports automatic data refresh for the + // past few days, and how it's supported. + // For some data sources, data might not be complete until a few days later, + // so it's useful to refresh data automatically. 
+ DataRefreshType DataSource_DataRefreshType `protobuf:"varint,15,opt,name=data_refresh_type,json=dataRefreshType,proto3,enum=google.cloud.bigquery.datatransfer.v1.DataSource_DataRefreshType" json:"data_refresh_type,omitempty"` + // Default data refresh window on days. + // Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`. + DefaultDataRefreshWindowDays int32 `protobuf:"varint,16,opt,name=default_data_refresh_window_days,json=defaultDataRefreshWindowDays,proto3" json:"default_data_refresh_window_days,omitempty"` + // Disables backfilling and manual run scheduling + // for the data source. + ManualRunsDisabled bool `protobuf:"varint,17,opt,name=manual_runs_disabled,json=manualRunsDisabled,proto3" json:"manual_runs_disabled,omitempty"` + // The minimum interval for scheduler to schedule runs. + MinimumScheduleInterval *duration.Duration `protobuf:"bytes,18,opt,name=minimum_schedule_interval,json=minimumScheduleInterval,proto3" json:"minimum_schedule_interval,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataSource) Reset() { *m = DataSource{} } +func (m *DataSource) String() string { return proto.CompactTextString(m) } +func (*DataSource) ProtoMessage() {} +func (*DataSource) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{1} +} +func (m *DataSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataSource.Unmarshal(m, b) +} +func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataSource.Marshal(b, m, deterministic) +} +func (dst *DataSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataSource.Merge(dst, src) +} +func (m *DataSource) XXX_Size() int { + return xxx_messageInfo_DataSource.Size(m) +} +func (m *DataSource) XXX_DiscardUnknown() { + xxx_messageInfo_DataSource.DiscardUnknown(m) +} + +var xxx_messageInfo_DataSource proto.InternalMessageInfo + +func (m *DataSource) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DataSource) GetDataSourceId() string { + if m != nil { + return m.DataSourceId + } + return "" +} + +func (m *DataSource) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *DataSource) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *DataSource) GetClientId() string { + if m != nil { + return m.ClientId + } + return "" +} + +func (m *DataSource) GetScopes() []string { + if m != nil { + return m.Scopes + } + return nil +} + +func (m *DataSource) GetTransferType() TransferType { + if m != nil { + return m.TransferType + } + return TransferType_TRANSFER_TYPE_UNSPECIFIED +} + +func (m *DataSource) GetSupportsMultipleTransfers() bool { + if m != nil { + return m.SupportsMultipleTransfers + } + return false +} + +func (m *DataSource) GetUpdateDeadlineSeconds() int32 { + if m != nil { + return m.UpdateDeadlineSeconds + } + return 0 +} + +func (m *DataSource) GetDefaultSchedule() string { + if m != nil { + return m.DefaultSchedule + } + return "" +} + +func (m *DataSource) GetSupportsCustomSchedule() bool { + if m != nil { + return m.SupportsCustomSchedule + } + return false +} + +func (m *DataSource) GetParameters() []*DataSourceParameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *DataSource) GetHelpUrl() string { + if m != nil { + return m.HelpUrl + } + return "" +} + +func (m *DataSource) GetAuthorizationType() 
DataSource_AuthorizationType { + if m != nil { + return m.AuthorizationType + } + return DataSource_AUTHORIZATION_TYPE_UNSPECIFIED +} + +func (m *DataSource) GetDataRefreshType() DataSource_DataRefreshType { + if m != nil { + return m.DataRefreshType + } + return DataSource_DATA_REFRESH_TYPE_UNSPECIFIED +} + +func (m *DataSource) GetDefaultDataRefreshWindowDays() int32 { + if m != nil { + return m.DefaultDataRefreshWindowDays + } + return 0 +} + +func (m *DataSource) GetManualRunsDisabled() bool { + if m != nil { + return m.ManualRunsDisabled + } + return false +} + +func (m *DataSource) GetMinimumScheduleInterval() *duration.Duration { + if m != nil { + return m.MinimumScheduleInterval + } + return nil +} + +// A request to get data source info. +type GetDataSourceRequest struct { + // The field will contain name of the resource requested, for example: + // `projects/{project_id}/dataSources/{data_source_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDataSourceRequest) Reset() { *m = GetDataSourceRequest{} } +func (m *GetDataSourceRequest) String() string { return proto.CompactTextString(m) } +func (*GetDataSourceRequest) ProtoMessage() {} +func (*GetDataSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{2} +} +func (m *GetDataSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDataSourceRequest.Unmarshal(m, b) +} +func (m *GetDataSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDataSourceRequest.Marshal(b, m, deterministic) +} +func (dst *GetDataSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDataSourceRequest.Merge(dst, src) +} +func (m *GetDataSourceRequest) XXX_Size() int { + return xxx_messageInfo_GetDataSourceRequest.Size(m) +} +func (m *GetDataSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDataSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDataSourceRequest proto.InternalMessageInfo + +func (m *GetDataSourceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list supported data sources and their data transfer settings. +type ListDataSourcesRequest struct { + // The BigQuery project id for which data sources should be returned. + // Must be in the form: `projects/{project_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Pagination token, which can be used to request a specific page + // of `ListDataSourcesRequest` list results. For multiple-page + // results, `ListDataSourcesResponse` outputs + // a `next_page` token, which can be used as the + // `page_token` value to request the next page of list results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Page size. The default page size is the maximum value of 1000 results. 
+ PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDataSourcesRequest) Reset() { *m = ListDataSourcesRequest{} } +func (m *ListDataSourcesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDataSourcesRequest) ProtoMessage() {} +func (*ListDataSourcesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{3} +} +func (m *ListDataSourcesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDataSourcesRequest.Unmarshal(m, b) +} +func (m *ListDataSourcesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDataSourcesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDataSourcesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDataSourcesRequest.Merge(dst, src) +} +func (m *ListDataSourcesRequest) XXX_Size() int { + return xxx_messageInfo_ListDataSourcesRequest.Size(m) +} +func (m *ListDataSourcesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDataSourcesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDataSourcesRequest proto.InternalMessageInfo + +func (m *ListDataSourcesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDataSourcesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListDataSourcesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Returns list of supported data sources and their metadata. +type ListDataSourcesResponse struct { + // List of supported data sources and their transfer settings. + DataSources []*DataSource `protobuf:"bytes,1,rep,name=data_sources,json=dataSources,proto3" json:"data_sources,omitempty"` + // Output only. The next-pagination token. For multiple-page list results, + // this token can be used as the + // `ListDataSourcesRequest.page_token` + // to request the next page of list results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDataSourcesResponse) Reset() { *m = ListDataSourcesResponse{} } +func (m *ListDataSourcesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDataSourcesResponse) ProtoMessage() {} +func (*ListDataSourcesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{4} +} +func (m *ListDataSourcesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDataSourcesResponse.Unmarshal(m, b) +} +func (m *ListDataSourcesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDataSourcesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDataSourcesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDataSourcesResponse.Merge(dst, src) +} +func (m *ListDataSourcesResponse) XXX_Size() int { + return xxx_messageInfo_ListDataSourcesResponse.Size(m) +} +func (m *ListDataSourcesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDataSourcesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDataSourcesResponse proto.InternalMessageInfo + +func (m *ListDataSourcesResponse) GetDataSources() []*DataSource { + if m != nil { + return m.DataSources + } + return nil +} + +func (m *ListDataSourcesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to create a data transfer configuration. If new credentials are +// needed for this transfer configuration, an authorization code must be +// provided. If an authorization code is provided, the transfer configuration +// will be associated with the user id corresponding to the +// authorization code. Otherwise, the transfer configuration will be associated +// with the calling user. +type CreateTransferConfigRequest struct { + // The BigQuery project id where the transfer configuration should be created. + // Must be in the format /projects/{project_id}/locations/{location_id} + // If specified location and location of the destination bigquery dataset + // do not match - the request will fail. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Data transfer configuration to create. + TransferConfig *TransferConfig `protobuf:"bytes,2,opt,name=transfer_config,json=transferConfig,proto3" json:"transfer_config,omitempty"` + // Optional OAuth2 authorization code to use with this transfer configuration. + // This is required if new credentials are needed, as indicated by + // `CheckValidCreds`. + // In order to obtain authorization_code, please make a + // request to + // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + // + // * client_id should be OAuth client_id of BigQuery DTS API for the given + // data source returned by ListDataSources method. + // * data_source_scopes are the scopes returned by ListDataSources method. + // * redirect_uri is an optional parameter. If not specified, then + // authorization code is posted to the opener of authorization flow window. + // Otherwise it will be sent to the redirect uri. 
A special value of + // urn:ietf:wg:oauth:2.0:oob means that authorization code should be + // returned in the title bar of the browser, with the page text prompting + // the user to copy the code and paste it in the application. + AuthorizationCode string `protobuf:"bytes,3,opt,name=authorization_code,json=authorizationCode,proto3" json:"authorization_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTransferConfigRequest) Reset() { *m = CreateTransferConfigRequest{} } +func (m *CreateTransferConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTransferConfigRequest) ProtoMessage() {} +func (*CreateTransferConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{5} +} +func (m *CreateTransferConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTransferConfigRequest.Unmarshal(m, b) +} +func (m *CreateTransferConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTransferConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTransferConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTransferConfigRequest.Merge(dst, src) +} +func (m *CreateTransferConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateTransferConfigRequest.Size(m) +} +func (m *CreateTransferConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTransferConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTransferConfigRequest proto.InternalMessageInfo + +func (m *CreateTransferConfigRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTransferConfigRequest) GetTransferConfig() *TransferConfig { + if m != nil { + return m.TransferConfig + } + return nil +} + +func (m *CreateTransferConfigRequest) GetAuthorizationCode() string { + if m != nil { + return m.AuthorizationCode + } + return "" +} + +// A request to update a transfer configuration. To update the user id of the +// transfer configuration, an authorization code needs to be provided. +type UpdateTransferConfigRequest struct { + // Data transfer configuration to create. + TransferConfig *TransferConfig `protobuf:"bytes,1,opt,name=transfer_config,json=transferConfig,proto3" json:"transfer_config,omitempty"` + // Optional OAuth2 authorization code to use with this transfer configuration. + // If it is provided, the transfer configuration will be associated with the + // authorizing user. + // In order to obtain authorization_code, please make a + // request to + // https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=&scope=&redirect_uri= + // + // * client_id should be OAuth client_id of BigQuery DTS API for the given + // data source returned by ListDataSources method. + // * data_source_scopes are the scopes returned by ListDataSources method. + // * redirect_uri is an optional parameter. If not specified, then + // authorization code is posted to the opener of authorization flow window. + // Otherwise it will be sent to the redirect uri. A special value of + // urn:ietf:wg:oauth:2.0:oob means that authorization code should be + // returned in the title bar of the browser, with the page text prompting + // the user to copy the code and paste it in the application. 
+ AuthorizationCode string `protobuf:"bytes,3,opt,name=authorization_code,json=authorizationCode,proto3" json:"authorization_code,omitempty"` + // Required list of fields to be updated in this request. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTransferConfigRequest) Reset() { *m = UpdateTransferConfigRequest{} } +func (m *UpdateTransferConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTransferConfigRequest) ProtoMessage() {} +func (*UpdateTransferConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{6} +} +func (m *UpdateTransferConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTransferConfigRequest.Unmarshal(m, b) +} +func (m *UpdateTransferConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTransferConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTransferConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTransferConfigRequest.Merge(dst, src) +} +func (m *UpdateTransferConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTransferConfigRequest.Size(m) +} +func (m *UpdateTransferConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTransferConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTransferConfigRequest proto.InternalMessageInfo + +func (m *UpdateTransferConfigRequest) GetTransferConfig() *TransferConfig { + if m != nil { + return m.TransferConfig + } + return nil +} + +func (m *UpdateTransferConfigRequest) GetAuthorizationCode() string { + if m != nil { + return m.AuthorizationCode + } + return "" +} + +func (m *UpdateTransferConfigRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// A request to get data transfer information. 
+type GetTransferConfigRequest struct { + // The field will contain name of the resource requested, for example: + // `projects/{project_id}/transferConfigs/{config_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTransferConfigRequest) Reset() { *m = GetTransferConfigRequest{} } +func (m *GetTransferConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetTransferConfigRequest) ProtoMessage() {} +func (*GetTransferConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{7} +} +func (m *GetTransferConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTransferConfigRequest.Unmarshal(m, b) +} +func (m *GetTransferConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTransferConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetTransferConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTransferConfigRequest.Merge(dst, src) +} +func (m *GetTransferConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetTransferConfigRequest.Size(m) +} +func (m *GetTransferConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTransferConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTransferConfigRequest proto.InternalMessageInfo + +func (m *GetTransferConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request to delete data transfer information. All associated transfer runs +// and log messages will be deleted as well. +type DeleteTransferConfigRequest struct { + // The field will contain name of the resource requested, for example: + // `projects/{project_id}/transferConfigs/{config_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTransferConfigRequest) Reset() { *m = DeleteTransferConfigRequest{} } +func (m *DeleteTransferConfigRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTransferConfigRequest) ProtoMessage() {} +func (*DeleteTransferConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{8} +} +func (m *DeleteTransferConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTransferConfigRequest.Unmarshal(m, b) +} +func (m *DeleteTransferConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTransferConfigRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTransferConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTransferConfigRequest.Merge(dst, src) +} +func (m *DeleteTransferConfigRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTransferConfigRequest.Size(m) +} +func (m *DeleteTransferConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTransferConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTransferConfigRequest proto.InternalMessageInfo + +func (m *DeleteTransferConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request to get data transfer run information. 
+type GetTransferRunRequest struct { + // The field will contain name of the resource requested, for example: + // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTransferRunRequest) Reset() { *m = GetTransferRunRequest{} } +func (m *GetTransferRunRequest) String() string { return proto.CompactTextString(m) } +func (*GetTransferRunRequest) ProtoMessage() {} +func (*GetTransferRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{9} +} +func (m *GetTransferRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTransferRunRequest.Unmarshal(m, b) +} +func (m *GetTransferRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTransferRunRequest.Marshal(b, m, deterministic) +} +func (dst *GetTransferRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTransferRunRequest.Merge(dst, src) +} +func (m *GetTransferRunRequest) XXX_Size() int { + return xxx_messageInfo_GetTransferRunRequest.Size(m) +} +func (m *GetTransferRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTransferRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTransferRunRequest proto.InternalMessageInfo + +func (m *GetTransferRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request to delete data transfer run information. +type DeleteTransferRunRequest struct { + // The field will contain name of the resource requested, for example: + // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTransferRunRequest) Reset() { *m = DeleteTransferRunRequest{} } +func (m *DeleteTransferRunRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTransferRunRequest) ProtoMessage() {} +func (*DeleteTransferRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{10} +} +func (m *DeleteTransferRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTransferRunRequest.Unmarshal(m, b) +} +func (m *DeleteTransferRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTransferRunRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTransferRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTransferRunRequest.Merge(dst, src) +} +func (m *DeleteTransferRunRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTransferRunRequest.Size(m) +} +func (m *DeleteTransferRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTransferRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTransferRunRequest proto.InternalMessageInfo + +func (m *DeleteTransferRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request to list data transfers configured for a BigQuery project. +type ListTransferConfigsRequest struct { + // The BigQuery project id for which data sources + // should be returned: `projects/{project_id}`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // When specified, only configurations of requested data sources are returned. + DataSourceIds []string `protobuf:"bytes,2,rep,name=data_source_ids,json=dataSourceIds,proto3" json:"data_source_ids,omitempty"` + // Pagination token, which can be used to request a specific page + // of `ListTransfersRequest` list results. For multiple-page + // results, `ListTransfersResponse` outputs + // a `next_page` token, which can be used as the + // `page_token` value to request the next page of list results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Page size. The default page size is the maximum value of 1000 results. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferConfigsRequest) Reset() { *m = ListTransferConfigsRequest{} } +func (m *ListTransferConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTransferConfigsRequest) ProtoMessage() {} +func (*ListTransferConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{11} +} +func (m *ListTransferConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferConfigsRequest.Unmarshal(m, b) +} +func (m *ListTransferConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTransferConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferConfigsRequest.Merge(dst, src) +} +func (m *ListTransferConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListTransferConfigsRequest.Size(m) +} +func (m *ListTransferConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferConfigsRequest proto.InternalMessageInfo + +func (m *ListTransferConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTransferConfigsRequest) GetDataSourceIds() []string { + if m != nil { + return m.DataSourceIds + } + return nil +} + +func (m *ListTransferConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListTransferConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// The returned list of pipelines in the project. +type ListTransferConfigsResponse struct { + // Output only. The stored pipeline transfer configurations. + TransferConfigs []*TransferConfig `protobuf:"bytes,1,rep,name=transfer_configs,json=transferConfigs,proto3" json:"transfer_configs,omitempty"` + // Output only. The next-pagination token. For multiple-page list results, + // this token can be used as the + // `ListTransferConfigsRequest.page_token` + // to request the next page of list results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferConfigsResponse) Reset() { *m = ListTransferConfigsResponse{} } +func (m *ListTransferConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTransferConfigsResponse) ProtoMessage() {} +func (*ListTransferConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{12} +} +func (m *ListTransferConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferConfigsResponse.Unmarshal(m, b) +} +func (m *ListTransferConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTransferConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferConfigsResponse.Merge(dst, src) +} +func (m *ListTransferConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListTransferConfigsResponse.Size(m) +} +func (m *ListTransferConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferConfigsResponse proto.InternalMessageInfo + +func (m *ListTransferConfigsResponse) GetTransferConfigs() []*TransferConfig { + if m != nil { + return m.TransferConfigs + } + return nil +} + +func (m *ListTransferConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to list data transfer runs. UI can use this method to show/filter +// specific data transfer runs. The data source can use this method to request +// all scheduled transfer runs. +type ListTransferRunsRequest struct { + // Name of transfer configuration for which transfer runs should be retrieved. + // Format of transfer configuration resource name is: + // `projects/{project_id}/transferConfigs/{config_id}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // When specified, only transfer runs with requested states are returned. + States []TransferState `protobuf:"varint,2,rep,packed,name=states,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferState" json:"states,omitempty"` + // Pagination token, which can be used to request a specific page + // of `ListTransferRunsRequest` list results. For multiple-page + // results, `ListTransferRunsResponse` outputs + // a `next_page` token, which can be used as the + // `page_token` value to request the next page of list results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Page size. The default page size is the maximum value of 1000 results. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Indicates how run attempts are to be pulled. 
+ RunAttempt ListTransferRunsRequest_RunAttempt `protobuf:"varint,5,opt,name=run_attempt,json=runAttempt,proto3,enum=google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest_RunAttempt" json:"run_attempt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferRunsRequest) Reset() { *m = ListTransferRunsRequest{} } +func (m *ListTransferRunsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTransferRunsRequest) ProtoMessage() {} +func (*ListTransferRunsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{13} +} +func (m *ListTransferRunsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferRunsRequest.Unmarshal(m, b) +} +func (m *ListTransferRunsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferRunsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTransferRunsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferRunsRequest.Merge(dst, src) +} +func (m *ListTransferRunsRequest) XXX_Size() int { + return xxx_messageInfo_ListTransferRunsRequest.Size(m) +} +func (m *ListTransferRunsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferRunsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferRunsRequest proto.InternalMessageInfo + +func (m *ListTransferRunsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTransferRunsRequest) GetStates() []TransferState { + if m != nil { + return m.States + } + return nil +} + +func (m *ListTransferRunsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListTransferRunsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTransferRunsRequest) GetRunAttempt() ListTransferRunsRequest_RunAttempt { + if m != nil { + return m.RunAttempt + } + return ListTransferRunsRequest_RUN_ATTEMPT_UNSPECIFIED +} + +// The returned list of pipelines in the project. +type ListTransferRunsResponse struct { + // Output only. The stored pipeline transfer runs. + TransferRuns []*TransferRun `protobuf:"bytes,1,rep,name=transfer_runs,json=transferRuns,proto3" json:"transfer_runs,omitempty"` + // Output only. The next-pagination token. For multiple-page list results, + // this token can be used as the + // `ListTransferRunsRequest.page_token` + // to request the next page of list results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferRunsResponse) Reset() { *m = ListTransferRunsResponse{} } +func (m *ListTransferRunsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTransferRunsResponse) ProtoMessage() {} +func (*ListTransferRunsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{14} +} +func (m *ListTransferRunsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferRunsResponse.Unmarshal(m, b) +} +func (m *ListTransferRunsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferRunsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTransferRunsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferRunsResponse.Merge(dst, src) +} +func (m *ListTransferRunsResponse) XXX_Size() int { + return xxx_messageInfo_ListTransferRunsResponse.Size(m) +} +func (m *ListTransferRunsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferRunsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferRunsResponse proto.InternalMessageInfo + +func (m *ListTransferRunsResponse) GetTransferRuns() []*TransferRun { + if m != nil { + return m.TransferRuns + } + return nil +} + +func (m *ListTransferRunsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to get user facing log messages associated with data transfer run. +type ListTransferLogsRequest struct { + // Transfer run name in the form: + // `projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Pagination token, which can be used to request a specific page + // of `ListTransferLogsRequest` list results. For multiple-page + // results, `ListTransferLogsResponse` outputs + // a `next_page` token, which can be used as the + // `page_token` value to request the next page of list results. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Page size. The default page size is the maximum value of 1000 results. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Message types to return. If not populated - INFO, WARNING and ERROR + // messages are returned. 
+ MessageTypes []TransferMessage_MessageSeverity `protobuf:"varint,6,rep,packed,name=message_types,json=messageTypes,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferMessage_MessageSeverity" json:"message_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferLogsRequest) Reset() { *m = ListTransferLogsRequest{} } +func (m *ListTransferLogsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTransferLogsRequest) ProtoMessage() {} +func (*ListTransferLogsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{15} +} +func (m *ListTransferLogsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferLogsRequest.Unmarshal(m, b) +} +func (m *ListTransferLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferLogsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTransferLogsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferLogsRequest.Merge(dst, src) +} +func (m *ListTransferLogsRequest) XXX_Size() int { + return xxx_messageInfo_ListTransferLogsRequest.Size(m) +} +func (m *ListTransferLogsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferLogsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferLogsRequest proto.InternalMessageInfo + +func (m *ListTransferLogsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTransferLogsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListTransferLogsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTransferLogsRequest) GetMessageTypes() []TransferMessage_MessageSeverity { + if m != nil { + return m.MessageTypes + } + return nil +} + +// The returned list transfer run messages. +type ListTransferLogsResponse struct { + // Output only. The stored pipeline transfer messages. + TransferMessages []*TransferMessage `protobuf:"bytes,1,rep,name=transfer_messages,json=transferMessages,proto3" json:"transfer_messages,omitempty"` + // Output only. The next-pagination token. For multiple-page list results, + // this token can be used as the + // `GetTransferRunLogRequest.page_token` + // to request the next page of list results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferLogsResponse) Reset() { *m = ListTransferLogsResponse{} } +func (m *ListTransferLogsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTransferLogsResponse) ProtoMessage() {} +func (*ListTransferLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{16} +} +func (m *ListTransferLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferLogsResponse.Unmarshal(m, b) +} +func (m *ListTransferLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferLogsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTransferLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferLogsResponse.Merge(dst, src) +} +func (m *ListTransferLogsResponse) XXX_Size() int { + return xxx_messageInfo_ListTransferLogsResponse.Size(m) +} +func (m *ListTransferLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferLogsResponse proto.InternalMessageInfo + +func (m *ListTransferLogsResponse) GetTransferMessages() []*TransferMessage { + if m != nil { + return m.TransferMessages + } + return nil +} + +func (m *ListTransferLogsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to determine whether the user has valid credentials. This method +// is used to limit the number of OAuth popups in the user interface. The +// user id is inferred from the API call context. +// If the data source has the Google+ authorization type, this method +// returns false, as it cannot be determined whether the credentials are +// already valid merely based on the user id. 
+type CheckValidCredsRequest struct { + // The data source in the form: + // `projects/{project_id}/dataSources/{data_source_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckValidCredsRequest) Reset() { *m = CheckValidCredsRequest{} } +func (m *CheckValidCredsRequest) String() string { return proto.CompactTextString(m) } +func (*CheckValidCredsRequest) ProtoMessage() {} +func (*CheckValidCredsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{17} +} +func (m *CheckValidCredsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckValidCredsRequest.Unmarshal(m, b) +} +func (m *CheckValidCredsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckValidCredsRequest.Marshal(b, m, deterministic) +} +func (dst *CheckValidCredsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckValidCredsRequest.Merge(dst, src) +} +func (m *CheckValidCredsRequest) XXX_Size() int { + return xxx_messageInfo_CheckValidCredsRequest.Size(m) +} +func (m *CheckValidCredsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CheckValidCredsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckValidCredsRequest proto.InternalMessageInfo + +func (m *CheckValidCredsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A response indicating whether the credentials exist and are valid. +type CheckValidCredsResponse struct { + // If set to `true`, the credentials exist and are valid. + HasValidCreds bool `protobuf:"varint,1,opt,name=has_valid_creds,json=hasValidCreds,proto3" json:"has_valid_creds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CheckValidCredsResponse) Reset() { *m = CheckValidCredsResponse{} } +func (m *CheckValidCredsResponse) String() string { return proto.CompactTextString(m) } +func (*CheckValidCredsResponse) ProtoMessage() {} +func (*CheckValidCredsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{18} +} +func (m *CheckValidCredsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CheckValidCredsResponse.Unmarshal(m, b) +} +func (m *CheckValidCredsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CheckValidCredsResponse.Marshal(b, m, deterministic) +} +func (dst *CheckValidCredsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CheckValidCredsResponse.Merge(dst, src) +} +func (m *CheckValidCredsResponse) XXX_Size() int { + return xxx_messageInfo_CheckValidCredsResponse.Size(m) +} +func (m *CheckValidCredsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CheckValidCredsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CheckValidCredsResponse proto.InternalMessageInfo + +func (m *CheckValidCredsResponse) GetHasValidCreds() bool { + if m != nil { + return m.HasValidCreds + } + return false +} + +// A request to schedule transfer runs for a time range. +type ScheduleTransferRunsRequest struct { + // Transfer configuration name in the form: + // `projects/{project_id}/transferConfigs/{config_id}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Start time of the range of transfer runs. For example, + // `"2017-05-25T00:00:00+00:00"`. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of the range of transfer runs. For example, + // `"2017-05-30T00:00:00+00:00"`. + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScheduleTransferRunsRequest) Reset() { *m = ScheduleTransferRunsRequest{} } +func (m *ScheduleTransferRunsRequest) String() string { return proto.CompactTextString(m) } +func (*ScheduleTransferRunsRequest) ProtoMessage() {} +func (*ScheduleTransferRunsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{19} +} +func (m *ScheduleTransferRunsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScheduleTransferRunsRequest.Unmarshal(m, b) +} +func (m *ScheduleTransferRunsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScheduleTransferRunsRequest.Marshal(b, m, deterministic) +} +func (dst *ScheduleTransferRunsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScheduleTransferRunsRequest.Merge(dst, src) +} +func (m *ScheduleTransferRunsRequest) XXX_Size() int { + return xxx_messageInfo_ScheduleTransferRunsRequest.Size(m) +} +func (m *ScheduleTransferRunsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ScheduleTransferRunsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ScheduleTransferRunsRequest proto.InternalMessageInfo + +func (m *ScheduleTransferRunsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ScheduleTransferRunsRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ScheduleTransferRunsRequest) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// A response to schedule transfer runs for a time range. +type ScheduleTransferRunsResponse struct { + // The transfer runs that were scheduled. 
+ Runs []*TransferRun `protobuf:"bytes,1,rep,name=runs,proto3" json:"runs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScheduleTransferRunsResponse) Reset() { *m = ScheduleTransferRunsResponse{} } +func (m *ScheduleTransferRunsResponse) String() string { return proto.CompactTextString(m) } +func (*ScheduleTransferRunsResponse) ProtoMessage() {} +func (*ScheduleTransferRunsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datatransfer_102617b20f253925, []int{20} +} +func (m *ScheduleTransferRunsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScheduleTransferRunsResponse.Unmarshal(m, b) +} +func (m *ScheduleTransferRunsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScheduleTransferRunsResponse.Marshal(b, m, deterministic) +} +func (dst *ScheduleTransferRunsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScheduleTransferRunsResponse.Merge(dst, src) +} +func (m *ScheduleTransferRunsResponse) XXX_Size() int { + return xxx_messageInfo_ScheduleTransferRunsResponse.Size(m) +} +func (m *ScheduleTransferRunsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ScheduleTransferRunsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ScheduleTransferRunsResponse proto.InternalMessageInfo + +func (m *ScheduleTransferRunsResponse) GetRuns() []*TransferRun { + if m != nil { + return m.Runs + } + return nil +} + +func init() { + proto.RegisterType((*DataSourceParameter)(nil), "google.cloud.bigquery.datatransfer.v1.DataSourceParameter") + proto.RegisterType((*DataSource)(nil), "google.cloud.bigquery.datatransfer.v1.DataSource") + proto.RegisterType((*GetDataSourceRequest)(nil), "google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest") + proto.RegisterType((*ListDataSourcesRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest") + proto.RegisterType((*ListDataSourcesResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse") + proto.RegisterType((*CreateTransferConfigRequest)(nil), "google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest") + proto.RegisterType((*UpdateTransferConfigRequest)(nil), "google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest") + proto.RegisterType((*GetTransferConfigRequest)(nil), "google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest") + proto.RegisterType((*DeleteTransferConfigRequest)(nil), "google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest") + proto.RegisterType((*GetTransferRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest") + proto.RegisterType((*DeleteTransferRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest") + proto.RegisterType((*ListTransferConfigsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest") + proto.RegisterType((*ListTransferConfigsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse") + proto.RegisterType((*ListTransferRunsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest") + proto.RegisterType((*ListTransferRunsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse") + proto.RegisterType((*ListTransferLogsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest") + proto.RegisterType((*ListTransferLogsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse") + 
proto.RegisterType((*CheckValidCredsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest") + proto.RegisterType((*CheckValidCredsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse") + proto.RegisterType((*ScheduleTransferRunsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest") + proto.RegisterType((*ScheduleTransferRunsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse") + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.DataSourceParameter_Type", DataSourceParameter_Type_name, DataSourceParameter_Type_value) + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.DataSource_AuthorizationType", DataSource_AuthorizationType_name, DataSource_AuthorizationType_value) + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.DataSource_DataRefreshType", DataSource_DataRefreshType_name, DataSource_DataRefreshType_value) + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest_RunAttempt", ListTransferRunsRequest_RunAttempt_name, ListTransferRunsRequest_RunAttempt_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DataTransferServiceClient is the client API for DataTransferService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DataTransferServiceClient interface { + // Retrieves a supported data source and returns its settings, + // which can be used for UI rendering. + GetDataSource(ctx context.Context, in *GetDataSourceRequest, opts ...grpc.CallOption) (*DataSource, error) + // Lists supported data sources and returns their settings, + // which can be used for UI rendering. + ListDataSources(ctx context.Context, in *ListDataSourcesRequest, opts ...grpc.CallOption) (*ListDataSourcesResponse, error) + // Creates a new data transfer configuration. + CreateTransferConfig(ctx context.Context, in *CreateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) + // Updates a data transfer configuration. + // All fields must be set, even if they are not updated. + UpdateTransferConfig(ctx context.Context, in *UpdateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) + // Deletes a data transfer configuration, + // including any associated transfer runs and logs. + DeleteTransferConfig(ctx context.Context, in *DeleteTransferConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns information about a data transfer config. + GetTransferConfig(ctx context.Context, in *GetTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) + // Returns information about all data transfers in the project. + ListTransferConfigs(ctx context.Context, in *ListTransferConfigsRequest, opts ...grpc.CallOption) (*ListTransferConfigsResponse, error) + // Creates transfer runs for a time range [start_time, end_time]. + // For each date - or whatever granularity the data source supports - in the + // range, one transfer run is created. + // Note that runs are created per UTC time in the time range. 
+ ScheduleTransferRuns(ctx context.Context, in *ScheduleTransferRunsRequest, opts ...grpc.CallOption) (*ScheduleTransferRunsResponse, error) + // Returns information about the particular transfer run. + GetTransferRun(ctx context.Context, in *GetTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error) + // Deletes the specified transfer run. + DeleteTransferRun(ctx context.Context, in *DeleteTransferRunRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns information about running and completed jobs. + ListTransferRuns(ctx context.Context, in *ListTransferRunsRequest, opts ...grpc.CallOption) (*ListTransferRunsResponse, error) + // Returns user facing log messages for the data transfer run. + ListTransferLogs(ctx context.Context, in *ListTransferLogsRequest, opts ...grpc.CallOption) (*ListTransferLogsResponse, error) + // Returns true if valid credentials exist for the given data source and + // requesting user. + // Some data sources doesn't support service account, so we need to talk to + // them on behalf of the end user. This API just checks whether we have OAuth + // token for the particular user, which is a pre-requisite before user can + // create a transfer config. + CheckValidCreds(ctx context.Context, in *CheckValidCredsRequest, opts ...grpc.CallOption) (*CheckValidCredsResponse, error) +} + +type dataTransferServiceClient struct { + cc *grpc.ClientConn +} + +func NewDataTransferServiceClient(cc *grpc.ClientConn) DataTransferServiceClient { + return &dataTransferServiceClient{cc} +} + +func (c *dataTransferServiceClient) GetDataSource(ctx context.Context, in *GetDataSourceRequest, opts ...grpc.CallOption) (*DataSource, error) { + out := new(DataSource) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) ListDataSources(ctx context.Context, in *ListDataSourcesRequest, opts ...grpc.CallOption) (*ListDataSourcesResponse, error) { + out := new(ListDataSourcesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) CreateTransferConfig(ctx context.Context, in *CreateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) { + out := new(TransferConfig) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) UpdateTransferConfig(ctx context.Context, in *UpdateTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) { + out := new(TransferConfig) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) DeleteTransferConfig(ctx context.Context, in *DeleteTransferConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) GetTransferConfig(ctx context.Context, in *GetTransferConfigRequest, opts ...grpc.CallOption) (*TransferConfig, error) { + out := new(TransferConfig) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) ListTransferConfigs(ctx context.Context, in *ListTransferConfigsRequest, opts ...grpc.CallOption) (*ListTransferConfigsResponse, error) { + out := new(ListTransferConfigsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) ScheduleTransferRuns(ctx context.Context, in *ScheduleTransferRunsRequest, opts ...grpc.CallOption) (*ScheduleTransferRunsResponse, error) { + out := new(ScheduleTransferRunsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) GetTransferRun(ctx context.Context, in *GetTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error) { + out := new(TransferRun) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) DeleteTransferRun(ctx context.Context, in *DeleteTransferRunRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) ListTransferRuns(ctx context.Context, in *ListTransferRunsRequest, opts ...grpc.CallOption) (*ListTransferRunsResponse, error) { + out := new(ListTransferRunsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) ListTransferLogs(ctx context.Context, in *ListTransferLogsRequest, opts ...grpc.CallOption) (*ListTransferLogsResponse, error) { + out := new(ListTransferLogsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataTransferServiceClient) CheckValidCreds(ctx context.Context, in *CheckValidCredsRequest, opts ...grpc.CallOption) (*CheckValidCredsResponse, error) { + out := new(CheckValidCredsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DataTransferServiceServer is the server API for DataTransferService service. +type DataTransferServiceServer interface { + // Retrieves a supported data source and returns its settings, + // which can be used for UI rendering. 
+ GetDataSource(context.Context, *GetDataSourceRequest) (*DataSource, error) + // Lists supported data sources and returns their settings, + // which can be used for UI rendering. + ListDataSources(context.Context, *ListDataSourcesRequest) (*ListDataSourcesResponse, error) + // Creates a new data transfer configuration. + CreateTransferConfig(context.Context, *CreateTransferConfigRequest) (*TransferConfig, error) + // Updates a data transfer configuration. + // All fields must be set, even if they are not updated. + UpdateTransferConfig(context.Context, *UpdateTransferConfigRequest) (*TransferConfig, error) + // Deletes a data transfer configuration, + // including any associated transfer runs and logs. + DeleteTransferConfig(context.Context, *DeleteTransferConfigRequest) (*empty.Empty, error) + // Returns information about a data transfer config. + GetTransferConfig(context.Context, *GetTransferConfigRequest) (*TransferConfig, error) + // Returns information about all data transfers in the project. + ListTransferConfigs(context.Context, *ListTransferConfigsRequest) (*ListTransferConfigsResponse, error) + // Creates transfer runs for a time range [start_time, end_time]. + // For each date - or whatever granularity the data source supports - in the + // range, one transfer run is created. + // Note that runs are created per UTC time in the time range. + ScheduleTransferRuns(context.Context, *ScheduleTransferRunsRequest) (*ScheduleTransferRunsResponse, error) + // Returns information about the particular transfer run. + GetTransferRun(context.Context, *GetTransferRunRequest) (*TransferRun, error) + // Deletes the specified transfer run. + DeleteTransferRun(context.Context, *DeleteTransferRunRequest) (*empty.Empty, error) + // Returns information about running and completed jobs. + ListTransferRuns(context.Context, *ListTransferRunsRequest) (*ListTransferRunsResponse, error) + // Returns user facing log messages for the data transfer run. + ListTransferLogs(context.Context, *ListTransferLogsRequest) (*ListTransferLogsResponse, error) + // Returns true if valid credentials exist for the given data source and + // requesting user. + // Some data sources doesn't support service account, so we need to talk to + // them on behalf of the end user. This API just checks whether we have OAuth + // token for the particular user, which is a pre-requisite before user can + // create a transfer config. 
+ CheckValidCreds(context.Context, *CheckValidCredsRequest) (*CheckValidCredsResponse, error) +} + +func RegisterDataTransferServiceServer(s *grpc.Server, srv DataTransferServiceServer) { + s.RegisterService(&_DataTransferService_serviceDesc, srv) +} + +func _DataTransferService_GetDataSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDataSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).GetDataSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetDataSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).GetDataSource(ctx, req.(*GetDataSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_ListDataSources_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDataSourcesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).ListDataSources(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListDataSources", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).ListDataSources(ctx, req.(*ListDataSourcesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_CreateTransferConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTransferConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).CreateTransferConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CreateTransferConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).CreateTransferConfig(ctx, req.(*CreateTransferConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_UpdateTransferConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTransferConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).UpdateTransferConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/UpdateTransferConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).UpdateTransferConfig(ctx, req.(*UpdateTransferConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_DeleteTransferConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTransferConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(DataTransferServiceServer).DeleteTransferConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).DeleteTransferConfig(ctx, req.(*DeleteTransferConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_GetTransferConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTransferConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).GetTransferConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).GetTransferConfig(ctx, req.(*GetTransferConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_ListTransferConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTransferConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).ListTransferConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).ListTransferConfigs(ctx, req.(*ListTransferConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_ScheduleTransferRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ScheduleTransferRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).ScheduleTransferRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ScheduleTransferRuns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).ScheduleTransferRuns(ctx, req.(*ScheduleTransferRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_GetTransferRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTransferRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).GetTransferRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/GetTransferRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).GetTransferRun(ctx, req.(*GetTransferRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_DataTransferService_DeleteTransferRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTransferRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).DeleteTransferRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/DeleteTransferRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).DeleteTransferRun(ctx, req.(*DeleteTransferRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_ListTransferRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTransferRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).ListTransferRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferRuns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).ListTransferRuns(ctx, req.(*ListTransferRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_ListTransferLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTransferLogsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).ListTransferLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/ListTransferLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).ListTransferLogs(ctx, req.(*ListTransferLogsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataTransferService_CheckValidCreds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CheckValidCredsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataTransferServiceServer).CheckValidCreds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataTransferService/CheckValidCreds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataTransferServiceServer).CheckValidCreds(ctx, req.(*CheckValidCredsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DataTransferService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.bigquery.datatransfer.v1.DataTransferService", + HandlerType: (*DataTransferServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDataSource", + Handler: _DataTransferService_GetDataSource_Handler, + }, + { + MethodName: "ListDataSources", + Handler: _DataTransferService_ListDataSources_Handler, + }, + { + MethodName: "CreateTransferConfig", + Handler: _DataTransferService_CreateTransferConfig_Handler, + }, + { + MethodName: "UpdateTransferConfig", + Handler: 
_DataTransferService_UpdateTransferConfig_Handler, + }, + { + MethodName: "DeleteTransferConfig", + Handler: _DataTransferService_DeleteTransferConfig_Handler, + }, + { + MethodName: "GetTransferConfig", + Handler: _DataTransferService_GetTransferConfig_Handler, + }, + { + MethodName: "ListTransferConfigs", + Handler: _DataTransferService_ListTransferConfigs_Handler, + }, + { + MethodName: "ScheduleTransferRuns", + Handler: _DataTransferService_ScheduleTransferRuns_Handler, + }, + { + MethodName: "GetTransferRun", + Handler: _DataTransferService_GetTransferRun_Handler, + }, + { + MethodName: "DeleteTransferRun", + Handler: _DataTransferService_DeleteTransferRun_Handler, + }, + { + MethodName: "ListTransferRuns", + Handler: _DataTransferService_ListTransferRuns_Handler, + }, + { + MethodName: "ListTransferLogs", + Handler: _DataTransferService_ListTransferLogs_Handler, + }, + { + MethodName: "CheckValidCreds", + Handler: _DataTransferService_CheckValidCreds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/bigquery/datatransfer/v1/datatransfer.proto", +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/datatransfer/v1/datatransfer.proto", fileDescriptor_datatransfer_102617b20f253925) +} + +var fileDescriptor_datatransfer_102617b20f253925 = []byte{ + // 2343 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0xcf, 0x6f, 0xdb, 0xc8, + 0xf5, 0xff, 0x52, 0xfe, 0xfd, 0x64, 0x5b, 0xf2, 0xc4, 0x71, 0x18, 0x39, 0xbb, 0x5f, 0x2f, 0xdb, + 0x64, 0x1d, 0xb7, 0x95, 0xd6, 0xca, 0x26, 0xcd, 0x3a, 0x4d, 0x02, 0x59, 0x92, 0x1d, 0x15, 0x8e, + 0xe5, 0xa5, 0xe4, 0x64, 0x6b, 0x04, 0xcb, 0x65, 0xc4, 0xb1, 0xcc, 0x0d, 0x45, 0x32, 0x1c, 0xd2, + 0x89, 0x53, 0xa4, 0x87, 0x45, 0x50, 0x60, 0xd1, 0xde, 0xf6, 0xb0, 0x05, 0x8a, 0x5e, 0x7a, 0x69, + 0xb7, 0x40, 0xff, 0x84, 0xde, 0x7a, 0xea, 0xb5, 0xb7, 0x3d, 0xb4, 0x40, 0xdb, 0x4b, 0x6f, 0x05, + 0x7a, 0xe9, 0xa1, 0x87, 0x62, 0x86, 0x43, 0x89, 0x92, 0x68, 0x8b, 0x92, 0x0b, 0xf4, 0x64, 0xcd, + 0xbc, 0x1f, 0x33, 0xef, 0xc7, 0xe7, 0xcd, 0x7b, 0x84, 0xe1, 0x76, 0xd3, 0xb2, 0x9a, 0x06, 0xce, + 0x35, 0x0c, 0xcb, 0xd3, 0x72, 0x4f, 0xf5, 0xe6, 0x73, 0x0f, 0x3b, 0x27, 0x39, 0x4d, 0x75, 0x55, + 0xd7, 0x51, 0x4d, 0x72, 0x88, 0x9d, 0xdc, 0xf1, 0x7a, 0xd7, 0x3a, 0x6b, 0x3b, 0x96, 0x6b, 0xa1, + 0xab, 0xbe, 0x64, 0x96, 0x49, 0x66, 0x03, 0xc9, 0x6c, 0x17, 0xe7, 0xf1, 0x7a, 0xe6, 0x0a, 0x3f, + 0x40, 0xb5, 0xf5, 0x9c, 0x6a, 0x9a, 0x96, 0xab, 0xba, 0xba, 0x65, 0x12, 0x5f, 0x49, 0xe6, 0xfd, + 0x78, 0xc7, 0x77, 0x1f, 0x9d, 0x79, 0x9b, 0x4b, 0xb1, 0xd5, 0x53, 0xef, 0x30, 0xa7, 0x79, 0x0e, + 0x53, 0xcb, 0xe9, 0xcb, 0xbd, 0x74, 0xdc, 0xb2, 0xdd, 0x13, 0x4e, 0x5c, 0xe9, 0x25, 0x1e, 0xea, + 0xd8, 0xd0, 0x94, 0x96, 0x4a, 0x9e, 0x71, 0x8e, 0xff, 0xef, 0xe5, 0x70, 0xf5, 0x16, 0x26, 0xae, + 0xda, 0xb2, 0x4f, 0x3b, 0xff, 0x85, 0xa3, 0xda, 0x36, 0x76, 0xb8, 0x55, 0xd2, 0xaf, 0x27, 0xe1, + 0x42, 0x49, 0x75, 0xd5, 0x9a, 0xe5, 0x39, 0x0d, 0xbc, 0xa7, 0x3a, 0x6a, 0x0b, 0xbb, 0xd8, 0x41, + 0x97, 0x61, 0xda, 0xa6, 0x0b, 0x45, 0xd7, 0x44, 0x61, 0x45, 0x58, 0x9d, 0x91, 0xa7, 0xd8, 0xba, + 0xa2, 0xa1, 0x77, 0x60, 0x56, 0xd3, 0x89, 0x6d, 0xa8, 0x27, 0x8a, 0xa9, 0xb6, 0xb0, 0x98, 0x60, + 0xe4, 0x24, 0xdf, 0xdb, 0x55, 0x5b, 0x18, 0xad, 0x40, 0x52, 0xc3, 0xa4, 0xe1, 0xe8, 0x36, 0x35, + 0x55, 0x1c, 0xe3, 0x1c, 0x9d, 0x2d, 0x54, 0x83, 0x71, 0xf7, 0xc4, 0xc6, 0xe2, 0xf8, 0x8a, 0xb0, + 0x3a, 0x9f, 0xbf, 0x9f, 0x8d, 0x15, 0xa1, 0x6c, 0xc4, 0x4d, 0xb3, 0xf5, 0x13, 0x1b, 0xcb, 0x4c, + 0x19, 0xca, 0xc0, 0xb4, 0x83, 0x9f, 0x7b, 
0xba, 0x83, 0x35, 0x71, 0x62, 0x45, 0x58, 0x9d, 0x96, + 0xdb, 0x6b, 0x9f, 0x66, 0x63, 0xd5, 0xc5, 0x9a, 0x38, 0x19, 0xd0, 0xfc, 0x35, 0xba, 0x0e, 0xe9, + 0x63, 0xd5, 0xd0, 0x35, 0x16, 0x18, 0xc5, 0xc1, 0x4d, 0xfc, 0x52, 0x9c, 0x62, 0x77, 0x4e, 0x75, + 0xf6, 0x65, 0xba, 0x8d, 0xae, 0xc2, 0xbc, 0x6a, 0x18, 0xd6, 0x0b, 0xac, 0x29, 0xc7, 0xaa, 0xe1, + 0x61, 0x22, 0x4e, 0xaf, 0x8c, 0xad, 0xce, 0xc8, 0x73, 0x7c, 0xf7, 0x11, 0xdb, 0x44, 0x1f, 0xc0, + 0x4c, 0x4b, 0x37, 0x7d, 0x16, 0x71, 0x66, 0x45, 0x58, 0x4d, 0xe6, 0xaf, 0x04, 0x36, 0x06, 0xa1, + 0xc8, 0x96, 0x2c, 0xef, 0xa9, 0x81, 0x99, 0x84, 0x3c, 0xdd, 0xd2, 0x4d, 0xf6, 0x8b, 0x89, 0xaa, + 0x2f, 0xb9, 0x28, 0xc4, 0x12, 0x55, 0x5f, 0xfa, 0xa2, 0x32, 0x4c, 0xb2, 0x0c, 0x21, 0x62, 0x72, + 0x65, 0x6c, 0x35, 0x99, 0xdf, 0x18, 0xdd, 0xad, 0x32, 0xd7, 0x84, 0x6e, 0xc2, 0x52, 0xc8, 0x37, + 0xe1, 0xa8, 0xce, 0x32, 0x0f, 0x5d, 0xec, 0x50, 0x4b, 0xa1, 0xf8, 0x66, 0xe1, 0x42, 0x48, 0xec, + 0x08, 0x1b, 0xb6, 0xe2, 0x39, 0x86, 0x38, 0xc7, 0x64, 0x16, 0x3a, 0xa4, 0x07, 0xd8, 0xb0, 0xf7, + 0x1d, 0x03, 0x5d, 0x81, 0x19, 0xbd, 0xd5, 0xf2, 0x5c, 0xf5, 0xa9, 0x81, 0xc5, 0x79, 0x16, 0x9f, + 0xce, 0x06, 0x12, 0x61, 0xca, 0xc1, 0x0d, 0xcf, 0x21, 0x58, 0x4c, 0x31, 0x5a, 0xb0, 0x94, 0x74, + 0x18, 0xa7, 0x09, 0x80, 0x16, 0x21, 0x5d, 0xff, 0xc1, 0x5e, 0x59, 0xd9, 0xdf, 0xad, 0xed, 0x95, + 0x8b, 0x95, 0xad, 0x4a, 0xb9, 0x94, 0xfe, 0x3f, 0x04, 0x30, 0x59, 0xab, 0xcb, 0x95, 0xdd, 0xed, + 0xb4, 0x80, 0x92, 0x30, 0x55, 0xd9, 0xad, 0x97, 0xb7, 0xcb, 0x72, 0x3a, 0x41, 0x09, 0xa5, 0xea, + 0xfe, 0xe6, 0x4e, 0x39, 0x3d, 0x46, 0x09, 0x9b, 0xd5, 0xea, 0x4e, 0xb9, 0xb0, 0x9b, 0x1e, 0xa7, + 0x04, 0xb9, 0x5c, 0xac, 0xca, 0xa5, 0xf4, 0x04, 0x9a, 0x83, 0x99, 0xbd, 0x9d, 0xfd, 0x9a, 0xb2, + 0x57, 0xd8, 0x2e, 0xa7, 0x27, 0xa5, 0x7f, 0xcf, 0x00, 0x74, 0x3c, 0x85, 0x10, 0x8c, 0xb3, 0xf4, + 0xf7, 0xd1, 0xc1, 0x7e, 0xa3, 0x6f, 0xc2, 0x3c, 0xf5, 0xad, 0x42, 0x18, 0x0b, 0xc5, 0x8e, 0x0f, + 0x8e, 0x59, 0xad, 0x2d, 0x17, 0x01, 0xa0, 0xb1, 0x81, 0x00, 0x1a, 0xef, 0x07, 0xd0, 0x32, 0xcc, + 0x34, 0x0c, 0x1d, 0x9b, 0x2e, 0x3d, 0x65, 0x82, 0xd1, 0xa7, 0xfd, 0x8d, 0x8a, 0x86, 0x96, 0x60, + 0x92, 0x34, 0x2c, 0x1b, 0x13, 0x71, 0x92, 0x65, 0x27, 0x5f, 0xa1, 0x8f, 0x60, 0x2e, 0x88, 0xbb, + 0xc2, 0xe0, 0x37, 0xc5, 0xe0, 0x77, 0x23, 0x66, 0x9e, 0xd4, 0xf9, 0x6f, 0x06, 0xb9, 0x59, 0x37, + 0xb4, 0x42, 0xf7, 0x60, 0x99, 0x78, 0xb6, 0x6d, 0x39, 0x2e, 0x51, 0x5a, 0x9e, 0xe1, 0xea, 0xb6, + 0x81, 0x95, 0x80, 0x83, 0x82, 0x84, 0x46, 0xed, 0x72, 0xc0, 0xf2, 0x90, 0x73, 0x04, 0x0a, 0x09, + 0xba, 0x05, 0x97, 0x3c, 0x5b, 0x53, 0x5d, 0xac, 0x68, 0x58, 0xd5, 0x0c, 0xdd, 0xc4, 0x0a, 0xc1, + 0x0d, 0xcb, 0xd4, 0x08, 0x83, 0xcf, 0x84, 0x7c, 0xd1, 0x27, 0x97, 0x38, 0xb5, 0xe6, 0x13, 0x29, + 0x74, 0x35, 0x7c, 0xa8, 0x7a, 0x86, 0xab, 0x90, 0xc6, 0x11, 0xd6, 0x3c, 0xc3, 0x07, 0xcd, 0x8c, + 0x9c, 0xe2, 0xfb, 0x35, 0xbe, 0x8d, 0x6e, 0x83, 0xd8, 0xbe, 0x62, 0xc3, 0x23, 0xae, 0xd5, 0xea, + 0x88, 0x24, 0xd9, 0xfd, 0x96, 0x02, 0x7a, 0x91, 0x91, 0xdb, 0x92, 0x07, 0x00, 0x76, 0x00, 0x0c, + 0x22, 0xce, 0x9e, 0x1b, 0x5b, 0x21, 0x6d, 0xb4, 0xd0, 0xf6, 0xa0, 0x63, 0xea, 0x88, 0x63, 0xc2, + 0x01, 0xa4, 0x7a, 0xee, 0x91, 0xe5, 0xe8, 0xaf, 0x7c, 0x18, 0xb1, 0x90, 0xcd, 0xb3, 0x90, 0x15, + 0x87, 0x3e, 0x3e, 0x5b, 0x08, 0xeb, 0x62, 0x21, 0x5c, 0x50, 0x7b, 0xb7, 0x50, 0x0b, 0x16, 0x58, + 0x06, 0x3b, 0xf8, 0xd0, 0xc1, 0xe4, 0xc8, 0x3f, 0x32, 0xc5, 0x8e, 0x2c, 0x0c, 0x7f, 0x24, 0xfd, + 0x29, 0xfb, 0x9a, 0xd8, 0x81, 0x29, 0xad, 0x7b, 0x03, 0x6d, 0xc1, 0x4a, 0x10, 0xbe, 0xae, 0x63, + 0x5f, 0xe8, 0xa6, 0x66, 0xbd, 0x50, 0x34, 0xf5, 0x84, 0x88, 0x69, 
0x16, 0xff, 0x2b, 0x9c, 0x2f, + 0xa4, 0xf2, 0x31, 0x63, 0x2a, 0xa9, 0x27, 0x04, 0xbd, 0x07, 0x8b, 0x2d, 0xd5, 0xf4, 0x54, 0x43, + 0x71, 0x3c, 0x93, 0x28, 0x9a, 0x4e, 0x68, 0xdd, 0xd0, 0xc4, 0x05, 0x16, 0x57, 0xe4, 0xd3, 0x64, + 0xcf, 0x24, 0x25, 0x4e, 0x41, 0xfb, 0x70, 0xb9, 0xa5, 0x9b, 0x7a, 0xcb, 0xeb, 0x64, 0x81, 0xa2, + 0x9b, 0x2e, 0x76, 0x8e, 0x55, 0x43, 0x44, 0xac, 0xec, 0x5e, 0xee, 0x2f, 0xbb, 0xfc, 0xf1, 0x96, + 0x2f, 0x71, 0xd9, 0x20, 0x45, 0x2a, 0x5c, 0x52, 0x22, 0xb0, 0xd0, 0xe7, 0x67, 0x24, 0xc1, 0xdb, + 0x85, 0xfd, 0xfa, 0x83, 0xaa, 0x5c, 0x39, 0x28, 0xd4, 0x2b, 0xd5, 0x5d, 0x25, 0xa2, 0x54, 0x2d, + 0x01, 0xea, 0xe6, 0x29, 0x56, 0x4b, 0xe5, 0xb4, 0x40, 0x65, 0xb7, 0xab, 0xd5, 0xed, 0x9d, 0xb2, + 0xc2, 0x6a, 0x51, 0x04, 0x4f, 0x42, 0x6a, 0x40, 0xaa, 0xc7, 0xd3, 0xe8, 0x1d, 0x78, 0xab, 0x54, + 0xa8, 0x17, 0x14, 0xb9, 0xbc, 0x25, 0x97, 0x6b, 0x0f, 0xa2, 0x4e, 0x44, 0x30, 0x5f, 0xdb, 0xa9, + 0x94, 0x2a, 0xbb, 0xdb, 0xca, 0xe3, 0xca, 0x6e, 0xa9, 0xfa, 0x38, 0x2d, 0xa0, 0xcb, 0x70, 0xb1, + 0xb8, 0x5f, 0xab, 0x57, 0x1f, 0x2a, 0x3d, 0xa4, 0x84, 0xb4, 0x06, 0x8b, 0xdb, 0xd8, 0xed, 0x04, + 0x57, 0xc6, 0xcf, 0x3d, 0x4c, 0xdc, 0xa8, 0x3a, 0x28, 0x19, 0xb0, 0xb4, 0xa3, 0x93, 0x10, 0x33, + 0x09, 0xb8, 0x97, 0x60, 0xd2, 0x56, 0x1d, 0x6c, 0xba, 0x9c, 0x9f, 0xaf, 0xd0, 0x5b, 0x14, 0x62, + 0x4d, 0xac, 0xb8, 0xd6, 0x33, 0x1c, 0x34, 0x0c, 0x33, 0x74, 0xa7, 0x4e, 0x37, 0x68, 0xb5, 0x63, + 0x64, 0xa2, 0xbf, 0xf2, 0x7b, 0x86, 0x09, 0x79, 0x9a, 0x6e, 0xd4, 0xf4, 0x57, 0x58, 0xfa, 0x52, + 0x80, 0x4b, 0x7d, 0xc7, 0x11, 0xdb, 0x32, 0x09, 0x46, 0x75, 0x98, 0x0d, 0x55, 0x64, 0x22, 0x0a, + 0x0c, 0xbc, 0xeb, 0x43, 0xa7, 0xb2, 0x9c, 0xec, 0x94, 0x70, 0x82, 0xae, 0x41, 0xca, 0xc4, 0x2f, + 0x5d, 0x25, 0x74, 0x65, 0xbf, 0xd0, 0xcf, 0xd1, 0xed, 0xbd, 0xe0, 0xda, 0xd2, 0xef, 0x05, 0x58, + 0x2e, 0x3a, 0xb4, 0xc9, 0x08, 0x2a, 0x5d, 0xd1, 0x32, 0x0f, 0xf5, 0xe6, 0x20, 0x6f, 0x7c, 0x0c, + 0xa9, 0x76, 0x9d, 0x6e, 0x30, 0x09, 0xa6, 0x3f, 0x99, 0xbf, 0x39, 0x64, 0xa5, 0xe6, 0xc7, 0xcd, + 0xbb, 0x5d, 0x6b, 0xf4, 0x9d, 0xde, 0xca, 0xd2, 0xb0, 0xb4, 0xe0, 0x1d, 0xea, 0x2e, 0x0a, 0x45, + 0x4b, 0xc3, 0xd2, 0xdf, 0x05, 0x58, 0xde, 0x67, 0xe5, 0x37, 0xda, 0x8c, 0x88, 0xeb, 0x0a, 0xff, + 0xbb, 0xeb, 0xa2, 0x3b, 0x90, 0xe4, 0x6f, 0x09, 0xed, 0x94, 0x59, 0xba, 0x24, 0xf3, 0x99, 0x3e, + 0x30, 0x6f, 0xd1, 0x06, 0xe7, 0xa1, 0x4a, 0x9e, 0xc9, 0xe0, 0xb3, 0xd3, 0xdf, 0x52, 0x16, 0xc4, + 0x6d, 0xec, 0x46, 0xdb, 0x19, 0x95, 0xea, 0xeb, 0xb0, 0x5c, 0xc2, 0x06, 0x3e, 0xcd, 0x35, 0x51, + 0x22, 0xdf, 0x82, 0x8b, 0xa1, 0x23, 0x64, 0xcf, 0x3c, 0x8b, 0x39, 0x0b, 0x62, 0xb7, 0xfe, 0x01, + 0xfc, 0x3f, 0x13, 0x20, 0x43, 0xc1, 0xd0, 0x7d, 0x9d, 0x81, 0xf8, 0xbb, 0x06, 0xa9, 0xee, 0xce, + 0x85, 0x88, 0x09, 0xbf, 0xb1, 0x0d, 0xb7, 0x2e, 0xe4, 0x5c, 0x38, 0xfd, 0x95, 0x00, 0xcb, 0x91, + 0x57, 0xe3, 0x58, 0xfd, 0x04, 0xd2, 0x3d, 0x69, 0x14, 0xe0, 0x75, 0xc4, 0x3c, 0x4a, 0x75, 0xe7, + 0x51, 0x7c, 0xdc, 0xfe, 0x29, 0xe1, 0x57, 0x94, 0x90, 0xcf, 0x07, 0x7a, 0x70, 0x07, 0x26, 0x89, + 0xab, 0xba, 0xd8, 0x77, 0xdc, 0x7c, 0xfe, 0xfd, 0x21, 0xef, 0x5c, 0xa3, 0xc2, 0x32, 0xd7, 0x71, + 0x1e, 0x3f, 0xa3, 0x4f, 0x21, 0xe9, 0x78, 0xa6, 0xa2, 0xba, 0x2e, 0x1d, 0x26, 0x59, 0x73, 0x38, + 0x9f, 0xaf, 0xc4, 0xbc, 0xce, 0x29, 0x66, 0x67, 0x65, 0xcf, 0x2c, 0xf8, 0x0a, 0x65, 0x70, 0xda, + 0xbf, 0xa5, 0x9b, 0x00, 0x1d, 0x0a, 0x5a, 0x86, 0x4b, 0xf2, 0xfe, 0xae, 0x52, 0xa8, 0xd7, 0xcb, + 0x0f, 0xf7, 0xea, 0xfd, 0xcd, 0xf8, 0x4e, 0xa1, 0x5e, 0xae, 0xd5, 0xd3, 0x82, 0xf4, 0x73, 0x01, + 0xc4, 0xfe, 0x93, 0x78, 0x1e, 0x3c, 0x0e, 0x75, 0xa9, 0xf4, 0x39, 0xe7, 0x49, 0x90, 0x1f, 
0xd2, + 0xa1, 0x14, 0x28, 0xed, 0x26, 0x95, 0x1e, 0x10, 0x3b, 0xfc, 0x5f, 0x0b, 0xdd, 0xe1, 0xdf, 0xb1, + 0x9a, 0x43, 0x3e, 0x60, 0xe3, 0x67, 0x06, 0x6c, 0xa2, 0x27, 0x60, 0xcf, 0x60, 0xae, 0x85, 0x09, + 0x61, 0xe2, 0x27, 0x41, 0xd7, 0x3e, 0x9f, 0xdf, 0x1a, 0xd2, 0xe0, 0x87, 0xbe, 0x8e, 0x2c, 0xff, + 0x5b, 0xc3, 0xc7, 0xd8, 0xd1, 0xdd, 0x13, 0x79, 0x96, 0x2b, 0xa7, 0x8d, 0x01, 0xa1, 0x28, 0x14, + 0xfb, 0x8d, 0xe3, 0xae, 0x6f, 0xc0, 0x42, 0xdb, 0xf5, 0x5c, 0x2a, 0x70, 0xff, 0xad, 0xd1, 0x6e, + 0x23, 0xb7, 0x31, 0xcd, 0x37, 0xe2, 0x87, 0xe1, 0xdb, 0xb0, 0x54, 0x3c, 0xc2, 0x8d, 0x67, 0x8f, + 0xe8, 0xb4, 0x58, 0x74, 0xb0, 0x46, 0xce, 0x2a, 0x7c, 0x05, 0xb8, 0xd4, 0xc7, 0xcd, 0xad, 0xba, + 0x06, 0xa9, 0x23, 0x95, 0x28, 0x6c, 0xea, 0x54, 0x1a, 0x94, 0xc4, 0x24, 0xa7, 0xe5, 0xb9, 0x23, + 0x95, 0x74, 0xf8, 0x59, 0x81, 0x0a, 0x3a, 0xba, 0x61, 0xa0, 0xff, 0x01, 0x00, 0x71, 0x55, 0xc7, + 0x55, 0x5c, 0x9d, 0x7f, 0x0f, 0x89, 0x7a, 0x6f, 0xea, 0xc1, 0xa7, 0x19, 0x79, 0x86, 0x71, 0xd3, + 0x35, 0xba, 0x09, 0xd3, 0xd8, 0xd4, 0x7c, 0xc1, 0xb1, 0x81, 0x82, 0x53, 0xd8, 0xd4, 0xe8, 0x4a, + 0x3a, 0x84, 0x2b, 0xd1, 0x17, 0xe5, 0x16, 0x6f, 0xc1, 0xf8, 0x39, 0x91, 0xc3, 0xe4, 0xf3, 0x3f, + 0x5e, 0xf6, 0x3f, 0x0f, 0xb5, 0x8b, 0x14, 0x76, 0x8e, 0xf5, 0x06, 0x46, 0x7f, 0x16, 0x60, 0xae, + 0xab, 0x1b, 0x44, 0x77, 0x62, 0x9e, 0x11, 0xd5, 0x43, 0x66, 0x86, 0xef, 0xc7, 0xa4, 0x8f, 0x3f, + 0xfb, 0xe3, 0xdf, 0xbe, 0x48, 0x7c, 0x84, 0x72, 0xb9, 0xe3, 0xf5, 0xdc, 0x0f, 0x69, 0x06, 0xdc, + 0xb5, 0x1d, 0xeb, 0x53, 0xdc, 0x70, 0x49, 0x6e, 0x2d, 0x67, 0x58, 0x0d, 0xff, 0xe3, 0x5d, 0x6e, + 0x2d, 0x17, 0x6a, 0xdc, 0x72, 0x6b, 0xaf, 0x0f, 0xae, 0xa2, 0x6f, 0x44, 0x8a, 0x74, 0xb3, 0xa1, + 0x7f, 0x08, 0x90, 0xea, 0x69, 0x2a, 0xd1, 0xdd, 0x21, 0x6a, 0x68, 0x7f, 0xef, 0x9b, 0xb9, 0x37, + 0xaa, 0xb8, 0x1f, 0xd4, 0x1e, 0x93, 0xfd, 0xdc, 0x3b, 0xc5, 0xe8, 0xd7, 0x61, 0x73, 0xda, 0x26, + 0xf7, 0x89, 0x74, 0xb1, 0xa1, 0x5f, 0x24, 0x60, 0x31, 0xaa, 0x5b, 0x45, 0x9b, 0x31, 0x2f, 0x7e, + 0x46, 0xab, 0x9b, 0x19, 0xed, 0x09, 0x97, 0x3e, 0x17, 0x98, 0xd1, 0x9f, 0x09, 0xd2, 0x8d, 0x38, + 0x56, 0xf7, 0x3c, 0xf8, 0x1b, 0xbd, 0x7d, 0xe9, 0xc1, 0x86, 0xf4, 0xee, 0x69, 0xae, 0x18, 0x24, + 0x8b, 0x7e, 0x97, 0x80, 0xc5, 0xa8, 0x36, 0x38, 0xb6, 0x7f, 0xce, 0xe8, 0xa1, 0x47, 0xf5, 0xcf, + 0x57, 0xbe, 0x7f, 0x7e, 0x29, 0xe4, 0x8b, 0xcc, 0xae, 0x9e, 0x3b, 0x67, 0xcf, 0x02, 0x46, 0x8f, + 0xc1, 0xb9, 0xb5, 0xd7, 0xfd, 0xfe, 0xfa, 0x7e, 0xfe, 0xbb, 0xb1, 0xf4, 0xc6, 0xd0, 0x85, 0xbe, + 0x16, 0x60, 0x31, 0xaa, 0x57, 0x8e, 0xed, 0xbf, 0x33, 0x1a, 0xed, 0xcc, 0x52, 0x5f, 0xd9, 0x2c, + 0xb7, 0x6c, 0xf7, 0x44, 0x3a, 0x64, 0xfe, 0xf9, 0x64, 0xed, 0xc6, 0xc0, 0x3a, 0xd1, 0x6f, 0xc2, + 0xc1, 0xf5, 0xb5, 0x77, 0x23, 0xc5, 0xfa, 0x59, 0xd1, 0x3f, 0x05, 0x58, 0xe8, 0x1b, 0x1c, 0xd0, + 0xfd, 0xf8, 0x55, 0xf1, 0xbf, 0x9a, 0x16, 0xdc, 0x6a, 0x34, 0x9a, 0xd5, 0x28, 0xb6, 0xd5, 0x6f, + 0x12, 0x70, 0x21, 0xa2, 0xa5, 0x47, 0x85, 0x11, 0xba, 0xcd, 0xee, 0x49, 0x25, 0xb3, 0x79, 0x1e, + 0x15, 0xbc, 0x62, 0x76, 0xbb, 0x61, 0xb8, 0xda, 0xd1, 0x76, 0xc3, 0xe0, 0x52, 0x81, 0x7e, 0x9b, + 0x80, 0xc5, 0xa8, 0xf7, 0x38, 0x76, 0x66, 0x9f, 0xd1, 0x75, 0x64, 0x8a, 0xe7, 0xd2, 0xc1, 0x3d, + 0xf1, 0x13, 0xbf, 0x4e, 0xbc, 0x11, 0xa4, 0xcd, 0xc1, 0xbe, 0x88, 0x42, 0x73, 0xf0, 0x91, 0x8c, + 0x6a, 0xdd, 0x10, 0xd6, 0x0e, 0xee, 0x48, 0xb7, 0x4e, 0x51, 0x34, 0x58, 0x18, 0xfd, 0x4b, 0x80, + 0xf9, 0xee, 0x11, 0x18, 0x7d, 0x6f, 0x78, 0xa4, 0x74, 0x26, 0xe1, 0xcc, 0x08, 0x1d, 0x8e, 0xe4, + 0x32, 0x8f, 0x98, 0x68, 0x63, 0x78, 0x8c, 0xe4, 0x68, 0x73, 0x44, 0xa1, 0xf2, 0x1e, 0xca, 0xc6, + 0x83, 0x4a, 0x20, 
0x81, 0xfe, 0x22, 0xc0, 0x42, 0xdf, 0x40, 0x1f, 0xbb, 0x4e, 0x9c, 0xf6, 0x29, + 0xe0, 0xd4, 0xf2, 0xc7, 0x8d, 0x5c, 0x3b, 0x97, 0x91, 0x6b, 0xc3, 0x1a, 0xf9, 0x79, 0x02, 0xd2, + 0xbd, 0xe3, 0x1d, 0xba, 0x77, 0xbe, 0x09, 0x34, 0x73, 0x7f, 0x64, 0x79, 0x8e, 0x81, 0xee, 0x80, + 0x0f, 0x8b, 0x00, 0x66, 0x5c, 0x3b, 0xe0, 0x71, 0xd2, 0x9e, 0x49, 0xa0, 0xaf, 0x7a, 0x7c, 0x41, + 0xe7, 0xad, 0x91, 0x7c, 0x11, 0x9a, 0x42, 0x47, 0xf2, 0x45, 0x78, 0xd0, 0x93, 0xbe, 0xf0, 0xeb, + 0xc1, 0x4f, 0x05, 0x54, 0x19, 0xc5, 0x1b, 0x41, 0xa4, 0xdb, 0x14, 0xaa, 0xfc, 0xe0, 0x3e, 0xba, + 0x1b, 0xd7, 0x39, 0x91, 0x0a, 0xd0, 0x97, 0x09, 0x48, 0xf5, 0x0c, 0x71, 0xb1, 0x9b, 0xee, 0xe8, + 0x51, 0x31, 0x76, 0xd3, 0x7d, 0xca, 0xec, 0x28, 0xbd, 0xf1, 0x1d, 0xf5, 0x23, 0xe9, 0xfe, 0x90, + 0x83, 0xc6, 0x46, 0xa3, 0x5b, 0x23, 0x2d, 0x9a, 0xb7, 0xa5, 0x1b, 0x31, 0x66, 0x8f, 0x08, 0xc9, + 0xcd, 0xbf, 0x0a, 0x70, 0xbd, 0x61, 0xb5, 0xe2, 0x19, 0xb3, 0xb9, 0x10, 0x9e, 0xd9, 0xf6, 0x68, + 0x45, 0xd8, 0x13, 0x0e, 0x3e, 0xe4, 0xb2, 0x4d, 0xcb, 0x50, 0xcd, 0x66, 0xd6, 0x72, 0x9a, 0xb9, + 0x26, 0x36, 0x59, 0xbd, 0xc8, 0xf9, 0x24, 0xd5, 0xd6, 0xc9, 0x80, 0xff, 0x6f, 0xb8, 0x13, 0x5e, + 0xff, 0x26, 0x71, 0x75, 0xdb, 0xd7, 0x59, 0x64, 0xf7, 0xd9, 0xd4, 0x9b, 0x1f, 0xb2, 0xfb, 0x84, + 0x8f, 0xcf, 0x3e, 0x5a, 0xff, 0x43, 0xc0, 0xf7, 0x84, 0xf1, 0x3d, 0x09, 0xf8, 0x9e, 0x84, 0xf9, + 0x9e, 0x3c, 0x5a, 0x7f, 0x3a, 0xc9, 0x6e, 0x73, 0xe3, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x48, + 0xcd, 0xb8, 0x10, 0xeb, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/transfer.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/transfer.pb.go new file mode 100644 index 0000000..0aa964d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1/transfer.pb.go @@ -0,0 +1,605 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/datatransfer/v1/transfer.proto + +package datatransfer // import "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// DEPRECATED. Represents data transfer type. +type TransferType int32 + +const ( + // Invalid or Unknown transfer type placeholder. + TransferType_TRANSFER_TYPE_UNSPECIFIED TransferType = 0 + // Batch data transfer. + TransferType_BATCH TransferType = 1 + // Streaming data transfer. Streaming data source currently doesn't + // support multiple transfer configs per project. 
+ TransferType_STREAMING TransferType = 2 +) + +var TransferType_name = map[int32]string{ + 0: "TRANSFER_TYPE_UNSPECIFIED", + 1: "BATCH", + 2: "STREAMING", +} +var TransferType_value = map[string]int32{ + "TRANSFER_TYPE_UNSPECIFIED": 0, + "BATCH": 1, + "STREAMING": 2, +} + +func (x TransferType) String() string { + return proto.EnumName(TransferType_name, int32(x)) +} +func (TransferType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{0} +} + +// Represents data transfer run state. +type TransferState int32 + +const ( + // State placeholder. + TransferState_TRANSFER_STATE_UNSPECIFIED TransferState = 0 + // Data transfer is scheduled and is waiting to be picked up by + // data transfer backend. + TransferState_PENDING TransferState = 2 + // Data transfer is in progress. + TransferState_RUNNING TransferState = 3 + // Data transfer completed successsfully. + TransferState_SUCCEEDED TransferState = 4 + // Data transfer failed. + TransferState_FAILED TransferState = 5 + // Data transfer is cancelled. + TransferState_CANCELLED TransferState = 6 +) + +var TransferState_name = map[int32]string{ + 0: "TRANSFER_STATE_UNSPECIFIED", + 2: "PENDING", + 3: "RUNNING", + 4: "SUCCEEDED", + 5: "FAILED", + 6: "CANCELLED", +} +var TransferState_value = map[string]int32{ + "TRANSFER_STATE_UNSPECIFIED": 0, + "PENDING": 2, + "RUNNING": 3, + "SUCCEEDED": 4, + "FAILED": 5, + "CANCELLED": 6, +} + +func (x TransferState) String() string { + return proto.EnumName(TransferState_name, int32(x)) +} +func (TransferState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{1} +} + +// Represents data transfer user facing message severity. +type TransferMessage_MessageSeverity int32 + +const ( + // No severity specified. + TransferMessage_MESSAGE_SEVERITY_UNSPECIFIED TransferMessage_MessageSeverity = 0 + // Informational message. + TransferMessage_INFO TransferMessage_MessageSeverity = 1 + // Warning message. + TransferMessage_WARNING TransferMessage_MessageSeverity = 2 + // Error message. + TransferMessage_ERROR TransferMessage_MessageSeverity = 3 +) + +var TransferMessage_MessageSeverity_name = map[int32]string{ + 0: "MESSAGE_SEVERITY_UNSPECIFIED", + 1: "INFO", + 2: "WARNING", + 3: "ERROR", +} +var TransferMessage_MessageSeverity_value = map[string]int32{ + "MESSAGE_SEVERITY_UNSPECIFIED": 0, + "INFO": 1, + "WARNING": 2, + "ERROR": 3, +} + +func (x TransferMessage_MessageSeverity) String() string { + return proto.EnumName(TransferMessage_MessageSeverity_name, int32(x)) +} +func (TransferMessage_MessageSeverity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{2, 0} +} + +// Represents a data transfer configuration. A transfer configuration +// contains all metadata needed to perform a data transfer. For example, +// `destination_dataset_id` specifies where data should be stored. +// When a new transfer configuration is created, the specified +// `destination_dataset_id` is created when needed and shared with the +// appropriate data source service account. +type TransferConfig struct { + // The resource name of the transfer config. + // Transfer config names have the form + // `projects/{project_id}/transferConfigs/{config_id}`. + // Where `config_id` is usually a uuid, even though it is not + // guaranteed or required. The name is ignored when creating a transfer + // config. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The BigQuery target dataset id. 
+ DestinationDatasetId string `protobuf:"bytes,2,opt,name=destination_dataset_id,json=destinationDatasetId,proto3" json:"destination_dataset_id,omitempty"` + // User specified display name for the data transfer. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Data source id. Cannot be changed once data transfer is created. + DataSourceId string `protobuf:"bytes,5,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"` + // Data transfer specific parameters. + Params *_struct.Struct `protobuf:"bytes,9,opt,name=params,proto3" json:"params,omitempty"` + // Data transfer schedule. + // If the data source does not support a custom schedule, this should be + // empty. If it is empty, the default value for the data source will be + // used. + // The specified times are in UTC. + // Examples of valid format: + // `1st,3rd monday of month 15:30`, + // `every wed,fri of jan,jun 13:15`, and + // `first sunday of quarter 00:00`. + // See more explanation about the format here: + // https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + // NOTE: the granularity should be at least 8 hours, or less frequent. + Schedule string `protobuf:"bytes,7,opt,name=schedule,proto3" json:"schedule,omitempty"` + // The number of days to look back to automatically refresh the data. + // For example, if `data_refresh_window_days = 10`, then every day + // BigQuery reingests data for [today-10, today-1], rather than ingesting data + // for just [today-1]. + // Only valid if the data source supports the feature. Set the value to 0 + // to use the default value. + DataRefreshWindowDays int32 `protobuf:"varint,12,opt,name=data_refresh_window_days,json=dataRefreshWindowDays,proto3" json:"data_refresh_window_days,omitempty"` + // Is this config disabled. When set to true, no runs are scheduled + // for a given transfer. + Disabled bool `protobuf:"varint,13,opt,name=disabled,proto3" json:"disabled,omitempty"` + // Output only. Data transfer modification time. Ignored by server on input. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Output only. Next time when data transfer will run. + NextRunTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=next_run_time,json=nextRunTime,proto3" json:"next_run_time,omitempty"` + // Output only. State of the most recently updated transfer run. + State TransferState `protobuf:"varint,10,opt,name=state,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferState" json:"state,omitempty"` + // Output only. Unique ID of the user on whose behalf transfer is done. + // Applicable only to data sources that do not support service accounts. + // When set to 0, the data source service account credentials are used. + // May be negative. Note, that this identifier is not stable. + // It may change over time even for the same user. + UserId int64 `protobuf:"varint,11,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + // Output only. Region in which BigQuery dataset is located. 
+ DatasetRegion string `protobuf:"bytes,14,opt,name=dataset_region,json=datasetRegion,proto3" json:"dataset_region,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferConfig) Reset() { *m = TransferConfig{} } +func (m *TransferConfig) String() string { return proto.CompactTextString(m) } +func (*TransferConfig) ProtoMessage() {} +func (*TransferConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{0} +} +func (m *TransferConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferConfig.Unmarshal(m, b) +} +func (m *TransferConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferConfig.Marshal(b, m, deterministic) +} +func (dst *TransferConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferConfig.Merge(dst, src) +} +func (m *TransferConfig) XXX_Size() int { + return xxx_messageInfo_TransferConfig.Size(m) +} +func (m *TransferConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TransferConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferConfig proto.InternalMessageInfo + +func (m *TransferConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TransferConfig) GetDestinationDatasetId() string { + if m != nil { + return m.DestinationDatasetId + } + return "" +} + +func (m *TransferConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *TransferConfig) GetDataSourceId() string { + if m != nil { + return m.DataSourceId + } + return "" +} + +func (m *TransferConfig) GetParams() *_struct.Struct { + if m != nil { + return m.Params + } + return nil +} + +func (m *TransferConfig) GetSchedule() string { + if m != nil { + return m.Schedule + } + return "" +} + +func (m *TransferConfig) GetDataRefreshWindowDays() int32 { + if m != nil { + return m.DataRefreshWindowDays + } + return 0 +} + +func (m *TransferConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +func (m *TransferConfig) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *TransferConfig) GetNextRunTime() *timestamp.Timestamp { + if m != nil { + return m.NextRunTime + } + return nil +} + +func (m *TransferConfig) GetState() TransferState { + if m != nil { + return m.State + } + return TransferState_TRANSFER_STATE_UNSPECIFIED +} + +func (m *TransferConfig) GetUserId() int64 { + if m != nil { + return m.UserId + } + return 0 +} + +func (m *TransferConfig) GetDatasetRegion() string { + if m != nil { + return m.DatasetRegion + } + return "" +} + +// Represents a data transfer run. +type TransferRun struct { + // The resource name of the transfer run. + // Transfer run names have the form + // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`. + // The name is ignored when creating a transfer run. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Minimum time after which a transfer run can be started. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // For batch transfer runs, specifies the date and time that + // data should be ingested. + RunTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=run_time,json=runTime,proto3" json:"run_time,omitempty"` + // Status of the transfer run. 
+ ErrorStatus *status.Status `protobuf:"bytes,21,opt,name=error_status,json=errorStatus,proto3" json:"error_status,omitempty"` + // Output only. Time when transfer run was started. + // Parameter ignored by server for input requests. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time when transfer run ended. + // Parameter ignored by server for input requests. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. Last time the data transfer run state was updated. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Output only. Data transfer specific parameters. + Params *_struct.Struct `protobuf:"bytes,9,opt,name=params,proto3" json:"params,omitempty"` + // Output only. The BigQuery target dataset id. + DestinationDatasetId string `protobuf:"bytes,2,opt,name=destination_dataset_id,json=destinationDatasetId,proto3" json:"destination_dataset_id,omitempty"` + // Output only. Data source id. + DataSourceId string `protobuf:"bytes,7,opt,name=data_source_id,json=dataSourceId,proto3" json:"data_source_id,omitempty"` + // Data transfer run state. Ignored for input requests. + State TransferState `protobuf:"varint,8,opt,name=state,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferState" json:"state,omitempty"` + // Output only. Unique ID of the user on whose behalf transfer is done. + // Applicable only to data sources that do not support service accounts. + // When set to 0, the data source service account credentials are used. + // May be negative. Note, that this identifier is not stable. + // It may change over time even for the same user. + UserId int64 `protobuf:"varint,11,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + // Output only. Describes the schedule of this transfer run if it was + // created as part of a regular schedule. For batch transfer runs that are + // scheduled manually, this is empty. + // NOTE: the system might choose to delay the schedule depending on the + // current load, so `schedule_time` doesn't always matches this. 
+ Schedule string `protobuf:"bytes,12,opt,name=schedule,proto3" json:"schedule,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferRun) Reset() { *m = TransferRun{} } +func (m *TransferRun) String() string { return proto.CompactTextString(m) } +func (*TransferRun) ProtoMessage() {} +func (*TransferRun) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{1} +} +func (m *TransferRun) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferRun.Unmarshal(m, b) +} +func (m *TransferRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferRun.Marshal(b, m, deterministic) +} +func (dst *TransferRun) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferRun.Merge(dst, src) +} +func (m *TransferRun) XXX_Size() int { + return xxx_messageInfo_TransferRun.Size(m) +} +func (m *TransferRun) XXX_DiscardUnknown() { + xxx_messageInfo_TransferRun.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferRun proto.InternalMessageInfo + +func (m *TransferRun) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TransferRun) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *TransferRun) GetRunTime() *timestamp.Timestamp { + if m != nil { + return m.RunTime + } + return nil +} + +func (m *TransferRun) GetErrorStatus() *status.Status { + if m != nil { + return m.ErrorStatus + } + return nil +} + +func (m *TransferRun) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TransferRun) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *TransferRun) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *TransferRun) GetParams() *_struct.Struct { + if m != nil { + return m.Params + } + return nil +} + +func (m *TransferRun) GetDestinationDatasetId() string { + if m != nil { + return m.DestinationDatasetId + } + return "" +} + +func (m *TransferRun) GetDataSourceId() string { + if m != nil { + return m.DataSourceId + } + return "" +} + +func (m *TransferRun) GetState() TransferState { + if m != nil { + return m.State + } + return TransferState_TRANSFER_STATE_UNSPECIFIED +} + +func (m *TransferRun) GetUserId() int64 { + if m != nil { + return m.UserId + } + return 0 +} + +func (m *TransferRun) GetSchedule() string { + if m != nil { + return m.Schedule + } + return "" +} + +// Represents a user facing message for a particular data transfer run. +type TransferMessage struct { + // Time when message was logged. + MessageTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=message_time,json=messageTime,proto3" json:"message_time,omitempty"` + // Message severity. + Severity TransferMessage_MessageSeverity `protobuf:"varint,2,opt,name=severity,proto3,enum=google.cloud.bigquery.datatransfer.v1.TransferMessage_MessageSeverity" json:"severity,omitempty"` + // Message text. 
+ MessageText string `protobuf:"bytes,3,opt,name=message_text,json=messageText,proto3" json:"message_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferMessage) Reset() { *m = TransferMessage{} } +func (m *TransferMessage) String() string { return proto.CompactTextString(m) } +func (*TransferMessage) ProtoMessage() {} +func (*TransferMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_f4aaf0447c06b362, []int{2} +} +func (m *TransferMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferMessage.Unmarshal(m, b) +} +func (m *TransferMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferMessage.Marshal(b, m, deterministic) +} +func (dst *TransferMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferMessage.Merge(dst, src) +} +func (m *TransferMessage) XXX_Size() int { + return xxx_messageInfo_TransferMessage.Size(m) +} +func (m *TransferMessage) XXX_DiscardUnknown() { + xxx_messageInfo_TransferMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferMessage proto.InternalMessageInfo + +func (m *TransferMessage) GetMessageTime() *timestamp.Timestamp { + if m != nil { + return m.MessageTime + } + return nil +} + +func (m *TransferMessage) GetSeverity() TransferMessage_MessageSeverity { + if m != nil { + return m.Severity + } + return TransferMessage_MESSAGE_SEVERITY_UNSPECIFIED +} + +func (m *TransferMessage) GetMessageText() string { + if m != nil { + return m.MessageText + } + return "" +} + +func init() { + proto.RegisterType((*TransferConfig)(nil), "google.cloud.bigquery.datatransfer.v1.TransferConfig") + proto.RegisterType((*TransferRun)(nil), "google.cloud.bigquery.datatransfer.v1.TransferRun") + proto.RegisterType((*TransferMessage)(nil), "google.cloud.bigquery.datatransfer.v1.TransferMessage") + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.TransferType", TransferType_name, TransferType_value) + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.TransferState", TransferState_name, TransferState_value) + proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.TransferMessage_MessageSeverity", TransferMessage_MessageSeverity_name, TransferMessage_MessageSeverity_value) +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/datatransfer/v1/transfer.proto", fileDescriptor_transfer_f4aaf0447c06b362) +} + +var fileDescriptor_transfer_f4aaf0447c06b362 = []byte{ + // 922 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0xc6, 0xf9, 0xcf, 0x71, 0x92, 0x8d, 0x46, 0x2c, 0x35, 0xd5, 0x02, 0xa1, 0xa2, 0x52, 0xd8, + 0x0b, 0x5b, 0x2d, 0x5d, 0x21, 0xb4, 0x02, 0x94, 0x1f, 0x27, 0x04, 0x6d, 0xb3, 0xd9, 0xb1, 0xdb, + 0xd5, 0xa2, 0x4a, 0xd6, 0x24, 0x9e, 0x7a, 0x2d, 0x25, 0xb6, 0x99, 0x19, 0x77, 0x9b, 0x87, 0xe0, + 0x25, 0xb8, 0xe4, 0x82, 0x07, 0xe1, 0x82, 0xd7, 0xe0, 0x35, 0x90, 0xc7, 0x76, 0x94, 0xcd, 0x56, + 0x4a, 0x8b, 0xc4, 0x55, 0xe7, 0xcc, 0xf9, 0xbe, 0xaf, 0xdf, 0x9c, 0x1f, 0x2b, 0x70, 0xe6, 0x85, + 0xa1, 0xb7, 0xa4, 0xc6, 0x62, 0x19, 0xc6, 0xae, 0x31, 0xf7, 0xbd, 0x5f, 0x63, 0xca, 0xd6, 0x86, + 0x4b, 0x04, 0x11, 0x8c, 0x04, 0xfc, 0x9a, 0x32, 0xe3, 0xe6, 0xc4, 0xc8, 0xcf, 0x7a, 0xc4, 0x42, + 0x11, 0xa2, 0xe3, 0x94, 0xa5, 0x4b, 0x96, 0x9e, 0xb3, 0xf4, 0x6d, 0x96, 0x7e, 0x73, 0x72, 0xf8, + 0x24, 0x13, 0x27, 0x91, 0x6f, 0x90, 0x20, 0x08, 0x05, 0x11, 0x7e, 0x18, 0xf0, 0x54, 0x64, 
0x93, + 0x95, 0xd1, 0x3c, 0xbe, 0x36, 0xb8, 0x60, 0xf1, 0x42, 0x64, 0xd9, 0x2f, 0x76, 0xb3, 0xc2, 0x5f, + 0x51, 0x2e, 0xc8, 0x2a, 0xca, 0x00, 0x07, 0x19, 0x80, 0x45, 0x0b, 0x83, 0x0b, 0x22, 0xe2, 0x4c, + 0xf7, 0xe8, 0xef, 0x12, 0xb4, 0xec, 0xcc, 0xc5, 0x20, 0x0c, 0xae, 0x7d, 0x0f, 0x21, 0x28, 0x05, + 0x64, 0x45, 0x35, 0xa5, 0xa3, 0x74, 0xeb, 0x58, 0x9e, 0xd1, 0x19, 0x7c, 0xe2, 0x52, 0x2e, 0xfc, + 0x40, 0x9a, 0x72, 0x12, 0xef, 0x9c, 0x0a, 0xc7, 0x77, 0xb5, 0x82, 0x44, 0x7d, 0xbc, 0x95, 0x1d, + 0xa6, 0xc9, 0x89, 0x8b, 0xbe, 0x84, 0x86, 0xeb, 0xf3, 0x68, 0x49, 0xd6, 0x8e, 0x54, 0x2c, 0x4a, + 0xac, 0x9a, 0xdd, 0x4d, 0x13, 0xe1, 0xaf, 0xa0, 0x95, 0x88, 0x39, 0x3c, 0x8c, 0xd9, 0x82, 0x26, + 0x82, 0x65, 0x09, 0x6a, 0x24, 0xb7, 0x96, 0xbc, 0x9c, 0xb8, 0xc8, 0x80, 0x4a, 0x44, 0x18, 0x59, + 0x71, 0xad, 0xde, 0x51, 0xba, 0xea, 0xe9, 0x81, 0x9e, 0xd5, 0x34, 0x7f, 0xb0, 0x6e, 0xc9, 0x72, + 0xe0, 0x0c, 0x86, 0x0e, 0xa1, 0xc6, 0x17, 0x6f, 0xa9, 0x1b, 0x2f, 0xa9, 0x56, 0x95, 0x82, 0x9b, + 0x18, 0x7d, 0x0b, 0x9a, 0xfc, 0x97, 0x8c, 0x5e, 0x33, 0xca, 0xdf, 0x3a, 0xef, 0xfc, 0xc0, 0x0d, + 0xdf, 0x39, 0x2e, 0x59, 0x73, 0xad, 0xd1, 0x51, 0xba, 0x65, 0xfc, 0x38, 0xc9, 0xe3, 0x34, 0xfd, + 0x5a, 0x66, 0x87, 0x64, 0x2d, 0x45, 0x5d, 0x9f, 0x93, 0xf9, 0x92, 0xba, 0x5a, 0xb3, 0xa3, 0x74, + 0x6b, 0x78, 0x13, 0xa3, 0xe7, 0xa0, 0xc6, 0x91, 0x4b, 0x04, 0x75, 0x92, 0xd2, 0x6b, 0x25, 0x69, + 0xf3, 0xf0, 0x03, 0x9b, 0x76, 0xde, 0x17, 0x0c, 0x29, 0x3c, 0xb9, 0x40, 0x3f, 0x40, 0x33, 0xa0, + 0xb7, 0xc2, 0x61, 0x71, 0x90, 0xd2, 0x6b, 0x7b, 0xe9, 0x6a, 0x42, 0xc0, 0x71, 0x20, 0xf9, 0x3f, + 0x43, 0x39, 0x69, 0x2a, 0xd5, 0xa0, 0xa3, 0x74, 0x5b, 0xa7, 0x67, 0xfa, 0xbd, 0x26, 0x4e, 0xcf, + 0xfb, 0x6e, 0x25, 0x5c, 0x9c, 0x4a, 0xa0, 0x03, 0xa8, 0xc6, 0x9c, 0xb2, 0xa4, 0x13, 0x6a, 0x47, + 0xe9, 0x16, 0x71, 0x25, 0x09, 0x27, 0x2e, 0x3a, 0x4e, 0x3b, 0x95, 0xb4, 0x9d, 0x51, 0xcf, 0x0f, + 0x03, 0xad, 0x25, 0x0b, 0xdb, 0xcc, 0x6e, 0xb1, 0xbc, 0x3c, 0xfa, 0xad, 0x0c, 0x6a, 0x2e, 0x8c, + 0xe3, 0xe0, 0xce, 0x69, 0xfa, 0x11, 0x9a, 0x79, 0x37, 0xd2, 0xf7, 0x16, 0xf7, 0xbe, 0xb7, 0x91, + 0x13, 0xe4, 0x83, 0x9f, 0x41, 0x6d, 0x53, 0x2b, 0xd8, 0xcb, 0xad, 0xb2, 0xac, 0x4e, 0xcf, 0xa0, + 0x41, 0x19, 0x0b, 0x99, 0x93, 0xae, 0x80, 0xf6, 0x58, 0x52, 0x51, 0x4e, 0x65, 0xd1, 0x42, 0xb7, + 0x64, 0x06, 0xab, 0x12, 0x97, 0x06, 0xe8, 0x3b, 0x00, 0x2e, 0x08, 0x13, 0xf7, 0x6d, 0x6d, 0x5d, + 0xa2, 0x73, 0xa3, 0x34, 0x70, 0x53, 0x62, 0x79, 0xbf, 0x51, 0x1a, 0xb8, 0x92, 0xb6, 0x33, 0x4d, + 0x95, 0x07, 0x4d, 0xd3, 0x83, 0x97, 0xe5, 0xbf, 0x2d, 0xf7, 0x87, 0x9b, 0x5b, 0xbd, 0x63, 0x73, + 0x37, 0xa3, 0x59, 0xfb, 0x1f, 0x47, 0x73, 0x7b, 0xdb, 0x1b, 0xef, 0x6f, 0xfb, 0xd1, 0x9f, 0x05, + 0x78, 0x94, 0xab, 0x9d, 0x53, 0xce, 0x89, 0x47, 0xd1, 0xf7, 0xd0, 0x58, 0xa5, 0xc7, 0xb4, 0xbe, + 0xca, 0xfe, 0x75, 0xcb, 0xf0, 0xb2, 0xc0, 0x73, 0xa8, 0x71, 0x7a, 0x43, 0x99, 0x2f, 0xd6, 0xb2, + 0x42, 0xad, 0xd3, 0xd1, 0x03, 0x9f, 0x95, 0x19, 0xd1, 0xb3, 0xbf, 0x56, 0xa6, 0x86, 0x37, 0xba, + 0xc9, 0xa7, 0x73, 0x63, 0x91, 0xde, 0x8a, 0xfc, 0xd3, 0x99, 0xdb, 0xa0, 0xb7, 0xe2, 0xe8, 0x02, + 0x1e, 0xed, 0xf0, 0x51, 0x07, 0x9e, 0x9c, 0x9b, 0x96, 0xd5, 0x1b, 0x9b, 0x8e, 0x65, 0x5e, 0x9a, + 0x78, 0x62, 0xbf, 0x71, 0x2e, 0xa6, 0xd6, 0xcc, 0x1c, 0x4c, 0x46, 0x13, 0x73, 0xd8, 0xfe, 0x08, + 0xd5, 0xa0, 0x34, 0x99, 0x8e, 0x5e, 0xb6, 0x15, 0xa4, 0x42, 0xf5, 0x75, 0x0f, 0x4f, 0x27, 0xd3, + 0x71, 0xbb, 0x80, 0xea, 0x50, 0x36, 0x31, 0x7e, 0x89, 0xdb, 0xc5, 0xa7, 0x63, 0x68, 0xe4, 0x36, + 0xed, 0x75, 0x44, 0xd1, 0x67, 0xf0, 0xa9, 0x8d, 0x7b, 0x53, 0x6b, 0x64, 0x62, 0xc7, 0x7e, 0x33, + 0x33, 0x77, 0x04, 
0xeb, 0x50, 0xee, 0xf7, 0xec, 0xc1, 0x4f, 0x6d, 0x05, 0x35, 0xa1, 0x6e, 0xd9, + 0xd8, 0xec, 0x9d, 0x4b, 0xcd, 0xa7, 0x1c, 0x9a, 0xef, 0xb5, 0x11, 0x7d, 0x0e, 0x87, 0x1b, 0x25, + 0xcb, 0xee, 0xd9, 0xbb, 0x52, 0x2a, 0x54, 0x67, 0xe6, 0x74, 0x98, 0x3a, 0x52, 0xa1, 0x8a, 0x2f, + 0xa6, 0xd2, 0x5e, 0x51, 0x2a, 0x5f, 0x0c, 0x06, 0xa6, 0x39, 0x34, 0x87, 0xed, 0x12, 0x02, 0xa8, + 0x8c, 0x7a, 0x93, 0x17, 0xe6, 0xb0, 0x5d, 0x4e, 0x52, 0x83, 0xde, 0x74, 0x60, 0xbe, 0x48, 0xc2, + 0x4a, 0xff, 0x1f, 0x05, 0xbe, 0x5e, 0x84, 0xab, 0xfb, 0xf5, 0xa3, 0xbf, 0x31, 0x38, 0x4b, 0x5a, + 0x3e, 0x53, 0x7e, 0x79, 0x95, 0xf1, 0xbc, 0x70, 0x49, 0x02, 0x4f, 0x0f, 0x99, 0x67, 0x78, 0x34, + 0x90, 0x03, 0x61, 0xa4, 0x29, 0x12, 0xf9, 0x7c, 0xcf, 0x0f, 0x80, 0xe7, 0xdb, 0xf1, 0xef, 0x85, + 0xf2, 0x78, 0xd0, 0x1f, 0xda, 0x7f, 0x14, 0x8e, 0xc7, 0xa9, 0xf6, 0x40, 0x7a, 0xea, 0xfb, 0xde, + 0x2b, 0xe9, 0x29, 0xd9, 0xa8, 0xdc, 0x86, 0x7e, 0x79, 0xf2, 0x57, 0x8e, 0xbb, 0x92, 0xb8, 0xab, + 0x1c, 0x77, 0xb5, 0x8d, 0xbb, 0xba, 0x3c, 0x99, 0x57, 0xa4, 0xab, 0x6f, 0xfe, 0x0d, 0x00, 0x00, + 0xff, 0xff, 0x0d, 0x74, 0xf0, 0x31, 0x95, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1/audit_data.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1/audit_data.pb.go new file mode 100644 index 0000000..e0d4d60 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1/audit_data.pb.go @@ -0,0 +1,3062 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/logging/v1/audit_data.proto + +package logging // import "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// BigQuery request and response messages for audit log. +// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`. +// `Table.schema` may continue to be present in your logs during this +// transition. +type AuditData struct { + // Request data for each BigQuery method. + // + // Types that are valid to be assigned to Request: + // *AuditData_TableInsertRequest + // *AuditData_TableUpdateRequest + // *AuditData_DatasetListRequest + // *AuditData_DatasetInsertRequest + // *AuditData_DatasetUpdateRequest + // *AuditData_JobInsertRequest + // *AuditData_JobQueryRequest + // *AuditData_JobGetQueryResultsRequest + // *AuditData_TableDataListRequest + Request isAuditData_Request `protobuf_oneof:"request"` + // Response data for each BigQuery method. 
+ // + // Types that are valid to be assigned to Response: + // *AuditData_TableInsertResponse + // *AuditData_TableUpdateResponse + // *AuditData_DatasetInsertResponse + // *AuditData_DatasetUpdateResponse + // *AuditData_JobInsertResponse + // *AuditData_JobQueryResponse + // *AuditData_JobGetQueryResultsResponse + // *AuditData_JobQueryDoneResponse + Response isAuditData_Response `protobuf_oneof:"response"` + // A job completion event. + JobCompletedEvent *JobCompletedEvent `protobuf:"bytes,17,opt,name=job_completed_event,json=jobCompletedEvent,proto3" json:"job_completed_event,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditData) Reset() { *m = AuditData{} } +func (m *AuditData) String() string { return proto.CompactTextString(m) } +func (*AuditData) ProtoMessage() {} +func (*AuditData) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{0} +} +func (m *AuditData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditData.Unmarshal(m, b) +} +func (m *AuditData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditData.Marshal(b, m, deterministic) +} +func (dst *AuditData) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditData.Merge(dst, src) +} +func (m *AuditData) XXX_Size() int { + return xxx_messageInfo_AuditData.Size(m) +} +func (m *AuditData) XXX_DiscardUnknown() { + xxx_messageInfo_AuditData.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditData proto.InternalMessageInfo + +type isAuditData_Request interface { + isAuditData_Request() +} + +type AuditData_TableInsertRequest struct { + TableInsertRequest *TableInsertRequest `protobuf:"bytes,1,opt,name=table_insert_request,json=tableInsertRequest,proto3,oneof"` +} + +type AuditData_TableUpdateRequest struct { + TableUpdateRequest *TableUpdateRequest `protobuf:"bytes,16,opt,name=table_update_request,json=tableUpdateRequest,proto3,oneof"` +} + +type AuditData_DatasetListRequest struct { + DatasetListRequest *DatasetListRequest `protobuf:"bytes,2,opt,name=dataset_list_request,json=datasetListRequest,proto3,oneof"` +} + +type AuditData_DatasetInsertRequest struct { + DatasetInsertRequest *DatasetInsertRequest `protobuf:"bytes,3,opt,name=dataset_insert_request,json=datasetInsertRequest,proto3,oneof"` +} + +type AuditData_DatasetUpdateRequest struct { + DatasetUpdateRequest *DatasetUpdateRequest `protobuf:"bytes,4,opt,name=dataset_update_request,json=datasetUpdateRequest,proto3,oneof"` +} + +type AuditData_JobInsertRequest struct { + JobInsertRequest *JobInsertRequest `protobuf:"bytes,5,opt,name=job_insert_request,json=jobInsertRequest,proto3,oneof"` +} + +type AuditData_JobQueryRequest struct { + JobQueryRequest *JobQueryRequest `protobuf:"bytes,6,opt,name=job_query_request,json=jobQueryRequest,proto3,oneof"` +} + +type AuditData_JobGetQueryResultsRequest struct { + JobGetQueryResultsRequest *JobGetQueryResultsRequest `protobuf:"bytes,7,opt,name=job_get_query_results_request,json=jobGetQueryResultsRequest,proto3,oneof"` +} + +type AuditData_TableDataListRequest struct { + TableDataListRequest *TableDataListRequest `protobuf:"bytes,8,opt,name=table_data_list_request,json=tableDataListRequest,proto3,oneof"` +} + +func (*AuditData_TableInsertRequest) isAuditData_Request() {} + +func (*AuditData_TableUpdateRequest) isAuditData_Request() {} + +func (*AuditData_DatasetListRequest) isAuditData_Request() {} + +func (*AuditData_DatasetInsertRequest) 
isAuditData_Request() {} + +func (*AuditData_DatasetUpdateRequest) isAuditData_Request() {} + +func (*AuditData_JobInsertRequest) isAuditData_Request() {} + +func (*AuditData_JobQueryRequest) isAuditData_Request() {} + +func (*AuditData_JobGetQueryResultsRequest) isAuditData_Request() {} + +func (*AuditData_TableDataListRequest) isAuditData_Request() {} + +func (m *AuditData) GetRequest() isAuditData_Request { + if m != nil { + return m.Request + } + return nil +} + +func (m *AuditData) GetTableInsertRequest() *TableInsertRequest { + if x, ok := m.GetRequest().(*AuditData_TableInsertRequest); ok { + return x.TableInsertRequest + } + return nil +} + +func (m *AuditData) GetTableUpdateRequest() *TableUpdateRequest { + if x, ok := m.GetRequest().(*AuditData_TableUpdateRequest); ok { + return x.TableUpdateRequest + } + return nil +} + +func (m *AuditData) GetDatasetListRequest() *DatasetListRequest { + if x, ok := m.GetRequest().(*AuditData_DatasetListRequest); ok { + return x.DatasetListRequest + } + return nil +} + +func (m *AuditData) GetDatasetInsertRequest() *DatasetInsertRequest { + if x, ok := m.GetRequest().(*AuditData_DatasetInsertRequest); ok { + return x.DatasetInsertRequest + } + return nil +} + +func (m *AuditData) GetDatasetUpdateRequest() *DatasetUpdateRequest { + if x, ok := m.GetRequest().(*AuditData_DatasetUpdateRequest); ok { + return x.DatasetUpdateRequest + } + return nil +} + +func (m *AuditData) GetJobInsertRequest() *JobInsertRequest { + if x, ok := m.GetRequest().(*AuditData_JobInsertRequest); ok { + return x.JobInsertRequest + } + return nil +} + +func (m *AuditData) GetJobQueryRequest() *JobQueryRequest { + if x, ok := m.GetRequest().(*AuditData_JobQueryRequest); ok { + return x.JobQueryRequest + } + return nil +} + +func (m *AuditData) GetJobGetQueryResultsRequest() *JobGetQueryResultsRequest { + if x, ok := m.GetRequest().(*AuditData_JobGetQueryResultsRequest); ok { + return x.JobGetQueryResultsRequest + } + return nil +} + +func (m *AuditData) GetTableDataListRequest() *TableDataListRequest { + if x, ok := m.GetRequest().(*AuditData_TableDataListRequest); ok { + return x.TableDataListRequest + } + return nil +} + +type isAuditData_Response interface { + isAuditData_Response() +} + +type AuditData_TableInsertResponse struct { + TableInsertResponse *TableInsertResponse `protobuf:"bytes,9,opt,name=table_insert_response,json=tableInsertResponse,proto3,oneof"` +} + +type AuditData_TableUpdateResponse struct { + TableUpdateResponse *TableUpdateResponse `protobuf:"bytes,10,opt,name=table_update_response,json=tableUpdateResponse,proto3,oneof"` +} + +type AuditData_DatasetInsertResponse struct { + DatasetInsertResponse *DatasetInsertResponse `protobuf:"bytes,11,opt,name=dataset_insert_response,json=datasetInsertResponse,proto3,oneof"` +} + +type AuditData_DatasetUpdateResponse struct { + DatasetUpdateResponse *DatasetUpdateResponse `protobuf:"bytes,12,opt,name=dataset_update_response,json=datasetUpdateResponse,proto3,oneof"` +} + +type AuditData_JobInsertResponse struct { + JobInsertResponse *JobInsertResponse `protobuf:"bytes,18,opt,name=job_insert_response,json=jobInsertResponse,proto3,oneof"` +} + +type AuditData_JobQueryResponse struct { + JobQueryResponse *JobQueryResponse `protobuf:"bytes,13,opt,name=job_query_response,json=jobQueryResponse,proto3,oneof"` +} + +type AuditData_JobGetQueryResultsResponse struct { + JobGetQueryResultsResponse *JobGetQueryResultsResponse 
`protobuf:"bytes,14,opt,name=job_get_query_results_response,json=jobGetQueryResultsResponse,proto3,oneof"` +} + +type AuditData_JobQueryDoneResponse struct { + JobQueryDoneResponse *JobQueryDoneResponse `protobuf:"bytes,15,opt,name=job_query_done_response,json=jobQueryDoneResponse,proto3,oneof"` +} + +func (*AuditData_TableInsertResponse) isAuditData_Response() {} + +func (*AuditData_TableUpdateResponse) isAuditData_Response() {} + +func (*AuditData_DatasetInsertResponse) isAuditData_Response() {} + +func (*AuditData_DatasetUpdateResponse) isAuditData_Response() {} + +func (*AuditData_JobInsertResponse) isAuditData_Response() {} + +func (*AuditData_JobQueryResponse) isAuditData_Response() {} + +func (*AuditData_JobGetQueryResultsResponse) isAuditData_Response() {} + +func (*AuditData_JobQueryDoneResponse) isAuditData_Response() {} + +func (m *AuditData) GetResponse() isAuditData_Response { + if m != nil { + return m.Response + } + return nil +} + +func (m *AuditData) GetTableInsertResponse() *TableInsertResponse { + if x, ok := m.GetResponse().(*AuditData_TableInsertResponse); ok { + return x.TableInsertResponse + } + return nil +} + +func (m *AuditData) GetTableUpdateResponse() *TableUpdateResponse { + if x, ok := m.GetResponse().(*AuditData_TableUpdateResponse); ok { + return x.TableUpdateResponse + } + return nil +} + +func (m *AuditData) GetDatasetInsertResponse() *DatasetInsertResponse { + if x, ok := m.GetResponse().(*AuditData_DatasetInsertResponse); ok { + return x.DatasetInsertResponse + } + return nil +} + +func (m *AuditData) GetDatasetUpdateResponse() *DatasetUpdateResponse { + if x, ok := m.GetResponse().(*AuditData_DatasetUpdateResponse); ok { + return x.DatasetUpdateResponse + } + return nil +} + +func (m *AuditData) GetJobInsertResponse() *JobInsertResponse { + if x, ok := m.GetResponse().(*AuditData_JobInsertResponse); ok { + return x.JobInsertResponse + } + return nil +} + +func (m *AuditData) GetJobQueryResponse() *JobQueryResponse { + if x, ok := m.GetResponse().(*AuditData_JobQueryResponse); ok { + return x.JobQueryResponse + } + return nil +} + +func (m *AuditData) GetJobGetQueryResultsResponse() *JobGetQueryResultsResponse { + if x, ok := m.GetResponse().(*AuditData_JobGetQueryResultsResponse); ok { + return x.JobGetQueryResultsResponse + } + return nil +} + +func (m *AuditData) GetJobQueryDoneResponse() *JobQueryDoneResponse { + if x, ok := m.GetResponse().(*AuditData_JobQueryDoneResponse); ok { + return x.JobQueryDoneResponse + } + return nil +} + +func (m *AuditData) GetJobCompletedEvent() *JobCompletedEvent { + if m != nil { + return m.JobCompletedEvent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
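// Illustrative sketch, not part of the generated file: how a caller might
// populate the Request oneof and read it back through the generated wrapper
// types and accessors shown above. The function name exampleAuditDataRequest
// and the literal field values are hypothetical; everything else is the
// generated API defined in this file (fmt is already imported above).
func exampleAuditDataRequest() string {
	// A oneof variant is selected by assigning its wrapper type to Request.
	d := &AuditData{
		Request: &AuditData_JobQueryRequest{
			JobQueryRequest: &JobQueryRequest{
				Query:      "SELECT 1",
				MaxResults: 10,
				ProjectId:  "example-project",
				DryRun:     true,
			},
		},
	}
	// The generated getter returns nil unless the oneof currently holds this
	// variant, so callers can probe for it without a type switch.
	if q := d.GetJobQueryRequest(); q != nil {
		return fmt.Sprintf("query=%q project=%s dryRun=%v",
			q.GetQuery(), q.GetProjectId(), q.GetDryRun())
	}
	return ""
}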
+func (*AuditData) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AuditData_OneofMarshaler, _AuditData_OneofUnmarshaler, _AuditData_OneofSizer, []interface{}{ + (*AuditData_TableInsertRequest)(nil), + (*AuditData_TableUpdateRequest)(nil), + (*AuditData_DatasetListRequest)(nil), + (*AuditData_DatasetInsertRequest)(nil), + (*AuditData_DatasetUpdateRequest)(nil), + (*AuditData_JobInsertRequest)(nil), + (*AuditData_JobQueryRequest)(nil), + (*AuditData_JobGetQueryResultsRequest)(nil), + (*AuditData_TableDataListRequest)(nil), + (*AuditData_TableInsertResponse)(nil), + (*AuditData_TableUpdateResponse)(nil), + (*AuditData_DatasetInsertResponse)(nil), + (*AuditData_DatasetUpdateResponse)(nil), + (*AuditData_JobInsertResponse)(nil), + (*AuditData_JobQueryResponse)(nil), + (*AuditData_JobGetQueryResultsResponse)(nil), + (*AuditData_JobQueryDoneResponse)(nil), + } +} + +func _AuditData_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AuditData) + // request + switch x := m.Request.(type) { + case *AuditData_TableInsertRequest: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableInsertRequest); err != nil { + return err + } + case *AuditData_TableUpdateRequest: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableUpdateRequest); err != nil { + return err + } + case *AuditData_DatasetListRequest: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatasetListRequest); err != nil { + return err + } + case *AuditData_DatasetInsertRequest: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatasetInsertRequest); err != nil { + return err + } + case *AuditData_DatasetUpdateRequest: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatasetUpdateRequest); err != nil { + return err + } + case *AuditData_JobInsertRequest: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobInsertRequest); err != nil { + return err + } + case *AuditData_JobQueryRequest: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobQueryRequest); err != nil { + return err + } + case *AuditData_JobGetQueryResultsRequest: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobGetQueryResultsRequest); err != nil { + return err + } + case *AuditData_TableDataListRequest: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableDataListRequest); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AuditData.Request has unexpected type %T", x) + } + // response + switch x := m.Response.(type) { + case *AuditData_TableInsertResponse: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableInsertResponse); err != nil { + return err + } + case *AuditData_TableUpdateResponse: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableUpdateResponse); err != nil { + return err + } + case *AuditData_DatasetInsertResponse: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatasetInsertResponse); err != nil { + return err + } + case *AuditData_DatasetUpdateResponse: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatasetUpdateResponse); err != nil { + return err + } + case *AuditData_JobInsertResponse: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobInsertResponse); 
err != nil { + return err + } + case *AuditData_JobQueryResponse: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobQueryResponse); err != nil { + return err + } + case *AuditData_JobGetQueryResultsResponse: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobGetQueryResultsResponse); err != nil { + return err + } + case *AuditData_JobQueryDoneResponse: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobQueryDoneResponse); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AuditData.Response has unexpected type %T", x) + } + return nil +} + +func _AuditData_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AuditData) + switch tag { + case 1: // request.table_insert_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TableInsertRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_TableInsertRequest{msg} + return true, err + case 16: // request.table_update_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TableUpdateRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_TableUpdateRequest{msg} + return true, err + case 2: // request.dataset_list_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatasetListRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_DatasetListRequest{msg} + return true, err + case 3: // request.dataset_insert_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatasetInsertRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_DatasetInsertRequest{msg} + return true, err + case 4: // request.dataset_update_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatasetUpdateRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_DatasetUpdateRequest{msg} + return true, err + case 5: // request.job_insert_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobInsertRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_JobInsertRequest{msg} + return true, err + case 6: // request.job_query_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobQueryRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_JobQueryRequest{msg} + return true, err + case 7: // request.job_get_query_results_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobGetQueryResultsRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_JobGetQueryResultsRequest{msg} + return true, err + case 8: // request.table_data_list_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TableDataListRequest) + err := b.DecodeMessage(msg) + m.Request = &AuditData_TableDataListRequest{msg} + return true, err + case 9: // response.table_insert_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TableInsertResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_TableInsertResponse{msg} + return true, err + case 10: // response.table_update_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TableUpdateResponse) + err := b.DecodeMessage(msg) + m.Response = 
&AuditData_TableUpdateResponse{msg} + return true, err + case 11: // response.dataset_insert_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatasetInsertResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_DatasetInsertResponse{msg} + return true, err + case 12: // response.dataset_update_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatasetUpdateResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_DatasetUpdateResponse{msg} + return true, err + case 18: // response.job_insert_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobInsertResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_JobInsertResponse{msg} + return true, err + case 13: // response.job_query_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobQueryResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_JobQueryResponse{msg} + return true, err + case 14: // response.job_get_query_results_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobGetQueryResultsResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_JobGetQueryResultsResponse{msg} + return true, err + case 15: // response.job_query_done_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobQueryDoneResponse) + err := b.DecodeMessage(msg) + m.Response = &AuditData_JobQueryDoneResponse{msg} + return true, err + default: + return false, nil + } +} + +func _AuditData_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AuditData) + // request + switch x := m.Request.(type) { + case *AuditData_TableInsertRequest: + s := proto.Size(x.TableInsertRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_TableUpdateRequest: + s := proto.Size(x.TableUpdateRequest) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_DatasetListRequest: + s := proto.Size(x.DatasetListRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_DatasetInsertRequest: + s := proto.Size(x.DatasetInsertRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_DatasetUpdateRequest: + s := proto.Size(x.DatasetUpdateRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobInsertRequest: + s := proto.Size(x.JobInsertRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobQueryRequest: + s := proto.Size(x.JobQueryRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobGetQueryResultsRequest: + s := proto.Size(x.JobGetQueryResultsRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_TableDataListRequest: + s := proto.Size(x.TableDataListRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // response + switch x := m.Response.(type) { + case *AuditData_TableInsertResponse: + s := proto.Size(x.TableInsertResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_TableUpdateResponse: + s := proto.Size(x.TableUpdateResponse) + n += 1 // tag and wire + n += 
proto.SizeVarint(uint64(s)) + n += s + case *AuditData_DatasetInsertResponse: + s := proto.Size(x.DatasetInsertResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_DatasetUpdateResponse: + s := proto.Size(x.DatasetUpdateResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobInsertResponse: + s := proto.Size(x.JobInsertResponse) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobQueryResponse: + s := proto.Size(x.JobQueryResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobGetQueryResultsResponse: + s := proto.Size(x.JobGetQueryResultsResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AuditData_JobQueryDoneResponse: + s := proto.Size(x.JobQueryDoneResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Table insert request. +type TableInsertRequest struct { + // The new table. + Resource *Table `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableInsertRequest) Reset() { *m = TableInsertRequest{} } +func (m *TableInsertRequest) String() string { return proto.CompactTextString(m) } +func (*TableInsertRequest) ProtoMessage() {} +func (*TableInsertRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{1} +} +func (m *TableInsertRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableInsertRequest.Unmarshal(m, b) +} +func (m *TableInsertRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableInsertRequest.Marshal(b, m, deterministic) +} +func (dst *TableInsertRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableInsertRequest.Merge(dst, src) +} +func (m *TableInsertRequest) XXX_Size() int { + return xxx_messageInfo_TableInsertRequest.Size(m) +} +func (m *TableInsertRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TableInsertRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TableInsertRequest proto.InternalMessageInfo + +func (m *TableInsertRequest) GetResource() *Table { + if m != nil { + return m.Resource + } + return nil +} + +// Table update request. +type TableUpdateRequest struct { + // The table to be updated. 
+ Resource *Table `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableUpdateRequest) Reset() { *m = TableUpdateRequest{} } +func (m *TableUpdateRequest) String() string { return proto.CompactTextString(m) } +func (*TableUpdateRequest) ProtoMessage() {} +func (*TableUpdateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{2} +} +func (m *TableUpdateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableUpdateRequest.Unmarshal(m, b) +} +func (m *TableUpdateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableUpdateRequest.Marshal(b, m, deterministic) +} +func (dst *TableUpdateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableUpdateRequest.Merge(dst, src) +} +func (m *TableUpdateRequest) XXX_Size() int { + return xxx_messageInfo_TableUpdateRequest.Size(m) +} +func (m *TableUpdateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TableUpdateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TableUpdateRequest proto.InternalMessageInfo + +func (m *TableUpdateRequest) GetResource() *Table { + if m != nil { + return m.Resource + } + return nil +} + +// Table insert response. +type TableInsertResponse struct { + // Final state of the inserted table. + Resource *Table `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableInsertResponse) Reset() { *m = TableInsertResponse{} } +func (m *TableInsertResponse) String() string { return proto.CompactTextString(m) } +func (*TableInsertResponse) ProtoMessage() {} +func (*TableInsertResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{3} +} +func (m *TableInsertResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableInsertResponse.Unmarshal(m, b) +} +func (m *TableInsertResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableInsertResponse.Marshal(b, m, deterministic) +} +func (dst *TableInsertResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableInsertResponse.Merge(dst, src) +} +func (m *TableInsertResponse) XXX_Size() int { + return xxx_messageInfo_TableInsertResponse.Size(m) +} +func (m *TableInsertResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TableInsertResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TableInsertResponse proto.InternalMessageInfo + +func (m *TableInsertResponse) GetResource() *Table { + if m != nil { + return m.Resource + } + return nil +} + +// Table update response. +type TableUpdateResponse struct { + // Final state of the updated table. 
+ Resource *Table `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableUpdateResponse) Reset() { *m = TableUpdateResponse{} } +func (m *TableUpdateResponse) String() string { return proto.CompactTextString(m) } +func (*TableUpdateResponse) ProtoMessage() {} +func (*TableUpdateResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{4} +} +func (m *TableUpdateResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableUpdateResponse.Unmarshal(m, b) +} +func (m *TableUpdateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableUpdateResponse.Marshal(b, m, deterministic) +} +func (dst *TableUpdateResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableUpdateResponse.Merge(dst, src) +} +func (m *TableUpdateResponse) XXX_Size() int { + return xxx_messageInfo_TableUpdateResponse.Size(m) +} +func (m *TableUpdateResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TableUpdateResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TableUpdateResponse proto.InternalMessageInfo + +func (m *TableUpdateResponse) GetResource() *Table { + if m != nil { + return m.Resource + } + return nil +} + +// Dataset list request. +type DatasetListRequest struct { + // Whether to list all datasets, including hidden ones. + ListAll bool `protobuf:"varint,1,opt,name=list_all,json=listAll,proto3" json:"list_all,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetListRequest) Reset() { *m = DatasetListRequest{} } +func (m *DatasetListRequest) String() string { return proto.CompactTextString(m) } +func (*DatasetListRequest) ProtoMessage() {} +func (*DatasetListRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{5} +} +func (m *DatasetListRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetListRequest.Unmarshal(m, b) +} +func (m *DatasetListRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetListRequest.Marshal(b, m, deterministic) +} +func (dst *DatasetListRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetListRequest.Merge(dst, src) +} +func (m *DatasetListRequest) XXX_Size() int { + return xxx_messageInfo_DatasetListRequest.Size(m) +} +func (m *DatasetListRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetListRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetListRequest proto.InternalMessageInfo + +func (m *DatasetListRequest) GetListAll() bool { + if m != nil { + return m.ListAll + } + return false +} + +// Dataset insert request. +type DatasetInsertRequest struct { + // The dataset to be inserted. 
+ Resource *Dataset `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetInsertRequest) Reset() { *m = DatasetInsertRequest{} } +func (m *DatasetInsertRequest) String() string { return proto.CompactTextString(m) } +func (*DatasetInsertRequest) ProtoMessage() {} +func (*DatasetInsertRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{6} +} +func (m *DatasetInsertRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetInsertRequest.Unmarshal(m, b) +} +func (m *DatasetInsertRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetInsertRequest.Marshal(b, m, deterministic) +} +func (dst *DatasetInsertRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetInsertRequest.Merge(dst, src) +} +func (m *DatasetInsertRequest) XXX_Size() int { + return xxx_messageInfo_DatasetInsertRequest.Size(m) +} +func (m *DatasetInsertRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetInsertRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetInsertRequest proto.InternalMessageInfo + +func (m *DatasetInsertRequest) GetResource() *Dataset { + if m != nil { + return m.Resource + } + return nil +} + +// Dataset insert response. +type DatasetInsertResponse struct { + // Final state of the inserted dataset. + Resource *Dataset `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetInsertResponse) Reset() { *m = DatasetInsertResponse{} } +func (m *DatasetInsertResponse) String() string { return proto.CompactTextString(m) } +func (*DatasetInsertResponse) ProtoMessage() {} +func (*DatasetInsertResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{7} +} +func (m *DatasetInsertResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetInsertResponse.Unmarshal(m, b) +} +func (m *DatasetInsertResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetInsertResponse.Marshal(b, m, deterministic) +} +func (dst *DatasetInsertResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetInsertResponse.Merge(dst, src) +} +func (m *DatasetInsertResponse) XXX_Size() int { + return xxx_messageInfo_DatasetInsertResponse.Size(m) +} +func (m *DatasetInsertResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetInsertResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetInsertResponse proto.InternalMessageInfo + +func (m *DatasetInsertResponse) GetResource() *Dataset { + if m != nil { + return m.Resource + } + return nil +} + +// Dataset update request. +type DatasetUpdateRequest struct { + // The dataset to be updated. 
+ Resource *Dataset `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetUpdateRequest) Reset() { *m = DatasetUpdateRequest{} } +func (m *DatasetUpdateRequest) String() string { return proto.CompactTextString(m) } +func (*DatasetUpdateRequest) ProtoMessage() {} +func (*DatasetUpdateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{8} +} +func (m *DatasetUpdateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetUpdateRequest.Unmarshal(m, b) +} +func (m *DatasetUpdateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetUpdateRequest.Marshal(b, m, deterministic) +} +func (dst *DatasetUpdateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetUpdateRequest.Merge(dst, src) +} +func (m *DatasetUpdateRequest) XXX_Size() int { + return xxx_messageInfo_DatasetUpdateRequest.Size(m) +} +func (m *DatasetUpdateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetUpdateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetUpdateRequest proto.InternalMessageInfo + +func (m *DatasetUpdateRequest) GetResource() *Dataset { + if m != nil { + return m.Resource + } + return nil +} + +// Dataset update response. +type DatasetUpdateResponse struct { + // Final state of the updated dataset. + Resource *Dataset `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetUpdateResponse) Reset() { *m = DatasetUpdateResponse{} } +func (m *DatasetUpdateResponse) String() string { return proto.CompactTextString(m) } +func (*DatasetUpdateResponse) ProtoMessage() {} +func (*DatasetUpdateResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{9} +} +func (m *DatasetUpdateResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetUpdateResponse.Unmarshal(m, b) +} +func (m *DatasetUpdateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetUpdateResponse.Marshal(b, m, deterministic) +} +func (dst *DatasetUpdateResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetUpdateResponse.Merge(dst, src) +} +func (m *DatasetUpdateResponse) XXX_Size() int { + return xxx_messageInfo_DatasetUpdateResponse.Size(m) +} +func (m *DatasetUpdateResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetUpdateResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetUpdateResponse proto.InternalMessageInfo + +func (m *DatasetUpdateResponse) GetResource() *Dataset { + if m != nil { + return m.Resource + } + return nil +} + +// Job insert request. +type JobInsertRequest struct { + // Job insert request. 
+ Resource *Job `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobInsertRequest) Reset() { *m = JobInsertRequest{} } +func (m *JobInsertRequest) String() string { return proto.CompactTextString(m) } +func (*JobInsertRequest) ProtoMessage() {} +func (*JobInsertRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{10} +} +func (m *JobInsertRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobInsertRequest.Unmarshal(m, b) +} +func (m *JobInsertRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobInsertRequest.Marshal(b, m, deterministic) +} +func (dst *JobInsertRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobInsertRequest.Merge(dst, src) +} +func (m *JobInsertRequest) XXX_Size() int { + return xxx_messageInfo_JobInsertRequest.Size(m) +} +func (m *JobInsertRequest) XXX_DiscardUnknown() { + xxx_messageInfo_JobInsertRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_JobInsertRequest proto.InternalMessageInfo + +func (m *JobInsertRequest) GetResource() *Job { + if m != nil { + return m.Resource + } + return nil +} + +// Job insert response. +type JobInsertResponse struct { + // Job insert response. + Resource *Job `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobInsertResponse) Reset() { *m = JobInsertResponse{} } +func (m *JobInsertResponse) String() string { return proto.CompactTextString(m) } +func (*JobInsertResponse) ProtoMessage() {} +func (*JobInsertResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{11} +} +func (m *JobInsertResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobInsertResponse.Unmarshal(m, b) +} +func (m *JobInsertResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobInsertResponse.Marshal(b, m, deterministic) +} +func (dst *JobInsertResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobInsertResponse.Merge(dst, src) +} +func (m *JobInsertResponse) XXX_Size() int { + return xxx_messageInfo_JobInsertResponse.Size(m) +} +func (m *JobInsertResponse) XXX_DiscardUnknown() { + xxx_messageInfo_JobInsertResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_JobInsertResponse proto.InternalMessageInfo + +func (m *JobInsertResponse) GetResource() *Job { + if m != nil { + return m.Resource + } + return nil +} + +// Job query request. +type JobQueryRequest struct { + // The query. + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + // The maximum number of results. + MaxResults uint32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // The default dataset for tables that do not have a dataset specified. + DefaultDataset *DatasetName `protobuf:"bytes,3,opt,name=default_dataset,json=defaultDataset,proto3" json:"default_dataset,omitempty"` + // Project that the query should be charged to. + ProjectId string `protobuf:"bytes,4,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If true, don't actually run the job. Just check that it would run. 
+ DryRun bool `protobuf:"varint,5,opt,name=dry_run,json=dryRun,proto3" json:"dry_run,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobQueryRequest) Reset() { *m = JobQueryRequest{} } +func (m *JobQueryRequest) String() string { return proto.CompactTextString(m) } +func (*JobQueryRequest) ProtoMessage() {} +func (*JobQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{12} +} +func (m *JobQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobQueryRequest.Unmarshal(m, b) +} +func (m *JobQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobQueryRequest.Marshal(b, m, deterministic) +} +func (dst *JobQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobQueryRequest.Merge(dst, src) +} +func (m *JobQueryRequest) XXX_Size() int { + return xxx_messageInfo_JobQueryRequest.Size(m) +} +func (m *JobQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_JobQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_JobQueryRequest proto.InternalMessageInfo + +func (m *JobQueryRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *JobQueryRequest) GetMaxResults() uint32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *JobQueryRequest) GetDefaultDataset() *DatasetName { + if m != nil { + return m.DefaultDataset + } + return nil +} + +func (m *JobQueryRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *JobQueryRequest) GetDryRun() bool { + if m != nil { + return m.DryRun + } + return false +} + +// Job query response. +type JobQueryResponse struct { + // The total number of rows in the full query result set. + TotalResults uint64 `protobuf:"varint,1,opt,name=total_results,json=totalResults,proto3" json:"total_results,omitempty"` + // Information about the queried job. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobQueryResponse) Reset() { *m = JobQueryResponse{} } +func (m *JobQueryResponse) String() string { return proto.CompactTextString(m) } +func (*JobQueryResponse) ProtoMessage() {} +func (*JobQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{13} +} +func (m *JobQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobQueryResponse.Unmarshal(m, b) +} +func (m *JobQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobQueryResponse.Marshal(b, m, deterministic) +} +func (dst *JobQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobQueryResponse.Merge(dst, src) +} +func (m *JobQueryResponse) XXX_Size() int { + return xxx_messageInfo_JobQueryResponse.Size(m) +} +func (m *JobQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_JobQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_JobQueryResponse proto.InternalMessageInfo + +func (m *JobQueryResponse) GetTotalResults() uint64 { + if m != nil { + return m.TotalResults + } + return 0 +} + +func (m *JobQueryResponse) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Job getQueryResults request. +type JobGetQueryResultsRequest struct { + // Maximum number of results to return. 
+ MaxResults uint32 `protobuf:"varint,1,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Zero-based row number at which to start. + StartRow uint64 `protobuf:"varint,2,opt,name=start_row,json=startRow,proto3" json:"start_row,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobGetQueryResultsRequest) Reset() { *m = JobGetQueryResultsRequest{} } +func (m *JobGetQueryResultsRequest) String() string { return proto.CompactTextString(m) } +func (*JobGetQueryResultsRequest) ProtoMessage() {} +func (*JobGetQueryResultsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{14} +} +func (m *JobGetQueryResultsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobGetQueryResultsRequest.Unmarshal(m, b) +} +func (m *JobGetQueryResultsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobGetQueryResultsRequest.Marshal(b, m, deterministic) +} +func (dst *JobGetQueryResultsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobGetQueryResultsRequest.Merge(dst, src) +} +func (m *JobGetQueryResultsRequest) XXX_Size() int { + return xxx_messageInfo_JobGetQueryResultsRequest.Size(m) +} +func (m *JobGetQueryResultsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_JobGetQueryResultsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_JobGetQueryResultsRequest proto.InternalMessageInfo + +func (m *JobGetQueryResultsRequest) GetMaxResults() uint32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *JobGetQueryResultsRequest) GetStartRow() uint64 { + if m != nil { + return m.StartRow + } + return 0 +} + +// Job getQueryResults response. +type JobGetQueryResultsResponse struct { + // Total number of results in query results. + TotalResults uint64 `protobuf:"varint,1,opt,name=total_results,json=totalResults,proto3" json:"total_results,omitempty"` + // The job that was created to run the query. + // It completed if `job.status.state` is `DONE`. + // It failed if `job.status.errorResult` is also present. 
+ Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobGetQueryResultsResponse) Reset() { *m = JobGetQueryResultsResponse{} } +func (m *JobGetQueryResultsResponse) String() string { return proto.CompactTextString(m) } +func (*JobGetQueryResultsResponse) ProtoMessage() {} +func (*JobGetQueryResultsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{15} +} +func (m *JobGetQueryResultsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobGetQueryResultsResponse.Unmarshal(m, b) +} +func (m *JobGetQueryResultsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobGetQueryResultsResponse.Marshal(b, m, deterministic) +} +func (dst *JobGetQueryResultsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobGetQueryResultsResponse.Merge(dst, src) +} +func (m *JobGetQueryResultsResponse) XXX_Size() int { + return xxx_messageInfo_JobGetQueryResultsResponse.Size(m) +} +func (m *JobGetQueryResultsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_JobGetQueryResultsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_JobGetQueryResultsResponse proto.InternalMessageInfo + +func (m *JobGetQueryResultsResponse) GetTotalResults() uint64 { + if m != nil { + return m.TotalResults + } + return 0 +} + +func (m *JobGetQueryResultsResponse) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Job getQueryDone response. +type JobQueryDoneResponse struct { + // The job and status information. + // The job completed if `job.status.state` is `DONE`. + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobQueryDoneResponse) Reset() { *m = JobQueryDoneResponse{} } +func (m *JobQueryDoneResponse) String() string { return proto.CompactTextString(m) } +func (*JobQueryDoneResponse) ProtoMessage() {} +func (*JobQueryDoneResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{16} +} +func (m *JobQueryDoneResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobQueryDoneResponse.Unmarshal(m, b) +} +func (m *JobQueryDoneResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobQueryDoneResponse.Marshal(b, m, deterministic) +} +func (dst *JobQueryDoneResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobQueryDoneResponse.Merge(dst, src) +} +func (m *JobQueryDoneResponse) XXX_Size() int { + return xxx_messageInfo_JobQueryDoneResponse.Size(m) +} +func (m *JobQueryDoneResponse) XXX_DiscardUnknown() { + xxx_messageInfo_JobQueryDoneResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_JobQueryDoneResponse proto.InternalMessageInfo + +func (m *JobQueryDoneResponse) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Query job completed event. +type JobCompletedEvent struct { + // Name of the event. + EventName string `protobuf:"bytes,1,opt,name=event_name,json=eventName,proto3" json:"event_name,omitempty"` + // Job information. 
+ Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobCompletedEvent) Reset() { *m = JobCompletedEvent{} } +func (m *JobCompletedEvent) String() string { return proto.CompactTextString(m) } +func (*JobCompletedEvent) ProtoMessage() {} +func (*JobCompletedEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{17} +} +func (m *JobCompletedEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobCompletedEvent.Unmarshal(m, b) +} +func (m *JobCompletedEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobCompletedEvent.Marshal(b, m, deterministic) +} +func (dst *JobCompletedEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobCompletedEvent.Merge(dst, src) +} +func (m *JobCompletedEvent) XXX_Size() int { + return xxx_messageInfo_JobCompletedEvent.Size(m) +} +func (m *JobCompletedEvent) XXX_DiscardUnknown() { + xxx_messageInfo_JobCompletedEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_JobCompletedEvent proto.InternalMessageInfo + +func (m *JobCompletedEvent) GetEventName() string { + if m != nil { + return m.EventName + } + return "" +} + +func (m *JobCompletedEvent) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Table data-list request. +type TableDataListRequest struct { + // Starting row offset. + StartRow uint64 `protobuf:"varint,1,opt,name=start_row,json=startRow,proto3" json:"start_row,omitempty"` + // Maximum number of results to return. + MaxResults uint32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableDataListRequest) Reset() { *m = TableDataListRequest{} } +func (m *TableDataListRequest) String() string { return proto.CompactTextString(m) } +func (*TableDataListRequest) ProtoMessage() {} +func (*TableDataListRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{18} +} +func (m *TableDataListRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableDataListRequest.Unmarshal(m, b) +} +func (m *TableDataListRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableDataListRequest.Marshal(b, m, deterministic) +} +func (dst *TableDataListRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableDataListRequest.Merge(dst, src) +} +func (m *TableDataListRequest) XXX_Size() int { + return xxx_messageInfo_TableDataListRequest.Size(m) +} +func (m *TableDataListRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TableDataListRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TableDataListRequest proto.InternalMessageInfo + +func (m *TableDataListRequest) GetStartRow() uint64 { + if m != nil { + return m.StartRow + } + return 0 +} + +func (m *TableDataListRequest) GetMaxResults() uint32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +// Describes a BigQuery table. +// See the [Table](/bigquery/docs/reference/v2/tables) API resource +// for more details on individual fields. +// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`. +// `Table.schema` may continue to be present in your logs during this +// transition. +type Table struct { + // The name of the table. 
+ TableName *TableName `protobuf:"bytes,1,opt,name=table_name,json=tableName,proto3" json:"table_name,omitempty"` + // User-provided metadata for the table. + Info *TableInfo `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"` + // A JSON representation of the table's schema. + SchemaJson string `protobuf:"bytes,8,opt,name=schema_json,json=schemaJson,proto3" json:"schema_json,omitempty"` + // If present, this is a virtual table defined by a SQL query. + View *TableViewDefinition `protobuf:"bytes,4,opt,name=view,proto3" json:"view,omitempty"` + // The expiration date for the table, after which the table + // is deleted and the storage reclaimed. + // If not present, the table persists indefinitely. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // The time the table was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time the table was last truncated + // by an operation with a `writeDisposition` of `WRITE_TRUNCATE`. + TruncateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=truncate_time,json=truncateTime,proto3" json:"truncate_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table) Reset() { *m = Table{} } +func (m *Table) String() string { return proto.CompactTextString(m) } +func (*Table) ProtoMessage() {} +func (*Table) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{19} +} +func (m *Table) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table.Unmarshal(m, b) +} +func (m *Table) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table.Marshal(b, m, deterministic) +} +func (dst *Table) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table.Merge(dst, src) +} +func (m *Table) XXX_Size() int { + return xxx_messageInfo_Table.Size(m) +} +func (m *Table) XXX_DiscardUnknown() { + xxx_messageInfo_Table.DiscardUnknown(m) +} + +var xxx_messageInfo_Table proto.InternalMessageInfo + +func (m *Table) GetTableName() *TableName { + if m != nil { + return m.TableName + } + return nil +} + +func (m *Table) GetInfo() *TableInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Table) GetSchemaJson() string { + if m != nil { + return m.SchemaJson + } + return "" +} + +func (m *Table) GetView() *TableViewDefinition { + if m != nil { + return m.View + } + return nil +} + +func (m *Table) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func (m *Table) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Table) GetTruncateTime() *timestamp.Timestamp { + if m != nil { + return m.TruncateTime + } + return nil +} + +// User-provided metadata for a table. +type TableInfo struct { + // A short name for the table, such as`"Analytics Data - Jan 2011"`. + FriendlyName string `protobuf:"bytes,1,opt,name=friendly_name,json=friendlyName,proto3" json:"friendly_name,omitempty"` + // A long description, perhaps several paragraphs, + // describing the table contents in detail. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableInfo) Reset() { *m = TableInfo{} } +func (m *TableInfo) String() string { return proto.CompactTextString(m) } +func (*TableInfo) ProtoMessage() {} +func (*TableInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{20} +} +func (m *TableInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableInfo.Unmarshal(m, b) +} +func (m *TableInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableInfo.Marshal(b, m, deterministic) +} +func (dst *TableInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableInfo.Merge(dst, src) +} +func (m *TableInfo) XXX_Size() int { + return xxx_messageInfo_TableInfo.Size(m) +} +func (m *TableInfo) XXX_DiscardUnknown() { + xxx_messageInfo_TableInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_TableInfo proto.InternalMessageInfo + +func (m *TableInfo) GetFriendlyName() string { + if m != nil { + return m.FriendlyName + } + return "" +} + +func (m *TableInfo) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Describes a virtual table defined by a SQL query. +type TableViewDefinition struct { + // SQL query defining the view. + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableViewDefinition) Reset() { *m = TableViewDefinition{} } +func (m *TableViewDefinition) String() string { return proto.CompactTextString(m) } +func (*TableViewDefinition) ProtoMessage() {} +func (*TableViewDefinition) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{21} +} +func (m *TableViewDefinition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableViewDefinition.Unmarshal(m, b) +} +func (m *TableViewDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableViewDefinition.Marshal(b, m, deterministic) +} +func (dst *TableViewDefinition) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableViewDefinition.Merge(dst, src) +} +func (m *TableViewDefinition) XXX_Size() int { + return xxx_messageInfo_TableViewDefinition.Size(m) +} +func (m *TableViewDefinition) XXX_DiscardUnknown() { + xxx_messageInfo_TableViewDefinition.DiscardUnknown(m) +} + +var xxx_messageInfo_TableViewDefinition proto.InternalMessageInfo + +func (m *TableViewDefinition) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +// BigQuery dataset information. +// See the [Dataset](/bigquery/docs/reference/v2/datasets) API resource +// for more details on individual fields. +type Dataset struct { + // The name of the dataset. + DatasetName *DatasetName `protobuf:"bytes,1,opt,name=dataset_name,json=datasetName,proto3" json:"dataset_name,omitempty"` + // User-provided metadata for the dataset. + Info *DatasetInfo `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"` + // The time the dataset was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time the dataset was last modified. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The access control list for the dataset. + Acl *BigQueryAcl `protobuf:"bytes,6,opt,name=acl,proto3" json:"acl,omitempty"` + // If this field is present, each table that does not specify an + // expiration time is assigned an expiration time by adding this + // duration to the table's `createTime`. If this field is empty, + // there is no default table expiration time. + DefaultTableExpireDuration *duration.Duration `protobuf:"bytes,8,opt,name=default_table_expire_duration,json=defaultTableExpireDuration,proto3" json:"default_table_expire_duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Dataset) Reset() { *m = Dataset{} } +func (m *Dataset) String() string { return proto.CompactTextString(m) } +func (*Dataset) ProtoMessage() {} +func (*Dataset) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{22} +} +func (m *Dataset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Dataset.Unmarshal(m, b) +} +func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Dataset.Marshal(b, m, deterministic) +} +func (dst *Dataset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Dataset.Merge(dst, src) +} +func (m *Dataset) XXX_Size() int { + return xxx_messageInfo_Dataset.Size(m) +} +func (m *Dataset) XXX_DiscardUnknown() { + xxx_messageInfo_Dataset.DiscardUnknown(m) +} + +var xxx_messageInfo_Dataset proto.InternalMessageInfo + +func (m *Dataset) GetDatasetName() *DatasetName { + if m != nil { + return m.DatasetName + } + return nil +} + +func (m *Dataset) GetInfo() *DatasetInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Dataset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Dataset) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Dataset) GetAcl() *BigQueryAcl { + if m != nil { + return m.Acl + } + return nil +} + +func (m *Dataset) GetDefaultTableExpireDuration() *duration.Duration { + if m != nil { + return m.DefaultTableExpireDuration + } + return nil +} + +// User-provided metadata for a dataset. +type DatasetInfo struct { + // A short name for the dataset, such as`"Analytics Data 2011"`. + FriendlyName string `protobuf:"bytes,1,opt,name=friendly_name,json=friendlyName,proto3" json:"friendly_name,omitempty"` + // A long description, perhaps several paragraphs, + // describing the dataset contents in detail. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetInfo) Reset() { *m = DatasetInfo{} } +func (m *DatasetInfo) String() string { return proto.CompactTextString(m) } +func (*DatasetInfo) ProtoMessage() {} +func (*DatasetInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{23} +} +func (m *DatasetInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetInfo.Unmarshal(m, b) +} +func (m *DatasetInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetInfo.Marshal(b, m, deterministic) +} +func (dst *DatasetInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetInfo.Merge(dst, src) +} +func (m *DatasetInfo) XXX_Size() int { + return xxx_messageInfo_DatasetInfo.Size(m) +} +func (m *DatasetInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetInfo proto.InternalMessageInfo + +func (m *DatasetInfo) GetFriendlyName() string { + if m != nil { + return m.FriendlyName + } + return "" +} + +func (m *DatasetInfo) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// An access control list. +type BigQueryAcl struct { + // Access control entry list. + Entries []*BigQueryAcl_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryAcl) Reset() { *m = BigQueryAcl{} } +func (m *BigQueryAcl) String() string { return proto.CompactTextString(m) } +func (*BigQueryAcl) ProtoMessage() {} +func (*BigQueryAcl) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{24} +} +func (m *BigQueryAcl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryAcl.Unmarshal(m, b) +} +func (m *BigQueryAcl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryAcl.Marshal(b, m, deterministic) +} +func (dst *BigQueryAcl) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryAcl.Merge(dst, src) +} +func (m *BigQueryAcl) XXX_Size() int { + return xxx_messageInfo_BigQueryAcl.Size(m) +} +func (m *BigQueryAcl) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryAcl.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryAcl proto.InternalMessageInfo + +func (m *BigQueryAcl) GetEntries() []*BigQueryAcl_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// Access control entry. +type BigQueryAcl_Entry struct { + // The granted role, which can be `READER`, `WRITER`, or `OWNER`. + Role string `protobuf:"bytes,1,opt,name=role,proto3" json:"role,omitempty"` + // Grants access to a group identified by an email address. + GroupEmail string `protobuf:"bytes,2,opt,name=group_email,json=groupEmail,proto3" json:"group_email,omitempty"` + // Grants access to a user identified by an email address. + UserEmail string `protobuf:"bytes,3,opt,name=user_email,json=userEmail,proto3" json:"user_email,omitempty"` + // Grants access to all members of a domain. + Domain string `protobuf:"bytes,4,opt,name=domain,proto3" json:"domain,omitempty"` + // Grants access to special groups. Valid groups are `PROJECT_OWNERS`, + // `PROJECT_READERS`, `PROJECT_WRITERS` and `ALL_AUTHENTICATED_USERS`. 
+ SpecialGroup string `protobuf:"bytes,5,opt,name=special_group,json=specialGroup,proto3" json:"special_group,omitempty"` + // Grants access to a BigQuery View. + ViewName *TableName `protobuf:"bytes,6,opt,name=view_name,json=viewName,proto3" json:"view_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryAcl_Entry) Reset() { *m = BigQueryAcl_Entry{} } +func (m *BigQueryAcl_Entry) String() string { return proto.CompactTextString(m) } +func (*BigQueryAcl_Entry) ProtoMessage() {} +func (*BigQueryAcl_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{24, 0} +} +func (m *BigQueryAcl_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryAcl_Entry.Unmarshal(m, b) +} +func (m *BigQueryAcl_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryAcl_Entry.Marshal(b, m, deterministic) +} +func (dst *BigQueryAcl_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryAcl_Entry.Merge(dst, src) +} +func (m *BigQueryAcl_Entry) XXX_Size() int { + return xxx_messageInfo_BigQueryAcl_Entry.Size(m) +} +func (m *BigQueryAcl_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryAcl_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryAcl_Entry proto.InternalMessageInfo + +func (m *BigQueryAcl_Entry) GetRole() string { + if m != nil { + return m.Role + } + return "" +} + +func (m *BigQueryAcl_Entry) GetGroupEmail() string { + if m != nil { + return m.GroupEmail + } + return "" +} + +func (m *BigQueryAcl_Entry) GetUserEmail() string { + if m != nil { + return m.UserEmail + } + return "" +} + +func (m *BigQueryAcl_Entry) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *BigQueryAcl_Entry) GetSpecialGroup() string { + if m != nil { + return m.SpecialGroup + } + return "" +} + +func (m *BigQueryAcl_Entry) GetViewName() *TableName { + if m != nil { + return m.ViewName + } + return nil +} + +// Describes a job. +type Job struct { + // Job name. + JobName *JobName `protobuf:"bytes,1,opt,name=job_name,json=jobName,proto3" json:"job_name,omitempty"` + // Job configuration. + JobConfiguration *JobConfiguration `protobuf:"bytes,2,opt,name=job_configuration,json=jobConfiguration,proto3" json:"job_configuration,omitempty"` + // Job status. + JobStatus *JobStatus `protobuf:"bytes,3,opt,name=job_status,json=jobStatus,proto3" json:"job_status,omitempty"` + // Job statistics. 
+ JobStatistics *JobStatistics `protobuf:"bytes,4,opt,name=job_statistics,json=jobStatistics,proto3" json:"job_statistics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{25} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetJobName() *JobName { + if m != nil { + return m.JobName + } + return nil +} + +func (m *Job) GetJobConfiguration() *JobConfiguration { + if m != nil { + return m.JobConfiguration + } + return nil +} + +func (m *Job) GetJobStatus() *JobStatus { + if m != nil { + return m.JobStatus + } + return nil +} + +func (m *Job) GetJobStatistics() *JobStatistics { + if m != nil { + return m.JobStatistics + } + return nil +} + +// Job configuration information. +// See the [Jobs](/bigquery/docs/reference/v2/jobs) API resource +// for more details on individual fields. +type JobConfiguration struct { + // Job configuration information. + // + // Types that are valid to be assigned to Configuration: + // *JobConfiguration_Query_ + // *JobConfiguration_Load_ + // *JobConfiguration_Extract_ + // *JobConfiguration_TableCopy_ + Configuration isJobConfiguration_Configuration `protobuf_oneof:"configuration"` + // If true, don't actually run the job. Just check that it would run. 
+ DryRun bool `protobuf:"varint,9,opt,name=dry_run,json=dryRun,proto3" json:"dry_run,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobConfiguration) Reset() { *m = JobConfiguration{} } +func (m *JobConfiguration) String() string { return proto.CompactTextString(m) } +func (*JobConfiguration) ProtoMessage() {} +func (*JobConfiguration) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{26} +} +func (m *JobConfiguration) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobConfiguration.Unmarshal(m, b) +} +func (m *JobConfiguration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobConfiguration.Marshal(b, m, deterministic) +} +func (dst *JobConfiguration) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobConfiguration.Merge(dst, src) +} +func (m *JobConfiguration) XXX_Size() int { + return xxx_messageInfo_JobConfiguration.Size(m) +} +func (m *JobConfiguration) XXX_DiscardUnknown() { + xxx_messageInfo_JobConfiguration.DiscardUnknown(m) +} + +var xxx_messageInfo_JobConfiguration proto.InternalMessageInfo + +type isJobConfiguration_Configuration interface { + isJobConfiguration_Configuration() +} + +type JobConfiguration_Query_ struct { + Query *JobConfiguration_Query `protobuf:"bytes,5,opt,name=query,proto3,oneof"` +} + +type JobConfiguration_Load_ struct { + Load *JobConfiguration_Load `protobuf:"bytes,6,opt,name=load,proto3,oneof"` +} + +type JobConfiguration_Extract_ struct { + Extract *JobConfiguration_Extract `protobuf:"bytes,7,opt,name=extract,proto3,oneof"` +} + +type JobConfiguration_TableCopy_ struct { + TableCopy *JobConfiguration_TableCopy `protobuf:"bytes,8,opt,name=table_copy,json=tableCopy,proto3,oneof"` +} + +func (*JobConfiguration_Query_) isJobConfiguration_Configuration() {} + +func (*JobConfiguration_Load_) isJobConfiguration_Configuration() {} + +func (*JobConfiguration_Extract_) isJobConfiguration_Configuration() {} + +func (*JobConfiguration_TableCopy_) isJobConfiguration_Configuration() {} + +func (m *JobConfiguration) GetConfiguration() isJobConfiguration_Configuration { + if m != nil { + return m.Configuration + } + return nil +} + +func (m *JobConfiguration) GetQuery() *JobConfiguration_Query { + if x, ok := m.GetConfiguration().(*JobConfiguration_Query_); ok { + return x.Query + } + return nil +} + +func (m *JobConfiguration) GetLoad() *JobConfiguration_Load { + if x, ok := m.GetConfiguration().(*JobConfiguration_Load_); ok { + return x.Load + } + return nil +} + +func (m *JobConfiguration) GetExtract() *JobConfiguration_Extract { + if x, ok := m.GetConfiguration().(*JobConfiguration_Extract_); ok { + return x.Extract + } + return nil +} + +func (m *JobConfiguration) GetTableCopy() *JobConfiguration_TableCopy { + if x, ok := m.GetConfiguration().(*JobConfiguration_TableCopy_); ok { + return x.TableCopy + } + return nil +} + +func (m *JobConfiguration) GetDryRun() bool { + if m != nil { + return m.DryRun + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*JobConfiguration) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _JobConfiguration_OneofMarshaler, _JobConfiguration_OneofUnmarshaler, _JobConfiguration_OneofSizer, []interface{}{ + (*JobConfiguration_Query_)(nil), + (*JobConfiguration_Load_)(nil), + (*JobConfiguration_Extract_)(nil), + (*JobConfiguration_TableCopy_)(nil), + } +} + +func _JobConfiguration_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*JobConfiguration) + // configuration + switch x := m.Configuration.(type) { + case *JobConfiguration_Query_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Query); err != nil { + return err + } + case *JobConfiguration_Load_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Load); err != nil { + return err + } + case *JobConfiguration_Extract_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Extract); err != nil { + return err + } + case *JobConfiguration_TableCopy_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TableCopy); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("JobConfiguration.Configuration has unexpected type %T", x) + } + return nil +} + +func _JobConfiguration_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*JobConfiguration) + switch tag { + case 5: // configuration.query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobConfiguration_Query) + err := b.DecodeMessage(msg) + m.Configuration = &JobConfiguration_Query_{msg} + return true, err + case 6: // configuration.load + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobConfiguration_Load) + err := b.DecodeMessage(msg) + m.Configuration = &JobConfiguration_Load_{msg} + return true, err + case 7: // configuration.extract + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobConfiguration_Extract) + err := b.DecodeMessage(msg) + m.Configuration = &JobConfiguration_Extract_{msg} + return true, err + case 8: // configuration.table_copy + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobConfiguration_TableCopy) + err := b.DecodeMessage(msg) + m.Configuration = &JobConfiguration_TableCopy_{msg} + return true, err + default: + return false, nil + } +} + +func _JobConfiguration_OneofSizer(msg proto.Message) (n int) { + m := msg.(*JobConfiguration) + // configuration + switch x := m.Configuration.(type) { + case *JobConfiguration_Query_: + s := proto.Size(x.Query) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *JobConfiguration_Load_: + s := proto.Size(x.Load) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *JobConfiguration_Extract_: + s := proto.Size(x.Extract) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *JobConfiguration_TableCopy_: + s := proto.Size(x.TableCopy) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes a query job, which executes a SQL-like query. +type JobConfiguration_Query struct { + // The SQL query to run. 
+ Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + // The table where results are written. + DestinationTable *TableName `protobuf:"bytes,2,opt,name=destination_table,json=destinationTable,proto3" json:"destination_table,omitempty"` + // Describes when a job is allowed to create a table: + // `CREATE_IF_NEEDED`, `CREATE_NEVER`. + CreateDisposition string `protobuf:"bytes,3,opt,name=create_disposition,json=createDisposition,proto3" json:"create_disposition,omitempty"` + // Describes how writes affect existing tables: + // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`. + WriteDisposition string `protobuf:"bytes,4,opt,name=write_disposition,json=writeDisposition,proto3" json:"write_disposition,omitempty"` + // If a table name is specified without a dataset in a query, + // this dataset will be added to table name. + DefaultDataset *DatasetName `protobuf:"bytes,5,opt,name=default_dataset,json=defaultDataset,proto3" json:"default_dataset,omitempty"` + // Describes data sources outside BigQuery, if needed. + TableDefinitions []*TableDefinition `protobuf:"bytes,6,rep,name=table_definitions,json=tableDefinitions,proto3" json:"table_definitions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobConfiguration_Query) Reset() { *m = JobConfiguration_Query{} } +func (m *JobConfiguration_Query) String() string { return proto.CompactTextString(m) } +func (*JobConfiguration_Query) ProtoMessage() {} +func (*JobConfiguration_Query) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{26, 0} +} +func (m *JobConfiguration_Query) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobConfiguration_Query.Unmarshal(m, b) +} +func (m *JobConfiguration_Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobConfiguration_Query.Marshal(b, m, deterministic) +} +func (dst *JobConfiguration_Query) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobConfiguration_Query.Merge(dst, src) +} +func (m *JobConfiguration_Query) XXX_Size() int { + return xxx_messageInfo_JobConfiguration_Query.Size(m) +} +func (m *JobConfiguration_Query) XXX_DiscardUnknown() { + xxx_messageInfo_JobConfiguration_Query.DiscardUnknown(m) +} + +var xxx_messageInfo_JobConfiguration_Query proto.InternalMessageInfo + +func (m *JobConfiguration_Query) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *JobConfiguration_Query) GetDestinationTable() *TableName { + if m != nil { + return m.DestinationTable + } + return nil +} + +func (m *JobConfiguration_Query) GetCreateDisposition() string { + if m != nil { + return m.CreateDisposition + } + return "" +} + +func (m *JobConfiguration_Query) GetWriteDisposition() string { + if m != nil { + return m.WriteDisposition + } + return "" +} + +func (m *JobConfiguration_Query) GetDefaultDataset() *DatasetName { + if m != nil { + return m.DefaultDataset + } + return nil +} + +func (m *JobConfiguration_Query) GetTableDefinitions() []*TableDefinition { + if m != nil { + return m.TableDefinitions + } + return nil +} + +// Describes a load job, which loads data from an external source via +// the import pipeline. +type JobConfiguration_Load struct { + // URIs for the data to be imported. Only Google Cloud Storage URIs are + // supported. 
+ SourceUris []string `protobuf:"bytes,1,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"` + // The table schema in JSON format representation of a TableSchema. + SchemaJson string `protobuf:"bytes,6,opt,name=schema_json,json=schemaJson,proto3" json:"schema_json,omitempty"` + // The table where the imported data is written. + DestinationTable *TableName `protobuf:"bytes,3,opt,name=destination_table,json=destinationTable,proto3" json:"destination_table,omitempty"` + // Describes when a job is allowed to create a table: + // `CREATE_IF_NEEDED`, `CREATE_NEVER`. + CreateDisposition string `protobuf:"bytes,4,opt,name=create_disposition,json=createDisposition,proto3" json:"create_disposition,omitempty"` + // Describes how writes affect existing tables: + // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`. + WriteDisposition string `protobuf:"bytes,5,opt,name=write_disposition,json=writeDisposition,proto3" json:"write_disposition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobConfiguration_Load) Reset() { *m = JobConfiguration_Load{} } +func (m *JobConfiguration_Load) String() string { return proto.CompactTextString(m) } +func (*JobConfiguration_Load) ProtoMessage() {} +func (*JobConfiguration_Load) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{26, 1} +} +func (m *JobConfiguration_Load) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobConfiguration_Load.Unmarshal(m, b) +} +func (m *JobConfiguration_Load) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobConfiguration_Load.Marshal(b, m, deterministic) +} +func (dst *JobConfiguration_Load) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobConfiguration_Load.Merge(dst, src) +} +func (m *JobConfiguration_Load) XXX_Size() int { + return xxx_messageInfo_JobConfiguration_Load.Size(m) +} +func (m *JobConfiguration_Load) XXX_DiscardUnknown() { + xxx_messageInfo_JobConfiguration_Load.DiscardUnknown(m) +} + +var xxx_messageInfo_JobConfiguration_Load proto.InternalMessageInfo + +func (m *JobConfiguration_Load) GetSourceUris() []string { + if m != nil { + return m.SourceUris + } + return nil +} + +func (m *JobConfiguration_Load) GetSchemaJson() string { + if m != nil { + return m.SchemaJson + } + return "" +} + +func (m *JobConfiguration_Load) GetDestinationTable() *TableName { + if m != nil { + return m.DestinationTable + } + return nil +} + +func (m *JobConfiguration_Load) GetCreateDisposition() string { + if m != nil { + return m.CreateDisposition + } + return "" +} + +func (m *JobConfiguration_Load) GetWriteDisposition() string { + if m != nil { + return m.WriteDisposition + } + return "" +} + +// Describes an extract job, which exports data to an external source +// via the export pipeline. +type JobConfiguration_Extract struct { + // Google Cloud Storage URIs where extracted data should be written. + DestinationUris []string `protobuf:"bytes,1,rep,name=destination_uris,json=destinationUris,proto3" json:"destination_uris,omitempty"` + // The source table. 
+ SourceTable *TableName `protobuf:"bytes,2,opt,name=source_table,json=sourceTable,proto3" json:"source_table,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobConfiguration_Extract) Reset() { *m = JobConfiguration_Extract{} } +func (m *JobConfiguration_Extract) String() string { return proto.CompactTextString(m) } +func (*JobConfiguration_Extract) ProtoMessage() {} +func (*JobConfiguration_Extract) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{26, 2} +} +func (m *JobConfiguration_Extract) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobConfiguration_Extract.Unmarshal(m, b) +} +func (m *JobConfiguration_Extract) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobConfiguration_Extract.Marshal(b, m, deterministic) +} +func (dst *JobConfiguration_Extract) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobConfiguration_Extract.Merge(dst, src) +} +func (m *JobConfiguration_Extract) XXX_Size() int { + return xxx_messageInfo_JobConfiguration_Extract.Size(m) +} +func (m *JobConfiguration_Extract) XXX_DiscardUnknown() { + xxx_messageInfo_JobConfiguration_Extract.DiscardUnknown(m) +} + +var xxx_messageInfo_JobConfiguration_Extract proto.InternalMessageInfo + +func (m *JobConfiguration_Extract) GetDestinationUris() []string { + if m != nil { + return m.DestinationUris + } + return nil +} + +func (m *JobConfiguration_Extract) GetSourceTable() *TableName { + if m != nil { + return m.SourceTable + } + return nil +} + +// Describes a copy job, which copies an existing table to another table. +type JobConfiguration_TableCopy struct { + // Source tables. + SourceTables []*TableName `protobuf:"bytes,1,rep,name=source_tables,json=sourceTables,proto3" json:"source_tables,omitempty"` + // Destination table. + DestinationTable *TableName `protobuf:"bytes,2,opt,name=destination_table,json=destinationTable,proto3" json:"destination_table,omitempty"` + // Describes when a job is allowed to create a table: + // `CREATE_IF_NEEDED`, `CREATE_NEVER`. + CreateDisposition string `protobuf:"bytes,3,opt,name=create_disposition,json=createDisposition,proto3" json:"create_disposition,omitempty"` + // Describes how writes affect existing tables: + // `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`. 
+ WriteDisposition string `protobuf:"bytes,4,opt,name=write_disposition,json=writeDisposition,proto3" json:"write_disposition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobConfiguration_TableCopy) Reset() { *m = JobConfiguration_TableCopy{} } +func (m *JobConfiguration_TableCopy) String() string { return proto.CompactTextString(m) } +func (*JobConfiguration_TableCopy) ProtoMessage() {} +func (*JobConfiguration_TableCopy) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{26, 3} +} +func (m *JobConfiguration_TableCopy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobConfiguration_TableCopy.Unmarshal(m, b) +} +func (m *JobConfiguration_TableCopy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobConfiguration_TableCopy.Marshal(b, m, deterministic) +} +func (dst *JobConfiguration_TableCopy) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobConfiguration_TableCopy.Merge(dst, src) +} +func (m *JobConfiguration_TableCopy) XXX_Size() int { + return xxx_messageInfo_JobConfiguration_TableCopy.Size(m) +} +func (m *JobConfiguration_TableCopy) XXX_DiscardUnknown() { + xxx_messageInfo_JobConfiguration_TableCopy.DiscardUnknown(m) +} + +var xxx_messageInfo_JobConfiguration_TableCopy proto.InternalMessageInfo + +func (m *JobConfiguration_TableCopy) GetSourceTables() []*TableName { + if m != nil { + return m.SourceTables + } + return nil +} + +func (m *JobConfiguration_TableCopy) GetDestinationTable() *TableName { + if m != nil { + return m.DestinationTable + } + return nil +} + +func (m *JobConfiguration_TableCopy) GetCreateDisposition() string { + if m != nil { + return m.CreateDisposition + } + return "" +} + +func (m *JobConfiguration_TableCopy) GetWriteDisposition() string { + if m != nil { + return m.WriteDisposition + } + return "" +} + +// Describes an external data source used in a query. +type TableDefinition struct { + // Name of the table, used in queries. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Google Cloud Storage URIs for the data to be imported. 
+ SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableDefinition) Reset() { *m = TableDefinition{} } +func (m *TableDefinition) String() string { return proto.CompactTextString(m) } +func (*TableDefinition) ProtoMessage() {} +func (*TableDefinition) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{27} +} +func (m *TableDefinition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableDefinition.Unmarshal(m, b) +} +func (m *TableDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableDefinition.Marshal(b, m, deterministic) +} +func (dst *TableDefinition) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableDefinition.Merge(dst, src) +} +func (m *TableDefinition) XXX_Size() int { + return xxx_messageInfo_TableDefinition.Size(m) +} +func (m *TableDefinition) XXX_DiscardUnknown() { + xxx_messageInfo_TableDefinition.DiscardUnknown(m) +} + +var xxx_messageInfo_TableDefinition proto.InternalMessageInfo + +func (m *TableDefinition) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TableDefinition) GetSourceUris() []string { + if m != nil { + return m.SourceUris + } + return nil +} + +// Running state of a job. +type JobStatus struct { + // State of a job: `PENDING`, `RUNNING`, or `DONE`. + State string `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` + // If the job did not complete successfully, this field describes why. + Error *status.Status `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobStatus) Reset() { *m = JobStatus{} } +func (m *JobStatus) String() string { return proto.CompactTextString(m) } +func (*JobStatus) ProtoMessage() {} +func (*JobStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{28} +} +func (m *JobStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobStatus.Unmarshal(m, b) +} +func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic) +} +func (dst *JobStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobStatus.Merge(dst, src) +} +func (m *JobStatus) XXX_Size() int { + return xxx_messageInfo_JobStatus.Size(m) +} +func (m *JobStatus) XXX_DiscardUnknown() { + xxx_messageInfo_JobStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_JobStatus proto.InternalMessageInfo + +func (m *JobStatus) GetState() string { + if m != nil { + return m.State + } + return "" +} + +func (m *JobStatus) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Job statistics that may change after a job starts. +type JobStatistics struct { + // Time when the job was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time when the job started. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time when the job ended. + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Total bytes processed for a job. 
+ TotalProcessedBytes int64 `protobuf:"varint,4,opt,name=total_processed_bytes,json=totalProcessedBytes,proto3" json:"total_processed_bytes,omitempty"` + // Processed bytes, adjusted by the job's CPU usage. + TotalBilledBytes int64 `protobuf:"varint,5,opt,name=total_billed_bytes,json=totalBilledBytes,proto3" json:"total_billed_bytes,omitempty"` + // The tier assigned by CPU-based billing. + BillingTier int32 `protobuf:"varint,7,opt,name=billing_tier,json=billingTier,proto3" json:"billing_tier,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobStatistics) Reset() { *m = JobStatistics{} } +func (m *JobStatistics) String() string { return proto.CompactTextString(m) } +func (*JobStatistics) ProtoMessage() {} +func (*JobStatistics) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{29} +} +func (m *JobStatistics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobStatistics.Unmarshal(m, b) +} +func (m *JobStatistics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobStatistics.Marshal(b, m, deterministic) +} +func (dst *JobStatistics) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobStatistics.Merge(dst, src) +} +func (m *JobStatistics) XXX_Size() int { + return xxx_messageInfo_JobStatistics.Size(m) +} +func (m *JobStatistics) XXX_DiscardUnknown() { + xxx_messageInfo_JobStatistics.DiscardUnknown(m) +} + +var xxx_messageInfo_JobStatistics proto.InternalMessageInfo + +func (m *JobStatistics) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *JobStatistics) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *JobStatistics) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *JobStatistics) GetTotalProcessedBytes() int64 { + if m != nil { + return m.TotalProcessedBytes + } + return 0 +} + +func (m *JobStatistics) GetTotalBilledBytes() int64 { + if m != nil { + return m.TotalBilledBytes + } + return 0 +} + +func (m *JobStatistics) GetBillingTier() int32 { + if m != nil { + return m.BillingTier + } + return 0 +} + +// The fully-qualified name for a dataset. +type DatasetName struct { + // The project ID. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The dataset ID within the project. 
+ DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatasetName) Reset() { *m = DatasetName{} } +func (m *DatasetName) String() string { return proto.CompactTextString(m) } +func (*DatasetName) ProtoMessage() {} +func (*DatasetName) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{30} +} +func (m *DatasetName) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatasetName.Unmarshal(m, b) +} +func (m *DatasetName) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatasetName.Marshal(b, m, deterministic) +} +func (dst *DatasetName) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatasetName.Merge(dst, src) +} +func (m *DatasetName) XXX_Size() int { + return xxx_messageInfo_DatasetName.Size(m) +} +func (m *DatasetName) XXX_DiscardUnknown() { + xxx_messageInfo_DatasetName.DiscardUnknown(m) +} + +var xxx_messageInfo_DatasetName proto.InternalMessageInfo + +func (m *DatasetName) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DatasetName) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +// The fully-qualified name for a table. +type TableName struct { + // The project ID. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The dataset ID within the project. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The table ID of the table within the dataset. + TableId string `protobuf:"bytes,3,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableName) Reset() { *m = TableName{} } +func (m *TableName) String() string { return proto.CompactTextString(m) } +func (*TableName) ProtoMessage() {} +func (*TableName) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{31} +} +func (m *TableName) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableName.Unmarshal(m, b) +} +func (m *TableName) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableName.Marshal(b, m, deterministic) +} +func (dst *TableName) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableName.Merge(dst, src) +} +func (m *TableName) XXX_Size() int { + return xxx_messageInfo_TableName.Size(m) +} +func (m *TableName) XXX_DiscardUnknown() { + xxx_messageInfo_TableName.DiscardUnknown(m) +} + +var xxx_messageInfo_TableName proto.InternalMessageInfo + +func (m *TableName) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *TableName) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *TableName) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +// The fully-qualified name for a job. +type JobName struct { + // The project ID. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The job ID within the project. 
+ JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobName) Reset() { *m = JobName{} } +func (m *JobName) String() string { return proto.CompactTextString(m) } +func (*JobName) ProtoMessage() {} +func (*JobName) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_12e22cd9ac1143c8, []int{32} +} +func (m *JobName) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobName.Unmarshal(m, b) +} +func (m *JobName) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobName.Marshal(b, m, deterministic) +} +func (dst *JobName) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobName.Merge(dst, src) +} +func (m *JobName) XXX_Size() int { + return xxx_messageInfo_JobName.Size(m) +} +func (m *JobName) XXX_DiscardUnknown() { + xxx_messageInfo_JobName.DiscardUnknown(m) +} + +var xxx_messageInfo_JobName proto.InternalMessageInfo + +func (m *JobName) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *JobName) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func init() { + proto.RegisterType((*AuditData)(nil), "google.cloud.bigquery.logging.v1.AuditData") + proto.RegisterType((*TableInsertRequest)(nil), "google.cloud.bigquery.logging.v1.TableInsertRequest") + proto.RegisterType((*TableUpdateRequest)(nil), "google.cloud.bigquery.logging.v1.TableUpdateRequest") + proto.RegisterType((*TableInsertResponse)(nil), "google.cloud.bigquery.logging.v1.TableInsertResponse") + proto.RegisterType((*TableUpdateResponse)(nil), "google.cloud.bigquery.logging.v1.TableUpdateResponse") + proto.RegisterType((*DatasetListRequest)(nil), "google.cloud.bigquery.logging.v1.DatasetListRequest") + proto.RegisterType((*DatasetInsertRequest)(nil), "google.cloud.bigquery.logging.v1.DatasetInsertRequest") + proto.RegisterType((*DatasetInsertResponse)(nil), "google.cloud.bigquery.logging.v1.DatasetInsertResponse") + proto.RegisterType((*DatasetUpdateRequest)(nil), "google.cloud.bigquery.logging.v1.DatasetUpdateRequest") + proto.RegisterType((*DatasetUpdateResponse)(nil), "google.cloud.bigquery.logging.v1.DatasetUpdateResponse") + proto.RegisterType((*JobInsertRequest)(nil), "google.cloud.bigquery.logging.v1.JobInsertRequest") + proto.RegisterType((*JobInsertResponse)(nil), "google.cloud.bigquery.logging.v1.JobInsertResponse") + proto.RegisterType((*JobQueryRequest)(nil), "google.cloud.bigquery.logging.v1.JobQueryRequest") + proto.RegisterType((*JobQueryResponse)(nil), "google.cloud.bigquery.logging.v1.JobQueryResponse") + proto.RegisterType((*JobGetQueryResultsRequest)(nil), "google.cloud.bigquery.logging.v1.JobGetQueryResultsRequest") + proto.RegisterType((*JobGetQueryResultsResponse)(nil), "google.cloud.bigquery.logging.v1.JobGetQueryResultsResponse") + proto.RegisterType((*JobQueryDoneResponse)(nil), "google.cloud.bigquery.logging.v1.JobQueryDoneResponse") + proto.RegisterType((*JobCompletedEvent)(nil), "google.cloud.bigquery.logging.v1.JobCompletedEvent") + proto.RegisterType((*TableDataListRequest)(nil), "google.cloud.bigquery.logging.v1.TableDataListRequest") + proto.RegisterType((*Table)(nil), "google.cloud.bigquery.logging.v1.Table") + proto.RegisterType((*TableInfo)(nil), "google.cloud.bigquery.logging.v1.TableInfo") + proto.RegisterType((*TableViewDefinition)(nil), "google.cloud.bigquery.logging.v1.TableViewDefinition") + 
proto.RegisterType((*Dataset)(nil), "google.cloud.bigquery.logging.v1.Dataset") + proto.RegisterType((*DatasetInfo)(nil), "google.cloud.bigquery.logging.v1.DatasetInfo") + proto.RegisterType((*BigQueryAcl)(nil), "google.cloud.bigquery.logging.v1.BigQueryAcl") + proto.RegisterType((*BigQueryAcl_Entry)(nil), "google.cloud.bigquery.logging.v1.BigQueryAcl.Entry") + proto.RegisterType((*Job)(nil), "google.cloud.bigquery.logging.v1.Job") + proto.RegisterType((*JobConfiguration)(nil), "google.cloud.bigquery.logging.v1.JobConfiguration") + proto.RegisterType((*JobConfiguration_Query)(nil), "google.cloud.bigquery.logging.v1.JobConfiguration.Query") + proto.RegisterType((*JobConfiguration_Load)(nil), "google.cloud.bigquery.logging.v1.JobConfiguration.Load") + proto.RegisterType((*JobConfiguration_Extract)(nil), "google.cloud.bigquery.logging.v1.JobConfiguration.Extract") + proto.RegisterType((*JobConfiguration_TableCopy)(nil), "google.cloud.bigquery.logging.v1.JobConfiguration.TableCopy") + proto.RegisterType((*TableDefinition)(nil), "google.cloud.bigquery.logging.v1.TableDefinition") + proto.RegisterType((*JobStatus)(nil), "google.cloud.bigquery.logging.v1.JobStatus") + proto.RegisterType((*JobStatistics)(nil), "google.cloud.bigquery.logging.v1.JobStatistics") + proto.RegisterType((*DatasetName)(nil), "google.cloud.bigquery.logging.v1.DatasetName") + proto.RegisterType((*TableName)(nil), "google.cloud.bigquery.logging.v1.TableName") + proto.RegisterType((*JobName)(nil), "google.cloud.bigquery.logging.v1.JobName") +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/logging/v1/audit_data.proto", fileDescriptor_audit_data_12e22cd9ac1143c8) +} + +var fileDescriptor_audit_data_12e22cd9ac1143c8 = []byte{ + // 2036 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x59, 0x4f, 0x73, 0x1b, 0x49, + 0x15, 0x8f, 0x2c, 0xc9, 0x92, 0x9e, 0xec, 0xd8, 0xee, 0xd8, 0x9b, 0x58, 0x90, 0xdd, 0x30, 0x40, + 0xb1, 0x29, 0x13, 0xa9, 0x9c, 0x65, 0x09, 0x90, 0xad, 0x4a, 0xd9, 0xb1, 0x89, 0x93, 0xfd, 0x83, + 0x19, 0x1c, 0x17, 0xbb, 0xc5, 0xae, 0x6a, 0x34, 0xd3, 0xd6, 0xb6, 0x76, 0x34, 0x3d, 0x99, 0x69, + 0xc5, 0x31, 0x17, 0x0a, 0x8a, 0x1b, 0x47, 0x3e, 0x0c, 0x07, 0x2e, 0x50, 0x7c, 0x01, 0x8e, 0x5c, + 0xb8, 0xf0, 0x41, 0x28, 0xaa, 0x5f, 0x77, 0x8f, 0x66, 0x46, 0x72, 0x79, 0xc6, 0x68, 0x0f, 0x7b, + 0x9b, 0x79, 0xdd, 0xbf, 0xf7, 0xeb, 0x7e, 0xfd, 0xfe, 0xf5, 0x0c, 0xec, 0x0e, 0x39, 0x1f, 0xfa, + 0xb4, 0xe7, 0xfa, 0x7c, 0xe2, 0xf5, 0x06, 0x6c, 0xf8, 0x6a, 0x42, 0xa3, 0x8b, 0x9e, 0xcf, 0x87, + 0x43, 0x16, 0x0c, 0x7b, 0xaf, 0x77, 0x7b, 0xce, 0xc4, 0x63, 0xa2, 0xef, 0x39, 0xc2, 0xe9, 0x86, + 0x11, 0x17, 0x9c, 0xdc, 0x53, 0x90, 0x2e, 0x42, 0xba, 0x06, 0xd2, 0xd5, 0x90, 0xee, 0xeb, 0xdd, + 0xce, 0xb7, 0xb5, 0x52, 0x27, 0x64, 0x3d, 0x27, 0x08, 0xb8, 0x70, 0x04, 0xe3, 0x41, 0xac, 0xf0, + 0x9d, 0xb7, 0xf5, 0x28, 0xbe, 0x0d, 0x26, 0x67, 0x3d, 0x6f, 0x12, 0xe1, 0x04, 0x3d, 0xfe, 0x4e, + 0x7e, 0x5c, 0xb0, 0x31, 0x8d, 0x85, 0x33, 0x0e, 0xf5, 0x84, 0xdb, 0x7a, 0x42, 0x14, 0xba, 0xbd, + 0x58, 0x38, 0x62, 0xa2, 0x35, 0x5b, 0xff, 0x5e, 0x83, 0xd6, 0x9e, 0x5c, 0xee, 0x81, 0x23, 0x1c, + 0xf2, 0x25, 0x6c, 0x0a, 0x67, 0xe0, 0xd3, 0x3e, 0x0b, 0x62, 0x1a, 0x89, 0x7e, 0x44, 0x5f, 0x4d, + 0x68, 0x2c, 0xee, 0x54, 0xee, 0x55, 0xde, 0x6d, 0x3f, 0xfc, 0x51, 0xf7, 0xaa, 0x6d, 0x74, 0x4f, + 0x24, 0xfa, 0x39, 0x82, 0x6d, 0x85, 0x3d, 0xba, 0x61, 0x13, 0x31, 0x23, 0x9d, 0x32, 0x4d, 0x42, + 0xcf, 0x11, 0x34, 0x61, 0x5a, 0x2f, 0xc5, 0xf4, 0x12, 0xc1, 0x79, 0xa6, 0x8c, 0x54, 0x32, 0xc9, + 0x93, 
0x88, 0xa9, 0xe8, 0xfb, 0x2c, 0x9e, 0xee, 0x69, 0xa9, 0x28, 0xd3, 0x81, 0x42, 0x7f, 0xc4, + 0xe2, 0xf4, 0x9e, 0xbc, 0x19, 0x29, 0x09, 0xe0, 0x2d, 0xc3, 0x94, 0xb3, 0x5f, 0x15, 0xb9, 0x7e, + 0x5c, 0x98, 0x2b, 0x6f, 0x41, 0xb3, 0x83, 0xac, 0x0d, 0x53, 0x7c, 0x39, 0x2b, 0xd6, 0x4a, 0xf2, + 0xe5, 0xed, 0x68, 0xf8, 0xb2, 0x96, 0x1c, 0x00, 0x19, 0xf1, 0x41, 0x7e, 0x6f, 0x75, 0xe4, 0x7a, + 0x78, 0x35, 0xd7, 0x0b, 0x3e, 0xc8, 0xef, 0x6b, 0x7d, 0x94, 0x93, 0x91, 0x3e, 0x6c, 0x48, 0x0e, + 0x04, 0x27, 0x14, 0xcb, 0x48, 0xb1, 0x5b, 0x88, 0xe2, 0x97, 0x52, 0x36, 0x65, 0x58, 0x1b, 0x65, + 0x45, 0xe4, 0x77, 0x70, 0x57, 0x12, 0x0c, 0xa9, 0x48, 0x48, 0xe2, 0x89, 0x2f, 0xe2, 0x84, 0xac, + 0x81, 0x64, 0x8f, 0x0b, 0x91, 0x3d, 0xa3, 0x42, 0x2b, 0x47, 0x1d, 0x53, 0xda, 0xed, 0xd1, 0x65, + 0x83, 0x84, 0xc3, 0x6d, 0xe5, 0xf9, 0xd2, 0xc6, 0x59, 0x97, 0x6c, 0x16, 0x3d, 0x36, 0x74, 0x7e, + 0x79, 0x76, 0x59, 0xa7, 0x54, 0x21, 0x95, 0x93, 0x93, 0xaf, 0x60, 0x2b, 0x17, 0xd4, 0x71, 0xc8, + 0x83, 0x98, 0xde, 0x69, 0x21, 0xdd, 0xfb, 0x25, 0xa3, 0x5a, 0x81, 0x8f, 0x2a, 0xf6, 0x2d, 0x31, + 0x2b, 0x9e, 0x92, 0x25, 0x1e, 0xa9, 0xc9, 0xa0, 0x14, 0x99, 0x71, 0xbc, 0x1c, 0x59, 0x56, 0x4c, + 0x5e, 0xc1, 0xed, 0x99, 0x80, 0xd3, 0x74, 0x6d, 0xa4, 0x7b, 0x54, 0x3a, 0xe2, 0x12, 0xc2, 0x2d, + 0x6f, 0xde, 0x40, 0x9a, 0x32, 0xbf, 0xc3, 0x95, 0x92, 0x94, 0x33, 0x7b, 0xdc, 0xf2, 0xe6, 0x0d, + 0x10, 0x0a, 0xb7, 0x32, 0x61, 0xa7, 0xe9, 0x08, 0xd2, 0xbd, 0x57, 0x2a, 0xee, 0x12, 0xaa, 0x8d, + 0x51, 0x5e, 0x68, 0xa2, 0x3b, 0x09, 0x0a, 0xc5, 0xb2, 0x5a, 0x22, 0xba, 0x8d, 0xb7, 0x1b, 0x92, + 0xf5, 0x51, 0x4e, 0x46, 0xfe, 0x50, 0x81, 0xb7, 0x2f, 0x8b, 0x3e, 0x4d, 0x78, 0x13, 0x09, 0x3f, + 0xb8, 0x5e, 0xf8, 0x25, 0xd4, 0x9d, 0xd1, 0xa5, 0xa3, 0x32, 0x00, 0xa7, 0x1b, 0xf5, 0x78, 0x90, + 0x3a, 0xc2, 0xb5, 0xa2, 0x01, 0x68, 0x76, 0x7b, 0xc0, 0x83, 0xf4, 0x09, 0x6e, 0x8e, 0xe6, 0xc8, + 0x89, 0xab, 0x0e, 0xd0, 0xe5, 0xe3, 0xd0, 0xa7, 0x82, 0x7a, 0x7d, 0xfa, 0x9a, 0x06, 0xe2, 0xce, + 0x46, 0x89, 0x03, 0x7c, 0x6a, 0xb0, 0x87, 0x12, 0x8a, 0xc7, 0x97, 0x15, 0xed, 0xb7, 0xa0, 0xa1, + 0xd3, 0xc8, 0x3e, 0x40, 0xd3, 0xec, 0xc8, 0xfa, 0x14, 0xc8, 0x6c, 0x4d, 0x26, 0x4f, 0x71, 0x06, + 0x9f, 0x44, 0x2e, 0xd5, 0xb5, 0xfd, 0x07, 0x05, 0x03, 0xd3, 0x4e, 0x80, 0x89, 0xea, 0x6c, 0x91, + 0x58, 0x88, 0xea, 0xcf, 0xe0, 0xd6, 0x9c, 0x9c, 0xb3, 0x58, 0xdd, 0xb9, 0x28, 0x5b, 0x88, 0xee, + 0x1e, 0x90, 0xd9, 0x6e, 0x81, 0x6c, 0x43, 0x13, 0xd3, 0xbc, 0xe3, 0xfb, 0xa8, 0xba, 0x69, 0x37, + 0xe4, 0xfb, 0x9e, 0xef, 0x5b, 0x9f, 0xc3, 0xe6, 0xbc, 0x92, 0x4f, 0x0e, 0x67, 0x56, 0x73, 0xbf, + 0x70, 0x5e, 0x49, 0xad, 0xe7, 0x0b, 0xd8, 0x9a, 0x9b, 0xdf, 0x16, 0xa5, 0x7f, 0xba, 0xfc, 0xac, + 0x13, 0x2c, 0x7c, 0xf9, 0xb9, 0xc3, 0x5a, 0x90, 0xfe, 0x97, 0xb0, 0x9e, 0x6f, 0x4a, 0xc8, 0xde, + 0x8c, 0xea, 0xef, 0x17, 0x8a, 0xd0, 0x94, 0xda, 0x53, 0xd8, 0x98, 0xc9, 0xb9, 0x8b, 0xd0, 0xfb, + 0xaf, 0x0a, 0xac, 0xe5, 0x3a, 0x1c, 0xb2, 0x09, 0x75, 0x44, 0xa1, 0xce, 0x96, 0xad, 0x5e, 0xc8, + 0x3b, 0xd0, 0x1e, 0x3b, 0x6f, 0x4c, 0x72, 0xc5, 0x56, 0x77, 0xd5, 0x86, 0xb1, 0xf3, 0x46, 0xe7, + 0x42, 0x72, 0x0a, 0x6b, 0x1e, 0x3d, 0x73, 0x26, 0xbe, 0xba, 0xa6, 0xc4, 0xd4, 0xf4, 0xa8, 0x0f, + 0x0a, 0xdb, 0xf1, 0x13, 0x67, 0x4c, 0xed, 0x9b, 0x5a, 0x8b, 0x96, 0x91, 0xbb, 0x00, 0x61, 0xc4, + 0x47, 0xd4, 0x15, 0x7d, 0xe6, 0x61, 0x1b, 0xda, 0xb2, 0x5b, 0x5a, 0xf2, 0xdc, 0x23, 0xb7, 0xa1, + 0xe1, 0xc9, 0xa4, 0x3f, 0x09, 0xb0, 0x6d, 0x6c, 0xda, 0xcb, 0x5e, 0x74, 0x61, 0x4f, 0x02, 0x2b, + 0xc4, 0x93, 0xc8, 0x16, 0x8b, 0xef, 0xc2, 0xaa, 0xe0, 0xc2, 0xf1, 0x93, 0x6d, 0xc8, 0x2d, 0xd6, + 0xec, 0x15, 0x14, 0x9a, 0x8d, 
0x3c, 0x82, 0xea, 0x88, 0x0f, 0x74, 0x33, 0x5f, 0xd0, 0xa2, 0x12, + 0x61, 0x7d, 0x0a, 0xdb, 0x97, 0x36, 0x70, 0x79, 0xfb, 0x55, 0x66, 0xec, 0xf7, 0x2d, 0x68, 0xc5, + 0xc2, 0x91, 0xe5, 0x98, 0x9f, 0x23, 0x79, 0xcd, 0x6e, 0xa2, 0xc0, 0xe6, 0xe7, 0xd6, 0x6f, 0xa1, + 0x73, 0x79, 0x71, 0xfa, 0x9a, 0xb7, 0xf5, 0x0b, 0xd8, 0x9c, 0x57, 0x9b, 0x8c, 0xc2, 0x4a, 0x69, + 0x85, 0x5f, 0xa1, 0x33, 0x67, 0x8b, 0x8d, 0x3c, 0x66, 0xac, 0x61, 0xfd, 0xc0, 0x19, 0x53, 0xed, + 0x7a, 0x2d, 0x94, 0x48, 0xaf, 0xb8, 0xfe, 0xea, 0x4f, 0x60, 0x73, 0x5e, 0x6b, 0x9b, 0x35, 0x77, + 0x25, 0x6b, 0xee, 0x2b, 0x9d, 0xdd, 0xfa, 0x6b, 0x15, 0xea, 0xa8, 0x96, 0xbc, 0x00, 0x50, 0xdd, + 0x69, 0xb2, 0xee, 0xf6, 0xc3, 0x9d, 0x82, 0x69, 0x1e, 0xfd, 0xbd, 0x25, 0xcc, 0x23, 0x79, 0x02, + 0x35, 0x16, 0x9c, 0x71, 0xbd, 0xcb, 0x9d, 0xc2, 0x5d, 0xf4, 0x19, 0xb7, 0x11, 0x28, 0xd7, 0x1d, + 0xbb, 0x5f, 0xd2, 0xb1, 0xd3, 0x1f, 0xc5, 0x3c, 0xc0, 0xe6, 0xbf, 0x65, 0x83, 0x12, 0xbd, 0x88, + 0x79, 0x40, 0x9e, 0x43, 0xed, 0x35, 0xa3, 0xe7, 0xfa, 0x36, 0x57, 0xb4, 0x75, 0x3e, 0x65, 0xf4, + 0xfc, 0x80, 0x9e, 0xb1, 0x80, 0x09, 0xc6, 0x03, 0x1b, 0x55, 0x90, 0xc7, 0xd0, 0xa6, 0x6f, 0x42, + 0x16, 0xd1, 0xbe, 0x60, 0x63, 0xaa, 0xef, 0x6c, 0x1d, 0xa3, 0xd1, 0x7c, 0x36, 0xe8, 0x9e, 0x98, + 0xcf, 0x06, 0x36, 0xa8, 0xe9, 0x52, 0x20, 0xc1, 0x6e, 0x44, 0x65, 0xaf, 0x8b, 0xe0, 0xe5, 0xab, + 0xc1, 0x6a, 0x3a, 0x82, 0x9f, 0xc0, 0xaa, 0x88, 0x26, 0x81, 0x9b, 0xc0, 0x1b, 0x57, 0xc2, 0x57, + 0x0c, 0x40, 0x8a, 0x2c, 0x1b, 0x5a, 0x89, 0xe5, 0x64, 0xf0, 0x9c, 0x45, 0x8c, 0x06, 0x9e, 0x7f, + 0x91, 0xf6, 0xbd, 0x15, 0x23, 0xc4, 0x93, 0xb9, 0x07, 0x6d, 0x8f, 0xc6, 0x6e, 0xc4, 0x42, 0x69, + 0x01, 0x3c, 0xa0, 0x96, 0x9d, 0x16, 0x59, 0x3b, 0xba, 0x07, 0xc8, 0xda, 0x6a, 0x7e, 0x32, 0xb5, + 0xfe, 0x52, 0x85, 0x86, 0xc9, 0x6f, 0xc7, 0xb0, 0x62, 0xda, 0xff, 0x94, 0x0b, 0x95, 0x4c, 0x9a, + 0x6d, 0x6f, 0xfa, 0x42, 0xf6, 0x32, 0x6e, 0xf4, 0xa0, 0xc4, 0x85, 0x25, 0x71, 0xa4, 0xdc, 0xf9, + 0xd4, 0x4a, 0x9d, 0xcf, 0x63, 0x68, 0xeb, 0x8b, 0x4c, 0x51, 0xcf, 0x50, 0xd3, 0xf5, 0xe1, 0x56, + 0x1d, 0xd7, 0xd7, 0x1e, 0x51, 0x60, 0xed, 0xfb, 0x6c, 0x88, 0xa9, 0x69, 0xcf, 0xf5, 0x6d, 0x89, + 0x24, 0xbf, 0x81, 0xbb, 0xa6, 0x0e, 0xa9, 0xc0, 0xd4, 0x5e, 0x6a, 0xbe, 0x6f, 0xe9, 0x2b, 0xf1, + 0xf6, 0xcc, 0x7a, 0x0e, 0xf4, 0x04, 0xbb, 0xa3, 0xf1, 0x78, 0x9e, 0x87, 0x88, 0x36, 0x63, 0xd6, + 0x09, 0xb4, 0x53, 0xd6, 0x5a, 0x94, 0xf3, 0xfc, 0x7d, 0x09, 0xda, 0xa9, 0x8d, 0x90, 0x8f, 0xa1, + 0x41, 0x03, 0x11, 0x31, 0x2a, 0x53, 0x79, 0xb5, 0x58, 0x4b, 0x9f, 0xc2, 0x77, 0x0f, 0x03, 0x11, + 0x5d, 0xd8, 0x46, 0x47, 0xe7, 0x3f, 0x15, 0xa8, 0xa3, 0x88, 0x10, 0xa8, 0x45, 0xdc, 0x37, 0xcb, + 0xc4, 0x67, 0x99, 0x34, 0x86, 0x11, 0x9f, 0x84, 0x7d, 0x3a, 0x76, 0x98, 0xaf, 0x97, 0x07, 0x28, + 0x3a, 0x94, 0x12, 0x99, 0x9a, 0x27, 0x31, 0x8d, 0xf4, 0x78, 0x55, 0xa5, 0x66, 0x29, 0x51, 0xc3, + 0x6f, 0xc1, 0xb2, 0xc7, 0xc7, 0x0e, 0x0b, 0x74, 0x71, 0xd6, 0x6f, 0xd2, 0x36, 0x71, 0x48, 0x5d, + 0xe6, 0xf8, 0x7d, 0x54, 0x86, 0x8e, 0xd0, 0xb2, 0x57, 0xb4, 0xf0, 0x99, 0x94, 0x91, 0x23, 0x68, + 0xc9, 0x6c, 0xa2, 0x8c, 0xb7, 0x5c, 0x3e, 0x7b, 0x36, 0x25, 0x5a, 0x3e, 0x59, 0xff, 0x5c, 0x82, + 0xea, 0x0b, 0x3e, 0x20, 0x07, 0xd0, 0x94, 0x57, 0xa3, 0x54, 0x2c, 0xdd, 0x2f, 0x54, 0x2e, 0x50, + 0x5d, 0x63, 0xa4, 0x1e, 0xcc, 0x47, 0x23, 0x97, 0x07, 0x67, 0x6c, 0x68, 0x3c, 0x67, 0xa9, 0xc4, + 0xcd, 0xf5, 0x69, 0x1a, 0x89, 0xf7, 0xd6, 0x8c, 0x44, 0xd6, 0x0d, 0x49, 0xa0, 0xbe, 0x9c, 0xea, + 0x4e, 0x69, 0xa7, 0x90, 0xe6, 0x5f, 0x21, 0xc4, 0x6e, 0x8d, 0xcc, 0x23, 0x39, 0x85, 0x9b, 0x46, + 0x17, 0x8b, 0x05, 0x73, 0x63, 0x1d, 0xb0, 0xbd, 0xc2, 
0xfa, 0x14, 0xcc, 0x5e, 0x1d, 0xa5, 0x5f, + 0xad, 0xbf, 0xb5, 0xb1, 0x87, 0xca, 0x2e, 0xfc, 0xd8, 0x64, 0x34, 0x15, 0xd7, 0x3f, 0x29, 0x6f, + 0x8d, 0x2e, 0xfa, 0xe9, 0xd1, 0x0d, 0xd3, 0x5a, 0x7e, 0x0c, 0x35, 0x9f, 0x3b, 0x9e, 0x3e, 0xfe, + 0x47, 0xd7, 0x50, 0xf8, 0x11, 0x77, 0xbc, 0xa3, 0x1b, 0x36, 0xaa, 0x21, 0xa7, 0xd0, 0xa0, 0x6f, + 0x44, 0xe4, 0xb8, 0xe6, 0xc3, 0xdb, 0xcf, 0xae, 0xa1, 0xf1, 0x50, 0x69, 0x38, 0xba, 0x61, 0x1b, + 0x65, 0xe4, 0x73, 0x53, 0xe9, 0x5d, 0x1e, 0x5e, 0xe8, 0x2c, 0xf2, 0xc1, 0x35, 0x54, 0xa3, 0xf3, + 0x3e, 0xe5, 0xa1, 0xb4, 0x80, 0x2a, 0xfe, 0xf2, 0x25, 0xdd, 0xc8, 0xb6, 0xd2, 0x8d, 0x6c, 0xe7, + 0x4f, 0x55, 0xa8, 0xa3, 0xc5, 0x2e, 0xe9, 0xcc, 0x7f, 0x0d, 0x1b, 0x1e, 0x8d, 0x05, 0x0b, 0x50, + 0xbd, 0x4a, 0x7a, 0x25, 0x5b, 0x08, 0xf4, 0xfd, 0xf5, 0x94, 0x16, 0xd5, 0xdb, 0x3c, 0x00, 0xa2, + 0xab, 0x80, 0xc7, 0xe2, 0x90, 0xc7, 0x58, 0xd2, 0x74, 0x02, 0xd8, 0x50, 0x23, 0x07, 0xd3, 0x01, + 0xb2, 0x03, 0x1b, 0xe7, 0x11, 0xcb, 0xcd, 0x56, 0x39, 0x61, 0x1d, 0x07, 0xd2, 0x93, 0xe7, 0x5c, + 0x17, 0xea, 0x8b, 0xb8, 0x2e, 0x7c, 0x01, 0x1b, 0xfa, 0x5b, 0x68, 0x52, 0x84, 0xe3, 0x3b, 0xcb, + 0x98, 0x44, 0x77, 0x8b, 0x7e, 0x05, 0x9d, 0xb6, 0x3a, 0xeb, 0x22, 0x2b, 0x88, 0x3b, 0xff, 0xad, + 0x40, 0x4d, 0xba, 0x1b, 0xf6, 0x5a, 0x78, 0x89, 0xea, 0x4f, 0x22, 0xa6, 0xf2, 0xb4, 0xec, 0xb5, + 0x50, 0xf4, 0x32, 0x62, 0x71, 0xbe, 0x19, 0x5b, 0x9e, 0x69, 0xc6, 0xe6, 0x1e, 0x5c, 0xf5, 0xeb, + 0x3b, 0xb8, 0x5a, 0xa9, 0x83, 0xab, 0xcf, 0x3f, 0xb8, 0xce, 0x1f, 0x2b, 0xd0, 0xd0, 0xd1, 0x41, + 0xee, 0x43, 0x9a, 0x3b, 0x6d, 0x88, 0xb5, 0x94, 0x1c, 0xad, 0xf1, 0x09, 0xac, 0x68, 0x73, 0x5d, + 0xdb, 0x41, 0xb5, 0xbd, 0x51, 0xd0, 0xf9, 0xf3, 0x92, 0x6e, 0xe2, 0x30, 0x78, 0x8e, 0x61, 0x35, + 0xad, 0xdd, 0x94, 0xcd, 0x52, 0xea, 0x57, 0x52, 0xea, 0xe3, 0x6f, 0x66, 0x54, 0xed, 0xaf, 0xc1, + 0x6a, 0xa6, 0x64, 0x59, 0x3f, 0x87, 0xb5, 0x9c, 0x4f, 0xcb, 0x1e, 0x20, 0xd5, 0xaa, 0xe0, 0x73, + 0xde, 0x99, 0x97, 0xf2, 0xce, 0x6c, 0x7d, 0x08, 0xad, 0xa4, 0xf4, 0xc8, 0x3c, 0x24, 0x6b, 0x8d, + 0x51, 0xa1, 0x5e, 0xc8, 0xbb, 0x50, 0xa7, 0x51, 0xc4, 0x23, 0x6d, 0x25, 0x62, 0xac, 0x14, 0x85, + 0x6e, 0x57, 0xd7, 0x2c, 0x35, 0xc1, 0xfa, 0xc7, 0x12, 0xac, 0x66, 0x0a, 0x4f, 0xbe, 0xdf, 0xac, + 0x94, 0xea, 0x37, 0x7f, 0x0a, 0xa0, 0xae, 0x72, 0x88, 0x5d, 0xba, 0x12, 0xab, 0x2e, 0x7e, 0x08, + 0x7d, 0x1f, 0x9a, 0x34, 0xf0, 0x14, 0xb0, 0x7a, 0x25, 0xb0, 0x41, 0x03, 0x0f, 0x61, 0x0f, 0x61, + 0x4b, 0x5d, 0xb8, 0xc3, 0x88, 0xbb, 0x34, 0x8e, 0xa9, 0xd7, 0x1f, 0x5c, 0x08, 0xaa, 0xea, 0x6e, + 0xd5, 0xbe, 0x85, 0x83, 0xc7, 0x66, 0x6c, 0x5f, 0x0e, 0x91, 0x1f, 0x02, 0x51, 0x98, 0x01, 0xf3, + 0xfd, 0x04, 0x50, 0x47, 0xc0, 0x3a, 0x8e, 0xec, 0xe3, 0x80, 0x9a, 0xfd, 0x1d, 0x58, 0x91, 0xf3, + 0x58, 0x30, 0xec, 0x0b, 0x46, 0x23, 0xac, 0x64, 0x75, 0xbb, 0xad, 0x65, 0x27, 0x8c, 0x46, 0xd6, + 0x87, 0x49, 0x2b, 0x8a, 0x1d, 0x4b, 0xf6, 0x3b, 0x49, 0x25, 0xff, 0x9d, 0xe4, 0x2e, 0x40, 0xf2, + 0x63, 0xc3, 0xd3, 0x4d, 0x5e, 0xcb, 0xfc, 0x90, 0xf0, 0x2c, 0x4f, 0x47, 0xd3, 0xff, 0xaf, 0x8a, + 0x6c, 0x43, 0x53, 0xff, 0x1c, 0xf2, 0xb4, 0x57, 0x37, 0xd4, 0x6f, 0x1d, 0xcf, 0x7a, 0x02, 0x0d, + 0xdd, 0x69, 0x5d, 0xc5, 0xb1, 0x05, 0xcb, 0xf8, 0x87, 0xc2, 0xe8, 0xaf, 0x8f, 0xf8, 0xe0, 0xb9, + 0xb7, 0xff, 0xfb, 0x0a, 0x7c, 0xcf, 0xe5, 0xe3, 0x2b, 0x03, 0x70, 0xff, 0x66, 0xf2, 0x07, 0xfa, + 0x58, 0x9e, 0xe3, 0x71, 0xe5, 0xb3, 0x67, 0x1a, 0x33, 0xe4, 0xbe, 0x13, 0x0c, 0xbb, 0x3c, 0x1a, + 0xf6, 0x86, 0x34, 0xc0, 0x53, 0xee, 0xa9, 0x21, 0x27, 0x64, 0xf1, 0xe5, 0xbf, 0xe0, 0x1f, 0xeb, + 0xc7, 0xc1, 0x32, 0x62, 0xde, 0xfb, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x76, 
0x60, 0x19, 0x87, + 0xb5, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/avro.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/avro.pb.go new file mode 100644 index 0000000..11b89dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/avro.pb.go @@ -0,0 +1,138 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/storage/v1beta1/avro.proto + +package storage // import "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Avro schema. +type AvroSchema struct { + // Json serialized schema, as described at + // https://avro.apache.org/docs/1.8.1/spec.html + Schema string `protobuf:"bytes,1,opt,name=schema,proto3" json:"schema,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AvroSchema) Reset() { *m = AvroSchema{} } +func (m *AvroSchema) String() string { return proto.CompactTextString(m) } +func (*AvroSchema) ProtoMessage() {} +func (*AvroSchema) Descriptor() ([]byte, []int) { + return fileDescriptor_avro_4770a0731176dc2d, []int{0} +} +func (m *AvroSchema) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AvroSchema.Unmarshal(m, b) +} +func (m *AvroSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AvroSchema.Marshal(b, m, deterministic) +} +func (dst *AvroSchema) XXX_Merge(src proto.Message) { + xxx_messageInfo_AvroSchema.Merge(dst, src) +} +func (m *AvroSchema) XXX_Size() int { + return xxx_messageInfo_AvroSchema.Size(m) +} +func (m *AvroSchema) XXX_DiscardUnknown() { + xxx_messageInfo_AvroSchema.DiscardUnknown(m) +} + +var xxx_messageInfo_AvroSchema proto.InternalMessageInfo + +func (m *AvroSchema) GetSchema() string { + if m != nil { + return m.Schema + } + return "" +} + +// Avro rows. +type AvroRows struct { + // Binary serialized rows in a block. + SerializedBinaryRows []byte `protobuf:"bytes,1,opt,name=serialized_binary_rows,json=serializedBinaryRows,proto3" json:"serialized_binary_rows,omitempty"` + // The count of rows in the returning block. 
+ RowCount int64 `protobuf:"varint,2,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AvroRows) Reset() { *m = AvroRows{} } +func (m *AvroRows) String() string { return proto.CompactTextString(m) } +func (*AvroRows) ProtoMessage() {} +func (*AvroRows) Descriptor() ([]byte, []int) { + return fileDescriptor_avro_4770a0731176dc2d, []int{1} +} +func (m *AvroRows) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AvroRows.Unmarshal(m, b) +} +func (m *AvroRows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AvroRows.Marshal(b, m, deterministic) +} +func (dst *AvroRows) XXX_Merge(src proto.Message) { + xxx_messageInfo_AvroRows.Merge(dst, src) +} +func (m *AvroRows) XXX_Size() int { + return xxx_messageInfo_AvroRows.Size(m) +} +func (m *AvroRows) XXX_DiscardUnknown() { + xxx_messageInfo_AvroRows.DiscardUnknown(m) +} + +var xxx_messageInfo_AvroRows proto.InternalMessageInfo + +func (m *AvroRows) GetSerializedBinaryRows() []byte { + if m != nil { + return m.SerializedBinaryRows + } + return nil +} + +func (m *AvroRows) GetRowCount() int64 { + if m != nil { + return m.RowCount + } + return 0 +} + +func init() { + proto.RegisterType((*AvroSchema)(nil), "google.cloud.bigquery.storage.v1beta1.AvroSchema") + proto.RegisterType((*AvroRows)(nil), "google.cloud.bigquery.storage.v1beta1.AvroRows") +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/storage/v1beta1/avro.proto", fileDescriptor_avro_4770a0731176dc2d) +} + +var fileDescriptor_avro_4770a0731176dc2d = []byte{ + // 242 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd0, 0xcf, 0x4b, 0xc3, 0x30, + 0x14, 0x07, 0x70, 0xaa, 0x30, 0xd6, 0xe0, 0xa9, 0xc8, 0x18, 0x78, 0x19, 0x43, 0x61, 0x5e, 0x12, + 0x87, 0xde, 0x3c, 0x59, 0xaf, 0x1e, 0xa4, 0xde, 0x04, 0x29, 0x69, 0x17, 0x9e, 0x81, 0xae, 0xdf, + 0xf9, 0xd2, 0x1f, 0xcc, 0xb3, 0x7f, 0xb8, 0x24, 0x8d, 0x78, 0xd4, 0x5b, 0xbe, 0x7c, 0xf3, 0x81, + 0xf7, 0x9e, 0xb8, 0x21, 0x80, 0x1a, 0xa3, 0xea, 0x06, 0xfd, 0x4e, 0x55, 0x96, 0x3e, 0x7a, 0xc3, + 0x47, 0xe5, 0x3a, 0xb0, 0x26, 0xa3, 0x86, 0x6d, 0x65, 0x3a, 0xbd, 0x55, 0x7a, 0x60, 0xc8, 0x03, + 0xa3, 0x43, 0x76, 0x35, 0x09, 0x19, 0x84, 0xfc, 0x11, 0x32, 0x0a, 0x19, 0xc5, 0xfa, 0x52, 0x88, + 0x87, 0x81, 0xf1, 0x52, 0xbf, 0x9b, 0xbd, 0xce, 0x16, 0x62, 0xe6, 0xc2, 0x6b, 0x99, 0xac, 0x92, + 0x4d, 0x5a, 0xc4, 0xb4, 0x7e, 0x13, 0x73, 0xff, 0xab, 0xc0, 0xe8, 0xb2, 0x3b, 0xb1, 0x70, 0x86, + 0xad, 0x6e, 0xec, 0xa7, 0xd9, 0x95, 0x95, 0x6d, 0x35, 0x1f, 0x4b, 0xc6, 0xe8, 0x82, 0x39, 0x2b, + 0xce, 0x7f, 0xdb, 0x3c, 0x94, 0x41, 0x5d, 0x88, 0x94, 0x31, 0x96, 0x35, 0xfa, 0xb6, 0x5b, 0x9e, + 0xac, 0x92, 0xcd, 0x69, 0x31, 0x67, 0x8c, 0x8f, 0x3e, 0xe7, 0x5f, 0x89, 0xb8, 0xae, 0xb1, 0x97, + 0xff, 0x1a, 0x39, 0x4f, 0xfd, 0x28, 0xcf, 0x7e, 0xc9, 0xd7, 0xa7, 0x28, 0x08, 0x8d, 0x6e, 0x49, + 0x82, 0x49, 0x91, 0x69, 0xc3, 0x01, 0xd4, 0x54, 0xe9, 0x83, 0x75, 0x7f, 0x5c, 0xed, 0x3e, 0xe6, + 0x6a, 0x16, 0xe0, 0xed, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x16, 0xaf, 0x83, 0xb1, 0x6d, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/read_options.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/read_options.pb.go new file mode 100644 index 0000000..28c99a2 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/read_options.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/storage/v1beta1/read_options.proto + +package storage // import "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Options dictating how we read a table. +type TableReadOptions struct { + // Optional. Names of the fields in the table that should be read. If empty, + // all fields will be read. If the specified field is a nested field, all the + // sub-fields in the field will be selected. The output field order is + // unrelated to the order of fields in selected_fields. + SelectedFields []string `protobuf:"bytes,1,rep,name=selected_fields,json=selectedFields,proto3" json:"selected_fields,omitempty"` + // Optional. SQL text filtering statement, similar to a WHERE clause in + // a query. Currently, only a single predicate that is a comparison between + // a column and a constant value is supported. Aggregates are not supported. + // + // Examples: "int_field > 5" + // "date_field = CAST('2014-9-27' as DATE)" + // "nullable_field is not NULL" + // "st_equals(geo_field, st_geofromtext("POINT(2, 2)"))" + // "numeric_field BETWEEN 1.0 AND 5.0" + RowRestriction string `protobuf:"bytes,2,opt,name=row_restriction,json=rowRestriction,proto3" json:"row_restriction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableReadOptions) Reset() { *m = TableReadOptions{} } +func (m *TableReadOptions) String() string { return proto.CompactTextString(m) } +func (*TableReadOptions) ProtoMessage() {} +func (*TableReadOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_read_options_931be2a30741eb81, []int{0} +} +func (m *TableReadOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableReadOptions.Unmarshal(m, b) +} +func (m *TableReadOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableReadOptions.Marshal(b, m, deterministic) +} +func (dst *TableReadOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableReadOptions.Merge(dst, src) +} +func (m *TableReadOptions) XXX_Size() int { + return xxx_messageInfo_TableReadOptions.Size(m) +} +func (m *TableReadOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TableReadOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TableReadOptions proto.InternalMessageInfo + +func (m *TableReadOptions) GetSelectedFields() []string { + if m != nil { + return m.SelectedFields + } + return nil +} + +func (m *TableReadOptions) GetRowRestriction() string { + if m != nil { + return m.RowRestriction + } + return "" +} + +func init() { + proto.RegisterType((*TableReadOptions)(nil), "google.cloud.bigquery.storage.v1beta1.TableReadOptions") +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/storage/v1beta1/read_options.proto", 
fileDescriptor_read_options_931be2a30741eb81) +} + +var fileDescriptor_read_options_931be2a30741eb81 = []byte{ + // 213 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd0, 0x3f, 0x4b, 0xc6, 0x30, + 0x10, 0x06, 0x70, 0x5e, 0x05, 0xc1, 0x0c, 0x2a, 0x9d, 0x3a, 0x16, 0x41, 0xd4, 0x25, 0xa1, 0xb8, + 0x08, 0x6e, 0x0e, 0x4e, 0x82, 0x50, 0x9c, 0x5c, 0x4a, 0xfe, 0x9c, 0x47, 0x20, 0xf6, 0xea, 0x25, + 0xb5, 0xf4, 0xdb, 0x4b, 0xd3, 0x14, 0x47, 0xdf, 0x31, 0x4f, 0x9e, 0x5f, 0xc8, 0x9d, 0x78, 0x44, + 0x22, 0x0c, 0xa0, 0x6c, 0xa0, 0xc9, 0x29, 0xe3, 0xf1, 0x7b, 0x02, 0x5e, 0x54, 0x4c, 0xc4, 0x1a, + 0x41, 0xfd, 0xb4, 0x06, 0x92, 0x6e, 0x15, 0x83, 0x76, 0x3d, 0x8d, 0xc9, 0xd3, 0x10, 0xe5, 0xc8, + 0x94, 0xa8, 0xba, 0xd9, 0xa4, 0xcc, 0x52, 0xee, 0x52, 0x16, 0x29, 0x8b, 0xbc, 0x76, 0xe2, 0xea, + 0x5d, 0x9b, 0x00, 0x1d, 0x68, 0xf7, 0xb6, 0x3d, 0x50, 0xdd, 0x8a, 0xcb, 0x08, 0x01, 0x6c, 0x02, + 0xd7, 0x7f, 0x7a, 0x08, 0x2e, 0xd6, 0x87, 0xe6, 0xf4, 0xee, 0xbc, 0xbb, 0xd8, 0xe3, 0x97, 0x9c, + 0xae, 0x45, 0xa6, 0xb9, 0x67, 0x88, 0x89, 0xbd, 0x5d, 0x71, 0x7d, 0xd2, 0x1c, 0xd6, 0x22, 0xd3, + 0xdc, 0xfd, 0xa5, 0xcf, 0x8b, 0xb8, 0xb7, 0xf4, 0x25, 0x8f, 0xfa, 0xd2, 0xc7, 0x6b, 0xa9, 0x21, + 0x05, 0x3d, 0xa0, 0x24, 0x46, 0x85, 0x30, 0xe4, 0xa9, 0xd4, 0x76, 0xa5, 0x47, 0x1f, 0xff, 0x59, + 0xc9, 0x53, 0x39, 0x9b, 0xb3, 0x0c, 0x1f, 0x7e, 0x03, 0x00, 0x00, 0xff, 0xff, 0xba, 0xd5, 0x14, + 0x41, 0x4a, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/storage.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/storage.pb.go new file mode 100644 index 0000000..a446680 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/storage.pb.go @@ -0,0 +1,1327 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/storage/v1beta1/storage.proto + +package storage // import "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Data format for input or output data. +type DataFormat int32 + +const ( + // Data format is unspecified. + DataFormat_DATA_FORMAT_UNSPECIFIED DataFormat = 0 + // Avro is a standard open source row based file format. + // See https://avro.apache.org/ for more details. 
+ DataFormat_AVRO DataFormat = 1 +) + +var DataFormat_name = map[int32]string{ + 0: "DATA_FORMAT_UNSPECIFIED", + 1: "AVRO", +} +var DataFormat_value = map[string]int32{ + "DATA_FORMAT_UNSPECIFIED": 0, + "AVRO": 1, +} + +func (x DataFormat) String() string { + return proto.EnumName(DataFormat_name, int32(x)) +} +func (DataFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{0} +} + +// Information about a single data stream within a read session. +type Stream struct { + // Name of the stream, in the form + // `projects/{project_id}/locations/{location}/streams/{stream_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Rows in the stream. + RowCount int64 `protobuf:"varint,2,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Stream) Reset() { *m = Stream{} } +func (m *Stream) String() string { return proto.CompactTextString(m) } +func (*Stream) ProtoMessage() {} +func (*Stream) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{0} +} +func (m *Stream) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Stream.Unmarshal(m, b) +} +func (m *Stream) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Stream.Marshal(b, m, deterministic) +} +func (dst *Stream) XXX_Merge(src proto.Message) { + xxx_messageInfo_Stream.Merge(dst, src) +} +func (m *Stream) XXX_Size() int { + return xxx_messageInfo_Stream.Size(m) +} +func (m *Stream) XXX_DiscardUnknown() { + xxx_messageInfo_Stream.DiscardUnknown(m) +} + +var xxx_messageInfo_Stream proto.InternalMessageInfo + +func (m *Stream) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Stream) GetRowCount() int64 { + if m != nil { + return m.RowCount + } + return 0 +} + +// Expresses a point within a given stream using an offset position. +type StreamPosition struct { + // Identifier for a given Stream. + Stream *Stream `protobuf:"bytes,1,opt,name=stream,proto3" json:"stream,omitempty"` + // Position in the stream. 
+ Offset int64 `protobuf:"varint,2,opt,name=offset,proto3" json:"offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamPosition) Reset() { *m = StreamPosition{} } +func (m *StreamPosition) String() string { return proto.CompactTextString(m) } +func (*StreamPosition) ProtoMessage() {} +func (*StreamPosition) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{1} +} +func (m *StreamPosition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamPosition.Unmarshal(m, b) +} +func (m *StreamPosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamPosition.Marshal(b, m, deterministic) +} +func (dst *StreamPosition) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamPosition.Merge(dst, src) +} +func (m *StreamPosition) XXX_Size() int { + return xxx_messageInfo_StreamPosition.Size(m) +} +func (m *StreamPosition) XXX_DiscardUnknown() { + xxx_messageInfo_StreamPosition.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamPosition proto.InternalMessageInfo + +func (m *StreamPosition) GetStream() *Stream { + if m != nil { + return m.Stream + } + return nil +} + +func (m *StreamPosition) GetOffset() int64 { + if m != nil { + return m.Offset + } + return 0 +} + +// Information returned from a `CreateReadSession` request. +type ReadSession struct { + // Unique identifier for the session, in the form + // `projects/{project_id}/locations/{location}/sessions/{session_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Time at which the session becomes invalid. After this time, subsequent + // requests to read this Session will return errors. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // The schema for the read. If read_options.selected_fields is set, the + // schema may be different from the table schema as it will only contain + // the selected fields. + // + // Types that are valid to be assigned to Schema: + // *ReadSession_AvroSchema + Schema isReadSession_Schema `protobuf_oneof:"schema"` + // Streams associated with this session. + Streams []*Stream `protobuf:"bytes,4,rep,name=streams,proto3" json:"streams,omitempty"` + // Table that this ReadSession is reading from. + TableReference *TableReference `protobuf:"bytes,7,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"` + // Any modifiers which are applied when reading from the specified table. 
+ TableModifiers *TableModifiers `protobuf:"bytes,8,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadSession) Reset() { *m = ReadSession{} } +func (m *ReadSession) String() string { return proto.CompactTextString(m) } +func (*ReadSession) ProtoMessage() {} +func (*ReadSession) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{2} +} +func (m *ReadSession) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadSession.Unmarshal(m, b) +} +func (m *ReadSession) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadSession.Marshal(b, m, deterministic) +} +func (dst *ReadSession) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadSession.Merge(dst, src) +} +func (m *ReadSession) XXX_Size() int { + return xxx_messageInfo_ReadSession.Size(m) +} +func (m *ReadSession) XXX_DiscardUnknown() { + xxx_messageInfo_ReadSession.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadSession proto.InternalMessageInfo + +func (m *ReadSession) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReadSession) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +type isReadSession_Schema interface { + isReadSession_Schema() +} + +type ReadSession_AvroSchema struct { + AvroSchema *AvroSchema `protobuf:"bytes,5,opt,name=avro_schema,json=avroSchema,proto3,oneof"` +} + +func (*ReadSession_AvroSchema) isReadSession_Schema() {} + +func (m *ReadSession) GetSchema() isReadSession_Schema { + if m != nil { + return m.Schema + } + return nil +} + +func (m *ReadSession) GetAvroSchema() *AvroSchema { + if x, ok := m.GetSchema().(*ReadSession_AvroSchema); ok { + return x.AvroSchema + } + return nil +} + +func (m *ReadSession) GetStreams() []*Stream { + if m != nil { + return m.Streams + } + return nil +} + +func (m *ReadSession) GetTableReference() *TableReference { + if m != nil { + return m.TableReference + } + return nil +} + +func (m *ReadSession) GetTableModifiers() *TableModifiers { + if m != nil { + return m.TableModifiers + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadSession) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadSession_OneofMarshaler, _ReadSession_OneofUnmarshaler, _ReadSession_OneofSizer, []interface{}{ + (*ReadSession_AvroSchema)(nil), + } +} + +func _ReadSession_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadSession) + // schema + switch x := m.Schema.(type) { + case *ReadSession_AvroSchema: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AvroSchema); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ReadSession.Schema has unexpected type %T", x) + } + return nil +} + +func _ReadSession_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadSession) + switch tag { + case 5: // schema.avro_schema + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AvroSchema) + err := b.DecodeMessage(msg) + m.Schema = &ReadSession_AvroSchema{msg} + return true, err + default: + return false, nil + } +} + +func _ReadSession_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadSession) + // schema + switch x := m.Schema.(type) { + case *ReadSession_AvroSchema: + s := proto.Size(x.AvroSchema) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Creates a new read session, which may include additional options such as +// requested parallelism, projection filters and constraints. +type CreateReadSessionRequest struct { + // Required. Reference to the table to read. + TableReference *TableReference `protobuf:"bytes,1,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"` + // Required. String of the form `projects/{project_id}` indicating the + // project this ReadSession is associated with. This is the project that will + // be billed for usage. + Parent string `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Any modifiers to the Table (e.g. snapshot timestamp). + TableModifiers *TableModifiers `protobuf:"bytes,2,opt,name=table_modifiers,json=tableModifiers,proto3" json:"table_modifiers,omitempty"` + // Optional. Initial number of streams. If unset or 0, we will + // provide a value of streams so as to produce reasonable throughput. Must be + // non-negative. The number of streams may be lower than the requested number, + // depending on the amount parallelism that is reasonable for the table and + // the maximum amount of parallelism allowed by the system. + // + // Streams must be read starting from offset 0. + RequestedStreams int32 `protobuf:"varint,3,opt,name=requested_streams,json=requestedStreams,proto3" json:"requested_streams,omitempty"` + // Optional. Read options for this session (e.g. column selection, filters). + ReadOptions *TableReadOptions `protobuf:"bytes,4,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"` + // Data output format. Currently default to Avro. 
+ Format DataFormat `protobuf:"varint,5,opt,name=format,proto3,enum=google.cloud.bigquery.storage.v1beta1.DataFormat" json:"format,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateReadSessionRequest) Reset() { *m = CreateReadSessionRequest{} } +func (m *CreateReadSessionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateReadSessionRequest) ProtoMessage() {} +func (*CreateReadSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{3} +} +func (m *CreateReadSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateReadSessionRequest.Unmarshal(m, b) +} +func (m *CreateReadSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateReadSessionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateReadSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateReadSessionRequest.Merge(dst, src) +} +func (m *CreateReadSessionRequest) XXX_Size() int { + return xxx_messageInfo_CreateReadSessionRequest.Size(m) +} +func (m *CreateReadSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateReadSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateReadSessionRequest proto.InternalMessageInfo + +func (m *CreateReadSessionRequest) GetTableReference() *TableReference { + if m != nil { + return m.TableReference + } + return nil +} + +func (m *CreateReadSessionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateReadSessionRequest) GetTableModifiers() *TableModifiers { + if m != nil { + return m.TableModifiers + } + return nil +} + +func (m *CreateReadSessionRequest) GetRequestedStreams() int32 { + if m != nil { + return m.RequestedStreams + } + return 0 +} + +func (m *CreateReadSessionRequest) GetReadOptions() *TableReadOptions { + if m != nil { + return m.ReadOptions + } + return nil +} + +func (m *CreateReadSessionRequest) GetFormat() DataFormat { + if m != nil { + return m.Format + } + return DataFormat_DATA_FORMAT_UNSPECIFIED +} + +// Requesting row data via `ReadRows` must provide Stream position information. +type ReadRowsRequest struct { + // Required. Identifier of the position in the stream to start reading from. + // The offset requested must be less than the last row read from ReadRows. + // Requesting a larger offset is undefined. 
+ ReadPosition *StreamPosition `protobuf:"bytes,1,opt,name=read_position,json=readPosition,proto3" json:"read_position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsRequest) Reset() { *m = ReadRowsRequest{} } +func (m *ReadRowsRequest) String() string { return proto.CompactTextString(m) } +func (*ReadRowsRequest) ProtoMessage() {} +func (*ReadRowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{4} +} +func (m *ReadRowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsRequest.Unmarshal(m, b) +} +func (m *ReadRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsRequest.Marshal(b, m, deterministic) +} +func (dst *ReadRowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsRequest.Merge(dst, src) +} +func (m *ReadRowsRequest) XXX_Size() int { + return xxx_messageInfo_ReadRowsRequest.Size(m) +} +func (m *ReadRowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsRequest proto.InternalMessageInfo + +func (m *ReadRowsRequest) GetReadPosition() *StreamPosition { + if m != nil { + return m.ReadPosition + } + return nil +} + +// Progress information for a given Stream. +type StreamStatus struct { + // Number of estimated rows in the current stream. May change over time as + // different readers in the stream progress at rates which are relatively fast + // or slow. + EstimatedRowCount int64 `protobuf:"varint,1,opt,name=estimated_row_count,json=estimatedRowCount,proto3" json:"estimated_row_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamStatus) Reset() { *m = StreamStatus{} } +func (m *StreamStatus) String() string { return proto.CompactTextString(m) } +func (*StreamStatus) ProtoMessage() {} +func (*StreamStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{5} +} +func (m *StreamStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamStatus.Unmarshal(m, b) +} +func (m *StreamStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamStatus.Marshal(b, m, deterministic) +} +func (dst *StreamStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamStatus.Merge(dst, src) +} +func (m *StreamStatus) XXX_Size() int { + return xxx_messageInfo_StreamStatus.Size(m) +} +func (m *StreamStatus) XXX_DiscardUnknown() { + xxx_messageInfo_StreamStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamStatus proto.InternalMessageInfo + +func (m *StreamStatus) GetEstimatedRowCount() int64 { + if m != nil { + return m.EstimatedRowCount + } + return 0 +} + +// Information on if the current connection is being throttled. +type ThrottleStatus struct { + // How much this connection is being throttled. + // 0 is no throttling, 100 is completely throttled. 
+ ThrottlePercent int32 `protobuf:"varint,1,opt,name=throttle_percent,json=throttlePercent,proto3" json:"throttle_percent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ThrottleStatus) Reset() { *m = ThrottleStatus{} } +func (m *ThrottleStatus) String() string { return proto.CompactTextString(m) } +func (*ThrottleStatus) ProtoMessage() {} +func (*ThrottleStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{6} +} +func (m *ThrottleStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ThrottleStatus.Unmarshal(m, b) +} +func (m *ThrottleStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ThrottleStatus.Marshal(b, m, deterministic) +} +func (dst *ThrottleStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ThrottleStatus.Merge(dst, src) +} +func (m *ThrottleStatus) XXX_Size() int { + return xxx_messageInfo_ThrottleStatus.Size(m) +} +func (m *ThrottleStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ThrottleStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ThrottleStatus proto.InternalMessageInfo + +func (m *ThrottleStatus) GetThrottlePercent() int32 { + if m != nil { + return m.ThrottlePercent + } + return 0 +} + +// Response from calling `ReadRows` may include row data, progress and +// throttling information. +type ReadRowsResponse struct { + // Row data is returned in format specified during session creation. + // + // Types that are valid to be assigned to Rows: + // *ReadRowsResponse_AvroRows + Rows isReadRowsResponse_Rows `protobuf_oneof:"rows"` + // Estimated stream statistics. + Status *StreamStatus `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + // Throttling status. If unset, the latest response still describes + // the current throttling status. 
+ ThrottleStatus *ThrottleStatus `protobuf:"bytes,5,opt,name=throttle_status,json=throttleStatus,proto3" json:"throttle_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRowsResponse) Reset() { *m = ReadRowsResponse{} } +func (m *ReadRowsResponse) String() string { return proto.CompactTextString(m) } +func (*ReadRowsResponse) ProtoMessage() {} +func (*ReadRowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{7} +} +func (m *ReadRowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRowsResponse.Unmarshal(m, b) +} +func (m *ReadRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRowsResponse.Marshal(b, m, deterministic) +} +func (dst *ReadRowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRowsResponse.Merge(dst, src) +} +func (m *ReadRowsResponse) XXX_Size() int { + return xxx_messageInfo_ReadRowsResponse.Size(m) +} +func (m *ReadRowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRowsResponse proto.InternalMessageInfo + +type isReadRowsResponse_Rows interface { + isReadRowsResponse_Rows() +} + +type ReadRowsResponse_AvroRows struct { + AvroRows *AvroRows `protobuf:"bytes,3,opt,name=avro_rows,json=avroRows,proto3,oneof"` +} + +func (*ReadRowsResponse_AvroRows) isReadRowsResponse_Rows() {} + +func (m *ReadRowsResponse) GetRows() isReadRowsResponse_Rows { + if m != nil { + return m.Rows + } + return nil +} + +func (m *ReadRowsResponse) GetAvroRows() *AvroRows { + if x, ok := m.GetRows().(*ReadRowsResponse_AvroRows); ok { + return x.AvroRows + } + return nil +} + +func (m *ReadRowsResponse) GetStatus() *StreamStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *ReadRowsResponse) GetThrottleStatus() *ThrottleStatus { + if m != nil { + return m.ThrottleStatus + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadRowsResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadRowsResponse_OneofMarshaler, _ReadRowsResponse_OneofUnmarshaler, _ReadRowsResponse_OneofSizer, []interface{}{ + (*ReadRowsResponse_AvroRows)(nil), + } +} + +func _ReadRowsResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadRowsResponse) + // rows + switch x := m.Rows.(type) { + case *ReadRowsResponse_AvroRows: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AvroRows); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ReadRowsResponse.Rows has unexpected type %T", x) + } + return nil +} + +func _ReadRowsResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadRowsResponse) + switch tag { + case 3: // rows.avro_rows + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AvroRows) + err := b.DecodeMessage(msg) + m.Rows = &ReadRowsResponse_AvroRows{msg} + return true, err + default: + return false, nil + } +} + +func _ReadRowsResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadRowsResponse) + // rows + switch x := m.Rows.(type) { + case *ReadRowsResponse_AvroRows: + s := proto.Size(x.AvroRows) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Information needed to request additional streams for an established read +// session. +type BatchCreateReadSessionStreamsRequest struct { + // Required. Must be a non-expired session obtained from a call to + // CreateReadSession. Only the name field needs to be set. + Session *ReadSession `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Required. Number of new streams requested. Must be positive. + // Number of added streams may be less than this, see CreateReadSessionRequest + // for more information. 
+ RequestedStreams int32 `protobuf:"varint,2,opt,name=requested_streams,json=requestedStreams,proto3" json:"requested_streams,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateReadSessionStreamsRequest) Reset() { *m = BatchCreateReadSessionStreamsRequest{} } +func (m *BatchCreateReadSessionStreamsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateReadSessionStreamsRequest) ProtoMessage() {} +func (*BatchCreateReadSessionStreamsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{8} +} +func (m *BatchCreateReadSessionStreamsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateReadSessionStreamsRequest.Unmarshal(m, b) +} +func (m *BatchCreateReadSessionStreamsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateReadSessionStreamsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateReadSessionStreamsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateReadSessionStreamsRequest.Merge(dst, src) +} +func (m *BatchCreateReadSessionStreamsRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateReadSessionStreamsRequest.Size(m) +} +func (m *BatchCreateReadSessionStreamsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateReadSessionStreamsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateReadSessionStreamsRequest proto.InternalMessageInfo + +func (m *BatchCreateReadSessionStreamsRequest) GetSession() *ReadSession { + if m != nil { + return m.Session + } + return nil +} + +func (m *BatchCreateReadSessionStreamsRequest) GetRequestedStreams() int32 { + if m != nil { + return m.RequestedStreams + } + return 0 +} + +// The response from `BatchCreateReadSessionStreams` returns the stream +// identifiers for the newly created streams. +type BatchCreateReadSessionStreamsResponse struct { + // Newly added streams. 
+ Streams []*Stream `protobuf:"bytes,1,rep,name=streams,proto3" json:"streams,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateReadSessionStreamsResponse) Reset() { *m = BatchCreateReadSessionStreamsResponse{} } +func (m *BatchCreateReadSessionStreamsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchCreateReadSessionStreamsResponse) ProtoMessage() {} +func (*BatchCreateReadSessionStreamsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{9} +} +func (m *BatchCreateReadSessionStreamsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateReadSessionStreamsResponse.Unmarshal(m, b) +} +func (m *BatchCreateReadSessionStreamsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateReadSessionStreamsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchCreateReadSessionStreamsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateReadSessionStreamsResponse.Merge(dst, src) +} +func (m *BatchCreateReadSessionStreamsResponse) XXX_Size() int { + return xxx_messageInfo_BatchCreateReadSessionStreamsResponse.Size(m) +} +func (m *BatchCreateReadSessionStreamsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateReadSessionStreamsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateReadSessionStreamsResponse proto.InternalMessageInfo + +func (m *BatchCreateReadSessionStreamsResponse) GetStreams() []*Stream { + if m != nil { + return m.Streams + } + return nil +} + +// Request information for invoking `FinalizeStream`. +type FinalizeStreamRequest struct { + // Stream to finalize. + Stream *Stream `protobuf:"bytes,2,opt,name=stream,proto3" json:"stream,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinalizeStreamRequest) Reset() { *m = FinalizeStreamRequest{} } +func (m *FinalizeStreamRequest) String() string { return proto.CompactTextString(m) } +func (*FinalizeStreamRequest) ProtoMessage() {} +func (*FinalizeStreamRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{10} +} +func (m *FinalizeStreamRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinalizeStreamRequest.Unmarshal(m, b) +} +func (m *FinalizeStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinalizeStreamRequest.Marshal(b, m, deterministic) +} +func (dst *FinalizeStreamRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinalizeStreamRequest.Merge(dst, src) +} +func (m *FinalizeStreamRequest) XXX_Size() int { + return xxx_messageInfo_FinalizeStreamRequest.Size(m) +} +func (m *FinalizeStreamRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FinalizeStreamRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FinalizeStreamRequest proto.InternalMessageInfo + +func (m *FinalizeStreamRequest) GetStream() *Stream { + if m != nil { + return m.Stream + } + return nil +} + +// Request information for `SplitReadStream`. +type SplitReadStreamRequest struct { + // Stream to split. 
+ OriginalStream *Stream `protobuf:"bytes,1,opt,name=original_stream,json=originalStream,proto3" json:"original_stream,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SplitReadStreamRequest) Reset() { *m = SplitReadStreamRequest{} } +func (m *SplitReadStreamRequest) String() string { return proto.CompactTextString(m) } +func (*SplitReadStreamRequest) ProtoMessage() {} +func (*SplitReadStreamRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{11} +} +func (m *SplitReadStreamRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SplitReadStreamRequest.Unmarshal(m, b) +} +func (m *SplitReadStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SplitReadStreamRequest.Marshal(b, m, deterministic) +} +func (dst *SplitReadStreamRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SplitReadStreamRequest.Merge(dst, src) +} +func (m *SplitReadStreamRequest) XXX_Size() int { + return xxx_messageInfo_SplitReadStreamRequest.Size(m) +} +func (m *SplitReadStreamRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SplitReadStreamRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SplitReadStreamRequest proto.InternalMessageInfo + +func (m *SplitReadStreamRequest) GetOriginalStream() *Stream { + if m != nil { + return m.OriginalStream + } + return nil +} + +// Response from `SplitReadStream`. +type SplitReadStreamResponse struct { + // Primary stream. Will contain the beginning portion of + // |original_stream|. + PrimaryStream *Stream `protobuf:"bytes,1,opt,name=primary_stream,json=primaryStream,proto3" json:"primary_stream,omitempty"` + // Remainder stream. Will contain the tail of |original_stream|. 
+ RemainderStream *Stream `protobuf:"bytes,2,opt,name=remainder_stream,json=remainderStream,proto3" json:"remainder_stream,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SplitReadStreamResponse) Reset() { *m = SplitReadStreamResponse{} } +func (m *SplitReadStreamResponse) String() string { return proto.CompactTextString(m) } +func (*SplitReadStreamResponse) ProtoMessage() {} +func (*SplitReadStreamResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_ba3968bfc61218b8, []int{12} +} +func (m *SplitReadStreamResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SplitReadStreamResponse.Unmarshal(m, b) +} +func (m *SplitReadStreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SplitReadStreamResponse.Marshal(b, m, deterministic) +} +func (dst *SplitReadStreamResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SplitReadStreamResponse.Merge(dst, src) +} +func (m *SplitReadStreamResponse) XXX_Size() int { + return xxx_messageInfo_SplitReadStreamResponse.Size(m) +} +func (m *SplitReadStreamResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SplitReadStreamResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SplitReadStreamResponse proto.InternalMessageInfo + +func (m *SplitReadStreamResponse) GetPrimaryStream() *Stream { + if m != nil { + return m.PrimaryStream + } + return nil +} + +func (m *SplitReadStreamResponse) GetRemainderStream() *Stream { + if m != nil { + return m.RemainderStream + } + return nil +} + +func init() { + proto.RegisterType((*Stream)(nil), "google.cloud.bigquery.storage.v1beta1.Stream") + proto.RegisterType((*StreamPosition)(nil), "google.cloud.bigquery.storage.v1beta1.StreamPosition") + proto.RegisterType((*ReadSession)(nil), "google.cloud.bigquery.storage.v1beta1.ReadSession") + proto.RegisterType((*CreateReadSessionRequest)(nil), "google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest") + proto.RegisterType((*ReadRowsRequest)(nil), "google.cloud.bigquery.storage.v1beta1.ReadRowsRequest") + proto.RegisterType((*StreamStatus)(nil), "google.cloud.bigquery.storage.v1beta1.StreamStatus") + proto.RegisterType((*ThrottleStatus)(nil), "google.cloud.bigquery.storage.v1beta1.ThrottleStatus") + proto.RegisterType((*ReadRowsResponse)(nil), "google.cloud.bigquery.storage.v1beta1.ReadRowsResponse") + proto.RegisterType((*BatchCreateReadSessionStreamsRequest)(nil), "google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest") + proto.RegisterType((*BatchCreateReadSessionStreamsResponse)(nil), "google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse") + proto.RegisterType((*FinalizeStreamRequest)(nil), "google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest") + proto.RegisterType((*SplitReadStreamRequest)(nil), "google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest") + proto.RegisterType((*SplitReadStreamResponse)(nil), "google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse") + proto.RegisterEnum("google.cloud.bigquery.storage.v1beta1.DataFormat", DataFormat_name, DataFormat_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// BigQueryStorageClient is the client API for BigQueryStorage service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BigQueryStorageClient interface { + // Creates a new read session. A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. + // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Read sessions automatically expire 24 hours after they are created and do + // not require manual clean-up by the caller. + CreateReadSession(ctx context.Context, in *CreateReadSessionRequest, opts ...grpc.CallOption) (*ReadSession, error) + // Reads rows from the table in the format prescribed by the read session. + // Each response contains one or more table rows, up to a maximum of 10 MiB + // per response; read requests which attempt to read individual rows larger + // than this will fail. + // + // Each request also returns a set of stream statistics reflecting the + // estimated total number of rows in the read stream. This number is computed + // based on the total table size and the number of active streams in the read + // session, and may change as other streams continue to read data. + ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigQueryStorage_ReadRowsClient, error) + // Creates additional streams for a ReadSession. This API can be used to + // dynamically adjust the parallelism of a batch processing task upwards by + // adding additional workers. + BatchCreateReadSessionStreams(ctx context.Context, in *BatchCreateReadSessionStreamsRequest, opts ...grpc.CallOption) (*BatchCreateReadSessionStreamsResponse, error) + // Triggers the graceful termination of a single stream in a ReadSession. This + // API can be used to dynamically adjust the parallelism of a batch processing + // task downwards without losing data. + // + // This API does not delete the stream -- it remains visible in the + // ReadSession, and any data processed by the stream is not released to other + // streams. However, no additional data will be assigned to the stream once + // this call completes. Callers must continue reading data on the stream until + // the end of the stream is reached so that data which has already been + // assigned to the stream will be processed. + // + // This method will return an error if there are no other live streams + // in the Session, or if SplitReadStream() has been called on the given + // Stream. + FinalizeStream(ctx context.Context, in *FinalizeStreamRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Splits a given read stream into two Streams. These streams are referred to + // as the primary and the residual of the split. The original stream can still + // be read from in the same manner as before. Both of the returned streams can + // also be read from, and the total rows return by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back to back in the + // original Stream. 
Concretely, it is guaranteed that for streams Original, + // Primary, and Residual, that Original[0-j] = Primary[0-j] and + // Original[j-n] = Residual[0-m] once the streams have been read to + // completion. + // + // This method is guaranteed to be idempotent. + SplitReadStream(ctx context.Context, in *SplitReadStreamRequest, opts ...grpc.CallOption) (*SplitReadStreamResponse, error) +} + +type bigQueryStorageClient struct { + cc *grpc.ClientConn +} + +func NewBigQueryStorageClient(cc *grpc.ClientConn) BigQueryStorageClient { + return &bigQueryStorageClient{cc} +} + +func (c *bigQueryStorageClient) CreateReadSession(ctx context.Context, in *CreateReadSessionRequest, opts ...grpc.CallOption) (*ReadSession, error) { + out := new(ReadSession) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigQueryStorageClient) ReadRows(ctx context.Context, in *ReadRowsRequest, opts ...grpc.CallOption) (BigQueryStorage_ReadRowsClient, error) { + stream, err := c.cc.NewStream(ctx, &_BigQueryStorage_serviceDesc.Streams[0], "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/ReadRows", opts...) + if err != nil { + return nil, err + } + x := &bigQueryStorageReadRowsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type BigQueryStorage_ReadRowsClient interface { + Recv() (*ReadRowsResponse, error) + grpc.ClientStream +} + +type bigQueryStorageReadRowsClient struct { + grpc.ClientStream +} + +func (x *bigQueryStorageReadRowsClient) Recv() (*ReadRowsResponse, error) { + m := new(ReadRowsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *bigQueryStorageClient) BatchCreateReadSessionStreams(ctx context.Context, in *BatchCreateReadSessionStreamsRequest, opts ...grpc.CallOption) (*BatchCreateReadSessionStreamsResponse, error) { + out := new(BatchCreateReadSessionStreamsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigQueryStorageClient) FinalizeStream(ctx context.Context, in *FinalizeStreamRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *bigQueryStorageClient) SplitReadStream(ctx context.Context, in *SplitReadStreamRequest, opts ...grpc.CallOption) (*SplitReadStreamResponse, error) { + out := new(SplitReadStreamResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BigQueryStorageServer is the server API for BigQueryStorage service. +type BigQueryStorageServer interface { + // Creates a new read session. A read session divides the contents of a + // BigQuery table into one or more streams, which can then be used to read + // data from the table. The read session also specifies properties of the + // data to be read, such as a list of columns or a push-down filter describing + // the rows to be returned. 
+ // + // A particular row can be read by at most one stream. When the caller has + // reached the end of each stream in the session, then all the data in the + // table has been read. + // + // Read sessions automatically expire 24 hours after they are created and do + // not require manual clean-up by the caller. + CreateReadSession(context.Context, *CreateReadSessionRequest) (*ReadSession, error) + // Reads rows from the table in the format prescribed by the read session. + // Each response contains one or more table rows, up to a maximum of 10 MiB + // per response; read requests which attempt to read individual rows larger + // than this will fail. + // + // Each request also returns a set of stream statistics reflecting the + // estimated total number of rows in the read stream. This number is computed + // based on the total table size and the number of active streams in the read + // session, and may change as other streams continue to read data. + ReadRows(*ReadRowsRequest, BigQueryStorage_ReadRowsServer) error + // Creates additional streams for a ReadSession. This API can be used to + // dynamically adjust the parallelism of a batch processing task upwards by + // adding additional workers. + BatchCreateReadSessionStreams(context.Context, *BatchCreateReadSessionStreamsRequest) (*BatchCreateReadSessionStreamsResponse, error) + // Triggers the graceful termination of a single stream in a ReadSession. This + // API can be used to dynamically adjust the parallelism of a batch processing + // task downwards without losing data. + // + // This API does not delete the stream -- it remains visible in the + // ReadSession, and any data processed by the stream is not released to other + // streams. However, no additional data will be assigned to the stream once + // this call completes. Callers must continue reading data on the stream until + // the end of the stream is reached so that data which has already been + // assigned to the stream will be processed. + // + // This method will return an error if there are no other live streams + // in the Session, or if SplitReadStream() has been called on the given + // Stream. + FinalizeStream(context.Context, *FinalizeStreamRequest) (*empty.Empty, error) + // Splits a given read stream into two Streams. These streams are referred to + // as the primary and the residual of the split. The original stream can still + // be read from in the same manner as before. Both of the returned streams can + // also be read from, and the total rows return by both child streams will be + // the same as the rows read from the original stream. + // + // Moreover, the two child streams will be allocated back to back in the + // original Stream. Concretely, it is guaranteed that for streams Original, + // Primary, and Residual, that Original[0-j] = Primary[0-j] and + // Original[j-n] = Residual[0-m] once the streams have been read to + // completion. + // + // This method is guaranteed to be idempotent. 
+ SplitReadStream(context.Context, *SplitReadStreamRequest) (*SplitReadStreamResponse, error) +} + +func RegisterBigQueryStorageServer(s *grpc.Server, srv BigQueryStorageServer) { + s.RegisterService(&_BigQueryStorage_serviceDesc, srv) +} + +func _BigQueryStorage_CreateReadSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateReadSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigQueryStorageServer).CreateReadSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/CreateReadSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigQueryStorageServer).CreateReadSession(ctx, req.(*CreateReadSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigQueryStorage_ReadRows_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadRowsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(BigQueryStorageServer).ReadRows(m, &bigQueryStorageReadRowsServer{stream}) +} + +type BigQueryStorage_ReadRowsServer interface { + Send(*ReadRowsResponse) error + grpc.ServerStream +} + +type bigQueryStorageReadRowsServer struct { + grpc.ServerStream +} + +func (x *bigQueryStorageReadRowsServer) Send(m *ReadRowsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _BigQueryStorage_BatchCreateReadSessionStreams_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateReadSessionStreamsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigQueryStorageServer).BatchCreateReadSessionStreams(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/BatchCreateReadSessionStreams", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigQueryStorageServer).BatchCreateReadSessionStreams(ctx, req.(*BatchCreateReadSessionStreamsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigQueryStorage_FinalizeStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FinalizeStreamRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigQueryStorageServer).FinalizeStream(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/FinalizeStream", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigQueryStorageServer).FinalizeStream(ctx, req.(*FinalizeStreamRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BigQueryStorage_SplitReadStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SplitReadStreamRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BigQueryStorageServer).SplitReadStream(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.bigquery.storage.v1beta1.BigQueryStorage/SplitReadStream", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BigQueryStorageServer).SplitReadStream(ctx, req.(*SplitReadStreamRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BigQueryStorage_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.bigquery.storage.v1beta1.BigQueryStorage", + HandlerType: (*BigQueryStorageServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateReadSession", + Handler: _BigQueryStorage_CreateReadSession_Handler, + }, + { + MethodName: "BatchCreateReadSessionStreams", + Handler: _BigQueryStorage_BatchCreateReadSessionStreams_Handler, + }, + { + MethodName: "FinalizeStream", + Handler: _BigQueryStorage_FinalizeStream_Handler, + }, + { + MethodName: "SplitReadStream", + Handler: _BigQueryStorage_SplitReadStream_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ReadRows", + Handler: _BigQueryStorage_ReadRows_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/cloud/bigquery/storage/v1beta1/storage.proto", +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/storage/v1beta1/storage.proto", fileDescriptor_storage_ba3968bfc61218b8) +} + +var fileDescriptor_storage_ba3968bfc61218b8 = []byte{ + // 1160 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0xee, 0x3a, 0x89, 0xeb, 0x1e, 0xb7, 0xb6, 0x33, 0x88, 0xd4, 0x38, 0x20, 0xa2, 0x15, 0x95, + 0xd2, 0x54, 0xdd, 0x4d, 0x1c, 0x48, 0x5b, 0xdc, 0x16, 0x9c, 0x3f, 0x1a, 0x35, 0x6d, 0xc2, 0xda, + 0x54, 0x28, 0x17, 0x5d, 0x4d, 0xec, 0xb1, 0xb3, 0xc8, 0xbb, 0xb3, 0x9d, 0x19, 0x27, 0x04, 0x94, + 0x1b, 0x6e, 0x78, 0x00, 0x2e, 0xb8, 0x43, 0xe2, 0x4d, 0xb8, 0xa2, 0xe2, 0x86, 0x1b, 0x5e, 0x81, + 0x47, 0xe0, 0x01, 0xd0, 0xce, 0xcc, 0xfa, 0x27, 0x71, 0x9b, 0x75, 0x93, 0x3b, 0x9f, 0x99, 0xfd, + 0xbe, 0x39, 0xdf, 0x39, 0x67, 0xbf, 0x59, 0xc3, 0x72, 0x9b, 0xd2, 0x76, 0x87, 0xd8, 0x8d, 0x0e, + 0xed, 0x36, 0xed, 0x7d, 0xaf, 0xfd, 0xaa, 0x4b, 0xd8, 0xb1, 0xcd, 0x05, 0x65, 0xb8, 0x4d, 0xec, + 0xc3, 0xa5, 0x7d, 0x22, 0xf0, 0x52, 0x1c, 0x5b, 0x21, 0xa3, 0x82, 0xa2, 0x5b, 0x0a, 0x64, 0x49, + 0x90, 0x15, 0x83, 0xac, 0xf8, 0x21, 0x0d, 0x2a, 0x7d, 0xa8, 0xb9, 0x71, 0xe8, 0xd9, 0x38, 0x08, + 0xa8, 0xc0, 0xc2, 0xa3, 0x01, 0x57, 0x24, 0xa5, 0x0f, 0x06, 0x76, 0x19, 0xe1, 0xb4, 0xcb, 0x1a, + 0x9a, 0xbf, 0xb4, 0x98, 0x2c, 0x29, 0x7c, 0xc8, 0xa8, 0x46, 0xdc, 0x4f, 0x86, 0x60, 0x04, 0x37, + 0x5d, 0x1a, 0x0e, 0xa6, 0x51, 0x49, 0x86, 0x14, 0x78, 0xbf, 0x43, 0x5c, 0x46, 0x5a, 0x84, 0x91, + 0xa0, 0x97, 0xe8, 0xac, 0x06, 0xcb, 0x68, 0xbf, 0xdb, 0xb2, 0x89, 0x1f, 0x8a, 0x63, 0xbd, 0xf9, + 0xf1, 0xe9, 0x4d, 0xe1, 0xf9, 0x84, 0x0b, 0xec, 0x87, 0xea, 0x01, 0xf3, 0x01, 0xa4, 0x6b, 0x82, + 0x11, 0xec, 0x23, 0x04, 0x93, 0x01, 0xf6, 0x49, 0xd1, 0x98, 0x33, 0xe6, 0xaf, 0x39, 0xf2, 0x37, + 0x9a, 0x85, 0x6b, 0x8c, 0x1e, 0xb9, 0x0d, 0xda, 0x0d, 0x44, 0x31, 0x35, 0x67, 0xcc, 0x4f, 0x38, + 0x19, 0x46, 0x8f, 0xd6, 0xa2, 0xd8, 0xa4, 0x90, 0x53, 0xd0, 0x5d, 0xca, 0xbd, 0x48, 0x0e, 0xda, + 0x80, 0x34, 0x97, 0x2b, 0x92, 0x24, 0x5b, 0xbe, 0x6b, 0x25, 0x6a, 0x92, 0xa5, 0x68, 0x1c, 0x0d, + 0x46, 0x33, 0x90, 0xa6, 0xad, 0x16, 0x27, 0xf1, 0x91, 0x3a, 0x32, 0x5f, 0x4f, 0x40, 0xd6, 0x21, + 0xb8, 0x59, 0x23, 0x9c, 0x47, 0xc7, 0x8d, 0xca, 0xb8, 0x02, 0x59, 0xf2, 0x7d, 0xe8, 0x31, 0xe2, + 0x46, 0x4a, 0x25, 0x41, 0xb6, 0x5c, 0x8a, 0xf3, 0x88, 0xcb, 0x60, 0xd5, 0xe3, 0x32, 0x38, 0xa0, + 0x1e, 0x8f, 0x16, 0x50, 
0x1d, 0xb2, 0x51, 0x3f, 0x5d, 0xde, 0x38, 0x20, 0x3e, 0x2e, 0x4e, 0x49, + 0xf0, 0x52, 0x42, 0x11, 0xd5, 0x43, 0x46, 0x6b, 0x12, 0xf8, 0xe4, 0x8a, 0x03, 0xb8, 0x17, 0xa1, + 0xaf, 0xe0, 0xaa, 0x12, 0xc6, 0x8b, 0x93, 0x73, 0x13, 0xe3, 0x97, 0x25, 0x46, 0xa3, 0x97, 0x90, + 0x3f, 0x35, 0x02, 0xc5, 0xab, 0x32, 0xc5, 0xcf, 0x12, 0x12, 0xd6, 0x23, 0xb4, 0x13, 0x83, 0x9d, + 0x9c, 0x18, 0x8a, 0xfb, 0xfc, 0x3e, 0x6d, 0x7a, 0x2d, 0x8f, 0x30, 0x5e, 0xcc, 0x8c, 0xcf, 0xff, + 0x2c, 0x06, 0x6b, 0xfe, 0x5e, 0xbc, 0x9a, 0x81, 0xb4, 0xaa, 0xac, 0xf9, 0xe7, 0x04, 0x14, 0xd7, + 0x18, 0xc1, 0x82, 0x0c, 0xf4, 0xd3, 0x21, 0xaf, 0xba, 0x84, 0x8b, 0x51, 0x32, 0x8d, 0xcb, 0x94, + 0x39, 0x03, 0xe9, 0x10, 0x33, 0x12, 0x88, 0x62, 0x5a, 0x0e, 0x8e, 0x8e, 0x46, 0xc9, 0x4f, 0x5d, + 0xa2, 0x7c, 0x74, 0x07, 0xa6, 0x99, 0x92, 0x48, 0x9a, 0x6e, 0x3c, 0x11, 0x13, 0x73, 0xc6, 0xfc, + 0x94, 0x53, 0xe8, 0x6d, 0xd4, 0x74, 0xaf, 0xf7, 0xe0, 0xfa, 0xa0, 0x51, 0x14, 0x27, 0x65, 0x26, + 0xf7, 0xc6, 0xab, 0x00, 0x6e, 0xee, 0x28, 0xb8, 0x93, 0x65, 0xfd, 0x00, 0x6d, 0x41, 0xba, 0x45, + 0x99, 0x8f, 0x85, 0x9c, 0xf0, 0x5c, 0xe2, 0x09, 0x5f, 0xc7, 0x02, 0x6f, 0x4a, 0xa0, 0xa3, 0x09, + 0x4c, 0x1f, 0xf2, 0xd1, 0x31, 0x0e, 0x3d, 0xe2, 0x71, 0xfb, 0xf6, 0xe0, 0x86, 0xcc, 0x3c, 0xd4, + 0xae, 0x30, 0x66, 0xf3, 0x86, 0x2d, 0xc5, 0x91, 0x55, 0x88, 0x23, 0xf3, 0x31, 0x5c, 0x57, 0xfb, + 0x35, 0x81, 0x45, 0x97, 0x23, 0x0b, 0xde, 0x23, 0x5c, 0x78, 0x3e, 0x8e, 0x4a, 0xda, 0x77, 0x2a, + 0x43, 0xda, 0xc6, 0x74, 0x6f, 0xcb, 0x89, 0x2d, 0xab, 0x02, 0xb9, 0xfa, 0x01, 0xa3, 0x42, 0x74, + 0x88, 0x66, 0xb8, 0x0d, 0x05, 0xa1, 0x57, 0xdc, 0x90, 0xb0, 0x06, 0xd1, 0xf0, 0x29, 0x27, 0x1f, + 0xaf, 0xef, 0xaa, 0x65, 0xf3, 0xd7, 0x14, 0x14, 0xfa, 0x62, 0x79, 0x48, 0x03, 0x4e, 0xd0, 0x73, + 0xb8, 0x26, 0x2d, 0x83, 0xd1, 0x23, 0xd5, 0xcc, 0x6c, 0xd9, 0x1e, 0xc3, 0x30, 0x22, 0xae, 0x27, + 0x57, 0x9c, 0x0c, 0xd6, 0xbf, 0xd1, 0xd3, 0xc8, 0x42, 0xa3, 0xcc, 0xf4, 0xec, 0x2d, 0x8f, 0x55, + 0x36, 0x25, 0xca, 0xd1, 0x14, 0x72, 0xa2, 0x63, 0x71, 0x9a, 0x75, 0x6a, 0xbc, 0x89, 0x1e, 0x2a, + 0x96, 0x93, 0x13, 0x43, 0xf1, 0x6a, 0x1a, 0x26, 0x23, 0xdd, 0xe6, 0xef, 0x06, 0x7c, 0xb2, 0x8a, + 0x45, 0xe3, 0xe0, 0xcc, 0x3b, 0xad, 0xc7, 0x39, 0x9e, 0x8d, 0x6d, 0xb8, 0xca, 0xd5, 0x86, 0x9e, + 0x8a, 0x72, 0xc2, 0x44, 0x06, 0x6d, 0x22, 0xa6, 0x18, 0xfd, 0x42, 0xa5, 0x46, 0xbf, 0x50, 0x66, + 0x08, 0xb7, 0xce, 0x49, 0x51, 0x77, 0x74, 0xc0, 0xae, 0x8d, 0x8b, 0xd8, 0xb5, 0xf9, 0x12, 0xde, + 0xdf, 0xf4, 0x02, 0xdc, 0xf1, 0x7e, 0x20, 0x7a, 0x4b, 0x57, 0xa1, 0x7f, 0x4d, 0xa6, 0x2e, 0x70, + 0x4d, 0x9a, 0x21, 0xcc, 0xd4, 0xc2, 0x8e, 0x27, 0xa4, 0x96, 0xa1, 0x03, 0x5e, 0x40, 0x9e, 0x32, + 0xaf, 0x1d, 0x1d, 0xee, 0x5e, 0xe4, 0x42, 0xce, 0xc5, 0x2c, 0x2a, 0x36, 0xff, 0x32, 0xe0, 0xe6, + 0x99, 0x23, 0x75, 0xd9, 0xea, 0x90, 0x0b, 0x99, 0xe7, 0x63, 0x76, 0x7c, 0xa1, 0x23, 0x6f, 0x68, + 0x12, 0xfd, 0x51, 0xf2, 0x2d, 0x14, 0x18, 0xf1, 0xb1, 0x17, 0x34, 0x09, 0x73, 0x2f, 0x52, 0xb4, + 0x7c, 0x8f, 0x46, 0x2d, 0x2c, 0x2c, 0x03, 0xf4, 0xfd, 0x0c, 0xcd, 0xc2, 0xcd, 0xf5, 0x6a, 0xbd, + 0xea, 0x6e, 0xee, 0x38, 0xcf, 0xaa, 0x75, 0xf7, 0x9b, 0xe7, 0xb5, 0xdd, 0x8d, 0xb5, 0xad, 0xcd, + 0xad, 0x8d, 0xf5, 0xc2, 0x15, 0x94, 0x81, 0xc9, 0xea, 0x0b, 0x67, 0xa7, 0x60, 0x94, 0x5f, 0x67, + 0x20, 0xbf, 0xea, 0xb5, 0xbf, 0x8e, 0x4e, 0xaa, 0xa9, 0x83, 0xd0, 0xcf, 0x29, 0x98, 0x3e, 0x33, + 0x54, 0xe8, 0x8b, 0x84, 0xe9, 0xbd, 0xe9, 0x16, 0x2c, 0xbd, 0xc3, 0x9b, 0x61, 0x9e, 0xfc, 0xf4, + 0xcf, 0xbf, 0xbf, 0xa4, 0x8e, 0xcc, 0xc5, 0xde, 0x27, 0xe3, 0x8f, 0x23, 0xbe, 0x19, 0xbf, 0x23, + 0x0d, 0xe1, 0x7a, 0xcd, 0x47, 0xfa, 0x27, 0xb7, 
0x17, 0x4e, 0x3e, 0x37, 0x16, 0xf6, 0xbe, 0x34, + 0x2b, 0x6f, 0x86, 0x35, 0xb1, 0xc0, 0x9c, 0x9c, 0x82, 0xd9, 0x7a, 0x55, 0x33, 0xa0, 0x3f, 0x0c, + 0xc8, 0xc4, 0x06, 0x89, 0x56, 0xc6, 0xc8, 0x7f, 0xe0, 0xfa, 0x28, 0xdd, 0x1b, 0x1b, 0xa7, 0x06, + 0xd0, 0xac, 0x4a, 0xf1, 0x15, 0xf4, 0xa0, 0xaf, 0x62, 0xe8, 0x1e, 0xb2, 0xd4, 0xf8, 0x58, 0xd1, + 0x67, 0xe2, 0xa0, 0x08, 0xfd, 0xba, 0xda, 0x0b, 0x27, 0x8b, 0x06, 0xfa, 0xcf, 0x80, 0x8f, 0xde, + 0x6a, 0x13, 0xe8, 0x69, 0xc2, 0xfc, 0x92, 0xf8, 0x61, 0x69, 0xfb, 0x72, 0xc8, 0x74, 0x05, 0xee, + 0xcb, 0x0a, 0x94, 0xcd, 0xbb, 0xfd, 0x0a, 0x68, 0xab, 0x3c, 0x2b, 0x5a, 0xad, 0xc7, 0x9d, 0xfb, + 0xcd, 0x80, 0xdc, 0xb0, 0x57, 0xa1, 0x87, 0x09, 0x53, 0x1b, 0x69, 0x71, 0xa5, 0x99, 0x33, 0x5f, + 0xdc, 0x1b, 0xd1, 0xbf, 0x12, 0x73, 0x45, 0xa6, 0xb8, 0x68, 0xde, 0x19, 0x48, 0xf1, 0xbc, 0xb6, + 0x44, 0x09, 0xfe, 0x6d, 0x40, 0xfe, 0x94, 0xf3, 0xa0, 0x47, 0x49, 0x1d, 0x60, 0xa4, 0x49, 0x96, + 0x1e, 0xbf, 0x2b, 0x5c, 0x57, 0xfb, 0xa1, 0x94, 0xb2, 0x82, 0x3e, 0xed, 0x4b, 0x39, 0x65, 0xba, + 0x6f, 0xd1, 0xb4, 0x7a, 0x0c, 0xb7, 0x1b, 0xd4, 0x4f, 0x96, 0xc2, 0xde, 0xb6, 0x7e, 0xac, 0x4d, + 0x3b, 0x38, 0x68, 0x5b, 0x94, 0xb5, 0xed, 0x36, 0x09, 0x64, 0x69, 0x6d, 0xb5, 0x85, 0x43, 0x8f, + 0x9f, 0xf3, 0xf7, 0xb1, 0xa2, 0xe3, 0xfd, 0xb4, 0x04, 0x2e, 0xff, 0x1f, 0x00, 0x00, 0xff, 0xff, + 0xca, 0xfc, 0x5f, 0xb4, 0x77, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/table_reference.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/table_reference.pb.go new file mode 100644 index 0000000..07e45bd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1/table_reference.pb.go @@ -0,0 +1,151 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/storage/v1beta1/table_reference.proto + +package storage // import "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Table reference that includes just the 3 strings needed to identify a table. +type TableReference struct { + // The assigned project ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The ID of the dataset in the above project. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The ID of the table in the above dataset. 
+ TableId string `protobuf:"bytes,3,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableReference) Reset() { *m = TableReference{} } +func (m *TableReference) String() string { return proto.CompactTextString(m) } +func (*TableReference) ProtoMessage() {} +func (*TableReference) Descriptor() ([]byte, []int) { + return fileDescriptor_table_reference_d7a619381c8a4dbe, []int{0} +} +func (m *TableReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableReference.Unmarshal(m, b) +} +func (m *TableReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableReference.Marshal(b, m, deterministic) +} +func (dst *TableReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableReference.Merge(dst, src) +} +func (m *TableReference) XXX_Size() int { + return xxx_messageInfo_TableReference.Size(m) +} +func (m *TableReference) XXX_DiscardUnknown() { + xxx_messageInfo_TableReference.DiscardUnknown(m) +} + +var xxx_messageInfo_TableReference proto.InternalMessageInfo + +func (m *TableReference) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *TableReference) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *TableReference) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +// All fields in this message optional. +type TableModifiers struct { + // The snapshot time of the table. If not set, interpreted as now. + SnapshotTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=snapshot_time,json=snapshotTime,proto3" json:"snapshot_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableModifiers) Reset() { *m = TableModifiers{} } +func (m *TableModifiers) String() string { return proto.CompactTextString(m) } +func (*TableModifiers) ProtoMessage() {} +func (*TableModifiers) Descriptor() ([]byte, []int) { + return fileDescriptor_table_reference_d7a619381c8a4dbe, []int{1} +} +func (m *TableModifiers) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableModifiers.Unmarshal(m, b) +} +func (m *TableModifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableModifiers.Marshal(b, m, deterministic) +} +func (dst *TableModifiers) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableModifiers.Merge(dst, src) +} +func (m *TableModifiers) XXX_Size() int { + return xxx_messageInfo_TableModifiers.Size(m) +} +func (m *TableModifiers) XXX_DiscardUnknown() { + xxx_messageInfo_TableModifiers.DiscardUnknown(m) +} + +var xxx_messageInfo_TableModifiers proto.InternalMessageInfo + +func (m *TableModifiers) GetSnapshotTime() *timestamp.Timestamp { + if m != nil { + return m.SnapshotTime + } + return nil +} + +func init() { + proto.RegisterType((*TableReference)(nil), "google.cloud.bigquery.storage.v1beta1.TableReference") + proto.RegisterType((*TableModifiers)(nil), "google.cloud.bigquery.storage.v1beta1.TableModifiers") +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/storage/v1beta1/table_reference.proto", fileDescriptor_table_reference_d7a619381c8a4dbe) +} + +var fileDescriptor_table_reference_d7a619381c8a4dbe = []byte{ + // 293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0x4d, 0x4b, 0xc4, 
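// Editorial sketch (not part of this vendored, generated file): a minimal round trip
// of the TableReference message defined above through the proto runtime this file
// already imports. roundTripTableReference and the identifier values are invented;
// TableModifiers would be populated the same way, with a *timestamp.Timestamp for
// its SnapshotTime field.
//
//	func roundTripTableReference() (*TableReference, error) {
//		ref := &TableReference{
//			ProjectId: "my-project",
//			DatasetId: "my_dataset",
//			TableId:   "my_table",
//		}
//		buf, err := proto.Marshal(ref) // wire-format encoding
//		if err != nil {
//			return nil, err
//		}
//		out := &TableReference{}
//		err = proto.Unmarshal(buf, out) // decode back into a fresh message
//		return out, err
//	}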
0x30, + 0x10, 0x86, 0x59, 0x05, 0x75, 0xe3, 0xc7, 0xa1, 0x5e, 0x76, 0x0b, 0xa2, 0x2c, 0x08, 0x7a, 0x49, + 0x58, 0x3d, 0xee, 0x41, 0xd8, 0x5b, 0x41, 0x41, 0xcb, 0x9e, 0xbc, 0x94, 0xb4, 0x99, 0xc6, 0x48, + 0xdb, 0x89, 0x49, 0x2a, 0xf8, 0x27, 0xfc, 0xcd, 0xd2, 0x7c, 0x1c, 0x3c, 0xe9, 0x71, 0xe6, 0xed, + 0x33, 0xcf, 0x4c, 0x43, 0x36, 0x12, 0x51, 0x76, 0xc0, 0x9a, 0x0e, 0x47, 0xc1, 0x6a, 0x25, 0x3f, + 0x46, 0x30, 0x5f, 0xcc, 0x3a, 0x34, 0x5c, 0x02, 0xfb, 0x5c, 0xd7, 0xe0, 0xf8, 0x9a, 0x39, 0x5e, + 0x77, 0x50, 0x19, 0x68, 0xc1, 0xc0, 0xd0, 0x00, 0xd5, 0x06, 0x1d, 0x66, 0xd7, 0x01, 0xa6, 0x1e, + 0xa6, 0x09, 0xa6, 0x11, 0xa6, 0x11, 0xce, 0x97, 0xd1, 0xc1, 0xb5, 0x62, 0x06, 0x2c, 0x8e, 0x26, + 0x4d, 0xc8, 0x2f, 0x63, 0xe4, 0xab, 0x7a, 0x6c, 0x99, 0x53, 0x3d, 0x58, 0xc7, 0x7b, 0x1d, 0x3e, + 0x58, 0x29, 0x72, 0xb6, 0x9b, 0xdc, 0x65, 0x52, 0x67, 0x17, 0x84, 0x68, 0x83, 0xef, 0xd0, 0xb8, + 0x4a, 0x89, 0xc5, 0xec, 0x6a, 0x76, 0x33, 0x2f, 0xe7, 0xb1, 0x53, 0x88, 0x29, 0x16, 0xdc, 0x71, + 0x0b, 0x3e, 0xde, 0x0b, 0x71, 0xec, 0x14, 0x22, 0x5b, 0x92, 0xa3, 0x70, 0x8b, 0x12, 0x8b, 0x7d, + 0x1f, 0x1e, 0xfa, 0xba, 0x10, 0xab, 0x97, 0xa8, 0x7a, 0x42, 0xa1, 0x5a, 0x05, 0xc6, 0x66, 0x0f, + 0xe4, 0xd4, 0x0e, 0x5c, 0xdb, 0x37, 0x74, 0xd5, 0xb4, 0x98, 0xb7, 0x1d, 0xdf, 0xe5, 0x34, 0xde, + 0x9d, 0xb6, 0xa6, 0xbb, 0xb4, 0x75, 0x79, 0x92, 0x80, 0xa9, 0xb5, 0xfd, 0x9e, 0x91, 0xdb, 0x06, + 0x7b, 0xfa, 0xaf, 0xff, 0xb4, 0x3d, 0xff, 0x7d, 0xe9, 0xf3, 0x34, 0xfd, 0xf5, 0x31, 0xb2, 0x12, + 0x3b, 0x3e, 0x48, 0x8a, 0x46, 0x32, 0x09, 0x83, 0x37, 0xb3, 0x10, 0x71, 0xad, 0xec, 0x1f, 0xef, + 0xb7, 0x89, 0x75, 0x7d, 0xe0, 0xc1, 0xfb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x68, 0x1c, 0xaf, + 0x7a, 0xf7, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model.pb.go new file mode 100644 index 0000000..4ce48d7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model.pb.go @@ -0,0 +1,2251 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/v2/model.proto + +package bigquery // import "google.golang.org/genproto/googleapis/cloud/bigquery/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates the type of the Model. +type Model_ModelType int32 + +const ( + Model_MODEL_TYPE_UNSPECIFIED Model_ModelType = 0 + // Linear regression model. + Model_LINEAR_REGRESSION Model_ModelType = 1 + // Logistic regression model. + Model_LOGISTIC_REGRESSION Model_ModelType = 2 + // [Beta] K-means clustering model. 
+ Model_KMEANS Model_ModelType = 3 +) + +var Model_ModelType_name = map[int32]string{ + 0: "MODEL_TYPE_UNSPECIFIED", + 1: "LINEAR_REGRESSION", + 2: "LOGISTIC_REGRESSION", + 3: "KMEANS", +} +var Model_ModelType_value = map[string]int32{ + "MODEL_TYPE_UNSPECIFIED": 0, + "LINEAR_REGRESSION": 1, + "LOGISTIC_REGRESSION": 2, + "KMEANS": 3, +} + +func (x Model_ModelType) String() string { + return proto.EnumName(Model_ModelType_name, int32(x)) +} +func (Model_ModelType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 0} +} + +// Loss metric to evaluate model training performance. +type Model_LossType int32 + +const ( + Model_LOSS_TYPE_UNSPECIFIED Model_LossType = 0 + // Mean squared loss, used for linear regression. + Model_MEAN_SQUARED_LOSS Model_LossType = 1 + // Mean log loss, used for logistic regression. + Model_MEAN_LOG_LOSS Model_LossType = 2 +) + +var Model_LossType_name = map[int32]string{ + 0: "LOSS_TYPE_UNSPECIFIED", + 1: "MEAN_SQUARED_LOSS", + 2: "MEAN_LOG_LOSS", +} +var Model_LossType_value = map[string]int32{ + "LOSS_TYPE_UNSPECIFIED": 0, + "MEAN_SQUARED_LOSS": 1, + "MEAN_LOG_LOSS": 2, +} + +func (x Model_LossType) String() string { + return proto.EnumName(Model_LossType_name, int32(x)) +} +func (Model_LossType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 1} +} + +// Distance metric used to compute the distance between two points. +type Model_DistanceType int32 + +const ( + Model_DISTANCE_TYPE_UNSPECIFIED Model_DistanceType = 0 + // Eculidean distance. + Model_EUCLIDEAN Model_DistanceType = 1 + // Cosine distance. + Model_COSINE Model_DistanceType = 2 +) + +var Model_DistanceType_name = map[int32]string{ + 0: "DISTANCE_TYPE_UNSPECIFIED", + 1: "EUCLIDEAN", + 2: "COSINE", +} +var Model_DistanceType_value = map[string]int32{ + "DISTANCE_TYPE_UNSPECIFIED": 0, + "EUCLIDEAN": 1, + "COSINE": 2, +} + +func (x Model_DistanceType) String() string { + return proto.EnumName(Model_DistanceType_name, int32(x)) +} +func (Model_DistanceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 2} +} + +// Indicates the method to split input data into multiple tables. +type Model_DataSplitMethod int32 + +const ( + Model_DATA_SPLIT_METHOD_UNSPECIFIED Model_DataSplitMethod = 0 + // Splits data randomly. + Model_RANDOM Model_DataSplitMethod = 1 + // Splits data with the user provided tags. + Model_CUSTOM Model_DataSplitMethod = 2 + // Splits data sequentially. + Model_SEQUENTIAL Model_DataSplitMethod = 3 + // Data split will be skipped. + Model_NO_SPLIT Model_DataSplitMethod = 4 + // Splits data automatically: Uses NO_SPLIT if the data size is small. + // Otherwise uses RANDOM. + Model_AUTO_SPLIT Model_DataSplitMethod = 5 +) + +var Model_DataSplitMethod_name = map[int32]string{ + 0: "DATA_SPLIT_METHOD_UNSPECIFIED", + 1: "RANDOM", + 2: "CUSTOM", + 3: "SEQUENTIAL", + 4: "NO_SPLIT", + 5: "AUTO_SPLIT", +} +var Model_DataSplitMethod_value = map[string]int32{ + "DATA_SPLIT_METHOD_UNSPECIFIED": 0, + "RANDOM": 1, + "CUSTOM": 2, + "SEQUENTIAL": 3, + "NO_SPLIT": 4, + "AUTO_SPLIT": 5, +} + +func (x Model_DataSplitMethod) String() string { + return proto.EnumName(Model_DataSplitMethod_name, int32(x)) +} +func (Model_DataSplitMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 3} +} + +// Indicates the learning rate optimization strategy to use. 
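// Editorial sketch (not part of this vendored, generated file): the name/value maps
// and String() methods generated above convert between enum numbers and their proto
// names; the same pattern applies to Model_LearnRateStrategy declared just below.
// enumExample is an invented name and relies only on this file's fmt import.
//
//	func enumExample() {
//		mt := Model_LINEAR_REGRESSION
//		fmt.Println(mt.String())                     // "LINEAR_REGRESSION"
//		fmt.Println(Model_ModelType_value["KMEANS"]) // 3
//		fmt.Println(Model_ModelType_name[int32(Model_LOGISTIC_REGRESSION)])
//	}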
+type Model_LearnRateStrategy int32 + +const ( + Model_LEARN_RATE_STRATEGY_UNSPECIFIED Model_LearnRateStrategy = 0 + // Use line search to determine learning rate. + Model_LINE_SEARCH Model_LearnRateStrategy = 1 + // Use a constant learning rate. + Model_CONSTANT Model_LearnRateStrategy = 2 +) + +var Model_LearnRateStrategy_name = map[int32]string{ + 0: "LEARN_RATE_STRATEGY_UNSPECIFIED", + 1: "LINE_SEARCH", + 2: "CONSTANT", +} +var Model_LearnRateStrategy_value = map[string]int32{ + "LEARN_RATE_STRATEGY_UNSPECIFIED": 0, + "LINE_SEARCH": 1, + "CONSTANT": 2, +} + +func (x Model_LearnRateStrategy) String() string { + return proto.EnumName(Model_LearnRateStrategy_name, int32(x)) +} +func (Model_LearnRateStrategy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 4} +} + +type Model struct { + // Output only. A hash of this resource. + Etag string `protobuf:"bytes,1,opt,name=etag,proto3" json:"etag,omitempty"` + // Required. Unique identifier for this model. + ModelReference *ModelReference `protobuf:"bytes,2,opt,name=model_reference,json=modelReference,proto3" json:"model_reference,omitempty"` + // Output only. The time when this model was created, in millisecs since the + // epoch. + CreationTime int64 `protobuf:"varint,5,opt,name=creation_time,json=creationTime,proto3" json:"creation_time,omitempty"` + // Output only. The time when this model was last modified, in millisecs + // since the epoch. + LastModifiedTime int64 `protobuf:"varint,6,opt,name=last_modified_time,json=lastModifiedTime,proto3" json:"last_modified_time,omitempty"` + // [Optional] A user-friendly description of this model. + // @mutable bigquery.models.patch + Description string `protobuf:"bytes,12,opt,name=description,proto3" json:"description,omitempty"` + // [Optional] A descriptive name for this model. + // @mutable bigquery.models.patch + FriendlyName string `protobuf:"bytes,14,opt,name=friendly_name,json=friendlyName,proto3" json:"friendly_name,omitempty"` + // [Optional] The labels associated with this model. You can use these to + // organize and group your models. Label keys and values can be no longer + // than 63 characters, can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter and each + // label in the list must have a different key. + // @mutable bigquery.models.patch + Labels map[string]string `protobuf:"bytes,15,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // [Optional] The time when this model expires, in milliseconds since the + // epoch. If not present, the model will persist indefinitely. Expired models + // will be deleted and their storage reclaimed. The defaultTableExpirationMs + // property of the encapsulating dataset can be used to set a default + // expirationTime on newly created models. + // @mutable bigquery.models.patch + ExpirationTime int64 `protobuf:"varint,16,opt,name=expiration_time,json=expirationTime,proto3" json:"expiration_time,omitempty"` + // Output only. The geographic location where the model resides. This value + // is inherited from the dataset. + Location string `protobuf:"bytes,13,opt,name=location,proto3" json:"location,omitempty"` + // Output only. Type of the model resource. 
+ ModelType Model_ModelType `protobuf:"varint,7,opt,name=model_type,json=modelType,proto3,enum=google.cloud.bigquery.v2.Model_ModelType" json:"model_type,omitempty"` + // Output only. Information for all training runs in increasing order of + // start_time. + TrainingRuns []*Model_TrainingRun `protobuf:"bytes,9,rep,name=training_runs,json=trainingRuns,proto3" json:"training_runs,omitempty"` + // Output only. Input feature columns that were used to train this model. + FeatureColumns []*StandardSqlField `protobuf:"bytes,10,rep,name=feature_columns,json=featureColumns,proto3" json:"feature_columns,omitempty"` + // Output only. Label columns that were used to train this model. + // The output of the model will have a “predicted_” prefix to these columns. + LabelColumns []*StandardSqlField `protobuf:"bytes,11,rep,name=label_columns,json=labelColumns,proto3" json:"label_columns,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model) Reset() { *m = Model{} } +func (m *Model) String() string { return proto.CompactTextString(m) } +func (*Model) ProtoMessage() {} +func (*Model) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0} +} +func (m *Model) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model.Unmarshal(m, b) +} +func (m *Model) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model.Marshal(b, m, deterministic) +} +func (dst *Model) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model.Merge(dst, src) +} +func (m *Model) XXX_Size() int { + return xxx_messageInfo_Model.Size(m) +} +func (m *Model) XXX_DiscardUnknown() { + xxx_messageInfo_Model.DiscardUnknown(m) +} + +var xxx_messageInfo_Model proto.InternalMessageInfo + +func (m *Model) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *Model) GetModelReference() *ModelReference { + if m != nil { + return m.ModelReference + } + return nil +} + +func (m *Model) GetCreationTime() int64 { + if m != nil { + return m.CreationTime + } + return 0 +} + +func (m *Model) GetLastModifiedTime() int64 { + if m != nil { + return m.LastModifiedTime + } + return 0 +} + +func (m *Model) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Model) GetFriendlyName() string { + if m != nil { + return m.FriendlyName + } + return "" +} + +func (m *Model) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Model) GetExpirationTime() int64 { + if m != nil { + return m.ExpirationTime + } + return 0 +} + +func (m *Model) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Model) GetModelType() Model_ModelType { + if m != nil { + return m.ModelType + } + return Model_MODEL_TYPE_UNSPECIFIED +} + +func (m *Model) GetTrainingRuns() []*Model_TrainingRun { + if m != nil { + return m.TrainingRuns + } + return nil +} + +func (m *Model) GetFeatureColumns() []*StandardSqlField { + if m != nil { + return m.FeatureColumns + } + return nil +} + +func (m *Model) GetLabelColumns() []*StandardSqlField { + if m != nil { + return m.LabelColumns + } + return nil +} + +// Evaluation metrics for regression models. +type Model_RegressionMetrics struct { + // Mean absolute error. + MeanAbsoluteError *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=mean_absolute_error,json=meanAbsoluteError,proto3" json:"mean_absolute_error,omitempty"` + // Mean squared error. 
+ MeanSquaredError *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=mean_squared_error,json=meanSquaredError,proto3" json:"mean_squared_error,omitempty"` + // Mean squared log error. + MeanSquaredLogError *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=mean_squared_log_error,json=meanSquaredLogError,proto3" json:"mean_squared_log_error,omitempty"` + // Median absolute error. + MedianAbsoluteError *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=median_absolute_error,json=medianAbsoluteError,proto3" json:"median_absolute_error,omitempty"` + // R^2 score. + RSquared *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=r_squared,json=rSquared,proto3" json:"r_squared,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_RegressionMetrics) Reset() { *m = Model_RegressionMetrics{} } +func (m *Model_RegressionMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_RegressionMetrics) ProtoMessage() {} +func (*Model_RegressionMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 0} +} +func (m *Model_RegressionMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_RegressionMetrics.Unmarshal(m, b) +} +func (m *Model_RegressionMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_RegressionMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_RegressionMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_RegressionMetrics.Merge(dst, src) +} +func (m *Model_RegressionMetrics) XXX_Size() int { + return xxx_messageInfo_Model_RegressionMetrics.Size(m) +} +func (m *Model_RegressionMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_RegressionMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_RegressionMetrics proto.InternalMessageInfo + +func (m *Model_RegressionMetrics) GetMeanAbsoluteError() *wrappers.DoubleValue { + if m != nil { + return m.MeanAbsoluteError + } + return nil +} + +func (m *Model_RegressionMetrics) GetMeanSquaredError() *wrappers.DoubleValue { + if m != nil { + return m.MeanSquaredError + } + return nil +} + +func (m *Model_RegressionMetrics) GetMeanSquaredLogError() *wrappers.DoubleValue { + if m != nil { + return m.MeanSquaredLogError + } + return nil +} + +func (m *Model_RegressionMetrics) GetMedianAbsoluteError() *wrappers.DoubleValue { + if m != nil { + return m.MedianAbsoluteError + } + return nil +} + +func (m *Model_RegressionMetrics) GetRSquared() *wrappers.DoubleValue { + if m != nil { + return m.RSquared + } + return nil +} + +// Aggregate metrics for classification models. For multi-class models, +// the metrics are either macro-averaged: metrics are calculated for each +// label and then an unweighted average is taken of those values or +// micro-averaged: the metric is calculated globally by counting the total +// number of correctly predicted rows. +type Model_AggregateClassificationMetrics struct { + // Precision is the fraction of actual positive predictions that had + // positive actual labels. For multiclass this is a macro-averaged + // metric treating each class as a binary classifier. + Precision *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=precision,proto3" json:"precision,omitempty"` + // Recall is the fraction of actual positive labels that were given a + // positive prediction. For multiclass this is a macro-averaged metric. 
+ Recall *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=recall,proto3" json:"recall,omitempty"` + // Accuracy is the fraction of predictions given the correct label. For + // multiclass this is a micro-averaged metric. + Accuracy *wrappers.DoubleValue `protobuf:"bytes,3,opt,name=accuracy,proto3" json:"accuracy,omitempty"` + // Threshold at which the metrics are computed. For binary + // classification models this is the positive class threshold. + // For multi-class classfication models this is the confidence + // threshold. + Threshold *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=threshold,proto3" json:"threshold,omitempty"` + // The F1 score is an average of recall and precision. For multiclass + // this is a macro-averaged metric. + F1Score *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=f1_score,json=f1Score,proto3" json:"f1_score,omitempty"` + // Logarithmic Loss. For multiclass this is a macro-averaged metric. + LogLoss *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=log_loss,json=logLoss,proto3" json:"log_loss,omitempty"` + // Area Under a ROC Curve. For multiclass this is a macro-averaged + // metric. + RocAuc *wrappers.DoubleValue `protobuf:"bytes,7,opt,name=roc_auc,json=rocAuc,proto3" json:"roc_auc,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_AggregateClassificationMetrics) Reset() { *m = Model_AggregateClassificationMetrics{} } +func (m *Model_AggregateClassificationMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_AggregateClassificationMetrics) ProtoMessage() {} +func (*Model_AggregateClassificationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 1} +} +func (m *Model_AggregateClassificationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_AggregateClassificationMetrics.Unmarshal(m, b) +} +func (m *Model_AggregateClassificationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_AggregateClassificationMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_AggregateClassificationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_AggregateClassificationMetrics.Merge(dst, src) +} +func (m *Model_AggregateClassificationMetrics) XXX_Size() int { + return xxx_messageInfo_Model_AggregateClassificationMetrics.Size(m) +} +func (m *Model_AggregateClassificationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_AggregateClassificationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_AggregateClassificationMetrics proto.InternalMessageInfo + +func (m *Model_AggregateClassificationMetrics) GetPrecision() *wrappers.DoubleValue { + if m != nil { + return m.Precision + } + return nil +} + +func (m *Model_AggregateClassificationMetrics) GetRecall() *wrappers.DoubleValue { + if m != nil { + return m.Recall + } + return nil +} + +func (m *Model_AggregateClassificationMetrics) GetAccuracy() *wrappers.DoubleValue { + if m != nil { + return m.Accuracy + } + return nil +} + +func (m *Model_AggregateClassificationMetrics) GetThreshold() *wrappers.DoubleValue { + if m != nil { + return m.Threshold + } + return nil +} + +func (m *Model_AggregateClassificationMetrics) GetF1Score() *wrappers.DoubleValue { + if m != nil { + return m.F1Score + } + return nil +} + +func (m *Model_AggregateClassificationMetrics) GetLogLoss() *wrappers.DoubleValue { + if m != nil { + return m.LogLoss + } + return nil +} + +func (m 
*Model_AggregateClassificationMetrics) GetRocAuc() *wrappers.DoubleValue { + if m != nil { + return m.RocAuc + } + return nil +} + +// Evaluation metrics for binary classification models. +type Model_BinaryClassificationMetrics struct { + // Aggregate classification metrics. + AggregateClassificationMetrics *Model_AggregateClassificationMetrics `protobuf:"bytes,1,opt,name=aggregate_classification_metrics,json=aggregateClassificationMetrics,proto3" json:"aggregate_classification_metrics,omitempty"` + // Binary confusion matrix at multiple thresholds. + BinaryConfusionMatrixList []*Model_BinaryClassificationMetrics_BinaryConfusionMatrix `protobuf:"bytes,2,rep,name=binary_confusion_matrix_list,json=binaryConfusionMatrixList,proto3" json:"binary_confusion_matrix_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_BinaryClassificationMetrics) Reset() { *m = Model_BinaryClassificationMetrics{} } +func (m *Model_BinaryClassificationMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_BinaryClassificationMetrics) ProtoMessage() {} +func (*Model_BinaryClassificationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 2} +} +func (m *Model_BinaryClassificationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_BinaryClassificationMetrics.Unmarshal(m, b) +} +func (m *Model_BinaryClassificationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_BinaryClassificationMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_BinaryClassificationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_BinaryClassificationMetrics.Merge(dst, src) +} +func (m *Model_BinaryClassificationMetrics) XXX_Size() int { + return xxx_messageInfo_Model_BinaryClassificationMetrics.Size(m) +} +func (m *Model_BinaryClassificationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_BinaryClassificationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_BinaryClassificationMetrics proto.InternalMessageInfo + +func (m *Model_BinaryClassificationMetrics) GetAggregateClassificationMetrics() *Model_AggregateClassificationMetrics { + if m != nil { + return m.AggregateClassificationMetrics + } + return nil +} + +func (m *Model_BinaryClassificationMetrics) GetBinaryConfusionMatrixList() []*Model_BinaryClassificationMetrics_BinaryConfusionMatrix { + if m != nil { + return m.BinaryConfusionMatrixList + } + return nil +} + +// Confusion matrix for binary classification models. +type Model_BinaryClassificationMetrics_BinaryConfusionMatrix struct { + // Threshold value used when computing each of the following metric. + PositiveClassThreshold *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=positive_class_threshold,json=positiveClassThreshold,proto3" json:"positive_class_threshold,omitempty"` + // Number of true samples predicted as true. + TruePositives *wrappers.Int64Value `protobuf:"bytes,2,opt,name=true_positives,json=truePositives,proto3" json:"true_positives,omitempty"` + // Number of false samples predicted as true. + FalsePositives *wrappers.Int64Value `protobuf:"bytes,3,opt,name=false_positives,json=falsePositives,proto3" json:"false_positives,omitempty"` + // Number of true samples predicted as false. 
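// Editorial sketch (not part of this vendored, generated file): the Precision and
// Recall fields further down in this confusion matrix are populated by the service;
// for orientation, this shows how those aggregates relate to the raw counts, using
// only the generated nil-safe getters. precisionRecall is an invented name.
//
//	func precisionRecall(m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) (prec, rec float64) {
//		tp := float64(m.GetTruePositives().GetValue())
//		fp := float64(m.GetFalsePositives().GetValue())
//		fn := float64(m.GetFalseNegatives().GetValue())
//		if tp+fp > 0 {
//			prec = tp / (tp + fp) // fraction of positive predictions that were correct
//		}
//		if tp+fn > 0 {
//			rec = tp / (tp + fn) // fraction of actual positives that were found
//		}
//		return prec, rec
//	}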
+ TrueNegatives *wrappers.Int64Value `protobuf:"bytes,4,opt,name=true_negatives,json=trueNegatives,proto3" json:"true_negatives,omitempty"` + // Number of false samples predicted as false. + FalseNegatives *wrappers.Int64Value `protobuf:"bytes,5,opt,name=false_negatives,json=falseNegatives,proto3" json:"false_negatives,omitempty"` + // Aggregate precision. + Precision *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=precision,proto3" json:"precision,omitempty"` + // Aggregate recall. + Recall *wrappers.DoubleValue `protobuf:"bytes,7,opt,name=recall,proto3" json:"recall,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) Reset() { + *m = Model_BinaryClassificationMetrics_BinaryConfusionMatrix{} +} +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) String() string { + return proto.CompactTextString(m) +} +func (*Model_BinaryClassificationMetrics_BinaryConfusionMatrix) ProtoMessage() {} +func (*Model_BinaryClassificationMetrics_BinaryConfusionMatrix) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 2, 0} +} +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix.Unmarshal(m, b) +} +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix.Marshal(b, m, deterministic) +} +func (dst *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix.Merge(dst, src) +} +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) XXX_Size() int { + return xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix.Size(m) +} +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) XXX_DiscardUnknown() { + xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_BinaryClassificationMetrics_BinaryConfusionMatrix proto.InternalMessageInfo + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetPositiveClassThreshold() *wrappers.DoubleValue { + if m != nil { + return m.PositiveClassThreshold + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetTruePositives() *wrappers.Int64Value { + if m != nil { + return m.TruePositives + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetFalsePositives() *wrappers.Int64Value { + if m != nil { + return m.FalsePositives + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetTrueNegatives() *wrappers.Int64Value { + if m != nil { + return m.TrueNegatives + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetFalseNegatives() *wrappers.Int64Value { + if m != nil { + return m.FalseNegatives + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetPrecision() *wrappers.DoubleValue { + if m != nil { + return m.Precision + } + return nil +} + +func (m *Model_BinaryClassificationMetrics_BinaryConfusionMatrix) GetRecall() *wrappers.DoubleValue { + if m != nil { + return m.Recall + } + return nil +} + +// Evaluation metrics for 
multi-class classification models. +type Model_MultiClassClassificationMetrics struct { + // Aggregate classification metrics. + AggregateClassificationMetrics *Model_AggregateClassificationMetrics `protobuf:"bytes,1,opt,name=aggregate_classification_metrics,json=aggregateClassificationMetrics,proto3" json:"aggregate_classification_metrics,omitempty"` + // Confusion matrix at different thresholds. + ConfusionMatrixList []*Model_MultiClassClassificationMetrics_ConfusionMatrix `protobuf:"bytes,2,rep,name=confusion_matrix_list,json=confusionMatrixList,proto3" json:"confusion_matrix_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_MultiClassClassificationMetrics) Reset() { *m = Model_MultiClassClassificationMetrics{} } +func (m *Model_MultiClassClassificationMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_MultiClassClassificationMetrics) ProtoMessage() {} +func (*Model_MultiClassClassificationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 3} +} +func (m *Model_MultiClassClassificationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_MultiClassClassificationMetrics.Unmarshal(m, b) +} +func (m *Model_MultiClassClassificationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_MultiClassClassificationMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_MultiClassClassificationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_MultiClassClassificationMetrics.Merge(dst, src) +} +func (m *Model_MultiClassClassificationMetrics) XXX_Size() int { + return xxx_messageInfo_Model_MultiClassClassificationMetrics.Size(m) +} +func (m *Model_MultiClassClassificationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_MultiClassClassificationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_MultiClassClassificationMetrics proto.InternalMessageInfo + +func (m *Model_MultiClassClassificationMetrics) GetAggregateClassificationMetrics() *Model_AggregateClassificationMetrics { + if m != nil { + return m.AggregateClassificationMetrics + } + return nil +} + +func (m *Model_MultiClassClassificationMetrics) GetConfusionMatrixList() []*Model_MultiClassClassificationMetrics_ConfusionMatrix { + if m != nil { + return m.ConfusionMatrixList + } + return nil +} + +// Confusion matrix for multi-class classification models. +type Model_MultiClassClassificationMetrics_ConfusionMatrix struct { + // Confidence threshold used when computing the entries of the + // confusion matrix. + ConfidenceThreshold *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=confidence_threshold,json=confidenceThreshold,proto3" json:"confidence_threshold,omitempty"` + // One row per actual label. 
+ Rows []*Model_MultiClassClassificationMetrics_ConfusionMatrix_Row `protobuf:"bytes,2,rep,name=rows,proto3" json:"rows,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) Reset() { + *m = Model_MultiClassClassificationMetrics_ConfusionMatrix{} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) String() string { + return proto.CompactTextString(m) +} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix) ProtoMessage() {} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 3, 0} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix.Unmarshal(m, b) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix.Marshal(b, m, deterministic) +} +func (dst *Model_MultiClassClassificationMetrics_ConfusionMatrix) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix.Merge(dst, src) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) XXX_Size() int { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix.Size(m) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) XXX_DiscardUnknown() { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix proto.InternalMessageInfo + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) GetConfidenceThreshold() *wrappers.DoubleValue { + if m != nil { + return m.ConfidenceThreshold + } + return nil +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix) GetRows() []*Model_MultiClassClassificationMetrics_ConfusionMatrix_Row { + if m != nil { + return m.Rows + } + return nil +} + +// A single entry in the confusion matrix. +type Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry struct { + // The predicted label. For confidence_threshold > 0, we will + // also add an entry indicating the number of items under the + // confidence threshold. + PredictedLabel string `protobuf:"bytes,1,opt,name=predicted_label,json=predictedLabel,proto3" json:"predicted_label,omitempty"` + // Number of items being predicted as this label. 
+ ItemCount *wrappers.Int64Value `protobuf:"bytes,2,opt,name=item_count,json=itemCount,proto3" json:"item_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) Reset() { + *m = Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry{} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) String() string { + return proto.CompactTextString(m) +} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) ProtoMessage() {} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 3, 0, 0} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry.Unmarshal(m, b) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry.Marshal(b, m, deterministic) +} +func (dst *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry.Merge(dst, src) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) XXX_Size() int { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry.Size(m) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry proto.InternalMessageInfo + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) GetPredictedLabel() string { + if m != nil { + return m.PredictedLabel + } + return "" +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry) GetItemCount() *wrappers.Int64Value { + if m != nil { + return m.ItemCount + } + return nil +} + +// A single row in the confusion matrix. +type Model_MultiClassClassificationMetrics_ConfusionMatrix_Row struct { + // The original label of this row. + ActualLabel string `protobuf:"bytes,1,opt,name=actual_label,json=actualLabel,proto3" json:"actual_label,omitempty"` + // Info describing predicted label distribution. 
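// Editorial sketch (not part of this vendored, generated file): the multi-class
// confusion matrix is a list of Rows, each holding the Entries (predicted-label
// counts) for one actual label. confusionCounts is an invented helper that flattens
// it into plain counts via the generated getters.
//
//	func confusionCounts(cm *Model_MultiClassClassificationMetrics_ConfusionMatrix) map[string]map[string]int64 {
//		counts := map[string]map[string]int64{}
//		for _, row := range cm.GetRows() {
//			actual := row.GetActualLabel()
//			if counts[actual] == nil {
//				counts[actual] = map[string]int64{}
//			}
//			for _, e := range row.GetEntries() {
//				counts[actual][e.GetPredictedLabel()] += e.GetItemCount().GetValue()
//			}
//		}
//		return counts
//	}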
+ Entries []*Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry `protobuf:"bytes,2,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) Reset() { + *m = Model_MultiClassClassificationMetrics_ConfusionMatrix_Row{} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) String() string { + return proto.CompactTextString(m) +} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) ProtoMessage() {} +func (*Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 3, 0, 1} +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row.Unmarshal(m, b) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row.Marshal(b, m, deterministic) +} +func (dst *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row.Merge(dst, src) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) XXX_Size() int { + return xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row.Size(m) +} +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) XXX_DiscardUnknown() { + xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_MultiClassClassificationMetrics_ConfusionMatrix_Row proto.InternalMessageInfo + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) GetActualLabel() string { + if m != nil { + return m.ActualLabel + } + return "" +} + +func (m *Model_MultiClassClassificationMetrics_ConfusionMatrix_Row) GetEntries() []*Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry { + if m != nil { + return m.Entries + } + return nil +} + +// Evaluation metrics for clustering models. +type Model_ClusteringMetrics struct { + // Davies-Bouldin index. + DaviesBouldinIndex *wrappers.DoubleValue `protobuf:"bytes,1,opt,name=davies_bouldin_index,json=daviesBouldinIndex,proto3" json:"davies_bouldin_index,omitempty"` + // Mean of squared distances between each sample to its cluster centroid. 
+ MeanSquaredDistance *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=mean_squared_distance,json=meanSquaredDistance,proto3" json:"mean_squared_distance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_ClusteringMetrics) Reset() { *m = Model_ClusteringMetrics{} } +func (m *Model_ClusteringMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_ClusteringMetrics) ProtoMessage() {} +func (*Model_ClusteringMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 4} +} +func (m *Model_ClusteringMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_ClusteringMetrics.Unmarshal(m, b) +} +func (m *Model_ClusteringMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_ClusteringMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_ClusteringMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_ClusteringMetrics.Merge(dst, src) +} +func (m *Model_ClusteringMetrics) XXX_Size() int { + return xxx_messageInfo_Model_ClusteringMetrics.Size(m) +} +func (m *Model_ClusteringMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_ClusteringMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_ClusteringMetrics proto.InternalMessageInfo + +func (m *Model_ClusteringMetrics) GetDaviesBouldinIndex() *wrappers.DoubleValue { + if m != nil { + return m.DaviesBouldinIndex + } + return nil +} + +func (m *Model_ClusteringMetrics) GetMeanSquaredDistance() *wrappers.DoubleValue { + if m != nil { + return m.MeanSquaredDistance + } + return nil +} + +// Evaluation metrics of a model. These are either computed on all +// training data or just the eval data based on whether eval data was used +// during training. 
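// Editorial sketch (not part of this vendored, generated file): the EvaluationMetrics
// message declared just below wraps its variants in a proto oneof, so callers
// normally type-switch on GetMetrics() to find out which kind of metrics was
// returned. describeMetrics is an invented name and relies only on this file's fmt
// import.
//
//	func describeMetrics(em *Model_EvaluationMetrics) string {
//		switch m := em.GetMetrics().(type) {
//		case *Model_EvaluationMetrics_RegressionMetrics:
//			return fmt.Sprintf("regression, r^2=%v", m.RegressionMetrics.GetRSquared().GetValue())
//		case *Model_EvaluationMetrics_BinaryClassificationMetrics:
//			return "binary classification"
//		case *Model_EvaluationMetrics_MultiClassClassificationMetrics:
//			return "multi-class classification"
//		case *Model_EvaluationMetrics_ClusteringMetrics:
//			return "clustering"
//		default:
//			return "no metrics set"
//		}
//	}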
+type Model_EvaluationMetrics struct { + // Types that are valid to be assigned to Metrics: + // *Model_EvaluationMetrics_RegressionMetrics + // *Model_EvaluationMetrics_BinaryClassificationMetrics + // *Model_EvaluationMetrics_MultiClassClassificationMetrics + // *Model_EvaluationMetrics_ClusteringMetrics + Metrics isModel_EvaluationMetrics_Metrics `protobuf_oneof:"metrics"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_EvaluationMetrics) Reset() { *m = Model_EvaluationMetrics{} } +func (m *Model_EvaluationMetrics) String() string { return proto.CompactTextString(m) } +func (*Model_EvaluationMetrics) ProtoMessage() {} +func (*Model_EvaluationMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 5} +} +func (m *Model_EvaluationMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_EvaluationMetrics.Unmarshal(m, b) +} +func (m *Model_EvaluationMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_EvaluationMetrics.Marshal(b, m, deterministic) +} +func (dst *Model_EvaluationMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_EvaluationMetrics.Merge(dst, src) +} +func (m *Model_EvaluationMetrics) XXX_Size() int { + return xxx_messageInfo_Model_EvaluationMetrics.Size(m) +} +func (m *Model_EvaluationMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_Model_EvaluationMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_EvaluationMetrics proto.InternalMessageInfo + +type isModel_EvaluationMetrics_Metrics interface { + isModel_EvaluationMetrics_Metrics() +} + +type Model_EvaluationMetrics_RegressionMetrics struct { + RegressionMetrics *Model_RegressionMetrics `protobuf:"bytes,1,opt,name=regression_metrics,json=regressionMetrics,proto3,oneof"` +} + +type Model_EvaluationMetrics_BinaryClassificationMetrics struct { + BinaryClassificationMetrics *Model_BinaryClassificationMetrics `protobuf:"bytes,2,opt,name=binary_classification_metrics,json=binaryClassificationMetrics,proto3,oneof"` +} + +type Model_EvaluationMetrics_MultiClassClassificationMetrics struct { + MultiClassClassificationMetrics *Model_MultiClassClassificationMetrics `protobuf:"bytes,3,opt,name=multi_class_classification_metrics,json=multiClassClassificationMetrics,proto3,oneof"` +} + +type Model_EvaluationMetrics_ClusteringMetrics struct { + ClusteringMetrics *Model_ClusteringMetrics `protobuf:"bytes,4,opt,name=clustering_metrics,json=clusteringMetrics,proto3,oneof"` +} + +func (*Model_EvaluationMetrics_RegressionMetrics) isModel_EvaluationMetrics_Metrics() {} + +func (*Model_EvaluationMetrics_BinaryClassificationMetrics) isModel_EvaluationMetrics_Metrics() {} + +func (*Model_EvaluationMetrics_MultiClassClassificationMetrics) isModel_EvaluationMetrics_Metrics() {} + +func (*Model_EvaluationMetrics_ClusteringMetrics) isModel_EvaluationMetrics_Metrics() {} + +func (m *Model_EvaluationMetrics) GetMetrics() isModel_EvaluationMetrics_Metrics { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *Model_EvaluationMetrics) GetRegressionMetrics() *Model_RegressionMetrics { + if x, ok := m.GetMetrics().(*Model_EvaluationMetrics_RegressionMetrics); ok { + return x.RegressionMetrics + } + return nil +} + +func (m *Model_EvaluationMetrics) GetBinaryClassificationMetrics() *Model_BinaryClassificationMetrics { + if x, ok := m.GetMetrics().(*Model_EvaluationMetrics_BinaryClassificationMetrics); ok { + return 
x.BinaryClassificationMetrics + } + return nil +} + +func (m *Model_EvaluationMetrics) GetMultiClassClassificationMetrics() *Model_MultiClassClassificationMetrics { + if x, ok := m.GetMetrics().(*Model_EvaluationMetrics_MultiClassClassificationMetrics); ok { + return x.MultiClassClassificationMetrics + } + return nil +} + +func (m *Model_EvaluationMetrics) GetClusteringMetrics() *Model_ClusteringMetrics { + if x, ok := m.GetMetrics().(*Model_EvaluationMetrics_ClusteringMetrics); ok { + return x.ClusteringMetrics + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Model_EvaluationMetrics) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Model_EvaluationMetrics_OneofMarshaler, _Model_EvaluationMetrics_OneofUnmarshaler, _Model_EvaluationMetrics_OneofSizer, []interface{}{ + (*Model_EvaluationMetrics_RegressionMetrics)(nil), + (*Model_EvaluationMetrics_BinaryClassificationMetrics)(nil), + (*Model_EvaluationMetrics_MultiClassClassificationMetrics)(nil), + (*Model_EvaluationMetrics_ClusteringMetrics)(nil), + } +} + +func _Model_EvaluationMetrics_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Model_EvaluationMetrics) + // metrics + switch x := m.Metrics.(type) { + case *Model_EvaluationMetrics_RegressionMetrics: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RegressionMetrics); err != nil { + return err + } + case *Model_EvaluationMetrics_BinaryClassificationMetrics: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BinaryClassificationMetrics); err != nil { + return err + } + case *Model_EvaluationMetrics_MultiClassClassificationMetrics: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MultiClassClassificationMetrics); err != nil { + return err + } + case *Model_EvaluationMetrics_ClusteringMetrics: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClusteringMetrics); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Model_EvaluationMetrics.Metrics has unexpected type %T", x) + } + return nil +} + +func _Model_EvaluationMetrics_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Model_EvaluationMetrics) + switch tag { + case 1: // metrics.regression_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Model_RegressionMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &Model_EvaluationMetrics_RegressionMetrics{msg} + return true, err + case 2: // metrics.binary_classification_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Model_BinaryClassificationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &Model_EvaluationMetrics_BinaryClassificationMetrics{msg} + return true, err + case 3: // metrics.multi_class_classification_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Model_MultiClassClassificationMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &Model_EvaluationMetrics_MultiClassClassificationMetrics{msg} + return true, err + case 4: // metrics.clustering_metrics + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Model_ClusteringMetrics) + err := b.DecodeMessage(msg) + m.Metrics = &Model_EvaluationMetrics_ClusteringMetrics{msg} + 
return true, err + default: + return false, nil + } +} + +func _Model_EvaluationMetrics_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Model_EvaluationMetrics) + // metrics + switch x := m.Metrics.(type) { + case *Model_EvaluationMetrics_RegressionMetrics: + s := proto.Size(x.RegressionMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_EvaluationMetrics_BinaryClassificationMetrics: + s := proto.Size(x.BinaryClassificationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_EvaluationMetrics_MultiClassClassificationMetrics: + s := proto.Size(x.MultiClassClassificationMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Model_EvaluationMetrics_ClusteringMetrics: + s := proto.Size(x.ClusteringMetrics) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Information about a single training query run for the model. +type Model_TrainingRun struct { + // Options that were used for this training run, includes + // user specified and default options that were used. + TrainingOptions *Model_TrainingRun_TrainingOptions `protobuf:"bytes,1,opt,name=training_options,json=trainingOptions,proto3" json:"training_options,omitempty"` + // The start time of this training run. + StartTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output of each iteration run, results.size() <= max_iterations. + Results []*Model_TrainingRun_IterationResult `protobuf:"bytes,6,rep,name=results,proto3" json:"results,omitempty"` + // The evaluation metrics over training/eval data that were computed at the + // end of training. 
+ EvaluationMetrics *Model_EvaluationMetrics `protobuf:"bytes,7,opt,name=evaluation_metrics,json=evaluationMetrics,proto3" json:"evaluation_metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_TrainingRun) Reset() { *m = Model_TrainingRun{} } +func (m *Model_TrainingRun) String() string { return proto.CompactTextString(m) } +func (*Model_TrainingRun) ProtoMessage() {} +func (*Model_TrainingRun) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 6} +} +func (m *Model_TrainingRun) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_TrainingRun.Unmarshal(m, b) +} +func (m *Model_TrainingRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_TrainingRun.Marshal(b, m, deterministic) +} +func (dst *Model_TrainingRun) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_TrainingRun.Merge(dst, src) +} +func (m *Model_TrainingRun) XXX_Size() int { + return xxx_messageInfo_Model_TrainingRun.Size(m) +} +func (m *Model_TrainingRun) XXX_DiscardUnknown() { + xxx_messageInfo_Model_TrainingRun.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_TrainingRun proto.InternalMessageInfo + +func (m *Model_TrainingRun) GetTrainingOptions() *Model_TrainingRun_TrainingOptions { + if m != nil { + return m.TrainingOptions + } + return nil +} + +func (m *Model_TrainingRun) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Model_TrainingRun) GetResults() []*Model_TrainingRun_IterationResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *Model_TrainingRun) GetEvaluationMetrics() *Model_EvaluationMetrics { + if m != nil { + return m.EvaluationMetrics + } + return nil +} + +type Model_TrainingRun_TrainingOptions struct { + // The maximum number of iterations in training. + MaxIterations int64 `protobuf:"varint,1,opt,name=max_iterations,json=maxIterations,proto3" json:"max_iterations,omitempty"` + // Type of loss function used during training run. + LossType Model_LossType `protobuf:"varint,2,opt,name=loss_type,json=lossType,proto3,enum=google.cloud.bigquery.v2.Model_LossType" json:"loss_type,omitempty"` + // Learning rate in training. + LearnRate float64 `protobuf:"fixed64,3,opt,name=learn_rate,json=learnRate,proto3" json:"learn_rate,omitempty"` + // L1 regularization coefficient. + L1Regularization *wrappers.DoubleValue `protobuf:"bytes,4,opt,name=l1_regularization,json=l1Regularization,proto3" json:"l1_regularization,omitempty"` + // L2 regularization coefficient. + L2Regularization *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=l2_regularization,json=l2Regularization,proto3" json:"l2_regularization,omitempty"` + // When early_stop is true, stops training when accuracy improvement is + // less than 'min_relative_progress'. + MinRelativeProgress *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=min_relative_progress,json=minRelativeProgress,proto3" json:"min_relative_progress,omitempty"` + // Whether to train a model from the last checkpoint. + WarmStart *wrappers.BoolValue `protobuf:"bytes,7,opt,name=warm_start,json=warmStart,proto3" json:"warm_start,omitempty"` + // Whether to stop early when the loss doesn't improve significantly + // any more (compared to min_relative_progress). + EarlyStop *wrappers.BoolValue `protobuf:"bytes,8,opt,name=early_stop,json=earlyStop,proto3" json:"early_stop,omitempty"` + // Name of input label columns in training data. 
+ InputLabelColumns []string `protobuf:"bytes,9,rep,name=input_label_columns,json=inputLabelColumns,proto3" json:"input_label_columns,omitempty"` + // The data split type for training and evaluation, e.g. RANDOM. + DataSplitMethod Model_DataSplitMethod `protobuf:"varint,10,opt,name=data_split_method,json=dataSplitMethod,proto3,enum=google.cloud.bigquery.v2.Model_DataSplitMethod" json:"data_split_method,omitempty"` + // The fraction of evaluation data over the whole input data. The rest + // of data will be used as training data. The format should be double. + // Accurate to two decimal places. + // Default value is 0.2. + DataSplitEvalFraction float64 `protobuf:"fixed64,11,opt,name=data_split_eval_fraction,json=dataSplitEvalFraction,proto3" json:"data_split_eval_fraction,omitempty"` + // The column to split data with. This column won't be used as a + // feature. + // 1. When data_split_method is CUSTOM, the corresponding column should + // be boolean. The rows with true value tag are eval data, and the false + // are training data. + // 2. When data_split_method is SEQ, the first DATA_SPLIT_EVAL_FRACTION + // rows (from smallest to largest) in the corresponding column are used + // as training data, and the rest are eval data. It respects the order + // in Orderable data types: + // https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type-properties + DataSplitColumn string `protobuf:"bytes,12,opt,name=data_split_column,json=dataSplitColumn,proto3" json:"data_split_column,omitempty"` + // The strategy to determine learning rate. + LearnRateStrategy Model_LearnRateStrategy `protobuf:"varint,13,opt,name=learn_rate_strategy,json=learnRateStrategy,proto3,enum=google.cloud.bigquery.v2.Model_LearnRateStrategy" json:"learn_rate_strategy,omitempty"` + // Specifies the initial learning rate for line search to start at. + InitialLearnRate float64 `protobuf:"fixed64,16,opt,name=initial_learn_rate,json=initialLearnRate,proto3" json:"initial_learn_rate,omitempty"` + // Weights associated with each label class, for rebalancing the + // training data. + LabelClassWeights map[string]float64 `protobuf:"bytes,17,rep,name=label_class_weights,json=labelClassWeights,proto3" json:"label_class_weights,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"` + // [Beta] Distance type for clustering models. + DistanceType Model_DistanceType `protobuf:"varint,20,opt,name=distance_type,json=distanceType,proto3,enum=google.cloud.bigquery.v2.Model_DistanceType" json:"distance_type,omitempty"` + // [Beta] Number of clusters for clustering models. 
+ NumClusters int64 `protobuf:"varint,21,opt,name=num_clusters,json=numClusters,proto3" json:"num_clusters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_TrainingRun_TrainingOptions) Reset() { *m = Model_TrainingRun_TrainingOptions{} } +func (m *Model_TrainingRun_TrainingOptions) String() string { return proto.CompactTextString(m) } +func (*Model_TrainingRun_TrainingOptions) ProtoMessage() {} +func (*Model_TrainingRun_TrainingOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 6, 0} +} +func (m *Model_TrainingRun_TrainingOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_TrainingRun_TrainingOptions.Unmarshal(m, b) +} +func (m *Model_TrainingRun_TrainingOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_TrainingRun_TrainingOptions.Marshal(b, m, deterministic) +} +func (dst *Model_TrainingRun_TrainingOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_TrainingRun_TrainingOptions.Merge(dst, src) +} +func (m *Model_TrainingRun_TrainingOptions) XXX_Size() int { + return xxx_messageInfo_Model_TrainingRun_TrainingOptions.Size(m) +} +func (m *Model_TrainingRun_TrainingOptions) XXX_DiscardUnknown() { + xxx_messageInfo_Model_TrainingRun_TrainingOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_TrainingRun_TrainingOptions proto.InternalMessageInfo + +func (m *Model_TrainingRun_TrainingOptions) GetMaxIterations() int64 { + if m != nil { + return m.MaxIterations + } + return 0 +} + +func (m *Model_TrainingRun_TrainingOptions) GetLossType() Model_LossType { + if m != nil { + return m.LossType + } + return Model_LOSS_TYPE_UNSPECIFIED +} + +func (m *Model_TrainingRun_TrainingOptions) GetLearnRate() float64 { + if m != nil { + return m.LearnRate + } + return 0 +} + +func (m *Model_TrainingRun_TrainingOptions) GetL1Regularization() *wrappers.DoubleValue { + if m != nil { + return m.L1Regularization + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetL2Regularization() *wrappers.DoubleValue { + if m != nil { + return m.L2Regularization + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetMinRelativeProgress() *wrappers.DoubleValue { + if m != nil { + return m.MinRelativeProgress + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetWarmStart() *wrappers.BoolValue { + if m != nil { + return m.WarmStart + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetEarlyStop() *wrappers.BoolValue { + if m != nil { + return m.EarlyStop + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetInputLabelColumns() []string { + if m != nil { + return m.InputLabelColumns + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetDataSplitMethod() Model_DataSplitMethod { + if m != nil { + return m.DataSplitMethod + } + return Model_DATA_SPLIT_METHOD_UNSPECIFIED +} + +func (m *Model_TrainingRun_TrainingOptions) GetDataSplitEvalFraction() float64 { + if m != nil { + return m.DataSplitEvalFraction + } + return 0 +} + +func (m *Model_TrainingRun_TrainingOptions) GetDataSplitColumn() string { + if m != nil { + return m.DataSplitColumn + } + return "" +} + +func (m *Model_TrainingRun_TrainingOptions) GetLearnRateStrategy() Model_LearnRateStrategy { + if m != nil { + return m.LearnRateStrategy + } + return Model_LEARN_RATE_STRATEGY_UNSPECIFIED +} + +func (m *Model_TrainingRun_TrainingOptions) GetInitialLearnRate() 
float64 { + if m != nil { + return m.InitialLearnRate + } + return 0 +} + +func (m *Model_TrainingRun_TrainingOptions) GetLabelClassWeights() map[string]float64 { + if m != nil { + return m.LabelClassWeights + } + return nil +} + +func (m *Model_TrainingRun_TrainingOptions) GetDistanceType() Model_DistanceType { + if m != nil { + return m.DistanceType + } + return Model_DISTANCE_TYPE_UNSPECIFIED +} + +func (m *Model_TrainingRun_TrainingOptions) GetNumClusters() int64 { + if m != nil { + return m.NumClusters + } + return 0 +} + +// Information about a single iteration of the training run. +type Model_TrainingRun_IterationResult struct { + // Index of the iteration, 0 based. + Index *wrappers.Int32Value `protobuf:"bytes,1,opt,name=index,proto3" json:"index,omitempty"` + // Time taken to run the iteration in milliseconds. + DurationMs *wrappers.Int64Value `protobuf:"bytes,4,opt,name=duration_ms,json=durationMs,proto3" json:"duration_ms,omitempty"` + // Loss computed on the training data at the end of iteration. + TrainingLoss *wrappers.DoubleValue `protobuf:"bytes,5,opt,name=training_loss,json=trainingLoss,proto3" json:"training_loss,omitempty"` + // Loss computed on the eval data at the end of iteration. + EvalLoss *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=eval_loss,json=evalLoss,proto3" json:"eval_loss,omitempty"` + // Learn rate used for this iteration. + LearnRate float64 `protobuf:"fixed64,7,opt,name=learn_rate,json=learnRate,proto3" json:"learn_rate,omitempty"` + // [Beta] Information about top clusters for clustering models. + ClusterInfos []*Model_TrainingRun_IterationResult_ClusterInfo `protobuf:"bytes,8,rep,name=cluster_infos,json=clusterInfos,proto3" json:"cluster_infos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_TrainingRun_IterationResult) Reset() { *m = Model_TrainingRun_IterationResult{} } +func (m *Model_TrainingRun_IterationResult) String() string { return proto.CompactTextString(m) } +func (*Model_TrainingRun_IterationResult) ProtoMessage() {} +func (*Model_TrainingRun_IterationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 6, 1} +} +func (m *Model_TrainingRun_IterationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_TrainingRun_IterationResult.Unmarshal(m, b) +} +func (m *Model_TrainingRun_IterationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_TrainingRun_IterationResult.Marshal(b, m, deterministic) +} +func (dst *Model_TrainingRun_IterationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_TrainingRun_IterationResult.Merge(dst, src) +} +func (m *Model_TrainingRun_IterationResult) XXX_Size() int { + return xxx_messageInfo_Model_TrainingRun_IterationResult.Size(m) +} +func (m *Model_TrainingRun_IterationResult) XXX_DiscardUnknown() { + xxx_messageInfo_Model_TrainingRun_IterationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_TrainingRun_IterationResult proto.InternalMessageInfo + +func (m *Model_TrainingRun_IterationResult) GetIndex() *wrappers.Int32Value { + if m != nil { + return m.Index + } + return nil +} + +func (m *Model_TrainingRun_IterationResult) GetDurationMs() *wrappers.Int64Value { + if m != nil { + return m.DurationMs + } + return nil +} + +func (m *Model_TrainingRun_IterationResult) GetTrainingLoss() *wrappers.DoubleValue { + if m != nil { + return m.TrainingLoss + } + return nil +} + +func (m 
*Model_TrainingRun_IterationResult) GetEvalLoss() *wrappers.DoubleValue { + if m != nil { + return m.EvalLoss + } + return nil +} + +func (m *Model_TrainingRun_IterationResult) GetLearnRate() float64 { + if m != nil { + return m.LearnRate + } + return 0 +} + +func (m *Model_TrainingRun_IterationResult) GetClusterInfos() []*Model_TrainingRun_IterationResult_ClusterInfo { + if m != nil { + return m.ClusterInfos + } + return nil +} + +// Information about a single cluster for clustering model. +type Model_TrainingRun_IterationResult_ClusterInfo struct { + // Centroid id. + CentroidId int64 `protobuf:"varint,1,opt,name=centroid_id,json=centroidId,proto3" json:"centroid_id,omitempty"` + // Cluster radius, the average distance from centroid + // to each point assigned to the cluster. + ClusterRadius *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=cluster_radius,json=clusterRadius,proto3" json:"cluster_radius,omitempty"` + // Cluster size, the total number of points assigned to the cluster. + ClusterSize *wrappers.Int64Value `protobuf:"bytes,3,opt,name=cluster_size,json=clusterSize,proto3" json:"cluster_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model_TrainingRun_IterationResult_ClusterInfo) Reset() { + *m = Model_TrainingRun_IterationResult_ClusterInfo{} +} +func (m *Model_TrainingRun_IterationResult_ClusterInfo) String() string { + return proto.CompactTextString(m) +} +func (*Model_TrainingRun_IterationResult_ClusterInfo) ProtoMessage() {} +func (*Model_TrainingRun_IterationResult_ClusterInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{0, 6, 1, 0} +} +func (m *Model_TrainingRun_IterationResult_ClusterInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo.Unmarshal(m, b) +} +func (m *Model_TrainingRun_IterationResult_ClusterInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo.Marshal(b, m, deterministic) +} +func (dst *Model_TrainingRun_IterationResult_ClusterInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo.Merge(dst, src) +} +func (m *Model_TrainingRun_IterationResult_ClusterInfo) XXX_Size() int { + return xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo.Size(m) +} +func (m *Model_TrainingRun_IterationResult_ClusterInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Model_TrainingRun_IterationResult_ClusterInfo proto.InternalMessageInfo + +func (m *Model_TrainingRun_IterationResult_ClusterInfo) GetCentroidId() int64 { + if m != nil { + return m.CentroidId + } + return 0 +} + +func (m *Model_TrainingRun_IterationResult_ClusterInfo) GetClusterRadius() *wrappers.DoubleValue { + if m != nil { + return m.ClusterRadius + } + return nil +} + +func (m *Model_TrainingRun_IterationResult_ClusterInfo) GetClusterSize() *wrappers.Int64Value { + if m != nil { + return m.ClusterSize + } + return nil +} + +type GetModelRequest struct { + // Project ID of the requested model. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Dataset ID of the requested model. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // Model ID of the requested model. 
+ ModelId string `protobuf:"bytes,3,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetModelRequest) Reset() { *m = GetModelRequest{} } +func (m *GetModelRequest) String() string { return proto.CompactTextString(m) } +func (*GetModelRequest) ProtoMessage() {} +func (*GetModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{1} +} +func (m *GetModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetModelRequest.Unmarshal(m, b) +} +func (m *GetModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetModelRequest.Marshal(b, m, deterministic) +} +func (dst *GetModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetModelRequest.Merge(dst, src) +} +func (m *GetModelRequest) XXX_Size() int { + return xxx_messageInfo_GetModelRequest.Size(m) +} +func (m *GetModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetModelRequest proto.InternalMessageInfo + +func (m *GetModelRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetModelRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *GetModelRequest) GetModelId() string { + if m != nil { + return m.ModelId + } + return "" +} + +type PatchModelRequest struct { + // Project ID of the model to patch. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Dataset ID of the model to patch. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // Model ID of the model to patch. + ModelId string `protobuf:"bytes,3,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"` + // Patched model. + // Follows patch semantics. Missing fields are not updated. To clear a field, + // explicitly set to default value. 
+ Model *Model `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PatchModelRequest) Reset() { *m = PatchModelRequest{} } +func (m *PatchModelRequest) String() string { return proto.CompactTextString(m) } +func (*PatchModelRequest) ProtoMessage() {} +func (*PatchModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{2} +} +func (m *PatchModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PatchModelRequest.Unmarshal(m, b) +} +func (m *PatchModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PatchModelRequest.Marshal(b, m, deterministic) +} +func (dst *PatchModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PatchModelRequest.Merge(dst, src) +} +func (m *PatchModelRequest) XXX_Size() int { + return xxx_messageInfo_PatchModelRequest.Size(m) +} +func (m *PatchModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PatchModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PatchModelRequest proto.InternalMessageInfo + +func (m *PatchModelRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *PatchModelRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *PatchModelRequest) GetModelId() string { + if m != nil { + return m.ModelId + } + return "" +} + +func (m *PatchModelRequest) GetModel() *Model { + if m != nil { + return m.Model + } + return nil +} + +type DeleteModelRequest struct { + // Project ID of the model to delete. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Dataset ID of the model to delete. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // Model ID of the model to delete. 
+ ModelId string `protobuf:"bytes,3,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteModelRequest) Reset() { *m = DeleteModelRequest{} } +func (m *DeleteModelRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteModelRequest) ProtoMessage() {} +func (*DeleteModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{3} +} +func (m *DeleteModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteModelRequest.Unmarshal(m, b) +} +func (m *DeleteModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteModelRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteModelRequest.Merge(dst, src) +} +func (m *DeleteModelRequest) XXX_Size() int { + return xxx_messageInfo_DeleteModelRequest.Size(m) +} +func (m *DeleteModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteModelRequest proto.InternalMessageInfo + +func (m *DeleteModelRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteModelRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *DeleteModelRequest) GetModelId() string { + if m != nil { + return m.ModelId + } + return "" +} + +type ListModelsRequest struct { + // Project ID of the models to list. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Dataset ID of the models to list. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The maximum number of results per page. 
+ MaxResults *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Page token, returned by a previous call to request the next page of + // results + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsRequest) Reset() { *m = ListModelsRequest{} } +func (m *ListModelsRequest) String() string { return proto.CompactTextString(m) } +func (*ListModelsRequest) ProtoMessage() {} +func (*ListModelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{4} +} +func (m *ListModelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsRequest.Unmarshal(m, b) +} +func (m *ListModelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsRequest.Marshal(b, m, deterministic) +} +func (dst *ListModelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsRequest.Merge(dst, src) +} +func (m *ListModelsRequest) XXX_Size() int { + return xxx_messageInfo_ListModelsRequest.Size(m) +} +func (m *ListModelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsRequest proto.InternalMessageInfo + +func (m *ListModelsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListModelsRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *ListModelsRequest) GetMaxResults() *wrappers.UInt32Value { + if m != nil { + return m.MaxResults + } + return nil +} + +func (m *ListModelsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +type ListModelsResponse struct { + // Models in the requested dataset. Only the following fields are populated: + // model_reference, model_type, creation_time, last_modified_time and + // labels. + Models []*Model `protobuf:"bytes,1,rep,name=models,proto3" json:"models,omitempty"` + // A token to request the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsResponse) Reset() { *m = ListModelsResponse{} } +func (m *ListModelsResponse) String() string { return proto.CompactTextString(m) } +func (*ListModelsResponse) ProtoMessage() {} +func (*ListModelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_model_5def960f0bdfad81, []int{5} +} +func (m *ListModelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsResponse.Unmarshal(m, b) +} +func (m *ListModelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsResponse.Marshal(b, m, deterministic) +} +func (dst *ListModelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsResponse.Merge(dst, src) +} +func (m *ListModelsResponse) XXX_Size() int { + return xxx_messageInfo_ListModelsResponse.Size(m) +} +func (m *ListModelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsResponse proto.InternalMessageInfo + +func (m *ListModelsResponse) GetModels() []*Model { + if m != nil { + return m.Models + } + return nil +} + +func (m *ListModelsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*Model)(nil), "google.cloud.bigquery.v2.Model") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.bigquery.v2.Model.LabelsEntry") + proto.RegisterType((*Model_RegressionMetrics)(nil), "google.cloud.bigquery.v2.Model.RegressionMetrics") + proto.RegisterType((*Model_AggregateClassificationMetrics)(nil), "google.cloud.bigquery.v2.Model.AggregateClassificationMetrics") + proto.RegisterType((*Model_BinaryClassificationMetrics)(nil), "google.cloud.bigquery.v2.Model.BinaryClassificationMetrics") + proto.RegisterType((*Model_BinaryClassificationMetrics_BinaryConfusionMatrix)(nil), "google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix") + proto.RegisterType((*Model_MultiClassClassificationMetrics)(nil), "google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics") + proto.RegisterType((*Model_MultiClassClassificationMetrics_ConfusionMatrix)(nil), "google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix") + proto.RegisterType((*Model_MultiClassClassificationMetrics_ConfusionMatrix_Entry)(nil), "google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry") + proto.RegisterType((*Model_MultiClassClassificationMetrics_ConfusionMatrix_Row)(nil), "google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row") + proto.RegisterType((*Model_ClusteringMetrics)(nil), "google.cloud.bigquery.v2.Model.ClusteringMetrics") + proto.RegisterType((*Model_EvaluationMetrics)(nil), "google.cloud.bigquery.v2.Model.EvaluationMetrics") + proto.RegisterType((*Model_TrainingRun)(nil), "google.cloud.bigquery.v2.Model.TrainingRun") + proto.RegisterType((*Model_TrainingRun_TrainingOptions)(nil), "google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions") + proto.RegisterMapType((map[string]float64)(nil), "google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry") + proto.RegisterType((*Model_TrainingRun_IterationResult)(nil), "google.cloud.bigquery.v2.Model.TrainingRun.IterationResult") + 
proto.RegisterType((*Model_TrainingRun_IterationResult_ClusterInfo)(nil), "google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo") + proto.RegisterType((*GetModelRequest)(nil), "google.cloud.bigquery.v2.GetModelRequest") + proto.RegisterType((*PatchModelRequest)(nil), "google.cloud.bigquery.v2.PatchModelRequest") + proto.RegisterType((*DeleteModelRequest)(nil), "google.cloud.bigquery.v2.DeleteModelRequest") + proto.RegisterType((*ListModelsRequest)(nil), "google.cloud.bigquery.v2.ListModelsRequest") + proto.RegisterType((*ListModelsResponse)(nil), "google.cloud.bigquery.v2.ListModelsResponse") + proto.RegisterEnum("google.cloud.bigquery.v2.Model_ModelType", Model_ModelType_name, Model_ModelType_value) + proto.RegisterEnum("google.cloud.bigquery.v2.Model_LossType", Model_LossType_name, Model_LossType_value) + proto.RegisterEnum("google.cloud.bigquery.v2.Model_DistanceType", Model_DistanceType_name, Model_DistanceType_value) + proto.RegisterEnum("google.cloud.bigquery.v2.Model_DataSplitMethod", Model_DataSplitMethod_name, Model_DataSplitMethod_value) + proto.RegisterEnum("google.cloud.bigquery.v2.Model_LearnRateStrategy", Model_LearnRateStrategy_name, Model_LearnRateStrategy_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ModelServiceClient is the client API for ModelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ModelServiceClient interface { + // Gets the specified model resource by model ID. + GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) + // Lists all models in the specified dataset. Requires the READER dataset + // role. + ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) + // Patch specific fields in the specified model. + PatchModel(ctx context.Context, in *PatchModelRequest, opts ...grpc.CallOption) (*Model, error) + // Deletes the model specified by modelId from the dataset. + DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type modelServiceClient struct { + cc *grpc.ClientConn +} + +func NewModelServiceClient(cc *grpc.ClientConn) ModelServiceClient { + return &modelServiceClient{cc} +} + +func (c *modelServiceClient) GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) { + out := new(Model) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.v2.ModelService/GetModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) { + out := new(ListModelsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.v2.ModelService/ListModels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) PatchModel(ctx context.Context, in *PatchModelRequest, opts ...grpc.CallOption) (*Model, error) { + out := new(Model) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.v2.ModelService/PatchModel", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.bigquery.v2.ModelService/DeleteModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ModelServiceServer is the server API for ModelService service. +type ModelServiceServer interface { + // Gets the specified model resource by model ID. + GetModel(context.Context, *GetModelRequest) (*Model, error) + // Lists all models in the specified dataset. Requires the READER dataset + // role. + ListModels(context.Context, *ListModelsRequest) (*ListModelsResponse, error) + // Patch specific fields in the specified model. + PatchModel(context.Context, *PatchModelRequest) (*Model, error) + // Deletes the model specified by modelId from the dataset. + DeleteModel(context.Context, *DeleteModelRequest) (*empty.Empty, error) +} + +func RegisterModelServiceServer(s *grpc.Server, srv ModelServiceServer) { + s.RegisterService(&_ModelService_serviceDesc, srv) +} + +func _ModelService_GetModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).GetModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.v2.ModelService/GetModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).GetModel(ctx, req.(*GetModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_ListModels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListModelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).ListModels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.v2.ModelService/ListModels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).ListModels(ctx, req.(*ListModelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_PatchModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PatchModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).PatchModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.bigquery.v2.ModelService/PatchModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).PatchModel(ctx, req.(*PatchModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_DeleteModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).DeleteModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + 
FullMethod: "/google.cloud.bigquery.v2.ModelService/DeleteModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).DeleteModel(ctx, req.(*DeleteModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ModelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.bigquery.v2.ModelService", + HandlerType: (*ModelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetModel", + Handler: _ModelService_GetModel_Handler, + }, + { + MethodName: "ListModels", + Handler: _ModelService_ListModels_Handler, + }, + { + MethodName: "PatchModel", + Handler: _ModelService_PatchModel_Handler, + }, + { + MethodName: "DeleteModel", + Handler: _ModelService_DeleteModel_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/bigquery/v2/model.proto", +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/v2/model.proto", fileDescriptor_model_5def960f0bdfad81) +} + +var fileDescriptor_model_5def960f0bdfad81 = []byte{ + // 2541 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x59, 0xdd, 0x73, 0x23, 0x47, + 0x11, 0xb7, 0x2c, 0x7f, 0xa9, 0x65, 0x5b, 0xd2, 0xf8, 0x7c, 0xe8, 0x74, 0x77, 0x39, 0x47, 0xe1, + 0xc3, 0x24, 0x57, 0x72, 0x9d, 0x93, 0x90, 0x5c, 0x02, 0x01, 0x59, 0x52, 0xce, 0x02, 0x59, 0xf2, + 0xed, 0xca, 0x09, 0x01, 0xaa, 0x96, 0xf1, 0xee, 0x48, 0x37, 0xc9, 0x7e, 0xe8, 0x66, 0x66, 0xcf, + 0xf6, 0xbd, 0x01, 0x2f, 0x29, 0xaa, 0xe0, 0x81, 0x27, 0x78, 0xe6, 0x0f, 0xa0, 0x28, 0x2a, 0xef, + 0x54, 0xf1, 0x37, 0xf0, 0x8f, 0x50, 0xc5, 0x13, 0x2f, 0xd4, 0xcc, 0xce, 0xea, 0xd3, 0xb6, 0xf6, + 0xb8, 0xab, 0x14, 0x2f, 0xf6, 0x4e, 0x4f, 0xf7, 0x6f, 0xba, 0x7b, 0xba, 0x7b, 0x7a, 0x46, 0xf0, + 0xcd, 0x7e, 0x10, 0xf4, 0x5d, 0xb2, 0x67, 0xbb, 0x41, 0xe8, 0xec, 0x9d, 0xd2, 0xfe, 0xd3, 0x90, + 0xb0, 0x8b, 0xbd, 0x67, 0xfb, 0x7b, 0x5e, 0xe0, 0x10, 0xb7, 0x32, 0x60, 0x81, 0x08, 0x50, 0x31, + 0xe2, 0xaa, 0x28, 0xae, 0x4a, 0xcc, 0x55, 0x79, 0xb6, 0x5f, 0xaa, 0x5c, 0x2f, 0x6f, 0x31, 0xd2, + 0x23, 0x8c, 0xf8, 0x36, 0x89, 0x90, 0x4a, 0x6f, 0x5d, 0xc9, 0xcf, 0x05, 0xf6, 0x1d, 0xcc, 0x1c, + 0x8b, 0x3f, 0xd5, 0xcb, 0x96, 0x6e, 0x6b, 0x66, 0x35, 0x3a, 0x0d, 0x7b, 0x7b, 0xc4, 0x1b, 0x88, + 0x0b, 0x3d, 0x79, 0x6f, 0x7a, 0x52, 0x50, 0x8f, 0x70, 0x81, 0xbd, 0x81, 0x66, 0x78, 0x6d, 0x9a, + 0xe1, 0x8c, 0xe1, 0xc1, 0x80, 0x30, 0xae, 0xe7, 0xef, 0xe8, 0x79, 0x3c, 0xa0, 0x7b, 0xd8, 0xf7, + 0x03, 0x81, 0x05, 0x0d, 0x7c, 0x3d, 0x5b, 0xfe, 0xe3, 0xdb, 0xb0, 0x7c, 0x24, 0x4d, 0x40, 0x08, + 0x96, 0x88, 0xc0, 0xfd, 0x62, 0x6a, 0x27, 0xb5, 0x9b, 0x31, 0xd4, 0x37, 0x7a, 0x0c, 0xb9, 0x29, + 0xfb, 0x8a, 0x8b, 0x3b, 0xa9, 0xdd, 0xec, 0xfe, 0x6e, 0xe5, 0x2a, 0x57, 0x55, 0x14, 0x9a, 0x11, + 0xf3, 0x1b, 0x9b, 0xde, 0xc4, 0x18, 0xbd, 0x01, 0x1b, 0x36, 0x23, 0x4a, 0x07, 0x4b, 0x9a, 0x52, + 0x5c, 0xde, 0x49, 0xed, 0xa6, 0x8d, 0xf5, 0x98, 0xd8, 0xa5, 0x1e, 0x41, 0xf7, 0x01, 0xb9, 0x98, + 0x0b, 0xcb, 0x0b, 0x1c, 0xda, 0xa3, 0xc4, 0x89, 0x38, 0x57, 0x14, 0x67, 0x5e, 0xce, 0x1c, 0xe9, + 0x09, 0xc5, 0xbd, 0x03, 0x59, 0x87, 0x70, 0x9b, 0xd1, 0x81, 0x04, 0x28, 0xae, 0x2b, 0x03, 0xc6, + 0x49, 0x72, 0xd1, 0x1e, 0xa3, 0xc4, 0x77, 0xdc, 0x0b, 0xcb, 0xc7, 0x1e, 0x29, 0x6e, 0x2a, 0x9e, + 0xf5, 0x98, 0xd8, 0xc6, 0x1e, 0x41, 0x35, 0x58, 0x71, 0xf1, 0x29, 0x71, 0x79, 0x31, 0xb7, 0x93, + 0xde, 0xcd, 0xee, 0xbf, 0x35, 0xc7, 0xc6, 0x4a, 0x4b, 0x71, 0x37, 0x7c, 0xc1, 0x2e, 0x0c, 0x2d, + 0x8a, 0xbe, 0x03, 0x39, 0x72, 0x3e, 0xa0, 0x6c, 0xcc, 0xc0, 0xbc, 0x52, 0x7b, 0x73, 0x44, 0x56, + 
0x4a, 0x97, 0x60, 0xcd, 0x0d, 0x6c, 0x35, 0x2e, 0x6e, 0x28, 0x6d, 0x86, 0x63, 0x74, 0x08, 0x10, + 0xb9, 0x5d, 0x5c, 0x0c, 0x48, 0x71, 0x75, 0x27, 0xb5, 0xbb, 0xb9, 0xff, 0xdd, 0x79, 0xda, 0xa8, + 0xbf, 0xdd, 0x8b, 0x01, 0x31, 0x32, 0x5e, 0xfc, 0x89, 0x8e, 0x61, 0x43, 0x30, 0x4c, 0x7d, 0xea, + 0xf7, 0x2d, 0x16, 0xfa, 0xbc, 0x98, 0x49, 0x66, 0x5a, 0x57, 0x0b, 0x19, 0xa1, 0x6f, 0xac, 0x8b, + 0xd1, 0x80, 0x23, 0x13, 0x72, 0x3d, 0x82, 0x45, 0xc8, 0x88, 0x65, 0x07, 0x6e, 0xe8, 0xf9, 0xbc, + 0x08, 0x0a, 0xf3, 0xcd, 0xab, 0x31, 0x4d, 0x1d, 0xf3, 0xe6, 0x53, 0xf7, 0x63, 0x4a, 0x5c, 0xc7, + 0xd8, 0xd4, 0x10, 0xb5, 0x08, 0x01, 0x75, 0x60, 0x43, 0xf9, 0x6f, 0x08, 0x99, 0x7d, 0x61, 0xc8, + 0x75, 0x05, 0xa0, 0x01, 0x4b, 0xbf, 0x4b, 0x43, 0xc1, 0x20, 0x7d, 0x46, 0x38, 0xa7, 0x81, 0x7f, + 0x44, 0x04, 0xa3, 0x36, 0x47, 0x2d, 0xd8, 0xf2, 0x08, 0xf6, 0x2d, 0x7c, 0xca, 0x03, 0x37, 0x14, + 0xc4, 0x22, 0x8c, 0x05, 0x4c, 0x45, 0x7c, 0x76, 0xff, 0x4e, 0xbc, 0x58, 0x9c, 0x48, 0x95, 0x7a, + 0x10, 0x9e, 0xba, 0xe4, 0x13, 0xec, 0x86, 0xc4, 0x28, 0x48, 0xc1, 0xaa, 0x96, 0x6b, 0x48, 0x31, + 0xf4, 0x63, 0x40, 0x0a, 0x8d, 0x3f, 0x0d, 0x31, 0x23, 0x8e, 0x06, 0x5b, 0x4c, 0x00, 0x96, 0x97, + 0x72, 0x66, 0x24, 0x16, 0x61, 0x3d, 0x86, 0x9b, 0x13, 0x58, 0x6e, 0xd0, 0xd7, 0x78, 0xe9, 0x04, + 0x78, 0x5b, 0x63, 0x78, 0xad, 0xa0, 0x1f, 0x41, 0x1e, 0xc3, 0xb6, 0x47, 0x1c, 0x3a, 0x6b, 0xee, + 0x52, 0x32, 0x44, 0x29, 0x3a, 0x69, 0xf0, 0x43, 0xc8, 0xb0, 0x58, 0x43, 0x95, 0xb6, 0xf3, 0x50, + 0xd6, 0x98, 0x56, 0xaa, 0xf4, 0xd7, 0x34, 0xbc, 0x56, 0xed, 0xf7, 0x19, 0xe9, 0x63, 0x41, 0x6a, + 0x2e, 0xe6, 0x9c, 0xf6, 0x68, 0x14, 0xed, 0xf1, 0xe6, 0x7c, 0x00, 0x99, 0x01, 0x23, 0x36, 0x95, + 0x1b, 0x96, 0x68, 0x4b, 0x46, 0xec, 0xe8, 0x1d, 0x58, 0x61, 0xc4, 0xc6, 0xae, 0x9b, 0xc8, 0xfd, + 0x9a, 0x17, 0xbd, 0x0f, 0x6b, 0xd8, 0xb6, 0x43, 0x86, 0xed, 0x8b, 0x44, 0x6e, 0x1e, 0x72, 0x4b, + 0x5d, 0xc5, 0x13, 0x46, 0xf8, 0x93, 0xc0, 0x75, 0x12, 0xf9, 0x73, 0xc4, 0x8e, 0xde, 0x83, 0xb5, + 0xde, 0x03, 0x8b, 0xdb, 0x01, 0x23, 0x89, 0x9c, 0xb8, 0xda, 0x7b, 0x60, 0x4a, 0x66, 0x29, 0x28, + 0xc3, 0xc2, 0x0d, 0x38, 0x57, 0xa5, 0x70, 0xae, 0xa0, 0x1b, 0xf4, 0x5b, 0x01, 0xe7, 0xe8, 0x5d, + 0x58, 0x65, 0x81, 0x6d, 0xe1, 0xd0, 0x56, 0xb5, 0x64, 0xbe, 0x7b, 0x02, 0xbb, 0x1a, 0xda, 0xa5, + 0xaf, 0x56, 0xe0, 0xf6, 0x01, 0xf5, 0x31, 0xbb, 0xb8, 0x7c, 0xc3, 0xbe, 0x4c, 0xc1, 0x0e, 0x8e, + 0xf7, 0xd4, 0xb2, 0x27, 0x78, 0x2c, 0x2f, 0x62, 0xd2, 0x1b, 0xf9, 0xd1, 0xbc, 0x7a, 0x73, 0x7d, + 0x6c, 0x18, 0xaf, 0xe1, 0xeb, 0x63, 0xe7, 0x0f, 0x29, 0xb8, 0x73, 0xaa, 0x54, 0xb5, 0xec, 0xc0, + 0xef, 0x85, 0x5c, 0xa9, 0x80, 0x05, 0xa3, 0xe7, 0x96, 0x4b, 0xb9, 0x28, 0x2e, 0xaa, 0x7a, 0xf2, + 0x78, 0x9e, 0x1a, 0xd7, 0x98, 0x1b, 0xcf, 0xc5, 0xf0, 0x47, 0x0a, 0xdd, 0xb8, 0x75, 0x7a, 0x19, + 0xb9, 0x45, 0xb9, 0x28, 0xfd, 0x3b, 0x0d, 0xdb, 0x97, 0x0a, 0xa1, 0x4f, 0xa0, 0x38, 0x08, 0x38, + 0x15, 0xf4, 0x99, 0xf6, 0x9b, 0x35, 0x8a, 0xa6, 0x24, 0x91, 0x7f, 0x33, 0x96, 0x56, 0x9a, 0x76, + 0x87, 0xa1, 0x75, 0x00, 0x9b, 0x82, 0x85, 0xc4, 0x8a, 0xa7, 0xb9, 0x4e, 0x87, 0xdb, 0x33, 0x68, + 0x4d, 0x5f, 0x7c, 0xef, 0x9d, 0x08, 0x6c, 0x43, 0x8a, 0x1c, 0xc7, 0x12, 0xa8, 0x0e, 0xb9, 0x1e, + 0x76, 0xf9, 0x38, 0x48, 0x7a, 0x3e, 0xc8, 0xa6, 0x92, 0x19, 0xa1, 0xc4, 0x9a, 0xf8, 0x72, 0xd3, + 0x14, 0xc8, 0x52, 0x42, 0x4d, 0xda, 0xb1, 0xc4, 0x48, 0x93, 0x11, 0xc8, 0x72, 0x52, 0x4d, 0x46, + 0x28, 0x13, 0x65, 0x65, 0xe5, 0x7f, 0x2d, 0x2b, 0xab, 0xc9, 0xcb, 0x4a, 0xe9, 0xb7, 0x2b, 0x70, + 0xef, 0x28, 0x74, 0x05, 0x55, 0xbb, 0xf3, 0x7f, 0x9f, 0x3b, 0xbf, 0x49, 0xc1, 0xf6, 0x75, 0x49, + 0xd3, 0x99, 0xdb, 0x78, 
0x5c, 0x6f, 0x6b, 0x65, 0x3a, 0x65, 0xb6, 0xec, 0x4b, 0x92, 0xe5, 0x9f, + 0x69, 0xc8, 0x4d, 0xa7, 0x49, 0x07, 0x6e, 0x48, 0x56, 0xea, 0xc8, 0xc6, 0xf1, 0x05, 0x53, 0x64, + 0x6b, 0x24, 0x39, 0xca, 0x8f, 0x3e, 0x2c, 0xb1, 0xe0, 0x8c, 0x6b, 0xc3, 0xcc, 0x57, 0x6c, 0x58, + 0xc5, 0x08, 0xce, 0x0c, 0xb5, 0x40, 0xc9, 0x85, 0x65, 0xd5, 0x16, 0xca, 0x76, 0x70, 0xc0, 0x88, + 0x43, 0x6d, 0x21, 0x0f, 0x75, 0xd9, 0xa1, 0xe8, 0xfe, 0x7a, 0x73, 0x48, 0x56, 0x5d, 0x24, 0xfa, + 0x00, 0x80, 0x0a, 0xe2, 0x59, 0x76, 0x10, 0xfa, 0x22, 0x49, 0xda, 0x66, 0x24, 0x7b, 0x4d, 0x72, + 0x97, 0xfe, 0x94, 0x82, 0xb4, 0x11, 0x9c, 0xa1, 0xd7, 0x61, 0x1d, 0xdb, 0x22, 0xc4, 0xee, 0xc4, + 0x4a, 0xd9, 0x88, 0x16, 0x2d, 0x13, 0xc0, 0x2a, 0xf1, 0x05, 0xa3, 0x24, 0x76, 0xc2, 0xc9, 0xab, + 0x76, 0x42, 0xd4, 0x0e, 0xc7, 0xab, 0x94, 0xbe, 0x4a, 0x41, 0xa1, 0xe6, 0x86, 0x5c, 0x10, 0x46, + 0xfd, 0x7e, 0x1c, 0x73, 0x6d, 0xb8, 0xe1, 0xe0, 0x67, 0x94, 0x70, 0xeb, 0x34, 0x08, 0x5d, 0x87, + 0xfa, 0x16, 0xf5, 0x1d, 0x72, 0x9e, 0x68, 0x67, 0x51, 0x24, 0x79, 0x10, 0x09, 0x36, 0xa5, 0x5c, + 0xd4, 0xeb, 0x8c, 0xb5, 0x4f, 0x0e, 0x95, 0xd7, 0xac, 0xe1, 0x6d, 0x25, 0x79, 0xf7, 0x54, 0xd7, + 0x82, 0xa5, 0xff, 0xa4, 0xa1, 0xd0, 0x78, 0x86, 0xdd, 0x70, 0x22, 0x57, 0x4e, 0x01, 0xb1, 0x61, + 0x57, 0x39, 0x95, 0xa7, 0x0f, 0xe6, 0x79, 0x72, 0xa6, 0x1f, 0x3d, 0x5c, 0x30, 0x0a, 0x6c, 0xa6, + 0x49, 0xfd, 0x55, 0x0a, 0xee, 0xc6, 0x67, 0xd9, 0xe5, 0x75, 0x21, 0x32, 0xea, 0xc3, 0x97, 0x38, + 0xcc, 0x0e, 0x17, 0x8c, 0xdb, 0xa7, 0xd7, 0x1c, 0xed, 0xbf, 0x4f, 0x41, 0xd9, 0x93, 0x1b, 0xaf, + 0x8f, 0xa7, 0x2b, 0x14, 0x89, 0x0e, 0x86, 0x1f, 0xbe, 0x64, 0x08, 0x1d, 0x2e, 0x18, 0xf7, 0xbc, + 0x39, 0xf5, 0xf2, 0x14, 0x90, 0x3d, 0x8c, 0xa2, 0xe1, 0xfa, 0x4b, 0xc9, 0x1c, 0x3f, 0x13, 0x7f, + 0xd2, 0xf1, 0xf6, 0x34, 0xf1, 0x20, 0x03, 0xab, 0x1a, 0xb8, 0xf4, 0xaf, 0x3c, 0x64, 0xc7, 0xae, + 0x40, 0xa8, 0x07, 0xf9, 0xe1, 0x35, 0x2a, 0x50, 0x57, 0xca, 0x78, 0xd7, 0x3f, 0x7c, 0x81, 0x9b, + 0xd4, 0xf0, 0xbb, 0x13, 0x41, 0x18, 0x39, 0x31, 0x49, 0x40, 0x0f, 0x01, 0xb8, 0xc0, 0x4c, 0x44, + 0x17, 0xc7, 0x35, 0xb5, 0x42, 0x69, 0x26, 0x78, 0xbb, 0xf1, 0x0b, 0x80, 0x91, 0x51, 0xdc, 0xea, + 0x3e, 0x79, 0x02, 0xab, 0x8c, 0xf0, 0xd0, 0x15, 0xb2, 0x39, 0x4c, 0xbf, 0xa8, 0x66, 0x4d, 0x41, + 0xa2, 0xbb, 0xa9, 0xa1, 0x30, 0x8c, 0x18, 0x0b, 0xfd, 0x12, 0x10, 0x19, 0xa6, 0xc1, 0xd0, 0xf1, + 0xab, 0xc9, 0x1c, 0x3f, 0x93, 0x40, 0x46, 0x81, 0x4c, 0x93, 0x4a, 0x7f, 0xcf, 0x40, 0x6e, 0xca, + 0x31, 0xe8, 0x5b, 0xb0, 0xe9, 0xe1, 0x73, 0x8b, 0xc6, 0x5a, 0x45, 0xde, 0x4e, 0x1b, 0x1b, 0x1e, + 0x3e, 0x1f, 0xaa, 0xca, 0x51, 0x03, 0x32, 0xb2, 0x1b, 0x8e, 0xae, 0xc9, 0x8b, 0xea, 0x9a, 0xbc, + 0x3b, 0xf7, 0xd2, 0x1e, 0x70, 0xae, 0x6e, 0xc9, 0x6b, 0xae, 0xfe, 0x42, 0x77, 0x01, 0x5c, 0x82, + 0x99, 0x6f, 0x31, 0x2c, 0x88, 0x0a, 0xea, 0x94, 0x91, 0x51, 0x14, 0x03, 0x0b, 0x82, 0x9a, 0x50, + 0x70, 0x1f, 0x58, 0x8c, 0xf4, 0x43, 0x17, 0x33, 0xfa, 0x3c, 0xba, 0xb2, 0x27, 0x69, 0xfa, 0xf3, + 0xee, 0x03, 0x63, 0x42, 0x4a, 0x41, 0xed, 0x4f, 0x43, 0x2d, 0x27, 0x82, 0xda, 0x9f, 0x82, 0x92, + 0x25, 0x8f, 0xfa, 0x16, 0x23, 0xae, 0x6a, 0x74, 0xac, 0x01, 0x0b, 0x54, 0x25, 0x49, 0xd4, 0xe3, + 0x6c, 0x79, 0xd4, 0x37, 0xb4, 0xe4, 0xb1, 0x16, 0x94, 0xc1, 0x77, 0x86, 0x99, 0x67, 0xa9, 0x98, + 0xd2, 0x5b, 0x3c, 0x1b, 0x7c, 0x07, 0x41, 0xe0, 0xea, 0x13, 0x48, 0x72, 0x9b, 0x92, 0x59, 0x8a, + 0x12, 0xcc, 0xdc, 0x0b, 0x8b, 0x8b, 0x60, 0x70, 0x65, 0xdc, 0x8e, 0x89, 0x2a, 0x6e, 0x53, 0x04, + 0x03, 0x54, 0x81, 0x2d, 0xea, 0x0f, 0x42, 0x61, 0x4d, 0x3e, 0x00, 0x64, 0x76, 0xd2, 0xbb, 0x19, + 0xa3, 0xa0, 0xa6, 0x5a, 0x63, 0x37, 0x7b, 0xf4, 
0x73, 0x28, 0x38, 0x58, 0x60, 0x8b, 0x0f, 0x5c, + 0x2a, 0x64, 0x40, 0x3e, 0x09, 0x9c, 0x22, 0xa8, 0xbd, 0xdf, 0x9b, 0xb7, 0xf7, 0x75, 0x2c, 0xb0, + 0x29, 0xe5, 0x8e, 0x94, 0x98, 0x91, 0x73, 0x26, 0x09, 0xe8, 0x3d, 0x28, 0x8e, 0x81, 0xcb, 0x58, + 0xb5, 0x7a, 0x0c, 0xdb, 0x6a, 0x9b, 0xb2, 0x2a, 0x2e, 0xb6, 0x87, 0x22, 0x32, 0xb8, 0x3f, 0xd6, + 0x93, 0xe8, 0xcd, 0x09, 0xad, 0x22, 0x23, 0xf4, 0x43, 0xd4, 0x68, 0x91, 0xc8, 0x04, 0x84, 0x61, + 0x6b, 0x14, 0x6e, 0x16, 0x17, 0xf2, 0x5f, 0xff, 0x42, 0x3d, 0x02, 0x6d, 0xce, 0xcf, 0xa9, 0x56, + 0x1c, 0x97, 0xa6, 0x16, 0x34, 0x0a, 0xee, 0x34, 0x09, 0xdd, 0x07, 0x44, 0x7d, 0x2a, 0xa8, 0x6c, + 0x05, 0x46, 0x91, 0x9d, 0x57, 0x16, 0xe4, 0xf5, 0xcc, 0x10, 0x08, 0xfd, 0x3a, 0x05, 0x5b, 0xda, + 0xfb, 0xaa, 0xda, 0x9f, 0x11, 0xda, 0x7f, 0x22, 0x78, 0xb1, 0xa0, 0xea, 0x88, 0xf1, 0x12, 0x15, + 0x2e, 0x7a, 0x22, 0x53, 0x25, 0xfd, 0xd3, 0x08, 0x34, 0x6a, 0x0f, 0x0a, 0xee, 0x34, 0x1d, 0x3d, + 0x86, 0x8d, 0xf8, 0xd4, 0x8e, 0xf2, 0xf9, 0x86, 0xf2, 0xc7, 0xfd, 0xb9, 0x7b, 0xaa, 0x85, 0x54, + 0x4e, 0xaf, 0x3b, 0x63, 0x23, 0xd9, 0x0f, 0xf9, 0xa1, 0x67, 0xe9, 0x4a, 0xcf, 0x8b, 0xdb, 0xaa, + 0x86, 0x64, 0xfd, 0xd0, 0xd3, 0x27, 0x02, 0x2f, 0xd5, 0xe1, 0xe6, 0xe5, 0x2a, 0xa2, 0x3c, 0xa4, + 0xbf, 0x20, 0x17, 0xba, 0x87, 0x92, 0x9f, 0xe8, 0x06, 0x2c, 0xcb, 0xe2, 0x15, 0x55, 0x9a, 0x94, + 0x11, 0x0d, 0x3e, 0x58, 0x7c, 0x3f, 0x55, 0xfa, 0xc7, 0x12, 0xe4, 0xa6, 0x2a, 0x28, 0x7a, 0x00, + 0xcb, 0xe3, 0x3d, 0xcd, 0xa5, 0xbd, 0xdc, 0xdb, 0xfb, 0x51, 0x3a, 0x44, 0x9c, 0xe8, 0xfb, 0x90, + 0x75, 0x42, 0xfd, 0x72, 0xe8, 0x25, 0xba, 0x31, 0x41, 0xcc, 0x7f, 0xc4, 0x51, 0x75, 0xec, 0xa9, + 0x4f, 0xbd, 0x11, 0x24, 0xa9, 0x2b, 0xc3, 0xb7, 0x3d, 0xf5, 0x50, 0xf0, 0x10, 0x32, 0x2a, 0xe6, + 0x13, 0x3f, 0x31, 0xac, 0x49, 0x76, 0x25, 0x3a, 0x59, 0x43, 0x57, 0xa7, 0x6b, 0xa8, 0x0b, 0x1b, + 0x7a, 0x1b, 0x2c, 0xea, 0xf7, 0x02, 0x5e, 0x5c, 0x53, 0xb1, 0xf5, 0xe8, 0x25, 0xce, 0xa8, 0xf8, + 0x58, 0x6f, 0xfa, 0xbd, 0xc0, 0x58, 0xb7, 0x47, 0x03, 0x5e, 0xfa, 0x5b, 0x0a, 0xb2, 0x63, 0xb3, + 0xe8, 0x1e, 0x64, 0x6d, 0xd9, 0x90, 0x06, 0xd4, 0xb1, 0xa8, 0xa3, 0xcf, 0x12, 0x88, 0x49, 0x4d, + 0x07, 0xd5, 0x60, 0x33, 0x56, 0x8f, 0x61, 0x87, 0x86, 0x3c, 0x51, 0xe3, 0x18, 0x9b, 0x64, 0x28, + 0x11, 0xf4, 0x11, 0xc4, 0x5a, 0x58, 0x9c, 0x3e, 0x27, 0x49, 0xae, 0xcd, 0x59, 0x2d, 0x60, 0xd2, + 0xe7, 0xa4, 0xf4, 0x10, 0xb2, 0x63, 0x2f, 0xca, 0xf3, 0x02, 0x30, 0x33, 0x16, 0x80, 0x65, 0x1b, + 0x32, 0xc3, 0xe7, 0x5f, 0x54, 0x82, 0x9b, 0x47, 0x9d, 0x7a, 0xa3, 0x65, 0x75, 0x3f, 0x3b, 0x6e, + 0x58, 0x27, 0x6d, 0xf3, 0xb8, 0x51, 0x6b, 0x7e, 0xdc, 0x6c, 0xd4, 0xf3, 0x0b, 0x68, 0x1b, 0x0a, + 0xad, 0x66, 0xbb, 0x51, 0x35, 0x2c, 0xa3, 0xf1, 0xc8, 0x68, 0x98, 0x66, 0xb3, 0xd3, 0xce, 0xa7, + 0xd0, 0x37, 0x60, 0xab, 0xd5, 0x79, 0xd4, 0x34, 0xbb, 0xcd, 0xda, 0xf8, 0xc4, 0x22, 0x02, 0x58, + 0xf9, 0xc9, 0x51, 0xa3, 0xda, 0x36, 0xf3, 0xe9, 0x72, 0x07, 0xd6, 0xe2, 0xc3, 0x13, 0xdd, 0x82, + 0xed, 0x56, 0xc7, 0x34, 0xaf, 0x58, 0x42, 0x4a, 0x58, 0xe6, 0xe3, 0x93, 0xaa, 0xd1, 0xa8, 0x5b, + 0x92, 0x2f, 0x9f, 0x42, 0x05, 0xd8, 0x50, 0xe4, 0x56, 0xe7, 0x51, 0x44, 0x5a, 0x2c, 0x1f, 0xc2, + 0xfa, 0x78, 0xf6, 0xa2, 0xbb, 0x70, 0xab, 0xde, 0x34, 0xbb, 0xd5, 0x76, 0xad, 0x71, 0x19, 0xf0, + 0x06, 0x64, 0x1a, 0x27, 0xb5, 0x56, 0xb3, 0xde, 0xa8, 0x4a, 0x9d, 0x01, 0x56, 0x6a, 0x1d, 0xb3, + 0xd9, 0x6e, 0xe4, 0x17, 0xcb, 0xcf, 0x21, 0x37, 0x55, 0xdb, 0xd1, 0xeb, 0x70, 0xb7, 0x5e, 0xed, + 0x56, 0x2d, 0xf3, 0xb8, 0xd5, 0xec, 0x5a, 0x47, 0x8d, 0xee, 0x61, 0xa7, 0x3e, 0x05, 0x08, 0xb0, + 0x62, 0x54, 0xdb, 0xf5, 0xce, 0x91, 0x46, 0x3b, 0x31, 0xbb, 0x9d, 0xa3, 
0xfc, 0x22, 0xda, 0x04, + 0x30, 0x1b, 0x8f, 0x4f, 0x1a, 0xed, 0x6e, 0xb3, 0xda, 0xca, 0xa7, 0xd1, 0x3a, 0xac, 0xb5, 0x3b, + 0x11, 0x50, 0x7e, 0x49, 0xce, 0x56, 0x4f, 0xba, 0xf1, 0x78, 0xb9, 0xfc, 0x29, 0x14, 0x66, 0x6a, + 0x32, 0x7a, 0x03, 0xee, 0xb5, 0x1a, 0x55, 0xa3, 0x6d, 0x19, 0xd5, 0x6e, 0xc3, 0x32, 0xbb, 0xf2, + 0xdf, 0xa3, 0xcf, 0xa6, 0xd6, 0xcf, 0x41, 0x56, 0x6e, 0x86, 0x65, 0x36, 0xaa, 0x46, 0xed, 0x30, + 0x9f, 0x92, 0x0b, 0xd5, 0x3a, 0x6d, 0xe9, 0x81, 0x6e, 0x7e, 0xb1, 0xfc, 0x39, 0xe4, 0x1e, 0x11, + 0xa1, 0x7f, 0x4e, 0x79, 0x1a, 0x12, 0x2e, 0x64, 0x96, 0x0d, 0x58, 0xf0, 0x39, 0xb1, 0x45, 0x1c, + 0xc7, 0x19, 0x23, 0xa3, 0x29, 0x4d, 0x47, 0x4e, 0xcb, 0xc3, 0x86, 0x13, 0x35, 0x1d, 0x45, 0x49, + 0x46, 0x53, 0x9a, 0x0e, 0xba, 0x05, 0x6b, 0xd1, 0xcf, 0x0a, 0xd4, 0x51, 0xc1, 0x99, 0x31, 0x56, + 0xd5, 0xb8, 0xe9, 0x94, 0xff, 0x9c, 0x82, 0xc2, 0x31, 0x16, 0xf6, 0x93, 0xaf, 0x65, 0x39, 0xf4, + 0x2e, 0x2c, 0xab, 0x4f, 0x5d, 0xe3, 0xee, 0xcd, 0xfb, 0x35, 0x29, 0xe2, 0x2e, 0x7b, 0x80, 0xea, + 0xc4, 0x25, 0x82, 0x7c, 0x3d, 0x4e, 0xf9, 0x4b, 0x0a, 0x0a, 0x2d, 0xca, 0xa3, 0x2d, 0xe0, 0xaf, + 0x66, 0xb9, 0x1f, 0x40, 0x56, 0x76, 0xb6, 0x71, 0xab, 0x7e, 0xd5, 0xb3, 0xf3, 0xc9, 0xd8, 0xe9, + 0x00, 0x1e, 0x3e, 0x37, 0x74, 0x3b, 0x2e, 0x17, 0xc7, 0x7d, 0x62, 0x89, 0xe0, 0x0b, 0x12, 0x35, + 0xa1, 0x72, 0x71, 0xdc, 0x27, 0x5d, 0x49, 0x28, 0x87, 0x80, 0xc6, 0x15, 0xe6, 0x83, 0xc0, 0xe7, + 0x04, 0xbd, 0x07, 0x2b, 0xca, 0x24, 0xd9, 0x45, 0xa7, 0x93, 0xb8, 0x5b, 0xb3, 0xa3, 0x6f, 0x43, + 0xce, 0x27, 0xe7, 0xc2, 0x1a, 0x5b, 0x32, 0x32, 0x68, 0x43, 0x92, 0x8f, 0xe3, 0x65, 0xf7, 0xbf, + 0x4c, 0xc3, 0xba, 0x92, 0x34, 0x09, 0x7b, 0x46, 0x6d, 0x82, 0x7e, 0x0a, 0x6b, 0x71, 0xe8, 0xa2, + 0x6b, 0x7e, 0xb8, 0x9a, 0x0a, 0xef, 0xd2, 0x3c, 0xc5, 0xca, 0x0b, 0x88, 0x02, 0x8c, 0x2c, 0x44, + 0xd7, 0xfc, 0x8e, 0x35, 0xb3, 0x71, 0xa5, 0xfb, 0xc9, 0x98, 0x23, 0xa7, 0x95, 0x17, 0xd0, 0x2f, + 0x00, 0x46, 0x29, 0x71, 0xdd, 0x52, 0x33, 0x89, 0x93, 0xc4, 0x90, 0x13, 0xc8, 0x8e, 0xc5, 0x32, + 0xba, 0x46, 0xb9, 0xd9, 0x90, 0x2f, 0xdd, 0x9c, 0x09, 0x98, 0x86, 0x37, 0x10, 0x17, 0xe5, 0x85, + 0x03, 0x17, 0xee, 0xd8, 0x81, 0x77, 0x25, 0xd8, 0x01, 0x28, 0x9c, 0x63, 0x29, 0xf6, 0xb3, 0x1f, + 0x69, 0xae, 0x7e, 0xe0, 0x62, 0xbf, 0x5f, 0x09, 0x58, 0x7f, 0xaf, 0x4f, 0x7c, 0x05, 0xb9, 0x17, + 0x4d, 0xe1, 0x01, 0xe5, 0xb3, 0xbf, 0x61, 0x7f, 0x18, 0x7f, 0x9f, 0xae, 0x28, 0xe6, 0xb7, 0xff, + 0x1b, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x46, 0x69, 0xef, 0x5f, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model_reference.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model_reference.pb.go new file mode 100644 index 0000000..0ec61f9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/model_reference.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/v2/model_reference.proto + +package bigquery // import "google.golang.org/genproto/googleapis/cloud/bigquery/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Id path of a model. +type ModelReference struct { + // [Required] The ID of the project containing this model. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // [Required] The ID of the dataset containing this model. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // [Required] The ID of the model. The ID must contain only + // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum + // length is 1,024 characters. + ModelId string `protobuf:"bytes,3,opt,name=model_id,json=modelId,proto3" json:"model_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModelReference) Reset() { *m = ModelReference{} } +func (m *ModelReference) String() string { return proto.CompactTextString(m) } +func (*ModelReference) ProtoMessage() {} +func (*ModelReference) Descriptor() ([]byte, []int) { + return fileDescriptor_model_reference_02ff0d475f40d0ab, []int{0} +} +func (m *ModelReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModelReference.Unmarshal(m, b) +} +func (m *ModelReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModelReference.Marshal(b, m, deterministic) +} +func (dst *ModelReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModelReference.Merge(dst, src) +} +func (m *ModelReference) XXX_Size() int { + return xxx_messageInfo_ModelReference.Size(m) +} +func (m *ModelReference) XXX_DiscardUnknown() { + xxx_messageInfo_ModelReference.DiscardUnknown(m) +} + +var xxx_messageInfo_ModelReference proto.InternalMessageInfo + +func (m *ModelReference) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ModelReference) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *ModelReference) GetModelId() string { + if m != nil { + return m.ModelId + } + return "" +} + +func init() { + proto.RegisterType((*ModelReference)(nil), "google.cloud.bigquery.v2.ModelReference") +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/v2/model_reference.proto", fileDescriptor_model_reference_02ff0d475f40d0ab) +} + +var fileDescriptor_model_reference_02ff0d475f40d0ab = []byte{ + // 223 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x90, 0x31, 0x4b, 0xc4, 0x30, + 0x14, 0x80, 0x39, 0x05, 0xf5, 0x32, 0x38, 0xd4, 0xa5, 0xca, 0x09, 0xe2, 0xe4, 0x94, 0xc0, 0x39, + 0xba, 0x48, 0xb7, 0x0e, 0x82, 0x74, 0x74, 0x29, 0x69, 0xde, 0x33, 0x44, 0xd2, 0xbc, 0x98, 0xa6, + 0x05, 0xff, 0xbd, 0x34, 0x49, 0x07, 0x91, 0xdb, 0x92, 0xf7, 0x7d, 0x79, 0x7c, 0x84, 0x71, 0x4d, + 0xa4, 0x2d, 0x0a, 0x65, 0x69, 0x06, 0x31, 0x18, 0xfd, 0x3d, 0x63, 0xf8, 0x11, 0xcb, 0x51, 0x8c, + 0x04, 0x68, 0xfb, 0x80, 0x9f, 0x18, 0xd0, 0x29, 0xe4, 0x3e, 0x50, 0xa4, 0xaa, 0xce, 0x3e, 0x4f, + 0x3e, 0xdf, 0x7c, 0xbe, 0x1c, 0xef, 0x0e, 0x65, 0x93, 0xf4, 0x46, 0x48, 0xe7, 0x28, 0xca, 0x68, + 0xc8, 0x4d, 0xf9, 0xdd, 0xa3, 0x61, 0xd7, 0x6f, 0xeb, 0xc2, 0x6e, 0xdb, 0x57, 0xdd, 0x33, 0xe6, + 0x03, 0x7d, 0xa1, 0x8a, 0xbd, 0x81, 0x7a, 0xf7, 0xb0, 0x7b, 0xda, 0x77, 0xfb, 0x32, 0x69, 0x61, + 0xc5, 0x20, 0xa3, 0x9c, 0x30, 0xe1, 0xb3, 0x8c, 0xcb, 0xa4, 0x85, 0xea, 0x96, 0x5d, 0xe5, 0x40, + 0x03, 0xf5, 0x79, 0x82, 0x97, 0xe9, 0xde, 0x42, 0x33, 0xb3, 0x83, 0xa2, 
0x91, 0x9f, 0x0a, 0x6d, + 0x6e, 0xfe, 0x86, 0xbc, 0xaf, 0x7d, 0x1f, 0xaf, 0x45, 0xd7, 0x64, 0xa5, 0xd3, 0x9c, 0x82, 0x16, + 0x1a, 0x5d, 0x6a, 0x17, 0x19, 0x49, 0x6f, 0xa6, 0xff, 0xdf, 0xf4, 0xb2, 0x9d, 0x87, 0x8b, 0x24, + 0x3f, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x56, 0x77, 0xce, 0xa8, 0x52, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/standard_sql.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/standard_sql.pb.go new file mode 100644 index 0000000..57ec4b9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/bigquery/v2/standard_sql.pb.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/bigquery/v2/standard_sql.proto + +package bigquery // import "google.golang.org/genproto/googleapis/cloud/bigquery/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StandardSqlDataType_TypeKind int32 + +const ( + // Invalid type. + StandardSqlDataType_TYPE_KIND_UNSPECIFIED StandardSqlDataType_TypeKind = 0 + // Encoded as a string in decimal format. + StandardSqlDataType_INT64 StandardSqlDataType_TypeKind = 2 + // Encoded as a boolean "false" or "true". + StandardSqlDataType_BOOL StandardSqlDataType_TypeKind = 5 + // Encoded as a number, or string "NaN", "Infinity" or "-Infinity". + StandardSqlDataType_FLOAT64 StandardSqlDataType_TypeKind = 7 + // Encoded as a string value. + StandardSqlDataType_STRING StandardSqlDataType_TypeKind = 8 + // Encoded as a base64 string per RFC 4648, section 4. + StandardSqlDataType_BYTES StandardSqlDataType_TypeKind = 9 + // Encoded as an RFC 3339 timestamp with mandatory "Z" time zone string: + // 1985-04-12T23:20:50.52Z + StandardSqlDataType_TIMESTAMP StandardSqlDataType_TypeKind = 19 + // Encoded as RFC 3339 full-date format string: 1985-04-12 + StandardSqlDataType_DATE StandardSqlDataType_TypeKind = 10 + // Encoded as RFC 3339 partial-time format string: 23:20:50.52 + StandardSqlDataType_TIME StandardSqlDataType_TypeKind = 20 + // Encoded as RFC 3339 full-date "T" partial-time: 1985-04-12T23:20:50.52 + StandardSqlDataType_DATETIME StandardSqlDataType_TypeKind = 21 + // Encoded as WKT + StandardSqlDataType_GEOGRAPHY StandardSqlDataType_TypeKind = 22 + // Encoded as a decimal string. + StandardSqlDataType_NUMERIC StandardSqlDataType_TypeKind = 23 + // Encoded as a list with types matching Type.array_type. + StandardSqlDataType_ARRAY StandardSqlDataType_TypeKind = 16 + // Encoded as a list with fields of type Type.struct_type[i]. List is used + // because a JSON object cannot have duplicate field names. 
+ StandardSqlDataType_STRUCT StandardSqlDataType_TypeKind = 17 +) + +var StandardSqlDataType_TypeKind_name = map[int32]string{ + 0: "TYPE_KIND_UNSPECIFIED", + 2: "INT64", + 5: "BOOL", + 7: "FLOAT64", + 8: "STRING", + 9: "BYTES", + 19: "TIMESTAMP", + 10: "DATE", + 20: "TIME", + 21: "DATETIME", + 22: "GEOGRAPHY", + 23: "NUMERIC", + 16: "ARRAY", + 17: "STRUCT", +} +var StandardSqlDataType_TypeKind_value = map[string]int32{ + "TYPE_KIND_UNSPECIFIED": 0, + "INT64": 2, + "BOOL": 5, + "FLOAT64": 7, + "STRING": 8, + "BYTES": 9, + "TIMESTAMP": 19, + "DATE": 10, + "TIME": 20, + "DATETIME": 21, + "GEOGRAPHY": 22, + "NUMERIC": 23, + "ARRAY": 16, + "STRUCT": 17, +} + +func (x StandardSqlDataType_TypeKind) String() string { + return proto.EnumName(StandardSqlDataType_TypeKind_name, int32(x)) +} +func (StandardSqlDataType_TypeKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_standard_sql_f6e17ae27add020e, []int{0, 0} +} + +// The type of a variable, e.g., a function argument. +// Examples: +// INT64: {type_kind="INT64"} +// ARRAY: {type_kind="ARRAY", array_element_type="STRING"} +// STRUCT>: +// {type_kind="STRUCT", +// struct_type={fields=[ +// {name="x", type={type_kind="STRING"}}, +// {name="y", type={type_kind="ARRAY", array_element_type="DATE"}} +// ]}} +type StandardSqlDataType struct { + // Required. The top level type of this field. + // Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY"). + TypeKind StandardSqlDataType_TypeKind `protobuf:"varint,1,opt,name=type_kind,json=typeKind,proto3,enum=google.cloud.bigquery.v2.StandardSqlDataType_TypeKind" json:"type_kind,omitempty"` + // Types that are valid to be assigned to SubType: + // *StandardSqlDataType_ArrayElementType + // *StandardSqlDataType_StructType + SubType isStandardSqlDataType_SubType `protobuf_oneof:"sub_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StandardSqlDataType) Reset() { *m = StandardSqlDataType{} } +func (m *StandardSqlDataType) String() string { return proto.CompactTextString(m) } +func (*StandardSqlDataType) ProtoMessage() {} +func (*StandardSqlDataType) Descriptor() ([]byte, []int) { + return fileDescriptor_standard_sql_f6e17ae27add020e, []int{0} +} +func (m *StandardSqlDataType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StandardSqlDataType.Unmarshal(m, b) +} +func (m *StandardSqlDataType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StandardSqlDataType.Marshal(b, m, deterministic) +} +func (dst *StandardSqlDataType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StandardSqlDataType.Merge(dst, src) +} +func (m *StandardSqlDataType) XXX_Size() int { + return xxx_messageInfo_StandardSqlDataType.Size(m) +} +func (m *StandardSqlDataType) XXX_DiscardUnknown() { + xxx_messageInfo_StandardSqlDataType.DiscardUnknown(m) +} + +var xxx_messageInfo_StandardSqlDataType proto.InternalMessageInfo + +func (m *StandardSqlDataType) GetTypeKind() StandardSqlDataType_TypeKind { + if m != nil { + return m.TypeKind + } + return StandardSqlDataType_TYPE_KIND_UNSPECIFIED +} + +type isStandardSqlDataType_SubType interface { + isStandardSqlDataType_SubType() +} + +type StandardSqlDataType_ArrayElementType struct { + ArrayElementType *StandardSqlDataType `protobuf:"bytes,2,opt,name=array_element_type,json=arrayElementType,proto3,oneof"` +} + +type StandardSqlDataType_StructType struct { + StructType *StandardSqlStructType 
`protobuf:"bytes,3,opt,name=struct_type,json=structType,proto3,oneof"` +} + +func (*StandardSqlDataType_ArrayElementType) isStandardSqlDataType_SubType() {} + +func (*StandardSqlDataType_StructType) isStandardSqlDataType_SubType() {} + +func (m *StandardSqlDataType) GetSubType() isStandardSqlDataType_SubType { + if m != nil { + return m.SubType + } + return nil +} + +func (m *StandardSqlDataType) GetArrayElementType() *StandardSqlDataType { + if x, ok := m.GetSubType().(*StandardSqlDataType_ArrayElementType); ok { + return x.ArrayElementType + } + return nil +} + +func (m *StandardSqlDataType) GetStructType() *StandardSqlStructType { + if x, ok := m.GetSubType().(*StandardSqlDataType_StructType); ok { + return x.StructType + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*StandardSqlDataType) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StandardSqlDataType_OneofMarshaler, _StandardSqlDataType_OneofUnmarshaler, _StandardSqlDataType_OneofSizer, []interface{}{ + (*StandardSqlDataType_ArrayElementType)(nil), + (*StandardSqlDataType_StructType)(nil), + } +} + +func _StandardSqlDataType_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StandardSqlDataType) + // sub_type + switch x := m.SubType.(type) { + case *StandardSqlDataType_ArrayElementType: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayElementType); err != nil { + return err + } + case *StandardSqlDataType_StructType: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructType); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StandardSqlDataType.SubType has unexpected type %T", x) + } + return nil +} + +func _StandardSqlDataType_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StandardSqlDataType) + switch tag { + case 2: // sub_type.array_element_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StandardSqlDataType) + err := b.DecodeMessage(msg) + m.SubType = &StandardSqlDataType_ArrayElementType{msg} + return true, err + case 3: // sub_type.struct_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StandardSqlStructType) + err := b.DecodeMessage(msg) + m.SubType = &StandardSqlDataType_StructType{msg} + return true, err + default: + return false, nil + } +} + +func _StandardSqlDataType_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StandardSqlDataType) + // sub_type + switch x := m.SubType.(type) { + case *StandardSqlDataType_ArrayElementType: + s := proto.Size(x.ArrayElementType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StandardSqlDataType_StructType: + s := proto.Size(x.StructType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A field or a column. +type StandardSqlField struct { + // Optional. The name of this field. Can be absent for struct fields. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The type of this parameter. Absent if not explicitly + // specified (e.g., CREATE FUNCTION statement can omit the return type; + // in this case the output parameter does not have this "type" field). 
+ Type *StandardSqlDataType `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StandardSqlField) Reset() { *m = StandardSqlField{} } +func (m *StandardSqlField) String() string { return proto.CompactTextString(m) } +func (*StandardSqlField) ProtoMessage() {} +func (*StandardSqlField) Descriptor() ([]byte, []int) { + return fileDescriptor_standard_sql_f6e17ae27add020e, []int{1} +} +func (m *StandardSqlField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StandardSqlField.Unmarshal(m, b) +} +func (m *StandardSqlField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StandardSqlField.Marshal(b, m, deterministic) +} +func (dst *StandardSqlField) XXX_Merge(src proto.Message) { + xxx_messageInfo_StandardSqlField.Merge(dst, src) +} +func (m *StandardSqlField) XXX_Size() int { + return xxx_messageInfo_StandardSqlField.Size(m) +} +func (m *StandardSqlField) XXX_DiscardUnknown() { + xxx_messageInfo_StandardSqlField.DiscardUnknown(m) +} + +var xxx_messageInfo_StandardSqlField proto.InternalMessageInfo + +func (m *StandardSqlField) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StandardSqlField) GetType() *StandardSqlDataType { + if m != nil { + return m.Type + } + return nil +} + +type StandardSqlStructType struct { + Fields []*StandardSqlField `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StandardSqlStructType) Reset() { *m = StandardSqlStructType{} } +func (m *StandardSqlStructType) String() string { return proto.CompactTextString(m) } +func (*StandardSqlStructType) ProtoMessage() {} +func (*StandardSqlStructType) Descriptor() ([]byte, []int) { + return fileDescriptor_standard_sql_f6e17ae27add020e, []int{2} +} +func (m *StandardSqlStructType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StandardSqlStructType.Unmarshal(m, b) +} +func (m *StandardSqlStructType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StandardSqlStructType.Marshal(b, m, deterministic) +} +func (dst *StandardSqlStructType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StandardSqlStructType.Merge(dst, src) +} +func (m *StandardSqlStructType) XXX_Size() int { + return xxx_messageInfo_StandardSqlStructType.Size(m) +} +func (m *StandardSqlStructType) XXX_DiscardUnknown() { + xxx_messageInfo_StandardSqlStructType.DiscardUnknown(m) +} + +var xxx_messageInfo_StandardSqlStructType proto.InternalMessageInfo + +func (m *StandardSqlStructType) GetFields() []*StandardSqlField { + if m != nil { + return m.Fields + } + return nil +} + +func init() { + proto.RegisterType((*StandardSqlDataType)(nil), "google.cloud.bigquery.v2.StandardSqlDataType") + proto.RegisterType((*StandardSqlField)(nil), "google.cloud.bigquery.v2.StandardSqlField") + proto.RegisterType((*StandardSqlStructType)(nil), "google.cloud.bigquery.v2.StandardSqlStructType") + proto.RegisterEnum("google.cloud.bigquery.v2.StandardSqlDataType_TypeKind", StandardSqlDataType_TypeKind_name, StandardSqlDataType_TypeKind_value) +} + +func init() { + proto.RegisterFile("google/cloud/bigquery/v2/standard_sql.proto", fileDescriptor_standard_sql_f6e17ae27add020e) +} + +var fileDescriptor_standard_sql_f6e17ae27add020e = []byte{ + // 491 bytes of a gzipped FileDescriptorProto + 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0xd7, 0xad, 0xeb, 0xd2, 0x57, 0x40, 0xc6, 0xa3, 0x50, 0xd0, 0x0e, 0x55, 0x4f, 0x15, + 0x88, 0x44, 0x2a, 0x68, 0x17, 0x2e, 0x24, 0x6d, 0xda, 0x45, 0x5b, 0xd3, 0xc8, 0x49, 0x0f, 0x05, + 0xa1, 0xc8, 0x6d, 0x4c, 0x14, 0x91, 0xda, 0x69, 0x92, 0x4e, 0xea, 0xd7, 0xe3, 0x03, 0xf1, 0x19, + 0x90, 0xdd, 0x14, 0x4d, 0x62, 0x93, 0x06, 0x97, 0xe8, 0xd9, 0xef, 0xff, 0xff, 0x3d, 0x3f, 0x3b, + 0x0f, 0xde, 0xc5, 0x42, 0xc4, 0x29, 0x33, 0x56, 0xa9, 0xd8, 0x46, 0xc6, 0x32, 0x89, 0x37, 0x5b, + 0x96, 0xef, 0x8c, 0xdb, 0x81, 0x51, 0x94, 0x94, 0x47, 0x34, 0x8f, 0xc2, 0x62, 0x93, 0xea, 0x59, + 0x2e, 0x4a, 0x81, 0x3b, 0x7b, 0xb1, 0xae, 0xc4, 0xfa, 0x41, 0xac, 0xdf, 0x0e, 0xde, 0x5c, 0x54, + 0x18, 0x9a, 0x25, 0x06, 0xe5, 0x5c, 0x94, 0xb4, 0x4c, 0x04, 0x2f, 0xf6, 0xbe, 0xde, 0xaf, 0x13, + 0x38, 0xf7, 0x2b, 0x9c, 0xbf, 0x49, 0x47, 0xb4, 0xa4, 0xc1, 0x2e, 0x63, 0xd8, 0x87, 0x66, 0xb9, + 0xcb, 0x58, 0xf8, 0x23, 0xe1, 0x51, 0xa7, 0xd6, 0xad, 0xf5, 0x9f, 0x0d, 0x2e, 0xf5, 0x87, 0x6a, + 0xe8, 0xf7, 0x10, 0x74, 0xf9, 0xb9, 0x4e, 0x78, 0x44, 0xb4, 0xb2, 0x8a, 0xf0, 0x37, 0xc0, 0x34, + 0xcf, 0xe9, 0x2e, 0x64, 0x29, 0x5b, 0x33, 0x5e, 0x86, 0x32, 0xd3, 0x39, 0xee, 0xd6, 0xfa, 0xad, + 0xc1, 0xfb, 0x7f, 0xa2, 0x5f, 0x1d, 0x11, 0xa4, 0x50, 0xf6, 0x9e, 0xa4, 0xce, 0x4c, 0xa0, 0x55, + 0x94, 0xf9, 0x76, 0x55, 0x71, 0x4f, 0x14, 0xd7, 0x78, 0x14, 0xd7, 0x57, 0xbe, 0x8a, 0x0c, 0xc5, + 0x9f, 0x55, 0xef, 0x67, 0x0d, 0xb4, 0x43, 0x27, 0xf8, 0x35, 0xb4, 0x83, 0x85, 0x67, 0x87, 0xd7, + 0x8e, 0x3b, 0x0a, 0xe7, 0xae, 0xef, 0xd9, 0x43, 0x67, 0xec, 0xd8, 0x23, 0x74, 0x84, 0x9b, 0x70, + 0xea, 0xb8, 0xc1, 0xe5, 0x47, 0x74, 0x8c, 0x35, 0xa8, 0x5b, 0xb3, 0xd9, 0x0d, 0x3a, 0xc5, 0x2d, + 0x38, 0x1b, 0xdf, 0xcc, 0x4c, 0xb9, 0x7d, 0x86, 0x01, 0x1a, 0x7e, 0x40, 0x1c, 0x77, 0x82, 0x34, + 0xa9, 0xb6, 0x16, 0x81, 0xed, 0xa3, 0x26, 0x7e, 0x0a, 0xcd, 0xc0, 0x99, 0xda, 0x7e, 0x60, 0x4e, + 0x3d, 0x74, 0x2e, 0xcd, 0x23, 0x33, 0xb0, 0x11, 0xc8, 0x48, 0x26, 0xd0, 0x0b, 0xfc, 0x04, 0x34, + 0xb9, 0xa7, 0x56, 0x6d, 0x69, 0x98, 0xd8, 0xb3, 0x09, 0x31, 0xbd, 0xab, 0x05, 0x7a, 0x29, 0x6b, + 0xb8, 0xf3, 0xa9, 0x4d, 0x9c, 0x21, 0x7a, 0x25, 0xb9, 0x26, 0x21, 0xe6, 0x02, 0xa1, 0xaa, 0xdc, + 0x7c, 0x18, 0xa0, 0xe7, 0x16, 0x80, 0x56, 0x6c, 0x97, 0xea, 0x56, 0x7a, 0x09, 0xa0, 0x3b, 0x7d, + 0x8f, 0x13, 0x96, 0x46, 0x18, 0x43, 0x9d, 0xd3, 0x35, 0x53, 0xef, 0xdc, 0x24, 0x2a, 0xc6, 0x26, + 0xd4, 0xff, 0xfb, 0x75, 0x88, 0xb2, 0xf6, 0xbe, 0x42, 0xfb, 0xde, 0x2b, 0xc6, 0x16, 0x34, 0xbe, + 0xcb, 0xc2, 0x45, 0xa7, 0xd6, 0x3d, 0xe9, 0xb7, 0x06, 0x6f, 0x1f, 0x45, 0x57, 0x67, 0x25, 0x95, + 0xd3, 0xca, 0xe1, 0x62, 0x25, 0xd6, 0x0f, 0x1a, 0xad, 0xbb, 0x5d, 0x7a, 0xf2, 0x57, 0xff, 0xf2, + 0xb9, 0xd2, 0xc6, 0x22, 0xa5, 0x3c, 0xd6, 0x45, 0x1e, 0x1b, 0x31, 0xe3, 0x6a, 0x0c, 0x8c, 0x7d, + 0x8a, 0x66, 0x49, 0xf1, 0xf7, 0xb8, 0x7d, 0x3a, 0xc4, 0xcb, 0x86, 0x12, 0x7f, 0xf8, 0x1d, 0x00, + 0x00, 0xff, 0xff, 0xdf, 0x4e, 0x6d, 0x31, 0x9a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/billing/v1/cloud_billing.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/billing/v1/cloud_billing.pb.go new file mode 100644 index 0000000..7191e58 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/billing/v1/cloud_billing.pb.go @@ -0,0 +1,871 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/billing/v1/cloud_billing.proto + +package billing // import "google.golang.org/genproto/googleapis/cloud/billing/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A billing account in [Google Cloud +// Console](https://console.cloud.google.com/). You can assign a billing account +// to one or more projects. +type BillingAccount struct { + // The resource name of the billing account. The resource name has the form + // `billingAccounts/{billing_account_id}`. For example, + // `billingAccounts/012345-567890-ABCDEF` would be the resource name for + // billing account `012345-567890-ABCDEF`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // True if the billing account is open, and will therefore be charged for any + // usage on associated projects. False if the billing account is closed, and + // therefore projects associated with it will be unable to use paid services. + Open bool `protobuf:"varint,2,opt,name=open,proto3" json:"open,omitempty"` + // The display name given to the billing account, such as `My Billing + // Account`. This name is displayed in the Google Cloud Console. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BillingAccount) Reset() { *m = BillingAccount{} } +func (m *BillingAccount) String() string { return proto.CompactTextString(m) } +func (*BillingAccount) ProtoMessage() {} +func (*BillingAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{0} +} +func (m *BillingAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BillingAccount.Unmarshal(m, b) +} +func (m *BillingAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BillingAccount.Marshal(b, m, deterministic) +} +func (dst *BillingAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_BillingAccount.Merge(dst, src) +} +func (m *BillingAccount) XXX_Size() int { + return xxx_messageInfo_BillingAccount.Size(m) +} +func (m *BillingAccount) XXX_DiscardUnknown() { + xxx_messageInfo_BillingAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_BillingAccount proto.InternalMessageInfo + +func (m *BillingAccount) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BillingAccount) GetOpen() bool { + if m != nil { + return m.Open + } + return false +} + +func (m *BillingAccount) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +// Encapsulation of billing information for a Cloud Console project. A project +// has at most one associated billing account at a time (but a billing account +// can be assigned to multiple projects). 
+type ProjectBillingInfo struct { + // The resource name for the `ProjectBillingInfo`; has the form + // `projects/{project_id}/billingInfo`. For example, the resource name for the + // billing information for project `tokyo-rain-123` would be + // `projects/tokyo-rain-123/billingInfo`. This field is read-only. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The ID of the project that this `ProjectBillingInfo` represents, such as + // `tokyo-rain-123`. This is a convenience field so that you don't need to + // parse the `name` field to obtain a project ID. This field is read-only. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The resource name of the billing account associated with the project, if + // any. For example, `billingAccounts/012345-567890-ABCDEF`. + BillingAccountName string `protobuf:"bytes,3,opt,name=billing_account_name,json=billingAccountName,proto3" json:"billing_account_name,omitempty"` + // True if the project is associated with an open billing account, to which + // usage on the project is charged. False if the project is associated with a + // closed billing account, or no billing account at all, and therefore cannot + // use paid services. This field is read-only. + BillingEnabled bool `protobuf:"varint,4,opt,name=billing_enabled,json=billingEnabled,proto3" json:"billing_enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProjectBillingInfo) Reset() { *m = ProjectBillingInfo{} } +func (m *ProjectBillingInfo) String() string { return proto.CompactTextString(m) } +func (*ProjectBillingInfo) ProtoMessage() {} +func (*ProjectBillingInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{1} +} +func (m *ProjectBillingInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProjectBillingInfo.Unmarshal(m, b) +} +func (m *ProjectBillingInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProjectBillingInfo.Marshal(b, m, deterministic) +} +func (dst *ProjectBillingInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProjectBillingInfo.Merge(dst, src) +} +func (m *ProjectBillingInfo) XXX_Size() int { + return xxx_messageInfo_ProjectBillingInfo.Size(m) +} +func (m *ProjectBillingInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProjectBillingInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProjectBillingInfo proto.InternalMessageInfo + +func (m *ProjectBillingInfo) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ProjectBillingInfo) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ProjectBillingInfo) GetBillingAccountName() string { + if m != nil { + return m.BillingAccountName + } + return "" +} + +func (m *ProjectBillingInfo) GetBillingEnabled() bool { + if m != nil { + return m.BillingEnabled + } + return false +} + +// Request message for `GetBillingAccount`. +type GetBillingAccountRequest struct { + // The resource name of the billing account to retrieve. For example, + // `billingAccounts/012345-567890-ABCDEF`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBillingAccountRequest) Reset() { *m = GetBillingAccountRequest{} } +func (m *GetBillingAccountRequest) String() string { return proto.CompactTextString(m) } +func (*GetBillingAccountRequest) ProtoMessage() {} +func (*GetBillingAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{2} +} +func (m *GetBillingAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBillingAccountRequest.Unmarshal(m, b) +} +func (m *GetBillingAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBillingAccountRequest.Marshal(b, m, deterministic) +} +func (dst *GetBillingAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBillingAccountRequest.Merge(dst, src) +} +func (m *GetBillingAccountRequest) XXX_Size() int { + return xxx_messageInfo_GetBillingAccountRequest.Size(m) +} +func (m *GetBillingAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBillingAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBillingAccountRequest proto.InternalMessageInfo + +func (m *GetBillingAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `ListBillingAccounts`. +type ListBillingAccountsRequest struct { + // Requested page size. The maximum page size is 100; this is also the + // default. + PageSize int32 `protobuf:"varint,1,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results to return. This should be a + // `next_page_token` value returned from a previous `ListBillingAccounts` + // call. If unspecified, the first page of results is returned. 
+ PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBillingAccountsRequest) Reset() { *m = ListBillingAccountsRequest{} } +func (m *ListBillingAccountsRequest) String() string { return proto.CompactTextString(m) } +func (*ListBillingAccountsRequest) ProtoMessage() {} +func (*ListBillingAccountsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{3} +} +func (m *ListBillingAccountsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBillingAccountsRequest.Unmarshal(m, b) +} +func (m *ListBillingAccountsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBillingAccountsRequest.Marshal(b, m, deterministic) +} +func (dst *ListBillingAccountsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBillingAccountsRequest.Merge(dst, src) +} +func (m *ListBillingAccountsRequest) XXX_Size() int { + return xxx_messageInfo_ListBillingAccountsRequest.Size(m) +} +func (m *ListBillingAccountsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBillingAccountsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBillingAccountsRequest proto.InternalMessageInfo + +func (m *ListBillingAccountsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListBillingAccountsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for `ListBillingAccounts`. +type ListBillingAccountsResponse struct { + // A list of billing accounts. + BillingAccounts []*BillingAccount `protobuf:"bytes,1,rep,name=billing_accounts,json=billingAccounts,proto3" json:"billing_accounts,omitempty"` + // A token to retrieve the next page of results. To retrieve the next page, + // call `ListBillingAccounts` again with the `page_token` field set to this + // value. This field is empty if there are no more results to retrieve. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBillingAccountsResponse) Reset() { *m = ListBillingAccountsResponse{} } +func (m *ListBillingAccountsResponse) String() string { return proto.CompactTextString(m) } +func (*ListBillingAccountsResponse) ProtoMessage() {} +func (*ListBillingAccountsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{4} +} +func (m *ListBillingAccountsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBillingAccountsResponse.Unmarshal(m, b) +} +func (m *ListBillingAccountsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBillingAccountsResponse.Marshal(b, m, deterministic) +} +func (dst *ListBillingAccountsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBillingAccountsResponse.Merge(dst, src) +} +func (m *ListBillingAccountsResponse) XXX_Size() int { + return xxx_messageInfo_ListBillingAccountsResponse.Size(m) +} +func (m *ListBillingAccountsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBillingAccountsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBillingAccountsResponse proto.InternalMessageInfo + +func (m *ListBillingAccountsResponse) GetBillingAccounts() []*BillingAccount { + if m != nil { + return m.BillingAccounts + } + return nil +} + +func (m *ListBillingAccountsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `ListProjectBillingInfo`. +type ListProjectBillingInfoRequest struct { + // The resource name of the billing account associated with the projects that + // you want to list. For example, `billingAccounts/012345-567890-ABCDEF`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Requested page size. The maximum page size is 100; this is also the + // default. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous `ListProjectBillingInfo` + // call. If unspecified, the first page of results is returned. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProjectBillingInfoRequest) Reset() { *m = ListProjectBillingInfoRequest{} } +func (m *ListProjectBillingInfoRequest) String() string { return proto.CompactTextString(m) } +func (*ListProjectBillingInfoRequest) ProtoMessage() {} +func (*ListProjectBillingInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{5} +} +func (m *ListProjectBillingInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectBillingInfoRequest.Unmarshal(m, b) +} +func (m *ListProjectBillingInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProjectBillingInfoRequest.Marshal(b, m, deterministic) +} +func (dst *ListProjectBillingInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectBillingInfoRequest.Merge(dst, src) +} +func (m *ListProjectBillingInfoRequest) XXX_Size() int { + return xxx_messageInfo_ListProjectBillingInfoRequest.Size(m) +} +func (m *ListProjectBillingInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectBillingInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectBillingInfoRequest proto.InternalMessageInfo + +func (m *ListProjectBillingInfoRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListProjectBillingInfoRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProjectBillingInfoRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Request message for `ListProjectBillingInfoResponse`. +type ListProjectBillingInfoResponse struct { + // A list of `ProjectBillingInfo` resources representing the projects + // associated with the billing account. + ProjectBillingInfo []*ProjectBillingInfo `protobuf:"bytes,1,rep,name=project_billing_info,json=projectBillingInfo,proto3" json:"project_billing_info,omitempty"` + // A token to retrieve the next page of results. To retrieve the next page, + // call `ListProjectBillingInfo` again with the `page_token` field set to this + // value. This field is empty if there are no more results to retrieve. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProjectBillingInfoResponse) Reset() { *m = ListProjectBillingInfoResponse{} } +func (m *ListProjectBillingInfoResponse) String() string { return proto.CompactTextString(m) } +func (*ListProjectBillingInfoResponse) ProtoMessage() {} +func (*ListProjectBillingInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{6} +} +func (m *ListProjectBillingInfoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProjectBillingInfoResponse.Unmarshal(m, b) +} +func (m *ListProjectBillingInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProjectBillingInfoResponse.Marshal(b, m, deterministic) +} +func (dst *ListProjectBillingInfoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProjectBillingInfoResponse.Merge(dst, src) +} +func (m *ListProjectBillingInfoResponse) XXX_Size() int { + return xxx_messageInfo_ListProjectBillingInfoResponse.Size(m) +} +func (m *ListProjectBillingInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProjectBillingInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProjectBillingInfoResponse proto.InternalMessageInfo + +func (m *ListProjectBillingInfoResponse) GetProjectBillingInfo() []*ProjectBillingInfo { + if m != nil { + return m.ProjectBillingInfo + } + return nil +} + +func (m *ListProjectBillingInfoResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for `GetProjectBillingInfo`. +type GetProjectBillingInfoRequest struct { + // The resource name of the project for which billing information is + // retrieved. For example, `projects/tokyo-rain-123`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProjectBillingInfoRequest) Reset() { *m = GetProjectBillingInfoRequest{} } +func (m *GetProjectBillingInfoRequest) String() string { return proto.CompactTextString(m) } +func (*GetProjectBillingInfoRequest) ProtoMessage() {} +func (*GetProjectBillingInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{7} +} +func (m *GetProjectBillingInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProjectBillingInfoRequest.Unmarshal(m, b) +} +func (m *GetProjectBillingInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProjectBillingInfoRequest.Marshal(b, m, deterministic) +} +func (dst *GetProjectBillingInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProjectBillingInfoRequest.Merge(dst, src) +} +func (m *GetProjectBillingInfoRequest) XXX_Size() int { + return xxx_messageInfo_GetProjectBillingInfoRequest.Size(m) +} +func (m *GetProjectBillingInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProjectBillingInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProjectBillingInfoRequest proto.InternalMessageInfo + +func (m *GetProjectBillingInfoRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `UpdateProjectBillingInfo`. 
+type UpdateProjectBillingInfoRequest struct { + // The resource name of the project associated with the billing information + // that you want to update. For example, `projects/tokyo-rain-123`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The new billing information for the project. Read-only fields are ignored; + // thus, you may leave empty all fields except `billing_account_name`. + ProjectBillingInfo *ProjectBillingInfo `protobuf:"bytes,2,opt,name=project_billing_info,json=projectBillingInfo,proto3" json:"project_billing_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProjectBillingInfoRequest) Reset() { *m = UpdateProjectBillingInfoRequest{} } +func (m *UpdateProjectBillingInfoRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProjectBillingInfoRequest) ProtoMessage() {} +func (*UpdateProjectBillingInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_billing_afc96ac7b12ba62e, []int{8} +} +func (m *UpdateProjectBillingInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProjectBillingInfoRequest.Unmarshal(m, b) +} +func (m *UpdateProjectBillingInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProjectBillingInfoRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProjectBillingInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProjectBillingInfoRequest.Merge(dst, src) +} +func (m *UpdateProjectBillingInfoRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProjectBillingInfoRequest.Size(m) +} +func (m *UpdateProjectBillingInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProjectBillingInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProjectBillingInfoRequest proto.InternalMessageInfo + +func (m *UpdateProjectBillingInfoRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateProjectBillingInfoRequest) GetProjectBillingInfo() *ProjectBillingInfo { + if m != nil { + return m.ProjectBillingInfo + } + return nil +} + +func init() { + proto.RegisterType((*BillingAccount)(nil), "google.cloud.billing.v1.BillingAccount") + proto.RegisterType((*ProjectBillingInfo)(nil), "google.cloud.billing.v1.ProjectBillingInfo") + proto.RegisterType((*GetBillingAccountRequest)(nil), "google.cloud.billing.v1.GetBillingAccountRequest") + proto.RegisterType((*ListBillingAccountsRequest)(nil), "google.cloud.billing.v1.ListBillingAccountsRequest") + proto.RegisterType((*ListBillingAccountsResponse)(nil), "google.cloud.billing.v1.ListBillingAccountsResponse") + proto.RegisterType((*ListProjectBillingInfoRequest)(nil), "google.cloud.billing.v1.ListProjectBillingInfoRequest") + proto.RegisterType((*ListProjectBillingInfoResponse)(nil), "google.cloud.billing.v1.ListProjectBillingInfoResponse") + proto.RegisterType((*GetProjectBillingInfoRequest)(nil), "google.cloud.billing.v1.GetProjectBillingInfoRequest") + proto.RegisterType((*UpdateProjectBillingInfoRequest)(nil), "google.cloud.billing.v1.UpdateProjectBillingInfoRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// CloudBillingClient is the client API for CloudBilling service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudBillingClient interface { + // Gets information about a billing account. The current authenticated user + // must be an [owner of the billing + // account](https://support.google.com/cloud/answer/4430947). + GetBillingAccount(ctx context.Context, in *GetBillingAccountRequest, opts ...grpc.CallOption) (*BillingAccount, error) + // Lists the billing accounts that the current authenticated user + // [owns](https://support.google.com/cloud/answer/4430947). + ListBillingAccounts(ctx context.Context, in *ListBillingAccountsRequest, opts ...grpc.CallOption) (*ListBillingAccountsResponse, error) + // Lists the projects associated with a billing account. The current + // authenticated user must be an [owner of the billing + // account](https://support.google.com/cloud/answer/4430947). + ListProjectBillingInfo(ctx context.Context, in *ListProjectBillingInfoRequest, opts ...grpc.CallOption) (*ListProjectBillingInfoResponse, error) + // Gets the billing information for a project. The current authenticated user + // must have [permission to view the + // project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo + // ). + GetProjectBillingInfo(ctx context.Context, in *GetProjectBillingInfoRequest, opts ...grpc.CallOption) (*ProjectBillingInfo, error) + // Sets or updates the billing account associated with a project. You specify + // the new billing account by setting the `billing_account_name` in the + // `ProjectBillingInfo` resource to the resource name of a billing account. + // Associating a project with an open billing account enables billing on the + // project and allows charges for resource usage. If the project already had a + // billing account, this method changes the billing account used for resource + // usage charges. + // + // *Note:* Incurred charges that have not yet been reported in the transaction + // history of the Google Cloud Console may be billed to the new billing + // account, even if the charge occurred before the new billing account was + // assigned to the project. + // + // The current authenticated user must have ownership privileges for both the + // [project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo + // ) and the [billing + // account](https://support.google.com/cloud/answer/4430947). + // + // You can disable billing on the project by setting the + // `billing_account_name` field to empty. This action disassociates the + // current billing account from the project. Any billable activity of your + // in-use services will stop, and your application could stop functioning as + // expected. Any unbilled charges to date will be billed to the previously + // associated account. The current authenticated user must be either an owner + // of the project or an owner of the billing account for the project. + // + // Note that associating a project with a *closed* billing account will have + // much the same effect as disabling billing on the project: any paid + // resources used by the project will be shut down. Thus, unless you wish to + // disable billing, you should always call this method with the name of an + // *open* billing account. 
+ UpdateProjectBillingInfo(ctx context.Context, in *UpdateProjectBillingInfoRequest, opts ...grpc.CallOption) (*ProjectBillingInfo, error) +} + +type cloudBillingClient struct { + cc *grpc.ClientConn +} + +func NewCloudBillingClient(cc *grpc.ClientConn) CloudBillingClient { + return &cloudBillingClient{cc} +} + +func (c *cloudBillingClient) GetBillingAccount(ctx context.Context, in *GetBillingAccountRequest, opts ...grpc.CallOption) (*BillingAccount, error) { + out := new(BillingAccount) + err := c.cc.Invoke(ctx, "/google.cloud.billing.v1.CloudBilling/GetBillingAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBillingClient) ListBillingAccounts(ctx context.Context, in *ListBillingAccountsRequest, opts ...grpc.CallOption) (*ListBillingAccountsResponse, error) { + out := new(ListBillingAccountsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.billing.v1.CloudBilling/ListBillingAccounts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBillingClient) ListProjectBillingInfo(ctx context.Context, in *ListProjectBillingInfoRequest, opts ...grpc.CallOption) (*ListProjectBillingInfoResponse, error) { + out := new(ListProjectBillingInfoResponse) + err := c.cc.Invoke(ctx, "/google.cloud.billing.v1.CloudBilling/ListProjectBillingInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBillingClient) GetProjectBillingInfo(ctx context.Context, in *GetProjectBillingInfoRequest, opts ...grpc.CallOption) (*ProjectBillingInfo, error) { + out := new(ProjectBillingInfo) + err := c.cc.Invoke(ctx, "/google.cloud.billing.v1.CloudBilling/GetProjectBillingInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBillingClient) UpdateProjectBillingInfo(ctx context.Context, in *UpdateProjectBillingInfoRequest, opts ...grpc.CallOption) (*ProjectBillingInfo, error) { + out := new(ProjectBillingInfo) + err := c.cc.Invoke(ctx, "/google.cloud.billing.v1.CloudBilling/UpdateProjectBillingInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudBillingServer is the server API for CloudBilling service. +type CloudBillingServer interface { + // Gets information about a billing account. The current authenticated user + // must be an [owner of the billing + // account](https://support.google.com/cloud/answer/4430947). + GetBillingAccount(context.Context, *GetBillingAccountRequest) (*BillingAccount, error) + // Lists the billing accounts that the current authenticated user + // [owns](https://support.google.com/cloud/answer/4430947). + ListBillingAccounts(context.Context, *ListBillingAccountsRequest) (*ListBillingAccountsResponse, error) + // Lists the projects associated with a billing account. The current + // authenticated user must be an [owner of the billing + // account](https://support.google.com/cloud/answer/4430947). + ListProjectBillingInfo(context.Context, *ListProjectBillingInfoRequest) (*ListProjectBillingInfoResponse, error) + // Gets the billing information for a project. The current authenticated user + // must have [permission to view the + // project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo + // ). + GetProjectBillingInfo(context.Context, *GetProjectBillingInfoRequest) (*ProjectBillingInfo, error) + // Sets or updates the billing account associated with a project. 
You specify + // the new billing account by setting the `billing_account_name` in the + // `ProjectBillingInfo` resource to the resource name of a billing account. + // Associating a project with an open billing account enables billing on the + // project and allows charges for resource usage. If the project already had a + // billing account, this method changes the billing account used for resource + // usage charges. + // + // *Note:* Incurred charges that have not yet been reported in the transaction + // history of the Google Cloud Console may be billed to the new billing + // account, even if the charge occurred before the new billing account was + // assigned to the project. + // + // The current authenticated user must have ownership privileges for both the + // [project](https://cloud.google.com/docs/permissions-overview#h.bgs0oxofvnoo + // ) and the [billing + // account](https://support.google.com/cloud/answer/4430947). + // + // You can disable billing on the project by setting the + // `billing_account_name` field to empty. This action disassociates the + // current billing account from the project. Any billable activity of your + // in-use services will stop, and your application could stop functioning as + // expected. Any unbilled charges to date will be billed to the previously + // associated account. The current authenticated user must be either an owner + // of the project or an owner of the billing account for the project. + // + // Note that associating a project with a *closed* billing account will have + // much the same effect as disabling billing on the project: any paid + // resources used by the project will be shut down. Thus, unless you wish to + // disable billing, you should always call this method with the name of an + // *open* billing account. 
+ UpdateProjectBillingInfo(context.Context, *UpdateProjectBillingInfoRequest) (*ProjectBillingInfo, error) +} + +func RegisterCloudBillingServer(s *grpc.Server, srv CloudBillingServer) { + s.RegisterService(&_CloudBilling_serviceDesc, srv) +} + +func _CloudBilling_GetBillingAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBillingAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBillingServer).GetBillingAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.billing.v1.CloudBilling/GetBillingAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBillingServer).GetBillingAccount(ctx, req.(*GetBillingAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBilling_ListBillingAccounts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBillingAccountsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBillingServer).ListBillingAccounts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.billing.v1.CloudBilling/ListBillingAccounts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBillingServer).ListBillingAccounts(ctx, req.(*ListBillingAccountsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBilling_ListProjectBillingInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProjectBillingInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBillingServer).ListProjectBillingInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.billing.v1.CloudBilling/ListProjectBillingInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBillingServer).ListProjectBillingInfo(ctx, req.(*ListProjectBillingInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBilling_GetProjectBillingInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProjectBillingInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBillingServer).GetProjectBillingInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.billing.v1.CloudBilling/GetProjectBillingInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBillingServer).GetProjectBillingInfo(ctx, req.(*GetProjectBillingInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBilling_UpdateProjectBillingInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProjectBillingInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBillingServer).UpdateProjectBillingInfo(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.billing.v1.CloudBilling/UpdateProjectBillingInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBillingServer).UpdateProjectBillingInfo(ctx, req.(*UpdateProjectBillingInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudBilling_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.billing.v1.CloudBilling", + HandlerType: (*CloudBillingServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetBillingAccount", + Handler: _CloudBilling_GetBillingAccount_Handler, + }, + { + MethodName: "ListBillingAccounts", + Handler: _CloudBilling_ListBillingAccounts_Handler, + }, + { + MethodName: "ListProjectBillingInfo", + Handler: _CloudBilling_ListProjectBillingInfo_Handler, + }, + { + MethodName: "GetProjectBillingInfo", + Handler: _CloudBilling_GetProjectBillingInfo_Handler, + }, + { + MethodName: "UpdateProjectBillingInfo", + Handler: _CloudBilling_UpdateProjectBillingInfo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/billing/v1/cloud_billing.proto", +} + +func init() { + proto.RegisterFile("google/cloud/billing/v1/cloud_billing.proto", fileDescriptor_cloud_billing_afc96ac7b12ba62e) +} + +var fileDescriptor_cloud_billing_afc96ac7b12ba62e = []byte{ + // 667 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x4e, 0xd4, 0x40, + 0x14, 0xce, 0x00, 0x12, 0xf6, 0x80, 0x20, 0x03, 0xe8, 0x66, 0x17, 0x10, 0xea, 0x0f, 0x28, 0xb1, + 0x15, 0xf0, 0xdf, 0xa8, 0x11, 0x63, 0x08, 0x89, 0x31, 0x9b, 0xaa, 0x89, 0xd1, 0x98, 0x66, 0x76, + 0x3b, 0x34, 0xd5, 0x32, 0x53, 0x77, 0x0a, 0x51, 0x8c, 0x37, 0xbe, 0x82, 0x7a, 0xe1, 0x85, 0x37, + 0x5e, 0xe8, 0x2b, 0x78, 0xed, 0x2b, 0xf8, 0x0a, 0xde, 0xfb, 0x0a, 0x66, 0xa6, 0x53, 0x65, 0xbb, + 0x9d, 0x85, 0xc6, 0xbb, 0xe6, 0x9b, 0x73, 0xfa, 0x7d, 0xe7, 0x9b, 0xef, 0x6c, 0x17, 0x96, 0x02, + 0xce, 0x83, 0x88, 0x3a, 0xad, 0x88, 0x6f, 0xfb, 0x4e, 0x33, 0x8c, 0xa2, 0x90, 0x05, 0xce, 0xce, + 0x72, 0x0a, 0x78, 0x1a, 0xb0, 0xe3, 0x36, 0x4f, 0x38, 0x3e, 0x96, 0x16, 0xdb, 0xea, 0xcc, 0xce, + 0xce, 0x76, 0x96, 0x6b, 0xd3, 0xfa, 0x2d, 0x24, 0x0e, 0x1d, 0xc2, 0x18, 0x4f, 0x48, 0x12, 0x72, + 0x26, 0xd2, 0x36, 0xeb, 0x29, 0x8c, 0xae, 0xa5, 0xb5, 0xb7, 0x5b, 0x2d, 0xbe, 0xcd, 0x12, 0x8c, + 0x61, 0x80, 0x91, 0x2d, 0x5a, 0x45, 0x73, 0x68, 0xb1, 0xe2, 0xaa, 0x67, 0x89, 0xf1, 0x98, 0xb2, + 0x6a, 0xdf, 0x1c, 0x5a, 0x1c, 0x72, 0xd5, 0x33, 0x9e, 0x87, 0x11, 0x3f, 0x14, 0x71, 0x44, 0x5e, + 0x7b, 0xaa, 0xbe, 0x5f, 0xd5, 0x0f, 0x6b, 0xec, 0x3e, 0xd9, 0xa2, 0xd6, 0x17, 0x04, 0xb8, 0xd1, + 0xe6, 0xcf, 0x69, 0x2b, 0xd1, 0x24, 0x1b, 0x6c, 0x93, 0x17, 0x32, 0xcc, 0x00, 0xc4, 0x69, 0xa5, + 0x17, 0xfa, 0x8a, 0xa7, 0xe2, 0x56, 0x34, 0xb2, 0xe1, 0xe3, 0xf3, 0x30, 0xa9, 0x47, 0xf2, 0x48, + 0xaa, 0x73, 0x2f, 0x29, 0x6e, 0x76, 0x8c, 0x20, 0xb9, 0xf1, 0x02, 0x8c, 0x65, 0x1d, 0x94, 0x91, + 0x66, 0x44, 0xfd, 0xea, 0x80, 0x52, 0x3f, 0xaa, 0xe1, 0xbb, 0x29, 0x6a, 0xd9, 0x50, 0x5d, 0xa7, + 0x49, 0xa7, 0x09, 0x2e, 0x7d, 0xb9, 0x4d, 0x45, 0xa1, 0x17, 0xd6, 0x63, 0xa8, 0xdd, 0x0b, 0x45, + 0xae, 0x41, 0x64, 0x1d, 0x75, 0xa8, 0xc4, 0x24, 0xa0, 0x9e, 0x08, 0x77, 0xd3, 0xb6, 0x43, 0xee, + 0x90, 0x04, 0x1e, 0x84, 0xbb, 0xe9, 0x90, 0xf2, 0x30, 0xe1, 0x2f, 0xb4, 0x99, 0x72, 0x48, 0x12, + 0xd0, 0x87, 0x12, 0xb0, 0x3e, 0x21, 0xa8, 0x17, 0xbe, 0x5a, 0xc4, 0x9c, 0x09, 0x8a, 0x5d, 0x38, + 0x92, 0x33, 0x41, 0x54, 0xd1, 0x5c, 0xff, 0xe2, 0xf0, 0xca, 0x82, 0x6d, 0xb8, 0x7d, 0x3b, 0x37, 
+ 0xd7, 0x58, 0xa7, 0x53, 0x02, 0x9f, 0x86, 0x31, 0x46, 0x5f, 0x25, 0x5e, 0x97, 0xae, 0xc3, 0x12, + 0x6e, 0xfc, 0xd5, 0xc6, 0x61, 0x46, 0x4a, 0xeb, 0xbe, 0xcd, 0x1e, 0x56, 0x75, 0x9a, 0xd1, 0xd7, + 0xd3, 0x8c, 0xfe, 0xbc, 0x19, 0xdf, 0x10, 0xcc, 0x9a, 0x18, 0xb5, 0x1f, 0xcf, 0x60, 0x32, 0xcb, + 0x4c, 0xe6, 0x4b, 0xc8, 0x36, 0xb9, 0xf6, 0x64, 0xc9, 0xe8, 0x49, 0xc1, 0x2b, 0x71, 0xdc, 0x1d, + 0xd3, 0x83, 0x5a, 0xb3, 0x02, 0xd3, 0xeb, 0xb4, 0x9c, 0x33, 0xd6, 0x07, 0x04, 0xc7, 0x1f, 0xc5, + 0x3e, 0x49, 0x68, 0x39, 0x47, 0x4d, 0x23, 0x4b, 0x61, 0xff, 0x3f, 0xf2, 0xca, 0xef, 0x41, 0x18, + 0xb9, 0x23, 0x7b, 0x35, 0x88, 0x3f, 0x22, 0x18, 0xef, 0xda, 0x0e, 0xbc, 0x6c, 0xe4, 0x31, 0x6d, + 0x52, 0xed, 0xa0, 0x09, 0xb5, 0x4e, 0xbe, 0xfb, 0xf9, 0xeb, 0x7d, 0xdf, 0x2c, 0x9e, 0x96, 0x3f, + 0x74, 0x6f, 0xe4, 0xd0, 0x37, 0x72, 0x99, 0x75, 0xce, 0xbe, 0xc5, 0x9f, 0x11, 0x4c, 0x14, 0xac, + 0x0a, 0x5e, 0x35, 0xd2, 0x98, 0x77, 0xb6, 0x76, 0xa1, 0x5c, 0x53, 0x9a, 0x3e, 0xab, 0xae, 0x84, + 0x4e, 0xe1, 0x09, 0x29, 0x34, 0xbf, 0x56, 0xdf, 0x11, 0x1c, 0x2d, 0x4e, 0x2f, 0xbe, 0xd4, 0x93, + 0xcd, 0x18, 0x87, 0xda, 0xe5, 0xd2, 0x7d, 0x5a, 0xe8, 0x39, 0x25, 0x74, 0x01, 0x9f, 0xea, 0xe5, + 0xa8, 0xa3, 0xd3, 0x20, 0xf0, 0x57, 0x04, 0x53, 0x85, 0x79, 0xc6, 0x17, 0x7b, 0x5d, 0xbb, 0x59, + 0x78, 0x99, 0x54, 0x5a, 0x67, 0x94, 0xd8, 0x13, 0x78, 0xfe, 0x9f, 0xd8, 0x4c, 0x99, 0x54, 0xd9, + 0xdc, 0x23, 0xe7, 0x07, 0x82, 0xaa, 0x69, 0x87, 0xf0, 0x15, 0x23, 0xe9, 0x3e, 0x6b, 0x57, 0x4e, + 0xee, 0x2d, 0x25, 0xf7, 0x6a, 0x6d, 0x7f, 0xb9, 0xd7, 0x0a, 0x17, 0x77, 0xad, 0x0d, 0xf5, 0x16, + 0xdf, 0x32, 0x51, 0xae, 0x8d, 0xef, 0xdd, 0xc6, 0x86, 0xfc, 0x62, 0x37, 0xd0, 0x93, 0x9b, 0xba, + 0x3a, 0xe0, 0x11, 0x61, 0x81, 0xcd, 0xdb, 0x81, 0x13, 0x50, 0xa6, 0xbe, 0xe7, 0x4e, 0x7a, 0x44, + 0xe2, 0x50, 0x74, 0xfd, 0x6d, 0xb8, 0xae, 0x1f, 0x9b, 0x83, 0xaa, 0x74, 0xf5, 0x4f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xc0, 0x01, 0x24, 0x32, 0x60, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/resources.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/resources.pb.go new file mode 100644 index 0000000..4983ea4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/resources.pb.go @@ -0,0 +1,966 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/binaryauthorization/v1beta1/resources.proto + +package binaryauthorization // import "google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Policy_GlobalPolicyEvaluationMode int32 + +const ( + // Not specified: DISABLE is assumed. + Policy_GLOBAL_POLICY_EVALUATION_MODE_UNSPECIFIED Policy_GlobalPolicyEvaluationMode = 0 + // Enables global policy evaluation. + Policy_ENABLE Policy_GlobalPolicyEvaluationMode = 1 + // Disables global policy evaluation. 
+ Policy_DISABLE Policy_GlobalPolicyEvaluationMode = 2 +) + +var Policy_GlobalPolicyEvaluationMode_name = map[int32]string{ + 0: "GLOBAL_POLICY_EVALUATION_MODE_UNSPECIFIED", + 1: "ENABLE", + 2: "DISABLE", +} +var Policy_GlobalPolicyEvaluationMode_value = map[string]int32{ + "GLOBAL_POLICY_EVALUATION_MODE_UNSPECIFIED": 0, + "ENABLE": 1, + "DISABLE": 2, +} + +func (x Policy_GlobalPolicyEvaluationMode) String() string { + return proto.EnumName(Policy_GlobalPolicyEvaluationMode_name, int32(x)) +} +func (Policy_GlobalPolicyEvaluationMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{0, 0} +} + +type AdmissionRule_EvaluationMode int32 + +const ( + // Do not use. + AdmissionRule_EVALUATION_MODE_UNSPECIFIED AdmissionRule_EvaluationMode = 0 + // This rule allows all all pod creations. + AdmissionRule_ALWAYS_ALLOW AdmissionRule_EvaluationMode = 1 + // This rule allows a pod creation if all the attestors listed in + // 'require_attestations_by' have valid attestations for all of the + // images in the pod spec. + AdmissionRule_REQUIRE_ATTESTATION AdmissionRule_EvaluationMode = 2 + // This rule denies all pod creations. + AdmissionRule_ALWAYS_DENY AdmissionRule_EvaluationMode = 3 +) + +var AdmissionRule_EvaluationMode_name = map[int32]string{ + 0: "EVALUATION_MODE_UNSPECIFIED", + 1: "ALWAYS_ALLOW", + 2: "REQUIRE_ATTESTATION", + 3: "ALWAYS_DENY", +} +var AdmissionRule_EvaluationMode_value = map[string]int32{ + "EVALUATION_MODE_UNSPECIFIED": 0, + "ALWAYS_ALLOW": 1, + "REQUIRE_ATTESTATION": 2, + "ALWAYS_DENY": 3, +} + +func (x AdmissionRule_EvaluationMode) String() string { + return proto.EnumName(AdmissionRule_EvaluationMode_name, int32(x)) +} +func (AdmissionRule_EvaluationMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{2, 0} +} + +// Defines the possible actions when a pod creation is denied by an admission +// rule. +type AdmissionRule_EnforcementMode int32 + +const ( + // Do not use. + AdmissionRule_ENFORCEMENT_MODE_UNSPECIFIED AdmissionRule_EnforcementMode = 0 + // Enforce the admission rule by blocking the pod creation. + AdmissionRule_ENFORCED_BLOCK_AND_AUDIT_LOG AdmissionRule_EnforcementMode = 1 + // Dryrun mode: Audit logging only. This will allow the pod creation as if + // the admission request had specified break-glass. + AdmissionRule_DRYRUN_AUDIT_LOG_ONLY AdmissionRule_EnforcementMode = 2 +) + +var AdmissionRule_EnforcementMode_name = map[int32]string{ + 0: "ENFORCEMENT_MODE_UNSPECIFIED", + 1: "ENFORCED_BLOCK_AND_AUDIT_LOG", + 2: "DRYRUN_AUDIT_LOG_ONLY", +} +var AdmissionRule_EnforcementMode_value = map[string]int32{ + "ENFORCEMENT_MODE_UNSPECIFIED": 0, + "ENFORCED_BLOCK_AND_AUDIT_LOG": 1, + "DRYRUN_AUDIT_LOG_ONLY": 2, +} + +func (x AdmissionRule_EnforcementMode) String() string { + return proto.EnumName(AdmissionRule_EnforcementMode_name, int32(x)) +} +func (AdmissionRule_EnforcementMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{2, 1} +} + +// Represents a signature algorithm and other information necessary to verify +// signatures with a given public key. +// This is based primarily on the public key types supported by Tink's +// PemKeyType, which is in turn based on KMS's supported signing algorithms. +// See https://cloud.google.com/kms/docs/algorithms. In the future, BinAuthz +// might support additional public key types independently of Tink and/or KMS. +type PkixPublicKey_SignatureAlgorithm int32 + +const ( + // Not specified. 
+ PkixPublicKey_SIGNATURE_ALGORITHM_UNSPECIFIED PkixPublicKey_SignatureAlgorithm = 0 + // RSASSA-PSS 2048 bit key with a SHA256 digest. + PkixPublicKey_RSA_PSS_2048_SHA256 PkixPublicKey_SignatureAlgorithm = 1 + // RSASSA-PSS 3072 bit key with a SHA256 digest. + PkixPublicKey_RSA_PSS_3072_SHA256 PkixPublicKey_SignatureAlgorithm = 2 + // RSASSA-PSS 4096 bit key with a SHA256 digest. + PkixPublicKey_RSA_PSS_4096_SHA256 PkixPublicKey_SignatureAlgorithm = 3 + // RSASSA-PSS 4096 bit key with a SHA512 digest. + PkixPublicKey_RSA_PSS_4096_SHA512 PkixPublicKey_SignatureAlgorithm = 4 + // RSASSA-PKCS1-v1_5 with a 2048 bit key and a SHA256 digest. + PkixPublicKey_RSA_SIGN_PKCS1_2048_SHA256 PkixPublicKey_SignatureAlgorithm = 5 + // RSASSA-PKCS1-v1_5 with a 3072 bit key and a SHA256 digest. + PkixPublicKey_RSA_SIGN_PKCS1_3072_SHA256 PkixPublicKey_SignatureAlgorithm = 6 + // RSASSA-PKCS1-v1_5 with a 4096 bit key and a SHA256 digest. + PkixPublicKey_RSA_SIGN_PKCS1_4096_SHA256 PkixPublicKey_SignatureAlgorithm = 7 + // RSASSA-PKCS1-v1_5 with a 4096 bit key and a SHA512 digest. + PkixPublicKey_RSA_SIGN_PKCS1_4096_SHA512 PkixPublicKey_SignatureAlgorithm = 8 + // ECDSA on the NIST P-256 curve with a SHA256 digest. + PkixPublicKey_ECDSA_P256_SHA256 PkixPublicKey_SignatureAlgorithm = 9 + // ECDSA on the NIST P-384 curve with a SHA384 digest. + PkixPublicKey_ECDSA_P384_SHA384 PkixPublicKey_SignatureAlgorithm = 10 + // ECDSA on the NIST P-521 curve with a SHA512 digest. + PkixPublicKey_ECDSA_P521_SHA512 PkixPublicKey_SignatureAlgorithm = 11 +) + +var PkixPublicKey_SignatureAlgorithm_name = map[int32]string{ + 0: "SIGNATURE_ALGORITHM_UNSPECIFIED", + 1: "RSA_PSS_2048_SHA256", + 2: "RSA_PSS_3072_SHA256", + 3: "RSA_PSS_4096_SHA256", + 4: "RSA_PSS_4096_SHA512", + 5: "RSA_SIGN_PKCS1_2048_SHA256", + 6: "RSA_SIGN_PKCS1_3072_SHA256", + 7: "RSA_SIGN_PKCS1_4096_SHA256", + 8: "RSA_SIGN_PKCS1_4096_SHA512", + 9: "ECDSA_P256_SHA256", + 10: "ECDSA_P384_SHA384", + 11: "ECDSA_P521_SHA512", +} +var PkixPublicKey_SignatureAlgorithm_value = map[string]int32{ + "SIGNATURE_ALGORITHM_UNSPECIFIED": 0, + "RSA_PSS_2048_SHA256": 1, + "RSA_PSS_3072_SHA256": 2, + "RSA_PSS_4096_SHA256": 3, + "RSA_PSS_4096_SHA512": 4, + "RSA_SIGN_PKCS1_2048_SHA256": 5, + "RSA_SIGN_PKCS1_3072_SHA256": 6, + "RSA_SIGN_PKCS1_4096_SHA256": 7, + "RSA_SIGN_PKCS1_4096_SHA512": 8, + "ECDSA_P256_SHA256": 9, + "ECDSA_P384_SHA384": 10, + "ECDSA_P521_SHA512": 11, +} + +func (x PkixPublicKey_SignatureAlgorithm) String() string { + return proto.EnumName(PkixPublicKey_SignatureAlgorithm_name, int32(x)) +} +func (PkixPublicKey_SignatureAlgorithm) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{5, 0} +} + +// A [policy][google.cloud.binaryauthorization.v1beta1.Policy] for container image binary authorization. +type Policy struct { + // Output only. The resource name, in the format `projects/*/policy`. There is + // at most one policy per project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. A descriptive comment. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Optional. Controls the evaluation of a Google-maintained global admission + // policy for common system-level images. Images not covered by the global + // policy will be subject to the project admission policy. This setting + // has no effect when specified inside a global admission policy. 
+ GlobalPolicyEvaluationMode Policy_GlobalPolicyEvaluationMode `protobuf:"varint,7,opt,name=global_policy_evaluation_mode,json=globalPolicyEvaluationMode,proto3,enum=google.cloud.binaryauthorization.v1beta1.Policy_GlobalPolicyEvaluationMode" json:"global_policy_evaluation_mode,omitempty"` + // Optional. Admission policy whitelisting. A matching admission request will + // always be permitted. This feature is typically used to exclude Google or + // third-party infrastructure images from Binary Authorization policies. + AdmissionWhitelistPatterns []*AdmissionWhitelistPattern `protobuf:"bytes,2,rep,name=admission_whitelist_patterns,json=admissionWhitelistPatterns,proto3" json:"admission_whitelist_patterns,omitempty"` + // Optional. Per-cluster admission rules. Cluster spec format: + // `location.clusterId`. There can be at most one admission rule per cluster + // spec. + // A `location` is either a compute zone (e.g. us-central1-a) or a region + // (e.g. us-central1). + // For `clusterId` syntax restrictions see + // https://cloud.google.com/container-engine/reference/rest/v1/projects.zones.clusters. + ClusterAdmissionRules map[string]*AdmissionRule `protobuf:"bytes,3,rep,name=cluster_admission_rules,json=clusterAdmissionRules,proto3" json:"cluster_admission_rules,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. Default admission rule for a cluster without a per-cluster, per- + // kubernetes-service-account, or per-istio-service-identity admission rule. + DefaultAdmissionRule *AdmissionRule `protobuf:"bytes,4,opt,name=default_admission_rule,json=defaultAdmissionRule,proto3" json:"default_admission_rule,omitempty"` + // Output only. Time when the policy was last updated. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Policy) Reset() { *m = Policy{} } +func (m *Policy) String() string { return proto.CompactTextString(m) } +func (*Policy) ProtoMessage() {} +func (*Policy) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{0} +} +func (m *Policy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Policy.Unmarshal(m, b) +} +func (m *Policy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Policy.Marshal(b, m, deterministic) +} +func (dst *Policy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Policy.Merge(dst, src) +} +func (m *Policy) XXX_Size() int { + return xxx_messageInfo_Policy.Size(m) +} +func (m *Policy) XXX_DiscardUnknown() { + xxx_messageInfo_Policy.DiscardUnknown(m) +} + +var xxx_messageInfo_Policy proto.InternalMessageInfo + +func (m *Policy) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Policy) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Policy) GetGlobalPolicyEvaluationMode() Policy_GlobalPolicyEvaluationMode { + if m != nil { + return m.GlobalPolicyEvaluationMode + } + return Policy_GLOBAL_POLICY_EVALUATION_MODE_UNSPECIFIED +} + +func (m *Policy) GetAdmissionWhitelistPatterns() []*AdmissionWhitelistPattern { + if m != nil { + return m.AdmissionWhitelistPatterns + } + return nil +} + +func (m *Policy) GetClusterAdmissionRules() map[string]*AdmissionRule { + if m != nil { + return m.ClusterAdmissionRules + } + return nil +} + +func (m *Policy) 
GetDefaultAdmissionRule() *AdmissionRule { + if m != nil { + return m.DefaultAdmissionRule + } + return nil +} + +func (m *Policy) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// An [admission whitelist pattern][google.cloud.binaryauthorization.v1beta1.AdmissionWhitelistPattern] exempts images +// from checks by [admission rules][google.cloud.binaryauthorization.v1beta1.AdmissionRule]. +type AdmissionWhitelistPattern struct { + // An image name pattern to whitelist, in the form `registry/path/to/image`. + // This supports a trailing `*` as a wildcard, but this is allowed only in + // text after the `registry/` part. + NamePattern string `protobuf:"bytes,1,opt,name=name_pattern,json=namePattern,proto3" json:"name_pattern,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdmissionWhitelistPattern) Reset() { *m = AdmissionWhitelistPattern{} } +func (m *AdmissionWhitelistPattern) String() string { return proto.CompactTextString(m) } +func (*AdmissionWhitelistPattern) ProtoMessage() {} +func (*AdmissionWhitelistPattern) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{1} +} +func (m *AdmissionWhitelistPattern) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdmissionWhitelistPattern.Unmarshal(m, b) +} +func (m *AdmissionWhitelistPattern) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdmissionWhitelistPattern.Marshal(b, m, deterministic) +} +func (dst *AdmissionWhitelistPattern) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdmissionWhitelistPattern.Merge(dst, src) +} +func (m *AdmissionWhitelistPattern) XXX_Size() int { + return xxx_messageInfo_AdmissionWhitelistPattern.Size(m) +} +func (m *AdmissionWhitelistPattern) XXX_DiscardUnknown() { + xxx_messageInfo_AdmissionWhitelistPattern.DiscardUnknown(m) +} + +var xxx_messageInfo_AdmissionWhitelistPattern proto.InternalMessageInfo + +func (m *AdmissionWhitelistPattern) GetNamePattern() string { + if m != nil { + return m.NamePattern + } + return "" +} + +// An [admission rule][google.cloud.binaryauthorization.v1beta1.AdmissionRule] specifies either that all container images +// used in a pod creation request must be attested to by one or more +// [attestors][google.cloud.binaryauthorization.v1beta1.Attestor], that all pod creations will be allowed, or that all +// pod creations will be denied. +// +// Images matching an [admission whitelist pattern][google.cloud.binaryauthorization.v1beta1.AdmissionWhitelistPattern] +// are exempted from admission rules and will never block a pod creation. +type AdmissionRule struct { + // Required. How this admission rule will be evaluated. + EvaluationMode AdmissionRule_EvaluationMode `protobuf:"varint,1,opt,name=evaluation_mode,json=evaluationMode,proto3,enum=google.cloud.binaryauthorization.v1beta1.AdmissionRule_EvaluationMode" json:"evaluation_mode,omitempty"` + // Optional. The resource names of the attestors that must attest to + // a container image, in the format `projects/*/attestors/*`. Each + // attestor must exist before a policy can reference it. To add an attestor + // to a policy the principal issuing the policy change request must be able + // to read the attestor resource. + // + // Note: this field must be non-empty when the evaluation_mode field specifies + // REQUIRE_ATTESTATION, otherwise it must be empty. 
+ RequireAttestationsBy []string `protobuf:"bytes,2,rep,name=require_attestations_by,json=requireAttestationsBy,proto3" json:"require_attestations_by,omitempty"` + // Required. The action when a pod creation is denied by the admission rule. + EnforcementMode AdmissionRule_EnforcementMode `protobuf:"varint,3,opt,name=enforcement_mode,json=enforcementMode,proto3,enum=google.cloud.binaryauthorization.v1beta1.AdmissionRule_EnforcementMode" json:"enforcement_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdmissionRule) Reset() { *m = AdmissionRule{} } +func (m *AdmissionRule) String() string { return proto.CompactTextString(m) } +func (*AdmissionRule) ProtoMessage() {} +func (*AdmissionRule) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{2} +} +func (m *AdmissionRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdmissionRule.Unmarshal(m, b) +} +func (m *AdmissionRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdmissionRule.Marshal(b, m, deterministic) +} +func (dst *AdmissionRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdmissionRule.Merge(dst, src) +} +func (m *AdmissionRule) XXX_Size() int { + return xxx_messageInfo_AdmissionRule.Size(m) +} +func (m *AdmissionRule) XXX_DiscardUnknown() { + xxx_messageInfo_AdmissionRule.DiscardUnknown(m) +} + +var xxx_messageInfo_AdmissionRule proto.InternalMessageInfo + +func (m *AdmissionRule) GetEvaluationMode() AdmissionRule_EvaluationMode { + if m != nil { + return m.EvaluationMode + } + return AdmissionRule_EVALUATION_MODE_UNSPECIFIED +} + +func (m *AdmissionRule) GetRequireAttestationsBy() []string { + if m != nil { + return m.RequireAttestationsBy + } + return nil +} + +func (m *AdmissionRule) GetEnforcementMode() AdmissionRule_EnforcementMode { + if m != nil { + return m.EnforcementMode + } + return AdmissionRule_ENFORCEMENT_MODE_UNSPECIFIED +} + +// An [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to container image +// artifacts. An existing attestor cannot be modified except where +// indicated. +type Attestor struct { + // Required. The resource name, in the format: + // `projects/*/attestors/*`. This field may not be updated. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. A descriptive comment. This field may be updated. + // The field may be displayed in chooser dialogs. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Required. Identifies an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] that attests to a + // container image artifact. This determines how an attestation will + // be stored, and how it will be used during policy + // enforcement. Updates may not change the attestor type, but individual + // attestor fields may be updated + // + // Types that are valid to be assigned to AttestorType: + // *Attestor_UserOwnedDrydockNote + AttestorType isAttestor_AttestorType `protobuf_oneof:"attestor_type"` + // Output only. Time when the attestor was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Attestor) Reset() { *m = Attestor{} } +func (m *Attestor) String() string { return proto.CompactTextString(m) } +func (*Attestor) ProtoMessage() {} +func (*Attestor) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{3} +} +func (m *Attestor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Attestor.Unmarshal(m, b) +} +func (m *Attestor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Attestor.Marshal(b, m, deterministic) +} +func (dst *Attestor) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attestor.Merge(dst, src) +} +func (m *Attestor) XXX_Size() int { + return xxx_messageInfo_Attestor.Size(m) +} +func (m *Attestor) XXX_DiscardUnknown() { + xxx_messageInfo_Attestor.DiscardUnknown(m) +} + +var xxx_messageInfo_Attestor proto.InternalMessageInfo + +func (m *Attestor) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Attestor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isAttestor_AttestorType interface { + isAttestor_AttestorType() +} + +type Attestor_UserOwnedDrydockNote struct { + UserOwnedDrydockNote *UserOwnedDrydockNote `protobuf:"bytes,3,opt,name=user_owned_drydock_note,json=userOwnedDrydockNote,proto3,oneof"` +} + +func (*Attestor_UserOwnedDrydockNote) isAttestor_AttestorType() {} + +func (m *Attestor) GetAttestorType() isAttestor_AttestorType { + if m != nil { + return m.AttestorType + } + return nil +} + +func (m *Attestor) GetUserOwnedDrydockNote() *UserOwnedDrydockNote { + if x, ok := m.GetAttestorType().(*Attestor_UserOwnedDrydockNote); ok { + return x.UserOwnedDrydockNote + } + return nil +} + +func (m *Attestor) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Attestor) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Attestor_OneofMarshaler, _Attestor_OneofUnmarshaler, _Attestor_OneofSizer, []interface{}{ + (*Attestor_UserOwnedDrydockNote)(nil), + } +} + +func _Attestor_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Attestor) + // attestor_type + switch x := m.AttestorType.(type) { + case *Attestor_UserOwnedDrydockNote: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UserOwnedDrydockNote); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Attestor.AttestorType has unexpected type %T", x) + } + return nil +} + +func _Attestor_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Attestor) + switch tag { + case 3: // attestor_type.user_owned_drydock_note + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UserOwnedDrydockNote) + err := b.DecodeMessage(msg) + m.AttestorType = &Attestor_UserOwnedDrydockNote{msg} + return true, err + default: + return false, nil + } +} + +func _Attestor_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Attestor) + // attestor_type + switch x := m.AttestorType.(type) { + case *Attestor_UserOwnedDrydockNote: + s := proto.Size(x.UserOwnedDrydockNote) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An [user owned drydock note][google.cloud.binaryauthorization.v1beta1.UserOwnedDrydockNote] references a Drydock +// ATTESTATION_AUTHORITY Note created by the user. +type UserOwnedDrydockNote struct { + // Required. The Drydock resource name of a ATTESTATION_AUTHORITY Note, + // created by the user, in the format: `projects/*/notes/*` (or the legacy + // `providers/*/notes/*`). This field may not be updated. + // + // An attestation by this attestor is stored as a Drydock + // ATTESTATION_AUTHORITY Occurrence that names a container image and that + // links to this Note. Drydock is an external dependency. + NoteReference string `protobuf:"bytes,1,opt,name=note_reference,json=noteReference,proto3" json:"note_reference,omitempty"` + // Optional. Public keys that verify attestations signed by this + // attestor. This field may be updated. + // + // If this field is non-empty, one of the specified public keys must + // verify that an attestation was signed by this attestor for the + // image specified in the admission request. + // + // If this field is empty, this attestor always returns that no + // valid attestations exist. + PublicKeys []*AttestorPublicKey `protobuf:"bytes,2,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty"` + // Output only. This field will contain the service account email address + // that this Attestor will use as the principal when querying Container + // Analysis. Attestor administrators must grant this service account the + // IAM role needed to read attestations from the [note_reference][Note] in + // Container Analysis (`containeranalysis.notes.occurrences.viewer`). + // + // This email address is fixed for the lifetime of the Attestor, but callers + // should not make any other assumptions about the service account email; + // future versions may use an email based on a different naming pattern. 
+ DelegationServiceAccountEmail string `protobuf:"bytes,3,opt,name=delegation_service_account_email,json=delegationServiceAccountEmail,proto3" json:"delegation_service_account_email,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UserOwnedDrydockNote) Reset() { *m = UserOwnedDrydockNote{} } +func (m *UserOwnedDrydockNote) String() string { return proto.CompactTextString(m) } +func (*UserOwnedDrydockNote) ProtoMessage() {} +func (*UserOwnedDrydockNote) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{4} +} +func (m *UserOwnedDrydockNote) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UserOwnedDrydockNote.Unmarshal(m, b) +} +func (m *UserOwnedDrydockNote) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UserOwnedDrydockNote.Marshal(b, m, deterministic) +} +func (dst *UserOwnedDrydockNote) XXX_Merge(src proto.Message) { + xxx_messageInfo_UserOwnedDrydockNote.Merge(dst, src) +} +func (m *UserOwnedDrydockNote) XXX_Size() int { + return xxx_messageInfo_UserOwnedDrydockNote.Size(m) +} +func (m *UserOwnedDrydockNote) XXX_DiscardUnknown() { + xxx_messageInfo_UserOwnedDrydockNote.DiscardUnknown(m) +} + +var xxx_messageInfo_UserOwnedDrydockNote proto.InternalMessageInfo + +func (m *UserOwnedDrydockNote) GetNoteReference() string { + if m != nil { + return m.NoteReference + } + return "" +} + +func (m *UserOwnedDrydockNote) GetPublicKeys() []*AttestorPublicKey { + if m != nil { + return m.PublicKeys + } + return nil +} + +func (m *UserOwnedDrydockNote) GetDelegationServiceAccountEmail() string { + if m != nil { + return m.DelegationServiceAccountEmail + } + return "" +} + +// A public key in the PkixPublicKey format (see +// https://tools.ietf.org/html/rfc5280#section-4.1.2.7 for details). +// Public keys of this type are typically textually encoded using the PEM +// format. +type PkixPublicKey struct { + // A PEM-encoded public key, as described in + // https://tools.ietf.org/html/rfc7468#section-13 + PublicKeyPem string `protobuf:"bytes,1,opt,name=public_key_pem,json=publicKeyPem,proto3" json:"public_key_pem,omitempty"` + // The signature algorithm used to verify a message against a signature using + // this key. + // These signature algorithm must match the structure and any object + // identifiers encoded in `public_key_pem` (i.e. this algorithm must match + // that of the public key). 
+ SignatureAlgorithm PkixPublicKey_SignatureAlgorithm `protobuf:"varint,2,opt,name=signature_algorithm,json=signatureAlgorithm,proto3,enum=google.cloud.binaryauthorization.v1beta1.PkixPublicKey_SignatureAlgorithm" json:"signature_algorithm,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PkixPublicKey) Reset() { *m = PkixPublicKey{} } +func (m *PkixPublicKey) String() string { return proto.CompactTextString(m) } +func (*PkixPublicKey) ProtoMessage() {} +func (*PkixPublicKey) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{5} +} +func (m *PkixPublicKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PkixPublicKey.Unmarshal(m, b) +} +func (m *PkixPublicKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PkixPublicKey.Marshal(b, m, deterministic) +} +func (dst *PkixPublicKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_PkixPublicKey.Merge(dst, src) +} +func (m *PkixPublicKey) XXX_Size() int { + return xxx_messageInfo_PkixPublicKey.Size(m) +} +func (m *PkixPublicKey) XXX_DiscardUnknown() { + xxx_messageInfo_PkixPublicKey.DiscardUnknown(m) +} + +var xxx_messageInfo_PkixPublicKey proto.InternalMessageInfo + +func (m *PkixPublicKey) GetPublicKeyPem() string { + if m != nil { + return m.PublicKeyPem + } + return "" +} + +func (m *PkixPublicKey) GetSignatureAlgorithm() PkixPublicKey_SignatureAlgorithm { + if m != nil { + return m.SignatureAlgorithm + } + return PkixPublicKey_SIGNATURE_ALGORITHM_UNSPECIFIED +} + +// An [attestor public key][google.cloud.binaryauthorization.v1beta1.AttestorPublicKey] that will be used to verify +// attestations signed by this attestor. +type AttestorPublicKey struct { + // Optional. A descriptive comment. This field may be updated. + Comment string `protobuf:"bytes,1,opt,name=comment,proto3" json:"comment,omitempty"` + // The ID of this public key. + // Signatures verified by BinAuthz must include the ID of the public key that + // can be used to verify them, and that ID must match the contents of this + // field exactly. + // Additional restrictions on this field can be imposed based on which public + // key type is encapsulated. See the documentation on `public_key` cases below + // for details. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Required. A public key reference or serialized instance. This field may be + // updated. 
+ // + // Types that are valid to be assigned to PublicKey: + // *AttestorPublicKey_AsciiArmoredPgpPublicKey + // *AttestorPublicKey_PkixPublicKey + PublicKey isAttestorPublicKey_PublicKey `protobuf_oneof:"public_key"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttestorPublicKey) Reset() { *m = AttestorPublicKey{} } +func (m *AttestorPublicKey) String() string { return proto.CompactTextString(m) } +func (*AttestorPublicKey) ProtoMessage() {} +func (*AttestorPublicKey) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_15311d1ac079a106, []int{6} +} +func (m *AttestorPublicKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestorPublicKey.Unmarshal(m, b) +} +func (m *AttestorPublicKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestorPublicKey.Marshal(b, m, deterministic) +} +func (dst *AttestorPublicKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestorPublicKey.Merge(dst, src) +} +func (m *AttestorPublicKey) XXX_Size() int { + return xxx_messageInfo_AttestorPublicKey.Size(m) +} +func (m *AttestorPublicKey) XXX_DiscardUnknown() { + xxx_messageInfo_AttestorPublicKey.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestorPublicKey proto.InternalMessageInfo + +func (m *AttestorPublicKey) GetComment() string { + if m != nil { + return m.Comment + } + return "" +} + +func (m *AttestorPublicKey) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +type isAttestorPublicKey_PublicKey interface { + isAttestorPublicKey_PublicKey() +} + +type AttestorPublicKey_AsciiArmoredPgpPublicKey struct { + AsciiArmoredPgpPublicKey string `protobuf:"bytes,3,opt,name=ascii_armored_pgp_public_key,json=asciiArmoredPgpPublicKey,proto3,oneof"` +} + +type AttestorPublicKey_PkixPublicKey struct { + PkixPublicKey *PkixPublicKey `protobuf:"bytes,5,opt,name=pkix_public_key,json=pkixPublicKey,proto3,oneof"` +} + +func (*AttestorPublicKey_AsciiArmoredPgpPublicKey) isAttestorPublicKey_PublicKey() {} + +func (*AttestorPublicKey_PkixPublicKey) isAttestorPublicKey_PublicKey() {} + +func (m *AttestorPublicKey) GetPublicKey() isAttestorPublicKey_PublicKey { + if m != nil { + return m.PublicKey + } + return nil +} + +func (m *AttestorPublicKey) GetAsciiArmoredPgpPublicKey() string { + if x, ok := m.GetPublicKey().(*AttestorPublicKey_AsciiArmoredPgpPublicKey); ok { + return x.AsciiArmoredPgpPublicKey + } + return "" +} + +func (m *AttestorPublicKey) GetPkixPublicKey() *PkixPublicKey { + if x, ok := m.GetPublicKey().(*AttestorPublicKey_PkixPublicKey); ok { + return x.PkixPublicKey + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AttestorPublicKey) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttestorPublicKey_OneofMarshaler, _AttestorPublicKey_OneofUnmarshaler, _AttestorPublicKey_OneofSizer, []interface{}{ + (*AttestorPublicKey_AsciiArmoredPgpPublicKey)(nil), + (*AttestorPublicKey_PkixPublicKey)(nil), + } +} + +func _AttestorPublicKey_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttestorPublicKey) + // public_key + switch x := m.PublicKey.(type) { + case *AttestorPublicKey_AsciiArmoredPgpPublicKey: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AsciiArmoredPgpPublicKey) + case *AttestorPublicKey_PkixPublicKey: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PkixPublicKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AttestorPublicKey.PublicKey has unexpected type %T", x) + } + return nil +} + +func _AttestorPublicKey_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttestorPublicKey) + switch tag { + case 3: // public_key.ascii_armored_pgp_public_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PublicKey = &AttestorPublicKey_AsciiArmoredPgpPublicKey{x} + return true, err + case 5: // public_key.pkix_public_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PkixPublicKey) + err := b.DecodeMessage(msg) + m.PublicKey = &AttestorPublicKey_PkixPublicKey{msg} + return true, err + default: + return false, nil + } +} + +func _AttestorPublicKey_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttestorPublicKey) + // public_key + switch x := m.PublicKey.(type) { + case *AttestorPublicKey_AsciiArmoredPgpPublicKey: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AsciiArmoredPgpPublicKey))) + n += len(x.AsciiArmoredPgpPublicKey) + case *AttestorPublicKey_PkixPublicKey: + s := proto.Size(x.PkixPublicKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Policy)(nil), "google.cloud.binaryauthorization.v1beta1.Policy") + proto.RegisterMapType((map[string]*AdmissionRule)(nil), "google.cloud.binaryauthorization.v1beta1.Policy.ClusterAdmissionRulesEntry") + proto.RegisterType((*AdmissionWhitelistPattern)(nil), "google.cloud.binaryauthorization.v1beta1.AdmissionWhitelistPattern") + proto.RegisterType((*AdmissionRule)(nil), "google.cloud.binaryauthorization.v1beta1.AdmissionRule") + proto.RegisterType((*Attestor)(nil), "google.cloud.binaryauthorization.v1beta1.Attestor") + proto.RegisterType((*UserOwnedDrydockNote)(nil), "google.cloud.binaryauthorization.v1beta1.UserOwnedDrydockNote") + proto.RegisterType((*PkixPublicKey)(nil), "google.cloud.binaryauthorization.v1beta1.PkixPublicKey") + proto.RegisterType((*AttestorPublicKey)(nil), "google.cloud.binaryauthorization.v1beta1.AttestorPublicKey") + proto.RegisterEnum("google.cloud.binaryauthorization.v1beta1.Policy_GlobalPolicyEvaluationMode", Policy_GlobalPolicyEvaluationMode_name, Policy_GlobalPolicyEvaluationMode_value) + proto.RegisterEnum("google.cloud.binaryauthorization.v1beta1.AdmissionRule_EvaluationMode", AdmissionRule_EvaluationMode_name, AdmissionRule_EvaluationMode_value) + 
proto.RegisterEnum("google.cloud.binaryauthorization.v1beta1.AdmissionRule_EnforcementMode", AdmissionRule_EnforcementMode_name, AdmissionRule_EnforcementMode_value) + proto.RegisterEnum("google.cloud.binaryauthorization.v1beta1.PkixPublicKey_SignatureAlgorithm", PkixPublicKey_SignatureAlgorithm_name, PkixPublicKey_SignatureAlgorithm_value) +} + +func init() { + proto.RegisterFile("google/cloud/binaryauthorization/v1beta1/resources.proto", fileDescriptor_resources_15311d1ac079a106) +} + +var fileDescriptor_resources_15311d1ac079a106 = []byte{ + // 1214 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x5f, 0x73, 0xdb, 0xc4, + 0x17, 0x8d, 0x9c, 0x7f, 0xcd, 0x75, 0xe2, 0xb8, 0xdb, 0xe6, 0x17, 0xd7, 0xbf, 0x94, 0x1a, 0x03, + 0x33, 0xe1, 0x01, 0xbb, 0x71, 0x92, 0x36, 0xd0, 0x99, 0x0e, 0xb2, 0xad, 0x3a, 0x26, 0x8e, 0x6d, + 0x64, 0x9b, 0x4e, 0x80, 0x99, 0x65, 0x2d, 0x6d, 0x54, 0x4d, 0x24, 0xad, 0x58, 0x49, 0x6d, 0x0d, + 0x4f, 0x0c, 0xc3, 0xf0, 0x04, 0xaf, 0x7c, 0x3f, 0x86, 0x0f, 0xc0, 0x0c, 0x2f, 0x3c, 0x32, 0xab, + 0x3f, 0xb1, 0xe3, 0x3a, 0x25, 0x09, 0x6f, 0xde, 0x7b, 0xee, 0x9e, 0x7b, 0xf6, 0xee, 0xb9, 0x2b, + 0xc3, 0x81, 0xc1, 0x98, 0x61, 0xd1, 0xb2, 0x66, 0xb1, 0x40, 0x2f, 0x0f, 0x4d, 0x87, 0xf0, 0x11, + 0x09, 0xfc, 0x17, 0x8c, 0x9b, 0xdf, 0x11, 0xdf, 0x64, 0x4e, 0xf9, 0xe5, 0xce, 0x90, 0xfa, 0x64, + 0xa7, 0xcc, 0xa9, 0xc7, 0x02, 0xae, 0x51, 0xaf, 0xe4, 0x72, 0xe6, 0x33, 0xb4, 0x1d, 0xed, 0x2c, + 0x85, 0x3b, 0x4b, 0x33, 0x76, 0x96, 0xe2, 0x9d, 0xf9, 0xad, 0xb8, 0x06, 0x71, 0xcd, 0x32, 0x71, + 0x1c, 0xe6, 0x87, 0x70, 0xcc, 0x93, 0x7f, 0x10, 0xa3, 0xe1, 0x6a, 0x18, 0x9c, 0x96, 0x7d, 0xd3, + 0xa6, 0x9e, 0x4f, 0x6c, 0x37, 0x4a, 0x28, 0xfe, 0xb5, 0x04, 0x4b, 0x5d, 0x66, 0x99, 0xda, 0x08, + 0x21, 0x58, 0x70, 0x88, 0x4d, 0x73, 0x52, 0x41, 0xda, 0x5e, 0x51, 0xc3, 0xdf, 0xa8, 0x00, 0x69, + 0x9d, 0x7a, 0x1a, 0x37, 0x5d, 0xc1, 0x9a, 0x5b, 0x0a, 0xa1, 0xc9, 0x10, 0xfa, 0x55, 0x82, 0xfb, + 0x86, 0xc5, 0x86, 0xc4, 0xc2, 0x6e, 0xc8, 0x83, 0xe9, 0x4b, 0x62, 0x05, 0xa1, 0x0a, 0x6c, 0x33, + 0x9d, 0xe6, 0x96, 0x0b, 0xd2, 0x76, 0xa6, 0x72, 0x54, 0xba, 0xea, 0x91, 0x4a, 0x91, 0x9e, 0x52, + 0x23, 0x64, 0x8d, 0x16, 0xca, 0x39, 0xe7, 0x31, 0xd3, 0xa9, 0x9a, 0x37, 0x2e, 0xc5, 0xd0, 0x4f, + 0x12, 0x6c, 0x11, 0xdd, 0x36, 0x3d, 0x4f, 0x28, 0x78, 0xf5, 0xc2, 0xf4, 0xa9, 0x65, 0x7a, 0x3e, + 0x76, 0x89, 0xef, 0x53, 0xee, 0x78, 0xb9, 0x54, 0x61, 0x7e, 0x3b, 0x5d, 0xa9, 0x5d, 0x5d, 0x8f, + 0x9c, 0xb0, 0x3d, 0x4f, 0xc8, 0xba, 0x11, 0x97, 0x9a, 0x27, 0x97, 0x41, 0x1e, 0xfa, 0x51, 0x82, + 0x4d, 0xcd, 0x0a, 0x3c, 0x9f, 0x72, 0x3c, 0xd6, 0xc3, 0x03, 0x8b, 0x7a, 0xb9, 0xf9, 0x50, 0xc2, + 0xf5, 0x5b, 0x52, 0x8b, 0xf8, 0xce, 0x05, 0xa9, 0x82, 0x4d, 0x71, 0x7c, 0x3e, 0x52, 0x37, 0xb4, + 0x59, 0x18, 0xb2, 0xe1, 0x7f, 0x3a, 0x3d, 0x25, 0x81, 0xe5, 0x4f, 0x89, 0xc8, 0x2d, 0x14, 0xa4, + 0xed, 0x74, 0xe5, 0xf1, 0x0d, 0xda, 0x20, 0x98, 0xd5, 0xbb, 0x31, 0xed, 0x85, 0x28, 0x7a, 0x02, + 0xe9, 0xc0, 0xd5, 0x89, 0x4f, 0xb1, 0x30, 0x5a, 0x6e, 0x31, 0xac, 0x91, 0x4f, 0x6a, 0x24, 0x2e, + 0x2c, 0xf5, 0x13, 0x17, 0xaa, 0x10, 0xa5, 0x8b, 0x40, 0xfe, 0x07, 0x09, 0xf2, 0x97, 0x9f, 0x10, + 0x65, 0x61, 0xfe, 0x8c, 0x8e, 0x62, 0x7b, 0x8a, 0x9f, 0xe8, 0x18, 0x16, 0xc5, 0xdd, 0xd3, 0x5c, + 0xea, 0xbf, 0x9d, 0x25, 0x62, 0xf9, 0x24, 0x75, 0x20, 0x15, 0x75, 0xc8, 0x5f, 0xee, 0x3b, 0xf4, + 0x11, 0x7c, 0xd8, 0x68, 0x75, 0xaa, 0x72, 0x0b, 0x77, 0x3b, 0xad, 0x66, 0xed, 0x04, 0x2b, 0x5f, + 0xc8, 0xad, 0x81, 0xdc, 0x6f, 0x76, 0xda, 0xf8, 0xb8, 0x53, 0x57, 0xf0, 0xa0, 0xdd, 0xeb, 0x2a, + 0xb5, 0xe6, 
0xb3, 0xa6, 0x52, 0xcf, 0xce, 0x21, 0x80, 0x25, 0xa5, 0x2d, 0x57, 0x5b, 0x4a, 0x56, + 0x42, 0x69, 0x58, 0xae, 0x37, 0x7b, 0xe1, 0x22, 0x55, 0x7c, 0x0a, 0xf7, 0x2e, 0x35, 0x15, 0x7a, + 0x17, 0x56, 0xc5, 0xec, 0x25, 0x86, 0x8d, 0x0f, 0x9c, 0x16, 0xb1, 0x38, 0xa5, 0xf8, 0xcb, 0x02, + 0xac, 0x5d, 0x6c, 0x3c, 0x83, 0xf5, 0xe9, 0xb9, 0x93, 0xc2, 0xb9, 0x7b, 0x76, 0xc3, 0xa6, 0x94, + 0xa6, 0x46, 0x2e, 0x43, 0x2f, 0xb6, 0xe2, 0x11, 0x6c, 0x72, 0xfa, 0x6d, 0x60, 0x72, 0x8a, 0x85, + 0x28, 0x2f, 0x7e, 0x77, 0xf0, 0x70, 0x14, 0x0e, 0xd8, 0x8a, 0xba, 0x11, 0xc3, 0xf2, 0x04, 0x5a, + 0x1d, 0x21, 0x0e, 0x59, 0xea, 0x9c, 0x32, 0xae, 0x51, 0x9b, 0x3a, 0x7e, 0xa4, 0x74, 0x3e, 0x54, + 0xda, 0xb8, 0xb1, 0xd2, 0x31, 0x5f, 0x28, 0x75, 0x9d, 0x5e, 0x0c, 0x14, 0x6d, 0xc8, 0x4c, 0x5d, + 0xe4, 0x03, 0xf8, 0xff, 0xdb, 0xaf, 0x2e, 0x0b, 0xab, 0x72, 0xeb, 0xb9, 0x7c, 0xd2, 0xc3, 0x72, + 0xab, 0xd5, 0x79, 0x9e, 0x95, 0xd0, 0x26, 0xdc, 0x51, 0x95, 0xcf, 0x07, 0x4d, 0x55, 0xc1, 0x72, + 0xbf, 0xaf, 0xf4, 0xfa, 0xe1, 0xde, 0x6c, 0x0a, 0xad, 0x43, 0x3a, 0x4e, 0xad, 0x2b, 0xed, 0x93, + 0xec, 0x7c, 0xd1, 0x85, 0xf5, 0x29, 0x49, 0xa8, 0x00, 0x5b, 0x4a, 0xfb, 0x59, 0x47, 0xad, 0x29, + 0xc7, 0x4a, 0xbb, 0x3f, 0xab, 0xe0, 0x38, 0xa3, 0x8e, 0xab, 0xad, 0x4e, 0xed, 0x08, 0xcb, 0xed, + 0x3a, 0x96, 0x07, 0xf5, 0x66, 0x1f, 0xb7, 0x3a, 0x8d, 0xac, 0x84, 0xee, 0xc1, 0x46, 0x5d, 0x3d, + 0x51, 0x07, 0xed, 0x71, 0x14, 0x77, 0xda, 0xad, 0x93, 0x6c, 0xaa, 0xf8, 0x73, 0x0a, 0x6e, 0x45, + 0x7d, 0x66, 0xfc, 0x86, 0xef, 0xf8, 0x2b, 0xd8, 0x0c, 0x3c, 0xca, 0x31, 0x7b, 0xe5, 0x50, 0x1d, + 0xeb, 0x7c, 0xa4, 0x33, 0xed, 0x0c, 0x3b, 0xcc, 0x8f, 0xae, 0x27, 0x5d, 0x79, 0x7a, 0xf5, 0xeb, + 0x19, 0x78, 0x94, 0x77, 0x04, 0x4f, 0x3d, 0xa2, 0x69, 0x33, 0x9f, 0x1e, 0xce, 0xa9, 0x77, 0x83, + 0x19, 0xf1, 0xe9, 0x27, 0x63, 0xe1, 0x3a, 0x4f, 0x46, 0x75, 0x1d, 0xd6, 0x48, 0x7c, 0x6e, 0xec, + 0x8f, 0x5c, 0x5a, 0xfc, 0x43, 0x82, 0xbb, 0xb3, 0xca, 0xa3, 0x0f, 0x20, 0x23, 0x0e, 0x83, 0x39, + 0x3d, 0xa5, 0x9c, 0x3a, 0x5a, 0xd2, 0x9f, 0x35, 0x11, 0x55, 0x93, 0x20, 0xfa, 0x1a, 0xd2, 0x6e, + 0x30, 0xb4, 0x4c, 0x0d, 0x9f, 0xd1, 0x51, 0xf2, 0xad, 0x78, 0x72, 0x0d, 0x67, 0xc6, 0x6a, 0xba, + 0x21, 0xc9, 0x11, 0x1d, 0xa9, 0xe0, 0x26, 0x3f, 0x3d, 0xd4, 0x80, 0x82, 0x4e, 0x2d, 0x6a, 0x44, + 0x53, 0xea, 0x51, 0xfe, 0xd2, 0xd4, 0x28, 0x26, 0x9a, 0xc6, 0x02, 0xc7, 0xc7, 0xd4, 0x26, 0xa6, + 0x15, 0x76, 0x7b, 0x45, 0xbd, 0x3f, 0xce, 0xeb, 0x45, 0x69, 0x72, 0x94, 0xa5, 0x88, 0xa4, 0xe2, + 0x6f, 0x0b, 0xb0, 0xd6, 0x3d, 0x33, 0x5f, 0x9f, 0x97, 0x41, 0xef, 0x43, 0x66, 0x2c, 0x1c, 0xbb, + 0xd4, 0x8e, 0xcf, 0xb7, 0x7a, 0x5e, 0xbe, 0x4b, 0x6d, 0xf4, 0x3d, 0xdc, 0xf1, 0x4c, 0xc3, 0x21, + 0x7e, 0x20, 0xe6, 0xd6, 0x32, 0x18, 0x37, 0xfd, 0x17, 0x76, 0xf8, 0x7e, 0x66, 0x2a, 0x9f, 0x5d, + 0xe3, 0x7b, 0x34, 0x59, 0xbb, 0xd4, 0x4b, 0x28, 0xe5, 0x84, 0x51, 0x45, 0xde, 0x1b, 0xb1, 0xe2, + 0xef, 0x29, 0x40, 0x6f, 0xa6, 0xa2, 0xf7, 0xe0, 0x41, 0xaf, 0xd9, 0x68, 0xcb, 0xfd, 0x81, 0x18, + 0xad, 0x56, 0xa3, 0xa3, 0x36, 0xfb, 0x87, 0xc7, 0x53, 0xe3, 0x21, 0xa6, 0xaf, 0x27, 0xe3, 0x6e, + 0xaf, 0x87, 0x2b, 0x0f, 0xf7, 0x0e, 0x70, 0xef, 0x50, 0xae, 0xec, 0x3f, 0x8a, 0xc7, 0x32, 0x06, + 0x76, 0x1f, 0x3e, 0xae, 0x24, 0x40, 0x6a, 0x12, 0xd8, 0x7b, 0xf8, 0xf1, 0xa3, 0x04, 0x98, 0x9f, + 0x05, 0xec, 0xef, 0x54, 0xb2, 0x0b, 0xe8, 0x1d, 0xc8, 0x0b, 0x40, 0x88, 0xc1, 0xdd, 0xa3, 0x5a, + 0x6f, 0xe7, 0x42, 0xa9, 0xc5, 0x19, 0xf8, 0x64, 0xc5, 0xa5, 0x19, 0xf8, 0x64, 0xe1, 0xe5, 0xb7, + 0xe0, 0xa2, 0xfe, 0x2d, 0xb4, 0x01, 0xb7, 0x95, 0x5a, 0x5d, 0x48, 0xab, 0xec, 0x9f, 0x6f, 0x5b, + 0x99, 0x08, 0xef, 0x1e, 0xec, 0x89, 
0xf0, 0xee, 0xc1, 0x5e, 0x16, 0x26, 0xc2, 0xfb, 0x95, 0x9d, + 0x84, 0x24, 0x5d, 0xfc, 0x53, 0x82, 0xdb, 0x6f, 0x98, 0x10, 0xe5, 0x60, 0x59, 0x63, 0xb6, 0x78, + 0x8e, 0x62, 0x5b, 0x24, 0x4b, 0x94, 0x81, 0x94, 0xa9, 0x87, 0x06, 0x58, 0x51, 0x53, 0xa6, 0x8e, + 0x3e, 0x85, 0x2d, 0xe2, 0x69, 0xa6, 0x89, 0x09, 0xb7, 0x19, 0xa7, 0x3a, 0x76, 0x0d, 0x17, 0x8f, + 0x9d, 0x15, 0xd9, 0xf3, 0x70, 0x4e, 0xcd, 0x85, 0x59, 0x72, 0x94, 0xd4, 0x35, 0xdc, 0x71, 0x2d, + 0x02, 0xeb, 0xee, 0x99, 0xf9, 0x7a, 0x72, 0xd3, 0xe2, 0x75, 0xbf, 0xcf, 0x17, 0xfc, 0x75, 0x38, + 0xa7, 0xae, 0xb9, 0x93, 0x81, 0xea, 0x2a, 0xc0, 0x98, 0xbd, 0xfa, 0xcd, 0x97, 0x5f, 0xc5, 0xc4, + 0x06, 0xb3, 0x88, 0x63, 0x94, 0x18, 0x37, 0xca, 0x06, 0x75, 0xc2, 0xb7, 0xa3, 0x1c, 0x41, 0xc4, + 0x35, 0xbd, 0x7f, 0xff, 0x1f, 0xfe, 0x64, 0x06, 0xf6, 0xb7, 0x24, 0x0d, 0x97, 0x42, 0xaa, 0xdd, + 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x82, 0x0c, 0x36, 0xd1, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/service.pb.go new file mode 100644 index 0000000..cc64b2e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1/service.pb.go @@ -0,0 +1,777 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/binaryauthorization/v1beta1/service.proto + +package binaryauthorization // import "google.golang.org/genproto/googleapis/cloud/binaryauthorization/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [BinauthzManagementService.GetPolicy][]. +type GetPolicyRequest struct { + // Required. The resource name of the [policy][google.cloud.binaryauthorization.v1beta1.Policy] to retrieve, + // in the format `projects/*/policy`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPolicyRequest) Reset() { *m = GetPolicyRequest{} } +func (m *GetPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*GetPolicyRequest) ProtoMessage() {} +func (*GetPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{0} +} +func (m *GetPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPolicyRequest.Unmarshal(m, b) +} +func (m *GetPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *GetPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPolicyRequest.Merge(dst, src) +} +func (m *GetPolicyRequest) XXX_Size() int { + return xxx_messageInfo_GetPolicyRequest.Size(m) +} +func (m *GetPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPolicyRequest proto.InternalMessageInfo + +func (m *GetPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [BinauthzManagementService.UpdatePolicy][]. +type UpdatePolicyRequest struct { + // Required. A new or updated [policy][google.cloud.binaryauthorization.v1beta1.Policy] value. The service will + // overwrite the [policy name][google.cloud.binaryauthorization.v1beta1.Policy.name] field with the resource name in + // the request URL, in the format `projects/*/policy`. + Policy *Policy `protobuf:"bytes,1,opt,name=policy,proto3" json:"policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdatePolicyRequest) Reset() { *m = UpdatePolicyRequest{} } +func (m *UpdatePolicyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdatePolicyRequest) ProtoMessage() {} +func (*UpdatePolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{1} +} +func (m *UpdatePolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdatePolicyRequest.Unmarshal(m, b) +} +func (m *UpdatePolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdatePolicyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdatePolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdatePolicyRequest.Merge(dst, src) +} +func (m *UpdatePolicyRequest) XXX_Size() int { + return xxx_messageInfo_UpdatePolicyRequest.Size(m) +} +func (m *UpdatePolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdatePolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdatePolicyRequest proto.InternalMessageInfo + +func (m *UpdatePolicyRequest) GetPolicy() *Policy { + if m != nil { + return m.Policy + } + return nil +} + +// Request message for [BinauthzManagementService.CreateAttestor][]. +type CreateAttestorRequest struct { + // Required. The parent of this [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The [attestors][google.cloud.binaryauthorization.v1beta1.Attestor] ID. + AttestorId string `protobuf:"bytes,2,opt,name=attestor_id,json=attestorId,proto3" json:"attestor_id,omitempty"` + // Required. 
The initial [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will + // overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name, + // in the format `projects/*/attestors/*`. + Attestor *Attestor `protobuf:"bytes,3,opt,name=attestor,proto3" json:"attestor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAttestorRequest) Reset() { *m = CreateAttestorRequest{} } +func (m *CreateAttestorRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAttestorRequest) ProtoMessage() {} +func (*CreateAttestorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{2} +} +func (m *CreateAttestorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAttestorRequest.Unmarshal(m, b) +} +func (m *CreateAttestorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAttestorRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAttestorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAttestorRequest.Merge(dst, src) +} +func (m *CreateAttestorRequest) XXX_Size() int { + return xxx_messageInfo_CreateAttestorRequest.Size(m) +} +func (m *CreateAttestorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAttestorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAttestorRequest proto.InternalMessageInfo + +func (m *CreateAttestorRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateAttestorRequest) GetAttestorId() string { + if m != nil { + return m.AttestorId + } + return "" +} + +func (m *CreateAttestorRequest) GetAttestor() *Attestor { + if m != nil { + return m.Attestor + } + return nil +} + +// Request message for [BinauthzManagementService.GetAttestor][]. +type GetAttestorRequest struct { + // Required. The name of the [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] to retrieve, in the format + // `projects/*/attestors/*`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAttestorRequest) Reset() { *m = GetAttestorRequest{} } +func (m *GetAttestorRequest) String() string { return proto.CompactTextString(m) } +func (*GetAttestorRequest) ProtoMessage() {} +func (*GetAttestorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{3} +} +func (m *GetAttestorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAttestorRequest.Unmarshal(m, b) +} +func (m *GetAttestorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAttestorRequest.Marshal(b, m, deterministic) +} +func (dst *GetAttestorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAttestorRequest.Merge(dst, src) +} +func (m *GetAttestorRequest) XXX_Size() int { + return xxx_messageInfo_GetAttestorRequest.Size(m) +} +func (m *GetAttestorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAttestorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAttestorRequest proto.InternalMessageInfo + +func (m *GetAttestorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [BinauthzManagementService.UpdateAttestor][]. 
+type UpdateAttestorRequest struct { + // Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will + // overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name + // in the request URL, in the format `projects/*/attestors/*`. + Attestor *Attestor `protobuf:"bytes,1,opt,name=attestor,proto3" json:"attestor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAttestorRequest) Reset() { *m = UpdateAttestorRequest{} } +func (m *UpdateAttestorRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateAttestorRequest) ProtoMessage() {} +func (*UpdateAttestorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{4} +} +func (m *UpdateAttestorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAttestorRequest.Unmarshal(m, b) +} +func (m *UpdateAttestorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAttestorRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateAttestorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAttestorRequest.Merge(dst, src) +} +func (m *UpdateAttestorRequest) XXX_Size() int { + return xxx_messageInfo_UpdateAttestorRequest.Size(m) +} +func (m *UpdateAttestorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAttestorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAttestorRequest proto.InternalMessageInfo + +func (m *UpdateAttestorRequest) GetAttestor() *Attestor { + if m != nil { + return m.Attestor + } + return nil +} + +// Request message for [BinauthzManagementService.ListAttestors][]. +type ListAttestorsRequest struct { + // Required. The resource name of the project associated with the + // [attestors][google.cloud.binaryauthorization.v1beta1.Attestor], in the format `projects/*`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Requested page size. The server may return fewer results than requested. If + // unspecified, the server will pick an appropriate default. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results the server should return. Typically, + // this is the value of [ListAttestorsResponse.next_page_token][google.cloud.binaryauthorization.v1beta1.ListAttestorsResponse.next_page_token] returned + // from the previous call to the `ListAttestors` method. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAttestorsRequest) Reset() { *m = ListAttestorsRequest{} } +func (m *ListAttestorsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAttestorsRequest) ProtoMessage() {} +func (*ListAttestorsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{5} +} +func (m *ListAttestorsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAttestorsRequest.Unmarshal(m, b) +} +func (m *ListAttestorsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAttestorsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAttestorsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAttestorsRequest.Merge(dst, src) +} +func (m *ListAttestorsRequest) XXX_Size() int { + return xxx_messageInfo_ListAttestorsRequest.Size(m) +} +func (m *ListAttestorsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAttestorsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAttestorsRequest proto.InternalMessageInfo + +func (m *ListAttestorsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAttestorsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAttestorsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [BinauthzManagementService.ListAttestors][]. +type ListAttestorsResponse struct { + // The list of [attestors][google.cloud.binaryauthorization.v1beta1.Attestor]. + Attestors []*Attestor `protobuf:"bytes,1,rep,name=attestors,proto3" json:"attestors,omitempty"` + // A token to retrieve the next page of results. Pass this value in the + // [ListAttestorsRequest.page_token][google.cloud.binaryauthorization.v1beta1.ListAttestorsRequest.page_token] field in the subsequent call to the + // `ListAttestors` method to retrieve the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAttestorsResponse) Reset() { *m = ListAttestorsResponse{} } +func (m *ListAttestorsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAttestorsResponse) ProtoMessage() {} +func (*ListAttestorsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{6} +} +func (m *ListAttestorsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAttestorsResponse.Unmarshal(m, b) +} +func (m *ListAttestorsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAttestorsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAttestorsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAttestorsResponse.Merge(dst, src) +} +func (m *ListAttestorsResponse) XXX_Size() int { + return xxx_messageInfo_ListAttestorsResponse.Size(m) +} +func (m *ListAttestorsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAttestorsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAttestorsResponse proto.InternalMessageInfo + +func (m *ListAttestorsResponse) GetAttestors() []*Attestor { + if m != nil { + return m.Attestors + } + return nil +} + +func (m *ListAttestorsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [BinauthzManagementService.DeleteAttestor][]. +type DeleteAttestorRequest struct { + // Required. The name of the [attestors][google.cloud.binaryauthorization.v1beta1.Attestor] to delete, in the format + // `projects/*/attestors/*`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAttestorRequest) Reset() { *m = DeleteAttestorRequest{} } +func (m *DeleteAttestorRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAttestorRequest) ProtoMessage() {} +func (*DeleteAttestorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_service_416c3c6c0a0ebbdc, []int{7} +} +func (m *DeleteAttestorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAttestorRequest.Unmarshal(m, b) +} +func (m *DeleteAttestorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAttestorRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAttestorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAttestorRequest.Merge(dst, src) +} +func (m *DeleteAttestorRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAttestorRequest.Size(m) +} +func (m *DeleteAttestorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAttestorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAttestorRequest proto.InternalMessageInfo + +func (m *DeleteAttestorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*GetPolicyRequest)(nil), "google.cloud.binaryauthorization.v1beta1.GetPolicyRequest") + proto.RegisterType((*UpdatePolicyRequest)(nil), "google.cloud.binaryauthorization.v1beta1.UpdatePolicyRequest") + proto.RegisterType((*CreateAttestorRequest)(nil), "google.cloud.binaryauthorization.v1beta1.CreateAttestorRequest") + proto.RegisterType((*GetAttestorRequest)(nil), "google.cloud.binaryauthorization.v1beta1.GetAttestorRequest") + proto.RegisterType((*UpdateAttestorRequest)(nil), "google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest") + proto.RegisterType((*ListAttestorsRequest)(nil), "google.cloud.binaryauthorization.v1beta1.ListAttestorsRequest") + proto.RegisterType((*ListAttestorsResponse)(nil), "google.cloud.binaryauthorization.v1beta1.ListAttestorsResponse") + proto.RegisterType((*DeleteAttestorRequest)(nil), "google.cloud.binaryauthorization.v1beta1.DeleteAttestorRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BinauthzManagementServiceV1Beta1Client is the client API for BinauthzManagementServiceV1Beta1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BinauthzManagementServiceV1Beta1Client interface { + // Gets the [policy][google.cloud.binaryauthorization.v1beta1.Policy] for this project. Returns a default + // [policy][google.cloud.binaryauthorization.v1beta1.Policy] if the project does not have one. + GetPolicy(ctx context.Context, in *GetPolicyRequest, opts ...grpc.CallOption) (*Policy, error) + // Creates or updates a project's [policy][google.cloud.binaryauthorization.v1beta1.Policy], and returns a copy of the + // new [policy][google.cloud.binaryauthorization.v1beta1.Policy]. A policy is always updated as a whole, to avoid race + // conditions with concurrent policy enforcement (or management!) + // requests. 
Returns NOT_FOUND if the project does not exist, INVALID_ARGUMENT + // if the request is malformed. + UpdatePolicy(ctx context.Context, in *UpdatePolicyRequest, opts ...grpc.CallOption) (*Policy, error) + // Creates an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor], and returns a copy of the new + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. Returns NOT_FOUND if the project does not exist, + // INVALID_ARGUMENT if the request is malformed, ALREADY_EXISTS if the + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] already exists. + CreateAttestor(ctx context.Context, in *CreateAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) + // Gets an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns NOT_FOUND if the [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + GetAttestor(ctx context.Context, in *GetAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) + // Updates an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns NOT_FOUND if the [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + UpdateAttestor(ctx context.Context, in *UpdateAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) + // Lists [attestors][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns INVALID_ARGUMENT if the project does not exist. + ListAttestors(ctx context.Context, in *ListAttestorsRequest, opts ...grpc.CallOption) (*ListAttestorsResponse, error) + // Deletes an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. Returns NOT_FOUND if the + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + DeleteAttestor(ctx context.Context, in *DeleteAttestorRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type binauthzManagementServiceV1Beta1Client struct { + cc *grpc.ClientConn +} + +func NewBinauthzManagementServiceV1Beta1Client(cc *grpc.ClientConn) BinauthzManagementServiceV1Beta1Client { + return &binauthzManagementServiceV1Beta1Client{cc} +} + +func (c *binauthzManagementServiceV1Beta1Client) GetPolicy(ctx context.Context, in *GetPolicyRequest, opts ...grpc.CallOption) (*Policy, error) { + out := new(Policy) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/GetPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) UpdatePolicy(ctx context.Context, in *UpdatePolicyRequest, opts ...grpc.CallOption) (*Policy, error) { + out := new(Policy) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/UpdatePolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) CreateAttestor(ctx context.Context, in *CreateAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) { + out := new(Attestor) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/CreateAttestor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) GetAttestor(ctx context.Context, in *GetAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) { + out := new(Attestor) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/GetAttestor", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) UpdateAttestor(ctx context.Context, in *UpdateAttestorRequest, opts ...grpc.CallOption) (*Attestor, error) { + out := new(Attestor) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/UpdateAttestor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) ListAttestors(ctx context.Context, in *ListAttestorsRequest, opts ...grpc.CallOption) (*ListAttestorsResponse, error) { + out := new(ListAttestorsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/ListAttestors", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *binauthzManagementServiceV1Beta1Client) DeleteAttestor(ctx context.Context, in *DeleteAttestorRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/DeleteAttestor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BinauthzManagementServiceV1Beta1Server is the server API for BinauthzManagementServiceV1Beta1 service. +type BinauthzManagementServiceV1Beta1Server interface { + // Gets the [policy][google.cloud.binaryauthorization.v1beta1.Policy] for this project. Returns a default + // [policy][google.cloud.binaryauthorization.v1beta1.Policy] if the project does not have one. + GetPolicy(context.Context, *GetPolicyRequest) (*Policy, error) + // Creates or updates a project's [policy][google.cloud.binaryauthorization.v1beta1.Policy], and returns a copy of the + // new [policy][google.cloud.binaryauthorization.v1beta1.Policy]. A policy is always updated as a whole, to avoid race + // conditions with concurrent policy enforcement (or management!) + // requests. Returns NOT_FOUND if the project does not exist, INVALID_ARGUMENT + // if the request is malformed. + UpdatePolicy(context.Context, *UpdatePolicyRequest) (*Policy, error) + // Creates an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor], and returns a copy of the new + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. Returns NOT_FOUND if the project does not exist, + // INVALID_ARGUMENT if the request is malformed, ALREADY_EXISTS if the + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] already exists. + CreateAttestor(context.Context, *CreateAttestorRequest) (*Attestor, error) + // Gets an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns NOT_FOUND if the [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + GetAttestor(context.Context, *GetAttestorRequest) (*Attestor, error) + // Updates an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns NOT_FOUND if the [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + UpdateAttestor(context.Context, *UpdateAttestorRequest) (*Attestor, error) + // Lists [attestors][google.cloud.binaryauthorization.v1beta1.Attestor]. + // Returns INVALID_ARGUMENT if the project does not exist. + ListAttestors(context.Context, *ListAttestorsRequest) (*ListAttestorsResponse, error) + // Deletes an [attestor][google.cloud.binaryauthorization.v1beta1.Attestor]. 
Returns NOT_FOUND if the + // [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] does not exist. + DeleteAttestor(context.Context, *DeleteAttestorRequest) (*empty.Empty, error) +} + +func RegisterBinauthzManagementServiceV1Beta1Server(s *grpc.Server, srv BinauthzManagementServiceV1Beta1Server) { + s.RegisterService(&_BinauthzManagementServiceV1Beta1_serviceDesc, srv) +} + +func _BinauthzManagementServiceV1Beta1_GetPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).GetPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/GetPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).GetPolicy(ctx, req.(*GetPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_UpdatePolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdatePolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).UpdatePolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/UpdatePolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).UpdatePolicy(ctx, req.(*UpdatePolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_CreateAttestor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAttestorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).CreateAttestor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/CreateAttestor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).CreateAttestor(ctx, req.(*CreateAttestorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_GetAttestor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAttestorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).GetAttestor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/GetAttestor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).GetAttestor(ctx, req.(*GetAttestorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_UpdateAttestor_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAttestorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).UpdateAttestor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/UpdateAttestor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).UpdateAttestor(ctx, req.(*UpdateAttestorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_ListAttestors_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAttestorsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).ListAttestors(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/ListAttestors", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).ListAttestors(ctx, req.(*ListAttestorsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _BinauthzManagementServiceV1Beta1_DeleteAttestor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAttestorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BinauthzManagementServiceV1Beta1Server).DeleteAttestor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1/DeleteAttestor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BinauthzManagementServiceV1Beta1Server).DeleteAttestor(ctx, req.(*DeleteAttestorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _BinauthzManagementServiceV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.binaryauthorization.v1beta1.BinauthzManagementServiceV1Beta1", + HandlerType: (*BinauthzManagementServiceV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetPolicy", + Handler: _BinauthzManagementServiceV1Beta1_GetPolicy_Handler, + }, + { + MethodName: "UpdatePolicy", + Handler: _BinauthzManagementServiceV1Beta1_UpdatePolicy_Handler, + }, + { + MethodName: "CreateAttestor", + Handler: _BinauthzManagementServiceV1Beta1_CreateAttestor_Handler, + }, + { + MethodName: "GetAttestor", + Handler: _BinauthzManagementServiceV1Beta1_GetAttestor_Handler, + }, + { + MethodName: "UpdateAttestor", + Handler: _BinauthzManagementServiceV1Beta1_UpdateAttestor_Handler, + }, + { + MethodName: "ListAttestors", + Handler: _BinauthzManagementServiceV1Beta1_ListAttestors_Handler, + }, + { + MethodName: "DeleteAttestor", + Handler: _BinauthzManagementServiceV1Beta1_DeleteAttestor_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/binaryauthorization/v1beta1/service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/binaryauthorization/v1beta1/service.proto", fileDescriptor_service_416c3c6c0a0ebbdc) +} + +var 
fileDescriptor_service_416c3c6c0a0ebbdc = []byte{ + // 701 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xcf, 0x4f, 0x13, 0x41, + 0x14, 0xc7, 0x33, 0xa0, 0x84, 0x3e, 0x04, 0xcd, 0x68, 0x49, 0xb3, 0x68, 0xc4, 0x35, 0x69, 0x2a, + 0x26, 0xbb, 0x52, 0x13, 0x43, 0x40, 0x24, 0xa0, 0x06, 0x4d, 0xd4, 0x90, 0xa2, 0x1e, 0xf4, 0x50, + 0xa7, 0xed, 0x73, 0x5d, 0x2c, 0x33, 0xeb, 0xce, 0x94, 0x48, 0x0d, 0x17, 0xff, 0x03, 0xe3, 0xc9, + 0x9b, 0x89, 0xde, 0x3d, 0x7b, 0xd2, 0xa3, 0x77, 0xff, 0x05, 0xff, 0x08, 0x8f, 0x66, 0x67, 0x7f, + 0xb0, 0xad, 0x0b, 0x6c, 0x1b, 0x6e, 0x9d, 0x1f, 0xef, 0xbd, 0xcf, 0x7b, 0xf3, 0xbe, 0x6f, 0x0b, + 0x37, 0x1c, 0x21, 0x9c, 0x36, 0xda, 0xcd, 0xb6, 0xe8, 0xb4, 0xec, 0x86, 0xcb, 0x99, 0xbf, 0xcb, + 0x3a, 0xea, 0x95, 0xf0, 0xdd, 0x2e, 0x53, 0xae, 0xe0, 0xf6, 0xce, 0x7c, 0x03, 0x15, 0x9b, 0xb7, + 0x25, 0xfa, 0x3b, 0x6e, 0x13, 0x2d, 0xcf, 0x17, 0x4a, 0xd0, 0x4a, 0x68, 0x67, 0x69, 0x3b, 0x2b, + 0xc3, 0xce, 0x8a, 0xec, 0x8c, 0xf3, 0x51, 0x04, 0xe6, 0xb9, 0x36, 0xe3, 0x5c, 0x28, 0x7d, 0x2c, + 0x43, 0x3f, 0xc6, 0x42, 0xee, 0xf8, 0x3e, 0x4a, 0xd1, 0xf1, 0x9b, 0x18, 0x5b, 0xce, 0x44, 0x96, + 0x7a, 0xd5, 0xe8, 0xbc, 0xb4, 0x71, 0xdb, 0x53, 0xbb, 0xe1, 0xa1, 0x59, 0x86, 0x33, 0xeb, 0xa8, + 0x36, 0x44, 0xdb, 0x6d, 0xee, 0xd6, 0xf0, 0x4d, 0x07, 0xa5, 0xa2, 0x14, 0x4e, 0x70, 0xb6, 0x8d, + 0x25, 0x32, 0x4b, 0x2a, 0x85, 0x9a, 0xfe, 0x6d, 0xd6, 0xe1, 0xec, 0x13, 0xaf, 0xc5, 0x14, 0xf6, + 0x5e, 0xbd, 0x07, 0x63, 0x9e, 0xde, 0xd0, 0x97, 0x27, 0xaa, 0xd7, 0xac, 0xbc, 0xe9, 0x5a, 0x91, + 0xa3, 0xc8, 0xde, 0xfc, 0x4c, 0xa0, 0x78, 0xdb, 0x47, 0xa6, 0x70, 0x55, 0x29, 0x94, 0x4a, 0xf8, + 0x71, 0x8c, 0x69, 0x18, 0xf3, 0x98, 0x8f, 0x5c, 0x45, 0x40, 0xd1, 0x8a, 0x5e, 0x84, 0x09, 0x16, + 0x5d, 0xad, 0xbb, 0xad, 0xd2, 0x88, 0x3e, 0x84, 0x78, 0xeb, 0x7e, 0x8b, 0x3e, 0x82, 0xf1, 0x78, + 0x55, 0x1a, 0xd5, 0x78, 0xd5, 0xfc, 0x78, 0x09, 0x45, 0xe2, 0xc3, 0xac, 0x00, 0x5d, 0x47, 0xd5, + 0x8f, 0x97, 0x55, 0x2d, 0x07, 0x8a, 0x61, 0xb5, 0xfa, 0x2f, 0xa7, 0x91, 0xc8, 0x31, 0x20, 0x6d, + 0xc1, 0xb9, 0x07, 0xae, 0x4c, 0x98, 0xe4, 0x51, 0x35, 0x9b, 0x81, 0x82, 0xc7, 0x1c, 0xac, 0x4b, + 0xb7, 0x8b, 0xba, 0x62, 0x27, 0x6b, 0xe3, 0xc1, 0xc6, 0xa6, 0xdb, 0x45, 0x7a, 0x01, 0x40, 0x1f, + 0x2a, 0xf1, 0x1a, 0xb9, 0xae, 0x58, 0xa1, 0xa6, 0xaf, 0x3f, 0x0e, 0x36, 0xcc, 0x0f, 0x04, 0x8a, + 0x7d, 0xc1, 0xa4, 0x27, 0xb8, 0x44, 0xba, 0x01, 0x85, 0x98, 0x48, 0x96, 0xc8, 0xec, 0xe8, 0x90, + 0x69, 0xed, 0x3b, 0xa1, 0x65, 0x38, 0xcd, 0xf1, 0xad, 0xaa, 0xa7, 0x78, 0xc2, 0xf7, 0x9d, 0x0c, + 0xb6, 0x37, 0x12, 0xa6, 0xab, 0x50, 0xbc, 0x83, 0x6d, 0xfc, 0xbf, 0xd0, 0x19, 0xaf, 0x52, 0xfd, + 0x02, 0x30, 0xbb, 0xe6, 0xf2, 0x80, 0xa2, 0xfb, 0x90, 0x71, 0xe6, 0xe0, 0x36, 0x72, 0xb5, 0x19, + 0xca, 0xf5, 0xe9, 0xfc, 0x5a, 0x40, 0x43, 0xbf, 0x12, 0x28, 0x24, 0x8a, 0xa0, 0x8b, 0xf9, 0xd3, + 0xe8, 0x97, 0x91, 0x31, 0xb0, 0x16, 0xcc, 0x2b, 0xef, 0x7f, 0xff, 0xf9, 0x38, 0x72, 0x99, 0x5e, + 0x4a, 0xb4, 0xfc, 0x2e, 0x00, 0x5f, 0xf6, 0x7c, 0xb1, 0x85, 0x4d, 0x25, 0xed, 0x39, 0x3b, 0x54, + 0xcb, 0x1e, 0xfd, 0x4e, 0xe0, 0x54, 0x5a, 0x90, 0x74, 0x39, 0x7f, 0xb4, 0x0c, 0x21, 0x0f, 0x01, + 0xbb, 0xa0, 0x61, 0xab, 0x46, 0x65, 0x1f, 0x36, 0x84, 0xb3, 0x0e, 0x60, 0x5e, 0x8c, 0xa4, 0x4e, + 0x7f, 0x10, 0x98, 0xea, 0x95, 0x3a, 0x5d, 0xc9, 0x1f, 0x3e, 0x73, 0x48, 0x18, 0x43, 0xf4, 0x5b, + 0x9c, 0x81, 0x59, 0x4e, 0x65, 0xa0, 0x65, 0x92, 0x82, 0xdf, 0xb3, 0x93, 0xa6, 0x5c, 0x4c, 0x64, + 0x47, 0xbf, 0x11, 0x98, 0x48, 0x8d, 0x02, 0x7a, 0x73, 0xa0, 0x36, 0x39, 0x0e, 0x76, 0x4b, 0xb3, + 0x57, 0x68, 0xf9, 0xe0, 
0x56, 0x49, 0xc0, 0xed, 0xb9, 0x3d, 0xfa, 0x8b, 0xc0, 0x54, 0xef, 0x48, + 0x1a, 0xa4, 0xe6, 0x99, 0xc3, 0x6c, 0x28, 0xee, 0x55, 0xcd, 0xbd, 0x64, 0xd8, 0xfb, 0xdc, 0x31, + 0xa7, 0x75, 0x58, 0x02, 0xa9, 0xe2, 0xff, 0x24, 0x30, 0xd9, 0x33, 0x87, 0xe8, 0xad, 0xfc, 0x20, + 0x59, 0xd3, 0xd2, 0x58, 0x19, 0xda, 0x3e, 0x1c, 0x80, 0x59, 0xaf, 0x71, 0x58, 0x27, 0xd1, 0x4f, + 0x04, 0xa6, 0x7a, 0xe7, 0xd6, 0x20, 0xaf, 0x91, 0x39, 0xf1, 0x8c, 0xe9, 0xd8, 0x41, 0xfc, 0x9d, + 0xb7, 0xee, 0x06, 0xdf, 0xf9, 0x98, 0x6d, 0x2e, 0x67, 0xa7, 0xac, 0xbd, 0x78, 0xf6, 0x3c, 0x72, + 0xe4, 0x88, 0x36, 0xe3, 0x8e, 0x25, 0x7c, 0xc7, 0x76, 0x90, 0x6b, 0xb7, 0x76, 0x78, 0xc4, 0x3c, + 0x57, 0x1e, 0xfd, 0x4f, 0x64, 0x29, 0xe3, 0xec, 0x2f, 0x21, 0x8d, 0x31, 0xed, 0xea, 0xfa, 0xbf, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xa6, 0xda, 0x4c, 0x49, 0x53, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/datacatalog.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/datacatalog.pb.go new file mode 100644 index 0000000..c851853 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/datacatalog.pb.go @@ -0,0 +1,524 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datacatalog/v1beta1/datacatalog.proto + +package datacatalog // import "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Entry resources in Cloud Data Catalog can be of different types e.g. BigQuery +// Table entry is of type 'TABLE'. This enum describes all the possible types +// Cloud Data Catalog contains. +type EntryType int32 + +const ( + // Default unknown type + EntryType_ENTRY_TYPE_UNSPECIFIED EntryType = 0 + // The type of entry that has a GoogleSQL schema, including logical views. + EntryType_TABLE EntryType = 2 + // An entry type which is used for streaming entries. Example - Pub/Sub. + EntryType_DATA_STREAM EntryType = 3 +) + +var EntryType_name = map[int32]string{ + 0: "ENTRY_TYPE_UNSPECIFIED", + 2: "TABLE", + 3: "DATA_STREAM", +} +var EntryType_value = map[string]int32{ + "ENTRY_TYPE_UNSPECIFIED": 0, + "TABLE": 2, + "DATA_STREAM": 3, +} + +func (x EntryType) String() string { + return proto.EnumName(EntryType_name, int32(x)) +} +func (EntryType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datacatalog_2e06be1501804d5c, []int{0} +} + +// Request message for +// [LookupEntry][google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry]. +type LookupEntryRequest struct { + // Represents either the Google Cloud Platform resource or SQL name for a + // Google Cloud Platform resource. 
+ // + // Types that are valid to be assigned to TargetName: + // *LookupEntryRequest_LinkedResource + // *LookupEntryRequest_SqlResource + TargetName isLookupEntryRequest_TargetName `protobuf_oneof:"target_name"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LookupEntryRequest) Reset() { *m = LookupEntryRequest{} } +func (m *LookupEntryRequest) String() string { return proto.CompactTextString(m) } +func (*LookupEntryRequest) ProtoMessage() {} +func (*LookupEntryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datacatalog_2e06be1501804d5c, []int{0} +} +func (m *LookupEntryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LookupEntryRequest.Unmarshal(m, b) +} +func (m *LookupEntryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LookupEntryRequest.Marshal(b, m, deterministic) +} +func (dst *LookupEntryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LookupEntryRequest.Merge(dst, src) +} +func (m *LookupEntryRequest) XXX_Size() int { + return xxx_messageInfo_LookupEntryRequest.Size(m) +} +func (m *LookupEntryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LookupEntryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LookupEntryRequest proto.InternalMessageInfo + +type isLookupEntryRequest_TargetName interface { + isLookupEntryRequest_TargetName() +} + +type LookupEntryRequest_LinkedResource struct { + LinkedResource string `protobuf:"bytes,1,opt,name=linked_resource,json=linkedResource,proto3,oneof"` +} + +type LookupEntryRequest_SqlResource struct { + SqlResource string `protobuf:"bytes,3,opt,name=sql_resource,json=sqlResource,proto3,oneof"` +} + +func (*LookupEntryRequest_LinkedResource) isLookupEntryRequest_TargetName() {} + +func (*LookupEntryRequest_SqlResource) isLookupEntryRequest_TargetName() {} + +func (m *LookupEntryRequest) GetTargetName() isLookupEntryRequest_TargetName { + if m != nil { + return m.TargetName + } + return nil +} + +func (m *LookupEntryRequest) GetLinkedResource() string { + if x, ok := m.GetTargetName().(*LookupEntryRequest_LinkedResource); ok { + return x.LinkedResource + } + return "" +} + +func (m *LookupEntryRequest) GetSqlResource() string { + if x, ok := m.GetTargetName().(*LookupEntryRequest_SqlResource); ok { + return x.SqlResource + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LookupEntryRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LookupEntryRequest_OneofMarshaler, _LookupEntryRequest_OneofUnmarshaler, _LookupEntryRequest_OneofSizer, []interface{}{ + (*LookupEntryRequest_LinkedResource)(nil), + (*LookupEntryRequest_SqlResource)(nil), + } +} + +func _LookupEntryRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LookupEntryRequest) + // target_name + switch x := m.TargetName.(type) { + case *LookupEntryRequest_LinkedResource: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.LinkedResource) + case *LookupEntryRequest_SqlResource: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.SqlResource) + case nil: + default: + return fmt.Errorf("LookupEntryRequest.TargetName has unexpected type %T", x) + } + return nil +} + +func _LookupEntryRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LookupEntryRequest) + switch tag { + case 1: // target_name.linked_resource + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.TargetName = &LookupEntryRequest_LinkedResource{x} + return true, err + case 3: // target_name.sql_resource + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.TargetName = &LookupEntryRequest_SqlResource{x} + return true, err + default: + return false, nil + } +} + +func _LookupEntryRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LookupEntryRequest) + // target_name + switch x := m.TargetName.(type) { + case *LookupEntryRequest_LinkedResource: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.LinkedResource))) + n += len(x.LinkedResource) + case *LookupEntryRequest_SqlResource: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.SqlResource))) + n += len(x.SqlResource) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Entry Metadata. +// A Data Catalog Entry resource represents another resource in Google +// Cloud Platform, such as a BigQuery Dataset or a Pub/Sub Topic. Clients can +// use the `linked_resource` field in the Entry resource to refer to the +// original resource id of the source system. +// +// An Entry resource contains resource details, such as its schema. +type Entry struct { + // Output only. The Data Catalog resource name of the entry in URL format. For + // example, + // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The full name of the cloud resource the entry belongs to. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + // + // Data Catalog supports resources from select Google Cloud Platform systems. + // `linked_resource` is the full name of the Google Cloud Platform resource. + // For example, the `linked_resource` for a table resource from BigQuery is: + // + // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". + LinkedResource string `protobuf:"bytes,9,opt,name=linked_resource,json=linkedResource,proto3" json:"linked_resource,omitempty"` + // Type of entry. 
+ Type EntryType `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.datacatalog.v1beta1.EntryType" json:"type,omitempty"` + // Type specification information. + // + // Types that are valid to be assigned to TypeSpec: + // *Entry_BigqueryTableSpec + TypeSpec isEntry_TypeSpec `protobuf_oneof:"type_spec"` + // Display information such as title and description. + // A short name to identify the entry, for example, + // "Analytics Data - Jan 2011". + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Entry description, which can consist of several sentences or paragraphs + // that describe entry contents. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // Schema of the entry. + Schema *Schema `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"` + // Timestamps about the underlying Google Cloud Platform resource -- not about + // this Data Catalog Entry. + SourceSystemTimestamps *SystemTimestamps `protobuf:"bytes,7,opt,name=source_system_timestamps,json=sourceSystemTimestamps,proto3" json:"source_system_timestamps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entry) Reset() { *m = Entry{} } +func (m *Entry) String() string { return proto.CompactTextString(m) } +func (*Entry) ProtoMessage() {} +func (*Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_datacatalog_2e06be1501804d5c, []int{1} +} +func (m *Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entry.Unmarshal(m, b) +} +func (m *Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entry.Marshal(b, m, deterministic) +} +func (dst *Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entry.Merge(dst, src) +} +func (m *Entry) XXX_Size() int { + return xxx_messageInfo_Entry.Size(m) +} +func (m *Entry) XXX_DiscardUnknown() { + xxx_messageInfo_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_Entry proto.InternalMessageInfo + +func (m *Entry) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Entry) GetLinkedResource() string { + if m != nil { + return m.LinkedResource + } + return "" +} + +func (m *Entry) GetType() EntryType { + if m != nil { + return m.Type + } + return EntryType_ENTRY_TYPE_UNSPECIFIED +} + +type isEntry_TypeSpec interface { + isEntry_TypeSpec() +} + +type Entry_BigqueryTableSpec struct { + BigqueryTableSpec *BigQueryTableSpec `protobuf:"bytes,12,opt,name=bigquery_table_spec,json=bigqueryTableSpec,proto3,oneof"` +} + +func (*Entry_BigqueryTableSpec) isEntry_TypeSpec() {} + +func (m *Entry) GetTypeSpec() isEntry_TypeSpec { + if m != nil { + return m.TypeSpec + } + return nil +} + +func (m *Entry) GetBigqueryTableSpec() *BigQueryTableSpec { + if x, ok := m.GetTypeSpec().(*Entry_BigqueryTableSpec); ok { + return x.BigqueryTableSpec + } + return nil +} + +func (m *Entry) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Entry) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entry) GetSchema() *Schema { + if m != nil { + return m.Schema + } + return nil +} + +func (m *Entry) GetSourceSystemTimestamps() *SystemTimestamps { + if m != nil { + return m.SourceSystemTimestamps + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Entry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Entry_OneofMarshaler, _Entry_OneofUnmarshaler, _Entry_OneofSizer, []interface{}{ + (*Entry_BigqueryTableSpec)(nil), + } +} + +func _Entry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Entry) + // type_spec + switch x := m.TypeSpec.(type) { + case *Entry_BigqueryTableSpec: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigqueryTableSpec); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Entry.TypeSpec has unexpected type %T", x) + } + return nil +} + +func _Entry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Entry) + switch tag { + case 12: // type_spec.bigquery_table_spec + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryTableSpec) + err := b.DecodeMessage(msg) + m.TypeSpec = &Entry_BigqueryTableSpec{msg} + return true, err + default: + return false, nil + } +} + +func _Entry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Entry) + // type_spec + switch x := m.TypeSpec.(type) { + case *Entry_BigqueryTableSpec: + s := proto.Size(x.BigqueryTableSpec) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*LookupEntryRequest)(nil), "google.cloud.datacatalog.v1beta1.LookupEntryRequest") + proto.RegisterType((*Entry)(nil), "google.cloud.datacatalog.v1beta1.Entry") + proto.RegisterEnum("google.cloud.datacatalog.v1beta1.EntryType", EntryType_name, EntryType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DataCatalogClient is the client API for DataCatalog service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DataCatalogClient interface { + // Get an entry by target resource name. This method allows clients to use + // the resource name from the source Google Cloud Platform service to get the + // Cloud Data Catalog Entry. + LookupEntry(ctx context.Context, in *LookupEntryRequest, opts ...grpc.CallOption) (*Entry, error) +} + +type dataCatalogClient struct { + cc *grpc.ClientConn +} + +func NewDataCatalogClient(cc *grpc.ClientConn) DataCatalogClient { + return &dataCatalogClient{cc} +} + +func (c *dataCatalogClient) LookupEntry(ctx context.Context, in *LookupEntryRequest, opts ...grpc.CallOption) (*Entry, error) { + out := new(Entry) + err := c.cc.Invoke(ctx, "/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DataCatalogServer is the server API for DataCatalog service. +type DataCatalogServer interface { + // Get an entry by target resource name. This method allows clients to use + // the resource name from the source Google Cloud Platform service to get the + // Cloud Data Catalog Entry. 
+ LookupEntry(context.Context, *LookupEntryRequest) (*Entry, error) +} + +func RegisterDataCatalogServer(s *grpc.Server, srv DataCatalogServer) { + s.RegisterService(&_DataCatalog_serviceDesc, srv) +} + +func _DataCatalog_LookupEntry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LookupEntryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataCatalogServer).LookupEntry(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datacatalog.v1beta1.DataCatalog/LookupEntry", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataCatalogServer).LookupEntry(ctx, req.(*LookupEntryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DataCatalog_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.datacatalog.v1beta1.DataCatalog", + HandlerType: (*DataCatalogServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "LookupEntry", + Handler: _DataCatalog_LookupEntry_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/datacatalog/v1beta1/datacatalog.proto", +} + +func init() { + proto.RegisterFile("google/cloud/datacatalog/v1beta1/datacatalog.proto", fileDescriptor_datacatalog_2e06be1501804d5c) +} + +var fileDescriptor_datacatalog_2e06be1501804d5c = []byte{ + // 563 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x53, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0x8d, 0xf3, 0x51, 0x94, 0x75, 0x68, 0xc3, 0x22, 0x15, 0x13, 0x55, 0x22, 0x84, 0x43, 0x03, + 0x08, 0x5b, 0x49, 0x39, 0xc1, 0x01, 0xf2, 0x61, 0xd4, 0x8a, 0x12, 0x05, 0xc7, 0x1c, 0xca, 0xc5, + 0xda, 0x38, 0x23, 0x63, 0xd5, 0xf6, 0x6e, 0xbc, 0x1b, 0x24, 0x5f, 0xb9, 0x73, 0xe2, 0xcc, 0xaf, + 0xe2, 0x2f, 0x70, 0xe6, 0xcc, 0xb1, 0xf2, 0xda, 0x4d, 0xa2, 0x56, 0x95, 0x73, 0xb3, 0xde, 0xbc, + 0xf7, 0x66, 0x76, 0xfc, 0x06, 0xf5, 0x3d, 0x4a, 0xbd, 0x00, 0x0c, 0x37, 0xa0, 0xab, 0x85, 0xb1, + 0x20, 0x82, 0xb8, 0x44, 0x90, 0x80, 0x7a, 0xc6, 0xf7, 0xde, 0x1c, 0x04, 0xe9, 0x6d, 0x63, 0x3a, + 0x8b, 0xa9, 0xa0, 0xb8, 0x9d, 0x69, 0x74, 0xa9, 0xd1, 0xb7, 0xeb, 0xb9, 0xa6, 0x75, 0x94, 0xbb, + 0x12, 0xe6, 0x1b, 0x24, 0x8a, 0xa8, 0x20, 0xc2, 0xa7, 0x11, 0xcf, 0xf4, 0xad, 0x57, 0x85, 0x3d, + 0xb9, 0xfb, 0x0d, 0x42, 0x92, 0xd3, 0x7b, 0x85, 0x74, 0x41, 0xe6, 0x01, 0x38, 0x9c, 0x81, 0xbb, + 0xbb, 0xc4, 0x0f, 0x81, 0x0b, 0x12, 0xb2, 0x7c, 0xa8, 0x0e, 0x47, 0xf8, 0x9c, 0xd2, 0xcb, 0x15, + 0x33, 0x23, 0x11, 0x27, 0x16, 0x2c, 0x57, 0xc0, 0x05, 0x7e, 0x8e, 0x0e, 0x02, 0x3f, 0xba, 0x84, + 0x85, 0x13, 0x03, 0xa7, 0xab, 0xd8, 0x05, 0x4d, 0x69, 0x2b, 0xdd, 0xfa, 0x69, 0xc9, 0xda, 0xcf, + 0x0a, 0x56, 0x8e, 0xe3, 0x67, 0xa8, 0xc1, 0x97, 0xc1, 0x86, 0x57, 0xc9, 0x79, 0x2a, 0x5f, 0x06, + 0xd7, 0xa4, 0xe1, 0x7d, 0xa4, 0x0a, 0x12, 0x7b, 0x20, 0x9c, 0x88, 0x84, 0xd0, 0xf9, 0x57, 0x41, + 0x35, 0xd9, 0x0f, 0x63, 0x54, 0x4d, 0x91, 0xcc, 0xdd, 0x92, 0xdf, 0xf8, 0xf8, 0x76, 0xf3, 0xba, + 0x2c, 0xdf, 0x6c, 0xfd, 0x0e, 0x55, 0x45, 0xc2, 0x40, 0x2b, 0xb7, 0x95, 0xee, 0x7e, 0xff, 0xa5, + 0x5e, 0xf4, 0x7f, 0x74, 0xd9, 0xd3, 0x4e, 0x18, 0x58, 0x52, 0x88, 0x01, 0x3d, 0x9c, 0xfb, 0xde, + 0x72, 0x05, 0x71, 0xe2, 0x6c, 0x96, 0xa9, 0x35, 0xda, 0x4a, 0x57, 0xed, 0x9f, 0x14, 0xfb, 0x0d, + 0x7d, 0xef, 0x73, 0x2a, 0xb6, 0x53, 0xed, 0x8c, 0x81, 0x7b, 0x5a, 0xb2, 0x1e, 0x5c, 0x3b, 0xae, + 0x41, 0xfc, 0x14, 0x35, 0x16, 0x3e, 0x67, 0x01, 0x49, 0xe4, 0xf3, 0xb3, 0x15, 0x59, 
0x6a, 0x8e, + 0x4d, 0xd2, 0x37, 0xb7, 0x91, 0xba, 0x00, 0xee, 0xc6, 0x3e, 0x4b, 0x13, 0xa3, 0x55, 0x73, 0xc6, + 0x06, 0xc2, 0xef, 0xd1, 0x5e, 0x16, 0x0f, 0xad, 0x26, 0xc7, 0xeb, 0x16, 0x8f, 0x37, 0x93, 0x7c, + 0x2b, 0xd7, 0xe1, 0x00, 0x69, 0xd9, 0xe2, 0x1c, 0x9e, 0x70, 0x01, 0xa1, 0xb3, 0x09, 0x83, 0x76, + 0x4f, 0x7a, 0xf6, 0x77, 0xf0, 0x94, 0x52, 0x7b, 0xad, 0xb4, 0x0e, 0x33, 0xcf, 0x9b, 0xf8, 0x50, + 0x45, 0xf5, 0x74, 0xc7, 0x72, 0xa3, 0x2f, 0x46, 0xa8, 0xbe, 0xde, 0x3d, 0x6e, 0xa1, 0x43, 0x73, + 0x62, 0x5b, 0x17, 0x8e, 0x7d, 0x31, 0x35, 0x9d, 0x2f, 0x93, 0xd9, 0xd4, 0x1c, 0x9d, 0x7d, 0x38, + 0x33, 0xc7, 0xcd, 0x12, 0xae, 0xa3, 0x9a, 0x3d, 0x18, 0x9e, 0x9b, 0xcd, 0x32, 0x3e, 0x40, 0xea, + 0x78, 0x60, 0x0f, 0x9c, 0x99, 0x6d, 0x99, 0x83, 0x4f, 0xcd, 0x4a, 0xff, 0xb7, 0x82, 0xd4, 0x31, + 0x11, 0x64, 0x94, 0x8d, 0x84, 0x7f, 0x2a, 0x48, 0xdd, 0xca, 0x2e, 0x7e, 0x5d, 0x3c, 0xfd, 0xed, + 0xa8, 0xb7, 0x8e, 0x77, 0x8c, 0x4d, 0xe7, 0xc9, 0x8f, 0x3f, 0x7f, 0x7f, 0x95, 0x1f, 0xe3, 0x47, + 0xeb, 0x63, 0x82, 0x48, 0xc4, 0x3e, 0xf0, 0x37, 0x81, 0x74, 0x1d, 0x32, 0x74, 0xe4, 0xd2, 0xf0, + 0x4e, 0xbb, 0xa9, 0xf2, 0xf5, 0x63, 0x5e, 0xf3, 0x68, 0x40, 0x22, 0x4f, 0xa7, 0xb1, 0x67, 0x78, + 0x10, 0xc9, 0x43, 0x34, 0xb2, 0x12, 0x61, 0x3e, 0xbf, 0xfb, 0x7c, 0xdf, 0x6e, 0x61, 0xff, 0x15, + 0x65, 0xbe, 0x27, 0xa5, 0x27, 0x57, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7f, 0xc1, 0xb0, 0xec, 0xce, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/schema.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/schema.pb.go new file mode 100644 index 0000000..35cb06e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/schema.pb.go @@ -0,0 +1,168 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datacatalog/v1beta1/schema.proto + +package datacatalog // import "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a schema (e.g. BigQuery, GoogleSQL, Avro schema). +type Schema struct { + // Schema of columns. A maximum of 10,000 columns and sub-columns can be + // specified. 
+ Columns []*ColumnSchema `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Schema) Reset() { *m = Schema{} } +func (m *Schema) String() string { return proto.CompactTextString(m) } +func (*Schema) ProtoMessage() {} +func (*Schema) Descriptor() ([]byte, []int) { + return fileDescriptor_schema_be14e9cb1c70fc2b, []int{0} +} +func (m *Schema) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Schema.Unmarshal(m, b) +} +func (m *Schema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Schema.Marshal(b, m, deterministic) +} +func (dst *Schema) XXX_Merge(src proto.Message) { + xxx_messageInfo_Schema.Merge(dst, src) +} +func (m *Schema) XXX_Size() int { + return xxx_messageInfo_Schema.Size(m) +} +func (m *Schema) XXX_DiscardUnknown() { + xxx_messageInfo_Schema.DiscardUnknown(m) +} + +var xxx_messageInfo_Schema proto.InternalMessageInfo + +func (m *Schema) GetColumns() []*ColumnSchema { + if m != nil { + return m.Columns + } + return nil +} + +// Representation of a column within a schema. Columns could be nested inside +// other columns. +type ColumnSchema struct { + // Required. Name of the column. + Column string `protobuf:"bytes,6,opt,name=column,proto3" json:"column,omitempty"` + // Required. Type of the column. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Description of the column. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // A column's mode indicates whether the values in this column are + // required, nullable, etc. Only 'NULLABLE', 'REQUIRED' and 'REPEATED' are + // supported, default mode is 'NULLABLE'. + Mode string `protobuf:"bytes,3,opt,name=mode,proto3" json:"mode,omitempty"` + // Schema of sub-columns. 
+ Subcolumns []*ColumnSchema `protobuf:"bytes,7,rep,name=subcolumns,proto3" json:"subcolumns,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColumnSchema) Reset() { *m = ColumnSchema{} } +func (m *ColumnSchema) String() string { return proto.CompactTextString(m) } +func (*ColumnSchema) ProtoMessage() {} +func (*ColumnSchema) Descriptor() ([]byte, []int) { + return fileDescriptor_schema_be14e9cb1c70fc2b, []int{1} +} +func (m *ColumnSchema) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColumnSchema.Unmarshal(m, b) +} +func (m *ColumnSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColumnSchema.Marshal(b, m, deterministic) +} +func (dst *ColumnSchema) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColumnSchema.Merge(dst, src) +} +func (m *ColumnSchema) XXX_Size() int { + return xxx_messageInfo_ColumnSchema.Size(m) +} +func (m *ColumnSchema) XXX_DiscardUnknown() { + xxx_messageInfo_ColumnSchema.DiscardUnknown(m) +} + +var xxx_messageInfo_ColumnSchema proto.InternalMessageInfo + +func (m *ColumnSchema) GetColumn() string { + if m != nil { + return m.Column + } + return "" +} + +func (m *ColumnSchema) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *ColumnSchema) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ColumnSchema) GetMode() string { + if m != nil { + return m.Mode + } + return "" +} + +func (m *ColumnSchema) GetSubcolumns() []*ColumnSchema { + if m != nil { + return m.Subcolumns + } + return nil +} + +func init() { + proto.RegisterType((*Schema)(nil), "google.cloud.datacatalog.v1beta1.Schema") + proto.RegisterType((*ColumnSchema)(nil), "google.cloud.datacatalog.v1beta1.ColumnSchema") +} + +func init() { + proto.RegisterFile("google/cloud/datacatalog/v1beta1/schema.proto", fileDescriptor_schema_be14e9cb1c70fc2b) +} + +var fileDescriptor_schema_be14e9cb1c70fc2b = []byte{ + // 251 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x91, 0x31, 0x4b, 0x04, 0x31, + 0x10, 0x85, 0xc9, 0x9d, 0xec, 0xe1, 0x9c, 0x55, 0x0a, 0x49, 0x61, 0xb1, 0x5c, 0x75, 0x8d, 0x09, + 0xa7, 0xa5, 0x9d, 0x36, 0x82, 0x20, 0xb2, 0x76, 0x76, 0xd9, 0x6c, 0x88, 0x0b, 0xd9, 0x4c, 0xd8, + 0x64, 0x05, 0xff, 0x9d, 0x3f, 0xcb, 0x52, 0x2e, 0xd9, 0x83, 0x34, 0x8b, 0x70, 0xdd, 0xe4, 0xbd, + 0xf7, 0x3d, 0x86, 0x0c, 0xdc, 0x1a, 0x44, 0x63, 0xb5, 0x50, 0x16, 0xa7, 0x4e, 0x74, 0x32, 0x4a, + 0x25, 0xa3, 0xb4, 0x68, 0xc4, 0xd7, 0xa1, 0xd5, 0x51, 0x1e, 0x44, 0x50, 0x9f, 0x7a, 0x90, 0xdc, + 0x8f, 0x18, 0x91, 0xd6, 0x39, 0xce, 0x53, 0x9c, 0x17, 0x71, 0x3e, 0xc7, 0x77, 0x0d, 0x54, 0xef, + 0x89, 0xa0, 0xcf, 0xb0, 0x51, 0x68, 0xa7, 0xc1, 0x05, 0xb6, 0xaa, 0xd7, 0xfb, 0xed, 0x1d, 0xe7, + 0xff, 0xd1, 0xfc, 0x29, 0x01, 0xb9, 0xa0, 0x39, 0xe1, 0xbb, 0x1f, 0x02, 0x57, 0xa5, 0x43, 0xaf, + 0xa1, 0xca, 0x1e, 0xab, 0x6a, 0xb2, 0xbf, 0x6c, 0xe6, 0x17, 0xa5, 0x70, 0x11, 0xbf, 0xbd, 0x66, + 0x24, 0xa9, 0x69, 0xa6, 0x35, 0x6c, 0x3b, 0x1d, 0xd4, 0xd8, 0xfb, 0xd8, 0xa3, 0x63, 0xab, 0x64, + 0x95, 0xd2, 0x91, 0x1a, 0xb0, 0xd3, 0x6c, 0x9d, 0xa9, 0xe3, 0x4c, 0x5f, 0x01, 0xc2, 0xd4, 0x9e, + 0xf6, 0xdf, 0x9c, 0xb5, 0x7f, 0xd1, 0xf0, 0xe8, 0xe1, 0x46, 0xe1, 0xb0, 0x58, 0xf0, 0x46, 0x3e, + 0x5e, 0x66, 0xcf, 0xa0, 0x95, 0xce, 0x70, 0x1c, 0x8d, 0x30, 0xda, 0xa5, 0x6f, 0x17, 0xd9, 0x92, + 0xbe, 0x0f, 0xcb, 0x87, 0x7a, 0x28, 0xb4, 0x5f, 0x42, 0xda, 0x2a, 0xa1, 0xf7, 0x7f, 0x01, 0x00, + 
0x00, 0xff, 0xff, 0x28, 0x89, 0x34, 0x94, 0xe2, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/table_spec.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/table_spec.pb.go new file mode 100644 index 0000000..30d9f98 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/table_spec.pb.go @@ -0,0 +1,173 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datacatalog/v1beta1/table_spec.proto + +package datacatalog // import "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Table source type. +type TableSourceType int32 + +const ( + // Default unknown type. + TableSourceType_TABLE_SOURCE_TYPE_UNSPECIFIED TableSourceType = 0 + // Table view. + TableSourceType_BIGQUERY_VIEW TableSourceType = 2 + // BigQuery native table. + TableSourceType_BIGQUERY_TABLE TableSourceType = 5 +) + +var TableSourceType_name = map[int32]string{ + 0: "TABLE_SOURCE_TYPE_UNSPECIFIED", + 2: "BIGQUERY_VIEW", + 5: "BIGQUERY_TABLE", +} +var TableSourceType_value = map[string]int32{ + "TABLE_SOURCE_TYPE_UNSPECIFIED": 0, + "BIGQUERY_VIEW": 2, + "BIGQUERY_TABLE": 5, +} + +func (x TableSourceType) String() string { + return proto.EnumName(TableSourceType_name, int32(x)) +} +func (TableSourceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_table_spec_345702233622a222, []int{0} +} + +// Describes a BigQuery table. +type BigQueryTableSpec struct { + // The table source type. + TableSourceType TableSourceType `protobuf:"varint,1,opt,name=table_source_type,json=tableSourceType,proto3,enum=google.cloud.datacatalog.v1beta1.TableSourceType" json:"table_source_type,omitempty"` + // Table view specification. This field should only be populated if + // table_source_type is BIGQUERY_VIEW. 
+ ViewSpec *ViewSpec `protobuf:"bytes,2,opt,name=view_spec,json=viewSpec,proto3" json:"view_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryTableSpec) Reset() { *m = BigQueryTableSpec{} } +func (m *BigQueryTableSpec) String() string { return proto.CompactTextString(m) } +func (*BigQueryTableSpec) ProtoMessage() {} +func (*BigQueryTableSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_table_spec_345702233622a222, []int{0} +} +func (m *BigQueryTableSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryTableSpec.Unmarshal(m, b) +} +func (m *BigQueryTableSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryTableSpec.Marshal(b, m, deterministic) +} +func (dst *BigQueryTableSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryTableSpec.Merge(dst, src) +} +func (m *BigQueryTableSpec) XXX_Size() int { + return xxx_messageInfo_BigQueryTableSpec.Size(m) +} +func (m *BigQueryTableSpec) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryTableSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryTableSpec proto.InternalMessageInfo + +func (m *BigQueryTableSpec) GetTableSourceType() TableSourceType { + if m != nil { + return m.TableSourceType + } + return TableSourceType_TABLE_SOURCE_TYPE_UNSPECIFIED +} + +func (m *BigQueryTableSpec) GetViewSpec() *ViewSpec { + if m != nil { + return m.ViewSpec + } + return nil +} + +// Table view specification. +type ViewSpec struct { + // The query that defines the table view. + ViewQuery string `protobuf:"bytes,1,opt,name=view_query,json=viewQuery,proto3" json:"view_query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ViewSpec) Reset() { *m = ViewSpec{} } +func (m *ViewSpec) String() string { return proto.CompactTextString(m) } +func (*ViewSpec) ProtoMessage() {} +func (*ViewSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_table_spec_345702233622a222, []int{1} +} +func (m *ViewSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ViewSpec.Unmarshal(m, b) +} +func (m *ViewSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ViewSpec.Marshal(b, m, deterministic) +} +func (dst *ViewSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ViewSpec.Merge(dst, src) +} +func (m *ViewSpec) XXX_Size() int { + return xxx_messageInfo_ViewSpec.Size(m) +} +func (m *ViewSpec) XXX_DiscardUnknown() { + xxx_messageInfo_ViewSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_ViewSpec proto.InternalMessageInfo + +func (m *ViewSpec) GetViewQuery() string { + if m != nil { + return m.ViewQuery + } + return "" +} + +func init() { + proto.RegisterType((*BigQueryTableSpec)(nil), "google.cloud.datacatalog.v1beta1.BigQueryTableSpec") + proto.RegisterType((*ViewSpec)(nil), "google.cloud.datacatalog.v1beta1.ViewSpec") + proto.RegisterEnum("google.cloud.datacatalog.v1beta1.TableSourceType", TableSourceType_name, TableSourceType_value) +} + +func init() { + proto.RegisterFile("google/cloud/datacatalog/v1beta1/table_spec.proto", fileDescriptor_table_spec_345702233622a222) +} + +var fileDescriptor_table_spec_345702233622a222 = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xcf, 0x4b, 0x02, 0x41, + 0x14, 0xc7, 0x1b, 0xa1, 0xd0, 0x89, 0xfc, 0x31, 0x27, 0x0f, 0x09, 0xe6, 0xc9, 0x3c, 0xcc, 
0xb2, + 0x76, 0xec, 0xd4, 0xda, 0x24, 0x4b, 0x51, 0xba, 0xae, 0x86, 0x45, 0x2c, 0xe3, 0xf8, 0x18, 0x16, + 0x36, 0x67, 0x5a, 0x47, 0xc5, 0xbf, 0xad, 0x7f, 0xac, 0x63, 0x38, 0x6e, 0x61, 0x81, 0x78, 0x1b, + 0xbe, 0x8f, 0xcf, 0x67, 0xbe, 0x8f, 0x87, 0x5d, 0xa9, 0x94, 0x4c, 0xc0, 0x11, 0x89, 0x5a, 0x4c, + 0x9d, 0x29, 0x37, 0x5c, 0x70, 0xc3, 0x13, 0x25, 0x9d, 0xa5, 0x3b, 0x01, 0xc3, 0x5d, 0xc7, 0xf0, + 0x49, 0x02, 0xd1, 0x5c, 0x83, 0xa0, 0x3a, 0x55, 0x46, 0x91, 0xfa, 0x16, 0xa1, 0x16, 0xa1, 0x3b, + 0x08, 0xcd, 0x90, 0xc6, 0x27, 0xc2, 0x15, 0x2f, 0x96, 0xfd, 0x05, 0xa4, 0xeb, 0x70, 0x83, 0x0f, + 0x34, 0x08, 0xf2, 0x86, 0x2b, 0x99, 0x4b, 0x2d, 0x52, 0x01, 0x91, 0x59, 0x6b, 0xa8, 0xa2, 0x3a, + 0x6a, 0x16, 0xdb, 0x2e, 0x3d, 0xe4, 0xa4, 0x5b, 0x8f, 0x25, 0xc3, 0xb5, 0x86, 0xa0, 0x64, 0xfe, + 0x06, 0xa4, 0x8b, 0x0b, 0xcb, 0x18, 0x56, 0xb6, 0x69, 0x35, 0x57, 0x47, 0xcd, 0xd3, 0x76, 0xeb, + 0xb0, 0x76, 0x14, 0xc3, 0x6a, 0xd3, 0x2e, 0xc8, 0x2f, 0xb3, 0x57, 0xe3, 0x12, 0xe7, 0x7f, 0x52, + 0x52, 0xc3, 0xd8, 0x4a, 0x3f, 0x36, 0xab, 0xd8, 0xb2, 0x85, 0xc0, 0x7e, 0x63, 0x77, 0x6b, 0xbd, + 0xe2, 0xd2, 0xbf, 0x5e, 0xe4, 0x02, 0xd7, 0xc2, 0x1b, 0xef, 0x81, 0x45, 0x83, 0xa7, 0x61, 0xd0, + 0x61, 0x51, 0x38, 0xee, 0xb1, 0x68, 0xf8, 0x38, 0xe8, 0xb1, 0x8e, 0x7f, 0xe7, 0xb3, 0xdb, 0xf2, + 0x11, 0xa9, 0xe0, 0x33, 0xcf, 0xef, 0xf6, 0x87, 0x2c, 0x18, 0x47, 0x23, 0x9f, 0x3d, 0x97, 0x73, + 0x84, 0xe0, 0xe2, 0x6f, 0x64, 0xf1, 0xf2, 0xb1, 0xa7, 0xf1, 0xb9, 0x50, 0xef, 0x7b, 0x57, 0xe8, + 0xa1, 0x97, 0xfb, 0x6c, 0x26, 0x55, 0xc2, 0x67, 0x92, 0xaa, 0x54, 0x3a, 0x12, 0x66, 0xf6, 0x4a, + 0xce, 0x76, 0xc4, 0x75, 0x3c, 0xdf, 0x7f, 0xdb, 0xeb, 0x9d, 0xec, 0x0b, 0xa1, 0xc9, 0x89, 0x45, + 0xaf, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0x41, 0xe6, 0x4f, 0xb9, 0x15, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/timestamps.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/timestamps.pb.go new file mode 100644 index 0000000..241e5a2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1/timestamps.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datacatalog/v1beta1/timestamps.proto + +package datacatalog // import "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Timestamps about this resource according to a particular system. +type SystemTimestamps struct { + // Output only. The creation time of the resource within the given system. + CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The last-modified time of the resource within the given + // system. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Output only. 
The expiration time of the resource within the given system. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SystemTimestamps) Reset() { *m = SystemTimestamps{} } +func (m *SystemTimestamps) String() string { return proto.CompactTextString(m) } +func (*SystemTimestamps) ProtoMessage() {} +func (*SystemTimestamps) Descriptor() ([]byte, []int) { + return fileDescriptor_timestamps_d8e8b54cbb1fb3a6, []int{0} +} +func (m *SystemTimestamps) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SystemTimestamps.Unmarshal(m, b) +} +func (m *SystemTimestamps) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SystemTimestamps.Marshal(b, m, deterministic) +} +func (dst *SystemTimestamps) XXX_Merge(src proto.Message) { + xxx_messageInfo_SystemTimestamps.Merge(dst, src) +} +func (m *SystemTimestamps) XXX_Size() int { + return xxx_messageInfo_SystemTimestamps.Size(m) +} +func (m *SystemTimestamps) XXX_DiscardUnknown() { + xxx_messageInfo_SystemTimestamps.DiscardUnknown(m) +} + +var xxx_messageInfo_SystemTimestamps proto.InternalMessageInfo + +func (m *SystemTimestamps) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *SystemTimestamps) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *SystemTimestamps) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func init() { + proto.RegisterType((*SystemTimestamps)(nil), "google.cloud.datacatalog.v1beta1.SystemTimestamps") +} + +func init() { + proto.RegisterFile("google/cloud/datacatalog/v1beta1/timestamps.proto", fileDescriptor_timestamps_d8e8b54cbb1fb3a6) +} + +var fileDescriptor_timestamps_d8e8b54cbb1fb3a6 = []byte{ + // 229 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0xd1, 0x3f, 0x4b, 0x03, 0x41, + 0x10, 0x05, 0x70, 0x56, 0xc1, 0x62, 0xd3, 0x48, 0x2a, 0x09, 0x82, 0xc1, 0xca, 0x6a, 0x96, 0xd3, + 0x32, 0x9d, 0xad, 0x8d, 0xa8, 0x95, 0x8d, 0xcc, 0xdd, 0x8d, 0xcb, 0xc1, 0x6d, 0x66, 0xd9, 0x9d, + 0x13, 0xfd, 0x88, 0x7e, 0x23, 0x4b, 0xd9, 0x3f, 0xb9, 0xa4, 0x09, 0x69, 0xdf, 0xbc, 0xdf, 0x3b, + 0xb8, 0xd5, 0x8d, 0x65, 0xb6, 0x23, 0x99, 0x6e, 0xe4, 0xa9, 0x37, 0x3d, 0x0a, 0x76, 0x28, 0x38, + 0xb2, 0x35, 0x5f, 0x4d, 0x4b, 0x82, 0x8d, 0x91, 0xc1, 0x51, 0x14, 0x74, 0x3e, 0x82, 0x0f, 0x2c, + 0xbc, 0x5c, 0x17, 0x02, 0x99, 0xc0, 0x01, 0x81, 0x4a, 0x56, 0x37, 0x75, 0x34, 0xf7, 0xdb, 0xe9, + 0x73, 0xbf, 0x51, 0x26, 0x6e, 0x7f, 0x95, 0xbe, 0x7c, 0xfd, 0x89, 0x42, 0xee, 0x6d, 0x5e, 0x5f, + 0x6e, 0xf4, 0xa2, 0x0b, 0x84, 0x42, 0x1f, 0xa9, 0x7e, 0xa5, 0xd6, 0xea, 0x6e, 0x71, 0xbf, 0x82, + 0xfa, 0xb5, 0xdd, 0x16, 0xcc, 0xe2, 0x45, 0x97, 0x7a, 0x0a, 0x12, 0x9e, 0x7c, 0x3f, 0xe3, 0xb3, + 0xd3, 0xb8, 0xd4, 0x77, 0x98, 0xbe, 0xfd, 0x10, 0x2a, 0x3e, 0x3f, 0x8d, 0x4b, 0x3d, 0x05, 0x8f, + 0x5e, 0x5f, 0x77, 0xec, 0xe0, 0xd8, 0x4f, 0x79, 0x56, 0xef, 0x4f, 0xf5, 0x66, 0x79, 0xc4, 0xad, + 0x05, 0x0e, 0xd6, 0x58, 0xda, 0xe6, 0x59, 0x53, 0x4e, 0xe8, 0x87, 0x78, 0xfc, 0x09, 0x36, 0x07, + 0xd9, 0x9f, 0x52, 0xed, 0x45, 0xa6, 0x0f, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0xef, 0x7f, 0xec, + 0xb8, 0xbc, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation.pb.go 
b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation.pb.go new file mode 100644 index 0000000..1c45041 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation.pb.go @@ -0,0 +1,1992 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/annotation.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies where is the answer from. +type AnnotationSource int32 + +const ( + AnnotationSource_ANNOTATION_SOURCE_UNSPECIFIED AnnotationSource = 0 + // Answer is provided by a human contributor. + AnnotationSource_OPERATOR AnnotationSource = 3 +) + +var AnnotationSource_name = map[int32]string{ + 0: "ANNOTATION_SOURCE_UNSPECIFIED", + 3: "OPERATOR", +} +var AnnotationSource_value = map[string]int32{ + "ANNOTATION_SOURCE_UNSPECIFIED": 0, + "OPERATOR": 3, +} + +func (x AnnotationSource) String() string { + return proto.EnumName(AnnotationSource_name, int32(x)) +} +func (AnnotationSource) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{0} +} + +type AnnotationSentiment int32 + +const ( + AnnotationSentiment_ANNOTATION_SENTIMENT_UNSPECIFIED AnnotationSentiment = 0 + // This annotation describes negatively about the data. + AnnotationSentiment_NEGATIVE AnnotationSentiment = 1 + // This label describes positively about the data. + AnnotationSentiment_POSITIVE AnnotationSentiment = 2 +) + +var AnnotationSentiment_name = map[int32]string{ + 0: "ANNOTATION_SENTIMENT_UNSPECIFIED", + 1: "NEGATIVE", + 2: "POSITIVE", +} +var AnnotationSentiment_value = map[string]int32{ + "ANNOTATION_SENTIMENT_UNSPECIFIED": 0, + "NEGATIVE": 1, + "POSITIVE": 2, +} + +func (x AnnotationSentiment) String() string { + return proto.EnumName(AnnotationSentiment_name, int32(x)) +} +func (AnnotationSentiment) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{1} +} + +type AnnotationType int32 + +const ( + AnnotationType_ANNOTATION_TYPE_UNSPECIFIED AnnotationType = 0 + // Classification annotations in an image. + AnnotationType_IMAGE_CLASSIFICATION_ANNOTATION AnnotationType = 1 + // Bounding box annotations in an image. + AnnotationType_IMAGE_BOUNDING_BOX_ANNOTATION AnnotationType = 2 + // Oriented bounding box. The box does not have to be parallel to horizontal + // line. + AnnotationType_IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION AnnotationType = 13 + // Bounding poly annotations in an image. + AnnotationType_IMAGE_BOUNDING_POLY_ANNOTATION AnnotationType = 10 + // Polyline annotations in an image. 
+ AnnotationType_IMAGE_POLYLINE_ANNOTATION AnnotationType = 11 + // Segmentation annotations in an image. + AnnotationType_IMAGE_SEGMENTATION_ANNOTATION AnnotationType = 12 + // Classification annotations in video shots. + AnnotationType_VIDEO_SHOTS_CLASSIFICATION_ANNOTATION AnnotationType = 3 + // Video object tracking annotation. + AnnotationType_VIDEO_OBJECT_TRACKING_ANNOTATION AnnotationType = 4 + // Video object detection annotation. + AnnotationType_VIDEO_OBJECT_DETECTION_ANNOTATION AnnotationType = 5 + // Video event annotation. + AnnotationType_VIDEO_EVENT_ANNOTATION AnnotationType = 6 + // Speech to text annotation. + AnnotationType_AUDIO_TRANSCRIPTION_ANNOTATION AnnotationType = 7 + // Classification for text. + AnnotationType_TEXT_CLASSIFICATION_ANNOTATION AnnotationType = 8 + // Entity extraction for text. + AnnotationType_TEXT_ENTITY_EXTRACTION_ANNOTATION AnnotationType = 9 +) + +var AnnotationType_name = map[int32]string{ + 0: "ANNOTATION_TYPE_UNSPECIFIED", + 1: "IMAGE_CLASSIFICATION_ANNOTATION", + 2: "IMAGE_BOUNDING_BOX_ANNOTATION", + 13: "IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION", + 10: "IMAGE_BOUNDING_POLY_ANNOTATION", + 11: "IMAGE_POLYLINE_ANNOTATION", + 12: "IMAGE_SEGMENTATION_ANNOTATION", + 3: "VIDEO_SHOTS_CLASSIFICATION_ANNOTATION", + 4: "VIDEO_OBJECT_TRACKING_ANNOTATION", + 5: "VIDEO_OBJECT_DETECTION_ANNOTATION", + 6: "VIDEO_EVENT_ANNOTATION", + 7: "AUDIO_TRANSCRIPTION_ANNOTATION", + 8: "TEXT_CLASSIFICATION_ANNOTATION", + 9: "TEXT_ENTITY_EXTRACTION_ANNOTATION", +} +var AnnotationType_value = map[string]int32{ + "ANNOTATION_TYPE_UNSPECIFIED": 0, + "IMAGE_CLASSIFICATION_ANNOTATION": 1, + "IMAGE_BOUNDING_BOX_ANNOTATION": 2, + "IMAGE_ORIENTED_BOUNDING_BOX_ANNOTATION": 13, + "IMAGE_BOUNDING_POLY_ANNOTATION": 10, + "IMAGE_POLYLINE_ANNOTATION": 11, + "IMAGE_SEGMENTATION_ANNOTATION": 12, + "VIDEO_SHOTS_CLASSIFICATION_ANNOTATION": 3, + "VIDEO_OBJECT_TRACKING_ANNOTATION": 4, + "VIDEO_OBJECT_DETECTION_ANNOTATION": 5, + "VIDEO_EVENT_ANNOTATION": 6, + "AUDIO_TRANSCRIPTION_ANNOTATION": 7, + "TEXT_CLASSIFICATION_ANNOTATION": 8, + "TEXT_ENTITY_EXTRACTION_ANNOTATION": 9, +} + +func (x AnnotationType) String() string { + return proto.EnumName(AnnotationType_name, int32(x)) +} +func (AnnotationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{2} +} + +// Annotation for Example. Each example may have one or more annotations. For +// example in image classification problem, each image might have one or more +// labels. We call labels binded with this image an Annotation. +type Annotation struct { + // Output only. Unique name of this annotation, format is: + // + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/{annotated_dataset}/examples/{example_id}/annotations/{annotation_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. The source of the annotation. + AnnotationSource AnnotationSource `protobuf:"varint,2,opt,name=annotation_source,json=annotationSource,proto3,enum=google.cloud.datalabeling.v1beta1.AnnotationSource" json:"annotation_source,omitempty"` + // Output only. This is the actual annotation value, e.g classification, + // bounding box values are stored here. + AnnotationValue *AnnotationValue `protobuf:"bytes,3,opt,name=annotation_value,json=annotationValue,proto3" json:"annotation_value,omitempty"` + // Output only. Annotation metadata, including information like votes + // for labels. 
+ AnnotationMetadata *AnnotationMetadata `protobuf:"bytes,4,opt,name=annotation_metadata,json=annotationMetadata,proto3" json:"annotation_metadata,omitempty"` + // Output only. Sentiment for this annotation. + AnnotationSentiment AnnotationSentiment `protobuf:"varint,6,opt,name=annotation_sentiment,json=annotationSentiment,proto3,enum=google.cloud.datalabeling.v1beta1.AnnotationSentiment" json:"annotation_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Annotation) Reset() { *m = Annotation{} } +func (m *Annotation) String() string { return proto.CompactTextString(m) } +func (*Annotation) ProtoMessage() {} +func (*Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{0} +} +func (m *Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Annotation.Unmarshal(m, b) +} +func (m *Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Annotation.Marshal(b, m, deterministic) +} +func (dst *Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Annotation.Merge(dst, src) +} +func (m *Annotation) XXX_Size() int { + return xxx_messageInfo_Annotation.Size(m) +} +func (m *Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_Annotation proto.InternalMessageInfo + +func (m *Annotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Annotation) GetAnnotationSource() AnnotationSource { + if m != nil { + return m.AnnotationSource + } + return AnnotationSource_ANNOTATION_SOURCE_UNSPECIFIED +} + +func (m *Annotation) GetAnnotationValue() *AnnotationValue { + if m != nil { + return m.AnnotationValue + } + return nil +} + +func (m *Annotation) GetAnnotationMetadata() *AnnotationMetadata { + if m != nil { + return m.AnnotationMetadata + } + return nil +} + +func (m *Annotation) GetAnnotationSentiment() AnnotationSentiment { + if m != nil { + return m.AnnotationSentiment + } + return AnnotationSentiment_ANNOTATION_SENTIMENT_UNSPECIFIED +} + +// Annotation value for an example. 
+type AnnotationValue struct { + // Types that are valid to be assigned to ValueType: + // *AnnotationValue_ImageClassificationAnnotation + // *AnnotationValue_ImageBoundingPolyAnnotation + // *AnnotationValue_ImagePolylineAnnotation + // *AnnotationValue_ImageSegmentationAnnotation + // *AnnotationValue_TextClassificationAnnotation + // *AnnotationValue_VideoClassificationAnnotation + // *AnnotationValue_VideoObjectTrackingAnnotation + // *AnnotationValue_VideoEventAnnotation + // *AnnotationValue_AudioRecognitionAnnotation + ValueType isAnnotationValue_ValueType `protobuf_oneof:"value_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationValue) Reset() { *m = AnnotationValue{} } +func (m *AnnotationValue) String() string { return proto.CompactTextString(m) } +func (*AnnotationValue) ProtoMessage() {} +func (*AnnotationValue) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{1} +} +func (m *AnnotationValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationValue.Unmarshal(m, b) +} +func (m *AnnotationValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationValue.Marshal(b, m, deterministic) +} +func (dst *AnnotationValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationValue.Merge(dst, src) +} +func (m *AnnotationValue) XXX_Size() int { + return xxx_messageInfo_AnnotationValue.Size(m) +} +func (m *AnnotationValue) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationValue.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationValue proto.InternalMessageInfo + +type isAnnotationValue_ValueType interface { + isAnnotationValue_ValueType() +} + +type AnnotationValue_ImageClassificationAnnotation struct { + ImageClassificationAnnotation *ImageClassificationAnnotation `protobuf:"bytes,1,opt,name=image_classification_annotation,json=imageClassificationAnnotation,proto3,oneof"` +} + +type AnnotationValue_ImageBoundingPolyAnnotation struct { + ImageBoundingPolyAnnotation *ImageBoundingPolyAnnotation `protobuf:"bytes,2,opt,name=image_bounding_poly_annotation,json=imageBoundingPolyAnnotation,proto3,oneof"` +} + +type AnnotationValue_ImagePolylineAnnotation struct { + ImagePolylineAnnotation *ImagePolylineAnnotation `protobuf:"bytes,8,opt,name=image_polyline_annotation,json=imagePolylineAnnotation,proto3,oneof"` +} + +type AnnotationValue_ImageSegmentationAnnotation struct { + ImageSegmentationAnnotation *ImageSegmentationAnnotation `protobuf:"bytes,9,opt,name=image_segmentation_annotation,json=imageSegmentationAnnotation,proto3,oneof"` +} + +type AnnotationValue_TextClassificationAnnotation struct { + TextClassificationAnnotation *TextClassificationAnnotation `protobuf:"bytes,3,opt,name=text_classification_annotation,json=textClassificationAnnotation,proto3,oneof"` +} + +type AnnotationValue_VideoClassificationAnnotation struct { + VideoClassificationAnnotation *VideoClassificationAnnotation `protobuf:"bytes,4,opt,name=video_classification_annotation,json=videoClassificationAnnotation,proto3,oneof"` +} + +type AnnotationValue_VideoObjectTrackingAnnotation struct { + VideoObjectTrackingAnnotation *VideoObjectTrackingAnnotation `protobuf:"bytes,5,opt,name=video_object_tracking_annotation,json=videoObjectTrackingAnnotation,proto3,oneof"` +} + +type AnnotationValue_VideoEventAnnotation struct { + VideoEventAnnotation *VideoEventAnnotation 
`protobuf:"bytes,6,opt,name=video_event_annotation,json=videoEventAnnotation,proto3,oneof"` +} + +type AnnotationValue_AudioRecognitionAnnotation struct { + AudioRecognitionAnnotation *AudioRecognitionAnnotation `protobuf:"bytes,7,opt,name=audio_recognition_annotation,json=audioRecognitionAnnotation,proto3,oneof"` +} + +func (*AnnotationValue_ImageClassificationAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_ImageBoundingPolyAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_ImagePolylineAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_ImageSegmentationAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_TextClassificationAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_VideoClassificationAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_VideoObjectTrackingAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_VideoEventAnnotation) isAnnotationValue_ValueType() {} + +func (*AnnotationValue_AudioRecognitionAnnotation) isAnnotationValue_ValueType() {} + +func (m *AnnotationValue) GetValueType() isAnnotationValue_ValueType { + if m != nil { + return m.ValueType + } + return nil +} + +func (m *AnnotationValue) GetImageClassificationAnnotation() *ImageClassificationAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_ImageClassificationAnnotation); ok { + return x.ImageClassificationAnnotation + } + return nil +} + +func (m *AnnotationValue) GetImageBoundingPolyAnnotation() *ImageBoundingPolyAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_ImageBoundingPolyAnnotation); ok { + return x.ImageBoundingPolyAnnotation + } + return nil +} + +func (m *AnnotationValue) GetImagePolylineAnnotation() *ImagePolylineAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_ImagePolylineAnnotation); ok { + return x.ImagePolylineAnnotation + } + return nil +} + +func (m *AnnotationValue) GetImageSegmentationAnnotation() *ImageSegmentationAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_ImageSegmentationAnnotation); ok { + return x.ImageSegmentationAnnotation + } + return nil +} + +func (m *AnnotationValue) GetTextClassificationAnnotation() *TextClassificationAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_TextClassificationAnnotation); ok { + return x.TextClassificationAnnotation + } + return nil +} + +func (m *AnnotationValue) GetVideoClassificationAnnotation() *VideoClassificationAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_VideoClassificationAnnotation); ok { + return x.VideoClassificationAnnotation + } + return nil +} + +func (m *AnnotationValue) GetVideoObjectTrackingAnnotation() *VideoObjectTrackingAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_VideoObjectTrackingAnnotation); ok { + return x.VideoObjectTrackingAnnotation + } + return nil +} + +func (m *AnnotationValue) GetVideoEventAnnotation() *VideoEventAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_VideoEventAnnotation); ok { + return x.VideoEventAnnotation + } + return nil +} + +func (m *AnnotationValue) GetAudioRecognitionAnnotation() *AudioRecognitionAnnotation { + if x, ok := m.GetValueType().(*AnnotationValue_AudioRecognitionAnnotation); ok { + return x.AudioRecognitionAnnotation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AnnotationValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AnnotationValue_OneofMarshaler, _AnnotationValue_OneofUnmarshaler, _AnnotationValue_OneofSizer, []interface{}{ + (*AnnotationValue_ImageClassificationAnnotation)(nil), + (*AnnotationValue_ImageBoundingPolyAnnotation)(nil), + (*AnnotationValue_ImagePolylineAnnotation)(nil), + (*AnnotationValue_ImageSegmentationAnnotation)(nil), + (*AnnotationValue_TextClassificationAnnotation)(nil), + (*AnnotationValue_VideoClassificationAnnotation)(nil), + (*AnnotationValue_VideoObjectTrackingAnnotation)(nil), + (*AnnotationValue_VideoEventAnnotation)(nil), + (*AnnotationValue_AudioRecognitionAnnotation)(nil), + } +} + +func _AnnotationValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AnnotationValue) + // value_type + switch x := m.ValueType.(type) { + case *AnnotationValue_ImageClassificationAnnotation: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationAnnotation); err != nil { + return err + } + case *AnnotationValue_ImageBoundingPolyAnnotation: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageBoundingPolyAnnotation); err != nil { + return err + } + case *AnnotationValue_ImagePolylineAnnotation: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImagePolylineAnnotation); err != nil { + return err + } + case *AnnotationValue_ImageSegmentationAnnotation: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageSegmentationAnnotation); err != nil { + return err + } + case *AnnotationValue_TextClassificationAnnotation: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationAnnotation); err != nil { + return err + } + case *AnnotationValue_VideoClassificationAnnotation: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationAnnotation); err != nil { + return err + } + case *AnnotationValue_VideoObjectTrackingAnnotation: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoObjectTrackingAnnotation); err != nil { + return err + } + case *AnnotationValue_VideoEventAnnotation: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoEventAnnotation); err != nil { + return err + } + case *AnnotationValue_AudioRecognitionAnnotation: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioRecognitionAnnotation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AnnotationValue.ValueType has unexpected type %T", x) + } + return nil +} + +func _AnnotationValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AnnotationValue) + switch tag { + case 1: // value_type.image_classification_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageClassificationAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_ImageClassificationAnnotation{msg} + return true, err + case 2: // value_type.image_bounding_poly_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageBoundingPolyAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_ImageBoundingPolyAnnotation{msg} + return true, err + case 8: // value_type.image_polyline_annotation + if 
wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImagePolylineAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_ImagePolylineAnnotation{msg} + return true, err + case 9: // value_type.image_segmentation_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageSegmentationAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_ImageSegmentationAnnotation{msg} + return true, err + case 3: // value_type.text_classification_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextClassificationAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_TextClassificationAnnotation{msg} + return true, err + case 4: // value_type.video_classification_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_VideoClassificationAnnotation{msg} + return true, err + case 5: // value_type.video_object_tracking_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoObjectTrackingAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_VideoObjectTrackingAnnotation{msg} + return true, err + case 6: // value_type.video_event_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoEventAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_VideoEventAnnotation{msg} + return true, err + case 7: // value_type.audio_recognition_annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AudioRecognitionAnnotation) + err := b.DecodeMessage(msg) + m.ValueType = &AnnotationValue_AudioRecognitionAnnotation{msg} + return true, err + default: + return false, nil + } +} + +func _AnnotationValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AnnotationValue) + // value_type + switch x := m.ValueType.(type) { + case *AnnotationValue_ImageClassificationAnnotation: + s := proto.Size(x.ImageClassificationAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_ImageBoundingPolyAnnotation: + s := proto.Size(x.ImageBoundingPolyAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_ImagePolylineAnnotation: + s := proto.Size(x.ImagePolylineAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_ImageSegmentationAnnotation: + s := proto.Size(x.ImageSegmentationAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_TextClassificationAnnotation: + s := proto.Size(x.TextClassificationAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_VideoClassificationAnnotation: + s := proto.Size(x.VideoClassificationAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_VideoObjectTrackingAnnotation: + s := proto.Size(x.VideoObjectTrackingAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_VideoEventAnnotation: + s := proto.Size(x.VideoEventAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotationValue_AudioRecognitionAnnotation: + s 
:= proto.Size(x.AudioRecognitionAnnotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Image classification annotation definition. +type ImageClassificationAnnotation struct { + // Label of image. + AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageClassificationAnnotation) Reset() { *m = ImageClassificationAnnotation{} } +func (m *ImageClassificationAnnotation) String() string { return proto.CompactTextString(m) } +func (*ImageClassificationAnnotation) ProtoMessage() {} +func (*ImageClassificationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{2} +} +func (m *ImageClassificationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageClassificationAnnotation.Unmarshal(m, b) +} +func (m *ImageClassificationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageClassificationAnnotation.Marshal(b, m, deterministic) +} +func (dst *ImageClassificationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageClassificationAnnotation.Merge(dst, src) +} +func (m *ImageClassificationAnnotation) XXX_Size() int { + return xxx_messageInfo_ImageClassificationAnnotation.Size(m) +} +func (m *ImageClassificationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ImageClassificationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageClassificationAnnotation proto.InternalMessageInfo + +func (m *ImageClassificationAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{3} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. 
+type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{4} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon in the image. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{5} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +// Normalized bounding polygon. +type NormalizedBoundingPoly struct { + // The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingPoly) Reset() { *m = NormalizedBoundingPoly{} } +func (m *NormalizedBoundingPoly) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingPoly) ProtoMessage() {} +func (*NormalizedBoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{6} +} +func (m *NormalizedBoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingPoly.Unmarshal(m, b) +} +func (m *NormalizedBoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingPoly.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingPoly.Merge(dst, src) +} +func (m *NormalizedBoundingPoly) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingPoly.Size(m) +} +func (m *NormalizedBoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingPoly proto.InternalMessageInfo + +func (m *NormalizedBoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// Image bounding poly annotation. It represents a polygon including +// bounding box in the image. +type ImageBoundingPolyAnnotation struct { + // The region of the polygon. If it is a bounding box, it is guaranteed to be + // four points. + // + // Types that are valid to be assigned to BoundedArea: + // *ImageBoundingPolyAnnotation_BoundingPoly + // *ImageBoundingPolyAnnotation_NormalizedBoundingPoly + BoundedArea isImageBoundingPolyAnnotation_BoundedArea `protobuf_oneof:"bounded_area"` + // Label of object in this bounding polygon. 
+ AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageBoundingPolyAnnotation) Reset() { *m = ImageBoundingPolyAnnotation{} } +func (m *ImageBoundingPolyAnnotation) String() string { return proto.CompactTextString(m) } +func (*ImageBoundingPolyAnnotation) ProtoMessage() {} +func (*ImageBoundingPolyAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{7} +} +func (m *ImageBoundingPolyAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageBoundingPolyAnnotation.Unmarshal(m, b) +} +func (m *ImageBoundingPolyAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageBoundingPolyAnnotation.Marshal(b, m, deterministic) +} +func (dst *ImageBoundingPolyAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageBoundingPolyAnnotation.Merge(dst, src) +} +func (m *ImageBoundingPolyAnnotation) XXX_Size() int { + return xxx_messageInfo_ImageBoundingPolyAnnotation.Size(m) +} +func (m *ImageBoundingPolyAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ImageBoundingPolyAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageBoundingPolyAnnotation proto.InternalMessageInfo + +type isImageBoundingPolyAnnotation_BoundedArea interface { + isImageBoundingPolyAnnotation_BoundedArea() +} + +type ImageBoundingPolyAnnotation_BoundingPoly struct { + BoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_poly,json=boundingPoly,proto3,oneof"` +} + +type ImageBoundingPolyAnnotation_NormalizedBoundingPoly struct { + NormalizedBoundingPoly *NormalizedBoundingPoly `protobuf:"bytes,3,opt,name=normalized_bounding_poly,json=normalizedBoundingPoly,proto3,oneof"` +} + +func (*ImageBoundingPolyAnnotation_BoundingPoly) isImageBoundingPolyAnnotation_BoundedArea() {} + +func (*ImageBoundingPolyAnnotation_NormalizedBoundingPoly) isImageBoundingPolyAnnotation_BoundedArea() { +} + +func (m *ImageBoundingPolyAnnotation) GetBoundedArea() isImageBoundingPolyAnnotation_BoundedArea { + if m != nil { + return m.BoundedArea + } + return nil +} + +func (m *ImageBoundingPolyAnnotation) GetBoundingPoly() *BoundingPoly { + if x, ok := m.GetBoundedArea().(*ImageBoundingPolyAnnotation_BoundingPoly); ok { + return x.BoundingPoly + } + return nil +} + +func (m *ImageBoundingPolyAnnotation) GetNormalizedBoundingPoly() *NormalizedBoundingPoly { + if x, ok := m.GetBoundedArea().(*ImageBoundingPolyAnnotation_NormalizedBoundingPoly); ok { + return x.NormalizedBoundingPoly + } + return nil +} + +func (m *ImageBoundingPolyAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImageBoundingPolyAnnotation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImageBoundingPolyAnnotation_OneofMarshaler, _ImageBoundingPolyAnnotation_OneofUnmarshaler, _ImageBoundingPolyAnnotation_OneofSizer, []interface{}{ + (*ImageBoundingPolyAnnotation_BoundingPoly)(nil), + (*ImageBoundingPolyAnnotation_NormalizedBoundingPoly)(nil), + } +} + +func _ImageBoundingPolyAnnotation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImageBoundingPolyAnnotation) + // bounded_area + switch x := m.BoundedArea.(type) { + case *ImageBoundingPolyAnnotation_BoundingPoly: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BoundingPoly); err != nil { + return err + } + case *ImageBoundingPolyAnnotation_NormalizedBoundingPoly: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NormalizedBoundingPoly); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImageBoundingPolyAnnotation.BoundedArea has unexpected type %T", x) + } + return nil +} + +func _ImageBoundingPolyAnnotation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImageBoundingPolyAnnotation) + switch tag { + case 2: // bounded_area.bounding_poly + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BoundingPoly) + err := b.DecodeMessage(msg) + m.BoundedArea = &ImageBoundingPolyAnnotation_BoundingPoly{msg} + return true, err + case 3: // bounded_area.normalized_bounding_poly + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(NormalizedBoundingPoly) + err := b.DecodeMessage(msg) + m.BoundedArea = &ImageBoundingPolyAnnotation_NormalizedBoundingPoly{msg} + return true, err + default: + return false, nil + } +} + +func _ImageBoundingPolyAnnotation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImageBoundingPolyAnnotation) + // bounded_area + switch x := m.BoundedArea.(type) { + case *ImageBoundingPolyAnnotation_BoundingPoly: + s := proto.Size(x.BoundingPoly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ImageBoundingPolyAnnotation_NormalizedBoundingPoly: + s := proto.Size(x.NormalizedBoundingPoly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A line with multiple line segments. +type Polyline struct { + // The polyline vertices. 
+ Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Polyline) Reset() { *m = Polyline{} } +func (m *Polyline) String() string { return proto.CompactTextString(m) } +func (*Polyline) ProtoMessage() {} +func (*Polyline) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{8} +} +func (m *Polyline) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Polyline.Unmarshal(m, b) +} +func (m *Polyline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Polyline.Marshal(b, m, deterministic) +} +func (dst *Polyline) XXX_Merge(src proto.Message) { + xxx_messageInfo_Polyline.Merge(dst, src) +} +func (m *Polyline) XXX_Size() int { + return xxx_messageInfo_Polyline.Size(m) +} +func (m *Polyline) XXX_DiscardUnknown() { + xxx_messageInfo_Polyline.DiscardUnknown(m) +} + +var xxx_messageInfo_Polyline proto.InternalMessageInfo + +func (m *Polyline) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +// Normalized polyline. +type NormalizedPolyline struct { + // The normalized polyline vertices. + NormalizedVertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedPolyline) Reset() { *m = NormalizedPolyline{} } +func (m *NormalizedPolyline) String() string { return proto.CompactTextString(m) } +func (*NormalizedPolyline) ProtoMessage() {} +func (*NormalizedPolyline) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{9} +} +func (m *NormalizedPolyline) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedPolyline.Unmarshal(m, b) +} +func (m *NormalizedPolyline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedPolyline.Marshal(b, m, deterministic) +} +func (dst *NormalizedPolyline) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedPolyline.Merge(dst, src) +} +func (m *NormalizedPolyline) XXX_Size() int { + return xxx_messageInfo_NormalizedPolyline.Size(m) +} +func (m *NormalizedPolyline) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedPolyline.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedPolyline proto.InternalMessageInfo + +func (m *NormalizedPolyline) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// A polyline for the image annotation. +type ImagePolylineAnnotation struct { + // Types that are valid to be assigned to Poly: + // *ImagePolylineAnnotation_Polyline + // *ImagePolylineAnnotation_NormalizedPolyline + Poly isImagePolylineAnnotation_Poly `protobuf_oneof:"poly"` + // Label of this polyline. 
+ AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImagePolylineAnnotation) Reset() { *m = ImagePolylineAnnotation{} } +func (m *ImagePolylineAnnotation) String() string { return proto.CompactTextString(m) } +func (*ImagePolylineAnnotation) ProtoMessage() {} +func (*ImagePolylineAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{10} +} +func (m *ImagePolylineAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImagePolylineAnnotation.Unmarshal(m, b) +} +func (m *ImagePolylineAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImagePolylineAnnotation.Marshal(b, m, deterministic) +} +func (dst *ImagePolylineAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImagePolylineAnnotation.Merge(dst, src) +} +func (m *ImagePolylineAnnotation) XXX_Size() int { + return xxx_messageInfo_ImagePolylineAnnotation.Size(m) +} +func (m *ImagePolylineAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ImagePolylineAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImagePolylineAnnotation proto.InternalMessageInfo + +type isImagePolylineAnnotation_Poly interface { + isImagePolylineAnnotation_Poly() +} + +type ImagePolylineAnnotation_Polyline struct { + Polyline *Polyline `protobuf:"bytes,2,opt,name=polyline,proto3,oneof"` +} + +type ImagePolylineAnnotation_NormalizedPolyline struct { + NormalizedPolyline *NormalizedPolyline `protobuf:"bytes,3,opt,name=normalized_polyline,json=normalizedPolyline,proto3,oneof"` +} + +func (*ImagePolylineAnnotation_Polyline) isImagePolylineAnnotation_Poly() {} + +func (*ImagePolylineAnnotation_NormalizedPolyline) isImagePolylineAnnotation_Poly() {} + +func (m *ImagePolylineAnnotation) GetPoly() isImagePolylineAnnotation_Poly { + if m != nil { + return m.Poly + } + return nil +} + +func (m *ImagePolylineAnnotation) GetPolyline() *Polyline { + if x, ok := m.GetPoly().(*ImagePolylineAnnotation_Polyline); ok { + return x.Polyline + } + return nil +} + +func (m *ImagePolylineAnnotation) GetNormalizedPolyline() *NormalizedPolyline { + if x, ok := m.GetPoly().(*ImagePolylineAnnotation_NormalizedPolyline); ok { + return x.NormalizedPolyline + } + return nil +} + +func (m *ImagePolylineAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImagePolylineAnnotation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImagePolylineAnnotation_OneofMarshaler, _ImagePolylineAnnotation_OneofUnmarshaler, _ImagePolylineAnnotation_OneofSizer, []interface{}{ + (*ImagePolylineAnnotation_Polyline)(nil), + (*ImagePolylineAnnotation_NormalizedPolyline)(nil), + } +} + +func _ImagePolylineAnnotation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImagePolylineAnnotation) + // poly + switch x := m.Poly.(type) { + case *ImagePolylineAnnotation_Polyline: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Polyline); err != nil { + return err + } + case *ImagePolylineAnnotation_NormalizedPolyline: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NormalizedPolyline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImagePolylineAnnotation.Poly has unexpected type %T", x) + } + return nil +} + +func _ImagePolylineAnnotation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImagePolylineAnnotation) + switch tag { + case 2: // poly.polyline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Polyline) + err := b.DecodeMessage(msg) + m.Poly = &ImagePolylineAnnotation_Polyline{msg} + return true, err + case 3: // poly.normalized_polyline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(NormalizedPolyline) + err := b.DecodeMessage(msg) + m.Poly = &ImagePolylineAnnotation_NormalizedPolyline{msg} + return true, err + default: + return false, nil + } +} + +func _ImagePolylineAnnotation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImagePolylineAnnotation) + // poly + switch x := m.Poly.(type) { + case *ImagePolylineAnnotation_Polyline: + s := proto.Size(x.Polyline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ImagePolylineAnnotation_NormalizedPolyline: + s := proto.Size(x.NormalizedPolyline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Image segmentation annotation. +type ImageSegmentationAnnotation struct { + // The mapping between rgb color and annotation spec. The key is the rgb + // color represented in format of rgb(0, 0, 0). The value is the + // AnnotationSpec. + AnnotationColors map[string]*AnnotationSpec `protobuf:"bytes,1,rep,name=annotation_colors,json=annotationColors,proto3" json:"annotation_colors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Image format. + MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // A byte string of a full image's color map. 
+ ImageBytes []byte `protobuf:"bytes,3,opt,name=image_bytes,json=imageBytes,proto3" json:"image_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSegmentationAnnotation) Reset() { *m = ImageSegmentationAnnotation{} } +func (m *ImageSegmentationAnnotation) String() string { return proto.CompactTextString(m) } +func (*ImageSegmentationAnnotation) ProtoMessage() {} +func (*ImageSegmentationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{11} +} +func (m *ImageSegmentationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSegmentationAnnotation.Unmarshal(m, b) +} +func (m *ImageSegmentationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSegmentationAnnotation.Marshal(b, m, deterministic) +} +func (dst *ImageSegmentationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSegmentationAnnotation.Merge(dst, src) +} +func (m *ImageSegmentationAnnotation) XXX_Size() int { + return xxx_messageInfo_ImageSegmentationAnnotation.Size(m) +} +func (m *ImageSegmentationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSegmentationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSegmentationAnnotation proto.InternalMessageInfo + +func (m *ImageSegmentationAnnotation) GetAnnotationColors() map[string]*AnnotationSpec { + if m != nil { + return m.AnnotationColors + } + return nil +} + +func (m *ImageSegmentationAnnotation) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *ImageSegmentationAnnotation) GetImageBytes() []byte { + if m != nil { + return m.ImageBytes + } + return nil +} + +// Text classification annotation. +type TextClassificationAnnotation struct { + // Label of the text. + AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextClassificationAnnotation) Reset() { *m = TextClassificationAnnotation{} } +func (m *TextClassificationAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextClassificationAnnotation) ProtoMessage() {} +func (*TextClassificationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{12} +} +func (m *TextClassificationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextClassificationAnnotation.Unmarshal(m, b) +} +func (m *TextClassificationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextClassificationAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextClassificationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextClassificationAnnotation.Merge(dst, src) +} +func (m *TextClassificationAnnotation) XXX_Size() int { + return xxx_messageInfo_TextClassificationAnnotation.Size(m) +} +func (m *TextClassificationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextClassificationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextClassificationAnnotation proto.InternalMessageInfo + +func (m *TextClassificationAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +// A time period inside of an example that has a time dimension (e.g. video). 
+type TimeSegment struct { + // Start of the time segment (inclusive), represented as the duration since + // the example start. + StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // End of the time segment (exclusive), represented as the duration since the + // example start. + EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeSegment) Reset() { *m = TimeSegment{} } +func (m *TimeSegment) String() string { return proto.CompactTextString(m) } +func (*TimeSegment) ProtoMessage() {} +func (*TimeSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{13} +} +func (m *TimeSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeSegment.Unmarshal(m, b) +} +func (m *TimeSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeSegment.Marshal(b, m, deterministic) +} +func (dst *TimeSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeSegment.Merge(dst, src) +} +func (m *TimeSegment) XXX_Size() int { + return xxx_messageInfo_TimeSegment.Size(m) +} +func (m *TimeSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TimeSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeSegment proto.InternalMessageInfo + +func (m *TimeSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *TimeSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video classification annotation. +type VideoClassificationAnnotation struct { + // The time segment of the video to which the annotation applies. + TimeSegment *TimeSegment `protobuf:"bytes,1,opt,name=time_segment,json=timeSegment,proto3" json:"time_segment,omitempty"` + // Label of the segment specified by time_segment. 
+ AnnotationSpec *AnnotationSpec `protobuf:"bytes,2,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationAnnotation) Reset() { *m = VideoClassificationAnnotation{} } +func (m *VideoClassificationAnnotation) String() string { return proto.CompactTextString(m) } +func (*VideoClassificationAnnotation) ProtoMessage() {} +func (*VideoClassificationAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{14} +} +func (m *VideoClassificationAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationAnnotation.Unmarshal(m, b) +} +func (m *VideoClassificationAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationAnnotation.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationAnnotation.Merge(dst, src) +} +func (m *VideoClassificationAnnotation) XXX_Size() int { + return xxx_messageInfo_VideoClassificationAnnotation.Size(m) +} +func (m *VideoClassificationAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationAnnotation proto.InternalMessageInfo + +func (m *VideoClassificationAnnotation) GetTimeSegment() *TimeSegment { + if m != nil { + return m.TimeSegment + } + return nil +} + +func (m *VideoClassificationAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +// Video frame level annotation for object detection and tracking. +type ObjectTrackingFrame struct { + // The bounding box location of this object track for the frame. + // + // Types that are valid to be assigned to BoundedArea: + // *ObjectTrackingFrame_BoundingPoly + // *ObjectTrackingFrame_NormalizedBoundingPoly + BoundedArea isObjectTrackingFrame_BoundedArea `protobuf_oneof:"bounded_area"` + // The time offset of this frame relative to the beginning of the video. 
+ TimeOffset *duration.Duration `protobuf:"bytes,3,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingFrame) Reset() { *m = ObjectTrackingFrame{} } +func (m *ObjectTrackingFrame) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingFrame) ProtoMessage() {} +func (*ObjectTrackingFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{15} +} +func (m *ObjectTrackingFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingFrame.Unmarshal(m, b) +} +func (m *ObjectTrackingFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingFrame.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingFrame.Merge(dst, src) +} +func (m *ObjectTrackingFrame) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingFrame.Size(m) +} +func (m *ObjectTrackingFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingFrame proto.InternalMessageInfo + +type isObjectTrackingFrame_BoundedArea interface { + isObjectTrackingFrame_BoundedArea() +} + +type ObjectTrackingFrame_BoundingPoly struct { + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3,oneof"` +} + +type ObjectTrackingFrame_NormalizedBoundingPoly struct { + NormalizedBoundingPoly *NormalizedBoundingPoly `protobuf:"bytes,2,opt,name=normalized_bounding_poly,json=normalizedBoundingPoly,proto3,oneof"` +} + +func (*ObjectTrackingFrame_BoundingPoly) isObjectTrackingFrame_BoundedArea() {} + +func (*ObjectTrackingFrame_NormalizedBoundingPoly) isObjectTrackingFrame_BoundedArea() {} + +func (m *ObjectTrackingFrame) GetBoundedArea() isObjectTrackingFrame_BoundedArea { + if m != nil { + return m.BoundedArea + } + return nil +} + +func (m *ObjectTrackingFrame) GetBoundingPoly() *BoundingPoly { + if x, ok := m.GetBoundedArea().(*ObjectTrackingFrame_BoundingPoly); ok { + return x.BoundingPoly + } + return nil +} + +func (m *ObjectTrackingFrame) GetNormalizedBoundingPoly() *NormalizedBoundingPoly { + if x, ok := m.GetBoundedArea().(*ObjectTrackingFrame_NormalizedBoundingPoly); ok { + return x.NormalizedBoundingPoly + } + return nil +} + +func (m *ObjectTrackingFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
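The bounded_area oneof above surfaces in Go as the isObjectTrackingFrame_BoundedArea interface plus one wrapper struct per case. A minimal sketch of setting the normalized case and reading it back, either through the case-specific getter or a type switch; the empty NormalizedBoundingPoly literal is only a placeholder:

    frame := &ObjectTrackingFrame{
        BoundedArea: &ObjectTrackingFrame_NormalizedBoundingPoly{
            NormalizedBoundingPoly: &NormalizedBoundingPoly{},
        },
        TimeOffset: &duration.Duration{Seconds: 2},
    }
    // Case-specific getter: returns nil when the other case (or none) is set.
    if p := frame.GetNormalizedBoundingPoly(); p != nil {
        _ = p
    }
    // Or inspect the wrapper type directly.
    switch area := frame.GetBoundedArea().(type) {
    case *ObjectTrackingFrame_BoundingPoly:
        _ = area.BoundingPoly
    case *ObjectTrackingFrame_NormalizedBoundingPoly:
        _ = area.NormalizedBoundingPoly
    }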
+func (*ObjectTrackingFrame) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ObjectTrackingFrame_OneofMarshaler, _ObjectTrackingFrame_OneofUnmarshaler, _ObjectTrackingFrame_OneofSizer, []interface{}{ + (*ObjectTrackingFrame_BoundingPoly)(nil), + (*ObjectTrackingFrame_NormalizedBoundingPoly)(nil), + } +} + +func _ObjectTrackingFrame_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ObjectTrackingFrame) + // bounded_area + switch x := m.BoundedArea.(type) { + case *ObjectTrackingFrame_BoundingPoly: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BoundingPoly); err != nil { + return err + } + case *ObjectTrackingFrame_NormalizedBoundingPoly: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NormalizedBoundingPoly); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ObjectTrackingFrame.BoundedArea has unexpected type %T", x) + } + return nil +} + +func _ObjectTrackingFrame_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ObjectTrackingFrame) + switch tag { + case 1: // bounded_area.bounding_poly + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BoundingPoly) + err := b.DecodeMessage(msg) + m.BoundedArea = &ObjectTrackingFrame_BoundingPoly{msg} + return true, err + case 2: // bounded_area.normalized_bounding_poly + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(NormalizedBoundingPoly) + err := b.DecodeMessage(msg) + m.BoundedArea = &ObjectTrackingFrame_NormalizedBoundingPoly{msg} + return true, err + default: + return false, nil + } +} + +func _ObjectTrackingFrame_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ObjectTrackingFrame) + // bounded_area + switch x := m.BoundedArea.(type) { + case *ObjectTrackingFrame_BoundingPoly: + s := proto.Size(x.BoundingPoly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ObjectTrackingFrame_NormalizedBoundingPoly: + s := proto.Size(x.NormalizedBoundingPoly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Video object tracking annotation. +type VideoObjectTrackingAnnotation struct { + // Label of the object tracked in this annotation. + AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + // The time segment of the video to which object tracking applies. + TimeSegment *TimeSegment `protobuf:"bytes,2,opt,name=time_segment,json=timeSegment,proto3" json:"time_segment,omitempty"` + // The list of frames where this object track appears. 
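The hand-written oneof helpers above deal with the proto wire format directly: the key written before each embedded message is field_number<<3 | wire_type. For bounding_poly (field 1, length-delimited, so proto.WireBytes == 2) that key is 1<<3|2 = 0x0A, and for normalized_bounding_poly (field 2) it is 2<<3|2 = 0x12. Both keys fit in a single byte, which is why _ObjectTrackingFrame_OneofSizer adds a flat 1 for "tag and wire" before the length varint and the message payload.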
+ ObjectTrackingFrames []*ObjectTrackingFrame `protobuf:"bytes,3,rep,name=object_tracking_frames,json=objectTrackingFrames,proto3" json:"object_tracking_frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoObjectTrackingAnnotation) Reset() { *m = VideoObjectTrackingAnnotation{} } +func (m *VideoObjectTrackingAnnotation) String() string { return proto.CompactTextString(m) } +func (*VideoObjectTrackingAnnotation) ProtoMessage() {} +func (*VideoObjectTrackingAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{16} +} +func (m *VideoObjectTrackingAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoObjectTrackingAnnotation.Unmarshal(m, b) +} +func (m *VideoObjectTrackingAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoObjectTrackingAnnotation.Marshal(b, m, deterministic) +} +func (dst *VideoObjectTrackingAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoObjectTrackingAnnotation.Merge(dst, src) +} +func (m *VideoObjectTrackingAnnotation) XXX_Size() int { + return xxx_messageInfo_VideoObjectTrackingAnnotation.Size(m) +} +func (m *VideoObjectTrackingAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_VideoObjectTrackingAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoObjectTrackingAnnotation proto.InternalMessageInfo + +func (m *VideoObjectTrackingAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +func (m *VideoObjectTrackingAnnotation) GetTimeSegment() *TimeSegment { + if m != nil { + return m.TimeSegment + } + return nil +} + +func (m *VideoObjectTrackingAnnotation) GetObjectTrackingFrames() []*ObjectTrackingFrame { + if m != nil { + return m.ObjectTrackingFrames + } + return nil +} + +// Video event annotation. +type VideoEventAnnotation struct { + // Label of the event in this annotation. + AnnotationSpec *AnnotationSpec `protobuf:"bytes,1,opt,name=annotation_spec,json=annotationSpec,proto3" json:"annotation_spec,omitempty"` + // The time segment of the video to which the annotation applies. 
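VideoObjectTrackingAnnotation above ties a label to a time segment and a list of per-frame bounded areas. A minimal sketch with purely illustrative values (the "car" label and the offsets are made up), again assuming this package's duration import:

    track := &VideoObjectTrackingAnnotation{
        AnnotationSpec: &AnnotationSpec{DisplayName: "car"},
        TimeSegment: &TimeSegment{
            StartTimeOffset: &duration.Duration{Seconds: 2},
            EndTimeOffset:   &duration.Duration{Seconds: 5},
        },
        ObjectTrackingFrames: []*ObjectTrackingFrame{
            {TimeOffset: &duration.Duration{Seconds: 2}},
            {TimeOffset: &duration.Duration{Seconds: 3}},
        },
    }
    _ = track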
+ TimeSegment *TimeSegment `protobuf:"bytes,2,opt,name=time_segment,json=timeSegment,proto3" json:"time_segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoEventAnnotation) Reset() { *m = VideoEventAnnotation{} } +func (m *VideoEventAnnotation) String() string { return proto.CompactTextString(m) } +func (*VideoEventAnnotation) ProtoMessage() {} +func (*VideoEventAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{17} +} +func (m *VideoEventAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoEventAnnotation.Unmarshal(m, b) +} +func (m *VideoEventAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoEventAnnotation.Marshal(b, m, deterministic) +} +func (dst *VideoEventAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoEventAnnotation.Merge(dst, src) +} +func (m *VideoEventAnnotation) XXX_Size() int { + return xxx_messageInfo_VideoEventAnnotation.Size(m) +} +func (m *VideoEventAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_VideoEventAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoEventAnnotation proto.InternalMessageInfo + +func (m *VideoEventAnnotation) GetAnnotationSpec() *AnnotationSpec { + if m != nil { + return m.AnnotationSpec + } + return nil +} + +func (m *VideoEventAnnotation) GetTimeSegment() *TimeSegment { + if m != nil { + return m.TimeSegment + } + return nil +} + +// Speech audio recognition. +type AudioRecognitionAnnotation struct { + // Transcript text representing the words spoken. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // Start position in audio file that the transcription corresponds to. + StartOffset *duration.Duration `protobuf:"bytes,2,opt,name=start_offset,json=startOffset,proto3" json:"start_offset,omitempty"` + // End position in audio file that the transcription corresponds to. 
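AudioRecognitionAnnotation (its last field and accessors follow just below) pairs a transcript with the span of audio it covers, again expressed as offsets from the start of the file. A minimal sketch with illustrative values:

    speech := &AudioRecognitionAnnotation{
        Transcript:  "hello world",
        StartOffset: &duration.Duration{Seconds: 3},
        EndOffset:   &duration.Duration{Seconds: 4, Nanos: 500000000}, // 4.5s
    }
    _ = speech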
+ EndOffset *duration.Duration `protobuf:"bytes,3,opt,name=end_offset,json=endOffset,proto3" json:"end_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioRecognitionAnnotation) Reset() { *m = AudioRecognitionAnnotation{} } +func (m *AudioRecognitionAnnotation) String() string { return proto.CompactTextString(m) } +func (*AudioRecognitionAnnotation) ProtoMessage() {} +func (*AudioRecognitionAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{18} +} +func (m *AudioRecognitionAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioRecognitionAnnotation.Unmarshal(m, b) +} +func (m *AudioRecognitionAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioRecognitionAnnotation.Marshal(b, m, deterministic) +} +func (dst *AudioRecognitionAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioRecognitionAnnotation.Merge(dst, src) +} +func (m *AudioRecognitionAnnotation) XXX_Size() int { + return xxx_messageInfo_AudioRecognitionAnnotation.Size(m) +} +func (m *AudioRecognitionAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_AudioRecognitionAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioRecognitionAnnotation proto.InternalMessageInfo + +func (m *AudioRecognitionAnnotation) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *AudioRecognitionAnnotation) GetStartOffset() *duration.Duration { + if m != nil { + return m.StartOffset + } + return nil +} + +func (m *AudioRecognitionAnnotation) GetEndOffset() *duration.Duration { + if m != nil { + return m.EndOffset + } + return nil +} + +// Additional information associated with the annotation. +type AnnotationMetadata struct { + // Metadata related to human labeling. + OperatorMetadata *OperatorMetadata `protobuf:"bytes,2,opt,name=operator_metadata,json=operatorMetadata,proto3" json:"operator_metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationMetadata) Reset() { *m = AnnotationMetadata{} } +func (m *AnnotationMetadata) String() string { return proto.CompactTextString(m) } +func (*AnnotationMetadata) ProtoMessage() {} +func (*AnnotationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{19} +} +func (m *AnnotationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationMetadata.Unmarshal(m, b) +} +func (m *AnnotationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationMetadata.Marshal(b, m, deterministic) +} +func (dst *AnnotationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationMetadata.Merge(dst, src) +} +func (m *AnnotationMetadata) XXX_Size() int { + return xxx_messageInfo_AnnotationMetadata.Size(m) +} +func (m *AnnotationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationMetadata proto.InternalMessageInfo + +func (m *AnnotationMetadata) GetOperatorMetadata() *OperatorMetadata { + if m != nil { + return m.OperatorMetadata + } + return nil +} + +// General information useful for labels coming from contributors. +type OperatorMetadata struct { + // Confidence score corresponding to a label. 
For example, if 3 contributors + // have answered the question and 2 of them agree on the final label, the + // confidence score will be 0.67 (2/3). + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + // The total number of contributors that answer this question. + TotalVotes int32 `protobuf:"varint,2,opt,name=total_votes,json=totalVotes,proto3" json:"total_votes,omitempty"` + // The total number of contributors that choose this label. + LabelVotes int32 `protobuf:"varint,3,opt,name=label_votes,json=labelVotes,proto3" json:"label_votes,omitempty"` + Comments []string `protobuf:"bytes,4,rep,name=comments,proto3" json:"comments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperatorMetadata) Reset() { *m = OperatorMetadata{} } +func (m *OperatorMetadata) String() string { return proto.CompactTextString(m) } +func (*OperatorMetadata) ProtoMessage() {} +func (*OperatorMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_7e34849fb820a58f, []int{20} +} +func (m *OperatorMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperatorMetadata.Unmarshal(m, b) +} +func (m *OperatorMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperatorMetadata.Marshal(b, m, deterministic) +} +func (dst *OperatorMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperatorMetadata.Merge(dst, src) +} +func (m *OperatorMetadata) XXX_Size() int { + return xxx_messageInfo_OperatorMetadata.Size(m) +} +func (m *OperatorMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperatorMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperatorMetadata proto.InternalMessageInfo + +func (m *OperatorMetadata) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *OperatorMetadata) GetTotalVotes() int32 { + if m != nil { + return m.TotalVotes + } + return 0 +} + +func (m *OperatorMetadata) GetLabelVotes() int32 { + if m != nil { + return m.LabelVotes + } + return 0 +} + +func (m *OperatorMetadata) GetComments() []string { + if m != nil { + return m.Comments + } + return nil +} + +func init() { + proto.RegisterType((*Annotation)(nil), "google.cloud.datalabeling.v1beta1.Annotation") + proto.RegisterType((*AnnotationValue)(nil), "google.cloud.datalabeling.v1beta1.AnnotationValue") + proto.RegisterType((*ImageClassificationAnnotation)(nil), "google.cloud.datalabeling.v1beta1.ImageClassificationAnnotation") + proto.RegisterType((*Vertex)(nil), "google.cloud.datalabeling.v1beta1.Vertex") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.datalabeling.v1beta1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.datalabeling.v1beta1.BoundingPoly") + proto.RegisterType((*NormalizedBoundingPoly)(nil), "google.cloud.datalabeling.v1beta1.NormalizedBoundingPoly") + proto.RegisterType((*ImageBoundingPolyAnnotation)(nil), "google.cloud.datalabeling.v1beta1.ImageBoundingPolyAnnotation") + proto.RegisterType((*Polyline)(nil), "google.cloud.datalabeling.v1beta1.Polyline") + proto.RegisterType((*NormalizedPolyline)(nil), "google.cloud.datalabeling.v1beta1.NormalizedPolyline") + proto.RegisterType((*ImagePolylineAnnotation)(nil), "google.cloud.datalabeling.v1beta1.ImagePolylineAnnotation") + proto.RegisterType((*ImageSegmentationAnnotation)(nil), "google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation") + proto.RegisterMapType((map[string]*AnnotationSpec)(nil), 
"google.cloud.datalabeling.v1beta1.ImageSegmentationAnnotation.AnnotationColorsEntry") + proto.RegisterType((*TextClassificationAnnotation)(nil), "google.cloud.datalabeling.v1beta1.TextClassificationAnnotation") + proto.RegisterType((*TimeSegment)(nil), "google.cloud.datalabeling.v1beta1.TimeSegment") + proto.RegisterType((*VideoClassificationAnnotation)(nil), "google.cloud.datalabeling.v1beta1.VideoClassificationAnnotation") + proto.RegisterType((*ObjectTrackingFrame)(nil), "google.cloud.datalabeling.v1beta1.ObjectTrackingFrame") + proto.RegisterType((*VideoObjectTrackingAnnotation)(nil), "google.cloud.datalabeling.v1beta1.VideoObjectTrackingAnnotation") + proto.RegisterType((*VideoEventAnnotation)(nil), "google.cloud.datalabeling.v1beta1.VideoEventAnnotation") + proto.RegisterType((*AudioRecognitionAnnotation)(nil), "google.cloud.datalabeling.v1beta1.AudioRecognitionAnnotation") + proto.RegisterType((*AnnotationMetadata)(nil), "google.cloud.datalabeling.v1beta1.AnnotationMetadata") + proto.RegisterType((*OperatorMetadata)(nil), "google.cloud.datalabeling.v1beta1.OperatorMetadata") + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.AnnotationSource", AnnotationSource_name, AnnotationSource_value) + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.AnnotationSentiment", AnnotationSentiment_name, AnnotationSentiment_value) + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.AnnotationType", AnnotationType_name, AnnotationType_value) +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/annotation.proto", fileDescriptor_annotation_7e34849fb820a58f) +} + +var fileDescriptor_annotation_7e34849fb820a58f = []byte{ + // 1630 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x58, 0xdd, 0x6f, 0xdb, 0x46, + 0x12, 0x37, 0x29, 0xdb, 0x91, 0x47, 0x8a, 0xad, 0xac, 0x7d, 0x8e, 0x22, 0x7f, 0x46, 0x97, 0x1c, + 0x1c, 0x1f, 0x20, 0x21, 0x0e, 0xee, 0x2e, 0x97, 0xdc, 0x97, 0x2c, 0x33, 0x36, 0xaf, 0xb1, 0xa8, + 0x50, 0xb4, 0x9a, 0x04, 0x28, 0x18, 0x9a, 0x5a, 0xab, 0x6c, 0x24, 0x52, 0x20, 0x57, 0x82, 0x95, + 0x02, 0x05, 0x02, 0xf4, 0x03, 0x68, 0x81, 0xbe, 0xf6, 0xdf, 0x28, 0x8a, 0x3c, 0xf6, 0xb5, 0xaf, + 0xed, 0x63, 0xfb, 0xe7, 0x14, 0xbb, 0xa4, 0x24, 0x92, 0xfa, 0x30, 0xd5, 0xc4, 0x05, 0xfa, 0xa6, + 0x9d, 0x9d, 0xf9, 0xcd, 0x6f, 0x67, 0x47, 0x33, 0xb3, 0x84, 0xbd, 0xba, 0x65, 0xd5, 0x1b, 0x38, + 0xaf, 0x37, 0xac, 0x76, 0x2d, 0x5f, 0xd3, 0x88, 0xd6, 0xd0, 0x4e, 0x71, 0xc3, 0x30, 0xeb, 0xf9, + 0xce, 0xdd, 0x53, 0x4c, 0xb4, 0xbb, 0x79, 0xcd, 0x34, 0x2d, 0xa2, 0x11, 0xc3, 0x32, 0x73, 0x2d, + 0xdb, 0x22, 0x16, 0xba, 0xe9, 0xda, 0xe4, 0x98, 0x4d, 0xce, 0x6f, 0x93, 0xf3, 0x6c, 0x32, 0x0f, + 0xa7, 0x81, 0x55, 0x9d, 0x16, 0xd6, 0x55, 0x07, 0x13, 0x17, 0x3f, 0xb3, 0xe9, 0x19, 0xb3, 0xd5, + 0x69, 0xfb, 0x2c, 0x5f, 0x6b, 0xdb, 0x3e, 0xff, 0x99, 0xf5, 0xf0, 0xbe, 0x43, 0xec, 0xb6, 0xde, + 0xb3, 0xde, 0x0a, 0xef, 0x12, 0xa3, 0x89, 0x1d, 0xa2, 0x35, 0x5b, 0x21, 0x73, 0xad, 0x65, 0xf8, + 0x48, 0x38, 0xee, 0x6e, 0xf6, 0x4d, 0x0c, 0xa0, 0xd0, 0x97, 0x22, 0x04, 0xb3, 0xa6, 0xd6, 0xc4, + 0x69, 0x6e, 0x9b, 0xdb, 0x59, 0x90, 0xd9, 0x6f, 0xf4, 0x02, 0xae, 0xf9, 0xc9, 0x5b, 0x6d, 0x5b, + 0xc7, 0x69, 0x7e, 0x9b, 0xdb, 0x59, 0xdc, 0xbb, 0x97, 0xbb, 0x30, 0x36, 0xb9, 0x01, 0x7a, 0x85, + 0x99, 0xca, 0x29, 0x2d, 0x24, 0x41, 0x1f, 0x80, 0x4f, 0xa6, 0x76, 0xb4, 0x46, 0x1b, 0xa7, 0x63, + 0xdb, 0xdc, 0x4e, 0x62, 0x6f, 0x6f, 0x2a, 0x07, 0x55, 0x6a, 0x29, 0x2f, 0x69, 0x41, 0x01, 0x3a, + 0x83, 0x65, 0x1f, 0x7c, 0x13, 0x13, 0x8d, 0xe2, 0xa4, 0x67, 0x99, 
0x87, 0xbf, 0x4d, 0xe5, 0xe1, + 0xd8, 0x33, 0x96, 0x91, 0x36, 0x24, 0x43, 0x06, 0xac, 0xf8, 0x03, 0x85, 0x4d, 0x7a, 0x15, 0x26, + 0x49, 0xcf, 0xb3, 0x58, 0xfd, 0x7d, 0xba, 0x58, 0xf5, 0xac, 0x65, 0x1f, 0xf7, 0xbe, 0x30, 0xfb, + 0xd3, 0x02, 0x2c, 0x85, 0xce, 0x8d, 0xbe, 0xe4, 0x60, 0xcb, 0x68, 0x6a, 0x75, 0xac, 0xea, 0x0d, + 0xcd, 0x71, 0x8c, 0x33, 0x43, 0x77, 0x99, 0x0c, 0x00, 0xd8, 0xbd, 0x26, 0xf6, 0xfe, 0x17, 0x81, + 0x8a, 0x48, 0x91, 0x8a, 0x01, 0xa0, 0x81, 0xc3, 0xa3, 0x19, 0x79, 0xc3, 0x98, 0xa4, 0x80, 0x3e, + 0xe3, 0x60, 0xd3, 0x25, 0x73, 0x6a, 0xb5, 0xcd, 0x9a, 0x61, 0xd6, 0xd5, 0x96, 0xd5, 0xe8, 0xfa, + 0xb9, 0xf0, 0x8c, 0xcb, 0x7f, 0xa2, 0x72, 0xd9, 0xf7, 0x70, 0xca, 0x56, 0xa3, 0x1b, 0x60, 0xb2, + 0x66, 0x8c, 0xdf, 0x46, 0xe7, 0x70, 0xc3, 0xa5, 0x41, 0xbd, 0x37, 0x0c, 0x13, 0xfb, 0x19, 0xc4, + 0x19, 0x83, 0x07, 0x51, 0x19, 0x94, 0x3d, 0x88, 0x80, 0xf7, 0xeb, 0xc6, 0xe8, 0x2d, 0xf4, 0x29, + 0x07, 0x6e, 0x8c, 0x54, 0x07, 0xd7, 0xe9, 0xa5, 0x0d, 0x5d, 0xc6, 0xc2, 0x74, 0x01, 0xa8, 0xf8, + 0x60, 0x46, 0x04, 0x60, 0xf4, 0x36, 0xfa, 0x82, 0x83, 0x4d, 0x82, 0xcf, 0xc9, 0x84, 0xa4, 0x70, + 0xff, 0x6a, 0xff, 0x8d, 0xc0, 0x43, 0xc1, 0xe7, 0x64, 0x42, 0x4e, 0xac, 0x93, 0x09, 0xfb, 0x2c, + 0x3f, 0x3b, 0x46, 0x0d, 0x5b, 0x13, 0xa8, 0xcc, 0x46, 0xce, 0xcf, 0x2a, 0x45, 0x9a, 0x94, 0x9f, + 0x9d, 0x49, 0x0a, 0xe8, 0x2b, 0x0e, 0xb6, 0x5d, 0x32, 0xd6, 0xe9, 0x47, 0x58, 0x27, 0x2a, 0xb1, + 0x35, 0xfd, 0x25, 0x4d, 0x53, 0x1f, 0x9b, 0xb9, 0xe9, 0xd8, 0x48, 0x0c, 0x49, 0xf1, 0x80, 0x46, + 0xb0, 0x19, 0xa7, 0x80, 0x2c, 0x58, 0x75, 0xc9, 0xe0, 0x0e, 0x36, 0x89, 0x9f, 0xc2, 0x3c, 0xa3, + 0xf0, 0x8f, 0xa8, 0x14, 0x04, 0x6a, 0x1f, 0xf0, 0xbc, 0xd2, 0x19, 0x21, 0x47, 0xaf, 0x39, 0x58, + 0xd7, 0xda, 0x35, 0xc3, 0x52, 0x6d, 0xac, 0x5b, 0x75, 0xd3, 0x08, 0x5f, 0xc4, 0x15, 0xe6, 0xf7, + 0xdf, 0x51, 0x6a, 0x16, 0x85, 0x91, 0x07, 0x28, 0x01, 0xef, 0x19, 0x6d, 0xec, 0xee, 0x7e, 0x12, + 0x80, 0x95, 0x7a, 0x95, 0x74, 0x5b, 0x38, 0xfb, 0x31, 0x6c, 0x4c, 0x2c, 0x39, 0xe8, 0x39, 0x2c, + 0x85, 0x7a, 0xa8, 0x57, 0xcd, 0xee, 0x4e, 0x57, 0x58, 0x5b, 0x58, 0x97, 0x17, 0xb5, 0xc0, 0x3a, + 0x7b, 0x0b, 0xe6, 0xab, 0xd8, 0x26, 0xf8, 0x1c, 0x25, 0x81, 0x3b, 0x67, 0xb8, 0x73, 0x32, 0xc7, + 0x56, 0x5d, 0x56, 0xa7, 0xe6, 0x64, 0xae, 0x9b, 0xcd, 0x41, 0xaa, 0x64, 0xd9, 0x4d, 0xad, 0x61, + 0xbc, 0xc2, 0xb5, 0xb0, 0x3e, 0x1f, 0xd0, 0xe7, 0xa9, 0xfe, 0x09, 0x24, 0xfd, 0x55, 0x09, 0x09, + 0x10, 0xef, 0x60, 0x9b, 0x18, 0x3a, 0x76, 0xd2, 0xdc, 0x76, 0x6c, 0x27, 0xb1, 0x77, 0x27, 0xca, + 0xbd, 0x32, 0x47, 0x72, 0xdf, 0x34, 0xfb, 0x09, 0xac, 0x0e, 0x68, 0x04, 0x1c, 0xd4, 0x60, 0xd9, + 0xec, 0xef, 0xa8, 0x21, 0x5f, 0x51, 0x7a, 0x75, 0xf8, 0x78, 0x32, 0x32, 0x03, 0x12, 0xe6, 0xff, + 0x17, 0x1e, 0xd6, 0x26, 0x54, 0x64, 0x54, 0x85, 0xab, 0x81, 0x9a, 0xef, 0x15, 0xfa, 0x7c, 0x04, + 0xff, 0x7e, 0xc4, 0xa3, 0x19, 0x39, 0x79, 0xea, 0x3f, 0x5d, 0x1b, 0xd2, 0xbe, 0xd3, 0x05, 0x5d, + 0xb8, 0x25, 0xec, 0x9f, 0x53, 0x1d, 0x31, 0xe4, 0x6c, 0xd5, 0x1c, 0x1d, 0xd4, 0x4b, 0xcc, 0xbb, + 0xfd, 0x45, 0x70, 0x8f, 0x88, 0x6b, 0xaa, 0x66, 0x63, 0x2d, 0xfb, 0x04, 0xe2, 0xbd, 0x4e, 0xf2, + 0xae, 0xb2, 0xe5, 0x15, 0xa0, 0xc1, 0x91, 0xfb, 0xe0, 0xbf, 0x4f, 0xa6, 0x7c, 0xc7, 0xc3, 0xf5, + 0x31, 0x9d, 0x13, 0x89, 0x10, 0xef, 0xb5, 0x64, 0x2f, 0x41, 0xfe, 0x1a, 0xc1, 0x6d, 0x0f, 0xe8, + 0x68, 0x46, 0xee, 0x9b, 0xa3, 0x0f, 0x03, 0x87, 0xe9, 0xa3, 0xc6, 0x22, 0xcf, 0x77, 0xc3, 0x01, + 0x3a, 0x9a, 0xf1, 0x1f, 0xa8, 0x1f, 0xb6, 0xcb, 0xcc, 0x85, 0x79, 0x98, 0xa5, 0xd4, 0xb3, 0x3f, + 0xf7, 0xfe, 0x5e, 0x63, 0x1a, 0xfa, 0x6b, 0x2e, 0x30, 0x8f, 0xeb, 0x56, 0xc3, 0xb2, 0x7b, 
0x37, + 0xa7, 0xbc, 0xdd, 0x2c, 0xe1, 0xa3, 0x58, 0x64, 0xb0, 0x82, 0x49, 0xec, 0xae, 0x7f, 0x60, 0x77, + 0xc5, 0x68, 0x0d, 0x16, 0x9a, 0x46, 0xd3, 0xad, 0xdc, 0xec, 0xf6, 0x16, 0xe4, 0x38, 0x15, 0x28, + 0xdd, 0x16, 0x46, 0x5b, 0x90, 0xf0, 0x26, 0xbf, 0x2e, 0xc1, 0x0e, 0xbb, 0x86, 0xa4, 0x0c, 0xee, + 0x90, 0x46, 0x25, 0x99, 0x0e, 0xfc, 0x69, 0xa4, 0x23, 0x94, 0x82, 0xd8, 0x4b, 0xdc, 0xf5, 0x1e, + 0x1f, 0xf4, 0x27, 0x3a, 0x84, 0x39, 0xf7, 0x39, 0xc0, 0xff, 0xd6, 0x30, 0xbb, 0xf6, 0x0f, 0xf8, + 0xfb, 0x5c, 0xf6, 0x15, 0xac, 0x4f, 0x1a, 0x60, 0x2e, 0xb5, 0xc3, 0x7c, 0xc3, 0x41, 0x42, 0x31, + 0x9a, 0xbd, 0xc0, 0x23, 0x01, 0xae, 0x39, 0x44, 0xb3, 0x89, 0x4a, 0x07, 0x7a, 0xd5, 0x3a, 0x3b, + 0x73, 0x30, 0xf1, 0xbc, 0xdd, 0xe8, 0x79, 0xeb, 0x3d, 0xe9, 0x72, 0x07, 0xde, 0x83, 0x50, 0x5e, + 0x62, 0x36, 0x14, 0x47, 0x62, 0x16, 0xa8, 0x00, 0x4b, 0xd8, 0xac, 0x05, 0x40, 0xf8, 0x8b, 0x40, + 0xae, 0x62, 0xb3, 0x36, 0x80, 0xc8, 0xfe, 0xc8, 0xc1, 0xc6, 0xc4, 0x61, 0x0a, 0x3d, 0x81, 0x24, + 0x73, 0xe0, 0xcd, 0xb1, 0x1e, 0xcd, 0x5c, 0x94, 0x79, 0x71, 0x70, 0x62, 0x39, 0x41, 0x7c, 0xc7, + 0x1f, 0x11, 0x6a, 0xfe, 0x5d, 0x85, 0xfa, 0x5b, 0x1e, 0x96, 0x83, 0x93, 0xd6, 0x23, 0x9b, 0xbe, + 0x63, 0x87, 0xfa, 0x12, 0x77, 0xf9, 0x7d, 0x89, 0xbf, 0xbc, 0xbe, 0xf4, 0x00, 0x12, 0xfe, 0x6b, + 0x8f, 0x5d, 0x74, 0xed, 0x40, 0xfa, 0x77, 0x3e, 0xd4, 0x77, 0xbe, 0xe7, 0xbd, 0x1c, 0x18, 0x3b, + 0xa1, 0x5e, 0xe2, 0x7f, 0x63, 0x28, 0xbf, 0xf8, 0xb7, 0xcf, 0xaf, 0x06, 0xac, 0x86, 0xe7, 0xfa, + 0x33, 0x9a, 0x04, 0xb4, 0x1c, 0xd1, 0x42, 0x19, 0xe5, 0x31, 0x3e, 0x22, 0x87, 0xe4, 0x15, 0x6b, + 0x58, 0xe8, 0x64, 0x7f, 0xe0, 0x60, 0x65, 0xd4, 0xf8, 0xfd, 0x07, 0x8b, 0x5a, 0xf6, 0x0d, 0x07, + 0x99, 0xf1, 0xe3, 0x3c, 0xda, 0x04, 0x20, 0xb6, 0x66, 0x3a, 0xba, 0x6d, 0xb4, 0x88, 0x57, 0xa5, + 0x7d, 0x12, 0xf4, 0x2f, 0x48, 0xba, 0x35, 0x2d, 0x6a, 0x25, 0x4a, 0x30, 0x75, 0xaf, 0x94, 0xdd, + 0x07, 0xa0, 0xa5, 0x2c, 0x6a, 0x3a, 0x2f, 0x60, 0xb3, 0xe6, 0x55, 0xb0, 0x0e, 0xa0, 0xe1, 0x2f, + 0x34, 0xe8, 0x05, 0x5c, 0xb3, 0x5a, 0xd8, 0xd6, 0x88, 0x65, 0x0f, 0xbe, 0xf9, 0xb8, 0x94, 0xa2, + 0x0c, 0x38, 0x92, 0x67, 0xdb, 0xff, 0xe2, 0x93, 0xb2, 0x42, 0x92, 0xec, 0xe7, 0x1c, 0xa4, 0xc2, + 0x6a, 0x68, 0x05, 0xe6, 0x1c, 0xdd, 0xb2, 0xb1, 0xf7, 0x28, 0x70, 0x17, 0xb4, 0x27, 0x12, 0x8b, + 0x68, 0x0d, 0xb5, 0x63, 0xd1, 0x9e, 0xe8, 0x3e, 0x29, 0x80, 0x89, 0xaa, 0x54, 0x42, 0x15, 0x18, + 0x05, 0x4f, 0x21, 0xe6, 0x2a, 0x30, 0x91, 0xab, 0x90, 0x81, 0xb8, 0x6e, 0x35, 0xe9, 0x35, 0x39, + 0xe9, 0xd9, 0xed, 0x18, 0xed, 0xb8, 0xbd, 0xf5, 0x6e, 0x11, 0x52, 0xe1, 0xaf, 0x6c, 0xe8, 0x26, + 0x6c, 0x14, 0x4a, 0x25, 0x49, 0x29, 0x28, 0xa2, 0x54, 0x52, 0x2b, 0xd2, 0x89, 0x5c, 0x14, 0xd4, + 0x93, 0x52, 0xa5, 0x2c, 0x14, 0xc5, 0x47, 0xa2, 0x70, 0x90, 0x9a, 0x41, 0x49, 0x88, 0x4b, 0x65, + 0x41, 0x2e, 0x28, 0x92, 0x9c, 0x8a, 0xed, 0xbe, 0x0f, 0xcb, 0x23, 0x3e, 0x3f, 0xa1, 0x5b, 0xb0, + 0xed, 0xc7, 0x11, 0x4a, 0x8a, 0x78, 0x2c, 0x94, 0x94, 0x61, 0xa8, 0x92, 0x70, 0x58, 0x50, 0xc4, + 0xaa, 0x90, 0xe2, 0xe8, 0xaa, 0x2c, 0x55, 0x44, 0xb6, 0xe2, 0x77, 0xbf, 0x9e, 0x85, 0xc5, 0x01, + 0xb2, 0x37, 0x22, 0xac, 0xf9, 0x40, 0x95, 0x67, 0xe5, 0x30, 0xb5, 0x3f, 0xc3, 0x96, 0x78, 0x5c, + 0x38, 0x14, 0xd4, 0xe2, 0xe3, 0x42, 0xa5, 0x22, 0x3e, 0x12, 0x8b, 0xae, 0xea, 0xc0, 0x2a, 0xc5, + 0xd1, 0x23, 0xba, 0x4a, 0xfb, 0xd2, 0x49, 0xe9, 0x40, 0x2c, 0x1d, 0xaa, 0xfb, 0xd2, 0x53, 0xbf, + 0x0a, 0x8f, 0x76, 0xe1, 0x2f, 0xae, 0x8a, 0x24, 0x8b, 0x42, 0x49, 0x11, 0x0e, 0xc6, 0xea, 0x5e, + 0x45, 0x59, 0xd8, 0x0c, 0xc1, 0x95, 0xa5, 0xc7, 0xcf, 0xfc, 0x3a, 0x80, 0x36, 0xe0, 0x86, 0xab, + 0x43, 0xb7, 0x1e, 
0x8b, 0x25, 0xc1, 0xbf, 0x9d, 0x18, 0x30, 0xaa, 0x08, 0x87, 0x34, 0x4a, 0x43, + 0xa4, 0x93, 0xe8, 0x0e, 0xdc, 0xae, 0x8a, 0x07, 0x82, 0xa4, 0x56, 0x8e, 0x24, 0xa5, 0x32, 0xe1, + 0x7c, 0x31, 0x1a, 0x7a, 0x57, 0x55, 0xda, 0xff, 0xbf, 0x50, 0x54, 0x54, 0x45, 0x2e, 0x14, 0xdf, + 0xa3, 0xbc, 0x7c, 0x5a, 0xb3, 0xe8, 0x36, 0xdc, 0x0c, 0x68, 0x1d, 0x08, 0x8a, 0x50, 0x0c, 0x83, + 0xcd, 0xa1, 0x0c, 0xac, 0xba, 0x6a, 0x42, 0x95, 0x5e, 0x9f, 0x6f, 0x6f, 0x9e, 0x9e, 0xbc, 0x70, + 0x72, 0x20, 0x4a, 0xd4, 0x43, 0xa9, 0x52, 0x94, 0xc5, 0x72, 0xd8, 0xfe, 0x0a, 0xd5, 0x51, 0x84, + 0xa7, 0xca, 0x04, 0xc2, 0x71, 0x4a, 0x85, 0xe9, 0xd0, 0x24, 0x51, 0x9e, 0xa9, 0xc2, 0x53, 0xca, + 0x38, 0xac, 0xb6, 0xb0, 0x7f, 0x0e, 0xb7, 0x75, 0xab, 0x79, 0xf1, 0x7f, 0xb0, 0xcc, 0x3d, 0x3f, + 0xf6, 0x94, 0xea, 0x56, 0x43, 0x33, 0xeb, 0x39, 0xcb, 0xae, 0xe7, 0xeb, 0xd8, 0x64, 0xd5, 0x20, + 0xef, 0x6e, 0x69, 0x2d, 0xc3, 0x99, 0xf0, 0xdd, 0xfd, 0xa1, 0x5f, 0x78, 0x3a, 0xcf, 0x2c, 0xef, + 0xfd, 0x1a, 0x00, 0x00, 0xff, 0xff, 0x98, 0xce, 0x14, 0xed, 0x07, 0x18, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation_spec_set.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation_spec_set.pb.go new file mode 100644 index 0000000..8bfbf53 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/annotation_spec_set.pb.go @@ -0,0 +1,173 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/annotation_spec_set.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// AnnotationSpecSet is a collection of label definitions. For example, in +// image classification tasks, we define a set of labels, this set is called +// AnnotationSpecSet. AnnotationSpecSet is immutable upon creation. +type AnnotationSpecSet struct { + // Output only. + // AnnotationSpecSet resource name, format: + // projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The display name for AnnotationSpecSet defined by user. + // Maximum of 64 characters. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. User-provided description of the annotation specification set. + // The description can be up to 10000 characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Required. The actual spec set defined by the users. 
+ AnnotationSpecs []*AnnotationSpec `protobuf:"bytes,4,rep,name=annotation_specs,json=annotationSpecs,proto3" json:"annotation_specs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationSpecSet) Reset() { *m = AnnotationSpecSet{} } +func (m *AnnotationSpecSet) String() string { return proto.CompactTextString(m) } +func (*AnnotationSpecSet) ProtoMessage() {} +func (*AnnotationSpecSet) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_spec_set_b6f42ddbc1614a11, []int{0} +} +func (m *AnnotationSpecSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationSpecSet.Unmarshal(m, b) +} +func (m *AnnotationSpecSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationSpecSet.Marshal(b, m, deterministic) +} +func (dst *AnnotationSpecSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationSpecSet.Merge(dst, src) +} +func (m *AnnotationSpecSet) XXX_Size() int { + return xxx_messageInfo_AnnotationSpecSet.Size(m) +} +func (m *AnnotationSpecSet) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationSpecSet.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationSpecSet proto.InternalMessageInfo + +func (m *AnnotationSpecSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AnnotationSpecSet) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *AnnotationSpecSet) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *AnnotationSpecSet) GetAnnotationSpecs() []*AnnotationSpec { + if m != nil { + return m.AnnotationSpecs + } + return nil +} + +// Container of information related to one annotation spec. +type AnnotationSpec struct { + // Required. The display name of the AnnotationSpec. Maximum of 64 characters. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. User-provided description of the annotation specification. + // The description can be up to 10000 characters long. 
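An AnnotationSpecSet is essentially a named list of AnnotationSpec label definitions (AnnotationSpec continues just below). A minimal sketch of such a payload for an image classification task; the display names are illustrative and the server-assigned Name field is left empty:

    specSet := &AnnotationSpecSet{
        DisplayName: "weather",
        Description: "Labels describing the weather in an image",
        AnnotationSpecs: []*AnnotationSpec{
            {DisplayName: "sunny"},
            {DisplayName: "cloudy"},
            {DisplayName: "rainy"},
        },
    }
    _ = specSet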
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationSpec) Reset() { *m = AnnotationSpec{} } +func (m *AnnotationSpec) String() string { return proto.CompactTextString(m) } +func (*AnnotationSpec) ProtoMessage() {} +func (*AnnotationSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_annotation_spec_set_b6f42ddbc1614a11, []int{1} +} +func (m *AnnotationSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationSpec.Unmarshal(m, b) +} +func (m *AnnotationSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationSpec.Marshal(b, m, deterministic) +} +func (dst *AnnotationSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationSpec.Merge(dst, src) +} +func (m *AnnotationSpec) XXX_Size() int { + return xxx_messageInfo_AnnotationSpec.Size(m) +} +func (m *AnnotationSpec) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationSpec proto.InternalMessageInfo + +func (m *AnnotationSpec) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *AnnotationSpec) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*AnnotationSpecSet)(nil), "google.cloud.datalabeling.v1beta1.AnnotationSpecSet") + proto.RegisterType((*AnnotationSpec)(nil), "google.cloud.datalabeling.v1beta1.AnnotationSpec") +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/annotation_spec_set.proto", fileDescriptor_annotation_spec_set_b6f42ddbc1614a11) +} + +var fileDescriptor_annotation_spec_set_b6f42ddbc1614a11 = []byte{ + // 268 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x5f, 0x4b, 0xc3, 0x30, + 0x14, 0xc5, 0xe9, 0x36, 0x04, 0x33, 0xf1, 0x4f, 0x9f, 0x8a, 0xf8, 0xd0, 0x0d, 0x84, 0x3d, 0x25, + 0x54, 0x1f, 0xf7, 0xa4, 0xef, 0x8a, 0x6c, 0xf8, 0x22, 0x42, 0xb9, 0x6d, 0x2f, 0x21, 0x90, 0xe5, + 0x86, 0x26, 0x8a, 0x7e, 0x45, 0x3f, 0x95, 0x2c, 0x0d, 0xda, 0x3a, 0xb0, 0x6f, 0xc9, 0xb9, 0xe7, + 0x1c, 0x7e, 0x37, 0x61, 0x6b, 0x49, 0x24, 0x35, 0x8a, 0x5a, 0xd3, 0x5b, 0x23, 0x1a, 0xf0, 0xa0, + 0xa1, 0x42, 0xad, 0x8c, 0x14, 0xef, 0x45, 0x85, 0x1e, 0x0a, 0x01, 0xc6, 0x90, 0x07, 0xaf, 0xc8, + 0x94, 0xce, 0x62, 0x5d, 0x3a, 0xf4, 0xdc, 0xb6, 0xe4, 0x29, 0x5d, 0x74, 0x61, 0x1e, 0xc2, 0xbc, + 0x1f, 0xe6, 0x31, 0x7c, 0x79, 0x15, 0xfb, 0xc1, 0xaa, 0x5e, 0x91, 0xeb, 0x0a, 0x96, 0x5f, 0x09, + 0xbb, 0xb8, 0xfb, 0x51, 0xb7, 0x16, 0xeb, 0x2d, 0xfa, 0x34, 0x65, 0x33, 0x03, 0x3b, 0xcc, 0x92, + 0x3c, 0x59, 0x1d, 0x6f, 0xc2, 0x39, 0x5d, 0xb0, 0x93, 0x46, 0x39, 0xab, 0xe1, 0xb3, 0x0c, 0xb3, + 0x49, 0x98, 0xcd, 0xa3, 0xf6, 0xb8, 0xb7, 0xe4, 0x6c, 0xde, 0xa0, 0xab, 0x5b, 0x65, 0xf7, 0x65, + 0xd9, 0x34, 0x3a, 0x7e, 0xa5, 0xf4, 0x95, 0x9d, 0xff, 0x59, 0xc6, 0x65, 0xb3, 0x7c, 0xba, 0x9a, + 0xdf, 0x14, 0x7c, 0x74, 0x15, 0x3e, 0x04, 0xdd, 0x9c, 0xc1, 0xe0, 0xee, 0x96, 0xcf, 0xec, 0x74, + 0x68, 0x39, 0x80, 0x4e, 0x46, 0xa1, 0x27, 0x07, 0xd0, 0xf7, 0x1f, 0xec, 0xba, 0xa6, 0xdd, 0x38, + 0xdf, 0x53, 0xf2, 0xf2, 0x10, 0x4d, 0x92, 0x34, 0x18, 0xc9, 0xa9, 0x95, 0x42, 0xa2, 0x09, 0x4f, + 0x2d, 0xba, 0x11, 0x58, 0xe5, 0xfe, 0xf9, 0xeb, 0x75, 0x5f, 0xac, 0x8e, 0x42, 0xf2, 0xf6, 0x3b, + 0x00, 0x00, 0xff, 0xff, 0x4b, 0xca, 0xe4, 0xde, 0x24, 0x02, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/data_labeling_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/data_labeling_service.pb.go new file mode 100644 index 0000000..f5be7dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/data_labeling_service.pb.go @@ -0,0 +1,3439 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Image labeling task feature. +type LabelImageRequest_Feature int32 + +const ( + LabelImageRequest_FEATURE_UNSPECIFIED LabelImageRequest_Feature = 0 + // Label whole image with one or more of labels. + LabelImageRequest_CLASSIFICATION LabelImageRequest_Feature = 1 + // Label image with bounding boxes for labels. + LabelImageRequest_BOUNDING_BOX LabelImageRequest_Feature = 2 + // Label oriented bounding box. The box does not have to be parallel to + // horizontal line. + LabelImageRequest_ORIENTED_BOUNDING_BOX LabelImageRequest_Feature = 6 + // Label images with bounding poly. A bounding poly is a plane figure that + // is bounded by a finite chain of straight line segments closing in a loop. + LabelImageRequest_BOUNDING_POLY LabelImageRequest_Feature = 3 + // Label images with polyline. Polyline is formed by connected line segments + // which are not in closed form. + LabelImageRequest_POLYLINE LabelImageRequest_Feature = 4 + // Label images with segmentation. Segmentation is different from bounding + // poly since it is more fine-grained, pixel level annotation. + LabelImageRequest_SEGMENTATION LabelImageRequest_Feature = 5 +) + +var LabelImageRequest_Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "CLASSIFICATION", + 2: "BOUNDING_BOX", + 6: "ORIENTED_BOUNDING_BOX", + 3: "BOUNDING_POLY", + 4: "POLYLINE", + 5: "SEGMENTATION", +} +var LabelImageRequest_Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "CLASSIFICATION": 1, + "BOUNDING_BOX": 2, + "ORIENTED_BOUNDING_BOX": 6, + "BOUNDING_POLY": 3, + "POLYLINE": 4, + "SEGMENTATION": 5, +} + +func (x LabelImageRequest_Feature) String() string { + return proto.EnumName(LabelImageRequest_Feature_name, int32(x)) +} +func (LabelImageRequest_Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{14, 0} +} + +// Video labeling task feature. 
+type LabelVideoRequest_Feature int32 + +const ( + LabelVideoRequest_FEATURE_UNSPECIFIED LabelVideoRequest_Feature = 0 + // Label whole video or video segment with one or more labels. + LabelVideoRequest_CLASSIFICATION LabelVideoRequest_Feature = 1 + // Label objects with bounding box on image frames extracted from the video. + LabelVideoRequest_OBJECT_DETECTION LabelVideoRequest_Feature = 2 + // Label and track objects in video. + LabelVideoRequest_OBJECT_TRACKING LabelVideoRequest_Feature = 3 + // Label the range of video for the specified events. + LabelVideoRequest_EVENT LabelVideoRequest_Feature = 4 +) + +var LabelVideoRequest_Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "CLASSIFICATION", + 2: "OBJECT_DETECTION", + 3: "OBJECT_TRACKING", + 4: "EVENT", +} +var LabelVideoRequest_Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "CLASSIFICATION": 1, + "OBJECT_DETECTION": 2, + "OBJECT_TRACKING": 3, + "EVENT": 4, +} + +func (x LabelVideoRequest_Feature) String() string { + return proto.EnumName(LabelVideoRequest_Feature_name, int32(x)) +} +func (LabelVideoRequest_Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{15, 0} +} + +// Text labeling task feature. +type LabelTextRequest_Feature int32 + +const ( + LabelTextRequest_FEATURE_UNSPECIFIED LabelTextRequest_Feature = 0 + // Label text content to one of more labels. + LabelTextRequest_TEXT_CLASSIFICATION LabelTextRequest_Feature = 1 + // Label entities and their span in text. + LabelTextRequest_TEXT_ENTITY_EXTRACTION LabelTextRequest_Feature = 2 +) + +var LabelTextRequest_Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "TEXT_CLASSIFICATION", + 2: "TEXT_ENTITY_EXTRACTION", +} +var LabelTextRequest_Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "TEXT_CLASSIFICATION": 1, + "TEXT_ENTITY_EXTRACTION": 2, +} + +func (x LabelTextRequest_Feature) String() string { + return proto.EnumName(LabelTextRequest_Feature_name, int32(x)) +} +func (LabelTextRequest_Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{16, 0} +} + +// Audio labeling task feature. +type LabelAudioRequest_Feature int32 + +const ( + LabelAudioRequest_FEATURE_UNSPECIFIED LabelAudioRequest_Feature = 0 + // Transcribe the audios into text. + LabelAudioRequest_AUDIO_TRANSCRIPTION LabelAudioRequest_Feature = 1 +) + +var LabelAudioRequest_Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "AUDIO_TRANSCRIPTION", +} +var LabelAudioRequest_Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "AUDIO_TRANSCRIPTION": 1, +} + +func (x LabelAudioRequest_Feature) String() string { + return proto.EnumName(LabelAudioRequest_Feature_name, int32(x)) +} +func (LabelAudioRequest_Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{17, 0} +} + +// Request message for CreateDataset. +type CreateDatasetRequest struct { + // Required. Dataset resource parent, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The dataset to be created. 
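Each Label*Request declares its own Feature enum; the generated constants are int32 values whose String() method is backed by the name maps above. A small sketch, assuming the fmt import already present at the top of this file:

    feature := LabelImageRequest_BOUNDING_BOX
    fmt.Println(feature)        // "BOUNDING_BOX" via the generated String() method
    fmt.Println(int32(feature)) // 2, the numeric value sent on the wire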
+ Dataset *Dataset `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDatasetRequest) Reset() { *m = CreateDatasetRequest{} } +func (m *CreateDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDatasetRequest) ProtoMessage() {} +func (*CreateDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{0} +} +func (m *CreateDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDatasetRequest.Unmarshal(m, b) +} +func (m *CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDatasetRequest.Merge(dst, src) +} +func (m *CreateDatasetRequest) XXX_Size() int { + return xxx_messageInfo_CreateDatasetRequest.Size(m) +} +func (m *CreateDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDatasetRequest proto.InternalMessageInfo + +func (m *CreateDatasetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDatasetRequest) GetDataset() *Dataset { + if m != nil { + return m.Dataset + } + return nil +} + +// Request message for GetDataSet. +type GetDatasetRequest struct { + // Required. Dataset resource name, format: + // projects/{project_id}/datasets/{dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatasetRequest) Reset() { *m = GetDatasetRequest{} } +func (m *GetDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*GetDatasetRequest) ProtoMessage() {} +func (*GetDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{1} +} +func (m *GetDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatasetRequest.Unmarshal(m, b) +} +func (m *GetDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *GetDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatasetRequest.Merge(dst, src) +} +func (m *GetDatasetRequest) XXX_Size() int { + return xxx_messageInfo_GetDatasetRequest.Size(m) +} +func (m *GetDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatasetRequest proto.InternalMessageInfo + +func (m *GetDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListDataset. +type ListDatasetsRequest struct { + // Required. Dataset resource parent, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Filter on dataset is not supported at this moment. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListDatasetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDatasetsResponse.next_page_token] of the previous + // [DataLabelingService.ListDatasets] call. + // Returns the first page if empty. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsRequest) Reset() { *m = ListDatasetsRequest{} } +func (m *ListDatasetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsRequest) ProtoMessage() {} +func (*ListDatasetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{2} +} +func (m *ListDatasetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsRequest.Unmarshal(m, b) +} +func (m *ListDatasetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsRequest.Merge(dst, src) +} +func (m *ListDatasetsRequest) XXX_Size() int { + return xxx_messageInfo_ListDatasetsRequest.Size(m) +} +func (m *ListDatasetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsRequest proto.InternalMessageInfo + +func (m *ListDatasetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDatasetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListDatasetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDatasetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Results of listing datasets within a project. +type ListDatasetsResponse struct { + // The list of datasets to return. + Datasets []*Dataset `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsResponse) Reset() { *m = ListDatasetsResponse{} } +func (m *ListDatasetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsResponse) ProtoMessage() {} +func (*ListDatasetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{3} +} +func (m *ListDatasetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsResponse.Unmarshal(m, b) +} +func (m *ListDatasetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsResponse.Merge(dst, src) +} +func (m *ListDatasetsResponse) XXX_Size() int { + return xxx_messageInfo_ListDatasetsResponse.Size(m) +} +func (m *ListDatasetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsResponse proto.InternalMessageInfo + +func (m *ListDatasetsResponse) GetDatasets() []*Dataset { + if m != nil { + return m.Datasets + } + return nil +} + +func (m *ListDatasetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteDataset. +type DeleteDatasetRequest struct { + // Required. Dataset resource name, format: + // projects/{project_id}/datasets/{dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDatasetRequest) Reset() { *m = DeleteDatasetRequest{} } +func (m *DeleteDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDatasetRequest) ProtoMessage() {} +func (*DeleteDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{4} +} +func (m *DeleteDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDatasetRequest.Unmarshal(m, b) +} +func (m *DeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDatasetRequest.Merge(dst, src) +} +func (m *DeleteDatasetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDatasetRequest.Size(m) +} +func (m *DeleteDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDatasetRequest proto.InternalMessageInfo + +func (m *DeleteDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ImportData API. +type ImportDataRequest struct { + // Required. Dataset resource name, format: + // projects/{project_id}/datasets/{dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Specify the input source of the data. 
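ListDatasetsRequest and ListDatasetsResponse above follow the standard page-token pattern: send the previous response's next_page_token back until it comes back empty. A sketch of draining every page, assuming the DataLabelingServiceClient interface that protoc-gen-go emits further down in this file (its ListDatasets method takes a context, the request, and optional grpc.CallOptions); the project ID is a placeholder and the imports are those of the surrounding package:

    func listAllDatasets(ctx context.Context, client DataLabelingServiceClient) ([]*Dataset, error) {
        var all []*Dataset
        req := &ListDatasetsRequest{
            Parent:   "projects/my-project", // placeholder project ID
            PageSize: 100,
        }
        for {
            resp, err := client.ListDatasets(ctx, req)
            if err != nil {
                return nil, err
            }
            all = append(all, resp.GetDatasets()...)
            if resp.GetNextPageToken() == "" {
                return all, nil // last page reached
            }
            req.PageToken = resp.GetNextPageToken()
        }
    }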
+ InputConfig *InputConfig `protobuf:"bytes,2,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDataRequest) Reset() { *m = ImportDataRequest{} } +func (m *ImportDataRequest) String() string { return proto.CompactTextString(m) } +func (*ImportDataRequest) ProtoMessage() {} +func (*ImportDataRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{5} +} +func (m *ImportDataRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDataRequest.Unmarshal(m, b) +} +func (m *ImportDataRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDataRequest.Marshal(b, m, deterministic) +} +func (dst *ImportDataRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDataRequest.Merge(dst, src) +} +func (m *ImportDataRequest) XXX_Size() int { + return xxx_messageInfo_ImportDataRequest.Size(m) +} +func (m *ImportDataRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDataRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDataRequest proto.InternalMessageInfo + +func (m *ImportDataRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ImportDataRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// Request message for ExportData API. +type ExportDataRequest struct { + // Required. Dataset resource name, format: + // projects/{project_id}/datasets/{dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Annotated dataset resource name. DataItem in + // Dataset and their annotations in specified annotated dataset will be + // exported. It's in format of + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id} + AnnotatedDataset string `protobuf:"bytes,2,opt,name=annotated_dataset,json=annotatedDataset,proto3" json:"annotated_dataset,omitempty"` + // Optional. Filter is not supported at this moment. + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Required. Specify the output destination. 
+ OutputConfig *OutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataRequest) Reset() { *m = ExportDataRequest{} } +func (m *ExportDataRequest) String() string { return proto.CompactTextString(m) } +func (*ExportDataRequest) ProtoMessage() {} +func (*ExportDataRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{6} +} +func (m *ExportDataRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataRequest.Unmarshal(m, b) +} +func (m *ExportDataRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataRequest.Marshal(b, m, deterministic) +} +func (dst *ExportDataRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataRequest.Merge(dst, src) +} +func (m *ExportDataRequest) XXX_Size() int { + return xxx_messageInfo_ExportDataRequest.Size(m) +} +func (m *ExportDataRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataRequest proto.InternalMessageInfo + +func (m *ExportDataRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportDataRequest) GetAnnotatedDataset() string { + if m != nil { + return m.AnnotatedDataset + } + return "" +} + +func (m *ExportDataRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ExportDataRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request message for GetDataItem. +type GetDataItemRequest struct { + // Required. The name of the data item to get, format: + // projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDataItemRequest) Reset() { *m = GetDataItemRequest{} } +func (m *GetDataItemRequest) String() string { return proto.CompactTextString(m) } +func (*GetDataItemRequest) ProtoMessage() {} +func (*GetDataItemRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{7} +} +func (m *GetDataItemRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDataItemRequest.Unmarshal(m, b) +} +func (m *GetDataItemRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDataItemRequest.Marshal(b, m, deterministic) +} +func (dst *GetDataItemRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDataItemRequest.Merge(dst, src) +} +func (m *GetDataItemRequest) XXX_Size() int { + return xxx_messageInfo_GetDataItemRequest.Size(m) +} +func (m *GetDataItemRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDataItemRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDataItemRequest proto.InternalMessageInfo + +func (m *GetDataItemRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListDataItems. +type ListDataItemsRequest struct { + // Required. Name of the dataset to list data items, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. 
Filter is not supported at this moment. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous + // [DataLabelingService.ListDataItems] call. + // Return first page if empty. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDataItemsRequest) Reset() { *m = ListDataItemsRequest{} } +func (m *ListDataItemsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDataItemsRequest) ProtoMessage() {} +func (*ListDataItemsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{8} +} +func (m *ListDataItemsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDataItemsRequest.Unmarshal(m, b) +} +func (m *ListDataItemsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDataItemsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDataItemsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDataItemsRequest.Merge(dst, src) +} +func (m *ListDataItemsRequest) XXX_Size() int { + return xxx_messageInfo_ListDataItemsRequest.Size(m) +} +func (m *ListDataItemsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDataItemsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDataItemsRequest proto.InternalMessageInfo + +func (m *ListDataItemsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDataItemsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListDataItemsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDataItemsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Results of listing data items in a dataset. +type ListDataItemsResponse struct { + // The list of data items to return. + DataItems []*DataItem `protobuf:"bytes,1,rep,name=data_items,json=dataItems,proto3" json:"data_items,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDataItemsResponse) Reset() { *m = ListDataItemsResponse{} } +func (m *ListDataItemsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDataItemsResponse) ProtoMessage() {} +func (*ListDataItemsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{9} +} +func (m *ListDataItemsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDataItemsResponse.Unmarshal(m, b) +} +func (m *ListDataItemsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDataItemsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDataItemsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDataItemsResponse.Merge(dst, src) +} +func (m *ListDataItemsResponse) XXX_Size() int { + return xxx_messageInfo_ListDataItemsResponse.Size(m) +} +func (m *ListDataItemsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDataItemsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDataItemsResponse proto.InternalMessageInfo + +func (m *ListDataItemsResponse) GetDataItems() []*DataItem { + if m != nil { + return m.DataItems + } + return nil +} + +func (m *ListDataItemsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for GetAnnotatedDataset. +type GetAnnotatedDatasetRequest struct { + // Required. Name of the annotated dataset to get, format: + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAnnotatedDatasetRequest) Reset() { *m = GetAnnotatedDatasetRequest{} } +func (m *GetAnnotatedDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*GetAnnotatedDatasetRequest) ProtoMessage() {} +func (*GetAnnotatedDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{10} +} +func (m *GetAnnotatedDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAnnotatedDatasetRequest.Unmarshal(m, b) +} +func (m *GetAnnotatedDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAnnotatedDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *GetAnnotatedDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAnnotatedDatasetRequest.Merge(dst, src) +} +func (m *GetAnnotatedDatasetRequest) XXX_Size() int { + return xxx_messageInfo_GetAnnotatedDatasetRequest.Size(m) +} +func (m *GetAnnotatedDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAnnotatedDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAnnotatedDatasetRequest proto.InternalMessageInfo + +func (m *GetAnnotatedDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListAnnotatedDatasets. +type ListAnnotatedDatasetsRequest struct { + // Required. 
Name of the dataset to list annotated datasets, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Filter is not supported at this moment. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListAnnotatedDatasetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse.next_page_token] of the previous + // [DataLabelingService.ListAnnotatedDatasets] call. + // Return first page if empty. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotatedDatasetsRequest) Reset() { *m = ListAnnotatedDatasetsRequest{} } +func (m *ListAnnotatedDatasetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAnnotatedDatasetsRequest) ProtoMessage() {} +func (*ListAnnotatedDatasetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{11} +} +func (m *ListAnnotatedDatasetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotatedDatasetsRequest.Unmarshal(m, b) +} +func (m *ListAnnotatedDatasetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotatedDatasetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAnnotatedDatasetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotatedDatasetsRequest.Merge(dst, src) +} +func (m *ListAnnotatedDatasetsRequest) XXX_Size() int { + return xxx_messageInfo_ListAnnotatedDatasetsRequest.Size(m) +} +func (m *ListAnnotatedDatasetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotatedDatasetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotatedDatasetsRequest proto.InternalMessageInfo + +func (m *ListAnnotatedDatasetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAnnotatedDatasetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListAnnotatedDatasetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAnnotatedDatasetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Request message for DeleteAnnotatedDataset. +type DeleteAnnotatedDatasetRequest struct { + // Required. 
Name of the annotated dataset to delete, format: + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAnnotatedDatasetRequest) Reset() { *m = DeleteAnnotatedDatasetRequest{} } +func (m *DeleteAnnotatedDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAnnotatedDatasetRequest) ProtoMessage() {} +func (*DeleteAnnotatedDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{12} +} +func (m *DeleteAnnotatedDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAnnotatedDatasetRequest.Unmarshal(m, b) +} +func (m *DeleteAnnotatedDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAnnotatedDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAnnotatedDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAnnotatedDatasetRequest.Merge(dst, src) +} +func (m *DeleteAnnotatedDatasetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAnnotatedDatasetRequest.Size(m) +} +func (m *DeleteAnnotatedDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAnnotatedDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAnnotatedDatasetRequest proto.InternalMessageInfo + +func (m *DeleteAnnotatedDatasetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Results of listing annotated datasets for a dataset. +type ListAnnotatedDatasetsResponse struct { + // The list of annotated datasets to return. + AnnotatedDatasets []*AnnotatedDataset `protobuf:"bytes,1,rep,name=annotated_datasets,json=annotatedDatasets,proto3" json:"annotated_datasets,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotatedDatasetsResponse) Reset() { *m = ListAnnotatedDatasetsResponse{} } +func (m *ListAnnotatedDatasetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAnnotatedDatasetsResponse) ProtoMessage() {} +func (*ListAnnotatedDatasetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{13} +} +func (m *ListAnnotatedDatasetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotatedDatasetsResponse.Unmarshal(m, b) +} +func (m *ListAnnotatedDatasetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotatedDatasetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAnnotatedDatasetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotatedDatasetsResponse.Merge(dst, src) +} +func (m *ListAnnotatedDatasetsResponse) XXX_Size() int { + return xxx_messageInfo_ListAnnotatedDatasetsResponse.Size(m) +} +func (m *ListAnnotatedDatasetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotatedDatasetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotatedDatasetsResponse proto.InternalMessageInfo + +func (m *ListAnnotatedDatasetsResponse) GetAnnotatedDatasets() []*AnnotatedDataset { + if m != nil { + return m.AnnotatedDatasets + } + return nil +} + +func (m *ListAnnotatedDatasetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for starting an image labeling task. +type LabelImageRequest struct { + // Required. Config for labeling tasks. The type of request config must + // match the selected feature. + // + // Types that are valid to be assigned to RequestConfig: + // *LabelImageRequest_ImageClassificationConfig + // *LabelImageRequest_BoundingPolyConfig + // *LabelImageRequest_PolylineConfig + // *LabelImageRequest_SegmentationConfig + RequestConfig isLabelImageRequest_RequestConfig `protobuf_oneof:"request_config"` + // Required. Name of the dataset to request labeling task, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Basic human annotation config. + BasicConfig *HumanAnnotationConfig `protobuf:"bytes,2,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + // Required. The type of image labeling task. 
+ Feature LabelImageRequest_Feature `protobuf:"varint,3,opt,name=feature,proto3,enum=google.cloud.datalabeling.v1beta1.LabelImageRequest_Feature" json:"feature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageRequest) Reset() { *m = LabelImageRequest{} } +func (m *LabelImageRequest) String() string { return proto.CompactTextString(m) } +func (*LabelImageRequest) ProtoMessage() {} +func (*LabelImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{14} +} +func (m *LabelImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageRequest.Unmarshal(m, b) +} +func (m *LabelImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageRequest.Marshal(b, m, deterministic) +} +func (dst *LabelImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageRequest.Merge(dst, src) +} +func (m *LabelImageRequest) XXX_Size() int { + return xxx_messageInfo_LabelImageRequest.Size(m) +} +func (m *LabelImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageRequest proto.InternalMessageInfo + +type isLabelImageRequest_RequestConfig interface { + isLabelImageRequest_RequestConfig() +} + +type LabelImageRequest_ImageClassificationConfig struct { + ImageClassificationConfig *ImageClassificationConfig `protobuf:"bytes,4,opt,name=image_classification_config,json=imageClassificationConfig,proto3,oneof"` +} + +type LabelImageRequest_BoundingPolyConfig struct { + BoundingPolyConfig *BoundingPolyConfig `protobuf:"bytes,5,opt,name=bounding_poly_config,json=boundingPolyConfig,proto3,oneof"` +} + +type LabelImageRequest_PolylineConfig struct { + PolylineConfig *PolylineConfig `protobuf:"bytes,6,opt,name=polyline_config,json=polylineConfig,proto3,oneof"` +} + +type LabelImageRequest_SegmentationConfig struct { + SegmentationConfig *SegmentationConfig `protobuf:"bytes,7,opt,name=segmentation_config,json=segmentationConfig,proto3,oneof"` +} + +func (*LabelImageRequest_ImageClassificationConfig) isLabelImageRequest_RequestConfig() {} + +func (*LabelImageRequest_BoundingPolyConfig) isLabelImageRequest_RequestConfig() {} + +func (*LabelImageRequest_PolylineConfig) isLabelImageRequest_RequestConfig() {} + +func (*LabelImageRequest_SegmentationConfig) isLabelImageRequest_RequestConfig() {} + +func (m *LabelImageRequest) GetRequestConfig() isLabelImageRequest_RequestConfig { + if m != nil { + return m.RequestConfig + } + return nil +} + +func (m *LabelImageRequest) GetImageClassificationConfig() *ImageClassificationConfig { + if x, ok := m.GetRequestConfig().(*LabelImageRequest_ImageClassificationConfig); ok { + return x.ImageClassificationConfig + } + return nil +} + +func (m *LabelImageRequest) GetBoundingPolyConfig() *BoundingPolyConfig { + if x, ok := m.GetRequestConfig().(*LabelImageRequest_BoundingPolyConfig); ok { + return x.BoundingPolyConfig + } + return nil +} + +func (m *LabelImageRequest) GetPolylineConfig() *PolylineConfig { + if x, ok := m.GetRequestConfig().(*LabelImageRequest_PolylineConfig); ok { + return x.PolylineConfig + } + return nil +} + +func (m *LabelImageRequest) GetSegmentationConfig() *SegmentationConfig { + if x, ok := m.GetRequestConfig().(*LabelImageRequest_SegmentationConfig); ok { + return x.SegmentationConfig + } + return nil +} + +func (m *LabelImageRequest) GetParent() string { + if m != 
nil { + return m.Parent + } + return "" +} + +func (m *LabelImageRequest) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +func (m *LabelImageRequest) GetFeature() LabelImageRequest_Feature { + if m != nil { + return m.Feature + } + return LabelImageRequest_FEATURE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LabelImageRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LabelImageRequest_OneofMarshaler, _LabelImageRequest_OneofUnmarshaler, _LabelImageRequest_OneofSizer, []interface{}{ + (*LabelImageRequest_ImageClassificationConfig)(nil), + (*LabelImageRequest_BoundingPolyConfig)(nil), + (*LabelImageRequest_PolylineConfig)(nil), + (*LabelImageRequest_SegmentationConfig)(nil), + } +} + +func _LabelImageRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LabelImageRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelImageRequest_ImageClassificationConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationConfig); err != nil { + return err + } + case *LabelImageRequest_BoundingPolyConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BoundingPolyConfig); err != nil { + return err + } + case *LabelImageRequest_PolylineConfig: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PolylineConfig); err != nil { + return err + } + case *LabelImageRequest_SegmentationConfig: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SegmentationConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LabelImageRequest.RequestConfig has unexpected type %T", x) + } + return nil +} + +func _LabelImageRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LabelImageRequest) + switch tag { + case 4: // request_config.image_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageClassificationConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelImageRequest_ImageClassificationConfig{msg} + return true, err + case 5: // request_config.bounding_poly_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BoundingPolyConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelImageRequest_BoundingPolyConfig{msg} + return true, err + case 6: // request_config.polyline_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolylineConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelImageRequest_PolylineConfig{msg} + return true, err + case 7: // request_config.segmentation_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SegmentationConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelImageRequest_SegmentationConfig{msg} + return true, err + default: + return false, nil + } +} + +func _LabelImageRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LabelImageRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelImageRequest_ImageClassificationConfig: + s := proto.Size(x.ImageClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*LabelImageRequest_BoundingPolyConfig: + s := proto.Size(x.BoundingPolyConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelImageRequest_PolylineConfig: + s := proto.Size(x.PolylineConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelImageRequest_SegmentationConfig: + s := proto.Size(x.SegmentationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for LabelVideo. +type LabelVideoRequest struct { + // Required. Config for labeling tasks. The type of request config must + // match the selected feature. + // + // Types that are valid to be assigned to RequestConfig: + // *LabelVideoRequest_VideoClassificationConfig + // *LabelVideoRequest_ObjectDetectionConfig + // *LabelVideoRequest_ObjectTrackingConfig + // *LabelVideoRequest_EventConfig + RequestConfig isLabelVideoRequest_RequestConfig `protobuf_oneof:"request_config"` + // Required. Name of the dataset to request labeling task, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Basic human annotation config. + BasicConfig *HumanAnnotationConfig `protobuf:"bytes,2,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + // Required. The type of video labeling task. + Feature LabelVideoRequest_Feature `protobuf:"varint,3,opt,name=feature,proto3,enum=google.cloud.datalabeling.v1beta1.LabelVideoRequest_Feature" json:"feature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelVideoRequest) Reset() { *m = LabelVideoRequest{} } +func (m *LabelVideoRequest) String() string { return proto.CompactTextString(m) } +func (*LabelVideoRequest) ProtoMessage() {} +func (*LabelVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{15} +} +func (m *LabelVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelVideoRequest.Unmarshal(m, b) +} +func (m *LabelVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelVideoRequest.Marshal(b, m, deterministic) +} +func (dst *LabelVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelVideoRequest.Merge(dst, src) +} +func (m *LabelVideoRequest) XXX_Size() int { + return xxx_messageInfo_LabelVideoRequest.Size(m) +} +func (m *LabelVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LabelVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelVideoRequest proto.InternalMessageInfo + +type isLabelVideoRequest_RequestConfig interface { + isLabelVideoRequest_RequestConfig() +} + +type LabelVideoRequest_VideoClassificationConfig struct { + VideoClassificationConfig *VideoClassificationConfig `protobuf:"bytes,4,opt,name=video_classification_config,json=videoClassificationConfig,proto3,oneof"` +} + +type LabelVideoRequest_ObjectDetectionConfig struct { + ObjectDetectionConfig *ObjectDetectionConfig `protobuf:"bytes,5,opt,name=object_detection_config,json=objectDetectionConfig,proto3,oneof"` +} + +type LabelVideoRequest_ObjectTrackingConfig struct { + ObjectTrackingConfig *ObjectTrackingConfig `protobuf:"bytes,6,opt,name=object_tracking_config,json=objectTrackingConfig,proto3,oneof"` +} + +type LabelVideoRequest_EventConfig struct { + 
EventConfig *EventConfig `protobuf:"bytes,7,opt,name=event_config,json=eventConfig,proto3,oneof"` +} + +func (*LabelVideoRequest_VideoClassificationConfig) isLabelVideoRequest_RequestConfig() {} + +func (*LabelVideoRequest_ObjectDetectionConfig) isLabelVideoRequest_RequestConfig() {} + +func (*LabelVideoRequest_ObjectTrackingConfig) isLabelVideoRequest_RequestConfig() {} + +func (*LabelVideoRequest_EventConfig) isLabelVideoRequest_RequestConfig() {} + +func (m *LabelVideoRequest) GetRequestConfig() isLabelVideoRequest_RequestConfig { + if m != nil { + return m.RequestConfig + } + return nil +} + +func (m *LabelVideoRequest) GetVideoClassificationConfig() *VideoClassificationConfig { + if x, ok := m.GetRequestConfig().(*LabelVideoRequest_VideoClassificationConfig); ok { + return x.VideoClassificationConfig + } + return nil +} + +func (m *LabelVideoRequest) GetObjectDetectionConfig() *ObjectDetectionConfig { + if x, ok := m.GetRequestConfig().(*LabelVideoRequest_ObjectDetectionConfig); ok { + return x.ObjectDetectionConfig + } + return nil +} + +func (m *LabelVideoRequest) GetObjectTrackingConfig() *ObjectTrackingConfig { + if x, ok := m.GetRequestConfig().(*LabelVideoRequest_ObjectTrackingConfig); ok { + return x.ObjectTrackingConfig + } + return nil +} + +func (m *LabelVideoRequest) GetEventConfig() *EventConfig { + if x, ok := m.GetRequestConfig().(*LabelVideoRequest_EventConfig); ok { + return x.EventConfig + } + return nil +} + +func (m *LabelVideoRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *LabelVideoRequest) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +func (m *LabelVideoRequest) GetFeature() LabelVideoRequest_Feature { + if m != nil { + return m.Feature + } + return LabelVideoRequest_FEATURE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LabelVideoRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LabelVideoRequest_OneofMarshaler, _LabelVideoRequest_OneofUnmarshaler, _LabelVideoRequest_OneofSizer, []interface{}{ + (*LabelVideoRequest_VideoClassificationConfig)(nil), + (*LabelVideoRequest_ObjectDetectionConfig)(nil), + (*LabelVideoRequest_ObjectTrackingConfig)(nil), + (*LabelVideoRequest_EventConfig)(nil), + } +} + +func _LabelVideoRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LabelVideoRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelVideoRequest_VideoClassificationConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationConfig); err != nil { + return err + } + case *LabelVideoRequest_ObjectDetectionConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectDetectionConfig); err != nil { + return err + } + case *LabelVideoRequest_ObjectTrackingConfig: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectTrackingConfig); err != nil { + return err + } + case *LabelVideoRequest_EventConfig: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EventConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LabelVideoRequest.RequestConfig has unexpected type %T", x) + } + return nil +} + +func _LabelVideoRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LabelVideoRequest) + switch tag { + case 4: // request_config.video_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelVideoRequest_VideoClassificationConfig{msg} + return true, err + case 5: // request_config.object_detection_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ObjectDetectionConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelVideoRequest_ObjectDetectionConfig{msg} + return true, err + case 6: // request_config.object_tracking_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ObjectTrackingConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelVideoRequest_ObjectTrackingConfig{msg} + return true, err + case 7: // request_config.event_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EventConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelVideoRequest_EventConfig{msg} + return true, err + default: + return false, nil + } +} + +func _LabelVideoRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LabelVideoRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelVideoRequest_VideoClassificationConfig: + s := proto.Size(x.VideoClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelVideoRequest_ObjectDetectionConfig: + s := proto.Size(x.ObjectDetectionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelVideoRequest_ObjectTrackingConfig: + s := proto.Size(x.ObjectTrackingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelVideoRequest_EventConfig: + s := proto.Size(x.EventConfig) + n 
+= 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for LabelText. +type LabelTextRequest struct { + // Required. Config for labeling tasks. The type of request config must + // match the selected feature. + // + // Types that are valid to be assigned to RequestConfig: + // *LabelTextRequest_TextClassificationConfig + // *LabelTextRequest_TextEntityExtractionConfig + RequestConfig isLabelTextRequest_RequestConfig `protobuf_oneof:"request_config"` + // Required. Name of the data set to request labeling task, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Basic human annotation config. + BasicConfig *HumanAnnotationConfig `protobuf:"bytes,2,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + // Required. The type of text labeling task. + Feature LabelTextRequest_Feature `protobuf:"varint,6,opt,name=feature,proto3,enum=google.cloud.datalabeling.v1beta1.LabelTextRequest_Feature" json:"feature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelTextRequest) Reset() { *m = LabelTextRequest{} } +func (m *LabelTextRequest) String() string { return proto.CompactTextString(m) } +func (*LabelTextRequest) ProtoMessage() {} +func (*LabelTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{16} +} +func (m *LabelTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelTextRequest.Unmarshal(m, b) +} +func (m *LabelTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelTextRequest.Marshal(b, m, deterministic) +} +func (dst *LabelTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelTextRequest.Merge(dst, src) +} +func (m *LabelTextRequest) XXX_Size() int { + return xxx_messageInfo_LabelTextRequest.Size(m) +} +func (m *LabelTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LabelTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelTextRequest proto.InternalMessageInfo + +type isLabelTextRequest_RequestConfig interface { + isLabelTextRequest_RequestConfig() +} + +type LabelTextRequest_TextClassificationConfig struct { + TextClassificationConfig *TextClassificationConfig `protobuf:"bytes,4,opt,name=text_classification_config,json=textClassificationConfig,proto3,oneof"` +} + +type LabelTextRequest_TextEntityExtractionConfig struct { + TextEntityExtractionConfig *TextEntityExtractionConfig `protobuf:"bytes,5,opt,name=text_entity_extraction_config,json=textEntityExtractionConfig,proto3,oneof"` +} + +func (*LabelTextRequest_TextClassificationConfig) isLabelTextRequest_RequestConfig() {} + +func (*LabelTextRequest_TextEntityExtractionConfig) isLabelTextRequest_RequestConfig() {} + +func (m *LabelTextRequest) GetRequestConfig() isLabelTextRequest_RequestConfig { + if m != nil { + return m.RequestConfig + } + return nil +} + +func (m *LabelTextRequest) GetTextClassificationConfig() *TextClassificationConfig { + if x, ok := m.GetRequestConfig().(*LabelTextRequest_TextClassificationConfig); ok { + return x.TextClassificationConfig + } + return nil +} + +func (m *LabelTextRequest) GetTextEntityExtractionConfig() *TextEntityExtractionConfig { + if x, ok := 
m.GetRequestConfig().(*LabelTextRequest_TextEntityExtractionConfig); ok { + return x.TextEntityExtractionConfig + } + return nil +} + +func (m *LabelTextRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *LabelTextRequest) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +func (m *LabelTextRequest) GetFeature() LabelTextRequest_Feature { + if m != nil { + return m.Feature + } + return LabelTextRequest_FEATURE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LabelTextRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LabelTextRequest_OneofMarshaler, _LabelTextRequest_OneofUnmarshaler, _LabelTextRequest_OneofSizer, []interface{}{ + (*LabelTextRequest_TextClassificationConfig)(nil), + (*LabelTextRequest_TextEntityExtractionConfig)(nil), + } +} + +func _LabelTextRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LabelTextRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelTextRequest_TextClassificationConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationConfig); err != nil { + return err + } + case *LabelTextRequest_TextEntityExtractionConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextEntityExtractionConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LabelTextRequest.RequestConfig has unexpected type %T", x) + } + return nil +} + +func _LabelTextRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LabelTextRequest) + switch tag { + case 4: // request_config.text_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextClassificationConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelTextRequest_TextClassificationConfig{msg} + return true, err + case 5: // request_config.text_entity_extraction_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextEntityExtractionConfig) + err := b.DecodeMessage(msg) + m.RequestConfig = &LabelTextRequest_TextEntityExtractionConfig{msg} + return true, err + default: + return false, nil + } +} + +func _LabelTextRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LabelTextRequest) + // request_config + switch x := m.RequestConfig.(type) { + case *LabelTextRequest_TextClassificationConfig: + s := proto.Size(x.TextClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelTextRequest_TextEntityExtractionConfig: + s := proto.Size(x.TextEntityExtractionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for LabelAudio. +type LabelAudioRequest struct { + // Required. Name of the dataset to request labeling task, format: + // projects/{project_id}/datasets/{dataset_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Basic human annotation config. + BasicConfig *HumanAnnotationConfig `protobuf:"bytes,2,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + // Required. 
The type of audio labeling task. + Feature LabelAudioRequest_Feature `protobuf:"varint,3,opt,name=feature,proto3,enum=google.cloud.datalabeling.v1beta1.LabelAudioRequest_Feature" json:"feature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAudioRequest) Reset() { *m = LabelAudioRequest{} } +func (m *LabelAudioRequest) String() string { return proto.CompactTextString(m) } +func (*LabelAudioRequest) ProtoMessage() {} +func (*LabelAudioRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{17} +} +func (m *LabelAudioRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAudioRequest.Unmarshal(m, b) +} +func (m *LabelAudioRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAudioRequest.Marshal(b, m, deterministic) +} +func (dst *LabelAudioRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAudioRequest.Merge(dst, src) +} +func (m *LabelAudioRequest) XXX_Size() int { + return xxx_messageInfo_LabelAudioRequest.Size(m) +} +func (m *LabelAudioRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAudioRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAudioRequest proto.InternalMessageInfo + +func (m *LabelAudioRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *LabelAudioRequest) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +func (m *LabelAudioRequest) GetFeature() LabelAudioRequest_Feature { + if m != nil { + return m.Feature + } + return LabelAudioRequest_FEATURE_UNSPECIFIED +} + +// Request message for GetExample +type GetExampleRequest struct { + // Required. Name of example, format: + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id}/examples/{example_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. An expression for filtering Examples. Filter by + // annotation_spec.display_name is supported. 
Format + // "annotation_spec.display_name = {display_name}" + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetExampleRequest) Reset() { *m = GetExampleRequest{} } +func (m *GetExampleRequest) String() string { return proto.CompactTextString(m) } +func (*GetExampleRequest) ProtoMessage() {} +func (*GetExampleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{18} +} +func (m *GetExampleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetExampleRequest.Unmarshal(m, b) +} +func (m *GetExampleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetExampleRequest.Marshal(b, m, deterministic) +} +func (dst *GetExampleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetExampleRequest.Merge(dst, src) +} +func (m *GetExampleRequest) XXX_Size() int { + return xxx_messageInfo_GetExampleRequest.Size(m) +} +func (m *GetExampleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetExampleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetExampleRequest proto.InternalMessageInfo + +func (m *GetExampleRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetExampleRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Request message for ListExamples. +type ListExamplesRequest struct { + // Required. Example resource parent. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. An expression for filtering Examples. For annotated datasets that + // have annotation spec set, filter by + // annotation_spec.display_name is supported. Format + // "annotation_spec.display_name = {display_name}" + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListExamplesResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListExamplesResponse.next_page_token] of the previous + // [DataLabelingService.ListExamples] call. + // Return first page if empty. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListExamplesRequest) Reset() { *m = ListExamplesRequest{} } +func (m *ListExamplesRequest) String() string { return proto.CompactTextString(m) } +func (*ListExamplesRequest) ProtoMessage() {} +func (*ListExamplesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{19} +} +func (m *ListExamplesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListExamplesRequest.Unmarshal(m, b) +} +func (m *ListExamplesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListExamplesRequest.Marshal(b, m, deterministic) +} +func (dst *ListExamplesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListExamplesRequest.Merge(dst, src) +} +func (m *ListExamplesRequest) XXX_Size() int { + return xxx_messageInfo_ListExamplesRequest.Size(m) +} +func (m *ListExamplesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListExamplesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListExamplesRequest proto.InternalMessageInfo + +func (m *ListExamplesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListExamplesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListExamplesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListExamplesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Results of listing Examples in an annotated dataset. +type ListExamplesResponse struct { + // The list of examples to return. + Examples []*Example `protobuf:"bytes,1,rep,name=examples,proto3" json:"examples,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListExamplesResponse) Reset() { *m = ListExamplesResponse{} } +func (m *ListExamplesResponse) String() string { return proto.CompactTextString(m) } +func (*ListExamplesResponse) ProtoMessage() {} +func (*ListExamplesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{20} +} +func (m *ListExamplesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListExamplesResponse.Unmarshal(m, b) +} +func (m *ListExamplesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListExamplesResponse.Marshal(b, m, deterministic) +} +func (dst *ListExamplesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListExamplesResponse.Merge(dst, src) +} +func (m *ListExamplesResponse) XXX_Size() int { + return xxx_messageInfo_ListExamplesResponse.Size(m) +} +func (m *ListExamplesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListExamplesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListExamplesResponse proto.InternalMessageInfo + +func (m *ListExamplesResponse) GetExamples() []*Example { + if m != nil { + return m.Examples + } + return nil +} + +func (m *ListExamplesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for CreateAnnotationSpecSet. +type CreateAnnotationSpecSetRequest struct { + // Required. AnnotationSpecSet resource parent, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Annotation spec set to create. Annotation specs must be included. + // Only one annotation spec will be accepted for annotation specs with same + // display_name. 
+ AnnotationSpecSet *AnnotationSpecSet `protobuf:"bytes,2,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAnnotationSpecSetRequest) Reset() { *m = CreateAnnotationSpecSetRequest{} } +func (m *CreateAnnotationSpecSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAnnotationSpecSetRequest) ProtoMessage() {} +func (*CreateAnnotationSpecSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{21} +} +func (m *CreateAnnotationSpecSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAnnotationSpecSetRequest.Unmarshal(m, b) +} +func (m *CreateAnnotationSpecSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAnnotationSpecSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAnnotationSpecSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAnnotationSpecSetRequest.Merge(dst, src) +} +func (m *CreateAnnotationSpecSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateAnnotationSpecSetRequest.Size(m) +} +func (m *CreateAnnotationSpecSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAnnotationSpecSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAnnotationSpecSetRequest proto.InternalMessageInfo + +func (m *CreateAnnotationSpecSetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateAnnotationSpecSetRequest) GetAnnotationSpecSet() *AnnotationSpecSet { + if m != nil { + return m.AnnotationSpecSet + } + return nil +} + +// Request message for GetAnnotationSpecSet. +type GetAnnotationSpecSetRequest struct { + // Required. AnnotationSpecSet resource name, format: + // projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAnnotationSpecSetRequest) Reset() { *m = GetAnnotationSpecSetRequest{} } +func (m *GetAnnotationSpecSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetAnnotationSpecSetRequest) ProtoMessage() {} +func (*GetAnnotationSpecSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{22} +} +func (m *GetAnnotationSpecSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAnnotationSpecSetRequest.Unmarshal(m, b) +} +func (m *GetAnnotationSpecSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAnnotationSpecSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetAnnotationSpecSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAnnotationSpecSetRequest.Merge(dst, src) +} +func (m *GetAnnotationSpecSetRequest) XXX_Size() int { + return xxx_messageInfo_GetAnnotationSpecSetRequest.Size(m) +} +func (m *GetAnnotationSpecSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAnnotationSpecSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAnnotationSpecSetRequest proto.InternalMessageInfo + +func (m *GetAnnotationSpecSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListAnnotationSpecSets. 
+type ListAnnotationSpecSetsRequest struct { + // Required. Parent of AnnotationSpecSet resource, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Filter is not supported at this moment. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListAnnotationSpecSetsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse.next_page_token] of the previous + // [DataLabelingService.ListAnnotationSpecSets] call. + // Return first page if empty. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotationSpecSetsRequest) Reset() { *m = ListAnnotationSpecSetsRequest{} } +func (m *ListAnnotationSpecSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAnnotationSpecSetsRequest) ProtoMessage() {} +func (*ListAnnotationSpecSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{23} +} +func (m *ListAnnotationSpecSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotationSpecSetsRequest.Unmarshal(m, b) +} +func (m *ListAnnotationSpecSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotationSpecSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAnnotationSpecSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotationSpecSetsRequest.Merge(dst, src) +} +func (m *ListAnnotationSpecSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListAnnotationSpecSetsRequest.Size(m) +} +func (m *ListAnnotationSpecSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotationSpecSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotationSpecSetsRequest proto.InternalMessageInfo + +func (m *ListAnnotationSpecSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAnnotationSpecSetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListAnnotationSpecSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAnnotationSpecSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Results of listing annotation spec set under a project. +type ListAnnotationSpecSetsResponse struct { + // The list of annotation spec sets. + AnnotationSpecSets []*AnnotationSpecSet `protobuf:"bytes,1,rep,name=annotation_spec_sets,json=annotationSpecSets,proto3" json:"annotation_spec_sets,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotationSpecSetsResponse) Reset() { *m = ListAnnotationSpecSetsResponse{} } +func (m *ListAnnotationSpecSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAnnotationSpecSetsResponse) ProtoMessage() {} +func (*ListAnnotationSpecSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{24} +} +func (m *ListAnnotationSpecSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotationSpecSetsResponse.Unmarshal(m, b) +} +func (m *ListAnnotationSpecSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotationSpecSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAnnotationSpecSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotationSpecSetsResponse.Merge(dst, src) +} +func (m *ListAnnotationSpecSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListAnnotationSpecSetsResponse.Size(m) +} +func (m *ListAnnotationSpecSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotationSpecSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotationSpecSetsResponse proto.InternalMessageInfo + +func (m *ListAnnotationSpecSetsResponse) GetAnnotationSpecSets() []*AnnotationSpecSet { + if m != nil { + return m.AnnotationSpecSets + } + return nil +} + +func (m *ListAnnotationSpecSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteAnnotationSpecSet. +type DeleteAnnotationSpecSetRequest struct { + // Required. AnnotationSpec resource name, format: + // `projects/{project_id}/annotationSpecSets/{annotation_spec_set_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAnnotationSpecSetRequest) Reset() { *m = DeleteAnnotationSpecSetRequest{} } +func (m *DeleteAnnotationSpecSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAnnotationSpecSetRequest) ProtoMessage() {} +func (*DeleteAnnotationSpecSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{25} +} +func (m *DeleteAnnotationSpecSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAnnotationSpecSetRequest.Unmarshal(m, b) +} +func (m *DeleteAnnotationSpecSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAnnotationSpecSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAnnotationSpecSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAnnotationSpecSetRequest.Merge(dst, src) +} +func (m *DeleteAnnotationSpecSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAnnotationSpecSetRequest.Size(m) +} +func (m *DeleteAnnotationSpecSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAnnotationSpecSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAnnotationSpecSetRequest proto.InternalMessageInfo + +func (m *DeleteAnnotationSpecSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for CreateInstruction. 
+type CreateInstructionRequest struct { + // Required. Instruction resource parent, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Instruction of how to perform the labeling task. + Instruction *Instruction `protobuf:"bytes,2,opt,name=instruction,proto3" json:"instruction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstructionRequest) Reset() { *m = CreateInstructionRequest{} } +func (m *CreateInstructionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInstructionRequest) ProtoMessage() {} +func (*CreateInstructionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{26} +} +func (m *CreateInstructionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstructionRequest.Unmarshal(m, b) +} +func (m *CreateInstructionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstructionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInstructionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstructionRequest.Merge(dst, src) +} +func (m *CreateInstructionRequest) XXX_Size() int { + return xxx_messageInfo_CreateInstructionRequest.Size(m) +} +func (m *CreateInstructionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstructionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstructionRequest proto.InternalMessageInfo + +func (m *CreateInstructionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInstructionRequest) GetInstruction() *Instruction { + if m != nil { + return m.Instruction + } + return nil +} + +// Request message for GetInstruction. +type GetInstructionRequest struct { + // Required. Instruction resource name, format: + // projects/{project_id}/instructions/{instruction_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstructionRequest) Reset() { *m = GetInstructionRequest{} } +func (m *GetInstructionRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstructionRequest) ProtoMessage() {} +func (*GetInstructionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{27} +} +func (m *GetInstructionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstructionRequest.Unmarshal(m, b) +} +func (m *GetInstructionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstructionRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstructionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstructionRequest.Merge(dst, src) +} +func (m *GetInstructionRequest) XXX_Size() int { + return xxx_messageInfo_GetInstructionRequest.Size(m) +} +func (m *GetInstructionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstructionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstructionRequest proto.InternalMessageInfo + +func (m *GetInstructionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for DeleteInstruction. +type DeleteInstructionRequest struct { + // Required. 
Instruction resource name, format: + // projects/{project_id}/instructions/{instruction_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstructionRequest) Reset() { *m = DeleteInstructionRequest{} } +func (m *DeleteInstructionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstructionRequest) ProtoMessage() {} +func (*DeleteInstructionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{28} +} +func (m *DeleteInstructionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstructionRequest.Unmarshal(m, b) +} +func (m *DeleteInstructionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstructionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstructionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstructionRequest.Merge(dst, src) +} +func (m *DeleteInstructionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstructionRequest.Size(m) +} +func (m *DeleteInstructionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstructionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstructionRequest proto.InternalMessageInfo + +func (m *DeleteInstructionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListInstructions. +type ListInstructionsRequest struct { + // Required. Instruction resource parent, format: + // projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Filter is not supported at this moment. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. Requested page size. Server may return fewer results than + // requested. Default value is 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results for the server to return. + // Typically obtained by + // [ListInstructionsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListInstructionsResponse.next_page_token] of the previous + // [DataLabelingService.ListInstructions] call. + // Return first page if empty. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstructionsRequest) Reset() { *m = ListInstructionsRequest{} } +func (m *ListInstructionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstructionsRequest) ProtoMessage() {} +func (*ListInstructionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{29} +} +func (m *ListInstructionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstructionsRequest.Unmarshal(m, b) +} +func (m *ListInstructionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstructionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstructionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstructionsRequest.Merge(dst, src) +} +func (m *ListInstructionsRequest) XXX_Size() int { + return xxx_messageInfo_ListInstructionsRequest.Size(m) +} +func (m *ListInstructionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstructionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstructionsRequest proto.InternalMessageInfo + +func (m *ListInstructionsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstructionsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListInstructionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstructionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Results of listing instructions under a project. +type ListInstructionsResponse struct { + // The list of Instructions to return. + Instructions []*Instruction `protobuf:"bytes,1,rep,name=instructions,proto3" json:"instructions,omitempty"` + // A token to retrieve next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstructionsResponse) Reset() { *m = ListInstructionsResponse{} } +func (m *ListInstructionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstructionsResponse) ProtoMessage() {} +func (*ListInstructionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3, []int{30} +} +func (m *ListInstructionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstructionsResponse.Unmarshal(m, b) +} +func (m *ListInstructionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstructionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstructionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstructionsResponse.Merge(dst, src) +} +func (m *ListInstructionsResponse) XXX_Size() int { + return xxx_messageInfo_ListInstructionsResponse.Size(m) +} +func (m *ListInstructionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstructionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstructionsResponse proto.InternalMessageInfo + +func (m *ListInstructionsResponse) GetInstructions() []*Instruction { + if m != nil { + return m.Instructions + } + return nil +} + +func (m *ListInstructionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*CreateDatasetRequest)(nil), "google.cloud.datalabeling.v1beta1.CreateDatasetRequest") + proto.RegisterType((*GetDatasetRequest)(nil), "google.cloud.datalabeling.v1beta1.GetDatasetRequest") + proto.RegisterType((*ListDatasetsRequest)(nil), "google.cloud.datalabeling.v1beta1.ListDatasetsRequest") + proto.RegisterType((*ListDatasetsResponse)(nil), "google.cloud.datalabeling.v1beta1.ListDatasetsResponse") + proto.RegisterType((*DeleteDatasetRequest)(nil), "google.cloud.datalabeling.v1beta1.DeleteDatasetRequest") + proto.RegisterType((*ImportDataRequest)(nil), "google.cloud.datalabeling.v1beta1.ImportDataRequest") + proto.RegisterType((*ExportDataRequest)(nil), "google.cloud.datalabeling.v1beta1.ExportDataRequest") + proto.RegisterType((*GetDataItemRequest)(nil), "google.cloud.datalabeling.v1beta1.GetDataItemRequest") + proto.RegisterType((*ListDataItemsRequest)(nil), "google.cloud.datalabeling.v1beta1.ListDataItemsRequest") + proto.RegisterType((*ListDataItemsResponse)(nil), "google.cloud.datalabeling.v1beta1.ListDataItemsResponse") + proto.RegisterType((*GetAnnotatedDatasetRequest)(nil), "google.cloud.datalabeling.v1beta1.GetAnnotatedDatasetRequest") + proto.RegisterType((*ListAnnotatedDatasetsRequest)(nil), "google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsRequest") + proto.RegisterType((*DeleteAnnotatedDatasetRequest)(nil), "google.cloud.datalabeling.v1beta1.DeleteAnnotatedDatasetRequest") + proto.RegisterType((*ListAnnotatedDatasetsResponse)(nil), "google.cloud.datalabeling.v1beta1.ListAnnotatedDatasetsResponse") + proto.RegisterType((*LabelImageRequest)(nil), "google.cloud.datalabeling.v1beta1.LabelImageRequest") + proto.RegisterType((*LabelVideoRequest)(nil), "google.cloud.datalabeling.v1beta1.LabelVideoRequest") + proto.RegisterType((*LabelTextRequest)(nil), "google.cloud.datalabeling.v1beta1.LabelTextRequest") + 
proto.RegisterType((*LabelAudioRequest)(nil), "google.cloud.datalabeling.v1beta1.LabelAudioRequest") + proto.RegisterType((*GetExampleRequest)(nil), "google.cloud.datalabeling.v1beta1.GetExampleRequest") + proto.RegisterType((*ListExamplesRequest)(nil), "google.cloud.datalabeling.v1beta1.ListExamplesRequest") + proto.RegisterType((*ListExamplesResponse)(nil), "google.cloud.datalabeling.v1beta1.ListExamplesResponse") + proto.RegisterType((*CreateAnnotationSpecSetRequest)(nil), "google.cloud.datalabeling.v1beta1.CreateAnnotationSpecSetRequest") + proto.RegisterType((*GetAnnotationSpecSetRequest)(nil), "google.cloud.datalabeling.v1beta1.GetAnnotationSpecSetRequest") + proto.RegisterType((*ListAnnotationSpecSetsRequest)(nil), "google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsRequest") + proto.RegisterType((*ListAnnotationSpecSetsResponse)(nil), "google.cloud.datalabeling.v1beta1.ListAnnotationSpecSetsResponse") + proto.RegisterType((*DeleteAnnotationSpecSetRequest)(nil), "google.cloud.datalabeling.v1beta1.DeleteAnnotationSpecSetRequest") + proto.RegisterType((*CreateInstructionRequest)(nil), "google.cloud.datalabeling.v1beta1.CreateInstructionRequest") + proto.RegisterType((*GetInstructionRequest)(nil), "google.cloud.datalabeling.v1beta1.GetInstructionRequest") + proto.RegisterType((*DeleteInstructionRequest)(nil), "google.cloud.datalabeling.v1beta1.DeleteInstructionRequest") + proto.RegisterType((*ListInstructionsRequest)(nil), "google.cloud.datalabeling.v1beta1.ListInstructionsRequest") + proto.RegisterType((*ListInstructionsResponse)(nil), "google.cloud.datalabeling.v1beta1.ListInstructionsResponse") + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.LabelImageRequest_Feature", LabelImageRequest_Feature_name, LabelImageRequest_Feature_value) + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.LabelVideoRequest_Feature", LabelVideoRequest_Feature_name, LabelVideoRequest_Feature_value) + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.LabelTextRequest_Feature", LabelTextRequest_Feature_name, LabelTextRequest_Feature_value) + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.LabelAudioRequest_Feature", LabelAudioRequest_Feature_name, LabelAudioRequest_Feature_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DataLabelingServiceClient is the client API for DataLabelingService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DataLabelingServiceClient interface { + // Creates dataset. If success return a Dataset resource. + CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Gets dataset by resource name. + GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Lists datasets under a project. Pagination is supported. + ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) + // Deletes a dataset by resource name. + DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Imports data into dataset based on source locations defined in request. 
+ // It can be called multiple times for the same dataset. Each dataset can + // only have one long running operation running on it. For example, no + // labeling task (also long running operation) can be started while + // importing is still ongoing. Vice versa. + ImportData(ctx context.Context, in *ImportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports data and annotations from dataset. + ExportData(ctx context.Context, in *ExportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets a data item in a dataset by resource name. This API can be + // called after data are imported into dataset. + GetDataItem(ctx context.Context, in *GetDataItemRequest, opts ...grpc.CallOption) (*DataItem, error) + // Lists data items in a dataset. This API can be called after data + // are imported into dataset. Pagination is supported. + ListDataItems(ctx context.Context, in *ListDataItemsRequest, opts ...grpc.CallOption) (*ListDataItemsResponse, error) + // Gets an annotated dataset by resource name. + GetAnnotatedDataset(ctx context.Context, in *GetAnnotatedDatasetRequest, opts ...grpc.CallOption) (*AnnotatedDataset, error) + // Lists annotated datasets for a dataset. Pagination is supported. + ListAnnotatedDatasets(ctx context.Context, in *ListAnnotatedDatasetsRequest, opts ...grpc.CallOption) (*ListAnnotatedDatasetsResponse, error) + // Deletes an annotated dataset by resource name. + DeleteAnnotatedDataset(ctx context.Context, in *DeleteAnnotatedDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Starts a labeling task for image. The type of image labeling task is + // configured by feature in the request. + LabelImage(ctx context.Context, in *LabelImageRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Starts a labeling task for video. The type of video labeling task is + // configured by feature in the request. + LabelVideo(ctx context.Context, in *LabelVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Starts a labeling task for text. The type of text labeling task is + // configured by feature in the request. + LabelText(ctx context.Context, in *LabelTextRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Starts a labeling task for audio. The type of audio labeling task is + // configured by feature in the request. + LabelAudio(ctx context.Context, in *LabelAudioRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets an example by resource name, including both data and annotation. + GetExample(ctx context.Context, in *GetExampleRequest, opts ...grpc.CallOption) (*Example, error) + // Lists examples in an annotated dataset. Pagination is supported. + ListExamples(ctx context.Context, in *ListExamplesRequest, opts ...grpc.CallOption) (*ListExamplesResponse, error) + // Creates an annotation spec set by providing a set of labels. + CreateAnnotationSpecSet(ctx context.Context, in *CreateAnnotationSpecSetRequest, opts ...grpc.CallOption) (*AnnotationSpecSet, error) + // Gets an annotation spec set by resource name. + GetAnnotationSpecSet(ctx context.Context, in *GetAnnotationSpecSetRequest, opts ...grpc.CallOption) (*AnnotationSpecSet, error) + // Lists annotation spec sets for a project. Pagination is supported. + ListAnnotationSpecSets(ctx context.Context, in *ListAnnotationSpecSetsRequest, opts ...grpc.CallOption) (*ListAnnotationSpecSetsResponse, error) + // Deletes an annotation spec set by resource name. 
+ DeleteAnnotationSpecSet(ctx context.Context, in *DeleteAnnotationSpecSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates an instruction for how data should be labeled. + CreateInstruction(ctx context.Context, in *CreateInstructionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets an instruction by resource name. + GetInstruction(ctx context.Context, in *GetInstructionRequest, opts ...grpc.CallOption) (*Instruction, error) + // Lists instructions for a project. Pagination is supported. + ListInstructions(ctx context.Context, in *ListInstructionsRequest, opts ...grpc.CallOption) (*ListInstructionsResponse, error) + // Deletes an instruction object by resource name. + DeleteInstruction(ctx context.Context, in *DeleteInstructionRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type dataLabelingServiceClient struct { + cc *grpc.ClientConn +} + +func NewDataLabelingServiceClient(cc *grpc.ClientConn) DataLabelingServiceClient { + return &dataLabelingServiceClient{cc} +} + +func (c *dataLabelingServiceClient) CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) { + out := new(ListDatasetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ImportData(ctx context.Context, in *ImportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ExportData(ctx context.Context, in *ExportDataRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetDataItem(ctx context.Context, in *GetDataItemRequest, opts ...grpc.CallOption) (*DataItem, error) { + out := new(DataItem) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListDataItems(ctx context.Context, in *ListDataItemsRequest, opts ...grpc.CallOption) (*ListDataItemsResponse, error) { + out := new(ListDataItemsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetAnnotatedDataset(ctx context.Context, in *GetAnnotatedDatasetRequest, opts ...grpc.CallOption) (*AnnotatedDataset, error) { + out := new(AnnotatedDataset) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListAnnotatedDatasets(ctx context.Context, in *ListAnnotatedDatasetsRequest, opts ...grpc.CallOption) (*ListAnnotatedDatasetsResponse, error) { + out := new(ListAnnotatedDatasetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) DeleteAnnotatedDataset(ctx context.Context, in *DeleteAnnotatedDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) LabelImage(ctx context.Context, in *LabelImageRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) LabelVideo(ctx context.Context, in *LabelVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) LabelText(ctx context.Context, in *LabelTextRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) LabelAudio(ctx context.Context, in *LabelAudioRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelAudio", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetExample(ctx context.Context, in *GetExampleRequest, opts ...grpc.CallOption) (*Example, error) { + out := new(Example) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListExamples(ctx context.Context, in *ListExamplesRequest, opts ...grpc.CallOption) (*ListExamplesResponse, error) { + out := new(ListExamplesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) CreateAnnotationSpecSet(ctx context.Context, in *CreateAnnotationSpecSetRequest, opts ...grpc.CallOption) (*AnnotationSpecSet, error) { + out := new(AnnotationSpecSet) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetAnnotationSpecSet(ctx context.Context, in *GetAnnotationSpecSetRequest, opts ...grpc.CallOption) (*AnnotationSpecSet, error) { + out := new(AnnotationSpecSet) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListAnnotationSpecSets(ctx context.Context, in *ListAnnotationSpecSetsRequest, opts ...grpc.CallOption) (*ListAnnotationSpecSetsResponse, error) { + out := new(ListAnnotationSpecSetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) DeleteAnnotationSpecSet(ctx context.Context, in *DeleteAnnotationSpecSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) CreateInstruction(ctx context.Context, in *CreateInstructionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) GetInstruction(ctx context.Context, in *GetInstructionRequest, opts ...grpc.CallOption) (*Instruction, error) { + out := new(Instruction) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) ListInstructions(ctx context.Context, in *ListInstructionsRequest, opts ...grpc.CallOption) (*ListInstructionsResponse, error) { + out := new(ListInstructionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dataLabelingServiceClient) DeleteInstruction(ctx context.Context, in *DeleteInstructionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// DataLabelingServiceServer is the server API for DataLabelingService service. +type DataLabelingServiceServer interface { + // Creates dataset. If success return a Dataset resource. + CreateDataset(context.Context, *CreateDatasetRequest) (*Dataset, error) + // Gets dataset by resource name. + GetDataset(context.Context, *GetDatasetRequest) (*Dataset, error) + // Lists datasets under a project. Pagination is supported. + ListDatasets(context.Context, *ListDatasetsRequest) (*ListDatasetsResponse, error) + // Deletes a dataset by resource name. + DeleteDataset(context.Context, *DeleteDatasetRequest) (*empty.Empty, error) + // Imports data into dataset based on source locations defined in request. + // It can be called multiple times for the same dataset. Each dataset can + // only have one long running operation running on it. For example, no + // labeling task (also long running operation) can be started while + // importing is still ongoing. Vice versa. + ImportData(context.Context, *ImportDataRequest) (*longrunning.Operation, error) + // Exports data and annotations from dataset. + ExportData(context.Context, *ExportDataRequest) (*longrunning.Operation, error) + // Gets a data item in a dataset by resource name. This API can be + // called after data are imported into dataset. + GetDataItem(context.Context, *GetDataItemRequest) (*DataItem, error) + // Lists data items in a dataset. This API can be called after data + // are imported into dataset. Pagination is supported. + ListDataItems(context.Context, *ListDataItemsRequest) (*ListDataItemsResponse, error) + // Gets an annotated dataset by resource name. + GetAnnotatedDataset(context.Context, *GetAnnotatedDatasetRequest) (*AnnotatedDataset, error) + // Lists annotated datasets for a dataset. Pagination is supported. + ListAnnotatedDatasets(context.Context, *ListAnnotatedDatasetsRequest) (*ListAnnotatedDatasetsResponse, error) + // Deletes an annotated dataset by resource name. + DeleteAnnotatedDataset(context.Context, *DeleteAnnotatedDatasetRequest) (*empty.Empty, error) + // Starts a labeling task for image. The type of image labeling task is + // configured by feature in the request. + LabelImage(context.Context, *LabelImageRequest) (*longrunning.Operation, error) + // Starts a labeling task for video. The type of video labeling task is + // configured by feature in the request. + LabelVideo(context.Context, *LabelVideoRequest) (*longrunning.Operation, error) + // Starts a labeling task for text. The type of text labeling task is + // configured by feature in the request. + LabelText(context.Context, *LabelTextRequest) (*longrunning.Operation, error) + // Starts a labeling task for audio. The type of audio labeling task is + // configured by feature in the request. + LabelAudio(context.Context, *LabelAudioRequest) (*longrunning.Operation, error) + // Gets an example by resource name, including both data and annotation. + GetExample(context.Context, *GetExampleRequest) (*Example, error) + // Lists examples in an annotated dataset. Pagination is supported. + ListExamples(context.Context, *ListExamplesRequest) (*ListExamplesResponse, error) + // Creates an annotation spec set by providing a set of labels. + CreateAnnotationSpecSet(context.Context, *CreateAnnotationSpecSetRequest) (*AnnotationSpecSet, error) + // Gets an annotation spec set by resource name. 
+ GetAnnotationSpecSet(context.Context, *GetAnnotationSpecSetRequest) (*AnnotationSpecSet, error) + // Lists annotation spec sets for a project. Pagination is supported. + ListAnnotationSpecSets(context.Context, *ListAnnotationSpecSetsRequest) (*ListAnnotationSpecSetsResponse, error) + // Deletes an annotation spec set by resource name. + DeleteAnnotationSpecSet(context.Context, *DeleteAnnotationSpecSetRequest) (*empty.Empty, error) + // Creates an instruction for how data should be labeled. + CreateInstruction(context.Context, *CreateInstructionRequest) (*longrunning.Operation, error) + // Gets an instruction by resource name. + GetInstruction(context.Context, *GetInstructionRequest) (*Instruction, error) + // Lists instructions for a project. Pagination is supported. + ListInstructions(context.Context, *ListInstructionsRequest) (*ListInstructionsResponse, error) + // Deletes an instruction object by resource name. + DeleteInstruction(context.Context, *DeleteInstructionRequest) (*empty.Empty, error) +} + +func RegisterDataLabelingServiceServer(s *grpc.Server, srv DataLabelingServiceServer) { + s.RegisterService(&_DataLabelingService_serviceDesc, srv) +} + +func _DataLabelingService_CreateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).CreateDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).CreateDataset(ctx, req.(*CreateDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetDataset(ctx, req.(*GetDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListDatasets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDatasetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListDatasets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDatasets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListDatasets(ctx, req.(*ListDatasetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_DeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDatasetRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).DeleteDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).DeleteDataset(ctx, req.(*DeleteDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ImportData_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportDataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ImportData(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ImportData", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ImportData(ctx, req.(*ImportDataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ExportData_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportDataRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ExportData(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ExportData", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ExportData(ctx, req.(*ExportDataRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetDataItem_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDataItemRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetDataItem(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetDataItem", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetDataItem(ctx, req.(*GetDataItemRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListDataItems_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDataItemsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListDataItems(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListDataItems", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListDataItems(ctx, req.(*ListDataItemsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetAnnotatedDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(GetAnnotatedDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetAnnotatedDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotatedDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetAnnotatedDataset(ctx, req.(*GetAnnotatedDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListAnnotatedDatasets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAnnotatedDatasetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListAnnotatedDatasets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotatedDatasets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListAnnotatedDatasets(ctx, req.(*ListAnnotatedDatasetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_DeleteAnnotatedDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAnnotatedDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).DeleteAnnotatedDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotatedDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).DeleteAnnotatedDataset(ctx, req.(*DeleteAnnotatedDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_LabelImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LabelImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).LabelImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).LabelImage(ctx, req.(*LabelImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_LabelVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LabelVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).LabelVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).LabelVideo(ctx, req.(*LabelVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_DataLabelingService_LabelText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LabelTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).LabelText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).LabelText(ctx, req.(*LabelTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_LabelAudio_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LabelAudioRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).LabelAudio(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/LabelAudio", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).LabelAudio(ctx, req.(*LabelAudioRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetExample_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetExampleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetExample(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetExample", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetExample(ctx, req.(*GetExampleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListExamples_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListExamplesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListExamples(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListExamples", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListExamples(ctx, req.(*ListExamplesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_CreateAnnotationSpecSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAnnotationSpecSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).CreateAnnotationSpecSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateAnnotationSpecSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).CreateAnnotationSpecSet(ctx, req.(*CreateAnnotationSpecSetRequest)) 
+ } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetAnnotationSpecSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAnnotationSpecSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetAnnotationSpecSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetAnnotationSpecSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetAnnotationSpecSet(ctx, req.(*GetAnnotationSpecSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListAnnotationSpecSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAnnotationSpecSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListAnnotationSpecSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListAnnotationSpecSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListAnnotationSpecSets(ctx, req.(*ListAnnotationSpecSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_DeleteAnnotationSpecSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAnnotationSpecSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).DeleteAnnotationSpecSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteAnnotationSpecSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).DeleteAnnotationSpecSet(ctx, req.(*DeleteAnnotationSpecSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_CreateInstruction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInstructionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).CreateInstruction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/CreateInstruction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).CreateInstruction(ctx, req.(*CreateInstructionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_GetInstruction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstructionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).GetInstruction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + 
FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/GetInstruction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).GetInstruction(ctx, req.(*GetInstructionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_ListInstructions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstructionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).ListInstructions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/ListInstructions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).ListInstructions(ctx, req.(*ListInstructionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DataLabelingService_DeleteInstruction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstructionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DataLabelingServiceServer).DeleteInstruction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.datalabeling.v1beta1.DataLabelingService/DeleteInstruction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DataLabelingServiceServer).DeleteInstruction(ctx, req.(*DeleteInstructionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DataLabelingService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.datalabeling.v1beta1.DataLabelingService", + HandlerType: (*DataLabelingServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateDataset", + Handler: _DataLabelingService_CreateDataset_Handler, + }, + { + MethodName: "GetDataset", + Handler: _DataLabelingService_GetDataset_Handler, + }, + { + MethodName: "ListDatasets", + Handler: _DataLabelingService_ListDatasets_Handler, + }, + { + MethodName: "DeleteDataset", + Handler: _DataLabelingService_DeleteDataset_Handler, + }, + { + MethodName: "ImportData", + Handler: _DataLabelingService_ImportData_Handler, + }, + { + MethodName: "ExportData", + Handler: _DataLabelingService_ExportData_Handler, + }, + { + MethodName: "GetDataItem", + Handler: _DataLabelingService_GetDataItem_Handler, + }, + { + MethodName: "ListDataItems", + Handler: _DataLabelingService_ListDataItems_Handler, + }, + { + MethodName: "GetAnnotatedDataset", + Handler: _DataLabelingService_GetAnnotatedDataset_Handler, + }, + { + MethodName: "ListAnnotatedDatasets", + Handler: _DataLabelingService_ListAnnotatedDatasets_Handler, + }, + { + MethodName: "DeleteAnnotatedDataset", + Handler: _DataLabelingService_DeleteAnnotatedDataset_Handler, + }, + { + MethodName: "LabelImage", + Handler: _DataLabelingService_LabelImage_Handler, + }, + { + MethodName: "LabelVideo", + Handler: _DataLabelingService_LabelVideo_Handler, + }, + { + MethodName: "LabelText", + Handler: _DataLabelingService_LabelText_Handler, + }, + { + MethodName: "LabelAudio", + Handler: _DataLabelingService_LabelAudio_Handler, + }, + { + MethodName: "GetExample", + Handler: _DataLabelingService_GetExample_Handler, + }, + { + MethodName: 
"ListExamples", + Handler: _DataLabelingService_ListExamples_Handler, + }, + { + MethodName: "CreateAnnotationSpecSet", + Handler: _DataLabelingService_CreateAnnotationSpecSet_Handler, + }, + { + MethodName: "GetAnnotationSpecSet", + Handler: _DataLabelingService_GetAnnotationSpecSet_Handler, + }, + { + MethodName: "ListAnnotationSpecSets", + Handler: _DataLabelingService_ListAnnotationSpecSets_Handler, + }, + { + MethodName: "DeleteAnnotationSpecSet", + Handler: _DataLabelingService_DeleteAnnotationSpecSet_Handler, + }, + { + MethodName: "CreateInstruction", + Handler: _DataLabelingService_CreateInstruction_Handler, + }, + { + MethodName: "GetInstruction", + Handler: _DataLabelingService_GetInstruction_Handler, + }, + { + MethodName: "ListInstructions", + Handler: _DataLabelingService_ListInstructions_Handler, + }, + { + MethodName: "DeleteInstruction", + Handler: _DataLabelingService_DeleteInstruction_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/datalabeling/v1beta1/data_labeling_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/data_labeling_service.proto", fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3) +} + +var fileDescriptor_data_labeling_service_e7f2e6cc30ea93c3 = []byte{ + // 2191 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x5a, 0xcd, 0x6f, 0x1b, 0xc7, + 0x15, 0xcf, 0xe8, 0xcb, 0xd6, 0xd3, 0x87, 0xc9, 0x91, 0x2c, 0x31, 0x74, 0x64, 0xa8, 0x1b, 0xa4, + 0x71, 0x68, 0x94, 0xb4, 0x2c, 0xbb, 0x76, 0xa4, 0x2a, 0x0e, 0x45, 0xae, 0x14, 0xa6, 0x0a, 0xa9, + 0x92, 0x94, 0xeb, 0xb4, 0x05, 0x88, 0x25, 0x39, 0x62, 0xb6, 0x26, 0x77, 0x59, 0xee, 0x52, 0x90, + 0x1c, 0xa4, 0x40, 0x8d, 0xb6, 0x87, 0xe6, 0xd0, 0x43, 0x81, 0x1c, 0x5a, 0xc4, 0x40, 0x0f, 0xed, + 0xa5, 0x4d, 0xd0, 0x4b, 0x6f, 0x6d, 0x0e, 0x05, 0x7a, 0xea, 0xc7, 0xa5, 0x40, 0x6f, 0xbd, 0xf5, + 0x0f, 0x29, 0x66, 0x76, 0xf6, 0x8b, 0xbb, 0x4b, 0x0e, 0x65, 0x03, 0x6a, 0x6e, 0xda, 0xd9, 0x7d, + 0xef, 0xfd, 0xde, 0x7b, 0xbf, 0xb7, 0x3b, 0xf3, 0x13, 0x61, 0xa7, 0xa5, 0xeb, 0xad, 0x36, 0xc9, + 0x34, 0xda, 0x7a, 0xbf, 0x99, 0x69, 0x2a, 0xa6, 0xd2, 0x56, 0xea, 0xa4, 0xad, 0x6a, 0xad, 0xcc, + 0xc9, 0x46, 0x9d, 0x98, 0xca, 0x06, 0x5b, 0xac, 0xd9, 0xab, 0x35, 0x83, 0xf4, 0x4e, 0xd4, 0x06, + 0x49, 0x77, 0x7b, 0xba, 0xa9, 0xe3, 0xaf, 0x58, 0xe6, 0x69, 0x66, 0x9e, 0xf6, 0x9a, 0xa7, 0xb9, + 0x79, 0xf2, 0x15, 0x1e, 0x41, 0xe9, 0xaa, 0x19, 0x45, 0xd3, 0x74, 0x53, 0x31, 0x55, 0x5d, 0x33, + 0x2c, 0x07, 0xc9, 0xed, 0xd1, 0xf1, 0x5d, 0xa3, 0x9a, 0xd1, 0x25, 0x8d, 0x9a, 0x41, 0x4c, 0x6e, + 0x9c, 0x11, 0x03, 0xef, 0x1a, 0x3c, 0x18, 0x6d, 0xf0, 0x41, 0xbf, 0xa3, 0x68, 0x35, 0x4f, 0xcc, + 0x86, 0xae, 0x1d, 0xab, 0x2d, 0xee, 0x60, 0x73, 0xb4, 0x03, 0x55, 0x33, 0xcc, 0x5e, 0xbf, 0x41, + 0x6d, 0xb9, 0xd1, 0xed, 0xd1, 0x46, 0x7a, 0x97, 0xf4, 0x7c, 0x75, 0x79, 0x95, 0xdb, 0xb4, 0x75, + 0xad, 0xd5, 0xeb, 0x6b, 0x1a, 0x7d, 0x38, 0xf0, 0xd0, 0x35, 0xfe, 0x10, 0xbb, 0xaa, 0xf7, 0x8f, + 0x33, 0xa4, 0xd3, 0x35, 0xcf, 0xf8, 0xcd, 0xf5, 0xc1, 0x9b, 0xc7, 0x2a, 0x69, 0x37, 0x6b, 0x1d, + 0xc5, 0x78, 0x6c, 0x3d, 0x21, 0x99, 0xb0, 0x9c, 0xeb, 0x11, 0xc5, 0x24, 0x79, 0xab, 0x48, 0x65, + 0xf2, 0x83, 0x3e, 0x31, 0x4c, 0xbc, 0x02, 0x33, 0x5d, 0xa5, 0x47, 0x34, 0x33, 0x81, 0xd6, 0xd1, + 0x8d, 0xd9, 0x32, 0xbf, 0xc2, 0x79, 0xb8, 0xc4, 0xcb, 0x99, 0x98, 0x58, 0x47, 0x37, 0xe6, 0x6e, + 0xa7, 0xd2, 0x23, 0xdb, 0x9f, 0xb6, 0x7d, 0xdb, 0xa6, 0xd2, 0xeb, 0x10, 0xdf, 0x27, 0xe6, 0x40, + 0x48, 0x0c, 0x53, 0x9a, 0xd2, 0x21, 0x3c, 0x20, 0xfb, 0x5b, 0xfa, 
0x11, 0x82, 0xa5, 0x03, 0xd5, + 0xb0, 0x1f, 0x35, 0x46, 0xc1, 0x5b, 0x81, 0x99, 0x63, 0xb5, 0x6d, 0x92, 0x1e, 0x43, 0x37, 0x5b, + 0xe6, 0x57, 0xf8, 0x1a, 0xcc, 0x76, 0x95, 0x16, 0xa9, 0x19, 0xea, 0x13, 0x92, 0x98, 0x5c, 0x47, + 0x37, 0xa6, 0xcb, 0x97, 0xe9, 0x42, 0x45, 0x7d, 0x42, 0xf0, 0x1a, 0x00, 0xbb, 0x69, 0xea, 0x8f, + 0x89, 0x96, 0x98, 0x62, 0x86, 0xec, 0xf1, 0x2a, 0x5d, 0x90, 0x7e, 0x8a, 0x60, 0xd9, 0x8f, 0xc1, + 0xe8, 0xea, 0x9a, 0x41, 0xf0, 0x1e, 0x5c, 0xe6, 0x09, 0x19, 0x09, 0xb4, 0x3e, 0x39, 0x66, 0x31, + 0x1c, 0x5b, 0xfc, 0x55, 0xb8, 0xa2, 0x91, 0x53, 0xb3, 0xe6, 0x01, 0x61, 0xa1, 0x5f, 0xa0, 0xcb, + 0x87, 0x0e, 0x90, 0x14, 0x2c, 0xe7, 0x49, 0x9b, 0x04, 0x7a, 0x15, 0x56, 0xb8, 0x27, 0x10, 0x2f, + 0x74, 0xba, 0x7a, 0x8f, 0xa1, 0x1e, 0xf2, 0x20, 0xfe, 0x16, 0xcc, 0xab, 0x5a, 0xb7, 0x6f, 0x72, + 0x8e, 0xf3, 0xae, 0xa6, 0x05, 0x12, 0x29, 0x50, 0xb3, 0x1c, 0xb3, 0x2a, 0xcf, 0xa9, 0xee, 0x85, + 0xf4, 0x17, 0x04, 0x71, 0xf9, 0x54, 0x24, 0xf8, 0x4d, 0x88, 0xf3, 0x29, 0x23, 0xcd, 0x9a, 0x97, + 0x57, 0xb3, 0xe5, 0x98, 0x73, 0x83, 0x67, 0xeb, 0xe9, 0xed, 0xa4, 0xaf, 0xb7, 0x55, 0x58, 0xd0, + 0xfb, 0xa6, 0x27, 0x85, 0x29, 0x96, 0x42, 0x46, 0x20, 0x85, 0x12, 0xb3, 0xe3, 0x39, 0xcc, 0xeb, + 0x9e, 0x2b, 0xe9, 0x06, 0x60, 0x4e, 0xd1, 0x82, 0x49, 0x3a, 0xc3, 0x4a, 0xfd, 0xd4, 0xc3, 0x0f, + 0xfa, 0xec, 0x85, 0x90, 0xf4, 0x63, 0x04, 0x57, 0x07, 0x40, 0x70, 0x96, 0xbe, 0x0b, 0xc0, 0xde, + 0xde, 0x2a, 0x5d, 0xe5, 0x3c, 0xbd, 0x29, 0xc8, 0x53, 0x96, 0xfa, 0x6c, 0xd3, 0xf6, 0x29, 0xcc, + 0xd4, 0x5b, 0x90, 0xdc, 0x27, 0x66, 0x76, 0xa0, 0x83, 0xc3, 0x8a, 0xf8, 0x33, 0x04, 0xaf, 0x50, + 0xfc, 0x83, 0x36, 0x17, 0x52, 0xcc, 0x4d, 0x58, 0xb3, 0x06, 0x6d, 0x9c, 0x0c, 0x7e, 0x87, 0x60, + 0x2d, 0x22, 0x03, 0xde, 0x89, 0x3a, 0xe0, 0x00, 0xdb, 0xed, 0x8e, 0x6c, 0x0a, 0x74, 0x24, 0x80, + 0x26, 0x3e, 0x38, 0x23, 0xe2, 0x1d, 0xfa, 0x62, 0x06, 0xe2, 0x07, 0x34, 0x40, 0xa1, 0xa3, 0xb4, + 0x88, 0x9d, 0xd7, 0x0f, 0xe1, 0x9a, 0x4a, 0xaf, 0x6b, 0x8d, 0xb6, 0x62, 0x18, 0xea, 0xb1, 0xda, + 0xf0, 0x7e, 0xff, 0xf8, 0x60, 0x7d, 0x43, 0xe4, 0xdd, 0x40, 0xbd, 0xe4, 0x7c, 0x4e, 0xac, 0xb9, + 0x7a, 0xe7, 0xa5, 0xf2, 0xcb, 0x6a, 0xd4, 0x4d, 0xac, 0xc2, 0x72, 0x5d, 0xef, 0x6b, 0x4d, 0xba, + 0xc9, 0xe8, 0xea, 0xed, 0x33, 0x3b, 0xf0, 0x34, 0x0b, 0x7c, 0x57, 0x20, 0xf0, 0x2e, 0x37, 0x3f, + 0xd4, 0xdb, 0x67, 0x4e, 0x44, 0x5c, 0x0f, 0xac, 0xe2, 0xef, 0xc1, 0x15, 0x1a, 0xa1, 0xad, 0x6a, + 0xc4, 0x8e, 0x32, 0xc3, 0xa2, 0x6c, 0x08, 0x44, 0x39, 0xe4, 0x96, 0x4e, 0x84, 0xc5, 0xae, 0x6f, + 0x05, 0x7f, 0x00, 0x4b, 0x06, 0x69, 0x75, 0x88, 0xe6, 0xdb, 0x40, 0x24, 0x2e, 0x09, 0xe7, 0x51, + 0xf1, 0x58, 0xbb, 0x79, 0x18, 0x81, 0xd5, 0xc8, 0xb9, 0xf8, 0x2e, 0xcc, 0xd7, 0x15, 0x43, 0x6d, + 0xf8, 0xdf, 0xeb, 0xf7, 0x05, 0x42, 0xbf, 0x43, 0x77, 0x3f, 0x59, 0x67, 0xf3, 0x63, 0xbf, 0xe1, + 0x99, 0x37, 0x1e, 0xf4, 0x21, 0x5c, 0x3a, 0x26, 0x8a, 0xd9, 0xef, 0x59, 0xa3, 0xb5, 0x28, 0xc4, + 0x89, 0x00, 0xdd, 0xd2, 0x7b, 0x96, 0x8f, 0xb2, 0xed, 0x4c, 0xfa, 0x04, 0xc1, 0x25, 0xbe, 0x88, + 0x57, 0x61, 0x69, 0x4f, 0xce, 0x56, 0x8f, 0xca, 0x72, 0xed, 0xa8, 0x58, 0x39, 0x94, 0x73, 0x85, + 0xbd, 0x82, 0x9c, 0x8f, 0xbd, 0x84, 0x31, 0x2c, 0xe6, 0x0e, 0xb2, 0x95, 0x4a, 0x61, 0xaf, 0x90, + 0xcb, 0x56, 0x0b, 0xa5, 0x62, 0x0c, 0xe1, 0x18, 0xcc, 0xef, 0x96, 0x8e, 0x8a, 0xf9, 0x42, 0x71, + 0xbf, 0xb6, 0x5b, 0x7a, 0x14, 0x9b, 0xc0, 0x2f, 0xc3, 0xd5, 0x52, 0xb9, 0x20, 0x17, 0xab, 0x72, + 0xbe, 0xe6, 0xbb, 0x35, 0x83, 0xe3, 0xb0, 0xe0, 0xac, 0x1c, 0x96, 0x0e, 0xde, 0x8f, 0x4d, 0xe2, + 0x79, 0xb8, 0x4c, 0xff, 0x3a, 0x28, 0x14, 0xe5, 0xd8, 0x14, 0xf5, 0x56, 0x91, 0xf7, 0xdf, 
0x93, + 0x8b, 0x55, 0xcb, 0xff, 0xf4, 0x6e, 0x0c, 0x16, 0x7b, 0x16, 0x68, 0x5e, 0x4f, 0xe9, 0x97, 0xf6, + 0x00, 0x3d, 0x54, 0x9b, 0x44, 0xf7, 0x0c, 0xd0, 0x09, 0xbd, 0x7e, 0xee, 0x01, 0x62, 0x5e, 0xa3, + 0x06, 0xe8, 0x24, 0xea, 0x26, 0xee, 0xc1, 0xaa, 0x5e, 0xff, 0x3e, 0x69, 0x98, 0xb5, 0x26, 0x31, + 0x49, 0xc3, 0x1b, 0x7b, 0x5a, 0x98, 0x00, 0x25, 0xe6, 0x21, 0x6f, 0x3b, 0x70, 0xe2, 0x5e, 0xd5, + 0xc3, 0x6e, 0x60, 0x1d, 0x56, 0x78, 0x4c, 0xb3, 0xa7, 0x34, 0x1e, 0xd3, 0xd9, 0xf5, 0x0d, 0xd4, + 0x3d, 0xe1, 0x90, 0x55, 0x6e, 0xef, 0x44, 0x5c, 0xd6, 0x43, 0xd6, 0x71, 0x05, 0xe6, 0xc9, 0x09, + 0xd1, 0x4c, 0xff, 0x54, 0x89, 0x6c, 0x59, 0x64, 0x6a, 0xe6, 0x78, 0x9f, 0x23, 0xee, 0xe5, 0x97, + 0x6d, 0x8e, 0xbc, 0xac, 0x0b, 0xce, 0x51, 0xfb, 0x9c, 0x63, 0xb4, 0x0c, 0xb1, 0xd2, 0xee, 0xbb, + 0x72, 0xae, 0x5a, 0xcb, 0xcb, 0x55, 0x39, 0xc7, 0x56, 0x27, 0xf0, 0x12, 0x5c, 0xe1, 0xab, 0xd5, + 0x72, 0x36, 0xf7, 0xcd, 0x42, 0x71, 0x3f, 0x36, 0x89, 0x67, 0x61, 0x5a, 0x7e, 0x28, 0x17, 0xab, + 0xb1, 0xa9, 0x90, 0xe1, 0xf8, 0xdb, 0x14, 0xc4, 0x18, 0xcc, 0x2a, 0x39, 0x75, 0x3e, 0x9a, 0x1f, + 0x42, 0xd2, 0xa4, 0x9f, 0xa6, 0x61, 0xa3, 0xb1, 0x2d, 0x90, 0x3f, 0xf5, 0x19, 0x31, 0x19, 0x09, + 0x33, 0xe2, 0x1e, 0x7e, 0x8a, 0x60, 0x8d, 0x45, 0x27, 0x9a, 0xa9, 0x9a, 0x67, 0x35, 0x72, 0x4a, + 0xc9, 0x1a, 0x9c, 0x8f, 0x1d, 0x41, 0x00, 0x32, 0x73, 0x23, 0x3b, 0x5e, 0x1c, 0x08, 0x2c, 0xc7, + 0xf0, 0xbb, 0x17, 0xc3, 0xb1, 0x23, 0x97, 0x63, 0x33, 0x8c, 0x63, 0xdb, 0xa2, 0x1c, 0xf3, 0x34, + 0x2f, 0x48, 0xb1, 0x6f, 0x0b, 0x50, 0x6c, 0x15, 0x96, 0xaa, 0xf2, 0xa3, 0x6a, 0x2d, 0xc0, 0xb3, + 0x24, 0xac, 0xb0, 0x1b, 0x72, 0xb1, 0x5a, 0xa8, 0xbe, 0x5f, 0x93, 0x1f, 0x51, 0x62, 0x59, 0x6c, + 0x0b, 0x61, 0xd3, 0xa7, 0x13, 0xfc, 0x55, 0x9b, 0xed, 0x37, 0x55, 0x7d, 0xd4, 0x86, 0xf0, 0xff, + 0x74, 0x60, 0xbd, 0xd8, 0x83, 0xd5, 0xdc, 0x16, 0xab, 0x66, 0xf6, 0x28, 0x5f, 0x28, 0xd1, 0x29, + 0x2c, 0x56, 0x72, 0xe5, 0xc2, 0xa1, 0x55, 0x4d, 0xe9, 0x01, 0x3b, 0x4d, 0xcb, 0xa7, 0x4a, 0xa7, + 0xdb, 0x26, 0xc3, 0x8e, 0x5b, 0x11, 0x7b, 0x65, 0xe7, 0x94, 0xcd, 0x5d, 0x5c, 0xe8, 0x29, 0xdb, + 0xc5, 0xe0, 0x9e, 0xb2, 0x09, 0x5f, 0x1b, 0xe3, 0x94, 0x6d, 0x57, 0xc3, 0xb1, 0x15, 0xde, 0x19, + 0x3f, 0x43, 0x70, 0xdd, 0x92, 0x44, 0x5c, 0x2a, 0x54, 0xba, 0xa4, 0x51, 0x19, 0x2d, 0x8e, 0x34, + 0x61, 0x29, 0x44, 0xa8, 0xe2, 0x0c, 0xbc, 0x23, 0xbe, 0xc3, 0xf7, 0x44, 0x8c, 0x2b, 0x83, 0x4b, + 0xd2, 0x06, 0x5c, 0x73, 0x0f, 0x57, 0x41, 0x70, 0x61, 0x67, 0x93, 0x8f, 0xfd, 0x67, 0x13, 0xd7, + 0xe8, 0x42, 0x5a, 0xfd, 0x07, 0x04, 0xd7, 0xa3, 0xd0, 0xf0, 0xa6, 0x1f, 0xc3, 0x72, 0x48, 0x25, + 0x6d, 0x02, 0x9c, 0xaf, 0x94, 0x38, 0x50, 0x4a, 0x71, 0x52, 0xdc, 0x81, 0xeb, 0xbe, 0x13, 0xa1, + 0x58, 0xd9, 0x7f, 0x8c, 0x20, 0x61, 0x51, 0xa9, 0xe0, 0x0a, 0x82, 0xa3, 0x2a, 0x7e, 0x08, 0x73, + 0x1e, 0xf9, 0x70, 0x2c, 0x3d, 0xc6, 0x8d, 0xe1, 0x75, 0x21, 0xdd, 0x84, 0xab, 0xfb, 0xc4, 0x0c, + 0x81, 0x10, 0x86, 0x39, 0x0d, 0x09, 0x2b, 0x53, 0xc1, 0xe7, 0x7f, 0x82, 0x60, 0x95, 0x36, 0xd3, + 0xf3, 0xf8, 0x85, 0x90, 0xea, 0x13, 0x04, 0x89, 0x20, 0x0e, 0x4e, 0xa7, 0x32, 0xcc, 0x7b, 0x0a, + 0x62, 0xd3, 0x68, 0xdc, 0xa2, 0xfa, 0x7c, 0x88, 0x52, 0xe7, 0xf6, 0xe7, 0xaf, 0xc2, 0x12, 0x3d, + 0x9e, 0x1f, 0x70, 0xd7, 0x15, 0x4b, 0x3c, 0xc7, 0xbf, 0x45, 0xb0, 0xe0, 0x93, 0x5e, 0xb1, 0xc8, + 0x46, 0x39, 0x4c, 0xac, 0x4d, 0x8e, 0x21, 0x3b, 0x4a, 0xb7, 0x9e, 0xfe, 0xeb, 0xbf, 0xbf, 0x98, + 0x48, 0x49, 0xaf, 0x39, 0xba, 0xf3, 0x87, 0x56, 0x57, 0x76, 0xba, 0x3d, 0x9d, 0x6e, 0xb6, 0x8d, + 0x4c, 0xea, 0x23, 0x5b, 0x32, 0x37, 0xb6, 0x50, 0x0a, 0x7f, 0x8a, 0x00, 0x5c, 0xb5, 0x16, 0x8b, + 0x0c, 0x5f, 0x40, 
0xdc, 0x1d, 0x0b, 0xe2, 0xd7, 0x18, 0xc4, 0xd7, 0xb1, 0x07, 0x22, 0xa5, 0x9a, + 0x07, 0xa0, 0x83, 0x2f, 0x93, 0xfa, 0x08, 0x7f, 0x8e, 0x60, 0xde, 0xab, 0xcf, 0xe2, 0xaf, 0x8b, + 0x7c, 0x93, 0x83, 0xa2, 0x72, 0xf2, 0xde, 0xd8, 0x76, 0x16, 0xbd, 0xc2, 0x00, 0x0f, 0xa9, 0x29, + 0xfe, 0x39, 0x82, 0x05, 0x9f, 0x90, 0x2b, 0xd4, 0xf9, 0x30, 0xe9, 0x37, 0xb9, 0x62, 0x1b, 0xda, + 0x0a, 0x7f, 0x5a, 0xee, 0x74, 0xcd, 0x33, 0x1b, 0x51, 0x4a, 0xb0, 0x84, 0xcf, 0x10, 0x80, 0x2b, + 0x17, 0x0b, 0xb5, 0x38, 0xa0, 0x2e, 0x27, 0xd7, 0x6c, 0x2b, 0xcf, 0xff, 0x2b, 0xd2, 0x25, 0xfb, + 0xff, 0x15, 0xd2, 0x36, 0x83, 0x74, 0x57, 0xba, 0x25, 0x04, 0x69, 0x4b, 0x75, 0xfc, 0x53, 0x0e, + 0x52, 0x80, 0xae, 0xa4, 0x2c, 0x04, 0x30, 0xa0, 0x40, 0xbf, 0x60, 0x80, 0xe4, 0xd4, 0x0b, 0xf0, + 0xf7, 0x08, 0xe6, 0x3c, 0x7a, 0x31, 0xbe, 0x2b, 0x3e, 0x25, 0x1e, 0x7d, 0x39, 0x39, 0x8e, 0x30, + 0x2b, 0xbd, 0xc9, 0x00, 0x6f, 0xe2, 0x0d, 0x11, 0xc0, 0x19, 0x47, 0xc5, 0xa5, 0x0d, 0xff, 0x33, + 0x82, 0x05, 0x9f, 0x5c, 0x8c, 0xc7, 0x21, 0xbf, 0x57, 0xe5, 0x4e, 0xde, 0x1f, 0xdf, 0x90, 0x8f, + 0x4d, 0x08, 0xfe, 0xc0, 0xd8, 0x78, 0x4b, 0xee, 0xa6, 0x80, 0xff, 0x8e, 0x60, 0x29, 0x44, 0x61, + 0xc6, 0x3b, 0x62, 0x65, 0x8f, 0xd0, 0x75, 0x93, 0xe7, 0x51, 0x61, 0xa5, 0x2c, 0x4b, 0x63, 0x1b, + 0xbf, 0x29, 0xd4, 0x86, 0x80, 0x64, 0x4b, 0xdb, 0xf1, 0x1f, 0xae, 0xde, 0x07, 0xb4, 0x63, 0xfc, + 0x40, 0xb0, 0xba, 0x51, 0xba, 0x79, 0xf2, 0xed, 0xf3, 0x3b, 0xe0, 0x6d, 0x0a, 0xc9, 0x6f, 0x78, + 0x9b, 0x82, 0xaa, 0xf4, 0x1f, 0x11, 0xac, 0x84, 0x2b, 0xea, 0xf8, 0x6d, 0xe1, 0x57, 0x5f, 0x54, + 0xd3, 0xa2, 0xde, 0x81, 0x1c, 0x77, 0xea, 0x39, 0xfa, 0xf2, 0x6b, 0x04, 0xe0, 0xca, 0x96, 0x42, + 0xaf, 0x9d, 0x80, 0xca, 0x39, 0xea, 0xb5, 0xf3, 0x16, 0x83, 0x79, 0x5f, 0xda, 0x14, 0x2d, 0x2f, + 0x93, 0xcf, 0xb7, 0x58, 0x60, 0xfa, 0xe6, 0x71, 0x30, 0x32, 0x49, 0x48, 0x1c, 0xa3, 0x57, 0x41, + 0x7a, 0xe1, 0x18, 0x99, 0x42, 0xe9, 0x62, 0x7c, 0x86, 0x60, 0xd6, 0x91, 0x14, 0xf0, 0xe6, 0x39, + 0x04, 0x88, 0x51, 0x08, 0x77, 0x18, 0xc2, 0x7b, 0xd2, 0x6d, 0x51, 0x84, 0x26, 0x39, 0x35, 0x43, + 0x8a, 0xc8, 0x8e, 0xe9, 0xe2, 0x45, 0xf4, 0x9e, 0xea, 0x5f, 0x78, 0x11, 0x15, 0xea, 0xdc, 0xc5, + 0xf8, 0x27, 0x6b, 0x1f, 0xc6, 0x4f, 0xb6, 0xa2, 0xfb, 0x30, 0xbf, 0x2c, 0x90, 0x1c, 0xe3, 0xec, + 0x2c, 0x1d, 0x30, 0xc0, 0x7b, 0x38, 0x7f, 0xde, 0x01, 0xca, 0xd8, 0x87, 0x6f, 0x3a, 0x4b, 0xff, + 0xe0, 0xdb, 0x34, 0xfb, 0x80, 0x2f, 0xbc, 0x4d, 0x1b, 0x50, 0x25, 0x84, 0xb7, 0x69, 0x83, 0x4a, + 0x42, 0x58, 0x3e, 0xc3, 0x1a, 0x10, 0xfa, 0x4a, 0x70, 0x52, 0xc2, 0xff, 0x44, 0xb0, 0x1a, 0xa1, + 0x13, 0xe0, 0xac, 0xf0, 0x4e, 0x3e, 0xea, 0x3c, 0x99, 0x3c, 0xd7, 0x19, 0x57, 0xda, 0x62, 0x29, + 0xde, 0x91, 0x32, 0x43, 0x77, 0xa2, 0xc1, 0x83, 0x30, 0xe5, 0xd7, 0x5f, 0x11, 0x2c, 0x87, 0x09, + 0x0b, 0xf8, 0xad, 0xb1, 0x3e, 0xaa, 0x2f, 0x2a, 0x95, 0x7b, 0x2c, 0x95, 0x0d, 0x9c, 0x89, 0x66, + 0x5f, 0x30, 0x0f, 0x4a, 0xb4, 0x7f, 0x23, 0x58, 0x09, 0x97, 0x17, 0xf0, 0x98, 0x1f, 0xc3, 0xa0, + 0x4e, 0x92, 0xcc, 0x3e, 0x87, 0x07, 0x4e, 0xc3, 0x90, 0xc4, 0x84, 0x7a, 0x84, 0x3f, 0x43, 0xb0, + 0x1a, 0xa1, 0x42, 0x08, 0x31, 0x6e, 0xb8, 0x82, 0x11, 0xf9, 0x1d, 0xe5, 0x78, 0x53, 0x63, 0x37, + 0xe2, 0x37, 0x08, 0xe2, 0x01, 0xf9, 0x03, 0x6f, 0x0b, 0xcf, 0x46, 0x50, 0x81, 0x18, 0xf5, 0x8a, + 0xbd, 0xc3, 0xa0, 0xa6, 0xa5, 0x37, 0x86, 0x96, 0xd6, 0x7b, 0x8c, 0xa7, 0xc4, 0xff, 0x0c, 0xc1, + 0xa2, 0x5f, 0x20, 0xc1, 0xf7, 0xc5, 0x28, 0x1f, 0x82, 0x70, 0x4c, 0x51, 0x41, 0xda, 0x60, 0x90, + 0x6f, 0xe2, 0x37, 0xa2, 0xab, 0xeb, 0xc5, 0x4b, 0xeb, 0xfa, 0x05, 0x82, 0xd8, 0xa0, 0xd4, 0x81, + 0xb7, 0x04, 0x89, 0x19, 0xa2, 0xd3, 0x24, 
0xb7, 0xcf, 0x65, 0xcb, 0xe9, 0x1c, 0x92, 0xc0, 0x88, + 0x9a, 0xe3, 0x5f, 0x21, 0x88, 0x07, 0x44, 0x26, 0x21, 0x62, 0x44, 0x49, 0x53, 0x91, 0xe4, 0xe5, + 0xe8, 0x52, 0xe2, 0xe5, 0xdd, 0x3d, 0x85, 0xd7, 0x1a, 0x7a, 0x67, 0x34, 0x98, 0x43, 0xf4, 0x9d, + 0xf7, 0xf8, 0x43, 0x2d, 0xbd, 0xad, 0x68, 0xad, 0xb4, 0xde, 0x6b, 0x65, 0x5a, 0x44, 0x63, 0x10, + 0xf8, 0xef, 0x12, 0x95, 0xae, 0x6a, 0x0c, 0xf9, 0xd1, 0xdf, 0xb6, 0x77, 0xb1, 0x3e, 0xc3, 0x2c, + 0x37, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x26, 0x81, 0x0d, 0xe1, 0x91, 0x29, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/dataset.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/dataset.pb.go new file mode 100644 index 0000000..5574fc2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/dataset.pb.go @@ -0,0 +1,2004 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/dataset.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DataType int32 + +const ( + DataType_DATA_TYPE_UNSPECIFIED DataType = 0 + DataType_IMAGE DataType = 1 + DataType_VIDEO DataType = 2 + DataType_TEXT DataType = 4 + DataType_AUDIO DataType = 5 +) + +var DataType_name = map[int32]string{ + 0: "DATA_TYPE_UNSPECIFIED", + 1: "IMAGE", + 2: "VIDEO", + 4: "TEXT", + 5: "AUDIO", +} +var DataType_value = map[string]int32{ + "DATA_TYPE_UNSPECIFIED": 0, + "IMAGE": 1, + "VIDEO": 2, + "TEXT": 4, + "AUDIO": 5, +} + +func (x DataType) String() string { + return proto.EnumName(DataType_name, int32(x)) +} +func (DataType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{0} +} + +// Dataset is the resource to hold your data. You can request multiple labeling +// tasks for a dataset while each one will generate an AnnotatedDataset. +type Dataset struct { + // Output only. + // Dataset resource name, format is: + // projects/{project_id}/datasets/{dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The display name of the dataset. Maximum of 64 characters. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. User-provided description of the annotation specification set. + // The description can be up to 10000 characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Output only. Time the dataset is created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. 
This is populated with the original input configs + // where ImportData is called. It is available only after the clients + // import data to this dataset. + InputConfigs []*InputConfig `protobuf:"bytes,5,rep,name=input_configs,json=inputConfigs,proto3" json:"input_configs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Dataset) Reset() { *m = Dataset{} } +func (m *Dataset) String() string { return proto.CompactTextString(m) } +func (*Dataset) ProtoMessage() {} +func (*Dataset) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{0} +} +func (m *Dataset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Dataset.Unmarshal(m, b) +} +func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Dataset.Marshal(b, m, deterministic) +} +func (dst *Dataset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Dataset.Merge(dst, src) +} +func (m *Dataset) XXX_Size() int { + return xxx_messageInfo_Dataset.Size(m) +} +func (m *Dataset) XXX_DiscardUnknown() { + xxx_messageInfo_Dataset.DiscardUnknown(m) +} + +var xxx_messageInfo_Dataset proto.InternalMessageInfo + +func (m *Dataset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Dataset) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Dataset) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Dataset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Dataset) GetInputConfigs() []*InputConfig { + if m != nil { + return m.InputConfigs + } + return nil +} + +// The configuration of input data, including data type, location, etc. +type InputConfig struct { + // Required. Where the data is from. + // + // Types that are valid to be assigned to Source: + // *InputConfig_GcsSource + Source isInputConfig_Source `protobuf_oneof:"source"` + // Required. Data type must be specifed when user tries to import data. 
+ DataType DataType `protobuf:"varint,1,opt,name=data_type,json=dataType,proto3,enum=google.cloud.datalabeling.v1beta1.DataType" json:"data_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{1} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +type isInputConfig_Source interface { + isInputConfig_Source() +} + +type InputConfig_GcsSource struct { + GcsSource *GcsSource `protobuf:"bytes,2,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*InputConfig_GcsSource) isInputConfig_Source() {} + +func (m *InputConfig) GetSource() isInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *InputConfig) GetGcsSource() *GcsSource { + if x, ok := m.GetSource().(*InputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +func (m *InputConfig) GetDataType() DataType { + if m != nil { + return m.DataType + } + return DataType_DATA_TYPE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*InputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _InputConfig_OneofMarshaler, _InputConfig_OneofUnmarshaler, _InputConfig_OneofSizer, []interface{}{ + (*InputConfig_GcsSource)(nil), + } +} + +func _InputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("InputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _InputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*InputConfig) + switch tag { + case 2: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsSource) + err := b.DecodeMessage(msg) + m.Source = &InputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _InputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Source of the GCS file to be imported. Only gcs path is allowed in +// input_uri. 
+type GcsSource struct { + // Required. The input uri of source file. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Required. The format of the gcs source. Only "text/csv" is supported. + MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{2} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *GcsSource) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// The configuration of output data. +type OutputConfig struct { + // Required. Location to output data to. + // + // Types that are valid to be assigned to Destination: + // *OutputConfig_GcsDestination + // *OutputConfig_GcsFolderDestination + Destination isOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{3} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +type isOutputConfig_Destination interface { + isOutputConfig_Destination() +} + +type OutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +type OutputConfig_GcsFolderDestination struct { + GcsFolderDestination *GcsFolderDestination `protobuf:"bytes,2,opt,name=gcs_folder_destination,json=gcsFolderDestination,proto3,oneof"` +} + +func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} + +func (*OutputConfig_GcsFolderDestination) isOutputConfig_Destination() {} + +func (m *OutputConfig) GetDestination() isOutputConfig_Destination { + if m 
!= nil { + return m.Destination + } + return nil +} + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetGcsFolderDestination() *GcsFolderDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsFolderDestination); ok { + return x.GcsFolderDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ + (*OutputConfig_GcsDestination)(nil), + (*OutputConfig_GcsFolderDestination)(nil), + } +} + +func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case *OutputConfig_GcsFolderDestination: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsFolderDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsDestination{msg} + return true, err + case 2: // destination.gcs_folder_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsFolderDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsFolderDestination{msg} + return true, err + default: + return false, nil + } +} + +func _OutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OutputConfig_GcsFolderDestination: + s := proto.Size(x.GcsFolderDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Export destination of the data.Only gcs path is allowed in +// output_uri. +type GcsDestination struct { + // Required. The output uri of destination file. + OutputUri string `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Required. The format of the gcs destination. Only "text/csv" and + // "application/json" + // are supported. 
+ MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{4} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *GcsDestination) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// Export folder destination of the data. +type GcsFolderDestination struct { + // Required. GCS folder to export data to. + OutputFolderUri string `protobuf:"bytes,1,opt,name=output_folder_uri,json=outputFolderUri,proto3" json:"output_folder_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsFolderDestination) Reset() { *m = GcsFolderDestination{} } +func (m *GcsFolderDestination) String() string { return proto.CompactTextString(m) } +func (*GcsFolderDestination) ProtoMessage() {} +func (*GcsFolderDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{5} +} +func (m *GcsFolderDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsFolderDestination.Unmarshal(m, b) +} +func (m *GcsFolderDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsFolderDestination.Marshal(b, m, deterministic) +} +func (dst *GcsFolderDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsFolderDestination.Merge(dst, src) +} +func (m *GcsFolderDestination) XXX_Size() int { + return xxx_messageInfo_GcsFolderDestination.Size(m) +} +func (m *GcsFolderDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsFolderDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsFolderDestination proto.InternalMessageInfo + +func (m *GcsFolderDestination) GetOutputFolderUri() string { + if m != nil { + return m.OutputFolderUri + } + return "" +} + +// DataItem is a piece of data, without annotation. For example, an image. +type DataItem struct { + // Output only. + // + // Types that are valid to be assigned to Payload: + // *DataItem_ImagePayload + // *DataItem_TextPayload + // *DataItem_VideoPayload + // *DataItem_AudioPayload + Payload isDataItem_Payload `protobuf_oneof:"payload"` + // Output only. 
Name of the data item, in format of: + // projects/{project_id}/datasets/{dataset_id}/dataItems/{data_item_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataItem) Reset() { *m = DataItem{} } +func (m *DataItem) String() string { return proto.CompactTextString(m) } +func (*DataItem) ProtoMessage() {} +func (*DataItem) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{6} +} +func (m *DataItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataItem.Unmarshal(m, b) +} +func (m *DataItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataItem.Marshal(b, m, deterministic) +} +func (dst *DataItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataItem.Merge(dst, src) +} +func (m *DataItem) XXX_Size() int { + return xxx_messageInfo_DataItem.Size(m) +} +func (m *DataItem) XXX_DiscardUnknown() { + xxx_messageInfo_DataItem.DiscardUnknown(m) +} + +var xxx_messageInfo_DataItem proto.InternalMessageInfo + +type isDataItem_Payload interface { + isDataItem_Payload() +} + +type DataItem_ImagePayload struct { + ImagePayload *ImagePayload `protobuf:"bytes,2,opt,name=image_payload,json=imagePayload,proto3,oneof"` +} + +type DataItem_TextPayload struct { + TextPayload *TextPayload `protobuf:"bytes,3,opt,name=text_payload,json=textPayload,proto3,oneof"` +} + +type DataItem_VideoPayload struct { + VideoPayload *VideoPayload `protobuf:"bytes,4,opt,name=video_payload,json=videoPayload,proto3,oneof"` +} + +type DataItem_AudioPayload struct { + AudioPayload *AudioPayload `protobuf:"bytes,5,opt,name=audio_payload,json=audioPayload,proto3,oneof"` +} + +func (*DataItem_ImagePayload) isDataItem_Payload() {} + +func (*DataItem_TextPayload) isDataItem_Payload() {} + +func (*DataItem_VideoPayload) isDataItem_Payload() {} + +func (*DataItem_AudioPayload) isDataItem_Payload() {} + +func (m *DataItem) GetPayload() isDataItem_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *DataItem) GetImagePayload() *ImagePayload { + if x, ok := m.GetPayload().(*DataItem_ImagePayload); ok { + return x.ImagePayload + } + return nil +} + +func (m *DataItem) GetTextPayload() *TextPayload { + if x, ok := m.GetPayload().(*DataItem_TextPayload); ok { + return x.TextPayload + } + return nil +} + +func (m *DataItem) GetVideoPayload() *VideoPayload { + if x, ok := m.GetPayload().(*DataItem_VideoPayload); ok { + return x.VideoPayload + } + return nil +} + +func (m *DataItem) GetAudioPayload() *AudioPayload { + if x, ok := m.GetPayload().(*DataItem_AudioPayload); ok { + return x.AudioPayload + } + return nil +} + +func (m *DataItem) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DataItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DataItem_OneofMarshaler, _DataItem_OneofUnmarshaler, _DataItem_OneofSizer, []interface{}{ + (*DataItem_ImagePayload)(nil), + (*DataItem_TextPayload)(nil), + (*DataItem_VideoPayload)(nil), + (*DataItem_AudioPayload)(nil), + } +} + +func _DataItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DataItem) + // payload + switch x := m.Payload.(type) { + case *DataItem_ImagePayload: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImagePayload); err != nil { + return err + } + case *DataItem_TextPayload: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextPayload); err != nil { + return err + } + case *DataItem_VideoPayload: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoPayload); err != nil { + return err + } + case *DataItem_AudioPayload: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioPayload); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DataItem.Payload has unexpected type %T", x) + } + return nil +} + +func _DataItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DataItem) + switch tag { + case 2: // payload.image_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImagePayload) + err := b.DecodeMessage(msg) + m.Payload = &DataItem_ImagePayload{msg} + return true, err + case 3: // payload.text_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextPayload) + err := b.DecodeMessage(msg) + m.Payload = &DataItem_TextPayload{msg} + return true, err + case 4: // payload.video_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoPayload) + err := b.DecodeMessage(msg) + m.Payload = &DataItem_VideoPayload{msg} + return true, err + case 5: // payload.audio_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AudioPayload) + err := b.DecodeMessage(msg) + m.Payload = &DataItem_AudioPayload{msg} + return true, err + default: + return false, nil + } +} + +func _DataItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DataItem) + // payload + switch x := m.Payload.(type) { + case *DataItem_ImagePayload: + s := proto.Size(x.ImagePayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataItem_TextPayload: + s := proto.Size(x.TextPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataItem_VideoPayload: + s := proto.Size(x.VideoPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DataItem_AudioPayload: + s := proto.Size(x.AudioPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// AnnotatedDataset is a set holding annotations for data in a Dataset. Each +// labeling task will generate an AnnotatedDataset under the Dataset that the +// task is requested for. +type AnnotatedDataset struct { + // Output only. 
+ // AnnotatedDataset resource name in format of: + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. The display name of the AnnotatedDataset. It is specified in + // HumanAnnotationConfig when user starts a labeling task. Maximum of 64 + // characters. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The description of the AnnotatedDataset. It is specified in + // HumanAnnotationConfig when user starts a labeling task. Maximum of 10000 + // characters. + Description string `protobuf:"bytes,9,opt,name=description,proto3" json:"description,omitempty"` + // Output only. Source of the annotation. + AnnotationSource AnnotationSource `protobuf:"varint,3,opt,name=annotation_source,json=annotationSource,proto3,enum=google.cloud.datalabeling.v1beta1.AnnotationSource" json:"annotation_source,omitempty"` + // Output only. Type of the annotation. It is specified when starting labeling + // task. + AnnotationType AnnotationType `protobuf:"varint,8,opt,name=annotation_type,json=annotationType,proto3,enum=google.cloud.datalabeling.v1beta1.AnnotationType" json:"annotation_type,omitempty"` + // Output only. Number of examples in the annotated dataset. + ExampleCount int64 `protobuf:"varint,4,opt,name=example_count,json=exampleCount,proto3" json:"example_count,omitempty"` + // Output only. Number of examples that have annotation in the annotated + // dataset. + CompletedExampleCount int64 `protobuf:"varint,5,opt,name=completed_example_count,json=completedExampleCount,proto3" json:"completed_example_count,omitempty"` + // Output only. Per label statistics. + LabelStats *LabelStats `protobuf:"bytes,6,opt,name=label_stats,json=labelStats,proto3" json:"label_stats,omitempty"` + // Output only. Time the AnnotatedDataset was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. Additional information about AnnotatedDataset. 
+ Metadata *AnnotatedDatasetMetadata `protobuf:"bytes,10,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotatedDataset) Reset() { *m = AnnotatedDataset{} } +func (m *AnnotatedDataset) String() string { return proto.CompactTextString(m) } +func (*AnnotatedDataset) ProtoMessage() {} +func (*AnnotatedDataset) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{7} +} +func (m *AnnotatedDataset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotatedDataset.Unmarshal(m, b) +} +func (m *AnnotatedDataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotatedDataset.Marshal(b, m, deterministic) +} +func (dst *AnnotatedDataset) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotatedDataset.Merge(dst, src) +} +func (m *AnnotatedDataset) XXX_Size() int { + return xxx_messageInfo_AnnotatedDataset.Size(m) +} +func (m *AnnotatedDataset) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotatedDataset.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotatedDataset proto.InternalMessageInfo + +func (m *AnnotatedDataset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AnnotatedDataset) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *AnnotatedDataset) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *AnnotatedDataset) GetAnnotationSource() AnnotationSource { + if m != nil { + return m.AnnotationSource + } + return AnnotationSource_ANNOTATION_SOURCE_UNSPECIFIED +} + +func (m *AnnotatedDataset) GetAnnotationType() AnnotationType { + if m != nil { + return m.AnnotationType + } + return AnnotationType_ANNOTATION_TYPE_UNSPECIFIED +} + +func (m *AnnotatedDataset) GetExampleCount() int64 { + if m != nil { + return m.ExampleCount + } + return 0 +} + +func (m *AnnotatedDataset) GetCompletedExampleCount() int64 { + if m != nil { + return m.CompletedExampleCount + } + return 0 +} + +func (m *AnnotatedDataset) GetLabelStats() *LabelStats { + if m != nil { + return m.LabelStats + } + return nil +} + +func (m *AnnotatedDataset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *AnnotatedDataset) GetMetadata() *AnnotatedDatasetMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +// Metadata on AnnotatedDataset. +type AnnotatedDatasetMetadata struct { + // HumanAnnotationConfig used when requesting the human labeling task for this + // AnnotatedDataset. + HumanAnnotationConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=human_annotation_config,json=humanAnnotationConfig,proto3" json:"human_annotation_config,omitempty"` + // Specific request configuration used when requesting the labeling task. 
+ // + // Types that are valid to be assigned to AnnotationRequestConfig: + // *AnnotatedDatasetMetadata_ImageClassificationConfig + // *AnnotatedDatasetMetadata_BoundingPolyConfig + // *AnnotatedDatasetMetadata_PolylineConfig + // *AnnotatedDatasetMetadata_SegmentationConfig + // *AnnotatedDatasetMetadata_VideoClassificationConfig + // *AnnotatedDatasetMetadata_ObjectDetectionConfig + // *AnnotatedDatasetMetadata_ObjectTrackingConfig + // *AnnotatedDatasetMetadata_EventConfig + // *AnnotatedDatasetMetadata_TextClassificationConfig + // *AnnotatedDatasetMetadata_TextEntityExtractionConfig + AnnotationRequestConfig isAnnotatedDatasetMetadata_AnnotationRequestConfig `protobuf_oneof:"annotation_request_config"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotatedDatasetMetadata) Reset() { *m = AnnotatedDatasetMetadata{} } +func (m *AnnotatedDatasetMetadata) String() string { return proto.CompactTextString(m) } +func (*AnnotatedDatasetMetadata) ProtoMessage() {} +func (*AnnotatedDatasetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{8} +} +func (m *AnnotatedDatasetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotatedDatasetMetadata.Unmarshal(m, b) +} +func (m *AnnotatedDatasetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotatedDatasetMetadata.Marshal(b, m, deterministic) +} +func (dst *AnnotatedDatasetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotatedDatasetMetadata.Merge(dst, src) +} +func (m *AnnotatedDatasetMetadata) XXX_Size() int { + return xxx_messageInfo_AnnotatedDatasetMetadata.Size(m) +} +func (m *AnnotatedDatasetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotatedDatasetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotatedDatasetMetadata proto.InternalMessageInfo + +func (m *AnnotatedDatasetMetadata) GetHumanAnnotationConfig() *HumanAnnotationConfig { + if m != nil { + return m.HumanAnnotationConfig + } + return nil +} + +type isAnnotatedDatasetMetadata_AnnotationRequestConfig interface { + isAnnotatedDatasetMetadata_AnnotationRequestConfig() +} + +type AnnotatedDatasetMetadata_ImageClassificationConfig struct { + ImageClassificationConfig *ImageClassificationConfig `protobuf:"bytes,2,opt,name=image_classification_config,json=imageClassificationConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_BoundingPolyConfig struct { + BoundingPolyConfig *BoundingPolyConfig `protobuf:"bytes,3,opt,name=bounding_poly_config,json=boundingPolyConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_PolylineConfig struct { + PolylineConfig *PolylineConfig `protobuf:"bytes,4,opt,name=polyline_config,json=polylineConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_SegmentationConfig struct { + SegmentationConfig *SegmentationConfig `protobuf:"bytes,5,opt,name=segmentation_config,json=segmentationConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_VideoClassificationConfig struct { + VideoClassificationConfig *VideoClassificationConfig `protobuf:"bytes,6,opt,name=video_classification_config,json=videoClassificationConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_ObjectDetectionConfig struct { + ObjectDetectionConfig *ObjectDetectionConfig `protobuf:"bytes,7,opt,name=object_detection_config,json=objectDetectionConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_ObjectTrackingConfig struct { + ObjectTrackingConfig 
*ObjectTrackingConfig `protobuf:"bytes,8,opt,name=object_tracking_config,json=objectTrackingConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_EventConfig struct { + EventConfig *EventConfig `protobuf:"bytes,9,opt,name=event_config,json=eventConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_TextClassificationConfig struct { + TextClassificationConfig *TextClassificationConfig `protobuf:"bytes,10,opt,name=text_classification_config,json=textClassificationConfig,proto3,oneof"` +} + +type AnnotatedDatasetMetadata_TextEntityExtractionConfig struct { + TextEntityExtractionConfig *TextEntityExtractionConfig `protobuf:"bytes,11,opt,name=text_entity_extraction_config,json=textEntityExtractionConfig,proto3,oneof"` +} + +func (*AnnotatedDatasetMetadata_ImageClassificationConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_BoundingPolyConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_PolylineConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() {} + +func (*AnnotatedDatasetMetadata_SegmentationConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_VideoClassificationConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_ObjectDetectionConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_ObjectTrackingConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_EventConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() {} + +func (*AnnotatedDatasetMetadata_TextClassificationConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (*AnnotatedDatasetMetadata_TextEntityExtractionConfig) isAnnotatedDatasetMetadata_AnnotationRequestConfig() { +} + +func (m *AnnotatedDatasetMetadata) GetAnnotationRequestConfig() isAnnotatedDatasetMetadata_AnnotationRequestConfig { + if m != nil { + return m.AnnotationRequestConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetImageClassificationConfig() *ImageClassificationConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_ImageClassificationConfig); ok { + return x.ImageClassificationConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetBoundingPolyConfig() *BoundingPolyConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_BoundingPolyConfig); ok { + return x.BoundingPolyConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetPolylineConfig() *PolylineConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_PolylineConfig); ok { + return x.PolylineConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetSegmentationConfig() *SegmentationConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_SegmentationConfig); ok { + return x.SegmentationConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetVideoClassificationConfig() *VideoClassificationConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_VideoClassificationConfig); ok { + return x.VideoClassificationConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetObjectDetectionConfig() *ObjectDetectionConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_ObjectDetectionConfig); ok { + return x.ObjectDetectionConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) 
GetObjectTrackingConfig() *ObjectTrackingConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_ObjectTrackingConfig); ok { + return x.ObjectTrackingConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetEventConfig() *EventConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_EventConfig); ok { + return x.EventConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetTextClassificationConfig() *TextClassificationConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_TextClassificationConfig); ok { + return x.TextClassificationConfig + } + return nil +} + +func (m *AnnotatedDatasetMetadata) GetTextEntityExtractionConfig() *TextEntityExtractionConfig { + if x, ok := m.GetAnnotationRequestConfig().(*AnnotatedDatasetMetadata_TextEntityExtractionConfig); ok { + return x.TextEntityExtractionConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*AnnotatedDatasetMetadata) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AnnotatedDatasetMetadata_OneofMarshaler, _AnnotatedDatasetMetadata_OneofUnmarshaler, _AnnotatedDatasetMetadata_OneofSizer, []interface{}{ + (*AnnotatedDatasetMetadata_ImageClassificationConfig)(nil), + (*AnnotatedDatasetMetadata_BoundingPolyConfig)(nil), + (*AnnotatedDatasetMetadata_PolylineConfig)(nil), + (*AnnotatedDatasetMetadata_SegmentationConfig)(nil), + (*AnnotatedDatasetMetadata_VideoClassificationConfig)(nil), + (*AnnotatedDatasetMetadata_ObjectDetectionConfig)(nil), + (*AnnotatedDatasetMetadata_ObjectTrackingConfig)(nil), + (*AnnotatedDatasetMetadata_EventConfig)(nil), + (*AnnotatedDatasetMetadata_TextClassificationConfig)(nil), + (*AnnotatedDatasetMetadata_TextEntityExtractionConfig)(nil), + } +} + +func _AnnotatedDatasetMetadata_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AnnotatedDatasetMetadata) + // annotation_request_config + switch x := m.AnnotationRequestConfig.(type) { + case *AnnotatedDatasetMetadata_ImageClassificationConfig: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_BoundingPolyConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BoundingPolyConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_PolylineConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PolylineConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_SegmentationConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SegmentationConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_VideoClassificationConfig: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_ObjectDetectionConfig: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectDetectionConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_ObjectTrackingConfig: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectTrackingConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_EventConfig: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EventConfig); err != nil { + 
return err + } + case *AnnotatedDatasetMetadata_TextClassificationConfig: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationConfig); err != nil { + return err + } + case *AnnotatedDatasetMetadata_TextEntityExtractionConfig: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextEntityExtractionConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AnnotatedDatasetMetadata.AnnotationRequestConfig has unexpected type %T", x) + } + return nil +} + +func _AnnotatedDatasetMetadata_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AnnotatedDatasetMetadata) + switch tag { + case 2: // annotation_request_config.image_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageClassificationConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_ImageClassificationConfig{msg} + return true, err + case 3: // annotation_request_config.bounding_poly_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BoundingPolyConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_BoundingPolyConfig{msg} + return true, err + case 4: // annotation_request_config.polyline_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PolylineConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_PolylineConfig{msg} + return true, err + case 5: // annotation_request_config.segmentation_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SegmentationConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_SegmentationConfig{msg} + return true, err + case 6: // annotation_request_config.video_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoClassificationConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_VideoClassificationConfig{msg} + return true, err + case 7: // annotation_request_config.object_detection_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ObjectDetectionConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_ObjectDetectionConfig{msg} + return true, err + case 8: // annotation_request_config.object_tracking_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ObjectTrackingConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_ObjectTrackingConfig{msg} + return true, err + case 9: // annotation_request_config.event_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EventConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_EventConfig{msg} + return true, err + case 10: // annotation_request_config.text_classification_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextClassificationConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_TextClassificationConfig{msg} + return true, err + case 11: // annotation_request_config.text_entity_extraction_config + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextEntityExtractionConfig) + err := b.DecodeMessage(msg) + m.AnnotationRequestConfig = &AnnotatedDatasetMetadata_TextEntityExtractionConfig{msg} + return true, err + default: + return false, nil + } +} + +func _AnnotatedDatasetMetadata_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AnnotatedDatasetMetadata) + // annotation_request_config + switch x := m.AnnotationRequestConfig.(type) { + case *AnnotatedDatasetMetadata_ImageClassificationConfig: + s := proto.Size(x.ImageClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_BoundingPolyConfig: + s := proto.Size(x.BoundingPolyConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_PolylineConfig: + s := proto.Size(x.PolylineConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_SegmentationConfig: + s := proto.Size(x.SegmentationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_VideoClassificationConfig: + s := proto.Size(x.VideoClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_ObjectDetectionConfig: + s := proto.Size(x.ObjectDetectionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_ObjectTrackingConfig: + s := proto.Size(x.ObjectTrackingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_EventConfig: + s := proto.Size(x.EventConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_TextClassificationConfig: + s := proto.Size(x.TextClassificationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnnotatedDatasetMetadata_TextEntityExtractionConfig: + s := proto.Size(x.TextEntityExtractionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Statistics about annotation specs. +type LabelStats struct { + // Map of each annotation spec's example count. Key is the annotation spec + // name and value is the number of examples for that annotation spec. 
+ ExampleCount map[string]int64 `protobuf:"bytes,1,rep,name=example_count,json=exampleCount,proto3" json:"example_count,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelStats) Reset() { *m = LabelStats{} } +func (m *LabelStats) String() string { return proto.CompactTextString(m) } +func (*LabelStats) ProtoMessage() {} +func (*LabelStats) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{9} +} +func (m *LabelStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelStats.Unmarshal(m, b) +} +func (m *LabelStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelStats.Marshal(b, m, deterministic) +} +func (dst *LabelStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelStats.Merge(dst, src) +} +func (m *LabelStats) XXX_Size() int { + return xxx_messageInfo_LabelStats.Size(m) +} +func (m *LabelStats) XXX_DiscardUnknown() { + xxx_messageInfo_LabelStats.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelStats proto.InternalMessageInfo + +func (m *LabelStats) GetExampleCount() map[string]int64 { + if m != nil { + return m.ExampleCount + } + return nil +} + +// An Example is a piece of data and its annotation. For example, an image with +// label "house". +type Example struct { + // Output only. The data part of Example. + // + // Types that are valid to be assigned to Payload: + // *Example_ImagePayload + // *Example_TextPayload + // *Example_VideoPayload + // *Example_AudioPayload + Payload isExample_Payload `protobuf_oneof:"payload"` + // Output only. Name of the example, in format of: + // projects/{project_id}/datasets/{dataset_id}/annotatedDatasets/ + // {annotated_dataset_id}/examples/{example_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. Annotations for the piece of data in Example. + // One piece of data can have multiple annotations. 
+ Annotations []*Annotation `protobuf:"bytes,5,rep,name=annotations,proto3" json:"annotations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Example) Reset() { *m = Example{} } +func (m *Example) String() string { return proto.CompactTextString(m) } +func (*Example) ProtoMessage() {} +func (*Example) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{10} +} +func (m *Example) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Example.Unmarshal(m, b) +} +func (m *Example) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Example.Marshal(b, m, deterministic) +} +func (dst *Example) XXX_Merge(src proto.Message) { + xxx_messageInfo_Example.Merge(dst, src) +} +func (m *Example) XXX_Size() int { + return xxx_messageInfo_Example.Size(m) +} +func (m *Example) XXX_DiscardUnknown() { + xxx_messageInfo_Example.DiscardUnknown(m) +} + +var xxx_messageInfo_Example proto.InternalMessageInfo + +type isExample_Payload interface { + isExample_Payload() +} + +type Example_ImagePayload struct { + ImagePayload *ImagePayload `protobuf:"bytes,2,opt,name=image_payload,json=imagePayload,proto3,oneof"` +} + +type Example_TextPayload struct { + TextPayload *TextPayload `protobuf:"bytes,6,opt,name=text_payload,json=textPayload,proto3,oneof"` +} + +type Example_VideoPayload struct { + VideoPayload *VideoPayload `protobuf:"bytes,7,opt,name=video_payload,json=videoPayload,proto3,oneof"` +} + +type Example_AudioPayload struct { + AudioPayload *AudioPayload `protobuf:"bytes,8,opt,name=audio_payload,json=audioPayload,proto3,oneof"` +} + +func (*Example_ImagePayload) isExample_Payload() {} + +func (*Example_TextPayload) isExample_Payload() {} + +func (*Example_VideoPayload) isExample_Payload() {} + +func (*Example_AudioPayload) isExample_Payload() {} + +func (m *Example) GetPayload() isExample_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *Example) GetImagePayload() *ImagePayload { + if x, ok := m.GetPayload().(*Example_ImagePayload); ok { + return x.ImagePayload + } + return nil +} + +func (m *Example) GetTextPayload() *TextPayload { + if x, ok := m.GetPayload().(*Example_TextPayload); ok { + return x.TextPayload + } + return nil +} + +func (m *Example) GetVideoPayload() *VideoPayload { + if x, ok := m.GetPayload().(*Example_VideoPayload); ok { + return x.VideoPayload + } + return nil +} + +func (m *Example) GetAudioPayload() *AudioPayload { + if x, ok := m.GetPayload().(*Example_AudioPayload); ok { + return x.AudioPayload + } + return nil +} + +func (m *Example) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Example) GetAnnotations() []*Annotation { + if m != nil { + return m.Annotations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Example) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Example_OneofMarshaler, _Example_OneofUnmarshaler, _Example_OneofSizer, []interface{}{ + (*Example_ImagePayload)(nil), + (*Example_TextPayload)(nil), + (*Example_VideoPayload)(nil), + (*Example_AudioPayload)(nil), + } +} + +func _Example_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Example) + // payload + switch x := m.Payload.(type) { + case *Example_ImagePayload: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImagePayload); err != nil { + return err + } + case *Example_TextPayload: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextPayload); err != nil { + return err + } + case *Example_VideoPayload: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoPayload); err != nil { + return err + } + case *Example_AudioPayload: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioPayload); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Example.Payload has unexpected type %T", x) + } + return nil +} + +func _Example_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Example) + switch tag { + case 2: // payload.image_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImagePayload) + err := b.DecodeMessage(msg) + m.Payload = &Example_ImagePayload{msg} + return true, err + case 6: // payload.text_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextPayload) + err := b.DecodeMessage(msg) + m.Payload = &Example_TextPayload{msg} + return true, err + case 7: // payload.video_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoPayload) + err := b.DecodeMessage(msg) + m.Payload = &Example_VideoPayload{msg} + return true, err + case 8: // payload.audio_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AudioPayload) + err := b.DecodeMessage(msg) + m.Payload = &Example_AudioPayload{msg} + return true, err + default: + return false, nil + } +} + +func _Example_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Example) + // payload + switch x := m.Payload.(type) { + case *Example_ImagePayload: + s := proto.Size(x.ImagePayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Example_TextPayload: + s := proto.Size(x.TextPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Example_VideoPayload: + s := proto.Size(x.VideoPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Example_AudioPayload: + s := proto.Size(x.AudioPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Container of information about an image. +type ImagePayload struct { + // Image format. + MimeType string `protobuf:"bytes,1,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // A byte string of a full image. + ImageThumbnail []byte `protobuf:"bytes,2,opt,name=image_thumbnail,json=imageThumbnail,proto3" json:"image_thumbnail,omitempty"` + // Image uri from the user bucket. 
+ ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImagePayload) Reset() { *m = ImagePayload{} } +func (m *ImagePayload) String() string { return proto.CompactTextString(m) } +func (*ImagePayload) ProtoMessage() {} +func (*ImagePayload) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{11} +} +func (m *ImagePayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImagePayload.Unmarshal(m, b) +} +func (m *ImagePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImagePayload.Marshal(b, m, deterministic) +} +func (dst *ImagePayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImagePayload.Merge(dst, src) +} +func (m *ImagePayload) XXX_Size() int { + return xxx_messageInfo_ImagePayload.Size(m) +} +func (m *ImagePayload) XXX_DiscardUnknown() { + xxx_messageInfo_ImagePayload.DiscardUnknown(m) +} + +var xxx_messageInfo_ImagePayload proto.InternalMessageInfo + +func (m *ImagePayload) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *ImagePayload) GetImageThumbnail() []byte { + if m != nil { + return m.ImageThumbnail + } + return nil +} + +func (m *ImagePayload) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Container of information about a piece of text. +type TextPayload struct { + // Text content. + TextContent string `protobuf:"bytes,1,opt,name=text_content,json=textContent,proto3" json:"text_content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextPayload) Reset() { *m = TextPayload{} } +func (m *TextPayload) String() string { return proto.CompactTextString(m) } +func (*TextPayload) ProtoMessage() {} +func (*TextPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{12} +} +func (m *TextPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextPayload.Unmarshal(m, b) +} +func (m *TextPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextPayload.Marshal(b, m, deterministic) +} +func (dst *TextPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextPayload.Merge(dst, src) +} +func (m *TextPayload) XXX_Size() int { + return xxx_messageInfo_TextPayload.Size(m) +} +func (m *TextPayload) XXX_DiscardUnknown() { + xxx_messageInfo_TextPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_TextPayload proto.InternalMessageInfo + +func (m *TextPayload) GetTextContent() string { + if m != nil { + return m.TextContent + } + return "" +} + +// Container of information of a video thumbnail. +type VideoThumbnail struct { + // A byte string of the video frame. + Thumbnail []byte `protobuf:"bytes,1,opt,name=thumbnail,proto3" json:"thumbnail,omitempty"` + // Time offset relative to the beginning of the video, corresponding to the + // video frame where the thumbnail has been extracted from. 
+ TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoThumbnail) Reset() { *m = VideoThumbnail{} } +func (m *VideoThumbnail) String() string { return proto.CompactTextString(m) } +func (*VideoThumbnail) ProtoMessage() {} +func (*VideoThumbnail) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{13} +} +func (m *VideoThumbnail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoThumbnail.Unmarshal(m, b) +} +func (m *VideoThumbnail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoThumbnail.Marshal(b, m, deterministic) +} +func (dst *VideoThumbnail) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoThumbnail.Merge(dst, src) +} +func (m *VideoThumbnail) XXX_Size() int { + return xxx_messageInfo_VideoThumbnail.Size(m) +} +func (m *VideoThumbnail) XXX_DiscardUnknown() { + xxx_messageInfo_VideoThumbnail.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoThumbnail proto.InternalMessageInfo + +func (m *VideoThumbnail) GetThumbnail() []byte { + if m != nil { + return m.Thumbnail + } + return nil +} + +func (m *VideoThumbnail) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Container of information of a video. +type VideoPayload struct { + // Video format. + MimeType string `protobuf:"bytes,1,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Video uri from the user bucket. + VideoUri string `protobuf:"bytes,2,opt,name=video_uri,json=videoUri,proto3" json:"video_uri,omitempty"` + // The list of video thumbnails. + VideoThumbnails []*VideoThumbnail `protobuf:"bytes,3,rep,name=video_thumbnails,json=videoThumbnails,proto3" json:"video_thumbnails,omitempty"` + // FPS of the video. 
+ FrameRate float32 `protobuf:"fixed32,4,opt,name=frame_rate,json=frameRate,proto3" json:"frame_rate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoPayload) Reset() { *m = VideoPayload{} } +func (m *VideoPayload) String() string { return proto.CompactTextString(m) } +func (*VideoPayload) ProtoMessage() {} +func (*VideoPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{14} +} +func (m *VideoPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoPayload.Unmarshal(m, b) +} +func (m *VideoPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoPayload.Marshal(b, m, deterministic) +} +func (dst *VideoPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoPayload.Merge(dst, src) +} +func (m *VideoPayload) XXX_Size() int { + return xxx_messageInfo_VideoPayload.Size(m) +} +func (m *VideoPayload) XXX_DiscardUnknown() { + xxx_messageInfo_VideoPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoPayload proto.InternalMessageInfo + +func (m *VideoPayload) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *VideoPayload) GetVideoUri() string { + if m != nil { + return m.VideoUri + } + return "" +} + +func (m *VideoPayload) GetVideoThumbnails() []*VideoThumbnail { + if m != nil { + return m.VideoThumbnails + } + return nil +} + +func (m *VideoPayload) GetFrameRate() float32 { + if m != nil { + return m.FrameRate + } + return 0 +} + +// Container of information of an audio. +type AudioPayload struct { + // Audio uri in user bucket. + AudioUri string `protobuf:"bytes,1,opt,name=audio_uri,json=audioUri,proto3" json:"audio_uri,omitempty"` + // Sample rate in Hertz of the audio data sent in all + // `RecognitionAudio` messages. This field is optional for `FLAC` and `WAV` + // audio files and required for all other audio formats. For details, + // see [AudioEncoding][google.cloud.datalabeling.v1beta1.AudioPayload.AudioEncoding]. 
+ SampleRateHertz int32 `protobuf:"varint,3,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioPayload) Reset() { *m = AudioPayload{} } +func (m *AudioPayload) String() string { return proto.CompactTextString(m) } +func (*AudioPayload) ProtoMessage() {} +func (*AudioPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_dataset_800ce425cbaef310, []int{15} +} +func (m *AudioPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioPayload.Unmarshal(m, b) +} +func (m *AudioPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioPayload.Marshal(b, m, deterministic) +} +func (dst *AudioPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioPayload.Merge(dst, src) +} +func (m *AudioPayload) XXX_Size() int { + return xxx_messageInfo_AudioPayload.Size(m) +} +func (m *AudioPayload) XXX_DiscardUnknown() { + xxx_messageInfo_AudioPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioPayload proto.InternalMessageInfo + +func (m *AudioPayload) GetAudioUri() string { + if m != nil { + return m.AudioUri + } + return "" +} + +func (m *AudioPayload) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func init() { + proto.RegisterType((*Dataset)(nil), "google.cloud.datalabeling.v1beta1.Dataset") + proto.RegisterType((*InputConfig)(nil), "google.cloud.datalabeling.v1beta1.InputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.datalabeling.v1beta1.GcsSource") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.datalabeling.v1beta1.OutputConfig") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.datalabeling.v1beta1.GcsDestination") + proto.RegisterType((*GcsFolderDestination)(nil), "google.cloud.datalabeling.v1beta1.GcsFolderDestination") + proto.RegisterType((*DataItem)(nil), "google.cloud.datalabeling.v1beta1.DataItem") + proto.RegisterType((*AnnotatedDataset)(nil), "google.cloud.datalabeling.v1beta1.AnnotatedDataset") + proto.RegisterType((*AnnotatedDatasetMetadata)(nil), "google.cloud.datalabeling.v1beta1.AnnotatedDatasetMetadata") + proto.RegisterType((*LabelStats)(nil), "google.cloud.datalabeling.v1beta1.LabelStats") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.datalabeling.v1beta1.LabelStats.ExampleCountEntry") + proto.RegisterType((*Example)(nil), "google.cloud.datalabeling.v1beta1.Example") + proto.RegisterType((*ImagePayload)(nil), "google.cloud.datalabeling.v1beta1.ImagePayload") + proto.RegisterType((*TextPayload)(nil), "google.cloud.datalabeling.v1beta1.TextPayload") + proto.RegisterType((*VideoThumbnail)(nil), "google.cloud.datalabeling.v1beta1.VideoThumbnail") + proto.RegisterType((*VideoPayload)(nil), "google.cloud.datalabeling.v1beta1.VideoPayload") + proto.RegisterType((*AudioPayload)(nil), "google.cloud.datalabeling.v1beta1.AudioPayload") + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.DataType", DataType_name, DataType_value) +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/dataset.proto", fileDescriptor_dataset_800ce425cbaef310) +} + +var fileDescriptor_dataset_800ce425cbaef310 = []byte{ + // 1504 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xef, 0x4e, 0x1b, 0x47, + 0x10, 0xe7, 0x30, 0x06, 0x7b, 0xce, 0x80, 0xd9, 0x42, 0x63, 0x20, 0x69, 0x89, 0xab, 0xaa, 
0x51, + 0xda, 0x98, 0x42, 0xd4, 0x36, 0x4a, 0x5a, 0x45, 0x80, 0x9d, 0x80, 0x14, 0x02, 0x3a, 0x4c, 0xd2, + 0x46, 0x91, 0xae, 0xeb, 0xbb, 0xe5, 0xb8, 0xe4, 0xfe, 0xe5, 0x6e, 0xcf, 0xc2, 0xad, 0xd4, 0x0f, + 0xfd, 0xd0, 0x67, 0xe8, 0x63, 0x54, 0xea, 0x33, 0xf4, 0x15, 0xfa, 0x14, 0xf9, 0xd8, 0x07, 0xa8, + 0xf6, 0x8f, 0xef, 0x0f, 0xd8, 0xe1, 0x90, 0x9a, 0x7e, 0xbb, 0xfd, 0xcd, 0xce, 0x6f, 0x66, 0x76, + 0xd6, 0x33, 0xb3, 0x86, 0x75, 0xcb, 0xf7, 0x2d, 0x87, 0xac, 0x1b, 0x8e, 0x1f, 0x9b, 0xeb, 0x26, + 0xa6, 0xd8, 0xc1, 0x3d, 0xe2, 0xd8, 0x9e, 0xb5, 0xde, 0xdf, 0xe8, 0x11, 0x8a, 0x37, 0x38, 0x18, + 0x11, 0xda, 0x0a, 0x42, 0x9f, 0xfa, 0xe8, 0xa6, 0x50, 0x68, 0x71, 0x85, 0x56, 0x56, 0xa1, 0x25, + 0x15, 0x56, 0x36, 0x2f, 0xe7, 0xc4, 0x9e, 0xe7, 0x53, 0x4c, 0x6d, 0xdf, 0x13, 0xb4, 0x2b, 0x0f, + 0x2f, 0xd7, 0x39, 0x8d, 0x5d, 0xec, 0xe9, 0xa9, 0xa6, 0x6e, 0xf8, 0xde, 0x89, 0x6d, 0x49, 0x82, + 0x8f, 0x24, 0x01, 0x5f, 0xf5, 0xe2, 0x93, 0x75, 0x33, 0x0e, 0xb3, 0x06, 0xae, 0x9f, 0x97, 0x47, + 0x34, 0x8c, 0x0d, 0x19, 0xd5, 0xca, 0xc7, 0xe7, 0xa5, 0xd4, 0x76, 0x49, 0x44, 0xb1, 0x1b, 0x9c, + 0x53, 0xc7, 0x81, 0x9d, 0x71, 0x3e, 0x12, 0xd2, 0xe6, 0x3f, 0x0a, 0xcc, 0xb4, 0xc5, 0x31, 0x21, + 0x04, 0x53, 0x1e, 0x76, 0x49, 0x43, 0x59, 0x53, 0x6e, 0x55, 0x35, 0xfe, 0x8d, 0x6e, 0x42, 0xcd, + 0xb4, 0xa3, 0xc0, 0xc1, 0x03, 0x9d, 0xcb, 0x26, 0xb9, 0x4c, 0x95, 0xd8, 0x53, 0xb6, 0x65, 0x0d, + 0x54, 0x93, 0x44, 0x46, 0x68, 0x07, 0x8c, 0xb8, 0x51, 0x92, 0x3b, 0x52, 0x08, 0x3d, 0x00, 0xd5, + 0x08, 0x09, 0xa6, 0x44, 0x67, 0xce, 0x35, 0xa6, 0xd6, 0x94, 0x5b, 0xea, 0xe6, 0x4a, 0x4b, 0xe6, + 0x63, 0xe8, 0x79, 0xab, 0x3b, 0xf4, 0x5c, 0x03, 0xb1, 0x9d, 0x01, 0xe8, 0x08, 0x66, 0x6d, 0x2f, + 0x88, 0xa9, 0x3c, 0xb4, 0xa8, 0x51, 0x5e, 0x2b, 0xdd, 0x52, 0x37, 0x5b, 0xad, 0x4b, 0xd3, 0xd9, + 0xda, 0x63, 0x7a, 0x3b, 0x5c, 0x4d, 0xab, 0xd9, 0xe9, 0x22, 0x6a, 0xfe, 0xa1, 0x80, 0x9a, 0x91, + 0xa2, 0x7d, 0x00, 0xcb, 0x88, 0xf4, 0xc8, 0x8f, 0x43, 0x43, 0x04, 0xa9, 0x6e, 0x7e, 0x51, 0xc0, + 0xc2, 0x63, 0x23, 0x3a, 0xe2, 0x3a, 0xbb, 0x13, 0x5a, 0xd5, 0x1a, 0x2e, 0xd0, 0x2e, 0x54, 0xd9, + 0x76, 0x9d, 0x0e, 0x02, 0x71, 0x9c, 0x73, 0x9b, 0x9f, 0x17, 0x60, 0x63, 0x89, 0xe8, 0x0e, 0x02, + 0xa2, 0x55, 0x4c, 0xf9, 0xb5, 0x5d, 0x81, 0x69, 0xe1, 0x54, 0xb3, 0x03, 0xd5, 0xc4, 0x1a, 0x5a, + 0x85, 0xaa, 0x38, 0x94, 0x38, 0xb4, 0x65, 0xbe, 0x2a, 0x1c, 0x38, 0x0e, 0x6d, 0x26, 0x74, 0x6d, + 0x97, 0x08, 0xeb, 0x22, 0x61, 0x15, 0x06, 0x30, 0xc2, 0xe6, 0x5b, 0x05, 0x6a, 0x07, 0x31, 0x4d, + 0x43, 0x7f, 0x09, 0xf3, 0x2c, 0x74, 0x93, 0x44, 0xd4, 0xf6, 0xf8, 0xdd, 0xe0, 0x84, 0xea, 0xe6, + 0x46, 0xb1, 0xf8, 0xdb, 0xa9, 0xe2, 0xee, 0x84, 0x36, 0x67, 0xe5, 0x10, 0xe4, 0xc3, 0x87, 0x8c, + 0xfd, 0xc4, 0x77, 0x4c, 0x12, 0xe6, 0x8c, 0x88, 0x43, 0xfe, 0xa6, 0x98, 0x91, 0x47, 0x5c, 0x3f, + 0x6f, 0x6a, 0xd1, 0x1a, 0x81, 0x6f, 0xcf, 0xf2, 0xdb, 0x38, 0x5c, 0x36, 0x9f, 0xc0, 0x5c, 0xde, + 0x47, 0x74, 0x03, 0xc0, 0xe7, 0xf1, 0x67, 0xce, 0xae, 0x2a, 0x90, 0x4b, 0x0f, 0x6f, 0x1b, 0x16, + 0x47, 0x39, 0x83, 0x6e, 0xc3, 0x82, 0xe4, 0x94, 0x81, 0xa6, 0xd4, 0xf3, 0x42, 0x20, 0x74, 0x8e, + 0x43, 0xbb, 0xf9, 0x5b, 0x09, 0x2a, 0x2c, 0xd1, 0x7b, 0x94, 0xb8, 0xe8, 0x19, 0xcc, 0xda, 0x2e, + 0xb6, 0x88, 0x1e, 0xe0, 0x81, 0xe3, 0x63, 0x53, 0x9e, 0xca, 0x7a, 0x91, 0xcb, 0xcd, 0xf4, 0x0e, + 0x85, 0xda, 0xee, 0x84, 0x56, 0xb3, 0x33, 0x6b, 0x74, 0x04, 0x35, 0x4a, 0xce, 0x68, 0x42, 0x5b, + 0xe2, 0xb4, 0x45, 0x7e, 0x33, 0x5d, 0x72, 0x46, 0x53, 0x56, 0x95, 0xa6, 0x4b, 0xe6, 0x6c, 0xdf, + 0x36, 0x89, 0x9f, 0xb0, 0x4e, 0x15, 0x76, 0xf6, 0x19, 0xd3, 0xcb, 0x38, 0xdb, 0xcf, 0xac, 0x19, + 0x2f, 0x8e, 0x4d, 
0x3b, 0xe5, 0x2d, 0x17, 0xe6, 0xdd, 0x62, 0x7a, 0x19, 0x5e, 0x9c, 0x59, 0x8f, + 0xaa, 0x67, 0xdb, 0x55, 0x98, 0x91, 0x56, 0x9a, 0x6f, 0xa7, 0xa0, 0xbe, 0x25, 0x0a, 0x22, 0x31, + 0xff, 0xdb, 0x1a, 0x58, 0xbd, 0x58, 0x03, 0x7f, 0x84, 0x85, 0x4c, 0x03, 0x90, 0x85, 0xa6, 0xc4, + 0x4b, 0xc3, 0xdd, 0x22, 0x81, 0x26, 0xba, 0xa2, 0x02, 0x68, 0x75, 0x7c, 0x0e, 0x41, 0x2f, 0x60, + 0x3e, 0x63, 0x81, 0xdf, 0xdf, 0x0a, 0xe7, 0xdf, 0xb8, 0x12, 0x3f, 0x2f, 0x40, 0x73, 0x38, 0xb7, + 0x46, 0x9f, 0xc0, 0x2c, 0x39, 0xc3, 0x6e, 0xe0, 0x10, 0xdd, 0xf0, 0x63, 0x8f, 0xf2, 0xd4, 0x97, + 0xb4, 0x9a, 0x04, 0x77, 0x18, 0x86, 0xbe, 0x86, 0x6b, 0x86, 0xcf, 0x96, 0x94, 0x98, 0x7a, 0x7e, + 0x7b, 0x99, 0x6f, 0x5f, 0x4a, 0xc4, 0x9d, 0xac, 0xde, 0x53, 0x50, 0xb9, 0x3f, 0x7a, 0x44, 0x31, + 0x8d, 0x1a, 0xd3, 0x3c, 0xfb, 0x77, 0x0a, 0x38, 0xfd, 0x84, 0x01, 0x47, 0x4c, 0x49, 0x03, 0x27, + 0xf9, 0x3e, 0xdf, 0x6e, 0x66, 0xae, 0xd4, 0x6e, 0x9e, 0x43, 0xc5, 0x25, 0x14, 0x33, 0x7b, 0x0d, + 0xe0, 0x9a, 0x0f, 0x8a, 0x1f, 0x5f, 0x72, 0x8f, 0xf6, 0x25, 0x85, 0x96, 0x90, 0x35, 0x7f, 0x07, + 0x68, 0x8c, 0xdb, 0x86, 0x02, 0xb8, 0x36, 0x66, 0x48, 0x90, 0xc5, 0xf8, 0x5e, 0x01, 0x27, 0x76, + 0x19, 0x43, 0x9a, 0x48, 0xd9, 0xf8, 0x96, 0x4e, 0x47, 0xc1, 0xe8, 0x17, 0x58, 0x15, 0x95, 0xc7, + 0x70, 0x70, 0x14, 0xd9, 0x27, 0xb6, 0x91, 0xb3, 0x2a, 0xea, 0xd0, 0xb7, 0x45, 0xeb, 0xd0, 0x4e, + 0x8e, 0x44, 0x98, 0xd8, 0x9d, 0xd0, 0x96, 0xed, 0x71, 0x42, 0x64, 0xc3, 0x62, 0xcf, 0x8f, 0x3d, + 0xd3, 0xf6, 0x2c, 0x3d, 0xf0, 0x9d, 0xc1, 0xd0, 0xb0, 0xa8, 0x54, 0x5f, 0x15, 0x30, 0xbc, 0x2d, + 0xd5, 0x0f, 0x7d, 0x67, 0x90, 0x58, 0x44, 0xbd, 0x0b, 0x28, 0xeb, 0x70, 0xcc, 0x82, 0x63, 0x7b, + 0x64, 0x68, 0x65, 0xaa, 0x70, 0x87, 0x3b, 0x94, 0x9a, 0x89, 0x85, 0xb9, 0x20, 0x87, 0xa0, 0x53, + 0xf8, 0x20, 0x22, 0x96, 0x4b, 0xbc, 0x7c, 0xda, 0xca, 0x85, 0xe3, 0x38, 0xca, 0x68, 0xa7, 0x71, + 0x44, 0x17, 0x50, 0x96, 0x32, 0x51, 0x7f, 0x47, 0xa7, 0x6c, 0xba, 0x70, 0xca, 0x78, 0x35, 0x1e, + 0x97, 0xb2, 0xfe, 0x38, 0x21, 0x0a, 0xe1, 0x9a, 0xdf, 0x7b, 0x45, 0x0c, 0xaa, 0x9b, 0x84, 0x12, + 0x23, 0x6b, 0x7b, 0xa6, 0xf0, 0x25, 0x3d, 0xe0, 0x0c, 0xed, 0x21, 0x41, 0x62, 0x77, 0xc9, 0x1f, + 0x25, 0x60, 0xf3, 0x83, 0xb4, 0x49, 0x43, 0x6c, 0xbc, 0x66, 0xb7, 0x45, 0x9a, 0xac, 0x14, 0x9e, + 0x1f, 0x84, 0xc9, 0xae, 0xd4, 0x4f, 0x2c, 0x2e, 0xfa, 0x23, 0x70, 0xd6, 0x39, 0x49, 0x9f, 0x78, + 0xc3, 0x71, 0x93, 0x97, 0xf2, 0x62, 0x9d, 0xb3, 0xc3, 0xd4, 0x12, 0x76, 0x95, 0xa4, 0x4b, 0xf4, + 0x33, 0xac, 0xf0, 0x76, 0x3c, 0x3a, 0x71, 0xc5, 0xcb, 0x0c, 0x6b, 0xce, 0x63, 0xf2, 0xd6, 0xa0, + 0x63, 0x64, 0xe8, 0x57, 0x05, 0x6e, 0x70, 0xeb, 0xc4, 0xa3, 0x36, 0x1d, 0xe8, 0xe4, 0x8c, 0x1d, + 0x65, 0xd6, 0x01, 0x95, 0x3b, 0xf0, 0x5d, 0x41, 0x07, 0x3a, 0x9c, 0xa6, 0x93, 0xb0, 0x24, 0x2e, + 0xf0, 0x18, 0x47, 0x4b, 0xb7, 0x57, 0x61, 0x39, 0x53, 0xda, 0x42, 0xf2, 0x26, 0x26, 0xd1, 0xf0, + 0x8c, 0x9b, 0x7f, 0x2a, 0x00, 0x69, 0x2d, 0x47, 0xe6, 0xf9, 0x66, 0xa3, 0xf0, 0x89, 0xff, 0xe1, + 0x95, 0x3a, 0x42, 0x2b, 0xdb, 0x62, 0x3a, 0x1e, 0x0d, 0x07, 0xf9, 0x6e, 0xb5, 0xf2, 0x10, 0x16, + 0x2e, 0x6c, 0x41, 0x75, 0x28, 0xbd, 0x26, 0x03, 0xd9, 0xfd, 0xd9, 0x27, 0x5a, 0x84, 0x72, 0x1f, + 0x3b, 0xb1, 0xe8, 0xfa, 0x25, 0x4d, 0x2c, 0xee, 0x4f, 0xde, 0x53, 0x9a, 0x7f, 0x97, 0x60, 0x46, + 0x32, 0xfc, 0x6f, 0x73, 0xdc, 0xf4, 0x7b, 0x99, 0xe3, 0x66, 0xde, 0xd3, 0x1c, 0x57, 0x79, 0x6f, + 0x73, 0x1c, 0x3a, 0x00, 0x35, 0xf3, 0x98, 0x95, 0x6f, 0xc2, 0x3b, 0x57, 0x1a, 0x74, 0xb4, 0x2c, + 0x43, 0x76, 0x30, 0x7c, 0x03, 0xb5, 0x6c, 0x52, 0xf2, 0x4f, 0x02, 0x25, 0xff, 0x24, 0x40, 0x9f, + 0xc1, 0xbc, 0xc8, 0x3c, 0x3d, 0x8d, 0xdd, 
0x9e, 0x87, 0x6d, 0x87, 0xe7, 0xbe, 0xa6, 0xcd, 0x71, + 0xb8, 0x3b, 0x44, 0xf9, 0x93, 0x8d, 0x6f, 0x64, 0x6f, 0x83, 0x92, 0x7c, 0xb2, 0x31, 0x80, 0x3d, + 0x0a, 0xbe, 0x04, 0x35, 0x93, 0x30, 0x36, 0x71, 0x8a, 0x7a, 0xe1, 0x7b, 0x94, 0xf0, 0x1f, 0x00, + 0x9f, 0x27, 0xf9, 0x4f, 0x5c, 0x40, 0xcd, 0x57, 0x30, 0xc7, 0x93, 0x91, 0x1a, 0xb8, 0x0e, 0xd5, + 0xd4, 0x07, 0x85, 0xfb, 0x90, 0x02, 0xe8, 0x3e, 0xa8, 0x6c, 0x1a, 0xd2, 0xfd, 0x93, 0x93, 0x88, + 0x50, 0x79, 0x3f, 0x97, 0x2f, 0x0c, 0x45, 0x6d, 0xf9, 0xdf, 0x83, 0x06, 0x6c, 0xf7, 0x01, 0xdf, + 0xdc, 0xfc, 0x4b, 0x81, 0x5a, 0x36, 0xf3, 0xef, 0x3e, 0x91, 0x55, 0xa8, 0x8a, 0xeb, 0xc5, 0x02, + 0x95, 0x2f, 0x28, 0x0e, 0xb0, 0xe7, 0xd5, 0x4b, 0xa8, 0x0b, 0x61, 0xe2, 0x59, 0xd4, 0x28, 0xf1, + 0xe4, 0x6d, 0x14, 0xbd, 0x7e, 0x49, 0xc4, 0xda, 0x7c, 0x3f, 0xb7, 0x8e, 0xd8, 0xdb, 0xee, 0x24, + 0xc4, 0x2e, 0xd1, 0x43, 0x4c, 0xc5, 0xff, 0x0c, 0x93, 0x5a, 0x95, 0x23, 0x1a, 0xa6, 0xa4, 0xf9, + 0x1c, 0x6a, 0xd9, 0x8b, 0xc6, 0x3c, 0x15, 0x17, 0x36, 0xf3, 0x8a, 0xe6, 0x00, 0xf3, 0xf4, 0x36, + 0x2c, 0x44, 0xa2, 0x08, 0x31, 0x32, 0xfd, 0x94, 0x84, 0xf4, 0x27, 0x9e, 0xb7, 0xb2, 0x36, 0x2f, + 0x04, 0x8c, 0x73, 0x97, 0xc1, 0xb7, 0x0f, 0xc5, 0x93, 0x8e, 0x87, 0xbf, 0x0c, 0x4b, 0xed, 0xad, + 0xee, 0x96, 0xde, 0xfd, 0xe1, 0xb0, 0xa3, 0x1f, 0x3f, 0x3d, 0x3a, 0xec, 0xec, 0xec, 0x3d, 0xda, + 0xeb, 0xb4, 0xeb, 0x13, 0xa8, 0x0a, 0xe5, 0xbd, 0xfd, 0xad, 0xc7, 0x9d, 0xba, 0xc2, 0x3e, 0x9f, + 0xed, 0xb5, 0x3b, 0x07, 0xf5, 0x49, 0x54, 0x81, 0xa9, 0x6e, 0xe7, 0xfb, 0x6e, 0x7d, 0x8a, 0x81, + 0x5b, 0xc7, 0xed, 0xbd, 0x83, 0x7a, 0x79, 0xfb, 0x0c, 0x3e, 0x35, 0x7c, 0xf7, 0xf2, 0x23, 0x39, + 0x54, 0x5e, 0xec, 0xcb, 0x4d, 0x96, 0xef, 0x60, 0xcf, 0x6a, 0xf9, 0xa1, 0xb5, 0x6e, 0x11, 0x8f, + 0x67, 0x54, 0xfe, 0x47, 0x86, 0x03, 0x3b, 0x7a, 0xc7, 0xff, 0x53, 0x0f, 0xb2, 0x60, 0x6f, 0x9a, + 0x6b, 0xde, 0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, 0x45, 0x58, 0x65, 0x41, 0x60, 0x13, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/human_annotation_config.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/human_annotation_config.pb.go new file mode 100644 index 0000000..b0dddcc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/human_annotation_config.pb.go @@ -0,0 +1,858 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/human_annotation_config.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StringAggregationType int32 + +const ( + StringAggregationType_STRING_AGGREGATION_TYPE_UNSPECIFIED StringAggregationType = 0 + // Majority vote to aggregate answers. + StringAggregationType_MAJORITY_VOTE StringAggregationType = 1 + // Unanimous answers will be adopted. 
+ StringAggregationType_UNANIMOUS_VOTE StringAggregationType = 2 + // Preserve all answers by crowd compute. + StringAggregationType_NO_AGGREGATION StringAggregationType = 3 +) + +var StringAggregationType_name = map[int32]string{ + 0: "STRING_AGGREGATION_TYPE_UNSPECIFIED", + 1: "MAJORITY_VOTE", + 2: "UNANIMOUS_VOTE", + 3: "NO_AGGREGATION", +} +var StringAggregationType_value = map[string]int32{ + "STRING_AGGREGATION_TYPE_UNSPECIFIED": 0, + "MAJORITY_VOTE": 1, + "UNANIMOUS_VOTE": 2, + "NO_AGGREGATION": 3, +} + +func (x StringAggregationType) String() string { + return proto.EnumName(StringAggregationType_name, int32(x)) +} +func (StringAggregationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{0} +} + +// Configuration for how human labeling task should be done. +type HumanAnnotationConfig struct { + // Required except for LabelAudio case. Instruction resource name. + Instruction string `protobuf:"bytes,1,opt,name=instruction,proto3" json:"instruction,omitempty"` + // Required. A human-readable name for AnnotatedDataset defined by + // users. Maximum of 64 characters + // . + AnnotatedDatasetDisplayName string `protobuf:"bytes,2,opt,name=annotated_dataset_display_name,json=annotatedDatasetDisplayName,proto3" json:"annotated_dataset_display_name,omitempty"` + // Optional. A human-readable description for AnnotatedDataset. + // The description can be up to 10000 characters long. + AnnotatedDatasetDescription string `protobuf:"bytes,3,opt,name=annotated_dataset_description,json=annotatedDatasetDescription,proto3" json:"annotated_dataset_description,omitempty"` + // Optional. A human-readable label used to logically group labeling tasks. + // This string must match the regular expression `[a-zA-Z\\d_-]{0,128}`. + LabelGroup string `protobuf:"bytes,4,opt,name=label_group,json=labelGroup,proto3" json:"label_group,omitempty"` + // Optional. The Language of this question, as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt). + // Default value is en-US. + // Only need to set this when task is language related. For example, French + // text classification or Chinese audio transcription. + LanguageCode string `protobuf:"bytes,5,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. Replication of questions. Each question will be sent to up to + // this number of contributors to label. Aggregated answers will be returned. + // Default is set to 1. + // For image related labeling, valid values are 1, 3, 5. + ReplicaCount int32 `protobuf:"varint,6,opt,name=replica_count,json=replicaCount,proto3" json:"replica_count,omitempty"` + // Optional. Maximum duration for contributors to answer a question. Default + // is 1800 seconds. + QuestionDuration *duration.Duration `protobuf:"bytes,7,opt,name=question_duration,json=questionDuration,proto3" json:"question_duration,omitempty"` + // Optional. If you want your own labeling contributors to manage and work on + // this labeling request, you can set these contributors here. We will give + // them access to the question types in crowdcompute. 
Note that these + // emails must be registered in crowdcompute worker UI: + // https://crowd-compute.appspot.com/ + ContributorEmails []string `protobuf:"bytes,9,rep,name=contributor_emails,json=contributorEmails,proto3" json:"contributor_emails,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HumanAnnotationConfig) Reset() { *m = HumanAnnotationConfig{} } +func (m *HumanAnnotationConfig) String() string { return proto.CompactTextString(m) } +func (*HumanAnnotationConfig) ProtoMessage() {} +func (*HumanAnnotationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{0} +} +func (m *HumanAnnotationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HumanAnnotationConfig.Unmarshal(m, b) +} +func (m *HumanAnnotationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HumanAnnotationConfig.Marshal(b, m, deterministic) +} +func (dst *HumanAnnotationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_HumanAnnotationConfig.Merge(dst, src) +} +func (m *HumanAnnotationConfig) XXX_Size() int { + return xxx_messageInfo_HumanAnnotationConfig.Size(m) +} +func (m *HumanAnnotationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_HumanAnnotationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_HumanAnnotationConfig proto.InternalMessageInfo + +func (m *HumanAnnotationConfig) GetInstruction() string { + if m != nil { + return m.Instruction + } + return "" +} + +func (m *HumanAnnotationConfig) GetAnnotatedDatasetDisplayName() string { + if m != nil { + return m.AnnotatedDatasetDisplayName + } + return "" +} + +func (m *HumanAnnotationConfig) GetAnnotatedDatasetDescription() string { + if m != nil { + return m.AnnotatedDatasetDescription + } + return "" +} + +func (m *HumanAnnotationConfig) GetLabelGroup() string { + if m != nil { + return m.LabelGroup + } + return "" +} + +func (m *HumanAnnotationConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *HumanAnnotationConfig) GetReplicaCount() int32 { + if m != nil { + return m.ReplicaCount + } + return 0 +} + +func (m *HumanAnnotationConfig) GetQuestionDuration() *duration.Duration { + if m != nil { + return m.QuestionDuration + } + return nil +} + +func (m *HumanAnnotationConfig) GetContributorEmails() []string { + if m != nil { + return m.ContributorEmails + } + return nil +} + +// Config for image classification human labeling task. +type ImageClassificationConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. If allow_multi_label is true, contributors are able to choose + // multiple labels for one image. + AllowMultiLabel bool `protobuf:"varint,2,opt,name=allow_multi_label,json=allowMultiLabel,proto3" json:"allow_multi_label,omitempty"` + // Optional. The type of how to aggregate answers. 
+ AnswerAggregationType StringAggregationType `protobuf:"varint,3,opt,name=answer_aggregation_type,json=answerAggregationType,proto3,enum=google.cloud.datalabeling.v1beta1.StringAggregationType" json:"answer_aggregation_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageClassificationConfig) Reset() { *m = ImageClassificationConfig{} } +func (m *ImageClassificationConfig) String() string { return proto.CompactTextString(m) } +func (*ImageClassificationConfig) ProtoMessage() {} +func (*ImageClassificationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{1} +} +func (m *ImageClassificationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageClassificationConfig.Unmarshal(m, b) +} +func (m *ImageClassificationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageClassificationConfig.Marshal(b, m, deterministic) +} +func (dst *ImageClassificationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageClassificationConfig.Merge(dst, src) +} +func (m *ImageClassificationConfig) XXX_Size() int { + return xxx_messageInfo_ImageClassificationConfig.Size(m) +} +func (m *ImageClassificationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ImageClassificationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageClassificationConfig proto.InternalMessageInfo + +func (m *ImageClassificationConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *ImageClassificationConfig) GetAllowMultiLabel() bool { + if m != nil { + return m.AllowMultiLabel + } + return false +} + +func (m *ImageClassificationConfig) GetAnswerAggregationType() StringAggregationType { + if m != nil { + return m.AnswerAggregationType + } + return StringAggregationType_STRING_AGGREGATION_TYPE_UNSPECIFIED +} + +// Config for image bounding poly (and bounding box) human labeling task. +type BoundingPolyConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. Instruction message showed on contributors UI. 
+ InstructionMessage string `protobuf:"bytes,2,opt,name=instruction_message,json=instructionMessage,proto3" json:"instruction_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPolyConfig) Reset() { *m = BoundingPolyConfig{} } +func (m *BoundingPolyConfig) String() string { return proto.CompactTextString(m) } +func (*BoundingPolyConfig) ProtoMessage() {} +func (*BoundingPolyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{2} +} +func (m *BoundingPolyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPolyConfig.Unmarshal(m, b) +} +func (m *BoundingPolyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPolyConfig.Marshal(b, m, deterministic) +} +func (dst *BoundingPolyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPolyConfig.Merge(dst, src) +} +func (m *BoundingPolyConfig) XXX_Size() int { + return xxx_messageInfo_BoundingPolyConfig.Size(m) +} +func (m *BoundingPolyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPolyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPolyConfig proto.InternalMessageInfo + +func (m *BoundingPolyConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *BoundingPolyConfig) GetInstructionMessage() string { + if m != nil { + return m.InstructionMessage + } + return "" +} + +// Config for image polyline human labeling task. +type PolylineConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. Instruction message showed on contributors UI. + InstructionMessage string `protobuf:"bytes,2,opt,name=instruction_message,json=instructionMessage,proto3" json:"instruction_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolylineConfig) Reset() { *m = PolylineConfig{} } +func (m *PolylineConfig) String() string { return proto.CompactTextString(m) } +func (*PolylineConfig) ProtoMessage() {} +func (*PolylineConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{3} +} +func (m *PolylineConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolylineConfig.Unmarshal(m, b) +} +func (m *PolylineConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolylineConfig.Marshal(b, m, deterministic) +} +func (dst *PolylineConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolylineConfig.Merge(dst, src) +} +func (m *PolylineConfig) XXX_Size() int { + return xxx_messageInfo_PolylineConfig.Size(m) +} +func (m *PolylineConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PolylineConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PolylineConfig proto.InternalMessageInfo + +func (m *PolylineConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *PolylineConfig) GetInstructionMessage() string { + if m != nil { + return m.InstructionMessage + } + return "" +} + +// Config for image segmentation +type SegmentationConfig struct { + // Required. Annotation spec set resource name. 
format: + // projects/{project_id}/annotationSpecSets/{annotation_spec_set_id} + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Instruction message showed on labelers UI. + InstructionMessage string `protobuf:"bytes,2,opt,name=instruction_message,json=instructionMessage,proto3" json:"instruction_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SegmentationConfig) Reset() { *m = SegmentationConfig{} } +func (m *SegmentationConfig) String() string { return proto.CompactTextString(m) } +func (*SegmentationConfig) ProtoMessage() {} +func (*SegmentationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{4} +} +func (m *SegmentationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SegmentationConfig.Unmarshal(m, b) +} +func (m *SegmentationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SegmentationConfig.Marshal(b, m, deterministic) +} +func (dst *SegmentationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SegmentationConfig.Merge(dst, src) +} +func (m *SegmentationConfig) XXX_Size() int { + return xxx_messageInfo_SegmentationConfig.Size(m) +} +func (m *SegmentationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SegmentationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SegmentationConfig proto.InternalMessageInfo + +func (m *SegmentationConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *SegmentationConfig) GetInstructionMessage() string { + if m != nil { + return m.InstructionMessage + } + return "" +} + +// Config for video classification human labeling task. +// Currently two types of video classification are supported: +// 1. Assign labels on the entire video. +// 2. Split the video into multiple video clips based on camera shot, and +// assign labels on each video clip. +type VideoClassificationConfig struct { + // Required. The list of annotation spec set configs. + // Since watching a video clip takes much longer time than an image, we + // support label with multiple AnnotationSpecSet at the same time. Labels + // in each AnnotationSpecSet will be shown in a group to contributors. + // Contributors can select one or more (depending on whether to allow multi + // label) from each group. + AnnotationSpecSetConfigs []*VideoClassificationConfig_AnnotationSpecSetConfig `protobuf:"bytes,1,rep,name=annotation_spec_set_configs,json=annotationSpecSetConfigs,proto3" json:"annotation_spec_set_configs,omitempty"` + // Optional. Option to apply shot detection on the video. 
+ ApplyShotDetection bool `protobuf:"varint,2,opt,name=apply_shot_detection,json=applyShotDetection,proto3" json:"apply_shot_detection,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationConfig) Reset() { *m = VideoClassificationConfig{} } +func (m *VideoClassificationConfig) String() string { return proto.CompactTextString(m) } +func (*VideoClassificationConfig) ProtoMessage() {} +func (*VideoClassificationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{5} +} +func (m *VideoClassificationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationConfig.Unmarshal(m, b) +} +func (m *VideoClassificationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationConfig.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationConfig.Merge(dst, src) +} +func (m *VideoClassificationConfig) XXX_Size() int { + return xxx_messageInfo_VideoClassificationConfig.Size(m) +} +func (m *VideoClassificationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationConfig proto.InternalMessageInfo + +func (m *VideoClassificationConfig) GetAnnotationSpecSetConfigs() []*VideoClassificationConfig_AnnotationSpecSetConfig { + if m != nil { + return m.AnnotationSpecSetConfigs + } + return nil +} + +func (m *VideoClassificationConfig) GetApplyShotDetection() bool { + if m != nil { + return m.ApplyShotDetection + } + return false +} + +// Annotation spec set with the setting of allowing multi labels or not. +type VideoClassificationConfig_AnnotationSpecSetConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. If allow_multi_label is true, contributors are able to + // choose multiple labels from one annotation spec set. 
+ AllowMultiLabel bool `protobuf:"varint,2,opt,name=allow_multi_label,json=allowMultiLabel,proto3" json:"allow_multi_label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) Reset() { + *m = VideoClassificationConfig_AnnotationSpecSetConfig{} +} +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) String() string { + return proto.CompactTextString(m) +} +func (*VideoClassificationConfig_AnnotationSpecSetConfig) ProtoMessage() {} +func (*VideoClassificationConfig_AnnotationSpecSetConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{5, 0} +} +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig.Unmarshal(m, b) +} +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig.Marshal(b, m, deterministic) +} +func (dst *VideoClassificationConfig_AnnotationSpecSetConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig.Merge(dst, src) +} +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) XXX_Size() int { + return xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig.Size(m) +} +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) XXX_DiscardUnknown() { + xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoClassificationConfig_AnnotationSpecSetConfig proto.InternalMessageInfo + +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *VideoClassificationConfig_AnnotationSpecSetConfig) GetAllowMultiLabel() bool { + if m != nil { + return m.AllowMultiLabel + } + return false +} + +// Config for video object detection human labeling task. +// Object detection will be conducted on the images extracted from the video, +// and those objects will be labeled with bounding boxes. +// User need to specify the number of images to be extracted per second as the +// extraction frame rate. +type ObjectDetectionConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. Instruction message showed on labelers UI. + InstructionMessage string `protobuf:"bytes,2,opt,name=instruction_message,json=instructionMessage,proto3" json:"instruction_message,omitempty"` + // Required. Number of frames per second to be extracted from the video. 
+ ExtractionFrameRate float64 `protobuf:"fixed64,3,opt,name=extraction_frame_rate,json=extractionFrameRate,proto3" json:"extraction_frame_rate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectDetectionConfig) Reset() { *m = ObjectDetectionConfig{} } +func (m *ObjectDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ObjectDetectionConfig) ProtoMessage() {} +func (*ObjectDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{6} +} +func (m *ObjectDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectDetectionConfig.Unmarshal(m, b) +} +func (m *ObjectDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ObjectDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectDetectionConfig.Merge(dst, src) +} +func (m *ObjectDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ObjectDetectionConfig.Size(m) +} +func (m *ObjectDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectDetectionConfig proto.InternalMessageInfo + +func (m *ObjectDetectionConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *ObjectDetectionConfig) GetInstructionMessage() string { + if m != nil { + return m.InstructionMessage + } + return "" +} + +func (m *ObjectDetectionConfig) GetExtractionFrameRate() float64 { + if m != nil { + return m.ExtractionFrameRate + } + return 0 +} + +// Config for video object tracking human labeling task. +type ObjectTrackingConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingConfig) Reset() { *m = ObjectTrackingConfig{} } +func (m *ObjectTrackingConfig) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingConfig) ProtoMessage() {} +func (*ObjectTrackingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{7} +} +func (m *ObjectTrackingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingConfig.Unmarshal(m, b) +} +func (m *ObjectTrackingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingConfig.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingConfig.Merge(dst, src) +} +func (m *ObjectTrackingConfig) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingConfig.Size(m) +} +func (m *ObjectTrackingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingConfig proto.InternalMessageInfo + +func (m *ObjectTrackingConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +// Config for video event human labeling task. +type EventConfig struct { + // Required. The list of annotation spec set resource name. 
Similar to video + // classification, we support selecting event from multiple AnnotationSpecSet + // at the same time. + AnnotationSpecSets []string `protobuf:"bytes,1,rep,name=annotation_spec_sets,json=annotationSpecSets,proto3" json:"annotation_spec_sets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventConfig) Reset() { *m = EventConfig{} } +func (m *EventConfig) String() string { return proto.CompactTextString(m) } +func (*EventConfig) ProtoMessage() {} +func (*EventConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{8} +} +func (m *EventConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventConfig.Unmarshal(m, b) +} +func (m *EventConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventConfig.Marshal(b, m, deterministic) +} +func (dst *EventConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventConfig.Merge(dst, src) +} +func (m *EventConfig) XXX_Size() int { + return xxx_messageInfo_EventConfig.Size(m) +} +func (m *EventConfig) XXX_DiscardUnknown() { + xxx_messageInfo_EventConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_EventConfig proto.InternalMessageInfo + +func (m *EventConfig) GetAnnotationSpecSets() []string { + if m != nil { + return m.AnnotationSpecSets + } + return nil +} + +// Config for text classification human labeling task. +type TextClassificationConfig struct { + // Optional. If allow_multi_label is true, contributors are able to choose + // multiple labels for one text segment. + AllowMultiLabel bool `protobuf:"varint,1,opt,name=allow_multi_label,json=allowMultiLabel,proto3" json:"allow_multi_label,omitempty"` + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,2,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + // Optional. Configs for sentiment selection. 
+ SentimentConfig *SentimentConfig `protobuf:"bytes,3,opt,name=sentiment_config,json=sentimentConfig,proto3" json:"sentiment_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextClassificationConfig) Reset() { *m = TextClassificationConfig{} } +func (m *TextClassificationConfig) String() string { return proto.CompactTextString(m) } +func (*TextClassificationConfig) ProtoMessage() {} +func (*TextClassificationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{9} +} +func (m *TextClassificationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextClassificationConfig.Unmarshal(m, b) +} +func (m *TextClassificationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextClassificationConfig.Marshal(b, m, deterministic) +} +func (dst *TextClassificationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextClassificationConfig.Merge(dst, src) +} +func (m *TextClassificationConfig) XXX_Size() int { + return xxx_messageInfo_TextClassificationConfig.Size(m) +} +func (m *TextClassificationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TextClassificationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TextClassificationConfig proto.InternalMessageInfo + +func (m *TextClassificationConfig) GetAllowMultiLabel() bool { + if m != nil { + return m.AllowMultiLabel + } + return false +} + +func (m *TextClassificationConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func (m *TextClassificationConfig) GetSentimentConfig() *SentimentConfig { + if m != nil { + return m.SentimentConfig + } + return nil +} + +// Config for setting up sentiments. +type SentimentConfig struct { + // If set to true, contributors will have the option to select sentiment of + // the label they selected, to mark it as negative or positive label. Default + // is false. + EnableLabelSentimentSelection bool `protobuf:"varint,1,opt,name=enable_label_sentiment_selection,json=enableLabelSentimentSelection,proto3" json:"enable_label_sentiment_selection,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SentimentConfig) Reset() { *m = SentimentConfig{} } +func (m *SentimentConfig) String() string { return proto.CompactTextString(m) } +func (*SentimentConfig) ProtoMessage() {} +func (*SentimentConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{10} +} +func (m *SentimentConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SentimentConfig.Unmarshal(m, b) +} +func (m *SentimentConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SentimentConfig.Marshal(b, m, deterministic) +} +func (dst *SentimentConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SentimentConfig.Merge(dst, src) +} +func (m *SentimentConfig) XXX_Size() int { + return xxx_messageInfo_SentimentConfig.Size(m) +} +func (m *SentimentConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SentimentConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SentimentConfig proto.InternalMessageInfo + +func (m *SentimentConfig) GetEnableLabelSentimentSelection() bool { + if m != nil { + return m.EnableLabelSentimentSelection + } + return false +} + +// Config for text entity extraction human labeling task. 
+type TextEntityExtractionConfig struct { + // Required. Annotation spec set resource name. + AnnotationSpecSet string `protobuf:"bytes,1,opt,name=annotation_spec_set,json=annotationSpecSet,proto3" json:"annotation_spec_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextEntityExtractionConfig) Reset() { *m = TextEntityExtractionConfig{} } +func (m *TextEntityExtractionConfig) String() string { return proto.CompactTextString(m) } +func (*TextEntityExtractionConfig) ProtoMessage() {} +func (*TextEntityExtractionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_human_annotation_config_28dd7106b95baa9e, []int{11} +} +func (m *TextEntityExtractionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextEntityExtractionConfig.Unmarshal(m, b) +} +func (m *TextEntityExtractionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextEntityExtractionConfig.Marshal(b, m, deterministic) +} +func (dst *TextEntityExtractionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextEntityExtractionConfig.Merge(dst, src) +} +func (m *TextEntityExtractionConfig) XXX_Size() int { + return xxx_messageInfo_TextEntityExtractionConfig.Size(m) +} +func (m *TextEntityExtractionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TextEntityExtractionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TextEntityExtractionConfig proto.InternalMessageInfo + +func (m *TextEntityExtractionConfig) GetAnnotationSpecSet() string { + if m != nil { + return m.AnnotationSpecSet + } + return "" +} + +func init() { + proto.RegisterType((*HumanAnnotationConfig)(nil), "google.cloud.datalabeling.v1beta1.HumanAnnotationConfig") + proto.RegisterType((*ImageClassificationConfig)(nil), "google.cloud.datalabeling.v1beta1.ImageClassificationConfig") + proto.RegisterType((*BoundingPolyConfig)(nil), "google.cloud.datalabeling.v1beta1.BoundingPolyConfig") + proto.RegisterType((*PolylineConfig)(nil), "google.cloud.datalabeling.v1beta1.PolylineConfig") + proto.RegisterType((*SegmentationConfig)(nil), "google.cloud.datalabeling.v1beta1.SegmentationConfig") + proto.RegisterType((*VideoClassificationConfig)(nil), "google.cloud.datalabeling.v1beta1.VideoClassificationConfig") + proto.RegisterType((*VideoClassificationConfig_AnnotationSpecSetConfig)(nil), "google.cloud.datalabeling.v1beta1.VideoClassificationConfig.AnnotationSpecSetConfig") + proto.RegisterType((*ObjectDetectionConfig)(nil), "google.cloud.datalabeling.v1beta1.ObjectDetectionConfig") + proto.RegisterType((*ObjectTrackingConfig)(nil), "google.cloud.datalabeling.v1beta1.ObjectTrackingConfig") + proto.RegisterType((*EventConfig)(nil), "google.cloud.datalabeling.v1beta1.EventConfig") + proto.RegisterType((*TextClassificationConfig)(nil), "google.cloud.datalabeling.v1beta1.TextClassificationConfig") + proto.RegisterType((*SentimentConfig)(nil), "google.cloud.datalabeling.v1beta1.SentimentConfig") + proto.RegisterType((*TextEntityExtractionConfig)(nil), "google.cloud.datalabeling.v1beta1.TextEntityExtractionConfig") + proto.RegisterEnum("google.cloud.datalabeling.v1beta1.StringAggregationType", StringAggregationType_name, StringAggregationType_value) +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/human_annotation_config.proto", fileDescriptor_human_annotation_config_28dd7106b95baa9e) +} + +var fileDescriptor_human_annotation_config_28dd7106b95baa9e = []byte{ + // 912 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xd1, 0x6e, 0xdb, 0x36, + 0x14, 0x9d, 0xe2, 0xb5, 0x6b, 0xe8, 0x36, 0xb1, 0x99, 0x1a, 0x55, 0xd2, 0xb5, 0xf3, 0x54, 0x0c, + 0x33, 0x0a, 0x4c, 0x5e, 0xbd, 0x97, 0x01, 0x7b, 0x28, 0x1c, 0xdb, 0xf1, 0x3c, 0xd4, 0x76, 0x20, + 0x3b, 0x05, 0x5a, 0x60, 0x20, 0x68, 0xf9, 0x46, 0xe1, 0x26, 0x91, 0xaa, 0x48, 0xb5, 0x31, 0xf6, + 0x19, 0xfb, 0x85, 0x3d, 0xee, 0x77, 0xf6, 0xb2, 0x3f, 0xd8, 0x5f, 0x0c, 0x22, 0xa5, 0xd8, 0x4b, + 0xed, 0xb5, 0xcb, 0xd0, 0xbc, 0x9e, 0x73, 0xef, 0xb9, 0x97, 0xf7, 0x5c, 0x91, 0x42, 0x4f, 0x03, + 0x21, 0x82, 0x10, 0x9a, 0x7e, 0x28, 0xd2, 0x79, 0x73, 0x4e, 0x15, 0x0d, 0xe9, 0x0c, 0x42, 0xc6, + 0x83, 0xe6, 0xeb, 0x27, 0x33, 0x50, 0xf4, 0x49, 0xf3, 0x2c, 0x8d, 0x28, 0x27, 0x94, 0x73, 0xa1, + 0xa8, 0x62, 0x82, 0x13, 0x5f, 0xf0, 0x53, 0x16, 0xb8, 0x71, 0x22, 0x94, 0xc0, 0x9f, 0x1b, 0x01, + 0x57, 0x0b, 0xb8, 0xab, 0x02, 0x6e, 0x2e, 0x70, 0xf0, 0x30, 0xaf, 0xa1, 0x13, 0x66, 0xe9, 0x69, + 0x73, 0x9e, 0x26, 0x5a, 0xc9, 0x48, 0x1c, 0x7c, 0x9a, 0xf3, 0x34, 0x66, 0xcd, 0x65, 0x19, 0x69, + 0x58, 0xe7, 0xb7, 0x12, 0xaa, 0x7d, 0x9f, 0xb5, 0xd0, 0xbe, 0xa0, 0x3a, 0xba, 0x01, 0x5c, 0x47, + 0x65, 0xc6, 0xa5, 0x4a, 0x52, 0x3f, 0x03, 0x6d, 0xab, 0x6e, 0x35, 0xb6, 0xbd, 0x55, 0x08, 0x77, + 0xd0, 0xc3, 0x5c, 0x10, 0xe6, 0x24, 0xeb, 0x4d, 0x82, 0x22, 0x73, 0x26, 0xe3, 0x90, 0x2e, 0x08, + 0xa7, 0x11, 0xd8, 0x5b, 0x3a, 0xe9, 0xfe, 0x45, 0x54, 0xd7, 0x04, 0x75, 0x4d, 0xcc, 0x88, 0x46, + 0x80, 0x0f, 0xd1, 0x83, 0x35, 0x22, 0x20, 0xfd, 0x84, 0xc5, 0xba, 0x70, 0x69, 0x83, 0xc6, 0x32, + 0x04, 0x7f, 0x86, 0xca, 0x7a, 0x2c, 0x24, 0x48, 0x44, 0x1a, 0xdb, 0x1f, 0xeb, 0x0c, 0xa4, 0xa1, + 0x7e, 0x86, 0xe0, 0x47, 0xe8, 0x4e, 0x48, 0x79, 0x90, 0xd2, 0x00, 0x88, 0x2f, 0xe6, 0x60, 0xdf, + 0xd0, 0x21, 0xb7, 0x0b, 0xb0, 0x23, 0xe6, 0x90, 0x05, 0x25, 0x10, 0x87, 0xcc, 0xa7, 0xc4, 0x17, + 0x29, 0x57, 0xf6, 0xcd, 0xba, 0xd5, 0xb8, 0xe1, 0xdd, 0xce, 0xc1, 0x4e, 0x86, 0xe1, 0x23, 0x54, + 0x7d, 0x95, 0x82, 0xd4, 0x4e, 0x15, 0x83, 0xb6, 0x3f, 0xa9, 0x5b, 0x8d, 0x72, 0x6b, 0xdf, 0xcd, + 0xcd, 0x2a, 0x9c, 0x70, 0xbb, 0x79, 0x80, 0x57, 0x29, 0x72, 0x0a, 0x04, 0x7f, 0x85, 0xb0, 0x2f, + 0xb8, 0x4a, 0xd8, 0x2c, 0x55, 0x22, 0x21, 0x10, 0x51, 0x16, 0x4a, 0x7b, 0xbb, 0x5e, 0x6a, 0x6c, + 0x7b, 0xd5, 0x15, 0xa6, 0xa7, 0x09, 0xe7, 0x2f, 0x0b, 0xed, 0x0f, 0xa2, 0xac, 0xd3, 0x90, 0x4a, + 0xc9, 0x4e, 0x99, 0xbf, 0x6a, 0x95, 0x8b, 0xf6, 0x56, 0x16, 0x48, 0xc6, 0xe0, 0x13, 0x09, 0x2a, + 0xb7, 0xac, 0xba, 0xa4, 0x26, 0x31, 0xf8, 0x13, 0x50, 0xf8, 0x31, 0xaa, 0xd2, 0x30, 0x14, 0x6f, + 0x48, 0x94, 0x86, 0x8a, 0x11, 0x3d, 0x28, 0xed, 0xd5, 0x2d, 0x6f, 0x57, 0x13, 0xc3, 0x0c, 0x7f, + 0x96, 0xc1, 0x38, 0x46, 0xf7, 0x28, 0x97, 0x6f, 0x20, 0x21, 0x34, 0x08, 0x12, 0x08, 0x4c, 0x0d, + 0xb5, 0x88, 0x41, 0x3b, 0xb3, 0xd3, 0xfa, 0xd6, 0x7d, 0xe7, 0x8e, 0xba, 0x13, 0x95, 0x30, 0x1e, + 0xb4, 0x97, 0x02, 0xd3, 0x45, 0x0c, 0x5e, 0xcd, 0x08, 0x5f, 0x82, 0x9d, 0x14, 0xe1, 0x43, 0x91, + 0xf2, 0x39, 0xe3, 0xc1, 0xb1, 0x08, 0x17, 0x57, 0x3c, 0x63, 0x13, 0xed, 0xad, 0xec, 0x2a, 0x89, + 0x40, 0x4a, 0x1a, 0x14, 0x1b, 0x89, 0x57, 0xa8, 0xa1, 0x61, 0x9c, 0x57, 0x68, 0x27, 0x2b, 0x17, + 0x32, 0x0e, 0xd7, 0x55, 0x32, 0x45, 0x78, 0x02, 0x41, 0x04, 0x5c, 0xfd, 0x1f, 0x37, 0xff, 0x73, + 0xd9, 0x3f, 0xb7, 0xd0, 0xfe, 0x73, 0x36, 0x07, 0xb1, 0x76, 0x99, 0x7e, 0xb5, 0xd0, 0xfd, 0x35, + 0xf5, 0xf3, 0x7b, 0x49, 0xda, 0x56, 0xbd, 0xd4, 0x28, 0xb7, 0xa6, 0xef, 0xe1, 0xfa, 0xc6, 0x1a, + 0x6e, 0xfb, 0xf2, 0x21, 0x0c, 0xee, 0xd9, 0x74, 0x3d, 0x21, 0xf1, 0xd7, 0xe8, 0x2e, 0x8d, 0xe3, + 
0x70, 0x41, 0xe4, 0x99, 0xc8, 0xee, 0x07, 0x05, 0xe6, 0x5a, 0x32, 0x5b, 0x8b, 0x35, 0x37, 0x39, + 0x13, 0xaa, 0x5b, 0x30, 0x07, 0x29, 0xba, 0xb7, 0xa1, 0xcc, 0x87, 0xfc, 0x5e, 0x9c, 0xdf, 0x2d, + 0x54, 0x1b, 0xcf, 0x7e, 0x02, 0x7f, 0xd9, 0xca, 0x35, 0xf9, 0x8a, 0x5b, 0xa8, 0x06, 0xe7, 0x2a, + 0xa1, 0x26, 0xfe, 0x34, 0xa1, 0x11, 0x90, 0x84, 0x2a, 0xf3, 0xa1, 0x5a, 0xde, 0xde, 0x92, 0x3c, + 0xca, 0x38, 0x8f, 0x2a, 0x70, 0x8e, 0xd0, 0x5d, 0xd3, 0xed, 0x34, 0xa1, 0xfe, 0xcf, 0x8c, 0x07, + 0x57, 0x6b, 0xd6, 0x79, 0x8a, 0xca, 0xbd, 0xd7, 0xc0, 0x8b, 0x09, 0x67, 0x76, 0xbd, 0x9d, 0x6e, + 0x96, 0x67, 0xdb, 0xc3, 0x6f, 0xe5, 0x4b, 0xe7, 0x0f, 0x0b, 0xd9, 0x53, 0x38, 0x57, 0x6b, 0x77, + 0x72, 0xad, 0x01, 0xd6, 0xfa, 0x0b, 0x6b, 0x43, 0xe7, 0x5b, 0x9b, 0xc6, 0xfc, 0x23, 0xaa, 0x48, + 0xe0, 0x8a, 0x65, 0x9f, 0x61, 0xbe, 0xe4, 0x7a, 0x60, 0xe5, 0x56, 0xeb, 0x7d, 0x6e, 0xb6, 0x22, + 0x35, 0xdf, 0xe0, 0x5d, 0xf9, 0x4f, 0xc0, 0x79, 0x89, 0x76, 0x2f, 0xc5, 0xe0, 0x3e, 0xaa, 0x03, + 0xa7, 0xb3, 0x10, 0xcc, 0x41, 0xc8, 0xb2, 0xbc, 0x84, 0x10, 0x96, 0xcf, 0xed, 0x2d, 0xef, 0x81, + 0x89, 0xd3, 0x07, 0xbb, 0x50, 0x99, 0x14, 0x41, 0xce, 0x33, 0x74, 0x90, 0x8d, 0xac, 0xc7, 0x15, + 0x53, 0x8b, 0xde, 0x85, 0xbb, 0x57, 0xb3, 0xf0, 0xf1, 0x2f, 0xa8, 0xb6, 0xf6, 0x9e, 0xc6, 0x5f, + 0xa2, 0x47, 0x93, 0xa9, 0x37, 0x18, 0xf5, 0x49, 0xbb, 0xdf, 0xf7, 0x7a, 0xfd, 0xf6, 0x74, 0x30, + 0x1e, 0x91, 0xe9, 0x8b, 0xe3, 0x1e, 0x39, 0x19, 0x4d, 0x8e, 0x7b, 0x9d, 0xc1, 0xd1, 0xa0, 0xd7, + 0xad, 0x7c, 0x84, 0xab, 0xe8, 0xce, 0xb0, 0xfd, 0xc3, 0xd8, 0x1b, 0x4c, 0x5f, 0x90, 0xe7, 0xe3, + 0x69, 0xaf, 0x62, 0x61, 0x8c, 0x76, 0x4e, 0x46, 0xed, 0xd1, 0x60, 0x38, 0x3e, 0x99, 0x18, 0x6c, + 0x2b, 0xc3, 0x46, 0xe3, 0x55, 0xad, 0x4a, 0xe9, 0xf0, 0x1c, 0x7d, 0xe1, 0x8b, 0xe8, 0xdd, 0x03, + 0x3f, 0xb6, 0x5e, 0x0e, 0xf3, 0xa0, 0x40, 0x64, 0xcf, 0xb7, 0x2b, 0x92, 0xa0, 0x19, 0x00, 0xd7, + 0x8f, 0x6e, 0xd3, 0x50, 0x34, 0x66, 0xf2, 0x5f, 0xfe, 0xb9, 0xbe, 0x5b, 0x05, 0x67, 0x37, 0x75, + 0xe6, 0x37, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x56, 0x6a, 0x3b, 0xa6, 0xac, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/instruction.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/instruction.pb.go new file mode 100644 index 0000000..76485d2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/instruction.pb.go @@ -0,0 +1,255 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/instruction.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Instruction of how to perform the labeling task for human operators. +// Currently two types of instruction are supported - CSV file and PDF. +// One of the two types instruction must be provided. 
+// CSV file is only supported for image classification task. Instructions for +// other task should be provided as PDF. +// For image classification, CSV and PDF can be provided at the same time. +type Instruction struct { + // Output only. Instruction resource name, format: + // projects/{project_id}/instructions/{instruction_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The display name of the instruction. Maximum of 64 characters. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. User-provided description of the instruction. + // The description can be up to 10000 characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Output only. Creation time of instruction. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. Last update time of instruction. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Required. The data type of this instruction. + DataType DataType `protobuf:"varint,6,opt,name=data_type,json=dataType,proto3,enum=google.cloud.datalabeling.v1beta1.DataType" json:"data_type,omitempty"` + // One of CSV and PDF instruction is required. + // Instruction from a csv file, such as for classification task. + // Csv file should have exact two columns, in the format of: + // The first column is labeled data, such as image reference, text. + // The second column is comma separated labels associated with data. + CsvInstruction *CsvInstruction `protobuf:"bytes,7,opt,name=csv_instruction,json=csvInstruction,proto3" json:"csv_instruction,omitempty"` + // One of CSV and PDF instruction is required. + // Instruction from a PDF doc. The PDF doc should be in GCS bucket. 
+ PdfInstruction *PdfInstruction `protobuf:"bytes,9,opt,name=pdf_instruction,json=pdfInstruction,proto3" json:"pdf_instruction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instruction) Reset() { *m = Instruction{} } +func (m *Instruction) String() string { return proto.CompactTextString(m) } +func (*Instruction) ProtoMessage() {} +func (*Instruction) Descriptor() ([]byte, []int) { + return fileDescriptor_instruction_e84d048d533056c9, []int{0} +} +func (m *Instruction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instruction.Unmarshal(m, b) +} +func (m *Instruction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instruction.Marshal(b, m, deterministic) +} +func (dst *Instruction) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instruction.Merge(dst, src) +} +func (m *Instruction) XXX_Size() int { + return xxx_messageInfo_Instruction.Size(m) +} +func (m *Instruction) XXX_DiscardUnknown() { + xxx_messageInfo_Instruction.DiscardUnknown(m) +} + +var xxx_messageInfo_Instruction proto.InternalMessageInfo + +func (m *Instruction) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instruction) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Instruction) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Instruction) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Instruction) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Instruction) GetDataType() DataType { + if m != nil { + return m.DataType + } + return DataType_DATA_TYPE_UNSPECIFIED +} + +func (m *Instruction) GetCsvInstruction() *CsvInstruction { + if m != nil { + return m.CsvInstruction + } + return nil +} + +func (m *Instruction) GetPdfInstruction() *PdfInstruction { + if m != nil { + return m.PdfInstruction + } + return nil +} + +// Instruction from a CSV file. +type CsvInstruction struct { + // CSV file for the instruction. Only gcs path is allowed. + GcsFileUri string `protobuf:"bytes,1,opt,name=gcs_file_uri,json=gcsFileUri,proto3" json:"gcs_file_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CsvInstruction) Reset() { *m = CsvInstruction{} } +func (m *CsvInstruction) String() string { return proto.CompactTextString(m) } +func (*CsvInstruction) ProtoMessage() {} +func (*CsvInstruction) Descriptor() ([]byte, []int) { + return fileDescriptor_instruction_e84d048d533056c9, []int{1} +} +func (m *CsvInstruction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CsvInstruction.Unmarshal(m, b) +} +func (m *CsvInstruction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CsvInstruction.Marshal(b, m, deterministic) +} +func (dst *CsvInstruction) XXX_Merge(src proto.Message) { + xxx_messageInfo_CsvInstruction.Merge(dst, src) +} +func (m *CsvInstruction) XXX_Size() int { + return xxx_messageInfo_CsvInstruction.Size(m) +} +func (m *CsvInstruction) XXX_DiscardUnknown() { + xxx_messageInfo_CsvInstruction.DiscardUnknown(m) +} + +var xxx_messageInfo_CsvInstruction proto.InternalMessageInfo + +func (m *CsvInstruction) GetGcsFileUri() string { + if m != nil { + return m.GcsFileUri + } + return "" +} + +// Instruction from a PDF file. 
+type PdfInstruction struct { + // PDF file for the instruction. Only gcs path is allowed. + GcsFileUri string `protobuf:"bytes,1,opt,name=gcs_file_uri,json=gcsFileUri,proto3" json:"gcs_file_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PdfInstruction) Reset() { *m = PdfInstruction{} } +func (m *PdfInstruction) String() string { return proto.CompactTextString(m) } +func (*PdfInstruction) ProtoMessage() {} +func (*PdfInstruction) Descriptor() ([]byte, []int) { + return fileDescriptor_instruction_e84d048d533056c9, []int{2} +} +func (m *PdfInstruction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PdfInstruction.Unmarshal(m, b) +} +func (m *PdfInstruction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PdfInstruction.Marshal(b, m, deterministic) +} +func (dst *PdfInstruction) XXX_Merge(src proto.Message) { + xxx_messageInfo_PdfInstruction.Merge(dst, src) +} +func (m *PdfInstruction) XXX_Size() int { + return xxx_messageInfo_PdfInstruction.Size(m) +} +func (m *PdfInstruction) XXX_DiscardUnknown() { + xxx_messageInfo_PdfInstruction.DiscardUnknown(m) +} + +var xxx_messageInfo_PdfInstruction proto.InternalMessageInfo + +func (m *PdfInstruction) GetGcsFileUri() string { + if m != nil { + return m.GcsFileUri + } + return "" +} + +func init() { + proto.RegisterType((*Instruction)(nil), "google.cloud.datalabeling.v1beta1.Instruction") + proto.RegisterType((*CsvInstruction)(nil), "google.cloud.datalabeling.v1beta1.CsvInstruction") + proto.RegisterType((*PdfInstruction)(nil), "google.cloud.datalabeling.v1beta1.PdfInstruction") +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/instruction.proto", fileDescriptor_instruction_e84d048d533056c9) +} + +var fileDescriptor_instruction_e84d048d533056c9 = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x41, 0x8b, 0xdb, 0x30, + 0x10, 0x85, 0x71, 0x77, 0xbb, 0x6d, 0xe4, 0x25, 0x05, 0x9f, 0x4c, 0x28, 0xd4, 0xbb, 0x50, 0x08, + 0x14, 0x24, 0x92, 0x3d, 0xee, 0xad, 0x2d, 0xa5, 0x3d, 0xb4, 0x04, 0x93, 0x5e, 0x72, 0x31, 0xb2, + 0x24, 0x0b, 0x81, 0x2c, 0x09, 0x4b, 0x0e, 0xcd, 0xdf, 0xed, 0x2f, 0x29, 0x92, 0x95, 0xc6, 0xbe, + 0xac, 0x73, 0xf3, 0xcc, 0xbc, 0xef, 0xf9, 0x31, 0x83, 0xc0, 0x13, 0xd7, 0x9a, 0x4b, 0x86, 0x88, + 0xd4, 0x3d, 0x45, 0x14, 0x3b, 0x2c, 0x71, 0xcd, 0xa4, 0x50, 0x1c, 0x1d, 0x37, 0x35, 0x73, 0x78, + 0x83, 0x84, 0xb2, 0xae, 0xeb, 0x89, 0x13, 0x5a, 0x41, 0xd3, 0x69, 0xa7, 0xb3, 0x87, 0x01, 0x82, + 0x01, 0x82, 0x63, 0x08, 0x46, 0x68, 0x85, 0xe6, 0x7d, 0x7d, 0xd3, 0x32, 0x37, 0x78, 0xae, 0x3e, + 0x44, 0x20, 0x54, 0x75, 0xdf, 0x20, 0x27, 0x5a, 0x66, 0x1d, 0x6e, 0x4d, 0x14, 0xbc, 0x8f, 0x02, + 0x6c, 0x04, 0xc2, 0x4a, 0x69, 0x87, 0x7d, 0x22, 0x3b, 0x4c, 0x1f, 0xff, 0xde, 0x80, 0xf4, 0xc7, + 0x25, 0x68, 0x96, 0x81, 0x5b, 0x85, 0x5b, 0x96, 0x27, 0x45, 0xb2, 0x5e, 0x94, 0xe1, 0x3b, 0x7b, + 0x00, 0xf7, 0x54, 0x58, 0x23, 0xf1, 0xa9, 0x0a, 0xb3, 0x57, 0x61, 0x96, 0xc6, 0xde, 0x2f, 0x2f, + 0x29, 0x40, 0x4a, 0x99, 0x25, 0x9d, 0x30, 0xde, 0x25, 0xbf, 0x89, 0x8a, 0x4b, 0x2b, 0x7b, 0x06, + 0x29, 0xe9, 0x18, 0x76, 0xac, 0xf2, 0x01, 0xf3, 0xdb, 0x22, 0x59, 0xa7, 0xdb, 0x15, 0x8c, 0x1b, + 0x39, 0xa7, 0x87, 0xfb, 0x73, 0xfa, 0x12, 0x0c, 0x72, 0xdf, 0xf0, 0x70, 0x6f, 0xe8, 0x7f, 0xf8, + 0xf5, 0x3c, 0x3c, 0xc8, 0x03, 0xfc, 0x1d, 0x2c, 0xfc, 0xca, 0x2a, 0x77, 0x32, 0x2c, 0xbf, 0x2b, + 0x92, 0xf5, 0x72, 0xfb, 0x09, 0xce, 0x5e, 
0x02, 0x7e, 0xc5, 0x0e, 0xef, 0x4f, 0x86, 0x95, 0x6f, + 0x69, 0xfc, 0xca, 0x0e, 0xe0, 0x1d, 0xb1, 0xc7, 0x6a, 0x74, 0xd8, 0xfc, 0x4d, 0x88, 0xb2, 0xb9, + 0xc2, 0xef, 0x8b, 0x3d, 0x8e, 0x16, 0x5d, 0x2e, 0xc9, 0xa4, 0xf6, 0xde, 0x86, 0x36, 0x13, 0xef, + 0xc5, 0xd5, 0xde, 0x3b, 0xda, 0x4c, 0xbc, 0xcd, 0xa4, 0x7e, 0xdc, 0x82, 0xe5, 0xf4, 0xef, 0x59, + 0x01, 0xee, 0x39, 0xb1, 0x55, 0x23, 0x24, 0xab, 0xfa, 0x4e, 0xc4, 0x73, 0x03, 0x4e, 0xec, 0x37, + 0x21, 0xd9, 0xef, 0x4e, 0x78, 0x66, 0xea, 0x3a, 0xcf, 0x7c, 0xfe, 0x03, 0x3e, 0x12, 0xdd, 0xce, + 0xe7, 0xdd, 0x25, 0x87, 0x9f, 0x51, 0xc4, 0xb5, 0xc4, 0x8a, 0x43, 0xdd, 0x71, 0xc4, 0x99, 0x0a, + 0x97, 0x8c, 0x4f, 0x00, 0x1b, 0x61, 0x5f, 0x78, 0x06, 0xcf, 0xe3, 0x66, 0x7d, 0x17, 0xc8, 0xa7, + 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x89, 0x16, 0xfd, 0x97, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/operations.pb.go new file mode 100644 index 0000000..adce195 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1/operations.pb.go @@ -0,0 +1,1489 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/datalabeling/v1beta1/operations.proto + +package datalabeling // import "google.golang.org/genproto/googleapis/cloud/datalabeling/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Response used for ImportData longrunning operation. +type ImportDataOperationResponse struct { + // Ouptut only. The name of imported dataset. + Dataset string `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + // Output only. Total number of examples requested to import + TotalCount int32 `protobuf:"varint,2,opt,name=total_count,json=totalCount,proto3" json:"total_count,omitempty"` + // Output only. Number of examples imported successfully. 
+ ImportCount int32 `protobuf:"varint,3,opt,name=import_count,json=importCount,proto3" json:"import_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDataOperationResponse) Reset() { *m = ImportDataOperationResponse{} } +func (m *ImportDataOperationResponse) String() string { return proto.CompactTextString(m) } +func (*ImportDataOperationResponse) ProtoMessage() {} +func (*ImportDataOperationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{0} +} +func (m *ImportDataOperationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDataOperationResponse.Unmarshal(m, b) +} +func (m *ImportDataOperationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDataOperationResponse.Marshal(b, m, deterministic) +} +func (dst *ImportDataOperationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDataOperationResponse.Merge(dst, src) +} +func (m *ImportDataOperationResponse) XXX_Size() int { + return xxx_messageInfo_ImportDataOperationResponse.Size(m) +} +func (m *ImportDataOperationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDataOperationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDataOperationResponse proto.InternalMessageInfo + +func (m *ImportDataOperationResponse) GetDataset() string { + if m != nil { + return m.Dataset + } + return "" +} + +func (m *ImportDataOperationResponse) GetTotalCount() int32 { + if m != nil { + return m.TotalCount + } + return 0 +} + +func (m *ImportDataOperationResponse) GetImportCount() int32 { + if m != nil { + return m.ImportCount + } + return 0 +} + +// Response used for ExportDataset longrunning operation. +type ExportDataOperationResponse struct { + // Ouptut only. The name of dataset. + // "projects/*/datasets/*/Datasets/*" + Dataset string `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + // Output only. Total number of examples requested to export + TotalCount int32 `protobuf:"varint,2,opt,name=total_count,json=totalCount,proto3" json:"total_count,omitempty"` + // Output only. Number of examples exported successfully. + ExportCount int32 `protobuf:"varint,3,opt,name=export_count,json=exportCount,proto3" json:"export_count,omitempty"` + // Output only. Statistic infos of labels in the exported dataset. + LabelStats *LabelStats `protobuf:"bytes,4,opt,name=label_stats,json=labelStats,proto3" json:"label_stats,omitempty"` + // Output only. output_config in the ExportData request. 
+ OutputConfig *OutputConfig `protobuf:"bytes,5,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataOperationResponse) Reset() { *m = ExportDataOperationResponse{} } +func (m *ExportDataOperationResponse) String() string { return proto.CompactTextString(m) } +func (*ExportDataOperationResponse) ProtoMessage() {} +func (*ExportDataOperationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{1} +} +func (m *ExportDataOperationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataOperationResponse.Unmarshal(m, b) +} +func (m *ExportDataOperationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataOperationResponse.Marshal(b, m, deterministic) +} +func (dst *ExportDataOperationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataOperationResponse.Merge(dst, src) +} +func (m *ExportDataOperationResponse) XXX_Size() int { + return xxx_messageInfo_ExportDataOperationResponse.Size(m) +} +func (m *ExportDataOperationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataOperationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataOperationResponse proto.InternalMessageInfo + +func (m *ExportDataOperationResponse) GetDataset() string { + if m != nil { + return m.Dataset + } + return "" +} + +func (m *ExportDataOperationResponse) GetTotalCount() int32 { + if m != nil { + return m.TotalCount + } + return 0 +} + +func (m *ExportDataOperationResponse) GetExportCount() int32 { + if m != nil { + return m.ExportCount + } + return 0 +} + +func (m *ExportDataOperationResponse) GetLabelStats() *LabelStats { + if m != nil { + return m.LabelStats + } + return nil +} + +func (m *ExportDataOperationResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Metadata of an ImportData operation. +type ImportDataOperationMetadata struct { + // Ouptut only. The name of imported dataset. + // "projects/*/datasets/*" + Dataset string `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + // Output only. Partial failures encountered. + // E.g. single files that couldn't be read. + // Status details field will contain standard GCP error details. 
+ PartialFailures []*status.Status `protobuf:"bytes,2,rep,name=partial_failures,json=partialFailures,proto3" json:"partial_failures,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDataOperationMetadata) Reset() { *m = ImportDataOperationMetadata{} } +func (m *ImportDataOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportDataOperationMetadata) ProtoMessage() {} +func (*ImportDataOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{2} +} +func (m *ImportDataOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDataOperationMetadata.Unmarshal(m, b) +} +func (m *ImportDataOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDataOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportDataOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDataOperationMetadata.Merge(dst, src) +} +func (m *ImportDataOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ImportDataOperationMetadata.Size(m) +} +func (m *ImportDataOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDataOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDataOperationMetadata proto.InternalMessageInfo + +func (m *ImportDataOperationMetadata) GetDataset() string { + if m != nil { + return m.Dataset + } + return "" +} + +func (m *ImportDataOperationMetadata) GetPartialFailures() []*status.Status { + if m != nil { + return m.PartialFailures + } + return nil +} + +// Metadata of an ExportData operation. +type ExportDataOperationMetadata struct { + // Output only. The name of dataset to be exported. + // "projects/*/datasets/*/Datasets/*" + Dataset string `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + // Output only. Partial failures encountered. + // E.g. single files that couldn't be read. + // Status details field will contain standard GCP error details. 
+ PartialFailures []*status.Status `protobuf:"bytes,2,rep,name=partial_failures,json=partialFailures,proto3" json:"partial_failures,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDataOperationMetadata) Reset() { *m = ExportDataOperationMetadata{} } +func (m *ExportDataOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportDataOperationMetadata) ProtoMessage() {} +func (*ExportDataOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{3} +} +func (m *ExportDataOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDataOperationMetadata.Unmarshal(m, b) +} +func (m *ExportDataOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDataOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportDataOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDataOperationMetadata.Merge(dst, src) +} +func (m *ExportDataOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ExportDataOperationMetadata.Size(m) +} +func (m *ExportDataOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDataOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDataOperationMetadata proto.InternalMessageInfo + +func (m *ExportDataOperationMetadata) GetDataset() string { + if m != nil { + return m.Dataset + } + return "" +} + +func (m *ExportDataOperationMetadata) GetPartialFailures() []*status.Status { + if m != nil { + return m.PartialFailures + } + return nil +} + +// Metadata of a labeling operation, such as LabelImage or LabelVideo. +// Next tag: 16 +type LabelOperationMetadata struct { + // Output only. Progress of label operation. Range: [0, 100]. + // Currently not supported. + ProgressPercent int32 `protobuf:"varint,1,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Output only. Partial failures encountered. + // E.g. single files that couldn't be read. + // Status details field will contain standard GCP error details. + PartialFailures []*status.Status `protobuf:"bytes,2,rep,name=partial_failures,json=partialFailures,proto3" json:"partial_failures,omitempty"` + // Ouptut only. Details of specific label operation. 
+ // + // Types that are valid to be assigned to Details: + // *LabelOperationMetadata_ImageClassificationDetails + // *LabelOperationMetadata_ImageBoundingBoxDetails + // *LabelOperationMetadata_ImageBoundingPolyDetails + // *LabelOperationMetadata_ImageOrientedBoundingBoxDetails + // *LabelOperationMetadata_ImagePolylineDetails + // *LabelOperationMetadata_ImageSegmentationDetails + // *LabelOperationMetadata_VideoClassificationDetails + // *LabelOperationMetadata_VideoObjectDetectionDetails + // *LabelOperationMetadata_VideoObjectTrackingDetails + // *LabelOperationMetadata_VideoEventDetails + // *LabelOperationMetadata_TextClassificationDetails + // *LabelOperationMetadata_AudioTranscriptionDetails + // *LabelOperationMetadata_TextEntityExtractionDetails + Details isLabelOperationMetadata_Details `protobuf_oneof:"details"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelOperationMetadata) Reset() { *m = LabelOperationMetadata{} } +func (m *LabelOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelOperationMetadata) ProtoMessage() {} +func (*LabelOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{4} +} +func (m *LabelOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelOperationMetadata.Unmarshal(m, b) +} +func (m *LabelOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelOperationMetadata.Merge(dst, src) +} +func (m *LabelOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelOperationMetadata.Size(m) +} +func (m *LabelOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelOperationMetadata proto.InternalMessageInfo + +func (m *LabelOperationMetadata) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *LabelOperationMetadata) GetPartialFailures() []*status.Status { + if m != nil { + return m.PartialFailures + } + return nil +} + +type isLabelOperationMetadata_Details interface { + isLabelOperationMetadata_Details() +} + +type LabelOperationMetadata_ImageClassificationDetails struct { + ImageClassificationDetails *LabelImageClassificationOperationMetadata `protobuf:"bytes,3,opt,name=image_classification_details,json=imageClassificationDetails,proto3,oneof"` +} + +type LabelOperationMetadata_ImageBoundingBoxDetails struct { + ImageBoundingBoxDetails *LabelImageBoundingBoxOperationMetadata `protobuf:"bytes,4,opt,name=image_bounding_box_details,json=imageBoundingBoxDetails,proto3,oneof"` +} + +type LabelOperationMetadata_ImageBoundingPolyDetails struct { + ImageBoundingPolyDetails *LabelImageBoundingPolyOperationMetadata `protobuf:"bytes,11,opt,name=image_bounding_poly_details,json=imageBoundingPolyDetails,proto3,oneof"` +} + +type LabelOperationMetadata_ImageOrientedBoundingBoxDetails struct { + ImageOrientedBoundingBoxDetails *LabelImageOrientedBoundingBoxOperationMetadata `protobuf:"bytes,14,opt,name=image_oriented_bounding_box_details,json=imageOrientedBoundingBoxDetails,proto3,oneof"` +} + +type LabelOperationMetadata_ImagePolylineDetails struct { + ImagePolylineDetails *LabelImagePolylineOperationMetadata 
`protobuf:"bytes,12,opt,name=image_polyline_details,json=imagePolylineDetails,proto3,oneof"` +} + +type LabelOperationMetadata_ImageSegmentationDetails struct { + ImageSegmentationDetails *LabelImageSegmentationOperationMetadata `protobuf:"bytes,15,opt,name=image_segmentation_details,json=imageSegmentationDetails,proto3,oneof"` +} + +type LabelOperationMetadata_VideoClassificationDetails struct { + VideoClassificationDetails *LabelVideoClassificationOperationMetadata `protobuf:"bytes,5,opt,name=video_classification_details,json=videoClassificationDetails,proto3,oneof"` +} + +type LabelOperationMetadata_VideoObjectDetectionDetails struct { + VideoObjectDetectionDetails *LabelVideoObjectDetectionOperationMetadata `protobuf:"bytes,6,opt,name=video_object_detection_details,json=videoObjectDetectionDetails,proto3,oneof"` +} + +type LabelOperationMetadata_VideoObjectTrackingDetails struct { + VideoObjectTrackingDetails *LabelVideoObjectTrackingOperationMetadata `protobuf:"bytes,7,opt,name=video_object_tracking_details,json=videoObjectTrackingDetails,proto3,oneof"` +} + +type LabelOperationMetadata_VideoEventDetails struct { + VideoEventDetails *LabelVideoEventOperationMetadata `protobuf:"bytes,8,opt,name=video_event_details,json=videoEventDetails,proto3,oneof"` +} + +type LabelOperationMetadata_TextClassificationDetails struct { + TextClassificationDetails *LabelTextClassificationOperationMetadata `protobuf:"bytes,9,opt,name=text_classification_details,json=textClassificationDetails,proto3,oneof"` +} + +type LabelOperationMetadata_AudioTranscriptionDetails struct { + AudioTranscriptionDetails *LabelAudioTranscriptionOperationMetadata `protobuf:"bytes,10,opt,name=audio_transcription_details,json=audioTranscriptionDetails,proto3,oneof"` +} + +type LabelOperationMetadata_TextEntityExtractionDetails struct { + TextEntityExtractionDetails *LabelTextEntityExtractionOperationMetadata `protobuf:"bytes,13,opt,name=text_entity_extraction_details,json=textEntityExtractionDetails,proto3,oneof"` +} + +func (*LabelOperationMetadata_ImageClassificationDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_ImageBoundingBoxDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_ImageBoundingPolyDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_ImageOrientedBoundingBoxDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_ImagePolylineDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_ImageSegmentationDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_VideoClassificationDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_VideoObjectDetectionDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_VideoObjectTrackingDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_VideoEventDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_TextClassificationDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_AudioTranscriptionDetails) isLabelOperationMetadata_Details() {} + +func (*LabelOperationMetadata_TextEntityExtractionDetails) isLabelOperationMetadata_Details() {} + +func (m *LabelOperationMetadata) GetDetails() isLabelOperationMetadata_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *LabelOperationMetadata) GetImageClassificationDetails() *LabelImageClassificationOperationMetadata { + if x, ok := 
m.GetDetails().(*LabelOperationMetadata_ImageClassificationDetails); ok { + return x.ImageClassificationDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetImageBoundingBoxDetails() *LabelImageBoundingBoxOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_ImageBoundingBoxDetails); ok { + return x.ImageBoundingBoxDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetImageBoundingPolyDetails() *LabelImageBoundingPolyOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_ImageBoundingPolyDetails); ok { + return x.ImageBoundingPolyDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetImageOrientedBoundingBoxDetails() *LabelImageOrientedBoundingBoxOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_ImageOrientedBoundingBoxDetails); ok { + return x.ImageOrientedBoundingBoxDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetImagePolylineDetails() *LabelImagePolylineOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_ImagePolylineDetails); ok { + return x.ImagePolylineDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetImageSegmentationDetails() *LabelImageSegmentationOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_ImageSegmentationDetails); ok { + return x.ImageSegmentationDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetVideoClassificationDetails() *LabelVideoClassificationOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_VideoClassificationDetails); ok { + return x.VideoClassificationDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetVideoObjectDetectionDetails() *LabelVideoObjectDetectionOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_VideoObjectDetectionDetails); ok { + return x.VideoObjectDetectionDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetVideoObjectTrackingDetails() *LabelVideoObjectTrackingOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_VideoObjectTrackingDetails); ok { + return x.VideoObjectTrackingDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetVideoEventDetails() *LabelVideoEventOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_VideoEventDetails); ok { + return x.VideoEventDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetTextClassificationDetails() *LabelTextClassificationOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_TextClassificationDetails); ok { + return x.TextClassificationDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetAudioTranscriptionDetails() *LabelAudioTranscriptionOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_AudioTranscriptionDetails); ok { + return x.AudioTranscriptionDetails + } + return nil +} + +func (m *LabelOperationMetadata) GetTextEntityExtractionDetails() *LabelTextEntityExtractionOperationMetadata { + if x, ok := m.GetDetails().(*LabelOperationMetadata_TextEntityExtractionDetails); ok { + return x.TextEntityExtractionDetails + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LabelOperationMetadata) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LabelOperationMetadata_OneofMarshaler, _LabelOperationMetadata_OneofUnmarshaler, _LabelOperationMetadata_OneofSizer, []interface{}{ + (*LabelOperationMetadata_ImageClassificationDetails)(nil), + (*LabelOperationMetadata_ImageBoundingBoxDetails)(nil), + (*LabelOperationMetadata_ImageBoundingPolyDetails)(nil), + (*LabelOperationMetadata_ImageOrientedBoundingBoxDetails)(nil), + (*LabelOperationMetadata_ImagePolylineDetails)(nil), + (*LabelOperationMetadata_ImageSegmentationDetails)(nil), + (*LabelOperationMetadata_VideoClassificationDetails)(nil), + (*LabelOperationMetadata_VideoObjectDetectionDetails)(nil), + (*LabelOperationMetadata_VideoObjectTrackingDetails)(nil), + (*LabelOperationMetadata_VideoEventDetails)(nil), + (*LabelOperationMetadata_TextClassificationDetails)(nil), + (*LabelOperationMetadata_AudioTranscriptionDetails)(nil), + (*LabelOperationMetadata_TextEntityExtractionDetails)(nil), + } +} + +func _LabelOperationMetadata_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LabelOperationMetadata) + // details + switch x := m.Details.(type) { + case *LabelOperationMetadata_ImageClassificationDetails: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageClassificationDetails); err != nil { + return err + } + case *LabelOperationMetadata_ImageBoundingBoxDetails: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageBoundingBoxDetails); err != nil { + return err + } + case *LabelOperationMetadata_ImageBoundingPolyDetails: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageBoundingPolyDetails); err != nil { + return err + } + case *LabelOperationMetadata_ImageOrientedBoundingBoxDetails: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageOrientedBoundingBoxDetails); err != nil { + return err + } + case *LabelOperationMetadata_ImagePolylineDetails: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImagePolylineDetails); err != nil { + return err + } + case *LabelOperationMetadata_ImageSegmentationDetails: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageSegmentationDetails); err != nil { + return err + } + case *LabelOperationMetadata_VideoClassificationDetails: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoClassificationDetails); err != nil { + return err + } + case *LabelOperationMetadata_VideoObjectDetectionDetails: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoObjectDetectionDetails); err != nil { + return err + } + case *LabelOperationMetadata_VideoObjectTrackingDetails: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoObjectTrackingDetails); err != nil { + return err + } + case *LabelOperationMetadata_VideoEventDetails: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoEventDetails); err != nil { + return err + } + case *LabelOperationMetadata_TextClassificationDetails: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextClassificationDetails); err != nil { + return err + } + case *LabelOperationMetadata_AudioTranscriptionDetails: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioTranscriptionDetails); err != nil { + return err + } 
+ case *LabelOperationMetadata_TextEntityExtractionDetails: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TextEntityExtractionDetails); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LabelOperationMetadata.Details has unexpected type %T", x) + } + return nil +} + +func _LabelOperationMetadata_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LabelOperationMetadata) + switch tag { + case 3: // details.image_classification_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImageClassificationOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImageClassificationDetails{msg} + return true, err + case 4: // details.image_bounding_box_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImageBoundingBoxOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImageBoundingBoxDetails{msg} + return true, err + case 11: // details.image_bounding_poly_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImageBoundingPolyOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImageBoundingPolyDetails{msg} + return true, err + case 14: // details.image_oriented_bounding_box_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImageOrientedBoundingBoxOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImageOrientedBoundingBoxDetails{msg} + return true, err + case 12: // details.image_polyline_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImagePolylineOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImagePolylineDetails{msg} + return true, err + case 15: // details.image_segmentation_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelImageSegmentationOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_ImageSegmentationDetails{msg} + return true, err + case 5: // details.video_classification_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelVideoClassificationOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_VideoClassificationDetails{msg} + return true, err + case 6: // details.video_object_detection_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelVideoObjectDetectionOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_VideoObjectDetectionDetails{msg} + return true, err + case 7: // details.video_object_tracking_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelVideoObjectTrackingOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_VideoObjectTrackingDetails{msg} + return true, err + case 8: // details.video_event_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelVideoEventOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_VideoEventDetails{msg} + return true, err + case 9: // details.text_classification_details + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelTextClassificationOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_TextClassificationDetails{msg} + return true, err + case 10: // details.audio_transcription_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelAudioTranscriptionOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_AudioTranscriptionDetails{msg} + return true, err + case 13: // details.text_entity_extraction_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LabelTextEntityExtractionOperationMetadata) + err := b.DecodeMessage(msg) + m.Details = &LabelOperationMetadata_TextEntityExtractionDetails{msg} + return true, err + default: + return false, nil + } +} + +func _LabelOperationMetadata_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LabelOperationMetadata) + // details + switch x := m.Details.(type) { + case *LabelOperationMetadata_ImageClassificationDetails: + s := proto.Size(x.ImageClassificationDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_ImageBoundingBoxDetails: + s := proto.Size(x.ImageBoundingBoxDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_ImageBoundingPolyDetails: + s := proto.Size(x.ImageBoundingPolyDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_ImageOrientedBoundingBoxDetails: + s := proto.Size(x.ImageOrientedBoundingBoxDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_ImagePolylineDetails: + s := proto.Size(x.ImagePolylineDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_ImageSegmentationDetails: + s := proto.Size(x.ImageSegmentationDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_VideoClassificationDetails: + s := proto.Size(x.VideoClassificationDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_VideoObjectDetectionDetails: + s := proto.Size(x.VideoObjectDetectionDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_VideoObjectTrackingDetails: + s := proto.Size(x.VideoObjectTrackingDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_VideoEventDetails: + s := proto.Size(x.VideoEventDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_TextClassificationDetails: + s := proto.Size(x.TextClassificationDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_AudioTranscriptionDetails: + s := proto.Size(x.AudioTranscriptionDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LabelOperationMetadata_TextEntityExtractionDetails: + s := proto.Size(x.TextEntityExtractionDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Metadata of a LabelImageClassification operation. 
+type LabelImageClassificationOperationMetadata struct { + // Basic human annotation config used in labeling request. + BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageClassificationOperationMetadata) Reset() { + *m = LabelImageClassificationOperationMetadata{} +} +func (m *LabelImageClassificationOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelImageClassificationOperationMetadata) ProtoMessage() {} +func (*LabelImageClassificationOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{5} +} +func (m *LabelImageClassificationOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageClassificationOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImageClassificationOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageClassificationOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImageClassificationOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageClassificationOperationMetadata.Merge(dst, src) +} +func (m *LabelImageClassificationOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImageClassificationOperationMetadata.Size(m) +} +func (m *LabelImageClassificationOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageClassificationOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageClassificationOperationMetadata proto.InternalMessageInfo + +func (m *LabelImageClassificationOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelImageBoundingBox operation metadata. +type LabelImageBoundingBoxOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageBoundingBoxOperationMetadata) Reset() { + *m = LabelImageBoundingBoxOperationMetadata{} +} +func (m *LabelImageBoundingBoxOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelImageBoundingBoxOperationMetadata) ProtoMessage() {} +func (*LabelImageBoundingBoxOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{6} +} +func (m *LabelImageBoundingBoxOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageBoundingBoxOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImageBoundingBoxOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageBoundingBoxOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImageBoundingBoxOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageBoundingBoxOperationMetadata.Merge(dst, src) +} +func (m *LabelImageBoundingBoxOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImageBoundingBoxOperationMetadata.Size(m) +} +func (m *LabelImageBoundingBoxOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageBoundingBoxOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageBoundingBoxOperationMetadata proto.InternalMessageInfo + +func (m *LabelImageBoundingBoxOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelImageOrientedBoundingBox operation metadata. +type LabelImageOrientedBoundingBoxOperationMetadata struct { + // Basic human annotation config. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageOrientedBoundingBoxOperationMetadata) Reset() { + *m = LabelImageOrientedBoundingBoxOperationMetadata{} +} +func (m *LabelImageOrientedBoundingBoxOperationMetadata) String() string { + return proto.CompactTextString(m) +} +func (*LabelImageOrientedBoundingBoxOperationMetadata) ProtoMessage() {} +func (*LabelImageOrientedBoundingBoxOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{7} +} +func (m *LabelImageOrientedBoundingBoxOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImageOrientedBoundingBoxOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImageOrientedBoundingBoxOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata.Merge(dst, src) +} +func (m *LabelImageOrientedBoundingBoxOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata.Size(m) +} +func (m *LabelImageOrientedBoundingBoxOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageOrientedBoundingBoxOperationMetadata proto.InternalMessageInfo + +func (m *LabelImageOrientedBoundingBoxOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of LabelImageBoundingPoly operation metadata. +type LabelImageBoundingPolyOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageBoundingPolyOperationMetadata) Reset() { + *m = LabelImageBoundingPolyOperationMetadata{} +} +func (m *LabelImageBoundingPolyOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelImageBoundingPolyOperationMetadata) ProtoMessage() {} +func (*LabelImageBoundingPolyOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{8} +} +func (m *LabelImageBoundingPolyOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageBoundingPolyOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImageBoundingPolyOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageBoundingPolyOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImageBoundingPolyOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageBoundingPolyOperationMetadata.Merge(dst, src) +} +func (m *LabelImageBoundingPolyOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImageBoundingPolyOperationMetadata.Size(m) +} +func (m *LabelImageBoundingPolyOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageBoundingPolyOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageBoundingPolyOperationMetadata proto.InternalMessageInfo + +func (m *LabelImageBoundingPolyOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of LabelImagePolyline operation metadata. +type LabelImagePolylineOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImagePolylineOperationMetadata) Reset() { *m = LabelImagePolylineOperationMetadata{} } +func (m *LabelImagePolylineOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelImagePolylineOperationMetadata) ProtoMessage() {} +func (*LabelImagePolylineOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{9} +} +func (m *LabelImagePolylineOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImagePolylineOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImagePolylineOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImagePolylineOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImagePolylineOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImagePolylineOperationMetadata.Merge(dst, src) +} +func (m *LabelImagePolylineOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImagePolylineOperationMetadata.Size(m) +} +func (m *LabelImagePolylineOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImagePolylineOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImagePolylineOperationMetadata proto.InternalMessageInfo + +func (m *LabelImagePolylineOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelImageSegmentation operation metadata. +type LabelImageSegmentationOperationMetadata struct { + // Basic human annotation config. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelImageSegmentationOperationMetadata) Reset() { + *m = LabelImageSegmentationOperationMetadata{} +} +func (m *LabelImageSegmentationOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelImageSegmentationOperationMetadata) ProtoMessage() {} +func (*LabelImageSegmentationOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{10} +} +func (m *LabelImageSegmentationOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelImageSegmentationOperationMetadata.Unmarshal(m, b) +} +func (m *LabelImageSegmentationOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelImageSegmentationOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelImageSegmentationOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelImageSegmentationOperationMetadata.Merge(dst, src) +} +func (m *LabelImageSegmentationOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelImageSegmentationOperationMetadata.Size(m) +} +func (m *LabelImageSegmentationOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelImageSegmentationOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelImageSegmentationOperationMetadata proto.InternalMessageInfo + +func (m *LabelImageSegmentationOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelVideoClassification operation metadata. +type LabelVideoClassificationOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelVideoClassificationOperationMetadata) Reset() { + *m = LabelVideoClassificationOperationMetadata{} +} +func (m *LabelVideoClassificationOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelVideoClassificationOperationMetadata) ProtoMessage() {} +func (*LabelVideoClassificationOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{11} +} +func (m *LabelVideoClassificationOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelVideoClassificationOperationMetadata.Unmarshal(m, b) +} +func (m *LabelVideoClassificationOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelVideoClassificationOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelVideoClassificationOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelVideoClassificationOperationMetadata.Merge(dst, src) +} +func (m *LabelVideoClassificationOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelVideoClassificationOperationMetadata.Size(m) +} +func (m *LabelVideoClassificationOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelVideoClassificationOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelVideoClassificationOperationMetadata proto.InternalMessageInfo + +func (m *LabelVideoClassificationOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelVideoObjectDetection operation metadata. +type LabelVideoObjectDetectionOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelVideoObjectDetectionOperationMetadata) Reset() { + *m = LabelVideoObjectDetectionOperationMetadata{} +} +func (m *LabelVideoObjectDetectionOperationMetadata) String() string { + return proto.CompactTextString(m) +} +func (*LabelVideoObjectDetectionOperationMetadata) ProtoMessage() {} +func (*LabelVideoObjectDetectionOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{12} +} +func (m *LabelVideoObjectDetectionOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata.Unmarshal(m, b) +} +func (m *LabelVideoObjectDetectionOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelVideoObjectDetectionOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata.Merge(dst, src) +} +func (m *LabelVideoObjectDetectionOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata.Size(m) +} +func (m *LabelVideoObjectDetectionOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelVideoObjectDetectionOperationMetadata proto.InternalMessageInfo + +func (m *LabelVideoObjectDetectionOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelVideoObjectTracking operation metadata. +type LabelVideoObjectTrackingOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelVideoObjectTrackingOperationMetadata) Reset() { + *m = LabelVideoObjectTrackingOperationMetadata{} +} +func (m *LabelVideoObjectTrackingOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelVideoObjectTrackingOperationMetadata) ProtoMessage() {} +func (*LabelVideoObjectTrackingOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{13} +} +func (m *LabelVideoObjectTrackingOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata.Unmarshal(m, b) +} +func (m *LabelVideoObjectTrackingOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelVideoObjectTrackingOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata.Merge(dst, src) +} +func (m *LabelVideoObjectTrackingOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata.Size(m) +} +func (m *LabelVideoObjectTrackingOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelVideoObjectTrackingOperationMetadata proto.InternalMessageInfo + +func (m *LabelVideoObjectTrackingOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelVideoEvent operation metadata. +type LabelVideoEventOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelVideoEventOperationMetadata) Reset() { *m = LabelVideoEventOperationMetadata{} } +func (m *LabelVideoEventOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelVideoEventOperationMetadata) ProtoMessage() {} +func (*LabelVideoEventOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{14} +} +func (m *LabelVideoEventOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelVideoEventOperationMetadata.Unmarshal(m, b) +} +func (m *LabelVideoEventOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelVideoEventOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelVideoEventOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelVideoEventOperationMetadata.Merge(dst, src) +} +func (m *LabelVideoEventOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelVideoEventOperationMetadata.Size(m) +} +func (m *LabelVideoEventOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelVideoEventOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelVideoEventOperationMetadata proto.InternalMessageInfo + +func (m *LabelVideoEventOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelTextClassification operation metadata. +type LabelTextClassificationOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelTextClassificationOperationMetadata) Reset() { + *m = LabelTextClassificationOperationMetadata{} +} +func (m *LabelTextClassificationOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelTextClassificationOperationMetadata) ProtoMessage() {} +func (*LabelTextClassificationOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{15} +} +func (m *LabelTextClassificationOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelTextClassificationOperationMetadata.Unmarshal(m, b) +} +func (m *LabelTextClassificationOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelTextClassificationOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelTextClassificationOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelTextClassificationOperationMetadata.Merge(dst, src) +} +func (m *LabelTextClassificationOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelTextClassificationOperationMetadata.Size(m) +} +func (m *LabelTextClassificationOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelTextClassificationOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelTextClassificationOperationMetadata proto.InternalMessageInfo + +func (m *LabelTextClassificationOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +type LabelAudioTranscriptionOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAudioTranscriptionOperationMetadata) Reset() { + *m = LabelAudioTranscriptionOperationMetadata{} +} +func (m *LabelAudioTranscriptionOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*LabelAudioTranscriptionOperationMetadata) ProtoMessage() {} +func (*LabelAudioTranscriptionOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{16} +} +func (m *LabelAudioTranscriptionOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAudioTranscriptionOperationMetadata.Unmarshal(m, b) +} +func (m *LabelAudioTranscriptionOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAudioTranscriptionOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelAudioTranscriptionOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAudioTranscriptionOperationMetadata.Merge(dst, src) +} +func (m *LabelAudioTranscriptionOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelAudioTranscriptionOperationMetadata.Size(m) +} +func (m *LabelAudioTranscriptionOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAudioTranscriptionOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAudioTranscriptionOperationMetadata proto.InternalMessageInfo + +func (m *LabelAudioTranscriptionOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Details of a LabelTextEntityExtraction operation metadata. +type LabelTextEntityExtractionOperationMetadata struct { + // Basic human annotation config used in labeling request. 
+ BasicConfig *HumanAnnotationConfig `protobuf:"bytes,1,opt,name=basic_config,json=basicConfig,proto3" json:"basic_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelTextEntityExtractionOperationMetadata) Reset() { + *m = LabelTextEntityExtractionOperationMetadata{} +} +func (m *LabelTextEntityExtractionOperationMetadata) String() string { + return proto.CompactTextString(m) +} +func (*LabelTextEntityExtractionOperationMetadata) ProtoMessage() {} +func (*LabelTextEntityExtractionOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{17} +} +func (m *LabelTextEntityExtractionOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelTextEntityExtractionOperationMetadata.Unmarshal(m, b) +} +func (m *LabelTextEntityExtractionOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelTextEntityExtractionOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *LabelTextEntityExtractionOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelTextEntityExtractionOperationMetadata.Merge(dst, src) +} +func (m *LabelTextEntityExtractionOperationMetadata) XXX_Size() int { + return xxx_messageInfo_LabelTextEntityExtractionOperationMetadata.Size(m) +} +func (m *LabelTextEntityExtractionOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LabelTextEntityExtractionOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelTextEntityExtractionOperationMetadata proto.InternalMessageInfo + +func (m *LabelTextEntityExtractionOperationMetadata) GetBasicConfig() *HumanAnnotationConfig { + if m != nil { + return m.BasicConfig + } + return nil +} + +// Metadata of a CreateInstruction operation. +type CreateInstructionMetadata struct { + // Output only. The name of the created Instruction. + // projects/{project_id}/instructions/{instruction_id} + Instruction string `protobuf:"bytes,1,opt,name=instruction,proto3" json:"instruction,omitempty"` + // Output only. Partial failures encountered. + // E.g. single files that couldn't be read. + // Status details field will contain standard GCP error details. 
+ PartialFailures []*status.Status `protobuf:"bytes,2,rep,name=partial_failures,json=partialFailures,proto3" json:"partial_failures,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstructionMetadata) Reset() { *m = CreateInstructionMetadata{} } +func (m *CreateInstructionMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateInstructionMetadata) ProtoMessage() {} +func (*CreateInstructionMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_f915b1b50117c3fa, []int{18} +} +func (m *CreateInstructionMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstructionMetadata.Unmarshal(m, b) +} +func (m *CreateInstructionMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstructionMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateInstructionMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstructionMetadata.Merge(dst, src) +} +func (m *CreateInstructionMetadata) XXX_Size() int { + return xxx_messageInfo_CreateInstructionMetadata.Size(m) +} +func (m *CreateInstructionMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstructionMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstructionMetadata proto.InternalMessageInfo + +func (m *CreateInstructionMetadata) GetInstruction() string { + if m != nil { + return m.Instruction + } + return "" +} + +func (m *CreateInstructionMetadata) GetPartialFailures() []*status.Status { + if m != nil { + return m.PartialFailures + } + return nil +} + +func init() { + proto.RegisterType((*ImportDataOperationResponse)(nil), "google.cloud.datalabeling.v1beta1.ImportDataOperationResponse") + proto.RegisterType((*ExportDataOperationResponse)(nil), "google.cloud.datalabeling.v1beta1.ExportDataOperationResponse") + proto.RegisterType((*ImportDataOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.ImportDataOperationMetadata") + proto.RegisterType((*ExportDataOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.ExportDataOperationMetadata") + proto.RegisterType((*LabelOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelOperationMetadata") + proto.RegisterType((*LabelImageClassificationOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImageClassificationOperationMetadata") + proto.RegisterType((*LabelImageBoundingBoxOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImageBoundingBoxOperationMetadata") + proto.RegisterType((*LabelImageOrientedBoundingBoxOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImageOrientedBoundingBoxOperationMetadata") + proto.RegisterType((*LabelImageBoundingPolyOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImageBoundingPolyOperationMetadata") + proto.RegisterType((*LabelImagePolylineOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImagePolylineOperationMetadata") + proto.RegisterType((*LabelImageSegmentationOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelImageSegmentationOperationMetadata") + proto.RegisterType((*LabelVideoClassificationOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelVideoClassificationOperationMetadata") + proto.RegisterType((*LabelVideoObjectDetectionOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelVideoObjectDetectionOperationMetadata") + proto.RegisterType((*LabelVideoObjectTrackingOperationMetadata)(nil), 
"google.cloud.datalabeling.v1beta1.LabelVideoObjectTrackingOperationMetadata") + proto.RegisterType((*LabelVideoEventOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelVideoEventOperationMetadata") + proto.RegisterType((*LabelTextClassificationOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelTextClassificationOperationMetadata") + proto.RegisterType((*LabelAudioTranscriptionOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelAudioTranscriptionOperationMetadata") + proto.RegisterType((*LabelTextEntityExtractionOperationMetadata)(nil), "google.cloud.datalabeling.v1beta1.LabelTextEntityExtractionOperationMetadata") + proto.RegisterType((*CreateInstructionMetadata)(nil), "google.cloud.datalabeling.v1beta1.CreateInstructionMetadata") +} + +func init() { + proto.RegisterFile("google/cloud/datalabeling/v1beta1/operations.proto", fileDescriptor_operations_f915b1b50117c3fa) +} + +var fileDescriptor_operations_f915b1b50117c3fa = []byte{ + // 1042 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x98, 0x61, 0x6f, 0xdb, 0x44, + 0x18, 0xc7, 0xb9, 0x8e, 0xae, 0xf4, 0x71, 0x47, 0x87, 0x41, 0x5b, 0xd6, 0x0e, 0x9a, 0x75, 0x02, + 0x32, 0x24, 0x62, 0xad, 0x7b, 0x83, 0x84, 0x10, 0x5a, 0xd3, 0x4e, 0x2b, 0xac, 0xb4, 0xa4, 0x15, + 0x2f, 0xe0, 0x85, 0x75, 0x71, 0xae, 0xe6, 0xc0, 0xb9, 0xb3, 0x7c, 0xe7, 0x28, 0x15, 0x08, 0x24, + 0x24, 0x60, 0x80, 0x26, 0x81, 0xc4, 0x17, 0xd8, 0x97, 0xe2, 0xf3, 0xa0, 0x3b, 0x9f, 0x3d, 0xa7, + 0x71, 0x6a, 0xbb, 0x93, 0xfb, 0x2e, 0x3e, 0xff, 0x9f, 0xff, 0xf3, 0xf3, 0x73, 0xcf, 0xe9, 0x71, + 0x0c, 0x5b, 0x3e, 0xe7, 0x7e, 0x40, 0x1c, 0x2f, 0xe0, 0xf1, 0xd0, 0x19, 0x62, 0x89, 0x03, 0x3c, + 0x20, 0x01, 0x65, 0xbe, 0x33, 0xbe, 0x3f, 0x20, 0x12, 0xdf, 0x77, 0x78, 0x48, 0x22, 0x2c, 0x29, + 0x67, 0xa2, 0x1b, 0x46, 0x5c, 0x72, 0xfb, 0x4e, 0x12, 0xd3, 0xd5, 0x31, 0xdd, 0x7c, 0x4c, 0xd7, + 0xc4, 0xac, 0x39, 0xe5, 0xb6, 0x6a, 0x51, 0x10, 0x99, 0x78, 0xae, 0x7d, 0x5a, 0x1e, 0xf0, 0x6d, + 0x3c, 0xc2, 0xcc, 0xc5, 0x8c, 0x71, 0xa9, 0x71, 0x5c, 0x8f, 0xb3, 0x13, 0xea, 0x1b, 0x83, 0x07, + 0xe5, 0x06, 0x94, 0x09, 0x19, 0xc5, 0x9e, 0x8a, 0x35, 0x41, 0x1b, 0x26, 0x48, 0x5f, 0x0d, 0xe2, + 0x13, 0x47, 0xd2, 0x11, 0x11, 0x12, 0x8f, 0x42, 0x23, 0xb8, 0x69, 0x04, 0x51, 0xe8, 0x39, 0x42, + 0x62, 0x19, 0x9b, 0x1a, 0xac, 0xdd, 0x36, 0x37, 0x70, 0x48, 0x9d, 0x17, 0x48, 0xe6, 0xee, 0xe6, + 0x0f, 0xb0, 0xbe, 0x37, 0x0a, 0x79, 0x24, 0x77, 0xb0, 0xc4, 0x07, 0x69, 0xfd, 0xfa, 0x44, 0x84, + 0x9c, 0x09, 0x62, 0xb7, 0x60, 0xc9, 0x3c, 0x7d, 0x0b, 0xb5, 0x51, 0x67, 0xb9, 0x9f, 0x5e, 0xda, + 0x1b, 0x60, 0x49, 0x2e, 0x71, 0xe0, 0x7a, 0x3c, 0x66, 0xb2, 0xb5, 0xd0, 0x46, 0x9d, 0xc5, 0x3e, + 0xe8, 0xa5, 0x9e, 0x5a, 0xb1, 0xef, 0xc0, 0x0a, 0xd5, 0xce, 0x46, 0x71, 0x45, 0x2b, 0xac, 0x64, + 0x4d, 0x4b, 0x36, 0x9f, 0x2f, 0xc0, 0xfa, 0xee, 0xa4, 0xa9, 0xec, 0x64, 0x32, 0x9b, 0x3d, 0x59, + 0x4b, 0x24, 0x5f, 0x80, 0xa5, 0x0b, 0xef, 0xaa, 0x72, 0x89, 0xd6, 0xab, 0x6d, 0xd4, 0xb1, 0xb6, + 0x3e, 0xec, 0x96, 0xb6, 0x4c, 0xf7, 0x89, 0x5a, 0x38, 0x52, 0x41, 0x7d, 0x08, 0xb2, 0xdf, 0xf6, + 0x31, 0x5c, 0xe3, 0xb1, 0x0c, 0x63, 0x69, 0xb6, 0xbb, 0xb5, 0xa8, 0x1d, 0x9d, 0x0a, 0x8e, 0x07, + 0x3a, 0xae, 0xa7, 0xc3, 0xfa, 0x2b, 0x3c, 0x77, 0xb5, 0x39, 0x2e, 0xdc, 0xa0, 0x7d, 0x22, 0xb1, + 0x72, 0x3a, 0xa7, 0x44, 0x9f, 0xc0, 0xf5, 0x10, 0x47, 0x92, 0xe2, 0xc0, 0x3d, 0xc1, 0x34, 0x88, + 0x23, 0x22, 0x5a, 0x0b, 0xed, 0x2b, 0x1d, 0x6b, 0xcb, 0x4e, 0x89, 0xa2, 0xd0, 0xeb, 0x1e, 0xe9, + 0x5e, 0xe9, 0xaf, 0x1a, 0xed, 0x23, 0x23, 0x55, 0x79, 0x0b, 0xb6, 
0xa6, 0xf9, 0xbc, 0xff, 0xad, + 0xc2, 0x0d, 0x5d, 0xe0, 0xd9, 0x9c, 0xf7, 0xe0, 0x7a, 0x18, 0x71, 0x3f, 0x22, 0x42, 0xb8, 0x21, + 0x89, 0x3c, 0xc2, 0x92, 0xe4, 0x8b, 0xfd, 0xd5, 0x74, 0xfd, 0x30, 0x59, 0x7e, 0x49, 0x08, 0xfb, + 0x6f, 0x04, 0xb7, 0xe9, 0x08, 0xfb, 0xc4, 0xf5, 0x02, 0x2c, 0x04, 0x3d, 0xa1, 0x5e, 0x72, 0x90, + 0x87, 0x44, 0x62, 0x1a, 0x08, 0xdd, 0x4e, 0xd6, 0xd6, 0x93, 0xaa, 0xcd, 0xb2, 0xa7, 0xbc, 0x7a, + 0x53, 0x56, 0x33, 0x8f, 0xf7, 0xf8, 0x95, 0xfe, 0x1a, 0x9d, 0xd5, 0xed, 0x24, 0x19, 0xed, 0xa7, + 0x08, 0x92, 0xdb, 0xee, 0x80, 0xc7, 0x6c, 0x48, 0x99, 0xef, 0x0e, 0xf8, 0x24, 0x03, 0x4a, 0xba, + 0x77, 0xaf, 0x16, 0xd0, 0xb6, 0x31, 0xda, 0xe6, 0x93, 0x22, 0x9a, 0x9b, 0xf4, 0x8c, 0x28, 0x45, + 0xf9, 0x0b, 0xc1, 0xfa, 0x19, 0x94, 0x90, 0x07, 0xa7, 0x19, 0x8b, 0xa5, 0x59, 0x3e, 0xbb, 0x10, + 0xcb, 0x21, 0x0f, 0x4e, 0x8b, 0x60, 0x5a, 0xf4, 0xac, 0x2a, 0xa5, 0x79, 0x8e, 0xe0, 0x6e, 0x42, + 0xc3, 0x23, 0x4a, 0x98, 0x24, 0xc3, 0xe2, 0x0a, 0xbd, 0xae, 0xa9, 0xbe, 0xac, 0x45, 0x75, 0x60, + 0x1c, 0x4b, 0x2a, 0xb5, 0x41, 0xe7, 0x88, 0x53, 0xc6, 0x9f, 0xe0, 0x46, 0x82, 0xa8, 0xea, 0x14, + 0x50, 0x46, 0x32, 0xaa, 0x15, 0x4d, 0xf5, 0xa8, 0x16, 0xd5, 0xa1, 0x31, 0x29, 0x42, 0x79, 0x8b, + 0xe6, 0x15, 0x69, 0xfe, 0x3f, 0xb3, 0xe6, 0x11, 0xc4, 0x1f, 0x11, 0x26, 0xa7, 0xbb, 0x79, 0xf5, + 0x02, 0x1b, 0x76, 0x94, 0x33, 0x9a, 0xbf, 0x61, 0x79, 0x55, 0x0a, 0xa3, 0x0e, 0xd7, 0x98, 0x0e, + 0x09, 0x9f, 0x77, 0xb8, 0x16, 0xeb, 0x1d, 0xae, 0xaf, 0x94, 0x57, 0x85, 0xc3, 0x35, 0x9e, 0xd5, + 0xa5, 0x48, 0xff, 0x22, 0x78, 0x27, 0x41, 0xe2, 0x83, 0xef, 0x88, 0x27, 0x15, 0x0a, 0xf1, 0xa6, + 0xa0, 0xae, 0x6a, 0xa8, 0xfd, 0x5a, 0x50, 0x07, 0xda, 0x6c, 0x27, 0xf5, 0x2a, 0xa2, 0x5a, 0x1f, + 0x17, 0x08, 0x53, 0xac, 0x7f, 0x10, 0xbc, 0x3d, 0x85, 0x25, 0x23, 0xec, 0x7d, 0xaf, 0x1a, 0x3b, + 0xa5, 0x5a, 0xba, 0x40, 0xa9, 0x92, 0x64, 0xc7, 0xc6, 0x6a, 0x7e, 0xa9, 0xa6, 0x75, 0x29, 0x53, + 0x0c, 0x6f, 0x26, 0x48, 0x64, 0x4c, 0x98, 0xcc, 0x40, 0x5e, 0xd3, 0x20, 0xbd, 0x5a, 0x20, 0xbb, + 0xca, 0xa1, 0x28, 0xff, 0x1b, 0xe3, 0xec, 0x76, 0x9a, 0xf6, 0x19, 0x82, 0x75, 0x49, 0x26, 0x72, + 0x5e, 0xcf, 0x2c, 0xeb, 0xfc, 0x9f, 0x57, 0xcd, 0x7f, 0x4c, 0x26, 0xb2, 0xbc, 0x65, 0x6e, 0xc9, + 0x19, 0x59, 0x9e, 0x07, 0xc7, 0x43, 0xca, 0xd5, 0x9e, 0x30, 0xe1, 0x45, 0x34, 0x9c, 0xe2, 0x81, + 0x7a, 0x3c, 0x0f, 0x95, 0xd5, 0x71, 0xde, 0xa9, 0x90, 0x07, 0xcf, 0xc8, 0xf2, 0x1d, 0xac, 0xeb, + 0x43, 0x98, 0xa4, 0xf2, 0xd4, 0x25, 0x13, 0xd5, 0x2b, 0x53, 0x48, 0xd7, 0xea, 0x75, 0xb0, 0x2a, + 0xd1, 0xae, 0x36, 0xdb, 0xcd, 0xbc, 0x0a, 0x3b, 0x58, 0x16, 0x08, 0x0d, 0xd6, 0xf6, 0x32, 0x2c, + 0x99, 0xf4, 0x9b, 0x4f, 0x11, 0xdc, 0xab, 0x3c, 0x0c, 0xed, 0x6f, 0x60, 0x65, 0x80, 0x05, 0xf5, + 0xd2, 0x77, 0x29, 0xa4, 0xe1, 0x3f, 0xaa, 0x00, 0xff, 0x58, 0xbd, 0x7c, 0x3f, 0xcc, 0x5e, 0x74, + 0xcd, 0x4b, 0x95, 0xa5, 0xdd, 0xcc, 0x3b, 0xd5, 0xaf, 0x08, 0xde, 0xab, 0x36, 0x06, 0x9b, 0xe5, + 0x78, 0x86, 0xa0, 0xe6, 0xb0, 0x69, 0x96, 0xe7, 0x37, 0x04, 0xef, 0x57, 0x1c, 0xc9, 0xcd, 0x82, + 0xfc, 0x82, 0xe0, 0x6e, 0x85, 0x79, 0x77, 0x99, 0xd5, 0x38, 0x77, 0xde, 0x35, 0x0b, 0x92, 0x9d, + 0x9c, 0x2a, 0x93, 0xae, 0x59, 0x94, 0x3f, 0x10, 0x7c, 0x50, 0x7d, 0xbe, 0x5d, 0x66, 0x59, 0x4a, + 0xa6, 0x5a, 0xb3, 0x28, 0x3f, 0x43, 0xbb, 0x6c, 0xac, 0x35, 0x0b, 0xf0, 0x3b, 0x82, 0x4e, 0xd5, + 0xc1, 0x76, 0x49, 0x24, 0x15, 0x46, 0xda, 0x25, 0xf5, 0x6a, 0xa5, 0x49, 0xd6, 0x2c, 0xcb, 0x8f, + 0x70, 0xab, 0x17, 0x11, 0x2c, 0xc9, 0xde, 0x8b, 0x2f, 0x3b, 0x59, 0xe6, 0x36, 0x58, 0xb9, 0x0f, + 0x3e, 0xe6, 0xff, 0x74, 0x7e, 0xe9, 0x25, 0xff, 0xce, 0x6e, 0x4f, 0xe0, 0x5d, 0x8f, 0x8f, 
0xca, + 0x9f, 0xe4, 0x10, 0x7d, 0xbd, 0x6f, 0x44, 0x3e, 0x0f, 0x30, 0xf3, 0xbb, 0x3c, 0xf2, 0x1d, 0x9f, + 0x30, 0xfd, 0xad, 0xc8, 0x7c, 0x29, 0xc3, 0x21, 0x15, 0xe7, 0x7c, 0xbb, 0xfa, 0x38, 0xbf, 0x38, + 0xb8, 0xaa, 0x23, 0x1f, 0xfc, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x62, 0x5e, 0x15, 0xa3, 0xbd, 0x13, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/clusters.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/clusters.pb.go new file mode 100644 index 0000000..602d69c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/clusters.pb.go @@ -0,0 +1,2157 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1/clusters.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The cluster state. +type ClusterStatus_State int32 + +const ( + // The cluster state is unknown. + ClusterStatus_UNKNOWN ClusterStatus_State = 0 + // The cluster is being created and set up. It is not ready for use. + ClusterStatus_CREATING ClusterStatus_State = 1 + // The cluster is currently running and healthy. It is ready for use. + ClusterStatus_RUNNING ClusterStatus_State = 2 + // The cluster encountered an error. It is not ready for use. + ClusterStatus_ERROR ClusterStatus_State = 3 + // The cluster is being deleted. It cannot be used. + ClusterStatus_DELETING ClusterStatus_State = 4 + // The cluster is being updated. It continues to accept and process jobs. + ClusterStatus_UPDATING ClusterStatus_State = 5 +) + +var ClusterStatus_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "CREATING", + 2: "RUNNING", + 3: "ERROR", + 4: "DELETING", + 5: "UPDATING", +} +var ClusterStatus_State_value = map[string]int32{ + "UNKNOWN": 0, + "CREATING": 1, + "RUNNING": 2, + "ERROR": 3, + "DELETING": 4, + "UPDATING": 5, +} + +func (x ClusterStatus_State) String() string { + return proto.EnumName(ClusterStatus_State_name, int32(x)) +} +func (ClusterStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{9, 0} +} + +// The cluster substate. +type ClusterStatus_Substate int32 + +const ( + // The cluster substate is unknown. + ClusterStatus_UNSPECIFIED ClusterStatus_Substate = 0 + // The cluster is known to be in an unhealthy state + // (for example, critical daemons are not running or HDFS capacity is + // exhausted). + // + // Applies to RUNNING state. 
+ ClusterStatus_UNHEALTHY ClusterStatus_Substate = 1 + // The agent-reported status is out of date (may occur if + // Cloud Dataproc loses communication with Agent). + // + // Applies to RUNNING state. + ClusterStatus_STALE_STATUS ClusterStatus_Substate = 2 +) + +var ClusterStatus_Substate_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNHEALTHY", + 2: "STALE_STATUS", +} +var ClusterStatus_Substate_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNHEALTHY": 1, + "STALE_STATUS": 2, +} + +func (x ClusterStatus_Substate) String() string { + return proto.EnumName(ClusterStatus_Substate_name, int32(x)) +} +func (ClusterStatus_Substate) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{9, 1} +} + +// Describes the identifying information, config, and status of +// a cluster of Compute Engine instances. +type Cluster struct { + // Required. The Google Cloud Platform project ID that the cluster belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The cluster name. Cluster names within a project must be + // unique. Names of deleted clusters can be reused. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The cluster config. Note that Cloud Dataproc may set + // default values, and values may change when clusters are updated. + Config *ClusterConfig `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` + // Optional. The labels to associate with this cluster. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with a cluster. + Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Cluster status. + Status *ClusterStatus `protobuf:"bytes,4,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous cluster status. + StatusHistory []*ClusterStatus `protobuf:"bytes,7,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc + // generates this value when it creates the cluster. + ClusterUuid string `protobuf:"bytes,6,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Contains cluster daemon metrics such as HDFS and YARN stats. + // + // **Beta Feature**: This report is available for testing purposes only. It + // may be changed before final release. 
+ Metrics *ClusterMetrics `protobuf:"bytes,9,opt,name=metrics,proto3" json:"metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{0} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Cluster) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *Cluster) GetConfig() *ClusterConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *Cluster) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Cluster) GetStatus() *ClusterStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *Cluster) GetStatusHistory() []*ClusterStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *Cluster) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *Cluster) GetMetrics() *ClusterMetrics { + if m != nil { + return m.Metrics + } + return nil +} + +// The cluster config. +type ClusterConfig struct { + // Optional. A Cloud Storage staging bucket used for sharing generated + // SSH keys and config. If you do not specify a staging bucket, Cloud + // Dataproc will determine an appropriate Cloud Storage location (US, + // ASIA, or EU) for your cluster's staging bucket according to the Google + // Compute Engine zone where your cluster is deployed, and then it will create + // and manage this project-level, per-location bucket for you. + ConfigBucket string `protobuf:"bytes,1,opt,name=config_bucket,json=configBucket,proto3" json:"config_bucket,omitempty"` + // Required. The shared Compute Engine config settings for + // all instances in a cluster. + GceClusterConfig *GceClusterConfig `protobuf:"bytes,8,opt,name=gce_cluster_config,json=gceClusterConfig,proto3" json:"gce_cluster_config,omitempty"` + // Optional. The Compute Engine config settings for + // the master instance in a cluster. + MasterConfig *InstanceGroupConfig `protobuf:"bytes,9,opt,name=master_config,json=masterConfig,proto3" json:"master_config,omitempty"` + // Optional. The Compute Engine config settings for + // worker instances in a cluster. + WorkerConfig *InstanceGroupConfig `protobuf:"bytes,10,opt,name=worker_config,json=workerConfig,proto3" json:"worker_config,omitempty"` + // Optional. The Compute Engine config settings for + // additional worker instances in a cluster. + SecondaryWorkerConfig *InstanceGroupConfig `protobuf:"bytes,12,opt,name=secondary_worker_config,json=secondaryWorkerConfig,proto3" json:"secondary_worker_config,omitempty"` + // Optional. 
The config settings for software inside the cluster. + SoftwareConfig *SoftwareConfig `protobuf:"bytes,13,opt,name=software_config,json=softwareConfig,proto3" json:"software_config,omitempty"` + // Optional. Commands to execute on each node after config is + // completed. By default, executables are run on master and all worker nodes. + // You can test a node's `role` metadata to run an executable on + // a master or worker node, as shown below using `curl` (you can also use + // `wget`): + // + // ROLE=$(curl -H Metadata-Flavor:Google + // http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) + // if [[ "${ROLE}" == 'Master' ]]; then + // ... master specific actions ... + // else + // ... worker specific actions ... + // fi + InitializationActions []*NodeInitializationAction `protobuf:"bytes,11,rep,name=initialization_actions,json=initializationActions,proto3" json:"initialization_actions,omitempty"` + // Optional. Encryption settings for the cluster. + EncryptionConfig *EncryptionConfig `protobuf:"bytes,15,opt,name=encryption_config,json=encryptionConfig,proto3" json:"encryption_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterConfig) Reset() { *m = ClusterConfig{} } +func (m *ClusterConfig) String() string { return proto.CompactTextString(m) } +func (*ClusterConfig) ProtoMessage() {} +func (*ClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{1} +} +func (m *ClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterConfig.Unmarshal(m, b) +} +func (m *ClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterConfig.Marshal(b, m, deterministic) +} +func (dst *ClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterConfig.Merge(dst, src) +} +func (m *ClusterConfig) XXX_Size() int { + return xxx_messageInfo_ClusterConfig.Size(m) +} +func (m *ClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterConfig proto.InternalMessageInfo + +func (m *ClusterConfig) GetConfigBucket() string { + if m != nil { + return m.ConfigBucket + } + return "" +} + +func (m *ClusterConfig) GetGceClusterConfig() *GceClusterConfig { + if m != nil { + return m.GceClusterConfig + } + return nil +} + +func (m *ClusterConfig) GetMasterConfig() *InstanceGroupConfig { + if m != nil { + return m.MasterConfig + } + return nil +} + +func (m *ClusterConfig) GetWorkerConfig() *InstanceGroupConfig { + if m != nil { + return m.WorkerConfig + } + return nil +} + +func (m *ClusterConfig) GetSecondaryWorkerConfig() *InstanceGroupConfig { + if m != nil { + return m.SecondaryWorkerConfig + } + return nil +} + +func (m *ClusterConfig) GetSoftwareConfig() *SoftwareConfig { + if m != nil { + return m.SoftwareConfig + } + return nil +} + +func (m *ClusterConfig) GetInitializationActions() []*NodeInitializationAction { + if m != nil { + return m.InitializationActions + } + return nil +} + +func (m *ClusterConfig) GetEncryptionConfig() *EncryptionConfig { + if m != nil { + return m.EncryptionConfig + } + return nil +} + +// Encryption settings for the cluster. +type EncryptionConfig struct { + // Optional. The Cloud KMS key name to use for PD disk encryption for all + // instances in the cluster. 
+ GcePdKmsKeyName string `protobuf:"bytes,1,opt,name=gce_pd_kms_key_name,json=gcePdKmsKeyName,proto3" json:"gce_pd_kms_key_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EncryptionConfig) Reset() { *m = EncryptionConfig{} } +func (m *EncryptionConfig) String() string { return proto.CompactTextString(m) } +func (*EncryptionConfig) ProtoMessage() {} +func (*EncryptionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{2} +} +func (m *EncryptionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EncryptionConfig.Unmarshal(m, b) +} +func (m *EncryptionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EncryptionConfig.Marshal(b, m, deterministic) +} +func (dst *EncryptionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_EncryptionConfig.Merge(dst, src) +} +func (m *EncryptionConfig) XXX_Size() int { + return xxx_messageInfo_EncryptionConfig.Size(m) +} +func (m *EncryptionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_EncryptionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_EncryptionConfig proto.InternalMessageInfo + +func (m *EncryptionConfig) GetGcePdKmsKeyName() string { + if m != nil { + return m.GcePdKmsKeyName + } + return "" +} + +// Common config settings for resources of Compute Engine cluster +// instances, applicable to all instances in the cluster. +type GceClusterConfig struct { + // Optional. The zone where the Compute Engine cluster will be located. + // On a create request, it is required in the "global" region. If omitted + // in a non-global Cloud Dataproc region, the service will pick a zone in the + // corresponding Compute Engine region. On a get request, zone will + // always be present. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` + // * `projects/[project_id]/zones/[zone]` + // * `us-central1-f` + ZoneUri string `protobuf:"bytes,1,opt,name=zone_uri,json=zoneUri,proto3" json:"zone_uri,omitempty"` + // Optional. The Compute Engine network to be used for machine + // communications. Cannot be specified with subnetwork_uri. If neither + // `network_uri` nor `subnetwork_uri` is specified, the "default" network of + // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see + // [Using Subnetworks](/compute/docs/subnetworks) for more information). + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` + // * `projects/[project_id]/regions/global/default` + // * `default` + NetworkUri string `protobuf:"bytes,2,opt,name=network_uri,json=networkUri,proto3" json:"network_uri,omitempty"` + // Optional. The Compute Engine subnetwork to be used for machine + // communications. Cannot be specified with network_uri. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0` + // * `projects/[project_id]/regions/us-east1/sub0` + // * `sub0` + SubnetworkUri string `protobuf:"bytes,6,opt,name=subnetwork_uri,json=subnetworkUri,proto3" json:"subnetwork_uri,omitempty"` + // Optional. If true, all instances in the cluster will only have internal IP + // addresses. 
By default, clusters are not restricted to internal IP + // addresses, and will have ephemeral external IP addresses assigned to each + // instance. This `internal_ip_only` restriction can only be enabled for + // subnetwork enabled networks, and all off-cluster dependencies must be + // configured to be accessible without external IP addresses. + InternalIpOnly bool `protobuf:"varint,7,opt,name=internal_ip_only,json=internalIpOnly,proto3" json:"internal_ip_only,omitempty"` + // Optional. The service account of the instances. Defaults to the default + // Compute Engine service account. Custom service accounts need + // permissions equivalent to the following IAM roles: + // + // * roles/logging.logWriter + // * roles/storage.objectAdmin + // + // (see + // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts + // for more information). + // Example: `[account_id]@[project_id].iam.gserviceaccount.com` + ServiceAccount string `protobuf:"bytes,8,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // Optional. The URIs of service account scopes to be included in + // Compute Engine instances. The following base set of scopes is always + // included: + // + // * https://www.googleapis.com/auth/cloud.useraccounts.readonly + // * https://www.googleapis.com/auth/devstorage.read_write + // * https://www.googleapis.com/auth/logging.write + // + // If no scopes are specified, the following defaults are also provided: + // + // * https://www.googleapis.com/auth/bigquery + // * https://www.googleapis.com/auth/bigtable.admin.table + // * https://www.googleapis.com/auth/bigtable.data + // * https://www.googleapis.com/auth/devstorage.full_control + ServiceAccountScopes []string `protobuf:"bytes,3,rep,name=service_account_scopes,json=serviceAccountScopes,proto3" json:"service_account_scopes,omitempty"` + // The Compute Engine tags to add to all instances (see + // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). + Tags []string `protobuf:"bytes,4,rep,name=tags,proto3" json:"tags,omitempty"` + // The Compute Engine metadata entries to add to all instances (see + // [Project and instance + // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). 
+ Metadata map[string]string `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GceClusterConfig) Reset() { *m = GceClusterConfig{} } +func (m *GceClusterConfig) String() string { return proto.CompactTextString(m) } +func (*GceClusterConfig) ProtoMessage() {} +func (*GceClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{3} +} +func (m *GceClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GceClusterConfig.Unmarshal(m, b) +} +func (m *GceClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GceClusterConfig.Marshal(b, m, deterministic) +} +func (dst *GceClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_GceClusterConfig.Merge(dst, src) +} +func (m *GceClusterConfig) XXX_Size() int { + return xxx_messageInfo_GceClusterConfig.Size(m) +} +func (m *GceClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_GceClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_GceClusterConfig proto.InternalMessageInfo + +func (m *GceClusterConfig) GetZoneUri() string { + if m != nil { + return m.ZoneUri + } + return "" +} + +func (m *GceClusterConfig) GetNetworkUri() string { + if m != nil { + return m.NetworkUri + } + return "" +} + +func (m *GceClusterConfig) GetSubnetworkUri() string { + if m != nil { + return m.SubnetworkUri + } + return "" +} + +func (m *GceClusterConfig) GetInternalIpOnly() bool { + if m != nil { + return m.InternalIpOnly + } + return false +} + +func (m *GceClusterConfig) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *GceClusterConfig) GetServiceAccountScopes() []string { + if m != nil { + return m.ServiceAccountScopes + } + return nil +} + +func (m *GceClusterConfig) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *GceClusterConfig) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +// Optional. The config settings for Compute Engine resources in +// an instance group, such as a master or worker group. +type InstanceGroupConfig struct { + // Optional. The number of VM instances in the instance group. + // For master instance groups, must be set to 1. + NumInstances int32 `protobuf:"varint,1,opt,name=num_instances,json=numInstances,proto3" json:"num_instances,omitempty"` + // Output only. The list of instance names. Cloud Dataproc derives the names + // from `cluster_name`, `num_instances`, and the instance group. + InstanceNames []string `protobuf:"bytes,2,rep,name=instance_names,json=instanceNames,proto3" json:"instance_names,omitempty"` + // Optional. The Compute Engine image resource used for cluster + // instances. It can be specified or may be inferred from + // `SoftwareConfig.image_version`. + ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. The Compute Engine machine type used for cluster instances. + // + // A full URL, partial URI, or short name are valid. 
Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `n1-standard-2` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone + // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the machine type + // resource, for example, `n1-standard-2`. + MachineTypeUri string `protobuf:"bytes,4,opt,name=machine_type_uri,json=machineTypeUri,proto3" json:"machine_type_uri,omitempty"` + // Optional. Disk option config settings. + DiskConfig *DiskConfig `protobuf:"bytes,5,opt,name=disk_config,json=diskConfig,proto3" json:"disk_config,omitempty"` + // Optional. Specifies that this instance group contains preemptible + // instances. + IsPreemptible bool `protobuf:"varint,6,opt,name=is_preemptible,json=isPreemptible,proto3" json:"is_preemptible,omitempty"` + // Output only. The config for Compute Engine Instance Group + // Manager that manages this group. + // This is only used for preemptible instance groups. + ManagedGroupConfig *ManagedGroupConfig `protobuf:"bytes,7,opt,name=managed_group_config,json=managedGroupConfig,proto3" json:"managed_group_config,omitempty"` + // Optional. The Compute Engine accelerator configuration for these + // instances. + // + // **Beta Feature**: This feature is still under development. It may be + // changed before final release. + Accelerators []*AcceleratorConfig `protobuf:"bytes,8,rep,name=accelerators,proto3" json:"accelerators,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstanceGroupConfig) Reset() { *m = InstanceGroupConfig{} } +func (m *InstanceGroupConfig) String() string { return proto.CompactTextString(m) } +func (*InstanceGroupConfig) ProtoMessage() {} +func (*InstanceGroupConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{4} +} +func (m *InstanceGroupConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstanceGroupConfig.Unmarshal(m, b) +} +func (m *InstanceGroupConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstanceGroupConfig.Marshal(b, m, deterministic) +} +func (dst *InstanceGroupConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstanceGroupConfig.Merge(dst, src) +} +func (m *InstanceGroupConfig) XXX_Size() int { + return xxx_messageInfo_InstanceGroupConfig.Size(m) +} +func (m *InstanceGroupConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InstanceGroupConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InstanceGroupConfig proto.InternalMessageInfo + +func (m *InstanceGroupConfig) GetNumInstances() int32 { + if m != nil { + return m.NumInstances + } + return 0 +} + +func (m *InstanceGroupConfig) GetInstanceNames() []string { + if m != nil { + return m.InstanceNames + } + return nil +} + +func (m *InstanceGroupConfig) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *InstanceGroupConfig) GetMachineTypeUri() string { + if m != nil { + return m.MachineTypeUri + } + return "" +} + +func (m *InstanceGroupConfig) GetDiskConfig() *DiskConfig { + if m != nil { + return m.DiskConfig + } + return nil +} + +func (m *InstanceGroupConfig) GetIsPreemptible() bool { + if m != nil { + return m.IsPreemptible + } + return false +} + +func (m *InstanceGroupConfig) 
GetManagedGroupConfig() *ManagedGroupConfig { + if m != nil { + return m.ManagedGroupConfig + } + return nil +} + +func (m *InstanceGroupConfig) GetAccelerators() []*AcceleratorConfig { + if m != nil { + return m.Accelerators + } + return nil +} + +// Specifies the resources used to actively manage an instance group. +type ManagedGroupConfig struct { + // Output only. The name of the Instance Template used for the Managed + // Instance Group. + InstanceTemplateName string `protobuf:"bytes,1,opt,name=instance_template_name,json=instanceTemplateName,proto3" json:"instance_template_name,omitempty"` + // Output only. The name of the Instance Group Manager for this group. + InstanceGroupManagerName string `protobuf:"bytes,2,opt,name=instance_group_manager_name,json=instanceGroupManagerName,proto3" json:"instance_group_manager_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedGroupConfig) Reset() { *m = ManagedGroupConfig{} } +func (m *ManagedGroupConfig) String() string { return proto.CompactTextString(m) } +func (*ManagedGroupConfig) ProtoMessage() {} +func (*ManagedGroupConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{5} +} +func (m *ManagedGroupConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedGroupConfig.Unmarshal(m, b) +} +func (m *ManagedGroupConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedGroupConfig.Marshal(b, m, deterministic) +} +func (dst *ManagedGroupConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedGroupConfig.Merge(dst, src) +} +func (m *ManagedGroupConfig) XXX_Size() int { + return xxx_messageInfo_ManagedGroupConfig.Size(m) +} +func (m *ManagedGroupConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedGroupConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedGroupConfig proto.InternalMessageInfo + +func (m *ManagedGroupConfig) GetInstanceTemplateName() string { + if m != nil { + return m.InstanceTemplateName + } + return "" +} + +func (m *ManagedGroupConfig) GetInstanceGroupManagerName() string { + if m != nil { + return m.InstanceGroupManagerName + } + return "" +} + +// Specifies the type and number of accelerator cards attached to the instances +// of an instance. See [GPUs on Compute Engine](/compute/docs/gpus/). +type AcceleratorConfig struct { + // Full URL, partial URI, or short name of the accelerator type resource to + // expose to this instance. See + // [Compute Engine + // AcceleratorTypes](/compute/docs/reference/beta/acceleratorTypes). + // + // Examples: + // + // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `nvidia-tesla-k80` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone + // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the accelerator type + // resource, for example, `nvidia-tesla-k80`. + AcceleratorTypeUri string `protobuf:"bytes,1,opt,name=accelerator_type_uri,json=acceleratorTypeUri,proto3" json:"accelerator_type_uri,omitempty"` + // The number of the accelerator cards of this type exposed to this instance. 
+ AcceleratorCount int32 `protobuf:"varint,2,opt,name=accelerator_count,json=acceleratorCount,proto3" json:"accelerator_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcceleratorConfig) Reset() { *m = AcceleratorConfig{} } +func (m *AcceleratorConfig) String() string { return proto.CompactTextString(m) } +func (*AcceleratorConfig) ProtoMessage() {} +func (*AcceleratorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{6} +} +func (m *AcceleratorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcceleratorConfig.Unmarshal(m, b) +} +func (m *AcceleratorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcceleratorConfig.Marshal(b, m, deterministic) +} +func (dst *AcceleratorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcceleratorConfig.Merge(dst, src) +} +func (m *AcceleratorConfig) XXX_Size() int { + return xxx_messageInfo_AcceleratorConfig.Size(m) +} +func (m *AcceleratorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AcceleratorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AcceleratorConfig proto.InternalMessageInfo + +func (m *AcceleratorConfig) GetAcceleratorTypeUri() string { + if m != nil { + return m.AcceleratorTypeUri + } + return "" +} + +func (m *AcceleratorConfig) GetAcceleratorCount() int32 { + if m != nil { + return m.AcceleratorCount + } + return 0 +} + +// Specifies the config of disk options for a group of VM instances. +type DiskConfig struct { + // Optional. Type of the boot disk (default is "pd-standard"). + // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or + // "pd-standard" (Persistent Disk Hard Disk Drive). + BootDiskType string `protobuf:"bytes,3,opt,name=boot_disk_type,json=bootDiskType,proto3" json:"boot_disk_type,omitempty"` + // Optional. Size in GB of the boot disk (default is 500GB). + BootDiskSizeGb int32 `protobuf:"varint,1,opt,name=boot_disk_size_gb,json=bootDiskSizeGb,proto3" json:"boot_disk_size_gb,omitempty"` + // Optional. Number of attached SSDs, from 0 to 4 (default is 0). + // If SSDs are not attached, the boot disk is used to store runtime logs and + // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. + // If one or more SSDs are attached, this runtime bulk + // data is spread across them, and the boot disk contains only basic + // config and installed binaries. 
+ NumLocalSsds int32 `protobuf:"varint,2,opt,name=num_local_ssds,json=numLocalSsds,proto3" json:"num_local_ssds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiskConfig) Reset() { *m = DiskConfig{} } +func (m *DiskConfig) String() string { return proto.CompactTextString(m) } +func (*DiskConfig) ProtoMessage() {} +func (*DiskConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{7} +} +func (m *DiskConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiskConfig.Unmarshal(m, b) +} +func (m *DiskConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiskConfig.Marshal(b, m, deterministic) +} +func (dst *DiskConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiskConfig.Merge(dst, src) +} +func (m *DiskConfig) XXX_Size() int { + return xxx_messageInfo_DiskConfig.Size(m) +} +func (m *DiskConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DiskConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DiskConfig proto.InternalMessageInfo + +func (m *DiskConfig) GetBootDiskType() string { + if m != nil { + return m.BootDiskType + } + return "" +} + +func (m *DiskConfig) GetBootDiskSizeGb() int32 { + if m != nil { + return m.BootDiskSizeGb + } + return 0 +} + +func (m *DiskConfig) GetNumLocalSsds() int32 { + if m != nil { + return m.NumLocalSsds + } + return 0 +} + +// Specifies an executable to run on a fully configured node and a +// timeout period for executable completion. +type NodeInitializationAction struct { + // Required. Cloud Storage URI of executable file. + ExecutableFile string `protobuf:"bytes,1,opt,name=executable_file,json=executableFile,proto3" json:"executable_file,omitempty"` + // Optional. Amount of time executable has to complete. Default is + // 10 minutes. Cluster creation fails with an explanatory error message (the + // name of the executable that caused the error and the exceeded timeout + // period) if the executable is not completed at end of the timeout period. 
+ ExecutionTimeout *duration.Duration `protobuf:"bytes,2,opt,name=execution_timeout,json=executionTimeout,proto3" json:"execution_timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeInitializationAction) Reset() { *m = NodeInitializationAction{} } +func (m *NodeInitializationAction) String() string { return proto.CompactTextString(m) } +func (*NodeInitializationAction) ProtoMessage() {} +func (*NodeInitializationAction) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{8} +} +func (m *NodeInitializationAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeInitializationAction.Unmarshal(m, b) +} +func (m *NodeInitializationAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeInitializationAction.Marshal(b, m, deterministic) +} +func (dst *NodeInitializationAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeInitializationAction.Merge(dst, src) +} +func (m *NodeInitializationAction) XXX_Size() int { + return xxx_messageInfo_NodeInitializationAction.Size(m) +} +func (m *NodeInitializationAction) XXX_DiscardUnknown() { + xxx_messageInfo_NodeInitializationAction.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeInitializationAction proto.InternalMessageInfo + +func (m *NodeInitializationAction) GetExecutableFile() string { + if m != nil { + return m.ExecutableFile + } + return "" +} + +func (m *NodeInitializationAction) GetExecutionTimeout() *duration.Duration { + if m != nil { + return m.ExecutionTimeout + } + return nil +} + +// The status of a cluster and its instances. +type ClusterStatus struct { + // Output only. The cluster's state. + State ClusterStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.ClusterStatus_State" json:"state,omitempty"` + // Output only. Optional details of cluster's state. + Detail string `protobuf:"bytes,2,opt,name=detail,proto3" json:"detail,omitempty"` + // Output only. Time when this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + // Output only. Additional state information that includes + // status reported by the agent. 
+ Substate ClusterStatus_Substate `protobuf:"varint,4,opt,name=substate,proto3,enum=google.cloud.dataproc.v1.ClusterStatus_Substate" json:"substate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterStatus) Reset() { *m = ClusterStatus{} } +func (m *ClusterStatus) String() string { return proto.CompactTextString(m) } +func (*ClusterStatus) ProtoMessage() {} +func (*ClusterStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{9} +} +func (m *ClusterStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterStatus.Unmarshal(m, b) +} +func (m *ClusterStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterStatus.Marshal(b, m, deterministic) +} +func (dst *ClusterStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterStatus.Merge(dst, src) +} +func (m *ClusterStatus) XXX_Size() int { + return xxx_messageInfo_ClusterStatus.Size(m) +} +func (m *ClusterStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterStatus proto.InternalMessageInfo + +func (m *ClusterStatus) GetState() ClusterStatus_State { + if m != nil { + return m.State + } + return ClusterStatus_UNKNOWN +} + +func (m *ClusterStatus) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func (m *ClusterStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +func (m *ClusterStatus) GetSubstate() ClusterStatus_Substate { + if m != nil { + return m.Substate + } + return ClusterStatus_UNSPECIFIED +} + +// Specifies the selection and config of software inside the cluster. +type SoftwareConfig struct { + // Optional. The version of software inside the cluster. It must be one of the + // supported [Cloud Dataproc + // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), + // such as "1.2" (including a subminor version, such as "1.2.29"), or the + // ["preview" + // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). + // If unspecified, it defaults to the latest version. + ImageVersion string `protobuf:"bytes,1,opt,name=image_version,json=imageVersion,proto3" json:"image_version,omitempty"` + // Optional. The properties to set on daemon config files. + // + // Property keys are specified in `prefix:property` format, such as + // `core:fs.defaultFS`. The following are supported prefixes + // and their mappings: + // + // * capacity-scheduler: `capacity-scheduler.xml` + // * core: `core-site.xml` + // * distcp: `distcp-default.xml` + // * hdfs: `hdfs-site.xml` + // * hive: `hive-site.xml` + // * mapred: `mapred-site.xml` + // * pig: `pig.properties` + // * spark: `spark-defaults.conf` + // * yarn: `yarn-site.xml` + // + // For more information, see + // [Cluster properties](/dataproc/docs/concepts/cluster-properties). 
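To make the `prefix:property` key format documented above concrete, here is a minimal caller-side sketch (illustrative only, not part of the generated file or this patch); the import alias, image version, bucket, and property values are hypothetical.

package example

import dataprocpb "google.golang.org/genproto/googleapis/cloud/dataproc/v1"

// exampleSoftwareConfig builds a SoftwareConfig whose Properties map uses the
// prefix:property key format described above (config-file prefix, colon,
// property name). Values here are placeholders, not recommendations.
func exampleSoftwareConfig() *dataprocpb.SoftwareConfig {
	return &dataprocpb.SoftwareConfig{
		ImageVersion: "1.2", // hypothetical image version
		Properties: map[string]string{
			"core:fs.defaultFS":           "gs://example-bucket", // hypothetical core-site.xml override
			"spark:spark.executor.memory": "4g",                  // hypothetical spark-defaults.conf override
		},
	}
}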
+ Properties map[string]string `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SoftwareConfig) Reset() { *m = SoftwareConfig{} } +func (m *SoftwareConfig) String() string { return proto.CompactTextString(m) } +func (*SoftwareConfig) ProtoMessage() {} +func (*SoftwareConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{10} +} +func (m *SoftwareConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SoftwareConfig.Unmarshal(m, b) +} +func (m *SoftwareConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SoftwareConfig.Marshal(b, m, deterministic) +} +func (dst *SoftwareConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SoftwareConfig.Merge(dst, src) +} +func (m *SoftwareConfig) XXX_Size() int { + return xxx_messageInfo_SoftwareConfig.Size(m) +} +func (m *SoftwareConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SoftwareConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SoftwareConfig proto.InternalMessageInfo + +func (m *SoftwareConfig) GetImageVersion() string { + if m != nil { + return m.ImageVersion + } + return "" +} + +func (m *SoftwareConfig) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +// Contains cluster daemon metrics, such as HDFS and YARN stats. +// +// **Beta Feature**: This report is available for testing purposes only. It may +// be changed before final release. +type ClusterMetrics struct { + // The HDFS metrics. + HdfsMetrics map[string]int64 `protobuf:"bytes,1,rep,name=hdfs_metrics,json=hdfsMetrics,proto3" json:"hdfs_metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + // The YARN metrics. + YarnMetrics map[string]int64 `protobuf:"bytes,2,rep,name=yarn_metrics,json=yarnMetrics,proto3" json:"yarn_metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterMetrics) Reset() { *m = ClusterMetrics{} } +func (m *ClusterMetrics) String() string { return proto.CompactTextString(m) } +func (*ClusterMetrics) ProtoMessage() {} +func (*ClusterMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{11} +} +func (m *ClusterMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterMetrics.Unmarshal(m, b) +} +func (m *ClusterMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterMetrics.Marshal(b, m, deterministic) +} +func (dst *ClusterMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterMetrics.Merge(dst, src) +} +func (m *ClusterMetrics) XXX_Size() int { + return xxx_messageInfo_ClusterMetrics.Size(m) +} +func (m *ClusterMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterMetrics proto.InternalMessageInfo + +func (m *ClusterMetrics) GetHdfsMetrics() map[string]int64 { + if m != nil { + return m.HdfsMetrics + } + return nil +} + +func (m *ClusterMetrics) GetYarnMetrics() map[string]int64 { + if m != nil { + return m.YarnMetrics + } + return nil +} + +// A request to create a cluster. 
+type CreateClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster to create. + Cluster *Cluster `protobuf:"bytes,2,opt,name=cluster,proto3" json:"cluster,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{12} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +func (m *CreateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *CreateClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to update a cluster. +type UpdateClusterRequest struct { + // Required. The ID of the Google Cloud Platform project the + // cluster belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,5,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The changes to the cluster. 
+ Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + // Optional. Timeout for graceful YARN decomissioning. Graceful + // decommissioning allows removing nodes from the cluster without + // interrupting jobs in progress. Timeout specifies how long to wait for jobs + // in progress to finish before forcefully removing nodes (and potentially + // interrupting jobs). Default timeout is 0 (for forceful decommission), and + // the maximum allowed timeout is 1 day. + // + // Only supported on Dataproc image versions 1.2 and higher. + GracefulDecommissionTimeout *duration.Duration `protobuf:"bytes,6,opt,name=graceful_decommission_timeout,json=gracefulDecommissionTimeout,proto3" json:"graceful_decommission_timeout,omitempty"` + // Required. Specifies the path, relative to `Cluster`, of + // the field to update. For example, to change the number of workers + // in a cluster to 5, the `update_mask` parameter would be + // specified as `config.worker_config.num_instances`, + // and the `PATCH` request body would specify the new value, as follows: + // + // { + // "config":{ + // "workerConfig":{ + // "numInstances":"5" + // } + // } + // } + // Similarly, to change the number of preemptible workers in a cluster to 5, + // the `update_mask` parameter would be + // `config.secondary_worker_config.num_instances`, and the `PATCH` request + // body would be set as follows: + // + // { + // "config":{ + // "secondaryWorkerConfig":{ + // "numInstances":"5" + // } + // } + // } + // Note: Currently, only the following fields can be updated: + // + //
+ //
+ //  <table>
+ //  <tbody>
+ //  <tr>
+ //  <td><strong>Mask</strong></td>
+ //  <td><strong>Purpose</strong></td>
+ //  </tr>
+ //  <tr>
+ //  <td>labels</td>
+ //  <td>Update labels</td>
+ //  </tr>
+ //  <tr>
+ //  <td>config.worker_config.num_instances</td>
+ //  <td>Resize primary worker group</td>
+ //  </tr>
+ //  <tr>
+ //  <td>config.secondary_worker_config.num_instances</td>
+ //  <td>Resize secondary worker group</td>
+ //  </tr>
+ //  </tbody>
+ //  </table>
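The `update_mask` mechanics in the comment above are easiest to see in a populated request. A minimal sketch (illustrative only, not part of the generated file): it assumes the generated package is imported as dataprocpb, that field_mask comes from google.golang.org/genproto/protobuf/field_mask as in this file's imports, and that ClusterConfig exposes a WorkerConfig field as in the upstream API; project, region, and cluster names are hypothetical.

package example

import (
	dataprocpb "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

// resizeWorkersRequest builds an UpdateClusterRequest that resizes the primary
// worker group to five nodes. Only the path named in UpdateMask is applied;
// the rest of the embedded Cluster is ignored by UpdateCluster.
func resizeWorkersRequest() *dataprocpb.UpdateClusterRequest {
	return &dataprocpb.UpdateClusterRequest{
		ProjectId:   "example-project", // hypothetical
		Region:      "us-east1",        // hypothetical
		ClusterName: "example-cluster", // hypothetical
		Cluster: &dataprocpb.Cluster{
			Config: &dataprocpb.ClusterConfig{
				WorkerConfig: &dataprocpb.InstanceGroupConfig{NumInstances: 5},
			},
		},
		UpdateMask: &field_mask.FieldMask{
			Paths: []string{"config.worker_config.num_instances"},
		},
	}
}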
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,7,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterRequest) Reset() { *m = UpdateClusterRequest{} } +func (m *UpdateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterRequest) ProtoMessage() {} +func (*UpdateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{13} +} +func (m *UpdateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterRequest.Unmarshal(m, b) +} +func (m *UpdateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterRequest.Merge(dst, src) +} +func (m *UpdateClusterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateClusterRequest.Size(m) +} +func (m *UpdateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterRequest proto.InternalMessageInfo + +func (m *UpdateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *UpdateClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *UpdateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *UpdateClusterRequest) GetGracefulDecommissionTimeout() *duration.Duration { + if m != nil { + return m.GracefulDecommissionTimeout + } + return nil +} + +func (m *UpdateClusterRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to delete a cluster. +type DeleteClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Optional. 
Specifying the `cluster_uuid` means the RPC should fail + // (with error NOT_FOUND) if cluster with specified UUID does not exist. + ClusterUuid string `protobuf:"bytes,4,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,5,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{14} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +func (m *DeleteClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DeleteClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *DeleteClusterRequest) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *DeleteClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Request to get the resource representation for a cluster in a project. +type GetClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. 
+ ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{15} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +func (m *GetClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *GetClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +// A request to list the clusters in a project. +type ListClustersRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,4,opt,name=region,proto3" json:"region,omitempty"` + // Optional. A filter constraining the clusters to list. Filters are + // case-sensitive and have the following syntax: + // + // field = value [AND [field = value]] ... + // + // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + // and `[KEY]` is a label key. **value** can be `*` to match all values. + // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + // contains the `DELETING` and `ERROR` states. + // `clusterName` is the name of the cluster provided at creation time. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. + // + // Example filter: + // + // status.state = ACTIVE AND clusterName = mycluster + // AND labels.env = staging AND labels.starred = * + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. The standard List page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The standard List page token. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{16} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +func (m *ListClustersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListClustersRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *ListClustersRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListClustersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListClustersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The list of all clusters in a project. +type ListClustersResponse struct { + // Output only. The clusters in the project. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // Output only. This token is included in the response if there are more + // results to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent `ListClustersRequest`. 
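The page_token / next_page_token handshake described above is normally driven in a loop. A minimal sketch (illustrative only, not part of the generated file), written against the ClusterControllerClient generated later in this file; the project, region, and filter string are hypothetical, with the filter syntax as documented on ListClustersRequest.

package example

import (
	"context"

	dataprocpb "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
)

// listAllClusters walks every page of ListClusters results by feeding each
// response's next_page_token back in as the next request's page_token.
func listAllClusters(ctx context.Context, c dataprocpb.ClusterControllerClient) ([]*dataprocpb.Cluster, error) {
	req := &dataprocpb.ListClustersRequest{
		ProjectId: "example-project", // hypothetical
		Region:    "us-east1",        // hypothetical
		Filter:    "status.state = ACTIVE AND labels.env = staging", // hypothetical filter
	}
	var all []*dataprocpb.Cluster
	for {
		resp, err := c.ListClusters(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetClusters()...)
		if resp.GetNextPageToken() == "" {
			return all, nil // last page reached
		}
		req.PageToken = resp.GetNextPageToken()
	}
}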
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{17} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to collect cluster diagnostic information. +type DiagnoseClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. 
+ ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiagnoseClusterRequest) Reset() { *m = DiagnoseClusterRequest{} } +func (m *DiagnoseClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DiagnoseClusterRequest) ProtoMessage() {} +func (*DiagnoseClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{18} +} +func (m *DiagnoseClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiagnoseClusterRequest.Unmarshal(m, b) +} +func (m *DiagnoseClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiagnoseClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DiagnoseClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiagnoseClusterRequest.Merge(dst, src) +} +func (m *DiagnoseClusterRequest) XXX_Size() int { + return xxx_messageInfo_DiagnoseClusterRequest.Size(m) +} +func (m *DiagnoseClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DiagnoseClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DiagnoseClusterRequest proto.InternalMessageInfo + +func (m *DiagnoseClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DiagnoseClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DiagnoseClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +// The location of diagnostic output. +type DiagnoseClusterResults struct { + // Output only. The Cloud Storage URI of the diagnostic output. + // The output report is a plain text file with a summary of collected + // diagnostics. 
+ OutputUri string `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiagnoseClusterResults) Reset() { *m = DiagnoseClusterResults{} } +func (m *DiagnoseClusterResults) String() string { return proto.CompactTextString(m) } +func (*DiagnoseClusterResults) ProtoMessage() {} +func (*DiagnoseClusterResults) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_7d2ba1001a92fd64, []int{19} +} +func (m *DiagnoseClusterResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiagnoseClusterResults.Unmarshal(m, b) +} +func (m *DiagnoseClusterResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiagnoseClusterResults.Marshal(b, m, deterministic) +} +func (dst *DiagnoseClusterResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiagnoseClusterResults.Merge(dst, src) +} +func (m *DiagnoseClusterResults) XXX_Size() int { + return xxx_messageInfo_DiagnoseClusterResults.Size(m) +} +func (m *DiagnoseClusterResults) XXX_DiscardUnknown() { + xxx_messageInfo_DiagnoseClusterResults.DiscardUnknown(m) +} + +var xxx_messageInfo_DiagnoseClusterResults proto.InternalMessageInfo + +func (m *DiagnoseClusterResults) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func init() { + proto.RegisterType((*Cluster)(nil), "google.cloud.dataproc.v1.Cluster") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.Cluster.LabelsEntry") + proto.RegisterType((*ClusterConfig)(nil), "google.cloud.dataproc.v1.ClusterConfig") + proto.RegisterType((*EncryptionConfig)(nil), "google.cloud.dataproc.v1.EncryptionConfig") + proto.RegisterType((*GceClusterConfig)(nil), "google.cloud.dataproc.v1.GceClusterConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry") + proto.RegisterType((*InstanceGroupConfig)(nil), "google.cloud.dataproc.v1.InstanceGroupConfig") + proto.RegisterType((*ManagedGroupConfig)(nil), "google.cloud.dataproc.v1.ManagedGroupConfig") + proto.RegisterType((*AcceleratorConfig)(nil), "google.cloud.dataproc.v1.AcceleratorConfig") + proto.RegisterType((*DiskConfig)(nil), "google.cloud.dataproc.v1.DiskConfig") + proto.RegisterType((*NodeInitializationAction)(nil), "google.cloud.dataproc.v1.NodeInitializationAction") + proto.RegisterType((*ClusterStatus)(nil), "google.cloud.dataproc.v1.ClusterStatus") + proto.RegisterType((*SoftwareConfig)(nil), "google.cloud.dataproc.v1.SoftwareConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry") + proto.RegisterType((*ClusterMetrics)(nil), "google.cloud.dataproc.v1.ClusterMetrics") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry") + proto.RegisterType((*CreateClusterRequest)(nil), "google.cloud.dataproc.v1.CreateClusterRequest") + proto.RegisterType((*UpdateClusterRequest)(nil), "google.cloud.dataproc.v1.UpdateClusterRequest") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.cloud.dataproc.v1.DeleteClusterRequest") + proto.RegisterType((*GetClusterRequest)(nil), "google.cloud.dataproc.v1.GetClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.cloud.dataproc.v1.ListClustersRequest") + 
proto.RegisterType((*ListClustersResponse)(nil), "google.cloud.dataproc.v1.ListClustersResponse") + proto.RegisterType((*DiagnoseClusterRequest)(nil), "google.cloud.dataproc.v1.DiagnoseClusterRequest") + proto.RegisterType((*DiagnoseClusterResults)(nil), "google.cloud.dataproc.v1.DiagnoseClusterResults") + proto.RegisterEnum("google.cloud.dataproc.v1.ClusterStatus_State", ClusterStatus_State_name, ClusterStatus_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1.ClusterStatus_Substate", ClusterStatus_Substate_name, ClusterStatus_Substate_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterControllerClient is the client API for ClusterController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterControllerClient interface { + // Creates a cluster in a project. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates a cluster in a project. + UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a cluster in a project. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets the resource representation for a cluster in a project. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Lists all regions/{region}/clusters in a project. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Gets cluster diagnostic information. + // After the operation completes, the Operation.response field + // contains `DiagnoseClusterOutputLocation`. + DiagnoseCluster(ctx context.Context, in *DiagnoseClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type clusterControllerClient struct { + cc *grpc.ClientConn +} + +func NewClusterControllerClient(cc *grpc.ClientConn) ClusterControllerClient { + return &clusterControllerClient{cc} +} + +func (c *clusterControllerClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/DeleteCluster", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) DiagnoseCluster(ctx context.Context, in *DiagnoseClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterControllerServer is the server API for ClusterController service. +type ClusterControllerServer interface { + // Creates a cluster in a project. + CreateCluster(context.Context, *CreateClusterRequest) (*longrunning.Operation, error) + // Updates a cluster in a project. + UpdateCluster(context.Context, *UpdateClusterRequest) (*longrunning.Operation, error) + // Deletes a cluster in a project. + DeleteCluster(context.Context, *DeleteClusterRequest) (*longrunning.Operation, error) + // Gets the resource representation for a cluster in a project. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Lists all regions/{region}/clusters in a project. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Gets cluster diagnostic information. + // After the operation completes, the Operation.response field + // contains `DiagnoseClusterOutputLocation`. 
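Wiring the client methods above to a connection follows the usual gRPC pattern. A minimal sketch (illustrative only, not part of the generated file): the endpoint and the insecure dial option are placeholders, since a real caller would use the regional Dataproc endpoint with proper credentials, and the project, region, and cluster names are hypothetical.

package example

import (
	"context"

	dataprocpb "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
	"google.golang.org/grpc"
)

// diagnoseCluster dials a gRPC endpoint, constructs the generated client, and
// starts a DiagnoseCluster operation. The returned longrunning.Operation would
// normally be polled for completion; that is out of scope for this sketch.
func diagnoseCluster(ctx context.Context, clusterName string) error {
	conn, err := grpc.Dial("dataproc.googleapis.com:443", grpc.WithInsecure()) // placeholder dial options
	if err != nil {
		return err
	}
	defer conn.Close()

	client := dataprocpb.NewClusterControllerClient(conn)
	op, err := client.DiagnoseCluster(ctx, &dataprocpb.DiagnoseClusterRequest{
		ProjectId:   "example-project", // hypothetical
		Region:      "us-east1",        // hypothetical
		ClusterName: clusterName,
	})
	if err != nil {
		return err
	}
	_ = op // operation handle; polling omitted
	return nil
}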
+ DiagnoseCluster(context.Context, *DiagnoseClusterRequest) (*longrunning.Operation, error) +} + +func RegisterClusterControllerServer(s *grpc.Server, srv ClusterControllerServer) { + s.RegisterService(&_ClusterController_serviceDesc, srv) +} + +func _ClusterController_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).UpdateCluster(ctx, req.(*UpdateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/ListClusters", + } + handler := 
func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_DiagnoseCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DiagnoseClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).DiagnoseCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).DiagnoseCluster(ctx, req.(*DiagnoseClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClusterController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1.ClusterController", + HandlerType: (*ClusterControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateCluster", + Handler: _ClusterController_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _ClusterController_UpdateCluster_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _ClusterController_DeleteCluster_Handler, + }, + { + MethodName: "GetCluster", + Handler: _ClusterController_GetCluster_Handler, + }, + { + MethodName: "ListClusters", + Handler: _ClusterController_ListClusters_Handler, + }, + { + MethodName: "DiagnoseCluster", + Handler: _ClusterController_DiagnoseCluster_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1/clusters.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1/clusters.proto", fileDescriptor_clusters_7d2ba1001a92fd64) +} + +var fileDescriptor_clusters_7d2ba1001a92fd64 = []byte{ + // 2079 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcd, 0x73, 0x1c, 0x47, + 0x15, 0xcf, 0x68, 0xb5, 0xd2, 0xea, 0xed, 0x87, 0x56, 0x1d, 0x45, 0x6c, 0x94, 0x84, 0x38, 0x93, + 0x80, 0x15, 0x27, 0xec, 0xda, 0x0a, 0x14, 0x89, 0x4d, 0x42, 0x64, 0xed, 0xda, 0x16, 0x96, 0xd7, + 0x62, 0x76, 0x65, 0x27, 0x14, 0x30, 0xd5, 0x3b, 0xd3, 0x5a, 0x37, 0x9a, 0xaf, 0x4c, 0xf7, 0x28, + 0x59, 0xbb, 0x7c, 0x81, 0x2a, 0xaa, 0x28, 0x8e, 0x54, 0x71, 0xe6, 0x00, 0x55, 0x90, 0x63, 0xe0, + 0xc4, 0x3f, 0xc0, 0x85, 0xe2, 0xc2, 0xbf, 0xc0, 0x81, 0x3f, 0x83, 0xea, 0x8f, 0xd9, 0x9d, 0x59, + 0x7d, 0xac, 0x64, 0x5c, 0x39, 0x69, 0xe6, 0xf5, 0xef, 0xbd, 0xf7, 0xeb, 0xee, 0xd7, 0xbf, 0x79, + 0xbd, 0x82, 0xcb, 0xc3, 0x30, 0x1c, 0x7a, 0xa4, 0xe5, 0x78, 0x61, 0xe2, 0xb6, 0x5c, 0xcc, 0x71, + 0x14, 0x87, 0x4e, 0xeb, 0xe8, 0x5a, 0xcb, 0xf1, 0x12, 0xc6, 0x49, 0xcc, 0x9a, 0x51, 0x1c, 0xf2, + 0x10, 0x35, 0x14, 0xb0, 0x29, 0x81, 0xcd, 0x14, 0xd8, 0x3c, 0xba, 0xb6, 0xfe, 0xaa, 0x0e, 0x81, + 0x23, 0xda, 0xc2, 0x41, 0x10, 0x72, 0xcc, 0x69, 0x18, 0x68, 0xbf, 0xf5, 0xb7, 0x4f, 0x4d, 0x10, + 0x46, 0x24, 0xce, 0x41, 0xdf, 0xd4, 0x50, 0x2f, 0x0c, 0x86, 0x71, 0x12, 0x04, 0x34, 0x18, 0x1e, + 0x07, 0x7d, 0x53, 0x83, 0xe4, 0xdb, 0x20, 0x39, 0x68, 0xb9, 0x89, 0x02, 0xe8, 0xf1, 0x4b, 0xd3, + 0xe3, 0x07, 0x94, 0x78, 0xae, 0xed, 0x63, 0x76, 0xa8, 0x11, 0xaf, 0x4f, 0x23, 0x38, 0xf5, 0x09, + 0xe3, 0xd8, 0x8f, 0x14, 0xc0, 0xfc, 0xf5, 0x3c, 0x2c, 0x6e, 0xab, 0xd9, 0xa3, 0xd7, 0x00, 0xa2, + 0x38, 0xfc, 0x05, 0x71, 0xb8, 0x4d, 0xdd, 0x86, 0x71, 0xc9, 0xd8, 0x58, 0xb2, 
0x96, 0xb4, 0x65, + 0xc7, 0x45, 0x6f, 0x40, 0x45, 0xaf, 0x93, 0x1d, 0x60, 0x9f, 0x34, 0xe6, 0x24, 0xa0, 0xac, 0x6d, + 0x5d, 0xec, 0x13, 0xf4, 0x43, 0x58, 0x70, 0xc2, 0xe0, 0x80, 0x0e, 0x1b, 0x85, 0x4b, 0xc6, 0x46, + 0x79, 0xf3, 0x72, 0xf3, 0xb4, 0x95, 0x6c, 0xea, 0xa4, 0xdb, 0x12, 0x6e, 0x69, 0x37, 0xd4, 0x81, + 0x05, 0x0f, 0x0f, 0x88, 0xc7, 0x1a, 0xa5, 0x4b, 0x85, 0x8d, 0xf2, 0xe6, 0x77, 0x66, 0x06, 0x68, + 0xee, 0x4a, 0x7c, 0x27, 0xe0, 0xf1, 0xc8, 0xd2, 0xce, 0x82, 0x07, 0xe3, 0x98, 0x27, 0xac, 0x31, + 0x7f, 0x4e, 0x1e, 0x3d, 0x09, 0xb7, 0xb4, 0x1b, 0xea, 0x42, 0x4d, 0x3d, 0xd9, 0x8f, 0x28, 0xe3, + 0x61, 0x3c, 0x6a, 0x2c, 0x4a, 0x3e, 0xe7, 0x0e, 0x54, 0x55, 0xee, 0x77, 0x94, 0x77, 0x76, 0xed, + 0x92, 0x84, 0xba, 0x8d, 0x85, 0xdc, 0xda, 0xed, 0x27, 0xd4, 0x45, 0x37, 0x61, 0xd1, 0x27, 0x3c, + 0xa6, 0x0e, 0x6b, 0x2c, 0x49, 0xd2, 0x1b, 0x33, 0x73, 0xdd, 0x53, 0x78, 0x2b, 0x75, 0x5c, 0xff, + 0x00, 0xca, 0x99, 0xe5, 0x40, 0x75, 0x28, 0x1c, 0x92, 0x91, 0xde, 0x49, 0xf1, 0x88, 0x56, 0xa1, + 0x78, 0x84, 0xbd, 0x24, 0xdd, 0x3c, 0xf5, 0x72, 0x7d, 0xee, 0x7d, 0xc3, 0xfc, 0x5b, 0x11, 0xaa, + 0xb9, 0x3d, 0x41, 0x6f, 0x42, 0x55, 0xed, 0x8a, 0x3d, 0x48, 0x9c, 0x43, 0xc2, 0x75, 0x9c, 0x8a, + 0x32, 0xde, 0x94, 0x36, 0xf4, 0x09, 0xa0, 0xa1, 0x43, 0xec, 0x74, 0x72, 0x7a, 0xf7, 0x4b, 0x72, + 0x02, 0x57, 0x4e, 0x9f, 0xc0, 0x6d, 0x87, 0xe4, 0x0b, 0xa0, 0x3e, 0x9c, 0xb2, 0x20, 0x0b, 0xaa, + 0x3e, 0xce, 0x06, 0x55, 0xab, 0x72, 0x46, 0x45, 0xec, 0x04, 0x8c, 0xe3, 0xc0, 0x21, 0xb7, 0xe3, + 0x30, 0x89, 0x74, 0xdc, 0x8a, 0x8a, 0x31, 0x89, 0xf9, 0x79, 0x18, 0x1f, 0x4e, 0x62, 0xc2, 0x33, + 0xc5, 0x54, 0x31, 0x74, 0x4c, 0x02, 0xdf, 0x60, 0xc4, 0x09, 0x03, 0x17, 0xc7, 0x23, 0x3b, 0x1f, + 0xbd, 0xf2, 0x2c, 0xd1, 0x5f, 0x1a, 0x47, 0x7b, 0x98, 0x4d, 0xf3, 0x63, 0x58, 0x66, 0xe1, 0x01, + 0xff, 0x1c, 0xc7, 0x24, 0x0d, 0x5f, 0x9d, 0x55, 0x26, 0x3d, 0xed, 0xa0, 0x23, 0xd7, 0x58, 0xee, + 0x1d, 0x51, 0x58, 0xa3, 0x01, 0xe5, 0x14, 0x7b, 0xf4, 0xb1, 0x94, 0x15, 0x1b, 0x3b, 0x52, 0x7e, + 0x1a, 0x65, 0x59, 0xec, 0x9b, 0xa7, 0x47, 0xee, 0x86, 0x2e, 0xd9, 0xc9, 0xf9, 0x6e, 0x49, 0x57, + 0xeb, 0x25, 0x7a, 0x82, 0x95, 0xa1, 0x87, 0xb0, 0x42, 0x02, 0x27, 0x1e, 0x45, 0x32, 0x8d, 0xe6, + 0xbf, 0x3c, 0xab, 0x4a, 0x3a, 0x63, 0x97, 0xb4, 0x4a, 0xc8, 0x94, 0xc5, 0xfc, 0x18, 0xea, 0xd3, + 0x28, 0xf4, 0x2e, 0xbc, 0x28, 0x6a, 0x32, 0x72, 0xed, 0x43, 0x9f, 0xd9, 0x87, 0x64, 0xa4, 0xf4, + 0x4a, 0x95, 0xef, 0xf2, 0xd0, 0x21, 0x7b, 0xee, 0x5d, 0x9f, 0xdd, 0x25, 0x23, 0xa1, 0x59, 0xe6, + 0x9f, 0x0a, 0x50, 0x9f, 0x2e, 0x47, 0xf4, 0x32, 0x94, 0x1e, 0x87, 0x01, 0xb1, 0x93, 0x98, 0x6a, + 0xbf, 0x45, 0xf1, 0xbe, 0x1f, 0x53, 0xf4, 0x3a, 0x94, 0x03, 0xc2, 0xc5, 0x46, 0xcb, 0x51, 0x75, + 0x90, 0x40, 0x9b, 0x04, 0xe0, 0x5b, 0x50, 0x63, 0xc9, 0x20, 0x8b, 0x51, 0xa7, 0xbd, 0x3a, 0xb1, + 0x0a, 0xd8, 0x06, 0xd4, 0x69, 0xc0, 0x49, 0x1c, 0x60, 0xcf, 0xa6, 0x91, 0x1d, 0x06, 0x9e, 0x10, + 0x19, 0x63, 0xa3, 0x64, 0xd5, 0x52, 0xfb, 0x4e, 0x74, 0x3f, 0xf0, 0x46, 0xe8, 0x32, 0x2c, 0x33, + 0x12, 0x1f, 0x51, 0x87, 0xd8, 0xd8, 0x71, 0xc2, 0x24, 0xe0, 0xf2, 0x80, 0x2d, 0x59, 0x35, 0x6d, + 0xde, 0x52, 0x56, 0xf4, 0x5d, 0x58, 0x9b, 0x02, 0xda, 0xcc, 0x09, 0x23, 0xc2, 0x1a, 0x85, 0x4b, + 0x85, 0x8d, 0x25, 0x6b, 0x35, 0x8f, 0xef, 0xc9, 0x31, 0x84, 0x60, 0x9e, 0xe3, 0xa1, 0x90, 0x4a, + 0x81, 0x91, 0xcf, 0xa8, 0x0f, 0x25, 0x9f, 0x70, 0x2c, 0x36, 0xa3, 0x51, 0x94, 0xc5, 0xf0, 0xfe, + 0xf9, 0x0f, 0x73, 0xf3, 0x9e, 0x76, 0x55, 0xa2, 0x3c, 0x8e, 0xb4, 0x7e, 0x03, 0xaa, 0xb9, 0xa1, + 0x0b, 0x09, 0xd4, 0x3f, 0x0a, 0xf0, 0xe2, 0x09, 0xe7, 0x45, 0xc8, 0x54, 0x90, 0xf8, 0x36, 0xd5, + 0x43, 
0x4c, 0x46, 0x2b, 0x5a, 0x95, 0x20, 0xf1, 0x53, 0x38, 0x13, 0x7b, 0x92, 0x02, 0x64, 0x31, + 0xb0, 0xc6, 0x9c, 0x9c, 0x6d, 0x35, 0xb5, 0x8a, 0x52, 0x60, 0xe8, 0x15, 0x58, 0xa2, 0x3e, 0x1e, + 0xaa, 0x7d, 0x2f, 0x48, 0x06, 0x25, 0x69, 0xd0, 0x1b, 0xe6, 0x63, 0xe7, 0x11, 0x0d, 0x88, 0xcd, + 0x47, 0x91, 0xc2, 0xcc, 0xab, 0x7d, 0xd0, 0xf6, 0xfe, 0x28, 0x92, 0xc8, 0x0e, 0x94, 0x5d, 0xca, + 0x0e, 0xd3, 0x3a, 0x2f, 0xca, 0x3a, 0x7f, 0xeb, 0xf4, 0x05, 0x6c, 0x53, 0x76, 0xa8, 0x2b, 0x1c, + 0xdc, 0xf1, 0xb3, 0x24, 0xcd, 0xec, 0x28, 0x26, 0xc4, 0x8f, 0x38, 0x1d, 0x78, 0x44, 0x16, 0x52, + 0xc9, 0xaa, 0x52, 0xb6, 0x37, 0x31, 0xa2, 0x9f, 0xc3, 0xaa, 0x8f, 0x03, 0x3c, 0x24, 0xae, 0x3d, + 0x14, 0xeb, 0x92, 0xa6, 0x5d, 0x94, 0x69, 0xdf, 0x3d, 0x3d, 0xed, 0x3d, 0xe5, 0x95, 0x15, 0x1f, + 0xe4, 0x1f, 0xb3, 0xa1, 0xfb, 0x50, 0xc1, 0x8e, 0x43, 0x3c, 0xd1, 0x9c, 0x84, 0x71, 0xfa, 0x65, + 0x7e, 0xe7, 0xf4, 0xb8, 0x5b, 0x13, 0x74, 0xaa, 0x98, 0xd9, 0x00, 0xe6, 0x6f, 0x0c, 0x40, 0xc7, + 0x73, 0x8b, 0xea, 0x1d, 0xef, 0x11, 0x27, 0x7e, 0xe4, 0x61, 0x4e, 0xb2, 0x27, 0x77, 0x35, 0x1d, + 0xed, 0xeb, 0x41, 0xd9, 0x72, 0x7c, 0x08, 0xaf, 0x8c, 0xbd, 0xd4, 0xf4, 0xd5, 0x14, 0x72, 0x4d, + 0x4a, 0x83, 0x66, 0x0b, 0x47, 0xe5, 0x96, 0x1d, 0x8b, 0x19, 0xc3, 0xca, 0x31, 0xba, 0xe8, 0x2a, + 0xac, 0x66, 0x08, 0x4f, 0x76, 0x5b, 0xf1, 0x40, 0x99, 0xb1, 0x74, 0xc7, 0xdf, 0x81, 0x95, 0xac, + 0x87, 0x3a, 0xa4, 0x73, 0xb2, 0x10, 0xeb, 0x38, 0x1b, 0x3f, 0x09, 0xb8, 0xf9, 0x2b, 0x03, 0x60, + 0xb2, 0xe5, 0xe8, 0x2d, 0xa8, 0x0d, 0xc2, 0x90, 0xdb, 0xb2, 0x64, 0x44, 0x2e, 0x5d, 0x79, 0x15, + 0x61, 0x15, 0x38, 0x91, 0x04, 0xbd, 0x0d, 0x2b, 0x13, 0x14, 0xa3, 0x8f, 0x89, 0x3d, 0x1c, 0xe8, + 0x52, 0xaf, 0xa5, 0xc0, 0x1e, 0x7d, 0x4c, 0x6e, 0x0f, 0x44, 0x40, 0x71, 0x22, 0xbc, 0xd0, 0xc1, + 0x9e, 0xcd, 0x98, 0xcb, 0x34, 0x13, 0x71, 0x24, 0x76, 0x85, 0xb1, 0xc7, 0x5c, 0x66, 0xfe, 0xd6, + 0x80, 0xc6, 0x69, 0x32, 0x2e, 0x24, 0x87, 0x7c, 0x41, 0x9c, 0x84, 0xe3, 0x81, 0x47, 0xec, 0x03, + 0xea, 0xa5, 0x9b, 0x50, 0x9b, 0x98, 0x6f, 0x51, 0x8f, 0xa0, 0x5b, 0xb0, 0xa2, 0x2c, 0x42, 0xd7, + 0x45, 0x73, 0x19, 0x26, 0x6a, 0xe2, 0xe5, 0xcd, 0x97, 0xd3, 0x0a, 0x49, 0x9b, 0xcf, 0x66, 0x5b, + 0xb7, 0xaf, 0x56, 0x7d, 0xec, 0xd3, 0x57, 0x2e, 0xe6, 0xef, 0x0b, 0xe3, 0xf6, 0x43, 0x75, 0x50, + 0x68, 0x1b, 0x8a, 0xa2, 0x87, 0x52, 0x89, 0x6b, 0xe7, 0xe8, 0x04, 0x95, 0x5f, 0x53, 0xfc, 0x21, + 0x96, 0xf2, 0x45, 0x6b, 0xb0, 0xe0, 0x12, 0x8e, 0xa9, 0xa7, 0x0b, 0x41, 0xbf, 0xa1, 0x36, 0xd4, + 0x25, 0xc0, 0x66, 0x1c, 0xc7, 0x5c, 0x12, 0xd7, 0x2d, 0xeb, 0xfa, 0x31, 0xd6, 0xfd, 0xb4, 0x65, + 0xb6, 0x64, 0x4f, 0x48, 0x7a, 0xc2, 0x45, 0x18, 0xd1, 0x2e, 0x94, 0x58, 0x32, 0x50, 0x2c, 0xe7, + 0x25, 0xcb, 0xab, 0xe7, 0x66, 0xa9, 0xfd, 0xac, 0x71, 0x04, 0xf3, 0x01, 0x14, 0x25, 0x77, 0x54, + 0x86, 0xc5, 0xfd, 0xee, 0xdd, 0xee, 0xfd, 0x87, 0xdd, 0xfa, 0x0b, 0xa8, 0x02, 0xa5, 0x6d, 0xab, + 0xb3, 0xd5, 0xdf, 0xe9, 0xde, 0xae, 0x1b, 0x62, 0xc8, 0xda, 0xef, 0x76, 0xc5, 0xcb, 0x1c, 0x5a, + 0x82, 0x62, 0xc7, 0xb2, 0xee, 0x5b, 0xf5, 0x82, 0x40, 0xb5, 0x3b, 0xbb, 0x1d, 0x89, 0x9a, 0x17, + 0x6f, 0xfb, 0x7b, 0x6d, 0xe5, 0x53, 0x34, 0x7f, 0x00, 0xa5, 0x34, 0x1b, 0x5a, 0x86, 0xf2, 0x7e, + 0xb7, 0xb7, 0xd7, 0xd9, 0xde, 0xb9, 0xb5, 0xd3, 0x69, 0xd7, 0x5f, 0x40, 0x55, 0x58, 0xda, 0xef, + 0xde, 0xe9, 0x6c, 0xed, 0xf6, 0xef, 0x7c, 0x5a, 0x37, 0x50, 0x1d, 0x2a, 0xbd, 0xfe, 0xd6, 0x6e, + 0xc7, 0xee, 0xf5, 0xb7, 0xfa, 0xfb, 0xbd, 0xfa, 0x9c, 0xf9, 0x2f, 0x03, 0x6a, 0xf9, 0x3e, 0x42, + 0x28, 0xae, 0x52, 0xc9, 0x23, 0x12, 0x33, 0x1a, 0x06, 0x69, 0x63, 0x28, 0x8d, 0x0f, 0x94, 0x0d, + 0x7d, 0x22, 0x2f, 0x13, 0x11, 
0x89, 0x39, 0xd5, 0x6a, 0x7b, 0xe6, 0x37, 0x24, 0x9f, 0xa2, 0xb9, + 0x37, 0x76, 0x55, 0xdf, 0x90, 0x4c, 0xac, 0xf5, 0x0f, 0x61, 0x79, 0x6a, 0xf8, 0x42, 0xdf, 0x91, + 0x7f, 0xce, 0x41, 0x2d, 0xdf, 0x3f, 0xa3, 0x9f, 0x42, 0xe5, 0x91, 0x7b, 0xc0, 0xec, 0xb4, 0xff, + 0x36, 0x24, 0xdb, 0x0f, 0xce, 0xdb, 0x7f, 0x37, 0xef, 0xb8, 0x07, 0x4c, 0x3f, 0x2b, 0xba, 0xe5, + 0x47, 0x13, 0x8b, 0x88, 0x3e, 0xc2, 0x71, 0x30, 0x8e, 0x3e, 0x77, 0xc1, 0xe8, 0x9f, 0xe2, 0x38, + 0xc8, 0x47, 0x1f, 0x4d, 0x2c, 0xeb, 0x1f, 0x41, 0x7d, 0x3a, 0xfd, 0xac, 0xe5, 0x28, 0x64, 0x96, + 0x43, 0xf8, 0x4f, 0x27, 0xb8, 0x88, 0xbf, 0xf9, 0x17, 0x03, 0x56, 0xb7, 0x63, 0x82, 0x79, 0xda, + 0x03, 0x58, 0xe4, 0xb3, 0x84, 0x30, 0x3e, 0xeb, 0x36, 0xb9, 0x06, 0x0b, 0x31, 0x19, 0x8a, 0xea, + 0x51, 0x6a, 0xa7, 0xdf, 0xd0, 0x0d, 0x58, 0xd4, 0x97, 0x09, 0x2d, 0x23, 0x6f, 0xcc, 0x5c, 0x28, + 0x2b, 0xf5, 0x10, 0x39, 0x63, 0x95, 0x5e, 0xe4, 0x54, 0x1f, 0xe7, 0x25, 0x6d, 0xd9, 0x71, 0xcd, + 0xff, 0xce, 0xc1, 0xea, 0x7e, 0xe4, 0xfe, 0x1f, 0x5c, 0x8b, 0x39, 0xae, 0xe7, 0xb8, 0x11, 0x67, + 0xa6, 0x53, 0xb8, 0xf0, 0x74, 0x7e, 0x06, 0xaf, 0x0d, 0x63, 0xec, 0x90, 0x83, 0xc4, 0xb3, 0x5d, + 0xe2, 0x84, 0xbe, 0x4f, 0x19, 0xcb, 0x0a, 0xed, 0xc2, 0x2c, 0xa1, 0x7d, 0x25, 0xf5, 0x6f, 0x67, + 0xdc, 0xb5, 0xe6, 0xa2, 0x1b, 0x50, 0x4e, 0xe4, 0x6a, 0xc8, 0x5f, 0x0c, 0xf4, 0x55, 0xf9, 0xb8, + 0xfe, 0xdd, 0xa2, 0xc4, 0x73, 0xef, 0x61, 0x76, 0x68, 0x81, 0x82, 0x8b, 0xe7, 0xa9, 0xa5, 0x5e, + 0x9c, 0x5e, 0xea, 0xaf, 0x0c, 0x58, 0x6d, 0x13, 0x8f, 0x3c, 0xaf, 0xb2, 0x38, 0xc7, 0x52, 0x4f, + 0xdf, 0xb1, 0xe7, 0x8f, 0xdf, 0xb1, 0xf3, 0xa4, 0x8b, 0xd3, 0xa4, 0x7d, 0x58, 0xb9, 0x4d, 0xf8, + 0xd7, 0x45, 0xd8, 0xfc, 0x83, 0x01, 0x2f, 0xee, 0x52, 0x96, 0x26, 0x64, 0x17, 0xce, 0x38, 0x9f, + 0xcb, 0xb8, 0x06, 0x0b, 0x07, 0xd4, 0x13, 0x95, 0xa6, 0xab, 0x54, 0xbd, 0x89, 0xa6, 0x36, 0x12, + 0x6a, 0x2d, 0x9a, 0x06, 0xdd, 0x09, 0x94, 0x84, 0x41, 0x74, 0x0b, 0x32, 0x97, 0x18, 0xe4, 0xe1, + 0x21, 0x49, 0xa7, 0x20, 0xe1, 0x7d, 0x61, 0x30, 0x9f, 0xc2, 0x6a, 0x9e, 0x21, 0x8b, 0xc2, 0x80, + 0x89, 0xae, 0xab, 0x94, 0xfe, 0x66, 0xa6, 0xd5, 0xf2, 0x1c, 0x75, 0x3d, 0x76, 0x41, 0xdf, 0x86, + 0xe5, 0x80, 0x7c, 0xc1, 0xed, 0x4c, 0x6a, 0xb5, 0x3e, 0x55, 0x61, 0xde, 0x1b, 0xa7, 0x8f, 0x61, + 0xad, 0x4d, 0xf1, 0x30, 0x08, 0xd9, 0xd7, 0x56, 0x46, 0xe6, 0xf7, 0x4f, 0xc8, 0xc9, 0x12, 0x8f, + 0x33, 0x91, 0x33, 0x4c, 0x78, 0x94, 0xf0, 0x4c, 0x33, 0xb8, 0xa4, 0x2c, 0xfb, 0x31, 0xdd, 0xfc, + 0xaa, 0x04, 0x2b, 0x93, 0x7b, 0x10, 0x8f, 0x43, 0xcf, 0x23, 0x31, 0xfa, 0xa3, 0x01, 0xd5, 0x9c, + 0x3e, 0xa2, 0xe6, 0x19, 0x2b, 0x75, 0x82, 0x90, 0xae, 0xbf, 0x96, 0xe2, 0x33, 0xbf, 0x15, 0x36, + 0xef, 0xa7, 0xbf, 0x15, 0x9a, 0xed, 0x5f, 0xfe, 0xfb, 0x3f, 0xbf, 0x9b, 0xfb, 0xc8, 0x7c, 0xaf, + 0x75, 0x74, 0xad, 0xa5, 0x57, 0x80, 0xb5, 0x9e, 0x4c, 0x56, 0xe7, 0x69, 0x4b, 0x4d, 0x9e, 0xb5, + 0x9e, 0xa8, 0x87, 0xa7, 0xe3, 0xdf, 0x3b, 0xaf, 0x8f, 0xa5, 0xe6, 0xaf, 0x06, 0x54, 0x73, 0xd2, + 0x78, 0x16, 0xcd, 0x93, 0x34, 0x74, 0x16, 0xcd, 0x9e, 0xa4, 0x79, 0x6f, 0xf3, 0xe6, 0x33, 0xd0, + 0x6c, 0x3d, 0xc9, 0x6e, 0xda, 0xd3, 0x09, 0xeb, 0x2f, 0x0d, 0xa8, 0xe6, 0x54, 0xe6, 0x2c, 0xd6, + 0x27, 0xc9, 0xd1, 0x2c, 0xd6, 0x3f, 0x92, 0xac, 0xdb, 0x57, 0x9e, 0x03, 0x6b, 0xf4, 0x67, 0x03, + 0x60, 0x22, 0x2f, 0xe8, 0x8c, 0x0b, 0xd4, 0x31, 0x11, 0x5a, 0x9f, 0x7d, 0xba, 0x52, 0xaa, 0xe8, + 0x79, 0x50, 0xfd, 0xd2, 0x80, 0x4a, 0xf6, 0xdc, 0xa3, 0x33, 0xba, 0xef, 0x13, 0x14, 0x6c, 0xbd, + 0x79, 0x5e, 0xb8, 0x92, 0x13, 0xf3, 0x86, 0xe4, 0xfe, 0x3d, 0xf4, 0x2c, 0x35, 0x8c, 0xfe, 0x6e, + 0xc0, 0xf2, 0xd4, 0x89, 0x45, 0x57, 0xcf, 0xba, 0x6c, 
0x9f, 0x24, 0x28, 0xb3, 0x0a, 0xe1, 0x81, + 0x64, 0xb8, 0x67, 0xde, 0x7d, 0x0e, 0xe5, 0xeb, 0x6a, 0x06, 0xd7, 0x8d, 0x2b, 0x37, 0x3f, 0x83, + 0x57, 0x9d, 0xd0, 0x3f, 0x95, 0xed, 0xcd, 0xf4, 0x52, 0xc4, 0xf6, 0xc4, 0xe7, 0x78, 0xcf, 0xf8, + 0xc9, 0xc7, 0x1a, 0x3a, 0x0c, 0x3d, 0x1c, 0x0c, 0x9b, 0x61, 0x3c, 0x6c, 0x0d, 0x49, 0x20, 0x3f, + 0xd6, 0x2d, 0x35, 0x84, 0x23, 0xca, 0x8e, 0xff, 0x0b, 0xe2, 0x46, 0xfa, 0x3c, 0x58, 0x90, 0xe0, + 0xf7, 0xfe, 0x17, 0x00, 0x00, 0xff, 0xff, 0xf6, 0x97, 0xd2, 0xcf, 0x0f, 0x19, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/jobs.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/jobs.pb.go new file mode 100644 index 0000000..43daa05 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/jobs.pb.go @@ -0,0 +1,3119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1/jobs.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The Log4j level for job execution. When running an +// [Apache Hive](http://hive.apache.org/) job, Cloud +// Dataproc configures the Hive client to an equivalent verbosity level. +type LoggingConfig_Level int32 + +const ( + // Level is unspecified. Use default level for log4j. + LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0 + // Use ALL level for log4j. + LoggingConfig_ALL LoggingConfig_Level = 1 + // Use TRACE level for log4j. + LoggingConfig_TRACE LoggingConfig_Level = 2 + // Use DEBUG level for log4j. + LoggingConfig_DEBUG LoggingConfig_Level = 3 + // Use INFO level for log4j. + LoggingConfig_INFO LoggingConfig_Level = 4 + // Use WARN level for log4j. + LoggingConfig_WARN LoggingConfig_Level = 5 + // Use ERROR level for log4j. + LoggingConfig_ERROR LoggingConfig_Level = 6 + // Use FATAL level for log4j. + LoggingConfig_FATAL LoggingConfig_Level = 7 + // Turn off log4j. + LoggingConfig_OFF LoggingConfig_Level = 8 +) + +var LoggingConfig_Level_name = map[int32]string{ + 0: "LEVEL_UNSPECIFIED", + 1: "ALL", + 2: "TRACE", + 3: "DEBUG", + 4: "INFO", + 5: "WARN", + 6: "ERROR", + 7: "FATAL", + 8: "OFF", +} +var LoggingConfig_Level_value = map[string]int32{ + "LEVEL_UNSPECIFIED": 0, + "ALL": 1, + "TRACE": 2, + "DEBUG": 3, + "INFO": 4, + "WARN": 5, + "ERROR": 6, + "FATAL": 7, + "OFF": 8, +} + +func (x LoggingConfig_Level) String() string { + return proto.EnumName(LoggingConfig_Level_name, int32(x)) +} +func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{0, 0} +} + +// The job state. 
+type JobStatus_State int32 + +const ( + // The job state is unknown. + JobStatus_STATE_UNSPECIFIED JobStatus_State = 0 + // The job is pending; it has been submitted, but is not yet running. + JobStatus_PENDING JobStatus_State = 1 + // Job has been received by the service and completed initial setup; + // it will soon be submitted to the cluster. + JobStatus_SETUP_DONE JobStatus_State = 8 + // The job is running on the cluster. + JobStatus_RUNNING JobStatus_State = 2 + // A CancelJob request has been received, but is pending. + JobStatus_CANCEL_PENDING JobStatus_State = 3 + // Transient in-flight resources have been canceled, and the request to + // cancel the running job has been issued to the cluster. + JobStatus_CANCEL_STARTED JobStatus_State = 7 + // The job cancellation was successful. + JobStatus_CANCELLED JobStatus_State = 4 + // The job has completed successfully. + JobStatus_DONE JobStatus_State = 5 + // The job has completed, but encountered an error. + JobStatus_ERROR JobStatus_State = 6 + // Job attempt has failed. The detail field contains failure details for + // this attempt. + // + // Applies to restartable jobs only. + JobStatus_ATTEMPT_FAILURE JobStatus_State = 9 +) + +var JobStatus_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PENDING", + 8: "SETUP_DONE", + 2: "RUNNING", + 3: "CANCEL_PENDING", + 7: "CANCEL_STARTED", + 4: "CANCELLED", + 5: "DONE", + 6: "ERROR", + 9: "ATTEMPT_FAILURE", +} +var JobStatus_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PENDING": 1, + "SETUP_DONE": 8, + "RUNNING": 2, + "CANCEL_PENDING": 3, + "CANCEL_STARTED": 7, + "CANCELLED": 4, + "DONE": 5, + "ERROR": 6, + "ATTEMPT_FAILURE": 9, +} + +func (x JobStatus_State) String() string { + return proto.EnumName(JobStatus_State_name, int32(x)) +} +func (JobStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{9, 0} +} + +// The job substate. +type JobStatus_Substate int32 + +const ( + // The job substate is unknown. + JobStatus_UNSPECIFIED JobStatus_Substate = 0 + // The Job is submitted to the agent. + // + // Applies to RUNNING state. + JobStatus_SUBMITTED JobStatus_Substate = 1 + // The Job has been received and is awaiting execution (it may be waiting + // for a condition to be met). See the "details" field for the reason for + // the delay. + // + // Applies to RUNNING state. + JobStatus_QUEUED JobStatus_Substate = 2 + // The agent-reported status is out of date, which may be caused by a + // loss of communication between the agent and Cloud Dataproc. If the + // agent does not send a timely update, the job will fail. + // + // Applies to RUNNING state. + JobStatus_STALE_STATUS JobStatus_Substate = 3 +) + +var JobStatus_Substate_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "SUBMITTED", + 2: "QUEUED", + 3: "STALE_STATUS", +} +var JobStatus_Substate_value = map[string]int32{ + "UNSPECIFIED": 0, + "SUBMITTED": 1, + "QUEUED": 2, + "STALE_STATUS": 3, +} + +func (x JobStatus_Substate) String() string { + return proto.EnumName(JobStatus_Substate_name, int32(x)) +} +func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{9, 1} +} + +// The application state, corresponding to +// YarnProtos.YarnApplicationStateProto. +type YarnApplication_State int32 + +const ( + // Status is unspecified. + YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0 + // Status is NEW. + YarnApplication_NEW YarnApplication_State = 1 + // Status is NEW_SAVING. 
+ YarnApplication_NEW_SAVING YarnApplication_State = 2 + // Status is SUBMITTED. + YarnApplication_SUBMITTED YarnApplication_State = 3 + // Status is ACCEPTED. + YarnApplication_ACCEPTED YarnApplication_State = 4 + // Status is RUNNING. + YarnApplication_RUNNING YarnApplication_State = 5 + // Status is FINISHED. + YarnApplication_FINISHED YarnApplication_State = 6 + // Status is FAILED. + YarnApplication_FAILED YarnApplication_State = 7 + // Status is KILLED. + YarnApplication_KILLED YarnApplication_State = 8 +) + +var YarnApplication_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "NEW", + 2: "NEW_SAVING", + 3: "SUBMITTED", + 4: "ACCEPTED", + 5: "RUNNING", + 6: "FINISHED", + 7: "FAILED", + 8: "KILLED", +} +var YarnApplication_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "NEW": 1, + "NEW_SAVING": 2, + "SUBMITTED": 3, + "ACCEPTED": 4, + "RUNNING": 5, + "FINISHED": 6, + "FAILED": 7, + "KILLED": 8, +} + +func (x YarnApplication_State) String() string { + return proto.EnumName(YarnApplication_State_name, int32(x)) +} +func (YarnApplication_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{11, 0} +} + +// A matcher that specifies categories of job states. +type ListJobsRequest_JobStateMatcher int32 + +const ( + // Match all jobs, regardless of state. + ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0 + // Only match jobs in non-terminal states: PENDING, RUNNING, or + // CANCEL_PENDING. + ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1 + // Only match jobs in terminal states: CANCELLED, DONE, or ERROR. + ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2 +) + +var ListJobsRequest_JobStateMatcher_name = map[int32]string{ + 0: "ALL", + 1: "ACTIVE", + 2: "NON_ACTIVE", +} +var ListJobsRequest_JobStateMatcher_value = map[string]int32{ + "ALL": 0, + "ACTIVE": 1, + "NON_ACTIVE": 2, +} + +func (x ListJobsRequest_JobStateMatcher) String() string { + return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x)) +} +func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{16, 0} +} + +// The runtime logging config of the job. +type LoggingConfig struct { + // The per-package log levels for the driver. This may include + // "root" package name to configure rootLogger. 
+ // Examples: + // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' + DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1.LoggingConfig_Level"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoggingConfig) Reset() { *m = LoggingConfig{} } +func (m *LoggingConfig) String() string { return proto.CompactTextString(m) } +func (*LoggingConfig) ProtoMessage() {} +func (*LoggingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{0} +} +func (m *LoggingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoggingConfig.Unmarshal(m, b) +} +func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic) +} +func (dst *LoggingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoggingConfig.Merge(dst, src) +} +func (m *LoggingConfig) XXX_Size() int { + return xxx_messageInfo_LoggingConfig.Size(m) +} +func (m *LoggingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LoggingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo + +func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level { + if m != nil { + return m.DriverLogLevels + } + return nil +} + +// A Cloud Dataproc job for running +// [Apache Hadoop +// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) +// jobs on [Apache Hadoop +// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html). +type HadoopJob struct { + // Required. Indicates the location of the driver's main class. Specify + // either the jar file that contains the main class or the main class name. + // To specify both, add the jar file to `jar_file_uris`, and then specify + // the main class name in this property. + // + // Types that are valid to be assigned to Driver: + // *HadoopJob_MainJarFileUri + // *HadoopJob_MainClass + Driver isHadoopJob_Driver `protobuf_oneof:"driver"` + // Optional. The arguments to pass to the driver. Do not + // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as + // job properties, since a collision may occur that causes an incorrect job + // submission. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Optional. Jar file URIs to add to the CLASSPATHs of the + // Hadoop driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied + // to the working directory of Hadoop drivers and distributed tasks. Useful + // for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory of + // Hadoop drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, or .zip. + ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure Hadoop. 
+ // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site and + // classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HadoopJob) Reset() { *m = HadoopJob{} } +func (m *HadoopJob) String() string { return proto.CompactTextString(m) } +func (*HadoopJob) ProtoMessage() {} +func (*HadoopJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{1} +} +func (m *HadoopJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HadoopJob.Unmarshal(m, b) +} +func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic) +} +func (dst *HadoopJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_HadoopJob.Merge(dst, src) +} +func (m *HadoopJob) XXX_Size() int { + return xxx_messageInfo_HadoopJob.Size(m) +} +func (m *HadoopJob) XXX_DiscardUnknown() { + xxx_messageInfo_HadoopJob.DiscardUnknown(m) +} + +var xxx_messageInfo_HadoopJob proto.InternalMessageInfo + +type isHadoopJob_Driver interface { + isHadoopJob_Driver() +} + +type HadoopJob_MainJarFileUri struct { + MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"` +} + +type HadoopJob_MainClass struct { + MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"` +} + +func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {} + +func (*HadoopJob_MainClass) isHadoopJob_Driver() {} + +func (m *HadoopJob) GetDriver() isHadoopJob_Driver { + if m != nil { + return m.Driver + } + return nil +} + +func (m *HadoopJob) GetMainJarFileUri() string { + if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok { + return x.MainJarFileUri + } + return "" +} + +func (m *HadoopJob) GetMainClass() string { + if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok { + return x.MainClass + } + return "" +} + +func (m *HadoopJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *HadoopJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *HadoopJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *HadoopJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *HadoopJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *HadoopJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{ + (*HadoopJob_MainJarFileUri)(nil), + (*HadoopJob_MainClass)(nil), + } +} + +func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HadoopJob) + // driver + switch x := m.Driver.(type) { + case *HadoopJob_MainJarFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainJarFileUri) + case *HadoopJob_MainClass: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainClass) + case nil: + default: + return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x) + } + return nil +} + +func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HadoopJob) + switch tag { + case 1: // driver.main_jar_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &HadoopJob_MainJarFileUri{x} + return true, err + case 2: // driver.main_class + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &HadoopJob_MainClass{x} + return true, err + default: + return false, nil + } +} + +func _HadoopJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HadoopJob) + // driver + switch x := m.Driver.(type) { + case *HadoopJob_MainJarFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainJarFileUri))) + n += len(x.MainJarFileUri) + case *HadoopJob_MainClass: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainClass))) + n += len(x.MainClass) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) +// applications on YARN. +type SparkJob struct { + // Required. The specification of the main method to call to drive the job. + // Specify either the jar file that contains the main class or the main class + // name. To pass both a main jar and a main class in that jar, add the jar to + // `CommonJob.jar_file_uris`, and then specify the main class name in + // `main_class`. + // + // Types that are valid to be assigned to Driver: + // *SparkJob_MainJarFileUri + // *SparkJob_MainClass + Driver isSparkJob_Driver `protobuf_oneof:"driver"` + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision may + // occur that causes an incorrect job submission. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the + // Spark driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS URIs of files to be copied to the working directory of + // Spark drivers and distributed tasks. Useful for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory + // of Spark drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, and .zip. 
+ ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure Spark. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in + // /etc/spark/conf/spark-defaults.conf and classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SparkJob) Reset() { *m = SparkJob{} } +func (m *SparkJob) String() string { return proto.CompactTextString(m) } +func (*SparkJob) ProtoMessage() {} +func (*SparkJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{2} +} +func (m *SparkJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SparkJob.Unmarshal(m, b) +} +func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic) +} +func (dst *SparkJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_SparkJob.Merge(dst, src) +} +func (m *SparkJob) XXX_Size() int { + return xxx_messageInfo_SparkJob.Size(m) +} +func (m *SparkJob) XXX_DiscardUnknown() { + xxx_messageInfo_SparkJob.DiscardUnknown(m) +} + +var xxx_messageInfo_SparkJob proto.InternalMessageInfo + +type isSparkJob_Driver interface { + isSparkJob_Driver() +} + +type SparkJob_MainJarFileUri struct { + MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"` +} + +type SparkJob_MainClass struct { + MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"` +} + +func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {} + +func (*SparkJob_MainClass) isSparkJob_Driver() {} + +func (m *SparkJob) GetDriver() isSparkJob_Driver { + if m != nil { + return m.Driver + } + return nil +} + +func (m *SparkJob) GetMainJarFileUri() string { + if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok { + return x.MainJarFileUri + } + return "" +} + +func (m *SparkJob) GetMainClass() string { + if x, ok := m.GetDriver().(*SparkJob_MainClass); ok { + return x.MainClass + } + return "" +} + +func (m *SparkJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *SparkJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *SparkJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *SparkJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *SparkJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *SparkJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
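// NOTE (editorial sketch, not part of the generated file): complements the
// HadoopJob sketch above by showing the other branch of the "driver" oneof,
// where SparkJob_MainJarFileUri points at a jar in HCFS instead of naming a
// class. The helper name exampleSparkJob, the URIs, and the property values
// are assumptions for demonstration only.
func exampleSparkJob() *SparkJob {
	return &SparkJob{
		Driver:      &SparkJob_MainJarFileUri{MainJarFileUri: "gs://my-bucket/jars/spark-app.jar"},
		JarFileUris: []string{"gs://my-bucket/jars/deps.jar"},
		Properties:  map[string]string{"spark.executor.memory": "2g"},
	}
}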
+func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{ + (*SparkJob_MainJarFileUri)(nil), + (*SparkJob_MainClass)(nil), + } +} + +func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SparkJob) + // driver + switch x := m.Driver.(type) { + case *SparkJob_MainJarFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainJarFileUri) + case *SparkJob_MainClass: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainClass) + case nil: + default: + return fmt.Errorf("SparkJob.Driver has unexpected type %T", x) + } + return nil +} + +func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SparkJob) + switch tag { + case 1: // driver.main_jar_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &SparkJob_MainJarFileUri{x} + return true, err + case 2: // driver.main_class + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &SparkJob_MainClass{x} + return true, err + default: + return false, nil + } +} + +func _SparkJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SparkJob) + // driver + switch x := m.Driver.(type) { + case *SparkJob_MainJarFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainJarFileUri))) + n += len(x.MainJarFileUri) + case *SparkJob_MainClass: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainClass))) + n += len(x.MainClass) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running +// [Apache +// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) +// applications on YARN. +type PySparkJob struct { + // Required. The HCFS URI of the main Python file to use as the driver. Must + // be a .py file. + MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"` + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision may + // occur that causes an incorrect job submission. + Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"` + // Optional. HCFS file URIs of Python files to pass to the PySpark + // framework. Supported file types: .py, .egg, and .zip. + PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"` + // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the + // Python driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS URIs of files to be copied to the working directory of + // Python drivers and distributed tasks. Useful for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory of + // .jar, .tar, .tar.gz, .tgz, and .zip. 
+ ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure PySpark. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in + // /etc/spark/conf/spark-defaults.conf and classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PySparkJob) Reset() { *m = PySparkJob{} } +func (m *PySparkJob) String() string { return proto.CompactTextString(m) } +func (*PySparkJob) ProtoMessage() {} +func (*PySparkJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{3} +} +func (m *PySparkJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PySparkJob.Unmarshal(m, b) +} +func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic) +} +func (dst *PySparkJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_PySparkJob.Merge(dst, src) +} +func (m *PySparkJob) XXX_Size() int { + return xxx_messageInfo_PySparkJob.Size(m) +} +func (m *PySparkJob) XXX_DiscardUnknown() { + xxx_messageInfo_PySparkJob.DiscardUnknown(m) +} + +var xxx_messageInfo_PySparkJob proto.InternalMessageInfo + +func (m *PySparkJob) GetMainPythonFileUri() string { + if m != nil { + return m.MainPythonFileUri + } + return "" +} + +func (m *PySparkJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *PySparkJob) GetPythonFileUris() []string { + if m != nil { + return m.PythonFileUris + } + return nil +} + +func (m *PySparkJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *PySparkJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *PySparkJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *PySparkJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *PySparkJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// A list of queries to run on a cluster. +type QueryList struct { + // Required. The queries to execute. You do not need to terminate a query + // with a semicolon. Multiple queries can be specified in one string + // by separating each with a semicolon. 
Here is an example of an Cloud + // Dataproc API snippet that uses a QueryList to specify a HiveJob: + // + // "hiveJob": { + // "queryList": { + // "queries": [ + // "query1", + // "query2", + // "query3;query4", + // ] + // } + // } + Queries []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryList) Reset() { *m = QueryList{} } +func (m *QueryList) String() string { return proto.CompactTextString(m) } +func (*QueryList) ProtoMessage() {} +func (*QueryList) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{4} +} +func (m *QueryList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryList.Unmarshal(m, b) +} +func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryList.Marshal(b, m, deterministic) +} +func (dst *QueryList) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryList.Merge(dst, src) +} +func (m *QueryList) XXX_Size() int { + return xxx_messageInfo_QueryList.Size(m) +} +func (m *QueryList) XXX_DiscardUnknown() { + xxx_messageInfo_QueryList.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryList proto.InternalMessageInfo + +func (m *QueryList) GetQueries() []string { + if m != nil { + return m.Queries + } + return nil +} + +// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) +// queries on YARN. +type HiveJob struct { + // Required. The sequence of Hive queries to execute, specified as either + // an HCFS file URI or a list of queries. + // + // Types that are valid to be assigned to Queries: + // *HiveJob_QueryFileUri + // *HiveJob_QueryList + Queries isHiveJob_Queries `protobuf_oneof:"queries"` + // Optional. Whether to continue executing queries if a query fails. + // The default value is `false`. Setting to `true` can be useful when + // executing independent parallel queries. + ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"` + // Optional. Mapping of query variable names to values (equivalent to the + // Hive command: `SET name="value";`). + ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names and values, used to configure Hive. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, + // /etc/hive/conf/hive-site.xml, and classes in user code. + Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to add to the CLASSPATH of the + // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes + // and UDFs. 
+ JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HiveJob) Reset() { *m = HiveJob{} } +func (m *HiveJob) String() string { return proto.CompactTextString(m) } +func (*HiveJob) ProtoMessage() {} +func (*HiveJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{5} +} +func (m *HiveJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HiveJob.Unmarshal(m, b) +} +func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic) +} +func (dst *HiveJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_HiveJob.Merge(dst, src) +} +func (m *HiveJob) XXX_Size() int { + return xxx_messageInfo_HiveJob.Size(m) +} +func (m *HiveJob) XXX_DiscardUnknown() { + xxx_messageInfo_HiveJob.DiscardUnknown(m) +} + +var xxx_messageInfo_HiveJob proto.InternalMessageInfo + +type isHiveJob_Queries interface { + isHiveJob_Queries() +} + +type HiveJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type HiveJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*HiveJob_QueryFileUri) isHiveJob_Queries() {} + +func (*HiveJob_QueryList) isHiveJob_Queries() {} + +func (m *HiveJob) GetQueries() isHiveJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *HiveJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *HiveJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*HiveJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *HiveJob) GetContinueOnFailure() bool { + if m != nil { + return m.ContinueOnFailure + } + return false +} + +func (m *HiveJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *HiveJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *HiveJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
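// NOTE (editorial sketch, not part of the generated file): illustrates how the
// HiveJob "queries" oneof defined above can carry an inline QueryList instead
// of an HCFS file URI, together with script variables that are substituted as
// `SET name="value";`. The helper name exampleHiveJob, the query text, and the
// variable values are assumptions for demonstration only.
func exampleHiveJob() *HiveJob {
	return &HiveJob{
		Queries: &HiveJob_QueryList{
			QueryList: &QueryList{
				// Multiple queries may be listed; semicolon termination is optional.
				Queries: []string{"SHOW DATABASES", "SELECT * FROM ${table} LIMIT 10"},
			},
		},
		ContinueOnFailure: true,
		ScriptVariables:   map[string]string{"table": "default.events"},
	}
}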
+func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{ + (*HiveJob_QueryFileUri)(nil), + (*HiveJob_QueryList)(nil), + } +} + +func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HiveJob) + // queries + switch x := m.Queries.(type) { + case *HiveJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *HiveJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HiveJob.Queries has unexpected type %T", x) + } + return nil +} + +func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HiveJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &HiveJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &HiveJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _HiveJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HiveJob) + // queries + switch x := m.Queries.(type) { + case *HiveJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *HiveJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Spark +// SQL](http://spark.apache.org/sql/) queries. +type SparkSqlJob struct { + // Required. The sequence of Spark SQL queries to execute, specified as + // either an HCFS file URI or as a list of queries. + // + // Types that are valid to be assigned to Queries: + // *SparkSqlJob_QueryFileUri + // *SparkSqlJob_QueryList + Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"` + // Optional. Mapping of query variable names to values (equivalent to the + // Spark SQL command: SET `name="value";`). + ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names to values, used to configure + // Spark SQL's SparkConf. Properties that conflict with values set by the + // Cloud Dataproc API may be overwritten. + Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. + JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. The runtime log config for job execution. 
+ LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SparkSqlJob) Reset() { *m = SparkSqlJob{} } +func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) } +func (*SparkSqlJob) ProtoMessage() {} +func (*SparkSqlJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{6} +} +func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b) +} +func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic) +} +func (dst *SparkSqlJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_SparkSqlJob.Merge(dst, src) +} +func (m *SparkSqlJob) XXX_Size() int { + return xxx_messageInfo_SparkSqlJob.Size(m) +} +func (m *SparkSqlJob) XXX_DiscardUnknown() { + xxx_messageInfo_SparkSqlJob.DiscardUnknown(m) +} + +var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo + +type isSparkSqlJob_Queries interface { + isSparkSqlJob_Queries() +} + +type SparkSqlJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type SparkSqlJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {} + +func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries() {} + +func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *SparkSqlJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *SparkSqlJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *SparkSqlJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *SparkSqlJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *SparkSqlJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{ + (*SparkSqlJob_QueryFileUri)(nil), + (*SparkSqlJob_QueryList)(nil), + } +} + +func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SparkSqlJob) + // queries + switch x := m.Queries.(type) { + case *SparkSqlJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *SparkSqlJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x) + } + return nil +} + +func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SparkSqlJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &SparkSqlJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &SparkSqlJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SparkSqlJob) + // queries + switch x := m.Queries.(type) { + case *SparkSqlJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *SparkSqlJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) +// queries on YARN. +type PigJob struct { + // Required. The sequence of Pig queries to execute, specified as an HCFS + // file URI or a list of queries. + // + // Types that are valid to be assigned to Queries: + // *PigJob_QueryFileUri + // *PigJob_QueryList + Queries isPigJob_Queries `protobuf_oneof:"queries"` + // Optional. Whether to continue executing queries if a query fails. + // The default value is `false`. Setting to `true` can be useful when + // executing independent parallel queries. + ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"` + // Optional. Mapping of query variable names to values (equivalent to the Pig + // command: `name=[value]`). + ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names to values, used to configure Pig. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, + // /etc/pig/conf/pig.properties, and classes in user code. 
+ Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to add to the CLASSPATH of + // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. + JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PigJob) Reset() { *m = PigJob{} } +func (m *PigJob) String() string { return proto.CompactTextString(m) } +func (*PigJob) ProtoMessage() {} +func (*PigJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{7} +} +func (m *PigJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PigJob.Unmarshal(m, b) +} +func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PigJob.Marshal(b, m, deterministic) +} +func (dst *PigJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_PigJob.Merge(dst, src) +} +func (m *PigJob) XXX_Size() int { + return xxx_messageInfo_PigJob.Size(m) +} +func (m *PigJob) XXX_DiscardUnknown() { + xxx_messageInfo_PigJob.DiscardUnknown(m) +} + +var xxx_messageInfo_PigJob proto.InternalMessageInfo + +type isPigJob_Queries interface { + isPigJob_Queries() +} + +type PigJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type PigJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*PigJob_QueryFileUri) isPigJob_Queries() {} + +func (*PigJob_QueryList) isPigJob_Queries() {} + +func (m *PigJob) GetQueries() isPigJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *PigJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *PigJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*PigJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *PigJob) GetContinueOnFailure() bool { + if m != nil { + return m.ContinueOnFailure + } + return false +} + +func (m *PigJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *PigJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *PigJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *PigJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
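// NOTE (editorial sketch, not part of the generated file): shows the remaining
// branch of the "queries" oneof, where a PigJob references a script stored in
// HCFS via PigJob_QueryFileUri rather than inlining a QueryList, and scopes
// per-package driver log levels through LoggingConfig. The helper name
// examplePigJob, the script URI, and the variable values are assumptions for
// demonstration only.
func examplePigJob() *PigJob {
	return &PigJob{
		Queries:         &PigJob_QueryFileUri{QueryFileUri: "gs://my-bucket/scripts/wordcount.pig"},
		ScriptVariables: map[string]string{"input": "gs://my-bucket/input"},
		LoggingConfig: &LoggingConfig{
			DriverLogLevels: map[string]LoggingConfig_Level{
				"org.apache.pig": LoggingConfig_DEBUG,
			},
		},
	}
}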
+func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{ + (*PigJob_QueryFileUri)(nil), + (*PigJob_QueryList)(nil), + } +} + +func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PigJob) + // queries + switch x := m.Queries.(type) { + case *PigJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *PigJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PigJob.Queries has unexpected type %T", x) + } + return nil +} + +func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PigJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &PigJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &PigJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _PigJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PigJob) + // queries + switch x := m.Queries.(type) { + case *PigJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *PigJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Cloud Dataproc job config. +type JobPlacement struct { + // Required. The name of the cluster where the job will be submitted. + ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Output only. A cluster UUID generated by the Cloud Dataproc service when + // the job is submitted. 
+ ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobPlacement) Reset() { *m = JobPlacement{} } +func (m *JobPlacement) String() string { return proto.CompactTextString(m) } +func (*JobPlacement) ProtoMessage() {} +func (*JobPlacement) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{8} +} +func (m *JobPlacement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobPlacement.Unmarshal(m, b) +} +func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic) +} +func (dst *JobPlacement) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobPlacement.Merge(dst, src) +} +func (m *JobPlacement) XXX_Size() int { + return xxx_messageInfo_JobPlacement.Size(m) +} +func (m *JobPlacement) XXX_DiscardUnknown() { + xxx_messageInfo_JobPlacement.DiscardUnknown(m) +} + +var xxx_messageInfo_JobPlacement proto.InternalMessageInfo + +func (m *JobPlacement) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *JobPlacement) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +// Cloud Dataproc job status. +type JobStatus struct { + // Output only. A state message specifying the overall job state. + State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.JobStatus_State" json:"state,omitempty"` + // Output only. Optional job state details, such as an error + // description if the state is ERROR. + Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"` + // Output only. The time when this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + // Output only. Additional state information, which includes + // status reported by the agent. 
+ Substate JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1.JobStatus_Substate" json:"substate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobStatus) Reset() { *m = JobStatus{} } +func (m *JobStatus) String() string { return proto.CompactTextString(m) } +func (*JobStatus) ProtoMessage() {} +func (*JobStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{9} +} +func (m *JobStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobStatus.Unmarshal(m, b) +} +func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic) +} +func (dst *JobStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobStatus.Merge(dst, src) +} +func (m *JobStatus) XXX_Size() int { + return xxx_messageInfo_JobStatus.Size(m) +} +func (m *JobStatus) XXX_DiscardUnknown() { + xxx_messageInfo_JobStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_JobStatus proto.InternalMessageInfo + +func (m *JobStatus) GetState() JobStatus_State { + if m != nil { + return m.State + } + return JobStatus_STATE_UNSPECIFIED +} + +func (m *JobStatus) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +func (m *JobStatus) GetSubstate() JobStatus_Substate { + if m != nil { + return m.Substate + } + return JobStatus_UNSPECIFIED +} + +// Encapsulates the full scoping used to reference a job. +type JobReference struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Optional. The job ID, which must be unique within the project. The job ID + // is generated by the server upon job submission or provided by the user as a + // means to perform retries without creating duplicate jobs. The ID must + // contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or + // hyphens (-). The maximum length is 100 characters. 
+ JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobReference) Reset() { *m = JobReference{} } +func (m *JobReference) String() string { return proto.CompactTextString(m) } +func (*JobReference) ProtoMessage() {} +func (*JobReference) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{10} +} +func (m *JobReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobReference.Unmarshal(m, b) +} +func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobReference.Marshal(b, m, deterministic) +} +func (dst *JobReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobReference.Merge(dst, src) +} +func (m *JobReference) XXX_Size() int { + return xxx_messageInfo_JobReference.Size(m) +} +func (m *JobReference) XXX_DiscardUnknown() { + xxx_messageInfo_JobReference.DiscardUnknown(m) +} + +var xxx_messageInfo_JobReference proto.InternalMessageInfo + +func (m *JobReference) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *JobReference) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A YARN application created by a job. Application information is a subset of +// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. +// +// **Beta Feature**: This report is available for testing purposes only. It may +// be changed before final release. +type YarnApplication struct { + // Required. The application name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The application state. + State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1.YarnApplication_State" json:"state,omitempty"` + // Required. The numerical progress of the application, from 1 to 100. + Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"` + // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or + // TimelineServer that provides application-specific information. The URL uses + // the internal hostname, and requires a proxy server for resolution and, + // possibly, access. 
+ TrackingUrl string `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YarnApplication) Reset() { *m = YarnApplication{} } +func (m *YarnApplication) String() string { return proto.CompactTextString(m) } +func (*YarnApplication) ProtoMessage() {} +func (*YarnApplication) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{11} +} +func (m *YarnApplication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YarnApplication.Unmarshal(m, b) +} +func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic) +} +func (dst *YarnApplication) XXX_Merge(src proto.Message) { + xxx_messageInfo_YarnApplication.Merge(dst, src) +} +func (m *YarnApplication) XXX_Size() int { + return xxx_messageInfo_YarnApplication.Size(m) +} +func (m *YarnApplication) XXX_DiscardUnknown() { + xxx_messageInfo_YarnApplication.DiscardUnknown(m) +} + +var xxx_messageInfo_YarnApplication proto.InternalMessageInfo + +func (m *YarnApplication) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *YarnApplication) GetState() YarnApplication_State { + if m != nil { + return m.State + } + return YarnApplication_STATE_UNSPECIFIED +} + +func (m *YarnApplication) GetProgress() float32 { + if m != nil { + return m.Progress + } + return 0 +} + +func (m *YarnApplication) GetTrackingUrl() string { + if m != nil { + return m.TrackingUrl + } + return "" +} + +// A Cloud Dataproc job resource. +type Job struct { + // Optional. The fully qualified reference to the job, which can be used to + // obtain the equivalent REST path of the job resource. If this property + // is not specified when a job is created, the server generates a + // job_id. + Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"` + // Required. Job information, including how, when, and where to + // run the job. + Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"` + // Required. The application/framework-specific portion of the job. + // + // Types that are valid to be assigned to TypeJob: + // *Job_HadoopJob + // *Job_SparkJob + // *Job_PysparkJob + // *Job_HiveJob + // *Job_PigJob + // *Job_SparkSqlJob + TypeJob isJob_TypeJob `protobuf_oneof:"type_job"` + // Output only. The job status. Additional application-specific + // status information may be contained in the type_job + // and yarn_applications fields. + Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous job status. + StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. The collection of YARN applications spun up by this job. + // + // **Beta** Feature: This report is available for testing purposes only. It + // may be changed before final release. + YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"` + // Output only. A URI pointing to the location of the stdout of the job's + // driver program. 
+ DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"` + // Output only. If present, the location of miscellaneous control files + // which may be used as part of job setup and handling. If not present, + // control files may be placed in the same location as `driver_output_uri`. + DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"` + // Optional. The labels to associate with this job. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with a job. + Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. Job scheduling configuration. + Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"` + // Output only. A UUID that uniquely identifies a job within the project + // over time. This is in contrast to a user-settable reference.job_id that + // may be reused over time. + JobUuid string `protobuf:"bytes,22,opt,name=job_uuid,json=jobUuid,proto3" json:"job_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{12} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetReference() *JobReference { + if m != nil { + return m.Reference + } + return nil +} + +func (m *Job) GetPlacement() *JobPlacement { + if m != nil { + return m.Placement + } + return nil +} + +type isJob_TypeJob interface { + isJob_TypeJob() +} + +type Job_HadoopJob struct { + HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"` +} + +type Job_SparkJob struct { + SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"` +} + +type Job_PysparkJob struct { + PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"` +} + +type Job_HiveJob struct { + HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"` +} + +type Job_PigJob struct { + PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"` +} + +type Job_SparkSqlJob struct { + SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"` +} + +func (*Job_HadoopJob) isJob_TypeJob() {} + +func (*Job_SparkJob) 
isJob_TypeJob() {} + +func (*Job_PysparkJob) isJob_TypeJob() {} + +func (*Job_HiveJob) isJob_TypeJob() {} + +func (*Job_PigJob) isJob_TypeJob() {} + +func (*Job_SparkSqlJob) isJob_TypeJob() {} + +func (m *Job) GetTypeJob() isJob_TypeJob { + if m != nil { + return m.TypeJob + } + return nil +} + +func (m *Job) GetHadoopJob() *HadoopJob { + if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok { + return x.HadoopJob + } + return nil +} + +func (m *Job) GetSparkJob() *SparkJob { + if x, ok := m.GetTypeJob().(*Job_SparkJob); ok { + return x.SparkJob + } + return nil +} + +func (m *Job) GetPysparkJob() *PySparkJob { + if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok { + return x.PysparkJob + } + return nil +} + +func (m *Job) GetHiveJob() *HiveJob { + if x, ok := m.GetTypeJob().(*Job_HiveJob); ok { + return x.HiveJob + } + return nil +} + +func (m *Job) GetPigJob() *PigJob { + if x, ok := m.GetTypeJob().(*Job_PigJob); ok { + return x.PigJob + } + return nil +} + +func (m *Job) GetSparkSqlJob() *SparkSqlJob { + if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok { + return x.SparkSqlJob + } + return nil +} + +func (m *Job) GetStatus() *JobStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *Job) GetStatusHistory() []*JobStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *Job) GetYarnApplications() []*YarnApplication { + if m != nil { + return m.YarnApplications + } + return nil +} + +func (m *Job) GetDriverOutputResourceUri() string { + if m != nil { + return m.DriverOutputResourceUri + } + return "" +} + +func (m *Job) GetDriverControlFilesUri() string { + if m != nil { + return m.DriverControlFilesUri + } + return "" +} + +func (m *Job) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Job) GetScheduling() *JobScheduling { + if m != nil { + return m.Scheduling + } + return nil +} + +func (m *Job) GetJobUuid() string { + if m != nil { + return m.JobUuid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{ + (*Job_HadoopJob)(nil), + (*Job_SparkJob)(nil), + (*Job_PysparkJob)(nil), + (*Job_HiveJob)(nil), + (*Job_PigJob)(nil), + (*Job_SparkSqlJob)(nil), + } +} + +func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Job) + // type_job + switch x := m.TypeJob.(type) { + case *Job_HadoopJob: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HadoopJob); err != nil { + return err + } + case *Job_SparkJob: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkJob); err != nil { + return err + } + case *Job_PysparkJob: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PysparkJob); err != nil { + return err + } + case *Job_HiveJob: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HiveJob); err != nil { + return err + } + case *Job_PigJob: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PigJob); err != nil { + return err + } + case *Job_SparkSqlJob: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkSqlJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.TypeJob has unexpected type %T", x) + } + return nil +} + +func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Job) + switch tag { + case 3: // type_job.hadoop_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HadoopJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_HadoopJob{msg} + return true, err + case 4: // type_job.spark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_SparkJob{msg} + return true, err + case 5: // type_job.pyspark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PySparkJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_PysparkJob{msg} + return true, err + case 6: // type_job.hive_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HiveJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_HiveJob{msg} + return true, err + case 7: // type_job.pig_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PigJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_PigJob{msg} + return true, err + case 12: // type_job.spark_sql_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkSqlJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_SparkSqlJob{msg} + return true, err + default: + return false, nil + } +} + +func _Job_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Job) + // type_job + switch x := m.TypeJob.(type) { + case *Job_HadoopJob: + s := proto.Size(x.HadoopJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_SparkJob: + s := proto.Size(x.SparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_PysparkJob: + s := proto.Size(x.PysparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_HiveJob: + s := proto.Size(x.HiveJob) + n += 1 // tag and wire + n += 
proto.SizeVarint(uint64(s)) + n += s + case *Job_PigJob: + s := proto.Size(x.PigJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_SparkSqlJob: + s := proto.Size(x.SparkSqlJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Job scheduling options. +type JobScheduling struct { + // Optional. Maximum number of times per hour a driver may be restarted as + // a result of driver terminating with non-zero code before job is + // reported failed. + // + // A job may be reported as thrashing if driver exits with non-zero code + // 4 times within 10 minute window. + // + // Maximum value is 10. + MaxFailuresPerHour int32 `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobScheduling) Reset() { *m = JobScheduling{} } +func (m *JobScheduling) String() string { return proto.CompactTextString(m) } +func (*JobScheduling) ProtoMessage() {} +func (*JobScheduling) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{13} +} +func (m *JobScheduling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobScheduling.Unmarshal(m, b) +} +func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic) +} +func (dst *JobScheduling) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobScheduling.Merge(dst, src) +} +func (m *JobScheduling) XXX_Size() int { + return xxx_messageInfo_JobScheduling.Size(m) +} +func (m *JobScheduling) XXX_DiscardUnknown() { + xxx_messageInfo_JobScheduling.DiscardUnknown(m) +} + +var xxx_messageInfo_JobScheduling proto.InternalMessageInfo + +func (m *JobScheduling) GetMaxFailuresPerHour() int32 { + if m != nil { + return m.MaxFailuresPerHour + } + return 0 +} + +// A request to submit a job. +type SubmitJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job resource. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] + // requests with the same id, then the second request will be ignored and the + // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend + // is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubmitJobRequest) Reset() { *m = SubmitJobRequest{} } +func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) } +func (*SubmitJobRequest) ProtoMessage() {} +func (*SubmitJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{14} +} +func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b) +} +func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic) +} +func (dst *SubmitJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubmitJobRequest.Merge(dst, src) +} +func (m *SubmitJobRequest) XXX_Size() int { + return xxx_messageInfo_SubmitJobRequest.Size(m) +} +func (m *SubmitJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo + +func (m *SubmitJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SubmitJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *SubmitJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *SubmitJobRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to get the resource representation for a job in a project. +type GetJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. 
+ JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{15} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *GetJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A request to list jobs in a project. +type ListJobsRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"` + // Optional. The number of results to return in each response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The page token, returned by a previous call, to request the + // next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. If set, the returned jobs list includes only jobs that were + // submitted to the named cluster. + ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Optional. Specifies enumerated categories of jobs to list. + // (default = match ALL jobs). + // + // If `filter` is provided, `jobStateMatcher` will be ignored. + JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"` + // Optional. A filter constraining the jobs to list. Filters are + // case-sensitive and have the following syntax: + // + // [field = value] AND [field [= value]] ... + // + // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + // key. **value** can be `*` to match all values. + // `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. 
+ // + // Example filter: + // + // status.state = ACTIVE AND labels.env = staging AND labels.starred = * + Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{16} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListJobsRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListJobsRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher { + if m != nil { + return m.JobStateMatcher + } + return ListJobsRequest_ALL +} + +func (m *ListJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// A request to update a job. +type UpdateJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Required. The changes to the job. + Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"` + // Required. Specifies the path, relative to Job, of + // the field to update. For example, to update the labels of a Job the + // update_mask parameter would be specified as + // labels, and the `PATCH` request body would specify the new + // value. Note: Currently, labels is the only + // field that can be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} } +func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobRequest) ProtoMessage() {} +func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{17} +} +func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b) +} +func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobRequest.Merge(dst, src) +} +func (m *UpdateJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobRequest.Size(m) +} +func (m *UpdateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo + +func (m *UpdateJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *UpdateJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func (m *UpdateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// A list of jobs in a project. +type ListJobsResponse struct { + // Output only. Jobs list. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // Optional. This token is included in the response if there are more results + // to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent ListJobsRequest. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{18} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to cancel a job. +type CancelJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelJobRequest) Reset() { *m = CancelJobRequest{} } +func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) } +func (*CancelJobRequest) ProtoMessage() {} +func (*CancelJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{19} +} +func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b) +} +func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic) +} +func (dst *CancelJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelJobRequest.Merge(dst, src) +} +func (m *CancelJobRequest) XXX_Size() int { + return xxx_messageInfo_CancelJobRequest.Size(m) +} +func (m *CancelJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo + +func (m *CancelJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CancelJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *CancelJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A request to delete a job. +type DeleteJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } +func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobRequest) ProtoMessage() {} +func (*DeleteJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_a9b426fd7b89528f, []int{20} +} +func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) +} +func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobRequest.Merge(dst, src) +} +func (m *DeleteJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobRequest.Size(m) +} +func (m *DeleteJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo + +func (m *DeleteJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DeleteJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func init() { + proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1.LoggingConfig") + proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry") + proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1.HadoopJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HadoopJob.PropertiesEntry") + proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1.SparkJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkJob.PropertiesEntry") + proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1.PySparkJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PySparkJob.PropertiesEntry") + proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1.QueryList") + proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1.HiveJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry") + proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1.SparkSqlJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry") + proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1.PigJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry") + proto.RegisterType((*JobPlacement)(nil), 
"google.cloud.dataproc.v1.JobPlacement") + proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1.JobStatus") + proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1.JobReference") + proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1.YarnApplication") + proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1.Job") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.Job.LabelsEntry") + proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1.JobScheduling") + proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1.SubmitJobRequest") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1.GetJobRequest") + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1.ListJobsRequest") + proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1.UpdateJobRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1.ListJobsResponse") + proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1.CancelJobRequest") + proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1.DeleteJobRequest") + proto.RegisterEnum("google.cloud.dataproc.v1.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value) + proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_State", JobStatus_State_name, JobStatus_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value) + proto.RegisterEnum("google.cloud.dataproc.v1.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// JobControllerClient is the client API for JobController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type JobControllerClient interface { + // Submits a job to a cluster. + SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) + // Gets the resource representation for a job in a project. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Lists regions/{region}/jobs in a project. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Updates a job in a project. + UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Starts a job cancellation request. To access the job resource + // after cancellation, call + // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + // or + // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). + CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) + // Deletes the job from the project. If the job is active, the delete fails, + // and the response returns `FAILED_PRECONDITION`. 
+ DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type jobControllerClient struct { + cc *grpc.ClientConn +} + +func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient { + return &jobControllerClient{cc} +} + +func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/SubmitJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/UpdateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/CancelJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/DeleteJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// JobControllerServer is the server API for JobController service. +type JobControllerServer interface { + // Submits a job to a cluster. + SubmitJob(context.Context, *SubmitJobRequest) (*Job, error) + // Gets the resource representation for a job in a project. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Lists regions/{region}/jobs in a project. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Updates a job in a project. + UpdateJob(context.Context, *UpdateJobRequest) (*Job, error) + // Starts a job cancellation request. To access the job resource + // after cancellation, call + // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + // or + // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). + CancelJob(context.Context, *CancelJobRequest) (*Job, error) + // Deletes the job from the project. If the job is active, the delete fails, + // and the response returns `FAILED_PRECONDITION`. 
+ DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error) +} + +func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) { + s.RegisterService(&_JobController_serviceDesc, srv) +} + +func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SubmitJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).SubmitJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/SubmitJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).UpdateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/UpdateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).CancelJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/CancelJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.JobController/DeleteJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _JobController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1.JobController", + HandlerType: (*JobControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SubmitJob", + Handler: _JobController_SubmitJob_Handler, + }, + { + MethodName: "GetJob", + Handler: _JobController_GetJob_Handler, + }, + { + MethodName: "ListJobs", + Handler: _JobController_ListJobs_Handler, + }, + { + MethodName: "UpdateJob", + Handler: _JobController_UpdateJob_Handler, + }, + { + MethodName: "CancelJob", + Handler: _JobController_CancelJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _JobController_DeleteJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1/jobs.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1/jobs.proto", fileDescriptor_jobs_a9b426fd7b89528f) +} + +var fileDescriptor_jobs_a9b426fd7b89528f = []byte{ + // 2320 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0xcf, 0x73, 0x23, 0x47, + 0xf5, 0xb7, 0x7e, 0x6b, 0x9e, 0x6c, 0x79, 0xdc, 0xd9, 0xcd, 0x57, 0x5f, 0x25, 0xa9, 0x38, 0xb3, + 0x64, 0xf1, 0x2e, 0x20, 0x61, 0x05, 0x36, 0x1b, 0x1b, 0xd8, 0xc8, 0xd2, 0x78, 0x25, 0x47, 0x2b, + 0x6b, 0x47, 0xd2, 0x6e, 0x41, 0x15, 0x35, 0x3b, 0x92, 0xda, 0xf2, 0xd8, 0xa3, 0x99, 0xf1, 0xf4, + 0x8c, 0x6b, 0x95, 0xad, 0xbd, 0x70, 0xe1, 0x48, 0x01, 0xa7, 0x50, 0xc5, 0x85, 0x1b, 0x7f, 0x00, + 0x5c, 0x28, 0x8a, 0x0b, 0x67, 0x2e, 0x5c, 0xa9, 0x9c, 0x38, 0x72, 0xe2, 0x2f, 0xa0, 0xba, 0x7b, + 0x46, 0x96, 0x64, 0xeb, 0x87, 0x77, 0x21, 0x95, 0xe4, 0xe4, 0x9e, 0x7e, 0x3f, 0xfa, 0x75, 0x7f, + 0x3e, 0xfd, 0xde, 0x6b, 0x19, 0x6e, 0xf5, 0x2d, 0xab, 0x6f, 0xe0, 0x7c, 0xd7, 0xb0, 0xbc, 0x5e, + 0xbe, 0xa7, 0xb9, 0x9a, 0xed, 0x58, 0xdd, 0xfc, 0xf9, 0x76, 0xfe, 0xc4, 0xea, 0x90, 0x9c, 0xed, + 0x58, 0xae, 0x85, 0x32, 0x5c, 0x29, 0xc7, 0x94, 0x72, 0x81, 0x52, 0xee, 0x7c, 0x3b, 0xfb, 0xb6, + 0x6f, 0xae, 0xd9, 0x7a, 0x5e, 0x33, 0x4d, 0xcb, 0xd5, 0x5c, 0xdd, 0x32, 0x7d, 0xbb, 0xec, 0x5b, + 0xbe, 0x94, 0x7d, 0x75, 0xbc, 0xa3, 0x3c, 0x1e, 0xd8, 0xee, 0xd0, 0x17, 0x6e, 0x4e, 0x0b, 0x8f, + 0x74, 0x6c, 0xf4, 0xd4, 0x81, 0x46, 0x4e, 0x7d, 0x8d, 0x77, 0xa7, 0x35, 0x5c, 0x7d, 0x80, 0x89, + 0xab, 0x0d, 0x6c, 0xae, 0x20, 0x7d, 0x1e, 0x86, 0xb5, 0x9a, 0xd5, 0xef, 0xeb, 0x66, 0xbf, 0x64, + 0x99, 0x47, 0x7a, 0x1f, 0x1d, 0xc3, 0x46, 0xcf, 0xd1, 0xcf, 0xb1, 0xa3, 0x1a, 0x56, 0x5f, 0x35, + 0xf0, 0x39, 0x36, 0x48, 0x26, 0xbc, 0x19, 0xd9, 0x4a, 0x15, 0x7e, 0x90, 0x9b, 0xb5, 0x8b, 0xdc, + 0x84, 0x8f, 0x5c, 0x99, 0x39, 0xa8, 0x59, 0xfd, 0x1a, 0x33, 0x97, 0x4d, 0xd7, 0x19, 0x2a, 0xeb, + 0xbd, 0xc9, 0xd9, 0xec, 0x19, 0xdc, 0xb8, 0x4a, 0x11, 0x89, 0x10, 0x39, 0xc5, 0xc3, 0x4c, 0x68, + 0x33, 0xb4, 0x25, 0x28, 0x74, 0x88, 0x4a, 0x10, 0x3b, 0xd7, 0x0c, 0x0f, 0x67, 0xc2, 0x9b, 0xa1, + 0xad, 0x74, 0xe1, 0x3b, 0xcb, 0xc6, 0xc1, 0xbc, 0x2a, 
0xdc, 0x76, 0x27, 0x7c, 0x3f, 0x24, 0xd9, + 0x10, 0x63, 0x73, 0xe8, 0x26, 0x6c, 0xd4, 0xe4, 0x27, 0x72, 0x4d, 0x6d, 0xd7, 0x9b, 0x0d, 0xb9, + 0x54, 0xdd, 0xaf, 0xca, 0x65, 0x71, 0x05, 0x25, 0x20, 0x52, 0xac, 0xd5, 0xc4, 0x10, 0x12, 0x20, + 0xd6, 0x52, 0x8a, 0x25, 0x59, 0x0c, 0xd3, 0x61, 0x59, 0xde, 0x6b, 0x3f, 0x14, 0x23, 0x28, 0x09, + 0xd1, 0x6a, 0x7d, 0xff, 0x50, 0x8c, 0xd2, 0xd1, 0xd3, 0xa2, 0x52, 0x17, 0x63, 0x54, 0x2c, 0x2b, + 0xca, 0xa1, 0x22, 0xc6, 0xe9, 0x70, 0xbf, 0xd8, 0x2a, 0xd6, 0xc4, 0x04, 0x75, 0x74, 0xb8, 0xbf, + 0x2f, 0x26, 0xa5, 0xbf, 0x44, 0x40, 0xa8, 0x68, 0x3d, 0xcb, 0xb2, 0x0f, 0xac, 0x0e, 0xfa, 0x16, + 0x6c, 0x0c, 0x34, 0xdd, 0x54, 0x4f, 0x34, 0x47, 0x3d, 0xd2, 0x0d, 0xac, 0x7a, 0x8e, 0xce, 0x37, + 0x5a, 0x59, 0x51, 0xd2, 0x54, 0x74, 0xa0, 0x39, 0xfb, 0xba, 0x81, 0xdb, 0x8e, 0x8e, 0xde, 0x05, + 0x60, 0xca, 0x5d, 0x43, 0x23, 0x84, 0x6d, 0x9d, 0x6a, 0x09, 0x74, 0xae, 0x44, 0xa7, 0x10, 0x82, + 0xa8, 0xe6, 0xf4, 0x49, 0x26, 0xb2, 0x19, 0xd9, 0x12, 0x14, 0x36, 0x46, 0x12, 0xac, 0x8d, 0x3b, + 0x27, 0x99, 0x28, 0x13, 0xa6, 0x4e, 0x46, 0x7e, 0x09, 0x7a, 0x0b, 0x84, 0x0b, 0x79, 0x8c, 0xc9, + 0x93, 0x47, 0x81, 0xf0, 0x3d, 0x58, 0xd5, 0x9c, 0xee, 0xb1, 0x7e, 0xee, 0xcb, 0xe3, 0xdc, 0xde, + 0x9f, 0x63, 0x2a, 0x4d, 0x00, 0xdb, 0xb1, 0x6c, 0xec, 0xb8, 0x3a, 0x26, 0x99, 0x04, 0xe3, 0xc6, + 0x07, 0xb3, 0x31, 0x19, 0x6d, 0x3f, 0xd7, 0x18, 0x59, 0x71, 0x4a, 0x8c, 0xb9, 0x41, 0x75, 0x48, + 0x1b, 0x1c, 0x3c, 0xb5, 0xcb, 0xd0, 0xcb, 0x24, 0x37, 0x43, 0x5b, 0xa9, 0xc2, 0x37, 0x97, 0x04, + 0x5b, 0x59, 0x33, 0xc6, 0x3f, 0xb3, 0x3f, 0x84, 0xf5, 0xa9, 0xe5, 0xae, 0x20, 0xd6, 0x8d, 0x71, + 0x62, 0x09, 0x63, 0x4c, 0xd9, 0x4b, 0x42, 0x9c, 0xf3, 0x55, 0xfa, 0x73, 0x04, 0x92, 0x4d, 0x5b, + 0x73, 0x4e, 0xbf, 0x3e, 0x00, 0x2a, 0x57, 0x00, 0x58, 0x98, 0x7d, 0xce, 0xc1, 0xee, 0xbf, 0x9a, + 0xf8, 0xfd, 0x35, 0x02, 0xd0, 0x18, 0x8e, 0x10, 0xcc, 0xc3, 0x0d, 0x06, 0x8a, 0x3d, 0x74, 0x8f, + 0x2d, 0x73, 0x0a, 0x44, 0x85, 0xa1, 0xdb, 0x60, 0xa2, 0x00, 0xc5, 0x00, 0xa4, 0xf0, 0x18, 0x48, + 0x5b, 0x20, 0x4e, 0xd9, 0x07, 0x20, 0xa6, 0xed, 0x71, 0xe3, 0x2f, 0x06, 0xce, 0xd6, 0x15, 0x70, + 0x7e, 0x6f, 0xf6, 0xb1, 0x5f, 0x1c, 0xc6, 0x57, 0x08, 0x50, 0xe9, 0x7d, 0x10, 0x1e, 0x7b, 0xd8, + 0x19, 0xd6, 0x74, 0xe2, 0xa2, 0x0c, 0x24, 0xce, 0x3c, 0xec, 0xd0, 0xed, 0x86, 0xd8, 0x79, 0x04, + 0x9f, 0xd2, 0x2f, 0xa2, 0x90, 0xa8, 0xe8, 0xe7, 0x98, 0x42, 0x7d, 0x1b, 0xd2, 0x74, 0x7a, 0x78, + 0xf9, 0xa6, 0xae, 0xb2, 0xf9, 0x00, 0xe1, 0x32, 0x00, 0xd7, 0x33, 0x74, 0xe2, 0xb2, 0x95, 0x53, + 0x85, 0x5b, 0xb3, 0x77, 0x39, 0x0a, 0x83, 0x5e, 0xe6, 0xb3, 0x51, 0x4c, 0x39, 0x78, 0xa3, 0x6b, + 0x99, 0xae, 0x6e, 0x7a, 0x58, 0xa5, 0xc4, 0xd0, 0x74, 0xc3, 0x73, 0x70, 0x26, 0xb2, 0x19, 0xda, + 0x4a, 0x2a, 0x1b, 0x81, 0xe8, 0xd0, 0xdc, 0xe7, 0x02, 0xa4, 0x81, 0x48, 0xba, 0x8e, 0x6e, 0xbb, + 0xea, 0xb9, 0xe6, 0xe8, 0x5a, 0xc7, 0xc0, 0x9c, 0x1c, 0xa9, 0xc2, 0xbd, 0x39, 0xb9, 0x94, 0x6f, + 0x2d, 0xd7, 0x64, 0x96, 0x4f, 0x02, 0x43, 0xbf, 0xc2, 0x92, 0xc9, 0x59, 0xf4, 0x78, 0x82, 0x18, + 0x31, 0xe6, 0x7c, 0x7b, 0xb1, 0xf3, 0x79, 0xac, 0xb8, 0xc4, 0xe7, 0xf8, 0x25, 0x3e, 0x67, 0xf7, + 0xe0, 0xc6, 0x55, 0xf1, 0x5d, 0x07, 0xee, 0xd7, 0xbd, 0xfe, 0xc2, 0x88, 0x20, 0xd2, 0x9f, 0xa2, + 0x90, 0x62, 0x84, 0x6f, 0x9e, 0x19, 0x5f, 0x3c, 0x2b, 0xf0, 0x15, 0x28, 0x47, 0x18, 0x10, 0x3b, + 0x0b, 0x12, 0x2e, 0x0f, 0x77, 0x49, 0xa4, 0xdb, 0x13, 0x48, 0x73, 0x1a, 0x7d, 0x7f, 0xb9, 0x05, + 0xae, 0x85, 0xf6, 0xfd, 0xcb, 0xd9, 0xeb, 0x72, 0x9e, 0x88, 0xbf, 0x56, 0x9e, 0xf8, 0x72, 0xb1, + 0xe7, 0x1f, 0x51, 0x88, 0x37, 0xf4, 0xfe, 0x97, 0x3f, 0x9d, 0x3c, 0x9b, 0x99, 
0x4e, 0xe6, 0xf0, + 0x80, 0xef, 0x6c, 0x49, 0x8e, 0x35, 0xae, 0xc8, 0x26, 0xdf, 0x5d, 0xe8, 0xfb, 0x35, 0x93, 0xc9, + 0x15, 0xf4, 0x4a, 0x7c, 0x8d, 0xe8, 0xd5, 0x82, 0xd5, 0x03, 0xab, 0xd3, 0x30, 0xb4, 0x2e, 0x1e, + 0x60, 0xd3, 0xa5, 0xd5, 0xbe, 0x6b, 0x78, 0xc4, 0xc5, 0x8e, 0x6a, 0x6a, 0x03, 0xec, 0xfb, 0x4b, + 0xf9, 0x73, 0x75, 0x6d, 0x80, 0xc7, 0x55, 0x3c, 0x4f, 0xef, 0xf9, 0xee, 0x03, 0x95, 0xb6, 0xa7, + 0xf7, 0xa4, 0x7f, 0x45, 0x40, 0x38, 0xb0, 0x3a, 0x4d, 0x57, 0x73, 0x3d, 0x82, 0x1e, 0x40, 0x8c, + 0xb8, 0x9a, 0xcb, 0x9d, 0xa5, 0x0b, 0x77, 0x66, 0x1f, 0xdc, 0xc8, 0x26, 0x47, 0xff, 0x60, 0x85, + 0xdb, 0xd1, 0x6a, 0xdb, 0xc3, 0xae, 0xa6, 0x1b, 0x7e, 0x13, 0xab, 0x04, 0x9f, 0xa8, 0x0c, 0x22, + 0x53, 0x51, 0x89, 0xab, 0x39, 0xae, 0x4a, 0x5f, 0x97, 0xfe, 0xed, 0xcf, 0x06, 0xab, 0x04, 0x4f, + 0xcf, 0x5c, 0x2b, 0x78, 0x7a, 0x2a, 0x69, 0x66, 0xd3, 0xa4, 0x26, 0x74, 0x12, 0x55, 0x20, 0x49, + 0xbc, 0x0e, 0x8f, 0x31, 0xc1, 0x62, 0xfc, 0xf6, 0x52, 0x31, 0xfa, 0x36, 0xca, 0xc8, 0x5a, 0xfa, + 0x7d, 0x08, 0x62, 0x2c, 0x74, 0xfa, 0xc0, 0x6b, 0xb6, 0x8a, 0x2d, 0x79, 0xea, 0x81, 0x97, 0x82, + 0x44, 0x43, 0xae, 0x97, 0xab, 0xf5, 0x87, 0x62, 0x08, 0xa5, 0x01, 0x9a, 0x72, 0xab, 0xdd, 0x50, + 0xcb, 0x87, 0x75, 0x59, 0x4c, 0x52, 0xa1, 0xd2, 0xae, 0xd7, 0xa9, 0x30, 0x8c, 0x10, 0xa4, 0x4b, + 0xc5, 0x7a, 0x49, 0xae, 0xa9, 0x81, 0x41, 0x64, 0x6c, 0xae, 0xd9, 0x2a, 0x2a, 0x2d, 0xb9, 0x2c, + 0x26, 0xd0, 0x1a, 0x08, 0x7c, 0xae, 0x26, 0x97, 0xf9, 0xc3, 0x90, 0x79, 0x9b, 0x78, 0x18, 0xbe, + 0x01, 0xeb, 0xc5, 0x56, 0x4b, 0x7e, 0xd4, 0x68, 0xa9, 0xfb, 0xc5, 0x6a, 0xad, 0xad, 0xc8, 0xa2, + 0x20, 0x55, 0x20, 0x19, 0xec, 0x00, 0xad, 0x43, 0x6a, 0x32, 0xce, 0x35, 0x10, 0x9a, 0xed, 0xbd, + 0x47, 0xd5, 0x16, 0x5d, 0x24, 0x84, 0x00, 0xe2, 0x8f, 0xdb, 0x72, 0x5b, 0x2e, 0x8b, 0x61, 0x24, + 0xc2, 0x6a, 0xb3, 0x55, 0xac, 0xc9, 0x34, 0x86, 0x56, 0xbb, 0x29, 0x46, 0xa4, 0x32, 0x23, 0x91, + 0x82, 0x8f, 0xb0, 0x83, 0xcd, 0x2e, 0x46, 0xef, 0xb0, 0x8b, 0x7a, 0x82, 0xbb, 0xae, 0xaa, 0xf7, + 0x7c, 0x0a, 0x09, 0xfe, 0x4c, 0xb5, 0x87, 0x6e, 0x42, 0xfc, 0xc4, 0xea, 0xa8, 0x23, 0xea, 0xc4, + 0x4e, 0xac, 0x4e, 0xb5, 0x27, 0xfd, 0x21, 0x0c, 0xeb, 0x3f, 0xd6, 0x1c, 0xb3, 0x68, 0xdb, 0x86, + 0xde, 0x65, 0xbf, 0x42, 0xd0, 0xde, 0x77, 0x8c, 0x86, 0x6c, 0x8c, 0xe4, 0x80, 0x4e, 0xfc, 0x31, + 0x9e, 0x9f, 0x0d, 0xd5, 0x94, 0xb7, 0x49, 0x52, 0x65, 0x21, 0x69, 0x3b, 0x56, 0xdf, 0xc1, 0x84, + 0xb0, 0xa4, 0x16, 0x56, 0x46, 0xdf, 0x94, 0xe2, 0xae, 0xa3, 0x75, 0x4f, 0xe9, 0xa5, 0xf7, 0x1c, + 0x23, 0x13, 0xe5, 0x14, 0x0f, 0xe6, 0xda, 0x8e, 0x21, 0xfd, 0x7c, 0x11, 0xd2, 0x09, 0x88, 0xd4, + 0xe5, 0xa7, 0x1c, 0xe5, 0xba, 0xfc, 0x54, 0x6d, 0x16, 0x9f, 0x70, 0x60, 0x27, 0x8e, 0x36, 0x82, + 0x56, 0x21, 0x59, 0x2c, 0x95, 0xe4, 0x46, 0x8b, 0xc1, 0x37, 0x46, 0x81, 0x18, 0x15, 0xed, 0x57, + 0xeb, 0xd5, 0x66, 0x45, 0x2e, 0x8b, 0x71, 0x8a, 0x01, 0x05, 0x8f, 0x81, 0x0e, 0x10, 0xff, 0xa4, + 0xca, 0x10, 0x4f, 0x4a, 0xff, 0x4e, 0x42, 0x84, 0x96, 0x87, 0x32, 0x08, 0x4e, 0x00, 0x01, 0x3b, + 0xb0, 0x54, 0xe1, 0xf6, 0x5c, 0x1a, 0x8f, 0x00, 0x53, 0x2e, 0x0c, 0xa9, 0x17, 0x3b, 0xc8, 0x06, + 0x7e, 0xed, 0x98, 0xef, 0x65, 0x94, 0x3b, 0x94, 0x0b, 0x43, 0x5a, 0x82, 0x8e, 0xd9, 0xab, 0x5b, + 0x3d, 0xb1, 0x3a, 0xec, 0x78, 0xe7, 0x96, 0xa0, 0xd1, 0x0b, 0x9d, 0x96, 0xa0, 0xe3, 0xd1, 0xaf, + 0x15, 0x45, 0x10, 0x08, 0x6d, 0x14, 0x98, 0x93, 0x28, 0x73, 0x22, 0x2d, 0x7e, 0x25, 0x56, 0x56, + 0x94, 0x24, 0x09, 0x5e, 0x5b, 0x0f, 0x21, 0x65, 0x0f, 0x2f, 0x9c, 0xc4, 0x98, 0x93, 0x6f, 0x2c, + 0xf3, 0x36, 0xa9, 0xac, 0x28, 0xe0, 0x9b, 0x52, 0x47, 0x3f, 0x82, 0x24, 0x7b, 0x03, 0x51, 0x2f, + 0x3c, 
0xc3, 0xbc, 0xb7, 0xb0, 0x91, 0xad, 0xac, 0x28, 0x89, 0x63, 0xff, 0x2d, 0xb0, 0x0b, 0x09, + 0x5b, 0xef, 0x33, 0x73, 0x5e, 0x3f, 0x36, 0x17, 0x55, 0xae, 0xca, 0x8a, 0x12, 0xb7, 0x79, 0xe5, + 0xff, 0x04, 0xd6, 0xf8, 0x1e, 0xc8, 0x99, 0xc1, 0x5c, 0xac, 0x32, 0x17, 0xef, 0x2f, 0xd5, 0x60, + 0x55, 0x56, 0x94, 0x14, 0x19, 0xeb, 0x3f, 0x77, 0x21, 0x4e, 0x58, 0x02, 0xf3, 0xdf, 0x53, 0xb7, + 0x96, 0xc8, 0x75, 0x8a, 0x6f, 0x82, 0x0e, 0x20, 0xcd, 0x47, 0xea, 0xb1, 0x4e, 0x5c, 0xcb, 0x19, + 0x66, 0xd6, 0x58, 0x1d, 0x5e, 0xca, 0xc9, 0x1a, 0x37, 0xad, 0x70, 0x4b, 0xf4, 0x04, 0x36, 0x86, + 0x9a, 0x63, 0xaa, 0xda, 0xc5, 0x15, 0x25, 0x19, 0x81, 0xb9, 0xbb, 0xb3, 0xf4, 0xa5, 0x56, 0xc4, + 0xe1, 0xe4, 0x04, 0x41, 0xbb, 0x90, 0xf5, 0x7f, 0x41, 0xb4, 0x3c, 0xd7, 0xf6, 0x5c, 0xd5, 0xc1, + 0xc4, 0xf2, 0x9c, 0x2e, 0xef, 0x99, 0x36, 0xd8, 0x5d, 0xfe, 0x3f, 0xae, 0x71, 0xc8, 0x14, 0x14, + 0x5f, 0x4e, 0x9b, 0xa7, 0x0f, 0x21, 0xe3, 0x1b, 0xd3, 0x16, 0xc7, 0xb1, 0x0c, 0xd6, 0x1d, 0x10, + 0x66, 0xba, 0xce, 0x4c, 0x6f, 0x72, 0x79, 0x89, 0x8b, 0x69, 0x9f, 0x40, 0xa8, 0x61, 0x11, 0xe2, + 0x86, 0xd6, 0xc1, 0x06, 0xc9, 0xa0, 0x45, 0x5b, 0xa0, 0x6d, 0x49, 0x8d, 0xe9, 0xf2, 0x96, 0xc4, + 0x37, 0x44, 0x0f, 0x01, 0x48, 0xf7, 0x18, 0xf7, 0x3c, 0x43, 0x37, 0xfb, 0x99, 0x1b, 0x8b, 0xda, + 0x0c, 0x7a, 0xb0, 0x23, 0x75, 0x65, 0xcc, 0x14, 0xfd, 0x3f, 0x24, 0x69, 0x86, 0x65, 0xe5, 0xf9, + 0x4d, 0x5e, 0x31, 0x4f, 0xac, 0x0e, 0x2d, 0xcd, 0xd9, 0x8f, 0x20, 0x35, 0xb6, 0xf4, 0xb5, 0xda, + 0x06, 0x80, 0xa4, 0x3b, 0xb4, 0xd9, 0x15, 0x90, 0xf6, 0x60, 0x6d, 0x62, 0x79, 0xb4, 0x0d, 0x37, + 0x07, 0xda, 0xf3, 0xa0, 0x4d, 0x24, 0xaa, 0x8d, 0x1d, 0xf5, 0xd8, 0xf2, 0x1c, 0xe6, 0x3a, 0xa6, + 0xa0, 0x81, 0xf6, 0xdc, 0xef, 0x14, 0x49, 0x03, 0x3b, 0x15, 0xcb, 0x73, 0xa4, 0xcf, 0x42, 0x20, + 0x36, 0xbd, 0xce, 0x40, 0x77, 0x59, 0x32, 0x3a, 0xf3, 0x30, 0x71, 0x17, 0xd5, 0x8e, 0x37, 0x21, + 0xee, 0xe0, 0xbe, 0x6e, 0x99, 0x2c, 0xa9, 0x08, 0x8a, 0xff, 0x85, 0xf2, 0x10, 0xa1, 0xf7, 0x82, + 0x27, 0xac, 0x77, 0xe6, 0xa7, 0x3d, 0xaa, 0x49, 0xd7, 0x71, 0xf8, 0x92, 0x74, 0x1d, 0x9e, 0xe0, + 0x05, 0x7f, 0xa6, 0xda, 0x93, 0x7e, 0x0a, 0x6b, 0x0f, 0xf1, 0x7f, 0x21, 0xae, 0x19, 0xb5, 0xee, + 0xf3, 0x30, 0xac, 0xd3, 0x2e, 0xfb, 0xc0, 0xea, 0x90, 0x6b, 0xaf, 0x10, 0x9f, 0x58, 0xe1, 0x2d, + 0x10, 0x6c, 0xad, 0x8f, 0x55, 0xa2, 0x7f, 0xca, 0x31, 0x8b, 0x29, 0x49, 0x3a, 0xd1, 0xd4, 0x3f, + 0xe5, 0x95, 0x98, 0x0a, 0x5d, 0xeb, 0x14, 0x07, 0xa1, 0x31, 0xf5, 0x16, 0x9d, 0xb8, 0xd4, 0xed, + 0x45, 0x2f, 0x77, 0x7b, 0x18, 0x36, 0xe8, 0x06, 0x78, 0x97, 0x35, 0xd0, 0xdc, 0xee, 0x31, 0x76, + 0x58, 0x1a, 0x4d, 0x17, 0x3e, 0x9a, 0xd3, 0x01, 0x4f, 0xee, 0x2d, 0xc8, 0x01, 0xf8, 0x11, 0x77, + 0xa0, 0xac, 0x9f, 0x4c, 0x4e, 0xd0, 0xdd, 0x1d, 0xe9, 0x86, 0x8b, 0x1d, 0x96, 0x1d, 0x05, 0xc5, + 0xff, 0x92, 0xee, 0xc1, 0xfa, 0x94, 0x6d, 0xf0, 0x1b, 0xf9, 0x0a, 0x2d, 0x82, 0xc5, 0x52, 0xab, + 0xfa, 0x44, 0xf6, 0x8b, 0xec, 0x61, 0x5d, 0xf5, 0xbf, 0xc3, 0xd2, 0xdf, 0x42, 0x20, 0xb6, 0xed, + 0x9e, 0xe6, 0xe2, 0x57, 0xc1, 0x30, 0x3c, 0x03, 0xc3, 0xc8, 0x18, 0x86, 0x01, 0xe5, 0xa2, 0x4b, + 0x53, 0x6e, 0x17, 0x52, 0x1e, 0x0b, 0x89, 0xfd, 0x87, 0xc4, 0xaf, 0x45, 0x97, 0xfb, 0xd4, 0x7d, + 0x1d, 0x1b, 0xbd, 0x47, 0x1a, 0x39, 0x55, 0x80, 0xab, 0xd3, 0xb1, 0x34, 0x00, 0xf1, 0xe2, 0x50, + 0x89, 0x6d, 0x99, 0x04, 0xa3, 0x6d, 0x88, 0x9e, 0x58, 0x1d, 0xfe, 0x13, 0xd4, 0xc2, 0x10, 0x98, + 0x2a, 0xba, 0x0d, 0xeb, 0x26, 0x7e, 0xee, 0xaa, 0x63, 0xac, 0xe0, 0x9b, 0x5d, 0xa3, 0xd3, 0x8d, + 0x80, 0x19, 0xd2, 0x33, 0x10, 0x4b, 0x9a, 0xd9, 0xc5, 0xc6, 0xff, 0xec, 0x0a, 0x3c, 0x03, 0xb1, + 0x8c, 0x0d, 0xfc, 0x6a, 0x00, 
0x2d, 0xb3, 0x42, 0xe1, 0x8f, 0x09, 0x96, 0xa4, 0xfc, 0x44, 0x6d, + 0x60, 0x07, 0x7d, 0x16, 0x02, 0x61, 0x94, 0x71, 0xd0, 0xdd, 0x39, 0xe5, 0x73, 0x2a, 0x2d, 0x65, + 0xe7, 0x1f, 0xae, 0x54, 0xfc, 0xd9, 0xdf, 0xff, 0xf9, 0xeb, 0xf0, 0xae, 0x74, 0x2f, 0x7f, 0xbe, + 0x9d, 0xf7, 0x03, 0x26, 0xf9, 0x17, 0x17, 0x9b, 0x79, 0x99, 0xe7, 0xb1, 0x92, 0xfc, 0x0b, 0x3e, + 0x78, 0xc9, 0xfe, 0x3b, 0xb7, 0x43, 0xd8, 0x42, 0x3b, 0xa1, 0xbb, 0xe8, 0x57, 0x21, 0x88, 0xf3, + 0x94, 0x83, 0xe6, 0xe4, 0xfc, 0x89, 0xa4, 0xb4, 0x28, 0xaa, 0x8f, 0x59, 0x54, 0x3b, 0xe8, 0xfe, + 0x35, 0xa3, 0xca, 0xbf, 0xe0, 0xc7, 0xf9, 0x12, 0xfd, 0x26, 0x04, 0xc9, 0x80, 0x76, 0xe8, 0xce, + 0xd2, 0xf7, 0x3d, 0x7b, 0x77, 0x19, 0x55, 0xce, 0x62, 0xe9, 0x43, 0x16, 0xe5, 0x36, 0xca, 0x5f, + 0x33, 0x4a, 0xf4, 0xdb, 0x10, 0x08, 0xa3, 0x3b, 0x3e, 0x0f, 0xcd, 0xe9, 0x44, 0xb0, 0xe8, 0xdc, + 0x64, 0x16, 0xd1, 0x83, 0xc2, 0x2b, 0x9f, 0xdb, 0x0e, 0xbb, 0xef, 0xbf, 0x0b, 0x81, 0x30, 0xba, + 0x44, 0xf3, 0xe2, 0x9b, 0xbe, 0x69, 0x8b, 0xe2, 0x3b, 0x60, 0xf1, 0x95, 0xa5, 0x07, 0xaf, 0x1c, + 0x5f, 0x97, 0xad, 0x48, 0x69, 0xf7, 0xcb, 0x10, 0x08, 0xa3, 0x7b, 0x38, 0x2f, 0xc8, 0xe9, 0xcb, + 0x9a, 0x7d, 0xf3, 0x52, 0xe6, 0x92, 0x07, 0xb6, 0x3b, 0x0c, 0x58, 0x77, 0xf7, 0x95, 0x4f, 0x6f, + 0x6f, 0x00, 0x6f, 0x77, 0xad, 0xc1, 0xcc, 0x50, 0xf6, 0x04, 0xca, 0x9f, 0x06, 0x5d, 0xb5, 0x11, + 0xfa, 0xc9, 0xc7, 0xbe, 0x5a, 0xdf, 0x32, 0x34, 0xb3, 0x9f, 0xb3, 0x9c, 0x7e, 0xbe, 0x8f, 0x4d, + 0x16, 0x53, 0x9e, 0x8b, 0x34, 0x5b, 0x27, 0x97, 0xff, 0x3b, 0xbe, 0x1b, 0x8c, 0x3b, 0x71, 0xa6, + 0xfc, 0xc1, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa1, 0x58, 0x75, 0xc0, 0x49, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/operations.pb.go new file mode 100644 index 0000000..ff470d6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/operations.pb.go @@ -0,0 +1,274 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1/operations.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operation state. +type ClusterOperationStatus_State int32 + +const ( + // Unused. + ClusterOperationStatus_UNKNOWN ClusterOperationStatus_State = 0 + // The operation has been created. + ClusterOperationStatus_PENDING ClusterOperationStatus_State = 1 + // The operation is running. + ClusterOperationStatus_RUNNING ClusterOperationStatus_State = 2 + // The operation is done; either cancelled or completed. 
+ ClusterOperationStatus_DONE ClusterOperationStatus_State = 3 +) + +var ClusterOperationStatus_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", +} +var ClusterOperationStatus_State_value = map[string]int32{ + "UNKNOWN": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, +} + +func (x ClusterOperationStatus_State) String() string { + return proto.EnumName(ClusterOperationStatus_State_name, int32(x)) +} +func (ClusterOperationStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operations_5e90e9db40c022db, []int{0, 0} +} + +// The status of the operation. +type ClusterOperationStatus struct { + // Output only. A message containing the operation state. + State ClusterOperationStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.ClusterOperationStatus_State" json:"state,omitempty"` + // Output only. A message containing the detailed operation state. + InnerState string `protobuf:"bytes,2,opt,name=inner_state,json=innerState,proto3" json:"inner_state,omitempty"` + // Output only. A message containing any operation metadata details. + Details string `protobuf:"bytes,3,opt,name=details,proto3" json:"details,omitempty"` + // Output only. The time this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperationStatus) Reset() { *m = ClusterOperationStatus{} } +func (m *ClusterOperationStatus) String() string { return proto.CompactTextString(m) } +func (*ClusterOperationStatus) ProtoMessage() {} +func (*ClusterOperationStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_5e90e9db40c022db, []int{0} +} +func (m *ClusterOperationStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperationStatus.Unmarshal(m, b) +} +func (m *ClusterOperationStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperationStatus.Marshal(b, m, deterministic) +} +func (dst *ClusterOperationStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperationStatus.Merge(dst, src) +} +func (m *ClusterOperationStatus) XXX_Size() int { + return xxx_messageInfo_ClusterOperationStatus.Size(m) +} +func (m *ClusterOperationStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperationStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperationStatus proto.InternalMessageInfo + +func (m *ClusterOperationStatus) GetState() ClusterOperationStatus_State { + if m != nil { + return m.State + } + return ClusterOperationStatus_UNKNOWN +} + +func (m *ClusterOperationStatus) GetInnerState() string { + if m != nil { + return m.InnerState + } + return "" +} + +func (m *ClusterOperationStatus) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +func (m *ClusterOperationStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +// Metadata describing the operation. +type ClusterOperationMetadata struct { + // Output only. Name of the cluster for the operation. + ClusterName string `protobuf:"bytes,7,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Output only. Cluster UUID for the operation. 
+ ClusterUuid string `protobuf:"bytes,8,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Output only. Current operation status. + Status *ClusterOperationStatus `protobuf:"bytes,9,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous operation status. + StatusHistory []*ClusterOperationStatus `protobuf:"bytes,10,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. The operation type. + OperationType string `protobuf:"bytes,11,opt,name=operation_type,json=operationType,proto3" json:"operation_type,omitempty"` + // Output only. Short description of operation. + Description string `protobuf:"bytes,12,opt,name=description,proto3" json:"description,omitempty"` + // Output only. Labels associated with the operation + Labels map[string]string `protobuf:"bytes,13,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Errors encountered during operation execution. + Warnings []string `protobuf:"bytes,14,rep,name=warnings,proto3" json:"warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperationMetadata) Reset() { *m = ClusterOperationMetadata{} } +func (m *ClusterOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ClusterOperationMetadata) ProtoMessage() {} +func (*ClusterOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_5e90e9db40c022db, []int{1} +} +func (m *ClusterOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperationMetadata.Unmarshal(m, b) +} +func (m *ClusterOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ClusterOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperationMetadata.Merge(dst, src) +} +func (m *ClusterOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ClusterOperationMetadata.Size(m) +} +func (m *ClusterOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperationMetadata proto.InternalMessageInfo + +func (m *ClusterOperationMetadata) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ClusterOperationMetadata) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *ClusterOperationMetadata) GetStatus() *ClusterOperationStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *ClusterOperationMetadata) GetStatusHistory() []*ClusterOperationStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *ClusterOperationMetadata) GetOperationType() string { + if m != nil { + return m.OperationType + } + return "" +} + +func (m *ClusterOperationMetadata) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ClusterOperationMetadata) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ClusterOperationMetadata) GetWarnings() []string { + if m != nil { + return m.Warnings + } + return nil +} + +func init() { + proto.RegisterType((*ClusterOperationStatus)(nil), "google.cloud.dataproc.v1.ClusterOperationStatus") + 
proto.RegisterType((*ClusterOperationMetadata)(nil), "google.cloud.dataproc.v1.ClusterOperationMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry") + proto.RegisterEnum("google.cloud.dataproc.v1.ClusterOperationStatus_State", ClusterOperationStatus_State_name, ClusterOperationStatus_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1/operations.proto", fileDescriptor_operations_5e90e9db40c022db) +} + +var fileDescriptor_operations_5e90e9db40c022db = []byte{ + // 531 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x53, 0x5d, 0x8b, 0xd3, 0x40, + 0x14, 0x35, 0xcd, 0xee, 0xb6, 0xbd, 0xd9, 0xd6, 0x32, 0x88, 0x0c, 0x65, 0x61, 0x63, 0x41, 0xa8, + 0x2f, 0x89, 0xbb, 0x82, 0xac, 0x0a, 0x22, 0x6b, 0x8b, 0x2b, 0xae, 0x69, 0xc9, 0x6e, 0x5d, 0xf0, + 0xa5, 0x4c, 0x9b, 0x31, 0x06, 0xd3, 0x99, 0x30, 0x33, 0xa9, 0xf4, 0xe7, 0xf8, 0x3b, 0x7d, 0x91, + 0x99, 0x49, 0x4a, 0xfd, 0x58, 0x50, 0x9f, 0x3a, 0xf7, 0xde, 0x73, 0x4e, 0xce, 0xb9, 0xdc, 0xc2, + 0xa3, 0x94, 0xf3, 0x34, 0xa7, 0xe1, 0x32, 0xe7, 0x65, 0x12, 0x26, 0x44, 0x91, 0x42, 0xf0, 0x65, + 0xb8, 0x3e, 0x09, 0x79, 0x41, 0x05, 0x51, 0x19, 0x67, 0x32, 0x28, 0x04, 0x57, 0x1c, 0x61, 0x0b, + 0x0d, 0x0c, 0x34, 0xa8, 0xa1, 0xc1, 0xfa, 0xa4, 0x7f, 0x54, 0x89, 0x90, 0x22, 0x0b, 0x09, 0x63, + 0x5c, 0xed, 0xf2, 0xfa, 0xc7, 0xd5, 0xd4, 0x54, 0x8b, 0xf2, 0x53, 0xa8, 0xb2, 0x15, 0x95, 0x8a, + 0xac, 0x0a, 0x0b, 0x18, 0x7c, 0x6b, 0xc0, 0xfd, 0xd7, 0x79, 0x29, 0x15, 0x15, 0x93, 0xfa, 0xa3, + 0x57, 0x8a, 0xa8, 0x52, 0xa2, 0x4b, 0xd8, 0x97, 0x8a, 0x28, 0x8a, 0x1d, 0xdf, 0x19, 0x76, 0x4f, + 0x9f, 0x06, 0xb7, 0x79, 0x08, 0xfe, 0x2c, 0x10, 0xe8, 0x1f, 0x1a, 0x5b, 0x11, 0x74, 0x0c, 0x5e, + 0xc6, 0x18, 0x15, 0x73, 0xab, 0xd9, 0xf0, 0x9d, 0x61, 0x3b, 0x06, 0xd3, 0x32, 0x38, 0x84, 0xa1, + 0x99, 0x50, 0x45, 0xb2, 0x5c, 0x62, 0xd7, 0x0c, 0xeb, 0x12, 0x8d, 0xa0, 0x67, 0x48, 0x9a, 0x2a, + 0xd4, 0x5c, 0x47, 0xc0, 0x7b, 0xbe, 0x33, 0xf4, 0x4e, 0xfb, 0xb5, 0xa7, 0x3a, 0x5f, 0x70, 0x5d, + 0xe7, 0x8b, 0xbb, 0x86, 0x73, 0xa5, 0x29, 0xba, 0x39, 0x38, 0x83, 0x7d, 0xfb, 0x21, 0x0f, 0x9a, + 0xb3, 0xe8, 0x5d, 0x34, 0xb9, 0x89, 0x7a, 0x77, 0x74, 0x31, 0x1d, 0x47, 0xa3, 0xb7, 0xd1, 0x9b, + 0x9e, 0xa3, 0x8b, 0x78, 0x16, 0x45, 0xba, 0x68, 0xa0, 0x16, 0xec, 0x8d, 0x26, 0xd1, 0xb8, 0xe7, + 0x0e, 0xbe, 0xbb, 0x80, 0x7f, 0x8d, 0xf8, 0x9e, 0x2a, 0xa2, 0x57, 0x80, 0x1e, 0xc0, 0xe1, 0xd2, + 0xce, 0xe6, 0x8c, 0xac, 0x28, 0x6e, 0x1a, 0xef, 0x5e, 0xd5, 0x8b, 0xc8, 0x8a, 0xee, 0x42, 0xca, + 0x32, 0x4b, 0x70, 0xeb, 0x27, 0xc8, 0xac, 0xcc, 0x12, 0x74, 0x01, 0x07, 0xd2, 0x2c, 0x0d, 0xb7, + 0x4d, 0xb0, 0xc7, 0xff, 0xba, 0xec, 0xb8, 0xe2, 0xa3, 0x1b, 0xe8, 0xda, 0xd7, 0xfc, 0x73, 0x26, + 0x15, 0x17, 0x1b, 0x0c, 0xbe, 0xfb, 0x5f, 0x8a, 0x1d, 0xab, 0x73, 0x61, 0x65, 0xd0, 0x43, 0xe8, + 0x6e, 0xcf, 0x72, 0xae, 0x36, 0x05, 0xc5, 0x9e, 0xc9, 0xd1, 0xd9, 0x76, 0xaf, 0x37, 0x05, 0x45, + 0x3e, 0x78, 0x09, 0x95, 0x4b, 0x91, 0x15, 0xba, 0x85, 0x0f, 0x6d, 0xd6, 0x9d, 0x16, 0xfa, 0x00, + 0x07, 0x39, 0x59, 0xd0, 0x5c, 0xe2, 0x8e, 0x71, 0xf6, 0xf2, 0xef, 0x9d, 0xd5, 0x5b, 0x0f, 0x2e, + 0x8d, 0xc0, 0x98, 0x29, 0xb1, 0x89, 0x2b, 0x35, 0xd4, 0x87, 0xd6, 0x57, 0x22, 0x58, 0xc6, 0x52, + 0x89, 0xbb, 0xbe, 0x3b, 0x6c, 0xc7, 0xdb, 0xba, 0xff, 0x0c, 0xbc, 0x1d, 0x0a, 0xea, 0x81, 0xfb, + 0x85, 0x6e, 0xcc, 0x61, 0xb7, 0x63, 0xfd, 0x44, 0xf7, 0x60, 0x7f, 0x4d, 0xf2, 0xb2, 0x3e, 0x4c, + 0x5b, 0x3c, 0x6f, 0x9c, 0x39, 0xe7, 0x12, 0x8e, 0x96, 0x7c, 0x75, 0xab, 0xc7, 0xf3, 0xbb, 0x5b, + 
0x77, 0x72, 0xaa, 0xaf, 0x70, 0xea, 0x7c, 0x7c, 0x55, 0x81, 0x53, 0x9e, 0x13, 0x96, 0x06, 0x5c, + 0xa4, 0x61, 0x4a, 0x99, 0xb9, 0xd1, 0xd0, 0x8e, 0x48, 0x91, 0xc9, 0xdf, 0xff, 0xf7, 0x2f, 0xea, + 0xf7, 0xe2, 0xc0, 0x80, 0x9f, 0xfc, 0x08, 0x00, 0x00, 0xff, 0xff, 0xad, 0xc8, 0x5f, 0xe0, 0x23, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/workflow_templates.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/workflow_templates.pb.go new file mode 100644 index 0000000..9104799 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1/workflow_templates.pb.go @@ -0,0 +1,2519 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1/workflow_templates.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operation state. +type WorkflowMetadata_State int32 + +const ( + // Unused. + WorkflowMetadata_UNKNOWN WorkflowMetadata_State = 0 + // The operation has been created. + WorkflowMetadata_PENDING WorkflowMetadata_State = 1 + // The operation is running. + WorkflowMetadata_RUNNING WorkflowMetadata_State = 2 + // The operation is done; either cancelled or completed. + WorkflowMetadata_DONE WorkflowMetadata_State = 3 +) + +var WorkflowMetadata_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", +} +var WorkflowMetadata_State_value = map[string]int32{ + "UNKNOWN": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, +} + +func (x WorkflowMetadata_State) String() string { + return proto.EnumName(WorkflowMetadata_State_name, int32(x)) +} +func (WorkflowMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{9, 0} +} + +// The workflow node state. +type WorkflowNode_NodeState int32 + +const ( + // State is unspecified. + WorkflowNode_NODE_STATE_UNSPECIFIED WorkflowNode_NodeState = 0 + // The node is awaiting prerequisite node to finish. + WorkflowNode_BLOCKED WorkflowNode_NodeState = 1 + // The node is runnable but not running. + WorkflowNode_RUNNABLE WorkflowNode_NodeState = 2 + // The node is running. + WorkflowNode_RUNNING WorkflowNode_NodeState = 3 + // The node completed successfully. + WorkflowNode_COMPLETED WorkflowNode_NodeState = 4 + // The node failed. A node can be marked FAILED because + // its ancestor or peer failed. 
+ WorkflowNode_FAILED WorkflowNode_NodeState = 5 +) + +var WorkflowNode_NodeState_name = map[int32]string{ + 0: "NODE_STATE_UNSPECIFIED", + 1: "BLOCKED", + 2: "RUNNABLE", + 3: "RUNNING", + 4: "COMPLETED", + 5: "FAILED", +} +var WorkflowNode_NodeState_value = map[string]int32{ + "NODE_STATE_UNSPECIFIED": 0, + "BLOCKED": 1, + "RUNNABLE": 2, + "RUNNING": 3, + "COMPLETED": 4, + "FAILED": 5, +} + +func (x WorkflowNode_NodeState) String() string { + return proto.EnumName(WorkflowNode_NodeState_name, int32(x)) +} +func (WorkflowNode_NodeState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{12, 0} +} + +// A Cloud Dataproc workflow template resource. +type WorkflowTemplate struct { + // Required. The template id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Output only. The "resource name" of the template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. Used to perform a consistent read-modify-write. + // + // This field should be left blank for a `CreateWorkflowTemplate` request. It + // is required for an `UpdateWorkflowTemplate` request, and must match the + // current server version. A typical update template flow would fetch the + // current template with a `GetWorkflowTemplate` request, which will return + // the current template with the `version` field filled in with the + // current server version. The user updates other fields in the template, + // then returns it as part of the `UpdateWorkflowTemplate` request. + Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + // Output only. The time template was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time template was last updated. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Optional. The labels to associate with this template. These labels + // will be propagated to all jobs and clusters created by the workflow + // instance. + // + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // No more than 32 labels can be associated with a template. + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. WorkflowTemplate scheduling information. + Placement *WorkflowTemplatePlacement `protobuf:"bytes,7,opt,name=placement,proto3" json:"placement,omitempty"` + // Required. The Directed Acyclic Graph of Jobs to submit. + Jobs []*OrderedJob `protobuf:"bytes,8,rep,name=jobs,proto3" json:"jobs,omitempty"` + // Optional. Template parameters whose values are substituted into the + // template. 
Values for parameters must be provided when the template is + // instantiated. + Parameters []*TemplateParameter `protobuf:"bytes,9,rep,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTemplate) Reset() { *m = WorkflowTemplate{} } +func (m *WorkflowTemplate) String() string { return proto.CompactTextString(m) } +func (*WorkflowTemplate) ProtoMessage() {} +func (*WorkflowTemplate) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{0} +} +func (m *WorkflowTemplate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTemplate.Unmarshal(m, b) +} +func (m *WorkflowTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTemplate.Marshal(b, m, deterministic) +} +func (dst *WorkflowTemplate) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTemplate.Merge(dst, src) +} +func (m *WorkflowTemplate) XXX_Size() int { + return xxx_messageInfo_WorkflowTemplate.Size(m) +} +func (m *WorkflowTemplate) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTemplate.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTemplate proto.InternalMessageInfo + +func (m *WorkflowTemplate) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *WorkflowTemplate) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WorkflowTemplate) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowTemplate) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *WorkflowTemplate) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *WorkflowTemplate) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *WorkflowTemplate) GetPlacement() *WorkflowTemplatePlacement { + if m != nil { + return m.Placement + } + return nil +} + +func (m *WorkflowTemplate) GetJobs() []*OrderedJob { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *WorkflowTemplate) GetParameters() []*TemplateParameter { + if m != nil { + return m.Parameters + } + return nil +} + +// Specifies workflow execution target. +// +// Either `managed_cluster` or `cluster_selector` is required. +type WorkflowTemplatePlacement struct { + // Required. Specifies where workflow executes; either on a managed + // cluster or an existing cluster chosen by labels. 
+ // + // Types that are valid to be assigned to Placement: + // *WorkflowTemplatePlacement_ManagedCluster + // *WorkflowTemplatePlacement_ClusterSelector + Placement isWorkflowTemplatePlacement_Placement `protobuf_oneof:"placement"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTemplatePlacement) Reset() { *m = WorkflowTemplatePlacement{} } +func (m *WorkflowTemplatePlacement) String() string { return proto.CompactTextString(m) } +func (*WorkflowTemplatePlacement) ProtoMessage() {} +func (*WorkflowTemplatePlacement) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{1} +} +func (m *WorkflowTemplatePlacement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTemplatePlacement.Unmarshal(m, b) +} +func (m *WorkflowTemplatePlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTemplatePlacement.Marshal(b, m, deterministic) +} +func (dst *WorkflowTemplatePlacement) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTemplatePlacement.Merge(dst, src) +} +func (m *WorkflowTemplatePlacement) XXX_Size() int { + return xxx_messageInfo_WorkflowTemplatePlacement.Size(m) +} +func (m *WorkflowTemplatePlacement) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTemplatePlacement.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTemplatePlacement proto.InternalMessageInfo + +type isWorkflowTemplatePlacement_Placement interface { + isWorkflowTemplatePlacement_Placement() +} + +type WorkflowTemplatePlacement_ManagedCluster struct { + ManagedCluster *ManagedCluster `protobuf:"bytes,1,opt,name=managed_cluster,json=managedCluster,proto3,oneof"` +} + +type WorkflowTemplatePlacement_ClusterSelector struct { + ClusterSelector *ClusterSelector `protobuf:"bytes,2,opt,name=cluster_selector,json=clusterSelector,proto3,oneof"` +} + +func (*WorkflowTemplatePlacement_ManagedCluster) isWorkflowTemplatePlacement_Placement() {} + +func (*WorkflowTemplatePlacement_ClusterSelector) isWorkflowTemplatePlacement_Placement() {} + +func (m *WorkflowTemplatePlacement) GetPlacement() isWorkflowTemplatePlacement_Placement { + if m != nil { + return m.Placement + } + return nil +} + +func (m *WorkflowTemplatePlacement) GetManagedCluster() *ManagedCluster { + if x, ok := m.GetPlacement().(*WorkflowTemplatePlacement_ManagedCluster); ok { + return x.ManagedCluster + } + return nil +} + +func (m *WorkflowTemplatePlacement) GetClusterSelector() *ClusterSelector { + if x, ok := m.GetPlacement().(*WorkflowTemplatePlacement_ClusterSelector); ok { + return x.ClusterSelector + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*WorkflowTemplatePlacement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _WorkflowTemplatePlacement_OneofMarshaler, _WorkflowTemplatePlacement_OneofUnmarshaler, _WorkflowTemplatePlacement_OneofSizer, []interface{}{ + (*WorkflowTemplatePlacement_ManagedCluster)(nil), + (*WorkflowTemplatePlacement_ClusterSelector)(nil), + } +} + +func _WorkflowTemplatePlacement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*WorkflowTemplatePlacement) + // placement + switch x := m.Placement.(type) { + case *WorkflowTemplatePlacement_ManagedCluster: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManagedCluster); err != nil { + return err + } + case *WorkflowTemplatePlacement_ClusterSelector: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClusterSelector); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("WorkflowTemplatePlacement.Placement has unexpected type %T", x) + } + return nil +} + +func _WorkflowTemplatePlacement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*WorkflowTemplatePlacement) + switch tag { + case 1: // placement.managed_cluster + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ManagedCluster) + err := b.DecodeMessage(msg) + m.Placement = &WorkflowTemplatePlacement_ManagedCluster{msg} + return true, err + case 2: // placement.cluster_selector + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClusterSelector) + err := b.DecodeMessage(msg) + m.Placement = &WorkflowTemplatePlacement_ClusterSelector{msg} + return true, err + default: + return false, nil + } +} + +func _WorkflowTemplatePlacement_OneofSizer(msg proto.Message) (n int) { + m := msg.(*WorkflowTemplatePlacement) + // placement + switch x := m.Placement.(type) { + case *WorkflowTemplatePlacement_ManagedCluster: + s := proto.Size(x.ManagedCluster) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *WorkflowTemplatePlacement_ClusterSelector: + s := proto.Size(x.ClusterSelector) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Cluster that is managed by the workflow. +type ManagedCluster struct { + // Required. The cluster name prefix. A unique cluster name will be formed by + // appending a random suffix. + // + // The name must contain only lower-case letters (a-z), numbers (0-9), + // and hyphens (-). Must begin with a letter. Cannot begin or end with + // hyphen. Must consist of between 2 and 35 characters. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The cluster configuration. + Config *ClusterConfig `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` + // Optional. The labels to associate with this cluster. 
+ // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given cluster. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedCluster) Reset() { *m = ManagedCluster{} } +func (m *ManagedCluster) String() string { return proto.CompactTextString(m) } +func (*ManagedCluster) ProtoMessage() {} +func (*ManagedCluster) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{2} +} +func (m *ManagedCluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedCluster.Unmarshal(m, b) +} +func (m *ManagedCluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedCluster.Marshal(b, m, deterministic) +} +func (dst *ManagedCluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedCluster.Merge(dst, src) +} +func (m *ManagedCluster) XXX_Size() int { + return xxx_messageInfo_ManagedCluster.Size(m) +} +func (m *ManagedCluster) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedCluster.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedCluster proto.InternalMessageInfo + +func (m *ManagedCluster) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ManagedCluster) GetConfig() *ClusterConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *ManagedCluster) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// A selector that chooses target cluster for jobs based on metadata. +type ClusterSelector struct { + // Optional. The zone where workflow process executes. This parameter does not + // affect the selection of the cluster. + // + // If unspecified, the zone of the first cluster matching the selector + // is used. + Zone string `protobuf:"bytes,1,opt,name=zone,proto3" json:"zone,omitempty"` + // Required. The cluster labels. Cluster must have all labels + // to match. 
+ ClusterLabels map[string]string `protobuf:"bytes,2,rep,name=cluster_labels,json=clusterLabels,proto3" json:"cluster_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterSelector) Reset() { *m = ClusterSelector{} } +func (m *ClusterSelector) String() string { return proto.CompactTextString(m) } +func (*ClusterSelector) ProtoMessage() {} +func (*ClusterSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{3} +} +func (m *ClusterSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterSelector.Unmarshal(m, b) +} +func (m *ClusterSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterSelector.Marshal(b, m, deterministic) +} +func (dst *ClusterSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterSelector.Merge(dst, src) +} +func (m *ClusterSelector) XXX_Size() int { + return xxx_messageInfo_ClusterSelector.Size(m) +} +func (m *ClusterSelector) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterSelector proto.InternalMessageInfo + +func (m *ClusterSelector) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ClusterSelector) GetClusterLabels() map[string]string { + if m != nil { + return m.ClusterLabels + } + return nil +} + +// A job executed by the workflow. +type OrderedJob struct { + // Required. The step id. The id must be unique among all jobs + // within the template. + // + // The step id is used as prefix for job id, as job + // `goog-dataproc-workflow-step-id` label, and in + // [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] + // field from other steps. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + StepId string `protobuf:"bytes,1,opt,name=step_id,json=stepId,proto3" json:"step_id,omitempty"` + // Required. The job definition. + // + // Types that are valid to be assigned to JobType: + // *OrderedJob_HadoopJob + // *OrderedJob_SparkJob + // *OrderedJob_PysparkJob + // *OrderedJob_HiveJob + // *OrderedJob_PigJob + // *OrderedJob_SparkSqlJob + JobType isOrderedJob_JobType `protobuf_oneof:"job_type"` + // Optional. The labels to associate with this job. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given job. + Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. Job scheduling configuration. + Scheduling *JobScheduling `protobuf:"bytes,9,opt,name=scheduling,proto3" json:"scheduling,omitempty"` + // Optional. The optional list of prerequisite job step_ids. + // If not specified, the job will start at the beginning of workflow. 
+ PrerequisiteStepIds []string `protobuf:"bytes,10,rep,name=prerequisite_step_ids,json=prerequisiteStepIds,proto3" json:"prerequisite_step_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrderedJob) Reset() { *m = OrderedJob{} } +func (m *OrderedJob) String() string { return proto.CompactTextString(m) } +func (*OrderedJob) ProtoMessage() {} +func (*OrderedJob) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{4} +} +func (m *OrderedJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrderedJob.Unmarshal(m, b) +} +func (m *OrderedJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrderedJob.Marshal(b, m, deterministic) +} +func (dst *OrderedJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrderedJob.Merge(dst, src) +} +func (m *OrderedJob) XXX_Size() int { + return xxx_messageInfo_OrderedJob.Size(m) +} +func (m *OrderedJob) XXX_DiscardUnknown() { + xxx_messageInfo_OrderedJob.DiscardUnknown(m) +} + +var xxx_messageInfo_OrderedJob proto.InternalMessageInfo + +func (m *OrderedJob) GetStepId() string { + if m != nil { + return m.StepId + } + return "" +} + +type isOrderedJob_JobType interface { + isOrderedJob_JobType() +} + +type OrderedJob_HadoopJob struct { + HadoopJob *HadoopJob `protobuf:"bytes,2,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"` +} + +type OrderedJob_SparkJob struct { + SparkJob *SparkJob `protobuf:"bytes,3,opt,name=spark_job,json=sparkJob,proto3,oneof"` +} + +type OrderedJob_PysparkJob struct { + PysparkJob *PySparkJob `protobuf:"bytes,4,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"` +} + +type OrderedJob_HiveJob struct { + HiveJob *HiveJob `protobuf:"bytes,5,opt,name=hive_job,json=hiveJob,proto3,oneof"` +} + +type OrderedJob_PigJob struct { + PigJob *PigJob `protobuf:"bytes,6,opt,name=pig_job,json=pigJob,proto3,oneof"` +} + +type OrderedJob_SparkSqlJob struct { + SparkSqlJob *SparkSqlJob `protobuf:"bytes,7,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"` +} + +func (*OrderedJob_HadoopJob) isOrderedJob_JobType() {} + +func (*OrderedJob_SparkJob) isOrderedJob_JobType() {} + +func (*OrderedJob_PysparkJob) isOrderedJob_JobType() {} + +func (*OrderedJob_HiveJob) isOrderedJob_JobType() {} + +func (*OrderedJob_PigJob) isOrderedJob_JobType() {} + +func (*OrderedJob_SparkSqlJob) isOrderedJob_JobType() {} + +func (m *OrderedJob) GetJobType() isOrderedJob_JobType { + if m != nil { + return m.JobType + } + return nil +} + +func (m *OrderedJob) GetHadoopJob() *HadoopJob { + if x, ok := m.GetJobType().(*OrderedJob_HadoopJob); ok { + return x.HadoopJob + } + return nil +} + +func (m *OrderedJob) GetSparkJob() *SparkJob { + if x, ok := m.GetJobType().(*OrderedJob_SparkJob); ok { + return x.SparkJob + } + return nil +} + +func (m *OrderedJob) GetPysparkJob() *PySparkJob { + if x, ok := m.GetJobType().(*OrderedJob_PysparkJob); ok { + return x.PysparkJob + } + return nil +} + +func (m *OrderedJob) GetHiveJob() *HiveJob { + if x, ok := m.GetJobType().(*OrderedJob_HiveJob); ok { + return x.HiveJob + } + return nil +} + +func (m *OrderedJob) GetPigJob() *PigJob { + if x, ok := m.GetJobType().(*OrderedJob_PigJob); ok { + return x.PigJob + } + return nil +} + +func (m *OrderedJob) GetSparkSqlJob() *SparkSqlJob { + if x, ok := m.GetJobType().(*OrderedJob_SparkSqlJob); ok { + return x.SparkSqlJob + } + return nil +} + +func (m *OrderedJob) GetLabels() map[string]string { + if m != nil { + 
return m.Labels + } + return nil +} + +func (m *OrderedJob) GetScheduling() *JobScheduling { + if m != nil { + return m.Scheduling + } + return nil +} + +func (m *OrderedJob) GetPrerequisiteStepIds() []string { + if m != nil { + return m.PrerequisiteStepIds + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*OrderedJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OrderedJob_OneofMarshaler, _OrderedJob_OneofUnmarshaler, _OrderedJob_OneofSizer, []interface{}{ + (*OrderedJob_HadoopJob)(nil), + (*OrderedJob_SparkJob)(nil), + (*OrderedJob_PysparkJob)(nil), + (*OrderedJob_HiveJob)(nil), + (*OrderedJob_PigJob)(nil), + (*OrderedJob_SparkSqlJob)(nil), + } +} + +func _OrderedJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OrderedJob) + // job_type + switch x := m.JobType.(type) { + case *OrderedJob_HadoopJob: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HadoopJob); err != nil { + return err + } + case *OrderedJob_SparkJob: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkJob); err != nil { + return err + } + case *OrderedJob_PysparkJob: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PysparkJob); err != nil { + return err + } + case *OrderedJob_HiveJob: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HiveJob); err != nil { + return err + } + case *OrderedJob_PigJob: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PigJob); err != nil { + return err + } + case *OrderedJob_SparkSqlJob: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkSqlJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OrderedJob.JobType has unexpected type %T", x) + } + return nil +} + +func _OrderedJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OrderedJob) + switch tag { + case 2: // job_type.hadoop_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HadoopJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_HadoopJob{msg} + return true, err + case 3: // job_type.spark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_SparkJob{msg} + return true, err + case 4: // job_type.pyspark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PySparkJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_PysparkJob{msg} + return true, err + case 5: // job_type.hive_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HiveJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_HiveJob{msg} + return true, err + case 6: // job_type.pig_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PigJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_PigJob{msg} + return true, err + case 7: // job_type.spark_sql_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkSqlJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_SparkSqlJob{msg} + return true, err + default: + return false, nil + } +} + +func _OrderedJob_OneofSizer(msg proto.Message) (n 
int) { + m := msg.(*OrderedJob) + // job_type + switch x := m.JobType.(type) { + case *OrderedJob_HadoopJob: + s := proto.Size(x.HadoopJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_SparkJob: + s := proto.Size(x.SparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_PysparkJob: + s := proto.Size(x.PysparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_HiveJob: + s := proto.Size(x.HiveJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_PigJob: + s := proto.Size(x.PigJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_SparkSqlJob: + s := proto.Size(x.SparkSqlJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A configurable parameter that replaces one or more fields in the template. +// Parameterizable fields: +// - Labels +// - File uris +// - Job properties +// - Job arguments +// - Script variables +// - Main class (in HadoopJob and SparkJob) +// - Zone (in ClusterSelector) +type TemplateParameter struct { + // Required. Parameter name. + // The parameter name is used as the key, and paired with the + // parameter value, which are passed to the template when the template + // is instantiated. + // The name must contain only capital letters (A-Z), numbers (0-9), and + // underscores (_), and must not start with a number. The maximum length is + // 40 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Paths to all fields that the parameter replaces. + // A field is allowed to appear in at most one parameter's list of field + // paths. + // + // A field path is similar in syntax to a + // [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + // field path that references the zone field of a workflow template's cluster + // selector would be specified as `placement.clusterSelector.zone`. + // + // Also, field paths can reference fields using the following syntax: + // + // * Values in maps can be referenced by key: + // * labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * placement.managedCluster.labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * jobs['step-id'].labels['key'] + // + // * Jobs in the jobs list can be referenced by step-id: + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * jobs['step-id'].hiveJob.queryFileUri + // * jobs['step-id'].pySparkJob.mainPythonFileUri + // * jobs['step-id'].hadoopJob.jarFileUris[0] + // * jobs['step-id'].hadoopJob.archiveUris[0] + // * jobs['step-id'].hadoopJob.fileUris[0] + // * jobs['step-id'].pySparkJob.pythonFileUris[0] + // + // * Items in repeated fields can be referenced by a zero-based index: + // * jobs['step-id'].sparkJob.args[0] + // + // * Other examples: + // * jobs['step-id'].hadoopJob.properties['key'] + // * jobs['step-id'].hadoopJob.args[0] + // * jobs['step-id'].hiveJob.scriptVariables['key'] + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * placement.clusterSelector.zone + // + // It may not be possible to parameterize maps and repeated fields in their + // entirety since only individual map values and individual items in repeated + // fields can be referenced. 
For example, the following field paths are + // invalid: + // + // - placement.clusterSelector.clusterLabels + // - jobs['step-id'].sparkJob.args + Fields []string `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + // Optional. Brief description of the parameter. + // Must not exceed 1024 characters. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. Validation rules to be applied to this parameter's value. + Validation *ParameterValidation `protobuf:"bytes,4,opt,name=validation,proto3" json:"validation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TemplateParameter) Reset() { *m = TemplateParameter{} } +func (m *TemplateParameter) String() string { return proto.CompactTextString(m) } +func (*TemplateParameter) ProtoMessage() {} +func (*TemplateParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{5} +} +func (m *TemplateParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TemplateParameter.Unmarshal(m, b) +} +func (m *TemplateParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TemplateParameter.Marshal(b, m, deterministic) +} +func (dst *TemplateParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_TemplateParameter.Merge(dst, src) +} +func (m *TemplateParameter) XXX_Size() int { + return xxx_messageInfo_TemplateParameter.Size(m) +} +func (m *TemplateParameter) XXX_DiscardUnknown() { + xxx_messageInfo_TemplateParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_TemplateParameter proto.InternalMessageInfo + +func (m *TemplateParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TemplateParameter) GetFields() []string { + if m != nil { + return m.Fields + } + return nil +} + +func (m *TemplateParameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TemplateParameter) GetValidation() *ParameterValidation { + if m != nil { + return m.Validation + } + return nil +} + +// Configuration for parameter validation. +type ParameterValidation struct { + // Required. The type of validation to be performed. 
+ // + // Types that are valid to be assigned to ValidationType: + // *ParameterValidation_Regex + // *ParameterValidation_Values + ValidationType isParameterValidation_ValidationType `protobuf_oneof:"validation_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParameterValidation) Reset() { *m = ParameterValidation{} } +func (m *ParameterValidation) String() string { return proto.CompactTextString(m) } +func (*ParameterValidation) ProtoMessage() {} +func (*ParameterValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{6} +} +func (m *ParameterValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParameterValidation.Unmarshal(m, b) +} +func (m *ParameterValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParameterValidation.Marshal(b, m, deterministic) +} +func (dst *ParameterValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParameterValidation.Merge(dst, src) +} +func (m *ParameterValidation) XXX_Size() int { + return xxx_messageInfo_ParameterValidation.Size(m) +} +func (m *ParameterValidation) XXX_DiscardUnknown() { + xxx_messageInfo_ParameterValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_ParameterValidation proto.InternalMessageInfo + +type isParameterValidation_ValidationType interface { + isParameterValidation_ValidationType() +} + +type ParameterValidation_Regex struct { + Regex *RegexValidation `protobuf:"bytes,1,opt,name=regex,proto3,oneof"` +} + +type ParameterValidation_Values struct { + Values *ValueValidation `protobuf:"bytes,2,opt,name=values,proto3,oneof"` +} + +func (*ParameterValidation_Regex) isParameterValidation_ValidationType() {} + +func (*ParameterValidation_Values) isParameterValidation_ValidationType() {} + +func (m *ParameterValidation) GetValidationType() isParameterValidation_ValidationType { + if m != nil { + return m.ValidationType + } + return nil +} + +func (m *ParameterValidation) GetRegex() *RegexValidation { + if x, ok := m.GetValidationType().(*ParameterValidation_Regex); ok { + return x.Regex + } + return nil +} + +func (m *ParameterValidation) GetValues() *ValueValidation { + if x, ok := m.GetValidationType().(*ParameterValidation_Values); ok { + return x.Values + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ParameterValidation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ParameterValidation_OneofMarshaler, _ParameterValidation_OneofUnmarshaler, _ParameterValidation_OneofSizer, []interface{}{ + (*ParameterValidation_Regex)(nil), + (*ParameterValidation_Values)(nil), + } +} + +func _ParameterValidation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ParameterValidation) + // validation_type + switch x := m.ValidationType.(type) { + case *ParameterValidation_Regex: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Regex); err != nil { + return err + } + case *ParameterValidation_Values: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Values); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ParameterValidation.ValidationType has unexpected type %T", x) + } + return nil +} + +func _ParameterValidation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ParameterValidation) + switch tag { + case 1: // validation_type.regex + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RegexValidation) + err := b.DecodeMessage(msg) + m.ValidationType = &ParameterValidation_Regex{msg} + return true, err + case 2: // validation_type.values + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ValueValidation) + err := b.DecodeMessage(msg) + m.ValidationType = &ParameterValidation_Values{msg} + return true, err + default: + return false, nil + } +} + +func _ParameterValidation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ParameterValidation) + // validation_type + switch x := m.ValidationType.(type) { + case *ParameterValidation_Regex: + s := proto.Size(x.Regex) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ParameterValidation_Values: + s := proto.Size(x.Values) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Validation based on regular expressions. +type RegexValidation struct { + // Required. RE2 regular expressions used to validate the parameter's value. + // The value must match the regex in its entirety (substring + // matches are not sufficient). 
+ Regexes []string `protobuf:"bytes,1,rep,name=regexes,proto3" json:"regexes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegexValidation) Reset() { *m = RegexValidation{} } +func (m *RegexValidation) String() string { return proto.CompactTextString(m) } +func (*RegexValidation) ProtoMessage() {} +func (*RegexValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{7} +} +func (m *RegexValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegexValidation.Unmarshal(m, b) +} +func (m *RegexValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegexValidation.Marshal(b, m, deterministic) +} +func (dst *RegexValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegexValidation.Merge(dst, src) +} +func (m *RegexValidation) XXX_Size() int { + return xxx_messageInfo_RegexValidation.Size(m) +} +func (m *RegexValidation) XXX_DiscardUnknown() { + xxx_messageInfo_RegexValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_RegexValidation proto.InternalMessageInfo + +func (m *RegexValidation) GetRegexes() []string { + if m != nil { + return m.Regexes + } + return nil +} + +// Validation based on a list of allowed values. +type ValueValidation struct { + // Required. List of allowed values for the parameter. + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueValidation) Reset() { *m = ValueValidation{} } +func (m *ValueValidation) String() string { return proto.CompactTextString(m) } +func (*ValueValidation) ProtoMessage() {} +func (*ValueValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{8} +} +func (m *ValueValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueValidation.Unmarshal(m, b) +} +func (m *ValueValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueValidation.Marshal(b, m, deterministic) +} +func (dst *ValueValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueValidation.Merge(dst, src) +} +func (m *ValueValidation) XXX_Size() int { + return xxx_messageInfo_ValueValidation.Size(m) +} +func (m *ValueValidation) XXX_DiscardUnknown() { + xxx_messageInfo_ValueValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueValidation proto.InternalMessageInfo + +func (m *ValueValidation) GetValues() []string { + if m != nil { + return m.Values + } + return nil +} + +// A Cloud Dataproc workflow template resource. +type WorkflowMetadata struct { + // Output only. The "resource name" of the template. + Template string `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + // Output only. The version of template at the time of + // workflow instantiation. + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // Output only. The create cluster operation metadata. + CreateCluster *ClusterOperation `protobuf:"bytes,3,opt,name=create_cluster,json=createCluster,proto3" json:"create_cluster,omitempty"` + // Output only. The workflow graph. + Graph *WorkflowGraph `protobuf:"bytes,4,opt,name=graph,proto3" json:"graph,omitempty"` + // Output only. The delete cluster operation metadata. 
+ DeleteCluster *ClusterOperation `protobuf:"bytes,5,opt,name=delete_cluster,json=deleteCluster,proto3" json:"delete_cluster,omitempty"` + // Output only. The workflow state. + State WorkflowMetadata_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.cloud.dataproc.v1.WorkflowMetadata_State" json:"state,omitempty"` + // Output only. The name of the target cluster. + ClusterName string `protobuf:"bytes,7,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Map from parameter names to values that were used for those parameters. + Parameters map[string]string `protobuf:"bytes,8,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Workflow start time. + StartTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Workflow end time. + EndTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The UUID of target cluster. + ClusterUuid string `protobuf:"bytes,11,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowMetadata) Reset() { *m = WorkflowMetadata{} } +func (m *WorkflowMetadata) String() string { return proto.CompactTextString(m) } +func (*WorkflowMetadata) ProtoMessage() {} +func (*WorkflowMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{9} +} +func (m *WorkflowMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowMetadata.Unmarshal(m, b) +} +func (m *WorkflowMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowMetadata.Marshal(b, m, deterministic) +} +func (dst *WorkflowMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowMetadata.Merge(dst, src) +} +func (m *WorkflowMetadata) XXX_Size() int { + return xxx_messageInfo_WorkflowMetadata.Size(m) +} +func (m *WorkflowMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowMetadata proto.InternalMessageInfo + +func (m *WorkflowMetadata) GetTemplate() string { + if m != nil { + return m.Template + } + return "" +} + +func (m *WorkflowMetadata) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowMetadata) GetCreateCluster() *ClusterOperation { + if m != nil { + return m.CreateCluster + } + return nil +} + +func (m *WorkflowMetadata) GetGraph() *WorkflowGraph { + if m != nil { + return m.Graph + } + return nil +} + +func (m *WorkflowMetadata) GetDeleteCluster() *ClusterOperation { + if m != nil { + return m.DeleteCluster + } + return nil +} + +func (m *WorkflowMetadata) GetState() WorkflowMetadata_State { + if m != nil { + return m.State + } + return WorkflowMetadata_UNKNOWN +} + +func (m *WorkflowMetadata) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *WorkflowMetadata) GetParameters() map[string]string { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *WorkflowMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WorkflowMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return 
nil +} + +func (m *WorkflowMetadata) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +// The cluster operation triggered by a workflow. +type ClusterOperation struct { + // Output only. The id of the cluster operation. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Output only. Error, if operation failed. + Error string `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` + // Output only. Indicates the operation is done. + Done bool `protobuf:"varint,3,opt,name=done,proto3" json:"done,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperation) Reset() { *m = ClusterOperation{} } +func (m *ClusterOperation) String() string { return proto.CompactTextString(m) } +func (*ClusterOperation) ProtoMessage() {} +func (*ClusterOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{10} +} +func (m *ClusterOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperation.Unmarshal(m, b) +} +func (m *ClusterOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperation.Marshal(b, m, deterministic) +} +func (dst *ClusterOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperation.Merge(dst, src) +} +func (m *ClusterOperation) XXX_Size() int { + return xxx_messageInfo_ClusterOperation.Size(m) +} +func (m *ClusterOperation) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperation proto.InternalMessageInfo + +func (m *ClusterOperation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *ClusterOperation) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +func (m *ClusterOperation) GetDone() bool { + if m != nil { + return m.Done + } + return false +} + +// The workflow graph. +type WorkflowGraph struct { + // Output only. The workflow nodes. + Nodes []*WorkflowNode `protobuf:"bytes,1,rep,name=nodes,proto3" json:"nodes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowGraph) Reset() { *m = WorkflowGraph{} } +func (m *WorkflowGraph) String() string { return proto.CompactTextString(m) } +func (*WorkflowGraph) ProtoMessage() {} +func (*WorkflowGraph) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{11} +} +func (m *WorkflowGraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowGraph.Unmarshal(m, b) +} +func (m *WorkflowGraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowGraph.Marshal(b, m, deterministic) +} +func (dst *WorkflowGraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowGraph.Merge(dst, src) +} +func (m *WorkflowGraph) XXX_Size() int { + return xxx_messageInfo_WorkflowGraph.Size(m) +} +func (m *WorkflowGraph) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowGraph.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowGraph proto.InternalMessageInfo + +func (m *WorkflowGraph) GetNodes() []*WorkflowNode { + if m != nil { + return m.Nodes + } + return nil +} + +// The workflow node. +type WorkflowNode struct { + // Output only. The name of the node. 
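// Illustrative aside (not part of the generated file): given a *WorkflowMetadata
// value md decoded from the instantiate Operation (see the polling sketch in the
// client code further down), the node states in the graph can be inspected
// roughly like this. The standard library "log" import and the FAILED node state
// are assumptions taken from the public Dataproc API surface:
//
//	for _, n := range md.GetGraph().GetNodes() {
//		if n.GetState() == WorkflowNode_FAILED {
//			log.Printf("step %s failed (%d prerequisites): %s",
//				n.GetStepId(), len(n.GetPrerequisiteStepIds()), n.GetError())
//		}
//	}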
+ StepId string `protobuf:"bytes,1,opt,name=step_id,json=stepId,proto3" json:"step_id,omitempty"` + // Output only. Node's prerequisite nodes. + PrerequisiteStepIds []string `protobuf:"bytes,2,rep,name=prerequisite_step_ids,json=prerequisiteStepIds,proto3" json:"prerequisite_step_ids,omitempty"` + // Output only. The job id; populated after the node enters RUNNING state. + JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Output only. The node state. + State WorkflowNode_NodeState `protobuf:"varint,5,opt,name=state,proto3,enum=google.cloud.dataproc.v1.WorkflowNode_NodeState" json:"state,omitempty"` + // Output only. The error detail. + Error string `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowNode) Reset() { *m = WorkflowNode{} } +func (m *WorkflowNode) String() string { return proto.CompactTextString(m) } +func (*WorkflowNode) ProtoMessage() {} +func (*WorkflowNode) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{12} +} +func (m *WorkflowNode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowNode.Unmarshal(m, b) +} +func (m *WorkflowNode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowNode.Marshal(b, m, deterministic) +} +func (dst *WorkflowNode) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowNode.Merge(dst, src) +} +func (m *WorkflowNode) XXX_Size() int { + return xxx_messageInfo_WorkflowNode.Size(m) +} +func (m *WorkflowNode) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowNode.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowNode proto.InternalMessageInfo + +func (m *WorkflowNode) GetStepId() string { + if m != nil { + return m.StepId + } + return "" +} + +func (m *WorkflowNode) GetPrerequisiteStepIds() []string { + if m != nil { + return m.PrerequisiteStepIds + } + return nil +} + +func (m *WorkflowNode) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func (m *WorkflowNode) GetState() WorkflowNode_NodeState { + if m != nil { + return m.State + } + return WorkflowNode_NODE_STATE_UNSPECIFIED +} + +func (m *WorkflowNode) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +// A request to create a workflow template. +type CreateWorkflowTemplateRequest struct { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The Dataproc workflow template to create. 
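// Illustrative usage sketch (editorial, not generated code): creating a template
// through the client defined later in this file. The connection, context, project
// and region names, and the assembled *WorkflowTemplate are assumptions, and error
// handling is elided:
//
//	c := NewWorkflowTemplateServiceClient(conn) // conn: an already dialed *grpc.ClientConn
//	created, err := c.CreateWorkflowTemplate(ctx, &CreateWorkflowTemplateRequest{
//		Parent:   "projects/my-project/regions/us-central1", // hypothetical
//		Template: tpl, // a *WorkflowTemplate with id, placement and jobs filled in elsewhere
//	})
//	if err != nil {
//		// handle the RPC error
//	}
//	_ = created.GetVersion() // the stored template carries a server-assigned version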
+ Template *WorkflowTemplate `protobuf:"bytes,2,opt,name=template,proto3" json:"template,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowTemplateRequest) Reset() { *m = CreateWorkflowTemplateRequest{} } +func (m *CreateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowTemplateRequest) ProtoMessage() {} +func (*CreateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{13} +} +func (m *CreateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *CreateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *CreateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Size(m) +} +func (m *CreateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *CreateWorkflowTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +// A request to fetch a workflow template. +type GetWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to retrieve. Only previously + // instantiated versions can be retrieved. + // + // If unspecified, retrieves the current version.
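// Illustrative usage sketch (editorial): fetching either the current version or a
// specific, previously instantiated version. The template name below is a
// hypothetical resource name of the form described above:
//
//	name := "projects/my-project/regions/us-central1/workflowTemplates/sparkpi"
//	current, err := c.GetWorkflowTemplate(ctx, &GetWorkflowTemplateRequest{Name: name})
//	v3, err := c.GetWorkflowTemplate(ctx, &GetWorkflowTemplateRequest{Name: name, Version: 3})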
+ Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowTemplateRequest) Reset() { *m = GetWorkflowTemplateRequest{} } +func (m *GetWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowTemplateRequest) ProtoMessage() {} +func (*GetWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{14} +} +func (m *GetWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *GetWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowTemplateRequest.Merge(dst, src) +} +func (m *GetWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowTemplateRequest.Size(m) +} +func (m *GetWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *GetWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +// A request to instantiate a workflow template. +type InstantiateWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to instantiate. If specified, + // the workflow will be instantiated only if the current version of + // the workflow template has the supplied version. + // + // This option cannot be used to instantiate a previous version of + // workflow template. + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,5,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Optional. Map from parameter names to values that should be used for those + // parameters. Values may not exceed 100 characters. 
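// Illustrative usage sketch (editorial): instantiating a template with an
// idempotency tag and parameter values. The uuid package (github.com/google/uuid)
// and the CLUSTER_NAME parameter are assumptions, not part of this API:
//
//	op, err := c.InstantiateWorkflowTemplate(ctx, &InstantiateWorkflowTemplateRequest{
//		Name:       "projects/my-project/regions/us-central1/workflowTemplates/sparkpi",
//		RequestId:  uuid.New().String(), // a retry with the same tag will not start a second workflow
//		Parameters: map[string]string{"CLUSTER_NAME": "sparkpi-demo"},
//	})
//	// op is a google.longrunning.Operation; it completes only when the whole workflow has finished.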
+ Parameters map[string]string `protobuf:"bytes,6,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstantiateWorkflowTemplateRequest) Reset() { *m = InstantiateWorkflowTemplateRequest{} } +func (m *InstantiateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*InstantiateWorkflowTemplateRequest) ProtoMessage() {} +func (*InstantiateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{15} +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *InstantiateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstantiateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Size(m) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InstantiateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InstantiateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *InstantiateWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InstantiateWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *InstantiateWorkflowTemplateRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *InstantiateWorkflowTemplateRequest) GetParameters() map[string]string { + if m != nil { + return m.Parameters + } + return nil +} + +// A request to instantiate an inline workflow template. +type InstantiateInlineWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The workflow template to instantiate. + Template *WorkflowTemplate `protobuf:"bytes,2,opt,name=template,proto3" json:"template,omitempty"` + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstantiateInlineWorkflowTemplateRequest) Reset() { + *m = InstantiateInlineWorkflowTemplateRequest{} +} +func (m *InstantiateInlineWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*InstantiateInlineWorkflowTemplateRequest) ProtoMessage() {} +func (*InstantiateInlineWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{16} +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *InstantiateInlineWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Merge(dst, src) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Size(m) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *InstantiateInlineWorkflowTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *InstantiateInlineWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +func (m *InstantiateInlineWorkflowTemplateRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to update a workflow template. +type UpdateWorkflowTemplateRequest struct { + // Required. The updated workflow template. + // + // The `template.version` field must match the current version. 
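// Illustrative usage sketch (editorial): the version check turns updates into an
// optimistic read-modify-write. Fetch, mutate, and send the template back with the
// version it was read at:
//
//	tpl, err := c.GetWorkflowTemplate(ctx, &GetWorkflowTemplateRequest{Name: name})
//	if err != nil {
//		return err
//	}
//	tpl.Labels = map[string]string{"owner": "data-eng"} // example mutation
//	if _, err := c.UpdateWorkflowTemplate(ctx, &UpdateWorkflowTemplateRequest{Template: tpl}); err != nil {
//		// fails if another writer bumped the version since the Get
//	}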
+ Template *WorkflowTemplate `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowTemplateRequest) Reset() { *m = UpdateWorkflowTemplateRequest{} } +func (m *UpdateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowTemplateRequest) ProtoMessage() {} +func (*UpdateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{17} +} +func (m *UpdateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *UpdateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *UpdateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Size(m) +} +func (m *UpdateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *UpdateWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +// A request to list workflow templates in a project. +type ListWorkflowTemplatesRequest struct { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of results to return in each response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The page token, returned by a previous call, to request the + // next page of results. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWorkflowTemplatesRequest) Reset() { *m = ListWorkflowTemplatesRequest{} } +func (m *ListWorkflowTemplatesRequest) String() string { return proto.CompactTextString(m) } +func (*ListWorkflowTemplatesRequest) ProtoMessage() {} +func (*ListWorkflowTemplatesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{18} +} +func (m *ListWorkflowTemplatesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Unmarshal(m, b) +} +func (m *ListWorkflowTemplatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Marshal(b, m, deterministic) +} +func (dst *ListWorkflowTemplatesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWorkflowTemplatesRequest.Merge(dst, src) +} +func (m *ListWorkflowTemplatesRequest) XXX_Size() int { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Size(m) +} +func (m *ListWorkflowTemplatesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListWorkflowTemplatesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWorkflowTemplatesRequest proto.InternalMessageInfo + +func (m *ListWorkflowTemplatesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListWorkflowTemplatesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListWorkflowTemplatesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// A response to a request to list workflow templates in a project. +type ListWorkflowTemplatesResponse struct { + // Output only. WorkflowTemplates list. + Templates []*WorkflowTemplate `protobuf:"bytes,1,rep,name=templates,proto3" json:"templates,omitempty"` + // Output only. This token is included in the response if there are more + // results to fetch. To fetch additional results, provide this value as the + // page_token in a subsequent ListWorkflowTemplatesRequest. 
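// Illustrative usage sketch (editorial): the usual pagination loop, feeding
// next_page_token back in as page_token until it comes back empty. The parent
// value is a hypothetical region resource name:
//
//	var all []*WorkflowTemplate
//	req := &ListWorkflowTemplatesRequest{Parent: "projects/my-project/regions/us-central1", PageSize: 100}
//	for {
//		resp, err := c.ListWorkflowTemplates(ctx, req)
//		if err != nil {
//			return nil, err
//		}
//		all = append(all, resp.GetTemplates()...)
//		if resp.GetNextPageToken() == "" {
//			break
//		}
//		req.PageToken = resp.GetNextPageToken()
//	}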
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWorkflowTemplatesResponse) Reset() { *m = ListWorkflowTemplatesResponse{} } +func (m *ListWorkflowTemplatesResponse) String() string { return proto.CompactTextString(m) } +func (*ListWorkflowTemplatesResponse) ProtoMessage() {} +func (*ListWorkflowTemplatesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{19} +} +func (m *ListWorkflowTemplatesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Unmarshal(m, b) +} +func (m *ListWorkflowTemplatesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Marshal(b, m, deterministic) +} +func (dst *ListWorkflowTemplatesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWorkflowTemplatesResponse.Merge(dst, src) +} +func (m *ListWorkflowTemplatesResponse) XXX_Size() int { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Size(m) +} +func (m *ListWorkflowTemplatesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListWorkflowTemplatesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWorkflowTemplatesResponse proto.InternalMessageInfo + +func (m *ListWorkflowTemplatesResponse) GetTemplates() []*WorkflowTemplate { + if m != nil { + return m.Templates + } + return nil +} + +func (m *ListWorkflowTemplatesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to delete a workflow template. +// +// Currently started workflows will remain running. +type DeleteWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to delete. If specified, + // will only delete the template if the current server version matches + // specified version. 
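// Illustrative usage sketch (editorial): passing the version read earlier makes
// this a compare-and-delete, so a template updated by someone else in the
// meantime is left alone:
//
//	_, err := c.DeleteWorkflowTemplate(ctx, &DeleteWorkflowTemplateRequest{
//		Name:    tpl.GetName(),
//		Version: tpl.GetVersion(),
//	})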
+ Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteWorkflowTemplateRequest) Reset() { *m = DeleteWorkflowTemplateRequest{} } +func (m *DeleteWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteWorkflowTemplateRequest) ProtoMessage() {} +func (*DeleteWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_68e4e5d738482813, []int{20} +} +func (m *DeleteWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *DeleteWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteWorkflowTemplateRequest.Merge(dst, src) +} +func (m *DeleteWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Size(m) +} +func (m *DeleteWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *DeleteWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func init() { + proto.RegisterType((*WorkflowTemplate)(nil), "google.cloud.dataproc.v1.WorkflowTemplate") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.WorkflowTemplate.LabelsEntry") + proto.RegisterType((*WorkflowTemplatePlacement)(nil), "google.cloud.dataproc.v1.WorkflowTemplatePlacement") + proto.RegisterType((*ManagedCluster)(nil), "google.cloud.dataproc.v1.ManagedCluster") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.ManagedCluster.LabelsEntry") + proto.RegisterType((*ClusterSelector)(nil), "google.cloud.dataproc.v1.ClusterSelector") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntry") + proto.RegisterType((*OrderedJob)(nil), "google.cloud.dataproc.v1.OrderedJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.OrderedJob.LabelsEntry") + proto.RegisterType((*TemplateParameter)(nil), "google.cloud.dataproc.v1.TemplateParameter") + proto.RegisterType((*ParameterValidation)(nil), "google.cloud.dataproc.v1.ParameterValidation") + proto.RegisterType((*RegexValidation)(nil), "google.cloud.dataproc.v1.RegexValidation") + proto.RegisterType((*ValueValidation)(nil), "google.cloud.dataproc.v1.ValueValidation") + proto.RegisterType((*WorkflowMetadata)(nil), "google.cloud.dataproc.v1.WorkflowMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry") + proto.RegisterType((*ClusterOperation)(nil), "google.cloud.dataproc.v1.ClusterOperation") + proto.RegisterType((*WorkflowGraph)(nil), "google.cloud.dataproc.v1.WorkflowGraph") + proto.RegisterType((*WorkflowNode)(nil), "google.cloud.dataproc.v1.WorkflowNode") + proto.RegisterType((*CreateWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1.CreateWorkflowTemplateRequest") + proto.RegisterType((*GetWorkflowTemplateRequest)(nil), 
"google.cloud.dataproc.v1.GetWorkflowTemplateRequest") + proto.RegisterType((*InstantiateWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntry") + proto.RegisterType((*InstantiateInlineWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest") + proto.RegisterType((*UpdateWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest") + proto.RegisterType((*ListWorkflowTemplatesRequest)(nil), "google.cloud.dataproc.v1.ListWorkflowTemplatesRequest") + proto.RegisterType((*ListWorkflowTemplatesResponse)(nil), "google.cloud.dataproc.v1.ListWorkflowTemplatesResponse") + proto.RegisterType((*DeleteWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest") + proto.RegisterEnum("google.cloud.dataproc.v1.WorkflowMetadata_State", WorkflowMetadata_State_name, WorkflowMetadata_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1.WorkflowNode_NodeState", WorkflowNode_NodeState_name, WorkflowNode_NodeState_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WorkflowTemplateServiceClient is the client API for WorkflowTemplateService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WorkflowTemplateServiceClient interface { + // Creates new workflow template. + CreateWorkflowTemplate(ctx context.Context, in *CreateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + GetWorkflowTemplate(ctx context.Context, in *GetWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Instantiates a template and begins execution. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateWorkflowTemplate(ctx context.Context, in *InstantiateWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Instantiates a template and begins execution. 
+ // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + // [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateInlineWorkflowTemplate(ctx context.Context, in *InstantiateInlineWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. + UpdateWorkflowTemplate(ctx context.Context, in *UpdateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Lists workflows that match the specified filter in the request. + ListWorkflowTemplates(ctx context.Context, in *ListWorkflowTemplatesRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesResponse, error) + // Deletes a workflow template. It does not cancel in-progress workflows. + DeleteWorkflowTemplate(ctx context.Context, in *DeleteWorkflowTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type workflowTemplateServiceClient struct { + cc *grpc.ClientConn +} + +func NewWorkflowTemplateServiceClient(cc *grpc.ClientConn) WorkflowTemplateServiceClient { + return &workflowTemplateServiceClient{cc} +} + +func (c *workflowTemplateServiceClient) CreateWorkflowTemplate(ctx context.Context, in *CreateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) GetWorkflowTemplate(ctx context.Context, in *GetWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) InstantiateWorkflowTemplate(ctx context.Context, in *InstantiateWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", in, out, opts...) 
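// On success, out carries the google.longrunning.Operation for the workflow run;
// it is only marked done once the entire workflow has finished. A caller-side
// sketch (editorial, not generated code) of polling it and decoding the attached
// WorkflowMetadata; longrunning.NewOperationsClient, the "time" and "log"
// imports, and github.com/golang/protobuf/ptypes are assumed to be available:
//
//	op := out // the Operation returned by InstantiateWorkflowTemplate
//	ops := longrunning.NewOperationsClient(conn)
//	for !op.GetDone() {
//		time.Sleep(10 * time.Second)
//		latest, err := ops.GetOperation(ctx, &longrunning.GetOperationRequest{Name: op.GetName()})
//		if err != nil {
//			return nil, err
//		}
//		op = latest
//		md := &WorkflowMetadata{}
//		if err := ptypes.UnmarshalAny(op.GetMetadata(), md); err == nil {
//			log.Printf("workflow state=%s cluster=%s", md.GetState(), md.GetClusterName())
//		}
//	}
//	// per the method comment above, a completed Operation carries an Empty response
//	// on success or a status error on failure.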
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) InstantiateInlineWorkflowTemplate(ctx context.Context, in *InstantiateInlineWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) UpdateWorkflowTemplate(ctx context.Context, in *UpdateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) ListWorkflowTemplates(ctx context.Context, in *ListWorkflowTemplatesRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesResponse, error) { + out := new(ListWorkflowTemplatesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) DeleteWorkflowTemplate(ctx context.Context, in *DeleteWorkflowTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkflowTemplateServiceServer is the server API for WorkflowTemplateService service. +type WorkflowTemplateServiceServer interface { + // Creates new workflow template. + CreateWorkflowTemplate(context.Context, *CreateWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + GetWorkflowTemplate(context.Context, *GetWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Instantiates a template and begins execution. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateWorkflowTemplate(context.Context, *InstantiateWorkflowTemplateRequest) (*longrunning.Operation, error) + // Instantiates a template and begins execution. + // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], + // [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. 
+ // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateInlineWorkflowTemplate(context.Context, *InstantiateInlineWorkflowTemplateRequest) (*longrunning.Operation, error) + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. + UpdateWorkflowTemplate(context.Context, *UpdateWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Lists workflows that match the specified filter in the request. + ListWorkflowTemplates(context.Context, *ListWorkflowTemplatesRequest) (*ListWorkflowTemplatesResponse, error) + // Deletes a workflow template. It does not cancel in-progress workflows. + DeleteWorkflowTemplate(context.Context, *DeleteWorkflowTemplateRequest) (*empty.Empty, error) +} + +func RegisterWorkflowTemplateServiceServer(s *grpc.Server, srv WorkflowTemplateServiceServer) { + s.RegisterService(&_WorkflowTemplateService_serviceDesc, srv) +} + +func _WorkflowTemplateService_CreateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).CreateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).CreateWorkflowTemplate(ctx, req.(*CreateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_GetWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).GetWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).GetWorkflowTemplate(ctx, req.(*GetWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_InstantiateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InstantiateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(WorkflowTemplateServiceServer).InstantiateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).InstantiateWorkflowTemplate(ctx, req.(*InstantiateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_InstantiateInlineWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InstantiateInlineWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).InstantiateInlineWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).InstantiateInlineWorkflowTemplate(ctx, req.(*InstantiateInlineWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_UpdateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).UpdateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).UpdateWorkflowTemplate(ctx, req.(*UpdateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_ListWorkflowTemplates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListWorkflowTemplatesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplates(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplates(ctx, req.(*ListWorkflowTemplatesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_DeleteWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).DeleteWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(WorkflowTemplateServiceServer).DeleteWorkflowTemplate(ctx, req.(*DeleteWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkflowTemplateService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1.WorkflowTemplateService", + HandlerType: (*WorkflowTemplateServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateWorkflowTemplate", + Handler: _WorkflowTemplateService_CreateWorkflowTemplate_Handler, + }, + { + MethodName: "GetWorkflowTemplate", + Handler: _WorkflowTemplateService_GetWorkflowTemplate_Handler, + }, + { + MethodName: "InstantiateWorkflowTemplate", + Handler: _WorkflowTemplateService_InstantiateWorkflowTemplate_Handler, + }, + { + MethodName: "InstantiateInlineWorkflowTemplate", + Handler: _WorkflowTemplateService_InstantiateInlineWorkflowTemplate_Handler, + }, + { + MethodName: "UpdateWorkflowTemplate", + Handler: _WorkflowTemplateService_UpdateWorkflowTemplate_Handler, + }, + { + MethodName: "ListWorkflowTemplates", + Handler: _WorkflowTemplateService_ListWorkflowTemplates_Handler, + }, + { + MethodName: "DeleteWorkflowTemplate", + Handler: _WorkflowTemplateService_DeleteWorkflowTemplate_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1/workflow_templates.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1/workflow_templates.proto", fileDescriptor_workflow_templates_68e4e5d738482813) +} + +var fileDescriptor_workflow_templates_68e4e5d738482813 = []byte{ + // 2000 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcd, 0x6f, 0xdb, 0xc8, + 0x15, 0x37, 0x65, 0x4b, 0x96, 0x9e, 0x62, 0x5b, 0x3b, 0x69, 0xbc, 0xaa, 0x12, 0xa3, 0x0e, 0xb7, + 0xdd, 0x78, 0xbd, 0xa8, 0xd4, 0x38, 0x4d, 0x37, 0xeb, 0x24, 0x9b, 0xb5, 0x2d, 0xd9, 0x56, 0x62, + 0xcb, 0x0e, 0x65, 0x67, 0x01, 0x5f, 0x04, 0x4a, 0x9c, 0xd0, 0x74, 0x28, 0x0e, 0x43, 0x52, 0xda, + 0x75, 0x8a, 0x45, 0x81, 0xdc, 0x16, 0xe8, 0xa1, 0xc5, 0xf6, 0x16, 0xa0, 0xc7, 0x02, 0x3d, 0x14, + 0xed, 0xa9, 0xa7, 0xfe, 0x09, 0x05, 0x7a, 0x28, 0x7a, 0x2e, 0x50, 0xf4, 0xda, 0x3f, 0xa1, 0x45, + 0x31, 0x1f, 0xa4, 0xa8, 0x0f, 0x52, 0x96, 0x5d, 0xa0, 0x17, 0x83, 0x33, 0x7a, 0xbf, 0x37, 0xef, + 0x6b, 0x7e, 0xef, 0x91, 0x86, 0xbb, 0x3a, 0x21, 0xba, 0x89, 0x4b, 0x2d, 0x93, 0x74, 0xb4, 0x92, + 0xa6, 0x7a, 0xaa, 0xed, 0x90, 0x56, 0xa9, 0x7b, 0xb7, 0xf4, 0x25, 0x71, 0x5e, 0xbd, 0x34, 0xc9, + 0x97, 0x0d, 0x0f, 0xb7, 0x6d, 0x53, 0xf5, 0xb0, 0x5b, 0xb4, 0x1d, 0xe2, 0x11, 0x94, 0xe7, 0x90, + 0x22, 0x83, 0x14, 0x7d, 0x48, 0xb1, 0x7b, 0xb7, 0x70, 0x4b, 0x28, 0x53, 0x6d, 0xa3, 0xa4, 0x5a, + 0x16, 0xf1, 0x54, 0xcf, 0x20, 0x96, 0xc0, 0x15, 0xee, 0x44, 0x1e, 0xd5, 0x32, 0x3b, 0xae, 0x87, + 0x1d, 0x5f, 0xf0, 0x83, 0x48, 0xc1, 0x33, 0xd2, 0x1c, 0x14, 0x32, 0x89, 0xa5, 0x3b, 0x1d, 0xcb, + 0x32, 0x2c, 0xbd, 0x44, 0x6c, 0xec, 0xf4, 0x1d, 0x79, 0x53, 0x08, 0xb1, 0x55, 0xb3, 0xf3, 0xb2, + 0x84, 0xdb, 0xb6, 0x77, 0x2e, 0x7e, 0xfc, 0xde, 0xe0, 0x8f, 0x9e, 0xd1, 0xc6, 0xae, 0xa7, 0xb6, + 0x6d, 0x2e, 0x20, 0xff, 0x7e, 0x06, 0x72, 0x5f, 0x88, 0x28, 0x1c, 0x89, 0x20, 0xa0, 0x79, 0x48, + 0x18, 0x5a, 0x3e, 0xb1, 0x2c, 0xad, 0x64, 0x94, 0x84, 0xa1, 0x21, 0x04, 0x33, 0x96, 0xda, 0xc6, + 0x79, 0x89, 0xed, 0xb0, 0x67, 0x94, 0x87, 0xd9, 0x2e, 0x76, 0x5c, 0x83, 0x58, 0xf9, 0xe9, 0x65, + 0x69, 0x25, 0xa9, 0xf8, 0x4b, 0xf4, 0x10, 0xb2, 0x2d, 0x07, 0xab, 0x1e, 0x6e, 0xd0, 0xc3, 0xf2, + 0x33, 0xcb, 0xd2, 0x4a, 0x76, 0xad, 0x50, 0x14, 0x11, 0xf5, 0x2d, 0x29, 0x1e, 0xf9, 0x96, 0x28, + 0xc0, 0xc5, 
0xe9, 0x06, 0x05, 0x77, 0x6c, 0x2d, 0x00, 0x27, 0xc7, 0x83, 0xb9, 0x38, 0x03, 0xd7, + 0x20, 0x65, 0xaa, 0x4d, 0x6c, 0xba, 0xf9, 0xd4, 0xf2, 0xf4, 0x4a, 0x76, 0xed, 0x27, 0xc5, 0xa8, + 0x34, 0x16, 0x07, 0x7d, 0x2e, 0xee, 0x31, 0x60, 0xc5, 0xf2, 0x9c, 0x73, 0x45, 0x68, 0x41, 0xcf, + 0x21, 0x63, 0x9b, 0x6a, 0x0b, 0xb7, 0xb1, 0xe5, 0xe5, 0x67, 0x99, 0x29, 0xf7, 0x2e, 0xae, 0xf2, + 0xd0, 0x87, 0x2a, 0x3d, 0x2d, 0xe8, 0x01, 0xcc, 0xd0, 0x04, 0xe7, 0xd3, 0xcc, 0xc0, 0xef, 0x47, + 0x6b, 0x3b, 0x70, 0x34, 0xec, 0x60, 0xed, 0x29, 0x69, 0x2a, 0x0c, 0x81, 0x9e, 0x01, 0xd8, 0xaa, + 0xa3, 0xb6, 0x31, 0xad, 0xa2, 0x7c, 0x86, 0xe1, 0x3f, 0x8e, 0xc6, 0x07, 0x56, 0xf8, 0x18, 0x25, + 0x04, 0x2f, 0x7c, 0x0a, 0xd9, 0x90, 0xc3, 0x28, 0x07, 0xd3, 0xaf, 0xf0, 0xb9, 0xc8, 0x2f, 0x7d, + 0x44, 0xdf, 0x81, 0x64, 0x57, 0x35, 0x3b, 0x58, 0x54, 0x01, 0x5f, 0xac, 0x27, 0x1e, 0x48, 0xf2, + 0xdf, 0x24, 0xf8, 0x6e, 0xa4, 0xab, 0xa8, 0x0e, 0x0b, 0x6d, 0xd5, 0x52, 0x75, 0xac, 0x35, 0x44, + 0xc5, 0x33, 0xad, 0xd9, 0xb5, 0x95, 0x68, 0x53, 0xf7, 0x39, 0x60, 0x8b, 0xcb, 0xef, 0x4e, 0x29, + 0xf3, 0xed, 0xbe, 0x1d, 0xf4, 0x02, 0x72, 0x42, 0x59, 0xc3, 0xc5, 0x26, 0x6e, 0x79, 0xc4, 0x61, + 0x76, 0x65, 0xd7, 0x3e, 0x8a, 0xd6, 0x2a, 0xc0, 0x75, 0x01, 0xd8, 0x9d, 0x52, 0x16, 0x5a, 0xfd, + 0x5b, 0x9b, 0xd9, 0x50, 0x7e, 0xe5, 0x7f, 0x4b, 0x30, 0xdf, 0x6f, 0x09, 0xba, 0x0d, 0xd7, 0xfc, + 0x73, 0x59, 0xfd, 0xf3, 0x58, 0x64, 0xc5, 0x5e, 0x8d, 0x5e, 0x83, 0x27, 0x90, 0x6a, 0x11, 0xeb, + 0xa5, 0xa1, 0xb3, 0x5b, 0x90, 0x5d, 0xbb, 0x33, 0xd6, 0xa0, 0x2d, 0x26, 0xae, 0x08, 0x18, 0xda, + 0x0b, 0x6a, 0x76, 0x86, 0xa5, 0xf4, 0xc7, 0x17, 0x8d, 0xd3, 0xa8, 0x8a, 0xbd, 0x4a, 0x5e, 0xff, + 0x2c, 0xc1, 0xc2, 0x40, 0xcc, 0xe8, 0xc5, 0x7f, 0x43, 0xac, 0xe0, 0xe2, 0xd3, 0x67, 0xd4, 0x82, + 0x79, 0x3f, 0x28, 0xc2, 0xf0, 0x04, 0x33, 0xfc, 0xd1, 0x85, 0x53, 0xe1, 0xaf, 0xc3, 0x0e, 0xcc, + 0xb5, 0xc2, 0x7b, 0x85, 0xcf, 0x01, 0x0d, 0x0b, 0x4d, 0xe4, 0xce, 0x5f, 0x92, 0x00, 0xbd, 0x3b, + 0x84, 0xde, 0x87, 0x59, 0xd7, 0xc3, 0x76, 0xc3, 0xd0, 0x04, 0x3c, 0x45, 0x97, 0x55, 0x0d, 0x95, + 0x01, 0x4e, 0x55, 0x8d, 0x10, 0xbb, 0x71, 0x46, 0x9a, 0xa2, 0xaa, 0x3e, 0x88, 0x76, 0x65, 0x97, + 0xc9, 0x3e, 0x25, 0xcd, 0xdd, 0x29, 0x25, 0x73, 0xea, 0x2f, 0xd0, 0x06, 0x64, 0x5c, 0x5b, 0x75, + 0x5e, 0x31, 0x25, 0xbc, 0x12, 0xe4, 0x68, 0x25, 0x75, 0x2a, 0xca, 0x75, 0xa4, 0x5d, 0xf1, 0x8c, + 0x76, 0x20, 0x6b, 0x9f, 0xf7, 0x94, 0x70, 0xda, 0x8c, 0x21, 0x88, 0xc3, 0xf3, 0x90, 0x1a, 0x10, + 0x50, 0xaa, 0xe8, 0x33, 0x48, 0x9f, 0x1a, 0x5d, 0xcc, 0xb4, 0x70, 0xfe, 0xbc, 0x1d, 0xe3, 0x8f, + 0xd1, 0xc5, 0x5c, 0xc5, 0xec, 0x29, 0x7f, 0x44, 0x0f, 0x61, 0xd6, 0x36, 0x74, 0x06, 0x4f, 0x31, + 0xf8, 0x72, 0x8c, 0x11, 0x86, 0xce, 0xd1, 0x29, 0x9b, 0x3d, 0xa1, 0x67, 0x30, 0xc7, 0x7d, 0x70, + 0x5f, 0x9b, 0x4c, 0x05, 0xa7, 0xcd, 0x1f, 0x8c, 0x09, 0x46, 0xfd, 0xb5, 0xc9, 0xf5, 0x64, 0xdd, + 0xde, 0x12, 0xed, 0x06, 0x77, 0x83, 0xd3, 0xe5, 0x8f, 0x2e, 0x42, 0x97, 0x23, 0x99, 0x7c, 0x07, + 0xc0, 0x6d, 0x9d, 0x62, 0xad, 0x63, 0x1a, 0x96, 0x9e, 0xcf, 0x8c, 0xbb, 0xaa, 0x4f, 0x49, 0xb3, + 0x1e, 0x88, 0x2b, 0x21, 0x28, 0x5a, 0x83, 0x1b, 0xb6, 0x83, 0x1d, 0xfc, 0xba, 0x63, 0xb8, 0x86, + 0x87, 0x1b, 0xa2, 0xa8, 0xdc, 0x3c, 0x2c, 0x4f, 0xaf, 0x64, 0x94, 0xeb, 0xe1, 0x1f, 0xeb, 0xac, + 0xc2, 0xae, 0x72, 0x29, 0x37, 0x01, 0xd2, 0x67, 0xa4, 0xd9, 0xf0, 0xce, 0x6d, 0x2c, 0xff, 0x41, + 0x82, 0xf7, 0x86, 0x58, 0x7d, 0x64, 0x6f, 0x5e, 0x84, 0xd4, 0x4b, 0x03, 0x9b, 0x1a, 0xbf, 0x9a, + 0x19, 0x45, 0xac, 0xd0, 0x32, 0x64, 0x35, 0xec, 0xb6, 0x1c, 0xc3, 0xf6, 0xfc, 0xbe, 0x9d, 0x51, + 0xc2, 0x5b, 0x68, 0x1f, 0xa0, 0xab, 
0x9a, 0x86, 0xc6, 0x26, 0x0c, 0x51, 0x83, 0x3f, 0x8c, 0x49, + 0xbf, 0x6f, 0xc6, 0x8b, 0x00, 0xa4, 0x84, 0x14, 0x50, 0x93, 0xaf, 0x8f, 0x90, 0x41, 0x1b, 0x90, + 0x74, 0xb0, 0x8e, 0xbf, 0x12, 0xbd, 0x21, 0x86, 0xc5, 0x15, 0x2a, 0xd6, 0x43, 0xee, 0x4e, 0x29, + 0x1c, 0x89, 0xb6, 0x20, 0xc5, 0xc2, 0xe4, 0x8e, 0xef, 0x04, 0x2f, 0xa8, 0x5c, 0x9f, 0x0e, 0x01, + 0xdd, 0x7c, 0x0f, 0x16, 0x7a, 0xd6, 0xf2, 0x28, 0x7f, 0x0c, 0x0b, 0x03, 0x67, 0xd2, 0x51, 0x87, + 0x9d, 0x89, 0xdd, 0xbc, 0xc4, 0xe2, 0xe9, 0x2f, 0xe5, 0x8f, 0x60, 0x61, 0x40, 0x39, 0x8d, 0xbd, + 0xb0, 0x8b, 0xcb, 0x8a, 0x95, 0xfc, 0x36, 0xd5, 0x1b, 0xb4, 0xf6, 0xb1, 0xa7, 0x52, 0x03, 0x51, + 0x01, 0xd2, 0xfe, 0xe4, 0x29, 0x12, 0x18, 0xac, 0xc3, 0x03, 0x56, 0xa2, 0x7f, 0xc0, 0x7a, 0x0e, + 0xf3, 0x62, 0xc0, 0xf2, 0x5b, 0x2c, 0x67, 0x9c, 0xd5, 0xb1, 0x0c, 0x7c, 0xe0, 0x0f, 0x8f, 0xca, + 0x1c, 0xd7, 0xe0, 0x77, 0xba, 0xc7, 0x90, 0xd4, 0x1d, 0xd5, 0x3e, 0x15, 0x29, 0xbf, 0x33, 0x7e, + 0xca, 0xd9, 0xa1, 0xe2, 0x0a, 0x47, 0x51, 0x8b, 0x34, 0x6c, 0xe2, 0x90, 0x45, 0xc9, 0xc9, 0x2d, + 0xe2, 0x1a, 0x7c, 0x8b, 0xb6, 0x21, 0xe9, 0x7a, 0x34, 0x2e, 0x94, 0x83, 0xe6, 0xe3, 0xae, 0xfe, + 0x60, 0x54, 0x8b, 0x75, 0x8a, 0x53, 0x38, 0x7c, 0xa8, 0x87, 0xcf, 0x0e, 0xf7, 0xf0, 0x93, 0xbe, + 0xc9, 0x8a, 0x53, 0xcd, 0xfa, 0x04, 0xe7, 0x05, 0x15, 0x2e, 0x48, 0x27, 0xa4, 0x0d, 0x7d, 0x0a, + 0xe0, 0x7a, 0xaa, 0xe3, 0xf1, 0x71, 0x36, 0x33, 0x76, 0x9c, 0xcd, 0x30, 0x69, 0x36, 0xcd, 0xde, + 0x87, 0x34, 0xb6, 0x34, 0x0e, 0x84, 0xb1, 0xc0, 0x59, 0x6c, 0x69, 0x0c, 0x16, 0x72, 0xb8, 0xd3, + 0x31, 0xb4, 0x7c, 0xb6, 0xcf, 0xe1, 0xe3, 0x8e, 0xa1, 0x15, 0x1e, 0xc3, 0xc2, 0x80, 0xcd, 0x13, + 0xb5, 0xd6, 0x07, 0x90, 0x64, 0x21, 0x46, 0x59, 0x98, 0x3d, 0xae, 0x3d, 0xab, 0x1d, 0x7c, 0x51, + 0xcb, 0x4d, 0xd1, 0xc5, 0x61, 0xa5, 0x56, 0xae, 0xd6, 0x76, 0x72, 0x12, 0x5d, 0x28, 0xc7, 0xb5, + 0x1a, 0x5d, 0x24, 0x50, 0x1a, 0x66, 0xca, 0x07, 0xb5, 0x4a, 0x6e, 0x5a, 0x6e, 0x40, 0x6e, 0x30, + 0xef, 0xd4, 0xde, 0xe0, 0x9d, 0xa6, 0xd7, 0x9e, 0xb3, 0xc1, 0x5e, 0x55, 0xa3, 0xa6, 0x60, 0xc7, + 0x11, 0x43, 0x5f, 0x46, 0xe1, 0x0b, 0xca, 0x7c, 0x1a, 0x1d, 0x4e, 0x68, 0xf1, 0xa7, 0x15, 0xf6, + 0x2c, 0xef, 0xc3, 0x5c, 0x5f, 0x81, 0xa2, 0x47, 0x90, 0xb4, 0x88, 0x26, 0x6e, 0x63, 0x76, 0xed, + 0xc3, 0xf1, 0x69, 0xad, 0x11, 0x0d, 0x2b, 0x1c, 0x24, 0xff, 0x29, 0x01, 0xd7, 0xc2, 0xfb, 0xd1, + 0x63, 0x44, 0x64, 0x5f, 0x48, 0x44, 0xf6, 0x05, 0x74, 0x03, 0x52, 0x94, 0xdc, 0x0d, 0x4d, 0x30, + 0x71, 0xf2, 0x8c, 0x34, 0xab, 0x5a, 0xaf, 0xf2, 0x93, 0x17, 0xad, 0x7c, 0x6a, 0x5a, 0x91, 0xfe, + 0xe9, 0xab, 0xfc, 0x20, 0x6a, 0xa9, 0x50, 0xd4, 0xe4, 0x33, 0xc8, 0x04, 0x92, 0xa8, 0x00, 0x8b, + 0xb5, 0x83, 0x72, 0xa5, 0x51, 0x3f, 0xda, 0x38, 0xaa, 0x34, 0x8e, 0x6b, 0xf5, 0xc3, 0xca, 0x56, + 0x75, 0xbb, 0x5a, 0x29, 0xf3, 0x7c, 0x6e, 0xee, 0x1d, 0x6c, 0x3d, 0xab, 0x94, 0x73, 0x12, 0xba, + 0x06, 0x69, 0x9a, 0xcf, 0x8d, 0xcd, 0xbd, 0x4a, 0x2e, 0x11, 0xce, 0xee, 0x34, 0x9a, 0x83, 0xcc, + 0xd6, 0xc1, 0xfe, 0xe1, 0x5e, 0xe5, 0xa8, 0x52, 0xce, 0xcd, 0x20, 0x80, 0xd4, 0xf6, 0x46, 0x75, + 0xaf, 0x52, 0xce, 0x25, 0xe5, 0x9f, 0xc1, 0xd2, 0x16, 0xa3, 0x99, 0xc1, 0xf7, 0x05, 0x05, 0xbf, + 0xee, 0x60, 0xd7, 0xa3, 0x64, 0x69, 0xab, 0x0e, 0x7d, 0xbb, 0x12, 0xd1, 0xe4, 0x2b, 0xb4, 0x1d, + 0xe2, 0xc5, 0xc4, 0x38, 0x26, 0x19, 0x52, 0x1e, 0x60, 0xe5, 0xa7, 0x50, 0xd8, 0xc1, 0x5e, 0xd4, + 0xe9, 0x63, 0x5e, 0x6b, 0xfb, 0x59, 0x57, 0xfe, 0x75, 0x02, 0xe4, 0xaa, 0xe5, 0x7a, 0xaa, 0xe5, + 0x19, 0x31, 0x2e, 0x4d, 0xa4, 0x14, 0x2d, 0x01, 0x38, 0x1c, 0x48, 0xcb, 0x20, 0xc9, 0x30, 0x19, + 0xb1, 0x53, 0xd5, 0x90, 0xd9, 0xc7, 0x4c, 0xfc, 0xa5, 0x76, 
0x2f, 0x3a, 0x12, 0xe3, 0xcd, 0x8b, + 0xe3, 0xaa, 0xab, 0xd2, 0xc2, 0x6f, 0x25, 0x58, 0x09, 0x59, 0x50, 0xb5, 0x4c, 0xc3, 0xfa, 0x7f, + 0x65, 0x7e, 0x20, 0xb0, 0xd3, 0x03, 0x81, 0x95, 0x75, 0x58, 0x3a, 0x66, 0xdf, 0x0d, 0xa2, 0xec, + 0xdb, 0x1e, 0xe8, 0xcc, 0x97, 0xad, 0x40, 0x07, 0x6e, 0xed, 0x19, 0xee, 0x50, 0x09, 0xba, 0xe3, + 0xe2, 0x70, 0x13, 0x32, 0xb6, 0xaa, 0xe3, 0x86, 0x6b, 0xbc, 0xc1, 0xa2, 0x68, 0xd2, 0x74, 0xa3, + 0x6e, 0xbc, 0x61, 0xce, 0xb1, 0x1f, 0x3d, 0xf2, 0x0a, 0xfb, 0x63, 0x1c, 0x13, 0x3f, 0xa2, 0x1b, + 0xf2, 0x2f, 0x25, 0x58, 0x8a, 0x38, 0xd4, 0xb5, 0x89, 0xe5, 0x62, 0xb4, 0x0b, 0x99, 0xe0, 0x8b, + 0x97, 0x60, 0xc6, 0x49, 0xdc, 0xeb, 0x81, 0xd1, 0x87, 0xb0, 0x60, 0xe1, 0xaf, 0xbc, 0x46, 0xc8, + 0x1e, 0x5e, 0x18, 0x73, 0x74, 0xfb, 0x30, 0xb0, 0x69, 0x1f, 0x96, 0xca, 0xac, 0xbf, 0xff, 0x4f, + 0xee, 0xcd, 0xda, 0x3f, 0xe6, 0xe1, 0xfd, 0x41, 0x4d, 0x75, 0xec, 0x74, 0x8d, 0x16, 0x46, 0xdf, + 0x24, 0x60, 0x71, 0x34, 0xed, 0xa0, 0x4f, 0x62, 0xe6, 0x91, 0x38, 0xa2, 0x2a, 0x4c, 0x10, 0x1d, + 0xf9, 0x1b, 0xe9, 0xed, 0x5f, 0xff, 0xf9, 0x6d, 0xe2, 0xad, 0x24, 0xdf, 0x2f, 0x75, 0xef, 0x96, + 0x7e, 0xca, 0x33, 0xfa, 0xd8, 0x76, 0xc8, 0x19, 0x6e, 0x79, 0x6e, 0x69, 0xb5, 0x64, 0x92, 0x16, + 0xff, 0x84, 0x57, 0x5a, 0xfd, 0x3a, 0xf8, 0x06, 0x19, 0x64, 0x69, 0x3d, 0xa8, 0x9e, 0x93, 0x27, + 0xf2, 0xbd, 0x08, 0x15, 0x0e, 0xd6, 0x2f, 0xa0, 0x00, 0xfd, 0x4b, 0x82, 0xeb, 0x23, 0x18, 0x10, + 0xc5, 0x7c, 0x65, 0x88, 0x26, 0xcc, 0x89, 0xa2, 0xe0, 0xb1, 0x20, 0x58, 0x88, 0xc7, 0x80, 0xa6, + 0x33, 0x22, 0x02, 0xc3, 0xf6, 0x97, 0x56, 0xbf, 0x3e, 0xb9, 0x8f, 0xee, 0x8d, 0x04, 0x06, 0x7e, + 0x8f, 0x84, 0xa1, 0x5f, 0x24, 0xe0, 0x66, 0x0c, 0x0b, 0xa2, 0x47, 0x57, 0x21, 0xcf, 0xc2, 0x92, + 0x8f, 0x0e, 0x7d, 0x90, 0x2d, 0x06, 0x93, 0x8c, 0xfc, 0x2d, 0x4f, 0xfc, 0xcf, 0x25, 0x79, 0xe3, + 0x52, 0x4e, 0xaf, 0x1b, 0x3d, 0x13, 0xd6, 0xa5, 0xd5, 0x93, 0xb2, 0xfc, 0xe4, 0x12, 0x31, 0x18, + 0xd0, 0x82, 0xfe, 0x98, 0x80, 0xdb, 0x63, 0x69, 0x19, 0x6d, 0x5e, 0x28, 0x30, 0xb1, 0x9c, 0x3e, + 0x2e, 0x3c, 0xbf, 0xe3, 0xe1, 0xf9, 0x8d, 0x24, 0xef, 0x5c, 0xee, 0x5e, 0x18, 0x83, 0x86, 0x84, + 0x6e, 0xca, 0x73, 0xb9, 0x72, 0x99, 0x9b, 0x12, 0xa3, 0x12, 0xbd, 0x4b, 0xc0, 0xe2, 0xe8, 0x26, + 0x11, 0xc7, 0x23, 0xb1, 0x6d, 0x65, 0xa2, 0x1b, 0xf4, 0x8e, 0xc7, 0xeb, 0x57, 0x52, 0xe1, 0x33, + 0xe6, 0x9a, 0x6f, 0x58, 0x71, 0xe2, 0xba, 0xea, 0x85, 0x69, 0xb7, 0xf0, 0x28, 0x5e, 0xd7, 0x98, + 0xda, 0xea, 0x45, 0xe7, 0x3f, 0x12, 0xdc, 0x18, 0xd9, 0x64, 0x50, 0xcc, 0x57, 0xf7, 0xb8, 0x56, + 0x58, 0xf8, 0x64, 0x62, 0x1c, 0xef, 0x66, 0x03, 0x54, 0x33, 0x69, 0x59, 0x05, 0x54, 0x33, 0x59, + 0xe9, 0xa0, 0xbf, 0x4b, 0xb0, 0x38, 0xba, 0xa5, 0xc5, 0x95, 0x47, 0x6c, 0x13, 0x2c, 0x2c, 0x0e, + 0xbd, 0xe0, 0x55, 0xda, 0xb6, 0x77, 0xee, 0x7b, 0xb8, 0x7a, 0x59, 0x32, 0x5d, 0xbd, 0x0c, 0x99, + 0x6e, 0xbe, 0x81, 0x5b, 0x2d, 0xd2, 0x8e, 0xf4, 0x65, 0x73, 0x71, 0x28, 0x25, 0x87, 0xd4, 0xec, + 0x43, 0xe9, 0xe4, 0x73, 0x81, 0xd1, 0x89, 0xa9, 0x5a, 0x7a, 0x91, 0x38, 0x7a, 0x49, 0xc7, 0x16, + 0x73, 0xaa, 0xc4, 0x7f, 0x52, 0x6d, 0xc3, 0x1d, 0xfe, 0xe7, 0xd7, 0x43, 0xff, 0xb9, 0x99, 0x62, + 0xc2, 0xf7, 0xfe, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x70, 0xa3, 0x03, 0xbc, 0xbc, 0x1b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/clusters.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/clusters.pb.go new file mode 100644 index 0000000..ca14551 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/clusters.pb.go @@ -0,0 +1,2358 @@ +// Code generated by 
protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1beta2/clusters.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The cluster state. +type ClusterStatus_State int32 + +const ( + // The cluster state is unknown. + ClusterStatus_UNKNOWN ClusterStatus_State = 0 + // The cluster is being created and set up. It is not ready for use. + ClusterStatus_CREATING ClusterStatus_State = 1 + // The cluster is currently running and healthy. It is ready for use. + ClusterStatus_RUNNING ClusterStatus_State = 2 + // The cluster encountered an error. It is not ready for use. + ClusterStatus_ERROR ClusterStatus_State = 3 + // The cluster is being deleted. It cannot be used. + ClusterStatus_DELETING ClusterStatus_State = 4 + // The cluster is being updated. It continues to accept and process jobs. + ClusterStatus_UPDATING ClusterStatus_State = 5 +) + +var ClusterStatus_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "CREATING", + 2: "RUNNING", + 3: "ERROR", + 4: "DELETING", + 5: "UPDATING", +} +var ClusterStatus_State_value = map[string]int32{ + "UNKNOWN": 0, + "CREATING": 1, + "RUNNING": 2, + "ERROR": 3, + "DELETING": 4, + "UPDATING": 5, +} + +func (x ClusterStatus_State) String() string { + return proto.EnumName(ClusterStatus_State_name, int32(x)) +} +func (ClusterStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{10, 0} +} + +// The cluster substate. +type ClusterStatus_Substate int32 + +const ( + // The cluster substate is unknown. + ClusterStatus_UNSPECIFIED ClusterStatus_Substate = 0 + // The cluster is known to be in an unhealthy state + // (for example, critical daemons are not running or HDFS capacity is + // exhausted). + // + // Applies to RUNNING state. + ClusterStatus_UNHEALTHY ClusterStatus_Substate = 1 + // The agent-reported status is out of date (may occur if + // Cloud Dataproc loses communication with Agent). + // + // Applies to RUNNING state. 
+ ClusterStatus_STALE_STATUS ClusterStatus_Substate = 2 +) + +var ClusterStatus_Substate_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "UNHEALTHY", + 2: "STALE_STATUS", +} +var ClusterStatus_Substate_value = map[string]int32{ + "UNSPECIFIED": 0, + "UNHEALTHY": 1, + "STALE_STATUS": 2, +} + +func (x ClusterStatus_Substate) String() string { + return proto.EnumName(ClusterStatus_Substate_name, int32(x)) +} +func (ClusterStatus_Substate) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{10, 1} +} + +// Describes the identifying information, config, and status of +// a cluster of Compute Engine instances. +type Cluster struct { + // Required. The Google Cloud Platform project ID that the cluster belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The cluster name. Cluster names within a project must be + // unique. Names of deleted clusters can be reused. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The cluster config. Note that Cloud Dataproc may set + // default values, and values may change when clusters are updated. + Config *ClusterConfig `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` + // Optional. The labels to associate with this cluster. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with a cluster. + Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Cluster status. + Status *ClusterStatus `protobuf:"bytes,4,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous cluster status. + StatusHistory []*ClusterStatus `protobuf:"bytes,7,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc + // generates this value when it creates the cluster. + ClusterUuid string `protobuf:"bytes,6,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. + // + // **Beta Feature**: This report is available for testing purposes only. It + // may be changed before final release. 
+ Metrics *ClusterMetrics `protobuf:"bytes,9,opt,name=metrics,proto3" json:"metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{0} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Cluster) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *Cluster) GetConfig() *ClusterConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *Cluster) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Cluster) GetStatus() *ClusterStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *Cluster) GetStatusHistory() []*ClusterStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *Cluster) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *Cluster) GetMetrics() *ClusterMetrics { + if m != nil { + return m.Metrics + } + return nil +} + +// The cluster config. +type ClusterConfig struct { + // Optional. A Cloud Storage staging bucket used for sharing generated + // SSH keys and config. If you do not specify a staging bucket, Cloud + // Dataproc will determine an appropriate Cloud Storage location (US, + // ASIA, or EU) for your cluster's staging bucket according to the Google + // Compute Engine zone where your cluster is deployed, and then it will create + // and manage this project-level, per-location bucket for you. + ConfigBucket string `protobuf:"bytes,1,opt,name=config_bucket,json=configBucket,proto3" json:"config_bucket,omitempty"` + // Required. The shared Compute Engine config settings for + // all instances in a cluster. + GceClusterConfig *GceClusterConfig `protobuf:"bytes,8,opt,name=gce_cluster_config,json=gceClusterConfig,proto3" json:"gce_cluster_config,omitempty"` + // Optional. The Compute Engine config settings for + // the master instance in a cluster. + MasterConfig *InstanceGroupConfig `protobuf:"bytes,9,opt,name=master_config,json=masterConfig,proto3" json:"master_config,omitempty"` + // Optional. The Compute Engine config settings for + // worker instances in a cluster. + WorkerConfig *InstanceGroupConfig `protobuf:"bytes,10,opt,name=worker_config,json=workerConfig,proto3" json:"worker_config,omitempty"` + // Optional. The Compute Engine config settings for + // additional worker instances in a cluster. + SecondaryWorkerConfig *InstanceGroupConfig `protobuf:"bytes,12,opt,name=secondary_worker_config,json=secondaryWorkerConfig,proto3" json:"secondary_worker_config,omitempty"` + // Optional. 
The config settings for software inside the cluster. + SoftwareConfig *SoftwareConfig `protobuf:"bytes,13,opt,name=software_config,json=softwareConfig,proto3" json:"software_config,omitempty"` + // Optional. The config setting for auto delete cluster schedule. + LifecycleConfig *LifecycleConfig `protobuf:"bytes,14,opt,name=lifecycle_config,json=lifecycleConfig,proto3" json:"lifecycle_config,omitempty"` + // Optional. Commands to execute on each node after config is + // completed. By default, executables are run on master and all worker nodes. + // You can test a node's role metadata to run an executable on + // a master or worker node, as shown below using `curl` (you can also use + // `wget`): + // + // ROLE=$(curl -H Metadata-Flavor:Google + // http://metadata/computeMetadata/v1beta2/instance/attributes/dataproc-role) + // if [[ "${ROLE}" == 'Master' ]]; then + // ... master specific actions ... + // else + // ... worker specific actions ... + // fi + InitializationActions []*NodeInitializationAction `protobuf:"bytes,11,rep,name=initialization_actions,json=initializationActions,proto3" json:"initialization_actions,omitempty"` + // Optional. Encryption settings for the cluster. + EncryptionConfig *EncryptionConfig `protobuf:"bytes,15,opt,name=encryption_config,json=encryptionConfig,proto3" json:"encryption_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterConfig) Reset() { *m = ClusterConfig{} } +func (m *ClusterConfig) String() string { return proto.CompactTextString(m) } +func (*ClusterConfig) ProtoMessage() {} +func (*ClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{1} +} +func (m *ClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterConfig.Unmarshal(m, b) +} +func (m *ClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterConfig.Marshal(b, m, deterministic) +} +func (dst *ClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterConfig.Merge(dst, src) +} +func (m *ClusterConfig) XXX_Size() int { + return xxx_messageInfo_ClusterConfig.Size(m) +} +func (m *ClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterConfig proto.InternalMessageInfo + +func (m *ClusterConfig) GetConfigBucket() string { + if m != nil { + return m.ConfigBucket + } + return "" +} + +func (m *ClusterConfig) GetGceClusterConfig() *GceClusterConfig { + if m != nil { + return m.GceClusterConfig + } + return nil +} + +func (m *ClusterConfig) GetMasterConfig() *InstanceGroupConfig { + if m != nil { + return m.MasterConfig + } + return nil +} + +func (m *ClusterConfig) GetWorkerConfig() *InstanceGroupConfig { + if m != nil { + return m.WorkerConfig + } + return nil +} + +func (m *ClusterConfig) GetSecondaryWorkerConfig() *InstanceGroupConfig { + if m != nil { + return m.SecondaryWorkerConfig + } + return nil +} + +func (m *ClusterConfig) GetSoftwareConfig() *SoftwareConfig { + if m != nil { + return m.SoftwareConfig + } + return nil +} + +func (m *ClusterConfig) GetLifecycleConfig() *LifecycleConfig { + if m != nil { + return m.LifecycleConfig + } + return nil +} + +func (m *ClusterConfig) GetInitializationActions() []*NodeInitializationAction { + if m != nil { + return m.InitializationActions + } + return nil +} + +func (m *ClusterConfig) GetEncryptionConfig() *EncryptionConfig { + if m != nil { + return 
m.EncryptionConfig + } + return nil +} + +// Encryption settings for the cluster. +type EncryptionConfig struct { + // Optional. The Cloud KMS key name to use for PD disk encryption for all + // instances in the cluster. + GcePdKmsKeyName string `protobuf:"bytes,1,opt,name=gce_pd_kms_key_name,json=gcePdKmsKeyName,proto3" json:"gce_pd_kms_key_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EncryptionConfig) Reset() { *m = EncryptionConfig{} } +func (m *EncryptionConfig) String() string { return proto.CompactTextString(m) } +func (*EncryptionConfig) ProtoMessage() {} +func (*EncryptionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{2} +} +func (m *EncryptionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EncryptionConfig.Unmarshal(m, b) +} +func (m *EncryptionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EncryptionConfig.Marshal(b, m, deterministic) +} +func (dst *EncryptionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_EncryptionConfig.Merge(dst, src) +} +func (m *EncryptionConfig) XXX_Size() int { + return xxx_messageInfo_EncryptionConfig.Size(m) +} +func (m *EncryptionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_EncryptionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_EncryptionConfig proto.InternalMessageInfo + +func (m *EncryptionConfig) GetGcePdKmsKeyName() string { + if m != nil { + return m.GcePdKmsKeyName + } + return "" +} + +// Common config settings for resources of Compute Engine cluster +// instances, applicable to all instances in the cluster. +type GceClusterConfig struct { + // Optional. The zone where the Compute Engine cluster will be located. + // On a create request, it is required in the "global" region. If omitted + // in a non-global Cloud Dataproc region, the service will pick a zone in the + // corresponding Compute Engine region. On a get request, zone will always be + // present. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` + // * `projects/[project_id]/zones/[zone]` + // * `us-central1-f` + ZoneUri string `protobuf:"bytes,1,opt,name=zone_uri,json=zoneUri,proto3" json:"zone_uri,omitempty"` + // Optional. The Compute Engine network to be used for machine + // communications. Cannot be specified with subnetwork_uri. If neither + // `network_uri` nor `subnetwork_uri` is specified, the "default" network of + // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see + // [Using Subnetworks](/compute/docs/subnetworks) for more information). + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` + // * `projects/[project_id]/regions/global/default` + // * `default` + NetworkUri string `protobuf:"bytes,2,opt,name=network_uri,json=networkUri,proto3" json:"network_uri,omitempty"` + // Optional. The Compute Engine subnetwork to be used for machine + // communications. Cannot be specified with network_uri. + // + // A full URL, partial URI, or short name are valid. 
Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0` + // * `projects/[project_id]/regions/us-east1/sub0` + // * `sub0` + SubnetworkUri string `protobuf:"bytes,6,opt,name=subnetwork_uri,json=subnetworkUri,proto3" json:"subnetwork_uri,omitempty"` + // Optional. If true, all instances in the cluster will only have internal IP + // addresses. By default, clusters are not restricted to internal IP + // addresses, and will have ephemeral external IP addresses assigned to each + // instance. This `internal_ip_only` restriction can only be enabled for + // subnetwork enabled networks, and all off-cluster dependencies must be + // configured to be accessible without external IP addresses. + InternalIpOnly bool `protobuf:"varint,7,opt,name=internal_ip_only,json=internalIpOnly,proto3" json:"internal_ip_only,omitempty"` + // Optional. The service account of the instances. Defaults to the default + // Compute Engine service account. Custom service accounts need + // permissions equivalent to the following IAM roles: + // + // * roles/logging.logWriter + // * roles/storage.objectAdmin + // + // (see + // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts + // for more information). + // Example: `[account_id]@[project_id].iam.gserviceaccount.com` + ServiceAccount string `protobuf:"bytes,8,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // Optional. The URIs of service account scopes to be included in + // Compute Engine instances. The following base set of scopes is always + // included: + // + // * https://www.googleapis.com/auth/cloud.useraccounts.readonly + // * https://www.googleapis.com/auth/devstorage.read_write + // * https://www.googleapis.com/auth/logging.write + // + // If no scopes are specified, the following defaults are also provided: + // + // * https://www.googleapis.com/auth/bigquery + // * https://www.googleapis.com/auth/bigtable.admin.table + // * https://www.googleapis.com/auth/bigtable.data + // * https://www.googleapis.com/auth/devstorage.full_control + ServiceAccountScopes []string `protobuf:"bytes,3,rep,name=service_account_scopes,json=serviceAccountScopes,proto3" json:"service_account_scopes,omitempty"` + // The Compute Engine tags to add to all instances (see + // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). + Tags []string `protobuf:"bytes,4,rep,name=tags,proto3" json:"tags,omitempty"` + // The Compute Engine metadata entries to add to all instances (see + // [Project and instance + // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). 
+ Metadata map[string]string `protobuf:"bytes,5,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GceClusterConfig) Reset() { *m = GceClusterConfig{} } +func (m *GceClusterConfig) String() string { return proto.CompactTextString(m) } +func (*GceClusterConfig) ProtoMessage() {} +func (*GceClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{3} +} +func (m *GceClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GceClusterConfig.Unmarshal(m, b) +} +func (m *GceClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GceClusterConfig.Marshal(b, m, deterministic) +} +func (dst *GceClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_GceClusterConfig.Merge(dst, src) +} +func (m *GceClusterConfig) XXX_Size() int { + return xxx_messageInfo_GceClusterConfig.Size(m) +} +func (m *GceClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_GceClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_GceClusterConfig proto.InternalMessageInfo + +func (m *GceClusterConfig) GetZoneUri() string { + if m != nil { + return m.ZoneUri + } + return "" +} + +func (m *GceClusterConfig) GetNetworkUri() string { + if m != nil { + return m.NetworkUri + } + return "" +} + +func (m *GceClusterConfig) GetSubnetworkUri() string { + if m != nil { + return m.SubnetworkUri + } + return "" +} + +func (m *GceClusterConfig) GetInternalIpOnly() bool { + if m != nil { + return m.InternalIpOnly + } + return false +} + +func (m *GceClusterConfig) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *GceClusterConfig) GetServiceAccountScopes() []string { + if m != nil { + return m.ServiceAccountScopes + } + return nil +} + +func (m *GceClusterConfig) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *GceClusterConfig) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +// Optional. The config settings for Compute Engine resources in +// an instance group, such as a master or worker group. +type InstanceGroupConfig struct { + // Optional. The number of VM instances in the instance group. + // For master instance groups, must be set to 1. + NumInstances int32 `protobuf:"varint,1,opt,name=num_instances,json=numInstances,proto3" json:"num_instances,omitempty"` + // Output only. The list of instance names. Cloud Dataproc derives the names + // from `cluster_name`, `num_instances`, and the instance group. + InstanceNames []string `protobuf:"bytes,2,rep,name=instance_names,json=instanceNames,proto3" json:"instance_names,omitempty"` + // Optional. The Compute Engine image resource used for cluster + // instances. It can be specified or may be inferred from + // `SoftwareConfig.image_version`. + ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. The Compute Engine machine type used for cluster instances. + // + // A full URL, partial URI, or short name are valid. 
Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `n1-standard-2` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone + // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the machine type + // resource, for example, `n1-standard-2`. + MachineTypeUri string `protobuf:"bytes,4,opt,name=machine_type_uri,json=machineTypeUri,proto3" json:"machine_type_uri,omitempty"` + // Optional. Disk option config settings. + DiskConfig *DiskConfig `protobuf:"bytes,5,opt,name=disk_config,json=diskConfig,proto3" json:"disk_config,omitempty"` + // Optional. Specifies that this instance group contains preemptible + // instances. + IsPreemptible bool `protobuf:"varint,6,opt,name=is_preemptible,json=isPreemptible,proto3" json:"is_preemptible,omitempty"` + // Output only. The config for Compute Engine Instance Group + // Manager that manages this group. + // This is only used for preemptible instance groups. + ManagedGroupConfig *ManagedGroupConfig `protobuf:"bytes,7,opt,name=managed_group_config,json=managedGroupConfig,proto3" json:"managed_group_config,omitempty"` + // Optional. The Compute Engine accelerator configuration for these + // instances. + // + // **Beta Feature**: This feature is still under development. It may be + // changed before final release. + Accelerators []*AcceleratorConfig `protobuf:"bytes,8,rep,name=accelerators,proto3" json:"accelerators,omitempty"` + // Optional. Specifies the minimum cpu platform for the Instance Group. + // See [Cloud Dataproc→Minimum CPU Platform] + // (/dataproc/docs/concepts/compute/dataproc-min-cpu). 
+ MinCpuPlatform string `protobuf:"bytes,9,opt,name=min_cpu_platform,json=minCpuPlatform,proto3" json:"min_cpu_platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstanceGroupConfig) Reset() { *m = InstanceGroupConfig{} } +func (m *InstanceGroupConfig) String() string { return proto.CompactTextString(m) } +func (*InstanceGroupConfig) ProtoMessage() {} +func (*InstanceGroupConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{4} +} +func (m *InstanceGroupConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstanceGroupConfig.Unmarshal(m, b) +} +func (m *InstanceGroupConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstanceGroupConfig.Marshal(b, m, deterministic) +} +func (dst *InstanceGroupConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstanceGroupConfig.Merge(dst, src) +} +func (m *InstanceGroupConfig) XXX_Size() int { + return xxx_messageInfo_InstanceGroupConfig.Size(m) +} +func (m *InstanceGroupConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InstanceGroupConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InstanceGroupConfig proto.InternalMessageInfo + +func (m *InstanceGroupConfig) GetNumInstances() int32 { + if m != nil { + return m.NumInstances + } + return 0 +} + +func (m *InstanceGroupConfig) GetInstanceNames() []string { + if m != nil { + return m.InstanceNames + } + return nil +} + +func (m *InstanceGroupConfig) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *InstanceGroupConfig) GetMachineTypeUri() string { + if m != nil { + return m.MachineTypeUri + } + return "" +} + +func (m *InstanceGroupConfig) GetDiskConfig() *DiskConfig { + if m != nil { + return m.DiskConfig + } + return nil +} + +func (m *InstanceGroupConfig) GetIsPreemptible() bool { + if m != nil { + return m.IsPreemptible + } + return false +} + +func (m *InstanceGroupConfig) GetManagedGroupConfig() *ManagedGroupConfig { + if m != nil { + return m.ManagedGroupConfig + } + return nil +} + +func (m *InstanceGroupConfig) GetAccelerators() []*AcceleratorConfig { + if m != nil { + return m.Accelerators + } + return nil +} + +func (m *InstanceGroupConfig) GetMinCpuPlatform() string { + if m != nil { + return m.MinCpuPlatform + } + return "" +} + +// Specifies the resources used to actively manage an instance group. +type ManagedGroupConfig struct { + // Output only. The name of the Instance Template used for the Managed + // Instance Group. + InstanceTemplateName string `protobuf:"bytes,1,opt,name=instance_template_name,json=instanceTemplateName,proto3" json:"instance_template_name,omitempty"` + // Output only. The name of the Instance Group Manager for this group. 
+ InstanceGroupManagerName string `protobuf:"bytes,2,opt,name=instance_group_manager_name,json=instanceGroupManagerName,proto3" json:"instance_group_manager_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedGroupConfig) Reset() { *m = ManagedGroupConfig{} } +func (m *ManagedGroupConfig) String() string { return proto.CompactTextString(m) } +func (*ManagedGroupConfig) ProtoMessage() {} +func (*ManagedGroupConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{5} +} +func (m *ManagedGroupConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedGroupConfig.Unmarshal(m, b) +} +func (m *ManagedGroupConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedGroupConfig.Marshal(b, m, deterministic) +} +func (dst *ManagedGroupConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedGroupConfig.Merge(dst, src) +} +func (m *ManagedGroupConfig) XXX_Size() int { + return xxx_messageInfo_ManagedGroupConfig.Size(m) +} +func (m *ManagedGroupConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedGroupConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedGroupConfig proto.InternalMessageInfo + +func (m *ManagedGroupConfig) GetInstanceTemplateName() string { + if m != nil { + return m.InstanceTemplateName + } + return "" +} + +func (m *ManagedGroupConfig) GetInstanceGroupManagerName() string { + if m != nil { + return m.InstanceGroupManagerName + } + return "" +} + +// Specifies the type and number of accelerator cards attached to the instances +// of an instance group (see [GPUs on Compute Engine](/compute/docs/gpus/)). +type AcceleratorConfig struct { + // Full URL, partial URI, or short name of the accelerator type resource to + // expose to this instance. See [Compute Engine AcceleratorTypes]( + // /compute/docs/reference/beta/acceleratorTypes) + // + // Examples + // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `nvidia-tesla-k80` + // + // **Auto Zone Exception**: If you are using the Cloud Dataproc + // [Auto Zone + // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the accelerator type + // resource, for example, `nvidia-tesla-k80`. + AcceleratorTypeUri string `protobuf:"bytes,1,opt,name=accelerator_type_uri,json=acceleratorTypeUri,proto3" json:"accelerator_type_uri,omitempty"` + // The number of the accelerator cards of this type exposed to this instance. 
+ AcceleratorCount int32 `protobuf:"varint,2,opt,name=accelerator_count,json=acceleratorCount,proto3" json:"accelerator_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcceleratorConfig) Reset() { *m = AcceleratorConfig{} } +func (m *AcceleratorConfig) String() string { return proto.CompactTextString(m) } +func (*AcceleratorConfig) ProtoMessage() {} +func (*AcceleratorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{6} +} +func (m *AcceleratorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcceleratorConfig.Unmarshal(m, b) +} +func (m *AcceleratorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcceleratorConfig.Marshal(b, m, deterministic) +} +func (dst *AcceleratorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcceleratorConfig.Merge(dst, src) +} +func (m *AcceleratorConfig) XXX_Size() int { + return xxx_messageInfo_AcceleratorConfig.Size(m) +} +func (m *AcceleratorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AcceleratorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AcceleratorConfig proto.InternalMessageInfo + +func (m *AcceleratorConfig) GetAcceleratorTypeUri() string { + if m != nil { + return m.AcceleratorTypeUri + } + return "" +} + +func (m *AcceleratorConfig) GetAcceleratorCount() int32 { + if m != nil { + return m.AcceleratorCount + } + return 0 +} + +// Specifies the config of disk options for a group of VM instances. +type DiskConfig struct { + // Optional. Type of the boot disk (default is "pd-standard"). + // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or + // "pd-standard" (Persistent Disk Hard Disk Drive). + BootDiskType string `protobuf:"bytes,3,opt,name=boot_disk_type,json=bootDiskType,proto3" json:"boot_disk_type,omitempty"` + // Optional. Size in GB of the boot disk (default is 500GB). + BootDiskSizeGb int32 `protobuf:"varint,1,opt,name=boot_disk_size_gb,json=bootDiskSizeGb,proto3" json:"boot_disk_size_gb,omitempty"` + // Optional. Number of attached SSDs, from 0 to 4 (default is 0). + // If SSDs are not attached, the boot disk is used to store runtime logs and + // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. + // If one or more SSDs are attached, this runtime bulk + // data is spread across them, and the boot disk contains only basic + // config and installed binaries. 
+ NumLocalSsds int32 `protobuf:"varint,2,opt,name=num_local_ssds,json=numLocalSsds,proto3" json:"num_local_ssds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiskConfig) Reset() { *m = DiskConfig{} } +func (m *DiskConfig) String() string { return proto.CompactTextString(m) } +func (*DiskConfig) ProtoMessage() {} +func (*DiskConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{7} +} +func (m *DiskConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiskConfig.Unmarshal(m, b) +} +func (m *DiskConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiskConfig.Marshal(b, m, deterministic) +} +func (dst *DiskConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiskConfig.Merge(dst, src) +} +func (m *DiskConfig) XXX_Size() int { + return xxx_messageInfo_DiskConfig.Size(m) +} +func (m *DiskConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DiskConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DiskConfig proto.InternalMessageInfo + +func (m *DiskConfig) GetBootDiskType() string { + if m != nil { + return m.BootDiskType + } + return "" +} + +func (m *DiskConfig) GetBootDiskSizeGb() int32 { + if m != nil { + return m.BootDiskSizeGb + } + return 0 +} + +func (m *DiskConfig) GetNumLocalSsds() int32 { + if m != nil { + return m.NumLocalSsds + } + return 0 +} + +// Specifies the cluster auto-delete schedule configuration. +type LifecycleConfig struct { + // Optional. The duration to keep the cluster alive while idling. + // Passing this threshold will cause the cluster to be + // deleted. Valid range: **[10m, 14d]**. + // + // Example: **"10m"**, the minimum value, to delete the + // cluster when it has had no jobs running for 10 minutes. + IdleDeleteTtl *duration.Duration `protobuf:"bytes,1,opt,name=idle_delete_ttl,json=idleDeleteTtl,proto3" json:"idle_delete_ttl,omitempty"` + // Optional. Either the exact time the cluster should be deleted at or + // the cluster maximum age. 
+ // + // Types that are valid to be assigned to Ttl: + // *LifecycleConfig_AutoDeleteTime + // *LifecycleConfig_AutoDeleteTtl + Ttl isLifecycleConfig_Ttl `protobuf_oneof:"ttl"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LifecycleConfig) Reset() { *m = LifecycleConfig{} } +func (m *LifecycleConfig) String() string { return proto.CompactTextString(m) } +func (*LifecycleConfig) ProtoMessage() {} +func (*LifecycleConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{8} +} +func (m *LifecycleConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LifecycleConfig.Unmarshal(m, b) +} +func (m *LifecycleConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LifecycleConfig.Marshal(b, m, deterministic) +} +func (dst *LifecycleConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LifecycleConfig.Merge(dst, src) +} +func (m *LifecycleConfig) XXX_Size() int { + return xxx_messageInfo_LifecycleConfig.Size(m) +} +func (m *LifecycleConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LifecycleConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LifecycleConfig proto.InternalMessageInfo + +func (m *LifecycleConfig) GetIdleDeleteTtl() *duration.Duration { + if m != nil { + return m.IdleDeleteTtl + } + return nil +} + +type isLifecycleConfig_Ttl interface { + isLifecycleConfig_Ttl() +} + +type LifecycleConfig_AutoDeleteTime struct { + AutoDeleteTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=auto_delete_time,json=autoDeleteTime,proto3,oneof"` +} + +type LifecycleConfig_AutoDeleteTtl struct { + AutoDeleteTtl *duration.Duration `protobuf:"bytes,3,opt,name=auto_delete_ttl,json=autoDeleteTtl,proto3,oneof"` +} + +func (*LifecycleConfig_AutoDeleteTime) isLifecycleConfig_Ttl() {} + +func (*LifecycleConfig_AutoDeleteTtl) isLifecycleConfig_Ttl() {} + +func (m *LifecycleConfig) GetTtl() isLifecycleConfig_Ttl { + if m != nil { + return m.Ttl + } + return nil +} + +func (m *LifecycleConfig) GetAutoDeleteTime() *timestamp.Timestamp { + if x, ok := m.GetTtl().(*LifecycleConfig_AutoDeleteTime); ok { + return x.AutoDeleteTime + } + return nil +} + +func (m *LifecycleConfig) GetAutoDeleteTtl() *duration.Duration { + if x, ok := m.GetTtl().(*LifecycleConfig_AutoDeleteTtl); ok { + return x.AutoDeleteTtl + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LifecycleConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LifecycleConfig_OneofMarshaler, _LifecycleConfig_OneofUnmarshaler, _LifecycleConfig_OneofSizer, []interface{}{ + (*LifecycleConfig_AutoDeleteTime)(nil), + (*LifecycleConfig_AutoDeleteTtl)(nil), + } +} + +func _LifecycleConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LifecycleConfig) + // ttl + switch x := m.Ttl.(type) { + case *LifecycleConfig_AutoDeleteTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AutoDeleteTime); err != nil { + return err + } + case *LifecycleConfig_AutoDeleteTtl: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AutoDeleteTtl); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LifecycleConfig.Ttl has unexpected type %T", x) + } + return nil +} + +func _LifecycleConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LifecycleConfig) + switch tag { + case 2: // ttl.auto_delete_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.Ttl = &LifecycleConfig_AutoDeleteTime{msg} + return true, err + case 3: // ttl.auto_delete_ttl + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.Ttl = &LifecycleConfig_AutoDeleteTtl{msg} + return true, err + default: + return false, nil + } +} + +func _LifecycleConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LifecycleConfig) + // ttl + switch x := m.Ttl.(type) { + case *LifecycleConfig_AutoDeleteTime: + s := proto.Size(x.AutoDeleteTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LifecycleConfig_AutoDeleteTtl: + s := proto.Size(x.AutoDeleteTtl) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specifies an executable to run on a fully configured node and a +// timeout period for executable completion. +type NodeInitializationAction struct { + // Required. Cloud Storage URI of executable file. + ExecutableFile string `protobuf:"bytes,1,opt,name=executable_file,json=executableFile,proto3" json:"executable_file,omitempty"` + // Optional. Amount of time executable has to complete. Default is + // 10 minutes. Cluster creation fails with an explanatory error message (the + // name of the executable that caused the error and the exceeded timeout + // period) if the executable is not completed at end of the timeout period. 
+ ExecutionTimeout *duration.Duration `protobuf:"bytes,2,opt,name=execution_timeout,json=executionTimeout,proto3" json:"execution_timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeInitializationAction) Reset() { *m = NodeInitializationAction{} } +func (m *NodeInitializationAction) String() string { return proto.CompactTextString(m) } +func (*NodeInitializationAction) ProtoMessage() {} +func (*NodeInitializationAction) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{9} +} +func (m *NodeInitializationAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeInitializationAction.Unmarshal(m, b) +} +func (m *NodeInitializationAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeInitializationAction.Marshal(b, m, deterministic) +} +func (dst *NodeInitializationAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeInitializationAction.Merge(dst, src) +} +func (m *NodeInitializationAction) XXX_Size() int { + return xxx_messageInfo_NodeInitializationAction.Size(m) +} +func (m *NodeInitializationAction) XXX_DiscardUnknown() { + xxx_messageInfo_NodeInitializationAction.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeInitializationAction proto.InternalMessageInfo + +func (m *NodeInitializationAction) GetExecutableFile() string { + if m != nil { + return m.ExecutableFile + } + return "" +} + +func (m *NodeInitializationAction) GetExecutionTimeout() *duration.Duration { + if m != nil { + return m.ExecutionTimeout + } + return nil +} + +// The status of a cluster and its instances. +type ClusterStatus struct { + // Output only. The cluster's state. + State ClusterStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.ClusterStatus_State" json:"state,omitempty"` + // Output only. Optional details of cluster's state. + Detail string `protobuf:"bytes,2,opt,name=detail,proto3" json:"detail,omitempty"` + // Output only. Time when this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + // Output only. Additional state information that includes + // status reported by the agent. 
+ Substate ClusterStatus_Substate `protobuf:"varint,4,opt,name=substate,proto3,enum=google.cloud.dataproc.v1beta2.ClusterStatus_Substate" json:"substate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterStatus) Reset() { *m = ClusterStatus{} } +func (m *ClusterStatus) String() string { return proto.CompactTextString(m) } +func (*ClusterStatus) ProtoMessage() {} +func (*ClusterStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{10} +} +func (m *ClusterStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterStatus.Unmarshal(m, b) +} +func (m *ClusterStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterStatus.Marshal(b, m, deterministic) +} +func (dst *ClusterStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterStatus.Merge(dst, src) +} +func (m *ClusterStatus) XXX_Size() int { + return xxx_messageInfo_ClusterStatus.Size(m) +} +func (m *ClusterStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterStatus proto.InternalMessageInfo + +func (m *ClusterStatus) GetState() ClusterStatus_State { + if m != nil { + return m.State + } + return ClusterStatus_UNKNOWN +} + +func (m *ClusterStatus) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func (m *ClusterStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +func (m *ClusterStatus) GetSubstate() ClusterStatus_Substate { + if m != nil { + return m.Substate + } + return ClusterStatus_UNSPECIFIED +} + +// Specifies the selection and config of software inside the cluster. +type SoftwareConfig struct { + // Optional. The version of software inside the cluster. It must be one of the + // supported [Cloud Dataproc + // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), + // such as "1.2" (including a subminor version, such as "1.2.29"), or the + // ["preview" + // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). + // If unspecified, it defaults to the latest version. + ImageVersion string `protobuf:"bytes,1,opt,name=image_version,json=imageVersion,proto3" json:"image_version,omitempty"` + // Optional. The properties to set on daemon config files. + // + // Property keys are specified in `prefix:property` format, such as + // `core:fs.defaultFS`. The following are supported prefixes + // and their mappings: + // + // * capacity-scheduler: `capacity-scheduler.xml` + // * core: `core-site.xml` + // * distcp: `distcp-default.xml` + // * hdfs: `hdfs-site.xml` + // * hive: `hive-site.xml` + // * mapred: `mapred-site.xml` + // * pig: `pig.properties` + // * spark: `spark-defaults.conf` + // * yarn: `yarn-site.xml` + // + // For more information, see + // [Cluster properties](/dataproc/docs/concepts/cluster-properties). 
+ Properties map[string]string `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SoftwareConfig) Reset() { *m = SoftwareConfig{} } +func (m *SoftwareConfig) String() string { return proto.CompactTextString(m) } +func (*SoftwareConfig) ProtoMessage() {} +func (*SoftwareConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{11} +} +func (m *SoftwareConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SoftwareConfig.Unmarshal(m, b) +} +func (m *SoftwareConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SoftwareConfig.Marshal(b, m, deterministic) +} +func (dst *SoftwareConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SoftwareConfig.Merge(dst, src) +} +func (m *SoftwareConfig) XXX_Size() int { + return xxx_messageInfo_SoftwareConfig.Size(m) +} +func (m *SoftwareConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SoftwareConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SoftwareConfig proto.InternalMessageInfo + +func (m *SoftwareConfig) GetImageVersion() string { + if m != nil { + return m.ImageVersion + } + return "" +} + +func (m *SoftwareConfig) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +// Contains cluster daemon metrics, such as HDFS and YARN stats. +// +// **Beta Feature**: This report is available for testing purposes only. It may +// be changed before final release. +type ClusterMetrics struct { + // The HDFS metrics. + HdfsMetrics map[string]int64 `protobuf:"bytes,1,rep,name=hdfs_metrics,json=hdfsMetrics,proto3" json:"hdfs_metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + // The YARN metrics. + YarnMetrics map[string]int64 `protobuf:"bytes,2,rep,name=yarn_metrics,json=yarnMetrics,proto3" json:"yarn_metrics,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterMetrics) Reset() { *m = ClusterMetrics{} } +func (m *ClusterMetrics) String() string { return proto.CompactTextString(m) } +func (*ClusterMetrics) ProtoMessage() {} +func (*ClusterMetrics) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{12} +} +func (m *ClusterMetrics) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterMetrics.Unmarshal(m, b) +} +func (m *ClusterMetrics) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterMetrics.Marshal(b, m, deterministic) +} +func (dst *ClusterMetrics) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterMetrics.Merge(dst, src) +} +func (m *ClusterMetrics) XXX_Size() int { + return xxx_messageInfo_ClusterMetrics.Size(m) +} +func (m *ClusterMetrics) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterMetrics.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterMetrics proto.InternalMessageInfo + +func (m *ClusterMetrics) GetHdfsMetrics() map[string]int64 { + if m != nil { + return m.HdfsMetrics + } + return nil +} + +func (m *ClusterMetrics) GetYarnMetrics() map[string]int64 { + if m != nil { + return m.YarnMetrics + } + return nil +} + +// A request to create a cluster. 
+type CreateClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster to create. + Cluster *Cluster `protobuf:"bytes,2,opt,name=cluster,proto3" json:"cluster,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{13} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +func (m *CreateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *CreateClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to update a cluster. +type UpdateClusterRequest struct { + // Required. The ID of the Google Cloud Platform project the + // cluster belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,5,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The changes to the cluster. 
+ Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + // Optional. Timeout for graceful YARN decomissioning. Graceful + // decommissioning allows removing nodes from the cluster without + // interrupting jobs in progress. Timeout specifies how long to wait for jobs + // in progress to finish before forcefully removing nodes (and potentially + // interrupting jobs). Default timeout is 0 (for forceful decommission), and + // the maximum allowed timeout is 1 day. + // + // Only supported on Dataproc image versions 1.2 and higher. + GracefulDecommissionTimeout *duration.Duration `protobuf:"bytes,6,opt,name=graceful_decommission_timeout,json=gracefulDecommissionTimeout,proto3" json:"graceful_decommission_timeout,omitempty"` + // Required. Specifies the path, relative to `Cluster`, of + // the field to update. For example, to change the number of workers + // in a cluster to 5, the `update_mask` parameter would be + // specified as `config.worker_config.num_instances`, + // and the `PATCH` request body would specify the new value, as follows: + // + // { + // "config":{ + // "workerConfig":{ + // "numInstances":"5" + // } + // } + // } + // + // Similarly, to change the number of preemptible workers in a cluster to 5, + // the `update_mask` parameter would be + // `config.secondary_worker_config.num_instances`, and the `PATCH` request + // body would be set as follows: + // + // { + // "config":{ + // "secondaryWorkerConfig":{ + // "numInstances":"5" + // } + // } + // } + // Note: currently only the following fields can be updated: + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + //
Mask                                           Purpose
 + //     labels                                         Updates labels
 + //     config.worker_config.num_instances             Resize primary worker group
 + //     config.secondary_worker_config.num_instances   Resize secondary worker group
 + //     config.lifecycle_config.auto_delete_ttl        Reset MAX TTL duration
 + //     config.lifecycle_config.auto_delete_time       Update MAX TTL deletion timestamp
 + //     config.lifecycle_config.idle_delete_ttl        Update Idle TTL duration
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,7,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterRequest) Reset() { *m = UpdateClusterRequest{} } +func (m *UpdateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterRequest) ProtoMessage() {} +func (*UpdateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{14} +} +func (m *UpdateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterRequest.Unmarshal(m, b) +} +func (m *UpdateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterRequest.Merge(dst, src) +} +func (m *UpdateClusterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateClusterRequest.Size(m) +} +func (m *UpdateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterRequest proto.InternalMessageInfo + +func (m *UpdateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *UpdateClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *UpdateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *UpdateClusterRequest) GetGracefulDecommissionTimeout() *duration.Duration { + if m != nil { + return m.GracefulDecommissionTimeout + } + return nil +} + +func (m *UpdateClusterRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to delete a cluster. +type DeleteClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Optional. 
Specifying the `cluster_uuid` means the RPC should fail + // (with error NOT_FOUND) if cluster with specified UUID does not exist. + ClusterUuid string `protobuf:"bytes,4,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] + // requests with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created + // and stored in the backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,5,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{15} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +func (m *DeleteClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DeleteClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *DeleteClusterRequest) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *DeleteClusterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Request to get the resource representation for a cluster in a project. +type GetClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. 
+ ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{16} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +func (m *GetClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *GetClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +// A request to list the clusters in a project. +type ListClustersRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,4,opt,name=region,proto3" json:"region,omitempty"` + // Optional. A filter constraining the clusters to list. Filters are + // case-sensitive and have the following syntax: + // + // field = value [AND [field = value]] ... + // + // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + // and `[KEY]` is a label key. **value** can be `*` to match all values. + // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + // contains the `DELETING` and `ERROR` states. + // `clusterName` is the name of the cluster provided at creation time. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. + // + // Example filter: + // + // status.state = ACTIVE AND clusterName = mycluster + // AND labels.env = staging AND labels.starred = * + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. The standard List page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The standard List page token. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{17} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +func (m *ListClustersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListClustersRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *ListClustersRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListClustersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListClustersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The list of all clusters in a project. +type ListClustersResponse struct { + // Output only. The clusters in the project. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // Output only. This token is included in the response if there are more + // results to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent ListClustersRequest. 
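The paging comment above is the whole contract for listing clusters: keep echoing next_page_token back as page_token until it comes back empty. A minimal sketch of that loop against the generated client defined later in this file; the dialed client, project, and region values are assumed for illustration, and the filter string simply reuses the example given in the ListClustersRequest documentation.

func listAllClusters(ctx context.Context, client ClusterControllerClient, projectID, region string) ([]*Cluster, error) {
	// Illustrative sketch, not part of the vendored file. Assumes a
	// ClusterControllerClient already built from a dialed *grpc.ClientConn.
	req := &ListClustersRequest{
		ProjectId: projectID,
		Region:    region,
		// Filter syntax as documented on ListClustersRequest above.
		Filter: "status.state = ACTIVE AND labels.env = staging",
	}
	var all []*Cluster
	for {
		resp, err := client.ListClusters(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetClusters()...)
		if resp.GetNextPageToken() == "" {
			return all, nil // no further pages
		}
		req.PageToken = resp.GetNextPageToken()
	}
}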
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{18} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to collect cluster diagnostic information. +type DiagnoseClusterRequest struct { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The cluster name. 
+ ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiagnoseClusterRequest) Reset() { *m = DiagnoseClusterRequest{} } +func (m *DiagnoseClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DiagnoseClusterRequest) ProtoMessage() {} +func (*DiagnoseClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{19} +} +func (m *DiagnoseClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiagnoseClusterRequest.Unmarshal(m, b) +} +func (m *DiagnoseClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiagnoseClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DiagnoseClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiagnoseClusterRequest.Merge(dst, src) +} +func (m *DiagnoseClusterRequest) XXX_Size() int { + return xxx_messageInfo_DiagnoseClusterRequest.Size(m) +} +func (m *DiagnoseClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DiagnoseClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DiagnoseClusterRequest proto.InternalMessageInfo + +func (m *DiagnoseClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DiagnoseClusterRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DiagnoseClusterRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +// The location of diagnostic output. +type DiagnoseClusterResults struct { + // Output only. The Cloud Storage URI of the diagnostic output. + // The output report is a plain text file with a summary of collected + // diagnostics. 
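Note that DiagnoseCluster (declared on the client interface below) does not return DiagnoseClusterResults directly; it returns a google.longrunning Operation that has to be polled before the output URI above exists. A rough sketch of kicking off the diagnosis, with the client and identifiers assumed:

func startDiagnose(ctx context.Context, client ClusterControllerClient, projectID, region, clusterName string) (string, error) {
	// Illustrative sketch only; the returned operation name still has to be
	// polled via the Operations API before DiagnoseClusterResults is available.
	op, err := client.DiagnoseCluster(ctx, &DiagnoseClusterRequest{
		ProjectId:   projectID,
		Region:      region,
		ClusterName: clusterName,
	})
	if err != nil {
		return "", err
	}
	return op.GetName(), nil
}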
+ OutputUri string `protobuf:"bytes,1,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiagnoseClusterResults) Reset() { *m = DiagnoseClusterResults{} } +func (m *DiagnoseClusterResults) String() string { return proto.CompactTextString(m) } +func (*DiagnoseClusterResults) ProtoMessage() {} +func (*DiagnoseClusterResults) Descriptor() ([]byte, []int) { + return fileDescriptor_clusters_794cf83e9cf994b6, []int{20} +} +func (m *DiagnoseClusterResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiagnoseClusterResults.Unmarshal(m, b) +} +func (m *DiagnoseClusterResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiagnoseClusterResults.Marshal(b, m, deterministic) +} +func (dst *DiagnoseClusterResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiagnoseClusterResults.Merge(dst, src) +} +func (m *DiagnoseClusterResults) XXX_Size() int { + return xxx_messageInfo_DiagnoseClusterResults.Size(m) +} +func (m *DiagnoseClusterResults) XXX_DiscardUnknown() { + xxx_messageInfo_DiagnoseClusterResults.DiscardUnknown(m) +} + +var xxx_messageInfo_DiagnoseClusterResults proto.InternalMessageInfo + +func (m *DiagnoseClusterResults) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func init() { + proto.RegisterType((*Cluster)(nil), "google.cloud.dataproc.v1beta2.Cluster") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.Cluster.LabelsEntry") + proto.RegisterType((*ClusterConfig)(nil), "google.cloud.dataproc.v1beta2.ClusterConfig") + proto.RegisterType((*EncryptionConfig)(nil), "google.cloud.dataproc.v1beta2.EncryptionConfig") + proto.RegisterType((*GceClusterConfig)(nil), "google.cloud.dataproc.v1beta2.GceClusterConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry") + proto.RegisterType((*InstanceGroupConfig)(nil), "google.cloud.dataproc.v1beta2.InstanceGroupConfig") + proto.RegisterType((*ManagedGroupConfig)(nil), "google.cloud.dataproc.v1beta2.ManagedGroupConfig") + proto.RegisterType((*AcceleratorConfig)(nil), "google.cloud.dataproc.v1beta2.AcceleratorConfig") + proto.RegisterType((*DiskConfig)(nil), "google.cloud.dataproc.v1beta2.DiskConfig") + proto.RegisterType((*LifecycleConfig)(nil), "google.cloud.dataproc.v1beta2.LifecycleConfig") + proto.RegisterType((*NodeInitializationAction)(nil), "google.cloud.dataproc.v1beta2.NodeInitializationAction") + proto.RegisterType((*ClusterStatus)(nil), "google.cloud.dataproc.v1beta2.ClusterStatus") + proto.RegisterType((*SoftwareConfig)(nil), "google.cloud.dataproc.v1beta2.SoftwareConfig") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry") + proto.RegisterType((*ClusterMetrics)(nil), "google.cloud.dataproc.v1beta2.ClusterMetrics") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry") + proto.RegisterType((*CreateClusterRequest)(nil), "google.cloud.dataproc.v1beta2.CreateClusterRequest") + proto.RegisterType((*UpdateClusterRequest)(nil), "google.cloud.dataproc.v1beta2.UpdateClusterRequest") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.cloud.dataproc.v1beta2.DeleteClusterRequest") + 
proto.RegisterType((*GetClusterRequest)(nil), "google.cloud.dataproc.v1beta2.GetClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.cloud.dataproc.v1beta2.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.cloud.dataproc.v1beta2.ListClustersResponse") + proto.RegisterType((*DiagnoseClusterRequest)(nil), "google.cloud.dataproc.v1beta2.DiagnoseClusterRequest") + proto.RegisterType((*DiagnoseClusterResults)(nil), "google.cloud.dataproc.v1beta2.DiagnoseClusterResults") + proto.RegisterEnum("google.cloud.dataproc.v1beta2.ClusterStatus_State", ClusterStatus_State_name, ClusterStatus_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.ClusterStatus_Substate", ClusterStatus_Substate_name, ClusterStatus_Substate_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterControllerClient is the client API for ClusterController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterControllerClient interface { + // Creates a cluster in a project. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates a cluster in a project. + UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a cluster in a project. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets the resource representation for a cluster in a project. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Lists all regions/{region}/clusters in a project. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Gets cluster diagnostic information. + // After the operation completes, the Operation.response field + // contains `DiagnoseClusterOutputLocation`. + DiagnoseCluster(ctx context.Context, in *DiagnoseClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type clusterControllerClient struct { + cc *grpc.ClientConn +} + +func NewClusterControllerClient(cc *grpc.ClientConn) ClusterControllerClient { + return &clusterControllerClient{cc} +} + +func (c *clusterControllerClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterControllerClient) DiagnoseCluster(ctx context.Context, in *DiagnoseClusterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterControllerServer is the server API for ClusterController service. +type ClusterControllerServer interface { + // Creates a cluster in a project. + CreateCluster(context.Context, *CreateClusterRequest) (*longrunning.Operation, error) + // Updates a cluster in a project. + UpdateCluster(context.Context, *UpdateClusterRequest) (*longrunning.Operation, error) + // Deletes a cluster in a project. + DeleteCluster(context.Context, *DeleteClusterRequest) (*longrunning.Operation, error) + // Gets the resource representation for a cluster in a project. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Lists all regions/{region}/clusters in a project. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Gets cluster diagnostic information. + // After the operation completes, the Operation.response field + // contains `DiagnoseClusterOutputLocation`. 
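As NewClusterControllerClient above shows, the generated client only needs a dialed *grpc.ClientConn. A hedged sketch of that wiring follows; the endpoint string is an assumption, the extra import is "google.golang.org/grpc/credentials", and the per-RPC OAuth credentials a real Dataproc call would need are deliberately omitted.

func dialClusterController(ctx context.Context) (ClusterControllerClient, *grpc.ClientConn, error) {
	// Illustrative sketch only: TLS transport security, no OAuth. The caller
	// is responsible for closing the returned connection.
	conn, err := grpc.DialContext(ctx,
		"dataproc.googleapis.com:443", // assumed endpoint
		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")),
	)
	if err != nil {
		return nil, nil, err
	}
	return NewClusterControllerClient(conn), conn, nil
}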
+ DiagnoseCluster(context.Context, *DiagnoseClusterRequest) (*longrunning.Operation, error) +} + +func RegisterClusterControllerServer(s *grpc.Server, srv ClusterControllerServer) { + s.RegisterService(&_ClusterController_serviceDesc, srv) +} + +func _ClusterController_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).UpdateCluster(ctx, req.(*UpdateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.ClusterController/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.dataproc.v1beta2.ClusterController/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterController_DiagnoseCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DiagnoseClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterControllerServer).DiagnoseCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterControllerServer).DiagnoseCluster(ctx, req.(*DiagnoseClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClusterController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1beta2.ClusterController", + HandlerType: (*ClusterControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateCluster", + Handler: _ClusterController_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _ClusterController_UpdateCluster_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _ClusterController_DeleteCluster_Handler, + }, + { + MethodName: "GetCluster", + Handler: _ClusterController_GetCluster_Handler, + }, + { + MethodName: "ListClusters", + Handler: _ClusterController_ListClusters_Handler, + }, + { + MethodName: "DiagnoseCluster", + Handler: _ClusterController_DiagnoseCluster_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1beta2/clusters.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1beta2/clusters.proto", fileDescriptor_clusters_794cf83e9cf994b6) +} + +var fileDescriptor_clusters_794cf83e9cf994b6 = []byte{ + // 2227 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0x4b, 0x73, 0x1b, 0xc7, + 0x11, 0xd6, 0x12, 0x7c, 0x80, 0x0d, 0xe2, 0xc1, 0x31, 0xcd, 0xc0, 0x74, 0x14, 0xcb, 0xeb, 0xc4, + 0xa1, 0x1d, 0x05, 0xb0, 0xa9, 0xb8, 0xec, 0x48, 0x91, 0x4b, 0x14, 0x09, 0x91, 0xb4, 0x28, 0x8a, + 0x59, 0x00, 0x52, 0x94, 0x44, 0xb5, 0x35, 0xd8, 0x1d, 0x40, 0x63, 0xee, 0x2b, 0x3b, 0xb3, 0xb2, + 0x21, 0x95, 0xaa, 0x52, 0xce, 0x29, 0x95, 0x63, 0x7e, 0x40, 0x7c, 0xce, 0xc1, 0x07, 0x57, 0xaa, + 0x52, 0x39, 0xe4, 0x96, 0x7f, 0xe0, 0xaa, 0xfc, 0x82, 0x1c, 0x72, 0xc9, 0x3d, 0xc7, 0xd4, 0x3c, + 0x16, 0xd8, 0x05, 0x29, 0x81, 0x64, 0x54, 0x3e, 0x11, 0xdb, 0xd3, 0xfd, 0xf5, 0x37, 0x3d, 0x3d, + 0x3d, 0x3d, 0x43, 0xb8, 0x3c, 0x08, 0xc3, 0x81, 0x47, 0x9a, 0x8e, 0x17, 0x26, 0x6e, 0xd3, 0xc5, + 0x1c, 0x47, 0x71, 0xe8, 0x34, 0x1f, 0xbf, 0xdf, 0x23, 0x1c, 0x6f, 0x34, 0x1d, 0x2f, 0x61, 0x9c, + 0xc4, 0xac, 0x11, 0xc5, 0x21, 0x0f, 0xd1, 0x45, 0xa5, 0xdd, 0x90, 0xda, 0x8d, 0x54, 0xbb, 0xa1, + 0xb5, 0xd7, 0xbe, 0xab, 0xc1, 0x70, 0x44, 0x9b, 0x38, 0x08, 0x42, 0x8e, 0x39, 0x0d, 0x03, 0x6d, + 0xbc, 0xd6, 0x78, 0xb1, 0xab, 0x30, 0x22, 0x71, 0x4e, 0xff, 0xdd, 0x17, 0xeb, 0xb3, 0x47, 0x38, + 0x26, 0xae, 0xd6, 0x7d, 0x4b, 0xeb, 0x7a, 0x61, 0x30, 0x88, 0x93, 0x20, 0xa0, 0xc1, 0xe0, 0x38, + 0xe0, 0xf7, 0xb4, 0x92, 0xfc, 0xea, 0x25, 0xfd, 0xa6, 0x9b, 0x28, 0x05, 0x3d, 0x7e, 0x69, 0x72, + 0xbc, 0x4f, 0x89, 0xe7, 0xda, 0x3e, 0x66, 0x47, 0x5a, 0xe3, 0x8d, 0x49, 0x0d, 
0x4e, 0x7d, 0xc2, + 0x38, 0xf6, 0x23, 0xa5, 0x60, 0xfe, 0x69, 0x16, 0x16, 0xb6, 0x54, 0xcc, 0xd0, 0x45, 0x80, 0x28, + 0x0e, 0x3f, 0x25, 0x0e, 0xb7, 0xa9, 0x5b, 0x37, 0x2e, 0x19, 0xeb, 0x8b, 0xd6, 0xa2, 0x96, 0xec, + 0xb9, 0xe8, 0x4d, 0x58, 0xd2, 0xd1, 0xb5, 0x03, 0xec, 0x93, 0xfa, 0x8c, 0x54, 0x28, 0x69, 0xd9, + 0x01, 0xf6, 0x09, 0xda, 0x86, 0x79, 0x27, 0x0c, 0xfa, 0x74, 0x50, 0x2f, 0x5c, 0x32, 0xd6, 0x4b, + 0x1b, 0x97, 0x1b, 0x2f, 0x8c, 0x7f, 0x43, 0x7b, 0xde, 0x92, 0x36, 0x96, 0xb6, 0x45, 0x9f, 0xc0, + 0xbc, 0x87, 0x7b, 0xc4, 0x63, 0xf5, 0xe2, 0xa5, 0xc2, 0x7a, 0x69, 0x63, 0xe3, 0x74, 0x28, 0x8d, + 0x7d, 0x69, 0xd4, 0x0a, 0x78, 0x3c, 0xb4, 0x34, 0x82, 0x60, 0xc4, 0x38, 0xe6, 0x09, 0xab, 0xcf, + 0x9e, 0x85, 0x51, 0x5b, 0xda, 0x58, 0xda, 0x16, 0xb5, 0xa1, 0xa2, 0x7e, 0xd9, 0x8f, 0x28, 0xe3, + 0x61, 0x3c, 0xac, 0x2f, 0x48, 0x66, 0x67, 0x43, 0x2b, 0x2b, 0x8c, 0x5d, 0x05, 0x91, 0x8d, 0x67, + 0x92, 0x50, 0xb7, 0x3e, 0x9f, 0x8b, 0x67, 0x37, 0xa1, 0x2e, 0xda, 0x81, 0x05, 0x9f, 0xf0, 0x98, + 0x3a, 0xac, 0xbe, 0x28, 0xe9, 0xff, 0xf8, 0x74, 0x0e, 0xef, 0x28, 0x23, 0x2b, 0xb5, 0x5e, 0xfb, + 0x29, 0x94, 0x32, 0xd1, 0x41, 0x35, 0x28, 0x1c, 0x91, 0xa1, 0x5e, 0x62, 0xf1, 0x13, 0xad, 0xc0, + 0xdc, 0x63, 0xec, 0x25, 0xe9, 0xaa, 0xaa, 0x8f, 0xab, 0x33, 0x1f, 0x19, 0xe6, 0xd7, 0xf3, 0x50, + 0xce, 0xad, 0x13, 0x7a, 0x0b, 0xca, 0x6a, 0xa5, 0xec, 0x5e, 0xe2, 0x1c, 0x11, 0xae, 0x71, 0x96, + 0x94, 0xf0, 0xa6, 0x94, 0xa1, 0x87, 0x80, 0x06, 0x0e, 0xb1, 0xd3, 0x19, 0xea, 0xb4, 0x28, 0xca, + 0x59, 0x34, 0xa7, 0xcc, 0x62, 0xc7, 0x21, 0xf9, 0xcc, 0xa8, 0x0d, 0x26, 0x24, 0xe8, 0x3e, 0x94, + 0x7d, 0x9c, 0x45, 0x56, 0xf1, 0x99, 0x96, 0x2a, 0x7b, 0x01, 0xe3, 0x38, 0x70, 0xc8, 0x4e, 0x1c, + 0x26, 0x91, 0x06, 0x5f, 0x52, 0x40, 0x63, 0xe0, 0xcf, 0xc2, 0xf8, 0x68, 0x0c, 0x0c, 0xe7, 0x07, + 0x56, 0x40, 0x1a, 0xf8, 0x53, 0xf8, 0x0e, 0x23, 0x4e, 0x18, 0xb8, 0x38, 0x1e, 0xda, 0x79, 0x17, + 0x4b, 0xe7, 0x76, 0xf1, 0xea, 0x08, 0xf2, 0x7e, 0xd6, 0xd7, 0x3d, 0xa8, 0xb2, 0xb0, 0xcf, 0x3f, + 0xc3, 0x31, 0x49, 0x7d, 0x94, 0x4f, 0x95, 0x3f, 0x6d, 0x6d, 0xa5, 0xe1, 0x2b, 0x2c, 0xf7, 0x8d, + 0x1e, 0x40, 0xcd, 0xa3, 0x7d, 0xe2, 0x0c, 0x1d, 0x6f, 0x04, 0x5c, 0x91, 0xc0, 0x8d, 0x29, 0xc0, + 0xfb, 0xa9, 0x99, 0x46, 0xae, 0x7a, 0x79, 0x01, 0x0a, 0x60, 0x95, 0x06, 0x94, 0x53, 0xec, 0xd1, + 0x27, 0xb2, 0xc6, 0xd9, 0xd8, 0x91, 0xb5, 0xb0, 0x5e, 0x92, 0x5b, 0xed, 0xc3, 0x29, 0x0e, 0x0e, + 0x42, 0x97, 0xec, 0xe5, 0x00, 0x36, 0xa5, 0xbd, 0xf5, 0x2a, 0x3d, 0x41, 0xca, 0xd0, 0xaf, 0x61, + 0x99, 0x04, 0x4e, 0x3c, 0x8c, 0xa4, 0x2f, 0x3d, 0x97, 0xea, 0xa9, 0xd2, 0xb3, 0x35, 0xb2, 0x4b, + 0xd3, 0x93, 0x4c, 0x48, 0xcc, 0x1b, 0x50, 0x9b, 0xd4, 0x42, 0x97, 0xe1, 0x15, 0xb1, 0x23, 0x22, + 0xd7, 0x3e, 0xf2, 0x99, 0x7d, 0x44, 0x86, 0xaa, 0x8c, 0xaa, 0xcd, 0x53, 0x1d, 0x38, 0xe4, 0xd0, + 0xbd, 0xed, 0xb3, 0xdb, 0x64, 0x28, 0x4a, 0xa9, 0xf9, 0xe7, 0x02, 0xd4, 0x26, 0xf7, 0x01, 0x7a, + 0x0d, 0x8a, 0x4f, 0xc2, 0x80, 0xd8, 0x49, 0x4c, 0xb5, 0xdd, 0x82, 0xf8, 0xee, 0xc6, 0x14, 0xbd, + 0x01, 0xa5, 0x80, 0x70, 0x91, 0x57, 0x72, 0x54, 0x6d, 0x63, 0xd0, 0x22, 0xa1, 0xf0, 0x03, 0xa8, + 0xb0, 0xa4, 0x97, 0xd5, 0x51, 0x05, 0xa7, 0x3c, 0x96, 0x0a, 0xb5, 0x75, 0xa8, 0xd1, 0x80, 0x93, + 0x38, 0xc0, 0x9e, 0x4d, 0x23, 0x3b, 0x0c, 0x3c, 0x51, 0xec, 0x8c, 0xf5, 0xa2, 0x55, 0x49, 0xe5, + 0x7b, 0xd1, 0xdd, 0xc0, 0x1b, 0xa2, 0x1f, 0x42, 0x95, 0x91, 0xf8, 0x31, 0x75, 0x88, 0x8d, 0x1d, + 0x27, 0x4c, 0x02, 0x2e, 0xb7, 0xf7, 0xa2, 0x55, 0xd1, 0xe2, 0x4d, 0x25, 0x45, 0x3f, 0x81, 0xd5, + 0x09, 0x45, 0x9b, 0x39, 0x61, 0x44, 0x58, 0xbd, 0x70, 0xa9, 0xb0, 0xbe, 0x68, 0xad, 0xe4, 0xf5, + 0xdb, 
0x72, 0x0c, 0x21, 0x98, 0xe5, 0x78, 0x20, 0xea, 0xb6, 0xd0, 0x91, 0xbf, 0xd1, 0x03, 0x28, + 0xfa, 0x84, 0x63, 0xb1, 0x22, 0xf5, 0x39, 0x99, 0x16, 0xd7, 0xcf, 0x58, 0x4a, 0x1a, 0x77, 0xb4, + 0xbd, 0x3a, 0x26, 0x46, 0x70, 0x6b, 0xd7, 0xa0, 0x9c, 0x1b, 0x3a, 0x53, 0x8d, 0xfc, 0xed, 0x2c, + 0xbc, 0x72, 0xc2, 0xf6, 0x14, 0x95, 0x32, 0x48, 0x7c, 0x9b, 0xea, 0x21, 0x26, 0xd1, 0xe6, 0xac, + 0xa5, 0x20, 0xf1, 0x53, 0x75, 0x26, 0x16, 0x26, 0x55, 0x90, 0x19, 0xc1, 0xea, 0x33, 0x72, 0xca, + 0xe5, 0x54, 0x2a, 0xf2, 0x81, 0xa1, 0xd7, 0x61, 0x91, 0xfa, 0x78, 0xa0, 0x16, 0xbf, 0x20, 0x19, + 0x14, 0xa5, 0x40, 0xaf, 0x9a, 0x8f, 0x9d, 0x47, 0x34, 0x20, 0x36, 0x1f, 0x46, 0x4a, 0x67, 0x56, + 0x2d, 0x86, 0x96, 0x77, 0x86, 0x91, 0xd4, 0xfc, 0x04, 0x4a, 0x2e, 0x65, 0x47, 0x69, 0xc6, 0xcf, + 0xc9, 0x8c, 0x7f, 0x67, 0x4a, 0x14, 0xb7, 0x29, 0x3b, 0xd2, 0xb9, 0x0e, 0xee, 0xe8, 0xb7, 0x64, + 0xce, 0xec, 0x28, 0x26, 0xc4, 0x8f, 0x38, 0xed, 0x79, 0x44, 0xa6, 0x54, 0xd1, 0x2a, 0x53, 0x76, + 0x38, 0x16, 0x22, 0x07, 0x56, 0x7c, 0x1c, 0xe0, 0x01, 0x71, 0xed, 0x81, 0x08, 0x4e, 0xea, 0x7b, + 0x41, 0xfa, 0x7e, 0x7f, 0x8a, 0xef, 0x3b, 0xca, 0x34, 0x5b, 0xf5, 0x90, 0x7f, 0x4c, 0x86, 0x3a, + 0xb0, 0x84, 0x1d, 0x87, 0x78, 0xa2, 0x85, 0x0a, 0xe3, 0xb4, 0x75, 0x78, 0x6f, 0x0a, 0xf8, 0xe6, + 0xd8, 0x24, 0x2d, 0xda, 0x59, 0x14, 0x19, 0x57, 0x1a, 0xd8, 0x4e, 0x94, 0xd8, 0x91, 0x87, 0x79, + 0x3f, 0x8c, 0x7d, 0x79, 0xd2, 0x88, 0xb8, 0xd2, 0x60, 0x2b, 0x4a, 0x0e, 0xb5, 0xd4, 0xfc, 0xbd, + 0x01, 0xe8, 0x38, 0x55, 0x91, 0xfb, 0xa3, 0xc5, 0xe5, 0xc4, 0x17, 0x20, 0x24, 0xbb, 0xef, 0x57, + 0xd2, 0xd1, 0x8e, 0x1e, 0x94, 0x7d, 0xd4, 0x75, 0x78, 0x7d, 0x64, 0xa5, 0x42, 0xa6, 0x66, 0x9c, + 0xeb, 0xbc, 0xea, 0x34, 0x9b, 0x71, 0xca, 0xb7, 0x6c, 0xc3, 0xcc, 0x18, 0x96, 0x8f, 0x4d, 0x0c, + 0xbd, 0x07, 0x2b, 0x99, 0xa9, 0x8d, 0xd3, 0x44, 0xf1, 0x40, 0x99, 0xb1, 0x34, 0x55, 0x7e, 0x04, + 0xcb, 0x59, 0x0b, 0xb5, 0xc5, 0x67, 0x64, 0x06, 0xd7, 0x70, 0x16, 0x3f, 0x09, 0xb8, 0xf9, 0x3b, + 0x03, 0x60, 0x9c, 0x26, 0xe8, 0xfb, 0x50, 0xe9, 0x85, 0x21, 0xb7, 0x65, 0xae, 0x09, 0x5f, 0x3a, + 0x65, 0x97, 0x84, 0x54, 0xe8, 0x09, 0x27, 0xe8, 0x1d, 0x58, 0x1e, 0x6b, 0x31, 0xfa, 0x84, 0xd8, + 0x83, 0x9e, 0xde, 0x23, 0x95, 0x54, 0xb1, 0x4d, 0x9f, 0x90, 0x9d, 0x9e, 0x00, 0x14, 0x5b, 0xc9, + 0x0b, 0x1d, 0xec, 0xd9, 0x8c, 0xb9, 0x4c, 0x33, 0x11, 0x7b, 0x69, 0x5f, 0x08, 0xdb, 0xcc, 0x65, + 0xe6, 0xbf, 0x0d, 0xa8, 0x4e, 0x1c, 0x35, 0x68, 0x13, 0xaa, 0xd4, 0xf5, 0x88, 0xed, 0x12, 0x8f, + 0x70, 0x62, 0x73, 0xee, 0x49, 0x17, 0xa5, 0x8d, 0xd7, 0xd2, 0xe4, 0x48, 0xbb, 0xe3, 0xc6, 0xb6, + 0xee, 0xaf, 0xad, 0xb2, 0xb0, 0xd8, 0x96, 0x06, 0x1d, 0xee, 0xa1, 0x5b, 0x50, 0xc3, 0x09, 0x0f, + 0x47, 0x10, 0x54, 0x2f, 0x42, 0x69, 0x63, 0xed, 0x18, 0x46, 0x27, 0xed, 0xb0, 0x77, 0x2f, 0x58, + 0x15, 0x61, 0xa5, 0x61, 0xa8, 0x4f, 0xd0, 0x16, 0x54, 0x73, 0x38, 0xdc, 0xd3, 0x8d, 0xf2, 0xf3, + 0xa9, 0xec, 0x5e, 0xb0, 0xca, 0x19, 0x14, 0xee, 0xdd, 0x9c, 0x83, 0x02, 0xe7, 0x9e, 0xf9, 0x07, + 0x03, 0xea, 0xcf, 0x3b, 0xf4, 0x44, 0x6d, 0x26, 0x9f, 0x13, 0x27, 0xe1, 0xb8, 0xe7, 0x11, 0xbb, + 0x4f, 0xbd, 0x34, 0xdf, 0x2a, 0x63, 0xf1, 0x2d, 0xea, 0x11, 0x74, 0x0b, 0x96, 0x95, 0x44, 0x9c, + 0x82, 0x62, 0x5e, 0x61, 0xc2, 0xf5, 0xd4, 0x5e, 0x10, 0x9e, 0xda, 0xc8, 0xa6, 0xa3, 0x4c, 0xcc, + 0x2f, 0x0b, 0xa3, 0x2e, 0x51, 0x75, 0xbb, 0x68, 0x17, 0xe6, 0x44, 0xbf, 0xab, 0x1c, 0x57, 0x4e, + 0xdb, 0xc4, 0x2b, 0xe3, 0x86, 0xf8, 0x43, 0x2c, 0x05, 0x80, 0x56, 0x61, 0xde, 0x25, 0x1c, 0x53, + 0x4f, 0x27, 0xbe, 0xfe, 0x42, 0xdb, 0x50, 0x93, 0x0a, 0x36, 0xe3, 0x38, 0xe6, 0x6a, 0x55, 0x0a, + 0xd3, 0x56, 0xc5, 0x92, 0x9d, 
0x3c, 0x69, 0x0b, 0x13, 0xb9, 0x26, 0x3f, 0x87, 0x22, 0x4b, 0x7a, + 0x8a, 0xea, 0xac, 0xa4, 0xfa, 0xc1, 0xd9, 0xa8, 0x6a, 0x63, 0x6b, 0x04, 0x63, 0xde, 0x83, 0x39, + 0x39, 0x01, 0x54, 0x82, 0x85, 0xee, 0xc1, 0xed, 0x83, 0xbb, 0xf7, 0x0f, 0x6a, 0x17, 0xd0, 0x12, + 0x14, 0xb7, 0xac, 0xd6, 0x66, 0x67, 0xef, 0x60, 0xa7, 0x66, 0x88, 0x21, 0xab, 0x7b, 0x70, 0x20, + 0x3e, 0x66, 0xd0, 0x22, 0xcc, 0xb5, 0x2c, 0xeb, 0xae, 0x55, 0x2b, 0x08, 0xad, 0xed, 0xd6, 0x7e, + 0x4b, 0x6a, 0xcd, 0x8a, 0xaf, 0xee, 0xe1, 0xb6, 0xb2, 0x99, 0x33, 0x7f, 0x06, 0xc5, 0xd4, 0x1b, + 0xaa, 0x42, 0xa9, 0x7b, 0xd0, 0x3e, 0x6c, 0x6d, 0xed, 0xdd, 0xda, 0x6b, 0x6d, 0xd7, 0x2e, 0xa0, + 0x32, 0x2c, 0x76, 0x0f, 0x76, 0x5b, 0x9b, 0xfb, 0x9d, 0xdd, 0x07, 0x35, 0x03, 0xd5, 0x60, 0xa9, + 0xdd, 0xd9, 0xdc, 0x6f, 0xd9, 0xed, 0xce, 0x66, 0xa7, 0xdb, 0xae, 0xcd, 0x98, 0xdf, 0x18, 0x50, + 0xc9, 0xf7, 0x77, 0xe2, 0x7c, 0x52, 0x67, 0xca, 0x63, 0x12, 0x33, 0x1a, 0x06, 0x69, 0x27, 0x2f, + 0x85, 0xf7, 0x94, 0x0c, 0x3d, 0x94, 0xd7, 0xc2, 0x88, 0xc4, 0x9c, 0xea, 0xb3, 0x69, 0xfa, 0xb1, + 0x9b, 0xf7, 0xd3, 0x38, 0x1c, 0xd9, 0xab, 0x63, 0x37, 0x03, 0xb8, 0x76, 0x1d, 0xaa, 0x13, 0xc3, + 0x67, 0x3a, 0x7a, 0xff, 0x39, 0x03, 0x95, 0xfc, 0xad, 0x07, 0x61, 0x58, 0x7a, 0xe4, 0xf6, 0x99, + 0x9d, 0x5e, 0x9d, 0x0c, 0x49, 0xf9, 0xe3, 0x33, 0x5d, 0x9d, 0x1a, 0xbb, 0x6e, 0x9f, 0xe9, 0xdf, + 0x8a, 0x73, 0xe9, 0xd1, 0x58, 0x22, 0x5c, 0x0c, 0x71, 0x1c, 0x8c, 0x5c, 0xcc, 0x9c, 0xc7, 0xc5, + 0x03, 0x1c, 0x07, 0x79, 0x17, 0xc3, 0xb1, 0x64, 0xed, 0x63, 0xa8, 0x4d, 0x72, 0x98, 0x16, 0x98, + 0x42, 0x26, 0x30, 0xc2, 0x7e, 0xd2, 0xc1, 0x59, 0xec, 0xcd, 0xaf, 0x0c, 0x58, 0xd9, 0x8a, 0x09, + 0xe6, 0x69, 0x03, 0x65, 0x91, 0xdf, 0x24, 0x84, 0xf1, 0x69, 0xcf, 0x04, 0xab, 0x30, 0x1f, 0x93, + 0x81, 0x48, 0x26, 0x55, 0xf1, 0xf5, 0x17, 0xba, 0x01, 0x0b, 0xfa, 0x32, 0xa8, 0xeb, 0xcb, 0xdb, + 0xa7, 0x8b, 0x96, 0x95, 0x9a, 0x09, 0xc7, 0xb1, 0xe2, 0x20, 0x1c, 0xab, 0xf6, 0x66, 0x51, 0x4b, + 0xf6, 0x5c, 0xf3, 0x3f, 0x33, 0xb0, 0xd2, 0x8d, 0xdc, 0xff, 0x83, 0xf0, 0x5c, 0x8e, 0xf0, 0x29, + 0xde, 0x3b, 0x32, 0x73, 0x2a, 0x9c, 0x6f, 0x4e, 0x0f, 0xe1, 0xe2, 0x20, 0xc6, 0x0e, 0xe9, 0x27, + 0x9e, 0xed, 0x12, 0x27, 0xf4, 0x7d, 0xca, 0x58, 0xb6, 0x16, 0xcf, 0x4f, 0xab, 0xc5, 0xaf, 0xa7, + 0xf6, 0xdb, 0x19, 0x73, 0x5d, 0x96, 0xd1, 0x35, 0x28, 0x25, 0x32, 0x24, 0xf2, 0x51, 0x48, 0xbf, + 0x81, 0x1c, 0xaf, 0x8e, 0xb7, 0x28, 0xf1, 0xdc, 0x3b, 0x98, 0x1d, 0x59, 0xa0, 0xd4, 0xc5, 0xef, + 0x89, 0x78, 0x2f, 0x4c, 0xc6, 0xfb, 0x6b, 0x03, 0x56, 0xd4, 0xa9, 0xf4, 0x72, 0x12, 0xe4, 0x14, + 0xf1, 0x9e, 0x7c, 0x32, 0x99, 0x3d, 0xfe, 0x64, 0x92, 0x27, 0x3d, 0x37, 0x49, 0xda, 0x87, 0xe5, + 0x1d, 0xc2, 0xbf, 0x2d, 0xc2, 0xe6, 0x97, 0x06, 0xbc, 0xb2, 0x4f, 0x59, 0xea, 0x90, 0x9d, 0xd9, + 0xe3, 0x6c, 0xce, 0xe3, 0x2a, 0xcc, 0xf7, 0xa9, 0x27, 0xd2, 0x4d, 0xa7, 0xaa, 0xfa, 0x12, 0x77, + 0x83, 0x48, 0x94, 0x71, 0xd1, 0x42, 0xe9, 0xbe, 0xa8, 0x28, 0x04, 0xa2, 0x77, 0x92, 0xbe, 0xc4, + 0x20, 0x0f, 0x8f, 0x48, 0x3a, 0x05, 0xa9, 0xde, 0x11, 0x02, 0xf3, 0x0b, 0x03, 0x56, 0xf2, 0x14, + 0x59, 0x14, 0x06, 0x8c, 0xa0, 0x9b, 0x50, 0x4c, 0x5f, 0x53, 0x75, 0x09, 0x3d, 0x6d, 0x76, 0x8f, + 0xec, 0xd0, 0xdb, 0x50, 0x0d, 0xc8, 0xe7, 0xdc, 0xce, 0x10, 0x50, 0x51, 0x2a, 0x0b, 0xf1, 0xe1, + 0x88, 0x44, 0x0c, 0xab, 0xdb, 0x14, 0x0f, 0x82, 0x90, 0x7d, 0x6b, 0xc9, 0x64, 0x7e, 0x78, 0x82, + 0x4f, 0x96, 0x78, 0x9c, 0x09, 0x9f, 0x61, 0xc2, 0xa3, 0x84, 0x67, 0x1a, 0xe4, 0x45, 0x25, 0xe9, + 0xc6, 0x74, 0xe3, 0xbf, 0x45, 0x58, 0x1e, 0x5f, 0x2a, 0x79, 0x1c, 0x7a, 0x1e, 0x89, 0xd1, 0x57, + 0x06, 0x94, 0x73, 0xf5, 0x12, 0x5d, 0x99, 0x16, 0xae, 
0x13, 0xaa, 0xeb, 0xda, 0xc5, 0xd4, 0x28, + 0xf3, 0x32, 0xdc, 0xb8, 0x9b, 0xbe, 0x0c, 0x9b, 0x7b, 0x5f, 0x7c, 0xf3, 0xaf, 0x3f, 0xce, 0x6c, + 0x99, 0x1f, 0x8d, 0x9e, 0x95, 0x75, 0x2c, 0x58, 0xf3, 0xe9, 0x38, 0x4e, 0xcf, 0x9a, 0x2a, 0x0c, + 0xac, 0xf9, 0x54, 0xfd, 0x78, 0x36, 0x7a, 0x18, 0xbf, 0x3a, 0x2a, 0x3d, 0x7f, 0x37, 0xa0, 0x9c, + 0xab, 0x97, 0x53, 0x09, 0x9f, 0x54, 0x5d, 0xa7, 0x11, 0xfe, 0x85, 0x24, 0x6c, 0x6d, 0xec, 0x9c, + 0x97, 0x70, 0xf3, 0x69, 0x76, 0x21, 0x9f, 0x8d, 0xf9, 0xff, 0xc5, 0x80, 0x72, 0xae, 0xfe, 0x4c, + 0xe5, 0x7f, 0x52, 0xb5, 0x9a, 0xc6, 0xff, 0xae, 0xe4, 0xbf, 0xf7, 0xee, 0xcb, 0xe2, 0x8f, 0xfe, + 0x6a, 0x00, 0x8c, 0x4b, 0x10, 0x9a, 0x76, 0x43, 0x3d, 0x56, 0xad, 0xd6, 0x4e, 0xb9, 0x0b, 0x53, + 0xe6, 0xe8, 0xa5, 0x31, 0xff, 0x9b, 0x01, 0x4b, 0xd9, 0x4a, 0x81, 0x36, 0xa6, 0x3e, 0xfa, 0x1d, + 0xab, 0x7c, 0x6b, 0x57, 0xce, 0x64, 0xa3, 0x4a, 0x91, 0x79, 0x43, 0x4e, 0xe5, 0x2a, 0x3a, 0x77, + 0xd6, 0xa3, 0x7f, 0x18, 0x50, 0x9d, 0xd8, 0xed, 0xe8, 0x83, 0xa9, 0xaf, 0x1e, 0x27, 0x55, 0xa4, + 0x69, 0x09, 0xf3, 0x2b, 0xc9, 0xb5, 0x6b, 0x1e, 0xbe, 0xac, 0x84, 0x77, 0x35, 0x8d, 0xab, 0xc6, + 0xbb, 0x37, 0x9f, 0xc2, 0x9b, 0x4e, 0xe8, 0xbf, 0x98, 0xf7, 0xcd, 0xf4, 0x22, 0xc6, 0x0e, 0xc5, + 0xf9, 0x7e, 0x68, 0xfc, 0xb2, 0xa5, 0xf5, 0x07, 0xa1, 0x87, 0x83, 0x41, 0x23, 0x8c, 0x07, 0xcd, + 0x01, 0x09, 0xe4, 0xe9, 0xdf, 0x54, 0x43, 0x38, 0xa2, 0xec, 0x39, 0xff, 0xb7, 0xba, 0x96, 0x0a, + 0x7a, 0xf3, 0xd2, 0xe2, 0xca, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x71, 0x37, 0xfc, 0x0f, 0x83, + 0x1b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/jobs.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/jobs.pb.go new file mode 100644 index 0000000..8ea6e8e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/jobs.pb.go @@ -0,0 +1,3132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1beta2/jobs.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The Log4j level for job execution. When running an +// [Apache Hive](http://hive.apache.org/) job, Cloud +// Dataproc configures the Hive client to an equivalent verbosity level. +type LoggingConfig_Level int32 + +const ( + // Level is unspecified. Use default level for log4j. + LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0 + // Use ALL level for log4j. + LoggingConfig_ALL LoggingConfig_Level = 1 + // Use TRACE level for log4j. + LoggingConfig_TRACE LoggingConfig_Level = 2 + // Use DEBUG level for log4j. 
+ LoggingConfig_DEBUG LoggingConfig_Level = 3 + // Use INFO level for log4j. + LoggingConfig_INFO LoggingConfig_Level = 4 + // Use WARN level for log4j. + LoggingConfig_WARN LoggingConfig_Level = 5 + // Use ERROR level for log4j. + LoggingConfig_ERROR LoggingConfig_Level = 6 + // Use FATAL level for log4j. + LoggingConfig_FATAL LoggingConfig_Level = 7 + // Turn off log4j. + LoggingConfig_OFF LoggingConfig_Level = 8 +) + +var LoggingConfig_Level_name = map[int32]string{ + 0: "LEVEL_UNSPECIFIED", + 1: "ALL", + 2: "TRACE", + 3: "DEBUG", + 4: "INFO", + 5: "WARN", + 6: "ERROR", + 7: "FATAL", + 8: "OFF", +} +var LoggingConfig_Level_value = map[string]int32{ + "LEVEL_UNSPECIFIED": 0, + "ALL": 1, + "TRACE": 2, + "DEBUG": 3, + "INFO": 4, + "WARN": 5, + "ERROR": 6, + "FATAL": 7, + "OFF": 8, +} + +func (x LoggingConfig_Level) String() string { + return proto.EnumName(LoggingConfig_Level_name, int32(x)) +} +func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{0, 0} +} + +// The job state. +type JobStatus_State int32 + +const ( + // The job state is unknown. + JobStatus_STATE_UNSPECIFIED JobStatus_State = 0 + // The job is pending; it has been submitted, but is not yet running. + JobStatus_PENDING JobStatus_State = 1 + // Job has been received by the service and completed initial setup; + // it will soon be submitted to the cluster. + JobStatus_SETUP_DONE JobStatus_State = 8 + // The job is running on the cluster. + JobStatus_RUNNING JobStatus_State = 2 + // A CancelJob request has been received, but is pending. + JobStatus_CANCEL_PENDING JobStatus_State = 3 + // Transient in-flight resources have been canceled, and the request to + // cancel the running job has been issued to the cluster. + JobStatus_CANCEL_STARTED JobStatus_State = 7 + // The job cancellation was successful. + JobStatus_CANCELLED JobStatus_State = 4 + // The job has completed successfully. + JobStatus_DONE JobStatus_State = 5 + // The job has completed, but encountered an error. + JobStatus_ERROR JobStatus_State = 6 + // Job attempt has failed. The detail field contains failure details for + // this attempt. + // + // Applies to restartable jobs only. + JobStatus_ATTEMPT_FAILURE JobStatus_State = 9 +) + +var JobStatus_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PENDING", + 8: "SETUP_DONE", + 2: "RUNNING", + 3: "CANCEL_PENDING", + 7: "CANCEL_STARTED", + 4: "CANCELLED", + 5: "DONE", + 6: "ERROR", + 9: "ATTEMPT_FAILURE", +} +var JobStatus_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PENDING": 1, + "SETUP_DONE": 8, + "RUNNING": 2, + "CANCEL_PENDING": 3, + "CANCEL_STARTED": 7, + "CANCELLED": 4, + "DONE": 5, + "ERROR": 6, + "ATTEMPT_FAILURE": 9, +} + +func (x JobStatus_State) String() string { + return proto.EnumName(JobStatus_State_name, int32(x)) +} +func (JobStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{9, 0} +} + +// The job substate. +type JobStatus_Substate int32 + +const ( + // The job substate is unknown. + JobStatus_UNSPECIFIED JobStatus_Substate = 0 + // The Job is submitted to the agent. + // + // Applies to RUNNING state. + JobStatus_SUBMITTED JobStatus_Substate = 1 + // The Job has been received and is awaiting execution (it may be waiting + // for a condition to be met). See the "details" field for the reason for + // the delay. + // + // Applies to RUNNING state. 
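The job state constants above divide into in-flight and terminal values; the JobStateMatcher documentation further down in this file treats CANCELLED, DONE, and ERROR as the terminal set. A small helper a caller polling job status might write, shown here purely as an illustration:

func jobStateIsTerminal(s JobStatus_State) bool {
	// Illustrative helper, not part of the generated API.
	switch s {
	case JobStatus_CANCELLED, JobStatus_DONE, JobStatus_ERROR:
		return true
	default:
		return false
	}
}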
+ JobStatus_QUEUED JobStatus_Substate = 2 + // The agent-reported status is out of date, which may be caused by a + // loss of communication between the agent and Cloud Dataproc. If the + // agent does not send a timely update, the job will fail. + // + // Applies to RUNNING state. + JobStatus_STALE_STATUS JobStatus_Substate = 3 +) + +var JobStatus_Substate_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "SUBMITTED", + 2: "QUEUED", + 3: "STALE_STATUS", +} +var JobStatus_Substate_value = map[string]int32{ + "UNSPECIFIED": 0, + "SUBMITTED": 1, + "QUEUED": 2, + "STALE_STATUS": 3, +} + +func (x JobStatus_Substate) String() string { + return proto.EnumName(JobStatus_Substate_name, int32(x)) +} +func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{9, 1} +} + +// The application state, corresponding to +// YarnProtos.YarnApplicationStateProto. +type YarnApplication_State int32 + +const ( + // Status is unspecified. + YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0 + // Status is NEW. + YarnApplication_NEW YarnApplication_State = 1 + // Status is NEW_SAVING. + YarnApplication_NEW_SAVING YarnApplication_State = 2 + // Status is SUBMITTED. + YarnApplication_SUBMITTED YarnApplication_State = 3 + // Status is ACCEPTED. + YarnApplication_ACCEPTED YarnApplication_State = 4 + // Status is RUNNING. + YarnApplication_RUNNING YarnApplication_State = 5 + // Status is FINISHED. + YarnApplication_FINISHED YarnApplication_State = 6 + // Status is FAILED. + YarnApplication_FAILED YarnApplication_State = 7 + // Status is KILLED. + YarnApplication_KILLED YarnApplication_State = 8 +) + +var YarnApplication_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "NEW", + 2: "NEW_SAVING", + 3: "SUBMITTED", + 4: "ACCEPTED", + 5: "RUNNING", + 6: "FINISHED", + 7: "FAILED", + 8: "KILLED", +} +var YarnApplication_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "NEW": 1, + "NEW_SAVING": 2, + "SUBMITTED": 3, + "ACCEPTED": 4, + "RUNNING": 5, + "FINISHED": 6, + "FAILED": 7, + "KILLED": 8, +} + +func (x YarnApplication_State) String() string { + return proto.EnumName(YarnApplication_State_name, int32(x)) +} +func (YarnApplication_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{11, 0} +} + +// A matcher that specifies categories of job states. +type ListJobsRequest_JobStateMatcher int32 + +const ( + // Match all jobs, regardless of state. + ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0 + // Only match jobs in non-terminal states: PENDING, RUNNING, or + // CANCEL_PENDING. + ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1 + // Only match jobs in terminal states: CANCELLED, DONE, or ERROR. + ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2 +) + +var ListJobsRequest_JobStateMatcher_name = map[int32]string{ + 0: "ALL", + 1: "ACTIVE", + 2: "NON_ACTIVE", +} +var ListJobsRequest_JobStateMatcher_value = map[string]int32{ + "ALL": 0, + "ACTIVE": 1, + "NON_ACTIVE": 2, +} + +func (x ListJobsRequest_JobStateMatcher) String() string { + return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x)) +} +func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{16, 0} +} + +// The runtime logging config of the job. +type LoggingConfig struct { + // The per-package log levels for the driver. This may include + // "root" package name to configure rootLogger. 
+ // Examples: + // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' + DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1beta2.LoggingConfig_Level"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoggingConfig) Reset() { *m = LoggingConfig{} } +func (m *LoggingConfig) String() string { return proto.CompactTextString(m) } +func (*LoggingConfig) ProtoMessage() {} +func (*LoggingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{0} +} +func (m *LoggingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoggingConfig.Unmarshal(m, b) +} +func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic) +} +func (dst *LoggingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoggingConfig.Merge(dst, src) +} +func (m *LoggingConfig) XXX_Size() int { + return xxx_messageInfo_LoggingConfig.Size(m) +} +func (m *LoggingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LoggingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo + +func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level { + if m != nil { + return m.DriverLogLevels + } + return nil +} + +// A Cloud Dataproc job for running +// [Apache Hadoop +// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) +// jobs on [Apache Hadoop +// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html). +type HadoopJob struct { + // Required. Indicates the location of the driver's main class. Specify + // either the jar file that contains the main class or the main class name. + // To specify both, add the jar file to `jar_file_uris`, and then specify + // the main class name in this property. + // + // Types that are valid to be assigned to Driver: + // *HadoopJob_MainJarFileUri + // *HadoopJob_MainClass + Driver isHadoopJob_Driver `protobuf_oneof:"driver"` + // Optional. The arguments to pass to the driver. Do not + // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as + // job properties, since a collision may occur that causes an incorrect job + // submission. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Optional. Jar file URIs to add to the CLASSPATHs of the + // Hadoop driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied + // to the working directory of Hadoop drivers and distributed tasks. Useful + // for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory of + // Hadoop drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, or .zip. + ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure Hadoop. 
+ // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site and + // classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HadoopJob) Reset() { *m = HadoopJob{} } +func (m *HadoopJob) String() string { return proto.CompactTextString(m) } +func (*HadoopJob) ProtoMessage() {} +func (*HadoopJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{1} +} +func (m *HadoopJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HadoopJob.Unmarshal(m, b) +} +func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic) +} +func (dst *HadoopJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_HadoopJob.Merge(dst, src) +} +func (m *HadoopJob) XXX_Size() int { + return xxx_messageInfo_HadoopJob.Size(m) +} +func (m *HadoopJob) XXX_DiscardUnknown() { + xxx_messageInfo_HadoopJob.DiscardUnknown(m) +} + +var xxx_messageInfo_HadoopJob proto.InternalMessageInfo + +type isHadoopJob_Driver interface { + isHadoopJob_Driver() +} + +type HadoopJob_MainJarFileUri struct { + MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"` +} + +type HadoopJob_MainClass struct { + MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"` +} + +func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {} + +func (*HadoopJob_MainClass) isHadoopJob_Driver() {} + +func (m *HadoopJob) GetDriver() isHadoopJob_Driver { + if m != nil { + return m.Driver + } + return nil +} + +func (m *HadoopJob) GetMainJarFileUri() string { + if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok { + return x.MainJarFileUri + } + return "" +} + +func (m *HadoopJob) GetMainClass() string { + if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok { + return x.MainClass + } + return "" +} + +func (m *HadoopJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *HadoopJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *HadoopJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *HadoopJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *HadoopJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *HadoopJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{ + (*HadoopJob_MainJarFileUri)(nil), + (*HadoopJob_MainClass)(nil), + } +} + +func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HadoopJob) + // driver + switch x := m.Driver.(type) { + case *HadoopJob_MainJarFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainJarFileUri) + case *HadoopJob_MainClass: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainClass) + case nil: + default: + return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x) + } + return nil +} + +func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HadoopJob) + switch tag { + case 1: // driver.main_jar_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &HadoopJob_MainJarFileUri{x} + return true, err + case 2: // driver.main_class + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &HadoopJob_MainClass{x} + return true, err + default: + return false, nil + } +} + +func _HadoopJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HadoopJob) + // driver + switch x := m.Driver.(type) { + case *HadoopJob_MainJarFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainJarFileUri))) + n += len(x.MainJarFileUri) + case *HadoopJob_MainClass: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainClass))) + n += len(x.MainClass) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) +// applications on YARN. +type SparkJob struct { + // Required. The specification of the main method to call to drive the job. + // Specify either the jar file that contains the main class or the main class + // name. To pass both a main jar and a main class in that jar, add the jar to + // `CommonJob.jar_file_uris`, and then specify the main class name in + // `main_class`. + // + // Types that are valid to be assigned to Driver: + // *SparkJob_MainJarFileUri + // *SparkJob_MainClass + Driver isSparkJob_Driver `protobuf_oneof:"driver"` + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision may + // occur that causes an incorrect job submission. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the + // Spark driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS URIs of files to be copied to the working directory of + // Spark drivers and distributed tasks. Useful for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory + // of Spark drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, and .zip. 
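Because the driver is a oneof, a caller building a HadoopJob picks exactly one of the generated wrapper types (HadoopJob_MainJarFileUri or HadoopJob_MainClass) for the Driver field. A minimal sketch follows; the class name, bucket paths, and property value are invented for illustration and are not taken from this file.

func exampleHadoopJob() *HadoopJob {
	// Illustrative sketch only; every concrete value here is made up.
	return &HadoopJob{
		// Exactly one oneof wrapper satisfies the Driver field.
		Driver: &HadoopJob_MainClass{MainClass: "org.example.WordCount"},
		Args:   []string{"gs://example-bucket/input", "gs://example-bucket/output"},
		Properties: map[string]string{
			"mapreduce.job.reduces": "2",
		},
		LoggingConfig: &LoggingConfig{
			DriverLogLevels: map[string]LoggingConfig_Level{
				"root": LoggingConfig_INFO,
			},
		},
	}
}

GetMainClass then reads the value back through the oneof accessor defined above.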
+ ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure Spark. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in + // /etc/spark/conf/spark-defaults.conf and classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SparkJob) Reset() { *m = SparkJob{} } +func (m *SparkJob) String() string { return proto.CompactTextString(m) } +func (*SparkJob) ProtoMessage() {} +func (*SparkJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{2} +} +func (m *SparkJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SparkJob.Unmarshal(m, b) +} +func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic) +} +func (dst *SparkJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_SparkJob.Merge(dst, src) +} +func (m *SparkJob) XXX_Size() int { + return xxx_messageInfo_SparkJob.Size(m) +} +func (m *SparkJob) XXX_DiscardUnknown() { + xxx_messageInfo_SparkJob.DiscardUnknown(m) +} + +var xxx_messageInfo_SparkJob proto.InternalMessageInfo + +type isSparkJob_Driver interface { + isSparkJob_Driver() +} + +type SparkJob_MainJarFileUri struct { + MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"` +} + +type SparkJob_MainClass struct { + MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"` +} + +func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {} + +func (*SparkJob_MainClass) isSparkJob_Driver() {} + +func (m *SparkJob) GetDriver() isSparkJob_Driver { + if m != nil { + return m.Driver + } + return nil +} + +func (m *SparkJob) GetMainJarFileUri() string { + if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok { + return x.MainJarFileUri + } + return "" +} + +func (m *SparkJob) GetMainClass() string { + if x, ok := m.GetDriver().(*SparkJob_MainClass); ok { + return x.MainClass + } + return "" +} + +func (m *SparkJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *SparkJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *SparkJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *SparkJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *SparkJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *SparkJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{ + (*SparkJob_MainJarFileUri)(nil), + (*SparkJob_MainClass)(nil), + } +} + +func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SparkJob) + // driver + switch x := m.Driver.(type) { + case *SparkJob_MainJarFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainJarFileUri) + case *SparkJob_MainClass: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.MainClass) + case nil: + default: + return fmt.Errorf("SparkJob.Driver has unexpected type %T", x) + } + return nil +} + +func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SparkJob) + switch tag { + case 1: // driver.main_jar_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &SparkJob_MainJarFileUri{x} + return true, err + case 2: // driver.main_class + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Driver = &SparkJob_MainClass{x} + return true, err + default: + return false, nil + } +} + +func _SparkJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SparkJob) + // driver + switch x := m.Driver.(type) { + case *SparkJob_MainJarFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainJarFileUri))) + n += len(x.MainJarFileUri) + case *SparkJob_MainClass: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.MainClass))) + n += len(x.MainClass) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running +// [Apache +// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) +// applications on YARN. +type PySparkJob struct { + // Required. The HCFS URI of the main Python file to use as the driver. Must + // be a .py file. + MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"` + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision may + // occur that causes an incorrect job submission. + Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"` + // Optional. HCFS file URIs of Python files to pass to the PySpark + // framework. Supported file types: .py, .egg, and .zip. + PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"` + // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the + // Python driver and tasks. + JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. HCFS URIs of files to be copied to the working directory of + // Python drivers and distributed tasks. Useful for naively parallel tasks. + FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"` + // Optional. HCFS URIs of archives to be extracted in the working directory of + // .jar, .tar, .tar.gz, .tgz, and .zip. 
+ ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"` + // Optional. A mapping of property names to values, used to configure PySpark. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in + // /etc/spark/conf/spark-defaults.conf and classes in user code. + Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PySparkJob) Reset() { *m = PySparkJob{} } +func (m *PySparkJob) String() string { return proto.CompactTextString(m) } +func (*PySparkJob) ProtoMessage() {} +func (*PySparkJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{3} +} +func (m *PySparkJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PySparkJob.Unmarshal(m, b) +} +func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic) +} +func (dst *PySparkJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_PySparkJob.Merge(dst, src) +} +func (m *PySparkJob) XXX_Size() int { + return xxx_messageInfo_PySparkJob.Size(m) +} +func (m *PySparkJob) XXX_DiscardUnknown() { + xxx_messageInfo_PySparkJob.DiscardUnknown(m) +} + +var xxx_messageInfo_PySparkJob proto.InternalMessageInfo + +func (m *PySparkJob) GetMainPythonFileUri() string { + if m != nil { + return m.MainPythonFileUri + } + return "" +} + +func (m *PySparkJob) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *PySparkJob) GetPythonFileUris() []string { + if m != nil { + return m.PythonFileUris + } + return nil +} + +func (m *PySparkJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *PySparkJob) GetFileUris() []string { + if m != nil { + return m.FileUris + } + return nil +} + +func (m *PySparkJob) GetArchiveUris() []string { + if m != nil { + return m.ArchiveUris + } + return nil +} + +func (m *PySparkJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *PySparkJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// A list of queries to run on a cluster. +type QueryList struct { + // Required. The queries to execute. You do not need to terminate a query + // with a semicolon. Multiple queries can be specified in one string + // by separating each with a semicolon. 
Here is an example of an Cloud + // Dataproc API snippet that uses a QueryList to specify a HiveJob: + // + // "hiveJob": { + // "queryList": { + // "queries": [ + // "query1", + // "query2", + // "query3;query4", + // ] + // } + // } + Queries []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryList) Reset() { *m = QueryList{} } +func (m *QueryList) String() string { return proto.CompactTextString(m) } +func (*QueryList) ProtoMessage() {} +func (*QueryList) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{4} +} +func (m *QueryList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryList.Unmarshal(m, b) +} +func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryList.Marshal(b, m, deterministic) +} +func (dst *QueryList) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryList.Merge(dst, src) +} +func (m *QueryList) XXX_Size() int { + return xxx_messageInfo_QueryList.Size(m) +} +func (m *QueryList) XXX_DiscardUnknown() { + xxx_messageInfo_QueryList.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryList proto.InternalMessageInfo + +func (m *QueryList) GetQueries() []string { + if m != nil { + return m.Queries + } + return nil +} + +// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) +// queries on YARN. +type HiveJob struct { + // Required. The sequence of Hive queries to execute, specified as either + // an HCFS file URI or a list of queries. + // + // Types that are valid to be assigned to Queries: + // *HiveJob_QueryFileUri + // *HiveJob_QueryList + Queries isHiveJob_Queries `protobuf_oneof:"queries"` + // Optional. Whether to continue executing queries if a query fails. + // The default value is `false`. Setting to `true` can be useful when + // executing independent parallel queries. + ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"` + // Optional. Mapping of query variable names to values (equivalent to the + // Hive command: `SET name="value";`). + ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names and values, used to configure Hive. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, + // /etc/hive/conf/hive-site.xml, and classes in user code. + Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to add to the CLASSPATH of the + // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes + // and UDFs. 
+ JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HiveJob) Reset() { *m = HiveJob{} } +func (m *HiveJob) String() string { return proto.CompactTextString(m) } +func (*HiveJob) ProtoMessage() {} +func (*HiveJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{5} +} +func (m *HiveJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HiveJob.Unmarshal(m, b) +} +func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic) +} +func (dst *HiveJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_HiveJob.Merge(dst, src) +} +func (m *HiveJob) XXX_Size() int { + return xxx_messageInfo_HiveJob.Size(m) +} +func (m *HiveJob) XXX_DiscardUnknown() { + xxx_messageInfo_HiveJob.DiscardUnknown(m) +} + +var xxx_messageInfo_HiveJob proto.InternalMessageInfo + +type isHiveJob_Queries interface { + isHiveJob_Queries() +} + +type HiveJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type HiveJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*HiveJob_QueryFileUri) isHiveJob_Queries() {} + +func (*HiveJob_QueryList) isHiveJob_Queries() {} + +func (m *HiveJob) GetQueries() isHiveJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *HiveJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *HiveJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*HiveJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *HiveJob) GetContinueOnFailure() bool { + if m != nil { + return m.ContinueOnFailure + } + return false +} + +func (m *HiveJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *HiveJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *HiveJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
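The inline `queries` example in the QueryList comment above maps onto the generated Go types roughly as follows. This is a sketch only, not part of the vendored file; the script variable and import path are assumptions:

```go
package main

import (
	"fmt"

	dataproc "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2"
)

func main() {
	// A HiveJob whose `queries` oneof carries an inline QueryList rather than
	// an HCFS file URI; several statements may share one string, separated by
	// semicolons, as the QueryList comment describes.
	hive := &dataproc.HiveJob{
		Queries: &dataproc.HiveJob_QueryList{
			QueryList: &dataproc.QueryList{
				Queries: []string{"query1", "query2", "query3;query4"},
			},
		},
		ContinueOnFailure: true,
		ScriptVariables:   map[string]string{"env": "staging"}, // SET env="staging";
	}

	fmt.Println(hive.GetQueryList().GetQueries()) // [query1 query2 query3;query4]
	fmt.Println(hive.GetQueryFileUri())           // "" — the other variant is unset
}
```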
+func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{ + (*HiveJob_QueryFileUri)(nil), + (*HiveJob_QueryList)(nil), + } +} + +func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HiveJob) + // queries + switch x := m.Queries.(type) { + case *HiveJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *HiveJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HiveJob.Queries has unexpected type %T", x) + } + return nil +} + +func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HiveJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &HiveJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &HiveJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _HiveJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HiveJob) + // queries + switch x := m.Queries.(type) { + case *HiveJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *HiveJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Spark +// SQL](http://spark.apache.org/sql/) queries. +type SparkSqlJob struct { + // Required. The sequence of Spark SQL queries to execute, specified as + // either an HCFS file URI or as a list of queries. + // + // Types that are valid to be assigned to Queries: + // *SparkSqlJob_QueryFileUri + // *SparkSqlJob_QueryList + Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"` + // Optional. Mapping of query variable names to values (equivalent to the + // Spark SQL command: SET `name="value";`). + ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names to values, used to configure + // Spark SQL's SparkConf. Properties that conflict with values set by the + // Cloud Dataproc API may be overwritten. + Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. + JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. The runtime log config for job execution. 
+ LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SparkSqlJob) Reset() { *m = SparkSqlJob{} } +func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) } +func (*SparkSqlJob) ProtoMessage() {} +func (*SparkSqlJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{6} +} +func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b) +} +func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic) +} +func (dst *SparkSqlJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_SparkSqlJob.Merge(dst, src) +} +func (m *SparkSqlJob) XXX_Size() int { + return xxx_messageInfo_SparkSqlJob.Size(m) +} +func (m *SparkSqlJob) XXX_DiscardUnknown() { + xxx_messageInfo_SparkSqlJob.DiscardUnknown(m) +} + +var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo + +type isSparkSqlJob_Queries interface { + isSparkSqlJob_Queries() +} + +type SparkSqlJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type SparkSqlJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {} + +func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries() {} + +func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *SparkSqlJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *SparkSqlJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *SparkSqlJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *SparkSqlJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *SparkSqlJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{ + (*SparkSqlJob_QueryFileUri)(nil), + (*SparkSqlJob_QueryList)(nil), + } +} + +func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SparkSqlJob) + // queries + switch x := m.Queries.(type) { + case *SparkSqlJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *SparkSqlJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x) + } + return nil +} + +func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SparkSqlJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &SparkSqlJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &SparkSqlJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SparkSqlJob) + // queries + switch x := m.Queries.(type) { + case *SparkSqlJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *SparkSqlJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) +// queries on YARN. +type PigJob struct { + // Required. The sequence of Pig queries to execute, specified as an HCFS + // file URI or a list of queries. + // + // Types that are valid to be assigned to Queries: + // *PigJob_QueryFileUri + // *PigJob_QueryList + Queries isPigJob_Queries `protobuf_oneof:"queries"` + // Optional. Whether to continue executing queries if a query fails. + // The default value is `false`. Setting to `true` can be useful when + // executing independent parallel queries. + ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"` + // Optional. Mapping of query variable names to values (equivalent to the Pig + // command: `name=[value]`). + ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. A mapping of property names to values, used to configure Pig. + // Properties that conflict with values set by the Cloud Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, + // /etc/pig/conf/pig.properties, and classes in user code. 
+ Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. HCFS URIs of jar files to add to the CLASSPATH of + // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. + JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"` + // Optional. The runtime log config for job execution. + LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PigJob) Reset() { *m = PigJob{} } +func (m *PigJob) String() string { return proto.CompactTextString(m) } +func (*PigJob) ProtoMessage() {} +func (*PigJob) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{7} +} +func (m *PigJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PigJob.Unmarshal(m, b) +} +func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PigJob.Marshal(b, m, deterministic) +} +func (dst *PigJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_PigJob.Merge(dst, src) +} +func (m *PigJob) XXX_Size() int { + return xxx_messageInfo_PigJob.Size(m) +} +func (m *PigJob) XXX_DiscardUnknown() { + xxx_messageInfo_PigJob.DiscardUnknown(m) +} + +var xxx_messageInfo_PigJob proto.InternalMessageInfo + +type isPigJob_Queries interface { + isPigJob_Queries() +} + +type PigJob_QueryFileUri struct { + QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"` +} + +type PigJob_QueryList struct { + QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"` +} + +func (*PigJob_QueryFileUri) isPigJob_Queries() {} + +func (*PigJob_QueryList) isPigJob_Queries() {} + +func (m *PigJob) GetQueries() isPigJob_Queries { + if m != nil { + return m.Queries + } + return nil +} + +func (m *PigJob) GetQueryFileUri() string { + if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok { + return x.QueryFileUri + } + return "" +} + +func (m *PigJob) GetQueryList() *QueryList { + if x, ok := m.GetQueries().(*PigJob_QueryList); ok { + return x.QueryList + } + return nil +} + +func (m *PigJob) GetContinueOnFailure() bool { + if m != nil { + return m.ContinueOnFailure + } + return false +} + +func (m *PigJob) GetScriptVariables() map[string]string { + if m != nil { + return m.ScriptVariables + } + return nil +} + +func (m *PigJob) GetProperties() map[string]string { + if m != nil { + return m.Properties + } + return nil +} + +func (m *PigJob) GetJarFileUris() []string { + if m != nil { + return m.JarFileUris + } + return nil +} + +func (m *PigJob) GetLoggingConfig() *LoggingConfig { + if m != nil { + return m.LoggingConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{ + (*PigJob_QueryFileUri)(nil), + (*PigJob_QueryList)(nil), + } +} + +func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PigJob) + // queries + switch x := m.Queries.(type) { + case *PigJob_QueryFileUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.QueryFileUri) + case *PigJob_QueryList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QueryList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PigJob.Queries has unexpected type %T", x) + } + return nil +} + +func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PigJob) + switch tag { + case 1: // queries.query_file_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Queries = &PigJob_QueryFileUri{x} + return true, err + case 2: // queries.query_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(QueryList) + err := b.DecodeMessage(msg) + m.Queries = &PigJob_QueryList{msg} + return true, err + default: + return false, nil + } +} + +func _PigJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PigJob) + // queries + switch x := m.Queries.(type) { + case *PigJob_QueryFileUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.QueryFileUri))) + n += len(x.QueryFileUri) + case *PigJob_QueryList: + s := proto.Size(x.QueryList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Cloud Dataproc job config. +type JobPlacement struct { + // Required. The name of the cluster where the job will be submitted. + ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Output only. A cluster UUID generated by the Cloud Dataproc service when + // the job is submitted. 
+ ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobPlacement) Reset() { *m = JobPlacement{} } +func (m *JobPlacement) String() string { return proto.CompactTextString(m) } +func (*JobPlacement) ProtoMessage() {} +func (*JobPlacement) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{8} +} +func (m *JobPlacement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobPlacement.Unmarshal(m, b) +} +func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic) +} +func (dst *JobPlacement) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobPlacement.Merge(dst, src) +} +func (m *JobPlacement) XXX_Size() int { + return xxx_messageInfo_JobPlacement.Size(m) +} +func (m *JobPlacement) XXX_DiscardUnknown() { + xxx_messageInfo_JobPlacement.DiscardUnknown(m) +} + +var xxx_messageInfo_JobPlacement proto.InternalMessageInfo + +func (m *JobPlacement) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *JobPlacement) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +// Cloud Dataproc job status. +type JobStatus struct { + // Output only. A state message specifying the overall job state. + State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.JobStatus_State" json:"state,omitempty"` + // Output only. Optional job state details, such as an error + // description if the state is ERROR. + Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"` + // Output only. The time when this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + // Output only. Additional state information, which includes + // status reported by the agent. 
+ Substate JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1beta2.JobStatus_Substate" json:"substate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobStatus) Reset() { *m = JobStatus{} } +func (m *JobStatus) String() string { return proto.CompactTextString(m) } +func (*JobStatus) ProtoMessage() {} +func (*JobStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{9} +} +func (m *JobStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobStatus.Unmarshal(m, b) +} +func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic) +} +func (dst *JobStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobStatus.Merge(dst, src) +} +func (m *JobStatus) XXX_Size() int { + return xxx_messageInfo_JobStatus.Size(m) +} +func (m *JobStatus) XXX_DiscardUnknown() { + xxx_messageInfo_JobStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_JobStatus proto.InternalMessageInfo + +func (m *JobStatus) GetState() JobStatus_State { + if m != nil { + return m.State + } + return JobStatus_STATE_UNSPECIFIED +} + +func (m *JobStatus) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +func (m *JobStatus) GetSubstate() JobStatus_Substate { + if m != nil { + return m.Substate + } + return JobStatus_UNSPECIFIED +} + +// Encapsulates the full scoping used to reference a job. +type JobReference struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Optional. The job ID, which must be unique within the project. The job ID + // is generated by the server upon job submission or provided by the user as a + // means to perform retries without creating duplicate jobs. The ID must + // contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or + // hyphens (-). The maximum length is 100 characters. 
+ JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobReference) Reset() { *m = JobReference{} } +func (m *JobReference) String() string { return proto.CompactTextString(m) } +func (*JobReference) ProtoMessage() {} +func (*JobReference) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{10} +} +func (m *JobReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobReference.Unmarshal(m, b) +} +func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobReference.Marshal(b, m, deterministic) +} +func (dst *JobReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobReference.Merge(dst, src) +} +func (m *JobReference) XXX_Size() int { + return xxx_messageInfo_JobReference.Size(m) +} +func (m *JobReference) XXX_DiscardUnknown() { + xxx_messageInfo_JobReference.DiscardUnknown(m) +} + +var xxx_messageInfo_JobReference proto.InternalMessageInfo + +func (m *JobReference) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *JobReference) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A YARN application created by a job. Application information is a subset of +// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. +// +// **Beta Feature**: This report is available for testing purposes only. It may +// be changed before final release. +type YarnApplication struct { + // Required. The application name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The application state. + State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.YarnApplication_State" json:"state,omitempty"` + // Required. The numerical progress of the application, from 1 to 100. + Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"` + // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or + // TimelineServer that provides application-specific information. The URL uses + // the internal hostname, and requires a proxy server for resolution and, + // possibly, access. 
+ TrackingUrl string `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *YarnApplication) Reset() { *m = YarnApplication{} } +func (m *YarnApplication) String() string { return proto.CompactTextString(m) } +func (*YarnApplication) ProtoMessage() {} +func (*YarnApplication) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{11} +} +func (m *YarnApplication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_YarnApplication.Unmarshal(m, b) +} +func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic) +} +func (dst *YarnApplication) XXX_Merge(src proto.Message) { + xxx_messageInfo_YarnApplication.Merge(dst, src) +} +func (m *YarnApplication) XXX_Size() int { + return xxx_messageInfo_YarnApplication.Size(m) +} +func (m *YarnApplication) XXX_DiscardUnknown() { + xxx_messageInfo_YarnApplication.DiscardUnknown(m) +} + +var xxx_messageInfo_YarnApplication proto.InternalMessageInfo + +func (m *YarnApplication) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *YarnApplication) GetState() YarnApplication_State { + if m != nil { + return m.State + } + return YarnApplication_STATE_UNSPECIFIED +} + +func (m *YarnApplication) GetProgress() float32 { + if m != nil { + return m.Progress + } + return 0 +} + +func (m *YarnApplication) GetTrackingUrl() string { + if m != nil { + return m.TrackingUrl + } + return "" +} + +// A Cloud Dataproc job resource. +type Job struct { + // Optional. The fully qualified reference to the job, which can be used to + // obtain the equivalent REST path of the job resource. If this property + // is not specified when a job is created, the server generates a + // job_id. + Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"` + // Required. Job information, including how, when, and where to + // run the job. + Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"` + // Required. The application/framework-specific portion of the job. + // + // Types that are valid to be assigned to TypeJob: + // *Job_HadoopJob + // *Job_SparkJob + // *Job_PysparkJob + // *Job_HiveJob + // *Job_PigJob + // *Job_SparkSqlJob + TypeJob isJob_TypeJob `protobuf_oneof:"type_job"` + // Output only. The job status. Additional application-specific + // status information may be contained in the type_job + // and yarn_applications fields. + Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous job status. + StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. The collection of YARN applications spun up by this job. + // + // **Beta** Feature: This report is available for testing purposes only. It + // may be changed before final release. + YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"` + // Output only. The email address of the user submitting the job. For jobs + // submitted on the cluster, the address is username@hostname. 
+ SubmittedBy string `protobuf:"bytes,10,opt,name=submitted_by,json=submittedBy,proto3" json:"submitted_by,omitempty"` + // Output only. A URI pointing to the location of the stdout of the job's + // driver program. + DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"` + // Output only. If present, the location of miscellaneous control files + // which may be used as part of job setup and handling. If not present, + // control files may be placed in the same location as `driver_output_uri`. + DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"` + // Optional. The labels to associate with this job. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with a job. + Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. Job scheduling configuration. + Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"` + // Output only. A UUID that uniquely identifies a job within the project + // over time. This is in contrast to a user-settable reference.job_id that + // may be reused over time. + JobUuid string `protobuf:"bytes,22,opt,name=job_uuid,json=jobUuid,proto3" json:"job_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{12} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetReference() *JobReference { + if m != nil { + return m.Reference + } + return nil +} + +func (m *Job) GetPlacement() *JobPlacement { + if m != nil { + return m.Placement + } + return nil +} + +type isJob_TypeJob interface { + isJob_TypeJob() +} + +type Job_HadoopJob struct { + HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"` +} + +type Job_SparkJob struct { + SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"` +} + +type Job_PysparkJob struct { + PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"` +} + +type Job_HiveJob struct { + HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"` +} + +type Job_PigJob struct { + PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"` 
+} + +type Job_SparkSqlJob struct { + SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"` +} + +func (*Job_HadoopJob) isJob_TypeJob() {} + +func (*Job_SparkJob) isJob_TypeJob() {} + +func (*Job_PysparkJob) isJob_TypeJob() {} + +func (*Job_HiveJob) isJob_TypeJob() {} + +func (*Job_PigJob) isJob_TypeJob() {} + +func (*Job_SparkSqlJob) isJob_TypeJob() {} + +func (m *Job) GetTypeJob() isJob_TypeJob { + if m != nil { + return m.TypeJob + } + return nil +} + +func (m *Job) GetHadoopJob() *HadoopJob { + if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok { + return x.HadoopJob + } + return nil +} + +func (m *Job) GetSparkJob() *SparkJob { + if x, ok := m.GetTypeJob().(*Job_SparkJob); ok { + return x.SparkJob + } + return nil +} + +func (m *Job) GetPysparkJob() *PySparkJob { + if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok { + return x.PysparkJob + } + return nil +} + +func (m *Job) GetHiveJob() *HiveJob { + if x, ok := m.GetTypeJob().(*Job_HiveJob); ok { + return x.HiveJob + } + return nil +} + +func (m *Job) GetPigJob() *PigJob { + if x, ok := m.GetTypeJob().(*Job_PigJob); ok { + return x.PigJob + } + return nil +} + +func (m *Job) GetSparkSqlJob() *SparkSqlJob { + if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok { + return x.SparkSqlJob + } + return nil +} + +func (m *Job) GetStatus() *JobStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *Job) GetStatusHistory() []*JobStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *Job) GetYarnApplications() []*YarnApplication { + if m != nil { + return m.YarnApplications + } + return nil +} + +func (m *Job) GetSubmittedBy() string { + if m != nil { + return m.SubmittedBy + } + return "" +} + +func (m *Job) GetDriverOutputResourceUri() string { + if m != nil { + return m.DriverOutputResourceUri + } + return "" +} + +func (m *Job) GetDriverControlFilesUri() string { + if m != nil { + return m.DriverControlFilesUri + } + return "" +} + +func (m *Job) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Job) GetScheduling() *JobScheduling { + if m != nil { + return m.Scheduling + } + return nil +} + +func (m *Job) GetJobUuid() string { + if m != nil { + return m.JobUuid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
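Putting the pieces together (again a sketch with invented cluster, bucket, and class names): a Job selects exactly one of the six job types through the `type_job` oneof and targets a cluster via JobPlacement, and the typed getters return nil for every variant that is not set.

```go
package main

import (
	dataproc "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2"
)

func main() {
	job := &dataproc.Job{
		Placement: &dataproc.JobPlacement{ClusterName: "example-cluster"},
		TypeJob: &dataproc.Job_SparkJob{
			SparkJob: &dataproc.SparkJob{
				Driver: &dataproc.SparkJob_MainClass{MainClass: "com.example.WordCount"},
				Args:   []string{"gs://my-bucket/input"},
			},
		},
		Labels: map[string]string{"env": "staging"},
	}

	// GetSparkJob returns nil unless the SparkJob variant is the one that is
	// set; GetHadoopJob, GetHiveJob, GetPigJob, etc. would all return nil here.
	if spark := job.GetSparkJob(); spark != nil {
		_ = spark.GetMainClass()
	}
}
```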
+func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{ + (*Job_HadoopJob)(nil), + (*Job_SparkJob)(nil), + (*Job_PysparkJob)(nil), + (*Job_HiveJob)(nil), + (*Job_PigJob)(nil), + (*Job_SparkSqlJob)(nil), + } +} + +func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Job) + // type_job + switch x := m.TypeJob.(type) { + case *Job_HadoopJob: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HadoopJob); err != nil { + return err + } + case *Job_SparkJob: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkJob); err != nil { + return err + } + case *Job_PysparkJob: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PysparkJob); err != nil { + return err + } + case *Job_HiveJob: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HiveJob); err != nil { + return err + } + case *Job_PigJob: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PigJob); err != nil { + return err + } + case *Job_SparkSqlJob: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkSqlJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.TypeJob has unexpected type %T", x) + } + return nil +} + +func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Job) + switch tag { + case 3: // type_job.hadoop_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HadoopJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_HadoopJob{msg} + return true, err + case 4: // type_job.spark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_SparkJob{msg} + return true, err + case 5: // type_job.pyspark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PySparkJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_PysparkJob{msg} + return true, err + case 6: // type_job.hive_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HiveJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_HiveJob{msg} + return true, err + case 7: // type_job.pig_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PigJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_PigJob{msg} + return true, err + case 12: // type_job.spark_sql_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkSqlJob) + err := b.DecodeMessage(msg) + m.TypeJob = &Job_SparkSqlJob{msg} + return true, err + default: + return false, nil + } +} + +func _Job_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Job) + // type_job + switch x := m.TypeJob.(type) { + case *Job_HadoopJob: + s := proto.Size(x.HadoopJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_SparkJob: + s := proto.Size(x.SparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_PysparkJob: + s := proto.Size(x.PysparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_HiveJob: + s := proto.Size(x.HiveJob) + n += 1 // tag and wire + n += 
proto.SizeVarint(uint64(s)) + n += s + case *Job_PigJob: + s := proto.Size(x.PigJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_SparkSqlJob: + s := proto.Size(x.SparkSqlJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Job scheduling options. +type JobScheduling struct { + // Optional. Maximum number of times per hour a driver may be restarted as + // a result of driver terminating with non-zero code before job is + // reported failed. + // + // A job may be reported as thrashing if driver exits with non-zero code + // 4 times within 10 minute window. + // + // Maximum value is 10. + MaxFailuresPerHour int32 `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobScheduling) Reset() { *m = JobScheduling{} } +func (m *JobScheduling) String() string { return proto.CompactTextString(m) } +func (*JobScheduling) ProtoMessage() {} +func (*JobScheduling) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{13} +} +func (m *JobScheduling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobScheduling.Unmarshal(m, b) +} +func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic) +} +func (dst *JobScheduling) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobScheduling.Merge(dst, src) +} +func (m *JobScheduling) XXX_Size() int { + return xxx_messageInfo_JobScheduling.Size(m) +} +func (m *JobScheduling) XXX_DiscardUnknown() { + xxx_messageInfo_JobScheduling.DiscardUnknown(m) +} + +var xxx_messageInfo_JobScheduling proto.InternalMessageInfo + +func (m *JobScheduling) GetMaxFailuresPerHour() int32 { + if m != nil { + return m.MaxFailuresPerHour + } + return 0 +} + +// A request to submit a job. +type SubmitJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job resource. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + // Optional. A unique id used to identify the request. If the server + // receives two + // [SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest] requests + // with the same id, then the second request will be ignored and the first + // [Job][google.cloud.dataproc.v1beta2.Job] created and stored in the backend + // is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubmitJobRequest) Reset() { *m = SubmitJobRequest{} } +func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) } +func (*SubmitJobRequest) ProtoMessage() {} +func (*SubmitJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{14} +} +func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b) +} +func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic) +} +func (dst *SubmitJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubmitJobRequest.Merge(dst, src) +} +func (m *SubmitJobRequest) XXX_Size() int { + return xxx_messageInfo_SubmitJobRequest.Size(m) +} +func (m *SubmitJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo + +func (m *SubmitJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SubmitJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *SubmitJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *SubmitJobRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to get the resource representation for a job in a project. +type GetJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. 
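To make the request_id and job-scheduling comments above concrete (sketch only; the project, region, cluster name, and UUID literal are placeholders):

```go
package main

import (
	dataproc "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2"
)

func main() {
	req := &dataproc.SubmitJobRequest{
		ProjectId: "my-project",  // placeholder
		Region:    "us-central1", // placeholder
		Job: &dataproc.Job{
			Placement: &dataproc.JobPlacement{ClusterName: "example-cluster"},
			TypeJob: &dataproc.Job_HadoopJob{
				HadoopJob: &dataproc.HadoopJob{
					Driver: &dataproc.HadoopJob_MainClass{MainClass: "com.example.WordCount"},
				},
			},
			// Up to 10 driver restarts per hour (the documented maximum) before
			// the job is reported failed.
			Scheduling: &dataproc.JobScheduling{MaxFailuresPerHour: 10},
		},
		// Resubmitting with the same id returns the Job created by the first
		// request instead of creating a duplicate.
		RequestId: "123e4567-e89b-12d3-a456-426614174000", // placeholder UUID
	}
	_ = req
}
```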
+ JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{15} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *GetJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A request to list jobs in a project. +type ListJobsRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"` + // Optional. The number of results to return in each response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The page token, returned by a previous call, to request the + // next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. If set, the returned jobs list includes only jobs that were + // submitted to the named cluster. + ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Optional. Specifies enumerated categories of jobs to list. + // (default = match ALL jobs). + // + // If `filter` is provided, `jobStateMatcher` will be ignored. + JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1beta2.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"` + // Optional. A filter constraining the jobs to list. Filters are + // case-sensitive and have the following syntax: + // + // [field = value] AND [field [= value]] ... + // + // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + // key. **value** can be `*` to match all values. + // `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. 
+ // + // Example filter: + // + // status.state = ACTIVE AND labels.env = staging AND labels.starred = * + Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{16} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListJobsRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListJobsRequest) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher { + if m != nil { + return m.JobStateMatcher + } + return ListJobsRequest_ALL +} + +func (m *ListJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// A request to update a job. +type UpdateJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Required. The changes to the job. + Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"` + // Required. Specifies the path, relative to Job, of + // the field to update. For example, to update the labels of a Job the + // update_mask parameter would be specified as + // labels, and the `PATCH` request body would specify the new + // value. Note: Currently, labels is the only + // field that can be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} } +func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobRequest) ProtoMessage() {} +func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{17} +} +func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b) +} +func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobRequest.Merge(dst, src) +} +func (m *UpdateJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobRequest.Size(m) +} +func (m *UpdateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo + +func (m *UpdateJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *UpdateJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func (m *UpdateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// A list of jobs in a project. +type ListJobsResponse struct { + // Output only. Jobs list. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // Optional. This token is included in the response if there are more results + // to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent ListJobsRequest. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{18} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to cancel a job. +type CancelJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelJobRequest) Reset() { *m = CancelJobRequest{} } +func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) } +func (*CancelJobRequest) ProtoMessage() {} +func (*CancelJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{19} +} +func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b) +} +func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic) +} +func (dst *CancelJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelJobRequest.Merge(dst, src) +} +func (m *CancelJobRequest) XXX_Size() int { + return xxx_messageInfo_CancelJobRequest.Size(m) +} +func (m *CancelJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo + +func (m *CancelJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CancelJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *CancelJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// A request to delete a job. +type DeleteJobRequest struct { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. The Cloud Dataproc region in which to handle the request. + Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"` + // Required. The job ID. + JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } +func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobRequest) ProtoMessage() {} +func (*DeleteJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_jobs_283891adf4097baf, []int{20} +} +func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) +} +func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobRequest.Merge(dst, src) +} +func (m *DeleteJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobRequest.Size(m) +} +func (m *DeleteJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo + +func (m *DeleteJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteJobRequest) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *DeleteJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func init() { + proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1beta2.LoggingConfig") + proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry") + proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1beta2.HadoopJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry") + proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1beta2.SparkJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry") + proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1beta2.PySparkJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry") + proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1beta2.QueryList") + proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1beta2.HiveJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry") + proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry") + proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1beta2.PigJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry") + proto.RegisterMapType((map[string]string)(nil), 
"google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry") + proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1beta2.JobPlacement") + proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1beta2.JobStatus") + proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1beta2.JobReference") + proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1beta2.YarnApplication") + proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1beta2.Job") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.Job.LabelsEntry") + proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1beta2.JobScheduling") + proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1beta2.SubmitJobRequest") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1beta2.GetJobRequest") + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1beta2.ListJobsRequest") + proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1beta2.UpdateJobRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1beta2.ListJobsResponse") + proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1beta2.CancelJobRequest") + proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1beta2.DeleteJobRequest") + proto.RegisterEnum("google.cloud.dataproc.v1beta2.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.JobStatus_State", JobStatus_State_name, JobStatus_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// JobControllerClient is the client API for JobController service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type JobControllerClient interface { + // Submits a job to a cluster. + SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) + // Gets the resource representation for a job in a project. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Lists regions/{region}/jobs in a project. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Updates a job in a project. + UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Starts a job cancellation request. To access the job resource + // after cancellation, call + // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) + // or + // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). 
+ CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) + // Deletes the job from the project. If the job is active, the delete fails, + // and the response returns `FAILED_PRECONDITION`. + DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type jobControllerClient struct { + cc *grpc.ClientConn +} + +func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient { + return &jobControllerClient{cc} +} + +func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/SubmitJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/UpdateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/CancelJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// JobControllerServer is the server API for JobController service. +type JobControllerServer interface { + // Submits a job to a cluster. + SubmitJob(context.Context, *SubmitJobRequest) (*Job, error) + // Gets the resource representation for a job in a project. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Lists regions/{region}/jobs in a project. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Updates a job in a project. + UpdateJob(context.Context, *UpdateJobRequest) (*Job, error) + // Starts a job cancellation request. To access the job resource + // after cancellation, call + // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) + // or + // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). + CancelJob(context.Context, *CancelJobRequest) (*Job, error) + // Deletes the job from the project. If the job is active, the delete fails, + // and the response returns `FAILED_PRECONDITION`. 
+ DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error) +} + +func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) { + s.RegisterService(&_JobController_serviceDesc, srv) +} + +func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SubmitJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).SubmitJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/SubmitJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).UpdateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/UpdateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).CancelJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/CancelJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobControllerServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.JobController/DeleteJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _JobController_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1beta2.JobController", + HandlerType: (*JobControllerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SubmitJob", + Handler: _JobController_SubmitJob_Handler, + }, + { + MethodName: "GetJob", + Handler: _JobController_GetJob_Handler, + }, + { + MethodName: "ListJobs", + Handler: _JobController_ListJobs_Handler, + }, + { + MethodName: "UpdateJob", + Handler: _JobController_UpdateJob_Handler, + }, + { + MethodName: "CancelJob", + Handler: _JobController_CancelJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _JobController_DeleteJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1beta2/jobs.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1beta2/jobs.proto", fileDescriptor_jobs_283891adf4097baf) +} + +var fileDescriptor_jobs_283891adf4097baf = []byte{ + // 2341 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x5a, 0xcf, 0x73, 0xdb, 0xc6, + 0xf5, 0x17, 0x7f, 0x13, 0x8f, 0xfa, 0x01, 0x6d, 0xec, 0x7c, 0xf9, 0xa5, 0x9b, 0x89, 0x82, 0x99, + 0xb8, 0xaa, 0xd3, 0x21, 0xc7, 0xac, 0xeb, 0x38, 0x76, 0xd3, 0x98, 0x22, 0x41, 0x93, 0x2a, 0x4d, + 0x31, 0x20, 0x69, 0x37, 0xe9, 0x74, 0x10, 0x90, 0x5c, 0x51, 0xa0, 0x40, 0x02, 0xde, 0x05, 0x54, + 0x33, 0x1e, 0x5f, 0x7a, 0xe9, 0xa1, 0xc7, 0x1e, 0x3b, 0x93, 0x99, 0xde, 0x9a, 0x99, 0x5e, 0x7a, + 0xed, 0x3f, 0xd0, 0xe9, 0xa5, 0x3d, 0x64, 0xa6, 0xff, 0x40, 0x7b, 0xe8, 0xb1, 0xa7, 0x9e, 0x3b, + 0xbb, 0x0b, 0x50, 0x24, 0x25, 0x9b, 0x90, 0xd5, 0x5f, 0xce, 0x49, 0xc0, 0xdb, 0xf7, 0xde, 0xbe, + 0xdd, 0xcf, 0x67, 0xdf, 0x7b, 0x0b, 0x0a, 0x76, 0x87, 0xb6, 0x3d, 0xb4, 0x70, 0xa1, 0x6f, 0xd9, + 0xde, 0xa0, 0x30, 0x30, 0x5c, 0xc3, 0x21, 0x76, 0xbf, 0x70, 0x72, 0xb3, 0x87, 0x5d, 0xa3, 0x58, + 0x18, 0xd9, 0x3d, 0x9a, 0x77, 0x88, 0xed, 0xda, 0xe8, 0x2d, 0xa1, 0x99, 0xe7, 0x9a, 0xf9, 0x40, + 0x33, 0xef, 0x6b, 0xe6, 0xbe, 0xe1, 0x3b, 0x32, 0x1c, 0xb3, 0x60, 0x4c, 0x26, 0xb6, 0x6b, 0xb8, + 0xa6, 0x3d, 0xf1, 0x8d, 0x73, 0xd7, 0xfc, 0x51, 0xfe, 0xd6, 0xf3, 0x0e, 0x0b, 0x78, 0xec, 0xb8, + 0x53, 0x7f, 0x70, 0x67, 0x79, 0xf0, 0xd0, 0xc4, 0xd6, 0x40, 0x1f, 0x1b, 0xf4, 0xd8, 0xd7, 0x78, + 0x7b, 0x59, 0xc3, 0x35, 0xc7, 0x98, 0xba, 0xc6, 0xd8, 0x11, 0x0a, 0xca, 0xdf, 0xa2, 0xb0, 0xd1, + 0xb0, 0x87, 0x43, 0x73, 0x32, 0x2c, 0xdb, 0x93, 0x43, 0x73, 0x88, 0xc6, 0xb0, 0x3d, 0x20, 0xe6, + 0x09, 0x26, 0xba, 0x65, 0x0f, 0x75, 0x0b, 0x9f, 0x60, 0x8b, 0x66, 0xa3, 0x3b, 0xb1, 0xdd, 0x4c, + 0xb1, 0x94, 0x7f, 0xe9, 0x52, 0xf2, 0x0b, 0x8e, 0xf2, 0x15, 0xee, 0xa5, 0x61, 0x0f, 0x1b, 0xdc, + 0x87, 0x3a, 0x71, 0xc9, 0x54, 0xdb, 0x1a, 0x2c, 0x4a, 0x73, 0x27, 0x70, 0xe5, 0x3c, 0x45, 0x24, + 0x43, 0xec, 0x18, 0x4f, 0xb3, 0x91, 0x9d, 0xc8, 0xae, 0xa4, 0xb1, 0x47, 0x54, 0x83, 0xc4, 0x89, + 0x61, 0x79, 0x38, 0x1b, 0xdd, 0x89, 
0xec, 0x6e, 0x16, 0x8b, 0x17, 0x0a, 0x86, 0xbb, 0xd6, 0x84, + 0x83, 0xbb, 0xd1, 0x3b, 0x11, 0xc5, 0x81, 0x04, 0x97, 0xa1, 0xab, 0xb0, 0xdd, 0x50, 0x1f, 0xa9, + 0x0d, 0xbd, 0xdb, 0x6c, 0xb7, 0xd4, 0x72, 0xbd, 0x5a, 0x57, 0x2b, 0xf2, 0x1a, 0x4a, 0x41, 0xac, + 0xd4, 0x68, 0xc8, 0x11, 0x24, 0x41, 0xa2, 0xa3, 0x95, 0xca, 0xaa, 0x1c, 0x65, 0x8f, 0x15, 0x75, + 0xaf, 0xfb, 0x40, 0x8e, 0xa1, 0x34, 0xc4, 0xeb, 0xcd, 0xea, 0x81, 0x1c, 0x67, 0x4f, 0x8f, 0x4b, + 0x5a, 0x53, 0x4e, 0xb0, 0x61, 0x55, 0xd3, 0x0e, 0x34, 0x39, 0xc9, 0x1e, 0xab, 0xa5, 0x4e, 0xa9, + 0x21, 0xa7, 0x98, 0xa3, 0x83, 0x6a, 0x55, 0x4e, 0x2b, 0x7f, 0x88, 0x81, 0x54, 0x33, 0x06, 0xb6, + 0xed, 0xec, 0xdb, 0x3d, 0xf4, 0x1e, 0x6c, 0x8f, 0x0d, 0x73, 0xa2, 0x8f, 0x0c, 0xa2, 0x1f, 0x9a, + 0x16, 0xd6, 0x3d, 0x62, 0x8a, 0xd5, 0xd6, 0xd6, 0xb4, 0x4d, 0x36, 0xb4, 0x6f, 0x90, 0xaa, 0x69, + 0xe1, 0x2e, 0x31, 0xd1, 0xdb, 0x00, 0x5c, 0xb9, 0x6f, 0x19, 0x94, 0xf2, 0xf5, 0x33, 0x2d, 0x89, + 0xc9, 0xca, 0x4c, 0x84, 0x10, 0xc4, 0x0d, 0x32, 0xa4, 0xd9, 0xd8, 0x4e, 0x6c, 0x57, 0xd2, 0xf8, + 0x33, 0x52, 0x60, 0x63, 0xde, 0x39, 0xcd, 0xc6, 0xf9, 0x60, 0x66, 0x34, 0xf3, 0x4b, 0xd1, 0x35, + 0x90, 0x4e, 0xc7, 0x13, 0x7c, 0x3c, 0x7d, 0x18, 0x0c, 0xbe, 0x03, 0xeb, 0x06, 0xe9, 0x1f, 0x99, + 0x27, 0xfe, 0x78, 0x52, 0xd8, 0xfb, 0x32, 0xae, 0xf2, 0x43, 0x00, 0x87, 0xd8, 0x0e, 0x26, 0xae, + 0x89, 0x69, 0x36, 0xc5, 0x59, 0x72, 0x67, 0x05, 0x30, 0xb3, 0x3d, 0xc8, 0xb7, 0x66, 0xa6, 0x82, + 0x1c, 0x73, 0xbe, 0x50, 0x1b, 0x36, 0x2d, 0x81, 0xa0, 0xde, 0xe7, 0x10, 0x66, 0xd3, 0x3b, 0x91, + 0xdd, 0x4c, 0xf1, 0xdb, 0x17, 0x81, 0x5d, 0xdb, 0xb0, 0xe6, 0x5f, 0x73, 0x1f, 0xc2, 0xd6, 0xd2, + 0x9c, 0xe7, 0xf0, 0xec, 0xca, 0x3c, 0xcf, 0xa4, 0x39, 0xce, 0xec, 0xa5, 0x21, 0x29, 0xe8, 0xab, + 0xfc, 0x3e, 0x06, 0xe9, 0xb6, 0x63, 0x90, 0xe3, 0xaf, 0x0f, 0x94, 0x8f, 0xcf, 0x81, 0xf2, 0xfd, + 0x15, 0x9b, 0x1d, 0x6c, 0xc1, 0x6b, 0x8c, 0xe4, 0x1f, 0x63, 0x00, 0xad, 0xe9, 0x0c, 0xcb, 0x02, + 0x5c, 0xe1, 0xf0, 0x38, 0x53, 0xf7, 0xc8, 0x9e, 0x2c, 0xc1, 0xa9, 0x71, 0x9c, 0x5b, 0x7c, 0x28, + 0xc0, 0x33, 0x80, 0x2b, 0x3a, 0x07, 0xd7, 0x2e, 0xc8, 0x4b, 0xf6, 0x01, 0x9c, 0x9b, 0xce, 0xbc, + 0xf1, 0x7f, 0x06, 0xd8, 0x4f, 0xce, 0x01, 0xf6, 0x83, 0x15, 0x7b, 0x7f, 0xba, 0x23, 0xaf, 0x1b, + 0xb4, 0xca, 0xbb, 0x20, 0x7d, 0xec, 0x61, 0x32, 0x6d, 0x98, 0xd4, 0x45, 0x59, 0x48, 0x3d, 0xf1, + 0x30, 0x61, 0x0b, 0x8f, 0xf0, 0x9d, 0x09, 0x5e, 0x95, 0x2f, 0xe2, 0x90, 0xaa, 0x99, 0x27, 0x98, + 0x81, 0x7e, 0x1d, 0x36, 0x99, 0x78, 0x7a, 0xf6, 0xf4, 0xae, 0x73, 0x79, 0x80, 0x75, 0x1d, 0x40, + 0xe8, 0x59, 0x26, 0x75, 0xf9, 0xcc, 0x99, 0xe2, 0xee, 0x8a, 0xa5, 0xce, 0x62, 0x61, 0xa7, 0xfc, + 0xc9, 0x2c, 0xb0, 0x3c, 0xbc, 0xd1, 0xb7, 0x27, 0xae, 0x39, 0xf1, 0xb0, 0xce, 0x78, 0x62, 0x98, + 0x96, 0x47, 0x70, 0x36, 0xb6, 0x13, 0xd9, 0x4d, 0x6b, 0xdb, 0xc1, 0xd0, 0xc1, 0xa4, 0x2a, 0x06, + 0xd0, 0x21, 0xc8, 0xb4, 0x4f, 0x4c, 0xc7, 0xd5, 0x4f, 0x0c, 0x62, 0x1a, 0x3d, 0x0b, 0x0b, 0xae, + 0x64, 0x8a, 0xf7, 0x56, 0xa5, 0x5b, 0xb1, 0xc8, 0x7c, 0x9b, 0x9b, 0x3f, 0x0a, 0xac, 0xfd, 0x72, + 0x4c, 0x17, 0xa5, 0xe8, 0xd1, 0x02, 0x59, 0x12, 0x7c, 0x86, 0xdb, 0x21, 0x67, 0x78, 0x19, 0x53, + 0xce, 0x10, 0x3d, 0x79, 0x86, 0xe8, 0xb9, 0x3d, 0xb8, 0x72, 0x5e, 0x90, 0x17, 0x41, 0xff, 0xb2, + 0x79, 0x41, 0x9a, 0xf1, 0x45, 0xf9, 0x53, 0x1c, 0x32, 0xfc, 0x10, 0xb4, 0x9f, 0x58, 0xff, 0x25, + 0x92, 0x8c, 0xce, 0x01, 0x3d, 0xc6, 0x21, 0xf9, 0x28, 0x4c, 0x62, 0x16, 0x81, 0x87, 0x04, 0xfe, + 0xd3, 0x05, 0xe0, 0x05, 0xb5, 0xee, 0x5e, 0x60, 0x96, 0x0b, 0x81, 0x7f, 0xe7, 0x6c, 0x96, 0x3b, + 0x9b, 0x4a, 0x92, 0x97, 0x4f, 0x25, 0xff, 0x5b, 0x8c, 0xfa, 
0x7b, 0x1c, 0x92, 0x2d, 0x73, 0xf8, + 0x9a, 0x64, 0x1c, 0xfc, 0xc2, 0x8c, 0xb3, 0x8a, 0x16, 0x62, 0x8d, 0x21, 0x79, 0xd7, 0x3d, 0x27, + 0xe1, 0x7c, 0x37, 0xdc, 0x04, 0x97, 0xcc, 0x37, 0xe7, 0x50, 0x2e, 0xf5, 0x75, 0xa3, 0x5c, 0x07, + 0xd6, 0xf7, 0xed, 0x5e, 0xcb, 0x32, 0xfa, 0x78, 0x8c, 0x27, 0x2e, 0x6b, 0x17, 0xfa, 0x96, 0x47, + 0x5d, 0x4c, 0xf4, 0x89, 0x31, 0xc6, 0xbe, 0xbf, 0x8c, 0x2f, 0x6b, 0x1a, 0x63, 0x3c, 0xaf, 0xe2, + 0x79, 0xe6, 0xc0, 0x77, 0x1f, 0xa8, 0x74, 0x3d, 0x73, 0xa0, 0xfc, 0x23, 0x06, 0xd2, 0xbe, 0xdd, + 0x6b, 0xbb, 0x86, 0xeb, 0x51, 0x54, 0x81, 0x04, 0x75, 0x0d, 0x57, 0x38, 0xdb, 0x2c, 0xe6, 0x57, + 0xec, 0xde, 0xcc, 0x30, 0xcf, 0xfe, 0x60, 0x4d, 0x18, 0xb3, 0x4a, 0x3d, 0xc0, 0xae, 0x61, 0x5a, + 0x7e, 0x53, 0xac, 0x05, 0xaf, 0xa8, 0x02, 0x32, 0x57, 0xd1, 0xa9, 0x6b, 0x10, 0x57, 0x67, 0x37, + 0x58, 0x3f, 0x37, 0xe4, 0x82, 0xa9, 0x82, 0xeb, 0x6d, 0xbe, 0x13, 0x5c, 0x6f, 0xb5, 0x4d, 0x6e, + 0xd3, 0x66, 0x26, 0x4c, 0x88, 0x1e, 0x42, 0x9a, 0x7a, 0x3d, 0x11, 0x68, 0x8a, 0x07, 0x7a, 0x33, + 0x7c, 0xa0, 0xbe, 0xa1, 0x36, 0x73, 0xa1, 0x7c, 0x19, 0x81, 0x04, 0x8f, 0x9f, 0xdd, 0x1f, 0xdb, + 0x9d, 0x52, 0x47, 0x5d, 0xba, 0x3f, 0x66, 0x20, 0xd5, 0x52, 0x9b, 0x95, 0x7a, 0xf3, 0x81, 0x1c, + 0x41, 0x9b, 0x00, 0x6d, 0xb5, 0xd3, 0x6d, 0xe9, 0x95, 0x83, 0xa6, 0x2a, 0xa7, 0xd9, 0xa0, 0xd6, + 0x6d, 0x36, 0xd9, 0x60, 0x14, 0x21, 0xd8, 0x2c, 0x97, 0x9a, 0x65, 0xb5, 0xa1, 0x07, 0x06, 0xb1, + 0x39, 0x59, 0xbb, 0x53, 0xd2, 0x3a, 0x6a, 0x45, 0x4e, 0xa1, 0x0d, 0x90, 0x84, 0xac, 0xa1, 0x56, + 0xc4, 0xbd, 0x93, 0x7b, 0x5b, 0xb8, 0x77, 0xbe, 0x01, 0x5b, 0xa5, 0x4e, 0x47, 0x7d, 0xd8, 0xea, + 0xe8, 0xd5, 0x52, 0xbd, 0xd1, 0xd5, 0x54, 0x59, 0x52, 0x6a, 0x90, 0x0e, 0x56, 0x80, 0xb6, 0x20, + 0xb3, 0x18, 0xe7, 0x06, 0x48, 0xed, 0xee, 0xde, 0xc3, 0x7a, 0x87, 0x4d, 0x12, 0x41, 0x00, 0xc9, + 0x8f, 0xbb, 0x6a, 0x57, 0xad, 0xc8, 0x51, 0x24, 0xc3, 0x7a, 0xbb, 0x53, 0x6a, 0xa8, 0x2c, 0x86, + 0x4e, 0xb7, 0x2d, 0xc7, 0x94, 0x0a, 0xa7, 0x93, 0x86, 0x0f, 0x31, 0xc1, 0x93, 0x3e, 0x46, 0x6f, + 0xf1, 0xc3, 0x3b, 0xc2, 0x7d, 0x57, 0x37, 0x07, 0x3e, 0x99, 0x24, 0x5f, 0x52, 0x1f, 0xa0, 0xab, + 0x90, 0x1c, 0xd9, 0x3d, 0x7d, 0x46, 0xa2, 0xc4, 0xc8, 0xee, 0xd5, 0x07, 0xca, 0xef, 0xa2, 0xb0, + 0xf5, 0x89, 0x41, 0x26, 0x25, 0xc7, 0xb1, 0xcc, 0x3e, 0xff, 0xdc, 0xc1, 0xda, 0xe8, 0x39, 0x42, + 0xf2, 0x67, 0xb4, 0x1f, 0x10, 0x4b, 0x5c, 0xf8, 0x6f, 0xad, 0xc0, 0x6b, 0xc9, 0xe5, 0x22, 0xbd, + 0x72, 0x90, 0x76, 0x88, 0x3d, 0x24, 0x98, 0x52, 0x9e, 0xf2, 0xa2, 0xda, 0xec, 0x9d, 0x31, 0xde, + 0x25, 0x46, 0xff, 0x98, 0x25, 0x02, 0x8f, 0x58, 0xd9, 0xb8, 0x60, 0x7c, 0x20, 0xeb, 0x12, 0x4b, + 0xf9, 0xd9, 0x2a, 0xb8, 0x53, 0x10, 0x6b, 0xaa, 0x8f, 0x05, 0xd4, 0x4d, 0xf5, 0xb1, 0xde, 0x2e, + 0x3d, 0x12, 0xe8, 0x2e, 0xec, 0x6f, 0x0c, 0xad, 0x43, 0xba, 0x54, 0x2e, 0xab, 0xad, 0x0e, 0xc7, + 0x70, 0x8e, 0x07, 0x09, 0x36, 0x54, 0xad, 0x37, 0xeb, 0xed, 0x9a, 0x5a, 0x91, 0x93, 0x0c, 0x08, + 0x86, 0x20, 0x47, 0x1e, 0x20, 0xf9, 0x83, 0x3a, 0x87, 0x3d, 0xad, 0xfc, 0x59, 0x82, 0x18, 0xab, + 0x20, 0x75, 0x90, 0x48, 0x80, 0x03, 0xdf, 0xb5, 0x4c, 0xf1, 0xbd, 0xd5, 0x84, 0x9e, 0x41, 0xa7, + 0x9d, 0x5a, 0x33, 0x57, 0x4e, 0x90, 0x21, 0xfc, 0x1a, 0x13, 0xc2, 0xd5, 0x2c, 0xa9, 0x68, 0xa7, + 0xd6, 0xac, 0x5e, 0x1d, 0xf1, 0xeb, 0xbd, 0x3e, 0xb2, 0x7b, 0x7c, 0xa3, 0x57, 0xd7, 0xab, 0xd9, + 0xf7, 0x00, 0x56, 0xaf, 0x8e, 0x66, 0x1f, 0x48, 0xaa, 0x20, 0x51, 0xd6, 0x5f, 0x70, 0x4f, 0x71, + 0xee, 0xe9, 0x9b, 0x21, 0xaf, 0xa3, 0xb5, 0x35, 0x2d, 0x4d, 0x83, 0x1b, 0x5d, 0x03, 0x32, 0xce, + 0xf4, 0xd4, 0x53, 0x82, 0x7b, 0xfa, 0x56, 0xe8, 0xfb, 0x4f, 0x6d, 0x4d, 0x03, 0xdf, 
0x9e, 0x79, + 0x2b, 0x43, 0x9a, 0x5f, 0xb6, 0x98, 0x2b, 0x91, 0x84, 0xae, 0x87, 0xeb, 0x8e, 0x6b, 0x6b, 0x5a, + 0xea, 0xc8, 0xbf, 0x6f, 0xdc, 0x87, 0x94, 0x63, 0x0e, 0xb9, 0x0f, 0x51, 0x71, 0xde, 0x0d, 0x55, + 0xf0, 0x6a, 0x6b, 0x5a, 0xd2, 0x11, 0xfd, 0x43, 0x0b, 0x36, 0xc4, 0x92, 0xe8, 0x13, 0x8b, 0xfb, + 0x59, 0xe7, 0x7e, 0x6e, 0x84, 0x6f, 0xd8, 0x6a, 0x6b, 0x5a, 0x86, 0xce, 0xb5, 0xb7, 0xf7, 0x21, + 0x49, 0x79, 0xb6, 0xf3, 0xaf, 0x70, 0xbb, 0x61, 0xb3, 0xa3, 0xe6, 0xdb, 0xa1, 0x03, 0xd8, 0x14, + 0x4f, 0xfa, 0x91, 0x49, 0x5d, 0x9b, 0x4c, 0xb3, 0x1b, 0xbc, 0x9a, 0x87, 0xf7, 0xb4, 0x21, 0xec, + 0x6b, 0xc2, 0x1c, 0xfd, 0x08, 0xb6, 0xa7, 0x06, 0x99, 0xe8, 0xc6, 0xe9, 0xa1, 0xa6, 0x59, 0x89, + 0xfb, 0xcc, 0x5f, 0x2c, 0x17, 0x68, 0xf2, 0x74, 0x51, 0xc0, 0x0f, 0x3d, 0xf5, 0x7a, 0x63, 0xd3, + 0x75, 0xf1, 0x40, 0xef, 0x4d, 0xb3, 0x20, 0x0e, 0xfd, 0x4c, 0xb6, 0x37, 0x45, 0xf7, 0x20, 0xe7, + 0x7f, 0x09, 0xb5, 0x3d, 0xd7, 0xf1, 0x5c, 0x9d, 0x60, 0x6a, 0x7b, 0xa4, 0x2f, 0x1a, 0xb6, 0x6d, + 0x6e, 0xf0, 0x7f, 0x42, 0xe3, 0x80, 0x2b, 0x68, 0xfe, 0x38, 0xeb, 0xdc, 0xde, 0x87, 0xac, 0x6f, + 0xcc, 0x5a, 0x2b, 0x62, 0x5b, 0xbc, 0x17, 0xa1, 0xdc, 0x74, 0x8b, 0x9b, 0x5e, 0x15, 0xe3, 0x65, + 0x31, 0xcc, 0xba, 0x12, 0xca, 0x0c, 0xab, 0x90, 0xb4, 0x8c, 0x1e, 0xb6, 0x68, 0x16, 0x85, 0x5a, + 0x2a, 0xeb, 0x84, 0x1a, 0xdc, 0x40, 0x74, 0x41, 0xbe, 0x35, 0x6a, 0x00, 0xd0, 0xfe, 0x11, 0x1e, + 0x78, 0x96, 0x39, 0x19, 0x66, 0xaf, 0x84, 0xea, 0x6c, 0x18, 0x14, 0x33, 0x1b, 0x6d, 0xce, 0x1e, + 0xfd, 0x3f, 0xa4, 0x59, 0x2a, 0xe7, 0x1d, 0xc1, 0x9b, 0xa2, 0x3e, 0x8f, 0xec, 0x1e, 0xeb, 0x06, + 0x72, 0x1f, 0x40, 0x66, 0x6e, 0xfe, 0x0b, 0x75, 0x2a, 0x00, 0x69, 0x77, 0xea, 0xf0, 0xd3, 0xa4, + 0xec, 0xc1, 0xc6, 0xc2, 0xf4, 0xe8, 0x26, 0x5c, 0x1d, 0x1b, 0x4f, 0x83, 0x46, 0x95, 0xea, 0x0e, + 0x26, 0xfa, 0x91, 0xed, 0x11, 0xee, 0x3a, 0xa1, 0xa1, 0xb1, 0xf1, 0xd4, 0xef, 0x55, 0x69, 0x0b, + 0x93, 0x9a, 0xed, 0x11, 0xe5, 0x8b, 0x08, 0xc8, 0x6d, 0x8e, 0x20, 0xcf, 0x75, 0x4f, 0x3c, 0x4c, + 0xdd, 0x55, 0x45, 0xea, 0x4d, 0x48, 0x12, 0x3c, 0x34, 0xed, 0x09, 0x4f, 0x57, 0x92, 0xe6, 0xbf, + 0xa1, 0x5b, 0x10, 0x63, 0x07, 0x4b, 0xe4, 0x43, 0x25, 0x44, 0x6a, 0x65, 0xea, 0x6c, 0x32, 0x22, + 0xe6, 0x65, 0x93, 0x89, 0x4a, 0x22, 0xf9, 0x92, 0xfa, 0x40, 0xf9, 0x31, 0x6c, 0x3c, 0xc0, 0xff, + 0x82, 0xe0, 0x5e, 0x50, 0x59, 0xff, 0x12, 0x85, 0x2d, 0xd6, 0xec, 0xef, 0xdb, 0x3d, 0x7a, 0xe1, + 0x19, 0x92, 0x0b, 0x33, 0x5c, 0x03, 0xc9, 0x31, 0x86, 0x58, 0xa7, 0xe6, 0xe7, 0x02, 0xb8, 0x84, + 0x96, 0x66, 0x82, 0xb6, 0xf9, 0xb9, 0xa8, 0xfb, 0x6c, 0xd0, 0xb5, 0x8f, 0x71, 0x10, 0x1a, 0x57, + 0xef, 0x30, 0xc1, 0x99, 0x2e, 0x33, 0x7e, 0xb6, 0xcb, 0x1c, 0xc1, 0x36, 0x5b, 0x80, 0x68, 0xec, + 0xc6, 0x86, 0xdb, 0x3f, 0xc2, 0x84, 0xe7, 0xe6, 0xcd, 0xe2, 0xf7, 0x57, 0xb5, 0xdf, 0x8b, 0x0b, + 0x0c, 0xf2, 0x07, 0x7e, 0x28, 0xbc, 0x68, 0x5b, 0xa3, 0x45, 0x01, 0x5b, 0xe2, 0xa1, 0x69, 0xb9, + 0x98, 0xf0, 0x6c, 0x2b, 0x69, 0xfe, 0x9b, 0x72, 0x1b, 0xb6, 0x96, 0x6c, 0x83, 0xaf, 0xfe, 0x6b, + 0xac, 0xe4, 0x96, 0xca, 0x9d, 0xfa, 0x23, 0xd5, 0x2f, 0xe9, 0x07, 0x4d, 0xdd, 0x7f, 0x8f, 0x2a, + 0x5f, 0x45, 0x40, 0xee, 0x3a, 0x03, 0xc3, 0xc5, 0xaf, 0x02, 0x64, 0xf4, 0x05, 0x40, 0xc6, 0xe6, + 0x80, 0x0c, 0xc8, 0x17, 0xbf, 0x18, 0xf9, 0xee, 0x41, 0xc6, 0xe3, 0x71, 0xf1, 0x9f, 0x80, 0xfc, + 0x52, 0x77, 0xb6, 0x49, 0xae, 0x9a, 0xd8, 0x1a, 0x3c, 0x34, 0xe8, 0xb1, 0x06, 0x42, 0x9d, 0x3d, + 0x2b, 0x04, 0xe4, 0xd3, 0x9d, 0xa5, 0x8e, 0x3d, 0xa1, 0x18, 0xdd, 0x86, 0xf8, 0xc8, 0xee, 0x89, + 0x6f, 0x67, 0xe1, 0xe2, 0xe0, 0xfa, 0xe8, 0x3a, 0x6c, 0x4d, 0xf0, 0x53, 0x57, 0x9f, 0x23, 0x89, + 0x58, 0xf6, 
0x06, 0x13, 0xb7, 0x02, 0xa2, 0x28, 0x9f, 0x81, 0x5c, 0x36, 0x26, 0x7d, 0x6c, 0xfd, + 0xdb, 0x4e, 0xc4, 0x67, 0x20, 0x57, 0xb0, 0x85, 0x5f, 0x0d, 0xaa, 0x30, 0x33, 0x14, 0x7f, 0x9e, + 0xe6, 0x89, 0xcb, 0x4f, 0xe3, 0x16, 0x26, 0xe8, 0xd7, 0x11, 0x90, 0x66, 0x59, 0x08, 0x15, 0x56, + 0xd5, 0xe4, 0xa5, 0x7c, 0x95, 0x0b, 0xb1, 0xcd, 0x4a, 0xf5, 0xa7, 0x5f, 0xfd, 0xf5, 0x17, 0xd1, + 0xfb, 0xca, 0xbd, 0xd9, 0x0f, 0x8e, 0x7e, 0xfc, 0xb4, 0xf0, 0xec, 0x74, 0x6d, 0xcf, 0x0b, 0x22, + 0x74, 0x5a, 0x78, 0x26, 0x1e, 0x9e, 0xf3, 0xdf, 0x25, 0xef, 0x8a, 0x22, 0x77, 0x37, 0x72, 0x03, + 0xfd, 0x2a, 0x02, 0x49, 0x91, 0x90, 0xd0, 0xaa, 0xda, 0xb0, 0x90, 0xb7, 0x42, 0x05, 0xa9, 0xf2, + 0x20, 0x3f, 0x42, 0x1f, 0xbe, 0x4a, 0x90, 0x85, 0x67, 0x62, 0xb3, 0x9f, 0xa3, 0x2f, 0x23, 0x90, + 0x0e, 0x98, 0x89, 0xf2, 0x17, 0x4b, 0x0e, 0xb9, 0x42, 0x68, 0x7d, 0x41, 0x79, 0xe5, 0x7b, 0x3c, + 0xe8, 0xdb, 0xe8, 0xd6, 0xab, 0x04, 0x8d, 0x7e, 0x13, 0x01, 0x69, 0x96, 0x1a, 0x56, 0x42, 0xbf, + 0x9c, 0x44, 0x42, 0xed, 0xea, 0x3e, 0x0f, 0xb0, 0x52, 0xbc, 0xdc, 0xae, 0xde, 0xe5, 0x09, 0xe3, + 0xb7, 0x11, 0x90, 0x66, 0x07, 0x70, 0x65, 0xb8, 0xcb, 0x47, 0x35, 0x54, 0xb8, 0x07, 0x3c, 0xdc, + 0xba, 0x52, 0xb9, 0x5c, 0xb8, 0x7d, 0x3e, 0x37, 0xa3, 0xec, 0x2f, 0x23, 0x20, 0xcd, 0x8e, 0xf4, + 0xca, 0x98, 0x97, 0x0f, 0x7f, 0xee, 0xcd, 0x33, 0xe9, 0x50, 0x1d, 0x3b, 0xee, 0x34, 0x20, 0xeb, + 0x8d, 0xcb, 0x6d, 0xeb, 0xde, 0x4f, 0xe0, 0x9d, 0xbe, 0x3d, 0x7e, 0x79, 0x50, 0x7b, 0x12, 0x63, + 0x5c, 0x8b, 0xcd, 0xdf, 0x8a, 0x7c, 0xaa, 0xfa, 0xba, 0x43, 0xdb, 0x32, 0x26, 0xc3, 0xbc, 0x4d, + 0x86, 0x85, 0x21, 0x9e, 0xf0, 0xe8, 0x0a, 0x62, 0xc8, 0x70, 0x4c, 0xfa, 0x82, 0xff, 0x33, 0xb8, + 0x17, 0x08, 0x7a, 0x49, 0x6e, 0xf1, 0x9d, 0x7f, 0x06, 0x00, 0x00, 0xff, 0xff, 0x8e, 0xbb, 0x4e, + 0xb3, 0x98, 0x20, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/operations.pb.go new file mode 100644 index 0000000..5966dac --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/operations.pb.go @@ -0,0 +1,274 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1beta2/operations.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operation state. +type ClusterOperationStatus_State int32 + +const ( + // Unused. + ClusterOperationStatus_UNKNOWN ClusterOperationStatus_State = 0 + // The operation has been created. + ClusterOperationStatus_PENDING ClusterOperationStatus_State = 1 + // The operation is running. + ClusterOperationStatus_RUNNING ClusterOperationStatus_State = 2 + // The operation is done; either cancelled or completed. 
+ ClusterOperationStatus_DONE ClusterOperationStatus_State = 3 +) + +var ClusterOperationStatus_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", +} +var ClusterOperationStatus_State_value = map[string]int32{ + "UNKNOWN": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, +} + +func (x ClusterOperationStatus_State) String() string { + return proto.EnumName(ClusterOperationStatus_State_name, int32(x)) +} +func (ClusterOperationStatus_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operations_80c07007f2fde7d1, []int{0, 0} +} + +// The status of the operation. +type ClusterOperationStatus struct { + // Output only. A message containing the operation state. + State ClusterOperationStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.ClusterOperationStatus_State" json:"state,omitempty"` + // Output only. A message containing the detailed operation state. + InnerState string `protobuf:"bytes,2,opt,name=inner_state,json=innerState,proto3" json:"inner_state,omitempty"` + // Output only. A message containing any operation metadata details. + Details string `protobuf:"bytes,3,opt,name=details,proto3" json:"details,omitempty"` + // Output only. The time this state was entered. + StateStartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperationStatus) Reset() { *m = ClusterOperationStatus{} } +func (m *ClusterOperationStatus) String() string { return proto.CompactTextString(m) } +func (*ClusterOperationStatus) ProtoMessage() {} +func (*ClusterOperationStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_80c07007f2fde7d1, []int{0} +} +func (m *ClusterOperationStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperationStatus.Unmarshal(m, b) +} +func (m *ClusterOperationStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperationStatus.Marshal(b, m, deterministic) +} +func (dst *ClusterOperationStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperationStatus.Merge(dst, src) +} +func (m *ClusterOperationStatus) XXX_Size() int { + return xxx_messageInfo_ClusterOperationStatus.Size(m) +} +func (m *ClusterOperationStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperationStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperationStatus proto.InternalMessageInfo + +func (m *ClusterOperationStatus) GetState() ClusterOperationStatus_State { + if m != nil { + return m.State + } + return ClusterOperationStatus_UNKNOWN +} + +func (m *ClusterOperationStatus) GetInnerState() string { + if m != nil { + return m.InnerState + } + return "" +} + +func (m *ClusterOperationStatus) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +func (m *ClusterOperationStatus) GetStateStartTime() *timestamp.Timestamp { + if m != nil { + return m.StateStartTime + } + return nil +} + +// Metadata describing the operation. +type ClusterOperationMetadata struct { + // Output only. Name of the cluster for the operation. + ClusterName string `protobuf:"bytes,7,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Output only. Cluster UUID for the operation. 
+ ClusterUuid string `protobuf:"bytes,8,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + // Output only. Current operation status. + Status *ClusterOperationStatus `protobuf:"bytes,9,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The previous operation status. + StatusHistory []*ClusterOperationStatus `protobuf:"bytes,10,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"` + // Output only. The operation type. + OperationType string `protobuf:"bytes,11,opt,name=operation_type,json=operationType,proto3" json:"operation_type,omitempty"` + // Output only. Short description of operation. + Description string `protobuf:"bytes,12,opt,name=description,proto3" json:"description,omitempty"` + // Output only. Labels associated with the operation + Labels map[string]string `protobuf:"bytes,13,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Errors encountered during operation execution. + Warnings []string `protobuf:"bytes,14,rep,name=warnings,proto3" json:"warnings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperationMetadata) Reset() { *m = ClusterOperationMetadata{} } +func (m *ClusterOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ClusterOperationMetadata) ProtoMessage() {} +func (*ClusterOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_80c07007f2fde7d1, []int{1} +} +func (m *ClusterOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperationMetadata.Unmarshal(m, b) +} +func (m *ClusterOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ClusterOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperationMetadata.Merge(dst, src) +} +func (m *ClusterOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ClusterOperationMetadata.Size(m) +} +func (m *ClusterOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperationMetadata proto.InternalMessageInfo + +func (m *ClusterOperationMetadata) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ClusterOperationMetadata) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +func (m *ClusterOperationMetadata) GetStatus() *ClusterOperationStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *ClusterOperationMetadata) GetStatusHistory() []*ClusterOperationStatus { + if m != nil { + return m.StatusHistory + } + return nil +} + +func (m *ClusterOperationMetadata) GetOperationType() string { + if m != nil { + return m.OperationType + } + return "" +} + +func (m *ClusterOperationMetadata) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ClusterOperationMetadata) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ClusterOperationMetadata) GetWarnings() []string { + if m != nil { + return m.Warnings + } + return nil +} + +func init() { + proto.RegisterType((*ClusterOperationStatus)(nil), "google.cloud.dataproc.v1beta2.ClusterOperationStatus") + 
proto.RegisterType((*ClusterOperationMetadata)(nil), "google.cloud.dataproc.v1beta2.ClusterOperationMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry") + proto.RegisterEnum("google.cloud.dataproc.v1beta2.ClusterOperationStatus_State", ClusterOperationStatus_State_name, ClusterOperationStatus_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1beta2/operations.proto", fileDescriptor_operations_80c07007f2fde7d1) +} + +var fileDescriptor_operations_80c07007f2fde7d1 = []byte{ + // 537 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0x5f, 0x8b, 0xd3, 0x4e, + 0x14, 0xfd, 0xa5, 0xff, 0x7b, 0xb3, 0xed, 0xaf, 0x0c, 0x22, 0x43, 0x51, 0x36, 0x5b, 0x10, 0xfa, + 0x94, 0x60, 0x45, 0x58, 0xdd, 0xb7, 0xdd, 0x16, 0x15, 0xdd, 0xb4, 0x66, 0xb7, 0x08, 0x2a, 0x94, + 0x69, 0x33, 0xc6, 0x60, 0x3a, 0x13, 0x66, 0x26, 0x2b, 0x7d, 0xf0, 0x0b, 0xf8, 0x41, 0xfc, 0x9c, + 0x32, 0x33, 0x49, 0xa9, 0xa2, 0x0b, 0xee, 0x53, 0xe7, 0xde, 0x7b, 0xce, 0x99, 0x73, 0xa6, 0x97, + 0x80, 0x9f, 0x70, 0x9e, 0x64, 0x34, 0xd8, 0x64, 0xbc, 0x88, 0x83, 0x98, 0x28, 0x92, 0x0b, 0xbe, + 0x09, 0x6e, 0x1e, 0xaf, 0xa9, 0x22, 0x93, 0x80, 0xe7, 0x54, 0x10, 0x95, 0x72, 0x26, 0xfd, 0x5c, + 0x70, 0xc5, 0xd1, 0x43, 0x8b, 0xf7, 0x0d, 0xde, 0xaf, 0xf0, 0x7e, 0x89, 0x1f, 0x3e, 0x28, 0xe5, + 0x48, 0x9e, 0x06, 0x84, 0x31, 0xae, 0x0e, 0xc9, 0xc3, 0xe3, 0x72, 0x6a, 0xaa, 0x75, 0xf1, 0x29, + 0x50, 0xe9, 0x96, 0x4a, 0x45, 0xb6, 0xb9, 0x05, 0x8c, 0x7e, 0xd4, 0xe0, 0xfe, 0x45, 0x56, 0x48, + 0x45, 0xc5, 0xbc, 0xba, 0xf9, 0x4a, 0x11, 0x55, 0x48, 0xf4, 0x16, 0x9a, 0x52, 0x11, 0x45, 0xb1, + 0xe3, 0x39, 0xe3, 0xfe, 0xe4, 0xcc, 0xbf, 0xd5, 0x88, 0xff, 0x67, 0x15, 0x5f, 0xff, 0xd0, 0xc8, + 0x2a, 0xa1, 0x63, 0x70, 0x53, 0xc6, 0xa8, 0x58, 0x59, 0xe1, 0x9a, 0xe7, 0x8c, 0xbb, 0x11, 0x98, + 0x96, 0xc1, 0x21, 0x0c, 0xed, 0x98, 0x2a, 0x92, 0x66, 0x12, 0xd7, 0xcd, 0xb0, 0x2a, 0xd1, 0x14, + 0x06, 0x86, 0xa4, 0xa9, 0x42, 0xad, 0x74, 0x0e, 0xdc, 0xf0, 0x9c, 0xb1, 0x3b, 0x19, 0x56, 0xc6, + 0xaa, 0x90, 0xfe, 0x75, 0x15, 0x32, 0xea, 0x1b, 0xce, 0x95, 0xa6, 0xe8, 0xe6, 0xe8, 0x14, 0x9a, + 0xf6, 0x22, 0x17, 0xda, 0xcb, 0xf0, 0x75, 0x38, 0x7f, 0x17, 0x0e, 0xfe, 0xd3, 0xc5, 0x62, 0x16, + 0x4e, 0x5f, 0x85, 0x2f, 0x06, 0x8e, 0x2e, 0xa2, 0x65, 0x18, 0xea, 0xa2, 0x86, 0x3a, 0xd0, 0x98, + 0xce, 0xc3, 0xd9, 0xa0, 0x3e, 0xfa, 0xde, 0x00, 0xfc, 0x7b, 0xc4, 0x4b, 0xaa, 0x88, 0x7e, 0x07, + 0x74, 0x02, 0x47, 0x1b, 0x3b, 0x5b, 0x31, 0xb2, 0xa5, 0xb8, 0x6d, 0xbc, 0xbb, 0x65, 0x2f, 0x24, + 0x5b, 0x7a, 0x08, 0x29, 0x8a, 0x34, 0xc6, 0x9d, 0x5f, 0x20, 0xcb, 0x22, 0x8d, 0xd1, 0x25, 0xb4, + 0xa4, 0x79, 0x34, 0xdc, 0x35, 0xc1, 0x9e, 0xde, 0xe9, 0xc5, 0xa3, 0x52, 0x04, 0x7d, 0x84, 0xbe, + 0x3d, 0xad, 0x3e, 0xa7, 0x52, 0x71, 0xb1, 0xc3, 0xe0, 0xd5, 0xef, 0x2e, 0xdb, 0xb3, 0x62, 0x2f, + 0xad, 0x16, 0x7a, 0x04, 0xfd, 0xfd, 0xaa, 0xae, 0xd4, 0x2e, 0xa7, 0xd8, 0x35, 0x89, 0x7a, 0xfb, + 0xee, 0xf5, 0x2e, 0xa7, 0xc8, 0x03, 0x37, 0xa6, 0x72, 0x23, 0xd2, 0x5c, 0xb7, 0xf0, 0x91, 0x4d, + 0x7d, 0xd0, 0x42, 0x1f, 0xa0, 0x95, 0x91, 0x35, 0xcd, 0x24, 0xee, 0x19, 0x7b, 0x17, 0xff, 0x68, + 0xaf, 0xfa, 0x13, 0xfc, 0x37, 0x46, 0x65, 0xc6, 0x94, 0xd8, 0x45, 0xa5, 0x24, 0x1a, 0x42, 0xe7, + 0x2b, 0x11, 0x2c, 0x65, 0x89, 0xc4, 0x7d, 0xaf, 0x3e, 0xee, 0x46, 0xfb, 0x7a, 0xf8, 0x0c, 0xdc, + 0x03, 0x0a, 0x1a, 0x40, 0xfd, 0x0b, 0xdd, 0x99, 0x65, 0xef, 0x46, 0xfa, 0x88, 0xee, 0x41, 0xf3, + 0x86, 0x64, 0x45, 0xb5, 0xa7, 0xb6, 0x78, 0x5e, 0x3b, 0x75, 0xce, 0xbf, 0xc1, 
0xc9, 0x86, 0x6f, + 0x6f, 0x37, 0x7a, 0xfe, 0xff, 0xde, 0xa2, 0x5c, 0xe8, 0xcd, 0x5c, 0x38, 0xef, 0x67, 0x25, 0x23, + 0xe1, 0x19, 0x61, 0x89, 0xcf, 0x45, 0x12, 0x24, 0x94, 0x99, 0xbd, 0x0d, 0xec, 0x88, 0xe4, 0xa9, + 0xfc, 0xcb, 0xa7, 0xe1, 0xac, 0x6a, 0xac, 0x5b, 0x86, 0xf1, 0xe4, 0x67, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x83, 0x10, 0x95, 0x5e, 0x4b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/shared.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/shared.pb.go new file mode 100644 index 0000000..51c8eb6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/shared.pb.go @@ -0,0 +1,38 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dataproc/v1beta2/shared.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1beta2/shared.proto", fileDescriptor_shared_5c6b7853e2deb331) +} + +var fileDescriptor_shared_5c6b7853e2deb331 = []byte{ + // 157 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x8e, 0xa1, 0x0e, 0x02, 0x31, + 0x0c, 0x40, 0x83, 0x41, 0x1c, 0x0e, 0x49, 0x40, 0x60, 0x11, 0x6d, 0x00, 0x89, 0xbb, 0x04, 0x7f, + 0x09, 0x0e, 0xd7, 0xdb, 0x96, 0xb2, 0x64, 0xac, 0xcb, 0x36, 0x48, 0xf8, 0x7b, 0xc2, 0xb6, 0x93, + 0x9c, 0x6d, 0xdf, 0x7b, 0x6d, 0x77, 0x60, 0x11, 0x76, 0x06, 0x95, 0x93, 0x97, 0x46, 0x4d, 0x99, + 0x42, 0x14, 0x85, 0xef, 0xe3, 0x68, 0x32, 0x9d, 0x30, 0x3d, 0x28, 0x1a, 0x0d, 0x21, 0x4a, 0x96, + 0xf5, 0xae, 0xb2, 0x50, 0x58, 0x98, 0x58, 0x68, 0xec, 0x66, 0xdb, 0x52, 0x14, 0x2c, 0x92, 0xf7, + 0x92, 0x29, 0x5b, 0xf1, 0xa9, 0xca, 0xfd, 0xa7, 0xdb, 0x2b, 0x79, 0xc2, 0x6c, 0xa2, 0x5f, 0xdd, + 0xca, 0xbd, 0xe1, 0x67, 0x0c, 0x8b, 0xfb, 0xb5, 0xd1, 0x2c, 0x8e, 0x3c, 0x83, 0x44, 0x46, 0x36, + 0xbe, 0xf4, 0xb0, 0xae, 0x28, 0xd8, 0xf4, 0xe7, 0xf7, 0xcb, 0x34, 0x18, 0x97, 0xc5, 0x38, 0x7f, + 0x03, 0x00, 0x00, 0xff, 0xff, 0x63, 0xea, 0xb3, 0xa5, 0xec, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/workflow_templates.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/workflow_templates.pb.go new file mode 100644 index 0000000..10d5bac --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2/workflow_templates.pb.go @@ -0,0 +1,2541 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/dataproc/v1beta2/workflow_templates.proto + +package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operation state. +type WorkflowMetadata_State int32 + +const ( + // Unused. + WorkflowMetadata_UNKNOWN WorkflowMetadata_State = 0 + // The operation has been created. + WorkflowMetadata_PENDING WorkflowMetadata_State = 1 + // The operation is running. + WorkflowMetadata_RUNNING WorkflowMetadata_State = 2 + // The operation is done; either cancelled or completed. + WorkflowMetadata_DONE WorkflowMetadata_State = 3 +) + +var WorkflowMetadata_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", +} +var WorkflowMetadata_State_value = map[string]int32{ + "UNKNOWN": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, +} + +func (x WorkflowMetadata_State) String() string { + return proto.EnumName(WorkflowMetadata_State_name, int32(x)) +} +func (WorkflowMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{9, 0} +} + +// The workflow node state. +type WorkflowNode_NodeState int32 + +const ( + // State is unspecified. + WorkflowNode_NODE_STATUS_UNSPECIFIED WorkflowNode_NodeState = 0 + // The node is awaiting prerequisite node to finish. + WorkflowNode_BLOCKED WorkflowNode_NodeState = 1 + // The node is runnable but not running. + WorkflowNode_RUNNABLE WorkflowNode_NodeState = 2 + // The node is running. + WorkflowNode_RUNNING WorkflowNode_NodeState = 3 + // The node completed successfully. + WorkflowNode_COMPLETED WorkflowNode_NodeState = 4 + // The node failed. A node can be marked FAILED because + // its ancestor or peer failed. + WorkflowNode_FAILED WorkflowNode_NodeState = 5 +) + +var WorkflowNode_NodeState_name = map[int32]string{ + 0: "NODE_STATUS_UNSPECIFIED", + 1: "BLOCKED", + 2: "RUNNABLE", + 3: "RUNNING", + 4: "COMPLETED", + 5: "FAILED", +} +var WorkflowNode_NodeState_value = map[string]int32{ + "NODE_STATUS_UNSPECIFIED": 0, + "BLOCKED": 1, + "RUNNABLE": 2, + "RUNNING": 3, + "COMPLETED": 4, + "FAILED": 5, +} + +func (x WorkflowNode_NodeState) String() string { + return proto.EnumName(WorkflowNode_NodeState_name, int32(x)) +} +func (WorkflowNode_NodeState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{12, 0} +} + +// A Cloud Dataproc workflow template resource. +type WorkflowTemplate struct { + // Required. The template id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. 
Must consist of between 3 and 50 characters. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Output only. The "resource name" of the template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. Used to perform a consistent read-modify-write. + // + // This field should be left blank for a `CreateWorkflowTemplate` request. It + // is required for an `UpdateWorkflowTemplate` request, and must match the + // current server version. A typical update template flow would fetch the + // current template with a `GetWorkflowTemplate` request, which will return + // the current template with the `version` field filled in with the + // current server version. The user updates other fields in the template, + // then returns it as part of the `UpdateWorkflowTemplate` request. + Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + // Output only. The time template was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time template was last updated. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Optional. The labels to associate with this template. These labels + // will be propagated to all jobs and clusters created by the workflow + // instance. + // + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // No more than 32 labels can be associated with a template. + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. WorkflowTemplate scheduling information. + Placement *WorkflowTemplatePlacement `protobuf:"bytes,7,opt,name=placement,proto3" json:"placement,omitempty"` + // Required. The Directed Acyclic Graph of Jobs to submit. + Jobs []*OrderedJob `protobuf:"bytes,8,rep,name=jobs,proto3" json:"jobs,omitempty"` + // Optional. Template parameters whose values are substituted into the + // template. Values for parameters must be provided when the template is + // instantiated. 
+ Parameters []*TemplateParameter `protobuf:"bytes,9,rep,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTemplate) Reset() { *m = WorkflowTemplate{} } +func (m *WorkflowTemplate) String() string { return proto.CompactTextString(m) } +func (*WorkflowTemplate) ProtoMessage() {} +func (*WorkflowTemplate) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{0} +} +func (m *WorkflowTemplate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTemplate.Unmarshal(m, b) +} +func (m *WorkflowTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTemplate.Marshal(b, m, deterministic) +} +func (dst *WorkflowTemplate) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTemplate.Merge(dst, src) +} +func (m *WorkflowTemplate) XXX_Size() int { + return xxx_messageInfo_WorkflowTemplate.Size(m) +} +func (m *WorkflowTemplate) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTemplate.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTemplate proto.InternalMessageInfo + +func (m *WorkflowTemplate) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *WorkflowTemplate) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WorkflowTemplate) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowTemplate) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *WorkflowTemplate) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *WorkflowTemplate) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *WorkflowTemplate) GetPlacement() *WorkflowTemplatePlacement { + if m != nil { + return m.Placement + } + return nil +} + +func (m *WorkflowTemplate) GetJobs() []*OrderedJob { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *WorkflowTemplate) GetParameters() []*TemplateParameter { + if m != nil { + return m.Parameters + } + return nil +} + +// Specifies workflow execution target. +// +// Either `managed_cluster` or `cluster_selector` is required. +type WorkflowTemplatePlacement struct { + // Required. Specifies where workflow executes; either on a managed + // cluster or an existing cluster chosen by labels. 
+ // + // Types that are valid to be assigned to Placement: + // *WorkflowTemplatePlacement_ManagedCluster + // *WorkflowTemplatePlacement_ClusterSelector + Placement isWorkflowTemplatePlacement_Placement `protobuf_oneof:"placement"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTemplatePlacement) Reset() { *m = WorkflowTemplatePlacement{} } +func (m *WorkflowTemplatePlacement) String() string { return proto.CompactTextString(m) } +func (*WorkflowTemplatePlacement) ProtoMessage() {} +func (*WorkflowTemplatePlacement) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{1} +} +func (m *WorkflowTemplatePlacement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTemplatePlacement.Unmarshal(m, b) +} +func (m *WorkflowTemplatePlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTemplatePlacement.Marshal(b, m, deterministic) +} +func (dst *WorkflowTemplatePlacement) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTemplatePlacement.Merge(dst, src) +} +func (m *WorkflowTemplatePlacement) XXX_Size() int { + return xxx_messageInfo_WorkflowTemplatePlacement.Size(m) +} +func (m *WorkflowTemplatePlacement) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTemplatePlacement.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTemplatePlacement proto.InternalMessageInfo + +type isWorkflowTemplatePlacement_Placement interface { + isWorkflowTemplatePlacement_Placement() +} + +type WorkflowTemplatePlacement_ManagedCluster struct { + ManagedCluster *ManagedCluster `protobuf:"bytes,1,opt,name=managed_cluster,json=managedCluster,proto3,oneof"` +} + +type WorkflowTemplatePlacement_ClusterSelector struct { + ClusterSelector *ClusterSelector `protobuf:"bytes,2,opt,name=cluster_selector,json=clusterSelector,proto3,oneof"` +} + +func (*WorkflowTemplatePlacement_ManagedCluster) isWorkflowTemplatePlacement_Placement() {} + +func (*WorkflowTemplatePlacement_ClusterSelector) isWorkflowTemplatePlacement_Placement() {} + +func (m *WorkflowTemplatePlacement) GetPlacement() isWorkflowTemplatePlacement_Placement { + if m != nil { + return m.Placement + } + return nil +} + +func (m *WorkflowTemplatePlacement) GetManagedCluster() *ManagedCluster { + if x, ok := m.GetPlacement().(*WorkflowTemplatePlacement_ManagedCluster); ok { + return x.ManagedCluster + } + return nil +} + +func (m *WorkflowTemplatePlacement) GetClusterSelector() *ClusterSelector { + if x, ok := m.GetPlacement().(*WorkflowTemplatePlacement_ClusterSelector); ok { + return x.ClusterSelector + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*WorkflowTemplatePlacement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _WorkflowTemplatePlacement_OneofMarshaler, _WorkflowTemplatePlacement_OneofUnmarshaler, _WorkflowTemplatePlacement_OneofSizer, []interface{}{ + (*WorkflowTemplatePlacement_ManagedCluster)(nil), + (*WorkflowTemplatePlacement_ClusterSelector)(nil), + } +} + +func _WorkflowTemplatePlacement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*WorkflowTemplatePlacement) + // placement + switch x := m.Placement.(type) { + case *WorkflowTemplatePlacement_ManagedCluster: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ManagedCluster); err != nil { + return err + } + case *WorkflowTemplatePlacement_ClusterSelector: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClusterSelector); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("WorkflowTemplatePlacement.Placement has unexpected type %T", x) + } + return nil +} + +func _WorkflowTemplatePlacement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*WorkflowTemplatePlacement) + switch tag { + case 1: // placement.managed_cluster + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ManagedCluster) + err := b.DecodeMessage(msg) + m.Placement = &WorkflowTemplatePlacement_ManagedCluster{msg} + return true, err + case 2: // placement.cluster_selector + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClusterSelector) + err := b.DecodeMessage(msg) + m.Placement = &WorkflowTemplatePlacement_ClusterSelector{msg} + return true, err + default: + return false, nil + } +} + +func _WorkflowTemplatePlacement_OneofSizer(msg proto.Message) (n int) { + m := msg.(*WorkflowTemplatePlacement) + // placement + switch x := m.Placement.(type) { + case *WorkflowTemplatePlacement_ManagedCluster: + s := proto.Size(x.ManagedCluster) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *WorkflowTemplatePlacement_ClusterSelector: + s := proto.Size(x.ClusterSelector) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Cluster that is managed by the workflow. +type ManagedCluster struct { + // Required. The cluster name prefix. A unique cluster name will be formed by + // appending a random suffix. + // + // The name must contain only lower-case letters (a-z), numbers (0-9), + // and hyphens (-). Must begin with a letter. Cannot begin or end with + // hyphen. Must consist of between 2 and 35 characters. + ClusterName string `protobuf:"bytes,2,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Required. The cluster configuration. + Config *ClusterConfig `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` + // Optional. The labels to associate with this cluster. 
+ // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given cluster. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManagedCluster) Reset() { *m = ManagedCluster{} } +func (m *ManagedCluster) String() string { return proto.CompactTextString(m) } +func (*ManagedCluster) ProtoMessage() {} +func (*ManagedCluster) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{2} +} +func (m *ManagedCluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManagedCluster.Unmarshal(m, b) +} +func (m *ManagedCluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManagedCluster.Marshal(b, m, deterministic) +} +func (dst *ManagedCluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManagedCluster.Merge(dst, src) +} +func (m *ManagedCluster) XXX_Size() int { + return xxx_messageInfo_ManagedCluster.Size(m) +} +func (m *ManagedCluster) XXX_DiscardUnknown() { + xxx_messageInfo_ManagedCluster.DiscardUnknown(m) +} + +var xxx_messageInfo_ManagedCluster proto.InternalMessageInfo + +func (m *ManagedCluster) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ManagedCluster) GetConfig() *ClusterConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *ManagedCluster) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// A selector that chooses target cluster for jobs based on metadata. +type ClusterSelector struct { + // Optional. The zone where workflow process executes. This parameter does not + // affect the selection of the cluster. + // + // If unspecified, the zone of the first cluster matching the selector + // is used. + Zone string `protobuf:"bytes,1,opt,name=zone,proto3" json:"zone,omitempty"` + // Required. The cluster labels. Cluster must have all labels + // to match. 
+ ClusterLabels map[string]string `protobuf:"bytes,2,rep,name=cluster_labels,json=clusterLabels,proto3" json:"cluster_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterSelector) Reset() { *m = ClusterSelector{} } +func (m *ClusterSelector) String() string { return proto.CompactTextString(m) } +func (*ClusterSelector) ProtoMessage() {} +func (*ClusterSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{3} +} +func (m *ClusterSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterSelector.Unmarshal(m, b) +} +func (m *ClusterSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterSelector.Marshal(b, m, deterministic) +} +func (dst *ClusterSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterSelector.Merge(dst, src) +} +func (m *ClusterSelector) XXX_Size() int { + return xxx_messageInfo_ClusterSelector.Size(m) +} +func (m *ClusterSelector) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterSelector proto.InternalMessageInfo + +func (m *ClusterSelector) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ClusterSelector) GetClusterLabels() map[string]string { + if m != nil { + return m.ClusterLabels + } + return nil +} + +// A job executed by the workflow. +type OrderedJob struct { + // Required. The step id. The id must be unique among all jobs + // within the template. + // + // The step id is used as prefix for job id, as job + // `goog-dataproc-workflow-step-id` label, and in + // [prerequisiteStepIds][google.cloud.dataproc.v1beta2.OrderedJob.prerequisite_step_ids] + // field from other steps. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + StepId string `protobuf:"bytes,1,opt,name=step_id,json=stepId,proto3" json:"step_id,omitempty"` + // Required. The job definition. + // + // Types that are valid to be assigned to JobType: + // *OrderedJob_HadoopJob + // *OrderedJob_SparkJob + // *OrderedJob_PysparkJob + // *OrderedJob_HiveJob + // *OrderedJob_PigJob + // *OrderedJob_SparkSqlJob + JobType isOrderedJob_JobType `protobuf_oneof:"job_type"` + // Optional. The labels to associate with this job. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given job. + Labels map[string]string `protobuf:"bytes,8,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. Job scheduling configuration. + Scheduling *JobScheduling `protobuf:"bytes,9,opt,name=scheduling,proto3" json:"scheduling,omitempty"` + // Optional. The optional list of prerequisite job step_ids. + // If not specified, the job will start at the beginning of workflow. 
+ PrerequisiteStepIds []string `protobuf:"bytes,10,rep,name=prerequisite_step_ids,json=prerequisiteStepIds,proto3" json:"prerequisite_step_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrderedJob) Reset() { *m = OrderedJob{} } +func (m *OrderedJob) String() string { return proto.CompactTextString(m) } +func (*OrderedJob) ProtoMessage() {} +func (*OrderedJob) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{4} +} +func (m *OrderedJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrderedJob.Unmarshal(m, b) +} +func (m *OrderedJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrderedJob.Marshal(b, m, deterministic) +} +func (dst *OrderedJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrderedJob.Merge(dst, src) +} +func (m *OrderedJob) XXX_Size() int { + return xxx_messageInfo_OrderedJob.Size(m) +} +func (m *OrderedJob) XXX_DiscardUnknown() { + xxx_messageInfo_OrderedJob.DiscardUnknown(m) +} + +var xxx_messageInfo_OrderedJob proto.InternalMessageInfo + +func (m *OrderedJob) GetStepId() string { + if m != nil { + return m.StepId + } + return "" +} + +type isOrderedJob_JobType interface { + isOrderedJob_JobType() +} + +type OrderedJob_HadoopJob struct { + HadoopJob *HadoopJob `protobuf:"bytes,2,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"` +} + +type OrderedJob_SparkJob struct { + SparkJob *SparkJob `protobuf:"bytes,3,opt,name=spark_job,json=sparkJob,proto3,oneof"` +} + +type OrderedJob_PysparkJob struct { + PysparkJob *PySparkJob `protobuf:"bytes,4,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"` +} + +type OrderedJob_HiveJob struct { + HiveJob *HiveJob `protobuf:"bytes,5,opt,name=hive_job,json=hiveJob,proto3,oneof"` +} + +type OrderedJob_PigJob struct { + PigJob *PigJob `protobuf:"bytes,6,opt,name=pig_job,json=pigJob,proto3,oneof"` +} + +type OrderedJob_SparkSqlJob struct { + SparkSqlJob *SparkSqlJob `protobuf:"bytes,7,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"` +} + +func (*OrderedJob_HadoopJob) isOrderedJob_JobType() {} + +func (*OrderedJob_SparkJob) isOrderedJob_JobType() {} + +func (*OrderedJob_PysparkJob) isOrderedJob_JobType() {} + +func (*OrderedJob_HiveJob) isOrderedJob_JobType() {} + +func (*OrderedJob_PigJob) isOrderedJob_JobType() {} + +func (*OrderedJob_SparkSqlJob) isOrderedJob_JobType() {} + +func (m *OrderedJob) GetJobType() isOrderedJob_JobType { + if m != nil { + return m.JobType + } + return nil +} + +func (m *OrderedJob) GetHadoopJob() *HadoopJob { + if x, ok := m.GetJobType().(*OrderedJob_HadoopJob); ok { + return x.HadoopJob + } + return nil +} + +func (m *OrderedJob) GetSparkJob() *SparkJob { + if x, ok := m.GetJobType().(*OrderedJob_SparkJob); ok { + return x.SparkJob + } + return nil +} + +func (m *OrderedJob) GetPysparkJob() *PySparkJob { + if x, ok := m.GetJobType().(*OrderedJob_PysparkJob); ok { + return x.PysparkJob + } + return nil +} + +func (m *OrderedJob) GetHiveJob() *HiveJob { + if x, ok := m.GetJobType().(*OrderedJob_HiveJob); ok { + return x.HiveJob + } + return nil +} + +func (m *OrderedJob) GetPigJob() *PigJob { + if x, ok := m.GetJobType().(*OrderedJob_PigJob); ok { + return x.PigJob + } + return nil +} + +func (m *OrderedJob) GetSparkSqlJob() *SparkSqlJob { + if x, ok := m.GetJobType().(*OrderedJob_SparkSqlJob); ok { + return x.SparkSqlJob + } + return nil +} + +func (m *OrderedJob) GetLabels() map[string]string { + if m != nil { + 
return m.Labels + } + return nil +} + +func (m *OrderedJob) GetScheduling() *JobScheduling { + if m != nil { + return m.Scheduling + } + return nil +} + +func (m *OrderedJob) GetPrerequisiteStepIds() []string { + if m != nil { + return m.PrerequisiteStepIds + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*OrderedJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OrderedJob_OneofMarshaler, _OrderedJob_OneofUnmarshaler, _OrderedJob_OneofSizer, []interface{}{ + (*OrderedJob_HadoopJob)(nil), + (*OrderedJob_SparkJob)(nil), + (*OrderedJob_PysparkJob)(nil), + (*OrderedJob_HiveJob)(nil), + (*OrderedJob_PigJob)(nil), + (*OrderedJob_SparkSqlJob)(nil), + } +} + +func _OrderedJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OrderedJob) + // job_type + switch x := m.JobType.(type) { + case *OrderedJob_HadoopJob: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HadoopJob); err != nil { + return err + } + case *OrderedJob_SparkJob: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkJob); err != nil { + return err + } + case *OrderedJob_PysparkJob: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PysparkJob); err != nil { + return err + } + case *OrderedJob_HiveJob: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HiveJob); err != nil { + return err + } + case *OrderedJob_PigJob: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PigJob); err != nil { + return err + } + case *OrderedJob_SparkSqlJob: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SparkSqlJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OrderedJob.JobType has unexpected type %T", x) + } + return nil +} + +func _OrderedJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OrderedJob) + switch tag { + case 2: // job_type.hadoop_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HadoopJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_HadoopJob{msg} + return true, err + case 3: // job_type.spark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_SparkJob{msg} + return true, err + case 4: // job_type.pyspark_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PySparkJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_PysparkJob{msg} + return true, err + case 5: // job_type.hive_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HiveJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_HiveJob{msg} + return true, err + case 6: // job_type.pig_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PigJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_PigJob{msg} + return true, err + case 7: // job_type.spark_sql_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SparkSqlJob) + err := b.DecodeMessage(msg) + m.JobType = &OrderedJob_SparkSqlJob{msg} + return true, err + default: + return false, nil + } +} + +func _OrderedJob_OneofSizer(msg proto.Message) (n 
int) { + m := msg.(*OrderedJob) + // job_type + switch x := m.JobType.(type) { + case *OrderedJob_HadoopJob: + s := proto.Size(x.HadoopJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_SparkJob: + s := proto.Size(x.SparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_PysparkJob: + s := proto.Size(x.PysparkJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_HiveJob: + s := proto.Size(x.HiveJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_PigJob: + s := proto.Size(x.PigJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *OrderedJob_SparkSqlJob: + s := proto.Size(x.SparkSqlJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A configurable parameter that replaces one or more fields in the template. +// Parameterizable fields: +// - Labels +// - File uris +// - Job properties +// - Job arguments +// - Script variables +// - Main class (in HadoopJob and SparkJob) +// - Zone (in ClusterSelector) +type TemplateParameter struct { + // Required. Parameter name. + // The parameter name is used as the key, and paired with the + // parameter value, which are passed to the template when the template + // is instantiated. + // The name must contain only capital letters (A-Z), numbers (0-9), and + // underscores (_), and must not start with a number. The maximum length is + // 40 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Paths to all fields that the parameter replaces. + // A field is allowed to appear in at most one parameter's list of field + // paths. + // + // A field path is similar in syntax to a + // [google.protobuf.FieldMask][google.protobuf.FieldMask]. For example, a + // field path that references the zone field of a workflow template's cluster + // selector would be specified as `placement.clusterSelector.zone`. + // + // Also, field paths can reference fields using the following syntax: + // + // * Values in maps can be referenced by key: + // * labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * placement.managedCluster.labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * jobs['step-id'].labels['key'] + // + // * Jobs in the jobs list can be referenced by step-id: + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * jobs['step-id'].hiveJob.queryFileUri + // * jobs['step-id'].pySparkJob.mainPythonFileUri + // * jobs['step-id'].hadoopJob.jarFileUris[0] + // * jobs['step-id'].hadoopJob.archiveUris[0] + // * jobs['step-id'].hadoopJob.fileUris[0] + // * jobs['step-id'].pySparkJob.pythonFileUris[0] + // + // * Items in repeated fields can be referenced by a zero-based index: + // * jobs['step-id'].sparkJob.args[0] + // + // * Other examples: + // * jobs['step-id'].hadoopJob.properties['key'] + // * jobs['step-id'].hadoopJob.args[0] + // * jobs['step-id'].hiveJob.scriptVariables['key'] + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * placement.clusterSelector.zone + // + // It may not be possible to parameterize maps and repeated fields in their + // entirety since only individual map values and individual items in repeated + // fields can be referenced. 
For example, the following field paths are + // invalid: + // + // - placement.clusterSelector.clusterLabels + // - jobs['step-id'].sparkJob.args + Fields []string `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + // Optional. Brief description of the parameter. + // Must not exceed 1024 characters. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. Validation rules to be applied to this parameter's value. + Validation *ParameterValidation `protobuf:"bytes,4,opt,name=validation,proto3" json:"validation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TemplateParameter) Reset() { *m = TemplateParameter{} } +func (m *TemplateParameter) String() string { return proto.CompactTextString(m) } +func (*TemplateParameter) ProtoMessage() {} +func (*TemplateParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{5} +} +func (m *TemplateParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TemplateParameter.Unmarshal(m, b) +} +func (m *TemplateParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TemplateParameter.Marshal(b, m, deterministic) +} +func (dst *TemplateParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_TemplateParameter.Merge(dst, src) +} +func (m *TemplateParameter) XXX_Size() int { + return xxx_messageInfo_TemplateParameter.Size(m) +} +func (m *TemplateParameter) XXX_DiscardUnknown() { + xxx_messageInfo_TemplateParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_TemplateParameter proto.InternalMessageInfo + +func (m *TemplateParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TemplateParameter) GetFields() []string { + if m != nil { + return m.Fields + } + return nil +} + +func (m *TemplateParameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TemplateParameter) GetValidation() *ParameterValidation { + if m != nil { + return m.Validation + } + return nil +} + +// Configuration for parameter validation. +type ParameterValidation struct { + // Required. The type of validation to be performed. 
+ // + // Types that are valid to be assigned to ValidationType: + // *ParameterValidation_Regex + // *ParameterValidation_Values + ValidationType isParameterValidation_ValidationType `protobuf_oneof:"validation_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParameterValidation) Reset() { *m = ParameterValidation{} } +func (m *ParameterValidation) String() string { return proto.CompactTextString(m) } +func (*ParameterValidation) ProtoMessage() {} +func (*ParameterValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{6} +} +func (m *ParameterValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParameterValidation.Unmarshal(m, b) +} +func (m *ParameterValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParameterValidation.Marshal(b, m, deterministic) +} +func (dst *ParameterValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParameterValidation.Merge(dst, src) +} +func (m *ParameterValidation) XXX_Size() int { + return xxx_messageInfo_ParameterValidation.Size(m) +} +func (m *ParameterValidation) XXX_DiscardUnknown() { + xxx_messageInfo_ParameterValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_ParameterValidation proto.InternalMessageInfo + +type isParameterValidation_ValidationType interface { + isParameterValidation_ValidationType() +} + +type ParameterValidation_Regex struct { + Regex *RegexValidation `protobuf:"bytes,1,opt,name=regex,proto3,oneof"` +} + +type ParameterValidation_Values struct { + Values *ValueValidation `protobuf:"bytes,2,opt,name=values,proto3,oneof"` +} + +func (*ParameterValidation_Regex) isParameterValidation_ValidationType() {} + +func (*ParameterValidation_Values) isParameterValidation_ValidationType() {} + +func (m *ParameterValidation) GetValidationType() isParameterValidation_ValidationType { + if m != nil { + return m.ValidationType + } + return nil +} + +func (m *ParameterValidation) GetRegex() *RegexValidation { + if x, ok := m.GetValidationType().(*ParameterValidation_Regex); ok { + return x.Regex + } + return nil +} + +func (m *ParameterValidation) GetValues() *ValueValidation { + if x, ok := m.GetValidationType().(*ParameterValidation_Values); ok { + return x.Values + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ParameterValidation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ParameterValidation_OneofMarshaler, _ParameterValidation_OneofUnmarshaler, _ParameterValidation_OneofSizer, []interface{}{ + (*ParameterValidation_Regex)(nil), + (*ParameterValidation_Values)(nil), + } +} + +func _ParameterValidation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ParameterValidation) + // validation_type + switch x := m.ValidationType.(type) { + case *ParameterValidation_Regex: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Regex); err != nil { + return err + } + case *ParameterValidation_Values: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Values); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ParameterValidation.ValidationType has unexpected type %T", x) + } + return nil +} + +func _ParameterValidation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ParameterValidation) + switch tag { + case 1: // validation_type.regex + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RegexValidation) + err := b.DecodeMessage(msg) + m.ValidationType = &ParameterValidation_Regex{msg} + return true, err + case 2: // validation_type.values + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ValueValidation) + err := b.DecodeMessage(msg) + m.ValidationType = &ParameterValidation_Values{msg} + return true, err + default: + return false, nil + } +} + +func _ParameterValidation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ParameterValidation) + // validation_type + switch x := m.ValidationType.(type) { + case *ParameterValidation_Regex: + s := proto.Size(x.Regex) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ParameterValidation_Values: + s := proto.Size(x.Values) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Validation based on regular expressions. +type RegexValidation struct { + // Required. RE2 regular expressions used to validate the parameter's value. + // The value must match the regex in its entirety (substring + // matches are not sufficient). 
+ Regexes []string `protobuf:"bytes,1,rep,name=regexes,proto3" json:"regexes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegexValidation) Reset() { *m = RegexValidation{} } +func (m *RegexValidation) String() string { return proto.CompactTextString(m) } +func (*RegexValidation) ProtoMessage() {} +func (*RegexValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{7} +} +func (m *RegexValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegexValidation.Unmarshal(m, b) +} +func (m *RegexValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegexValidation.Marshal(b, m, deterministic) +} +func (dst *RegexValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegexValidation.Merge(dst, src) +} +func (m *RegexValidation) XXX_Size() int { + return xxx_messageInfo_RegexValidation.Size(m) +} +func (m *RegexValidation) XXX_DiscardUnknown() { + xxx_messageInfo_RegexValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_RegexValidation proto.InternalMessageInfo + +func (m *RegexValidation) GetRegexes() []string { + if m != nil { + return m.Regexes + } + return nil +} + +// Validation based on a list of allowed values. +type ValueValidation struct { + // Required. List of allowed values for the parameter. + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueValidation) Reset() { *m = ValueValidation{} } +func (m *ValueValidation) String() string { return proto.CompactTextString(m) } +func (*ValueValidation) ProtoMessage() {} +func (*ValueValidation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{8} +} +func (m *ValueValidation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueValidation.Unmarshal(m, b) +} +func (m *ValueValidation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueValidation.Marshal(b, m, deterministic) +} +func (dst *ValueValidation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueValidation.Merge(dst, src) +} +func (m *ValueValidation) XXX_Size() int { + return xxx_messageInfo_ValueValidation.Size(m) +} +func (m *ValueValidation) XXX_DiscardUnknown() { + xxx_messageInfo_ValueValidation.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueValidation proto.InternalMessageInfo + +func (m *ValueValidation) GetValues() []string { + if m != nil { + return m.Values + } + return nil +} + +// A Cloud Dataproc workflow template resource. +type WorkflowMetadata struct { + // Output only. The "resource name" of the template. + Template string `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + // Output only. The version of template at the time of + // workflow instantiation. + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // Output only. The create cluster operation metadata. + CreateCluster *ClusterOperation `protobuf:"bytes,3,opt,name=create_cluster,json=createCluster,proto3" json:"create_cluster,omitempty"` + // Output only. The workflow graph. + Graph *WorkflowGraph `protobuf:"bytes,4,opt,name=graph,proto3" json:"graph,omitempty"` + // Output only. The delete cluster operation metadata. 
+ DeleteCluster *ClusterOperation `protobuf:"bytes,5,opt,name=delete_cluster,json=deleteCluster,proto3" json:"delete_cluster,omitempty"` + // Output only. The workflow state. + State WorkflowMetadata_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.WorkflowMetadata_State" json:"state,omitempty"` + // Output only. The name of the target cluster. + ClusterName string `protobuf:"bytes,7,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + // Map from parameter names to values that were used for those parameters. + Parameters map[string]string `protobuf:"bytes,8,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Workflow start time. + StartTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Workflow end time. + EndTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The UUID of target cluster. + ClusterUuid string `protobuf:"bytes,11,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowMetadata) Reset() { *m = WorkflowMetadata{} } +func (m *WorkflowMetadata) String() string { return proto.CompactTextString(m) } +func (*WorkflowMetadata) ProtoMessage() {} +func (*WorkflowMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{9} +} +func (m *WorkflowMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowMetadata.Unmarshal(m, b) +} +func (m *WorkflowMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowMetadata.Marshal(b, m, deterministic) +} +func (dst *WorkflowMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowMetadata.Merge(dst, src) +} +func (m *WorkflowMetadata) XXX_Size() int { + return xxx_messageInfo_WorkflowMetadata.Size(m) +} +func (m *WorkflowMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowMetadata proto.InternalMessageInfo + +func (m *WorkflowMetadata) GetTemplate() string { + if m != nil { + return m.Template + } + return "" +} + +func (m *WorkflowMetadata) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowMetadata) GetCreateCluster() *ClusterOperation { + if m != nil { + return m.CreateCluster + } + return nil +} + +func (m *WorkflowMetadata) GetGraph() *WorkflowGraph { + if m != nil { + return m.Graph + } + return nil +} + +func (m *WorkflowMetadata) GetDeleteCluster() *ClusterOperation { + if m != nil { + return m.DeleteCluster + } + return nil +} + +func (m *WorkflowMetadata) GetState() WorkflowMetadata_State { + if m != nil { + return m.State + } + return WorkflowMetadata_UNKNOWN +} + +func (m *WorkflowMetadata) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *WorkflowMetadata) GetParameters() map[string]string { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *WorkflowMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WorkflowMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + 
return nil +} + +func (m *WorkflowMetadata) GetClusterUuid() string { + if m != nil { + return m.ClusterUuid + } + return "" +} + +// The cluster operation triggered by a workflow. +type ClusterOperation struct { + // Output only. The id of the cluster operation. + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // Output only. Error, if operation failed. + Error string `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` + // Output only. Indicates the operation is done. + Done bool `protobuf:"varint,3,opt,name=done,proto3" json:"done,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterOperation) Reset() { *m = ClusterOperation{} } +func (m *ClusterOperation) String() string { return proto.CompactTextString(m) } +func (*ClusterOperation) ProtoMessage() {} +func (*ClusterOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{10} +} +func (m *ClusterOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterOperation.Unmarshal(m, b) +} +func (m *ClusterOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterOperation.Marshal(b, m, deterministic) +} +func (dst *ClusterOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterOperation.Merge(dst, src) +} +func (m *ClusterOperation) XXX_Size() int { + return xxx_messageInfo_ClusterOperation.Size(m) +} +func (m *ClusterOperation) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterOperation proto.InternalMessageInfo + +func (m *ClusterOperation) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *ClusterOperation) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +func (m *ClusterOperation) GetDone() bool { + if m != nil { + return m.Done + } + return false +} + +// The workflow graph. +type WorkflowGraph struct { + // Output only. The workflow nodes. + Nodes []*WorkflowNode `protobuf:"bytes,1,rep,name=nodes,proto3" json:"nodes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowGraph) Reset() { *m = WorkflowGraph{} } +func (m *WorkflowGraph) String() string { return proto.CompactTextString(m) } +func (*WorkflowGraph) ProtoMessage() {} +func (*WorkflowGraph) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{11} +} +func (m *WorkflowGraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowGraph.Unmarshal(m, b) +} +func (m *WorkflowGraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowGraph.Marshal(b, m, deterministic) +} +func (dst *WorkflowGraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowGraph.Merge(dst, src) +} +func (m *WorkflowGraph) XXX_Size() int { + return xxx_messageInfo_WorkflowGraph.Size(m) +} +func (m *WorkflowGraph) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowGraph.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowGraph proto.InternalMessageInfo + +func (m *WorkflowGraph) GetNodes() []*WorkflowNode { + if m != nil { + return m.Nodes + } + return nil +} + +// The workflow node. +type WorkflowNode struct { + // Output only. The name of the node. 
+ StepId string `protobuf:"bytes,1,opt,name=step_id,json=stepId,proto3" json:"step_id,omitempty"` + // Output only. Node's prerequisite nodes. + PrerequisiteStepIds []string `protobuf:"bytes,2,rep,name=prerequisite_step_ids,json=prerequisiteStepIds,proto3" json:"prerequisite_step_ids,omitempty"` + // Output only. The job id; populated after the node enters RUNNING state. + JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Output only. The node state. + State WorkflowNode_NodeState `protobuf:"varint,5,opt,name=state,proto3,enum=google.cloud.dataproc.v1beta2.WorkflowNode_NodeState" json:"state,omitempty"` + // Output only. The error detail. + Error string `protobuf:"bytes,6,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowNode) Reset() { *m = WorkflowNode{} } +func (m *WorkflowNode) String() string { return proto.CompactTextString(m) } +func (*WorkflowNode) ProtoMessage() {} +func (*WorkflowNode) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{12} +} +func (m *WorkflowNode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowNode.Unmarshal(m, b) +} +func (m *WorkflowNode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowNode.Marshal(b, m, deterministic) +} +func (dst *WorkflowNode) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowNode.Merge(dst, src) +} +func (m *WorkflowNode) XXX_Size() int { + return xxx_messageInfo_WorkflowNode.Size(m) +} +func (m *WorkflowNode) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowNode.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowNode proto.InternalMessageInfo + +func (m *WorkflowNode) GetStepId() string { + if m != nil { + return m.StepId + } + return "" +} + +func (m *WorkflowNode) GetPrerequisiteStepIds() []string { + if m != nil { + return m.PrerequisiteStepIds + } + return nil +} + +func (m *WorkflowNode) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +func (m *WorkflowNode) GetState() WorkflowNode_NodeState { + if m != nil { + return m.State + } + return WorkflowNode_NODE_STATUS_UNSPECIFIED +} + +func (m *WorkflowNode) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +// A request to create a workflow template. +type CreateWorkflowTemplateRequest struct { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The Dataproc workflow template to create. 
+ Template *WorkflowTemplate `protobuf:"bytes,2,opt,name=template,proto3" json:"template,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowTemplateRequest) Reset() { *m = CreateWorkflowTemplateRequest{} } +func (m *CreateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowTemplateRequest) ProtoMessage() {} +func (*CreateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{13} +} +func (m *CreateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *CreateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *CreateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowTemplateRequest.Size(m) +} +func (m *CreateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *CreateWorkflowTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +// A request to fetch a workflow template. +type GetWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to retrieve. Only previously + // instatiated versions can be retrieved. + // + // If unspecified, retrieves the current version. 
+ Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowTemplateRequest) Reset() { *m = GetWorkflowTemplateRequest{} } +func (m *GetWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowTemplateRequest) ProtoMessage() {} +func (*GetWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{14} +} +func (m *GetWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *GetWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowTemplateRequest.Merge(dst, src) +} +func (m *GetWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowTemplateRequest.Size(m) +} +func (m *GetWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *GetWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +// A request to instantiate a workflow template. +type InstantiateWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to instantiate. If specified, + // the workflow will be instantiated only if the current version of + // the workflow template has the supplied version. + // + // This option cannot be used to instantiate a previous version of + // workflow template. + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + // Deprecated. Please use `request_id` field instead. + InstanceId string `protobuf:"bytes,3,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` // Deprecated: Do not use. + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + RequestId string `protobuf:"bytes,5,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Optional. Map from parameter names to values that should be used for those + // parameters. Values may not exceed 100 characters. 
+ Parameters map[string]string `protobuf:"bytes,4,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstantiateWorkflowTemplateRequest) Reset() { *m = InstantiateWorkflowTemplateRequest{} } +func (m *InstantiateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*InstantiateWorkflowTemplateRequest) ProtoMessage() {} +func (*InstantiateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{15} +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *InstantiateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstantiateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_InstantiateWorkflowTemplateRequest.Size(m) +} +func (m *InstantiateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InstantiateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InstantiateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *InstantiateWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InstantiateWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +// Deprecated: Do not use. +func (m *InstantiateWorkflowTemplateRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *InstantiateWorkflowTemplateRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *InstantiateWorkflowTemplateRequest) GetParameters() map[string]string { + if m != nil { + return m.Parameters + } + return nil +} + +// A request to instantiate an inline workflow template. +type InstantiateInlineWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The workflow template to instantiate. + Template *WorkflowTemplate `protobuf:"bytes,2,opt,name=template,proto3" json:"template,omitempty"` + // Deprecated. Please use `request_id` field instead. + InstanceId string `protobuf:"bytes,3,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstantiateInlineWorkflowTemplateRequest) Reset() { + *m = InstantiateInlineWorkflowTemplateRequest{} +} +func (m *InstantiateInlineWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*InstantiateInlineWorkflowTemplateRequest) ProtoMessage() {} +func (*InstantiateInlineWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{16} +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *InstantiateInlineWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Merge(dst, src) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.Size(m) +} +func (m *InstantiateInlineWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InstantiateInlineWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *InstantiateInlineWorkflowTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *InstantiateInlineWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +func (m *InstantiateInlineWorkflowTemplateRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *InstantiateInlineWorkflowTemplateRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// A request to update a workflow template. +type UpdateWorkflowTemplateRequest struct { + // Required. The updated workflow template. + // + // The `template.version` field must match the current version. 
+ Template *WorkflowTemplate `protobuf:"bytes,1,opt,name=template,proto3" json:"template,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowTemplateRequest) Reset() { *m = UpdateWorkflowTemplateRequest{} } +func (m *UpdateWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowTemplateRequest) ProtoMessage() {} +func (*UpdateWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{17} +} +func (m *UpdateWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *UpdateWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowTemplateRequest.Merge(dst, src) +} +func (m *UpdateWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowTemplateRequest.Size(m) +} +func (m *UpdateWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *UpdateWorkflowTemplateRequest) GetTemplate() *WorkflowTemplate { + if m != nil { + return m.Template + } + return nil +} + +// A request to list workflow templates in a project. +type ListWorkflowTemplatesRequest struct { + // Required. The "resource name" of the region, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of results to return in each response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The page token, returned by a previous call, to request the + // next page of results. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWorkflowTemplatesRequest) Reset() { *m = ListWorkflowTemplatesRequest{} } +func (m *ListWorkflowTemplatesRequest) String() string { return proto.CompactTextString(m) } +func (*ListWorkflowTemplatesRequest) ProtoMessage() {} +func (*ListWorkflowTemplatesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{18} +} +func (m *ListWorkflowTemplatesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Unmarshal(m, b) +} +func (m *ListWorkflowTemplatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Marshal(b, m, deterministic) +} +func (dst *ListWorkflowTemplatesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWorkflowTemplatesRequest.Merge(dst, src) +} +func (m *ListWorkflowTemplatesRequest) XXX_Size() int { + return xxx_messageInfo_ListWorkflowTemplatesRequest.Size(m) +} +func (m *ListWorkflowTemplatesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListWorkflowTemplatesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWorkflowTemplatesRequest proto.InternalMessageInfo + +func (m *ListWorkflowTemplatesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListWorkflowTemplatesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListWorkflowTemplatesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// A response to a request to list workflow templates in a project. +type ListWorkflowTemplatesResponse struct { + // Output only. WorkflowTemplates list. + Templates []*WorkflowTemplate `protobuf:"bytes,1,rep,name=templates,proto3" json:"templates,omitempty"` + // Output only. This token is included in the response if there are more + // results to fetch. To fetch additional results, provide this value as the + // page_token in a subsequent ListWorkflowTemplatesRequest. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWorkflowTemplatesResponse) Reset() { *m = ListWorkflowTemplatesResponse{} } +func (m *ListWorkflowTemplatesResponse) String() string { return proto.CompactTextString(m) } +func (*ListWorkflowTemplatesResponse) ProtoMessage() {} +func (*ListWorkflowTemplatesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{19} +} +func (m *ListWorkflowTemplatesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Unmarshal(m, b) +} +func (m *ListWorkflowTemplatesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Marshal(b, m, deterministic) +} +func (dst *ListWorkflowTemplatesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWorkflowTemplatesResponse.Merge(dst, src) +} +func (m *ListWorkflowTemplatesResponse) XXX_Size() int { + return xxx_messageInfo_ListWorkflowTemplatesResponse.Size(m) +} +func (m *ListWorkflowTemplatesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListWorkflowTemplatesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWorkflowTemplatesResponse proto.InternalMessageInfo + +func (m *ListWorkflowTemplatesResponse) GetTemplates() []*WorkflowTemplate { + if m != nil { + return m.Templates + } + return nil +} + +func (m *ListWorkflowTemplatesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to delete a workflow template. +// +// Currently started workflows will remain running. +type DeleteWorkflowTemplateRequest struct { + // Required. The "resource name" of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names of the form + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The version of workflow template to delete. If specified, + // will only delete the template if the current server version matches + // specified version. 
+ Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteWorkflowTemplateRequest) Reset() { *m = DeleteWorkflowTemplateRequest{} } +func (m *DeleteWorkflowTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteWorkflowTemplateRequest) ProtoMessage() {} +func (*DeleteWorkflowTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_templates_fbee3ed8540fe470, []int{20} +} +func (m *DeleteWorkflowTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Unmarshal(m, b) +} +func (m *DeleteWorkflowTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteWorkflowTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteWorkflowTemplateRequest.Merge(dst, src) +} +func (m *DeleteWorkflowTemplateRequest) XXX_Size() int { + return xxx_messageInfo_DeleteWorkflowTemplateRequest.Size(m) +} +func (m *DeleteWorkflowTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteWorkflowTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteWorkflowTemplateRequest proto.InternalMessageInfo + +func (m *DeleteWorkflowTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteWorkflowTemplateRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func init() { + proto.RegisterType((*WorkflowTemplate)(nil), "google.cloud.dataproc.v1beta2.WorkflowTemplate") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.WorkflowTemplate.LabelsEntry") + proto.RegisterType((*WorkflowTemplatePlacement)(nil), "google.cloud.dataproc.v1beta2.WorkflowTemplatePlacement") + proto.RegisterType((*ManagedCluster)(nil), "google.cloud.dataproc.v1beta2.ManagedCluster") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.ManagedCluster.LabelsEntry") + proto.RegisterType((*ClusterSelector)(nil), "google.cloud.dataproc.v1beta2.ClusterSelector") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.ClusterSelector.ClusterLabelsEntry") + proto.RegisterType((*OrderedJob)(nil), "google.cloud.dataproc.v1beta2.OrderedJob") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry") + proto.RegisterType((*TemplateParameter)(nil), "google.cloud.dataproc.v1beta2.TemplateParameter") + proto.RegisterType((*ParameterValidation)(nil), "google.cloud.dataproc.v1beta2.ParameterValidation") + proto.RegisterType((*RegexValidation)(nil), "google.cloud.dataproc.v1beta2.RegexValidation") + proto.RegisterType((*ValueValidation)(nil), "google.cloud.dataproc.v1beta2.ValueValidation") + proto.RegisterType((*WorkflowMetadata)(nil), "google.cloud.dataproc.v1beta2.WorkflowMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry") + proto.RegisterType((*ClusterOperation)(nil), "google.cloud.dataproc.v1beta2.ClusterOperation") + proto.RegisterType((*WorkflowGraph)(nil), "google.cloud.dataproc.v1beta2.WorkflowGraph") + proto.RegisterType((*WorkflowNode)(nil), "google.cloud.dataproc.v1beta2.WorkflowNode") + proto.RegisterType((*CreateWorkflowTemplateRequest)(nil), 
"google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest") + proto.RegisterType((*GetWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest") + proto.RegisterType((*InstantiateWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry") + proto.RegisterType((*InstantiateInlineWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest") + proto.RegisterType((*UpdateWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest") + proto.RegisterType((*ListWorkflowTemplatesRequest)(nil), "google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest") + proto.RegisterType((*ListWorkflowTemplatesResponse)(nil), "google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse") + proto.RegisterType((*DeleteWorkflowTemplateRequest)(nil), "google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest") + proto.RegisterEnum("google.cloud.dataproc.v1beta2.WorkflowMetadata_State", WorkflowMetadata_State_name, WorkflowMetadata_State_value) + proto.RegisterEnum("google.cloud.dataproc.v1beta2.WorkflowNode_NodeState", WorkflowNode_NodeState_name, WorkflowNode_NodeState_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WorkflowTemplateServiceClient is the client API for WorkflowTemplateService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WorkflowTemplateServiceClient interface { + // Creates new workflow template. + CreateWorkflowTemplate(ctx context.Context, in *CreateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + GetWorkflowTemplate(ctx context.Context, in *GetWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Instantiates a template and begins execution. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateWorkflowTemplate(ctx context.Context, in *InstantiateWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Instantiates a template and begins execution. 
+ // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + // [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateInlineWorkflowTemplate(ctx context.Context, in *InstantiateInlineWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. + UpdateWorkflowTemplate(ctx context.Context, in *UpdateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) + // Lists workflows that match the specified filter in the request. + ListWorkflowTemplates(ctx context.Context, in *ListWorkflowTemplatesRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesResponse, error) + // Deletes a workflow template. It does not cancel in-progress workflows. + DeleteWorkflowTemplate(ctx context.Context, in *DeleteWorkflowTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type workflowTemplateServiceClient struct { + cc *grpc.ClientConn +} + +func NewWorkflowTemplateServiceClient(cc *grpc.ClientConn) WorkflowTemplateServiceClient { + return &workflowTemplateServiceClient{cc} +} + +func (c *workflowTemplateServiceClient) CreateWorkflowTemplate(ctx context.Context, in *CreateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) GetWorkflowTemplate(ctx context.Context, in *GetWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) InstantiateWorkflowTemplate(ctx context.Context, in *InstantiateWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) InstantiateInlineWorkflowTemplate(ctx context.Context, in *InstantiateInlineWorkflowTemplateRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) UpdateWorkflowTemplate(ctx context.Context, in *UpdateWorkflowTemplateRequest, opts ...grpc.CallOption) (*WorkflowTemplate, error) { + out := new(WorkflowTemplate) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/UpdateWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) ListWorkflowTemplates(ctx context.Context, in *ListWorkflowTemplatesRequest, opts ...grpc.CallOption) (*ListWorkflowTemplatesResponse, error) { + out := new(ListWorkflowTemplatesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/ListWorkflowTemplates", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowTemplateServiceClient) DeleteWorkflowTemplate(ctx context.Context, in *DeleteWorkflowTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkflowTemplateServiceServer is the server API for WorkflowTemplateService service. +type WorkflowTemplateServiceServer interface { + // Creates new workflow template. + CreateWorkflowTemplate(context.Context, *CreateWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + GetWorkflowTemplate(context.Context, *GetWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Instantiates a template and begins execution. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateWorkflowTemplate(context.Context, *InstantiateWorkflowTemplateRequest) (*longrunning.Operation, error) + // Instantiates a template and begins execution. 
+ // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], + // [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate]. + // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + InstantiateInlineWorkflowTemplate(context.Context, *InstantiateInlineWorkflowTemplateRequest) (*longrunning.Operation, error) + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. + UpdateWorkflowTemplate(context.Context, *UpdateWorkflowTemplateRequest) (*WorkflowTemplate, error) + // Lists workflows that match the specified filter in the request. + ListWorkflowTemplates(context.Context, *ListWorkflowTemplatesRequest) (*ListWorkflowTemplatesResponse, error) + // Deletes a workflow template. It does not cancel in-progress workflows. + DeleteWorkflowTemplate(context.Context, *DeleteWorkflowTemplateRequest) (*empty.Empty, error) +} + +func RegisterWorkflowTemplateServiceServer(s *grpc.Server, srv WorkflowTemplateServiceServer) { + s.RegisterService(&_WorkflowTemplateService_serviceDesc, srv) +} + +func _WorkflowTemplateService_CreateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).CreateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).CreateWorkflowTemplate(ctx, req.(*CreateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_GetWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).GetWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).GetWorkflowTemplate(ctx, req.(*GetWorkflowTemplateRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_InstantiateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InstantiateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).InstantiateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).InstantiateWorkflowTemplate(ctx, req.(*InstantiateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_InstantiateInlineWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InstantiateInlineWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).InstantiateInlineWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).InstantiateInlineWorkflowTemplate(ctx, req.(*InstantiateInlineWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_UpdateWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateWorkflowTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).UpdateWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/UpdateWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).UpdateWorkflowTemplate(ctx, req.(*UpdateWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_ListWorkflowTemplates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListWorkflowTemplatesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplates(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/ListWorkflowTemplates", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).ListWorkflowTemplates(ctx, req.(*ListWorkflowTemplatesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowTemplateService_DeleteWorkflowTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteWorkflowTemplateRequest) + if err := dec(in); 
err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowTemplateServiceServer).DeleteWorkflowTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowTemplateServiceServer).DeleteWorkflowTemplate(ctx, req.(*DeleteWorkflowTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkflowTemplateService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dataproc.v1beta2.WorkflowTemplateService", + HandlerType: (*WorkflowTemplateServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateWorkflowTemplate", + Handler: _WorkflowTemplateService_CreateWorkflowTemplate_Handler, + }, + { + MethodName: "GetWorkflowTemplate", + Handler: _WorkflowTemplateService_GetWorkflowTemplate_Handler, + }, + { + MethodName: "InstantiateWorkflowTemplate", + Handler: _WorkflowTemplateService_InstantiateWorkflowTemplate_Handler, + }, + { + MethodName: "InstantiateInlineWorkflowTemplate", + Handler: _WorkflowTemplateService_InstantiateInlineWorkflowTemplate_Handler, + }, + { + MethodName: "UpdateWorkflowTemplate", + Handler: _WorkflowTemplateService_UpdateWorkflowTemplate_Handler, + }, + { + MethodName: "ListWorkflowTemplates", + Handler: _WorkflowTemplateService_ListWorkflowTemplates_Handler, + }, + { + MethodName: "DeleteWorkflowTemplate", + Handler: _WorkflowTemplateService_DeleteWorkflowTemplate_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dataproc/v1beta2/workflow_templates.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dataproc/v1beta2/workflow_templates.proto", fileDescriptor_workflow_templates_fbee3ed8540fe470) +} + +var fileDescriptor_workflow_templates_fbee3ed8540fe470 = []byte{ + // 2038 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcf, 0x6f, 0xe3, 0x58, + 0x1d, 0xaf, 0xdd, 0x26, 0x4d, 0xbe, 0x99, 0x36, 0xd9, 0x37, 0x4c, 0x27, 0x64, 0xb6, 0xda, 0x8e, + 0x57, 0x2c, 0xdd, 0xee, 0x90, 0x40, 0xd0, 0xa2, 0x99, 0xce, 0x8c, 0xd8, 0xa6, 0x49, 0xa7, 0x69, + 0xd3, 0x34, 0xeb, 0xb4, 0x5d, 0x34, 0x1c, 0x22, 0x27, 0x7e, 0x4d, 0xdd, 0x3a, 0xb6, 0x6b, 0x3b, + 0xdd, 0xe9, 0xa0, 0x45, 0x02, 0xad, 0xc4, 0x85, 0x13, 0x2b, 0xed, 0x05, 0x2d, 0x9c, 0x38, 0xc3, + 0x05, 0x21, 0x71, 0x04, 0x21, 0x71, 0x43, 0x88, 0x3f, 0x60, 0x2e, 0x9c, 0xb9, 0x72, 0x45, 0xef, + 0x87, 0x1d, 0xe7, 0xa7, 0x93, 0x16, 0x69, 0x2f, 0x95, 0xdf, 0xcb, 0xfb, 0x7c, 0xde, 0xf7, 0xf7, + 0xf7, 0x6b, 0x17, 0x7e, 0xd0, 0x36, 0xcd, 0xb6, 0x8e, 0x73, 0x2d, 0xdd, 0xec, 0xaa, 0x39, 0x55, + 0x71, 0x15, 0xcb, 0x36, 0x5b, 0xb9, 0xab, 0xef, 0x35, 0xb1, 0xab, 0xe4, 0x73, 0x9f, 0x9a, 0xf6, + 0xc5, 0xa9, 0x6e, 0x7e, 0xda, 0x70, 0x71, 0xc7, 0xd2, 0x15, 0x17, 0x3b, 0x59, 0xcb, 0x36, 0x5d, + 0x13, 0xad, 0x32, 0x5c, 0x96, 0xe2, 0xb2, 0x1e, 0x2e, 0xcb, 0x71, 0x99, 0xb7, 0x39, 0xad, 0x62, + 0x69, 0x39, 0xc5, 0x30, 0x4c, 0x57, 0x71, 0x35, 0xd3, 0xe0, 0xe0, 0xcc, 0xa3, 0xc9, 0x97, 0xb6, + 0xf4, 0xae, 0xe3, 0x62, 0xdb, 0x3b, 0xbd, 0x3e, 0xf9, 0xf4, 0xb9, 0xd9, 0xf4, 0x4e, 0xbe, 0xcb, + 0x4f, 0xea, 0xa6, 0xd1, 0xb6, 0xbb, 0x86, 0xa1, 0x19, 0xed, 0x9c, 0x69, 0x61, 0xbb, 0xef, 0xf2, + 0x07, 0xfc, 0x10, 0x5d, 0x35, 0xbb, 0xa7, 0x39, 0xdc, 0xb1, 0xdc, 0x6b, 0xfe, 0xe3, 0x3b, 0x83, + 0x3f, 0xba, 0x5a, 0x07, 0x3b, 0xae, 0xd2, 0xb1, 0xd8, 0x01, 0xe9, 0xaf, 0x0b, 0x90, 0xfa, 0x84, + 0x1b, 0xe5, 
0x88, 0xdb, 0x04, 0x2d, 0x83, 0xa8, 0xa9, 0x69, 0x71, 0x4d, 0x58, 0x8f, 0xcb, 0xa2, + 0xa6, 0x22, 0x04, 0x0b, 0x86, 0xd2, 0xc1, 0x69, 0x81, 0xee, 0xd0, 0x67, 0x94, 0x86, 0xc5, 0x2b, + 0x6c, 0x3b, 0x9a, 0x69, 0xa4, 0xe7, 0xd7, 0x84, 0xf5, 0x88, 0xec, 0x2d, 0xd1, 0x53, 0x48, 0xb4, + 0x6c, 0xac, 0xb8, 0xb8, 0x41, 0x2e, 0x4b, 0x2f, 0xac, 0x09, 0xeb, 0x89, 0x7c, 0x26, 0xcb, 0x0d, + 0xec, 0x49, 0x92, 0x3d, 0xf2, 0x24, 0x91, 0x81, 0x1d, 0x27, 0x1b, 0x04, 0xdc, 0xb5, 0x54, 0x1f, + 0x1c, 0x09, 0x07, 0xb3, 0xe3, 0x14, 0x5c, 0x87, 0xa8, 0xae, 0x34, 0xb1, 0xee, 0xa4, 0xa3, 0x6b, + 0xf3, 0xeb, 0x89, 0xfc, 0xd3, 0xec, 0x44, 0xaf, 0x66, 0x07, 0x15, 0xcf, 0x56, 0x28, 0xba, 0x64, + 0xb8, 0xf6, 0xb5, 0xcc, 0xa9, 0xd0, 0x09, 0xc4, 0x2d, 0x5d, 0x69, 0xe1, 0x0e, 0x36, 0xdc, 0xf4, + 0x22, 0x95, 0xe7, 0xf1, 0x8c, 0xbc, 0x35, 0x0f, 0x2f, 0xf7, 0xa8, 0xd0, 0x73, 0x58, 0x20, 0xae, + 0x4e, 0xc7, 0xa8, 0xa8, 0xef, 0x87, 0x50, 0x1e, 0xda, 0x2a, 0xb6, 0xb1, 0xba, 0x67, 0x36, 0x65, + 0x0a, 0x43, 0x35, 0x00, 0x4b, 0xb1, 0x95, 0x0e, 0x26, 0x91, 0x95, 0x8e, 0x53, 0x92, 0xef, 0x86, + 0x90, 0xf8, 0xf2, 0x78, 0x40, 0x39, 0xc0, 0x91, 0x79, 0x02, 0x89, 0x80, 0xfe, 0x28, 0x05, 0xf3, + 0x17, 0xf8, 0x9a, 0xfb, 0x9c, 0x3c, 0xa2, 0x6f, 0x40, 0xe4, 0x4a, 0xd1, 0xbb, 0x98, 0x47, 0x06, + 0x5b, 0x6c, 0x8a, 0x8f, 0x05, 0xe9, 0x8d, 0x00, 0xdf, 0x1c, 0xab, 0x34, 0xfa, 0x11, 0x24, 0x3b, + 0x8a, 0xa1, 0xb4, 0xb1, 0xda, 0xe0, 0xa9, 0x40, 0x59, 0x13, 0xf9, 0xef, 0x84, 0xc8, 0x7b, 0xc0, + 0x50, 0xdb, 0x0c, 0xb4, 0x3b, 0x27, 0x2f, 0x77, 0xfa, 0x76, 0xd0, 0x8f, 0x21, 0xc5, 0x19, 0x1b, + 0x0e, 0xd6, 0x71, 0xcb, 0x35, 0x6d, 0x2a, 0x5c, 0x22, 0x9f, 0x0d, 0xa1, 0xe6, 0x0c, 0x75, 0x8e, + 0xda, 0x9d, 0x93, 0x93, 0xad, 0xfe, 0xad, 0x42, 0x22, 0xe0, 0x78, 0xe9, 0x17, 0x22, 0x2c, 0xf7, + 0x8b, 0x83, 0x1e, 0xc2, 0x1d, 0xef, 0x72, 0x9a, 0x1d, 0xcc, 0x2a, 0x09, 0xbe, 0x57, 0x25, 0x49, + 0x52, 0x84, 0x68, 0xcb, 0x34, 0x4e, 0xb5, 0x36, 0xcd, 0x91, 0x44, 0xfe, 0xd1, 0x74, 0x52, 0x6d, + 0x53, 0x8c, 0xcc, 0xb1, 0xe8, 0x63, 0x3f, 0xac, 0x17, 0xa8, 0x9b, 0x9f, 0xcc, 0x64, 0xb6, 0x51, + 0x41, 0x7d, 0x1b, 0x5f, 0xff, 0x53, 0x80, 0xe4, 0x80, 0xf5, 0x48, 0x81, 0x78, 0x6d, 0x1a, 0x7e, + 0x81, 0x20, 0xcf, 0xe8, 0x0c, 0x96, 0x3d, 0xf3, 0x70, 0xe9, 0x45, 0x2a, 0xfd, 0xd6, 0x6c, 0x9e, + 0xf1, 0xd6, 0x41, 0x2d, 0x96, 0x5a, 0xc1, 0xbd, 0xcc, 0x47, 0x80, 0x86, 0x0f, 0xcd, 0xa4, 0xd3, + 0x7f, 0x23, 0x00, 0xbd, 0x0c, 0x43, 0xf7, 0x61, 0xd1, 0x71, 0xb1, 0xd5, 0xd0, 0x54, 0x0e, 0x8f, + 0x92, 0x65, 0x59, 0x45, 0x65, 0x80, 0x33, 0x45, 0x35, 0x4d, 0xab, 0x71, 0x6e, 0x36, 0x79, 0xa4, + 0xad, 0x87, 0xe8, 0xb3, 0x4b, 0x01, 0x7b, 0x66, 0x73, 0x77, 0x4e, 0x8e, 0x9f, 0x79, 0x0b, 0xb4, + 0x03, 0x71, 0xc7, 0x52, 0xec, 0x0b, 0xca, 0xc4, 0xa2, 0xe3, 0xdb, 0x21, 0x4c, 0x75, 0x72, 0x9e, + 0x11, 0xc5, 0x1c, 0xfe, 0x8c, 0x2a, 0x90, 0xb0, 0xae, 0x7b, 0x4c, 0xac, 0xda, 0x86, 0x55, 0x93, + 0xda, 0x75, 0x80, 0x0b, 0x38, 0x9e, 0xb0, 0x6d, 0x43, 0xec, 0x4c, 0xbb, 0xc2, 0x94, 0x8a, 0xd5, + 0xde, 0xf7, 0xc2, 0xd4, 0xd3, 0xae, 0x30, 0xe3, 0x59, 0x3c, 0x63, 0x8f, 0xe8, 0x23, 0x58, 0xb4, + 0xb4, 0x36, 0xe5, 0x88, 0x52, 0x8e, 0x6f, 0x85, 0x89, 0xa3, 0xb5, 0x19, 0x45, 0xd4, 0xa2, 0x4f, + 0xa8, 0x06, 0x4b, 0x4c, 0x25, 0xe7, 0x52, 0xa7, 0x3c, 0xac, 0xee, 0x6e, 0x4c, 0x63, 0xa0, 0xfa, + 0xa5, 0xce, 0xc8, 0x12, 0x4e, 0x6f, 0x89, 0x0e, 0xfc, 0x1c, 0x62, 0xf5, 0xf6, 0xc3, 0xa9, 0xeb, + 0xed, 0xc8, 0xa6, 0x50, 0x01, 0x70, 0x5a, 0x67, 0x58, 0xed, 0xea, 0x9a, 0xd1, 0x4e, 0xc7, 0xa7, + 0x4a, 0xee, 0x3d, 0xb3, 0x59, 0xf7, 0x31, 0x72, 0x00, 0x8f, 0xf2, 0x70, 0xcf, 0xb2, 0xb1, 0x8d, + 0x2f, 0xbb, 0x9a, 0xa3, 0xb9, 0xb8, 
0xc1, 0x83, 0xcf, 0x49, 0xc3, 0xda, 0xfc, 0x7a, 0x5c, 0xbe, + 0x1b, 0xfc, 0xb1, 0x4e, 0x23, 0xf1, 0x36, 0x19, 0x5c, 0x00, 0x88, 0x9d, 0x9b, 0xcd, 0x86, 0x7b, + 0x6d, 0x61, 0xe9, 0x8f, 0x02, 0xbc, 0x35, 0xd4, 0x16, 0x46, 0x36, 0xfc, 0x15, 0x88, 0x9e, 0x6a, + 0x58, 0x57, 0x59, 0x1e, 0xc7, 0x65, 0xbe, 0x42, 0x6b, 0x90, 0x50, 0xb1, 0xd3, 0xb2, 0x35, 0xcb, + 0xf5, 0x86, 0x81, 0xb8, 0x1c, 0xdc, 0x42, 0x32, 0xc0, 0x95, 0xa2, 0x6b, 0x2a, 0x1d, 0x5b, 0x78, + 0x84, 0xe6, 0xc3, 0x42, 0xc2, 0x93, 0xe5, 0xc4, 0x47, 0xca, 0x01, 0x16, 0xe9, 0xcf, 0x02, 0xdc, + 0x1d, 0x71, 0x06, 0xed, 0x40, 0xc4, 0xc6, 0x6d, 0xfc, 0x8a, 0x77, 0x98, 0xb0, 0x36, 0x20, 0x93, + 0xb3, 0x3d, 0xf8, 0xee, 0x9c, 0xcc, 0xe0, 0x68, 0x17, 0xa2, 0xd4, 0x60, 0xce, 0x94, 0xfd, 0xe4, + 0x84, 0x1c, 0xee, 0x23, 0xe2, 0xf8, 0xc2, 0x5b, 0x90, 0xec, 0xc9, 0xcd, 0x8c, 0xfe, 0x01, 0x24, + 0x07, 0x2e, 0x26, 0xe3, 0x14, 0xbd, 0x18, 0x3b, 0x69, 0x81, 0x9a, 0xd7, 0x5b, 0x4a, 0xef, 0x43, + 0x72, 0x80, 0x9c, 0xb8, 0x82, 0x0b, 0xc7, 0xce, 0xf2, 0x95, 0xf4, 0xeb, 0x68, 0x6f, 0x98, 0x3b, + 0xc0, 0xae, 0x42, 0xa4, 0x44, 0x19, 0x88, 0x79, 0xc3, 0x2e, 0xf7, 0xa7, 0xbf, 0x0e, 0x0e, 0x71, + 0x62, 0xff, 0x10, 0x77, 0x02, 0xcb, 0x7c, 0x88, 0xf3, 0x5a, 0x36, 0xab, 0x51, 0xb9, 0xe9, 0xaa, + 0xf7, 0xa1, 0x37, 0xa5, 0xca, 0x4b, 0x8c, 0xc6, 0x6b, 0x9a, 0x05, 0x88, 0xb4, 0x6d, 0xc5, 0x3a, + 0xe3, 0x61, 0xf0, 0x68, 0xca, 0x49, 0xea, 0x05, 0xc1, 0xc8, 0x0c, 0x4a, 0x64, 0x53, 0xb1, 0x8e, + 0x03, 0xb2, 0x45, 0x6e, 0x28, 0x1b, 0xa3, 0xf1, 0x64, 0xdb, 0x87, 0x88, 0xe3, 0x12, 0x33, 0x91, + 0xaa, 0xb5, 0x1c, 0x5a, 0x22, 0x06, 0x2d, 0x9d, 0xad, 0x13, 0xb0, 0xcc, 0x38, 0x86, 0xa6, 0x83, + 0xc5, 0xe1, 0xe9, 0xa0, 0xd1, 0x37, 0xc2, 0xb1, 0xba, 0xf4, 0xc3, 0x59, 0x2f, 0xf5, 0x93, 0x80, + 0x57, 0xa8, 0x00, 0x25, 0x7a, 0x02, 0xe0, 0xb8, 0x8a, 0xed, 0xb2, 0x59, 0x3a, 0x1e, 0x3a, 0x4b, + 0xc7, 0xe9, 0x69, 0x3a, 0x4a, 0x7f, 0x08, 0x31, 0x6c, 0xa8, 0x0c, 0x08, 0xa1, 0xc0, 0x45, 0x6c, + 0xa8, 0x14, 0x16, 0xd0, 0xba, 0xdb, 0xd5, 0xd4, 0x74, 0xa2, 0x4f, 0xeb, 0xe3, 0xae, 0xa6, 0x66, + 0x9e, 0x43, 0x72, 0x40, 0xe6, 0x99, 0x5a, 0xf5, 0x63, 0x88, 0x50, 0x3b, 0xa3, 0x04, 0x2c, 0x1e, + 0x57, 0xf7, 0xab, 0x87, 0x9f, 0x54, 0x53, 0x73, 0x64, 0x51, 0x2b, 0x55, 0x8b, 0xe5, 0xea, 0x8b, + 0x94, 0x40, 0x16, 0xf2, 0x71, 0xb5, 0x4a, 0x16, 0x22, 0x8a, 0xc1, 0x42, 0xf1, 0xb0, 0x5a, 0x4a, + 0xcd, 0x4b, 0x0d, 0x48, 0x0d, 0x46, 0x00, 0x91, 0xd7, 0x7f, 0xa1, 0xea, 0xb5, 0xfb, 0x84, 0xbf, + 0x57, 0x56, 0x89, 0x28, 0xd8, 0xb6, 0xf9, 0x60, 0x19, 0x97, 0xd9, 0x82, 0x54, 0x48, 0x95, 0x4c, + 0x3c, 0x24, 0x2b, 0x62, 0x32, 0x7d, 0x96, 0x64, 0x58, 0xea, 0x8b, 0x57, 0xb4, 0x05, 0x11, 0xc3, + 0x54, 0x79, 0x9a, 0x26, 0xf2, 0x1f, 0x4c, 0xe9, 0xdb, 0xaa, 0xa9, 0x62, 0x99, 0x21, 0xa5, 0xbf, + 0x88, 0x70, 0x27, 0xb8, 0x3f, 0x7e, 0x36, 0x19, 0xdb, 0x44, 0xc4, 0xb1, 0x4d, 0x04, 0xdd, 0x83, + 0x28, 0xe9, 0x04, 0x9a, 0xca, 0xcb, 0x76, 0xe4, 0xdc, 0x6c, 0x96, 0xd5, 0x5e, 0x22, 0x44, 0x66, + 0x4a, 0x04, 0x22, 0x5f, 0x96, 0xfc, 0xe9, 0x4b, 0x04, 0xdf, 0x7e, 0xd1, 0x80, 0xfd, 0xa4, 0x0b, + 0x88, 0xfb, 0x27, 0xd1, 0x03, 0xb8, 0x5f, 0x3d, 0x2c, 0x96, 0x1a, 0xf5, 0xa3, 0xad, 0xa3, 0xe3, + 0x7a, 0xe3, 0xb8, 0x5a, 0xaf, 0x95, 0xb6, 0xcb, 0x3b, 0xe5, 0x52, 0x91, 0xb9, 0xb6, 0x50, 0x39, + 0xdc, 0xde, 0x2f, 0x15, 0x53, 0x02, 0xba, 0x03, 0x31, 0xe2, 0xda, 0xad, 0x42, 0xa5, 0x94, 0x12, + 0x83, 0x8e, 0x9e, 0x47, 0x4b, 0x10, 0xdf, 0x3e, 0x3c, 0xa8, 0x55, 0x4a, 0x47, 0xa5, 0x62, 0x6a, + 0x01, 0x01, 0x44, 0x77, 0xb6, 0xca, 0x95, 0x52, 0x31, 0x15, 0x91, 0x3e, 0x17, 0x60, 0x75, 0x9b, + 0x96, 0xa1, 0xc1, 0x97, 0x14, 0x19, 0x5f, 0x76, 0xb1, 0xe3, 
0x92, 0x8a, 0x6a, 0x29, 0x36, 0x79, + 0xc3, 0xe3, 0x46, 0x65, 0x2b, 0xb4, 0x1f, 0x28, 0x9e, 0xe2, 0x54, 0x45, 0x66, 0xe8, 0x06, 0x9f, + 0x40, 0xda, 0x83, 0xcc, 0x0b, 0xec, 0x8e, 0x13, 0x21, 0xe4, 0x25, 0xbb, 0xbf, 0x3e, 0x4b, 0x7f, + 0x17, 0x41, 0x2a, 0x1b, 0x8e, 0xab, 0x18, 0xae, 0x36, 0x41, 0xaf, 0x99, 0x48, 0xd1, 0xbb, 0x90, + 0xd0, 0x28, 0x67, 0x0b, 0xfb, 0x31, 0x51, 0x10, 0xd3, 0x82, 0x0c, 0xde, 0x76, 0x59, 0x45, 0xab, + 0x00, 0x36, 0x63, 0x27, 0x67, 0x22, 0x94, 0x38, 0xce, 0x77, 0xca, 0x2a, 0xba, 0xec, 0x2b, 0x6a, + 0xec, 0x85, 0xe5, 0xe3, 0x10, 0x9b, 0x85, 0x2b, 0x32, 0xa9, 0xcc, 0xdd, 0xb6, 0xa2, 0xfc, 0x43, + 0x80, 0xf5, 0x80, 0x04, 0x65, 0x43, 0xd7, 0x8c, 0xaf, 0x35, 0x50, 0xd0, 0x3b, 0x23, 0xfc, 0x30, + 0xc1, 0x07, 0x0b, 0x03, 0x3e, 0x90, 0x74, 0x58, 0x3d, 0xa6, 0x5f, 0x45, 0xc6, 0x69, 0xb1, 0x3f, + 0x30, 0x13, 0xdc, 0x2a, 0xac, 0x6d, 0x78, 0xbb, 0xa2, 0x39, 0x43, 0x71, 0xed, 0x84, 0x99, 0xec, + 0x01, 0xc4, 0x2d, 0xa5, 0x8d, 0x1b, 0x8e, 0xf6, 0x1a, 0xf3, 0x48, 0x8c, 0x91, 0x8d, 0xba, 0xf6, + 0x1a, 0x13, 0x0d, 0xe9, 0x8f, 0xae, 0x79, 0x81, 0xbd, 0xa1, 0x92, 0x1e, 0x3f, 0x22, 0x1b, 0xd2, + 0x97, 0x02, 0xac, 0x8e, 0xb9, 0xd4, 0xb1, 0x4c, 0xc3, 0xc1, 0xe8, 0x00, 0xe2, 0xfe, 0x37, 0x3e, + 0x5e, 0x7f, 0x67, 0xd6, 0xb1, 0xc7, 0x80, 0xde, 0x83, 0xa4, 0x81, 0x5f, 0xb9, 0x8d, 0x80, 0x50, + 0x2c, 0x90, 0x96, 0xc8, 0x76, 0xcd, 0x17, 0xec, 0x00, 0x56, 0x8b, 0x74, 0xa8, 0xf8, 0xbf, 0x64, + 0x64, 0xfe, 0x4d, 0x12, 0xee, 0x0f, 0x32, 0xd5, 0xb1, 0x7d, 0xa5, 0xb5, 0x30, 0xfa, 0x4a, 0x84, + 0x95, 0xd1, 0x55, 0x0d, 0x3d, 0x0b, 0x9b, 0x84, 0x26, 0x15, 0xc3, 0xcc, 0xac, 0x76, 0x92, 0xbe, + 0x10, 0x7e, 0xfe, 0xaf, 0x7f, 0x7f, 0x21, 0xfe, 0x52, 0x90, 0x1e, 0xfb, 0x9f, 0x31, 0x7f, 0xc2, + 0xbc, 0xfc, 0xdc, 0xb2, 0xcd, 0x73, 0xdc, 0x72, 0x9d, 0xdc, 0x46, 0xce, 0xc6, 0x6d, 0xcd, 0x34, + 0x9c, 0xdc, 0xc6, 0x67, 0xfe, 0x77, 0x58, 0xdf, 0x6f, 0x9b, 0x7e, 0x3c, 0xbd, 0xdc, 0x91, 0x36, + 0x27, 0xb1, 0xe8, 0x66, 0x8b, 0x7d, 0xff, 0x0c, 0xe1, 0x41, 0x3f, 0x13, 0xe1, 0xee, 0x88, 0x7a, + 0x8b, 0xc2, 0x3e, 0x9f, 0x8c, 0xaf, 0xd1, 0xb3, 0x5b, 0xe6, 0xa7, 0xd4, 0x30, 0xaf, 0x50, 0xc0, + 0x2e, 0xc4, 0xe3, 0x23, 0xad, 0x32, 0xac, 0x4c, 0x6e, 0xe3, 0xb3, 0x97, 0xcf, 0xd0, 0xe6, 0x78, + 0x6c, 0xc0, 0x16, 0x23, 0xd1, 0xe8, 0xb7, 0x22, 0x3c, 0x98, 0x50, 0x5d, 0xd1, 0xd6, 0xad, 0x2b, + 0x73, 0x66, 0xd5, 0xa3, 0x08, 0x7c, 0xa5, 0xce, 0xfa, 0x13, 0x96, 0xf4, 0x15, 0x8b, 0x8d, 0x2f, + 0x05, 0xa9, 0x78, 0x53, 0x1b, 0x6c, 0x6a, 0x3d, 0x29, 0x36, 0x85, 0x8d, 0x97, 0xfb, 0xd2, 0xce, + 0xcd, 0x4d, 0x32, 0x40, 0x86, 0xfe, 0x26, 0xc2, 0xc3, 0xd0, 0xea, 0x8f, 0x5e, 0x4c, 0x6f, 0xa7, + 0x89, 0xfd, 0x23, 0xcc, 0x5a, 0x7f, 0x62, 0xd6, 0xfa, 0x83, 0x20, 0x55, 0x6e, 0x91, 0x03, 0xda, + 0xa0, 0x34, 0x81, 0xec, 0x3a, 0x91, 0xf6, 0x6e, 0x9c, 0xa3, 0x13, 0x78, 0xd1, 0xef, 0x45, 0x58, + 0x19, 0xdd, 0x74, 0x42, 0xab, 0xd1, 0xc4, 0x5e, 0x35, 0x7b, 0xce, 0xfd, 0x8e, 0xd9, 0xf0, 0x37, + 0x42, 0x66, 0xab, 0xa7, 0xa9, 0x27, 0x67, 0x76, 0xc6, 0xd0, 0xeb, 0x19, 0xee, 0x30, 0xb3, 0x3d, + 0x05, 0x5d, 0x68, 0xf8, 0xf5, 0x2c, 0xf6, 0x2b, 0x11, 0xee, 0x8d, 0xec, 0x61, 0x28, 0xec, 0xff, + 0x16, 0x93, 0xda, 0x6d, 0xe6, 0xd9, 0xcd, 0xc0, 0xac, 0x6d, 0x8e, 0x2a, 0x58, 0xb3, 0x05, 0x49, + 0x7f, 0xc1, 0x9a, 0x35, 0x74, 0xd1, 0x7f, 0x04, 0x58, 0x19, 0xdd, 0x40, 0x43, 0xc3, 0x68, 0x62, + 0xdf, 0xcd, 0xac, 0x0c, 0xbd, 0xbe, 0x96, 0x3a, 0x96, 0x7b, 0xed, 0x29, 0xbc, 0x71, 0x8b, 0x0a, + 0xbd, 0x71, 0x8b, 0x0a, 0x5d, 0xf8, 0x5c, 0x80, 0x87, 0x2d, 0xb3, 0x33, 0x59, 0xb7, 0xc2, 0xca, + 0x90, 0xc7, 0x6a, 0x44, 0x8d, 0x9a, 0xf0, 0xb2, 0xc4, 0x81, 0x6d, 0x53, 0x57, 0x8c, 
0x76, 0xd6, + 0xb4, 0xdb, 0xb9, 0x36, 0x36, 0xa8, 0x92, 0x39, 0xf6, 0x93, 0x62, 0x69, 0xce, 0x98, 0x7f, 0x36, + 0x3e, 0xf5, 0x36, 0x9a, 0x51, 0x8a, 0xf8, 0xfe, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x16, 0x05, + 0x09, 0xf3, 0x40, 0x1d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/agent.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/agent.pb.go new file mode 100644 index 0000000..9bc8f7c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/agent.pb.go @@ -0,0 +1,1261 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/agent.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Match mode determines how intents are detected from user queries. +type Agent_MatchMode int32 + +const ( + // Not specified. + Agent_MATCH_MODE_UNSPECIFIED Agent_MatchMode = 0 + // Best for agents with a small number of examples in intents and/or wide + // use of templates syntax and composite entities. + Agent_MATCH_MODE_HYBRID Agent_MatchMode = 1 + // Can be used for agents with a large number of examples in intents, + // especially the ones using @sys.any or very large developer entities. + Agent_MATCH_MODE_ML_ONLY Agent_MatchMode = 2 +) + +var Agent_MatchMode_name = map[int32]string{ + 0: "MATCH_MODE_UNSPECIFIED", + 1: "MATCH_MODE_HYBRID", + 2: "MATCH_MODE_ML_ONLY", +} +var Agent_MatchMode_value = map[string]int32{ + "MATCH_MODE_UNSPECIFIED": 0, + "MATCH_MODE_HYBRID": 1, + "MATCH_MODE_ML_ONLY": 2, +} + +func (x Agent_MatchMode) String() string { + return proto.EnumName(Agent_MatchMode_name, int32(x)) +} +func (Agent_MatchMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{0, 0} +} + +// Represents a conversational agent. +type Agent struct { + // Required. The project of this agent. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The name of this agent. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. The default language of the agent as a language tag. See + // [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. This field cannot be + // set by the `Update` method. 
+ DefaultLanguageCode string `protobuf:"bytes,3,opt,name=default_language_code,json=defaultLanguageCode,proto3" json:"default_language_code,omitempty"` + // Optional. The list of all languages supported by this agent (except for the + // `default_language_code`). + SupportedLanguageCodes []string `protobuf:"bytes,4,rep,name=supported_language_codes,json=supportedLanguageCodes,proto3" json:"supported_language_codes,omitempty"` + // Required. The time zone of this agent from the + // [time zone database](https://www.iana.org/time-zones), e.g., + // America/New_York, Europe/Paris. + TimeZone string `protobuf:"bytes,5,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Optional. The description of this agent. + // The maximum length is 500 characters. If exceeded, the request is rejected. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The URI of the agent's avatar. + // Avatars are used throughout the Dialogflow console and in the self-hosted + // [Web + // Demo](https://cloud.google.com/dialogflow-enterprise/docs/integrations/web-demo) + // integration. + AvatarUri string `protobuf:"bytes,7,opt,name=avatar_uri,json=avatarUri,proto3" json:"avatar_uri,omitempty"` + // Optional. Determines whether this agent should log conversation queries. + EnableLogging bool `protobuf:"varint,8,opt,name=enable_logging,json=enableLogging,proto3" json:"enable_logging,omitempty"` + // Optional. Determines how intents are detected from user queries. + MatchMode Agent_MatchMode `protobuf:"varint,9,opt,name=match_mode,json=matchMode,proto3,enum=google.cloud.dialogflow.v2.Agent_MatchMode" json:"match_mode,omitempty"` + // Optional. To filter out false positive results and still get variety in + // matched natural language inputs for your agent, you can tune the machine + // learning classification threshold. If the returned score value is less than + // the threshold value, then a fallback intent will be triggered or, if there + // are no fallback intents defined, no intent will be triggered. The score + // values range from 0.0 (completely uncertain) to 1.0 (completely certain). + // If set to 0.0, the default of 0.3 is used. 
+ ClassificationThreshold float32 `protobuf:"fixed32,10,opt,name=classification_threshold,json=classificationThreshold,proto3" json:"classification_threshold,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Agent) Reset() { *m = Agent{} } +func (m *Agent) String() string { return proto.CompactTextString(m) } +func (*Agent) ProtoMessage() {} +func (*Agent) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{0} +} +func (m *Agent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Agent.Unmarshal(m, b) +} +func (m *Agent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Agent.Marshal(b, m, deterministic) +} +func (dst *Agent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Agent.Merge(dst, src) +} +func (m *Agent) XXX_Size() int { + return xxx_messageInfo_Agent.Size(m) +} +func (m *Agent) XXX_DiscardUnknown() { + xxx_messageInfo_Agent.DiscardUnknown(m) +} + +var xxx_messageInfo_Agent proto.InternalMessageInfo + +func (m *Agent) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Agent) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Agent) GetDefaultLanguageCode() string { + if m != nil { + return m.DefaultLanguageCode + } + return "" +} + +func (m *Agent) GetSupportedLanguageCodes() []string { + if m != nil { + return m.SupportedLanguageCodes + } + return nil +} + +func (m *Agent) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *Agent) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Agent) GetAvatarUri() string { + if m != nil { + return m.AvatarUri + } + return "" +} + +func (m *Agent) GetEnableLogging() bool { + if m != nil { + return m.EnableLogging + } + return false +} + +func (m *Agent) GetMatchMode() Agent_MatchMode { + if m != nil { + return m.MatchMode + } + return Agent_MATCH_MODE_UNSPECIFIED +} + +func (m *Agent) GetClassificationThreshold() float32 { + if m != nil { + return m.ClassificationThreshold + } + return 0 +} + +// The request message for [Agents.GetAgent][google.cloud.dialogflow.v2.Agents.GetAgent]. +type GetAgentRequest struct { + // Required. The project that the agent to fetch is associated with. + // Format: `projects/`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAgentRequest) Reset() { *m = GetAgentRequest{} } +func (m *GetAgentRequest) String() string { return proto.CompactTextString(m) } +func (*GetAgentRequest) ProtoMessage() {} +func (*GetAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{1} +} +func (m *GetAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAgentRequest.Unmarshal(m, b) +} +func (m *GetAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAgentRequest.Marshal(b, m, deterministic) +} +func (dst *GetAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAgentRequest.Merge(dst, src) +} +func (m *GetAgentRequest) XXX_Size() int { + return xxx_messageInfo_GetAgentRequest.Size(m) +} +func (m *GetAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAgentRequest proto.InternalMessageInfo + +func (m *GetAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// The request message for [Agents.SearchAgents][google.cloud.dialogflow.v2.Agents.SearchAgents]. +type SearchAgentsRequest struct { + // Required. The project to list agents from. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAgentsRequest) Reset() { *m = SearchAgentsRequest{} } +func (m *SearchAgentsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchAgentsRequest) ProtoMessage() {} +func (*SearchAgentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{2} +} +func (m *SearchAgentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAgentsRequest.Unmarshal(m, b) +} +func (m *SearchAgentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAgentsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchAgentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAgentsRequest.Merge(dst, src) +} +func (m *SearchAgentsRequest) XXX_Size() int { + return xxx_messageInfo_SearchAgentsRequest.Size(m) +} +func (m *SearchAgentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAgentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAgentsRequest proto.InternalMessageInfo + +func (m *SearchAgentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *SearchAgentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchAgentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Agents.SearchAgents][google.cloud.dialogflow.v2.Agents.SearchAgents]. 
+type SearchAgentsResponse struct { + // The list of agents. There will be a maximum number of items returned based + // on the page_size field in the request. + Agents []*Agent `protobuf:"bytes,1,rep,name=agents,proto3" json:"agents,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAgentsResponse) Reset() { *m = SearchAgentsResponse{} } +func (m *SearchAgentsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchAgentsResponse) ProtoMessage() {} +func (*SearchAgentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{3} +} +func (m *SearchAgentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAgentsResponse.Unmarshal(m, b) +} +func (m *SearchAgentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAgentsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchAgentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAgentsResponse.Merge(dst, src) +} +func (m *SearchAgentsResponse) XXX_Size() int { + return xxx_messageInfo_SearchAgentsResponse.Size(m) +} +func (m *SearchAgentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAgentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAgentsResponse proto.InternalMessageInfo + +func (m *SearchAgentsResponse) GetAgents() []*Agent { + if m != nil { + return m.Agents + } + return nil +} + +func (m *SearchAgentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Agents.TrainAgent][google.cloud.dialogflow.v2.Agents.TrainAgent]. +type TrainAgentRequest struct { + // Required. The project that the agent to train is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrainAgentRequest) Reset() { *m = TrainAgentRequest{} } +func (m *TrainAgentRequest) String() string { return proto.CompactTextString(m) } +func (*TrainAgentRequest) ProtoMessage() {} +func (*TrainAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{4} +} +func (m *TrainAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrainAgentRequest.Unmarshal(m, b) +} +func (m *TrainAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrainAgentRequest.Marshal(b, m, deterministic) +} +func (dst *TrainAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrainAgentRequest.Merge(dst, src) +} +func (m *TrainAgentRequest) XXX_Size() int { + return xxx_messageInfo_TrainAgentRequest.Size(m) +} +func (m *TrainAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TrainAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TrainAgentRequest proto.InternalMessageInfo + +func (m *TrainAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// The request message for [Agents.ExportAgent][google.cloud.dialogflow.v2.Agents.ExportAgent]. +type ExportAgentRequest struct { + // Required. 
The project that the agent to export is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The + // [Google Cloud Storage](https://cloud.google.com/storage/docs/) + // URI to export the agent to. + // The format of this URI must be `gs:///`. + // If left unspecified, the serialized agent is returned inline. + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3" json:"agent_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAgentRequest) Reset() { *m = ExportAgentRequest{} } +func (m *ExportAgentRequest) String() string { return proto.CompactTextString(m) } +func (*ExportAgentRequest) ProtoMessage() {} +func (*ExportAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{5} +} +func (m *ExportAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAgentRequest.Unmarshal(m, b) +} +func (m *ExportAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAgentRequest.Marshal(b, m, deterministic) +} +func (dst *ExportAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAgentRequest.Merge(dst, src) +} +func (m *ExportAgentRequest) XXX_Size() int { + return xxx_messageInfo_ExportAgentRequest.Size(m) +} +func (m *ExportAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAgentRequest proto.InternalMessageInfo + +func (m *ExportAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ExportAgentRequest) GetAgentUri() string { + if m != nil { + return m.AgentUri + } + return "" +} + +// The response message for [Agents.ExportAgent][google.cloud.dialogflow.v2.Agents.ExportAgent]. +type ExportAgentResponse struct { + // Required. The exported agent. 
+ // + // Types that are valid to be assigned to Agent: + // *ExportAgentResponse_AgentUri + // *ExportAgentResponse_AgentContent + Agent isExportAgentResponse_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAgentResponse) Reset() { *m = ExportAgentResponse{} } +func (m *ExportAgentResponse) String() string { return proto.CompactTextString(m) } +func (*ExportAgentResponse) ProtoMessage() {} +func (*ExportAgentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{6} +} +func (m *ExportAgentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAgentResponse.Unmarshal(m, b) +} +func (m *ExportAgentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAgentResponse.Marshal(b, m, deterministic) +} +func (dst *ExportAgentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAgentResponse.Merge(dst, src) +} +func (m *ExportAgentResponse) XXX_Size() int { + return xxx_messageInfo_ExportAgentResponse.Size(m) +} +func (m *ExportAgentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAgentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAgentResponse proto.InternalMessageInfo + +type isExportAgentResponse_Agent interface { + isExportAgentResponse_Agent() +} + +type ExportAgentResponse_AgentUri struct { + AgentUri string `protobuf:"bytes,1,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type ExportAgentResponse_AgentContent struct { + AgentContent []byte `protobuf:"bytes,2,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*ExportAgentResponse_AgentUri) isExportAgentResponse_Agent() {} + +func (*ExportAgentResponse_AgentContent) isExportAgentResponse_Agent() {} + +func (m *ExportAgentResponse) GetAgent() isExportAgentResponse_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *ExportAgentResponse) GetAgentUri() string { + if x, ok := m.GetAgent().(*ExportAgentResponse_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *ExportAgentResponse) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*ExportAgentResponse_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
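+// A minimal usage sketch of the `agent` oneof above; `resp` is assumed to be an
+// *ExportAgentResponse unpacked from a completed ExportAgent operation and is
+// not defined in this file:
+//
+//    switch a := resp.Agent.(type) {
+//    case *ExportAgentResponse_AgentUri:
+//        log.Printf("agent exported to %s", a.AgentUri)
+//    case *ExportAgentResponse_AgentContent:
+//        log.Printf("received %d bytes of exported agent", len(a.AgentContent))
+//    case nil:
+//        // neither variant was set
+//    }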
+func (*ExportAgentResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExportAgentResponse_OneofMarshaler, _ExportAgentResponse_OneofUnmarshaler, _ExportAgentResponse_OneofSizer, []interface{}{ + (*ExportAgentResponse_AgentUri)(nil), + (*ExportAgentResponse_AgentContent)(nil), + } +} + +func _ExportAgentResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExportAgentResponse) + // agent + switch x := m.Agent.(type) { + case *ExportAgentResponse_AgentUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *ExportAgentResponse_AgentContent: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("ExportAgentResponse.Agent has unexpected type %T", x) + } + return nil +} + +func _ExportAgentResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExportAgentResponse) + switch tag { + case 1: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &ExportAgentResponse_AgentUri{x} + return true, err + case 2: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &ExportAgentResponse_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _ExportAgentResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExportAgentResponse) + // agent + switch x := m.Agent.(type) { + case *ExportAgentResponse_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *ExportAgentResponse_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [Agents.ImportAgent][google.cloud.dialogflow.v2.Agents.ImportAgent]. +type ImportAgentRequest struct { + // Required. The project that the agent to import is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The agent to import. 
+ // + // Types that are valid to be assigned to Agent: + // *ImportAgentRequest_AgentUri + // *ImportAgentRequest_AgentContent + Agent isImportAgentRequest_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportAgentRequest) Reset() { *m = ImportAgentRequest{} } +func (m *ImportAgentRequest) String() string { return proto.CompactTextString(m) } +func (*ImportAgentRequest) ProtoMessage() {} +func (*ImportAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{7} +} +func (m *ImportAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportAgentRequest.Unmarshal(m, b) +} +func (m *ImportAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportAgentRequest.Marshal(b, m, deterministic) +} +func (dst *ImportAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportAgentRequest.Merge(dst, src) +} +func (m *ImportAgentRequest) XXX_Size() int { + return xxx_messageInfo_ImportAgentRequest.Size(m) +} +func (m *ImportAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportAgentRequest proto.InternalMessageInfo + +func (m *ImportAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isImportAgentRequest_Agent interface { + isImportAgentRequest_Agent() +} + +type ImportAgentRequest_AgentUri struct { + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type ImportAgentRequest_AgentContent struct { + AgentContent []byte `protobuf:"bytes,3,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*ImportAgentRequest_AgentUri) isImportAgentRequest_Agent() {} + +func (*ImportAgentRequest_AgentContent) isImportAgentRequest_Agent() {} + +func (m *ImportAgentRequest) GetAgent() isImportAgentRequest_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *ImportAgentRequest) GetAgentUri() string { + if x, ok := m.GetAgent().(*ImportAgentRequest_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *ImportAgentRequest) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*ImportAgentRequest_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
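+// A minimal sketch, with illustrative values only, of populating the `agent`
+// oneof when importing inline content rather than a Cloud Storage URI
+// (`zipBytes` is assumed to hold a zipped agent and is not defined here):
+//
+//    req := &ImportAgentRequest{
+//        Parent: "projects/my-project",
+//        Agent:  &ImportAgentRequest_AgentContent{AgentContent: zipBytes},
+//    }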
+func (*ImportAgentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImportAgentRequest_OneofMarshaler, _ImportAgentRequest_OneofUnmarshaler, _ImportAgentRequest_OneofSizer, []interface{}{ + (*ImportAgentRequest_AgentUri)(nil), + (*ImportAgentRequest_AgentContent)(nil), + } +} + +func _ImportAgentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImportAgentRequest) + // agent + switch x := m.Agent.(type) { + case *ImportAgentRequest_AgentUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *ImportAgentRequest_AgentContent: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("ImportAgentRequest.Agent has unexpected type %T", x) + } + return nil +} + +func _ImportAgentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImportAgentRequest) + switch tag { + case 2: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &ImportAgentRequest_AgentUri{x} + return true, err + case 3: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &ImportAgentRequest_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _ImportAgentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImportAgentRequest) + // agent + switch x := m.Agent.(type) { + case *ImportAgentRequest_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *ImportAgentRequest_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [Agents.RestoreAgent][google.cloud.dialogflow.v2.Agents.RestoreAgent]. +type RestoreAgentRequest struct { + // Required. The project that the agent to restore is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The agent to restore. 
+ // + // Types that are valid to be assigned to Agent: + // *RestoreAgentRequest_AgentUri + // *RestoreAgentRequest_AgentContent + Agent isRestoreAgentRequest_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestoreAgentRequest) Reset() { *m = RestoreAgentRequest{} } +func (m *RestoreAgentRequest) String() string { return proto.CompactTextString(m) } +func (*RestoreAgentRequest) ProtoMessage() {} +func (*RestoreAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_df65ea9e23826b36, []int{8} +} +func (m *RestoreAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestoreAgentRequest.Unmarshal(m, b) +} +func (m *RestoreAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestoreAgentRequest.Marshal(b, m, deterministic) +} +func (dst *RestoreAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestoreAgentRequest.Merge(dst, src) +} +func (m *RestoreAgentRequest) XXX_Size() int { + return xxx_messageInfo_RestoreAgentRequest.Size(m) +} +func (m *RestoreAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RestoreAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RestoreAgentRequest proto.InternalMessageInfo + +func (m *RestoreAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isRestoreAgentRequest_Agent interface { + isRestoreAgentRequest_Agent() +} + +type RestoreAgentRequest_AgentUri struct { + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type RestoreAgentRequest_AgentContent struct { + AgentContent []byte `protobuf:"bytes,3,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*RestoreAgentRequest_AgentUri) isRestoreAgentRequest_Agent() {} + +func (*RestoreAgentRequest_AgentContent) isRestoreAgentRequest_Agent() {} + +func (m *RestoreAgentRequest) GetAgent() isRestoreAgentRequest_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *RestoreAgentRequest) GetAgentUri() string { + if x, ok := m.GetAgent().(*RestoreAgentRequest_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *RestoreAgentRequest) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*RestoreAgentRequest_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RestoreAgentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RestoreAgentRequest_OneofMarshaler, _RestoreAgentRequest_OneofUnmarshaler, _RestoreAgentRequest_OneofSizer, []interface{}{ + (*RestoreAgentRequest_AgentUri)(nil), + (*RestoreAgentRequest_AgentContent)(nil), + } +} + +func _RestoreAgentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RestoreAgentRequest) + // agent + switch x := m.Agent.(type) { + case *RestoreAgentRequest_AgentUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *RestoreAgentRequest_AgentContent: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("RestoreAgentRequest.Agent has unexpected type %T", x) + } + return nil +} + +func _RestoreAgentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RestoreAgentRequest) + switch tag { + case 2: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &RestoreAgentRequest_AgentUri{x} + return true, err + case 3: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &RestoreAgentRequest_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _RestoreAgentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RestoreAgentRequest) + // agent + switch x := m.Agent.(type) { + case *RestoreAgentRequest_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *RestoreAgentRequest_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Agent)(nil), "google.cloud.dialogflow.v2.Agent") + proto.RegisterType((*GetAgentRequest)(nil), "google.cloud.dialogflow.v2.GetAgentRequest") + proto.RegisterType((*SearchAgentsRequest)(nil), "google.cloud.dialogflow.v2.SearchAgentsRequest") + proto.RegisterType((*SearchAgentsResponse)(nil), "google.cloud.dialogflow.v2.SearchAgentsResponse") + proto.RegisterType((*TrainAgentRequest)(nil), "google.cloud.dialogflow.v2.TrainAgentRequest") + proto.RegisterType((*ExportAgentRequest)(nil), "google.cloud.dialogflow.v2.ExportAgentRequest") + proto.RegisterType((*ExportAgentResponse)(nil), "google.cloud.dialogflow.v2.ExportAgentResponse") + proto.RegisterType((*ImportAgentRequest)(nil), "google.cloud.dialogflow.v2.ImportAgentRequest") + proto.RegisterType((*RestoreAgentRequest)(nil), "google.cloud.dialogflow.v2.RestoreAgentRequest") + proto.RegisterEnum("google.cloud.dialogflow.v2.Agent_MatchMode", Agent_MatchMode_name, Agent_MatchMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AgentsClient is the client API for Agents service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AgentsClient interface { + // Retrieves the specified agent. + GetAgent(ctx context.Context, in *GetAgentRequest, opts ...grpc.CallOption) (*Agent, error) + // Returns the list of agents. + // + // Since there is at most one conversational agent per project, this method is + // useful primarily for listing all agents across projects the caller has + // access to. One can achieve that with a wildcard project collection id "-". + // Refer to [List + // Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). + SearchAgents(ctx context.Context, in *SearchAgentsRequest, opts ...grpc.CallOption) (*SearchAgentsResponse, error) + // Trains the specified agent. + // + // Operation + TrainAgent(ctx context.Context, in *TrainAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports the specified agent to a ZIP file. + // + // Operation + ExportAgent(ctx context.Context, in *ExportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports the specified agent from a ZIP file. + // + // Uploads new intents and entity types without deleting the existing ones. + // Intents and entity types with the same name are replaced with the new + // versions from ImportAgentRequest. + // + // Operation + ImportAgent(ctx context.Context, in *ImportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Restores the specified agent from a ZIP file. + // + // Replaces the current agent version with a new one. All the intents and + // entity types in the older version are deleted. + // + // Operation + RestoreAgent(ctx context.Context, in *RestoreAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type agentsClient struct { + cc *grpc.ClientConn +} + +func NewAgentsClient(cc *grpc.ClientConn) AgentsClient { + return &agentsClient{cc} +} + +func (c *agentsClient) GetAgent(ctx context.Context, in *GetAgentRequest, opts ...grpc.CallOption) (*Agent, error) { + out := new(Agent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/GetAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) SearchAgents(ctx context.Context, in *SearchAgentsRequest, opts ...grpc.CallOption) (*SearchAgentsResponse, error) { + out := new(SearchAgentsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/SearchAgents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) TrainAgent(ctx context.Context, in *TrainAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/TrainAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) ExportAgent(ctx context.Context, in *ExportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/ExportAgent", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) ImportAgent(ctx context.Context, in *ImportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/ImportAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) RestoreAgent(ctx context.Context, in *RestoreAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Agents/RestoreAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AgentsServer is the server API for Agents service. +type AgentsServer interface { + // Retrieves the specified agent. + GetAgent(context.Context, *GetAgentRequest) (*Agent, error) + // Returns the list of agents. + // + // Since there is at most one conversational agent per project, this method is + // useful primarily for listing all agents across projects the caller has + // access to. One can achieve that with a wildcard project collection id "-". + // Refer to [List + // Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). + SearchAgents(context.Context, *SearchAgentsRequest) (*SearchAgentsResponse, error) + // Trains the specified agent. + // + // Operation + TrainAgent(context.Context, *TrainAgentRequest) (*longrunning.Operation, error) + // Exports the specified agent to a ZIP file. + // + // Operation + ExportAgent(context.Context, *ExportAgentRequest) (*longrunning.Operation, error) + // Imports the specified agent from a ZIP file. + // + // Uploads new intents and entity types without deleting the existing ones. + // Intents and entity types with the same name are replaced with the new + // versions from ImportAgentRequest. + // + // Operation + ImportAgent(context.Context, *ImportAgentRequest) (*longrunning.Operation, error) + // Restores the specified agent from a ZIP file. + // + // Replaces the current agent version with a new one. All the intents and + // entity types in the older version are deleted. 
+ // + // Operation + RestoreAgent(context.Context, *RestoreAgentRequest) (*longrunning.Operation, error) +} + +func RegisterAgentsServer(s *grpc.Server, srv AgentsServer) { + s.RegisterService(&_Agents_serviceDesc, srv) +} + +func _Agents_GetAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).GetAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/GetAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).GetAgent(ctx, req.(*GetAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_SearchAgents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchAgentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).SearchAgents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/SearchAgents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).SearchAgents(ctx, req.(*SearchAgentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_TrainAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TrainAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).TrainAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/TrainAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).TrainAgent(ctx, req.(*TrainAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_ExportAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).ExportAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/ExportAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).ExportAgent(ctx, req.(*ExportAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_ImportAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).ImportAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/ImportAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).ImportAgent(ctx, req.(*ImportAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_RestoreAgent_Handler(srv interface{}, ctx context.Context, dec 
func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RestoreAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).RestoreAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Agents/RestoreAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).RestoreAgent(ctx, req.(*RestoreAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Agents_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.Agents", + HandlerType: (*AgentsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAgent", + Handler: _Agents_GetAgent_Handler, + }, + { + MethodName: "SearchAgents", + Handler: _Agents_SearchAgents_Handler, + }, + { + MethodName: "TrainAgent", + Handler: _Agents_TrainAgent_Handler, + }, + { + MethodName: "ExportAgent", + Handler: _Agents_ExportAgent_Handler, + }, + { + MethodName: "ImportAgent", + Handler: _Agents_ImportAgent_Handler, + }, + { + MethodName: "RestoreAgent", + Handler: _Agents_RestoreAgent_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2/agent.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/agent.proto", fileDescriptor_agent_df65ea9e23826b36) +} + +var fileDescriptor_agent_df65ea9e23826b36 = []byte{ + // 975 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0x41, 0x6f, 0xdc, 0x44, + 0x14, 0x8e, 0x37, 0xcd, 0x76, 0x77, 0x92, 0xb4, 0xe9, 0x84, 0x06, 0xb3, 0x6d, 0x60, 0xeb, 0x92, + 0x68, 0x9b, 0x82, 0x5d, 0x2d, 0x17, 0x08, 0xe2, 0xd0, 0x64, 0xd3, 0x66, 0x51, 0x36, 0x89, 0x9c, + 0xa4, 0x52, 0x7b, 0xb1, 0x26, 0xf6, 0xac, 0x33, 0xd4, 0x9e, 0x31, 0x33, 0xe3, 0xd0, 0xa4, 0x70, + 0x80, 0x23, 0x12, 0x12, 0x12, 0x5c, 0x38, 0x21, 0x71, 0xe4, 0xef, 0xf0, 0x17, 0x38, 0x73, 0xe6, + 0x88, 0x66, 0xec, 0xcd, 0x7a, 0x93, 0xd4, 0x9b, 0x03, 0x52, 0x6f, 0xf6, 0xfb, 0xbe, 0xf7, 0xde, + 0x37, 0x33, 0xdf, 0xd3, 0x0c, 0x58, 0x0e, 0x19, 0x0b, 0x23, 0xec, 0xf8, 0x11, 0x4b, 0x03, 0x27, + 0x20, 0x28, 0x62, 0x61, 0x3f, 0x62, 0xdf, 0x38, 0xc7, 0x6d, 0x07, 0x85, 0x98, 0x4a, 0x3b, 0xe1, + 0x4c, 0x32, 0xd8, 0xc8, 0x78, 0xb6, 0xe6, 0xd9, 0x43, 0x9e, 0x7d, 0xdc, 0x6e, 0xdc, 0xcd, 0x6b, + 0xa0, 0x84, 0x38, 0x88, 0x52, 0x26, 0x91, 0x24, 0x8c, 0x8a, 0x2c, 0xb3, 0xf1, 0x5e, 0x01, 0xe5, + 0x58, 0xb0, 0x94, 0xfb, 0x38, 0x87, 0xee, 0xe7, 0x50, 0xc4, 0x68, 0xc8, 0x53, 0x4a, 0x09, 0x0d, + 0x1d, 0x96, 0x60, 0x3e, 0x92, 0x7f, 0x27, 0x27, 0xe9, 0xbf, 0xc3, 0xb4, 0xef, 0xe0, 0x38, 0x91, + 0x27, 0x39, 0xd8, 0x3c, 0x0f, 0xf6, 0x09, 0x8e, 0x02, 0x2f, 0x46, 0xe2, 0x65, 0xce, 0xb8, 0x7b, + 0x9e, 0x21, 0x24, 0x4f, 0xfd, 0x7c, 0x59, 0xd6, 0xaf, 0xd7, 0xc0, 0xd4, 0x63, 0xb5, 0x4c, 0xb8, + 0x00, 0xaa, 0x09, 0xe2, 0x98, 0x4a, 0xd3, 0x68, 0x1a, 0xad, 0xba, 0x9b, 0xff, 0xc1, 0x7b, 0x60, + 0x26, 0x20, 0x22, 0x89, 0xd0, 0x89, 0x47, 0x51, 0x8c, 0xcd, 0x8a, 0x46, 0xa7, 0xf3, 0xd8, 0x36, + 0x8a, 0x31, 0x6c, 0x83, 0xdb, 0x01, 0xee, 0xa3, 0x34, 0x92, 0x5e, 0x84, 0x68, 0x98, 0xa2, 0x10, + 0x7b, 0x3e, 0x0b, 0xb0, 0x39, 0xa9, 0xb9, 0xf3, 0x39, 0xb8, 0x95, 0x63, 0xeb, 0x2c, 0xc0, 0xf0, + 0x53, 0x60, 0x8a, 0x34, 0x49, 0x18, 0x97, 0x38, 0x18, 0xcd, 0x12, 0xe6, 0xb5, 0xe6, 0x64, 0xab, + 0xee, 0x2e, 0x9c, 0xe1, 0xc5, 0x44, 0x01, 0xef, 0x80, 0xba, 0x24, 0x31, 0xf6, 0x4e, 0x19, 0xc5, + 0xe6, 0x94, 0xee, 0x50, 0x53, 0x81, 0x17, 0x8c, 0x62, 0xd8, 
0x04, 0xd3, 0x01, 0x16, 0x3e, 0x27, + 0x89, 0xda, 0x42, 0xb3, 0x9a, 0x8b, 0x1d, 0x86, 0xe0, 0x22, 0x00, 0xe8, 0x18, 0x49, 0xc4, 0xbd, + 0x94, 0x13, 0xf3, 0xba, 0x26, 0xd4, 0xb3, 0xc8, 0x01, 0x27, 0x70, 0x09, 0xdc, 0xc0, 0x14, 0x1d, + 0x46, 0xd8, 0x8b, 0x58, 0x18, 0x12, 0x1a, 0x9a, 0xb5, 0xa6, 0xd1, 0xaa, 0xb9, 0xb3, 0x59, 0x74, + 0x2b, 0x0b, 0xc2, 0x2f, 0x01, 0x88, 0x91, 0xf4, 0x8f, 0xbc, 0x58, 0xad, 0xb3, 0xde, 0x34, 0x5a, + 0x37, 0xda, 0x0f, 0xed, 0x37, 0x7b, 0xc4, 0xd6, 0x9b, 0x6c, 0xf7, 0x54, 0x4e, 0x8f, 0x05, 0xd8, + 0xad, 0xc7, 0x83, 0x4f, 0xf8, 0x19, 0x30, 0xfd, 0x08, 0x09, 0x41, 0xfa, 0xc4, 0xd7, 0x27, 0xef, + 0xc9, 0x23, 0x8e, 0xc5, 0x11, 0x8b, 0x02, 0x13, 0x34, 0x8d, 0x56, 0xc5, 0x7d, 0x77, 0x14, 0xdf, + 0x1f, 0xc0, 0xd6, 0x33, 0x50, 0x3f, 0x2b, 0x09, 0x1b, 0x60, 0xa1, 0xf7, 0x78, 0x7f, 0x7d, 0xd3, + 0xeb, 0xed, 0x74, 0x36, 0xbc, 0x83, 0xed, 0xbd, 0xdd, 0x8d, 0xf5, 0xee, 0x93, 0xee, 0x46, 0x67, + 0x6e, 0x02, 0xde, 0x06, 0xb7, 0x0a, 0xd8, 0xe6, 0xf3, 0x35, 0xb7, 0xdb, 0x99, 0x33, 0xe0, 0x02, + 0x80, 0x85, 0x70, 0x6f, 0xcb, 0xdb, 0xd9, 0xde, 0x7a, 0x3e, 0x57, 0xb1, 0x1e, 0x80, 0x9b, 0x4f, + 0xb1, 0xd4, 0x9a, 0x5d, 0xfc, 0x75, 0x8a, 0xc5, 0x1b, 0xfd, 0x61, 0x11, 0x30, 0xbf, 0x87, 0x11, + 0xf7, 0x8f, 0x34, 0x5b, 0x8c, 0xa1, 0xab, 0xd3, 0x4b, 0xd4, 0x49, 0x0b, 0x72, 0x9a, 0x79, 0x69, + 0xca, 0xad, 0xa9, 0xc0, 0x1e, 0x39, 0xc5, 0xea, 0x6c, 0x34, 0x28, 0xd9, 0x4b, 0x4c, 0x73, 0xf7, + 0x68, 0xfa, 0xbe, 0x0a, 0x58, 0x27, 0xe0, 0x9d, 0xd1, 0x56, 0x22, 0x61, 0x54, 0xa8, 0x0d, 0xac, + 0xea, 0x51, 0x15, 0xa6, 0xd1, 0x9c, 0x6c, 0x4d, 0xb7, 0xef, 0x8d, 0x3d, 0x08, 0x37, 0x4f, 0x80, + 0xcb, 0xe0, 0x26, 0xc5, 0xaf, 0xa4, 0x57, 0x68, 0x9b, 0x19, 0x7c, 0x56, 0x85, 0x77, 0xcf, 0x5a, + 0x3f, 0x04, 0xb7, 0xf6, 0x39, 0x22, 0xf4, 0x4a, 0x5b, 0xd2, 0x05, 0x70, 0xe3, 0x95, 0xb2, 0xee, + 0x55, 0xd8, 0x6a, 0x47, 0xb4, 0x18, 0xed, 0xc7, 0xac, 0x79, 0x4d, 0x07, 0x0e, 0x38, 0xb1, 0x02, + 0x30, 0x3f, 0x52, 0x2a, 0x5f, 0xf1, 0x62, 0x31, 0x47, 0x97, 0xdb, 0x9c, 0x18, 0x66, 0xc1, 0x25, + 0x30, 0x9b, 0xc1, 0x3e, 0xa3, 0x52, 0x75, 0x54, 0x65, 0x67, 0x36, 0x27, 0xdc, 0x19, 0x1d, 0x5e, + 0xcf, 0xa2, 0x6b, 0xd7, 0xc1, 0x94, 0xfe, 0xb7, 0x5e, 0x03, 0xd8, 0x8d, 0xaf, 0x2c, 0x78, 0xf1, + 0x82, 0xe0, 0xf2, 0xe6, 0x93, 0xe5, 0xcd, 0xbf, 0x05, 0xf3, 0x2e, 0x16, 0x92, 0x71, 0xfc, 0x16, + 0xba, 0xb7, 0xff, 0xa9, 0x82, 0x6a, 0x66, 0x27, 0xf8, 0xbd, 0x01, 0x6a, 0x03, 0xd7, 0xc3, 0xd2, + 0x61, 0x3e, 0x37, 0x1b, 0x8d, 0xf1, 0x86, 0xb3, 0x96, 0x7e, 0xf8, 0xeb, 0xef, 0x5f, 0x2a, 0x1f, + 0xc0, 0x45, 0x75, 0xb1, 0xbc, 0xce, 0xd6, 0xf2, 0x45, 0xc2, 0xd9, 0x57, 0xd8, 0x97, 0xc2, 0x59, + 0xf9, 0x2e, 0xbb, 0x6c, 0xe0, 0xef, 0x06, 0x98, 0x29, 0x7a, 0x1c, 0x3a, 0x65, 0xa5, 0x2f, 0x19, + 0xbc, 0xc6, 0xa3, 0xab, 0x27, 0x64, 0x66, 0xb2, 0x3e, 0xd2, 0xd2, 0x96, 0xe1, 0x87, 0xa5, 0xd2, + 0x56, 0x85, 0xce, 0x85, 0x3f, 0x1a, 0x00, 0x0c, 0x47, 0x01, 0x7e, 0x5c, 0xd6, 0xee, 0xc2, 0xc8, + 0x34, 0x16, 0x07, 0xf4, 0xc2, 0x95, 0x67, 0xef, 0x0c, 0xae, 0x3c, 0xcb, 0xd6, 0x52, 0x5a, 0xd6, + 0xfd, 0x72, 0x29, 0x52, 0xd5, 0x5d, 0x35, 0x56, 0xe0, 0x4f, 0x06, 0x98, 0x2e, 0xcc, 0x07, 0xb4, + 0xcb, 0xd4, 0x5c, 0x9c, 0xc9, 0x71, 0x72, 0x1c, 0x2d, 0xe7, 0x81, 0x35, 0x66, 0x67, 0xb0, 0x2e, + 0x3c, 0xd0, 0x53, 0x98, 0xa4, 0x72, 0x3d, 0x17, 0x47, 0xee, 0x7f, 0xd2, 0x43, 0xe2, 0x81, 0x9e, + 0x9f, 0x0d, 0x30, 0x53, 0x1c, 0xae, 0x72, 0x3b, 0x5d, 0x32, 0x86, 0xe3, 0x14, 0x3d, 0xd2, 0x8a, + 0x56, 0xac, 0xa5, 0x72, 0x45, 0x3c, 0xab, 0xbc, 0x6a, 0xac, 0xac, 0xfd, 0x66, 0x80, 0xf7, 0x7d, + 0x16, 0x97, 0xe8, 0x58, 0x03, 0x5a, 0xc1, 0xae, 0x7a, 0xa0, 0xec, 0x1a, 0x2f, 0x3a, 
0x39, 0x33, + 0x64, 0xea, 0x99, 0x60, 0x33, 0x1e, 0x3a, 0x21, 0xa6, 0xfa, 0xf9, 0xe2, 0x64, 0x10, 0x4a, 0x88, + 0xb8, 0xec, 0x01, 0xf7, 0xf9, 0xf0, 0xef, 0x5f, 0xc3, 0xf8, 0xa3, 0x52, 0xe9, 0x3c, 0xf9, 0xb3, + 0xd2, 0x78, 0x9a, 0x95, 0x5b, 0xd7, 0x8d, 0x3b, 0xc3, 0xc6, 0xcf, 0xda, 0x87, 0x55, 0x5d, 0xf5, + 0x93, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x49, 0x30, 0x4b, 0xa6, 0x15, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/audio_config.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/audio_config.pb.go new file mode 100644 index 0000000..1e086d5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/audio_config.pb.go @@ -0,0 +1,599 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/audio_config.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Audio encoding of the audio content sent in the conversational query request. +// Refer to the +// [Cloud Speech API +// documentation](https://cloud.google.com/speech-to-text/docs/basics) for more +// details. +type AudioEncoding int32 + +const ( + // Not specified. + AudioEncoding_AUDIO_ENCODING_UNSPECIFIED AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + AudioEncoding_AUDIO_ENCODING_LINEAR_16 AudioEncoding = 1 + // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio + // Codec) is the recommended encoding because it is lossless (therefore + // recognition is not compromised) and requires only about half the + // bandwidth of `LINEAR16`. `FLAC` stream encoding supports 16-bit and + // 24-bit samples, however, not all fields in `STREAMINFO` are supported. + AudioEncoding_AUDIO_ENCODING_FLAC AudioEncoding = 2 + // 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + AudioEncoding_AUDIO_ENCODING_MULAW AudioEncoding = 3 + // Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz` must be 8000. + AudioEncoding_AUDIO_ENCODING_AMR AudioEncoding = 4 + // Adaptive Multi-Rate Wideband codec. `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_AMR_WB AudioEncoding = 5 + // Opus encoded audio frames in Ogg container + // ([OggOpus](https://wiki.xiph.org/OggOpus)). + // `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_OGG_OPUS AudioEncoding = 6 + // Although the use of lossy encodings is not recommended, if a very low + // bitrate encoding is required, `OGG_OPUS` is highly preferred over + // Speex encoding. The [Speex](https://speex.org/) encoding supported by + // Dialogflow API has a header byte in each block, as in MIME type + // `audio/x-speex-with-header-byte`. + // It is a variant of the RTP Speex encoding defined in + // [RFC 5574](https://tools.ietf.org/html/rfc5574). + // The stream is a sequence of blocks, one block per RTP packet. 
Each block + // starts with a byte containing the length of the block, in bytes, followed + // by one or more frames of Speex data, padded to an integral number of + // bytes (octets) as specified in RFC 5574. In other words, each RTP header + // is replaced with a single byte containing the block length. Only Speex + // wideband is supported. `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE AudioEncoding = 7 +) + +var AudioEncoding_name = map[int32]string{ + 0: "AUDIO_ENCODING_UNSPECIFIED", + 1: "AUDIO_ENCODING_LINEAR_16", + 2: "AUDIO_ENCODING_FLAC", + 3: "AUDIO_ENCODING_MULAW", + 4: "AUDIO_ENCODING_AMR", + 5: "AUDIO_ENCODING_AMR_WB", + 6: "AUDIO_ENCODING_OGG_OPUS", + 7: "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE", +} +var AudioEncoding_value = map[string]int32{ + "AUDIO_ENCODING_UNSPECIFIED": 0, + "AUDIO_ENCODING_LINEAR_16": 1, + "AUDIO_ENCODING_FLAC": 2, + "AUDIO_ENCODING_MULAW": 3, + "AUDIO_ENCODING_AMR": 4, + "AUDIO_ENCODING_AMR_WB": 5, + "AUDIO_ENCODING_OGG_OPUS": 6, + "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE": 7, +} + +func (x AudioEncoding) String() string { + return proto.EnumName(AudioEncoding_name, int32(x)) +} +func (AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{0} +} + +// Variant of the specified [Speech model][google.cloud.dialogflow.v2.InputAudioConfig.model] to use. +// +// See the [Cloud Speech +// documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) +// for which models have different variants. For example, the "phone_call" model +// has both a standard and an enhanced variant. When you use an enhanced model, +// you will generally receive higher quality results than for a standard model. +type SpeechModelVariant int32 + +const ( + // No model variant specified. In this case Dialogflow defaults to + // USE_BEST_AVAILABLE. + SpeechModelVariant_SPEECH_MODEL_VARIANT_UNSPECIFIED SpeechModelVariant = 0 + // Use the best available variant of the [Speech + // model][InputAudioConfig.model] that the caller is eligible for. + // + // Please see the [Dialogflow + // docs](https://cloud.google.com/dialogflow-enterprise/docs/data-logging) for + // how to make your project eligible for enhanced models. + SpeechModelVariant_USE_BEST_AVAILABLE SpeechModelVariant = 1 + // Use standard model variant even if an enhanced model is available. See the + // [Cloud Speech + // documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) + // for details about enhanced models. + SpeechModelVariant_USE_STANDARD SpeechModelVariant = 2 + // Use an enhanced model variant: + // + // * If an enhanced variant does not exist for the given + // [model][google.cloud.dialogflow.v2.InputAudioConfig.model] and request language, Dialogflow falls + // back to the standard variant. + // + // The [Cloud Speech + // documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) + // describes which models have enhanced variants. + // + // * If the API caller isn't eligible for enhanced models, Dialogflow returns + // an error. Please see the [Dialogflow + // docs](https://cloud.google.com/dialogflow-enterprise/docs/data-logging) + // for how to make your project eligible. 
+ SpeechModelVariant_USE_ENHANCED SpeechModelVariant = 3 +) + +var SpeechModelVariant_name = map[int32]string{ + 0: "SPEECH_MODEL_VARIANT_UNSPECIFIED", + 1: "USE_BEST_AVAILABLE", + 2: "USE_STANDARD", + 3: "USE_ENHANCED", +} +var SpeechModelVariant_value = map[string]int32{ + "SPEECH_MODEL_VARIANT_UNSPECIFIED": 0, + "USE_BEST_AVAILABLE": 1, + "USE_STANDARD": 2, + "USE_ENHANCED": 3, +} + +func (x SpeechModelVariant) String() string { + return proto.EnumName(SpeechModelVariant_name, int32(x)) +} +func (SpeechModelVariant) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{1} +} + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). +type SsmlVoiceGender int32 + +const ( + // An unspecified gender, which means that the client doesn't care which + // gender the selected voice will have. + SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED SsmlVoiceGender = 0 + // A male voice. + SsmlVoiceGender_SSML_VOICE_GENDER_MALE SsmlVoiceGender = 1 + // A female voice. + SsmlVoiceGender_SSML_VOICE_GENDER_FEMALE SsmlVoiceGender = 2 + // A gender-neutral voice. + SsmlVoiceGender_SSML_VOICE_GENDER_NEUTRAL SsmlVoiceGender = 3 +) + +var SsmlVoiceGender_name = map[int32]string{ + 0: "SSML_VOICE_GENDER_UNSPECIFIED", + 1: "SSML_VOICE_GENDER_MALE", + 2: "SSML_VOICE_GENDER_FEMALE", + 3: "SSML_VOICE_GENDER_NEUTRAL", +} +var SsmlVoiceGender_value = map[string]int32{ + "SSML_VOICE_GENDER_UNSPECIFIED": 0, + "SSML_VOICE_GENDER_MALE": 1, + "SSML_VOICE_GENDER_FEMALE": 2, + "SSML_VOICE_GENDER_NEUTRAL": 3, +} + +func (x SsmlVoiceGender) String() string { + return proto.EnumName(SsmlVoiceGender_name, int32(x)) +} +func (SsmlVoiceGender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{2} +} + +// Audio encoding of the output audio format in Text-To-Speech. +type OutputAudioEncoding int32 + +const ( + // Not specified. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_UNSPECIFIED OutputAudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_LINEAR_16 OutputAudioEncoding = 1 + // MP3 audio. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_MP3 OutputAudioEncoding = 2 + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_OGG_OPUS OutputAudioEncoding = 3 +) + +var OutputAudioEncoding_name = map[int32]string{ + 0: "OUTPUT_AUDIO_ENCODING_UNSPECIFIED", + 1: "OUTPUT_AUDIO_ENCODING_LINEAR_16", + 2: "OUTPUT_AUDIO_ENCODING_MP3", + 3: "OUTPUT_AUDIO_ENCODING_OGG_OPUS", +} +var OutputAudioEncoding_value = map[string]int32{ + "OUTPUT_AUDIO_ENCODING_UNSPECIFIED": 0, + "OUTPUT_AUDIO_ENCODING_LINEAR_16": 1, + "OUTPUT_AUDIO_ENCODING_MP3": 2, + "OUTPUT_AUDIO_ENCODING_OGG_OPUS": 3, +} + +func (x OutputAudioEncoding) String() string { + return proto.EnumName(OutputAudioEncoding_name, int32(x)) +} +func (OutputAudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{3} +} + +// Instructs the speech recognizer how to process the audio content. +type InputAudioConfig struct { + // Required. Audio encoding of the audio content to process. 
+ AudioEncoding AudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.dialogflow.v2.AudioEncoding" json:"audio_encoding,omitempty"` + // Required. Sample rate (in Hertz) of the audio content sent in the query. + // Refer to + // [Cloud Speech API + // documentation](https://cloud.google.com/speech-to-text/docs/basics) for + // more details. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // Required. The language of the supplied audio. Dialogflow does not do + // translations. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The collection of phrase hints which are used to boost accuracy + // of speech recognition. + // Refer to + // [Cloud Speech API + // documentation](https://cloud.google.com/speech-to-text/docs/basics#phrase-hints) + // for more details. + PhraseHints []string `protobuf:"bytes,4,rep,name=phrase_hints,json=phraseHints,proto3" json:"phrase_hints,omitempty"` + // Optional. Which variant of the [Speech model][google.cloud.dialogflow.v2.InputAudioConfig.model] to use. + ModelVariant SpeechModelVariant `protobuf:"varint,10,opt,name=model_variant,json=modelVariant,proto3,enum=google.cloud.dialogflow.v2.SpeechModelVariant" json:"model_variant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputAudioConfig) Reset() { *m = InputAudioConfig{} } +func (m *InputAudioConfig) String() string { return proto.CompactTextString(m) } +func (*InputAudioConfig) ProtoMessage() {} +func (*InputAudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{0} +} +func (m *InputAudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputAudioConfig.Unmarshal(m, b) +} +func (m *InputAudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputAudioConfig.Marshal(b, m, deterministic) +} +func (dst *InputAudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputAudioConfig.Merge(dst, src) +} +func (m *InputAudioConfig) XXX_Size() int { + return xxx_messageInfo_InputAudioConfig.Size(m) +} +func (m *InputAudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputAudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputAudioConfig proto.InternalMessageInfo + +func (m *InputAudioConfig) GetAudioEncoding() AudioEncoding { + if m != nil { + return m.AudioEncoding + } + return AudioEncoding_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *InputAudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *InputAudioConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *InputAudioConfig) GetPhraseHints() []string { + if m != nil { + return m.PhraseHints + } + return nil +} + +func (m *InputAudioConfig) GetModelVariant() SpeechModelVariant { + if m != nil { + return m.ModelVariant + } + return SpeechModelVariant_SPEECH_MODEL_VARIANT_UNSPECIFIED +} + +// Description of which voice to use for speech synthesis. 
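+// A minimal sketch, with illustrative values, of filling in an InputAudioConfig
+// for a 16 kHz LINEAR16 request; only the field and enum names come from the
+// definitions above:
+//
+//    in := &InputAudioConfig{
+//        AudioEncoding:   AudioEncoding_AUDIO_ENCODING_LINEAR_16,
+//        SampleRateHertz: 16000,
+//        LanguageCode:    "en-US",
+//        ModelVariant:    SpeechModelVariant_USE_BEST_AVAILABLE,
+//    }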
+type VoiceSelectionParams struct { + // Optional. The name of the voice. If not set, the service will choose a + // voice based on the other parameters such as language_code and gender. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The preferred gender of the voice. If not set, the service will + // choose a voice based on the other parameters such as language_code and + // name. Note that this is only a preference, not requirement. If a + // voice of the appropriate gender is not available, the synthesizer should + // substitute a voice with a different gender rather than failing the request. + SsmlGender SsmlVoiceGender `protobuf:"varint,2,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.dialogflow.v2.SsmlVoiceGender" json:"ssml_gender,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VoiceSelectionParams) Reset() { *m = VoiceSelectionParams{} } +func (m *VoiceSelectionParams) String() string { return proto.CompactTextString(m) } +func (*VoiceSelectionParams) ProtoMessage() {} +func (*VoiceSelectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{1} +} +func (m *VoiceSelectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VoiceSelectionParams.Unmarshal(m, b) +} +func (m *VoiceSelectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VoiceSelectionParams.Marshal(b, m, deterministic) +} +func (dst *VoiceSelectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_VoiceSelectionParams.Merge(dst, src) +} +func (m *VoiceSelectionParams) XXX_Size() int { + return xxx_messageInfo_VoiceSelectionParams.Size(m) +} +func (m *VoiceSelectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_VoiceSelectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_VoiceSelectionParams proto.InternalMessageInfo + +func (m *VoiceSelectionParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VoiceSelectionParams) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +// Configuration of how speech should be synthesized. +type SynthesizeSpeechConfig struct { + // Optional. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal + // native speed supported by the specific voice. 2.0 is twice as fast, and + // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any + // other values < 0.25 or > 4.0 will return an error. + SpeakingRate float64 `protobuf:"fixed64,1,opt,name=speaking_rate,json=speakingRate,proto3" json:"speaking_rate,omitempty"` + // Optional. Speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 + // semitones from the original pitch. -20 means decrease 20 semitones from the + // original pitch. + Pitch float64 `protobuf:"fixed64,2,opt,name=pitch,proto3" json:"pitch,omitempty"` + // Optional. Volume gain (in dB) of the normal native volume supported by the + // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of + // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) + // will play at approximately half the amplitude of the normal native signal + // amplitude. A value of +6.0 (dB) will play at approximately twice the + // amplitude of the normal native signal amplitude. 
We strongly recommend not + // to exceed +10 (dB) as there's usually no effective increase in loudness for + // any value greater than that. + VolumeGainDb float64 `protobuf:"fixed64,3,opt,name=volume_gain_db,json=volumeGainDb,proto3" json:"volume_gain_db,omitempty"` + // Optional. An identifier which selects 'audio effects' profiles that are + // applied on (post synthesized) text to speech. Effects are applied on top of + // each other in the order they are given. + EffectsProfileId []string `protobuf:"bytes,5,rep,name=effects_profile_id,json=effectsProfileId,proto3" json:"effects_profile_id,omitempty"` + // Optional. The desired voice of the synthesized audio. + Voice *VoiceSelectionParams `protobuf:"bytes,4,opt,name=voice,proto3" json:"voice,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechConfig) Reset() { *m = SynthesizeSpeechConfig{} } +func (m *SynthesizeSpeechConfig) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechConfig) ProtoMessage() {} +func (*SynthesizeSpeechConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{2} +} +func (m *SynthesizeSpeechConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechConfig.Unmarshal(m, b) +} +func (m *SynthesizeSpeechConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechConfig.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechConfig.Merge(dst, src) +} +func (m *SynthesizeSpeechConfig) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechConfig.Size(m) +} +func (m *SynthesizeSpeechConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechConfig proto.InternalMessageInfo + +func (m *SynthesizeSpeechConfig) GetSpeakingRate() float64 { + if m != nil { + return m.SpeakingRate + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetPitch() float64 { + if m != nil { + return m.Pitch + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetVolumeGainDb() float64 { + if m != nil { + return m.VolumeGainDb + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetEffectsProfileId() []string { + if m != nil { + return m.EffectsProfileId + } + return nil +} + +func (m *SynthesizeSpeechConfig) GetVoice() *VoiceSelectionParams { + if m != nil { + return m.Voice + } + return nil +} + +// Instructs the speech synthesizer on how to generate the output audio content. +type OutputAudioConfig struct { + // Required. Audio encoding of the synthesized audio content. + AudioEncoding OutputAudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.dialogflow.v2.OutputAudioEncoding" json:"audio_encoding,omitempty"` + // Optional. The synthesis sample rate (in hertz) for this audio. If not + // provided, then the synthesizer will use the default sample rate based on + // the audio encoding. If this is different from the voice's natural sample + // rate, then the synthesizer will honor this request by converting to the + // desired sample rate (which might result in worse audio quality). + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // Optional. Configuration of how speech should be synthesized. 
+ SynthesizeSpeechConfig *SynthesizeSpeechConfig `protobuf:"bytes,3,opt,name=synthesize_speech_config,json=synthesizeSpeechConfig,proto3" json:"synthesize_speech_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputAudioConfig) Reset() { *m = OutputAudioConfig{} } +func (m *OutputAudioConfig) String() string { return proto.CompactTextString(m) } +func (*OutputAudioConfig) ProtoMessage() {} +func (*OutputAudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_af44fcb5715f52d7, []int{3} +} +func (m *OutputAudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputAudioConfig.Unmarshal(m, b) +} +func (m *OutputAudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputAudioConfig.Marshal(b, m, deterministic) +} +func (dst *OutputAudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputAudioConfig.Merge(dst, src) +} +func (m *OutputAudioConfig) XXX_Size() int { + return xxx_messageInfo_OutputAudioConfig.Size(m) +} +func (m *OutputAudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputAudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputAudioConfig proto.InternalMessageInfo + +func (m *OutputAudioConfig) GetAudioEncoding() OutputAudioEncoding { + if m != nil { + return m.AudioEncoding + } + return OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *OutputAudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *OutputAudioConfig) GetSynthesizeSpeechConfig() *SynthesizeSpeechConfig { + if m != nil { + return m.SynthesizeSpeechConfig + } + return nil +} + +func init() { + proto.RegisterType((*InputAudioConfig)(nil), "google.cloud.dialogflow.v2.InputAudioConfig") + proto.RegisterType((*VoiceSelectionParams)(nil), "google.cloud.dialogflow.v2.VoiceSelectionParams") + proto.RegisterType((*SynthesizeSpeechConfig)(nil), "google.cloud.dialogflow.v2.SynthesizeSpeechConfig") + proto.RegisterType((*OutputAudioConfig)(nil), "google.cloud.dialogflow.v2.OutputAudioConfig") + proto.RegisterEnum("google.cloud.dialogflow.v2.AudioEncoding", AudioEncoding_name, AudioEncoding_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.SpeechModelVariant", SpeechModelVariant_name, SpeechModelVariant_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.SsmlVoiceGender", SsmlVoiceGender_name, SsmlVoiceGender_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.OutputAudioEncoding", OutputAudioEncoding_name, OutputAudioEncoding_value) +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/audio_config.proto", fileDescriptor_audio_config_af44fcb5715f52d7) +} + +var fileDescriptor_audio_config_af44fcb5715f52d7 = []byte{ + // 898 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x4f, 0x6f, 0xe2, 0xc6, + 0x1b, 0xfe, 0xd9, 0x24, 0xfb, 0x53, 0xde, 0x90, 0xac, 0x77, 0x36, 0xcd, 0x3a, 0x69, 0x92, 0x92, + 0x64, 0x57, 0x62, 0x69, 0x0b, 0x2d, 0x2b, 0xf5, 0xd2, 0x93, 0xc1, 0x03, 0x58, 0x02, 0x63, 0xd9, + 0x40, 0xda, 0x5e, 0x46, 0x13, 0x7b, 0x30, 0x56, 0x8d, 0xc7, 0xc2, 0x86, 0xb6, 0x7b, 0xef, 0xb1, + 0x9f, 0xa1, 0x52, 0xd5, 0x53, 0xa5, 0x7e, 0xb6, 0x5e, 0x2b, 0xf5, 0x52, 0x79, 0x4c, 0x36, 0x2c, + 0xb0, 0x9c, 0x7a, 0xf3, 0x3c, 0xcf, 0xfb, 0xf7, 0x79, 0xdf, 0xf1, 0xc0, 0xe7, 0x3e, 0xe7, 0x7e, + 0xc8, 0x6a, 0x6e, 0xc8, 0xe7, 0x5e, 0xcd, 0x0b, 0x68, 0xc8, 0xfd, 0x71, 0xc8, 
0x7f, 0xa8, 0x2d, + 0xea, 0x35, 0x3a, 0xf7, 0x02, 0x4e, 0x5c, 0x1e, 0x8d, 0x03, 0xbf, 0x1a, 0xcf, 0x78, 0xca, 0xd1, + 0x79, 0x6e, 0x5e, 0x15, 0xe6, 0xd5, 0x47, 0xf3, 0xea, 0xa2, 0x7e, 0x7e, 0xb1, 0x0c, 0x45, 0xe3, + 0xa0, 0x46, 0xa3, 0x88, 0xa7, 0x34, 0x0d, 0x78, 0x94, 0xe4, 0x9e, 0x37, 0x7f, 0xca, 0xa0, 0x18, + 0x51, 0x3c, 0x4f, 0xb5, 0x2c, 0x6a, 0x53, 0x04, 0x45, 0x16, 0x1c, 0xe7, 0x49, 0x58, 0xe4, 0x72, + 0x2f, 0x88, 0x7c, 0x55, 0x2a, 0x49, 0xe5, 0xe3, 0xfa, 0xeb, 0xea, 0x87, 0xf3, 0x54, 0x45, 0x00, + 0xbc, 0x74, 0xb0, 0x8f, 0xe8, 0xea, 0x11, 0x55, 0xe0, 0x59, 0x42, 0xa7, 0x71, 0xc8, 0xc8, 0x8c, + 0xa6, 0x8c, 0x4c, 0xd8, 0x2c, 0x7d, 0xab, 0xca, 0x25, 0xa9, 0xbc, 0x6f, 0x3f, 0xcd, 0x09, 0x9b, + 0xa6, 0xac, 0x93, 0xc1, 0xe8, 0x16, 0x8e, 0x42, 0x1a, 0xf9, 0x73, 0xea, 0x33, 0xe2, 0x72, 0x8f, + 0xa9, 0x85, 0x92, 0x54, 0x3e, 0xb0, 0x8b, 0x0f, 0x60, 0x93, 0x7b, 0x0c, 0x5d, 0x43, 0x31, 0x9e, + 0xcc, 0x68, 0xc2, 0xc8, 0x24, 0x88, 0xd2, 0x44, 0xdd, 0x2b, 0x15, 0xca, 0x07, 0xf6, 0x61, 0x8e, + 0x75, 0x32, 0x08, 0x39, 0x70, 0x34, 0xe5, 0x1e, 0x0b, 0xc9, 0x82, 0xce, 0x02, 0x1a, 0xa5, 0x2a, + 0x88, 0x26, 0xaa, 0xbb, 0x9a, 0x70, 0x62, 0xc6, 0xdc, 0x49, 0x2f, 0x73, 0x1b, 0xe5, 0x5e, 0x76, + 0x71, 0xba, 0x72, 0xba, 0xf9, 0x11, 0x4e, 0x46, 0x3c, 0x70, 0x99, 0xc3, 0x42, 0xe6, 0x66, 0x42, + 0x5a, 0x74, 0x46, 0xa7, 0x09, 0x42, 0xb0, 0x17, 0xd1, 0x29, 0x13, 0x42, 0x1d, 0xd8, 0xe2, 0x1b, + 0x75, 0xe1, 0x30, 0x49, 0xa6, 0x21, 0xf1, 0x59, 0xe4, 0xb1, 0x99, 0x68, 0xf7, 0xb8, 0xfe, 0xe9, + 0xce, 0xf4, 0xc9, 0x34, 0x14, 0xe1, 0xdb, 0xc2, 0xc5, 0x86, 0xcc, 0x3f, 0xff, 0xbe, 0xf9, 0x4b, + 0x82, 0x53, 0xe7, 0xa7, 0x28, 0x9d, 0xb0, 0x24, 0x78, 0xcb, 0xf2, 0x42, 0x97, 0xf3, 0xba, 0x85, + 0xa3, 0x24, 0x66, 0xf4, 0xfb, 0x20, 0xf2, 0x85, 0xbe, 0xa2, 0x0a, 0xc9, 0x2e, 0x3e, 0x80, 0x99, + 0xb6, 0xe8, 0x04, 0xf6, 0xe3, 0x20, 0x75, 0x27, 0xa2, 0x0e, 0xc9, 0xce, 0x0f, 0xe8, 0x25, 0x1c, + 0x2f, 0x78, 0x38, 0x9f, 0x32, 0xe2, 0xd3, 0x20, 0x22, 0xde, 0xbd, 0x50, 0x5b, 0xb2, 0x8b, 0x39, + 0xda, 0xa6, 0x41, 0xa4, 0xdf, 0xa3, 0xcf, 0x00, 0xb1, 0xf1, 0x98, 0xb9, 0x69, 0x42, 0xe2, 0x19, + 0x1f, 0x07, 0x21, 0x23, 0x81, 0xa7, 0xee, 0x0b, 0xcd, 0x95, 0x25, 0x63, 0xe5, 0x84, 0xe1, 0xa1, + 0x16, 0xec, 0x2f, 0xb2, 0x26, 0xd4, 0xbd, 0x92, 0x54, 0x3e, 0xac, 0x7f, 0xb1, 0xab, 0xe3, 0x6d, + 0x62, 0xda, 0xb9, 0xfb, 0xcd, 0xcf, 0x32, 0x3c, 0xeb, 0xcf, 0xd3, 0xb5, 0xe5, 0x1c, 0x7d, 0x60, + 0x39, 0x6b, 0xbb, 0xd2, 0xac, 0x84, 0xf9, 0x2f, 0x56, 0x34, 0x04, 0x35, 0x79, 0x37, 0x0a, 0x92, + 0x88, 0x59, 0x2c, 0x6f, 0xa4, 0xd0, 0xef, 0xb0, 0x5e, 0xdf, 0x39, 0xe6, 0xad, 0x63, 0xb4, 0x4f, + 0x93, 0xad, 0x78, 0xe5, 0x1f, 0x09, 0x8e, 0xde, 0x2b, 0x1d, 0x5d, 0xc1, 0xb9, 0x36, 0xd4, 0x8d, + 0x3e, 0xc1, 0x66, 0xb3, 0xaf, 0x1b, 0x66, 0x9b, 0x0c, 0x4d, 0xc7, 0xc2, 0x4d, 0xa3, 0x65, 0x60, + 0x5d, 0xf9, 0x1f, 0xba, 0x00, 0x75, 0x8d, 0xef, 0x1a, 0x26, 0xd6, 0x6c, 0xf2, 0xe5, 0x57, 0x8a, + 0x84, 0x5e, 0xc0, 0xf3, 0x35, 0xb6, 0xd5, 0xd5, 0x9a, 0x8a, 0x8c, 0x54, 0x38, 0x59, 0x23, 0x7a, + 0xc3, 0xae, 0x76, 0xa7, 0x14, 0xd0, 0x29, 0xa0, 0x35, 0x46, 0xeb, 0xd9, 0xca, 0x1e, 0x3a, 0x83, + 0x8f, 0x36, 0x71, 0x72, 0xd7, 0x50, 0xf6, 0xd1, 0xc7, 0xf0, 0x62, 0x8d, 0xea, 0xb7, 0xdb, 0xa4, + 0x6f, 0x0d, 0x1d, 0xe5, 0x09, 0x7a, 0x0d, 0xaf, 0xd6, 0x48, 0xc7, 0xc2, 0xf8, 0x1b, 0x72, 0x67, + 0x0c, 0x3a, 0xa4, 0x83, 0x35, 0x1d, 0xdb, 0xa4, 0xf1, 0xed, 0x00, 0x2b, 0xff, 0xaf, 0x2c, 0x00, + 0x6d, 0xde, 0x4a, 0xf4, 0x12, 0x4a, 0x99, 0x47, 0xb3, 0x43, 0x7a, 0x7d, 0x1d, 0x77, 0xc9, 0x48, + 0xb3, 0x0d, 0xcd, 0x1c, 0xac, 0xe9, 0x70, 0x0a, 0x68, 0xe8, 0x60, 0xd2, 0xc0, 0xce, 0x80, 0x68, + 0x23, 
0xcd, 0xe8, 0x6a, 0x8d, 0x2e, 0x56, 0x24, 0xa4, 0x40, 0x31, 0xc3, 0x9d, 0x81, 0x66, 0xea, + 0x9a, 0xad, 0x2b, 0xf2, 0x03, 0x82, 0xcd, 0x8e, 0x66, 0x36, 0xb1, 0xae, 0x14, 0x2a, 0xbf, 0x48, + 0xf0, 0x74, 0xed, 0x3e, 0xa2, 0x6b, 0xb8, 0x74, 0x9c, 0x5e, 0x97, 0x8c, 0xfa, 0x46, 0x13, 0x93, + 0x36, 0x36, 0xb3, 0x3a, 0xdf, 0x4f, 0x79, 0x0e, 0xa7, 0x9b, 0x26, 0x3d, 0x4d, 0xa4, 0xbd, 0x00, + 0x75, 0x93, 0x6b, 0x61, 0xc1, 0xca, 0xe8, 0x12, 0xce, 0x36, 0x59, 0x13, 0x0f, 0x07, 0xb6, 0xd6, + 0x55, 0x0a, 0x95, 0xdf, 0x25, 0x78, 0xbe, 0x65, 0x8d, 0xd1, 0x2b, 0xb8, 0xee, 0x0f, 0x07, 0xd6, + 0x70, 0x40, 0x76, 0xae, 0xc4, 0x2d, 0x7c, 0xb2, 0xdd, 0x6c, 0x75, 0x33, 0x2e, 0xe1, 0x6c, 0xbb, + 0x51, 0xcf, 0x7a, 0xa3, 0xc8, 0xe8, 0x06, 0xae, 0xb6, 0xd3, 0xef, 0x26, 0x5b, 0x68, 0xfc, 0x2a, + 0xc1, 0x95, 0xcb, 0xa7, 0x3b, 0xd6, 0xbf, 0xa1, 0xac, 0x5c, 0x67, 0x2b, 0x7b, 0x85, 0x2c, 0xe9, + 0x3b, 0x7d, 0x69, 0xef, 0xf3, 0xec, 0x37, 0x5f, 0xe5, 0x33, 0xbf, 0xe6, 0xb3, 0x48, 0xbc, 0x51, + 0xb5, 0x9c, 0xa2, 0x71, 0x90, 0x6c, 0x7b, 0x0f, 0xbf, 0x7e, 0x3c, 0xfd, 0x2d, 0x49, 0xbf, 0xc9, + 0xb2, 0xde, 0xfa, 0x43, 0x3e, 0x6f, 0xe7, 0xe1, 0x9a, 0x22, 0xbd, 0xfe, 0x98, 0x7e, 0x54, 0xbf, + 0x7f, 0x22, 0xa2, 0xbe, 0xf9, 0x37, 0x00, 0x00, 0xff, 0xff, 0x15, 0xde, 0x52, 0x01, 0x64, 0x07, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/context.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/context.pb.go new file mode 100644 index 0000000..6882491 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/context.pb.go @@ -0,0 +1,747 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/context.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a context. +type Context struct { + // Required. The unique identifier of the context. Format: + // `projects//agent/sessions//contexts/`. + // + // The `Context ID` is always converted to lowercase, may only contain + // characters in [a-zA-Z0-9_-%] and may be at most 250 bytes long. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The number of conversational query requests after which the + // context expires. If set to `0` (the default) the context expires + // immediately. Contexts expire automatically after 20 minutes if there + // are no matching queries. + LifespanCount int32 `protobuf:"varint,2,opt,name=lifespan_count,json=lifespanCount,proto3" json:"lifespan_count,omitempty"` + // Optional. The collection of parameters associated with this context. 
+ // Refer to [this + // doc](https://cloud.google.com/dialogflow-enterprise/docs/intents-actions-parameters) + // for syntax. + Parameters *_struct.Struct `protobuf:"bytes,3,opt,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Context) Reset() { *m = Context{} } +func (m *Context) String() string { return proto.CompactTextString(m) } +func (*Context) ProtoMessage() {} +func (*Context) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{0} +} +func (m *Context) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Context.Unmarshal(m, b) +} +func (m *Context) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Context.Marshal(b, m, deterministic) +} +func (dst *Context) XXX_Merge(src proto.Message) { + xxx_messageInfo_Context.Merge(dst, src) +} +func (m *Context) XXX_Size() int { + return xxx_messageInfo_Context.Size(m) +} +func (m *Context) XXX_DiscardUnknown() { + xxx_messageInfo_Context.DiscardUnknown(m) +} + +var xxx_messageInfo_Context proto.InternalMessageInfo + +func (m *Context) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Context) GetLifespanCount() int32 { + if m != nil { + return m.LifespanCount + } + return 0 +} + +func (m *Context) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +// The request message for [Contexts.ListContexts][google.cloud.dialogflow.v2.Contexts.ListContexts]. +type ListContextsRequest struct { + // Required. The session to list all contexts from. + // Format: `projects//agent/sessions/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListContextsRequest) Reset() { *m = ListContextsRequest{} } +func (m *ListContextsRequest) String() string { return proto.CompactTextString(m) } +func (*ListContextsRequest) ProtoMessage() {} +func (*ListContextsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{1} +} +func (m *ListContextsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListContextsRequest.Unmarshal(m, b) +} +func (m *ListContextsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListContextsRequest.Marshal(b, m, deterministic) +} +func (dst *ListContextsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListContextsRequest.Merge(dst, src) +} +func (m *ListContextsRequest) XXX_Size() int { + return xxx_messageInfo_ListContextsRequest.Size(m) +} +func (m *ListContextsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListContextsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListContextsRequest proto.InternalMessageInfo + +func (m *ListContextsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListContextsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListContextsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Contexts.ListContexts][google.cloud.dialogflow.v2.Contexts.ListContexts]. +type ListContextsResponse struct { + // The list of contexts. There will be a maximum number of items + // returned based on the page_size field in the request. + Contexts []*Context `protobuf:"bytes,1,rep,name=contexts,proto3" json:"contexts,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListContextsResponse) Reset() { *m = ListContextsResponse{} } +func (m *ListContextsResponse) String() string { return proto.CompactTextString(m) } +func (*ListContextsResponse) ProtoMessage() {} +func (*ListContextsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{2} +} +func (m *ListContextsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListContextsResponse.Unmarshal(m, b) +} +func (m *ListContextsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListContextsResponse.Marshal(b, m, deterministic) +} +func (dst *ListContextsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListContextsResponse.Merge(dst, src) +} +func (m *ListContextsResponse) XXX_Size() int { + return xxx_messageInfo_ListContextsResponse.Size(m) +} +func (m *ListContextsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListContextsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListContextsResponse proto.InternalMessageInfo + +func (m *ListContextsResponse) GetContexts() []*Context { + if m != nil { + return m.Contexts + } + return nil +} + +func (m *ListContextsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Contexts.GetContext][google.cloud.dialogflow.v2.Contexts.GetContext]. +type GetContextRequest struct { + // Required. The name of the context. Format: + // `projects//agent/sessions//contexts/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetContextRequest) Reset() { *m = GetContextRequest{} } +func (m *GetContextRequest) String() string { return proto.CompactTextString(m) } +func (*GetContextRequest) ProtoMessage() {} +func (*GetContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{3} +} +func (m *GetContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetContextRequest.Unmarshal(m, b) +} +func (m *GetContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetContextRequest.Marshal(b, m, deterministic) +} +func (dst *GetContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetContextRequest.Merge(dst, src) +} +func (m *GetContextRequest) XXX_Size() int { + return xxx_messageInfo_GetContextRequest.Size(m) +} +func (m *GetContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetContextRequest proto.InternalMessageInfo + +func (m *GetContextRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Contexts.CreateContext][google.cloud.dialogflow.v2.Contexts.CreateContext]. +type CreateContextRequest struct { + // Required. The session to create a context for. + // Format: `projects//agent/sessions/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The context to create. 
+ Context *Context `protobuf:"bytes,2,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateContextRequest) Reset() { *m = CreateContextRequest{} } +func (m *CreateContextRequest) String() string { return proto.CompactTextString(m) } +func (*CreateContextRequest) ProtoMessage() {} +func (*CreateContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{4} +} +func (m *CreateContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateContextRequest.Unmarshal(m, b) +} +func (m *CreateContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateContextRequest.Marshal(b, m, deterministic) +} +func (dst *CreateContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateContextRequest.Merge(dst, src) +} +func (m *CreateContextRequest) XXX_Size() int { + return xxx_messageInfo_CreateContextRequest.Size(m) +} +func (m *CreateContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateContextRequest proto.InternalMessageInfo + +func (m *CreateContextRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateContextRequest) GetContext() *Context { + if m != nil { + return m.Context + } + return nil +} + +// The request message for [Contexts.UpdateContext][google.cloud.dialogflow.v2.Contexts.UpdateContext]. +type UpdateContextRequest struct { + // Required. The context to update. + Context *Context `protobuf:"bytes,1,opt,name=context,proto3" json:"context,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateContextRequest) Reset() { *m = UpdateContextRequest{} } +func (m *UpdateContextRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateContextRequest) ProtoMessage() {} +func (*UpdateContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{5} +} +func (m *UpdateContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateContextRequest.Unmarshal(m, b) +} +func (m *UpdateContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateContextRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateContextRequest.Merge(dst, src) +} +func (m *UpdateContextRequest) XXX_Size() int { + return xxx_messageInfo_UpdateContextRequest.Size(m) +} +func (m *UpdateContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateContextRequest proto.InternalMessageInfo + +func (m *UpdateContextRequest) GetContext() *Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *UpdateContextRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [Contexts.DeleteContext][google.cloud.dialogflow.v2.Contexts.DeleteContext]. +type DeleteContextRequest struct { + // Required. The name of the context to delete. 
Format: + // `projects//agent/sessions//contexts/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteContextRequest) Reset() { *m = DeleteContextRequest{} } +func (m *DeleteContextRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteContextRequest) ProtoMessage() {} +func (*DeleteContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{6} +} +func (m *DeleteContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteContextRequest.Unmarshal(m, b) +} +func (m *DeleteContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteContextRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteContextRequest.Merge(dst, src) +} +func (m *DeleteContextRequest) XXX_Size() int { + return xxx_messageInfo_DeleteContextRequest.Size(m) +} +func (m *DeleteContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteContextRequest proto.InternalMessageInfo + +func (m *DeleteContextRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Contexts.DeleteAllContexts][google.cloud.dialogflow.v2.Contexts.DeleteAllContexts]. +type DeleteAllContextsRequest struct { + // Required. The name of the session to delete all contexts from. Format: + // `projects//agent/sessions/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAllContextsRequest) Reset() { *m = DeleteAllContextsRequest{} } +func (m *DeleteAllContextsRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAllContextsRequest) ProtoMessage() {} +func (*DeleteAllContextsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_d5871c2b8dccc66a, []int{7} +} +func (m *DeleteAllContextsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAllContextsRequest.Unmarshal(m, b) +} +func (m *DeleteAllContextsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAllContextsRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAllContextsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAllContextsRequest.Merge(dst, src) +} +func (m *DeleteAllContextsRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAllContextsRequest.Size(m) +} +func (m *DeleteAllContextsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAllContextsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAllContextsRequest proto.InternalMessageInfo + +func (m *DeleteAllContextsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func init() { + proto.RegisterType((*Context)(nil), "google.cloud.dialogflow.v2.Context") + proto.RegisterType((*ListContextsRequest)(nil), "google.cloud.dialogflow.v2.ListContextsRequest") + proto.RegisterType((*ListContextsResponse)(nil), "google.cloud.dialogflow.v2.ListContextsResponse") + proto.RegisterType((*GetContextRequest)(nil), "google.cloud.dialogflow.v2.GetContextRequest") + proto.RegisterType((*CreateContextRequest)(nil), 
"google.cloud.dialogflow.v2.CreateContextRequest") + proto.RegisterType((*UpdateContextRequest)(nil), "google.cloud.dialogflow.v2.UpdateContextRequest") + proto.RegisterType((*DeleteContextRequest)(nil), "google.cloud.dialogflow.v2.DeleteContextRequest") + proto.RegisterType((*DeleteAllContextsRequest)(nil), "google.cloud.dialogflow.v2.DeleteAllContextsRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ContextsClient is the client API for Contexts service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ContextsClient interface { + // Returns the list of all contexts in the specified session. + ListContexts(ctx context.Context, in *ListContextsRequest, opts ...grpc.CallOption) (*ListContextsResponse, error) + // Retrieves the specified context. + GetContext(ctx context.Context, in *GetContextRequest, opts ...grpc.CallOption) (*Context, error) + // Creates a context. + // + // If the specified context already exists, overrides the context. + CreateContext(ctx context.Context, in *CreateContextRequest, opts ...grpc.CallOption) (*Context, error) + // Updates the specified context. + UpdateContext(ctx context.Context, in *UpdateContextRequest, opts ...grpc.CallOption) (*Context, error) + // Deletes the specified context. + DeleteContext(ctx context.Context, in *DeleteContextRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes all active contexts in the specified session. + DeleteAllContexts(ctx context.Context, in *DeleteAllContextsRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type contextsClient struct { + cc *grpc.ClientConn +} + +func NewContextsClient(cc *grpc.ClientConn) ContextsClient { + return &contextsClient{cc} +} + +func (c *contextsClient) ListContexts(ctx context.Context, in *ListContextsRequest, opts ...grpc.CallOption) (*ListContextsResponse, error) { + out := new(ListContextsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/ListContexts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) GetContext(ctx context.Context, in *GetContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/GetContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) CreateContext(ctx context.Context, in *CreateContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/CreateContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) UpdateContext(ctx context.Context, in *UpdateContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/UpdateContext", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) DeleteContext(ctx context.Context, in *DeleteContextRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/DeleteContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) DeleteAllContexts(ctx context.Context, in *DeleteAllContextsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Contexts/DeleteAllContexts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContextsServer is the server API for Contexts service. +type ContextsServer interface { + // Returns the list of all contexts in the specified session. + ListContexts(context.Context, *ListContextsRequest) (*ListContextsResponse, error) + // Retrieves the specified context. + GetContext(context.Context, *GetContextRequest) (*Context, error) + // Creates a context. + // + // If the specified context already exists, overrides the context. + CreateContext(context.Context, *CreateContextRequest) (*Context, error) + // Updates the specified context. + UpdateContext(context.Context, *UpdateContextRequest) (*Context, error) + // Deletes the specified context. + DeleteContext(context.Context, *DeleteContextRequest) (*empty.Empty, error) + // Deletes all active contexts in the specified session. + DeleteAllContexts(context.Context, *DeleteAllContextsRequest) (*empty.Empty, error) +} + +func RegisterContextsServer(s *grpc.Server, srv ContextsServer) { + s.RegisterService(&_Contexts_serviceDesc, srv) +} + +func _Contexts_ListContexts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListContextsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).ListContexts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/ListContexts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).ListContexts(ctx, req.(*ListContextsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_GetContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).GetContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/GetContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).GetContext(ctx, req.(*GetContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_CreateContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).CreateContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/CreateContext", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).CreateContext(ctx, req.(*CreateContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_UpdateContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).UpdateContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/UpdateContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).UpdateContext(ctx, req.(*UpdateContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_DeleteContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).DeleteContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/DeleteContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).DeleteContext(ctx, req.(*DeleteContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_DeleteAllContexts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAllContextsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).DeleteAllContexts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Contexts/DeleteAllContexts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).DeleteAllContexts(ctx, req.(*DeleteAllContextsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Contexts_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.Contexts", + HandlerType: (*ContextsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListContexts", + Handler: _Contexts_ListContexts_Handler, + }, + { + MethodName: "GetContext", + Handler: _Contexts_GetContext_Handler, + }, + { + MethodName: "CreateContext", + Handler: _Contexts_CreateContext_Handler, + }, + { + MethodName: "UpdateContext", + Handler: _Contexts_UpdateContext_Handler, + }, + { + MethodName: "DeleteContext", + Handler: _Contexts_DeleteContext_Handler, + }, + { + MethodName: "DeleteAllContexts", + Handler: _Contexts_DeleteAllContexts_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2/context.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/context.proto", fileDescriptor_context_d5871c2b8dccc66a) +} + +var fileDescriptor_context_d5871c2b8dccc66a = []byte{ + // 728 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x96, 0x4f, 0x6f, 0xd3, 0x3e, + 0x18, 0xc7, 0xe5, 0xee, 0xf7, 0xdb, 0x1f, 0x6f, 0x05, 0xcd, 0x54, 0xa3, 0x64, 0x03, 0x55, 0x41, + 0x40, 0x55, 0x89, 0x18, 0x32, 0x24, 0x34, 0xc6, 0x84, 0x58, 0xcb, 0x26, 0x21, 0x90, 0xa6, 0x0e, + 0x38, 0x70, 0xa9, 0xbc, 
0xf6, 0x69, 0x14, 0x96, 0xc6, 0x21, 0x76, 0xc7, 0x18, 0x9a, 0x90, 0x78, + 0x09, 0xc0, 0x6d, 0x9c, 0x38, 0x4e, 0x9c, 0x78, 0x2b, 0xbc, 0x05, 0xee, 0x5c, 0x39, 0xa2, 0x38, + 0x4e, 0xff, 0xaf, 0xa4, 0xdc, 0x1a, 0xfb, 0x6b, 0x7f, 0x3f, 0x7e, 0xfc, 0x7d, 0x92, 0xe2, 0xa2, + 0xc3, 0xb9, 0xe3, 0x01, 0xad, 0x7b, 0xbc, 0xdd, 0xa0, 0x0d, 0x97, 0x79, 0xdc, 0x69, 0x7a, 0xfc, + 0x0d, 0x3d, 0xb0, 0x69, 0x9d, 0xfb, 0x12, 0x0e, 0xa5, 0x15, 0x84, 0x5c, 0x72, 0x62, 0xc4, 0x4a, + 0x4b, 0x29, 0xad, 0xae, 0xd2, 0x3a, 0xb0, 0x8d, 0x15, 0xbd, 0x0b, 0x0b, 0x5c, 0xca, 0x7c, 0x9f, + 0x4b, 0x26, 0x5d, 0xee, 0x8b, 0x78, 0xa5, 0x71, 0xa9, 0x67, 0x36, 0x04, 0xc1, 0xdb, 0x61, 0x1d, + 0xf4, 0xd4, 0xb2, 0x9e, 0x52, 0x4f, 0x7b, 0xed, 0x26, 0x85, 0x56, 0x20, 0xdf, 0xea, 0xc9, 0xc2, + 0xe0, 0x64, 0xd3, 0x05, 0xaf, 0x51, 0x6b, 0x31, 0xb1, 0xaf, 0x15, 0x2b, 0x83, 0x0a, 0x21, 0xc3, + 0x76, 0x5d, 0x13, 0x9b, 0xc7, 0x78, 0xa6, 0x1c, 0x1f, 0x81, 0x10, 0xfc, 0x9f, 0xcf, 0x5a, 0x90, + 0x47, 0x05, 0x54, 0x9c, 0xab, 0xaa, 0xdf, 0xe4, 0x1a, 0x3e, 0xe7, 0xb9, 0x4d, 0x10, 0x01, 0xf3, + 0x6b, 0x75, 0xde, 0xf6, 0x65, 0x3e, 0x53, 0x40, 0xc5, 0xff, 0xab, 0xd9, 0x64, 0xb4, 0x1c, 0x0d, + 0x92, 0xbb, 0x18, 0x07, 0x2c, 0x64, 0x2d, 0x90, 0x10, 0x8a, 0xfc, 0x54, 0x01, 0x15, 0xe7, 0xed, + 0x8b, 0x96, 0x2e, 0x46, 0x62, 0x6c, 0xed, 0x2a, 0xe3, 0x6a, 0x8f, 0xd4, 0x74, 0xf1, 0x85, 0x27, + 0xae, 0x90, 0x1a, 0x41, 0x54, 0xe1, 0x75, 0x1b, 0x84, 0x24, 0x4b, 0x78, 0x3a, 0x60, 0x21, 0xf8, + 0x52, 0xc3, 0xe8, 0x27, 0xb2, 0x8c, 0xe7, 0x02, 0xe6, 0x40, 0x4d, 0xb8, 0x47, 0xa0, 0x49, 0x66, + 0xa3, 0x81, 0x5d, 0xf7, 0x08, 0xc8, 0xe5, 0x08, 0xc2, 0x81, 0x9a, 0xe4, 0xfb, 0xe0, 0x2b, 0x88, + 0xb9, 0xaa, 0x92, 0x3f, 0x8b, 0x06, 0xcc, 0xf7, 0x38, 0xd7, 0x6f, 0x25, 0x02, 0xee, 0x0b, 0x20, + 0x0f, 0xf0, 0xac, 0xbe, 0x44, 0x91, 0x47, 0x85, 0xa9, 0xe2, 0xbc, 0x7d, 0xd5, 0x3a, 0xfb, 0x1a, + 0x2d, 0xbd, 0xbe, 0xda, 0x59, 0x44, 0xae, 0xe3, 0xf3, 0x3e, 0x1c, 0xca, 0x5a, 0x8f, 0x79, 0x46, + 0x99, 0x67, 0xa3, 0xe1, 0x9d, 0x0e, 0xc0, 0x0d, 0xbc, 0xb8, 0x0d, 0x89, 0x7f, 0x72, 0xd2, 0x11, + 0x45, 0x37, 0x5b, 0x38, 0x57, 0x0e, 0x81, 0x49, 0x18, 0xd0, 0x9e, 0x55, 0x95, 0x0d, 0x3c, 0xa3, + 0x61, 0x94, 0x71, 0xca, 0x03, 0x24, 0x6b, 0xcc, 0x8f, 0x08, 0xe7, 0x9e, 0x07, 0x8d, 0x61, 0xbf, + 0x9e, 0x7d, 0xd1, 0xe4, 0xfb, 0x92, 0x75, 0x3c, 0xdf, 0x56, 0xdb, 0xaa, 0x34, 0x6a, 0x34, 0x63, + 0x28, 0x15, 0x5b, 0x51, 0x60, 0x9f, 0x32, 0xb1, 0x5f, 0xc5, 0xb1, 0x3c, 0xfa, 0x6d, 0x96, 0x70, + 0xae, 0x02, 0x1e, 0x0c, 0x31, 0x8d, 0xaa, 0x97, 0x8d, 0xf3, 0xb1, 0xf6, 0xa1, 0xe7, 0xa5, 0x4c, + 0x92, 0xfd, 0x6b, 0x06, 0xcf, 0x26, 0x5a, 0xf2, 0x0d, 0xe1, 0x85, 0xde, 0x6c, 0x10, 0x3a, 0xee, + 0xa0, 0x23, 0x02, 0x6b, 0xdc, 0x4a, 0xbf, 0x20, 0x8e, 0x9d, 0xb9, 0xf6, 0xe1, 0xc7, 0xcf, 0x4f, + 0x99, 0x55, 0x72, 0x3b, 0x7a, 0x8b, 0xbc, 0x8b, 0xa9, 0x36, 0x82, 0x90, 0xbf, 0x82, 0xba, 0x14, + 0xb4, 0x44, 0x99, 0x03, 0xbe, 0xa4, 0x02, 0x84, 0x88, 0xde, 0x12, 0xb4, 0x74, 0x4c, 0x3b, 0x81, + 0x3b, 0x41, 0x18, 0x77, 0x93, 0x44, 0x6e, 0x8e, 0xf3, 0x1e, 0x4a, 0x9c, 0x91, 0xe6, 0x12, 0x07, + 0xe8, 0xa2, 0x2a, 0x8f, 0x63, 0xeb, 0xa0, 0xd1, 0xd2, 0x31, 0x39, 0x45, 0x38, 0xdb, 0x17, 0x5f, + 0x32, 0xb6, 0x38, 0xa3, 0x92, 0x9e, 0x8e, 0x71, 0x53, 0x31, 0xde, 0x37, 0x27, 0xaf, 0xe0, 0xbd, + 0x4e, 0x46, 0xbf, 0x23, 0x9c, 0xed, 0xcb, 0xfe, 0x78, 0xd8, 0x51, 0x6d, 0x92, 0x0e, 0xf6, 0xb1, + 0x82, 0xad, 0xd8, 0x6b, 0x0a, 0x36, 0xf9, 0x6a, 0x4c, 0x52, 0xd8, 0x2e, 0xf4, 0x67, 0x84, 0xb3, + 0x7d, 0xcd, 0x31, 0x1e, 0x7a, 0x54, 0x1f, 0x19, 0x4b, 0x43, 0x7d, 0xf8, 0x28, 0xfa, 0xaa, 0x24, + 0x17, 0x5f, 0xfa, 0x87, 0x8b, 0xff, 0x82, 0xf0, 
0xe2, 0x50, 0x1f, 0x92, 0x3b, 0x7f, 0x47, 0x1b, + 0x6e, 0xdb, 0x94, 0x78, 0x93, 0xdc, 0xf9, 0xe6, 0x09, 0xc2, 0x57, 0xea, 0xbc, 0x35, 0x06, 0x67, + 0x73, 0x41, 0x63, 0xec, 0x44, 0xa6, 0x3b, 0xe8, 0x65, 0x45, 0x6b, 0x1d, 0xee, 0x31, 0xdf, 0xb1, + 0x78, 0xe8, 0x50, 0x07, 0x7c, 0x85, 0x44, 0xe3, 0x29, 0x16, 0xb8, 0x62, 0xd4, 0xff, 0x82, 0xf5, + 0xee, 0xd3, 0x6f, 0x84, 0xbe, 0x66, 0x32, 0x95, 0xad, 0xd3, 0x8c, 0xb1, 0x1d, 0x6f, 0x57, 0x56, + 0xd6, 0x95, 0xae, 0xf5, 0x0b, 0x7b, 0x6f, 0x5a, 0xed, 0xba, 0xfa, 0x27, 0x00, 0x00, 0xff, 0xff, + 0x82, 0x9d, 0xa3, 0xd9, 0x6c, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/entity_type.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/entity_type.pb.go new file mode 100644 index 0000000..24ccdb7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/entity_type.pb.go @@ -0,0 +1,1624 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/entity_type.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents kinds of entities. +type EntityType_Kind int32 + +const ( + // Not specified. This value should be never used. + EntityType_KIND_UNSPECIFIED EntityType_Kind = 0 + // Map entity types allow mapping of a group of synonyms to a canonical + // value. + EntityType_KIND_MAP EntityType_Kind = 1 + // List entity types contain a set of entries that do not map to canonical + // values. However, list entity types can contain references to other entity + // types (with or without aliases). + EntityType_KIND_LIST EntityType_Kind = 2 +) + +var EntityType_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "KIND_MAP", + 2: "KIND_LIST", +} +var EntityType_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "KIND_MAP": 1, + "KIND_LIST": 2, +} + +func (x EntityType_Kind) String() string { + return proto.EnumName(EntityType_Kind_name, int32(x)) +} +func (EntityType_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{0, 0} +} + +// Represents different entity type expansion modes. Automated expansion +// allows an agent to recognize values that have not been explicitly listed in +// the entity (for example, new kinds of shopping list items). +type EntityType_AutoExpansionMode int32 + +const ( + // Auto expansion disabled for the entity. 
+ EntityType_AUTO_EXPANSION_MODE_UNSPECIFIED EntityType_AutoExpansionMode = 0 + // Allows an agent to recognize values that have not been explicitly + // listed in the entity. + EntityType_AUTO_EXPANSION_MODE_DEFAULT EntityType_AutoExpansionMode = 1 +) + +var EntityType_AutoExpansionMode_name = map[int32]string{ + 0: "AUTO_EXPANSION_MODE_UNSPECIFIED", + 1: "AUTO_EXPANSION_MODE_DEFAULT", +} +var EntityType_AutoExpansionMode_value = map[string]int32{ + "AUTO_EXPANSION_MODE_UNSPECIFIED": 0, + "AUTO_EXPANSION_MODE_DEFAULT": 1, +} + +func (x EntityType_AutoExpansionMode) String() string { + return proto.EnumName(EntityType_AutoExpansionMode_name, int32(x)) +} +func (EntityType_AutoExpansionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{0, 1} +} + +// Represents an entity type. +// Entity types serve as a tool for extracting parameter values from natural +// language queries. +type EntityType struct { + // The unique identifier of the entity type. + // Required for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType] and + // [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes] methods. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the entity type. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. Indicates the kind of entity type. + Kind EntityType_Kind `protobuf:"varint,3,opt,name=kind,proto3,enum=google.cloud.dialogflow.v2.EntityType_Kind" json:"kind,omitempty"` + // Optional. Indicates whether the entity type can be automatically + // expanded. + AutoExpansionMode EntityType_AutoExpansionMode `protobuf:"varint,4,opt,name=auto_expansion_mode,json=autoExpansionMode,proto3,enum=google.cloud.dialogflow.v2.EntityType_AutoExpansionMode" json:"auto_expansion_mode,omitempty"` + // Optional. The collection of entity entries associated with the entity type. 
+ Entities []*EntityType_Entity `protobuf:"bytes,6,rep,name=entities,proto3" json:"entities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityType) Reset() { *m = EntityType{} } +func (m *EntityType) String() string { return proto.CompactTextString(m) } +func (*EntityType) ProtoMessage() {} +func (*EntityType) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{0} +} +func (m *EntityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityType.Unmarshal(m, b) +} +func (m *EntityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityType.Marshal(b, m, deterministic) +} +func (dst *EntityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityType.Merge(dst, src) +} +func (m *EntityType) XXX_Size() int { + return xxx_messageInfo_EntityType.Size(m) +} +func (m *EntityType) XXX_DiscardUnknown() { + xxx_messageInfo_EntityType.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityType proto.InternalMessageInfo + +func (m *EntityType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EntityType) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *EntityType) GetKind() EntityType_Kind { + if m != nil { + return m.Kind + } + return EntityType_KIND_UNSPECIFIED +} + +func (m *EntityType) GetAutoExpansionMode() EntityType_AutoExpansionMode { + if m != nil { + return m.AutoExpansionMode + } + return EntityType_AUTO_EXPANSION_MODE_UNSPECIFIED +} + +func (m *EntityType) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +// An **entity entry** for an associated entity type. +type EntityType_Entity struct { + // Required. The primary value associated with this entity entry. + // For example, if the entity type is *vegetable*, the value could be + // *scallions*. + // + // For `KIND_MAP` entity types: + // + // * A canonical value to be used in place of synonyms. + // + // For `KIND_LIST` entity types: + // + // * A string that can contain references to other entity types (with or + // without aliases). + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // Required. A collection of value synonyms. For example, if the entity type + // is *vegetable*, and `value` is *scallions*, a synonym could be *green + // onions*. + // + // For `KIND_LIST` entity types: + // + // * This collection must contain exactly one synonym equal to `value`. 
+ Synonyms []string `protobuf:"bytes,2,rep,name=synonyms,proto3" json:"synonyms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityType_Entity) Reset() { *m = EntityType_Entity{} } +func (m *EntityType_Entity) String() string { return proto.CompactTextString(m) } +func (*EntityType_Entity) ProtoMessage() {} +func (*EntityType_Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{0, 0} +} +func (m *EntityType_Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityType_Entity.Unmarshal(m, b) +} +func (m *EntityType_Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityType_Entity.Marshal(b, m, deterministic) +} +func (dst *EntityType_Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityType_Entity.Merge(dst, src) +} +func (m *EntityType_Entity) XXX_Size() int { + return xxx_messageInfo_EntityType_Entity.Size(m) +} +func (m *EntityType_Entity) XXX_DiscardUnknown() { + xxx_messageInfo_EntityType_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityType_Entity proto.InternalMessageInfo + +func (m *EntityType_Entity) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *EntityType_Entity) GetSynonyms() []string { + if m != nil { + return m.Synonyms + } + return nil +} + +// The request message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes]. +type ListEntityTypesRequest struct { + // Required. The agent to list all entity types from. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language to list entity synonyms for. If not specified, + // the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEntityTypesRequest) Reset() { *m = ListEntityTypesRequest{} } +func (m *ListEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListEntityTypesRequest) ProtoMessage() {} +func (*ListEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{1} +} +func (m *ListEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEntityTypesRequest.Unmarshal(m, b) +} +func (m *ListEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEntityTypesRequest.Merge(dst, src) +} +func (m *ListEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListEntityTypesRequest.Size(m) +} +func (m *ListEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEntityTypesRequest proto.InternalMessageInfo + +func (m *ListEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListEntityTypesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *ListEntityTypesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListEntityTypesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes]. +type ListEntityTypesResponse struct { + // The list of agent entity types. There will be a maximum number of items + // returned based on the page_size field in the request. + EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEntityTypesResponse) Reset() { *m = ListEntityTypesResponse{} } +func (m *ListEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListEntityTypesResponse) ProtoMessage() {} +func (*ListEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{2} +} +func (m *ListEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEntityTypesResponse.Unmarshal(m, b) +} +func (m *ListEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEntityTypesResponse.Merge(dst, src) +} +func (m *ListEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListEntityTypesResponse.Size(m) +} +func (m *ListEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEntityTypesResponse proto.InternalMessageInfo + +func (m *ListEntityTypesResponse) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +func (m *ListEntityTypesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [EntityTypes.GetEntityType][google.cloud.dialogflow.v2.EntityTypes.GetEntityType]. +type GetEntityTypeRequest struct { + // Required. The name of the entity type. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The language to retrieve entity synonyms for. If not specified, + // the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetEntityTypeRequest) Reset() { *m = GetEntityTypeRequest{} } +func (m *GetEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*GetEntityTypeRequest) ProtoMessage() {} +func (*GetEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{3} +} +func (m *GetEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEntityTypeRequest.Unmarshal(m, b) +} +func (m *GetEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *GetEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEntityTypeRequest.Merge(dst, src) +} +func (m *GetEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_GetEntityTypeRequest.Size(m) +} +func (m *GetEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEntityTypeRequest proto.InternalMessageInfo + +func (m *GetEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.CreateEntityType][google.cloud.dialogflow.v2.EntityTypes.CreateEntityType]. +type CreateEntityTypeRequest struct { + // Required. The agent to create a entity type for. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entity type to create. + EntityType *EntityType `protobuf:"bytes,2,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The language of entity synonyms defined in `entity_type`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateEntityTypeRequest) Reset() { *m = CreateEntityTypeRequest{} } +func (m *CreateEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*CreateEntityTypeRequest) ProtoMessage() {} +func (*CreateEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{4} +} +func (m *CreateEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateEntityTypeRequest.Unmarshal(m, b) +} +func (m *CreateEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *CreateEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateEntityTypeRequest.Merge(dst, src) +} +func (m *CreateEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_CreateEntityTypeRequest.Size(m) +} +func (m *CreateEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateEntityTypeRequest proto.InternalMessageInfo + +func (m *CreateEntityTypeRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateEntityTypeRequest) GetEntityType() *EntityType { + if m != nil { + return m.EntityType + } + return nil +} + +func (m *CreateEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType]. +type UpdateEntityTypeRequest struct { + // Required. The entity type to update. + EntityType *EntityType `protobuf:"bytes,1,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The language of entity synonyms defined in `entity_type`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEntityTypeRequest) Reset() { *m = UpdateEntityTypeRequest{} } +func (m *UpdateEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateEntityTypeRequest) ProtoMessage() {} +func (*UpdateEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{5} +} +func (m *UpdateEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEntityTypeRequest.Unmarshal(m, b) +} +func (m *UpdateEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEntityTypeRequest.Merge(dst, src) +} +func (m *UpdateEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_UpdateEntityTypeRequest.Size(m) +} +func (m *UpdateEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEntityTypeRequest proto.InternalMessageInfo + +func (m *UpdateEntityTypeRequest) GetEntityType() *EntityType { + if m != nil { + return m.EntityType + } + return nil +} + +func (m *UpdateEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *UpdateEntityTypeRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [EntityTypes.DeleteEntityType][google.cloud.dialogflow.v2.EntityTypes.DeleteEntityType]. +type DeleteEntityTypeRequest struct { + // Required. The name of the entity type to delete. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteEntityTypeRequest) Reset() { *m = DeleteEntityTypeRequest{} } +func (m *DeleteEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteEntityTypeRequest) ProtoMessage() {} +func (*DeleteEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{6} +} +func (m *DeleteEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteEntityTypeRequest.Unmarshal(m, b) +} +func (m *DeleteEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteEntityTypeRequest.Merge(dst, src) +} +func (m *DeleteEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_DeleteEntityTypeRequest.Size(m) +} +func (m *DeleteEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteEntityTypeRequest proto.InternalMessageInfo + +func (m *DeleteEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes]. 
+type BatchUpdateEntityTypesRequest struct { + // Required. The name of the agent to update or create entity types in. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The source of the entity type batch. + // + // For each entity type in the batch: + // + // * If `name` is specified, we update an existing entity type. + // * If `name` is not specified, we create a new entity type. + // + // Types that are valid to be assigned to EntityTypeBatch: + // *BatchUpdateEntityTypesRequest_EntityTypeBatchUri + // *BatchUpdateEntityTypesRequest_EntityTypeBatchInline + EntityTypeBatch isBatchUpdateEntityTypesRequest_EntityTypeBatch `protobuf_oneof:"entity_type_batch"` + // Optional. The language of entity synonyms defined in `entity_types`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntityTypesRequest) Reset() { *m = BatchUpdateEntityTypesRequest{} } +func (m *BatchUpdateEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntityTypesRequest) ProtoMessage() {} +func (*BatchUpdateEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{7} +} +func (m *BatchUpdateEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Unmarshal(m, b) +} +func (m *BatchUpdateEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntityTypesRequest.Merge(dst, src) +} +func (m *BatchUpdateEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Size(m) +} +func (m *BatchUpdateEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntityTypesRequest proto.InternalMessageInfo + +func (m *BatchUpdateEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isBatchUpdateEntityTypesRequest_EntityTypeBatch interface { + isBatchUpdateEntityTypesRequest_EntityTypeBatch() +} + +type BatchUpdateEntityTypesRequest_EntityTypeBatchUri struct { + EntityTypeBatchUri string `protobuf:"bytes,2,opt,name=entity_type_batch_uri,json=entityTypeBatchUri,proto3,oneof"` +} + +type BatchUpdateEntityTypesRequest_EntityTypeBatchInline struct { + EntityTypeBatchInline *EntityTypeBatch `protobuf:"bytes,3,opt,name=entity_type_batch_inline,json=entityTypeBatchInline,proto3,oneof"` +} + +func (*BatchUpdateEntityTypesRequest_EntityTypeBatchUri) isBatchUpdateEntityTypesRequest_EntityTypeBatch() { +} + +func (*BatchUpdateEntityTypesRequest_EntityTypeBatchInline) 
isBatchUpdateEntityTypesRequest_EntityTypeBatch() { +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatch() isBatchUpdateEntityTypesRequest_EntityTypeBatch { + if m != nil { + return m.EntityTypeBatch + } + return nil +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatchUri() string { + if x, ok := m.GetEntityTypeBatch().(*BatchUpdateEntityTypesRequest_EntityTypeBatchUri); ok { + return x.EntityTypeBatchUri + } + return "" +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatchInline() *EntityTypeBatch { + if x, ok := m.GetEntityTypeBatch().(*BatchUpdateEntityTypesRequest_EntityTypeBatchInline); ok { + return x.EntityTypeBatchInline + } + return nil +} + +func (m *BatchUpdateEntityTypesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateEntityTypesRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BatchUpdateEntityTypesRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchUpdateEntityTypesRequest_OneofMarshaler, _BatchUpdateEntityTypesRequest_OneofUnmarshaler, _BatchUpdateEntityTypesRequest_OneofSizer, []interface{}{ + (*BatchUpdateEntityTypesRequest_EntityTypeBatchUri)(nil), + (*BatchUpdateEntityTypesRequest_EntityTypeBatchInline)(nil), + } +} + +func _BatchUpdateEntityTypesRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchUpdateEntityTypesRequest) + // entity_type_batch + switch x := m.EntityTypeBatch.(type) { + case *BatchUpdateEntityTypesRequest_EntityTypeBatchUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.EntityTypeBatchUri) + case *BatchUpdateEntityTypesRequest_EntityTypeBatchInline: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EntityTypeBatchInline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchUpdateEntityTypesRequest.EntityTypeBatch has unexpected type %T", x) + } + return nil +} + +func _BatchUpdateEntityTypesRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchUpdateEntityTypesRequest) + switch tag { + case 2: // entity_type_batch.entity_type_batch_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.EntityTypeBatch = &BatchUpdateEntityTypesRequest_EntityTypeBatchUri{x} + return true, err + case 3: // entity_type_batch.entity_type_batch_inline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EntityTypeBatch) + err := b.DecodeMessage(msg) + m.EntityTypeBatch = &BatchUpdateEntityTypesRequest_EntityTypeBatchInline{msg} + return true, err + default: + return false, nil + } +} + +func _BatchUpdateEntityTypesRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchUpdateEntityTypesRequest) + // entity_type_batch + switch x := m.EntityTypeBatch.(type) { + case *BatchUpdateEntityTypesRequest_EntityTypeBatchUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EntityTypeBatchUri))) + n += len(x.EntityTypeBatchUri) + case *BatchUpdateEntityTypesRequest_EntityTypeBatchInline: + s := proto.Size(x.EntityTypeBatchInline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response message for [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes]. +type BatchUpdateEntityTypesResponse struct { + // The collection of updated or created entity types. + EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntityTypesResponse) Reset() { *m = BatchUpdateEntityTypesResponse{} } +func (m *BatchUpdateEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntityTypesResponse) ProtoMessage() {} +func (*BatchUpdateEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{8} +} +func (m *BatchUpdateEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Unmarshal(m, b) +} +func (m *BatchUpdateEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntityTypesResponse.Merge(dst, src) +} +func (m *BatchUpdateEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Size(m) +} +func (m *BatchUpdateEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntityTypesResponse proto.InternalMessageInfo + +func (m *BatchUpdateEntityTypesResponse) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +// The request message for [EntityTypes.BatchDeleteEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes]. +type BatchDeleteEntityTypesRequest struct { + // Required. The name of the agent to delete all entities types for. Format: + // `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The names entity types to delete. All names must point to the + // same agent as `parent`. 
+ EntityTypeNames []string `protobuf:"bytes,2,rep,name=entity_type_names,json=entityTypeNames,proto3" json:"entity_type_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteEntityTypesRequest) Reset() { *m = BatchDeleteEntityTypesRequest{} } +func (m *BatchDeleteEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteEntityTypesRequest) ProtoMessage() {} +func (*BatchDeleteEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{9} +} +func (m *BatchDeleteEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Unmarshal(m, b) +} +func (m *BatchDeleteEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteEntityTypesRequest.Merge(dst, src) +} +func (m *BatchDeleteEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Size(m) +} +func (m *BatchDeleteEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteEntityTypesRequest proto.InternalMessageInfo + +func (m *BatchDeleteEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteEntityTypesRequest) GetEntityTypeNames() []string { + if m != nil { + return m.EntityTypeNames + } + return nil +} + +// The request message for [EntityTypes.BatchCreateEntities][google.cloud.dialogflow.v2.EntityTypes.BatchCreateEntities]. +type BatchCreateEntitiesRequest struct { + // Required. The name of the entity type to create entities in. Format: + // `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entities to create. + Entities []*EntityType_Entity `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateEntitiesRequest) Reset() { *m = BatchCreateEntitiesRequest{} } +func (m *BatchCreateEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateEntitiesRequest) ProtoMessage() {} +func (*BatchCreateEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{10} +} +func (m *BatchCreateEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchCreateEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateEntitiesRequest.Merge(dst, src) +} +func (m *BatchCreateEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateEntitiesRequest.Size(m) +} +func (m *BatchCreateEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateEntitiesRequest proto.InternalMessageInfo + +func (m *BatchCreateEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchCreateEntitiesRequest) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *BatchCreateEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.BatchUpdateEntities][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntities]. +type BatchUpdateEntitiesRequest struct { + // Required. The name of the entity type to update or create entities in. + // Format: `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entities to update or create. + Entities []*EntityType_Entity `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntitiesRequest) Reset() { *m = BatchUpdateEntitiesRequest{} } +func (m *BatchUpdateEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntitiesRequest) ProtoMessage() {} +func (*BatchUpdateEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{11} +} +func (m *BatchUpdateEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchUpdateEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntitiesRequest.Merge(dst, src) +} +func (m *BatchUpdateEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Size(m) +} +func (m *BatchUpdateEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntitiesRequest proto.InternalMessageInfo + +func (m *BatchUpdateEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchUpdateEntitiesRequest) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *BatchUpdateEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateEntitiesRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [EntityTypes.BatchDeleteEntities][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntities]. +type BatchDeleteEntitiesRequest struct { + // Required. The name of the entity type to delete entries for. Format: + // `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The canonical `values` of the entities to delete. Note that + // these are not fully-qualified names, i.e. they don't start with + // `projects/`. + EntityValues []string `protobuf:"bytes,2,rep,name=entity_values,json=entityValues,proto3" json:"entity_values,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteEntitiesRequest) Reset() { *m = BatchDeleteEntitiesRequest{} } +func (m *BatchDeleteEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteEntitiesRequest) ProtoMessage() {} +func (*BatchDeleteEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{12} +} +func (m *BatchDeleteEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchDeleteEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteEntitiesRequest.Merge(dst, src) +} +func (m *BatchDeleteEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Size(m) +} +func (m *BatchDeleteEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteEntitiesRequest proto.InternalMessageInfo + +func (m *BatchDeleteEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteEntitiesRequest) GetEntityValues() []string { + if m != nil { + return m.EntityValues + } + return nil +} + +func (m *BatchDeleteEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// This message is a wrapper around a collection of entity types. +type EntityTypeBatch struct { + // A collection of entity types. 
+ EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityTypeBatch) Reset() { *m = EntityTypeBatch{} } +func (m *EntityTypeBatch) String() string { return proto.CompactTextString(m) } +func (*EntityTypeBatch) ProtoMessage() {} +func (*EntityTypeBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_d3df53e4da2ed9bd, []int{13} +} +func (m *EntityTypeBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityTypeBatch.Unmarshal(m, b) +} +func (m *EntityTypeBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityTypeBatch.Marshal(b, m, deterministic) +} +func (dst *EntityTypeBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityTypeBatch.Merge(dst, src) +} +func (m *EntityTypeBatch) XXX_Size() int { + return xxx_messageInfo_EntityTypeBatch.Size(m) +} +func (m *EntityTypeBatch) XXX_DiscardUnknown() { + xxx_messageInfo_EntityTypeBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityTypeBatch proto.InternalMessageInfo + +func (m *EntityTypeBatch) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +func init() { + proto.RegisterType((*EntityType)(nil), "google.cloud.dialogflow.v2.EntityType") + proto.RegisterType((*EntityType_Entity)(nil), "google.cloud.dialogflow.v2.EntityType.Entity") + proto.RegisterType((*ListEntityTypesRequest)(nil), "google.cloud.dialogflow.v2.ListEntityTypesRequest") + proto.RegisterType((*ListEntityTypesResponse)(nil), "google.cloud.dialogflow.v2.ListEntityTypesResponse") + proto.RegisterType((*GetEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.GetEntityTypeRequest") + proto.RegisterType((*CreateEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.CreateEntityTypeRequest") + proto.RegisterType((*UpdateEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.UpdateEntityTypeRequest") + proto.RegisterType((*DeleteEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.DeleteEntityTypeRequest") + proto.RegisterType((*BatchUpdateEntityTypesRequest)(nil), "google.cloud.dialogflow.v2.BatchUpdateEntityTypesRequest") + proto.RegisterType((*BatchUpdateEntityTypesResponse)(nil), "google.cloud.dialogflow.v2.BatchUpdateEntityTypesResponse") + proto.RegisterType((*BatchDeleteEntityTypesRequest)(nil), "google.cloud.dialogflow.v2.BatchDeleteEntityTypesRequest") + proto.RegisterType((*BatchCreateEntitiesRequest)(nil), "google.cloud.dialogflow.v2.BatchCreateEntitiesRequest") + proto.RegisterType((*BatchUpdateEntitiesRequest)(nil), "google.cloud.dialogflow.v2.BatchUpdateEntitiesRequest") + proto.RegisterType((*BatchDeleteEntitiesRequest)(nil), "google.cloud.dialogflow.v2.BatchDeleteEntitiesRequest") + proto.RegisterType((*EntityTypeBatch)(nil), "google.cloud.dialogflow.v2.EntityTypeBatch") + proto.RegisterEnum("google.cloud.dialogflow.v2.EntityType_Kind", EntityType_Kind_name, EntityType_Kind_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.EntityType_AutoExpansionMode", EntityType_AutoExpansionMode_name, EntityType_AutoExpansionMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// EntityTypesClient is the client API for EntityTypes service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EntityTypesClient interface { + // Returns the list of all entity types in the specified agent. + ListEntityTypes(ctx context.Context, in *ListEntityTypesRequest, opts ...grpc.CallOption) (*ListEntityTypesResponse, error) + // Retrieves the specified entity type. + GetEntityType(ctx context.Context, in *GetEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Creates an entity type in the specified agent. + CreateEntityType(ctx context.Context, in *CreateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Updates the specified entity type. + UpdateEntityType(ctx context.Context, in *UpdateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Deletes the specified entity type. + DeleteEntityType(ctx context.Context, in *DeleteEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates/Creates multiple entity types in the specified agent. + // + // Operation + BatchUpdateEntityTypes(ctx context.Context, in *BatchUpdateEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes entity types in the specified agent. + // + // Operation + BatchDeleteEntityTypes(ctx context.Context, in *BatchDeleteEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates multiple new entities in the specified entity type. + // + // Operation + BatchCreateEntities(ctx context.Context, in *BatchCreateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates or creates multiple entities in the specified entity type. This + // method does not affect entities in the entity type that aren't explicitly + // specified in the request. + // + // Operation + BatchUpdateEntities(ctx context.Context, in *BatchUpdateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes entities in the specified entity type. + // + // Operation + BatchDeleteEntities(ctx context.Context, in *BatchDeleteEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type entityTypesClient struct { + cc *grpc.ClientConn +} + +func NewEntityTypesClient(cc *grpc.ClientConn) EntityTypesClient { + return &entityTypesClient{cc} +} + +func (c *entityTypesClient) ListEntityTypes(ctx context.Context, in *ListEntityTypesRequest, opts ...grpc.CallOption) (*ListEntityTypesResponse, error) { + out := new(ListEntityTypesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/ListEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) GetEntityType(ctx context.Context, in *GetEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/GetEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) CreateEntityType(ctx context.Context, in *CreateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/CreateEntityType", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) UpdateEntityType(ctx context.Context, in *UpdateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/UpdateEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) DeleteEntityType(ctx context.Context, in *DeleteEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/DeleteEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchUpdateEntityTypes(ctx context.Context, in *BatchUpdateEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchDeleteEntityTypes(ctx context.Context, in *BatchDeleteEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchCreateEntities(ctx context.Context, in *BatchCreateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/BatchCreateEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchUpdateEntities(ctx context.Context, in *BatchUpdateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchDeleteEntities(ctx context.Context, in *BatchDeleteEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EntityTypesServer is the server API for EntityTypes service. +type EntityTypesServer interface { + // Returns the list of all entity types in the specified agent. + ListEntityTypes(context.Context, *ListEntityTypesRequest) (*ListEntityTypesResponse, error) + // Retrieves the specified entity type. + GetEntityType(context.Context, *GetEntityTypeRequest) (*EntityType, error) + // Creates an entity type in the specified agent. + CreateEntityType(context.Context, *CreateEntityTypeRequest) (*EntityType, error) + // Updates the specified entity type. + UpdateEntityType(context.Context, *UpdateEntityTypeRequest) (*EntityType, error) + // Deletes the specified entity type. + DeleteEntityType(context.Context, *DeleteEntityTypeRequest) (*empty.Empty, error) + // Updates/Creates multiple entity types in the specified agent. 
+ // + // Operation + BatchUpdateEntityTypes(context.Context, *BatchUpdateEntityTypesRequest) (*longrunning.Operation, error) + // Deletes entity types in the specified agent. + // + // Operation + BatchDeleteEntityTypes(context.Context, *BatchDeleteEntityTypesRequest) (*longrunning.Operation, error) + // Creates multiple new entities in the specified entity type. + // + // Operation + BatchCreateEntities(context.Context, *BatchCreateEntitiesRequest) (*longrunning.Operation, error) + // Updates or creates multiple entities in the specified entity type. This + // method does not affect entities in the entity type that aren't explicitly + // specified in the request. + // + // Operation + BatchUpdateEntities(context.Context, *BatchUpdateEntitiesRequest) (*longrunning.Operation, error) + // Deletes entities in the specified entity type. + // + // Operation + BatchDeleteEntities(context.Context, *BatchDeleteEntitiesRequest) (*longrunning.Operation, error) +} + +func RegisterEntityTypesServer(s *grpc.Server, srv EntityTypesServer) { + s.RegisterService(&_EntityTypes_serviceDesc, srv) +} + +func _EntityTypes_ListEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).ListEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/ListEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).ListEntityTypes(ctx, req.(*ListEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_GetEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).GetEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/GetEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).GetEntityType(ctx, req.(*GetEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_CreateEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).CreateEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/CreateEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).CreateEntityType(ctx, req.(*CreateEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_UpdateEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).UpdateEntityType(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/UpdateEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).UpdateEntityType(ctx, req.(*UpdateEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_DeleteEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).DeleteEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/DeleteEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).DeleteEntityType(ctx, req.(*DeleteEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchUpdateEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchUpdateEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchUpdateEntityTypes(ctx, req.(*BatchUpdateEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchDeleteEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchDeleteEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchDeleteEntityTypes(ctx, req.(*BatchDeleteEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchCreateEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchCreateEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/BatchCreateEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchCreateEntities(ctx, req.(*BatchCreateEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchUpdateEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(EntityTypesServer).BatchUpdateEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/BatchUpdateEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchUpdateEntities(ctx, req.(*BatchUpdateEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchDeleteEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchDeleteEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.EntityTypes/BatchDeleteEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchDeleteEntities(ctx, req.(*BatchDeleteEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EntityTypes_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.EntityTypes", + HandlerType: (*EntityTypesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListEntityTypes", + Handler: _EntityTypes_ListEntityTypes_Handler, + }, + { + MethodName: "GetEntityType", + Handler: _EntityTypes_GetEntityType_Handler, + }, + { + MethodName: "CreateEntityType", + Handler: _EntityTypes_CreateEntityType_Handler, + }, + { + MethodName: "UpdateEntityType", + Handler: _EntityTypes_UpdateEntityType_Handler, + }, + { + MethodName: "DeleteEntityType", + Handler: _EntityTypes_DeleteEntityType_Handler, + }, + { + MethodName: "BatchUpdateEntityTypes", + Handler: _EntityTypes_BatchUpdateEntityTypes_Handler, + }, + { + MethodName: "BatchDeleteEntityTypes", + Handler: _EntityTypes_BatchDeleteEntityTypes_Handler, + }, + { + MethodName: "BatchCreateEntities", + Handler: _EntityTypes_BatchCreateEntities_Handler, + }, + { + MethodName: "BatchUpdateEntities", + Handler: _EntityTypes_BatchUpdateEntities_Handler, + }, + { + MethodName: "BatchDeleteEntities", + Handler: _EntityTypes_BatchDeleteEntities_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2/entity_type.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/entity_type.proto", fileDescriptor_entity_type_d3df53e4da2ed9bd) +} + +var fileDescriptor_entity_type_d3df53e4da2ed9bd = []byte{ + // 1243 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0x4d, 0x6f, 0x1b, 0x45, + 0x18, 0xc7, 0x33, 0x8e, 0x13, 0x25, 0x8f, 0x93, 0xc6, 0x99, 0xa4, 0x89, 0xd9, 0x34, 0x6d, 0xd8, + 0x48, 0x55, 0x48, 0xa9, 0x17, 0x1c, 0x15, 0xb5, 0xa9, 0x80, 0x26, 0xb1, 0x93, 0x9a, 0xbc, 0x59, + 0x9b, 0xa4, 0x02, 0x84, 0xb4, 0xda, 0xd8, 0x13, 0x77, 0x89, 0x3d, 0xb3, 0xec, 0x4b, 0xa8, 0x8b, + 0xca, 0x01, 0x8e, 0x70, 0x01, 0xae, 0x08, 0x24, 0xc4, 0x09, 0x09, 0x71, 0xe1, 0x82, 0xb8, 0x72, + 0xe3, 0xc8, 0x27, 0x40, 0xe2, 0x43, 0x70, 0x44, 0x3b, 0xbb, 0x9b, 0x5d, 0xaf, 0xd7, 0xf6, 0x46, + 0x84, 0x8a, 0x9b, 0xe7, 0xed, 0x79, 0x7e, 0xcf, 0xcb, 0xcc, 0x3f, 0x1b, 0x78, 0xb9, 0xce, 0x58, + 0xbd, 0x41, 0xa4, 0x6a, 0x83, 0xd9, 0x35, 0xa9, 0xa6, 0xa9, 0x0d, 0x56, 0x3f, 0x69, 0xb0, 0x0f, + 0xa5, 0xb3, 0x82, 0x44, 0xa8, 0xa5, 0x59, 0x2d, 0xc5, 0x6a, 0xe9, 0x24, 0xaf, 0x1b, 0xcc, 0x62, + 0x58, 0x70, 0x77, 0xe7, 0xf9, 0xee, 0x7c, 0xb0, 
0x3b, 0x7f, 0x56, 0x10, 0xae, 0x79, 0x96, 0x54, + 0x5d, 0x93, 0x54, 0x4a, 0x99, 0xa5, 0x5a, 0x1a, 0xa3, 0xa6, 0x7b, 0x52, 0x78, 0x21, 0xb4, 0x6a, + 0x10, 0x93, 0xd9, 0x46, 0xd5, 0x33, 0x2a, 0x2c, 0x7a, 0x4b, 0x0d, 0x46, 0xeb, 0x86, 0x4d, 0xa9, + 0x46, 0xeb, 0x12, 0xd3, 0x89, 0xd1, 0x76, 0x7e, 0xce, 0xdb, 0xc4, 0x47, 0xc7, 0xf6, 0x89, 0x44, + 0x9a, 0xba, 0xd5, 0xf2, 0x16, 0x17, 0xa2, 0x8b, 0x27, 0x1a, 0x69, 0xd4, 0x94, 0xa6, 0x6a, 0x9e, + 0x7a, 0x3b, 0xae, 0x45, 0x77, 0x98, 0x96, 0x61, 0x57, 0x2d, 0x77, 0x55, 0xfc, 0x2c, 0x0d, 0x50, + 0xe2, 0xc1, 0x1e, 0xb6, 0x74, 0x82, 0x31, 0xa4, 0xa9, 0xda, 0x24, 0x39, 0xb4, 0x80, 0x96, 0x46, + 0x65, 0xfe, 0x1b, 0xbf, 0x08, 0x63, 0x35, 0xcd, 0xd4, 0x1b, 0x6a, 0x4b, 0xe1, 0x6b, 0x29, 0xbe, + 0x96, 0xf1, 0xe6, 0xf6, 0x9c, 0x2d, 0x6f, 0x42, 0xfa, 0x54, 0xa3, 0xb5, 0xdc, 0xe0, 0x02, 0x5a, + 0xba, 0x52, 0xb8, 0x95, 0xef, 0x9e, 0xab, 0x7c, 0xe0, 0x2c, 0xbf, 0xad, 0xd1, 0x9a, 0xcc, 0x0f, + 0xe2, 0xc7, 0x30, 0xa5, 0xda, 0x16, 0x53, 0xc8, 0x13, 0x5d, 0xa5, 0xa6, 0xc6, 0xa8, 0xd2, 0x64, + 0x35, 0x92, 0x4b, 0x73, 0x7b, 0x77, 0x13, 0xda, 0x5b, 0xb3, 0x2d, 0x56, 0xf2, 0x0d, 0xec, 0xb2, + 0x1a, 0x91, 0x27, 0xd5, 0xe8, 0x14, 0x2e, 0xc3, 0x08, 0x2f, 0xae, 0x46, 0xcc, 0xdc, 0xf0, 0xc2, + 0xe0, 0x52, 0xa6, 0x70, 0x3b, 0xa1, 0x79, 0xf7, 0xa7, 0x7c, 0x7e, 0x5c, 0x58, 0x85, 0x61, 0x77, + 0x0e, 0x4f, 0xc3, 0xd0, 0x99, 0xda, 0xb0, 0xfd, 0xbc, 0xb9, 0x03, 0x2c, 0xc0, 0x88, 0xd9, 0xa2, + 0x8c, 0xb6, 0x9a, 0x66, 0x2e, 0xb5, 0x30, 0xb8, 0x34, 0x2a, 0x9f, 0x8f, 0xc5, 0x7b, 0x90, 0x76, + 0xc2, 0xc7, 0xd3, 0x90, 0xdd, 0x2e, 0xef, 0x15, 0x95, 0xa3, 0xbd, 0x83, 0x4a, 0x69, 0xa3, 0xbc, + 0x59, 0x2e, 0x15, 0xb3, 0x03, 0x78, 0x0c, 0x46, 0xf8, 0xec, 0xee, 0x5a, 0x25, 0x8b, 0xf0, 0x38, + 0x8c, 0xf2, 0xd1, 0x4e, 0xf9, 0xe0, 0x30, 0x9b, 0x12, 0xdf, 0x81, 0xc9, 0x8e, 0x48, 0xf1, 0x22, + 0xdc, 0x58, 0x3b, 0x3a, 0xdc, 0x57, 0x4a, 0x6f, 0x57, 0xd6, 0xf6, 0x0e, 0xca, 0xfb, 0x7b, 0xca, + 0xee, 0x7e, 0xb1, 0x14, 0x31, 0x7b, 0x03, 0xe6, 0xe2, 0x36, 0x15, 0x4b, 0x9b, 0x6b, 0x47, 0x3b, + 0x87, 0x59, 0x24, 0x7e, 0x81, 0x60, 0x66, 0x47, 0x33, 0xad, 0x20, 0x6a, 0x53, 0x26, 0x1f, 0xd8, + 0xc4, 0xb4, 0xf0, 0x0c, 0x0c, 0xeb, 0xaa, 0x41, 0xa8, 0xe5, 0xc5, 0xe8, 0x8d, 0xf0, 0x22, 0x8c, + 0x37, 0x54, 0x5a, 0xb7, 0xd5, 0x3a, 0x51, 0xaa, 0x4e, 0xcd, 0xdc, 0xf6, 0x18, 0xf3, 0x27, 0x37, + 0x1c, 0xba, 0x39, 0x18, 0xd5, 0x9d, 0x0d, 0xa6, 0xf6, 0x94, 0xf0, 0x26, 0x19, 0x92, 0x47, 0x9c, + 0x89, 0x03, 0xed, 0x29, 0xc1, 0xf3, 0x00, 0x7c, 0xd1, 0x62, 0xa7, 0x84, 0xf2, 0x92, 0x8f, 0xca, + 0x7c, 0xfb, 0xa1, 0x33, 0x21, 0x7e, 0x8e, 0x60, 0xb6, 0x83, 0xc9, 0xd4, 0x19, 0x35, 0x9d, 0x62, + 0x8e, 0x85, 0x6e, 0xaa, 0x99, 0x43, 0xbc, 0xa0, 0x37, 0x93, 0x15, 0x54, 0xce, 0x90, 0xc0, 0x24, + 0xbe, 0x09, 0x13, 0x94, 0x3c, 0xb1, 0x94, 0x10, 0x8a, 0x1b, 0xc9, 0xb8, 0x33, 0x5d, 0x39, 0xc7, + 0xd9, 0x87, 0xe9, 0x2d, 0x12, 0x82, 0xf1, 0xf3, 0x13, 0x77, 0x73, 0x92, 0xe4, 0x46, 0xfc, 0x16, + 0xc1, 0xec, 0x86, 0x41, 0x54, 0x8b, 0x74, 0x1a, 0xed, 0x96, 0xf4, 0x2d, 0xc8, 0x84, 0xe2, 0xe6, + 0x66, 0x93, 0x87, 0x0d, 0x41, 0xd8, 0x9d, 0x84, 0x83, 0x31, 0x84, 0xbf, 0x21, 0x98, 0x3d, 0xd2, + 0x6b, 0xb1, 0x84, 0x11, 0x12, 0x74, 0x79, 0x24, 0x71, 0x7d, 0x74, 0x1f, 0x32, 0x36, 0x07, 0xe1, + 0x0f, 0x1c, 0x87, 0xcd, 0x14, 0x04, 0xdf, 0x9b, 0xff, 0xc2, 0xe5, 0x37, 0x9d, 0x37, 0x70, 0x57, + 0x35, 0x4f, 0x65, 0x70, 0xb7, 0x3b, 0xbf, 0xc5, 0xdb, 0x30, 0x5b, 0x24, 0x0d, 0x12, 0x17, 0x45, + 0x4c, 0xf1, 0xc4, 0xdf, 0x53, 0x30, 0xbf, 0xae, 0x5a, 0xd5, 0xc7, 0xd1, 0xd0, 0xfb, 0x5e, 0x89, + 0x15, 0xb8, 0x1a, 0xca, 0x89, 0x72, 0xec, 0x18, 0x51, 0x6c, 0x43, 0x73, 
0x43, 0x7a, 0x38, 0x20, + 0xe3, 0x20, 0x6e, 0xd7, 0x83, 0xa1, 0xe1, 0x13, 0xc8, 0x75, 0x1e, 0xd2, 0x68, 0x43, 0xa3, 0xc4, + 0x8b, 0x33, 0xe1, 0xb3, 0xca, 0x2d, 0x3e, 0x1c, 0x90, 0xaf, 0x46, 0x9c, 0x94, 0xb9, 0xad, 0xce, + 0x3c, 0xa7, 0xfb, 0xe7, 0x79, 0xe8, 0x22, 0x79, 0x5e, 0x9f, 0x82, 0xc9, 0x8e, 0x48, 0xc4, 0x53, + 0xb8, 0xde, 0x2d, 0x99, 0x97, 0x7e, 0x97, 0xc5, 0xaa, 0x57, 0xb9, 0x68, 0xb9, 0xfb, 0x56, 0x6e, + 0xb9, 0x1d, 0xdd, 0xe9, 0x03, 0xff, 0xe9, 0x9e, 0x08, 0x1c, 0x38, 0x92, 0x67, 0x8a, 0xdf, 0x23, + 0x10, 0xb8, 0x97, 0xd0, 0xe5, 0xd5, 0xfa, 0xbb, 0x08, 0xeb, 0x4f, 0xea, 0x5f, 0xe9, 0x4f, 0xb2, + 0xcb, 0xfb, 0xa7, 0x8f, 0x19, 0xca, 0xfc, 0xff, 0x0f, 0x33, 0xda, 0x71, 0xe9, 0x0b, 0xdd, 0xec, + 0x8f, 0xbd, 0x10, 0x43, 0xf5, 0xd6, 0x12, 0x29, 0x97, 0x57, 0x6c, 0x2e, 0xd7, 0x7e, 0xa1, 0xbd, + 0x2e, 0x7c, 0xc4, 0xe7, 0x92, 0xe5, 0xf8, 0x3d, 0x98, 0x88, 0xdc, 0xbf, 0x4b, 0xec, 0xe6, 0xc2, + 0xa7, 0x57, 0x20, 0x13, 0xea, 0x61, 0xfc, 0x23, 0x82, 0x89, 0x88, 0x20, 0xe2, 0x42, 0x2f, 0xc3, + 0xf1, 0x8a, 0x2e, 0xac, 0x5c, 0xe8, 0x8c, 0x7b, 0x4b, 0xc5, 0x57, 0x3f, 0xf9, 0xe3, 0xaf, 0xaf, + 0x52, 0xb7, 0xf0, 0x4b, 0xce, 0x5f, 0xc9, 0x1f, 0xb9, 0x99, 0x7c, 0x5d, 0x37, 0xd8, 0xfb, 0xa4, + 0x6a, 0x99, 0xd2, 0xb2, 0xa4, 0xd6, 0x09, 0xb5, 0x9e, 0x49, 0x61, 0x65, 0xfd, 0x1a, 0xc1, 0x78, + 0x9b, 0x64, 0xe2, 0x57, 0x7a, 0x79, 0x8e, 0x53, 0x57, 0x21, 0x61, 0xe2, 0x22, 0x78, 0xce, 0xfd, + 0xed, 0x80, 0x0b, 0xb3, 0x49, 0xcb, 0xcf, 0xf0, 0x4f, 0x08, 0xb2, 0x51, 0xfd, 0xc5, 0x3d, 0x73, + 0xd3, 0x45, 0xad, 0x13, 0x43, 0xbe, 0xc1, 0x21, 0xef, 0x8a, 0xc9, 0x73, 0xb8, 0x1a, 0x16, 0x59, + 0xfc, 0x2b, 0x82, 0x6c, 0xf4, 0x1d, 0xed, 0x4d, 0xdc, 0x45, 0xbd, 0x13, 0x13, 0xbf, 0xc5, 0x89, + 0x8b, 0x85, 0x3b, 0x9c, 0x38, 0xfc, 0x71, 0x94, 0x24, 0xc5, 0xed, 0xf4, 0x5f, 0x22, 0xc8, 0x46, + 0x1f, 0xe6, 0xde, 0xf4, 0x5d, 0x54, 0x5b, 0x98, 0xe9, 0x78, 0x1e, 0x4a, 0xce, 0x97, 0x91, 0xdf, + 0x04, 0xcb, 0x17, 0x68, 0x82, 0x9f, 0x11, 0xcc, 0xc4, 0xeb, 0x13, 0xbe, 0xd7, 0x0b, 0xad, 0xe7, + 0x1f, 0x08, 0xc2, 0xbc, 0x7f, 0x34, 0xf4, 0x7d, 0x97, 0xdf, 0xf7, 0xbf, 0xef, 0xc4, 0x07, 0x9c, + 0x73, 0x55, 0xbc, 0x93, 0xbc, 0x0f, 0x8e, 0x03, 0x7f, 0xab, 0x68, 0x39, 0xc0, 0xee, 0x50, 0xba, + 0x04, 0xd8, 0xdd, 0xd4, 0xf1, 0xbf, 0xc2, 0x76, 0xfd, 0x39, 0xd8, 0xbf, 0x20, 0x98, 0x8a, 0x91, + 0x4e, 0xfc, 0x5a, 0x5f, 0xe6, 0x58, 0xad, 0xed, 0x07, 0xbc, 0xcd, 0x81, 0x4b, 0xe2, 0x83, 0x5e, + 0xc0, 0x91, 0x8e, 0x90, 0x7c, 0xf9, 0x72, 0xd9, 0x5d, 0xbf, 0x6d, 0xec, 0xed, 0x7a, 0x9a, 0x80, + 0x3d, 0x56, 0x80, 0x9f, 0x03, 0x7b, 0xd0, 0x2e, 0xe7, 0xec, 0xed, 0x42, 0x99, 0x80, 0x3d, 0x56, + 0x59, 0x9f, 0x03, 0xfb, 0x79, 0xcf, 0xac, 0x7f, 0x83, 0xe0, 0x7a, 0x95, 0x35, 0x7b, 0x90, 0xae, + 0x87, 0x44, 0xb8, 0xe2, 0xbc, 0x08, 0x15, 0xf4, 0x6e, 0xd1, 0xdb, 0x5e, 0x67, 0x8e, 0x60, 0xe7, + 0x99, 0x51, 0x97, 0xea, 0x84, 0xf2, 0xf7, 0x42, 0x72, 0x97, 0x54, 0x5d, 0x33, 0xe3, 0xfe, 0x05, + 0x74, 0x3f, 0x18, 0xfd, 0x8d, 0xd0, 0x77, 0xa9, 0x54, 0x71, 0xf3, 0x87, 0x94, 0xb0, 0xe5, 0x9a, + 0xdb, 0xe0, 0xde, 0x8b, 0x81, 0xf7, 0x47, 0x85, 0xe3, 0x61, 0x6e, 0x75, 0xe5, 0x9f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x3a, 0xb1, 0xdd, 0xd9, 0x57, 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/intent.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/intent.pb.go new file mode 100644 index 0000000..ecae959 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/intent.pb.go @@ -0,0 +1,3390 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/dialogflow/v2/intent.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the options for views of an intent. +// An intent can be a sizable object. Therefore, we provide a resource view that +// does not return training phrases in the response by default. +type IntentView int32 + +const ( + // Training phrases field is not populated in the response. + IntentView_INTENT_VIEW_UNSPECIFIED IntentView = 0 + // All fields are populated. + IntentView_INTENT_VIEW_FULL IntentView = 1 +) + +var IntentView_name = map[int32]string{ + 0: "INTENT_VIEW_UNSPECIFIED", + 1: "INTENT_VIEW_FULL", +} +var IntentView_value = map[string]int32{ + "INTENT_VIEW_UNSPECIFIED": 0, + "INTENT_VIEW_FULL": 1, +} + +func (x IntentView) String() string { + return proto.EnumName(IntentView_name, int32(x)) +} +func (IntentView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0} +} + +// Represents the different states that webhooks can be in. +type Intent_WebhookState int32 + +const ( + // Webhook is disabled in the agent and in the intent. + Intent_WEBHOOK_STATE_UNSPECIFIED Intent_WebhookState = 0 + // Webhook is enabled in the agent and in the intent. + Intent_WEBHOOK_STATE_ENABLED Intent_WebhookState = 1 + // Webhook is enabled in the agent and in the intent. Also, each slot + // filling prompt is forwarded to the webhook. + Intent_WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING Intent_WebhookState = 2 +) + +var Intent_WebhookState_name = map[int32]string{ + 0: "WEBHOOK_STATE_UNSPECIFIED", + 1: "WEBHOOK_STATE_ENABLED", + 2: "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING", +} +var Intent_WebhookState_value = map[string]int32{ + "WEBHOOK_STATE_UNSPECIFIED": 0, + "WEBHOOK_STATE_ENABLED": 1, + "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING": 2, +} + +func (x Intent_WebhookState) String() string { + return proto.EnumName(Intent_WebhookState_name, int32(x)) +} +func (Intent_WebhookState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 0} +} + +// Represents different types of training phrases. +type Intent_TrainingPhrase_Type int32 + +const ( + // Not specified. This value should never be used. + Intent_TrainingPhrase_TYPE_UNSPECIFIED Intent_TrainingPhrase_Type = 0 + // Examples do not contain @-prefixed entity type names, but example parts + // can be annotated with entity types. 
+ Intent_TrainingPhrase_EXAMPLE Intent_TrainingPhrase_Type = 1 + // Templates are not annotated with entity types, but they can contain + // @-prefixed entity type names as substrings. + // Template mode has been deprecated. Example mode is the only supported + // way to create new training phrases. If you have existing training + // phrases that you've created in template mode, those will continue to + // work. + Intent_TrainingPhrase_TEMPLATE Intent_TrainingPhrase_Type = 2 // Deprecated: Do not use. +) + +var Intent_TrainingPhrase_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "EXAMPLE", + 2: "TEMPLATE", +} +var Intent_TrainingPhrase_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "EXAMPLE": 1, + "TEMPLATE": 2, +} + +func (x Intent_TrainingPhrase_Type) String() string { + return proto.EnumName(Intent_TrainingPhrase_Type_name, int32(x)) +} +func (Intent_TrainingPhrase_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 0, 0} +} + +// Represents different platforms that a rich message can be intended for. +type Intent_Message_Platform int32 + +const ( + // Not specified. + Intent_Message_PLATFORM_UNSPECIFIED Intent_Message_Platform = 0 + // Facebook. + Intent_Message_FACEBOOK Intent_Message_Platform = 1 + // Slack. + Intent_Message_SLACK Intent_Message_Platform = 2 + // Telegram. + Intent_Message_TELEGRAM Intent_Message_Platform = 3 + // Kik. + Intent_Message_KIK Intent_Message_Platform = 4 + // Skype. + Intent_Message_SKYPE Intent_Message_Platform = 5 + // Line. + Intent_Message_LINE Intent_Message_Platform = 6 + // Viber. + Intent_Message_VIBER Intent_Message_Platform = 7 + // Actions on Google. + // When using Actions on Google, you can choose one of the specific + // Intent.Message types that mention support for Actions on Google, + // or you can use the advanced Intent.Message.payload field. + // The payload field provides access to AoG features not available in the + // specific message types. + // If using the Intent.Message.payload field, it should have a structure + // similar to the JSON message shown here. For more information, see + // [Actions on Google Webhook + // Format](https://developers.google.com/actions/dialogflow/webhook) + //
+	// {
+	//   "expectUserResponse": true,
+	//   "isSsml": false,
+	//   "noInputPrompts": [],
+	//   "richResponse": {
+	//     "items": [
+	//       {
+	//         "simpleResponse": {
+	//           "displayText": "hi",
+	//           "textToSpeech": "hello"
+	//         }
+	//       }
+	//     ],
+	//     "suggestions": [
+	//       {
+	//         "title": "Say this"
+	//       },
+	//       {
+	//         "title": "or this"
+	//       }
+	//     ]
+	//   },
+	//   "systemIntent": {
+	//     "data": {
+	//       "@type": "type.googleapis.com/google.actions.v2.OptionValueSpec",
+	//       "listSelect": {
+	//         "items": [
+	//           {
+	//             "optionInfo": {
+	//               "key": "key1",
+	//               "synonyms": [
+	//                 "key one"
+	//               ]
+	//             },
+	//             "title": "must not be empty, but unique"
+	//           },
+	//           {
+	//             "optionInfo": {
+	//               "key": "key2",
+	//               "synonyms": [
+	//                 "key two"
+	//               ]
+	//             },
+	//             "title": "must not be empty, but unique"
+	//           }
+	//         ]
+	//       }
+	//     },
+	//     "intent": "actions.intent.OPTION"
+	//   }
+	// }
+ Intent_Message_ACTIONS_ON_GOOGLE Intent_Message_Platform = 8 +) + +var Intent_Message_Platform_name = map[int32]string{ + 0: "PLATFORM_UNSPECIFIED", + 1: "FACEBOOK", + 2: "SLACK", + 3: "TELEGRAM", + 4: "KIK", + 5: "SKYPE", + 6: "LINE", + 7: "VIBER", + 8: "ACTIONS_ON_GOOGLE", +} +var Intent_Message_Platform_value = map[string]int32{ + "PLATFORM_UNSPECIFIED": 0, + "FACEBOOK": 1, + "SLACK": 2, + "TELEGRAM": 3, + "KIK": 4, + "SKYPE": 5, + "LINE": 6, + "VIBER": 7, + "ACTIONS_ON_GOOGLE": 8, +} + +func (x Intent_Message_Platform) String() string { + return proto.EnumName(Intent_Message_Platform_name, int32(x)) +} +func (Intent_Message_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 0} +} + +// Represents an intent. +// Intents convert a number of user expressions or patterns into an action. An +// action is an extraction of a user command or sentence semantics. +type Intent struct { + // The unique identifier of this intent. + // Required for [Intents.UpdateIntent][google.cloud.dialogflow.v2.Intents.UpdateIntent] and [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents] + // methods. + // Format: `projects//agent/intents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of this intent. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. Indicates whether webhooks are enabled for the intent. + WebhookState Intent_WebhookState `protobuf:"varint,6,opt,name=webhook_state,json=webhookState,proto3,enum=google.cloud.dialogflow.v2.Intent_WebhookState" json:"webhook_state,omitempty"` + // Optional. The priority of this intent. Higher numbers represent higher + // priorities. If this is zero or unspecified, we use the default + // priority 500000. + // + // Negative numbers mean that the intent is disabled. + Priority int32 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"` + // Optional. Indicates whether this is a fallback intent. + IsFallback bool `protobuf:"varint,4,opt,name=is_fallback,json=isFallback,proto3" json:"is_fallback,omitempty"` + // Optional. Indicates whether Machine Learning is disabled for the intent. + // Note: If `ml_diabled` setting is set to true, then this intent is not + // taken into account during inference in `ML ONLY` match mode. Also, + // auto-markup in the UI is turned off. + MlDisabled bool `protobuf:"varint,19,opt,name=ml_disabled,json=mlDisabled,proto3" json:"ml_disabled,omitempty"` + // Optional. The list of context names required for this intent to be + // triggered. + // Format: `projects//agent/sessions/-/contexts/`. + InputContextNames []string `protobuf:"bytes,7,rep,name=input_context_names,json=inputContextNames,proto3" json:"input_context_names,omitempty"` + // Optional. The collection of event names that trigger the intent. + // If the collection of input contexts is not empty, all of the contexts must + // be present in the active user session for an event to trigger this intent. + Events []string `protobuf:"bytes,8,rep,name=events,proto3" json:"events,omitempty"` + // Optional. The collection of examples that the agent is + // trained on. + TrainingPhrases []*Intent_TrainingPhrase `protobuf:"bytes,9,rep,name=training_phrases,json=trainingPhrases,proto3" json:"training_phrases,omitempty"` + // Optional. The name of the action associated with the intent. + // Note: The action name must not contain whitespaces. 
+ Action string `protobuf:"bytes,10,opt,name=action,proto3" json:"action,omitempty"` + // Optional. The collection of contexts that are activated when the intent + // is matched. Context messages in this collection should not set the + // parameters field. Setting the `lifespan_count` to 0 will reset the context + // when the intent is matched. + // Format: `projects//agent/sessions/-/contexts/`. + OutputContexts []*Context `protobuf:"bytes,11,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // Optional. Indicates whether to delete all contexts in the current + // session when this intent is matched. + ResetContexts bool `protobuf:"varint,12,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"` + // Optional. The collection of parameters associated with the intent. + Parameters []*Intent_Parameter `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"` + // Optional. The collection of rich messages corresponding to the + // `Response` field in the Dialogflow console. + Messages []*Intent_Message `protobuf:"bytes,14,rep,name=messages,proto3" json:"messages,omitempty"` + // Optional. The list of platforms for which the first response will be + // taken from among the messages assigned to the DEFAULT_PLATFORM. + DefaultResponsePlatforms []Intent_Message_Platform `protobuf:"varint,15,rep,packed,name=default_response_platforms,json=defaultResponsePlatforms,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_Platform" json:"default_response_platforms,omitempty"` + // Read-only. The unique identifier of the root intent in the chain of + // followup intents. It identifies the correct followup intents chain for + // this intent. We populate this field only in the output. + // + // Format: `projects//agent/intents/`. + RootFollowupIntentName string `protobuf:"bytes,16,opt,name=root_followup_intent_name,json=rootFollowupIntentName,proto3" json:"root_followup_intent_name,omitempty"` + // Read-only after creation. The unique identifier of the parent intent in the + // chain of followup intents. You can set this field when creating an intent, + // for example with [CreateIntent][] or [BatchUpdateIntents][], in order to + // make this intent a followup intent. + // + // It identifies the parent followup intent. + // Format: `projects//agent/intents/`. + ParentFollowupIntentName string `protobuf:"bytes,17,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` + // Read-only. Information about all followup intents that have this intent as + // a direct or indirect parent. We populate this field only in the output. 
+ FollowupIntentInfo []*Intent_FollowupIntentInfo `protobuf:"bytes,18,rep,name=followup_intent_info,json=followupIntentInfo,proto3" json:"followup_intent_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent) Reset() { *m = Intent{} } +func (m *Intent) String() string { return proto.CompactTextString(m) } +func (*Intent) ProtoMessage() {} +func (*Intent) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0} +} +func (m *Intent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent.Unmarshal(m, b) +} +func (m *Intent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent.Marshal(b, m, deterministic) +} +func (dst *Intent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent.Merge(dst, src) +} +func (m *Intent) XXX_Size() int { + return xxx_messageInfo_Intent.Size(m) +} +func (m *Intent) XXX_DiscardUnknown() { + xxx_messageInfo_Intent.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent proto.InternalMessageInfo + +func (m *Intent) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Intent) GetWebhookState() Intent_WebhookState { + if m != nil { + return m.WebhookState + } + return Intent_WEBHOOK_STATE_UNSPECIFIED +} + +func (m *Intent) GetPriority() int32 { + if m != nil { + return m.Priority + } + return 0 +} + +func (m *Intent) GetIsFallback() bool { + if m != nil { + return m.IsFallback + } + return false +} + +func (m *Intent) GetMlDisabled() bool { + if m != nil { + return m.MlDisabled + } + return false +} + +func (m *Intent) GetInputContextNames() []string { + if m != nil { + return m.InputContextNames + } + return nil +} + +func (m *Intent) GetEvents() []string { + if m != nil { + return m.Events + } + return nil +} + +func (m *Intent) GetTrainingPhrases() []*Intent_TrainingPhrase { + if m != nil { + return m.TrainingPhrases + } + return nil +} + +func (m *Intent) GetAction() string { + if m != nil { + return m.Action + } + return "" +} + +func (m *Intent) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *Intent) GetResetContexts() bool { + if m != nil { + return m.ResetContexts + } + return false +} + +func (m *Intent) GetParameters() []*Intent_Parameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Intent) GetMessages() []*Intent_Message { + if m != nil { + return m.Messages + } + return nil +} + +func (m *Intent) GetDefaultResponsePlatforms() []Intent_Message_Platform { + if m != nil { + return m.DefaultResponsePlatforms + } + return nil +} + +func (m *Intent) GetRootFollowupIntentName() string { + if m != nil { + return m.RootFollowupIntentName + } + return "" +} + +func (m *Intent) GetParentFollowupIntentName() string { + if m != nil { + return m.ParentFollowupIntentName + } + return "" +} + +func (m *Intent) GetFollowupIntentInfo() []*Intent_FollowupIntentInfo { + if m != nil { + return m.FollowupIntentInfo + } + return nil +} + +// Represents an example that the agent is trained on. +type Intent_TrainingPhrase struct { + // Output only. The unique identifier of this training phrase. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The type of the training phrase. 
+ Type Intent_TrainingPhrase_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.dialogflow.v2.Intent_TrainingPhrase_Type" json:"type,omitempty"` + // Required. The ordered list of training phrase parts. + // The parts are concatenated in order to form the training phrase. + // + // Note: The API does not automatically annotate training phrases like the + // Dialogflow Console does. + // + // Note: Do not forget to include whitespace at part boundaries, + // so the training phrase is well formatted when the parts are concatenated. + // + // If the training phrase does not need to be annotated with parameters, + // you just need a single part with only the [Part.text][google.cloud.dialogflow.v2.Intent.TrainingPhrase.Part.text] field set. + // + // If you want to annotate the training phrase, you must create multiple + // parts, where the fields of each part are populated in one of two ways: + // + // - `Part.text` is set to a part of the phrase that has no parameters. + // - `Part.text` is set to a part of the phrase that you want to annotate, + // and the `entity_type`, `alias`, and `user_defined` fields are all + // set. + Parts []*Intent_TrainingPhrase_Part `protobuf:"bytes,3,rep,name=parts,proto3" json:"parts,omitempty"` + // Optional. Indicates how many times this example was added to + // the intent. Each time a developer adds an existing sample by editing an + // intent or training, this counter is increased. + TimesAddedCount int32 `protobuf:"varint,4,opt,name=times_added_count,json=timesAddedCount,proto3" json:"times_added_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_TrainingPhrase) Reset() { *m = Intent_TrainingPhrase{} } +func (m *Intent_TrainingPhrase) String() string { return proto.CompactTextString(m) } +func (*Intent_TrainingPhrase) ProtoMessage() {} +func (*Intent_TrainingPhrase) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 0} +} +func (m *Intent_TrainingPhrase) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_TrainingPhrase.Unmarshal(m, b) +} +func (m *Intent_TrainingPhrase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_TrainingPhrase.Marshal(b, m, deterministic) +} +func (dst *Intent_TrainingPhrase) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_TrainingPhrase.Merge(dst, src) +} +func (m *Intent_TrainingPhrase) XXX_Size() int { + return xxx_messageInfo_Intent_TrainingPhrase.Size(m) +} +func (m *Intent_TrainingPhrase) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_TrainingPhrase.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_TrainingPhrase proto.InternalMessageInfo + +func (m *Intent_TrainingPhrase) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent_TrainingPhrase) GetType() Intent_TrainingPhrase_Type { + if m != nil { + return m.Type + } + return Intent_TrainingPhrase_TYPE_UNSPECIFIED +} + +func (m *Intent_TrainingPhrase) GetParts() []*Intent_TrainingPhrase_Part { + if m != nil { + return m.Parts + } + return nil +} + +func (m *Intent_TrainingPhrase) GetTimesAddedCount() int32 { + if m != nil { + return m.TimesAddedCount + } + return 0 +} + +// Represents a part of a training phrase. +type Intent_TrainingPhrase_Part struct { + // Required. The text for this part. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Optional. The entity type name prefixed with `@`. 
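The part-based annotation scheme described in the comments above is easiest to see with a small sketch. This is illustrative only and not part of the generated file; the phrase text, entity type and alias are made-up values:

	phrase := &Intent_TrainingPhrase{
		Parts: []*Intent_TrainingPhrase_Part{
			// Plain fragment of the phrase: only Part.text is set.
			{Text: "book a flight to "},
			// Annotated fragment: entity_type, alias and user_defined are all set.
			{Text: "Paris", EntityType: "@sys.geo-city", Alias: "city", UserDefined: true},
		},
	}
	_ = phrase

Note the trailing whitespace in the first part, so the concatenated phrase stays well formatted as the comment above requires.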
+ // This field is required for annotated parts of the training phrase. + EntityType string `protobuf:"bytes,2,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The parameter name for the value extracted from the + // annotated part of the example. + // This field is required for annotated parts of the training phrase. + Alias string `protobuf:"bytes,3,opt,name=alias,proto3" json:"alias,omitempty"` + // Optional. Indicates whether the text was manually annotated. + // This field is set to true when the Dialogflow Console is used to + // manually annotate the part. When creating an annotated part with the + // API, you must set this to true. + UserDefined bool `protobuf:"varint,4,opt,name=user_defined,json=userDefined,proto3" json:"user_defined,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_TrainingPhrase_Part) Reset() { *m = Intent_TrainingPhrase_Part{} } +func (m *Intent_TrainingPhrase_Part) String() string { return proto.CompactTextString(m) } +func (*Intent_TrainingPhrase_Part) ProtoMessage() {} +func (*Intent_TrainingPhrase_Part) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 0, 0} +} +func (m *Intent_TrainingPhrase_Part) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Unmarshal(m, b) +} +func (m *Intent_TrainingPhrase_Part) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Marshal(b, m, deterministic) +} +func (dst *Intent_TrainingPhrase_Part) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_TrainingPhrase_Part.Merge(dst, src) +} +func (m *Intent_TrainingPhrase_Part) XXX_Size() int { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Size(m) +} +func (m *Intent_TrainingPhrase_Part) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_TrainingPhrase_Part.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_TrainingPhrase_Part proto.InternalMessageInfo + +func (m *Intent_TrainingPhrase_Part) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetEntityType() string { + if m != nil { + return m.EntityType + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetAlias() string { + if m != nil { + return m.Alias + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetUserDefined() bool { + if m != nil { + return m.UserDefined + } + return false +} + +// Represents intent parameters. +type Intent_Parameter struct { + // The unique identifier of this parameter. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the parameter. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. The definition of the parameter value. It can be: + // - a constant string, + // - a parameter value defined as `$parameter_name`, + // - an original parameter value defined as `$parameter_name.original`, + // - a parameter value from some context defined as + // `#context_name.parameter_name`. + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + // Optional. The default value to use when the `value` yields an empty + // result. + // Default values can be extracted from contexts by using the following + // syntax: `#context_name.parameter_name`. 
+ DefaultValue string `protobuf:"bytes,4,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + // Optional. The name of the entity type, prefixed with `@`, that + // describes values of the parameter. If the parameter is + // required, this must be provided. + EntityTypeDisplayName string `protobuf:"bytes,5,opt,name=entity_type_display_name,json=entityTypeDisplayName,proto3" json:"entity_type_display_name,omitempty"` + // Optional. Indicates whether the parameter is required. That is, + // whether the intent cannot be completed without collecting the parameter + // value. + Mandatory bool `protobuf:"varint,6,opt,name=mandatory,proto3" json:"mandatory,omitempty"` + // Optional. The collection of prompts that the agent can present to the + // user in order to collect value for the parameter. + Prompts []string `protobuf:"bytes,7,rep,name=prompts,proto3" json:"prompts,omitempty"` + // Optional. Indicates whether the parameter represents a list of values. + IsList bool `protobuf:"varint,8,opt,name=is_list,json=isList,proto3" json:"is_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Parameter) Reset() { *m = Intent_Parameter{} } +func (m *Intent_Parameter) String() string { return proto.CompactTextString(m) } +func (*Intent_Parameter) ProtoMessage() {} +func (*Intent_Parameter) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 1} +} +func (m *Intent_Parameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Parameter.Unmarshal(m, b) +} +func (m *Intent_Parameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Parameter.Marshal(b, m, deterministic) +} +func (dst *Intent_Parameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Parameter.Merge(dst, src) +} +func (m *Intent_Parameter) XXX_Size() int { + return xxx_messageInfo_Intent_Parameter.Size(m) +} +func (m *Intent_Parameter) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Parameter.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Parameter proto.InternalMessageInfo + +func (m *Intent_Parameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent_Parameter) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Intent_Parameter) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Intent_Parameter) GetDefaultValue() string { + if m != nil { + return m.DefaultValue + } + return "" +} + +func (m *Intent_Parameter) GetEntityTypeDisplayName() string { + if m != nil { + return m.EntityTypeDisplayName + } + return "" +} + +func (m *Intent_Parameter) GetMandatory() bool { + if m != nil { + return m.Mandatory + } + return false +} + +func (m *Intent_Parameter) GetPrompts() []string { + if m != nil { + return m.Prompts + } + return nil +} + +func (m *Intent_Parameter) GetIsList() bool { + if m != nil { + return m.IsList + } + return false +} + +// Corresponds to the `Response` field in the Dialogflow console. +type Intent_Message struct { + // Required. The rich response message. 
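As a rough sketch of how the `$parameter_name` and `#context_name.parameter_name` references described above fit together (the entity type, context name and prompt text are invented for illustration):

	param := &Intent_Parameter{
		DisplayName:           "city",
		Value:                 "$city",              // the value extracted for this parameter
		DefaultValue:          "#home-context.city", // fall back to a context value when `$city` is empty
		EntityTypeDisplayName: "@sys.geo-city",
		Mandatory:             true,
		Prompts:               []string{"Which city?"},
	}
	_ = param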
+ // + // Types that are valid to be assigned to Message: + // *Intent_Message_Text_ + // *Intent_Message_Image_ + // *Intent_Message_QuickReplies_ + // *Intent_Message_Card_ + // *Intent_Message_Payload + // *Intent_Message_SimpleResponses_ + // *Intent_Message_BasicCard_ + // *Intent_Message_Suggestions_ + // *Intent_Message_LinkOutSuggestion_ + // *Intent_Message_ListSelect_ + // *Intent_Message_CarouselSelect_ + Message isIntent_Message_Message `protobuf_oneof:"message"` + // Optional. The platform that this message is intended for. + Platform Intent_Message_Platform `protobuf:"varint,6,opt,name=platform,proto3,enum=google.cloud.dialogflow.v2.Intent_Message_Platform" json:"platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message) Reset() { *m = Intent_Message{} } +func (m *Intent_Message) String() string { return proto.CompactTextString(m) } +func (*Intent_Message) ProtoMessage() {} +func (*Intent_Message) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2} +} +func (m *Intent_Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message.Unmarshal(m, b) +} +func (m *Intent_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message.Marshal(b, m, deterministic) +} +func (dst *Intent_Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message.Merge(dst, src) +} +func (m *Intent_Message) XXX_Size() int { + return xxx_messageInfo_Intent_Message.Size(m) +} +func (m *Intent_Message) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message proto.InternalMessageInfo + +type isIntent_Message_Message interface { + isIntent_Message_Message() +} + +type Intent_Message_Text_ struct { + Text *Intent_Message_Text `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type Intent_Message_Image_ struct { + Image *Intent_Message_Image `protobuf:"bytes,2,opt,name=image,proto3,oneof"` +} + +type Intent_Message_QuickReplies_ struct { + QuickReplies *Intent_Message_QuickReplies `protobuf:"bytes,3,opt,name=quick_replies,json=quickReplies,proto3,oneof"` +} + +type Intent_Message_Card_ struct { + Card *Intent_Message_Card `protobuf:"bytes,4,opt,name=card,proto3,oneof"` +} + +type Intent_Message_Payload struct { + Payload *_struct.Struct `protobuf:"bytes,5,opt,name=payload,proto3,oneof"` +} + +type Intent_Message_SimpleResponses_ struct { + SimpleResponses *Intent_Message_SimpleResponses `protobuf:"bytes,7,opt,name=simple_responses,json=simpleResponses,proto3,oneof"` +} + +type Intent_Message_BasicCard_ struct { + BasicCard *Intent_Message_BasicCard `protobuf:"bytes,8,opt,name=basic_card,json=basicCard,proto3,oneof"` +} + +type Intent_Message_Suggestions_ struct { + Suggestions *Intent_Message_Suggestions `protobuf:"bytes,9,opt,name=suggestions,proto3,oneof"` +} + +type Intent_Message_LinkOutSuggestion_ struct { + LinkOutSuggestion *Intent_Message_LinkOutSuggestion `protobuf:"bytes,10,opt,name=link_out_suggestion,json=linkOutSuggestion,proto3,oneof"` +} + +type Intent_Message_ListSelect_ struct { + ListSelect *Intent_Message_ListSelect `protobuf:"bytes,11,opt,name=list_select,json=listSelect,proto3,oneof"` +} + +type Intent_Message_CarouselSelect_ struct { + CarouselSelect *Intent_Message_CarouselSelect `protobuf:"bytes,12,opt,name=carousel_select,json=carouselSelect,proto3,oneof"` +} + +func (*Intent_Message_Text_) 
isIntent_Message_Message() {} + +func (*Intent_Message_Image_) isIntent_Message_Message() {} + +func (*Intent_Message_QuickReplies_) isIntent_Message_Message() {} + +func (*Intent_Message_Card_) isIntent_Message_Message() {} + +func (*Intent_Message_Payload) isIntent_Message_Message() {} + +func (*Intent_Message_SimpleResponses_) isIntent_Message_Message() {} + +func (*Intent_Message_BasicCard_) isIntent_Message_Message() {} + +func (*Intent_Message_Suggestions_) isIntent_Message_Message() {} + +func (*Intent_Message_LinkOutSuggestion_) isIntent_Message_Message() {} + +func (*Intent_Message_ListSelect_) isIntent_Message_Message() {} + +func (*Intent_Message_CarouselSelect_) isIntent_Message_Message() {} + +func (m *Intent_Message) GetMessage() isIntent_Message_Message { + if m != nil { + return m.Message + } + return nil +} + +func (m *Intent_Message) GetText() *Intent_Message_Text { + if x, ok := m.GetMessage().(*Intent_Message_Text_); ok { + return x.Text + } + return nil +} + +func (m *Intent_Message) GetImage() *Intent_Message_Image { + if x, ok := m.GetMessage().(*Intent_Message_Image_); ok { + return x.Image + } + return nil +} + +func (m *Intent_Message) GetQuickReplies() *Intent_Message_QuickReplies { + if x, ok := m.GetMessage().(*Intent_Message_QuickReplies_); ok { + return x.QuickReplies + } + return nil +} + +func (m *Intent_Message) GetCard() *Intent_Message_Card { + if x, ok := m.GetMessage().(*Intent_Message_Card_); ok { + return x.Card + } + return nil +} + +func (m *Intent_Message) GetPayload() *_struct.Struct { + if x, ok := m.GetMessage().(*Intent_Message_Payload); ok { + return x.Payload + } + return nil +} + +func (m *Intent_Message) GetSimpleResponses() *Intent_Message_SimpleResponses { + if x, ok := m.GetMessage().(*Intent_Message_SimpleResponses_); ok { + return x.SimpleResponses + } + return nil +} + +func (m *Intent_Message) GetBasicCard() *Intent_Message_BasicCard { + if x, ok := m.GetMessage().(*Intent_Message_BasicCard_); ok { + return x.BasicCard + } + return nil +} + +func (m *Intent_Message) GetSuggestions() *Intent_Message_Suggestions { + if x, ok := m.GetMessage().(*Intent_Message_Suggestions_); ok { + return x.Suggestions + } + return nil +} + +func (m *Intent_Message) GetLinkOutSuggestion() *Intent_Message_LinkOutSuggestion { + if x, ok := m.GetMessage().(*Intent_Message_LinkOutSuggestion_); ok { + return x.LinkOutSuggestion + } + return nil +} + +func (m *Intent_Message) GetListSelect() *Intent_Message_ListSelect { + if x, ok := m.GetMessage().(*Intent_Message_ListSelect_); ok { + return x.ListSelect + } + return nil +} + +func (m *Intent_Message) GetCarouselSelect() *Intent_Message_CarouselSelect { + if x, ok := m.GetMessage().(*Intent_Message_CarouselSelect_); ok { + return x.CarouselSelect + } + return nil +} + +func (m *Intent_Message) GetPlatform() Intent_Message_Platform { + if m != nil { + return m.Platform + } + return Intent_Message_PLATFORM_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
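A minimal sketch of how the `message` oneof wrappers and getters above are typically used, assuming the surrounding package; the greeting text is made up:

	msg := &Intent_Message{
		Platform: Intent_Message_ACTIONS_ON_GOOGLE,
		Message:  &Intent_Message_Text_{Text: &Intent_Message_Text{Text: []string{"Hi there!"}}},
	}

	// Read the oneof back either through the typed getter...
	if t := msg.GetText(); t != nil {
		_ = t.GetText() // []string{"Hi there!"}
	}
	// ...or by switching on the wrapper type.
	switch m := msg.GetMessage().(type) {
	case *Intent_Message_Text_:
		_ = m.Text
	case *Intent_Message_Image_:
		_ = m.Image
	}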
+func (*Intent_Message) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Intent_Message_OneofMarshaler, _Intent_Message_OneofUnmarshaler, _Intent_Message_OneofSizer, []interface{}{ + (*Intent_Message_Text_)(nil), + (*Intent_Message_Image_)(nil), + (*Intent_Message_QuickReplies_)(nil), + (*Intent_Message_Card_)(nil), + (*Intent_Message_Payload)(nil), + (*Intent_Message_SimpleResponses_)(nil), + (*Intent_Message_BasicCard_)(nil), + (*Intent_Message_Suggestions_)(nil), + (*Intent_Message_LinkOutSuggestion_)(nil), + (*Intent_Message_ListSelect_)(nil), + (*Intent_Message_CarouselSelect_)(nil), + } +} + +func _Intent_Message_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Intent_Message) + // message + switch x := m.Message.(type) { + case *Intent_Message_Text_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Text); err != nil { + return err + } + case *Intent_Message_Image_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Image); err != nil { + return err + } + case *Intent_Message_QuickReplies_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QuickReplies); err != nil { + return err + } + case *Intent_Message_Card_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Card); err != nil { + return err + } + case *Intent_Message_Payload: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Payload); err != nil { + return err + } + case *Intent_Message_SimpleResponses_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SimpleResponses); err != nil { + return err + } + case *Intent_Message_BasicCard_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BasicCard); err != nil { + return err + } + case *Intent_Message_Suggestions_: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Suggestions); err != nil { + return err + } + case *Intent_Message_LinkOutSuggestion_: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LinkOutSuggestion); err != nil { + return err + } + case *Intent_Message_ListSelect_: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListSelect); err != nil { + return err + } + case *Intent_Message_CarouselSelect_: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CarouselSelect); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Intent_Message.Message has unexpected type %T", x) + } + return nil +} + +func _Intent_Message_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Intent_Message) + switch tag { + case 1: // message.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Text) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Text_{msg} + return true, err + case 2: // message.image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Image) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Image_{msg} + return true, err + case 3: // message.quick_replies + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_QuickReplies) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_QuickReplies_{msg} + return true, err + case 4: // 
message.card + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Card) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Card_{msg} + return true, err + case 5: // message.payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Payload{msg} + return true, err + case 7: // message.simple_responses + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_SimpleResponses) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_SimpleResponses_{msg} + return true, err + case 8: // message.basic_card + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_BasicCard) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_BasicCard_{msg} + return true, err + case 9: // message.suggestions + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Suggestions) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Suggestions_{msg} + return true, err + case 10: // message.link_out_suggestion + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_LinkOutSuggestion) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_LinkOutSuggestion_{msg} + return true, err + case 11: // message.list_select + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_ListSelect) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_ListSelect_{msg} + return true, err + case 12: // message.carousel_select + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_CarouselSelect) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_CarouselSelect_{msg} + return true, err + default: + return false, nil + } +} + +func _Intent_Message_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Intent_Message) + // message + switch x := m.Message.(type) { + case *Intent_Message_Text_: + s := proto.Size(x.Text) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Image_: + s := proto.Size(x.Image) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_QuickReplies_: + s := proto.Size(x.QuickReplies) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Card_: + s := proto.Size(x.Card) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Payload: + s := proto.Size(x.Payload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_SimpleResponses_: + s := proto.Size(x.SimpleResponses) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_BasicCard_: + s := proto.Size(x.BasicCard) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Suggestions_: + s := proto.Size(x.Suggestions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_LinkOutSuggestion_: + s := proto.Size(x.LinkOutSuggestion) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_ListSelect_: + s := proto.Size(x.ListSelect) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*Intent_Message_CarouselSelect_: + s := proto.Size(x.CarouselSelect) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The text response message. +type Intent_Message_Text struct { + // Optional. The collection of the agent's responses. + Text []string `protobuf:"bytes,1,rep,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Text) Reset() { *m = Intent_Message_Text{} } +func (m *Intent_Message_Text) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Text) ProtoMessage() {} +func (*Intent_Message_Text) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 0} +} +func (m *Intent_Message_Text) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Text.Unmarshal(m, b) +} +func (m *Intent_Message_Text) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Text.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Text) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Text.Merge(dst, src) +} +func (m *Intent_Message_Text) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Text.Size(m) +} +func (m *Intent_Message_Text) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Text.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Text proto.InternalMessageInfo + +func (m *Intent_Message_Text) GetText() []string { + if m != nil { + return m.Text + } + return nil +} + +// The image response message. +type Intent_Message_Image struct { + // Optional. The public URI to an image file. + ImageUri string `protobuf:"bytes,1,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. A text description of the image to be used for accessibility, + // e.g., screen readers. 
+ AccessibilityText string `protobuf:"bytes,2,opt,name=accessibility_text,json=accessibilityText,proto3" json:"accessibility_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Image) Reset() { *m = Intent_Message_Image{} } +func (m *Intent_Message_Image) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Image) ProtoMessage() {} +func (*Intent_Message_Image) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 1} +} +func (m *Intent_Message_Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Image.Unmarshal(m, b) +} +func (m *Intent_Message_Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Image.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Image.Merge(dst, src) +} +func (m *Intent_Message_Image) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Image.Size(m) +} +func (m *Intent_Message_Image) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Image proto.InternalMessageInfo + +func (m *Intent_Message_Image) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *Intent_Message_Image) GetAccessibilityText() string { + if m != nil { + return m.AccessibilityText + } + return "" +} + +// The quick replies response message. +type Intent_Message_QuickReplies struct { + // Optional. The title of the collection of quick replies. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The collection of quick replies. + QuickReplies []string `protobuf:"bytes,2,rep,name=quick_replies,json=quickReplies,proto3" json:"quick_replies,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_QuickReplies) Reset() { *m = Intent_Message_QuickReplies{} } +func (m *Intent_Message_QuickReplies) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_QuickReplies) ProtoMessage() {} +func (*Intent_Message_QuickReplies) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 2} +} +func (m *Intent_Message_QuickReplies) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_QuickReplies.Unmarshal(m, b) +} +func (m *Intent_Message_QuickReplies) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_QuickReplies.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_QuickReplies) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_QuickReplies.Merge(dst, src) +} +func (m *Intent_Message_QuickReplies) XXX_Size() int { + return xxx_messageInfo_Intent_Message_QuickReplies.Size(m) +} +func (m *Intent_Message_QuickReplies) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_QuickReplies.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_QuickReplies proto.InternalMessageInfo + +func (m *Intent_Message_QuickReplies) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_QuickReplies) GetQuickReplies() []string { + if m != nil { + return m.QuickReplies + } + return nil +} + +// The card response message. +type Intent_Message_Card struct { + // Optional. 
The title of the card. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The subtitle of the card. + Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` + // Optional. The public URI to an image file for the card. + ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. The collection of card buttons. + Buttons []*Intent_Message_Card_Button `protobuf:"bytes,4,rep,name=buttons,proto3" json:"buttons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Card) Reset() { *m = Intent_Message_Card{} } +func (m *Intent_Message_Card) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Card) ProtoMessage() {} +func (*Intent_Message_Card) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 3} +} +func (m *Intent_Message_Card) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Card.Unmarshal(m, b) +} +func (m *Intent_Message_Card) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Card.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Card) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Card.Merge(dst, src) +} +func (m *Intent_Message_Card) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Card.Size(m) +} +func (m *Intent_Message_Card) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Card.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Card proto.InternalMessageInfo + +func (m *Intent_Message_Card) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_Card) GetSubtitle() string { + if m != nil { + return m.Subtitle + } + return "" +} + +func (m *Intent_Message_Card) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *Intent_Message_Card) GetButtons() []*Intent_Message_Card_Button { + if m != nil { + return m.Buttons + } + return nil +} + +// Optional. Contains information about a button. +type Intent_Message_Card_Button struct { + // Optional. The text to show on the button. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Optional. The text to send back to the Dialogflow API or a URI to + // open. 
+ Postback string `protobuf:"bytes,2,opt,name=postback,proto3" json:"postback,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Card_Button) Reset() { *m = Intent_Message_Card_Button{} } +func (m *Intent_Message_Card_Button) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Card_Button) ProtoMessage() {} +func (*Intent_Message_Card_Button) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 3, 0} +} +func (m *Intent_Message_Card_Button) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Card_Button.Unmarshal(m, b) +} +func (m *Intent_Message_Card_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Card_Button.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Card_Button) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Card_Button.Merge(dst, src) +} +func (m *Intent_Message_Card_Button) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Card_Button.Size(m) +} +func (m *Intent_Message_Card_Button) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Card_Button.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Card_Button proto.InternalMessageInfo + +func (m *Intent_Message_Card_Button) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Intent_Message_Card_Button) GetPostback() string { + if m != nil { + return m.Postback + } + return "" +} + +// The simple response message containing speech or text. +type Intent_Message_SimpleResponse struct { + // One of text_to_speech or ssml must be provided. The plain text of the + // speech output. Mutually exclusive with ssml. + TextToSpeech string `protobuf:"bytes,1,opt,name=text_to_speech,json=textToSpeech,proto3" json:"text_to_speech,omitempty"` + // One of text_to_speech or ssml must be provided. Structured spoken + // response to the user in the SSML format. Mutually exclusive with + // text_to_speech. + Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3" json:"ssml,omitempty"` + // Optional. The text to display. 
+ DisplayText string `protobuf:"bytes,3,opt,name=display_text,json=displayText,proto3" json:"display_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SimpleResponse) Reset() { *m = Intent_Message_SimpleResponse{} } +func (m *Intent_Message_SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SimpleResponse) ProtoMessage() {} +func (*Intent_Message_SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 4} +} +func (m *Intent_Message_SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SimpleResponse.Unmarshal(m, b) +} +func (m *Intent_Message_SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SimpleResponse.Merge(dst, src) +} +func (m *Intent_Message_SimpleResponse) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SimpleResponse.Size(m) +} +func (m *Intent_Message_SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SimpleResponse proto.InternalMessageInfo + +func (m *Intent_Message_SimpleResponse) GetTextToSpeech() string { + if m != nil { + return m.TextToSpeech + } + return "" +} + +func (m *Intent_Message_SimpleResponse) GetSsml() string { + if m != nil { + return m.Ssml + } + return "" +} + +func (m *Intent_Message_SimpleResponse) GetDisplayText() string { + if m != nil { + return m.DisplayText + } + return "" +} + +// The collection of simple response candidates. +// This message in `QueryResult.fulfillment_messages` and +// `WebhookResponse.fulfillment_messages` should contain only one +// `SimpleResponse`. +type Intent_Message_SimpleResponses struct { + // Required. The list of simple responses. 
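A small illustrative sketch (sample strings are invented) of a `SimpleResponses` message carrying the single `SimpleResponse` that the comment below calls for:

	simple := &Intent_Message_SimpleResponses{
		SimpleResponses: []*Intent_Message_SimpleResponse{
			// text_to_speech and ssml are mutually exclusive; only one is set here.
			{TextToSpeech: "Your flight is booked.", DisplayText: "Flight booked"},
		},
	}
	_ = simple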
+ SimpleResponses []*Intent_Message_SimpleResponse `protobuf:"bytes,1,rep,name=simple_responses,json=simpleResponses,proto3" json:"simple_responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SimpleResponses) Reset() { *m = Intent_Message_SimpleResponses{} } +func (m *Intent_Message_SimpleResponses) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SimpleResponses) ProtoMessage() {} +func (*Intent_Message_SimpleResponses) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 5} +} +func (m *Intent_Message_SimpleResponses) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SimpleResponses.Unmarshal(m, b) +} +func (m *Intent_Message_SimpleResponses) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SimpleResponses.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SimpleResponses) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SimpleResponses.Merge(dst, src) +} +func (m *Intent_Message_SimpleResponses) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SimpleResponses.Size(m) +} +func (m *Intent_Message_SimpleResponses) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SimpleResponses.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SimpleResponses proto.InternalMessageInfo + +func (m *Intent_Message_SimpleResponses) GetSimpleResponses() []*Intent_Message_SimpleResponse { + if m != nil { + return m.SimpleResponses + } + return nil +} + +// The basic card message. Useful for displaying information. +type Intent_Message_BasicCard struct { + // Optional. The title of the card. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The subtitle of the card. + Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` + // Required, unless image is present. The body text of the card. + FormattedText string `protobuf:"bytes,3,opt,name=formatted_text,json=formattedText,proto3" json:"formatted_text,omitempty"` + // Optional. The image for the card. + Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + // Optional. The collection of card buttons. 
+ Buttons []*Intent_Message_BasicCard_Button `protobuf:"bytes,5,rep,name=buttons,proto3" json:"buttons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard) Reset() { *m = Intent_Message_BasicCard{} } +func (m *Intent_Message_BasicCard) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_BasicCard) ProtoMessage() {} +func (*Intent_Message_BasicCard) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 6} +} +func (m *Intent_Message_BasicCard) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard.Merge(dst, src) +} +func (m *Intent_Message_BasicCard) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard.Size(m) +} +func (m *Intent_Message_BasicCard) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_BasicCard) GetSubtitle() string { + if m != nil { + return m.Subtitle + } + return "" +} + +func (m *Intent_Message_BasicCard) GetFormattedText() string { + if m != nil { + return m.FormattedText + } + return "" +} + +func (m *Intent_Message_BasicCard) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *Intent_Message_BasicCard) GetButtons() []*Intent_Message_BasicCard_Button { + if m != nil { + return m.Buttons + } + return nil +} + +// The button object that appears at the bottom of a card. +type Intent_Message_BasicCard_Button struct { + // Required. The title of the button. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Required. Action to take when a user taps on the button. 
+ OpenUriAction *Intent_Message_BasicCard_Button_OpenUriAction `protobuf:"bytes,2,opt,name=open_uri_action,json=openUriAction,proto3" json:"open_uri_action,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard_Button) Reset() { *m = Intent_Message_BasicCard_Button{} } +func (m *Intent_Message_BasicCard_Button) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_BasicCard_Button) ProtoMessage() {} +func (*Intent_Message_BasicCard_Button) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 6, 0} +} +func (m *Intent_Message_BasicCard_Button) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard_Button) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard_Button.Merge(dst, src) +} +func (m *Intent_Message_BasicCard_Button) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Size(m) +} +func (m *Intent_Message_BasicCard_Button) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard_Button.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard_Button proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard_Button) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_BasicCard_Button) GetOpenUriAction() *Intent_Message_BasicCard_Button_OpenUriAction { + if m != nil { + return m.OpenUriAction + } + return nil +} + +// Opens the given URI. +type Intent_Message_BasicCard_Button_OpenUriAction struct { + // Required. The HTTP or HTTPS scheme URI. 
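For orientation, a hedged sketch of a basic card whose button opens a URI, using only the fields defined above; the title, body text and URL are placeholders:

	card := &Intent_Message_BasicCard{
		Title:         "Status",
		FormattedText: "The run finished successfully.",
		Buttons: []*Intent_Message_BasicCard_Button{{
			Title: "View details",
			OpenUriAction: &Intent_Message_BasicCard_Button_OpenUriAction{
				Uri: "https://example.com/details",
			},
		}},
	}
	_ = card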
+ Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard_Button_OpenUriAction) Reset() { + *m = Intent_Message_BasicCard_Button_OpenUriAction{} +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) String() string { + return proto.CompactTextString(m) +} +func (*Intent_Message_BasicCard_Button_OpenUriAction) ProtoMessage() {} +func (*Intent_Message_BasicCard_Button_OpenUriAction) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 6, 0, 0} +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Merge(dst, src) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Size(m) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard_Button_OpenUriAction) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The suggestion chip message that the user can tap to quickly post a reply +// to the conversation. +type Intent_Message_Suggestion struct { + // Required. The text shown the in the suggestion chip. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Suggestion) Reset() { *m = Intent_Message_Suggestion{} } +func (m *Intent_Message_Suggestion) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Suggestion) ProtoMessage() {} +func (*Intent_Message_Suggestion) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 7} +} +func (m *Intent_Message_Suggestion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Suggestion.Unmarshal(m, b) +} +func (m *Intent_Message_Suggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Suggestion.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Suggestion) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Suggestion.Merge(dst, src) +} +func (m *Intent_Message_Suggestion) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Suggestion.Size(m) +} +func (m *Intent_Message_Suggestion) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Suggestion.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Suggestion proto.InternalMessageInfo + +func (m *Intent_Message_Suggestion) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +// The collection of suggestions. +type Intent_Message_Suggestions struct { + // Required. The list of suggested replies. 
+ Suggestions []*Intent_Message_Suggestion `protobuf:"bytes,1,rep,name=suggestions,proto3" json:"suggestions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Suggestions) Reset() { *m = Intent_Message_Suggestions{} } +func (m *Intent_Message_Suggestions) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Suggestions) ProtoMessage() {} +func (*Intent_Message_Suggestions) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 8} +} +func (m *Intent_Message_Suggestions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Suggestions.Unmarshal(m, b) +} +func (m *Intent_Message_Suggestions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Suggestions.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Suggestions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Suggestions.Merge(dst, src) +} +func (m *Intent_Message_Suggestions) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Suggestions.Size(m) +} +func (m *Intent_Message_Suggestions) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Suggestions.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Suggestions proto.InternalMessageInfo + +func (m *Intent_Message_Suggestions) GetSuggestions() []*Intent_Message_Suggestion { + if m != nil { + return m.Suggestions + } + return nil +} + +// The suggestion chip message that allows the user to jump out to the app +// or website associated with this agent. +type Intent_Message_LinkOutSuggestion struct { + // Required. The name of the app or site this chip is linking to. + DestinationName string `protobuf:"bytes,1,opt,name=destination_name,json=destinationName,proto3" json:"destination_name,omitempty"` + // Required. The URI of the app or site to open when the user taps the + // suggestion chip. 
+ Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_LinkOutSuggestion) Reset() { *m = Intent_Message_LinkOutSuggestion{} } +func (m *Intent_Message_LinkOutSuggestion) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_LinkOutSuggestion) ProtoMessage() {} +func (*Intent_Message_LinkOutSuggestion) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 9} +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Unmarshal(m, b) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_LinkOutSuggestion) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_LinkOutSuggestion.Merge(dst, src) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Size() int { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Size(m) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_LinkOutSuggestion.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_LinkOutSuggestion proto.InternalMessageInfo + +func (m *Intent_Message_LinkOutSuggestion) GetDestinationName() string { + if m != nil { + return m.DestinationName + } + return "" +} + +func (m *Intent_Message_LinkOutSuggestion) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The card for presenting a list of options to select from. +type Intent_Message_ListSelect struct { + // Optional. The overall title of the list. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Required. List items. 
+ Items []*Intent_Message_ListSelect_Item `protobuf:"bytes,2,rep,name=items,proto3" json:"items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_ListSelect) Reset() { *m = Intent_Message_ListSelect{} } +func (m *Intent_Message_ListSelect) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_ListSelect) ProtoMessage() {} +func (*Intent_Message_ListSelect) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 10} +} +func (m *Intent_Message_ListSelect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_ListSelect.Unmarshal(m, b) +} +func (m *Intent_Message_ListSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_ListSelect.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_ListSelect) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_ListSelect.Merge(dst, src) +} +func (m *Intent_Message_ListSelect) XXX_Size() int { + return xxx_messageInfo_Intent_Message_ListSelect.Size(m) +} +func (m *Intent_Message_ListSelect) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_ListSelect.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_ListSelect proto.InternalMessageInfo + +func (m *Intent_Message_ListSelect) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_ListSelect) GetItems() []*Intent_Message_ListSelect_Item { + if m != nil { + return m.Items + } + return nil +} + +// An item in the list. +type Intent_Message_ListSelect_Item struct { + // Required. Additional information about this option. + Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` + // Required. The title of the list item. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The main text describing the item. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The image to display. 
+ Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_ListSelect_Item) Reset() { *m = Intent_Message_ListSelect_Item{} } +func (m *Intent_Message_ListSelect_Item) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_ListSelect_Item) ProtoMessage() {} +func (*Intent_Message_ListSelect_Item) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 10, 0} +} +func (m *Intent_Message_ListSelect_Item) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Unmarshal(m, b) +} +func (m *Intent_Message_ListSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_ListSelect_Item) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_ListSelect_Item.Merge(dst, src) +} +func (m *Intent_Message_ListSelect_Item) XXX_Size() int { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Size(m) +} +func (m *Intent_Message_ListSelect_Item) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_ListSelect_Item.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_ListSelect_Item proto.InternalMessageInfo + +func (m *Intent_Message_ListSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Intent_Message_ListSelect_Item) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_ListSelect_Item) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Intent_Message_ListSelect_Item) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +// The card for presenting a carousel of options to select from. +type Intent_Message_CarouselSelect struct { + // Required. Carousel items. 
+ Items []*Intent_Message_CarouselSelect_Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_CarouselSelect) Reset() { *m = Intent_Message_CarouselSelect{} } +func (m *Intent_Message_CarouselSelect) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_CarouselSelect) ProtoMessage() {} +func (*Intent_Message_CarouselSelect) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 11} +} +func (m *Intent_Message_CarouselSelect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_CarouselSelect.Unmarshal(m, b) +} +func (m *Intent_Message_CarouselSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_CarouselSelect.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_CarouselSelect) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_CarouselSelect.Merge(dst, src) +} +func (m *Intent_Message_CarouselSelect) XXX_Size() int { + return xxx_messageInfo_Intent_Message_CarouselSelect.Size(m) +} +func (m *Intent_Message_CarouselSelect) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_CarouselSelect.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_CarouselSelect proto.InternalMessageInfo + +func (m *Intent_Message_CarouselSelect) GetItems() []*Intent_Message_CarouselSelect_Item { + if m != nil { + return m.Items + } + return nil +} + +// An item in the carousel. +type Intent_Message_CarouselSelect_Item struct { + // Required. Additional info about the option item. + Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` + // Required. Title of the carousel item. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The body text of the card. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The image to display. 
+ Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_CarouselSelect_Item) Reset() { *m = Intent_Message_CarouselSelect_Item{} } +func (m *Intent_Message_CarouselSelect_Item) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_CarouselSelect_Item) ProtoMessage() {} +func (*Intent_Message_CarouselSelect_Item) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 11, 0} +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Unmarshal(m, b) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_CarouselSelect_Item) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_CarouselSelect_Item.Merge(dst, src) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Size() int { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Size(m) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_CarouselSelect_Item.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_CarouselSelect_Item proto.InternalMessageInfo + +func (m *Intent_Message_CarouselSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Intent_Message_CarouselSelect_Item) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_CarouselSelect_Item) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Intent_Message_CarouselSelect_Item) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +// Additional info about the select item for when it is triggered in a +// dialog. +type Intent_Message_SelectItemInfo struct { + // Required. A unique key that will be sent back to the agent if this + // response is given. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // Optional. A list of synonyms that can also be used to trigger this + // item in dialog. 
+ Synonyms []string `protobuf:"bytes,2,rep,name=synonyms,proto3" json:"synonyms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SelectItemInfo) Reset() { *m = Intent_Message_SelectItemInfo{} } +func (m *Intent_Message_SelectItemInfo) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SelectItemInfo) ProtoMessage() {} +func (*Intent_Message_SelectItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 2, 12} +} +func (m *Intent_Message_SelectItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Unmarshal(m, b) +} +func (m *Intent_Message_SelectItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SelectItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SelectItemInfo.Merge(dst, src) +} +func (m *Intent_Message_SelectItemInfo) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Size(m) +} +func (m *Intent_Message_SelectItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SelectItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SelectItemInfo proto.InternalMessageInfo + +func (m *Intent_Message_SelectItemInfo) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Intent_Message_SelectItemInfo) GetSynonyms() []string { + if m != nil { + return m.Synonyms + } + return nil +} + +// Represents a single followup intent in the chain. +type Intent_FollowupIntentInfo struct { + // The unique identifier of the followup intent. + // Format: `projects//agent/intents/`. + FollowupIntentName string `protobuf:"bytes,1,opt,name=followup_intent_name,json=followupIntentName,proto3" json:"followup_intent_name,omitempty"` + // The unique identifier of the followup intent's parent. + // Format: `projects//agent/intents/`. 
+ ParentFollowupIntentName string `protobuf:"bytes,2,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_FollowupIntentInfo) Reset() { *m = Intent_FollowupIntentInfo{} } +func (m *Intent_FollowupIntentInfo) String() string { return proto.CompactTextString(m) } +func (*Intent_FollowupIntentInfo) ProtoMessage() {} +func (*Intent_FollowupIntentInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{0, 3} +} +func (m *Intent_FollowupIntentInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_FollowupIntentInfo.Unmarshal(m, b) +} +func (m *Intent_FollowupIntentInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_FollowupIntentInfo.Marshal(b, m, deterministic) +} +func (dst *Intent_FollowupIntentInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_FollowupIntentInfo.Merge(dst, src) +} +func (m *Intent_FollowupIntentInfo) XXX_Size() int { + return xxx_messageInfo_Intent_FollowupIntentInfo.Size(m) +} +func (m *Intent_FollowupIntentInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_FollowupIntentInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_FollowupIntentInfo proto.InternalMessageInfo + +func (m *Intent_FollowupIntentInfo) GetFollowupIntentName() string { + if m != nil { + return m.FollowupIntentName + } + return "" +} + +func (m *Intent_FollowupIntentInfo) GetParentFollowupIntentName() string { + if m != nil { + return m.ParentFollowupIntentName + } + return "" +} + +// The request message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents]. +type ListIntentsRequest struct { + // Required. The agent to list all intents from. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language to list training phrases, parameters and rich + // messages for. If not specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. + IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIntentsRequest) Reset() { *m = ListIntentsRequest{} } +func (m *ListIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListIntentsRequest) ProtoMessage() {} +func (*ListIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{1} +} +func (m *ListIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIntentsRequest.Unmarshal(m, b) +} +func (m *ListIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIntentsRequest.Merge(dst, src) +} +func (m *ListIntentsRequest) XXX_Size() int { + return xxx_messageInfo_ListIntentsRequest.Size(m) +} +func (m *ListIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIntentsRequest proto.InternalMessageInfo + +func (m *ListIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListIntentsRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *ListIntentsRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +func (m *ListIntentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIntentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Intents.ListIntents][google.cloud.dialogflow.v2.Intents.ListIntents]. +type ListIntentsResponse struct { + // The list of agent intents. There will be a maximum number of items + // returned based on the page_size field in the request. + Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIntentsResponse) Reset() { *m = ListIntentsResponse{} } +func (m *ListIntentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListIntentsResponse) ProtoMessage() {} +func (*ListIntentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{2} +} +func (m *ListIntentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIntentsResponse.Unmarshal(m, b) +} +func (m *ListIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIntentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListIntentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIntentsResponse.Merge(dst, src) +} +func (m *ListIntentsResponse) XXX_Size() int { + return xxx_messageInfo_ListIntentsResponse.Size(m) +} +func (m *ListIntentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIntentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIntentsResponse proto.InternalMessageInfo + +func (m *ListIntentsResponse) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +func (m *ListIntentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Intents.GetIntent][google.cloud.dialogflow.v2.Intents.GetIntent]. +type GetIntentRequest struct { + // Required. The name of the intent. + // Format: `projects//agent/intents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The language to retrieve training phrases, parameters and rich + // messages for. If not specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIntentRequest) Reset() { *m = GetIntentRequest{} } +func (m *GetIntentRequest) String() string { return proto.CompactTextString(m) } +func (*GetIntentRequest) ProtoMessage() {} +func (*GetIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{3} +} +func (m *GetIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIntentRequest.Unmarshal(m, b) +} +func (m *GetIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIntentRequest.Marshal(b, m, deterministic) +} +func (dst *GetIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIntentRequest.Merge(dst, src) +} +func (m *GetIntentRequest) XXX_Size() int { + return xxx_messageInfo_GetIntentRequest.Size(m) +} +func (m *GetIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIntentRequest proto.InternalMessageInfo + +func (m *GetIntentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *GetIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.CreateIntent][google.cloud.dialogflow.v2.Intents.CreateIntent]. +type CreateIntentRequest struct { + // Required. The agent to create a intent for. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The intent to create. + Intent *Intent `protobuf:"bytes,2,opt,name=intent,proto3" json:"intent,omitempty"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intent`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIntentRequest) Reset() { *m = CreateIntentRequest{} } +func (m *CreateIntentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIntentRequest) ProtoMessage() {} +func (*CreateIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{4} +} +func (m *CreateIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIntentRequest.Unmarshal(m, b) +} +func (m *CreateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIntentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIntentRequest.Merge(dst, src) +} +func (m *CreateIntentRequest) XXX_Size() int { + return xxx_messageInfo_CreateIntentRequest.Size(m) +} +func (m *CreateIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIntentRequest proto.InternalMessageInfo + +func (m *CreateIntentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateIntentRequest) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *CreateIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *CreateIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.UpdateIntent][google.cloud.dialogflow.v2.Intents.UpdateIntent]. +type UpdateIntentRequest struct { + // Required. The intent to update. + Intent *Intent `protobuf:"bytes,1,opt,name=intent,proto3" json:"intent,omitempty"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intent`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateIntentRequest) Reset() { *m = UpdateIntentRequest{} } +func (m *UpdateIntentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateIntentRequest) ProtoMessage() {} +func (*UpdateIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{5} +} +func (m *UpdateIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateIntentRequest.Unmarshal(m, b) +} +func (m *UpdateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateIntentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateIntentRequest.Merge(dst, src) +} +func (m *UpdateIntentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateIntentRequest.Size(m) +} +func (m *UpdateIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateIntentRequest proto.InternalMessageInfo + +func (m *UpdateIntentRequest) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *UpdateIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *UpdateIntentRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.DeleteIntent][google.cloud.dialogflow.v2.Intents.DeleteIntent]. +type DeleteIntentRequest struct { + // Required. The name of the intent to delete. If this intent has direct or + // indirect followup intents, we also delete them. + // Format: `projects//agent/intents/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteIntentRequest) Reset() { *m = DeleteIntentRequest{} } +func (m *DeleteIntentRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteIntentRequest) ProtoMessage() {} +func (*DeleteIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{6} +} +func (m *DeleteIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteIntentRequest.Unmarshal(m, b) +} +func (m *DeleteIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteIntentRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteIntentRequest.Merge(dst, src) +} +func (m *DeleteIntentRequest) XXX_Size() int { + return xxx_messageInfo_DeleteIntentRequest.Size(m) +} +func (m *DeleteIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteIntentRequest proto.InternalMessageInfo + +func (m *DeleteIntentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents]. +type BatchUpdateIntentsRequest struct { + // Required. The name of the agent to update or create intents in. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The source of the intent batch. + // + // Types that are valid to be assigned to IntentBatch: + // *BatchUpdateIntentsRequest_IntentBatchUri + // *BatchUpdateIntentsRequest_IntentBatchInline + IntentBatch isBatchUpdateIntentsRequest_IntentBatch `protobuf_oneof:"intent_batch"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intents`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,6,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateIntentsRequest) Reset() { *m = BatchUpdateIntentsRequest{} } +func (m *BatchUpdateIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateIntentsRequest) ProtoMessage() {} +func (*BatchUpdateIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{7} +} +func (m *BatchUpdateIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateIntentsRequest.Unmarshal(m, b) +} +func (m *BatchUpdateIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateIntentsRequest.Merge(dst, src) +} +func (m *BatchUpdateIntentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateIntentsRequest.Size(m) +} +func (m *BatchUpdateIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateIntentsRequest proto.InternalMessageInfo + +func (m *BatchUpdateIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isBatchUpdateIntentsRequest_IntentBatch interface { + isBatchUpdateIntentsRequest_IntentBatch() +} + +type BatchUpdateIntentsRequest_IntentBatchUri struct { + IntentBatchUri string `protobuf:"bytes,2,opt,name=intent_batch_uri,json=intentBatchUri,proto3,oneof"` +} + +type BatchUpdateIntentsRequest_IntentBatchInline struct { + IntentBatchInline *IntentBatch `protobuf:"bytes,3,opt,name=intent_batch_inline,json=intentBatchInline,proto3,oneof"` +} + +func (*BatchUpdateIntentsRequest_IntentBatchUri) isBatchUpdateIntentsRequest_IntentBatch() {} + +func (*BatchUpdateIntentsRequest_IntentBatchInline) isBatchUpdateIntentsRequest_IntentBatch() {} + +func (m *BatchUpdateIntentsRequest) GetIntentBatch() isBatchUpdateIntentsRequest_IntentBatch { + if m != nil { + return m.IntentBatch + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetIntentBatchUri() string { + if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchUri); ok { + return x.IntentBatchUri + } + return "" +} + +func (m *BatchUpdateIntentsRequest) GetIntentBatchInline() *IntentBatch { + if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchInline); ok { + return x.IntentBatchInline + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateIntentsRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BatchUpdateIntentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchUpdateIntentsRequest_OneofMarshaler, _BatchUpdateIntentsRequest_OneofUnmarshaler, _BatchUpdateIntentsRequest_OneofSizer, []interface{}{ + (*BatchUpdateIntentsRequest_IntentBatchUri)(nil), + (*BatchUpdateIntentsRequest_IntentBatchInline)(nil), + } +} + +func _BatchUpdateIntentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchUpdateIntentsRequest) + // intent_batch + switch x := m.IntentBatch.(type) { + case *BatchUpdateIntentsRequest_IntentBatchUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.IntentBatchUri) + case *BatchUpdateIntentsRequest_IntentBatchInline: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IntentBatchInline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchUpdateIntentsRequest.IntentBatch has unexpected type %T", x) + } + return nil +} + +func _BatchUpdateIntentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchUpdateIntentsRequest) + switch tag { + case 2: // intent_batch.intent_batch_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IntentBatch = &BatchUpdateIntentsRequest_IntentBatchUri{x} + return true, err + case 3: // intent_batch.intent_batch_inline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(IntentBatch) + err := b.DecodeMessage(msg) + m.IntentBatch = &BatchUpdateIntentsRequest_IntentBatchInline{msg} + return true, err + default: + return false, nil + } +} + +func _BatchUpdateIntentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchUpdateIntentsRequest) + // intent_batch + switch x := m.IntentBatch.(type) { + case *BatchUpdateIntentsRequest_IntentBatchUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.IntentBatchUri))) + n += len(x.IntentBatchUri) + case *BatchUpdateIntentsRequest_IntentBatchInline: + s := proto.Size(x.IntentBatchInline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2.Intents.BatchUpdateIntents]. +type BatchUpdateIntentsResponse struct { + // The collection of updated or created intents. 
+ Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateIntentsResponse) Reset() { *m = BatchUpdateIntentsResponse{} } +func (m *BatchUpdateIntentsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateIntentsResponse) ProtoMessage() {} +func (*BatchUpdateIntentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{8} +} +func (m *BatchUpdateIntentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateIntentsResponse.Unmarshal(m, b) +} +func (m *BatchUpdateIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateIntentsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateIntentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateIntentsResponse.Merge(dst, src) +} +func (m *BatchUpdateIntentsResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdateIntentsResponse.Size(m) +} +func (m *BatchUpdateIntentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateIntentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateIntentsResponse proto.InternalMessageInfo + +func (m *BatchUpdateIntentsResponse) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +// The request message for [Intents.BatchDeleteIntents][google.cloud.dialogflow.v2.Intents.BatchDeleteIntents]. +type BatchDeleteIntentsRequest struct { + // Required. The name of the agent to delete all entities types for. Format: + // `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The collection of intents to delete. Only intent `name` must be + // filled in. + Intents []*Intent `protobuf:"bytes,2,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteIntentsRequest) Reset() { *m = BatchDeleteIntentsRequest{} } +func (m *BatchDeleteIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteIntentsRequest) ProtoMessage() {} +func (*BatchDeleteIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{9} +} +func (m *BatchDeleteIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteIntentsRequest.Unmarshal(m, b) +} +func (m *BatchDeleteIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteIntentsRequest.Merge(dst, src) +} +func (m *BatchDeleteIntentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteIntentsRequest.Size(m) +} +func (m *BatchDeleteIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteIntentsRequest proto.InternalMessageInfo + +func (m *BatchDeleteIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteIntentsRequest) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +// This message is a wrapper around a collection of intents. 
+type IntentBatch struct { + // A collection of intents. + Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IntentBatch) Reset() { *m = IntentBatch{} } +func (m *IntentBatch) String() string { return proto.CompactTextString(m) } +func (*IntentBatch) ProtoMessage() {} +func (*IntentBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_79fab48477a8b612, []int{10} +} +func (m *IntentBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntentBatch.Unmarshal(m, b) +} +func (m *IntentBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntentBatch.Marshal(b, m, deterministic) +} +func (dst *IntentBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntentBatch.Merge(dst, src) +} +func (m *IntentBatch) XXX_Size() int { + return xxx_messageInfo_IntentBatch.Size(m) +} +func (m *IntentBatch) XXX_DiscardUnknown() { + xxx_messageInfo_IntentBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_IntentBatch proto.InternalMessageInfo + +func (m *IntentBatch) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +func init() { + proto.RegisterType((*Intent)(nil), "google.cloud.dialogflow.v2.Intent") + proto.RegisterType((*Intent_TrainingPhrase)(nil), "google.cloud.dialogflow.v2.Intent.TrainingPhrase") + proto.RegisterType((*Intent_TrainingPhrase_Part)(nil), "google.cloud.dialogflow.v2.Intent.TrainingPhrase.Part") + proto.RegisterType((*Intent_Parameter)(nil), "google.cloud.dialogflow.v2.Intent.Parameter") + proto.RegisterType((*Intent_Message)(nil), "google.cloud.dialogflow.v2.Intent.Message") + proto.RegisterType((*Intent_Message_Text)(nil), "google.cloud.dialogflow.v2.Intent.Message.Text") + proto.RegisterType((*Intent_Message_Image)(nil), "google.cloud.dialogflow.v2.Intent.Message.Image") + proto.RegisterType((*Intent_Message_QuickReplies)(nil), "google.cloud.dialogflow.v2.Intent.Message.QuickReplies") + proto.RegisterType((*Intent_Message_Card)(nil), "google.cloud.dialogflow.v2.Intent.Message.Card") + proto.RegisterType((*Intent_Message_Card_Button)(nil), "google.cloud.dialogflow.v2.Intent.Message.Card.Button") + proto.RegisterType((*Intent_Message_SimpleResponse)(nil), "google.cloud.dialogflow.v2.Intent.Message.SimpleResponse") + proto.RegisterType((*Intent_Message_SimpleResponses)(nil), "google.cloud.dialogflow.v2.Intent.Message.SimpleResponses") + proto.RegisterType((*Intent_Message_BasicCard)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard") + proto.RegisterType((*Intent_Message_BasicCard_Button)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard.Button") + proto.RegisterType((*Intent_Message_BasicCard_Button_OpenUriAction)(nil), "google.cloud.dialogflow.v2.Intent.Message.BasicCard.Button.OpenUriAction") + proto.RegisterType((*Intent_Message_Suggestion)(nil), "google.cloud.dialogflow.v2.Intent.Message.Suggestion") + proto.RegisterType((*Intent_Message_Suggestions)(nil), "google.cloud.dialogflow.v2.Intent.Message.Suggestions") + proto.RegisterType((*Intent_Message_LinkOutSuggestion)(nil), "google.cloud.dialogflow.v2.Intent.Message.LinkOutSuggestion") + proto.RegisterType((*Intent_Message_ListSelect)(nil), "google.cloud.dialogflow.v2.Intent.Message.ListSelect") + proto.RegisterType((*Intent_Message_ListSelect_Item)(nil), "google.cloud.dialogflow.v2.Intent.Message.ListSelect.Item") + 
proto.RegisterType((*Intent_Message_CarouselSelect)(nil), "google.cloud.dialogflow.v2.Intent.Message.CarouselSelect") + proto.RegisterType((*Intent_Message_CarouselSelect_Item)(nil), "google.cloud.dialogflow.v2.Intent.Message.CarouselSelect.Item") + proto.RegisterType((*Intent_Message_SelectItemInfo)(nil), "google.cloud.dialogflow.v2.Intent.Message.SelectItemInfo") + proto.RegisterType((*Intent_FollowupIntentInfo)(nil), "google.cloud.dialogflow.v2.Intent.FollowupIntentInfo") + proto.RegisterType((*ListIntentsRequest)(nil), "google.cloud.dialogflow.v2.ListIntentsRequest") + proto.RegisterType((*ListIntentsResponse)(nil), "google.cloud.dialogflow.v2.ListIntentsResponse") + proto.RegisterType((*GetIntentRequest)(nil), "google.cloud.dialogflow.v2.GetIntentRequest") + proto.RegisterType((*CreateIntentRequest)(nil), "google.cloud.dialogflow.v2.CreateIntentRequest") + proto.RegisterType((*UpdateIntentRequest)(nil), "google.cloud.dialogflow.v2.UpdateIntentRequest") + proto.RegisterType((*DeleteIntentRequest)(nil), "google.cloud.dialogflow.v2.DeleteIntentRequest") + proto.RegisterType((*BatchUpdateIntentsRequest)(nil), "google.cloud.dialogflow.v2.BatchUpdateIntentsRequest") + proto.RegisterType((*BatchUpdateIntentsResponse)(nil), "google.cloud.dialogflow.v2.BatchUpdateIntentsResponse") + proto.RegisterType((*BatchDeleteIntentsRequest)(nil), "google.cloud.dialogflow.v2.BatchDeleteIntentsRequest") + proto.RegisterType((*IntentBatch)(nil), "google.cloud.dialogflow.v2.IntentBatch") + proto.RegisterEnum("google.cloud.dialogflow.v2.IntentView", IntentView_name, IntentView_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_WebhookState", Intent_WebhookState_name, Intent_WebhookState_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_TrainingPhrase_Type", Intent_TrainingPhrase_Type_name, Intent_TrainingPhrase_Type_value) + proto.RegisterEnum("google.cloud.dialogflow.v2.Intent_Message_Platform", Intent_Message_Platform_name, Intent_Message_Platform_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// IntentsClient is the client API for Intents service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type IntentsClient interface { + // Returns the list of all intents in the specified agent. + ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) + // Retrieves the specified intent. + GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Creates an intent in the specified agent. + CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Updates the specified intent. + UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Deletes the specified intent and its direct or indirect followup intents. + DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates/Creates multiple intents in the specified agent. 
+ // + // Operation + BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes intents in the specified agent. + // + // Operation + BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type intentsClient struct { + cc *grpc.ClientConn +} + +func NewIntentsClient(cc *grpc.ClientConn) IntentsClient { + return &intentsClient{cc} +} + +func (c *intentsClient) ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) { + out := new(ListIntentsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/ListIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/GetIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/CreateIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/UpdateIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/DeleteIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/BatchUpdateIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Intents/BatchDeleteIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// IntentsServer is the server API for Intents service. +type IntentsServer interface { + // Returns the list of all intents in the specified agent. + ListIntents(context.Context, *ListIntentsRequest) (*ListIntentsResponse, error) + // Retrieves the specified intent. + GetIntent(context.Context, *GetIntentRequest) (*Intent, error) + // Creates an intent in the specified agent. + CreateIntent(context.Context, *CreateIntentRequest) (*Intent, error) + // Updates the specified intent. + UpdateIntent(context.Context, *UpdateIntentRequest) (*Intent, error) + // Deletes the specified intent and its direct or indirect followup intents. + DeleteIntent(context.Context, *DeleteIntentRequest) (*empty.Empty, error) + // Updates/Creates multiple intents in the specified agent. 
+ // + // Operation + BatchUpdateIntents(context.Context, *BatchUpdateIntentsRequest) (*longrunning.Operation, error) + // Deletes intents in the specified agent. + // + // Operation + BatchDeleteIntents(context.Context, *BatchDeleteIntentsRequest) (*longrunning.Operation, error) +} + +func RegisterIntentsServer(s *grpc.Server, srv IntentsServer) { + s.RegisterService(&_Intents_serviceDesc, srv) +} + +func _Intents_ListIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).ListIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/ListIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).ListIntents(ctx, req.(*ListIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_GetIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).GetIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/GetIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).GetIntent(ctx, req.(*GetIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_CreateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).CreateIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/CreateIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).CreateIntent(ctx, req.(*CreateIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_UpdateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).UpdateIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/UpdateIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).UpdateIntent(ctx, req.(*UpdateIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_DeleteIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).DeleteIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/DeleteIntent", + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(IntentsServer).DeleteIntent(ctx, req.(*DeleteIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_BatchUpdateIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).BatchUpdateIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/BatchUpdateIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).BatchUpdateIntents(ctx, req.(*BatchUpdateIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_BatchDeleteIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).BatchDeleteIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Intents/BatchDeleteIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).BatchDeleteIntents(ctx, req.(*BatchDeleteIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Intents_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.Intents", + HandlerType: (*IntentsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListIntents", + Handler: _Intents_ListIntents_Handler, + }, + { + MethodName: "GetIntent", + Handler: _Intents_GetIntent_Handler, + }, + { + MethodName: "CreateIntent", + Handler: _Intents_CreateIntent_Handler, + }, + { + MethodName: "UpdateIntent", + Handler: _Intents_UpdateIntent_Handler, + }, + { + MethodName: "DeleteIntent", + Handler: _Intents_DeleteIntent_Handler, + }, + { + MethodName: "BatchUpdateIntents", + Handler: _Intents_BatchUpdateIntents_Handler, + }, + { + MethodName: "BatchDeleteIntents", + Handler: _Intents_BatchDeleteIntents_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2/intent.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/intent.proto", fileDescriptor_intent_79fab48477a8b612) +} + +var fileDescriptor_intent_79fab48477a8b612 = []byte{ + // 2583 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x5a, 0xcb, 0x73, 0x23, 0x47, + 0x19, 0xf7, 0xe8, 0x61, 0x4b, 0x9f, 0x64, 0x59, 0x6e, 0x6f, 0x36, 0xda, 0xc9, 0x03, 0x47, 0x21, + 0x89, 0xe3, 0x4a, 0xa4, 0x44, 0x21, 0x09, 0xd9, 0x4d, 0x42, 0xf9, 0x21, 0xdb, 0xc2, 0xb2, 0x25, + 0xc6, 0xf2, 0x6e, 0x92, 0xa2, 0x98, 0x1a, 0x6b, 0x5a, 0xda, 0xc1, 0xa3, 0x99, 0xd9, 0xe9, 0x9e, + 0x75, 0x94, 0x54, 0x2e, 0x54, 0x01, 0x07, 0xb8, 0xc1, 0x29, 0x55, 0x29, 0x0a, 0x4e, 0x14, 0x67, + 0x4e, 0x1c, 0xf8, 0x0b, 0xb8, 0x51, 0x3c, 0x0e, 0x5c, 0xa8, 0xe2, 0x5f, 0xa0, 0x8a, 0x03, 0x07, + 0xaa, 0x1f, 0x23, 0x8d, 0x1e, 0x6b, 0x4b, 0xde, 0x85, 0x0b, 0x37, 0xf5, 0xf7, 0x7e, 0x74, 0xff, + 0xbe, 0xee, 0xb1, 0xe1, 0x95, 0xae, 0xeb, 0x76, 0x6d, 0x5c, 0x6e, 0xdb, 0x6e, 0x60, 0x96, 0x4d, + 0xcb, 0xb0, 0xdd, 0x6e, 0xc7, 0x76, 0x2f, 0xca, 0x0f, 0x2b, 0x65, 0xcb, 0xa1, 0xd8, 0xa1, 0x25, + 0xcf, 0x77, 0xa9, 0x8b, 0x54, 0x21, 0x58, 
0xe2, 0x82, 0xa5, 0xa1, 0x60, 0xe9, 0x61, 0x45, 0x7d, + 0x56, 0x1a, 0x31, 0x3c, 0xab, 0x6c, 0x38, 0x8e, 0x4b, 0x0d, 0x6a, 0xb9, 0x0e, 0x11, 0x9a, 0xea, + 0xad, 0x08, 0xd7, 0xc7, 0xc4, 0x0d, 0xfc, 0x36, 0x96, 0xac, 0x8d, 0x4b, 0xbc, 0xb7, 0x5d, 0x87, + 0xe2, 0x4f, 0xa5, 0x7b, 0xf5, 0x45, 0x29, 0x69, 0xbb, 0x4e, 0xd7, 0x0f, 0x1c, 0xc7, 0x72, 0xba, + 0x65, 0xd7, 0xc3, 0xfe, 0x88, 0xa7, 0xe7, 0xa5, 0x10, 0x5f, 0x9d, 0x05, 0x9d, 0xb2, 0x19, 0x08, + 0x01, 0xc9, 0x7f, 0x66, 0x9c, 0x8f, 0x7b, 0x1e, 0xed, 0x4b, 0xe6, 0xfa, 0x38, 0xb3, 0x63, 0x61, + 0xdb, 0xd4, 0x7b, 0x06, 0x39, 0x97, 0x12, 0xcf, 0x8e, 0x4b, 0x10, 0xea, 0x07, 0x6d, 0x19, 0x61, + 0xf1, 0x6f, 0x1b, 0xb0, 0x58, 0xe3, 0x15, 0x43, 0x08, 0x12, 0x8e, 0xd1, 0xc3, 0x05, 0x65, 0x5d, + 0xd9, 0x48, 0x6b, 0xfc, 0x37, 0x7a, 0x01, 0xb2, 0xa6, 0x45, 0x3c, 0xdb, 0xe8, 0xeb, 0x9c, 0x17, + 0xe3, 0xbc, 0x8c, 0xa4, 0x1d, 0x33, 0x91, 0x16, 0x2c, 0x5f, 0xe0, 0xb3, 0xfb, 0xae, 0x7b, 0xae, + 0x13, 0x6a, 0x50, 0x5c, 0x58, 0x5c, 0x57, 0x36, 0x72, 0x95, 0x72, 0xe9, 0xd1, 0xa5, 0x2f, 0x09, + 0x8f, 0xa5, 0x7b, 0x42, 0xef, 0x84, 0xa9, 0x69, 0xd9, 0x8b, 0xc8, 0x0a, 0xa9, 0x90, 0xf2, 0x7c, + 0xcb, 0xf5, 0x2d, 0xda, 0x2f, 0xc4, 0xd7, 0x95, 0x8d, 0xa4, 0x36, 0x58, 0xa3, 0xaf, 0x41, 0xc6, + 0x22, 0x7a, 0xc7, 0xb0, 0xed, 0x33, 0xa3, 0x7d, 0x5e, 0x48, 0xac, 0x2b, 0x1b, 0x29, 0x0d, 0x2c, + 0xb2, 0x27, 0x29, 0x4c, 0xa0, 0x67, 0xeb, 0xa6, 0x45, 0x8c, 0x33, 0x1b, 0x9b, 0x85, 0x35, 0x21, + 0xd0, 0xb3, 0x77, 0x25, 0x05, 0x95, 0x60, 0xcd, 0x72, 0xbc, 0x80, 0xea, 0xb2, 0x5d, 0x3c, 0x39, + 0x52, 0x58, 0x5a, 0x8f, 0x6f, 0xa4, 0xb5, 0x55, 0xce, 0xda, 0x11, 0x1c, 0x96, 0x22, 0x41, 0x37, + 0x61, 0x11, 0x3f, 0xc4, 0x0e, 0x25, 0x85, 0x14, 0x17, 0x91, 0x2b, 0xf4, 0x5d, 0xc8, 0x53, 0xdf, + 0xb0, 0x58, 0x5f, 0x75, 0xef, 0xbe, 0x6f, 0x10, 0x4c, 0x0a, 0xe9, 0xf5, 0xf8, 0x46, 0xa6, 0xf2, + 0xe6, 0x0c, 0xe9, 0xb7, 0xa4, 0x6a, 0x93, 0x6b, 0x6a, 0x2b, 0x74, 0x64, 0xcd, 0xbd, 0x1a, 0x6d, + 0xb6, 0x11, 0x0a, 0xc0, 0xcb, 0x2e, 0x57, 0xa8, 0x0e, 0x2b, 0x6e, 0x40, 0x23, 0xe1, 0x93, 0x42, + 0x86, 0x3b, 0x7d, 0xf1, 0x32, 0xa7, 0x32, 0x21, 0x2d, 0x27, 0x74, 0xe5, 0x92, 0xa0, 0x97, 0x20, + 0xe7, 0x63, 0x82, 0x23, 0xc6, 0xb2, 0xbc, 0x5e, 0xcb, 0x9c, 0x3a, 0x10, 0xab, 0x03, 0x78, 0x86, + 0x6f, 0xf4, 0x30, 0xc5, 0x3e, 0x29, 0x2c, 0x73, 0x7f, 0xaf, 0xcd, 0x90, 0x64, 0x33, 0x54, 0xd2, + 0x22, 0xfa, 0x68, 0x0f, 0x52, 0x3d, 0x4c, 0x88, 0xd1, 0xc5, 0xa4, 0x90, 0xe3, 0xb6, 0x36, 0x67, + 0xb0, 0x75, 0x24, 0x54, 0xb4, 0x81, 0x2e, 0x7a, 0x00, 0xaa, 0x89, 0x3b, 0x46, 0x60, 0x53, 0xdd, + 0xc7, 0xc4, 0x73, 0x1d, 0x82, 0x75, 0xcf, 0x36, 0x68, 0xc7, 0xf5, 0x7b, 0xa4, 0xb0, 0xb2, 0x1e, + 0xdf, 0xc8, 0x55, 0xde, 0x9a, 0xdd, 0x72, 0xa9, 0x29, 0x75, 0xb5, 0x82, 0x34, 0xab, 0x49, 0xab, + 0x21, 0x83, 0xa0, 0xf7, 0xe0, 0x96, 0xef, 0xba, 0x54, 0xef, 0xb8, 0xb6, 0xed, 0x5e, 0x04, 0x9e, + 0x2e, 0x00, 0x47, 0x9c, 0x8f, 0x3c, 0x6f, 0xd4, 0x4d, 0x26, 0xb0, 0x27, 0xf9, 0xc2, 0x03, 0x3f, + 0x2a, 0x1f, 0xc0, 0x33, 0x9e, 0xe1, 0x33, 0xe1, 0xa9, 0xca, 0xab, 0x5c, 0xb9, 0x20, 0x44, 0xa6, + 0xa8, 0x77, 0xe1, 0xc6, 0xb8, 0x9e, 0xe5, 0x74, 0xdc, 0x02, 0xe2, 0x05, 0x7c, 0x7b, 0x86, 0x34, + 0x47, 0x8d, 0xd6, 0x9c, 0x8e, 0xab, 0xa1, 0xce, 0x04, 0x4d, 0xfd, 0x2a, 0x0e, 0xb9, 0xd1, 0xcd, + 0x39, 0x15, 0x1c, 0xbe, 0x0d, 0x09, 0xda, 0xf7, 0x04, 0x28, 0xe4, 0x2a, 0xef, 0xcc, 0xbd, 0xe3, + 0x4b, 0xad, 0xbe, 0x87, 0x35, 0x6e, 0x03, 0xd5, 0x21, 0xe9, 0x19, 0x3e, 0x25, 0x85, 0x38, 0x4f, + 0xe6, 0x1a, 0xc6, 0x9a, 0x86, 0x4f, 0x35, 0x61, 0x04, 0x6d, 0xc2, 0x2a, 0xb5, 0x7a, 0x98, 0xe8, + 0x86, 0x69, 0x62, 0x53, 0x6f, 0xbb, 0x81, 0x43, 0x39, 0x4e, 0x24, 
0xb5, 0x15, 0xce, 0xd8, 0x62, + 0xf4, 0x1d, 0x46, 0x56, 0x29, 0x24, 0x98, 0x2a, 0xcb, 0x90, 0xed, 0xf4, 0x30, 0x43, 0xf6, 0x9b, + 0x01, 0x09, 0x76, 0xa8, 0x45, 0xfb, 0xfa, 0x20, 0xd1, 0xb4, 0x06, 0x82, 0xc4, 0x82, 0x47, 0x37, + 0x20, 0x69, 0xd8, 0x96, 0x41, 0x38, 0x46, 0xa5, 0x35, 0xb1, 0x60, 0xa8, 0x19, 0x10, 0xec, 0xeb, + 0x26, 0xee, 0x58, 0x0e, 0x36, 0x25, 0x42, 0x65, 0x18, 0x6d, 0x57, 0x90, 0x8a, 0x77, 0x20, 0x21, + 0x0d, 0xe4, 0x5b, 0x1f, 0x37, 0xab, 0xfa, 0xe9, 0xf1, 0x49, 0xb3, 0xba, 0x53, 0xdb, 0xab, 0x55, + 0x77, 0xf3, 0x0b, 0x28, 0x03, 0x4b, 0xd5, 0x8f, 0xb6, 0x8e, 0x9a, 0xf5, 0x6a, 0x5e, 0x41, 0x79, + 0x48, 0xb5, 0xaa, 0x47, 0xcd, 0xfa, 0x56, 0xab, 0x9a, 0x8f, 0xa9, 0xb1, 0x94, 0xa2, 0xfe, 0x38, + 0x06, 0xe9, 0xc1, 0xb9, 0xba, 0x2e, 0x6e, 0xdf, 0x80, 0xe4, 0x43, 0xc3, 0x0e, 0x70, 0x18, 0x3a, + 0x5f, 0xa0, 0x17, 0x61, 0x39, 0x3c, 0x50, 0x82, 0x9b, 0xe0, 0xdc, 0xac, 0x24, 0xde, 0xe5, 0x42, + 0xef, 0x42, 0x21, 0x52, 0x16, 0x7d, 0xc4, 0x53, 0x92, 0xcb, 0x3f, 0x35, 0xac, 0xd1, 0x6e, 0xc4, + 0xe7, 0xb3, 0x90, 0xee, 0x19, 0x8e, 0x69, 0x50, 0xd7, 0xef, 0xf3, 0x39, 0x91, 0xd2, 0x86, 0x04, + 0x54, 0x80, 0x25, 0xcf, 0x77, 0x7b, 0x1e, 0x0d, 0x91, 0x38, 0x5c, 0xa2, 0xa7, 0x61, 0xc9, 0x22, + 0xba, 0x6d, 0x11, 0x5a, 0x48, 0x71, 0xad, 0x45, 0x8b, 0xd4, 0x2d, 0x42, 0xd5, 0x5f, 0xdc, 0x82, + 0x25, 0x79, 0x76, 0x51, 0x35, 0xd2, 0xc0, 0xcc, 0x4c, 0xf3, 0x27, 0x3c, 0xf5, 0x2d, 0xfc, 0x29, + 0x3d, 0x58, 0x90, 0x3d, 0x3f, 0x80, 0xa4, 0xd5, 0x33, 0xba, 0xa2, 0x66, 0x99, 0xca, 0x1b, 0x73, + 0xd8, 0xa9, 0x31, 0xbd, 0x83, 0x05, 0x4d, 0x18, 0x40, 0xdf, 0x83, 0xe5, 0x07, 0x81, 0xd5, 0x3e, + 0xd7, 0x7d, 0xec, 0xd9, 0x16, 0x16, 0x9b, 0x24, 0x53, 0x79, 0x77, 0x0e, 0x8b, 0xdf, 0x61, 0xfa, + 0x9a, 0x50, 0x3f, 0x58, 0xd0, 0xb2, 0x0f, 0x22, 0x6b, 0x96, 0x70, 0xdb, 0xf0, 0xc5, 0xf6, 0x9a, + 0x2f, 0xe1, 0x1d, 0xc3, 0x37, 0x59, 0xc2, 0x4c, 0x1d, 0xbd, 0x05, 0x4b, 0x9e, 0xd1, 0xb7, 0x5d, + 0xc3, 0xe4, 0xcd, 0xcb, 0x54, 0x9e, 0x0e, 0x2d, 0x85, 0x57, 0x86, 0xd2, 0x09, 0xbf, 0x32, 0x1c, + 0x2c, 0x68, 0xa1, 0x24, 0xea, 0x42, 0x9e, 0x58, 0x3d, 0xcf, 0xc6, 0x03, 0xdc, 0x65, 0x4d, 0x63, + 0xda, 0xb7, 0xe7, 0x88, 0xe3, 0x84, 0x9b, 0x08, 0x31, 0x96, 0x65, 0xb8, 0x42, 0x46, 0x49, 0xe8, + 0x14, 0xe0, 0xcc, 0x20, 0x56, 0x5b, 0xe7, 0xa9, 0xa6, 0xb8, 0x8b, 0x6f, 0xcc, 0xe1, 0x62, 0x9b, + 0x29, 0xcb, 0x7c, 0xd3, 0x67, 0xe1, 0x02, 0x7d, 0x02, 0x19, 0x12, 0x74, 0xbb, 0x98, 0xf0, 0x9b, + 0x58, 0x21, 0xcd, 0xed, 0xbe, 0x33, 0x4f, 0xe8, 0x43, 0xed, 0x83, 0x05, 0x2d, 0x6a, 0x0c, 0x39, + 0xb0, 0x66, 0x5b, 0xce, 0xb9, 0xee, 0x06, 0x54, 0x1f, 0xd2, 0xf9, 0x10, 0xcf, 0x54, 0xde, 0x9f, + 0xc3, 0x47, 0xdd, 0x72, 0xce, 0x1b, 0x01, 0x1d, 0xba, 0x3a, 0x58, 0xd0, 0x56, 0xed, 0x71, 0x22, + 0xfa, 0x08, 0x32, 0xec, 0x68, 0xe8, 0x04, 0xdb, 0xb8, 0x4d, 0x0b, 0x19, 0xee, 0xe7, 0xed, 0xb9, + 0xfc, 0x10, 0x7a, 0xc2, 0x95, 0x0f, 0x16, 0x34, 0xb0, 0x07, 0x2b, 0x64, 0xc2, 0x4a, 0xdb, 0xf0, + 0xdd, 0x80, 0x60, 0x3b, 0xb4, 0x9e, 0xe5, 0xd6, 0xdf, 0x9b, 0x6f, 0xb3, 0x71, 0x0b, 0x03, 0x0f, + 0xb9, 0xf6, 0x08, 0x05, 0x35, 0x20, 0x15, 0xce, 0x6c, 0x79, 0x79, 0xbc, 0xd6, 0xc8, 0x1e, 0x18, + 0x51, 0x55, 0x48, 0xb0, 0x23, 0x1d, 0x81, 0xf4, 0x78, 0x08, 0xe9, 0xea, 0x09, 0x24, 0xf9, 0x31, + 0x45, 0xcf, 0x40, 0x9a, 0x1f, 0x53, 0x3d, 0xf0, 0x2d, 0x89, 0x9d, 0x29, 0x4e, 0x38, 0xf5, 0x2d, + 0xf4, 0x3a, 0x20, 0xa3, 0xdd, 0xc6, 0x84, 0x58, 0x67, 0x96, 0xcd, 0x81, 0x8e, 0xd9, 0x11, 0x28, + 0xba, 0x3a, 0xc2, 0x61, 0x8e, 0xd4, 0x1a, 0x64, 0xa3, 0x27, 0x95, 0x61, 0x2b, 0xb5, 0xa8, 0x1d, + 0x62, 0xb2, 0x58, 0x30, 0x6c, 0x1d, 0xc5, 0x83, 0x18, 0x8f, 0x6b, 0xe4, 0x50, 0xab, 0x7f, 
0x57, + 0x20, 0xc1, 0x77, 0xe8, 0x74, 0x1b, 0x2a, 0xa4, 0x48, 0x70, 0x26, 0x18, 0x22, 0x9c, 0xc1, 0x7a, + 0x34, 0xa3, 0xf8, 0x58, 0x46, 0x4d, 0x58, 0x3a, 0x0b, 0x28, 0x65, 0x9b, 0x3d, 0x31, 0xf3, 0x88, + 0x8d, 0xe2, 0x45, 0x69, 0x9b, 0xab, 0x6b, 0xa1, 0x19, 0xf5, 0x9b, 0xb0, 0x28, 0x48, 0x53, 0x47, + 0x27, 0xbb, 0xc0, 0xbb, 0x84, 0xf2, 0x1b, 0xba, 0x0c, 0x34, 0x5c, 0xab, 0x3d, 0xc8, 0x8d, 0x9e, + 0x7c, 0xf4, 0x75, 0xc8, 0xf1, 0x7b, 0x38, 0x75, 0x75, 0xe2, 0x61, 0xdc, 0xbe, 0x2f, 0x6d, 0x65, + 0x19, 0xb5, 0xe5, 0x9e, 0x70, 0x1a, 0xf3, 0x43, 0x48, 0xcf, 0x96, 0xf6, 0xf8, 0xef, 0xe8, 0xa4, + 0xe3, 0x31, 0xc4, 0x47, 0x26, 0x1d, 0xef, 0xce, 0x05, 0xac, 0x8c, 0x01, 0x0d, 0x32, 0xa7, 0xc0, + 0x97, 0xc2, 0xcb, 0xf2, 0xde, 0xb5, 0xe1, 0x6b, 0x02, 0xbb, 0xd4, 0xdf, 0xc5, 0x21, 0x3d, 0xc0, + 0x9f, 0x6b, 0x34, 0xf4, 0x25, 0xc8, 0xb1, 0xfd, 0x6c, 0x50, 0x8a, 0xcd, 0x68, 0x76, 0xcb, 0x03, + 0x2a, 0xdf, 0xe6, 0x7b, 0xe1, 0xc4, 0x4a, 0x5c, 0x6f, 0x62, 0x85, 0xf3, 0xea, 0x74, 0xb8, 0x45, + 0x92, 0xbc, 0x16, 0x77, 0xae, 0x83, 0xb3, 0x13, 0xfb, 0xe4, 0xb7, 0xca, 0x60, 0xa3, 0x4c, 0x2f, + 0xc1, 0x03, 0x58, 0x71, 0x3d, 0xec, 0xb0, 0x6d, 0xab, 0xcb, 0x07, 0x8f, 0x98, 0xbd, 0xb5, 0xc7, + 0xf0, 0x5f, 0x6a, 0x78, 0xd8, 0x39, 0xf5, 0xad, 0x2d, 0x6e, 0x50, 0x5b, 0x76, 0xa3, 0x4b, 0xf5, + 0x05, 0x58, 0x1e, 0xe1, 0xa3, 0x3c, 0xc4, 0x87, 0x38, 0xc0, 0x7e, 0xaa, 0x45, 0x80, 0x08, 0xc6, + 0x4e, 0x8d, 0x5c, 0xed, 0x40, 0x26, 0x32, 0x07, 0xd0, 0xbd, 0xd1, 0xa1, 0xa2, 0xcc, 0x7c, 0x2f, + 0x9f, 0x1c, 0x2a, 0x23, 0x13, 0x45, 0x6d, 0xc2, 0xea, 0xc4, 0x2c, 0x40, 0xaf, 0x42, 0xde, 0x64, + 0x3f, 0x1d, 0xfe, 0xb1, 0x40, 0x8f, 0xdc, 0x01, 0x57, 0x22, 0x74, 0x7e, 0xef, 0x92, 0xd9, 0xc5, + 0x86, 0xd9, 0xfd, 0x21, 0x06, 0x30, 0x84, 0xfd, 0x47, 0x34, 0xa6, 0x09, 0x49, 0x8b, 0xe2, 0x9e, + 0x00, 0xaa, 0xf9, 0x26, 0xfb, 0xd0, 0x76, 0xa9, 0x46, 0x71, 0x4f, 0x13, 0x86, 0xd4, 0x3f, 0x29, + 0x90, 0x60, 0x6b, 0x74, 0x04, 0x09, 0xfe, 0x76, 0x51, 0xe6, 0x1e, 0x27, 0xc2, 0x2a, 0x33, 0xc2, + 0xdf, 0x2f, 0xdc, 0xcc, 0x30, 0xfe, 0x58, 0x34, 0xfe, 0x75, 0xc8, 0x98, 0x98, 0xb4, 0x7d, 0xcb, + 0xe3, 0x9b, 0x2a, 0x84, 0x86, 0x21, 0xe9, 0x49, 0x1d, 0x1d, 0xf5, 0xf7, 0x31, 0xc8, 0x8d, 0xce, + 0x39, 0xd4, 0x0a, 0x8b, 0x27, 0xb6, 0xc1, 0x87, 0xd7, 0x9e, 0x98, 0xff, 0x17, 0x05, 0xfc, 0x10, + 0x72, 0xa3, 0x71, 0xb1, 0x3d, 0x7b, 0x8e, 0xfb, 0xe1, 0x89, 0x3c, 0xc7, 0x7d, 0x0e, 0x95, 0x7d, + 0xc7, 0x75, 0xfa, 0xbd, 0x70, 0x74, 0x0e, 0xd6, 0xc5, 0x9f, 0x28, 0x90, 0x0a, 0x6f, 0x02, 0xa8, + 0x00, 0x37, 0xd8, 0x73, 0x69, 0xaf, 0xa1, 0x1d, 0x8d, 0x3d, 0xac, 0xb2, 0x90, 0xda, 0xdb, 0xda, + 0xa9, 0x6e, 0x37, 0x1a, 0x87, 0x79, 0x05, 0xa5, 0x21, 0x79, 0x52, 0xdf, 0xda, 0x39, 0xcc, 0xc7, + 0x18, 0xa3, 0x55, 0xad, 0x57, 0xf7, 0xb5, 0xad, 0xa3, 0x7c, 0x1c, 0x2d, 0x41, 0xfc, 0xb0, 0x76, + 0x98, 0x4f, 0x70, 0x89, 0xc3, 0x8f, 0x9b, 0xd5, 0x7c, 0x12, 0xa5, 0x20, 0x51, 0xaf, 0x1d, 0x57, + 0xf3, 0x8b, 0x8c, 0x78, 0xb7, 0xb6, 0x5d, 0xd5, 0xf2, 0x4b, 0xe8, 0x29, 0x58, 0xdd, 0xda, 0x69, + 0xd5, 0x1a, 0xc7, 0x27, 0x7a, 0xe3, 0x58, 0xdf, 0x6f, 0x34, 0xf6, 0xeb, 0xd5, 0x7c, 0x6a, 0x3b, + 0x0d, 0x4b, 0xf2, 0x13, 0x85, 0xfa, 0x43, 0x05, 0xd0, 0xe4, 0xb3, 0x1b, 0xbd, 0x31, 0xf9, 0x96, + 0x8f, 0x1c, 0xe0, 0xb1, 0x47, 0xf9, 0x2c, 0x1f, 0x0f, 0x62, 0x97, 0x7f, 0x3c, 0x28, 0x52, 0xc8, + 0x46, 0x3f, 0xb7, 0xa1, 0xe7, 0xe0, 0xd6, 0xbd, 0xea, 0xf6, 0x41, 0xa3, 0x71, 0xa8, 0x9f, 0xb4, + 0xb6, 0x5a, 0xe3, 0x2f, 0xd0, 0x5b, 0xf0, 0xd4, 0x28, 0xbb, 0x7a, 0xbc, 0xb5, 0x5d, 0xaf, 0xee, + 0xe6, 0x15, 0xb4, 0x09, 0x2f, 0x4f, 0x65, 0xe9, 0x7b, 0x0d, 0x4d, 0x3f, 0xa9, 0x37, 0x5a, 0xfa, + 0x5e, 0xad, 0x5e, 
0xaf, 0x1d, 0xef, 0xe7, 0x63, 0xc5, 0x3f, 0x2b, 0x80, 0x18, 0x14, 0x88, 0x40, + 0x88, 0x86, 0x1f, 0x04, 0x98, 0x50, 0x74, 0x13, 0x16, 0x45, 0xa0, 0x32, 0x5f, 0xb9, 0x62, 0x37, + 0x24, 0xdb, 0x70, 0xba, 0x01, 0xbb, 0xc4, 0xb4, 0x5d, 0x33, 0xcc, 0x2a, 0x1b, 0x12, 0x77, 0x5c, + 0x13, 0xa3, 0x7d, 0xc8, 0xc8, 0xc4, 0x1f, 0x5a, 0xf8, 0x82, 0x6f, 0xca, 0x5c, 0xe5, 0xe5, 0xab, + 0x37, 0xde, 0x5d, 0x0b, 0x5f, 0x68, 0x60, 0x0d, 0x7e, 0xb3, 0xfb, 0x92, 0xc7, 0x3c, 0x11, 0xeb, + 0x33, 0x2c, 0xbf, 0x0e, 0xa4, 0x18, 0xe1, 0xc4, 0xfa, 0x8c, 0xd5, 0x07, 0x38, 0x93, 0xba, 0xe7, + 0xd8, 0x91, 0xaf, 0x5a, 0x2e, 0xde, 0x62, 0x84, 0xe2, 0xe7, 0xb0, 0x36, 0x92, 0x97, 0xbc, 0xc7, + 0xbc, 0x0f, 0x4b, 0xc2, 0x41, 0x78, 0xec, 0x8b, 0x57, 0xc7, 0xa5, 0x85, 0x2a, 0xe8, 0x65, 0x58, + 0x71, 0xd8, 0x2d, 0x28, 0xe2, 0x58, 0x14, 0x60, 0x99, 0x91, 0x9b, 0x03, 0xe7, 0x3f, 0x57, 0x20, + 0xbf, 0x8f, 0xa5, 0xf3, 0xb0, 0xa6, 0xd3, 0x3e, 0x03, 0xfc, 0x4f, 0xeb, 0x59, 0xfc, 0x8b, 0x02, + 0x6b, 0x3b, 0x3e, 0x36, 0x28, 0x1e, 0x8d, 0xec, 0x51, 0xdd, 0xbe, 0x0d, 0x8b, 0x42, 0x5b, 0x8e, + 0xfb, 0x59, 0x6a, 0x25, 0x35, 0x26, 0x33, 0x8b, 0x5f, 0x9d, 0x59, 0xe2, 0xda, 0x99, 0xfd, 0x5b, + 0x81, 0xb5, 0x53, 0xcf, 0x9c, 0xc8, 0x6c, 0x98, 0x81, 0xf2, 0xf8, 0x19, 0x4c, 0xeb, 0xcd, 0x1d, + 0xc8, 0x04, 0xdc, 0x2f, 0xff, 0xa2, 0x2f, 0x3f, 0x20, 0xa8, 0x13, 0xef, 0xf3, 0x3d, 0x0b, 0xdb, + 0xe6, 0x91, 0x41, 0xce, 0x35, 0x10, 0xe2, 0xec, 0xf7, 0x93, 0x4b, 0xff, 0x55, 0x58, 0xdb, 0xc5, + 0x36, 0x1e, 0xcf, 0x7e, 0xca, 0x8e, 0x2b, 0xfe, 0x33, 0x06, 0xb7, 0xb6, 0x0d, 0xda, 0xbe, 0x1f, + 0x2d, 0xd7, 0x95, 0xe7, 0x7e, 0x13, 0xf2, 0x32, 0xd2, 0x33, 0xa6, 0xab, 0x0f, 0x2e, 0x2b, 0xec, + 0xb5, 0x28, 0x38, 0xc2, 0xa8, 0x6f, 0xa1, 0x8f, 0x61, 0x6d, 0x44, 0xd6, 0x72, 0x6c, 0xcb, 0xc1, + 0xb2, 0x34, 0xaf, 0x5c, 0x9d, 0x1d, 0x37, 0xc4, 0x1e, 0xd2, 0x11, 0xbb, 0x35, 0x6e, 0x63, 0xb2, + 0x25, 0x89, 0xab, 0x5b, 0x92, 0x7c, 0x9c, 0x96, 0x2c, 0x5e, 0xb7, 0x25, 0xdb, 0x39, 0xc8, 0x46, + 0xab, 0x50, 0xfc, 0x04, 0xd4, 0x69, 0x65, 0x7f, 0x12, 0xb0, 0x54, 0x7c, 0x20, 0x5b, 0x1a, 0xdd, + 0x03, 0x57, 0xb6, 0x34, 0xe2, 0x32, 0x36, 0xbf, 0xcb, 0x43, 0xc8, 0x44, 0xba, 0xf5, 0x78, 0xf1, + 0x6f, 0x7e, 0x0b, 0xa0, 0x16, 0x45, 0xfd, 0xa7, 0x6b, 0xc7, 0xad, 0xea, 0x71, 0x4b, 0xbf, 0x5b, + 0xab, 0xde, 0x1b, 0x1b, 0x7b, 0x37, 0x20, 0x1f, 0x65, 0xee, 0x9d, 0xd6, 0xeb, 0x79, 0xa5, 0xf2, + 0xd7, 0x14, 0x2c, 0xc9, 0xb4, 0xd1, 0x57, 0x0a, 0x64, 0x22, 0xc8, 0x8f, 0x4a, 0x97, 0x45, 0x32, + 0x39, 0xfa, 0xd4, 0xf2, 0xcc, 0xf2, 0xa2, 0x77, 0xc5, 0xd7, 0x7f, 0xf0, 0xc7, 0x7f, 0xfc, 0x2c, + 0xf6, 0x0a, 0x7a, 0xa9, 0xfc, 0xb0, 0x52, 0xfe, 0x5c, 0x54, 0xf7, 0x03, 0xcf, 0x77, 0xbf, 0x8f, + 0xdb, 0x94, 0x94, 0x37, 0xcb, 0x46, 0x17, 0x3b, 0xf4, 0x8b, 0x72, 0x38, 0x43, 0x7e, 0xaa, 0x40, + 0x7a, 0x30, 0x1b, 0xd0, 0xa5, 0x7f, 0xa1, 0x19, 0x1f, 0x21, 0xea, 0x0c, 0x55, 0x1d, 0x0b, 0x87, + 0x9d, 0xf9, 0x89, 0x60, 0xc2, 0x58, 0xca, 0x9b, 0x5f, 0xa0, 0x2f, 0x15, 0xc8, 0x46, 0x67, 0x02, + 0xba, 0x34, 0xff, 0x29, 0xd3, 0x63, 0xa6, 0xa0, 0xde, 0xe6, 0x41, 0x95, 0x8b, 0xb3, 0xd5, 0xe8, + 0x76, 0x08, 0xc1, 0xbf, 0x54, 0x20, 0x1b, 0x3d, 0x30, 0x97, 0x07, 0x37, 0x65, 0x00, 0xcc, 0x14, + 0xdc, 0xfb, 0x3c, 0xb8, 0x77, 0x2a, 0xaf, 0xf1, 0xe0, 0xe4, 0x9f, 0xa7, 0xaf, 0x2a, 0xdc, 0x20, + 0xc6, 0x1f, 0x29, 0x90, 0x8d, 0x1e, 0xbc, 0xcb, 0x63, 0x9c, 0x02, 0xd3, 0xea, 0xcd, 0x09, 0x6c, + 0xaa, 0xf6, 0x3c, 0xda, 0x0f, 0x3b, 0xb9, 0x39, 0x63, 0x27, 0x7f, 0xad, 0x00, 0x9a, 0x84, 0x18, + 0x74, 0xe9, 0xf3, 0xf6, 0x91, 0x93, 0x40, 0x7d, 0x2e, 0x54, 0x8b, 0xfc, 0x69, 0x9c, 0x3d, 0xd9, + 0xc5, 0x5f, 0xbe, 0xc3, 0x9a, 0x15, 0xdf, 
0x9c, 0xad, 0xa1, 0x67, 0x43, 0x3f, 0xb7, 0x95, 0xcd, + 0x61, 0xa8, 0x23, 0x88, 0x35, 0x43, 0xa8, 0xd3, 0x10, 0xee, 0xbf, 0x11, 0xaa, 0xf0, 0x73, 0x5b, + 0xd9, 0xdc, 0xfe, 0x52, 0x81, 0xe7, 0xdb, 0x6e, 0xef, 0x92, 0xc8, 0xb6, 0x25, 0x12, 0x36, 0x59, + 0xf7, 0x9a, 0xca, 0x27, 0xbb, 0x52, 0xb4, 0xeb, 0xb2, 0xb1, 0x54, 0x72, 0xfd, 0x6e, 0xb9, 0x8b, + 0x1d, 0xde, 0xdb, 0xb2, 0x60, 0x19, 0x9e, 0x45, 0xa6, 0xfd, 0x73, 0xc2, 0x9d, 0xe1, 0xea, 0x5f, + 0x8a, 0xf2, 0xab, 0x58, 0x6c, 0x77, 0xef, 0x37, 0x31, 0x75, 0x5f, 0x98, 0xdb, 0xe1, 0x9e, 0x77, + 0x87, 0x9e, 0xef, 0x56, 0xce, 0x16, 0xb9, 0xd5, 0xb7, 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0xe0, + 0xe1, 0x3b, 0xf8, 0x6f, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session.pb.go new file mode 100644 index 0000000..89f21a4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session.pb.go @@ -0,0 +1,1585 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/session.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type of the response message. +type StreamingRecognitionResult_MessageType int32 + +const ( + // Not specified. Should never be used. + StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED StreamingRecognitionResult_MessageType = 0 + // Message contains a (possibly partial) transcript. + StreamingRecognitionResult_TRANSCRIPT StreamingRecognitionResult_MessageType = 1 + // Event indicates that the server has detected the end of the user's speech + // utterance and expects no additional speech. Therefore, the server will + // not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. This message is only sent if + // `single_utterance` was set to `true`, and is not used otherwise. 
+ StreamingRecognitionResult_END_OF_SINGLE_UTTERANCE StreamingRecognitionResult_MessageType = 2 +) + +var StreamingRecognitionResult_MessageType_name = map[int32]string{ + 0: "MESSAGE_TYPE_UNSPECIFIED", + 1: "TRANSCRIPT", + 2: "END_OF_SINGLE_UTTERANCE", +} +var StreamingRecognitionResult_MessageType_value = map[string]int32{ + "MESSAGE_TYPE_UNSPECIFIED": 0, + "TRANSCRIPT": 1, + "END_OF_SINGLE_UTTERANCE": 2, +} + +func (x StreamingRecognitionResult_MessageType) String() string { + return proto.EnumName(StreamingRecognitionResult_MessageType_name, int32(x)) +} +func (StreamingRecognitionResult_MessageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{7, 0} +} + +// The request to detect user's intent. +type DetectIntentRequest struct { + // Required. The name of the session this query is sent to. Format: + // `projects//agent/sessions/`. It's up to the API + // caller to choose an appropriate session ID. It can be a random number or + // some type of user identifier (preferably hashed). The length of the session + // ID must not exceed 36 bytes. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Optional. The parameters of this query. + QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"` + // Required. The input specification. It can be set to: + // + // 1. an audio config + // which instructs the speech recognizer how to process the speech audio, + // + // 2. a conversational query in the form of text, or + // + // 3. an event that specifies which intent to trigger. + QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"` + // Optional. Instructs the speech synthesizer how to generate the output + // audio. If this field is not set and agent-level speech synthesizer is not + // configured, no output audio is generated. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,4,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + // Optional. The natural language speech audio to be processed. This field + // should be populated iff `query_input` is set to an input audio config. + // A single request can contain up to 1 minute of speech audio data. 
+ InputAudio []byte `protobuf:"bytes,5,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectIntentRequest) Reset() { *m = DetectIntentRequest{} } +func (m *DetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*DetectIntentRequest) ProtoMessage() {} +func (*DetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{0} +} +func (m *DetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectIntentRequest.Unmarshal(m, b) +} +func (m *DetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *DetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectIntentRequest.Merge(dst, src) +} +func (m *DetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_DetectIntentRequest.Size(m) +} +func (m *DetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectIntentRequest proto.InternalMessageInfo + +func (m *DetectIntentRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *DetectIntentRequest) GetQueryParams() *QueryParameters { + if m != nil { + return m.QueryParams + } + return nil +} + +func (m *DetectIntentRequest) GetQueryInput() *QueryInput { + if m != nil { + return m.QueryInput + } + return nil +} + +func (m *DetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +func (m *DetectIntentRequest) GetInputAudio() []byte { + if m != nil { + return m.InputAudio + } + return nil +} + +// The message returned from the DetectIntent method. +type DetectIntentResponse struct { + // The unique identifier of the response. It can be used to + // locate a response in the training example set or for reporting issues. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The selected results of the conversational query or event processing. + // See `alternative_query_results` for additional potential results. + QueryResult *QueryResult `protobuf:"bytes,2,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // Specifies the status of the webhook request. + WebhookStatus *status.Status `protobuf:"bytes,3,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"` + // The audio data bytes encoded as specified in the request. + // Note: The output audio is generated based on the values of default platform + // text responses found in the `query_result.fulfillment_messages` field. If + // multiple default text responses exist, they will be concatenated when + // generating audio. If no default platform text responses exist, the + // generated audio content will be empty. + OutputAudio []byte `protobuf:"bytes,4,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"` + // The config used by the speech synthesizer to generate the output audio. 
+ OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectIntentResponse) Reset() { *m = DetectIntentResponse{} } +func (m *DetectIntentResponse) String() string { return proto.CompactTextString(m) } +func (*DetectIntentResponse) ProtoMessage() {} +func (*DetectIntentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{1} +} +func (m *DetectIntentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectIntentResponse.Unmarshal(m, b) +} +func (m *DetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectIntentResponse.Marshal(b, m, deterministic) +} +func (dst *DetectIntentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectIntentResponse.Merge(dst, src) +} +func (m *DetectIntentResponse) XXX_Size() int { + return xxx_messageInfo_DetectIntentResponse.Size(m) +} +func (m *DetectIntentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectIntentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectIntentResponse proto.InternalMessageInfo + +func (m *DetectIntentResponse) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *DetectIntentResponse) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *DetectIntentResponse) GetWebhookStatus() *status.Status { + if m != nil { + return m.WebhookStatus + } + return nil +} + +func (m *DetectIntentResponse) GetOutputAudio() []byte { + if m != nil { + return m.OutputAudio + } + return nil +} + +func (m *DetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +// Represents the parameters of the conversational query. +type QueryParameters struct { + // Optional. The time zone of this conversational query from the + // [time zone database](https://www.iana.org/time-zones), e.g., + // America/New_York, Europe/Paris. If not provided, the time zone specified in + // agent settings is used. + TimeZone string `protobuf:"bytes,1,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Optional. The geo location of this conversational query. + GeoLocation *latlng.LatLng `protobuf:"bytes,2,opt,name=geo_location,json=geoLocation,proto3" json:"geo_location,omitempty"` + // Optional. The collection of contexts to be activated before this query is + // executed. + Contexts []*Context `protobuf:"bytes,3,rep,name=contexts,proto3" json:"contexts,omitempty"` + // Optional. Specifies whether to delete all contexts in the current session + // before the new ones are activated. + ResetContexts bool `protobuf:"varint,4,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"` + // Optional. Additional session entity types to replace or extend developer + // entity types with. The entity synonyms apply to all languages and persist + // for the session of this query. + SessionEntityTypes []*SessionEntityType `protobuf:"bytes,5,rep,name=session_entity_types,json=sessionEntityTypes,proto3" json:"session_entity_types,omitempty"` + // Optional. This field can be used to pass custom data into the webhook + // associated with the agent. Arbitrary JSON objects are supported. 
+ Payload *_struct.Struct `protobuf:"bytes,6,opt,name=payload,proto3" json:"payload,omitempty"` + // Optional. Configures the type of sentiment analysis to perform. If not + // provided, sentiment analysis is not performed. + SentimentAnalysisRequestConfig *SentimentAnalysisRequestConfig `protobuf:"bytes,10,opt,name=sentiment_analysis_request_config,json=sentimentAnalysisRequestConfig,proto3" json:"sentiment_analysis_request_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryParameters) Reset() { *m = QueryParameters{} } +func (m *QueryParameters) String() string { return proto.CompactTextString(m) } +func (*QueryParameters) ProtoMessage() {} +func (*QueryParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{2} +} +func (m *QueryParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryParameters.Unmarshal(m, b) +} +func (m *QueryParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryParameters.Marshal(b, m, deterministic) +} +func (dst *QueryParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryParameters.Merge(dst, src) +} +func (m *QueryParameters) XXX_Size() int { + return xxx_messageInfo_QueryParameters.Size(m) +} +func (m *QueryParameters) XXX_DiscardUnknown() { + xxx_messageInfo_QueryParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryParameters proto.InternalMessageInfo + +func (m *QueryParameters) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *QueryParameters) GetGeoLocation() *latlng.LatLng { + if m != nil { + return m.GeoLocation + } + return nil +} + +func (m *QueryParameters) GetContexts() []*Context { + if m != nil { + return m.Contexts + } + return nil +} + +func (m *QueryParameters) GetResetContexts() bool { + if m != nil { + return m.ResetContexts + } + return false +} + +func (m *QueryParameters) GetSessionEntityTypes() []*SessionEntityType { + if m != nil { + return m.SessionEntityTypes + } + return nil +} + +func (m *QueryParameters) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func (m *QueryParameters) GetSentimentAnalysisRequestConfig() *SentimentAnalysisRequestConfig { + if m != nil { + return m.SentimentAnalysisRequestConfig + } + return nil +} + +// Represents the query input. It can contain either: +// +// 1. An audio config which +// instructs the speech recognizer how to process the speech audio. +// +// 2. A conversational query in the form of text,. +// +// 3. An event that specifies which intent to trigger. +type QueryInput struct { + // Required. The input specification. 
+ // + // Types that are valid to be assigned to Input: + // *QueryInput_AudioConfig + // *QueryInput_Text + // *QueryInput_Event + Input isQueryInput_Input `protobuf_oneof:"input"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryInput) Reset() { *m = QueryInput{} } +func (m *QueryInput) String() string { return proto.CompactTextString(m) } +func (*QueryInput) ProtoMessage() {} +func (*QueryInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{3} +} +func (m *QueryInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryInput.Unmarshal(m, b) +} +func (m *QueryInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryInput.Marshal(b, m, deterministic) +} +func (dst *QueryInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryInput.Merge(dst, src) +} +func (m *QueryInput) XXX_Size() int { + return xxx_messageInfo_QueryInput.Size(m) +} +func (m *QueryInput) XXX_DiscardUnknown() { + xxx_messageInfo_QueryInput.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryInput proto.InternalMessageInfo + +type isQueryInput_Input interface { + isQueryInput_Input() +} + +type QueryInput_AudioConfig struct { + AudioConfig *InputAudioConfig `protobuf:"bytes,1,opt,name=audio_config,json=audioConfig,proto3,oneof"` +} + +type QueryInput_Text struct { + Text *TextInput `protobuf:"bytes,2,opt,name=text,proto3,oneof"` +} + +type QueryInput_Event struct { + Event *EventInput `protobuf:"bytes,3,opt,name=event,proto3,oneof"` +} + +func (*QueryInput_AudioConfig) isQueryInput_Input() {} + +func (*QueryInput_Text) isQueryInput_Input() {} + +func (*QueryInput_Event) isQueryInput_Input() {} + +func (m *QueryInput) GetInput() isQueryInput_Input { + if m != nil { + return m.Input + } + return nil +} + +func (m *QueryInput) GetAudioConfig() *InputAudioConfig { + if x, ok := m.GetInput().(*QueryInput_AudioConfig); ok { + return x.AudioConfig + } + return nil +} + +func (m *QueryInput) GetText() *TextInput { + if x, ok := m.GetInput().(*QueryInput_Text); ok { + return x.Text + } + return nil +} + +func (m *QueryInput) GetEvent() *EventInput { + if x, ok := m.GetInput().(*QueryInput_Event); ok { + return x.Event + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
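Editor's note: the oneof wrappers and getters above (QueryInput_AudioConfig, QueryInput_Text, QueryInput_Event) are how calling code selects the kind of input. A minimal sketch of how a consumer of this vendored package might assemble a text DetectIntentRequest follows; the package name, function name, session path, time zone, and locale values are illustrative placeholders, and only the type, field, and getter names come from the generated code in this file.

package sessionsexample

import (
    dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

// textDetectIntentRequest assembles a DetectIntentRequest for a plain text
// query. The session argument is expected to be a full session resource name.
func textDetectIntentRequest(session, query string) *dialogflow.DetectIntentRequest {
    return &dialogflow.DetectIntentRequest{
        Session: session,
        QueryParams: &dialogflow.QueryParameters{
            TimeZone: "Europe/Paris", // optional; placeholder value
        },
        QueryInput: &dialogflow.QueryInput{
            // Select the text branch of the Input oneof; QueryInput_AudioConfig
            // and QueryInput_Event choose the other two branches.
            Input: &dialogflow.QueryInput_Text{
                Text: &dialogflow.TextInput{
                    Text:         query,
                    LanguageCode: "en-US",
                },
            },
        },
    }
}

The generated getters unwrap the oneof again on the receiving side, e.g. req.GetQueryInput().GetText().GetLanguageCode().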
+func (*QueryInput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _QueryInput_OneofMarshaler, _QueryInput_OneofUnmarshaler, _QueryInput_OneofSizer, []interface{}{ + (*QueryInput_AudioConfig)(nil), + (*QueryInput_Text)(nil), + (*QueryInput_Event)(nil), + } +} + +func _QueryInput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*QueryInput) + // input + switch x := m.Input.(type) { + case *QueryInput_AudioConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioConfig); err != nil { + return err + } + case *QueryInput_Text: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Text); err != nil { + return err + } + case *QueryInput_Event: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Event); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("QueryInput.Input has unexpected type %T", x) + } + return nil +} + +func _QueryInput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*QueryInput) + switch tag { + case 1: // input.audio_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InputAudioConfig) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_AudioConfig{msg} + return true, err + case 2: // input.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextInput) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_Text{msg} + return true, err + case 3: // input.event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EventInput) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_Event{msg} + return true, err + default: + return false, nil + } +} + +func _QueryInput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*QueryInput) + // input + switch x := m.Input.(type) { + case *QueryInput_AudioConfig: + s := proto.Size(x.AudioConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *QueryInput_Text: + s := proto.Size(x.Text) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *QueryInput_Event: + s := proto.Size(x.Event) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents the result of conversational query or event processing. +type QueryResult struct { + // The original conversational query text: + // - If natural language text was provided as input, `query_text` contains + // a copy of the input. + // - If natural language speech audio was provided as input, `query_text` + // contains the speech recognition result. If speech recognizer produced + // multiple alternatives, a particular one is picked. + // - If an event was provided as input, `query_text` is not set. + QueryText string `protobuf:"bytes,1,opt,name=query_text,json=queryText,proto3" json:"query_text,omitempty"` + // The language that was triggered during intent detection. + // See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. 
+ LanguageCode string `protobuf:"bytes,15,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The Speech recognition confidence between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. The default of 0.0 is a sentinel value indicating that confidence + // was not set. + // + // This field is not guaranteed to be accurate or set. In particular this + // field isn't set for StreamingDetectIntent since the streaming endpoint has + // separate confidence estimates per portion of the audio in + // StreamingRecognitionResult. + SpeechRecognitionConfidence float32 `protobuf:"fixed32,2,opt,name=speech_recognition_confidence,json=speechRecognitionConfidence,proto3" json:"speech_recognition_confidence,omitempty"` + // The action name from the matched intent. + Action string `protobuf:"bytes,3,opt,name=action,proto3" json:"action,omitempty"` + // The collection of extracted parameters. + Parameters *_struct.Struct `protobuf:"bytes,4,opt,name=parameters,proto3" json:"parameters,omitempty"` + // This field is set to: + // - `false` if the matched intent has required parameters and not all of + // the required parameter values have been collected. + // - `true` if all required parameter values have been collected, or if the + // matched intent doesn't contain any required parameters. + AllRequiredParamsPresent bool `protobuf:"varint,5,opt,name=all_required_params_present,json=allRequiredParamsPresent,proto3" json:"all_required_params_present,omitempty"` + // The text to be pronounced to the user or shown on the screen. + // Note: This is a legacy field, `fulfillment_messages` should be preferred. + FulfillmentText string `protobuf:"bytes,6,opt,name=fulfillment_text,json=fulfillmentText,proto3" json:"fulfillment_text,omitempty"` + // The collection of rich messages to present to the user. + FulfillmentMessages []*Intent_Message `protobuf:"bytes,7,rep,name=fulfillment_messages,json=fulfillmentMessages,proto3" json:"fulfillment_messages,omitempty"` + // If the query was fulfilled by a webhook call, this field is set to the + // value of the `source` field returned in the webhook response. + WebhookSource string `protobuf:"bytes,8,opt,name=webhook_source,json=webhookSource,proto3" json:"webhook_source,omitempty"` + // If the query was fulfilled by a webhook call, this field is set to the + // value of the `payload` field returned in the webhook response. + WebhookPayload *_struct.Struct `protobuf:"bytes,9,opt,name=webhook_payload,json=webhookPayload,proto3" json:"webhook_payload,omitempty"` + // The collection of output contexts. If applicable, + // `output_contexts.parameters` contains entries with name + // `.original` containing the original parameter values + // before the query. + OutputContexts []*Context `protobuf:"bytes,10,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // The intent that matched the conversational query. Some, not + // all fields are filled in this message, including but not limited to: + // `name`, `display_name` and `webhook_state`. + Intent *Intent `protobuf:"bytes,11,opt,name=intent,proto3" json:"intent,omitempty"` + // The intent detection confidence. Values range from 0.0 + // (completely uncertain) to 1.0 (completely certain). + // If there are `multiple knowledge_answers` messages, this value is set to + // the greatest `knowledgeAnswers.match_confidence` value in the list. 
+ IntentDetectionConfidence float32 `protobuf:"fixed32,12,opt,name=intent_detection_confidence,json=intentDetectionConfidence,proto3" json:"intent_detection_confidence,omitempty"` + // The free-form diagnostic info. For example, this field could contain + // webhook call latency. The string keys of the Struct's fields map can change + // without notice. + DiagnosticInfo *_struct.Struct `protobuf:"bytes,14,opt,name=diagnostic_info,json=diagnosticInfo,proto3" json:"diagnostic_info,omitempty"` + // The sentiment analysis result, which depends on the + // `sentiment_analysis_request_config` specified in the request. + SentimentAnalysisResult *SentimentAnalysisResult `protobuf:"bytes,17,opt,name=sentiment_analysis_result,json=sentimentAnalysisResult,proto3" json:"sentiment_analysis_result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResult) Reset() { *m = QueryResult{} } +func (m *QueryResult) String() string { return proto.CompactTextString(m) } +func (*QueryResult) ProtoMessage() {} +func (*QueryResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{4} +} +func (m *QueryResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResult.Unmarshal(m, b) +} +func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResult.Marshal(b, m, deterministic) +} +func (dst *QueryResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResult.Merge(dst, src) +} +func (m *QueryResult) XXX_Size() int { + return xxx_messageInfo_QueryResult.Size(m) +} +func (m *QueryResult) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResult.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResult proto.InternalMessageInfo + +func (m *QueryResult) GetQueryText() string { + if m != nil { + return m.QueryText + } + return "" +} + +func (m *QueryResult) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *QueryResult) GetSpeechRecognitionConfidence() float32 { + if m != nil { + return m.SpeechRecognitionConfidence + } + return 0 +} + +func (m *QueryResult) GetAction() string { + if m != nil { + return m.Action + } + return "" +} + +func (m *QueryResult) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *QueryResult) GetAllRequiredParamsPresent() bool { + if m != nil { + return m.AllRequiredParamsPresent + } + return false +} + +func (m *QueryResult) GetFulfillmentText() string { + if m != nil { + return m.FulfillmentText + } + return "" +} + +func (m *QueryResult) GetFulfillmentMessages() []*Intent_Message { + if m != nil { + return m.FulfillmentMessages + } + return nil +} + +func (m *QueryResult) GetWebhookSource() string { + if m != nil { + return m.WebhookSource + } + return "" +} + +func (m *QueryResult) GetWebhookPayload() *_struct.Struct { + if m != nil { + return m.WebhookPayload + } + return nil +} + +func (m *QueryResult) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *QueryResult) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *QueryResult) GetIntentDetectionConfidence() float32 { + if m != nil { + return m.IntentDetectionConfidence + } + return 0 +} + +func (m *QueryResult) GetDiagnosticInfo() *_struct.Struct { + if m != nil { + return m.DiagnosticInfo + } + return nil +} + +func (m *QueryResult) 
GetSentimentAnalysisResult() *SentimentAnalysisResult { + if m != nil { + return m.SentimentAnalysisResult + } + return nil +} + +// The top-level message sent by the client to the +// `StreamingDetectIntent` method. +// +// Multiple request messages should be sent in order: +// +// 1. The first message must contain `session`, `query_input` plus optionally +// `query_params` and/or `single_utterance`. The message must not contain `input_audio`. +// +// 2. If `query_input` was set to a streaming input audio config, +// all subsequent messages must contain only `input_audio`. +// Otherwise, finish the request stream. +type StreamingDetectIntentRequest struct { + // Required. The name of the session the query is sent to. + // Format of the session name: + // `projects//agent/sessions/`. It’s up to the API + // caller to choose an appropriate `Session ID`. It can be a random number or + // some type of user identifier (preferably hashed). The length of the session + // ID must not exceed 36 characters. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Optional. The parameters of this query. + QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"` + // Required. The input specification. It can be set to: + // + // 1. an audio config which instructs the speech recognizer how to process + // the speech audio, + // + // 2. a conversational query in the form of text, or + // + // 3. an event that specifies which intent to trigger. + QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"` + // Optional. If `false` (default), recognition does not cease until the + // client closes the stream. + // If `true`, the recognizer will detect a single spoken utterance in input + // audio. Recognition ceases when it detects the audio's voice has + // stopped or paused. In this case, once a detected intent is received, the + // client should close the stream and start a new request with a new stream as + // needed. + // This setting is ignored when `query_input` is a piece of text or an event. + SingleUtterance bool `protobuf:"varint,4,opt,name=single_utterance,json=singleUtterance,proto3" json:"single_utterance,omitempty"` + // Optional. Instructs the speech synthesizer how to generate the output + // audio. If this field is not set and agent-level speech synthesizer is not + // configured, no output audio is generated. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,5,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + // Optional. The input audio content to be recognized. Must be sent if + // `query_input` was set to a streaming input audio config. The complete audio + // over all streaming messages must not exceed 1 minute. 
+ InputAudio []byte `protobuf:"bytes,6,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingDetectIntentRequest) Reset() { *m = StreamingDetectIntentRequest{} } +func (m *StreamingDetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingDetectIntentRequest) ProtoMessage() {} +func (*StreamingDetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{5} +} +func (m *StreamingDetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingDetectIntentRequest.Unmarshal(m, b) +} +func (m *StreamingDetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingDetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingDetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingDetectIntentRequest.Merge(dst, src) +} +func (m *StreamingDetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_StreamingDetectIntentRequest.Size(m) +} +func (m *StreamingDetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingDetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingDetectIntentRequest proto.InternalMessageInfo + +func (m *StreamingDetectIntentRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *StreamingDetectIntentRequest) GetQueryParams() *QueryParameters { + if m != nil { + return m.QueryParams + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetQueryInput() *QueryInput { + if m != nil { + return m.QueryInput + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetSingleUtterance() bool { + if m != nil { + return m.SingleUtterance + } + return false +} + +func (m *StreamingDetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetInputAudio() []byte { + if m != nil { + return m.InputAudio + } + return nil +} + +// The top-level message returned from the +// `StreamingDetectIntent` method. +// +// Multiple response messages can be returned in order: +// +// 1. If the input was set to streaming audio, the first one or more messages +// contain `recognition_result`. Each `recognition_result` represents a more +// complete transcript of what the user said. The last `recognition_result` +// has `is_final` set to `true`. +// +// 2. The next message contains `response_id`, `query_result` +// and optionally `webhook_status` if a WebHook was called. +type StreamingDetectIntentResponse struct { + // The unique identifier of the response. It can be used to + // locate a response in the training example set or for reporting issues. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The result of speech recognition. + RecognitionResult *StreamingRecognitionResult `protobuf:"bytes,2,opt,name=recognition_result,json=recognitionResult,proto3" json:"recognition_result,omitempty"` + // The result of the conversational query or event processing. + QueryResult *QueryResult `protobuf:"bytes,3,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // Specifies the status of the webhook request. 
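Editor's note: the StreamingDetectIntentRequest comments above spell out a strict message ordering (a configuration-only first message, then audio-only messages). A hedged sketch of that ordering follows; the function name and chunking are assumptions, the InputAudioConfig value is assumed to be built elsewhere in this package, and only the request shapes come from the generated types in this file.

package sessionsexample

import (
    dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

// buildStreamingRequests illustrates the documented ordering: the first request
// carries session, query_input and single_utterance but no audio; every later
// request carries only an input_audio chunk.
func buildStreamingRequests(session string, cfg *dialogflow.InputAudioConfig, chunks [][]byte) []*dialogflow.StreamingDetectIntentRequest {
    reqs := []*dialogflow.StreamingDetectIntentRequest{
        {
            Session: session,
            QueryInput: &dialogflow.QueryInput{
                Input: &dialogflow.QueryInput_AudioConfig{AudioConfig: cfg},
            },
            SingleUtterance: true, // stop recognition after one spoken utterance
        },
    }
    for _, chunk := range chunks {
        reqs = append(reqs, &dialogflow.StreamingDetectIntentRequest{InputAudio: chunk})
    }
    return reqs
}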
+ WebhookStatus *status.Status `protobuf:"bytes,4,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"` + // The audio data bytes encoded as specified in the request. + OutputAudio []byte `protobuf:"bytes,5,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"` + // Instructs the speech synthesizer how to generate the output audio. This + // field is populated from the agent-level speech synthesizer configuration, + // if enabled. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingDetectIntentResponse) Reset() { *m = StreamingDetectIntentResponse{} } +func (m *StreamingDetectIntentResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingDetectIntentResponse) ProtoMessage() {} +func (*StreamingDetectIntentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{6} +} +func (m *StreamingDetectIntentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingDetectIntentResponse.Unmarshal(m, b) +} +func (m *StreamingDetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingDetectIntentResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingDetectIntentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingDetectIntentResponse.Merge(dst, src) +} +func (m *StreamingDetectIntentResponse) XXX_Size() int { + return xxx_messageInfo_StreamingDetectIntentResponse.Size(m) +} +func (m *StreamingDetectIntentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingDetectIntentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingDetectIntentResponse proto.InternalMessageInfo + +func (m *StreamingDetectIntentResponse) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *StreamingDetectIntentResponse) GetRecognitionResult() *StreamingRecognitionResult { + if m != nil { + return m.RecognitionResult + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetWebhookStatus() *status.Status { + if m != nil { + return m.WebhookStatus + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetOutputAudio() []byte { + if m != nil { + return m.OutputAudio + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +// Contains a speech recognition result corresponding to a portion of the audio +// that is currently being processed or an indication that this is the end +// of the single requested utterance. +// +// Example: +// +// 1. transcript: "tube" +// +// 2. transcript: "to be a" +// +// 3. transcript: "to be" +// +// 4. transcript: "to be or not to be" +// is_final: true +// +// 5. transcript: " that's" +// +// 6. transcript: " that is" +// +// 7. message_type: `MESSAGE_TYPE_END_OF_SINGLE_UTTERANCE` +// +// 8. transcript: " that is the question" +// is_final: true +// +// Only two of the responses contain final results (#4 and #8 indicated by +// `is_final: true`). 
Concatenating these generates the full transcript: "to be +// or not to be that is the question". +// +// In each response we populate: +// +// * for `MESSAGE_TYPE_TRANSCRIPT`: `transcript` and possibly `is_final`. +// +// * for `MESSAGE_TYPE_END_OF_SINGLE_UTTERANCE`: only `message_type`. +type StreamingRecognitionResult struct { + // Type of the result message. + MessageType StreamingRecognitionResult_MessageType `protobuf:"varint,1,opt,name=message_type,json=messageType,proto3,enum=google.cloud.dialogflow.v2.StreamingRecognitionResult_MessageType" json:"message_type,omitempty"` + // Transcript text representing the words that the user spoke. + // Populated if and only if `message_type` = `MESSAGE_TYPE_TRANSCRIPT`. + Transcript string `protobuf:"bytes,2,opt,name=transcript,proto3" json:"transcript,omitempty"` + // If `false`, the `StreamingRecognitionResult` represents an + // interim result that may change. If `true`, the recognizer will not return + // any further hypotheses about this piece of the audio. May only be populated + // for `message_type` = `MESSAGE_TYPE_TRANSCRIPT`. + IsFinal bool `protobuf:"varint,3,opt,name=is_final,json=isFinal,proto3" json:"is_final,omitempty"` + // The Speech confidence between 0.0 and 1.0 for the current portion of audio. + // A higher number indicates an estimated greater likelihood that the + // recognized words are correct. The default of 0.0 is a sentinel value + // indicating that confidence was not set. + // + // This field is typically only provided if `is_final` is true and you should + // not rely on it being accurate or even set. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionResult) Reset() { *m = StreamingRecognitionResult{} } +func (m *StreamingRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionResult) ProtoMessage() {} +func (*StreamingRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{7} +} +func (m *StreamingRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionResult.Unmarshal(m, b) +} +func (m *StreamingRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionResult.Merge(dst, src) +} +func (m *StreamingRecognitionResult) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionResult.Size(m) +} +func (m *StreamingRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionResult proto.InternalMessageInfo + +func (m *StreamingRecognitionResult) GetMessageType() StreamingRecognitionResult_MessageType { + if m != nil { + return m.MessageType + } + return StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED +} + +func (m *StreamingRecognitionResult) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *StreamingRecognitionResult) GetIsFinal() bool { + if m != nil { + return m.IsFinal + } + return false +} + +func (m *StreamingRecognitionResult) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Represents the natural 
language text to be processed. +type TextInput struct { + // Required. The UTF-8 encoded natural language text to be processed. + // Text length must not exceed 256 characters. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Required. The language of this conversational query. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextInput) Reset() { *m = TextInput{} } +func (m *TextInput) String() string { return proto.CompactTextString(m) } +func (*TextInput) ProtoMessage() {} +func (*TextInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{8} +} +func (m *TextInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextInput.Unmarshal(m, b) +} +func (m *TextInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextInput.Marshal(b, m, deterministic) +} +func (dst *TextInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextInput.Merge(dst, src) +} +func (m *TextInput) XXX_Size() int { + return xxx_messageInfo_TextInput.Size(m) +} +func (m *TextInput) XXX_DiscardUnknown() { + xxx_messageInfo_TextInput.DiscardUnknown(m) +} + +var xxx_messageInfo_TextInput proto.InternalMessageInfo + +func (m *TextInput) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *TextInput) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Events allow for matching intents by event name instead of the natural +// language input. For instance, input `` can trigger a personalized welcome response. +// The parameter `name` may be used by the agent in the response: +// `"Hello #welcome_event.name! What can I do for you today?"`. +type EventInput struct { + // Required. The unique identifier of the event. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The collection of parameters associated with the event. + Parameters *_struct.Struct `protobuf:"bytes,2,opt,name=parameters,proto3" json:"parameters,omitempty"` + // Required. The language of this query. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. 
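Editor's note: the StreamingDetectIntentResponse and StreamingRecognitionResult comments above describe interleaved recognition results followed by a final query result. A sketch of consuming that stream follows; the responseReceiver interface and function name are the editor's own abstractions (the concrete generated stream client is assumed to exist elsewhere in this package and is not reproduced here), and only the message types, getters, and the END_OF_SINGLE_UTTERANCE constant come from the generated code.

package sessionsexample

import (
    "log"

    dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

// responseReceiver abstracts the generated streaming client; only Recv is needed here.
type responseReceiver interface {
    Recv() (*dialogflow.StreamingDetectIntentResponse, error)
}

// consumeStream logs interim transcripts and returns the final query result.
func consumeStream(stream responseReceiver) (*dialogflow.QueryResult, error) {
    for {
        resp, err := stream.Recv()
        if err != nil {
            return nil, err
        }
        if rr := resp.GetRecognitionResult(); rr != nil {
            // Interim or final transcript of the audio heard so far.
            log.Printf("transcript (final=%v): %s", rr.GetIsFinal(), rr.GetTranscript())
            if rr.GetMessageType() == dialogflow.StreamingRecognitionResult_END_OF_SINGLE_UTTERANCE {
                // Per the comments above, the caller should now stop sending audio
                // and keep receiving until the query result arrives.
                log.Print("end of single utterance detected")
            }
            continue
        }
        if qr := resp.GetQueryResult(); qr != nil {
            return qr, nil
        }
    }
}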
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventInput) Reset() { *m = EventInput{} } +func (m *EventInput) String() string { return proto.CompactTextString(m) } +func (*EventInput) ProtoMessage() {} +func (*EventInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{9} +} +func (m *EventInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventInput.Unmarshal(m, b) +} +func (m *EventInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventInput.Marshal(b, m, deterministic) +} +func (dst *EventInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventInput.Merge(dst, src) +} +func (m *EventInput) XXX_Size() int { + return xxx_messageInfo_EventInput.Size(m) +} +func (m *EventInput) XXX_DiscardUnknown() { + xxx_messageInfo_EventInput.DiscardUnknown(m) +} + +var xxx_messageInfo_EventInput proto.InternalMessageInfo + +func (m *EventInput) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventInput) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *EventInput) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Configures the types of sentiment analysis to perform. +type SentimentAnalysisRequestConfig struct { + // Optional. Instructs the service to perform sentiment analysis on + // `query_text`. If not provided, sentiment analysis is not performed on + // `query_text`. + AnalyzeQueryTextSentiment bool `protobuf:"varint,1,opt,name=analyze_query_text_sentiment,json=analyzeQueryTextSentiment,proto3" json:"analyze_query_text_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SentimentAnalysisRequestConfig) Reset() { *m = SentimentAnalysisRequestConfig{} } +func (m *SentimentAnalysisRequestConfig) String() string { return proto.CompactTextString(m) } +func (*SentimentAnalysisRequestConfig) ProtoMessage() {} +func (*SentimentAnalysisRequestConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{10} +} +func (m *SentimentAnalysisRequestConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Unmarshal(m, b) +} +func (m *SentimentAnalysisRequestConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Marshal(b, m, deterministic) +} +func (dst *SentimentAnalysisRequestConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SentimentAnalysisRequestConfig.Merge(dst, src) +} +func (m *SentimentAnalysisRequestConfig) XXX_Size() int { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Size(m) +} +func (m *SentimentAnalysisRequestConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SentimentAnalysisRequestConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SentimentAnalysisRequestConfig proto.InternalMessageInfo + +func (m *SentimentAnalysisRequestConfig) GetAnalyzeQueryTextSentiment() bool { + if m != nil { + return m.AnalyzeQueryTextSentiment + } + return false +} + +// The result of sentiment analysis as configured by +// `sentiment_analysis_request_config`. 
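Editor's note: EventInput above matches an intent by event name instead of natural-language text. A hedged sketch of building such a query input follows; the event name, parameter key, and function name are made-up placeholders, and the Struct construction uses the github.com/golang/protobuf/ptypes/struct package already imported at the top of this generated file.

package sessionsexample

import (
    structpb "github.com/golang/protobuf/ptypes/struct"
    dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

// welcomeEventInput builds a QueryInput that triggers an intent by event name.
// The "welcome_event" name and "name" parameter are illustrative only.
func welcomeEventInput(user string) *dialogflow.QueryInput {
    return &dialogflow.QueryInput{
        Input: &dialogflow.QueryInput_Event{
            Event: &dialogflow.EventInput{
                Name: "welcome_event",
                Parameters: &structpb.Struct{
                    Fields: map[string]*structpb.Value{
                        "name": {Kind: &structpb.Value_StringValue{StringValue: user}},
                    },
                },
                LanguageCode: "en-US",
            },
        },
    }
}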
+type SentimentAnalysisResult struct { + // The sentiment analysis result for `query_text`. + QueryTextSentiment *Sentiment `protobuf:"bytes,1,opt,name=query_text_sentiment,json=queryTextSentiment,proto3" json:"query_text_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SentimentAnalysisResult) Reset() { *m = SentimentAnalysisResult{} } +func (m *SentimentAnalysisResult) String() string { return proto.CompactTextString(m) } +func (*SentimentAnalysisResult) ProtoMessage() {} +func (*SentimentAnalysisResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{11} +} +func (m *SentimentAnalysisResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SentimentAnalysisResult.Unmarshal(m, b) +} +func (m *SentimentAnalysisResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SentimentAnalysisResult.Marshal(b, m, deterministic) +} +func (dst *SentimentAnalysisResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_SentimentAnalysisResult.Merge(dst, src) +} +func (m *SentimentAnalysisResult) XXX_Size() int { + return xxx_messageInfo_SentimentAnalysisResult.Size(m) +} +func (m *SentimentAnalysisResult) XXX_DiscardUnknown() { + xxx_messageInfo_SentimentAnalysisResult.DiscardUnknown(m) +} + +var xxx_messageInfo_SentimentAnalysisResult proto.InternalMessageInfo + +func (m *SentimentAnalysisResult) GetQueryTextSentiment() *Sentiment { + if m != nil { + return m.QueryTextSentiment + } + return nil +} + +// The sentiment, such as positive/negative feeling or association, for a unit +// of analysis, such as the query text. +type Sentiment struct { + // Sentiment score between -1.0 (negative sentiment) and 1.0 (positive + // sentiment). + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + // A non-negative number in the [0, +inf) range, which represents the absolute + // magnitude of sentiment, regardless of score (positive or negative). 
+ Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentiment) Reset() { *m = Sentiment{} } +func (m *Sentiment) String() string { return proto.CompactTextString(m) } +func (*Sentiment) ProtoMessage() {} +func (*Sentiment) Descriptor() ([]byte, []int) { + return fileDescriptor_session_f34861f1b5aa3372, []int{12} +} +func (m *Sentiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentiment.Unmarshal(m, b) +} +func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic) +} +func (dst *Sentiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentiment.Merge(dst, src) +} +func (m *Sentiment) XXX_Size() int { + return xxx_messageInfo_Sentiment.Size(m) +} +func (m *Sentiment) XXX_DiscardUnknown() { + xxx_messageInfo_Sentiment.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentiment proto.InternalMessageInfo + +func (m *Sentiment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *Sentiment) GetMagnitude() float32 { + if m != nil { + return m.Magnitude + } + return 0 +} + +func init() { + proto.RegisterType((*DetectIntentRequest)(nil), "google.cloud.dialogflow.v2.DetectIntentRequest") + proto.RegisterType((*DetectIntentResponse)(nil), "google.cloud.dialogflow.v2.DetectIntentResponse") + proto.RegisterType((*QueryParameters)(nil), "google.cloud.dialogflow.v2.QueryParameters") + proto.RegisterType((*QueryInput)(nil), "google.cloud.dialogflow.v2.QueryInput") + proto.RegisterType((*QueryResult)(nil), "google.cloud.dialogflow.v2.QueryResult") + proto.RegisterType((*StreamingDetectIntentRequest)(nil), "google.cloud.dialogflow.v2.StreamingDetectIntentRequest") + proto.RegisterType((*StreamingDetectIntentResponse)(nil), "google.cloud.dialogflow.v2.StreamingDetectIntentResponse") + proto.RegisterType((*StreamingRecognitionResult)(nil), "google.cloud.dialogflow.v2.StreamingRecognitionResult") + proto.RegisterType((*TextInput)(nil), "google.cloud.dialogflow.v2.TextInput") + proto.RegisterType((*EventInput)(nil), "google.cloud.dialogflow.v2.EventInput") + proto.RegisterType((*SentimentAnalysisRequestConfig)(nil), "google.cloud.dialogflow.v2.SentimentAnalysisRequestConfig") + proto.RegisterType((*SentimentAnalysisResult)(nil), "google.cloud.dialogflow.v2.SentimentAnalysisResult") + proto.RegisterType((*Sentiment)(nil), "google.cloud.dialogflow.v2.Sentiment") + proto.RegisterEnum("google.cloud.dialogflow.v2.StreamingRecognitionResult_MessageType", StreamingRecognitionResult_MessageType_name, StreamingRecognitionResult_MessageType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SessionsClient is the client API for Sessions service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SessionsClient interface { + // Processes a natural language query and returns structured, actionable data + // as a result. 
This method is not idempotent, because it may cause contexts + // and session entity types to be updated, which in turn might affect + // results of future queries. + DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error) + // Processes a natural language query in audio format in a streaming fashion + // and returns structured, actionable data as a result. This method is only + // available via the gRPC API (not REST). + StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error) +} + +type sessionsClient struct { + cc *grpc.ClientConn +} + +func NewSessionsClient(cc *grpc.ClientConn) SessionsClient { + return &sessionsClient{cc} +} + +func (c *sessionsClient) DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error) { + out := new(DetectIntentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Sessions/DetectIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionsClient) StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error) { + stream, err := c.cc.NewStream(ctx, &_Sessions_serviceDesc.Streams[0], "/google.cloud.dialogflow.v2.Sessions/StreamingDetectIntent", opts...) + if err != nil { + return nil, err + } + x := &sessionsStreamingDetectIntentClient{stream} + return x, nil +} + +type Sessions_StreamingDetectIntentClient interface { + Send(*StreamingDetectIntentRequest) error + Recv() (*StreamingDetectIntentResponse, error) + grpc.ClientStream +} + +type sessionsStreamingDetectIntentClient struct { + grpc.ClientStream +} + +func (x *sessionsStreamingDetectIntentClient) Send(m *StreamingDetectIntentRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *sessionsStreamingDetectIntentClient) Recv() (*StreamingDetectIntentResponse, error) { + m := new(StreamingDetectIntentResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SessionsServer is the server API for Sessions service. +type SessionsServer interface { + // Processes a natural language query and returns structured, actionable data + // as a result. This method is not idempotent, because it may cause contexts + // and session entity types to be updated, which in turn might affect + // results of future queries. + DetectIntent(context.Context, *DetectIntentRequest) (*DetectIntentResponse, error) + // Processes a natural language query in audio format in a streaming fashion + // and returns structured, actionable data as a result. This method is only + // available via the gRPC API (not REST). 
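For orientation, a minimal sketch of driving the generated SessionsClient defined above: it assumes conn is an already-authenticated *grpc.ClientConn, uses placeholder project and session IDs, and relies on the DetectIntentRequest and QueryInput_Text oneof-wrapper names produced earlier in this generated file.

package sessionsketch

import (
	"context"
	"fmt"
	"log"

	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
	"google.golang.org/grpc"
)

// detectText sends one text query through the generated Sessions client and
// prints the agent's fulfillment text. conn must already carry Google Cloud
// credentials; the project and session IDs below are placeholders.
func detectText(conn *grpc.ClientConn) {
	client := dialogflow.NewSessionsClient(conn)
	req := &dialogflow.DetectIntentRequest{
		Session: "projects/my-project/agent/sessions/my-session",
		QueryInput: &dialogflow.QueryInput{
			// QueryInput_Text is the protoc-gen-go oneof wrapper for the text input.
			Input: &dialogflow.QueryInput_Text{
				Text: &dialogflow.TextInput{Text: "hello", LanguageCode: "en-US"},
			},
		},
	}
	resp, err := client.DetectIntent(context.Background(), req)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.GetQueryResult().GetFulfillmentText())
}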
+ StreamingDetectIntent(Sessions_StreamingDetectIntentServer) error +} + +func RegisterSessionsServer(s *grpc.Server, srv SessionsServer) { + s.RegisterService(&_Sessions_serviceDesc, srv) +} + +func _Sessions_DetectIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetectIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionsServer).DetectIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.Sessions/DetectIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionsServer).DetectIntent(ctx, req.(*DetectIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Sessions_StreamingDetectIntent_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SessionsServer).StreamingDetectIntent(&sessionsStreamingDetectIntentServer{stream}) +} + +type Sessions_StreamingDetectIntentServer interface { + Send(*StreamingDetectIntentResponse) error + Recv() (*StreamingDetectIntentRequest, error) + grpc.ServerStream +} + +type sessionsStreamingDetectIntentServer struct { + grpc.ServerStream +} + +func (x *sessionsStreamingDetectIntentServer) Send(m *StreamingDetectIntentResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *sessionsStreamingDetectIntentServer) Recv() (*StreamingDetectIntentRequest, error) { + m := new(StreamingDetectIntentRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Sessions_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.Sessions", + HandlerType: (*SessionsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DetectIntent", + Handler: _Sessions_DetectIntent_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingDetectIntent", + Handler: _Sessions_StreamingDetectIntent_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/cloud/dialogflow/v2/session.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/session.proto", fileDescriptor_session_f34861f1b5aa3372) +} + +var fileDescriptor_session_f34861f1b5aa3372 = []byte{ + // 1581 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x58, 0x4b, 0x73, 0x1b, 0xc7, + 0x11, 0xe6, 0x82, 0x2f, 0xa0, 0x01, 0x91, 0xd4, 0x90, 0x09, 0xc1, 0x87, 0x18, 0x09, 0x2a, 0x45, + 0x14, 0x13, 0x61, 0x15, 0x28, 0xa5, 0x44, 0x54, 0xe9, 0x41, 0x02, 0x20, 0x85, 0x14, 0x45, 0x41, + 0x0b, 0x2a, 0x0f, 0x55, 0xa9, 0xb6, 0x56, 0x8b, 0xc1, 0x6a, 0x93, 0xc5, 0xcc, 0x72, 0x67, 0x96, + 0x12, 0x95, 0x4a, 0x0e, 0xa9, 0xca, 0x1f, 0xb0, 0xcb, 0x27, 0xdf, 0x7c, 0xf4, 0xc9, 0x07, 0x1f, + 0x7c, 0xf4, 0xd1, 0x77, 0x57, 0xf9, 0x17, 0xd8, 0x17, 0xff, 0x02, 0x1f, 0x7c, 0x70, 0xed, 0xcc, + 0x2c, 0xb0, 0x24, 0x81, 0x25, 0xad, 0x92, 0x7d, 0xf0, 0x0d, 0xd3, 0xf3, 0x75, 0x4f, 0xef, 0x37, + 0xdd, 0xdf, 0x74, 0x01, 0x56, 0x1d, 0x4a, 0x1d, 0x0f, 0xeb, 0xb6, 0x47, 0xc3, 0xb6, 0xde, 0x76, + 0x2d, 0x8f, 0x3a, 0x1d, 0x8f, 0xbe, 0xd2, 0x0f, 0x2a, 0x3a, 0xc3, 0x8c, 0xb9, 0x94, 0x94, 0xfd, + 0x80, 0x72, 0x8a, 0x16, 0x25, 0xb2, 0x2c, 0x90, 0xe5, 0x3e, 0xb2, 0x7c, 0x50, 0x59, 0x5c, 0x56, + 0x51, 0x2c, 0xdf, 0xd5, 0x2d, 0x42, 0x28, 0xb7, 0xb8, 0x4b, 0x09, 0x93, 0x9e, 0x8b, 0x0b, 0x89, + 0xdd, 0x00, 0x33, 0x1a, 0x06, 0x36, 0x56, 0x5b, 0xd7, 0x53, 0x8e, 
0xb7, 0xc2, 0xb6, 0x4b, 0x4d, + 0x9b, 0x92, 0x8e, 0xeb, 0x28, 0x78, 0x5a, 0xb6, 0x36, 0x25, 0x1c, 0xbf, 0xe6, 0x0a, 0x79, 0x35, + 0x05, 0xe9, 0x12, 0x8e, 0x49, 0x0c, 0xfc, 0xe3, 0xe9, 0x04, 0x98, 0x98, 0x70, 0x97, 0x1f, 0x9a, + 0xfc, 0xd0, 0x8f, 0xf3, 0x8e, 0x3f, 0x58, 0xac, 0x5e, 0x84, 0x1d, 0x9d, 0xf1, 0x20, 0xb4, 0xe3, + 0x98, 0xf3, 0x6a, 0x37, 0xf0, 0x6d, 0x9d, 0x71, 0x8b, 0x87, 0x31, 0x13, 0x45, 0xb5, 0x11, 0x45, + 0xd2, 0x3d, 0x8b, 0x7b, 0x44, 0x7d, 0x59, 0xe9, 0x8b, 0x0c, 0xcc, 0xd6, 0x30, 0xc7, 0x36, 0x6f, + 0x88, 0xec, 0x0c, 0xbc, 0x1f, 0x62, 0xc6, 0x51, 0x11, 0x26, 0x55, 0x16, 0x45, 0xed, 0xa2, 0xb6, + 0x9a, 0x33, 0xe2, 0x25, 0xda, 0x85, 0xc2, 0x7e, 0x88, 0x83, 0x43, 0xd3, 0xb7, 0x02, 0xab, 0xcb, + 0x8a, 0x99, 0x8b, 0xda, 0x6a, 0xbe, 0xf2, 0xbb, 0xf2, 0xf0, 0x6b, 0x2a, 0x3f, 0x89, 0xf0, 0xcd, + 0x08, 0x8e, 0x39, 0x0e, 0x98, 0x91, 0xdf, 0xef, 0x19, 0x18, 0xda, 0x06, 0xb9, 0x34, 0x5d, 0xe2, + 0x87, 0xbc, 0x38, 0x2a, 0xc2, 0xfd, 0xf6, 0xd4, 0x70, 0x8d, 0x08, 0x6d, 0xc0, 0x7e, 0xef, 0x37, + 0x7a, 0x0e, 0xb3, 0x34, 0xe4, 0x7e, 0xc8, 0xcd, 0xe4, 0x0d, 0x16, 0xc7, 0x44, 0xc0, 0xeb, 0x69, + 0x01, 0x1f, 0x0b, 0xb7, 0x8d, 0xc8, 0xab, 0x2a, 0x9c, 0x8c, 0xf3, 0xf4, 0xb8, 0x09, 0xfd, 0x06, + 0xf2, 0x22, 0x43, 0x19, 0xbd, 0x38, 0x7e, 0x51, 0x5b, 0x2d, 0x18, 0x20, 0x4c, 0x02, 0x56, 0xfa, + 0x3c, 0x03, 0x73, 0x47, 0xa9, 0x64, 0x3e, 0x25, 0x0c, 0x47, 0x9e, 0x81, 0xfa, 0x6d, 0xba, 0x6d, + 0xc5, 0x27, 0xc4, 0xa6, 0x46, 0x1b, 0xfd, 0x25, 0xa6, 0x34, 0xc0, 0x2c, 0xf4, 0xb8, 0xa2, 0xf4, + 0xea, 0xa9, 0x1c, 0x18, 0x02, 0xae, 0xe8, 0x94, 0x0b, 0x74, 0x1b, 0xa6, 0x5e, 0xe1, 0x17, 0x2f, + 0x29, 0xfd, 0x97, 0x29, 0x4b, 0x40, 0x31, 0x8a, 0xe2, 0x68, 0x81, 0x6f, 0x97, 0x5b, 0x62, 0xc7, + 0x38, 0xa7, 0x90, 0x72, 0x89, 0x2e, 0x41, 0x21, 0x49, 0xa0, 0x60, 0xae, 0x60, 0xe4, 0x13, 0x54, + 0x0c, 0xe3, 0x78, 0xe2, 0xdd, 0x70, 0x5c, 0xfa, 0x76, 0x14, 0xa6, 0x8f, 0x15, 0x0b, 0x5a, 0x82, + 0x1c, 0x77, 0xbb, 0xd8, 0x7c, 0x43, 0x09, 0x56, 0xdc, 0x65, 0x23, 0xc3, 0x33, 0x4a, 0x30, 0xba, + 0x05, 0x05, 0x07, 0x53, 0xd3, 0xa3, 0xb6, 0xe8, 0x7c, 0xc5, 0xdc, 0x6c, 0x9c, 0x88, 0xe8, 0x9c, + 0x1d, 0x8b, 0xef, 0x10, 0xc7, 0xc8, 0x3b, 0x98, 0xee, 0x28, 0x1c, 0xba, 0x0f, 0x59, 0xd5, 0xb7, + 0x11, 0x3f, 0xa3, 0xab, 0xf9, 0xca, 0xe5, 0xb4, 0xe4, 0xab, 0x12, 0x6b, 0xf4, 0x9c, 0xd0, 0x15, + 0x98, 0x0a, 0x30, 0xc3, 0xdc, 0xec, 0x85, 0x89, 0xd8, 0xca, 0x1a, 0xe7, 0x84, 0xb5, 0x1a, 0xc3, + 0x4c, 0x98, 0x1b, 0xd0, 0xcc, 0xac, 0x38, 0x2e, 0xce, 0x4c, 0x25, 0xac, 0x25, 0xfd, 0xea, 0xc2, + 0x6d, 0xef, 0xd0, 0xc7, 0x06, 0x62, 0xc7, 0x4d, 0x0c, 0xfd, 0x01, 0x26, 0x7d, 0xeb, 0xd0, 0xa3, + 0x56, 0x5b, 0x5d, 0xc2, 0x7c, 0x1c, 0x33, 0x96, 0x88, 0x72, 0x4b, 0x48, 0x84, 0x11, 0xe3, 0xd0, + 0xff, 0x35, 0xb8, 0xc4, 0xa2, 0x6c, 0xba, 0x98, 0x70, 0xd3, 0x22, 0x96, 0x77, 0xc8, 0x5c, 0x66, + 0x06, 0xb2, 0xf3, 0xe3, 0x2b, 0x05, 0x11, 0x6d, 0x3d, 0x3d, 0x43, 0x15, 0x64, 0x43, 0xc5, 0x50, + 0xe2, 0xa1, 0xee, 0x77, 0x85, 0xa5, 0xee, 0x97, 0xbe, 0xd1, 0x00, 0xfa, 0xad, 0x8c, 0x9e, 0x40, + 0xe1, 0x48, 0x4d, 0x69, 0x22, 0x81, 0xdf, 0xa7, 0x25, 0xd0, 0x20, 0x47, 0xeb, 0xe7, 0xe1, 0x88, + 0x91, 0xb7, 0x12, 0x2d, 0x7b, 0x07, 0xc6, 0xa2, 0x6b, 0x50, 0x55, 0x71, 0x25, 0x2d, 0xd4, 0x1e, + 0x7e, 0xcd, 0x45, 0xb8, 0x87, 0x23, 0x86, 0x70, 0x42, 0xf7, 0x60, 0x1c, 0x1f, 0x60, 0x72, 0x26, + 0x45, 0xaa, 0x47, 0xc0, 0xd8, 0x5d, 0xba, 0x6d, 0x4e, 0xc2, 0xb8, 0x10, 0x87, 0xd2, 0x07, 0x93, + 0x90, 0x4f, 0xb4, 0x2b, 0xba, 0x00, 0x52, 0xb5, 0x4c, 0x91, 0x9b, 0xac, 0xe8, 0x9c, 0xb0, 0x44, + 0x49, 0xa0, 0xcb, 0x70, 0xce, 0xb3, 0x88, 0x13, 0x5a, 0x0e, 0x36, 0x6d, 0xda, 0xc6, 0xc5, 
0x69, + 0x81, 0x28, 0xc4, 0xc6, 0x2a, 0x6d, 0x63, 0xb4, 0x09, 0x17, 0x98, 0x8f, 0xb1, 0xfd, 0xd2, 0x0c, + 0xb0, 0x4d, 0x1d, 0xe2, 0x46, 0x55, 0x2d, 0x99, 0x6b, 0x63, 0x62, 0x63, 0xf1, 0xc9, 0x19, 0x63, + 0x49, 0x82, 0x8c, 0x3e, 0xa6, 0xda, 0x83, 0xa0, 0x5f, 0xc3, 0x84, 0x65, 0x8b, 0xae, 0x19, 0x15, + 0x27, 0xa8, 0x15, 0xfa, 0x13, 0x80, 0xdf, 0x6b, 0x3f, 0x25, 0x9f, 0x43, 0xab, 0x2a, 0x01, 0x45, + 0x77, 0x61, 0xc9, 0xf2, 0x3c, 0x51, 0x48, 0x6e, 0x80, 0xdb, 0xea, 0x81, 0x30, 0xfd, 0xa8, 0x25, + 0x08, 0x17, 0x8a, 0x99, 0x35, 0x8a, 0x96, 0xe7, 0x19, 0x0a, 0x21, 0x5f, 0x80, 0xa6, 0xdc, 0x47, + 0xd7, 0x60, 0xa6, 0x13, 0x7a, 0x1d, 0xd7, 0xf3, 0x44, 0x61, 0x0a, 0x76, 0x26, 0x44, 0x66, 0xd3, + 0x09, 0xbb, 0xe0, 0xe8, 0x39, 0xcc, 0x25, 0xa1, 0x5d, 0xcc, 0x98, 0xe5, 0x60, 0x56, 0x9c, 0x14, + 0x6d, 0xb5, 0x96, 0x5e, 0x33, 0xe2, 0x11, 0x7e, 0x24, 0x5d, 0x8c, 0xd9, 0x44, 0x1c, 0x65, 0x13, + 0xcd, 0xdd, 0xd3, 0x50, 0x31, 0x35, 0x14, 0xb3, 0x22, 0x8f, 0x9e, 0x5e, 0x0a, 0x23, 0x7a, 0x00, + 0xd3, 0x31, 0x2c, 0xee, 0xc1, 0x5c, 0x3a, 0x5b, 0x71, 0xd8, 0xa6, 0x6a, 0xc5, 0x1d, 0x98, 0x56, + 0x72, 0xda, 0x93, 0x11, 0x38, 0xbb, 0x1a, 0x4d, 0x49, 0xdf, 0x9e, 0xd8, 0xac, 0xc3, 0x84, 0x1c, + 0x31, 0x8a, 0x79, 0x91, 0x46, 0xe9, 0x74, 0x1e, 0x0c, 0xe5, 0x81, 0xee, 0xc1, 0x92, 0xfc, 0x65, + 0xb6, 0xc5, 0x13, 0x76, 0xac, 0x9c, 0x0a, 0xa2, 0x9c, 0x16, 0x24, 0xa4, 0x16, 0x23, 0x12, 0xc5, + 0xf4, 0x00, 0xa6, 0xdb, 0xae, 0xe5, 0x10, 0xca, 0xb8, 0x6b, 0x9b, 0x2e, 0xe9, 0xd0, 0xe2, 0xd4, + 0x29, 0x5c, 0xf4, 0xf1, 0x0d, 0xd2, 0xa1, 0x88, 0xc2, 0xc2, 0x40, 0x55, 0x12, 0x2f, 0xe2, 0x79, + 0x11, 0xeb, 0xe6, 0x8f, 0x54, 0x23, 0xf1, 0x3a, 0xce, 0xb3, 0xc1, 0x1b, 0xa5, 0xef, 0x33, 0xb0, + 0xdc, 0xe2, 0x01, 0xb6, 0xba, 0x2e, 0x71, 0x7e, 0x21, 0x33, 0xd0, 0x35, 0x98, 0x61, 0x2e, 0x71, + 0x3c, 0x6c, 0x86, 0x9c, 0xe3, 0xc0, 0x8a, 0xee, 0x4e, 0x3e, 0x4c, 0xd3, 0xd2, 0xfe, 0x34, 0x36, + 0x0f, 0x7b, 0xca, 0xc7, 0x7f, 0x9a, 0x71, 0x69, 0xe2, 0xc4, 0xb8, 0xf4, 0xc9, 0x28, 0x5c, 0x18, + 0x42, 0xff, 0x59, 0xe7, 0x26, 0x0c, 0x28, 0x29, 0x7f, 0x47, 0xa6, 0xa7, 0x5b, 0xa9, 0xb5, 0x12, + 0x9f, 0x9b, 0x50, 0x46, 0x55, 0x2e, 0xe7, 0x83, 0xe3, 0xa6, 0x13, 0xe3, 0xd9, 0xe8, 0x3b, 0x1d, + 0xcf, 0xc6, 0xde, 0x76, 0x3c, 0x1b, 0xff, 0xd9, 0xc7, 0xb3, 0xcf, 0x32, 0xb0, 0x38, 0x9c, 0x3a, + 0x84, 0xa1, 0xa0, 0x94, 0x58, 0x4c, 0x39, 0xe2, 0xc2, 0xa6, 0x2a, 0x9b, 0x6f, 0x77, 0x11, 0xb1, + 0x42, 0x8b, 0xc9, 0x27, 0xdf, 0xed, 0x2f, 0xd0, 0x0a, 0x00, 0x0f, 0x2c, 0xc2, 0xec, 0xc0, 0xf5, + 0xe5, 0x6d, 0xe7, 0x8c, 0x84, 0x05, 0x2d, 0x40, 0xd6, 0x65, 0x66, 0xc7, 0x25, 0x96, 0x27, 0xae, + 0x2a, 0x6b, 0x4c, 0xba, 0x6c, 0x2b, 0x5a, 0x46, 0xae, 0x09, 0x51, 0x1b, 0x13, 0xa2, 0x96, 0xb0, + 0x94, 0xfe, 0x0e, 0xf9, 0xc4, 0xb1, 0x68, 0x19, 0x8a, 0x8f, 0xea, 0xad, 0xd6, 0xc6, 0x76, 0xdd, + 0xdc, 0xfb, 0x47, 0xb3, 0x6e, 0x3e, 0xdd, 0x6d, 0x35, 0xeb, 0xd5, 0xc6, 0x56, 0xa3, 0x5e, 0x9b, + 0x19, 0x41, 0x53, 0x00, 0x7b, 0xc6, 0xc6, 0x6e, 0xab, 0x6a, 0x34, 0x9a, 0x7b, 0x33, 0x1a, 0x5a, + 0x82, 0xf9, 0xfa, 0x6e, 0xcd, 0x7c, 0xbc, 0x65, 0xb6, 0x1a, 0xbb, 0xdb, 0x3b, 0x75, 0xf3, 0xe9, + 0xde, 0x5e, 0xdd, 0xd8, 0xd8, 0xad, 0xd6, 0x67, 0x32, 0xa5, 0x1a, 0xe4, 0x7a, 0x23, 0x06, 0x42, + 0x6a, 0x2e, 0x91, 0x15, 0x2d, 0xc7, 0x8d, 0x13, 0xcf, 0x7e, 0xe6, 0xe4, 0xb3, 0x5f, 0xfa, 0x2f, + 0x40, 0x7f, 0xd4, 0x88, 0xc2, 0x10, 0xab, 0x1b, 0x0f, 0xc5, 0xe2, 0xf7, 0xb1, 0xc7, 0x3b, 0x73, + 0xf6, 0xc7, 0xfb, 0xc4, 0xf9, 0xa3, 0x03, 0xce, 0xb7, 0x60, 0x25, 0x7d, 0xe8, 0x43, 0xf7, 0x61, + 0x59, 0x68, 0xf7, 0x1b, 0x6c, 0xf6, 0x87, 0x1c, 0xb3, 0x27, 0xc1, 0x22, 0xd7, 0xac, 0xb1, 0xa0, + 0x30, 0x4f, 0xe2, 
0xa9, 0xa7, 0x17, 0xb5, 0x14, 0xc0, 0xfc, 0x10, 0x25, 0x47, 0x7f, 0x83, 0xb9, + 0xa1, 0x31, 0x4f, 0x19, 0xef, 0x7a, 0x21, 0x0d, 0xb4, 0x7f, 0xf2, 0xcc, 0xfb, 0x90, 0xeb, 0x2d, + 0xd0, 0x1c, 0x8c, 0x33, 0x9b, 0x06, 0x92, 0xd6, 0x8c, 0x21, 0x17, 0x68, 0x19, 0x72, 0x5d, 0x2b, + 0x2a, 0xd0, 0xb0, 0x1d, 0x0f, 0x57, 0x7d, 0x43, 0xe5, 0xab, 0x0c, 0x64, 0xd5, 0xbc, 0xce, 0xd0, + 0xa7, 0x1a, 0x14, 0x92, 0x7a, 0x86, 0xf4, 0xb4, 0xcc, 0x06, 0x3c, 0x3c, 0x8b, 0x37, 0xce, 0xee, + 0x20, 0xb5, 0xb0, 0xb4, 0xf1, 0xbf, 0x2f, 0xbf, 0x7e, 0x3f, 0x73, 0xa7, 0x74, 0x4b, 0x3f, 0xa8, + 0xe8, 0xff, 0x56, 0xcf, 0xd4, 0x5d, 0x3f, 0xa0, 0xff, 0xc4, 0x36, 0x67, 0xfa, 0x9a, 0x6e, 0x39, + 0x98, 0xf0, 0xf8, 0x8f, 0x05, 0xa6, 0xaf, 0xfd, 0x67, 0xbd, 0x9d, 0x88, 0xb3, 0xae, 0xad, 0xa1, + 0xf7, 0x34, 0xf8, 0xd5, 0x40, 0x3d, 0x46, 0x7f, 0x3e, 0x53, 0x07, 0x0f, 0xfa, 0x90, 0xdb, 0x6f, + 0xe1, 0xa9, 0xbe, 0x68, 0x64, 0x55, 0xbb, 0xa1, 0x6d, 0x7e, 0xa8, 0xc1, 0x8a, 0x4d, 0xbb, 0x29, + 0x61, 0x36, 0x0b, 0x8a, 0xf8, 0x66, 0x54, 0xdc, 0x4d, 0xed, 0x59, 0x4d, 0x61, 0x1d, 0x1a, 0x95, + 0x6e, 0x99, 0x06, 0x8e, 0xee, 0x60, 0x22, 0x4a, 0x5f, 0x97, 0x5b, 0x96, 0xef, 0xb2, 0x41, 0xff, + 0xbb, 0xdc, 0xe9, 0xaf, 0xbe, 0xd3, 0xb4, 0x8f, 0x32, 0x99, 0xda, 0xd6, 0xc7, 0x99, 0xc5, 0x6d, + 0x19, 0xae, 0x2a, 0x8e, 0xae, 0xf5, 0x8f, 0xfe, 0x6b, 0xe5, 0xc5, 0x84, 0x88, 0x7a, 0xf3, 0x87, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xad, 0xf5, 0x89, 0x0f, 0xcd, 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session_entity_type.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session_entity_type.pb.go new file mode 100644 index 0000000..cdda5f6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/session_entity_type.pb.go @@ -0,0 +1,723 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/session_entity_type.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The types of modifications for a session entity type. +type SessionEntityType_EntityOverrideMode int32 + +const ( + // Not specified. This value should be never used. + SessionEntityType_ENTITY_OVERRIDE_MODE_UNSPECIFIED SessionEntityType_EntityOverrideMode = 0 + // The collection of session entities overrides the collection of entities + // in the corresponding developer entity type. + SessionEntityType_ENTITY_OVERRIDE_MODE_OVERRIDE SessionEntityType_EntityOverrideMode = 1 + // The collection of session entities extends the collection of entities in + // the corresponding developer entity type. 
+ // + // Note: Even in this override mode calls to `ListSessionEntityTypes`, + // `GetSessionEntityType`, `CreateSessionEntityType` and + // `UpdateSessionEntityType` only return the additional entities added in + // this session entity type. If you want to get the supplemented list, + // please call [EntityTypes.GetEntityType][google.cloud.dialogflow.v2.EntityTypes.GetEntityType] on the developer entity type + // and merge. + SessionEntityType_ENTITY_OVERRIDE_MODE_SUPPLEMENT SessionEntityType_EntityOverrideMode = 2 +) + +var SessionEntityType_EntityOverrideMode_name = map[int32]string{ + 0: "ENTITY_OVERRIDE_MODE_UNSPECIFIED", + 1: "ENTITY_OVERRIDE_MODE_OVERRIDE", + 2: "ENTITY_OVERRIDE_MODE_SUPPLEMENT", +} +var SessionEntityType_EntityOverrideMode_value = map[string]int32{ + "ENTITY_OVERRIDE_MODE_UNSPECIFIED": 0, + "ENTITY_OVERRIDE_MODE_OVERRIDE": 1, + "ENTITY_OVERRIDE_MODE_SUPPLEMENT": 2, +} + +func (x SessionEntityType_EntityOverrideMode) String() string { + return proto.EnumName(SessionEntityType_EntityOverrideMode_name, int32(x)) +} +func (SessionEntityType_EntityOverrideMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{0, 0} +} + +// Represents a session entity type. +// +// Extends or replaces a developer entity type at the user session level (we +// refer to the entity types defined at the agent level as "developer entity +// types"). +// +// Note: session entity types apply to all queries, regardless of the language. +type SessionEntityType struct { + // Required. The unique identifier of this session entity type. Format: + // `projects//agent/sessions//entityTypes/`. + // + // `` must be the display name of an existing entity + // type in the same agent that will be overridden or supplemented. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Indicates whether the additional data should override or + // supplement the developer entity type definition. + EntityOverrideMode SessionEntityType_EntityOverrideMode `protobuf:"varint,2,opt,name=entity_override_mode,json=entityOverrideMode,proto3,enum=google.cloud.dialogflow.v2.SessionEntityType_EntityOverrideMode" json:"entity_override_mode,omitempty"` + // Required. The collection of entities associated with this session entity + // type. 
+ Entities []*EntityType_Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SessionEntityType) Reset() { *m = SessionEntityType{} } +func (m *SessionEntityType) String() string { return proto.CompactTextString(m) } +func (*SessionEntityType) ProtoMessage() {} +func (*SessionEntityType) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{0} +} +func (m *SessionEntityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SessionEntityType.Unmarshal(m, b) +} +func (m *SessionEntityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SessionEntityType.Marshal(b, m, deterministic) +} +func (dst *SessionEntityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_SessionEntityType.Merge(dst, src) +} +func (m *SessionEntityType) XXX_Size() int { + return xxx_messageInfo_SessionEntityType.Size(m) +} +func (m *SessionEntityType) XXX_DiscardUnknown() { + xxx_messageInfo_SessionEntityType.DiscardUnknown(m) +} + +var xxx_messageInfo_SessionEntityType proto.InternalMessageInfo + +func (m *SessionEntityType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SessionEntityType) GetEntityOverrideMode() SessionEntityType_EntityOverrideMode { + if m != nil { + return m.EntityOverrideMode + } + return SessionEntityType_ENTITY_OVERRIDE_MODE_UNSPECIFIED +} + +func (m *SessionEntityType) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +// The request message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes]. +type ListSessionEntityTypesRequest struct { + // Required. The session to list all session entity types from. + // Format: `projects//agent/sessions/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionEntityTypesRequest) Reset() { *m = ListSessionEntityTypesRequest{} } +func (m *ListSessionEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListSessionEntityTypesRequest) ProtoMessage() {} +func (*ListSessionEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{1} +} +func (m *ListSessionEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionEntityTypesRequest.Unmarshal(m, b) +} +func (m *ListSessionEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListSessionEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionEntityTypesRequest.Merge(dst, src) +} +func (m *ListSessionEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListSessionEntityTypesRequest.Size(m) +} +func (m *ListSessionEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionEntityTypesRequest proto.InternalMessageInfo + +func (m *ListSessionEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSessionEntityTypesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSessionEntityTypesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2.SessionEntityTypes.ListSessionEntityTypes]. +type ListSessionEntityTypesResponse struct { + // The list of session entity types. There will be a maximum number of items + // returned based on the page_size field in the request. + SessionEntityTypes []*SessionEntityType `protobuf:"bytes,1,rep,name=session_entity_types,json=sessionEntityTypes,proto3" json:"session_entity_types,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionEntityTypesResponse) Reset() { *m = ListSessionEntityTypesResponse{} } +func (m *ListSessionEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListSessionEntityTypesResponse) ProtoMessage() {} +func (*ListSessionEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{2} +} +func (m *ListSessionEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionEntityTypesResponse.Unmarshal(m, b) +} +func (m *ListSessionEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListSessionEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionEntityTypesResponse.Merge(dst, src) +} +func (m *ListSessionEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListSessionEntityTypesResponse.Size(m) +} +func (m *ListSessionEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionEntityTypesResponse proto.InternalMessageInfo + +func (m *ListSessionEntityTypesResponse) GetSessionEntityTypes() []*SessionEntityType { + if m != nil { + return m.SessionEntityTypes + } + return nil +} + +func (m *ListSessionEntityTypesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.GetSessionEntityType]. +type GetSessionEntityTypeRequest struct { + // Required. The name of the session entity type. Format: + // `projects//agent/sessions//entityTypes/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSessionEntityTypeRequest) Reset() { *m = GetSessionEntityTypeRequest{} } +func (m *GetSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*GetSessionEntityTypeRequest) ProtoMessage() {} +func (*GetSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{3} +} +func (m *GetSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *GetSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *GetSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSessionEntityTypeRequest.Merge(dst, src) +} +func (m *GetSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_GetSessionEntityTypeRequest.Size(m) +} +func (m *GetSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *GetSessionEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.CreateSessionEntityType]. +type CreateSessionEntityTypeRequest struct { + // Required. The session to create a session entity type for. + // Format: `projects//agent/sessions/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The session entity type to create. 
+ SessionEntityType *SessionEntityType `protobuf:"bytes,2,opt,name=session_entity_type,json=sessionEntityType,proto3" json:"session_entity_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSessionEntityTypeRequest) Reset() { *m = CreateSessionEntityTypeRequest{} } +func (m *CreateSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSessionEntityTypeRequest) ProtoMessage() {} +func (*CreateSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{4} +} +func (m *CreateSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *CreateSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSessionEntityTypeRequest.Merge(dst, src) +} +func (m *CreateSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Size(m) +} +func (m *CreateSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *CreateSessionEntityTypeRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSessionEntityTypeRequest) GetSessionEntityType() *SessionEntityType { + if m != nil { + return m.SessionEntityType + } + return nil +} + +// The request message for [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.UpdateSessionEntityType]. +type UpdateSessionEntityTypeRequest struct { + // Required. The entity type to update. Format: + // `projects//agent/sessions//entityTypes/`. + SessionEntityType *SessionEntityType `protobuf:"bytes,1,opt,name=session_entity_type,json=sessionEntityType,proto3" json:"session_entity_type,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSessionEntityTypeRequest) Reset() { *m = UpdateSessionEntityTypeRequest{} } +func (m *UpdateSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSessionEntityTypeRequest) ProtoMessage() {} +func (*UpdateSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{5} +} +func (m *UpdateSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *UpdateSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSessionEntityTypeRequest.Merge(dst, src) +} +func (m *UpdateSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Size(m) +} +func (m *UpdateSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *UpdateSessionEntityTypeRequest) GetSessionEntityType() *SessionEntityType { + if m != nil { + return m.SessionEntityType + } + return nil +} + +func (m *UpdateSessionEntityTypeRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.v2.SessionEntityTypes.DeleteSessionEntityType]. +type DeleteSessionEntityTypeRequest struct { + // Required. The name of the entity type to delete. Format: + // `projects//agent/sessions//entityTypes/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSessionEntityTypeRequest) Reset() { *m = DeleteSessionEntityTypeRequest{} } +func (m *DeleteSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSessionEntityTypeRequest) ProtoMessage() {} +func (*DeleteSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_97b7b2145a2e309a, []int{6} +} +func (m *DeleteSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *DeleteSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSessionEntityTypeRequest.Merge(dst, src) +} +func (m *DeleteSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Size(m) +} +func (m *DeleteSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *DeleteSessionEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*SessionEntityType)(nil), "google.cloud.dialogflow.v2.SessionEntityType") + proto.RegisterType((*ListSessionEntityTypesRequest)(nil), "google.cloud.dialogflow.v2.ListSessionEntityTypesRequest") + proto.RegisterType((*ListSessionEntityTypesResponse)(nil), "google.cloud.dialogflow.v2.ListSessionEntityTypesResponse") + proto.RegisterType((*GetSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.GetSessionEntityTypeRequest") + proto.RegisterType((*CreateSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.CreateSessionEntityTypeRequest") + proto.RegisterType((*UpdateSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.UpdateSessionEntityTypeRequest") + proto.RegisterType((*DeleteSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2.DeleteSessionEntityTypeRequest") + proto.RegisterEnum("google.cloud.dialogflow.v2.SessionEntityType_EntityOverrideMode", SessionEntityType_EntityOverrideMode_name, SessionEntityType_EntityOverrideMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SessionEntityTypesClient is the client API for SessionEntityTypes service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SessionEntityTypesClient interface { + // Returns the list of all session entity types in the specified session. + ListSessionEntityTypes(ctx context.Context, in *ListSessionEntityTypesRequest, opts ...grpc.CallOption) (*ListSessionEntityTypesResponse, error) + // Retrieves the specified session entity type. 
+ GetSessionEntityType(ctx context.Context, in *GetSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Creates a session entity type. + // + // If the specified session entity type already exists, overrides the session + // entity type. + CreateSessionEntityType(ctx context.Context, in *CreateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Updates the specified session entity type. + UpdateSessionEntityType(ctx context.Context, in *UpdateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Deletes the specified session entity type. + DeleteSessionEntityType(ctx context.Context, in *DeleteSessionEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type sessionEntityTypesClient struct { + cc *grpc.ClientConn +} + +func NewSessionEntityTypesClient(cc *grpc.ClientConn) SessionEntityTypesClient { + return &sessionEntityTypesClient{cc} +} + +func (c *sessionEntityTypesClient) ListSessionEntityTypes(ctx context.Context, in *ListSessionEntityTypesRequest, opts ...grpc.CallOption) (*ListSessionEntityTypesResponse, error) { + out := new(ListSessionEntityTypesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.SessionEntityTypes/ListSessionEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) GetSessionEntityType(ctx context.Context, in *GetSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.SessionEntityTypes/GetSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) CreateSessionEntityType(ctx context.Context, in *CreateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.SessionEntityTypes/CreateSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) UpdateSessionEntityType(ctx context.Context, in *UpdateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.SessionEntityTypes/UpdateSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) DeleteSessionEntityType(ctx context.Context, in *DeleteSessionEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.SessionEntityTypes/DeleteSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SessionEntityTypesServer is the server API for SessionEntityTypes service. +type SessionEntityTypesServer interface { + // Returns the list of all session entity types in the specified session. + ListSessionEntityTypes(context.Context, *ListSessionEntityTypesRequest) (*ListSessionEntityTypesResponse, error) + // Retrieves the specified session entity type. + GetSessionEntityType(context.Context, *GetSessionEntityTypeRequest) (*SessionEntityType, error) + // Creates a session entity type. + // + // If the specified session entity type already exists, overrides the session + // entity type. 
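Similarly, a hedged sketch of using the SessionEntityTypesClient above to supplement a hypothetical developer entity type named "fruit" for a single session; conn and every ID in the resource names are placeholders, not values taken from this patch.

package sessionsketch

import (
	"context"
	"log"

	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
	"google.golang.org/grpc"
)

// supplementFruit adds one session-scoped entity to a hypothetical developer
// entity type called "fruit", using the SUPPLEMENT override mode so the
// session entities extend rather than replace the agent-level entities.
func supplementFruit(conn *grpc.ClientConn) {
	client := dialogflow.NewSessionEntityTypesClient(conn)
	req := &dialogflow.CreateSessionEntityTypeRequest{
		Parent: "projects/my-project/agent/sessions/my-session",
		SessionEntityType: &dialogflow.SessionEntityType{
			// The last path segment must be the display name of an existing
			// developer entity type in the same agent.
			Name:               "projects/my-project/agent/sessions/my-session/entityTypes/fruit",
			EntityOverrideMode: dialogflow.SessionEntityType_ENTITY_OVERRIDE_MODE_SUPPLEMENT,
			Entities: []*dialogflow.EntityType_Entity{
				{Value: "dragonfruit", Synonyms: []string{"dragonfruit", "pitaya"}},
			},
		},
	}
	if _, err := client.CreateSessionEntityType(context.Background(), req); err != nil {
		log.Fatal(err)
	}
}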
+ CreateSessionEntityType(context.Context, *CreateSessionEntityTypeRequest) (*SessionEntityType, error) + // Updates the specified session entity type. + UpdateSessionEntityType(context.Context, *UpdateSessionEntityTypeRequest) (*SessionEntityType, error) + // Deletes the specified session entity type. + DeleteSessionEntityType(context.Context, *DeleteSessionEntityTypeRequest) (*empty.Empty, error) +} + +func RegisterSessionEntityTypesServer(s *grpc.Server, srv SessionEntityTypesServer) { + s.RegisterService(&_SessionEntityTypes_serviceDesc, srv) +} + +func _SessionEntityTypes_ListSessionEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSessionEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).ListSessionEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.SessionEntityTypes/ListSessionEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).ListSessionEntityTypes(ctx, req.(*ListSessionEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_GetSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).GetSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.SessionEntityTypes/GetSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).GetSessionEntityType(ctx, req.(*GetSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_CreateSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).CreateSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.SessionEntityTypes/CreateSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).CreateSessionEntityType(ctx, req.(*CreateSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_UpdateSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).UpdateSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.SessionEntityTypes/UpdateSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).UpdateSessionEntityType(ctx, 
req.(*UpdateSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_DeleteSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).DeleteSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2.SessionEntityTypes/DeleteSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).DeleteSessionEntityType(ctx, req.(*DeleteSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SessionEntityTypes_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2.SessionEntityTypes", + HandlerType: (*SessionEntityTypesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListSessionEntityTypes", + Handler: _SessionEntityTypes_ListSessionEntityTypes_Handler, + }, + { + MethodName: "GetSessionEntityType", + Handler: _SessionEntityTypes_GetSessionEntityType_Handler, + }, + { + MethodName: "CreateSessionEntityType", + Handler: _SessionEntityTypes_CreateSessionEntityType_Handler, + }, + { + MethodName: "UpdateSessionEntityType", + Handler: _SessionEntityTypes_UpdateSessionEntityType_Handler, + }, + { + MethodName: "DeleteSessionEntityType", + Handler: _SessionEntityTypes_DeleteSessionEntityType_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2/session_entity_type.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/session_entity_type.proto", fileDescriptor_session_entity_type_97b7b2145a2e309a) +} + +var fileDescriptor_session_entity_type_97b7b2145a2e309a = []byte{ + // 808 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0xcd, 0x4e, 0xdb, 0x58, + 0x14, 0xc7, 0xe7, 0x9a, 0x19, 0x04, 0x17, 0xcd, 0x0c, 0xdc, 0x41, 0x21, 0x63, 0x86, 0x4c, 0xc6, + 0x33, 0x1a, 0xa1, 0xa8, 0xb5, 0xd5, 0x14, 0xb5, 0x2a, 0xb4, 0x52, 0x05, 0x31, 0x34, 0x2a, 0x81, + 0xd4, 0x09, 0x48, 0xad, 0x54, 0x59, 0x26, 0x39, 0x58, 0x2e, 0x89, 0xaf, 0xeb, 0xeb, 0xd0, 0x86, + 0x8a, 0x0d, 0xaf, 0xd0, 0x45, 0xd5, 0x6d, 0xa5, 0x2e, 0xda, 0xbe, 0x02, 0xaf, 0xd0, 0x55, 0x17, + 0x7d, 0x81, 0xee, 0x78, 0x81, 0x2e, 0x2b, 0x5f, 0x3b, 0x80, 0xb0, 0x7d, 0xab, 0xa0, 0xee, 0x7c, + 0x3f, 0xce, 0x39, 0xbf, 0xff, 0xb9, 0xe7, 0x1c, 0x19, 0x2f, 0xd8, 0x94, 0xda, 0x1d, 0xd0, 0x5a, + 0x1d, 0xda, 0x6b, 0x6b, 0x6d, 0xc7, 0xea, 0x50, 0x7b, 0xb7, 0x43, 0x9f, 0x69, 0xfb, 0x65, 0x8d, + 0x01, 0x63, 0x0e, 0x75, 0x4d, 0x70, 0x03, 0x27, 0xe8, 0x9b, 0x41, 0xdf, 0x03, 0xd5, 0xf3, 0x69, + 0x40, 0x89, 0x1c, 0x59, 0xa9, 0xdc, 0x4a, 0x3d, 0xb3, 0x52, 0xf7, 0xcb, 0xf2, 0x5f, 0xb1, 0x47, + 0xcb, 0x73, 0x34, 0xcb, 0x75, 0x69, 0x60, 0x05, 0x0e, 0x75, 0x59, 0x64, 0x29, 0xff, 0x79, 0xee, + 0xd4, 0x07, 0x46, 0x7b, 0x7e, 0x2b, 0x76, 0x2a, 0x5f, 0x11, 0xa0, 0x24, 0x10, 0xe4, 0xd9, 0xf8, + 0x36, 0x5f, 0xed, 0xf4, 0x76, 0x35, 0xe8, 0x7a, 0x41, 0x3f, 0x3e, 0x2c, 0x5e, 0x3c, 0xdc, 0x75, + 0xa0, 0xd3, 0x36, 0xbb, 0x16, 0xdb, 0x8b, 0x6e, 0x28, 0x27, 0x12, 0x9e, 0x6a, 0x44, 0xfa, 0x74, + 0xee, 0xbb, 0xd9, 0xf7, 0x80, 0x10, 0xfc, 0xb3, 0x6b, 0x75, 0x21, 0x8f, 0x8a, 0x68, 0x7e, 0xdc, + 0xe0, 0xdf, 0xc4, 0xc7, 0xd3, 0x71, 0x74, 0xba, 0x0f, 0xbe, 0xef, 0xb4, 0xc1, 
0xec, 0xd2, 0x36, + 0xe4, 0xa5, 0x22, 0x9a, 0xff, 0xad, 0x7c, 0x57, 0xcd, 0x4e, 0x85, 0x9a, 0x08, 0xa0, 0x46, 0x9f, + 0x9b, 0xb1, 0xa3, 0x1a, 0x6d, 0x83, 0x41, 0x20, 0xb1, 0x47, 0xaa, 0x78, 0x8c, 0xef, 0x3a, 0xc0, + 0xf2, 0x23, 0xc5, 0x91, 0xf9, 0x89, 0xf2, 0x55, 0x51, 0x9c, 0x44, 0x00, 0xe3, 0xd4, 0x5c, 0x39, + 0x42, 0x98, 0x24, 0xa3, 0x92, 0xff, 0x70, 0x51, 0xdf, 0x68, 0x56, 0x9b, 0x0f, 0xcd, 0xcd, 0x6d, + 0xdd, 0x30, 0xaa, 0x15, 0xdd, 0xac, 0x6d, 0x56, 0x74, 0x73, 0x6b, 0xa3, 0x51, 0xd7, 0x57, 0xaa, + 0xab, 0x55, 0xbd, 0x32, 0xf9, 0x13, 0xf9, 0x07, 0xcf, 0xa5, 0xde, 0x1a, 0xac, 0x26, 0x11, 0xf9, + 0x17, 0xff, 0x9d, 0x7a, 0xa5, 0xb1, 0x55, 0xaf, 0xaf, 0xeb, 0x35, 0x7d, 0xa3, 0x39, 0x29, 0x29, + 0x0c, 0xcf, 0xad, 0x3b, 0x2c, 0x48, 0xe4, 0x83, 0x19, 0xf0, 0xb4, 0x07, 0x2c, 0x20, 0x39, 0x3c, + 0xea, 0x59, 0x3e, 0xb8, 0x41, 0x9c, 0xfa, 0x78, 0x45, 0x66, 0xf1, 0xb8, 0x67, 0xd9, 0x60, 0x32, + 0xe7, 0x20, 0xca, 0xf8, 0x2f, 0xc6, 0x58, 0xb8, 0xd1, 0x70, 0x0e, 0x80, 0xcc, 0x61, 0xcc, 0x0f, + 0x03, 0xba, 0x07, 0x6e, 0x7e, 0x84, 0x1b, 0xf2, 0xeb, 0xcd, 0x70, 0x43, 0x79, 0x87, 0x70, 0x21, + 0x2b, 0x2a, 0xf3, 0xa8, 0xcb, 0x80, 0x98, 0x78, 0x3a, 0xa5, 0xc8, 0x59, 0x1e, 0x7d, 0x3f, 0xe7, + 0x09, 0xaf, 0x06, 0x61, 0x89, 0x40, 0xe4, 0x7f, 0xfc, 0xbb, 0x0b, 0xcf, 0x03, 0xf3, 0x1c, 0xa7, + 0xc4, 0x39, 0x7f, 0x0d, 0xb7, 0xeb, 0xa7, 0xac, 0xd7, 0xf0, 0xec, 0x1a, 0x24, 0x49, 0x07, 0xe9, + 0x49, 0xa9, 0x4b, 0xe5, 0x15, 0xc2, 0x85, 0x15, 0x1f, 0xac, 0x00, 0x32, 0xcd, 0xb2, 0xb2, 0xfa, + 0x18, 0xff, 0x91, 0x22, 0x9b, 0x93, 0x0d, 0xad, 0x7a, 0x2a, 0xa1, 0x5a, 0x39, 0x46, 0xb8, 0xb0, + 0xe5, 0xb5, 0x45, 0x64, 0x19, 0x04, 0xe8, 0xc7, 0x10, 0x90, 0x25, 0x3c, 0xd1, 0xe3, 0x00, 0xbc, + 0xe5, 0x63, 0x61, 0xf2, 0xc0, 0xed, 0x60, 0x2a, 0xa8, 0xab, 0xe1, 0x54, 0xa8, 0x59, 0x6c, 0xcf, + 0xc0, 0xd1, 0xf5, 0xf0, 0x5b, 0x59, 0xc0, 0x85, 0x0a, 0x74, 0x40, 0x40, 0x9f, 0xf2, 0x1c, 0xe5, + 0xd7, 0x63, 0x98, 0x24, 0x2b, 0x8d, 0x7c, 0x44, 0x38, 0x97, 0x5e, 0x84, 0xe4, 0x96, 0x48, 0xa6, + 0xb0, 0x5d, 0xe4, 0xc5, 0xcb, 0x98, 0x46, 0x35, 0xaf, 0xdc, 0x3e, 0xfa, 0xf4, 0xe5, 0xa5, 0x74, + 0x83, 0x2c, 0x84, 0x73, 0xf5, 0x45, 0x54, 0x11, 0x77, 0x3c, 0x9f, 0x3e, 0x81, 0x56, 0xc0, 0xb4, + 0x92, 0x66, 0xd9, 0xe0, 0x06, 0x83, 0xd9, 0xcf, 0xb4, 0xd2, 0x61, 0x3c, 0x7c, 0x23, 0xe8, 0x63, + 0x84, 0xa7, 0xd3, 0x2a, 0x95, 0xdc, 0x14, 0x21, 0x09, 0x6a, 0x5b, 0x1e, 0xee, 0xb5, 0x2f, 0xe0, + 0x87, 0xa9, 0x17, 0xc1, 0x9f, 0x67, 0xd7, 0x4a, 0x87, 0xe4, 0x33, 0xc2, 0x33, 0x19, 0x4d, 0x43, + 0x84, 0x49, 0x15, 0x77, 0xda, 0xb0, 0x22, 0x1e, 0x70, 0x11, 0xf7, 0x95, 0x4b, 0xbd, 0xc1, 0x62, + 0x5a, 0xeb, 0x90, 0x13, 0x84, 0x67, 0x32, 0x9a, 0x4e, 0xac, 0x4c, 0xdc, 0xa9, 0xc3, 0x2a, 0x03, + 0xae, 0xcc, 0x2c, 0xdf, 0xe3, 0xca, 0xd2, 0xfe, 0x20, 0x86, 0x7c, 0xb2, 0x74, 0xb5, 0x1f, 0x10, + 0x9e, 0xc9, 0x68, 0x52, 0xb1, 0x5a, 0x71, 0x67, 0xcb, 0xb9, 0xc4, 0x8c, 0xd0, 0xc3, 0xdf, 0x8a, + 0x41, 0xd5, 0x95, 0x2e, 0x55, 0x75, 0xcb, 0x6f, 0x11, 0x2e, 0xb4, 0x68, 0x57, 0xc0, 0xb5, 0x9c, + 0x4b, 0x20, 0xd5, 0x43, 0x82, 0x3a, 0x7a, 0x54, 0x89, 0xad, 0x6c, 0xda, 0xb1, 0x5c, 0x5b, 0xa5, + 0xbe, 0xad, 0xd9, 0xe0, 0x72, 0x3e, 0x2d, 0x3a, 0xb2, 0x3c, 0x87, 0xa5, 0xfd, 0x35, 0x2d, 0x9d, + 0xad, 0xbe, 0x22, 0xf4, 0x46, 0x92, 0x2a, 0xab, 0xef, 0x25, 0x79, 0x2d, 0x72, 0xb7, 0xc2, 0x21, + 0x2a, 0x67, 0x10, 0xdb, 0xe5, 0x9d, 0x51, 0xee, 0xf5, 0xfa, 0xb7, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x7b, 0xde, 0xc9, 0xc2, 0x15, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/webhook.pb.go 
b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/webhook.pb.go new file mode 100644 index 0000000..50bd135 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2/webhook.pb.go @@ -0,0 +1,328 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2/webhook.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message for a webhook call. +type WebhookRequest struct { + // The unique identifier of detectIntent request session. + // Can be used to identify end-user inside webhook implementation. + // Format: `projects//agent/sessions/`, or + // `projects//agent/environments//users//sessions/`. + Session string `protobuf:"bytes,4,opt,name=session,proto3" json:"session,omitempty"` + // The unique identifier of the response. Contains the same value as + // `[Streaming]DetectIntentResponse.response_id`. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The result of the conversational query or event processing. Contains the + // same value as `[Streaming]DetectIntentResponse.query_result`. + QueryResult *QueryResult `protobuf:"bytes,2,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // Optional. The contents of the original request that was passed to + // `[Streaming]DetectIntent` call. 
+ OriginalDetectIntentRequest *OriginalDetectIntentRequest `protobuf:"bytes,3,opt,name=original_detect_intent_request,json=originalDetectIntentRequest,proto3" json:"original_detect_intent_request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebhookRequest) Reset() { *m = WebhookRequest{} } +func (m *WebhookRequest) String() string { return proto.CompactTextString(m) } +func (*WebhookRequest) ProtoMessage() {} +func (*WebhookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_a595880048808234, []int{0} +} +func (m *WebhookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebhookRequest.Unmarshal(m, b) +} +func (m *WebhookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebhookRequest.Marshal(b, m, deterministic) +} +func (dst *WebhookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebhookRequest.Merge(dst, src) +} +func (m *WebhookRequest) XXX_Size() int { + return xxx_messageInfo_WebhookRequest.Size(m) +} +func (m *WebhookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WebhookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WebhookRequest proto.InternalMessageInfo + +func (m *WebhookRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *WebhookRequest) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *WebhookRequest) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *WebhookRequest) GetOriginalDetectIntentRequest() *OriginalDetectIntentRequest { + if m != nil { + return m.OriginalDetectIntentRequest + } + return nil +} + +// The response message for a webhook call. +type WebhookResponse struct { + // Optional. The text to be shown on the screen. This value is passed directly + // to `QueryResult.fulfillment_text`. + FulfillmentText string `protobuf:"bytes,1,opt,name=fulfillment_text,json=fulfillmentText,proto3" json:"fulfillment_text,omitempty"` + // Optional. The collection of rich messages to present to the user. This + // value is passed directly to `QueryResult.fulfillment_messages`. + FulfillmentMessages []*Intent_Message `protobuf:"bytes,2,rep,name=fulfillment_messages,json=fulfillmentMessages,proto3" json:"fulfillment_messages,omitempty"` + // Optional. This value is passed directly to `QueryResult.webhook_source`. + Source string `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` + // Optional. This value is passed directly to `QueryResult.webhook_payload`. + // See the related `fulfillment_messages[i].payload field`, which may be used + // as an alternative to this field. + // + // This field can be used for Actions on Google responses. + // It should have a structure similar to the JSON message shown here. For more + // information, see + // [Actions on Google Webhook + // Format](https://developers.google.com/actions/dialogflow/webhook) + //
{
+	//   "google": {
+	//     "expectUserResponse": true,
+	//     "richResponse": {
+	//       "items": [
+	//         {
+	//           "simpleResponse": {
+	//             "textToSpeech": "this is a simple response"
+	//           }
+	//         }
+	//       ]
+	//     }
+	//   }
+	// }
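// Illustrative sketch, not from the generated code above and not part of this patch: one
// way a fulfillment service might decode the WebhookRequest that Dialogflow POSTs and
// answer with a WebhookResponse carrying fulfillment text plus a minimal Actions on
// Google payload shaped like the JSON documented above. The handler name, route, and
// port are assumptions; jsonpb is used because the webhook body is proto-JSON.
package main

import (
	"log"
	"net/http"

	"github.com/golang/protobuf/jsonpb"
	_struct "github.com/golang/protobuf/ptypes/struct"
	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

func fulfillmentHandler(w http.ResponseWriter, r *http.Request) {
	var req dialogflow.WebhookRequest
	if err := jsonpb.Unmarshal(r.Body, &req); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	// Echo the recognized query back and ask Actions on Google to keep the mic open.
	resp := &dialogflow.WebhookResponse{
		FulfillmentText: "You said: " + req.GetQueryResult().GetQueryText(),
		Payload: &_struct.Struct{
			Fields: map[string]*_struct.Value{
				"google": {Kind: &_struct.Value_StructValue{StructValue: &_struct.Struct{
					Fields: map[string]*_struct.Value{
						"expectUserResponse": {Kind: &_struct.Value_BoolValue{BoolValue: true}},
					},
				}}},
			},
		},
	}

	w.Header().Set("Content-Type", "application/json")
	if err := (&jsonpb.Marshaler{OrigName: true}).Marshal(w, resp); err != nil {
		log.Printf("marshal response: %v", err)
	}
}

func main() {
	http.HandleFunc("/webhook", fulfillmentHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}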
+ Payload *_struct.Struct `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` + // Optional. The collection of output contexts. This value is passed directly + // to `QueryResult.output_contexts`. + OutputContexts []*Context `protobuf:"bytes,5,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // Optional. Makes the platform immediately invoke another `DetectIntent` call + // internally with the specified event as input. + FollowupEventInput *EventInput `protobuf:"bytes,6,opt,name=followup_event_input,json=followupEventInput,proto3" json:"followup_event_input,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebhookResponse) Reset() { *m = WebhookResponse{} } +func (m *WebhookResponse) String() string { return proto.CompactTextString(m) } +func (*WebhookResponse) ProtoMessage() {} +func (*WebhookResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_a595880048808234, []int{1} +} +func (m *WebhookResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebhookResponse.Unmarshal(m, b) +} +func (m *WebhookResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebhookResponse.Marshal(b, m, deterministic) +} +func (dst *WebhookResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebhookResponse.Merge(dst, src) +} +func (m *WebhookResponse) XXX_Size() int { + return xxx_messageInfo_WebhookResponse.Size(m) +} +func (m *WebhookResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WebhookResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WebhookResponse proto.InternalMessageInfo + +func (m *WebhookResponse) GetFulfillmentText() string { + if m != nil { + return m.FulfillmentText + } + return "" +} + +func (m *WebhookResponse) GetFulfillmentMessages() []*Intent_Message { + if m != nil { + return m.FulfillmentMessages + } + return nil +} + +func (m *WebhookResponse) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *WebhookResponse) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func (m *WebhookResponse) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *WebhookResponse) GetFollowupEventInput() *EventInput { + if m != nil { + return m.FollowupEventInput + } + return nil +} + +// Represents the contents of the original request that was passed to +// the `[Streaming]DetectIntent` call. +type OriginalDetectIntentRequest struct { + // The source of this request, e.g., `google`, `facebook`, `slack`. It is set + // by Dialogflow-owned servers. + Source string `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // Optional. The version of the protocol used for this request. + // This field is AoG-specific. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // Optional. This field is set to the value of the `QueryParameters.payload` + // field passed in the request. Some integrations that query a Dialogflow + // agent may provide additional information in the payload. + // + // In particular for the Telephony Gateway this field has the form: + //
{
+	//  "telephony": {
+	//    "caller_id": "+18558363987"
+	//  }
+	// }
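// Illustrative sketch, not from the generated code above and not part of this patch:
// pulling the Telephony Gateway caller ID documented above out of
// OriginalDetectIntentRequest.Payload. The helper name callerID is an assumption.
package main

import (
	"fmt"

	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2"
)

// callerID digs "telephony.caller_id" out of the original request payload and returns
// "" when the request did not arrive through the Telephony Gateway. The structpb
// getters are nil-safe, so missing keys simply fall through to the zero value.
func callerID(req *dialogflow.WebhookRequest) string {
	payload := req.GetOriginalDetectIntentRequest().GetPayload()
	if payload == nil {
		return ""
	}
	telephony := payload.GetFields()["telephony"].GetStructValue()
	if telephony == nil {
		return ""
	}
	return telephony.GetFields()["caller_id"].GetStringValue()
}

func main() {
	var req dialogflow.WebhookRequest // normally decoded from the webhook POST body
	fmt.Printf("caller: %q\n", callerID(&req))
}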
+ // Note: The caller ID field (`caller_id`) will be redacted for Standard + // Edition agents and populated with the caller ID in [E.164 + // format](https://en.wikipedia.org/wiki/E.164) for Enterprise Edition agents. + Payload *_struct.Struct `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OriginalDetectIntentRequest) Reset() { *m = OriginalDetectIntentRequest{} } +func (m *OriginalDetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*OriginalDetectIntentRequest) ProtoMessage() {} +func (*OriginalDetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_a595880048808234, []int{2} +} +func (m *OriginalDetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OriginalDetectIntentRequest.Unmarshal(m, b) +} +func (m *OriginalDetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OriginalDetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *OriginalDetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_OriginalDetectIntentRequest.Merge(dst, src) +} +func (m *OriginalDetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_OriginalDetectIntentRequest.Size(m) +} +func (m *OriginalDetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_OriginalDetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_OriginalDetectIntentRequest proto.InternalMessageInfo + +func (m *OriginalDetectIntentRequest) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *OriginalDetectIntentRequest) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *OriginalDetectIntentRequest) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func init() { + proto.RegisterType((*WebhookRequest)(nil), "google.cloud.dialogflow.v2.WebhookRequest") + proto.RegisterType((*WebhookResponse)(nil), "google.cloud.dialogflow.v2.WebhookResponse") + proto.RegisterType((*OriginalDetectIntentRequest)(nil), "google.cloud.dialogflow.v2.OriginalDetectIntentRequest") +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2/webhook.proto", fileDescriptor_webhook_a595880048808234) +} + +var fileDescriptor_webhook_a595880048808234 = []byte{ + // 553 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x41, 0x6f, 0xd3, 0x4c, + 0x10, 0x86, 0x65, 0xe7, 0xfb, 0x12, 0x75, 0x53, 0x35, 0x68, 0xa9, 0xc0, 0x4a, 0x51, 0x89, 0x82, + 0x44, 0x03, 0x07, 0x5b, 0x98, 0x03, 0x07, 0x6e, 0x6d, 0x00, 0x05, 0x81, 0x28, 0x06, 0x01, 0x42, + 0x42, 0x96, 0x63, 0x6f, 0xcc, 0x8a, 0xcd, 0x8e, 0xe3, 0xdd, 0x4d, 0x5a, 0x89, 0x13, 0x3f, 0x80, + 0x1b, 0x27, 0x6e, 0x1c, 0xf9, 0x85, 0x1c, 0x91, 0x77, 0xd7, 0xc4, 0x20, 0x6a, 0x71, 0x9c, 0x99, + 0xc7, 0xef, 0xcc, 0xbc, 0x3b, 0x46, 0x93, 0x1c, 0x20, 0x67, 0x24, 0x48, 0x19, 0xa8, 0x2c, 0xc8, + 0x68, 0xc2, 0x20, 0x5f, 0x30, 0xd8, 0x04, 0xeb, 0x30, 0xd8, 0x90, 0xf9, 0x7b, 0x80, 0x0f, 0x7e, + 0x51, 0x82, 0x04, 0x3c, 0x34, 0xa4, 0xaf, 0x49, 0x7f, 0x4b, 0xfa, 0xeb, 0x70, 0xd8, 0xa6, 0x92, + 0x02, 0x97, 0xe4, 0x4c, 0x1a, 0x95, 0xe1, 0x51, 0x0b, 0x49, 0xb9, 0x24, 0xbc, 0x06, 0xdb, 0x24, + 0x05, 0x11, 0x82, 0x02, 0xb7, 0xe4, 0x35, 0x4b, 0xea, 0x68, 0xae, 0x16, 0x81, 0x90, 0xa5, 0x4a, + 0xe5, 0x1f, 0xd5, 0xa4, 0xa0, 0x41, 0xc2, 0x39, 0xc8, 0x44, 0x52, 0xe0, 0xc2, 
0x54, 0xc7, 0x5f, + 0x5c, 0xb4, 0xf7, 0xda, 0xac, 0x19, 0x91, 0x95, 0x22, 0x42, 0x62, 0x0f, 0xf5, 0xac, 0xbe, 0xf7, + 0xdf, 0xc8, 0x99, 0xec, 0x44, 0x75, 0x88, 0xaf, 0xa3, 0x7e, 0x49, 0x44, 0x01, 0x5c, 0x90, 0x98, + 0x66, 0x9e, 0xa3, 0xab, 0xa8, 0x4e, 0xcd, 0x32, 0xfc, 0x18, 0xed, 0xae, 0x14, 0x29, 0xcf, 0xe3, + 0x92, 0x08, 0xc5, 0xa4, 0xe7, 0x8e, 0x9c, 0x49, 0x3f, 0x3c, 0xf2, 0x2f, 0x76, 0xce, 0x7f, 0x5e, + 0xf1, 0x91, 0xc6, 0xa3, 0xfe, 0x6a, 0x1b, 0xe0, 0x8f, 0xe8, 0x10, 0x4a, 0x9a, 0x53, 0x9e, 0xb0, + 0x38, 0x23, 0x92, 0xa4, 0x32, 0x36, 0xfe, 0xc4, 0xa5, 0x19, 0xd4, 0xeb, 0x68, 0xf5, 0x7b, 0x6d, + 0xea, 0xcf, 0xac, 0xc2, 0x54, 0x0b, 0xcc, 0xf4, 0xf7, 0x76, 0xcf, 0xe8, 0x00, 0x2e, 0x2e, 0x8e, + 0x3f, 0x77, 0xd0, 0xe0, 0x97, 0x2f, 0x66, 0x3f, 0x7c, 0x0b, 0x5d, 0x5a, 0x28, 0xb6, 0xa0, 0x8c, + 0x2d, 0xab, 0x31, 0xaa, 0x47, 0xb5, 0x1e, 0x0c, 0x1a, 0xf9, 0x97, 0xe4, 0x4c, 0xe2, 0x77, 0x68, + 0xbf, 0x89, 0x2e, 0x89, 0x10, 0x49, 0x4e, 0x84, 0xe7, 0x8e, 0x3a, 0x93, 0x7e, 0x78, 0xbb, 0x6d, + 0x64, 0x33, 0x87, 0xff, 0xd4, 0x7c, 0x12, 0x5d, 0x6e, 0xe8, 0xd8, 0x9c, 0xc0, 0x57, 0x50, 0x57, + 0x80, 0x2a, 0x53, 0xa2, 0x3d, 0xd8, 0x89, 0x6c, 0x84, 0xef, 0xa0, 0x5e, 0x91, 0x9c, 0x33, 0x48, + 0x32, 0xfd, 0x74, 0xfd, 0xf0, 0x6a, 0xdd, 0xa9, 0xbe, 0x0d, 0xff, 0x85, 0xbe, 0x8d, 0xa8, 0xe6, + 0xf0, 0x13, 0x34, 0x00, 0x25, 0x0b, 0x25, 0x63, 0x7b, 0xa7, 0xc2, 0xfb, 0x5f, 0x0f, 0x79, 0xa3, + 0x6d, 0xc8, 0x13, 0xc3, 0x46, 0x7b, 0xe6, 0x5b, 0x1b, 0x0a, 0xfc, 0x06, 0xed, 0x2f, 0x80, 0x31, + 0xd8, 0xa8, 0x22, 0x26, 0xeb, 0x6a, 0x75, 0xca, 0x0b, 0x25, 0xbd, 0xae, 0x9e, 0xe6, 0x66, 0x9b, + 0xe4, 0x83, 0x0a, 0x9f, 0x55, 0x74, 0x84, 0x6b, 0x8d, 0x6d, 0x6e, 0xfc, 0xc9, 0x41, 0x07, 0x2d, + 0xaf, 0xd9, 0xb0, 0xc4, 0xf9, 0xcd, 0x12, 0x0f, 0xf5, 0xd6, 0xa4, 0xd4, 0xd7, 0xec, 0x9a, 0x6b, + 0xb6, 0x61, 0xd3, 0xac, 0xce, 0xbf, 0x99, 0x75, 0xfc, 0xd5, 0x41, 0x87, 0x29, 0x2c, 0x5b, 0xd6, + 0x38, 0xde, 0xb5, 0x57, 0x73, 0x5a, 0x69, 0x9c, 0x3a, 0x6f, 0xa7, 0x96, 0xcd, 0x81, 0x25, 0x3c, + 0xf7, 0xa1, 0xcc, 0x83, 0x9c, 0x70, 0xdd, 0x21, 0x30, 0xa5, 0xa4, 0xa0, 0xe2, 0x6f, 0x7f, 0xf9, + 0xfd, 0x6d, 0xf4, 0xc3, 0x71, 0xbe, 0xb9, 0xee, 0xf4, 0xe1, 0x77, 0x77, 0xf8, 0xc8, 0xc8, 0x9d, + 0xe8, 0xd6, 0xd3, 0x6d, 0xeb, 0x57, 0xe1, 0xbc, 0xab, 0x55, 0xef, 0xfe, 0x0c, 0x00, 0x00, 0xff, + 0xff, 0xbc, 0x1f, 0x79, 0x23, 0xd3, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/agent.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/agent.pb.go new file mode 100644 index 0000000..d62febb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/agent.pb.go @@ -0,0 +1,1270 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/agent.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Match mode determines how intents are detected from user queries. +type Agent_MatchMode int32 + +const ( + // Not specified. + Agent_MATCH_MODE_UNSPECIFIED Agent_MatchMode = 0 + // Best for agents with a small number of examples in intents and/or wide + // use of templates syntax and composite entities. + Agent_MATCH_MODE_HYBRID Agent_MatchMode = 1 + // Can be used for agents with a large number of examples in intents, + // especially the ones using @sys.any or very large developer entities. + Agent_MATCH_MODE_ML_ONLY Agent_MatchMode = 2 +) + +var Agent_MatchMode_name = map[int32]string{ + 0: "MATCH_MODE_UNSPECIFIED", + 1: "MATCH_MODE_HYBRID", + 2: "MATCH_MODE_ML_ONLY", +} +var Agent_MatchMode_value = map[string]int32{ + "MATCH_MODE_UNSPECIFIED": 0, + "MATCH_MODE_HYBRID": 1, + "MATCH_MODE_ML_ONLY": 2, +} + +func (x Agent_MatchMode) String() string { + return proto.EnumName(Agent_MatchMode_name, int32(x)) +} +func (Agent_MatchMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{0, 0} +} + +// Represents a conversational agent. +type Agent struct { + // Required. The project of this agent. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The name of this agent. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. The default language of the agent as a language tag. See + // [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. This field cannot be + // set by the `Update` method. + DefaultLanguageCode string `protobuf:"bytes,3,opt,name=default_language_code,json=defaultLanguageCode,proto3" json:"default_language_code,omitempty"` + // Optional. The list of all languages supported by this agent (except for the + // `default_language_code`). + SupportedLanguageCodes []string `protobuf:"bytes,4,rep,name=supported_language_codes,json=supportedLanguageCodes,proto3" json:"supported_language_codes,omitempty"` + // Required. The time zone of this agent from the + // [time zone database](https://www.iana.org/time-zones), e.g., + // America/New_York, Europe/Paris. + TimeZone string `protobuf:"bytes,5,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Optional. The description of this agent. + // The maximum length is 500 characters. If exceeded, the request is rejected. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The URI of the agent's avatar. + // Avatars are used throughout the Dialogflow console and in the self-hosted + // [Web + // Demo](https://cloud.google.com/dialogflow-enterprise/docs/integrations/web-demo) + // integration. + AvatarUri string `protobuf:"bytes,7,opt,name=avatar_uri,json=avatarUri,proto3" json:"avatar_uri,omitempty"` + // Optional. Determines whether this agent should log conversation queries. 
+ EnableLogging bool `protobuf:"varint,8,opt,name=enable_logging,json=enableLogging,proto3" json:"enable_logging,omitempty"` + // Optional. Determines how intents are detected from user queries. + MatchMode Agent_MatchMode `protobuf:"varint,9,opt,name=match_mode,json=matchMode,proto3,enum=google.cloud.dialogflow.v2beta1.Agent_MatchMode" json:"match_mode,omitempty"` + // Optional. To filter out false positive results and still get variety in + // matched natural language inputs for your agent, you can tune the machine + // learning classification threshold. If the returned score value is less than + // the threshold value, then a fallback intent will be triggered or, if there + // are no fallback intents defined, no intent will be triggered. The score + // values range from 0.0 (completely uncertain) to 1.0 (completely certain). + // If set to 0.0, the default of 0.3 is used. + ClassificationThreshold float32 `protobuf:"fixed32,10,opt,name=classification_threshold,json=classificationThreshold,proto3" json:"classification_threshold,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Agent) Reset() { *m = Agent{} } +func (m *Agent) String() string { return proto.CompactTextString(m) } +func (*Agent) ProtoMessage() {} +func (*Agent) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{0} +} +func (m *Agent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Agent.Unmarshal(m, b) +} +func (m *Agent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Agent.Marshal(b, m, deterministic) +} +func (dst *Agent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Agent.Merge(dst, src) +} +func (m *Agent) XXX_Size() int { + return xxx_messageInfo_Agent.Size(m) +} +func (m *Agent) XXX_DiscardUnknown() { + xxx_messageInfo_Agent.DiscardUnknown(m) +} + +var xxx_messageInfo_Agent proto.InternalMessageInfo + +func (m *Agent) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Agent) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Agent) GetDefaultLanguageCode() string { + if m != nil { + return m.DefaultLanguageCode + } + return "" +} + +func (m *Agent) GetSupportedLanguageCodes() []string { + if m != nil { + return m.SupportedLanguageCodes + } + return nil +} + +func (m *Agent) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *Agent) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Agent) GetAvatarUri() string { + if m != nil { + return m.AvatarUri + } + return "" +} + +func (m *Agent) GetEnableLogging() bool { + if m != nil { + return m.EnableLogging + } + return false +} + +func (m *Agent) GetMatchMode() Agent_MatchMode { + if m != nil { + return m.MatchMode + } + return Agent_MATCH_MODE_UNSPECIFIED +} + +func (m *Agent) GetClassificationThreshold() float32 { + if m != nil { + return m.ClassificationThreshold + } + return 0 +} + +// The request message for [Agents.GetAgent][google.cloud.dialogflow.v2beta1.Agents.GetAgent]. +type GetAgentRequest struct { + // Required. The project that the agent to fetch is associated with. + // Format: `projects/`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAgentRequest) Reset() { *m = GetAgentRequest{} } +func (m *GetAgentRequest) String() string { return proto.CompactTextString(m) } +func (*GetAgentRequest) ProtoMessage() {} +func (*GetAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{1} +} +func (m *GetAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAgentRequest.Unmarshal(m, b) +} +func (m *GetAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAgentRequest.Marshal(b, m, deterministic) +} +func (dst *GetAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAgentRequest.Merge(dst, src) +} +func (m *GetAgentRequest) XXX_Size() int { + return xxx_messageInfo_GetAgentRequest.Size(m) +} +func (m *GetAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAgentRequest proto.InternalMessageInfo + +func (m *GetAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// The request message for [Agents.SearchAgents][google.cloud.dialogflow.v2beta1.Agents.SearchAgents]. +type SearchAgentsRequest struct { + // Required. The project to list agents from. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAgentsRequest) Reset() { *m = SearchAgentsRequest{} } +func (m *SearchAgentsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchAgentsRequest) ProtoMessage() {} +func (*SearchAgentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{2} +} +func (m *SearchAgentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAgentsRequest.Unmarshal(m, b) +} +func (m *SearchAgentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAgentsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchAgentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAgentsRequest.Merge(dst, src) +} +func (m *SearchAgentsRequest) XXX_Size() int { + return xxx_messageInfo_SearchAgentsRequest.Size(m) +} +func (m *SearchAgentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAgentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAgentsRequest proto.InternalMessageInfo + +func (m *SearchAgentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *SearchAgentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchAgentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Agents.SearchAgents][google.cloud.dialogflow.v2beta1.Agents.SearchAgents]. 
+type SearchAgentsResponse struct { + // The list of agents. There will be a maximum number of items returned based + // on the page_size field in the request. + Agents []*Agent `protobuf:"bytes,1,rep,name=agents,proto3" json:"agents,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAgentsResponse) Reset() { *m = SearchAgentsResponse{} } +func (m *SearchAgentsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchAgentsResponse) ProtoMessage() {} +func (*SearchAgentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{3} +} +func (m *SearchAgentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAgentsResponse.Unmarshal(m, b) +} +func (m *SearchAgentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAgentsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchAgentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAgentsResponse.Merge(dst, src) +} +func (m *SearchAgentsResponse) XXX_Size() int { + return xxx_messageInfo_SearchAgentsResponse.Size(m) +} +func (m *SearchAgentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAgentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAgentsResponse proto.InternalMessageInfo + +func (m *SearchAgentsResponse) GetAgents() []*Agent { + if m != nil { + return m.Agents + } + return nil +} + +func (m *SearchAgentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Agents.TrainAgent][google.cloud.dialogflow.v2beta1.Agents.TrainAgent]. +type TrainAgentRequest struct { + // Required. The project that the agent to train is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrainAgentRequest) Reset() { *m = TrainAgentRequest{} } +func (m *TrainAgentRequest) String() string { return proto.CompactTextString(m) } +func (*TrainAgentRequest) ProtoMessage() {} +func (*TrainAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{4} +} +func (m *TrainAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrainAgentRequest.Unmarshal(m, b) +} +func (m *TrainAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrainAgentRequest.Marshal(b, m, deterministic) +} +func (dst *TrainAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrainAgentRequest.Merge(dst, src) +} +func (m *TrainAgentRequest) XXX_Size() int { + return xxx_messageInfo_TrainAgentRequest.Size(m) +} +func (m *TrainAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TrainAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TrainAgentRequest proto.InternalMessageInfo + +func (m *TrainAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// The request message for [Agents.ExportAgent][google.cloud.dialogflow.v2beta1.Agents.ExportAgent]. +type ExportAgentRequest struct { + // Required. 
The project that the agent to export is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The + // [Google Cloud Storage](https://cloud.google.com/storage/docs/) + // URI to export the agent to. + // The format of this URI must be `gs:///`. + // If left unspecified, the serialized agent is returned inline. + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3" json:"agent_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAgentRequest) Reset() { *m = ExportAgentRequest{} } +func (m *ExportAgentRequest) String() string { return proto.CompactTextString(m) } +func (*ExportAgentRequest) ProtoMessage() {} +func (*ExportAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{5} +} +func (m *ExportAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAgentRequest.Unmarshal(m, b) +} +func (m *ExportAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAgentRequest.Marshal(b, m, deterministic) +} +func (dst *ExportAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAgentRequest.Merge(dst, src) +} +func (m *ExportAgentRequest) XXX_Size() int { + return xxx_messageInfo_ExportAgentRequest.Size(m) +} +func (m *ExportAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAgentRequest proto.InternalMessageInfo + +func (m *ExportAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ExportAgentRequest) GetAgentUri() string { + if m != nil { + return m.AgentUri + } + return "" +} + +// The response message for [Agents.ExportAgent][google.cloud.dialogflow.v2beta1.Agents.ExportAgent]. +type ExportAgentResponse struct { + // Required. The exported agent. 
+ // + // Types that are valid to be assigned to Agent: + // *ExportAgentResponse_AgentUri + // *ExportAgentResponse_AgentContent + Agent isExportAgentResponse_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportAgentResponse) Reset() { *m = ExportAgentResponse{} } +func (m *ExportAgentResponse) String() string { return proto.CompactTextString(m) } +func (*ExportAgentResponse) ProtoMessage() {} +func (*ExportAgentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{6} +} +func (m *ExportAgentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportAgentResponse.Unmarshal(m, b) +} +func (m *ExportAgentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportAgentResponse.Marshal(b, m, deterministic) +} +func (dst *ExportAgentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportAgentResponse.Merge(dst, src) +} +func (m *ExportAgentResponse) XXX_Size() int { + return xxx_messageInfo_ExportAgentResponse.Size(m) +} +func (m *ExportAgentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportAgentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportAgentResponse proto.InternalMessageInfo + +type isExportAgentResponse_Agent interface { + isExportAgentResponse_Agent() +} + +type ExportAgentResponse_AgentUri struct { + AgentUri string `protobuf:"bytes,1,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type ExportAgentResponse_AgentContent struct { + AgentContent []byte `protobuf:"bytes,2,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*ExportAgentResponse_AgentUri) isExportAgentResponse_Agent() {} + +func (*ExportAgentResponse_AgentContent) isExportAgentResponse_Agent() {} + +func (m *ExportAgentResponse) GetAgent() isExportAgentResponse_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *ExportAgentResponse) GetAgentUri() string { + if x, ok := m.GetAgent().(*ExportAgentResponse_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *ExportAgentResponse) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*ExportAgentResponse_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ExportAgentResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExportAgentResponse_OneofMarshaler, _ExportAgentResponse_OneofUnmarshaler, _ExportAgentResponse_OneofSizer, []interface{}{ + (*ExportAgentResponse_AgentUri)(nil), + (*ExportAgentResponse_AgentContent)(nil), + } +} + +func _ExportAgentResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExportAgentResponse) + // agent + switch x := m.Agent.(type) { + case *ExportAgentResponse_AgentUri: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *ExportAgentResponse_AgentContent: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("ExportAgentResponse.Agent has unexpected type %T", x) + } + return nil +} + +func _ExportAgentResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExportAgentResponse) + switch tag { + case 1: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &ExportAgentResponse_AgentUri{x} + return true, err + case 2: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &ExportAgentResponse_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _ExportAgentResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExportAgentResponse) + // agent + switch x := m.Agent.(type) { + case *ExportAgentResponse_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *ExportAgentResponse_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [Agents.ImportAgent][google.cloud.dialogflow.v2beta1.Agents.ImportAgent]. +type ImportAgentRequest struct { + // Required. The project that the agent to import is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The agent to import. 
+ // + // Types that are valid to be assigned to Agent: + // *ImportAgentRequest_AgentUri + // *ImportAgentRequest_AgentContent + Agent isImportAgentRequest_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportAgentRequest) Reset() { *m = ImportAgentRequest{} } +func (m *ImportAgentRequest) String() string { return proto.CompactTextString(m) } +func (*ImportAgentRequest) ProtoMessage() {} +func (*ImportAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{7} +} +func (m *ImportAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportAgentRequest.Unmarshal(m, b) +} +func (m *ImportAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportAgentRequest.Marshal(b, m, deterministic) +} +func (dst *ImportAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportAgentRequest.Merge(dst, src) +} +func (m *ImportAgentRequest) XXX_Size() int { + return xxx_messageInfo_ImportAgentRequest.Size(m) +} +func (m *ImportAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportAgentRequest proto.InternalMessageInfo + +func (m *ImportAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isImportAgentRequest_Agent interface { + isImportAgentRequest_Agent() +} + +type ImportAgentRequest_AgentUri struct { + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type ImportAgentRequest_AgentContent struct { + AgentContent []byte `protobuf:"bytes,3,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*ImportAgentRequest_AgentUri) isImportAgentRequest_Agent() {} + +func (*ImportAgentRequest_AgentContent) isImportAgentRequest_Agent() {} + +func (m *ImportAgentRequest) GetAgent() isImportAgentRequest_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *ImportAgentRequest) GetAgentUri() string { + if x, ok := m.GetAgent().(*ImportAgentRequest_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *ImportAgentRequest) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*ImportAgentRequest_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImportAgentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImportAgentRequest_OneofMarshaler, _ImportAgentRequest_OneofUnmarshaler, _ImportAgentRequest_OneofSizer, []interface{}{ + (*ImportAgentRequest_AgentUri)(nil), + (*ImportAgentRequest_AgentContent)(nil), + } +} + +func _ImportAgentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImportAgentRequest) + // agent + switch x := m.Agent.(type) { + case *ImportAgentRequest_AgentUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *ImportAgentRequest_AgentContent: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("ImportAgentRequest.Agent has unexpected type %T", x) + } + return nil +} + +func _ImportAgentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImportAgentRequest) + switch tag { + case 2: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &ImportAgentRequest_AgentUri{x} + return true, err + case 3: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &ImportAgentRequest_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _ImportAgentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImportAgentRequest) + // agent + switch x := m.Agent.(type) { + case *ImportAgentRequest_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *ImportAgentRequest_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [Agents.RestoreAgent][google.cloud.dialogflow.v2beta1.Agents.RestoreAgent]. +type RestoreAgentRequest struct { + // Required. The project that the agent to restore is associated with. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The agent to restore. 
+ // + // Types that are valid to be assigned to Agent: + // *RestoreAgentRequest_AgentUri + // *RestoreAgentRequest_AgentContent + Agent isRestoreAgentRequest_Agent `protobuf_oneof:"agent"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestoreAgentRequest) Reset() { *m = RestoreAgentRequest{} } +func (m *RestoreAgentRequest) String() string { return proto.CompactTextString(m) } +func (*RestoreAgentRequest) ProtoMessage() {} +func (*RestoreAgentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_agent_b87bd04ebcb8ded2, []int{8} +} +func (m *RestoreAgentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestoreAgentRequest.Unmarshal(m, b) +} +func (m *RestoreAgentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestoreAgentRequest.Marshal(b, m, deterministic) +} +func (dst *RestoreAgentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestoreAgentRequest.Merge(dst, src) +} +func (m *RestoreAgentRequest) XXX_Size() int { + return xxx_messageInfo_RestoreAgentRequest.Size(m) +} +func (m *RestoreAgentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RestoreAgentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RestoreAgentRequest proto.InternalMessageInfo + +func (m *RestoreAgentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isRestoreAgentRequest_Agent interface { + isRestoreAgentRequest_Agent() +} + +type RestoreAgentRequest_AgentUri struct { + AgentUri string `protobuf:"bytes,2,opt,name=agent_uri,json=agentUri,proto3,oneof"` +} + +type RestoreAgentRequest_AgentContent struct { + AgentContent []byte `protobuf:"bytes,3,opt,name=agent_content,json=agentContent,proto3,oneof"` +} + +func (*RestoreAgentRequest_AgentUri) isRestoreAgentRequest_Agent() {} + +func (*RestoreAgentRequest_AgentContent) isRestoreAgentRequest_Agent() {} + +func (m *RestoreAgentRequest) GetAgent() isRestoreAgentRequest_Agent { + if m != nil { + return m.Agent + } + return nil +} + +func (m *RestoreAgentRequest) GetAgentUri() string { + if x, ok := m.GetAgent().(*RestoreAgentRequest_AgentUri); ok { + return x.AgentUri + } + return "" +} + +func (m *RestoreAgentRequest) GetAgentContent() []byte { + if x, ok := m.GetAgent().(*RestoreAgentRequest_AgentContent); ok { + return x.AgentContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RestoreAgentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RestoreAgentRequest_OneofMarshaler, _RestoreAgentRequest_OneofUnmarshaler, _RestoreAgentRequest_OneofSizer, []interface{}{ + (*RestoreAgentRequest_AgentUri)(nil), + (*RestoreAgentRequest_AgentContent)(nil), + } +} + +func _RestoreAgentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RestoreAgentRequest) + // agent + switch x := m.Agent.(type) { + case *RestoreAgentRequest_AgentUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AgentUri) + case *RestoreAgentRequest_AgentContent: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AgentContent) + case nil: + default: + return fmt.Errorf("RestoreAgentRequest.Agent has unexpected type %T", x) + } + return nil +} + +func _RestoreAgentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RestoreAgentRequest) + switch tag { + case 2: // agent.agent_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Agent = &RestoreAgentRequest_AgentUri{x} + return true, err + case 3: // agent.agent_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Agent = &RestoreAgentRequest_AgentContent{x} + return true, err + default: + return false, nil + } +} + +func _RestoreAgentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RestoreAgentRequest) + // agent + switch x := m.Agent.(type) { + case *RestoreAgentRequest_AgentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentUri))) + n += len(x.AgentUri) + case *RestoreAgentRequest_AgentContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AgentContent))) + n += len(x.AgentContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Agent)(nil), "google.cloud.dialogflow.v2beta1.Agent") + proto.RegisterType((*GetAgentRequest)(nil), "google.cloud.dialogflow.v2beta1.GetAgentRequest") + proto.RegisterType((*SearchAgentsRequest)(nil), "google.cloud.dialogflow.v2beta1.SearchAgentsRequest") + proto.RegisterType((*SearchAgentsResponse)(nil), "google.cloud.dialogflow.v2beta1.SearchAgentsResponse") + proto.RegisterType((*TrainAgentRequest)(nil), "google.cloud.dialogflow.v2beta1.TrainAgentRequest") + proto.RegisterType((*ExportAgentRequest)(nil), "google.cloud.dialogflow.v2beta1.ExportAgentRequest") + proto.RegisterType((*ExportAgentResponse)(nil), "google.cloud.dialogflow.v2beta1.ExportAgentResponse") + proto.RegisterType((*ImportAgentRequest)(nil), "google.cloud.dialogflow.v2beta1.ImportAgentRequest") + proto.RegisterType((*RestoreAgentRequest)(nil), "google.cloud.dialogflow.v2beta1.RestoreAgentRequest") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.Agent_MatchMode", Agent_MatchMode_name, Agent_MatchMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AgentsClient is the client API for Agents service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AgentsClient interface { + // Retrieves the specified agent. + GetAgent(ctx context.Context, in *GetAgentRequest, opts ...grpc.CallOption) (*Agent, error) + // Returns the list of agents. + // + // Since there is at most one conversational agent per project, this method is + // useful primarily for listing all agents across projects the caller has + // access to. One can achieve that with a wildcard project collection id "-". + // Refer to [List + // Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). + SearchAgents(ctx context.Context, in *SearchAgentsRequest, opts ...grpc.CallOption) (*SearchAgentsResponse, error) + // Trains the specified agent. + // + // + // Operation + TrainAgent(ctx context.Context, in *TrainAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports the specified agent to a ZIP file. + // + // + // Operation + ExportAgent(ctx context.Context, in *ExportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports the specified agent from a ZIP file. + // + // Uploads new intents and entity types without deleting the existing ones. + // Intents and entity types with the same name are replaced with the new + // versions from ImportAgentRequest. + // + // + // Operation + ImportAgent(ctx context.Context, in *ImportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Restores the specified agent from a ZIP file. + // + // Replaces the current agent version with a new one. All the intents and + // entity types in the older version are deleted. + // + // + // Operation + RestoreAgent(ctx context.Context, in *RestoreAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type agentsClient struct { + cc *grpc.ClientConn +} + +func NewAgentsClient(cc *grpc.ClientConn) AgentsClient { + return &agentsClient{cc} +} + +func (c *agentsClient) GetAgent(ctx context.Context, in *GetAgentRequest, opts ...grpc.CallOption) (*Agent, error) { + out := new(Agent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/GetAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) SearchAgents(ctx context.Context, in *SearchAgentsRequest, opts ...grpc.CallOption) (*SearchAgentsResponse, error) { + out := new(SearchAgentsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/SearchAgents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) TrainAgent(ctx context.Context, in *TrainAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/TrainAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) ExportAgent(ctx context.Context, in *ExportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/ExportAgent", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) ImportAgent(ctx context.Context, in *ImportAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/ImportAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *agentsClient) RestoreAgent(ctx context.Context, in *RestoreAgentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Agents/RestoreAgent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AgentsServer is the server API for Agents service. +type AgentsServer interface { + // Retrieves the specified agent. + GetAgent(context.Context, *GetAgentRequest) (*Agent, error) + // Returns the list of agents. + // + // Since there is at most one conversational agent per project, this method is + // useful primarily for listing all agents across projects the caller has + // access to. One can achieve that with a wildcard project collection id "-". + // Refer to [List + // Sub-Collections](https://cloud.google.com/apis/design/design_patterns#list_sub-collections). + SearchAgents(context.Context, *SearchAgentsRequest) (*SearchAgentsResponse, error) + // Trains the specified agent. + // + // + // Operation + TrainAgent(context.Context, *TrainAgentRequest) (*longrunning.Operation, error) + // Exports the specified agent to a ZIP file. + // + // + // Operation + ExportAgent(context.Context, *ExportAgentRequest) (*longrunning.Operation, error) + // Imports the specified agent from a ZIP file. + // + // Uploads new intents and entity types without deleting the existing ones. + // Intents and entity types with the same name are replaced with the new + // versions from ImportAgentRequest. + // + // + // Operation + ImportAgent(context.Context, *ImportAgentRequest) (*longrunning.Operation, error) + // Restores the specified agent from a ZIP file. + // + // Replaces the current agent version with a new one. All the intents and + // entity types in the older version are deleted. 
+ // + // + // Operation + RestoreAgent(context.Context, *RestoreAgentRequest) (*longrunning.Operation, error) +} + +func RegisterAgentsServer(s *grpc.Server, srv AgentsServer) { + s.RegisterService(&_Agents_serviceDesc, srv) +} + +func _Agents_GetAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).GetAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/GetAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).GetAgent(ctx, req.(*GetAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_SearchAgents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchAgentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).SearchAgents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/SearchAgents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).SearchAgents(ctx, req.(*SearchAgentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_TrainAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TrainAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).TrainAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/TrainAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).TrainAgent(ctx, req.(*TrainAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_ExportAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).ExportAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/ExportAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).ExportAgent(ctx, req.(*ExportAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_ImportAgent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).ImportAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/ImportAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).ImportAgent(ctx, req.(*ImportAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Agents_RestoreAgent_Handler(srv interface{}, 
ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RestoreAgentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AgentsServer).RestoreAgent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Agents/RestoreAgent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AgentsServer).RestoreAgent(ctx, req.(*RestoreAgentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Agents_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.Agents", + HandlerType: (*AgentsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetAgent", + Handler: _Agents_GetAgent_Handler, + }, + { + MethodName: "SearchAgents", + Handler: _Agents_SearchAgents_Handler, + }, + { + MethodName: "TrainAgent", + Handler: _Agents_TrainAgent_Handler, + }, + { + MethodName: "ExportAgent", + Handler: _Agents_ExportAgent_Handler, + }, + { + MethodName: "ImportAgent", + Handler: _Agents_ImportAgent_Handler, + }, + { + MethodName: "RestoreAgent", + Handler: _Agents_RestoreAgent_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/agent.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/agent.proto", fileDescriptor_agent_b87bd04ebcb8ded2) +} + +var fileDescriptor_agent_b87bd04ebcb8ded2 = []byte{ + // 985 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0xdd, 0x6e, 0xdc, 0x44, + 0x14, 0x8e, 0x37, 0x4d, 0xb2, 0x7b, 0x92, 0xb4, 0xe9, 0x84, 0x06, 0xb3, 0x6d, 0xd4, 0xc5, 0xa5, + 0xd5, 0x36, 0x11, 0x6b, 0xba, 0x69, 0x25, 0x08, 0x02, 0xa9, 0xf9, 0x69, 0xb3, 0x52, 0xfe, 0xe4, + 0x24, 0x95, 0xda, 0x1b, 0x6b, 0x62, 0x4f, 0x9c, 0xa1, 0xf6, 0x8c, 0x99, 0x19, 0x97, 0x36, 0x05, + 0x2e, 0x78, 0x02, 0x24, 0x90, 0x10, 0x5c, 0x72, 0x85, 0xb8, 0xe0, 0x8a, 0x37, 0xe1, 0x15, 0x78, + 0x08, 0x2e, 0x91, 0xc7, 0xde, 0xac, 0x37, 0x69, 0x6b, 0x23, 0x21, 0xf5, 0xce, 0xfe, 0xbe, 0xef, + 0x9c, 0xf3, 0xcd, 0xcc, 0x39, 0x9a, 0x81, 0xc5, 0x80, 0xf3, 0x20, 0x24, 0xb6, 0x17, 0xf2, 0xc4, + 0xb7, 0x7d, 0x8a, 0x43, 0x1e, 0x1c, 0x85, 0xfc, 0x2b, 0xfb, 0x59, 0xf7, 0x90, 0x28, 0x7c, 0xc7, + 0xc6, 0x01, 0x61, 0xaa, 0x13, 0x0b, 0xae, 0x38, 0xba, 0x9e, 0x89, 0x3b, 0x5a, 0xdc, 0x19, 0x88, + 0x3b, 0xb9, 0xb8, 0x79, 0x2d, 0xcf, 0x86, 0x63, 0x6a, 0x63, 0xc6, 0xb8, 0xc2, 0x8a, 0x72, 0x26, + 0xb3, 0xf0, 0xe6, 0x7b, 0x05, 0x56, 0x10, 0xc9, 0x13, 0xe1, 0x91, 0x9c, 0xba, 0x91, 0x53, 0x21, + 0x67, 0x81, 0x48, 0x18, 0xa3, 0x2c, 0xb0, 0x79, 0x4c, 0xc4, 0x50, 0xfc, 0xd5, 0x5c, 0xa4, 0xff, + 0x0e, 0x93, 0x23, 0x9b, 0x44, 0xb1, 0x7a, 0x91, 0x93, 0xad, 0xb3, 0xe4, 0x11, 0x25, 0xa1, 0xef, + 0x46, 0x58, 0x3e, 0xcd, 0x15, 0xd7, 0xce, 0x2a, 0xa4, 0x12, 0x89, 0x97, 0xaf, 0xcd, 0xfa, 0xf9, + 0x02, 0x8c, 0xdd, 0x4f, 0xd7, 0x8a, 0xe6, 0x60, 0x3c, 0xc6, 0x82, 0x30, 0x65, 0x1a, 0x2d, 0xa3, + 0xdd, 0x70, 0xf2, 0x3f, 0xf4, 0x3e, 0x4c, 0xf9, 0x54, 0xc6, 0x21, 0x7e, 0xe1, 0x32, 0x1c, 0x11, + 0xb3, 0xa6, 0xd9, 0xc9, 0x1c, 0xdb, 0xc6, 0x11, 0x41, 0x5d, 0xb8, 0xe2, 0x93, 0x23, 0x9c, 0x84, + 0xca, 0x0d, 0x31, 0x0b, 0x12, 0x1c, 0x10, 0xd7, 0xe3, 0x3e, 0x31, 0x47, 0xb5, 0x76, 0x36, 0x27, + 0x37, 0x73, 0x6e, 0x95, 0xfb, 0x04, 0x7d, 0x0c, 0xa6, 0x4c, 0xe2, 0x98, 0x0b, 0x45, 0xfc, 0xe1, + 0x28, 0x69, 0x5e, 0x68, 0x8d, 0xb6, 0x1b, 0xce, 0xdc, 0x29, 0x5f, 0x0c, 0x94, 0xe8, 0x2a, 0x34, + 0x14, 0x8d, 0x88, 
0x7b, 0xc2, 0x19, 0x31, 0xc7, 0x74, 0x85, 0x7a, 0x0a, 0x3c, 0xe1, 0x8c, 0xa0, + 0x16, 0x4c, 0xfa, 0x44, 0x7a, 0x82, 0xc6, 0xe9, 0x16, 0x9a, 0xe3, 0xb9, 0xd9, 0x01, 0x84, 0xe6, + 0x01, 0xf0, 0x33, 0xac, 0xb0, 0x70, 0x13, 0x41, 0xcd, 0x09, 0x2d, 0x68, 0x64, 0xc8, 0x81, 0xa0, + 0xe8, 0x26, 0x5c, 0x24, 0x0c, 0x1f, 0x86, 0xc4, 0x0d, 0x79, 0x10, 0x50, 0x16, 0x98, 0xf5, 0x96, + 0xd1, 0xae, 0x3b, 0xd3, 0x19, 0xba, 0x99, 0x81, 0x68, 0x07, 0x20, 0xc2, 0xca, 0x3b, 0x76, 0xa3, + 0x74, 0x9d, 0x8d, 0x96, 0xd1, 0xbe, 0xd8, 0xfd, 0xa8, 0x53, 0xd2, 0x28, 0x1d, 0xbd, 0xd3, 0x9d, + 0xad, 0x34, 0x70, 0x8b, 0xfb, 0xc4, 0x69, 0x44, 0xfd, 0x4f, 0xf4, 0x09, 0x98, 0x5e, 0x88, 0xa5, + 0xa4, 0x47, 0xd4, 0xd3, 0xc7, 0xef, 0xaa, 0x63, 0x41, 0xe4, 0x31, 0x0f, 0x7d, 0x13, 0x5a, 0x46, + 0xbb, 0xe6, 0xbc, 0x3b, 0xcc, 0xef, 0xf7, 0x69, 0xeb, 0x11, 0x34, 0x4e, 0x53, 0xa2, 0x26, 0xcc, + 0x6d, 0xdd, 0xdf, 0x5f, 0xdd, 0x70, 0xb7, 0x76, 0xd6, 0xd6, 0xdd, 0x83, 0xed, 0xbd, 0xdd, 0xf5, + 0xd5, 0xde, 0x83, 0xde, 0xfa, 0xda, 0xcc, 0x08, 0xba, 0x02, 0x97, 0x0b, 0xdc, 0xc6, 0xe3, 0x15, + 0xa7, 0xb7, 0x36, 0x63, 0xa0, 0x39, 0x40, 0x05, 0x78, 0x6b, 0xd3, 0xdd, 0xd9, 0xde, 0x7c, 0x3c, + 0x53, 0xb3, 0x6e, 0xc3, 0xa5, 0x87, 0x44, 0x69, 0xcf, 0x0e, 0xf9, 0x32, 0x21, 0xf2, 0xb5, 0x4d, + 0x62, 0x51, 0x98, 0xdd, 0x23, 0x58, 0x78, 0xc7, 0x5a, 0x2d, 0x4b, 0xe4, 0xe9, 0x11, 0xc6, 0xe9, + 0x71, 0x4b, 0x7a, 0x92, 0x35, 0xd4, 0x98, 0x53, 0x4f, 0x81, 0x3d, 0x7a, 0x42, 0xd2, 0x03, 0xd2, + 0xa4, 0xe2, 0x4f, 0x09, 0xcb, 0x5b, 0x48, 0xcb, 0xf7, 0x53, 0xc0, 0xfa, 0x16, 0xde, 0x19, 0x2e, + 0x25, 0x63, 0xce, 0x24, 0x41, 0x9f, 0xc3, 0xb8, 0x1e, 0x5a, 0x69, 0x1a, 0xad, 0xd1, 0xf6, 0x64, + 0xf7, 0x56, 0xb5, 0xd3, 0x70, 0xf2, 0x28, 0x74, 0x0b, 0x2e, 0x31, 0xf2, 0x5c, 0xb9, 0x85, 0xda, + 0x59, 0xab, 0x4f, 0xa7, 0xf0, 0xee, 0x69, 0xfd, 0x45, 0xb8, 0xbc, 0x2f, 0x30, 0x65, 0x95, 0xf6, + 0xa5, 0x07, 0x68, 0xfd, 0x79, 0xda, 0xc4, 0x55, 0xd4, 0xe9, 0xb6, 0x68, 0x33, 0xba, 0x33, 0xb3, + 0xe2, 0x75, 0x0d, 0x1c, 0x08, 0x6a, 0xf9, 0x30, 0x3b, 0x94, 0x2a, 0x5f, 0xf6, 0x7c, 0x31, 0x46, + 0xa7, 0xdb, 0x18, 0x19, 0x44, 0xa1, 0x9b, 0x30, 0x9d, 0xd1, 0x1e, 0x67, 0x2a, 0xad, 0x98, 0xa6, + 0x9d, 0xda, 0x18, 0x71, 0xa6, 0x34, 0xbc, 0x9a, 0xa1, 0x2b, 0x13, 0x30, 0xa6, 0xff, 0xad, 0x97, + 0x80, 0x7a, 0x51, 0x65, 0xc3, 0xf3, 0xe7, 0x0c, 0xbf, 0xb9, 0xf8, 0xe8, 0x9b, 0x8b, 0x7f, 0x0d, + 0xb3, 0x0e, 0x91, 0x8a, 0x0b, 0xf2, 0x16, 0xaa, 0x77, 0xff, 0x9c, 0x80, 0xf1, 0xac, 0xa7, 0xd0, + 0xf7, 0x06, 0xd4, 0xfb, 0xad, 0x8f, 0xca, 0xc7, 0xfa, 0xcc, 0x94, 0x34, 0x2b, 0xb6, 0x9e, 0xb5, + 0xf0, 0xdd, 0x5f, 0x7f, 0xff, 0x50, 0xfb, 0x00, 0x59, 0xa7, 0xd7, 0xce, 0xcb, 0x6c, 0x69, 0x9f, + 0xc5, 0x82, 0x7f, 0x41, 0x3c, 0x25, 0xed, 0x85, 0x6f, 0xb2, 0xab, 0x08, 0xfd, 0x61, 0xc0, 0x54, + 0xb1, 0xef, 0xd1, 0xdd, 0xd2, 0x22, 0xaf, 0x98, 0xc8, 0xe6, 0xbd, 0xff, 0x18, 0x95, 0x75, 0x99, + 0x75, 0x47, 0x3b, 0x5d, 0x44, 0xb7, 0xcb, 0x9d, 0x2e, 0x4b, 0x9d, 0x00, 0xfd, 0x68, 0x00, 0x0c, + 0x06, 0x05, 0x75, 0x4b, 0x0b, 0x9f, 0x9b, 0xaa, 0xe6, 0x7c, 0x3f, 0xa6, 0x70, 0x3f, 0x76, 0x76, + 0xfa, 0xf7, 0xa3, 0xb5, 0xa4, 0x4d, 0x7d, 0x68, 0xb5, 0x2b, 0x98, 0x52, 0x69, 0xf2, 0x65, 0x63, + 0x01, 0xfd, 0x64, 0xc0, 0x64, 0x61, 0x8e, 0xd0, 0x52, 0xa9, 0xaf, 0xf3, 0x03, 0x5c, 0x66, 0xec, + 0xae, 0x36, 0xd6, 0xb1, 0xaa, 0xec, 0x16, 0xd1, 0xd9, 0xfb, 0xce, 0x0a, 0xb3, 0x57, 0xc1, 0xd9, + 0xf9, 0x49, 0xfd, 0x3f, 0x9d, 0xd1, 0xa8, 0xef, 0xec, 0x17, 0x03, 0xa6, 0x8a, 0x83, 0x59, 0xa1, + 0xf7, 0x5e, 0x31, 0xc7, 0x65, 0xde, 0xee, 0x69, 0x6f, 0xb6, 0xb5, 0x50, 0xc1, 0x9b, 0xc8, 0xd2, + 0x2f, 0x1b, 0x0b, 0x2b, 0xbf, 0x19, 0x70, 
0xc3, 0xe3, 0x51, 0x99, 0xa3, 0x15, 0xd0, 0x5e, 0x76, + 0xd3, 0x57, 0xcf, 0xae, 0xf1, 0xa4, 0x97, 0xcb, 0x03, 0x9e, 0xbe, 0x3d, 0x3a, 0x5c, 0x04, 0x76, + 0x40, 0x98, 0x7e, 0x13, 0xd9, 0x19, 0x85, 0x63, 0x2a, 0x5f, 0xfb, 0x3e, 0xfc, 0x74, 0x00, 0xfd, + 0x63, 0x18, 0xbf, 0xd6, 0x6a, 0x6b, 0x0f, 0x7e, 0xaf, 0x5d, 0x7f, 0x98, 0xe5, 0x5c, 0xd5, 0x16, + 0xd6, 0x06, 0x16, 0x1e, 0x65, 0x41, 0x87, 0xe3, 0x3a, 0xff, 0xd2, 0xbf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xdd, 0x39, 0xa3, 0x5b, 0x7e, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/audio_config.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/audio_config.pb.go new file mode 100644 index 0000000..1e93ba1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/audio_config.pb.go @@ -0,0 +1,619 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/audio_config.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Audio encoding of the audio content sent in the conversational query request. +// Refer to the +// [Cloud Speech API +// documentation](https://cloud.google.com/speech-to-text/docs/basics) for more +// details. +type AudioEncoding int32 + +const ( + // Not specified. + AudioEncoding_AUDIO_ENCODING_UNSPECIFIED AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + AudioEncoding_AUDIO_ENCODING_LINEAR_16 AudioEncoding = 1 + // [`FLAC`](https://xiph.org/flac/documentation.html) (Free Lossless Audio + // Codec) is the recommended encoding because it is lossless (therefore + // recognition is not compromised) and requires only about half the + // bandwidth of `LINEAR16`. `FLAC` stream encoding supports 16-bit and + // 24-bit samples, however, not all fields in `STREAMINFO` are supported. + AudioEncoding_AUDIO_ENCODING_FLAC AudioEncoding = 2 + // 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + AudioEncoding_AUDIO_ENCODING_MULAW AudioEncoding = 3 + // Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz` must be 8000. + AudioEncoding_AUDIO_ENCODING_AMR AudioEncoding = 4 + // Adaptive Multi-Rate Wideband codec. `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_AMR_WB AudioEncoding = 5 + // Opus encoded audio frames in Ogg container + // ([OggOpus](https://wiki.xiph.org/OggOpus)). + // `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_OGG_OPUS AudioEncoding = 6 + // Although the use of lossy encodings is not recommended, if a very low + // bitrate encoding is required, `OGG_OPUS` is highly preferred over + // Speex encoding. The [Speex](https://speex.org/) encoding supported by + // Dialogflow API has a header byte in each block, as in MIME type + // `audio/x-speex-with-header-byte`. 
+ // It is a variant of the RTP Speex encoding defined in + // [RFC 5574](https://tools.ietf.org/html/rfc5574). + // The stream is a sequence of blocks, one block per RTP packet. Each block + // starts with a byte containing the length of the block, in bytes, followed + // by one or more frames of Speex data, padded to an integral number of + // bytes (octets) as specified in RFC 5574. In other words, each RTP header + // is replaced with a single byte containing the block length. Only Speex + // wideband is supported. `sample_rate_hertz` must be 16000. + AudioEncoding_AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE AudioEncoding = 7 +) + +var AudioEncoding_name = map[int32]string{ + 0: "AUDIO_ENCODING_UNSPECIFIED", + 1: "AUDIO_ENCODING_LINEAR_16", + 2: "AUDIO_ENCODING_FLAC", + 3: "AUDIO_ENCODING_MULAW", + 4: "AUDIO_ENCODING_AMR", + 5: "AUDIO_ENCODING_AMR_WB", + 6: "AUDIO_ENCODING_OGG_OPUS", + 7: "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE", +} +var AudioEncoding_value = map[string]int32{ + "AUDIO_ENCODING_UNSPECIFIED": 0, + "AUDIO_ENCODING_LINEAR_16": 1, + "AUDIO_ENCODING_FLAC": 2, + "AUDIO_ENCODING_MULAW": 3, + "AUDIO_ENCODING_AMR": 4, + "AUDIO_ENCODING_AMR_WB": 5, + "AUDIO_ENCODING_OGG_OPUS": 6, + "AUDIO_ENCODING_SPEEX_WITH_HEADER_BYTE": 7, +} + +func (x AudioEncoding) String() string { + return proto.EnumName(AudioEncoding_name, int32(x)) +} +func (AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{0} +} + +// Variant of the specified [Speech model][google.cloud.dialogflow.v2beta1.InputAudioConfig.model] to use. +// +// See the [Cloud Speech +// documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) +// for which models have different variants. For example, the "phone_call" model +// has both a standard and an enhanced variant. When you use an enhanced model, +// you will generally receive higher quality results than for a standard model. +type SpeechModelVariant int32 + +const ( + // No model variant specified. In this case Dialogflow defaults to + // USE_BEST_AVAILABLE. + SpeechModelVariant_SPEECH_MODEL_VARIANT_UNSPECIFIED SpeechModelVariant = 0 + // Use the best available variant of the [Speech + // model][InputAudioConfig.model] that the caller is eligible for. + // + // Please see the [Dialogflow + // docs](https://cloud.google.com/dialogflow-enterprise/docs/data-logging) for + // how to make your project eligible for enhanced models. + SpeechModelVariant_USE_BEST_AVAILABLE SpeechModelVariant = 1 + // Use standard model variant even if an enhanced model is available. See the + // [Cloud Speech + // documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) + // for details about enhanced models. + SpeechModelVariant_USE_STANDARD SpeechModelVariant = 2 + // Use an enhanced model variant: + // + // * If an enhanced variant does not exist for the given + // [model][google.cloud.dialogflow.v2beta1.InputAudioConfig.model] and request language, Dialogflow falls + // back to the standard variant. + // + // The [Cloud Speech + // documentation](https://cloud.google.com/speech-to-text/docs/enhanced-models) + // describes which models have enhanced variants. + // + // * If the API caller isn't eligible for enhanced models, Dialogflow returns + // an error. Please see the [Dialogflow + // docs](https://cloud.google.com/dialogflow-enterprise/docs/data-logging) + // for how to make your project eligible. 
+ SpeechModelVariant_USE_ENHANCED SpeechModelVariant = 3 +) + +var SpeechModelVariant_name = map[int32]string{ + 0: "SPEECH_MODEL_VARIANT_UNSPECIFIED", + 1: "USE_BEST_AVAILABLE", + 2: "USE_STANDARD", + 3: "USE_ENHANCED", +} +var SpeechModelVariant_value = map[string]int32{ + "SPEECH_MODEL_VARIANT_UNSPECIFIED": 0, + "USE_BEST_AVAILABLE": 1, + "USE_STANDARD": 2, + "USE_ENHANCED": 3, +} + +func (x SpeechModelVariant) String() string { + return proto.EnumName(SpeechModelVariant_name, int32(x)) +} +func (SpeechModelVariant) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{1} +} + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). +type SsmlVoiceGender int32 + +const ( + // An unspecified gender, which means that the client doesn't care which + // gender the selected voice will have. + SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED SsmlVoiceGender = 0 + // A male voice. + SsmlVoiceGender_SSML_VOICE_GENDER_MALE SsmlVoiceGender = 1 + // A female voice. + SsmlVoiceGender_SSML_VOICE_GENDER_FEMALE SsmlVoiceGender = 2 + // A gender-neutral voice. + SsmlVoiceGender_SSML_VOICE_GENDER_NEUTRAL SsmlVoiceGender = 3 +) + +var SsmlVoiceGender_name = map[int32]string{ + 0: "SSML_VOICE_GENDER_UNSPECIFIED", + 1: "SSML_VOICE_GENDER_MALE", + 2: "SSML_VOICE_GENDER_FEMALE", + 3: "SSML_VOICE_GENDER_NEUTRAL", +} +var SsmlVoiceGender_value = map[string]int32{ + "SSML_VOICE_GENDER_UNSPECIFIED": 0, + "SSML_VOICE_GENDER_MALE": 1, + "SSML_VOICE_GENDER_FEMALE": 2, + "SSML_VOICE_GENDER_NEUTRAL": 3, +} + +func (x SsmlVoiceGender) String() string { + return proto.EnumName(SsmlVoiceGender_name, int32(x)) +} +func (SsmlVoiceGender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{2} +} + +// Audio encoding of the output audio format in Text-To-Speech. +type OutputAudioEncoding int32 + +const ( + // Not specified. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_UNSPECIFIED OutputAudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_LINEAR_16 OutputAudioEncoding = 1 + // MP3 audio. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_MP3 OutputAudioEncoding = 2 + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_OGG_OPUS OutputAudioEncoding = 3 +) + +var OutputAudioEncoding_name = map[int32]string{ + 0: "OUTPUT_AUDIO_ENCODING_UNSPECIFIED", + 1: "OUTPUT_AUDIO_ENCODING_LINEAR_16", + 2: "OUTPUT_AUDIO_ENCODING_MP3", + 3: "OUTPUT_AUDIO_ENCODING_OGG_OPUS", +} +var OutputAudioEncoding_value = map[string]int32{ + "OUTPUT_AUDIO_ENCODING_UNSPECIFIED": 0, + "OUTPUT_AUDIO_ENCODING_LINEAR_16": 1, + "OUTPUT_AUDIO_ENCODING_MP3": 2, + "OUTPUT_AUDIO_ENCODING_OGG_OPUS": 3, +} + +func (x OutputAudioEncoding) String() string { + return proto.EnumName(OutputAudioEncoding_name, int32(x)) +} +func (OutputAudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{3} +} + +// Instructs the speech recognizer on how to process the audio content. +type InputAudioConfig struct { + // Required. Audio encoding of the audio content to process. 
+ AudioEncoding AudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.dialogflow.v2beta1.AudioEncoding" json:"audio_encoding,omitempty"` + // Required. Sample rate (in Hertz) of the audio content sent in the query. + // Refer to + // [Cloud Speech API + // documentation](https://cloud.google.com/speech-to-text/docs/basics) for + // more details. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // Required. The language of the supplied audio. Dialogflow does not do + // translations. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The collection of phrase hints which are used to boost accuracy + // of speech recognition. + // Refer to + // [Cloud Speech API + // documentation](https://cloud.google.com/speech-to-text/docs/basics#phrase-hints) + // for more details. + PhraseHints []string `protobuf:"bytes,4,rep,name=phrase_hints,json=phraseHints,proto3" json:"phrase_hints,omitempty"` + // Optional. Which Speech model to select for the given request. Select the + // model best suited to your domain to get best results. If a model is not + // explicitly specified, then we auto-select a model based on the parameters + // in the InputAudioConfig. + // If enhanced speech model is enabled for the agent and an enhanced + // version of the specified model for the language does not exist, then the + // speech is recognized using the standard version of the specified model. + // Refer to + // [Cloud Speech API + // documentation](https://cloud.google.com/speech-to-text/docs/basics#select-model) + // for more details. + Model string `protobuf:"bytes,7,opt,name=model,proto3" json:"model,omitempty"` + // Optional. Which variant of the [Speech model][google.cloud.dialogflow.v2beta1.InputAudioConfig.model] to use. 
+ ModelVariant SpeechModelVariant `protobuf:"varint,10,opt,name=model_variant,json=modelVariant,proto3,enum=google.cloud.dialogflow.v2beta1.SpeechModelVariant" json:"model_variant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputAudioConfig) Reset() { *m = InputAudioConfig{} } +func (m *InputAudioConfig) String() string { return proto.CompactTextString(m) } +func (*InputAudioConfig) ProtoMessage() {} +func (*InputAudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{0} +} +func (m *InputAudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputAudioConfig.Unmarshal(m, b) +} +func (m *InputAudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputAudioConfig.Marshal(b, m, deterministic) +} +func (dst *InputAudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputAudioConfig.Merge(dst, src) +} +func (m *InputAudioConfig) XXX_Size() int { + return xxx_messageInfo_InputAudioConfig.Size(m) +} +func (m *InputAudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputAudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputAudioConfig proto.InternalMessageInfo + +func (m *InputAudioConfig) GetAudioEncoding() AudioEncoding { + if m != nil { + return m.AudioEncoding + } + return AudioEncoding_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *InputAudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *InputAudioConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *InputAudioConfig) GetPhraseHints() []string { + if m != nil { + return m.PhraseHints + } + return nil +} + +func (m *InputAudioConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *InputAudioConfig) GetModelVariant() SpeechModelVariant { + if m != nil { + return m.ModelVariant + } + return SpeechModelVariant_SPEECH_MODEL_VARIANT_UNSPECIFIED +} + +// Description of which voice to use for speech synthesis. +type VoiceSelectionParams struct { + // Optional. The name of the voice. If not set, the service will choose a + // voice based on the other parameters such as language_code and gender. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The preferred gender of the voice. If not set, the service will + // choose a voice based on the other parameters such as language_code and + // name. Note that this is only a preference, not requirement. If a + // voice of the appropriate gender is not available, the synthesizer should + // substitute a voice with a different gender rather than failing the request. 
+ SsmlGender SsmlVoiceGender `protobuf:"varint,2,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.dialogflow.v2beta1.SsmlVoiceGender" json:"ssml_gender,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VoiceSelectionParams) Reset() { *m = VoiceSelectionParams{} } +func (m *VoiceSelectionParams) String() string { return proto.CompactTextString(m) } +func (*VoiceSelectionParams) ProtoMessage() {} +func (*VoiceSelectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{1} +} +func (m *VoiceSelectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VoiceSelectionParams.Unmarshal(m, b) +} +func (m *VoiceSelectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VoiceSelectionParams.Marshal(b, m, deterministic) +} +func (dst *VoiceSelectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_VoiceSelectionParams.Merge(dst, src) +} +func (m *VoiceSelectionParams) XXX_Size() int { + return xxx_messageInfo_VoiceSelectionParams.Size(m) +} +func (m *VoiceSelectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_VoiceSelectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_VoiceSelectionParams proto.InternalMessageInfo + +func (m *VoiceSelectionParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VoiceSelectionParams) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +// Configuration of how speech should be synthesized. +type SynthesizeSpeechConfig struct { + // Optional. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal + // native speed supported by the specific voice. 2.0 is twice as fast, and + // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any + // other values < 0.25 or > 4.0 will return an error. + SpeakingRate float64 `protobuf:"fixed64,1,opt,name=speaking_rate,json=speakingRate,proto3" json:"speaking_rate,omitempty"` + // Optional. Speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 + // semitones from the original pitch. -20 means decrease 20 semitones from the + // original pitch. + Pitch float64 `protobuf:"fixed64,2,opt,name=pitch,proto3" json:"pitch,omitempty"` + // Optional. Volume gain (in dB) of the normal native volume supported by the + // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of + // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) + // will play at approximately half the amplitude of the normal native signal + // amplitude. A value of +6.0 (dB) will play at approximately twice the + // amplitude of the normal native signal amplitude. We strongly recommend not + // to exceed +10 (dB) as there's usually no effective increase in loudness for + // any value greater than that. + VolumeGainDb float64 `protobuf:"fixed64,3,opt,name=volume_gain_db,json=volumeGainDb,proto3" json:"volume_gain_db,omitempty"` + // Optional. An identifier which selects 'audio effects' profiles that are + // applied on (post synthesized) text to speech. Effects are applied on top of + // each other in the order they are given. + EffectsProfileId []string `protobuf:"bytes,5,rep,name=effects_profile_id,json=effectsProfileId,proto3" json:"effects_profile_id,omitempty"` + // Optional. The desired voice of the synthesized audio. 
+ Voice *VoiceSelectionParams `protobuf:"bytes,4,opt,name=voice,proto3" json:"voice,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechConfig) Reset() { *m = SynthesizeSpeechConfig{} } +func (m *SynthesizeSpeechConfig) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechConfig) ProtoMessage() {} +func (*SynthesizeSpeechConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{2} +} +func (m *SynthesizeSpeechConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechConfig.Unmarshal(m, b) +} +func (m *SynthesizeSpeechConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechConfig.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechConfig.Merge(dst, src) +} +func (m *SynthesizeSpeechConfig) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechConfig.Size(m) +} +func (m *SynthesizeSpeechConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechConfig proto.InternalMessageInfo + +func (m *SynthesizeSpeechConfig) GetSpeakingRate() float64 { + if m != nil { + return m.SpeakingRate + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetPitch() float64 { + if m != nil { + return m.Pitch + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetVolumeGainDb() float64 { + if m != nil { + return m.VolumeGainDb + } + return 0 +} + +func (m *SynthesizeSpeechConfig) GetEffectsProfileId() []string { + if m != nil { + return m.EffectsProfileId + } + return nil +} + +func (m *SynthesizeSpeechConfig) GetVoice() *VoiceSelectionParams { + if m != nil { + return m.Voice + } + return nil +} + +// Instructs the speech synthesizer how to generate the output audio content. +type OutputAudioConfig struct { + // Required. Audio encoding of the synthesized audio content. + AudioEncoding OutputAudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.dialogflow.v2beta1.OutputAudioEncoding" json:"audio_encoding,omitempty"` + // Optional. The synthesis sample rate (in hertz) for this audio. If not + // provided, then the synthesizer will use the default sample rate based on + // the audio encoding. If this is different from the voice's natural sample + // rate, then the synthesizer will honor this request by converting to the + // desired sample rate (which might result in worse audio quality). + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // Optional. Configuration of how speech should be synthesized. 
+ SynthesizeSpeechConfig *SynthesizeSpeechConfig `protobuf:"bytes,3,opt,name=synthesize_speech_config,json=synthesizeSpeechConfig,proto3" json:"synthesize_speech_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputAudioConfig) Reset() { *m = OutputAudioConfig{} } +func (m *OutputAudioConfig) String() string { return proto.CompactTextString(m) } +func (*OutputAudioConfig) ProtoMessage() {} +func (*OutputAudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_audio_config_e6cfe6aed50d3b5d, []int{3} +} +func (m *OutputAudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputAudioConfig.Unmarshal(m, b) +} +func (m *OutputAudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputAudioConfig.Marshal(b, m, deterministic) +} +func (dst *OutputAudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputAudioConfig.Merge(dst, src) +} +func (m *OutputAudioConfig) XXX_Size() int { + return xxx_messageInfo_OutputAudioConfig.Size(m) +} +func (m *OutputAudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputAudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputAudioConfig proto.InternalMessageInfo + +func (m *OutputAudioConfig) GetAudioEncoding() OutputAudioEncoding { + if m != nil { + return m.AudioEncoding + } + return OutputAudioEncoding_OUTPUT_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *OutputAudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *OutputAudioConfig) GetSynthesizeSpeechConfig() *SynthesizeSpeechConfig { + if m != nil { + return m.SynthesizeSpeechConfig + } + return nil +} + +func init() { + proto.RegisterType((*InputAudioConfig)(nil), "google.cloud.dialogflow.v2beta1.InputAudioConfig") + proto.RegisterType((*VoiceSelectionParams)(nil), "google.cloud.dialogflow.v2beta1.VoiceSelectionParams") + proto.RegisterType((*SynthesizeSpeechConfig)(nil), "google.cloud.dialogflow.v2beta1.SynthesizeSpeechConfig") + proto.RegisterType((*OutputAudioConfig)(nil), "google.cloud.dialogflow.v2beta1.OutputAudioConfig") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.AudioEncoding", AudioEncoding_name, AudioEncoding_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.SpeechModelVariant", SpeechModelVariant_name, SpeechModelVariant_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.SsmlVoiceGender", SsmlVoiceGender_name, SsmlVoiceGender_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.OutputAudioEncoding", OutputAudioEncoding_name, OutputAudioEncoding_value) +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/audio_config.proto", fileDescriptor_audio_config_e6cfe6aed50d3b5d) +} + +var fileDescriptor_audio_config_e6cfe6aed50d3b5d = []byte{ + // 920 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x4f, 0x6f, 0xe3, 0x44, + 0x14, 0xc7, 0x4e, 0xbb, 0xab, 0xbe, 0xa6, 0x5d, 0xef, 0x6c, 0xe9, 0xba, 0xa5, 0xdd, 0xa6, 0xed, + 0xae, 0x14, 0x22, 0x94, 0xb0, 0x29, 0x7f, 0x0e, 0x9c, 0x9c, 0x78, 0x9a, 0x58, 0x24, 0x4e, 0xb0, + 0x93, 0x74, 0x81, 0xc3, 0x68, 0x6a, 0x4f, 0x1c, 0x0b, 0xc7, 0x63, 0x62, 0x27, 0x88, 0x95, 0xf8, + 0x08, 0x88, 0xef, 0x80, 0x38, 0x71, 0xe0, 0x9b, 0xf0, 0x6d, 0x38, 0x20, 0x71, 0x41, 0x1e, 0xa7, + 0xdb, 0x6c, 0x92, 0x25, 0x17, 0x6e, 0x33, 0xbf, 0xdf, 0xfc, 0xde, 0x7b, 0xf3, 0x7b, 0x6f, 0x34, + 0x50, 0xf5, 0x38, 0xf7, 0x02, 0x56, 
0x71, 0x02, 0x3e, 0x75, 0x2b, 0xae, 0x4f, 0x03, 0xee, 0x0d, + 0x03, 0xfe, 0x43, 0x65, 0x56, 0xbd, 0x65, 0x09, 0x7d, 0x59, 0xa1, 0x53, 0xd7, 0xe7, 0xc4, 0xe1, + 0xe1, 0xd0, 0xf7, 0xca, 0xd1, 0x84, 0x27, 0x1c, 0x9d, 0x65, 0x9a, 0xb2, 0xd0, 0x94, 0xef, 0x35, + 0xe5, 0xb9, 0xe6, 0xf8, 0x64, 0x1e, 0x94, 0x46, 0x7e, 0x85, 0x86, 0x21, 0x4f, 0x68, 0xe2, 0xf3, + 0x30, 0xce, 0xe4, 0x17, 0x7f, 0xca, 0xa0, 0x18, 0x61, 0x34, 0x4d, 0xb4, 0x34, 0x74, 0x5d, 0x44, + 0x46, 0x7d, 0xd8, 0xcf, 0x32, 0xb1, 0xd0, 0xe1, 0xae, 0x1f, 0x7a, 0xaa, 0x54, 0x90, 0x8a, 0xfb, + 0xd5, 0x72, 0x79, 0x43, 0xb2, 0xb2, 0x88, 0x82, 0xe7, 0x2a, 0x6b, 0x8f, 0x2e, 0x6e, 0x51, 0x09, + 0x1e, 0xc7, 0x74, 0x1c, 0x05, 0x8c, 0x4c, 0x68, 0xc2, 0xc8, 0x88, 0x4d, 0x92, 0xd7, 0xaa, 0x5c, + 0x90, 0x8a, 0xdb, 0xd6, 0xa3, 0x8c, 0xb0, 0x68, 0xc2, 0x9a, 0x29, 0x8c, 0x2e, 0x61, 0x2f, 0xa0, + 0xa1, 0x37, 0xa5, 0x1e, 0x23, 0x0e, 0x77, 0x99, 0x9a, 0x2b, 0x48, 0xc5, 0x1d, 0x2b, 0x7f, 0x07, + 0xd6, 0xb9, 0xcb, 0xd0, 0x39, 0xe4, 0xa3, 0xd1, 0x84, 0xc6, 0x8c, 0x8c, 0xfc, 0x30, 0x89, 0xd5, + 0xad, 0x42, 0xae, 0xb8, 0x63, 0xed, 0x66, 0x58, 0x33, 0x85, 0xd0, 0x01, 0x6c, 0x8f, 0xb9, 0xcb, + 0x02, 0xf5, 0xa1, 0xd0, 0x67, 0x1b, 0xf4, 0x0a, 0xf6, 0xc4, 0x82, 0xcc, 0xe8, 0xc4, 0xa7, 0x61, + 0xa2, 0x82, 0xb8, 0xdf, 0xd5, 0xc6, 0xfb, 0xd9, 0x11, 0x63, 0xce, 0xa8, 0x9d, 0x6a, 0x07, 0x99, + 0xd4, 0xca, 0x8f, 0x17, 0x76, 0x17, 0x3f, 0xc1, 0xc1, 0x80, 0xfb, 0x0e, 0xb3, 0x59, 0xc0, 0x9c, + 0xd4, 0xe8, 0x2e, 0x9d, 0xd0, 0x71, 0x8c, 0x10, 0x6c, 0x85, 0x74, 0xcc, 0x84, 0x91, 0x3b, 0x96, + 0x58, 0xa3, 0xaf, 0x60, 0x37, 0x8e, 0xc7, 0x01, 0xf1, 0x58, 0xe8, 0xb2, 0x89, 0x70, 0x62, 0xbf, + 0xfa, 0xf1, 0xe6, 0x1a, 0xe2, 0x71, 0x20, 0x72, 0x34, 0x84, 0xce, 0x82, 0x34, 0x48, 0xb6, 0xbe, + 0xf8, 0x4b, 0x82, 0x43, 0xfb, 0xc7, 0x30, 0x19, 0xb1, 0xd8, 0x7f, 0xcd, 0xb2, 0x6a, 0xe7, 0x4d, + 0xbd, 0x84, 0xbd, 0x38, 0x62, 0xf4, 0x3b, 0x3f, 0xf4, 0x84, 0xff, 0xa2, 0x14, 0xc9, 0xca, 0xdf, + 0x81, 0xa9, 0xf7, 0xa9, 0x5d, 0x91, 0x9f, 0x38, 0x23, 0x51, 0x8c, 0x64, 0x65, 0x1b, 0xf4, 0x1c, + 0xf6, 0x67, 0x3c, 0x98, 0x8e, 0x19, 0xf1, 0xa8, 0x1f, 0x12, 0xf7, 0x56, 0x74, 0x43, 0xb2, 0xf2, + 0x19, 0xda, 0xa0, 0x7e, 0xa8, 0xdf, 0xa2, 0x8f, 0x00, 0xb1, 0xe1, 0x90, 0x39, 0x49, 0x4c, 0xa2, + 0x09, 0x1f, 0xfa, 0x01, 0x23, 0xbe, 0xab, 0x6e, 0x8b, 0x9e, 0x28, 0x73, 0xa6, 0x9b, 0x11, 0x86, + 0x8b, 0xbe, 0x84, 0xed, 0x59, 0x7a, 0x09, 0x75, 0xab, 0x20, 0x15, 0x77, 0xab, 0x9f, 0x6e, 0xbc, + 0xf6, 0x3a, 0x5b, 0xad, 0x2c, 0xc6, 0xc5, 0x2f, 0x32, 0x3c, 0xee, 0x4c, 0x93, 0xa5, 0x31, 0xfe, + 0xf6, 0x1d, 0x63, 0xfc, 0xc9, 0xc6, 0x5c, 0x0b, 0xb1, 0xfe, 0x8f, 0x61, 0xfe, 0x1e, 0xd4, 0xf8, + 0x4d, 0x53, 0x48, 0x2c, 0xba, 0x32, 0x7f, 0xc5, 0xc2, 0xc9, 0xdd, 0xea, 0xe7, 0x9b, 0xbb, 0xbe, + 0xb6, 0xab, 0xd6, 0x61, 0xbc, 0x16, 0x2f, 0xfd, 0x23, 0xc1, 0xde, 0x5b, 0xf5, 0xa3, 0x67, 0x70, + 0xac, 0xf5, 0x75, 0xa3, 0x43, 0xb0, 0x59, 0xef, 0xe8, 0x86, 0xd9, 0x20, 0x7d, 0xd3, 0xee, 0xe2, + 0xba, 0x71, 0x6d, 0x60, 0x5d, 0x79, 0x0f, 0x9d, 0x80, 0xba, 0xc4, 0xb7, 0x0c, 0x13, 0x6b, 0x16, + 0x79, 0xf9, 0x99, 0x22, 0xa1, 0xa7, 0xf0, 0x64, 0x89, 0xbd, 0x6e, 0x69, 0x75, 0x45, 0x46, 0x2a, + 0x1c, 0x2c, 0x11, 0xed, 0x7e, 0x4b, 0xbb, 0x51, 0x72, 0xe8, 0x10, 0xd0, 0x12, 0xa3, 0xb5, 0x2d, + 0x65, 0x0b, 0x1d, 0xc1, 0xfb, 0xab, 0x38, 0xb9, 0xa9, 0x29, 0xdb, 0xe8, 0x03, 0x78, 0xba, 0x44, + 0x75, 0x1a, 0x0d, 0xd2, 0xe9, 0xf6, 0x6d, 0xe5, 0x01, 0xfa, 0x10, 0x5e, 0x2c, 0x91, 0x76, 0x17, + 0xe3, 0x57, 0xe4, 0xc6, 0xe8, 0x35, 0x49, 0x13, 0x6b, 0x3a, 0xb6, 0x48, 0xed, 0xeb, 0x1e, 0x56, + 0x1e, 0x96, 0x66, 0x80, 0x56, 0x5f, 0x2a, 0x7a, 0x0e, 0x85, 
0x54, 0x51, 0x6f, 0x92, 0x76, 0x47, + 0xc7, 0x2d, 0x32, 0xd0, 0x2c, 0x43, 0x33, 0x7b, 0x4b, 0x3e, 0x1c, 0x02, 0xea, 0xdb, 0x98, 0xd4, + 0xb0, 0xdd, 0x23, 0xda, 0x40, 0x33, 0x5a, 0x5a, 0xad, 0x85, 0x15, 0x09, 0x29, 0x90, 0x4f, 0x71, + 0xbb, 0xa7, 0x99, 0xba, 0x66, 0xe9, 0x8a, 0x7c, 0x87, 0x60, 0xb3, 0xa9, 0x99, 0x75, 0xac, 0x2b, + 0xb9, 0xd2, 0xcf, 0x12, 0x3c, 0x5a, 0x7a, 0x9e, 0xe8, 0x1c, 0x4e, 0x6d, 0xbb, 0xdd, 0x22, 0x83, + 0x8e, 0x51, 0xc7, 0xa4, 0x81, 0xcd, 0xb4, 0xce, 0xb7, 0x53, 0x1e, 0xc3, 0xe1, 0xea, 0x91, 0xb6, + 0x26, 0xd2, 0x9e, 0x80, 0xba, 0xca, 0x5d, 0x63, 0xc1, 0xca, 0xe8, 0x14, 0x8e, 0x56, 0x59, 0x13, + 0xf7, 0x7b, 0x96, 0xd6, 0x52, 0x72, 0xa5, 0xdf, 0x24, 0x78, 0xb2, 0x66, 0x96, 0xd1, 0x0b, 0x38, + 0xef, 0xf4, 0x7b, 0xdd, 0x7e, 0x8f, 0xfc, 0xe7, 0x48, 0x5c, 0xc2, 0xd9, 0xfa, 0x63, 0x8b, 0x93, + 0x71, 0x0a, 0x47, 0xeb, 0x0f, 0xb5, 0xbb, 0x57, 0x8a, 0x8c, 0x2e, 0xe0, 0xd9, 0x7a, 0xfa, 0x4d, + 0x67, 0x73, 0xb5, 0x3f, 0x24, 0xb8, 0x74, 0xf8, 0x78, 0xd3, 0x1b, 0xa8, 0x29, 0x0b, 0xaf, 0xbb, + 0x9b, 0x7e, 0x5f, 0x5d, 0xe9, 0x1b, 0x63, 0x2e, 0xf2, 0x78, 0xfa, 0x35, 0x94, 0xf9, 0xc4, 0xab, + 0x78, 0x2c, 0x14, 0x9f, 0x5b, 0x25, 0xa3, 0x68, 0xe4, 0xc7, 0xef, 0xfc, 0x52, 0xbf, 0xb8, 0x87, + 0xfe, 0x96, 0xa4, 0x5f, 0x65, 0x59, 0xbf, 0xfe, 0x5d, 0x3e, 0x6b, 0x64, 0x31, 0xeb, 0xa2, 0x10, + 0xfd, 0xbe, 0x90, 0x41, 0x26, 0xba, 0x7d, 0x20, 0xe2, 0x5f, 0xfd, 0x1b, 0x00, 0x00, 0xff, 0xff, + 0x21, 0x49, 0x7d, 0x05, 0xb1, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/context.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/context.pb.go new file mode 100644 index 0000000..0a4ad26 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/context.pb.go @@ -0,0 +1,777 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/context.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a context. +type Context struct { + // Required. The unique identifier of the context. Format: + // `projects//agent/sessions//contexts/`, + // or `projects//agent/environments//users//sessions//contexts/`. + // + // The `Context ID` is always converted to lowercase, may only contain + // characters in a-zA-Z0-9_-% and may be at most 250 bytes long. + // + // If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. 
The number of conversational query requests after which the + // context expires. If set to `0` (the default) the context expires + // immediately. Contexts expire automatically after 20 minutes if there + // are no matching queries. + LifespanCount int32 `protobuf:"varint,2,opt,name=lifespan_count,json=lifespanCount,proto3" json:"lifespan_count,omitempty"` + // Optional. The collection of parameters associated with this context. + // Refer to [this + // doc](https://cloud.google.com/dialogflow-enterprise/docs/intents-actions-parameters) + // for syntax. + Parameters *_struct.Struct `protobuf:"bytes,3,opt,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Context) Reset() { *m = Context{} } +func (m *Context) String() string { return proto.CompactTextString(m) } +func (*Context) ProtoMessage() {} +func (*Context) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{0} +} +func (m *Context) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Context.Unmarshal(m, b) +} +func (m *Context) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Context.Marshal(b, m, deterministic) +} +func (dst *Context) XXX_Merge(src proto.Message) { + xxx_messageInfo_Context.Merge(dst, src) +} +func (m *Context) XXX_Size() int { + return xxx_messageInfo_Context.Size(m) +} +func (m *Context) XXX_DiscardUnknown() { + xxx_messageInfo_Context.DiscardUnknown(m) +} + +var xxx_messageInfo_Context proto.InternalMessageInfo + +func (m *Context) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Context) GetLifespanCount() int32 { + if m != nil { + return m.LifespanCount + } + return 0 +} + +func (m *Context) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +// The request message for [Contexts.ListContexts][google.cloud.dialogflow.v2beta1.Contexts.ListContexts]. +type ListContextsRequest struct { + // Required. The session to list all contexts from. + // Format: `projects//agent/sessions/` or + // `projects//agent/environments//users//sessions/`. If `Environment ID` is not specified, we assume + // default 'draft' environment. If `User ID` is not specified, we assume + // default '-' user. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListContextsRequest) Reset() { *m = ListContextsRequest{} } +func (m *ListContextsRequest) String() string { return proto.CompactTextString(m) } +func (*ListContextsRequest) ProtoMessage() {} +func (*ListContextsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{1} +} +func (m *ListContextsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListContextsRequest.Unmarshal(m, b) +} +func (m *ListContextsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListContextsRequest.Marshal(b, m, deterministic) +} +func (dst *ListContextsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListContextsRequest.Merge(dst, src) +} +func (m *ListContextsRequest) XXX_Size() int { + return xxx_messageInfo_ListContextsRequest.Size(m) +} +func (m *ListContextsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListContextsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListContextsRequest proto.InternalMessageInfo + +func (m *ListContextsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListContextsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListContextsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Contexts.ListContexts][google.cloud.dialogflow.v2beta1.Contexts.ListContexts]. +type ListContextsResponse struct { + // The list of contexts. There will be a maximum number of items + // returned based on the page_size field in the request. + Contexts []*Context `protobuf:"bytes,1,rep,name=contexts,proto3" json:"contexts,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListContextsResponse) Reset() { *m = ListContextsResponse{} } +func (m *ListContextsResponse) String() string { return proto.CompactTextString(m) } +func (*ListContextsResponse) ProtoMessage() {} +func (*ListContextsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{2} +} +func (m *ListContextsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListContextsResponse.Unmarshal(m, b) +} +func (m *ListContextsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListContextsResponse.Marshal(b, m, deterministic) +} +func (dst *ListContextsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListContextsResponse.Merge(dst, src) +} +func (m *ListContextsResponse) XXX_Size() int { + return xxx_messageInfo_ListContextsResponse.Size(m) +} +func (m *ListContextsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListContextsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListContextsResponse proto.InternalMessageInfo + +func (m *ListContextsResponse) GetContexts() []*Context { + if m != nil { + return m.Contexts + } + return nil +} + +func (m *ListContextsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Contexts.GetContext][google.cloud.dialogflow.v2beta1.Contexts.GetContext]. +type GetContextRequest struct { + // Required. The name of the context. Format: + // `projects//agent/sessions//contexts/` + // or `projects//agent/environments//users//sessions//contexts/`. If `Environment ID` is + // not specified, we assume default 'draft' environment. If `User ID` is not + // specified, we assume default '-' user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetContextRequest) Reset() { *m = GetContextRequest{} } +func (m *GetContextRequest) String() string { return proto.CompactTextString(m) } +func (*GetContextRequest) ProtoMessage() {} +func (*GetContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{3} +} +func (m *GetContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetContextRequest.Unmarshal(m, b) +} +func (m *GetContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetContextRequest.Marshal(b, m, deterministic) +} +func (dst *GetContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetContextRequest.Merge(dst, src) +} +func (m *GetContextRequest) XXX_Size() int { + return xxx_messageInfo_GetContextRequest.Size(m) +} +func (m *GetContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetContextRequest proto.InternalMessageInfo + +func (m *GetContextRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Contexts.CreateContext][google.cloud.dialogflow.v2beta1.Contexts.CreateContext]. +type CreateContextRequest struct { + // Required. The session to create a context for. 
+ // Format: `projects//agent/sessions/` or + // `projects//agent/environments//users//sessions/`. If `Environment ID` is not specified, we assume + // default 'draft' environment. If `User ID` is not specified, we assume + // default '-' user. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The context to create. + Context *Context `protobuf:"bytes,2,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateContextRequest) Reset() { *m = CreateContextRequest{} } +func (m *CreateContextRequest) String() string { return proto.CompactTextString(m) } +func (*CreateContextRequest) ProtoMessage() {} +func (*CreateContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{4} +} +func (m *CreateContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateContextRequest.Unmarshal(m, b) +} +func (m *CreateContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateContextRequest.Marshal(b, m, deterministic) +} +func (dst *CreateContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateContextRequest.Merge(dst, src) +} +func (m *CreateContextRequest) XXX_Size() int { + return xxx_messageInfo_CreateContextRequest.Size(m) +} +func (m *CreateContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateContextRequest proto.InternalMessageInfo + +func (m *CreateContextRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateContextRequest) GetContext() *Context { + if m != nil { + return m.Context + } + return nil +} + +// The request message for [Contexts.UpdateContext][google.cloud.dialogflow.v2beta1.Contexts.UpdateContext]. +type UpdateContextRequest struct { + // Required. The context to update. + Context *Context `protobuf:"bytes,1,opt,name=context,proto3" json:"context,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateContextRequest) Reset() { *m = UpdateContextRequest{} } +func (m *UpdateContextRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateContextRequest) ProtoMessage() {} +func (*UpdateContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{5} +} +func (m *UpdateContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateContextRequest.Unmarshal(m, b) +} +func (m *UpdateContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateContextRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateContextRequest.Merge(dst, src) +} +func (m *UpdateContextRequest) XXX_Size() int { + return xxx_messageInfo_UpdateContextRequest.Size(m) +} +func (m *UpdateContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateContextRequest proto.InternalMessageInfo + +func (m *UpdateContextRequest) GetContext() *Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *UpdateContextRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [Contexts.DeleteContext][google.cloud.dialogflow.v2beta1.Contexts.DeleteContext]. +type DeleteContextRequest struct { + // Required. The name of the context to delete. Format: + // `projects//agent/sessions//contexts/` + // or `projects//agent/environments//users//sessions//contexts/`. If `Environment ID` is + // not specified, we assume default 'draft' environment. If `User ID` is not + // specified, we assume default '-' user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteContextRequest) Reset() { *m = DeleteContextRequest{} } +func (m *DeleteContextRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteContextRequest) ProtoMessage() {} +func (*DeleteContextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{6} +} +func (m *DeleteContextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteContextRequest.Unmarshal(m, b) +} +func (m *DeleteContextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteContextRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteContextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteContextRequest.Merge(dst, src) +} +func (m *DeleteContextRequest) XXX_Size() int { + return xxx_messageInfo_DeleteContextRequest.Size(m) +} +func (m *DeleteContextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteContextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteContextRequest proto.InternalMessageInfo + +func (m *DeleteContextRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Contexts.DeleteAllContexts][google.cloud.dialogflow.v2beta1.Contexts.DeleteAllContexts]. +type DeleteAllContextsRequest struct { + // Required. 
The name of the session to delete all contexts from. Format: + // `projects//agent/sessions/` or `projects//agent/environments//users//sessions/`. If `Environment ID` is not specified we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAllContextsRequest) Reset() { *m = DeleteAllContextsRequest{} } +func (m *DeleteAllContextsRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAllContextsRequest) ProtoMessage() {} +func (*DeleteAllContextsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_context_4c18307a7450af94, []int{7} +} +func (m *DeleteAllContextsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAllContextsRequest.Unmarshal(m, b) +} +func (m *DeleteAllContextsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAllContextsRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAllContextsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAllContextsRequest.Merge(dst, src) +} +func (m *DeleteAllContextsRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAllContextsRequest.Size(m) +} +func (m *DeleteAllContextsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAllContextsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAllContextsRequest proto.InternalMessageInfo + +func (m *DeleteAllContextsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func init() { + proto.RegisterType((*Context)(nil), "google.cloud.dialogflow.v2beta1.Context") + proto.RegisterType((*ListContextsRequest)(nil), "google.cloud.dialogflow.v2beta1.ListContextsRequest") + proto.RegisterType((*ListContextsResponse)(nil), "google.cloud.dialogflow.v2beta1.ListContextsResponse") + proto.RegisterType((*GetContextRequest)(nil), "google.cloud.dialogflow.v2beta1.GetContextRequest") + proto.RegisterType((*CreateContextRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateContextRequest") + proto.RegisterType((*UpdateContextRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateContextRequest") + proto.RegisterType((*DeleteContextRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteContextRequest") + proto.RegisterType((*DeleteAllContextsRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteAllContextsRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ContextsClient is the client API for Contexts service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ContextsClient interface { + // Returns the list of all contexts in the specified session. + ListContexts(ctx context.Context, in *ListContextsRequest, opts ...grpc.CallOption) (*ListContextsResponse, error) + // Retrieves the specified context. + GetContext(ctx context.Context, in *GetContextRequest, opts ...grpc.CallOption) (*Context, error) + // Creates a context. + // + // If the specified context already exists, overrides the context. 
+ CreateContext(ctx context.Context, in *CreateContextRequest, opts ...grpc.CallOption) (*Context, error) + // Updates the specified context. + UpdateContext(ctx context.Context, in *UpdateContextRequest, opts ...grpc.CallOption) (*Context, error) + // Deletes the specified context. + DeleteContext(ctx context.Context, in *DeleteContextRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes all active contexts in the specified session. + DeleteAllContexts(ctx context.Context, in *DeleteAllContextsRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type contextsClient struct { + cc *grpc.ClientConn +} + +func NewContextsClient(cc *grpc.ClientConn) ContextsClient { + return &contextsClient{cc} +} + +func (c *contextsClient) ListContexts(ctx context.Context, in *ListContextsRequest, opts ...grpc.CallOption) (*ListContextsResponse, error) { + out := new(ListContextsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/ListContexts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) GetContext(ctx context.Context, in *GetContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/GetContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) CreateContext(ctx context.Context, in *CreateContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/CreateContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) UpdateContext(ctx context.Context, in *UpdateContextRequest, opts ...grpc.CallOption) (*Context, error) { + out := new(Context) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/UpdateContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) DeleteContext(ctx context.Context, in *DeleteContextRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/DeleteContext", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contextsClient) DeleteAllContexts(ctx context.Context, in *DeleteAllContextsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Contexts/DeleteAllContexts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContextsServer is the server API for Contexts service. +type ContextsServer interface { + // Returns the list of all contexts in the specified session. + ListContexts(context.Context, *ListContextsRequest) (*ListContextsResponse, error) + // Retrieves the specified context. + GetContext(context.Context, *GetContextRequest) (*Context, error) + // Creates a context. + // + // If the specified context already exists, overrides the context. + CreateContext(context.Context, *CreateContextRequest) (*Context, error) + // Updates the specified context. + UpdateContext(context.Context, *UpdateContextRequest) (*Context, error) + // Deletes the specified context. + DeleteContext(context.Context, *DeleteContextRequest) (*empty.Empty, error) + // Deletes all active contexts in the specified session. 
+ DeleteAllContexts(context.Context, *DeleteAllContextsRequest) (*empty.Empty, error) +} + +func RegisterContextsServer(s *grpc.Server, srv ContextsServer) { + s.RegisterService(&_Contexts_serviceDesc, srv) +} + +func _Contexts_ListContexts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListContextsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).ListContexts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/ListContexts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).ListContexts(ctx, req.(*ListContextsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_GetContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).GetContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/GetContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).GetContext(ctx, req.(*GetContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_CreateContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).CreateContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/CreateContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).CreateContext(ctx, req.(*CreateContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_UpdateContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).UpdateContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/UpdateContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).UpdateContext(ctx, req.(*UpdateContextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Contexts_DeleteContext_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteContextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).DeleteContext(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/DeleteContext", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).DeleteContext(ctx, req.(*DeleteContextRequest)) + } + return interceptor(ctx, 
in, info, handler) +} + +func _Contexts_DeleteAllContexts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAllContextsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContextsServer).DeleteAllContexts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Contexts/DeleteAllContexts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContextsServer).DeleteAllContexts(ctx, req.(*DeleteAllContextsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Contexts_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.Contexts", + HandlerType: (*ContextsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListContexts", + Handler: _Contexts_ListContexts_Handler, + }, + { + MethodName: "GetContext", + Handler: _Contexts_GetContext_Handler, + }, + { + MethodName: "CreateContext", + Handler: _Contexts_CreateContext_Handler, + }, + { + MethodName: "UpdateContext", + Handler: _Contexts_UpdateContext_Handler, + }, + { + MethodName: "DeleteContext", + Handler: _Contexts_DeleteContext_Handler, + }, + { + MethodName: "DeleteAllContexts", + Handler: _Contexts_DeleteAllContexts_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/context.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/context.proto", fileDescriptor_context_4c18307a7450af94) +} + +var fileDescriptor_context_4c18307a7450af94 = []byte{ + // 817 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6b, 0x1b, 0x47, + 0x18, 0x66, 0xd6, 0xad, 0x3f, 0xc6, 0x56, 0x8b, 0xa7, 0xc2, 0x55, 0x65, 0x17, 0x8b, 0x2d, 0x6d, + 0x85, 0xa0, 0xbb, 0x74, 0xfb, 0x45, 0x6b, 0x5a, 0xa8, 0xa5, 0xda, 0x14, 0x6a, 0x6a, 0xe4, 0xba, + 0x14, 0x5f, 0xd4, 0xb1, 0xf4, 0x6a, 0xd9, 0x78, 0x35, 0xb3, 0xd9, 0x99, 0x75, 0x1c, 0x07, 0x5f, + 0x42, 0x2e, 0xb9, 0x84, 0x40, 0x08, 0x39, 0xe4, 0x66, 0xc8, 0xc5, 0xb9, 0xe5, 0x6f, 0xe4, 0x98, + 0xbf, 0x90, 0x3f, 0x90, 0x5b, 0x72, 0x08, 0x84, 0xdd, 0x9d, 0xd5, 0x4a, 0xd6, 0x3a, 0xd2, 0x1a, + 0x9f, 0xb4, 0xfb, 0x7e, 0xcd, 0xf3, 0xbc, 0xf3, 0xbc, 0xaf, 0x16, 0x7f, 0x63, 0x73, 0x6e, 0xbb, + 0x60, 0xb6, 0x5d, 0x1e, 0x74, 0xcc, 0x8e, 0x43, 0x5d, 0x6e, 0x77, 0x5d, 0x7e, 0xc3, 0x3c, 0xb4, + 0xf6, 0x41, 0xd2, 0x6f, 0xcd, 0x36, 0x67, 0x12, 0x8e, 0xa4, 0xe1, 0xf9, 0x5c, 0x72, 0xb2, 0x1a, + 0x87, 0x1b, 0x51, 0xb8, 0x91, 0x86, 0x1b, 0x2a, 0xbc, 0xbc, 0xa2, 0xea, 0x51, 0xcf, 0x31, 0x29, + 0x63, 0x5c, 0x52, 0xe9, 0x70, 0x26, 0xe2, 0xf4, 0xf2, 0x67, 0x03, 0x5e, 0x1f, 0x04, 0x0f, 0xfc, + 0x36, 0x28, 0xd7, 0xb2, 0x72, 0x45, 0x6f, 0xfb, 0x41, 0xd7, 0x84, 0x9e, 0x27, 0x6f, 0x2a, 0x67, + 0xe5, 0xbc, 0xb3, 0xeb, 0x80, 0xdb, 0x69, 0xf5, 0xa8, 0x38, 0x50, 0x11, 0x2b, 0xe7, 0x23, 0x84, + 0xf4, 0x83, 0xb6, 0x82, 0xad, 0x9f, 0xe0, 0x99, 0x7a, 0xcc, 0x83, 0x10, 0xfc, 0x01, 0xa3, 0x3d, + 0x28, 0xa1, 0x0a, 0xaa, 0xce, 0x35, 0xa3, 0x67, 0xf2, 0x25, 0xfe, 0xc8, 0x75, 0xba, 0x20, 0x3c, + 0xca, 0x5a, 0x6d, 0x1e, 0x30, 0x59, 0xd2, 0x2a, 0xa8, 0xfa, 0x61, 0xb3, 0x90, 0x58, 0xeb, 0xa1, + 0x91, 0xfc, 0x84, 0xb1, 0x47, 0x7d, 0xda, 0x03, 0x09, 0xbe, 0x28, 0x4d, 0x55, 0x50, 0x75, 0xde, + 0xfa, 0xd4, 0x50, 0x1d, 0x49, 0x0e, 0x36, 0x76, 0xa2, 0x83, 0x9b, 0x03, 0xa1, 0xba, 0x83, 0x3f, + 0xf9, 0xcb, 0x11, 0x52, 0x41, 0x10, 0x4d, 
0xb8, 0x1e, 0x80, 0x90, 0x64, 0x09, 0x4f, 0x7b, 0xd4, + 0x07, 0x26, 0x15, 0x18, 0xf5, 0x46, 0x96, 0xf1, 0x9c, 0x47, 0x6d, 0x68, 0x09, 0xe7, 0x18, 0x14, + 0x92, 0xd9, 0xd0, 0xb0, 0xe3, 0x1c, 0x03, 0xf9, 0x3c, 0x04, 0x61, 0x43, 0x4b, 0xf2, 0x03, 0x60, + 0x11, 0x88, 0xb9, 0x66, 0x14, 0xfe, 0x4f, 0x68, 0xd0, 0xef, 0x20, 0x5c, 0x1c, 0x3e, 0x4b, 0x78, + 0x9c, 0x09, 0x20, 0x0d, 0x3c, 0xab, 0xae, 0x52, 0x94, 0x50, 0x65, 0xaa, 0x3a, 0x6f, 0x55, 0x8d, + 0x31, 0x97, 0x69, 0xa8, 0x22, 0xcd, 0x7e, 0x26, 0xf9, 0x0a, 0x7f, 0xcc, 0xe0, 0x48, 0xb6, 0x06, + 0x20, 0x68, 0x11, 0x84, 0x42, 0x68, 0xde, 0xee, 0xc3, 0xf8, 0x1a, 0x2f, 0x6e, 0x42, 0x02, 0x22, + 0xe1, 0x9b, 0xd1, 0x7a, 0xdd, 0xc7, 0xc5, 0xba, 0x0f, 0x54, 0xc2, 0xb9, 0xd8, 0x8b, 0x7a, 0xb3, + 0x8e, 0x67, 0x14, 0x98, 0xe8, 0xe0, 0x3c, 0x2c, 0x92, 0x44, 0xfd, 0x11, 0xc2, 0xc5, 0x5d, 0xaf, + 0x33, 0x7a, 0xe8, 0x40, 0x71, 0x74, 0xc9, 0xe2, 0x64, 0x0d, 0xcf, 0x07, 0x51, 0xed, 0x48, 0x9d, + 0x0a, 0x64, 0x79, 0x44, 0x25, 0x1b, 0xa1, 0x80, 0xb7, 0xa8, 0x38, 0x68, 0xe2, 0x38, 0x3c, 0x7c, + 0xd6, 0x6b, 0xb8, 0xd8, 0x00, 0x17, 0x46, 0x80, 0x65, 0x75, 0xce, 0xc2, 0xa5, 0x38, 0xf6, 0x77, + 0xd7, 0x9d, 0x50, 0x59, 0xd6, 0xf3, 0x05, 0x3c, 0x9b, 0xc4, 0x92, 0x7b, 0x1a, 0x5e, 0x18, 0x94, + 0x0a, 0xf9, 0x7e, 0x2c, 0xdb, 0x0c, 0x15, 0x97, 0x7f, 0xc8, 0x99, 0x15, 0xeb, 0x51, 0xbf, 0x8f, + 0x6e, 0xbf, 0x78, 0xf9, 0x40, 0xbb, 0x8b, 0xc8, 0x8f, 0xfd, 0x5d, 0x73, 0x2b, 0x86, 0xf9, 0xab, + 0xe7, 0xf3, 0x6b, 0xd0, 0x96, 0xc2, 0xac, 0x99, 0xd4, 0x06, 0x26, 0x4d, 0x01, 0x42, 0x84, 0x6b, + 0xc4, 0xac, 0x9d, 0x24, 0x0b, 0x49, 0xec, 0xfd, 0x4d, 0xb6, 0xc6, 0x67, 0x02, 0x3b, 0x74, 0x7c, + 0xce, 0x7a, 0xc0, 0x22, 0x63, 0x20, 0xc0, 0x0f, 0x7f, 0xb3, 0x0a, 0x92, 0xb7, 0x08, 0xe3, 0x54, + 0xb5, 0xc4, 0x1a, 0x4b, 0x6c, 0x44, 0xe2, 0xe5, 0x89, 0x05, 0x93, 0xcd, 0x3f, 0xbc, 0xd8, 0xf7, + 0xb1, 0xef, 0x63, 0x35, 0x6b, 0x27, 0xc3, 0xfc, 0xb3, 0x33, 0xc7, 0xb2, 0x1f, 0x2c, 0x48, 0x1e, + 0x6a, 0xb8, 0x30, 0x34, 0x8c, 0x64, 0xfc, 0xdd, 0x66, 0x0d, 0x6f, 0x8e, 0x2e, 0x9c, 0xc6, 0x5d, + 0x78, 0x8c, 0xf4, 0x4b, 0xaa, 0xe0, 0x97, 0x64, 0xf0, 0xf6, 0xfe, 0xd3, 0xaf, 0x56, 0x0e, 0xfd, + 0xca, 0xe4, 0x89, 0x86, 0x0b, 0x43, 0xfb, 0x62, 0x82, 0xbe, 0x64, 0xed, 0x97, 0x1c, 0x7d, 0x79, + 0x16, 0xf7, 0xe5, 0x29, 0xb2, 0x7e, 0x4b, 0x49, 0x25, 0x7f, 0xc5, 0x79, 0x54, 0x92, 0xf6, 0xe7, + 0x7f, 0x6b, 0x77, 0xd2, 0x52, 0xb9, 0x64, 0x93, 0xf6, 0xe9, 0x15, 0xc2, 0x85, 0xa1, 0xf5, 0x35, + 0x41, 0x9f, 0xb2, 0xd6, 0x5d, 0x79, 0x69, 0x64, 0x5d, 0xfe, 0x11, 0x7e, 0x0c, 0xa4, 0x33, 0x53, + 0xbb, 0xf4, 0xcc, 0xd4, 0xae, 0x78, 0x66, 0xde, 0x20, 0xbc, 0x38, 0xb2, 0x86, 0xc9, 0xcf, 0x13, + 0xf2, 0x1e, 0x5d, 0xdd, 0xf9, 0xb8, 0xe7, 0xdb, 0x97, 0xb5, 0xab, 0x1d, 0x90, 0xf5, 0x33, 0x84, + 0xbf, 0x68, 0xf3, 0xde, 0x38, 0xae, 0xeb, 0x0b, 0x8a, 0xe3, 0x76, 0xc8, 0x68, 0x1b, 0xed, 0xfd, + 0xa9, 0x12, 0x6c, 0xee, 0x52, 0x66, 0x1b, 0xdc, 0xb7, 0x4d, 0x1b, 0x58, 0xc4, 0xd7, 0x8c, 0x5d, + 0xd4, 0x73, 0xc4, 0x85, 0x9f, 0xa4, 0x6b, 0xa9, 0xe9, 0x35, 0x42, 0xa7, 0x9a, 0xd6, 0xd8, 0x38, + 0xd3, 0x56, 0x37, 0xe3, 0x9a, 0xf5, 0x08, 0x44, 0x23, 0x05, 0xf1, 0x6f, 0x9c, 0xb4, 0x3f, 0x1d, + 0xd5, 0xff, 0xee, 0x5d, 0x00, 0x00, 0x00, 0xff, 0xff, 0x18, 0x1c, 0x8a, 0x0f, 0xf1, 0x0a, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/document.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/document.pb.go new file mode 100644 index 0000000..3000e66 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/document.pb.go @@ -0,0 +1,1052 @@ 
+// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/document.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The knowledge type of document content. +type Document_KnowledgeType int32 + +const ( + // The type is unspecified or arbitrary. + Document_KNOWLEDGE_TYPE_UNSPECIFIED Document_KnowledgeType = 0 + // The document content contains question and answer pairs as either HTML or + // CSV. Typical FAQ HTML formats are parsed accurately, but unusual formats + // may fail to be parsed. + // + // CSV must have questions in the first column and answers in the second, + // with no header. Because of this explicit format, they are always parsed + // accurately. + Document_FAQ Document_KnowledgeType = 1 + // Documents for which unstructured text is extracted and used for + // question answering. + Document_EXTRACTIVE_QA Document_KnowledgeType = 2 +) + +var Document_KnowledgeType_name = map[int32]string{ + 0: "KNOWLEDGE_TYPE_UNSPECIFIED", + 1: "FAQ", + 2: "EXTRACTIVE_QA", +} +var Document_KnowledgeType_value = map[string]int32{ + "KNOWLEDGE_TYPE_UNSPECIFIED": 0, + "FAQ": 1, + "EXTRACTIVE_QA": 2, +} + +func (x Document_KnowledgeType) String() string { + return proto.EnumName(Document_KnowledgeType_name, int32(x)) +} +func (Document_KnowledgeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{0, 0} +} + +// States of the operation. +type KnowledgeOperationMetadata_State int32 + +const ( + // State unspecified. + KnowledgeOperationMetadata_STATE_UNSPECIFIED KnowledgeOperationMetadata_State = 0 + // The operation has been created. + KnowledgeOperationMetadata_PENDING KnowledgeOperationMetadata_State = 1 + // The operation is currently running. + KnowledgeOperationMetadata_RUNNING KnowledgeOperationMetadata_State = 2 + // The operation is done, either cancelled or completed. + KnowledgeOperationMetadata_DONE KnowledgeOperationMetadata_State = 3 +) + +var KnowledgeOperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", +} +var KnowledgeOperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, +} + +func (x KnowledgeOperationMetadata_State) String() string { + return proto.EnumName(KnowledgeOperationMetadata_State_name, int32(x)) +} +func (KnowledgeOperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{7, 0} +} + +// A document resource. 
+// +// Note: resource `projects.agent.knowledgeBases.documents` is deprecated, +// please use `projects.knowledgeBases.documents` instead. +type Document struct { + // The document resource name. + // The name must be empty when creating a document. + // Format: `projects//knowledgeBases//documents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The display name of the document. The name must be 1024 bytes or + // less; otherwise, the creation request fails. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. The MIME type of this document. + MimeType string `protobuf:"bytes,3,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Required. The knowledge type of document content. + KnowledgeTypes []Document_KnowledgeType `protobuf:"varint,4,rep,packed,name=knowledge_types,json=knowledgeTypes,proto3,enum=google.cloud.dialogflow.v2beta1.Document_KnowledgeType" json:"knowledge_types,omitempty"` + // Required. The source of this document. + // + // Types that are valid to be assigned to Source: + // *Document_ContentUri + // *Document_Content + // *Document_RawContent + Source isDocument_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Document) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Document) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *Document) GetKnowledgeTypes() []Document_KnowledgeType { + if m != nil { + return m.KnowledgeTypes + } + return nil +} + +type isDocument_Source interface { + isDocument_Source() +} + +type Document_ContentUri struct { + ContentUri string `protobuf:"bytes,5,opt,name=content_uri,json=contentUri,proto3,oneof"` +} + +type Document_Content struct { + Content string `protobuf:"bytes,6,opt,name=content,proto3,oneof"` +} + +type Document_RawContent struct { + RawContent []byte `protobuf:"bytes,9,opt,name=raw_content,json=rawContent,proto3,oneof"` +} + +func (*Document_ContentUri) isDocument_Source() {} + +func (*Document_Content) isDocument_Source() {} + +func (*Document_RawContent) isDocument_Source() {} + +func (m *Document) GetSource() isDocument_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Document) GetContentUri() string { + if x, ok := m.GetSource().(*Document_ContentUri); ok { + return x.ContentUri + } + return "" +} + +func (m *Document) 
GetContent() string { + if x, ok := m.GetSource().(*Document_Content); ok { + return x.Content + } + return "" +} + +func (m *Document) GetRawContent() []byte { + if x, ok := m.GetSource().(*Document_RawContent); ok { + return x.RawContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{ + (*Document_ContentUri)(nil), + (*Document_Content)(nil), + (*Document_RawContent)(nil), + } +} + +func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_ContentUri: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ContentUri) + case *Document_Content: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Content) + case *Document_RawContent: + b.EncodeVarint(9<<3 | proto.WireBytes) + b.EncodeRawBytes(x.RawContent) + case nil: + default: + return fmt.Errorf("Document.Source has unexpected type %T", x) + } + return nil +} + +func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Document) + switch tag { + case 5: // source.content_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_ContentUri{x} + return true, err + case 6: // source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_Content{x} + return true, err + case 9: // source.raw_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Source = &Document_RawContent{x} + return true, err + default: + return false, nil + } +} + +func _Document_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_ContentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ContentUri))) + n += len(x.ContentUri) + case *Document_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *Document_RawContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RawContent))) + n += len(x.RawContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for [Documents.ListDocuments][google.cloud.dialogflow.v2beta1.Documents.ListDocuments]. +type ListDocumentsRequest struct { + // Required. The knowledge base to list all documents for. + // Format: `projects//knowledgeBases/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 10 and at most 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsRequest) Reset() { *m = ListDocumentsRequest{} } +func (m *ListDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsRequest) ProtoMessage() {} +func (*ListDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{1} +} +func (m *ListDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsRequest.Unmarshal(m, b) +} +func (m *ListDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsRequest.Merge(dst, src) +} +func (m *ListDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ListDocumentsRequest.Size(m) +} +func (m *ListDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsRequest proto.InternalMessageInfo + +func (m *ListDocumentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDocumentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDocumentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [Documents.ListDocuments][google.cloud.dialogflow.v2beta1.Documents.ListDocuments]. +type ListDocumentsResponse struct { + // The list of documents. + Documents []*Document `protobuf:"bytes,1,rep,name=documents,proto3" json:"documents,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsResponse) Reset() { *m = ListDocumentsResponse{} } +func (m *ListDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsResponse) ProtoMessage() {} +func (*ListDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{2} +} +func (m *ListDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsResponse.Unmarshal(m, b) +} +func (m *ListDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsResponse.Merge(dst, src) +} +func (m *ListDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ListDocumentsResponse.Size(m) +} +func (m *ListDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsResponse proto.InternalMessageInfo + +func (m *ListDocumentsResponse) GetDocuments() []*Document { + if m != nil { + return m.Documents + } + return nil +} + +func (m *ListDocumentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [Documents.GetDocument][google.cloud.dialogflow.v2beta1.Documents.GetDocument]. +type GetDocumentRequest struct { + // Required. The name of the document to retrieve. + // Format `projects//knowledgeBases//documents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDocumentRequest) Reset() { *m = GetDocumentRequest{} } +func (m *GetDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*GetDocumentRequest) ProtoMessage() {} +func (*GetDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{3} +} +func (m *GetDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDocumentRequest.Unmarshal(m, b) +} +func (m *GetDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *GetDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDocumentRequest.Merge(dst, src) +} +func (m *GetDocumentRequest) XXX_Size() int { + return xxx_messageInfo_GetDocumentRequest.Size(m) +} +func (m *GetDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDocumentRequest proto.InternalMessageInfo + +func (m *GetDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [Documents.CreateDocument][google.cloud.dialogflow.v2beta1.Documents.CreateDocument]. +type CreateDocumentRequest struct { + // Required. The knoweldge base to create a document for. + // Format: `projects//knowledgeBases/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The document to create. 
+ Document *Document `protobuf:"bytes,2,opt,name=document,proto3" json:"document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDocumentRequest) Reset() { *m = CreateDocumentRequest{} } +func (m *CreateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDocumentRequest) ProtoMessage() {} +func (*CreateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{4} +} +func (m *CreateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDocumentRequest.Unmarshal(m, b) +} +func (m *CreateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDocumentRequest.Merge(dst, src) +} +func (m *CreateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_CreateDocumentRequest.Size(m) +} +func (m *CreateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDocumentRequest proto.InternalMessageInfo + +func (m *CreateDocumentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +// Request message for [Documents.DeleteDocument][google.cloud.dialogflow.v2beta1.Documents.DeleteDocument]. +type DeleteDocumentRequest struct { + // The name of the document to delete. + // Format: `projects//knowledgeBases//documents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDocumentRequest) Reset() { *m = DeleteDocumentRequest{} } +func (m *DeleteDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDocumentRequest) ProtoMessage() {} +func (*DeleteDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{5} +} +func (m *DeleteDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDocumentRequest.Unmarshal(m, b) +} +func (m *DeleteDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDocumentRequest.Merge(dst, src) +} +func (m *DeleteDocumentRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDocumentRequest.Size(m) +} +func (m *DeleteDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDocumentRequest proto.InternalMessageInfo + +func (m *DeleteDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [Documents.UpdateDocument][google.cloud.dialogflow.v2beta1.Documents.UpdateDocument]. +type UpdateDocumentRequest struct { + // Required. The document to update. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // Optional. Not specified means `update all`. 
+ // Currently, only `display_name` can be updated, an InvalidArgument will be + // returned for attempting to update other fields. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDocumentRequest) Reset() { *m = UpdateDocumentRequest{} } +func (m *UpdateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDocumentRequest) ProtoMessage() {} +func (*UpdateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{6} +} +func (m *UpdateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDocumentRequest.Unmarshal(m, b) +} +func (m *UpdateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDocumentRequest.Merge(dst, src) +} +func (m *UpdateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDocumentRequest.Size(m) +} +func (m *UpdateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDocumentRequest proto.InternalMessageInfo + +func (m *UpdateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *UpdateDocumentRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Metadata in google::longrunning::Operation for Knowledge operations. +type KnowledgeOperationMetadata struct { + // Required. The current state of this operation. 
+ State KnowledgeOperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dialogflow.v2beta1.KnowledgeOperationMetadata_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KnowledgeOperationMetadata) Reset() { *m = KnowledgeOperationMetadata{} } +func (m *KnowledgeOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*KnowledgeOperationMetadata) ProtoMessage() {} +func (*KnowledgeOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{7} +} +func (m *KnowledgeOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnowledgeOperationMetadata.Unmarshal(m, b) +} +func (m *KnowledgeOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnowledgeOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *KnowledgeOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnowledgeOperationMetadata.Merge(dst, src) +} +func (m *KnowledgeOperationMetadata) XXX_Size() int { + return xxx_messageInfo_KnowledgeOperationMetadata.Size(m) +} +func (m *KnowledgeOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_KnowledgeOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_KnowledgeOperationMetadata proto.InternalMessageInfo + +func (m *KnowledgeOperationMetadata) GetState() KnowledgeOperationMetadata_State { + if m != nil { + return m.State + } + return KnowledgeOperationMetadata_STATE_UNSPECIFIED +} + +// Request message for [Documents.ReloadDocument][google.cloud.dialogflow.v2beta1.Documents.ReloadDocument]. +type ReloadDocumentRequest struct { + // The name of the document to reload. 
+ // Format: `projects//knowledgeBases//documents/` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReloadDocumentRequest) Reset() { *m = ReloadDocumentRequest{} } +func (m *ReloadDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*ReloadDocumentRequest) ProtoMessage() {} +func (*ReloadDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_document_b7905c72d2ee7910, []int{8} +} +func (m *ReloadDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReloadDocumentRequest.Unmarshal(m, b) +} +func (m *ReloadDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReloadDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *ReloadDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReloadDocumentRequest.Merge(dst, src) +} +func (m *ReloadDocumentRequest) XXX_Size() int { + return xxx_messageInfo_ReloadDocumentRequest.Size(m) +} +func (m *ReloadDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReloadDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReloadDocumentRequest proto.InternalMessageInfo + +func (m *ReloadDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*Document)(nil), "google.cloud.dialogflow.v2beta1.Document") + proto.RegisterType((*ListDocumentsRequest)(nil), "google.cloud.dialogflow.v2beta1.ListDocumentsRequest") + proto.RegisterType((*ListDocumentsResponse)(nil), "google.cloud.dialogflow.v2beta1.ListDocumentsResponse") + proto.RegisterType((*GetDocumentRequest)(nil), "google.cloud.dialogflow.v2beta1.GetDocumentRequest") + proto.RegisterType((*CreateDocumentRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateDocumentRequest") + proto.RegisterType((*DeleteDocumentRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteDocumentRequest") + proto.RegisterType((*UpdateDocumentRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateDocumentRequest") + proto.RegisterType((*KnowledgeOperationMetadata)(nil), "google.cloud.dialogflow.v2beta1.KnowledgeOperationMetadata") + proto.RegisterType((*ReloadDocumentRequest)(nil), "google.cloud.dialogflow.v2beta1.ReloadDocumentRequest") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.Document_KnowledgeType", Document_KnowledgeType_name, Document_KnowledgeType_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.KnowledgeOperationMetadata_State", KnowledgeOperationMetadata_State_name, KnowledgeOperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DocumentsClient is the client API for Documents service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DocumentsClient interface { + // Returns the list of all documents of the knowledge base. + ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) + // Retrieves the specified document. 
+ GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Creates a new document. + // + // Operation + CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes the specified document. + // + // Operation + DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the specified document. + // Operation + UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Reloads the specified document from its specified source, content_uri or + // content. The previously loaded content of the document will be deleted. + // Note: Even when the content of the document has not changed, there still + // may be side effects because of internal implementation changes. + // Operation + ReloadDocument(ctx context.Context, in *ReloadDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type documentsClient struct { + cc *grpc.ClientConn +} + +func NewDocumentsClient(cc *grpc.ClientConn) DocumentsClient { + return &documentsClient{cc} +} + +func (c *documentsClient) ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) { + out := new(ListDocumentsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/ListDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *documentsClient) GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/GetDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *documentsClient) CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/CreateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *documentsClient) DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/DeleteDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *documentsClient) UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/UpdateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *documentsClient) ReloadDocument(ctx context.Context, in *ReloadDocumentRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Documents/ReloadDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DocumentsServer is the server API for Documents service. +type DocumentsServer interface { + // Returns the list of all documents of the knowledge base. 
+ ListDocuments(context.Context, *ListDocumentsRequest) (*ListDocumentsResponse, error) + // Retrieves the specified document. + GetDocument(context.Context, *GetDocumentRequest) (*Document, error) + // Creates a new document. + // + // Operation + CreateDocument(context.Context, *CreateDocumentRequest) (*longrunning.Operation, error) + // Deletes the specified document. + // + // Operation + DeleteDocument(context.Context, *DeleteDocumentRequest) (*longrunning.Operation, error) + // Updates the specified document. + // Operation + UpdateDocument(context.Context, *UpdateDocumentRequest) (*longrunning.Operation, error) + // Reloads the specified document from its specified source, content_uri or + // content. The previously loaded content of the document will be deleted. + // Note: Even when the content of the document has not changed, there still + // may be side effects because of internal implementation changes. + // Operation + ReloadDocument(context.Context, *ReloadDocumentRequest) (*longrunning.Operation, error) +} + +func RegisterDocumentsServer(s *grpc.Server, srv DocumentsServer) { + s.RegisterService(&_Documents_serviceDesc, srv) +} + +func _Documents_ListDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).ListDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/ListDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).ListDocuments(ctx, req.(*ListDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Documents_GetDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).GetDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/GetDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).GetDocument(ctx, req.(*GetDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Documents_CreateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).CreateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/CreateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).CreateDocument(ctx, req.(*CreateDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Documents_DeleteDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).DeleteDocument(ctx, in) + } + 
info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/DeleteDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).DeleteDocument(ctx, req.(*DeleteDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Documents_UpdateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).UpdateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/UpdateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).UpdateDocument(ctx, req.(*UpdateDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Documents_ReloadDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReloadDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DocumentsServer).ReloadDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Documents/ReloadDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DocumentsServer).ReloadDocument(ctx, req.(*ReloadDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Documents_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.Documents", + HandlerType: (*DocumentsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListDocuments", + Handler: _Documents_ListDocuments_Handler, + }, + { + MethodName: "GetDocument", + Handler: _Documents_GetDocument_Handler, + }, + { + MethodName: "CreateDocument", + Handler: _Documents_CreateDocument_Handler, + }, + { + MethodName: "DeleteDocument", + Handler: _Documents_DeleteDocument_Handler, + }, + { + MethodName: "UpdateDocument", + Handler: _Documents_UpdateDocument_Handler, + }, + { + MethodName: "ReloadDocument", + Handler: _Documents_ReloadDocument_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/document.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/document.proto", fileDescriptor_document_b7905c72d2ee7910) +} + +var fileDescriptor_document_b7905c72d2ee7910 = []byte{ + // 1056 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x4d, 0x6f, 0x1b, 0x45, + 0x18, 0xce, 0x6c, 0xbe, 0x9c, 0xd7, 0x89, 0x9b, 0x8e, 0x08, 0x18, 0x97, 0xb6, 0xe9, 0x56, 0x42, + 0xc1, 0x48, 0xbb, 0xc2, 0x15, 0x54, 0x4a, 0x55, 0x20, 0x8e, 0x9d, 0xd4, 0x4a, 0xeb, 0xb8, 0x1b, + 0xa7, 0x85, 0x5c, 0x96, 0x89, 0x3d, 0x59, 0x6d, 0xb3, 0xde, 0x59, 0x76, 0xc6, 0x0d, 0x09, 0xaa, + 0x04, 0x5c, 0xe0, 0x84, 0x84, 0x10, 0x27, 0x2e, 0x88, 0x0b, 0x12, 0xfc, 0x02, 0x7e, 0x07, 0x7f, + 0x81, 0x13, 0x27, 0x8e, 0x5c, 0x10, 0x68, 0xf6, 0xcb, 0x1f, 0xdd, 0xc4, 0x4e, 0x72, 0xf3, 0xbc, + 0x1f, 0xcf, 0x3e, 0xcf, 0xbc, 0x1f, 0x93, 0x80, 0x66, 0x31, 0x66, 0x39, 0x54, 0x6f, 0x39, 0xac, + 0xdb, 0xd6, 0xdb, 0x36, 0x71, 0x98, 0x75, 0xe0, 0xb0, 0x23, 0xfd, 0x79, 0x69, 0x9f, 0x0a, 0xf2, + 0x8e, 0xde, 0x66, 
0xad, 0x6e, 0x87, 0xba, 0x42, 0xf3, 0x7c, 0x26, 0x18, 0xbe, 0x19, 0xc6, 0x6b, + 0x41, 0xbc, 0xd6, 0x8b, 0xd7, 0xa2, 0xf8, 0xc2, 0x1b, 0x11, 0x20, 0xf1, 0x6c, 0x9d, 0xb8, 0x2e, + 0x13, 0x44, 0xd8, 0xcc, 0xe5, 0x61, 0x7a, 0xe1, 0xf5, 0x3e, 0xaf, 0x4f, 0x39, 0xeb, 0xfa, 0x2d, + 0x1a, 0xb9, 0x6e, 0x47, 0x2e, 0x87, 0xb9, 0x96, 0xdf, 0x75, 0x5d, 0xdb, 0xb5, 0x74, 0xe6, 0x51, + 0x7f, 0x20, 0xff, 0x5a, 0x14, 0x14, 0x9c, 0xf6, 0xbb, 0x07, 0x3a, 0xed, 0x78, 0xe2, 0x38, 0x72, + 0x2e, 0x0f, 0x3b, 0x0f, 0x6c, 0xea, 0xb4, 0xcd, 0x0e, 0xe1, 0x87, 0x51, 0xc4, 0x6b, 0x51, 0x84, + 0xef, 0xb5, 0x74, 0x2e, 0x88, 0xe8, 0x46, 0xb8, 0xea, 0xbf, 0x0a, 0x64, 0x2a, 0x91, 0x52, 0x8c, + 0x61, 0xca, 0x25, 0x1d, 0x9a, 0x47, 0xcb, 0x68, 0x65, 0xce, 0x08, 0x7e, 0xe3, 0x5b, 0x30, 0xdf, + 0xb6, 0xb9, 0xe7, 0x90, 0x63, 0x33, 0xf0, 0x29, 0x81, 0x2f, 0x1b, 0xd9, 0xea, 0x32, 0xe4, 0x1a, + 0xcc, 0x75, 0xec, 0x0e, 0x35, 0xc5, 0xb1, 0x47, 0xf3, 0x93, 0x81, 0x3f, 0x23, 0x0d, 0xcd, 0x63, + 0x8f, 0xe2, 0x4f, 0xe0, 0xca, 0xa1, 0xcb, 0x8e, 0x1c, 0xda, 0xb6, 0xc2, 0x08, 0x9e, 0x9f, 0x5a, + 0x9e, 0x5c, 0xc9, 0x95, 0xee, 0x6a, 0x23, 0x6e, 0x54, 0x8b, 0x79, 0x69, 0x5b, 0x31, 0x80, 0x44, + 0x34, 0x72, 0x87, 0xfd, 0x47, 0x8e, 0x6f, 0x41, 0xb6, 0xc5, 0x5c, 0x41, 0x5d, 0x61, 0x76, 0x7d, + 0x3b, 0x3f, 0x2d, 0x09, 0x3c, 0x98, 0x30, 0x20, 0x32, 0xee, 0xfa, 0x36, 0xbe, 0x01, 0xb3, 0xd1, + 0x29, 0x3f, 0x23, 0xdd, 0x65, 0x25, 0x8f, 0x1e, 0x4c, 0x18, 0xb1, 0x51, 0x42, 0xf8, 0xe4, 0xc8, + 0x8c, 0x63, 0xe6, 0x96, 0xd1, 0xca, 0xbc, 0x84, 0xf0, 0xc9, 0xd1, 0x7a, 0x68, 0x53, 0xb7, 0x60, + 0x61, 0x80, 0x06, 0xbe, 0x01, 0x85, 0xad, 0xfa, 0xf6, 0xd3, 0x87, 0xd5, 0xca, 0x66, 0xd5, 0x6c, + 0x7e, 0xdc, 0xa8, 0x9a, 0xbb, 0xf5, 0x9d, 0x46, 0x75, 0xbd, 0xb6, 0x51, 0xab, 0x56, 0x16, 0x27, + 0xf0, 0x2c, 0x4c, 0x6e, 0xac, 0x3d, 0x5e, 0x44, 0xf8, 0x2a, 0x2c, 0x54, 0x3f, 0x6a, 0x1a, 0x6b, + 0xeb, 0xcd, 0xda, 0x93, 0xaa, 0xf9, 0x78, 0x6d, 0x51, 0x29, 0x67, 0x60, 0x26, 0x6c, 0x01, 0xf5, + 0x19, 0xbc, 0xf2, 0xd0, 0xe6, 0x22, 0x96, 0xca, 0x0d, 0xfa, 0x69, 0x97, 0x72, 0x81, 0x5f, 0x85, + 0x19, 0x8f, 0xf8, 0x92, 0x4c, 0x58, 0x8c, 0xe8, 0x24, 0xef, 0xda, 0x23, 0x16, 0x35, 0xb9, 0x7d, + 0x12, 0xd6, 0x62, 0xda, 0xc8, 0x48, 0xc3, 0x8e, 0x7d, 0x42, 0xf1, 0x75, 0x80, 0xc0, 0x29, 0xd8, + 0x21, 0x75, 0xa3, 0x4a, 0x04, 0xe1, 0x4d, 0x69, 0x50, 0xbf, 0x41, 0xb0, 0x34, 0xf4, 0x31, 0xee, + 0x31, 0x97, 0x53, 0xbc, 0x09, 0x73, 0x71, 0xbb, 0xf3, 0x3c, 0x5a, 0x9e, 0x5c, 0xc9, 0x96, 0xde, + 0x1a, 0xbb, 0x3c, 0x46, 0x2f, 0x17, 0xbf, 0x09, 0x57, 0x5c, 0xfa, 0x99, 0x30, 0xfb, 0x68, 0x84, + 0x0d, 0xb3, 0x20, 0xcd, 0x8d, 0x84, 0xca, 0x0a, 0xe0, 0x4d, 0x9a, 0x10, 0x89, 0x45, 0xa7, 0xf4, + 0x9f, 0xfa, 0x1c, 0x96, 0xd6, 0x7d, 0x4a, 0x04, 0x1d, 0x0e, 0x3e, 0xed, 0x86, 0xaa, 0x90, 0x89, + 0xf9, 0x04, 0xdf, 0x3e, 0x97, 0x94, 0x24, 0x55, 0x7d, 0x1b, 0x96, 0x2a, 0xd4, 0xa1, 0x2f, 0x7f, + 0x37, 0x8d, 0xe4, 0x8f, 0x08, 0x96, 0x76, 0xbd, 0x76, 0x0a, 0xcb, 0x7e, 0x36, 0xe8, 0xc2, 0x6c, + 0xf0, 0x3d, 0xc8, 0x76, 0x03, 0xfc, 0x60, 0xa8, 0x23, 0x5d, 0x85, 0x18, 0x29, 0x9e, 0x7b, 0x6d, + 0x43, 0xce, 0xfd, 0x23, 0xc2, 0x0f, 0x0d, 0x08, 0xc3, 0xe5, 0x6f, 0xf5, 0x77, 0x04, 0x85, 0xa4, + 0x77, 0xb7, 0xe3, 0xcd, 0xf2, 0x88, 0x0a, 0xd2, 0x26, 0x82, 0xe0, 0xa7, 0x30, 0x2d, 0x57, 0x42, + 0xa8, 0x28, 0x57, 0x5a, 0x1b, 0xc9, 0xef, 0x74, 0x2c, 0x6d, 0x47, 0x02, 0x19, 0x21, 0x9e, 0x5a, + 0x86, 0xe9, 0xe0, 0x8c, 0x97, 0xe0, 0xea, 0x4e, 0x73, 0xad, 0x39, 0x3c, 0x21, 0x59, 0x98, 0x6d, + 0x54, 0xeb, 0x95, 0x5a, 0x7d, 0x73, 0x11, 0xc9, 0x83, 0xb1, 0x5b, 0xaf, 0xcb, 0x83, 0x82, 0x33, + 0x30, 0x55, 0xd9, 0xae, 0x57, 0x17, 0x27, 
0x65, 0x19, 0x0c, 0xea, 0x30, 0xd2, 0x1e, 0xa3, 0x0c, + 0xa5, 0xaf, 0xe7, 0x61, 0x2e, 0x69, 0x6e, 0xfc, 0xa5, 0x02, 0x0b, 0x03, 0xed, 0x8e, 0xdf, 0x1d, + 0x29, 0x2d, 0x6d, 0x16, 0x0b, 0xef, 0x9d, 0x37, 0x2d, 0x9c, 0x2a, 0xf5, 0x0b, 0xf4, 0xd5, 0x1f, + 0x7f, 0x7e, 0xaf, 0x9c, 0xe0, 0xbb, 0xc9, 0xa3, 0xf2, 0x79, 0xd8, 0xa4, 0xf7, 0x3d, 0x9f, 0x3d, + 0xa3, 0x2d, 0xc1, 0xf5, 0xa2, 0x9e, 0x6c, 0xb3, 0x32, 0xe1, 0x94, 0xeb, 0xc5, 0x17, 0xc9, 0xcb, + 0xc3, 0xf7, 0x3e, 0xc0, 0xf7, 0xcf, 0x4a, 0x25, 0x16, 0x75, 0xc5, 0x59, 0x00, 0xf8, 0x6f, 0x04, + 0xd9, 0xbe, 0x41, 0xc3, 0x77, 0x46, 0x4a, 0x79, 0x79, 0x2c, 0x0b, 0xe3, 0x77, 0x6c, 0xaa, 0x64, + 0x59, 0x9a, 0xb3, 0x04, 0xf7, 0xe8, 0xea, 0xc5, 0x17, 0x83, 0x92, 0x87, 0x53, 0xd3, 0x05, 0x0f, + 0x00, 0xe0, 0xff, 0x10, 0xe4, 0x06, 0x37, 0x06, 0x1e, 0x5d, 0xc0, 0xd4, 0x15, 0x53, 0xb8, 0x1e, + 0xe7, 0xf5, 0x3d, 0xcd, 0x5a, 0xd2, 0xf4, 0xea, 0x0f, 0xa1, 0xd8, 0x6f, 0x91, 0x7a, 0xd1, 0x02, + 0xaf, 0x26, 0x13, 0xbe, 0x57, 0x53, 0x2f, 0x57, 0xea, 0x1e, 0x14, 0xfe, 0x0b, 0x41, 0x6e, 0x70, + 0x77, 0x8d, 0x71, 0x03, 0xa9, 0xcb, 0x6e, 0xd4, 0x0d, 0xc4, 0xe5, 0x2e, 0x5e, 0xbc, 0xdc, 0xc5, + 0x4b, 0x96, 0xfb, 0x3b, 0x05, 0x72, 0x83, 0xab, 0x77, 0x0c, 0xb1, 0xa9, 0xbb, 0x7a, 0x94, 0xd8, + 0x5f, 0x42, 0xb1, 0x3f, 0xa1, 0xd2, 0x87, 0x3d, 0xca, 0xc9, 0x5f, 0x89, 0xe7, 0x92, 0xdd, 0x57, + 0x77, 0xa3, 0xb4, 0x31, 0x06, 0xda, 0x18, 0x37, 0xd1, 0xd7, 0x00, 0x72, 0x04, 0x06, 0xb7, 0xe6, + 0x18, 0x77, 0x92, 0xba, 0x66, 0xc7, 0x1f, 0x81, 0xf7, 0x2f, 0xd8, 0x01, 0xab, 0x7e, 0xf0, 0xdd, + 0x55, 0x54, 0xdc, 0xab, 0xa9, 0x95, 0x4b, 0xf5, 0x42, 0x0f, 0xaa, 0xfc, 0x1b, 0x82, 0xdb, 0x2d, + 0xd6, 0x19, 0x25, 0xba, 0xbc, 0x10, 0xeb, 0x6d, 0xc8, 0x27, 0xb4, 0x81, 0xf6, 0x6a, 0x51, 0x86, + 0xc5, 0x1c, 0xe2, 0x5a, 0x1a, 0xf3, 0x2d, 0xdd, 0xa2, 0x6e, 0xf0, 0xc0, 0xea, 0xa1, 0x8b, 0x78, + 0x36, 0x3f, 0xf5, 0xbf, 0x86, 0x7b, 0x3d, 0xd3, 0x3f, 0x08, 0xfd, 0xac, 0x28, 0x95, 0x8d, 0x5f, + 0x95, 0x9b, 0x9b, 0x21, 0xe6, 0x7a, 0xc0, 0xa2, 0xd2, 0x63, 0xf1, 0x24, 0x4c, 0xda, 0x9f, 0x09, + 0xf0, 0xef, 0xfc, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x85, 0x95, 0xd5, 0xf8, 0x94, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/entity_type.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/entity_type.pb.go new file mode 100644 index 0000000..0537424 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/entity_type.pb.go @@ -0,0 +1,1624 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/entity_type.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents kinds of entities. +type EntityType_Kind int32 + +const ( + // Not specified. This value should be never used. + EntityType_KIND_UNSPECIFIED EntityType_Kind = 0 + // Map entity types allow mapping of a group of synonyms to a canonical + // value. + EntityType_KIND_MAP EntityType_Kind = 1 + // List entity types contain a set of entries that do not map to canonical + // values. However, list entity types can contain references to other entity + // types (with or without aliases). + EntityType_KIND_LIST EntityType_Kind = 2 +) + +var EntityType_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "KIND_MAP", + 2: "KIND_LIST", +} +var EntityType_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "KIND_MAP": 1, + "KIND_LIST": 2, +} + +func (x EntityType_Kind) String() string { + return proto.EnumName(EntityType_Kind_name, int32(x)) +} +func (EntityType_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{0, 0} +} + +// Represents different entity type expansion modes. Automated expansion +// allows an agent to recognize values that have not been explicitly listed in +// the entity (for example, new kinds of shopping list items). +type EntityType_AutoExpansionMode int32 + +const ( + // Auto expansion disabled for the entity. + EntityType_AUTO_EXPANSION_MODE_UNSPECIFIED EntityType_AutoExpansionMode = 0 + // Allows an agent to recognize values that have not been explicitly + // listed in the entity. + EntityType_AUTO_EXPANSION_MODE_DEFAULT EntityType_AutoExpansionMode = 1 +) + +var EntityType_AutoExpansionMode_name = map[int32]string{ + 0: "AUTO_EXPANSION_MODE_UNSPECIFIED", + 1: "AUTO_EXPANSION_MODE_DEFAULT", +} +var EntityType_AutoExpansionMode_value = map[string]int32{ + "AUTO_EXPANSION_MODE_UNSPECIFIED": 0, + "AUTO_EXPANSION_MODE_DEFAULT": 1, +} + +func (x EntityType_AutoExpansionMode) String() string { + return proto.EnumName(EntityType_AutoExpansionMode_name, int32(x)) +} +func (EntityType_AutoExpansionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{0, 1} +} + +// Represents an entity type. +// Entity types serve as a tool for extracting parameter values from natural +// language queries. +type EntityType struct { + // The unique identifier of the entity type. + // Required for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.UpdateEntityType] and + // [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntityTypes] methods. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the entity type. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. Indicates the kind of entity type. + Kind EntityType_Kind `protobuf:"varint,3,opt,name=kind,proto3,enum=google.cloud.dialogflow.v2beta1.EntityType_Kind" json:"kind,omitempty"` + // Optional. Indicates whether the entity type can be automatically + // expanded. + AutoExpansionMode EntityType_AutoExpansionMode `protobuf:"varint,4,opt,name=auto_expansion_mode,json=autoExpansionMode,proto3,enum=google.cloud.dialogflow.v2beta1.EntityType_AutoExpansionMode" json:"auto_expansion_mode,omitempty"` + // Optional. 
The collection of entity entries associated with the entity type. + Entities []*EntityType_Entity `protobuf:"bytes,6,rep,name=entities,proto3" json:"entities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityType) Reset() { *m = EntityType{} } +func (m *EntityType) String() string { return proto.CompactTextString(m) } +func (*EntityType) ProtoMessage() {} +func (*EntityType) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{0} +} +func (m *EntityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityType.Unmarshal(m, b) +} +func (m *EntityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityType.Marshal(b, m, deterministic) +} +func (dst *EntityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityType.Merge(dst, src) +} +func (m *EntityType) XXX_Size() int { + return xxx_messageInfo_EntityType.Size(m) +} +func (m *EntityType) XXX_DiscardUnknown() { + xxx_messageInfo_EntityType.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityType proto.InternalMessageInfo + +func (m *EntityType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EntityType) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *EntityType) GetKind() EntityType_Kind { + if m != nil { + return m.Kind + } + return EntityType_KIND_UNSPECIFIED +} + +func (m *EntityType) GetAutoExpansionMode() EntityType_AutoExpansionMode { + if m != nil { + return m.AutoExpansionMode + } + return EntityType_AUTO_EXPANSION_MODE_UNSPECIFIED +} + +func (m *EntityType) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +// An **entity entry** for an associated entity type. +type EntityType_Entity struct { + // Required. The primary value associated with this entity entry. + // For example, if the entity type is *vegetable*, the value could be + // *scallions*. + // + // For `KIND_MAP` entity types: + // + // * A canonical value to be used in place of synonyms. + // + // For `KIND_LIST` entity types: + // + // * A string that can contain references to other entity types (with or + // without aliases). + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // Required. A collection of value synonyms. For example, if the entity type + // is *vegetable*, and `value` is *scallions*, a synonym could be *green + // onions*. + // + // For `KIND_LIST` entity types: + // + // * This collection must contain exactly one synonym equal to `value`. 
+ Synonyms []string `protobuf:"bytes,2,rep,name=synonyms,proto3" json:"synonyms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityType_Entity) Reset() { *m = EntityType_Entity{} } +func (m *EntityType_Entity) String() string { return proto.CompactTextString(m) } +func (*EntityType_Entity) ProtoMessage() {} +func (*EntityType_Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{0, 0} +} +func (m *EntityType_Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityType_Entity.Unmarshal(m, b) +} +func (m *EntityType_Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityType_Entity.Marshal(b, m, deterministic) +} +func (dst *EntityType_Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityType_Entity.Merge(dst, src) +} +func (m *EntityType_Entity) XXX_Size() int { + return xxx_messageInfo_EntityType_Entity.Size(m) +} +func (m *EntityType_Entity) XXX_DiscardUnknown() { + xxx_messageInfo_EntityType_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityType_Entity proto.InternalMessageInfo + +func (m *EntityType_Entity) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *EntityType_Entity) GetSynonyms() []string { + if m != nil { + return m.Synonyms + } + return nil +} + +// The request message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes]. +type ListEntityTypesRequest struct { + // Required. The agent to list all entity types from. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language to list entity synonyms for. If not specified, + // the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEntityTypesRequest) Reset() { *m = ListEntityTypesRequest{} } +func (m *ListEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListEntityTypesRequest) ProtoMessage() {} +func (*ListEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{1} +} +func (m *ListEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEntityTypesRequest.Unmarshal(m, b) +} +func (m *ListEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEntityTypesRequest.Merge(dst, src) +} +func (m *ListEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListEntityTypesRequest.Size(m) +} +func (m *ListEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEntityTypesRequest proto.InternalMessageInfo + +func (m *ListEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListEntityTypesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *ListEntityTypesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListEntityTypesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.ListEntityTypes]. +type ListEntityTypesResponse struct { + // The list of agent entity types. There will be a maximum number of items + // returned based on the page_size field in the request. + EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEntityTypesResponse) Reset() { *m = ListEntityTypesResponse{} } +func (m *ListEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListEntityTypesResponse) ProtoMessage() {} +func (*ListEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{2} +} +func (m *ListEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEntityTypesResponse.Unmarshal(m, b) +} +func (m *ListEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEntityTypesResponse.Merge(dst, src) +} +func (m *ListEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListEntityTypesResponse.Size(m) +} +func (m *ListEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEntityTypesResponse proto.InternalMessageInfo + +func (m *ListEntityTypesResponse) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +func (m *ListEntityTypesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [EntityTypes.GetEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.GetEntityType]. +type GetEntityTypeRequest struct { + // Required. The name of the entity type. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The language to retrieve entity synonyms for. If not specified, + // the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetEntityTypeRequest) Reset() { *m = GetEntityTypeRequest{} } +func (m *GetEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*GetEntityTypeRequest) ProtoMessage() {} +func (*GetEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{3} +} +func (m *GetEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEntityTypeRequest.Unmarshal(m, b) +} +func (m *GetEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *GetEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEntityTypeRequest.Merge(dst, src) +} +func (m *GetEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_GetEntityTypeRequest.Size(m) +} +func (m *GetEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEntityTypeRequest proto.InternalMessageInfo + +func (m *GetEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.CreateEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.CreateEntityType]. +type CreateEntityTypeRequest struct { + // Required. The agent to create a entity type for. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entity type to create. + EntityType *EntityType `protobuf:"bytes,2,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The language of entity synonyms defined in `entity_type`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateEntityTypeRequest) Reset() { *m = CreateEntityTypeRequest{} } +func (m *CreateEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*CreateEntityTypeRequest) ProtoMessage() {} +func (*CreateEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{4} +} +func (m *CreateEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateEntityTypeRequest.Unmarshal(m, b) +} +func (m *CreateEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *CreateEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateEntityTypeRequest.Merge(dst, src) +} +func (m *CreateEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_CreateEntityTypeRequest.Size(m) +} +func (m *CreateEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateEntityTypeRequest proto.InternalMessageInfo + +func (m *CreateEntityTypeRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateEntityTypeRequest) GetEntityType() *EntityType { + if m != nil { + return m.EntityType + } + return nil +} + +func (m *CreateEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.UpdateEntityType]. +type UpdateEntityTypeRequest struct { + // Required. The entity type to update. + EntityType *EntityType `protobuf:"bytes,1,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The language of entity synonyms defined in `entity_type`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEntityTypeRequest) Reset() { *m = UpdateEntityTypeRequest{} } +func (m *UpdateEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateEntityTypeRequest) ProtoMessage() {} +func (*UpdateEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{5} +} +func (m *UpdateEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEntityTypeRequest.Unmarshal(m, b) +} +func (m *UpdateEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEntityTypeRequest.Merge(dst, src) +} +func (m *UpdateEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_UpdateEntityTypeRequest.Size(m) +} +func (m *UpdateEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEntityTypeRequest proto.InternalMessageInfo + +func (m *UpdateEntityTypeRequest) GetEntityType() *EntityType { + if m != nil { + return m.EntityType + } + return nil +} + +func (m *UpdateEntityTypeRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *UpdateEntityTypeRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [EntityTypes.DeleteEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.DeleteEntityType]. +type DeleteEntityTypeRequest struct { + // Required. The name of the entity type to delete. + // Format: `projects//agent/entityTypes/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteEntityTypeRequest) Reset() { *m = DeleteEntityTypeRequest{} } +func (m *DeleteEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteEntityTypeRequest) ProtoMessage() {} +func (*DeleteEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{6} +} +func (m *DeleteEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteEntityTypeRequest.Unmarshal(m, b) +} +func (m *DeleteEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteEntityTypeRequest.Merge(dst, src) +} +func (m *DeleteEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_DeleteEntityTypeRequest.Size(m) +} +func (m *DeleteEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteEntityTypeRequest proto.InternalMessageInfo + +func (m *DeleteEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntityTypes]. 
+type BatchUpdateEntityTypesRequest struct { + // Required. The name of the agent to update or create entity types in. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The source of the entity type batch. + // + // For each entity type in the batch: + // + // * If `name` is specified, we update an existing entity type. + // * If `name` is not specified, we create a new entity type. + // + // Types that are valid to be assigned to EntityTypeBatch: + // *BatchUpdateEntityTypesRequest_EntityTypeBatchUri + // *BatchUpdateEntityTypesRequest_EntityTypeBatchInline + EntityTypeBatch isBatchUpdateEntityTypesRequest_EntityTypeBatch `protobuf_oneof:"entity_type_batch"` + // Optional. The language of entity synonyms defined in `entity_types`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntityTypesRequest) Reset() { *m = BatchUpdateEntityTypesRequest{} } +func (m *BatchUpdateEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntityTypesRequest) ProtoMessage() {} +func (*BatchUpdateEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{7} +} +func (m *BatchUpdateEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Unmarshal(m, b) +} +func (m *BatchUpdateEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntityTypesRequest.Merge(dst, src) +} +func (m *BatchUpdateEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntityTypesRequest.Size(m) +} +func (m *BatchUpdateEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntityTypesRequest proto.InternalMessageInfo + +func (m *BatchUpdateEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isBatchUpdateEntityTypesRequest_EntityTypeBatch interface { + isBatchUpdateEntityTypesRequest_EntityTypeBatch() +} + +type BatchUpdateEntityTypesRequest_EntityTypeBatchUri struct { + EntityTypeBatchUri string `protobuf:"bytes,2,opt,name=entity_type_batch_uri,json=entityTypeBatchUri,proto3,oneof"` +} + +type BatchUpdateEntityTypesRequest_EntityTypeBatchInline struct { + EntityTypeBatchInline *EntityTypeBatch `protobuf:"bytes,3,opt,name=entity_type_batch_inline,json=entityTypeBatchInline,proto3,oneof"` +} + +func (*BatchUpdateEntityTypesRequest_EntityTypeBatchUri) isBatchUpdateEntityTypesRequest_EntityTypeBatch() { +} + +func (*BatchUpdateEntityTypesRequest_EntityTypeBatchInline) 
isBatchUpdateEntityTypesRequest_EntityTypeBatch() { +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatch() isBatchUpdateEntityTypesRequest_EntityTypeBatch { + if m != nil { + return m.EntityTypeBatch + } + return nil +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatchUri() string { + if x, ok := m.GetEntityTypeBatch().(*BatchUpdateEntityTypesRequest_EntityTypeBatchUri); ok { + return x.EntityTypeBatchUri + } + return "" +} + +func (m *BatchUpdateEntityTypesRequest) GetEntityTypeBatchInline() *EntityTypeBatch { + if x, ok := m.GetEntityTypeBatch().(*BatchUpdateEntityTypesRequest_EntityTypeBatchInline); ok { + return x.EntityTypeBatchInline + } + return nil +} + +func (m *BatchUpdateEntityTypesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateEntityTypesRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BatchUpdateEntityTypesRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchUpdateEntityTypesRequest_OneofMarshaler, _BatchUpdateEntityTypesRequest_OneofUnmarshaler, _BatchUpdateEntityTypesRequest_OneofSizer, []interface{}{ + (*BatchUpdateEntityTypesRequest_EntityTypeBatchUri)(nil), + (*BatchUpdateEntityTypesRequest_EntityTypeBatchInline)(nil), + } +} + +func _BatchUpdateEntityTypesRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchUpdateEntityTypesRequest) + // entity_type_batch + switch x := m.EntityTypeBatch.(type) { + case *BatchUpdateEntityTypesRequest_EntityTypeBatchUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.EntityTypeBatchUri) + case *BatchUpdateEntityTypesRequest_EntityTypeBatchInline: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EntityTypeBatchInline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchUpdateEntityTypesRequest.EntityTypeBatch has unexpected type %T", x) + } + return nil +} + +func _BatchUpdateEntityTypesRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchUpdateEntityTypesRequest) + switch tag { + case 2: // entity_type_batch.entity_type_batch_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.EntityTypeBatch = &BatchUpdateEntityTypesRequest_EntityTypeBatchUri{x} + return true, err + case 3: // entity_type_batch.entity_type_batch_inline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EntityTypeBatch) + err := b.DecodeMessage(msg) + m.EntityTypeBatch = &BatchUpdateEntityTypesRequest_EntityTypeBatchInline{msg} + return true, err + default: + return false, nil + } +} + +func _BatchUpdateEntityTypesRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchUpdateEntityTypesRequest) + // entity_type_batch + switch x := m.EntityTypeBatch.(type) { + case *BatchUpdateEntityTypesRequest_EntityTypeBatchUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EntityTypeBatchUri))) + n += len(x.EntityTypeBatchUri) + case *BatchUpdateEntityTypesRequest_EntityTypeBatchInline: + s := proto.Size(x.EntityTypeBatchInline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response message for [EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntityTypes]. +type BatchUpdateEntityTypesResponse struct { + // The collection of updated or created entity types. + EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntityTypesResponse) Reset() { *m = BatchUpdateEntityTypesResponse{} } +func (m *BatchUpdateEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntityTypesResponse) ProtoMessage() {} +func (*BatchUpdateEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{8} +} +func (m *BatchUpdateEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Unmarshal(m, b) +} +func (m *BatchUpdateEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntityTypesResponse.Merge(dst, src) +} +func (m *BatchUpdateEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntityTypesResponse.Size(m) +} +func (m *BatchUpdateEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntityTypesResponse proto.InternalMessageInfo + +func (m *BatchUpdateEntityTypesResponse) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +// The request message for [EntityTypes.BatchDeleteEntityTypes][google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntityTypes]. +type BatchDeleteEntityTypesRequest struct { + // Required. The name of the agent to delete all entities types for. Format: + // `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The names entity types to delete. All names must point to the + // same agent as `parent`. 
+ EntityTypeNames []string `protobuf:"bytes,2,rep,name=entity_type_names,json=entityTypeNames,proto3" json:"entity_type_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteEntityTypesRequest) Reset() { *m = BatchDeleteEntityTypesRequest{} } +func (m *BatchDeleteEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteEntityTypesRequest) ProtoMessage() {} +func (*BatchDeleteEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{9} +} +func (m *BatchDeleteEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Unmarshal(m, b) +} +func (m *BatchDeleteEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteEntityTypesRequest.Merge(dst, src) +} +func (m *BatchDeleteEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteEntityTypesRequest.Size(m) +} +func (m *BatchDeleteEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteEntityTypesRequest proto.InternalMessageInfo + +func (m *BatchDeleteEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteEntityTypesRequest) GetEntityTypeNames() []string { + if m != nil { + return m.EntityTypeNames + } + return nil +} + +// The request message for [EntityTypes.BatchCreateEntities][google.cloud.dialogflow.v2beta1.EntityTypes.BatchCreateEntities]. +type BatchCreateEntitiesRequest struct { + // Required. The name of the entity type to create entities in. Format: + // `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entities to create. + Entities []*EntityType_Entity `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateEntitiesRequest) Reset() { *m = BatchCreateEntitiesRequest{} } +func (m *BatchCreateEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateEntitiesRequest) ProtoMessage() {} +func (*BatchCreateEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{10} +} +func (m *BatchCreateEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchCreateEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateEntitiesRequest.Merge(dst, src) +} +func (m *BatchCreateEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateEntitiesRequest.Size(m) +} +func (m *BatchCreateEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateEntitiesRequest proto.InternalMessageInfo + +func (m *BatchCreateEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchCreateEntitiesRequest) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *BatchCreateEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The request message for [EntityTypes.BatchUpdateEntities][google.cloud.dialogflow.v2beta1.EntityTypes.BatchUpdateEntities]. +type BatchUpdateEntitiesRequest struct { + // Required. The name of the entity type to update or create entities in. + // Format: `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The entities to update or create. + Entities []*EntityType_Entity `protobuf:"bytes,2,rep,name=entities,proto3" json:"entities,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateEntitiesRequest) Reset() { *m = BatchUpdateEntitiesRequest{} } +func (m *BatchUpdateEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateEntitiesRequest) ProtoMessage() {} +func (*BatchUpdateEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{11} +} +func (m *BatchUpdateEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchUpdateEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateEntitiesRequest.Merge(dst, src) +} +func (m *BatchUpdateEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateEntitiesRequest.Size(m) +} +func (m *BatchUpdateEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateEntitiesRequest proto.InternalMessageInfo + +func (m *BatchUpdateEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchUpdateEntitiesRequest) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *BatchUpdateEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateEntitiesRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [EntityTypes.BatchDeleteEntities][google.cloud.dialogflow.v2beta1.EntityTypes.BatchDeleteEntities]. +type BatchDeleteEntitiesRequest struct { + // Required. The name of the entity type to delete entries for. Format: + // `projects//agent/entityTypes/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The canonical `values` of the entities to delete. Note that + // these are not fully-qualified names, i.e. they don't start with + // `projects/`. + EntityValues []string `protobuf:"bytes,2,rep,name=entity_values,json=entityValues,proto3" json:"entity_values,omitempty"` + // Optional. The language of entity synonyms defined in `entities`. If not + // specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteEntitiesRequest) Reset() { *m = BatchDeleteEntitiesRequest{} } +func (m *BatchDeleteEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteEntitiesRequest) ProtoMessage() {} +func (*BatchDeleteEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{12} +} +func (m *BatchDeleteEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Unmarshal(m, b) +} +func (m *BatchDeleteEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteEntitiesRequest.Merge(dst, src) +} +func (m *BatchDeleteEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteEntitiesRequest.Size(m) +} +func (m *BatchDeleteEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteEntitiesRequest proto.InternalMessageInfo + +func (m *BatchDeleteEntitiesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteEntitiesRequest) GetEntityValues() []string { + if m != nil { + return m.EntityValues + } + return nil +} + +func (m *BatchDeleteEntitiesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// This message is a wrapper around a collection of entity types. +type EntityTypeBatch struct { + // A collection of entity types. 
+ EntityTypes []*EntityType `protobuf:"bytes,1,rep,name=entity_types,json=entityTypes,proto3" json:"entity_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityTypeBatch) Reset() { *m = EntityTypeBatch{} } +func (m *EntityTypeBatch) String() string { return proto.CompactTextString(m) } +func (*EntityTypeBatch) ProtoMessage() {} +func (*EntityTypeBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_type_6dfa1fc30f052b5b, []int{13} +} +func (m *EntityTypeBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityTypeBatch.Unmarshal(m, b) +} +func (m *EntityTypeBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityTypeBatch.Marshal(b, m, deterministic) +} +func (dst *EntityTypeBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityTypeBatch.Merge(dst, src) +} +func (m *EntityTypeBatch) XXX_Size() int { + return xxx_messageInfo_EntityTypeBatch.Size(m) +} +func (m *EntityTypeBatch) XXX_DiscardUnknown() { + xxx_messageInfo_EntityTypeBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityTypeBatch proto.InternalMessageInfo + +func (m *EntityTypeBatch) GetEntityTypes() []*EntityType { + if m != nil { + return m.EntityTypes + } + return nil +} + +func init() { + proto.RegisterType((*EntityType)(nil), "google.cloud.dialogflow.v2beta1.EntityType") + proto.RegisterType((*EntityType_Entity)(nil), "google.cloud.dialogflow.v2beta1.EntityType.Entity") + proto.RegisterType((*ListEntityTypesRequest)(nil), "google.cloud.dialogflow.v2beta1.ListEntityTypesRequest") + proto.RegisterType((*ListEntityTypesResponse)(nil), "google.cloud.dialogflow.v2beta1.ListEntityTypesResponse") + proto.RegisterType((*GetEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.GetEntityTypeRequest") + proto.RegisterType((*CreateEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateEntityTypeRequest") + proto.RegisterType((*UpdateEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateEntityTypeRequest") + proto.RegisterType((*DeleteEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteEntityTypeRequest") + proto.RegisterType((*BatchUpdateEntityTypesRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchUpdateEntityTypesRequest") + proto.RegisterType((*BatchUpdateEntityTypesResponse)(nil), "google.cloud.dialogflow.v2beta1.BatchUpdateEntityTypesResponse") + proto.RegisterType((*BatchDeleteEntityTypesRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchDeleteEntityTypesRequest") + proto.RegisterType((*BatchCreateEntitiesRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchCreateEntitiesRequest") + proto.RegisterType((*BatchUpdateEntitiesRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchUpdateEntitiesRequest") + proto.RegisterType((*BatchDeleteEntitiesRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchDeleteEntitiesRequest") + proto.RegisterType((*EntityTypeBatch)(nil), "google.cloud.dialogflow.v2beta1.EntityTypeBatch") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.EntityType_Kind", EntityType_Kind_name, EntityType_Kind_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.EntityType_AutoExpansionMode", EntityType_AutoExpansionMode_name, EntityType_AutoExpansionMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EntityTypesClient is the client API for EntityTypes service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EntityTypesClient interface { + // Returns the list of all entity types in the specified agent. + ListEntityTypes(ctx context.Context, in *ListEntityTypesRequest, opts ...grpc.CallOption) (*ListEntityTypesResponse, error) + // Retrieves the specified entity type. + GetEntityType(ctx context.Context, in *GetEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Creates an entity type in the specified agent. + CreateEntityType(ctx context.Context, in *CreateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Updates the specified entity type. + UpdateEntityType(ctx context.Context, in *UpdateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) + // Deletes the specified entity type. + DeleteEntityType(ctx context.Context, in *DeleteEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates/Creates multiple entity types in the specified agent. + // + // Operation + BatchUpdateEntityTypes(ctx context.Context, in *BatchUpdateEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes entity types in the specified agent. + // + // Operation + BatchDeleteEntityTypes(ctx context.Context, in *BatchDeleteEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates multiple new entities in the specified entity type. + // + // Operation + BatchCreateEntities(ctx context.Context, in *BatchCreateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates or creates multiple entities in the specified entity type. This + // method does not affect entities in the entity type that aren't explicitly + // specified in the request. + // + // Operation + BatchUpdateEntities(ctx context.Context, in *BatchUpdateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes entities in the specified entity type. + // + // Operation + BatchDeleteEntities(ctx context.Context, in *BatchDeleteEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type entityTypesClient struct { + cc *grpc.ClientConn +} + +func NewEntityTypesClient(cc *grpc.ClientConn) EntityTypesClient { + return &entityTypesClient{cc} +} + +func (c *entityTypesClient) ListEntityTypes(ctx context.Context, in *ListEntityTypesRequest, opts ...grpc.CallOption) (*ListEntityTypesResponse, error) { + out := new(ListEntityTypesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/ListEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) GetEntityType(ctx context.Context, in *GetEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/GetEntityType", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) CreateEntityType(ctx context.Context, in *CreateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/CreateEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) UpdateEntityType(ctx context.Context, in *UpdateEntityTypeRequest, opts ...grpc.CallOption) (*EntityType, error) { + out := new(EntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/UpdateEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) DeleteEntityType(ctx context.Context, in *DeleteEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/DeleteEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchUpdateEntityTypes(ctx context.Context, in *BatchUpdateEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchUpdateEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchDeleteEntityTypes(ctx context.Context, in *BatchDeleteEntityTypesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchDeleteEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchCreateEntities(ctx context.Context, in *BatchCreateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchCreateEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchUpdateEntities(ctx context.Context, in *BatchUpdateEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchUpdateEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *entityTypesClient) BatchDeleteEntities(ctx context.Context, in *BatchDeleteEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchDeleteEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EntityTypesServer is the server API for EntityTypes service. +type EntityTypesServer interface { + // Returns the list of all entity types in the specified agent. + ListEntityTypes(context.Context, *ListEntityTypesRequest) (*ListEntityTypesResponse, error) + // Retrieves the specified entity type. + GetEntityType(context.Context, *GetEntityTypeRequest) (*EntityType, error) + // Creates an entity type in the specified agent. + CreateEntityType(context.Context, *CreateEntityTypeRequest) (*EntityType, error) + // Updates the specified entity type. 
+ UpdateEntityType(context.Context, *UpdateEntityTypeRequest) (*EntityType, error) + // Deletes the specified entity type. + DeleteEntityType(context.Context, *DeleteEntityTypeRequest) (*empty.Empty, error) + // Updates/Creates multiple entity types in the specified agent. + // + // Operation + BatchUpdateEntityTypes(context.Context, *BatchUpdateEntityTypesRequest) (*longrunning.Operation, error) + // Deletes entity types in the specified agent. + // + // Operation + BatchDeleteEntityTypes(context.Context, *BatchDeleteEntityTypesRequest) (*longrunning.Operation, error) + // Creates multiple new entities in the specified entity type. + // + // Operation + BatchCreateEntities(context.Context, *BatchCreateEntitiesRequest) (*longrunning.Operation, error) + // Updates or creates multiple entities in the specified entity type. This + // method does not affect entities in the entity type that aren't explicitly + // specified in the request. + // + // Operation + BatchUpdateEntities(context.Context, *BatchUpdateEntitiesRequest) (*longrunning.Operation, error) + // Deletes entities in the specified entity type. + // + // Operation + BatchDeleteEntities(context.Context, *BatchDeleteEntitiesRequest) (*longrunning.Operation, error) +} + +func RegisterEntityTypesServer(s *grpc.Server, srv EntityTypesServer) { + s.RegisterService(&_EntityTypes_serviceDesc, srv) +} + +func _EntityTypes_ListEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).ListEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/ListEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).ListEntityTypes(ctx, req.(*ListEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_GetEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).GetEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/GetEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).GetEntityType(ctx, req.(*GetEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_CreateEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).CreateEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/CreateEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).CreateEntityType(ctx, req.(*CreateEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_UpdateEntityType_Handler(srv interface{}, ctx 
context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).UpdateEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/UpdateEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).UpdateEntityType(ctx, req.(*UpdateEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_DeleteEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).DeleteEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/DeleteEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).DeleteEntityType(ctx, req.(*DeleteEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchUpdateEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchUpdateEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchUpdateEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchUpdateEntityTypes(ctx, req.(*BatchUpdateEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchDeleteEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchDeleteEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchDeleteEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchDeleteEntityTypes(ctx, req.(*BatchDeleteEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchCreateEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchCreateEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchCreateEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchCreateEntities(ctx, req.(*BatchCreateEntitiesRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _EntityTypes_BatchUpdateEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchUpdateEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchUpdateEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchUpdateEntities(ctx, req.(*BatchUpdateEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EntityTypes_BatchDeleteEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EntityTypesServer).BatchDeleteEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.EntityTypes/BatchDeleteEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EntityTypesServer).BatchDeleteEntities(ctx, req.(*BatchDeleteEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EntityTypes_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.EntityTypes", + HandlerType: (*EntityTypesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListEntityTypes", + Handler: _EntityTypes_ListEntityTypes_Handler, + }, + { + MethodName: "GetEntityType", + Handler: _EntityTypes_GetEntityType_Handler, + }, + { + MethodName: "CreateEntityType", + Handler: _EntityTypes_CreateEntityType_Handler, + }, + { + MethodName: "UpdateEntityType", + Handler: _EntityTypes_UpdateEntityType_Handler, + }, + { + MethodName: "DeleteEntityType", + Handler: _EntityTypes_DeleteEntityType_Handler, + }, + { + MethodName: "BatchUpdateEntityTypes", + Handler: _EntityTypes_BatchUpdateEntityTypes_Handler, + }, + { + MethodName: "BatchDeleteEntityTypes", + Handler: _EntityTypes_BatchDeleteEntityTypes_Handler, + }, + { + MethodName: "BatchCreateEntities", + Handler: _EntityTypes_BatchCreateEntities_Handler, + }, + { + MethodName: "BatchUpdateEntities", + Handler: _EntityTypes_BatchUpdateEntities_Handler, + }, + { + MethodName: "BatchDeleteEntities", + Handler: _EntityTypes_BatchDeleteEntities_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/entity_type.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/entity_type.proto", fileDescriptor_entity_type_6dfa1fc30f052b5b) +} + +var fileDescriptor_entity_type_6dfa1fc30f052b5b = []byte{ + // 1246 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x58, 0x4f, 0x6f, 0xe3, 0x44, + 0x14, 0xdf, 0x49, 0xb3, 0x55, 0xfb, 0xd2, 0x6e, 0xd3, 0x69, 0xb7, 0x0d, 0xe9, 0x96, 0x16, 0x57, + 0x42, 0x55, 0x11, 0xf1, 0x6e, 0x56, 0xfc, 0x6b, 0x55, 0x50, 0xdb, 0xa4, 0xbb, 0x61, 0xdb, 0x24, + 0x72, 0xdb, 0x15, 0x70, 0xb1, 0xdc, 0x64, 0x1a, 0x4c, 0x93, 0x19, 0xe3, 0x3f, 0xcb, 0x66, 0xd1, + 0x72, 0xe0, 0x1b, 0x20, 0x6e, 0x88, 0x13, 0xe2, 0x02, 0x88, 0x6f, 0x80, 0xb8, 0x70, 0x82, 0x13, + 0x12, 0x5f, 0x01, 0xf1, 0x19, 0x90, 0xb8, 0x20, 0x8f, 0xed, 0xd8, 0x71, 0x1c, 0x6c, 0xc3, 
0xee, + 0x8a, 0x5b, 0xe6, 0xcf, 0x7b, 0xef, 0xf7, 0x7b, 0xef, 0xcd, 0xfc, 0xc6, 0x81, 0x5b, 0x1d, 0xc6, + 0x3a, 0x5d, 0x22, 0xb6, 0xba, 0xcc, 0x6a, 0x8b, 0x6d, 0x55, 0xe9, 0xb2, 0xce, 0x45, 0x97, 0x7d, + 0x24, 0x3e, 0x28, 0x9f, 0x13, 0x53, 0xb9, 0x25, 0x12, 0x6a, 0xaa, 0x66, 0x5f, 0x36, 0xfb, 0x1a, + 0x29, 0x69, 0x3a, 0x33, 0x19, 0x5e, 0x73, 0x4c, 0x4a, 0xdc, 0xa4, 0xe4, 0x9b, 0x94, 0x5c, 0x93, + 0xe2, 0x0d, 0xd7, 0xa7, 0xa2, 0xa9, 0xa2, 0x42, 0x29, 0x33, 0x15, 0x53, 0x65, 0xd4, 0x70, 0xcc, + 0x8b, 0xcf, 0x05, 0x56, 0x75, 0x62, 0x30, 0x4b, 0x6f, 0xb9, 0x9e, 0x8b, 0x1b, 0xee, 0x52, 0x97, + 0xd1, 0x8e, 0x6e, 0x51, 0xaa, 0xd2, 0x8e, 0xc8, 0x34, 0xa2, 0x0f, 0xd9, 0xaf, 0xb8, 0x9b, 0xf8, + 0xe8, 0xdc, 0xba, 0x10, 0x49, 0x4f, 0x33, 0xfb, 0xee, 0xe2, 0x7a, 0x78, 0xf1, 0x42, 0x25, 0xdd, + 0xb6, 0xdc, 0x53, 0x8c, 0x4b, 0x77, 0xc7, 0x8d, 0xf0, 0x0e, 0xc3, 0xd4, 0xad, 0x96, 0xe9, 0xac, + 0x0a, 0x5f, 0x64, 0x01, 0xaa, 0x9c, 0xf1, 0x69, 0x5f, 0x23, 0x18, 0x43, 0x96, 0x2a, 0x3d, 0x52, + 0x40, 0xeb, 0x68, 0x73, 0x5a, 0xe2, 0xbf, 0xf1, 0x0b, 0x30, 0xd3, 0x56, 0x0d, 0xad, 0xab, 0xf4, + 0x65, 0xbe, 0x96, 0xe1, 0x6b, 0x39, 0x77, 0xae, 0x6e, 0x6f, 0xa9, 0x40, 0xf6, 0x52, 0xa5, 0xed, + 0xc2, 0xc4, 0x3a, 0xda, 0xbc, 0x56, 0xbe, 0x59, 0x8a, 0x49, 0x58, 0xc9, 0x8f, 0x58, 0xba, 0xa7, + 0xd2, 0xb6, 0xc4, 0xad, 0x71, 0x0f, 0x16, 0x14, 0xcb, 0x64, 0x32, 0x79, 0xa8, 0x29, 0xd4, 0x50, + 0x19, 0x95, 0x7b, 0xac, 0x4d, 0x0a, 0x59, 0xee, 0x74, 0x37, 0x8d, 0xd3, 0x3d, 0xcb, 0x64, 0x55, + 0xcf, 0xcb, 0x31, 0x6b, 0x13, 0x69, 0x5e, 0x09, 0x4f, 0xe1, 0x3a, 0x4c, 0xf1, 0x5a, 0xab, 0xc4, + 0x28, 0x4c, 0xae, 0x4f, 0x6c, 0xe6, 0xca, 0xe5, 0x34, 0x31, 0x9c, 0x9f, 0xd2, 0xc0, 0x47, 0x71, + 0x1b, 0x26, 0x9d, 0x39, 0xbc, 0x08, 0x57, 0x1f, 0x28, 0x5d, 0xcb, 0x4b, 0xa3, 0x33, 0xc0, 0x45, + 0x98, 0x32, 0xfa, 0x94, 0xd1, 0x7e, 0xcf, 0x28, 0x64, 0xd6, 0x27, 0x36, 0xa7, 0xa5, 0xc1, 0x58, + 0x78, 0x03, 0xb2, 0x76, 0x22, 0xf0, 0x22, 0xe4, 0xef, 0xd5, 0xea, 0x15, 0xf9, 0xac, 0x7e, 0xd2, + 0xac, 0x1e, 0xd4, 0x0e, 0x6b, 0xd5, 0x4a, 0xfe, 0x0a, 0x9e, 0x81, 0x29, 0x3e, 0x7b, 0xbc, 0xd7, + 0xcc, 0x23, 0x3c, 0x0b, 0xd3, 0x7c, 0x74, 0x54, 0x3b, 0x39, 0xcd, 0x67, 0x84, 0x77, 0x61, 0x7e, + 0x84, 0x2e, 0xde, 0x80, 0xb5, 0xbd, 0xb3, 0xd3, 0x86, 0x5c, 0x7d, 0xa7, 0xb9, 0x57, 0x3f, 0xa9, + 0x35, 0xea, 0xf2, 0x71, 0xa3, 0x52, 0x0d, 0xb9, 0x5d, 0x83, 0x95, 0xa8, 0x4d, 0x95, 0xea, 0xe1, + 0xde, 0xd9, 0xd1, 0x69, 0x1e, 0x09, 0x9f, 0x21, 0x58, 0x3a, 0x52, 0x0d, 0xd3, 0x67, 0x6d, 0x48, + 0xe4, 0x43, 0x8b, 0x18, 0x26, 0x5e, 0x82, 0x49, 0x4d, 0xd1, 0x09, 0x35, 0x5d, 0x8e, 0xee, 0x08, + 0x6f, 0xc0, 0x6c, 0x57, 0xa1, 0x1d, 0x4b, 0xe9, 0x10, 0xb9, 0x65, 0x57, 0xcf, 0xe9, 0x96, 0x19, + 0x6f, 0xf2, 0xc0, 0x46, 0xb7, 0x02, 0xd3, 0x9a, 0xbd, 0xc1, 0x50, 0x1f, 0x11, 0xde, 0x33, 0x57, + 0xa5, 0x29, 0x7b, 0xe2, 0x44, 0x7d, 0x44, 0xf0, 0x2a, 0x00, 0x5f, 0x34, 0xd9, 0x25, 0xa1, 0xbc, + 0xf8, 0xd3, 0x12, 0xdf, 0x7e, 0x6a, 0x4f, 0xd8, 0x98, 0x96, 0x47, 0x30, 0x19, 0x1a, 0xa3, 0x86, + 0x5d, 0xd1, 0x99, 0xc0, 0xe9, 0x35, 0x0a, 0x88, 0x57, 0xf5, 0xa5, 0x14, 0x55, 0x95, 0x72, 0xc4, + 0xf7, 0x8b, 0x5f, 0x84, 0x39, 0x4a, 0x1e, 0x9a, 0x72, 0x00, 0x8f, 0x43, 0x67, 0xd6, 0x9e, 0x6e, + 0x0e, 0x30, 0x35, 0x60, 0xf1, 0x0e, 0x09, 0x20, 0xf2, 0x92, 0x14, 0x75, 0x9a, 0x92, 0x24, 0x48, + 0xf8, 0x1a, 0xc1, 0xf2, 0x81, 0x4e, 0x14, 0x93, 0x8c, 0x3a, 0x1d, 0x97, 0xf9, 0x23, 0xc8, 0x05, + 0xc8, 0x73, 0xb7, 0x29, 0xb9, 0x83, 0xcf, 0x7d, 0x14, 0xe6, 0x44, 0x04, 0xcc, 0x9f, 0x11, 0x2c, + 0x9f, 0x69, 0xed, 0x48, 0x98, 0x21, 0x38, 0xe8, 0x09, 0xc3, 0x89, 0x6a, 0xab, 0x1d, 0xc8, 0x59, + 0x1c, 0x0d, 0xbf, 
0xfe, 0x38, 0xe2, 0x5c, 0xb9, 0xe8, 0x85, 0xf4, 0xee, 0xbf, 0xd2, 0xa1, 0x7d, + 0x43, 0x1e, 0x2b, 0xc6, 0xa5, 0x04, 0xce, 0x76, 0xfb, 0xb7, 0xf0, 0x32, 0x2c, 0x57, 0x48, 0x97, + 0x44, 0x51, 0x89, 0x28, 0xa3, 0xf0, 0x6b, 0x06, 0x56, 0xf7, 0x15, 0xb3, 0xf5, 0x7e, 0x98, 0x7f, + 0xec, 0x09, 0xb9, 0x0d, 0xd7, 0x03, 0x89, 0x91, 0xcf, 0x6d, 0x27, 0xb2, 0xa5, 0xab, 0x0e, 0xa5, + 0xbb, 0x57, 0x24, 0xec, 0xf3, 0x76, 0x22, 0xe8, 0x2a, 0xbe, 0x84, 0xc2, 0xa8, 0x91, 0x4a, 0xbb, + 0x2a, 0x25, 0x2e, 0xcf, 0x34, 0x97, 0x2e, 0x77, 0x7b, 0xf7, 0x8a, 0x74, 0x3d, 0x14, 0xa9, 0xc6, + 0x1d, 0x8e, 0x26, 0x3b, 0x1b, 0x9f, 0xec, 0xab, 0x69, 0x92, 0xbd, 0xbf, 0x00, 0xf3, 0x23, 0x74, + 0x04, 0x0d, 0x9e, 0x1f, 0x97, 0xd1, 0xa7, 0x73, 0xbe, 0x85, 0x96, 0x5b, 0xc3, 0x70, 0xe1, 0x63, + 0x6b, 0xb8, 0x35, 0x8c, 0xdf, 0xee, 0x08, 0xef, 0x4e, 0x9f, 0xf3, 0x03, 0xd8, 0xd2, 0x68, 0x08, + 0xdf, 0x20, 0x28, 0xf2, 0x28, 0x81, 0x03, 0xad, 0xc6, 0x87, 0x08, 0xaa, 0x53, 0xe6, 0xbf, 0xab, + 0x53, 0xb2, 0x03, 0xfd, 0x87, 0x87, 0x35, 0x50, 0x83, 0xff, 0x29, 0xd6, 0x70, 0x03, 0x66, 0x53, + 0x9d, 0xf6, 0x4f, 0x5c, 0x9e, 0x81, 0xca, 0xab, 0x89, 0xc4, 0xcd, 0x2d, 0x3b, 0x57, 0x74, 0xaf, + 0xe4, 0x6e, 0x53, 0xde, 0xe7, 0x73, 0xc9, 0x12, 0xad, 0xc0, 0x5c, 0xe8, 0x38, 0x3e, 0xe9, 0xe6, + 0x2e, 0xff, 0x75, 0x0d, 0x72, 0x81, 0x96, 0xc6, 0x3f, 0x20, 0x98, 0x0b, 0x09, 0x27, 0x7e, 0x2d, + 0xd6, 0x7b, 0xb4, 0xfc, 0x17, 0x5f, 0x4f, 0x6f, 0xe8, 0x9c, 0x61, 0xe1, 0xd5, 0x4f, 0x7f, 0xfb, + 0xfd, 0xf3, 0xcc, 0x4d, 0x5c, 0x1a, 0x3c, 0xb8, 0x3f, 0x76, 0xb2, 0xbb, 0xab, 0xe9, 0xec, 0x03, + 0xd2, 0x32, 0x0d, 0x71, 0x4b, 0x54, 0x3a, 0x84, 0x9a, 0x8f, 0xc5, 0xa0, 0x16, 0x7f, 0x87, 0x60, + 0x76, 0x48, 0x64, 0xf1, 0x2b, 0xb1, 0x18, 0xa2, 0x44, 0xb9, 0x98, 0x26, 0xa3, 0x51, 0x68, 0xed, + 0xc3, 0x3e, 0x82, 0x35, 0x08, 0x55, 0xdc, 0x7a, 0x8c, 0x7f, 0x44, 0x90, 0x0f, 0x0b, 0x38, 0x8e, + 0x4f, 0xda, 0x18, 0xcd, 0x4f, 0x87, 0xf9, 0x80, 0x63, 0xde, 0x15, 0x52, 0x66, 0x78, 0x3b, 0xa8, + 0xd7, 0xf8, 0x17, 0x04, 0xf9, 0xf0, 0x45, 0x9c, 0x80, 0xc0, 0x98, 0xd7, 0x40, 0x3a, 0x02, 0x0d, + 0x4e, 0xa0, 0x56, 0xde, 0xf6, 0x09, 0x04, 0x3f, 0xca, 0x92, 0x14, 0x60, 0x98, 0xcc, 0x97, 0x08, + 0xf2, 0xe1, 0x3b, 0x3e, 0x01, 0x99, 0x31, 0xef, 0x81, 0xe2, 0xd2, 0xc8, 0x25, 0x53, 0xb5, 0xbf, + 0xc8, 0xbc, 0x66, 0xd9, 0xfa, 0x17, 0xcd, 0xb2, 0x14, 0xad, 0x7c, 0xf8, 0xcd, 0x58, 0x90, 0xff, + 0xf8, 0x08, 0x29, 0xae, 0x7a, 0xf6, 0x81, 0x2f, 0xcc, 0x52, 0xc3, 0xfb, 0xc2, 0x14, 0xaa, 0x1c, + 0xf1, 0x5b, 0xc2, 0x76, 0xca, 0x56, 0x39, 0xf7, 0x83, 0x6e, 0xa3, 0x2d, 0x9f, 0xc0, 0x88, 0x90, + 0x26, 0x25, 0x30, 0x4e, 0x81, 0x9f, 0x2a, 0x01, 0x27, 0xa8, 0x4d, 0xe0, 0x27, 0x04, 0x0b, 0x11, + 0x1a, 0x8d, 0x77, 0x92, 0xa1, 0x8f, 0x54, 0xf6, 0x38, 0xe8, 0x4d, 0x0e, 0xfd, 0x6d, 0xa1, 0x1a, + 0x0b, 0x3d, 0xd4, 0x2f, 0xa2, 0xa7, 0x93, 0x0e, 0x0b, 0x27, 0xf8, 0x10, 0x8b, 0x61, 0xf5, 0x4e, + 0xca, 0x22, 0x52, 0xf3, 0x9f, 0x15, 0x0b, 0xbf, 0x99, 0x06, 0x2c, 0x86, 0xb5, 0x39, 0x29, 0x8b, + 0x48, 0x45, 0x7f, 0x56, 0x2c, 0x06, 0x1d, 0xb5, 0xff, 0x3d, 0x82, 0x8d, 0x16, 0xeb, 0xc5, 0x61, + 0xde, 0x0f, 0x3c, 0x03, 0x9a, 0xf6, 0x6d, 0xd2, 0x44, 0xef, 0xd5, 0x5c, 0x9b, 0x0e, 0xb3, 0x9f, + 0x0c, 0x25, 0xa6, 0x77, 0xc4, 0x0e, 0xa1, 0xfc, 0xae, 0x11, 0x9d, 0x25, 0x45, 0x53, 0x8d, 0xb1, + 0x7f, 0x60, 0xed, 0xf8, 0x53, 0x7f, 0x22, 0xf4, 0x55, 0x26, 0x53, 0x39, 0xfc, 0x36, 0xb3, 0x76, + 0xc7, 0xf1, 0x79, 0xc0, 0x71, 0x54, 0x7c, 0x1c, 0xf7, 0x1d, 0xa3, 0xf3, 0x49, 0xee, 0xff, 0xf6, + 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xfb, 0xe2, 0x2e, 0xe6, 0x1f, 0x13, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/intent.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/intent.pb.go new file mode 100644 index 0000000..46cb31a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/intent.pb.go @@ -0,0 +1,3787 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/intent.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the options for views of an intent. +// An intent can be a sizable object. Therefore, we provide a resource view that +// does not return training phrases in the response by default. +type IntentView int32 + +const ( + // Training phrases field is not populated in the response. + IntentView_INTENT_VIEW_UNSPECIFIED IntentView = 0 + // All fields are populated. + IntentView_INTENT_VIEW_FULL IntentView = 1 +) + +var IntentView_name = map[int32]string{ + 0: "INTENT_VIEW_UNSPECIFIED", + 1: "INTENT_VIEW_FULL", +} +var IntentView_value = map[string]int32{ + "INTENT_VIEW_UNSPECIFIED": 0, + "INTENT_VIEW_FULL": 1, +} + +func (x IntentView) String() string { + return proto.EnumName(IntentView_name, int32(x)) +} +func (IntentView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0} +} + +// Represents the different states that webhooks can be in. +type Intent_WebhookState int32 + +const ( + // Webhook is disabled in the agent and in the intent. + Intent_WEBHOOK_STATE_UNSPECIFIED Intent_WebhookState = 0 + // Webhook is enabled in the agent and in the intent. + Intent_WEBHOOK_STATE_ENABLED Intent_WebhookState = 1 + // Webhook is enabled in the agent and in the intent. Also, each slot + // filling prompt is forwarded to the webhook. + Intent_WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING Intent_WebhookState = 2 +) + +var Intent_WebhookState_name = map[int32]string{ + 0: "WEBHOOK_STATE_UNSPECIFIED", + 1: "WEBHOOK_STATE_ENABLED", + 2: "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING", +} +var Intent_WebhookState_value = map[string]int32{ + "WEBHOOK_STATE_UNSPECIFIED": 0, + "WEBHOOK_STATE_ENABLED": 1, + "WEBHOOK_STATE_ENABLED_FOR_SLOT_FILLING": 2, +} + +func (x Intent_WebhookState) String() string { + return proto.EnumName(Intent_WebhookState_name, int32(x)) +} +func (Intent_WebhookState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 0} +} + +// Represents different types of training phrases. 
+type Intent_TrainingPhrase_Type int32 + +const ( + // Not specified. This value should never be used. + Intent_TrainingPhrase_TYPE_UNSPECIFIED Intent_TrainingPhrase_Type = 0 + // Examples do not contain @-prefixed entity type names, but example parts + // can be annotated with entity types. + Intent_TrainingPhrase_EXAMPLE Intent_TrainingPhrase_Type = 1 + // Templates are not annotated with entity types, but they can contain + // @-prefixed entity type names as substrings. + // Template mode has been deprecated. Example mode is the only supported + // way to create new training phrases. If you have existing training + // phrases that you've created in template mode, those will continue to + // work. + Intent_TrainingPhrase_TEMPLATE Intent_TrainingPhrase_Type = 2 // Deprecated: Do not use. +) + +var Intent_TrainingPhrase_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "EXAMPLE", + 2: "TEMPLATE", +} +var Intent_TrainingPhrase_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "EXAMPLE": 1, + "TEMPLATE": 2, +} + +func (x Intent_TrainingPhrase_Type) String() string { + return proto.EnumName(Intent_TrainingPhrase_Type_name, int32(x)) +} +func (Intent_TrainingPhrase_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 0, 0} +} + +// Represents different platforms that a rich message can be intended for. +type Intent_Message_Platform int32 + +const ( + // Not specified. + Intent_Message_PLATFORM_UNSPECIFIED Intent_Message_Platform = 0 + // Facebook. + Intent_Message_FACEBOOK Intent_Message_Platform = 1 + // Slack. + Intent_Message_SLACK Intent_Message_Platform = 2 + // Telegram. + Intent_Message_TELEGRAM Intent_Message_Platform = 3 + // Kik. + Intent_Message_KIK Intent_Message_Platform = 4 + // Skype. + Intent_Message_SKYPE Intent_Message_Platform = 5 + // Line. + Intent_Message_LINE Intent_Message_Platform = 6 + // Viber. + Intent_Message_VIBER Intent_Message_Platform = 7 + // Actions on Google. + // When using Actions on Google, you can choose one of the specific + // Intent.Message types that mention support for Actions on Google, + // or you can use the advanced Intent.Message.payload field. + // The payload field provides access to AoG features not available in the + // specific message types. + // If using the Intent.Message.payload field, it should have a structure + // similar to the JSON message shown here. For more information, see + // [Actions on Google Webhook + // Format](https://developers.google.com/actions/dialogflow/webhook) + //
{
+	//   "expectUserResponse": true,
+	//   "isSsml": false,
+	//   "noInputPrompts": [],
+	//   "richResponse": {
+	//     "items": [
+	//       {
+	//         "simpleResponse": {
+	//           "displayText": "hi",
+	//           "textToSpeech": "hello"
+	//         }
+	//       }
+	//     ],
+	//     "suggestions": [
+	//       {
+	//         "title": "Say this"
+	//       },
+	//       {
+	//         "title": "or this"
+	//       }
+	//     ]
+	//   },
+	//   "systemIntent": {
+	//     "data": {
+	//       "@type": "type.googleapis.com/google.actions.v2.OptionValueSpec",
+	//       "listSelect": {
+	//         "items": [
+	//           {
+	//             "optionInfo": {
+	//               "key": "key1",
+	//               "synonyms": [
+	//                 "key one"
+	//               ]
+	//             },
+	//             "title": "must not be empty, but unique"
+	//           },
+	//           {
+	//             "optionInfo": {
+	//               "key": "key2",
+	//               "synonyms": [
+	//                 "key two"
+	//               ]
+	//             },
+	//             "title": "must not be empty, but unique"
+	//           }
+	//         ]
+	//       }
+	//     },
+	//     "intent": "actions.intent.OPTION"
+	//   }
+	// }
+ Intent_Message_ACTIONS_ON_GOOGLE Intent_Message_Platform = 8 + // Telephony Gateway. + Intent_Message_TELEPHONY Intent_Message_Platform = 10 +) + +var Intent_Message_Platform_name = map[int32]string{ + 0: "PLATFORM_UNSPECIFIED", + 1: "FACEBOOK", + 2: "SLACK", + 3: "TELEGRAM", + 4: "KIK", + 5: "SKYPE", + 6: "LINE", + 7: "VIBER", + 8: "ACTIONS_ON_GOOGLE", + 10: "TELEPHONY", +} +var Intent_Message_Platform_value = map[string]int32{ + "PLATFORM_UNSPECIFIED": 0, + "FACEBOOK": 1, + "SLACK": 2, + "TELEGRAM": 3, + "KIK": 4, + "SKYPE": 5, + "LINE": 6, + "VIBER": 7, + "ACTIONS_ON_GOOGLE": 8, + "TELEPHONY": 10, +} + +func (x Intent_Message_Platform) String() string { + return proto.EnumName(Intent_Message_Platform_name, int32(x)) +} +func (Intent_Message_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 0} +} + +// Represents an intent. +// Intents convert a number of user expressions or patterns into an action. An +// action is an extraction of a user command or sentence semantics. +type Intent struct { + // The unique identifier of this intent. + // Required for [Intents.UpdateIntent][google.cloud.dialogflow.v2beta1.Intents.UpdateIntent] and [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2beta1.Intents.BatchUpdateIntents] + // methods. + // Format: `projects//agent/intents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of this intent. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. Indicates whether webhooks are enabled for the intent. + WebhookState Intent_WebhookState `protobuf:"varint,6,opt,name=webhook_state,json=webhookState,proto3,enum=google.cloud.dialogflow.v2beta1.Intent_WebhookState" json:"webhook_state,omitempty"` + // Optional. The priority of this intent. Higher numbers represent higher + // priorities. If this is zero or unspecified, we use the default + // priority 500000. + // + // Negative numbers mean that the intent is disabled. + Priority int32 `protobuf:"varint,3,opt,name=priority,proto3" json:"priority,omitempty"` + // Optional. Indicates whether this is a fallback intent. + IsFallback bool `protobuf:"varint,4,opt,name=is_fallback,json=isFallback,proto3" json:"is_fallback,omitempty"` + // Optional. Indicates whether Machine Learning is enabled for the intent. + // Note: If `ml_enabled` setting is set to false, then this intent is not + // taken into account during inference in `ML ONLY` match mode. Also, + // auto-markup in the UI is turned off. + // DEPRECATED! Please use `ml_disabled` field instead. + // NOTE: If both `ml_enabled` and `ml_disabled` are either not set or false, + // then the default value is determined as follows: + // - Before April 15th, 2018 the default is: + // ml_enabled = false / ml_disabled = true. + // - After April 15th, 2018 the default is: + // ml_enabled = true / ml_disabled = false. + MlEnabled bool `protobuf:"varint,5,opt,name=ml_enabled,json=mlEnabled,proto3" json:"ml_enabled,omitempty"` // Deprecated: Do not use. + // Optional. Indicates whether Machine Learning is disabled for the intent. + // Note: If `ml_disabled` setting is set to true, then this intent is not + // taken into account during inference in `ML ONLY` match mode. Also, + // auto-markup in the UI is turned off. + MlDisabled bool `protobuf:"varint,19,opt,name=ml_disabled,json=mlDisabled,proto3" json:"ml_disabled,omitempty"` + // Optional. 
Indicates that this intent ends an interaction. Some integrations + // (e.g., Actions on Google or Dialogflow phone gateway) use this information + // to close interaction with an end user. Default is false. + EndInteraction bool `protobuf:"varint,21,opt,name=end_interaction,json=endInteraction,proto3" json:"end_interaction,omitempty"` + // Optional. The list of context names required for this intent to be + // triggered. + // Format: `projects//agent/sessions/-/contexts/`. + InputContextNames []string `protobuf:"bytes,7,rep,name=input_context_names,json=inputContextNames,proto3" json:"input_context_names,omitempty"` + // Optional. The collection of event names that trigger the intent. + // If the collection of input contexts is not empty, all of the contexts must + // be present in the active user session for an event to trigger this intent. + Events []string `protobuf:"bytes,8,rep,name=events,proto3" json:"events,omitempty"` + // Optional. The collection of examples that the agent is + // trained on. + TrainingPhrases []*Intent_TrainingPhrase `protobuf:"bytes,9,rep,name=training_phrases,json=trainingPhrases,proto3" json:"training_phrases,omitempty"` + // Optional. The name of the action associated with the intent. + // Note: The action name must not contain whitespaces. + Action string `protobuf:"bytes,10,opt,name=action,proto3" json:"action,omitempty"` + // Optional. The collection of contexts that are activated when the intent + // is matched. Context messages in this collection should not set the + // parameters field. Setting the `lifespan_count` to 0 will reset the context + // when the intent is matched. + // Format: `projects//agent/sessions/-/contexts/`. + OutputContexts []*Context `protobuf:"bytes,11,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // Optional. Indicates whether to delete all contexts in the current + // session when this intent is matched. + ResetContexts bool `protobuf:"varint,12,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"` + // Optional. The collection of parameters associated with the intent. + Parameters []*Intent_Parameter `protobuf:"bytes,13,rep,name=parameters,proto3" json:"parameters,omitempty"` + // Optional. The collection of rich messages corresponding to the + // `Response` field in the Dialogflow console. + Messages []*Intent_Message `protobuf:"bytes,14,rep,name=messages,proto3" json:"messages,omitempty"` + // Optional. The list of platforms for which the first response will be + // taken from among the messages assigned to the DEFAULT_PLATFORM. + DefaultResponsePlatforms []Intent_Message_Platform `protobuf:"varint,15,rep,packed,name=default_response_platforms,json=defaultResponsePlatforms,proto3,enum=google.cloud.dialogflow.v2beta1.Intent_Message_Platform" json:"default_response_platforms,omitempty"` + // Read-only. The unique identifier of the root intent in the chain of + // followup intents. It identifies the correct followup intents chain for + // this intent. We populate this field only in the output. + // + // Format: `projects//agent/intents/`. + RootFollowupIntentName string `protobuf:"bytes,16,opt,name=root_followup_intent_name,json=rootFollowupIntentName,proto3" json:"root_followup_intent_name,omitempty"` + // Read-only after creation. The unique identifier of the parent intent in the + // chain of followup intents. 
You can set this field when creating an intent, + // for example with [CreateIntent][] or [BatchUpdateIntents][], in order to + // make this intent a followup intent. + // + // It identifies the parent followup intent. + // Format: `projects//agent/intents/`. + ParentFollowupIntentName string `protobuf:"bytes,17,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` + // Read-only. Information about all followup intents that have this intent as + // a direct or indirect parent. We populate this field only in the output. + FollowupIntentInfo []*Intent_FollowupIntentInfo `protobuf:"bytes,18,rep,name=followup_intent_info,json=followupIntentInfo,proto3" json:"followup_intent_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent) Reset() { *m = Intent{} } +func (m *Intent) String() string { return proto.CompactTextString(m) } +func (*Intent) ProtoMessage() {} +func (*Intent) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0} +} +func (m *Intent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent.Unmarshal(m, b) +} +func (m *Intent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent.Marshal(b, m, deterministic) +} +func (dst *Intent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent.Merge(dst, src) +} +func (m *Intent) XXX_Size() int { + return xxx_messageInfo_Intent.Size(m) +} +func (m *Intent) XXX_DiscardUnknown() { + xxx_messageInfo_Intent.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent proto.InternalMessageInfo + +func (m *Intent) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Intent) GetWebhookState() Intent_WebhookState { + if m != nil { + return m.WebhookState + } + return Intent_WEBHOOK_STATE_UNSPECIFIED +} + +func (m *Intent) GetPriority() int32 { + if m != nil { + return m.Priority + } + return 0 +} + +func (m *Intent) GetIsFallback() bool { + if m != nil { + return m.IsFallback + } + return false +} + +// Deprecated: Do not use. 
+func (m *Intent) GetMlEnabled() bool { + if m != nil { + return m.MlEnabled + } + return false +} + +func (m *Intent) GetMlDisabled() bool { + if m != nil { + return m.MlDisabled + } + return false +} + +func (m *Intent) GetEndInteraction() bool { + if m != nil { + return m.EndInteraction + } + return false +} + +func (m *Intent) GetInputContextNames() []string { + if m != nil { + return m.InputContextNames + } + return nil +} + +func (m *Intent) GetEvents() []string { + if m != nil { + return m.Events + } + return nil +} + +func (m *Intent) GetTrainingPhrases() []*Intent_TrainingPhrase { + if m != nil { + return m.TrainingPhrases + } + return nil +} + +func (m *Intent) GetAction() string { + if m != nil { + return m.Action + } + return "" +} + +func (m *Intent) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *Intent) GetResetContexts() bool { + if m != nil { + return m.ResetContexts + } + return false +} + +func (m *Intent) GetParameters() []*Intent_Parameter { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *Intent) GetMessages() []*Intent_Message { + if m != nil { + return m.Messages + } + return nil +} + +func (m *Intent) GetDefaultResponsePlatforms() []Intent_Message_Platform { + if m != nil { + return m.DefaultResponsePlatforms + } + return nil +} + +func (m *Intent) GetRootFollowupIntentName() string { + if m != nil { + return m.RootFollowupIntentName + } + return "" +} + +func (m *Intent) GetParentFollowupIntentName() string { + if m != nil { + return m.ParentFollowupIntentName + } + return "" +} + +func (m *Intent) GetFollowupIntentInfo() []*Intent_FollowupIntentInfo { + if m != nil { + return m.FollowupIntentInfo + } + return nil +} + +// Represents an example that the agent is trained on. +type Intent_TrainingPhrase struct { + // Output only. The unique identifier of this training phrase. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The type of the training phrase. + Type Intent_TrainingPhrase_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.dialogflow.v2beta1.Intent_TrainingPhrase_Type" json:"type,omitempty"` + // Required. The ordered list of training phrase parts. + // The parts are concatenated in order to form the training phrase. + // + // Note: The API does not automatically annotate training phrases like the + // Dialogflow Console does. + // + // Note: Do not forget to include whitespace at part boundaries, + // so the training phrase is well formatted when the parts are concatenated. + // + // If the training phrase does not need to be annotated with parameters, + // you just need a single part with only the [Part.text][google.cloud.dialogflow.v2beta1.Intent.TrainingPhrase.Part.text] field set. + // + // If you want to annotate the training phrase, you must create multiple + // parts, where the fields of each part are populated in one of two ways: + // + // - `Part.text` is set to a part of the phrase that has no parameters. + // - `Part.text` is set to a part of the phrase that you want to annotate, + // and the `entity_type`, `alias`, and `user_defined` fields are all + // set. + Parts []*Intent_TrainingPhrase_Part `protobuf:"bytes,3,rep,name=parts,proto3" json:"parts,omitempty"` + // Optional. Indicates how many times this example was added to + // the intent. Each time a developer adds an existing sample by editing an + // intent or training, this counter is increased. 
+ TimesAddedCount int32 `protobuf:"varint,4,opt,name=times_added_count,json=timesAddedCount,proto3" json:"times_added_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_TrainingPhrase) Reset() { *m = Intent_TrainingPhrase{} } +func (m *Intent_TrainingPhrase) String() string { return proto.CompactTextString(m) } +func (*Intent_TrainingPhrase) ProtoMessage() {} +func (*Intent_TrainingPhrase) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 0} +} +func (m *Intent_TrainingPhrase) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_TrainingPhrase.Unmarshal(m, b) +} +func (m *Intent_TrainingPhrase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_TrainingPhrase.Marshal(b, m, deterministic) +} +func (dst *Intent_TrainingPhrase) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_TrainingPhrase.Merge(dst, src) +} +func (m *Intent_TrainingPhrase) XXX_Size() int { + return xxx_messageInfo_Intent_TrainingPhrase.Size(m) +} +func (m *Intent_TrainingPhrase) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_TrainingPhrase.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_TrainingPhrase proto.InternalMessageInfo + +func (m *Intent_TrainingPhrase) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent_TrainingPhrase) GetType() Intent_TrainingPhrase_Type { + if m != nil { + return m.Type + } + return Intent_TrainingPhrase_TYPE_UNSPECIFIED +} + +func (m *Intent_TrainingPhrase) GetParts() []*Intent_TrainingPhrase_Part { + if m != nil { + return m.Parts + } + return nil +} + +func (m *Intent_TrainingPhrase) GetTimesAddedCount() int32 { + if m != nil { + return m.TimesAddedCount + } + return 0 +} + +// Represents a part of a training phrase. +type Intent_TrainingPhrase_Part struct { + // Required. The text for this part. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Optional. The entity type name prefixed with `@`. + // This field is required for annotated parts of the training phrase. + EntityType string `protobuf:"bytes,2,opt,name=entity_type,json=entityType,proto3" json:"entity_type,omitempty"` + // Optional. The parameter name for the value extracted from the + // annotated part of the example. + // This field is required for annotated parts of the training phrase. + Alias string `protobuf:"bytes,3,opt,name=alias,proto3" json:"alias,omitempty"` + // Optional. Indicates whether the text was manually annotated. + // This field is set to true when the Dialogflow Console is used to + // manually annotate the part. When creating an annotated part with the + // API, you must set this to true. 
+ UserDefined bool `protobuf:"varint,4,opt,name=user_defined,json=userDefined,proto3" json:"user_defined,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_TrainingPhrase_Part) Reset() { *m = Intent_TrainingPhrase_Part{} } +func (m *Intent_TrainingPhrase_Part) String() string { return proto.CompactTextString(m) } +func (*Intent_TrainingPhrase_Part) ProtoMessage() {} +func (*Intent_TrainingPhrase_Part) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 0, 0} +} +func (m *Intent_TrainingPhrase_Part) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Unmarshal(m, b) +} +func (m *Intent_TrainingPhrase_Part) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Marshal(b, m, deterministic) +} +func (dst *Intent_TrainingPhrase_Part) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_TrainingPhrase_Part.Merge(dst, src) +} +func (m *Intent_TrainingPhrase_Part) XXX_Size() int { + return xxx_messageInfo_Intent_TrainingPhrase_Part.Size(m) +} +func (m *Intent_TrainingPhrase_Part) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_TrainingPhrase_Part.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_TrainingPhrase_Part proto.InternalMessageInfo + +func (m *Intent_TrainingPhrase_Part) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetEntityType() string { + if m != nil { + return m.EntityType + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetAlias() string { + if m != nil { + return m.Alias + } + return "" +} + +func (m *Intent_TrainingPhrase_Part) GetUserDefined() bool { + if m != nil { + return m.UserDefined + } + return false +} + +// Represents intent parameters. +type Intent_Parameter struct { + // The unique identifier of this parameter. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the parameter. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. The definition of the parameter value. It can be: + // - a constant string, + // - a parameter value defined as `$parameter_name`, + // - an original parameter value defined as `$parameter_name.original`, + // - a parameter value from some context defined as + // `#context_name.parameter_name`. + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + // Optional. The default value to use when the `value` yields an empty + // result. + // Default values can be extracted from contexts by using the following + // syntax: `#context_name.parameter_name`. + DefaultValue string `protobuf:"bytes,4,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + // Optional. The name of the entity type, prefixed with `@`, that + // describes values of the parameter. If the parameter is + // required, this must be provided. + EntityTypeDisplayName string `protobuf:"bytes,5,opt,name=entity_type_display_name,json=entityTypeDisplayName,proto3" json:"entity_type_display_name,omitempty"` + // Optional. Indicates whether the parameter is required. That is, + // whether the intent cannot be completed without collecting the parameter + // value. + Mandatory bool `protobuf:"varint,6,opt,name=mandatory,proto3" json:"mandatory,omitempty"` + // Optional. 
The collection of prompts that the agent can present to the + // user in order to collect value for the parameter. + Prompts []string `protobuf:"bytes,7,rep,name=prompts,proto3" json:"prompts,omitempty"` + // Optional. Indicates whether the parameter represents a list of values. + IsList bool `protobuf:"varint,8,opt,name=is_list,json=isList,proto3" json:"is_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Parameter) Reset() { *m = Intent_Parameter{} } +func (m *Intent_Parameter) String() string { return proto.CompactTextString(m) } +func (*Intent_Parameter) ProtoMessage() {} +func (*Intent_Parameter) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 1} +} +func (m *Intent_Parameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Parameter.Unmarshal(m, b) +} +func (m *Intent_Parameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Parameter.Marshal(b, m, deterministic) +} +func (dst *Intent_Parameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Parameter.Merge(dst, src) +} +func (m *Intent_Parameter) XXX_Size() int { + return xxx_messageInfo_Intent_Parameter.Size(m) +} +func (m *Intent_Parameter) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Parameter.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Parameter proto.InternalMessageInfo + +func (m *Intent_Parameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Intent_Parameter) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Intent_Parameter) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Intent_Parameter) GetDefaultValue() string { + if m != nil { + return m.DefaultValue + } + return "" +} + +func (m *Intent_Parameter) GetEntityTypeDisplayName() string { + if m != nil { + return m.EntityTypeDisplayName + } + return "" +} + +func (m *Intent_Parameter) GetMandatory() bool { + if m != nil { + return m.Mandatory + } + return false +} + +func (m *Intent_Parameter) GetPrompts() []string { + if m != nil { + return m.Prompts + } + return nil +} + +func (m *Intent_Parameter) GetIsList() bool { + if m != nil { + return m.IsList + } + return false +} + +// Corresponds to the `Response` field in the Dialogflow console. +type Intent_Message struct { + // Required. The rich response message. + // + // Types that are valid to be assigned to Message: + // *Intent_Message_Text_ + // *Intent_Message_Image_ + // *Intent_Message_QuickReplies_ + // *Intent_Message_Card_ + // *Intent_Message_Payload + // *Intent_Message_SimpleResponses_ + // *Intent_Message_BasicCard_ + // *Intent_Message_Suggestions_ + // *Intent_Message_LinkOutSuggestion_ + // *Intent_Message_ListSelect_ + // *Intent_Message_CarouselSelect_ + // *Intent_Message_TelephonyPlayAudio_ + // *Intent_Message_TelephonySynthesizeSpeech_ + // *Intent_Message_TelephonyTransferCall_ + Message isIntent_Message_Message `protobuf_oneof:"message"` + // Optional. The platform that this message is intended for. 
+ Platform Intent_Message_Platform `protobuf:"varint,6,opt,name=platform,proto3,enum=google.cloud.dialogflow.v2beta1.Intent_Message_Platform" json:"platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message) Reset() { *m = Intent_Message{} } +func (m *Intent_Message) String() string { return proto.CompactTextString(m) } +func (*Intent_Message) ProtoMessage() {} +func (*Intent_Message) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2} +} +func (m *Intent_Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message.Unmarshal(m, b) +} +func (m *Intent_Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message.Marshal(b, m, deterministic) +} +func (dst *Intent_Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message.Merge(dst, src) +} +func (m *Intent_Message) XXX_Size() int { + return xxx_messageInfo_Intent_Message.Size(m) +} +func (m *Intent_Message) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message proto.InternalMessageInfo + +type isIntent_Message_Message interface { + isIntent_Message_Message() +} + +type Intent_Message_Text_ struct { + Text *Intent_Message_Text `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type Intent_Message_Image_ struct { + Image *Intent_Message_Image `protobuf:"bytes,2,opt,name=image,proto3,oneof"` +} + +type Intent_Message_QuickReplies_ struct { + QuickReplies *Intent_Message_QuickReplies `protobuf:"bytes,3,opt,name=quick_replies,json=quickReplies,proto3,oneof"` +} + +type Intent_Message_Card_ struct { + Card *Intent_Message_Card `protobuf:"bytes,4,opt,name=card,proto3,oneof"` +} + +type Intent_Message_Payload struct { + Payload *_struct.Struct `protobuf:"bytes,5,opt,name=payload,proto3,oneof"` +} + +type Intent_Message_SimpleResponses_ struct { + SimpleResponses *Intent_Message_SimpleResponses `protobuf:"bytes,7,opt,name=simple_responses,json=simpleResponses,proto3,oneof"` +} + +type Intent_Message_BasicCard_ struct { + BasicCard *Intent_Message_BasicCard `protobuf:"bytes,8,opt,name=basic_card,json=basicCard,proto3,oneof"` +} + +type Intent_Message_Suggestions_ struct { + Suggestions *Intent_Message_Suggestions `protobuf:"bytes,9,opt,name=suggestions,proto3,oneof"` +} + +type Intent_Message_LinkOutSuggestion_ struct { + LinkOutSuggestion *Intent_Message_LinkOutSuggestion `protobuf:"bytes,10,opt,name=link_out_suggestion,json=linkOutSuggestion,proto3,oneof"` +} + +type Intent_Message_ListSelect_ struct { + ListSelect *Intent_Message_ListSelect `protobuf:"bytes,11,opt,name=list_select,json=listSelect,proto3,oneof"` +} + +type Intent_Message_CarouselSelect_ struct { + CarouselSelect *Intent_Message_CarouselSelect `protobuf:"bytes,12,opt,name=carousel_select,json=carouselSelect,proto3,oneof"` +} + +type Intent_Message_TelephonyPlayAudio_ struct { + TelephonyPlayAudio *Intent_Message_TelephonyPlayAudio `protobuf:"bytes,13,opt,name=telephony_play_audio,json=telephonyPlayAudio,proto3,oneof"` +} + +type Intent_Message_TelephonySynthesizeSpeech_ struct { + TelephonySynthesizeSpeech *Intent_Message_TelephonySynthesizeSpeech `protobuf:"bytes,14,opt,name=telephony_synthesize_speech,json=telephonySynthesizeSpeech,proto3,oneof"` +} + +type Intent_Message_TelephonyTransferCall_ struct { + TelephonyTransferCall *Intent_Message_TelephonyTransferCall 
`protobuf:"bytes,15,opt,name=telephony_transfer_call,json=telephonyTransferCall,proto3,oneof"` +} + +func (*Intent_Message_Text_) isIntent_Message_Message() {} + +func (*Intent_Message_Image_) isIntent_Message_Message() {} + +func (*Intent_Message_QuickReplies_) isIntent_Message_Message() {} + +func (*Intent_Message_Card_) isIntent_Message_Message() {} + +func (*Intent_Message_Payload) isIntent_Message_Message() {} + +func (*Intent_Message_SimpleResponses_) isIntent_Message_Message() {} + +func (*Intent_Message_BasicCard_) isIntent_Message_Message() {} + +func (*Intent_Message_Suggestions_) isIntent_Message_Message() {} + +func (*Intent_Message_LinkOutSuggestion_) isIntent_Message_Message() {} + +func (*Intent_Message_ListSelect_) isIntent_Message_Message() {} + +func (*Intent_Message_CarouselSelect_) isIntent_Message_Message() {} + +func (*Intent_Message_TelephonyPlayAudio_) isIntent_Message_Message() {} + +func (*Intent_Message_TelephonySynthesizeSpeech_) isIntent_Message_Message() {} + +func (*Intent_Message_TelephonyTransferCall_) isIntent_Message_Message() {} + +func (m *Intent_Message) GetMessage() isIntent_Message_Message { + if m != nil { + return m.Message + } + return nil +} + +func (m *Intent_Message) GetText() *Intent_Message_Text { + if x, ok := m.GetMessage().(*Intent_Message_Text_); ok { + return x.Text + } + return nil +} + +func (m *Intent_Message) GetImage() *Intent_Message_Image { + if x, ok := m.GetMessage().(*Intent_Message_Image_); ok { + return x.Image + } + return nil +} + +func (m *Intent_Message) GetQuickReplies() *Intent_Message_QuickReplies { + if x, ok := m.GetMessage().(*Intent_Message_QuickReplies_); ok { + return x.QuickReplies + } + return nil +} + +func (m *Intent_Message) GetCard() *Intent_Message_Card { + if x, ok := m.GetMessage().(*Intent_Message_Card_); ok { + return x.Card + } + return nil +} + +func (m *Intent_Message) GetPayload() *_struct.Struct { + if x, ok := m.GetMessage().(*Intent_Message_Payload); ok { + return x.Payload + } + return nil +} + +func (m *Intent_Message) GetSimpleResponses() *Intent_Message_SimpleResponses { + if x, ok := m.GetMessage().(*Intent_Message_SimpleResponses_); ok { + return x.SimpleResponses + } + return nil +} + +func (m *Intent_Message) GetBasicCard() *Intent_Message_BasicCard { + if x, ok := m.GetMessage().(*Intent_Message_BasicCard_); ok { + return x.BasicCard + } + return nil +} + +func (m *Intent_Message) GetSuggestions() *Intent_Message_Suggestions { + if x, ok := m.GetMessage().(*Intent_Message_Suggestions_); ok { + return x.Suggestions + } + return nil +} + +func (m *Intent_Message) GetLinkOutSuggestion() *Intent_Message_LinkOutSuggestion { + if x, ok := m.GetMessage().(*Intent_Message_LinkOutSuggestion_); ok { + return x.LinkOutSuggestion + } + return nil +} + +func (m *Intent_Message) GetListSelect() *Intent_Message_ListSelect { + if x, ok := m.GetMessage().(*Intent_Message_ListSelect_); ok { + return x.ListSelect + } + return nil +} + +func (m *Intent_Message) GetCarouselSelect() *Intent_Message_CarouselSelect { + if x, ok := m.GetMessage().(*Intent_Message_CarouselSelect_); ok { + return x.CarouselSelect + } + return nil +} + +func (m *Intent_Message) GetTelephonyPlayAudio() *Intent_Message_TelephonyPlayAudio { + if x, ok := m.GetMessage().(*Intent_Message_TelephonyPlayAudio_); ok { + return x.TelephonyPlayAudio + } + return nil +} + +func (m *Intent_Message) GetTelephonySynthesizeSpeech() *Intent_Message_TelephonySynthesizeSpeech { + if x, ok := 
m.GetMessage().(*Intent_Message_TelephonySynthesizeSpeech_); ok { + return x.TelephonySynthesizeSpeech + } + return nil +} + +func (m *Intent_Message) GetTelephonyTransferCall() *Intent_Message_TelephonyTransferCall { + if x, ok := m.GetMessage().(*Intent_Message_TelephonyTransferCall_); ok { + return x.TelephonyTransferCall + } + return nil +} + +func (m *Intent_Message) GetPlatform() Intent_Message_Platform { + if m != nil { + return m.Platform + } + return Intent_Message_PLATFORM_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Intent_Message) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Intent_Message_OneofMarshaler, _Intent_Message_OneofUnmarshaler, _Intent_Message_OneofSizer, []interface{}{ + (*Intent_Message_Text_)(nil), + (*Intent_Message_Image_)(nil), + (*Intent_Message_QuickReplies_)(nil), + (*Intent_Message_Card_)(nil), + (*Intent_Message_Payload)(nil), + (*Intent_Message_SimpleResponses_)(nil), + (*Intent_Message_BasicCard_)(nil), + (*Intent_Message_Suggestions_)(nil), + (*Intent_Message_LinkOutSuggestion_)(nil), + (*Intent_Message_ListSelect_)(nil), + (*Intent_Message_CarouselSelect_)(nil), + (*Intent_Message_TelephonyPlayAudio_)(nil), + (*Intent_Message_TelephonySynthesizeSpeech_)(nil), + (*Intent_Message_TelephonyTransferCall_)(nil), + } +} + +func _Intent_Message_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Intent_Message) + // message + switch x := m.Message.(type) { + case *Intent_Message_Text_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Text); err != nil { + return err + } + case *Intent_Message_Image_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Image); err != nil { + return err + } + case *Intent_Message_QuickReplies_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.QuickReplies); err != nil { + return err + } + case *Intent_Message_Card_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Card); err != nil { + return err + } + case *Intent_Message_Payload: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Payload); err != nil { + return err + } + case *Intent_Message_SimpleResponses_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SimpleResponses); err != nil { + return err + } + case *Intent_Message_BasicCard_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BasicCard); err != nil { + return err + } + case *Intent_Message_Suggestions_: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Suggestions); err != nil { + return err + } + case *Intent_Message_LinkOutSuggestion_: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LinkOutSuggestion); err != nil { + return err + } + case *Intent_Message_ListSelect_: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListSelect); err != nil { + return err + } + case *Intent_Message_CarouselSelect_: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CarouselSelect); err != nil { + return err + } + case *Intent_Message_TelephonyPlayAudio_: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TelephonyPlayAudio); err != nil { + return err + } + case *Intent_Message_TelephonySynthesizeSpeech_: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.TelephonySynthesizeSpeech); err != nil { + return err + } + case *Intent_Message_TelephonyTransferCall_: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TelephonyTransferCall); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Intent_Message.Message has unexpected type %T", x) + } + return nil +} + +func _Intent_Message_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Intent_Message) + switch tag { + case 1: // message.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Text) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Text_{msg} + return true, err + case 2: // message.image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Image) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Image_{msg} + return true, err + case 3: // message.quick_replies + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_QuickReplies) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_QuickReplies_{msg} + return true, err + case 4: // message.card + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Card) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Card_{msg} + return true, err + case 5: // message.payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Payload{msg} + return true, err + case 7: // message.simple_responses + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_SimpleResponses) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_SimpleResponses_{msg} + return true, err + case 8: // message.basic_card + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_BasicCard) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_BasicCard_{msg} + return true, err + case 9: // message.suggestions + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_Suggestions) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_Suggestions_{msg} + return true, err + case 10: // message.link_out_suggestion + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_LinkOutSuggestion) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_LinkOutSuggestion_{msg} + return true, err + case 11: // message.list_select + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_ListSelect) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_ListSelect_{msg} + return true, err + case 12: // message.carousel_select + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_CarouselSelect) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_CarouselSelect_{msg} + return true, err + case 13: // message.telephony_play_audio + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_TelephonyPlayAudio) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_TelephonyPlayAudio_{msg} + return true, err + case 14: // message.telephony_synthesize_speech + 
if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_TelephonySynthesizeSpeech) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_TelephonySynthesizeSpeech_{msg} + return true, err + case 15: // message.telephony_transfer_call + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Intent_Message_TelephonyTransferCall) + err := b.DecodeMessage(msg) + m.Message = &Intent_Message_TelephonyTransferCall_{msg} + return true, err + default: + return false, nil + } +} + +func _Intent_Message_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Intent_Message) + // message + switch x := m.Message.(type) { + case *Intent_Message_Text_: + s := proto.Size(x.Text) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Image_: + s := proto.Size(x.Image) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_QuickReplies_: + s := proto.Size(x.QuickReplies) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Card_: + s := proto.Size(x.Card) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Payload: + s := proto.Size(x.Payload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_SimpleResponses_: + s := proto.Size(x.SimpleResponses) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_BasicCard_: + s := proto.Size(x.BasicCard) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_Suggestions_: + s := proto.Size(x.Suggestions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_LinkOutSuggestion_: + s := proto.Size(x.LinkOutSuggestion) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_ListSelect_: + s := proto.Size(x.ListSelect) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_CarouselSelect_: + s := proto.Size(x.CarouselSelect) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_TelephonyPlayAudio_: + s := proto.Size(x.TelephonyPlayAudio) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_TelephonySynthesizeSpeech_: + s := proto.Size(x.TelephonySynthesizeSpeech) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Intent_Message_TelephonyTransferCall_: + s := proto.Size(x.TelephonyTransferCall) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The text response message. +type Intent_Message_Text struct { + // Optional. The collection of the agent's responses. 
+ Text []string `protobuf:"bytes,1,rep,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Text) Reset() { *m = Intent_Message_Text{} } +func (m *Intent_Message_Text) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Text) ProtoMessage() {} +func (*Intent_Message_Text) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 0} +} +func (m *Intent_Message_Text) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Text.Unmarshal(m, b) +} +func (m *Intent_Message_Text) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Text.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Text) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Text.Merge(dst, src) +} +func (m *Intent_Message_Text) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Text.Size(m) +} +func (m *Intent_Message_Text) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Text.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Text proto.InternalMessageInfo + +func (m *Intent_Message_Text) GetText() []string { + if m != nil { + return m.Text + } + return nil +} + +// The image response message. +type Intent_Message_Image struct { + // Optional. The public URI to an image file. + ImageUri string `protobuf:"bytes,1,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // A text description of the image to be used for accessibility, + // e.g., screen readers. Required if image_uri is set for CarouselSelect. + AccessibilityText string `protobuf:"bytes,2,opt,name=accessibility_text,json=accessibilityText,proto3" json:"accessibility_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Image) Reset() { *m = Intent_Message_Image{} } +func (m *Intent_Message_Image) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Image) ProtoMessage() {} +func (*Intent_Message_Image) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 1} +} +func (m *Intent_Message_Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Image.Unmarshal(m, b) +} +func (m *Intent_Message_Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Image.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Image.Merge(dst, src) +} +func (m *Intent_Message_Image) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Image.Size(m) +} +func (m *Intent_Message_Image) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Image proto.InternalMessageInfo + +func (m *Intent_Message_Image) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *Intent_Message_Image) GetAccessibilityText() string { + if m != nil { + return m.AccessibilityText + } + return "" +} + +// The quick replies response message. +type Intent_Message_QuickReplies struct { + // Optional. The title of the collection of quick replies. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The collection of quick replies. 
+ QuickReplies []string `protobuf:"bytes,2,rep,name=quick_replies,json=quickReplies,proto3" json:"quick_replies,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_QuickReplies) Reset() { *m = Intent_Message_QuickReplies{} } +func (m *Intent_Message_QuickReplies) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_QuickReplies) ProtoMessage() {} +func (*Intent_Message_QuickReplies) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 2} +} +func (m *Intent_Message_QuickReplies) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_QuickReplies.Unmarshal(m, b) +} +func (m *Intent_Message_QuickReplies) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_QuickReplies.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_QuickReplies) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_QuickReplies.Merge(dst, src) +} +func (m *Intent_Message_QuickReplies) XXX_Size() int { + return xxx_messageInfo_Intent_Message_QuickReplies.Size(m) +} +func (m *Intent_Message_QuickReplies) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_QuickReplies.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_QuickReplies proto.InternalMessageInfo + +func (m *Intent_Message_QuickReplies) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_QuickReplies) GetQuickReplies() []string { + if m != nil { + return m.QuickReplies + } + return nil +} + +// The card response message. +type Intent_Message_Card struct { + // Optional. The title of the card. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The subtitle of the card. + Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` + // Optional. The public URI to an image file for the card. + ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. The collection of card buttons. 
+ Buttons []*Intent_Message_Card_Button `protobuf:"bytes,4,rep,name=buttons,proto3" json:"buttons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Card) Reset() { *m = Intent_Message_Card{} } +func (m *Intent_Message_Card) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Card) ProtoMessage() {} +func (*Intent_Message_Card) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 3} +} +func (m *Intent_Message_Card) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Card.Unmarshal(m, b) +} +func (m *Intent_Message_Card) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Card.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Card) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Card.Merge(dst, src) +} +func (m *Intent_Message_Card) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Card.Size(m) +} +func (m *Intent_Message_Card) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Card.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Card proto.InternalMessageInfo + +func (m *Intent_Message_Card) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_Card) GetSubtitle() string { + if m != nil { + return m.Subtitle + } + return "" +} + +func (m *Intent_Message_Card) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *Intent_Message_Card) GetButtons() []*Intent_Message_Card_Button { + if m != nil { + return m.Buttons + } + return nil +} + +// Optional. Contains information about a button. +type Intent_Message_Card_Button struct { + // Optional. The text to show on the button. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Optional. The text to send back to the Dialogflow API or a URI to + // open. 
+ Postback string `protobuf:"bytes,2,opt,name=postback,proto3" json:"postback,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Card_Button) Reset() { *m = Intent_Message_Card_Button{} } +func (m *Intent_Message_Card_Button) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Card_Button) ProtoMessage() {} +func (*Intent_Message_Card_Button) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 3, 0} +} +func (m *Intent_Message_Card_Button) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Card_Button.Unmarshal(m, b) +} +func (m *Intent_Message_Card_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Card_Button.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Card_Button) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Card_Button.Merge(dst, src) +} +func (m *Intent_Message_Card_Button) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Card_Button.Size(m) +} +func (m *Intent_Message_Card_Button) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Card_Button.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Card_Button proto.InternalMessageInfo + +func (m *Intent_Message_Card_Button) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Intent_Message_Card_Button) GetPostback() string { + if m != nil { + return m.Postback + } + return "" +} + +// The simple response message containing speech or text. +type Intent_Message_SimpleResponse struct { + // One of text_to_speech or ssml must be provided. The plain text of the + // speech output. Mutually exclusive with ssml. + TextToSpeech string `protobuf:"bytes,1,opt,name=text_to_speech,json=textToSpeech,proto3" json:"text_to_speech,omitempty"` + // One of text_to_speech or ssml must be provided. Structured spoken + // response to the user in the SSML format. Mutually exclusive with + // text_to_speech. + Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3" json:"ssml,omitempty"` + // Optional. The text to display. 
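+ // A minimal sketch of the card variant built from the Card and Card_Button
+ // types above, assuming the dialogflowpb alias; the title, URI and postback
+ // values are placeholders.
+ //
+ //   card := &dialogflowpb.Intent_Message{
+ //       Message: &dialogflowpb.Intent_Message_Card_{
+ //           Card: &dialogflowpb.Intent_Message_Card{
+ //               Title:    "Lyra",
+ //               Subtitle: "Workflow engine",
+ //               ImageUri: "https://example.com/logo.png",
+ //               Buttons: []*dialogflowpb.Intent_Message_Card_Button{
+ //                   {Text: "Open docs", Postback: "https://example.com/docs"},
+ //               },
+ //           },
+ //       },
+ //   }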
+ DisplayText string `protobuf:"bytes,3,opt,name=display_text,json=displayText,proto3" json:"display_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SimpleResponse) Reset() { *m = Intent_Message_SimpleResponse{} } +func (m *Intent_Message_SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SimpleResponse) ProtoMessage() {} +func (*Intent_Message_SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 4} +} +func (m *Intent_Message_SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SimpleResponse.Unmarshal(m, b) +} +func (m *Intent_Message_SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SimpleResponse.Merge(dst, src) +} +func (m *Intent_Message_SimpleResponse) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SimpleResponse.Size(m) +} +func (m *Intent_Message_SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SimpleResponse proto.InternalMessageInfo + +func (m *Intent_Message_SimpleResponse) GetTextToSpeech() string { + if m != nil { + return m.TextToSpeech + } + return "" +} + +func (m *Intent_Message_SimpleResponse) GetSsml() string { + if m != nil { + return m.Ssml + } + return "" +} + +func (m *Intent_Message_SimpleResponse) GetDisplayText() string { + if m != nil { + return m.DisplayText + } + return "" +} + +// The collection of simple response candidates. +// This message in `QueryResult.fulfillment_messages` and +// `WebhookResponse.fulfillment_messages` should contain only one +// `SimpleResponse`. +type Intent_Message_SimpleResponses struct { + // Required. The list of simple responses. 
+ SimpleResponses []*Intent_Message_SimpleResponse `protobuf:"bytes,1,rep,name=simple_responses,json=simpleResponses,proto3" json:"simple_responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SimpleResponses) Reset() { *m = Intent_Message_SimpleResponses{} } +func (m *Intent_Message_SimpleResponses) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SimpleResponses) ProtoMessage() {} +func (*Intent_Message_SimpleResponses) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 5} +} +func (m *Intent_Message_SimpleResponses) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SimpleResponses.Unmarshal(m, b) +} +func (m *Intent_Message_SimpleResponses) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SimpleResponses.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SimpleResponses) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SimpleResponses.Merge(dst, src) +} +func (m *Intent_Message_SimpleResponses) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SimpleResponses.Size(m) +} +func (m *Intent_Message_SimpleResponses) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SimpleResponses.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SimpleResponses proto.InternalMessageInfo + +func (m *Intent_Message_SimpleResponses) GetSimpleResponses() []*Intent_Message_SimpleResponse { + if m != nil { + return m.SimpleResponses + } + return nil +} + +// The basic card message. Useful for displaying information. +type Intent_Message_BasicCard struct { + // Optional. The title of the card. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The subtitle of the card. + Subtitle string `protobuf:"bytes,2,opt,name=subtitle,proto3" json:"subtitle,omitempty"` + // Required, unless image is present. The body text of the card. + FormattedText string `protobuf:"bytes,3,opt,name=formatted_text,json=formattedText,proto3" json:"formatted_text,omitempty"` + // Optional. The image for the card. + Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + // Optional. The collection of card buttons. 
+ Buttons []*Intent_Message_BasicCard_Button `protobuf:"bytes,5,rep,name=buttons,proto3" json:"buttons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard) Reset() { *m = Intent_Message_BasicCard{} } +func (m *Intent_Message_BasicCard) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_BasicCard) ProtoMessage() {} +func (*Intent_Message_BasicCard) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 6} +} +func (m *Intent_Message_BasicCard) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard.Merge(dst, src) +} +func (m *Intent_Message_BasicCard) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard.Size(m) +} +func (m *Intent_Message_BasicCard) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_BasicCard) GetSubtitle() string { + if m != nil { + return m.Subtitle + } + return "" +} + +func (m *Intent_Message_BasicCard) GetFormattedText() string { + if m != nil { + return m.FormattedText + } + return "" +} + +func (m *Intent_Message_BasicCard) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *Intent_Message_BasicCard) GetButtons() []*Intent_Message_BasicCard_Button { + if m != nil { + return m.Buttons + } + return nil +} + +// The button object that appears at the bottom of a card. +type Intent_Message_BasicCard_Button struct { + // Required. The title of the button. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Required. Action to take when a user taps on the button. 
+ OpenUriAction *Intent_Message_BasicCard_Button_OpenUriAction `protobuf:"bytes,2,opt,name=open_uri_action,json=openUriAction,proto3" json:"open_uri_action,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard_Button) Reset() { *m = Intent_Message_BasicCard_Button{} } +func (m *Intent_Message_BasicCard_Button) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_BasicCard_Button) ProtoMessage() {} +func (*Intent_Message_BasicCard_Button) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 6, 0} +} +func (m *Intent_Message_BasicCard_Button) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard_Button) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard_Button) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard_Button.Merge(dst, src) +} +func (m *Intent_Message_BasicCard_Button) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard_Button.Size(m) +} +func (m *Intent_Message_BasicCard_Button) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard_Button.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard_Button proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard_Button) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_BasicCard_Button) GetOpenUriAction() *Intent_Message_BasicCard_Button_OpenUriAction { + if m != nil { + return m.OpenUriAction + } + return nil +} + +// Opens the given URI. +type Intent_Message_BasicCard_Button_OpenUriAction struct { + // Required. The HTTP or HTTPS scheme URI. 
+ Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_BasicCard_Button_OpenUriAction) Reset() { + *m = Intent_Message_BasicCard_Button_OpenUriAction{} +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) String() string { + return proto.CompactTextString(m) +} +func (*Intent_Message_BasicCard_Button_OpenUriAction) ProtoMessage() {} +func (*Intent_Message_BasicCard_Button_OpenUriAction) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 6, 0, 0} +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Unmarshal(m, b) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Merge(dst, src) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_Size() int { + return xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.Size(m) +} +func (m *Intent_Message_BasicCard_Button_OpenUriAction) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_BasicCard_Button_OpenUriAction proto.InternalMessageInfo + +func (m *Intent_Message_BasicCard_Button_OpenUriAction) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The suggestion chip message that the user can tap to quickly post a reply +// to the conversation. +type Intent_Message_Suggestion struct { + // Required. The text shown the in the suggestion chip. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Suggestion) Reset() { *m = Intent_Message_Suggestion{} } +func (m *Intent_Message_Suggestion) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Suggestion) ProtoMessage() {} +func (*Intent_Message_Suggestion) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 7} +} +func (m *Intent_Message_Suggestion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Suggestion.Unmarshal(m, b) +} +func (m *Intent_Message_Suggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Suggestion.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Suggestion) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Suggestion.Merge(dst, src) +} +func (m *Intent_Message_Suggestion) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Suggestion.Size(m) +} +func (m *Intent_Message_Suggestion) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Suggestion.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Suggestion proto.InternalMessageInfo + +func (m *Intent_Message_Suggestion) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +// The collection of suggestions. +type Intent_Message_Suggestions struct { + // Required. The list of suggested replies. 
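+ // A minimal sketch of a BasicCard with an OpenUriAction button, using the
+ // BasicCard, BasicCard_Button and OpenUriAction types above. The dialogflowpb
+ // alias and all literal values are placeholders.
+ //
+ //   bc := &dialogflowpb.Intent_Message{
+ //       Message: &dialogflowpb.Intent_Message_BasicCard_{
+ //           BasicCard: &dialogflowpb.Intent_Message_BasicCard{
+ //               Title:         "Release notes",
+ //               FormattedText: "A short body text for the card.",
+ //               Image: &dialogflowpb.Intent_Message_Image{
+ //                   ImageUri:          "https://example.com/banner.png",
+ //                   AccessibilityText: "Release banner",
+ //               },
+ //               Buttons: []*dialogflowpb.Intent_Message_BasicCard_Button{
+ //                   {
+ //                       Title: "Read more",
+ //                       OpenUriAction: &dialogflowpb.Intent_Message_BasicCard_Button_OpenUriAction{
+ //                           Uri: "https://example.com/notes",
+ //                       },
+ //                   },
+ //               },
+ //           },
+ //       },
+ //   }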
+ Suggestions []*Intent_Message_Suggestion `protobuf:"bytes,1,rep,name=suggestions,proto3" json:"suggestions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_Suggestions) Reset() { *m = Intent_Message_Suggestions{} } +func (m *Intent_Message_Suggestions) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_Suggestions) ProtoMessage() {} +func (*Intent_Message_Suggestions) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 8} +} +func (m *Intent_Message_Suggestions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_Suggestions.Unmarshal(m, b) +} +func (m *Intent_Message_Suggestions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_Suggestions.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_Suggestions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_Suggestions.Merge(dst, src) +} +func (m *Intent_Message_Suggestions) XXX_Size() int { + return xxx_messageInfo_Intent_Message_Suggestions.Size(m) +} +func (m *Intent_Message_Suggestions) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_Suggestions.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_Suggestions proto.InternalMessageInfo + +func (m *Intent_Message_Suggestions) GetSuggestions() []*Intent_Message_Suggestion { + if m != nil { + return m.Suggestions + } + return nil +} + +// The suggestion chip message that allows the user to jump out to the app +// or website associated with this agent. +type Intent_Message_LinkOutSuggestion struct { + // Required. The name of the app or site this chip is linking to. + DestinationName string `protobuf:"bytes,1,opt,name=destination_name,json=destinationName,proto3" json:"destination_name,omitempty"` + // Required. The URI of the app or site to open when the user taps the + // suggestion chip. 
+ Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_LinkOutSuggestion) Reset() { *m = Intent_Message_LinkOutSuggestion{} } +func (m *Intent_Message_LinkOutSuggestion) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_LinkOutSuggestion) ProtoMessage() {} +func (*Intent_Message_LinkOutSuggestion) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 9} +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Unmarshal(m, b) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_LinkOutSuggestion) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_LinkOutSuggestion.Merge(dst, src) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_Size() int { + return xxx_messageInfo_Intent_Message_LinkOutSuggestion.Size(m) +} +func (m *Intent_Message_LinkOutSuggestion) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_LinkOutSuggestion.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_LinkOutSuggestion proto.InternalMessageInfo + +func (m *Intent_Message_LinkOutSuggestion) GetDestinationName() string { + if m != nil { + return m.DestinationName + } + return "" +} + +func (m *Intent_Message_LinkOutSuggestion) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The card for presenting a list of options to select from. +type Intent_Message_ListSelect struct { + // Optional. The overall title of the list. + Title string `protobuf:"bytes,1,opt,name=title,proto3" json:"title,omitempty"` + // Required. List items. 
+ Items []*Intent_Message_ListSelect_Item `protobuf:"bytes,2,rep,name=items,proto3" json:"items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_ListSelect) Reset() { *m = Intent_Message_ListSelect{} } +func (m *Intent_Message_ListSelect) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_ListSelect) ProtoMessage() {} +func (*Intent_Message_ListSelect) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 10} +} +func (m *Intent_Message_ListSelect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_ListSelect.Unmarshal(m, b) +} +func (m *Intent_Message_ListSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_ListSelect.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_ListSelect) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_ListSelect.Merge(dst, src) +} +func (m *Intent_Message_ListSelect) XXX_Size() int { + return xxx_messageInfo_Intent_Message_ListSelect.Size(m) +} +func (m *Intent_Message_ListSelect) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_ListSelect.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_ListSelect proto.InternalMessageInfo + +func (m *Intent_Message_ListSelect) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_ListSelect) GetItems() []*Intent_Message_ListSelect_Item { + if m != nil { + return m.Items + } + return nil +} + +// An item in the list. +type Intent_Message_ListSelect_Item struct { + // Required. Additional information about this option. + Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` + // Required. The title of the list item. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The main text describing the item. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The image to display. 
+ Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_ListSelect_Item) Reset() { *m = Intent_Message_ListSelect_Item{} } +func (m *Intent_Message_ListSelect_Item) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_ListSelect_Item) ProtoMessage() {} +func (*Intent_Message_ListSelect_Item) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 10, 0} +} +func (m *Intent_Message_ListSelect_Item) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Unmarshal(m, b) +} +func (m *Intent_Message_ListSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_ListSelect_Item) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_ListSelect_Item.Merge(dst, src) +} +func (m *Intent_Message_ListSelect_Item) XXX_Size() int { + return xxx_messageInfo_Intent_Message_ListSelect_Item.Size(m) +} +func (m *Intent_Message_ListSelect_Item) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_ListSelect_Item.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_ListSelect_Item proto.InternalMessageInfo + +func (m *Intent_Message_ListSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Intent_Message_ListSelect_Item) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_ListSelect_Item) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Intent_Message_ListSelect_Item) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +// The card for presenting a carousel of options to select from. +type Intent_Message_CarouselSelect struct { + // Required. Carousel items. 
+ Items []*Intent_Message_CarouselSelect_Item `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_CarouselSelect) Reset() { *m = Intent_Message_CarouselSelect{} } +func (m *Intent_Message_CarouselSelect) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_CarouselSelect) ProtoMessage() {} +func (*Intent_Message_CarouselSelect) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 11} +} +func (m *Intent_Message_CarouselSelect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_CarouselSelect.Unmarshal(m, b) +} +func (m *Intent_Message_CarouselSelect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_CarouselSelect.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_CarouselSelect) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_CarouselSelect.Merge(dst, src) +} +func (m *Intent_Message_CarouselSelect) XXX_Size() int { + return xxx_messageInfo_Intent_Message_CarouselSelect.Size(m) +} +func (m *Intent_Message_CarouselSelect) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_CarouselSelect.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_CarouselSelect proto.InternalMessageInfo + +func (m *Intent_Message_CarouselSelect) GetItems() []*Intent_Message_CarouselSelect_Item { + if m != nil { + return m.Items + } + return nil +} + +// An item in the carousel. +type Intent_Message_CarouselSelect_Item struct { + // Required. Additional info about the option item. + Info *Intent_Message_SelectItemInfo `protobuf:"bytes,1,opt,name=info,proto3" json:"info,omitempty"` + // Required. Title of the carousel item. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. The body text of the card. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. The image to display. 
+ Image *Intent_Message_Image `protobuf:"bytes,4,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_CarouselSelect_Item) Reset() { *m = Intent_Message_CarouselSelect_Item{} } +func (m *Intent_Message_CarouselSelect_Item) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_CarouselSelect_Item) ProtoMessage() {} +func (*Intent_Message_CarouselSelect_Item) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 11, 0} +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Unmarshal(m, b) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_CarouselSelect_Item) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_CarouselSelect_Item.Merge(dst, src) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_Size() int { + return xxx_messageInfo_Intent_Message_CarouselSelect_Item.Size(m) +} +func (m *Intent_Message_CarouselSelect_Item) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_CarouselSelect_Item.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_CarouselSelect_Item proto.InternalMessageInfo + +func (m *Intent_Message_CarouselSelect_Item) GetInfo() *Intent_Message_SelectItemInfo { + if m != nil { + return m.Info + } + return nil +} + +func (m *Intent_Message_CarouselSelect_Item) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Intent_Message_CarouselSelect_Item) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Intent_Message_CarouselSelect_Item) GetImage() *Intent_Message_Image { + if m != nil { + return m.Image + } + return nil +} + +// Additional info about the select item for when it is triggered in a +// dialog. +type Intent_Message_SelectItemInfo struct { + // Required. A unique key that will be sent back to the agent if this + // response is given. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // Optional. A list of synonyms that can also be used to trigger this + // item in dialog. 
+ Synonyms []string `protobuf:"bytes,2,rep,name=synonyms,proto3" json:"synonyms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_SelectItemInfo) Reset() { *m = Intent_Message_SelectItemInfo{} } +func (m *Intent_Message_SelectItemInfo) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_SelectItemInfo) ProtoMessage() {} +func (*Intent_Message_SelectItemInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 12} +} +func (m *Intent_Message_SelectItemInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Unmarshal(m, b) +} +func (m *Intent_Message_SelectItemInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_SelectItemInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_SelectItemInfo.Merge(dst, src) +} +func (m *Intent_Message_SelectItemInfo) XXX_Size() int { + return xxx_messageInfo_Intent_Message_SelectItemInfo.Size(m) +} +func (m *Intent_Message_SelectItemInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_SelectItemInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_SelectItemInfo proto.InternalMessageInfo + +func (m *Intent_Message_SelectItemInfo) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Intent_Message_SelectItemInfo) GetSynonyms() []string { + if m != nil { + return m.Synonyms + } + return nil +} + +// Plays audio from a file in Telephony Gateway. +type Intent_Message_TelephonyPlayAudio struct { + // Required. URI to a Google Cloud Storage object containing the audio to + // play, e.g., "gs://bucket/object". The object must contain a single + // channel (mono) of linear PCM audio (2 bytes / sample) at 8kHz. + // + // This object must be readable by the `service-@gcp-sa-dialogflow.iam.gserviceaccount.com` service account + // where is the number of the Telephony Gateway project + // (usually the same as the Dialogflow agent project). If the Google Cloud + // Storage bucket is in the Telephony Gateway project, this permission is + // added by default when enabling the Dialogflow V2 API. + // + // For audio from other sources, consider using the + // `TelephonySynthesizeSpeech` message with SSML. 
+ AudioUri string `protobuf:"bytes,1,opt,name=audio_uri,json=audioUri,proto3" json:"audio_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_TelephonyPlayAudio) Reset() { *m = Intent_Message_TelephonyPlayAudio{} } +func (m *Intent_Message_TelephonyPlayAudio) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_TelephonyPlayAudio) ProtoMessage() {} +func (*Intent_Message_TelephonyPlayAudio) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 13} +} +func (m *Intent_Message_TelephonyPlayAudio) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_TelephonyPlayAudio.Unmarshal(m, b) +} +func (m *Intent_Message_TelephonyPlayAudio) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_TelephonyPlayAudio.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_TelephonyPlayAudio) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_TelephonyPlayAudio.Merge(dst, src) +} +func (m *Intent_Message_TelephonyPlayAudio) XXX_Size() int { + return xxx_messageInfo_Intent_Message_TelephonyPlayAudio.Size(m) +} +func (m *Intent_Message_TelephonyPlayAudio) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_TelephonyPlayAudio.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_TelephonyPlayAudio proto.InternalMessageInfo + +func (m *Intent_Message_TelephonyPlayAudio) GetAudioUri() string { + if m != nil { + return m.AudioUri + } + return "" +} + +// Synthesizes speech and plays back the synthesized audio to the caller in +// Telephony Gateway. +// +// Telephony Gateway takes the synthesizer settings from +// `DetectIntentResponse.output_audio_config` which can either be set +// at request-level or can come from the agent-level synthesizer config. +type Intent_Message_TelephonySynthesizeSpeech struct { + // Required. The source to be synthesized. 
+ // + // Types that are valid to be assigned to Source: + // *Intent_Message_TelephonySynthesizeSpeech_Text + // *Intent_Message_TelephonySynthesizeSpeech_Ssml + Source isIntent_Message_TelephonySynthesizeSpeech_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_TelephonySynthesizeSpeech) Reset() { + *m = Intent_Message_TelephonySynthesizeSpeech{} +} +func (m *Intent_Message_TelephonySynthesizeSpeech) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_TelephonySynthesizeSpeech) ProtoMessage() {} +func (*Intent_Message_TelephonySynthesizeSpeech) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 14} +} +func (m *Intent_Message_TelephonySynthesizeSpeech) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech.Unmarshal(m, b) +} +func (m *Intent_Message_TelephonySynthesizeSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_TelephonySynthesizeSpeech) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech.Merge(dst, src) +} +func (m *Intent_Message_TelephonySynthesizeSpeech) XXX_Size() int { + return xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech.Size(m) +} +func (m *Intent_Message_TelephonySynthesizeSpeech) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_TelephonySynthesizeSpeech proto.InternalMessageInfo + +type isIntent_Message_TelephonySynthesizeSpeech_Source interface { + isIntent_Message_TelephonySynthesizeSpeech_Source() +} + +type Intent_Message_TelephonySynthesizeSpeech_Text struct { + Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type Intent_Message_TelephonySynthesizeSpeech_Ssml struct { + Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3,oneof"` +} + +func (*Intent_Message_TelephonySynthesizeSpeech_Text) isIntent_Message_TelephonySynthesizeSpeech_Source() { +} + +func (*Intent_Message_TelephonySynthesizeSpeech_Ssml) isIntent_Message_TelephonySynthesizeSpeech_Source() { +} + +func (m *Intent_Message_TelephonySynthesizeSpeech) GetSource() isIntent_Message_TelephonySynthesizeSpeech_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Intent_Message_TelephonySynthesizeSpeech) GetText() string { + if x, ok := m.GetSource().(*Intent_Message_TelephonySynthesizeSpeech_Text); ok { + return x.Text + } + return "" +} + +func (m *Intent_Message_TelephonySynthesizeSpeech) GetSsml() string { + if x, ok := m.GetSource().(*Intent_Message_TelephonySynthesizeSpeech_Ssml); ok { + return x.Ssml + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Intent_Message_TelephonySynthesizeSpeech) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Intent_Message_TelephonySynthesizeSpeech_OneofMarshaler, _Intent_Message_TelephonySynthesizeSpeech_OneofUnmarshaler, _Intent_Message_TelephonySynthesizeSpeech_OneofSizer, []interface{}{ + (*Intent_Message_TelephonySynthesizeSpeech_Text)(nil), + (*Intent_Message_TelephonySynthesizeSpeech_Ssml)(nil), + } +} + +func _Intent_Message_TelephonySynthesizeSpeech_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Intent_Message_TelephonySynthesizeSpeech) + // source + switch x := m.Source.(type) { + case *Intent_Message_TelephonySynthesizeSpeech_Text: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case *Intent_Message_TelephonySynthesizeSpeech_Ssml: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Ssml) + case nil: + default: + return fmt.Errorf("Intent_Message_TelephonySynthesizeSpeech.Source has unexpected type %T", x) + } + return nil +} + +func _Intent_Message_TelephonySynthesizeSpeech_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Intent_Message_TelephonySynthesizeSpeech) + switch tag { + case 1: // source.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Intent_Message_TelephonySynthesizeSpeech_Text{x} + return true, err + case 2: // source.ssml + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Intent_Message_TelephonySynthesizeSpeech_Ssml{x} + return true, err + default: + return false, nil + } +} + +func _Intent_Message_TelephonySynthesizeSpeech_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Intent_Message_TelephonySynthesizeSpeech) + // source + switch x := m.Source.(type) { + case *Intent_Message_TelephonySynthesizeSpeech_Text: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case *Intent_Message_TelephonySynthesizeSpeech_Ssml: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Ssml))) + n += len(x.Ssml) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Transfers the call in Telephony Gateway. +type Intent_Message_TelephonyTransferCall struct { + // Required. The phone number to transfer the call to + // in [E.164 format](https://en.wikipedia.org/wiki/E.164). + // + // We currently only allow transferring to US numbers (+1xxxyyyzzzz). 
+ PhoneNumber string `protobuf:"bytes,1,opt,name=phone_number,json=phoneNumber,proto3" json:"phone_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_Message_TelephonyTransferCall) Reset() { *m = Intent_Message_TelephonyTransferCall{} } +func (m *Intent_Message_TelephonyTransferCall) String() string { return proto.CompactTextString(m) } +func (*Intent_Message_TelephonyTransferCall) ProtoMessage() {} +func (*Intent_Message_TelephonyTransferCall) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 2, 15} +} +func (m *Intent_Message_TelephonyTransferCall) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_Message_TelephonyTransferCall.Unmarshal(m, b) +} +func (m *Intent_Message_TelephonyTransferCall) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_Message_TelephonyTransferCall.Marshal(b, m, deterministic) +} +func (dst *Intent_Message_TelephonyTransferCall) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_Message_TelephonyTransferCall.Merge(dst, src) +} +func (m *Intent_Message_TelephonyTransferCall) XXX_Size() int { + return xxx_messageInfo_Intent_Message_TelephonyTransferCall.Size(m) +} +func (m *Intent_Message_TelephonyTransferCall) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_Message_TelephonyTransferCall.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_Message_TelephonyTransferCall proto.InternalMessageInfo + +func (m *Intent_Message_TelephonyTransferCall) GetPhoneNumber() string { + if m != nil { + return m.PhoneNumber + } + return "" +} + +// Represents a single followup intent in the chain. +type Intent_FollowupIntentInfo struct { + // The unique identifier of the followup intent. + // Format: `projects//agent/intents/`. + FollowupIntentName string `protobuf:"bytes,1,opt,name=followup_intent_name,json=followupIntentName,proto3" json:"followup_intent_name,omitempty"` + // The unique identifier of the followup intent's parent. + // Format: `projects//agent/intents/`. 
+ ParentFollowupIntentName string `protobuf:"bytes,2,opt,name=parent_followup_intent_name,json=parentFollowupIntentName,proto3" json:"parent_followup_intent_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Intent_FollowupIntentInfo) Reset() { *m = Intent_FollowupIntentInfo{} } +func (m *Intent_FollowupIntentInfo) String() string { return proto.CompactTextString(m) } +func (*Intent_FollowupIntentInfo) ProtoMessage() {} +func (*Intent_FollowupIntentInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{0, 3} +} +func (m *Intent_FollowupIntentInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Intent_FollowupIntentInfo.Unmarshal(m, b) +} +func (m *Intent_FollowupIntentInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Intent_FollowupIntentInfo.Marshal(b, m, deterministic) +} +func (dst *Intent_FollowupIntentInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Intent_FollowupIntentInfo.Merge(dst, src) +} +func (m *Intent_FollowupIntentInfo) XXX_Size() int { + return xxx_messageInfo_Intent_FollowupIntentInfo.Size(m) +} +func (m *Intent_FollowupIntentInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Intent_FollowupIntentInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Intent_FollowupIntentInfo proto.InternalMessageInfo + +func (m *Intent_FollowupIntentInfo) GetFollowupIntentName() string { + if m != nil { + return m.FollowupIntentName + } + return "" +} + +func (m *Intent_FollowupIntentInfo) GetParentFollowupIntentName() string { + if m != nil { + return m.ParentFollowupIntentName + } + return "" +} + +// The request message for [Intents.ListIntents][google.cloud.dialogflow.v2beta1.Intents.ListIntents]. +type ListIntentsRequest struct { + // Required. The agent to list all intents from. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language to list training phrases, parameters and rich + // messages for. If not specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. + IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2beta1.IntentView" json:"intent_view,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIntentsRequest) Reset() { *m = ListIntentsRequest{} } +func (m *ListIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListIntentsRequest) ProtoMessage() {} +func (*ListIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{1} +} +func (m *ListIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIntentsRequest.Unmarshal(m, b) +} +func (m *ListIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIntentsRequest.Merge(dst, src) +} +func (m *ListIntentsRequest) XXX_Size() int { + return xxx_messageInfo_ListIntentsRequest.Size(m) +} +func (m *ListIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIntentsRequest proto.InternalMessageInfo + +func (m *ListIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListIntentsRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *ListIntentsRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +func (m *ListIntentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIntentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Intents.ListIntents][google.cloud.dialogflow.v2beta1.Intents.ListIntents]. +type ListIntentsResponse struct { + // The list of agent intents. There will be a maximum number of items + // returned based on the page_size field in the request. + Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIntentsResponse) Reset() { *m = ListIntentsResponse{} } +func (m *ListIntentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListIntentsResponse) ProtoMessage() {} +func (*ListIntentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{2} +} +func (m *ListIntentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIntentsResponse.Unmarshal(m, b) +} +func (m *ListIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIntentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListIntentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIntentsResponse.Merge(dst, src) +} +func (m *ListIntentsResponse) XXX_Size() int { + return xxx_messageInfo_ListIntentsResponse.Size(m) +} +func (m *ListIntentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIntentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIntentsResponse proto.InternalMessageInfo + +func (m *ListIntentsResponse) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +func (m *ListIntentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Intents.GetIntent][google.cloud.dialogflow.v2beta1.Intents.GetIntent]. +type GetIntentRequest struct { + // Required. The name of the intent. + // Format: `projects//agent/intents/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The language to retrieve training phrases, parameters and rich + // messages for. If not specified, the agent's default language is used. + // [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,3,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2beta1.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIntentRequest) Reset() { *m = GetIntentRequest{} } +func (m *GetIntentRequest) String() string { return proto.CompactTextString(m) } +func (*GetIntentRequest) ProtoMessage() {} +func (*GetIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{3} +} +func (m *GetIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIntentRequest.Unmarshal(m, b) +} +func (m *GetIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIntentRequest.Marshal(b, m, deterministic) +} +func (dst *GetIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIntentRequest.Merge(dst, src) +} +func (m *GetIntentRequest) XXX_Size() int { + return xxx_messageInfo_GetIntentRequest.Size(m) +} +func (m *GetIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIntentRequest proto.InternalMessageInfo + +func (m *GetIntentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *GetIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.CreateIntent][google.cloud.dialogflow.v2beta1.Intents.CreateIntent]. +type CreateIntentRequest struct { + // Required. The agent to create a intent for. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The intent to create. + Intent *Intent `protobuf:"bytes,2,opt,name=intent,proto3" json:"intent,omitempty"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intent`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2beta1.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIntentRequest) Reset() { *m = CreateIntentRequest{} } +func (m *CreateIntentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIntentRequest) ProtoMessage() {} +func (*CreateIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{4} +} +func (m *CreateIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIntentRequest.Unmarshal(m, b) +} +func (m *CreateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIntentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIntentRequest.Merge(dst, src) +} +func (m *CreateIntentRequest) XXX_Size() int { + return xxx_messageInfo_CreateIntentRequest.Size(m) +} +func (m *CreateIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIntentRequest proto.InternalMessageInfo + +func (m *CreateIntentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateIntentRequest) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *CreateIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *CreateIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.UpdateIntent][google.cloud.dialogflow.v2beta1.Intents.UpdateIntent]. +type UpdateIntentRequest struct { + // Required. The intent to update. + Intent *Intent `protobuf:"bytes,1,opt,name=intent,proto3" json:"intent,omitempty"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intent`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,4,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2beta1.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateIntentRequest) Reset() { *m = UpdateIntentRequest{} } +func (m *UpdateIntentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateIntentRequest) ProtoMessage() {} +func (*UpdateIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{5} +} +func (m *UpdateIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateIntentRequest.Unmarshal(m, b) +} +func (m *UpdateIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateIntentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateIntentRequest.Merge(dst, src) +} +func (m *UpdateIntentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateIntentRequest.Size(m) +} +func (m *UpdateIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateIntentRequest proto.InternalMessageInfo + +func (m *UpdateIntentRequest) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *UpdateIntentRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *UpdateIntentRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateIntentRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// The request message for [Intents.DeleteIntent][google.cloud.dialogflow.v2beta1.Intents.DeleteIntent]. +type DeleteIntentRequest struct { + // Required. The name of the intent to delete. If this intent has direct or + // indirect followup intents, we also delete them. + // + // Format: `projects//agent/intents/`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteIntentRequest) Reset() { *m = DeleteIntentRequest{} } +func (m *DeleteIntentRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteIntentRequest) ProtoMessage() {} +func (*DeleteIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{6} +} +func (m *DeleteIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteIntentRequest.Unmarshal(m, b) +} +func (m *DeleteIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteIntentRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteIntentRequest.Merge(dst, src) +} +func (m *DeleteIntentRequest) XXX_Size() int { + return xxx_messageInfo_DeleteIntentRequest.Size(m) +} +func (m *DeleteIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteIntentRequest proto.InternalMessageInfo + +func (m *DeleteIntentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2beta1.Intents.BatchUpdateIntents]. +type BatchUpdateIntentsRequest struct { + // Required. The name of the agent to update or create intents in. + // Format: `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The source of the intent batch. + // + // Types that are valid to be assigned to IntentBatch: + // *BatchUpdateIntentsRequest_IntentBatchUri + // *BatchUpdateIntentsRequest_IntentBatchInline + IntentBatch isBatchUpdateIntentsRequest_IntentBatch `protobuf_oneof:"intent_batch"` + // Optional. The language of training phrases, parameters and rich messages + // defined in `intents`. If not specified, the agent's default language is + // used. [Many + // languages](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // are supported. Note: languages must be enabled in the agent before they can + // be used. + LanguageCode string `protobuf:"bytes,4,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. The mask to control which fields get updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Optional. The resource view to apply to the returned intent. 
+ IntentView IntentView `protobuf:"varint,6,opt,name=intent_view,json=intentView,proto3,enum=google.cloud.dialogflow.v2beta1.IntentView" json:"intent_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateIntentsRequest) Reset() { *m = BatchUpdateIntentsRequest{} } +func (m *BatchUpdateIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateIntentsRequest) ProtoMessage() {} +func (*BatchUpdateIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{7} +} +func (m *BatchUpdateIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateIntentsRequest.Unmarshal(m, b) +} +func (m *BatchUpdateIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateIntentsRequest.Merge(dst, src) +} +func (m *BatchUpdateIntentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateIntentsRequest.Size(m) +} +func (m *BatchUpdateIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateIntentsRequest proto.InternalMessageInfo + +func (m *BatchUpdateIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isBatchUpdateIntentsRequest_IntentBatch interface { + isBatchUpdateIntentsRequest_IntentBatch() +} + +type BatchUpdateIntentsRequest_IntentBatchUri struct { + IntentBatchUri string `protobuf:"bytes,2,opt,name=intent_batch_uri,json=intentBatchUri,proto3,oneof"` +} + +type BatchUpdateIntentsRequest_IntentBatchInline struct { + IntentBatchInline *IntentBatch `protobuf:"bytes,3,opt,name=intent_batch_inline,json=intentBatchInline,proto3,oneof"` +} + +func (*BatchUpdateIntentsRequest_IntentBatchUri) isBatchUpdateIntentsRequest_IntentBatch() {} + +func (*BatchUpdateIntentsRequest_IntentBatchInline) isBatchUpdateIntentsRequest_IntentBatch() {} + +func (m *BatchUpdateIntentsRequest) GetIntentBatch() isBatchUpdateIntentsRequest_IntentBatch { + if m != nil { + return m.IntentBatch + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetIntentBatchUri() string { + if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchUri); ok { + return x.IntentBatchUri + } + return "" +} + +func (m *BatchUpdateIntentsRequest) GetIntentBatchInline() *IntentBatch { + if x, ok := m.GetIntentBatch().(*BatchUpdateIntentsRequest_IntentBatchInline); ok { + return x.IntentBatchInline + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *BatchUpdateIntentsRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *BatchUpdateIntentsRequest) GetIntentView() IntentView { + if m != nil { + return m.IntentView + } + return IntentView_INTENT_VIEW_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BatchUpdateIntentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchUpdateIntentsRequest_OneofMarshaler, _BatchUpdateIntentsRequest_OneofUnmarshaler, _BatchUpdateIntentsRequest_OneofSizer, []interface{}{ + (*BatchUpdateIntentsRequest_IntentBatchUri)(nil), + (*BatchUpdateIntentsRequest_IntentBatchInline)(nil), + } +} + +func _BatchUpdateIntentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchUpdateIntentsRequest) + // intent_batch + switch x := m.IntentBatch.(type) { + case *BatchUpdateIntentsRequest_IntentBatchUri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.IntentBatchUri) + case *BatchUpdateIntentsRequest_IntentBatchInline: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.IntentBatchInline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchUpdateIntentsRequest.IntentBatch has unexpected type %T", x) + } + return nil +} + +func _BatchUpdateIntentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchUpdateIntentsRequest) + switch tag { + case 2: // intent_batch.intent_batch_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IntentBatch = &BatchUpdateIntentsRequest_IntentBatchUri{x} + return true, err + case 3: // intent_batch.intent_batch_inline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(IntentBatch) + err := b.DecodeMessage(msg) + m.IntentBatch = &BatchUpdateIntentsRequest_IntentBatchInline{msg} + return true, err + default: + return false, nil + } +} + +func _BatchUpdateIntentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchUpdateIntentsRequest) + // intent_batch + switch x := m.IntentBatch.(type) { + case *BatchUpdateIntentsRequest_IntentBatchUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.IntentBatchUri))) + n += len(x.IntentBatchUri) + case *BatchUpdateIntentsRequest_IntentBatchInline: + s := proto.Size(x.IntentBatchInline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response message for [Intents.BatchUpdateIntents][google.cloud.dialogflow.v2beta1.Intents.BatchUpdateIntents]. +type BatchUpdateIntentsResponse struct { + // The collection of updated or created intents. 
+ Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateIntentsResponse) Reset() { *m = BatchUpdateIntentsResponse{} } +func (m *BatchUpdateIntentsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateIntentsResponse) ProtoMessage() {} +func (*BatchUpdateIntentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{8} +} +func (m *BatchUpdateIntentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateIntentsResponse.Unmarshal(m, b) +} +func (m *BatchUpdateIntentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateIntentsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateIntentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateIntentsResponse.Merge(dst, src) +} +func (m *BatchUpdateIntentsResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdateIntentsResponse.Size(m) +} +func (m *BatchUpdateIntentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateIntentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateIntentsResponse proto.InternalMessageInfo + +func (m *BatchUpdateIntentsResponse) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +// The request message for [Intents.BatchDeleteIntents][google.cloud.dialogflow.v2beta1.Intents.BatchDeleteIntents]. +type BatchDeleteIntentsRequest struct { + // Required. The name of the agent to delete all entities types for. Format: + // `projects//agent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The collection of intents to delete. Only intent `name` must be + // filled in. + Intents []*Intent `protobuf:"bytes,2,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteIntentsRequest) Reset() { *m = BatchDeleteIntentsRequest{} } +func (m *BatchDeleteIntentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteIntentsRequest) ProtoMessage() {} +func (*BatchDeleteIntentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{9} +} +func (m *BatchDeleteIntentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteIntentsRequest.Unmarshal(m, b) +} +func (m *BatchDeleteIntentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteIntentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteIntentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteIntentsRequest.Merge(dst, src) +} +func (m *BatchDeleteIntentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteIntentsRequest.Size(m) +} +func (m *BatchDeleteIntentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteIntentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteIntentsRequest proto.InternalMessageInfo + +func (m *BatchDeleteIntentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteIntentsRequest) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +// This message is a wrapper around a collection of intents. 
+type IntentBatch struct { + // A collection of intents. + Intents []*Intent `protobuf:"bytes,1,rep,name=intents,proto3" json:"intents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IntentBatch) Reset() { *m = IntentBatch{} } +func (m *IntentBatch) String() string { return proto.CompactTextString(m) } +func (*IntentBatch) ProtoMessage() {} +func (*IntentBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_intent_2d72c44f592bb8da, []int{10} +} +func (m *IntentBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IntentBatch.Unmarshal(m, b) +} +func (m *IntentBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IntentBatch.Marshal(b, m, deterministic) +} +func (dst *IntentBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_IntentBatch.Merge(dst, src) +} +func (m *IntentBatch) XXX_Size() int { + return xxx_messageInfo_IntentBatch.Size(m) +} +func (m *IntentBatch) XXX_DiscardUnknown() { + xxx_messageInfo_IntentBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_IntentBatch proto.InternalMessageInfo + +func (m *IntentBatch) GetIntents() []*Intent { + if m != nil { + return m.Intents + } + return nil +} + +func init() { + proto.RegisterType((*Intent)(nil), "google.cloud.dialogflow.v2beta1.Intent") + proto.RegisterType((*Intent_TrainingPhrase)(nil), "google.cloud.dialogflow.v2beta1.Intent.TrainingPhrase") + proto.RegisterType((*Intent_TrainingPhrase_Part)(nil), "google.cloud.dialogflow.v2beta1.Intent.TrainingPhrase.Part") + proto.RegisterType((*Intent_Parameter)(nil), "google.cloud.dialogflow.v2beta1.Intent.Parameter") + proto.RegisterType((*Intent_Message)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message") + proto.RegisterType((*Intent_Message_Text)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Text") + proto.RegisterType((*Intent_Message_Image)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Image") + proto.RegisterType((*Intent_Message_QuickReplies)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.QuickReplies") + proto.RegisterType((*Intent_Message_Card)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Card") + proto.RegisterType((*Intent_Message_Card_Button)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Card.Button") + proto.RegisterType((*Intent_Message_SimpleResponse)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.SimpleResponse") + proto.RegisterType((*Intent_Message_SimpleResponses)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.SimpleResponses") + proto.RegisterType((*Intent_Message_BasicCard)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.BasicCard") + proto.RegisterType((*Intent_Message_BasicCard_Button)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.BasicCard.Button") + proto.RegisterType((*Intent_Message_BasicCard_Button_OpenUriAction)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.BasicCard.Button.OpenUriAction") + proto.RegisterType((*Intent_Message_Suggestion)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Suggestion") + proto.RegisterType((*Intent_Message_Suggestions)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.Suggestions") + proto.RegisterType((*Intent_Message_LinkOutSuggestion)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.LinkOutSuggestion") + proto.RegisterType((*Intent_Message_ListSelect)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.ListSelect") + proto.RegisterType((*Intent_Message_ListSelect_Item)(nil), 
"google.cloud.dialogflow.v2beta1.Intent.Message.ListSelect.Item") + proto.RegisterType((*Intent_Message_CarouselSelect)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.CarouselSelect") + proto.RegisterType((*Intent_Message_CarouselSelect_Item)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.CarouselSelect.Item") + proto.RegisterType((*Intent_Message_SelectItemInfo)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.SelectItemInfo") + proto.RegisterType((*Intent_Message_TelephonyPlayAudio)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.TelephonyPlayAudio") + proto.RegisterType((*Intent_Message_TelephonySynthesizeSpeech)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.TelephonySynthesizeSpeech") + proto.RegisterType((*Intent_Message_TelephonyTransferCall)(nil), "google.cloud.dialogflow.v2beta1.Intent.Message.TelephonyTransferCall") + proto.RegisterType((*Intent_FollowupIntentInfo)(nil), "google.cloud.dialogflow.v2beta1.Intent.FollowupIntentInfo") + proto.RegisterType((*ListIntentsRequest)(nil), "google.cloud.dialogflow.v2beta1.ListIntentsRequest") + proto.RegisterType((*ListIntentsResponse)(nil), "google.cloud.dialogflow.v2beta1.ListIntentsResponse") + proto.RegisterType((*GetIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.GetIntentRequest") + proto.RegisterType((*CreateIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateIntentRequest") + proto.RegisterType((*UpdateIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateIntentRequest") + proto.RegisterType((*DeleteIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteIntentRequest") + proto.RegisterType((*BatchUpdateIntentsRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchUpdateIntentsRequest") + proto.RegisterType((*BatchUpdateIntentsResponse)(nil), "google.cloud.dialogflow.v2beta1.BatchUpdateIntentsResponse") + proto.RegisterType((*BatchDeleteIntentsRequest)(nil), "google.cloud.dialogflow.v2beta1.BatchDeleteIntentsRequest") + proto.RegisterType((*IntentBatch)(nil), "google.cloud.dialogflow.v2beta1.IntentBatch") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.IntentView", IntentView_name, IntentView_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.Intent_WebhookState", Intent_WebhookState_name, Intent_WebhookState_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.Intent_TrainingPhrase_Type", Intent_TrainingPhrase_Type_name, Intent_TrainingPhrase_Type_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.Intent_Message_Platform", Intent_Message_Platform_name, Intent_Message_Platform_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// IntentsClient is the client API for Intents service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type IntentsClient interface { + // Returns the list of all intents in the specified agent. + ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) + // Retrieves the specified intent. + GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Creates an intent in the specified agent. 
+ CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Updates the specified intent. + UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) + // Deletes the specified intent and its direct or indirect followup intents. + DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates/Creates multiple intents in the specified agent. + // + // Operation + BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes intents in the specified agent. + // + // Operation + BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type intentsClient struct { + cc *grpc.ClientConn +} + +func NewIntentsClient(cc *grpc.ClientConn) IntentsClient { + return &intentsClient{cc} +} + +func (c *intentsClient) ListIntents(ctx context.Context, in *ListIntentsRequest, opts ...grpc.CallOption) (*ListIntentsResponse, error) { + out := new(ListIntentsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/ListIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) GetIntent(ctx context.Context, in *GetIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/GetIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) CreateIntent(ctx context.Context, in *CreateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/CreateIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) UpdateIntent(ctx context.Context, in *UpdateIntentRequest, opts ...grpc.CallOption) (*Intent, error) { + out := new(Intent) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/UpdateIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) DeleteIntent(ctx context.Context, in *DeleteIntentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/DeleteIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) BatchUpdateIntents(ctx context.Context, in *BatchUpdateIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/BatchUpdateIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *intentsClient) BatchDeleteIntents(ctx context.Context, in *BatchDeleteIntentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Intents/BatchDeleteIntents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// IntentsServer is the server API for Intents service. +type IntentsServer interface { + // Returns the list of all intents in the specified agent. 
+ ListIntents(context.Context, *ListIntentsRequest) (*ListIntentsResponse, error) + // Retrieves the specified intent. + GetIntent(context.Context, *GetIntentRequest) (*Intent, error) + // Creates an intent in the specified agent. + CreateIntent(context.Context, *CreateIntentRequest) (*Intent, error) + // Updates the specified intent. + UpdateIntent(context.Context, *UpdateIntentRequest) (*Intent, error) + // Deletes the specified intent and its direct or indirect followup intents. + DeleteIntent(context.Context, *DeleteIntentRequest) (*empty.Empty, error) + // Updates/Creates multiple intents in the specified agent. + // + // Operation + BatchUpdateIntents(context.Context, *BatchUpdateIntentsRequest) (*longrunning.Operation, error) + // Deletes intents in the specified agent. + // + // Operation + BatchDeleteIntents(context.Context, *BatchDeleteIntentsRequest) (*longrunning.Operation, error) +} + +func RegisterIntentsServer(s *grpc.Server, srv IntentsServer) { + s.RegisterService(&_Intents_serviceDesc, srv) +} + +func _Intents_ListIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).ListIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/ListIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).ListIntents(ctx, req.(*ListIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_GetIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).GetIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/GetIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).GetIntent(ctx, req.(*GetIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_CreateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).CreateIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/CreateIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).CreateIntent(ctx, req.(*CreateIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_UpdateIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).UpdateIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/UpdateIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + 
return srv.(IntentsServer).UpdateIntent(ctx, req.(*UpdateIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_DeleteIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).DeleteIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/DeleteIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).DeleteIntent(ctx, req.(*DeleteIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_BatchUpdateIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).BatchUpdateIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/BatchUpdateIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).BatchUpdateIntents(ctx, req.(*BatchUpdateIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Intents_BatchDeleteIntents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteIntentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IntentsServer).BatchDeleteIntents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Intents/BatchDeleteIntents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IntentsServer).BatchDeleteIntents(ctx, req.(*BatchDeleteIntentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Intents_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.Intents", + HandlerType: (*IntentsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListIntents", + Handler: _Intents_ListIntents_Handler, + }, + { + MethodName: "GetIntent", + Handler: _Intents_GetIntent_Handler, + }, + { + MethodName: "CreateIntent", + Handler: _Intents_CreateIntent_Handler, + }, + { + MethodName: "UpdateIntent", + Handler: _Intents_UpdateIntent_Handler, + }, + { + MethodName: "DeleteIntent", + Handler: _Intents_DeleteIntent_Handler, + }, + { + MethodName: "BatchUpdateIntents", + Handler: _Intents_BatchUpdateIntents_Handler, + }, + { + MethodName: "BatchDeleteIntents", + Handler: _Intents_BatchDeleteIntents_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/intent.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/intent.proto", fileDescriptor_intent_2d72c44f592bb8da) +} + +var fileDescriptor_intent_2d72c44f592bb8da = []byte{ + // 2831 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x5a, 0xcd, 0x73, 0x23, 0x57, + 0xb5, 0x77, 0xeb, 0xc3, 0x96, 0x8e, 0x64, 0x59, 0xbe, 0xf6, 0xcc, 0xf4, 0xf4, 0xe4, 0xc3, 0x51, + 0x5e, 0x12, 0xc7, 0x2f, 0x91, 0xde, 0x38, 0x79, 0x79, 0xc9, 
0xcc, 0x4b, 0x82, 0x64, 0xcb, 0x23, + 0x61, 0xd9, 0xd2, 0xb4, 0xe5, 0x09, 0x93, 0x02, 0xba, 0x5a, 0xd2, 0xb5, 0xa6, 0x71, 0xab, 0xbb, + 0xd3, 0xf7, 0xca, 0x13, 0x05, 0x52, 0x50, 0x54, 0x01, 0x0b, 0x36, 0x54, 0xb1, 0x02, 0x36, 0x14, + 0x1b, 0x2a, 0x14, 0x0b, 0x2a, 0x3b, 0xfe, 0x04, 0x16, 0xac, 0x58, 0x66, 0x41, 0x51, 0x45, 0xb1, + 0xe0, 0x2f, 0x60, 0x07, 0x75, 0x3f, 0x5a, 0x6a, 0x59, 0x1a, 0x2c, 0xd9, 0x03, 0x1b, 0x76, 0xba, + 0xe7, 0x9c, 0xfb, 0x3b, 0x5f, 0xf7, 0x9e, 0x73, 0xfa, 0xda, 0xf0, 0x5a, 0xd7, 0x75, 0xbb, 0x36, + 0x2e, 0xb4, 0x6d, 0xb7, 0xdf, 0x29, 0x74, 0x2c, 0xd3, 0x76, 0xbb, 0x27, 0xb6, 0xfb, 0xb8, 0x70, + 0xb6, 0xdd, 0xc2, 0xd4, 0xbc, 0x5d, 0xb0, 0x1c, 0x8a, 0x1d, 0x9a, 0xf7, 0x7c, 0x97, 0xba, 0xe8, + 0x79, 0x21, 0x9d, 0xe7, 0xd2, 0xf9, 0x91, 0x74, 0x5e, 0x4a, 0x6b, 0xcf, 0x48, 0x38, 0xd3, 0xb3, + 0x0a, 0xa6, 0xe3, 0xb8, 0xd4, 0xa4, 0x96, 0xeb, 0x10, 0xb1, 0x5d, 0xbb, 0x19, 0xe2, 0xfa, 0x98, + 0xb8, 0x7d, 0xbf, 0x8d, 0x25, 0xeb, 0xf5, 0x8b, 0xec, 0x68, 0xbb, 0x0e, 0xc5, 0x1f, 0x4b, 0x43, + 0xb4, 0x17, 0xa5, 0xb8, 0xed, 0x3a, 0x5d, 0xbf, 0xef, 0x38, 0x96, 0xd3, 0x2d, 0xb8, 0x1e, 0xf6, + 0xc7, 0xd4, 0x3d, 0x27, 0x85, 0xf8, 0xaa, 0xd5, 0x3f, 0x29, 0x74, 0xfa, 0x42, 0x40, 0xf2, 0x6f, + 0x9d, 0xe7, 0xe3, 0x9e, 0x47, 0x07, 0x92, 0xb9, 0x71, 0x9e, 0x79, 0x62, 0x61, 0xbb, 0x63, 0xf4, + 0x4c, 0x72, 0x2a, 0x25, 0x9e, 0x39, 0x2f, 0x41, 0xa8, 0xdf, 0x6f, 0x4b, 0x0b, 0x73, 0x3f, 0x2f, + 0xc0, 0x62, 0x95, 0xc7, 0x0e, 0x21, 0x88, 0x39, 0x66, 0x0f, 0xab, 0xca, 0x86, 0xb2, 0x99, 0xd4, + 0xf9, 0x6f, 0xf4, 0x02, 0xa4, 0x3b, 0x16, 0xf1, 0x6c, 0x73, 0x60, 0x70, 0x5e, 0x84, 0xf3, 0x52, + 0x92, 0x76, 0xc8, 0x44, 0x1e, 0xc2, 0xf2, 0x63, 0xdc, 0x7a, 0xe4, 0xba, 0xa7, 0x06, 0xa1, 0x26, + 0xc5, 0xea, 0xe2, 0x86, 0xb2, 0x99, 0xd9, 0x7e, 0x33, 0x7f, 0x41, 0x12, 0xf2, 0x42, 0x6d, 0xfe, + 0x03, 0xb1, 0xf9, 0x88, 0xed, 0xd5, 0xd3, 0x8f, 0x43, 0x2b, 0xa4, 0x41, 0xc2, 0xf3, 0x2d, 0xd7, + 0xb7, 0xe8, 0x40, 0x8d, 0x6e, 0x28, 0x9b, 0x71, 0x7d, 0xb8, 0x46, 0xcf, 0x43, 0xca, 0x22, 0xc6, + 0x89, 0x69, 0xdb, 0x2d, 0xb3, 0x7d, 0xaa, 0xc6, 0x36, 0x94, 0xcd, 0x84, 0x0e, 0x16, 0xd9, 0x93, + 0x14, 0xf4, 0x02, 0x40, 0xcf, 0x36, 0xb0, 0x63, 0xb6, 0x6c, 0xdc, 0x51, 0xe3, 0x8c, 0x5f, 0x8a, + 0xa8, 0x8a, 0x9e, 0xec, 0xd9, 0x65, 0x41, 0x64, 0x18, 0x3d, 0xdb, 0xe8, 0x58, 0x44, 0xc8, 0xac, + 0x09, 0x8c, 0x9e, 0xbd, 0x2b, 0x29, 0xe8, 0x15, 0x58, 0xc1, 0x4e, 0xc7, 0x60, 0x87, 0xcb, 0x37, + 0xdb, 0x2c, 0x27, 0xea, 0x35, 0x2e, 0x94, 0xc1, 0x4e, 0xa7, 0x3a, 0xa2, 0xa2, 0x3c, 0xac, 0x59, + 0x8e, 0xd7, 0xa7, 0x86, 0xcc, 0x3f, 0x8f, 0x16, 0x51, 0x97, 0x36, 0xa2, 0x9b, 0x49, 0x7d, 0x95, + 0xb3, 0x76, 0x04, 0x87, 0xc5, 0x8c, 0xa0, 0xeb, 0xb0, 0x88, 0xcf, 0xb0, 0x43, 0x89, 0x9a, 0xe0, + 0x22, 0x72, 0x85, 0x4c, 0xc8, 0x52, 0xdf, 0xb4, 0xd8, 0x41, 0x31, 0xbc, 0x47, 0xbe, 0x49, 0x30, + 0x51, 0x93, 0x1b, 0xd1, 0xcd, 0xd4, 0xf6, 0x5b, 0xb3, 0xc6, 0xb3, 0x29, 0xf7, 0x37, 0xf8, 0x76, + 0x7d, 0x85, 0x8e, 0xad, 0xb9, 0x6a, 0xe9, 0x0a, 0xf0, 0x64, 0xca, 0x15, 0xba, 0x0f, 0x2b, 0x6e, + 0x9f, 0x86, 0x7c, 0x20, 0x6a, 0x8a, 0x6b, 0xde, 0xbc, 0x50, 0xb3, 0x74, 0x4d, 0xcf, 0x08, 0x00, + 0xb9, 0x24, 0xe8, 0x25, 0xc8, 0xf8, 0x98, 0xe0, 0x10, 0x62, 0x9a, 0x47, 0x6f, 0x99, 0x53, 0x87, + 0x62, 0xf7, 0x01, 0x3c, 0xd3, 0x37, 0x7b, 0x98, 0x62, 0x9f, 0xa8, 0xcb, 0x5c, 0xe9, 0xed, 0x59, + 0xdd, 0x6d, 0x04, 0x3b, 0xf5, 0x10, 0x08, 0xda, 0x87, 0x44, 0x0f, 0x13, 0x62, 0x76, 0x31, 0x51, + 0x33, 0x1c, 0xb0, 0x30, 0x2b, 0xe0, 0x81, 0xd8, 0xa7, 0x0f, 0x01, 0xd0, 0x19, 0x68, 0x1d, 0x7c, + 0x62, 0xf6, 0x6d, 0x6a, 0xf8, 0x98, 0x78, 0xae, 0x43, 0xb0, 0xe1, 0xd9, 0x26, 0x3d, 
0x71, 0xfd, + 0x1e, 0x51, 0x57, 0x36, 0xa2, 0x9b, 0x99, 0xed, 0xb7, 0xe7, 0x84, 0xcf, 0x37, 0x24, 0x80, 0xae, + 0x4a, 0x6c, 0x5d, 0x42, 0x07, 0x0c, 0x82, 0xde, 0x81, 0x9b, 0xbe, 0xeb, 0x52, 0xe3, 0xc4, 0xb5, + 0x6d, 0xf7, 0x71, 0xdf, 0x33, 0x44, 0x91, 0x13, 0x37, 0x31, 0xcb, 0x93, 0x77, 0x9d, 0x09, 0xec, + 0x49, 0xbe, 0xd0, 0xc0, 0x2f, 0xe5, 0xbb, 0x70, 0xcb, 0x33, 0x7d, 0x26, 0x3c, 0x75, 0xf3, 0x2a, + 0xdf, 0xac, 0x0a, 0x91, 0x29, 0xdb, 0x6d, 0x58, 0x3f, 0xbf, 0xcf, 0x72, 0x4e, 0x5c, 0x15, 0xf1, + 0x50, 0xde, 0x99, 0xd5, 0xd7, 0x71, 0xe4, 0xaa, 0x73, 0xe2, 0xea, 0xe8, 0x64, 0x82, 0xa6, 0xfd, + 0x32, 0x0a, 0x99, 0xf1, 0x53, 0x3b, 0xb5, 0x16, 0xd5, 0x21, 0x46, 0x07, 0x9e, 0xa8, 0x41, 0x99, + 0xed, 0xbb, 0x97, 0xbb, 0x0f, 0xf9, 0xe6, 0xc0, 0xc3, 0x3a, 0x07, 0x42, 0xf7, 0x21, 0xee, 0x99, + 0x3e, 0x25, 0x6a, 0x94, 0xbb, 0x75, 0x59, 0xc4, 0x86, 0xe9, 0x53, 0x5d, 0x20, 0xa1, 0x2d, 0x58, + 0xa5, 0x56, 0x0f, 0x13, 0xc3, 0xec, 0x74, 0x70, 0xc7, 0x68, 0xbb, 0x7d, 0x87, 0xf2, 0xda, 0x14, + 0xd7, 0x57, 0x38, 0xa3, 0xc8, 0xe8, 0x3b, 0x8c, 0xac, 0x51, 0x88, 0xb1, 0xad, 0xcc, 0x57, 0x76, + 0x0f, 0x02, 0x5f, 0xd9, 0x6f, 0x56, 0x99, 0xb0, 0x43, 0x2d, 0x3a, 0x30, 0x86, 0x2e, 0x27, 0x75, + 0x10, 0x24, 0xe6, 0x01, 0x5a, 0x87, 0xb8, 0x69, 0x5b, 0x26, 0xe1, 0x75, 0x31, 0xa9, 0x8b, 0x05, + 0x2b, 0xd7, 0x7d, 0x82, 0x7d, 0xa3, 0x83, 0x4f, 0x2c, 0x07, 0x77, 0x64, 0x55, 0x4c, 0x31, 0xda, + 0xae, 0x20, 0xe5, 0xee, 0x42, 0x4c, 0x02, 0x64, 0x9b, 0x0f, 0x1b, 0x65, 0xe3, 0xf8, 0xf0, 0xa8, + 0x51, 0xde, 0xa9, 0xee, 0x55, 0xcb, 0xbb, 0xd9, 0x05, 0x94, 0x82, 0xa5, 0xf2, 0x57, 0x8a, 0x07, + 0x8d, 0x5a, 0x39, 0xab, 0xa0, 0x2c, 0x24, 0x9a, 0xe5, 0x83, 0x46, 0xad, 0xd8, 0x2c, 0x67, 0x23, + 0x5a, 0x24, 0xa1, 0x68, 0x3f, 0x88, 0x40, 0x72, 0x78, 0xe1, 0x2e, 0xdb, 0x30, 0xd6, 0x21, 0x7e, + 0x66, 0xda, 0x7d, 0x1c, 0x98, 0xce, 0x17, 0xe8, 0x45, 0x58, 0x0e, 0x2e, 0x99, 0xe0, 0xc6, 0x38, + 0x37, 0x2d, 0x89, 0x0f, 0xb8, 0xd0, 0xff, 0x81, 0x1a, 0x0a, 0x8b, 0x31, 0xa6, 0x29, 0xce, 0xe5, + 0xaf, 0x8d, 0x62, 0xb4, 0x1b, 0xd2, 0xf9, 0x0c, 0x24, 0x7b, 0xa6, 0xd3, 0x31, 0xa9, 0xeb, 0x0f, + 0x78, 0x83, 0x4a, 0xe8, 0x23, 0x02, 0x52, 0x61, 0xc9, 0xf3, 0xdd, 0x9e, 0x47, 0x83, 0x8a, 0x1d, + 0x2c, 0xd1, 0x0d, 0x58, 0xb2, 0x88, 0x61, 0x5b, 0x84, 0xaa, 0x09, 0xbe, 0x6b, 0xd1, 0x22, 0x35, + 0x8b, 0x50, 0xed, 0xaf, 0xcf, 0xc1, 0x92, 0xbc, 0xca, 0xe8, 0xcb, 0xa1, 0x04, 0xa6, 0x66, 0x6f, + 0x7c, 0x41, 0x25, 0x68, 0xe2, 0x8f, 0x69, 0x65, 0x41, 0x26, 0xfe, 0x00, 0xe2, 0x56, 0xcf, 0xec, + 0x8a, 0xc0, 0xa5, 0xb6, 0xff, 0x77, 0x5e, 0xb0, 0x2a, 0xdb, 0x5c, 0x59, 0xd0, 0x05, 0x0a, 0x6a, + 0xc3, 0xf2, 0x47, 0x7d, 0xab, 0x7d, 0x6a, 0xf8, 0xd8, 0xb3, 0x2d, 0x2c, 0x8e, 0x4b, 0x6a, 0xfb, + 0xff, 0xe7, 0x85, 0xbd, 0xcf, 0x40, 0x74, 0x81, 0x51, 0x59, 0xd0, 0xd3, 0x1f, 0x85, 0xd6, 0xcc, + 0xff, 0xb6, 0xe9, 0x8b, 0xd3, 0x76, 0x09, 0xff, 0x77, 0x4c, 0xbf, 0xc3, 0xfc, 0x67, 0x18, 0xe8, + 0x0d, 0x58, 0xf2, 0xcc, 0x81, 0xed, 0x9a, 0xa2, 0x65, 0xa7, 0xb6, 0x6f, 0x04, 0x70, 0xc1, 0xfc, + 0x92, 0x3f, 0xe2, 0xf3, 0x4b, 0x65, 0x41, 0x0f, 0x24, 0x91, 0x0d, 0x59, 0x62, 0xf5, 0x3c, 0x1b, + 0x0f, 0xeb, 0x33, 0x4b, 0x24, 0xdb, 0xfd, 0xfe, 0xbc, 0xc6, 0x1c, 0x71, 0x9c, 0xa0, 0x16, 0x33, + 0x5f, 0x57, 0xc8, 0x38, 0x09, 0x7d, 0x08, 0xd0, 0x32, 0x89, 0xd5, 0x36, 0xb8, 0xd3, 0x09, 0xae, + 0xe7, 0x9d, 0x79, 0xf5, 0x94, 0x18, 0x82, 0xf4, 0x3c, 0xd9, 0x0a, 0x16, 0xc8, 0x80, 0x14, 0xe9, + 0x77, 0xbb, 0x98, 0xf0, 0x01, 0x51, 0x4d, 0x72, 0xf0, 0xbb, 0x73, 0x3b, 0x31, 0x82, 0xa8, 0x2c, + 0xe8, 0x61, 0x44, 0x44, 0x60, 0xcd, 0xb6, 0x9c, 0x53, 0xc3, 0xed, 0x53, 0x63, 0x44, 0xe7, 0xa3, + 0x40, 0x6a, 
0xbb, 0x38, 0xaf, 0xa2, 0x9a, 0xe5, 0x9c, 0xd6, 0xfb, 0x74, 0xa4, 0xaf, 0xb2, 0xa0, + 0xaf, 0xda, 0xe7, 0x89, 0xe8, 0x6b, 0x90, 0x62, 0x57, 0xc8, 0x20, 0xd8, 0xc6, 0x6d, 0xaa, 0xa6, + 0xb8, 0xb2, 0x3b, 0xf3, 0x2b, 0x23, 0xf4, 0x88, 0x23, 0x54, 0x16, 0x74, 0xb0, 0x87, 0x2b, 0x64, + 0xc1, 0x4a, 0xdb, 0xf4, 0xdd, 0x3e, 0xc1, 0x76, 0xa0, 0x22, 0xcd, 0x55, 0xbc, 0x77, 0x89, 0xa3, + 0xc8, 0x61, 0x86, 0x6a, 0x32, 0xed, 0x31, 0x0a, 0x3a, 0x83, 0x75, 0x8a, 0x6d, 0xec, 0x3d, 0x72, + 0x9d, 0x81, 0xc1, 0x6b, 0x8f, 0xd9, 0xef, 0x58, 0xae, 0xba, 0xcc, 0xf5, 0x95, 0xe6, 0xbf, 0xfa, + 0x12, 0xab, 0x61, 0x9b, 0x83, 0x22, 0x43, 0xaa, 0x2c, 0xe8, 0x88, 0x4e, 0x50, 0xd1, 0x0f, 0x15, + 0xb8, 0x35, 0x52, 0x4c, 0x06, 0x0e, 0x7d, 0x84, 0x89, 0xf5, 0x09, 0x36, 0x88, 0x87, 0x71, 0xfb, + 0x91, 0x9a, 0xe1, 0xfa, 0xab, 0x97, 0xd6, 0x7f, 0x34, 0x44, 0x3c, 0xe2, 0x80, 0x95, 0x05, 0xfd, + 0x26, 0x7d, 0x12, 0x13, 0x7d, 0x1b, 0x6e, 0x8c, 0x8c, 0xa1, 0xbe, 0xe9, 0x90, 0x13, 0xec, 0x1b, + 0x6d, 0xd3, 0xb6, 0xd5, 0x15, 0x6e, 0x48, 0xf9, 0xd2, 0x86, 0x34, 0x25, 0xda, 0x8e, 0x69, 0xdb, + 0x95, 0x05, 0xfd, 0x1a, 0x9d, 0xc6, 0x40, 0x4d, 0x48, 0x04, 0x03, 0x98, 0xfc, 0xdc, 0xb8, 0xfc, + 0xfc, 0x35, 0x44, 0xd2, 0x34, 0x88, 0xb1, 0x5a, 0x1c, 0x6a, 0xc8, 0xd1, 0xa0, 0x21, 0x6b, 0x47, + 0x10, 0xe7, 0xa5, 0x15, 0xdd, 0x82, 0x24, 0x2f, 0xad, 0x46, 0xdf, 0xb7, 0x64, 0xe7, 0x4b, 0x70, + 0xc2, 0xb1, 0x6f, 0xa1, 0xd7, 0x01, 0x99, 0xed, 0x36, 0x26, 0xc4, 0x6a, 0x59, 0x36, 0x6f, 0x53, + 0x0c, 0x47, 0xf4, 0xc0, 0xd5, 0x31, 0x0e, 0x53, 0xa4, 0x55, 0x21, 0x1d, 0x2e, 0xac, 0xac, 0x33, + 0x52, 0x8b, 0xda, 0x41, 0x47, 0x15, 0x0b, 0xd6, 0x19, 0xc7, 0x6b, 0x78, 0x84, 0xdb, 0x35, 0x56, + 0x83, 0xb5, 0xbf, 0x28, 0x10, 0xe3, 0x15, 0x64, 0x3a, 0x86, 0x06, 0x09, 0xd2, 0x6f, 0x09, 0x86, + 0x30, 0x67, 0xb8, 0x1e, 0xf7, 0x28, 0x7a, 0xce, 0xa3, 0x63, 0x58, 0x6a, 0xf5, 0x29, 0x65, 0xc5, + 0x28, 0x36, 0xdf, 0x94, 0x14, 0x2e, 0xef, 0xf9, 0x12, 0xc7, 0xd0, 0x03, 0x2c, 0xed, 0x6d, 0x58, + 0x14, 0xa4, 0xa9, 0xd3, 0x0f, 0xfb, 0xee, 0x73, 0x09, 0xe5, 0x1f, 0x76, 0xd2, 0xda, 0x60, 0xad, + 0xf5, 0x20, 0x33, 0x5e, 0xa3, 0xd1, 0x7f, 0x41, 0x86, 0x7f, 0x72, 0x51, 0x37, 0xb8, 0x0d, 0x02, + 0x2b, 0xcd, 0xa8, 0x4d, 0x57, 0x9e, 0x59, 0x04, 0x31, 0x42, 0x7a, 0xb6, 0xc4, 0xe3, 0xbf, 0xc3, + 0xc3, 0x0a, 0xb7, 0x21, 0x3a, 0x36, 0xac, 0xf0, 0x14, 0x7d, 0x0b, 0x56, 0xce, 0xb5, 0x04, 0x64, + 0x4d, 0xe9, 0x36, 0x0a, 0x8f, 0xcd, 0x7b, 0x57, 0xeb, 0x36, 0x13, 0xad, 0x46, 0xfb, 0x5d, 0x14, + 0x92, 0xc3, 0x4e, 0x71, 0x89, 0xd4, 0xbe, 0x04, 0x19, 0x76, 0xb2, 0x4d, 0x4a, 0x71, 0x27, 0xec, + 0xe2, 0xf2, 0x90, 0xca, 0x0f, 0xfc, 0x7e, 0x30, 0x74, 0xc4, 0xae, 0x30, 0x74, 0x04, 0x23, 0xc7, + 0x87, 0xa3, 0x13, 0x13, 0xe7, 0x51, 0xf9, 0xd2, 0xa5, 0x7b, 0xe3, 0xc4, 0xb1, 0xf9, 0xad, 0x32, + 0x3c, 0x37, 0xd3, 0x83, 0x71, 0x06, 0x2b, 0xae, 0x87, 0x1d, 0x76, 0x94, 0x0d, 0xf9, 0x95, 0x2b, + 0x06, 0xa9, 0xc3, 0xab, 0x1a, 0x91, 0xaf, 0x7b, 0xd8, 0x39, 0xf6, 0xad, 0x22, 0x47, 0xd5, 0x97, + 0xdd, 0xf0, 0x52, 0x7b, 0x01, 0x96, 0xc7, 0xf8, 0x28, 0x0b, 0xd1, 0x51, 0x81, 0x60, 0x3f, 0xb5, + 0x1c, 0x40, 0xa8, 0x25, 0x4e, 0x35, 0x5f, 0x3b, 0x85, 0x54, 0xa8, 0x77, 0xa3, 0xaf, 0x8e, 0x4f, + 0x03, 0xca, 0x7c, 0x5f, 0x5f, 0x93, 0xd3, 0xc0, 0xd8, 0x28, 0xa0, 0x35, 0x60, 0x75, 0xa2, 0x7f, + 0xa3, 0x57, 0x21, 0xdb, 0x61, 0x3f, 0x1d, 0xfe, 0x02, 0x65, 0x84, 0xe6, 0xfb, 0x95, 0x10, 0x9d, + 0xcf, 0xd4, 0xd2, 0xc5, 0xc8, 0xc8, 0xc5, 0x2f, 0x22, 0x00, 0xa3, 0x2e, 0xfd, 0x84, 0x14, 0x1d, + 0x43, 0xdc, 0xa2, 0xb8, 0x27, 0xca, 0xd8, 0x25, 0x26, 0xb4, 0x91, 0x82, 0x7c, 0x95, 0xe2, 0x9e, + 0x2e, 0xd0, 0xb4, 0x3f, 0x2a, 0x10, 
0x63, 0x6b, 0xa4, 0x43, 0x8c, 0x7f, 0xab, 0x2a, 0x97, 0x1b, + 0x01, 0x04, 0x34, 0x43, 0xe2, 0xdf, 0xab, 0x1c, 0x6b, 0xe4, 0x49, 0x24, 0xec, 0xc9, 0x06, 0xa4, + 0x3a, 0x98, 0xb4, 0x7d, 0xcb, 0xe3, 0x07, 0x2d, 0xa8, 0x1e, 0x23, 0xd2, 0x53, 0xbd, 0x58, 0xda, + 0xef, 0x23, 0x90, 0x19, 0x1f, 0x50, 0xd0, 0xc3, 0x20, 0x96, 0xe2, 0x68, 0xec, 0x5c, 0x6d, 0xde, + 0xf9, 0x0f, 0x8b, 0xe7, 0x7b, 0x90, 0x19, 0x37, 0x8e, 0x9d, 0xe8, 0x53, 0x3c, 0x08, 0x2e, 0xed, + 0x29, 0x1e, 0xf0, 0xe2, 0x3a, 0x70, 0x5c, 0x67, 0xd0, 0x0b, 0xda, 0xee, 0x70, 0xad, 0xdd, 0x06, + 0x34, 0x39, 0xbf, 0xb1, 0x6e, 0xca, 0x47, 0xc2, 0xf0, 0x7c, 0xc0, 0x09, 0xc7, 0xbe, 0xa5, 0xdd, + 0x87, 0x9b, 0x4f, 0x1c, 0xb9, 0xd0, 0x7a, 0xb8, 0x13, 0x0e, 0x3f, 0x08, 0xd7, 0xc3, 0x7d, 0x8b, + 0x51, 0xd9, 0xaa, 0x94, 0x80, 0x45, 0xf1, 0x2e, 0xad, 0xdd, 0x81, 0x6b, 0x53, 0x87, 0x27, 0xd6, + 0xdc, 0x18, 0x11, 0x1b, 0x4e, 0xbf, 0xd7, 0xc2, 0xbe, 0xb4, 0x25, 0xc5, 0x69, 0x87, 0x9c, 0x94, + 0xfb, 0xa9, 0x02, 0x89, 0x60, 0x0e, 0x42, 0x2a, 0xac, 0xb3, 0x4f, 0xfd, 0xbd, 0xba, 0x7e, 0x70, + 0xee, 0x51, 0x20, 0x0d, 0x89, 0xbd, 0xe2, 0x4e, 0xb9, 0x54, 0xaf, 0xef, 0x67, 0x15, 0x94, 0x84, + 0xf8, 0x51, 0xad, 0xb8, 0xb3, 0x9f, 0x8d, 0x30, 0x46, 0xb3, 0x5c, 0x2b, 0xdf, 0xd3, 0x8b, 0x07, + 0xd9, 0x28, 0x5a, 0x82, 0xe8, 0x7e, 0x75, 0x3f, 0x1b, 0xe3, 0x12, 0xfb, 0x0f, 0x1b, 0xe5, 0x6c, + 0x1c, 0x25, 0x20, 0x56, 0xab, 0x1e, 0x96, 0xb3, 0x8b, 0x8c, 0xf8, 0xa0, 0x5a, 0x2a, 0xeb, 0xd9, + 0x25, 0x74, 0x0d, 0x56, 0x8b, 0x3b, 0xcd, 0x6a, 0xfd, 0xf0, 0xc8, 0xa8, 0x1f, 0x1a, 0xf7, 0xea, + 0xf5, 0x7b, 0xb5, 0x72, 0x36, 0x81, 0x96, 0x21, 0xc9, 0xd0, 0x1a, 0x95, 0xfa, 0xe1, 0xc3, 0x2c, + 0x94, 0x92, 0xb0, 0x24, 0x5f, 0xe0, 0xb4, 0xef, 0x29, 0x80, 0x26, 0xdf, 0x92, 0xd0, 0xff, 0x4c, + 0xbe, 0x52, 0x85, 0xea, 0xd5, 0xb9, 0x97, 0xa6, 0x59, 0x9e, 0xc5, 0x22, 0xff, 0xfc, 0x59, 0x2c, + 0x47, 0x21, 0x1d, 0x7e, 0xad, 0x46, 0xcf, 0xc2, 0xcd, 0x0f, 0xca, 0xa5, 0x4a, 0xbd, 0xbe, 0x6f, + 0x1c, 0x35, 0x8b, 0xcd, 0xf3, 0x8f, 0x29, 0x37, 0xe1, 0xda, 0x38, 0xbb, 0x7c, 0x58, 0x2c, 0xd5, + 0xca, 0xbb, 0x59, 0x05, 0x6d, 0xc1, 0xcb, 0x53, 0x59, 0xc6, 0x5e, 0x5d, 0x37, 0x8e, 0x6a, 0xf5, + 0xa6, 0xb1, 0x57, 0xad, 0xd5, 0xaa, 0x87, 0xf7, 0xb2, 0x91, 0xdc, 0x17, 0x0a, 0x20, 0x56, 0xf4, + 0x84, 0x21, 0x44, 0xc7, 0x1f, 0xf5, 0x31, 0xa1, 0xe8, 0x3a, 0x2c, 0x0a, 0x43, 0xa5, 0xbf, 0x72, + 0xc5, 0xc6, 0x45, 0xdb, 0x74, 0xba, 0x7d, 0x36, 0xd1, 0xb5, 0xdd, 0x4e, 0xe0, 0x55, 0x3a, 0x20, + 0xee, 0xb8, 0x1d, 0x8c, 0x6a, 0x90, 0x92, 0x8e, 0x9f, 0x59, 0xf8, 0x31, 0xbf, 0x6a, 0x99, 0xed, + 0xff, 0x9e, 0xf1, 0x3a, 0x3d, 0xb0, 0xf0, 0x63, 0x1d, 0xac, 0xe1, 0x6f, 0x76, 0xe6, 0x3d, 0xa6, + 0x8e, 0x1d, 0x66, 0xf9, 0xda, 0x95, 0x60, 0x84, 0x23, 0xeb, 0x13, 0x16, 0x24, 0xe0, 0x4c, 0xea, + 0x9e, 0x62, 0x47, 0xbe, 0xd2, 0x70, 0xf1, 0x26, 0x23, 0xe4, 0xbe, 0xa3, 0xc0, 0xda, 0x98, 0x77, + 0x72, 0xaa, 0x2b, 0xc2, 0x92, 0xd0, 0x10, 0x14, 0xb7, 0x57, 0x66, 0xb4, 0x4e, 0x0f, 0xf6, 0xa1, + 0x97, 0x61, 0xc5, 0x61, 0x83, 0x61, 0x48, 0xbd, 0x88, 0xc5, 0x32, 0x23, 0x37, 0x86, 0x26, 0xfc, + 0x44, 0x81, 0xec, 0x3d, 0x2c, 0x2d, 0x08, 0xc2, 0x3b, 0xed, 0x71, 0xeb, 0xdf, 0x1f, 0xda, 0xdc, + 0x9f, 0x14, 0x58, 0xdb, 0xf1, 0xb1, 0x49, 0xf1, 0xb8, 0x79, 0x4f, 0xca, 0xfe, 0xfb, 0xb0, 0x28, + 0x76, 0xcb, 0xb9, 0x67, 0xe6, 0xa8, 0xc9, 0x6d, 0x93, 0x3e, 0x46, 0x2f, 0xf6, 0x31, 0x76, 0x35, + 0x1f, 0xbf, 0x1f, 0x81, 0xb5, 0x63, 0xaf, 0x33, 0xe1, 0xe3, 0xc8, 0x17, 0xe5, 0x29, 0xf9, 0x32, + 0x2d, 0x5f, 0x77, 0x21, 0xd5, 0xe7, 0xca, 0xf9, 0x1f, 0xcd, 0xe4, 0x03, 0x99, 0x36, 0xf1, 0xea, + 0xb4, 0x67, 0x61, 0xbb, 0x73, 0x60, 0x92, 0x53, 0x1d, 0x84, 
0x38, 0xfb, 0xfd, 0x94, 0x03, 0xf1, + 0x2a, 0xac, 0xed, 0x62, 0x1b, 0x9f, 0x8f, 0xc3, 0x94, 0xa3, 0x98, 0xfb, 0x7b, 0x04, 0x6e, 0x96, + 0x4c, 0xda, 0x7e, 0x14, 0x0e, 0xdc, 0x85, 0xb5, 0x61, 0x0b, 0xb2, 0xd2, 0xdc, 0x16, 0xdb, 0x6b, + 0x0c, 0xe7, 0xb7, 0xca, 0x82, 0x9e, 0x11, 0x1c, 0x01, 0xea, 0x5b, 0xe8, 0xeb, 0xb0, 0x36, 0x26, + 0x6b, 0x39, 0xb6, 0xe5, 0x60, 0x19, 0x9f, 0xd7, 0x66, 0x74, 0x91, 0xa3, 0x55, 0x16, 0xf4, 0xd5, + 0x10, 0x78, 0x95, 0x03, 0x4d, 0x26, 0x27, 0x76, 0x71, 0x72, 0xe2, 0x57, 0x49, 0xce, 0xe2, 0x95, + 0x92, 0x53, 0xca, 0x40, 0x3a, 0x1c, 0x8f, 0x9c, 0x01, 0xda, 0xb4, 0x04, 0x3c, 0xb5, 0xf2, 0x95, + 0x3b, 0x93, 0x19, 0x0e, 0x1f, 0x89, 0x0b, 0x33, 0x1c, 0xd2, 0x1b, 0xb9, 0xa4, 0xde, 0x06, 0xa4, + 0x42, 0xc9, 0x7b, 0x0a, 0x9e, 0x6c, 0xbd, 0x0f, 0x50, 0x0d, 0x77, 0x8b, 0x1b, 0xd5, 0xc3, 0x66, + 0xf9, 0xb0, 0x69, 0x3c, 0xa8, 0x96, 0x3f, 0x38, 0xd7, 0x33, 0xd7, 0x21, 0x1b, 0x66, 0xee, 0x1d, + 0xd7, 0x6a, 0x59, 0x65, 0xfb, 0xf3, 0x24, 0x2c, 0xc9, 0x00, 0xa0, 0x5f, 0x2b, 0x90, 0x0a, 0x35, + 0x0c, 0xf4, 0xc6, 0x85, 0xe6, 0x4c, 0x36, 0x4f, 0xed, 0xcd, 0xf9, 0x36, 0x89, 0xa4, 0xe6, 0xb6, + 0xbf, 0xfb, 0x87, 0x3f, 0xff, 0x38, 0xf2, 0x1a, 0xda, 0x1a, 0xfe, 0xb9, 0xff, 0x9b, 0x22, 0xec, + 0xef, 0x7a, 0xbe, 0xfb, 0x0d, 0xdc, 0xa6, 0xa4, 0xb0, 0x55, 0x30, 0xbb, 0xd8, 0xa1, 0x9f, 0x16, + 0x82, 0x26, 0xf4, 0x33, 0x05, 0x92, 0xc3, 0xe6, 0x82, 0x2e, 0xfe, 0xb3, 0xe6, 0xf9, 0x46, 0xa4, + 0xcd, 0x1a, 0xee, 0x69, 0xd6, 0xb1, 0x52, 0x31, 0x61, 0x5b, 0x60, 0x5a, 0x61, 0xeb, 0x53, 0xf4, + 0x99, 0x02, 0xe9, 0x70, 0x7b, 0x41, 0x17, 0x07, 0x66, 0x4a, 0x37, 0x9a, 0xdd, 0xc6, 0x3b, 0xdc, + 0xc6, 0x37, 0x73, 0x73, 0x44, 0xf0, 0x4e, 0x50, 0xcd, 0x7f, 0xa3, 0x40, 0x3a, 0x7c, 0xd9, 0x66, + 0xb0, 0x75, 0x4a, 0x57, 0x99, 0xdd, 0xd6, 0x22, 0xb7, 0xf5, 0xee, 0xf6, 0xed, 0x91, 0xad, 0xf2, + 0xbf, 0x4c, 0x2e, 0x0a, 0xeb, 0xd0, 0xe4, 0x1f, 0x29, 0x90, 0x0e, 0x5f, 0xdf, 0x19, 0x4c, 0x9e, + 0xd2, 0x00, 0xb4, 0xeb, 0x13, 0x05, 0xaf, 0xdc, 0xf3, 0xe8, 0x20, 0xc8, 0xf8, 0xd6, 0x3c, 0x19, + 0xff, 0x5c, 0x01, 0x34, 0x59, 0xb7, 0xd0, 0xc5, 0xaf, 0x0a, 0x4f, 0xec, 0x36, 0xda, 0xb3, 0xc1, + 0xde, 0xd0, 0xbf, 0xb9, 0xe4, 0xeb, 0xc1, 0xbf, 0xb9, 0x04, 0x71, 0xcc, 0xbd, 0x35, 0x47, 0xce, + 0x5b, 0x23, 0x65, 0x77, 0x94, 0xad, 0x91, 0xd1, 0x63, 0xb5, 0x70, 0x56, 0xa3, 0xa7, 0x15, 0xd0, + 0x7f, 0x99, 0xd1, 0x42, 0xd9, 0x1d, 0x65, 0xab, 0xf4, 0x99, 0x02, 0x2f, 0xb6, 0xdd, 0xde, 0x45, + 0x36, 0x96, 0x64, 0xb5, 0x6d, 0xb0, 0xdc, 0x36, 0x94, 0x0f, 0xab, 0x52, 0xbe, 0xeb, 0xb2, 0x4e, + 0x98, 0x77, 0xfd, 0x6e, 0xa1, 0x8b, 0x1d, 0x9e, 0xf9, 0x82, 0x60, 0x99, 0x9e, 0x45, 0x9e, 0xf8, + 0x1f, 0x48, 0x77, 0x47, 0xa4, 0xbf, 0x29, 0xca, 0x2f, 0x22, 0x91, 0xdd, 0xbd, 0x5f, 0x45, 0x9e, + 0xbf, 0x27, 0x30, 0x77, 0xb8, 0x0d, 0xbb, 0x23, 0x1b, 0x1e, 0x88, 0x4d, 0xad, 0x45, 0x8e, 0xff, + 0xc6, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xda, 0x37, 0x44, 0x7c, 0x68, 0x25, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/knowledge_base.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/knowledge_base.pb.go new file mode 100644 index 0000000..2dd28c6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/knowledge_base.pb.go @@ -0,0 +1,663 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/dialogflow/v2beta1/knowledge_base.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents knowledge base resource. +// +// Note: resource `projects.agent.knowledgeBases` is deprecated, please use +// `projects.knowledgeBases` instead. +type KnowledgeBase struct { + // The knowledge base resource name. + // The name must be empty when creating a knowledge base. + // Format: `projects//knowledgeBases/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The display name of the knowledge base. The name must be 1024 + // bytes or less; otherwise, the creation request fails. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KnowledgeBase) Reset() { *m = KnowledgeBase{} } +func (m *KnowledgeBase) String() string { return proto.CompactTextString(m) } +func (*KnowledgeBase) ProtoMessage() {} +func (*KnowledgeBase) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{0} +} +func (m *KnowledgeBase) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnowledgeBase.Unmarshal(m, b) +} +func (m *KnowledgeBase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnowledgeBase.Marshal(b, m, deterministic) +} +func (dst *KnowledgeBase) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnowledgeBase.Merge(dst, src) +} +func (m *KnowledgeBase) XXX_Size() int { + return xxx_messageInfo_KnowledgeBase.Size(m) +} +func (m *KnowledgeBase) XXX_DiscardUnknown() { + xxx_messageInfo_KnowledgeBase.DiscardUnknown(m) +} + +var xxx_messageInfo_KnowledgeBase proto.InternalMessageInfo + +func (m *KnowledgeBase) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *KnowledgeBase) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +// Request message for [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases]. +type ListKnowledgeBasesRequest struct { + // Required. The project to list of knowledge bases for. + // Format: `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 10 and at most 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListKnowledgeBasesRequest) Reset() { *m = ListKnowledgeBasesRequest{} } +func (m *ListKnowledgeBasesRequest) String() string { return proto.CompactTextString(m) } +func (*ListKnowledgeBasesRequest) ProtoMessage() {} +func (*ListKnowledgeBasesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{1} +} +func (m *ListKnowledgeBasesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListKnowledgeBasesRequest.Unmarshal(m, b) +} +func (m *ListKnowledgeBasesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListKnowledgeBasesRequest.Marshal(b, m, deterministic) +} +func (dst *ListKnowledgeBasesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListKnowledgeBasesRequest.Merge(dst, src) +} +func (m *ListKnowledgeBasesRequest) XXX_Size() int { + return xxx_messageInfo_ListKnowledgeBasesRequest.Size(m) +} +func (m *ListKnowledgeBasesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListKnowledgeBasesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListKnowledgeBasesRequest proto.InternalMessageInfo + +func (m *ListKnowledgeBasesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListKnowledgeBasesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListKnowledgeBasesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [KnowledgeBases.ListKnowledgeBases][google.cloud.dialogflow.v2beta1.KnowledgeBases.ListKnowledgeBases]. +type ListKnowledgeBasesResponse struct { + // The list of knowledge bases. + KnowledgeBases []*KnowledgeBase `protobuf:"bytes,1,rep,name=knowledge_bases,json=knowledgeBases,proto3" json:"knowledge_bases,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListKnowledgeBasesResponse) Reset() { *m = ListKnowledgeBasesResponse{} } +func (m *ListKnowledgeBasesResponse) String() string { return proto.CompactTextString(m) } +func (*ListKnowledgeBasesResponse) ProtoMessage() {} +func (*ListKnowledgeBasesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{2} +} +func (m *ListKnowledgeBasesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListKnowledgeBasesResponse.Unmarshal(m, b) +} +func (m *ListKnowledgeBasesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListKnowledgeBasesResponse.Marshal(b, m, deterministic) +} +func (dst *ListKnowledgeBasesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListKnowledgeBasesResponse.Merge(dst, src) +} +func (m *ListKnowledgeBasesResponse) XXX_Size() int { + return xxx_messageInfo_ListKnowledgeBasesResponse.Size(m) +} +func (m *ListKnowledgeBasesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListKnowledgeBasesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListKnowledgeBasesResponse proto.InternalMessageInfo + +func (m *ListKnowledgeBasesResponse) GetKnowledgeBases() []*KnowledgeBase { + if m != nil { + return m.KnowledgeBases + } + return nil +} + +func (m *ListKnowledgeBasesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [KnowledgeBase.GetDocument][]. +type GetKnowledgeBaseRequest struct { + // Required. The name of the knowledge base to retrieve. + // Format `projects//knowledgeBases/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetKnowledgeBaseRequest) Reset() { *m = GetKnowledgeBaseRequest{} } +func (m *GetKnowledgeBaseRequest) String() string { return proto.CompactTextString(m) } +func (*GetKnowledgeBaseRequest) ProtoMessage() {} +func (*GetKnowledgeBaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{3} +} +func (m *GetKnowledgeBaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetKnowledgeBaseRequest.Unmarshal(m, b) +} +func (m *GetKnowledgeBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetKnowledgeBaseRequest.Marshal(b, m, deterministic) +} +func (dst *GetKnowledgeBaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetKnowledgeBaseRequest.Merge(dst, src) +} +func (m *GetKnowledgeBaseRequest) XXX_Size() int { + return xxx_messageInfo_GetKnowledgeBaseRequest.Size(m) +} +func (m *GetKnowledgeBaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetKnowledgeBaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetKnowledgeBaseRequest proto.InternalMessageInfo + +func (m *GetKnowledgeBaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [KnowledgeBases.CreateKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.CreateKnowledgeBase]. +type CreateKnowledgeBaseRequest struct { + // Required. The project to create a knowledge base for. + // Format: `projects/`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The knowledge base to create. + KnowledgeBase *KnowledgeBase `protobuf:"bytes,2,opt,name=knowledge_base,json=knowledgeBase,proto3" json:"knowledge_base,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateKnowledgeBaseRequest) Reset() { *m = CreateKnowledgeBaseRequest{} } +func (m *CreateKnowledgeBaseRequest) String() string { return proto.CompactTextString(m) } +func (*CreateKnowledgeBaseRequest) ProtoMessage() {} +func (*CreateKnowledgeBaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{4} +} +func (m *CreateKnowledgeBaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateKnowledgeBaseRequest.Unmarshal(m, b) +} +func (m *CreateKnowledgeBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateKnowledgeBaseRequest.Marshal(b, m, deterministic) +} +func (dst *CreateKnowledgeBaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateKnowledgeBaseRequest.Merge(dst, src) +} +func (m *CreateKnowledgeBaseRequest) XXX_Size() int { + return xxx_messageInfo_CreateKnowledgeBaseRequest.Size(m) +} +func (m *CreateKnowledgeBaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateKnowledgeBaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateKnowledgeBaseRequest proto.InternalMessageInfo + +func (m *CreateKnowledgeBaseRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateKnowledgeBaseRequest) GetKnowledgeBase() *KnowledgeBase { + if m != nil { + return m.KnowledgeBase + } + return nil +} + +// Request message for [KnowledgeBases.DeleteKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.DeleteKnowledgeBase]. +type DeleteKnowledgeBaseRequest struct { + // Required. The name of the knowledge base to delete. + // Format: `projects//knowledgeBases/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. Force deletes the knowledge base. When set to true, any documents + // in the knowledge base are also deleted. 
+ Force bool `protobuf:"varint,2,opt,name=force,proto3" json:"force,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteKnowledgeBaseRequest) Reset() { *m = DeleteKnowledgeBaseRequest{} } +func (m *DeleteKnowledgeBaseRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteKnowledgeBaseRequest) ProtoMessage() {} +func (*DeleteKnowledgeBaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{5} +} +func (m *DeleteKnowledgeBaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteKnowledgeBaseRequest.Unmarshal(m, b) +} +func (m *DeleteKnowledgeBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteKnowledgeBaseRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteKnowledgeBaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteKnowledgeBaseRequest.Merge(dst, src) +} +func (m *DeleteKnowledgeBaseRequest) XXX_Size() int { + return xxx_messageInfo_DeleteKnowledgeBaseRequest.Size(m) +} +func (m *DeleteKnowledgeBaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteKnowledgeBaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteKnowledgeBaseRequest proto.InternalMessageInfo + +func (m *DeleteKnowledgeBaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteKnowledgeBaseRequest) GetForce() bool { + if m != nil { + return m.Force + } + return false +} + +// Request message for [KnowledgeBases.UpdateKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.UpdateKnowledgeBase]. +type UpdateKnowledgeBaseRequest struct { + // Required. The knowledge base to update. + KnowledgeBase *KnowledgeBase `protobuf:"bytes,1,opt,name=knowledge_base,json=knowledgeBase,proto3" json:"knowledge_base,omitempty"` + // Optional. Not specified means `update all`. + // Currently, only `display_name` can be updated, an InvalidArgument will be + // returned for attempting to update other fields. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateKnowledgeBaseRequest) Reset() { *m = UpdateKnowledgeBaseRequest{} } +func (m *UpdateKnowledgeBaseRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateKnowledgeBaseRequest) ProtoMessage() {} +func (*UpdateKnowledgeBaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_knowledge_base_54c2aba98392ab6e, []int{6} +} +func (m *UpdateKnowledgeBaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateKnowledgeBaseRequest.Unmarshal(m, b) +} +func (m *UpdateKnowledgeBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateKnowledgeBaseRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateKnowledgeBaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateKnowledgeBaseRequest.Merge(dst, src) +} +func (m *UpdateKnowledgeBaseRequest) XXX_Size() int { + return xxx_messageInfo_UpdateKnowledgeBaseRequest.Size(m) +} +func (m *UpdateKnowledgeBaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateKnowledgeBaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateKnowledgeBaseRequest proto.InternalMessageInfo + +func (m *UpdateKnowledgeBaseRequest) GetKnowledgeBase() *KnowledgeBase { + if m != nil { + return m.KnowledgeBase + } + return nil +} + +func (m *UpdateKnowledgeBaseRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func init() { + proto.RegisterType((*KnowledgeBase)(nil), "google.cloud.dialogflow.v2beta1.KnowledgeBase") + proto.RegisterType((*ListKnowledgeBasesRequest)(nil), "google.cloud.dialogflow.v2beta1.ListKnowledgeBasesRequest") + proto.RegisterType((*ListKnowledgeBasesResponse)(nil), "google.cloud.dialogflow.v2beta1.ListKnowledgeBasesResponse") + proto.RegisterType((*GetKnowledgeBaseRequest)(nil), "google.cloud.dialogflow.v2beta1.GetKnowledgeBaseRequest") + proto.RegisterType((*CreateKnowledgeBaseRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateKnowledgeBaseRequest") + proto.RegisterType((*DeleteKnowledgeBaseRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteKnowledgeBaseRequest") + proto.RegisterType((*UpdateKnowledgeBaseRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// KnowledgeBasesClient is the client API for KnowledgeBases service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type KnowledgeBasesClient interface { + // Returns the list of all knowledge bases of the specified agent. + ListKnowledgeBases(ctx context.Context, in *ListKnowledgeBasesRequest, opts ...grpc.CallOption) (*ListKnowledgeBasesResponse, error) + // Retrieves the specified knowledge base. + GetKnowledgeBase(ctx context.Context, in *GetKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) + // Creates a knowledge base. 
+ CreateKnowledgeBase(ctx context.Context, in *CreateKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) + // Deletes the specified knowledge base. + DeleteKnowledgeBase(ctx context.Context, in *DeleteKnowledgeBaseRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates the specified knowledge base. + UpdateKnowledgeBase(ctx context.Context, in *UpdateKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) +} + +type knowledgeBasesClient struct { + cc *grpc.ClientConn +} + +func NewKnowledgeBasesClient(cc *grpc.ClientConn) KnowledgeBasesClient { + return &knowledgeBasesClient{cc} +} + +func (c *knowledgeBasesClient) ListKnowledgeBases(ctx context.Context, in *ListKnowledgeBasesRequest, opts ...grpc.CallOption) (*ListKnowledgeBasesResponse, error) { + out := new(ListKnowledgeBasesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.KnowledgeBases/ListKnowledgeBases", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *knowledgeBasesClient) GetKnowledgeBase(ctx context.Context, in *GetKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) { + out := new(KnowledgeBase) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.KnowledgeBases/GetKnowledgeBase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *knowledgeBasesClient) CreateKnowledgeBase(ctx context.Context, in *CreateKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) { + out := new(KnowledgeBase) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.KnowledgeBases/CreateKnowledgeBase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *knowledgeBasesClient) DeleteKnowledgeBase(ctx context.Context, in *DeleteKnowledgeBaseRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.KnowledgeBases/DeleteKnowledgeBase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *knowledgeBasesClient) UpdateKnowledgeBase(ctx context.Context, in *UpdateKnowledgeBaseRequest, opts ...grpc.CallOption) (*KnowledgeBase, error) { + out := new(KnowledgeBase) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.KnowledgeBases/UpdateKnowledgeBase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// KnowledgeBasesServer is the server API for KnowledgeBases service. +type KnowledgeBasesServer interface { + // Returns the list of all knowledge bases of the specified agent. + ListKnowledgeBases(context.Context, *ListKnowledgeBasesRequest) (*ListKnowledgeBasesResponse, error) + // Retrieves the specified knowledge base. + GetKnowledgeBase(context.Context, *GetKnowledgeBaseRequest) (*KnowledgeBase, error) + // Creates a knowledge base. + CreateKnowledgeBase(context.Context, *CreateKnowledgeBaseRequest) (*KnowledgeBase, error) + // Deletes the specified knowledge base. + DeleteKnowledgeBase(context.Context, *DeleteKnowledgeBaseRequest) (*empty.Empty, error) + // Updates the specified knowledge base. 
+ UpdateKnowledgeBase(context.Context, *UpdateKnowledgeBaseRequest) (*KnowledgeBase, error) +} + +func RegisterKnowledgeBasesServer(s *grpc.Server, srv KnowledgeBasesServer) { + s.RegisterService(&_KnowledgeBases_serviceDesc, srv) +} + +func _KnowledgeBases_ListKnowledgeBases_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListKnowledgeBasesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KnowledgeBasesServer).ListKnowledgeBases(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.KnowledgeBases/ListKnowledgeBases", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KnowledgeBasesServer).ListKnowledgeBases(ctx, req.(*ListKnowledgeBasesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KnowledgeBases_GetKnowledgeBase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetKnowledgeBaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KnowledgeBasesServer).GetKnowledgeBase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.KnowledgeBases/GetKnowledgeBase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KnowledgeBasesServer).GetKnowledgeBase(ctx, req.(*GetKnowledgeBaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KnowledgeBases_CreateKnowledgeBase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateKnowledgeBaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KnowledgeBasesServer).CreateKnowledgeBase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.KnowledgeBases/CreateKnowledgeBase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KnowledgeBasesServer).CreateKnowledgeBase(ctx, req.(*CreateKnowledgeBaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KnowledgeBases_DeleteKnowledgeBase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteKnowledgeBaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KnowledgeBasesServer).DeleteKnowledgeBase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.KnowledgeBases/DeleteKnowledgeBase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KnowledgeBasesServer).DeleteKnowledgeBase(ctx, req.(*DeleteKnowledgeBaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _KnowledgeBases_UpdateKnowledgeBase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateKnowledgeBaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(KnowledgeBasesServer).UpdateKnowledgeBase(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.KnowledgeBases/UpdateKnowledgeBase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(KnowledgeBasesServer).UpdateKnowledgeBase(ctx, req.(*UpdateKnowledgeBaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _KnowledgeBases_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.KnowledgeBases", + HandlerType: (*KnowledgeBasesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListKnowledgeBases", + Handler: _KnowledgeBases_ListKnowledgeBases_Handler, + }, + { + MethodName: "GetKnowledgeBase", + Handler: _KnowledgeBases_GetKnowledgeBase_Handler, + }, + { + MethodName: "CreateKnowledgeBase", + Handler: _KnowledgeBases_CreateKnowledgeBase_Handler, + }, + { + MethodName: "DeleteKnowledgeBase", + Handler: _KnowledgeBases_DeleteKnowledgeBase_Handler, + }, + { + MethodName: "UpdateKnowledgeBase", + Handler: _KnowledgeBases_UpdateKnowledgeBase_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/knowledge_base.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/knowledge_base.proto", fileDescriptor_knowledge_base_54c2aba98392ab6e) +} + +var fileDescriptor_knowledge_base_54c2aba98392ab6e = []byte{ + // 736 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcf, 0x4f, 0x13, 0x41, + 0x14, 0xce, 0x14, 0x21, 0xf0, 0x10, 0x30, 0x83, 0x41, 0x58, 0x34, 0xe0, 0x9a, 0x18, 0x52, 0xe3, + 0x6e, 0x28, 0x1e, 0x0c, 0xc4, 0xc4, 0x00, 0x96, 0x18, 0x7f, 0x84, 0x54, 0xd1, 0x84, 0x4b, 0x33, + 0x6d, 0x5f, 0xd7, 0xb5, 0xdb, 0x9d, 0x75, 0x67, 0x2b, 0x82, 0xe1, 0xc2, 0x4d, 0x4d, 0xbc, 0x78, + 0x36, 0x31, 0x5e, 0x4c, 0x8c, 0x07, 0xfe, 0x17, 0xfd, 0x13, 0x8c, 0xff, 0x82, 0x1e, 0xcd, 0xce, + 0x6c, 0x29, 0xbb, 0xdd, 0xb5, 0x94, 0x78, 0xeb, 0xcc, 0x9b, 0xf7, 0xcd, 0xf7, 0xbd, 0xef, 0xbd, + 0xe9, 0xc2, 0x0d, 0x8b, 0x73, 0xcb, 0x41, 0xb3, 0xea, 0xf0, 0x56, 0xcd, 0xac, 0xd9, 0xcc, 0xe1, + 0x56, 0xdd, 0xe1, 0x3b, 0xe6, 0xcb, 0x42, 0x05, 0x03, 0xb6, 0x68, 0x36, 0x5c, 0xbe, 0xe3, 0x60, + 0xcd, 0xc2, 0x72, 0x85, 0x09, 0x34, 0x3c, 0x9f, 0x07, 0x9c, 0xce, 0xa9, 0x2c, 0x43, 0x66, 0x19, + 0x9d, 0x2c, 0x23, 0xca, 0xd2, 0x2e, 0x46, 0xb0, 0xcc, 0xb3, 0x4d, 0xe6, 0xba, 0x3c, 0x60, 0x81, + 0xcd, 0x5d, 0xa1, 0xd2, 0xb5, 0x99, 0x63, 0x51, 0x1f, 0x05, 0x6f, 0xf9, 0xd5, 0x08, 0x59, 0x9b, + 0x8d, 0x42, 0x72, 0x55, 0x69, 0xd5, 0x4d, 0x6c, 0x7a, 0xc1, 0x6e, 0x14, 0x9c, 0x4f, 0x06, 0xeb, + 0x36, 0x3a, 0xb5, 0x72, 0x93, 0x89, 0x86, 0x3a, 0xa1, 0x17, 0x61, 0xec, 0x5e, 0x9b, 0xf0, 0x2a, + 0x13, 0x48, 0x29, 0x9c, 0x71, 0x59, 0x13, 0xa7, 0xc9, 0x3c, 0x59, 0x18, 0x29, 0xc9, 0xdf, 0xf4, + 0x32, 0x9c, 0xad, 0xd9, 0xc2, 0x73, 0xd8, 0x6e, 0x59, 0xc6, 0x72, 0x32, 0x36, 0x1a, 0xed, 0x3d, + 0x64, 0x4d, 0xd4, 0x39, 0xcc, 0xdc, 0xb7, 0x45, 0x10, 0xc3, 0x12, 0x25, 0x7c, 0xd1, 0x42, 0x11, + 0xd0, 0x29, 0x18, 0xf2, 0x98, 0x8f, 0x6e, 0x10, 0xa1, 0x46, 0x2b, 0x3a, 0x0b, 0x23, 0x1e, 0xb3, + 0xb0, 0x2c, 0xec, 0x3d, 0x05, 0x3a, 0x58, 0x1a, 0x0e, 0x37, 0x1e, 0xd9, 0x7b, 0x48, 0x2f, 0x01, + 0xc8, 0x60, 0xc0, 0x1b, 0xe8, 0x4e, 0x0f, 0xc8, 0x44, 0x79, 0xfc, 0x71, 0xb8, 0xa1, 0x7f, 0x24, + 0xa0, 0xa5, 0xdd, 0x28, 0x3c, 0xee, 0x0a, 0xa4, 0x4f, 0x61, 0x22, 0x6e, 0x84, 0x98, 0x26, 0xf3, + 0x03, 0x0b, 0xa3, 0x05, 0xc3, 0xe8, 0x61, 0x85, 0x11, 0x43, 0x2c, 0x8d, 0x37, 0x62, 0x17, 0xd0, + 0xab, 0x30, 0xe1, 0xe2, 0xab, 0xa0, 0x7c, 0x8c, 0x9b, 0x2a, 0xc7, 0x58, 0xb8, 
0xbd, 0x79, 0xc4, + 0xef, 0x3a, 0x5c, 0xd8, 0xc0, 0x38, 0xbb, 0x76, 0x39, 0x52, 0x4a, 0xac, 0xbf, 0x23, 0xa0, 0xad, + 0xf9, 0xc8, 0x02, 0x4c, 0x4d, 0xc9, 0xaa, 0xe0, 0x16, 0x8c, 0xc7, 0x65, 0x4a, 0x32, 0xfd, 0xab, + 0x1c, 0x8b, 0xa9, 0xd4, 0x8b, 0xa0, 0xad, 0xa3, 0x83, 0x19, 0x64, 0xd2, 0x5a, 0xe4, 0x3c, 0x0c, + 0xd6, 0xb9, 0x5f, 0x55, 0xf7, 0x0f, 0x97, 0xd4, 0x42, 0x3f, 0x24, 0xa0, 0x6d, 0x79, 0xb5, 0x2c, + 0x55, 0xdd, 0xec, 0xc9, 0x7f, 0x60, 0x4f, 0x57, 0x60, 0xb4, 0x25, 0x2f, 0x95, 0x8d, 0x1e, 0x55, + 0x44, 0x6b, 0x63, 0xb6, 0x67, 0xc1, 0x28, 0x86, 0xb3, 0xf0, 0x80, 0x89, 0x46, 0x09, 0xd4, 0xf1, + 0xf0, 0x77, 0xe1, 0x2d, 0xc0, 0x78, 0xbc, 0xa7, 0xe8, 0x6f, 0x02, 0xb4, 0xbb, 0xd5, 0xe8, 0x72, + 0x4f, 0x96, 0x99, 0x13, 0xa1, 0xad, 0x9c, 0x2a, 0x57, 0xf5, 0xb6, 0xfe, 0xec, 0xe0, 0xfb, 0xcf, + 0x0f, 0xb9, 0x0a, 0xbd, 0x76, 0xf4, 0xe6, 0xbc, 0x56, 0xed, 0x70, 0xcb, 0xf3, 0xf9, 0x73, 0xac, + 0x06, 0xc2, 0xcc, 0xef, 0x9b, 0xf1, 0xbe, 0xdd, 0x5e, 0xa2, 0x8b, 0xff, 0x38, 0x6e, 0x32, 0x0b, + 0xdd, 0x20, 0x99, 0x44, 0x7f, 0x11, 0x38, 0x97, 0xec, 0x62, 0x7a, 0xb3, 0x27, 0xf7, 0x8c, 0xc6, + 0xd7, 0xfa, 0xf4, 0x35, 0x4d, 0x68, 0xd8, 0x6c, 0xc7, 0x79, 0xc7, 0x09, 0x9b, 0xf9, 0xfd, 0xb8, + 0xd0, 0xe4, 0x71, 0x29, 0xb3, 0x3b, 0x89, 0xbe, 0xcf, 0xc1, 0x64, 0xca, 0xf8, 0xd1, 0xde, 0x3e, + 0x65, 0x0f, 0x6d, 0xdf, 0x72, 0xdf, 0x10, 0xa9, 0xf7, 0x80, 0xe8, 0xfd, 0x38, 0xbb, 0x9c, 0x98, + 0xa1, 0xed, 0x35, 0xbd, 0x7f, 0xa7, 0x93, 0x20, 0xf4, 0x07, 0x81, 0xc9, 0x94, 0x27, 0xe0, 0x04, + 0x05, 0xc9, 0x7e, 0x38, 0xb4, 0xa9, 0xae, 0x19, 0xbc, 0x13, 0xfe, 0x59, 0xb5, 0x7d, 0xce, 0xf7, + 0xe7, 0x73, 0xfe, 0x14, 0x3e, 0x7f, 0xcb, 0xc1, 0x64, 0xca, 0x83, 0x74, 0x02, 0x59, 0xd9, 0xcf, + 0x58, 0xdf, 0x3e, 0x7f, 0x51, 0x3e, 0x7f, 0x22, 0x85, 0xe5, 0x8e, 0x82, 0xc4, 0x67, 0x43, 0x4f, + 0xfd, 0x5d, 0xb6, 0x97, 0x0a, 0xb7, 0x4f, 0x8c, 0x96, 0x51, 0x9e, 0x24, 0xe6, 0xea, 0x21, 0x81, + 0x2b, 0x55, 0xde, 0xec, 0xa5, 0x6f, 0x95, 0xc6, 0x04, 0x6e, 0x86, 0xee, 0x6e, 0x92, 0xed, 0xbb, + 0x51, 0x9a, 0xc5, 0x1d, 0xe6, 0x5a, 0x06, 0xf7, 0x2d, 0xd3, 0x42, 0x57, 0x7a, 0x6f, 0xaa, 0x10, + 0xf3, 0x6c, 0x91, 0xf9, 0x25, 0xb5, 0xd2, 0xd9, 0xfa, 0x43, 0xc8, 0xe7, 0x5c, 0x6e, 0xbd, 0xf8, + 0x35, 0x37, 0xb7, 0xa1, 0x30, 0xd7, 0x24, 0x95, 0xf5, 0x0e, 0x95, 0x27, 0x2a, 0xa9, 0x32, 0x24, + 0xf1, 0x97, 0xfe, 0x06, 0x00, 0x00, 0xff, 0xff, 0xd1, 0xd8, 0x8c, 0xdc, 0xa8, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session.pb.go new file mode 100644 index 0000000..ec74ff1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session.pb.go @@ -0,0 +1,1845 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/session.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the system's confidence that this knowledge answer is a good +// match for this conversational query. +type KnowledgeAnswers_Answer_MatchConfidenceLevel int32 + +const ( + // Not specified. + KnowledgeAnswers_Answer_MATCH_CONFIDENCE_LEVEL_UNSPECIFIED KnowledgeAnswers_Answer_MatchConfidenceLevel = 0 + // Indicates that the confidence is low. + KnowledgeAnswers_Answer_LOW KnowledgeAnswers_Answer_MatchConfidenceLevel = 1 + // Indicates our confidence is medium. + KnowledgeAnswers_Answer_MEDIUM KnowledgeAnswers_Answer_MatchConfidenceLevel = 2 + // Indicates our confidence is high. + KnowledgeAnswers_Answer_HIGH KnowledgeAnswers_Answer_MatchConfidenceLevel = 3 +) + +var KnowledgeAnswers_Answer_MatchConfidenceLevel_name = map[int32]string{ + 0: "MATCH_CONFIDENCE_LEVEL_UNSPECIFIED", + 1: "LOW", + 2: "MEDIUM", + 3: "HIGH", +} +var KnowledgeAnswers_Answer_MatchConfidenceLevel_value = map[string]int32{ + "MATCH_CONFIDENCE_LEVEL_UNSPECIFIED": 0, + "LOW": 1, + "MEDIUM": 2, + "HIGH": 3, +} + +func (x KnowledgeAnswers_Answer_MatchConfidenceLevel) String() string { + return proto.EnumName(KnowledgeAnswers_Answer_MatchConfidenceLevel_name, int32(x)) +} +func (KnowledgeAnswers_Answer_MatchConfidenceLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{5, 0, 0} +} + +// Type of the response message. +type StreamingRecognitionResult_MessageType int32 + +const ( + // Not specified. Should never be used. + StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED StreamingRecognitionResult_MessageType = 0 + // Message contains a (possibly partial) transcript. + StreamingRecognitionResult_TRANSCRIPT StreamingRecognitionResult_MessageType = 1 + // Event indicates that the server has detected the end of the user's speech + // utterance and expects no additional speech. Therefore, the server will + // not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. This message is only sent if + // `single_utterance` was set to `true`, and is not used otherwise. + StreamingRecognitionResult_END_OF_SINGLE_UTTERANCE StreamingRecognitionResult_MessageType = 2 +) + +var StreamingRecognitionResult_MessageType_name = map[int32]string{ + 0: "MESSAGE_TYPE_UNSPECIFIED", + 1: "TRANSCRIPT", + 2: "END_OF_SINGLE_UTTERANCE", +} +var StreamingRecognitionResult_MessageType_value = map[string]int32{ + "MESSAGE_TYPE_UNSPECIFIED": 0, + "TRANSCRIPT": 1, + "END_OF_SINGLE_UTTERANCE": 2, +} + +func (x StreamingRecognitionResult_MessageType) String() string { + return proto.EnumName(StreamingRecognitionResult_MessageType_name, int32(x)) +} +func (StreamingRecognitionResult_MessageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{8, 0} +} + +// The request to detect user's intent. +type DetectIntentRequest struct { + // Required. The name of the session this query is sent to. 
Format: + // `projects//agent/sessions/`, or + // `projects//agent/environments//users//sessions/`. If `Environment ID` is not specified, we assume + // default 'draft' environment. If `User ID` is not specified, we are using + // "-". It’s up to the API caller to choose an appropriate `Session ID` and + // `User Id`. They can be a random numbers or some type of user and session + // identifiers (preferably hashed). The length of the `Session ID` and + // `User ID` must not exceed 36 characters. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Optional. The parameters of this query. + QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"` + // Required. The input specification. It can be set to: + // + // 1. an audio config + // which instructs the speech recognizer how to process the speech audio, + // + // 2. a conversational query in the form of text, or + // + // 3. an event that specifies which intent to trigger. + QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"` + // Optional. Instructs the speech synthesizer how to generate the output + // audio. If this field is not set and agent-level speech synthesizer is not + // configured, no output audio is generated. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,4,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + // Optional. The natural language speech audio to be processed. This field + // should be populated iff `query_input` is set to an input audio config. + // A single request can contain up to 1 minute of speech audio data. + InputAudio []byte `protobuf:"bytes,5,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectIntentRequest) Reset() { *m = DetectIntentRequest{} } +func (m *DetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*DetectIntentRequest) ProtoMessage() {} +func (*DetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{0} +} +func (m *DetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectIntentRequest.Unmarshal(m, b) +} +func (m *DetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *DetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectIntentRequest.Merge(dst, src) +} +func (m *DetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_DetectIntentRequest.Size(m) +} +func (m *DetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectIntentRequest proto.InternalMessageInfo + +func (m *DetectIntentRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *DetectIntentRequest) GetQueryParams() *QueryParameters { + if m != nil { + return m.QueryParams + } + return nil +} + +func (m *DetectIntentRequest) GetQueryInput() *QueryInput { + if m != nil { + return m.QueryInput + } + return nil +} + +func (m *DetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +func (m *DetectIntentRequest) GetInputAudio() 
[]byte { + if m != nil { + return m.InputAudio + } + return nil +} + +// The message returned from the DetectIntent method. +type DetectIntentResponse struct { + // The unique identifier of the response. It can be used to + // locate a response in the training example set or for reporting issues. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The selected results of the conversational query or event processing. + // See `alternative_query_results` for additional potential results. + QueryResult *QueryResult `protobuf:"bytes,2,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // If Knowledge Connectors are enabled, there could be more than one result + // returned for a given query or event, and this field will contain all + // results except for the top one, which is captured in query_result. The + // alternative results are ordered by decreasing + // `QueryResult.intent_detection_confidence`. If Knowledge Connectors are + // disabled, this field will be empty until multiple responses for regular + // intents are supported, at which point those additional results will be + // surfaced here. + AlternativeQueryResults []*QueryResult `protobuf:"bytes,5,rep,name=alternative_query_results,json=alternativeQueryResults,proto3" json:"alternative_query_results,omitempty"` + // Specifies the status of the webhook request. + WebhookStatus *status.Status `protobuf:"bytes,3,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"` + // The audio data bytes encoded as specified in the request. + // Note: The output audio is generated based on the values of default platform + // text responses found in the `query_result.fulfillment_messages` field. If + // multiple default text responses exist, they will be concatenated when + // generating audio. If no default platform text responses exist, the + // generated audio content will be empty. + OutputAudio []byte `protobuf:"bytes,4,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"` + // Instructs the speech synthesizer how to generate the output audio. This + // field is populated from the agent-level speech synthesizer configuration, + // if enabled. 
+ OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectIntentResponse) Reset() { *m = DetectIntentResponse{} } +func (m *DetectIntentResponse) String() string { return proto.CompactTextString(m) } +func (*DetectIntentResponse) ProtoMessage() {} +func (*DetectIntentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{1} +} +func (m *DetectIntentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectIntentResponse.Unmarshal(m, b) +} +func (m *DetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectIntentResponse.Marshal(b, m, deterministic) +} +func (dst *DetectIntentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectIntentResponse.Merge(dst, src) +} +func (m *DetectIntentResponse) XXX_Size() int { + return xxx_messageInfo_DetectIntentResponse.Size(m) +} +func (m *DetectIntentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectIntentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectIntentResponse proto.InternalMessageInfo + +func (m *DetectIntentResponse) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *DetectIntentResponse) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *DetectIntentResponse) GetAlternativeQueryResults() []*QueryResult { + if m != nil { + return m.AlternativeQueryResults + } + return nil +} + +func (m *DetectIntentResponse) GetWebhookStatus() *status.Status { + if m != nil { + return m.WebhookStatus + } + return nil +} + +func (m *DetectIntentResponse) GetOutputAudio() []byte { + if m != nil { + return m.OutputAudio + } + return nil +} + +func (m *DetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +// Represents the parameters of the conversational query. +type QueryParameters struct { + // Optional. The time zone of this conversational query from the + // [time zone database](https://www.iana.org/time-zones), e.g., + // America/New_York, Europe/Paris. If not provided, the time zone specified in + // agent settings is used. + TimeZone string `protobuf:"bytes,1,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Optional. The geo location of this conversational query. + GeoLocation *latlng.LatLng `protobuf:"bytes,2,opt,name=geo_location,json=geoLocation,proto3" json:"geo_location,omitempty"` + // Optional. The collection of contexts to be activated before this query is + // executed. + Contexts []*Context `protobuf:"bytes,3,rep,name=contexts,proto3" json:"contexts,omitempty"` + // Optional. Specifies whether to delete all contexts in the current session + // before the new ones are activated. + ResetContexts bool `protobuf:"varint,4,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"` + // Optional. Additional session entity types to replace or extend developer + // entity types with. The entity synonyms apply to all languages and persist + // for the session of this query. + SessionEntityTypes []*SessionEntityType `protobuf:"bytes,5,rep,name=session_entity_types,json=sessionEntityTypes,proto3" json:"session_entity_types,omitempty"` + // Optional. 
This field can be used to pass custom data into the webhook + // associated with the agent. Arbitrary JSON objects are supported. + Payload *_struct.Struct `protobuf:"bytes,6,opt,name=payload,proto3" json:"payload,omitempty"` + // Optional. KnowledgeBases to get alternative results from. If not set, the + // KnowledgeBases enabled in the agent (through UI) will be used. + // Format: `projects//knowledgeBases/`. + KnowledgeBaseNames []string `protobuf:"bytes,12,rep,name=knowledge_base_names,json=knowledgeBaseNames,proto3" json:"knowledge_base_names,omitempty"` + // Optional. Configures the type of sentiment analysis to perform. If not + // provided, sentiment analysis is not performed. + // Note: Sentiment Analysis is only currently available for Enterprise Edition + // agents. + SentimentAnalysisRequestConfig *SentimentAnalysisRequestConfig `protobuf:"bytes,10,opt,name=sentiment_analysis_request_config,json=sentimentAnalysisRequestConfig,proto3" json:"sentiment_analysis_request_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryParameters) Reset() { *m = QueryParameters{} } +func (m *QueryParameters) String() string { return proto.CompactTextString(m) } +func (*QueryParameters) ProtoMessage() {} +func (*QueryParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{2} +} +func (m *QueryParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryParameters.Unmarshal(m, b) +} +func (m *QueryParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryParameters.Marshal(b, m, deterministic) +} +func (dst *QueryParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryParameters.Merge(dst, src) +} +func (m *QueryParameters) XXX_Size() int { + return xxx_messageInfo_QueryParameters.Size(m) +} +func (m *QueryParameters) XXX_DiscardUnknown() { + xxx_messageInfo_QueryParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryParameters proto.InternalMessageInfo + +func (m *QueryParameters) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *QueryParameters) GetGeoLocation() *latlng.LatLng { + if m != nil { + return m.GeoLocation + } + return nil +} + +func (m *QueryParameters) GetContexts() []*Context { + if m != nil { + return m.Contexts + } + return nil +} + +func (m *QueryParameters) GetResetContexts() bool { + if m != nil { + return m.ResetContexts + } + return false +} + +func (m *QueryParameters) GetSessionEntityTypes() []*SessionEntityType { + if m != nil { + return m.SessionEntityTypes + } + return nil +} + +func (m *QueryParameters) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func (m *QueryParameters) GetKnowledgeBaseNames() []string { + if m != nil { + return m.KnowledgeBaseNames + } + return nil +} + +func (m *QueryParameters) GetSentimentAnalysisRequestConfig() *SentimentAnalysisRequestConfig { + if m != nil { + return m.SentimentAnalysisRequestConfig + } + return nil +} + +// Represents the query input. It can contain either: +// +// 1. An audio config which +// instructs the speech recognizer how to process the speech audio. +// +// 2. A conversational query in the form of text,. +// +// 3. An event that specifies which intent to trigger. +type QueryInput struct { + // Required. The input specification. 
+ // + // Types that are valid to be assigned to Input: + // *QueryInput_AudioConfig + // *QueryInput_Text + // *QueryInput_Event + Input isQueryInput_Input `protobuf_oneof:"input"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryInput) Reset() { *m = QueryInput{} } +func (m *QueryInput) String() string { return proto.CompactTextString(m) } +func (*QueryInput) ProtoMessage() {} +func (*QueryInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{3} +} +func (m *QueryInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryInput.Unmarshal(m, b) +} +func (m *QueryInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryInput.Marshal(b, m, deterministic) +} +func (dst *QueryInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryInput.Merge(dst, src) +} +func (m *QueryInput) XXX_Size() int { + return xxx_messageInfo_QueryInput.Size(m) +} +func (m *QueryInput) XXX_DiscardUnknown() { + xxx_messageInfo_QueryInput.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryInput proto.InternalMessageInfo + +type isQueryInput_Input interface { + isQueryInput_Input() +} + +type QueryInput_AudioConfig struct { + AudioConfig *InputAudioConfig `protobuf:"bytes,1,opt,name=audio_config,json=audioConfig,proto3,oneof"` +} + +type QueryInput_Text struct { + Text *TextInput `protobuf:"bytes,2,opt,name=text,proto3,oneof"` +} + +type QueryInput_Event struct { + Event *EventInput `protobuf:"bytes,3,opt,name=event,proto3,oneof"` +} + +func (*QueryInput_AudioConfig) isQueryInput_Input() {} + +func (*QueryInput_Text) isQueryInput_Input() {} + +func (*QueryInput_Event) isQueryInput_Input() {} + +func (m *QueryInput) GetInput() isQueryInput_Input { + if m != nil { + return m.Input + } + return nil +} + +func (m *QueryInput) GetAudioConfig() *InputAudioConfig { + if x, ok := m.GetInput().(*QueryInput_AudioConfig); ok { + return x.AudioConfig + } + return nil +} + +func (m *QueryInput) GetText() *TextInput { + if x, ok := m.GetInput().(*QueryInput_Text); ok { + return x.Text + } + return nil +} + +func (m *QueryInput) GetEvent() *EventInput { + if x, ok := m.GetInput().(*QueryInput_Event); ok { + return x.Event + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*QueryInput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _QueryInput_OneofMarshaler, _QueryInput_OneofUnmarshaler, _QueryInput_OneofSizer, []interface{}{ + (*QueryInput_AudioConfig)(nil), + (*QueryInput_Text)(nil), + (*QueryInput_Event)(nil), + } +} + +func _QueryInput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*QueryInput) + // input + switch x := m.Input.(type) { + case *QueryInput_AudioConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AudioConfig); err != nil { + return err + } + case *QueryInput_Text: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Text); err != nil { + return err + } + case *QueryInput_Event: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Event); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("QueryInput.Input has unexpected type %T", x) + } + return nil +} + +func _QueryInput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*QueryInput) + switch tag { + case 1: // input.audio_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InputAudioConfig) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_AudioConfig{msg} + return true, err + case 2: // input.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TextInput) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_Text{msg} + return true, err + case 3: // input.event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EventInput) + err := b.DecodeMessage(msg) + m.Input = &QueryInput_Event{msg} + return true, err + default: + return false, nil + } +} + +func _QueryInput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*QueryInput) + // input + switch x := m.Input.(type) { + case *QueryInput_AudioConfig: + s := proto.Size(x.AudioConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *QueryInput_Text: + s := proto.Size(x.Text) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *QueryInput_Event: + s := proto.Size(x.Event) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents the result of conversational query or event processing. +type QueryResult struct { + // The original conversational query text: + // - If natural language text was provided as input, `query_text` contains + // a copy of the input. + // - If natural language speech audio was provided as input, `query_text` + // contains the speech recognition result. If speech recognizer produced + // multiple alternatives, a particular one is picked. + // - If an event was provided as input, `query_text` is not set. + QueryText string `protobuf:"bytes,1,opt,name=query_text,json=queryText,proto3" json:"query_text,omitempty"` + // The language that was triggered during intent detection. + // See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. 
+ LanguageCode string `protobuf:"bytes,15,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The Speech recognition confidence between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. The default of 0.0 is a sentinel value indicating that confidence + // was not set. + // + // This field is not guaranteed to be accurate or set. In particular this + // field isn't set for StreamingDetectIntent since the streaming endpoint has + // separate confidence estimates per portion of the audio in + // StreamingRecognitionResult. + SpeechRecognitionConfidence float32 `protobuf:"fixed32,2,opt,name=speech_recognition_confidence,json=speechRecognitionConfidence,proto3" json:"speech_recognition_confidence,omitempty"` + // The action name from the matched intent. + Action string `protobuf:"bytes,3,opt,name=action,proto3" json:"action,omitempty"` + // The collection of extracted parameters. + Parameters *_struct.Struct `protobuf:"bytes,4,opt,name=parameters,proto3" json:"parameters,omitempty"` + // This field is set to: + // - `false` if the matched intent has required parameters and not all of + // the required parameter values have been collected. + // - `true` if all required parameter values have been collected, or if the + // matched intent doesn't contain any required parameters. + AllRequiredParamsPresent bool `protobuf:"varint,5,opt,name=all_required_params_present,json=allRequiredParamsPresent,proto3" json:"all_required_params_present,omitempty"` + // The text to be pronounced to the user or shown on the screen. + // Note: This is a legacy field, `fulfillment_messages` should be preferred. + FulfillmentText string `protobuf:"bytes,6,opt,name=fulfillment_text,json=fulfillmentText,proto3" json:"fulfillment_text,omitempty"` + // The collection of rich messages to present to the user. + FulfillmentMessages []*Intent_Message `protobuf:"bytes,7,rep,name=fulfillment_messages,json=fulfillmentMessages,proto3" json:"fulfillment_messages,omitempty"` + // If the query was fulfilled by a webhook call, this field is set to the + // value of the `source` field returned in the webhook response. + WebhookSource string `protobuf:"bytes,8,opt,name=webhook_source,json=webhookSource,proto3" json:"webhook_source,omitempty"` + // If the query was fulfilled by a webhook call, this field is set to the + // value of the `payload` field returned in the webhook response. + WebhookPayload *_struct.Struct `protobuf:"bytes,9,opt,name=webhook_payload,json=webhookPayload,proto3" json:"webhook_payload,omitempty"` + // The collection of output contexts. If applicable, + // `output_contexts.parameters` contains entries with name + // `.original` containing the original parameter values + // before the query. + OutputContexts []*Context `protobuf:"bytes,10,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // The intent that matched the conversational query. Some, not + // all fields are filled in this message, including but not limited to: + // `name`, `display_name` and `webhook_state`. + Intent *Intent `protobuf:"bytes,11,opt,name=intent,proto3" json:"intent,omitempty"` + // The intent detection confidence. Values range from 0.0 + // (completely uncertain) to 1.0 (completely certain). + // If there are `multiple knowledge_answers` messages, this value is set to + // the greatest `knowledgeAnswers.match_confidence` value in the list. 
+ IntentDetectionConfidence float32 `protobuf:"fixed32,12,opt,name=intent_detection_confidence,json=intentDetectionConfidence,proto3" json:"intent_detection_confidence,omitempty"` + // The free-form diagnostic info. For example, this field could contain + // webhook call latency. The string keys of the Struct's fields map can change + // without notice. + DiagnosticInfo *_struct.Struct `protobuf:"bytes,14,opt,name=diagnostic_info,json=diagnosticInfo,proto3" json:"diagnostic_info,omitempty"` + // The sentiment analysis result, which depends on the + // `sentiment_analysis_request_config` specified in the request. + SentimentAnalysisResult *SentimentAnalysisResult `protobuf:"bytes,17,opt,name=sentiment_analysis_result,json=sentimentAnalysisResult,proto3" json:"sentiment_analysis_result,omitempty"` + // The result from Knowledge Connector (if any), ordered by decreasing + // `KnowledgeAnswers.match_confidence`. + KnowledgeAnswers *KnowledgeAnswers `protobuf:"bytes,18,opt,name=knowledge_answers,json=knowledgeAnswers,proto3" json:"knowledge_answers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResult) Reset() { *m = QueryResult{} } +func (m *QueryResult) String() string { return proto.CompactTextString(m) } +func (*QueryResult) ProtoMessage() {} +func (*QueryResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{4} +} +func (m *QueryResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResult.Unmarshal(m, b) +} +func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResult.Marshal(b, m, deterministic) +} +func (dst *QueryResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResult.Merge(dst, src) +} +func (m *QueryResult) XXX_Size() int { + return xxx_messageInfo_QueryResult.Size(m) +} +func (m *QueryResult) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResult.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResult proto.InternalMessageInfo + +func (m *QueryResult) GetQueryText() string { + if m != nil { + return m.QueryText + } + return "" +} + +func (m *QueryResult) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *QueryResult) GetSpeechRecognitionConfidence() float32 { + if m != nil { + return m.SpeechRecognitionConfidence + } + return 0 +} + +func (m *QueryResult) GetAction() string { + if m != nil { + return m.Action + } + return "" +} + +func (m *QueryResult) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *QueryResult) GetAllRequiredParamsPresent() bool { + if m != nil { + return m.AllRequiredParamsPresent + } + return false +} + +func (m *QueryResult) GetFulfillmentText() string { + if m != nil { + return m.FulfillmentText + } + return "" +} + +func (m *QueryResult) GetFulfillmentMessages() []*Intent_Message { + if m != nil { + return m.FulfillmentMessages + } + return nil +} + +func (m *QueryResult) GetWebhookSource() string { + if m != nil { + return m.WebhookSource + } + return "" +} + +func (m *QueryResult) GetWebhookPayload() *_struct.Struct { + if m != nil { + return m.WebhookPayload + } + return nil +} + +func (m *QueryResult) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *QueryResult) GetIntent() *Intent { + if m != nil { + return m.Intent + } + return nil +} + +func (m *QueryResult) 
GetIntentDetectionConfidence() float32 { + if m != nil { + return m.IntentDetectionConfidence + } + return 0 +} + +func (m *QueryResult) GetDiagnosticInfo() *_struct.Struct { + if m != nil { + return m.DiagnosticInfo + } + return nil +} + +func (m *QueryResult) GetSentimentAnalysisResult() *SentimentAnalysisResult { + if m != nil { + return m.SentimentAnalysisResult + } + return nil +} + +func (m *QueryResult) GetKnowledgeAnswers() *KnowledgeAnswers { + if m != nil { + return m.KnowledgeAnswers + } + return nil +} + +// Represents the result of querying a Knowledge base. +type KnowledgeAnswers struct { + // A list of answers from Knowledge Connector. + Answers []*KnowledgeAnswers_Answer `protobuf:"bytes,1,rep,name=answers,proto3" json:"answers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KnowledgeAnswers) Reset() { *m = KnowledgeAnswers{} } +func (m *KnowledgeAnswers) String() string { return proto.CompactTextString(m) } +func (*KnowledgeAnswers) ProtoMessage() {} +func (*KnowledgeAnswers) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{5} +} +func (m *KnowledgeAnswers) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnowledgeAnswers.Unmarshal(m, b) +} +func (m *KnowledgeAnswers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnowledgeAnswers.Marshal(b, m, deterministic) +} +func (dst *KnowledgeAnswers) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnowledgeAnswers.Merge(dst, src) +} +func (m *KnowledgeAnswers) XXX_Size() int { + return xxx_messageInfo_KnowledgeAnswers.Size(m) +} +func (m *KnowledgeAnswers) XXX_DiscardUnknown() { + xxx_messageInfo_KnowledgeAnswers.DiscardUnknown(m) +} + +var xxx_messageInfo_KnowledgeAnswers proto.InternalMessageInfo + +func (m *KnowledgeAnswers) GetAnswers() []*KnowledgeAnswers_Answer { + if m != nil { + return m.Answers + } + return nil +} + +// An answer from Knowledge Connector. +type KnowledgeAnswers_Answer struct { + // Indicates which Knowledge Document this answer was extracted from. + // Format: `projects//knowledgeBases//documents/`. + Source string `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // The corresponding FAQ question if the answer was extracted from a FAQ + // Document, empty otherwise. + FaqQuestion string `protobuf:"bytes,2,opt,name=faq_question,json=faqQuestion,proto3" json:"faq_question,omitempty"` + // The piece of text from the `source` knowledge base document that answers + // this conversational query. + Answer string `protobuf:"bytes,3,opt,name=answer,proto3" json:"answer,omitempty"` + // The system's confidence level that this knowledge answer is a good match + // for this conversational query. + // NOTE: The confidence level for a given `` pair may change + // without notice, as it depends on models that are constantly being + // improved. However, it will change less frequently than the confidence + // score below, and should be preferred for referencing the quality of an + // answer. + MatchConfidenceLevel KnowledgeAnswers_Answer_MatchConfidenceLevel `protobuf:"varint,4,opt,name=match_confidence_level,json=matchConfidenceLevel,proto3,enum=google.cloud.dialogflow.v2beta1.KnowledgeAnswers_Answer_MatchConfidenceLevel" json:"match_confidence_level,omitempty"` + // The system's confidence score that this Knowledge answer is a good match + // for this conversational query. 
+ // The range is from 0.0 (completely uncertain) to 1.0 (completely certain). + // Note: The confidence score is likely to vary somewhat (possibly even for + // identical requests), as the underlying model is under constant + // improvement. It may be deprecated in the future. We recommend using + // `match_confidence_level` which should be generally more stable. + MatchConfidence float32 `protobuf:"fixed32,5,opt,name=match_confidence,json=matchConfidence,proto3" json:"match_confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KnowledgeAnswers_Answer) Reset() { *m = KnowledgeAnswers_Answer{} } +func (m *KnowledgeAnswers_Answer) String() string { return proto.CompactTextString(m) } +func (*KnowledgeAnswers_Answer) ProtoMessage() {} +func (*KnowledgeAnswers_Answer) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{5, 0} +} +func (m *KnowledgeAnswers_Answer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KnowledgeAnswers_Answer.Unmarshal(m, b) +} +func (m *KnowledgeAnswers_Answer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KnowledgeAnswers_Answer.Marshal(b, m, deterministic) +} +func (dst *KnowledgeAnswers_Answer) XXX_Merge(src proto.Message) { + xxx_messageInfo_KnowledgeAnswers_Answer.Merge(dst, src) +} +func (m *KnowledgeAnswers_Answer) XXX_Size() int { + return xxx_messageInfo_KnowledgeAnswers_Answer.Size(m) +} +func (m *KnowledgeAnswers_Answer) XXX_DiscardUnknown() { + xxx_messageInfo_KnowledgeAnswers_Answer.DiscardUnknown(m) +} + +var xxx_messageInfo_KnowledgeAnswers_Answer proto.InternalMessageInfo + +func (m *KnowledgeAnswers_Answer) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *KnowledgeAnswers_Answer) GetFaqQuestion() string { + if m != nil { + return m.FaqQuestion + } + return "" +} + +func (m *KnowledgeAnswers_Answer) GetAnswer() string { + if m != nil { + return m.Answer + } + return "" +} + +func (m *KnowledgeAnswers_Answer) GetMatchConfidenceLevel() KnowledgeAnswers_Answer_MatchConfidenceLevel { + if m != nil { + return m.MatchConfidenceLevel + } + return KnowledgeAnswers_Answer_MATCH_CONFIDENCE_LEVEL_UNSPECIFIED +} + +func (m *KnowledgeAnswers_Answer) GetMatchConfidence() float32 { + if m != nil { + return m.MatchConfidence + } + return 0 +} + +// The top-level message sent by the client to the +// `StreamingDetectIntent` method. +// +// Multiple request messages should be sent in order: +// +// 1. The first message must contain `session`, `query_input` plus optionally +// `query_params` and/or `single_utterance`. If the client wants to receive +// an audio response, it should also contain `output_audio_config`. +// The message must not contain `input_audio`. +// +// 2. If `query_input` was set to a streaming input audio config, +// all subsequent messages must contain only `input_audio`. +// Otherwise, finish the request stream. +type StreamingDetectIntentRequest struct { + // Required. The name of the session the query is sent to. + // Format of the session name: + // `projects//agent/sessions/`, or + // `projects//agent/environments//users//sessions/`. If `Environment ID` is not specified, we assume + // default 'draft' environment. If `User ID` is not specified, we are using + // "-". It’s up to the API caller to choose an appropriate `Session ID` and + // `User Id`. 
They can be a random numbers or some type of user and session + // identifiers (preferably hashed). The length of the `Session ID` and + // `User ID` must not exceed 36 characters. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Optional. The parameters of this query. + QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"` + // Required. The input specification. It can be set to: + // + // 1. an audio config which instructs the speech recognizer how to process + // the speech audio, + // + // 2. a conversational query in the form of text, or + // + // 3. an event that specifies which intent to trigger. + QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"` + // Optional. If `false` (default), recognition does not cease until the + // client closes the stream. + // If `true`, the recognizer will detect a single spoken utterance in input + // audio. Recognition ceases when it detects the audio's voice has + // stopped or paused. In this case, once a detected intent is received, the + // client should close the stream and start a new request with a new stream as + // needed. + // This setting is ignored when `query_input` is a piece of text or an event. + SingleUtterance bool `protobuf:"varint,4,opt,name=single_utterance,json=singleUtterance,proto3" json:"single_utterance,omitempty"` + // Optional. Instructs the speech synthesizer how to generate the output + // audio. If this field is not set and agent-level speech synthesizer is not + // configured, no output audio is generated. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,5,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + // Optional. The input audio content to be recognized. Must be sent if + // `query_input` was set to a streaming input audio config. The complete audio + // over all streaming messages must not exceed 1 minute. 
+ InputAudio []byte `protobuf:"bytes,6,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingDetectIntentRequest) Reset() { *m = StreamingDetectIntentRequest{} } +func (m *StreamingDetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingDetectIntentRequest) ProtoMessage() {} +func (*StreamingDetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{6} +} +func (m *StreamingDetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingDetectIntentRequest.Unmarshal(m, b) +} +func (m *StreamingDetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingDetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingDetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingDetectIntentRequest.Merge(dst, src) +} +func (m *StreamingDetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_StreamingDetectIntentRequest.Size(m) +} +func (m *StreamingDetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingDetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingDetectIntentRequest proto.InternalMessageInfo + +func (m *StreamingDetectIntentRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *StreamingDetectIntentRequest) GetQueryParams() *QueryParameters { + if m != nil { + return m.QueryParams + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetQueryInput() *QueryInput { + if m != nil { + return m.QueryInput + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetSingleUtterance() bool { + if m != nil { + return m.SingleUtterance + } + return false +} + +func (m *StreamingDetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +func (m *StreamingDetectIntentRequest) GetInputAudio() []byte { + if m != nil { + return m.InputAudio + } + return nil +} + +// The top-level message returned from the +// `StreamingDetectIntent` method. +// +// Multiple response messages can be returned in order: +// +// 1. If the input was set to streaming audio, the first one or more messages +// contain `recognition_result`. Each `recognition_result` represents a more +// complete transcript of what the user said. The last `recognition_result` +// has `is_final` set to `true`. +// +// 2. The next message contains `response_id`, `query_result`, +// `alternative_query_results` and optionally `webhook_status` if a WebHook +// was called. +// +// 3. If `output_audio_config` was specified in the request or agent-level +// speech synthesizer is configured, all subsequent messages contain +// `output_audio` and `output_audio_config`. +type StreamingDetectIntentResponse struct { + // The unique identifier of the response. It can be used to + // locate a response in the training example set or for reporting issues. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The result of speech recognition. + RecognitionResult *StreamingRecognitionResult `protobuf:"bytes,2,opt,name=recognition_result,json=recognitionResult,proto3" json:"recognition_result,omitempty"` + // The selected results of the conversational query or event processing. 
+ // See `alternative_query_results` for additional potential results. + QueryResult *QueryResult `protobuf:"bytes,3,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // If Knowledge Connectors are enabled, there could be more than one result + // returned for a given query or event, and this field will contain all + // results except for the top one, which is captured in query_result. The + // alternative results are ordered by decreasing + // `QueryResult.intent_detection_confidence`. If Knowledge Connectors are + // disabled, this field will be empty until multiple responses for regular + // intents are supported, at which point those additional results will be + // surfaced here. + AlternativeQueryResults []*QueryResult `protobuf:"bytes,7,rep,name=alternative_query_results,json=alternativeQueryResults,proto3" json:"alternative_query_results,omitempty"` + // Specifies the status of the webhook request. + WebhookStatus *status.Status `protobuf:"bytes,4,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"` + // The audio data bytes encoded as specified in the request. + OutputAudio []byte `protobuf:"bytes,5,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"` + // The config used by the speech synthesizer to generate the output audio. + OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingDetectIntentResponse) Reset() { *m = StreamingDetectIntentResponse{} } +func (m *StreamingDetectIntentResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingDetectIntentResponse) ProtoMessage() {} +func (*StreamingDetectIntentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{7} +} +func (m *StreamingDetectIntentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingDetectIntentResponse.Unmarshal(m, b) +} +func (m *StreamingDetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingDetectIntentResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingDetectIntentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingDetectIntentResponse.Merge(dst, src) +} +func (m *StreamingDetectIntentResponse) XXX_Size() int { + return xxx_messageInfo_StreamingDetectIntentResponse.Size(m) +} +func (m *StreamingDetectIntentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingDetectIntentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingDetectIntentResponse proto.InternalMessageInfo + +func (m *StreamingDetectIntentResponse) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *StreamingDetectIntentResponse) GetRecognitionResult() *StreamingRecognitionResult { + if m != nil { + return m.RecognitionResult + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetAlternativeQueryResults() []*QueryResult { + if m != nil { + return m.AlternativeQueryResults + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetWebhookStatus() *status.Status { + if m != nil { + return m.WebhookStatus + } + return nil +} + +func (m 
*StreamingDetectIntentResponse) GetOutputAudio() []byte { + if m != nil { + return m.OutputAudio + } + return nil +} + +func (m *StreamingDetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig { + if m != nil { + return m.OutputAudioConfig + } + return nil +} + +// Contains a speech recognition result corresponding to a portion of the audio +// that is currently being processed or an indication that this is the end +// of the single requested utterance. +// +// Example: +// +// 1. transcript: "tube" +// +// 2. transcript: "to be a" +// +// 3. transcript: "to be" +// +// 4. transcript: "to be or not to be" +// is_final: true +// +// 5. transcript: " that's" +// +// 6. transcript: " that is" +// +// 7. message_type: `MESSAGE_TYPE_END_OF_SINGLE_UTTERANCE` +// +// 8. transcript: " that is the question" +// is_final: true +// +// Only two of the responses contain final results (#4 and #8 indicated by +// `is_final: true`). Concatenating these generates the full transcript: "to be +// or not to be that is the question". +// +// In each response we populate: +// +// * for `MESSAGE_TYPE_TRANSCRIPT`: `transcript` and possibly `is_final`. +// +// * for `MESSAGE_TYPE_END_OF_SINGLE_UTTERANCE`: only `message_type`. +type StreamingRecognitionResult struct { + // Type of the result message. + MessageType StreamingRecognitionResult_MessageType `protobuf:"varint,1,opt,name=message_type,json=messageType,proto3,enum=google.cloud.dialogflow.v2beta1.StreamingRecognitionResult_MessageType" json:"message_type,omitempty"` + // Transcript text representing the words that the user spoke. + // Populated if and only if `message_type` = `MESSAGE_TYPE_TRANSCRIPT`. + Transcript string `protobuf:"bytes,2,opt,name=transcript,proto3" json:"transcript,omitempty"` + // If `false`, the `StreamingRecognitionResult` represents an + // interim result that may change. If `true`, the recognizer will not return + // any further hypotheses about this piece of the audio. May only be populated + // for `message_type` = `MESSAGE_TYPE_TRANSCRIPT`. + IsFinal bool `protobuf:"varint,3,opt,name=is_final,json=isFinal,proto3" json:"is_final,omitempty"` + // The Speech confidence between 0.0 and 1.0 for the current portion of audio. + // A higher number indicates an estimated greater likelihood that the + // recognized words are correct. The default of 0.0 is a sentinel value + // indicating that confidence was not set. + // + // This field is typically only provided if `is_final` is true and you should + // not rely on it being accurate or even set. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionResult) Reset() { *m = StreamingRecognitionResult{} } +func (m *StreamingRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionResult) ProtoMessage() {} +func (*StreamingRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{8} +} +func (m *StreamingRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionResult.Unmarshal(m, b) +} +func (m *StreamingRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionResult.Merge(dst, src) +} +func (m *StreamingRecognitionResult) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionResult.Size(m) +} +func (m *StreamingRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionResult proto.InternalMessageInfo + +func (m *StreamingRecognitionResult) GetMessageType() StreamingRecognitionResult_MessageType { + if m != nil { + return m.MessageType + } + return StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED +} + +func (m *StreamingRecognitionResult) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *StreamingRecognitionResult) GetIsFinal() bool { + if m != nil { + return m.IsFinal + } + return false +} + +func (m *StreamingRecognitionResult) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Represents the natural language text to be processed. +type TextInput struct { + // Required. The UTF-8 encoded natural language text to be processed. + // Text length must not exceed 256 characters. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Required. The language of this conversational query. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextInput) Reset() { *m = TextInput{} } +func (m *TextInput) String() string { return proto.CompactTextString(m) } +func (*TextInput) ProtoMessage() {} +func (*TextInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{9} +} +func (m *TextInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextInput.Unmarshal(m, b) +} +func (m *TextInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextInput.Marshal(b, m, deterministic) +} +func (dst *TextInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextInput.Merge(dst, src) +} +func (m *TextInput) XXX_Size() int { + return xxx_messageInfo_TextInput.Size(m) +} +func (m *TextInput) XXX_DiscardUnknown() { + xxx_messageInfo_TextInput.DiscardUnknown(m) +} + +var xxx_messageInfo_TextInput proto.InternalMessageInfo + +func (m *TextInput) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *TextInput) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Events allow for matching intents by event name instead of the natural +// language input. For instance, input `` can trigger a personalized welcome response. +// The parameter `name` may be used by the agent in the response: +// `"Hello #welcome_event.name! What can I do for you today?"`. +type EventInput struct { + // Required. The unique identifier of the event. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The collection of parameters associated with the event. + Parameters *_struct.Struct `protobuf:"bytes,2,opt,name=parameters,proto3" json:"parameters,omitempty"` + // Required. The language of this query. See [Language + // Support](https://cloud.google.com/dialogflow-enterprise/docs/reference/language) + // for a list of the currently supported language codes. Note that queries in + // the same session do not necessarily need to specify the same language. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventInput) Reset() { *m = EventInput{} } +func (m *EventInput) String() string { return proto.CompactTextString(m) } +func (*EventInput) ProtoMessage() {} +func (*EventInput) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{10} +} +func (m *EventInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventInput.Unmarshal(m, b) +} +func (m *EventInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventInput.Marshal(b, m, deterministic) +} +func (dst *EventInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventInput.Merge(dst, src) +} +func (m *EventInput) XXX_Size() int { + return xxx_messageInfo_EventInput.Size(m) +} +func (m *EventInput) XXX_DiscardUnknown() { + xxx_messageInfo_EventInput.DiscardUnknown(m) +} + +var xxx_messageInfo_EventInput proto.InternalMessageInfo + +func (m *EventInput) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventInput) GetParameters() *_struct.Struct { + if m != nil { + return m.Parameters + } + return nil +} + +func (m *EventInput) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Configures the types of sentiment analysis to perform. +type SentimentAnalysisRequestConfig struct { + // Optional. Instructs the service to perform sentiment analysis on + // `query_text`. If not provided, sentiment analysis is not performed on + // `query_text`. + AnalyzeQueryTextSentiment bool `protobuf:"varint,1,opt,name=analyze_query_text_sentiment,json=analyzeQueryTextSentiment,proto3" json:"analyze_query_text_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SentimentAnalysisRequestConfig) Reset() { *m = SentimentAnalysisRequestConfig{} } +func (m *SentimentAnalysisRequestConfig) String() string { return proto.CompactTextString(m) } +func (*SentimentAnalysisRequestConfig) ProtoMessage() {} +func (*SentimentAnalysisRequestConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{11} +} +func (m *SentimentAnalysisRequestConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Unmarshal(m, b) +} +func (m *SentimentAnalysisRequestConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Marshal(b, m, deterministic) +} +func (dst *SentimentAnalysisRequestConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SentimentAnalysisRequestConfig.Merge(dst, src) +} +func (m *SentimentAnalysisRequestConfig) XXX_Size() int { + return xxx_messageInfo_SentimentAnalysisRequestConfig.Size(m) +} +func (m *SentimentAnalysisRequestConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SentimentAnalysisRequestConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SentimentAnalysisRequestConfig proto.InternalMessageInfo + +func (m *SentimentAnalysisRequestConfig) GetAnalyzeQueryTextSentiment() bool { + if m != nil { + return m.AnalyzeQueryTextSentiment + } + return false +} + +// The result of sentiment analysis as configured by +// `sentiment_analysis_request_config`. 
+type SentimentAnalysisResult struct { + // The sentiment analysis result for `query_text`. + QueryTextSentiment *Sentiment `protobuf:"bytes,1,opt,name=query_text_sentiment,json=queryTextSentiment,proto3" json:"query_text_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SentimentAnalysisResult) Reset() { *m = SentimentAnalysisResult{} } +func (m *SentimentAnalysisResult) String() string { return proto.CompactTextString(m) } +func (*SentimentAnalysisResult) ProtoMessage() {} +func (*SentimentAnalysisResult) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{12} +} +func (m *SentimentAnalysisResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SentimentAnalysisResult.Unmarshal(m, b) +} +func (m *SentimentAnalysisResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SentimentAnalysisResult.Marshal(b, m, deterministic) +} +func (dst *SentimentAnalysisResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_SentimentAnalysisResult.Merge(dst, src) +} +func (m *SentimentAnalysisResult) XXX_Size() int { + return xxx_messageInfo_SentimentAnalysisResult.Size(m) +} +func (m *SentimentAnalysisResult) XXX_DiscardUnknown() { + xxx_messageInfo_SentimentAnalysisResult.DiscardUnknown(m) +} + +var xxx_messageInfo_SentimentAnalysisResult proto.InternalMessageInfo + +func (m *SentimentAnalysisResult) GetQueryTextSentiment() *Sentiment { + if m != nil { + return m.QueryTextSentiment + } + return nil +} + +// The sentiment, such as positive/negative feeling or association, for a unit +// of analysis, such as the query text. +type Sentiment struct { + // Sentiment score between -1.0 (negative sentiment) and 1.0 (positive + // sentiment). + Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` + // A non-negative number in the [0, +inf) range, which represents the absolute + // magnitude of sentiment, regardless of score (positive or negative). 
+ Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentiment) Reset() { *m = Sentiment{} } +func (m *Sentiment) String() string { return proto.CompactTextString(m) } +func (*Sentiment) ProtoMessage() {} +func (*Sentiment) Descriptor() ([]byte, []int) { + return fileDescriptor_session_b99a06fb068de54e, []int{13} +} +func (m *Sentiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentiment.Unmarshal(m, b) +} +func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic) +} +func (dst *Sentiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentiment.Merge(dst, src) +} +func (m *Sentiment) XXX_Size() int { + return xxx_messageInfo_Sentiment.Size(m) +} +func (m *Sentiment) XXX_DiscardUnknown() { + xxx_messageInfo_Sentiment.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentiment proto.InternalMessageInfo + +func (m *Sentiment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *Sentiment) GetMagnitude() float32 { + if m != nil { + return m.Magnitude + } + return 0 +} + +func init() { + proto.RegisterType((*DetectIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.DetectIntentRequest") + proto.RegisterType((*DetectIntentResponse)(nil), "google.cloud.dialogflow.v2beta1.DetectIntentResponse") + proto.RegisterType((*QueryParameters)(nil), "google.cloud.dialogflow.v2beta1.QueryParameters") + proto.RegisterType((*QueryInput)(nil), "google.cloud.dialogflow.v2beta1.QueryInput") + proto.RegisterType((*QueryResult)(nil), "google.cloud.dialogflow.v2beta1.QueryResult") + proto.RegisterType((*KnowledgeAnswers)(nil), "google.cloud.dialogflow.v2beta1.KnowledgeAnswers") + proto.RegisterType((*KnowledgeAnswers_Answer)(nil), "google.cloud.dialogflow.v2beta1.KnowledgeAnswers.Answer") + proto.RegisterType((*StreamingDetectIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.StreamingDetectIntentRequest") + proto.RegisterType((*StreamingDetectIntentResponse)(nil), "google.cloud.dialogflow.v2beta1.StreamingDetectIntentResponse") + proto.RegisterType((*StreamingRecognitionResult)(nil), "google.cloud.dialogflow.v2beta1.StreamingRecognitionResult") + proto.RegisterType((*TextInput)(nil), "google.cloud.dialogflow.v2beta1.TextInput") + proto.RegisterType((*EventInput)(nil), "google.cloud.dialogflow.v2beta1.EventInput") + proto.RegisterType((*SentimentAnalysisRequestConfig)(nil), "google.cloud.dialogflow.v2beta1.SentimentAnalysisRequestConfig") + proto.RegisterType((*SentimentAnalysisResult)(nil), "google.cloud.dialogflow.v2beta1.SentimentAnalysisResult") + proto.RegisterType((*Sentiment)(nil), "google.cloud.dialogflow.v2beta1.Sentiment") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.KnowledgeAnswers_Answer_MatchConfidenceLevel", KnowledgeAnswers_Answer_MatchConfidenceLevel_name, KnowledgeAnswers_Answer_MatchConfidenceLevel_value) + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.StreamingRecognitionResult_MessageType", StreamingRecognitionResult_MessageType_name, StreamingRecognitionResult_MessageType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// SessionsClient is the client API for Sessions service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SessionsClient interface { + // Processes a natural language query and returns structured, actionable data + // as a result. This method is not idempotent, because it may cause contexts + // and session entity types to be updated, which in turn might affect + // results of future queries. + DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error) + // Processes a natural language query in audio format in a streaming fashion + // and returns structured, actionable data as a result. This method is only + // available via the gRPC API (not REST). + StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error) +} + +type sessionsClient struct { + cc *grpc.ClientConn +} + +func NewSessionsClient(cc *grpc.ClientConn) SessionsClient { + return &sessionsClient{cc} +} + +func (c *sessionsClient) DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error) { + out := new(DetectIntentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.Sessions/DetectIntent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionsClient) StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error) { + stream, err := c.cc.NewStream(ctx, &_Sessions_serviceDesc.Streams[0], "/google.cloud.dialogflow.v2beta1.Sessions/StreamingDetectIntent", opts...) + if err != nil { + return nil, err + } + x := &sessionsStreamingDetectIntentClient{stream} + return x, nil +} + +type Sessions_StreamingDetectIntentClient interface { + Send(*StreamingDetectIntentRequest) error + Recv() (*StreamingDetectIntentResponse, error) + grpc.ClientStream +} + +type sessionsStreamingDetectIntentClient struct { + grpc.ClientStream +} + +func (x *sessionsStreamingDetectIntentClient) Send(m *StreamingDetectIntentRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *sessionsStreamingDetectIntentClient) Recv() (*StreamingDetectIntentResponse, error) { + m := new(StreamingDetectIntentResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SessionsServer is the server API for Sessions service. +type SessionsServer interface { + // Processes a natural language query and returns structured, actionable data + // as a result. This method is not idempotent, because it may cause contexts + // and session entity types to be updated, which in turn might affect + // results of future queries. + DetectIntent(context.Context, *DetectIntentRequest) (*DetectIntentResponse, error) + // Processes a natural language query in audio format in a streaming fashion + // and returns structured, actionable data as a result. This method is only + // available via the gRPC API (not REST). 
+ StreamingDetectIntent(Sessions_StreamingDetectIntentServer) error +} + +func RegisterSessionsServer(s *grpc.Server, srv SessionsServer) { + s.RegisterService(&_Sessions_serviceDesc, srv) +} + +func _Sessions_DetectIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetectIntentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionsServer).DetectIntent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.Sessions/DetectIntent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionsServer).DetectIntent(ctx, req.(*DetectIntentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Sessions_StreamingDetectIntent_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SessionsServer).StreamingDetectIntent(&sessionsStreamingDetectIntentServer{stream}) +} + +type Sessions_StreamingDetectIntentServer interface { + Send(*StreamingDetectIntentResponse) error + Recv() (*StreamingDetectIntentRequest, error) + grpc.ServerStream +} + +type sessionsStreamingDetectIntentServer struct { + grpc.ServerStream +} + +func (x *sessionsStreamingDetectIntentServer) Send(m *StreamingDetectIntentResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *sessionsStreamingDetectIntentServer) Recv() (*StreamingDetectIntentRequest, error) { + m := new(StreamingDetectIntentRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Sessions_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.Sessions", + HandlerType: (*SessionsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DetectIntent", + Handler: _Sessions_DetectIntent_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingDetectIntent", + Handler: _Sessions_StreamingDetectIntent_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/cloud/dialogflow/v2beta1/session.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/session.proto", fileDescriptor_session_b99a06fb068de54e) +} + +var fileDescriptor_session_b99a06fb068de54e = []byte{ + // 1876 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x58, 0xcd, 0x73, 0x1b, 0x49, + 0x15, 0xcf, 0x48, 0xfe, 0x90, 0x9f, 0x14, 0x5b, 0xe9, 0x98, 0x8d, 0xfc, 0x91, 0xc4, 0xab, 0x2d, + 0xc0, 0x9b, 0x5d, 0xa4, 0xc4, 0x7c, 0xed, 0x6e, 0x2a, 0x9b, 0xd8, 0x92, 0x6c, 0xab, 0xf0, 0x57, + 0x5a, 0x4e, 0x16, 0x52, 0xc0, 0x54, 0x7b, 0xd4, 0x9a, 0x4c, 0x32, 0xea, 0x96, 0xa7, 0x5b, 0xf6, + 0x7a, 0x29, 0x38, 0xc0, 0x09, 0x38, 0x72, 0xa2, 0x8a, 0xa2, 0x28, 0x6e, 0x2c, 0x57, 0xae, 0xfc, + 0x03, 0x5c, 0xa1, 0x8a, 0x7f, 0x80, 0x1b, 0x57, 0x0e, 0x1c, 0xa9, 0xfe, 0x18, 0x69, 0x22, 0xcb, + 0x91, 0x12, 0x48, 0x51, 0xb5, 0x27, 0xa9, 0x5f, 0xbf, 0xf7, 0xeb, 0xd7, 0xaf, 0x5f, 0xff, 0xfa, + 0xbd, 0x81, 0xaf, 0xf9, 0x9c, 0xfb, 0x21, 0x2d, 0x7b, 0x21, 0xef, 0x36, 0xcb, 0xcd, 0x80, 0x84, + 0xdc, 0x6f, 0x85, 0xfc, 0xb4, 0x7c, 0xb2, 0x76, 0x44, 0x25, 0xb9, 0x53, 0x16, 0x54, 0x88, 0x80, + 0xb3, 0x52, 0x27, 0xe2, 0x92, 0xa3, 0x9b, 0x46, 0xbd, 0xa4, 0xd5, 0x4b, 0x7d, 0xf5, 0x92, 0x55, + 0x5f, 0x5c, 0xb6, 0x78, 0xa4, 0x13, 0x94, 0x09, 0x63, 0x5c, 0x12, 0x19, 0x70, 0x26, 0x8c, 0xf9, + 0xe2, 0x42, 0x62, 0x36, 0xa2, 0x82, 0x77, 0x23, 
0x8f, 0xda, 0xa9, 0xf7, 0x46, 0x39, 0x42, 0x7c, + 0xca, 0xa4, 0x55, 0x5e, 0x1b, 0xa9, 0xdc, 0x6d, 0x06, 0xdc, 0xf5, 0x38, 0x6b, 0x05, 0xbe, 0xb5, + 0x19, 0xb9, 0x53, 0x8f, 0x33, 0x49, 0x3f, 0x8d, 0x97, 0x78, 0x7f, 0x94, 0x7a, 0xc0, 0x64, 0xdf, + 0xa1, 0x0f, 0xc7, 0x0c, 0xa3, 0x4b, 0x99, 0x0c, 0xe4, 0x99, 0x2b, 0xcf, 0x3a, 0xf1, 0xc6, 0xe3, + 0x88, 0xe9, 0xd1, 0x51, 0xb7, 0x55, 0x16, 0x32, 0xea, 0x7a, 0x31, 0xf0, 0x35, 0x3b, 0x1b, 0x75, + 0xbc, 0xb2, 0x90, 0x44, 0x76, 0xe3, 0x50, 0x16, 0xec, 0x84, 0x42, 0x2a, 0x87, 0x44, 0x86, 0xcc, + 0x6e, 0xb4, 0xf8, 0xf7, 0x14, 0x5c, 0xad, 0x52, 0x49, 0x3d, 0x59, 0xd7, 0x2e, 0x62, 0x7a, 0xdc, + 0xa5, 0x42, 0xa2, 0x02, 0x4c, 0x5b, 0x2f, 0x0a, 0xce, 0x8a, 0xb3, 0x3a, 0x83, 0xe3, 0x21, 0x6a, + 0x40, 0xee, 0xb8, 0x4b, 0xa3, 0x33, 0xb7, 0x43, 0x22, 0xd2, 0x16, 0x85, 0xd4, 0x8a, 0xb3, 0x9a, + 0x5d, 0xbb, 0x5d, 0x1a, 0x71, 0xd8, 0xa5, 0x87, 0xca, 0xe8, 0x40, 0xd9, 0x50, 0x49, 0x23, 0x81, + 0xb3, 0xc7, 0x3d, 0x81, 0x40, 0x3b, 0x60, 0x86, 0x6e, 0xc0, 0x3a, 0x5d, 0x59, 0x48, 0x6b, 0xcc, + 0xf7, 0xc6, 0xc3, 0xac, 0x2b, 0x13, 0x0c, 0xc7, 0xbd, 0xff, 0xe8, 0x08, 0xae, 0xf2, 0xae, 0xec, + 0x74, 0xa5, 0x9b, 0x3c, 0xda, 0xc2, 0x84, 0x46, 0x5d, 0x1b, 0x89, 0xba, 0xaf, 0x6d, 0xd7, 0x95, + 0x69, 0x45, 0x5b, 0xe2, 0x2b, 0x7c, 0x50, 0x84, 0x6e, 0x42, 0x56, 0xfb, 0x6a, 0x96, 0x28, 0x4c, + 0xae, 0x38, 0xab, 0x39, 0x0c, 0x5a, 0xa4, 0xd5, 0x8a, 0x7f, 0x4a, 0xc3, 0xfc, 0x8b, 0x91, 0x15, + 0x1d, 0xce, 0x04, 0x55, 0x96, 0x91, 0xfd, 0xef, 0x06, 0x4d, 0x1b, 0x5e, 0x88, 0x45, 0xf5, 0x26, + 0xda, 0x8f, 0x23, 0x1c, 0x51, 0xd1, 0x0d, 0xa5, 0x8d, 0xf0, 0xfb, 0xe3, 0x45, 0x03, 0x6b, 0x1b, + 0x1b, 0x5d, 0x33, 0x40, 0x4f, 0x61, 0x81, 0x84, 0x92, 0x46, 0x8c, 0xc8, 0xe0, 0x84, 0xba, 0x49, + 0x70, 0x51, 0x98, 0x5c, 0x49, 0xbf, 0x32, 0xfa, 0xb5, 0x04, 0x5c, 0x42, 0x2e, 0xd0, 0x87, 0x30, + 0x7b, 0x4a, 0x8f, 0x9e, 0x72, 0xfe, 0xdc, 0x35, 0x09, 0x68, 0x8f, 0x12, 0xc5, 0xf0, 0x51, 0xc7, + 0x2b, 0x35, 0xf4, 0x0c, 0xbe, 0x6c, 0x35, 0xcd, 0x10, 0xbd, 0x0d, 0xb9, 0xe4, 0xa1, 0xe9, 0xd3, + 0xca, 0xe1, 0x6c, 0x22, 0xf2, 0x17, 0x9d, 0xeb, 0xd4, 0xff, 0xf0, 0x5c, 0x8b, 0x7f, 0x9c, 0x80, + 0xb9, 0x81, 0x54, 0x45, 0x4b, 0x30, 0x23, 0x83, 0x36, 0x75, 0x3f, 0xe3, 0x8c, 0xda, 0xf3, 0xca, + 0x28, 0xc1, 0x13, 0xce, 0x28, 0xfa, 0x16, 0xe4, 0x7c, 0xca, 0xdd, 0x90, 0x7b, 0x9a, 0xbd, 0xec, + 0x69, 0x5d, 0x8d, 0xbd, 0xd1, 0x97, 0x77, 0x87, 0xc8, 0x1d, 0xe6, 0xe3, 0xac, 0x4f, 0xf9, 0x8e, + 0xd5, 0x43, 0x55, 0xc8, 0x58, 0x12, 0x51, 0x41, 0x52, 0x67, 0xb0, 0x3a, 0x72, 0x07, 0x15, 0x63, + 0x80, 0x7b, 0x96, 0xe8, 0xcb, 0x30, 0x1b, 0x51, 0x41, 0xa5, 0xdb, 0xc3, 0x52, 0x71, 0xcb, 0xe0, + 0xcb, 0x5a, 0x5a, 0x89, 0xd5, 0x9a, 0x30, 0x3f, 0x84, 0x54, 0xe2, 0xc3, 0x1f, 0x1d, 0xba, 0x86, + 0x31, 0xae, 0x69, 0xdb, 0xc3, 0xb3, 0x0e, 0xc5, 0x48, 0x0c, 0x8a, 0x04, 0xba, 0x03, 0xd3, 0x1d, + 0x72, 0x16, 0x72, 0xd2, 0xb4, 0x67, 0x72, 0x2d, 0x06, 0x8e, 0xf9, 0xaa, 0xd4, 0xd0, 0x7c, 0x85, + 0x63, 0x3d, 0x74, 0x1b, 0xe6, 0x9f, 0x33, 0x7e, 0x1a, 0xd2, 0xa6, 0x4f, 0xdd, 0x23, 0x22, 0xa8, + 0xcb, 0x48, 0x9b, 0x8a, 0x42, 0x6e, 0x25, 0xbd, 0x3a, 0x83, 0x51, 0x6f, 0x6e, 0x83, 0x08, 0xba, + 0xa7, 0x66, 0xd0, 0x2f, 0x1c, 0x78, 0x5b, 0xa8, 0x4d, 0xb4, 0x29, 0x93, 0x2e, 0x61, 0x24, 0x3c, + 0x13, 0x81, 0x70, 0x23, 0x43, 0x5c, 0x71, 0x4e, 0x80, 0x5e, 0xff, 0xfe, 0x18, 0x1b, 0xb3, 0x48, + 0xeb, 0x16, 0xc8, 0x12, 0xa0, 0x4d, 0x90, 0x1b, 0xe2, 0xa5, 0xf3, 0xc5, 0x7f, 0x39, 0x00, 0x7d, + 0x12, 0x42, 0x8f, 0x21, 0xf7, 0x42, 0x66, 0x3a, 0xda, 0x8b, 0x3b, 0x23, 0xbd, 0xa8, 0xb3, 0x17, + 0xb3, 0x70, 0xfb, 0x12, 0xce, 0x92, 0x04, 0xd9, 0x3c, 0x80, 0x09, 0x75, 
0x8e, 0x36, 0xb7, 0x6e, + 0x8d, 0xc4, 0x3b, 0xa4, 0x9f, 0x4a, 0x8d, 0xb9, 0x7d, 0x09, 0x6b, 0x4b, 0x54, 0x81, 0x49, 0x7a, + 0x42, 0xd9, 0xf8, 0xd4, 0x5a, 0x53, 0xda, 0x31, 0x86, 0xb1, 0xdd, 0x98, 0x86, 0x49, 0x4d, 0x70, + 0xc5, 0x9f, 0x67, 0x20, 0x9b, 0xb8, 0xf7, 0xe8, 0x3a, 0x18, 0xfa, 0x75, 0xb5, 0x97, 0xe6, 0x86, + 0xcc, 0x68, 0x89, 0xf2, 0x04, 0xbd, 0x03, 0x97, 0x43, 0xc2, 0xfc, 0x2e, 0xf1, 0xa9, 0xeb, 0xf1, + 0x26, 0x2d, 0xcc, 0x69, 0x8d, 0x5c, 0x2c, 0xac, 0xf0, 0x26, 0x45, 0x1b, 0x70, 0x5d, 0x74, 0x28, + 0xf5, 0x9e, 0xba, 0x11, 0xf5, 0xb8, 0xcf, 0x02, 0x75, 0x4b, 0x4c, 0x20, 0x9b, 0x94, 0x79, 0x54, + 0x6f, 0x3e, 0x85, 0x97, 0x8c, 0x12, 0xee, 0xeb, 0x54, 0x7a, 0x2a, 0xe8, 0x2d, 0x98, 0x22, 0x9e, + 0xbe, 0x85, 0x69, 0xbd, 0x82, 0x1d, 0xa1, 0x6f, 0x03, 0x74, 0x7a, 0xd7, 0xd9, 0xbe, 0x03, 0x17, + 0xe6, 0x66, 0x42, 0x15, 0xdd, 0x83, 0x25, 0x12, 0x86, 0x3a, 0xb9, 0x82, 0x88, 0x36, 0xed, 0x9b, + 0xe7, 0x76, 0xd4, 0xed, 0x62, 0x52, 0xb3, 0x7e, 0x06, 0x17, 0x48, 0x18, 0x62, 0xab, 0x61, 0xde, + 0xb3, 0x03, 0x33, 0x8f, 0xde, 0x85, 0x7c, 0xab, 0x1b, 0xb6, 0x82, 0x30, 0xd4, 0xc9, 0xaa, 0xa3, + 0x33, 0xa5, 0x3d, 0x9b, 0x4b, 0xc8, 0x75, 0x8c, 0x8e, 0x60, 0x3e, 0xa9, 0xda, 0xa6, 0x42, 0x10, + 0x9f, 0x8a, 0xc2, 0xb4, 0xbe, 0xa1, 0xe5, 0x31, 0x52, 0x48, 0x57, 0x18, 0xbb, 0xc6, 0x0e, 0x5f, + 0x4d, 0x80, 0x59, 0x99, 0x26, 0x8b, 0x1e, 0x3b, 0xeb, 0x72, 0xaa, 0x90, 0xd1, 0xce, 0xf4, 0x98, + 0x58, 0x0b, 0xd1, 0x03, 0x98, 0x8b, 0xd5, 0xe2, 0xeb, 0x3c, 0xf3, 0xf2, 0x90, 0xc5, 0xb0, 0x07, + 0xf6, 0x56, 0x3f, 0x84, 0x39, 0x4b, 0xd4, 0x3d, 0x5a, 0x82, 0x57, 0xa4, 0xb8, 0x59, 0x03, 0xd0, + 0x63, 0xb0, 0xfb, 0x30, 0x65, 0x8a, 0xa8, 0x42, 0x56, 0xfb, 0xf2, 0xd5, 0x31, 0x23, 0x82, 0xad, + 0x19, 0xfa, 0x18, 0x96, 0xcc, 0x3f, 0xb7, 0xa9, 0x5f, 0xe5, 0x81, 0xec, 0xca, 0xe9, 0xec, 0x5a, + 0x30, 0x2a, 0xd5, 0x58, 0x23, 0x91, 0x5b, 0x0f, 0x60, 0xae, 0x19, 0x10, 0x9f, 0x71, 0x21, 0x03, + 0xcf, 0x0d, 0x58, 0x8b, 0x17, 0x66, 0x47, 0x44, 0xa5, 0xaf, 0x5f, 0x67, 0x2d, 0x8e, 0x24, 0x2c, + 0x0c, 0x25, 0x2e, 0xfd, 0xc8, 0x5f, 0xd1, 0x58, 0x1f, 0xbc, 0x0e, 0x61, 0x99, 0x27, 0x59, 0x0c, + 0x9f, 0x40, 0x3f, 0x84, 0x2b, 0x7d, 0x86, 0x25, 0x4c, 0x9c, 0xaa, 0x2b, 0x80, 0xc6, 0x24, 0xa6, + 0xef, 0xc4, 0x96, 0xeb, 0xc6, 0x10, 0xe7, 0x9f, 0x0f, 0x48, 0x8a, 0x7f, 0x4b, 0x43, 0x7e, 0x50, + 0x0d, 0x61, 0x98, 0x8e, 0x97, 0x72, 0xf4, 0xc1, 0x7f, 0xf0, 0xca, 0x4b, 0x95, 0xcc, 0x2f, 0x8e, + 0x81, 0x16, 0xff, 0x99, 0x82, 0x29, 0x23, 0x53, 0xf7, 0xdc, 0x26, 0xb0, 0xe1, 0x1a, 0x3b, 0x52, + 0x35, 0x44, 0x8b, 0x1c, 0xbb, 0x9a, 0xa1, 0xe3, 0xb7, 0x78, 0x06, 0x67, 0x5b, 0xe4, 0xf8, 0xa1, + 0x15, 0x69, 0x8a, 0xd0, 0x20, 0x3d, 0x8a, 0x30, 0x90, 0x3f, 0x73, 0xe0, 0xad, 0x36, 0x91, 0xde, + 0xd3, 0x44, 0x52, 0xb8, 0x21, 0x3d, 0xa1, 0xa1, 0xe6, 0x8b, 0xd9, 0xb5, 0xdd, 0xd7, 0xdd, 0x41, + 0x69, 0x57, 0xc1, 0xf6, 0x33, 0x69, 0x47, 0x81, 0xe2, 0xf9, 0xf6, 0x10, 0xa9, 0x22, 0x8c, 0x41, + 0x27, 0x34, 0xc9, 0xa4, 0xf0, 0xdc, 0x80, 0x7e, 0xf1, 0x07, 0x30, 0x3f, 0x0c, 0x18, 0x7d, 0x05, + 0x8a, 0xbb, 0xeb, 0x87, 0x95, 0x6d, 0xb7, 0xb2, 0xbf, 0xb7, 0x59, 0xaf, 0xd6, 0xf6, 0x2a, 0x35, + 0x77, 0xa7, 0xf6, 0xb8, 0xb6, 0xe3, 0x3e, 0xda, 0x6b, 0x1c, 0xd4, 0x2a, 0xf5, 0xcd, 0x7a, 0xad, + 0x9a, 0xbf, 0x84, 0xa6, 0x21, 0xbd, 0xb3, 0xff, 0x49, 0xde, 0x41, 0x00, 0x53, 0xbb, 0xb5, 0x6a, + 0xfd, 0xd1, 0x6e, 0x3e, 0x85, 0x32, 0x30, 0xb1, 0x5d, 0xdf, 0xda, 0xce, 0xa7, 0x8b, 0xbf, 0x4c, + 0xc3, 0x72, 0x43, 0x46, 0x94, 0xb4, 0x03, 0xe6, 0x7f, 0xe1, 0x3a, 0x84, 0x77, 0x21, 0x2f, 0x02, + 0xe6, 0x87, 0xd4, 0xed, 0x4a, 0x49, 0x23, 0xa2, 0xe2, 0x6c, 0x0a, 0xa7, 0x39, 0x23, 0x7f, 0x14, + 
0x8b, 0x2f, 0x2a, 0x3a, 0x27, 0xdf, 0x60, 0x33, 0x31, 0x75, 0xae, 0x99, 0xf8, 0xc3, 0x04, 0x5c, + 0xbf, 0xe0, 0x34, 0xc6, 0xed, 0x2a, 0x9e, 0x01, 0x4a, 0x3e, 0xac, 0x2f, 0xf4, 0x16, 0x77, 0x47, + 0xd3, 0x4e, 0xbc, 0x78, 0xe2, 0xe1, 0xb5, 0xcc, 0x73, 0x25, 0x1a, 0x14, 0x9d, 0xeb, 0x60, 0xd2, + 0x6f, 0xb4, 0x83, 0x99, 0x7e, 0xb3, 0x1d, 0xcc, 0xc4, 0xeb, 0x76, 0x30, 0x93, 0xff, 0x9f, 0x0e, + 0xe6, 0xcf, 0x29, 0x58, 0xbc, 0xf8, 0xb8, 0xd0, 0x33, 0xc8, 0xd9, 0xe2, 0x42, 0xf7, 0x00, 0x3a, + 0x53, 0x66, 0xd7, 0xb6, 0xfe, 0x8b, 0x0c, 0x88, 0x8b, 0x0e, 0xdd, 0x17, 0x64, 0xdb, 0xfd, 0x01, + 0xba, 0x01, 0x20, 0x23, 0xc2, 0x84, 0x17, 0x05, 0x1d, 0x69, 0xd9, 0x38, 0x21, 0x41, 0x0b, 0x90, + 0x09, 0x84, 0xdb, 0x0a, 0x18, 0x09, 0x75, 0x8e, 0x64, 0xf0, 0x74, 0x20, 0x36, 0xd5, 0x50, 0x99, + 0x26, 0x38, 0x70, 0x42, 0x73, 0x60, 0x42, 0x52, 0xfc, 0x2e, 0x64, 0x13, 0xcb, 0xa2, 0x65, 0x28, + 0xec, 0xd6, 0x1a, 0x8d, 0xf5, 0xad, 0x9a, 0x7b, 0xf8, 0xbd, 0x83, 0xda, 0x00, 0xd7, 0xcd, 0x02, + 0x1c, 0xe2, 0xf5, 0xbd, 0x46, 0x05, 0xd7, 0x0f, 0x0e, 0xf3, 0x0e, 0x5a, 0x82, 0x6b, 0xb5, 0xbd, + 0xaa, 0xbb, 0xbf, 0xe9, 0x36, 0xea, 0x7b, 0x5b, 0x3b, 0x35, 0xf7, 0xd1, 0xe1, 0x61, 0x0d, 0xaf, + 0xef, 0x55, 0x6a, 0xf9, 0x54, 0xb1, 0x0a, 0x33, 0xbd, 0xfa, 0x19, 0x21, 0x5b, 0x79, 0x9b, 0xfb, + 0x64, 0x6a, 0xe9, 0x73, 0xe5, 0x6c, 0xea, 0x7c, 0x39, 0x5b, 0xfc, 0x09, 0x40, 0xbf, 0x84, 0x56, + 0x30, 0xaa, 0xaf, 0x89, 0x61, 0xd4, 0xff, 0x81, 0xa2, 0x34, 0x35, 0x7e, 0x51, 0x7a, 0x6e, 0xfd, + 0xf4, 0x90, 0xf5, 0x09, 0xdc, 0x78, 0x79, 0x6f, 0x83, 0xee, 0xc3, 0xb2, 0x2e, 0x42, 0x3e, 0x8b, + 0xef, 0x93, 0xda, 0x9c, 0xdb, 0x2b, 0x23, 0xb4, 0xaf, 0x19, 0xbc, 0x60, 0x75, 0x1e, 0xc6, 0xd5, + 0x7c, 0x0f, 0xb5, 0x78, 0x0a, 0xd7, 0x2e, 0xa8, 0x46, 0xd0, 0xf7, 0x61, 0xfe, 0x42, 0xcc, 0x71, + 0x1a, 0x98, 0x1e, 0x2e, 0x46, 0xc7, 0xe7, 0x17, 0xbe, 0x0f, 0x33, 0xbd, 0x01, 0x9a, 0x87, 0x49, + 0xe1, 0xf1, 0xc8, 0xc4, 0x36, 0x85, 0xcd, 0x00, 0x2d, 0xc3, 0x4c, 0x9b, 0xa8, 0x2c, 0xed, 0x36, + 0xe3, 0xce, 0xa1, 0x2f, 0x58, 0xfb, 0x4b, 0x1a, 0x32, 0xb6, 0xa5, 0x15, 0xe8, 0xb7, 0x29, 0xc8, + 0x25, 0x29, 0x15, 0x7d, 0x63, 0xa4, 0x7b, 0x43, 0xde, 0xc3, 0xc5, 0x6f, 0xbe, 0xa2, 0x95, 0x21, + 0xe6, 0xe2, 0xef, 0x9c, 0x9f, 0xfe, 0xf5, 0x1f, 0xbf, 0x4a, 0xfd, 0xda, 0x29, 0xde, 0xed, 0x7d, + 0xff, 0xfb, 0x91, 0x7d, 0x49, 0xef, 0x75, 0x22, 0xfe, 0x8c, 0x7a, 0x52, 0x94, 0x6f, 0x99, 0x2f, + 0x9a, 0xf1, 0x97, 0x41, 0x51, 0xbe, 0xf5, 0xe3, 0x8f, 0x9a, 0x09, 0xb8, 0x8f, 0x9c, 0x5b, 0x4f, + 0x3e, 0x29, 0xe2, 0x31, 0x10, 0x28, 0x3b, 0x09, 0x22, 0xce, 0x54, 0xe8, 0x94, 0xb0, 0x2b, 0x68, + 0xa4, 0x7e, 0x5f, 0x02, 0x8c, 0x7e, 0xe3, 0xc0, 0x97, 0x86, 0x3e, 0x3e, 0xe8, 0xde, 0xf8, 0xac, + 0x31, 0x2c, 0x64, 0x1f, 0xbf, 0xae, 0xb9, 0x8d, 0xdd, 0xa5, 0x55, 0xe7, 0xb6, 0xb3, 0xf1, 0xb9, + 0x03, 0xef, 0x78, 0xbc, 0x3d, 0x0a, 0x6b, 0x23, 0x67, 0x4f, 0xfc, 0x40, 0x5d, 0xad, 0x03, 0xe7, + 0x49, 0xdd, 0x1a, 0xf8, 0x5c, 0x5d, 0x9c, 0x12, 0x8f, 0xfc, 0xb2, 0x4f, 0x99, 0xbe, 0x78, 0x65, + 0x33, 0x45, 0x3a, 0x81, 0xb8, 0xf0, 0x2b, 0xed, 0xdd, 0xbe, 0xe8, 0xdf, 0x8e, 0xf3, 0xfb, 0x54, + 0xaa, 0xba, 0xf9, 0x79, 0xea, 0xe6, 0x96, 0xc1, 0xac, 0x68, 0x27, 0xaa, 0x7d, 0x27, 0x1e, 0x1b, + 0xa3, 0xa3, 0x29, 0x8d, 0xff, 0xf5, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0xb2, 0x80, 0x80, 0x30, + 0x4b, 0x17, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session_entity_type.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session_entity_type.pb.go new file mode 100644 index 0000000..b8b1032 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/session_entity_type.pb.go @@ -0,0 +1,749 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/session_entity_type.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The types of modifications for a session entity type. +type SessionEntityType_EntityOverrideMode int32 + +const ( + // Not specified. This value should be never used. + SessionEntityType_ENTITY_OVERRIDE_MODE_UNSPECIFIED SessionEntityType_EntityOverrideMode = 0 + // The collection of session entities overrides the collection of entities + // in the corresponding developer entity type. + SessionEntityType_ENTITY_OVERRIDE_MODE_OVERRIDE SessionEntityType_EntityOverrideMode = 1 + // The collection of session entities extends the collection of entities in + // the corresponding developer entity type. + // + // Note: Even in this override mode calls to `ListSessionEntityTypes`, + // `GetSessionEntityType`, `CreateSessionEntityType` and + // `UpdateSessionEntityType` only return the additional entities added in + // this session entity type. If you want to get the supplemented list, + // please call [EntityTypes.GetEntityType][google.cloud.dialogflow.v2beta1.EntityTypes.GetEntityType] on the developer entity type + // and merge. + SessionEntityType_ENTITY_OVERRIDE_MODE_SUPPLEMENT SessionEntityType_EntityOverrideMode = 2 +) + +var SessionEntityType_EntityOverrideMode_name = map[int32]string{ + 0: "ENTITY_OVERRIDE_MODE_UNSPECIFIED", + 1: "ENTITY_OVERRIDE_MODE_OVERRIDE", + 2: "ENTITY_OVERRIDE_MODE_SUPPLEMENT", +} +var SessionEntityType_EntityOverrideMode_value = map[string]int32{ + "ENTITY_OVERRIDE_MODE_UNSPECIFIED": 0, + "ENTITY_OVERRIDE_MODE_OVERRIDE": 1, + "ENTITY_OVERRIDE_MODE_SUPPLEMENT": 2, +} + +func (x SessionEntityType_EntityOverrideMode) String() string { + return proto.EnumName(SessionEntityType_EntityOverrideMode_name, int32(x)) +} +func (SessionEntityType_EntityOverrideMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{0, 0} +} + +// Represents a session entity type. +// +// Extends or replaces a developer entity type at the user session level (we +// refer to the entity types defined at the agent level as "developer entity +// types"). +// +// Note: session entity types apply to all queries, regardless of the language. +type SessionEntityType struct { + // Required. The unique identifier of this session entity type. Format: + // `projects//agent/sessions//entityTypes/`, or + // `projects//agent/environments//users//sessions//entityTypes/`. 
+ // If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. + // + // `` must be the display name of an existing entity + // type in the same agent that will be overridden or supplemented. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Indicates whether the additional data should override or + // supplement the developer entity type definition. + EntityOverrideMode SessionEntityType_EntityOverrideMode `protobuf:"varint,2,opt,name=entity_override_mode,json=entityOverrideMode,proto3,enum=google.cloud.dialogflow.v2beta1.SessionEntityType_EntityOverrideMode" json:"entity_override_mode,omitempty"` + // Required. The collection of entities associated with this session entity + // type. + Entities []*EntityType_Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SessionEntityType) Reset() { *m = SessionEntityType{} } +func (m *SessionEntityType) String() string { return proto.CompactTextString(m) } +func (*SessionEntityType) ProtoMessage() {} +func (*SessionEntityType) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{0} +} +func (m *SessionEntityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SessionEntityType.Unmarshal(m, b) +} +func (m *SessionEntityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SessionEntityType.Marshal(b, m, deterministic) +} +func (dst *SessionEntityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_SessionEntityType.Merge(dst, src) +} +func (m *SessionEntityType) XXX_Size() int { + return xxx_messageInfo_SessionEntityType.Size(m) +} +func (m *SessionEntityType) XXX_DiscardUnknown() { + xxx_messageInfo_SessionEntityType.DiscardUnknown(m) +} + +var xxx_messageInfo_SessionEntityType proto.InternalMessageInfo + +func (m *SessionEntityType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SessionEntityType) GetEntityOverrideMode() SessionEntityType_EntityOverrideMode { + if m != nil { + return m.EntityOverrideMode + } + return SessionEntityType_ENTITY_OVERRIDE_MODE_UNSPECIFIED +} + +func (m *SessionEntityType) GetEntities() []*EntityType_Entity { + if m != nil { + return m.Entities + } + return nil +} + +// The request message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes]. +type ListSessionEntityTypesRequest struct { + // Required. The session to list all session entity types from. + // Format: `projects//agent/sessions/` or + // `projects//agent/environments//users// + // sessions/`. + // If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of items to return in a single page. By + // default 100 and at most 1000. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. The next_page_token value returned from a previous list request. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionEntityTypesRequest) Reset() { *m = ListSessionEntityTypesRequest{} } +func (m *ListSessionEntityTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListSessionEntityTypesRequest) ProtoMessage() {} +func (*ListSessionEntityTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{1} +} +func (m *ListSessionEntityTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionEntityTypesRequest.Unmarshal(m, b) +} +func (m *ListSessionEntityTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionEntityTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListSessionEntityTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionEntityTypesRequest.Merge(dst, src) +} +func (m *ListSessionEntityTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListSessionEntityTypesRequest.Size(m) +} +func (m *ListSessionEntityTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionEntityTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionEntityTypesRequest proto.InternalMessageInfo + +func (m *ListSessionEntityTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSessionEntityTypesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSessionEntityTypesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [SessionEntityTypes.ListSessionEntityTypes][google.cloud.dialogflow.v2beta1.SessionEntityTypes.ListSessionEntityTypes]. +type ListSessionEntityTypesResponse struct { + // The list of session entity types. There will be a maximum number of items + // returned based on the page_size field in the request. + SessionEntityTypes []*SessionEntityType `protobuf:"bytes,1,rep,name=session_entity_types,json=sessionEntityTypes,proto3" json:"session_entity_types,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionEntityTypesResponse) Reset() { *m = ListSessionEntityTypesResponse{} } +func (m *ListSessionEntityTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListSessionEntityTypesResponse) ProtoMessage() {} +func (*ListSessionEntityTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{2} +} +func (m *ListSessionEntityTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionEntityTypesResponse.Unmarshal(m, b) +} +func (m *ListSessionEntityTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionEntityTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListSessionEntityTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionEntityTypesResponse.Merge(dst, src) +} +func (m *ListSessionEntityTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListSessionEntityTypesResponse.Size(m) +} +func (m *ListSessionEntityTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionEntityTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionEntityTypesResponse proto.InternalMessageInfo + +func (m *ListSessionEntityTypesResponse) GetSessionEntityTypes() []*SessionEntityType { + if m != nil { + return m.SessionEntityTypes + } + return nil +} + +func (m *ListSessionEntityTypesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [SessionEntityTypes.GetSessionEntityType][google.cloud.dialogflow.v2beta1.SessionEntityTypes.GetSessionEntityType]. +type GetSessionEntityTypeRequest struct { + // Required. The name of the session entity type. Format: + // `projects//agent/sessions//entityTypes/` or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSessionEntityTypeRequest) Reset() { *m = GetSessionEntityTypeRequest{} } +func (m *GetSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*GetSessionEntityTypeRequest) ProtoMessage() {} +func (*GetSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{3} +} +func (m *GetSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *GetSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *GetSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSessionEntityTypeRequest.Merge(dst, src) +} +func (m *GetSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_GetSessionEntityTypeRequest.Size(m) +} +func (m *GetSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *GetSessionEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [SessionEntityTypes.CreateSessionEntityType][google.cloud.dialogflow.v2beta1.SessionEntityTypes.CreateSessionEntityType]. +type CreateSessionEntityTypeRequest struct { + // Required. The session to create a session entity type for. + // Format: `projects//agent/sessions/` or + // `projects//agent/environments//users// + // sessions/`. If `Environment ID` is not specified, we assume + // default 'draft' environment. If `User ID` is not specified, we assume + // default '-' user. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The session entity type to create. 
+ SessionEntityType *SessionEntityType `protobuf:"bytes,2,opt,name=session_entity_type,json=sessionEntityType,proto3" json:"session_entity_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSessionEntityTypeRequest) Reset() { *m = CreateSessionEntityTypeRequest{} } +func (m *CreateSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSessionEntityTypeRequest) ProtoMessage() {} +func (*CreateSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{4} +} +func (m *CreateSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *CreateSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSessionEntityTypeRequest.Merge(dst, src) +} +func (m *CreateSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_CreateSessionEntityTypeRequest.Size(m) +} +func (m *CreateSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *CreateSessionEntityTypeRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSessionEntityTypeRequest) GetSessionEntityType() *SessionEntityType { + if m != nil { + return m.SessionEntityType + } + return nil +} + +// The request message for [SessionEntityTypes.UpdateSessionEntityType][google.cloud.dialogflow.v2beta1.SessionEntityTypes.UpdateSessionEntityType]. +type UpdateSessionEntityTypeRequest struct { + // Required. The entity type to update. Format: + // `projects//agent/sessions//entityTypes/` or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. + SessionEntityType *SessionEntityType `protobuf:"bytes,1,opt,name=session_entity_type,json=sessionEntityType,proto3" json:"session_entity_type,omitempty"` + // Optional. The mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSessionEntityTypeRequest) Reset() { *m = UpdateSessionEntityTypeRequest{} } +func (m *UpdateSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSessionEntityTypeRequest) ProtoMessage() {} +func (*UpdateSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{5} +} +func (m *UpdateSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *UpdateSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSessionEntityTypeRequest.Merge(dst, src) +} +func (m *UpdateSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSessionEntityTypeRequest.Size(m) +} +func (m *UpdateSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *UpdateSessionEntityTypeRequest) GetSessionEntityType() *SessionEntityType { + if m != nil { + return m.SessionEntityType + } + return nil +} + +func (m *UpdateSessionEntityTypeRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for [SessionEntityTypes.DeleteSessionEntityType][google.cloud.dialogflow.v2beta1.SessionEntityTypes.DeleteSessionEntityType]. +type DeleteSessionEntityTypeRequest struct { + // Required. The name of the entity type to delete. Format: + // `projects//agent/sessions//entityTypes/` or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default 'draft' + // environment. If `User ID` is not specified, we assume default '-' user. 
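A minimal sketch of how a caller might build the update request defined just above; the helper name and the "entities" mask path are illustrative assumptions, not taken from this file. Only the fields named in UpdateMask are overwritten on the server side.

import (
	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1"
	"google.golang.org/genproto/protobuf/field_mask"
)

// buildUpdate is a hypothetical helper that pairs the new message state with
// a mask naming the fields to replace; everything not listed is left untouched.
func buildUpdate(et *dialogflow.SessionEntityType) *dialogflow.UpdateSessionEntityTypeRequest {
	return &dialogflow.UpdateSessionEntityTypeRequest{
		SessionEntityType: et,
		UpdateMask:        &field_mask.FieldMask{Paths: []string{"entities"}}, // assumed field path
	}
}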
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSessionEntityTypeRequest) Reset() { *m = DeleteSessionEntityTypeRequest{} } +func (m *DeleteSessionEntityTypeRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSessionEntityTypeRequest) ProtoMessage() {} +func (*DeleteSessionEntityTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_session_entity_type_c408f12785d6eb00, []int{6} +} +func (m *DeleteSessionEntityTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Unmarshal(m, b) +} +func (m *DeleteSessionEntityTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSessionEntityTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSessionEntityTypeRequest.Merge(dst, src) +} +func (m *DeleteSessionEntityTypeRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSessionEntityTypeRequest.Size(m) +} +func (m *DeleteSessionEntityTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSessionEntityTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSessionEntityTypeRequest proto.InternalMessageInfo + +func (m *DeleteSessionEntityTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*SessionEntityType)(nil), "google.cloud.dialogflow.v2beta1.SessionEntityType") + proto.RegisterType((*ListSessionEntityTypesRequest)(nil), "google.cloud.dialogflow.v2beta1.ListSessionEntityTypesRequest") + proto.RegisterType((*ListSessionEntityTypesResponse)(nil), "google.cloud.dialogflow.v2beta1.ListSessionEntityTypesResponse") + proto.RegisterType((*GetSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.GetSessionEntityTypeRequest") + proto.RegisterType((*CreateSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.CreateSessionEntityTypeRequest") + proto.RegisterType((*UpdateSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.UpdateSessionEntityTypeRequest") + proto.RegisterType((*DeleteSessionEntityTypeRequest)(nil), "google.cloud.dialogflow.v2beta1.DeleteSessionEntityTypeRequest") + proto.RegisterEnum("google.cloud.dialogflow.v2beta1.SessionEntityType_EntityOverrideMode", SessionEntityType_EntityOverrideMode_name, SessionEntityType_EntityOverrideMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SessionEntityTypesClient is the client API for SessionEntityTypes service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SessionEntityTypesClient interface { + // Returns the list of all session entity types in the specified session. + ListSessionEntityTypes(ctx context.Context, in *ListSessionEntityTypesRequest, opts ...grpc.CallOption) (*ListSessionEntityTypesResponse, error) + // Retrieves the specified session entity type. 
+ GetSessionEntityType(ctx context.Context, in *GetSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Creates a session entity type. + // + // If the specified session entity type already exists, overrides the + // session entity type. + CreateSessionEntityType(ctx context.Context, in *CreateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Updates the specified session entity type. + UpdateSessionEntityType(ctx context.Context, in *UpdateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) + // Deletes the specified session entity type. + DeleteSessionEntityType(ctx context.Context, in *DeleteSessionEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type sessionEntityTypesClient struct { + cc *grpc.ClientConn +} + +func NewSessionEntityTypesClient(cc *grpc.ClientConn) SessionEntityTypesClient { + return &sessionEntityTypesClient{cc} +} + +func (c *sessionEntityTypesClient) ListSessionEntityTypes(ctx context.Context, in *ListSessionEntityTypesRequest, opts ...grpc.CallOption) (*ListSessionEntityTypesResponse, error) { + out := new(ListSessionEntityTypesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/ListSessionEntityTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) GetSessionEntityType(ctx context.Context, in *GetSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/GetSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) CreateSessionEntityType(ctx context.Context, in *CreateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/CreateSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) UpdateSessionEntityType(ctx context.Context, in *UpdateSessionEntityTypeRequest, opts ...grpc.CallOption) (*SessionEntityType, error) { + out := new(SessionEntityType) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/UpdateSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sessionEntityTypesClient) DeleteSessionEntityType(ctx context.Context, in *DeleteSessionEntityTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/DeleteSessionEntityType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SessionEntityTypesServer is the server API for SessionEntityTypes service. +type SessionEntityTypesServer interface { + // Returns the list of all session entity types in the specified session. + ListSessionEntityTypes(context.Context, *ListSessionEntityTypesRequest) (*ListSessionEntityTypesResponse, error) + // Retrieves the specified session entity type. + GetSessionEntityType(context.Context, *GetSessionEntityTypeRequest) (*SessionEntityType, error) + // Creates a session entity type. + // + // If the specified session entity type already exists, overrides the + // session entity type. 
+ CreateSessionEntityType(context.Context, *CreateSessionEntityTypeRequest) (*SessionEntityType, error) + // Updates the specified session entity type. + UpdateSessionEntityType(context.Context, *UpdateSessionEntityTypeRequest) (*SessionEntityType, error) + // Deletes the specified session entity type. + DeleteSessionEntityType(context.Context, *DeleteSessionEntityTypeRequest) (*empty.Empty, error) +} + +func RegisterSessionEntityTypesServer(s *grpc.Server, srv SessionEntityTypesServer) { + s.RegisterService(&_SessionEntityTypes_serviceDesc, srv) +} + +func _SessionEntityTypes_ListSessionEntityTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSessionEntityTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).ListSessionEntityTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/ListSessionEntityTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).ListSessionEntityTypes(ctx, req.(*ListSessionEntityTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_GetSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).GetSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/GetSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).GetSessionEntityType(ctx, req.(*GetSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_CreateSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).CreateSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/CreateSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).CreateSessionEntityType(ctx, req.(*CreateSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_UpdateSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).UpdateSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/UpdateSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).UpdateSessionEntityType(ctx, 
req.(*UpdateSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SessionEntityTypes_DeleteSessionEntityType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSessionEntityTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SessionEntityTypesServer).DeleteSessionEntityType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.dialogflow.v2beta1.SessionEntityTypes/DeleteSessionEntityType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SessionEntityTypesServer).DeleteSessionEntityType(ctx, req.(*DeleteSessionEntityTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SessionEntityTypes_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.dialogflow.v2beta1.SessionEntityTypes", + HandlerType: (*SessionEntityTypesServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListSessionEntityTypes", + Handler: _SessionEntityTypes_ListSessionEntityTypes_Handler, + }, + { + MethodName: "GetSessionEntityType", + Handler: _SessionEntityTypes_GetSessionEntityType_Handler, + }, + { + MethodName: "CreateSessionEntityType", + Handler: _SessionEntityTypes_CreateSessionEntityType_Handler, + }, + { + MethodName: "UpdateSessionEntityType", + Handler: _SessionEntityTypes_UpdateSessionEntityType_Handler, + }, + { + MethodName: "DeleteSessionEntityType", + Handler: _SessionEntityTypes_DeleteSessionEntityType_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/dialogflow/v2beta1/session_entity_type.proto", +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/session_entity_type.proto", fileDescriptor_session_entity_type_c408f12785d6eb00) +} + +var fileDescriptor_session_entity_type_c408f12785d6eb00 = []byte{ + // 882 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcf, 0x6f, 0xe3, 0x44, + 0x14, 0x66, 0x5c, 0x58, 0xed, 0xce, 0xf2, 0xa3, 0x3b, 0x54, 0x69, 0x70, 0x69, 0x1a, 0xbc, 0x08, + 0x55, 0x39, 0xd8, 0x6a, 0xe0, 0xb2, 0x2c, 0x3f, 0xa4, 0x6d, 0xdc, 0x55, 0xa4, 0x4d, 0x1a, 0x39, + 0xe9, 0x4a, 0xf4, 0x62, 0x39, 0xf1, 0xab, 0x65, 0x9a, 0xcc, 0x18, 0xcf, 0xa4, 0x4b, 0x76, 0xb5, + 0x97, 0xbd, 0x72, 0xe0, 0x80, 0xc4, 0x89, 0x0b, 0x1c, 0x39, 0x70, 0x40, 0x5c, 0xb8, 0xc1, 0x99, + 0x13, 0xe2, 0x2f, 0x40, 0xe2, 0xc0, 0x3f, 0x80, 0x04, 0x37, 0xe4, 0xb1, 0xd3, 0x94, 0xfa, 0x57, + 0x1b, 0x7a, 0x8a, 0x3d, 0x33, 0xef, 0xbd, 0xef, 0xfb, 0xfc, 0xde, 0x97, 0xc1, 0x77, 0x3c, 0xc6, + 0xbc, 0x31, 0x18, 0xa3, 0x31, 0x9b, 0xba, 0x86, 0xeb, 0x3b, 0x63, 0xe6, 0x1d, 0x8d, 0xd9, 0x23, + 0xe3, 0xa4, 0x39, 0x04, 0xe1, 0xec, 0x18, 0x1c, 0x38, 0xf7, 0x19, 0xb5, 0x81, 0x0a, 0x5f, 0xcc, + 0x6c, 0x31, 0x0b, 0x40, 0x0f, 0x42, 0x26, 0x18, 0xd9, 0x8a, 0x43, 0x75, 0x19, 0xaa, 0x2f, 0x42, + 0xf5, 0x24, 0x54, 0x7d, 0x3d, 0xc9, 0xed, 0x04, 0xbe, 0xe1, 0x50, 0xca, 0x84, 0x23, 0x7c, 0x46, + 0x79, 0x1c, 0xae, 0xbe, 0x76, 0x66, 0x37, 0x04, 0xce, 0xa6, 0xe1, 0x28, 0xc9, 0xac, 0xee, 0x94, + 0x81, 0x4a, 0x81, 0x51, 0x37, 0x92, 0x10, 0xf9, 0x36, 0x9c, 0x1e, 0x19, 0x30, 0x09, 0xc4, 0x2c, + 0xd9, 0xac, 0x9f, 0xdf, 0x3c, 0xf2, 0x61, 0xec, 0xda, 0x13, 0x87, 0x1f, 0xc7, 0x27, 0xb4, 0xbf, + 0x14, 0x7c, 0xab, 0x1f, 0x33, 0x35, 0x65, 0xee, 0xc1, 0x2c, 0x00, 0x42, 0xf0, 0xf3, 0xd4, 0x99, + 0x40, 0x15, 0xd5, 0xd1, 0xf6, 0x0d, 0x4b, 0x3e, 0x93, 0x47, 
0x78, 0x2d, 0xa9, 0xce, 0x4e, 0x20, + 0x0c, 0x7d, 0x17, 0xec, 0x09, 0x73, 0xa1, 0xaa, 0xd4, 0xd1, 0xf6, 0xcb, 0x4d, 0x53, 0x2f, 0x11, + 0x45, 0x4f, 0x55, 0xd1, 0xe3, 0xc7, 0xfd, 0x24, 0x5b, 0x87, 0xb9, 0x60, 0x11, 0x48, 0xad, 0x91, + 0x2e, 0xbe, 0x2e, 0x57, 0x7d, 0xe0, 0xd5, 0x95, 0xfa, 0xca, 0xf6, 0xcd, 0x66, 0xb3, 0xb4, 0x58, + 0xaa, 0x8a, 0x75, 0x9a, 0x43, 0x7b, 0x86, 0x30, 0x49, 0x97, 0x26, 0x6f, 0xe2, 0xba, 0xd9, 0x1d, + 0xb4, 0x07, 0x1f, 0xd9, 0xfb, 0x0f, 0x4d, 0xcb, 0x6a, 0xb7, 0x4c, 0xbb, 0xb3, 0xdf, 0x32, 0xed, + 0x83, 0x6e, 0xbf, 0x67, 0xee, 0xb6, 0xf7, 0xda, 0x66, 0x6b, 0xf5, 0x39, 0xf2, 0x06, 0xde, 0xcc, + 0x3c, 0x35, 0x7f, 0x5b, 0x45, 0xe4, 0x36, 0xde, 0xca, 0x3c, 0xd2, 0x3f, 0xe8, 0xf5, 0x1e, 0x98, + 0x1d, 0xb3, 0x3b, 0x58, 0x55, 0x34, 0x8e, 0x37, 0x1f, 0xf8, 0x5c, 0xa4, 0x44, 0xe1, 0x16, 0x7c, + 0x32, 0x05, 0x2e, 0x48, 0x05, 0x5f, 0x0b, 0x9c, 0x10, 0xa8, 0x48, 0x3e, 0x42, 0xf2, 0x46, 0x36, + 0xf0, 0x8d, 0xc0, 0xf1, 0xc0, 0xe6, 0xfe, 0xe3, 0x58, 0xfb, 0x17, 0xac, 0xeb, 0xd1, 0x42, 0xdf, + 0x7f, 0x0c, 0x64, 0x13, 0x63, 0xb9, 0x29, 0xd8, 0x31, 0xd0, 0xea, 0x8a, 0x0c, 0x94, 0xc7, 0x07, + 0xd1, 0x82, 0xf6, 0x1d, 0xc2, 0xb5, 0xbc, 0xaa, 0x3c, 0x60, 0x94, 0x03, 0x71, 0xf1, 0x5a, 0x46, + 0xe3, 0xf3, 0x2a, 0xba, 0xa0, 0xf0, 0xa9, 0xd4, 0x16, 0xe1, 0xa9, 0x6a, 0xe4, 0x2d, 0xfc, 0x0a, + 0x85, 0x4f, 0x85, 0x7d, 0x06, 0xac, 0x22, 0xc1, 0xbe, 0x14, 0x2d, 0xf7, 0x4e, 0x01, 0xef, 0xe0, + 0x8d, 0xfb, 0x90, 0x86, 0x3b, 0xd7, 0x28, 0xa3, 0x4d, 0xb5, 0xaf, 0x10, 0xae, 0xed, 0x86, 0xe0, + 0x08, 0xc8, 0x0d, 0xcb, 0x93, 0x76, 0x88, 0x5f, 0xcd, 0xe0, 0x2e, 0x91, 0x2d, 0x47, 0xfd, 0x56, + 0x8a, 0xba, 0xf6, 0x33, 0xc2, 0xb5, 0x83, 0xc0, 0x2d, 0x82, 0x97, 0x03, 0x03, 0x5d, 0x21, 0x0c, + 0x72, 0x17, 0xdf, 0x9c, 0x4a, 0x14, 0xd2, 0x0b, 0x12, 0x8a, 0xea, 0x3c, 0xf7, 0xdc, 0x2e, 0xf4, + 0xbd, 0xc8, 0x2e, 0x3a, 0x0e, 0x3f, 0xb6, 0x70, 0x7c, 0x3c, 0x7a, 0xd6, 0xde, 0xc1, 0xb5, 0x16, + 0x8c, 0xa1, 0x80, 0x42, 0xc6, 0x87, 0x69, 0xfe, 0xf2, 0x22, 0x26, 0xe9, 0xc6, 0x23, 0xdf, 0x2b, + 0xb8, 0x92, 0xdd, 0x93, 0xe4, 0x83, 0x52, 0xae, 0x85, 0x23, 0xa4, 0x7e, 0xb8, 0x74, 0x7c, 0x3c, + 0x0c, 0xda, 0x97, 0xe8, 0xd9, 0x6f, 0x7f, 0x7c, 0xa1, 0x7c, 0x8e, 0xc8, 0x9d, 0x53, 0x07, 0x7e, + 0x12, 0x37, 0xcb, 0xfb, 0x41, 0xc8, 0x3e, 0x86, 0x91, 0xe0, 0x46, 0xc3, 0x70, 0x3c, 0xa0, 0x62, + 0xfe, 0x7f, 0xc1, 0x8d, 0xc6, 0xd3, 0xc4, 0xa6, 0x65, 0xb2, 0x43, 0x8b, 0xf4, 0xca, 0x83, 0x81, + 0x9e, 0xf8, 0x21, 0xa3, 0x13, 0xa0, 0x72, 0x71, 0xca, 0x21, 0x8c, 0x7e, 0x73, 0x72, 0x92, 0xaf, + 0x15, 0xbc, 0x96, 0x35, 0x18, 0xe4, 0xbd, 0x52, 0xca, 0x05, 0xf3, 0xa4, 0x2e, 0xd1, 0x5c, 0xd9, + 0x1a, 0x45, 0x1f, 0xbc, 0x48, 0xa1, 0xb3, 0x64, 0x8c, 0xc6, 0xd3, 0xff, 0x6a, 0x94, 0x1d, 0x5c, + 0xaa, 0xd0, 0xb9, 0x9c, 0xe4, 0x57, 0x05, 0xaf, 0xe7, 0x18, 0x01, 0x29, 0xef, 0x8c, 0x62, 0x0b, + 0x59, 0x4a, 0xa9, 0x9f, 0x62, 0xa5, 0x7e, 0x44, 0xda, 0xf2, 0xdd, 0xf4, 0x6e, 0x96, 0x35, 0x1c, + 0x7a, 0xda, 0x95, 0xb7, 0x58, 0x66, 0x21, 0xf2, 0x8f, 0x82, 0xd7, 0x73, 0xdc, 0xeb, 0x02, 0x9a, + 0x16, 0xfb, 0xde, 0x52, 0x9a, 0xfe, 0x19, 0x6b, 0xfa, 0x3b, 0x6a, 0x76, 0x16, 0x0a, 0x64, 0xdd, + 0xdc, 0x2e, 0xd9, 0x91, 0xd9, 0x3a, 0x3f, 0x69, 0xba, 0xcb, 0x54, 0xb9, 0x6c, 0xeb, 0x66, 0x6b, + 0xff, 0x99, 0x82, 0xd7, 0x73, 0x6c, 0xf7, 0x02, 0xda, 0x17, 0x1b, 0xb6, 0x5a, 0x49, 0x59, 0xbf, + 0x19, 0x5d, 0x23, 0x17, 0xd3, 0xdd, 0xf8, 0x3f, 0xd3, 0xdd, 0xb8, 0xf2, 0xe9, 0xbe, 0xf7, 0x03, + 0xc2, 0xb7, 0x47, 0x6c, 0x52, 0xc6, 0xfb, 0x5e, 0x25, 0x45, 0xb9, 0x17, 0x31, 0xec, 0xa1, 0xc3, + 0x76, 0x12, 0xea, 0xb1, 0xb1, 0x43, 0x3d, 0x9d, 0x85, 0x9e, 0xe1, 0x01, 0x95, 0xfc, 
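A rough sketch of driving the SessionEntityTypes client generated above; the dial target and parent session path are placeholders, and real calls need TLS plus OAuth per-RPC credentials, which are omitted here.

import (
	"context"
	"log"

	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1"
	"google.golang.org/grpc"
)

func listEntityTypes(parent string) {
	// Placeholder connection options; production use requires transport
	// security and OAuth credentials for dialogflow.googleapis.com.
	conn, err := grpc.Dial("dialogflow.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := dialogflow.NewSessionEntityTypesClient(conn)
	resp, err := client.ListSessionEntityTypes(context.Background(),
		&dialogflow.ListSessionEntityTypesRequest{Parent: parent})
	if err != nil {
		log.Fatal(err)
	}
	for _, et := range resp.GetSessionEntityTypes() {
		log.Println(et)
	}
}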
0x8d, 0x78, + 0xcb, 0x09, 0x7c, 0x9e, 0x7b, 0x15, 0xbf, 0xbb, 0x58, 0xfa, 0x1b, 0xa1, 0x6f, 0x14, 0xa5, 0xb5, + 0xf7, 0xad, 0xb2, 0x75, 0x3f, 0xce, 0xb9, 0x2b, 0xe1, 0xb4, 0x16, 0x70, 0x1e, 0xc6, 0x41, 0xc3, + 0x6b, 0x32, 0xff, 0xdb, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xd1, 0x6d, 0x34, 0x40, 0x7e, 0x0c, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/webhook.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/webhook.pb.go new file mode 100644 index 0000000..d267de1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1/webhook.pb.go @@ -0,0 +1,352 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/dialogflow/v2beta1/webhook.proto + +package dialogflow // import "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message for a webhook call. +type WebhookRequest struct { + // The unique identifier of detectIntent request session. + // Can be used to identify end-user inside webhook implementation. + // Format: `projects//agent/sessions/`, or + // `projects//agent/environments//users//sessions/`. + Session string `protobuf:"bytes,4,opt,name=session,proto3" json:"session,omitempty"` + // The unique identifier of the response. Contains the same value as + // `[Streaming]DetectIntentResponse.response_id`. + ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"` + // The result of the conversational query or event processing. Contains the + // same value as `[Streaming]DetectIntentResponse.query_result`. + QueryResult *QueryResult `protobuf:"bytes,2,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"` + // Alternative query results from KnowledgeService. + AlternativeQueryResults []*QueryResult `protobuf:"bytes,5,rep,name=alternative_query_results,json=alternativeQueryResults,proto3" json:"alternative_query_results,omitempty"` + // Optional. The contents of the original request that was passed to + // `[Streaming]DetectIntent` call. 
+ OriginalDetectIntentRequest *OriginalDetectIntentRequest `protobuf:"bytes,3,opt,name=original_detect_intent_request,json=originalDetectIntentRequest,proto3" json:"original_detect_intent_request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebhookRequest) Reset() { *m = WebhookRequest{} } +func (m *WebhookRequest) String() string { return proto.CompactTextString(m) } +func (*WebhookRequest) ProtoMessage() {} +func (*WebhookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_1f28c13c6ad471f2, []int{0} +} +func (m *WebhookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebhookRequest.Unmarshal(m, b) +} +func (m *WebhookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebhookRequest.Marshal(b, m, deterministic) +} +func (dst *WebhookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebhookRequest.Merge(dst, src) +} +func (m *WebhookRequest) XXX_Size() int { + return xxx_messageInfo_WebhookRequest.Size(m) +} +func (m *WebhookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WebhookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WebhookRequest proto.InternalMessageInfo + +func (m *WebhookRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *WebhookRequest) GetResponseId() string { + if m != nil { + return m.ResponseId + } + return "" +} + +func (m *WebhookRequest) GetQueryResult() *QueryResult { + if m != nil { + return m.QueryResult + } + return nil +} + +func (m *WebhookRequest) GetAlternativeQueryResults() []*QueryResult { + if m != nil { + return m.AlternativeQueryResults + } + return nil +} + +func (m *WebhookRequest) GetOriginalDetectIntentRequest() *OriginalDetectIntentRequest { + if m != nil { + return m.OriginalDetectIntentRequest + } + return nil +} + +// The response message for a webhook call. +type WebhookResponse struct { + // Optional. The text to be shown on the screen. This value is passed directly + // to `QueryResult.fulfillment_text`. + FulfillmentText string `protobuf:"bytes,1,opt,name=fulfillment_text,json=fulfillmentText,proto3" json:"fulfillment_text,omitempty"` + // Optional. The collection of rich messages to present to the user. This + // value is passed directly to `QueryResult.fulfillment_messages`. + FulfillmentMessages []*Intent_Message `protobuf:"bytes,2,rep,name=fulfillment_messages,json=fulfillmentMessages,proto3" json:"fulfillment_messages,omitempty"` + // Optional. This value is passed directly to `QueryResult.webhook_source`. + Source string `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` + // Optional. This value is passed directly to `QueryResult.webhook_payload`. + // See the related `fulfillment_messages[i].payload field`, which may be used + // as an alternative to this field. + // + // This field can be used for Actions on Google responses. + // It should have a structure similar to the JSON message shown here. For more + // information, see + // [Actions on Google Webhook + // Format](https://developers.google.com/actions/dialogflow/webhook) + //
+	// {
+	//   "google": {
+	//     "expectUserResponse": true,
+	//     "richResponse": {
+	//       "items": [
+	//         {
+	//           "simpleResponse": {
+	//             "textToSpeech": "this is a simple response"
+	//           }
+	//         }
+	//       ]
+	//     }
+	//   }
+	// }
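A short sketch of producing that payload value from a fulfillment backend; the jsonpb-based approach and the import alias are assumptions, and error handling is omitted.

import (
	"github.com/golang/protobuf/jsonpb"
	structpb "github.com/golang/protobuf/ptypes/struct"
)

func googlePayload() *structpb.Struct {
	// Parse the JSON document shown in the comment above into a Struct value
	// suitable for the Payload field below.
	payload := &structpb.Struct{}
	_ = jsonpb.UnmarshalString(`{"google": {"expectUserResponse": true}}`, payload)
	return payload
}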
+ Payload *_struct.Struct `protobuf:"bytes,4,opt,name=payload,proto3" json:"payload,omitempty"` + // Optional. The collection of output contexts. This value is passed directly + // to `QueryResult.output_contexts`. + OutputContexts []*Context `protobuf:"bytes,5,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"` + // Optional. Makes the platform immediately invoke another `DetectIntent` call + // internally with the specified event as input. + FollowupEventInput *EventInput `protobuf:"bytes,6,opt,name=followup_event_input,json=followupEventInput,proto3" json:"followup_event_input,omitempty"` + // Optional. Indicates that this intent ends an interaction. Some integrations + // (e.g., Actions on Google or Dialogflow phone gateway) use this information + // to close interaction with an end user. Default is false. + EndInteraction bool `protobuf:"varint,8,opt,name=end_interaction,json=endInteraction,proto3" json:"end_interaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebhookResponse) Reset() { *m = WebhookResponse{} } +func (m *WebhookResponse) String() string { return proto.CompactTextString(m) } +func (*WebhookResponse) ProtoMessage() {} +func (*WebhookResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_1f28c13c6ad471f2, []int{1} +} +func (m *WebhookResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebhookResponse.Unmarshal(m, b) +} +func (m *WebhookResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebhookResponse.Marshal(b, m, deterministic) +} +func (dst *WebhookResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebhookResponse.Merge(dst, src) +} +func (m *WebhookResponse) XXX_Size() int { + return xxx_messageInfo_WebhookResponse.Size(m) +} +func (m *WebhookResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WebhookResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WebhookResponse proto.InternalMessageInfo + +func (m *WebhookResponse) GetFulfillmentText() string { + if m != nil { + return m.FulfillmentText + } + return "" +} + +func (m *WebhookResponse) GetFulfillmentMessages() []*Intent_Message { + if m != nil { + return m.FulfillmentMessages + } + return nil +} + +func (m *WebhookResponse) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *WebhookResponse) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func (m *WebhookResponse) GetOutputContexts() []*Context { + if m != nil { + return m.OutputContexts + } + return nil +} + +func (m *WebhookResponse) GetFollowupEventInput() *EventInput { + if m != nil { + return m.FollowupEventInput + } + return nil +} + +func (m *WebhookResponse) GetEndInteraction() bool { + if m != nil { + return m.EndInteraction + } + return false +} + +// Represents the contents of the original request that was passed to +// the `[Streaming]DetectIntent` call. +type OriginalDetectIntentRequest struct { + // The source of this request, e.g., `google`, `facebook`, `slack`. It is set + // by Dialogflow-owned servers. + Source string `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // Optional. The version of the protocol used for this request. + // This field is AoG-specific. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // Optional. 
+ // This field is set to the value of the `QueryParameters.payload` + // field passed in the request. Some integrations that query a Dialogflow + // agent may provide additional information in the payload. + // + // In particular for the Telephony Gateway this field has the form: +	//
+	// {
+	//  "telephony": {
+	//    "caller_id": "+18558363987"
+	//  }
+	// }
+ // Note: The caller ID field (`caller_id`) will be redacted for Standard + // Edition agents and populated with the caller ID in [E.164 + // format](https://en.wikipedia.org/wiki/E.164) for Enterprise Edition agents. + Payload *_struct.Struct `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OriginalDetectIntentRequest) Reset() { *m = OriginalDetectIntentRequest{} } +func (m *OriginalDetectIntentRequest) String() string { return proto.CompactTextString(m) } +func (*OriginalDetectIntentRequest) ProtoMessage() {} +func (*OriginalDetectIntentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webhook_1f28c13c6ad471f2, []int{2} +} +func (m *OriginalDetectIntentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OriginalDetectIntentRequest.Unmarshal(m, b) +} +func (m *OriginalDetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OriginalDetectIntentRequest.Marshal(b, m, deterministic) +} +func (dst *OriginalDetectIntentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_OriginalDetectIntentRequest.Merge(dst, src) +} +func (m *OriginalDetectIntentRequest) XXX_Size() int { + return xxx_messageInfo_OriginalDetectIntentRequest.Size(m) +} +func (m *OriginalDetectIntentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_OriginalDetectIntentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_OriginalDetectIntentRequest proto.InternalMessageInfo + +func (m *OriginalDetectIntentRequest) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *OriginalDetectIntentRequest) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *OriginalDetectIntentRequest) GetPayload() *_struct.Struct { + if m != nil { + return m.Payload + } + return nil +} + +func init() { + proto.RegisterType((*WebhookRequest)(nil), "google.cloud.dialogflow.v2beta1.WebhookRequest") + proto.RegisterType((*WebhookResponse)(nil), "google.cloud.dialogflow.v2beta1.WebhookResponse") + proto.RegisterType((*OriginalDetectIntentRequest)(nil), "google.cloud.dialogflow.v2beta1.OriginalDetectIntentRequest") +} + +func init() { + proto.RegisterFile("google/cloud/dialogflow/v2beta1/webhook.proto", fileDescriptor_webhook_1f28c13c6ad471f2) +} + +var fileDescriptor_webhook_1f28c13c6ad471f2 = []byte{ + // 617 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x4d, 0x6f, 0xd4, 0x30, + 0x10, 0x55, 0x76, 0xa1, 0x1f, 0xde, 0xaa, 0x8b, 0x4c, 0x45, 0x43, 0x8b, 0xe8, 0xaa, 0x1c, 0x58, + 0x44, 0x49, 0xd4, 0x72, 0x42, 0x70, 0x6a, 0x0b, 0x68, 0x0f, 0xa8, 0x6d, 0x40, 0x20, 0x21, 0xa1, + 0xc8, 0x9b, 0xcc, 0xa6, 0x16, 0xae, 0x9d, 0xc6, 0xf6, 0x6e, 0xf7, 0x08, 0x47, 0x7e, 0x06, 0xc7, + 0xfe, 0x10, 0x7e, 0x13, 0x47, 0x14, 0xdb, 0x21, 0x01, 0xa9, 0xa4, 0x1c, 0x67, 0xe6, 0xbd, 0xe7, + 0xc9, 0x9b, 0x99, 0xa0, 0x27, 0x99, 0x10, 0x19, 0x83, 0x30, 0x61, 0x42, 0xa7, 0x61, 0x4a, 0x09, + 0x13, 0xd9, 0x84, 0x89, 0x59, 0x38, 0xdd, 0x1b, 0x83, 0x22, 0xbb, 0xe1, 0x0c, 0xc6, 0xa7, 0x42, + 0x7c, 0x0e, 0xf2, 0x42, 0x28, 0x81, 0xb7, 0x2c, 0x3c, 0x30, 0xf0, 0xa0, 0x86, 0x07, 0x0e, 0xbe, + 0xd1, 0xaa, 0x97, 0x08, 0xae, 0xe0, 0x42, 0x59, 0xbd, 0x8d, 0x9d, 0x36, 0x38, 0xe5, 0x0a, 0x78, + 0x85, 0x6e, 0x15, 0x97, 0x20, 0x25, 0x15, 0xdc, 0xc1, 0x9f, 0x5d, 0x13, 0x1e, 0x03, 0x57, 0x54, + 0xcd, 0x63, 0x35, 0xcf, 0xc1, 0x51, 0xef, 0x39, 0xaa, 
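For the request side, a fulfillment service receives WebhookRequest as the body of an HTTPS POST and replies with a WebhookResponse. A minimal handler sketch follows; the use of net/http and jsonpb is an assumption about the caller rather than anything this file prescribes.

import (
	"net/http"

	"github.com/golang/protobuf/jsonpb"
	dialogflow "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1"
)

func fulfillment(w http.ResponseWriter, r *http.Request) {
	// Decode the proto-JSON body Dialogflow sends to the webhook.
	var req dialogflow.WebhookRequest
	if err := jsonpb.Unmarshal(r.Body, &req); err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	resp := &dialogflow.WebhookResponse{
		FulfillmentText: "Handled session " + req.GetSession(),
	}
	// Reply in proto-JSON form as well, keeping original proto field names.
	_ = (&jsonpb.Marshaler{OrigName: true}).Marshal(w, resp)
}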
0x89, 0xc6, 0x7a, 0x12, 0x4a, 0x55, 0xe8, + 0x44, 0xfd, 0x55, 0x25, 0x39, 0x0d, 0x09, 0xe7, 0x42, 0x11, 0x45, 0x05, 0x97, 0xb6, 0xba, 0xfd, + 0xad, 0x8b, 0x56, 0x3f, 0x58, 0xd7, 0x22, 0x38, 0xd7, 0x20, 0x15, 0xf6, 0xd1, 0xa2, 0x7b, 0xcb, + 0xbf, 0x31, 0xf0, 0x86, 0xcb, 0x51, 0x15, 0xe2, 0x2d, 0xd4, 0x2b, 0x40, 0xe6, 0x82, 0x4b, 0x88, + 0x69, 0xea, 0x7b, 0xa6, 0x8a, 0xaa, 0xd4, 0x28, 0xc5, 0x47, 0x68, 0xe5, 0x5c, 0x43, 0x31, 0x8f, + 0x0b, 0x90, 0x9a, 0x29, 0xbf, 0x33, 0xf0, 0x86, 0xbd, 0xbd, 0x9d, 0xa0, 0x65, 0x10, 0xc1, 0x49, + 0x49, 0x8a, 0x0c, 0x27, 0xea, 0x9d, 0xd7, 0x01, 0x3e, 0x45, 0x77, 0x09, 0x53, 0x50, 0x70, 0xa2, + 0xe8, 0x14, 0xe2, 0xa6, 0xb8, 0xf4, 0x6f, 0x0e, 0xba, 0xff, 0xad, 0xbe, 0xde, 0x90, 0x6b, 0xe4, + 0x25, 0xfe, 0xe2, 0xa1, 0xfb, 0xa2, 0xa0, 0x19, 0xe5, 0x84, 0xc5, 0x29, 0x28, 0x48, 0x54, 0x6c, + 0xe7, 0x19, 0x17, 0xd6, 0x18, 0xbf, 0x6b, 0xbe, 0xe6, 0x45, 0xeb, 0x7b, 0x47, 0x4e, 0xe6, 0xd0, + 0xa8, 0x8c, 0x8c, 0x88, 0x33, 0x37, 0xda, 0x14, 0x57, 0x17, 0xb7, 0x7f, 0x74, 0x51, 0xff, 0xf7, + 0x30, 0xac, 0xa9, 0xf8, 0x11, 0xba, 0x35, 0xd1, 0x6c, 0x42, 0x19, 0x3b, 0x2b, 0x7b, 0x29, 0xd7, + 0xd1, 0x19, 0xdf, 0x6f, 0xe4, 0xdf, 0xc1, 0x85, 0xc2, 0x63, 0xb4, 0xd6, 0x84, 0x9e, 0x81, 0x94, + 0x24, 0x03, 0xe9, 0x77, 0x8c, 0x4f, 0x61, 0x6b, 0xdf, 0xb6, 0x99, 0xe0, 0x8d, 0xe5, 0x45, 0xb7, + 0x1b, 0x62, 0x2e, 0x27, 0xf1, 0x1d, 0xb4, 0x20, 0x85, 0x2e, 0x12, 0x30, 0x6e, 0x2c, 0x47, 0x2e, + 0xc2, 0xbb, 0x68, 0x31, 0x27, 0x73, 0x26, 0x48, 0x6a, 0x96, 0xa6, 0xb7, 0xb7, 0x5e, 0x3d, 0x57, + 0x6d, 0x65, 0xf0, 0xd6, 0x6c, 0x65, 0x54, 0xe1, 0xf0, 0x09, 0xea, 0x0b, 0xad, 0x72, 0xad, 0x62, + 0x77, 0x66, 0xd5, 0x44, 0x87, 0xad, 0x9d, 0x1e, 0x58, 0x42, 0xb4, 0x6a, 0x05, 0x5c, 0x28, 0xf1, + 0x27, 0xb4, 0x36, 0x11, 0x8c, 0x89, 0x99, 0xce, 0x63, 0x98, 0x96, 0x26, 0x50, 0x9e, 0x6b, 0xe5, + 0x2f, 0x98, 0x96, 0x1e, 0xb7, 0xea, 0xbe, 0x2c, 0x39, 0xa3, 0x92, 0x12, 0xe1, 0x4a, 0xa8, 0xce, + 0xe1, 0x87, 0xa8, 0x0f, 0x3c, 0x35, 0x6b, 0x51, 0x90, 0xa4, 0x3c, 0x23, 0x7f, 0x69, 0xe0, 0x0d, + 0x97, 0xa2, 0x55, 0xe0, 0xe9, 0xa8, 0xce, 0x6e, 0x7f, 0xf5, 0xd0, 0xe6, 0x3f, 0xb6, 0xa0, 0xe1, + 0xa2, 0xf7, 0x87, 0x8b, 0x3e, 0x5a, 0x9c, 0x42, 0x61, 0x4e, 0xaf, 0x63, 0x4f, 0xcf, 0x85, 0x4d, + 0x7f, 0xbb, 0xd7, 0xf3, 0x77, 0xff, 0xd2, 0x43, 0x0f, 0x12, 0x71, 0xd6, 0xf6, 0xd1, 0xfb, 0x2b, + 0x6e, 0xe5, 0x8e, 0x4b, 0xa1, 0x63, 0xef, 0xe3, 0xc8, 0x11, 0x32, 0xc1, 0x08, 0xcf, 0x02, 0x51, + 0x64, 0x61, 0x06, 0xdc, 0x3c, 0x13, 0xda, 0x12, 0xc9, 0xa9, 0xbc, 0xf2, 0x47, 0xf5, 0xbc, 0x4e, + 0xfd, 0xf4, 0xbc, 0xef, 0x9d, 0xce, 0xe1, 0xab, 0xcb, 0xce, 0xd6, 0x6b, 0xab, 0x79, 0x60, 0x9a, + 0x38, 0xac, 0x9b, 0x78, 0x6f, 0x49, 0xe3, 0x05, 0xa3, 0xff, 0xf4, 0x57, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xfb, 0x83, 0x9a, 0x89, 0xe3, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/functions.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/functions.pb.go new file mode 100644 index 0000000..3bf78ef --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/functions.pb.go @@ -0,0 +1,1461 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/functions/v1beta2/functions.proto + +package functions // import "google.golang.org/genproto/googleapis/cloud/functions/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/serviceconfig" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the current stage of a deployment. +type CloudFunctionStatus int32 + +const ( + // Status not specified. + CloudFunctionStatus_STATUS_UNSPECIFIED CloudFunctionStatus = 0 + // Successfully deployed. + CloudFunctionStatus_READY CloudFunctionStatus = 1 + // Not deployed correctly - behavior is undefined. The item should be updated + // or deleted to move it out of this state. + CloudFunctionStatus_FAILED CloudFunctionStatus = 2 + // Creation or update in progress. + CloudFunctionStatus_DEPLOYING CloudFunctionStatus = 3 + // Deletion in progress. + CloudFunctionStatus_DELETING CloudFunctionStatus = 4 +) + +var CloudFunctionStatus_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "READY", + 2: "FAILED", + 3: "DEPLOYING", + 4: "DELETING", +} +var CloudFunctionStatus_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "READY": 1, + "FAILED": 2, + "DEPLOYING": 3, + "DELETING": 4, +} + +func (x CloudFunctionStatus) String() string { + return proto.EnumName(CloudFunctionStatus_name, int32(x)) +} +func (CloudFunctionStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{0} +} + +// Describes a Cloud Function that contains user computation executed in +// response to an event. It encapsulate function and triggers configurations. +type CloudFunction struct { + // A user-defined name of the function. Function names must be unique + // globally and match pattern `projects/*/locations/*/functions/*` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The location of the function source code. + // + // Types that are valid to be assigned to SourceCode: + // *CloudFunction_SourceArchiveUrl + // *CloudFunction_SourceRepository + SourceCode isCloudFunction_SourceCode `protobuf_oneof:"source_code"` + // An event that triggers the function. + // + // Types that are valid to be assigned to Trigger: + // *CloudFunction_HttpsTrigger + // *CloudFunction_EventTrigger + Trigger isCloudFunction_Trigger `protobuf_oneof:"trigger"` + // Output only. Status of the function deployment. + Status CloudFunctionStatus `protobuf:"varint,7,opt,name=status,proto3,enum=google.cloud.functions.v1beta2.CloudFunctionStatus" json:"status,omitempty"` + // Output only. Name of the most recent operation modifying the function. If + // the function status is `DEPLOYING` or `DELETING`, then it points to the + // active operation. 
+ LatestOperation string `protobuf:"bytes,8,opt,name=latest_operation,json=latestOperation,proto3" json:"latest_operation,omitempty"` + // The name of the function (as defined in source code) that will be + // executed. Defaults to the resource name suffix, if not specified. For + // backward compatibility, if function with given name is not found, then the + // system will try to use function named "function". + // For Node.js this is name of a function exported by the module specified + // in `source_location`. + EntryPoint string `protobuf:"bytes,9,opt,name=entry_point,json=entryPoint,proto3" json:"entry_point,omitempty"` + // The function execution timeout. Execution is considered failed and + // can be terminated if the function is not completed at the end of the + // timeout period. Defaults to 60 seconds. + Timeout *duration.Duration `protobuf:"bytes,10,opt,name=timeout,proto3" json:"timeout,omitempty"` + // The amount of memory in MB available for a function. + // Defaults to 256MB. + AvailableMemoryMb int32 `protobuf:"varint,11,opt,name=available_memory_mb,json=availableMemoryMb,proto3" json:"available_memory_mb,omitempty"` + // Output only. The service account of the function. + ServiceAccount string `protobuf:"bytes,13,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // Output only. The last update timestamp of a Cloud Function. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,15,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudFunction) Reset() { *m = CloudFunction{} } +func (m *CloudFunction) String() string { return proto.CompactTextString(m) } +func (*CloudFunction) ProtoMessage() {} +func (*CloudFunction) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{0} +} +func (m *CloudFunction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudFunction.Unmarshal(m, b) +} +func (m *CloudFunction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudFunction.Marshal(b, m, deterministic) +} +func (dst *CloudFunction) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudFunction.Merge(dst, src) +} +func (m *CloudFunction) XXX_Size() int { + return xxx_messageInfo_CloudFunction.Size(m) +} +func (m *CloudFunction) XXX_DiscardUnknown() { + xxx_messageInfo_CloudFunction.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudFunction proto.InternalMessageInfo + +func (m *CloudFunction) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isCloudFunction_SourceCode interface { + isCloudFunction_SourceCode() +} + +type CloudFunction_SourceArchiveUrl struct { + SourceArchiveUrl string `protobuf:"bytes,14,opt,name=source_archive_url,json=sourceArchiveUrl,proto3,oneof"` +} + +type CloudFunction_SourceRepository struct { + SourceRepository *SourceRepository `protobuf:"bytes,3,opt,name=source_repository,json=sourceRepository,proto3,oneof"` +} + +func (*CloudFunction_SourceArchiveUrl) isCloudFunction_SourceCode() {} + +func (*CloudFunction_SourceRepository) isCloudFunction_SourceCode() {} + +func (m *CloudFunction) GetSourceCode() isCloudFunction_SourceCode { + if m != nil { + return m.SourceCode + } + return nil +} + +func (m *CloudFunction) GetSourceArchiveUrl() string { + if x, ok := m.GetSourceCode().(*CloudFunction_SourceArchiveUrl); ok { + return x.SourceArchiveUrl + } + return "" +} + +func 
(m *CloudFunction) GetSourceRepository() *SourceRepository { + if x, ok := m.GetSourceCode().(*CloudFunction_SourceRepository); ok { + return x.SourceRepository + } + return nil +} + +type isCloudFunction_Trigger interface { + isCloudFunction_Trigger() +} + +type CloudFunction_HttpsTrigger struct { + HttpsTrigger *HTTPSTrigger `protobuf:"bytes,6,opt,name=https_trigger,json=httpsTrigger,proto3,oneof"` +} + +type CloudFunction_EventTrigger struct { + EventTrigger *EventTrigger `protobuf:"bytes,12,opt,name=event_trigger,json=eventTrigger,proto3,oneof"` +} + +func (*CloudFunction_HttpsTrigger) isCloudFunction_Trigger() {} + +func (*CloudFunction_EventTrigger) isCloudFunction_Trigger() {} + +func (m *CloudFunction) GetTrigger() isCloudFunction_Trigger { + if m != nil { + return m.Trigger + } + return nil +} + +func (m *CloudFunction) GetHttpsTrigger() *HTTPSTrigger { + if x, ok := m.GetTrigger().(*CloudFunction_HttpsTrigger); ok { + return x.HttpsTrigger + } + return nil +} + +func (m *CloudFunction) GetEventTrigger() *EventTrigger { + if x, ok := m.GetTrigger().(*CloudFunction_EventTrigger); ok { + return x.EventTrigger + } + return nil +} + +func (m *CloudFunction) GetStatus() CloudFunctionStatus { + if m != nil { + return m.Status + } + return CloudFunctionStatus_STATUS_UNSPECIFIED +} + +func (m *CloudFunction) GetLatestOperation() string { + if m != nil { + return m.LatestOperation + } + return "" +} + +func (m *CloudFunction) GetEntryPoint() string { + if m != nil { + return m.EntryPoint + } + return "" +} + +func (m *CloudFunction) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *CloudFunction) GetAvailableMemoryMb() int32 { + if m != nil { + return m.AvailableMemoryMb + } + return 0 +} + +func (m *CloudFunction) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *CloudFunction) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
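A brief sketch of how the source_code oneof above is populated and read back: each variant is set by assigning its generated wrapper struct, and the getters return the zero value whenever a different variant is held. The function and archive names are made-up placeholders.

import functions "google.golang.org/genproto/googleapis/cloud/functions/v1beta2"

func exampleOneof() {
	fn := &functions.CloudFunction{
		Name: "projects/my-project/locations/us-central1/functions/hello",
		SourceCode: &functions.CloudFunction_SourceArchiveUrl{
			SourceArchiveUrl: "gs://my-bucket/hello.zip", // placeholder archive location
		},
	}
	_ = fn.GetSourceArchiveUrl() // "gs://my-bucket/hello.zip"
	_ = fn.GetSourceRepository() // nil: the other source_code variant is set
}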
+func (*CloudFunction) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CloudFunction_OneofMarshaler, _CloudFunction_OneofUnmarshaler, _CloudFunction_OneofSizer, []interface{}{ + (*CloudFunction_SourceArchiveUrl)(nil), + (*CloudFunction_SourceRepository)(nil), + (*CloudFunction_HttpsTrigger)(nil), + (*CloudFunction_EventTrigger)(nil), + } +} + +func _CloudFunction_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CloudFunction) + // source_code + switch x := m.SourceCode.(type) { + case *CloudFunction_SourceArchiveUrl: + b.EncodeVarint(14<<3 | proto.WireBytes) + b.EncodeStringBytes(x.SourceArchiveUrl) + case *CloudFunction_SourceRepository: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SourceRepository); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudFunction.SourceCode has unexpected type %T", x) + } + // trigger + switch x := m.Trigger.(type) { + case *CloudFunction_HttpsTrigger: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpsTrigger); err != nil { + return err + } + case *CloudFunction_EventTrigger: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EventTrigger); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudFunction.Trigger has unexpected type %T", x) + } + return nil +} + +func _CloudFunction_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CloudFunction) + switch tag { + case 14: // source_code.source_archive_url + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.SourceCode = &CloudFunction_SourceArchiveUrl{x} + return true, err + case 3: // source_code.source_repository + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SourceRepository) + err := b.DecodeMessage(msg) + m.SourceCode = &CloudFunction_SourceRepository{msg} + return true, err + case 6: // trigger.https_trigger + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HTTPSTrigger) + err := b.DecodeMessage(msg) + m.Trigger = &CloudFunction_HttpsTrigger{msg} + return true, err + case 12: // trigger.event_trigger + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EventTrigger) + err := b.DecodeMessage(msg) + m.Trigger = &CloudFunction_EventTrigger{msg} + return true, err + default: + return false, nil + } +} + +func _CloudFunction_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CloudFunction) + // source_code + switch x := m.SourceCode.(type) { + case *CloudFunction_SourceArchiveUrl: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.SourceArchiveUrl))) + n += len(x.SourceArchiveUrl) + case *CloudFunction_SourceRepository: + s := proto.Size(x.SourceRepository) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // trigger + switch x := m.Trigger.(type) { + case *CloudFunction_HttpsTrigger: + s := proto.Size(x.HttpsTrigger) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CloudFunction_EventTrigger: + s := proto.Size(x.EventTrigger) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes HTTPSTrigger, could be used to connect web hooks to function. +type HTTPSTrigger struct { + // Output only. The deployed url for the function. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HTTPSTrigger) Reset() { *m = HTTPSTrigger{} } +func (m *HTTPSTrigger) String() string { return proto.CompactTextString(m) } +func (*HTTPSTrigger) ProtoMessage() {} +func (*HTTPSTrigger) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{1} +} +func (m *HTTPSTrigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HTTPSTrigger.Unmarshal(m, b) +} +func (m *HTTPSTrigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HTTPSTrigger.Marshal(b, m, deterministic) +} +func (dst *HTTPSTrigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_HTTPSTrigger.Merge(dst, src) +} +func (m *HTTPSTrigger) XXX_Size() int { + return xxx_messageInfo_HTTPSTrigger.Size(m) +} +func (m *HTTPSTrigger) XXX_DiscardUnknown() { + xxx_messageInfo_HTTPSTrigger.DiscardUnknown(m) +} + +var xxx_messageInfo_HTTPSTrigger proto.InternalMessageInfo + +func (m *HTTPSTrigger) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// Describes EventTrigger, used to request events be sent from another +// service. +type EventTrigger struct { + // `event_type` names contain the service that is sending an event and the + // kind of event that was fired. Must be of the form + // `providers/*/eventTypes/*` e.g. Directly handle a Message published to + // Google Cloud Pub/Sub `providers/cloud.pubsub/eventTypes/topic.publish` + // + // Handle an object changing in Google Cloud Storage + // `providers/cloud.storage/eventTypes/object.change` + // + // Handle a write to the Firebase Realtime Database + // `providers/firebase.database/eventTypes/data.write` + EventType string `protobuf:"bytes,1,opt,name=event_type,json=eventType,proto3" json:"event_type,omitempty"` + // Which instance of the source's service should send events. E.g. for Pub/Sub + // this would be a Pub/Sub topic at `projects/*/topics/*`. For Google Cloud + // Storage this would be a bucket at `projects/*/buckets/*`. 
For any source + // that only supports one instance per-project, this should be the name of the + // project (`projects/*`) + Resource string `protobuf:"bytes,2,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventTrigger) Reset() { *m = EventTrigger{} } +func (m *EventTrigger) String() string { return proto.CompactTextString(m) } +func (*EventTrigger) ProtoMessage() {} +func (*EventTrigger) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{2} +} +func (m *EventTrigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventTrigger.Unmarshal(m, b) +} +func (m *EventTrigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventTrigger.Marshal(b, m, deterministic) +} +func (dst *EventTrigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventTrigger.Merge(dst, src) +} +func (m *EventTrigger) XXX_Size() int { + return xxx_messageInfo_EventTrigger.Size(m) +} +func (m *EventTrigger) XXX_DiscardUnknown() { + xxx_messageInfo_EventTrigger.DiscardUnknown(m) +} + +var xxx_messageInfo_EventTrigger proto.InternalMessageInfo + +func (m *EventTrigger) GetEventType() string { + if m != nil { + return m.EventType + } + return "" +} + +func (m *EventTrigger) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +// Describes the location of the function source in a remote repository. +type SourceRepository struct { + // URL to the hosted repository where the function is defined. Only paths in + // https://source.developers.google.com domain are supported. The path should + // contain the name of the repository. + RepositoryUrl string `protobuf:"bytes,1,opt,name=repository_url,json=repositoryUrl,proto3" json:"repository_url,omitempty"` + // The path within the repository where the function is defined. The path + // should point to the directory where Cloud Functions files are located. Use + // "/" if the function is defined directly in the root directory of a + // repository. + SourcePath string `protobuf:"bytes,2,opt,name=source_path,json=sourcePath,proto3" json:"source_path,omitempty"` + // The version of a function. Defaults to the latest version of the master + // branch. + // + // Types that are valid to be assigned to Version: + // *SourceRepository_Branch + // *SourceRepository_Tag + // *SourceRepository_Revision + Version isSourceRepository_Version `protobuf_oneof:"version"` + // Output only. The id of the revision that was resolved at the moment of + // function creation or update. For example when a user deployed from a + // branch, it will be the revision id of the latest change on this branch at + // that time. If user deployed from revision then this value will be always + // equal to the revision specified by the user. 
+ DeployedRevision string `protobuf:"bytes,6,opt,name=deployed_revision,json=deployedRevision,proto3" json:"deployed_revision,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceRepository) Reset() { *m = SourceRepository{} } +func (m *SourceRepository) String() string { return proto.CompactTextString(m) } +func (*SourceRepository) ProtoMessage() {} +func (*SourceRepository) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{3} +} +func (m *SourceRepository) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceRepository.Unmarshal(m, b) +} +func (m *SourceRepository) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceRepository.Marshal(b, m, deterministic) +} +func (dst *SourceRepository) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceRepository.Merge(dst, src) +} +func (m *SourceRepository) XXX_Size() int { + return xxx_messageInfo_SourceRepository.Size(m) +} +func (m *SourceRepository) XXX_DiscardUnknown() { + xxx_messageInfo_SourceRepository.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceRepository proto.InternalMessageInfo + +func (m *SourceRepository) GetRepositoryUrl() string { + if m != nil { + return m.RepositoryUrl + } + return "" +} + +func (m *SourceRepository) GetSourcePath() string { + if m != nil { + return m.SourcePath + } + return "" +} + +type isSourceRepository_Version interface { + isSourceRepository_Version() +} + +type SourceRepository_Branch struct { + Branch string `protobuf:"bytes,3,opt,name=branch,proto3,oneof"` +} + +type SourceRepository_Tag struct { + Tag string `protobuf:"bytes,4,opt,name=tag,proto3,oneof"` +} + +type SourceRepository_Revision struct { + Revision string `protobuf:"bytes,5,opt,name=revision,proto3,oneof"` +} + +func (*SourceRepository_Branch) isSourceRepository_Version() {} + +func (*SourceRepository_Tag) isSourceRepository_Version() {} + +func (*SourceRepository_Revision) isSourceRepository_Version() {} + +func (m *SourceRepository) GetVersion() isSourceRepository_Version { + if m != nil { + return m.Version + } + return nil +} + +func (m *SourceRepository) GetBranch() string { + if x, ok := m.GetVersion().(*SourceRepository_Branch); ok { + return x.Branch + } + return "" +} + +func (m *SourceRepository) GetTag() string { + if x, ok := m.GetVersion().(*SourceRepository_Tag); ok { + return x.Tag + } + return "" +} + +func (m *SourceRepository) GetRevision() string { + if x, ok := m.GetVersion().(*SourceRepository_Revision); ok { + return x.Revision + } + return "" +} + +func (m *SourceRepository) GetDeployedRevision() string { + if m != nil { + return m.DeployedRevision + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SourceRepository) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SourceRepository_OneofMarshaler, _SourceRepository_OneofUnmarshaler, _SourceRepository_OneofSizer, []interface{}{ + (*SourceRepository_Branch)(nil), + (*SourceRepository_Tag)(nil), + (*SourceRepository_Revision)(nil), + } +} + +func _SourceRepository_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SourceRepository) + // version + switch x := m.Version.(type) { + case *SourceRepository_Branch: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Branch) + case *SourceRepository_Tag: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Tag) + case *SourceRepository_Revision: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Revision) + case nil: + default: + return fmt.Errorf("SourceRepository.Version has unexpected type %T", x) + } + return nil +} + +func _SourceRepository_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SourceRepository) + switch tag { + case 3: // version.branch + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Version = &SourceRepository_Branch{x} + return true, err + case 4: // version.tag + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Version = &SourceRepository_Tag{x} + return true, err + case 5: // version.revision + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Version = &SourceRepository_Revision{x} + return true, err + default: + return false, nil + } +} + +func _SourceRepository_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SourceRepository) + // version + switch x := m.Version.(type) { + case *SourceRepository_Branch: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Branch))) + n += len(x.Branch) + case *SourceRepository_Tag: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Tag))) + n += len(x.Tag) + case *SourceRepository_Revision: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Revision))) + n += len(x.Revision) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request for the `CreateFunction` method. +type CreateFunctionRequest struct { + // The project and location in which the function should be created, specified + // in the format `projects/*/locations/*` + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // Function to be created. 
+ Function *CloudFunction `protobuf:"bytes,2,opt,name=function,proto3" json:"function,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateFunctionRequest) Reset() { *m = CreateFunctionRequest{} } +func (m *CreateFunctionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateFunctionRequest) ProtoMessage() {} +func (*CreateFunctionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{4} +} +func (m *CreateFunctionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateFunctionRequest.Unmarshal(m, b) +} +func (m *CreateFunctionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateFunctionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateFunctionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateFunctionRequest.Merge(dst, src) +} +func (m *CreateFunctionRequest) XXX_Size() int { + return xxx_messageInfo_CreateFunctionRequest.Size(m) +} +func (m *CreateFunctionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateFunctionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateFunctionRequest proto.InternalMessageInfo + +func (m *CreateFunctionRequest) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *CreateFunctionRequest) GetFunction() *CloudFunction { + if m != nil { + return m.Function + } + return nil +} + +// Request for the `UpdateFunction` method. +type UpdateFunctionRequest struct { + // The name of the function to be updated. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // New version of the function. + Function *CloudFunction `protobuf:"bytes,2,opt,name=function,proto3" json:"function,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFunctionRequest) Reset() { *m = UpdateFunctionRequest{} } +func (m *UpdateFunctionRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFunctionRequest) ProtoMessage() {} +func (*UpdateFunctionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{5} +} +func (m *UpdateFunctionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFunctionRequest.Unmarshal(m, b) +} +func (m *UpdateFunctionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFunctionRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFunctionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFunctionRequest.Merge(dst, src) +} +func (m *UpdateFunctionRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFunctionRequest.Size(m) +} +func (m *UpdateFunctionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFunctionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFunctionRequest proto.InternalMessageInfo + +func (m *UpdateFunctionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateFunctionRequest) GetFunction() *CloudFunction { + if m != nil { + return m.Function + } + return nil +} + +// Request for the `GetFunction` method. +type GetFunctionRequest struct { + // The name of the function which details should be obtained. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFunctionRequest) Reset() { *m = GetFunctionRequest{} } +func (m *GetFunctionRequest) String() string { return proto.CompactTextString(m) } +func (*GetFunctionRequest) ProtoMessage() {} +func (*GetFunctionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{6} +} +func (m *GetFunctionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFunctionRequest.Unmarshal(m, b) +} +func (m *GetFunctionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFunctionRequest.Marshal(b, m, deterministic) +} +func (dst *GetFunctionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFunctionRequest.Merge(dst, src) +} +func (m *GetFunctionRequest) XXX_Size() int { + return xxx_messageInfo_GetFunctionRequest.Size(m) +} +func (m *GetFunctionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFunctionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFunctionRequest proto.InternalMessageInfo + +func (m *GetFunctionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListFunctions` method. +type ListFunctionsRequest struct { + // The project and location from which the function should be listed, + // specified in the format `projects/*/locations/*` + // If you want to list functions in all locations, use "-" in place of a + // location. + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // Maximum number of functions to return per call. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last + // `ListFunctionsResponse`; indicates that + // this is a continuation of a prior `ListFunctions` call, and that the + // system should return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFunctionsRequest) Reset() { *m = ListFunctionsRequest{} } +func (m *ListFunctionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFunctionsRequest) ProtoMessage() {} +func (*ListFunctionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{7} +} +func (m *ListFunctionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFunctionsRequest.Unmarshal(m, b) +} +func (m *ListFunctionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFunctionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFunctionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFunctionsRequest.Merge(dst, src) +} +func (m *ListFunctionsRequest) XXX_Size() int { + return xxx_messageInfo_ListFunctionsRequest.Size(m) +} +func (m *ListFunctionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFunctionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFunctionsRequest proto.InternalMessageInfo + +func (m *ListFunctionsRequest) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *ListFunctionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListFunctionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListFunctions` method. +type ListFunctionsResponse struct { + // The functions that match the request. + Functions []*CloudFunction `protobuf:"bytes,1,rep,name=functions,proto3" json:"functions,omitempty"` + // If not empty, indicates that there may be more functions that match + // the request; this value should be passed in a new + // [google.cloud.functions.v1beta2.ListFunctionsRequest][] + // to get more functions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFunctionsResponse) Reset() { *m = ListFunctionsResponse{} } +func (m *ListFunctionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFunctionsResponse) ProtoMessage() {} +func (*ListFunctionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{8} +} +func (m *ListFunctionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFunctionsResponse.Unmarshal(m, b) +} +func (m *ListFunctionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFunctionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFunctionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFunctionsResponse.Merge(dst, src) +} +func (m *ListFunctionsResponse) XXX_Size() int { + return xxx_messageInfo_ListFunctionsResponse.Size(m) +} +func (m *ListFunctionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFunctionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFunctionsResponse proto.InternalMessageInfo + +func (m *ListFunctionsResponse) GetFunctions() []*CloudFunction { + if m != nil { + return m.Functions + } + return nil +} + +func (m *ListFunctionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `DeleteFunction` method. +type DeleteFunctionRequest struct { + // The name of the function which should be deleted. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteFunctionRequest) Reset() { *m = DeleteFunctionRequest{} } +func (m *DeleteFunctionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteFunctionRequest) ProtoMessage() {} +func (*DeleteFunctionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{9} +} +func (m *DeleteFunctionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteFunctionRequest.Unmarshal(m, b) +} +func (m *DeleteFunctionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteFunctionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteFunctionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteFunctionRequest.Merge(dst, src) +} +func (m *DeleteFunctionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteFunctionRequest.Size(m) +} +func (m *DeleteFunctionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteFunctionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteFunctionRequest proto.InternalMessageInfo + +func (m *DeleteFunctionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `CallFunction` method. +type CallFunctionRequest struct { + // The name of the function to be called. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Input to be passed to the function. 
+ Data string `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallFunctionRequest) Reset() { *m = CallFunctionRequest{} } +func (m *CallFunctionRequest) String() string { return proto.CompactTextString(m) } +func (*CallFunctionRequest) ProtoMessage() {} +func (*CallFunctionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{10} +} +func (m *CallFunctionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallFunctionRequest.Unmarshal(m, b) +} +func (m *CallFunctionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallFunctionRequest.Marshal(b, m, deterministic) +} +func (dst *CallFunctionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallFunctionRequest.Merge(dst, src) +} +func (m *CallFunctionRequest) XXX_Size() int { + return xxx_messageInfo_CallFunctionRequest.Size(m) +} +func (m *CallFunctionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CallFunctionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CallFunctionRequest proto.InternalMessageInfo + +func (m *CallFunctionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CallFunctionRequest) GetData() string { + if m != nil { + return m.Data + } + return "" +} + +// Response of `CallFunction` method. +type CallFunctionResponse struct { + // Execution id of function invocation. + ExecutionId string `protobuf:"bytes,1,opt,name=execution_id,json=executionId,proto3" json:"execution_id,omitempty"` + // Result populated for successful execution of synchronous function. Will + // not be populated if function does not return a result through context. + Result string `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"` + // Either system or user-function generated error. Set if execution + // was not successful. 
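The `CallFunctionRequest`/`CallFunctionResponse` pair here backs the testing-only synchronous invocation exposed by the `CloudFunctionsServiceClient` interface declared further below. A hedged sketch of how a caller might separate RPC failures from function-level failures (the function name and payload are placeholders; the helper itself is hypothetical):

// exampleCallFunction assumes a client has already been constructed from a
// *grpc.ClientConn; it returns the function's result or a descriptive error.
func exampleCallFunction(ctx context.Context, client CloudFunctionsServiceClient) (string, error) {
	resp, err := client.CallFunction(ctx, &CallFunctionRequest{
		Name: "projects/example-project/locations/us-central1/functions/hello", // placeholder
		Data: `{"message":"ping"}`,                                             // input passed to the function
	})
	if err != nil {
		// Transport or RPC-level failure.
		return "", err
	}
	if e := resp.GetError(); e != "" {
		// The RPC succeeded but the execution itself did not.
		return "", fmt.Errorf("execution %s failed: %s", resp.GetExecutionId(), e)
	}
	return resp.GetResult(), nil
}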
+ Error string `protobuf:"bytes,3,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallFunctionResponse) Reset() { *m = CallFunctionResponse{} } +func (m *CallFunctionResponse) String() string { return proto.CompactTextString(m) } +func (*CallFunctionResponse) ProtoMessage() {} +func (*CallFunctionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_functions_86e4424260a7e456, []int{11} +} +func (m *CallFunctionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallFunctionResponse.Unmarshal(m, b) +} +func (m *CallFunctionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallFunctionResponse.Marshal(b, m, deterministic) +} +func (dst *CallFunctionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallFunctionResponse.Merge(dst, src) +} +func (m *CallFunctionResponse) XXX_Size() int { + return xxx_messageInfo_CallFunctionResponse.Size(m) +} +func (m *CallFunctionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CallFunctionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CallFunctionResponse proto.InternalMessageInfo + +func (m *CallFunctionResponse) GetExecutionId() string { + if m != nil { + return m.ExecutionId + } + return "" +} + +func (m *CallFunctionResponse) GetResult() string { + if m != nil { + return m.Result + } + return "" +} + +func (m *CallFunctionResponse) GetError() string { + if m != nil { + return m.Error + } + return "" +} + +func init() { + proto.RegisterType((*CloudFunction)(nil), "google.cloud.functions.v1beta2.CloudFunction") + proto.RegisterType((*HTTPSTrigger)(nil), "google.cloud.functions.v1beta2.HTTPSTrigger") + proto.RegisterType((*EventTrigger)(nil), "google.cloud.functions.v1beta2.EventTrigger") + proto.RegisterType((*SourceRepository)(nil), "google.cloud.functions.v1beta2.SourceRepository") + proto.RegisterType((*CreateFunctionRequest)(nil), "google.cloud.functions.v1beta2.CreateFunctionRequest") + proto.RegisterType((*UpdateFunctionRequest)(nil), "google.cloud.functions.v1beta2.UpdateFunctionRequest") + proto.RegisterType((*GetFunctionRequest)(nil), "google.cloud.functions.v1beta2.GetFunctionRequest") + proto.RegisterType((*ListFunctionsRequest)(nil), "google.cloud.functions.v1beta2.ListFunctionsRequest") + proto.RegisterType((*ListFunctionsResponse)(nil), "google.cloud.functions.v1beta2.ListFunctionsResponse") + proto.RegisterType((*DeleteFunctionRequest)(nil), "google.cloud.functions.v1beta2.DeleteFunctionRequest") + proto.RegisterType((*CallFunctionRequest)(nil), "google.cloud.functions.v1beta2.CallFunctionRequest") + proto.RegisterType((*CallFunctionResponse)(nil), "google.cloud.functions.v1beta2.CallFunctionResponse") + proto.RegisterEnum("google.cloud.functions.v1beta2.CloudFunctionStatus", CloudFunctionStatus_name, CloudFunctionStatus_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudFunctionsServiceClient is the client API for CloudFunctionsService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
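The client constructor and the paging fields described earlier (`page_size`, `page_token`, `next_page_token`) combine in the usual list-everything loop. A rough sketch against the interface declared just below, assuming an already-dialled `*grpc.ClientConn`; the project path and page size are placeholders:

// exampleListAllFunctions is a hypothetical helper that walks every page of
// ListFunctions results until next_page_token comes back empty.
func exampleListAllFunctions(ctx context.Context, conn *grpc.ClientConn) ([]*CloudFunction, error) {
	client := NewCloudFunctionsServiceClient(conn)
	req := &ListFunctionsRequest{
		// Per the field comment above, "-" in place of a location lists
		// functions across all locations of the project.
		Location: "projects/example-project/locations/-", // placeholder project
		PageSize: 100,
	}
	var all []*CloudFunction
	for {
		resp, err := client.ListFunctions(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetFunctions()...)
		if resp.GetNextPageToken() == "" {
			return all, nil
		}
		// Continue the listing from where the previous call left off.
		req.PageToken = resp.GetNextPageToken()
	}
}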
+type CloudFunctionsServiceClient interface { + // Returns a list of functions that belong to the requested project. + ListFunctions(ctx context.Context, in *ListFunctionsRequest, opts ...grpc.CallOption) (*ListFunctionsResponse, error) + // Returns a function with the given name from the requested project. + GetFunction(ctx context.Context, in *GetFunctionRequest, opts ...grpc.CallOption) (*CloudFunction, error) + // Creates a new function. If a function with the given name already exists in + // the specified project, the long running operation will return + // `ALREADY_EXISTS` error. + CreateFunction(ctx context.Context, in *CreateFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates existing function. + UpdateFunction(ctx context.Context, in *UpdateFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a function with the given name from the specified project. If the + // given function is used by some trigger, the trigger will be updated to + // remove this function. + DeleteFunction(ctx context.Context, in *DeleteFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Invokes synchronously deployed function. To be used for testing, very + // limited traffic allowed. + CallFunction(ctx context.Context, in *CallFunctionRequest, opts ...grpc.CallOption) (*CallFunctionResponse, error) +} + +type cloudFunctionsServiceClient struct { + cc *grpc.ClientConn +} + +func NewCloudFunctionsServiceClient(cc *grpc.ClientConn) CloudFunctionsServiceClient { + return &cloudFunctionsServiceClient{cc} +} + +func (c *cloudFunctionsServiceClient) ListFunctions(ctx context.Context, in *ListFunctionsRequest, opts ...grpc.CallOption) (*ListFunctionsResponse, error) { + out := new(ListFunctionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/ListFunctions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudFunctionsServiceClient) GetFunction(ctx context.Context, in *GetFunctionRequest, opts ...grpc.CallOption) (*CloudFunction, error) { + out := new(CloudFunction) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/GetFunction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudFunctionsServiceClient) CreateFunction(ctx context.Context, in *CreateFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/CreateFunction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudFunctionsServiceClient) UpdateFunction(ctx context.Context, in *UpdateFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/UpdateFunction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudFunctionsServiceClient) DeleteFunction(ctx context.Context, in *DeleteFunctionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/DeleteFunction", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudFunctionsServiceClient) CallFunction(ctx context.Context, in *CallFunctionRequest, opts ...grpc.CallOption) (*CallFunctionResponse, error) { + out := new(CallFunctionResponse) + err := c.cc.Invoke(ctx, "/google.cloud.functions.v1beta2.CloudFunctionsService/CallFunction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudFunctionsServiceServer is the server API for CloudFunctionsService service. +type CloudFunctionsServiceServer interface { + // Returns a list of functions that belong to the requested project. + ListFunctions(context.Context, *ListFunctionsRequest) (*ListFunctionsResponse, error) + // Returns a function with the given name from the requested project. + GetFunction(context.Context, *GetFunctionRequest) (*CloudFunction, error) + // Creates a new function. If a function with the given name already exists in + // the specified project, the long running operation will return + // `ALREADY_EXISTS` error. + CreateFunction(context.Context, *CreateFunctionRequest) (*longrunning.Operation, error) + // Updates existing function. + UpdateFunction(context.Context, *UpdateFunctionRequest) (*longrunning.Operation, error) + // Deletes a function with the given name from the specified project. If the + // given function is used by some trigger, the trigger will be updated to + // remove this function. + DeleteFunction(context.Context, *DeleteFunctionRequest) (*longrunning.Operation, error) + // Invokes synchronously deployed function. To be used for testing, very + // limited traffic allowed. + CallFunction(context.Context, *CallFunctionRequest) (*CallFunctionResponse, error) +} + +func RegisterCloudFunctionsServiceServer(s *grpc.Server, srv CloudFunctionsServiceServer) { + s.RegisterService(&_CloudFunctionsService_serviceDesc, srv) +} + +func _CloudFunctionsService_ListFunctions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFunctionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).ListFunctions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/ListFunctions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).ListFunctions(ctx, req.(*ListFunctionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudFunctionsService_GetFunction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFunctionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).GetFunction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/GetFunction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).GetFunction(ctx, req.(*GetFunctionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudFunctionsService_CreateFunction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateFunctionRequest) + if 
err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).CreateFunction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/CreateFunction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).CreateFunction(ctx, req.(*CreateFunctionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudFunctionsService_UpdateFunction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFunctionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).UpdateFunction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/UpdateFunction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).UpdateFunction(ctx, req.(*UpdateFunctionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudFunctionsService_DeleteFunction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteFunctionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).DeleteFunction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/DeleteFunction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).DeleteFunction(ctx, req.(*DeleteFunctionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudFunctionsService_CallFunction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CallFunctionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudFunctionsServiceServer).CallFunction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.functions.v1beta2.CloudFunctionsService/CallFunction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudFunctionsServiceServer).CallFunction(ctx, req.(*CallFunctionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudFunctionsService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.functions.v1beta2.CloudFunctionsService", + HandlerType: (*CloudFunctionsServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListFunctions", + Handler: _CloudFunctionsService_ListFunctions_Handler, + }, + { + MethodName: "GetFunction", + Handler: _CloudFunctionsService_GetFunction_Handler, + }, + { + MethodName: "CreateFunction", + Handler: _CloudFunctionsService_CreateFunction_Handler, + }, + { + MethodName: "UpdateFunction", + Handler: _CloudFunctionsService_UpdateFunction_Handler, + }, + { + MethodName: "DeleteFunction", + Handler: _CloudFunctionsService_DeleteFunction_Handler, + }, + { + MethodName: "CallFunction", + Handler: _CloudFunctionsService_CallFunction_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: 
"google/cloud/functions/v1beta2/functions.proto", +} + +func init() { + proto.RegisterFile("google/cloud/functions/v1beta2/functions.proto", fileDescriptor_functions_86e4424260a7e456) +} + +var fileDescriptor_functions_86e4424260a7e456 = []byte{ + // 1216 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0x4d, 0x73, 0xdb, 0x44, + 0x18, 0xae, 0xf2, 0xe1, 0xc6, 0xaf, 0x3f, 0xe2, 0x6e, 0x9b, 0x8e, 0x30, 0x2d, 0x0d, 0x62, 0x80, + 0x90, 0x82, 0x0d, 0x6e, 0x80, 0x99, 0x7e, 0x30, 0x24, 0xb1, 0x93, 0x78, 0x9a, 0xb6, 0x1e, 0xd9, + 0x39, 0x94, 0x8b, 0x66, 0x2d, 0x6f, 0x15, 0x81, 0xac, 0x15, 0xab, 0x95, 0xa7, 0x29, 0x53, 0x0e, + 0xcc, 0x70, 0xe3, 0xc6, 0x3f, 0xe8, 0x99, 0xe1, 0x07, 0x30, 0xc3, 0x9d, 0x3b, 0x57, 0x86, 0x13, + 0x3f, 0x84, 0xd9, 0xd5, 0x4a, 0x96, 0x93, 0x14, 0x25, 0x19, 0x6e, 0xda, 0xe7, 0xfd, 0x7a, 0x76, + 0xdf, 0x67, 0x5f, 0xaf, 0xa1, 0xe1, 0x50, 0xea, 0x78, 0xa4, 0x69, 0x7b, 0x34, 0x1a, 0x35, 0x9f, + 0x45, 0xbe, 0xcd, 0x5d, 0xea, 0x87, 0xcd, 0xc9, 0x27, 0x43, 0xc2, 0x71, 0x6b, 0x8a, 0x34, 0x02, + 0x46, 0x39, 0x45, 0x6f, 0xc5, 0xfe, 0x0d, 0xe9, 0xdf, 0x98, 0x5a, 0x95, 0x7f, 0xfd, 0x86, 0xca, + 0x87, 0x03, 0xb7, 0x89, 0x7d, 0x9f, 0x72, 0x9c, 0x89, 0xae, 0xaf, 0x64, 0xad, 0x11, 0x3f, 0x54, + 0x70, 0x33, 0x87, 0x04, 0x0d, 0x08, 0x9b, 0xc9, 0xf3, 0x8e, 0x0a, 0xf0, 0xa8, 0xef, 0xb0, 0xc8, + 0xf7, 0x5d, 0xdf, 0x39, 0xe9, 0xa4, 0xa8, 0x36, 0xe5, 0x6a, 0x18, 0x3d, 0x6b, 0x8e, 0xa2, 0xd8, + 0x41, 0xd9, 0x6f, 0x1d, 0xb7, 0x73, 0x77, 0x4c, 0x42, 0x8e, 0xc7, 0x41, 0xec, 0x60, 0xfc, 0xb5, + 0x08, 0x95, 0x6d, 0x41, 0x69, 0x47, 0x31, 0x42, 0x08, 0x16, 0x7c, 0x3c, 0x26, 0xba, 0xb6, 0xaa, + 0xad, 0x15, 0x4d, 0xf9, 0x8d, 0x1a, 0x80, 0x42, 0x1a, 0x31, 0x9b, 0x58, 0x98, 0xd9, 0x87, 0xee, + 0x84, 0x58, 0x11, 0xf3, 0xf4, 0xaa, 0xf0, 0xd8, 0xbb, 0x64, 0xd6, 0x62, 0xdb, 0x66, 0x6c, 0x3a, + 0x60, 0x1e, 0xb2, 0xe0, 0x8a, 0xf2, 0x67, 0x24, 0xa0, 0xa1, 0xcb, 0x29, 0x3b, 0xd2, 0xe7, 0x57, + 0xb5, 0xb5, 0x52, 0xeb, 0xe3, 0xc6, 0x7f, 0x9f, 0x6e, 0xa3, 0x2f, 0x03, 0xcd, 0x34, 0x6e, 0x5a, + 0x60, 0x8a, 0xa1, 0x3e, 0x54, 0x0e, 0x39, 0x0f, 0x42, 0x8b, 0x33, 0xd7, 0x71, 0x08, 0xd3, 0x0b, + 0x32, 0xf9, 0x87, 0x79, 0xc9, 0xf7, 0x06, 0x83, 0x5e, 0x7f, 0x10, 0xc7, 0xec, 0x69, 0x66, 0x59, + 0x26, 0x51, 0x6b, 0x91, 0x94, 0x4c, 0x88, 0xcf, 0xd3, 0xa4, 0xe5, 0xb3, 0x25, 0xed, 0x88, 0xa0, + 0x4c, 0x52, 0x92, 0x59, 0xa3, 0x87, 0x50, 0x08, 0x39, 0xe6, 0x51, 0xa8, 0x5f, 0x5e, 0xd5, 0xd6, + 0xaa, 0xad, 0x3b, 0x79, 0xd9, 0x66, 0xba, 0xd1, 0x97, 0xa1, 0xa6, 0x4a, 0x81, 0x3e, 0x80, 0x9a, + 0x87, 0x39, 0x09, 0xb9, 0x95, 0x2a, 0x41, 0x5f, 0x92, 0x7d, 0x5a, 0x8e, 0xf1, 0x27, 0x09, 0x8c, + 0x6e, 0x41, 0x89, 0xf8, 0x9c, 0x1d, 0x59, 0x01, 0x75, 0x7d, 0xae, 0x17, 0xa5, 0x17, 0x48, 0xa8, + 0x27, 0x10, 0x74, 0x07, 0x2e, 0x0b, 0x31, 0xd0, 0x88, 0xeb, 0x20, 0xf7, 0xf9, 0x46, 0xc2, 0x2c, + 0x11, 0x4b, 0xa3, 0xad, 0xc4, 0x64, 0x26, 0x9e, 0xa8, 0x01, 0x57, 0xf1, 0x04, 0xbb, 0x1e, 0x1e, + 0x7a, 0xc4, 0x1a, 0x93, 0x31, 0x65, 0x47, 0xd6, 0x78, 0xa8, 0x97, 0x56, 0xb5, 0xb5, 0x45, 0xf3, + 0x4a, 0x6a, 0x7a, 0x24, 0x2d, 0x8f, 0x86, 0xe8, 0x7d, 0x58, 0x0e, 0x09, 0x9b, 0xb8, 0x42, 0x39, + 0xb6, 0x4d, 0x23, 0x9f, 0xeb, 0x15, 0xc9, 0xa4, 0xaa, 0xe0, 0xcd, 0x18, 0x45, 0xf7, 0xa0, 0x14, + 0x05, 0x23, 0xcc, 0x89, 0x25, 0x4a, 0xe9, 0xcb, 0x92, 0x51, 0xfd, 0x04, 0xa3, 0x41, 0x22, 0x5f, + 0x13, 0x62, 0x77, 0x01, 0x6c, 0x55, 0xa0, 0xa4, 0xe4, 0x66, 0xd3, 0x11, 0xd9, 0x2a, 0xc2, 0x65, + 0xd5, 0x41, 0x63, 0x15, 0xca, 0xd9, 0x96, 0xa3, 0x1a, 0xcc, 0x0b, 0xe5, 0xc6, 0xda, 0x16, 0x9f, + 0x46, 0x17, 0xca, 0xd9, 
0xfe, 0xa1, 0x9b, 0x00, 0x4a, 0x04, 0x47, 0x41, 0x72, 0x09, 0x8a, 0x71, + 0x47, 0x8f, 0x02, 0x82, 0xea, 0xb0, 0xc4, 0x48, 0x5c, 0x4c, 0x9f, 0x93, 0xc6, 0x74, 0x6d, 0xfc, + 0xad, 0x41, 0xed, 0xb8, 0x7a, 0xd1, 0xbb, 0x50, 0x9d, 0xde, 0x01, 0x6b, 0x5a, 0xbc, 0x32, 0x45, + 0xc5, 0x8d, 0xb9, 0x95, 0x6e, 0x21, 0xc0, 0xfc, 0x50, 0xa5, 0x86, 0x18, 0xea, 0x61, 0x7e, 0x88, + 0x74, 0x28, 0x0c, 0x19, 0xf6, 0xed, 0x43, 0x79, 0x8f, 0xc4, 0xb5, 0x53, 0x6b, 0x84, 0x60, 0x9e, + 0x63, 0x47, 0x5f, 0x50, 0xb0, 0x58, 0xa0, 0x1b, 0x82, 0xe6, 0xc4, 0x0d, 0x85, 0x40, 0x16, 0x95, + 0x21, 0x45, 0xd0, 0x6d, 0xb8, 0x32, 0x22, 0x81, 0x47, 0x8f, 0xc8, 0xc8, 0x4a, 0xdd, 0x0a, 0xb2, + 0x64, 0x2d, 0x31, 0x98, 0x0a, 0x17, 0xa7, 0x39, 0x21, 0x4c, 0x7c, 0x1a, 0xdf, 0xc3, 0xca, 0x36, + 0x23, 0x98, 0x93, 0x44, 0x9e, 0x26, 0xf9, 0x36, 0x22, 0x21, 0x17, 0xa7, 0xe2, 0x51, 0x3b, 0xd6, + 0x63, 0xbc, 0xbd, 0x74, 0x8d, 0xba, 0xb0, 0x94, 0x88, 0x5c, 0x6e, 0xab, 0xd4, 0xfa, 0xe8, 0x5c, + 0x57, 0xc0, 0x4c, 0xc3, 0x8d, 0x09, 0xac, 0x1c, 0xc8, 0xae, 0x1f, 0xaf, 0x7f, 0xda, 0xcc, 0xfa, + 0x1f, 0xeb, 0xae, 0x01, 0xda, 0x25, 0xfc, 0x0c, 0x45, 0x0d, 0x1f, 0xae, 0xed, 0xbb, 0x61, 0xea, + 0x1a, 0x9e, 0xe5, 0x80, 0xde, 0x84, 0x62, 0x80, 0x1d, 0x62, 0x85, 0xee, 0x8b, 0x58, 0x53, 0x8b, + 0xe6, 0x92, 0x00, 0xfa, 0xee, 0x0b, 0x22, 0xe4, 0x28, 0x8d, 0x9c, 0x7e, 0x43, 0xfc, 0xb8, 0xf5, + 0xa6, 0x74, 0x1f, 0x08, 0xc0, 0xf8, 0x49, 0x83, 0x95, 0x63, 0x05, 0xc3, 0x80, 0xfa, 0x21, 0x41, + 0x0f, 0xa1, 0x98, 0x6e, 0x50, 0xd7, 0x56, 0xe7, 0xcf, 0xbf, 0xff, 0x69, 0x3c, 0x7a, 0x0f, 0x96, + 0x7d, 0xf2, 0x9c, 0x5b, 0x19, 0x2a, 0xb1, 0x42, 0x2b, 0x02, 0xee, 0xa5, 0x74, 0x6e, 0xc3, 0x4a, + 0x9b, 0x78, 0xe4, 0x4c, 0x0d, 0x32, 0x1e, 0xc0, 0xd5, 0x6d, 0xec, 0x79, 0x67, 0xe9, 0x25, 0x82, + 0x85, 0x11, 0xe6, 0x58, 0x15, 0x95, 0xdf, 0x86, 0x03, 0xd7, 0x66, 0xc3, 0xd5, 0xc6, 0xdf, 0x86, + 0x32, 0x79, 0x4e, 0xec, 0x48, 0x80, 0x96, 0x3b, 0x52, 0x79, 0x4a, 0x29, 0xd6, 0x1d, 0xa1, 0xeb, + 0x50, 0x60, 0x24, 0x8c, 0x3c, 0xae, 0x12, 0xaa, 0x15, 0xba, 0x06, 0x8b, 0x84, 0x31, 0xca, 0xd4, + 0x39, 0xc7, 0x8b, 0x75, 0x0c, 0x57, 0x4f, 0x99, 0xc9, 0xe8, 0x3a, 0xa0, 0xfe, 0x60, 0x73, 0x70, + 0xd0, 0xb7, 0x0e, 0x1e, 0xf7, 0x7b, 0x9d, 0xed, 0xee, 0x4e, 0xb7, 0xd3, 0xae, 0x5d, 0x42, 0x45, + 0x58, 0x34, 0x3b, 0x9b, 0xed, 0xa7, 0x35, 0x0d, 0x01, 0x14, 0x76, 0x36, 0xbb, 0xfb, 0x9d, 0x76, + 0x6d, 0x0e, 0x55, 0xa0, 0xd8, 0xee, 0xf4, 0xf6, 0x9f, 0x3c, 0xed, 0x3e, 0xde, 0xad, 0xcd, 0xa3, + 0x32, 0x2c, 0xb5, 0x3b, 0xfb, 0x9d, 0x81, 0x58, 0x2d, 0xb4, 0xfe, 0x58, 0x82, 0x95, 0x99, 0x1a, + 0x61, 0x3f, 0x1e, 0x8f, 0xe8, 0x37, 0x0d, 0x2a, 0x33, 0x0d, 0x46, 0x1b, 0x79, 0x5d, 0x3c, 0x4d, + 0x80, 0xf5, 0x4f, 0xcf, 0x19, 0x15, 0x1f, 0xa6, 0x71, 0xff, 0x87, 0x3f, 0xff, 0xf9, 0x79, 0xee, + 0x33, 0xb4, 0x91, 0xbe, 0x53, 0xbe, 0x4b, 0x74, 0xfb, 0x20, 0x60, 0xf4, 0x6b, 0x62, 0xf3, 0xb0, + 0xb9, 0xde, 0x4c, 0xb0, 0xb0, 0xb9, 0xfe, 0x72, 0xfa, 0xb6, 0x41, 0xbf, 0x68, 0x50, 0xca, 0x5c, + 0x1c, 0xd4, 0xca, 0x23, 0x71, 0xf2, 0x96, 0xd5, 0xcf, 0x27, 0x5a, 0xe3, 0xae, 0x24, 0xbc, 0x81, + 0x5a, 0x53, 0xc2, 0x42, 0x41, 0xaf, 0x21, 0x9b, 0x79, 0x87, 0xad, 0xbf, 0x44, 0xbf, 0x6a, 0x50, + 0x9d, 0x9d, 0x6f, 0x28, 0xf7, 0xd8, 0x4e, 0x9d, 0x87, 0xf5, 0x9b, 0x49, 0x58, 0xe6, 0xf1, 0xd6, + 0x48, 0x7f, 0x9b, 0x8d, 0x1d, 0x49, 0xf2, 0x4b, 0xe3, 0x42, 0xa7, 0x7a, 0x37, 0x9d, 0x4b, 0xe2, + 0x7c, 0xab, 0xb3, 0x03, 0x31, 0x9f, 0xf0, 0xa9, 0x03, 0x34, 0x8f, 0x70, 0x5b, 0x12, 0xfe, 0xa2, + 0x7e, 0x81, 0x53, 0xcd, 0xd0, 0x7d, 0xa5, 0x41, 0x75, 0x76, 0x3c, 0xe4, 0xd3, 0x3d, 0x75, 0x9c, + 0xe4, 0xd1, 0x55, 0x22, 0x58, 0xbf, 0x88, 0x08, 
0x7e, 0xd7, 0xa0, 0x9c, 0x9d, 0x2b, 0x28, 0xff, + 0xc1, 0x76, 0x72, 0x88, 0xd5, 0x37, 0xce, 0x17, 0xa4, 0x6e, 0xdb, 0x96, 0xe4, 0x7d, 0xdf, 0xf8, + 0xfc, 0x02, 0xc7, 0x6c, 0x63, 0xcf, 0xbb, 0xab, 0xad, 0x6f, 0xfd, 0xa8, 0x81, 0x61, 0xd3, 0x71, + 0x4e, 0xfd, 0xad, 0x6a, 0x7a, 0xd7, 0x7b, 0xe2, 0x6d, 0xd5, 0xd3, 0xbe, 0xda, 0x55, 0x11, 0x0e, + 0xf5, 0xb0, 0xef, 0x34, 0x28, 0x73, 0x9a, 0x0e, 0xf1, 0xe5, 0xcb, 0x4b, 0xfd, 0x77, 0xc1, 0x81, + 0x1b, 0xbe, 0xee, 0xff, 0xcb, 0xbd, 0x14, 0x79, 0x35, 0x37, 0xbf, 0xbb, 0xbd, 0x33, 0x2c, 0xc8, + 0xc8, 0x3b, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x22, 0xa1, 0xda, 0x9f, 0x7d, 0x0d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/operations.pb.go new file mode 100644 index 0000000..1a1d762 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/functions/v1beta2/operations.pb.go @@ -0,0 +1,148 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/functions/v1beta2/operations.proto + +package functions // import "google.golang.org/genproto/googleapis/cloud/functions/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A type of an operation. +type OperationType int32 + +const ( + // Unknown operation type. + OperationType_OPERATION_UNSPECIFIED OperationType = 0 + // Triggered by CreateFunction call + OperationType_CREATE_FUNCTION OperationType = 1 + // Triggered by UpdateFunction call + OperationType_UPDATE_FUNCTION OperationType = 2 + // Triggered by DeleteFunction call. + OperationType_DELETE_FUNCTION OperationType = 3 +) + +var OperationType_name = map[int32]string{ + 0: "OPERATION_UNSPECIFIED", + 1: "CREATE_FUNCTION", + 2: "UPDATE_FUNCTION", + 3: "DELETE_FUNCTION", +} +var OperationType_value = map[string]int32{ + "OPERATION_UNSPECIFIED": 0, + "CREATE_FUNCTION": 1, + "UPDATE_FUNCTION": 2, + "DELETE_FUNCTION": 3, +} + +func (x OperationType) String() string { + return proto.EnumName(OperationType_name, int32(x)) +} +func (OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operations_0d25ce4b01267ed0, []int{0} +} + +// Metadata describing an [Operation][google.longrunning.Operation] +type OperationMetadataV1Beta2 struct { + // Target of the operation - for example + // projects/project-1/locations/region-1/functions/function-1 + Target string `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + // Type of operation. + Type OperationType `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.functions.v1beta2.OperationType" json:"type,omitempty"` + // The original request that started the operation. 
+ Request *any.Any `protobuf:"bytes,3,opt,name=request,proto3" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadataV1Beta2) Reset() { *m = OperationMetadataV1Beta2{} } +func (m *OperationMetadataV1Beta2) String() string { return proto.CompactTextString(m) } +func (*OperationMetadataV1Beta2) ProtoMessage() {} +func (*OperationMetadataV1Beta2) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_0d25ce4b01267ed0, []int{0} +} +func (m *OperationMetadataV1Beta2) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadataV1Beta2.Unmarshal(m, b) +} +func (m *OperationMetadataV1Beta2) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadataV1Beta2.Marshal(b, m, deterministic) +} +func (dst *OperationMetadataV1Beta2) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadataV1Beta2.Merge(dst, src) +} +func (m *OperationMetadataV1Beta2) XXX_Size() int { + return xxx_messageInfo_OperationMetadataV1Beta2.Size(m) +} +func (m *OperationMetadataV1Beta2) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadataV1Beta2.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadataV1Beta2 proto.InternalMessageInfo + +func (m *OperationMetadataV1Beta2) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *OperationMetadataV1Beta2) GetType() OperationType { + if m != nil { + return m.Type + } + return OperationType_OPERATION_UNSPECIFIED +} + +func (m *OperationMetadataV1Beta2) GetRequest() *any.Any { + if m != nil { + return m.Request + } + return nil +} + +func init() { + proto.RegisterType((*OperationMetadataV1Beta2)(nil), "google.cloud.functions.v1beta2.OperationMetadataV1Beta2") + proto.RegisterEnum("google.cloud.functions.v1beta2.OperationType", OperationType_name, OperationType_value) +} + +func init() { + proto.RegisterFile("google/cloud/functions/v1beta2/operations.proto", fileDescriptor_operations_0d25ce4b01267ed0) +} + +var fileDescriptor_operations_0d25ce4b01267ed0 = []byte{ + // 333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0x4f, 0x4f, 0xf2, 0x30, + 0x1c, 0xc7, 0x9f, 0xc2, 0x13, 0x8c, 0x35, 0x2a, 0x99, 0x7f, 0x32, 0x88, 0x31, 0x84, 0x13, 0x31, + 0xb1, 0x0d, 0x78, 0xf4, 0x34, 0xa0, 0x18, 0x12, 0x85, 0x65, 0x82, 0x07, 0x2f, 0xa4, 0x40, 0x69, + 0x96, 0xcc, 0xb6, 0x6e, 0x9d, 0xc9, 0x5e, 0x82, 0x2f, 0xc4, 0xf7, 0x69, 0x56, 0xba, 0x05, 0x0e, + 0xea, 0xb1, 0x9f, 0xf6, 0xf3, 0xed, 0xf7, 0x97, 0x1f, 0xc4, 0x5c, 0x4a, 0x1e, 0x31, 0xbc, 0x8a, + 0x64, 0xba, 0xc6, 0x9b, 0x54, 0xac, 0x74, 0x28, 0x45, 0x82, 0x3f, 0xba, 0x4b, 0xa6, 0x69, 0x0f, + 0x4b, 0xc5, 0x62, 0x6a, 0x10, 0x52, 0xb1, 0xd4, 0xd2, 0xb9, 0xde, 0x0a, 0xc8, 0x08, 0xa8, 0x14, + 0x90, 0x15, 0x9a, 0x57, 0x36, 0x90, 0xaa, 0x10, 0x53, 0x21, 0xa4, 0xde, 0xb5, 0x9b, 0x0d, 0x7b, + 0x6b, 0x4e, 0xcb, 0x74, 0x83, 0xa9, 0xc8, 0xb6, 0x57, 0xed, 0x2f, 0x00, 0xdd, 0x69, 0xf1, 0xdb, + 0x13, 0xd3, 0x74, 0x4d, 0x35, 0x7d, 0xe9, 0xf6, 0xf3, 0x54, 0xe7, 0x12, 0xd6, 0x34, 0x8d, 0x39, + 0xd3, 0x2e, 0x68, 0x81, 0xce, 0x61, 0x60, 0x4f, 0x8e, 0x07, 0xff, 0xeb, 0x4c, 0x31, 0xb7, 0xd2, + 0x02, 0x9d, 0x93, 0xde, 0x2d, 0xfa, 0xbd, 0x1c, 0x2a, 0xf3, 0x67, 0x99, 0x62, 0x81, 0x51, 0x1d, + 0x04, 0x0f, 0x62, 0xf6, 0x9e, 0xb2, 0x44, 0xbb, 0xd5, 0x16, 0xe8, 0x1c, 0xf5, 0xce, 0x8b, 0x94, + 0xa2, 0x24, 0xf2, 0x44, 0x16, 0x14, 0x8f, 0x6e, 0x42, 0x78, 0xbc, 0x17, 0xe3, 0x34, 0xe0, 0xc5, + 
0xd4, 0x27, 0x81, 0x37, 0x1b, 0x4f, 0x27, 0x8b, 0xf9, 0xe4, 0xd9, 0x27, 0x83, 0xf1, 0x68, 0x4c, + 0x86, 0xf5, 0x7f, 0xce, 0x19, 0x3c, 0x1d, 0x04, 0xc4, 0x9b, 0x91, 0xc5, 0x68, 0x3e, 0x19, 0xe4, + 0x0f, 0xea, 0x20, 0x87, 0x73, 0x7f, 0xb8, 0x07, 0x2b, 0x39, 0x1c, 0x92, 0x47, 0xb2, 0x0b, 0xab, + 0xfd, 0x4f, 0x00, 0xdb, 0x2b, 0xf9, 0xf6, 0xc7, 0x54, 0x7d, 0x77, 0x54, 0xa0, 0xb2, 0x58, 0xe2, + 0xe7, 0xdd, 0x7d, 0xf0, 0xfa, 0x60, 0x5d, 0x2e, 0x23, 0x2a, 0x38, 0x92, 0x31, 0xc7, 0x9c, 0x09, + 0x33, 0x99, 0x5d, 0x3d, 0x55, 0x61, 0xf2, 0xd3, 0xfa, 0xef, 0x4b, 0xb2, 0xac, 0x19, 0xe7, 0xee, + 0x3b, 0x00, 0x00, 0xff, 0xff, 0x3c, 0x8a, 0xb1, 0x83, 0x31, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/device_manager.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/device_manager.pb.go new file mode 100644 index 0000000..4cd80d9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/device_manager.pb.go @@ -0,0 +1,2287 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/iot/v1/device_manager.proto + +package iot // import "google.golang.org/genproto/googleapis/cloud/iot/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import _ "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for `CreateDeviceRegistry`. +type CreateDeviceRegistryRequest struct { + // The project and cloud region where this device registry must be created. + // For example, `projects/example-project/locations/us-central1`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The device registry. The field `name` must be empty. The server will + // generate that field from the device registry `id` provided and the + // `parent` field. 
+ DeviceRegistry *DeviceRegistry `protobuf:"bytes,2,opt,name=device_registry,json=deviceRegistry,proto3" json:"device_registry,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDeviceRegistryRequest) Reset() { *m = CreateDeviceRegistryRequest{} } +func (m *CreateDeviceRegistryRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDeviceRegistryRequest) ProtoMessage() {} +func (*CreateDeviceRegistryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{0} +} +func (m *CreateDeviceRegistryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDeviceRegistryRequest.Unmarshal(m, b) +} +func (m *CreateDeviceRegistryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDeviceRegistryRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDeviceRegistryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDeviceRegistryRequest.Merge(dst, src) +} +func (m *CreateDeviceRegistryRequest) XXX_Size() int { + return xxx_messageInfo_CreateDeviceRegistryRequest.Size(m) +} +func (m *CreateDeviceRegistryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDeviceRegistryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDeviceRegistryRequest proto.InternalMessageInfo + +func (m *CreateDeviceRegistryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDeviceRegistryRequest) GetDeviceRegistry() *DeviceRegistry { + if m != nil { + return m.DeviceRegistry + } + return nil +} + +// Request for `GetDeviceRegistry`. +type GetDeviceRegistryRequest struct { + // The name of the device registry. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDeviceRegistryRequest) Reset() { *m = GetDeviceRegistryRequest{} } +func (m *GetDeviceRegistryRequest) String() string { return proto.CompactTextString(m) } +func (*GetDeviceRegistryRequest) ProtoMessage() {} +func (*GetDeviceRegistryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{1} +} +func (m *GetDeviceRegistryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDeviceRegistryRequest.Unmarshal(m, b) +} +func (m *GetDeviceRegistryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDeviceRegistryRequest.Marshal(b, m, deterministic) +} +func (dst *GetDeviceRegistryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDeviceRegistryRequest.Merge(dst, src) +} +func (m *GetDeviceRegistryRequest) XXX_Size() int { + return xxx_messageInfo_GetDeviceRegistryRequest.Size(m) +} +func (m *GetDeviceRegistryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDeviceRegistryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDeviceRegistryRequest proto.InternalMessageInfo + +func (m *GetDeviceRegistryRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for `DeleteDeviceRegistry`. +type DeleteDeviceRegistryRequest struct { + // The name of the device registry. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDeviceRegistryRequest) Reset() { *m = DeleteDeviceRegistryRequest{} } +func (m *DeleteDeviceRegistryRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDeviceRegistryRequest) ProtoMessage() {} +func (*DeleteDeviceRegistryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{2} +} +func (m *DeleteDeviceRegistryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDeviceRegistryRequest.Unmarshal(m, b) +} +func (m *DeleteDeviceRegistryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDeviceRegistryRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDeviceRegistryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDeviceRegistryRequest.Merge(dst, src) +} +func (m *DeleteDeviceRegistryRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDeviceRegistryRequest.Size(m) +} +func (m *DeleteDeviceRegistryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDeviceRegistryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDeviceRegistryRequest proto.InternalMessageInfo + +func (m *DeleteDeviceRegistryRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for `UpdateDeviceRegistry`. +type UpdateDeviceRegistryRequest struct { + // The new values for the device registry. The `id` field must be empty, and + // the `name` field must indicate the path of the resource. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. + DeviceRegistry *DeviceRegistry `protobuf:"bytes,1,opt,name=device_registry,json=deviceRegistry,proto3" json:"device_registry,omitempty"` + // Only updates the `device_registry` fields indicated by this mask. + // The field mask must not be empty, and it must not contain fields that + // are immutable or only set by the server. + // Mutable top-level fields: `event_notification_config`, `http_config`, + // `mqtt_config`, and `state_notification_config`. 
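The update mask described here is what scopes a partial update: only the top-level registry fields named in it are touched, and it may not be empty or name immutable fields. A rough sketch of assembling such a request against the `UpdateMask` field declared just below; the registry path is a placeholder and `DeviceRegistry` is defined elsewhere in this package:

// exampleUpdateRegistryRequest is a hypothetical helper that updates only the
// registry's mqtt_config, leaving all other fields untouched.
func exampleUpdateRegistryRequest() *UpdateDeviceRegistryRequest {
	return &UpdateDeviceRegistryRequest{
		DeviceRegistry: &DeviceRegistry{
			Name: "projects/example-project/locations/us-central1/registries/my-registry", // placeholder path
			// The new value for mqtt_config (a mutable top-level field per the
			// comment above) would be set here alongside Name.
		},
		UpdateMask: &field_mask.FieldMask{
			Paths: []string{"mqtt_config"},
		},
	}
}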
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDeviceRegistryRequest) Reset() { *m = UpdateDeviceRegistryRequest{} } +func (m *UpdateDeviceRegistryRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDeviceRegistryRequest) ProtoMessage() {} +func (*UpdateDeviceRegistryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{3} +} +func (m *UpdateDeviceRegistryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDeviceRegistryRequest.Unmarshal(m, b) +} +func (m *UpdateDeviceRegistryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDeviceRegistryRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDeviceRegistryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDeviceRegistryRequest.Merge(dst, src) +} +func (m *UpdateDeviceRegistryRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDeviceRegistryRequest.Size(m) +} +func (m *UpdateDeviceRegistryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDeviceRegistryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDeviceRegistryRequest proto.InternalMessageInfo + +func (m *UpdateDeviceRegistryRequest) GetDeviceRegistry() *DeviceRegistry { + if m != nil { + return m.DeviceRegistry + } + return nil +} + +func (m *UpdateDeviceRegistryRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request for `ListDeviceRegistries`. +type ListDeviceRegistriesRequest struct { + // The project and cloud region path. For example, + // `projects/example-project/locations/us-central1`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of registries to return in the response. If this value + // is zero, the service will select a default size. A call may return fewer + // objects than requested. A non-empty `next_page_token` in the response + // indicates that more data is available. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListDeviceRegistriesResponse`; indicates + // that this is a continuation of a prior `ListDeviceRegistries` call and + // the system should return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceRegistriesRequest) Reset() { *m = ListDeviceRegistriesRequest{} } +func (m *ListDeviceRegistriesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDeviceRegistriesRequest) ProtoMessage() {} +func (*ListDeviceRegistriesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{4} +} +func (m *ListDeviceRegistriesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceRegistriesRequest.Unmarshal(m, b) +} +func (m *ListDeviceRegistriesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceRegistriesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDeviceRegistriesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceRegistriesRequest.Merge(dst, src) +} +func (m *ListDeviceRegistriesRequest) XXX_Size() int { + return xxx_messageInfo_ListDeviceRegistriesRequest.Size(m) +} +func (m *ListDeviceRegistriesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceRegistriesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceRegistriesRequest proto.InternalMessageInfo + +func (m *ListDeviceRegistriesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDeviceRegistriesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDeviceRegistriesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for `ListDeviceRegistries`. +type ListDeviceRegistriesResponse struct { + // The registries that matched the query. + DeviceRegistries []*DeviceRegistry `protobuf:"bytes,1,rep,name=device_registries,json=deviceRegistries,proto3" json:"device_registries,omitempty"` + // If not empty, indicates that there may be more registries that match the + // request; this value should be passed in a new + // `ListDeviceRegistriesRequest`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceRegistriesResponse) Reset() { *m = ListDeviceRegistriesResponse{} } +func (m *ListDeviceRegistriesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDeviceRegistriesResponse) ProtoMessage() {} +func (*ListDeviceRegistriesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{5} +} +func (m *ListDeviceRegistriesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceRegistriesResponse.Unmarshal(m, b) +} +func (m *ListDeviceRegistriesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceRegistriesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDeviceRegistriesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceRegistriesResponse.Merge(dst, src) +} +func (m *ListDeviceRegistriesResponse) XXX_Size() int { + return xxx_messageInfo_ListDeviceRegistriesResponse.Size(m) +} +func (m *ListDeviceRegistriesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceRegistriesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceRegistriesResponse proto.InternalMessageInfo + +func (m *ListDeviceRegistriesResponse) GetDeviceRegistries() []*DeviceRegistry { + if m != nil { + return m.DeviceRegistries + } + return nil +} + +func (m *ListDeviceRegistriesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for `CreateDevice`. +type CreateDeviceRequest struct { + // The name of the device registry where this device should be created. + // For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The device registration details. The field `name` must be empty. The server + // generates `name` from the device registry `id` and the + // `parent` field. 
+ Device *Device `protobuf:"bytes,2,opt,name=device,proto3" json:"device,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDeviceRequest) Reset() { *m = CreateDeviceRequest{} } +func (m *CreateDeviceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDeviceRequest) ProtoMessage() {} +func (*CreateDeviceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{6} +} +func (m *CreateDeviceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDeviceRequest.Unmarshal(m, b) +} +func (m *CreateDeviceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDeviceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDeviceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDeviceRequest.Merge(dst, src) +} +func (m *CreateDeviceRequest) XXX_Size() int { + return xxx_messageInfo_CreateDeviceRequest.Size(m) +} +func (m *CreateDeviceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDeviceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDeviceRequest proto.InternalMessageInfo + +func (m *CreateDeviceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDeviceRequest) GetDevice() *Device { + if m != nil { + return m.Device + } + return nil +} + +// Request for `GetDevice`. +type GetDeviceRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The fields of the `Device` resource to be returned in the response. If the + // field mask is unset or empty, all fields are returned. + FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDeviceRequest) Reset() { *m = GetDeviceRequest{} } +func (m *GetDeviceRequest) String() string { return proto.CompactTextString(m) } +func (*GetDeviceRequest) ProtoMessage() {} +func (*GetDeviceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{7} +} +func (m *GetDeviceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDeviceRequest.Unmarshal(m, b) +} +func (m *GetDeviceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDeviceRequest.Marshal(b, m, deterministic) +} +func (dst *GetDeviceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDeviceRequest.Merge(dst, src) +} +func (m *GetDeviceRequest) XXX_Size() int { + return xxx_messageInfo_GetDeviceRequest.Size(m) +} +func (m *GetDeviceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDeviceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDeviceRequest proto.InternalMessageInfo + +func (m *GetDeviceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetDeviceRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +// Request for `UpdateDevice`. +type UpdateDeviceRequest struct { + // The new values for the device. 
The `id` and `num_id` fields must + // be empty, and the field `name` must specify the name path. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0`or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. + Device *Device `protobuf:"bytes,2,opt,name=device,proto3" json:"device,omitempty"` + // Only updates the `device` fields indicated by this mask. + // The field mask must not be empty, and it must not contain fields that + // are immutable or only set by the server. + // Mutable top-level fields: `credentials`, `blocked`, and `metadata` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDeviceRequest) Reset() { *m = UpdateDeviceRequest{} } +func (m *UpdateDeviceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDeviceRequest) ProtoMessage() {} +func (*UpdateDeviceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{8} +} +func (m *UpdateDeviceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDeviceRequest.Unmarshal(m, b) +} +func (m *UpdateDeviceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDeviceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDeviceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDeviceRequest.Merge(dst, src) +} +func (m *UpdateDeviceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDeviceRequest.Size(m) +} +func (m *UpdateDeviceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDeviceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDeviceRequest proto.InternalMessageInfo + +func (m *UpdateDeviceRequest) GetDevice() *Device { + if m != nil { + return m.Device + } + return nil +} + +func (m *UpdateDeviceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request for `DeleteDevice`. +type DeleteDeviceRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDeviceRequest) Reset() { *m = DeleteDeviceRequest{} } +func (m *DeleteDeviceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDeviceRequest) ProtoMessage() {} +func (*DeleteDeviceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{9} +} +func (m *DeleteDeviceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDeviceRequest.Unmarshal(m, b) +} +func (m *DeleteDeviceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDeviceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDeviceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDeviceRequest.Merge(dst, src) +} +func (m *DeleteDeviceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDeviceRequest.Size(m) +} +func (m *DeleteDeviceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDeviceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDeviceRequest proto.InternalMessageInfo + +func (m *DeleteDeviceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for `ListDevices`. +type ListDevicesRequest struct { + // The device registry path. Required. For example, + // `projects/my-project/locations/us-central1/registries/my-registry`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A list of device numeric IDs. If empty, this field is ignored. Maximum + // IDs: 10,000. + DeviceNumIds []uint64 `protobuf:"varint,2,rep,packed,name=device_num_ids,json=deviceNumIds,proto3" json:"device_num_ids,omitempty"` + // A list of device string IDs. For example, `['device0', 'device12']`. + // If empty, this field is ignored. Maximum IDs: 10,000 + DeviceIds []string `protobuf:"bytes,3,rep,name=device_ids,json=deviceIds,proto3" json:"device_ids,omitempty"` + // The fields of the `Device` resource to be returned in the response. The + // fields `id` and `num_id` are always returned, along with any + // other fields specified. + FieldMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // Options related to gateways. + GatewayListOptions *GatewayListOptions `protobuf:"bytes,6,opt,name=gateway_list_options,json=gatewayListOptions,proto3" json:"gateway_list_options,omitempty"` + // The maximum number of devices to return in the response. If this value + // is zero, the service will select a default size. A call may return fewer + // objects than requested. A non-empty `next_page_token` in the response + // indicates that more data is available. + PageSize int32 `protobuf:"varint,100,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListDevicesResponse`; indicates + // that this is a continuation of a prior `ListDevices` call and + // the system should return the next page of data. 
+ PageToken string `protobuf:"bytes,101,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDevicesRequest) Reset() { *m = ListDevicesRequest{} } +func (m *ListDevicesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDevicesRequest) ProtoMessage() {} +func (*ListDevicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{10} +} +func (m *ListDevicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDevicesRequest.Unmarshal(m, b) +} +func (m *ListDevicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDevicesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDevicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDevicesRequest.Merge(dst, src) +} +func (m *ListDevicesRequest) XXX_Size() int { + return xxx_messageInfo_ListDevicesRequest.Size(m) +} +func (m *ListDevicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDevicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDevicesRequest proto.InternalMessageInfo + +func (m *ListDevicesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDevicesRequest) GetDeviceNumIds() []uint64 { + if m != nil { + return m.DeviceNumIds + } + return nil +} + +func (m *ListDevicesRequest) GetDeviceIds() []string { + if m != nil { + return m.DeviceIds + } + return nil +} + +func (m *ListDevicesRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListDevicesRequest) GetGatewayListOptions() *GatewayListOptions { + if m != nil { + return m.GatewayListOptions + } + return nil +} + +func (m *ListDevicesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDevicesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Options for limiting the list based on gateway type and associations. +type GatewayListOptions struct { + // If not set, all devices and gateways are returned. If set, the list is + // filtered based on gateway type and associations. 
+ // + // Types that are valid to be assigned to Filter: + // *GatewayListOptions_GatewayType + // *GatewayListOptions_AssociationsGatewayId + // *GatewayListOptions_AssociationsDeviceId + Filter isGatewayListOptions_Filter `protobuf_oneof:"filter"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GatewayListOptions) Reset() { *m = GatewayListOptions{} } +func (m *GatewayListOptions) String() string { return proto.CompactTextString(m) } +func (*GatewayListOptions) ProtoMessage() {} +func (*GatewayListOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{11} +} +func (m *GatewayListOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GatewayListOptions.Unmarshal(m, b) +} +func (m *GatewayListOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GatewayListOptions.Marshal(b, m, deterministic) +} +func (dst *GatewayListOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_GatewayListOptions.Merge(dst, src) +} +func (m *GatewayListOptions) XXX_Size() int { + return xxx_messageInfo_GatewayListOptions.Size(m) +} +func (m *GatewayListOptions) XXX_DiscardUnknown() { + xxx_messageInfo_GatewayListOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_GatewayListOptions proto.InternalMessageInfo + +type isGatewayListOptions_Filter interface { + isGatewayListOptions_Filter() +} + +type GatewayListOptions_GatewayType struct { + GatewayType GatewayType `protobuf:"varint,1,opt,name=gateway_type,json=gatewayType,proto3,enum=google.cloud.iot.v1.GatewayType,oneof"` +} + +type GatewayListOptions_AssociationsGatewayId struct { + AssociationsGatewayId string `protobuf:"bytes,2,opt,name=associations_gateway_id,json=associationsGatewayId,proto3,oneof"` +} + +type GatewayListOptions_AssociationsDeviceId struct { + AssociationsDeviceId string `protobuf:"bytes,3,opt,name=associations_device_id,json=associationsDeviceId,proto3,oneof"` +} + +func (*GatewayListOptions_GatewayType) isGatewayListOptions_Filter() {} + +func (*GatewayListOptions_AssociationsGatewayId) isGatewayListOptions_Filter() {} + +func (*GatewayListOptions_AssociationsDeviceId) isGatewayListOptions_Filter() {} + +func (m *GatewayListOptions) GetFilter() isGatewayListOptions_Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *GatewayListOptions) GetGatewayType() GatewayType { + if x, ok := m.GetFilter().(*GatewayListOptions_GatewayType); ok { + return x.GatewayType + } + return GatewayType_GATEWAY_TYPE_UNSPECIFIED +} + +func (m *GatewayListOptions) GetAssociationsGatewayId() string { + if x, ok := m.GetFilter().(*GatewayListOptions_AssociationsGatewayId); ok { + return x.AssociationsGatewayId + } + return "" +} + +func (m *GatewayListOptions) GetAssociationsDeviceId() string { + if x, ok := m.GetFilter().(*GatewayListOptions_AssociationsDeviceId); ok { + return x.AssociationsDeviceId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GatewayListOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GatewayListOptions_OneofMarshaler, _GatewayListOptions_OneofUnmarshaler, _GatewayListOptions_OneofSizer, []interface{}{ + (*GatewayListOptions_GatewayType)(nil), + (*GatewayListOptions_AssociationsGatewayId)(nil), + (*GatewayListOptions_AssociationsDeviceId)(nil), + } +} + +func _GatewayListOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GatewayListOptions) + // filter + switch x := m.Filter.(type) { + case *GatewayListOptions_GatewayType: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.GatewayType)) + case *GatewayListOptions_AssociationsGatewayId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AssociationsGatewayId) + case *GatewayListOptions_AssociationsDeviceId: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AssociationsDeviceId) + case nil: + default: + return fmt.Errorf("GatewayListOptions.Filter has unexpected type %T", x) + } + return nil +} + +func _GatewayListOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GatewayListOptions) + switch tag { + case 1: // filter.gateway_type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Filter = &GatewayListOptions_GatewayType{GatewayType(x)} + return true, err + case 2: // filter.associations_gateway_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &GatewayListOptions_AssociationsGatewayId{x} + return true, err + case 3: // filter.associations_device_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &GatewayListOptions_AssociationsDeviceId{x} + return true, err + default: + return false, nil + } +} + +func _GatewayListOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GatewayListOptions) + // filter + switch x := m.Filter.(type) { + case *GatewayListOptions_GatewayType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.GatewayType)) + case *GatewayListOptions_AssociationsGatewayId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AssociationsGatewayId))) + n += len(x.AssociationsGatewayId) + case *GatewayListOptions_AssociationsDeviceId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AssociationsDeviceId))) + n += len(x.AssociationsDeviceId) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response for `ListDevices`. +type ListDevicesResponse struct { + // The devices that match the request. + Devices []*Device `protobuf:"bytes,1,rep,name=devices,proto3" json:"devices,omitempty"` + // If not empty, indicates that there may be more devices that match the + // request; this value should be passed in a new `ListDevicesRequest`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDevicesResponse) Reset() { *m = ListDevicesResponse{} } +func (m *ListDevicesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDevicesResponse) ProtoMessage() {} +func (*ListDevicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{12} +} +func (m *ListDevicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDevicesResponse.Unmarshal(m, b) +} +func (m *ListDevicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDevicesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDevicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDevicesResponse.Merge(dst, src) +} +func (m *ListDevicesResponse) XXX_Size() int { + return xxx_messageInfo_ListDevicesResponse.Size(m) +} +func (m *ListDevicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDevicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDevicesResponse proto.InternalMessageInfo + +func (m *ListDevicesResponse) GetDevices() []*Device { + if m != nil { + return m.Devices + } + return nil +} + +func (m *ListDevicesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for `ModifyCloudToDeviceConfig`. +type ModifyCloudToDeviceConfigRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The version number to update. If this value is zero, it will not check the + // version number of the server and will always update the current version; + // otherwise, this update will fail if the version number found on the server + // does not match this version number. This is used to support multiple + // simultaneous updates without losing data. + VersionToUpdate int64 `protobuf:"varint,2,opt,name=version_to_update,json=versionToUpdate,proto3" json:"version_to_update,omitempty"` + // The configuration data for the device. 
+ BinaryData []byte `protobuf:"bytes,3,opt,name=binary_data,json=binaryData,proto3" json:"binary_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyCloudToDeviceConfigRequest) Reset() { *m = ModifyCloudToDeviceConfigRequest{} } +func (m *ModifyCloudToDeviceConfigRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyCloudToDeviceConfigRequest) ProtoMessage() {} +func (*ModifyCloudToDeviceConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{13} +} +func (m *ModifyCloudToDeviceConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyCloudToDeviceConfigRequest.Unmarshal(m, b) +} +func (m *ModifyCloudToDeviceConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyCloudToDeviceConfigRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyCloudToDeviceConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyCloudToDeviceConfigRequest.Merge(dst, src) +} +func (m *ModifyCloudToDeviceConfigRequest) XXX_Size() int { + return xxx_messageInfo_ModifyCloudToDeviceConfigRequest.Size(m) +} +func (m *ModifyCloudToDeviceConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyCloudToDeviceConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyCloudToDeviceConfigRequest proto.InternalMessageInfo + +func (m *ModifyCloudToDeviceConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ModifyCloudToDeviceConfigRequest) GetVersionToUpdate() int64 { + if m != nil { + return m.VersionToUpdate + } + return 0 +} + +func (m *ModifyCloudToDeviceConfigRequest) GetBinaryData() []byte { + if m != nil { + return m.BinaryData + } + return nil +} + +// Request for `ListDeviceConfigVersions`. +type ListDeviceConfigVersionsRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The number of versions to list. Versions are listed in decreasing order of + // the version number. The maximum number of versions retained is 10. If this + // value is zero, it will return all the versions available. 
+ NumVersions int32 `protobuf:"varint,2,opt,name=num_versions,json=numVersions,proto3" json:"num_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceConfigVersionsRequest) Reset() { *m = ListDeviceConfigVersionsRequest{} } +func (m *ListDeviceConfigVersionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDeviceConfigVersionsRequest) ProtoMessage() {} +func (*ListDeviceConfigVersionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{14} +} +func (m *ListDeviceConfigVersionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceConfigVersionsRequest.Unmarshal(m, b) +} +func (m *ListDeviceConfigVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceConfigVersionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDeviceConfigVersionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceConfigVersionsRequest.Merge(dst, src) +} +func (m *ListDeviceConfigVersionsRequest) XXX_Size() int { + return xxx_messageInfo_ListDeviceConfigVersionsRequest.Size(m) +} +func (m *ListDeviceConfigVersionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceConfigVersionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceConfigVersionsRequest proto.InternalMessageInfo + +func (m *ListDeviceConfigVersionsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListDeviceConfigVersionsRequest) GetNumVersions() int32 { + if m != nil { + return m.NumVersions + } + return 0 +} + +// Response for `ListDeviceConfigVersions`. +type ListDeviceConfigVersionsResponse struct { + // The device configuration for the last few versions. Versions are listed + // in decreasing order, starting from the most recent one. 
+ DeviceConfigs []*DeviceConfig `protobuf:"bytes,1,rep,name=device_configs,json=deviceConfigs,proto3" json:"device_configs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceConfigVersionsResponse) Reset() { *m = ListDeviceConfigVersionsResponse{} } +func (m *ListDeviceConfigVersionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDeviceConfigVersionsResponse) ProtoMessage() {} +func (*ListDeviceConfigVersionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{15} +} +func (m *ListDeviceConfigVersionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceConfigVersionsResponse.Unmarshal(m, b) +} +func (m *ListDeviceConfigVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceConfigVersionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDeviceConfigVersionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceConfigVersionsResponse.Merge(dst, src) +} +func (m *ListDeviceConfigVersionsResponse) XXX_Size() int { + return xxx_messageInfo_ListDeviceConfigVersionsResponse.Size(m) +} +func (m *ListDeviceConfigVersionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceConfigVersionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceConfigVersionsResponse proto.InternalMessageInfo + +func (m *ListDeviceConfigVersionsResponse) GetDeviceConfigs() []*DeviceConfig { + if m != nil { + return m.DeviceConfigs + } + return nil +} + +// Request for `ListDeviceStates`. +type ListDeviceStatesRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The number of states to list. States are listed in descending order of + // update time. The maximum number of states retained is 10. If this + // value is zero, it will return all the states available. 
+ NumStates int32 `protobuf:"varint,2,opt,name=num_states,json=numStates,proto3" json:"num_states,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceStatesRequest) Reset() { *m = ListDeviceStatesRequest{} } +func (m *ListDeviceStatesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDeviceStatesRequest) ProtoMessage() {} +func (*ListDeviceStatesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{16} +} +func (m *ListDeviceStatesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceStatesRequest.Unmarshal(m, b) +} +func (m *ListDeviceStatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceStatesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDeviceStatesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceStatesRequest.Merge(dst, src) +} +func (m *ListDeviceStatesRequest) XXX_Size() int { + return xxx_messageInfo_ListDeviceStatesRequest.Size(m) +} +func (m *ListDeviceStatesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceStatesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceStatesRequest proto.InternalMessageInfo + +func (m *ListDeviceStatesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListDeviceStatesRequest) GetNumStates() int32 { + if m != nil { + return m.NumStates + } + return 0 +} + +// Response for `ListDeviceStates`. +type ListDeviceStatesResponse struct { + // The last few device states. States are listed in descending order of server + // update time, starting from the most recent one. + DeviceStates []*DeviceState `protobuf:"bytes,1,rep,name=device_states,json=deviceStates,proto3" json:"device_states,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeviceStatesResponse) Reset() { *m = ListDeviceStatesResponse{} } +func (m *ListDeviceStatesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDeviceStatesResponse) ProtoMessage() {} +func (*ListDeviceStatesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{17} +} +func (m *ListDeviceStatesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeviceStatesResponse.Unmarshal(m, b) +} +func (m *ListDeviceStatesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeviceStatesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDeviceStatesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeviceStatesResponse.Merge(dst, src) +} +func (m *ListDeviceStatesResponse) XXX_Size() int { + return xxx_messageInfo_ListDeviceStatesResponse.Size(m) +} +func (m *ListDeviceStatesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeviceStatesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeviceStatesResponse proto.InternalMessageInfo + +func (m *ListDeviceStatesResponse) GetDeviceStates() []*DeviceState { + if m != nil { + return m.DeviceStates + } + return nil +} + +// Request for `SendCommandToDevice`. +type SendCommandToDeviceRequest struct { + // The name of the device. For example, + // `projects/p0/locations/us-central1/registries/registry0/devices/device0` or + // `projects/p0/locations/us-central1/registries/registry0/devices/{num_id}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The command data to send to the device. + BinaryData []byte `protobuf:"bytes,2,opt,name=binary_data,json=binaryData,proto3" json:"binary_data,omitempty"` + // Optional subfolder for the command. If empty, the command will be delivered + // to the /devices/{device-id}/commands topic, otherwise it will be delivered + // to the /devices/{device-id}/commands/{subfolder} topic. Multi-level + // subfolders are allowed. This field must not have more than 256 characters, + // and must not contain any MQTT wildcards ("+" or "#") or null characters. + Subfolder string `protobuf:"bytes,3,opt,name=subfolder,proto3" json:"subfolder,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SendCommandToDeviceRequest) Reset() { *m = SendCommandToDeviceRequest{} } +func (m *SendCommandToDeviceRequest) String() string { return proto.CompactTextString(m) } +func (*SendCommandToDeviceRequest) ProtoMessage() {} +func (*SendCommandToDeviceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{18} +} +func (m *SendCommandToDeviceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SendCommandToDeviceRequest.Unmarshal(m, b) +} +func (m *SendCommandToDeviceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SendCommandToDeviceRequest.Marshal(b, m, deterministic) +} +func (dst *SendCommandToDeviceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SendCommandToDeviceRequest.Merge(dst, src) +} +func (m *SendCommandToDeviceRequest) XXX_Size() int { + return xxx_messageInfo_SendCommandToDeviceRequest.Size(m) +} +func (m *SendCommandToDeviceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SendCommandToDeviceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SendCommandToDeviceRequest proto.InternalMessageInfo + +func (m *SendCommandToDeviceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SendCommandToDeviceRequest) GetBinaryData() []byte { + if m != nil { + return m.BinaryData + } + return nil +} + +func (m *SendCommandToDeviceRequest) GetSubfolder() string { + if m != nil { + return m.Subfolder + } + return "" +} + +// Response for `SendCommandToDevice`. 
+type SendCommandToDeviceResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SendCommandToDeviceResponse) Reset() { *m = SendCommandToDeviceResponse{} } +func (m *SendCommandToDeviceResponse) String() string { return proto.CompactTextString(m) } +func (*SendCommandToDeviceResponse) ProtoMessage() {} +func (*SendCommandToDeviceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{19} +} +func (m *SendCommandToDeviceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SendCommandToDeviceResponse.Unmarshal(m, b) +} +func (m *SendCommandToDeviceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SendCommandToDeviceResponse.Marshal(b, m, deterministic) +} +func (dst *SendCommandToDeviceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SendCommandToDeviceResponse.Merge(dst, src) +} +func (m *SendCommandToDeviceResponse) XXX_Size() int { + return xxx_messageInfo_SendCommandToDeviceResponse.Size(m) +} +func (m *SendCommandToDeviceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SendCommandToDeviceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SendCommandToDeviceResponse proto.InternalMessageInfo + +// Request for `BindDeviceToGateway`. +type BindDeviceToGatewayRequest struct { + // The name of the registry. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The value of `gateway_id` can be either the device numeric ID or the + // user-defined device identifier. + GatewayId string `protobuf:"bytes,2,opt,name=gateway_id,json=gatewayId,proto3" json:"gateway_id,omitempty"` + // The device to associate with the specified gateway. The value of + // `device_id` can be either the device numeric ID or the user-defined device + // identifier. 
+ DeviceId string `protobuf:"bytes,3,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BindDeviceToGatewayRequest) Reset() { *m = BindDeviceToGatewayRequest{} } +func (m *BindDeviceToGatewayRequest) String() string { return proto.CompactTextString(m) } +func (*BindDeviceToGatewayRequest) ProtoMessage() {} +func (*BindDeviceToGatewayRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{20} +} +func (m *BindDeviceToGatewayRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BindDeviceToGatewayRequest.Unmarshal(m, b) +} +func (m *BindDeviceToGatewayRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BindDeviceToGatewayRequest.Marshal(b, m, deterministic) +} +func (dst *BindDeviceToGatewayRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BindDeviceToGatewayRequest.Merge(dst, src) +} +func (m *BindDeviceToGatewayRequest) XXX_Size() int { + return xxx_messageInfo_BindDeviceToGatewayRequest.Size(m) +} +func (m *BindDeviceToGatewayRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BindDeviceToGatewayRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BindDeviceToGatewayRequest proto.InternalMessageInfo + +func (m *BindDeviceToGatewayRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BindDeviceToGatewayRequest) GetGatewayId() string { + if m != nil { + return m.GatewayId + } + return "" +} + +func (m *BindDeviceToGatewayRequest) GetDeviceId() string { + if m != nil { + return m.DeviceId + } + return "" +} + +// Response for `BindDeviceToGateway`. +type BindDeviceToGatewayResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BindDeviceToGatewayResponse) Reset() { *m = BindDeviceToGatewayResponse{} } +func (m *BindDeviceToGatewayResponse) String() string { return proto.CompactTextString(m) } +func (*BindDeviceToGatewayResponse) ProtoMessage() {} +func (*BindDeviceToGatewayResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{21} +} +func (m *BindDeviceToGatewayResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BindDeviceToGatewayResponse.Unmarshal(m, b) +} +func (m *BindDeviceToGatewayResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BindDeviceToGatewayResponse.Marshal(b, m, deterministic) +} +func (dst *BindDeviceToGatewayResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BindDeviceToGatewayResponse.Merge(dst, src) +} +func (m *BindDeviceToGatewayResponse) XXX_Size() int { + return xxx_messageInfo_BindDeviceToGatewayResponse.Size(m) +} +func (m *BindDeviceToGatewayResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BindDeviceToGatewayResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BindDeviceToGatewayResponse proto.InternalMessageInfo + +// Request for `UnbindDeviceFromGateway`. +type UnbindDeviceFromGatewayRequest struct { + // The name of the registry. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The value of `gateway_id` can be either the device numeric ID or the + // user-defined device identifier. 
+ GatewayId string `protobuf:"bytes,2,opt,name=gateway_id,json=gatewayId,proto3" json:"gateway_id,omitempty"` + // The device to disassociate from the specified gateway. The value of + // `device_id` can be either the device numeric ID or the user-defined device + // identifier. + DeviceId string `protobuf:"bytes,3,opt,name=device_id,json=deviceId,proto3" json:"device_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnbindDeviceFromGatewayRequest) Reset() { *m = UnbindDeviceFromGatewayRequest{} } +func (m *UnbindDeviceFromGatewayRequest) String() string { return proto.CompactTextString(m) } +func (*UnbindDeviceFromGatewayRequest) ProtoMessage() {} +func (*UnbindDeviceFromGatewayRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{22} +} +func (m *UnbindDeviceFromGatewayRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnbindDeviceFromGatewayRequest.Unmarshal(m, b) +} +func (m *UnbindDeviceFromGatewayRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnbindDeviceFromGatewayRequest.Marshal(b, m, deterministic) +} +func (dst *UnbindDeviceFromGatewayRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnbindDeviceFromGatewayRequest.Merge(dst, src) +} +func (m *UnbindDeviceFromGatewayRequest) XXX_Size() int { + return xxx_messageInfo_UnbindDeviceFromGatewayRequest.Size(m) +} +func (m *UnbindDeviceFromGatewayRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UnbindDeviceFromGatewayRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UnbindDeviceFromGatewayRequest proto.InternalMessageInfo + +func (m *UnbindDeviceFromGatewayRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *UnbindDeviceFromGatewayRequest) GetGatewayId() string { + if m != nil { + return m.GatewayId + } + return "" +} + +func (m *UnbindDeviceFromGatewayRequest) GetDeviceId() string { + if m != nil { + return m.DeviceId + } + return "" +} + +// Response for `UnbindDeviceFromGateway`. 
+type UnbindDeviceFromGatewayResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnbindDeviceFromGatewayResponse) Reset() { *m = UnbindDeviceFromGatewayResponse{} } +func (m *UnbindDeviceFromGatewayResponse) String() string { return proto.CompactTextString(m) } +func (*UnbindDeviceFromGatewayResponse) ProtoMessage() {} +func (*UnbindDeviceFromGatewayResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_device_manager_d40b22b22b466eb0, []int{23} +} +func (m *UnbindDeviceFromGatewayResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnbindDeviceFromGatewayResponse.Unmarshal(m, b) +} +func (m *UnbindDeviceFromGatewayResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnbindDeviceFromGatewayResponse.Marshal(b, m, deterministic) +} +func (dst *UnbindDeviceFromGatewayResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnbindDeviceFromGatewayResponse.Merge(dst, src) +} +func (m *UnbindDeviceFromGatewayResponse) XXX_Size() int { + return xxx_messageInfo_UnbindDeviceFromGatewayResponse.Size(m) +} +func (m *UnbindDeviceFromGatewayResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UnbindDeviceFromGatewayResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UnbindDeviceFromGatewayResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CreateDeviceRegistryRequest)(nil), "google.cloud.iot.v1.CreateDeviceRegistryRequest") + proto.RegisterType((*GetDeviceRegistryRequest)(nil), "google.cloud.iot.v1.GetDeviceRegistryRequest") + proto.RegisterType((*DeleteDeviceRegistryRequest)(nil), "google.cloud.iot.v1.DeleteDeviceRegistryRequest") + proto.RegisterType((*UpdateDeviceRegistryRequest)(nil), "google.cloud.iot.v1.UpdateDeviceRegistryRequest") + proto.RegisterType((*ListDeviceRegistriesRequest)(nil), "google.cloud.iot.v1.ListDeviceRegistriesRequest") + proto.RegisterType((*ListDeviceRegistriesResponse)(nil), "google.cloud.iot.v1.ListDeviceRegistriesResponse") + proto.RegisterType((*CreateDeviceRequest)(nil), "google.cloud.iot.v1.CreateDeviceRequest") + proto.RegisterType((*GetDeviceRequest)(nil), "google.cloud.iot.v1.GetDeviceRequest") + proto.RegisterType((*UpdateDeviceRequest)(nil), "google.cloud.iot.v1.UpdateDeviceRequest") + proto.RegisterType((*DeleteDeviceRequest)(nil), "google.cloud.iot.v1.DeleteDeviceRequest") + proto.RegisterType((*ListDevicesRequest)(nil), "google.cloud.iot.v1.ListDevicesRequest") + proto.RegisterType((*GatewayListOptions)(nil), "google.cloud.iot.v1.GatewayListOptions") + proto.RegisterType((*ListDevicesResponse)(nil), "google.cloud.iot.v1.ListDevicesResponse") + proto.RegisterType((*ModifyCloudToDeviceConfigRequest)(nil), "google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest") + proto.RegisterType((*ListDeviceConfigVersionsRequest)(nil), "google.cloud.iot.v1.ListDeviceConfigVersionsRequest") + proto.RegisterType((*ListDeviceConfigVersionsResponse)(nil), "google.cloud.iot.v1.ListDeviceConfigVersionsResponse") + proto.RegisterType((*ListDeviceStatesRequest)(nil), "google.cloud.iot.v1.ListDeviceStatesRequest") + proto.RegisterType((*ListDeviceStatesResponse)(nil), "google.cloud.iot.v1.ListDeviceStatesResponse") + proto.RegisterType((*SendCommandToDeviceRequest)(nil), "google.cloud.iot.v1.SendCommandToDeviceRequest") + proto.RegisterType((*SendCommandToDeviceResponse)(nil), "google.cloud.iot.v1.SendCommandToDeviceResponse") + proto.RegisterType((*BindDeviceToGatewayRequest)(nil), 
"google.cloud.iot.v1.BindDeviceToGatewayRequest") + proto.RegisterType((*BindDeviceToGatewayResponse)(nil), "google.cloud.iot.v1.BindDeviceToGatewayResponse") + proto.RegisterType((*UnbindDeviceFromGatewayRequest)(nil), "google.cloud.iot.v1.UnbindDeviceFromGatewayRequest") + proto.RegisterType((*UnbindDeviceFromGatewayResponse)(nil), "google.cloud.iot.v1.UnbindDeviceFromGatewayResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DeviceManagerClient is the client API for DeviceManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DeviceManagerClient interface { + // Creates a device registry that contains devices. + CreateDeviceRegistry(ctx context.Context, in *CreateDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) + // Gets a device registry configuration. + GetDeviceRegistry(ctx context.Context, in *GetDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) + // Updates a device registry configuration. + UpdateDeviceRegistry(ctx context.Context, in *UpdateDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) + // Deletes a device registry configuration. + DeleteDeviceRegistry(ctx context.Context, in *DeleteDeviceRegistryRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists device registries. + ListDeviceRegistries(ctx context.Context, in *ListDeviceRegistriesRequest, opts ...grpc.CallOption) (*ListDeviceRegistriesResponse, error) + // Creates a device in a device registry. + CreateDevice(ctx context.Context, in *CreateDeviceRequest, opts ...grpc.CallOption) (*Device, error) + // Gets details about a device. + GetDevice(ctx context.Context, in *GetDeviceRequest, opts ...grpc.CallOption) (*Device, error) + // Updates a device. + UpdateDevice(ctx context.Context, in *UpdateDeviceRequest, opts ...grpc.CallOption) (*Device, error) + // Deletes a device. + DeleteDevice(ctx context.Context, in *DeleteDeviceRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // List devices in a device registry. + ListDevices(ctx context.Context, in *ListDevicesRequest, opts ...grpc.CallOption) (*ListDevicesResponse, error) + // Modifies the configuration for the device, which is eventually sent from + // the Cloud IoT Core servers. Returns the modified configuration version and + // its metadata. + ModifyCloudToDeviceConfig(ctx context.Context, in *ModifyCloudToDeviceConfigRequest, opts ...grpc.CallOption) (*DeviceConfig, error) + // Lists the last few versions of the device configuration in descending + // order (i.e.: newest first). + ListDeviceConfigVersions(ctx context.Context, in *ListDeviceConfigVersionsRequest, opts ...grpc.CallOption) (*ListDeviceConfigVersionsResponse, error) + // Lists the last few versions of the device state in descending order (i.e.: + // newest first). + ListDeviceStates(ctx context.Context, in *ListDeviceStatesRequest, opts ...grpc.CallOption) (*ListDeviceStatesResponse, error) + // Sets the access control policy on the specified resource. Replaces any + // existing policy. 
+ SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Sends a command to the specified device. In order for a device to be able + // to receive commands, it must: + // 1) be connected to Cloud IoT Core using the MQTT protocol, and + // 2) be subscribed to the group of MQTT topics specified by + // /devices/{device-id}/commands/#. This subscription will receive commands + // at the top-level topic /devices/{device-id}/commands as well as commands + // for subfolders, like /devices/{device-id}/commands/subfolder. + // Note that subscribing to specific subfolders is not supported. + // If the command could not be delivered to the device, this method will + // return an error; in particular, if the device is not subscribed, this + // method will return FAILED_PRECONDITION. Otherwise, this method will + // return OK. If the subscription is QoS 1, at least once delivery will be + // guaranteed; for QoS 0, no acknowledgment will be expected from the device. + SendCommandToDevice(ctx context.Context, in *SendCommandToDeviceRequest, opts ...grpc.CallOption) (*SendCommandToDeviceResponse, error) + // Associates the device with the gateway. + BindDeviceToGateway(ctx context.Context, in *BindDeviceToGatewayRequest, opts ...grpc.CallOption) (*BindDeviceToGatewayResponse, error) + // Deletes the association between the device and the gateway. + UnbindDeviceFromGateway(ctx context.Context, in *UnbindDeviceFromGatewayRequest, opts ...grpc.CallOption) (*UnbindDeviceFromGatewayResponse, error) +} + +type deviceManagerClient struct { + cc *grpc.ClientConn +} + +func NewDeviceManagerClient(cc *grpc.ClientConn) DeviceManagerClient { + return &deviceManagerClient{cc} +} + +func (c *deviceManagerClient) CreateDeviceRegistry(ctx context.Context, in *CreateDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) { + out := new(DeviceRegistry) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/CreateDeviceRegistry", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) GetDeviceRegistry(ctx context.Context, in *GetDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) { + out := new(DeviceRegistry) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/GetDeviceRegistry", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) UpdateDeviceRegistry(ctx context.Context, in *UpdateDeviceRegistryRequest, opts ...grpc.CallOption) (*DeviceRegistry, error) { + out := new(DeviceRegistry) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/UpdateDeviceRegistry", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) DeleteDeviceRegistry(ctx context.Context, in *DeleteDeviceRegistryRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/DeleteDeviceRegistry", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) ListDeviceRegistries(ctx context.Context, in *ListDeviceRegistriesRequest, opts ...grpc.CallOption) (*ListDeviceRegistriesResponse, error) { + out := new(ListDeviceRegistriesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/ListDeviceRegistries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) CreateDevice(ctx context.Context, in *CreateDeviceRequest, opts ...grpc.CallOption) (*Device, error) { + out := new(Device) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/CreateDevice", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) GetDevice(ctx context.Context, in *GetDeviceRequest, opts ...grpc.CallOption) (*Device, error) { + out := new(Device) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/GetDevice", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) UpdateDevice(ctx context.Context, in *UpdateDeviceRequest, opts ...grpc.CallOption) (*Device, error) { + out := new(Device) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/UpdateDevice", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) DeleteDevice(ctx context.Context, in *DeleteDeviceRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/DeleteDevice", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) ListDevices(ctx context.Context, in *ListDevicesRequest, opts ...grpc.CallOption) (*ListDevicesResponse, error) { + out := new(ListDevicesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/ListDevices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) ModifyCloudToDeviceConfig(ctx context.Context, in *ModifyCloudToDeviceConfigRequest, opts ...grpc.CallOption) (*DeviceConfig, error) { + out := new(DeviceConfig) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/ModifyCloudToDeviceConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) ListDeviceConfigVersions(ctx context.Context, in *ListDeviceConfigVersionsRequest, opts ...grpc.CallOption) (*ListDeviceConfigVersionsResponse, error) { + out := new(ListDeviceConfigVersionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/ListDeviceConfigVersions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) ListDeviceStates(ctx context.Context, in *ListDeviceStatesRequest, opts ...grpc.CallOption) (*ListDeviceStatesResponse, error) { + out := new(ListDeviceStatesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/ListDeviceStates", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) SendCommandToDevice(ctx context.Context, in *SendCommandToDeviceRequest, opts ...grpc.CallOption) (*SendCommandToDeviceResponse, error) { + out := new(SendCommandToDeviceResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/SendCommandToDevice", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) BindDeviceToGateway(ctx context.Context, in *BindDeviceToGatewayRequest, opts ...grpc.CallOption) (*BindDeviceToGatewayResponse, error) { + out := new(BindDeviceToGatewayResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/BindDeviceToGateway", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *deviceManagerClient) UnbindDeviceFromGateway(ctx context.Context, in *UnbindDeviceFromGatewayRequest, opts ...grpc.CallOption) (*UnbindDeviceFromGatewayResponse, error) { + out := new(UnbindDeviceFromGatewayResponse) + err := c.cc.Invoke(ctx, "/google.cloud.iot.v1.DeviceManager/UnbindDeviceFromGateway", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DeviceManagerServer is the server API for DeviceManager service. +type DeviceManagerServer interface { + // Creates a device registry that contains devices. + CreateDeviceRegistry(context.Context, *CreateDeviceRegistryRequest) (*DeviceRegistry, error) + // Gets a device registry configuration. + GetDeviceRegistry(context.Context, *GetDeviceRegistryRequest) (*DeviceRegistry, error) + // Updates a device registry configuration. + UpdateDeviceRegistry(context.Context, *UpdateDeviceRegistryRequest) (*DeviceRegistry, error) + // Deletes a device registry configuration. + DeleteDeviceRegistry(context.Context, *DeleteDeviceRegistryRequest) (*empty.Empty, error) + // Lists device registries. + ListDeviceRegistries(context.Context, *ListDeviceRegistriesRequest) (*ListDeviceRegistriesResponse, error) + // Creates a device in a device registry. + CreateDevice(context.Context, *CreateDeviceRequest) (*Device, error) + // Gets details about a device. + GetDevice(context.Context, *GetDeviceRequest) (*Device, error) + // Updates a device. + UpdateDevice(context.Context, *UpdateDeviceRequest) (*Device, error) + // Deletes a device. + DeleteDevice(context.Context, *DeleteDeviceRequest) (*empty.Empty, error) + // List devices in a device registry. 
+ ListDevices(context.Context, *ListDevicesRequest) (*ListDevicesResponse, error) + // Modifies the configuration for the device, which is eventually sent from + // the Cloud IoT Core servers. Returns the modified configuration version and + // its metadata. + ModifyCloudToDeviceConfig(context.Context, *ModifyCloudToDeviceConfigRequest) (*DeviceConfig, error) + // Lists the last few versions of the device configuration in descending + // order (i.e.: newest first). + ListDeviceConfigVersions(context.Context, *ListDeviceConfigVersionsRequest) (*ListDeviceConfigVersionsResponse, error) + // Lists the last few versions of the device state in descending order (i.e.: + // newest first). + ListDeviceStates(context.Context, *ListDeviceStatesRequest) (*ListDeviceStatesResponse, error) + // Sets the access control policy on the specified resource. Replaces any + // existing policy. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Sends a command to the specified device. In order for a device to be able + // to receive commands, it must: + // 1) be connected to Cloud IoT Core using the MQTT protocol, and + // 2) be subscribed to the group of MQTT topics specified by + // /devices/{device-id}/commands/#. This subscription will receive commands + // at the top-level topic /devices/{device-id}/commands as well as commands + // for subfolders, like /devices/{device-id}/commands/subfolder. + // Note that subscribing to specific subfolders is not supported. + // If the command could not be delivered to the device, this method will + // return an error; in particular, if the device is not subscribed, this + // method will return FAILED_PRECONDITION. Otherwise, this method will + // return OK. If the subscription is QoS 1, at least once delivery will be + // guaranteed; for QoS 0, no acknowledgment will be expected from the device. + SendCommandToDevice(context.Context, *SendCommandToDeviceRequest) (*SendCommandToDeviceResponse, error) + // Associates the device with the gateway. + BindDeviceToGateway(context.Context, *BindDeviceToGatewayRequest) (*BindDeviceToGatewayResponse, error) + // Deletes the association between the device and the gateway. 
+ UnbindDeviceFromGateway(context.Context, *UnbindDeviceFromGatewayRequest) (*UnbindDeviceFromGatewayResponse, error) +} + +func RegisterDeviceManagerServer(s *grpc.Server, srv DeviceManagerServer) { + s.RegisterService(&_DeviceManager_serviceDesc, srv) +} + +func _DeviceManager_CreateDeviceRegistry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDeviceRegistryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).CreateDeviceRegistry(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/CreateDeviceRegistry", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).CreateDeviceRegistry(ctx, req.(*CreateDeviceRegistryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_GetDeviceRegistry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDeviceRegistryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).GetDeviceRegistry(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/GetDeviceRegistry", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).GetDeviceRegistry(ctx, req.(*GetDeviceRegistryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_UpdateDeviceRegistry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDeviceRegistryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).UpdateDeviceRegistry(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/UpdateDeviceRegistry", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).UpdateDeviceRegistry(ctx, req.(*UpdateDeviceRegistryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_DeleteDeviceRegistry_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDeviceRegistryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).DeleteDeviceRegistry(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/DeleteDeviceRegistry", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).DeleteDeviceRegistry(ctx, req.(*DeleteDeviceRegistryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_ListDeviceRegistries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDeviceRegistriesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).ListDeviceRegistries(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/ListDeviceRegistries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).ListDeviceRegistries(ctx, req.(*ListDeviceRegistriesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_CreateDevice_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDeviceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).CreateDevice(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/CreateDevice", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).CreateDevice(ctx, req.(*CreateDeviceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_GetDevice_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDeviceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).GetDevice(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/GetDevice", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).GetDevice(ctx, req.(*GetDeviceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_UpdateDevice_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDeviceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).UpdateDevice(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/UpdateDevice", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).UpdateDevice(ctx, req.(*UpdateDeviceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_DeleteDevice_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDeviceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).DeleteDevice(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/DeleteDevice", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).DeleteDevice(ctx, req.(*DeleteDeviceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_ListDevices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDevicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).ListDevices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/ListDevices", + } + handler := func(ctx context.Context, req 
interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).ListDevices(ctx, req.(*ListDevicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_ModifyCloudToDeviceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyCloudToDeviceConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).ModifyCloudToDeviceConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/ModifyCloudToDeviceConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).ModifyCloudToDeviceConfig(ctx, req.(*ModifyCloudToDeviceConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_ListDeviceConfigVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDeviceConfigVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).ListDeviceConfigVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/ListDeviceConfigVersions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).ListDeviceConfigVersions(ctx, req.(*ListDeviceConfigVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_ListDeviceStates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDeviceStatesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).ListDeviceStates(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/ListDeviceStates", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).ListDeviceStates(ctx, req.(*ListDeviceStatesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/GetIamPolicy", + } + handler := 
func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_SendCommandToDevice_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SendCommandToDeviceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).SendCommandToDevice(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/SendCommandToDevice", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).SendCommandToDevice(ctx, req.(*SendCommandToDeviceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_BindDeviceToGateway_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BindDeviceToGatewayRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).BindDeviceToGateway(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/BindDeviceToGateway", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).BindDeviceToGateway(ctx, req.(*BindDeviceToGatewayRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DeviceManager_UnbindDeviceFromGateway_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UnbindDeviceFromGatewayRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DeviceManagerServer).UnbindDeviceFromGateway(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.iot.v1.DeviceManager/UnbindDeviceFromGateway", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DeviceManagerServer).UnbindDeviceFromGateway(ctx, req.(*UnbindDeviceFromGatewayRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DeviceManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.iot.v1.DeviceManager", + HandlerType: (*DeviceManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateDeviceRegistry", + Handler: _DeviceManager_CreateDeviceRegistry_Handler, + }, + { + MethodName: "GetDeviceRegistry", + Handler: _DeviceManager_GetDeviceRegistry_Handler, + }, + { + MethodName: "UpdateDeviceRegistry", + Handler: 
_DeviceManager_UpdateDeviceRegistry_Handler, + }, + { + MethodName: "DeleteDeviceRegistry", + Handler: _DeviceManager_DeleteDeviceRegistry_Handler, + }, + { + MethodName: "ListDeviceRegistries", + Handler: _DeviceManager_ListDeviceRegistries_Handler, + }, + { + MethodName: "CreateDevice", + Handler: _DeviceManager_CreateDevice_Handler, + }, + { + MethodName: "GetDevice", + Handler: _DeviceManager_GetDevice_Handler, + }, + { + MethodName: "UpdateDevice", + Handler: _DeviceManager_UpdateDevice_Handler, + }, + { + MethodName: "DeleteDevice", + Handler: _DeviceManager_DeleteDevice_Handler, + }, + { + MethodName: "ListDevices", + Handler: _DeviceManager_ListDevices_Handler, + }, + { + MethodName: "ModifyCloudToDeviceConfig", + Handler: _DeviceManager_ModifyCloudToDeviceConfig_Handler, + }, + { + MethodName: "ListDeviceConfigVersions", + Handler: _DeviceManager_ListDeviceConfigVersions_Handler, + }, + { + MethodName: "ListDeviceStates", + Handler: _DeviceManager_ListDeviceStates_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _DeviceManager_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _DeviceManager_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _DeviceManager_TestIamPermissions_Handler, + }, + { + MethodName: "SendCommandToDevice", + Handler: _DeviceManager_SendCommandToDevice_Handler, + }, + { + MethodName: "BindDeviceToGateway", + Handler: _DeviceManager_BindDeviceToGateway_Handler, + }, + { + MethodName: "UnbindDeviceFromGateway", + Handler: _DeviceManager_UnbindDeviceFromGateway_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/iot/v1/device_manager.proto", +} + +func init() { + proto.RegisterFile("google/cloud/iot/v1/device_manager.proto", fileDescriptor_device_manager_d40b22b22b466eb0) +} + +var fileDescriptor_device_manager_d40b22b22b466eb0 = []byte{ + // 1801 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0x4f, 0x6f, 0x1c, 0x49, + 0x15, 0x4f, 0x8d, 0x83, 0xc9, 0x3c, 0x4f, 0x36, 0x49, 0xd9, 0x1b, 0x0f, 0x33, 0xf1, 0x7a, 0x52, + 0x0b, 0xac, 0x77, 0xc4, 0x4e, 0xc7, 0xce, 0x26, 0x0a, 0x5e, 0xc1, 0xb2, 0xfe, 0xb3, 0x9e, 0x20, + 0x7b, 0xf1, 0xb6, 0x9d, 0x05, 0xcc, 0x61, 0x68, 0x4f, 0x97, 0x5b, 0xbd, 0x99, 0xee, 0xea, 0xed, + 0xaa, 0x31, 0x38, 0x28, 0x42, 0x22, 0x02, 0x4e, 0x5c, 0x12, 0x0e, 0x11, 0x12, 0xca, 0x05, 0x10, + 0x07, 0xfe, 0x1c, 0x82, 0x10, 0xdc, 0x90, 0x90, 0xf8, 0x02, 0x1c, 0xf2, 0x05, 0x00, 0xf1, 0x11, + 0x38, 0x70, 0x40, 0xdd, 0x55, 0x3d, 0x9e, 0xee, 0xe9, 0xee, 0xf9, 0x63, 0x94, 0xdb, 0x74, 0xd5, + 0xab, 0xf7, 0x7e, 0xef, 0x57, 0xef, 0x57, 0xfd, 0xaa, 0x07, 0x96, 0x2c, 0xc6, 0xac, 0x0e, 0xd5, + 0xda, 0x1d, 0xd6, 0x35, 0x35, 0x9b, 0x09, 0xed, 0x78, 0x59, 0x33, 0xe9, 0xb1, 0xdd, 0xa6, 0x2d, + 0xc7, 0x70, 0x0d, 0x8b, 0xfa, 0x0d, 0xcf, 0x67, 0x82, 0xe1, 0x59, 0x69, 0xd9, 0x08, 0x2d, 0x1b, + 0x36, 0x13, 0x8d, 0xe3, 0xe5, 0xca, 0x35, 0xb5, 0xdc, 0xf0, 0x6c, 0xcd, 0x70, 0x5d, 0x26, 0x0c, + 0x61, 0x33, 0x97, 0xcb, 0x25, 0x95, 0xd7, 0xd3, 0x9c, 0xfb, 0x94, 0xb3, 0xae, 0xdf, 0xa6, 0x91, + 0xd1, 0x6b, 0xca, 0xc8, 0x36, 0x9c, 0x60, 0xda, 0x36, 0x9c, 0x96, 0xc7, 0x3a, 0x76, 0xfb, 0x44, + 0xcd, 0x57, 0xe2, 0xf3, 0xb1, 0xb9, 0x68, 0x6d, 0xf8, 0x74, 0xd8, 0x3d, 0xd2, 0xcc, 0xae, 0x1f, + 0x22, 0x50, 0xf3, 0xd5, 0xe4, 0x3c, 0x75, 0x3c, 0x11, 0x2d, 0xae, 0x25, 0x27, 0x8f, 0x6c, 0xda, + 0x31, 0x5b, 0x8e, 0xc1, 0xef, 0x2b, 0x8b, 0xc5, 0xa4, 0x85, 0xb0, 0x1d, 0xca, 0x85, 0xe1, 0x78, + 0xca, 0x60, 0x5e, 0x19, 0xf8, 0x5e, 0x5b, 0xe3, 0xc2, 
0x10, 0x5d, 0x95, 0x14, 0x79, 0x84, 0xa0, + 0xba, 0xee, 0x53, 0x43, 0xd0, 0x8d, 0x90, 0x4b, 0x9d, 0x5a, 0x36, 0x17, 0xfe, 0x89, 0x4e, 0x3f, + 0xe9, 0x52, 0x2e, 0xf0, 0x55, 0x98, 0xf6, 0x0c, 0x9f, 0xba, 0xa2, 0x8c, 0x6a, 0x68, 0xa9, 0xa8, + 0xab, 0x27, 0xbc, 0x0d, 0x97, 0x14, 0xf9, 0xbe, 0x5a, 0x51, 0x2e, 0xd4, 0xd0, 0xd2, 0xcc, 0xca, + 0xeb, 0x8d, 0x14, 0xfa, 0x1b, 0x09, 0xe7, 0xaf, 0x98, 0xb1, 0x67, 0xd2, 0x80, 0xf2, 0x16, 0x15, + 0xe9, 0x08, 0x30, 0x9c, 0x77, 0x0d, 0x87, 0xaa, 0xf8, 0xe1, 0x6f, 0xb2, 0x0c, 0xd5, 0x0d, 0xda, + 0xa1, 0x59, 0xa0, 0xd3, 0x96, 0xfc, 0x1a, 0x41, 0xf5, 0x9e, 0x67, 0x66, 0x26, 0x9a, 0x92, 0x10, + 0x9a, 0x38, 0x21, 0xfc, 0x0e, 0xcc, 0x74, 0xc3, 0x60, 0xe1, 0x2e, 0x29, 0x6a, 0x2a, 0x91, 0xa7, + 0x68, 0x9b, 0x1a, 0xef, 0x07, 0x1b, 0xb9, 0x63, 0xf0, 0xfb, 0x3a, 0x48, 0xf3, 0xe0, 0x37, 0xf9, + 0x04, 0xaa, 0xdb, 0x36, 0x8f, 0xd3, 0x61, 0x53, 0x3e, 0x6c, 0x4b, 0xaa, 0x50, 0xf4, 0x0c, 0x8b, + 0xb6, 0xb8, 0xfd, 0x80, 0x86, 0x11, 0x3f, 0xa5, 0x5f, 0x08, 0x06, 0xf6, 0xec, 0x07, 0x14, 0x2f, + 0x00, 0x84, 0x93, 0x82, 0xdd, 0xa7, 0x6e, 0x79, 0x2a, 0x5c, 0x18, 0x9a, 0xef, 0x07, 0x03, 0xe4, + 0x29, 0x82, 0x6b, 0xe9, 0x31, 0xb9, 0xc7, 0x5c, 0x4e, 0xf1, 0x2e, 0x5c, 0x89, 0xd3, 0x63, 0x53, + 0x5e, 0x46, 0xb5, 0xa9, 0x51, 0x09, 0xba, 0x6c, 0x26, 0x3c, 0xe3, 0xcf, 0xc3, 0x25, 0x97, 0x7e, + 0x57, 0xb4, 0xfa, 0x60, 0x15, 0x42, 0x58, 0x17, 0x83, 0xe1, 0xdd, 0x1e, 0xb4, 0x43, 0x98, 0x8d, + 0x17, 0x68, 0x3e, 0x0b, 0x37, 0x61, 0x5a, 0x86, 0x52, 0xa4, 0x57, 0xf3, 0xd0, 0x29, 0x53, 0x62, + 0xc0, 0xe5, 0xbe, 0xfa, 0xcb, 0x2c, 0x22, 0xfc, 0x45, 0x80, 0x53, 0xed, 0x8d, 0xb0, 0xab, 0xc5, + 0xa3, 0xe8, 0x27, 0xf9, 0x31, 0x82, 0xd9, 0x78, 0xfd, 0xc9, 0x30, 0x93, 0xe0, 0x4d, 0x96, 0xd7, + 0xd4, 0x58, 0xe5, 0xf5, 0x26, 0xcc, 0xc6, 0xc5, 0x93, 0x2d, 0x9a, 0xbf, 0x16, 0x00, 0x9f, 0x96, + 0xc5, 0xd0, 0x0a, 0xfc, 0x2c, 0x28, 0x1d, 0xb4, 0xdc, 0xae, 0xd3, 0xb2, 0x4d, 0x5e, 0x2e, 0xd4, + 0xa6, 0x96, 0xce, 0xeb, 0x25, 0x39, 0xfa, 0x41, 0xd7, 0xb9, 0x6b, 0xf2, 0xa0, 0x14, 0x95, 0x55, + 0x60, 0x31, 0x55, 0x9b, 0x0a, 0x4a, 0x51, 0x8e, 0x04, 0xd3, 0x71, 0x8e, 0xcf, 0x8f, 0xc1, 0x31, + 0xfe, 0x26, 0xcc, 0x59, 0x86, 0xa0, 0xdf, 0x31, 0x4e, 0x5a, 0x1d, 0x9b, 0x8b, 0x16, 0xf3, 0xc2, + 0x43, 0xbe, 0x3c, 0x1d, 0x3a, 0x79, 0x23, 0x95, 0xd9, 0x2d, 0xb9, 0x20, 0xc8, 0xf2, 0x6b, 0xd2, + 0x5c, 0xc7, 0xd6, 0xc0, 0x58, 0x5c, 0x5c, 0x66, 0xae, 0xb8, 0x68, 0x52, 0x5c, 0x2f, 0x10, 0xe0, + 0xc1, 0x30, 0x78, 0x13, 0x4a, 0x11, 0x5a, 0x71, 0xe2, 0x49, 0xe2, 0x5f, 0x59, 0xa9, 0xe5, 0xa1, + 0xdc, 0x3f, 0xf1, 0x68, 0xf3, 0x9c, 0x3e, 0x63, 0x9d, 0x3e, 0xe2, 0x3b, 0x30, 0x6f, 0x70, 0xce, + 0xda, 0xb6, 0x7c, 0xa3, 0xb5, 0x22, 0x9f, 0xb6, 0x29, 0xf5, 0xd4, 0x3c, 0xa7, 0xbf, 0xda, 0x6f, + 0xa0, 0x5c, 0xdd, 0x35, 0xf1, 0x6d, 0xb8, 0x1a, 0x5b, 0xd9, 0xdb, 0x15, 0x79, 0x3e, 0x34, 0xcf, + 0xe9, 0x73, 0xfd, 0xf3, 0x1b, 0x6a, 0x8b, 0xd6, 0x2e, 0xc0, 0xf4, 0x91, 0xdd, 0x11, 0xd4, 0x27, + 0x02, 0x66, 0x63, 0xe5, 0xa1, 0x0e, 0x8b, 0x5b, 0xf0, 0x69, 0xe9, 0x2b, 0x3a, 0x22, 0x72, 0x8b, + 0x3a, 0xb2, 0x1d, 0xf9, 0x44, 0x78, 0x84, 0xa0, 0xb6, 0xc3, 0x4c, 0xfb, 0xe8, 0x64, 0x3d, 0x70, + 0xb7, 0xcf, 0xa4, 0xa3, 0x75, 0xe6, 0x1e, 0xd9, 0x56, 0x9e, 0x7c, 0xeb, 0x70, 0xe5, 0x98, 0xfa, + 0xdc, 0x66, 0x6e, 0x4b, 0xb0, 0x96, 0x94, 0x44, 0x18, 0x62, 0x4a, 0xbf, 0xa4, 0x26, 0xf6, 0x99, + 0x14, 0x29, 0x5e, 0x84, 0x99, 0x43, 0xdb, 0x35, 0xfc, 0x93, 0x96, 0x69, 0x08, 0x23, 0x64, 0xa4, + 0xa4, 0x83, 0x1c, 0xda, 0x30, 0x84, 0x41, 0xbe, 0x01, 0x8b, 0xa7, 0xb9, 0xcb, 0xd8, 0x1f, 0x49, + 0x1f, 0x3c, 0x0f, 0xc3, 0x75, 0x28, 0x05, 0xe2, 0x50, 0xe1, 0xb8, 0x3a, 0xa8, 
0x67, 0xdc, 0xae, + 0x13, 0xad, 0x26, 0x1d, 0xa8, 0x65, 0x7b, 0x56, 0x14, 0x37, 0x7b, 0x52, 0x6b, 0x87, 0x06, 0x11, + 0xd3, 0xd7, 0x73, 0x98, 0x56, 0x04, 0x5d, 0x34, 0xfb, 0x9e, 0x38, 0xd9, 0x86, 0xf9, 0xd3, 0x68, + 0x7b, 0xc2, 0x10, 0x34, 0x17, 0xff, 0x02, 0x40, 0x80, 0x9f, 0x87, 0x86, 0x0a, 0x7d, 0xd1, 0xed, + 0x3a, 0x72, 0x25, 0x31, 0xa0, 0x3c, 0xe8, 0x4d, 0x61, 0xde, 0x04, 0x15, 0x3a, 0x5a, 0x2d, 0x21, + 0xd7, 0x72, 0x20, 0x87, 0x1e, 0xa2, 0xf3, 0x43, 0x85, 0x60, 0x50, 0xd9, 0xa3, 0xae, 0xb9, 0xce, + 0x1c, 0xc7, 0x70, 0x7b, 0xbb, 0x9f, 0x87, 0x39, 0xb1, 0x97, 0x85, 0xe4, 0x5e, 0xe2, 0x6b, 0x50, + 0xe4, 0xdd, 0xc3, 0x23, 0xd6, 0x31, 0xa9, 0x1f, 0xbd, 0x1c, 0x7b, 0x03, 0x64, 0x01, 0xaa, 0xa9, + 0x01, 0x65, 0x5a, 0xc4, 0x83, 0xca, 0x9a, 0xed, 0x9a, 0x72, 0x74, 0x9f, 0x29, 0x7d, 0x0d, 0x3b, + 0x2b, 0x17, 0x00, 0x92, 0x4a, 0xd5, 0x8b, 0x56, 0x4f, 0x9b, 0x55, 0x28, 0x26, 0xe4, 0xa8, 0x5f, + 0x88, 0xce, 0xc8, 0x00, 0x50, 0x6a, 0x44, 0x05, 0x48, 0xc0, 0x6b, 0xf7, 0xdc, 0xc3, 0x9e, 0xc1, + 0xfb, 0x3e, 0x73, 0x5e, 0x02, 0xa8, 0xeb, 0xb0, 0x98, 0x19, 0x55, 0x02, 0x5b, 0xf9, 0x27, 0x81, + 0x8b, 0x72, 0x76, 0x47, 0x76, 0xec, 0xf8, 0x8f, 0x08, 0xe6, 0xd2, 0xda, 0x4f, 0x7c, 0x23, 0xb5, + 0x28, 0x72, 0x3a, 0xd5, 0xca, 0x28, 0x6d, 0x08, 0xd9, 0xfa, 0xc1, 0xdf, 0xff, 0xf1, 0xa4, 0xf0, + 0x1e, 0x69, 0x04, 0x1d, 0xfa, 0xf7, 0x64, 0xd6, 0x5f, 0xf2, 0x7c, 0xf6, 0x31, 0x6d, 0x0b, 0xae, + 0xd5, 0xb5, 0x0e, 0x6b, 0xcb, 0xc3, 0x4e, 0xab, 0x3f, 0xd4, 0x4e, 0x3b, 0x9d, 0xd5, 0x64, 0x6f, + 0x88, 0x9f, 0x21, 0xb8, 0x32, 0xd0, 0xb2, 0xe2, 0xb7, 0xd2, 0x0f, 0xef, 0x8c, 0xd6, 0x76, 0x34, + 0xc8, 0xb7, 0x43, 0xc8, 0x37, 0xb0, 0x84, 0x1c, 0xd4, 0x73, 0x06, 0xe0, 0x3e, 0xbc, 0x5a, 0xfd, + 0x21, 0xfe, 0x1b, 0x82, 0xb9, 0xb4, 0x86, 0x37, 0x83, 0xda, 0x9c, 0xde, 0x78, 0x34, 0x9c, 0x5f, + 0x0f, 0x71, 0x7e, 0xb8, 0xf2, 0xe5, 0x10, 0x67, 0x82, 0xaf, 0xc6, 0xc8, 0xb8, 0x07, 0xa9, 0x7e, + 0x8a, 0x60, 0x2e, 0xad, 0xdb, 0xcf, 0x48, 0x24, 0xe7, 0x62, 0x50, 0xb9, 0x3a, 0xd0, 0x47, 0x6c, + 0x06, 0xf7, 0xac, 0x88, 0xe3, 0xfa, 0xb8, 0x1c, 0x3f, 0x47, 0x30, 0x97, 0xd6, 0x36, 0x67, 0x40, + 0xcb, 0xe9, 0xea, 0x2b, 0xcb, 0x63, 0xac, 0x50, 0x3a, 0x8f, 0x57, 0xc6, 0xc8, 0xc5, 0x1c, 0xd4, + 0x6e, 0xa9, 0x5f, 0x49, 0x78, 0x69, 0x04, 0xb1, 0x49, 0x94, 0x79, 0x2f, 0x72, 0xd2, 0x0c, 0xf1, + 0xac, 0x91, 0x3b, 0xc3, 0xf1, 0xc4, 0x79, 0x54, 0xf7, 0x78, 0xbe, 0x1a, 0xf5, 0xb7, 0x2f, 0x10, + 0x14, 0x7b, 0xaa, 0xc1, 0x9f, 0x1b, 0xa6, 0xaa, 0x11, 0xb0, 0xfd, 0x10, 0x85, 0xe0, 0xbe, 0x8f, + 0xef, 0x8c, 0xb5, 0xc5, 0x11, 0x32, 0xad, 0xfe, 0xf0, 0x60, 0x1d, 0xbf, 0x37, 0xde, 0x5a, 0xcb, + 0x67, 0x5d, 0x2f, 0xee, 0x04, 0xff, 0x17, 0x41, 0xa9, 0x5f, 0x6a, 0x19, 0xdc, 0xa7, 0xdc, 0x14, + 0xf2, 0xf3, 0xfb, 0x85, 0xcc, 0xef, 0xe7, 0x68, 0xe5, 0xdd, 0x3e, 0xfd, 0x35, 0x26, 0xc8, 0x33, + 0xda, 0x84, 0x03, 0x7d, 0xa5, 0x39, 0x91, 0xab, 0x94, 0xb4, 0x7b, 0x1b, 0xfb, 0x18, 0x41, 0xa9, + 0x5f, 0xa0, 0x19, 0xe9, 0xa7, 0xdc, 0x4f, 0x32, 0xb5, 0xfb, 0x95, 0x30, 0xf1, 0xd5, 0xfa, 0xc4, + 0x1b, 0x8b, 0xff, 0x8d, 0x60, 0xa6, 0xaf, 0x8d, 0xc5, 0x6f, 0x0c, 0x91, 0x62, 0x4f, 0xb3, 0x4b, + 0xc3, 0x0d, 0x95, 0x54, 0x13, 0xe5, 0x37, 0x81, 0x36, 0x7a, 0xe5, 0x37, 0xfa, 0xda, 0xde, 0x4e, + 0xf4, 0x9c, 0xe0, 0x3f, 0x15, 0xe0, 0x33, 0x99, 0xad, 0x33, 0xbe, 0x95, 0x9a, 0xcf, 0xb0, 0x56, + 0xbb, 0x32, 0xbc, 0xe7, 0x24, 0x7f, 0x91, 0xf9, 0xff, 0x19, 0x11, 0x7d, 0xd2, 0x6d, 0x5a, 0x75, + 0xb2, 0x70, 0xac, 0xa2, 0xfa, 0xc1, 0xb7, 0xc9, 0xb7, 0xce, 0x2c, 0xce, 0xdc, 0x08, 0xf8, 0x77, + 0x85, 0xfe, 0xd6, 0x36, 0xde, 0x96, 0xe3, 0xb7, 0x87, 0x54, 0x42, 0xea, 0xfd, 0xa0, 0x72, 0x6b, + 0xcc, 
0x55, 0xaa, 0x98, 0x7a, 0x5a, 0xc7, 0x5b, 0x93, 0x92, 0xa9, 0xb5, 0x63, 0xae, 0x0f, 0x74, + 0xbc, 0x7b, 0x66, 0xfa, 0x12, 0x3e, 0xf1, 0x8f, 0x0a, 0x70, 0x39, 0x79, 0x17, 0xc0, 0x5f, 0x18, + 0x92, 0x72, 0xec, 0x02, 0x52, 0x79, 0x6b, 0x44, 0x6b, 0x45, 0xcc, 0x4f, 0x25, 0x31, 0x3f, 0x41, + 0xf8, 0xdd, 0x89, 0x89, 0x91, 0x57, 0x93, 0x83, 0xaf, 0xe2, 0xe6, 0xd9, 0x09, 0x91, 0xbe, 0xf0, + 0xbf, 0x10, 0x94, 0xf6, 0xa8, 0xb8, 0x6b, 0x38, 0xbb, 0xe1, 0x37, 0x61, 0x4c, 0xa2, 0xb4, 0x6c, + 0xc3, 0x09, 0x12, 0xea, 0x9f, 0x8c, 0x52, 0x7f, 0x35, 0x61, 0x23, 0x67, 0xc9, 0xcf, 0x64, 0x8a, + 0x4f, 0x10, 0x91, 0x29, 0x46, 0x1f, 0xa9, 0x47, 0xeb, 0xad, 0x78, 0x5f, 0xa0, 0x40, 0x35, 0x3b, + 0xa4, 0x39, 0xb6, 0x97, 0xd3, 0x53, 0x25, 0xe9, 0x2e, 0x4c, 0x74, 0x2b, 0x2f, 0xd1, 0xad, 0x97, + 0x95, 0xa8, 0xf5, 0xff, 0x4d, 0x34, 0xe1, 0x0e, 0x3f, 0x2e, 0x00, 0xde, 0xa7, 0x3c, 0x1c, 0xa4, + 0xbe, 0x63, 0x73, 0x59, 0xf1, 0x4b, 0x89, 0x54, 0x06, 0x4d, 0xa2, 0xa4, 0xdf, 0x1c, 0xc1, 0x52, + 0x15, 0xf5, 0xaf, 0x24, 0x11, 0xcf, 0x10, 0xd9, 0x9c, 0x80, 0x08, 0x31, 0xe0, 0x38, 0xa0, 0x63, + 0x8f, 0x7c, 0x70, 0x16, 0x3a, 0x52, 0x9d, 0xe2, 0xdf, 0x17, 0x60, 0x36, 0xe5, 0x9e, 0x8c, 0xb5, + 0x54, 0x11, 0x67, 0x5f, 0xe1, 0x2b, 0x37, 0x46, 0x5f, 0xa0, 0x38, 0xfa, 0x83, 0xe4, 0xe8, 0xb7, + 0x88, 0x6c, 0x4f, 0xfc, 0x7a, 0xe1, 0x83, 0xfe, 0x03, 0xaa, 0x0e, 0xc8, 0xbd, 0xb3, 0xbf, 0x58, + 0xd2, 0x7d, 0xe3, 0x67, 0x05, 0x98, 0x4d, 0xb9, 0xc8, 0x67, 0x30, 0x96, 0xfd, 0x91, 0x21, 0x83, + 0xb1, 0xbc, 0x6f, 0x04, 0xbd, 0x77, 0x08, 0xd9, 0x18, 0xb7, 0x23, 0x59, 0x3d, 0x1c, 0xf4, 0x1b, + 0x30, 0xa5, 0x93, 0x9d, 0x89, 0x1b, 0x94, 0x0c, 0x9f, 0xf8, 0x79, 0x01, 0xe6, 0x33, 0xbe, 0x2a, + 0xe0, 0x9b, 0xe9, 0x9d, 0x73, 0xee, 0x97, 0x8f, 0xca, 0xdb, 0xe3, 0x2d, 0x52, 0x6c, 0xfd, 0x46, + 0xb2, 0xf5, 0x4b, 0xa4, 0x8e, 0x91, 0x71, 0xd8, 0xea, 0xa6, 0xfb, 0x0e, 0x18, 0xfb, 0x88, 0x7c, + 0x38, 0x39, 0x63, 0xd9, 0x7e, 0xd7, 0x3e, 0x86, 0xf9, 0x36, 0x73, 0xd2, 0x32, 0x5d, 0xc3, 0xb1, + 0xef, 0x2f, 0xbb, 0x41, 0x2b, 0xbd, 0x8b, 0x0e, 0x6e, 0x2b, 0x53, 0x8b, 0x75, 0x0c, 0xd7, 0x6a, + 0x30, 0xdf, 0xd2, 0x2c, 0xea, 0x86, 0x8d, 0xb6, 0x26, 0xa7, 0x0c, 0xcf, 0xe6, 0xb1, 0xbf, 0x47, + 0xdf, 0xb1, 0x99, 0xf8, 0x0f, 0x42, 0x87, 0xd3, 0xa1, 0xd5, 0xcd, 0xff, 0x05, 0x00, 0x00, 0xff, + 0xff, 0x7f, 0x49, 0x56, 0x46, 0xa0, 0x1d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/resources.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/resources.pb.go new file mode 100644 index 0000000..584e614 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/iot/v1/resources.pb.go @@ -0,0 +1,1586 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/iot/v1/resources.proto + +package iot // import "google.golang.org/genproto/googleapis/cloud/iot/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates whether an MQTT connection is enabled or disabled. See the field +// description for details. +type MqttState int32 + +const ( + // No MQTT state specified. If not specified, MQTT will be enabled by default. + MqttState_MQTT_STATE_UNSPECIFIED MqttState = 0 + // Enables a MQTT connection. + MqttState_MQTT_ENABLED MqttState = 1 + // Disables a MQTT connection. + MqttState_MQTT_DISABLED MqttState = 2 +) + +var MqttState_name = map[int32]string{ + 0: "MQTT_STATE_UNSPECIFIED", + 1: "MQTT_ENABLED", + 2: "MQTT_DISABLED", +} +var MqttState_value = map[string]int32{ + "MQTT_STATE_UNSPECIFIED": 0, + "MQTT_ENABLED": 1, + "MQTT_DISABLED": 2, +} + +func (x MqttState) String() string { + return proto.EnumName(MqttState_name, int32(x)) +} +func (MqttState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{0} +} + +// Indicates whether DeviceService (HTTP) is enabled or disabled for the +// registry. See the field description for details. +type HttpState int32 + +const ( + // No HTTP state specified. If not specified, DeviceService will be + // enabled by default. + HttpState_HTTP_STATE_UNSPECIFIED HttpState = 0 + // Enables DeviceService (HTTP) service for the registry. + HttpState_HTTP_ENABLED HttpState = 1 + // Disables DeviceService (HTTP) service for the registry. + HttpState_HTTP_DISABLED HttpState = 2 +) + +var HttpState_name = map[int32]string{ + 0: "HTTP_STATE_UNSPECIFIED", + 1: "HTTP_ENABLED", + 2: "HTTP_DISABLED", +} +var HttpState_value = map[string]int32{ + "HTTP_STATE_UNSPECIFIED": 0, + "HTTP_ENABLED": 1, + "HTTP_DISABLED": 2, +} + +func (x HttpState) String() string { + return proto.EnumName(HttpState_name, int32(x)) +} +func (HttpState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{1} +} + +// **Beta Feature** +// +// The logging verbosity for device activity. Specifies which events should be +// written to logs. For example, if the LogLevel is ERROR, only events that +// terminate in errors will be logged. LogLevel is inclusive; enabling INFO +// logging will also enable ERROR logging. +type LogLevel int32 + +const ( + // No logging specified. If not specified, logging will be disabled. + LogLevel_LOG_LEVEL_UNSPECIFIED LogLevel = 0 + // Disables logging. + LogLevel_NONE LogLevel = 10 + // Error events will be logged. + LogLevel_ERROR LogLevel = 20 + // Informational events will be logged, such as connections and + // disconnections. + LogLevel_INFO LogLevel = 30 + // All events will be logged. + LogLevel_DEBUG LogLevel = 40 +) + +var LogLevel_name = map[int32]string{ + 0: "LOG_LEVEL_UNSPECIFIED", + 10: "NONE", + 20: "ERROR", + 30: "INFO", + 40: "DEBUG", +} +var LogLevel_value = map[string]int32{ + "LOG_LEVEL_UNSPECIFIED": 0, + "NONE": 10, + "ERROR": 20, + "INFO": 30, + "DEBUG": 40, +} + +func (x LogLevel) String() string { + return proto.EnumName(LogLevel_name, int32(x)) +} +func (LogLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{2} +} + +// Gateway type. +type GatewayType int32 + +const ( + // If unspecified, the device is considered a non-gateway device. + GatewayType_GATEWAY_TYPE_UNSPECIFIED GatewayType = 0 + // The device is a gateway. + GatewayType_GATEWAY GatewayType = 1 + // The device is not a gateway. 
+ GatewayType_NON_GATEWAY GatewayType = 2 +) + +var GatewayType_name = map[int32]string{ + 0: "GATEWAY_TYPE_UNSPECIFIED", + 1: "GATEWAY", + 2: "NON_GATEWAY", +} +var GatewayType_value = map[string]int32{ + "GATEWAY_TYPE_UNSPECIFIED": 0, + "GATEWAY": 1, + "NON_GATEWAY": 2, +} + +func (x GatewayType) String() string { + return proto.EnumName(GatewayType_name, int32(x)) +} +func (GatewayType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{3} +} + +// The gateway authorization/authentication method. This setting determines how +// Cloud IoT Core authorizes/authenticate devices to access the gateway. +type GatewayAuthMethod int32 + +const ( + // No authentication/authorization method specified. No devices are allowed to + // access the gateway. + GatewayAuthMethod_GATEWAY_AUTH_METHOD_UNSPECIFIED GatewayAuthMethod = 0 + // The device is authenticated through the gateway association only. Device + // credentials are ignored even if provided. + GatewayAuthMethod_ASSOCIATION_ONLY GatewayAuthMethod = 1 + // The device is authenticated through its own credentials. Gateway + // association is not checked. + GatewayAuthMethod_DEVICE_AUTH_TOKEN_ONLY GatewayAuthMethod = 2 + // The device is authenticated through both device credentials and gateway + // association. The device must be bound to the gateway and must provide its + // own credentials. + GatewayAuthMethod_ASSOCIATION_AND_DEVICE_AUTH_TOKEN GatewayAuthMethod = 3 +) + +var GatewayAuthMethod_name = map[int32]string{ + 0: "GATEWAY_AUTH_METHOD_UNSPECIFIED", + 1: "ASSOCIATION_ONLY", + 2: "DEVICE_AUTH_TOKEN_ONLY", + 3: "ASSOCIATION_AND_DEVICE_AUTH_TOKEN", +} +var GatewayAuthMethod_value = map[string]int32{ + "GATEWAY_AUTH_METHOD_UNSPECIFIED": 0, + "ASSOCIATION_ONLY": 1, + "DEVICE_AUTH_TOKEN_ONLY": 2, + "ASSOCIATION_AND_DEVICE_AUTH_TOKEN": 3, +} + +func (x GatewayAuthMethod) String() string { + return proto.EnumName(GatewayAuthMethod_name, int32(x)) +} +func (GatewayAuthMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{4} +} + +// The supported formats for the public key. +type PublicKeyCertificateFormat int32 + +const ( + // The format has not been specified. This is an invalid default value and + // must not be used. + PublicKeyCertificateFormat_UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT PublicKeyCertificateFormat = 0 + // An X.509v3 certificate ([RFC5280](https://www.ietf.org/rfc/rfc5280.txt)), + // encoded in base64, and wrapped by `-----BEGIN CERTIFICATE-----` and + // `-----END CERTIFICATE-----`. + PublicKeyCertificateFormat_X509_CERTIFICATE_PEM PublicKeyCertificateFormat = 1 +) + +var PublicKeyCertificateFormat_name = map[int32]string{ + 0: "UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT", + 1: "X509_CERTIFICATE_PEM", +} +var PublicKeyCertificateFormat_value = map[string]int32{ + "UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT": 0, + "X509_CERTIFICATE_PEM": 1, +} + +func (x PublicKeyCertificateFormat) String() string { + return proto.EnumName(PublicKeyCertificateFormat_name, int32(x)) +} +func (PublicKeyCertificateFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{5} +} + +// The supported formats for the public key. +type PublicKeyFormat int32 + +const ( + // The format has not been specified. This is an invalid default value and + // must not be used. 
+ PublicKeyFormat_UNSPECIFIED_PUBLIC_KEY_FORMAT PublicKeyFormat = 0 + // An RSA public key encoded in base64, and wrapped by + // `-----BEGIN PUBLIC KEY-----` and `-----END PUBLIC KEY-----`. This can be + // used to verify `RS256` signatures in JWT tokens ([RFC7518]( + // https://www.ietf.org/rfc/rfc7518.txt)). + PublicKeyFormat_RSA_PEM PublicKeyFormat = 3 + // As RSA_PEM, but wrapped in an X.509v3 certificate ([RFC5280]( + // https://www.ietf.org/rfc/rfc5280.txt)), encoded in base64, and wrapped by + // `-----BEGIN CERTIFICATE-----` and `-----END CERTIFICATE-----`. + PublicKeyFormat_RSA_X509_PEM PublicKeyFormat = 1 + // Public key for the ECDSA algorithm using P-256 and SHA-256, encoded in + // base64, and wrapped by `-----BEGIN PUBLIC KEY-----` and `-----END + // PUBLIC KEY-----`. This can be used to verify JWT tokens with the `ES256` + // algorithm ([RFC7518](https://www.ietf.org/rfc/rfc7518.txt)). This curve is + // defined in [OpenSSL](https://www.openssl.org/) as the `prime256v1` curve. + PublicKeyFormat_ES256_PEM PublicKeyFormat = 2 + // As ES256_PEM, but wrapped in an X.509v3 certificate ([RFC5280]( + // https://www.ietf.org/rfc/rfc5280.txt)), encoded in base64, and wrapped by + // `-----BEGIN CERTIFICATE-----` and `-----END CERTIFICATE-----`. + PublicKeyFormat_ES256_X509_PEM PublicKeyFormat = 4 +) + +var PublicKeyFormat_name = map[int32]string{ + 0: "UNSPECIFIED_PUBLIC_KEY_FORMAT", + 3: "RSA_PEM", + 1: "RSA_X509_PEM", + 2: "ES256_PEM", + 4: "ES256_X509_PEM", +} +var PublicKeyFormat_value = map[string]int32{ + "UNSPECIFIED_PUBLIC_KEY_FORMAT": 0, + "RSA_PEM": 3, + "RSA_X509_PEM": 1, + "ES256_PEM": 2, + "ES256_X509_PEM": 4, +} + +func (x PublicKeyFormat) String() string { + return proto.EnumName(PublicKeyFormat_name, int32(x)) +} +func (PublicKeyFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{6} +} + +// The device resource. +type Device struct { + // The user-defined device identifier. The device ID must be unique + // within a device registry. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The resource path name. For example, + // `projects/p1/locations/us-central1/registries/registry0/devices/dev0` or + // `projects/p1/locations/us-central1/registries/registry0/devices/{num_id}`. + // When `name` is populated as a response from the service, it always ends + // in the device numeric ID. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // [Output only] A server-defined unique numeric ID for the device. This is a + // more compact way to identify devices, and it is globally unique. + NumId uint64 `protobuf:"varint,3,opt,name=num_id,json=numId,proto3" json:"num_id,omitempty"` + // The credentials used to authenticate this device. To allow credential + // rotation without interruption, multiple device credentials can be bound to + // this device. No more than 3 credentials can be bound to a single device at + // a time. When new credentials are added to a device, they are verified + // against the registry credentials. For details, see the description of the + // `DeviceRegistry.credentials` field. + Credentials []*DeviceCredential `protobuf:"bytes,12,rep,name=credentials,proto3" json:"credentials,omitempty"` + // [Output only] The last time an MQTT `PINGREQ` was received. This field + // applies only to devices connecting through MQTT. MQTT clients usually only + // send `PINGREQ` messages if the connection is idle, and no other messages + // have been sent. 
Timestamps are periodically collected and written to + // storage; they may be stale by a few minutes. + LastHeartbeatTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=last_heartbeat_time,json=lastHeartbeatTime,proto3" json:"last_heartbeat_time,omitempty"` + // [Output only] The last time a telemetry event was received. Timestamps are + // periodically collected and written to storage; they may be stale by a few + // minutes. + LastEventTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=last_event_time,json=lastEventTime,proto3" json:"last_event_time,omitempty"` + // [Output only] The last time a state event was received. Timestamps are + // periodically collected and written to storage; they may be stale by a few + // minutes. + LastStateTime *timestamp.Timestamp `protobuf:"bytes,20,opt,name=last_state_time,json=lastStateTime,proto3" json:"last_state_time,omitempty"` + // [Output only] The last time a cloud-to-device config version acknowledgment + // was received from the device. This field is only for configurations + // sent through MQTT. + LastConfigAckTime *timestamp.Timestamp `protobuf:"bytes,14,opt,name=last_config_ack_time,json=lastConfigAckTime,proto3" json:"last_config_ack_time,omitempty"` + // [Output only] The last time a cloud-to-device config version was sent to + // the device. + LastConfigSendTime *timestamp.Timestamp `protobuf:"bytes,18,opt,name=last_config_send_time,json=lastConfigSendTime,proto3" json:"last_config_send_time,omitempty"` + // If a device is blocked, connections or requests from this device will fail. + // Can be used to temporarily prevent the device from connecting if, for + // example, the sensor is generating bad data and needs maintenance. + Blocked bool `protobuf:"varint,19,opt,name=blocked,proto3" json:"blocked,omitempty"` + // [Output only] The time the most recent error occurred, such as a failure to + // publish to Cloud Pub/Sub. This field is the timestamp of + // 'last_error_status'. + LastErrorTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=last_error_time,json=lastErrorTime,proto3" json:"last_error_time,omitempty"` + // [Output only] The error message of the most recent error, such as a failure + // to publish to Cloud Pub/Sub. 'last_error_time' is the timestamp of this + // field. If no errors have occurred, this field has an empty message + // and the status code 0 == OK. Otherwise, this field is expected to have a + // status code other than OK. + LastErrorStatus *status.Status `protobuf:"bytes,11,opt,name=last_error_status,json=lastErrorStatus,proto3" json:"last_error_status,omitempty"` + // The most recent device configuration, which is eventually sent from + // Cloud IoT Core to the device. If not present on creation, the + // configuration will be initialized with an empty payload and version value + // of `1`. To update this field after creation, use the + // `DeviceManager.ModifyCloudToDeviceConfig` method. + Config *DeviceConfig `protobuf:"bytes,13,opt,name=config,proto3" json:"config,omitempty"` + // [Output only] The state most recently received from the device. If no state + // has been reported, this field is not present. + State *DeviceState `protobuf:"bytes,16,opt,name=state,proto3" json:"state,omitempty"` + // **Beta Feature** + // + // The logging verbosity for device activity. If unspecified, + // DeviceRegistry.log_level will be used. 
+ LogLevel LogLevel `protobuf:"varint,21,opt,name=log_level,json=logLevel,proto3,enum=google.cloud.iot.v1.LogLevel" json:"log_level,omitempty"` + // The metadata key-value pairs assigned to the device. This metadata is not + // interpreted or indexed by Cloud IoT Core. It can be used to add contextual + // information for the device. + // + // Keys must conform to the regular expression [a-zA-Z][a-zA-Z0-9-_.+~%]+ and + // be less than 128 bytes in length. + // + // Values are free-form strings. Each value must be less than or equal to 32 + // KB in size. + // + // The total size of all keys and values must be less than 256 KB, and the + // maximum number of key-value pairs is 500. + Metadata map[string]string `protobuf:"bytes,17,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Gateway-related configuration and state. + GatewayConfig *GatewayConfig `protobuf:"bytes,24,opt,name=gateway_config,json=gatewayConfig,proto3" json:"gateway_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Device) Reset() { *m = Device{} } +func (m *Device) String() string { return proto.CompactTextString(m) } +func (*Device) ProtoMessage() {} +func (*Device) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{0} +} +func (m *Device) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Device.Unmarshal(m, b) +} +func (m *Device) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Device.Marshal(b, m, deterministic) +} +func (dst *Device) XXX_Merge(src proto.Message) { + xxx_messageInfo_Device.Merge(dst, src) +} +func (m *Device) XXX_Size() int { + return xxx_messageInfo_Device.Size(m) +} +func (m *Device) XXX_DiscardUnknown() { + xxx_messageInfo_Device.DiscardUnknown(m) +} + +var xxx_messageInfo_Device proto.InternalMessageInfo + +func (m *Device) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Device) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Device) GetNumId() uint64 { + if m != nil { + return m.NumId + } + return 0 +} + +func (m *Device) GetCredentials() []*DeviceCredential { + if m != nil { + return m.Credentials + } + return nil +} + +func (m *Device) GetLastHeartbeatTime() *timestamp.Timestamp { + if m != nil { + return m.LastHeartbeatTime + } + return nil +} + +func (m *Device) GetLastEventTime() *timestamp.Timestamp { + if m != nil { + return m.LastEventTime + } + return nil +} + +func (m *Device) GetLastStateTime() *timestamp.Timestamp { + if m != nil { + return m.LastStateTime + } + return nil +} + +func (m *Device) GetLastConfigAckTime() *timestamp.Timestamp { + if m != nil { + return m.LastConfigAckTime + } + return nil +} + +func (m *Device) GetLastConfigSendTime() *timestamp.Timestamp { + if m != nil { + return m.LastConfigSendTime + } + return nil +} + +func (m *Device) GetBlocked() bool { + if m != nil { + return m.Blocked + } + return false +} + +func (m *Device) GetLastErrorTime() *timestamp.Timestamp { + if m != nil { + return m.LastErrorTime + } + return nil +} + +func (m *Device) GetLastErrorStatus() *status.Status { + if m != nil { + return m.LastErrorStatus + } + return nil +} + +func (m *Device) GetConfig() *DeviceConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *Device) GetState() *DeviceState { + if m != nil { + return m.State + } + 
return nil +} + +func (m *Device) GetLogLevel() LogLevel { + if m != nil { + return m.LogLevel + } + return LogLevel_LOG_LEVEL_UNSPECIFIED +} + +func (m *Device) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Device) GetGatewayConfig() *GatewayConfig { + if m != nil { + return m.GatewayConfig + } + return nil +} + +// Gateway-related configuration and state. +type GatewayConfig struct { + // Indicates whether the device is a gateway. + GatewayType GatewayType `protobuf:"varint,1,opt,name=gateway_type,json=gatewayType,proto3,enum=google.cloud.iot.v1.GatewayType" json:"gateway_type,omitempty"` + // Indicates how to authorize and/or authenticate devices to access the + // gateway. + GatewayAuthMethod GatewayAuthMethod `protobuf:"varint,2,opt,name=gateway_auth_method,json=gatewayAuthMethod,proto3,enum=google.cloud.iot.v1.GatewayAuthMethod" json:"gateway_auth_method,omitempty"` + // [Output only] The ID of the gateway the device accessed most recently. + LastAccessedGatewayId string `protobuf:"bytes,3,opt,name=last_accessed_gateway_id,json=lastAccessedGatewayId,proto3" json:"last_accessed_gateway_id,omitempty"` + // [Output only] The most recent time at which the device accessed the gateway + // specified in `last_accessed_gateway`. + LastAccessedGatewayTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=last_accessed_gateway_time,json=lastAccessedGatewayTime,proto3" json:"last_accessed_gateway_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GatewayConfig) Reset() { *m = GatewayConfig{} } +func (m *GatewayConfig) String() string { return proto.CompactTextString(m) } +func (*GatewayConfig) ProtoMessage() {} +func (*GatewayConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{1} +} +func (m *GatewayConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GatewayConfig.Unmarshal(m, b) +} +func (m *GatewayConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GatewayConfig.Marshal(b, m, deterministic) +} +func (dst *GatewayConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_GatewayConfig.Merge(dst, src) +} +func (m *GatewayConfig) XXX_Size() int { + return xxx_messageInfo_GatewayConfig.Size(m) +} +func (m *GatewayConfig) XXX_DiscardUnknown() { + xxx_messageInfo_GatewayConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_GatewayConfig proto.InternalMessageInfo + +func (m *GatewayConfig) GetGatewayType() GatewayType { + if m != nil { + return m.GatewayType + } + return GatewayType_GATEWAY_TYPE_UNSPECIFIED +} + +func (m *GatewayConfig) GetGatewayAuthMethod() GatewayAuthMethod { + if m != nil { + return m.GatewayAuthMethod + } + return GatewayAuthMethod_GATEWAY_AUTH_METHOD_UNSPECIFIED +} + +func (m *GatewayConfig) GetLastAccessedGatewayId() string { + if m != nil { + return m.LastAccessedGatewayId + } + return "" +} + +func (m *GatewayConfig) GetLastAccessedGatewayTime() *timestamp.Timestamp { + if m != nil { + return m.LastAccessedGatewayTime + } + return nil +} + +// A container for a group of devices. +type DeviceRegistry struct { + // The identifier of this device registry. For example, `myRegistry`. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The resource path name. For example, + // `projects/example-project/locations/us-central1/registries/my-registry`. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The configuration for notification of telemetry events received from the + // device. All telemetry events that were successfully published by the + // device and acknowledged by Cloud IoT Core are guaranteed to be + // delivered to Cloud Pub/Sub. If multiple configurations match a message, + // only the first matching configuration is used. If you try to publish a + // device telemetry event using MQTT without specifying a Cloud Pub/Sub topic + // for the device's registry, the connection closes automatically. If you try + // to do so using an HTTP connection, an error is returned. Up to 10 + // configurations may be provided. + EventNotificationConfigs []*EventNotificationConfig `protobuf:"bytes,10,rep,name=event_notification_configs,json=eventNotificationConfigs,proto3" json:"event_notification_configs,omitempty"` + // The configuration for notification of new states received from the device. + // State updates are guaranteed to be stored in the state history, but + // notifications to Cloud Pub/Sub are not guaranteed. For example, if + // permissions are misconfigured or the specified topic doesn't exist, no + // notification will be published but the state will still be stored in Cloud + // IoT Core. + StateNotificationConfig *StateNotificationConfig `protobuf:"bytes,7,opt,name=state_notification_config,json=stateNotificationConfig,proto3" json:"state_notification_config,omitempty"` + // The MQTT configuration for this device registry. + MqttConfig *MqttConfig `protobuf:"bytes,4,opt,name=mqtt_config,json=mqttConfig,proto3" json:"mqtt_config,omitempty"` + // The DeviceService (HTTP) configuration for this device registry. + HttpConfig *HttpConfig `protobuf:"bytes,9,opt,name=http_config,json=httpConfig,proto3" json:"http_config,omitempty"` + // **Beta Feature** + // + // The default logging verbosity for activity from devices in this registry. + // The verbosity level can be overridden by Device.log_level. + LogLevel LogLevel `protobuf:"varint,11,opt,name=log_level,json=logLevel,proto3,enum=google.cloud.iot.v1.LogLevel" json:"log_level,omitempty"` + // The credentials used to verify the device credentials. No more than 10 + // credentials can be bound to a single registry at a time. The verification + // process occurs at the time of device creation or update. If this field is + // empty, no verification is performed. Otherwise, the credentials of a newly + // created device or added credentials of an updated device should be signed + // with one of these registry credentials. + // + // Note, however, that existing devices will never be affected by + // modifications to this list of credentials: after a device has been + // successfully created in a registry, it should be able to connect even if + // its registry credentials are revoked, deleted, or modified. 
+ Credentials []*RegistryCredential `protobuf:"bytes,8,rep,name=credentials,proto3" json:"credentials,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceRegistry) Reset() { *m = DeviceRegistry{} } +func (m *DeviceRegistry) String() string { return proto.CompactTextString(m) } +func (*DeviceRegistry) ProtoMessage() {} +func (*DeviceRegistry) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{2} +} +func (m *DeviceRegistry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceRegistry.Unmarshal(m, b) +} +func (m *DeviceRegistry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceRegistry.Marshal(b, m, deterministic) +} +func (dst *DeviceRegistry) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceRegistry.Merge(dst, src) +} +func (m *DeviceRegistry) XXX_Size() int { + return xxx_messageInfo_DeviceRegistry.Size(m) +} +func (m *DeviceRegistry) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceRegistry.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceRegistry proto.InternalMessageInfo + +func (m *DeviceRegistry) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *DeviceRegistry) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeviceRegistry) GetEventNotificationConfigs() []*EventNotificationConfig { + if m != nil { + return m.EventNotificationConfigs + } + return nil +} + +func (m *DeviceRegistry) GetStateNotificationConfig() *StateNotificationConfig { + if m != nil { + return m.StateNotificationConfig + } + return nil +} + +func (m *DeviceRegistry) GetMqttConfig() *MqttConfig { + if m != nil { + return m.MqttConfig + } + return nil +} + +func (m *DeviceRegistry) GetHttpConfig() *HttpConfig { + if m != nil { + return m.HttpConfig + } + return nil +} + +func (m *DeviceRegistry) GetLogLevel() LogLevel { + if m != nil { + return m.LogLevel + } + return LogLevel_LOG_LEVEL_UNSPECIFIED +} + +func (m *DeviceRegistry) GetCredentials() []*RegistryCredential { + if m != nil { + return m.Credentials + } + return nil +} + +// The configuration of MQTT for a device registry. +type MqttConfig struct { + // If enabled, allows connections using the MQTT protocol. Otherwise, MQTT + // connections to this registry will fail. 
+ MqttEnabledState MqttState `protobuf:"varint,1,opt,name=mqtt_enabled_state,json=mqttEnabledState,proto3,enum=google.cloud.iot.v1.MqttState" json:"mqtt_enabled_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MqttConfig) Reset() { *m = MqttConfig{} } +func (m *MqttConfig) String() string { return proto.CompactTextString(m) } +func (*MqttConfig) ProtoMessage() {} +func (*MqttConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{3} +} +func (m *MqttConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MqttConfig.Unmarshal(m, b) +} +func (m *MqttConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MqttConfig.Marshal(b, m, deterministic) +} +func (dst *MqttConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_MqttConfig.Merge(dst, src) +} +func (m *MqttConfig) XXX_Size() int { + return xxx_messageInfo_MqttConfig.Size(m) +} +func (m *MqttConfig) XXX_DiscardUnknown() { + xxx_messageInfo_MqttConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_MqttConfig proto.InternalMessageInfo + +func (m *MqttConfig) GetMqttEnabledState() MqttState { + if m != nil { + return m.MqttEnabledState + } + return MqttState_MQTT_STATE_UNSPECIFIED +} + +// The configuration of the HTTP bridge for a device registry. +type HttpConfig struct { + // If enabled, allows devices to use DeviceService via the HTTP protocol. + // Otherwise, any requests to DeviceService will fail for this registry. + HttpEnabledState HttpState `protobuf:"varint,1,opt,name=http_enabled_state,json=httpEnabledState,proto3,enum=google.cloud.iot.v1.HttpState" json:"http_enabled_state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpConfig) Reset() { *m = HttpConfig{} } +func (m *HttpConfig) String() string { return proto.CompactTextString(m) } +func (*HttpConfig) ProtoMessage() {} +func (*HttpConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{4} +} +func (m *HttpConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpConfig.Unmarshal(m, b) +} +func (m *HttpConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpConfig.Marshal(b, m, deterministic) +} +func (dst *HttpConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpConfig.Merge(dst, src) +} +func (m *HttpConfig) XXX_Size() int { + return xxx_messageInfo_HttpConfig.Size(m) +} +func (m *HttpConfig) XXX_DiscardUnknown() { + xxx_messageInfo_HttpConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpConfig proto.InternalMessageInfo + +func (m *HttpConfig) GetHttpEnabledState() HttpState { + if m != nil { + return m.HttpEnabledState + } + return HttpState_HTTP_STATE_UNSPECIFIED +} + +// The configuration for forwarding telemetry events. +type EventNotificationConfig struct { + // If the subfolder name matches this string exactly, this configuration will + // be used. The string must not include the leading '/' character. If empty, + // all strings are matched. This field is used only for telemetry events; + // subfolders are not supported for state changes. + SubfolderMatches string `protobuf:"bytes,2,opt,name=subfolder_matches,json=subfolderMatches,proto3" json:"subfolder_matches,omitempty"` + // A Cloud Pub/Sub topic name. For example, + // `projects/myProject/topics/deviceEvents`. 
+ PubsubTopicName string `protobuf:"bytes,1,opt,name=pubsub_topic_name,json=pubsubTopicName,proto3" json:"pubsub_topic_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventNotificationConfig) Reset() { *m = EventNotificationConfig{} } +func (m *EventNotificationConfig) String() string { return proto.CompactTextString(m) } +func (*EventNotificationConfig) ProtoMessage() {} +func (*EventNotificationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{5} +} +func (m *EventNotificationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventNotificationConfig.Unmarshal(m, b) +} +func (m *EventNotificationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventNotificationConfig.Marshal(b, m, deterministic) +} +func (dst *EventNotificationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventNotificationConfig.Merge(dst, src) +} +func (m *EventNotificationConfig) XXX_Size() int { + return xxx_messageInfo_EventNotificationConfig.Size(m) +} +func (m *EventNotificationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_EventNotificationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_EventNotificationConfig proto.InternalMessageInfo + +func (m *EventNotificationConfig) GetSubfolderMatches() string { + if m != nil { + return m.SubfolderMatches + } + return "" +} + +func (m *EventNotificationConfig) GetPubsubTopicName() string { + if m != nil { + return m.PubsubTopicName + } + return "" +} + +// The configuration for notification of new states received from the device. +type StateNotificationConfig struct { + // A Cloud Pub/Sub topic name. For example, + // `projects/myProject/topics/deviceEvents`. + PubsubTopicName string `protobuf:"bytes,1,opt,name=pubsub_topic_name,json=pubsubTopicName,proto3" json:"pubsub_topic_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StateNotificationConfig) Reset() { *m = StateNotificationConfig{} } +func (m *StateNotificationConfig) String() string { return proto.CompactTextString(m) } +func (*StateNotificationConfig) ProtoMessage() {} +func (*StateNotificationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{6} +} +func (m *StateNotificationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StateNotificationConfig.Unmarshal(m, b) +} +func (m *StateNotificationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StateNotificationConfig.Marshal(b, m, deterministic) +} +func (dst *StateNotificationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StateNotificationConfig.Merge(dst, src) +} +func (m *StateNotificationConfig) XXX_Size() int { + return xxx_messageInfo_StateNotificationConfig.Size(m) +} +func (m *StateNotificationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StateNotificationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StateNotificationConfig proto.InternalMessageInfo + +func (m *StateNotificationConfig) GetPubsubTopicName() string { + if m != nil { + return m.PubsubTopicName + } + return "" +} + +// A server-stored registry credential used to validate device credentials. +type RegistryCredential struct { + // The credential data. Reserved for expansion in the future. 
+ // + // Types that are valid to be assigned to Credential: + // *RegistryCredential_PublicKeyCertificate + Credential isRegistryCredential_Credential `protobuf_oneof:"credential"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegistryCredential) Reset() { *m = RegistryCredential{} } +func (m *RegistryCredential) String() string { return proto.CompactTextString(m) } +func (*RegistryCredential) ProtoMessage() {} +func (*RegistryCredential) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{7} +} +func (m *RegistryCredential) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegistryCredential.Unmarshal(m, b) +} +func (m *RegistryCredential) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegistryCredential.Marshal(b, m, deterministic) +} +func (dst *RegistryCredential) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegistryCredential.Merge(dst, src) +} +func (m *RegistryCredential) XXX_Size() int { + return xxx_messageInfo_RegistryCredential.Size(m) +} +func (m *RegistryCredential) XXX_DiscardUnknown() { + xxx_messageInfo_RegistryCredential.DiscardUnknown(m) +} + +var xxx_messageInfo_RegistryCredential proto.InternalMessageInfo + +type isRegistryCredential_Credential interface { + isRegistryCredential_Credential() +} + +type RegistryCredential_PublicKeyCertificate struct { + PublicKeyCertificate *PublicKeyCertificate `protobuf:"bytes,1,opt,name=public_key_certificate,json=publicKeyCertificate,proto3,oneof"` +} + +func (*RegistryCredential_PublicKeyCertificate) isRegistryCredential_Credential() {} + +func (m *RegistryCredential) GetCredential() isRegistryCredential_Credential { + if m != nil { + return m.Credential + } + return nil +} + +func (m *RegistryCredential) GetPublicKeyCertificate() *PublicKeyCertificate { + if x, ok := m.GetCredential().(*RegistryCredential_PublicKeyCertificate); ok { + return x.PublicKeyCertificate + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
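// Illustrative sketch, not part of the generated file above: populating the
// DeviceRegistry and notification-config messages defined in this file. The
// package alias iotpb, the registry ID, project, and topic names are
// assumptions made for the example only.
package main

import (
	"fmt"

	iotpb "google.golang.org/genproto/googleapis/cloud/iot/v1"
)

func main() {
	registry := &iotpb.DeviceRegistry{
		Id: "example-registry",
		// Event configs are matched in order and only the first match is
		// used, so the subfolder-specific entry precedes the catch-all.
		EventNotificationConfigs: []*iotpb.EventNotificationConfig{
			{SubfolderMatches: "sensors", PubsubTopicName: "projects/myProject/topics/sensorEvents"},
			{PubsubTopicName: "projects/myProject/topics/deviceEvents"},
		},
		// State is always stored; publication to this topic is best effort.
		StateNotificationConfig: &iotpb.StateNotificationConfig{
			PubsubTopicName: "projects/myProject/topics/deviceState",
		},
	}
	fmt.Println(registry.GetId())
}
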
+func (*RegistryCredential) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RegistryCredential_OneofMarshaler, _RegistryCredential_OneofUnmarshaler, _RegistryCredential_OneofSizer, []interface{}{ + (*RegistryCredential_PublicKeyCertificate)(nil), + } +} + +func _RegistryCredential_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RegistryCredential) + // credential + switch x := m.Credential.(type) { + case *RegistryCredential_PublicKeyCertificate: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PublicKeyCertificate); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RegistryCredential.Credential has unexpected type %T", x) + } + return nil +} + +func _RegistryCredential_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RegistryCredential) + switch tag { + case 1: // credential.public_key_certificate + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PublicKeyCertificate) + err := b.DecodeMessage(msg) + m.Credential = &RegistryCredential_PublicKeyCertificate{msg} + return true, err + default: + return false, nil + } +} + +func _RegistryCredential_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RegistryCredential) + // credential + switch x := m.Credential.(type) { + case *RegistryCredential_PublicKeyCertificate: + s := proto.Size(x.PublicKeyCertificate) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Details of an X.509 certificate. For informational purposes only. +type X509CertificateDetails struct { + // The entity that signed the certificate. + Issuer string `protobuf:"bytes,1,opt,name=issuer,proto3" json:"issuer,omitempty"` + // The entity the certificate and public key belong to. + Subject string `protobuf:"bytes,2,opt,name=subject,proto3" json:"subject,omitempty"` + // The time the certificate becomes valid. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the certificate becomes invalid. + ExpiryTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=expiry_time,json=expiryTime,proto3" json:"expiry_time,omitempty"` + // The algorithm used to sign the certificate. + SignatureAlgorithm string `protobuf:"bytes,5,opt,name=signature_algorithm,json=signatureAlgorithm,proto3" json:"signature_algorithm,omitempty"` + // The type of public key in the certificate. 
+ PublicKeyType string `protobuf:"bytes,6,opt,name=public_key_type,json=publicKeyType,proto3" json:"public_key_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *X509CertificateDetails) Reset() { *m = X509CertificateDetails{} } +func (m *X509CertificateDetails) String() string { return proto.CompactTextString(m) } +func (*X509CertificateDetails) ProtoMessage() {} +func (*X509CertificateDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{8} +} +func (m *X509CertificateDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_X509CertificateDetails.Unmarshal(m, b) +} +func (m *X509CertificateDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_X509CertificateDetails.Marshal(b, m, deterministic) +} +func (dst *X509CertificateDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_X509CertificateDetails.Merge(dst, src) +} +func (m *X509CertificateDetails) XXX_Size() int { + return xxx_messageInfo_X509CertificateDetails.Size(m) +} +func (m *X509CertificateDetails) XXX_DiscardUnknown() { + xxx_messageInfo_X509CertificateDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_X509CertificateDetails proto.InternalMessageInfo + +func (m *X509CertificateDetails) GetIssuer() string { + if m != nil { + return m.Issuer + } + return "" +} + +func (m *X509CertificateDetails) GetSubject() string { + if m != nil { + return m.Subject + } + return "" +} + +func (m *X509CertificateDetails) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *X509CertificateDetails) GetExpiryTime() *timestamp.Timestamp { + if m != nil { + return m.ExpiryTime + } + return nil +} + +func (m *X509CertificateDetails) GetSignatureAlgorithm() string { + if m != nil { + return m.SignatureAlgorithm + } + return "" +} + +func (m *X509CertificateDetails) GetPublicKeyType() string { + if m != nil { + return m.PublicKeyType + } + return "" +} + +// A public key certificate format and data. +type PublicKeyCertificate struct { + // The certificate format. + Format PublicKeyCertificateFormat `protobuf:"varint,1,opt,name=format,proto3,enum=google.cloud.iot.v1.PublicKeyCertificateFormat" json:"format,omitempty"` + // The certificate data. + Certificate string `protobuf:"bytes,2,opt,name=certificate,proto3" json:"certificate,omitempty"` + // [Output only] The certificate details. Used only for X.509 certificates. 
+ X509Details *X509CertificateDetails `protobuf:"bytes,3,opt,name=x509_details,json=x509Details,proto3" json:"x509_details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublicKeyCertificate) Reset() { *m = PublicKeyCertificate{} } +func (m *PublicKeyCertificate) String() string { return proto.CompactTextString(m) } +func (*PublicKeyCertificate) ProtoMessage() {} +func (*PublicKeyCertificate) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{9} +} +func (m *PublicKeyCertificate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublicKeyCertificate.Unmarshal(m, b) +} +func (m *PublicKeyCertificate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublicKeyCertificate.Marshal(b, m, deterministic) +} +func (dst *PublicKeyCertificate) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublicKeyCertificate.Merge(dst, src) +} +func (m *PublicKeyCertificate) XXX_Size() int { + return xxx_messageInfo_PublicKeyCertificate.Size(m) +} +func (m *PublicKeyCertificate) XXX_DiscardUnknown() { + xxx_messageInfo_PublicKeyCertificate.DiscardUnknown(m) +} + +var xxx_messageInfo_PublicKeyCertificate proto.InternalMessageInfo + +func (m *PublicKeyCertificate) GetFormat() PublicKeyCertificateFormat { + if m != nil { + return m.Format + } + return PublicKeyCertificateFormat_UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT +} + +func (m *PublicKeyCertificate) GetCertificate() string { + if m != nil { + return m.Certificate + } + return "" +} + +func (m *PublicKeyCertificate) GetX509Details() *X509CertificateDetails { + if m != nil { + return m.X509Details + } + return nil +} + +// A server-stored device credential used for authentication. +type DeviceCredential struct { + // The credential data. Reserved for expansion in the future. + // + // Types that are valid to be assigned to Credential: + // *DeviceCredential_PublicKey + Credential isDeviceCredential_Credential `protobuf_oneof:"credential"` + // [Optional] The time at which this credential becomes invalid. This + // credential will be ignored for new client authentication requests after + // this timestamp; however, it will not be automatically deleted. 
+ ExpirationTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=expiration_time,json=expirationTime,proto3" json:"expiration_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceCredential) Reset() { *m = DeviceCredential{} } +func (m *DeviceCredential) String() string { return proto.CompactTextString(m) } +func (*DeviceCredential) ProtoMessage() {} +func (*DeviceCredential) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{10} +} +func (m *DeviceCredential) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceCredential.Unmarshal(m, b) +} +func (m *DeviceCredential) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceCredential.Marshal(b, m, deterministic) +} +func (dst *DeviceCredential) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceCredential.Merge(dst, src) +} +func (m *DeviceCredential) XXX_Size() int { + return xxx_messageInfo_DeviceCredential.Size(m) +} +func (m *DeviceCredential) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceCredential.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceCredential proto.InternalMessageInfo + +type isDeviceCredential_Credential interface { + isDeviceCredential_Credential() +} + +type DeviceCredential_PublicKey struct { + PublicKey *PublicKeyCredential `protobuf:"bytes,2,opt,name=public_key,json=publicKey,proto3,oneof"` +} + +func (*DeviceCredential_PublicKey) isDeviceCredential_Credential() {} + +func (m *DeviceCredential) GetCredential() isDeviceCredential_Credential { + if m != nil { + return m.Credential + } + return nil +} + +func (m *DeviceCredential) GetPublicKey() *PublicKeyCredential { + if x, ok := m.GetCredential().(*DeviceCredential_PublicKey); ok { + return x.PublicKey + } + return nil +} + +func (m *DeviceCredential) GetExpirationTime() *timestamp.Timestamp { + if m != nil { + return m.ExpirationTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
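// Illustrative sketch, not part of the generated file above: the Credential
// oneof on DeviceCredential is set by wrapping a PublicKeyCredential in the
// generated DeviceCredential_PublicKey wrapper, and read back either through
// the GetPublicKey accessor or a type switch. The package alias iotpb and the
// key data are assumptions.
package main

import (
	"fmt"

	iotpb "google.golang.org/genproto/googleapis/cloud/iot/v1"
)

func main() {
	cred := &iotpb.DeviceCredential{
		Credential: &iotpb.DeviceCredential_PublicKey{
			PublicKey: &iotpb.PublicKeyCredential{Key: "-----BEGIN PUBLIC KEY-----\n..."},
		},
	}

	// GetPublicKey returns nil unless the oneof currently holds a public key.
	if pk := cred.GetPublicKey(); pk != nil {
		fmt.Println(len(pk.Key))
	}

	// Equivalently, switch on the concrete wrapper type.
	switch c := cred.Credential.(type) {
	case *iotpb.DeviceCredential_PublicKey:
		fmt.Println(len(c.PublicKey.Key))
	}
}
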
+func (*DeviceCredential) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DeviceCredential_OneofMarshaler, _DeviceCredential_OneofUnmarshaler, _DeviceCredential_OneofSizer, []interface{}{ + (*DeviceCredential_PublicKey)(nil), + } +} + +func _DeviceCredential_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DeviceCredential) + // credential + switch x := m.Credential.(type) { + case *DeviceCredential_PublicKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PublicKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DeviceCredential.Credential has unexpected type %T", x) + } + return nil +} + +func _DeviceCredential_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DeviceCredential) + switch tag { + case 2: // credential.public_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PublicKeyCredential) + err := b.DecodeMessage(msg) + m.Credential = &DeviceCredential_PublicKey{msg} + return true, err + default: + return false, nil + } +} + +func _DeviceCredential_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DeviceCredential) + // credential + switch x := m.Credential.(type) { + case *DeviceCredential_PublicKey: + s := proto.Size(x.PublicKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A public key format and data. +type PublicKeyCredential struct { + // The format of the key. + Format PublicKeyFormat `protobuf:"varint,1,opt,name=format,proto3,enum=google.cloud.iot.v1.PublicKeyFormat" json:"format,omitempty"` + // The key data. + Key string `protobuf:"bytes,2,opt,name=key,proto3" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublicKeyCredential) Reset() { *m = PublicKeyCredential{} } +func (m *PublicKeyCredential) String() string { return proto.CompactTextString(m) } +func (*PublicKeyCredential) ProtoMessage() {} +func (*PublicKeyCredential) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{11} +} +func (m *PublicKeyCredential) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublicKeyCredential.Unmarshal(m, b) +} +func (m *PublicKeyCredential) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublicKeyCredential.Marshal(b, m, deterministic) +} +func (dst *PublicKeyCredential) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublicKeyCredential.Merge(dst, src) +} +func (m *PublicKeyCredential) XXX_Size() int { + return xxx_messageInfo_PublicKeyCredential.Size(m) +} +func (m *PublicKeyCredential) XXX_DiscardUnknown() { + xxx_messageInfo_PublicKeyCredential.DiscardUnknown(m) +} + +var xxx_messageInfo_PublicKeyCredential proto.InternalMessageInfo + +func (m *PublicKeyCredential) GetFormat() PublicKeyFormat { + if m != nil { + return m.Format + } + return PublicKeyFormat_UNSPECIFIED_PUBLIC_KEY_FORMAT +} + +func (m *PublicKeyCredential) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +// The device configuration. Eventually delivered to devices. +type DeviceConfig struct { + // [Output only] The version of this update. 
The version number is assigned by + // the server, and is always greater than 0 after device creation. The + // version must be 0 on the `CreateDevice` request if a `config` is + // specified; the response of `CreateDevice` will always have a value of 1. + Version int64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + // [Output only] The time at which this configuration version was updated in + // Cloud IoT Core. This timestamp is set by the server. + CloudUpdateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=cloud_update_time,json=cloudUpdateTime,proto3" json:"cloud_update_time,omitempty"` + // [Output only] The time at which Cloud IoT Core received the + // acknowledgment from the device, indicating that the device has received + // this configuration version. If this field is not present, the device has + // not yet acknowledged that it received this version. Note that when + // the config was sent to the device, many config versions may have been + // available in Cloud IoT Core while the device was disconnected, and on + // connection, only the latest version is sent to the device. Some + // versions may never be sent to the device, and therefore are never + // acknowledged. This timestamp is set by Cloud IoT Core. + DeviceAckTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=device_ack_time,json=deviceAckTime,proto3" json:"device_ack_time,omitempty"` + // The device configuration data. + BinaryData []byte `protobuf:"bytes,4,opt,name=binary_data,json=binaryData,proto3" json:"binary_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceConfig) Reset() { *m = DeviceConfig{} } +func (m *DeviceConfig) String() string { return proto.CompactTextString(m) } +func (*DeviceConfig) ProtoMessage() {} +func (*DeviceConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{12} +} +func (m *DeviceConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceConfig.Unmarshal(m, b) +} +func (m *DeviceConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceConfig.Marshal(b, m, deterministic) +} +func (dst *DeviceConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceConfig.Merge(dst, src) +} +func (m *DeviceConfig) XXX_Size() int { + return xxx_messageInfo_DeviceConfig.Size(m) +} +func (m *DeviceConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceConfig proto.InternalMessageInfo + +func (m *DeviceConfig) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *DeviceConfig) GetCloudUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.CloudUpdateTime + } + return nil +} + +func (m *DeviceConfig) GetDeviceAckTime() *timestamp.Timestamp { + if m != nil { + return m.DeviceAckTime + } + return nil +} + +func (m *DeviceConfig) GetBinaryData() []byte { + if m != nil { + return m.BinaryData + } + return nil +} + +// The device state, as reported by the device. +type DeviceState struct { + // [Output only] The time at which this state version was updated in Cloud + // IoT Core. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The device state data. 
+ BinaryData []byte `protobuf:"bytes,2,opt,name=binary_data,json=binaryData,proto3" json:"binary_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceState) Reset() { *m = DeviceState{} } +func (m *DeviceState) String() string { return proto.CompactTextString(m) } +func (*DeviceState) ProtoMessage() {} +func (*DeviceState) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_84fa34c15d3e060d, []int{13} +} +func (m *DeviceState) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceState.Unmarshal(m, b) +} +func (m *DeviceState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceState.Marshal(b, m, deterministic) +} +func (dst *DeviceState) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceState.Merge(dst, src) +} +func (m *DeviceState) XXX_Size() int { + return xxx_messageInfo_DeviceState.Size(m) +} +func (m *DeviceState) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceState.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceState proto.InternalMessageInfo + +func (m *DeviceState) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *DeviceState) GetBinaryData() []byte { + if m != nil { + return m.BinaryData + } + return nil +} + +func init() { + proto.RegisterType((*Device)(nil), "google.cloud.iot.v1.Device") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.iot.v1.Device.MetadataEntry") + proto.RegisterType((*GatewayConfig)(nil), "google.cloud.iot.v1.GatewayConfig") + proto.RegisterType((*DeviceRegistry)(nil), "google.cloud.iot.v1.DeviceRegistry") + proto.RegisterType((*MqttConfig)(nil), "google.cloud.iot.v1.MqttConfig") + proto.RegisterType((*HttpConfig)(nil), "google.cloud.iot.v1.HttpConfig") + proto.RegisterType((*EventNotificationConfig)(nil), "google.cloud.iot.v1.EventNotificationConfig") + proto.RegisterType((*StateNotificationConfig)(nil), "google.cloud.iot.v1.StateNotificationConfig") + proto.RegisterType((*RegistryCredential)(nil), "google.cloud.iot.v1.RegistryCredential") + proto.RegisterType((*X509CertificateDetails)(nil), "google.cloud.iot.v1.X509CertificateDetails") + proto.RegisterType((*PublicKeyCertificate)(nil), "google.cloud.iot.v1.PublicKeyCertificate") + proto.RegisterType((*DeviceCredential)(nil), "google.cloud.iot.v1.DeviceCredential") + proto.RegisterType((*PublicKeyCredential)(nil), "google.cloud.iot.v1.PublicKeyCredential") + proto.RegisterType((*DeviceConfig)(nil), "google.cloud.iot.v1.DeviceConfig") + proto.RegisterType((*DeviceState)(nil), "google.cloud.iot.v1.DeviceState") + proto.RegisterEnum("google.cloud.iot.v1.MqttState", MqttState_name, MqttState_value) + proto.RegisterEnum("google.cloud.iot.v1.HttpState", HttpState_name, HttpState_value) + proto.RegisterEnum("google.cloud.iot.v1.LogLevel", LogLevel_name, LogLevel_value) + proto.RegisterEnum("google.cloud.iot.v1.GatewayType", GatewayType_name, GatewayType_value) + proto.RegisterEnum("google.cloud.iot.v1.GatewayAuthMethod", GatewayAuthMethod_name, GatewayAuthMethod_value) + proto.RegisterEnum("google.cloud.iot.v1.PublicKeyCertificateFormat", PublicKeyCertificateFormat_name, PublicKeyCertificateFormat_value) + proto.RegisterEnum("google.cloud.iot.v1.PublicKeyFormat", PublicKeyFormat_name, PublicKeyFormat_value) +} + +func init() { + proto.RegisterFile("google/cloud/iot/v1/resources.proto", fileDescriptor_resources_84fa34c15d3e060d) +} + +var 
fileDescriptor_resources_84fa34c15d3e060d = []byte{ + // 1672 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x58, 0xdd, 0x72, 0xdb, 0xc6, + 0x15, 0x36, 0xa8, 0x1f, 0x8b, 0x07, 0x22, 0x05, 0xae, 0x64, 0x09, 0xe1, 0x24, 0xb1, 0xcc, 0x34, + 0xa9, 0xa2, 0xb4, 0x64, 0xa2, 0x8e, 0xdd, 0x3a, 0xee, 0x74, 0x4a, 0x91, 0x90, 0xc4, 0x8a, 0x3f, + 0x2a, 0x08, 0x39, 0x75, 0x6e, 0x30, 0x4b, 0x60, 0x45, 0x22, 0x22, 0x01, 0x06, 0x58, 0xa8, 0xd1, + 0x03, 0xf4, 0x01, 0x7a, 0xd1, 0x99, 0xce, 0xf4, 0x25, 0xfa, 0x04, 0x7d, 0x87, 0x5e, 0xf4, 0x5d, + 0x7a, 0x99, 0xd9, 0x1f, 0xf0, 0xcf, 0xa0, 0x28, 0xdf, 0x61, 0xcf, 0x39, 0xdf, 0x77, 0x76, 0xcf, + 0xdf, 0xee, 0x00, 0x3e, 0xeb, 0x07, 0x41, 0x7f, 0x48, 0x2a, 0xce, 0x30, 0x88, 0xdd, 0x8a, 0x17, + 0xd0, 0xca, 0xdd, 0x37, 0x95, 0x90, 0x44, 0x41, 0x1c, 0x3a, 0x24, 0x2a, 0x8f, 0xc3, 0x80, 0x06, + 0x68, 0x57, 0x18, 0x95, 0xb9, 0x51, 0xd9, 0x0b, 0x68, 0xf9, 0xee, 0x9b, 0xe2, 0xc7, 0x12, 0x89, + 0xc7, 0x5e, 0x05, 0xfb, 0x7e, 0x40, 0x31, 0xf5, 0x02, 0x5f, 0x42, 0x8a, 0xcf, 0xa5, 0x96, 0xaf, + 0x7a, 0xf1, 0x4d, 0x85, 0x7a, 0x23, 0x12, 0x51, 0x3c, 0x1a, 0x4b, 0x83, 0x03, 0x69, 0x10, 0x8e, + 0x9d, 0x4a, 0x44, 0x31, 0x8d, 0x25, 0xb2, 0xf4, 0xcf, 0x2d, 0xd8, 0xac, 0x93, 0x3b, 0xcf, 0x21, + 0x28, 0x0f, 0x19, 0xcf, 0xd5, 0x95, 0x43, 0xe5, 0x28, 0x6b, 0x66, 0x3c, 0x17, 0x21, 0x58, 0xf7, + 0xf1, 0x88, 0xe8, 0x19, 0x2e, 0xe1, 0xdf, 0xe8, 0x19, 0x6c, 0xfa, 0xf1, 0xc8, 0xf6, 0x5c, 0x7d, + 0xed, 0x50, 0x39, 0x5a, 0x37, 0x37, 0xfc, 0x78, 0xd4, 0x70, 0xd1, 0x39, 0xa8, 0x4e, 0x48, 0x5c, + 0xe2, 0x53, 0x0f, 0x0f, 0x23, 0x7d, 0xfb, 0x70, 0xed, 0x48, 0x3d, 0xf9, 0xbc, 0x9c, 0x72, 0x90, + 0xb2, 0x70, 0x56, 0x9b, 0x58, 0x9b, 0xb3, 0x48, 0xf4, 0x27, 0xd8, 0x1d, 0xe2, 0x88, 0xda, 0x03, + 0x82, 0x43, 0xda, 0x23, 0x98, 0xda, 0xec, 0x24, 0xfa, 0xd3, 0x43, 0xe5, 0x48, 0x3d, 0x29, 0x26, + 0x84, 0xc9, 0x31, 0xcb, 0x56, 0x72, 0x4c, 0xb3, 0xc0, 0x60, 0x17, 0x09, 0x8a, 0xc9, 0xd1, 0x29, + 0xec, 0x70, 0x2e, 0x72, 0x47, 0x7c, 0xc9, 0xb3, 0xb5, 0x92, 0x27, 0xc7, 0x20, 0x06, 0x43, 0xcc, + 0x71, 0xb0, 0x98, 0x11, 0xc1, 0xb1, 0xf7, 0x38, 0x8e, 0x2e, 0x43, 0x70, 0x8e, 0x4b, 0xd8, 0xe3, + 0x1c, 0x4e, 0xe0, 0xdf, 0x78, 0x7d, 0x1b, 0x3b, 0xb7, 0x82, 0x28, 0xff, 0xb8, 0x43, 0xd5, 0x38, + 0xac, 0xea, 0xdc, 0x72, 0xb2, 0x16, 0x3c, 0x9b, 0x25, 0x8b, 0x88, 0xef, 0x0a, 0x36, 0xb4, 0x92, + 0x0d, 0x4d, 0xd9, 0xba, 0xc4, 0x77, 0x39, 0x9d, 0x0e, 0x4f, 0x7b, 0xc3, 0xc0, 0xb9, 0x25, 0xae, + 0xbe, 0x7b, 0xa8, 0x1c, 0x6d, 0x99, 0xc9, 0x72, 0x1a, 0xbd, 0x30, 0x0c, 0x42, 0xe1, 0x02, 0x1e, + 0x19, 0x3d, 0x86, 0xe0, 0xec, 0x7f, 0x80, 0xc2, 0x0c, 0x87, 0xa8, 0x3b, 0x5d, 0xe5, 0x2c, 0x28, + 0x61, 0x09, 0xc7, 0x4e, 0xb9, 0xcb, 0x35, 0xe6, 0xce, 0x04, 0x2d, 0x04, 0xe8, 0x35, 0x6c, 0x8a, + 0x73, 0xea, 0x39, 0x0e, 0x7a, 0xf1, 0x50, 0x45, 0x71, 0x43, 0x53, 0x02, 0xd0, 0x2b, 0xd8, 0xe0, + 0x39, 0xd3, 0x35, 0x8e, 0x3c, 0x7c, 0x00, 0xc9, 0x33, 0x65, 0x0a, 0x73, 0xf4, 0x2d, 0x64, 0x87, + 0x41, 0xdf, 0x1e, 0x92, 0x3b, 0x32, 0xd4, 0x9f, 0x1d, 0x2a, 0x47, 0xf9, 0x93, 0x4f, 0x52, 0xb1, + 0xcd, 0xa0, 0xdf, 0x64, 0x46, 0xe6, 0xd6, 0x50, 0x7e, 0x21, 0x03, 0xb6, 0x46, 0x84, 0x62, 0x17, + 0x53, 0xac, 0x17, 0x78, 0x0b, 0x7c, 0xf9, 0x80, 0xdb, 0x72, 0x4b, 0xda, 0x1a, 0x3e, 0x0d, 0xef, + 0xcd, 0x09, 0x14, 0x35, 0x20, 0xdf, 0xc7, 0x94, 0xfc, 0x15, 0xdf, 0xcb, 0x2c, 0xeb, 0x3a, 0x3f, + 0x43, 0x29, 0x95, 0xec, 0x5c, 0x98, 0xca, 0xe3, 0xe7, 0xfa, 0xb3, 0xcb, 0xe2, 0x1b, 0xc8, 0xcd, + 0x79, 0x41, 0x1a, 0xac, 0xdd, 0x92, 0x7b, 0xd9, 0xe4, 0xec, 0x13, 0xed, 0xc1, 0xc6, 0x1d, 0x1e, + 0xc6, 0x49, 0x9b, 
0x8b, 0xc5, 0xb7, 0x99, 0xdf, 0x29, 0xa5, 0xff, 0x64, 0x20, 0x37, 0xc7, 0x8e, + 0x6a, 0xb0, 0x9d, 0xec, 0x8c, 0xde, 0x8f, 0x09, 0xa7, 0xc9, 0x2f, 0x89, 0xad, 0x44, 0x5a, 0xf7, + 0x63, 0x62, 0xaa, 0xfd, 0xe9, 0x02, 0xbd, 0x85, 0xdd, 0x84, 0x04, 0xc7, 0x74, 0x60, 0x8f, 0x08, + 0x1d, 0x04, 0x2e, 0x77, 0x9f, 0x3f, 0xf9, 0xe2, 0x21, 0xae, 0x6a, 0x4c, 0x07, 0x2d, 0x6e, 0x6d, + 0x16, 0xfa, 0x8b, 0x22, 0xf4, 0x5b, 0xd0, 0x79, 0xb1, 0x61, 0xc7, 0x21, 0x51, 0x44, 0x5c, 0x3b, + 0xf1, 0x22, 0x87, 0x55, 0xd6, 0xe4, 0x9d, 0x53, 0x95, 0x6a, 0xc9, 0xd9, 0x70, 0xd1, 0x77, 0x50, + 0x4c, 0x07, 0xf2, 0xa2, 0x5f, 0x5f, 0x59, 0xf4, 0x07, 0x29, 0xb4, 0x4c, 0x5b, 0xfa, 0xc7, 0x3a, + 0xe4, 0x45, 0xae, 0x4d, 0xd2, 0xf7, 0x22, 0x16, 0xff, 0xc7, 0xcc, 0xd8, 0x1f, 0xa0, 0x28, 0x46, + 0x96, 0x1f, 0x50, 0xef, 0xc6, 0x73, 0xf8, 0xa4, 0x97, 0xa5, 0x10, 0xe9, 0xc0, 0x0b, 0xeb, 0x57, + 0xa9, 0x71, 0xe2, 0x73, 0xab, 0x3d, 0x83, 0x92, 0x55, 0xa1, 0x93, 0x74, 0x45, 0x84, 0x06, 0xf0, + 0x91, 0x18, 0x6d, 0x29, 0xbe, 0xe4, 0xd4, 0x4d, 0x77, 0xc5, 0x9b, 0x26, 0xc5, 0xd5, 0x41, 0x94, + 0xae, 0x40, 0x7f, 0x04, 0x75, 0xf4, 0x23, 0x4d, 0x06, 0x97, 0x0c, 0xeb, 0xf3, 0x54, 0xee, 0xd6, + 0x8f, 0x54, 0xce, 0x29, 0x13, 0x46, 0x93, 0x6f, 0xc6, 0x30, 0xa0, 0x74, 0x9c, 0x30, 0x64, 0x1f, + 0x60, 0xb8, 0xa0, 0x74, 0x9c, 0x30, 0x0c, 0x26, 0xdf, 0xf3, 0xcd, 0xad, 0x7e, 0x58, 0x73, 0x37, + 0xe6, 0xaf, 0xb8, 0x2d, 0x9e, 0x86, 0x5f, 0xa6, 0xa2, 0x93, 0x6c, 0x2f, 0xb9, 0xe4, 0x4a, 0xdf, + 0x03, 0x4c, 0x8f, 0x88, 0x9a, 0x80, 0x78, 0x60, 0x88, 0x8f, 0x7b, 0x43, 0xe2, 0x8a, 0xab, 0x46, + 0xb6, 0xd6, 0xa7, 0x4b, 0xe3, 0x23, 0x86, 0x96, 0xc6, 0x90, 0x86, 0x00, 0x72, 0x09, 0xe3, 0x9e, + 0x1e, 0x9e, 0x71, 0xf3, 0x90, 0x3d, 0x9e, 0x9b, 0x81, 0x25, 0x37, 0x43, 0xce, 0x71, 0x87, 0x70, + 0xb0, 0xa4, 0xc2, 0xd0, 0x57, 0x50, 0x88, 0xe2, 0xde, 0x4d, 0x30, 0x74, 0x49, 0x68, 0x8f, 0x30, + 0x75, 0x06, 0x24, 0x92, 0x45, 0xad, 0x4d, 0x14, 0x2d, 0x21, 0x47, 0xc7, 0x50, 0x18, 0xc7, 0xbd, + 0x28, 0xee, 0xd9, 0x34, 0x18, 0x7b, 0x8e, 0xcd, 0x3b, 0x40, 0xf4, 0xc4, 0x8e, 0x50, 0x58, 0x4c, + 0xde, 0xc6, 0x23, 0x52, 0x32, 0xe0, 0x60, 0x49, 0xa9, 0x7d, 0x10, 0xcd, 0xdf, 0x14, 0x40, 0xef, + 0xa7, 0x05, 0x61, 0xd8, 0x1f, 0xc7, 0xbd, 0xa1, 0xe7, 0xd8, 0xb7, 0xe4, 0xde, 0x76, 0x48, 0x28, + 0x9d, 0x08, 0x9e, 0x65, 0xf3, 0xfb, 0x8a, 0x43, 0x2e, 0xc9, 0x7d, 0x6d, 0x0a, 0xb8, 0x78, 0x62, + 0xee, 0x8d, 0x53, 0xe4, 0xa7, 0xdb, 0x00, 0xd3, 0xdc, 0x97, 0xfe, 0x95, 0x81, 0xfd, 0xbf, 0xbc, + 0xfc, 0xfa, 0xf5, 0x8c, 0x45, 0x9d, 0x50, 0xec, 0x0d, 0x23, 0xb4, 0x0f, 0x9b, 0x5e, 0x14, 0xc5, + 0x24, 0x94, 0x67, 0x90, 0x2b, 0x76, 0x45, 0x47, 0x71, 0xef, 0x07, 0xe2, 0x50, 0x19, 0xd0, 0x64, + 0x89, 0x5e, 0x03, 0x44, 0x14, 0x87, 0xf2, 0x6d, 0xb3, 0xb6, 0x72, 0x50, 0x65, 0xb9, 0x35, 0xbf, + 0x99, 0xdf, 0x80, 0x4a, 0x7e, 0x1a, 0x7b, 0xe1, 0xa3, 0x87, 0x1c, 0x08, 0x73, 0x0e, 0xae, 0xc0, + 0x6e, 0xe4, 0xf5, 0x7d, 0x4c, 0xe3, 0x90, 0xd8, 0x78, 0xd8, 0x0f, 0x42, 0x8f, 0x0e, 0x46, 0xfa, + 0x06, 0xdf, 0x1d, 0x9a, 0xa8, 0xaa, 0x89, 0x06, 0x7d, 0x01, 0x3b, 0x33, 0x61, 0xe6, 0x57, 0xc7, + 0x26, 0x37, 0xce, 0x4d, 0x42, 0xc6, 0xae, 0x86, 0xd2, 0x7f, 0x15, 0xd8, 0x4b, 0x0b, 0x2e, 0x3a, + 0x87, 0xcd, 0x9b, 0x20, 0x1c, 0x61, 0x2a, 0x6b, 0xb7, 0xf2, 0xe8, 0xbc, 0x9c, 0x71, 0x98, 0x29, + 0xe1, 0xe8, 0x10, 0xd4, 0xd9, 0x2c, 0x8b, 0x80, 0xce, 0x8a, 0x50, 0x1b, 0xb6, 0x7f, 0x7a, 0xf9, + 0xf5, 0x6b, 0xdb, 0x15, 0x69, 0x91, 0x61, 0xfd, 0x2a, 0xd5, 0x61, 0x7a, 0x26, 0x4d, 0x95, 0x11, + 0xc8, 0x45, 0xe9, 0xdf, 0x0a, 0x68, 0x8b, 0x6f, 0x5e, 0xd4, 0x00, 0x98, 0x06, 0x84, 0xef, 0x42, + 0x3d, 0x39, 0x5a, 0x71, 0xa6, 0x09, 0xfa, 
0xe2, 0x89, 0x99, 0x9d, 0xc4, 0x0d, 0xd5, 0x60, 0x87, + 0xa7, 0x46, 0x4c, 0x6e, 0x9e, 0xcd, 0xcd, 0x95, 0xd9, 0xcc, 0x4f, 0x21, 0x4c, 0xb8, 0x50, 0xa4, + 0x04, 0x76, 0x53, 0xdc, 0xa2, 0xdf, 0x2f, 0x24, 0xe1, 0x17, 0x0f, 0x6f, 0x78, 0x21, 0xf2, 0xf2, + 0xe5, 0x91, 0x99, 0xbc, 0x3c, 0x4a, 0xff, 0x53, 0x60, 0x7b, 0xf6, 0xed, 0xc6, 0x2a, 0xfd, 0x8e, + 0x84, 0x91, 0x17, 0xf8, 0xdc, 0xc3, 0x9a, 0x99, 0x2c, 0xd1, 0x19, 0x14, 0xb8, 0x13, 0x3b, 0x1e, + 0xbb, 0x93, 0x87, 0x78, 0x66, 0xe5, 0x31, 0x77, 0x38, 0xe8, 0x9a, 0x63, 0x92, 0xe7, 0xbc, 0xcb, + 0x3d, 0x4e, 0x5f, 0xe1, 0xab, 0xdb, 0x26, 0x27, 0x20, 0xc9, 0x0b, 0xfc, 0x39, 0xa8, 0x3d, 0xcf, + 0xc7, 0xe1, 0xbd, 0xcd, 0x1f, 0x7a, 0xac, 0x75, 0xb6, 0x4d, 0x10, 0xa2, 0x3a, 0xa6, 0xb8, 0x74, + 0x0b, 0xea, 0xcc, 0xc3, 0x92, 0xb5, 0xda, 0xec, 0xae, 0x95, 0xd5, 0xad, 0x16, 0x4f, 0x37, 0xbc, + 0xe0, 0x2c, 0xb3, 0xe8, 0xec, 0xb8, 0x09, 0xd9, 0xc9, 0x75, 0x80, 0x8a, 0xb0, 0xdf, 0xfa, 0xb3, + 0x65, 0xd9, 0x5d, 0xab, 0x6a, 0x19, 0xf6, 0x75, 0xbb, 0x7b, 0x65, 0xd4, 0x1a, 0x67, 0x0d, 0xa3, + 0xae, 0x3d, 0x41, 0x1a, 0x6c, 0x73, 0x9d, 0xd1, 0xae, 0x9e, 0x36, 0x8d, 0xba, 0xa6, 0xa0, 0x02, + 0xe4, 0xb8, 0xa4, 0xde, 0xe8, 0x0a, 0x51, 0x86, 0xb1, 0x4d, 0x2e, 0x00, 0xc6, 0x76, 0x61, 0x59, + 0x57, 0xcb, 0xd8, 0xb8, 0x6e, 0x8e, 0x8d, 0x4b, 0x66, 0xd8, 0x3a, 0xb0, 0x95, 0x5c, 0xa4, 0xe8, + 0x23, 0x78, 0xd6, 0xec, 0x9c, 0xdb, 0x4d, 0xe3, 0xad, 0xd1, 0x5c, 0xe0, 0xda, 0x82, 0xf5, 0x76, + 0xa7, 0x6d, 0x68, 0x80, 0xb2, 0xb0, 0x61, 0x98, 0x66, 0xc7, 0xd4, 0xf6, 0x98, 0xb0, 0xd1, 0x3e, + 0xeb, 0x68, 0x9f, 0x32, 0x61, 0xdd, 0x38, 0xbd, 0x3e, 0xd7, 0x8e, 0x8e, 0x1b, 0xa0, 0xce, 0x3c, + 0x2b, 0xd1, 0xc7, 0xa0, 0x9f, 0x57, 0x2d, 0xe3, 0xbb, 0xea, 0x3b, 0xdb, 0x7a, 0x77, 0xb5, 0xb8, + 0x45, 0x15, 0x9e, 0x4a, 0xad, 0xa6, 0xa0, 0x1d, 0x50, 0xdb, 0x9d, 0xb6, 0x9d, 0x08, 0x32, 0xc7, + 0x7f, 0x57, 0xa0, 0xf0, 0xde, 0xb3, 0x12, 0x7d, 0x06, 0xcf, 0x13, 0xc6, 0xea, 0xb5, 0x75, 0x61, + 0xb7, 0x0c, 0xeb, 0xa2, 0x53, 0x5f, 0x20, 0xde, 0x03, 0xad, 0xda, 0xed, 0x76, 0x6a, 0x8d, 0xaa, + 0xd5, 0xe8, 0xb4, 0xed, 0x4e, 0xbb, 0xc9, 0x3c, 0x14, 0x61, 0xbf, 0x6e, 0xbc, 0x6d, 0xd4, 0x0c, + 0x81, 0xb4, 0x3a, 0x97, 0x86, 0xd4, 0x65, 0xd0, 0xe7, 0xf0, 0x62, 0x16, 0x51, 0x6d, 0xd7, 0xed, + 0xf7, 0x6c, 0xb5, 0xb5, 0x63, 0x02, 0xc5, 0xe5, 0x23, 0x0c, 0xfd, 0x1a, 0xbe, 0x9c, 0xd9, 0x87, + 0x7d, 0x75, 0x7d, 0xda, 0x6c, 0xd4, 0xec, 0x4b, 0xe3, 0x9d, 0x5d, 0x33, 0x4c, 0xab, 0x71, 0xd6, + 0xa8, 0xb1, 0x34, 0x9d, 0x75, 0xcc, 0x56, 0xd5, 0xd2, 0x9e, 0x20, 0x1d, 0xf6, 0xd8, 0x78, 0x9a, + 0x53, 0x5e, 0x19, 0x2d, 0x4d, 0x39, 0xbe, 0x83, 0x9d, 0x85, 0x26, 0x45, 0x2f, 0xe0, 0x93, 0x25, + 0xdc, 0x13, 0x3e, 0x15, 0x9e, 0x9a, 0xdd, 0x2a, 0xa7, 0x58, 0x63, 0xe9, 0x67, 0x0b, 0xee, 0x80, + 0x93, 0xa2, 0x1c, 0x64, 0x8d, 0xee, 0xc9, 0xcb, 0x57, 0x7c, 0x99, 0x41, 0x08, 0xf2, 0x62, 0x39, + 0x31, 0x59, 0x3f, 0xbd, 0x81, 0x03, 0x27, 0x18, 0xa5, 0x0d, 0x8d, 0xd3, 0xbc, 0x99, 0xfc, 0x03, + 0xb9, 0x62, 0xfd, 0x70, 0xa5, 0x7c, 0xff, 0x4a, 0x9a, 0xf5, 0x83, 0x21, 0xf6, 0xfb, 0xe5, 0x20, + 0xec, 0x57, 0xfa, 0xc4, 0xe7, 0xdd, 0x52, 0x11, 0x2a, 0x3c, 0xf6, 0xa2, 0xb9, 0x1f, 0x29, 0x6f, + 0xbc, 0x80, 0xfe, 0x5f, 0x51, 0x7a, 0x9b, 0xdc, 0xea, 0x37, 0x3f, 0x07, 0x00, 0x00, 0xff, 0xff, + 0x19, 0x42, 0x91, 0x59, 0x6d, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents.pb.go new file mode 100644 index 0000000..3a82faa --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents.pb.go @@ -0,0 +1,1808 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/irm/v1alpha2/incidents.proto + +package irm // import "google.golang.org/genproto/googleapis/cloud/irm/v1alpha2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/monitoring/v3" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes whether the alerting condition is still firing. +type Signal_State int32 + +const ( + // Unspecified + Signal_STATE_UNSPECIFIED Signal_State = 0 + // Firing + Signal_STATE_OPEN Signal_State = 1 + // Non-firing + Signal_STATE_CLOSED Signal_State = 2 +) + +var Signal_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "STATE_OPEN", + 2: "STATE_CLOSED", +} +var Signal_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "STATE_OPEN": 1, + "STATE_CLOSED": 2, +} + +func (x Signal_State) String() string { + return proto.EnumName(Signal_State_name, int32(x)) +} +func (Signal_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{1, 0} +} + +// Specifies the escalation level of this incident, within the IRM protocol +// for handling incidents. +type Incident_EscalationLevel int32 + +const ( + // The incident has not been escalated. This is the value used by all new + // and legacy incidents. + Incident_ESCALATION_LEVEL_UNSPECIFIED Incident_EscalationLevel = 0 + // The incident has been escalated to the organizational level. + Incident_ESCALATION_LEVEL_ORGANIZATION Incident_EscalationLevel = 1 +) + +var Incident_EscalationLevel_name = map[int32]string{ + 0: "ESCALATION_LEVEL_UNSPECIFIED", + 1: "ESCALATION_LEVEL_ORGANIZATION", +} +var Incident_EscalationLevel_value = map[string]int32{ + "ESCALATION_LEVEL_UNSPECIFIED": 0, + "ESCALATION_LEVEL_ORGANIZATION": 1, +} + +func (x Incident_EscalationLevel) String() string { + return proto.EnumName(Incident_EscalationLevel_name, int32(x)) +} +func (Incident_EscalationLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5, 0} +} + +// Severity of an incident. +type Incident_Severity int32 + +const ( + // Severity is not specified. + Incident_SEVERITY_UNSPECIFIED Incident_Severity = 0 + // Huge incident. + Incident_SEVERITY_HUGE Incident_Severity = 1 + // Major incident. + Incident_SEVERITY_MAJOR Incident_Severity = 2 + // Medium incident. + Incident_SEVERITY_MEDIUM Incident_Severity = 3 + // Minor incident. + Incident_SEVERITY_MINOR Incident_Severity = 4 + // Negligible incident. 
+ Incident_SEVERITY_NEGLIGIBLE Incident_Severity = 5 +) + +var Incident_Severity_name = map[int32]string{ + 0: "SEVERITY_UNSPECIFIED", + 1: "SEVERITY_HUGE", + 2: "SEVERITY_MAJOR", + 3: "SEVERITY_MEDIUM", + 4: "SEVERITY_MINOR", + 5: "SEVERITY_NEGLIGIBLE", +} +var Incident_Severity_value = map[string]int32{ + "SEVERITY_UNSPECIFIED": 0, + "SEVERITY_HUGE": 1, + "SEVERITY_MAJOR": 2, + "SEVERITY_MEDIUM": 3, + "SEVERITY_MINOR": 4, + "SEVERITY_NEGLIGIBLE": 5, +} + +func (x Incident_Severity) String() string { + return proto.EnumName(Incident_Severity_name, int32(x)) +} +func (Incident_Severity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5, 1} +} + +// Stage of an incident. +type Incident_Stage int32 + +const ( + // This is the default value if no stage has been specified. + // Note: The caller of the API should set the stage to DETECTED. + Incident_STAGE_UNSPECIFIED Incident_Stage = 0 + // The incident has been detected. This is the initial stage of a new + // incident. + // Note: The caller still has to set the stage manually. + Incident_STAGE_DETECTED Incident_Stage = 4 + // This incident has been formally characterized. + Incident_STAGE_TRIAGED Incident_Stage = 1 + // This incident has been mitigated, i.e. does not affect the service level + // anymore. + Incident_STAGE_MITIGATED Incident_Stage = 2 + // This incident has been fully resolved, i.e. there are no immediate + // follow-up tasks. + Incident_STAGE_RESOLVED Incident_Stage = 3 + // Postmortem for the incident was written. + Incident_STAGE_DOCUMENTED Incident_Stage = 5 + // Stage for an incident with `duplicate_incident`. This incident is not + // authoritative anymore and the `duplicate_incident` should be used to + // determine the stage. + Incident_STAGE_DUPLICATE Incident_Stage = 6 +) + +var Incident_Stage_name = map[int32]string{ + 0: "STAGE_UNSPECIFIED", + 4: "STAGE_DETECTED", + 1: "STAGE_TRIAGED", + 2: "STAGE_MITIGATED", + 3: "STAGE_RESOLVED", + 5: "STAGE_DOCUMENTED", + 6: "STAGE_DUPLICATE", +} +var Incident_Stage_value = map[string]int32{ + "STAGE_UNSPECIFIED": 0, + "STAGE_DETECTED": 4, + "STAGE_TRIAGED": 1, + "STAGE_MITIGATED": 2, + "STAGE_RESOLVED": 3, + "STAGE_DOCUMENTED": 5, + "STAGE_DUPLICATE": 6, +} + +func (x Incident_Stage) String() string { + return proto.EnumName(Incident_Stage_name, int32(x)) +} +func (Incident_Stage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5, 2} +} + +// The type of channel/venue for incident communications. +type Incident_CommunicationVenue_ChannelType int32 + +const ( + // An unspecified communication channel. + Incident_CommunicationVenue_CHANNEL_TYPE_UNSPECIFIED Incident_CommunicationVenue_ChannelType = 0 + // A communication channel that is represented by a generic URI. + Incident_CommunicationVenue_CHANNEL_TYPE_URI Incident_CommunicationVenue_ChannelType = 1 + // A communication channel that represents a Slack channel. 
+ Incident_CommunicationVenue_CHANNEL_TYPE_SLACK Incident_CommunicationVenue_ChannelType = 5 +) + +var Incident_CommunicationVenue_ChannelType_name = map[int32]string{ + 0: "CHANNEL_TYPE_UNSPECIFIED", + 1: "CHANNEL_TYPE_URI", + 5: "CHANNEL_TYPE_SLACK", +} +var Incident_CommunicationVenue_ChannelType_value = map[string]int32{ + "CHANNEL_TYPE_UNSPECIFIED": 0, + "CHANNEL_TYPE_URI": 1, + "CHANNEL_TYPE_SLACK": 5, +} + +func (x Incident_CommunicationVenue_ChannelType) String() string { + return proto.EnumName(Incident_CommunicationVenue_ChannelType_name, int32(x)) +} +func (Incident_CommunicationVenue_ChannelType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5, 0, 0} +} + +// List of possible roles. +type IncidentRole_Type int32 + +const ( + // The role is unspecified. + IncidentRole_TYPE_UNSPECIFIED IncidentRole_Type = 0 + // Incident Commander: Manages response plan, near-term and long-term + // objectives, establishes priorities, and delegates tasks as needed. + IncidentRole_TYPE_INCIDENT_COMMANDER IncidentRole_Type = 1 + // Communications Lead: Keeps everybody outside and within the response team + // informed. + IncidentRole_TYPE_COMMUNICATIONS_LEAD IncidentRole_Type = 2 + // Operations Lead: Figures out what to do, and gets it done. + IncidentRole_TYPE_OPERATIONS_LEAD IncidentRole_Type = 3 + // External Customer Communications Lead: Responsible for communicating + // incident details to customers/public. + IncidentRole_TYPE_EXTERNAL_CUSTOMER_COMMUNICATIONS_LEAD IncidentRole_Type = 4 + // Primary Oncall: Responds to the initial page and handles all + // responsibilities for pre-escalated incidents. + IncidentRole_TYPE_PRIMARY_ONCALL IncidentRole_Type = 5 + // Secondary Oncall: Helps the primary oncall if necessary; mostly useful + // for pre-escalated incidents. + IncidentRole_TYPE_SECONDARY_ONCALL IncidentRole_Type = 6 + // User-specified roles. One example is a Planning Lead, who keeps track of + // the incident. Another is an assistant Incident Commander. + IncidentRole_TYPE_OTHER IncidentRole_Type = 7 +) + +var IncidentRole_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "TYPE_INCIDENT_COMMANDER", + 2: "TYPE_COMMUNICATIONS_LEAD", + 3: "TYPE_OPERATIONS_LEAD", + 4: "TYPE_EXTERNAL_CUSTOMER_COMMUNICATIONS_LEAD", + 5: "TYPE_PRIMARY_ONCALL", + 6: "TYPE_SECONDARY_ONCALL", + 7: "TYPE_OTHER", +} +var IncidentRole_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "TYPE_INCIDENT_COMMANDER": 1, + "TYPE_COMMUNICATIONS_LEAD": 2, + "TYPE_OPERATIONS_LEAD": 3, + "TYPE_EXTERNAL_CUSTOMER_COMMUNICATIONS_LEAD": 4, + "TYPE_PRIMARY_ONCALL": 5, + "TYPE_SECONDARY_ONCALL": 6, + "TYPE_OTHER": 7, +} + +func (x IncidentRole_Type) String() string { + return proto.EnumName(IncidentRole_Type_name, int32(x)) +} +func (IncidentRole_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{6, 0} +} + +// Possible types of an artifact. +type Artifact_Type int32 + +const ( + // External type is unspecified. + Artifact_TYPE_UNSPECIFIED Artifact_Type = 0 + // URL. + Artifact_TYPE_URL Artifact_Type = 1 + // A JIRA issue. 
+ Artifact_TYPE_JIRA_ISSUE Artifact_Type = 4 +) + +var Artifact_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "TYPE_URL", + 4: "TYPE_JIRA_ISSUE", +} +var Artifact_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "TYPE_URL": 1, + "TYPE_JIRA_ISSUE": 4, +} + +func (x Artifact_Type) String() string { + return proto.EnumName(Artifact_Type_name, int32(x)) +} +func (Artifact_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{8, 0} +} + +// Types of changes that users can subscribe to in an incident. +type Subscription_EventType int32 + +const ( + // An event_type that's not specified is an error. + Subscription_EVENT_TYPE_UNSPECIFIED Subscription_EventType = 0 + // The incident's title has changed. + Subscription_EVENT_TYPE_TITLE_CHANGE Subscription_EventType = 1 + // The incident's synopsis has changed. + Subscription_EVENT_TYPE_SYNOPSIS_CHANGE Subscription_EventType = 2 + // The incident's stage has changed. + Subscription_EVENT_TYPE_STAGE_CHANGE Subscription_EventType = 3 + // The incident's severity has changed. + Subscription_EVENT_TYPE_SEVERITY_CHANGE Subscription_EventType = 4 + // A new annotation has been added to the incident. + Subscription_EVENT_TYPE_ANNOTATION_ADD Subscription_EventType = 5 + // An annotation has been modified. + Subscription_EVENT_TYPE_ANNOTATION_CHANGE Subscription_EventType = 6 +) + +var Subscription_EventType_name = map[int32]string{ + 0: "EVENT_TYPE_UNSPECIFIED", + 1: "EVENT_TYPE_TITLE_CHANGE", + 2: "EVENT_TYPE_SYNOPSIS_CHANGE", + 3: "EVENT_TYPE_STAGE_CHANGE", + 4: "EVENT_TYPE_SEVERITY_CHANGE", + 5: "EVENT_TYPE_ANNOTATION_ADD", + 6: "EVENT_TYPE_ANNOTATION_CHANGE", +} +var Subscription_EventType_value = map[string]int32{ + "EVENT_TYPE_UNSPECIFIED": 0, + "EVENT_TYPE_TITLE_CHANGE": 1, + "EVENT_TYPE_SYNOPSIS_CHANGE": 2, + "EVENT_TYPE_STAGE_CHANGE": 3, + "EVENT_TYPE_SEVERITY_CHANGE": 4, + "EVENT_TYPE_ANNOTATION_ADD": 5, + "EVENT_TYPE_ANNOTATION_CHANGE": 6, +} + +func (x Subscription_EventType) String() string { + return proto.EnumName(Subscription_EventType_name, int32(x)) +} +func (Subscription_EventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{10, 0} +} + +// A user of the IRM app. +type User struct { + // One of several ways to uniquely identify a user. 
+ // + // Types that are valid to be assigned to User: + // *User_UserId + // *User_Email + User isUser_User `protobuf_oneof:"user"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *User) Reset() { *m = User{} } +func (m *User) String() string { return proto.CompactTextString(m) } +func (*User) ProtoMessage() {} +func (*User) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{0} +} +func (m *User) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_User.Unmarshal(m, b) +} +func (m *User) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_User.Marshal(b, m, deterministic) +} +func (dst *User) XXX_Merge(src proto.Message) { + xxx_messageInfo_User.Merge(dst, src) +} +func (m *User) XXX_Size() int { + return xxx_messageInfo_User.Size(m) +} +func (m *User) XXX_DiscardUnknown() { + xxx_messageInfo_User.DiscardUnknown(m) +} + +var xxx_messageInfo_User proto.InternalMessageInfo + +type isUser_User interface { + isUser_User() +} + +type User_UserId struct { + UserId string `protobuf:"bytes,1,opt,name=user_id,json=userId,proto3,oneof"` +} + +type User_Email struct { + Email string `protobuf:"bytes,2,opt,name=email,proto3,oneof"` +} + +func (*User_UserId) isUser_User() {} + +func (*User_Email) isUser_User() {} + +func (m *User) GetUser() isUser_User { + if m != nil { + return m.User + } + return nil +} + +func (m *User) GetUserId() string { + if x, ok := m.GetUser().(*User_UserId); ok { + return x.UserId + } + return "" +} + +func (m *User) GetEmail() string { + if x, ok := m.GetUser().(*User_Email); ok { + return x.Email + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
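// Illustrative sketch, not part of the generated file above: the User message
// identifies a person either by user ID or by email through the User oneof,
// so only one wrapper can be set at a time. The email address is an
// assumption made for the example only.
package main

import (
	"fmt"

	"google.golang.org/genproto/googleapis/cloud/irm/v1alpha2"
)

func main() {
	u := &irm.User{User: &irm.User_Email{Email: "oncall@example.com"}}

	// The generated accessors return the zero value when the oneof holds a
	// different wrapper, so GetUserId is empty here while GetEmail is set.
	fmt.Println(u.GetUserId() == "") // true
	fmt.Println(u.GetEmail())        // oncall@example.com
}
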
+func (*User) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _User_OneofMarshaler, _User_OneofUnmarshaler, _User_OneofSizer, []interface{}{ + (*User_UserId)(nil), + (*User_Email)(nil), + } +} + +func _User_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*User) + // user + switch x := m.User.(type) { + case *User_UserId: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.UserId) + case *User_Email: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Email) + case nil: + default: + return fmt.Errorf("User.User has unexpected type %T", x) + } + return nil +} + +func _User_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*User) + switch tag { + case 1: // user.user_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.User = &User_UserId{x} + return true, err + case 2: // user.email + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.User = &User_Email{x} + return true, err + default: + return false, nil + } +} + +func _User_OneofSizer(msg proto.Message) (n int) { + m := msg.(*User) + // user + switch x := m.User.(type) { + case *User_UserId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.UserId))) + n += len(x.UserId) + case *User_Email: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Email))) + n += len(x.Email) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A signal is a message calling attention to a (potential) incident. An example +// is a page based on a Stackdriver Alerting policy. +type Signal struct { + // Resource name of the signal, for example, + // "projects/{project_id}/signals/{signal_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Etag to validate the object is unchanged for a read-modify-write operation. + // An empty etag will overwrite other changes. + Etag string `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + // Resource name of the incident this signal is currently assigned to. + // May be empty if signal is unassigned. + Incident string `protobuf:"bytes,3,opt,name=incident,proto3" json:"incident,omitempty"` + // Output only. Time this signal was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. Time this signal was closed. This field is not populated + // while the signal is still firing. + CloseTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=close_time,json=closeTime,proto3" json:"close_time,omitempty"` + // The time this Signal was first detected. This is identical to create_time + // for Signals created by Stackdriver Alerting. + DetectTime *timestamp.Timestamp `protobuf:"bytes,15,opt,name=detect_time,json=detectTime,proto3" json:"detect_time,omitempty"` + // Output only. The user that created this signal for manually created + // signals. Empty if this signal was generated by a system (for example, + // Stackdriver Alerting). + Creator *User `protobuf:"bytes,5,opt,name=creator,proto3" json:"creator,omitempty"` + // One-line summary of the signal. + // Immutable. 
+ Title string `protobuf:"bytes,6,opt,name=title,proto3" json:"title,omitempty"` + // Content type string, for example, 'text/plain' or'text/html'. + ContentType string `protobuf:"bytes,7,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // Full message of the signal. + // Immutable for Signals created by Stackdriver Alerting. + Content string `protobuf:"bytes,8,opt,name=content,proto3" json:"content,omitempty"` + // The state of this signal. + // For Signals created by Stackdriver Alerting this field is output only. + SignalState Signal_State `protobuf:"varint,9,opt,name=signal_state,json=signalState,proto3,enum=google.cloud.irm.v1alpha2.Signal_State" json:"signal_state,omitempty"` + // A set of artifacts to additional resources for this Signal. For example, a + // link to Stackdriver logging for the Signal. + // Immutable for Signals created by Stackdriver Alerting. + SignalArtifacts []*Signal_SignalArtifact `protobuf:"bytes,16,rep,name=signal_artifacts,json=signalArtifacts,proto3" json:"signal_artifacts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Signal) Reset() { *m = Signal{} } +func (m *Signal) String() string { return proto.CompactTextString(m) } +func (*Signal) ProtoMessage() {} +func (*Signal) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{1} +} +func (m *Signal) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Signal.Unmarshal(m, b) +} +func (m *Signal) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Signal.Marshal(b, m, deterministic) +} +func (dst *Signal) XXX_Merge(src proto.Message) { + xxx_messageInfo_Signal.Merge(dst, src) +} +func (m *Signal) XXX_Size() int { + return xxx_messageInfo_Signal.Size(m) +} +func (m *Signal) XXX_DiscardUnknown() { + xxx_messageInfo_Signal.DiscardUnknown(m) +} + +var xxx_messageInfo_Signal proto.InternalMessageInfo + +func (m *Signal) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Signal) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *Signal) GetIncident() string { + if m != nil { + return m.Incident + } + return "" +} + +func (m *Signal) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Signal) GetCloseTime() *timestamp.Timestamp { + if m != nil { + return m.CloseTime + } + return nil +} + +func (m *Signal) GetDetectTime() *timestamp.Timestamp { + if m != nil { + return m.DetectTime + } + return nil +} + +func (m *Signal) GetCreator() *User { + if m != nil { + return m.Creator + } + return nil +} + +func (m *Signal) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Signal) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *Signal) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *Signal) GetSignalState() Signal_State { + if m != nil { + return m.SignalState + } + return Signal_STATE_UNSPECIFIED +} + +func (m *Signal) GetSignalArtifacts() []*Signal_SignalArtifact { + if m != nil { + return m.SignalArtifacts + } + return nil +} + +// An artifact associated with the Signal. 
+type Signal_SignalArtifact struct { + // The type of resource linked to + // + // Types that are valid to be assigned to ArtifactType: + // *Signal_SignalArtifact_UserType + ArtifactType isSignal_SignalArtifact_ArtifactType `protobuf_oneof:"artifact_type"` + // The URI for the artifact. + Uri string `protobuf:"bytes,3,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Signal_SignalArtifact) Reset() { *m = Signal_SignalArtifact{} } +func (m *Signal_SignalArtifact) String() string { return proto.CompactTextString(m) } +func (*Signal_SignalArtifact) ProtoMessage() {} +func (*Signal_SignalArtifact) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{1, 0} +} +func (m *Signal_SignalArtifact) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Signal_SignalArtifact.Unmarshal(m, b) +} +func (m *Signal_SignalArtifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Signal_SignalArtifact.Marshal(b, m, deterministic) +} +func (dst *Signal_SignalArtifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_Signal_SignalArtifact.Merge(dst, src) +} +func (m *Signal_SignalArtifact) XXX_Size() int { + return xxx_messageInfo_Signal_SignalArtifact.Size(m) +} +func (m *Signal_SignalArtifact) XXX_DiscardUnknown() { + xxx_messageInfo_Signal_SignalArtifact.DiscardUnknown(m) +} + +var xxx_messageInfo_Signal_SignalArtifact proto.InternalMessageInfo + +type isSignal_SignalArtifact_ArtifactType interface { + isSignal_SignalArtifact_ArtifactType() +} + +type Signal_SignalArtifact_UserType struct { + UserType string `protobuf:"bytes,2,opt,name=user_type,json=userType,proto3,oneof"` +} + +func (*Signal_SignalArtifact_UserType) isSignal_SignalArtifact_ArtifactType() {} + +func (m *Signal_SignalArtifact) GetArtifactType() isSignal_SignalArtifact_ArtifactType { + if m != nil { + return m.ArtifactType + } + return nil +} + +func (m *Signal_SignalArtifact) GetUserType() string { + if x, ok := m.GetArtifactType().(*Signal_SignalArtifact_UserType); ok { + return x.UserType + } + return "" +} + +func (m *Signal_SignalArtifact) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Signal_SignalArtifact) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Signal_SignalArtifact_OneofMarshaler, _Signal_SignalArtifact_OneofUnmarshaler, _Signal_SignalArtifact_OneofSizer, []interface{}{ + (*Signal_SignalArtifact_UserType)(nil), + } +} + +func _Signal_SignalArtifact_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Signal_SignalArtifact) + // artifact_type + switch x := m.ArtifactType.(type) { + case *Signal_SignalArtifact_UserType: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.UserType) + case nil: + default: + return fmt.Errorf("Signal_SignalArtifact.ArtifactType has unexpected type %T", x) + } + return nil +} + +func _Signal_SignalArtifact_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Signal_SignalArtifact) + switch tag { + case 2: // artifact_type.user_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ArtifactType = &Signal_SignalArtifact_UserType{x} + return true, err + default: + return false, nil + } +} + +func _Signal_SignalArtifact_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Signal_SignalArtifact) + // artifact_type + switch x := m.ArtifactType.(type) { + case *Signal_SignalArtifact_UserType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.UserType))) + n += len(x.UserType) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A text annotation by a user. +type Annotation struct { + // Resource name of the annotation, for example, + // "projects/{project_id}/incidents/{incident_id}/annotations/{annotation_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. Author of the annotation. + Author *User `protobuf:"bytes,2,opt,name=author,proto3" json:"author,omitempty"` + // Output only. Time the annotation was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Content of the annotation. Immutable. 
+ Content string `protobuf:"bytes,4,opt,name=content,proto3" json:"content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Annotation) Reset() { *m = Annotation{} } +func (m *Annotation) String() string { return proto.CompactTextString(m) } +func (*Annotation) ProtoMessage() {} +func (*Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{2} +} +func (m *Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Annotation.Unmarshal(m, b) +} +func (m *Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Annotation.Marshal(b, m, deterministic) +} +func (dst *Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Annotation.Merge(dst, src) +} +func (m *Annotation) XXX_Size() int { + return xxx_messageInfo_Annotation.Size(m) +} +func (m *Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_Annotation proto.InternalMessageInfo + +func (m *Annotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Annotation) GetAuthor() *User { + if m != nil { + return m.Author + } + return nil +} + +func (m *Annotation) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Annotation) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +// A tag by a user. +type Tag struct { + // Resource name of a tag, for example, + // "projects/{project_id}/incidents/{incident_id}/tags/{tag_id}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Display name of the resource (for example, "cause:rollout"). Immutable. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Tag) Reset() { *m = Tag{} } +func (m *Tag) String() string { return proto.CompactTextString(m) } +func (*Tag) ProtoMessage() {} +func (*Tag) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{3} +} +func (m *Tag) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Tag.Unmarshal(m, b) +} +func (m *Tag) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Tag.Marshal(b, m, deterministic) +} +func (dst *Tag) XXX_Merge(src proto.Message) { + xxx_messageInfo_Tag.Merge(dst, src) +} +func (m *Tag) XXX_Size() int { + return xxx_messageInfo_Tag.Size(m) +} +func (m *Tag) XXX_DiscardUnknown() { + xxx_messageInfo_Tag.DiscardUnknown(m) +} + +var xxx_messageInfo_Tag proto.InternalMessageInfo + +func (m *Tag) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Tag) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +// Synopsis is a summary of an incident and it contains a textual content, +// an author and a last updated timestamp. +type Synopsis struct { + // Content type string, for example, 'text/plain' or 'text/html'. + ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // Textual content of the synopsis. It can be plain text or markdown as + // indicated by the content_type. 
+ Content string `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"` + // Last updated timestamp. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Author of the synopsis. + Author *User `protobuf:"bytes,4,opt,name=author,proto3" json:"author,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Synopsis) Reset() { *m = Synopsis{} } +func (m *Synopsis) String() string { return proto.CompactTextString(m) } +func (*Synopsis) ProtoMessage() {} +func (*Synopsis) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{4} +} +func (m *Synopsis) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Synopsis.Unmarshal(m, b) +} +func (m *Synopsis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Synopsis.Marshal(b, m, deterministic) +} +func (dst *Synopsis) XXX_Merge(src proto.Message) { + xxx_messageInfo_Synopsis.Merge(dst, src) +} +func (m *Synopsis) XXX_Size() int { + return xxx_messageInfo_Synopsis.Size(m) +} +func (m *Synopsis) XXX_DiscardUnknown() { + xxx_messageInfo_Synopsis.DiscardUnknown(m) +} + +var xxx_messageInfo_Synopsis proto.InternalMessageInfo + +func (m *Synopsis) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *Synopsis) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *Synopsis) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Synopsis) GetAuthor() *User { + if m != nil { + return m.Author + } + return nil +} + +// Representation of an incident. +type Incident struct { + // Output only. Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // One-line summary of the incident. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Escalation level of the incident. + EscalationLevel Incident_EscalationLevel `protobuf:"varint,3,opt,name=escalation_level,json=escalationLevel,proto3,enum=google.cloud.irm.v1alpha2.Incident_EscalationLevel" json:"escalation_level,omitempty"` + // Etag to validate the object is unchanged for a read-modify-write operation. + // An empty etag will overwrite other changes. + Etag string `protobuf:"bytes,4,opt,name=etag,proto3" json:"etag,omitempty"` + // Severity of the incident. + Severity Incident_Severity `protobuf:"varint,5,opt,name=severity,proto3,enum=google.cloud.irm.v1alpha2.Incident_Severity" json:"severity,omitempty"` + // Stage of the incident. + Stage Incident_Stage `protobuf:"varint,6,opt,name=stage,proto3,enum=google.cloud.irm.v1alpha2.Incident_Stage" json:"stage,omitempty"` + // Resource name of the incident this incident is a duplicate of. Empty if + // this incident is not a duplicate. + // An incident can only be a duplicate of an incident that is not marked as a + // duplicate already. Setting this to a non-empty value must also set the + // stage to `STAGE_DUPLICATE`. Unsetting this value value must also update + // `stage` to a value other than `STAGE_DUPLICATE`. + DuplicateIncident string `protobuf:"bytes,9,opt,name=duplicate_incident,json=duplicateIncident,proto3" json:"duplicate_incident,omitempty"` + // Output only. Time this incident started. Used to measure the 'elapsed + // time'. 
Start time of an incident is the earliest creation time of any of + // its Signals or the create time of the incident if no Signals are assigned. + StartTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Synopsis of this incident. + Synopsis *Synopsis `protobuf:"bytes,8,opt,name=synopsis,proto3" json:"synopsis,omitempty"` + // Location of communications for this incident. This is informational + // only; IRM does not use this to send messages. + CommunicationVenue *Incident_CommunicationVenue `protobuf:"bytes,10,opt,name=communication_venue,json=communicationVenue,proto3" json:"communication_venue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Incident) Reset() { *m = Incident{} } +func (m *Incident) String() string { return proto.CompactTextString(m) } +func (*Incident) ProtoMessage() {} +func (*Incident) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5} +} +func (m *Incident) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Incident.Unmarshal(m, b) +} +func (m *Incident) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Incident.Marshal(b, m, deterministic) +} +func (dst *Incident) XXX_Merge(src proto.Message) { + xxx_messageInfo_Incident.Merge(dst, src) +} +func (m *Incident) XXX_Size() int { + return xxx_messageInfo_Incident.Size(m) +} +func (m *Incident) XXX_DiscardUnknown() { + xxx_messageInfo_Incident.DiscardUnknown(m) +} + +var xxx_messageInfo_Incident proto.InternalMessageInfo + +func (m *Incident) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Incident) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Incident) GetEscalationLevel() Incident_EscalationLevel { + if m != nil { + return m.EscalationLevel + } + return Incident_ESCALATION_LEVEL_UNSPECIFIED +} + +func (m *Incident) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *Incident) GetSeverity() Incident_Severity { + if m != nil { + return m.Severity + } + return Incident_SEVERITY_UNSPECIFIED +} + +func (m *Incident) GetStage() Incident_Stage { + if m != nil { + return m.Stage + } + return Incident_STAGE_UNSPECIFIED +} + +func (m *Incident) GetDuplicateIncident() string { + if m != nil { + return m.DuplicateIncident + } + return "" +} + +func (m *Incident) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Incident) GetSynopsis() *Synopsis { + if m != nil { + return m.Synopsis + } + return nil +} + +func (m *Incident) GetCommunicationVenue() *Incident_CommunicationVenue { + if m != nil { + return m.CommunicationVenue + } + return nil +} + +// CommunicationVenue is a record of where conversations about an incident +// are happening. +type Incident_CommunicationVenue struct { + // A URI to the web interface of the channel. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // A name representing the channel in IRM UI. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The type of channel/venue for incident communications. 
+ ChannelType Incident_CommunicationVenue_ChannelType `protobuf:"varint,3,opt,name=channel_type,json=channelType,proto3,enum=google.cloud.irm.v1alpha2.Incident_CommunicationVenue_ChannelType" json:"channel_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Incident_CommunicationVenue) Reset() { *m = Incident_CommunicationVenue{} } +func (m *Incident_CommunicationVenue) String() string { return proto.CompactTextString(m) } +func (*Incident_CommunicationVenue) ProtoMessage() {} +func (*Incident_CommunicationVenue) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{5, 0} +} +func (m *Incident_CommunicationVenue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Incident_CommunicationVenue.Unmarshal(m, b) +} +func (m *Incident_CommunicationVenue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Incident_CommunicationVenue.Marshal(b, m, deterministic) +} +func (dst *Incident_CommunicationVenue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Incident_CommunicationVenue.Merge(dst, src) +} +func (m *Incident_CommunicationVenue) XXX_Size() int { + return xxx_messageInfo_Incident_CommunicationVenue.Size(m) +} +func (m *Incident_CommunicationVenue) XXX_DiscardUnknown() { + xxx_messageInfo_Incident_CommunicationVenue.DiscardUnknown(m) +} + +var xxx_messageInfo_Incident_CommunicationVenue proto.InternalMessageInfo + +func (m *Incident_CommunicationVenue) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *Incident_CommunicationVenue) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Incident_CommunicationVenue) GetChannelType() Incident_CommunicationVenue_ChannelType { + if m != nil { + return m.ChannelType + } + return Incident_CommunicationVenue_CHANNEL_TYPE_UNSPECIFIED +} + +// Describes a role that can be assigned to an incident. +type IncidentRole struct { + // The type of role. The role type is immutable in role assignments. Each role + // type can only be used once per incident, except for TYPE_OTHER. + Type IncidentRole_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.irm.v1alpha2.IncidentRole_Type" json:"type,omitempty"` + // Output only unless TYPE_OTHER is used. Title of the role. For TYPE_OTHER, + // must be unique within an incident. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Output only unless TYPE_OTHER is used. Description of the role. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IncidentRole) Reset() { *m = IncidentRole{} } +func (m *IncidentRole) String() string { return proto.CompactTextString(m) } +func (*IncidentRole) ProtoMessage() {} +func (*IncidentRole) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{6} +} +func (m *IncidentRole) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IncidentRole.Unmarshal(m, b) +} +func (m *IncidentRole) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IncidentRole.Marshal(b, m, deterministic) +} +func (dst *IncidentRole) XXX_Merge(src proto.Message) { + xxx_messageInfo_IncidentRole.Merge(dst, src) +} +func (m *IncidentRole) XXX_Size() int { + return xxx_messageInfo_IncidentRole.Size(m) +} +func (m *IncidentRole) XXX_DiscardUnknown() { + xxx_messageInfo_IncidentRole.DiscardUnknown(m) +} + +var xxx_messageInfo_IncidentRole proto.InternalMessageInfo + +func (m *IncidentRole) GetType() IncidentRole_Type { + if m != nil { + return m.Type + } + return IncidentRole_TYPE_UNSPECIFIED +} + +func (m *IncidentRole) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *IncidentRole) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Stores the assignee of a role as well as the proposed next assignee. +type IncidentRoleAssignment struct { + // Output only. Resource name such as + // "projects/{project_id}/incidents/{incident_id}/role_assignments/{role_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. Etag for this version of the resource. Must be specified in + // update requests and match the current version in storage. Must not be + // modified by the client. + Etag string `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + // The role that is or will be assigned. + Role *IncidentRole `protobuf:"bytes,3,opt,name=role,proto3" json:"role,omitempty"` + // The user this role is assigned to. This field can only be directly set + // during creation request. Subsequent updates are done via the + // IncidentRoleHandover methods. + Assignee *User `protobuf:"bytes,4,opt,name=assignee,proto3" json:"assignee,omitempty"` + // The recipient of a requested role handoff. This field can only be directly + // set during creation request. Subsequent updates are done via the + // IncidentRoleHandover methods. + // + // `assignee` is always the current role-holder, and `proposed_assignee` is + // used to track unfinished assignments and handoffs. Let's say Bob assigns + // Alice to a role. Then the fields are: + // `assignee`: nil, `proposed_assignee`: Alice + // If Alice accepts, then the fields are: + // `assignee`: Alice, `proposed_assignee`: nil + // If she cancels, then the RoleAssignment is deleted. + // Let's say Alice has the role. 
Then the fields are: + // `assignee`: Alice, `proposed_assignee`: nil + // If Alice becomes incapacitated and Bob requests Carol to take over, then + // the fields are: + // `assignee`: Alice, `proposed_assignee`: Carol + // After Carol accepts the handover, the fields are: + // `assignee`: Carol, `proposed_assignee`: nil + // Or if Carol refuses the handover, the fields are: + // `assignee`: Alice, `proposed_assignee`: nil + ProposedAssignee *User `protobuf:"bytes,5,opt,name=proposed_assignee,json=proposedAssignee,proto3" json:"proposed_assignee,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IncidentRoleAssignment) Reset() { *m = IncidentRoleAssignment{} } +func (m *IncidentRoleAssignment) String() string { return proto.CompactTextString(m) } +func (*IncidentRoleAssignment) ProtoMessage() {} +func (*IncidentRoleAssignment) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{7} +} +func (m *IncidentRoleAssignment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IncidentRoleAssignment.Unmarshal(m, b) +} +func (m *IncidentRoleAssignment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IncidentRoleAssignment.Marshal(b, m, deterministic) +} +func (dst *IncidentRoleAssignment) XXX_Merge(src proto.Message) { + xxx_messageInfo_IncidentRoleAssignment.Merge(dst, src) +} +func (m *IncidentRoleAssignment) XXX_Size() int { + return xxx_messageInfo_IncidentRoleAssignment.Size(m) +} +func (m *IncidentRoleAssignment) XXX_DiscardUnknown() { + xxx_messageInfo_IncidentRoleAssignment.DiscardUnknown(m) +} + +var xxx_messageInfo_IncidentRoleAssignment proto.InternalMessageInfo + +func (m *IncidentRoleAssignment) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *IncidentRoleAssignment) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *IncidentRoleAssignment) GetRole() *IncidentRole { + if m != nil { + return m.Role + } + return nil +} + +func (m *IncidentRoleAssignment) GetAssignee() *User { + if m != nil { + return m.Assignee + } + return nil +} + +func (m *IncidentRoleAssignment) GetProposedAssignee() *User { + if m != nil { + return m.ProposedAssignee + } + return nil +} + +// External artifact associated to an incident. +type Artifact struct { + // Output only. Resource name such as + // "projects/{project_id}/incidents/{incident_id}/artifacts/{artifact_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // User provided name of an artifact. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. Etag for this version of the resource. Must be specified in + // update requests and match the current version in storage. Must not be + // modified by the client. + Etag string `protobuf:"bytes,3,opt,name=etag,proto3" json:"etag,omitempty"` + // URL to access the artifact. + Url string `protobuf:"bytes,4,opt,name=url,proto3" json:"url,omitempty"` + // Type of this artifact. 
+ Type Artifact_Type `protobuf:"varint,5,opt,name=type,proto3,enum=google.cloud.irm.v1alpha2.Artifact_Type" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Artifact) Reset() { *m = Artifact{} } +func (m *Artifact) String() string { return proto.CompactTextString(m) } +func (*Artifact) ProtoMessage() {} +func (*Artifact) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{8} +} +func (m *Artifact) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifact.Unmarshal(m, b) +} +func (m *Artifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifact.Marshal(b, m, deterministic) +} +func (dst *Artifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifact.Merge(dst, src) +} +func (m *Artifact) XXX_Size() int { + return xxx_messageInfo_Artifact.Size(m) +} +func (m *Artifact) XXX_DiscardUnknown() { + xxx_messageInfo_Artifact.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifact proto.InternalMessageInfo + +func (m *Artifact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Artifact) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Artifact) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *Artifact) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Artifact) GetType() Artifact_Type { + if m != nil { + return m.Type + } + return Artifact_TYPE_UNSPECIFIED +} + +// Communication Channels are mechanisms used to receive notifications +// about changes to incidents. +type CommunicationChannel struct { + // An endpoint describes how messages will be delivered. 
+ // + // Types that are valid to be assigned to Endpoint: + // *CommunicationChannel_Email_ + // *CommunicationChannel_NotificationChannel_ + Endpoint isCommunicationChannel_Endpoint `protobuf_oneof:"endpoint"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommunicationChannel) Reset() { *m = CommunicationChannel{} } +func (m *CommunicationChannel) String() string { return proto.CompactTextString(m) } +func (*CommunicationChannel) ProtoMessage() {} +func (*CommunicationChannel) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{9} +} +func (m *CommunicationChannel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommunicationChannel.Unmarshal(m, b) +} +func (m *CommunicationChannel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommunicationChannel.Marshal(b, m, deterministic) +} +func (dst *CommunicationChannel) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommunicationChannel.Merge(dst, src) +} +func (m *CommunicationChannel) XXX_Size() int { + return xxx_messageInfo_CommunicationChannel.Size(m) +} +func (m *CommunicationChannel) XXX_DiscardUnknown() { + xxx_messageInfo_CommunicationChannel.DiscardUnknown(m) +} + +var xxx_messageInfo_CommunicationChannel proto.InternalMessageInfo + +type isCommunicationChannel_Endpoint interface { + isCommunicationChannel_Endpoint() +} + +type CommunicationChannel_Email_ struct { + Email *CommunicationChannel_Email `protobuf:"bytes,1,opt,name=email,proto3,oneof"` +} + +type CommunicationChannel_NotificationChannel_ struct { + NotificationChannel *CommunicationChannel_NotificationChannel `protobuf:"bytes,2,opt,name=notification_channel,json=notificationChannel,proto3,oneof"` +} + +func (*CommunicationChannel_Email_) isCommunicationChannel_Endpoint() {} + +func (*CommunicationChannel_NotificationChannel_) isCommunicationChannel_Endpoint() {} + +func (m *CommunicationChannel) GetEndpoint() isCommunicationChannel_Endpoint { + if m != nil { + return m.Endpoint + } + return nil +} + +func (m *CommunicationChannel) GetEmail() *CommunicationChannel_Email { + if x, ok := m.GetEndpoint().(*CommunicationChannel_Email_); ok { + return x.Email + } + return nil +} + +func (m *CommunicationChannel) GetNotificationChannel() *CommunicationChannel_NotificationChannel { + if x, ok := m.GetEndpoint().(*CommunicationChannel_NotificationChannel_); ok { + return x.NotificationChannel + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CommunicationChannel) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommunicationChannel_OneofMarshaler, _CommunicationChannel_OneofUnmarshaler, _CommunicationChannel_OneofSizer, []interface{}{ + (*CommunicationChannel_Email_)(nil), + (*CommunicationChannel_NotificationChannel_)(nil), + } +} + +func _CommunicationChannel_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommunicationChannel) + // endpoint + switch x := m.Endpoint.(type) { + case *CommunicationChannel_Email_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Email); err != nil { + return err + } + case *CommunicationChannel_NotificationChannel_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NotificationChannel); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CommunicationChannel.Endpoint has unexpected type %T", x) + } + return nil +} + +func _CommunicationChannel_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommunicationChannel) + switch tag { + case 1: // endpoint.email + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CommunicationChannel_Email) + err := b.DecodeMessage(msg) + m.Endpoint = &CommunicationChannel_Email_{msg} + return true, err + case 2: // endpoint.notification_channel + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CommunicationChannel_NotificationChannel) + err := b.DecodeMessage(msg) + m.Endpoint = &CommunicationChannel_NotificationChannel_{msg} + return true, err + default: + return false, nil + } +} + +func _CommunicationChannel_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommunicationChannel) + // endpoint + switch x := m.Endpoint.(type) { + case *CommunicationChannel_Email_: + s := proto.Size(x.Email) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CommunicationChannel_NotificationChannel_: + s := proto.Size(x.NotificationChannel) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A communication channel that delivers messages to an email address. +type CommunicationChannel_Email struct { + // The email address, for example, "user@example.com". 
+ Address string `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommunicationChannel_Email) Reset() { *m = CommunicationChannel_Email{} } +func (m *CommunicationChannel_Email) String() string { return proto.CompactTextString(m) } +func (*CommunicationChannel_Email) ProtoMessage() {} +func (*CommunicationChannel_Email) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{9, 0} +} +func (m *CommunicationChannel_Email) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommunicationChannel_Email.Unmarshal(m, b) +} +func (m *CommunicationChannel_Email) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommunicationChannel_Email.Marshal(b, m, deterministic) +} +func (dst *CommunicationChannel_Email) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommunicationChannel_Email.Merge(dst, src) +} +func (m *CommunicationChannel_Email) XXX_Size() int { + return xxx_messageInfo_CommunicationChannel_Email.Size(m) +} +func (m *CommunicationChannel_Email) XXX_DiscardUnknown() { + xxx_messageInfo_CommunicationChannel_Email.DiscardUnknown(m) +} + +var xxx_messageInfo_CommunicationChannel_Email proto.InternalMessageInfo + +func (m *CommunicationChannel_Email) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +// A communication channel that delivers messages to a Stackdriver +// notification channel. +type CommunicationChannel_NotificationChannel struct { + // Stackdriver notification channel name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommunicationChannel_NotificationChannel) Reset() { + *m = CommunicationChannel_NotificationChannel{} +} +func (m *CommunicationChannel_NotificationChannel) String() string { return proto.CompactTextString(m) } +func (*CommunicationChannel_NotificationChannel) ProtoMessage() {} +func (*CommunicationChannel_NotificationChannel) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{9, 1} +} +func (m *CommunicationChannel_NotificationChannel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommunicationChannel_NotificationChannel.Unmarshal(m, b) +} +func (m *CommunicationChannel_NotificationChannel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommunicationChannel_NotificationChannel.Marshal(b, m, deterministic) +} +func (dst *CommunicationChannel_NotificationChannel) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommunicationChannel_NotificationChannel.Merge(dst, src) +} +func (m *CommunicationChannel_NotificationChannel) XXX_Size() int { + return xxx_messageInfo_CommunicationChannel_NotificationChannel.Size(m) +} +func (m *CommunicationChannel_NotificationChannel) XXX_DiscardUnknown() { + xxx_messageInfo_CommunicationChannel_NotificationChannel.DiscardUnknown(m) +} + +var xxx_messageInfo_CommunicationChannel_NotificationChannel proto.InternalMessageInfo + +func (m *CommunicationChannel_NotificationChannel) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A subscription allows users to get notifications about changes to +// an incident. +type Subscription struct { + // Output only. 
Resource name such as + // "projects/{project_id}/incidents/{incident_id}/subscriptions/{subscription_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. Etag for this version of the resource. Must be specified in + // update requests and match the current version in storage. Must not be + // modified by the client. + Etag string `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + // A communications channel to send subscription messages to. + SubscriptionChannel *CommunicationChannel `protobuf:"bytes,3,opt,name=subscription_channel,json=subscriptionChannel,proto3" json:"subscription_channel,omitempty"` + // Types of events this subscription receives notifications for. + EventTypes []Subscription_EventType `protobuf:"varint,4,rep,packed,name=event_types,json=eventTypes,proto3,enum=google.cloud.irm.v1alpha2.Subscription_EventType" json:"event_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Subscription) Reset() { *m = Subscription{} } +func (m *Subscription) String() string { return proto.CompactTextString(m) } +func (*Subscription) ProtoMessage() {} +func (*Subscription) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_a3a68162641107b4, []int{10} +} +func (m *Subscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Subscription.Unmarshal(m, b) +} +func (m *Subscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Subscription.Marshal(b, m, deterministic) +} +func (dst *Subscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_Subscription.Merge(dst, src) +} +func (m *Subscription) XXX_Size() int { + return xxx_messageInfo_Subscription.Size(m) +} +func (m *Subscription) XXX_DiscardUnknown() { + xxx_messageInfo_Subscription.DiscardUnknown(m) +} + +var xxx_messageInfo_Subscription proto.InternalMessageInfo + +func (m *Subscription) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Subscription) GetEtag() string { + if m != nil { + return m.Etag + } + return "" +} + +func (m *Subscription) GetSubscriptionChannel() *CommunicationChannel { + if m != nil { + return m.SubscriptionChannel + } + return nil +} + +func (m *Subscription) GetEventTypes() []Subscription_EventType { + if m != nil { + return m.EventTypes + } + return nil +} + +func init() { + proto.RegisterType((*User)(nil), "google.cloud.irm.v1alpha2.User") + proto.RegisterType((*Signal)(nil), "google.cloud.irm.v1alpha2.Signal") + proto.RegisterType((*Signal_SignalArtifact)(nil), "google.cloud.irm.v1alpha2.Signal.SignalArtifact") + proto.RegisterType((*Annotation)(nil), "google.cloud.irm.v1alpha2.Annotation") + proto.RegisterType((*Tag)(nil), "google.cloud.irm.v1alpha2.Tag") + proto.RegisterType((*Synopsis)(nil), "google.cloud.irm.v1alpha2.Synopsis") + proto.RegisterType((*Incident)(nil), "google.cloud.irm.v1alpha2.Incident") + proto.RegisterType((*Incident_CommunicationVenue)(nil), "google.cloud.irm.v1alpha2.Incident.CommunicationVenue") + proto.RegisterType((*IncidentRole)(nil), "google.cloud.irm.v1alpha2.IncidentRole") + proto.RegisterType((*IncidentRoleAssignment)(nil), "google.cloud.irm.v1alpha2.IncidentRoleAssignment") + proto.RegisterType((*Artifact)(nil), "google.cloud.irm.v1alpha2.Artifact") + proto.RegisterType((*CommunicationChannel)(nil), "google.cloud.irm.v1alpha2.CommunicationChannel") + proto.RegisterType((*CommunicationChannel_Email)(nil), 
"google.cloud.irm.v1alpha2.CommunicationChannel.Email") + proto.RegisterType((*CommunicationChannel_NotificationChannel)(nil), "google.cloud.irm.v1alpha2.CommunicationChannel.NotificationChannel") + proto.RegisterType((*Subscription)(nil), "google.cloud.irm.v1alpha2.Subscription") + proto.RegisterEnum("google.cloud.irm.v1alpha2.Signal_State", Signal_State_name, Signal_State_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Incident_EscalationLevel", Incident_EscalationLevel_name, Incident_EscalationLevel_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Incident_Severity", Incident_Severity_name, Incident_Severity_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Incident_Stage", Incident_Stage_name, Incident_Stage_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Incident_CommunicationVenue_ChannelType", Incident_CommunicationVenue_ChannelType_name, Incident_CommunicationVenue_ChannelType_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.IncidentRole_Type", IncidentRole_Type_name, IncidentRole_Type_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Artifact_Type", Artifact_Type_name, Artifact_Type_value) + proto.RegisterEnum("google.cloud.irm.v1alpha2.Subscription_EventType", Subscription_EventType_name, Subscription_EventType_value) +} + +func init() { + proto.RegisterFile("google/cloud/irm/v1alpha2/incidents.proto", fileDescriptor_incidents_a3a68162641107b4) +} + +var fileDescriptor_incidents_a3a68162641107b4 = []byte{ + // 1666 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x58, 0x4f, 0x6f, 0xdb, 0xc8, + 0x15, 0x0f, 0xf5, 0xcf, 0xf2, 0x93, 0xd7, 0x66, 0xc6, 0xde, 0xac, 0xa2, 0x26, 0xbb, 0x0e, 0xf7, + 0xb0, 0x4e, 0xd1, 0x4a, 0x5d, 0x05, 0x6d, 0xb1, 0x70, 0x80, 0x2c, 0x2d, 0x4d, 0x6d, 0xa6, 0x14, + 0x65, 0x0c, 0x29, 0xa3, 0xde, 0x02, 0x25, 0x18, 0x6a, 0x22, 0x13, 0xa0, 0x48, 0x82, 0xa4, 0x8c, + 0xfa, 0x1b, 0x14, 0x28, 0xfa, 0x01, 0xfa, 0x2d, 0x7a, 0xed, 0xa1, 0x5f, 0xa1, 0x97, 0x9e, 0x7a, + 0x2d, 0xd0, 0x9e, 0x7b, 0xec, 0xb1, 0x98, 0xe1, 0x90, 0xa6, 0x64, 0xc5, 0x91, 0x73, 0x32, 0xe7, + 0xbd, 0xf7, 0x7b, 0xf3, 0xe6, 0xbd, 0x37, 0xbf, 0x37, 0x16, 0xbc, 0x9c, 0x85, 0xe1, 0xcc, 0xa7, + 0x3d, 0xd7, 0x0f, 0x17, 0xd3, 0x9e, 0x17, 0xcf, 0x7b, 0xd7, 0xdf, 0x3a, 0x7e, 0x74, 0xe5, 0xf4, + 0x7b, 0x5e, 0xe0, 0x7a, 0x53, 0x1a, 0xa4, 0x49, 0x37, 0x8a, 0xc3, 0x34, 0x44, 0x4f, 0x33, 0xd3, + 0x2e, 0x37, 0xed, 0x7a, 0xf1, 0xbc, 0x9b, 0x9b, 0x76, 0x9e, 0x09, 0x2f, 0x4e, 0xe4, 0xf5, 0x9c, + 0x20, 0x08, 0x53, 0x27, 0xf5, 0xc2, 0x40, 0x00, 0x3b, 0xf9, 0x1e, 0xf3, 0x30, 0xf0, 0xd2, 0x30, + 0xf6, 0x82, 0x59, 0xef, 0xfa, 0x55, 0x6f, 0x4e, 0xd3, 0xd8, 0x73, 0xed, 0x84, 0xc6, 0xd7, 0x9e, + 0x4b, 0x85, 0xe9, 0x97, 0xc2, 0x94, 0xaf, 0xde, 0x2d, 0xde, 0xf7, 0xa6, 0x8b, 0x98, 0xfb, 0x12, + 0xfa, 0xaf, 0x56, 0xf5, 0xa9, 0x37, 0xa7, 0x49, 0xea, 0xcc, 0xa3, 0xcc, 0x40, 0x51, 0xa1, 0x36, + 0x49, 0x68, 0x8c, 0x9e, 0xc2, 0xd6, 0x22, 0xa1, 0xb1, 0xed, 0x4d, 0xdb, 0xd2, 0xa1, 0x74, 0xb4, + 0x7d, 0xf6, 0x88, 0x34, 0x98, 0x40, 0x9b, 0xa2, 0x27, 0x50, 0xa7, 0x73, 0xc7, 0xf3, 0xdb, 0x15, + 0xa1, 0xc8, 0x96, 0x27, 0x0d, 0xa8, 0x31, 0x0b, 0xe5, 0xef, 0x75, 0x68, 0x98, 0xde, 0x2c, 0x70, + 0x7c, 0x84, 0xa0, 0x16, 0x38, 0x73, 0x9a, 0xb9, 0x20, 0xfc, 0x9b, 0xc9, 0x68, 0xea, 0xcc, 0x32, + 0x34, 0xe1, 0xdf, 0xa8, 0x03, 0xcd, 0x3c, 0x5b, 0xed, 0x2a, 0x97, 0x17, 0x6b, 0x74, 0x0c, 0x2d, + 0x37, 0xa6, 0x4e, 0x4a, 0x6d, 0x16, 0x6b, 0xbb, 0x76, 0x28, 0x1d, 0xb5, 0xfa, 0x9d, 0xae, 0x48, + 0x66, 0x7e, 0x90, 0xae, 0x95, 0x1f, 0x84, 0x40, 0x66, 0xce, 0x04, 
0xe8, 0x3b, 0x00, 0xd7, 0x0f, + 0x13, 0x81, 0x85, 0x8f, 0x62, 0xb7, 0xb9, 0x35, 0x87, 0x1e, 0x43, 0x6b, 0x4a, 0x53, 0xea, 0xa6, + 0x19, 0x76, 0xef, 0xe3, 0xfb, 0x66, 0xe6, 0x62, 0xdf, 0x2d, 0x1e, 0x45, 0x18, 0xb7, 0xeb, 0x1c, + 0xf8, 0x55, 0xf7, 0x83, 0xd5, 0xef, 0xb2, 0x84, 0x93, 0xdc, 0x1e, 0x1d, 0x40, 0x3d, 0xf5, 0x52, + 0x9f, 0xb6, 0x1b, 0x3c, 0x11, 0xd9, 0x02, 0xbd, 0x80, 0x1d, 0x37, 0x0c, 0x52, 0x1a, 0xa4, 0x76, + 0x7a, 0x13, 0xd1, 0xf6, 0x16, 0x57, 0xb6, 0x84, 0xcc, 0xba, 0x89, 0x28, 0x6a, 0xc3, 0x96, 0x58, + 0xb6, 0x9b, 0x5c, 0x9b, 0x2f, 0xd1, 0x5b, 0xd8, 0x49, 0x78, 0x41, 0xec, 0x24, 0x75, 0x52, 0xda, + 0xde, 0x3e, 0x94, 0x8e, 0x76, 0xfb, 0xdf, 0xdc, 0x13, 0x52, 0x56, 0xbf, 0xae, 0xc9, 0xcc, 0x49, + 0x2b, 0x03, 0xf3, 0x05, 0xfa, 0x2d, 0xc8, 0xc2, 0x97, 0x13, 0xa7, 0xde, 0x7b, 0xc7, 0x4d, 0x93, + 0xb6, 0x7c, 0x58, 0x3d, 0x6a, 0xf5, 0x7f, 0xb6, 0x81, 0x3f, 0xfe, 0x47, 0x15, 0x40, 0xb2, 0x97, + 0x2c, 0xad, 0x93, 0x0e, 0x81, 0xdd, 0x65, 0x13, 0xf4, 0x1c, 0xb6, 0x79, 0x1f, 0xf2, 0x43, 0xe7, + 0x0d, 0xd7, 0x64, 0x22, 0x7e, 0x66, 0x19, 0xaa, 0x8b, 0xd8, 0x13, 0x3d, 0xc3, 0x3e, 0x4f, 0xf6, + 0xe0, 0xb3, 0x3c, 0x30, 0x0e, 0x52, 0xbe, 0x87, 0x7a, 0x16, 0xf9, 0xe7, 0xf0, 0xd8, 0xb4, 0x54, + 0x0b, 0xdb, 0x13, 0xc3, 0x3c, 0xc7, 0x03, 0xed, 0x57, 0x1a, 0x1e, 0xca, 0x8f, 0xd0, 0x2e, 0x40, + 0x26, 0x1e, 0x9f, 0x63, 0x43, 0x96, 0x90, 0x0c, 0x3b, 0xd9, 0x7a, 0xa0, 0x8f, 0x4d, 0x3c, 0x94, + 0x2b, 0xca, 0x5f, 0x24, 0x00, 0xb5, 0xb8, 0x95, 0x6b, 0x9b, 0xfa, 0x97, 0xd0, 0x70, 0x16, 0xe9, + 0x55, 0x18, 0xf3, 0x18, 0x37, 0x28, 0xb7, 0x30, 0x5f, 0xed, 0xee, 0xea, 0x83, 0xba, 0xbb, 0x54, + 0xf1, 0xda, 0x52, 0xc5, 0x95, 0xd7, 0x50, 0xb5, 0x9c, 0xd9, 0xda, 0x50, 0x5f, 0xc0, 0xce, 0xd4, + 0x4b, 0x22, 0xdf, 0xb9, 0xb1, 0xb9, 0x2e, 0xbb, 0x87, 0x2d, 0x21, 0x33, 0x9c, 0x39, 0x55, 0xfe, + 0x26, 0x41, 0xd3, 0xbc, 0x09, 0xc2, 0x28, 0xf1, 0x92, 0x3b, 0x9d, 0x27, 0xdd, 0xdb, 0x79, 0x95, + 0xe5, 0xce, 0x3b, 0x86, 0xd6, 0x22, 0x9a, 0x3e, 0xe4, 0x78, 0x99, 0x39, 0x3f, 0xde, 0x6d, 0x52, + 0x6b, 0x0f, 0x4a, 0xaa, 0xf2, 0xcf, 0x6d, 0x68, 0x6a, 0x39, 0x7f, 0xac, 0xcb, 0x41, 0x71, 0xc7, + 0x2a, 0xe5, 0x3b, 0xf6, 0x3b, 0x90, 0x69, 0xe2, 0x3a, 0x3e, 0x2f, 0xb3, 0xed, 0xd3, 0x6b, 0xea, + 0xf3, 0x88, 0x77, 0xfb, 0xaf, 0xee, 0xd9, 0x39, 0xdf, 0xa8, 0x8b, 0x0b, 0xac, 0xce, 0xa0, 0x64, + 0x8f, 0x2e, 0x0b, 0x0a, 0xe6, 0xab, 0x95, 0x98, 0xef, 0x0c, 0x9a, 0x09, 0xbd, 0xa6, 0xb1, 0x97, + 0xde, 0x70, 0xa6, 0xd8, 0xed, 0xff, 0x64, 0x93, 0xbd, 0x4c, 0x81, 0x21, 0x05, 0x1a, 0xbd, 0x81, + 0x7a, 0x92, 0x3a, 0xb3, 0x8c, 0x37, 0x76, 0xfb, 0x2f, 0x37, 0x72, 0xc3, 0x00, 0x24, 0xc3, 0xa1, + 0x9f, 0x02, 0x9a, 0x2e, 0x22, 0xdf, 0x73, 0x59, 0xb9, 0x0a, 0x3a, 0xde, 0xe6, 0xc1, 0x3e, 0x2e, + 0x34, 0x45, 0x5e, 0xbf, 0x03, 0x48, 0x52, 0x27, 0x16, 0xf4, 0xb8, 0xf5, 0x71, 0x6a, 0xe5, 0xd6, + 0xbc, 0xb0, 0x6f, 0xa0, 0x99, 0x88, 0xf6, 0xe2, 0x54, 0xd5, 0xea, 0x7f, 0x7d, 0x1f, 0x77, 0x08, + 0x53, 0x52, 0x80, 0xd0, 0x0c, 0xf6, 0xdd, 0x70, 0x3e, 0x5f, 0x04, 0x2c, 0x24, 0x56, 0xac, 0x6b, + 0x1a, 0x2c, 0x72, 0x7e, 0xff, 0xc5, 0x26, 0x27, 0x1f, 0x94, 0xe1, 0x17, 0x0c, 0x4d, 0x90, 0x7b, + 0x47, 0xd6, 0xf9, 0x63, 0x05, 0xd0, 0x5d, 0xd3, 0x9c, 0x76, 0xa4, 0x82, 0x76, 0x36, 0xb8, 0x55, + 0x88, 0xc2, 0x8e, 0x7b, 0xe5, 0x04, 0x01, 0xf5, 0xb3, 0x8b, 0x94, 0xb5, 0xd6, 0xc9, 0xa7, 0x45, + 0xdb, 0x1d, 0x64, 0xae, 0xd8, 0xfd, 0x23, 0x2d, 0xf7, 0x76, 0xa1, 0x5c, 0x42, 0xab, 0xa4, 0x43, + 0xcf, 0xa0, 0x3d, 0x38, 0x53, 0x0d, 0x03, 0xeb, 0xb6, 0x75, 0x79, 0xbe, 0x4a, 0x7e, 0x07, 0x20, + 0x2f, 0x6b, 0x89, 0x26, 0x4b, 0xe8, 0x09, 0xa0, 0x25, 0xa9, 0xa9, 0xab, 0x83, 0x5f, 0xcb, 
0x75, + 0xe5, 0x02, 0xf6, 0x56, 0x9a, 0x1c, 0x1d, 0xc2, 0x33, 0x6c, 0x0e, 0x54, 0x5d, 0xb5, 0xb4, 0xb1, + 0x61, 0xeb, 0xf8, 0x02, 0xeb, 0x2b, 0x5b, 0xbc, 0x80, 0xe7, 0x77, 0x2c, 0xc6, 0xe4, 0x54, 0x35, + 0xb4, 0x1f, 0xb8, 0x48, 0x96, 0x94, 0x3f, 0x31, 0xbe, 0xc9, 0xfb, 0xb8, 0x0d, 0x07, 0x26, 0xbe, + 0xc0, 0x44, 0xb3, 0x2e, 0x57, 0x3c, 0x3d, 0x86, 0xcf, 0x0a, 0xcd, 0xd9, 0xe4, 0x14, 0xcb, 0x12, + 0x42, 0xb0, 0x5b, 0x88, 0x46, 0xea, 0xdb, 0x31, 0x91, 0x2b, 0x68, 0x1f, 0xf6, 0x6e, 0x65, 0x78, + 0xa8, 0x4d, 0x46, 0x72, 0x75, 0xd9, 0x50, 0x33, 0xc6, 0x44, 0xae, 0xa1, 0x2f, 0x60, 0xbf, 0x90, + 0x19, 0xf8, 0x54, 0xd7, 0x4e, 0xb5, 0x13, 0x1d, 0xcb, 0x75, 0xe5, 0xcf, 0x12, 0x9f, 0x19, 0xb3, + 0x7c, 0x66, 0x9c, 0xae, 0xa6, 0x8d, 0x79, 0xe3, 0xe2, 0x21, 0xb6, 0xf0, 0xc0, 0xc2, 0x43, 0xb9, + 0xc6, 0xa3, 0xe3, 0x32, 0x8b, 0x68, 0xea, 0x29, 0x1e, 0xca, 0x12, 0x8f, 0x84, 0x8b, 0x46, 0x9a, + 0xa5, 0x9d, 0xaa, 0xcc, 0xae, 0x72, 0x8b, 0x25, 0xd8, 0x1c, 0xeb, 0x17, 0x78, 0x28, 0x57, 0x59, + 0x19, 0x84, 0xbf, 0xf1, 0x60, 0x32, 0xc2, 0x06, 0xb3, 0xac, 0xdf, 0xc2, 0x87, 0x93, 0x73, 0x5d, + 0x1b, 0xa8, 0x16, 0x96, 0x1b, 0xca, 0x7f, 0x2b, 0xb0, 0x93, 0xf7, 0x05, 0x09, 0x7d, 0x8a, 0xbe, + 0x87, 0x5a, 0xc1, 0xcb, 0x9b, 0xb1, 0x07, 0x83, 0x75, 0x79, 0xe3, 0x70, 0xe4, 0x07, 0xd8, 0xf0, + 0x90, 0xbd, 0x7f, 0x12, 0x37, 0xf6, 0x22, 0x56, 0x6d, 0x31, 0x62, 0xcb, 0x22, 0xe5, 0xdf, 0x12, + 0xd4, 0xac, 0xcc, 0x81, 0xbc, 0xa6, 0xb7, 0x7e, 0x04, 0x5f, 0x70, 0xa9, 0x66, 0x0c, 0xb4, 0x21, + 0x36, 0x2c, 0x7b, 0x30, 0x1e, 0x8d, 0x54, 0x63, 0x88, 0x89, 0x2c, 0xb1, 0xb6, 0xe4, 0x4a, 0x26, + 0x9b, 0x18, 0xec, 0x70, 0xda, 0xd8, 0x30, 0x6d, 0x1d, 0xab, 0x2c, 0x47, 0x6d, 0x38, 0xe0, 0xda, + 0xf1, 0x39, 0x26, 0x65, 0x4d, 0x15, 0x75, 0xe1, 0xc7, 0x5c, 0x83, 0x7f, 0x63, 0x61, 0x62, 0xa8, + 0xba, 0x3d, 0x98, 0x98, 0xd6, 0x78, 0x84, 0xc9, 0x5a, 0x4f, 0xbc, 0xc6, 0xdc, 0xfe, 0x9c, 0x68, + 0x23, 0x95, 0x5c, 0xda, 0x63, 0x63, 0xa0, 0xea, 0xba, 0x5c, 0x47, 0x4f, 0xe1, 0xf3, 0xac, 0xb7, + 0xf1, 0x60, 0x6c, 0x0c, 0x4b, 0xaa, 0x06, 0x7b, 0x11, 0x64, 0xbb, 0x5b, 0x67, 0x98, 0xc8, 0x5b, + 0xca, 0x1f, 0x2a, 0xf0, 0xa4, 0x9c, 0x3b, 0x35, 0x61, 0xef, 0x96, 0xf9, 0x87, 0x86, 0xcb, 0xba, + 0x07, 0xee, 0x31, 0xd4, 0xe2, 0xd0, 0xcf, 0x07, 0xe0, 0x37, 0x1b, 0x16, 0x89, 0x70, 0x10, 0x3a, + 0x86, 0xa6, 0xc3, 0xb7, 0xa4, 0x74, 0xd3, 0x49, 0x58, 0x00, 0x90, 0x0e, 0x8f, 0xa3, 0x38, 0x8c, + 0xc2, 0x84, 0x4e, 0xed, 0xc2, 0xcb, 0x86, 0x6f, 0x52, 0x39, 0x47, 0xaa, 0x02, 0xa8, 0xfc, 0x47, + 0x82, 0x66, 0xf1, 0x36, 0xfb, 0xb4, 0xd7, 0x45, 0x91, 0x9f, 0x6a, 0x29, 0x3f, 0x9c, 0x50, 0x7d, + 0x31, 0x19, 0xd9, 0x27, 0x7a, 0x2d, 0xda, 0x3a, 0x1b, 0x8a, 0x47, 0xf7, 0x84, 0x9a, 0xc7, 0x53, + 0x6a, 0x69, 0xe5, 0xcd, 0xbd, 0x9d, 0xb9, 0x03, 0x4d, 0xc1, 0x76, 0x7a, 0x76, 0x4b, 0xf9, 0xea, + 0xad, 0x46, 0x54, 0x5b, 0x33, 0xcd, 0x09, 0x96, 0x6b, 0xca, 0x5f, 0x2b, 0x70, 0xb0, 0xc4, 0xba, + 0x82, 0x53, 0xd1, 0x28, 0xff, 0xef, 0x47, 0xe2, 0x39, 0xfc, 0xf9, 0x3d, 0x81, 0xad, 0xc3, 0x77, + 0x31, 0x03, 0x17, 0xff, 0x34, 0xa1, 0xdf, 0xc3, 0x41, 0x10, 0xa6, 0xde, 0xfb, 0x7c, 0x90, 0x09, + 0x26, 0x17, 0xcf, 0xc8, 0xc1, 0x43, 0xbd, 0x1b, 0x25, 0x5f, 0x42, 0x76, 0xf6, 0x88, 0xec, 0x07, + 0x77, 0xc5, 0x9d, 0x17, 0x50, 0xe7, 0xb1, 0xb0, 0xd7, 0x9b, 0x33, 0x9d, 0xc6, 0x34, 0x49, 0x44, + 0x25, 0xf3, 0x65, 0xe7, 0x25, 0xec, 0xaf, 0x71, 0xb8, 0xae, 0xee, 0x27, 0x00, 0x4d, 0x1a, 0x4c, + 0xa3, 0xd0, 0x0b, 0x52, 0xe5, 0x1f, 0x55, 0xd8, 0x31, 0x17, 0xef, 0x0a, 0xa2, 0xd8, 0xf8, 0x96, + 0xbc, 0x83, 0x83, 0xa4, 0x84, 0x2b, 0x92, 0x91, 0xdd, 0x9a, 0xde, 0x03, 0x93, 0x41, 0xf6, 0xcb, + 0xce, 0xf2, 0xe0, 
0x09, 0xb4, 0xe8, 0x75, 0xfe, 0x98, 0x4d, 0xda, 0xb5, 0xc3, 0xea, 0xd1, 0x6e, + 0xff, 0xdb, 0xfb, 0x9e, 0x1f, 0x25, 0x27, 0x5d, 0x7c, 0x2d, 0xde, 0xbc, 0x04, 0x68, 0xfe, 0x99, + 0x28, 0xff, 0x92, 0x60, 0xbb, 0xd0, 0xa0, 0x0e, 0x3c, 0xc1, 0x17, 0x8c, 0xef, 0xd6, 0x73, 0x62, + 0x49, 0x67, 0x69, 0x96, 0x8e, 0x6d, 0x36, 0x6a, 0xf9, 0x30, 0xfb, 0x12, 0x3a, 0x25, 0xa5, 0x79, + 0x69, 0x8c, 0xcf, 0x4d, 0xcd, 0xcc, 0xf5, 0x95, 0x15, 0x70, 0x36, 0x1a, 0x84, 0xb2, 0xba, 0x0a, + 0xce, 0xe7, 0x9a, 0xd0, 0xd7, 0xd0, 0x73, 0x78, 0x5a, 0xd2, 0xab, 0x86, 0x31, 0xb6, 0xb2, 0x89, + 0xac, 0x0e, 0xd9, 0xac, 0x61, 0x73, 0x7c, 0xad, 0x5a, 0x38, 0x68, 0x9c, 0x5c, 0xc1, 0xd7, 0x6e, + 0x38, 0xcf, 0x13, 0xc5, 0x52, 0x94, 0xff, 0xf4, 0x50, 0xa4, 0xca, 0x89, 0xbc, 0x73, 0xe9, 0x87, + 0xd7, 0xc2, 0x64, 0x16, 0xfa, 0x4e, 0x30, 0xeb, 0x86, 0xf1, 0xac, 0x37, 0xa3, 0x01, 0x7f, 0x11, + 0xf6, 0x32, 0x95, 0x13, 0x79, 0xc9, 0x9a, 0x5f, 0x4c, 0x8e, 0xbd, 0x78, 0xfe, 0x3f, 0x49, 0x7a, + 0xd7, 0xe0, 0xb6, 0xaf, 0xfe, 0x1f, 0x00, 0x00, 0xff, 0xff, 0xae, 0x2d, 0x04, 0x54, 0x5c, 0x11, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents_service.pb.go new file mode 100644 index 0000000..efa456b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/irm/v1alpha2/incidents_service.pb.go @@ -0,0 +1,3740 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/irm/v1alpha2/incidents_service.proto + +package irm // import "google.golang.org/genproto/googleapis/cloud/irm/v1alpha2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for the CreateIncident method. +type CreateIncidentRequest struct { + // The incident to create. + Incident *Incident `protobuf:"bytes,1,opt,name=incident,proto3" json:"incident,omitempty"` + // The resource name of the hosting Stackdriver project which the incident + // belongs to. + // The name is of the form `projects/{project_id_or_number}` + // . 
+ Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIncidentRequest) Reset() { *m = CreateIncidentRequest{} } +func (m *CreateIncidentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIncidentRequest) ProtoMessage() {} +func (*CreateIncidentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{0} +} +func (m *CreateIncidentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIncidentRequest.Unmarshal(m, b) +} +func (m *CreateIncidentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIncidentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIncidentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIncidentRequest.Merge(dst, src) +} +func (m *CreateIncidentRequest) XXX_Size() int { + return xxx_messageInfo_CreateIncidentRequest.Size(m) +} +func (m *CreateIncidentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIncidentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIncidentRequest proto.InternalMessageInfo + +func (m *CreateIncidentRequest) GetIncident() *Incident { + if m != nil { + return m.Incident + } + return nil +} + +func (m *CreateIncidentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// Request for the GetIncident method. +type GetIncidentRequest struct { + // Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIncidentRequest) Reset() { *m = GetIncidentRequest{} } +func (m *GetIncidentRequest) String() string { return proto.CompactTextString(m) } +func (*GetIncidentRequest) ProtoMessage() {} +func (*GetIncidentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{1} +} +func (m *GetIncidentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIncidentRequest.Unmarshal(m, b) +} +func (m *GetIncidentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIncidentRequest.Marshal(b, m, deterministic) +} +func (dst *GetIncidentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIncidentRequest.Merge(dst, src) +} +func (m *GetIncidentRequest) XXX_Size() int { + return xxx_messageInfo_GetIncidentRequest.Size(m) +} +func (m *GetIncidentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIncidentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIncidentRequest proto.InternalMessageInfo + +func (m *GetIncidentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the UpdateIncident method. +type UpdateIncidentRequest struct { + // The incident to update with the new values. + Incident *Incident `protobuf:"bytes,1,opt,name=incident,proto3" json:"incident,omitempty"` + // List of fields that should be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateIncidentRequest) Reset() { *m = UpdateIncidentRequest{} } +func (m *UpdateIncidentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateIncidentRequest) ProtoMessage() {} +func (*UpdateIncidentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{2} +} +func (m *UpdateIncidentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateIncidentRequest.Unmarshal(m, b) +} +func (m *UpdateIncidentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateIncidentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateIncidentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateIncidentRequest.Merge(dst, src) +} +func (m *UpdateIncidentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateIncidentRequest.Size(m) +} +func (m *UpdateIncidentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateIncidentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateIncidentRequest proto.InternalMessageInfo + +func (m *UpdateIncidentRequest) GetIncident() *Incident { + if m != nil { + return m.Incident + } + return nil +} + +func (m *UpdateIncidentRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request for the SearchSimilarIncidents method. +type SearchSimilarIncidentsRequest struct { + // Resource name of the incident or signal, for example, + // "projects/{project_id}/incidents/{incident_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Number of similar incidents to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Page token from an earlier query, as returned in 'next_page_token'. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchSimilarIncidentsRequest) Reset() { *m = SearchSimilarIncidentsRequest{} } +func (m *SearchSimilarIncidentsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchSimilarIncidentsRequest) ProtoMessage() {} +func (*SearchSimilarIncidentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{3} +} +func (m *SearchSimilarIncidentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchSimilarIncidentsRequest.Unmarshal(m, b) +} +func (m *SearchSimilarIncidentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchSimilarIncidentsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchSimilarIncidentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchSimilarIncidentsRequest.Merge(dst, src) +} +func (m *SearchSimilarIncidentsRequest) XXX_Size() int { + return xxx_messageInfo_SearchSimilarIncidentsRequest.Size(m) +} +func (m *SearchSimilarIncidentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchSimilarIncidentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchSimilarIncidentsRequest proto.InternalMessageInfo + +func (m *SearchSimilarIncidentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SearchSimilarIncidentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchSimilarIncidentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the SearchSimilarIncidents method. +type SearchSimilarIncidentsResponse struct { + // The search results, ordered by descending relevance. + Results []*SearchSimilarIncidentsResponse_Result `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + // Page token to fetch the next set of similar incidents. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchSimilarIncidentsResponse) Reset() { *m = SearchSimilarIncidentsResponse{} } +func (m *SearchSimilarIncidentsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchSimilarIncidentsResponse) ProtoMessage() {} +func (*SearchSimilarIncidentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{4} +} +func (m *SearchSimilarIncidentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchSimilarIncidentsResponse.Unmarshal(m, b) +} +func (m *SearchSimilarIncidentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchSimilarIncidentsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchSimilarIncidentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchSimilarIncidentsResponse.Merge(dst, src) +} +func (m *SearchSimilarIncidentsResponse) XXX_Size() int { + return xxx_messageInfo_SearchSimilarIncidentsResponse.Size(m) +} +func (m *SearchSimilarIncidentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchSimilarIncidentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchSimilarIncidentsResponse proto.InternalMessageInfo + +func (m *SearchSimilarIncidentsResponse) GetResults() []*SearchSimilarIncidentsResponse_Result { + if m != nil { + return m.Results + } + return nil +} + +func (m *SearchSimilarIncidentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A single search result, i.e. an incident with (potentially) additional +// information. +type SearchSimilarIncidentsResponse_Result struct { + // An incident that is "similar" to the incident or signal specified in the + // request. 
+ Incident *Incident `protobuf:"bytes,1,opt,name=incident,proto3" json:"incident,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchSimilarIncidentsResponse_Result) Reset() { *m = SearchSimilarIncidentsResponse_Result{} } +func (m *SearchSimilarIncidentsResponse_Result) String() string { return proto.CompactTextString(m) } +func (*SearchSimilarIncidentsResponse_Result) ProtoMessage() {} +func (*SearchSimilarIncidentsResponse_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{4, 0} +} +func (m *SearchSimilarIncidentsResponse_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchSimilarIncidentsResponse_Result.Unmarshal(m, b) +} +func (m *SearchSimilarIncidentsResponse_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchSimilarIncidentsResponse_Result.Marshal(b, m, deterministic) +} +func (dst *SearchSimilarIncidentsResponse_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchSimilarIncidentsResponse_Result.Merge(dst, src) +} +func (m *SearchSimilarIncidentsResponse_Result) XXX_Size() int { + return xxx_messageInfo_SearchSimilarIncidentsResponse_Result.Size(m) +} +func (m *SearchSimilarIncidentsResponse_Result) XXX_DiscardUnknown() { + xxx_messageInfo_SearchSimilarIncidentsResponse_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchSimilarIncidentsResponse_Result proto.InternalMessageInfo + +func (m *SearchSimilarIncidentsResponse_Result) GetIncident() *Incident { + if m != nil { + return m.Incident + } + return nil +} + +// Request for the CreateAnnotation method. +type CreateAnnotationRequest struct { + // Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Only annotation.content is an input argument. 
+ Annotation *Annotation `protobuf:"bytes,2,opt,name=annotation,proto3" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAnnotationRequest) Reset() { *m = CreateAnnotationRequest{} } +func (m *CreateAnnotationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAnnotationRequest) ProtoMessage() {} +func (*CreateAnnotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{5} +} +func (m *CreateAnnotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAnnotationRequest.Unmarshal(m, b) +} +func (m *CreateAnnotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAnnotationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAnnotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAnnotationRequest.Merge(dst, src) +} +func (m *CreateAnnotationRequest) XXX_Size() int { + return xxx_messageInfo_CreateAnnotationRequest.Size(m) +} +func (m *CreateAnnotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAnnotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAnnotationRequest proto.InternalMessageInfo + +func (m *CreateAnnotationRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateAnnotationRequest) GetAnnotation() *Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +// Request for the ListAnnotations method. +type ListAnnotationsRequest struct { + // Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Number of annotations to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Page token from an earlier query, as returned in `next_page_token`. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotationsRequest) Reset() { *m = ListAnnotationsRequest{} } +func (m *ListAnnotationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAnnotationsRequest) ProtoMessage() {} +func (*ListAnnotationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{6} +} +func (m *ListAnnotationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotationsRequest.Unmarshal(m, b) +} +func (m *ListAnnotationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAnnotationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotationsRequest.Merge(dst, src) +} +func (m *ListAnnotationsRequest) XXX_Size() int { + return xxx_messageInfo_ListAnnotationsRequest.Size(m) +} +func (m *ListAnnotationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotationsRequest proto.InternalMessageInfo + +func (m *ListAnnotationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAnnotationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAnnotationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the ListAnnotations method. +type ListAnnotationsResponse struct { + // List of annotations. + Annotations []*Annotation `protobuf:"bytes,1,rep,name=annotations,proto3" json:"annotations,omitempty"` + // Page token to fetch the next set of annotations. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAnnotationsResponse) Reset() { *m = ListAnnotationsResponse{} } +func (m *ListAnnotationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAnnotationsResponse) ProtoMessage() {} +func (*ListAnnotationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{7} +} +func (m *ListAnnotationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAnnotationsResponse.Unmarshal(m, b) +} +func (m *ListAnnotationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAnnotationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAnnotationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAnnotationsResponse.Merge(dst, src) +} +func (m *ListAnnotationsResponse) XXX_Size() int { + return xxx_messageInfo_ListAnnotationsResponse.Size(m) +} +func (m *ListAnnotationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAnnotationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAnnotationsResponse proto.InternalMessageInfo + +func (m *ListAnnotationsResponse) GetAnnotations() []*Annotation { + if m != nil { + return m.Annotations + } + return nil +} + +func (m *ListAnnotationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the CreateTag method. +type CreateTagRequest struct { + // Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Tag to create. Only tag.display_name is an input argument. + Tag *Tag `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTagRequest) Reset() { *m = CreateTagRequest{} } +func (m *CreateTagRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTagRequest) ProtoMessage() {} +func (*CreateTagRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{8} +} +func (m *CreateTagRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTagRequest.Unmarshal(m, b) +} +func (m *CreateTagRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTagRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTagRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTagRequest.Merge(dst, src) +} +func (m *CreateTagRequest) XXX_Size() int { + return xxx_messageInfo_CreateTagRequest.Size(m) +} +func (m *CreateTagRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTagRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTagRequest proto.InternalMessageInfo + +func (m *CreateTagRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTagRequest) GetTag() *Tag { + if m != nil { + return m.Tag + } + return nil +} + +// Request for the DeleteTag method. +type DeleteTagRequest struct { + // Resource name of the tag. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTagRequest) Reset() { *m = DeleteTagRequest{} } +func (m *DeleteTagRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTagRequest) ProtoMessage() {} +func (*DeleteTagRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{9} +} +func (m *DeleteTagRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTagRequest.Unmarshal(m, b) +} +func (m *DeleteTagRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTagRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTagRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTagRequest.Merge(dst, src) +} +func (m *DeleteTagRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTagRequest.Size(m) +} +func (m *DeleteTagRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTagRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTagRequest proto.InternalMessageInfo + +func (m *DeleteTagRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the ListTagsForIncident method. +type ListTagsRequest struct { + // Resource name of the incident, for example, + // "projects/{project_id}/incidents/{incident_id}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Number of tags to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Page token from an earlier query, as returned in `next_page_token`. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTagsRequest) Reset() { *m = ListTagsRequest{} } +func (m *ListTagsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTagsRequest) ProtoMessage() {} +func (*ListTagsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{10} +} +func (m *ListTagsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTagsRequest.Unmarshal(m, b) +} +func (m *ListTagsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTagsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTagsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTagsRequest.Merge(dst, src) +} +func (m *ListTagsRequest) XXX_Size() int { + return xxx_messageInfo_ListTagsRequest.Size(m) +} +func (m *ListTagsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTagsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTagsRequest proto.InternalMessageInfo + +func (m *ListTagsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTagsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTagsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the ListTagsForIncident method. +type ListTagsResponse struct { + // Tags. + Tags []*Tag `protobuf:"bytes,1,rep,name=tags,proto3" json:"tags,omitempty"` + // Page token to fetch the next set of tags. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTagsResponse) Reset() { *m = ListTagsResponse{} } +func (m *ListTagsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTagsResponse) ProtoMessage() {} +func (*ListTagsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{11} +} +func (m *ListTagsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTagsResponse.Unmarshal(m, b) +} +func (m *ListTagsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTagsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTagsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTagsResponse.Merge(dst, src) +} +func (m *ListTagsResponse) XXX_Size() int { + return xxx_messageInfo_ListTagsResponse.Size(m) +} +func (m *ListTagsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTagsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTagsResponse proto.InternalMessageInfo + +func (m *ListTagsResponse) GetTags() []*Tag { + if m != nil { + return m.Tags + } + return nil +} + +func (m *ListTagsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the CreateSignal method. +type CreateSignalRequest struct { + // The resource name of the hosting Stackdriver project which requested + // signal belongs to. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The signal to create. + Signal *Signal `protobuf:"bytes,2,opt,name=signal,proto3" json:"signal,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSignalRequest) Reset() { *m = CreateSignalRequest{} } +func (m *CreateSignalRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSignalRequest) ProtoMessage() {} +func (*CreateSignalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{12} +} +func (m *CreateSignalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSignalRequest.Unmarshal(m, b) +} +func (m *CreateSignalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSignalRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSignalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSignalRequest.Merge(dst, src) +} +func (m *CreateSignalRequest) XXX_Size() int { + return xxx_messageInfo_CreateSignalRequest.Size(m) +} +func (m *CreateSignalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSignalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSignalRequest proto.InternalMessageInfo + +func (m *CreateSignalRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSignalRequest) GetSignal() *Signal { + if m != nil { + return m.Signal + } + return nil +} + +// Request for the SearchSignals method. +type SearchSignalsRequest struct { + // The resource name of the hosting Stackdriver project which requested + // incidents belong to. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Query to specify which signals should be returned. 
+ Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + // Maximum number of `signals` to return in the response. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Page token from an earlier query, as returned in `next_page_token`. All + // field values except for page_size and page_token should be the same as the + // original query (may return an error or unexpected data otherwise). + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchSignalsRequest) Reset() { *m = SearchSignalsRequest{} } +func (m *SearchSignalsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchSignalsRequest) ProtoMessage() {} +func (*SearchSignalsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{13} +} +func (m *SearchSignalsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchSignalsRequest.Unmarshal(m, b) +} +func (m *SearchSignalsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchSignalsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchSignalsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchSignalsRequest.Merge(dst, src) +} +func (m *SearchSignalsRequest) XXX_Size() int { + return xxx_messageInfo_SearchSignalsRequest.Size(m) +} +func (m *SearchSignalsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchSignalsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchSignalsRequest proto.InternalMessageInfo + +func (m *SearchSignalsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *SearchSignalsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *SearchSignalsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchSignalsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the SearchSignals method. +type SearchSignalsResponse struct { + // Signals that matched the query in the request. + Signals []*Signal `protobuf:"bytes,1,rep,name=signals,proto3" json:"signals,omitempty"` + // Page token to fetch the next set of signals. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchSignalsResponse) Reset() { *m = SearchSignalsResponse{} } +func (m *SearchSignalsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchSignalsResponse) ProtoMessage() {} +func (*SearchSignalsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{14} +} +func (m *SearchSignalsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchSignalsResponse.Unmarshal(m, b) +} +func (m *SearchSignalsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchSignalsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchSignalsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchSignalsResponse.Merge(dst, src) +} +func (m *SearchSignalsResponse) XXX_Size() int { + return xxx_messageInfo_SearchSignalsResponse.Size(m) +} +func (m *SearchSignalsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchSignalsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchSignalsResponse proto.InternalMessageInfo + +func (m *SearchSignalsResponse) GetSignals() []*Signal { + if m != nil { + return m.Signals + } + return nil +} + +func (m *SearchSignalsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the GetSignal method. +type GetSignalRequest struct { + // Resource name of the Signal resource, for example, + // "projects/{project_id}/signals/{signal_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSignalRequest) Reset() { *m = GetSignalRequest{} } +func (m *GetSignalRequest) String() string { return proto.CompactTextString(m) } +func (*GetSignalRequest) ProtoMessage() {} +func (*GetSignalRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{15} +} +func (m *GetSignalRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSignalRequest.Unmarshal(m, b) +} +func (m *GetSignalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSignalRequest.Marshal(b, m, deterministic) +} +func (dst *GetSignalRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSignalRequest.Merge(dst, src) +} +func (m *GetSignalRequest) XXX_Size() int { + return xxx_messageInfo_GetSignalRequest.Size(m) +} +func (m *GetSignalRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSignalRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSignalRequest proto.InternalMessageInfo + +func (m *GetSignalRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the UpdateSignal method. +type UpdateSignalRequest struct { + // The signal to update with the new values. + Signal *Signal `protobuf:"bytes,1,opt,name=signal,proto3" json:"signal,omitempty"` + // List of fields that should be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *UpdateSignalRequest) Reset() { *m = UpdateSignalRequest{} }
+func (m *UpdateSignalRequest) String() string { return proto.CompactTextString(m) }
+func (*UpdateSignalRequest) ProtoMessage() {}
+func (*UpdateSignalRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_incidents_service_6ca02351b7a08ec3, []int{16}
+}
+func (m *UpdateSignalRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_UpdateSignalRequest.Unmarshal(m, b)
+}
+func (m *UpdateSignalRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_UpdateSignalRequest.Marshal(b, m, deterministic)
+}
+func (dst *UpdateSignalRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_UpdateSignalRequest.Merge(dst, src)
+}
+func (m *UpdateSignalRequest) XXX_Size() int {
+ return xxx_messageInfo_UpdateSignalRequest.Size(m)
+}
+func (m *UpdateSignalRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_UpdateSignalRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_UpdateSignalRequest proto.InternalMessageInfo
+
+func (m *UpdateSignalRequest) GetSignal() *Signal {
+ if m != nil {
+ return m.Signal
+ }
+ return nil
+}
+
+func (m *UpdateSignalRequest) GetUpdateMask() *field_mask.FieldMask {
+ if m != nil {
+ return m.UpdateMask
+ }
+ return nil
+}
+
+// Request for the SearchIncidents method.
+type SearchIncidentsRequest struct {
+ // The resource name of the hosting Stackdriver project which requested
+ // incidents belong to.
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
+ // An expression that defines which incidents to return.
+ //
+ // Search atoms can be used to match certain specific fields. Otherwise,
+ // plain text will match text fields in the incident.
+ //
+ // Search atoms:
+ // * `start` - (timestamp) The time the incident started.
+ // * `stage` - The stage of the incident, one of detected, triaged, mitigated,
+ // resolved, documented, or duplicate (which correspond to values in the
+ // Incident.Stage enum). These are ordered, so `stage<resolved` is
+ // equivalent to `stage:detected OR stage:triaged OR stage:mitigated`.
+ // * `severity` - (Incident.Severity) The severity of the incident.
+ // Supports matching on a specific severity (for example, `severity:major`)
+ // or on a range (for example, `severity>medium`, `severity<=minor`, etc.).
+ //
+ // Timestamp formats:
+ // * yyyy-MM-dd - an absolute date, treated as a calendar-day-wide window.
+ // In other words, the "<" operator will match dates before that date, the
+ // ">" operator will match dates after that date, and the ":" or "="
+ // operators will match the entire day.
+ // * Nd (for example, 7d) - a relative number of days ago, treated as a moment
+ // in time (as opposed to a day-wide span). A multiple of 24 hours ago (as
+ // opposed to calendar days). In the case of daylight savings time, it will
+ // apply the current timezone to both ends of the range. Note that exact
+ // matching (for example, `start:7d`) is unlikely to be useful because that
+ // would only match incidents created precisely at a particular instant in
+ // time.
+ //
+ // Examples:
+ //
+ // * `foo` - matches incidents containing the word "foo"
+ // * `"foo bar"` - matches incidents containing the phrase "foo bar"
+ // * `foo bar` or `foo AND bar` - matches incidents containing the words "foo"
+ // and "bar"
+ // * `foo -bar` or `foo AND NOT bar` - matches incidents containing the word
+ // "foo" but not the word "bar"
+ // * `foo OR bar` - matches incidents containing the word "foo" or the word
+ // "bar"
+ // * `start>2018-11-28` - matches incidents which started after November 11,
+ // 2018.
+ // * `start<=2018-11-28` - matches incidents which started on or before
+ // November 11, 2018.
+ // * `start:2018-11-28` - matches incidents which started on November 11,
+ // 2018.
+ // * `start>7d` - matches incidents which started after the point in time 7*24
+ // hours ago
+ // * `start>180d` - similar to 7d, but likely to cross the daylight savings
+ // time boundary, so the end time will be 1 hour different from "now."
+ // * `foo AND start>90d AND stage<resolved` - unresolved incidents from the
+ // past 90 days containing the word "foo"
+ // The metadata lists the phone number, formatted according to local
+ // convention, plus whichever additional elements appear in the text:
+ // * number – the actual number, broken down into sections as per local
+ // convention
+ // * national_prefix – country code, if detected
+ // * area_code – region or area code, if detected
+ // * extension – phone extension (to be dialed after connection), if detected
+ Entity_PHONE_NUMBER Entity_Type = 9
+ // Address
+ //
+ // The metadata identifies the street number and locality plus whichever
+ // additional elements appear in the text:
+ // * street_number – street number
+ // * locality – city or town
+ // * street_name – street/route name, if detected
+ // * postal_code – postal code, if detected
+ // * country – country, if detected
+ // * broad_region – administrative area, such as the state, if detected
+ // * narrow_region – smaller administrative area, such as county, if detected
+ // * sublocality – used in Asian addresses to demark a district within a
+ // city, if detected
+ Entity_ADDRESS Entity_Type = 10
+ // Date
+ //
+ // The metadata identifies the components of the date:
+ // * year – four digit year, if detected
+ // * month – two digit month number, if detected
+ // * day – two digit day number, if detected
+ Entity_DATE Entity_Type = 11
+ // Number
+ //
+ // The metadata is the number itself.
+ Entity_NUMBER Entity_Type = 12
+ // Price
+ //
+ // The metadata identifies the value and currency. + Entity_PRICE Entity_Type = 13 +) + +var Entity_Type_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PERSON", + 2: "LOCATION", + 3: "ORGANIZATION", + 4: "EVENT", + 5: "WORK_OF_ART", + 6: "CONSUMER_GOOD", + 7: "OTHER", + 9: "PHONE_NUMBER", + 10: "ADDRESS", + 11: "DATE", + 12: "NUMBER", + 13: "PRICE", +} +var Entity_Type_value = map[string]int32{ + "UNKNOWN": 0, + "PERSON": 1, + "LOCATION": 2, + "ORGANIZATION": 3, + "EVENT": 4, + "WORK_OF_ART": 5, + "CONSUMER_GOOD": 6, + "OTHER": 7, + "PHONE_NUMBER": 9, + "ADDRESS": 10, + "DATE": 11, + "NUMBER": 12, + "PRICE": 13, +} + +func (x Entity_Type) String() string { + return proto.EnumName(Entity_Type_name, int32(x)) +} +func (Entity_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{2, 0} +} + +// The part of speech tags enum. +type PartOfSpeech_Tag int32 + +const ( + // Unknown + PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0 + // Adjective + PartOfSpeech_ADJ PartOfSpeech_Tag = 1 + // Adposition (preposition and postposition) + PartOfSpeech_ADP PartOfSpeech_Tag = 2 + // Adverb + PartOfSpeech_ADV PartOfSpeech_Tag = 3 + // Conjunction + PartOfSpeech_CONJ PartOfSpeech_Tag = 4 + // Determiner + PartOfSpeech_DET PartOfSpeech_Tag = 5 + // Noun (common and proper) + PartOfSpeech_NOUN PartOfSpeech_Tag = 6 + // Cardinal number + PartOfSpeech_NUM PartOfSpeech_Tag = 7 + // Pronoun + PartOfSpeech_PRON PartOfSpeech_Tag = 8 + // Particle or other function word + PartOfSpeech_PRT PartOfSpeech_Tag = 9 + // Punctuation + PartOfSpeech_PUNCT PartOfSpeech_Tag = 10 + // Verb (all tenses and modes) + PartOfSpeech_VERB PartOfSpeech_Tag = 11 + // Other: foreign words, typos, abbreviations + PartOfSpeech_X PartOfSpeech_Tag = 12 + // Affix + PartOfSpeech_AFFIX PartOfSpeech_Tag = 13 +) + +var PartOfSpeech_Tag_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ADJ", + 2: "ADP", + 3: "ADV", + 4: "CONJ", + 5: "DET", + 6: "NOUN", + 7: "NUM", + 8: "PRON", + 9: "PRT", + 10: "PUNCT", + 11: "VERB", + 12: "X", + 13: "AFFIX", +} +var PartOfSpeech_Tag_value = map[string]int32{ + "UNKNOWN": 0, + "ADJ": 1, + "ADP": 2, + "ADV": 3, + "CONJ": 4, + "DET": 5, + "NOUN": 6, + "NUM": 7, + "PRON": 8, + "PRT": 9, + "PUNCT": 10, + "VERB": 11, + "X": 12, + "AFFIX": 13, +} + +func (x PartOfSpeech_Tag) String() string { + return proto.EnumName(PartOfSpeech_Tag_name, int32(x)) +} +func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 0} +} + +// The characteristic of a verb that expresses time flow during an event. +type PartOfSpeech_Aspect int32 + +const ( + // Aspect is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0 + // Perfective + PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1 + // Imperfective + PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2 + // Progressive + PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3 +) + +var PartOfSpeech_Aspect_name = map[int32]string{ + 0: "ASPECT_UNKNOWN", + 1: "PERFECTIVE", + 2: "IMPERFECTIVE", + 3: "PROGRESSIVE", +} +var PartOfSpeech_Aspect_value = map[string]int32{ + "ASPECT_UNKNOWN": 0, + "PERFECTIVE": 1, + "IMPERFECTIVE": 2, + "PROGRESSIVE": 3, +} + +func (x PartOfSpeech_Aspect) String() string { + return proto.EnumName(PartOfSpeech_Aspect_name, int32(x)) +} +func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 1} +} + +// The grammatical function performed by a noun or pronoun in a phrase, +// clause, or sentence. In some languages, other parts of speech, such as +// adjective and determiner, take case inflection in agreement with the noun. +type PartOfSpeech_Case int32 + +const ( + // Case is not applicable in the analyzed language or is not predicted. + PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0 + // Accusative + PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1 + // Adverbial + PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2 + // Complementive + PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3 + // Dative + PartOfSpeech_DATIVE PartOfSpeech_Case = 4 + // Genitive + PartOfSpeech_GENITIVE PartOfSpeech_Case = 5 + // Instrumental + PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6 + // Locative + PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7 + // Nominative + PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8 + // Oblique + PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9 + // Partitive + PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10 + // Prepositional + PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11 + // Reflexive + PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12 + // Relative + PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13 + // Vocative + PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14 +) + +var PartOfSpeech_Case_name = map[int32]string{ + 0: "CASE_UNKNOWN", + 1: "ACCUSATIVE", + 2: "ADVERBIAL", + 3: "COMPLEMENTIVE", + 4: "DATIVE", + 5: "GENITIVE", + 6: "INSTRUMENTAL", + 7: "LOCATIVE", + 8: "NOMINATIVE", + 9: "OBLIQUE", + 10: "PARTITIVE", + 11: "PREPOSITIONAL", + 12: "REFLEXIVE_CASE", + 13: "RELATIVE_CASE", + 14: "VOCATIVE", +} +var PartOfSpeech_Case_value = map[string]int32{ + "CASE_UNKNOWN": 0, + "ACCUSATIVE": 1, + "ADVERBIAL": 2, + "COMPLEMENTIVE": 3, + "DATIVE": 4, + "GENITIVE": 5, + "INSTRUMENTAL": 6, + "LOCATIVE": 7, + "NOMINATIVE": 8, + "OBLIQUE": 9, + "PARTITIVE": 10, + "PREPOSITIONAL": 11, + "REFLEXIVE_CASE": 12, + "RELATIVE_CASE": 13, + "VOCATIVE": 14, +} + +func (x PartOfSpeech_Case) String() string { + return proto.EnumName(PartOfSpeech_Case_name, int32(x)) +} +func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 2} +} + +// Depending on the language, Form can be categorizing different forms of +// verbs, adjectives, adverbs, etc. For example, categorizing inflected +// endings of verbs and adjectives or distinguishing between short and long +// forms of adjectives and participles +type PartOfSpeech_Form int32 + +const ( + // Form is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0 + // Adnomial + PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1 + // Auxiliary + PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2 + // Complementizer + PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3 + // Final ending + PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4 + // Gerund + PartOfSpeech_GERUND PartOfSpeech_Form = 5 + // Realis + PartOfSpeech_REALIS PartOfSpeech_Form = 6 + // Irrealis + PartOfSpeech_IRREALIS PartOfSpeech_Form = 7 + // Short form + PartOfSpeech_SHORT PartOfSpeech_Form = 8 + // Long form + PartOfSpeech_LONG PartOfSpeech_Form = 9 + // Order form + PartOfSpeech_ORDER PartOfSpeech_Form = 10 + // Specific form + PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11 +) + +var PartOfSpeech_Form_name = map[int32]string{ + 0: "FORM_UNKNOWN", + 1: "ADNOMIAL", + 2: "AUXILIARY", + 3: "COMPLEMENTIZER", + 4: "FINAL_ENDING", + 5: "GERUND", + 6: "REALIS", + 7: "IRREALIS", + 8: "SHORT", + 9: "LONG", + 10: "ORDER", + 11: "SPECIFIC", +} +var PartOfSpeech_Form_value = map[string]int32{ + "FORM_UNKNOWN": 0, + "ADNOMIAL": 1, + "AUXILIARY": 2, + "COMPLEMENTIZER": 3, + "FINAL_ENDING": 4, + "GERUND": 5, + "REALIS": 6, + "IRREALIS": 7, + "SHORT": 8, + "LONG": 9, + "ORDER": 10, + "SPECIFIC": 11, +} + +func (x PartOfSpeech_Form) String() string { + return proto.EnumName(PartOfSpeech_Form_name, int32(x)) +} +func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 3} +} + +// Gender classes of nouns reflected in the behaviour of associated words. +type PartOfSpeech_Gender int32 + +const ( + // Gender is not applicable in the analyzed language or is not predicted. + PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0 + // Feminine + PartOfSpeech_FEMININE PartOfSpeech_Gender = 1 + // Masculine + PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2 + // Neuter + PartOfSpeech_NEUTER PartOfSpeech_Gender = 3 +) + +var PartOfSpeech_Gender_name = map[int32]string{ + 0: "GENDER_UNKNOWN", + 1: "FEMININE", + 2: "MASCULINE", + 3: "NEUTER", +} +var PartOfSpeech_Gender_value = map[string]int32{ + "GENDER_UNKNOWN": 0, + "FEMININE": 1, + "MASCULINE": 2, + "NEUTER": 3, +} + +func (x PartOfSpeech_Gender) String() string { + return proto.EnumName(PartOfSpeech_Gender_name, int32(x)) +} +func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 4} +} + +// The grammatical feature of verbs, used for showing modality and attitude. +type PartOfSpeech_Mood int32 + +const ( + // Mood is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0 + // Conditional + PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1 + // Imperative + PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2 + // Indicative + PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3 + // Interrogative + PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4 + // Jussive + PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5 + // Subjunctive + PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6 +) + +var PartOfSpeech_Mood_name = map[int32]string{ + 0: "MOOD_UNKNOWN", + 1: "CONDITIONAL_MOOD", + 2: "IMPERATIVE", + 3: "INDICATIVE", + 4: "INTERROGATIVE", + 5: "JUSSIVE", + 6: "SUBJUNCTIVE", +} +var PartOfSpeech_Mood_value = map[string]int32{ + "MOOD_UNKNOWN": 0, + "CONDITIONAL_MOOD": 1, + "IMPERATIVE": 2, + "INDICATIVE": 3, + "INTERROGATIVE": 4, + "JUSSIVE": 5, + "SUBJUNCTIVE": 6, +} + +func (x PartOfSpeech_Mood) String() string { + return proto.EnumName(PartOfSpeech_Mood_name, int32(x)) +} +func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 5} +} + +// Count distinctions. +type PartOfSpeech_Number int32 + +const ( + // Number is not applicable in the analyzed language or is not predicted. + PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0 + // Singular + PartOfSpeech_SINGULAR PartOfSpeech_Number = 1 + // Plural + PartOfSpeech_PLURAL PartOfSpeech_Number = 2 + // Dual + PartOfSpeech_DUAL PartOfSpeech_Number = 3 +) + +var PartOfSpeech_Number_name = map[int32]string{ + 0: "NUMBER_UNKNOWN", + 1: "SINGULAR", + 2: "PLURAL", + 3: "DUAL", +} +var PartOfSpeech_Number_value = map[string]int32{ + "NUMBER_UNKNOWN": 0, + "SINGULAR": 1, + "PLURAL": 2, + "DUAL": 3, +} + +func (x PartOfSpeech_Number) String() string { + return proto.EnumName(PartOfSpeech_Number_name, int32(x)) +} +func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 6} +} + +// The distinction between the speaker, second person, third person, etc. +type PartOfSpeech_Person int32 + +const ( + // Person is not applicable in the analyzed language or is not predicted. + PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0 + // First + PartOfSpeech_FIRST PartOfSpeech_Person = 1 + // Second + PartOfSpeech_SECOND PartOfSpeech_Person = 2 + // Third + PartOfSpeech_THIRD PartOfSpeech_Person = 3 + // Reflexive + PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4 +) + +var PartOfSpeech_Person_name = map[int32]string{ + 0: "PERSON_UNKNOWN", + 1: "FIRST", + 2: "SECOND", + 3: "THIRD", + 4: "REFLEXIVE_PERSON", +} +var PartOfSpeech_Person_value = map[string]int32{ + "PERSON_UNKNOWN": 0, + "FIRST": 1, + "SECOND": 2, + "THIRD": 3, + "REFLEXIVE_PERSON": 4, +} + +func (x PartOfSpeech_Person) String() string { + return proto.EnumName(PartOfSpeech_Person_name, int32(x)) +} +func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 7} +} + +// This category shows if the token is part of a proper name. +type PartOfSpeech_Proper int32 + +const ( + // Proper is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0 + // Proper + PartOfSpeech_PROPER PartOfSpeech_Proper = 1 + // Not proper + PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2 +) + +var PartOfSpeech_Proper_name = map[int32]string{ + 0: "PROPER_UNKNOWN", + 1: "PROPER", + 2: "NOT_PROPER", +} +var PartOfSpeech_Proper_value = map[string]int32{ + "PROPER_UNKNOWN": 0, + "PROPER": 1, + "NOT_PROPER": 2, +} + +func (x PartOfSpeech_Proper) String() string { + return proto.EnumName(PartOfSpeech_Proper_name, int32(x)) +} +func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 8} +} + +// Reciprocal features of a pronoun. +type PartOfSpeech_Reciprocity int32 + +const ( + // Reciprocity is not applicable in the analyzed language or is not + // predicted. + PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0 + // Reciprocal + PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1 + // Non-reciprocal + PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2 +) + +var PartOfSpeech_Reciprocity_name = map[int32]string{ + 0: "RECIPROCITY_UNKNOWN", + 1: "RECIPROCAL", + 2: "NON_RECIPROCAL", +} +var PartOfSpeech_Reciprocity_value = map[string]int32{ + "RECIPROCITY_UNKNOWN": 0, + "RECIPROCAL": 1, + "NON_RECIPROCAL": 2, +} + +func (x PartOfSpeech_Reciprocity) String() string { + return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x)) +} +func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 9} +} + +// Time reference. +type PartOfSpeech_Tense int32 + +const ( + // Tense is not applicable in the analyzed language or is not predicted. + PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0 + // Conditional + PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1 + // Future + PartOfSpeech_FUTURE PartOfSpeech_Tense = 2 + // Past + PartOfSpeech_PAST PartOfSpeech_Tense = 3 + // Present + PartOfSpeech_PRESENT PartOfSpeech_Tense = 4 + // Imperfect + PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5 + // Pluperfect + PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6 +) + +var PartOfSpeech_Tense_name = map[int32]string{ + 0: "TENSE_UNKNOWN", + 1: "CONDITIONAL_TENSE", + 2: "FUTURE", + 3: "PAST", + 4: "PRESENT", + 5: "IMPERFECT", + 6: "PLUPERFECT", +} +var PartOfSpeech_Tense_value = map[string]int32{ + "TENSE_UNKNOWN": 0, + "CONDITIONAL_TENSE": 1, + "FUTURE": 2, + "PAST": 3, + "PRESENT": 4, + "IMPERFECT": 5, + "PLUPERFECT": 6, +} + +func (x PartOfSpeech_Tense) String() string { + return proto.EnumName(PartOfSpeech_Tense_name, int32(x)) +} +func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 10} +} + +// The relationship between the action that a verb expresses and the +// participants identified by its arguments. +type PartOfSpeech_Voice int32 + +const ( + // Voice is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0 + // Active + PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1 + // Causative + PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2 + // Passive + PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3 +) + +var PartOfSpeech_Voice_name = map[int32]string{ + 0: "VOICE_UNKNOWN", + 1: "ACTIVE", + 2: "CAUSATIVE", + 3: "PASSIVE", +} +var PartOfSpeech_Voice_value = map[string]int32{ + "VOICE_UNKNOWN": 0, + "ACTIVE": 1, + "CAUSATIVE": 2, + "PASSIVE": 3, +} + +func (x PartOfSpeech_Voice) String() string { + return proto.EnumName(PartOfSpeech_Voice_name, int32(x)) +} +func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5, 11} +} + +// The parse label enum for the token. +type DependencyEdge_Label int32 + +const ( + // Unknown + DependencyEdge_UNKNOWN DependencyEdge_Label = 0 + // Abbreviation modifier + DependencyEdge_ABBREV DependencyEdge_Label = 1 + // Adjectival complement + DependencyEdge_ACOMP DependencyEdge_Label = 2 + // Adverbial clause modifier + DependencyEdge_ADVCL DependencyEdge_Label = 3 + // Adverbial modifier + DependencyEdge_ADVMOD DependencyEdge_Label = 4 + // Adjectival modifier of an NP + DependencyEdge_AMOD DependencyEdge_Label = 5 + // Appositional modifier of an NP + DependencyEdge_APPOS DependencyEdge_Label = 6 + // Attribute dependent of a copular verb + DependencyEdge_ATTR DependencyEdge_Label = 7 + // Auxiliary (non-main) verb + DependencyEdge_AUX DependencyEdge_Label = 8 + // Passive auxiliary + DependencyEdge_AUXPASS DependencyEdge_Label = 9 + // Coordinating conjunction + DependencyEdge_CC DependencyEdge_Label = 10 + // Clausal complement of a verb or adjective + DependencyEdge_CCOMP DependencyEdge_Label = 11 + // Conjunct + DependencyEdge_CONJ DependencyEdge_Label = 12 + // Clausal subject + DependencyEdge_CSUBJ DependencyEdge_Label = 13 + // Clausal passive subject + DependencyEdge_CSUBJPASS DependencyEdge_Label = 14 + // Dependency (unable to determine) + DependencyEdge_DEP DependencyEdge_Label = 15 + // Determiner + DependencyEdge_DET DependencyEdge_Label = 16 + // Discourse + DependencyEdge_DISCOURSE DependencyEdge_Label = 17 + // Direct object + DependencyEdge_DOBJ DependencyEdge_Label = 18 + // Expletive + DependencyEdge_EXPL DependencyEdge_Label = 19 + // Goes with (part of a word in a text not well edited) + DependencyEdge_GOESWITH DependencyEdge_Label = 20 + // Indirect object + DependencyEdge_IOBJ DependencyEdge_Label = 21 + // Marker (word introducing a subordinate clause) + DependencyEdge_MARK DependencyEdge_Label = 22 + // Multi-word expression + DependencyEdge_MWE DependencyEdge_Label = 23 + // Multi-word verbal expression + DependencyEdge_MWV DependencyEdge_Label = 24 + // Negation modifier + DependencyEdge_NEG DependencyEdge_Label = 25 + // Noun compound modifier + DependencyEdge_NN DependencyEdge_Label = 26 + // Noun phrase used as an adverbial modifier + DependencyEdge_NPADVMOD DependencyEdge_Label = 27 + // Nominal subject + DependencyEdge_NSUBJ DependencyEdge_Label = 28 + // Passive nominal subject + DependencyEdge_NSUBJPASS DependencyEdge_Label = 29 + // Numeric modifier of a noun + DependencyEdge_NUM DependencyEdge_Label = 30 + // Element of compound number + DependencyEdge_NUMBER DependencyEdge_Label = 31 + // Punctuation mark + DependencyEdge_P DependencyEdge_Label = 32 + // Parataxis relation + DependencyEdge_PARATAXIS DependencyEdge_Label = 33 + // Participial modifier + DependencyEdge_PARTMOD DependencyEdge_Label = 34 + // The complement of 
a preposition is a clause + DependencyEdge_PCOMP DependencyEdge_Label = 35 + // Object of a preposition + DependencyEdge_POBJ DependencyEdge_Label = 36 + // Possession modifier + DependencyEdge_POSS DependencyEdge_Label = 37 + // Postverbal negative particle + DependencyEdge_POSTNEG DependencyEdge_Label = 38 + // Predicate complement + DependencyEdge_PRECOMP DependencyEdge_Label = 39 + // Preconjunt + DependencyEdge_PRECONJ DependencyEdge_Label = 40 + // Predeterminer + DependencyEdge_PREDET DependencyEdge_Label = 41 + // Prefix + DependencyEdge_PREF DependencyEdge_Label = 42 + // Prepositional modifier + DependencyEdge_PREP DependencyEdge_Label = 43 + // The relationship between a verb and verbal morpheme + DependencyEdge_PRONL DependencyEdge_Label = 44 + // Particle + DependencyEdge_PRT DependencyEdge_Label = 45 + // Associative or possessive marker + DependencyEdge_PS DependencyEdge_Label = 46 + // Quantifier phrase modifier + DependencyEdge_QUANTMOD DependencyEdge_Label = 47 + // Relative clause modifier + DependencyEdge_RCMOD DependencyEdge_Label = 48 + // Complementizer in relative clause + DependencyEdge_RCMODREL DependencyEdge_Label = 49 + // Ellipsis without a preceding predicate + DependencyEdge_RDROP DependencyEdge_Label = 50 + // Referent + DependencyEdge_REF DependencyEdge_Label = 51 + // Remnant + DependencyEdge_REMNANT DependencyEdge_Label = 52 + // Reparandum + DependencyEdge_REPARANDUM DependencyEdge_Label = 53 + // Root + DependencyEdge_ROOT DependencyEdge_Label = 54 + // Suffix specifying a unit of number + DependencyEdge_SNUM DependencyEdge_Label = 55 + // Suffix + DependencyEdge_SUFF DependencyEdge_Label = 56 + // Temporal modifier + DependencyEdge_TMOD DependencyEdge_Label = 57 + // Topic marker + DependencyEdge_TOPIC DependencyEdge_Label = 58 + // Clause headed by an infinite form of the verb that modifies a noun + DependencyEdge_VMOD DependencyEdge_Label = 59 + // Vocative + DependencyEdge_VOCATIVE DependencyEdge_Label = 60 + // Open clausal complement + DependencyEdge_XCOMP DependencyEdge_Label = 61 + // Name suffix + DependencyEdge_SUFFIX DependencyEdge_Label = 62 + // Name title + DependencyEdge_TITLE DependencyEdge_Label = 63 + // Adverbial phrase modifier + DependencyEdge_ADVPHMOD DependencyEdge_Label = 64 + // Causative auxiliary + DependencyEdge_AUXCAUS DependencyEdge_Label = 65 + // Helper auxiliary + DependencyEdge_AUXVV DependencyEdge_Label = 66 + // Rentaishi (Prenominal modifier) + DependencyEdge_DTMOD DependencyEdge_Label = 67 + // Foreign words + DependencyEdge_FOREIGN DependencyEdge_Label = 68 + // Keyword + DependencyEdge_KW DependencyEdge_Label = 69 + // List for chains of comparable items + DependencyEdge_LIST DependencyEdge_Label = 70 + // Nominalized clause + DependencyEdge_NOMC DependencyEdge_Label = 71 + // Nominalized clausal subject + DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72 + // Nominalized clausal passive + DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73 + // Compound of numeric modifier + DependencyEdge_NUMC DependencyEdge_Label = 74 + // Copula + DependencyEdge_COP DependencyEdge_Label = 75 + // Dislocated relation (for fronted/topicalized elements) + DependencyEdge_DISLOCATED DependencyEdge_Label = 76 + // Aspect marker + DependencyEdge_ASP DependencyEdge_Label = 77 + // Genitive modifier + DependencyEdge_GMOD DependencyEdge_Label = 78 + // Genitive object + DependencyEdge_GOBJ DependencyEdge_Label = 79 + // Infinitival modifier + DependencyEdge_INFMOD DependencyEdge_Label = 80 + // Measure + DependencyEdge_MES 
DependencyEdge_Label = 81 + // Nominal complement of a noun + DependencyEdge_NCOMP DependencyEdge_Label = 82 +) + +var DependencyEdge_Label_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ABBREV", + 2: "ACOMP", + 3: "ADVCL", + 4: "ADVMOD", + 5: "AMOD", + 6: "APPOS", + 7: "ATTR", + 8: "AUX", + 9: "AUXPASS", + 10: "CC", + 11: "CCOMP", + 12: "CONJ", + 13: "CSUBJ", + 14: "CSUBJPASS", + 15: "DEP", + 16: "DET", + 17: "DISCOURSE", + 18: "DOBJ", + 19: "EXPL", + 20: "GOESWITH", + 21: "IOBJ", + 22: "MARK", + 23: "MWE", + 24: "MWV", + 25: "NEG", + 26: "NN", + 27: "NPADVMOD", + 28: "NSUBJ", + 29: "NSUBJPASS", + 30: "NUM", + 31: "NUMBER", + 32: "P", + 33: "PARATAXIS", + 34: "PARTMOD", + 35: "PCOMP", + 36: "POBJ", + 37: "POSS", + 38: "POSTNEG", + 39: "PRECOMP", + 40: "PRECONJ", + 41: "PREDET", + 42: "PREF", + 43: "PREP", + 44: "PRONL", + 45: "PRT", + 46: "PS", + 47: "QUANTMOD", + 48: "RCMOD", + 49: "RCMODREL", + 50: "RDROP", + 51: "REF", + 52: "REMNANT", + 53: "REPARANDUM", + 54: "ROOT", + 55: "SNUM", + 56: "SUFF", + 57: "TMOD", + 58: "TOPIC", + 59: "VMOD", + 60: "VOCATIVE", + 61: "XCOMP", + 62: "SUFFIX", + 63: "TITLE", + 64: "ADVPHMOD", + 65: "AUXCAUS", + 66: "AUXVV", + 67: "DTMOD", + 68: "FOREIGN", + 69: "KW", + 70: "LIST", + 71: "NOMC", + 72: "NOMCSUBJ", + 73: "NOMCSUBJPASS", + 74: "NUMC", + 75: "COP", + 76: "DISLOCATED", + 77: "ASP", + 78: "GMOD", + 79: "GOBJ", + 80: "INFMOD", + 81: "MES", + 82: "NCOMP", +} +var DependencyEdge_Label_value = map[string]int32{ + "UNKNOWN": 0, + "ABBREV": 1, + "ACOMP": 2, + "ADVCL": 3, + "ADVMOD": 4, + "AMOD": 5, + "APPOS": 6, + "ATTR": 7, + "AUX": 8, + "AUXPASS": 9, + "CC": 10, + "CCOMP": 11, + "CONJ": 12, + "CSUBJ": 13, + "CSUBJPASS": 14, + "DEP": 15, + "DET": 16, + "DISCOURSE": 17, + "DOBJ": 18, + "EXPL": 19, + "GOESWITH": 20, + "IOBJ": 21, + "MARK": 22, + "MWE": 23, + "MWV": 24, + "NEG": 25, + "NN": 26, + "NPADVMOD": 27, + "NSUBJ": 28, + "NSUBJPASS": 29, + "NUM": 30, + "NUMBER": 31, + "P": 32, + "PARATAXIS": 33, + "PARTMOD": 34, + "PCOMP": 35, + "POBJ": 36, + "POSS": 37, + "POSTNEG": 38, + "PRECOMP": 39, + "PRECONJ": 40, + "PREDET": 41, + "PREF": 42, + "PREP": 43, + "PRONL": 44, + "PRT": 45, + "PS": 46, + "QUANTMOD": 47, + "RCMOD": 48, + "RCMODREL": 49, + "RDROP": 50, + "REF": 51, + "REMNANT": 52, + "REPARANDUM": 53, + "ROOT": 54, + "SNUM": 55, + "SUFF": 56, + "TMOD": 57, + "TOPIC": 58, + "VMOD": 59, + "VOCATIVE": 60, + "XCOMP": 61, + "SUFFIX": 62, + "TITLE": 63, + "ADVPHMOD": 64, + "AUXCAUS": 65, + "AUXVV": 66, + "DTMOD": 67, + "FOREIGN": 68, + "KW": 69, + "LIST": 70, + "NOMC": 71, + "NOMCSUBJ": 72, + "NOMCSUBJPASS": 73, + "NUMC": 74, + "COP": 75, + "DISLOCATED": 76, + "ASP": 77, + "GMOD": 78, + "GOBJ": 79, + "INFMOD": 80, + "MES": 81, + "NCOMP": 82, +} + +func (x DependencyEdge_Label) String() string { + return proto.EnumName(DependencyEdge_Label_name, int32(x)) +} +func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{6, 0} +} + +// The supported types of mentions. 
+type EntityMention_Type int32 + +const ( + // Unknown + EntityMention_TYPE_UNKNOWN EntityMention_Type = 0 + // Proper name + EntityMention_PROPER EntityMention_Type = 1 + // Common noun (or noun compound) + EntityMention_COMMON EntityMention_Type = 2 +) + +var EntityMention_Type_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "PROPER", + 2: "COMMON", +} +var EntityMention_Type_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "PROPER": 1, + "COMMON": 2, +} + +func (x EntityMention_Type) String() string { + return proto.EnumName(EntityMention_Type_name, int32(x)) +} +func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{7, 0} +} + +// ################################################################ # +// +// Represents the input to API methods. +type Document struct { + // Required. If the type is not set or is `TYPE_UNSPECIFIED`, + // returns an `INVALID_ARGUMENT` error. + Type Document_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.language.v1.Document_Type" json:"type,omitempty"` + // The source of the document: a string containing the content or a + // Google Cloud Storage URI. + // + // Types that are valid to be assigned to Source: + // *Document_Content + // *Document_GcsContentUri + Source isDocument_Source `protobuf_oneof:"source"` + // The language of the document (if not specified, the language is + // automatically detected). Both ISO and BCP-47 language codes are + // accepted.
+ // [Language Support](/natural-language/docs/languages) + // lists currently supported languages for each API method. + // If the language (either specified by the caller or automatically detected) + // is not supported by the called API method, an `INVALID_ARGUMENT` error + // is returned. + Language string `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetType() Document_Type { + if m != nil { + return m.Type + } + return Document_TYPE_UNSPECIFIED +} + +type isDocument_Source interface { + isDocument_Source() +} + +type Document_Content struct { + Content string `protobuf:"bytes,2,opt,name=content,proto3,oneof"` +} + +type Document_GcsContentUri struct { + GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,proto3,oneof"` +} + +func (*Document_Content) isDocument_Source() {} + +func (*Document_GcsContentUri) isDocument_Source() {} + +func (m *Document) GetSource() isDocument_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Document) GetContent() string { + if x, ok := m.GetSource().(*Document_Content); ok { + return x.Content + } + return "" +} + +func (m *Document) GetGcsContentUri() string { + if x, ok := m.GetSource().(*Document_GcsContentUri); ok { + return x.GcsContentUri + } + return "" +} + +func (m *Document) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{ + (*Document_Content)(nil), + (*Document_GcsContentUri)(nil), + } +} + +func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Content) + case *Document_GcsContentUri: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GcsContentUri) + case nil: + default: + return fmt.Errorf("Document.Source has unexpected type %T", x) + } + return nil +} + +func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Document) + switch tag { + case 2: // source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_Content{x} + return true, err + case 3: // source.gcs_content_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_GcsContentUri{x} + return true, err + default: + return false, nil + } +} + +func _Document_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *Document_GcsContentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GcsContentUri))) + n += len(x.GcsContentUri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a sentence in the input document. +type Sentence struct { + // The sentence text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // For calls to [AnalyzeSentiment][] or if + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment] is set to + // true, this field will contain the sentiment for the sentence. 
+ Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentence) Reset() { *m = Sentence{} } +func (m *Sentence) String() string { return proto.CompactTextString(m) } +func (*Sentence) ProtoMessage() {} +func (*Sentence) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{1} +} +func (m *Sentence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentence.Unmarshal(m, b) +} +func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentence.Marshal(b, m, deterministic) +} +func (dst *Sentence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentence.Merge(dst, src) +} +func (m *Sentence) XXX_Size() int { + return xxx_messageInfo_Sentence.Size(m) +} +func (m *Sentence) XXX_DiscardUnknown() { + xxx_messageInfo_Sentence.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentence proto.InternalMessageInfo + +func (m *Sentence) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Sentence) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents a phrase in the text that is a known entity, such as +// a person, an organization, or location. The API associates information, such +// as salience and mentions, with entities. +type Entity struct { + // The representative name for the entity. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The entity type. + Type Entity_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1.Entity_Type" json:"type,omitempty"` + // Metadata associated with the entity. + // + // For most entity types, the metadata is a Wikipedia URL (`wikipedia_url`) + // and Knowledge Graph MID (`mid`), if they are available. For the metadata + // associated with other entity types, see the Type table below. + Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The salience score associated with the entity in the [0, 1.0] range. + // + // The salience score for an entity provides information about the + // importance or centrality of that entity to the entire document text. + // Scores closer to 0 are less salient, while scores closer to 1.0 are highly + // salient. + Salience float32 `protobuf:"fixed32,4,opt,name=salience,proto3" json:"salience,omitempty"` + // The mentions of this entity in the input document. The API currently + // supports proper noun mentions. + Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions,proto3" json:"mentions,omitempty"` + // For calls to [AnalyzeEntitySentiment][] or if + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the aggregate sentiment expressed for this + // entity in the provided document. 
+ Sentiment *Sentiment `protobuf:"bytes,6,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{2} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Entity) GetType() Entity_Type { + if m != nil { + return m.Type + } + return Entity_UNKNOWN +} + +func (m *Entity) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Entity) GetSalience() float32 { + if m != nil { + return m.Salience + } + return 0 +} + +func (m *Entity) GetMentions() []*EntityMention { + if m != nil { + return m.Mentions + } + return nil +} + +func (m *Entity) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents the smallest syntactic building block of the text. +type Token struct { + // The token text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Parts of speech tag for this token. + PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech,proto3" json:"part_of_speech,omitempty"` + // Dependency tree parse for this token. + DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge,proto3" json:"dependency_edge,omitempty"` + // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token. 
+ Lemma string `protobuf:"bytes,4,opt,name=lemma,proto3" json:"lemma,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Token) Reset() { *m = Token{} } +func (m *Token) String() string { return proto.CompactTextString(m) } +func (*Token) ProtoMessage() {} +func (*Token) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{3} +} +func (m *Token) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Token.Unmarshal(m, b) +} +func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Token.Marshal(b, m, deterministic) +} +func (dst *Token) XXX_Merge(src proto.Message) { + xxx_messageInfo_Token.Merge(dst, src) +} +func (m *Token) XXX_Size() int { + return xxx_messageInfo_Token.Size(m) +} +func (m *Token) XXX_DiscardUnknown() { + xxx_messageInfo_Token.DiscardUnknown(m) +} + +var xxx_messageInfo_Token proto.InternalMessageInfo + +func (m *Token) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Token) GetPartOfSpeech() *PartOfSpeech { + if m != nil { + return m.PartOfSpeech + } + return nil +} + +func (m *Token) GetDependencyEdge() *DependencyEdge { + if m != nil { + return m.DependencyEdge + } + return nil +} + +func (m *Token) GetLemma() string { + if m != nil { + return m.Lemma + } + return "" +} + +// Represents the feeling associated with the entire text or entities in +// the text. +type Sentiment struct { + // A non-negative number in the [0, +inf) range, which represents + // the absolute magnitude of sentiment regardless of score (positive or + // negative). + Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"` + // Sentiment score between -1.0 (negative sentiment) and 1.0 + // (positive sentiment). + Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentiment) Reset() { *m = Sentiment{} } +func (m *Sentiment) String() string { return proto.CompactTextString(m) } +func (*Sentiment) ProtoMessage() {} +func (*Sentiment) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{4} +} +func (m *Sentiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentiment.Unmarshal(m, b) +} +func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic) +} +func (dst *Sentiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentiment.Merge(dst, src) +} +func (m *Sentiment) XXX_Size() int { + return xxx_messageInfo_Sentiment.Size(m) +} +func (m *Sentiment) XXX_DiscardUnknown() { + xxx_messageInfo_Sentiment.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentiment proto.InternalMessageInfo + +func (m *Sentiment) GetMagnitude() float32 { + if m != nil { + return m.Magnitude + } + return 0 +} + +func (m *Sentiment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Represents part of speech information for a token. Parts of speech +// are as defined in +// http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf +type PartOfSpeech struct { + // The part of speech tag. + Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,proto3,enum=google.cloud.language.v1.PartOfSpeech_Tag" json:"tag,omitempty"` + // The grammatical aspect. 
+ Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,proto3,enum=google.cloud.language.v1.PartOfSpeech_Aspect" json:"aspect,omitempty"` + // The grammatical case. + Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,proto3,enum=google.cloud.language.v1.PartOfSpeech_Case" json:"case,omitempty"` + // The grammatical form. + Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,proto3,enum=google.cloud.language.v1.PartOfSpeech_Form" json:"form,omitempty"` + // The grammatical gender. + Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,proto3,enum=google.cloud.language.v1.PartOfSpeech_Gender" json:"gender,omitempty"` + // The grammatical mood. + Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,proto3,enum=google.cloud.language.v1.PartOfSpeech_Mood" json:"mood,omitempty"` + // The grammatical number. + Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,proto3,enum=google.cloud.language.v1.PartOfSpeech_Number" json:"number,omitempty"` + // The grammatical person. + Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,proto3,enum=google.cloud.language.v1.PartOfSpeech_Person" json:"person,omitempty"` + // The grammatical properness. + Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,proto3,enum=google.cloud.language.v1.PartOfSpeech_Proper" json:"proper,omitempty"` + // The grammatical reciprocity. + Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,proto3,enum=google.cloud.language.v1.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"` + // The grammatical tense. + Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,proto3,enum=google.cloud.language.v1.PartOfSpeech_Tense" json:"tense,omitempty"` + // The grammatical voice. + Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,proto3,enum=google.cloud.language.v1.PartOfSpeech_Voice" json:"voice,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartOfSpeech) Reset() { *m = PartOfSpeech{} } +func (m *PartOfSpeech) String() string { return proto.CompactTextString(m) } +func (*PartOfSpeech) ProtoMessage() {} +func (*PartOfSpeech) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{5} +} +func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartOfSpeech.Unmarshal(m, b) +} +func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartOfSpeech.Marshal(b, m, deterministic) +} +func (dst *PartOfSpeech) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartOfSpeech.Merge(dst, src) +} +func (m *PartOfSpeech) XXX_Size() int { + return xxx_messageInfo_PartOfSpeech.Size(m) +} +func (m *PartOfSpeech) XXX_DiscardUnknown() { + xxx_messageInfo_PartOfSpeech.DiscardUnknown(m) +} + +var xxx_messageInfo_PartOfSpeech proto.InternalMessageInfo + +func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag { + if m != nil { + return m.Tag + } + return PartOfSpeech_UNKNOWN +} + +func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect { + if m != nil { + return m.Aspect + } + return PartOfSpeech_ASPECT_UNKNOWN +} + +func (m *PartOfSpeech) GetCase() PartOfSpeech_Case { + if m != nil { + return m.Case + } + return PartOfSpeech_CASE_UNKNOWN +} + +func (m *PartOfSpeech) GetForm() PartOfSpeech_Form { + if m != nil { + return m.Form + } + return PartOfSpeech_FORM_UNKNOWN +} + +func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender { + if m != nil { + return 
m.Gender + } + return PartOfSpeech_GENDER_UNKNOWN +} + +func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood { + if m != nil { + return m.Mood + } + return PartOfSpeech_MOOD_UNKNOWN +} + +func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number { + if m != nil { + return m.Number + } + return PartOfSpeech_NUMBER_UNKNOWN +} + +func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person { + if m != nil { + return m.Person + } + return PartOfSpeech_PERSON_UNKNOWN +} + +func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper { + if m != nil { + return m.Proper + } + return PartOfSpeech_PROPER_UNKNOWN +} + +func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity { + if m != nil { + return m.Reciprocity + } + return PartOfSpeech_RECIPROCITY_UNKNOWN +} + +func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense { + if m != nil { + return m.Tense + } + return PartOfSpeech_TENSE_UNKNOWN +} + +func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice { + if m != nil { + return m.Voice + } + return PartOfSpeech_VOICE_UNKNOWN +} + +// Represents dependency parse tree information for a token. (For more +// information on dependency labels, see +// http://www.aclweb.org/anthology/P13-2017 +type DependencyEdge struct { + // Represents the head of this token in the dependency tree. + // This is the index of the token which has an arc going to this token. + // The index is the position of the token in the array of tokens returned + // by the API method. If this token is a root token, then the + // `head_token_index` is its own index. + HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex,proto3" json:"head_token_index,omitempty"` + // The parse label for the token. + Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,proto3,enum=google.cloud.language.v1.DependencyEdge_Label" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DependencyEdge) Reset() { *m = DependencyEdge{} } +func (m *DependencyEdge) String() string { return proto.CompactTextString(m) } +func (*DependencyEdge) ProtoMessage() {} +func (*DependencyEdge) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{6} +} +func (m *DependencyEdge) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DependencyEdge.Unmarshal(m, b) +} +func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DependencyEdge.Marshal(b, m, deterministic) +} +func (dst *DependencyEdge) XXX_Merge(src proto.Message) { + xxx_messageInfo_DependencyEdge.Merge(dst, src) +} +func (m *DependencyEdge) XXX_Size() int { + return xxx_messageInfo_DependencyEdge.Size(m) +} +func (m *DependencyEdge) XXX_DiscardUnknown() { + xxx_messageInfo_DependencyEdge.DiscardUnknown(m) +} + +var xxx_messageInfo_DependencyEdge proto.InternalMessageInfo + +func (m *DependencyEdge) GetHeadTokenIndex() int32 { + if m != nil { + return m.HeadTokenIndex + } + return 0 +} + +func (m *DependencyEdge) GetLabel() DependencyEdge_Label { + if m != nil { + return m.Label + } + return DependencyEdge_UNKNOWN +} + +// Represents a mention for an entity in the text. Currently, proper noun +// mentions are supported. +type EntityMention struct { + // The mention text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The type of the entity mention. 
+ Type EntityMention_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1.EntityMention_Type" json:"type,omitempty"` + // For calls to [AnalyzeEntitySentiment][] or if + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the sentiment expressed for this mention of + // the entity in the provided document. + Sentiment *Sentiment `protobuf:"bytes,3,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityMention) Reset() { *m = EntityMention{} } +func (m *EntityMention) String() string { return proto.CompactTextString(m) } +func (*EntityMention) ProtoMessage() {} +func (*EntityMention) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{7} +} +func (m *EntityMention) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityMention.Unmarshal(m, b) +} +func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityMention.Marshal(b, m, deterministic) +} +func (dst *EntityMention) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityMention.Merge(dst, src) +} +func (m *EntityMention) XXX_Size() int { + return xxx_messageInfo_EntityMention.Size(m) +} +func (m *EntityMention) XXX_DiscardUnknown() { + xxx_messageInfo_EntityMention.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityMention proto.InternalMessageInfo + +func (m *EntityMention) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *EntityMention) GetType() EntityMention_Type { + if m != nil { + return m.Type + } + return EntityMention_TYPE_UNKNOWN +} + +func (m *EntityMention) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents an output piece of text. +type TextSpan struct { + // The content of the output text. + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // The API calculates the beginning offset of the content in the original + // document according to the [EncodingType][google.cloud.language.v1.EncodingType] specified in the API request. 
+ BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset,proto3" json:"begin_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSpan) Reset() { *m = TextSpan{} } +func (m *TextSpan) String() string { return proto.CompactTextString(m) } +func (*TextSpan) ProtoMessage() {} +func (*TextSpan) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{8} +} +func (m *TextSpan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSpan.Unmarshal(m, b) +} +func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSpan.Marshal(b, m, deterministic) +} +func (dst *TextSpan) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSpan.Merge(dst, src) +} +func (m *TextSpan) XXX_Size() int { + return xxx_messageInfo_TextSpan.Size(m) +} +func (m *TextSpan) XXX_DiscardUnknown() { + xxx_messageInfo_TextSpan.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSpan proto.InternalMessageInfo + +func (m *TextSpan) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *TextSpan) GetBeginOffset() int32 { + if m != nil { + return m.BeginOffset + } + return 0 +} + +// Represents a category returned from the text classifier. +type ClassificationCategory struct { + // The name of the category representing the document, from the [predefined + // taxonomy](/natural-language/docs/categories). + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The classifier's confidence of the category. Number represents how certain + // the classifier is that this category represents the given text. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationCategory) Reset() { *m = ClassificationCategory{} } +func (m *ClassificationCategory) String() string { return proto.CompactTextString(m) } +func (*ClassificationCategory) ProtoMessage() {} +func (*ClassificationCategory) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{9} +} +func (m *ClassificationCategory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationCategory.Unmarshal(m, b) +} +func (m *ClassificationCategory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationCategory.Marshal(b, m, deterministic) +} +func (dst *ClassificationCategory) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationCategory.Merge(dst, src) +} +func (m *ClassificationCategory) XXX_Size() int { + return xxx_messageInfo_ClassificationCategory.Size(m) +} +func (m *ClassificationCategory) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationCategory.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationCategory proto.InternalMessageInfo + +func (m *ClassificationCategory) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ClassificationCategory) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// The sentiment analysis request message. +type AnalyzeSentimentRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate sentence offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentRequest) Reset() { *m = AnalyzeSentimentRequest{} } +func (m *AnalyzeSentimentRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentRequest) ProtoMessage() {} +func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{10} +} +func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentRequest.Unmarshal(m, b) +} +func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentRequest.Merge(dst, src) +} +func (m *AnalyzeSentimentRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentRequest.Size(m) +} +func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentRequest proto.InternalMessageInfo + +func (m *AnalyzeSentimentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The sentiment analysis response message. +type AnalyzeSentimentResponse struct { + // The overall sentiment of the input document. + DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1.Document.language] field for more details. + Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + // The sentiment for all the sentences in the document. 
+ Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences,proto3" json:"sentences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentResponse) Reset() { *m = AnalyzeSentimentResponse{} } +func (m *AnalyzeSentimentResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentResponse) ProtoMessage() {} +func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{11} +} +func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentResponse.Unmarshal(m, b) +} +func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentResponse.Merge(dst, src) +} +func (m *AnalyzeSentimentResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentResponse.Size(m) +} +func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentResponse proto.InternalMessageInfo + +func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnalyzeSentimentResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +// The entity-level sentiment analysis request message. +type AnalyzeEntitySentimentRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitySentimentRequest) Reset() { *m = AnalyzeEntitySentimentRequest{} } +func (m *AnalyzeEntitySentimentRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitySentimentRequest) ProtoMessage() {} +func (*AnalyzeEntitySentimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{12} +} +func (m *AnalyzeEntitySentimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Unmarshal(m, b) +} +func (m *AnalyzeEntitySentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitySentimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitySentimentRequest.Merge(dst, src) +} +func (m *AnalyzeEntitySentimentRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Size(m) +} +func (m *AnalyzeEntitySentimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitySentimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitySentimentRequest proto.InternalMessageInfo + +func (m *AnalyzeEntitySentimentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeEntitySentimentRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The entity-level sentiment analysis response message. +type AnalyzeEntitySentimentResponse struct { + // The recognized entities in the input document with associated sentiments. + Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1.Document.language] field for more details. 
+ Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitySentimentResponse) Reset() { *m = AnalyzeEntitySentimentResponse{} } +func (m *AnalyzeEntitySentimentResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitySentimentResponse) ProtoMessage() {} +func (*AnalyzeEntitySentimentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{13} +} +func (m *AnalyzeEntitySentimentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Unmarshal(m, b) +} +func (m *AnalyzeEntitySentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitySentimentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitySentimentResponse.Merge(dst, src) +} +func (m *AnalyzeEntitySentimentResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Size(m) +} +func (m *AnalyzeEntitySentimentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitySentimentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitySentimentResponse proto.InternalMessageInfo + +func (m *AnalyzeEntitySentimentResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnalyzeEntitySentimentResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The entity analysis request message. +type AnalyzeEntitiesRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesRequest) Reset() { *m = AnalyzeEntitiesRequest{} } +func (m *AnalyzeEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesRequest) ProtoMessage() {} +func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{14} +} +func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesRequest.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesRequest.Merge(dst, src) +} +func (m *AnalyzeEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesRequest.Size(m) +} +func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesRequest proto.InternalMessageInfo + +func (m *AnalyzeEntitiesRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The entity analysis response message. +type AnalyzeEntitiesResponse struct { + // The recognized entities in the input document. + Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1.Document.language] field for more details. 
+ Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesResponse) Reset() { *m = AnalyzeEntitiesResponse{} } +func (m *AnalyzeEntitiesResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesResponse) ProtoMessage() {} +func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{15} +} +func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesResponse.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesResponse.Merge(dst, src) +} +func (m *AnalyzeEntitiesResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesResponse.Size(m) +} +func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesResponse proto.InternalMessageInfo + +func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnalyzeEntitiesResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The syntax analysis request message. +type AnalyzeSyntaxRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. + EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxRequest) Reset() { *m = AnalyzeSyntaxRequest{} } +func (m *AnalyzeSyntaxRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxRequest) ProtoMessage() {} +func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{16} +} +func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxRequest.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxRequest.Merge(dst, src) +} +func (m *AnalyzeSyntaxRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxRequest.Size(m) +} +func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxRequest proto.InternalMessageInfo + +func (m *AnalyzeSyntaxRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The syntax analysis response message. +type AnalyzeSyntaxResponse struct { + // Sentences in the input document. 
+ Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1.Document.language] field for more details. + Language string `protobuf:"bytes,3,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxResponse) Reset() { *m = AnalyzeSyntaxResponse{} } +func (m *AnalyzeSyntaxResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxResponse) ProtoMessage() {} +func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{17} +} +func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxResponse.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxResponse.Merge(dst, src) +} +func (m *AnalyzeSyntaxResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxResponse.Size(m) +} +func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxResponse proto.InternalMessageInfo + +func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The document classification request message. +type ClassifyTextRequest struct { + // Input document. 
+ Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassifyTextRequest) Reset() { *m = ClassifyTextRequest{} } +func (m *ClassifyTextRequest) String() string { return proto.CompactTextString(m) } +func (*ClassifyTextRequest) ProtoMessage() {} +func (*ClassifyTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{18} +} +func (m *ClassifyTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassifyTextRequest.Unmarshal(m, b) +} +func (m *ClassifyTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassifyTextRequest.Marshal(b, m, deterministic) +} +func (dst *ClassifyTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassifyTextRequest.Merge(dst, src) +} +func (m *ClassifyTextRequest) XXX_Size() int { + return xxx_messageInfo_ClassifyTextRequest.Size(m) +} +func (m *ClassifyTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ClassifyTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassifyTextRequest proto.InternalMessageInfo + +func (m *ClassifyTextRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +// The document classification response message. +type ClassifyTextResponse struct { + // Categories representing the input document. + Categories []*ClassificationCategory `protobuf:"bytes,1,rep,name=categories,proto3" json:"categories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassifyTextResponse) Reset() { *m = ClassifyTextResponse{} } +func (m *ClassifyTextResponse) String() string { return proto.CompactTextString(m) } +func (*ClassifyTextResponse) ProtoMessage() {} +func (*ClassifyTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{19} +} +func (m *ClassifyTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassifyTextResponse.Unmarshal(m, b) +} +func (m *ClassifyTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassifyTextResponse.Marshal(b, m, deterministic) +} +func (dst *ClassifyTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassifyTextResponse.Merge(dst, src) +} +func (m *ClassifyTextResponse) XXX_Size() int { + return xxx_messageInfo_ClassifyTextResponse.Size(m) +} +func (m *ClassifyTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ClassifyTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassifyTextResponse proto.InternalMessageInfo + +func (m *ClassifyTextResponse) GetCategories() []*ClassificationCategory { + if m != nil { + return m.Categories + } + return nil +} + +// The request message for the text annotation API, which can perform multiple +// analysis types (sentiment, entities, and syntax) in one call. +type AnnotateTextRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The enabled features. + Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features,proto3" json:"features,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest) Reset() { *m = AnnotateTextRequest{} } +func (m *AnnotateTextRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest) ProtoMessage() {} +func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{20} +} +func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest.Unmarshal(m, b) +} +func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest.Merge(dst, src) +} +func (m *AnnotateTextRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest.Size(m) +} +func (m *AnnotateTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest proto.InternalMessageInfo + +func (m *AnnotateTextRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateTextRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// All available features for sentiment, syntax, and semantic analysis. +// Setting each one to true will enable that specific analysis for the input. +type AnnotateTextRequest_Features struct { + // Extract syntax information. + ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax,proto3" json:"extract_syntax,omitempty"` + // Extract entities. + ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities,proto3" json:"extract_entities,omitempty"` + // Extract document-level sentiment. + ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment,proto3" json:"extract_document_sentiment,omitempty"` + // Extract entities and their associated sentiment. + ExtractEntitySentiment bool `protobuf:"varint,4,opt,name=extract_entity_sentiment,json=extractEntitySentiment,proto3" json:"extract_entity_sentiment,omitempty"` + // Classify the full document into categories. 
+ ClassifyText bool `protobuf:"varint,6,opt,name=classify_text,json=classifyText,proto3" json:"classify_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest_Features) Reset() { *m = AnnotateTextRequest_Features{} } +func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest_Features) ProtoMessage() {} +func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{20, 0} +} +func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest_Features.Unmarshal(m, b) +} +func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest_Features.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest_Features) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest_Features.Merge(dst, src) +} +func (m *AnnotateTextRequest_Features) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest_Features.Size(m) +} +func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest_Features.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest_Features proto.InternalMessageInfo + +func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool { + if m != nil { + return m.ExtractSyntax + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractEntities() bool { + if m != nil { + return m.ExtractEntities + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool { + if m != nil { + return m.ExtractDocumentSentiment + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractEntitySentiment() bool { + if m != nil { + return m.ExtractEntitySentiment + } + return false +} + +func (m *AnnotateTextRequest_Features) GetClassifyText() bool { + if m != nil { + return m.ClassifyText + } + return false +} + +// The text annotations response message. +type AnnotateTextResponse struct { + // Sentences in the input document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax]. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // Entities, along with their semantic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities]. + Entities []*Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + // The overall sentiment for the document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment]. 
+ DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1.Document.language] field for more details. + Language string `protobuf:"bytes,5,opt,name=language,proto3" json:"language,omitempty"` + // Categories identified in the input document. + Categories []*ClassificationCategory `protobuf:"bytes,6,rep,name=categories,proto3" json:"categories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextResponse) Reset() { *m = AnnotateTextResponse{} } +func (m *AnnotateTextResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextResponse) ProtoMessage() {} +func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_5dd1bad015472f8e, []int{21} +} +func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextResponse.Unmarshal(m, b) +} +func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextResponse.Merge(dst, src) +} +func (m *AnnotateTextResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateTextResponse.Size(m) +} +func (m *AnnotateTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextResponse proto.InternalMessageInfo + +func (m *AnnotateTextResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnnotateTextResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnnotateTextResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnnotateTextResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func (m *AnnotateTextResponse) GetCategories() []*ClassificationCategory { + if m != nil { + return m.Categories + } + return nil +} + +func init() { + proto.RegisterType((*Document)(nil), "google.cloud.language.v1.Document") + proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1.Sentence") + proto.RegisterType((*Entity)(nil), "google.cloud.language.v1.Entity") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.language.v1.Entity.MetadataEntry") + proto.RegisterType((*Token)(nil), "google.cloud.language.v1.Token") + proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1.Sentiment") + proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1.PartOfSpeech") + proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1.DependencyEdge") + proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1.EntityMention") + proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1.TextSpan") + proto.RegisterType((*ClassificationCategory)(nil), "google.cloud.language.v1.ClassificationCategory") + proto.RegisterType((*AnalyzeSentimentRequest)(nil), 
"google.cloud.language.v1.AnalyzeSentimentRequest") + proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1.AnalyzeSentimentResponse") + proto.RegisterType((*AnalyzeEntitySentimentRequest)(nil), "google.cloud.language.v1.AnalyzeEntitySentimentRequest") + proto.RegisterType((*AnalyzeEntitySentimentResponse)(nil), "google.cloud.language.v1.AnalyzeEntitySentimentResponse") + proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1.AnalyzeEntitiesRequest") + proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1.AnalyzeEntitiesResponse") + proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1.AnalyzeSyntaxRequest") + proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1.AnalyzeSyntaxResponse") + proto.RegisterType((*ClassifyTextRequest)(nil), "google.cloud.language.v1.ClassifyTextRequest") + proto.RegisterType((*ClassifyTextResponse)(nil), "google.cloud.language.v1.ClassifyTextResponse") + proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1.AnnotateTextRequest") + proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1.AnnotateTextRequest.Features") + proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1.AnnotateTextResponse") + proto.RegisterEnum("google.cloud.language.v1.EncodingType", EncodingType_name, EncodingType_value) + proto.RegisterEnum("google.cloud.language.v1.Document_Type", Document_Type_name, Document_Type_value) + proto.RegisterEnum("google.cloud.language.v1.Entity_Type", Entity_Type_name, Entity_Type_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value) + proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value) + proto.RegisterEnum("google.cloud.language.v1.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value) + proto.RegisterEnum("google.cloud.language.v1.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value) +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LanguageServiceClient is the client API for LanguageService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LanguageServiceClient interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. + AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) + // Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + // sentiment associated with each entity and its mentions. + AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) + // Classifies a document into categories. + ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error) + // A convenience method that provides all the features that analyzeSentiment, + // analyzeEntities, and analyzeSyntax provide in one call. + AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) +} + +type languageServiceClient struct { + cc *grpc.ClientConn +} + +func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient { + return &languageServiceClient{cc} +} + +func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) { + out := new(AnalyzeSentimentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) { + out := new(AnalyzeEntitiesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) { + out := new(AnalyzeEntitySentimentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) { + out := new(AnalyzeSyntaxResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error) { + out := new(ClassifyTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/ClassifyText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) { + out := new(AnnotateTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnnotateText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LanguageServiceServer is the server API for LanguageService service. +type LanguageServiceServer interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. + AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error) + // Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1.LanguageService.AnalyzeEntities] in the text and analyzes + // sentiment associated with each entity and its mentions. + AnalyzeEntitySentiment(context.Context, *AnalyzeEntitySentimentRequest) (*AnalyzeEntitySentimentResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) + // Classifies a document into categories. + ClassifyText(context.Context, *ClassifyTextRequest) (*ClassifyTextResponse, error) + // A convenience method that provides all the features that analyzeSentiment, + // analyzeEntities, and analyzeSyntax provide in one call. 
+ AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error) +} + +func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) { + s.RegisterService(&_LanguageService_serviceDesc, srv) +} + +func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSentimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeEntitySentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeEntitySentimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeEntitySentiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, req.(*AnalyzeEntitySentimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSyntaxRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_ClassifyText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ClassifyTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).ClassifyText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.language.v1.LanguageService/ClassifyText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).ClassifyText(ctx, req.(*ClassifyTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnnotateText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1.LanguageService/AnnotateText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LanguageService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.language.v1.LanguageService", + HandlerType: (*LanguageServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnalyzeSentiment", + Handler: _LanguageService_AnalyzeSentiment_Handler, + }, + { + MethodName: "AnalyzeEntities", + Handler: _LanguageService_AnalyzeEntities_Handler, + }, + { + MethodName: "AnalyzeEntitySentiment", + Handler: _LanguageService_AnalyzeEntitySentiment_Handler, + }, + { + MethodName: "AnalyzeSyntax", + Handler: _LanguageService_AnalyzeSyntax_Handler, + }, + { + MethodName: "ClassifyText", + Handler: _LanguageService_ClassifyText_Handler, + }, + { + MethodName: "AnnotateText", + Handler: _LanguageService_AnnotateText_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/language/v1/language_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/language/v1/language_service.proto", fileDescriptor_language_service_5dd1bad015472f8e) +} + +var fileDescriptor_language_service_5dd1bad015472f8e = []byte{ + // 3123 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x3a, 0xcb, 0x8f, 0xdb, 0xc6, + 0xdd, 0xa6, 0x5e, 0x2b, 0x8d, 0x76, 0xd7, 0x63, 0xda, 0xb1, 0xf5, 0x29, 0x8e, 0xed, 0xd0, 0x79, + 0x38, 0x8e, 0xad, 0xb5, 0x37, 0xdf, 0xe7, 0x38, 0xb6, 0xbf, 0x2f, 0xe6, 0x92, 0x23, 0x2d, 0xd7, + 0x14, 0x49, 0x0f, 0x49, 0x79, 0xe3, 0x0f, 0x85, 0x40, 0x4b, 0x5c, 0x59, 0xc8, 0x2e, 0xa9, 0x8a, + 0x5c, 0xc7, 0x9b, 0x4b, 0x81, 0x02, 0x3d, 0xf6, 0x50, 0xf4, 0x0f, 0xe8, 0xb5, 0x2f, 0xf4, 0x71, + 0x2c, 0xd0, 0x43, 0x0f, 0x3d, 0x14, 0x41, 0x4f, 0xed, 0x2d, 0xa7, 0x1c, 0x7a, 0xeb, 0xbd, 0x68, + 0x81, 0xb6, 0x28, 0x7e, 0x33, 0xa4, 0x44, 0xad, 0x77, 0x6d, 0xad, 0x9b, 0xa2, 0xe9, 0x6d, 0xe6, + 0xc7, 0xdf, 0x7b, 0x7e, 0xaf, 0x19, 0x09, 0xad, 0x0c, 0xc2, 0x70, 0xb0, 0xed, 0xaf, 0xf4, 0xb6, + 0xc3, 0xdd, 0xfe, 0xca, 0xb6, 0x17, 0x0c, 0x76, 0xbd, 0x81, 0xbf, 0xf2, 0xe4, 0xfa, 0x64, 0xdd, + 0x8d, 0xfc, 0xf1, 0x93, 0x61, 0xcf, 0x6f, 0x8c, 0xc6, 0x61, 0x1c, 0x8a, 0x35, 0x4e, 0xd0, 0x60, + 0x04, 0x8d, 0x14, 0xa9, 0xf1, 0xe4, 0x7a, 0xfd, 0x6c, 0xc2, 0xca, 0x1b, 0x0d, 0x57, 0xbc, 0x20, + 0x08, 0x63, 0x2f, 0x1e, 0x86, 0x41, 0xc4, 0xe9, 0xea, 0x67, 0x32, 0x5f, 0x7b, 0xdb, 0x43, 0x3f, + 0x88, 0x93, 0x0f, 0xe7, 0x33, 0x1f, 0xb6, 0x86, 0xfe, 0x76, 0xbf, 0xfb, 0xc8, 0x7f, 0xec, 0x3d, + 0x19, 0x86, 0x63, 0x8e, 0x20, 0xfd, 0x51, 0x40, 0x65, 0x35, 0xec, 0xed, 0xee, 0xf8, 0x41, 0x2c, + 0xde, 0x46, 0x85, 0x78, 0x6f, 0xe4, 0xd7, 0x84, 0x0b, 0xc2, 0xa5, 0xe5, 0xd5, 0xb7, 0x1b, 
0x87, + 0x69, 0xd3, 0x48, 0x29, 0x1a, 0xce, 0xde, 0xc8, 0xa7, 0x8c, 0x48, 0xac, 0xa3, 0x85, 0x5e, 0x18, + 0xc4, 0x7e, 0x10, 0xd7, 0x72, 0x17, 0x84, 0x4b, 0x95, 0xf5, 0x63, 0x34, 0x05, 0x88, 0x97, 0xd0, + 0xf1, 0x41, 0x2f, 0xea, 0x26, 0xdb, 0xee, 0xee, 0x78, 0x58, 0xcb, 0x27, 0x38, 0x4b, 0x83, 0x5e, + 0xa4, 0x70, 0xb8, 0x3b, 0x1e, 0x8a, 0x75, 0x54, 0x4e, 0x05, 0xd5, 0x0a, 0x80, 0x42, 0x27, 0x7b, + 0xe9, 0x06, 0x2a, 0x80, 0x3c, 0xf1, 0x14, 0xc2, 0xce, 0x47, 0x16, 0xe9, 0xba, 0x86, 0x6d, 0x11, + 0x45, 0x6b, 0x6a, 0x44, 0xc5, 0xc7, 0xc4, 0x65, 0x84, 0x2c, 0x5d, 0xd6, 0x8c, 0xae, 0x43, 0x36, + 0x1d, 0x2c, 0x88, 0x65, 0x54, 0x58, 0x77, 0xda, 0x3a, 0xce, 0xad, 0x95, 0x51, 0x29, 0x0a, 0x77, + 0xc7, 0x3d, 0x5f, 0xfa, 0x96, 0x80, 0xca, 0xb6, 0x0f, 0xc2, 0x7a, 0xbe, 0x78, 0x03, 0x15, 0x62, + 0xff, 0x69, 0xcc, 0xac, 0xad, 0xae, 0x4a, 0x87, 0x5b, 0xeb, 0xf8, 0x4f, 0x63, 0x7b, 0xe4, 0x05, + 0x94, 0xe1, 0x8b, 0x32, 0xaa, 0x44, 0x7e, 0x10, 0x0f, 0x77, 0x52, 0x53, 0xab, 0xab, 0x17, 0x0f, + 0x27, 0xb6, 0x53, 0x54, 0x3a, 0xa5, 0x92, 0x7e, 0x55, 0x40, 0x25, 0x12, 0xc4, 0xc3, 0x78, 0x4f, + 0x14, 0x51, 0x21, 0xf0, 0x76, 0xb8, 0xcf, 0x2b, 0x94, 0xad, 0xc5, 0x0f, 0x92, 0x73, 0xc8, 0xb1, + 0x73, 0x78, 0xf3, 0x70, 0xe6, 0x9c, 0x47, 0xf6, 0x14, 0x36, 0x50, 0x79, 0xc7, 0x8f, 0xbd, 0xbe, + 0x17, 0x7b, 0xb5, 0xfc, 0x85, 0xfc, 0xa5, 0xea, 0x6a, 0xe3, 0x85, 0xe4, 0xed, 0x84, 0x80, 0x04, + 0xf1, 0x78, 0x8f, 0x4e, 0xe8, 0xe1, 0x2c, 0x22, 0x0f, 0xa2, 0xa9, 0xc7, 0xcf, 0x22, 0x47, 0x27, + 0x7b, 0x51, 0x01, 0x39, 0x01, 0x8b, 0xc1, 0x5a, 0x91, 0xc9, 0x79, 0xfb, 0x45, 0x72, 0xda, 0x1c, + 0x9f, 0x4e, 0x08, 0x67, 0x3d, 0x59, 0x7a, 0x19, 0x4f, 0xd6, 0x6f, 0xa3, 0xa5, 0x19, 0xf5, 0x45, + 0x8c, 0xf2, 0x1f, 0xfb, 0x7b, 0x89, 0x3b, 0x61, 0x29, 0x9e, 0x42, 0xc5, 0x27, 0xde, 0xf6, 0x2e, + 0x77, 0x67, 0x85, 0xf2, 0xcd, 0xad, 0xdc, 0x4d, 0x41, 0xfa, 0x85, 0x90, 0x44, 0x54, 0x15, 0x2d, + 0xb8, 0xc6, 0x3d, 0xc3, 0x7c, 0x60, 0xe0, 0x63, 0x22, 0x42, 0x25, 0x8b, 0x50, 0xdb, 0x34, 0xb0, + 0x20, 0x2e, 0xa2, 0xb2, 0x6e, 0x2a, 0xb2, 0xa3, 0x99, 0x06, 0xce, 0x89, 0x18, 0x2d, 0x9a, 0xb4, + 0x25, 0x1b, 0xda, 0x43, 0x0e, 0xc9, 0x8b, 0x15, 0x54, 0x24, 0x1d, 0x62, 0x38, 0xb8, 0x20, 0x1e, + 0x47, 0xd5, 0x07, 0x26, 0xbd, 0xd7, 0x35, 0x9b, 0x5d, 0x99, 0x3a, 0xb8, 0x28, 0x9e, 0x40, 0x4b, + 0x8a, 0x69, 0xd8, 0x6e, 0x9b, 0xd0, 0x6e, 0xcb, 0x34, 0x55, 0x5c, 0x02, 0x74, 0xd3, 0x59, 0x27, + 0x14, 0x2f, 0x00, 0x2f, 0x6b, 0xdd, 0x34, 0x48, 0xd7, 0x70, 0xdb, 0x6b, 0x84, 0xe2, 0x0a, 0x28, + 0x21, 0xab, 0x2a, 0x25, 0xb6, 0x8d, 0x11, 0x44, 0xaf, 0x2a, 0x3b, 0x04, 0x57, 0x41, 0x9d, 0x04, + 0x65, 0x11, 0xe8, 0x2d, 0xaa, 0x29, 0x04, 0x2f, 0x49, 0x7f, 0x12, 0x50, 0xd1, 0x09, 0x3f, 0xf6, + 0x83, 0x97, 0x8e, 0x63, 0x1d, 0x2d, 0x8f, 0xbc, 0x71, 0xdc, 0x0d, 0xb7, 0xba, 0xd1, 0xc8, 0xf7, + 0x7b, 0x8f, 0x93, 0x60, 0x7e, 0xeb, 0x70, 0x0e, 0x96, 0x37, 0x8e, 0xcd, 0x2d, 0x9b, 0x61, 0xd3, + 0xc5, 0x51, 0x66, 0x27, 0xde, 0x47, 0xc7, 0xfb, 0xfe, 0xc8, 0x0f, 0xfa, 0x7e, 0xd0, 0xdb, 0xeb, + 0xfa, 0xfd, 0x81, 0xcf, 0x52, 0xbc, 0xba, 0x7a, 0xe9, 0x39, 0x65, 0x64, 0x42, 0x40, 0xfa, 0x03, + 0x9f, 0x2e, 0xf7, 0x67, 0xf6, 0x70, 0x70, 0xdb, 0xfe, 0xce, 0x8e, 0x97, 0x14, 0x02, 0xbe, 0x91, + 0x3e, 0x44, 0x95, 0x49, 0x24, 0x88, 0x67, 0x51, 0x65, 0xc7, 0x1b, 0x04, 0xc3, 0x78, 0xb7, 0xcf, + 0xcf, 0x37, 0x47, 0xa7, 0x00, 0x60, 0x10, 0xf5, 0xc2, 0x31, 0xd7, 0x24, 0x47, 0xf9, 0x46, 0xfa, + 0x1b, 0xb8, 0x3e, 0xab, 0xfa, 0x1d, 0x94, 0x8f, 0xbd, 0x41, 0x52, 0xf5, 0x2e, 0xcf, 0x67, 0x7d, + 0xc3, 0xf1, 0x06, 0x14, 0xc8, 0x44, 0x82, 0x4a, 0x5e, 0x34, 0xf2, 0x7b, 0x71, 0x92, 0xae, 0x57, + 0xe7, 0x64, 0x20, 
0x33, 0x22, 0x9a, 0x10, 0x8b, 0x1f, 0xa2, 0x42, 0xcf, 0x8b, 0xb8, 0xaa, 0xcb, + 0xab, 0xef, 0xce, 0xc9, 0x44, 0xf1, 0x22, 0x9f, 0x32, 0x42, 0x60, 0xb0, 0x15, 0x8e, 0x77, 0x98, + 0xb3, 0xe6, 0x67, 0xd0, 0x0c, 0xc7, 0x3b, 0x94, 0x11, 0x82, 0x21, 0x03, 0x70, 0xff, 0xb8, 0x56, + 0x3c, 0x92, 0x21, 0x2d, 0x46, 0x44, 0x13, 0x62, 0xd0, 0x63, 0x27, 0x0c, 0xfb, 0x2c, 0x9f, 0xe7, + 0xd7, 0xa3, 0x1d, 0x86, 0x7d, 0xca, 0x08, 0x41, 0x8f, 0x60, 0x77, 0xe7, 0x91, 0x3f, 0xae, 0x2d, + 0x1c, 0x49, 0x0f, 0x83, 0x11, 0xd1, 0x84, 0x18, 0xd8, 0x8c, 0xfc, 0x71, 0x14, 0x06, 0xb5, 0xf2, + 0x91, 0xd8, 0x58, 0x8c, 0x88, 0x26, 0xc4, 0x8c, 0xcd, 0x38, 0x1c, 0xf9, 0xe3, 0x5a, 0xe5, 0x68, + 0x6c, 0x18, 0x11, 0x4d, 0x88, 0x45, 0x07, 0x55, 0xc7, 0x7e, 0x6f, 0x38, 0x1a, 0x87, 0xbd, 0x61, + 0xbc, 0x57, 0x43, 0x8c, 0xd7, 0xea, 0x9c, 0xbc, 0xe8, 0x94, 0x92, 0x66, 0xd9, 0x88, 0x6b, 0xa8, + 0x18, 0xfb, 0x41, 0xe4, 0xd7, 0xaa, 0x8c, 0xdf, 0x95, 0x79, 0x63, 0x17, 0x68, 0x28, 0x27, 0x05, + 0x1e, 0x4f, 0xc2, 0x61, 0xcf, 0xaf, 0x2d, 0x1e, 0x89, 0x47, 0x07, 0x68, 0x28, 0x27, 0x95, 0xbe, + 0x2d, 0xa0, 0xbc, 0xe3, 0x0d, 0x66, 0xeb, 0xe8, 0x02, 0xca, 0xcb, 0xea, 0x06, 0x16, 0xf8, 0xc2, + 0xc2, 0x39, 0xbe, 0xe8, 0xe0, 0x3c, 0x54, 0x37, 0xc5, 0x34, 0x36, 0x70, 0x01, 0x40, 0x2a, 0x81, + 0x6a, 0x59, 0x46, 0x05, 0xc3, 0x74, 0x0d, 0x5c, 0x02, 0x90, 0xe1, 0xb6, 0xf1, 0x02, 0x80, 0x2c, + 0x6a, 0x1a, 0xb8, 0x0c, 0x20, 0x8b, 0x3a, 0xb8, 0xc2, 0x0a, 0xa0, 0x6b, 0x28, 0x0e, 0xaf, 0x90, + 0x1d, 0x42, 0xd7, 0x70, 0x55, 0x2c, 0x22, 0x61, 0x93, 0x17, 0x47, 0xb9, 0xd9, 0xd4, 0x36, 0xf1, + 0x92, 0x64, 0xa2, 0x12, 0x4f, 0x2f, 0x51, 0x44, 0xcb, 0x32, 0x4c, 0x09, 0x4e, 0x77, 0xaa, 0x18, + 0x4c, 0x0a, 0x84, 0x36, 0x89, 0xe2, 0x68, 0x1d, 0x82, 0x05, 0x28, 0xc5, 0x5a, 0x3b, 0x03, 0xc9, + 0x41, 0x2d, 0xb7, 0xa8, 0xd9, 0x82, 0x5a, 0x0c, 0x80, 0xbc, 0xf4, 0x67, 0x01, 0x15, 0x20, 0xd7, + 0x00, 0x57, 0x91, 0x6d, 0x32, 0xcb, 0x4d, 0x56, 0x14, 0xd7, 0x96, 0x13, 0x6e, 0x4b, 0xa8, 0x22, + 0xab, 0xa0, 0x99, 0x26, 0xeb, 0x38, 0xc7, 0xbb, 0x40, 0xdb, 0xd2, 0x49, 0x9b, 0x18, 0x0c, 0x23, + 0x0f, 0x15, 0x5d, 0xe5, 0xd8, 0x05, 0x68, 0x30, 0x2d, 0x62, 0x68, 0x6c, 0x57, 0x64, 0x9a, 0x18, + 0xb6, 0x43, 0x5d, 0x40, 0x96, 0x75, 0x5c, 0x9a, 0x36, 0xa0, 0x0e, 0xc1, 0x0b, 0x20, 0xcb, 0x30, + 0xdb, 0x9a, 0xc1, 0xf7, 0x65, 0xf0, 0xb7, 0xb9, 0xa6, 0x6b, 0xf7, 0x5d, 0x82, 0x2b, 0x20, 0xd8, + 0x92, 0xa9, 0xc3, 0x79, 0x21, 0x10, 0x6c, 0x51, 0x62, 0x99, 0xb6, 0x06, 0xbd, 0x4a, 0xd6, 0x71, + 0x15, 0x9c, 0x41, 0x49, 0x53, 0x27, 0x9b, 0x5a, 0x87, 0x74, 0xc1, 0x0c, 0xbc, 0x08, 0x68, 0x94, + 0xe8, 0x8c, 0x21, 0x07, 0x2d, 0x81, 0xcc, 0x4e, 0x2a, 0x73, 0x59, 0xfa, 0xa9, 0x80, 0x0a, 0x50, + 0x25, 0x40, 0xb9, 0xa6, 0x49, 0xdb, 0x19, 0xd3, 0x17, 0x51, 0x59, 0x56, 0x41, 0x21, 0x59, 0x4f, + 0x0c, 0x77, 0x37, 0x35, 0x5d, 0x93, 0xe9, 0x47, 0x38, 0x07, 0xc2, 0x32, 0x86, 0x3f, 0x24, 0x14, + 0xe7, 0x19, 0x0b, 0xcd, 0x90, 0xf5, 0x2e, 0x31, 0x54, 0xcd, 0x68, 0xe1, 0x02, 0xf8, 0xa2, 0x45, + 0xa8, 0x6b, 0xa8, 0xb8, 0x08, 0x6b, 0x4a, 0x64, 0x5d, 0xb3, 0xb9, 0xdd, 0x1a, 0x4d, 0x76, 0x0b, + 0x70, 0xb4, 0xf6, 0xba, 0x49, 0x1d, 0x5c, 0x86, 0x63, 0xd7, 0x4d, 0xa3, 0xc5, 0x63, 0xc1, 0xa4, + 0x2a, 0xa1, 0x18, 0x01, 0x76, 0x32, 0x0a, 0x2a, 0xb8, 0x2a, 0x11, 0x54, 0xe2, 0x35, 0x09, 0x74, + 0x68, 0x11, 0x43, 0x25, 0x74, 0x56, 0xe9, 0x26, 0x69, 0x6b, 0x86, 0x66, 0x24, 0xa7, 0xd5, 0x96, + 0x6d, 0xc5, 0xd5, 0x61, 0x9b, 0x63, 0xcd, 0x96, 0xb8, 0x0e, 0x28, 0x2b, 0x7d, 0x03, 0x15, 0xa0, + 0x2a, 0x81, 0xd2, 0x6d, 0xd3, 0x54, 0x33, 0x2c, 0x4e, 0x21, 0xac, 0x98, 0x86, 0x9a, 0x38, 0xb6, + 0x0b, 0x5f, 0xb1, 0x00, 0x87, 0xc3, 0xc2, 
0x48, 0x4e, 0x82, 0x08, 0xf6, 0x86, 0xaa, 0x25, 0x8e, + 0xcc, 0x83, 0xa7, 0x35, 0xc3, 0x21, 0x94, 0x9a, 0xad, 0xf4, 0xf4, 0xab, 0x68, 0x61, 0xc3, 0xe5, + 0x31, 0x56, 0x84, 0xa0, 0xb3, 0xdd, 0xb5, 0x0d, 0x08, 0x6f, 0x00, 0x94, 0xa4, 0xbb, 0xa8, 0xc4, + 0x6b, 0x1a, 0xd8, 0xc1, 0x67, 0x80, 0x59, 0x3b, 0x6c, 0xcd, 0x68, 0xb9, 0xba, 0x4c, 0xb1, 0xc0, + 0x86, 0x16, 0xdd, 0xa5, 0x2c, 0xe4, 0x60, 0x76, 0x70, 0x65, 0x1d, 0xe7, 0x25, 0x07, 0x95, 0x78, + 0x39, 0x03, 0x0e, 0x7c, 0xa8, 0xc9, 0x70, 0xa8, 0xa0, 0x62, 0x53, 0xa3, 0xb6, 0xc3, 0xc9, 0x6d, + 0x02, 0x36, 0xe1, 0x1c, 0x80, 0x9d, 0x75, 0x8d, 0xaa, 0x38, 0x0f, 0x86, 0x4e, 0x03, 0x26, 0x19, + 0x8a, 0x0a, 0xd2, 0x4d, 0x54, 0xe2, 0xd5, 0x8d, 0x71, 0xa5, 0xa6, 0x35, 0xa3, 0x17, 0x68, 0xc2, + 0x60, 0xdc, 0x25, 0x86, 0xe9, 0x74, 0x93, 0x7d, 0x4e, 0xda, 0x40, 0xd5, 0x4c, 0x2d, 0x13, 0xcf, + 0xa0, 0x93, 0x94, 0x28, 0x9a, 0x45, 0x4d, 0x45, 0x73, 0x3e, 0x9a, 0xcd, 0xa9, 0xf4, 0x03, 0x0b, + 0x2d, 0xb0, 0xdf, 0x34, 0xba, 0x19, 0x58, 0x4e, 0x8a, 0x50, 0x91, 0xd5, 0x31, 0xf0, 0xab, 0x43, + 0x8c, 0x99, 0x9c, 0x7c, 0x05, 0x9d, 0xc8, 0x1e, 0x10, 0xfb, 0xcc, 0xad, 0x6c, 0xba, 0x8e, 0x4b, + 0x09, 0x77, 0x92, 0x25, 0xdb, 0x0e, 0xce, 0xc3, 0x21, 0x58, 0x94, 0xd8, 0x7c, 0x8a, 0x5b, 0x42, + 0x95, 0x49, 0x2d, 0xc0, 0x45, 0x7e, 0xa9, 0x70, 0xd3, 0x7d, 0x49, 0x5a, 0x43, 0x45, 0x56, 0xf8, + 0x40, 0x68, 0xc7, 0xd4, 0x14, 0x32, 0x6b, 0xb8, 0xac, 0x4c, 0x8b, 0x80, 0x22, 0xa7, 0x35, 0x21, + 0xc7, 0x44, 0xc8, 0x69, 0x2d, 0xf9, 0x71, 0x19, 0x2d, 0xcf, 0x4e, 0x3e, 0xe2, 0x25, 0x84, 0x1f, + 0xfb, 0x5e, 0xbf, 0x1b, 0xc3, 0x40, 0xd7, 0x1d, 0x06, 0x7d, 0xff, 0x29, 0x1b, 0x47, 0x8a, 0x74, + 0x19, 0xe0, 0x6c, 0xce, 0xd3, 0x00, 0x2a, 0xaa, 0xa8, 0xb8, 0xed, 0x3d, 0xf2, 0xb7, 0x93, 0x61, + 0xa3, 0x31, 0xef, 0x70, 0xd5, 0xd0, 0x81, 0x8a, 0x72, 0x62, 0xe9, 0x07, 0x0b, 0xa8, 0xc8, 0x00, + 0xcf, 0x4c, 0xbe, 0xf2, 0xda, 0x1a, 0x25, 0x1d, 0x2c, 0xb0, 0x6a, 0x0a, 0xf9, 0xcb, 0x03, 0x42, + 0x56, 0x3b, 0x8a, 0xce, 0x4b, 0x97, 0xac, 0x76, 0xda, 0xa6, 0x8a, 0x0b, 0xe0, 0x41, 0x19, 0x56, + 0x45, 0x86, 0x60, 0x59, 0x26, 0xe4, 0x2d, 0x00, 0x1d, 0x07, 0x06, 0x5c, 0x28, 0xf6, 0xee, 0x26, + 0x2f, 0x52, 0xb2, 0xbb, 0x09, 0xf6, 0xe3, 0x8a, 0x58, 0x42, 0x39, 0x45, 0xc1, 0x08, 0x48, 0x14, + 0xc6, 0xbe, 0x3a, 0x69, 0x06, 0xac, 0x82, 0x2b, 0x90, 0x02, 0x78, 0x89, 0x39, 0x10, 0x96, 0x8c, + 0x6c, 0x99, 0xb7, 0x09, 0x0b, 0x1f, 0x4f, 0xfb, 0x05, 0x06, 0x04, 0x55, 0xb3, 0x15, 0xd3, 0xa5, + 0x36, 0xc1, 0x27, 0x58, 0xcc, 0x9b, 0x6b, 0x1b, 0x58, 0x84, 0x15, 0xd9, 0xb4, 0x74, 0x7c, 0x92, + 0xd5, 0x56, 0x93, 0xd8, 0x0f, 0x34, 0x67, 0x1d, 0x9f, 0x02, 0xb8, 0x06, 0x18, 0xaf, 0xc0, 0xaa, + 0x2d, 0xd3, 0x7b, 0xf8, 0x34, 0x70, 0x6b, 0x3f, 0x20, 0xf8, 0x0c, 0x5f, 0x74, 0x70, 0x8d, 0x35, + 0x1f, 0xd2, 0xc2, 0xff, 0x05, 0x8a, 0x1a, 0x06, 0xae, 0x03, 0x13, 0xc3, 0x4a, 0x6c, 0x7e, 0x15, + 0x34, 0x34, 0x98, 0x86, 0x67, 0x41, 0x01, 0x63, 0xa2, 0xe1, 0x6b, 0x69, 0xd7, 0x3a, 0x97, 0x99, + 0xd7, 0xcf, 0x43, 0x67, 0xb2, 0xf0, 0x85, 0xa4, 0x32, 0xcb, 0x8e, 0xbc, 0xa9, 0xd9, 0xf8, 0x75, + 0x1e, 0x0d, 0xd4, 0x01, 0x8e, 0x12, 0xeb, 0x68, 0xcc, 0x11, 0x17, 0x59, 0x48, 0x82, 0x86, 0x6f, + 0xf0, 0x95, 0x6d, 0xe3, 0x37, 0x19, 0xae, 0x69, 0x3b, 0xa0, 0xd3, 0x5b, 0x49, 0xa4, 0x32, 0xec, + 0xb7, 0x27, 0x1b, 0x63, 0x03, 0x5f, 0xe2, 0x49, 0x47, 0xc0, 0x33, 0xef, 0xf0, 0xb6, 0x49, 0x9a, + 0xf8, 0x72, 0xb2, 0xb2, 0xf0, 0xbb, 0xfc, 0xe2, 0x60, 0x1a, 0x3a, 0xbe, 0x92, 0xf6, 0xd2, 0xab, + 0x60, 0xa1, 0x65, 0xe3, 0x06, 0x58, 0x78, 0xdf, 0x95, 0x0d, 0xa6, 0xcf, 0x0a, 0x60, 0x52, 0x05, + 0x96, 0xd7, 0xe0, 0x03, 0x5b, 0x52, 0xa2, 0xe3, 0xeb, 0xec, 0x83, 
0x4a, 0x4d, 0x0b, 0xaf, 0x02, + 0x0b, 0x10, 0xf0, 0x1e, 0xe8, 0x40, 0x49, 0xdb, 0x90, 0x0d, 0x07, 0xff, 0x37, 0x4f, 0x5a, 0xb0, + 0xd3, 0x50, 0xdd, 0x36, 0xfe, 0x1f, 0x90, 0x4e, 0x4d, 0xd3, 0xc1, 0x37, 0x60, 0x65, 0x83, 0x73, + 0xde, 0x67, 0x2b, 0xb7, 0xd9, 0xc4, 0x37, 0x61, 0xc5, 0x24, 0x7e, 0xc0, 0xea, 0x8d, 0x69, 0x69, + 0x0a, 0xbe, 0xc5, 0x7a, 0x3a, 0x00, 0x6f, 0xcf, 0xf4, 0xa0, 0x3b, 0x80, 0xb2, 0xc9, 0xcc, 0xfe, + 0x5f, 0x56, 0xa9, 0x5c, 0xd6, 0xe6, 0xff, 0x8f, 0x51, 0x6a, 0x8e, 0x4e, 0xf0, 0x87, 0xbc, 0x15, + 0x75, 0xac, 0x75, 0xa0, 0xbe, 0x9b, 0x84, 0x1c, 0x64, 0x20, 0x96, 0x59, 0x74, 0xba, 0x9b, 0x9d, + 0x0e, 0x5e, 0x83, 0xa5, 0xca, 0xa4, 0x2a, 0x80, 0xd2, 0x34, 0x29, 0xd1, 0x5a, 0x06, 0x56, 0xc1, + 0x15, 0xf7, 0x1e, 0x60, 0xc2, 0x9a, 0x8b, 0x66, 0x3b, 0xb8, 0xc9, 0xc7, 0x91, 0xb6, 0x82, 0x5b, + 0x2c, 0x00, 0xcc, 0x36, 0x8f, 0xcb, 0x75, 0x68, 0x06, 0xe9, 0x8e, 0x1d, 0xbc, 0xc6, 0x30, 0xdd, + 0xb6, 0x82, 0x37, 0xc0, 0x2d, 0x8a, 0x69, 0xe1, 0x7b, 0xe0, 0x09, 0x55, 0xb3, 0x59, 0xdf, 0x26, + 0x2a, 0xd6, 0x59, 0x2a, 0xd8, 0x16, 0x6e, 0x03, 0x6e, 0x0b, 0xc4, 0x1b, 0x6c, 0x05, 0x67, 0x6d, + 0x82, 0x41, 0x9a, 0xd1, 0x04, 0xa8, 0xc5, 0xc2, 0x90, 0xd8, 0xf8, 0x3e, 0x8b, 0x33, 0x66, 0x30, + 0x95, 0xfe, 0x2e, 0xa0, 0xa5, 0x99, 0x0b, 0xf4, 0x4b, 0x5f, 0xf8, 0xee, 0xce, 0x3c, 0x2b, 0x5c, + 0x99, 0xf3, 0xbe, 0x9e, 0x7d, 0x5d, 0x98, 0xb9, 0xb0, 0xe7, 0x5f, 0xea, 0xe9, 0xe3, 0x5a, 0x72, + 0xe5, 0xc6, 0x68, 0x31, 0x79, 0xc4, 0x39, 0xa8, 0x71, 0x20, 0x54, 0x52, 0xcc, 0x76, 0x1b, 0x6e, + 0xdd, 0x52, 0x0b, 0x95, 0x53, 0x43, 0xc4, 0xda, 0xf4, 0x91, 0x89, 0xdf, 0xf0, 0x27, 0x4f, 0x4c, + 0xaf, 0xa3, 0xc5, 0x47, 0xfe, 0x60, 0x18, 0x74, 0xc3, 0xad, 0xad, 0xc8, 0xe7, 0x97, 0xb1, 0x22, + 0xad, 0x32, 0x98, 0xc9, 0x40, 0x92, 0x8e, 0x4e, 0x2b, 0xdb, 0x5e, 0x14, 0x0d, 0xb7, 0x86, 0x3d, + 0xf6, 0x7c, 0xa6, 0x78, 0xb1, 0x3f, 0x08, 0xc7, 0x07, 0x3f, 0xc2, 0x9c, 0x43, 0xa8, 0x17, 0x06, + 0x5b, 0xc3, 0x3e, 0x7b, 0xff, 0xe0, 0x77, 0xcb, 0x0c, 0x44, 0xfa, 0x91, 0x80, 0xce, 0xc8, 0x81, + 0xb7, 0xbd, 0xf7, 0xa9, 0x3f, 0x35, 0xd4, 0xff, 0xfa, 0xae, 0x1f, 0xc5, 0xa2, 0x82, 0xca, 0xfd, + 0xe4, 0x89, 0xec, 0xc5, 0xa7, 0x94, 0x3e, 0xa6, 0xad, 0xe5, 0xbf, 0x90, 0x73, 0x74, 0x42, 0x28, + 0xde, 0x43, 0x4b, 0x7e, 0xd0, 0x0b, 0xfb, 0xc3, 0x60, 0xd0, 0xcd, 0x9c, 0xdb, 0x5b, 0xcf, 0x3b, + 0x37, 0x8e, 0xce, 0x4e, 0x6c, 0xd1, 0xcf, 0xec, 0xa4, 0xdf, 0x0a, 0xa8, 0xf6, 0xac, 0xb6, 0xd1, + 0x28, 0x84, 0x0e, 0x4a, 0x91, 0x98, 0x4a, 0xed, 0x4e, 0xcf, 0x57, 0x98, 0xff, 0x7c, 0x4f, 0xa4, + 0xe4, 0xd3, 0x9b, 0x79, 0xf6, 0x21, 0x2f, 0x37, 0xfb, 0x90, 0x27, 0xde, 0xe5, 0x61, 0x04, 0x6e, + 0x8c, 0x92, 0x57, 0x2a, 0xe9, 0xf9, 0x62, 0x00, 0x95, 0x4e, 0x89, 0xa4, 0x9f, 0x0b, 0xe8, 0xb5, + 0xc4, 0x1c, 0x1e, 0xac, 0xff, 0x01, 0x47, 0xf0, 0x29, 0x3a, 0x77, 0x98, 0xca, 0xc9, 0x39, 0xdc, + 0x41, 0x65, 0x80, 0xc5, 0x43, 0x3f, 0xaa, 0x09, 0xcc, 0x2d, 0x17, 0x5e, 0x94, 0xa4, 0x74, 0x42, + 0xf1, 0x3c, 0x8f, 0x4b, 0x3f, 0x14, 0xd0, 0xe9, 0xac, 0xf0, 0xa1, 0x1f, 0x7d, 0x75, 0x1d, 0x15, + 0x4d, 0x12, 0x6b, 0xaa, 0xeb, 0xbf, 0xdc, 0x43, 0xdf, 0x17, 0xd0, 0xa9, 0x34, 0x41, 0xf6, 0x82, + 0xd8, 0x7b, 0xfa, 0xd5, 0xf5, 0xcf, 0x4f, 0x04, 0xf4, 0xca, 0x3e, 0x55, 0x13, 0xf7, 0xcc, 0x24, + 0x96, 0xf0, 0x12, 0x89, 0x25, 0xbe, 0x8f, 0x4a, 0x6c, 0x08, 0x8d, 0x6a, 0x39, 0x46, 0x7e, 0xfe, + 0x39, 0xdd, 0x05, 0xf0, 0x68, 0x82, 0x3e, 0xe3, 0xdb, 0xfc, 0x3e, 0xdf, 0x3e, 0x44, 0x27, 0x93, + 0xc2, 0xbb, 0x07, 0x95, 0xfc, 0xcb, 0xf4, 0xac, 0xf4, 0x18, 0x9d, 0x9a, 0xe5, 0x9d, 0xb8, 0xc2, + 0x42, 0xa8, 0xc7, 0xcb, 0xfb, 0x34, 0x56, 0xae, 0x1d, 0xce, 0xfe, 0xe0, 0xc6, 0x40, 0x33, 
0x3c, + 0xa4, 0xbf, 0xe6, 0xd1, 0x49, 0x99, 0xff, 0xf4, 0xe2, 0x7f, 0xd9, 0x66, 0x88, 0x0f, 0x50, 0x79, + 0xcb, 0xf7, 0xe2, 0xdd, 0xb1, 0x1f, 0x25, 0xcf, 0xb0, 0x37, 0x0e, 0x67, 0x72, 0x80, 0x16, 0x8d, + 0x66, 0x42, 0x9d, 0x30, 0x4e, 0x99, 0x3d, 0x1b, 0x79, 0xf9, 0x97, 0x8f, 0xbc, 0xfa, 0x5f, 0x04, + 0x54, 0x4e, 0x05, 0x89, 0x6f, 0xa2, 0x65, 0xff, 0x69, 0x3c, 0xf6, 0x7a, 0x71, 0x37, 0x62, 0x61, + 0xc8, 0xac, 0x2f, 0xd3, 0xa5, 0x04, 0xca, 0x63, 0x53, 0x7c, 0x07, 0xe1, 0x14, 0x6d, 0x92, 0xba, + 0x39, 0x86, 0x78, 0x3c, 0x81, 0xa7, 0x59, 0x2e, 0xde, 0x41, 0xf5, 0x14, 0xf5, 0x80, 0x7e, 0x94, + 0x67, 0x44, 0xb5, 0x04, 0x43, 0x7d, 0xa6, 0xe3, 0xdc, 0x44, 0xb5, 0x19, 0x41, 0x7b, 0x19, 0xda, + 0x02, 0xa3, 0x3d, 0x9d, 0x15, 0x38, 0xad, 0xbf, 0xe2, 0x45, 0xb4, 0xd4, 0x4b, 0x62, 0xa8, 0xcb, + 0x26, 0xab, 0x12, 0x43, 0x5f, 0xec, 0x65, 0x02, 0x4b, 0xfa, 0x4e, 0x1e, 0x0a, 0x44, 0xd6, 0xf1, + 0xff, 0xfe, 0xa4, 0xcb, 0x96, 0xc3, 0xfc, 0x91, 0xcb, 0xe1, 0xc1, 0x6d, 0xbf, 0xf0, 0xa5, 0xb5, + 0xfd, 0xe2, 0xbe, 0xb6, 0x3f, 0x9b, 0x92, 0xa5, 0x7f, 0x3e, 0x25, 0x2f, 0xdf, 0x44, 0x8b, 0xd9, + 0x68, 0xe5, 0x53, 0xbb, 0x41, 0xf0, 0x31, 0x58, 0xb9, 0x4e, 0xf3, 0x26, 0xbf, 0xc8, 0xba, 0x4e, + 0xf3, 0xfa, 0x0d, 0x7e, 0x91, 0x75, 0x9d, 0xe6, 0x7b, 0xab, 0x38, 0xbf, 0xfa, 0x33, 0x84, 0x8e, + 0xeb, 0x89, 0x30, 0x9b, 0xff, 0x06, 0x2b, 0xfe, 0x46, 0x40, 0x78, 0xff, 0x8c, 0x24, 0x5e, 0x7f, + 0x5e, 0x1a, 0x1e, 0x38, 0xfd, 0xd5, 0x57, 0x8f, 0x42, 0xc2, 0x83, 0x48, 0x6a, 0x7f, 0x2e, 0x9f, + 0x4e, 0xbd, 0x79, 0x65, 0x26, 0x61, 0x3f, 0x97, 0x27, 0x55, 0xe2, 0x9b, 0xbf, 0xff, 0xc3, 0x77, + 0x73, 0x17, 0xa5, 0x73, 0x2b, 0x4f, 0xae, 0xaf, 0xa4, 0xb0, 0xe8, 0x96, 0xb7, 0x8f, 0xe7, 0x2d, + 0xe1, 0xb2, 0xf8, 0x6b, 0x01, 0x1d, 0xdf, 0xd7, 0x43, 0xc5, 0x6b, 0x2f, 0x54, 0x6b, 0xdf, 0x68, + 0x50, 0xbf, 0x7e, 0x04, 0x8a, 0xc4, 0x0e, 0x7d, 0x6e, 0x3b, 0x24, 0xe9, 0xb5, 0x03, 0xed, 0x48, + 0x59, 0x82, 0x19, 0x5f, 0xec, 0x1b, 0x5b, 0x32, 0x39, 0xfb, 0xfe, 0x7c, 0xba, 0x3d, 0x33, 0x18, + 0xd6, 0x6f, 0x1e, 0x9d, 0x30, 0xb1, 0xcd, 0x9e, 0xdb, 0xb6, 0x77, 0xa4, 0x37, 0x0e, 0xb7, 0x6d, + 0x6f, 0xe6, 0xa4, 0x7e, 0x29, 0xa0, 0xa5, 0x99, 0x66, 0x2e, 0x36, 0x5e, 0x1c, 0x3e, 0xd9, 0x01, + 0xa5, 0xbe, 0x32, 0x37, 0x7e, 0x62, 0xc7, 0xc6, 0xdc, 0x76, 0x5c, 0x90, 0x5e, 0x3d, 0x38, 0xd6, + 0x18, 0x43, 0x50, 0xff, 0x7b, 0x02, 0x5a, 0xcc, 0xf6, 0x5f, 0xf1, 0xea, 0x0b, 0x13, 0x3a, 0x3b, + 0x03, 0xd4, 0x1b, 0xf3, 0xa2, 0x27, 0xba, 0x5f, 0xdb, 0xaf, 0xe2, 0x79, 0xa9, 0x3e, 0xab, 0x62, + 0xb6, 0x68, 0x83, 0x86, 0x9f, 0x09, 0x68, 0x31, 0x5b, 0xb7, 0x9f, 0xa7, 0xe1, 0x01, 0x8d, 0xb5, + 0xde, 0x98, 0x17, 0x3d, 0xd1, 0xf0, 0xff, 0x3f, 0x97, 0xcf, 0x4f, 0xbc, 0x9b, 0xb6, 0xe1, 0xfd, + 0x6e, 0x3e, 0xf1, 0x0c, 0xc6, 0xc1, 0xc6, 0x78, 0x19, 0x09, 0xb7, 0x84, 0xcb, 0xf5, 0x4f, 0x3f, + 0x93, 0xcf, 0x4c, 0x34, 0xe0, 0x7a, 0x79, 0xa3, 0x61, 0xd4, 0xe8, 0x85, 0x3b, 0xbf, 0x93, 0xbf, + 0xf6, 0x38, 0x8e, 0x47, 0xd1, 0xad, 0x95, 0x95, 0x4f, 0x3e, 0xf9, 0x64, 0xdf, 0xc7, 0x15, 0x6f, + 0x37, 0x7e, 0xcc, 0xff, 0x87, 0x72, 0x35, 0xa5, 0xbf, 0x32, 0x1f, 0xfa, 0x68, 0xdb, 0x8b, 0xb7, + 0xc2, 0xf1, 0xce, 0xda, 0x53, 0x74, 0xb6, 0x17, 0xee, 0x1c, 0xea, 0x8d, 0xb5, 0x53, 0xfb, 0xea, + 0xa9, 0x35, 0x0e, 0xe3, 0xd0, 0x12, 0x1e, 0xde, 0x4d, 0x28, 0x06, 0x21, 0x60, 0x37, 0xc2, 0xf1, + 0x60, 0x65, 0xe0, 0x07, 0xec, 0xef, 0x27, 0x2b, 0x53, 0xb1, 0xcf, 0xfe, 0x49, 0xe6, 0x76, 0xba, + 0x7e, 0x54, 0x62, 0xc8, 0xef, 0xfd, 0x23, 0x00, 0x00, 0xff, 0xff, 0x11, 0x69, 0x57, 0x10, 0x50, + 0x23, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta1/language_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta1/language_service.pb.go new file mode 100644 index 0000000..602e135 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta1/language_service.pb.go @@ -0,0 +1,2723 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/language/v1beta1/language_service.proto + +package language // import "google.golang.org/genproto/googleapis/cloud/language/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the text encoding that the caller uses to process the output. +// Providing an `EncodingType` is recommended because the API provides the +// beginning offsets for various outputs, such as tokens and mentions, and +// languages that natively use different text encodings may access offsets +// differently. +type EncodingType int32 + +const ( + // If `EncodingType` is not specified, encoding-dependent information (such as + // `begin_offset`) will be set at `-1`. + EncodingType_NONE EncodingType = 0 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-8 encoding of the input. C++ and Go are examples of languages + // that use this encoding natively. + EncodingType_UTF8 EncodingType = 1 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-16 encoding of the input. Java and Javascript are examples of + // languages that use this encoding natively. + EncodingType_UTF16 EncodingType = 2 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-32 encoding of the input. Python is an example of a language + // that uses this encoding natively. + EncodingType_UTF32 EncodingType = 3 +) + +var EncodingType_name = map[int32]string{ + 0: "NONE", + 1: "UTF8", + 2: "UTF16", + 3: "UTF32", +} +var EncodingType_value = map[string]int32{ + "NONE": 0, + "UTF8": 1, + "UTF16": 2, + "UTF32": 3, +} + +func (x EncodingType) String() string { + return proto.EnumName(EncodingType_name, int32(x)) +} +func (EncodingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{0} +} + +// The document types enum. +type Document_Type int32 + +const ( + // The content type is not specified. 
+ Document_TYPE_UNSPECIFIED Document_Type = 0 + // Plain text + Document_PLAIN_TEXT Document_Type = 1 + // HTML + Document_HTML Document_Type = 2 +) + +var Document_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "PLAIN_TEXT", + 2: "HTML", +} +var Document_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "PLAIN_TEXT": 1, + "HTML": 2, +} + +func (x Document_Type) String() string { + return proto.EnumName(Document_Type_name, int32(x)) +} +func (Document_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{0, 0} +} + +// The type of the entity. +type Entity_Type int32 + +const ( + // Unknown + Entity_UNKNOWN Entity_Type = 0 + // Person + Entity_PERSON Entity_Type = 1 + // Location + Entity_LOCATION Entity_Type = 2 + // Organization + Entity_ORGANIZATION Entity_Type = 3 + // Event + Entity_EVENT Entity_Type = 4 + // Work of art + Entity_WORK_OF_ART Entity_Type = 5 + // Consumer goods + Entity_CONSUMER_GOOD Entity_Type = 6 + // Other types + Entity_OTHER Entity_Type = 7 +) + +var Entity_Type_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PERSON", + 2: "LOCATION", + 3: "ORGANIZATION", + 4: "EVENT", + 5: "WORK_OF_ART", + 6: "CONSUMER_GOOD", + 7: "OTHER", +} +var Entity_Type_value = map[string]int32{ + "UNKNOWN": 0, + "PERSON": 1, + "LOCATION": 2, + "ORGANIZATION": 3, + "EVENT": 4, + "WORK_OF_ART": 5, + "CONSUMER_GOOD": 6, + "OTHER": 7, +} + +func (x Entity_Type) String() string { + return proto.EnumName(Entity_Type_name, int32(x)) +} +func (Entity_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{2, 0} +} + +// The part of speech tags enum. +type PartOfSpeech_Tag int32 + +const ( + // Unknown + PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0 + // Adjective + PartOfSpeech_ADJ PartOfSpeech_Tag = 1 + // Adposition (preposition and postposition) + PartOfSpeech_ADP PartOfSpeech_Tag = 2 + // Adverb + PartOfSpeech_ADV PartOfSpeech_Tag = 3 + // Conjunction + PartOfSpeech_CONJ PartOfSpeech_Tag = 4 + // Determiner + PartOfSpeech_DET PartOfSpeech_Tag = 5 + // Noun (common and proper) + PartOfSpeech_NOUN PartOfSpeech_Tag = 6 + // Cardinal number + PartOfSpeech_NUM PartOfSpeech_Tag = 7 + // Pronoun + PartOfSpeech_PRON PartOfSpeech_Tag = 8 + // Particle or other function word + PartOfSpeech_PRT PartOfSpeech_Tag = 9 + // Punctuation + PartOfSpeech_PUNCT PartOfSpeech_Tag = 10 + // Verb (all tenses and modes) + PartOfSpeech_VERB PartOfSpeech_Tag = 11 + // Other: foreign words, typos, abbreviations + PartOfSpeech_X PartOfSpeech_Tag = 12 + // Affix + PartOfSpeech_AFFIX PartOfSpeech_Tag = 13 +) + +var PartOfSpeech_Tag_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ADJ", + 2: "ADP", + 3: "ADV", + 4: "CONJ", + 5: "DET", + 6: "NOUN", + 7: "NUM", + 8: "PRON", + 9: "PRT", + 10: "PUNCT", + 11: "VERB", + 12: "X", + 13: "AFFIX", +} +var PartOfSpeech_Tag_value = map[string]int32{ + "UNKNOWN": 0, + "ADJ": 1, + "ADP": 2, + "ADV": 3, + "CONJ": 4, + "DET": 5, + "NOUN": 6, + "NUM": 7, + "PRON": 8, + "PRT": 9, + "PUNCT": 10, + "VERB": 11, + "X": 12, + "AFFIX": 13, +} + +func (x PartOfSpeech_Tag) String() string { + return proto.EnumName(PartOfSpeech_Tag_name, int32(x)) +} +func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 0} +} + +// The characteristic of a verb that expresses time flow during an event. +type PartOfSpeech_Aspect int32 + +const ( + // Aspect is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0 + // Perfective + PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1 + // Imperfective + PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2 + // Progressive + PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3 +) + +var PartOfSpeech_Aspect_name = map[int32]string{ + 0: "ASPECT_UNKNOWN", + 1: "PERFECTIVE", + 2: "IMPERFECTIVE", + 3: "PROGRESSIVE", +} +var PartOfSpeech_Aspect_value = map[string]int32{ + "ASPECT_UNKNOWN": 0, + "PERFECTIVE": 1, + "IMPERFECTIVE": 2, + "PROGRESSIVE": 3, +} + +func (x PartOfSpeech_Aspect) String() string { + return proto.EnumName(PartOfSpeech_Aspect_name, int32(x)) +} +func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 1} +} + +// The grammatical function performed by a noun or pronoun in a phrase, +// clause, or sentence. In some languages, other parts of speech, such as +// adjective and determiner, take case inflection in agreement with the noun. +type PartOfSpeech_Case int32 + +const ( + // Case is not applicable in the analyzed language or is not predicted. + PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0 + // Accusative + PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1 + // Adverbial + PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2 + // Complementive + PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3 + // Dative + PartOfSpeech_DATIVE PartOfSpeech_Case = 4 + // Genitive + PartOfSpeech_GENITIVE PartOfSpeech_Case = 5 + // Instrumental + PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6 + // Locative + PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7 + // Nominative + PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8 + // Oblique + PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9 + // Partitive + PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10 + // Prepositional + PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11 + // Reflexive + PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12 + // Relative + PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13 + // Vocative + PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14 +) + +var PartOfSpeech_Case_name = map[int32]string{ + 0: "CASE_UNKNOWN", + 1: "ACCUSATIVE", + 2: "ADVERBIAL", + 3: "COMPLEMENTIVE", + 4: "DATIVE", + 5: "GENITIVE", + 6: "INSTRUMENTAL", + 7: "LOCATIVE", + 8: "NOMINATIVE", + 9: "OBLIQUE", + 10: "PARTITIVE", + 11: "PREPOSITIONAL", + 12: "REFLEXIVE_CASE", + 13: "RELATIVE_CASE", + 14: "VOCATIVE", +} +var PartOfSpeech_Case_value = map[string]int32{ + "CASE_UNKNOWN": 0, + "ACCUSATIVE": 1, + "ADVERBIAL": 2, + "COMPLEMENTIVE": 3, + "DATIVE": 4, + "GENITIVE": 5, + "INSTRUMENTAL": 6, + "LOCATIVE": 7, + "NOMINATIVE": 8, + "OBLIQUE": 9, + "PARTITIVE": 10, + "PREPOSITIONAL": 11, + "REFLEXIVE_CASE": 12, + "RELATIVE_CASE": 13, + "VOCATIVE": 14, +} + +func (x PartOfSpeech_Case) String() string { + return proto.EnumName(PartOfSpeech_Case_name, int32(x)) +} +func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 2} +} + +// Depending on the language, Form can be categorizing different forms of +// verbs, adjectives, adverbs, etc. For example, categorizing inflected +// endings of verbs and adjectives or distinguishing between short and long +// forms of adjectives and participles +type PartOfSpeech_Form int32 + +const ( + // Form is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0 + // Adnomial + PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1 + // Auxiliary + PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2 + // Complementizer + PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3 + // Final ending + PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4 + // Gerund + PartOfSpeech_GERUND PartOfSpeech_Form = 5 + // Realis + PartOfSpeech_REALIS PartOfSpeech_Form = 6 + // Irrealis + PartOfSpeech_IRREALIS PartOfSpeech_Form = 7 + // Short form + PartOfSpeech_SHORT PartOfSpeech_Form = 8 + // Long form + PartOfSpeech_LONG PartOfSpeech_Form = 9 + // Order form + PartOfSpeech_ORDER PartOfSpeech_Form = 10 + // Specific form + PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11 +) + +var PartOfSpeech_Form_name = map[int32]string{ + 0: "FORM_UNKNOWN", + 1: "ADNOMIAL", + 2: "AUXILIARY", + 3: "COMPLEMENTIZER", + 4: "FINAL_ENDING", + 5: "GERUND", + 6: "REALIS", + 7: "IRREALIS", + 8: "SHORT", + 9: "LONG", + 10: "ORDER", + 11: "SPECIFIC", +} +var PartOfSpeech_Form_value = map[string]int32{ + "FORM_UNKNOWN": 0, + "ADNOMIAL": 1, + "AUXILIARY": 2, + "COMPLEMENTIZER": 3, + "FINAL_ENDING": 4, + "GERUND": 5, + "REALIS": 6, + "IRREALIS": 7, + "SHORT": 8, + "LONG": 9, + "ORDER": 10, + "SPECIFIC": 11, +} + +func (x PartOfSpeech_Form) String() string { + return proto.EnumName(PartOfSpeech_Form_name, int32(x)) +} +func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 3} +} + +// Gender classes of nouns reflected in the behaviour of associated words. +type PartOfSpeech_Gender int32 + +const ( + // Gender is not applicable in the analyzed language or is not predicted. + PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0 + // Feminine + PartOfSpeech_FEMININE PartOfSpeech_Gender = 1 + // Masculine + PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2 + // Neuter + PartOfSpeech_NEUTER PartOfSpeech_Gender = 3 +) + +var PartOfSpeech_Gender_name = map[int32]string{ + 0: "GENDER_UNKNOWN", + 1: "FEMININE", + 2: "MASCULINE", + 3: "NEUTER", +} +var PartOfSpeech_Gender_value = map[string]int32{ + "GENDER_UNKNOWN": 0, + "FEMININE": 1, + "MASCULINE": 2, + "NEUTER": 3, +} + +func (x PartOfSpeech_Gender) String() string { + return proto.EnumName(PartOfSpeech_Gender_name, int32(x)) +} +func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 4} +} + +// The grammatical feature of verbs, used for showing modality and attitude. +type PartOfSpeech_Mood int32 + +const ( + // Mood is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0 + // Conditional + PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1 + // Imperative + PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2 + // Indicative + PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3 + // Interrogative + PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4 + // Jussive + PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5 + // Subjunctive + PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6 +) + +var PartOfSpeech_Mood_name = map[int32]string{ + 0: "MOOD_UNKNOWN", + 1: "CONDITIONAL_MOOD", + 2: "IMPERATIVE", + 3: "INDICATIVE", + 4: "INTERROGATIVE", + 5: "JUSSIVE", + 6: "SUBJUNCTIVE", +} +var PartOfSpeech_Mood_value = map[string]int32{ + "MOOD_UNKNOWN": 0, + "CONDITIONAL_MOOD": 1, + "IMPERATIVE": 2, + "INDICATIVE": 3, + "INTERROGATIVE": 4, + "JUSSIVE": 5, + "SUBJUNCTIVE": 6, +} + +func (x PartOfSpeech_Mood) String() string { + return proto.EnumName(PartOfSpeech_Mood_name, int32(x)) +} +func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 5} +} + +// Count distinctions. +type PartOfSpeech_Number int32 + +const ( + // Number is not applicable in the analyzed language or is not predicted. + PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0 + // Singular + PartOfSpeech_SINGULAR PartOfSpeech_Number = 1 + // Plural + PartOfSpeech_PLURAL PartOfSpeech_Number = 2 + // Dual + PartOfSpeech_DUAL PartOfSpeech_Number = 3 +) + +var PartOfSpeech_Number_name = map[int32]string{ + 0: "NUMBER_UNKNOWN", + 1: "SINGULAR", + 2: "PLURAL", + 3: "DUAL", +} +var PartOfSpeech_Number_value = map[string]int32{ + "NUMBER_UNKNOWN": 0, + "SINGULAR": 1, + "PLURAL": 2, + "DUAL": 3, +} + +func (x PartOfSpeech_Number) String() string { + return proto.EnumName(PartOfSpeech_Number_name, int32(x)) +} +func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 6} +} + +// The distinction between the speaker, second person, third person, etc. +type PartOfSpeech_Person int32 + +const ( + // Person is not applicable in the analyzed language or is not predicted. + PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0 + // First + PartOfSpeech_FIRST PartOfSpeech_Person = 1 + // Second + PartOfSpeech_SECOND PartOfSpeech_Person = 2 + // Third + PartOfSpeech_THIRD PartOfSpeech_Person = 3 + // Reflexive + PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4 +) + +var PartOfSpeech_Person_name = map[int32]string{ + 0: "PERSON_UNKNOWN", + 1: "FIRST", + 2: "SECOND", + 3: "THIRD", + 4: "REFLEXIVE_PERSON", +} +var PartOfSpeech_Person_value = map[string]int32{ + "PERSON_UNKNOWN": 0, + "FIRST": 1, + "SECOND": 2, + "THIRD": 3, + "REFLEXIVE_PERSON": 4, +} + +func (x PartOfSpeech_Person) String() string { + return proto.EnumName(PartOfSpeech_Person_name, int32(x)) +} +func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 7} +} + +// This category shows if the token is part of a proper name. +type PartOfSpeech_Proper int32 + +const ( + // Proper is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0 + // Proper + PartOfSpeech_PROPER PartOfSpeech_Proper = 1 + // Not proper + PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2 +) + +var PartOfSpeech_Proper_name = map[int32]string{ + 0: "PROPER_UNKNOWN", + 1: "PROPER", + 2: "NOT_PROPER", +} +var PartOfSpeech_Proper_value = map[string]int32{ + "PROPER_UNKNOWN": 0, + "PROPER": 1, + "NOT_PROPER": 2, +} + +func (x PartOfSpeech_Proper) String() string { + return proto.EnumName(PartOfSpeech_Proper_name, int32(x)) +} +func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 8} +} + +// Reciprocal features of a pronoun. +type PartOfSpeech_Reciprocity int32 + +const ( + // Reciprocity is not applicable in the analyzed language or is not + // predicted. + PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0 + // Reciprocal + PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1 + // Non-reciprocal + PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2 +) + +var PartOfSpeech_Reciprocity_name = map[int32]string{ + 0: "RECIPROCITY_UNKNOWN", + 1: "RECIPROCAL", + 2: "NON_RECIPROCAL", +} +var PartOfSpeech_Reciprocity_value = map[string]int32{ + "RECIPROCITY_UNKNOWN": 0, + "RECIPROCAL": 1, + "NON_RECIPROCAL": 2, +} + +func (x PartOfSpeech_Reciprocity) String() string { + return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x)) +} +func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 9} +} + +// Time reference. +type PartOfSpeech_Tense int32 + +const ( + // Tense is not applicable in the analyzed language or is not predicted. + PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0 + // Conditional + PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1 + // Future + PartOfSpeech_FUTURE PartOfSpeech_Tense = 2 + // Past + PartOfSpeech_PAST PartOfSpeech_Tense = 3 + // Present + PartOfSpeech_PRESENT PartOfSpeech_Tense = 4 + // Imperfect + PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5 + // Pluperfect + PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6 +) + +var PartOfSpeech_Tense_name = map[int32]string{ + 0: "TENSE_UNKNOWN", + 1: "CONDITIONAL_TENSE", + 2: "FUTURE", + 3: "PAST", + 4: "PRESENT", + 5: "IMPERFECT", + 6: "PLUPERFECT", +} +var PartOfSpeech_Tense_value = map[string]int32{ + "TENSE_UNKNOWN": 0, + "CONDITIONAL_TENSE": 1, + "FUTURE": 2, + "PAST": 3, + "PRESENT": 4, + "IMPERFECT": 5, + "PLUPERFECT": 6, +} + +func (x PartOfSpeech_Tense) String() string { + return proto.EnumName(PartOfSpeech_Tense_name, int32(x)) +} +func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 10} +} + +// The relationship between the action that a verb expresses and the +// participants identified by its arguments. +type PartOfSpeech_Voice int32 + +const ( + // Voice is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0 + // Active + PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1 + // Causative + PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2 + // Passive + PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3 +) + +var PartOfSpeech_Voice_name = map[int32]string{ + 0: "VOICE_UNKNOWN", + 1: "ACTIVE", + 2: "CAUSATIVE", + 3: "PASSIVE", +} +var PartOfSpeech_Voice_value = map[string]int32{ + "VOICE_UNKNOWN": 0, + "ACTIVE": 1, + "CAUSATIVE": 2, + "PASSIVE": 3, +} + +func (x PartOfSpeech_Voice) String() string { + return proto.EnumName(PartOfSpeech_Voice_name, int32(x)) +} +func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5, 11} +} + +// The parse label enum for the token. +type DependencyEdge_Label int32 + +const ( + // Unknown + DependencyEdge_UNKNOWN DependencyEdge_Label = 0 + // Abbreviation modifier + DependencyEdge_ABBREV DependencyEdge_Label = 1 + // Adjectival complement + DependencyEdge_ACOMP DependencyEdge_Label = 2 + // Adverbial clause modifier + DependencyEdge_ADVCL DependencyEdge_Label = 3 + // Adverbial modifier + DependencyEdge_ADVMOD DependencyEdge_Label = 4 + // Adjectival modifier of an NP + DependencyEdge_AMOD DependencyEdge_Label = 5 + // Appositional modifier of an NP + DependencyEdge_APPOS DependencyEdge_Label = 6 + // Attribute dependent of a copular verb + DependencyEdge_ATTR DependencyEdge_Label = 7 + // Auxiliary (non-main) verb + DependencyEdge_AUX DependencyEdge_Label = 8 + // Passive auxiliary + DependencyEdge_AUXPASS DependencyEdge_Label = 9 + // Coordinating conjunction + DependencyEdge_CC DependencyEdge_Label = 10 + // Clausal complement of a verb or adjective + DependencyEdge_CCOMP DependencyEdge_Label = 11 + // Conjunct + DependencyEdge_CONJ DependencyEdge_Label = 12 + // Clausal subject + DependencyEdge_CSUBJ DependencyEdge_Label = 13 + // Clausal passive subject + DependencyEdge_CSUBJPASS DependencyEdge_Label = 14 + // Dependency (unable to determine) + DependencyEdge_DEP DependencyEdge_Label = 15 + // Determiner + DependencyEdge_DET DependencyEdge_Label = 16 + // Discourse + DependencyEdge_DISCOURSE DependencyEdge_Label = 17 + // Direct object + DependencyEdge_DOBJ DependencyEdge_Label = 18 + // Expletive + DependencyEdge_EXPL DependencyEdge_Label = 19 + // Goes with (part of a word in a text not well edited) + DependencyEdge_GOESWITH DependencyEdge_Label = 20 + // Indirect object + DependencyEdge_IOBJ DependencyEdge_Label = 21 + // Marker (word introducing a subordinate clause) + DependencyEdge_MARK DependencyEdge_Label = 22 + // Multi-word expression + DependencyEdge_MWE DependencyEdge_Label = 23 + // Multi-word verbal expression + DependencyEdge_MWV DependencyEdge_Label = 24 + // Negation modifier + DependencyEdge_NEG DependencyEdge_Label = 25 + // Noun compound modifier + DependencyEdge_NN DependencyEdge_Label = 26 + // Noun phrase used as an adverbial modifier + DependencyEdge_NPADVMOD DependencyEdge_Label = 27 + // Nominal subject + DependencyEdge_NSUBJ DependencyEdge_Label = 28 + // Passive nominal subject + DependencyEdge_NSUBJPASS DependencyEdge_Label = 29 + // Numeric modifier of a noun + DependencyEdge_NUM DependencyEdge_Label = 30 + // Element of compound number + DependencyEdge_NUMBER DependencyEdge_Label = 31 + // Punctuation mark + DependencyEdge_P DependencyEdge_Label = 32 + // Parataxis relation + DependencyEdge_PARATAXIS DependencyEdge_Label = 33 + // Participial modifier + DependencyEdge_PARTMOD DependencyEdge_Label = 34 + // The complement of 
a preposition is a clause + DependencyEdge_PCOMP DependencyEdge_Label = 35 + // Object of a preposition + DependencyEdge_POBJ DependencyEdge_Label = 36 + // Possession modifier + DependencyEdge_POSS DependencyEdge_Label = 37 + // Postverbal negative particle + DependencyEdge_POSTNEG DependencyEdge_Label = 38 + // Predicate complement + DependencyEdge_PRECOMP DependencyEdge_Label = 39 + // Preconjunt + DependencyEdge_PRECONJ DependencyEdge_Label = 40 + // Predeterminer + DependencyEdge_PREDET DependencyEdge_Label = 41 + // Prefix + DependencyEdge_PREF DependencyEdge_Label = 42 + // Prepositional modifier + DependencyEdge_PREP DependencyEdge_Label = 43 + // The relationship between a verb and verbal morpheme + DependencyEdge_PRONL DependencyEdge_Label = 44 + // Particle + DependencyEdge_PRT DependencyEdge_Label = 45 + // Associative or possessive marker + DependencyEdge_PS DependencyEdge_Label = 46 + // Quantifier phrase modifier + DependencyEdge_QUANTMOD DependencyEdge_Label = 47 + // Relative clause modifier + DependencyEdge_RCMOD DependencyEdge_Label = 48 + // Complementizer in relative clause + DependencyEdge_RCMODREL DependencyEdge_Label = 49 + // Ellipsis without a preceding predicate + DependencyEdge_RDROP DependencyEdge_Label = 50 + // Referent + DependencyEdge_REF DependencyEdge_Label = 51 + // Remnant + DependencyEdge_REMNANT DependencyEdge_Label = 52 + // Reparandum + DependencyEdge_REPARANDUM DependencyEdge_Label = 53 + // Root + DependencyEdge_ROOT DependencyEdge_Label = 54 + // Suffix specifying a unit of number + DependencyEdge_SNUM DependencyEdge_Label = 55 + // Suffix + DependencyEdge_SUFF DependencyEdge_Label = 56 + // Temporal modifier + DependencyEdge_TMOD DependencyEdge_Label = 57 + // Topic marker + DependencyEdge_TOPIC DependencyEdge_Label = 58 + // Clause headed by an infinite form of the verb that modifies a noun + DependencyEdge_VMOD DependencyEdge_Label = 59 + // Vocative + DependencyEdge_VOCATIVE DependencyEdge_Label = 60 + // Open clausal complement + DependencyEdge_XCOMP DependencyEdge_Label = 61 + // Name suffix + DependencyEdge_SUFFIX DependencyEdge_Label = 62 + // Name title + DependencyEdge_TITLE DependencyEdge_Label = 63 + // Adverbial phrase modifier + DependencyEdge_ADVPHMOD DependencyEdge_Label = 64 + // Causative auxiliary + DependencyEdge_AUXCAUS DependencyEdge_Label = 65 + // Helper auxiliary + DependencyEdge_AUXVV DependencyEdge_Label = 66 + // Rentaishi (Prenominal modifier) + DependencyEdge_DTMOD DependencyEdge_Label = 67 + // Foreign words + DependencyEdge_FOREIGN DependencyEdge_Label = 68 + // Keyword + DependencyEdge_KW DependencyEdge_Label = 69 + // List for chains of comparable items + DependencyEdge_LIST DependencyEdge_Label = 70 + // Nominalized clause + DependencyEdge_NOMC DependencyEdge_Label = 71 + // Nominalized clausal subject + DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72 + // Nominalized clausal passive + DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73 + // Compound of numeric modifier + DependencyEdge_NUMC DependencyEdge_Label = 74 + // Copula + DependencyEdge_COP DependencyEdge_Label = 75 + // Dislocated relation (for fronted/topicalized elements) + DependencyEdge_DISLOCATED DependencyEdge_Label = 76 +) + +var DependencyEdge_Label_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ABBREV", + 2: "ACOMP", + 3: "ADVCL", + 4: "ADVMOD", + 5: "AMOD", + 6: "APPOS", + 7: "ATTR", + 8: "AUX", + 9: "AUXPASS", + 10: "CC", + 11: "CCOMP", + 12: "CONJ", + 13: "CSUBJ", + 14: "CSUBJPASS", + 15: "DEP", + 16: "DET", + 17: "DISCOURSE", + 18: 
"DOBJ", + 19: "EXPL", + 20: "GOESWITH", + 21: "IOBJ", + 22: "MARK", + 23: "MWE", + 24: "MWV", + 25: "NEG", + 26: "NN", + 27: "NPADVMOD", + 28: "NSUBJ", + 29: "NSUBJPASS", + 30: "NUM", + 31: "NUMBER", + 32: "P", + 33: "PARATAXIS", + 34: "PARTMOD", + 35: "PCOMP", + 36: "POBJ", + 37: "POSS", + 38: "POSTNEG", + 39: "PRECOMP", + 40: "PRECONJ", + 41: "PREDET", + 42: "PREF", + 43: "PREP", + 44: "PRONL", + 45: "PRT", + 46: "PS", + 47: "QUANTMOD", + 48: "RCMOD", + 49: "RCMODREL", + 50: "RDROP", + 51: "REF", + 52: "REMNANT", + 53: "REPARANDUM", + 54: "ROOT", + 55: "SNUM", + 56: "SUFF", + 57: "TMOD", + 58: "TOPIC", + 59: "VMOD", + 60: "VOCATIVE", + 61: "XCOMP", + 62: "SUFFIX", + 63: "TITLE", + 64: "ADVPHMOD", + 65: "AUXCAUS", + 66: "AUXVV", + 67: "DTMOD", + 68: "FOREIGN", + 69: "KW", + 70: "LIST", + 71: "NOMC", + 72: "NOMCSUBJ", + 73: "NOMCSUBJPASS", + 74: "NUMC", + 75: "COP", + 76: "DISLOCATED", +} +var DependencyEdge_Label_value = map[string]int32{ + "UNKNOWN": 0, + "ABBREV": 1, + "ACOMP": 2, + "ADVCL": 3, + "ADVMOD": 4, + "AMOD": 5, + "APPOS": 6, + "ATTR": 7, + "AUX": 8, + "AUXPASS": 9, + "CC": 10, + "CCOMP": 11, + "CONJ": 12, + "CSUBJ": 13, + "CSUBJPASS": 14, + "DEP": 15, + "DET": 16, + "DISCOURSE": 17, + "DOBJ": 18, + "EXPL": 19, + "GOESWITH": 20, + "IOBJ": 21, + "MARK": 22, + "MWE": 23, + "MWV": 24, + "NEG": 25, + "NN": 26, + "NPADVMOD": 27, + "NSUBJ": 28, + "NSUBJPASS": 29, + "NUM": 30, + "NUMBER": 31, + "P": 32, + "PARATAXIS": 33, + "PARTMOD": 34, + "PCOMP": 35, + "POBJ": 36, + "POSS": 37, + "POSTNEG": 38, + "PRECOMP": 39, + "PRECONJ": 40, + "PREDET": 41, + "PREF": 42, + "PREP": 43, + "PRONL": 44, + "PRT": 45, + "PS": 46, + "QUANTMOD": 47, + "RCMOD": 48, + "RCMODREL": 49, + "RDROP": 50, + "REF": 51, + "REMNANT": 52, + "REPARANDUM": 53, + "ROOT": 54, + "SNUM": 55, + "SUFF": 56, + "TMOD": 57, + "TOPIC": 58, + "VMOD": 59, + "VOCATIVE": 60, + "XCOMP": 61, + "SUFFIX": 62, + "TITLE": 63, + "ADVPHMOD": 64, + "AUXCAUS": 65, + "AUXVV": 66, + "DTMOD": 67, + "FOREIGN": 68, + "KW": 69, + "LIST": 70, + "NOMC": 71, + "NOMCSUBJ": 72, + "NOMCSUBJPASS": 73, + "NUMC": 74, + "COP": 75, + "DISLOCATED": 76, +} + +func (x DependencyEdge_Label) String() string { + return proto.EnumName(DependencyEdge_Label_name, int32(x)) +} +func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{6, 0} +} + +// The supported types of mentions. +type EntityMention_Type int32 + +const ( + // Unknown + EntityMention_TYPE_UNKNOWN EntityMention_Type = 0 + // Proper name + EntityMention_PROPER EntityMention_Type = 1 + // Common noun (or noun compound) + EntityMention_COMMON EntityMention_Type = 2 +) + +var EntityMention_Type_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "PROPER", + 2: "COMMON", +} +var EntityMention_Type_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "PROPER": 1, + "COMMON": 2, +} + +func (x EntityMention_Type) String() string { + return proto.EnumName(EntityMention_Type_name, int32(x)) +} +func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{7, 0} +} + +// ################################################################ # +// +// Represents the input to API methods. +type Document struct { + // Required. If the type is not set or is `TYPE_UNSPECIFIED`, + // returns an `INVALID_ARGUMENT` error. 
+ Type Document_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.language.v1beta1.Document_Type" json:"type,omitempty"` + // The source of the document: a string containing the content or a + // Google Cloud Storage URI. + // + // Types that are valid to be assigned to Source: + // *Document_Content + // *Document_GcsContentUri + Source isDocument_Source `protobuf_oneof:"source"` + // The language of the document (if not specified, the language is + // automatically detected). Both ISO and BCP-47 language codes are + // accepted.
+ // [Language + // Support](https://cloud.google.com/natural-language/docs/languages) lists + // currently supported languages for each API method. If the language (either + // specified by the caller or automatically detected) is not supported by the + // called API method, an `INVALID_ARGUMENT` error is returned. + Language string `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetType() Document_Type { + if m != nil { + return m.Type + } + return Document_TYPE_UNSPECIFIED +} + +type isDocument_Source interface { + isDocument_Source() +} + +type Document_Content struct { + Content string `protobuf:"bytes,2,opt,name=content,proto3,oneof"` +} + +type Document_GcsContentUri struct { + GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,proto3,oneof"` +} + +func (*Document_Content) isDocument_Source() {} + +func (*Document_GcsContentUri) isDocument_Source() {} + +func (m *Document) GetSource() isDocument_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Document) GetContent() string { + if x, ok := m.GetSource().(*Document_Content); ok { + return x.Content + } + return "" +} + +func (m *Document) GetGcsContentUri() string { + if x, ok := m.GetSource().(*Document_GcsContentUri); ok { + return x.GcsContentUri + } + return "" +} + +func (m *Document) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{ + (*Document_Content)(nil), + (*Document_GcsContentUri)(nil), + } +} + +func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Content) + case *Document_GcsContentUri: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GcsContentUri) + case nil: + default: + return fmt.Errorf("Document.Source has unexpected type %T", x) + } + return nil +} + +func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Document) + switch tag { + case 2: // source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_Content{x} + return true, err + case 3: // source.gcs_content_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_GcsContentUri{x} + return true, err + default: + return false, nil + } +} + +func _Document_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *Document_GcsContentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GcsContentUri))) + n += len(x.GcsContentUri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a sentence in the input document. +type Sentence struct { + // The sentence text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // For calls to [AnalyzeSentiment][] or if + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment] + // is set to true, this field will contain the sentiment for the sentence. 
+ Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentence) Reset() { *m = Sentence{} } +func (m *Sentence) String() string { return proto.CompactTextString(m) } +func (*Sentence) ProtoMessage() {} +func (*Sentence) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{1} +} +func (m *Sentence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentence.Unmarshal(m, b) +} +func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentence.Marshal(b, m, deterministic) +} +func (dst *Sentence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentence.Merge(dst, src) +} +func (m *Sentence) XXX_Size() int { + return xxx_messageInfo_Sentence.Size(m) +} +func (m *Sentence) XXX_DiscardUnknown() { + xxx_messageInfo_Sentence.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentence proto.InternalMessageInfo + +func (m *Sentence) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Sentence) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents a phrase in the text that is a known entity, such as +// a person, an organization, or location. The API associates information, such +// as salience and mentions, with entities. +type Entity struct { + // The representative name for the entity. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The entity type. + Type Entity_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta1.Entity_Type" json:"type,omitempty"` + // Metadata associated with the entity. + // + // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if + // available. The associated keys are "wikipedia_url" and "mid", respectively. + Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The salience score associated with the entity in the [0, 1.0] range. + // + // The salience score for an entity provides information about the + // importance or centrality of that entity to the entire document text. + // Scores closer to 0 are less salient, while scores closer to 1.0 are highly + // salient. + Salience float32 `protobuf:"fixed32,4,opt,name=salience,proto3" json:"salience,omitempty"` + // The mentions of this entity in the input document. The API currently + // supports proper noun mentions. 
+ Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions,proto3" json:"mentions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{2} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Entity) GetType() Entity_Type { + if m != nil { + return m.Type + } + return Entity_UNKNOWN +} + +func (m *Entity) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Entity) GetSalience() float32 { + if m != nil { + return m.Salience + } + return 0 +} + +func (m *Entity) GetMentions() []*EntityMention { + if m != nil { + return m.Mentions + } + return nil +} + +// Represents the smallest syntactic building block of the text. +type Token struct { + // The token text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Parts of speech tag for this token. + PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech,proto3" json:"part_of_speech,omitempty"` + // Dependency tree parse for this token. + DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge,proto3" json:"dependency_edge,omitempty"` + // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token. 
+ Lemma string `protobuf:"bytes,4,opt,name=lemma,proto3" json:"lemma,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Token) Reset() { *m = Token{} } +func (m *Token) String() string { return proto.CompactTextString(m) } +func (*Token) ProtoMessage() {} +func (*Token) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{3} +} +func (m *Token) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Token.Unmarshal(m, b) +} +func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Token.Marshal(b, m, deterministic) +} +func (dst *Token) XXX_Merge(src proto.Message) { + xxx_messageInfo_Token.Merge(dst, src) +} +func (m *Token) XXX_Size() int { + return xxx_messageInfo_Token.Size(m) +} +func (m *Token) XXX_DiscardUnknown() { + xxx_messageInfo_Token.DiscardUnknown(m) +} + +var xxx_messageInfo_Token proto.InternalMessageInfo + +func (m *Token) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Token) GetPartOfSpeech() *PartOfSpeech { + if m != nil { + return m.PartOfSpeech + } + return nil +} + +func (m *Token) GetDependencyEdge() *DependencyEdge { + if m != nil { + return m.DependencyEdge + } + return nil +} + +func (m *Token) GetLemma() string { + if m != nil { + return m.Lemma + } + return "" +} + +// Represents the feeling associated with the entire text or entities in +// the text. +type Sentiment struct { + // DEPRECATED FIELD - This field is being deprecated in + // favor of score. Please refer to our documentation at + // https://cloud.google.com/natural-language/docs for more information. + Polarity float32 `protobuf:"fixed32,1,opt,name=polarity,proto3" json:"polarity,omitempty"` + // A non-negative number in the [0, +inf) range, which represents + // the absolute magnitude of sentiment regardless of score (positive or + // negative). + Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"` + // Sentiment score between -1.0 (negative sentiment) and 1.0 + // (positive sentiment). 
+ Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentiment) Reset() { *m = Sentiment{} } +func (m *Sentiment) String() string { return proto.CompactTextString(m) } +func (*Sentiment) ProtoMessage() {} +func (*Sentiment) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{4} +} +func (m *Sentiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentiment.Unmarshal(m, b) +} +func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic) +} +func (dst *Sentiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentiment.Merge(dst, src) +} +func (m *Sentiment) XXX_Size() int { + return xxx_messageInfo_Sentiment.Size(m) +} +func (m *Sentiment) XXX_DiscardUnknown() { + xxx_messageInfo_Sentiment.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentiment proto.InternalMessageInfo + +func (m *Sentiment) GetPolarity() float32 { + if m != nil { + return m.Polarity + } + return 0 +} + +func (m *Sentiment) GetMagnitude() float32 { + if m != nil { + return m.Magnitude + } + return 0 +} + +func (m *Sentiment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Represents part of speech information for a token. +type PartOfSpeech struct { + // The part of speech tag. + Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Tag" json:"tag,omitempty"` + // The grammatical aspect. + Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Aspect" json:"aspect,omitempty"` + // The grammatical case. + Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Case" json:"case,omitempty"` + // The grammatical form. + Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Form" json:"form,omitempty"` + // The grammatical gender. + Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Gender" json:"gender,omitempty"` + // The grammatical mood. + Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Mood" json:"mood,omitempty"` + // The grammatical number. + Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Number" json:"number,omitempty"` + // The grammatical person. + Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Person" json:"person,omitempty"` + // The grammatical properness. + Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Proper" json:"proper,omitempty"` + // The grammatical reciprocity. + Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"` + // The grammatical tense. + Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Tense" json:"tense,omitempty"` + // The grammatical voice. 
+ Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,proto3,enum=google.cloud.language.v1beta1.PartOfSpeech_Voice" json:"voice,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartOfSpeech) Reset() { *m = PartOfSpeech{} } +func (m *PartOfSpeech) String() string { return proto.CompactTextString(m) } +func (*PartOfSpeech) ProtoMessage() {} +func (*PartOfSpeech) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{5} +} +func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartOfSpeech.Unmarshal(m, b) +} +func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartOfSpeech.Marshal(b, m, deterministic) +} +func (dst *PartOfSpeech) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartOfSpeech.Merge(dst, src) +} +func (m *PartOfSpeech) XXX_Size() int { + return xxx_messageInfo_PartOfSpeech.Size(m) +} +func (m *PartOfSpeech) XXX_DiscardUnknown() { + xxx_messageInfo_PartOfSpeech.DiscardUnknown(m) +} + +var xxx_messageInfo_PartOfSpeech proto.InternalMessageInfo + +func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag { + if m != nil { + return m.Tag + } + return PartOfSpeech_UNKNOWN +} + +func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect { + if m != nil { + return m.Aspect + } + return PartOfSpeech_ASPECT_UNKNOWN +} + +func (m *PartOfSpeech) GetCase() PartOfSpeech_Case { + if m != nil { + return m.Case + } + return PartOfSpeech_CASE_UNKNOWN +} + +func (m *PartOfSpeech) GetForm() PartOfSpeech_Form { + if m != nil { + return m.Form + } + return PartOfSpeech_FORM_UNKNOWN +} + +func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender { + if m != nil { + return m.Gender + } + return PartOfSpeech_GENDER_UNKNOWN +} + +func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood { + if m != nil { + return m.Mood + } + return PartOfSpeech_MOOD_UNKNOWN +} + +func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number { + if m != nil { + return m.Number + } + return PartOfSpeech_NUMBER_UNKNOWN +} + +func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person { + if m != nil { + return m.Person + } + return PartOfSpeech_PERSON_UNKNOWN +} + +func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper { + if m != nil { + return m.Proper + } + return PartOfSpeech_PROPER_UNKNOWN +} + +func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity { + if m != nil { + return m.Reciprocity + } + return PartOfSpeech_RECIPROCITY_UNKNOWN +} + +func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense { + if m != nil { + return m.Tense + } + return PartOfSpeech_TENSE_UNKNOWN +} + +func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice { + if m != nil { + return m.Voice + } + return PartOfSpeech_VOICE_UNKNOWN +} + +// Represents dependency parse tree information for a token. +type DependencyEdge struct { + // Represents the head of this token in the dependency tree. + // This is the index of the token which has an arc going to this token. + // The index is the position of the token in the array of tokens returned + // by the API method. If this token is a root token, then the + // `head_token_index` is its own index. + HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex,proto3" json:"head_token_index,omitempty"` + // The parse label for the token. 
+ Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,proto3,enum=google.cloud.language.v1beta1.DependencyEdge_Label" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DependencyEdge) Reset() { *m = DependencyEdge{} } +func (m *DependencyEdge) String() string { return proto.CompactTextString(m) } +func (*DependencyEdge) ProtoMessage() {} +func (*DependencyEdge) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{6} +} +func (m *DependencyEdge) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DependencyEdge.Unmarshal(m, b) +} +func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DependencyEdge.Marshal(b, m, deterministic) +} +func (dst *DependencyEdge) XXX_Merge(src proto.Message) { + xxx_messageInfo_DependencyEdge.Merge(dst, src) +} +func (m *DependencyEdge) XXX_Size() int { + return xxx_messageInfo_DependencyEdge.Size(m) +} +func (m *DependencyEdge) XXX_DiscardUnknown() { + xxx_messageInfo_DependencyEdge.DiscardUnknown(m) +} + +var xxx_messageInfo_DependencyEdge proto.InternalMessageInfo + +func (m *DependencyEdge) GetHeadTokenIndex() int32 { + if m != nil { + return m.HeadTokenIndex + } + return 0 +} + +func (m *DependencyEdge) GetLabel() DependencyEdge_Label { + if m != nil { + return m.Label + } + return DependencyEdge_UNKNOWN +} + +// Represents a mention for an entity in the text. Currently, proper noun +// mentions are supported. +type EntityMention struct { + // The mention text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The type of the entity mention. + Type EntityMention_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta1.EntityMention_Type" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityMention) Reset() { *m = EntityMention{} } +func (m *EntityMention) String() string { return proto.CompactTextString(m) } +func (*EntityMention) ProtoMessage() {} +func (*EntityMention) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{7} +} +func (m *EntityMention) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityMention.Unmarshal(m, b) +} +func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityMention.Marshal(b, m, deterministic) +} +func (dst *EntityMention) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityMention.Merge(dst, src) +} +func (m *EntityMention) XXX_Size() int { + return xxx_messageInfo_EntityMention.Size(m) +} +func (m *EntityMention) XXX_DiscardUnknown() { + xxx_messageInfo_EntityMention.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityMention proto.InternalMessageInfo + +func (m *EntityMention) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *EntityMention) GetType() EntityMention_Type { + if m != nil { + return m.Type + } + return EntityMention_TYPE_UNKNOWN +} + +// Represents an output piece of text. +type TextSpan struct { + // The content of the output text. 
+ Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // The API calculates the beginning offset of the content in the original + // document according to the + // [EncodingType][google.cloud.language.v1beta1.EncodingType] specified in the + // API request. + BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset,proto3" json:"begin_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSpan) Reset() { *m = TextSpan{} } +func (m *TextSpan) String() string { return proto.CompactTextString(m) } +func (*TextSpan) ProtoMessage() {} +func (*TextSpan) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{8} +} +func (m *TextSpan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSpan.Unmarshal(m, b) +} +func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSpan.Marshal(b, m, deterministic) +} +func (dst *TextSpan) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSpan.Merge(dst, src) +} +func (m *TextSpan) XXX_Size() int { + return xxx_messageInfo_TextSpan.Size(m) +} +func (m *TextSpan) XXX_DiscardUnknown() { + xxx_messageInfo_TextSpan.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSpan proto.InternalMessageInfo + +func (m *TextSpan) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *TextSpan) GetBeginOffset() int32 { + if m != nil { + return m.BeginOffset + } + return 0 +} + +// The sentiment analysis request message. +type AnalyzeSentimentRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate sentence offsets for the + // sentence sentiment. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentRequest) Reset() { *m = AnalyzeSentimentRequest{} } +func (m *AnalyzeSentimentRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentRequest) ProtoMessage() {} +func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{9} +} +func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentRequest.Unmarshal(m, b) +} +func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentRequest.Merge(dst, src) +} +func (m *AnalyzeSentimentRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentRequest.Size(m) +} +func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentRequest proto.InternalMessageInfo + +func (m *AnalyzeSentimentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The sentiment analysis response message. +type AnalyzeSentimentResponse struct { + // The overall sentiment of the input document. + DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta1.Document.language] + // field for more details. + Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + // The sentiment for all the sentences in the document. 
+ Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences,proto3" json:"sentences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentResponse) Reset() { *m = AnalyzeSentimentResponse{} } +func (m *AnalyzeSentimentResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentResponse) ProtoMessage() {} +func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{10} +} +func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentResponse.Unmarshal(m, b) +} +func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentResponse.Merge(dst, src) +} +func (m *AnalyzeSentimentResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentResponse.Size(m) +} +func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentResponse proto.InternalMessageInfo + +func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnalyzeSentimentResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +// The entity analysis request message. +type AnalyzeEntitiesRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesRequest) Reset() { *m = AnalyzeEntitiesRequest{} } +func (m *AnalyzeEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesRequest) ProtoMessage() {} +func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{11} +} +func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesRequest.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesRequest.Merge(dst, src) +} +func (m *AnalyzeEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesRequest.Size(m) +} +func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesRequest proto.InternalMessageInfo + +func (m *AnalyzeEntitiesRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The entity analysis response message. +type AnalyzeEntitiesResponse struct { + // The recognized entities in the input document. + Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta1.Document.language] + // field for more details. 
+ Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesResponse) Reset() { *m = AnalyzeEntitiesResponse{} } +func (m *AnalyzeEntitiesResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesResponse) ProtoMessage() {} +func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{12} +} +func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesResponse.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesResponse.Merge(dst, src) +} +func (m *AnalyzeEntitiesResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesResponse.Size(m) +} +func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesResponse proto.InternalMessageInfo + +func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnalyzeEntitiesResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The syntax analysis request message. +type AnalyzeSyntaxRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. + EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxRequest) Reset() { *m = AnalyzeSyntaxRequest{} } +func (m *AnalyzeSyntaxRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxRequest) ProtoMessage() {} +func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{13} +} +func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxRequest.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxRequest.Merge(dst, src) +} +func (m *AnalyzeSyntaxRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxRequest.Size(m) +} +func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxRequest proto.InternalMessageInfo + +func (m *AnalyzeSyntaxRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The syntax analysis response message. +type AnalyzeSyntaxResponse struct { + // Sentences in the input document. 
+ Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta1.Document.language] + // field for more details. + Language string `protobuf:"bytes,3,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxResponse) Reset() { *m = AnalyzeSyntaxResponse{} } +func (m *AnalyzeSyntaxResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxResponse) ProtoMessage() {} +func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{14} +} +func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxResponse.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxResponse.Merge(dst, src) +} +func (m *AnalyzeSyntaxResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxResponse.Size(m) +} +func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxResponse proto.InternalMessageInfo + +func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The request message for the text annotation API, which can perform multiple +// analysis types (sentiment, entities, and syntax) in one call. +type AnnotateTextRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The enabled features. + Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features,proto3" json:"features,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest) Reset() { *m = AnnotateTextRequest{} } +func (m *AnnotateTextRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest) ProtoMessage() {} +func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{15} +} +func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest.Unmarshal(m, b) +} +func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest.Merge(dst, src) +} +func (m *AnnotateTextRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest.Size(m) +} +func (m *AnnotateTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest proto.InternalMessageInfo + +func (m *AnnotateTextRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateTextRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// All available features for sentiment, syntax, and semantic analysis. +// Setting each one to true will enable that specific analysis for the input. +type AnnotateTextRequest_Features struct { + // Extract syntax information. + ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax,proto3" json:"extract_syntax,omitempty"` + // Extract entities. + ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities,proto3" json:"extract_entities,omitempty"` + // Extract document-level sentiment. 
+ ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment,proto3" json:"extract_document_sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest_Features) Reset() { *m = AnnotateTextRequest_Features{} } +func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest_Features) ProtoMessage() {} +func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{15, 0} +} +func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest_Features.Unmarshal(m, b) +} +func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest_Features.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest_Features) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest_Features.Merge(dst, src) +} +func (m *AnnotateTextRequest_Features) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest_Features.Size(m) +} +func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest_Features.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest_Features proto.InternalMessageInfo + +func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool { + if m != nil { + return m.ExtractSyntax + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractEntities() bool { + if m != nil { + return m.ExtractEntities + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool { + if m != nil { + return m.ExtractDocumentSentiment + } + return false +} + +// The text annotations response message. +type AnnotateTextResponse struct { + // Sentences in the input document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_syntax]. + Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_syntax]. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // Entities, along with their semantic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_entities]. + Entities []*Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + // The overall sentiment for the document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment]. + DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. 
+ // See [Document.language][google.cloud.language.v1beta1.Document.language] + // field for more details. + Language string `protobuf:"bytes,5,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextResponse) Reset() { *m = AnnotateTextResponse{} } +func (m *AnnotateTextResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextResponse) ProtoMessage() {} +func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_a20715f4ac30e6a8, []int{16} +} +func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextResponse.Unmarshal(m, b) +} +func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextResponse.Merge(dst, src) +} +func (m *AnnotateTextResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateTextResponse.Size(m) +} +func (m *AnnotateTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextResponse proto.InternalMessageInfo + +func (m *AnnotateTextResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnnotateTextResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnnotateTextResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnnotateTextResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func init() { + proto.RegisterType((*Document)(nil), "google.cloud.language.v1beta1.Document") + proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1beta1.Sentence") + proto.RegisterType((*Entity)(nil), "google.cloud.language.v1beta1.Entity") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.language.v1beta1.Entity.MetadataEntry") + proto.RegisterType((*Token)(nil), "google.cloud.language.v1beta1.Token") + proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1beta1.Sentiment") + proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1beta1.PartOfSpeech") + proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1beta1.DependencyEdge") + proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1beta1.EntityMention") + proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1beta1.TextSpan") + proto.RegisterType((*AnalyzeSentimentRequest)(nil), "google.cloud.language.v1beta1.AnalyzeSentimentRequest") + proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1beta1.AnalyzeSentimentResponse") + proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1beta1.AnalyzeEntitiesRequest") + proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1beta1.AnalyzeEntitiesResponse") + proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1beta1.AnalyzeSyntaxRequest") + proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1beta1.AnalyzeSyntaxResponse") + proto.RegisterType((*AnnotateTextRequest)(nil), 
"google.cloud.language.v1beta1.AnnotateTextRequest") + proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1beta1.AnnotateTextRequest.Features") + proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1beta1.AnnotateTextResponse") + proto.RegisterEnum("google.cloud.language.v1beta1.EncodingType", EncodingType_name, EncodingType_value) + proto.RegisterEnum("google.cloud.language.v1beta1.Document_Type", Document_Type_name, Document_Type_value) + proto.RegisterEnum("google.cloud.language.v1beta1.Entity_Type", Entity_Type_name, Entity_Type_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value) + proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value) + proto.RegisterEnum("google.cloud.language.v1beta1.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value) + proto.RegisterEnum("google.cloud.language.v1beta1.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LanguageServiceClient is the client API for LanguageService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LanguageServiceClient interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. 
+ AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) + // A convenience method that provides all the features that analyzeSentiment, + // analyzeEntities, and analyzeSyntax provide in one call. + AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) +} + +type languageServiceClient struct { + cc *grpc.ClientConn +} + +func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient { + return &languageServiceClient{cc} +} + +func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) { + out := new(AnalyzeSentimentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnalyzeSentiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) { + out := new(AnalyzeEntitiesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnalyzeEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) { + out := new(AnalyzeSyntaxResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnalyzeSyntax", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) { + out := new(AnnotateTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnnotateText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LanguageServiceServer is the server API for LanguageService service. +type LanguageServiceServer interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. + AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) + // A convenience method that provides all the features that analyzeSentiment, + // analyzeEntities, and analyzeSyntax provide in one call. 
+ AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error) +} + +func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) { + s.RegisterService(&_LanguageService_serviceDesc, srv) +} + +func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSentimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta1.LanguageService/AnalyzeSentiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta1.LanguageService/AnalyzeEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSyntaxRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta1.LanguageService/AnalyzeSyntax", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnnotateText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta1.LanguageService/AnnotateText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LanguageService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.language.v1beta1.LanguageService", + HandlerType: (*LanguageServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnalyzeSentiment", + Handler: _LanguageService_AnalyzeSentiment_Handler, + }, + { + MethodName: "AnalyzeEntities", + Handler: _LanguageService_AnalyzeEntities_Handler, + }, + { + MethodName: "AnalyzeSyntax", + Handler: _LanguageService_AnalyzeSyntax_Handler, + }, + { + 
MethodName: "AnnotateText", + Handler: _LanguageService_AnnotateText_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/language/v1beta1/language_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/language/v1beta1/language_service.proto", fileDescriptor_language_service_a20715f4ac30e6a8) +} + +var fileDescriptor_language_service_a20715f4ac30e6a8 = []byte{ + // 2755 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x59, 0x4f, 0x73, 0xdb, 0xc6, + 0x15, 0x37, 0xf8, 0x4f, 0xe4, 0x92, 0x92, 0xd6, 0x88, 0x93, 0xb0, 0x6a, 0xd2, 0x38, 0x48, 0x5c, + 0x2b, 0x76, 0x42, 0xc5, 0x52, 0xe2, 0xb8, 0x76, 0x9a, 0x06, 0x02, 0x96, 0x14, 0x64, 0x10, 0x40, + 0x16, 0x00, 0x25, 0xa7, 0x07, 0x0e, 0x4c, 0xae, 0x19, 0x4e, 0x24, 0x80, 0x25, 0x21, 0x8f, 0xd5, + 0x4b, 0x67, 0x32, 0xd3, 0x63, 0xa7, 0x87, 0xde, 0x7a, 0x6c, 0x0f, 0x3d, 0x75, 0xd2, 0x99, 0x5e, + 0xda, 0x0f, 0xd0, 0x43, 0xa7, 0xc7, 0xcc, 0xf4, 0x13, 0xf4, 0xd8, 0x43, 0x0f, 0x3d, 0xf4, 0xd8, + 0x79, 0xbb, 0x0b, 0xfe, 0x51, 0x1c, 0x4b, 0x4c, 0x72, 0xc8, 0x6d, 0xf7, 0xf1, 0xfd, 0x7e, 0xef, + 0xed, 0xdb, 0xb7, 0xef, 0x2d, 0x96, 0xe8, 0x9d, 0x41, 0x92, 0x0c, 0x8e, 0xd8, 0x56, 0xef, 0x28, + 0x39, 0xe9, 0x6f, 0x1d, 0x45, 0xf1, 0xe0, 0x24, 0x1a, 0xb0, 0xad, 0xc7, 0xb7, 0x1e, 0xb2, 0x34, + 0xba, 0x35, 0x15, 0x74, 0x27, 0x6c, 0xfc, 0x78, 0xd8, 0x63, 0x8d, 0xd1, 0x38, 0x49, 0x13, 0xf5, + 0x65, 0x81, 0x6a, 0x70, 0x54, 0x23, 0x53, 0x6a, 0x48, 0xd4, 0xc6, 0x4b, 0x92, 0x34, 0x1a, 0x0d, + 0xb7, 0xa2, 0x38, 0x4e, 0xd2, 0x28, 0x1d, 0x26, 0xf1, 0x44, 0x80, 0xb5, 0xff, 0x28, 0xa8, 0x6c, + 0x26, 0xbd, 0x93, 0x63, 0x16, 0xa7, 0xea, 0x87, 0xa8, 0x90, 0x9e, 0x8e, 0x58, 0x5d, 0xb9, 0xaa, + 0x6c, 0xae, 0x6d, 0xbf, 0xd9, 0x78, 0x26, 0x71, 0x23, 0x83, 0x35, 0x82, 0xd3, 0x11, 0xa3, 0x1c, + 0xa9, 0x6e, 0xa0, 0x95, 0x5e, 0x12, 0xa7, 0x2c, 0x4e, 0xeb, 0xb9, 0xab, 0xca, 0x66, 0x65, 0xef, + 0x12, 0xcd, 0x04, 0xea, 0x26, 0x5a, 0x1f, 0xf4, 0x26, 0x5d, 0x39, 0xed, 0x9e, 0x8c, 0x87, 0xf5, + 0xbc, 0xd4, 0x59, 0x1d, 0xf4, 0x26, 0x86, 0x90, 0x87, 0xe3, 0xa1, 0xba, 0x81, 0xca, 0x99, 0xb5, + 0x7a, 0x01, 0x54, 0xe8, 0x74, 0xae, 0xdd, 0x46, 0x05, 0xb0, 0xa7, 0x5e, 0x41, 0x38, 0x78, 0xe0, + 0x91, 0x6e, 0xe8, 0xf8, 0x1e, 0x31, 0xac, 0xa6, 0x45, 0x4c, 0x7c, 0x49, 0x5d, 0x43, 0xc8, 0xb3, + 0x75, 0xcb, 0xe9, 0x06, 0xe4, 0x30, 0xc0, 0x8a, 0x5a, 0x46, 0x85, 0xbd, 0xa0, 0x6d, 0xe3, 0xdc, + 0x6e, 0x19, 0x95, 0x26, 0xc9, 0xc9, 0xb8, 0xc7, 0xb4, 0x5f, 0x2b, 0xa8, 0xec, 0x33, 0x30, 0xd6, + 0x63, 0xea, 0x3d, 0x54, 0x48, 0xd9, 0x93, 0x94, 0x2f, 0xb9, 0xba, 0x7d, 0xfd, 0x9c, 0x25, 0x07, + 0xec, 0x49, 0xea, 0x8f, 0xa2, 0x98, 0x72, 0x90, 0xda, 0x44, 0x95, 0x09, 0x8b, 0xd3, 0xe1, 0x71, + 0xb6, 0xde, 0xea, 0xf6, 0xe6, 0x39, 0x0c, 0x7e, 0xa6, 0x4f, 0x67, 0x50, 0xed, 0x1f, 0x79, 0x54, + 0x22, 0x71, 0x3a, 0x4c, 0x4f, 0x55, 0x15, 0x15, 0xe2, 0xe8, 0x58, 0x6c, 0x41, 0x85, 0xf2, 0xb1, + 0xfa, 0x81, 0xdc, 0x96, 0x1c, 0xdf, 0x96, 0x1b, 0xe7, 0x58, 0x10, 0x44, 0xf3, 0x9b, 0xe2, 0xa2, + 0xf2, 0x31, 0x4b, 0xa3, 0x7e, 0x94, 0x46, 0xf5, 0xfc, 0xd5, 0xfc, 0x66, 0x75, 0x7b, 0xe7, 0x62, + 0x1c, 0x6d, 0x89, 0x22, 0x71, 0x3a, 0x3e, 0xa5, 0x53, 0x12, 0xd8, 0x9f, 0x49, 0x74, 0x34, 0x84, + 0x00, 0xf2, 0xfd, 0xc9, 0xd1, 0xe9, 0x5c, 0xdd, 0x03, 0x63, 0x31, 0x4f, 0xb1, 0x7a, 0x91, 0x1b, + 0x7b, 0xf3, 0x42, 0xc6, 0xda, 0x02, 0x44, 0xa7, 0xe8, 0x8d, 0x7b, 0x68, 0x75, 0xc1, 0x01, 0x15, + 0xa3, 0xfc, 0xa7, 0xec, 0x54, 0x86, 0x06, 0x86, 0xea, 0x15, 0x54, 0x7c, 0x1c, 0x1d, 0x9d, 0x88, + 0xd0, 0x54, 0xa8, 0x98, 0xdc, 0xcd, 0xdd, 0x51, 0xb4, 0x53, 
0x99, 0x26, 0x55, 0xb4, 0x12, 0x3a, + 0xf7, 0x1d, 0xf7, 0xc0, 0xc1, 0x97, 0x54, 0x84, 0x4a, 0x1e, 0xa1, 0xbe, 0xeb, 0x60, 0x45, 0xad, + 0xa1, 0xb2, 0xed, 0x1a, 0x7a, 0x60, 0xb9, 0x0e, 0xce, 0xa9, 0x18, 0xd5, 0x5c, 0xda, 0xd2, 0x1d, + 0xeb, 0x63, 0x21, 0xc9, 0xab, 0x15, 0x54, 0x24, 0x1d, 0xe2, 0x04, 0xb8, 0xa0, 0xae, 0xa3, 0xea, + 0x81, 0x4b, 0xef, 0x77, 0xdd, 0x66, 0x57, 0xa7, 0x01, 0x2e, 0xaa, 0x97, 0xd1, 0xaa, 0xe1, 0x3a, + 0x7e, 0xd8, 0x26, 0xb4, 0xdb, 0x72, 0x5d, 0x13, 0x97, 0x40, 0xdd, 0x0d, 0xf6, 0x08, 0xc5, 0x2b, + 0xda, 0x2f, 0x73, 0xa8, 0x18, 0x24, 0x9f, 0xb2, 0xf8, 0x9b, 0x25, 0xd7, 0x47, 0x68, 0x6d, 0x14, + 0x8d, 0xd3, 0x6e, 0xf2, 0xa8, 0x3b, 0x19, 0x31, 0xd6, 0xfb, 0x44, 0x66, 0xd8, 0xcd, 0x73, 0x68, + 0xbc, 0x68, 0x9c, 0xba, 0x8f, 0x7c, 0x0e, 0xa1, 0xb5, 0xd1, 0xdc, 0x4c, 0xed, 0xa0, 0xf5, 0x3e, + 0x1b, 0xb1, 0xb8, 0xcf, 0xe2, 0xde, 0x69, 0x97, 0xf5, 0x07, 0x8c, 0x9f, 0xc0, 0xea, 0xf6, 0x5b, + 0xe7, 0x1d, 0xf5, 0x29, 0x8a, 0xf4, 0x07, 0x8c, 0xae, 0xf5, 0x17, 0xe6, 0xb0, 0x0d, 0x47, 0xec, + 0xf8, 0x38, 0x92, 0x87, 0x55, 0x4c, 0xb4, 0x9f, 0xa2, 0xca, 0x34, 0xdb, 0x21, 0x65, 0x46, 0xc9, + 0x51, 0x34, 0x1e, 0xa6, 0x62, 0x03, 0x73, 0x74, 0x3a, 0x57, 0x5f, 0x42, 0x95, 0xe3, 0x68, 0x10, + 0x0f, 0xd3, 0x93, 0xbe, 0xd8, 0xc9, 0x1c, 0x9d, 0x09, 0x80, 0x7c, 0xd2, 0x4b, 0xc6, 0xc2, 0xd5, + 0x1c, 0x15, 0x13, 0xed, 0xaf, 0x97, 0x51, 0x6d, 0x7e, 0xa5, 0xaa, 0x8e, 0xf2, 0x69, 0x34, 0x90, + 0xa5, 0x6b, 0x6b, 0x89, 0x18, 0x35, 0x82, 0x68, 0x40, 0x01, 0xab, 0xee, 0xa3, 0x52, 0x34, 0x19, + 0xb1, 0x5e, 0x2a, 0x4f, 0xda, 0xf6, 0x32, 0x2c, 0x3a, 0x47, 0x52, 0xc9, 0xa0, 0x9a, 0xa8, 0xd0, + 0x8b, 0x26, 0xc2, 0xe9, 0xb5, 0xed, 0xb7, 0x97, 0x61, 0x32, 0xa2, 0x09, 0xa3, 0x1c, 0x0d, 0x2c, + 0x8f, 0x92, 0xf1, 0x31, 0x8f, 0xeb, 0x92, 0x2c, 0xcd, 0x64, 0x7c, 0x4c, 0x39, 0x1a, 0xd6, 0x35, + 0x80, 0xed, 0x1a, 0xd7, 0x8b, 0xcb, 0xaf, 0xab, 0xc5, 0x91, 0x54, 0x32, 0x80, 0x47, 0xc7, 0x49, + 0xd2, 0xaf, 0x97, 0x96, 0xf7, 0xa8, 0x9d, 0x24, 0x7d, 0xca, 0xd1, 0xe0, 0x51, 0x7c, 0x72, 0xfc, + 0x90, 0x8d, 0xeb, 0x2b, 0xcb, 0x7b, 0xe4, 0x70, 0x24, 0x95, 0x0c, 0xc0, 0x35, 0x62, 0xe3, 0x49, + 0x12, 0xd7, 0xcb, 0xcb, 0x73, 0x79, 0x1c, 0x49, 0x25, 0x03, 0xe7, 0x1a, 0x27, 0x23, 0x36, 0xae, + 0x57, 0xbe, 0x06, 0x17, 0x47, 0x52, 0xc9, 0xa0, 0x3e, 0x40, 0xd5, 0x31, 0xeb, 0x0d, 0x47, 0xe3, + 0xa4, 0x07, 0x49, 0x8f, 0x38, 0xe1, 0x7b, 0xcb, 0x10, 0xd2, 0x19, 0x9c, 0xce, 0x73, 0xa9, 0x2d, + 0x54, 0x4c, 0x59, 0x3c, 0x61, 0xf5, 0x2a, 0x27, 0xbd, 0xb5, 0x54, 0xb6, 0x03, 0x90, 0x0a, 0x3c, + 0x10, 0x3d, 0x4e, 0x86, 0x3d, 0x56, 0xaf, 0x2d, 0x4f, 0xd4, 0x01, 0x20, 0x15, 0x78, 0xed, 0x57, + 0x0a, 0xca, 0x07, 0xd1, 0x60, 0xb1, 0xdc, 0xae, 0xa0, 0xbc, 0x6e, 0xee, 0x63, 0x45, 0x0c, 0x3c, + 0x9c, 0x13, 0x83, 0x0e, 0xce, 0x43, 0x5f, 0x36, 0x5c, 0x67, 0x1f, 0x17, 0x40, 0x64, 0x12, 0x28, + 0xaa, 0x65, 0x54, 0x70, 0xdc, 0xd0, 0xc1, 0x25, 0x10, 0x39, 0x61, 0x1b, 0xaf, 0x80, 0xc8, 0xa3, + 0xae, 0x83, 0xcb, 0x20, 0xf2, 0x68, 0x80, 0x2b, 0x50, 0x67, 0xbd, 0xd0, 0x31, 0x02, 0x8c, 0xe0, + 0xd7, 0x0e, 0xa1, 0xbb, 0xb8, 0xaa, 0x16, 0x91, 0x72, 0x88, 0x6b, 0xf0, 0x9b, 0xde, 0x6c, 0x5a, + 0x87, 0x78, 0x55, 0x73, 0x51, 0x49, 0x1c, 0x48, 0x55, 0x45, 0x6b, 0x3a, 0xdc, 0x10, 0x82, 0xee, + 0xcc, 0x31, 0xb8, 0x25, 0x10, 0xda, 0x24, 0x46, 0x60, 0x75, 0x08, 0x56, 0xa0, 0xfa, 0x5b, 0xed, + 0x39, 0x49, 0x0e, 0x4a, 0xbe, 0x47, 0xdd, 0x16, 0x25, 0xbe, 0x0f, 0x82, 0xbc, 0xf6, 0x3f, 0x05, + 0x15, 0xe0, 0x60, 0x82, 0xae, 0xa1, 0xfb, 0x64, 0x91, 0x4d, 0x37, 0x8c, 0xd0, 0xd7, 0x25, 0xdb, + 0x2a, 0xaa, 0xe8, 0x26, 0x78, 0x66, 0xe9, 0x36, 0xce, 0x89, 0x66, 0xd1, 0xf6, 0x6c, 
0xd2, 0x26, + 0x0e, 0xd7, 0xc8, 0x43, 0x1f, 0x32, 0x85, 0x76, 0x01, 0xfa, 0x50, 0x8b, 0x38, 0x16, 0x9f, 0x15, + 0xb9, 0x27, 0x8e, 0x1f, 0xd0, 0x10, 0x94, 0x75, 0x1b, 0x97, 0x66, 0x7d, 0xaa, 0x43, 0xf0, 0x0a, + 0xd8, 0x72, 0xdc, 0xb6, 0xe5, 0x88, 0x79, 0x19, 0xe2, 0xed, 0xee, 0xda, 0xd6, 0x47, 0x21, 0xc1, + 0x15, 0x30, 0xec, 0xe9, 0x34, 0x10, 0x5c, 0x08, 0x0c, 0x7b, 0x94, 0x78, 0xae, 0x6f, 0x41, 0x4b, + 0xd3, 0x6d, 0x5c, 0x85, 0x60, 0x50, 0xd2, 0xb4, 0xc9, 0xa1, 0xd5, 0x21, 0x5d, 0x58, 0x06, 0xae, + 0x81, 0x1a, 0x25, 0x36, 0x27, 0x14, 0xa2, 0x55, 0xb0, 0xd9, 0xc9, 0x6c, 0xae, 0x69, 0x9f, 0x2b, + 0xa8, 0x00, 0xd5, 0x04, 0x9c, 0x6b, 0xba, 0xb4, 0x3d, 0xb7, 0xf4, 0x1a, 0x2a, 0xeb, 0x26, 0x38, + 0xa4, 0xdb, 0x72, 0xe1, 0xe1, 0xa1, 0x65, 0x5b, 0x3a, 0x7d, 0x80, 0x73, 0x60, 0x6c, 0x6e, 0xe1, + 0x1f, 0x13, 0x8a, 0xf3, 0x9c, 0xc2, 0x72, 0x74, 0xbb, 0x4b, 0x1c, 0xd3, 0x72, 0x5a, 0xb8, 0x00, + 0xb1, 0x68, 0x11, 0x1a, 0x3a, 0x26, 0x2e, 0xc2, 0x98, 0x12, 0xdd, 0xb6, 0x7c, 0xb1, 0x6e, 0x8b, + 0xca, 0xd9, 0x0a, 0x6c, 0xad, 0xbf, 0xe7, 0xd2, 0x00, 0x97, 0x61, 0xdb, 0x6d, 0xd7, 0x69, 0x89, + 0x5c, 0x70, 0xa9, 0x49, 0x28, 0x46, 0xa0, 0x2d, 0xaf, 0x81, 0x06, 0xae, 0x6a, 0x04, 0x95, 0x44, + 0xd9, 0x02, 0x1f, 0x5a, 0xc4, 0x31, 0x09, 0x5d, 0x74, 0xba, 0x49, 0xda, 0x96, 0x63, 0x39, 0x72, + 0xb7, 0xda, 0xba, 0x6f, 0x84, 0x36, 0x4c, 0x73, 0xe0, 0x82, 0x43, 0xc2, 0x00, 0x9c, 0xd5, 0x7e, + 0x81, 0x0a, 0x50, 0xb3, 0xc0, 0xe9, 0xb6, 0xeb, 0x9a, 0x73, 0x14, 0x57, 0x10, 0x36, 0x5c, 0xc7, + 0x94, 0x81, 0xed, 0xc2, 0xaf, 0x58, 0x81, 0xcd, 0xe1, 0x69, 0xa4, 0xcb, 0x24, 0x82, 0xb9, 0x63, + 0x5a, 0x32, 0x90, 0x79, 0x88, 0xb4, 0xe5, 0x04, 0x84, 0x52, 0xb7, 0x95, 0xed, 0x7e, 0x15, 0xad, + 0xec, 0x87, 0x22, 0xc7, 0x8a, 0x90, 0x74, 0x7e, 0xb8, 0xbb, 0x0f, 0xe9, 0x0d, 0x82, 0x92, 0xf6, + 0x21, 0x2a, 0x89, 0x62, 0x07, 0xeb, 0x70, 0xc2, 0xf6, 0xee, 0xd9, 0x75, 0xf8, 0x96, 0xd3, 0x0a, + 0x6d, 0x9d, 0x62, 0x85, 0xdf, 0x6d, 0xec, 0x90, 0xf2, 0x94, 0x2b, 0xa3, 0x82, 0x19, 0xea, 0x36, + 0xce, 0x6b, 0x01, 0x2a, 0x89, 0x12, 0x07, 0x0c, 0xe2, 0xee, 0x33, 0xc7, 0x50, 0x41, 0xc5, 0xa6, + 0x45, 0xfd, 0x40, 0xc0, 0x7d, 0x02, 0x6b, 0xc2, 0x39, 0x10, 0x07, 0x7b, 0x16, 0x35, 0x71, 0x1e, + 0x16, 0x3a, 0x4b, 0x18, 0x79, 0x77, 0x2a, 0x68, 0x77, 0x50, 0x49, 0x14, 0x3b, 0xce, 0x4a, 0x5d, + 0x6f, 0xc1, 0x2f, 0xf0, 0x84, 0xcb, 0x44, 0x48, 0x1c, 0x37, 0xe8, 0xca, 0x79, 0x4e, 0xdb, 0x47, + 0xd5, 0xb9, 0xaa, 0xa6, 0xbe, 0x88, 0x9e, 0xa3, 0xc4, 0xb0, 0x3c, 0xea, 0x1a, 0x56, 0xf0, 0x60, + 0xf1, 0x4c, 0x65, 0x3f, 0xf0, 0xd4, 0x82, 0xf5, 0xbb, 0x4e, 0x77, 0x4e, 0x96, 0xd3, 0x26, 0xa8, + 0xc8, 0x8b, 0x19, 0xc4, 0x35, 0x20, 0xce, 0xc2, 0x99, 0x7c, 0x1e, 0x5d, 0x9e, 0xdf, 0x20, 0xfe, + 0xb3, 0x58, 0x65, 0x33, 0x0c, 0x42, 0x4a, 0x44, 0x90, 0x3c, 0xdd, 0x0f, 0x70, 0x1e, 0x36, 0xc1, + 0xa3, 0xc4, 0x17, 0x97, 0xbd, 0x55, 0x54, 0x99, 0xd6, 0x02, 0x5c, 0x14, 0x1f, 0x14, 0x61, 0x36, + 0x2f, 0x69, 0xbb, 0xa8, 0xc8, 0x0b, 0x1f, 0x18, 0xed, 0xb8, 0x96, 0x41, 0x16, 0x17, 0xae, 0x1b, + 0xb3, 0x22, 0x60, 0xe8, 0x59, 0x4d, 0xc8, 0x71, 0x13, 0x7a, 0x56, 0x4b, 0xfe, 0xbb, 0x82, 0xd6, + 0x16, 0x6f, 0x54, 0xea, 0x26, 0xc2, 0x9f, 0xb0, 0xa8, 0xdf, 0x4d, 0xe1, 0xde, 0xd8, 0x1d, 0xc6, + 0x7d, 0xf6, 0x84, 0x5f, 0x65, 0x8a, 0x74, 0x0d, 0xe4, 0xfc, 0x3a, 0x69, 0x81, 0x54, 0xb5, 0x50, + 0xf1, 0x28, 0x7a, 0xc8, 0x8e, 0xe4, 0x1d, 0x65, 0x67, 0xa9, 0x9b, 0x5b, 0xc3, 0x06, 0x28, 0x15, + 0x0c, 0xda, 0xbf, 0x4b, 0xa8, 0xc8, 0x05, 0x5f, 0xba, 0x25, 0xeb, 0xbb, 0xbb, 0x94, 0x74, 0xb0, + 0xc2, 0x4b, 0x2a, 0x1c, 0x62, 0x91, 0x15, 0xba, 0xd9, 0x31, 0x6c, 0x51, 0xbf, 0x74, 0xb3, 0xd3, + 0x76, 0x4d, 
0x5c, 0x80, 0x30, 0xea, 0x30, 0x2a, 0x72, 0x05, 0xcf, 0x73, 0xe1, 0xf0, 0x82, 0x30, + 0x08, 0x28, 0x5e, 0xe1, 0x15, 0x3f, 0x3c, 0x14, 0x95, 0x4a, 0x0f, 0x0f, 0x21, 0x08, 0xb8, 0xa2, + 0x96, 0x50, 0xce, 0x30, 0x30, 0x02, 0x88, 0xc1, 0xe9, 0xab, 0xd3, 0x8e, 0xc0, 0xcb, 0xb8, 0x01, + 0xe7, 0x00, 0xaf, 0xf2, 0x28, 0xc2, 0x90, 0xc3, 0xd6, 0x44, 0xaf, 0xf0, 0xf0, 0x7a, 0xd6, 0x34, + 0x30, 0x28, 0x98, 0x96, 0x6f, 0xb8, 0x21, 0xf5, 0x09, 0xbe, 0xcc, 0x13, 0xdf, 0xdd, 0xdd, 0xc7, + 0x2a, 0x8c, 0xc8, 0xa1, 0x67, 0xe3, 0xe7, 0x78, 0x81, 0x75, 0x89, 0x7f, 0x60, 0x05, 0x7b, 0xf8, + 0x0a, 0xc8, 0x2d, 0xd0, 0x78, 0x1e, 0x46, 0x6d, 0x9d, 0xde, 0xc7, 0x2f, 0x00, 0x5b, 0xfb, 0x80, + 0xe0, 0x17, 0xc5, 0xa0, 0x83, 0xeb, 0xbc, 0x03, 0x91, 0x16, 0xfe, 0x1e, 0x38, 0xea, 0x38, 0x78, + 0x03, 0x48, 0x1c, 0x4f, 0xae, 0xf9, 0xfb, 0xe0, 0xa1, 0xc3, 0x3d, 0x7c, 0x09, 0x1c, 0x70, 0xa6, + 0x1e, 0xbe, 0x9c, 0xb5, 0xae, 0x1f, 0xf0, 0x3a, 0xc2, 0x0f, 0x2c, 0x7e, 0x05, 0xda, 0x93, 0x87, + 0xaf, 0xca, 0xf2, 0xac, 0x07, 0xfa, 0xa1, 0xe5, 0xe3, 0x57, 0x45, 0x4a, 0xd0, 0x00, 0x18, 0x35, + 0xde, 0xd6, 0x78, 0x20, 0x5e, 0xe3, 0x79, 0x09, 0x1e, 0xbe, 0x2e, 0x46, 0xbe, 0x8f, 0xaf, 0x71, + 0x5d, 0xd7, 0x0f, 0xc0, 0xa7, 0x1f, 0xca, 0x74, 0xe5, 0xda, 0xd7, 0xa7, 0x13, 0x67, 0x1f, 0x6f, + 0x8a, 0x93, 0x47, 0x20, 0x32, 0x6f, 0x88, 0xde, 0x49, 0x9a, 0xf8, 0x86, 0x1c, 0x79, 0xf8, 0x26, + 0xb7, 0x42, 0x5d, 0xc7, 0xc6, 0x6f, 0x66, 0x0d, 0xf5, 0x2d, 0x58, 0xa1, 0xe7, 0xe3, 0x06, 0xac, + 0xf0, 0xa3, 0x50, 0x77, 0xb8, 0x3f, 0x5b, 0xa0, 0x49, 0x0d, 0x18, 0xbe, 0x0d, 0x3f, 0xf0, 0x21, + 0x25, 0x36, 0xbe, 0xc5, 0x7f, 0x30, 0xa9, 0xeb, 0xe1, 0x6d, 0xa0, 0x00, 0x03, 0x3b, 0xe0, 0x03, + 0x25, 0x6d, 0x47, 0x77, 0x02, 0xfc, 0x8e, 0x38, 0xb9, 0xb0, 0x4e, 0xc7, 0x0c, 0xdb, 0xf8, 0x5d, + 0xb0, 0x4e, 0x5d, 0x37, 0xc0, 0xb7, 0x61, 0xe4, 0x43, 0x70, 0xde, 0xe3, 0xa3, 0xb0, 0xd9, 0xc4, + 0x77, 0x60, 0xc4, 0x2d, 0xfe, 0x88, 0x17, 0x1d, 0xd7, 0xb3, 0x0c, 0x7c, 0x97, 0x37, 0x76, 0x10, + 0xde, 0x5b, 0x68, 0x44, 0xef, 0x83, 0xca, 0x21, 0x5f, 0xf6, 0x8f, 0x79, 0xb9, 0x0a, 0x79, 0xaf, + 0xff, 0x80, 0x23, 0xad, 0xc0, 0x26, 0xf8, 0x27, 0xa2, 0x1f, 0x75, 0xbc, 0x3d, 0x40, 0x7f, 0x28, + 0x53, 0x0e, 0x8e, 0x21, 0xd6, 0x79, 0x76, 0x86, 0x87, 0x9d, 0x0e, 0xde, 0x85, 0xa1, 0xc9, 0xad, + 0x1a, 0xa0, 0xd2, 0x74, 0x29, 0xb1, 0x5a, 0x0e, 0x36, 0x21, 0x14, 0xf7, 0x0f, 0x30, 0xe1, 0x1d, + 0xc6, 0xf2, 0x03, 0xdc, 0x14, 0x77, 0x92, 0xb6, 0x81, 0x5b, 0x3c, 0x01, 0xdc, 0xb6, 0xc8, 0xcb, + 0x3d, 0xe8, 0x08, 0xd9, 0x8c, 0x6f, 0xbc, 0xc5, 0x35, 0xc3, 0xb6, 0x81, 0xf7, 0x21, 0x2c, 0x86, + 0xeb, 0xe1, 0xfb, 0x10, 0x09, 0xd3, 0xf2, 0x79, 0xf3, 0x26, 0x26, 0xb6, 0xb5, 0xbf, 0x29, 0x68, + 0x75, 0xe1, 0x5b, 0xf7, 0x9b, 0x7d, 0x1f, 0x92, 0x85, 0x57, 0x81, 0x5b, 0xcb, 0x7c, 0x64, 0xcf, + 0x3d, 0x0e, 0x68, 0x6f, 0xcb, 0x0f, 0x65, 0x8c, 0x6a, 0xf2, 0x3d, 0xe5, 0x69, 0x75, 0x1c, 0xa1, + 0x92, 0xe1, 0xb6, 0xdb, 0xf0, 0xad, 0xac, 0xb5, 0x50, 0x39, 0x73, 0x45, 0xad, 0xcf, 0xde, 0x7b, + 0xc4, 0x67, 0xf9, 0xf4, 0xb5, 0xe7, 0x55, 0x54, 0x7b, 0xc8, 0x06, 0xc3, 0xb8, 0x9b, 0x3c, 0x7a, + 0x34, 0x61, 0xe2, 0x93, 0xaa, 0x48, 0xab, 0x5c, 0xe6, 0x72, 0x91, 0xf6, 0x27, 0x05, 0xbd, 0xa8, + 0xc7, 0xd1, 0xd1, 0xe9, 0xcf, 0xd9, 0xec, 0x59, 0x84, 0xfd, 0xec, 0x84, 0x4d, 0x52, 0xd5, 0x40, + 0xe5, 0xbe, 0x7c, 0x5f, 0xba, 0x60, 0x78, 0xb2, 0xe7, 0x28, 0x3a, 0x05, 0xaa, 0x1e, 0x5a, 0x65, + 0x71, 0x2f, 0xe9, 0x0f, 0xe3, 0x41, 0x77, 0x2e, 0x56, 0x37, 0xcf, 0x8d, 0x95, 0xc0, 0xf0, 0x28, + 0xd5, 0xd8, 0xdc, 0x4c, 0xfb, 0xa7, 0x82, 0xea, 0x5f, 0x76, 0x79, 0x32, 0x4a, 0xa0, 0x0f, 0x1d, + 0x20, 0x35, 0x33, 0xdd, 0x9d, 0xbd, 
0x0b, 0x29, 0x4b, 0xbe, 0x0b, 0x5d, 0xce, 0x38, 0x16, 0x3e, + 0x9e, 0xa7, 0xef, 0x61, 0xb9, 0xc5, 0xf7, 0x30, 0x95, 0x88, 0x37, 0x28, 0x16, 0xf7, 0xd8, 0x44, + 0xbe, 0xee, 0x5c, 0xbf, 0x80, 0x2d, 0xd0, 0xa7, 0x33, 0x24, 0x5c, 0xf2, 0x5e, 0x90, 0x0b, 0xe3, + 0xa9, 0x32, 0x64, 0x93, 0xef, 0xf8, 0x56, 0x3c, 0x99, 0x26, 0xcf, 0xcc, 0x61, 0xb9, 0x11, 0x3a, + 0x2a, 0x33, 0x29, 0xab, 0x2b, 0x3c, 0x24, 0xd7, 0x2e, 0x74, 0x3c, 0xe8, 0x14, 0xf6, 0xac, 0x90, + 0x6b, 0x7f, 0x54, 0xd0, 0x95, 0x2c, 0x09, 0x4e, 0xe3, 0x34, 0x7a, 0xf2, 0x1d, 0x8f, 0xd4, 0x5f, + 0x14, 0xf4, 0xfc, 0x19, 0x7f, 0x65, 0xa0, 0x16, 0x92, 0x47, 0xf9, 0xba, 0xc9, 0xa3, 0xbe, 0x8f, + 0x4a, 0xfc, 0xe2, 0x32, 0xa9, 0xe7, 0x38, 0xc7, 0xeb, 0xe7, 0x55, 0x32, 0x50, 0xa6, 0x12, 0xb3, + 0x10, 0xea, 0xfc, 0x99, 0x50, 0xff, 0x2e, 0x8f, 0x9e, 0xd3, 0xc5, 0xa3, 0x35, 0x83, 0xa2, 0xf3, + 0xad, 0x46, 0xfa, 0x00, 0x95, 0x1f, 0xb1, 0x28, 0x3d, 0x19, 0xb3, 0x89, 0x7c, 0x5b, 0xbb, 0x77, + 0x0e, 0xc9, 0x53, 0x5c, 0x69, 0x34, 0x25, 0x05, 0x9d, 0x92, 0x7d, 0x79, 0x0b, 0xf3, 0xdf, 0x70, + 0x0b, 0x37, 0x7e, 0xab, 0xa0, 0x72, 0x66, 0x48, 0xbd, 0x86, 0xd6, 0xd8, 0x93, 0x74, 0x1c, 0xf5, + 0xd2, 0xee, 0x84, 0xef, 0x27, 0x0f, 0x41, 0x99, 0xae, 0x4a, 0xa9, 0xd8, 0x64, 0xf5, 0x0d, 0x84, + 0x33, 0xb5, 0xe9, 0x69, 0xc8, 0x71, 0xc5, 0x75, 0x29, 0xcf, 0x0e, 0x8e, 0xfa, 0x3e, 0xda, 0xc8, + 0x54, 0x9f, 0x52, 0xc1, 0xf2, 0x1c, 0x54, 0x97, 0x1a, 0xe6, 0xd9, 0xf2, 0xa4, 0x7d, 0x91, 0x83, + 0xf3, 0x30, 0x1f, 0x99, 0xef, 0x52, 0x7a, 0xcd, 0x17, 0x83, 0xfc, 0xd7, 0x2b, 0x06, 0x4f, 0x2f, + 0xec, 0x85, 0x6f, 0xb7, 0xb0, 0x17, 0x17, 0x53, 0xff, 0xc6, 0x1d, 0x54, 0x9b, 0x4f, 0x08, 0x71, + 0x17, 0x71, 0x08, 0xbe, 0x04, 0xa3, 0x30, 0x68, 0xde, 0x11, 0xd7, 0xf3, 0x30, 0x68, 0xde, 0xba, + 0x2d, 0xae, 0xe7, 0x61, 0xd0, 0xdc, 0xd9, 0xc6, 0xf9, 0xed, 0xbf, 0x17, 0xd1, 0xba, 0x2d, 0x69, + 0x7c, 0xf1, 0x57, 0x91, 0xfa, 0x67, 0x05, 0xe1, 0xb3, 0x8d, 0x4b, 0xbd, 0x7d, 0x6e, 0xba, 0x3f, + 0xb5, 0x39, 0x6f, 0xbc, 0xb7, 0x34, 0x4e, 0x24, 0x84, 0xd6, 0xf8, 0xec, 0x8b, 0x7f, 0xfd, 0x26, + 0xb7, 0xa9, 0xbd, 0x36, 0xfd, 0x4f, 0x2b, 0x8b, 0xc9, 0xe4, 0x6e, 0x74, 0x06, 0x74, 0x57, 0xb9, + 0xa1, 0x7e, 0xae, 0xa0, 0xf5, 0x33, 0x45, 0x5e, 0x7d, 0xf7, 0x62, 0xc6, 0xcf, 0x74, 0xb1, 0x8d, + 0xdb, 0xcb, 0xc2, 0xa4, 0xcb, 0x6f, 0x71, 0x97, 0xaf, 0x6b, 0xda, 0x57, 0xbb, 0x9c, 0x61, 0xc0, + 0xe3, 0x3f, 0x28, 0x68, 0x75, 0xa1, 0xd6, 0xaa, 0x3b, 0x17, 0x0c, 0xd6, 0x7c, 0x27, 0xd9, 0x78, + 0x67, 0x39, 0x90, 0xf4, 0xf5, 0x26, 0xf7, 0xf5, 0x9a, 0x76, 0xf5, 0x19, 0xe1, 0xe5, 0x08, 0xf0, + 0xf4, 0xf7, 0x0a, 0xaa, 0xcd, 0x9f, 0x5a, 0x75, 0x7b, 0xf9, 0xe2, 0xb7, 0xb1, 0xb3, 0x14, 0x46, + 0xba, 0x79, 0x83, 0xbb, 0xf9, 0xba, 0xf6, 0xca, 0x53, 0xdd, 0x9c, 0x01, 0xee, 0x2a, 0x37, 0x76, + 0x3f, 0x53, 0xd0, 0xab, 0xbd, 0xe4, 0xf8, 0xd9, 0x66, 0x76, 0xaf, 0x9c, 0x49, 0x77, 0x6f, 0x9c, + 0xa4, 0x89, 0xa7, 0x7c, 0x4c, 0x24, 0x6c, 0x90, 0x00, 0xa4, 0x91, 0x8c, 0x07, 0x5b, 0x03, 0x16, + 0xf3, 0x7f, 0x3e, 0xb7, 0xc4, 0x4f, 0xd1, 0x68, 0x38, 0xf9, 0x8a, 0xff, 0x5b, 0xef, 0x65, 0x82, + 0x87, 0x25, 0x8e, 0xd8, 0xf9, 0x7f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe6, 0xf6, 0xe5, 0xb1, 0xa0, + 0x1d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta2/language_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta2/language_service.pb.go new file mode 100644 index 0000000..c5d9219 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/language/v1beta2/language_service.pb.go @@ -0,0 +1,3118 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/language/v1beta2/language_service.proto + +package language // import "google.golang.org/genproto/googleapis/cloud/language/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the text encoding that the caller uses to process the output. +// Providing an `EncodingType` is recommended because the API provides the +// beginning offsets for various outputs, such as tokens and mentions, and +// languages that natively use different text encodings may access offsets +// differently. +type EncodingType int32 + +const ( + // If `EncodingType` is not specified, encoding-dependent information (such as + // `begin_offset`) will be set at `-1`. + EncodingType_NONE EncodingType = 0 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-8 encoding of the input. C++ and Go are examples of languages + // that use this encoding natively. + EncodingType_UTF8 EncodingType = 1 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-16 encoding of the input. Java and Javascript are examples of + // languages that use this encoding natively. + EncodingType_UTF16 EncodingType = 2 + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-32 encoding of the input. Python is an example of a language + // that uses this encoding natively. + EncodingType_UTF32 EncodingType = 3 +) + +var EncodingType_name = map[int32]string{ + 0: "NONE", + 1: "UTF8", + 2: "UTF16", + 3: "UTF32", +} +var EncodingType_value = map[string]int32{ + "NONE": 0, + "UTF8": 1, + "UTF16": 2, + "UTF32": 3, +} + +func (x EncodingType) String() string { + return proto.EnumName(EncodingType_name, int32(x)) +} +func (EncodingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{0} +} + +// The document types enum. +type Document_Type int32 + +const ( + // The content type is not specified. + Document_TYPE_UNSPECIFIED Document_Type = 0 + // Plain text + Document_PLAIN_TEXT Document_Type = 1 + // HTML + Document_HTML Document_Type = 2 +) + +var Document_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "PLAIN_TEXT", + 2: "HTML", +} +var Document_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "PLAIN_TEXT": 1, + "HTML": 2, +} + +func (x Document_Type) String() string { + return proto.EnumName(Document_Type_name, int32(x)) +} +func (Document_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{0, 0} +} + +// The type of the entity. 
+type Entity_Type int32 + +const ( + // Unknown + Entity_UNKNOWN Entity_Type = 0 + // Person + Entity_PERSON Entity_Type = 1 + // Location + Entity_LOCATION Entity_Type = 2 + // Organization + Entity_ORGANIZATION Entity_Type = 3 + // Event + Entity_EVENT Entity_Type = 4 + // Work of art + Entity_WORK_OF_ART Entity_Type = 5 + // Consumer goods + Entity_CONSUMER_GOOD Entity_Type = 6 + // Other types + Entity_OTHER Entity_Type = 7 +) + +var Entity_Type_name = map[int32]string{ + 0: "UNKNOWN", + 1: "PERSON", + 2: "LOCATION", + 3: "ORGANIZATION", + 4: "EVENT", + 5: "WORK_OF_ART", + 6: "CONSUMER_GOOD", + 7: "OTHER", +} +var Entity_Type_value = map[string]int32{ + "UNKNOWN": 0, + "PERSON": 1, + "LOCATION": 2, + "ORGANIZATION": 3, + "EVENT": 4, + "WORK_OF_ART": 5, + "CONSUMER_GOOD": 6, + "OTHER": 7, +} + +func (x Entity_Type) String() string { + return proto.EnumName(Entity_Type_name, int32(x)) +} +func (Entity_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{2, 0} +} + +// The part of speech tags enum. +type PartOfSpeech_Tag int32 + +const ( + // Unknown + PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0 + // Adjective + PartOfSpeech_ADJ PartOfSpeech_Tag = 1 + // Adposition (preposition and postposition) + PartOfSpeech_ADP PartOfSpeech_Tag = 2 + // Adverb + PartOfSpeech_ADV PartOfSpeech_Tag = 3 + // Conjunction + PartOfSpeech_CONJ PartOfSpeech_Tag = 4 + // Determiner + PartOfSpeech_DET PartOfSpeech_Tag = 5 + // Noun (common and proper) + PartOfSpeech_NOUN PartOfSpeech_Tag = 6 + // Cardinal number + PartOfSpeech_NUM PartOfSpeech_Tag = 7 + // Pronoun + PartOfSpeech_PRON PartOfSpeech_Tag = 8 + // Particle or other function word + PartOfSpeech_PRT PartOfSpeech_Tag = 9 + // Punctuation + PartOfSpeech_PUNCT PartOfSpeech_Tag = 10 + // Verb (all tenses and modes) + PartOfSpeech_VERB PartOfSpeech_Tag = 11 + // Other: foreign words, typos, abbreviations + PartOfSpeech_X PartOfSpeech_Tag = 12 + // Affix + PartOfSpeech_AFFIX PartOfSpeech_Tag = 13 +) + +var PartOfSpeech_Tag_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ADJ", + 2: "ADP", + 3: "ADV", + 4: "CONJ", + 5: "DET", + 6: "NOUN", + 7: "NUM", + 8: "PRON", + 9: "PRT", + 10: "PUNCT", + 11: "VERB", + 12: "X", + 13: "AFFIX", +} +var PartOfSpeech_Tag_value = map[string]int32{ + "UNKNOWN": 0, + "ADJ": 1, + "ADP": 2, + "ADV": 3, + "CONJ": 4, + "DET": 5, + "NOUN": 6, + "NUM": 7, + "PRON": 8, + "PRT": 9, + "PUNCT": 10, + "VERB": 11, + "X": 12, + "AFFIX": 13, +} + +func (x PartOfSpeech_Tag) String() string { + return proto.EnumName(PartOfSpeech_Tag_name, int32(x)) +} +func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 0} +} + +// The characteristic of a verb that expresses time flow during an event. +type PartOfSpeech_Aspect int32 + +const ( + // Aspect is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0 + // Perfective + PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1 + // Imperfective + PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2 + // Progressive + PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3 +) + +var PartOfSpeech_Aspect_name = map[int32]string{ + 0: "ASPECT_UNKNOWN", + 1: "PERFECTIVE", + 2: "IMPERFECTIVE", + 3: "PROGRESSIVE", +} +var PartOfSpeech_Aspect_value = map[string]int32{ + "ASPECT_UNKNOWN": 0, + "PERFECTIVE": 1, + "IMPERFECTIVE": 2, + "PROGRESSIVE": 3, +} + +func (x PartOfSpeech_Aspect) String() string { + return proto.EnumName(PartOfSpeech_Aspect_name, int32(x)) +} +func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 1} +} + +// The grammatical function performed by a noun or pronoun in a phrase, +// clause, or sentence. In some languages, other parts of speech, such as +// adjective and determiner, take case inflection in agreement with the noun. +type PartOfSpeech_Case int32 + +const ( + // Case is not applicable in the analyzed language or is not predicted. + PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0 + // Accusative + PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1 + // Adverbial + PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2 + // Complementive + PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3 + // Dative + PartOfSpeech_DATIVE PartOfSpeech_Case = 4 + // Genitive + PartOfSpeech_GENITIVE PartOfSpeech_Case = 5 + // Instrumental + PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6 + // Locative + PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7 + // Nominative + PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8 + // Oblique + PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9 + // Partitive + PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10 + // Prepositional + PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11 + // Reflexive + PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12 + // Relative + PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13 + // Vocative + PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14 +) + +var PartOfSpeech_Case_name = map[int32]string{ + 0: "CASE_UNKNOWN", + 1: "ACCUSATIVE", + 2: "ADVERBIAL", + 3: "COMPLEMENTIVE", + 4: "DATIVE", + 5: "GENITIVE", + 6: "INSTRUMENTAL", + 7: "LOCATIVE", + 8: "NOMINATIVE", + 9: "OBLIQUE", + 10: "PARTITIVE", + 11: "PREPOSITIONAL", + 12: "REFLEXIVE_CASE", + 13: "RELATIVE_CASE", + 14: "VOCATIVE", +} +var PartOfSpeech_Case_value = map[string]int32{ + "CASE_UNKNOWN": 0, + "ACCUSATIVE": 1, + "ADVERBIAL": 2, + "COMPLEMENTIVE": 3, + "DATIVE": 4, + "GENITIVE": 5, + "INSTRUMENTAL": 6, + "LOCATIVE": 7, + "NOMINATIVE": 8, + "OBLIQUE": 9, + "PARTITIVE": 10, + "PREPOSITIONAL": 11, + "REFLEXIVE_CASE": 12, + "RELATIVE_CASE": 13, + "VOCATIVE": 14, +} + +func (x PartOfSpeech_Case) String() string { + return proto.EnumName(PartOfSpeech_Case_name, int32(x)) +} +func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 2} +} + +// Depending on the language, Form can be categorizing different forms of +// verbs, adjectives, adverbs, etc. For example, categorizing inflected +// endings of verbs and adjectives or distinguishing between short and long +// forms of adjectives and participles +type PartOfSpeech_Form int32 + +const ( + // Form is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0 + // Adnomial + PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1 + // Auxiliary + PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2 + // Complementizer + PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3 + // Final ending + PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4 + // Gerund + PartOfSpeech_GERUND PartOfSpeech_Form = 5 + // Realis + PartOfSpeech_REALIS PartOfSpeech_Form = 6 + // Irrealis + PartOfSpeech_IRREALIS PartOfSpeech_Form = 7 + // Short form + PartOfSpeech_SHORT PartOfSpeech_Form = 8 + // Long form + PartOfSpeech_LONG PartOfSpeech_Form = 9 + // Order form + PartOfSpeech_ORDER PartOfSpeech_Form = 10 + // Specific form + PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11 +) + +var PartOfSpeech_Form_name = map[int32]string{ + 0: "FORM_UNKNOWN", + 1: "ADNOMIAL", + 2: "AUXILIARY", + 3: "COMPLEMENTIZER", + 4: "FINAL_ENDING", + 5: "GERUND", + 6: "REALIS", + 7: "IRREALIS", + 8: "SHORT", + 9: "LONG", + 10: "ORDER", + 11: "SPECIFIC", +} +var PartOfSpeech_Form_value = map[string]int32{ + "FORM_UNKNOWN": 0, + "ADNOMIAL": 1, + "AUXILIARY": 2, + "COMPLEMENTIZER": 3, + "FINAL_ENDING": 4, + "GERUND": 5, + "REALIS": 6, + "IRREALIS": 7, + "SHORT": 8, + "LONG": 9, + "ORDER": 10, + "SPECIFIC": 11, +} + +func (x PartOfSpeech_Form) String() string { + return proto.EnumName(PartOfSpeech_Form_name, int32(x)) +} +func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 3} +} + +// Gender classes of nouns reflected in the behaviour of associated words. +type PartOfSpeech_Gender int32 + +const ( + // Gender is not applicable in the analyzed language or is not predicted. + PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0 + // Feminine + PartOfSpeech_FEMININE PartOfSpeech_Gender = 1 + // Masculine + PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2 + // Neuter + PartOfSpeech_NEUTER PartOfSpeech_Gender = 3 +) + +var PartOfSpeech_Gender_name = map[int32]string{ + 0: "GENDER_UNKNOWN", + 1: "FEMININE", + 2: "MASCULINE", + 3: "NEUTER", +} +var PartOfSpeech_Gender_value = map[string]int32{ + "GENDER_UNKNOWN": 0, + "FEMININE": 1, + "MASCULINE": 2, + "NEUTER": 3, +} + +func (x PartOfSpeech_Gender) String() string { + return proto.EnumName(PartOfSpeech_Gender_name, int32(x)) +} +func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 4} +} + +// The grammatical feature of verbs, used for showing modality and attitude. +type PartOfSpeech_Mood int32 + +const ( + // Mood is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0 + // Conditional + PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1 + // Imperative + PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2 + // Indicative + PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3 + // Interrogative + PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4 + // Jussive + PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5 + // Subjunctive + PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6 +) + +var PartOfSpeech_Mood_name = map[int32]string{ + 0: "MOOD_UNKNOWN", + 1: "CONDITIONAL_MOOD", + 2: "IMPERATIVE", + 3: "INDICATIVE", + 4: "INTERROGATIVE", + 5: "JUSSIVE", + 6: "SUBJUNCTIVE", +} +var PartOfSpeech_Mood_value = map[string]int32{ + "MOOD_UNKNOWN": 0, + "CONDITIONAL_MOOD": 1, + "IMPERATIVE": 2, + "INDICATIVE": 3, + "INTERROGATIVE": 4, + "JUSSIVE": 5, + "SUBJUNCTIVE": 6, +} + +func (x PartOfSpeech_Mood) String() string { + return proto.EnumName(PartOfSpeech_Mood_name, int32(x)) +} +func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 5} +} + +// Count distinctions. +type PartOfSpeech_Number int32 + +const ( + // Number is not applicable in the analyzed language or is not predicted. + PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0 + // Singular + PartOfSpeech_SINGULAR PartOfSpeech_Number = 1 + // Plural + PartOfSpeech_PLURAL PartOfSpeech_Number = 2 + // Dual + PartOfSpeech_DUAL PartOfSpeech_Number = 3 +) + +var PartOfSpeech_Number_name = map[int32]string{ + 0: "NUMBER_UNKNOWN", + 1: "SINGULAR", + 2: "PLURAL", + 3: "DUAL", +} +var PartOfSpeech_Number_value = map[string]int32{ + "NUMBER_UNKNOWN": 0, + "SINGULAR": 1, + "PLURAL": 2, + "DUAL": 3, +} + +func (x PartOfSpeech_Number) String() string { + return proto.EnumName(PartOfSpeech_Number_name, int32(x)) +} +func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 6} +} + +// The distinction between the speaker, second person, third person, etc. +type PartOfSpeech_Person int32 + +const ( + // Person is not applicable in the analyzed language or is not predicted. + PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0 + // First + PartOfSpeech_FIRST PartOfSpeech_Person = 1 + // Second + PartOfSpeech_SECOND PartOfSpeech_Person = 2 + // Third + PartOfSpeech_THIRD PartOfSpeech_Person = 3 + // Reflexive + PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4 +) + +var PartOfSpeech_Person_name = map[int32]string{ + 0: "PERSON_UNKNOWN", + 1: "FIRST", + 2: "SECOND", + 3: "THIRD", + 4: "REFLEXIVE_PERSON", +} +var PartOfSpeech_Person_value = map[string]int32{ + "PERSON_UNKNOWN": 0, + "FIRST": 1, + "SECOND": 2, + "THIRD": 3, + "REFLEXIVE_PERSON": 4, +} + +func (x PartOfSpeech_Person) String() string { + return proto.EnumName(PartOfSpeech_Person_name, int32(x)) +} +func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 7} +} + +// This category shows if the token is part of a proper name. +type PartOfSpeech_Proper int32 + +const ( + // Proper is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0 + // Proper + PartOfSpeech_PROPER PartOfSpeech_Proper = 1 + // Not proper + PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2 +) + +var PartOfSpeech_Proper_name = map[int32]string{ + 0: "PROPER_UNKNOWN", + 1: "PROPER", + 2: "NOT_PROPER", +} +var PartOfSpeech_Proper_value = map[string]int32{ + "PROPER_UNKNOWN": 0, + "PROPER": 1, + "NOT_PROPER": 2, +} + +func (x PartOfSpeech_Proper) String() string { + return proto.EnumName(PartOfSpeech_Proper_name, int32(x)) +} +func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 8} +} + +// Reciprocal features of a pronoun. +type PartOfSpeech_Reciprocity int32 + +const ( + // Reciprocity is not applicable in the analyzed language or is not + // predicted. + PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0 + // Reciprocal + PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1 + // Non-reciprocal + PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2 +) + +var PartOfSpeech_Reciprocity_name = map[int32]string{ + 0: "RECIPROCITY_UNKNOWN", + 1: "RECIPROCAL", + 2: "NON_RECIPROCAL", +} +var PartOfSpeech_Reciprocity_value = map[string]int32{ + "RECIPROCITY_UNKNOWN": 0, + "RECIPROCAL": 1, + "NON_RECIPROCAL": 2, +} + +func (x PartOfSpeech_Reciprocity) String() string { + return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x)) +} +func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 9} +} + +// Time reference. +type PartOfSpeech_Tense int32 + +const ( + // Tense is not applicable in the analyzed language or is not predicted. + PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0 + // Conditional + PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1 + // Future + PartOfSpeech_FUTURE PartOfSpeech_Tense = 2 + // Past + PartOfSpeech_PAST PartOfSpeech_Tense = 3 + // Present + PartOfSpeech_PRESENT PartOfSpeech_Tense = 4 + // Imperfect + PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5 + // Pluperfect + PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6 +) + +var PartOfSpeech_Tense_name = map[int32]string{ + 0: "TENSE_UNKNOWN", + 1: "CONDITIONAL_TENSE", + 2: "FUTURE", + 3: "PAST", + 4: "PRESENT", + 5: "IMPERFECT", + 6: "PLUPERFECT", +} +var PartOfSpeech_Tense_value = map[string]int32{ + "TENSE_UNKNOWN": 0, + "CONDITIONAL_TENSE": 1, + "FUTURE": 2, + "PAST": 3, + "PRESENT": 4, + "IMPERFECT": 5, + "PLUPERFECT": 6, +} + +func (x PartOfSpeech_Tense) String() string { + return proto.EnumName(PartOfSpeech_Tense_name, int32(x)) +} +func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 10} +} + +// The relationship between the action that a verb expresses and the +// participants identified by its arguments. +type PartOfSpeech_Voice int32 + +const ( + // Voice is not applicable in the analyzed language or is not predicted. 
+ PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0 + // Active + PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1 + // Causative + PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2 + // Passive + PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3 +) + +var PartOfSpeech_Voice_name = map[int32]string{ + 0: "VOICE_UNKNOWN", + 1: "ACTIVE", + 2: "CAUSATIVE", + 3: "PASSIVE", +} +var PartOfSpeech_Voice_value = map[string]int32{ + "VOICE_UNKNOWN": 0, + "ACTIVE": 1, + "CAUSATIVE": 2, + "PASSIVE": 3, +} + +func (x PartOfSpeech_Voice) String() string { + return proto.EnumName(PartOfSpeech_Voice_name, int32(x)) +} +func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5, 11} +} + +// The parse label enum for the token. +type DependencyEdge_Label int32 + +const ( + // Unknown + DependencyEdge_UNKNOWN DependencyEdge_Label = 0 + // Abbreviation modifier + DependencyEdge_ABBREV DependencyEdge_Label = 1 + // Adjectival complement + DependencyEdge_ACOMP DependencyEdge_Label = 2 + // Adverbial clause modifier + DependencyEdge_ADVCL DependencyEdge_Label = 3 + // Adverbial modifier + DependencyEdge_ADVMOD DependencyEdge_Label = 4 + // Adjectival modifier of an NP + DependencyEdge_AMOD DependencyEdge_Label = 5 + // Appositional modifier of an NP + DependencyEdge_APPOS DependencyEdge_Label = 6 + // Attribute dependent of a copular verb + DependencyEdge_ATTR DependencyEdge_Label = 7 + // Auxiliary (non-main) verb + DependencyEdge_AUX DependencyEdge_Label = 8 + // Passive auxiliary + DependencyEdge_AUXPASS DependencyEdge_Label = 9 + // Coordinating conjunction + DependencyEdge_CC DependencyEdge_Label = 10 + // Clausal complement of a verb or adjective + DependencyEdge_CCOMP DependencyEdge_Label = 11 + // Conjunct + DependencyEdge_CONJ DependencyEdge_Label = 12 + // Clausal subject + DependencyEdge_CSUBJ DependencyEdge_Label = 13 + // Clausal passive subject + DependencyEdge_CSUBJPASS DependencyEdge_Label = 14 + // Dependency (unable to determine) + DependencyEdge_DEP DependencyEdge_Label = 15 + // Determiner + DependencyEdge_DET DependencyEdge_Label = 16 + // Discourse + DependencyEdge_DISCOURSE DependencyEdge_Label = 17 + // Direct object + DependencyEdge_DOBJ DependencyEdge_Label = 18 + // Expletive + DependencyEdge_EXPL DependencyEdge_Label = 19 + // Goes with (part of a word in a text not well edited) + DependencyEdge_GOESWITH DependencyEdge_Label = 20 + // Indirect object + DependencyEdge_IOBJ DependencyEdge_Label = 21 + // Marker (word introducing a subordinate clause) + DependencyEdge_MARK DependencyEdge_Label = 22 + // Multi-word expression + DependencyEdge_MWE DependencyEdge_Label = 23 + // Multi-word verbal expression + DependencyEdge_MWV DependencyEdge_Label = 24 + // Negation modifier + DependencyEdge_NEG DependencyEdge_Label = 25 + // Noun compound modifier + DependencyEdge_NN DependencyEdge_Label = 26 + // Noun phrase used as an adverbial modifier + DependencyEdge_NPADVMOD DependencyEdge_Label = 27 + // Nominal subject + DependencyEdge_NSUBJ DependencyEdge_Label = 28 + // Passive nominal subject + DependencyEdge_NSUBJPASS DependencyEdge_Label = 29 + // Numeric modifier of a noun + DependencyEdge_NUM DependencyEdge_Label = 30 + // Element of compound number + DependencyEdge_NUMBER DependencyEdge_Label = 31 + // Punctuation mark + DependencyEdge_P DependencyEdge_Label = 32 + // Parataxis relation + DependencyEdge_PARATAXIS DependencyEdge_Label = 33 + // Participial modifier + DependencyEdge_PARTMOD DependencyEdge_Label = 34 + // The complement of 
a preposition is a clause + DependencyEdge_PCOMP DependencyEdge_Label = 35 + // Object of a preposition + DependencyEdge_POBJ DependencyEdge_Label = 36 + // Possession modifier + DependencyEdge_POSS DependencyEdge_Label = 37 + // Postverbal negative particle + DependencyEdge_POSTNEG DependencyEdge_Label = 38 + // Predicate complement + DependencyEdge_PRECOMP DependencyEdge_Label = 39 + // Preconjunt + DependencyEdge_PRECONJ DependencyEdge_Label = 40 + // Predeterminer + DependencyEdge_PREDET DependencyEdge_Label = 41 + // Prefix + DependencyEdge_PREF DependencyEdge_Label = 42 + // Prepositional modifier + DependencyEdge_PREP DependencyEdge_Label = 43 + // The relationship between a verb and verbal morpheme + DependencyEdge_PRONL DependencyEdge_Label = 44 + // Particle + DependencyEdge_PRT DependencyEdge_Label = 45 + // Associative or possessive marker + DependencyEdge_PS DependencyEdge_Label = 46 + // Quantifier phrase modifier + DependencyEdge_QUANTMOD DependencyEdge_Label = 47 + // Relative clause modifier + DependencyEdge_RCMOD DependencyEdge_Label = 48 + // Complementizer in relative clause + DependencyEdge_RCMODREL DependencyEdge_Label = 49 + // Ellipsis without a preceding predicate + DependencyEdge_RDROP DependencyEdge_Label = 50 + // Referent + DependencyEdge_REF DependencyEdge_Label = 51 + // Remnant + DependencyEdge_REMNANT DependencyEdge_Label = 52 + // Reparandum + DependencyEdge_REPARANDUM DependencyEdge_Label = 53 + // Root + DependencyEdge_ROOT DependencyEdge_Label = 54 + // Suffix specifying a unit of number + DependencyEdge_SNUM DependencyEdge_Label = 55 + // Suffix + DependencyEdge_SUFF DependencyEdge_Label = 56 + // Temporal modifier + DependencyEdge_TMOD DependencyEdge_Label = 57 + // Topic marker + DependencyEdge_TOPIC DependencyEdge_Label = 58 + // Clause headed by an infinite form of the verb that modifies a noun + DependencyEdge_VMOD DependencyEdge_Label = 59 + // Vocative + DependencyEdge_VOCATIVE DependencyEdge_Label = 60 + // Open clausal complement + DependencyEdge_XCOMP DependencyEdge_Label = 61 + // Name suffix + DependencyEdge_SUFFIX DependencyEdge_Label = 62 + // Name title + DependencyEdge_TITLE DependencyEdge_Label = 63 + // Adverbial phrase modifier + DependencyEdge_ADVPHMOD DependencyEdge_Label = 64 + // Causative auxiliary + DependencyEdge_AUXCAUS DependencyEdge_Label = 65 + // Helper auxiliary + DependencyEdge_AUXVV DependencyEdge_Label = 66 + // Rentaishi (Prenominal modifier) + DependencyEdge_DTMOD DependencyEdge_Label = 67 + // Foreign words + DependencyEdge_FOREIGN DependencyEdge_Label = 68 + // Keyword + DependencyEdge_KW DependencyEdge_Label = 69 + // List for chains of comparable items + DependencyEdge_LIST DependencyEdge_Label = 70 + // Nominalized clause + DependencyEdge_NOMC DependencyEdge_Label = 71 + // Nominalized clausal subject + DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72 + // Nominalized clausal passive + DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73 + // Compound of numeric modifier + DependencyEdge_NUMC DependencyEdge_Label = 74 + // Copula + DependencyEdge_COP DependencyEdge_Label = 75 + // Dislocated relation (for fronted/topicalized elements) + DependencyEdge_DISLOCATED DependencyEdge_Label = 76 + // Aspect marker + DependencyEdge_ASP DependencyEdge_Label = 77 + // Genitive modifier + DependencyEdge_GMOD DependencyEdge_Label = 78 + // Genitive object + DependencyEdge_GOBJ DependencyEdge_Label = 79 + // Infinitival modifier + DependencyEdge_INFMOD DependencyEdge_Label = 80 + // Measure + DependencyEdge_MES 
DependencyEdge_Label = 81 + // Nominal complement of a noun + DependencyEdge_NCOMP DependencyEdge_Label = 82 +) + +var DependencyEdge_Label_name = map[int32]string{ + 0: "UNKNOWN", + 1: "ABBREV", + 2: "ACOMP", + 3: "ADVCL", + 4: "ADVMOD", + 5: "AMOD", + 6: "APPOS", + 7: "ATTR", + 8: "AUX", + 9: "AUXPASS", + 10: "CC", + 11: "CCOMP", + 12: "CONJ", + 13: "CSUBJ", + 14: "CSUBJPASS", + 15: "DEP", + 16: "DET", + 17: "DISCOURSE", + 18: "DOBJ", + 19: "EXPL", + 20: "GOESWITH", + 21: "IOBJ", + 22: "MARK", + 23: "MWE", + 24: "MWV", + 25: "NEG", + 26: "NN", + 27: "NPADVMOD", + 28: "NSUBJ", + 29: "NSUBJPASS", + 30: "NUM", + 31: "NUMBER", + 32: "P", + 33: "PARATAXIS", + 34: "PARTMOD", + 35: "PCOMP", + 36: "POBJ", + 37: "POSS", + 38: "POSTNEG", + 39: "PRECOMP", + 40: "PRECONJ", + 41: "PREDET", + 42: "PREF", + 43: "PREP", + 44: "PRONL", + 45: "PRT", + 46: "PS", + 47: "QUANTMOD", + 48: "RCMOD", + 49: "RCMODREL", + 50: "RDROP", + 51: "REF", + 52: "REMNANT", + 53: "REPARANDUM", + 54: "ROOT", + 55: "SNUM", + 56: "SUFF", + 57: "TMOD", + 58: "TOPIC", + 59: "VMOD", + 60: "VOCATIVE", + 61: "XCOMP", + 62: "SUFFIX", + 63: "TITLE", + 64: "ADVPHMOD", + 65: "AUXCAUS", + 66: "AUXVV", + 67: "DTMOD", + 68: "FOREIGN", + 69: "KW", + 70: "LIST", + 71: "NOMC", + 72: "NOMCSUBJ", + 73: "NOMCSUBJPASS", + 74: "NUMC", + 75: "COP", + 76: "DISLOCATED", + 77: "ASP", + 78: "GMOD", + 79: "GOBJ", + 80: "INFMOD", + 81: "MES", + 82: "NCOMP", +} +var DependencyEdge_Label_value = map[string]int32{ + "UNKNOWN": 0, + "ABBREV": 1, + "ACOMP": 2, + "ADVCL": 3, + "ADVMOD": 4, + "AMOD": 5, + "APPOS": 6, + "ATTR": 7, + "AUX": 8, + "AUXPASS": 9, + "CC": 10, + "CCOMP": 11, + "CONJ": 12, + "CSUBJ": 13, + "CSUBJPASS": 14, + "DEP": 15, + "DET": 16, + "DISCOURSE": 17, + "DOBJ": 18, + "EXPL": 19, + "GOESWITH": 20, + "IOBJ": 21, + "MARK": 22, + "MWE": 23, + "MWV": 24, + "NEG": 25, + "NN": 26, + "NPADVMOD": 27, + "NSUBJ": 28, + "NSUBJPASS": 29, + "NUM": 30, + "NUMBER": 31, + "P": 32, + "PARATAXIS": 33, + "PARTMOD": 34, + "PCOMP": 35, + "POBJ": 36, + "POSS": 37, + "POSTNEG": 38, + "PRECOMP": 39, + "PRECONJ": 40, + "PREDET": 41, + "PREF": 42, + "PREP": 43, + "PRONL": 44, + "PRT": 45, + "PS": 46, + "QUANTMOD": 47, + "RCMOD": 48, + "RCMODREL": 49, + "RDROP": 50, + "REF": 51, + "REMNANT": 52, + "REPARANDUM": 53, + "ROOT": 54, + "SNUM": 55, + "SUFF": 56, + "TMOD": 57, + "TOPIC": 58, + "VMOD": 59, + "VOCATIVE": 60, + "XCOMP": 61, + "SUFFIX": 62, + "TITLE": 63, + "ADVPHMOD": 64, + "AUXCAUS": 65, + "AUXVV": 66, + "DTMOD": 67, + "FOREIGN": 68, + "KW": 69, + "LIST": 70, + "NOMC": 71, + "NOMCSUBJ": 72, + "NOMCSUBJPASS": 73, + "NUMC": 74, + "COP": 75, + "DISLOCATED": 76, + "ASP": 77, + "GMOD": 78, + "GOBJ": 79, + "INFMOD": 80, + "MES": 81, + "NCOMP": 82, +} + +func (x DependencyEdge_Label) String() string { + return proto.EnumName(DependencyEdge_Label_name, int32(x)) +} +func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{6, 0} +} + +// The supported types of mentions. 
+type EntityMention_Type int32 + +const ( + // Unknown + EntityMention_TYPE_UNKNOWN EntityMention_Type = 0 + // Proper name + EntityMention_PROPER EntityMention_Type = 1 + // Common noun (or noun compound) + EntityMention_COMMON EntityMention_Type = 2 +) + +var EntityMention_Type_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "PROPER", + 2: "COMMON", +} +var EntityMention_Type_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "PROPER": 1, + "COMMON": 2, +} + +func (x EntityMention_Type) String() string { + return proto.EnumName(EntityMention_Type_name, int32(x)) +} +func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{7, 0} +} + +// ################################################################ # +// +// Represents the input to API methods. +type Document struct { + // Required. If the type is not set or is `TYPE_UNSPECIFIED`, + // returns an `INVALID_ARGUMENT` error. + Type Document_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.language.v1beta2.Document_Type" json:"type,omitempty"` + // The source of the document: a string containing the content or a + // Google Cloud Storage URI. + // + // Types that are valid to be assigned to Source: + // *Document_Content + // *Document_GcsContentUri + Source isDocument_Source `protobuf_oneof:"source"` + // The language of the document (if not specified, the language is + // automatically detected). Both ISO and BCP-47 language codes are + // accepted.
+ // [Language Support](/natural-language/docs/languages) + // lists currently supported languages for each API method. + // If the language (either specified by the caller or automatically detected) + // is not supported by the called API method, an `INVALID_ARGUMENT` error + // is returned. + Language string `protobuf:"bytes,4,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetType() Document_Type { + if m != nil { + return m.Type + } + return Document_TYPE_UNSPECIFIED +} + +type isDocument_Source interface { + isDocument_Source() +} + +type Document_Content struct { + Content string `protobuf:"bytes,2,opt,name=content,proto3,oneof"` +} + +type Document_GcsContentUri struct { + GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,proto3,oneof"` +} + +func (*Document_Content) isDocument_Source() {} + +func (*Document_GcsContentUri) isDocument_Source() {} + +func (m *Document) GetSource() isDocument_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Document) GetContent() string { + if x, ok := m.GetSource().(*Document_Content); ok { + return x.Content + } + return "" +} + +func (m *Document) GetGcsContentUri() string { + if x, ok := m.GetSource().(*Document_GcsContentUri); ok { + return x.GcsContentUri + } + return "" +} + +func (m *Document) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
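// Illustrative sketch (not part of the generated code): newPlainTextDocument is a
// hypothetical helper showing how a caller might populate the Document message
// defined above, using the Content arm of the Source oneof. The helper name and the
// "en" language code are assumptions for the example only.
func newPlainTextDocument(text string) *Document {
	return &Document{
		Type:     Document_PLAIN_TEXT,
		Source:   &Document_Content{Content: text},
		Language: "en",
	}
}

// For a document built this way, GetContent returns the original text and
// GetGcsContentUri returns the empty string, mirroring the oneof accessors above.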
+func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{ + (*Document_Content)(nil), + (*Document_GcsContentUri)(nil), + } +} + +func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Content) + case *Document_GcsContentUri: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GcsContentUri) + case nil: + default: + return fmt.Errorf("Document.Source has unexpected type %T", x) + } + return nil +} + +func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Document) + switch tag { + case 2: // source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_Content{x} + return true, err + case 3: // source.gcs_content_uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &Document_GcsContentUri{x} + return true, err + default: + return false, nil + } +} + +func _Document_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Document) + // source + switch x := m.Source.(type) { + case *Document_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *Document_GcsContentUri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GcsContentUri))) + n += len(x.GcsContentUri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a sentence in the input document. +type Sentence struct { + // The sentence text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // For calls to [AnalyzeSentiment][] or if + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] + // is set to true, this field will contain the sentiment for the sentence. 
+ Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentence) Reset() { *m = Sentence{} } +func (m *Sentence) String() string { return proto.CompactTextString(m) } +func (*Sentence) ProtoMessage() {} +func (*Sentence) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{1} +} +func (m *Sentence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentence.Unmarshal(m, b) +} +func (m *Sentence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentence.Marshal(b, m, deterministic) +} +func (dst *Sentence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentence.Merge(dst, src) +} +func (m *Sentence) XXX_Size() int { + return xxx_messageInfo_Sentence.Size(m) +} +func (m *Sentence) XXX_DiscardUnknown() { + xxx_messageInfo_Sentence.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentence proto.InternalMessageInfo + +func (m *Sentence) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Sentence) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents a phrase in the text that is a known entity, such as +// a person, an organization, or location. The API associates information, such +// as salience and mentions, with entities. +type Entity struct { + // The representative name for the entity. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The entity type. + Type Entity_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta2.Entity_Type" json:"type,omitempty"` + // Metadata associated with the entity. + // + // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if + // available. The associated keys are "wikipedia_url" and "mid", respectively. + Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The salience score associated with the entity in the [0, 1.0] range. + // + // The salience score for an entity provides information about the + // importance or centrality of that entity to the entire document text. + // Scores closer to 0 are less salient, while scores closer to 1.0 are highly + // salient. + Salience float32 `protobuf:"fixed32,4,opt,name=salience,proto3" json:"salience,omitempty"` + // The mentions of this entity in the input document. The API currently + // supports proper noun mentions. + Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions,proto3" json:"mentions,omitempty"` + // For calls to [AnalyzeEntitySentiment][] or if + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + // is set to true, this field will contain the aggregate sentiment expressed + // for this entity in the provided document. 
+ Sentiment *Sentiment `protobuf:"bytes,6,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{2} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Entity) GetType() Entity_Type { + if m != nil { + return m.Type + } + return Entity_UNKNOWN +} + +func (m *Entity) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Entity) GetSalience() float32 { + if m != nil { + return m.Salience + } + return 0 +} + +func (m *Entity) GetMentions() []*EntityMention { + if m != nil { + return m.Mentions + } + return nil +} + +func (m *Entity) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents the smallest syntactic building block of the text. +type Token struct { + // The token text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Parts of speech tag for this token. + PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech,proto3" json:"part_of_speech,omitempty"` + // Dependency tree parse for this token. + DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge,proto3" json:"dependency_edge,omitempty"` + // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token. 
+ Lemma string `protobuf:"bytes,4,opt,name=lemma,proto3" json:"lemma,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Token) Reset() { *m = Token{} } +func (m *Token) String() string { return proto.CompactTextString(m) } +func (*Token) ProtoMessage() {} +func (*Token) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{3} +} +func (m *Token) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Token.Unmarshal(m, b) +} +func (m *Token) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Token.Marshal(b, m, deterministic) +} +func (dst *Token) XXX_Merge(src proto.Message) { + xxx_messageInfo_Token.Merge(dst, src) +} +func (m *Token) XXX_Size() int { + return xxx_messageInfo_Token.Size(m) +} +func (m *Token) XXX_DiscardUnknown() { + xxx_messageInfo_Token.DiscardUnknown(m) +} + +var xxx_messageInfo_Token proto.InternalMessageInfo + +func (m *Token) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *Token) GetPartOfSpeech() *PartOfSpeech { + if m != nil { + return m.PartOfSpeech + } + return nil +} + +func (m *Token) GetDependencyEdge() *DependencyEdge { + if m != nil { + return m.DependencyEdge + } + return nil +} + +func (m *Token) GetLemma() string { + if m != nil { + return m.Lemma + } + return "" +} + +// Represents the feeling associated with the entire text or entities in +// the text. +type Sentiment struct { + // A non-negative number in the [0, +inf) range, which represents + // the absolute magnitude of sentiment regardless of score (positive or + // negative). + Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"` + // Sentiment score between -1.0 (negative sentiment) and 1.0 + // (positive sentiment). + Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Sentiment) Reset() { *m = Sentiment{} } +func (m *Sentiment) String() string { return proto.CompactTextString(m) } +func (*Sentiment) ProtoMessage() {} +func (*Sentiment) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{4} +} +func (m *Sentiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Sentiment.Unmarshal(m, b) +} +func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic) +} +func (dst *Sentiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Sentiment.Merge(dst, src) +} +func (m *Sentiment) XXX_Size() int { + return xxx_messageInfo_Sentiment.Size(m) +} +func (m *Sentiment) XXX_DiscardUnknown() { + xxx_messageInfo_Sentiment.DiscardUnknown(m) +} + +var xxx_messageInfo_Sentiment proto.InternalMessageInfo + +func (m *Sentiment) GetMagnitude() float32 { + if m != nil { + return m.Magnitude + } + return 0 +} + +func (m *Sentiment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Represents part of speech information for a token. +type PartOfSpeech struct { + // The part of speech tag. + Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Tag" json:"tag,omitempty"` + // The grammatical aspect. 
+ Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Aspect" json:"aspect,omitempty"` + // The grammatical case. + Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Case" json:"case,omitempty"` + // The grammatical form. + Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Form" json:"form,omitempty"` + // The grammatical gender. + Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Gender" json:"gender,omitempty"` + // The grammatical mood. + Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Mood" json:"mood,omitempty"` + // The grammatical number. + Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Number" json:"number,omitempty"` + // The grammatical person. + Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Person" json:"person,omitempty"` + // The grammatical properness. + Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Proper" json:"proper,omitempty"` + // The grammatical reciprocity. + Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"` + // The grammatical tense. + Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Tense" json:"tense,omitempty"` + // The grammatical voice. 
+ Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,proto3,enum=google.cloud.language.v1beta2.PartOfSpeech_Voice" json:"voice,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartOfSpeech) Reset() { *m = PartOfSpeech{} } +func (m *PartOfSpeech) String() string { return proto.CompactTextString(m) } +func (*PartOfSpeech) ProtoMessage() {} +func (*PartOfSpeech) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{5} +} +func (m *PartOfSpeech) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartOfSpeech.Unmarshal(m, b) +} +func (m *PartOfSpeech) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartOfSpeech.Marshal(b, m, deterministic) +} +func (dst *PartOfSpeech) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartOfSpeech.Merge(dst, src) +} +func (m *PartOfSpeech) XXX_Size() int { + return xxx_messageInfo_PartOfSpeech.Size(m) +} +func (m *PartOfSpeech) XXX_DiscardUnknown() { + xxx_messageInfo_PartOfSpeech.DiscardUnknown(m) +} + +var xxx_messageInfo_PartOfSpeech proto.InternalMessageInfo + +func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag { + if m != nil { + return m.Tag + } + return PartOfSpeech_UNKNOWN +} + +func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect { + if m != nil { + return m.Aspect + } + return PartOfSpeech_ASPECT_UNKNOWN +} + +func (m *PartOfSpeech) GetCase() PartOfSpeech_Case { + if m != nil { + return m.Case + } + return PartOfSpeech_CASE_UNKNOWN +} + +func (m *PartOfSpeech) GetForm() PartOfSpeech_Form { + if m != nil { + return m.Form + } + return PartOfSpeech_FORM_UNKNOWN +} + +func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender { + if m != nil { + return m.Gender + } + return PartOfSpeech_GENDER_UNKNOWN +} + +func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood { + if m != nil { + return m.Mood + } + return PartOfSpeech_MOOD_UNKNOWN +} + +func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number { + if m != nil { + return m.Number + } + return PartOfSpeech_NUMBER_UNKNOWN +} + +func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person { + if m != nil { + return m.Person + } + return PartOfSpeech_PERSON_UNKNOWN +} + +func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper { + if m != nil { + return m.Proper + } + return PartOfSpeech_PROPER_UNKNOWN +} + +func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity { + if m != nil { + return m.Reciprocity + } + return PartOfSpeech_RECIPROCITY_UNKNOWN +} + +func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense { + if m != nil { + return m.Tense + } + return PartOfSpeech_TENSE_UNKNOWN +} + +func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice { + if m != nil { + return m.Voice + } + return PartOfSpeech_VOICE_UNKNOWN +} + +// Represents dependency parse tree information for a token. +type DependencyEdge struct { + // Represents the head of this token in the dependency tree. + // This is the index of the token which has an arc going to this token. + // The index is the position of the token in the array of tokens returned + // by the API method. If this token is a root token, then the + // `head_token_index` is its own index. + HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex,proto3" json:"head_token_index,omitempty"` + // The parse label for the token. 
+ Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,proto3,enum=google.cloud.language.v1beta2.DependencyEdge_Label" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DependencyEdge) Reset() { *m = DependencyEdge{} } +func (m *DependencyEdge) String() string { return proto.CompactTextString(m) } +func (*DependencyEdge) ProtoMessage() {} +func (*DependencyEdge) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{6} +} +func (m *DependencyEdge) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DependencyEdge.Unmarshal(m, b) +} +func (m *DependencyEdge) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DependencyEdge.Marshal(b, m, deterministic) +} +func (dst *DependencyEdge) XXX_Merge(src proto.Message) { + xxx_messageInfo_DependencyEdge.Merge(dst, src) +} +func (m *DependencyEdge) XXX_Size() int { + return xxx_messageInfo_DependencyEdge.Size(m) +} +func (m *DependencyEdge) XXX_DiscardUnknown() { + xxx_messageInfo_DependencyEdge.DiscardUnknown(m) +} + +var xxx_messageInfo_DependencyEdge proto.InternalMessageInfo + +func (m *DependencyEdge) GetHeadTokenIndex() int32 { + if m != nil { + return m.HeadTokenIndex + } + return 0 +} + +func (m *DependencyEdge) GetLabel() DependencyEdge_Label { + if m != nil { + return m.Label + } + return DependencyEdge_UNKNOWN +} + +// Represents a mention for an entity in the text. Currently, proper noun +// mentions are supported. +type EntityMention struct { + // The mention text. + Text *TextSpan `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // The type of the entity mention. + Type EntityMention_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.language.v1beta2.EntityMention_Type" json:"type,omitempty"` + // For calls to [AnalyzeEntitySentiment][] or if + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] + // is set to true, this field will contain the sentiment expressed for this + // mention of the entity in the provided document. 
+ Sentiment *Sentiment `protobuf:"bytes,3,opt,name=sentiment,proto3" json:"sentiment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityMention) Reset() { *m = EntityMention{} } +func (m *EntityMention) String() string { return proto.CompactTextString(m) } +func (*EntityMention) ProtoMessage() {} +func (*EntityMention) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{7} +} +func (m *EntityMention) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityMention.Unmarshal(m, b) +} +func (m *EntityMention) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityMention.Marshal(b, m, deterministic) +} +func (dst *EntityMention) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityMention.Merge(dst, src) +} +func (m *EntityMention) XXX_Size() int { + return xxx_messageInfo_EntityMention.Size(m) +} +func (m *EntityMention) XXX_DiscardUnknown() { + xxx_messageInfo_EntityMention.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityMention proto.InternalMessageInfo + +func (m *EntityMention) GetText() *TextSpan { + if m != nil { + return m.Text + } + return nil +} + +func (m *EntityMention) GetType() EntityMention_Type { + if m != nil { + return m.Type + } + return EntityMention_TYPE_UNKNOWN +} + +func (m *EntityMention) GetSentiment() *Sentiment { + if m != nil { + return m.Sentiment + } + return nil +} + +// Represents an output piece of text. +type TextSpan struct { + // The content of the output text. + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // The API calculates the beginning offset of the content in the original + // document according to the + // [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the + // API request. + BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset,proto3" json:"begin_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSpan) Reset() { *m = TextSpan{} } +func (m *TextSpan) String() string { return proto.CompactTextString(m) } +func (*TextSpan) ProtoMessage() {} +func (*TextSpan) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{8} +} +func (m *TextSpan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSpan.Unmarshal(m, b) +} +func (m *TextSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSpan.Marshal(b, m, deterministic) +} +func (dst *TextSpan) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSpan.Merge(dst, src) +} +func (m *TextSpan) XXX_Size() int { + return xxx_messageInfo_TextSpan.Size(m) +} +func (m *TextSpan) XXX_DiscardUnknown() { + xxx_messageInfo_TextSpan.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSpan proto.InternalMessageInfo + +func (m *TextSpan) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *TextSpan) GetBeginOffset() int32 { + if m != nil { + return m.BeginOffset + } + return 0 +} + +// Represents a category returned from the text classifier. +type ClassificationCategory struct { + // The name of the category representing the document. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The classifier's confidence of the category. 
Number represents how certain + // the classifier is that this category represents the given text. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassificationCategory) Reset() { *m = ClassificationCategory{} } +func (m *ClassificationCategory) String() string { return proto.CompactTextString(m) } +func (*ClassificationCategory) ProtoMessage() {} +func (*ClassificationCategory) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{9} +} +func (m *ClassificationCategory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassificationCategory.Unmarshal(m, b) +} +func (m *ClassificationCategory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassificationCategory.Marshal(b, m, deterministic) +} +func (dst *ClassificationCategory) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassificationCategory.Merge(dst, src) +} +func (m *ClassificationCategory) XXX_Size() int { + return xxx_messageInfo_ClassificationCategory.Size(m) +} +func (m *ClassificationCategory) XXX_DiscardUnknown() { + xxx_messageInfo_ClassificationCategory.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassificationCategory proto.InternalMessageInfo + +func (m *ClassificationCategory) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ClassificationCategory) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// The sentiment analysis request message. +type AnalyzeSentimentRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate sentence offsets for the + // sentence sentiment. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentRequest) Reset() { *m = AnalyzeSentimentRequest{} } +func (m *AnalyzeSentimentRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentRequest) ProtoMessage() {} +func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{10} +} +func (m *AnalyzeSentimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentRequest.Unmarshal(m, b) +} +func (m *AnalyzeSentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentRequest.Merge(dst, src) +} +func (m *AnalyzeSentimentRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentRequest.Size(m) +} +func (m *AnalyzeSentimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentRequest proto.InternalMessageInfo + +func (m *AnalyzeSentimentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The sentiment analysis response message. +type AnalyzeSentimentResponse struct { + // The overall sentiment of the input document. + DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta2.Document.language] + // field for more details. + Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + // The sentiment for all the sentences in the document. 
+ Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences,proto3" json:"sentences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSentimentResponse) Reset() { *m = AnalyzeSentimentResponse{} } +func (m *AnalyzeSentimentResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSentimentResponse) ProtoMessage() {} +func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{11} +} +func (m *AnalyzeSentimentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSentimentResponse.Unmarshal(m, b) +} +func (m *AnalyzeSentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSentimentResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSentimentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSentimentResponse.Merge(dst, src) +} +func (m *AnalyzeSentimentResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSentimentResponse.Size(m) +} +func (m *AnalyzeSentimentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSentimentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSentimentResponse proto.InternalMessageInfo + +func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnalyzeSentimentResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +// The entity-level sentiment analysis request message. +type AnalyzeEntitySentimentRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitySentimentRequest) Reset() { *m = AnalyzeEntitySentimentRequest{} } +func (m *AnalyzeEntitySentimentRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitySentimentRequest) ProtoMessage() {} +func (*AnalyzeEntitySentimentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{12} +} +func (m *AnalyzeEntitySentimentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Unmarshal(m, b) +} +func (m *AnalyzeEntitySentimentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitySentimentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitySentimentRequest.Merge(dst, src) +} +func (m *AnalyzeEntitySentimentRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitySentimentRequest.Size(m) +} +func (m *AnalyzeEntitySentimentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitySentimentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitySentimentRequest proto.InternalMessageInfo + +func (m *AnalyzeEntitySentimentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeEntitySentimentRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The entity-level sentiment analysis response message. +type AnalyzeEntitySentimentResponse struct { + // The recognized entities in the input document with associated sentiments. + Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta2.Document.language] + // field for more details. 
+ Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitySentimentResponse) Reset() { *m = AnalyzeEntitySentimentResponse{} } +func (m *AnalyzeEntitySentimentResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitySentimentResponse) ProtoMessage() {} +func (*AnalyzeEntitySentimentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{13} +} +func (m *AnalyzeEntitySentimentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Unmarshal(m, b) +} +func (m *AnalyzeEntitySentimentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitySentimentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitySentimentResponse.Merge(dst, src) +} +func (m *AnalyzeEntitySentimentResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitySentimentResponse.Size(m) +} +func (m *AnalyzeEntitySentimentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitySentimentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitySentimentResponse proto.InternalMessageInfo + +func (m *AnalyzeEntitySentimentResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnalyzeEntitySentimentResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The entity analysis request message. +type AnalyzeEntitiesRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesRequest) Reset() { *m = AnalyzeEntitiesRequest{} } +func (m *AnalyzeEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesRequest) ProtoMessage() {} +func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{14} +} +func (m *AnalyzeEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesRequest.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesRequest.Merge(dst, src) +} +func (m *AnalyzeEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesRequest.Size(m) +} +func (m *AnalyzeEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesRequest proto.InternalMessageInfo + +func (m *AnalyzeEntitiesRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The entity analysis response message. +type AnalyzeEntitiesResponse struct { + // The recognized entities in the input document. + Entities []*Entity `protobuf:"bytes,1,rep,name=entities,proto3" json:"entities,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta2.Document.language] + // field for more details. 
+ Language string `protobuf:"bytes,2,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeEntitiesResponse) Reset() { *m = AnalyzeEntitiesResponse{} } +func (m *AnalyzeEntitiesResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeEntitiesResponse) ProtoMessage() {} +func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{15} +} +func (m *AnalyzeEntitiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeEntitiesResponse.Unmarshal(m, b) +} +func (m *AnalyzeEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeEntitiesResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeEntitiesResponse.Merge(dst, src) +} +func (m *AnalyzeEntitiesResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeEntitiesResponse.Size(m) +} +func (m *AnalyzeEntitiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeEntitiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeEntitiesResponse proto.InternalMessageInfo + +func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnalyzeEntitiesResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The syntax analysis request message. +type AnalyzeSyntaxRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The encoding type used by the API to calculate offsets. + EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxRequest) Reset() { *m = AnalyzeSyntaxRequest{} } +func (m *AnalyzeSyntaxRequest) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxRequest) ProtoMessage() {} +func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{16} +} +func (m *AnalyzeSyntaxRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxRequest.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxRequest.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxRequest.Merge(dst, src) +} +func (m *AnalyzeSyntaxRequest) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxRequest.Size(m) +} +func (m *AnalyzeSyntaxRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxRequest proto.InternalMessageInfo + +func (m *AnalyzeSyntaxRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// The syntax analysis response message. +type AnalyzeSyntaxResponse struct { + // Sentences in the input document. 
+ Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta2.Document.language] + // field for more details. + Language string `protobuf:"bytes,3,opt,name=language,proto3" json:"language,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeSyntaxResponse) Reset() { *m = AnalyzeSyntaxResponse{} } +func (m *AnalyzeSyntaxResponse) String() string { return proto.CompactTextString(m) } +func (*AnalyzeSyntaxResponse) ProtoMessage() {} +func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{17} +} +func (m *AnalyzeSyntaxResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeSyntaxResponse.Unmarshal(m, b) +} +func (m *AnalyzeSyntaxResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeSyntaxResponse.Marshal(b, m, deterministic) +} +func (dst *AnalyzeSyntaxResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeSyntaxResponse.Merge(dst, src) +} +func (m *AnalyzeSyntaxResponse) XXX_Size() int { + return xxx_messageInfo_AnalyzeSyntaxResponse.Size(m) +} +func (m *AnalyzeSyntaxResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeSyntaxResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeSyntaxResponse proto.InternalMessageInfo + +func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnalyzeSyntaxResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +// The document classification request message. +type ClassifyTextRequest struct { + // Input document. 
+ Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassifyTextRequest) Reset() { *m = ClassifyTextRequest{} } +func (m *ClassifyTextRequest) String() string { return proto.CompactTextString(m) } +func (*ClassifyTextRequest) ProtoMessage() {} +func (*ClassifyTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{18} +} +func (m *ClassifyTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassifyTextRequest.Unmarshal(m, b) +} +func (m *ClassifyTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassifyTextRequest.Marshal(b, m, deterministic) +} +func (dst *ClassifyTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassifyTextRequest.Merge(dst, src) +} +func (m *ClassifyTextRequest) XXX_Size() int { + return xxx_messageInfo_ClassifyTextRequest.Size(m) +} +func (m *ClassifyTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ClassifyTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassifyTextRequest proto.InternalMessageInfo + +func (m *ClassifyTextRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +// The document classification response message. +type ClassifyTextResponse struct { + // Categories representing the input document. + Categories []*ClassificationCategory `protobuf:"bytes,1,rep,name=categories,proto3" json:"categories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClassifyTextResponse) Reset() { *m = ClassifyTextResponse{} } +func (m *ClassifyTextResponse) String() string { return proto.CompactTextString(m) } +func (*ClassifyTextResponse) ProtoMessage() {} +func (*ClassifyTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{19} +} +func (m *ClassifyTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClassifyTextResponse.Unmarshal(m, b) +} +func (m *ClassifyTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClassifyTextResponse.Marshal(b, m, deterministic) +} +func (dst *ClassifyTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClassifyTextResponse.Merge(dst, src) +} +func (m *ClassifyTextResponse) XXX_Size() int { + return xxx_messageInfo_ClassifyTextResponse.Size(m) +} +func (m *ClassifyTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ClassifyTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ClassifyTextResponse proto.InternalMessageInfo + +func (m *ClassifyTextResponse) GetCategories() []*ClassificationCategory { + if m != nil { + return m.Categories + } + return nil +} + +// The request message for the text annotation API, which can perform multiple +// analysis types (sentiment, entities, and syntax) in one call. +type AnnotateTextRequest struct { + // Input document. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The enabled features. + Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features,proto3" json:"features,omitempty"` + // The encoding type used by the API to calculate offsets. 
+ EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,proto3,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest) Reset() { *m = AnnotateTextRequest{} } +func (m *AnnotateTextRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest) ProtoMessage() {} +func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{20} +} +func (m *AnnotateTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest.Unmarshal(m, b) +} +func (m *AnnotateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest.Merge(dst, src) +} +func (m *AnnotateTextRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest.Size(m) +} +func (m *AnnotateTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest proto.InternalMessageInfo + +func (m *AnnotateTextRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateTextRequest) GetEncodingType() EncodingType { + if m != nil { + return m.EncodingType + } + return EncodingType_NONE +} + +// All available features for sentiment, syntax, and semantic analysis. +// Setting each one to true will enable that specific analysis for the input. +type AnnotateTextRequest_Features struct { + // Extract syntax information. + ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax,proto3" json:"extract_syntax,omitempty"` + // Extract entities. + ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities,proto3" json:"extract_entities,omitempty"` + // Extract document-level sentiment. + ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment,proto3" json:"extract_document_sentiment,omitempty"` + // Extract entities and their associated sentiment. + ExtractEntitySentiment bool `protobuf:"varint,4,opt,name=extract_entity_sentiment,json=extractEntitySentiment,proto3" json:"extract_entity_sentiment,omitempty"` + // Classify the full document into categories. 
+ ClassifyText bool `protobuf:"varint,6,opt,name=classify_text,json=classifyText,proto3" json:"classify_text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextRequest_Features) Reset() { *m = AnnotateTextRequest_Features{} } +func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextRequest_Features) ProtoMessage() {} +func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{20, 0} +} +func (m *AnnotateTextRequest_Features) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextRequest_Features.Unmarshal(m, b) +} +func (m *AnnotateTextRequest_Features) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextRequest_Features.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextRequest_Features) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextRequest_Features.Merge(dst, src) +} +func (m *AnnotateTextRequest_Features) XXX_Size() int { + return xxx_messageInfo_AnnotateTextRequest_Features.Size(m) +} +func (m *AnnotateTextRequest_Features) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextRequest_Features.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextRequest_Features proto.InternalMessageInfo + +func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool { + if m != nil { + return m.ExtractSyntax + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractEntities() bool { + if m != nil { + return m.ExtractEntities + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool { + if m != nil { + return m.ExtractDocumentSentiment + } + return false +} + +func (m *AnnotateTextRequest_Features) GetExtractEntitySentiment() bool { + if m != nil { + return m.ExtractEntitySentiment + } + return false +} + +func (m *AnnotateTextRequest_Features) GetClassifyText() bool { + if m != nil { + return m.ClassifyText + } + return false +} + +// The text annotations response message. +type AnnotateTextResponse struct { + // Sentences in the input document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences,proto3" json:"sentences,omitempty"` + // Tokens, along with their syntactic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax]. + Tokens []*Token `protobuf:"bytes,2,rep,name=tokens,proto3" json:"tokens,omitempty"` + // Entities, along with their semantic information, in the input document. + // Populated if the user enables + // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities]. + Entities []*Entity `protobuf:"bytes,3,rep,name=entities,proto3" json:"entities,omitempty"` + // The overall sentiment for the document. Populated if the user enables + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment]. 
+ DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment,proto3" json:"document_sentiment,omitempty"` + // The language of the text, which will be the same as the language specified + // in the request or, if not specified, the automatically-detected language. + // See [Document.language][google.cloud.language.v1beta2.Document.language] + // field for more details. + Language string `protobuf:"bytes,5,opt,name=language,proto3" json:"language,omitempty"` + // Categories identified in the input document. + Categories []*ClassificationCategory `protobuf:"bytes,6,rep,name=categories,proto3" json:"categories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateTextResponse) Reset() { *m = AnnotateTextResponse{} } +func (m *AnnotateTextResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateTextResponse) ProtoMessage() {} +func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_language_service_7b74fe2bae56761e, []int{21} +} +func (m *AnnotateTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateTextResponse.Unmarshal(m, b) +} +func (m *AnnotateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateTextResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateTextResponse.Merge(dst, src) +} +func (m *AnnotateTextResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateTextResponse.Size(m) +} +func (m *AnnotateTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateTextResponse proto.InternalMessageInfo + +func (m *AnnotateTextResponse) GetSentences() []*Sentence { + if m != nil { + return m.Sentences + } + return nil +} + +func (m *AnnotateTextResponse) GetTokens() []*Token { + if m != nil { + return m.Tokens + } + return nil +} + +func (m *AnnotateTextResponse) GetEntities() []*Entity { + if m != nil { + return m.Entities + } + return nil +} + +func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment { + if m != nil { + return m.DocumentSentiment + } + return nil +} + +func (m *AnnotateTextResponse) GetLanguage() string { + if m != nil { + return m.Language + } + return "" +} + +func (m *AnnotateTextResponse) GetCategories() []*ClassificationCategory { + if m != nil { + return m.Categories + } + return nil +} + +func init() { + proto.RegisterType((*Document)(nil), "google.cloud.language.v1beta2.Document") + proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1beta2.Sentence") + proto.RegisterType((*Entity)(nil), "google.cloud.language.v1beta2.Entity") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.language.v1beta2.Entity.MetadataEntry") + proto.RegisterType((*Token)(nil), "google.cloud.language.v1beta2.Token") + proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1beta2.Sentiment") + proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1beta2.PartOfSpeech") + proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1beta2.DependencyEdge") + proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1beta2.EntityMention") + proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1beta2.TextSpan") + proto.RegisterType((*ClassificationCategory)(nil), "google.cloud.language.v1beta2.ClassificationCategory") + 
proto.RegisterType((*AnalyzeSentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentRequest") + proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentResponse") + proto.RegisterType((*AnalyzeEntitySentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest") + proto.RegisterType((*AnalyzeEntitySentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse") + proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesRequest") + proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesResponse") + proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxRequest") + proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxResponse") + proto.RegisterType((*ClassifyTextRequest)(nil), "google.cloud.language.v1beta2.ClassifyTextRequest") + proto.RegisterType((*ClassifyTextResponse)(nil), "google.cloud.language.v1beta2.ClassifyTextResponse") + proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest") + proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest.Features") + proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1beta2.AnnotateTextResponse") + proto.RegisterEnum("google.cloud.language.v1beta2.EncodingType", EncodingType_name, EncodingType_value) + proto.RegisterEnum("google.cloud.language.v1beta2.Document_Type", Document_Type_name, Document_Type_value) + proto.RegisterEnum("google.cloud.language.v1beta2.Entity_Type", Entity_Type_name, Entity_Type_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value) + proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value) + proto.RegisterEnum("google.cloud.language.v1beta2.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value) + proto.RegisterEnum("google.cloud.language.v1beta2.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value) +} + +// Reference imports to 
suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LanguageServiceClient is the client API for LanguageService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LanguageServiceClient interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. + AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) + // Finds entities, similar to + // [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + // in the text and analyzes sentiment associated with each entity and its + // mentions. + AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) + // Classifies a document into categories. + ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error) + // A convenience method that provides all syntax, sentiment, entity, and + // classification features in one call. + AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) +} + +type languageServiceClient struct { + cc *grpc.ClientConn +} + +func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient { + return &languageServiceClient{cc} +} + +func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) { + out := new(AnalyzeSentimentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) { + out := new(AnalyzeEntitiesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) { + out := new(AnalyzeEntitySentimentResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) { + out := new(AnalyzeSyntaxResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) ClassifyText(ctx context.Context, in *ClassifyTextRequest, opts ...grpc.CallOption) (*ClassifyTextResponse, error) { + out := new(ClassifyTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/ClassifyText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) { + out := new(AnnotateTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnnotateText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LanguageServiceServer is the server API for LanguageService service. +type LanguageServiceServer interface { + // Analyzes the sentiment of the provided text. + AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error) + // Finds named entities (currently proper names and common nouns) in the text + // along with entity types, salience, mentions for each entity, and + // other properties. + AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error) + // Finds entities, similar to + // [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] + // in the text and analyzes sentiment associated with each entity and its + // mentions. + AnalyzeEntitySentiment(context.Context, *AnalyzeEntitySentimentRequest) (*AnalyzeEntitySentimentResponse, error) + // Analyzes the syntax of the text and provides sentence boundaries and + // tokenization along with part of speech tags, dependency trees, and other + // properties. + AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) + // Classifies a document into categories. + ClassifyText(context.Context, *ClassifyTextRequest) (*ClassifyTextResponse, error) + // A convenience method that provides all syntax, sentiment, entity, and + // classification features in one call. 
+ AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error) +} + +func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) { + s.RegisterService(&_LanguageService_serviceDesc, srv) +} + +func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSentimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeEntitySentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeEntitySentimentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, req.(*AnalyzeEntitySentimentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnalyzeSyntaxRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_ClassifyText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ClassifyTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).ClassifyText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.language.v1beta2.LanguageService/ClassifyText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).ClassifyText(ctx, req.(*ClassifyTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LanguageServiceServer).AnnotateText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnnotateText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LanguageService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.language.v1beta2.LanguageService", + HandlerType: (*LanguageServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnalyzeSentiment", + Handler: _LanguageService_AnalyzeSentiment_Handler, + }, + { + MethodName: "AnalyzeEntities", + Handler: _LanguageService_AnalyzeEntities_Handler, + }, + { + MethodName: "AnalyzeEntitySentiment", + Handler: _LanguageService_AnalyzeEntitySentiment_Handler, + }, + { + MethodName: "AnalyzeSyntax", + Handler: _LanguageService_AnalyzeSyntax_Handler, + }, + { + MethodName: "ClassifyText", + Handler: _LanguageService_ClassifyText_Handler, + }, + { + MethodName: "AnnotateText", + Handler: _LanguageService_AnnotateText_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/language/v1beta2/language_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/language/v1beta2/language_service.proto", fileDescriptor_language_service_7b74fe2bae56761e) +} + +var fileDescriptor_language_service_7b74fe2bae56761e = []byte{ + // 3019 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x3a, 0x4b, 0x73, 0xdb, 0xc6, + 0xfd, 0x06, 0x5f, 0xa2, 0x96, 0x92, 0xbc, 0x86, 0x1d, 0x9b, 0x7f, 0xfd, 0xf3, 0x70, 0xe0, 0xb8, + 0x56, 0xec, 0x44, 0x8a, 0x25, 0xc7, 0x71, 0x6d, 0xe7, 0x01, 0x01, 0x4b, 0x0a, 0x32, 0x09, 0xc0, + 0x0b, 0x80, 0x92, 0x7d, 0xe1, 0xc0, 0x24, 0xc4, 0x70, 0x22, 0x02, 0x2c, 0x01, 0x79, 0xac, 0x5e, + 0x32, 0xcd, 0x4c, 0x8f, 0x99, 0x1e, 0xf2, 0x11, 0x7a, 0xe8, 0xb4, 0x33, 0x9d, 0xb4, 0xd3, 0x99, + 0x4e, 0x7b, 0xe8, 0x27, 0xe8, 0xb1, 0x33, 0xfd, 0x04, 0xfd, 0x00, 0x3d, 0xb6, 0xb7, 0xce, 0x6f, + 0x77, 0x41, 0x82, 0xb2, 0x62, 0x89, 0x8e, 0xa7, 0x93, 0xde, 0x76, 0x7f, 0xf8, 0xbd, 0x9f, 0xbb, + 0x4b, 0xa2, 0x5b, 0xbd, 0x28, 0xea, 0xed, 0x07, 0x6b, 0x9d, 0xfd, 0xe8, 0xa0, 0xbb, 0xb6, 0xef, + 0x87, 0xbd, 0x03, 0xbf, 0x17, 0xac, 0x3d, 0xbd, 0xf9, 0x24, 0x48, 0xfc, 0xf5, 0x31, 0xa0, 0x1d, + 0x07, 0xa3, 0xa7, 0xfd, 0x4e, 0xb0, 0x3a, 0x1c, 0x45, 0x49, 0x24, 0xbf, 0xc1, 0xa9, 0x56, 0x19, + 0xd5, 0x6a, 0x8a, 0xb4, 0x2a, 0xa8, 0x96, 0x5f, 0x17, 0x4c, 0xfd, 0x61, 0x7f, 0xcd, 0x0f, 0xc3, + 0x28, 0xf1, 0x93, 0x7e, 0x14, 0xc6, 0x9c, 0x78, 0xf9, 0x8a, 0xf8, 0xba, 0x1f, 0x85, 0xbd, 0xd1, + 0x41, 0x18, 0xf6, 0xc3, 0xde, 0x5a, 0x34, 0x0c, 0x46, 0x53, 0x48, 0x6f, 0x09, 0x24, 0xb6, 0x7b, + 0x72, 0xb0, 0xb7, 0x96, 0xf4, 0x07, 0x41, 0x9c, 0xf8, 0x83, 0xa1, 0x40, 0xb8, 0x24, 0x10, 0x46, + 0xc3, 0xce, 0x5a, 0x9c, 0xf8, 0xc9, 0x81, 0xa0, 0x54, 0xfe, 0x29, 
0xa1, 0xb2, 0x1e, 0x75, 0x0e, + 0x06, 0x41, 0x98, 0xc8, 0x9f, 0xa1, 0x42, 0x72, 0x38, 0x0c, 0xaa, 0xd2, 0x65, 0x69, 0x65, 0x69, + 0xfd, 0xbd, 0xd5, 0x17, 0xea, 0xbd, 0x9a, 0x92, 0xad, 0xba, 0x87, 0xc3, 0x80, 0x32, 0x4a, 0x79, + 0x19, 0xcd, 0x75, 0xa2, 0x30, 0x09, 0xc2, 0xa4, 0x9a, 0xbb, 0x2c, 0xad, 0xcc, 0x6f, 0x9d, 0xa1, + 0x29, 0x40, 0x5e, 0x41, 0x67, 0x7b, 0x9d, 0xb8, 0x2d, 0xb6, 0xed, 0x83, 0x51, 0xbf, 0x9a, 0x17, + 0x38, 0x8b, 0xbd, 0x4e, 0xac, 0x71, 0xb8, 0x37, 0xea, 0xcb, 0xcb, 0xa8, 0x9c, 0x4a, 0xab, 0x16, + 0x00, 0x85, 0x8e, 0xf7, 0xca, 0x6d, 0x54, 0x00, 0x79, 0xf2, 0x05, 0x84, 0xdd, 0x47, 0x36, 0x69, + 0x7b, 0xa6, 0x63, 0x13, 0xcd, 0xa8, 0x19, 0x44, 0xc7, 0x67, 0xe4, 0x25, 0x84, 0xec, 0x86, 0x6a, + 0x98, 0x6d, 0x97, 0xec, 0xba, 0x58, 0x92, 0xcb, 0xa8, 0xb0, 0xe5, 0x36, 0x1b, 0x38, 0xb7, 0x59, + 0x46, 0xa5, 0x38, 0x3a, 0x18, 0x75, 0x02, 0xe5, 0x17, 0x12, 0x2a, 0x3b, 0x01, 0x08, 0xeb, 0x04, + 0xf2, 0x3d, 0x54, 0x48, 0x82, 0x67, 0x09, 0x33, 0xb9, 0xb2, 0x7e, 0xed, 0x04, 0x93, 0xdd, 0xe0, + 0x59, 0xe2, 0x0c, 0xfd, 0x90, 0x32, 0x22, 0xb9, 0x86, 0xe6, 0xe3, 0x20, 0x04, 0x5f, 0x0b, 0x7b, + 0x2b, 0xeb, 0x2b, 0x27, 0x70, 0x70, 0x52, 0x7c, 0x3a, 0x21, 0x55, 0xbe, 0x29, 0xa0, 0x12, 0x09, + 0x93, 0x7e, 0x72, 0x28, 0xcb, 0xa8, 0x10, 0xfa, 0x03, 0x1e, 0x82, 0x79, 0xca, 0xd6, 0xf2, 0x27, + 0x22, 0x2c, 0x39, 0x16, 0x96, 0xeb, 0x27, 0x48, 0xe0, 0x8c, 0xb2, 0x41, 0xb1, 0x50, 0x79, 0x10, + 0x24, 0x7e, 0xd7, 0x4f, 0xfc, 0x6a, 0xfe, 0x72, 0x7e, 0xa5, 0xb2, 0xbe, 0x71, 0x3a, 0x1e, 0x4d, + 0x41, 0x45, 0xc2, 0x64, 0x74, 0x48, 0xc7, 0x4c, 0x20, 0x3e, 0xb1, 0xbf, 0xdf, 0x07, 0x07, 0xb2, + 0xf8, 0xe4, 0xe8, 0x78, 0x2f, 0x6f, 0x81, 0xb0, 0x90, 0x25, 0x67, 0xb5, 0xc8, 0x84, 0xbd, 0x77, + 0x2a, 0x61, 0x4d, 0x4e, 0x44, 0xc7, 0xd4, 0xd3, 0xde, 0x2d, 0xbd, 0xb4, 0x77, 0x97, 0xef, 0xa1, + 0xc5, 0x29, 0x43, 0x64, 0x8c, 0xf2, 0x5f, 0x04, 0x87, 0xc2, 0xc5, 0xb0, 0x94, 0x2f, 0xa0, 0xe2, + 0x53, 0x7f, 0xff, 0x80, 0xbb, 0x78, 0x9e, 0xf2, 0xcd, 0xdd, 0xdc, 0x1d, 0x49, 0x39, 0x14, 0xe9, + 0x56, 0x41, 0x73, 0x9e, 0xf9, 0xc0, 0xb4, 0x76, 0x4c, 0x7c, 0x46, 0x46, 0xa8, 0x64, 0x13, 0xea, + 0x58, 0x26, 0x96, 0xe4, 0x05, 0x54, 0x6e, 0x58, 0x9a, 0xea, 0x1a, 0x96, 0x89, 0x73, 0x32, 0x46, + 0x0b, 0x16, 0xad, 0xab, 0xa6, 0xf1, 0x98, 0x43, 0xf2, 0xf2, 0x3c, 0x2a, 0x92, 0x16, 0x31, 0x5d, + 0x5c, 0x90, 0xcf, 0xa2, 0xca, 0x8e, 0x45, 0x1f, 0xb4, 0xad, 0x5a, 0x5b, 0xa5, 0x2e, 0x2e, 0xca, + 0xe7, 0xd0, 0xa2, 0x66, 0x99, 0x8e, 0xd7, 0x24, 0xb4, 0x5d, 0xb7, 0x2c, 0x1d, 0x97, 0x00, 0xdd, + 0x72, 0xb7, 0x08, 0xc5, 0x73, 0xca, 0xcf, 0x73, 0xa8, 0xe8, 0x46, 0x5f, 0x04, 0xe1, 0xf7, 0x4b, + 0xd2, 0x87, 0x68, 0x69, 0xe8, 0x8f, 0x92, 0x76, 0xb4, 0xd7, 0x8e, 0x87, 0x41, 0xd0, 0xf9, 0x5c, + 0x64, 0xea, 0x8d, 0x13, 0xd8, 0xd8, 0xfe, 0x28, 0xb1, 0xf6, 0x1c, 0x46, 0x42, 0x17, 0x86, 0x99, + 0x9d, 0xdc, 0x42, 0x67, 0xbb, 0xc1, 0x30, 0x08, 0xbb, 0x41, 0xd8, 0x39, 0x6c, 0x07, 0xdd, 0x5e, + 0xc0, 0x2a, 0xb9, 0xb2, 0xfe, 0xfe, 0x49, 0x2d, 0x63, 0x4c, 0x45, 0xba, 0xbd, 0x80, 0x2e, 0x75, + 0xa7, 0xf6, 0x10, 0x86, 0xfd, 0x60, 0x30, 0xf0, 0x45, 0xd1, 0xf3, 0x8d, 0xf2, 0x29, 0x9a, 0x1f, + 0xc7, 0x55, 0x7e, 0x1d, 0xcd, 0x0f, 0xfc, 0x5e, 0xd8, 0x4f, 0x0e, 0xba, 0x3c, 0x5a, 0x39, 0x3a, + 0x01, 0x00, 0x83, 0xb8, 0x13, 0x8d, 0xb8, 0x3a, 0x39, 0xca, 0x37, 0xca, 0x9f, 0xcf, 0xa1, 0x85, + 0xac, 0x35, 0xb2, 0x8a, 0xf2, 0x89, 0xdf, 0x13, 0x6d, 0x6e, 0x6d, 0x06, 0x3f, 0xac, 0xba, 0x7e, + 0x8f, 0x02, 0xad, 0xbc, 0x8d, 0x4a, 0x7e, 0x3c, 0x0c, 0x3a, 0x89, 0xa8, 0xca, 0xf5, 0x59, 0xb8, + 0xa8, 0x8c, 0x92, 0x0a, 0x0e, 0xb2, 0x8e, 0x0a, 0x1d, 0x3f, 0xe6, 0x4a, 0x2f, 0xad, 0x7f, 
0x30, + 0x0b, 0x27, 0xcd, 0x8f, 0x03, 0xca, 0xa8, 0x81, 0xcb, 0x5e, 0x34, 0x1a, 0x30, 0xdf, 0xcd, 0xc8, + 0xa5, 0x16, 0x8d, 0x06, 0x94, 0x51, 0x83, 0x5d, 0x3d, 0x08, 0xc9, 0xa8, 0x5a, 0x9c, 0xdd, 0xae, + 0x3a, 0xa3, 0xa4, 0x82, 0x03, 0x68, 0x34, 0x88, 0xa2, 0x2e, 0xab, 0xdd, 0x19, 0x35, 0x6a, 0x46, + 0x51, 0x97, 0x32, 0x6a, 0xd0, 0x28, 0x3c, 0x18, 0x3c, 0x09, 0x46, 0xd5, 0xb9, 0xd9, 0x35, 0x32, + 0x19, 0x25, 0x15, 0x1c, 0x80, 0xd7, 0x30, 0x18, 0xc5, 0x51, 0x58, 0x2d, 0xcf, 0xce, 0xcb, 0x66, + 0x94, 0x54, 0x70, 0x60, 0xbc, 0x46, 0x30, 0x89, 0xab, 0xf3, 0x2f, 0xc1, 0x8b, 0x51, 0x52, 0xc1, + 0x41, 0x7e, 0x84, 0x2a, 0xa3, 0xa0, 0xd3, 0x1f, 0x8e, 0xa2, 0x4e, 0x3f, 0x39, 0xac, 0x22, 0xc6, + 0xf0, 0xa3, 0x59, 0x18, 0xd2, 0x09, 0x39, 0xcd, 0xf2, 0x92, 0xeb, 0xa8, 0x98, 0x04, 0x61, 0x1c, + 0x54, 0x2b, 0x8c, 0xe9, 0xcd, 0x99, 0xb2, 0x1d, 0x08, 0x29, 0xa7, 0x07, 0x46, 0x4f, 0xa3, 0x7e, + 0x27, 0xa8, 0x2e, 0xcc, 0xce, 0xa8, 0x05, 0x84, 0x94, 0xd3, 0x2b, 0x5f, 0x4b, 0x28, 0xef, 0xfa, + 0xbd, 0xe9, 0x96, 0x3a, 0x87, 0xf2, 0xaa, 0xbe, 0x8d, 0x25, 0xbe, 0xb0, 0x71, 0x8e, 0x2f, 0x5a, + 0x38, 0x0f, 0x33, 0x5c, 0xb3, 0xcc, 0x6d, 0x5c, 0x00, 0x90, 0x4e, 0xa0, 0x71, 0x96, 0x51, 0xc1, + 0xb4, 0x3c, 0x13, 0x97, 0x00, 0x64, 0x7a, 0x4d, 0x3c, 0x07, 0x20, 0x9b, 0x5a, 0x26, 0x2e, 0x03, + 0xc8, 0xa6, 0x2e, 0x9e, 0x87, 0x5e, 0x6a, 0x7b, 0xa6, 0xe6, 0x62, 0x04, 0x5f, 0x5b, 0x84, 0x6e, + 0xe2, 0x8a, 0x5c, 0x44, 0xd2, 0x2e, 0x5e, 0x80, 0x6f, 0x6a, 0xad, 0x66, 0xec, 0xe2, 0x45, 0xc5, + 0x42, 0x25, 0x5e, 0x90, 0xb2, 0x8c, 0x96, 0x54, 0x38, 0x4d, 0xb8, 0xed, 0x89, 0x62, 0x70, 0xa2, + 0x20, 0xb4, 0x46, 0x34, 0xd7, 0x68, 0x11, 0x2c, 0x41, 0x87, 0x37, 0x9a, 0x19, 0x48, 0x0e, 0xda, + 0xba, 0x4d, 0xad, 0x3a, 0x25, 0x8e, 0x03, 0x80, 0xbc, 0xf2, 0x2f, 0x09, 0x15, 0xa0, 0x30, 0x01, + 0x57, 0x53, 0x1d, 0x32, 0xcd, 0x4d, 0xd5, 0x34, 0xcf, 0x51, 0x05, 0xb7, 0x45, 0x34, 0xaf, 0xea, + 0xa0, 0x99, 0xa1, 0x36, 0x70, 0x8e, 0x0f, 0x84, 0xa6, 0xdd, 0x20, 0x4d, 0x62, 0x32, 0x8c, 0x3c, + 0xcc, 0x1a, 0x9d, 0x63, 0x17, 0x60, 0xd6, 0xd4, 0x89, 0x69, 0xb0, 0x5d, 0x91, 0x69, 0x62, 0x3a, + 0x2e, 0xf5, 0x00, 0x59, 0x6d, 0xe0, 0xd2, 0x64, 0x16, 0xb5, 0x08, 0x9e, 0x03, 0x59, 0xa6, 0xd5, + 0x34, 0x4c, 0xbe, 0x2f, 0x83, 0xbf, 0xad, 0xcd, 0x86, 0xf1, 0xd0, 0x23, 0x78, 0x1e, 0x04, 0xdb, + 0x2a, 0x75, 0x39, 0x2f, 0x04, 0x82, 0x6d, 0x4a, 0x6c, 0xcb, 0x31, 0x60, 0x6c, 0xa9, 0x0d, 0x5c, + 0x01, 0x67, 0x50, 0x52, 0x6b, 0x90, 0x5d, 0xa3, 0x45, 0xda, 0x60, 0x06, 0x5e, 0x00, 0x34, 0x4a, + 0x1a, 0x8c, 0x21, 0x07, 0x2d, 0x82, 0xcc, 0x56, 0x2a, 0x73, 0x49, 0xf9, 0x56, 0x42, 0x05, 0xe8, + 0x26, 0xa0, 0x5c, 0xcd, 0xa2, 0xcd, 0x8c, 0xe9, 0x0b, 0xa8, 0xac, 0xea, 0xa0, 0x90, 0xda, 0x10, + 0x86, 0x7b, 0xbb, 0x46, 0xc3, 0x50, 0xe9, 0x23, 0x9c, 0x03, 0x61, 0x19, 0xc3, 0x1f, 0x13, 0x8a, + 0xf3, 0x8c, 0x85, 0x61, 0xaa, 0x8d, 0x36, 0x31, 0x75, 0xc3, 0xac, 0xe3, 0x02, 0xf8, 0xa2, 0x4e, + 0xa8, 0x67, 0xea, 0xb8, 0x08, 0x6b, 0x4a, 0xd4, 0x86, 0xe1, 0x70, 0xbb, 0x0d, 0x2a, 0x76, 0x73, + 0x10, 0x5a, 0x67, 0xcb, 0xa2, 0x2e, 0x2e, 0x43, 0xd8, 0x1b, 0x96, 0x59, 0xe7, 0xb9, 0x60, 0x51, + 0x9d, 0x50, 0x8c, 0x00, 0x5b, 0x1c, 0x19, 0x35, 0x5c, 0x51, 0x08, 0x2a, 0xf1, 0xb6, 0x05, 0x3a, + 0xd4, 0x89, 0xa9, 0x13, 0x3a, 0xad, 0x74, 0x8d, 0x34, 0x0d, 0xd3, 0x30, 0x45, 0xb4, 0x9a, 0xaa, + 0xa3, 0x79, 0x0d, 0xd8, 0xe6, 0x40, 0x05, 0x93, 0x78, 0x2e, 0x28, 0xab, 0x7c, 0x89, 0x0a, 0xd0, + 0xb3, 0x40, 0xe9, 0xa6, 0x65, 0xe9, 0x19, 0x16, 0x17, 0x10, 0xd6, 0x2c, 0x53, 0x17, 0x8e, 0x6d, + 0xc3, 0x57, 0x2c, 0x41, 0x70, 0x58, 0x1a, 0xa9, 0x22, 0x89, 0x60, 0x6f, 0xea, 0x86, 0x70, 0x64, + 0x1e, 0x3c, 0x6d, 
0x98, 0x2e, 0xa1, 0xd4, 0xaa, 0xa7, 0xd1, 0xaf, 0xa0, 0xb9, 0x6d, 0x8f, 0xe7, + 0x58, 0x11, 0x92, 0xce, 0xf1, 0x36, 0xb7, 0x21, 0xbd, 0x01, 0x50, 0x52, 0x3e, 0x43, 0x25, 0xde, + 0xec, 0xc0, 0x0e, 0xd3, 0x6b, 0x6e, 0x1e, 0xb5, 0xc3, 0x31, 0xcc, 0xba, 0xd7, 0x50, 0x29, 0x96, + 0xd8, 0xf9, 0xa5, 0xe1, 0x51, 0x96, 0x72, 0x65, 0x54, 0xd0, 0x3d, 0xb5, 0x81, 0xf3, 0x8a, 0x8b, + 0x4a, 0xbc, 0xc5, 0x01, 0x07, 0x7e, 0xbe, 0xc9, 0x70, 0x98, 0x47, 0xc5, 0x9a, 0x41, 0x1d, 0x97, + 0x93, 0x3b, 0x04, 0x6c, 0xc2, 0x39, 0x00, 0xbb, 0x5b, 0x06, 0xd5, 0x71, 0x1e, 0x0c, 0x9d, 0x24, + 0x8c, 0x38, 0x1f, 0x15, 0x94, 0x3b, 0xa8, 0xc4, 0x9b, 0x1d, 0xe3, 0x4a, 0x2d, 0x7b, 0x4a, 0x2f, + 0xd0, 0x84, 0xc1, 0xb8, 0x4b, 0x4c, 0xcb, 0x6d, 0x8b, 0x7d, 0x4e, 0xd9, 0x46, 0x95, 0x4c, 0x57, + 0x93, 0x2f, 0xa1, 0xf3, 0x94, 0x68, 0x86, 0x4d, 0x2d, 0xcd, 0x70, 0x1f, 0x4d, 0xd7, 0x54, 0xfa, + 0x81, 0xa5, 0x16, 0xd8, 0x6f, 0x99, 0xed, 0x0c, 0x2c, 0xa7, 0xc4, 0xa8, 0xc8, 0x9a, 0x19, 0xf8, + 0xd5, 0x25, 0xe6, 0x54, 0x4d, 0xbe, 0x86, 0xce, 0x65, 0x03, 0xc4, 0x3e, 0x73, 0x2b, 0x6b, 0x9e, + 0xeb, 0x51, 0xc2, 0x9d, 0x64, 0xab, 0x8e, 0x8b, 0xf3, 0x10, 0x04, 0x9b, 0x12, 0x87, 0x1f, 0xe8, + 0x16, 0xd1, 0xfc, 0xb8, 0x17, 0xe0, 0x22, 0xbf, 0x7c, 0x78, 0xe9, 0xbe, 0xa4, 0x6c, 0xa2, 0x22, + 0x6b, 0x7c, 0x20, 0xb4, 0x65, 0x19, 0x1a, 0x99, 0x36, 0x5c, 0xd5, 0x26, 0x4d, 0x40, 0x53, 0xd3, + 0x9e, 0x90, 0x63, 0x22, 0xd4, 0xb4, 0x97, 0xfc, 0xbe, 0x8c, 0x96, 0xa6, 0x4f, 0x4d, 0xf2, 0x0a, + 0xc2, 0x9f, 0x07, 0x7e, 0xb7, 0x9d, 0xc0, 0xd9, 0xb0, 0xdd, 0x0f, 0xbb, 0xc1, 0x33, 0x76, 0x94, + 0x29, 0xd2, 0x25, 0x80, 0xb3, 0x23, 0xa3, 0x01, 0x50, 0xd9, 0x40, 0xc5, 0x7d, 0xff, 0x49, 0xb0, + 0x2f, 0xce, 0x28, 0x1b, 0x33, 0x9d, 0xce, 0x56, 0x1b, 0x40, 0x4a, 0x39, 0x07, 0xe5, 0xd7, 0x73, + 0xa8, 0xc8, 0x00, 0xcf, 0x9d, 0x84, 0xd5, 0xcd, 0x4d, 0x4a, 0x5a, 0x58, 0x62, 0x2d, 0x15, 0x8a, + 0x98, 0x67, 0x85, 0xaa, 0xb7, 0xb4, 0x06, 0xef, 0x5f, 0xaa, 0xde, 0x6a, 0x5a, 0x3a, 0x2e, 0x80, + 0x1b, 0x55, 0x58, 0x15, 0x19, 0x82, 0x6d, 0x5b, 0x50, 0xbc, 0x00, 0x74, 0x5d, 0x8a, 0xe7, 0x58, + 0xc7, 0xf7, 0x76, 0x79, 0xa7, 0x52, 0xbd, 0x5d, 0x70, 0x02, 0x9e, 0x97, 0x4b, 0x28, 0xa7, 0x69, + 0x18, 0x01, 0x89, 0xc6, 0xd8, 0x57, 0xc6, 0x13, 0x81, 0xb5, 0x71, 0x0d, 0xea, 0x00, 0x2f, 0x32, + 0x2f, 0xc2, 0x92, 0x91, 0x2d, 0xf1, 0x59, 0x61, 0xe3, 0xb3, 0xe9, 0xd0, 0xc0, 0x80, 0xa0, 0x1b, + 0x8e, 0x66, 0x79, 0xd4, 0x21, 0xf8, 0x1c, 0x4b, 0x7c, 0x6b, 0x73, 0x1b, 0xcb, 0xb0, 0x22, 0xbb, + 0x76, 0x03, 0x9f, 0x67, 0x0d, 0xd6, 0x22, 0xce, 0x8e, 0xe1, 0x6e, 0xe1, 0x0b, 0x00, 0x37, 0x00, + 0xe3, 0x35, 0x58, 0x35, 0x55, 0xfa, 0x00, 0x5f, 0x04, 0x6e, 0xcd, 0x1d, 0x82, 0x2f, 0xf1, 0x45, + 0x0b, 0x57, 0xd9, 0x04, 0x22, 0x75, 0xfc, 0x7f, 0xa0, 0xa8, 0x69, 0xe2, 0x65, 0x60, 0x62, 0xda, + 0xc2, 0xe6, 0xff, 0x07, 0x0d, 0x4d, 0xa6, 0xe1, 0xeb, 0xa0, 0x80, 0x39, 0xd6, 0xf0, 0x8d, 0x74, + 0x74, 0xbd, 0xc9, 0xfa, 0x08, 0x2b, 0x58, 0xfc, 0x16, 0x8c, 0x27, 0x1b, 0x5f, 0x16, 0xed, 0x59, + 0x75, 0xd5, 0x5d, 0xc3, 0xc1, 0x6f, 0xf3, 0x94, 0xa0, 0x2e, 0x70, 0x54, 0xd8, 0x58, 0x63, 0x8e, + 0xb8, 0xc2, 0xf2, 0x12, 0x34, 0x7c, 0x87, 0xaf, 0x1c, 0x07, 0x5f, 0x65, 0xb8, 0x96, 0xe3, 0x82, + 0x4e, 0x3f, 0x12, 0xe9, 0xca, 0xb0, 0xaf, 0x8d, 0x37, 0xe6, 0x36, 0x5e, 0xe1, 0x95, 0x47, 0xc0, + 0x33, 0xef, 0xf2, 0xd9, 0x49, 0x6a, 0xf8, 0xba, 0x58, 0xd9, 0xf8, 0x06, 0x93, 0x42, 0x2d, 0xb3, + 0x81, 0xdf, 0x4b, 0x07, 0xea, 0xfb, 0x60, 0xa1, 0xed, 0xe0, 0x55, 0xb0, 0xf0, 0xa1, 0xa7, 0x9a, + 0x4c, 0x9f, 0x35, 0xc0, 0xa4, 0x1a, 0x2c, 0x3f, 0x80, 0x0f, 0x6c, 0x49, 0x49, 0x03, 0xdf, 0x64, + 0x1f, 0x74, 0x6a, 0xd9, 0x78, 0x1d, 0x58, 
0x80, 0x80, 0x0d, 0xd0, 0x81, 0x92, 0xa6, 0xa9, 0x9a, + 0x2e, 0xbe, 0xc5, 0x2b, 0x17, 0xec, 0x34, 0x75, 0xaf, 0x89, 0x3f, 0x04, 0xe9, 0xd4, 0xb2, 0x5c, + 0x7c, 0x1b, 0x56, 0x0e, 0x38, 0xe7, 0x23, 0xb6, 0xf2, 0x6a, 0x35, 0x7c, 0x07, 0x56, 0x4c, 0xe2, + 0x8f, 0x59, 0xd3, 0xb1, 0x6c, 0x43, 0xc3, 0x77, 0xd9, 0x60, 0x07, 0xe0, 0xbd, 0xa9, 0x41, 0x74, + 0x1f, 0x50, 0x76, 0x99, 0xd9, 0x1f, 0xb3, 0x76, 0xe5, 0xb1, 0x59, 0xff, 0x09, 0xa3, 0x34, 0xdc, + 0x06, 0xc1, 0x9f, 0xf2, 0x79, 0xd4, 0xb2, 0xb7, 0x80, 0xfa, 0x33, 0x91, 0x72, 0x50, 0x86, 0x58, + 0x65, 0xd9, 0xe9, 0xed, 0xb6, 0x5a, 0x78, 0x13, 0x96, 0x3a, 0x93, 0xaa, 0x01, 0x4a, 0xcd, 0xa2, + 0xc4, 0xa8, 0x9b, 0x58, 0x07, 0x57, 0x3c, 0xd8, 0xc1, 0x84, 0x4d, 0x18, 0xc3, 0x71, 0x71, 0x8d, + 0x9f, 0x49, 0x9a, 0x1a, 0xae, 0xb3, 0x04, 0xb0, 0x9a, 0x3c, 0x2f, 0xb7, 0x60, 0x22, 0xa4, 0x3b, + 0x16, 0x78, 0x83, 0x61, 0x7a, 0x4d, 0x0d, 0x6f, 0x83, 0x5b, 0x34, 0xcb, 0xc6, 0x0f, 0xc0, 0x13, + 0xba, 0xe1, 0xb0, 0xe1, 0x4d, 0x74, 0xdc, 0x60, 0xa5, 0xe0, 0xd8, 0xb8, 0x09, 0xb8, 0x75, 0x10, + 0x6f, 0xb2, 0x15, 0xc4, 0xda, 0x02, 0x83, 0x0c, 0xb3, 0x06, 0x50, 0x9b, 0xa5, 0x21, 0x71, 0xf0, + 0x43, 0x96, 0x67, 0xcc, 0x60, 0xaa, 0x7c, 0x9d, 0x43, 0x8b, 0x53, 0x97, 0xea, 0xef, 0x77, 0x81, + 0x24, 0x53, 0xcf, 0x0f, 0x37, 0x67, 0xb9, 0xcd, 0x67, 0x5f, 0x21, 0xa6, 0xae, 0xf3, 0xf9, 0x97, + 0x7f, 0x2c, 0xf9, 0x40, 0xdc, 0xc8, 0x31, 0x5a, 0x10, 0x0f, 0x40, 0xc7, 0x0d, 0x13, 0x84, 0x4a, + 0x9a, 0xd5, 0x6c, 0xc2, 0xa5, 0x5c, 0xa9, 0xa3, 0x72, 0x6a, 0x92, 0x5c, 0x9d, 0x3c, 0x50, 0xf1, + 0xfb, 0xff, 0xf8, 0x79, 0xea, 0x6d, 0xb4, 0xf0, 0x24, 0xe8, 0xf5, 0xc3, 0x76, 0xb4, 0xb7, 0x17, + 0x07, 0xfc, 0x5e, 0x57, 0xa4, 0x15, 0x06, 0xb3, 0x18, 0x48, 0x69, 0xa0, 0x8b, 0xda, 0xbe, 0x1f, + 0xc7, 0xfd, 0xbd, 0x7e, 0x87, 0xbd, 0xbf, 0x69, 0x7e, 0x12, 0xf4, 0xa2, 0xd1, 0xf1, 0xcf, 0x36, + 0x6f, 0x22, 0xd4, 0x89, 0xc2, 0xbd, 0x7e, 0x97, 0xbd, 0x93, 0xf0, 0xbb, 0x6a, 0x06, 0xa2, 0xfc, + 0x4e, 0x42, 0x97, 0xd4, 0xd0, 0xdf, 0x3f, 0xfc, 0x69, 0x30, 0x31, 0x34, 0xf8, 0xc9, 0x41, 0x10, + 0x27, 0xb2, 0x86, 0xca, 0x5d, 0xf1, 0xbc, 0x76, 0xca, 0xa0, 0xa5, 0xaf, 0x71, 0x74, 0x4c, 0x28, + 0xdb, 0x68, 0x31, 0x08, 0x3b, 0x51, 0xb7, 0x1f, 0xf6, 0xda, 0x99, 0x08, 0xde, 0x38, 0x31, 0x82, + 0x9c, 0x86, 0xc5, 0x6e, 0x21, 0xc8, 0xec, 0x94, 0xbf, 0x4b, 0xa8, 0xfa, 0xbc, 0xca, 0xf1, 0x30, + 0x82, 0xd1, 0xba, 0x83, 0xe4, 0x54, 0x74, 0x7b, 0x12, 0x69, 0x69, 0xc6, 0x48, 0x9f, 0x4b, 0x79, + 0x4c, 0xee, 0xfc, 0xd9, 0xe7, 0xc0, 0xdc, 0xf4, 0x73, 0xa0, 0x4c, 0x78, 0x56, 0x81, 0x43, 0x63, + 0xf1, 0xb8, 0x75, 0xed, 0x14, 0xb2, 0x00, 0x9f, 0x4e, 0x28, 0x95, 0x3f, 0x4a, 0xe8, 0x0d, 0x61, + 0x18, 0x4f, 0xe0, 0xff, 0x95, 0x88, 0x7c, 0x89, 0xde, 0xfc, 0x2e, 0xbd, 0x45, 0x58, 0x54, 0x54, + 0x06, 0x58, 0xd2, 0x0f, 0xe2, 0xaa, 0xc4, 0x1c, 0x74, 0xf5, 0x54, 0x25, 0x4c, 0xc7, 0x64, 0x2f, + 0x0a, 0x00, 0x9c, 0xf8, 0x2f, 0x66, 0x35, 0xe8, 0x07, 0xf1, 0x0f, 0xdc, 0x65, 0xcf, 0xc6, 0x65, + 0x37, 0x51, 0xf8, 0xbf, 0xe3, 0xab, 0xdf, 0x4a, 0xe8, 0x42, 0x5a, 0x3e, 0x87, 0x61, 0xe2, 0x3f, + 0xfb, 0x81, 0x7b, 0xea, 0x4f, 0x12, 0x7a, 0xed, 0x88, 0xbe, 0xc2, 0x51, 0x53, 0x65, 0x27, 0xbd, + 0x6c, 0xd9, 0xc9, 0xf7, 0x51, 0x89, 0x9d, 0x62, 0xe3, 0x6a, 0x8e, 0xf1, 0x78, 0xe7, 0xa4, 0xc9, + 0x04, 0xc8, 0x54, 0xd0, 0x4c, 0xb9, 0x3a, 0x7f, 0xc4, 0xd5, 0x8f, 0xd1, 0x79, 0xd1, 0xaa, 0x0f, + 0xa1, 0xf7, 0xbf, 0x4a, 0x47, 0x2b, 0x03, 0x74, 0x61, 0x9a, 0xb7, 0x70, 0x8a, 0x87, 0x50, 0x87, + 0x0f, 0x84, 0x49, 0xfe, 0x7c, 0x78, 0x02, 0xfb, 0xe3, 0xe7, 0x09, 0xcd, 0x30, 0x52, 0x7e, 0x56, + 0x40, 0xe7, 0x55, 0xfe, 0xbb, 0x50, 0xf0, 0xaa, 0x6d, 0x91, 0x77, 
0x50, 0x79, 0x2f, 0xf0, 0x93, + 0x83, 0x51, 0x10, 0x8b, 0x77, 0xe1, 0x7b, 0x27, 0x30, 0x39, 0x46, 0x95, 0xd5, 0x9a, 0x60, 0x41, + 0xc7, 0xcc, 0x9e, 0xcf, 0xc6, 0xfc, 0xf7, 0xcc, 0xc6, 0xe5, 0x7f, 0x4b, 0xa8, 0x9c, 0x0a, 0x92, + 0xaf, 0xa2, 0xa5, 0xe0, 0x59, 0x32, 0xf2, 0x3b, 0x49, 0x3b, 0x66, 0xa9, 0xc9, 0x5c, 0x50, 0xa6, + 0x8b, 0x02, 0xca, 0xf3, 0x55, 0x7e, 0x17, 0xe1, 0x14, 0x6d, 0x5c, 0xd8, 0x39, 0x86, 0x78, 0x56, + 0xc0, 0xd3, 0x1e, 0x20, 0xdf, 0x47, 0xcb, 0x29, 0xea, 0x31, 0x63, 0x2c, 0xcf, 0x88, 0xaa, 0x02, + 0x43, 0x7f, 0x6e, 0x46, 0xdd, 0x41, 0xd5, 0x29, 0x41, 0x87, 0x19, 0xda, 0x02, 0xa3, 0xbd, 0x98, + 0x15, 0x38, 0xe9, 0xd3, 0xf2, 0x15, 0xb4, 0xd8, 0x11, 0xd9, 0xd4, 0x66, 0x87, 0xb4, 0x12, 0x43, + 0x5f, 0xe8, 0x64, 0x52, 0x4c, 0xf9, 0x4d, 0x1e, 0x3a, 0x47, 0xd6, 0xf1, 0x3f, 0xa4, 0x42, 0xcc, + 0xb6, 0xcd, 0xfc, 0xcb, 0xb5, 0xcd, 0xe3, 0x0f, 0x0f, 0x85, 0x57, 0x7b, 0x78, 0x28, 0x1e, 0x39, + 0x3c, 0x4c, 0x17, 0x6c, 0xe9, 0x15, 0x15, 0xec, 0xf5, 0x3b, 0x68, 0x21, 0x9b, 0xc6, 0xfc, 0x66, + 0x60, 0x12, 0x7c, 0x06, 0x56, 0x9e, 0x5b, 0xbb, 0xc3, 0x2f, 0xcb, 0x9e, 0x5b, 0xbb, 0x79, 0x9b, + 0x5f, 0x96, 0x3d, 0xb7, 0xb6, 0xb1, 0x8e, 0xf3, 0xeb, 0x7f, 0x29, 0xa3, 0xb3, 0x0d, 0x21, 0xd1, + 0xe1, 0xbf, 0x21, 0xcb, 0x7f, 0x90, 0x10, 0x3e, 0x7a, 0xe6, 0x92, 0x6f, 0x9f, 0x58, 0xa4, 0xc7, + 0x9e, 0x2b, 0x97, 0x3f, 0x9a, 0x99, 0x8e, 0xe7, 0x99, 0xb2, 0xfa, 0xd5, 0xdf, 0xfe, 0xf1, 0x4d, + 0x6e, 0x45, 0xb9, 0x32, 0xfe, 0xb1, 0x3b, 0x75, 0x75, 0x7c, 0xd7, 0x3f, 0x42, 0x74, 0x57, 0xba, + 0x2e, 0x7f, 0x2b, 0xa1, 0xb3, 0x47, 0xa6, 0xac, 0xfc, 0xe1, 0xe9, 0x84, 0x1f, 0x39, 0x46, 0x2c, + 0xdf, 0x9e, 0x95, 0x4c, 0xa8, 0xfc, 0x3e, 0x53, 0xf9, 0x9a, 0xa2, 0x7c, 0xb7, 0xca, 0x29, 0x0d, + 0x68, 0xfc, 0xd7, 0x23, 0x07, 0x99, 0x4c, 0x89, 0xde, 0x9f, 0x41, 0x83, 0xe7, 0x4e, 0x8e, 0xcb, + 0x1f, 0xbf, 0x24, 0xb5, 0x30, 0xe3, 0x16, 0x33, 0x63, 0x55, 0x79, 0xf7, 0x04, 0x33, 0x0e, 0xa7, + 0xfc, 0xff, 0x2b, 0x09, 0x2d, 0x4e, 0x8d, 0x6e, 0x79, 0xe3, 0x94, 0xa1, 0xcf, 0x1e, 0x4c, 0x96, + 0x6f, 0xcd, 0x46, 0x24, 0x54, 0xbe, 0xc1, 0x54, 0xbe, 0xaa, 0x5c, 0x7e, 0x41, 0xb2, 0x30, 0x0a, + 0xd0, 0xf4, 0x97, 0x12, 0x5a, 0xc8, 0x8e, 0x53, 0x79, 0xfd, 0x74, 0x15, 0x98, 0x9d, 0xeb, 0xcb, + 0x1b, 0x33, 0xd1, 0x08, 0x35, 0xaf, 0x33, 0x35, 0xdf, 0x51, 0xde, 0x3a, 0x46, 0xcd, 0x6c, 0xf7, + 0x4d, 0xb5, 0xcc, 0x36, 0xe0, 0x13, 0xb5, 0x3c, 0x66, 0x4c, 0x2e, 0x6f, 0xcc, 0x44, 0x73, 0x0a, + 0x2d, 0xfd, 0x0c, 0xc1, 0x5d, 0xe9, 0xfa, 0xe6, 0x57, 0x12, 0x7a, 0xbb, 0x13, 0x0d, 0x5e, 0x2c, + 0x66, 0xf3, 0xc2, 0x91, 0x16, 0x63, 0x8f, 0xa2, 0x24, 0xb2, 0xa5, 0xc7, 0x44, 0x90, 0xf5, 0x22, + 0x20, 0x59, 0x8d, 0x46, 0xbd, 0xb5, 0x5e, 0x10, 0xb2, 0xff, 0x89, 0xac, 0xf1, 0x4f, 0xfe, 0xb0, + 0x1f, 0x7f, 0xc7, 0x9f, 0x5f, 0xee, 0xa5, 0x80, 0x27, 0x25, 0x46, 0xb1, 0xf1, 0x9f, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xcc, 0x93, 0x36, 0x44, 0x2d, 0x23, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/location/locations.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/location/locations.pb.go new file mode 100644 index 0000000..826f57f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/location/locations.pb.go @@ -0,0 +1,424 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/location/locations.proto + +package location // import "google.golang.org/genproto/googleapis/cloud/location" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message for +// [Locations.ListLocations][google.cloud.location.Locations.ListLocations]. +type ListLocationsRequest struct { + // The resource that owns the locations collection, if applicable. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The standard list filter. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The standard list page size. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The standard list page token. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLocationsRequest) Reset() { *m = ListLocationsRequest{} } +func (m *ListLocationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListLocationsRequest) ProtoMessage() {} +func (*ListLocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_locations_88684e037ed7e8be, []int{0} +} +func (m *ListLocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLocationsRequest.Unmarshal(m, b) +} +func (m *ListLocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLocationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListLocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLocationsRequest.Merge(dst, src) +} +func (m *ListLocationsRequest) XXX_Size() int { + return xxx_messageInfo_ListLocationsRequest.Size(m) +} +func (m *ListLocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListLocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLocationsRequest proto.InternalMessageInfo + +func (m *ListLocationsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListLocationsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListLocationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListLocationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for +// [Locations.ListLocations][google.cloud.location.Locations.ListLocations]. +type ListLocationsResponse struct { + // A list of locations that matches the specified filter in the request. + Locations []*Location `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty"` + // The standard List next-page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLocationsResponse) Reset() { *m = ListLocationsResponse{} } +func (m *ListLocationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListLocationsResponse) ProtoMessage() {} +func (*ListLocationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_locations_88684e037ed7e8be, []int{1} +} +func (m *ListLocationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLocationsResponse.Unmarshal(m, b) +} +func (m *ListLocationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLocationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListLocationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLocationsResponse.Merge(dst, src) +} +func (m *ListLocationsResponse) XXX_Size() int { + return xxx_messageInfo_ListLocationsResponse.Size(m) +} +func (m *ListLocationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListLocationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLocationsResponse proto.InternalMessageInfo + +func (m *ListLocationsResponse) GetLocations() []*Location { + if m != nil { + return m.Locations + } + return nil +} + +func (m *ListLocationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for +// [Locations.GetLocation][google.cloud.location.Locations.GetLocation]. +type GetLocationRequest struct { + // Resource name for the location. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLocationRequest) Reset() { *m = GetLocationRequest{} } +func (m *GetLocationRequest) String() string { return proto.CompactTextString(m) } +func (*GetLocationRequest) ProtoMessage() {} +func (*GetLocationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_locations_88684e037ed7e8be, []int{2} +} +func (m *GetLocationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLocationRequest.Unmarshal(m, b) +} +func (m *GetLocationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLocationRequest.Marshal(b, m, deterministic) +} +func (dst *GetLocationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLocationRequest.Merge(dst, src) +} +func (m *GetLocationRequest) XXX_Size() int { + return xxx_messageInfo_GetLocationRequest.Size(m) +} +func (m *GetLocationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLocationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLocationRequest proto.InternalMessageInfo + +func (m *GetLocationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A resource that represents Google Cloud Platform location. +type Location struct { + // Resource name for the location, which may vary between implementations. + // For example: `"projects/example-project/locations/us-east1"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The canonical id for this location. For example: `"us-east1"`. 
+ LocationId string `protobuf:"bytes,4,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + // The friendly name for this location, typically a nearby city name. + // For example, "Tokyo". + DisplayName string `protobuf:"bytes,5,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Cross-service attributes for the location. For example + // + // {"cloud.googleapis.com/region": "us-east1"} + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Service-specific metadata. For example the available capacity at the given + // location. + Metadata *any.Any `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_locations_88684e037ed7e8be, []int{3} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Location.Unmarshal(m, b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Location.Marshal(b, m, deterministic) +} +func (dst *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(dst, src) +} +func (m *Location) XXX_Size() int { + return xxx_messageInfo_Location.Size(m) +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Location) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +func (m *Location) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Location) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Location) GetMetadata() *any.Any { + if m != nil { + return m.Metadata + } + return nil +} + +func init() { + proto.RegisterType((*ListLocationsRequest)(nil), "google.cloud.location.ListLocationsRequest") + proto.RegisterType((*ListLocationsResponse)(nil), "google.cloud.location.ListLocationsResponse") + proto.RegisterType((*GetLocationRequest)(nil), "google.cloud.location.GetLocationRequest") + proto.RegisterType((*Location)(nil), "google.cloud.location.Location") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.location.Location.LabelsEntry") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LocationsClient is the client API for Locations service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LocationsClient interface { + // Lists information about the supported locations for this service. 
+ ListLocations(ctx context.Context, in *ListLocationsRequest, opts ...grpc.CallOption) (*ListLocationsResponse, error) + // Gets information about a location. + GetLocation(ctx context.Context, in *GetLocationRequest, opts ...grpc.CallOption) (*Location, error) +} + +type locationsClient struct { + cc *grpc.ClientConn +} + +func NewLocationsClient(cc *grpc.ClientConn) LocationsClient { + return &locationsClient{cc} +} + +func (c *locationsClient) ListLocations(ctx context.Context, in *ListLocationsRequest, opts ...grpc.CallOption) (*ListLocationsResponse, error) { + out := new(ListLocationsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.location.Locations/ListLocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *locationsClient) GetLocation(ctx context.Context, in *GetLocationRequest, opts ...grpc.CallOption) (*Location, error) { + out := new(Location) + err := c.cc.Invoke(ctx, "/google.cloud.location.Locations/GetLocation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LocationsServer is the server API for Locations service. +type LocationsServer interface { + // Lists information about the supported locations for this service. + ListLocations(context.Context, *ListLocationsRequest) (*ListLocationsResponse, error) + // Gets information about a location. + GetLocation(context.Context, *GetLocationRequest) (*Location, error) +} + +func RegisterLocationsServer(s *grpc.Server, srv LocationsServer) { + s.RegisterService(&_Locations_serviceDesc, srv) +} + +func _Locations_ListLocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListLocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LocationsServer).ListLocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.location.Locations/ListLocations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LocationsServer).ListLocations(ctx, req.(*ListLocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Locations_GetLocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLocationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LocationsServer).GetLocation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.location.Locations/GetLocation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LocationsServer).GetLocation(ctx, req.(*GetLocationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Locations_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.location.Locations", + HandlerType: (*LocationsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListLocations", + Handler: _Locations_ListLocations_Handler, + }, + { + MethodName: "GetLocation", + Handler: _Locations_GetLocation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/location/locations.proto", +} + +func init() { + proto.RegisterFile("google/cloud/location/locations.proto", fileDescriptor_locations_88684e037ed7e8be) +} + +var fileDescriptor_locations_88684e037ed7e8be = []byte{ + // 527 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xd6, 0x3a, 0x4d, 0x94, 0x8c, 0x29, 0xa0, 0x55, 0x8a, 0xdc, 0x00, 0x4a, 0x62, 0x04, 0xa4, + 0x05, 0x79, 0x21, 0x5c, 0xf8, 0x51, 0x0e, 0x14, 0x21, 0x84, 0x14, 0xa1, 0xc8, 0x70, 0xe2, 0x12, + 0x6d, 0xe2, 0xad, 0x65, 0xea, 0xec, 0x1a, 0xef, 0xa6, 0xc2, 0x45, 0xed, 0x01, 0xf1, 0x06, 0xe5, + 0x21, 0x78, 0x1f, 0x5e, 0x81, 0x87, 0xe0, 0x88, 0xbc, 0xfe, 0x49, 0x28, 0x2e, 0xe5, 0xb6, 0x3b, + 0xf3, 0x7d, 0xf3, 0xcd, 0xb7, 0x33, 0x36, 0xdc, 0xf6, 0x85, 0xf0, 0x43, 0x46, 0xe6, 0xa1, 0x58, + 0x7a, 0x24, 0x14, 0x73, 0xaa, 0x02, 0xc1, 0xcb, 0x83, 0x74, 0xa2, 0x58, 0x28, 0x81, 0xb7, 0x32, + 0x98, 0xa3, 0x61, 0x4e, 0x91, 0xed, 0xdc, 0xc8, 0xd9, 0x34, 0x0a, 0x08, 0xe5, 0x5c, 0xa8, 0x75, + 0x52, 0x67, 0x3b, 0xcf, 0xea, 0xdb, 0x6c, 0xb9, 0x4f, 0x28, 0x4f, 0xb2, 0x94, 0x7d, 0x02, 0xed, + 0x71, 0x20, 0xd5, 0xb8, 0x90, 0x71, 0xd9, 0xc7, 0x25, 0x93, 0x0a, 0x63, 0xd8, 0xe0, 0x74, 0xc1, + 0x2c, 0xd4, 0x43, 0x83, 0x96, 0xab, 0xcf, 0xf8, 0x1a, 0x34, 0xf6, 0x83, 0x50, 0xb1, 0xd8, 0x32, + 0x74, 0x34, 0xbf, 0xe1, 0xeb, 0xd0, 0x8a, 0xa8, 0xcf, 0xa6, 0x32, 0x38, 0x62, 0x56, 0xad, 0x87, + 0x06, 0x75, 0xb7, 0x99, 0x06, 0xde, 0x06, 0x47, 0x0c, 0xdf, 0x04, 0xd0, 0x49, 0x25, 0x0e, 0x18, + 0xb7, 0x36, 0x34, 0x51, 0xc3, 0xdf, 0xa5, 0x01, 0xfb, 0x04, 0xb6, 0xce, 0xe8, 0xcb, 0x48, 0x70, + 0xc9, 0xf0, 0x08, 0x5a, 0xa5, 0x77, 0x0b, 0xf5, 0x6a, 0x03, 0x73, 0xd8, 0x75, 0x2a, 0xcd, 0x3b, + 0x05, 0xd9, 0x5d, 0x31, 0xf0, 0x1d, 0xb8, 0xc2, 0xd9, 0x27, 0x35, 0x5d, 0xd3, 0xce, 0x9a, 0xde, + 0x4c, 0xc3, 0x93, 0x52, 0x7f, 0x00, 0xf8, 0x15, 0x2b, 0xe5, 0xff, 0xe1, 0xde, 0xfe, 0x66, 0x40, + 0xb3, 0xc0, 0x55, 0x3e, 0x4f, 0x17, 0xcc, 0x42, 0x7f, 0x1a, 0x78, 0xb9, 0x55, 0x28, 0x42, 0xaf, + 0x3d, 0xdc, 0x87, 0x4b, 0x5e, 0x20, 0xa3, 0x90, 0x26, 0x53, 0x4d, 0xae, 0x6b, 0x84, 0x99, 0xc7, + 0xde, 0xa4, 0x35, 0x5e, 0x40, 0x23, 0xa4, 0x33, 0x16, 0x4a, 0xcb, 0xd0, 0x96, 0xef, 0x5d, 0x60, + 0xd9, 0x19, 0x6b, 0xf4, 0x4b, 0xae, 0xe2, 0xc4, 0xcd, 0xa9, 0xf8, 0x01, 0x34, 0x17, 0x4c, 0x51, + 0x8f, 0x2a, 0xaa, 0xc7, 0x61, 0x0e, 0xdb, 0x45, 0x99, 0x62, 0x03, 0x9c, 0xe7, 0x3c, 0x71, 0x4b, + 0x54, 0xe7, 0x09, 0x98, 0x6b, 0x85, 0xf0, 0x55, 0xa8, 0x1d, 0xb0, 0x24, 0x37, 0x97, 0x1e, 0x71, + 0x1b, 0xea, 0x87, 0x34, 0x5c, 0xb2, 0xfc, 0x11, 0xb3, 0xcb, 0x53, 0xe3, 0x31, 0x1a, 0x7e, 0x37, + 0xa0, 0x55, 0x4e, 0x0f, 0x9f, 0x22, 0xd8, 0xfc, 0x63, 0x9e, 0xf8, 0x5c, 0x07, 0x15, 0x5b, 0xd7, + 0xb9, 0xff, 0x7f, 0xe0, 0x6c, 0x45, 0xec, 0xbb, 0x5f, 0x7e, 0xfc, 0x3c, 0x35, 0xfa, 0xb8, 0x4b, + 0x0e, 0x1f, 0x92, 0xcf, 0xe9, 0x93, 0x8e, 0xa2, 0x58, 0x7c, 0x60, 0x73, 0x25, 0xc9, 0xee, 0xf1, + 0xea, 0xd3, 0xc1, 0x5f, 0x11, 0x98, 0x6b, 0x53, 0xc6, 0x3b, 0xe7, 0xc8, 0xfc, 0xbd, 0x09, 0x9d, + 0x8b, 0x76, 0xce, 0xde, 0xd1, 0x4d, 0xdc, 0xc2, 0xfd, 0xaa, 0x26, 0x56, 0x3d, 0x90, 0xdd, 0xe3, + 0x3d, 0x01, 0xdb, 0x73, 0xb1, 0xa8, 0x2e, 0xb8, 0x77, 0xb9, 0xf4, 0x37, 0x49, 0x67, 0x34, 0x41, + 0xef, 0x47, 0x39, 0xd0, 0x17, 0x21, 0xe5, 0xbe, 0x23, 0x62, 0x9f, 0xf8, 0x8c, 0xeb, 0x09, 0x92, + 0x2c, 0x45, 0xa3, 0x40, 0x9e, 0xf9, 0x61, 0x3c, 0x2b, 0x0e, 0xbf, 0x10, 0x9a, 0x35, 0x34, 0xf8, + 0xd1, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x27, 0xd7, 0x9e, 0x57, 0x5c, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/job_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/job_service.pb.go new file mode 100644 index 0000000..ea6ff8d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/job_service.pb.go 
@@ -0,0 +1,2106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/ml/v1/job_service.proto + +package ml // import "google.golang.org/genproto/googleapis/cloud/ml/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/serviceconfig" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A scale tier is an abstract representation of the resources Cloud ML +// will allocate to a training job. When selecting a scale tier for your +// training job, you should consider the size of your training dataset and +// the complexity of your model. As the tiers increase, virtual machines are +// added to handle your job, and the individual machines in the cluster +// generally have more memory and greater processing power than they do at +// lower tiers. The number of training units charged per hour of processing +// increases as tiers get more advanced. Refer to the +// [pricing guide](/ml/pricing) for more details. Note that in addition to +// incurring costs, your use of training resources is constrained by the +// [quota policy](/ml/quota). +type TrainingInput_ScaleTier int32 + +const ( + // A single worker instance. This tier is suitable for learning how to use + // Cloud ML, and for experimenting with new models using small datasets. + TrainingInput_BASIC TrainingInput_ScaleTier = 0 + // Many workers and a few parameter servers. + TrainingInput_STANDARD_1 TrainingInput_ScaleTier = 1 + // A large number of workers with many parameter servers. + TrainingInput_PREMIUM_1 TrainingInput_ScaleTier = 3 + // A single worker instance [with a GPU](ml/docs/how-tos/using-gpus). + TrainingInput_BASIC_GPU TrainingInput_ScaleTier = 6 + // The CUSTOM tier is not a set tier, but rather enables you to use your + // own cluster specification. When you use this tier, set values to + // configure your processing cluster according to these guidelines: + // + // * You _must_ set `TrainingInput.masterType` to specify the type + // of machine to use for your master node. This is the only required + // setting. + // + // * You _may_ set `TrainingInput.workerCount` to specify the number of + // workers to use. If you specify one or more workers, you _must_ also + // set `TrainingInput.workerType` to specify the type of machine to use + // for your worker nodes. + // + // * You _may_ set `TrainingInput.parameterServerCount` to specify the + // number of parameter servers to use. If you specify one or more + // parameter servers, you _must_ also set + // `TrainingInput.parameterServerType` to specify the type of machine to + // use for your parameter servers. + // + // Note that all of your workers must use the same machine type, which can + // be different from your parameter server type and master type. 
Your + // parameter servers must likewise use the same machine type, which can be + // different from your worker type and master type. + TrainingInput_CUSTOM TrainingInput_ScaleTier = 5 +) + +var TrainingInput_ScaleTier_name = map[int32]string{ + 0: "BASIC", + 1: "STANDARD_1", + 3: "PREMIUM_1", + 6: "BASIC_GPU", + 5: "CUSTOM", +} +var TrainingInput_ScaleTier_value = map[string]int32{ + "BASIC": 0, + "STANDARD_1": 1, + "PREMIUM_1": 3, + "BASIC_GPU": 6, + "CUSTOM": 5, +} + +func (x TrainingInput_ScaleTier) String() string { + return proto.EnumName(TrainingInput_ScaleTier_name, int32(x)) +} +func (TrainingInput_ScaleTier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{0, 0} +} + +// The available types of optimization goals. +type HyperparameterSpec_GoalType int32 + +const ( + // Goal Type will default to maximize. + HyperparameterSpec_GOAL_TYPE_UNSPECIFIED HyperparameterSpec_GoalType = 0 + // Maximize the goal metric. + HyperparameterSpec_MAXIMIZE HyperparameterSpec_GoalType = 1 + // Minimize the goal metric. + HyperparameterSpec_MINIMIZE HyperparameterSpec_GoalType = 2 +) + +var HyperparameterSpec_GoalType_name = map[int32]string{ + 0: "GOAL_TYPE_UNSPECIFIED", + 1: "MAXIMIZE", + 2: "MINIMIZE", +} +var HyperparameterSpec_GoalType_value = map[string]int32{ + "GOAL_TYPE_UNSPECIFIED": 0, + "MAXIMIZE": 1, + "MINIMIZE": 2, +} + +func (x HyperparameterSpec_GoalType) String() string { + return proto.EnumName(HyperparameterSpec_GoalType_name, int32(x)) +} +func (HyperparameterSpec_GoalType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{1, 0} +} + +// The type of the parameter. +type ParameterSpec_ParameterType int32 + +const ( + // You must specify a valid type. Using this unspecified type will result in + // an error. + ParameterSpec_PARAMETER_TYPE_UNSPECIFIED ParameterSpec_ParameterType = 0 + // Type for real-valued parameters. + ParameterSpec_DOUBLE ParameterSpec_ParameterType = 1 + // Type for integral parameters. + ParameterSpec_INTEGER ParameterSpec_ParameterType = 2 + // The parameter is categorical, with a value chosen from the categories + // field. + ParameterSpec_CATEGORICAL ParameterSpec_ParameterType = 3 + // The parameter is real valued, with a fixed set of feasible points. If + // `type==DISCRETE`, feasible_points must be provided, and + // {`min_value`, `max_value`} will be ignored. + ParameterSpec_DISCRETE ParameterSpec_ParameterType = 4 +) + +var ParameterSpec_ParameterType_name = map[int32]string{ + 0: "PARAMETER_TYPE_UNSPECIFIED", + 1: "DOUBLE", + 2: "INTEGER", + 3: "CATEGORICAL", + 4: "DISCRETE", +} +var ParameterSpec_ParameterType_value = map[string]int32{ + "PARAMETER_TYPE_UNSPECIFIED": 0, + "DOUBLE": 1, + "INTEGER": 2, + "CATEGORICAL": 3, + "DISCRETE": 4, +} + +func (x ParameterSpec_ParameterType) String() string { + return proto.EnumName(ParameterSpec_ParameterType_name, int32(x)) +} +func (ParameterSpec_ParameterType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{2, 0} +} + +// The type of scaling that should be applied to this parameter. +type ParameterSpec_ScaleType int32 + +const ( + // By default, no scaling is applied. + ParameterSpec_NONE ParameterSpec_ScaleType = 0 + // Scales the feasible space to (0, 1) linearly. + ParameterSpec_UNIT_LINEAR_SCALE ParameterSpec_ScaleType = 1 + // Scales the feasible space logarithmically to (0, 1). The entire feasible + // space must be strictly positive. 
+ ParameterSpec_UNIT_LOG_SCALE ParameterSpec_ScaleType = 2 + // Scales the feasible space "reverse" logarithmically to (0, 1). The result + // is that values close to the top of the feasible space are spread out more + // than points near the bottom. The entire feasible space must be strictly + // positive. + ParameterSpec_UNIT_REVERSE_LOG_SCALE ParameterSpec_ScaleType = 3 +) + +var ParameterSpec_ScaleType_name = map[int32]string{ + 0: "NONE", + 1: "UNIT_LINEAR_SCALE", + 2: "UNIT_LOG_SCALE", + 3: "UNIT_REVERSE_LOG_SCALE", +} +var ParameterSpec_ScaleType_value = map[string]int32{ + "NONE": 0, + "UNIT_LINEAR_SCALE": 1, + "UNIT_LOG_SCALE": 2, + "UNIT_REVERSE_LOG_SCALE": 3, +} + +func (x ParameterSpec_ScaleType) String() string { + return proto.EnumName(ParameterSpec_ScaleType_name, int32(x)) +} +func (ParameterSpec_ScaleType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{2, 1} +} + +// The format used to separate data instances in the source files. +type PredictionInput_DataFormat int32 + +const ( + // Unspecified format. + PredictionInput_DATA_FORMAT_UNSPECIFIED PredictionInput_DataFormat = 0 + // The source file is a text file with instances separated by the + // new-line character. + PredictionInput_TEXT PredictionInput_DataFormat = 1 + // The source file is a TFRecord file. + PredictionInput_TF_RECORD PredictionInput_DataFormat = 2 + // The source file is a GZIP-compressed TFRecord file. + PredictionInput_TF_RECORD_GZIP PredictionInput_DataFormat = 3 +) + +var PredictionInput_DataFormat_name = map[int32]string{ + 0: "DATA_FORMAT_UNSPECIFIED", + 1: "TEXT", + 2: "TF_RECORD", + 3: "TF_RECORD_GZIP", +} +var PredictionInput_DataFormat_value = map[string]int32{ + "DATA_FORMAT_UNSPECIFIED": 0, + "TEXT": 1, + "TF_RECORD": 2, + "TF_RECORD_GZIP": 3, +} + +func (x PredictionInput_DataFormat) String() string { + return proto.EnumName(PredictionInput_DataFormat_name, int32(x)) +} +func (PredictionInput_DataFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{5, 0} +} + +// Describes the job state. +type Job_State int32 + +const ( + // The job state is unspecified. + Job_STATE_UNSPECIFIED Job_State = 0 + // The job has been just created and processing has not yet begun. + Job_QUEUED Job_State = 1 + // The service is preparing to run the job. + Job_PREPARING Job_State = 2 + // The job is in progress. + Job_RUNNING Job_State = 3 + // The job completed successfully. + Job_SUCCEEDED Job_State = 4 + // The job failed. + // `error_message` should contain the details of the failure. + Job_FAILED Job_State = 5 + // The job is being cancelled. + // `error_message` should describe the reason for the cancellation. + Job_CANCELLING Job_State = 6 + // The job has been cancelled. + // `error_message` should describe the reason for the cancellation. + Job_CANCELLED Job_State = 7 +) + +var Job_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "QUEUED", + 2: "PREPARING", + 3: "RUNNING", + 4: "SUCCEEDED", + 5: "FAILED", + 6: "CANCELLING", + 7: "CANCELLED", +} +var Job_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "QUEUED": 1, + "PREPARING": 2, + "RUNNING": 3, + "SUCCEEDED": 4, + "FAILED": 5, + "CANCELLING": 6, + "CANCELLED": 7, +} + +func (x Job_State) String() string { + return proto.EnumName(Job_State_name, int32(x)) +} +func (Job_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{7, 0} +} + +// Represents input parameters for a training job. 
+type TrainingInput struct { + // Required. Specifies the machine types, the number of replicas for workers + // and parameter servers. + ScaleTier TrainingInput_ScaleTier `protobuf:"varint,1,opt,name=scale_tier,json=scaleTier,proto3,enum=google.cloud.ml.v1.TrainingInput_ScaleTier" json:"scale_tier,omitempty"` + // Optional. Specifies the type of virtual machine to use for your training + // job's master worker. + // + // The following types are supported: + // + //
+ //   standard
+ //     A basic machine configuration suitable for training simple models with
+ //     small to moderate datasets.
+ //
+ //   large_model
+ //     A machine with a lot of memory, specially suited for parameter servers
+ //     when your model is large (having many hidden layers or layers with very
+ //     large numbers of nodes).
+ //
+ //   complex_model_s
+ //     A machine suitable for the master and workers of the cluster when your
+ //     model requires more computation than the standard machine can handle
+ //     satisfactorily.
+ //
+ //   complex_model_m
+ //     A machine with roughly twice the number of cores and roughly double the
+ //     memory of complex_model_s.
+ //
+ //   complex_model_l
+ //     A machine with roughly twice the number of cores and roughly double the
+ //     memory of complex_model_m.
+ //
+ //   standard_gpu
+ //     A machine equivalent to standard that also includes a GPU that you can
+ //     use in your trainer.
+ //
+ //   complex_model_m_gpu
+ //     A machine equivalent to complex_model_m that also includes four GPUs.
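+ //
+ //   For illustration, assuming the TrainingInput fields defined on this
+ //   struct and the machine types listed above, a CUSTOM scale tier
+ //   configuration might look roughly like:
+ //
+ //     in := &TrainingInput{
+ //         ScaleTier:   TrainingInput_CUSTOM,
+ //         MasterType:  "complex_model_m",
+ //         WorkerType:  "standard_gpu",
+ //         WorkerCount: 4, // example value; setting workers requires WorkerType
+ //     }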
+ // + // You must set this value when `scaleTier` is set to `CUSTOM`. + MasterType string `protobuf:"bytes,2,opt,name=master_type,json=masterType,proto3" json:"master_type,omitempty"` + // Optional. Specifies the type of virtual machine to use for your training + // job's worker nodes. + // + // The supported values are the same as those described in the entry for + // `masterType`. + // + // This value must be present when `scaleTier` is set to `CUSTOM` and + // `workerCount` is greater than zero. + WorkerType string `protobuf:"bytes,3,opt,name=worker_type,json=workerType,proto3" json:"worker_type,omitempty"` + // Optional. Specifies the type of virtual machine to use for your training + // job's parameter server. + // + // The supported values are the same as those described in the entry for + // `master_type`. + // + // This value must be present when `scaleTier` is set to `CUSTOM` and + // `parameter_server_count` is greater than zero. + ParameterServerType string `protobuf:"bytes,4,opt,name=parameter_server_type,json=parameterServerType,proto3" json:"parameter_server_type,omitempty"` + // Optional. The number of worker replicas to use for the training job. Each + // replica in the cluster will be of the type specified in `worker_type`. + // + // This value can only be used when `scale_tier` is set to `CUSTOM`. If you + // set this value, you must also set `worker_type`. + WorkerCount int64 `protobuf:"varint,5,opt,name=worker_count,json=workerCount,proto3" json:"worker_count,omitempty"` + // Optional. The number of parameter server replicas to use for the training + // job. Each replica in the cluster will be of the type specified in + // `parameter_server_type`. + // + // This value can only be used when `scale_tier` is set to `CUSTOM`.If you + // set this value, you must also set `parameter_server_type`. + ParameterServerCount int64 `protobuf:"varint,6,opt,name=parameter_server_count,json=parameterServerCount,proto3" json:"parameter_server_count,omitempty"` + // Required. The Google Cloud Storage location of the packages with + // the training program and any additional dependencies. + PackageUris []string `protobuf:"bytes,7,rep,name=package_uris,json=packageUris,proto3" json:"package_uris,omitempty"` + // Required. The Python module name to run after installing the packages. + PythonModule string `protobuf:"bytes,8,opt,name=python_module,json=pythonModule,proto3" json:"python_module,omitempty"` + // Optional. Command line arguments to pass to the program. + Args []string `protobuf:"bytes,10,rep,name=args,proto3" json:"args,omitempty"` + // Optional. The set of Hyperparameters to tune. + Hyperparameters *HyperparameterSpec `protobuf:"bytes,12,opt,name=hyperparameters,proto3" json:"hyperparameters,omitempty"` + // Required. The Google Compute Engine region to run the training job in. + Region string `protobuf:"bytes,14,opt,name=region,proto3" json:"region,omitempty"` + // Optional. A Google Cloud Storage path in which to store training outputs + // and other data needed for training. This path is passed to your TensorFlow + // program as the 'job_dir' command-line argument. The benefit of specifying + // this field is that Cloud ML validates the path for use in training. + JobDir string `protobuf:"bytes,16,opt,name=job_dir,json=jobDir,proto3" json:"job_dir,omitempty"` + // Optional. The Google Cloud ML runtime version to use for training. If not + // set, Google Cloud ML will choose the latest stable version. 
+ RuntimeVersion string `protobuf:"bytes,15,opt,name=runtime_version,json=runtimeVersion,proto3" json:"runtime_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrainingInput) Reset() { *m = TrainingInput{} } +func (m *TrainingInput) String() string { return proto.CompactTextString(m) } +func (*TrainingInput) ProtoMessage() {} +func (*TrainingInput) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{0} +} +func (m *TrainingInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrainingInput.Unmarshal(m, b) +} +func (m *TrainingInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrainingInput.Marshal(b, m, deterministic) +} +func (dst *TrainingInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrainingInput.Merge(dst, src) +} +func (m *TrainingInput) XXX_Size() int { + return xxx_messageInfo_TrainingInput.Size(m) +} +func (m *TrainingInput) XXX_DiscardUnknown() { + xxx_messageInfo_TrainingInput.DiscardUnknown(m) +} + +var xxx_messageInfo_TrainingInput proto.InternalMessageInfo + +func (m *TrainingInput) GetScaleTier() TrainingInput_ScaleTier { + if m != nil { + return m.ScaleTier + } + return TrainingInput_BASIC +} + +func (m *TrainingInput) GetMasterType() string { + if m != nil { + return m.MasterType + } + return "" +} + +func (m *TrainingInput) GetWorkerType() string { + if m != nil { + return m.WorkerType + } + return "" +} + +func (m *TrainingInput) GetParameterServerType() string { + if m != nil { + return m.ParameterServerType + } + return "" +} + +func (m *TrainingInput) GetWorkerCount() int64 { + if m != nil { + return m.WorkerCount + } + return 0 +} + +func (m *TrainingInput) GetParameterServerCount() int64 { + if m != nil { + return m.ParameterServerCount + } + return 0 +} + +func (m *TrainingInput) GetPackageUris() []string { + if m != nil { + return m.PackageUris + } + return nil +} + +func (m *TrainingInput) GetPythonModule() string { + if m != nil { + return m.PythonModule + } + return "" +} + +func (m *TrainingInput) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *TrainingInput) GetHyperparameters() *HyperparameterSpec { + if m != nil { + return m.Hyperparameters + } + return nil +} + +func (m *TrainingInput) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *TrainingInput) GetJobDir() string { + if m != nil { + return m.JobDir + } + return "" +} + +func (m *TrainingInput) GetRuntimeVersion() string { + if m != nil { + return m.RuntimeVersion + } + return "" +} + +// Represents a set of hyperparameters to optimize. +type HyperparameterSpec struct { + // Required. The type of goal to use for tuning. Available types are + // `MAXIMIZE` and `MINIMIZE`. + // + // Defaults to `MAXIMIZE`. + Goal HyperparameterSpec_GoalType `protobuf:"varint,1,opt,name=goal,proto3,enum=google.cloud.ml.v1.HyperparameterSpec_GoalType" json:"goal,omitempty"` + // Required. The set of parameters to tune. + Params []*ParameterSpec `protobuf:"bytes,2,rep,name=params,proto3" json:"params,omitempty"` + // Optional. How many training trials should be attempted to optimize + // the specified hyperparameters. + // + // Defaults to one. + MaxTrials int32 `protobuf:"varint,3,opt,name=max_trials,json=maxTrials,proto3" json:"max_trials,omitempty"` + // Optional. The number of training trials to run concurrently. 
+ // You can reduce the time it takes to perform hyperparameter tuning by adding + // trials in parallel. However, each trail only benefits from the information + // gained in completed trials. That means that a trial does not get access to + // the results of trials running at the same time, which could reduce the + // quality of the overall optimization. + // + // Each trial will use the same scale tier and machine types. + // + // Defaults to one. + MaxParallelTrials int32 `protobuf:"varint,4,opt,name=max_parallel_trials,json=maxParallelTrials,proto3" json:"max_parallel_trials,omitempty"` + // Optional. The Tensorflow summary tag name to use for optimizing trials. For + // current versions of Tensorflow, this tag name should exactly match what is + // shown in Tensorboard, including all scopes. For versions of Tensorflow + // prior to 0.12, this should be only the tag passed to tf.Summary. + // By default, "training/hptuning/metric" will be used. + HyperparameterMetricTag string `protobuf:"bytes,5,opt,name=hyperparameter_metric_tag,json=hyperparameterMetricTag,proto3" json:"hyperparameter_metric_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HyperparameterSpec) Reset() { *m = HyperparameterSpec{} } +func (m *HyperparameterSpec) String() string { return proto.CompactTextString(m) } +func (*HyperparameterSpec) ProtoMessage() {} +func (*HyperparameterSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{1} +} +func (m *HyperparameterSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HyperparameterSpec.Unmarshal(m, b) +} +func (m *HyperparameterSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HyperparameterSpec.Marshal(b, m, deterministic) +} +func (dst *HyperparameterSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_HyperparameterSpec.Merge(dst, src) +} +func (m *HyperparameterSpec) XXX_Size() int { + return xxx_messageInfo_HyperparameterSpec.Size(m) +} +func (m *HyperparameterSpec) XXX_DiscardUnknown() { + xxx_messageInfo_HyperparameterSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_HyperparameterSpec proto.InternalMessageInfo + +func (m *HyperparameterSpec) GetGoal() HyperparameterSpec_GoalType { + if m != nil { + return m.Goal + } + return HyperparameterSpec_GOAL_TYPE_UNSPECIFIED +} + +func (m *HyperparameterSpec) GetParams() []*ParameterSpec { + if m != nil { + return m.Params + } + return nil +} + +func (m *HyperparameterSpec) GetMaxTrials() int32 { + if m != nil { + return m.MaxTrials + } + return 0 +} + +func (m *HyperparameterSpec) GetMaxParallelTrials() int32 { + if m != nil { + return m.MaxParallelTrials + } + return 0 +} + +func (m *HyperparameterSpec) GetHyperparameterMetricTag() string { + if m != nil { + return m.HyperparameterMetricTag + } + return "" +} + +// Represents a single hyperparameter to optimize. +type ParameterSpec struct { + // Required. The parameter name must be unique amongst all ParameterConfigs in + // a HyperparameterSpec message. E.g., "learning_rate". + ParameterName string `protobuf:"bytes,1,opt,name=parameter_name,json=parameterName,proto3" json:"parameter_name,omitempty"` + // Required. The type of the parameter. + Type ParameterSpec_ParameterType `protobuf:"varint,4,opt,name=type,proto3,enum=google.cloud.ml.v1.ParameterSpec_ParameterType" json:"type,omitempty"` + // Required if type is `DOUBLE` or `INTEGER`. 
This field + // should be unset if type is `CATEGORICAL`. This value should be integers if + // type is INTEGER. + MinValue float64 `protobuf:"fixed64,2,opt,name=min_value,json=minValue,proto3" json:"min_value,omitempty"` + // Required if typeis `DOUBLE` or `INTEGER`. This field + // should be unset if type is `CATEGORICAL`. This value should be integers if + // type is `INTEGER`. + MaxValue float64 `protobuf:"fixed64,3,opt,name=max_value,json=maxValue,proto3" json:"max_value,omitempty"` + // Required if type is `CATEGORICAL`. The list of possible categories. + CategoricalValues []string `protobuf:"bytes,5,rep,name=categorical_values,json=categoricalValues,proto3" json:"categorical_values,omitempty"` + // Required if type is `DISCRETE`. + // A list of feasible points. + // The list should be in strictly increasing order. For instance, this + // parameter might have possible settings of 1.5, 2.5, and 4.0. This list + // should not contain more than 1,000 values. + DiscreteValues []float64 `protobuf:"fixed64,6,rep,packed,name=discrete_values,json=discreteValues,proto3" json:"discrete_values,omitempty"` + // Optional. How the parameter should be scaled to the hypercube. + // Leave unset for categorical parameters. + // Some kind of scaling is strongly recommended for real or integral + // parameters (e.g., `UNIT_LINEAR_SCALE`). + ScaleType ParameterSpec_ScaleType `protobuf:"varint,7,opt,name=scale_type,json=scaleType,proto3,enum=google.cloud.ml.v1.ParameterSpec_ScaleType" json:"scale_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParameterSpec) Reset() { *m = ParameterSpec{} } +func (m *ParameterSpec) String() string { return proto.CompactTextString(m) } +func (*ParameterSpec) ProtoMessage() {} +func (*ParameterSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{2} +} +func (m *ParameterSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParameterSpec.Unmarshal(m, b) +} +func (m *ParameterSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParameterSpec.Marshal(b, m, deterministic) +} +func (dst *ParameterSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParameterSpec.Merge(dst, src) +} +func (m *ParameterSpec) XXX_Size() int { + return xxx_messageInfo_ParameterSpec.Size(m) +} +func (m *ParameterSpec) XXX_DiscardUnknown() { + xxx_messageInfo_ParameterSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_ParameterSpec proto.InternalMessageInfo + +func (m *ParameterSpec) GetParameterName() string { + if m != nil { + return m.ParameterName + } + return "" +} + +func (m *ParameterSpec) GetType() ParameterSpec_ParameterType { + if m != nil { + return m.Type + } + return ParameterSpec_PARAMETER_TYPE_UNSPECIFIED +} + +func (m *ParameterSpec) GetMinValue() float64 { + if m != nil { + return m.MinValue + } + return 0 +} + +func (m *ParameterSpec) GetMaxValue() float64 { + if m != nil { + return m.MaxValue + } + return 0 +} + +func (m *ParameterSpec) GetCategoricalValues() []string { + if m != nil { + return m.CategoricalValues + } + return nil +} + +func (m *ParameterSpec) GetDiscreteValues() []float64 { + if m != nil { + return m.DiscreteValues + } + return nil +} + +func (m *ParameterSpec) GetScaleType() ParameterSpec_ScaleType { + if m != nil { + return m.ScaleType + } + return ParameterSpec_NONE +} + +// Represents the result of a single hyperparameter tuning trial from a +// training job. 
The TrainingOutput object that is returned on successful +// completion of a training job with hyperparameter tuning includes a list +// of HyperparameterOutput objects, one for each successful trial. +type HyperparameterOutput struct { + // The trial id for these results. + TrialId string `protobuf:"bytes,1,opt,name=trial_id,json=trialId,proto3" json:"trial_id,omitempty"` + // The hyperparameters given to this trial. + Hyperparameters map[string]string `protobuf:"bytes,2,rep,name=hyperparameters,proto3" json:"hyperparameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The final objective metric seen for this trial. + FinalMetric *HyperparameterOutput_HyperparameterMetric `protobuf:"bytes,3,opt,name=final_metric,json=finalMetric,proto3" json:"final_metric,omitempty"` + // All recorded object metrics for this trial. + AllMetrics []*HyperparameterOutput_HyperparameterMetric `protobuf:"bytes,4,rep,name=all_metrics,json=allMetrics,proto3" json:"all_metrics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HyperparameterOutput) Reset() { *m = HyperparameterOutput{} } +func (m *HyperparameterOutput) String() string { return proto.CompactTextString(m) } +func (*HyperparameterOutput) ProtoMessage() {} +func (*HyperparameterOutput) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{3} +} +func (m *HyperparameterOutput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HyperparameterOutput.Unmarshal(m, b) +} +func (m *HyperparameterOutput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HyperparameterOutput.Marshal(b, m, deterministic) +} +func (dst *HyperparameterOutput) XXX_Merge(src proto.Message) { + xxx_messageInfo_HyperparameterOutput.Merge(dst, src) +} +func (m *HyperparameterOutput) XXX_Size() int { + return xxx_messageInfo_HyperparameterOutput.Size(m) +} +func (m *HyperparameterOutput) XXX_DiscardUnknown() { + xxx_messageInfo_HyperparameterOutput.DiscardUnknown(m) +} + +var xxx_messageInfo_HyperparameterOutput proto.InternalMessageInfo + +func (m *HyperparameterOutput) GetTrialId() string { + if m != nil { + return m.TrialId + } + return "" +} + +func (m *HyperparameterOutput) GetHyperparameters() map[string]string { + if m != nil { + return m.Hyperparameters + } + return nil +} + +func (m *HyperparameterOutput) GetFinalMetric() *HyperparameterOutput_HyperparameterMetric { + if m != nil { + return m.FinalMetric + } + return nil +} + +func (m *HyperparameterOutput) GetAllMetrics() []*HyperparameterOutput_HyperparameterMetric { + if m != nil { + return m.AllMetrics + } + return nil +} + +// An observed value of a metric. +type HyperparameterOutput_HyperparameterMetric struct { + // The global training step for this metric. + TrainingStep int64 `protobuf:"varint,1,opt,name=training_step,json=trainingStep,proto3" json:"training_step,omitempty"` + // The objective value at this training step. 
+ ObjectiveValue float64 `protobuf:"fixed64,2,opt,name=objective_value,json=objectiveValue,proto3" json:"objective_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HyperparameterOutput_HyperparameterMetric) Reset() { + *m = HyperparameterOutput_HyperparameterMetric{} +} +func (m *HyperparameterOutput_HyperparameterMetric) String() string { return proto.CompactTextString(m) } +func (*HyperparameterOutput_HyperparameterMetric) ProtoMessage() {} +func (*HyperparameterOutput_HyperparameterMetric) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{3, 0} +} +func (m *HyperparameterOutput_HyperparameterMetric) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HyperparameterOutput_HyperparameterMetric.Unmarshal(m, b) +} +func (m *HyperparameterOutput_HyperparameterMetric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HyperparameterOutput_HyperparameterMetric.Marshal(b, m, deterministic) +} +func (dst *HyperparameterOutput_HyperparameterMetric) XXX_Merge(src proto.Message) { + xxx_messageInfo_HyperparameterOutput_HyperparameterMetric.Merge(dst, src) +} +func (m *HyperparameterOutput_HyperparameterMetric) XXX_Size() int { + return xxx_messageInfo_HyperparameterOutput_HyperparameterMetric.Size(m) +} +func (m *HyperparameterOutput_HyperparameterMetric) XXX_DiscardUnknown() { + xxx_messageInfo_HyperparameterOutput_HyperparameterMetric.DiscardUnknown(m) +} + +var xxx_messageInfo_HyperparameterOutput_HyperparameterMetric proto.InternalMessageInfo + +func (m *HyperparameterOutput_HyperparameterMetric) GetTrainingStep() int64 { + if m != nil { + return m.TrainingStep + } + return 0 +} + +func (m *HyperparameterOutput_HyperparameterMetric) GetObjectiveValue() float64 { + if m != nil { + return m.ObjectiveValue + } + return 0 +} + +// Represents results of a training job. Output only. +type TrainingOutput struct { + // The number of hyperparameter tuning trials that completed successfully. + // Only set for hyperparameter tuning jobs. + CompletedTrialCount int64 `protobuf:"varint,1,opt,name=completed_trial_count,json=completedTrialCount,proto3" json:"completed_trial_count,omitempty"` + // Results for individual Hyperparameter trials. + // Only set for hyperparameter tuning jobs. + Trials []*HyperparameterOutput `protobuf:"bytes,2,rep,name=trials,proto3" json:"trials,omitempty"` + // The amount of ML units consumed by the job. + ConsumedMlUnits float64 `protobuf:"fixed64,3,opt,name=consumed_ml_units,json=consumedMlUnits,proto3" json:"consumed_ml_units,omitempty"` + // Whether this job is a hyperparameter tuning job. 
+ IsHyperparameterTuningJob bool `protobuf:"varint,4,opt,name=is_hyperparameter_tuning_job,json=isHyperparameterTuningJob,proto3" json:"is_hyperparameter_tuning_job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrainingOutput) Reset() { *m = TrainingOutput{} } +func (m *TrainingOutput) String() string { return proto.CompactTextString(m) } +func (*TrainingOutput) ProtoMessage() {} +func (*TrainingOutput) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{4} +} +func (m *TrainingOutput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrainingOutput.Unmarshal(m, b) +} +func (m *TrainingOutput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrainingOutput.Marshal(b, m, deterministic) +} +func (dst *TrainingOutput) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrainingOutput.Merge(dst, src) +} +func (m *TrainingOutput) XXX_Size() int { + return xxx_messageInfo_TrainingOutput.Size(m) +} +func (m *TrainingOutput) XXX_DiscardUnknown() { + xxx_messageInfo_TrainingOutput.DiscardUnknown(m) +} + +var xxx_messageInfo_TrainingOutput proto.InternalMessageInfo + +func (m *TrainingOutput) GetCompletedTrialCount() int64 { + if m != nil { + return m.CompletedTrialCount + } + return 0 +} + +func (m *TrainingOutput) GetTrials() []*HyperparameterOutput { + if m != nil { + return m.Trials + } + return nil +} + +func (m *TrainingOutput) GetConsumedMlUnits() float64 { + if m != nil { + return m.ConsumedMlUnits + } + return 0 +} + +func (m *TrainingOutput) GetIsHyperparameterTuningJob() bool { + if m != nil { + return m.IsHyperparameterTuningJob + } + return false +} + +// Represents input parameters for a prediction job. +type PredictionInput struct { + // Required. The model or the version to use for prediction. + // + // Types that are valid to be assigned to ModelVersion: + // *PredictionInput_ModelName + // *PredictionInput_VersionName + // *PredictionInput_Uri + ModelVersion isPredictionInput_ModelVersion `protobuf_oneof:"model_version"` + // Required. The format of the input data files. + DataFormat PredictionInput_DataFormat `protobuf:"varint,3,opt,name=data_format,json=dataFormat,proto3,enum=google.cloud.ml.v1.PredictionInput_DataFormat" json:"data_format,omitempty"` + // Required. The Google Cloud Storage location of the input data files. + // May contain wildcards. + InputPaths []string `protobuf:"bytes,4,rep,name=input_paths,json=inputPaths,proto3" json:"input_paths,omitempty"` + // Required. The output Google Cloud Storage location. + OutputPath string `protobuf:"bytes,5,opt,name=output_path,json=outputPath,proto3" json:"output_path,omitempty"` + // Optional. The maximum number of workers to be used for parallel processing. + // Defaults to 10 if not specified. + MaxWorkerCount int64 `protobuf:"varint,6,opt,name=max_worker_count,json=maxWorkerCount,proto3" json:"max_worker_count,omitempty"` + // Required. The Google Compute Engine region to run the prediction job in. + Region string `protobuf:"bytes,7,opt,name=region,proto3" json:"region,omitempty"` + // Optional. The Google Cloud ML runtime version to use for this batch + // prediction. If not set, Google Cloud ML will pick the runtime version used + // during the CreateVersion request for this model version, or choose the + // latest stable version when model version information is not available + // such as when the model is specified by uri. 
+ RuntimeVersion string `protobuf:"bytes,8,opt,name=runtime_version,json=runtimeVersion,proto3" json:"runtime_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PredictionInput) Reset() { *m = PredictionInput{} } +func (m *PredictionInput) String() string { return proto.CompactTextString(m) } +func (*PredictionInput) ProtoMessage() {} +func (*PredictionInput) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{5} +} +func (m *PredictionInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PredictionInput.Unmarshal(m, b) +} +func (m *PredictionInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PredictionInput.Marshal(b, m, deterministic) +} +func (dst *PredictionInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_PredictionInput.Merge(dst, src) +} +func (m *PredictionInput) XXX_Size() int { + return xxx_messageInfo_PredictionInput.Size(m) +} +func (m *PredictionInput) XXX_DiscardUnknown() { + xxx_messageInfo_PredictionInput.DiscardUnknown(m) +} + +var xxx_messageInfo_PredictionInput proto.InternalMessageInfo + +type isPredictionInput_ModelVersion interface { + isPredictionInput_ModelVersion() +} + +type PredictionInput_ModelName struct { + ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3,oneof"` +} + +type PredictionInput_VersionName struct { + VersionName string `protobuf:"bytes,2,opt,name=version_name,json=versionName,proto3,oneof"` +} + +type PredictionInput_Uri struct { + Uri string `protobuf:"bytes,9,opt,name=uri,proto3,oneof"` +} + +func (*PredictionInput_ModelName) isPredictionInput_ModelVersion() {} + +func (*PredictionInput_VersionName) isPredictionInput_ModelVersion() {} + +func (*PredictionInput_Uri) isPredictionInput_ModelVersion() {} + +func (m *PredictionInput) GetModelVersion() isPredictionInput_ModelVersion { + if m != nil { + return m.ModelVersion + } + return nil +} + +func (m *PredictionInput) GetModelName() string { + if x, ok := m.GetModelVersion().(*PredictionInput_ModelName); ok { + return x.ModelName + } + return "" +} + +func (m *PredictionInput) GetVersionName() string { + if x, ok := m.GetModelVersion().(*PredictionInput_VersionName); ok { + return x.VersionName + } + return "" +} + +func (m *PredictionInput) GetUri() string { + if x, ok := m.GetModelVersion().(*PredictionInput_Uri); ok { + return x.Uri + } + return "" +} + +func (m *PredictionInput) GetDataFormat() PredictionInput_DataFormat { + if m != nil { + return m.DataFormat + } + return PredictionInput_DATA_FORMAT_UNSPECIFIED +} + +func (m *PredictionInput) GetInputPaths() []string { + if m != nil { + return m.InputPaths + } + return nil +} + +func (m *PredictionInput) GetOutputPath() string { + if m != nil { + return m.OutputPath + } + return "" +} + +func (m *PredictionInput) GetMaxWorkerCount() int64 { + if m != nil { + return m.MaxWorkerCount + } + return 0 +} + +func (m *PredictionInput) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *PredictionInput) GetRuntimeVersion() string { + if m != nil { + return m.RuntimeVersion + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PredictionInput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PredictionInput_OneofMarshaler, _PredictionInput_OneofUnmarshaler, _PredictionInput_OneofSizer, []interface{}{ + (*PredictionInput_ModelName)(nil), + (*PredictionInput_VersionName)(nil), + (*PredictionInput_Uri)(nil), + } +} + +func _PredictionInput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PredictionInput) + // model_version + switch x := m.ModelVersion.(type) { + case *PredictionInput_ModelName: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ModelName) + case *PredictionInput_VersionName: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.VersionName) + case *PredictionInput_Uri: + b.EncodeVarint(9<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uri) + case nil: + default: + return fmt.Errorf("PredictionInput.ModelVersion has unexpected type %T", x) + } + return nil +} + +func _PredictionInput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PredictionInput) + switch tag { + case 1: // model_version.model_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ModelVersion = &PredictionInput_ModelName{x} + return true, err + case 2: // model_version.version_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ModelVersion = &PredictionInput_VersionName{x} + return true, err + case 9: // model_version.uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ModelVersion = &PredictionInput_Uri{x} + return true, err + default: + return false, nil + } +} + +func _PredictionInput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PredictionInput) + // model_version + switch x := m.ModelVersion.(type) { + case *PredictionInput_ModelName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ModelName))) + n += len(x.ModelName) + case *PredictionInput_VersionName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.VersionName))) + n += len(x.VersionName) + case *PredictionInput_Uri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uri))) + n += len(x.Uri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents results of a prediction job. +type PredictionOutput struct { + // The output Google Cloud Storage location provided at the job creation time. + OutputPath string `protobuf:"bytes,1,opt,name=output_path,json=outputPath,proto3" json:"output_path,omitempty"` + // The number of generated predictions. + PredictionCount int64 `protobuf:"varint,2,opt,name=prediction_count,json=predictionCount,proto3" json:"prediction_count,omitempty"` + // The number of data instances which resulted in errors. + ErrorCount int64 `protobuf:"varint,3,opt,name=error_count,json=errorCount,proto3" json:"error_count,omitempty"` + // Node hours used by the batch prediction job. 
+ NodeHours float64 `protobuf:"fixed64,4,opt,name=node_hours,json=nodeHours,proto3" json:"node_hours,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PredictionOutput) Reset() { *m = PredictionOutput{} } +func (m *PredictionOutput) String() string { return proto.CompactTextString(m) } +func (*PredictionOutput) ProtoMessage() {} +func (*PredictionOutput) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{6} +} +func (m *PredictionOutput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PredictionOutput.Unmarshal(m, b) +} +func (m *PredictionOutput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PredictionOutput.Marshal(b, m, deterministic) +} +func (dst *PredictionOutput) XXX_Merge(src proto.Message) { + xxx_messageInfo_PredictionOutput.Merge(dst, src) +} +func (m *PredictionOutput) XXX_Size() int { + return xxx_messageInfo_PredictionOutput.Size(m) +} +func (m *PredictionOutput) XXX_DiscardUnknown() { + xxx_messageInfo_PredictionOutput.DiscardUnknown(m) +} + +var xxx_messageInfo_PredictionOutput proto.InternalMessageInfo + +func (m *PredictionOutput) GetOutputPath() string { + if m != nil { + return m.OutputPath + } + return "" +} + +func (m *PredictionOutput) GetPredictionCount() int64 { + if m != nil { + return m.PredictionCount + } + return 0 +} + +func (m *PredictionOutput) GetErrorCount() int64 { + if m != nil { + return m.ErrorCount + } + return 0 +} + +func (m *PredictionOutput) GetNodeHours() float64 { + if m != nil { + return m.NodeHours + } + return 0 +} + +// Represents a training or prediction job. +type Job struct { + // Required. The user-specified id of the job. + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Required. Parameters to create a job. + // + // Types that are valid to be assigned to Input: + // *Job_TrainingInput + // *Job_PredictionInput + Input isJob_Input `protobuf_oneof:"input"` + // Output only. When the job was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. When the job processing was started. + StartTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. When the job processing was completed. + EndTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The detailed state of a job. + State Job_State `protobuf:"varint,7,opt,name=state,proto3,enum=google.cloud.ml.v1.Job_State" json:"state,omitempty"` + // Output only. The details of a failure or a cancellation. + ErrorMessage string `protobuf:"bytes,8,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + // Output only. The current result of the job. 
+ // + // Types that are valid to be assigned to Output: + // *Job_TrainingOutput + // *Job_PredictionOutput + Output isJob_Output `protobuf_oneof:"output"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{7} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +type isJob_Input interface { + isJob_Input() +} + +type Job_TrainingInput struct { + TrainingInput *TrainingInput `protobuf:"bytes,2,opt,name=training_input,json=trainingInput,proto3,oneof"` +} + +type Job_PredictionInput struct { + PredictionInput *PredictionInput `protobuf:"bytes,3,opt,name=prediction_input,json=predictionInput,proto3,oneof"` +} + +func (*Job_TrainingInput) isJob_Input() {} + +func (*Job_PredictionInput) isJob_Input() {} + +func (m *Job) GetInput() isJob_Input { + if m != nil { + return m.Input + } + return nil +} + +func (m *Job) GetTrainingInput() *TrainingInput { + if x, ok := m.GetInput().(*Job_TrainingInput); ok { + return x.TrainingInput + } + return nil +} + +func (m *Job) GetPredictionInput() *PredictionInput { + if x, ok := m.GetInput().(*Job_PredictionInput); ok { + return x.PredictionInput + } + return nil +} + +func (m *Job) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Job) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Job) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *Job) GetState() Job_State { + if m != nil { + return m.State + } + return Job_STATE_UNSPECIFIED +} + +func (m *Job) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +type isJob_Output interface { + isJob_Output() +} + +type Job_TrainingOutput struct { + TrainingOutput *TrainingOutput `protobuf:"bytes,9,opt,name=training_output,json=trainingOutput,proto3,oneof"` +} + +type Job_PredictionOutput struct { + PredictionOutput *PredictionOutput `protobuf:"bytes,10,opt,name=prediction_output,json=predictionOutput,proto3,oneof"` +} + +func (*Job_TrainingOutput) isJob_Output() {} + +func (*Job_PredictionOutput) isJob_Output() {} + +func (m *Job) GetOutput() isJob_Output { + if m != nil { + return m.Output + } + return nil +} + +func (m *Job) GetTrainingOutput() *TrainingOutput { + if x, ok := m.GetOutput().(*Job_TrainingOutput); ok { + return x.TrainingOutput + } + return nil +} + +func (m *Job) GetPredictionOutput() *PredictionOutput { + if x, ok := m.GetOutput().(*Job_PredictionOutput); ok { + return x.PredictionOutput + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{ + (*Job_TrainingInput)(nil), + (*Job_PredictionInput)(nil), + (*Job_TrainingOutput)(nil), + (*Job_PredictionOutput)(nil), + } +} + +func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Job) + // input + switch x := m.Input.(type) { + case *Job_TrainingInput: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TrainingInput); err != nil { + return err + } + case *Job_PredictionInput: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PredictionInput); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.Input has unexpected type %T", x) + } + // output + switch x := m.Output.(type) { + case *Job_TrainingOutput: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TrainingOutput); err != nil { + return err + } + case *Job_PredictionOutput: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PredictionOutput); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.Output has unexpected type %T", x) + } + return nil +} + +func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Job) + switch tag { + case 2: // input.training_input + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TrainingInput) + err := b.DecodeMessage(msg) + m.Input = &Job_TrainingInput{msg} + return true, err + case 3: // input.prediction_input + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PredictionInput) + err := b.DecodeMessage(msg) + m.Input = &Job_PredictionInput{msg} + return true, err + case 9: // output.training_output + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TrainingOutput) + err := b.DecodeMessage(msg) + m.Output = &Job_TrainingOutput{msg} + return true, err + case 10: // output.prediction_output + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PredictionOutput) + err := b.DecodeMessage(msg) + m.Output = &Job_PredictionOutput{msg} + return true, err + default: + return false, nil + } +} + +func _Job_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Job) + // input + switch x := m.Input.(type) { + case *Job_TrainingInput: + s := proto.Size(x.TrainingInput) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_PredictionInput: + s := proto.Size(x.PredictionInput) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // output + switch x := m.Output.(type) { + case *Job_TrainingOutput: + s := proto.Size(x.TrainingOutput) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_PredictionOutput: + s := proto.Size(x.PredictionOutput) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for the CreateJob method. +type CreateJobRequest struct { + // Required. The project name. + // + // Authorization: requires `Editor` role on the specified project. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The job to create. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateJobRequest) Reset() { *m = CreateJobRequest{} } +func (m *CreateJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateJobRequest) ProtoMessage() {} +func (*CreateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{8} +} +func (m *CreateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateJobRequest.Unmarshal(m, b) +} +func (m *CreateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateJobRequest.Merge(dst, src) +} +func (m *CreateJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateJobRequest.Size(m) +} +func (m *CreateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateJobRequest proto.InternalMessageInfo + +func (m *CreateJobRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Request message for the ListJobs method. +type ListJobsRequest struct { + // Required. The name of the project for which to list jobs. + // + // Authorization: requires `Viewer` role on the specified project. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Specifies the subset of jobs to retrieve. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. A page token to request the next page of results. + // + // You get the token from the `next_page_token` field of the response from + // the previous call. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. The number of jobs to retrieve per "page" of results. If there + // are more remaining results than this number, the response message will + // contain a valid value in the `next_page_token` field. + // + // The default value is 20, and the maximum page size is 100. 
+ PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{9} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for the ListJobs method. +type ListJobsResponse struct { + // The list of jobs. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // Optional. Pass this token as the `page_token` field of the request for a + // subsequent call. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{10} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the GetJob method. +type GetJobRequest struct { + // Required. The name of the job to get the description of. + // + // Authorization: requires `Viewer` role on the parent project. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{11} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the CancelJob method. +type CancelJobRequest struct { + // Required. The name of the job to cancel. + // + // Authorization: requires `Editor` role on the parent project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelJobRequest) Reset() { *m = CancelJobRequest{} } +func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) } +func (*CancelJobRequest) ProtoMessage() {} +func (*CancelJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_96b7f358a19eb3fe, []int{12} +} +func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b) +} +func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic) +} +func (dst *CancelJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelJobRequest.Merge(dst, src) +} +func (m *CancelJobRequest) XXX_Size() int { + return xxx_messageInfo_CancelJobRequest.Size(m) +} +func (m *CancelJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo + +func (m *CancelJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*TrainingInput)(nil), "google.cloud.ml.v1.TrainingInput") + proto.RegisterType((*HyperparameterSpec)(nil), "google.cloud.ml.v1.HyperparameterSpec") + proto.RegisterType((*ParameterSpec)(nil), "google.cloud.ml.v1.ParameterSpec") + proto.RegisterType((*HyperparameterOutput)(nil), "google.cloud.ml.v1.HyperparameterOutput") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.ml.v1.HyperparameterOutput.HyperparametersEntry") + proto.RegisterType((*HyperparameterOutput_HyperparameterMetric)(nil), "google.cloud.ml.v1.HyperparameterOutput.HyperparameterMetric") + proto.RegisterType((*TrainingOutput)(nil), "google.cloud.ml.v1.TrainingOutput") + proto.RegisterType((*PredictionInput)(nil), "google.cloud.ml.v1.PredictionInput") + proto.RegisterType((*PredictionOutput)(nil), 
"google.cloud.ml.v1.PredictionOutput") + proto.RegisterType((*Job)(nil), "google.cloud.ml.v1.Job") + proto.RegisterType((*CreateJobRequest)(nil), "google.cloud.ml.v1.CreateJobRequest") + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.ml.v1.ListJobsRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.ml.v1.ListJobsResponse") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.ml.v1.GetJobRequest") + proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.ml.v1.CancelJobRequest") + proto.RegisterEnum("google.cloud.ml.v1.TrainingInput_ScaleTier", TrainingInput_ScaleTier_name, TrainingInput_ScaleTier_value) + proto.RegisterEnum("google.cloud.ml.v1.HyperparameterSpec_GoalType", HyperparameterSpec_GoalType_name, HyperparameterSpec_GoalType_value) + proto.RegisterEnum("google.cloud.ml.v1.ParameterSpec_ParameterType", ParameterSpec_ParameterType_name, ParameterSpec_ParameterType_value) + proto.RegisterEnum("google.cloud.ml.v1.ParameterSpec_ScaleType", ParameterSpec_ScaleType_name, ParameterSpec_ScaleType_value) + proto.RegisterEnum("google.cloud.ml.v1.PredictionInput_DataFormat", PredictionInput_DataFormat_name, PredictionInput_DataFormat_value) + proto.RegisterEnum("google.cloud.ml.v1.Job_State", Job_State_name, Job_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// JobServiceClient is the client API for JobService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type JobServiceClient interface { + // Creates a training or a batch prediction job. + CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Lists the jobs in the project. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Describes a job. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Cancels a running job. + CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type jobServiceClient struct { + cc *grpc.ClientConn +} + +func NewJobServiceClient(cc *grpc.ClientConn) JobServiceClient { + return &jobServiceClient{cc} +} + +func (c *jobServiceClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.JobService/CreateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.JobService/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.JobService/GetJob", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.JobService/CancelJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// JobServiceServer is the server API for JobService service. +type JobServiceServer interface { + // Creates a training or a batch prediction job. + CreateJob(context.Context, *CreateJobRequest) (*Job, error) + // Lists the jobs in the project. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Describes a job. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Cancels a running job. + CancelJob(context.Context, *CancelJobRequest) (*empty.Empty, error) +} + +func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { + s.RegisterService(&_JobService_serviceDesc, srv) +} + +func _JobService_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).CreateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.JobService/CreateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).CreateJob(ctx, req.(*CreateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.JobService/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.JobService/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).CancelJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.JobService/CancelJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).CancelJob(ctx, req.(*CancelJobRequest)) + } + return interceptor(ctx, in, 
info, handler) +} + +var _JobService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.ml.v1.JobService", + HandlerType: (*JobServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateJob", + Handler: _JobService_CreateJob_Handler, + }, + { + MethodName: "ListJobs", + Handler: _JobService_ListJobs_Handler, + }, + { + MethodName: "GetJob", + Handler: _JobService_GetJob_Handler, + }, + { + MethodName: "CancelJob", + Handler: _JobService_CancelJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/ml/v1/job_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/ml/v1/job_service.proto", fileDescriptor_job_service_96b7f358a19eb3fe) +} + +var fileDescriptor_job_service_96b7f358a19eb3fe = []byte{ + // 2070 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xdb, 0x6e, 0x1b, 0xc9, + 0x11, 0x15, 0xaf, 0x22, 0x8b, 0x12, 0x39, 0x6e, 0x5b, 0x36, 0x4d, 0xdb, 0x6b, 0x79, 0xe4, 0x38, + 0xb2, 0x17, 0x21, 0x21, 0xed, 0x06, 0xc8, 0x7a, 0xb1, 0x48, 0x28, 0x72, 0x2c, 0x51, 0x10, 0x29, + 0xa6, 0x39, 0x74, 0x36, 0x46, 0x90, 0x49, 0x93, 0x6c, 0xd3, 0x23, 0xcf, 0x2d, 0x33, 0x4d, 0x45, + 0xda, 0x85, 0x81, 0x20, 0x08, 0xf2, 0x03, 0x79, 0x0f, 0xf2, 0x4d, 0xc9, 0x1f, 0x04, 0x01, 0xf2, + 0x01, 0x79, 0x0e, 0x10, 0xf4, 0x85, 0xc3, 0x8b, 0x28, 0xd9, 0x48, 0xde, 0xd8, 0xa7, 0x4e, 0x55, + 0x75, 0x57, 0x55, 0x57, 0xd7, 0x10, 0x9e, 0x8e, 0x7d, 0x7f, 0xec, 0xd0, 0xda, 0xd0, 0xf1, 0x27, + 0xa3, 0x9a, 0xeb, 0xd4, 0xce, 0xf7, 0x6a, 0x67, 0xfe, 0xc0, 0x8a, 0x68, 0x78, 0x6e, 0x0f, 0x69, + 0x35, 0x08, 0x7d, 0xe6, 0x23, 0x24, 0x59, 0x55, 0xc1, 0xaa, 0xba, 0x4e, 0xf5, 0x7c, 0xaf, 0xf2, + 0x50, 0x69, 0x92, 0xc0, 0xae, 0x11, 0xcf, 0xf3, 0x19, 0x61, 0xb6, 0xef, 0x45, 0x52, 0xa3, 0xb2, + 0x35, 0x2f, 0x9d, 0xb0, 0x77, 0x0a, 0x7e, 0xa0, 0x60, 0xb1, 0x1a, 0x4c, 0xde, 0xd6, 0xa8, 0x1b, + 0xb0, 0x4b, 0x25, 0x7c, 0xbc, 0x2c, 0x64, 0xb6, 0x4b, 0x23, 0x46, 0xdc, 0x40, 0x12, 0xf4, 0x3f, + 0x66, 0x60, 0xd3, 0x0c, 0x89, 0xed, 0xd9, 0xde, 0xb8, 0xe5, 0x05, 0x13, 0x86, 0x8e, 0x01, 0xa2, + 0x21, 0x71, 0xa8, 0xc5, 0x6c, 0x1a, 0x96, 0x13, 0xdb, 0x89, 0xdd, 0xe2, 0xfe, 0xe7, 0xd5, 0xab, + 0xbb, 0xad, 0x2e, 0xa8, 0x55, 0x7b, 0x5c, 0xc7, 0xb4, 0x69, 0x88, 0xf3, 0xd1, 0xf4, 0x27, 0x7a, + 0x0c, 0x05, 0x97, 0x44, 0x8c, 0x86, 0x16, 0xbb, 0x0c, 0x68, 0x39, 0xb9, 0x9d, 0xd8, 0xcd, 0x63, + 0x90, 0x90, 0x79, 0x19, 0x50, 0x4e, 0xf8, 0x9d, 0x1f, 0xbe, 0x9f, 0x12, 0x52, 0x92, 0x20, 0x21, + 0x41, 0xd8, 0x87, 0xad, 0x80, 0x84, 0xc4, 0xa5, 0xdc, 0x08, 0x8f, 0xe0, 0x94, 0x9a, 0x16, 0xd4, + 0xdb, 0xb1, 0xb0, 0x27, 0x64, 0x42, 0xe7, 0x09, 0x6c, 0x28, 0xa3, 0x43, 0x7f, 0xe2, 0xb1, 0x72, + 0x66, 0x3b, 0xb1, 0x9b, 0xc2, 0xca, 0x51, 0x83, 0x43, 0xe8, 0x4b, 0xb8, 0x7b, 0xc5, 0xac, 0x24, + 0x67, 0x05, 0xf9, 0xce, 0x92, 0x5d, 0xa9, 0xf5, 0x04, 0x36, 0x02, 0x32, 0x7c, 0x4f, 0xc6, 0xd4, + 0x9a, 0x84, 0x76, 0x54, 0x5e, 0xdf, 0x4e, 0xed, 0xe6, 0x71, 0x41, 0x61, 0xfd, 0xd0, 0x8e, 0xd0, + 0x0e, 0x6c, 0x06, 0x97, 0xec, 0x9d, 0xef, 0x59, 0xae, 0x3f, 0x9a, 0x38, 0xb4, 0x9c, 0x13, 0xfb, + 0xdc, 0x90, 0x60, 0x5b, 0x60, 0x08, 0x41, 0x9a, 0x84, 0xe3, 0xa8, 0x0c, 0x42, 0x5f, 0xfc, 0x46, + 0x5d, 0x28, 0xbd, 0xbb, 0x0c, 0x68, 0x18, 0x3b, 0x8e, 0xca, 0x1b, 0xdb, 0x89, 0xdd, 0xc2, 0xfe, + 0xb3, 0x55, 0xb1, 0x3f, 0x5a, 0xa0, 0xf6, 0x02, 0x3a, 0xc4, 0xcb, 0xea, 0xe8, 0x2e, 0x64, 0x43, + 0x3a, 0xb6, 0x7d, 0xaf, 0x5c, 0x14, 0x7b, 0x50, 0x2b, 0x74, 0x0f, 0xd6, 0x79, 0x39, 0x8e, 0xec, + 0xb0, 0xac, 0x49, 0xc1, 0x99, 0x3f, 0x68, 0xda, 0x21, 0xfa, 0x21, 0x94, 0xc2, 0x89, 
0xc7, 0x2b, + 0xc4, 0x3a, 0xa7, 0x61, 0xc4, 0x35, 0x4b, 0x82, 0x50, 0x54, 0xf0, 0x6b, 0x89, 0xea, 0x5d, 0xc8, + 0xc7, 0xe9, 0x46, 0x79, 0xc8, 0x1c, 0xd4, 0x7b, 0xad, 0x86, 0xb6, 0x86, 0x8a, 0x00, 0x3d, 0xb3, + 0xde, 0x69, 0xd6, 0x71, 0xd3, 0xda, 0xd3, 0x12, 0x68, 0x13, 0xf2, 0x5d, 0x6c, 0xb4, 0x5b, 0xfd, + 0xb6, 0xb5, 0xa7, 0xa5, 0xf8, 0x52, 0x30, 0xad, 0xc3, 0x6e, 0x5f, 0xcb, 0x22, 0x80, 0x6c, 0xa3, + 0xdf, 0x33, 0x4f, 0xdb, 0x5a, 0x46, 0xff, 0x47, 0x12, 0xd0, 0xd5, 0x33, 0xa1, 0x06, 0xa4, 0xc7, + 0x3e, 0x71, 0x54, 0x15, 0xd6, 0x3e, 0x2d, 0x12, 0xd5, 0x43, 0x9f, 0x38, 0xbc, 0x10, 0xb0, 0x50, + 0x46, 0x5f, 0x41, 0x56, 0xc8, 0xa3, 0x72, 0x72, 0x3b, 0xb5, 0x5b, 0xd8, 0x7f, 0xb2, 0xca, 0x4c, + 0x77, 0x21, 0x96, 0x4a, 0x01, 0x3d, 0x02, 0x70, 0xc9, 0x85, 0xc5, 0x42, 0x9b, 0x38, 0x91, 0xa8, + 0xce, 0x0c, 0xce, 0xbb, 0xe4, 0xc2, 0x14, 0x00, 0xaa, 0xc2, 0x6d, 0x2e, 0xe6, 0x64, 0xc7, 0xa1, + 0xce, 0x94, 0x97, 0x16, 0xbc, 0x5b, 0x2e, 0xb9, 0xe8, 0x2a, 0x89, 0xe2, 0xbf, 0x84, 0xfb, 0x8b, + 0x49, 0xb2, 0x5c, 0xca, 0x42, 0x7b, 0x68, 0x31, 0x32, 0x16, 0x55, 0x9a, 0xc7, 0xf7, 0x16, 0x09, + 0x6d, 0x21, 0x37, 0xc9, 0x58, 0xaf, 0x43, 0x6e, 0x7a, 0x2e, 0x74, 0x1f, 0xb6, 0x0e, 0x4f, 0xeb, + 0x27, 0x96, 0xf9, 0xcb, 0xae, 0x61, 0xf5, 0x3b, 0xbd, 0xae, 0xd1, 0x68, 0xbd, 0x6a, 0x19, 0x4d, + 0x6d, 0x0d, 0x6d, 0x40, 0xae, 0x5d, 0xff, 0xb6, 0xd5, 0x6e, 0xbd, 0x31, 0xb4, 0x84, 0x58, 0xb5, + 0x3a, 0x72, 0x95, 0xd4, 0xff, 0x9a, 0x86, 0xcd, 0x85, 0x73, 0xa2, 0x1f, 0x40, 0x71, 0xb6, 0x17, + 0x8f, 0xb8, 0x54, 0x44, 0x3a, 0x8f, 0x37, 0x63, 0xb4, 0x43, 0x5c, 0xca, 0xd3, 0x10, 0xdf, 0xb9, + 0x6b, 0xd2, 0xb0, 0x60, 0x77, 0xb6, 0x92, 0x69, 0xe0, 0xca, 0xe8, 0x01, 0xe4, 0x5d, 0xdb, 0xb3, + 0xce, 0x89, 0x33, 0x91, 0x9d, 0x20, 0x81, 0x73, 0xae, 0xed, 0xbd, 0xe6, 0x6b, 0x21, 0x24, 0x17, + 0x4a, 0x98, 0x52, 0x42, 0x72, 0x21, 0x85, 0x3f, 0x02, 0x34, 0x24, 0x8c, 0x8e, 0xfd, 0xd0, 0x1e, + 0x12, 0x47, 0x92, 0xa2, 0x72, 0x46, 0x5c, 0x9e, 0x5b, 0x73, 0x12, 0xc1, 0x8e, 0x78, 0x19, 0x8f, + 0xec, 0x68, 0x18, 0x52, 0x46, 0xa7, 0xdc, 0xec, 0x76, 0x6a, 0x37, 0x81, 0x8b, 0x53, 0x58, 0x11, + 0x67, 0x9d, 0x8e, 0x1f, 0x6e, 0xfd, 0xfa, 0x4e, 0xb7, 0x78, 0x38, 0x59, 0xfa, 0xfc, 0x60, 0xaa, + 0xd3, 0x5d, 0x06, 0x54, 0x1f, 0xcf, 0x85, 0x56, 0xe4, 0xe8, 0x33, 0xa8, 0x74, 0xeb, 0xb8, 0xde, + 0x36, 0x4c, 0x03, 0xaf, 0x4a, 0x14, 0x40, 0xb6, 0x79, 0xda, 0x3f, 0x38, 0xe1, 0x69, 0x2a, 0xc0, + 0x7a, 0xab, 0x63, 0x1a, 0x87, 0x06, 0xd6, 0x92, 0xa8, 0x04, 0x85, 0x46, 0xdd, 0x34, 0x0e, 0x4f, + 0x71, 0xab, 0x51, 0x3f, 0xd1, 0x52, 0x3c, 0x89, 0xcd, 0x56, 0xaf, 0x81, 0x0d, 0xd3, 0xd0, 0xd2, + 0xfa, 0xaf, 0xa6, 0x77, 0x8f, 0x3b, 0xc9, 0x41, 0xba, 0x73, 0xda, 0x31, 0xb4, 0x35, 0xb4, 0x05, + 0xb7, 0xfa, 0x9d, 0x96, 0x69, 0x9d, 0xb4, 0x3a, 0x46, 0x1d, 0x5b, 0xbd, 0x46, 0x5d, 0x58, 0x46, + 0x50, 0x94, 0xf0, 0xe9, 0xa1, 0xc2, 0x92, 0xa8, 0x02, 0x77, 0x05, 0x86, 0x8d, 0xd7, 0x06, 0xee, + 0x19, 0x73, 0xb2, 0x94, 0xfe, 0xa7, 0x34, 0xdc, 0x59, 0xbc, 0x51, 0xa7, 0x13, 0xc6, 0x5f, 0x85, + 0xfb, 0x90, 0x13, 0xd5, 0x6d, 0xd9, 0x23, 0x55, 0x23, 0xeb, 0x62, 0xdd, 0x1a, 0xa1, 0xf1, 0xd5, + 0xce, 0x25, 0x2f, 0xda, 0x37, 0x1f, 0xbf, 0xaf, 0xd2, 0xfa, 0x12, 0x18, 0x19, 0x1e, 0x0b, 0x2f, + 0xaf, 0x36, 0xb4, 0xdf, 0xc0, 0xc6, 0x5b, 0xdb, 0x23, 0x8e, 0xba, 0x35, 0xa2, 0x4e, 0xfe, 0x77, + 0x2f, 0xf2, 0x6a, 0xe1, 0x82, 0x30, 0x29, 0x17, 0xe8, 0xd7, 0x50, 0x20, 0xce, 0xd4, 0x3e, 0xbf, + 0xc8, 0xa9, 0xff, 0xdf, 0x01, 0x10, 0x47, 0x99, 0x8f, 0x2a, 0xa3, 0xe5, 0xe8, 0x2a, 0xbf, 0x3b, + 0xb0, 0xc9, 0xd4, 0x6b, 0x6a, 0x45, 0x8c, 0x06, 0x22, 0xc4, 0x29, 0xbc, 0x31, 0x05, 0x7b, 0x8c, + 0x06, 0xbc, 
0xae, 0xfd, 0xc1, 0x19, 0x1d, 0x32, 0xfb, 0x9c, 0x2e, 0x5c, 0xa3, 0x62, 0x0c, 0x8b, + 0xc2, 0xae, 0x1c, 0x2c, 0x7b, 0x91, 0x01, 0x45, 0x1a, 0xa4, 0xde, 0xd3, 0x4b, 0x95, 0x3e, 0xfe, + 0x13, 0xdd, 0x81, 0xcc, 0xcc, 0x50, 0x1e, 0xcb, 0xc5, 0xcb, 0xe4, 0x4f, 0x12, 0xfa, 0xbf, 0x13, + 0x50, 0x9c, 0x3e, 0xf0, 0xaa, 0x04, 0xf6, 0x61, 0x6b, 0xe8, 0xbb, 0x81, 0x43, 0x19, 0x1d, 0xc9, + 0x56, 0xa7, 0x9e, 0x4c, 0xb9, 0xd9, 0xdb, 0xb1, 0x50, 0x74, 0x3b, 0xf9, 0x62, 0xfe, 0x0c, 0xb2, + 0xaa, 0x29, 0xca, 0x92, 0xd8, 0xfd, 0xd4, 0x58, 0x62, 0xa5, 0x87, 0x5e, 0xc0, 0xad, 0xa1, 0xef, + 0x45, 0x13, 0x97, 0x8e, 0x2c, 0xd7, 0xb1, 0x26, 0x9e, 0xcd, 0x22, 0xd5, 0x21, 0x4a, 0x53, 0x41, + 0xdb, 0xe9, 0x73, 0x18, 0xfd, 0x14, 0x1e, 0xda, 0x91, 0xb5, 0xd4, 0x62, 0xd9, 0x44, 0x84, 0xf5, + 0xcc, 0x1f, 0x88, 0xfe, 0x95, 0xc3, 0xf7, 0xed, 0x68, 0xd1, 0xa3, 0x29, 0x18, 0xc7, 0xfe, 0x40, + 0xff, 0x5b, 0x0a, 0x4a, 0xdd, 0x90, 0x8e, 0xec, 0x21, 0x1f, 0xbc, 0xe4, 0x3c, 0xf4, 0x18, 0xc0, + 0xf5, 0x47, 0xd4, 0x99, 0xeb, 0x8f, 0x47, 0x6b, 0x38, 0x2f, 0x30, 0xd1, 0x1d, 0x77, 0x60, 0x43, + 0x3d, 0x97, 0x92, 0x92, 0x54, 0x94, 0x82, 0x42, 0x05, 0x09, 0x41, 0x6a, 0x12, 0xda, 0xe5, 0xbc, + 0x92, 0xf1, 0x05, 0x3a, 0x85, 0xc2, 0x88, 0x30, 0x62, 0xbd, 0xf5, 0x43, 0x97, 0x30, 0x71, 0xa8, + 0xe2, 0x7e, 0x75, 0x65, 0x03, 0x5a, 0xdc, 0x53, 0xb5, 0x49, 0x18, 0x79, 0x25, 0xb4, 0x30, 0x8c, + 0xe2, 0xdf, 0x7c, 0x9a, 0xb2, 0xb9, 0xdc, 0x0a, 0x08, 0x7b, 0x27, 0xcb, 0x37, 0x8f, 0x41, 0x40, + 0x5d, 0x8e, 0x70, 0x82, 0x2f, 0xc2, 0x2b, 0x18, 0xea, 0xc9, 0x01, 0x09, 0x71, 0x06, 0xda, 0x05, + 0x8d, 0xf7, 0xe1, 0x85, 0xf1, 0x49, 0x4e, 0x44, 0x45, 0x97, 0x5c, 0xfc, 0x62, 0x6e, 0x82, 0x9a, + 0x4d, 0x17, 0xeb, 0x0b, 0xd3, 0xc5, 0x8a, 0x21, 0x22, 0xb7, 0x72, 0x88, 0x78, 0x0d, 0x30, 0x3b, + 0x06, 0x7a, 0x00, 0xf7, 0x9a, 0x75, 0xb3, 0x6e, 0xbd, 0x3a, 0xc5, 0xed, 0xba, 0xb9, 0xd4, 0x2b, + 0x73, 0x90, 0x36, 0x8d, 0x6f, 0x4d, 0x39, 0x51, 0x98, 0xaf, 0x2c, 0x6c, 0x34, 0x4e, 0x71, 0x53, + 0x4b, 0xf2, 0xf6, 0x16, 0x2f, 0xad, 0xc3, 0x37, 0xad, 0xae, 0x96, 0x3a, 0x28, 0xc1, 0xa6, 0xcc, + 0x97, 0x72, 0xaf, 0xff, 0x25, 0x01, 0xda, 0x2c, 0x80, 0xaa, 0x98, 0x97, 0x22, 0x91, 0xb8, 0x12, + 0x89, 0xe7, 0xa0, 0x05, 0xb1, 0x92, 0x8a, 0x44, 0x52, 0x44, 0xa2, 0x34, 0xc3, 0x65, 0x28, 0x1e, + 0x43, 0x81, 0x86, 0xa1, 0x3f, 0x8d, 0x57, 0x4a, 0xb0, 0x40, 0x40, 0x92, 0xf0, 0x08, 0xc0, 0xf3, + 0x47, 0xd4, 0x7a, 0xe7, 0x4f, 0x42, 0x39, 0x1e, 0x24, 0x70, 0x9e, 0x23, 0x47, 0x1c, 0xd0, 0xff, + 0x93, 0x81, 0xd4, 0xb1, 0x3f, 0x40, 0x5b, 0xc0, 0x27, 0xb1, 0x59, 0x87, 0xcd, 0x9c, 0xf9, 0x83, + 0xd6, 0x08, 0x1d, 0x43, 0x31, 0x6e, 0x0e, 0x22, 0x97, 0x62, 0x1f, 0xd7, 0xcc, 0x31, 0x0b, 0x43, + 0xf9, 0xd1, 0x1a, 0x8e, 0xfb, 0x8a, 0x2c, 0xe6, 0xee, 0xc2, 0xa9, 0xa4, 0x35, 0xd9, 0x46, 0x77, + 0x3e, 0xa1, 0xee, 0x8e, 0xd6, 0xe6, 0x0f, 0x2f, 0x2d, 0x7e, 0x0d, 0x85, 0x61, 0x48, 0x09, 0xe3, + 0xdf, 0x0b, 0xae, 0x1c, 0x11, 0x0a, 0xfb, 0x95, 0xa9, 0xb1, 0xe9, 0x77, 0x47, 0xd5, 0x9c, 0x7e, + 0x77, 0x60, 0x90, 0x74, 0x0e, 0xa0, 0xaf, 0x00, 0x22, 0x46, 0x42, 0x26, 0x75, 0x33, 0x1f, 0xd5, + 0xcd, 0x0b, 0xb6, 0x50, 0xfd, 0x31, 0xe4, 0xa8, 0x37, 0x92, 0x8a, 0xd9, 0x8f, 0x2a, 0xae, 0x53, + 0x6f, 0x24, 0xd4, 0xbe, 0x80, 0x4c, 0xc4, 0x08, 0x9b, 0x3e, 0xf7, 0x8f, 0x56, 0x9d, 0xfa, 0xd8, + 0x1f, 0x54, 0x7b, 0x9c, 0x84, 0x25, 0x97, 0xb7, 0x67, 0x99, 0x60, 0x97, 0x46, 0x11, 0x19, 0xc7, + 0x43, 0xbd, 0x00, 0xdb, 0x12, 0x43, 0x6d, 0x28, 0xc5, 0x69, 0x92, 0x75, 0x24, 0x6e, 0x7b, 0x61, + 0x5f, 0xbf, 0x29, 0x4f, 0xb2, 0x1c, 0x8f, 0x12, 0x38, 0xce, 0xb1, 0x2a, 0xd0, 0x1e, 0xdc, 0x9a, + 0xcb, 0x94, 0x32, 0x08, 0xc2, 0xe0, 
0xd3, 0x9b, 0x53, 0x15, 0x9b, 0x9c, 0x4b, 0xb5, 0xc4, 0xf4, + 0xdf, 0x27, 0x20, 0x23, 0x4e, 0xc6, 0xe7, 0x85, 0x9e, 0x59, 0x37, 0x57, 0x4c, 0x25, 0x3f, 0xef, + 0x1b, 0x7d, 0xa3, 0x19, 0x4f, 0xef, 0xdd, 0x3a, 0x6e, 0x75, 0x0e, 0xb5, 0x24, 0x1f, 0x52, 0x70, + 0xbf, 0xd3, 0xe1, 0x0b, 0x31, 0xca, 0xf7, 0xfa, 0x8d, 0x86, 0x61, 0x34, 0x8d, 0xa6, 0x96, 0xe6, + 0x6a, 0xaf, 0xea, 0xad, 0x13, 0xa3, 0xa9, 0x65, 0xf8, 0x47, 0x40, 0xa3, 0xde, 0x69, 0x18, 0x27, + 0x27, 0x9c, 0x9a, 0xe5, 0x54, 0xb5, 0x36, 0x9a, 0xda, 0xfa, 0xc1, 0x3a, 0x64, 0x44, 0xd9, 0x1d, + 0xe4, 0x20, 0x2b, 0x4f, 0xa5, 0xf7, 0x41, 0x6b, 0x88, 0x9a, 0x38, 0xf6, 0x07, 0x98, 0xfe, 0x76, + 0x42, 0x23, 0xd1, 0x5e, 0x02, 0x12, 0x52, 0xf5, 0xba, 0xe4, 0xb1, 0x5a, 0xa1, 0xe7, 0x90, 0xe2, + 0x9d, 0x5c, 0xde, 0x80, 0x7b, 0xd7, 0x64, 0x0f, 0x73, 0x8e, 0xfe, 0x01, 0x4a, 0x27, 0x76, 0xc4, + 0x8e, 0xfd, 0x41, 0xf4, 0x31, 0xab, 0x77, 0x21, 0xfb, 0xd6, 0x76, 0x18, 0x0d, 0xd5, 0x43, 0xa8, + 0x56, 0xfc, 0xe2, 0x06, 0xfc, 0x6b, 0x8f, 0xf9, 0xef, 0xa9, 0xa7, 0x3e, 0x39, 0xf3, 0x1c, 0x31, + 0x39, 0xc0, 0xa7, 0x56, 0x21, 0x8e, 0xec, 0xef, 0x64, 0xf5, 0x66, 0x70, 0x8e, 0x03, 0x3d, 0xfb, + 0x3b, 0x3e, 0x11, 0x6a, 0x33, 0xf7, 0x51, 0xe0, 0x7b, 0x11, 0x45, 0x9f, 0x43, 0xfa, 0xcc, 0x1f, + 0x44, 0xe5, 0x84, 0x78, 0x0c, 0xaf, 0xdd, 0xbe, 0x20, 0xa1, 0x67, 0x50, 0xf2, 0xe8, 0x05, 0x6f, + 0x50, 0xf1, 0x0e, 0xe4, 0xee, 0x36, 0x39, 0xdc, 0x9d, 0xee, 0x42, 0xdf, 0x81, 0xcd, 0x43, 0xca, + 0xe6, 0x62, 0x87, 0x20, 0x3d, 0x37, 0xcb, 0x8b, 0xdf, 0xfa, 0x33, 0xd0, 0x1a, 0xc4, 0x1b, 0x52, + 0xe7, 0x66, 0xde, 0xfe, 0xbf, 0x52, 0x00, 0xc7, 0xfe, 0xa0, 0x27, 0xff, 0xab, 0x40, 0x13, 0xc8, + 0xc7, 0xa9, 0x41, 0x2b, 0xeb, 0x6e, 0x39, 0x73, 0x95, 0xeb, 0x4e, 0xa5, 0x3f, 0xff, 0xc3, 0xdf, + 0xff, 0xf9, 0xe7, 0xe4, 0x8e, 0xfe, 0xb0, 0x76, 0xbe, 0x57, 0xfb, 0x5e, 0x46, 0xfe, 0x9b, 0x20, + 0xf4, 0xf9, 0xec, 0x12, 0xd5, 0x5e, 0x7c, 0xa8, 0xf1, 0x53, 0xbf, 0xe4, 0xa9, 0x43, 0xdf, 0x43, + 0x6e, 0x1a, 0x3b, 0xb4, 0xb2, 0x31, 0x2d, 0x25, 0xb6, 0xf2, 0xf4, 0x66, 0x92, 0x0c, 0xbf, 0xfe, + 0x54, 0xec, 0xe0, 0x33, 0x74, 0xe3, 0x0e, 0xd0, 0x19, 0x64, 0x65, 0x3c, 0xd1, 0xca, 0x0e, 0xbb, + 0x10, 0xeb, 0xeb, 0x4f, 0xbb, 0xe8, 0x8b, 0xc7, 0x76, 0xce, 0x93, 0x70, 0x54, 0x7b, 0xf1, 0x01, + 0x5d, 0x42, 0x3e, 0x4e, 0xcb, 0x35, 0xf1, 0x5d, 0xca, 0x5a, 0xe5, 0xee, 0x95, 0x36, 0x67, 0xb8, + 0x01, 0xbb, 0xd4, 0xab, 0xc2, 0xe1, 0xae, 0xbe, 0x73, 0x93, 0xc3, 0x97, 0x43, 0x61, 0xee, 0x65, + 0xe2, 0xc5, 0x01, 0x85, 0xca, 0xd0, 0x77, 0xaf, 0xb8, 0x24, 0x81, 0x5d, 0x3d, 0xdf, 0x3b, 0x28, + 0xcd, 0x8a, 0xa0, 0xcb, 0xfd, 0x74, 0x13, 0x6f, 0xbe, 0x54, 0xd4, 0xb1, 0xef, 0x10, 0x6f, 0x5c, + 0xf5, 0xc3, 0x71, 0x6d, 0x4c, 0x3d, 0xb1, 0x8b, 0x9a, 0x14, 0x91, 0xc0, 0x8e, 0xe6, 0xff, 0xf6, + 0xfa, 0xda, 0x75, 0x06, 0x59, 0x41, 0xf8, 0xe2, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x4c, 0x7b, + 0x72, 0xd5, 0x16, 0x13, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/model_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/model_service.pb.go new file mode 100644 index 0000000..18dc81e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/model_service.pb.go @@ -0,0 +1,1365 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/ml/v1/model_service.proto + +package ml // import "google.golang.org/genproto/googleapis/cloud/ml/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/serviceconfig" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a machine learning solution. +// +// A model can have multiple versions, each of which is a deployed, trained +// model ready to receive prediction requests. The model itself is just a +// container. +type Model struct { + // Required. The name specified for the model when it was created. + // + // The model name must be unique within the project it is created in. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The description specified for the model when it was created. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Output only. The default version of the model. This version will be used to + // handle prediction requests that do not specify a version. + // + // You can change the default version by calling + // [projects.methods.versions.setDefault](/ml/reference/rest/v1/projects.models.versions/setDefault). + DefaultVersion *Version `protobuf:"bytes,3,opt,name=default_version,json=defaultVersion,proto3" json:"default_version,omitempty"` + // Optional. The list of regions where the model is going to be deployed. + // Currently only one region per model is supported. + // Defaults to 'us-central1' if nothing is set. + Regions []string `protobuf:"bytes,4,rep,name=regions,proto3" json:"regions,omitempty"` + // Optional. If true, enables StackDriver Logging for online prediction. + // Default is false. 
+ OnlinePredictionLogging bool `protobuf:"varint,5,opt,name=online_prediction_logging,json=onlinePredictionLogging,proto3" json:"online_prediction_logging,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Model) Reset() { *m = Model{} } +func (m *Model) String() string { return proto.CompactTextString(m) } +func (*Model) ProtoMessage() {} +func (*Model) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{0} +} +func (m *Model) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Model.Unmarshal(m, b) +} +func (m *Model) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Model.Marshal(b, m, deterministic) +} +func (dst *Model) XXX_Merge(src proto.Message) { + xxx_messageInfo_Model.Merge(dst, src) +} +func (m *Model) XXX_Size() int { + return xxx_messageInfo_Model.Size(m) +} +func (m *Model) XXX_DiscardUnknown() { + xxx_messageInfo_Model.DiscardUnknown(m) +} + +var xxx_messageInfo_Model proto.InternalMessageInfo + +func (m *Model) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Model) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Model) GetDefaultVersion() *Version { + if m != nil { + return m.DefaultVersion + } + return nil +} + +func (m *Model) GetRegions() []string { + if m != nil { + return m.Regions + } + return nil +} + +func (m *Model) GetOnlinePredictionLogging() bool { + if m != nil { + return m.OnlinePredictionLogging + } + return false +} + +// Represents a version of the model. +// +// Each version is a trained model deployed in the cloud, ready to handle +// prediction requests. A model can have multiple versions. You can get +// information about all of the versions of a given model by calling +// [projects.models.versions.list](/ml/reference/rest/v1/projects.models.versions/list). +type Version struct { + // Required.The name specified for the version when it was created. + // + // The version name must be unique within the model it is created in. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. The description specified for the version when it was created. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Output only. If true, this version will be used to handle prediction + // requests that do not specify a version. + // + // You can change the default version by calling + // [projects.methods.versions.setDefault](/ml/reference/rest/v1/projects.models.versions/setDefault). + IsDefault bool `protobuf:"varint,3,opt,name=is_default,json=isDefault,proto3" json:"is_default,omitempty"` + // Required. The Google Cloud Storage location of the trained model used to + // create the version. See the + // [overview of model deployment](/ml/docs/concepts/deployment-overview) for + // more informaiton. + // + // When passing Version to + // [projects.models.versions.create](/ml/reference/rest/v1/projects.models.versions/create) + // the model service uses the specified location as the source of the model. + // Once deployed, the model version is hosted by the prediction service, so + // this location is useful only as a historical record. + DeploymentUri string `protobuf:"bytes,4,opt,name=deployment_uri,json=deploymentUri,proto3" json:"deployment_uri,omitempty"` + // Output only. The time the version was created. 
+ CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time the version was last used for prediction. + LastUseTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=last_use_time,json=lastUseTime,proto3" json:"last_use_time,omitempty"` + // Optional. The Google Cloud ML runtime version to use for this deployment. + // If not set, Google Cloud ML will choose a version. + RuntimeVersion string `protobuf:"bytes,8,opt,name=runtime_version,json=runtimeVersion,proto3" json:"runtime_version,omitempty"` + // Optional. Manually select the number of nodes to use for serving the + // model. If unset (i.e., by default), the number of nodes used to serve + // the model automatically scales with traffic. However, care should be + // taken to ramp up traffic according to the model's ability to scale. If + // your model needs to handle bursts of traffic beyond it's ability to + // scale, it is recommended you set this field appropriately. + ManualScaling *ManualScaling `protobuf:"bytes,9,opt,name=manual_scaling,json=manualScaling,proto3" json:"manual_scaling,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{1} +} +func (m *Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Version.Unmarshal(m, b) +} +func (m *Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Version.Marshal(b, m, deterministic) +} +func (dst *Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_Version.Merge(dst, src) +} +func (m *Version) XXX_Size() int { + return xxx_messageInfo_Version.Size(m) +} +func (m *Version) XXX_DiscardUnknown() { + xxx_messageInfo_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_Version proto.InternalMessageInfo + +func (m *Version) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Version) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Version) GetIsDefault() bool { + if m != nil { + return m.IsDefault + } + return false +} + +func (m *Version) GetDeploymentUri() string { + if m != nil { + return m.DeploymentUri + } + return "" +} + +func (m *Version) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Version) GetLastUseTime() *timestamp.Timestamp { + if m != nil { + return m.LastUseTime + } + return nil +} + +func (m *Version) GetRuntimeVersion() string { + if m != nil { + return m.RuntimeVersion + } + return "" +} + +func (m *Version) GetManualScaling() *ManualScaling { + if m != nil { + return m.ManualScaling + } + return nil +} + +// Options for manually scaling a model. +type ManualScaling struct { + // The number of nodes to allocate for this model. These nodes are always up, + // starting from the time the model is deployed, so the cost of operating + // this model will be proportional to nodes * number of hours since + // deployment. 
+ Nodes int32 `protobuf:"varint,1,opt,name=nodes,proto3" json:"nodes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ManualScaling) Reset() { *m = ManualScaling{} } +func (m *ManualScaling) String() string { return proto.CompactTextString(m) } +func (*ManualScaling) ProtoMessage() {} +func (*ManualScaling) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{2} +} +func (m *ManualScaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ManualScaling.Unmarshal(m, b) +} +func (m *ManualScaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ManualScaling.Marshal(b, m, deterministic) +} +func (dst *ManualScaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_ManualScaling.Merge(dst, src) +} +func (m *ManualScaling) XXX_Size() int { + return xxx_messageInfo_ManualScaling.Size(m) +} +func (m *ManualScaling) XXX_DiscardUnknown() { + xxx_messageInfo_ManualScaling.DiscardUnknown(m) +} + +var xxx_messageInfo_ManualScaling proto.InternalMessageInfo + +func (m *ManualScaling) GetNodes() int32 { + if m != nil { + return m.Nodes + } + return 0 +} + +// Request message for the CreateModel method. +type CreateModelRequest struct { + // Required. The project name. + // + // Authorization: requires `Editor` role on the specified project. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The model to create. + Model *Model `protobuf:"bytes,2,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateModelRequest) Reset() { *m = CreateModelRequest{} } +func (m *CreateModelRequest) String() string { return proto.CompactTextString(m) } +func (*CreateModelRequest) ProtoMessage() {} +func (*CreateModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{3} +} +func (m *CreateModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateModelRequest.Unmarshal(m, b) +} +func (m *CreateModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateModelRequest.Marshal(b, m, deterministic) +} +func (dst *CreateModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateModelRequest.Merge(dst, src) +} +func (m *CreateModelRequest) XXX_Size() int { + return xxx_messageInfo_CreateModelRequest.Size(m) +} +func (m *CreateModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateModelRequest proto.InternalMessageInfo + +func (m *CreateModelRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateModelRequest) GetModel() *Model { + if m != nil { + return m.Model + } + return nil +} + +// Request message for the ListModels method. +type ListModelsRequest struct { + // Required. The name of the project whose models are to be listed. + // + // Authorization: requires `Viewer` role on the specified project. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. A page token to request the next page of results. + // + // You get the token from the `next_page_token` field of the response from + // the previous call. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. The number of models to retrieve per "page" of results. If there + // are more remaining results than this number, the response message will + // contain a valid value in the `next_page_token` field. + // + // The default value is 20, and the maximum page size is 100. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsRequest) Reset() { *m = ListModelsRequest{} } +func (m *ListModelsRequest) String() string { return proto.CompactTextString(m) } +func (*ListModelsRequest) ProtoMessage() {} +func (*ListModelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{4} +} +func (m *ListModelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsRequest.Unmarshal(m, b) +} +func (m *ListModelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsRequest.Marshal(b, m, deterministic) +} +func (dst *ListModelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsRequest.Merge(dst, src) +} +func (m *ListModelsRequest) XXX_Size() int { + return xxx_messageInfo_ListModelsRequest.Size(m) +} +func (m *ListModelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsRequest proto.InternalMessageInfo + +func (m *ListModelsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListModelsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListModelsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for the ListModels method. +type ListModelsResponse struct { + // The list of models. + Models []*Model `protobuf:"bytes,1,rep,name=models,proto3" json:"models,omitempty"` + // Optional. Pass this token as the `page_token` field of the request for a + // subsequent call. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListModelsResponse) Reset() { *m = ListModelsResponse{} } +func (m *ListModelsResponse) String() string { return proto.CompactTextString(m) } +func (*ListModelsResponse) ProtoMessage() {} +func (*ListModelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{5} +} +func (m *ListModelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListModelsResponse.Unmarshal(m, b) +} +func (m *ListModelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListModelsResponse.Marshal(b, m, deterministic) +} +func (dst *ListModelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListModelsResponse.Merge(dst, src) +} +func (m *ListModelsResponse) XXX_Size() int { + return xxx_messageInfo_ListModelsResponse.Size(m) +} +func (m *ListModelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListModelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListModelsResponse proto.InternalMessageInfo + +func (m *ListModelsResponse) GetModels() []*Model { + if m != nil { + return m.Models + } + return nil +} + +func (m *ListModelsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the GetModel method. +type GetModelRequest struct { + // Required. The name of the model. + // + // Authorization: requires `Viewer` role on the parent project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetModelRequest) Reset() { *m = GetModelRequest{} } +func (m *GetModelRequest) String() string { return proto.CompactTextString(m) } +func (*GetModelRequest) ProtoMessage() {} +func (*GetModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{6} +} +func (m *GetModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetModelRequest.Unmarshal(m, b) +} +func (m *GetModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetModelRequest.Marshal(b, m, deterministic) +} +func (dst *GetModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetModelRequest.Merge(dst, src) +} +func (m *GetModelRequest) XXX_Size() int { + return xxx_messageInfo_GetModelRequest.Size(m) +} +func (m *GetModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetModelRequest proto.InternalMessageInfo + +func (m *GetModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the DeleteModel method. +type DeleteModelRequest struct { + // Required. The name of the model. + // + // Authorization: requires `Editor` role on the parent project. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteModelRequest) Reset() { *m = DeleteModelRequest{} } +func (m *DeleteModelRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteModelRequest) ProtoMessage() {} +func (*DeleteModelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{7} +} +func (m *DeleteModelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteModelRequest.Unmarshal(m, b) +} +func (m *DeleteModelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteModelRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteModelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteModelRequest.Merge(dst, src) +} +func (m *DeleteModelRequest) XXX_Size() int { + return xxx_messageInfo_DeleteModelRequest.Size(m) +} +func (m *DeleteModelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteModelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteModelRequest proto.InternalMessageInfo + +func (m *DeleteModelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Uploads the provided trained model version to Cloud Machine Learning. +type CreateVersionRequest struct { + // Required. The name of the model. + // + // Authorization: requires `Editor` role on the parent project. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The version details. + Version *Version `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVersionRequest) Reset() { *m = CreateVersionRequest{} } +func (m *CreateVersionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateVersionRequest) ProtoMessage() {} +func (*CreateVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{8} +} +func (m *CreateVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVersionRequest.Unmarshal(m, b) +} +func (m *CreateVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVersionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVersionRequest.Merge(dst, src) +} +func (m *CreateVersionRequest) XXX_Size() int { + return xxx_messageInfo_CreateVersionRequest.Size(m) +} +func (m *CreateVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVersionRequest proto.InternalMessageInfo + +func (m *CreateVersionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateVersionRequest) GetVersion() *Version { + if m != nil { + return m.Version + } + return nil +} + +// Request message for the ListVersions method. +type ListVersionsRequest struct { + // Required. The name of the model for which to list the version. + // + // Authorization: requires `Viewer` role on the parent project. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. A page token to request the next page of results. 
+ // + // You get the token from the `next_page_token` field of the response from + // the previous call. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. The number of versions to retrieve per "page" of results. If + // there are more remaining results than this number, the response message + // will contain a valid value in the `next_page_token` field. + // + // The default value is 20, and the maximum page size is 100. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVersionsRequest) Reset() { *m = ListVersionsRequest{} } +func (m *ListVersionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListVersionsRequest) ProtoMessage() {} +func (*ListVersionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{9} +} +func (m *ListVersionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVersionsRequest.Unmarshal(m, b) +} +func (m *ListVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVersionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListVersionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVersionsRequest.Merge(dst, src) +} +func (m *ListVersionsRequest) XXX_Size() int { + return xxx_messageInfo_ListVersionsRequest.Size(m) +} +func (m *ListVersionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListVersionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVersionsRequest proto.InternalMessageInfo + +func (m *ListVersionsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListVersionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListVersionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for the ListVersions method. +type ListVersionsResponse struct { + // The list of versions. + Versions []*Version `protobuf:"bytes,1,rep,name=versions,proto3" json:"versions,omitempty"` + // Optional. Pass this token as the `page_token` field of the request for a + // subsequent call. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVersionsResponse) Reset() { *m = ListVersionsResponse{} } +func (m *ListVersionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListVersionsResponse) ProtoMessage() {} +func (*ListVersionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{10} +} +func (m *ListVersionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVersionsResponse.Unmarshal(m, b) +} +func (m *ListVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVersionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListVersionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVersionsResponse.Merge(dst, src) +} +func (m *ListVersionsResponse) XXX_Size() int { + return xxx_messageInfo_ListVersionsResponse.Size(m) +} +func (m *ListVersionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListVersionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVersionsResponse proto.InternalMessageInfo + +func (m *ListVersionsResponse) GetVersions() []*Version { + if m != nil { + return m.Versions + } + return nil +} + +func (m *ListVersionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the GetVersion method. +type GetVersionRequest struct { + // Required. The name of the version. + // + // Authorization: requires `Viewer` role on the parent project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVersionRequest) Reset() { *m = GetVersionRequest{} } +func (m *GetVersionRequest) String() string { return proto.CompactTextString(m) } +func (*GetVersionRequest) ProtoMessage() {} +func (*GetVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{11} +} +func (m *GetVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVersionRequest.Unmarshal(m, b) +} +func (m *GetVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVersionRequest.Marshal(b, m, deterministic) +} +func (dst *GetVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVersionRequest.Merge(dst, src) +} +func (m *GetVersionRequest) XXX_Size() int { + return xxx_messageInfo_GetVersionRequest.Size(m) +} +func (m *GetVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVersionRequest proto.InternalMessageInfo + +func (m *GetVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the DeleteVerionRequest method. +type DeleteVersionRequest struct { + // Required. The name of the version. You can get the names of all the + // versions of a model by calling + // [projects.models.versions.list](/ml/reference/rest/v1/projects.models.versions/list). + // + // Authorization: requires `Editor` role on the parent project. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteVersionRequest) Reset() { *m = DeleteVersionRequest{} } +func (m *DeleteVersionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteVersionRequest) ProtoMessage() {} +func (*DeleteVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{12} +} +func (m *DeleteVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteVersionRequest.Unmarshal(m, b) +} +func (m *DeleteVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteVersionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteVersionRequest.Merge(dst, src) +} +func (m *DeleteVersionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteVersionRequest.Size(m) +} +func (m *DeleteVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteVersionRequest proto.InternalMessageInfo + +func (m *DeleteVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the SetDefaultVersion request. +type SetDefaultVersionRequest struct { + // Required. The name of the version to make the default for the model. You + // can get the names of all the versions of a model by calling + // [projects.models.versions.list](/ml/reference/rest/v1/projects.models.versions/list). + // + // Authorization: requires `Editor` role on the parent project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetDefaultVersionRequest) Reset() { *m = SetDefaultVersionRequest{} } +func (m *SetDefaultVersionRequest) String() string { return proto.CompactTextString(m) } +func (*SetDefaultVersionRequest) ProtoMessage() {} +func (*SetDefaultVersionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_model_service_277271e04d8db06a, []int{13} +} +func (m *SetDefaultVersionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetDefaultVersionRequest.Unmarshal(m, b) +} +func (m *SetDefaultVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetDefaultVersionRequest.Marshal(b, m, deterministic) +} +func (dst *SetDefaultVersionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetDefaultVersionRequest.Merge(dst, src) +} +func (m *SetDefaultVersionRequest) XXX_Size() int { + return xxx_messageInfo_SetDefaultVersionRequest.Size(m) +} +func (m *SetDefaultVersionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetDefaultVersionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetDefaultVersionRequest proto.InternalMessageInfo + +func (m *SetDefaultVersionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*Model)(nil), "google.cloud.ml.v1.Model") + proto.RegisterType((*Version)(nil), "google.cloud.ml.v1.Version") + proto.RegisterType((*ManualScaling)(nil), "google.cloud.ml.v1.ManualScaling") + proto.RegisterType((*CreateModelRequest)(nil), "google.cloud.ml.v1.CreateModelRequest") + proto.RegisterType((*ListModelsRequest)(nil), 
"google.cloud.ml.v1.ListModelsRequest") + proto.RegisterType((*ListModelsResponse)(nil), "google.cloud.ml.v1.ListModelsResponse") + proto.RegisterType((*GetModelRequest)(nil), "google.cloud.ml.v1.GetModelRequest") + proto.RegisterType((*DeleteModelRequest)(nil), "google.cloud.ml.v1.DeleteModelRequest") + proto.RegisterType((*CreateVersionRequest)(nil), "google.cloud.ml.v1.CreateVersionRequest") + proto.RegisterType((*ListVersionsRequest)(nil), "google.cloud.ml.v1.ListVersionsRequest") + proto.RegisterType((*ListVersionsResponse)(nil), "google.cloud.ml.v1.ListVersionsResponse") + proto.RegisterType((*GetVersionRequest)(nil), "google.cloud.ml.v1.GetVersionRequest") + proto.RegisterType((*DeleteVersionRequest)(nil), "google.cloud.ml.v1.DeleteVersionRequest") + proto.RegisterType((*SetDefaultVersionRequest)(nil), "google.cloud.ml.v1.SetDefaultVersionRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ModelServiceClient is the client API for ModelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ModelServiceClient interface { + // Creates a model which will later contain one or more versions. + // + // You must add at least one version before you can request predictions from + // the model. Add versions by calling + // [projects.models.versions.create](/ml/reference/rest/v1/projects.models.versions/create). + CreateModel(ctx context.Context, in *CreateModelRequest, opts ...grpc.CallOption) (*Model, error) + // Lists the models in a project. + // + // Each project can contain multiple models, and each model can have multiple + // versions. + ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) + // Gets information about a model, including its name, the description (if + // set), and the default version (if at least one version of the model has + // been deployed). + GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) + // Deletes a model. + // + // You can only delete a model if there are no versions in it. You can delete + // versions by calling + // [projects.models.versions.delete](/ml/reference/rest/v1/projects.models.versions/delete). + DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates a new version of a model from a trained TensorFlow model. + // + // If the version created in the cloud by this call is the first deployed + // version of the specified model, it will be made the default version of the + // model. When you add a version to a model that already has one or more + // versions, the default version does not automatically change. If you want a + // new version to be the default, you must call + // [projects.models.versions.setDefault](/ml/reference/rest/v1/projects.models.versions/setDefault). + CreateVersion(ctx context.Context, in *CreateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets basic information about all the versions of a model. 
+ // + // If you expect that a model has a lot of versions, or if you need to handle + // only a limited number of results at a time, you can request that the list + // be retrieved in batches (called pages): + ListVersions(ctx context.Context, in *ListVersionsRequest, opts ...grpc.CallOption) (*ListVersionsResponse, error) + // Gets information about a model version. + // + // Models can have multiple versions. You can call + // [projects.models.versions.list](/ml/reference/rest/v1/projects.models.versions/list) + // to get the same information that this method returns for all of the + // versions of a model. + GetVersion(ctx context.Context, in *GetVersionRequest, opts ...grpc.CallOption) (*Version, error) + // Deletes a model version. + // + // Each model can have multiple versions deployed and in use at any given + // time. Use this method to remove a single version. + // + // Note: You cannot delete the version that is set as the default version + // of the model unless it is the only remaining version. + DeleteVersion(ctx context.Context, in *DeleteVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Designates a version to be the default for the model. + // + // The default version is used for prediction requests made against the model + // that don't specify a version. + // + // The first version to be created for a model is automatically set as the + // default. You must make any subsequent changes to the default version + // setting manually using this method. + SetDefaultVersion(ctx context.Context, in *SetDefaultVersionRequest, opts ...grpc.CallOption) (*Version, error) +} + +type modelServiceClient struct { + cc *grpc.ClientConn +} + +func NewModelServiceClient(cc *grpc.ClientConn) ModelServiceClient { + return &modelServiceClient{cc} +} + +func (c *modelServiceClient) CreateModel(ctx context.Context, in *CreateModelRequest, opts ...grpc.CallOption) (*Model, error) { + out := new(Model) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/CreateModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) ListModels(ctx context.Context, in *ListModelsRequest, opts ...grpc.CallOption) (*ListModelsResponse, error) { + out := new(ListModelsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/ListModels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) GetModel(ctx context.Context, in *GetModelRequest, opts ...grpc.CallOption) (*Model, error) { + out := new(Model) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/GetModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) DeleteModel(ctx context.Context, in *DeleteModelRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/DeleteModel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) CreateVersion(ctx context.Context, in *CreateVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/CreateVersion", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) ListVersions(ctx context.Context, in *ListVersionsRequest, opts ...grpc.CallOption) (*ListVersionsResponse, error) { + out := new(ListVersionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/ListVersions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) GetVersion(ctx context.Context, in *GetVersionRequest, opts ...grpc.CallOption) (*Version, error) { + out := new(Version) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/GetVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) DeleteVersion(ctx context.Context, in *DeleteVersionRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/DeleteVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *modelServiceClient) SetDefaultVersion(ctx context.Context, in *SetDefaultVersionRequest, opts ...grpc.CallOption) (*Version, error) { + out := new(Version) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ModelService/SetDefaultVersion", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ModelServiceServer is the server API for ModelService service. +type ModelServiceServer interface { + // Creates a model which will later contain one or more versions. + // + // You must add at least one version before you can request predictions from + // the model. Add versions by calling + // [projects.models.versions.create](/ml/reference/rest/v1/projects.models.versions/create). + CreateModel(context.Context, *CreateModelRequest) (*Model, error) + // Lists the models in a project. + // + // Each project can contain multiple models, and each model can have multiple + // versions. + ListModels(context.Context, *ListModelsRequest) (*ListModelsResponse, error) + // Gets information about a model, including its name, the description (if + // set), and the default version (if at least one version of the model has + // been deployed). + GetModel(context.Context, *GetModelRequest) (*Model, error) + // Deletes a model. + // + // You can only delete a model if there are no versions in it. You can delete + // versions by calling + // [projects.models.versions.delete](/ml/reference/rest/v1/projects.models.versions/delete). + DeleteModel(context.Context, *DeleteModelRequest) (*longrunning.Operation, error) + // Creates a new version of a model from a trained TensorFlow model. + // + // If the version created in the cloud by this call is the first deployed + // version of the specified model, it will be made the default version of the + // model. When you add a version to a model that already has one or more + // versions, the default version does not automatically change. If you want a + // new version to be the default, you must call + // [projects.models.versions.setDefault](/ml/reference/rest/v1/projects.models.versions/setDefault). + CreateVersion(context.Context, *CreateVersionRequest) (*longrunning.Operation, error) + // Gets basic information about all the versions of a model. 
+ // + // If you expect that a model has a lot of versions, or if you need to handle + // only a limited number of results at a time, you can request that the list + // be retrieved in batches (called pages): + ListVersions(context.Context, *ListVersionsRequest) (*ListVersionsResponse, error) + // Gets information about a model version. + // + // Models can have multiple versions. You can call + // [projects.models.versions.list](/ml/reference/rest/v1/projects.models.versions/list) + // to get the same information that this method returns for all of the + // versions of a model. + GetVersion(context.Context, *GetVersionRequest) (*Version, error) + // Deletes a model version. + // + // Each model can have multiple versions deployed and in use at any given + // time. Use this method to remove a single version. + // + // Note: You cannot delete the version that is set as the default version + // of the model unless it is the only remaining version. + DeleteVersion(context.Context, *DeleteVersionRequest) (*longrunning.Operation, error) + // Designates a version to be the default for the model. + // + // The default version is used for prediction requests made against the model + // that don't specify a version. + // + // The first version to be created for a model is automatically set as the + // default. You must make any subsequent changes to the default version + // setting manually using this method. + SetDefaultVersion(context.Context, *SetDefaultVersionRequest) (*Version, error) +} + +func RegisterModelServiceServer(s *grpc.Server, srv ModelServiceServer) { + s.RegisterService(&_ModelService_serviceDesc, srv) +} + +func _ModelService_CreateModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).CreateModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/CreateModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).CreateModel(ctx, req.(*CreateModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_ListModels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListModelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).ListModels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/ListModels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).ListModels(ctx, req.(*ListModelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_GetModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).GetModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/GetModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).GetModel(ctx, 
req.(*GetModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_DeleteModel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteModelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).DeleteModel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/DeleteModel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).DeleteModel(ctx, req.(*DeleteModelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_CreateVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).CreateVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/CreateVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).CreateVersion(ctx, req.(*CreateVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_ListVersions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListVersionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).ListVersions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/ListVersions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).ListVersions(ctx, req.(*ListVersionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_GetVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).GetVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/GetVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).GetVersion(ctx, req.(*GetVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_DeleteVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).DeleteVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/DeleteVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).DeleteVersion(ctx, req.(*DeleteVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ModelService_SetDefaultVersion_Handler(srv interface{}, ctx context.Context, dec func(interface{}) 
error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetDefaultVersionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ModelServiceServer).SetDefaultVersion(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ModelService/SetDefaultVersion", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ModelServiceServer).SetDefaultVersion(ctx, req.(*SetDefaultVersionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ModelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.ml.v1.ModelService", + HandlerType: (*ModelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateModel", + Handler: _ModelService_CreateModel_Handler, + }, + { + MethodName: "ListModels", + Handler: _ModelService_ListModels_Handler, + }, + { + MethodName: "GetModel", + Handler: _ModelService_GetModel_Handler, + }, + { + MethodName: "DeleteModel", + Handler: _ModelService_DeleteModel_Handler, + }, + { + MethodName: "CreateVersion", + Handler: _ModelService_CreateVersion_Handler, + }, + { + MethodName: "ListVersions", + Handler: _ModelService_ListVersions_Handler, + }, + { + MethodName: "GetVersion", + Handler: _ModelService_GetVersion_Handler, + }, + { + MethodName: "DeleteVersion", + Handler: _ModelService_DeleteVersion_Handler, + }, + { + MethodName: "SetDefaultVersion", + Handler: _ModelService_SetDefaultVersion_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/ml/v1/model_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/ml/v1/model_service.proto", fileDescriptor_model_service_277271e04d8db06a) +} + +var fileDescriptor_model_service_277271e04d8db06a = []byte{ + // 996 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcf, 0x6f, 0xe3, 0x44, + 0x14, 0x96, 0xdb, 0xa6, 0x4d, 0x5e, 0x36, 0xad, 0x3a, 0x14, 0xc8, 0x66, 0x29, 0x04, 0xaf, 0xda, + 0x86, 0x00, 0xb6, 0x52, 0x8a, 0x10, 0x59, 0x01, 0xd2, 0x52, 0x69, 0x39, 0xec, 0x8a, 0xca, 0xdd, + 0xe5, 0x80, 0x84, 0x2c, 0x6f, 0x32, 0x6b, 0x06, 0xec, 0x19, 0xe3, 0x19, 0x07, 0x58, 0x58, 0x21, + 0xc1, 0x91, 0x23, 0xdc, 0xf9, 0xa3, 0x38, 0x71, 0xe7, 0xc6, 0x99, 0x3b, 0x9a, 0x1f, 0x4e, 0xed, + 0xc4, 0x89, 0x0b, 0x12, 0x37, 0xcf, 0x9b, 0xef, 0xcd, 0xfb, 0xe6, 0x7d, 0xef, 0xbd, 0x31, 0x1c, + 0x87, 0x8c, 0x85, 0x11, 0x76, 0x27, 0x11, 0xcb, 0xa6, 0x6e, 0x1c, 0xb9, 0xb3, 0x91, 0x1b, 0xb3, + 0x29, 0x8e, 0x7c, 0x8e, 0xd3, 0x19, 0x99, 0x60, 0x27, 0x49, 0x99, 0x60, 0x08, 0x69, 0x9c, 0xa3, + 0x70, 0x4e, 0x1c, 0x39, 0xb3, 0x51, 0xef, 0x25, 0xe3, 0x1b, 0x24, 0xc4, 0x0d, 0x28, 0x65, 0x22, + 0x10, 0x84, 0x51, 0xae, 0x3d, 0x7a, 0xcf, 0x17, 0x77, 0x33, 0xf1, 0xb9, 0x31, 0xdf, 0x36, 0xe6, + 0x88, 0xd1, 0x30, 0xcd, 0x28, 0x25, 0x34, 0x74, 0x59, 0x82, 0xd3, 0x92, 0xef, 0x2b, 0x06, 0xa4, + 0x56, 0x8f, 0xb3, 0x27, 0xae, 0x20, 0x31, 0xe6, 0x22, 0x88, 0x13, 0x0d, 0xb0, 0xff, 0xb0, 0xa0, + 0xf1, 0x40, 0xd2, 0x44, 0x08, 0xb6, 0x68, 0x10, 0xe3, 0xae, 0xd5, 0xb7, 0x06, 0x2d, 0x4f, 0x7d, + 0xa3, 0x3e, 0xb4, 0xa7, 0x98, 0x4f, 0x52, 0x92, 0xc8, 0x43, 0xbb, 0x1b, 0x6a, 0xab, 0x68, 0x42, + 0xe7, 0xb0, 0x37, 0xc5, 0x4f, 0x82, 0x2c, 0x12, 0xfe, 0x0c, 0xa7, 0x5c, 0xa2, 0x36, 0xfb, 0xd6, + 0xa0, 0x7d, 0x7a, 0xcb, 0x59, 0xbe, 0xa8, 0xf3, 0x89, 0x86, 0x78, 0xbb, 0xc6, 0xc7, 0xac, 0x51, + 0x17, 0x76, 0x52, 0x1c, 0x4a, 0xde, 0xdd, 0xad, 0xfe, 0xe6, 0xa0, 0xe5, 0xe5, 0x4b, 0x34, 0x86, + 0x9b, 
0x8c, 0x46, 0x84, 0x62, 0x3f, 0x49, 0xf1, 0x94, 0x4c, 0x64, 0x50, 0x3f, 0x62, 0x61, 0x48, + 0x68, 0xd8, 0x6d, 0xf4, 0xad, 0x41, 0xd3, 0x7b, 0x51, 0x03, 0x2e, 0xe6, 0xfb, 0xf7, 0xf5, 0xb6, + 0xfd, 0xf7, 0x06, 0xec, 0xe4, 0x11, 0xfe, 0xdb, 0xed, 0x0e, 0x01, 0x08, 0xf7, 0x0d, 0x59, 0x75, + 0xb1, 0xa6, 0xd7, 0x22, 0xfc, 0x5c, 0x1b, 0xd0, 0x11, 0xec, 0x4e, 0x71, 0x12, 0xb1, 0x6f, 0x63, + 0x4c, 0x85, 0x9f, 0xa5, 0xa4, 0xbb, 0xa5, 0xce, 0xe8, 0x5c, 0x59, 0x1f, 0xa5, 0x04, 0xdd, 0x81, + 0xf6, 0x24, 0xc5, 0x81, 0xc0, 0xbe, 0xcc, 0xbe, 0x62, 0xdd, 0x3e, 0xed, 0xe5, 0xf9, 0xc9, 0xa5, + 0x71, 0x1e, 0xe6, 0xd2, 0x78, 0xa0, 0xe1, 0xd2, 0x80, 0xde, 0x87, 0x4e, 0x14, 0x70, 0xe1, 0x67, + 0xdc, 0xb8, 0x6f, 0xd7, 0xba, 0xb7, 0xa5, 0xc3, 0x23, 0xae, 0xfd, 0x4f, 0x60, 0x2f, 0xcd, 0xa8, + 0xf4, 0x9c, 0x0b, 0xd4, 0x54, 0x24, 0x77, 0x8d, 0x39, 0xcf, 0xd0, 0x47, 0xb0, 0x1b, 0x07, 0x34, + 0x0b, 0x22, 0x9f, 0x4f, 0x82, 0x48, 0xa6, 0xb7, 0xa5, 0x22, 0xbd, 0x5a, 0x25, 0xe4, 0x03, 0x85, + 0xbc, 0xd4, 0x40, 0xaf, 0x13, 0x17, 0x97, 0xf6, 0x11, 0x74, 0x4a, 0xfb, 0xe8, 0x00, 0x1a, 0x94, + 0x4d, 0x31, 0x57, 0xd9, 0x6f, 0x78, 0x7a, 0x61, 0x7f, 0x06, 0xe8, 0x43, 0x75, 0x4f, 0x55, 0x7f, + 0x1e, 0xfe, 0x2a, 0xc3, 0x5c, 0xa0, 0x17, 0x60, 0x3b, 0x09, 0x52, 0x4c, 0x85, 0x91, 0xca, 0xac, + 0x90, 0x0b, 0x0d, 0xd5, 0x4e, 0x4a, 0xa6, 0xf6, 0xe9, 0xcd, 0x4a, 0x56, 0xea, 0x20, 0x8d, 0xb3, + 0x43, 0xd8, 0xbf, 0x4f, 0xb8, 0x50, 0x36, 0x5e, 0x77, 0xfa, 0x21, 0x40, 0x12, 0x84, 0xd8, 0x17, + 0xec, 0x4b, 0x4c, 0x8d, 0x8a, 0x2d, 0x69, 0x79, 0x28, 0x0d, 0xe8, 0x16, 0xa8, 0x85, 0xcf, 0xc9, + 0x53, 0xad, 0x5f, 0xc3, 0x6b, 0x4a, 0xc3, 0x25, 0x79, 0x8a, 0x6d, 0x06, 0xa8, 0x18, 0x88, 0x27, + 0x8c, 0x72, 0x8c, 0x46, 0xb0, 0xad, 0x78, 0xc8, 0x4b, 0x6f, 0xae, 0x27, 0x6c, 0x80, 0xe8, 0x18, + 0xf6, 0x28, 0xfe, 0x46, 0xf8, 0x05, 0x26, 0xba, 0x26, 0x3b, 0xd2, 0x7c, 0x91, 0xb3, 0xb1, 0x8f, + 0x60, 0xef, 0x1e, 0x16, 0xa5, 0xac, 0x55, 0x94, 0xb7, 0x3d, 0x00, 0x74, 0x8e, 0x23, 0xbc, 0x90, + 0xdf, 0x2a, 0x24, 0x86, 0x03, 0xad, 0x44, 0xde, 0x9f, 0x35, 0xd9, 0x7a, 0x1b, 0x76, 0xf2, 0x5a, + 0xda, 0xa8, 0x6f, 0xf6, 0x1c, 0x6b, 0x13, 0x78, 0x4e, 0x26, 0xca, 0xd8, 0xff, 0x57, 0x4d, 0xbe, + 0x86, 0x83, 0x72, 0x28, 0xa3, 0xca, 0x3b, 0xd0, 0x34, 0x6c, 0x72, 0x5d, 0xd6, 0x52, 0x9f, 0x83, + 0xaf, 0xad, 0xcd, 0x09, 0xec, 0xdf, 0xc3, 0x62, 0x21, 0x8f, 0x55, 0x39, 0x1f, 0xc2, 0x81, 0x56, + 0xe7, 0x1a, 0x58, 0x07, 0xba, 0x97, 0x58, 0x9c, 0x97, 0x66, 0xe6, 0x1a, 0xfc, 0xe9, 0x5f, 0x2d, + 0xb8, 0xa1, 0x44, 0xbf, 0xd4, 0x4f, 0x0f, 0xfa, 0x01, 0xda, 0x85, 0x56, 0x43, 0xc7, 0x55, 0x77, + 0x5e, 0xee, 0xc5, 0xde, 0xea, 0x9a, 0xb5, 0xdf, 0xfc, 0xf1, 0xf7, 0x3f, 0x7f, 0xd9, 0x38, 0xb1, + 0x5f, 0x96, 0xef, 0xdc, 0x77, 0x5a, 0xb1, 0xf7, 0x92, 0x94, 0x7d, 0x81, 0x27, 0x82, 0xbb, 0xc3, + 0x67, 0xfa, 0xed, 0xe3, 0x63, 0xdd, 0x8c, 0xe8, 0x27, 0x0b, 0xe0, 0xaa, 0x49, 0xd0, 0x51, 0xd5, + 0xc1, 0x4b, 0xdd, 0xda, 0x3b, 0xae, 0x83, 0x69, 0x55, 0xed, 0x63, 0x45, 0xa6, 0x8f, 0x6a, 0xc8, + 0xa0, 0x14, 0x9a, 0x79, 0xe3, 0xa0, 0xdb, 0x55, 0x67, 0x2f, 0xb4, 0xd5, 0xba, 0x04, 0x94, 0x63, + 0xca, 0xb4, 0x17, 0x22, 0x9a, 0x80, 0xee, 0xf0, 0x19, 0xfa, 0x1e, 0xda, 0x85, 0x2e, 0xac, 0x4e, + 0xfd, 0x72, 0x9b, 0xf6, 0x0e, 0x73, 0x5c, 0xe1, 0x79, 0x77, 0x3e, 0xce, 0x9f, 0xf7, 0x3c, 0xfa, + 0xb0, 0x2e, 0xfa, 0xaf, 0x16, 0x74, 0x4a, 0xad, 0x8d, 0x06, 0xab, 0xb5, 0x2f, 0x57, 0x56, 0x1d, + 0x85, 0xb1, 0xa2, 0x70, 0x66, 0xbf, 0x56, 0x9d, 0xf4, 0x2b, 0x12, 0x6e, 0xde, 0x44, 0xe3, 0x7c, + 0x12, 0x48, 0x5a, 0x37, 0x8a, 0xfd, 0x89, 0x4e, 0x56, 0x29, 0xbd, 0x30, 0x2c, 0x7a, 0x83, 0x7a, + 0xa0, 0x29, 0x8a, 0x91, 0xe2, 
0xf7, 0x3a, 0xba, 0x3e, 0x3f, 0x55, 0xa5, 0x57, 0xdd, 0x5b, 0x5d, + 0xa5, 0x4b, 0xdd, 0xdd, 0x5b, 0x37, 0x41, 0x16, 0x58, 0xac, 0x12, 0x6a, 0x4e, 0x41, 0x6a, 0xf6, + 0xb3, 0x05, 0x9d, 0xd2, 0x68, 0xa8, 0xd6, 0xac, 0x6a, 0x7a, 0xd4, 0x69, 0x66, 0xd8, 0x0c, 0xff, + 0x05, 0x9b, 0xdf, 0x2c, 0xd8, 0x5f, 0x1a, 0x3e, 0xe8, 0x8d, 0x2a, 0x46, 0xab, 0x66, 0xd4, 0xfa, + 0x0c, 0x7d, 0xa0, 0x38, 0xbd, 0x6b, 0x9f, 0x5d, 0x9b, 0xd3, 0x98, 0xcf, 0x03, 0x8d, 0xad, 0xe1, + 0xdd, 0x10, 0x7a, 0x13, 0x16, 0x2f, 0x85, 0x08, 0x12, 0xe2, 0xcc, 0x46, 0x77, 0xf7, 0x8b, 0x83, + 0xf0, 0x42, 0xfe, 0x2c, 0x5d, 0x58, 0x9f, 0x9e, 0x19, 0x70, 0xc8, 0xa2, 0x80, 0x86, 0x0e, 0x4b, + 0x43, 0x37, 0xc4, 0x54, 0xfd, 0x4a, 0xb9, 0x7a, 0x2b, 0x48, 0x08, 0x2f, 0xfe, 0xcb, 0xdf, 0x89, + 0xa3, 0xc7, 0xdb, 0x0a, 0xf0, 0xd6, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcc, 0x07, 0x42, 0xf7, + 0xeb, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/operation_metadata.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/operation_metadata.pb.go new file mode 100644 index 0000000..50a910b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/operation_metadata.pb.go @@ -0,0 +1,191 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/ml/v1/operation_metadata.proto + +package ml // import "google.golang.org/genproto/googleapis/cloud/ml/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The operation type. +type OperationMetadata_OperationType int32 + +const ( + // Unspecified operation type. + OperationMetadata_OPERATION_TYPE_UNSPECIFIED OperationMetadata_OperationType = 0 + // An operation to create a new version. + OperationMetadata_CREATE_VERSION OperationMetadata_OperationType = 1 + // An operation to delete an existing version. + OperationMetadata_DELETE_VERSION OperationMetadata_OperationType = 2 + // An operation to delete an existing model. + OperationMetadata_DELETE_MODEL OperationMetadata_OperationType = 3 +) + +var OperationMetadata_OperationType_name = map[int32]string{ + 0: "OPERATION_TYPE_UNSPECIFIED", + 1: "CREATE_VERSION", + 2: "DELETE_VERSION", + 3: "DELETE_MODEL", +} +var OperationMetadata_OperationType_value = map[string]int32{ + "OPERATION_TYPE_UNSPECIFIED": 0, + "CREATE_VERSION": 1, + "DELETE_VERSION": 2, + "DELETE_MODEL": 3, +} + +func (x OperationMetadata_OperationType) String() string { + return proto.EnumName(OperationMetadata_OperationType_name, int32(x)) +} +func (OperationMetadata_OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_metadata_c111b6986797fe5f, []int{0, 0} +} + +// Represents the metadata of the long-running operation. +type OperationMetadata struct { + // The time the operation was submitted. 
+ CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time operation processing started. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time operation processing completed. + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Indicates whether a request to cancel this operation has been made. + IsCancellationRequested bool `protobuf:"varint,4,opt,name=is_cancellation_requested,json=isCancellationRequested,proto3" json:"is_cancellation_requested,omitempty"` + // The operation type. + OperationType OperationMetadata_OperationType `protobuf:"varint,5,opt,name=operation_type,json=operationType,proto3,enum=google.cloud.ml.v1.OperationMetadata_OperationType" json:"operation_type,omitempty"` + // Contains the name of the model associated with the operation. + ModelName string `protobuf:"bytes,6,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"` + // Contains the version associated with the operation. + Version *Version `protobuf:"bytes,7,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_metadata_c111b6986797fe5f, []int{0} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *OperationMetadata) GetIsCancellationRequested() bool { + if m != nil { + return m.IsCancellationRequested + } + return false +} + +func (m *OperationMetadata) GetOperationType() OperationMetadata_OperationType { + if m != nil { + return m.OperationType + } + return OperationMetadata_OPERATION_TYPE_UNSPECIFIED +} + +func (m *OperationMetadata) GetModelName() string { + if m != nil { + return m.ModelName + } + return "" +} + +func (m *OperationMetadata) GetVersion() *Version { + if m != nil { + return m.Version + } + return nil +} + +func init() { + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.ml.v1.OperationMetadata") + proto.RegisterEnum("google.cloud.ml.v1.OperationMetadata_OperationType", OperationMetadata_OperationType_name, 
OperationMetadata_OperationType_value) +} + +func init() { + proto.RegisterFile("google/cloud/ml/v1/operation_metadata.proto", fileDescriptor_operation_metadata_c111b6986797fe5f) +} + +var fileDescriptor_operation_metadata_c111b6986797fe5f = []byte{ + // 454 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x5f, 0x6b, 0xdb, 0x30, + 0x14, 0xc5, 0xe7, 0xb6, 0x6b, 0x1a, 0x75, 0x0d, 0x99, 0x1e, 0xb6, 0xcc, 0xfb, 0x17, 0xfa, 0x30, + 0x02, 0x03, 0x99, 0xb4, 0xdb, 0xc3, 0xd6, 0xa7, 0x36, 0xd1, 0x20, 0xd0, 0xc6, 0xc6, 0xf5, 0x0a, + 0xeb, 0x8b, 0x51, 0xed, 0x3b, 0x23, 0x90, 0x25, 0xcf, 0x52, 0x0c, 0xfd, 0x2c, 0xfb, 0xb2, 0x23, + 0x92, 0x4d, 0x33, 0x52, 0xe8, 0xa3, 0xce, 0xfd, 0x9d, 0xab, 0xab, 0x7b, 0x84, 0x3e, 0x17, 0x4a, + 0x15, 0x02, 0x82, 0x4c, 0xa8, 0x55, 0x1e, 0x94, 0x22, 0x68, 0xa6, 0x81, 0xaa, 0xa0, 0x66, 0x86, + 0x2b, 0x99, 0x96, 0x60, 0x58, 0xce, 0x0c, 0x23, 0x55, 0xad, 0x8c, 0xc2, 0xd8, 0xc1, 0xc4, 0xc2, + 0xa4, 0x14, 0xa4, 0x99, 0xfa, 0xef, 0xda, 0x06, 0xac, 0xe2, 0x01, 0x93, 0x52, 0x19, 0xeb, 0xd4, + 0xce, 0xe1, 0x7f, 0x7a, 0xa4, 0x7d, 0xa9, 0x72, 0x10, 0xa9, 0x86, 0xba, 0xe1, 0x19, 0xb4, 0xdc, + 0xc7, 0x96, 0xb3, 0xa7, 0xbb, 0xd5, 0xef, 0xc0, 0xf0, 0x12, 0xb4, 0x61, 0x65, 0xe5, 0x80, 0xe3, + 0xbf, 0x7b, 0xe8, 0x65, 0xd8, 0xcd, 0x75, 0xd5, 0x8e, 0x85, 0xcf, 0xd0, 0x61, 0x56, 0x03, 0x33, + 0x90, 0xae, 0xf9, 0x91, 0x37, 0xf6, 0x26, 0x87, 0x27, 0x3e, 0x69, 0xc7, 0xec, 0x9a, 0x91, 0xa4, + 0x6b, 0x16, 0x23, 0x87, 0xaf, 0x05, 0xfc, 0x0d, 0x21, 0x6d, 0x58, 0x6d, 0x9c, 0x77, 0xe7, 0x49, + 0x6f, 0xdf, 0xd2, 0xd6, 0xfa, 0x15, 0x1d, 0x80, 0xcc, 0x9d, 0x71, 0xf7, 0x49, 0x63, 0x0f, 0x64, + 0x6e, 0x6d, 0xdf, 0xd1, 0x1b, 0xae, 0xd3, 0x8c, 0xc9, 0x0c, 0x84, 0x70, 0x1b, 0xae, 0xe1, 0xcf, + 0x0a, 0xb4, 0x81, 0x7c, 0xb4, 0x37, 0xf6, 0x26, 0x07, 0xf1, 0x6b, 0xae, 0x67, 0x1b, 0xf5, 0xb8, + 0x2b, 0xe3, 0x5b, 0x34, 0x78, 0xc8, 0xc5, 0xdc, 0x57, 0x30, 0x7a, 0x3e, 0xf6, 0x26, 0x83, 0x93, + 0x53, 0xb2, 0x1d, 0x0a, 0xd9, 0xda, 0xd4, 0x83, 0x92, 0xdc, 0x57, 0x10, 0x1f, 0xa9, 0xcd, 0x23, + 0x7e, 0x8f, 0x90, 0x0b, 0x45, 0xb2, 0x12, 0x46, 0xfb, 0x63, 0x6f, 0xd2, 0x8f, 0xfb, 0x56, 0x59, + 0x32, 0xfb, 0xda, 0x5e, 0x03, 0xb5, 0xe6, 0x4a, 0x8e, 0x7a, 0xf6, 0xb1, 0x6f, 0x1f, 0xbb, 0xf3, + 0xc6, 0x21, 0x71, 0xc7, 0x1e, 0x73, 0x74, 0xf4, 0xdf, 0xad, 0xf8, 0x03, 0xf2, 0xc3, 0x88, 0xc6, + 0xe7, 0xc9, 0x22, 0x5c, 0xa6, 0xc9, 0xaf, 0x88, 0xa6, 0x3f, 0x97, 0xd7, 0x11, 0x9d, 0x2d, 0x7e, + 0x2c, 0xe8, 0x7c, 0xf8, 0x0c, 0x63, 0x34, 0x98, 0xc5, 0xf4, 0x3c, 0xa1, 0xe9, 0x0d, 0x8d, 0xaf, + 0x17, 0xe1, 0x72, 0xe8, 0xad, 0xb5, 0x39, 0xbd, 0xa4, 0x1b, 0xda, 0x0e, 0x1e, 0xa2, 0x17, 0xad, + 0x76, 0x15, 0xce, 0xe9, 0xe5, 0x70, 0xf7, 0x42, 0x20, 0x3f, 0x53, 0xe5, 0xd6, 0x54, 0xac, 0xe2, + 0xa4, 0x99, 0x5e, 0xbc, 0xda, 0x5a, 0x47, 0xb4, 0x0e, 0x29, 0xf2, 0x6e, 0xbf, 0xb4, 0x8e, 0x42, + 0x09, 0x26, 0x0b, 0xa2, 0xea, 0x22, 0x28, 0x40, 0xda, 0x08, 0x03, 0x57, 0x62, 0x15, 0xd7, 0x9b, + 0xbf, 0xf7, 0xac, 0x14, 0x77, 0xfb, 0x16, 0x38, 0xfd, 0x17, 0x00, 0x00, 0xff, 0xff, 0x03, 0xf9, + 0xcc, 0xf1, 0x3c, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/prediction_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/prediction_service.pb.go new file mode 100644 index 0000000..94e40b7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/prediction_service.pb.go @@ -0,0 +1,373 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/ml/v1/prediction_service.proto + +package ml // import "google.golang.org/genproto/googleapis/cloud/ml/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import httpbody "google.golang.org/genproto/googleapis/api/httpbody" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for predictions to be issued against a trained model. +// +// The body of the request is a single JSON object with a single top-level +// field: +// +//
+//
+//   instances
+//
+//     A JSON array containing values representing the instances to use for +// prediction.
+//
+// +// The structure of each element of the instances list is determined by your +// model's input definition. Instances can include named inputs or can contain +// only unlabeled values. +// +// Not all data includes named inputs. Some instances will be simple +// JSON values (boolean, number, or string). However, instances are often lists +// of simple values, or complex nested lists. Here are some examples of request +// bodies: +// +// CSV data with each row encoded as a string value: +//
+// {"instances": ["1.0,true,\\"x\\"", "-2.0,false,\\"y\\""]}
+// 
+// Plain text: +//
+// {"instances": ["the quick brown fox", "la bruja le dio"]}
+// 
+// Sentences encoded as lists of words (vectors of strings): +//
+// {
+//   "instances": [
+//     ["the","quick","brown"],
+//     ["la","bruja","le"],
+//     ...
+//   ]
+// }
+// 
+// Floating point scalar values: +//
+// {"instances": [0.0, 1.1, 2.2]}
+// 
+// Vectors of integers: +//
+// {
+//   "instances": [
+//     [0, 1, 2],
+//     [3, 4, 5],
+//     ...
+//   ]
+// }
+// 
+// Tensors (in this case, two-dimensional tensors): +//
+// {
+//   "instances": [
+//     [
+//       [0, 1, 2],
+//       [3, 4, 5]
+//     ],
+//     ...
+//   ]
+// }
+// 
+// Images can be represented in different ways. In this encoding scheme, the first +// two dimensions represent the rows and columns of the image, and the third +// contains lists (vectors) of the R, G, and B values for each pixel. +//
+// {
+//   "instances": [
+//     [
+//       [
+//         [138, 30, 66],
+//         [130, 20, 56],
+//         ...
+//       ],
+//       [
+//         [126, 38, 61],
+//         [122, 24, 57],
+//         ...
+//       ],
+//       ...
+//     ],
+//     ...
+//   ]
+// }
+// 
+// JSON strings must be encoded as UTF-8. To send binary data, you must +// base64-encode the data and mark it as binary. To mark a JSON string +// as binary, replace it with a JSON object with a single attribute named `b64`: +//
{"b64": "..."} 
+// For example: +// +// Two Serialized tf.Examples (fake data, for illustrative purposes only): +//
+// {"instances": [{"b64": "X5ad6u"}, {"b64": "IA9j4nx"}]}
+// 
+// Two JPEG image byte strings (fake data, for illustrative purposes only): +//
+// {"instances": [{"b64": "ASa8asdf"}, {"b64": "JLK7ljk3"}]}
+// 
+// If your data includes named references, format each instance as a JSON object +// with the named references as the keys: +// +// JSON input data to be preprocessed: +//
+// {
+//   "instances": [
+//     {
+//       "a": 1.0,
+//       "b": true,
+//       "c": "x"
+//     },
+//     {
+//       "a": -2.0,
+//       "b": false,
+//       "c": "y"
+//     }
+//   ]
+// }
+// 
+// Some models have an underlying TensorFlow graph that accepts multiple input +// tensors. In this case, you should use the names of JSON name/value pairs to +// identify the input tensors, as shown in the following examples: +// +// For a graph with input tensor aliases "tag" (string) and "image" +// (base64-encoded string): +//
+// {
+//   "instances": [
+//     {
+//       "tag": "beach",
+//       "image": {"b64": "ASa8asdf"}
+//     },
+//     {
+//       "tag": "car",
+//       "image": {"b64": "JLK7ljk3"}
+//     }
+//   ]
+// }
+// 
+// For a graph with input tensor aliases "tag" (string) and "image" +// (3-dimensional array of 8-bit ints): +//
+// {
+//   "instances": [
+//     {
+//       "tag": "beach",
+//       "image": [
+//         [
+//           [138, 30, 66],
+//           [130, 20, 56],
+//           ...
+//         ],
+//         [
+//           [126, 38, 61],
+//           [122, 24, 57],
+//           ...
+//         ],
+//         ...
+//       ]
+//     },
+//     {
+//       "tag": "car",
+//       "image": [
+//         [
+//           [255, 0, 102],
+//           [255, 0, 97],
+//           ...
+//         ],
+//         [
+//           [254, 1, 101],
+//           [254, 2, 93],
+//           ...
+//         ],
+//         ...
+//       ]
+//     },
+//     ...
+//   ]
+// }
+// 
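+// Illustrative sketch only (not part of the generated file): assuming a gRPC
+// connection `conn`, a context `ctx`, and a JSON request body `payload`
+// ([]byte) shaped like the examples above, a caller might send it through the
+// generated client roughly as follows (resource name shown is a placeholder):
+//
+//   client := ml.NewOnlinePredictionServiceClient(conn)
+//   resp, err := client.Predict(ctx, &ml.PredictRequest{
+//       Name:     "projects/PROJECT/models/MODEL",
+//       HttpBody: &httpbody.HttpBody{ContentType: "application/json", Data: payload},
+//   })
+//   if err != nil {
+//       // handle the RPC error
+//   }
+//   _ = resp // resp is an *httpbody.HttpBody carrying the JSON predictions
+//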
+// If the call is successful, the response body will contain one prediction +// entry per instance in the request body. If prediction fails for any +// instance, the response body will contain no predictions and will contian +// a single error entry instead. +type PredictRequest struct { + // Required. The resource name of a model or a version. + // + // Authorization: requires `Viewer` role on the parent project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // + // Required. The prediction request body. + HttpBody *httpbody.HttpBody `protobuf:"bytes,2,opt,name=http_body,json=httpBody,proto3" json:"http_body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PredictRequest) Reset() { *m = PredictRequest{} } +func (m *PredictRequest) String() string { return proto.CompactTextString(m) } +func (*PredictRequest) ProtoMessage() {} +func (*PredictRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_prediction_service_be09b6bf3a730006, []int{0} +} +func (m *PredictRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PredictRequest.Unmarshal(m, b) +} +func (m *PredictRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PredictRequest.Marshal(b, m, deterministic) +} +func (dst *PredictRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PredictRequest.Merge(dst, src) +} +func (m *PredictRequest) XXX_Size() int { + return xxx_messageInfo_PredictRequest.Size(m) +} +func (m *PredictRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PredictRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PredictRequest proto.InternalMessageInfo + +func (m *PredictRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PredictRequest) GetHttpBody() *httpbody.HttpBody { + if m != nil { + return m.HttpBody + } + return nil +} + +func init() { + proto.RegisterType((*PredictRequest)(nil), "google.cloud.ml.v1.PredictRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// OnlinePredictionServiceClient is the client API for OnlinePredictionService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OnlinePredictionServiceClient interface { + // Performs prediction on the data in the request. + // + // **** REMOVE FROM GENERATED DOCUMENTATION + Predict(ctx context.Context, in *PredictRequest, opts ...grpc.CallOption) (*httpbody.HttpBody, error) +} + +type onlinePredictionServiceClient struct { + cc *grpc.ClientConn +} + +func NewOnlinePredictionServiceClient(cc *grpc.ClientConn) OnlinePredictionServiceClient { + return &onlinePredictionServiceClient{cc} +} + +func (c *onlinePredictionServiceClient) Predict(ctx context.Context, in *PredictRequest, opts ...grpc.CallOption) (*httpbody.HttpBody, error) { + out := new(httpbody.HttpBody) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.OnlinePredictionService/Predict", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// OnlinePredictionServiceServer is the server API for OnlinePredictionService service. 
+type OnlinePredictionServiceServer interface { + // Performs prediction on the data in the request. + // + // **** REMOVE FROM GENERATED DOCUMENTATION + Predict(context.Context, *PredictRequest) (*httpbody.HttpBody, error) +} + +func RegisterOnlinePredictionServiceServer(s *grpc.Server, srv OnlinePredictionServiceServer) { + s.RegisterService(&_OnlinePredictionService_serviceDesc, srv) +} + +func _OnlinePredictionService_Predict_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PredictRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OnlinePredictionServiceServer).Predict(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.OnlinePredictionService/Predict", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OnlinePredictionServiceServer).Predict(ctx, req.(*PredictRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _OnlinePredictionService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.ml.v1.OnlinePredictionService", + HandlerType: (*OnlinePredictionServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Predict", + Handler: _OnlinePredictionService_Predict_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/ml/v1/prediction_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/ml/v1/prediction_service.proto", fileDescriptor_prediction_service_be09b6bf3a730006) +} + +var fileDescriptor_prediction_service_be09b6bf3a730006 = []byte{ + // 308 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0x4f, 0x4b, 0xfb, 0x30, + 0x18, 0xa6, 0xe3, 0xc7, 0x4f, 0x17, 0xc1, 0x43, 0x10, 0x9d, 0x45, 0x64, 0xd4, 0xcb, 0x9c, 0x90, + 0xd0, 0xe9, 0x69, 0xe2, 0x65, 0x27, 0x6f, 0x96, 0x79, 0x10, 0xbc, 0x8c, 0xac, 0x0d, 0x59, 0x24, + 0xcd, 0x1b, 0xdb, 0xac, 0x30, 0xc4, 0x8b, 0x37, 0xcf, 0x7e, 0x34, 0xbf, 0x82, 0x1f, 0x44, 0xd2, + 0x04, 0x99, 0xd4, 0xdb, 0x4b, 0xde, 0xe7, 0x79, 0x9f, 0x3f, 0x41, 0x17, 0x02, 0x40, 0x28, 0x4e, + 0x73, 0x05, 0xeb, 0x82, 0x96, 0x8a, 0x36, 0x29, 0x35, 0x15, 0x2f, 0x64, 0x6e, 0x25, 0xe8, 0x45, + 0xcd, 0xab, 0x46, 0xe6, 0x9c, 0x98, 0x0a, 0x2c, 0x60, 0xec, 0xc1, 0xa4, 0x05, 0x93, 0x52, 0x91, + 0x26, 0x8d, 0x4f, 0xc2, 0x01, 0x66, 0x24, 0x65, 0x5a, 0x83, 0x65, 0x8e, 0x58, 0x7b, 0x46, 0x7c, + 0xbc, 0xb5, 0x5d, 0x59, 0x6b, 0x96, 0x50, 0x6c, 0xfc, 0x2a, 0x79, 0x40, 0xfb, 0x99, 0x17, 0x9a, + 0xf3, 0xe7, 0x35, 0xaf, 0x2d, 0xc6, 0xe8, 0x9f, 0x66, 0x25, 0x1f, 0x44, 0xc3, 0x68, 0xd4, 0x9f, + 0xb7, 0x33, 0x4e, 0x51, 0xdf, 0xf1, 0x16, 0x8e, 0x38, 0xe8, 0x0d, 0xa3, 0xd1, 0xde, 0xe4, 0x80, + 0x04, 0x1b, 0xcc, 0x48, 0x72, 0x6b, 0xad, 0x99, 0x41, 0xb1, 0x99, 0xef, 0xae, 0xc2, 0x34, 0x79, + 0x8f, 0xd0, 0xd1, 0x9d, 0x56, 0x52, 0xf3, 0xec, 0x27, 0xc8, 0xbd, 0xcf, 0x81, 0x35, 0xda, 0x09, + 0x8f, 0x38, 0x21, 0xdd, 0x34, 0xe4, 0xb7, 0xa3, 0xf8, 0x4f, 0xa9, 0xe4, 0xfc, 0xed, 0xf3, 0xeb, + 0xa3, 0x77, 0x96, 0x9c, 0xba, 0xb2, 0x5e, 0x9c, 0xcd, 0x1b, 0x53, 0xc1, 0x13, 0xcf, 0x6d, 0x4d, + 0xc7, 0xe3, 0xd7, 0x69, 0xe8, 0x6f, 0x1a, 0x8d, 0x67, 0x0a, 0xc5, 0x39, 0x94, 0x1d, 0x25, 0x77, + 0xae, 0x49, 0x67, 0x87, 0x1d, 0x83, 0x99, 0xab, 0x26, 0x8b, 0x1e, 0xaf, 0x02, 0x43, 0x80, 0x62, + 0x5a, 0x10, 0xa8, 0x04, 0x15, 0x5c, 0xb7, 0xc5, 0x51, 0xbf, 0x62, 0x46, 0xd6, 0xdb, 0xbf, 0x76, + 0x5d, 0xaa, 0xe5, 0xff, 0x16, 0x70, 0xf9, 0x1d, 0x00, 
0x00, 0xff, 0xff, 0x81, 0x8e, 0x25, 0xca, + 0xd5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/project_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/project_service.pb.go new file mode 100644 index 0000000..f517fa4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/ml/v1/project_service.pb.go @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/ml/v1/project_service.proto + +package ml // import "google.golang.org/genproto/googleapis/cloud/ml/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Requests service account information associated with a project. +type GetConfigRequest struct { + // Required. The project name. + // + // Authorization: requires `Viewer` role on the specified project. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConfigRequest) Reset() { *m = GetConfigRequest{} } +func (m *GetConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetConfigRequest) ProtoMessage() {} +func (*GetConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_project_service_81d7c159e503bebf, []int{0} +} +func (m *GetConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConfigRequest.Unmarshal(m, b) +} +func (m *GetConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConfigRequest.Merge(dst, src) +} +func (m *GetConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetConfigRequest.Size(m) +} +func (m *GetConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConfigRequest proto.InternalMessageInfo + +func (m *GetConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Returns service account information associated with a project. +type GetConfigResponse struct { + // The service account Cloud ML uses to access resources in the project. + ServiceAccount string `protobuf:"bytes,1,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // The project number for `service_account`. 
+ ServiceAccountProject int64 `protobuf:"varint,2,opt,name=service_account_project,json=serviceAccountProject,proto3" json:"service_account_project,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConfigResponse) Reset() { *m = GetConfigResponse{} } +func (m *GetConfigResponse) String() string { return proto.CompactTextString(m) } +func (*GetConfigResponse) ProtoMessage() {} +func (*GetConfigResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_project_service_81d7c159e503bebf, []int{1} +} +func (m *GetConfigResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConfigResponse.Unmarshal(m, b) +} +func (m *GetConfigResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConfigResponse.Marshal(b, m, deterministic) +} +func (dst *GetConfigResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConfigResponse.Merge(dst, src) +} +func (m *GetConfigResponse) XXX_Size() int { + return xxx_messageInfo_GetConfigResponse.Size(m) +} +func (m *GetConfigResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetConfigResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConfigResponse proto.InternalMessageInfo + +func (m *GetConfigResponse) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *GetConfigResponse) GetServiceAccountProject() int64 { + if m != nil { + return m.ServiceAccountProject + } + return 0 +} + +func init() { + proto.RegisterType((*GetConfigRequest)(nil), "google.cloud.ml.v1.GetConfigRequest") + proto.RegisterType((*GetConfigResponse)(nil), "google.cloud.ml.v1.GetConfigResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProjectManagementServiceClient is the client API for ProjectManagementService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProjectManagementServiceClient interface { + // Get the service account information associated with your project. You need + // this information in order to grant the service account persmissions for + // the Google Cloud Storage location where you put your model training code + // for training the model with Google Cloud Machine Learning. + GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (*GetConfigResponse, error) +} + +type projectManagementServiceClient struct { + cc *grpc.ClientConn +} + +func NewProjectManagementServiceClient(cc *grpc.ClientConn) ProjectManagementServiceClient { + return &projectManagementServiceClient{cc} +} + +func (c *projectManagementServiceClient) GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (*GetConfigResponse, error) { + out := new(GetConfigResponse) + err := c.cc.Invoke(ctx, "/google.cloud.ml.v1.ProjectManagementService/GetConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProjectManagementServiceServer is the server API for ProjectManagementService service. +type ProjectManagementServiceServer interface { + // Get the service account information associated with your project. 
You need + // this information in order to grant the service account persmissions for + // the Google Cloud Storage location where you put your model training code + // for training the model with Google Cloud Machine Learning. + GetConfig(context.Context, *GetConfigRequest) (*GetConfigResponse, error) +} + +func RegisterProjectManagementServiceServer(s *grpc.Server, srv ProjectManagementServiceServer) { + s.RegisterService(&_ProjectManagementService_serviceDesc, srv) +} + +func _ProjectManagementService_GetConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProjectManagementServiceServer).GetConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.ml.v1.ProjectManagementService/GetConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProjectManagementServiceServer).GetConfig(ctx, req.(*GetConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProjectManagementService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.ml.v1.ProjectManagementService", + HandlerType: (*ProjectManagementServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetConfig", + Handler: _ProjectManagementService_GetConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/ml/v1/project_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/ml/v1/project_service.proto", fileDescriptor_project_service_81d7c159e503bebf) +} + +var fileDescriptor_project_service_81d7c159e503bebf = []byte{ + // 319 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xbf, 0x4a, 0x43, 0x31, + 0x14, 0xc6, 0xb9, 0x55, 0x84, 0x66, 0xf0, 0x4f, 0x44, 0x2c, 0x45, 0xb0, 0x16, 0xb5, 0xc5, 0x21, + 0xa1, 0x2a, 0x0e, 0x8a, 0x83, 0x75, 0x70, 0x12, 0x4a, 0xdd, 0x5c, 0x4a, 0xbc, 0x1e, 0x43, 0x24, + 0xc9, 0x89, 0x37, 0xe9, 0x5d, 0xc4, 0x41, 0x5f, 0xc1, 0xdd, 0x97, 0xf2, 0x15, 0x7c, 0x10, 0xe9, + 0x4d, 0x94, 0xda, 0x0e, 0x6e, 0x87, 0x73, 0x7e, 0x5f, 0xf2, 0x7d, 0xe7, 0x90, 0xae, 0x44, 0x94, + 0x1a, 0x78, 0xae, 0x71, 0x7c, 0xcf, 0x8d, 0xe6, 0x65, 0x8f, 0xbb, 0x02, 0x1f, 0x21, 0x0f, 0x23, + 0x0f, 0x45, 0xa9, 0x72, 0x60, 0xae, 0xc0, 0x80, 0x94, 0x46, 0x92, 0x55, 0x24, 0x33, 0x9a, 0x95, + 0xbd, 0xe6, 0x56, 0x52, 0x0b, 0xa7, 0xb8, 0xb0, 0x16, 0x83, 0x08, 0x0a, 0xad, 0x8f, 0x8a, 0xf6, + 0x3e, 0x59, 0xbd, 0x82, 0x70, 0x89, 0xf6, 0x41, 0xc9, 0x21, 0x3c, 0x8d, 0xc1, 0x07, 0x4a, 0xc9, + 0xa2, 0x15, 0x06, 0x1a, 0x59, 0x2b, 0xeb, 0xd6, 0x87, 0x55, 0xdd, 0x0e, 0x64, 0x6d, 0x8a, 0xf3, + 0x0e, 0xad, 0x07, 0xda, 0x21, 0x2b, 0xe9, 0xff, 0x91, 0xc8, 0x73, 0x1c, 0xdb, 0x90, 0x34, 0xcb, + 0xa9, 0x7d, 0x11, 0xbb, 0xf4, 0x84, 0x6c, 0xce, 0x80, 0xa3, 0x14, 0xa0, 0x51, 0x6b, 0x65, 0xdd, + 0x85, 0xe1, 0xc6, 0x5f, 0xc1, 0x20, 0x0e, 0x0f, 0x3f, 0x32, 0xd2, 0x48, 0xf5, 0xb5, 0xb0, 0x42, + 0x82, 0x01, 0x1b, 0x6e, 0x22, 0x4a, 0x5f, 0x33, 0x52, 0xff, 0xf5, 0x44, 0x77, 0xd9, 0x7c, 0x76, + 0x36, 0x1b, 0xad, 0xb9, 0xf7, 0x0f, 0x15, 0x83, 0xb5, 0x3b, 0x6f, 0x9f, 0x5f, 0xef, 0xb5, 0x1d, + 0xba, 0x3d, 0x59, 0xf5, 0xf3, 0x64, 0x01, 0xe7, 0xc9, 0xaf, 0xe7, 0x07, 0x2f, 0xa7, 0xf2, 0x47, + 0xd0, 0x57, 0xa4, 0x99, 0xa3, 0x99, 0x7b, 0x54, 0x38, 0xc5, 0xca, 0x5e, 0x7f, 0x3d, 0x79, 0x4f, + 0x8e, 0x07, 0x93, 0x8d, 0x0f, 0xb2, 0xdb, 0xe3, 0x84, 
0x4b, 0xd4, 0xc2, 0x4a, 0x86, 0x85, 0xe4, + 0x12, 0x6c, 0x75, 0x0f, 0x1e, 0x47, 0xc2, 0x29, 0x3f, 0x7d, 0xee, 0x33, 0xa3, 0xef, 0x96, 0x2a, + 0xe0, 0xe8, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xd0, 0xa5, 0x43, 0x33, 0x0e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/common/common.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/common/common.pb.go new file mode 100644 index 0000000..fad4444 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/common/common.pb.go @@ -0,0 +1,232 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/oslogin/common/common.proto + +package common // import "google.golang.org/genproto/googleapis/cloud/oslogin/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The POSIX account information associated with a Google account. +type PosixAccount struct { + // Only one POSIX account can be marked as primary. + Primary bool `protobuf:"varint,1,opt,name=primary,proto3" json:"primary,omitempty"` + // The username of the POSIX account. + Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"` + // The user ID. + Uid int64 `protobuf:"varint,3,opt,name=uid,proto3" json:"uid,omitempty"` + // The default group ID. + Gid int64 `protobuf:"varint,4,opt,name=gid,proto3" json:"gid,omitempty"` + // The path to the home directory for this account. + HomeDirectory string `protobuf:"bytes,5,opt,name=home_directory,json=homeDirectory,proto3" json:"home_directory,omitempty"` + // The path to the logic shell for this account. + Shell string `protobuf:"bytes,6,opt,name=shell,proto3" json:"shell,omitempty"` + // The GECOS (user information) entry for this account. + Gecos string `protobuf:"bytes,7,opt,name=gecos,proto3" json:"gecos,omitempty"` + // System identifier for which account the username or uid applies to. + // By default, the empty value is used. + SystemId string `protobuf:"bytes,8,opt,name=system_id,json=systemId,proto3" json:"system_id,omitempty"` + // Output only. A POSIX account identifier. 
+ AccountId string `protobuf:"bytes,9,opt,name=account_id,json=accountId,proto3" json:"account_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PosixAccount) Reset() { *m = PosixAccount{} } +func (m *PosixAccount) String() string { return proto.CompactTextString(m) } +func (*PosixAccount) ProtoMessage() {} +func (*PosixAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_common_085b2e433f956f98, []int{0} +} +func (m *PosixAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PosixAccount.Unmarshal(m, b) +} +func (m *PosixAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PosixAccount.Marshal(b, m, deterministic) +} +func (dst *PosixAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_PosixAccount.Merge(dst, src) +} +func (m *PosixAccount) XXX_Size() int { + return xxx_messageInfo_PosixAccount.Size(m) +} +func (m *PosixAccount) XXX_DiscardUnknown() { + xxx_messageInfo_PosixAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_PosixAccount proto.InternalMessageInfo + +func (m *PosixAccount) GetPrimary() bool { + if m != nil { + return m.Primary + } + return false +} + +func (m *PosixAccount) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *PosixAccount) GetUid() int64 { + if m != nil { + return m.Uid + } + return 0 +} + +func (m *PosixAccount) GetGid() int64 { + if m != nil { + return m.Gid + } + return 0 +} + +func (m *PosixAccount) GetHomeDirectory() string { + if m != nil { + return m.HomeDirectory + } + return "" +} + +func (m *PosixAccount) GetShell() string { + if m != nil { + return m.Shell + } + return "" +} + +func (m *PosixAccount) GetGecos() string { + if m != nil { + return m.Gecos + } + return "" +} + +func (m *PosixAccount) GetSystemId() string { + if m != nil { + return m.SystemId + } + return "" +} + +func (m *PosixAccount) GetAccountId() string { + if m != nil { + return m.AccountId + } + return "" +} + +// The SSH public key information associated with a Google account. +type SshPublicKey struct { + // Public key text in SSH format, defined by + // RFC4253 + // section 6.6. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // An expiration time in microseconds since epoch. + ExpirationTimeUsec int64 `protobuf:"varint,2,opt,name=expiration_time_usec,json=expirationTimeUsec,proto3" json:"expiration_time_usec,omitempty"` + // Output only. The SHA-256 fingerprint of the SSH public key. 
+ Fingerprint string `protobuf:"bytes,3,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SshPublicKey) Reset() { *m = SshPublicKey{} } +func (m *SshPublicKey) String() string { return proto.CompactTextString(m) } +func (*SshPublicKey) ProtoMessage() {} +func (*SshPublicKey) Descriptor() ([]byte, []int) { + return fileDescriptor_common_085b2e433f956f98, []int{1} +} +func (m *SshPublicKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SshPublicKey.Unmarshal(m, b) +} +func (m *SshPublicKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SshPublicKey.Marshal(b, m, deterministic) +} +func (dst *SshPublicKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_SshPublicKey.Merge(dst, src) +} +func (m *SshPublicKey) XXX_Size() int { + return xxx_messageInfo_SshPublicKey.Size(m) +} +func (m *SshPublicKey) XXX_DiscardUnknown() { + xxx_messageInfo_SshPublicKey.DiscardUnknown(m) +} + +var xxx_messageInfo_SshPublicKey proto.InternalMessageInfo + +func (m *SshPublicKey) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *SshPublicKey) GetExpirationTimeUsec() int64 { + if m != nil { + return m.ExpirationTimeUsec + } + return 0 +} + +func (m *SshPublicKey) GetFingerprint() string { + if m != nil { + return m.Fingerprint + } + return "" +} + +func init() { + proto.RegisterType((*PosixAccount)(nil), "google.cloud.oslogin.common.PosixAccount") + proto.RegisterType((*SshPublicKey)(nil), "google.cloud.oslogin.common.SshPublicKey") +} + +func init() { + proto.RegisterFile("google/cloud/oslogin/common/common.proto", fileDescriptor_common_085b2e433f956f98) +} + +var fileDescriptor_common_085b2e433f956f98 = []byte{ + // 406 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x41, 0x6b, 0x14, 0x31, + 0x14, 0xc7, 0x99, 0xae, 0x6d, 0x77, 0xe2, 0x2a, 0x12, 0x7a, 0x08, 0x5d, 0xc5, 0xa5, 0x20, 0xec, + 0x69, 0x46, 0xf0, 0xe8, 0xa9, 0xad, 0x20, 0x45, 0xc1, 0x65, 0xd4, 0x8b, 0x2c, 0x2c, 0xd3, 0xe4, + 0x99, 0x7d, 0x38, 0xc9, 0x1b, 0x92, 0x0c, 0x74, 0xbf, 0x92, 0x07, 0x3f, 0x88, 0x5f, 0xc8, 0xab, + 0x24, 0x99, 0x51, 0x0f, 0xd2, 0x53, 0xf2, 0xff, 0xff, 0xfe, 0xf3, 0x92, 0x97, 0x37, 0x6c, 0xad, + 0x89, 0x74, 0x07, 0xb5, 0xec, 0x68, 0x50, 0x35, 0xf9, 0x8e, 0x34, 0xda, 0x5a, 0x92, 0x31, 0x34, + 0x2d, 0x55, 0xef, 0x28, 0x10, 0x5f, 0xe6, 0x64, 0x95, 0x92, 0xd5, 0x98, 0xac, 0x72, 0xe4, 0xfc, + 0xe9, 0x58, 0xa6, 0xed, 0xb1, 0x6e, 0xad, 0xa5, 0xd0, 0x06, 0x24, 0xeb, 0xf3, 0xa7, 0x17, 0xbf, + 0x0a, 0xb6, 0xd8, 0x90, 0xc7, 0xbb, 0x4b, 0x29, 0x69, 0xb0, 0x81, 0x0b, 0x76, 0xda, 0x3b, 0x34, + 0xad, 0x3b, 0x88, 0x62, 0x55, 0xac, 0xe7, 0xcd, 0x24, 0xf9, 0x39, 0x9b, 0x0f, 0x1e, 0x9c, 0x6d, + 0x0d, 0x88, 0xa3, 0x55, 0xb1, 0x2e, 0x9b, 0x3f, 0x9a, 0x3f, 0x61, 0xb3, 0x01, 0x95, 0x98, 0xad, + 0x8a, 0xf5, 0xac, 0x89, 0xdb, 0xe8, 0x68, 0x54, 0xe2, 0x41, 0x76, 0x34, 0x2a, 0xfe, 0x82, 0x3d, + 0xde, 0x93, 0x81, 0x9d, 0x42, 0x07, 0x32, 0x90, 0x3b, 0x88, 0xe3, 0x54, 0xe5, 0x51, 0x74, 0xdf, + 0x4c, 0x26, 0x3f, 0x63, 0xc7, 0x7e, 0x0f, 0x5d, 0x27, 0x4e, 0x12, 0xcd, 0x22, 0xba, 0x1a, 0x24, + 0x79, 0x71, 0x9a, 0xdd, 0x24, 0xf8, 0x92, 0x95, 0xfe, 0xe0, 0x03, 0x98, 0x1d, 0x2a, 0x31, 0xcf, + 0x77, 0xca, 0xc6, 0x8d, 0xe2, 0xcf, 0x18, 0x6b, 0x73, 0x53, 0x91, 0x96, 0x89, 0x96, 0xa3, 0x73, + 0xa3, 0x2e, 0x02, 0x5b, 0x7c, 0xf4, 0xfb, 0xcd, 0x70, 0xdb, 0xa1, 0x7c, 0x07, 0x87, 0x78, 0xe1, + 0x6f, 
0x90, 0x9b, 0x2e, 0x9b, 0xb8, 0xe5, 0x2f, 0xd9, 0x19, 0xdc, 0xf5, 0xe8, 0xd2, 0x83, 0xed, + 0x02, 0x1a, 0xd8, 0x0d, 0x1e, 0x64, 0x6a, 0x7e, 0xd6, 0xf0, 0xbf, 0xec, 0x13, 0x1a, 0xf8, 0xec, + 0x41, 0xf2, 0x15, 0x7b, 0xf8, 0x15, 0xad, 0x06, 0xd7, 0x3b, 0xb4, 0x21, 0x3d, 0x47, 0xd9, 0xfc, + 0x6b, 0x5d, 0xfd, 0x28, 0xd8, 0x73, 0x49, 0xa6, 0xba, 0x67, 0x62, 0x57, 0x8b, 0x0f, 0xfe, 0x7d, + 0xd4, 0x9b, 0x38, 0xa1, 0x2f, 0x97, 0x63, 0x54, 0x53, 0xd7, 0x5a, 0x5d, 0x91, 0xd3, 0xb5, 0x06, + 0x9b, 0xa6, 0x57, 0x67, 0xd4, 0xf6, 0xe8, 0xff, 0xfb, 0x97, 0xbc, 0xce, 0xcb, 0xf7, 0xa3, 0xe5, + 0xdb, 0x5c, 0xe3, 0x3a, 0x1d, 0x37, 0x96, 0xaf, 0xae, 0x13, 0xfd, 0x39, 0xd1, 0x6d, 0xa2, 0xdb, + 0x91, 0x6e, 0x33, 0xbd, 0x3d, 0x49, 0x27, 0xbd, 0xfa, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x30, + 0xf7, 0x5b, 0x8e, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1/oslogin.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1/oslogin.pb.go new file mode 100644 index 0000000..326c7b0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1/oslogin.pb.go @@ -0,0 +1,747 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/oslogin/v1/oslogin.proto + +package oslogin // import "google.golang.org/genproto/googleapis/cloud/oslogin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import common "google.golang.org/genproto/googleapis/cloud/oslogin/common" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The user profile information used for logging in to a virtual machine on +// Google Compute Engine. +type LoginProfile struct { + // The primary email address that uniquely identifies the user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The list of POSIX accounts associated with the user. + PosixAccounts []*common.PosixAccount `protobuf:"bytes,2,rep,name=posix_accounts,json=posixAccounts,proto3" json:"posix_accounts,omitempty"` + // A map from SSH public key fingerprint to the associated key object. + SshPublicKeys map[string]*common.SshPublicKey `protobuf:"bytes,3,rep,name=ssh_public_keys,json=sshPublicKeys,proto3" json:"ssh_public_keys,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Indicates if the user is suspended. A suspended user cannot log in but + // their profile information is retained. 
+ Suspended bool `protobuf:"varint,4,opt,name=suspended,proto3" json:"suspended,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoginProfile) Reset() { *m = LoginProfile{} } +func (m *LoginProfile) String() string { return proto.CompactTextString(m) } +func (*LoginProfile) ProtoMessage() {} +func (*LoginProfile) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{0} +} +func (m *LoginProfile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoginProfile.Unmarshal(m, b) +} +func (m *LoginProfile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoginProfile.Marshal(b, m, deterministic) +} +func (dst *LoginProfile) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoginProfile.Merge(dst, src) +} +func (m *LoginProfile) XXX_Size() int { + return xxx_messageInfo_LoginProfile.Size(m) +} +func (m *LoginProfile) XXX_DiscardUnknown() { + xxx_messageInfo_LoginProfile.DiscardUnknown(m) +} + +var xxx_messageInfo_LoginProfile proto.InternalMessageInfo + +func (m *LoginProfile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LoginProfile) GetPosixAccounts() []*common.PosixAccount { + if m != nil { + return m.PosixAccounts + } + return nil +} + +func (m *LoginProfile) GetSshPublicKeys() map[string]*common.SshPublicKey { + if m != nil { + return m.SshPublicKeys + } + return nil +} + +func (m *LoginProfile) GetSuspended() bool { + if m != nil { + return m.Suspended + } + return false +} + +// A request message for deleting a POSIX account entry. +type DeletePosixAccountRequest struct { + // A reference to the POSIX account to update. POSIX accounts are identified + // by the project ID they are associated with. A reference to the POSIX + // account is in format `users/{user}/projects/{project}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePosixAccountRequest) Reset() { *m = DeletePosixAccountRequest{} } +func (m *DeletePosixAccountRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePosixAccountRequest) ProtoMessage() {} +func (*DeletePosixAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{1} +} +func (m *DeletePosixAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePosixAccountRequest.Unmarshal(m, b) +} +func (m *DeletePosixAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePosixAccountRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePosixAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePosixAccountRequest.Merge(dst, src) +} +func (m *DeletePosixAccountRequest) XXX_Size() int { + return xxx_messageInfo_DeletePosixAccountRequest.Size(m) +} +func (m *DeletePosixAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePosixAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePosixAccountRequest proto.InternalMessageInfo + +func (m *DeletePosixAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for deleting an SSH public key. +type DeleteSshPublicKeyRequest struct { + // The fingerprint of the public key to update. Public keys are identified by + // their SHA-256 fingerprint. 
The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSshPublicKeyRequest) Reset() { *m = DeleteSshPublicKeyRequest{} } +func (m *DeleteSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSshPublicKeyRequest) ProtoMessage() {} +func (*DeleteSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{2} +} +func (m *DeleteSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *DeleteSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSshPublicKeyRequest.Merge(dst, src) +} +func (m *DeleteSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Size(m) +} +func (m *DeleteSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSshPublicKeyRequest proto.InternalMessageInfo + +func (m *DeleteSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving the login profile information for a user. +type GetLoginProfileRequest struct { + // The unique ID for the user in format `users/{user}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLoginProfileRequest) Reset() { *m = GetLoginProfileRequest{} } +func (m *GetLoginProfileRequest) String() string { return proto.CompactTextString(m) } +func (*GetLoginProfileRequest) ProtoMessage() {} +func (*GetLoginProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{3} +} +func (m *GetLoginProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLoginProfileRequest.Unmarshal(m, b) +} +func (m *GetLoginProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLoginProfileRequest.Marshal(b, m, deterministic) +} +func (dst *GetLoginProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLoginProfileRequest.Merge(dst, src) +} +func (m *GetLoginProfileRequest) XXX_Size() int { + return xxx_messageInfo_GetLoginProfileRequest.Size(m) +} +func (m *GetLoginProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLoginProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLoginProfileRequest proto.InternalMessageInfo + +func (m *GetLoginProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving an SSH public key. +type GetSshPublicKeyRequest struct { + // The fingerprint of the public key to retrieve. Public keys are identified + // by their SHA-256 fingerprint. The fingerprint of the public key is in + // format `users/{user}/sshPublicKeys/{fingerprint}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSshPublicKeyRequest) Reset() { *m = GetSshPublicKeyRequest{} } +func (m *GetSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*GetSshPublicKeyRequest) ProtoMessage() {} +func (*GetSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{4} +} +func (m *GetSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *GetSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *GetSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSshPublicKeyRequest.Merge(dst, src) +} +func (m *GetSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_GetSshPublicKeyRequest.Size(m) +} +func (m *GetSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSshPublicKeyRequest proto.InternalMessageInfo + +func (m *GetSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for importing an SSH public key. +type ImportSshPublicKeyRequest struct { + // The unique ID for the user in format `users/{user}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // The project ID of the Google Cloud Platform project. 
+ ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyRequest) Reset() { *m = ImportSshPublicKeyRequest{} } +func (m *ImportSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyRequest) ProtoMessage() {} +func (*ImportSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{5} +} +func (m *ImportSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyRequest.Merge(dst, src) +} +func (m *ImportSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyRequest.Size(m) +} +func (m *ImportSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyRequest proto.InternalMessageInfo + +func (m *ImportSshPublicKeyRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *ImportSshPublicKeyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// A response message for importing an SSH public key. +type ImportSshPublicKeyResponse struct { + // The login profile information for the user. + LoginProfile *LoginProfile `protobuf:"bytes,1,opt,name=login_profile,json=loginProfile,proto3" json:"login_profile,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyResponse) Reset() { *m = ImportSshPublicKeyResponse{} } +func (m *ImportSshPublicKeyResponse) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyResponse) ProtoMessage() {} +func (*ImportSshPublicKeyResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{6} +} +func (m *ImportSshPublicKeyResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyResponse.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyResponse.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyResponse.Merge(dst, src) +} +func (m *ImportSshPublicKeyResponse) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyResponse.Size(m) +} +func (m *ImportSshPublicKeyResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyResponse proto.InternalMessageInfo + +func (m *ImportSshPublicKeyResponse) GetLoginProfile() *LoginProfile { + if m != nil { + return m.LoginProfile + } + return nil +} + +// A request message for updating an SSH public key. +type UpdateSshPublicKeyRequest struct { + // The fingerprint of the public key to update. 
Public keys are identified by + // their SHA-256 fingerprint. The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // Mask to control which fields get updated. Updates all if not present. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSshPublicKeyRequest) Reset() { *m = UpdateSshPublicKeyRequest{} } +func (m *UpdateSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSshPublicKeyRequest) ProtoMessage() {} +func (*UpdateSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_470b09e56a6f2815, []int{7} +} +func (m *UpdateSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *UpdateSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSshPublicKeyRequest.Merge(dst, src) +} +func (m *UpdateSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Size(m) +} +func (m *UpdateSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSshPublicKeyRequest proto.InternalMessageInfo + +func (m *UpdateSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *UpdateSshPublicKeyRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func init() { + proto.RegisterType((*LoginProfile)(nil), "google.cloud.oslogin.v1.LoginProfile") + proto.RegisterMapType((map[string]*common.SshPublicKey)(nil), "google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry") + proto.RegisterType((*DeletePosixAccountRequest)(nil), "google.cloud.oslogin.v1.DeletePosixAccountRequest") + proto.RegisterType((*DeleteSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1.DeleteSshPublicKeyRequest") + proto.RegisterType((*GetLoginProfileRequest)(nil), "google.cloud.oslogin.v1.GetLoginProfileRequest") + proto.RegisterType((*GetSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1.GetSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1.ImportSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyResponse)(nil), "google.cloud.oslogin.v1.ImportSshPublicKeyResponse") + proto.RegisterType((*UpdateSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1.UpdateSshPublicKeyRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// OsLoginServiceClient is the client API for OsLoginService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OsLoginServiceClient interface { + // Deletes a POSIX account. + DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) +} + +type osLoginServiceClient struct { + cc *grpc.ClientConn +} + +func NewOsLoginServiceClient(cc *grpc.ClientConn) OsLoginServiceClient { + return &osLoginServiceClient{cc} +} + +func (c *osLoginServiceClient) DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/DeletePosixAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/DeleteSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) { + out := new(LoginProfile) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/GetLoginProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/GetSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) { + out := new(ImportSshPublicKeyResponse) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/ImportSshPublicKey", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1.OsLoginService/UpdateSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// OsLoginServiceServer is the server API for OsLoginService service. +type OsLoginServiceServer interface { + // Deletes a POSIX account. + DeletePosixAccount(context.Context, *DeletePosixAccountRequest) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(context.Context, *DeleteSshPublicKeyRequest) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(context.Context, *GetLoginProfileRequest) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(context.Context, *GetSshPublicKeyRequest) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(context.Context, *ImportSshPublicKeyRequest) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(context.Context, *UpdateSshPublicKeyRequest) (*common.SshPublicKey, error) +} + +func RegisterOsLoginServiceServer(s *grpc.Server, srv OsLoginServiceServer) { + s.RegisterService(&_OsLoginService_serviceDesc, srv) +} + +func _OsLoginService_DeletePosixAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePosixAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/DeletePosixAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, req.(*DeletePosixAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_DeleteSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/DeleteSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, req.(*DeleteSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetLoginProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLoginProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/GetLoginProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, req.(*GetLoginProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/GetSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, req.(*GetSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_ImportSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/ImportSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, req.(*ImportSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_UpdateSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1.OsLoginService/UpdateSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, req.(*UpdateSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _OsLoginService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.oslogin.v1.OsLoginService", + HandlerType: (*OsLoginServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeletePosixAccount", + Handler: _OsLoginService_DeletePosixAccount_Handler, + }, + { + MethodName: "DeleteSshPublicKey", + Handler: _OsLoginService_DeleteSshPublicKey_Handler, + }, + { + MethodName: "GetLoginProfile", + Handler: _OsLoginService_GetLoginProfile_Handler, + }, + { + MethodName: "GetSshPublicKey", + Handler: _OsLoginService_GetSshPublicKey_Handler, + }, + { + MethodName: "ImportSshPublicKey", + Handler: _OsLoginService_ImportSshPublicKey_Handler, + }, + { + MethodName: "UpdateSshPublicKey", + Handler: _OsLoginService_UpdateSshPublicKey_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/oslogin/v1/oslogin.proto", +} + +func init() { + proto.RegisterFile("google/cloud/oslogin/v1/oslogin.proto", fileDescriptor_oslogin_470b09e56a6f2815) +} + +var fileDescriptor_oslogin_470b09e56a6f2815 = []byte{ + // 774 bytes of 
a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0x96, 0x93, 0x52, 0xe8, 0x26, 0x6d, 0xd1, 0x1e, 0xda, 0xd4, 0x6d, 0xd5, 0x60, 0x51, 0x35, + 0x44, 0xc8, 0x56, 0x52, 0x0e, 0x25, 0x15, 0x54, 0x14, 0x4a, 0x55, 0x7e, 0xd4, 0x28, 0x15, 0x3d, + 0xa0, 0x4a, 0xc1, 0x75, 0xb6, 0xae, 0x89, 0xed, 0x5d, 0xbc, 0x76, 0x44, 0x84, 0x7a, 0xe1, 0xc2, + 0x85, 0x13, 0x9c, 0x38, 0x22, 0x6e, 0x5c, 0x78, 0x02, 0x78, 0x00, 0x24, 0x4e, 0x3c, 0x01, 0x12, + 0x0f, 0x82, 0xbc, 0x5e, 0xb7, 0x4e, 0x6c, 0xa7, 0xae, 0xc4, 0x29, 0xbb, 0x9e, 0xf9, 0x66, 0xbf, + 0xfd, 0x76, 0xbe, 0x51, 0xc0, 0xb2, 0x8e, 0xb1, 0x6e, 0x22, 0x45, 0x33, 0xb1, 0xd7, 0x51, 0x30, + 0x35, 0xb1, 0x6e, 0xd8, 0x4a, 0xaf, 0x16, 0x2e, 0x65, 0xe2, 0x60, 0x17, 0xc3, 0xd9, 0x20, 0x4d, + 0x66, 0x69, 0x72, 0x18, 0xeb, 0xd5, 0xc4, 0x05, 0x8e, 0x57, 0x89, 0xa1, 0xa8, 0xb6, 0x8d, 0x5d, + 0xd5, 0x35, 0xb0, 0x4d, 0x03, 0x98, 0x58, 0x49, 0xac, 0xae, 0x61, 0xcb, 0xc2, 0xe1, 0x0f, 0xcf, + 0x9c, 0xe7, 0x99, 0x6c, 0x77, 0xe8, 0x1d, 0x29, 0xc8, 0x22, 0x6e, 0x9f, 0x07, 0xcb, 0xc3, 0xc1, + 0x23, 0x03, 0x99, 0x9d, 0xb6, 0xa5, 0xd2, 0x6e, 0x90, 0x21, 0xfd, 0xc9, 0x81, 0xe2, 0x13, 0xbf, + 0x78, 0xd3, 0xc1, 0x47, 0x86, 0x89, 0x20, 0x04, 0x63, 0xb6, 0x6a, 0xa1, 0x92, 0x50, 0x16, 0x2a, + 0x13, 0x2d, 0xb6, 0x86, 0x4d, 0x30, 0x45, 0x30, 0x35, 0x5e, 0xb7, 0x55, 0x4d, 0xc3, 0x9e, 0xed, + 0xd2, 0x52, 0xae, 0x9c, 0xaf, 0x14, 0xea, 0x37, 0xe4, 0xc4, 0xdb, 0x71, 0x7e, 0x4d, 0x1f, 0x72, + 0x2f, 0x40, 0xb4, 0x26, 0x49, 0x64, 0x47, 0xe1, 0x0b, 0x30, 0x4d, 0xe9, 0x71, 0x9b, 0x78, 0x87, + 0xa6, 0xa1, 0xb5, 0xbb, 0xa8, 0x4f, 0x4b, 0x79, 0x56, 0x72, 0x4d, 0x4e, 0x11, 0x4c, 0x8e, 0xb2, + 0x94, 0xf7, 0xe8, 0x71, 0x93, 0x61, 0x1f, 0xa3, 0x3e, 0xdd, 0xb2, 0x5d, 0xa7, 0xdf, 0x9a, 0xa4, + 0xd1, 0x6f, 0x70, 0x01, 0x4c, 0x50, 0x8f, 0x12, 0x64, 0x77, 0x50, 0xa7, 0x34, 0x56, 0x16, 0x2a, + 0x57, 0x5a, 0x67, 0x1f, 0xc4, 0x2e, 0x80, 0xf1, 0x12, 0xf0, 0x2a, 0xc8, 0x77, 0x51, 0x9f, 0x5f, + 0xdd, 0x5f, 0xc2, 0x0d, 0x70, 0xa9, 0xa7, 0x9a, 0x1e, 0x2a, 0xe5, 0xca, 0xc2, 0xb9, 0x17, 0x8e, + 0x56, 0x6c, 0x05, 0xb8, 0x46, 0x6e, 0x4d, 0x90, 0x14, 0x30, 0xf7, 0x00, 0x99, 0xc8, 0x45, 0x03, + 0x8a, 0xa0, 0x57, 0x1e, 0xa2, 0x6e, 0x92, 0xde, 0x67, 0x80, 0x81, 0x8a, 0x23, 0x00, 0x37, 0xc1, + 0xcc, 0x36, 0x72, 0xa3, 0x0a, 0x9d, 0x9f, 0x9d, 0xb5, 0xf6, 0x17, 0x01, 0xcc, 0xed, 0x58, 0x04, + 0x3b, 0x89, 0x88, 0x19, 0x30, 0x4e, 0x54, 0x07, 0xd9, 0x2e, 0xc7, 0xf0, 0x1d, 0xdc, 0x05, 0x53, + 0x83, 0x0f, 0x7c, 0x71, 0x05, 0x8b, 0xd1, 0x07, 0x85, 0x8b, 0x00, 0x10, 0x07, 0xbf, 0x44, 0x9a, + 0xdb, 0x36, 0x3a, 0xa5, 0x3c, 0x3b, 0x6c, 0x82, 0x7f, 0xd9, 0xe9, 0x48, 0xc7, 0x40, 0x4c, 0x22, + 0x49, 0x09, 0xb6, 0x29, 0x82, 0x8f, 0xc0, 0x24, 0x3b, 0xa7, 0x4d, 0x02, 0x75, 0x18, 0xd9, 0x42, + 0x7d, 0x39, 0x53, 0xb3, 0xb5, 0x8a, 0x66, 0x64, 0x27, 0x7d, 0x17, 0xc0, 0xdc, 0x33, 0xd2, 0x51, + 0x33, 0xbf, 0xce, 0xff, 0xd7, 0x62, 0x1d, 0x14, 0x3c, 0xc6, 0x80, 0x39, 0x99, 0x89, 0x51, 0xa8, + 0x8b, 0x61, 0xb5, 0xd0, 0xec, 0xf2, 0x43, 0xdf, 0xec, 0x4f, 0x55, 0xda, 0x6d, 0x81, 0x20, 0xdd, + 0x5f, 0xd7, 0x7f, 0x5d, 0x06, 0x53, 0xbb, 0x94, 0x5d, 0x70, 0x0f, 0x39, 0x3d, 0x43, 0x43, 0xf0, + 0x9d, 0x00, 0x60, 0xbc, 0x43, 0x61, 0x3d, 0x55, 0x9e, 0xd4, 0x76, 0x16, 0x67, 0x62, 0x2c, 0xb6, + 0xfc, 0x79, 0x24, 0x2d, 0xbf, 0xfd, 0xfd, 0xf7, 0x63, 0x6e, 0xa9, 0xba, 0xe8, 0x8f, 0xc8, 0x37, + 0xbe, 0x2c, 0x77, 0x3c, 0x8a, 0x1c, 0xaa, 0x54, 0x15, 0xfe, 0x88, 0x54, 0xa9, 0x9e, 0xc0, 0xf7, + 0xa7, 0x4c, 0xa2, 0xd7, 0x3f, 0x97, 0x49, 0xc2, 0x4b, 0xa4, 0x32, 0xa9, 0x32, 0x26, 0xd7, 
0xab, + 0x52, 0x9c, 0xc9, 0xc0, 0x04, 0xf1, 0xe9, 0x7c, 0x10, 0xc0, 0xf4, 0x90, 0xb1, 0xa0, 0x92, 0xca, + 0x25, 0xd9, 0x82, 0x62, 0xb6, 0x2e, 0x93, 0x56, 0x18, 0xaf, 0x6b, 0x70, 0x29, 0xc6, 0xeb, 0x44, + 0x89, 0x36, 0x20, 0xfc, 0x14, 0x90, 0x1a, 0x10, 0x68, 0x24, 0xa9, 0x24, 0x75, 0xb2, 0xf7, 0x5e, + 0x28, 0x18, 0xcc, 0x22, 0xd8, 0x0f, 0x01, 0xc0, 0xb8, 0x0f, 0x47, 0xbc, 0x5f, 0xea, 0x64, 0x11, + 0x57, 0x2f, 0x84, 0x09, 0x8c, 0x2e, 0x6d, 0x30, 0xae, 0xb7, 0xa5, 0x15, 0xc6, 0x35, 0x98, 0x45, + 0xa7, 0x32, 0x36, 0x8c, 0x18, 0xb0, 0x31, 0xe4, 0x4c, 0xf8, 0x4d, 0x00, 0x30, 0xee, 0xee, 0x11, + 0x17, 0x48, 0x1d, 0x05, 0x17, 0x91, 0xb8, 0xc1, 0x68, 0xdf, 0xaa, 0x67, 0x90, 0x78, 0x98, 0xf1, + 0xe6, 0x67, 0x01, 0xcc, 0x6b, 0xd8, 0x4a, 0x23, 0xb8, 0x59, 0xe4, 0x66, 0x6f, 0xfa, 0x36, 0x68, + 0x0a, 0xcf, 0xef, 0xf2, 0x44, 0x1d, 0x9b, 0xaa, 0xad, 0xcb, 0xd8, 0xd1, 0x15, 0x1d, 0xd9, 0xcc, + 0x24, 0x4a, 0x10, 0x52, 0x89, 0x41, 0x63, 0xff, 0x6b, 0xd6, 0xf9, 0xf2, 0x6b, 0x6e, 0x76, 0x3b, + 0x28, 0x70, 0x9f, 0x9d, 0xc4, 0xab, 0xcb, 0xfb, 0xb5, 0x9f, 0x61, 0xe4, 0x80, 0x45, 0x0e, 0x78, + 0xe4, 0x60, 0xbf, 0x76, 0x38, 0xce, 0xca, 0xaf, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x8e, 0x79, + 0x9f, 0xd6, 0x35, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha/oslogin.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha/oslogin.pb.go new file mode 100644 index 0000000..7a29571 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha/oslogin.pb.go @@ -0,0 +1,746 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/oslogin/v1alpha/oslogin.proto + +package oslogin // import "google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import common "google.golang.org/genproto/googleapis/cloud/oslogin/common" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The user profile information used for logging in to a virtual machine on +// Google Compute Engine. +type LoginProfile struct { + // A unique user ID for identifying the user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The list of POSIX accounts associated with the Directory API user. + PosixAccounts []*common.PosixAccount `protobuf:"bytes,2,rep,name=posix_accounts,json=posixAccounts,proto3" json:"posix_accounts,omitempty"` + // A map from SSH public key fingerprint to the associated key object. + SshPublicKeys map[string]*common.SshPublicKey `protobuf:"bytes,3,rep,name=ssh_public_keys,json=sshPublicKeys,proto3" json:"ssh_public_keys,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Indicates if the user is suspended. 
+ Suspended bool `protobuf:"varint,4,opt,name=suspended,proto3" json:"suspended,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoginProfile) Reset() { *m = LoginProfile{} } +func (m *LoginProfile) String() string { return proto.CompactTextString(m) } +func (*LoginProfile) ProtoMessage() {} +func (*LoginProfile) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{0} +} +func (m *LoginProfile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoginProfile.Unmarshal(m, b) +} +func (m *LoginProfile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoginProfile.Marshal(b, m, deterministic) +} +func (dst *LoginProfile) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoginProfile.Merge(dst, src) +} +func (m *LoginProfile) XXX_Size() int { + return xxx_messageInfo_LoginProfile.Size(m) +} +func (m *LoginProfile) XXX_DiscardUnknown() { + xxx_messageInfo_LoginProfile.DiscardUnknown(m) +} + +var xxx_messageInfo_LoginProfile proto.InternalMessageInfo + +func (m *LoginProfile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LoginProfile) GetPosixAccounts() []*common.PosixAccount { + if m != nil { + return m.PosixAccounts + } + return nil +} + +func (m *LoginProfile) GetSshPublicKeys() map[string]*common.SshPublicKey { + if m != nil { + return m.SshPublicKeys + } + return nil +} + +func (m *LoginProfile) GetSuspended() bool { + if m != nil { + return m.Suspended + } + return false +} + +// A request message for deleting a POSIX account entry. +type DeletePosixAccountRequest struct { + // A reference to the POSIX account to update. POSIX accounts are identified + // by the project ID they are associated with. A reference to the POSIX + // account is in format `users/{user}/projects/{project}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePosixAccountRequest) Reset() { *m = DeletePosixAccountRequest{} } +func (m *DeletePosixAccountRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePosixAccountRequest) ProtoMessage() {} +func (*DeletePosixAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{1} +} +func (m *DeletePosixAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePosixAccountRequest.Unmarshal(m, b) +} +func (m *DeletePosixAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePosixAccountRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePosixAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePosixAccountRequest.Merge(dst, src) +} +func (m *DeletePosixAccountRequest) XXX_Size() int { + return xxx_messageInfo_DeletePosixAccountRequest.Size(m) +} +func (m *DeletePosixAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePosixAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePosixAccountRequest proto.InternalMessageInfo + +func (m *DeletePosixAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for deleting an SSH public key. +type DeleteSshPublicKeyRequest struct { + // The fingerprint of the public key to update. Public keys are identified by + // their SHA-256 fingerprint. 
The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSshPublicKeyRequest) Reset() { *m = DeleteSshPublicKeyRequest{} } +func (m *DeleteSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSshPublicKeyRequest) ProtoMessage() {} +func (*DeleteSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{2} +} +func (m *DeleteSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *DeleteSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSshPublicKeyRequest.Merge(dst, src) +} +func (m *DeleteSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Size(m) +} +func (m *DeleteSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSshPublicKeyRequest proto.InternalMessageInfo + +func (m *DeleteSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving the login profile information for a user. +type GetLoginProfileRequest struct { + // The unique ID for the user in format `users/{user}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLoginProfileRequest) Reset() { *m = GetLoginProfileRequest{} } +func (m *GetLoginProfileRequest) String() string { return proto.CompactTextString(m) } +func (*GetLoginProfileRequest) ProtoMessage() {} +func (*GetLoginProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{3} +} +func (m *GetLoginProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLoginProfileRequest.Unmarshal(m, b) +} +func (m *GetLoginProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLoginProfileRequest.Marshal(b, m, deterministic) +} +func (dst *GetLoginProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLoginProfileRequest.Merge(dst, src) +} +func (m *GetLoginProfileRequest) XXX_Size() int { + return xxx_messageInfo_GetLoginProfileRequest.Size(m) +} +func (m *GetLoginProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLoginProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLoginProfileRequest proto.InternalMessageInfo + +func (m *GetLoginProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving an SSH public key. +type GetSshPublicKeyRequest struct { + // The fingerprint of the public key to retrieve. Public keys are identified + // by their SHA-256 fingerprint. The fingerprint of the public key is in + // format `users/{user}/sshPublicKeys/{fingerprint}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSshPublicKeyRequest) Reset() { *m = GetSshPublicKeyRequest{} } +func (m *GetSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*GetSshPublicKeyRequest) ProtoMessage() {} +func (*GetSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{4} +} +func (m *GetSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *GetSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *GetSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSshPublicKeyRequest.Merge(dst, src) +} +func (m *GetSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_GetSshPublicKeyRequest.Size(m) +} +func (m *GetSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSshPublicKeyRequest proto.InternalMessageInfo + +func (m *GetSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for importing an SSH public key. +type ImportSshPublicKeyRequest struct { + // The unique ID for the user in format `users/{user}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // The project ID of the Google Cloud Platform project. 
+ ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyRequest) Reset() { *m = ImportSshPublicKeyRequest{} } +func (m *ImportSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyRequest) ProtoMessage() {} +func (*ImportSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{5} +} +func (m *ImportSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyRequest.Merge(dst, src) +} +func (m *ImportSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyRequest.Size(m) +} +func (m *ImportSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyRequest proto.InternalMessageInfo + +func (m *ImportSshPublicKeyRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *ImportSshPublicKeyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// A response message for importing an SSH public key. +type ImportSshPublicKeyResponse struct { + // The login profile information for the user. + LoginProfile *LoginProfile `protobuf:"bytes,1,opt,name=login_profile,json=loginProfile,proto3" json:"login_profile,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyResponse) Reset() { *m = ImportSshPublicKeyResponse{} } +func (m *ImportSshPublicKeyResponse) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyResponse) ProtoMessage() {} +func (*ImportSshPublicKeyResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{6} +} +func (m *ImportSshPublicKeyResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyResponse.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyResponse.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyResponse.Merge(dst, src) +} +func (m *ImportSshPublicKeyResponse) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyResponse.Size(m) +} +func (m *ImportSshPublicKeyResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyResponse proto.InternalMessageInfo + +func (m *ImportSshPublicKeyResponse) GetLoginProfile() *LoginProfile { + if m != nil { + return m.LoginProfile + } + return nil +} + +// A request message for updating an SSH public key. +type UpdateSshPublicKeyRequest struct { + // The fingerprint of the public key to update. 
Public keys are identified by + // their SHA-256 fingerprint. The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // Mask to control which fields get updated. Updates all if not present. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSshPublicKeyRequest) Reset() { *m = UpdateSshPublicKeyRequest{} } +func (m *UpdateSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSshPublicKeyRequest) ProtoMessage() {} +func (*UpdateSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_16f0f854d7b09dcc, []int{7} +} +func (m *UpdateSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *UpdateSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSshPublicKeyRequest.Merge(dst, src) +} +func (m *UpdateSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Size(m) +} +func (m *UpdateSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSshPublicKeyRequest proto.InternalMessageInfo + +func (m *UpdateSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *UpdateSshPublicKeyRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func init() { + proto.RegisterType((*LoginProfile)(nil), "google.cloud.oslogin.v1alpha.LoginProfile") + proto.RegisterMapType((map[string]*common.SshPublicKey)(nil), "google.cloud.oslogin.v1alpha.LoginProfile.SshPublicKeysEntry") + proto.RegisterType((*DeletePosixAccountRequest)(nil), "google.cloud.oslogin.v1alpha.DeletePosixAccountRequest") + proto.RegisterType((*DeleteSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1alpha.DeleteSshPublicKeyRequest") + proto.RegisterType((*GetLoginProfileRequest)(nil), "google.cloud.oslogin.v1alpha.GetLoginProfileRequest") + proto.RegisterType((*GetSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1alpha.GetSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1alpha.ImportSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyResponse)(nil), "google.cloud.oslogin.v1alpha.ImportSshPublicKeyResponse") + proto.RegisterType((*UpdateSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1alpha.UpdateSshPublicKeyRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// OsLoginServiceClient is the client API for OsLoginService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OsLoginServiceClient interface { + // Deletes a POSIX account. + DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) +} + +type osLoginServiceClient struct { + cc *grpc.ClientConn +} + +func NewOsLoginServiceClient(cc *grpc.ClientConn) OsLoginServiceClient { + return &osLoginServiceClient{cc} +} + +func (c *osLoginServiceClient) DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/DeletePosixAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/DeleteSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) { + out := new(LoginProfile) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/GetLoginProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/GetSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) { + out := new(ImportSshPublicKeyResponse) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/ImportSshPublicKey", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1alpha.OsLoginService/UpdateSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// OsLoginServiceServer is the server API for OsLoginService service. +type OsLoginServiceServer interface { + // Deletes a POSIX account. + DeletePosixAccount(context.Context, *DeletePosixAccountRequest) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(context.Context, *DeleteSshPublicKeyRequest) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(context.Context, *GetLoginProfileRequest) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(context.Context, *GetSshPublicKeyRequest) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(context.Context, *ImportSshPublicKeyRequest) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(context.Context, *UpdateSshPublicKeyRequest) (*common.SshPublicKey, error) +} + +func RegisterOsLoginServiceServer(s *grpc.Server, srv OsLoginServiceServer) { + s.RegisterService(&_OsLoginService_serviceDesc, srv) +} + +func _OsLoginService_DeletePosixAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePosixAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/DeletePosixAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, req.(*DeletePosixAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_DeleteSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/DeleteSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, req.(*DeleteSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetLoginProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLoginProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, in) + } + info 
:= &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/GetLoginProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, req.(*GetLoginProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/GetSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, req.(*GetSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_ImportSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/ImportSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, req.(*ImportSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_UpdateSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1alpha.OsLoginService/UpdateSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, req.(*UpdateSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _OsLoginService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.oslogin.v1alpha.OsLoginService", + HandlerType: (*OsLoginServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeletePosixAccount", + Handler: _OsLoginService_DeletePosixAccount_Handler, + }, + { + MethodName: "DeleteSshPublicKey", + Handler: _OsLoginService_DeleteSshPublicKey_Handler, + }, + { + MethodName: "GetLoginProfile", + Handler: _OsLoginService_GetLoginProfile_Handler, + }, + { + MethodName: "GetSshPublicKey", + Handler: _OsLoginService_GetSshPublicKey_Handler, + }, + { + MethodName: "ImportSshPublicKey", + Handler: _OsLoginService_ImportSshPublicKey_Handler, + }, + { + MethodName: "UpdateSshPublicKey", + Handler: _OsLoginService_UpdateSshPublicKey_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/oslogin/v1alpha/oslogin.proto", +} + +func init() { + proto.RegisterFile("google/cloud/oslogin/v1alpha/oslogin.proto", fileDescriptor_oslogin_16f0f854d7b09dcc) +} + +var 
fileDescriptor_oslogin_16f0f854d7b09dcc = []byte{ + // 779 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x6a, 0xdb, 0x4a, + 0x14, 0x46, 0x76, 0x6e, 0x48, 0xc6, 0x4e, 0x72, 0x99, 0x45, 0x70, 0x74, 0x73, 0xc1, 0x88, 0xd0, + 0x3a, 0x26, 0x68, 0x88, 0x5b, 0x68, 0x9a, 0x90, 0x86, 0xfc, 0x35, 0x84, 0xb6, 0xc4, 0x38, 0x34, + 0x8b, 0x12, 0x30, 0x13, 0x79, 0xa2, 0xa8, 0x96, 0x34, 0x53, 0x8d, 0x14, 0x6a, 0x4a, 0x36, 0x7d, + 0x83, 0x12, 0xe8, 0xbe, 0x64, 0xd7, 0x7d, 0x17, 0x5d, 0xf4, 0x05, 0x0a, 0x5d, 0xf5, 0x15, 0x4a, + 0x9f, 0xa3, 0x68, 0x34, 0x4a, 0x64, 0x5b, 0xb6, 0x65, 0xe8, 0xca, 0x3a, 0x73, 0xfe, 0xbe, 0xf3, + 0x9d, 0x1f, 0x0c, 0xaa, 0x26, 0xa5, 0xa6, 0x4d, 0x90, 0x61, 0xd3, 0xa0, 0x85, 0x28, 0xb7, 0xa9, + 0x69, 0xb9, 0xe8, 0x72, 0x15, 0xdb, 0xec, 0x02, 0xc7, 0xb2, 0xce, 0x3c, 0xea, 0x53, 0xb8, 0x18, + 0xd9, 0xea, 0xc2, 0x56, 0x8f, 0x75, 0xd2, 0x56, 0x95, 0x5a, 0x84, 0x99, 0x85, 0xb0, 0xeb, 0x52, + 0x1f, 0xfb, 0x16, 0x75, 0x79, 0xe4, 0xab, 0x56, 0x52, 0xf3, 0x18, 0xd4, 0x71, 0x68, 0xfc, 0x23, + 0x2d, 0xff, 0x93, 0x96, 0x42, 0x3a, 0x0b, 0xce, 0x11, 0x71, 0x98, 0xdf, 0x91, 0xca, 0x72, 0xaf, + 0xf2, 0xdc, 0x22, 0x76, 0xab, 0xe9, 0x60, 0xde, 0x8e, 0x2c, 0xb4, 0xdf, 0x39, 0x50, 0x7c, 0x1e, + 0x06, 0xaf, 0x7b, 0xf4, 0xdc, 0xb2, 0x09, 0x84, 0x60, 0xc2, 0xc5, 0x0e, 0x29, 0x29, 0x65, 0xa5, + 0x32, 0xdd, 0x10, 0xdf, 0xb0, 0x0e, 0x66, 0x19, 0xe5, 0xd6, 0xdb, 0x26, 0x36, 0x0c, 0x1a, 0xb8, + 0x3e, 0x2f, 0xe5, 0xca, 0xf9, 0x4a, 0xa1, 0xb6, 0xac, 0xa7, 0x96, 0x28, 0xf1, 0xd5, 0x43, 0x97, + 0xed, 0xc8, 0xa3, 0x31, 0xc3, 0x12, 0x12, 0x87, 0x04, 0xcc, 0x71, 0x7e, 0xd1, 0x64, 0xc1, 0x99, + 0x6d, 0x19, 0xcd, 0x36, 0xe9, 0xf0, 0x52, 0x5e, 0x84, 0xdc, 0xd4, 0x87, 0xb1, 0xa6, 0x27, 0xa1, + 0xea, 0xc7, 0xfc, 0xa2, 0x2e, 0x02, 0x3c, 0x23, 0x1d, 0xbe, 0xef, 0xfa, 0x5e, 0xa7, 0x31, 0xc3, + 0x93, 0x6f, 0x70, 0x11, 0x4c, 0xf3, 0x80, 0x33, 0xe2, 0xb6, 0x48, 0xab, 0x34, 0x51, 0x56, 0x2a, + 0x53, 0x8d, 0xbb, 0x07, 0xb5, 0x0d, 0x60, 0x7f, 0x08, 0xf8, 0x2f, 0xc8, 0xb7, 0x49, 0x47, 0xd6, + 0x1f, 0x7e, 0xc2, 0x2d, 0xf0, 0xcf, 0x25, 0xb6, 0x03, 0x52, 0xca, 0x95, 0x95, 0x91, 0x55, 0x27, + 0x23, 0x36, 0x22, 0xbf, 0xf5, 0xdc, 0x9a, 0xa2, 0x21, 0xb0, 0xb0, 0x47, 0x6c, 0xe2, 0x93, 0x2e, + 0x5a, 0xc8, 0x9b, 0x80, 0x70, 0x3f, 0x8d, 0xf4, 0x3b, 0x87, 0xae, 0x88, 0x43, 0x1c, 0x56, 0xc0, + 0xfc, 0x01, 0xf1, 0x93, 0x0c, 0x8d, 0xb6, 0xce, 0x1a, 0xfb, 0x46, 0x01, 0x0b, 0x87, 0x0e, 0xa3, + 0x5e, 0xaa, 0xc7, 0x3c, 0x98, 0x64, 0xd8, 0x23, 0xae, 0x2f, 0x7d, 0xa4, 0x04, 0x8f, 0xc0, 0x6c, + 0x77, 0x97, 0xc7, 0x67, 0xb0, 0x98, 0x6c, 0x28, 0xfc, 0x1f, 0x00, 0xe6, 0xd1, 0xd7, 0xc4, 0xf0, + 0x9b, 0x56, 0xab, 0x94, 0x17, 0xc9, 0xa6, 0xe5, 0xcb, 0x61, 0x4b, 0x73, 0x80, 0x9a, 0x06, 0x92, + 0x33, 0xea, 0x72, 0x02, 0x8f, 0xc0, 0x8c, 0xc8, 0xd3, 0x64, 0x11, 0x3b, 0x02, 0x6c, 0xa1, 0x56, + 0xcd, 0x3e, 0x71, 0x8d, 0xa2, 0x9d, 0x90, 0xb4, 0x6f, 0x0a, 0x58, 0x78, 0xc9, 0x5a, 0x38, 0x73, + 0x8b, 0xfe, 0x3e, 0x21, 0x1b, 0xa0, 0x10, 0x08, 0x04, 0x62, 0xa7, 0x05, 0x23, 0x85, 0x9a, 0x1a, + 0x47, 0x8b, 0xd7, 0x5e, 0x7f, 0x1a, 0xae, 0xfd, 0x0b, 0xcc, 0xdb, 0x0d, 0x10, 0x99, 0x87, 0xdf, + 0xb5, 0xeb, 0x29, 0x30, 0x7b, 0xc4, 0x45, 0x81, 0xc7, 0xc4, 0xbb, 0xb4, 0x0c, 0x02, 0x3f, 0x28, + 0x00, 0xf6, 0x8f, 0x29, 0x7c, 0x34, 0x9c, 0xa3, 0x81, 0x83, 0xad, 0xce, 0xf7, 0x41, 0xd9, 0x0f, + 0xcf, 0x93, 0x56, 0x7d, 0xff, 0xf3, 0xd7, 0x75, 0x6e, 0xa9, 0xaa, 0xdd, 0xde, 0xce, 0x77, 0x21, + 0x41, 0x9b, 0x01, 0x27, 0x1e, 0x47, 0x55, 0x24, 0x7b, 0xca, 0x51, 0xf5, 0x0a, 0x7e, 0xbc, 0xc5, + 0x94, 0x24, 0x22, 0x1b, 
0xa6, 0x94, 0xc6, 0x0c, 0xc4, 0x84, 0x04, 0xa6, 0xe5, 0xea, 0xfd, 0x01, + 0x98, 0xba, 0x4e, 0x4b, 0x08, 0xec, 0x93, 0x02, 0xe6, 0x7a, 0x36, 0x0e, 0x3e, 0x1c, 0x8e, 0x2a, + 0x7d, 0x41, 0xd5, 0x31, 0x66, 0x50, 0x5b, 0x11, 0x30, 0xef, 0xc1, 0xa5, 0x74, 0x98, 0x57, 0x28, + 0x39, 0xa3, 0xf0, 0x26, 0xc2, 0xd8, 0xc5, 0xdc, 0x68, 0x8c, 0x69, 0xb4, 0x65, 0x9f, 0xd1, 0x98, + 0x49, 0x98, 0x99, 0xc9, 0x1f, 0x0a, 0x80, 0xfd, 0x9b, 0x3b, 0xaa, 0xc5, 0x03, 0x0f, 0x92, 0xba, + 0x36, 0xbe, 0x63, 0x74, 0x24, 0xb4, 0x3d, 0x01, 0xfd, 0x89, 0xb6, 0x72, 0x07, 0x3d, 0x3a, 0x66, + 0xb7, 0xfc, 0xae, 0x5b, 0x7d, 0xde, 0xeb, 0x3d, 0x5b, 0x0d, 0xbf, 0x2a, 0x00, 0xf6, 0x5f, 0x86, + 0x51, 0xf5, 0x0c, 0xbc, 0x25, 0xe3, 0x70, 0xbf, 0x25, 0x0a, 0x78, 0x5c, 0xcb, 0xca, 0x7d, 0x2f, + 0xf6, 0x9d, 0x2f, 0x0a, 0x28, 0x1b, 0xd4, 0x19, 0x0a, 0x75, 0xa7, 0x28, 0xef, 0x46, 0x3d, 0x5c, + 0xa1, 0xba, 0xf2, 0x6a, 0x57, 0x5a, 0x9b, 0xd4, 0xc6, 0xae, 0xa9, 0x53, 0xcf, 0x44, 0x26, 0x71, + 0xc5, 0x82, 0xa1, 0x48, 0x85, 0x99, 0xc5, 0xd3, 0xff, 0x36, 0x6d, 0x48, 0xf9, 0x73, 0x6e, 0xf1, + 0x20, 0x8a, 0xb2, 0x2b, 0x72, 0xca, 0x14, 0xfa, 0xc9, 0xea, 0x76, 0x68, 0xf6, 0x3d, 0x56, 0x9f, + 0x0a, 0xf5, 0xa9, 0x54, 0x9f, 0x9e, 0x44, 0x51, 0xce, 0x26, 0x45, 0xb6, 0x07, 0x7f, 0x02, 0x00, + 0x00, 0xff, 0xff, 0xf8, 0xf2, 0xe5, 0x19, 0xa3, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1beta/oslogin.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1beta/oslogin.pb.go new file mode 100644 index 0000000..25b296c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/oslogin/v1beta/oslogin.pb.go @@ -0,0 +1,747 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/oslogin/v1beta/oslogin.proto + +package oslogin // import "google.golang.org/genproto/googleapis/cloud/oslogin/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import common "google.golang.org/genproto/googleapis/cloud/oslogin/common" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The user profile information used for logging in to a virtual machine on +// Google Compute Engine. +type LoginProfile struct { + // The primary email address that uniquely identifies the user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The list of POSIX accounts associated with the user. + PosixAccounts []*common.PosixAccount `protobuf:"bytes,2,rep,name=posix_accounts,json=posixAccounts,proto3" json:"posix_accounts,omitempty"` + // A map from SSH public key fingerprint to the associated key object. 
+ SshPublicKeys map[string]*common.SshPublicKey `protobuf:"bytes,3,rep,name=ssh_public_keys,json=sshPublicKeys,proto3" json:"ssh_public_keys,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Indicates if the user is suspended. A suspended user cannot log in but + // their profile information is retained. + Suspended bool `protobuf:"varint,4,opt,name=suspended,proto3" json:"suspended,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoginProfile) Reset() { *m = LoginProfile{} } +func (m *LoginProfile) String() string { return proto.CompactTextString(m) } +func (*LoginProfile) ProtoMessage() {} +func (*LoginProfile) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{0} +} +func (m *LoginProfile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoginProfile.Unmarshal(m, b) +} +func (m *LoginProfile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoginProfile.Marshal(b, m, deterministic) +} +func (dst *LoginProfile) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoginProfile.Merge(dst, src) +} +func (m *LoginProfile) XXX_Size() int { + return xxx_messageInfo_LoginProfile.Size(m) +} +func (m *LoginProfile) XXX_DiscardUnknown() { + xxx_messageInfo_LoginProfile.DiscardUnknown(m) +} + +var xxx_messageInfo_LoginProfile proto.InternalMessageInfo + +func (m *LoginProfile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LoginProfile) GetPosixAccounts() []*common.PosixAccount { + if m != nil { + return m.PosixAccounts + } + return nil +} + +func (m *LoginProfile) GetSshPublicKeys() map[string]*common.SshPublicKey { + if m != nil { + return m.SshPublicKeys + } + return nil +} + +func (m *LoginProfile) GetSuspended() bool { + if m != nil { + return m.Suspended + } + return false +} + +// A request message for deleting a POSIX account entry. +type DeletePosixAccountRequest struct { + // A reference to the POSIX account to update. POSIX accounts are identified + // by the project ID they are associated with. A reference to the POSIX + // account is in format `users/{user}/projects/{project}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePosixAccountRequest) Reset() { *m = DeletePosixAccountRequest{} } +func (m *DeletePosixAccountRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePosixAccountRequest) ProtoMessage() {} +func (*DeletePosixAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{1} +} +func (m *DeletePosixAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePosixAccountRequest.Unmarshal(m, b) +} +func (m *DeletePosixAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePosixAccountRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePosixAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePosixAccountRequest.Merge(dst, src) +} +func (m *DeletePosixAccountRequest) XXX_Size() int { + return xxx_messageInfo_DeletePosixAccountRequest.Size(m) +} +func (m *DeletePosixAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePosixAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePosixAccountRequest proto.InternalMessageInfo + +func (m *DeletePosixAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for deleting an SSH public key. +type DeleteSshPublicKeyRequest struct { + // The fingerprint of the public key to update. Public keys are identified by + // their SHA-256 fingerprint. The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSshPublicKeyRequest) Reset() { *m = DeleteSshPublicKeyRequest{} } +func (m *DeleteSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSshPublicKeyRequest) ProtoMessage() {} +func (*DeleteSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{2} +} +func (m *DeleteSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *DeleteSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSshPublicKeyRequest.Merge(dst, src) +} +func (m *DeleteSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSshPublicKeyRequest.Size(m) +} +func (m *DeleteSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSshPublicKeyRequest proto.InternalMessageInfo + +func (m *DeleteSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving the login profile information for a user. +type GetLoginProfileRequest struct { + // The unique ID for the user in format `users/{user}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLoginProfileRequest) Reset() { *m = GetLoginProfileRequest{} } +func (m *GetLoginProfileRequest) String() string { return proto.CompactTextString(m) } +func (*GetLoginProfileRequest) ProtoMessage() {} +func (*GetLoginProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{3} +} +func (m *GetLoginProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLoginProfileRequest.Unmarshal(m, b) +} +func (m *GetLoginProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLoginProfileRequest.Marshal(b, m, deterministic) +} +func (dst *GetLoginProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLoginProfileRequest.Merge(dst, src) +} +func (m *GetLoginProfileRequest) XXX_Size() int { + return xxx_messageInfo_GetLoginProfileRequest.Size(m) +} +func (m *GetLoginProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLoginProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLoginProfileRequest proto.InternalMessageInfo + +func (m *GetLoginProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for retrieving an SSH public key. +type GetSshPublicKeyRequest struct { + // The fingerprint of the public key to retrieve. Public keys are identified + // by their SHA-256 fingerprint. The fingerprint of the public key is in + // format `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSshPublicKeyRequest) Reset() { *m = GetSshPublicKeyRequest{} } +func (m *GetSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*GetSshPublicKeyRequest) ProtoMessage() {} +func (*GetSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{4} +} +func (m *GetSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *GetSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *GetSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSshPublicKeyRequest.Merge(dst, src) +} +func (m *GetSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_GetSshPublicKeyRequest.Size(m) +} +func (m *GetSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSshPublicKeyRequest proto.InternalMessageInfo + +func (m *GetSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A request message for importing an SSH public key. +type ImportSshPublicKeyRequest struct { + // The unique ID for the user in format `users/{user}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // The project ID of the Google Cloud Platform project. 
+ ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyRequest) Reset() { *m = ImportSshPublicKeyRequest{} } +func (m *ImportSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyRequest) ProtoMessage() {} +func (*ImportSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{5} +} +func (m *ImportSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyRequest.Merge(dst, src) +} +func (m *ImportSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyRequest.Size(m) +} +func (m *ImportSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyRequest proto.InternalMessageInfo + +func (m *ImportSshPublicKeyRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *ImportSshPublicKeyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// A response message for importing an SSH public key. +type ImportSshPublicKeyResponse struct { + // The login profile information for the user. + LoginProfile *LoginProfile `protobuf:"bytes,1,opt,name=login_profile,json=loginProfile,proto3" json:"login_profile,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportSshPublicKeyResponse) Reset() { *m = ImportSshPublicKeyResponse{} } +func (m *ImportSshPublicKeyResponse) String() string { return proto.CompactTextString(m) } +func (*ImportSshPublicKeyResponse) ProtoMessage() {} +func (*ImportSshPublicKeyResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{6} +} +func (m *ImportSshPublicKeyResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportSshPublicKeyResponse.Unmarshal(m, b) +} +func (m *ImportSshPublicKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportSshPublicKeyResponse.Marshal(b, m, deterministic) +} +func (dst *ImportSshPublicKeyResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportSshPublicKeyResponse.Merge(dst, src) +} +func (m *ImportSshPublicKeyResponse) XXX_Size() int { + return xxx_messageInfo_ImportSshPublicKeyResponse.Size(m) +} +func (m *ImportSshPublicKeyResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportSshPublicKeyResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportSshPublicKeyResponse proto.InternalMessageInfo + +func (m *ImportSshPublicKeyResponse) GetLoginProfile() *LoginProfile { + if m != nil { + return m.LoginProfile + } + return nil +} + +// A request message for updating an SSH public key. +type UpdateSshPublicKeyRequest struct { + // The fingerprint of the public key to update. 
Public keys are identified by + // their SHA-256 fingerprint. The fingerprint of the public key is in format + // `users/{user}/sshPublicKeys/{fingerprint}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The SSH public key and expiration time. + SshPublicKey *common.SshPublicKey `protobuf:"bytes,2,opt,name=ssh_public_key,json=sshPublicKey,proto3" json:"ssh_public_key,omitempty"` + // Mask to control which fields get updated. Updates all if not present. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSshPublicKeyRequest) Reset() { *m = UpdateSshPublicKeyRequest{} } +func (m *UpdateSshPublicKeyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSshPublicKeyRequest) ProtoMessage() {} +func (*UpdateSshPublicKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_oslogin_e03b1f0aa7c54cba, []int{7} +} +func (m *UpdateSshPublicKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Unmarshal(m, b) +} +func (m *UpdateSshPublicKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSshPublicKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSshPublicKeyRequest.Merge(dst, src) +} +func (m *UpdateSshPublicKeyRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSshPublicKeyRequest.Size(m) +} +func (m *UpdateSshPublicKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSshPublicKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSshPublicKeyRequest proto.InternalMessageInfo + +func (m *UpdateSshPublicKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateSshPublicKeyRequest) GetSshPublicKey() *common.SshPublicKey { + if m != nil { + return m.SshPublicKey + } + return nil +} + +func (m *UpdateSshPublicKeyRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func init() { + proto.RegisterType((*LoginProfile)(nil), "google.cloud.oslogin.v1beta.LoginProfile") + proto.RegisterMapType((map[string]*common.SshPublicKey)(nil), "google.cloud.oslogin.v1beta.LoginProfile.SshPublicKeysEntry") + proto.RegisterType((*DeletePosixAccountRequest)(nil), "google.cloud.oslogin.v1beta.DeletePosixAccountRequest") + proto.RegisterType((*DeleteSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1beta.DeleteSshPublicKeyRequest") + proto.RegisterType((*GetLoginProfileRequest)(nil), "google.cloud.oslogin.v1beta.GetLoginProfileRequest") + proto.RegisterType((*GetSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1beta.GetSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1beta.ImportSshPublicKeyRequest") + proto.RegisterType((*ImportSshPublicKeyResponse)(nil), "google.cloud.oslogin.v1beta.ImportSshPublicKeyResponse") + proto.RegisterType((*UpdateSshPublicKeyRequest)(nil), "google.cloud.oslogin.v1beta.UpdateSshPublicKeyRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// OsLoginServiceClient is the client API for OsLoginService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OsLoginServiceClient interface { + // Deletes a POSIX account. + DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) +} + +type osLoginServiceClient struct { + cc *grpc.ClientConn +} + +func NewOsLoginServiceClient(cc *grpc.ClientConn) OsLoginServiceClient { + return &osLoginServiceClient{cc} +} + +func (c *osLoginServiceClient) DeletePosixAccount(ctx context.Context, in *DeletePosixAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/DeletePosixAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) DeleteSshPublicKey(ctx context.Context, in *DeleteSshPublicKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/DeleteSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetLoginProfile(ctx context.Context, in *GetLoginProfileRequest, opts ...grpc.CallOption) (*LoginProfile, error) { + out := new(LoginProfile) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/GetLoginProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) GetSshPublicKey(ctx context.Context, in *GetSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/GetSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) ImportSshPublicKey(ctx context.Context, in *ImportSshPublicKeyRequest, opts ...grpc.CallOption) (*ImportSshPublicKeyResponse, error) { + out := new(ImportSshPublicKeyResponse) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/ImportSshPublicKey", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *osLoginServiceClient) UpdateSshPublicKey(ctx context.Context, in *UpdateSshPublicKeyRequest, opts ...grpc.CallOption) (*common.SshPublicKey, error) { + out := new(common.SshPublicKey) + err := c.cc.Invoke(ctx, "/google.cloud.oslogin.v1beta.OsLoginService/UpdateSshPublicKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// OsLoginServiceServer is the server API for OsLoginService service. +type OsLoginServiceServer interface { + // Deletes a POSIX account. + DeletePosixAccount(context.Context, *DeletePosixAccountRequest) (*empty.Empty, error) + // Deletes an SSH public key. + DeleteSshPublicKey(context.Context, *DeleteSshPublicKeyRequest) (*empty.Empty, error) + // Retrieves the profile information used for logging in to a virtual machine + // on Google Compute Engine. + GetLoginProfile(context.Context, *GetLoginProfileRequest) (*LoginProfile, error) + // Retrieves an SSH public key. + GetSshPublicKey(context.Context, *GetSshPublicKeyRequest) (*common.SshPublicKey, error) + // Adds an SSH public key and returns the profile information. Default POSIX + // account information is set when no username and UID exist as part of the + // login profile. + ImportSshPublicKey(context.Context, *ImportSshPublicKeyRequest) (*ImportSshPublicKeyResponse, error) + // Updates an SSH public key and returns the profile information. This method + // supports patch semantics. + UpdateSshPublicKey(context.Context, *UpdateSshPublicKeyRequest) (*common.SshPublicKey, error) +} + +func RegisterOsLoginServiceServer(s *grpc.Server, srv OsLoginServiceServer) { + s.RegisterService(&_OsLoginService_serviceDesc, srv) +} + +func _OsLoginService_DeletePosixAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePosixAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/DeletePosixAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeletePosixAccount(ctx, req.(*DeletePosixAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_DeleteSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/DeleteSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).DeleteSshPublicKey(ctx, req.(*DeleteSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetLoginProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLoginProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/GetLoginProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetLoginProfile(ctx, req.(*GetLoginProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_GetSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/GetSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).GetSshPublicKey(ctx, req.(*GetSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_ImportSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/ImportSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).ImportSshPublicKey(ctx, req.(*ImportSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _OsLoginService_UpdateSshPublicKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSshPublicKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.oslogin.v1beta.OsLoginService/UpdateSshPublicKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OsLoginServiceServer).UpdateSshPublicKey(ctx, req.(*UpdateSshPublicKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _OsLoginService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.oslogin.v1beta.OsLoginService", + HandlerType: (*OsLoginServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeletePosixAccount", + Handler: _OsLoginService_DeletePosixAccount_Handler, + }, + { + MethodName: "DeleteSshPublicKey", + Handler: _OsLoginService_DeleteSshPublicKey_Handler, + }, + { + MethodName: "GetLoginProfile", + Handler: _OsLoginService_GetLoginProfile_Handler, + }, + { + MethodName: "GetSshPublicKey", + Handler: _OsLoginService_GetSshPublicKey_Handler, + }, + { + MethodName: "ImportSshPublicKey", + Handler: _OsLoginService_ImportSshPublicKey_Handler, + }, + { + MethodName: "UpdateSshPublicKey", + Handler: _OsLoginService_UpdateSshPublicKey_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/oslogin/v1beta/oslogin.proto", +} + +func init() { + proto.RegisterFile("google/cloud/oslogin/v1beta/oslogin.proto", fileDescriptor_oslogin_e03b1f0aa7c54cba) +} + +var fileDescriptor_oslogin_e03b1f0aa7c54cba 
= []byte{ + // 780 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x96, 0x4f, 0x4f, 0x13, 0x4f, + 0x18, 0xc7, 0xb3, 0x2d, 0x3f, 0x02, 0x43, 0x81, 0x5f, 0xe6, 0x40, 0xca, 0x82, 0xb1, 0x2e, 0xd1, + 0x94, 0x62, 0x76, 0x43, 0x49, 0x94, 0x80, 0x68, 0x2c, 0x22, 0x21, 0xfe, 0xa1, 0x29, 0x91, 0x83, + 0x21, 0x69, 0xa6, 0xbb, 0xc3, 0xb2, 0x76, 0x77, 0x67, 0xdc, 0xd9, 0x25, 0x36, 0x86, 0x8b, 0x89, + 0x67, 0x0f, 0x7a, 0x36, 0x31, 0xde, 0xbc, 0x79, 0xf2, 0xe4, 0x1b, 0xe0, 0xea, 0x5b, 0xd0, 0xf7, + 0x61, 0x76, 0x76, 0x16, 0xb6, 0xed, 0xb6, 0x5d, 0x12, 0x4f, 0xdd, 0x99, 0xe7, 0xdf, 0x67, 0xbe, + 0x33, 0xcf, 0x93, 0x82, 0x65, 0x93, 0x10, 0xd3, 0xc6, 0x9a, 0x6e, 0x93, 0xc0, 0xd0, 0x08, 0xb3, + 0x89, 0x69, 0xb9, 0xda, 0xe9, 0x6a, 0x0b, 0xfb, 0x28, 0x5e, 0xaa, 0xd4, 0x23, 0x3e, 0x81, 0x0b, + 0x91, 0xab, 0xca, 0x5d, 0xd5, 0xd8, 0x16, 0xb9, 0xca, 0x8b, 0x22, 0x0f, 0xa2, 0x96, 0x86, 0x5c, + 0x97, 0xf8, 0xc8, 0xb7, 0x88, 0xcb, 0xa2, 0x50, 0xb9, 0x9c, 0x5a, 0x45, 0x27, 0x8e, 0x43, 0xe2, + 0x1f, 0xe1, 0x29, 0x8a, 0x68, 0x7c, 0xd5, 0x0a, 0x8e, 0x35, 0xec, 0x50, 0xbf, 0x23, 0x8c, 0xa5, + 0x5e, 0xe3, 0xb1, 0x85, 0x6d, 0xa3, 0xe9, 0x20, 0xd6, 0x8e, 0x3c, 0x94, 0x3f, 0x39, 0x50, 0x78, + 0x1a, 0x26, 0xaf, 0x7b, 0xe4, 0xd8, 0xb2, 0x31, 0x84, 0x60, 0xcc, 0x45, 0x0e, 0x2e, 0x4a, 0x25, + 0xa9, 0x3c, 0xd9, 0xe0, 0xdf, 0xb0, 0x0e, 0x66, 0x28, 0x61, 0xd6, 0x9b, 0x26, 0xd2, 0x75, 0x12, + 0xb8, 0x3e, 0x2b, 0xe6, 0x4a, 0xf9, 0xf2, 0x54, 0x75, 0x59, 0x4d, 0x3d, 0xa1, 0xe0, 0xab, 0x87, + 0x21, 0x0f, 0xa3, 0x88, 0xc6, 0x34, 0x4d, 0xac, 0x18, 0x34, 0xc0, 0x2c, 0x63, 0x27, 0x4d, 0x1a, + 0xb4, 0x6c, 0x4b, 0x6f, 0xb6, 0x71, 0x87, 0x15, 0xf3, 0x3c, 0xe5, 0x3d, 0x75, 0x88, 0x68, 0x6a, + 0x92, 0x54, 0x3d, 0x60, 0x27, 0x75, 0x1e, 0xff, 0x04, 0x77, 0xd8, 0x8e, 0xeb, 0x7b, 0x9d, 0xc6, + 0x34, 0x4b, 0xee, 0xc1, 0x45, 0x30, 0xc9, 0x02, 0x46, 0xb1, 0x6b, 0x60, 0xa3, 0x38, 0x56, 0x92, + 0xca, 0x13, 0x8d, 0xcb, 0x0d, 0xb9, 0x0d, 0x60, 0x7f, 0x0a, 0xf8, 0x3f, 0xc8, 0xb7, 0x71, 0x47, + 0x1c, 0x3f, 0xfc, 0x84, 0x0f, 0xc0, 0x7f, 0xa7, 0xc8, 0x0e, 0x70, 0x31, 0x57, 0x92, 0x46, 0x1e, + 0x3a, 0x99, 0xb1, 0x11, 0xc5, 0x6d, 0xe4, 0xd6, 0x25, 0x45, 0x03, 0xf3, 0x8f, 0xb0, 0x8d, 0x7d, + 0xdc, 0xa5, 0x0a, 0x7e, 0x1d, 0x60, 0xe6, 0xa7, 0x69, 0x7e, 0x19, 0xd0, 0x95, 0x71, 0x48, 0xc0, + 0x6d, 0x30, 0xb7, 0x8b, 0xfd, 0xa4, 0x42, 0xa3, 0xbd, 0xb3, 0xe6, 0xfe, 0x2a, 0x81, 0xf9, 0x3d, + 0x87, 0x12, 0x2f, 0x35, 0x62, 0x0e, 0x8c, 0x53, 0xe4, 0x61, 0xd7, 0x17, 0x31, 0x62, 0x05, 0xf7, + 0xc1, 0x4c, 0xf7, 0x25, 0x5f, 0x5d, 0xc1, 0x42, 0xf2, 0x42, 0xe1, 0x35, 0x00, 0xa8, 0x47, 0x5e, + 0x61, 0xdd, 0x6f, 0x5a, 0x46, 0x31, 0xcf, 0x8b, 0x4d, 0x8a, 0x9d, 0x3d, 0x43, 0xb1, 0x81, 0x9c, + 0x06, 0xc9, 0x28, 0x71, 0x19, 0x86, 0xcf, 0xc1, 0x34, 0xaf, 0xd3, 0xa4, 0x91, 0x3a, 0x1c, 0x76, + 0x20, 0x4c, 0xca, 0x83, 0x6b, 0x14, 0xec, 0xc4, 0x4a, 0xf9, 0x29, 0x81, 0xf9, 0x17, 0xd4, 0x40, + 0x99, 0x6f, 0xe8, 0xdf, 0xeb, 0xb1, 0x09, 0xa6, 0x02, 0x4e, 0xc0, 0x3b, 0x9a, 0x0b, 0x32, 0x55, + 0x95, 0xe3, 0x6c, 0x71, 0xd3, 0xab, 0x8f, 0xc3, 0xa6, 0x7f, 0x86, 0x58, 0xbb, 0x01, 0x22, 0xf7, + 0xf0, 0xbb, 0xfa, 0x7e, 0x02, 0xcc, 0xec, 0x33, 0x7e, 0xc0, 0x03, 0xec, 0x9d, 0x5a, 0x3a, 0x86, + 0x1f, 0x24, 0x00, 0xfb, 0x5f, 0x29, 0xbc, 0x33, 0x54, 0xa2, 0x81, 0xcf, 0x5a, 0x9e, 0xeb, 0x23, + 0xd9, 0x09, 0x67, 0x93, 0xb2, 0xfc, 0xee, 0xd7, 0xef, 0x8f, 0xb9, 0xa5, 0xca, 0x8d, 0x78, 0x6c, + 0xbe, 0x0d, 0xe5, 0xd9, 0x0a, 0x18, 0xf6, 0x98, 0x56, 0xd1, 0xc4, 0x85, 0x32, 0xad, 0x72, 0x06, + 0x3f, 0x5d, 0x10, 0x25, 0x65, 0xc8, 0x44, 0x94, 0x72, 0x2b, 
0x03, 0x89, 0x54, 0x4e, 0x54, 0xae, + 0xdc, 0x4a, 0x27, 0xea, 0x9a, 0x2a, 0x21, 0xd6, 0x67, 0x09, 0xcc, 0xf6, 0x34, 0x1b, 0x5c, 0x1b, + 0xca, 0x94, 0xde, 0x9a, 0x72, 0xf6, 0xd7, 0xa7, 0xac, 0x70, 0xc6, 0x9b, 0x70, 0x29, 0x95, 0xf1, + 0x4c, 0x4b, 0x3e, 0x4e, 0xf8, 0x25, 0x02, 0xec, 0x12, 0x6d, 0x24, 0x60, 0x9a, 0x62, 0xd9, 0xdf, + 0x66, 0x2c, 0x22, 0xcc, 0x2a, 0xe2, 0xb9, 0x04, 0x60, 0x7f, 0xbf, 0x8e, 0xb8, 0xdb, 0x81, 0x53, + 0x48, 0xbe, 0x7b, 0xe5, 0xb8, 0x68, 0x30, 0x28, 0xdb, 0x9c, 0x7b, 0x4b, 0x59, 0xb9, 0xe0, 0x8e, + 0xe6, 0xd7, 0x85, 0xb4, 0x1b, 0x56, 0x5f, 0xf0, 0x46, 0x4f, 0x27, 0xc3, 0x1f, 0x12, 0x80, 0xfd, + 0xd3, 0x60, 0xc4, 0x61, 0x06, 0x8e, 0x8f, 0xab, 0xc8, 0x7e, 0x9f, 0xe3, 0xaf, 0x57, 0x33, 0xca, + 0xde, 0x4b, 0x5e, 0xfb, 0x2e, 0x81, 0xeb, 0x3a, 0x71, 0x86, 0x81, 0xd6, 0x0a, 0x62, 0x50, 0xd4, + 0xc3, 0xb6, 0xa9, 0x4b, 0x2f, 0x6b, 0xc2, 0xd9, 0x24, 0x36, 0x72, 0x4d, 0x95, 0x78, 0xa6, 0x66, + 0x62, 0x97, 0x37, 0x95, 0x16, 0x99, 0x10, 0xb5, 0x58, 0xea, 0x7f, 0xa4, 0x4d, 0xb1, 0xfc, 0x96, + 0x5b, 0xd8, 0x8d, 0x92, 0x6c, 0xf3, 0x8a, 0xa2, 0x82, 0x7a, 0xb8, 0x5a, 0xc3, 0x3e, 0x3a, 0x8f, + 0xad, 0x47, 0xdc, 0x7a, 0x24, 0xac, 0x47, 0x87, 0x3c, 0x47, 0x6b, 0x9c, 0x97, 0x5a, 0xfb, 0x1b, + 0x00, 0x00, 0xff, 0xff, 0xc7, 0x96, 0xd9, 0xc2, 0x8d, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1/phishingprotection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1/phishingprotection.pb.go new file mode 100644 index 0000000..3009348 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1/phishingprotection.pb.go @@ -0,0 +1,225 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/phishingprotection/v1beta1/phishingprotection.proto + +package phishingprotection // import "google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The ReportPhishing request message. +type ReportPhishingRequest struct { + // Required. The name of the project for which the report will be created, + // in the format "projects/{project_number}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The URI that is being reported for phishing content to be analyzed. 
+ Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportPhishingRequest) Reset() { *m = ReportPhishingRequest{} } +func (m *ReportPhishingRequest) String() string { return proto.CompactTextString(m) } +func (*ReportPhishingRequest) ProtoMessage() {} +func (*ReportPhishingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_phishingprotection_3f3199ea7ffa4ad5, []int{0} +} +func (m *ReportPhishingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportPhishingRequest.Unmarshal(m, b) +} +func (m *ReportPhishingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportPhishingRequest.Marshal(b, m, deterministic) +} +func (dst *ReportPhishingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportPhishingRequest.Merge(dst, src) +} +func (m *ReportPhishingRequest) XXX_Size() int { + return xxx_messageInfo_ReportPhishingRequest.Size(m) +} +func (m *ReportPhishingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReportPhishingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportPhishingRequest proto.InternalMessageInfo + +func (m *ReportPhishingRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ReportPhishingRequest) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The ReportPhishing (empty) response message. +type ReportPhishingResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportPhishingResponse) Reset() { *m = ReportPhishingResponse{} } +func (m *ReportPhishingResponse) String() string { return proto.CompactTextString(m) } +func (*ReportPhishingResponse) ProtoMessage() {} +func (*ReportPhishingResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_phishingprotection_3f3199ea7ffa4ad5, []int{1} +} +func (m *ReportPhishingResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportPhishingResponse.Unmarshal(m, b) +} +func (m *ReportPhishingResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportPhishingResponse.Marshal(b, m, deterministic) +} +func (dst *ReportPhishingResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportPhishingResponse.Merge(dst, src) +} +func (m *ReportPhishingResponse) XXX_Size() int { + return xxx_messageInfo_ReportPhishingResponse.Size(m) +} +func (m *ReportPhishingResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReportPhishingResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportPhishingResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ReportPhishingRequest)(nil), "google.cloud.phishingprotection.v1beta1.ReportPhishingRequest") + proto.RegisterType((*ReportPhishingResponse)(nil), "google.cloud.phishingprotection.v1beta1.ReportPhishingResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PhishingProtectionServiceV1Beta1Client is the client API for PhishingProtectionServiceV1Beta1 service. 
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type PhishingProtectionServiceV1Beta1Client interface {
+	// Reports a URI suspected of containing phishing content to be reviewed. Once
+	// the report review is completed, if its result verifies the existence of
+	// malicious phishing content, the site will be added to [Google's Social
+	// Engineering lists](https://support.google.com/webmasters/answer/6350487/)
+	// in order to protect users that could get exposed to this threat in
+	// the future.
+	ReportPhishing(ctx context.Context, in *ReportPhishingRequest, opts ...grpc.CallOption) (*ReportPhishingResponse, error)
+}
+
+type phishingProtectionServiceV1Beta1Client struct {
+	cc *grpc.ClientConn
+}
+
+func NewPhishingProtectionServiceV1Beta1Client(cc *grpc.ClientConn) PhishingProtectionServiceV1Beta1Client {
+	return &phishingProtectionServiceV1Beta1Client{cc}
+}
+
+func (c *phishingProtectionServiceV1Beta1Client) ReportPhishing(ctx context.Context, in *ReportPhishingRequest, opts ...grpc.CallOption) (*ReportPhishingResponse, error) {
+	out := new(ReportPhishingResponse)
+	err := c.cc.Invoke(ctx, "/google.cloud.phishingprotection.v1beta1.PhishingProtectionServiceV1Beta1/ReportPhishing", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+// PhishingProtectionServiceV1Beta1Server is the server API for PhishingProtectionServiceV1Beta1 service.
+type PhishingProtectionServiceV1Beta1Server interface {
+	// Reports a URI suspected of containing phishing content to be reviewed. Once
+	// the report review is completed, if its result verifies the existence of
+	// malicious phishing content, the site will be added to [Google's Social
+	// Engineering lists](https://support.google.com/webmasters/answer/6350487/)
+	// in order to protect users that could get exposed to this threat in
+	// the future.
+ ReportPhishing(context.Context, *ReportPhishingRequest) (*ReportPhishingResponse, error) +} + +func RegisterPhishingProtectionServiceV1Beta1Server(s *grpc.Server, srv PhishingProtectionServiceV1Beta1Server) { + s.RegisterService(&_PhishingProtectionServiceV1Beta1_serviceDesc, srv) +} + +func _PhishingProtectionServiceV1Beta1_ReportPhishing_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportPhishingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PhishingProtectionServiceV1Beta1Server).ReportPhishing(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.phishingprotection.v1beta1.PhishingProtectionServiceV1Beta1/ReportPhishing", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PhishingProtectionServiceV1Beta1Server).ReportPhishing(ctx, req.(*ReportPhishingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PhishingProtectionServiceV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.phishingprotection.v1beta1.PhishingProtectionServiceV1Beta1", + HandlerType: (*PhishingProtectionServiceV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ReportPhishing", + Handler: _PhishingProtectionServiceV1Beta1_ReportPhishing_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/phishingprotection/v1beta1/phishingprotection.proto", +} + +func init() { + proto.RegisterFile("google/cloud/phishingprotection/v1beta1/phishingprotection.proto", fileDescriptor_phishingprotection_3f3199ea7ffa4ad5) +} + +var fileDescriptor_phishingprotection_3f3199ea7ffa4ad5 = []byte{ + // 345 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0xce, 0xc9, 0x2f, 0x4d, 0xd1, 0x2f, 0xc8, 0xc8, 0x2c, 0xce, 0xc8, 0xcc, + 0x4b, 0x2f, 0x28, 0xca, 0x2f, 0x49, 0x4d, 0x2e, 0xc9, 0xcc, 0xcf, 0xd3, 0x2f, 0x33, 0x4c, 0x4a, + 0x2d, 0x49, 0x34, 0xc4, 0x22, 0xa5, 0x07, 0x62, 0xe6, 0x0b, 0xa9, 0x43, 0x4c, 0xd0, 0x03, 0x9b, + 0xa0, 0x87, 0x45, 0x19, 0xd4, 0x04, 0x29, 0x19, 0xa8, 0x55, 0x89, 0x05, 0x99, 0xfa, 0x89, 0x79, + 0x79, 0xf9, 0x25, 0x89, 0x20, 0xe9, 0x62, 0x88, 0x31, 0x4a, 0x8e, 0x5c, 0xa2, 0x41, 0xa9, 0x05, + 0xf9, 0x45, 0x25, 0x01, 0x50, 0x13, 0x82, 0x52, 0x0b, 0x4b, 0x53, 0x8b, 0x4b, 0x84, 0xc4, 0xb8, + 0xd8, 0x0a, 0x12, 0x8b, 0x52, 0xf3, 0x4a, 0x24, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0xa0, 0x3c, + 0x21, 0x01, 0x2e, 0xe6, 0xd2, 0xa2, 0x4c, 0x09, 0x26, 0xb0, 0x20, 0x88, 0xa9, 0x24, 0xc1, 0x25, + 0x86, 0x6e, 0x44, 0x71, 0x41, 0x7e, 0x5e, 0x71, 0xaa, 0xd1, 0x7b, 0x46, 0x2e, 0x05, 0x98, 0x60, + 0x00, 0xdc, 0x65, 0xc1, 0xa9, 0x45, 0x65, 0x99, 0xc9, 0xa9, 0x61, 0x86, 0x4e, 0x20, 0xf7, 0x09, + 0x9d, 0x62, 0xe4, 0xe2, 0x43, 0xd5, 0x2f, 0x64, 0xa7, 0x47, 0xa4, 0xe7, 0xf4, 0xb0, 0xba, 0x5d, + 0xca, 0x9e, 0x6c, 0xfd, 0x10, 0x87, 0x2b, 0x99, 0x37, 0x5d, 0x7e, 0x32, 0x99, 0xc9, 0x50, 0x49, + 0x07, 0x1e, 0x0f, 0xd5, 0x10, 0xef, 0xdb, 0x16, 0x14, 0xe5, 0x67, 0xa5, 0x26, 0x97, 0x14, 0xeb, + 0x6b, 0xd5, 0xc2, 0xe3, 0xc6, 0xaa, 0x08, 0x6c, 0x8a, 0x15, 0xa3, 0x96, 0xd3, 0x0f, 0x46, 0x2e, + 0xd5, 0xe4, 0xfc, 0x5c, 0x98, 0xfd, 0xb8, 0x6d, 0x76, 0x12, 0xc7, 0x0c, 0x18, 0x10, 0x2b, 0x3f, + 0x80, 0x31, 0x2a, 0x12, 0xaa, 0x3b, 0x3d, 0x3f, 0x27, 0x31, 0x2f, 0x5d, 0x2f, 0xbf, 0x28, 0x5d, + 0x3f, 0x3d, 0x35, 0x0f, 0x1c, 0x5f, 0xfa, 0x10, 
0xa9, 0xc4, 0x82, 0xcc, 0x62, 0x82, 0x69, 0xc7, + 0x1a, 0x53, 0x6a, 0x11, 0x13, 0x8b, 0xbb, 0x73, 0x40, 0xc0, 0x2a, 0x26, 0x75, 0x77, 0x88, 0x15, + 0xce, 0xe0, 0x00, 0xc2, 0x74, 0x8a, 0x1e, 0x34, 0x76, 0x4e, 0xc1, 0x54, 0xc6, 0x80, 0x55, 0xc6, + 0x60, 0xaa, 0x8c, 0x09, 0x83, 0xd8, 0x96, 0xc4, 0x06, 0x76, 0xa0, 0x31, 0x20, 0x00, 0x00, 0xff, + 0xff, 0xf7, 0x8f, 0x06, 0xd9, 0xdb, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.pb.go new file mode 100644 index 0000000..a7fa485 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.pb.go @@ -0,0 +1,672 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto + +package recaptchaenterprise // import "google.golang.org/genproto/googleapis/cloud/recaptchaenterprise/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum that reprensents the types of annotations. +type AnnotateAssessmentRequest_Annotation int32 + +const ( + // Default unspecified type. + AnnotateAssessmentRequest_ANNOTATION_UNSPECIFIED AnnotateAssessmentRequest_Annotation = 0 + // Provides information that the event turned out to be legitimate. + AnnotateAssessmentRequest_LEGITIMATE AnnotateAssessmentRequest_Annotation = 1 + // Provides information that the event turned out to be fraudulent. + AnnotateAssessmentRequest_FRAUDULENT AnnotateAssessmentRequest_Annotation = 2 +) + +var AnnotateAssessmentRequest_Annotation_name = map[int32]string{ + 0: "ANNOTATION_UNSPECIFIED", + 1: "LEGITIMATE", + 2: "FRAUDULENT", +} +var AnnotateAssessmentRequest_Annotation_value = map[string]int32{ + "ANNOTATION_UNSPECIFIED": 0, + "LEGITIMATE": 1, + "FRAUDULENT": 2, +} + +func (x AnnotateAssessmentRequest_Annotation) String() string { + return proto.EnumName(AnnotateAssessmentRequest_Annotation_name, int32(x)) +} +func (AnnotateAssessmentRequest_Annotation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{1, 0} +} + +// LINT.IfChange(classification_reason) +// Reasons contributing to the risk analysis verdict. +type Assessment_ClassificationReason int32 + +const ( + // Default unspecified type. + Assessment_CLASSIFICATION_REASON_UNSPECIFIED Assessment_ClassificationReason = 0 + // The event appeared to be automated. + Assessment_AUTOMATION Assessment_ClassificationReason = 1 + // The event was not made from the proper context on the real site. + Assessment_UNEXPECTED_ENVIRONMENT Assessment_ClassificationReason = 2 + // Browsing behavior leading up to the event was generated was out of the + // ordinary. 
+ Assessment_UNEXPECTED_USAGE_PATTERNS Assessment_ClassificationReason = 4 + // Too little traffic has been received from this site thus far to generate + // quality risk analysis. + Assessment_PROVISIONAL_RISK_ANALYSIS Assessment_ClassificationReason = 5 +) + +var Assessment_ClassificationReason_name = map[int32]string{ + 0: "CLASSIFICATION_REASON_UNSPECIFIED", + 1: "AUTOMATION", + 2: "UNEXPECTED_ENVIRONMENT", + 4: "UNEXPECTED_USAGE_PATTERNS", + 5: "PROVISIONAL_RISK_ANALYSIS", +} +var Assessment_ClassificationReason_value = map[string]int32{ + "CLASSIFICATION_REASON_UNSPECIFIED": 0, + "AUTOMATION": 1, + "UNEXPECTED_ENVIRONMENT": 2, + "UNEXPECTED_USAGE_PATTERNS": 4, + "PROVISIONAL_RISK_ANALYSIS": 5, +} + +func (x Assessment_ClassificationReason) String() string { + return proto.EnumName(Assessment_ClassificationReason_name, int32(x)) +} +func (Assessment_ClassificationReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{3, 0} +} + +// Enum that represents the types of invalid token reasons. +type TokenProperties_InvalidReason int32 + +const ( + // Default unspecified type. + TokenProperties_INVALID_REASON_UNSPECIFIED TokenProperties_InvalidReason = 0 + // If the failure reason was not accounted for. + TokenProperties_UNKNOWN_INVALID_REASON TokenProperties_InvalidReason = 1 + // The provided user verification token was malformed. + TokenProperties_MALFORMED TokenProperties_InvalidReason = 2 + // The user verification token had expired. + TokenProperties_EXPIRED TokenProperties_InvalidReason = 3 + // The user verification had already been seen. + TokenProperties_DUPE TokenProperties_InvalidReason = 4 + // The user verification token did not match the provided site secret. + // This may be a configuration error (e.g. development keys used in + // production) or end users trying to use verification tokens from other + // sites. + TokenProperties_SITE_MISMATCH TokenProperties_InvalidReason = 5 + // The user verification token was not present. It is a required input. + TokenProperties_MISSING TokenProperties_InvalidReason = 6 +) + +var TokenProperties_InvalidReason_name = map[int32]string{ + 0: "INVALID_REASON_UNSPECIFIED", + 1: "UNKNOWN_INVALID_REASON", + 2: "MALFORMED", + 3: "EXPIRED", + 4: "DUPE", + 5: "SITE_MISMATCH", + 6: "MISSING", +} +var TokenProperties_InvalidReason_value = map[string]int32{ + "INVALID_REASON_UNSPECIFIED": 0, + "UNKNOWN_INVALID_REASON": 1, + "MALFORMED": 2, + "EXPIRED": 3, + "DUPE": 4, + "SITE_MISMATCH": 5, + "MISSING": 6, +} + +func (x TokenProperties_InvalidReason) String() string { + return proto.EnumName(TokenProperties_InvalidReason_name, int32(x)) +} +func (TokenProperties_InvalidReason) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{5, 0} +} + +// The create assessment request message. +type CreateAssessmentRequest struct { + // Required. The name of the project in which the assessment will be created, + // in the format "projects/{project_number}". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The asessment details. 
+ Assessment *Assessment `protobuf:"bytes,2,opt,name=assessment,proto3" json:"assessment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAssessmentRequest) Reset() { *m = CreateAssessmentRequest{} } +func (m *CreateAssessmentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAssessmentRequest) ProtoMessage() {} +func (*CreateAssessmentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{0} +} +func (m *CreateAssessmentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAssessmentRequest.Unmarshal(m, b) +} +func (m *CreateAssessmentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAssessmentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAssessmentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAssessmentRequest.Merge(dst, src) +} +func (m *CreateAssessmentRequest) XXX_Size() int { + return xxx_messageInfo_CreateAssessmentRequest.Size(m) +} +func (m *CreateAssessmentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAssessmentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAssessmentRequest proto.InternalMessageInfo + +func (m *CreateAssessmentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateAssessmentRequest) GetAssessment() *Assessment { + if m != nil { + return m.Assessment + } + return nil +} + +// The request message to annotate an Assessment. +type AnnotateAssessmentRequest struct { + // Required. The resource name of the Assessment, in the format + // "projects/{project_number}/assessments/{assessment_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The annotation that will be assigned to the Event. 
+ Annotation AnnotateAssessmentRequest_Annotation `protobuf:"varint,2,opt,name=annotation,proto3,enum=google.cloud.recaptchaenterprise.v1beta1.AnnotateAssessmentRequest_Annotation" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateAssessmentRequest) Reset() { *m = AnnotateAssessmentRequest{} } +func (m *AnnotateAssessmentRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateAssessmentRequest) ProtoMessage() {} +func (*AnnotateAssessmentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{1} +} +func (m *AnnotateAssessmentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateAssessmentRequest.Unmarshal(m, b) +} +func (m *AnnotateAssessmentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateAssessmentRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateAssessmentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateAssessmentRequest.Merge(dst, src) +} +func (m *AnnotateAssessmentRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateAssessmentRequest.Size(m) +} +func (m *AnnotateAssessmentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateAssessmentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateAssessmentRequest proto.InternalMessageInfo + +func (m *AnnotateAssessmentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AnnotateAssessmentRequest) GetAnnotation() AnnotateAssessmentRequest_Annotation { + if m != nil { + return m.Annotation + } + return AnnotateAssessmentRequest_ANNOTATION_UNSPECIFIED +} + +// Empty response for AnnotateAssessment. +type AnnotateAssessmentResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateAssessmentResponse) Reset() { *m = AnnotateAssessmentResponse{} } +func (m *AnnotateAssessmentResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateAssessmentResponse) ProtoMessage() {} +func (*AnnotateAssessmentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{2} +} +func (m *AnnotateAssessmentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateAssessmentResponse.Unmarshal(m, b) +} +func (m *AnnotateAssessmentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateAssessmentResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateAssessmentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateAssessmentResponse.Merge(dst, src) +} +func (m *AnnotateAssessmentResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateAssessmentResponse.Size(m) +} +func (m *AnnotateAssessmentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateAssessmentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateAssessmentResponse proto.InternalMessageInfo + +// A recaptcha assessment resource. +type Assessment struct { + // Output only. The resource name for the Assessment in the format + // "projects/{project_number}/assessments/{assessment_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The event being assessed. + Event *Event `protobuf:"bytes,2,opt,name=event,proto3" json:"event,omitempty"` + // Output only. 
Legitimate event score from 0.0 to 1.0. + // (1.0 means very likely legitimate traffic while 0.0 means very likely + // non-legitimate traffic). + Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"` + // Output only. Properties of the provided event token. + TokenProperties *TokenProperties `protobuf:"bytes,4,opt,name=token_properties,json=tokenProperties,proto3" json:"token_properties,omitempty"` + // Output only. Reasons contributing to the risk analysis verdict. + Reasons []Assessment_ClassificationReason `protobuf:"varint,5,rep,packed,name=reasons,proto3,enum=google.cloud.recaptchaenterprise.v1beta1.Assessment_ClassificationReason" json:"reasons,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Assessment) Reset() { *m = Assessment{} } +func (m *Assessment) String() string { return proto.CompactTextString(m) } +func (*Assessment) ProtoMessage() {} +func (*Assessment) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{3} +} +func (m *Assessment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Assessment.Unmarshal(m, b) +} +func (m *Assessment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Assessment.Marshal(b, m, deterministic) +} +func (dst *Assessment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Assessment.Merge(dst, src) +} +func (m *Assessment) XXX_Size() int { + return xxx_messageInfo_Assessment.Size(m) +} +func (m *Assessment) XXX_DiscardUnknown() { + xxx_messageInfo_Assessment.DiscardUnknown(m) +} + +var xxx_messageInfo_Assessment proto.InternalMessageInfo + +func (m *Assessment) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Assessment) GetEvent() *Event { + if m != nil { + return m.Event + } + return nil +} + +func (m *Assessment) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *Assessment) GetTokenProperties() *TokenProperties { + if m != nil { + return m.TokenProperties + } + return nil +} + +func (m *Assessment) GetReasons() []Assessment_ClassificationReason { + if m != nil { + return m.Reasons + } + return nil +} + +type Event struct { + // The user response token provided by the reCAPTCHA client-side integration + // on your site. + Token string `protobuf:"bytes,1,opt,name=token,proto3" json:"token,omitempty"` + // The site key that was used to invoke reCAPTCHA on your site and generate + // the token. 
+ SiteKey string `protobuf:"bytes,2,opt,name=site_key,json=siteKey,proto3" json:"site_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Event) Reset() { *m = Event{} } +func (m *Event) String() string { return proto.CompactTextString(m) } +func (*Event) ProtoMessage() {} +func (*Event) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{4} +} +func (m *Event) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Event.Unmarshal(m, b) +} +func (m *Event) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Event.Marshal(b, m, deterministic) +} +func (dst *Event) XXX_Merge(src proto.Message) { + xxx_messageInfo_Event.Merge(dst, src) +} +func (m *Event) XXX_Size() int { + return xxx_messageInfo_Event.Size(m) +} +func (m *Event) XXX_DiscardUnknown() { + xxx_messageInfo_Event.DiscardUnknown(m) +} + +var xxx_messageInfo_Event proto.InternalMessageInfo + +func (m *Event) GetToken() string { + if m != nil { + return m.Token + } + return "" +} + +func (m *Event) GetSiteKey() string { + if m != nil { + return m.SiteKey + } + return "" +} + +type TokenProperties struct { + // Output only. Whether the provided user response token is valid. + Valid bool `protobuf:"varint,1,opt,name=valid,proto3" json:"valid,omitempty"` + // Output only. Reason associated with the response when valid = false. + InvalidReason TokenProperties_InvalidReason `protobuf:"varint,2,opt,name=invalid_reason,json=invalidReason,proto3,enum=google.cloud.recaptchaenterprise.v1beta1.TokenProperties_InvalidReason" json:"invalid_reason,omitempty"` + // Output only. The timestamp corresponding to the generation of the token. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The hostname of the page on which the token was generated. + Hostname string `protobuf:"bytes,4,opt,name=hostname,proto3" json:"hostname,omitempty"` + // Output only. Action name provided at token generation. 
+ Action string `protobuf:"bytes,5,opt,name=action,proto3" json:"action,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TokenProperties) Reset() { *m = TokenProperties{} } +func (m *TokenProperties) String() string { return proto.CompactTextString(m) } +func (*TokenProperties) ProtoMessage() {} +func (*TokenProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_recaptchaenterprise_7acd4e5481c3d535, []int{5} +} +func (m *TokenProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TokenProperties.Unmarshal(m, b) +} +func (m *TokenProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TokenProperties.Marshal(b, m, deterministic) +} +func (dst *TokenProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_TokenProperties.Merge(dst, src) +} +func (m *TokenProperties) XXX_Size() int { + return xxx_messageInfo_TokenProperties.Size(m) +} +func (m *TokenProperties) XXX_DiscardUnknown() { + xxx_messageInfo_TokenProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_TokenProperties proto.InternalMessageInfo + +func (m *TokenProperties) GetValid() bool { + if m != nil { + return m.Valid + } + return false +} + +func (m *TokenProperties) GetInvalidReason() TokenProperties_InvalidReason { + if m != nil { + return m.InvalidReason + } + return TokenProperties_INVALID_REASON_UNSPECIFIED +} + +func (m *TokenProperties) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *TokenProperties) GetHostname() string { + if m != nil { + return m.Hostname + } + return "" +} + +func (m *TokenProperties) GetAction() string { + if m != nil { + return m.Action + } + return "" +} + +func init() { + proto.RegisterType((*CreateAssessmentRequest)(nil), "google.cloud.recaptchaenterprise.v1beta1.CreateAssessmentRequest") + proto.RegisterType((*AnnotateAssessmentRequest)(nil), "google.cloud.recaptchaenterprise.v1beta1.AnnotateAssessmentRequest") + proto.RegisterType((*AnnotateAssessmentResponse)(nil), "google.cloud.recaptchaenterprise.v1beta1.AnnotateAssessmentResponse") + proto.RegisterType((*Assessment)(nil), "google.cloud.recaptchaenterprise.v1beta1.Assessment") + proto.RegisterType((*Event)(nil), "google.cloud.recaptchaenterprise.v1beta1.Event") + proto.RegisterType((*TokenProperties)(nil), "google.cloud.recaptchaenterprise.v1beta1.TokenProperties") + proto.RegisterEnum("google.cloud.recaptchaenterprise.v1beta1.AnnotateAssessmentRequest_Annotation", AnnotateAssessmentRequest_Annotation_name, AnnotateAssessmentRequest_Annotation_value) + proto.RegisterEnum("google.cloud.recaptchaenterprise.v1beta1.Assessment_ClassificationReason", Assessment_ClassificationReason_name, Assessment_ClassificationReason_value) + proto.RegisterEnum("google.cloud.recaptchaenterprise.v1beta1.TokenProperties_InvalidReason", TokenProperties_InvalidReason_name, TokenProperties_InvalidReason_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// RecaptchaEnterpriseServiceV1Beta1Client is the client API for RecaptchaEnterpriseServiceV1Beta1 service. 
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type RecaptchaEnterpriseServiceV1Beta1Client interface {
+	// Creates an Assessment of the likelihood an event is legitimate.
+	CreateAssessment(ctx context.Context, in *CreateAssessmentRequest, opts ...grpc.CallOption) (*Assessment, error)
+	// Annotates a previously created Assessment to provide additional information
+	// on whether the event turned out to be authentic or fraudulent.
+	AnnotateAssessment(ctx context.Context, in *AnnotateAssessmentRequest, opts ...grpc.CallOption) (*AnnotateAssessmentResponse, error)
+}
+
+type recaptchaEnterpriseServiceV1Beta1Client struct {
+	cc *grpc.ClientConn
+}
+
+func NewRecaptchaEnterpriseServiceV1Beta1Client(cc *grpc.ClientConn) RecaptchaEnterpriseServiceV1Beta1Client {
+	return &recaptchaEnterpriseServiceV1Beta1Client{cc}
+}
+
+func (c *recaptchaEnterpriseServiceV1Beta1Client) CreateAssessment(ctx context.Context, in *CreateAssessmentRequest, opts ...grpc.CallOption) (*Assessment, error) {
+	out := new(Assessment)
+	err := c.cc.Invoke(ctx, "/google.cloud.recaptchaenterprise.v1beta1.RecaptchaEnterpriseServiceV1Beta1/CreateAssessment", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *recaptchaEnterpriseServiceV1Beta1Client) AnnotateAssessment(ctx context.Context, in *AnnotateAssessmentRequest, opts ...grpc.CallOption) (*AnnotateAssessmentResponse, error) {
+	out := new(AnnotateAssessmentResponse)
+	err := c.cc.Invoke(ctx, "/google.cloud.recaptchaenterprise.v1beta1.RecaptchaEnterpriseServiceV1Beta1/AnnotateAssessment", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+// RecaptchaEnterpriseServiceV1Beta1Server is the server API for RecaptchaEnterpriseServiceV1Beta1 service.
+type RecaptchaEnterpriseServiceV1Beta1Server interface {
+	// Creates an Assessment of the likelihood an event is legitimate.
+	CreateAssessment(context.Context, *CreateAssessmentRequest) (*Assessment, error)
+	// Annotates a previously created Assessment to provide additional information
+	// on whether the event turned out to be authentic or fraudulent.
+ AnnotateAssessment(context.Context, *AnnotateAssessmentRequest) (*AnnotateAssessmentResponse, error) +} + +func RegisterRecaptchaEnterpriseServiceV1Beta1Server(s *grpc.Server, srv RecaptchaEnterpriseServiceV1Beta1Server) { + s.RegisterService(&_RecaptchaEnterpriseServiceV1Beta1_serviceDesc, srv) +} + +func _RecaptchaEnterpriseServiceV1Beta1_CreateAssessment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAssessmentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecaptchaEnterpriseServiceV1Beta1Server).CreateAssessment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.recaptchaenterprise.v1beta1.RecaptchaEnterpriseServiceV1Beta1/CreateAssessment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecaptchaEnterpriseServiceV1Beta1Server).CreateAssessment(ctx, req.(*CreateAssessmentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RecaptchaEnterpriseServiceV1Beta1_AnnotateAssessment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateAssessmentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RecaptchaEnterpriseServiceV1Beta1Server).AnnotateAssessment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.recaptchaenterprise.v1beta1.RecaptchaEnterpriseServiceV1Beta1/AnnotateAssessment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RecaptchaEnterpriseServiceV1Beta1Server).AnnotateAssessment(ctx, req.(*AnnotateAssessmentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _RecaptchaEnterpriseServiceV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.recaptchaenterprise.v1beta1.RecaptchaEnterpriseServiceV1Beta1", + HandlerType: (*RecaptchaEnterpriseServiceV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateAssessment", + Handler: _RecaptchaEnterpriseServiceV1Beta1_CreateAssessment_Handler, + }, + { + MethodName: "AnnotateAssessment", + Handler: _RecaptchaEnterpriseServiceV1Beta1_AnnotateAssessment_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto", +} + +func init() { + proto.RegisterFile("google/cloud/recaptchaenterprise/v1beta1/recaptchaenterprise.proto", fileDescriptor_recaptchaenterprise_7acd4e5481c3d535) +} + +var fileDescriptor_recaptchaenterprise_7acd4e5481c3d535 = []byte{ + // 945 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x6f, 0xe3, 0x44, + 0x14, 0xc7, 0xf9, 0xe8, 0xc7, 0xab, 0xda, 0x35, 0xa3, 0xd5, 0x92, 0x46, 0x0b, 0x74, 0x2d, 0x81, + 0xa2, 0x1e, 0x6c, 0xb5, 0x20, 0x04, 0x5d, 0x38, 0xb8, 0xce, 0x34, 0x3b, 0x6a, 0xe2, 0x44, 0x63, + 0xa7, 0x2c, 0x50, 0xc9, 0x72, 0xdd, 0xd9, 0xac, 0xd9, 0xc4, 0x36, 0x1e, 0xb7, 0xd2, 0x0a, 0xed, + 0x85, 0x0b, 0x67, 0xc4, 0x95, 0x13, 0x17, 0x24, 0xfe, 0x13, 0xb8, 0x70, 0xe0, 0xcc, 0x8d, 0x0b, + 0x57, 0xfe, 0x00, 0x84, 0x3c, 0x63, 0x27, 0xe9, 0xe2, 0x85, 0x52, 0xb8, 0xe5, 0xcd, 0x7b, 0xf3, + 0x7b, 0x1f, 0xbf, 0x37, 0x3f, 0x07, 0x0e, 0x27, 0x71, 0x3c, 0x99, 0x32, 0x23, 0x98, 0xc6, 0x17, + 0xe7, 0x46, 0xca, 0x02, 0x3f, 0xc9, 0x82, 
0xc7, 0x3e, 0x8b, 0x32, 0x96, 0x26, 0x69, 0xc8, 0x99, + 0x71, 0xb9, 0x77, 0xc6, 0x32, 0x7f, 0xaf, 0xca, 0xa7, 0x27, 0x69, 0x9c, 0xc5, 0xa8, 0x23, 0x31, + 0x74, 0x81, 0xa1, 0x57, 0xc5, 0x15, 0x18, 0xed, 0xbb, 0x45, 0x36, 0x3f, 0x09, 0x0d, 0x3f, 0x8a, + 0xe2, 0xcc, 0xcf, 0xc2, 0x38, 0xe2, 0x12, 0xa7, 0xfd, 0x7a, 0xe1, 0x15, 0xd6, 0xd9, 0xc5, 0x23, + 0x23, 0x0b, 0x67, 0x8c, 0x67, 0xfe, 0x2c, 0x91, 0x01, 0xda, 0x97, 0x0a, 0xbc, 0x62, 0xa5, 0xcc, + 0xcf, 0x98, 0xc9, 0x39, 0xe3, 0x7c, 0xc6, 0xa2, 0x8c, 0xb2, 0xcf, 0x2e, 0x18, 0xcf, 0xd0, 0x1d, + 0x58, 0x49, 0xfc, 0x94, 0x45, 0x59, 0x4b, 0xd9, 0x51, 0x3a, 0xeb, 0xb4, 0xb0, 0x90, 0x0b, 0xe0, + 0xcf, 0x83, 0x5b, 0xb5, 0x1d, 0xa5, 0xb3, 0xb1, 0xff, 0xb6, 0x7e, 0xdd, 0x8a, 0xf5, 0xa5, 0x44, + 0x4b, 0x38, 0xda, 0x6f, 0x0a, 0x6c, 0x9b, 0xb2, 0x81, 0x8a, 0x5a, 0x10, 0x34, 0x22, 0x7f, 0xc6, + 0x8a, 0x4a, 0xc4, 0x6f, 0x14, 0x01, 0x2c, 0x3a, 0x16, 0x75, 0x6c, 0xed, 0xdb, 0xff, 0xa2, 0x8e, + 0x17, 0x25, 0x2b, 0x3d, 0x61, 0x1c, 0xd1, 0xa5, 0x0c, 0xda, 0x03, 0x80, 0x85, 0x07, 0xb5, 0xe1, + 0x8e, 0x69, 0xdb, 0x43, 0xd7, 0x74, 0xc9, 0xd0, 0xf6, 0xc6, 0xb6, 0x33, 0xc2, 0x16, 0x39, 0x22, + 0xb8, 0xab, 0xbe, 0x84, 0xb6, 0x00, 0xfa, 0xb8, 0x47, 0x5c, 0x32, 0x30, 0x5d, 0xac, 0x2a, 0xb9, + 0x7d, 0x44, 0xcd, 0x71, 0x77, 0xdc, 0xc7, 0xb6, 0xab, 0xd6, 0xb4, 0xbb, 0xd0, 0xae, 0xca, 0xce, + 0x93, 0x38, 0xe2, 0x4c, 0xfb, 0xbd, 0x0e, 0xb0, 0x38, 0xae, 0x6c, 0x1d, 0x43, 0x93, 0x5d, 0x2e, + 0xa6, 0x6f, 0x5c, 0xbf, 0x6b, 0x9c, 0x5f, 0xa3, 0xf2, 0x36, 0xba, 0x0d, 0x4d, 0x1e, 0xc4, 0x29, + 0x6b, 0xd5, 0x77, 0x94, 0x4e, 0x8d, 0x4a, 0x03, 0x9d, 0x83, 0x9a, 0xc5, 0x4f, 0x58, 0xe4, 0x25, + 0x69, 0x9c, 0xb0, 0x34, 0x0b, 0x19, 0x6f, 0x35, 0x44, 0x9e, 0xf7, 0xae, 0x9f, 0xc7, 0xcd, 0x11, + 0x46, 0x73, 0x00, 0x7a, 0x2b, 0xbb, 0x7a, 0x80, 0x02, 0x58, 0x4d, 0x99, 0xcf, 0xe3, 0x88, 0xb7, + 0x9a, 0x3b, 0xf5, 0xce, 0xd6, 0x3e, 0xb9, 0xc9, 0x0a, 0xe9, 0xd6, 0xd4, 0xe7, 0x3c, 0x7c, 0x14, + 0x06, 0x92, 0x2f, 0x81, 0x48, 0x4b, 0x64, 0xed, 0x3b, 0x05, 0x6e, 0x57, 0x45, 0xa0, 0x37, 0xe0, + 0x9e, 0xd5, 0x37, 0x1d, 0x87, 0x1c, 0x11, 0x4b, 0x32, 0x48, 0xb1, 0xe9, 0x54, 0x11, 0x69, 0x8e, + 0xdd, 0xe1, 0x40, 0x84, 0xa8, 0x4a, 0x4e, 0xfa, 0xd8, 0xc6, 0x0f, 0x47, 0xd8, 0x72, 0x71, 0xd7, + 0xc3, 0xf6, 0x09, 0xa1, 0x43, 0x7b, 0x20, 0x48, 0x45, 0xaf, 0xc2, 0xf6, 0x92, 0x6f, 0xec, 0x98, + 0x3d, 0xec, 0x8d, 0x4c, 0xd7, 0xc5, 0xd4, 0x76, 0xd4, 0x46, 0xee, 0x1e, 0xd1, 0xe1, 0x09, 0x71, + 0xc8, 0xd0, 0x36, 0xfb, 0x1e, 0x25, 0xce, 0xb1, 0x67, 0xda, 0x66, 0xff, 0x23, 0x87, 0x38, 0x6a, + 0x53, 0x7b, 0x17, 0x9a, 0xb8, 0xe4, 0x44, 0x8c, 0xaa, 0xe0, 0x5b, 0x1a, 0x68, 0x1b, 0xd6, 0x78, + 0x98, 0x31, 0xef, 0x09, 0x7b, 0x2a, 0x38, 0x5f, 0xa7, 0xab, 0xb9, 0x7d, 0xcc, 0x9e, 0x6a, 0xdf, + 0xd4, 0xe1, 0xd6, 0x73, 0xd3, 0xce, 0x41, 0x2e, 0xfd, 0x69, 0x78, 0x2e, 0x40, 0xd6, 0xa8, 0x34, + 0x50, 0x04, 0x5b, 0x61, 0x24, 0x7e, 0x7a, 0x72, 0x40, 0xc5, 0xa3, 0xe9, 0xdd, 0x98, 0x56, 0x9d, + 0x48, 0xbc, 0x62, 0xee, 0x9b, 0xe1, 0xb2, 0x89, 0xee, 0xc3, 0x46, 0x20, 0xb4, 0xc5, 0xcb, 0x65, + 0x47, 0x2c, 0xd9, 0xc6, 0x7e, 0xbb, 0x4c, 0x56, 0x6a, 0x92, 0xee, 0x96, 0x9a, 0x44, 0x41, 0x86, + 0xe7, 0x07, 0xa8, 0x0d, 0x6b, 0x8f, 0x63, 0x9e, 0x89, 0xd5, 0x6f, 0x88, 0x8e, 0xe7, 0x76, 0xae, + 0x4c, 0x7e, 0x20, 0x5e, 0x7d, 0x53, 0x2a, 0x93, 0xb4, 0xb4, 0xaf, 0x14, 0xd8, 0xbc, 0x52, 0x11, + 0x7a, 0x0d, 0xda, 0xc4, 0x3e, 0x31, 0xfb, 0xa4, 0x5b, 0x4d, 0xb0, 0x20, 0xf4, 0xd8, 0x1e, 0x7e, + 0x68, 0x7b, 0x57, 0xe3, 0x54, 0x05, 0x6d, 0xc2, 0xfa, 0xc0, 0xec, 0x1f, 0x0d, 0xe9, 0x00, 0x77, + 0xd5, 0x1a, 0xda, 0x80, 0x55, 0xfc, 0x70, 0x44, 0x28, 0xee, 0xaa, 
0x75, 0xb4, 0x06, 0x8d, 0xee, + 0x78, 0x84, 0xd5, 0x06, 0x7a, 0x19, 0x36, 0x1d, 0xe2, 0x62, 0x6f, 0x40, 0x9c, 0x81, 0xe9, 0x5a, + 0x0f, 0xd4, 0x66, 0x1e, 0x39, 0x20, 0x8e, 0x43, 0xec, 0x9e, 0xba, 0xb2, 0xff, 0x53, 0x1d, 0xee, + 0xd1, 0x72, 0xa2, 0x78, 0x3e, 0x51, 0x87, 0xa5, 0x97, 0x61, 0xc0, 0x4e, 0xf6, 0x0e, 0xf3, 0xb9, + 0xa2, 0x1f, 0x14, 0x50, 0x9f, 0xd7, 0x61, 0x64, 0x5e, 0x9f, 0x97, 0x17, 0x68, 0x78, 0xfb, 0x46, + 0xba, 0xac, 0xbd, 0xff, 0xc5, 0xcf, 0xbf, 0x7e, 0x5d, 0x7b, 0x47, 0xeb, 0xcc, 0x3f, 0x55, 0x9f, + 0x4b, 0xed, 0xff, 0x20, 0x49, 0xe3, 0x4f, 0x59, 0x90, 0x71, 0x63, 0xf7, 0x99, 0xb1, 0x10, 0x6f, + 0x7e, 0xb0, 0xa4, 0xe4, 0xe8, 0x17, 0x05, 0xd0, 0x5f, 0xe5, 0x0d, 0x59, 0xff, 0x83, 0x34, 0xb7, + 0xbb, 0xff, 0x0d, 0xa4, 0x50, 0xd8, 0xb2, 0xbf, 0xbd, 0x45, 0x7f, 0xf9, 0x5e, 0x2d, 0x75, 0xb7, + 0xdc, 0x9c, 0xb1, 0xfb, 0xec, 0xa0, 0xf8, 0x0c, 0xb0, 0x03, 0x65, 0xf7, 0xf0, 0x0f, 0x05, 0xde, + 0x0c, 0xe2, 0x59, 0x59, 0xc9, 0xdf, 0xd4, 0x70, 0xd8, 0xaa, 0x60, 0x7e, 0x94, 0xef, 0xfd, 0x48, + 0xf9, 0xf8, 0x93, 0xe2, 0xfe, 0x24, 0x9e, 0xfa, 0xd1, 0x44, 0x8f, 0xd3, 0x89, 0x31, 0x61, 0x91, + 0x78, 0x15, 0x86, 0x74, 0xf9, 0x49, 0xc8, 0xff, 0xf9, 0x6f, 0xc4, 0xfd, 0x0a, 0xdf, 0xb7, 0xb5, + 0x46, 0xcf, 0xa2, 0xf8, 0xfb, 0x5a, 0xa7, 0x27, 0x93, 0x58, 0x62, 0x5c, 0x15, 0xd5, 0xe8, 0xc5, + 0x06, 0xfe, 0x58, 0x86, 0x9e, 0x8a, 0xd0, 0xd3, 0x8a, 0xd0, 0xd3, 0x13, 0x99, 0xf0, 0x6c, 0x45, + 0x14, 0xf9, 0xd6, 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x2e, 0x13, 0x3b, 0x55, 0xea, 0x08, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1/cloud_redis.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1/cloud_redis.pb.go new file mode 100644 index 0000000..2097965 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1/cloud_redis.pb.go @@ -0,0 +1,1345 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/redis/v1/cloud_redis.proto + +package redis // import "google.golang.org/genproto/googleapis/cloud/redis/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the different states of a Redis instance. +type Instance_State int32 + +const ( + // Not set. + Instance_STATE_UNSPECIFIED Instance_State = 0 + // Redis instance is being created. + Instance_CREATING Instance_State = 1 + // Redis instance has been created and is fully usable. + Instance_READY Instance_State = 2 + // Redis instance configuration is being updated. Certain kinds of updates + // may cause the instance to become unusable while the update is in + // progress. + Instance_UPDATING Instance_State = 3 + // Redis instance is being deleted. 
+ Instance_DELETING Instance_State = 4 + // Redis instance is being repaired and may be unusable. + Instance_REPAIRING Instance_State = 5 + // Maintenance is being performed on this Redis instance. + Instance_MAINTENANCE Instance_State = 6 + // Redis instance is failing over (availability may be affected). + Instance_FAILING_OVER Instance_State = 9 +) + +var Instance_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", + 3: "UPDATING", + 4: "DELETING", + 5: "REPAIRING", + 6: "MAINTENANCE", + 9: "FAILING_OVER", +} +var Instance_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, + "UPDATING": 3, + "DELETING": 4, + "REPAIRING": 5, + "MAINTENANCE": 6, + "FAILING_OVER": 9, +} + +func (x Instance_State) String() string { + return proto.EnumName(Instance_State_name, int32(x)) +} +func (Instance_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{0, 0} +} + +// Available service tiers to choose from +type Instance_Tier int32 + +const ( + // Not set. + Instance_TIER_UNSPECIFIED Instance_Tier = 0 + // BASIC tier: standalone instance + Instance_BASIC Instance_Tier = 1 + // STANDARD_HA tier: highly available primary/replica instances + Instance_STANDARD_HA Instance_Tier = 3 +) + +var Instance_Tier_name = map[int32]string{ + 0: "TIER_UNSPECIFIED", + 1: "BASIC", + 3: "STANDARD_HA", +} +var Instance_Tier_value = map[string]int32{ + "TIER_UNSPECIFIED": 0, + "BASIC": 1, + "STANDARD_HA": 3, +} + +func (x Instance_Tier) String() string { + return proto.EnumName(Instance_Tier_name, int32(x)) +} +func (Instance_Tier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{0, 1} +} + +type FailoverInstanceRequest_DataProtectionMode int32 + +const ( + FailoverInstanceRequest_DATA_PROTECTION_MODE_UNSPECIFIED FailoverInstanceRequest_DataProtectionMode = 0 + // Instance failover will be protected with data loss control. More + // specifically, the failover will only be performed if the current + // replication offset diff between master and replica is under a certain + // threshold. + FailoverInstanceRequest_LIMITED_DATA_LOSS FailoverInstanceRequest_DataProtectionMode = 1 + // Instance failover will be performed without data loss control. + FailoverInstanceRequest_FORCE_DATA_LOSS FailoverInstanceRequest_DataProtectionMode = 2 +) + +var FailoverInstanceRequest_DataProtectionMode_name = map[int32]string{ + 0: "DATA_PROTECTION_MODE_UNSPECIFIED", + 1: "LIMITED_DATA_LOSS", + 2: "FORCE_DATA_LOSS", +} +var FailoverInstanceRequest_DataProtectionMode_value = map[string]int32{ + "DATA_PROTECTION_MODE_UNSPECIFIED": 0, + "LIMITED_DATA_LOSS": 1, + "FORCE_DATA_LOSS": 2, +} + +func (x FailoverInstanceRequest_DataProtectionMode) String() string { + return proto.EnumName(FailoverInstanceRequest_DataProtectionMode_name, int32(x)) +} +func (FailoverInstanceRequest_DataProtectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{7, 0} +} + +// A Google Cloud Redis instance. +type Instance struct { + // Required. 
Unique name of the resource in this scope including project and + // location using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // + // Note: Redis instances are managed and addressed at regional level so + // location_id here refers to a GCP region; however, users may choose which + // specific zone (or collection of zones for cross-zone instances) an instance + // should be provisioned in. Refer to [location_id] and + // [alternative_location_id] fields for more details. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An arbitrary and optional user-provided name for the instance. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Resource labels to represent user provided metadata + Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The zone where the instance will be provisioned. If not provided, + // the service will choose a zone for the instance. For STANDARD_HA tier, + // instances will be created across two zones for protection against zonal + // failures. If [alternative_location_id] is also provided, it must be + // different from [location_id]. + LocationId string `protobuf:"bytes,4,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + // Optional. Only applicable to STANDARD_HA tier which protects the instance + // against zonal failures by provisioning it across two zones. If provided, it + // must be a different zone from the one provided in [location_id]. + AlternativeLocationId string `protobuf:"bytes,5,opt,name=alternative_location_id,json=alternativeLocationId,proto3" json:"alternative_location_id,omitempty"` + // Optional. The version of Redis software. + // If not provided, latest supported version will be used. Updating the + // version will perform an upgrade/downgrade to the new version. Currently, + // the supported values are `REDIS_3_2` for Redis 3.2. + RedisVersion string `protobuf:"bytes,7,opt,name=redis_version,json=redisVersion,proto3" json:"redis_version,omitempty"` + // Optional. The CIDR range of internal addresses that are reserved for this + // instance. If not provided, the service will choose an unused /29 block, + // for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique + // and non-overlapping with existing subnets in an authorized network. + ReservedIpRange string `protobuf:"bytes,9,opt,name=reserved_ip_range,json=reservedIpRange,proto3" json:"reserved_ip_range,omitempty"` + // Output only. Hostname or IP address of the exposed Redis endpoint used by + // clients to connect to the service. + Host string `protobuf:"bytes,10,opt,name=host,proto3" json:"host,omitempty"` + // Output only. The port number of the exposed Redis endpoint. + Port int32 `protobuf:"varint,11,opt,name=port,proto3" json:"port,omitempty"` + // Output only. The current zone where the Redis endpoint is placed. For Basic + // Tier instances, this will always be the same as the [location_id] + // provided by the user at creation time. For Standard Tier instances, + // this can be either [location_id] or [alternative_location_id] and can + // change after a failover event. + CurrentLocationId string `protobuf:"bytes,12,opt,name=current_location_id,json=currentLocationId,proto3" json:"current_location_id,omitempty"` + // Output only. 
The time the instance was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,13,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The current state of this instance. + State Instance_State `protobuf:"varint,14,opt,name=state,proto3,enum=google.cloud.redis.v1.Instance_State" json:"state,omitempty"` + // Output only. Additional information about the current status of this + // instance, if available. + StatusMessage string `protobuf:"bytes,15,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Optional. Redis configuration parameters, according to + // http://redis.io/topics/config. Currently, the only supported parameters + // are: + // + // * maxmemory-policy + // * notify-keyspace-events + RedisConfigs map[string]string `protobuf:"bytes,16,rep,name=redis_configs,json=redisConfigs,proto3" json:"redis_configs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. The service tier of the instance. + Tier Instance_Tier `protobuf:"varint,17,opt,name=tier,proto3,enum=google.cloud.redis.v1.Instance_Tier" json:"tier,omitempty"` + // Required. Redis memory size in GiB. + MemorySizeGb int32 `protobuf:"varint,18,opt,name=memory_size_gb,json=memorySizeGb,proto3" json:"memory_size_gb,omitempty"` + // Optional. The full name of the Google Compute Engine + // [network](/compute/docs/networks-and-firewalls#networks) to which the + // instance is connected. If left unspecified, the `default` network + // will be used. + AuthorizedNetwork string `protobuf:"bytes,20,opt,name=authorized_network,json=authorizedNetwork,proto3" json:"authorized_network,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instance) Reset() { *m = Instance{} } +func (m *Instance) String() string { return proto.CompactTextString(m) } +func (*Instance) ProtoMessage() {} +func (*Instance) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{0} +} +func (m *Instance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instance.Unmarshal(m, b) +} +func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instance.Marshal(b, m, deterministic) +} +func (dst *Instance) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instance.Merge(dst, src) +} +func (m *Instance) XXX_Size() int { + return xxx_messageInfo_Instance.Size(m) +} +func (m *Instance) XXX_DiscardUnknown() { + xxx_messageInfo_Instance.DiscardUnknown(m) +} + +var xxx_messageInfo_Instance proto.InternalMessageInfo + +func (m *Instance) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instance) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Instance) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Instance) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +func (m *Instance) GetAlternativeLocationId() string { + if m != nil { + return m.AlternativeLocationId + } + return "" +} + +func (m *Instance) GetRedisVersion() string { + if m != nil { + return m.RedisVersion + } + return "" +} + +func (m *Instance) GetReservedIpRange() string { + if m != nil { + return m.ReservedIpRange + } + return "" +} + +func (m *Instance) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m 
*Instance) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *Instance) GetCurrentLocationId() string { + if m != nil { + return m.CurrentLocationId + } + return "" +} + +func (m *Instance) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Instance) GetState() Instance_State { + if m != nil { + return m.State + } + return Instance_STATE_UNSPECIFIED +} + +func (m *Instance) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Instance) GetRedisConfigs() map[string]string { + if m != nil { + return m.RedisConfigs + } + return nil +} + +func (m *Instance) GetTier() Instance_Tier { + if m != nil { + return m.Tier + } + return Instance_TIER_UNSPECIFIED +} + +func (m *Instance) GetMemorySizeGb() int32 { + if m != nil { + return m.MemorySizeGb + } + return 0 +} + +func (m *Instance) GetAuthorizedNetwork() string { + if m != nil { + return m.AuthorizedNetwork + } + return "" +} + +// Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. +type ListInstancesRequest struct { + // Required. The resource name of the instance location using the form: + // `projects/{project_id}/locations/{location_id}` + // where `location_id` refers to a GCP region + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // + // If not specified, a default value of 1000 will be used by the service. + // Regardless of the page_size value, the response may include a partial list + // and a caller should only rely on response's + // [next_page_token][CloudRedis.ListInstancesResponse.next_page_token] + // to determine if there are more instances left to be queried. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token value returned from a previous List request, + // if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesRequest) Reset() { *m = ListInstancesRequest{} } +func (m *ListInstancesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstancesRequest) ProtoMessage() {} +func (*ListInstancesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{1} +} +func (m *ListInstancesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesRequest.Unmarshal(m, b) +} +func (m *ListInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstancesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesRequest.Merge(dst, src) +} +func (m *ListInstancesRequest) XXX_Size() int { + return xxx_messageInfo_ListInstancesRequest.Size(m) +} +func (m *ListInstancesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesRequest proto.InternalMessageInfo + +func (m *ListInstancesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstancesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstancesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. +type ListInstancesResponse struct { + // A list of Redis instances in the project in the specified location, + // or across all locations. + // + // If the `location_id` in the parent field of the request is "-", all regions + // available to the project are queried, and the results aggregated. + // If in such an aggregated query a location is unavailable, a dummy Redis + // entry is included in the response with the "name" field set to a value of + // the form projects/{project_id}/locations/{location_id}/instances/- and the + // "status" field set to ERROR and "status_message" field set to "location not + // available for ListInstances". + Instances []*Instance `protobuf:"bytes,1,rep,name=instances,proto3" json:"instances,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Locations that could not be reached. 
+ Unreachable []string `protobuf:"bytes,3,rep,name=unreachable,proto3" json:"unreachable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesResponse) Reset() { *m = ListInstancesResponse{} } +func (m *ListInstancesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstancesResponse) ProtoMessage() {} +func (*ListInstancesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{2} +} +func (m *ListInstancesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesResponse.Unmarshal(m, b) +} +func (m *ListInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstancesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesResponse.Merge(dst, src) +} +func (m *ListInstancesResponse) XXX_Size() int { + return xxx_messageInfo_ListInstancesResponse.Size(m) +} +func (m *ListInstancesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesResponse proto.InternalMessageInfo + +func (m *ListInstancesResponse) GetInstances() []*Instance { + if m != nil { + return m.Instances + } + return nil +} + +func (m *ListInstancesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListInstancesResponse) GetUnreachable() []string { + if m != nil { + return m.Unreachable + } + return nil +} + +// Request for [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. +type GetInstanceRequest struct { + // Required. Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceRequest) Reset() { *m = GetInstanceRequest{} } +func (m *GetInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceRequest) ProtoMessage() {} +func (*GetInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{3} +} +func (m *GetInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceRequest.Unmarshal(m, b) +} +func (m *GetInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceRequest.Merge(dst, src) +} +func (m *GetInstanceRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceRequest.Size(m) +} +func (m *GetInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceRequest proto.InternalMessageInfo + +func (m *GetInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. +type CreateInstanceRequest struct { + // Required. 
The resource name of the instance location using the form: + // `projects/{project_id}/locations/{location_id}` + // where `location_id` refers to a GCP region + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The logical name of the Redis instance in the customer project + // with the following restrictions: + // + // * Must contain only lowercase letters, numbers, and hyphens. + // * Must start with a letter. + // * Must be between 1-40 characters. + // * Must end with a number or a letter. + // * Must be unique within the customer project / location + InstanceId string `protobuf:"bytes,2,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // Required. A Redis [Instance] resource + Instance *Instance `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceRequest) Reset() { *m = CreateInstanceRequest{} } +func (m *CreateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceRequest) ProtoMessage() {} +func (*CreateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{4} +} +func (m *CreateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceRequest.Unmarshal(m, b) +} +func (m *CreateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceRequest.Merge(dst, src) +} +func (m *CreateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_CreateInstanceRequest.Size(m) +} +func (m *CreateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceRequest proto.InternalMessageInfo + +func (m *CreateInstanceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInstanceRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *CreateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +// Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. +type UpdateInstanceRequest struct { + // Required. Mask of fields to update. At least one path must be supplied in + // this field. The elements of the repeated paths field may only include these + // fields from [Instance][CloudRedis.Instance]: + // + // * `displayName` + // * `labels` + // * `memorySizeGb` + // * `redisConfig` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,1,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Required. Update description. + // Only fields specified in update_mask are updated. 
+ Instance *Instance `protobuf:"bytes,2,opt,name=instance,proto3" json:"instance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInstanceRequest) Reset() { *m = UpdateInstanceRequest{} } +func (m *UpdateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateInstanceRequest) ProtoMessage() {} +func (*UpdateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{5} +} +func (m *UpdateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInstanceRequest.Unmarshal(m, b) +} +func (m *UpdateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInstanceRequest.Merge(dst, src) +} +func (m *UpdateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateInstanceRequest.Size(m) +} +func (m *UpdateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInstanceRequest proto.InternalMessageInfo + +func (m *UpdateInstanceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +// Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. +type DeleteInstanceRequest struct { + // Required. Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstanceRequest) Reset() { *m = DeleteInstanceRequest{} } +func (m *DeleteInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstanceRequest) ProtoMessage() {} +func (*DeleteInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{6} +} +func (m *DeleteInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstanceRequest.Unmarshal(m, b) +} +func (m *DeleteInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstanceRequest.Merge(dst, src) +} +func (m *DeleteInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstanceRequest.Size(m) +} +func (m *DeleteInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstanceRequest proto.InternalMessageInfo + +func (m *DeleteInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. +type FailoverInstanceRequest struct { + // Required. 
Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. Available data protection modes that the user can choose. If it's + // unspecified, data protection mode will be LIMITED_DATA_LOSS by default. + DataProtectionMode FailoverInstanceRequest_DataProtectionMode `protobuf:"varint,2,opt,name=data_protection_mode,json=dataProtectionMode,proto3,enum=google.cloud.redis.v1.FailoverInstanceRequest_DataProtectionMode" json:"data_protection_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FailoverInstanceRequest) Reset() { *m = FailoverInstanceRequest{} } +func (m *FailoverInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*FailoverInstanceRequest) ProtoMessage() {} +func (*FailoverInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{7} +} +func (m *FailoverInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FailoverInstanceRequest.Unmarshal(m, b) +} +func (m *FailoverInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FailoverInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *FailoverInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FailoverInstanceRequest.Merge(dst, src) +} +func (m *FailoverInstanceRequest) XXX_Size() int { + return xxx_messageInfo_FailoverInstanceRequest.Size(m) +} +func (m *FailoverInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FailoverInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FailoverInstanceRequest proto.InternalMessageInfo + +func (m *FailoverInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FailoverInstanceRequest) GetDataProtectionMode() FailoverInstanceRequest_DataProtectionMode { + if m != nil { + return m.DataProtectionMode + } + return FailoverInstanceRequest_DATA_PROTECTION_MODE_UNSPECIFIED +} + +// Represents the v1 metadata of the long-running operation. +type OperationMetadata struct { + // Creation timestamp. + CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // End timestamp. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Operation target. + Target string `protobuf:"bytes,3,opt,name=target,proto3" json:"target,omitempty"` + // Operation verb. + Verb string `protobuf:"bytes,4,opt,name=verb,proto3" json:"verb,omitempty"` + // Operation status details. + StatusDetail string `protobuf:"bytes,5,opt,name=status_detail,json=statusDetail,proto3" json:"status_detail,omitempty"` + // Specifies if cancellation was requested for the operation. + CancelRequested bool `protobuf:"varint,6,opt,name=cancel_requested,json=cancelRequested,proto3" json:"cancel_requested,omitempty"` + // API version. 
+ ApiVersion string `protobuf:"bytes,7,opt,name=api_version,json=apiVersion,proto3" json:"api_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{8} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *OperationMetadata) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *OperationMetadata) GetVerb() string { + if m != nil { + return m.Verb + } + return "" +} + +func (m *OperationMetadata) GetStatusDetail() string { + if m != nil { + return m.StatusDetail + } + return "" +} + +func (m *OperationMetadata) GetCancelRequested() bool { + if m != nil { + return m.CancelRequested + } + return false +} + +func (m *OperationMetadata) GetApiVersion() string { + if m != nil { + return m.ApiVersion + } + return "" +} + +// This location metadata represents additional configuration options for a +// given location where a Redis instance may be created. All fields are output +// only. It is returned as content of the +// `google.cloud.location.Location.metadata` field. +type LocationMetadata struct { + // Output only. The set of available zones in the location. The map is keyed + // by the lowercase ID of each zone, as defined by GCE. These keys can be + // specified in `location_id` or `alternative_location_id` fields when + // creating a Redis instance. 
+ AvailableZones map[string]*ZoneMetadata `protobuf:"bytes,1,rep,name=available_zones,json=availableZones,proto3" json:"available_zones,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationMetadata) Reset() { *m = LocationMetadata{} } +func (m *LocationMetadata) String() string { return proto.CompactTextString(m) } +func (*LocationMetadata) ProtoMessage() {} +func (*LocationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{9} +} +func (m *LocationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationMetadata.Unmarshal(m, b) +} +func (m *LocationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationMetadata.Marshal(b, m, deterministic) +} +func (dst *LocationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationMetadata.Merge(dst, src) +} +func (m *LocationMetadata) XXX_Size() int { + return xxx_messageInfo_LocationMetadata.Size(m) +} +func (m *LocationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LocationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationMetadata proto.InternalMessageInfo + +func (m *LocationMetadata) GetAvailableZones() map[string]*ZoneMetadata { + if m != nil { + return m.AvailableZones + } + return nil +} + +// Defines specific information for a particular zone. Currently empty and +// reserved for future use only. +type ZoneMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ZoneMetadata) Reset() { *m = ZoneMetadata{} } +func (m *ZoneMetadata) String() string { return proto.CompactTextString(m) } +func (*ZoneMetadata) ProtoMessage() {} +func (*ZoneMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_0cf6a76cde0d103b, []int{10} +} +func (m *ZoneMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ZoneMetadata.Unmarshal(m, b) +} +func (m *ZoneMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ZoneMetadata.Marshal(b, m, deterministic) +} +func (dst *ZoneMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ZoneMetadata.Merge(dst, src) +} +func (m *ZoneMetadata) XXX_Size() int { + return xxx_messageInfo_ZoneMetadata.Size(m) +} +func (m *ZoneMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ZoneMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ZoneMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*Instance)(nil), "google.cloud.redis.v1.Instance") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.redis.v1.Instance.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.redis.v1.Instance.RedisConfigsEntry") + proto.RegisterType((*ListInstancesRequest)(nil), "google.cloud.redis.v1.ListInstancesRequest") + proto.RegisterType((*ListInstancesResponse)(nil), "google.cloud.redis.v1.ListInstancesResponse") + proto.RegisterType((*GetInstanceRequest)(nil), "google.cloud.redis.v1.GetInstanceRequest") + proto.RegisterType((*CreateInstanceRequest)(nil), "google.cloud.redis.v1.CreateInstanceRequest") + proto.RegisterType((*UpdateInstanceRequest)(nil), "google.cloud.redis.v1.UpdateInstanceRequest") + proto.RegisterType((*DeleteInstanceRequest)(nil), "google.cloud.redis.v1.DeleteInstanceRequest") + 
proto.RegisterType((*FailoverInstanceRequest)(nil), "google.cloud.redis.v1.FailoverInstanceRequest") + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.redis.v1.OperationMetadata") + proto.RegisterType((*LocationMetadata)(nil), "google.cloud.redis.v1.LocationMetadata") + proto.RegisterMapType((map[string]*ZoneMetadata)(nil), "google.cloud.redis.v1.LocationMetadata.AvailableZonesEntry") + proto.RegisterType((*ZoneMetadata)(nil), "google.cloud.redis.v1.ZoneMetadata") + proto.RegisterEnum("google.cloud.redis.v1.Instance_State", Instance_State_name, Instance_State_value) + proto.RegisterEnum("google.cloud.redis.v1.Instance_Tier", Instance_Tier_name, Instance_Tier_value) + proto.RegisterEnum("google.cloud.redis.v1.FailoverInstanceRequest_DataProtectionMode", FailoverInstanceRequest_DataProtectionMode_name, FailoverInstanceRequest_DataProtectionMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudRedisClient is the client API for CloudRedis service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudRedisClient interface { + // Lists all Redis instances owned by a project in either the specified + // location (region) or all locations. + // + // The location should have the following format: + // * `projects/{project_id}/locations/{location_id}` + // + // If `location_id` is specified as `-` (wildcard), then all regions + // available to the project are queried, and the results are aggregated. + ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) + // Gets the details of a specific Redis instance. + GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) + // Creates a Redis instance based on the specified tier and memory size. + // + // By default, the instance is accessible from the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // The creation is executed asynchronously and callers may check the returned + // operation to track its progress. Once the operation is completed the Redis + // instance will be fully functional. Completed longrunning.Operation will + // contain the new instance object in the response field. + // + // The returned operation is automatically deleted after a few hours, so there + // is no need to call DeleteOperation. + CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the metadata and configuration of a specific Redis instance. + // + // Completed longrunning.Operation will contain the new instance object + // in the response field. The returned operation is automatically deleted + // after a few hours, so there is no need to call DeleteOperation. + UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Failover the master role to current replica node against a specific + // STANDARD tier redis instance. + FailoverInstance(ctx context.Context, in *FailoverInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a specific Redis instance. 
Instance stops serving and data is + // deleted. + DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type cloudRedisClient struct { + cc *grpc.ClientConn +} + +func NewCloudRedisClient(cc *grpc.ClientConn) CloudRedisClient { + return &cloudRedisClient{cc} +} + +func (c *cloudRedisClient) ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) { + out := new(ListInstancesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/ListInstances", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/GetInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/CreateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/UpdateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) FailoverInstance(ctx context.Context, in *FailoverInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/FailoverInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1.CloudRedis/DeleteInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudRedisServer is the server API for CloudRedis service. +type CloudRedisServer interface { + // Lists all Redis instances owned by a project in either the specified + // location (region) or all locations. + // + // The location should have the following format: + // * `projects/{project_id}/locations/{location_id}` + // + // If `location_id` is specified as `-` (wildcard), then all regions + // available to the project are queried, and the results are aggregated. + ListInstances(context.Context, *ListInstancesRequest) (*ListInstancesResponse, error) + // Gets the details of a specific Redis instance. + GetInstance(context.Context, *GetInstanceRequest) (*Instance, error) + // Creates a Redis instance based on the specified tier and memory size. + // + // By default, the instance is accessible from the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // The creation is executed asynchronously and callers may check the returned + // operation to track its progress. Once the operation is completed the Redis + // instance will be fully functional. 
Completed longrunning.Operation will + // contain the new instance object in the response field. + // + // The returned operation is automatically deleted after a few hours, so there + // is no need to call DeleteOperation. + CreateInstance(context.Context, *CreateInstanceRequest) (*longrunning.Operation, error) + // Updates the metadata and configuration of a specific Redis instance. + // + // Completed longrunning.Operation will contain the new instance object + // in the response field. The returned operation is automatically deleted + // after a few hours, so there is no need to call DeleteOperation. + UpdateInstance(context.Context, *UpdateInstanceRequest) (*longrunning.Operation, error) + // Failover the master role to current replica node against a specific + // STANDARD tier redis instance. + FailoverInstance(context.Context, *FailoverInstanceRequest) (*longrunning.Operation, error) + // Deletes a specific Redis instance. Instance stops serving and data is + // deleted. + DeleteInstance(context.Context, *DeleteInstanceRequest) (*longrunning.Operation, error) +} + +func RegisterCloudRedisServer(s *grpc.Server, srv CloudRedisServer) { + s.RegisterService(&_CloudRedis_serviceDesc, srv) +} + +func _CloudRedis_ListInstances_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstancesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).ListInstances(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/ListInstances", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).ListInstances(ctx, req.(*ListInstancesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_GetInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).GetInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/GetInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).GetInstance(ctx, req.(*GetInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_CreateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).CreateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/CreateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).CreateInstance(ctx, req.(*CreateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_UpdateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(CloudRedisServer).UpdateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/UpdateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).UpdateInstance(ctx, req.(*UpdateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_FailoverInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FailoverInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).FailoverInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/FailoverInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).FailoverInstance(ctx, req.(*FailoverInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_DeleteInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).DeleteInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1.CloudRedis/DeleteInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).DeleteInstance(ctx, req.(*DeleteInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudRedis_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.redis.v1.CloudRedis", + HandlerType: (*CloudRedisServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListInstances", + Handler: _CloudRedis_ListInstances_Handler, + }, + { + MethodName: "GetInstance", + Handler: _CloudRedis_GetInstance_Handler, + }, + { + MethodName: "CreateInstance", + Handler: _CloudRedis_CreateInstance_Handler, + }, + { + MethodName: "UpdateInstance", + Handler: _CloudRedis_UpdateInstance_Handler, + }, + { + MethodName: "FailoverInstance", + Handler: _CloudRedis_FailoverInstance_Handler, + }, + { + MethodName: "DeleteInstance", + Handler: _CloudRedis_DeleteInstance_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/redis/v1/cloud_redis.proto", +} + +func init() { + proto.RegisterFile("google/cloud/redis/v1/cloud_redis.proto", fileDescriptor_cloud_redis_0cf6a76cde0d103b) +} + +var fileDescriptor_cloud_redis_0cf6a76cde0d103b = []byte{ + // 1500 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x57, 0xc1, 0x6e, 0xdb, 0x46, + 0x13, 0xfe, 0x29, 0x5b, 0x8e, 0x35, 0x92, 0x65, 0x7a, 0x63, 0x25, 0x8c, 0xfe, 0x06, 0x56, 0x99, + 0xa4, 0x75, 0x9c, 0x46, 0x82, 0x5d, 0x24, 0x48, 0x6c, 0x04, 0x01, 0x2d, 0xd1, 0x0e, 0x01, 0x5b, + 0x36, 0x28, 0xc5, 0x40, 0x73, 0x21, 0xd6, 0xe2, 0x5a, 0x61, 0x4c, 0x91, 0x2c, 0xb9, 0x52, 0x6b, + 0x17, 0xb9, 0x14, 0xe8, 0xa5, 0x40, 0x81, 0x16, 0xed, 0xb9, 0x68, 0x81, 0x00, 0x7d, 0x99, 0xa2, + 0x97, 0xbe, 0x40, 0x0f, 0x7d, 0x90, 0x62, 0x77, 0x49, 0x5b, 0x92, 0x25, 0xc8, 0xba, 0xed, 0x7c, + 0x33, 0xc3, 0xfd, 0x34, 0xb3, 0xfb, 0xcd, 0x0a, 0x3e, 0x6d, 0xfb, 0x7e, 0xdb, 0x25, 0x95, 0x96, + 0xeb, 0x77, 0xed, 0x4a, 0x48, 0x6c, 0x27, 0xaa, 0xf4, 0xd6, 0x85, 0x69, 0x71, 
0xb3, 0x1c, 0x84, + 0x3e, 0xf5, 0x51, 0x41, 0x04, 0x96, 0xb9, 0xa7, 0x2c, 0x3c, 0xbd, 0xf5, 0xe2, 0x47, 0x71, 0x3e, + 0x0e, 0x9c, 0x0a, 0xf6, 0x3c, 0x9f, 0x62, 0xea, 0xf8, 0x5e, 0x9c, 0x54, 0xbc, 0xd3, 0xe7, 0x0d, + 0x49, 0xe4, 0x77, 0xc3, 0x16, 0x89, 0x5d, 0xf7, 0x62, 0x97, 0xeb, 0x7b, 0xed, 0xb0, 0xeb, 0x79, + 0x8e, 0xd7, 0xae, 0xf8, 0x01, 0x09, 0x07, 0xf2, 0x4b, 0x71, 0x10, 0xb7, 0x8e, 0xbb, 0x27, 0x95, + 0x13, 0x87, 0xb8, 0xb6, 0xd5, 0xc1, 0xd1, 0x69, 0x1c, 0xb1, 0x32, 0x1c, 0x41, 0x9d, 0x0e, 0x89, + 0x28, 0xee, 0x04, 0x22, 0x40, 0xfd, 0x6b, 0x1e, 0xe6, 0x0d, 0x2f, 0xa2, 0xd8, 0x6b, 0x11, 0x84, + 0x60, 0xd6, 0xc3, 0x1d, 0xa2, 0x48, 0x25, 0x69, 0x35, 0x63, 0xf2, 0x35, 0xfa, 0x18, 0x72, 0xb6, + 0x13, 0x05, 0x2e, 0x3e, 0xb3, 0xb8, 0x2f, 0xc5, 0x7d, 0xd9, 0x18, 0xab, 0xb3, 0x90, 0x2a, 0xcc, + 0xb9, 0xf8, 0x98, 0xb8, 0x91, 0x32, 0x53, 0x9a, 0x59, 0xcd, 0x6e, 0x3c, 0x2a, 0x8f, 0x2c, 0x46, + 0x39, 0xd9, 0xa7, 0xbc, 0xc7, 0xa3, 0x75, 0x8f, 0x86, 0x67, 0x66, 0x9c, 0x8a, 0x56, 0x20, 0xeb, + 0xfa, 0x2d, 0xfe, 0xf3, 0x2c, 0xc7, 0x56, 0x66, 0xf9, 0x36, 0x90, 0x40, 0x86, 0x8d, 0x9e, 0xc2, + 0x6d, 0xec, 0x52, 0x12, 0x7a, 0x98, 0x3a, 0x3d, 0x62, 0xf5, 0x07, 0xa7, 0x79, 0x70, 0xa1, 0xcf, + 0xbd, 0x77, 0x99, 0x77, 0x0f, 0x16, 0x38, 0x03, 0xab, 0x47, 0xc2, 0xc8, 0xf1, 0x3d, 0xe5, 0x06, + 0x8f, 0xce, 0x71, 0xf0, 0x48, 0x60, 0x68, 0x0d, 0x96, 0x42, 0x12, 0x91, 0xb0, 0x47, 0x6c, 0xcb, + 0x09, 0xac, 0x10, 0x7b, 0x6d, 0xa2, 0x64, 0x78, 0xe0, 0x62, 0xe2, 0x30, 0x02, 0x93, 0xc1, 0xac, + 0x4a, 0x6f, 0xfd, 0x88, 0x2a, 0x20, 0xaa, 0xc4, 0xd6, 0x0c, 0x0b, 0xfc, 0x90, 0x2a, 0xd9, 0x92, + 0xb4, 0x9a, 0x36, 0xf9, 0x1a, 0x95, 0xe1, 0x66, 0xab, 0x1b, 0x86, 0xc4, 0xa3, 0x03, 0x64, 0x73, + 0x3c, 0x6d, 0x29, 0x76, 0xf5, 0x11, 0xdd, 0x82, 0x6c, 0x2b, 0x24, 0x98, 0x12, 0x8b, 0x35, 0x49, + 0x59, 0x28, 0x49, 0xab, 0xd9, 0x8d, 0x62, 0x52, 0xcb, 0xa4, 0x83, 0xe5, 0x66, 0xd2, 0x41, 0x13, + 0x44, 0x38, 0x03, 0xd0, 0x16, 0xa4, 0x23, 0x8a, 0x29, 0x51, 0xf2, 0x25, 0x69, 0x35, 0xbf, 0xf1, + 0x60, 0x52, 0x0b, 0x1a, 0x2c, 0xd8, 0x14, 0x39, 0xe8, 0x01, 0xe4, 0xd9, 0xa2, 0x1b, 0x59, 0x1d, + 0x12, 0x45, 0xb8, 0x4d, 0x94, 0x45, 0x4e, 0x72, 0x41, 0xa0, 0xfb, 0x02, 0x44, 0x47, 0x49, 0x25, + 0x5b, 0xbe, 0x77, 0xe2, 0xb4, 0x23, 0x45, 0xe6, 0xed, 0x5e, 0x9f, 0xb4, 0x97, 0xc9, 0x90, 0xaa, + 0xc8, 0x11, 0x4d, 0x17, 0xc5, 0x8f, 0x21, 0xf4, 0x0c, 0x66, 0xa9, 0x43, 0x42, 0x65, 0x89, 0x53, + 0xbf, 0x3f, 0xe9, 0x73, 0x4d, 0x87, 0x84, 0x26, 0xcf, 0x40, 0xf7, 0x21, 0xdf, 0x21, 0x1d, 0x3f, + 0x3c, 0xb3, 0x22, 0xe7, 0x9c, 0x58, 0xed, 0x63, 0x05, 0xf1, 0x06, 0xe4, 0x04, 0xda, 0x70, 0xce, + 0xc9, 0xee, 0x31, 0x7a, 0x0c, 0x08, 0x77, 0xe9, 0x5b, 0x3f, 0x74, 0xce, 0x89, 0x6d, 0x79, 0x84, + 0x7e, 0xe5, 0x87, 0xa7, 0xca, 0xb2, 0xe8, 0xc3, 0xa5, 0xa7, 0x2e, 0x1c, 0xc5, 0xe7, 0x90, 0xed, + 0x3b, 0xa0, 0x48, 0x86, 0x99, 0x53, 0x72, 0x16, 0xdf, 0x09, 0xb6, 0x44, 0xcb, 0x90, 0xee, 0x61, + 0xb7, 0x9b, 0xdc, 0x05, 0x61, 0x6c, 0xa6, 0x9e, 0x49, 0xc5, 0x97, 0xb0, 0x74, 0xe5, 0xc7, 0x4e, + 0xf3, 0x01, 0xf5, 0x3b, 0x09, 0xd2, 0xbc, 0x35, 0xa8, 0x00, 0x4b, 0x8d, 0xa6, 0xd6, 0xd4, 0xad, + 0xd7, 0xf5, 0xc6, 0xa1, 0x5e, 0x35, 0x76, 0x0c, 0xbd, 0x26, 0xff, 0x0f, 0xe5, 0x60, 0xbe, 0x6a, + 0xea, 0x5a, 0xd3, 0xa8, 0xef, 0xca, 0x12, 0xca, 0x40, 0xda, 0xd4, 0xb5, 0xda, 0x17, 0x72, 0x8a, + 0x39, 0x5e, 0x1f, 0xd6, 0x84, 0x63, 0x86, 0x59, 0x35, 0x7d, 0x4f, 0xe7, 0xd6, 0x2c, 0x5a, 0x80, + 0x8c, 0xa9, 0x1f, 0x6a, 0x86, 0xc9, 0xcc, 0x34, 0x5a, 0x84, 0xec, 0xbe, 0x66, 0xd4, 0x9b, 0x7a, + 0x5d, 0xab, 0x57, 0x75, 0x79, 0x0e, 0xc9, 0x90, 0xdb, 0xd1, 0x8c, 0x3d, 0xa3, 0xbe, 0x6b, 0x1d, + 0x1c, 
0xe9, 0xa6, 0x9c, 0x51, 0x9f, 0xc1, 0x2c, 0x2b, 0x33, 0x5a, 0x06, 0xb9, 0x69, 0xe8, 0xe6, + 0x10, 0x89, 0x0c, 0xa4, 0xb7, 0xb5, 0x86, 0x51, 0x95, 0x25, 0xf6, 0xad, 0x46, 0x53, 0xab, 0xd7, + 0x34, 0xb3, 0x66, 0xbd, 0xd2, 0xe4, 0x19, 0xf5, 0x1d, 0x2c, 0xef, 0x39, 0x11, 0x4d, 0xba, 0x15, + 0x99, 0xe4, 0xcb, 0x2e, 0x89, 0x28, 0xba, 0x05, 0x73, 0x01, 0x66, 0x27, 0x3e, 0x2e, 0x44, 0x6c, + 0xa1, 0xff, 0x43, 0x26, 0xc0, 0x6d, 0xc2, 0x1b, 0xc8, 0xeb, 0x91, 0x36, 0xe7, 0x19, 0xc0, 0x7a, + 0x87, 0xee, 0x02, 0x70, 0x27, 0xf5, 0x4f, 0x89, 0xa7, 0xcc, 0xf0, 0x44, 0x1e, 0xde, 0x64, 0x80, + 0xfa, 0x9b, 0x04, 0x85, 0xa1, 0xcd, 0xa2, 0xc0, 0xf7, 0x22, 0x82, 0x5e, 0x40, 0xc6, 0x49, 0x40, + 0x45, 0xe2, 0xc7, 0x74, 0x65, 0xc2, 0xb9, 0x32, 0x2f, 0x33, 0xd0, 0x27, 0xb0, 0xe8, 0x91, 0xaf, + 0xa9, 0xd5, 0xb7, 0xb9, 0x68, 0xd5, 0x02, 0x83, 0x0f, 0x13, 0x02, 0xa8, 0x04, 0xd9, 0xae, 0x17, + 0x12, 0xdc, 0x7a, 0x8b, 0x8f, 0x5d, 0xc2, 0xe5, 0x2f, 0x63, 0xf6, 0x43, 0xea, 0x2a, 0xa0, 0x5d, + 0x72, 0x41, 0x30, 0x29, 0xc6, 0x08, 0xa1, 0x55, 0x7f, 0x90, 0xa0, 0x50, 0xe5, 0x17, 0x7a, 0x38, + 0x7a, 0x5c, 0xe9, 0x56, 0x20, 0x9b, 0x50, 0x66, 0xc2, 0x22, 0x18, 0x42, 0x02, 0x71, 0x45, 0x99, + 0x4f, 0x2c, 0x5e, 0xbc, 0x6b, 0x14, 0xe1, 0x22, 0x41, 0xfd, 0x49, 0x82, 0xc2, 0xeb, 0xc0, 0x1e, + 0xc1, 0x67, 0x0b, 0xb2, 0x5d, 0xee, 0xe0, 0x93, 0x86, 0x93, 0x1a, 0x25, 0x54, 0x3b, 0x6c, 0x18, + 0xed, 0xe3, 0xe8, 0xd4, 0x04, 0x11, 0xce, 0xd6, 0x03, 0x9c, 0x52, 0xd3, 0x72, 0x7a, 0x04, 0x85, + 0x1a, 0x71, 0xc9, 0x55, 0x4a, 0xa3, 0x0a, 0xfa, 0x7d, 0x0a, 0x6e, 0xef, 0x60, 0xc7, 0xf5, 0x7b, + 0x24, 0xbc, 0x46, 0x3c, 0x8a, 0x60, 0xd9, 0xc6, 0x14, 0x5b, 0xec, 0x07, 0x90, 0x16, 0x97, 0xeb, + 0x8e, 0x6f, 0x0b, 0x96, 0xf9, 0x0d, 0x6d, 0x0c, 0xcb, 0x31, 0x3b, 0x94, 0x6b, 0x98, 0xe2, 0xc3, + 0x8b, 0x2f, 0xed, 0xfb, 0x36, 0x31, 0x91, 0x7d, 0x05, 0x53, 0x4f, 0x00, 0x5d, 0x8d, 0x44, 0xf7, + 0xa1, 0x54, 0xd3, 0x9a, 0x9a, 0x75, 0x68, 0x1e, 0x34, 0xf5, 0x6a, 0xd3, 0x38, 0xa8, 0x5b, 0xfb, + 0x07, 0xb5, 0x61, 0x2d, 0x28, 0xc0, 0xd2, 0x9e, 0xb1, 0x6f, 0x34, 0xf5, 0x9a, 0xc5, 0xa3, 0xf7, + 0x0e, 0x1a, 0x0d, 0x59, 0x42, 0x37, 0x61, 0x71, 0xe7, 0xc0, 0xac, 0xea, 0x7d, 0x60, 0x4a, 0xfd, + 0x90, 0x82, 0xa5, 0x83, 0xe4, 0xfd, 0xb0, 0x4f, 0x28, 0x66, 0x5c, 0x86, 0x47, 0x8e, 0x34, 0xd5, + 0xc8, 0x79, 0x02, 0xf3, 0xc4, 0xb3, 0x45, 0x66, 0x6a, 0x62, 0xe6, 0x0d, 0xe2, 0xd9, 0x3c, 0xed, + 0x16, 0xcc, 0x51, 0x1c, 0xb6, 0x09, 0x8d, 0xef, 0x73, 0x6c, 0xb1, 0x96, 0xf4, 0x48, 0x78, 0x1c, + 0x4f, 0x7e, 0xbe, 0x66, 0xb3, 0x3b, 0x1e, 0x4c, 0x36, 0xa1, 0xd8, 0x71, 0xe3, 0x49, 0x9f, 0x13, + 0x60, 0x8d, 0x63, 0xe8, 0x21, 0xc8, 0x2d, 0x56, 0x79, 0xd7, 0x0a, 0x45, 0xed, 0x89, 0xad, 0xcc, + 0x95, 0xa4, 0xd5, 0x79, 0x73, 0x51, 0xe0, 0x66, 0x02, 0xb3, 0x1b, 0x83, 0x03, 0x67, 0xe8, 0x25, + 0x00, 0x38, 0x70, 0xe2, 0x77, 0x80, 0xfa, 0x8f, 0x04, 0x72, 0x32, 0x92, 0x2f, 0xaa, 0x64, 0xc3, + 0x22, 0xee, 0x61, 0xc7, 0x65, 0x17, 0xda, 0x3a, 0xf7, 0xbd, 0x0b, 0x49, 0xd9, 0x1a, 0x73, 0x26, + 0x86, 0xbf, 0x50, 0xd6, 0x92, 0xf4, 0x37, 0x2c, 0x5b, 0xcc, 0xc0, 0x3c, 0x1e, 0x00, 0x8b, 0x27, + 0x70, 0x73, 0x44, 0xd8, 0x88, 0xe9, 0xf1, 0xbc, 0x7f, 0x7a, 0x64, 0x37, 0xee, 0x8d, 0x21, 0xc1, + 0xbe, 0x91, 0x10, 0xe8, 0x1f, 0x31, 0x79, 0xc8, 0xf5, 0xbb, 0x36, 0xfe, 0xbc, 0x01, 0x50, 0x65, + 0xa9, 0x7c, 0x72, 0xa1, 0xdf, 0x25, 0x58, 0x18, 0xd0, 0x54, 0x34, 0xee, 0x39, 0x37, 0x4a, 0xe6, + 0x8b, 0x9f, 0x5d, 0x2f, 0x58, 0xc8, 0xb4, 0xfa, 0xe4, 0xdb, 0xbf, 0xff, 0xfd, 0x39, 0x55, 0x41, + 0x8f, 0xd9, 0xa3, 0xfa, 0x1b, 0x21, 0x6b, 0x2f, 0x82, 0xd0, 0x7f, 0x47, 0x5a, 0x34, 0xaa, 0xac, + 0x55, 0x92, 0x77, 0x53, 0x54, 
0x59, 0x7b, 0x5f, 0xb9, 0x94, 0xe7, 0x1f, 0x25, 0xc8, 0xf6, 0xa9, + 0x2a, 0x7a, 0x38, 0x66, 0xd3, 0xab, 0xca, 0x5b, 0x9c, 0x24, 0x36, 0x43, 0x94, 0x98, 0x30, 0x8c, + 0x21, 0x74, 0xc9, 0xa7, 0xb2, 0xf6, 0x1e, 0xfd, 0x2a, 0x41, 0x7e, 0x50, 0xbd, 0xd1, 0xb8, 0x52, + 0x8c, 0x14, 0xf9, 0xe2, 0xdd, 0x24, 0xba, 0xef, 0xc5, 0x5f, 0xbe, 0xb8, 0xb1, 0xea, 0x4b, 0x4e, + 0xeb, 0xb9, 0x3a, 0x5d, 0xa5, 0x36, 0x2f, 0xa4, 0x13, 0xfd, 0x21, 0x41, 0x7e, 0x50, 0xce, 0xc7, + 0x12, 0x1c, 0xa9, 0xfa, 0x93, 0x08, 0xbe, 0xe2, 0x04, 0xb7, 0x37, 0x9e, 0x72, 0x82, 0xc9, 0xb6, + 0xe5, 0xeb, 0x16, 0xb0, 0x8f, 0xe9, 0x07, 0x09, 0xe4, 0x61, 0x55, 0x45, 0xe5, 0xe9, 0xe4, 0x77, + 0x12, 0x5b, 0x8d, 0xb3, 0xdd, 0x52, 0x9f, 0x4e, 0xd5, 0xe5, 0xcd, 0x93, 0x78, 0xb7, 0x4d, 0x69, + 0x0d, 0xfd, 0x22, 0x41, 0x7e, 0x70, 0x18, 0x8d, 0x2d, 0xe8, 0xc8, 0x99, 0x35, 0x89, 0x62, 0x7c, + 0x10, 0xd7, 0xa6, 0x3b, 0x88, 0xdb, 0x11, 0xdc, 0x69, 0xf9, 0x9d, 0xd1, 0x44, 0xb6, 0x95, 0xcb, + 0x8b, 0xde, 0x20, 0x61, 0xcf, 0x69, 0x91, 0xa3, 0x75, 0x36, 0x7a, 0xfc, 0x43, 0xe9, 0xcd, 0x66, + 0x9c, 0xd2, 0xf6, 0x5d, 0xec, 0xb5, 0xcb, 0x7e, 0xd8, 0xae, 0xb4, 0x89, 0xc7, 0x95, 0xbc, 0x22, + 0x5c, 0x38, 0x70, 0xa2, 0xa1, 0x7f, 0xc2, 0x5b, 0x7c, 0x71, 0x3c, 0xc7, 0xc3, 0x3e, 0xff, 0x2f, + 0x00, 0x00, 0xff, 0xff, 0xd9, 0x50, 0x6a, 0xff, 0x2f, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1beta1/cloud_redis.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1beta1/cloud_redis.pb.go new file mode 100644 index 0000000..57a9484 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/redis/v1beta1/cloud_redis.pb.go @@ -0,0 +1,1266 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/redis/v1beta1/cloud_redis.proto + +package redis // import "google.golang.org/genproto/googleapis/cloud/redis/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents the different states of a Redis instance. +type Instance_State int32 + +const ( + // Not set. + Instance_STATE_UNSPECIFIED Instance_State = 0 + // Redis instance is being created. + Instance_CREATING Instance_State = 1 + // Redis instance has been created and is fully usable. + Instance_READY Instance_State = 2 + // Redis instance configuration is being updated. Certain kinds of updates + // may cause the instance to become unusable while the update is in + // progress. + Instance_UPDATING Instance_State = 3 + // Redis instance is being deleted. + Instance_DELETING Instance_State = 4 + // Redis instance is being repaired and may be unusable. + Instance_REPAIRING Instance_State = 5 + // Maintenance is being performed on this Redis instance. 
+ Instance_MAINTENANCE Instance_State = 6 + // Redis instance is importing data (availability may be affected). + Instance_IMPORTING Instance_State = 8 + // Redis instance is failing over (availability may be affected). + Instance_FAILING_OVER Instance_State = 10 +) + +var Instance_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", + 3: "UPDATING", + 4: "DELETING", + 5: "REPAIRING", + 6: "MAINTENANCE", + 8: "IMPORTING", + 10: "FAILING_OVER", +} +var Instance_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, + "UPDATING": 3, + "DELETING": 4, + "REPAIRING": 5, + "MAINTENANCE": 6, + "IMPORTING": 8, + "FAILING_OVER": 10, +} + +func (x Instance_State) String() string { + return proto.EnumName(Instance_State_name, int32(x)) +} +func (Instance_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{0, 0} +} + +// Available service tiers to choose from +type Instance_Tier int32 + +const ( + // Not set. + Instance_TIER_UNSPECIFIED Instance_Tier = 0 + // BASIC tier: standalone instance + Instance_BASIC Instance_Tier = 1 + // STANDARD_HA tier: highly available primary/replica instances + Instance_STANDARD_HA Instance_Tier = 3 +) + +var Instance_Tier_name = map[int32]string{ + 0: "TIER_UNSPECIFIED", + 1: "BASIC", + 3: "STANDARD_HA", +} +var Instance_Tier_value = map[string]int32{ + "TIER_UNSPECIFIED": 0, + "BASIC": 1, + "STANDARD_HA": 3, +} + +func (x Instance_Tier) String() string { + return proto.EnumName(Instance_Tier_name, int32(x)) +} +func (Instance_Tier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{0, 1} +} + +type FailoverInstanceRequest_DataProtectionMode int32 + +const ( + FailoverInstanceRequest_DATA_PROTECTION_MODE_UNSPECIFIED FailoverInstanceRequest_DataProtectionMode = 0 + // Instance failover will be protected with data loss control. More + // specifically, the failover will only be performed if the current + // replication offset diff between master and replica is under a certain + // threshold. + FailoverInstanceRequest_LIMITED_DATA_LOSS FailoverInstanceRequest_DataProtectionMode = 1 + // Instance failover will be performed without data loss control. + FailoverInstanceRequest_FORCE_DATA_LOSS FailoverInstanceRequest_DataProtectionMode = 2 +) + +var FailoverInstanceRequest_DataProtectionMode_name = map[int32]string{ + 0: "DATA_PROTECTION_MODE_UNSPECIFIED", + 1: "LIMITED_DATA_LOSS", + 2: "FORCE_DATA_LOSS", +} +var FailoverInstanceRequest_DataProtectionMode_value = map[string]int32{ + "DATA_PROTECTION_MODE_UNSPECIFIED": 0, + "LIMITED_DATA_LOSS": 1, + "FORCE_DATA_LOSS": 2, +} + +func (x FailoverInstanceRequest_DataProtectionMode) String() string { + return proto.EnumName(FailoverInstanceRequest_DataProtectionMode_name, int32(x)) +} +func (FailoverInstanceRequest_DataProtectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{7, 0} +} + +// A Google Cloud Redis instance. +type Instance struct { + // Required. Unique name of the resource in this scope including project and + // location using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // + // Note: Redis instances are managed and addressed at regional level so + // location_id here refers to a GCP region; however, users may choose which + // specific zone (or collection of zones for cross-zone instances) an instance + // should be provisioned in. 
Refer to [location_id] and + // [alternative_location_id] fields for more details. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An arbitrary and optional user-provided name for the instance. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Resource labels to represent user provided metadata + Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The zone where the instance will be provisioned. If not provided, + // the service will choose a zone for the instance. For STANDARD_HA tier, + // instances will be created across two zones for protection against zonal + // failures. If [alternative_location_id] is also provided, it must be + // different from [location_id]. + LocationId string `protobuf:"bytes,4,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + // Optional. Only applicable to STANDARD_HA tier which protects the instance + // against zonal failures by provisioning it across two zones. If provided, it + // must be a different zone from the one provided in [location_id]. + AlternativeLocationId string `protobuf:"bytes,5,opt,name=alternative_location_id,json=alternativeLocationId,proto3" json:"alternative_location_id,omitempty"` + // Optional. The version of Redis software. + // If not provided, latest supported version will be used. Updating the + // version will perform an upgrade/downgrade to the new version. Currently, + // the supported values are: + // + // * `REDIS_4_0` for Redis 4.0 compatibility + // * `REDIS_3_2` for Redis 3.2 compatibility (default) + RedisVersion string `protobuf:"bytes,7,opt,name=redis_version,json=redisVersion,proto3" json:"redis_version,omitempty"` + // Optional. The CIDR range of internal addresses that are reserved for this + // instance. If not provided, the service will choose an unused /29 block, + // for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique + // and non-overlapping with existing subnets in an authorized network. + ReservedIpRange string `protobuf:"bytes,9,opt,name=reserved_ip_range,json=reservedIpRange,proto3" json:"reserved_ip_range,omitempty"` + // Output only. Hostname or IP address of the exposed Redis endpoint used by + // clients to connect to the service. + Host string `protobuf:"bytes,10,opt,name=host,proto3" json:"host,omitempty"` + // Output only. The port number of the exposed Redis endpoint. + Port int32 `protobuf:"varint,11,opt,name=port,proto3" json:"port,omitempty"` + // Output only. The current zone where the Redis endpoint is placed. For Basic + // Tier instances, this will always be the same as the [location_id] + // provided by the user at creation time. For Standard Tier instances, + // this can be either [location_id] or [alternative_location_id] and can + // change after a failover event. + CurrentLocationId string `protobuf:"bytes,12,opt,name=current_location_id,json=currentLocationId,proto3" json:"current_location_id,omitempty"` + // Output only. The time the instance was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,13,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The current state of this instance. + State Instance_State `protobuf:"varint,14,opt,name=state,proto3,enum=google.cloud.redis.v1beta1.Instance_State" json:"state,omitempty"` + // Output only. 
Additional information about the current status of this + // instance, if available. + StatusMessage string `protobuf:"bytes,15,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Optional. Redis configuration parameters, according to + // http://redis.io/topics/config. Currently, the only supported parameters + // are: + // + // Redis 3.2 and above: + // + // * maxmemory-policy + // * notify-keyspace-events + // + // Redis 4.0 and above: + // + // * activedefrag + // * lfu-log-factor + // * lfu-decay-time + RedisConfigs map[string]string `protobuf:"bytes,16,rep,name=redis_configs,json=redisConfigs,proto3" json:"redis_configs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. The service tier of the instance. + Tier Instance_Tier `protobuf:"varint,17,opt,name=tier,proto3,enum=google.cloud.redis.v1beta1.Instance_Tier" json:"tier,omitempty"` + // Required. Redis memory size in GiB. + MemorySizeGb int32 `protobuf:"varint,18,opt,name=memory_size_gb,json=memorySizeGb,proto3" json:"memory_size_gb,omitempty"` + // Optional. The full name of the Google Compute Engine + // [network](/compute/docs/networks-and-firewalls#networks) to which the + // instance is connected. If left unspecified, the `default` network + // will be used. + AuthorizedNetwork string `protobuf:"bytes,20,opt,name=authorized_network,json=authorizedNetwork,proto3" json:"authorized_network,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instance) Reset() { *m = Instance{} } +func (m *Instance) String() string { return proto.CompactTextString(m) } +func (*Instance) ProtoMessage() {} +func (*Instance) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{0} +} +func (m *Instance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instance.Unmarshal(m, b) +} +func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instance.Marshal(b, m, deterministic) +} +func (dst *Instance) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instance.Merge(dst, src) +} +func (m *Instance) XXX_Size() int { + return xxx_messageInfo_Instance.Size(m) +} +func (m *Instance) XXX_DiscardUnknown() { + xxx_messageInfo_Instance.DiscardUnknown(m) +} + +var xxx_messageInfo_Instance proto.InternalMessageInfo + +func (m *Instance) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instance) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Instance) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Instance) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +func (m *Instance) GetAlternativeLocationId() string { + if m != nil { + return m.AlternativeLocationId + } + return "" +} + +func (m *Instance) GetRedisVersion() string { + if m != nil { + return m.RedisVersion + } + return "" +} + +func (m *Instance) GetReservedIpRange() string { + if m != nil { + return m.ReservedIpRange + } + return "" +} + +func (m *Instance) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *Instance) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *Instance) GetCurrentLocationId() string { + if m != nil { + return m.CurrentLocationId + } + return "" +} + +func (m *Instance) GetCreateTime() 
*timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Instance) GetState() Instance_State { + if m != nil { + return m.State + } + return Instance_STATE_UNSPECIFIED +} + +func (m *Instance) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Instance) GetRedisConfigs() map[string]string { + if m != nil { + return m.RedisConfigs + } + return nil +} + +func (m *Instance) GetTier() Instance_Tier { + if m != nil { + return m.Tier + } + return Instance_TIER_UNSPECIFIED +} + +func (m *Instance) GetMemorySizeGb() int32 { + if m != nil { + return m.MemorySizeGb + } + return 0 +} + +func (m *Instance) GetAuthorizedNetwork() string { + if m != nil { + return m.AuthorizedNetwork + } + return "" +} + +// Request for +// [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]. +type ListInstancesRequest struct { + // Required. The resource name of the instance location using the form: + // `projects/{project_id}/locations/{location_id}` + // where `location_id` refers to a GCP region + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // + // If not specified, a default value of 1000 will be used by the service. + // Regardless of the page_size value, the response may include a partial list + // and a caller should only rely on response's + // [next_page_token][CloudRedis.ListInstancesResponse.next_page_token] + // to determine if there are more instances left to be queried. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token value returned from a previous List request, + // if any. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesRequest) Reset() { *m = ListInstancesRequest{} } +func (m *ListInstancesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstancesRequest) ProtoMessage() {} +func (*ListInstancesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{1} +} +func (m *ListInstancesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesRequest.Unmarshal(m, b) +} +func (m *ListInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstancesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesRequest.Merge(dst, src) +} +func (m *ListInstancesRequest) XXX_Size() int { + return xxx_messageInfo_ListInstancesRequest.Size(m) +} +func (m *ListInstancesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesRequest proto.InternalMessageInfo + +func (m *ListInstancesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstancesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstancesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for +// [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]. 
+type ListInstancesResponse struct { + // A list of Redis instances in the project in the specified location, + // or across all locations. + // + // If the `location_id` in the parent field of the request is "-", all regions + // available to the project are queried, and the results aggregated. + // If in such an aggregated query a location is unavailable, a dummy Redis + // entry is included in the response with the "name" field set to a value of + // the form projects/{project_id}/locations/{location_id}/instances/- and the + // "status" field set to ERROR and "status_message" field set to "location not + // available for ListInstances". + Instances []*Instance `protobuf:"bytes,1,rep,name=instances,proto3" json:"instances,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Locations that could not be reached. + Unreachable []string `protobuf:"bytes,3,rep,name=unreachable,proto3" json:"unreachable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesResponse) Reset() { *m = ListInstancesResponse{} } +func (m *ListInstancesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstancesResponse) ProtoMessage() {} +func (*ListInstancesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{2} +} +func (m *ListInstancesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesResponse.Unmarshal(m, b) +} +func (m *ListInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstancesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesResponse.Merge(dst, src) +} +func (m *ListInstancesResponse) XXX_Size() int { + return xxx_messageInfo_ListInstancesResponse.Size(m) +} +func (m *ListInstancesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesResponse proto.InternalMessageInfo + +func (m *ListInstancesResponse) GetInstances() []*Instance { + if m != nil { + return m.Instances + } + return nil +} + +func (m *ListInstancesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListInstancesResponse) GetUnreachable() []string { + if m != nil { + return m.Unreachable + } + return nil +} + +// Request for [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance]. +type GetInstanceRequest struct { + // Required. 
Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceRequest) Reset() { *m = GetInstanceRequest{} } +func (m *GetInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceRequest) ProtoMessage() {} +func (*GetInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{3} +} +func (m *GetInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceRequest.Unmarshal(m, b) +} +func (m *GetInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceRequest.Merge(dst, src) +} +func (m *GetInstanceRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceRequest.Size(m) +} +func (m *GetInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceRequest proto.InternalMessageInfo + +func (m *GetInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for +// [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance]. +type CreateInstanceRequest struct { + // Required. The resource name of the instance location using the form: + // `projects/{project_id}/locations/{location_id}` + // where `location_id` refers to a GCP region + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The logical name of the Redis instance in the customer project + // with the following restrictions: + // + // * Must contain only lowercase letters, numbers, and hyphens. + // * Must start with a letter. + // * Must be between 1-40 characters. + // * Must end with a number or a letter. + // * Must be unique within the customer project / location + InstanceId string `protobuf:"bytes,2,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // Required. 
A Redis [Instance] resource + Instance *Instance `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceRequest) Reset() { *m = CreateInstanceRequest{} } +func (m *CreateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceRequest) ProtoMessage() {} +func (*CreateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{4} +} +func (m *CreateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceRequest.Unmarshal(m, b) +} +func (m *CreateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceRequest.Merge(dst, src) +} +func (m *CreateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_CreateInstanceRequest.Size(m) +} +func (m *CreateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceRequest proto.InternalMessageInfo + +func (m *CreateInstanceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInstanceRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *CreateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +// Request for +// [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance]. +type UpdateInstanceRequest struct { + // Required. Mask of fields to update. At least one path must be supplied in + // this field. The elements of the repeated paths field may only include these + // fields from [Instance][google.cloud.redis.v1beta1.Instance]: + // + // * `displayName` + // * `labels` + // * `memorySizeGb` + // * `redisConfig` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,1,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Required. Update description. + // Only fields specified in update_mask are updated. 
+ Instance *Instance `protobuf:"bytes,2,opt,name=instance,proto3" json:"instance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInstanceRequest) Reset() { *m = UpdateInstanceRequest{} } +func (m *UpdateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateInstanceRequest) ProtoMessage() {} +func (*UpdateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{5} +} +func (m *UpdateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInstanceRequest.Unmarshal(m, b) +} +func (m *UpdateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInstanceRequest.Merge(dst, src) +} +func (m *UpdateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateInstanceRequest.Size(m) +} +func (m *UpdateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInstanceRequest proto.InternalMessageInfo + +func (m *UpdateInstanceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +// Request for +// [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance]. +type DeleteInstanceRequest struct { + // Required. Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstanceRequest) Reset() { *m = DeleteInstanceRequest{} } +func (m *DeleteInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstanceRequest) ProtoMessage() {} +func (*DeleteInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{6} +} +func (m *DeleteInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstanceRequest.Unmarshal(m, b) +} +func (m *DeleteInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstanceRequest.Merge(dst, src) +} +func (m *DeleteInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstanceRequest.Size(m) +} +func (m *DeleteInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstanceRequest proto.InternalMessageInfo + +func (m *DeleteInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for +// [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance]. +type FailoverInstanceRequest struct { + // Required. 
Redis instance resource name using the form: + // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + // where `location_id` refers to a GCP region + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. Available data protection modes that the user can choose. If it's + // unspecified, data protection mode will be LIMITED_DATA_LOSS by default. + DataProtectionMode FailoverInstanceRequest_DataProtectionMode `protobuf:"varint,2,opt,name=data_protection_mode,json=dataProtectionMode,proto3,enum=google.cloud.redis.v1beta1.FailoverInstanceRequest_DataProtectionMode" json:"data_protection_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FailoverInstanceRequest) Reset() { *m = FailoverInstanceRequest{} } +func (m *FailoverInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*FailoverInstanceRequest) ProtoMessage() {} +func (*FailoverInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{7} +} +func (m *FailoverInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FailoverInstanceRequest.Unmarshal(m, b) +} +func (m *FailoverInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FailoverInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *FailoverInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FailoverInstanceRequest.Merge(dst, src) +} +func (m *FailoverInstanceRequest) XXX_Size() int { + return xxx_messageInfo_FailoverInstanceRequest.Size(m) +} +func (m *FailoverInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FailoverInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FailoverInstanceRequest proto.InternalMessageInfo + +func (m *FailoverInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FailoverInstanceRequest) GetDataProtectionMode() FailoverInstanceRequest_DataProtectionMode { + if m != nil { + return m.DataProtectionMode + } + return FailoverInstanceRequest_DATA_PROTECTION_MODE_UNSPECIFIED +} + +// This location metadata represents additional configuration options for a +// given location where a Redis instance may be created. All fields are output +// only. It is returned as content of the +// `google.cloud.location.Location.metadata` field. +type LocationMetadata struct { + // Output only. The set of available zones in the location. The map is keyed + // by the lowercase ID of each zone, as defined by GCE. These keys can be + // specified in `location_id` or `alternative_location_id` fields when + // creating a Redis instance. 
+ AvailableZones map[string]*ZoneMetadata `protobuf:"bytes,1,rep,name=available_zones,json=availableZones,proto3" json:"available_zones,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationMetadata) Reset() { *m = LocationMetadata{} } +func (m *LocationMetadata) String() string { return proto.CompactTextString(m) } +func (*LocationMetadata) ProtoMessage() {} +func (*LocationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{8} +} +func (m *LocationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationMetadata.Unmarshal(m, b) +} +func (m *LocationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationMetadata.Marshal(b, m, deterministic) +} +func (dst *LocationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationMetadata.Merge(dst, src) +} +func (m *LocationMetadata) XXX_Size() int { + return xxx_messageInfo_LocationMetadata.Size(m) +} +func (m *LocationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LocationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationMetadata proto.InternalMessageInfo + +func (m *LocationMetadata) GetAvailableZones() map[string]*ZoneMetadata { + if m != nil { + return m.AvailableZones + } + return nil +} + +// Defines specific information for a particular zone. Currently empty and +// reserved for future use only. +type ZoneMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ZoneMetadata) Reset() { *m = ZoneMetadata{} } +func (m *ZoneMetadata) String() string { return proto.CompactTextString(m) } +func (*ZoneMetadata) ProtoMessage() {} +func (*ZoneMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_redis_ec9ebde39ed154fe, []int{9} +} +func (m *ZoneMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ZoneMetadata.Unmarshal(m, b) +} +func (m *ZoneMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ZoneMetadata.Marshal(b, m, deterministic) +} +func (dst *ZoneMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ZoneMetadata.Merge(dst, src) +} +func (m *ZoneMetadata) XXX_Size() int { + return xxx_messageInfo_ZoneMetadata.Size(m) +} +func (m *ZoneMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ZoneMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ZoneMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*Instance)(nil), "google.cloud.redis.v1beta1.Instance") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.redis.v1beta1.Instance.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.redis.v1beta1.Instance.RedisConfigsEntry") + proto.RegisterType((*ListInstancesRequest)(nil), "google.cloud.redis.v1beta1.ListInstancesRequest") + proto.RegisterType((*ListInstancesResponse)(nil), "google.cloud.redis.v1beta1.ListInstancesResponse") + proto.RegisterType((*GetInstanceRequest)(nil), "google.cloud.redis.v1beta1.GetInstanceRequest") + proto.RegisterType((*CreateInstanceRequest)(nil), "google.cloud.redis.v1beta1.CreateInstanceRequest") + proto.RegisterType((*UpdateInstanceRequest)(nil), "google.cloud.redis.v1beta1.UpdateInstanceRequest") + proto.RegisterType((*DeleteInstanceRequest)(nil), 
"google.cloud.redis.v1beta1.DeleteInstanceRequest") + proto.RegisterType((*FailoverInstanceRequest)(nil), "google.cloud.redis.v1beta1.FailoverInstanceRequest") + proto.RegisterType((*LocationMetadata)(nil), "google.cloud.redis.v1beta1.LocationMetadata") + proto.RegisterMapType((map[string]*ZoneMetadata)(nil), "google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry") + proto.RegisterType((*ZoneMetadata)(nil), "google.cloud.redis.v1beta1.ZoneMetadata") + proto.RegisterEnum("google.cloud.redis.v1beta1.Instance_State", Instance_State_name, Instance_State_value) + proto.RegisterEnum("google.cloud.redis.v1beta1.Instance_Tier", Instance_Tier_name, Instance_Tier_value) + proto.RegisterEnum("google.cloud.redis.v1beta1.FailoverInstanceRequest_DataProtectionMode", FailoverInstanceRequest_DataProtectionMode_name, FailoverInstanceRequest_DataProtectionMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudRedisClient is the client API for CloudRedis service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudRedisClient interface { + // Lists all Redis instances owned by a project in either the specified + // location (region) or all locations. + // + // The location should have the following format: + // * `projects/{project_id}/locations/{location_id}` + // + // If `location_id` is specified as `-` (wildcard), then all regions + // available to the project are queried, and the results are aggregated. + ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) + // Gets the details of a specific Redis instance. + GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) + // Creates a Redis instance based on the specified tier and memory size. + // + // By default, the instance is accessible from the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // The creation is executed asynchronously and callers may check the returned + // operation to track its progress. Once the operation is completed the Redis + // instance will be fully functional. Completed longrunning.Operation will + // contain the new instance object in the response field. + // + // The returned operation is automatically deleted after a few hours, so there + // is no need to call DeleteOperation. + CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the metadata and configuration of a specific Redis instance. + // + // Completed longrunning.Operation will contain the new instance object + // in the response field. The returned operation is automatically deleted + // after a few hours, so there is no need to call DeleteOperation. + UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Failover the master role to current replica node against a specific + // STANDARD tier redis instance. + FailoverInstance(ctx context.Context, in *FailoverInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes a specific Redis instance. 
Instance stops serving and data is + // deleted. + DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type cloudRedisClient struct { + cc *grpc.ClientConn +} + +func NewCloudRedisClient(cc *grpc.ClientConn) CloudRedisClient { + return &cloudRedisClient{cc} +} + +func (c *cloudRedisClient) ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) { + out := new(ListInstancesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/ListInstances", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/GetInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/CreateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) FailoverInstance(ctx context.Context, in *FailoverInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/FailoverInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudRedisClient) DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudRedisServer is the server API for CloudRedis service. +type CloudRedisServer interface { + // Lists all Redis instances owned by a project in either the specified + // location (region) or all locations. + // + // The location should have the following format: + // * `projects/{project_id}/locations/{location_id}` + // + // If `location_id` is specified as `-` (wildcard), then all regions + // available to the project are queried, and the results are aggregated. + ListInstances(context.Context, *ListInstancesRequest) (*ListInstancesResponse, error) + // Gets the details of a specific Redis instance. + GetInstance(context.Context, *GetInstanceRequest) (*Instance, error) + // Creates a Redis instance based on the specified tier and memory size. + // + // By default, the instance is accessible from the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // The creation is executed asynchronously and callers may check the returned + // operation to track its progress. Once the operation is completed the Redis + // instance will be fully functional. 
Completed longrunning.Operation will + // contain the new instance object in the response field. + // + // The returned operation is automatically deleted after a few hours, so there + // is no need to call DeleteOperation. + CreateInstance(context.Context, *CreateInstanceRequest) (*longrunning.Operation, error) + // Updates the metadata and configuration of a specific Redis instance. + // + // Completed longrunning.Operation will contain the new instance object + // in the response field. The returned operation is automatically deleted + // after a few hours, so there is no need to call DeleteOperation. + UpdateInstance(context.Context, *UpdateInstanceRequest) (*longrunning.Operation, error) + // Failover the master role to current replica node against a specific + // STANDARD tier redis instance. + FailoverInstance(context.Context, *FailoverInstanceRequest) (*longrunning.Operation, error) + // Deletes a specific Redis instance. Instance stops serving and data is + // deleted. + DeleteInstance(context.Context, *DeleteInstanceRequest) (*longrunning.Operation, error) +} + +func RegisterCloudRedisServer(s *grpc.Server, srv CloudRedisServer) { + s.RegisterService(&_CloudRedis_serviceDesc, srv) +} + +func _CloudRedis_ListInstances_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstancesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).ListInstances(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/ListInstances", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).ListInstances(ctx, req.(*ListInstancesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_GetInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).GetInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/GetInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).GetInstance(ctx, req.(*GetInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_CreateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).CreateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/CreateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).CreateInstance(ctx, req.(*CreateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_UpdateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(CloudRedisServer).UpdateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).UpdateInstance(ctx, req.(*UpdateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_FailoverInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FailoverInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).FailoverInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/FailoverInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).FailoverInstance(ctx, req.(*FailoverInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudRedis_DeleteInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudRedisServer).DeleteInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudRedisServer).DeleteInstance(ctx, req.(*DeleteInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudRedis_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.redis.v1beta1.CloudRedis", + HandlerType: (*CloudRedisServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListInstances", + Handler: _CloudRedis_ListInstances_Handler, + }, + { + MethodName: "GetInstance", + Handler: _CloudRedis_GetInstance_Handler, + }, + { + MethodName: "CreateInstance", + Handler: _CloudRedis_CreateInstance_Handler, + }, + { + MethodName: "UpdateInstance", + Handler: _CloudRedis_UpdateInstance_Handler, + }, + { + MethodName: "FailoverInstance", + Handler: _CloudRedis_FailoverInstance_Handler, + }, + { + MethodName: "DeleteInstance", + Handler: _CloudRedis_DeleteInstance_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/redis/v1beta1/cloud_redis.proto", +} + +func init() { + proto.RegisterFile("google/cloud/redis/v1beta1/cloud_redis.proto", fileDescriptor_cloud_redis_ec9ebde39ed154fe) +} + +var fileDescriptor_cloud_redis_ec9ebde39ed154fe = []byte{ + // 1417 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x57, 0x6f, 0x6f, 0xdb, 0x44, + 0x18, 0xc7, 0x6d, 0x53, 0x9a, 0x27, 0x6d, 0xea, 0xde, 0x5a, 0x66, 0x02, 0x63, 0xc1, 0x1b, 0xa8, + 0x14, 0x70, 0x58, 0x86, 0xa6, 0xd1, 0x6a, 0x5b, 0xdd, 0xc4, 0xed, 0x2c, 0x35, 0x7f, 0xe4, 0x64, + 0x93, 0x18, 0x2f, 0xac, 0x6b, 0x7c, 0xcd, 0xbc, 0x3a, 0x3e, 0x63, 0x3b, 0x61, 0x2d, 0x9a, 0x90, + 0xf8, 0x02, 0x48, 0x03, 0xf1, 0x12, 0x5e, 0x21, 0xf1, 0x0a, 0xc1, 0x67, 0xe1, 0x2b, 0xf0, 0x8a, + 0x77, 0x7c, 0x03, 0x74, 0x67, 0xbb, 0x75, 0xd2, 0x34, 0xcb, 0xfa, 0xee, 0xee, 0xf7, 0xfc, 0x9e, + 0xbb, 0x9f, 0x9f, 0xe7, 0xee, 0x67, 0x1b, 0x3e, 0xe9, 0x52, 0xda, 0x75, 0x48, 0xa9, 0xe3, 0xd0, + 0xbe, 0x55, 0xf2, 0x89, 0x65, 0x07, 0xa5, 0xc1, 
0xad, 0x03, 0x12, 0xe2, 0x5b, 0x11, 0x66, 0x72, + 0x4c, 0xf1, 0x7c, 0x1a, 0x52, 0x54, 0x88, 0xd8, 0x0a, 0x8f, 0x28, 0x51, 0x24, 0x66, 0x17, 0xde, + 0x8d, 0x57, 0xc2, 0x9e, 0x5d, 0xc2, 0xae, 0x4b, 0x43, 0x1c, 0xda, 0xd4, 0x8d, 0x33, 0x0b, 0x6f, + 0xa7, 0xa2, 0x3e, 0x09, 0x68, 0xdf, 0xef, 0x90, 0x38, 0x74, 0x23, 0x0e, 0x39, 0xd4, 0xed, 0xfa, + 0x7d, 0xd7, 0xb5, 0xdd, 0x6e, 0x89, 0x7a, 0xc4, 0x1f, 0xca, 0x2f, 0xc6, 0x24, 0x3e, 0x3b, 0xe8, + 0x1f, 0x96, 0x0e, 0x6d, 0xe2, 0x58, 0x66, 0x0f, 0x07, 0x47, 0x31, 0xe3, 0xfa, 0x28, 0x23, 0xb4, + 0x7b, 0x24, 0x08, 0x71, 0xcf, 0x8b, 0x08, 0xf2, 0x7f, 0x0b, 0xb0, 0xa0, 0xbb, 0x41, 0x88, 0xdd, + 0x0e, 0x41, 0x08, 0xe6, 0x5c, 0xdc, 0x23, 0x92, 0x50, 0x14, 0xd6, 0xb3, 0x06, 0x1f, 0xa3, 0xf7, + 0x61, 0xd1, 0xb2, 0x03, 0xcf, 0xc1, 0xc7, 0x26, 0x8f, 0xcd, 0xf0, 0x58, 0x2e, 0xc6, 0xea, 0x8c, + 0xf2, 0x10, 0xe6, 0x1d, 0x7c, 0x40, 0x9c, 0x40, 0x9a, 0x2d, 0xce, 0xae, 0xe7, 0xca, 0x9f, 0x29, + 0x17, 0x57, 0x44, 0x49, 0x36, 0x53, 0xf6, 0x79, 0x8a, 0xe6, 0x86, 0xfe, 0xb1, 0x11, 0xe7, 0xa3, + 0xeb, 0x90, 0x73, 0x68, 0x87, 0x3f, 0xa3, 0x69, 0x5b, 0xd2, 0x1c, 0xdf, 0x0b, 0x12, 0x48, 0xb7, + 0xd0, 0x1d, 0xb8, 0x8a, 0x9d, 0x90, 0xf8, 0x2e, 0x0e, 0xed, 0x01, 0x31, 0xd3, 0xe4, 0x0c, 0x27, + 0xaf, 0xa5, 0xc2, 0xfb, 0x67, 0x79, 0x37, 0x60, 0x89, 0xcb, 0x30, 0x07, 0xc4, 0x0f, 0x6c, 0xea, + 0x4a, 0x6f, 0x72, 0xf6, 0x22, 0x07, 0x1f, 0x47, 0x18, 0xda, 0x80, 0x15, 0x9f, 0x04, 0xc4, 0x1f, + 0x10, 0xcb, 0xb4, 0x3d, 0xd3, 0xc7, 0x6e, 0x97, 0x48, 0x59, 0x4e, 0x5c, 0x4e, 0x02, 0xba, 0x67, + 0x30, 0x98, 0x95, 0xea, 0x29, 0x0d, 0x42, 0x09, 0xa2, 0x52, 0xb1, 0x31, 0xc3, 0x3c, 0xea, 0x87, + 0x52, 0xae, 0x28, 0xac, 0x67, 0x0c, 0x3e, 0x46, 0x0a, 0x5c, 0xe9, 0xf4, 0x7d, 0x9f, 0xb8, 0xe1, + 0x90, 0xd8, 0x45, 0x9e, 0xb6, 0x12, 0x87, 0x52, 0x42, 0xb7, 0x20, 0xd7, 0xf1, 0x09, 0x0e, 0x89, + 0xc9, 0x3a, 0x25, 0x2d, 0x15, 0x85, 0xf5, 0x5c, 0xb9, 0x90, 0x14, 0x34, 0x69, 0xa3, 0xd2, 0x4e, + 0xda, 0x68, 0x40, 0x44, 0x67, 0x00, 0xda, 0x86, 0x4c, 0x10, 0xe2, 0x90, 0x48, 0xf9, 0xa2, 0xb0, + 0x9e, 0x2f, 0x6f, 0x4c, 0xd5, 0x87, 0x16, 0xcb, 0x30, 0xa2, 0x44, 0xf4, 0x01, 0xe4, 0xd9, 0xa0, + 0x1f, 0x98, 0x3d, 0x12, 0x04, 0xb8, 0x4b, 0xa4, 0x65, 0xae, 0x74, 0x29, 0x42, 0x6b, 0x11, 0x88, + 0xbe, 0x4a, 0xca, 0xd9, 0xa1, 0xee, 0xa1, 0xdd, 0x0d, 0x24, 0x91, 0x37, 0xfe, 0xce, 0x54, 0x1b, + 0x1a, 0x0c, 0xae, 0x44, 0x89, 0x51, 0xfb, 0xa3, 0x36, 0xc4, 0x10, 0xba, 0x07, 0x73, 0xa1, 0x4d, + 0x7c, 0x69, 0x85, 0x3f, 0xc4, 0x47, 0x53, 0xad, 0xd9, 0xb6, 0x89, 0x6f, 0xf0, 0x34, 0x74, 0x13, + 0xf2, 0x3d, 0xd2, 0xa3, 0xfe, 0xb1, 0x19, 0xd8, 0x27, 0xc4, 0xec, 0x1e, 0x48, 0x88, 0xf7, 0x63, + 0x31, 0x42, 0x5b, 0xf6, 0x09, 0xd9, 0x3b, 0x40, 0x9f, 0x02, 0xc2, 0xfd, 0xf0, 0x29, 0xf5, 0xed, + 0x13, 0x62, 0x99, 0x2e, 0x09, 0xbf, 0xa1, 0xfe, 0x91, 0xb4, 0x1a, 0xb5, 0xe5, 0x2c, 0x52, 0x8f, + 0x02, 0x85, 0x2f, 0x20, 0x97, 0x3a, 0xaf, 0x48, 0x84, 0xd9, 0x23, 0x72, 0x1c, 0xdf, 0x13, 0x36, + 0x44, 0xab, 0x90, 0x19, 0x60, 0xa7, 0x9f, 0xdc, 0x8f, 0x68, 0xb2, 0x39, 0x73, 0x57, 0x28, 0x3c, + 0x80, 0x95, 0x73, 0x4f, 0xfc, 0x3a, 0x0b, 0xc8, 0x3f, 0x09, 0x90, 0xe1, 0x4d, 0x42, 0x6b, 0xb0, + 0xd2, 0x6a, 0xab, 0x6d, 0xcd, 0x7c, 0x54, 0x6f, 0x35, 0xb5, 0x8a, 0xbe, 0xab, 0x6b, 0x55, 0xf1, + 0x0d, 0xb4, 0x08, 0x0b, 0x15, 0x43, 0x53, 0xdb, 0x7a, 0x7d, 0x4f, 0x14, 0x50, 0x16, 0x32, 0x86, + 0xa6, 0x56, 0xbf, 0x14, 0x67, 0x58, 0xe0, 0x51, 0xb3, 0x1a, 0x05, 0x66, 0xd9, 0xac, 0xaa, 0xed, + 0x6b, 0x7c, 0x36, 0x87, 0x96, 0x20, 0x6b, 0x68, 0x4d, 0x55, 0x37, 0xd8, 0x34, 0x83, 0x96, 0x21, + 0x57, 0x53, 0xf5, 0x7a, 0x5b, 0xab, 0xab, 0xf5, 0x8a, 0x26, 0xce, 0xb3, 
0xb8, 0x5e, 0x6b, 0x36, + 0x0c, 0x4e, 0x5f, 0x40, 0x22, 0x2c, 0xee, 0xaa, 0xfa, 0xbe, 0x5e, 0xdf, 0x33, 0x1b, 0x8f, 0x35, + 0x43, 0x04, 0xf9, 0x2e, 0xcc, 0xb1, 0xaa, 0xa3, 0x55, 0x10, 0xdb, 0xba, 0x66, 0x8c, 0x68, 0xca, + 0x42, 0x66, 0x47, 0x6d, 0xe9, 0x15, 0x51, 0x60, 0x4b, 0xb7, 0xda, 0x6a, 0xbd, 0xaa, 0x1a, 0x55, + 0xf3, 0xa1, 0x2a, 0xce, 0xca, 0xcf, 0x60, 0x75, 0xdf, 0x0e, 0xc2, 0xa4, 0x79, 0x81, 0x41, 0xbe, + 0xee, 0x93, 0x20, 0x44, 0x6f, 0xc1, 0xbc, 0x87, 0xd9, 0x7d, 0x88, 0xeb, 0x12, 0xcf, 0xd0, 0x3b, + 0x90, 0xf5, 0x70, 0x97, 0xf0, 0x7e, 0xf2, 0xf2, 0x64, 0x8c, 0x05, 0x06, 0xb0, 0x56, 0xa2, 0x6b, + 0x00, 0x3c, 0x18, 0xd2, 0x23, 0xe2, 0x4a, 0xb3, 0x3c, 0x91, 0xd3, 0xdb, 0x0c, 0x90, 0x7f, 0x13, + 0x60, 0x6d, 0x64, 0xb3, 0xc0, 0xa3, 0x6e, 0x40, 0xd0, 0x0e, 0x64, 0xed, 0x04, 0x94, 0x04, 0x7e, + 0x7e, 0x6f, 0x4e, 0x73, 0xd6, 0x8c, 0xb3, 0x34, 0xf4, 0x21, 0x2c, 0xbb, 0xe4, 0x79, 0x68, 0xa6, + 0x14, 0x44, 0xed, 0x5b, 0x62, 0x70, 0x33, 0x51, 0x81, 0x8a, 0x90, 0xeb, 0xbb, 0x3e, 0xc1, 0x9d, + 0xa7, 0xf8, 0xc0, 0x21, 0xdc, 0x26, 0xb3, 0x46, 0x1a, 0x92, 0xd7, 0x01, 0xed, 0x91, 0x53, 0x95, + 0x49, 0x45, 0xc6, 0x18, 0xb2, 0xfc, 0x52, 0x80, 0xb5, 0x0a, 0xbf, 0xf3, 0xa3, 0xec, 0x8b, 0xea, + 0x77, 0x1d, 0x72, 0x89, 0x64, 0xe6, 0x3d, 0x91, 0x42, 0x48, 0x20, 0xdd, 0x42, 0xdb, 0xb0, 0x90, + 0xcc, 0x78, 0x05, 0xa7, 0xad, 0xc4, 0x69, 0x96, 0xfc, 0xb3, 0x00, 0x6b, 0x8f, 0x3c, 0x6b, 0x8c, + 0xa8, 0x2d, 0xc8, 0xf5, 0x79, 0x80, 0xbf, 0x96, 0xb8, 0xb2, 0x71, 0x86, 0xb6, 0xcb, 0xde, 0x5c, + 0x35, 0x1c, 0x1c, 0x19, 0x10, 0xd1, 0xd9, 0x78, 0x48, 0xd8, 0xcc, 0xa5, 0x84, 0x7d, 0x0c, 0x6b, + 0x55, 0xe2, 0x90, 0xf3, 0xba, 0xc6, 0x95, 0xf6, 0x87, 0x19, 0xb8, 0xba, 0x8b, 0x6d, 0x87, 0x0e, + 0x88, 0x3f, 0x05, 0x1f, 0x3d, 0x87, 0x55, 0x0b, 0x87, 0xd8, 0x64, 0x4f, 0x41, 0x3a, 0xdc, 0xdb, + 0x7b, 0xd4, 0x8a, 0xa4, 0xe6, 0xcb, 0xbb, 0x93, 0xa4, 0x5e, 0xb0, 0x8d, 0x52, 0xc5, 0x21, 0x6e, + 0x9e, 0x2e, 0x57, 0xa3, 0x16, 0x31, 0x90, 0x75, 0x0e, 0x93, 0x0f, 0x01, 0x9d, 0x67, 0xa2, 0x9b, + 0x50, 0xac, 0xaa, 0x6d, 0xd5, 0x6c, 0x1a, 0x8d, 0xb6, 0x56, 0x69, 0xeb, 0x8d, 0xba, 0x59, 0x6b, + 0x54, 0x47, 0xed, 0x62, 0x0d, 0x56, 0xf6, 0xf5, 0x9a, 0xde, 0xd6, 0xaa, 0x26, 0x67, 0xef, 0x37, + 0x5a, 0x2d, 0x51, 0x40, 0x57, 0x60, 0x79, 0xb7, 0x61, 0x54, 0xb4, 0x14, 0x38, 0x23, 0xff, 0x2b, + 0x80, 0x98, 0xbc, 0x9d, 0x6a, 0x24, 0xc4, 0x4c, 0x0a, 0xb2, 0x61, 0x19, 0x0f, 0xb0, 0xed, 0xb0, + 0x83, 0x6b, 0x9e, 0x50, 0xf7, 0xf4, 0xfe, 0x6c, 0x4f, 0x7a, 0xe2, 0xd1, 0x65, 0x14, 0x35, 0x59, + 0xe3, 0x09, 0x5b, 0x22, 0x7a, 0x13, 0xe4, 0xf1, 0x10, 0x58, 0x38, 0x82, 0x2b, 0x63, 0x68, 0x63, + 0xec, 0xf3, 0x7e, 0xda, 0x3e, 0x73, 0xe5, 0xf5, 0x49, 0x4a, 0xd8, 0x42, 0x89, 0x8a, 0xb4, 0xd1, + 0xe6, 0x61, 0x31, 0x1d, 0x2a, 0xbf, 0x5c, 0x00, 0xa8, 0xb0, 0x7c, 0xee, 0xdf, 0xe8, 0x4f, 0x01, + 0x96, 0x86, 0xac, 0x04, 0x4d, 0xfc, 0xd0, 0x19, 0x67, 0x71, 0x85, 0x5b, 0xaf, 0x91, 0x11, 0xf9, + 0x94, 0xbc, 0xf9, 0xfd, 0xdf, 0xff, 0xfc, 0x38, 0xf3, 0x39, 0x2a, 0x9f, 0x7e, 0x82, 0x7e, 0x1b, + 0xdd, 0xeb, 0x7b, 0x9e, 0x4f, 0x9f, 0x91, 0x4e, 0x18, 0x94, 0x36, 0x4a, 0xc9, 0xb7, 0x45, 0x50, + 0xda, 0x78, 0x51, 0x3a, 0xf3, 0xa7, 0x5f, 0x05, 0xc8, 0xa5, 0x6c, 0x05, 0x29, 0x93, 0xb6, 0x3f, + 0xef, 0x3f, 0x85, 0xa9, 0x6e, 0xdb, 0x38, 0x85, 0xec, 0x7a, 0x5c, 0xa0, 0xef, 0x4c, 0x5e, 0x69, + 0xe3, 0x05, 0xfa, 0x5d, 0x80, 0xfc, 0xb0, 0x9b, 0xa1, 0x89, 0x35, 0x1a, 0xeb, 0x7c, 0x85, 0x6b, + 0x49, 0x4a, 0xea, 0x73, 0x59, 0x69, 0x24, 0x9f, 0xcb, 0x72, 0x95, 0x0b, 0xbc, 0x2f, 0x5f, 0xa2, + 0x84, 0x9b, 0xa7, 0x56, 0x82, 0xfe, 0x12, 0x20, 0x3f, 0xec, 0x71, 0x93, 0xa5, 0x8e, 0xf5, 0xc3, + 
0x57, 0x49, 0xad, 0x71, 0xa9, 0x7b, 0xe5, 0xad, 0x33, 0xa9, 0x89, 0x00, 0x65, 0xda, 0xa2, 0xa6, + 0x34, 0xff, 0x21, 0x80, 0x38, 0x6a, 0x35, 0xe8, 0xf6, 0x25, 0x8c, 0xe9, 0x55, 0xba, 0x77, 0xb9, + 0xee, 0x6d, 0x79, 0xeb, 0xf5, 0xcf, 0xc0, 0xe6, 0x61, 0xbc, 0xe5, 0xa6, 0xb0, 0x81, 0x7e, 0x11, + 0x20, 0x3f, 0x6c, 0xd8, 0x93, 0x8b, 0x3c, 0xd6, 0xdc, 0x5f, 0x25, 0x36, 0x3e, 0xb0, 0x1b, 0x97, + 0x38, 0xb0, 0x3b, 0xdf, 0xc1, 0x7b, 0x1d, 0xda, 0x9b, 0x20, 0x69, 0xa7, 0x70, 0xe6, 0x19, 0x2d, + 0xe2, 0x0f, 0xec, 0x0e, 0xd9, 0x21, 0x91, 0x55, 0xd3, 0xa6, 0xf0, 0xe4, 0x41, 0x9c, 0xd9, 0xa5, + 0x0e, 0x76, 0xbb, 0x0a, 0xf5, 0xbb, 0xa5, 0x2e, 0x71, 0xf9, 0x7b, 0xb0, 0x14, 0x85, 0xb0, 0x67, + 0x07, 0xe3, 0x7e, 0x3d, 0xb7, 0xf8, 0xec, 0x60, 0x9e, 0x73, 0x6f, 0xff, 0x1f, 0x00, 0x00, 0xff, + 0xff, 0xfd, 0x6d, 0xa4, 0x8f, 0xa5, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/resourcemanager/v2/folders.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/resourcemanager/v2/folders.pb.go new file mode 100644 index 0000000..86078e3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/resourcemanager/v2/folders.pb.go @@ -0,0 +1,1510 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/resourcemanager/v2/folders.proto + +package resourcemanager // import "google.golang.org/genproto/googleapis/cloud/resourcemanager/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Folder lifecycle states. +type Folder_LifecycleState int32 + +const ( + // Unspecified state. + Folder_LIFECYCLE_STATE_UNSPECIFIED Folder_LifecycleState = 0 + // The normal and active state. + Folder_ACTIVE Folder_LifecycleState = 1 + // The folder has been marked for deletion by the user. + Folder_DELETE_REQUESTED Folder_LifecycleState = 2 +) + +var Folder_LifecycleState_name = map[int32]string{ + 0: "LIFECYCLE_STATE_UNSPECIFIED", + 1: "ACTIVE", + 2: "DELETE_REQUESTED", +} +var Folder_LifecycleState_value = map[string]int32{ + "LIFECYCLE_STATE_UNSPECIFIED": 0, + "ACTIVE": 1, + "DELETE_REQUESTED": 2, +} + +func (x Folder_LifecycleState) String() string { + return proto.EnumName(Folder_LifecycleState_name, int32(x)) +} +func (Folder_LifecycleState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{0, 0} +} + +// The type of operation that failed. +type FolderOperation_OperationType int32 + +const ( + // Operation type not specified. + FolderOperation_OPERATION_TYPE_UNSPECIFIED FolderOperation_OperationType = 0 + // A create folder operation. 
+ FolderOperation_CREATE FolderOperation_OperationType = 1 + // A move folder operation. + FolderOperation_MOVE FolderOperation_OperationType = 2 +) + +var FolderOperation_OperationType_name = map[int32]string{ + 0: "OPERATION_TYPE_UNSPECIFIED", + 1: "CREATE", + 2: "MOVE", +} +var FolderOperation_OperationType_value = map[string]int32{ + "OPERATION_TYPE_UNSPECIFIED": 0, + "CREATE": 1, + "MOVE": 2, +} + +func (x FolderOperation_OperationType) String() string { + return proto.EnumName(FolderOperation_OperationType_name, int32(x)) +} +func (FolderOperation_OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{11, 0} +} + +// A Folder in an Organization's resource hierarchy, used to +// organize that Organization's resources. +type Folder struct { + // Output only. The resource name of the Folder. + // Its format is `folders/{folder_id}`, for example: "folders/1234". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The Folder’s parent's resource name. + // Updates to the folder's parent must be performed via [MoveFolders]. + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The folder’s display name. + // A folder’s display name must be unique amongst its siblings, e.g. + // no two folders with the same parent can share the same display name. + // The display name must start and end with a letter or digit, may contain + // letters, digits, spaces, hyphens and underscores and can be no longer + // than 30 characters. This is captured by the regular expression: + // [\p{L}\p{N}]({\p{L}\p{N}_- ]{0,28}[\p{L}\p{N}])?. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The lifecycle state of the folder. + // Updates to the lifecycle_state must be performed via + // [DeleteFolder] and [UndeleteFolder]. + LifecycleState Folder_LifecycleState `protobuf:"varint,4,opt,name=lifecycle_state,json=lifecycleState,proto3,enum=google.cloud.resourcemanager.v2.Folder_LifecycleState" json:"lifecycle_state,omitempty"` + // Output only. Timestamp when the Folder was created. Assigned by the server. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. Timestamp when the Folder was last modified. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Folder) Reset() { *m = Folder{} } +func (m *Folder) String() string { return proto.CompactTextString(m) } +func (*Folder) ProtoMessage() {} +func (*Folder) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{0} +} +func (m *Folder) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Folder.Unmarshal(m, b) +} +func (m *Folder) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Folder.Marshal(b, m, deterministic) +} +func (dst *Folder) XXX_Merge(src proto.Message) { + xxx_messageInfo_Folder.Merge(dst, src) +} +func (m *Folder) XXX_Size() int { + return xxx_messageInfo_Folder.Size(m) +} +func (m *Folder) XXX_DiscardUnknown() { + xxx_messageInfo_Folder.DiscardUnknown(m) +} + +var xxx_messageInfo_Folder proto.InternalMessageInfo + +func (m *Folder) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Folder) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Folder) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Folder) GetLifecycleState() Folder_LifecycleState { + if m != nil { + return m.LifecycleState + } + return Folder_LIFECYCLE_STATE_UNSPECIFIED +} + +func (m *Folder) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Folder) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// The ListFolders request message. +type ListFoldersRequest struct { + // The resource name of the Organization or Folder whose Folders are + // being listed. + // Must be of the form `folders/{folder_id}` or `organizations/{org_id}`. + // Access to this method is controlled by checking the + // `resourcemanager.folders.list` permission on the `parent`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of Folders to return in the response. + // This field is optional. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A pagination token returned from a previous call to `ListFolders` + // that indicates where this listing should continue from. + // This field is optional. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Controls whether Folders in the [DELETE_REQUESTED} state should + // be returned. 
+ ShowDeleted bool `protobuf:"varint,4,opt,name=show_deleted,json=showDeleted,proto3" json:"show_deleted,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFoldersRequest) Reset() { *m = ListFoldersRequest{} } +func (m *ListFoldersRequest) String() string { return proto.CompactTextString(m) } +func (*ListFoldersRequest) ProtoMessage() {} +func (*ListFoldersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{1} +} +func (m *ListFoldersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFoldersRequest.Unmarshal(m, b) +} +func (m *ListFoldersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFoldersRequest.Marshal(b, m, deterministic) +} +func (dst *ListFoldersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFoldersRequest.Merge(dst, src) +} +func (m *ListFoldersRequest) XXX_Size() int { + return xxx_messageInfo_ListFoldersRequest.Size(m) +} +func (m *ListFoldersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFoldersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFoldersRequest proto.InternalMessageInfo + +func (m *ListFoldersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFoldersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListFoldersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListFoldersRequest) GetShowDeleted() bool { + if m != nil { + return m.ShowDeleted + } + return false +} + +// The ListFolders response message. +type ListFoldersResponse struct { + // A possibly paginated list of Folders that are direct descendants of + // the specified parent resource. + Folders []*Folder `protobuf:"bytes,1,rep,name=folders,proto3" json:"folders,omitempty"` + // A pagination token returned from a previous call to `ListFolders` + // that indicates from where listing should continue. + // This field is optional. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFoldersResponse) Reset() { *m = ListFoldersResponse{} } +func (m *ListFoldersResponse) String() string { return proto.CompactTextString(m) } +func (*ListFoldersResponse) ProtoMessage() {} +func (*ListFoldersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{2} +} +func (m *ListFoldersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFoldersResponse.Unmarshal(m, b) +} +func (m *ListFoldersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFoldersResponse.Marshal(b, m, deterministic) +} +func (dst *ListFoldersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFoldersResponse.Merge(dst, src) +} +func (m *ListFoldersResponse) XXX_Size() int { + return xxx_messageInfo_ListFoldersResponse.Size(m) +} +func (m *ListFoldersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFoldersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFoldersResponse proto.InternalMessageInfo + +func (m *ListFoldersResponse) GetFolders() []*Folder { + if m != nil { + return m.Folders + } + return nil +} + +func (m *ListFoldersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for searching folders. +type SearchFoldersRequest struct { + // The maximum number of folders to return in the response. + // This field is optional. + PageSize int32 `protobuf:"varint,1,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A pagination token returned from a previous call to `SearchFolders` + // that indicates from where search should continue. + // This field is optional. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Search criteria used to select the Folders to return. + // If no search criteria is specified then all accessible folders will be + // returned. + // + // Query expressions can be used to restrict results based upon displayName, + // lifecycleState and parent, where the operators `=`, `NOT`, `AND` and `OR` + // can be used along with the suffix wildcard symbol `*`. 
+ // + // Some example queries are: + // |Query|Description| + // |------|-----------| + // |displayName=Test*|Folders whose display name starts with "Test".| + // |lifecycleState=ACTIVE|Folders whose lifecycleState is ACTIVE.| + // |parent=folders/123|Folders whose parent is "folders/123".| + // |parent=folders/123 AND lifecycleState=ACTIVE|Active folders whose + // parent is "folders/123".| + Query string `protobuf:"bytes,3,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchFoldersRequest) Reset() { *m = SearchFoldersRequest{} } +func (m *SearchFoldersRequest) String() string { return proto.CompactTextString(m) } +func (*SearchFoldersRequest) ProtoMessage() {} +func (*SearchFoldersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{3} +} +func (m *SearchFoldersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchFoldersRequest.Unmarshal(m, b) +} +func (m *SearchFoldersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchFoldersRequest.Marshal(b, m, deterministic) +} +func (dst *SearchFoldersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchFoldersRequest.Merge(dst, src) +} +func (m *SearchFoldersRequest) XXX_Size() int { + return xxx_messageInfo_SearchFoldersRequest.Size(m) +} +func (m *SearchFoldersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchFoldersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchFoldersRequest proto.InternalMessageInfo + +func (m *SearchFoldersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchFoldersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchFoldersRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +// The response message for searching folders. +type SearchFoldersResponse struct { + // A possibly paginated folder search results. + // the specified parent resource. + Folders []*Folder `protobuf:"bytes,1,rep,name=folders,proto3" json:"folders,omitempty"` + // A pagination token returned from a previous call to `SearchFolders` + // that indicates from where searching should continue. + // This field is optional. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchFoldersResponse) Reset() { *m = SearchFoldersResponse{} } +func (m *SearchFoldersResponse) String() string { return proto.CompactTextString(m) } +func (*SearchFoldersResponse) ProtoMessage() {} +func (*SearchFoldersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{4} +} +func (m *SearchFoldersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchFoldersResponse.Unmarshal(m, b) +} +func (m *SearchFoldersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchFoldersResponse.Marshal(b, m, deterministic) +} +func (dst *SearchFoldersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchFoldersResponse.Merge(dst, src) +} +func (m *SearchFoldersResponse) XXX_Size() int { + return xxx_messageInfo_SearchFoldersResponse.Size(m) +} +func (m *SearchFoldersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchFoldersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchFoldersResponse proto.InternalMessageInfo + +func (m *SearchFoldersResponse) GetFolders() []*Folder { + if m != nil { + return m.Folders + } + return nil +} + +func (m *SearchFoldersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The GetFolder request message. +type GetFolderRequest struct { + // The resource name of the Folder to retrieve. + // Must be of the form `folders/{folder_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFolderRequest) Reset() { *m = GetFolderRequest{} } +func (m *GetFolderRequest) String() string { return proto.CompactTextString(m) } +func (*GetFolderRequest) ProtoMessage() {} +func (*GetFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{5} +} +func (m *GetFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFolderRequest.Unmarshal(m, b) +} +func (m *GetFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFolderRequest.Marshal(b, m, deterministic) +} +func (dst *GetFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFolderRequest.Merge(dst, src) +} +func (m *GetFolderRequest) XXX_Size() int { + return xxx_messageInfo_GetFolderRequest.Size(m) +} +func (m *GetFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFolderRequest proto.InternalMessageInfo + +func (m *GetFolderRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The CreateFolder request message. +type CreateFolderRequest struct { + // The resource name of the new Folder's parent. + // Must be of the form `folders/{folder_id}` or `organizations/{org_id}`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The Folder being created, only the display name will be consulted. + // All other fields will be ignored. 
+ Folder *Folder `protobuf:"bytes,2,opt,name=folder,proto3" json:"folder,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateFolderRequest) Reset() { *m = CreateFolderRequest{} } +func (m *CreateFolderRequest) String() string { return proto.CompactTextString(m) } +func (*CreateFolderRequest) ProtoMessage() {} +func (*CreateFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{6} +} +func (m *CreateFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateFolderRequest.Unmarshal(m, b) +} +func (m *CreateFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateFolderRequest.Marshal(b, m, deterministic) +} +func (dst *CreateFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateFolderRequest.Merge(dst, src) +} +func (m *CreateFolderRequest) XXX_Size() int { + return xxx_messageInfo_CreateFolderRequest.Size(m) +} +func (m *CreateFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateFolderRequest proto.InternalMessageInfo + +func (m *CreateFolderRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateFolderRequest) GetFolder() *Folder { + if m != nil { + return m.Folder + } + return nil +} + +// The MoveFolder request message. +type MoveFolderRequest struct { + // The resource name of the Folder to move. + // Must be of the form folders/{folder_id} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name of the Folder or Organization to reparent + // the folder under. + // Must be of the form `folders/{folder_id}` or `organizations/{org_id}`. + DestinationParent string `protobuf:"bytes,2,opt,name=destination_parent,json=destinationParent,proto3" json:"destination_parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MoveFolderRequest) Reset() { *m = MoveFolderRequest{} } +func (m *MoveFolderRequest) String() string { return proto.CompactTextString(m) } +func (*MoveFolderRequest) ProtoMessage() {} +func (*MoveFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{7} +} +func (m *MoveFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MoveFolderRequest.Unmarshal(m, b) +} +func (m *MoveFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MoveFolderRequest.Marshal(b, m, deterministic) +} +func (dst *MoveFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MoveFolderRequest.Merge(dst, src) +} +func (m *MoveFolderRequest) XXX_Size() int { + return xxx_messageInfo_MoveFolderRequest.Size(m) +} +func (m *MoveFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MoveFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MoveFolderRequest proto.InternalMessageInfo + +func (m *MoveFolderRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *MoveFolderRequest) GetDestinationParent() string { + if m != nil { + return m.DestinationParent + } + return "" +} + +// The request message for updating a folder's display name. +type UpdateFolderRequest struct { + // The new definition of the Folder. It must include a + // a `name` and `display_name` field. 
The other fields + // will be ignored. + Folder *Folder `protobuf:"bytes,1,opt,name=folder,proto3" json:"folder,omitempty"` + // Fields to be updated. + // Only the `display_name` can be updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFolderRequest) Reset() { *m = UpdateFolderRequest{} } +func (m *UpdateFolderRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFolderRequest) ProtoMessage() {} +func (*UpdateFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{8} +} +func (m *UpdateFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFolderRequest.Unmarshal(m, b) +} +func (m *UpdateFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFolderRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFolderRequest.Merge(dst, src) +} +func (m *UpdateFolderRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFolderRequest.Size(m) +} +func (m *UpdateFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFolderRequest proto.InternalMessageInfo + +func (m *UpdateFolderRequest) GetFolder() *Folder { + if m != nil { + return m.Folder + } + return nil +} + +func (m *UpdateFolderRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The DeleteFolder request message. +type DeleteFolderRequest struct { + // the resource name of the Folder to be deleted. + // Must be of the form `folders/{folder_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Instructs DeleteFolderAction to delete a folder even when the folder is not + // empty. 
+ RecursiveDelete bool `protobuf:"varint,2,opt,name=recursive_delete,json=recursiveDelete,proto3" json:"recursive_delete,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteFolderRequest) Reset() { *m = DeleteFolderRequest{} } +func (m *DeleteFolderRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteFolderRequest) ProtoMessage() {} +func (*DeleteFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{9} +} +func (m *DeleteFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteFolderRequest.Unmarshal(m, b) +} +func (m *DeleteFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteFolderRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteFolderRequest.Merge(dst, src) +} +func (m *DeleteFolderRequest) XXX_Size() int { + return xxx_messageInfo_DeleteFolderRequest.Size(m) +} +func (m *DeleteFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteFolderRequest proto.InternalMessageInfo + +func (m *DeleteFolderRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteFolderRequest) GetRecursiveDelete() bool { + if m != nil { + return m.RecursiveDelete + } + return false +} + +// The UndeleteFolder request message. +type UndeleteFolderRequest struct { + // The resource name of the Folder to undelete. + // Must be of the form `folders/{folder_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteFolderRequest) Reset() { *m = UndeleteFolderRequest{} } +func (m *UndeleteFolderRequest) String() string { return proto.CompactTextString(m) } +func (*UndeleteFolderRequest) ProtoMessage() {} +func (*UndeleteFolderRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{10} +} +func (m *UndeleteFolderRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteFolderRequest.Unmarshal(m, b) +} +func (m *UndeleteFolderRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteFolderRequest.Marshal(b, m, deterministic) +} +func (dst *UndeleteFolderRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteFolderRequest.Merge(dst, src) +} +func (m *UndeleteFolderRequest) XXX_Size() int { + return xxx_messageInfo_UndeleteFolderRequest.Size(m) +} +func (m *UndeleteFolderRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteFolderRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteFolderRequest proto.InternalMessageInfo + +func (m *UndeleteFolderRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Metadata describing a long running folder operation +type FolderOperation struct { + // The display name of the folder. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The type of this operation. 
+ OperationType FolderOperation_OperationType `protobuf:"varint,2,opt,name=operation_type,json=operationType,proto3,enum=google.cloud.resourcemanager.v2.FolderOperation_OperationType" json:"operation_type,omitempty"` + // The resource name of the folder's parent. + // Only applicable when the operation_type is MOVE. + SourceParent string `protobuf:"bytes,3,opt,name=source_parent,json=sourceParent,proto3" json:"source_parent,omitempty"` + // The resource name of the folder or organization we are either creating + // the folder under or moving the folder to. + DestinationParent string `protobuf:"bytes,4,opt,name=destination_parent,json=destinationParent,proto3" json:"destination_parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FolderOperation) Reset() { *m = FolderOperation{} } +func (m *FolderOperation) String() string { return proto.CompactTextString(m) } +func (*FolderOperation) ProtoMessage() {} +func (*FolderOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_folders_5147156560d8dbeb, []int{11} +} +func (m *FolderOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FolderOperation.Unmarshal(m, b) +} +func (m *FolderOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FolderOperation.Marshal(b, m, deterministic) +} +func (dst *FolderOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FolderOperation.Merge(dst, src) +} +func (m *FolderOperation) XXX_Size() int { + return xxx_messageInfo_FolderOperation.Size(m) +} +func (m *FolderOperation) XXX_DiscardUnknown() { + xxx_messageInfo_FolderOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_FolderOperation proto.InternalMessageInfo + +func (m *FolderOperation) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *FolderOperation) GetOperationType() FolderOperation_OperationType { + if m != nil { + return m.OperationType + } + return FolderOperation_OPERATION_TYPE_UNSPECIFIED +} + +func (m *FolderOperation) GetSourceParent() string { + if m != nil { + return m.SourceParent + } + return "" +} + +func (m *FolderOperation) GetDestinationParent() string { + if m != nil { + return m.DestinationParent + } + return "" +} + +func init() { + proto.RegisterType((*Folder)(nil), "google.cloud.resourcemanager.v2.Folder") + proto.RegisterType((*ListFoldersRequest)(nil), "google.cloud.resourcemanager.v2.ListFoldersRequest") + proto.RegisterType((*ListFoldersResponse)(nil), "google.cloud.resourcemanager.v2.ListFoldersResponse") + proto.RegisterType((*SearchFoldersRequest)(nil), "google.cloud.resourcemanager.v2.SearchFoldersRequest") + proto.RegisterType((*SearchFoldersResponse)(nil), "google.cloud.resourcemanager.v2.SearchFoldersResponse") + proto.RegisterType((*GetFolderRequest)(nil), "google.cloud.resourcemanager.v2.GetFolderRequest") + proto.RegisterType((*CreateFolderRequest)(nil), "google.cloud.resourcemanager.v2.CreateFolderRequest") + proto.RegisterType((*MoveFolderRequest)(nil), "google.cloud.resourcemanager.v2.MoveFolderRequest") + proto.RegisterType((*UpdateFolderRequest)(nil), "google.cloud.resourcemanager.v2.UpdateFolderRequest") + proto.RegisterType((*DeleteFolderRequest)(nil), "google.cloud.resourcemanager.v2.DeleteFolderRequest") + proto.RegisterType((*UndeleteFolderRequest)(nil), "google.cloud.resourcemanager.v2.UndeleteFolderRequest") + proto.RegisterType((*FolderOperation)(nil), 
"google.cloud.resourcemanager.v2.FolderOperation") + proto.RegisterEnum("google.cloud.resourcemanager.v2.Folder_LifecycleState", Folder_LifecycleState_name, Folder_LifecycleState_value) + proto.RegisterEnum("google.cloud.resourcemanager.v2.FolderOperation_OperationType", FolderOperation_OperationType_name, FolderOperation_OperationType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FoldersClient is the client API for Folders service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FoldersClient interface { + // Lists the Folders that are direct descendants of supplied parent resource. + // List provides a strongly consistent view of the Folders underneath + // the specified parent resource. + // List returns Folders sorted based upon the (ascending) lexical ordering + // of their display_name. + // The caller must have `resourcemanager.folders.list` permission on the + // identified parent. + ListFolders(ctx context.Context, in *ListFoldersRequest, opts ...grpc.CallOption) (*ListFoldersResponse, error) + // Search for folders that match specific filter criteria. + // Search provides an eventually consistent view of the folders a user has + // access to which meet the specified filter criteria. + // + // This will only return folders on which the caller has the + // permission `resourcemanager.folders.get`. + SearchFolders(ctx context.Context, in *SearchFoldersRequest, opts ...grpc.CallOption) (*SearchFoldersResponse, error) + // Retrieves a Folder identified by the supplied resource name. + // Valid Folder resource names have the format `folders/{folder_id}` + // (for example, `folders/1234`). + // The caller must have `resourcemanager.folders.get` permission on the + // identified folder. + GetFolder(ctx context.Context, in *GetFolderRequest, opts ...grpc.CallOption) (*Folder, error) + // Creates a Folder in the resource hierarchy. + // Returns an Operation which can be used to track the progress of the + // folder creation workflow. + // Upon success the Operation.response field will be populated with the + // created Folder. + // + // In order to succeed, the addition of this new Folder must not violate + // the Folder naming, height or fanout constraints. + // + The Folder's display_name must be distinct from all other Folder's that + // share its parent. + // + The addition of the Folder must not cause the active Folder hierarchy + // to exceed a height of 4. Note, the full active + deleted Folder hierarchy + // is allowed to reach a height of 8; this provides additional headroom when + // moving folders that contain deleted folders. + // + The addition of the Folder must not cause the total number of Folders + // under its parent to exceed 100. + // + // If the operation fails due to a folder constraint violation, + // a PreconditionFailure explaining the violation will be returned. + // If the failure occurs synchronously then the PreconditionFailure + // will be returned via the Status.details field and if it occurs + // asynchronously then the PreconditionFailure will be returned + // via the Operation.error field. 
+ // + // The caller must have `resourcemanager.folders.create` permission on the + // identified parent. + CreateFolder(ctx context.Context, in *CreateFolderRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates a Folder, changing its display_name. + // Changes to the folder display_name will be rejected if they violate either + // the display_name formatting rules or naming constraints described in + // the [CreateFolder] documentation. + // + The Folder's display name must start and end with a letter or digit, + // may contain letters, digits, spaces, hyphens and underscores and can be + // no longer than 30 characters. This is captured by the regular expression: + // [\p{L}\p{N}]({\p{L}\p{N}_- ]{0,28}[\p{L}\p{N}])?. + // The caller must have `resourcemanager.folders.update` permission on the + // identified folder. + // + // If the update fails due to the unique name constraint then a + // PreconditionFailure explaining this violation will be returned + // in the Status.details field. + UpdateFolder(ctx context.Context, in *UpdateFolderRequest, opts ...grpc.CallOption) (*Folder, error) + // Moves a Folder under a new resource parent. + // Returns an Operation which can be used to track the progress of the + // folder move workflow. + // Upon success the Operation.response field will be populated with the + // moved Folder. + // Upon failure, a FolderOperationError categorizing the failure cause will + // be returned - if the failure occurs synchronously then the + // FolderOperationError will be returned via the Status.details field + // and if it occurs asynchronously then the FolderOperation will be returned + // via the the Operation.error field. + // In addition, the Operation.metadata field will be populated with a + // FolderOperation message as an aid to stateless clients. + // Folder moves will be rejected if they violate either the naming, height + // or fanout constraints described in the [CreateFolder] documentation. + // The caller must have `resourcemanager.folders.move` permission on the + // folder's current and proposed new parent. + MoveFolder(ctx context.Context, in *MoveFolderRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Requests deletion of a Folder. The Folder is moved into the + // [DELETE_REQUESTED] state immediately, and is deleted approximately 30 days + // later. This method may only be called on an empty Folder in the [ACTIVE] + // state, where a Folder is empty if it doesn't contain any Folders or + // Projects in the [ACTIVE] state. + // The caller must have `resourcemanager.folders.delete` permission on the + // identified folder. + DeleteFolder(ctx context.Context, in *DeleteFolderRequest, opts ...grpc.CallOption) (*Folder, error) + // Cancels the deletion request for a Folder. This method may only be + // called on a Folder in the [DELETE_REQUESTED] state. + // In order to succeed, the Folder's parent must be in the [ACTIVE] state. + // In addition, reintroducing the folder into the tree must not violate + // folder naming, height and fanout constraints described in the + // [CreateFolder] documentation. + // The caller must have `resourcemanager.folders.undelete` permission on the + // identified folder. + UndeleteFolder(ctx context.Context, in *UndeleteFolderRequest, opts ...grpc.CallOption) (*Folder, error) + // Gets the access control policy for a Folder. The returned policy may be + // empty if no such policy or resource exists. The `resource` field should + // be the Folder's resource name, e.g. 
"folders/1234". + // The caller must have `resourcemanager.folders.getIamPolicy` permission + // on the identified folder. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the access control policy on a Folder, replacing any existing policy. + // The `resource` field should be the Folder's resource name, e.g. + // "folders/1234". + // The caller must have `resourcemanager.folders.setIamPolicy` permission + // on the identified folder. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on the specified Folder. + // The `resource` field should be the Folder's resource name, + // e.g. "folders/1234". + // + // There are no permissions required for making this API call. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type foldersClient struct { + cc *grpc.ClientConn +} + +func NewFoldersClient(cc *grpc.ClientConn) FoldersClient { + return &foldersClient{cc} +} + +func (c *foldersClient) ListFolders(ctx context.Context, in *ListFoldersRequest, opts ...grpc.CallOption) (*ListFoldersResponse, error) { + out := new(ListFoldersResponse) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/ListFolders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) SearchFolders(ctx context.Context, in *SearchFoldersRequest, opts ...grpc.CallOption) (*SearchFoldersResponse, error) { + out := new(SearchFoldersResponse) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/SearchFolders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) GetFolder(ctx context.Context, in *GetFolderRequest, opts ...grpc.CallOption) (*Folder, error) { + out := new(Folder) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/GetFolder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) CreateFolder(ctx context.Context, in *CreateFolderRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/CreateFolder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) UpdateFolder(ctx context.Context, in *UpdateFolderRequest, opts ...grpc.CallOption) (*Folder, error) { + out := new(Folder) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/UpdateFolder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) MoveFolder(ctx context.Context, in *MoveFolderRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/MoveFolder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) DeleteFolder(ctx context.Context, in *DeleteFolderRequest, opts ...grpc.CallOption) (*Folder, error) { + out := new(Folder) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/DeleteFolder", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) UndeleteFolder(ctx context.Context, in *UndeleteFolderRequest, opts ...grpc.CallOption) (*Folder, error) { + out := new(Folder) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/UndeleteFolder", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *foldersClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.resourcemanager.v2.Folders/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FoldersServer is the server API for Folders service. +type FoldersServer interface { + // Lists the Folders that are direct descendants of supplied parent resource. + // List provides a strongly consistent view of the Folders underneath + // the specified parent resource. + // List returns Folders sorted based upon the (ascending) lexical ordering + // of their display_name. + // The caller must have `resourcemanager.folders.list` permission on the + // identified parent. + ListFolders(context.Context, *ListFoldersRequest) (*ListFoldersResponse, error) + // Search for folders that match specific filter criteria. + // Search provides an eventually consistent view of the folders a user has + // access to which meet the specified filter criteria. + // + // This will only return folders on which the caller has the + // permission `resourcemanager.folders.get`. + SearchFolders(context.Context, *SearchFoldersRequest) (*SearchFoldersResponse, error) + // Retrieves a Folder identified by the supplied resource name. + // Valid Folder resource names have the format `folders/{folder_id}` + // (for example, `folders/1234`). + // The caller must have `resourcemanager.folders.get` permission on the + // identified folder. + GetFolder(context.Context, *GetFolderRequest) (*Folder, error) + // Creates a Folder in the resource hierarchy. + // Returns an Operation which can be used to track the progress of the + // folder creation workflow. + // Upon success the Operation.response field will be populated with the + // created Folder. + // + // In order to succeed, the addition of this new Folder must not violate + // the Folder naming, height or fanout constraints. + // + The Folder's display_name must be distinct from all other Folder's that + // share its parent. + // + The addition of the Folder must not cause the active Folder hierarchy + // to exceed a height of 4. Note, the full active + deleted Folder hierarchy + // is allowed to reach a height of 8; this provides additional headroom when + // moving folders that contain deleted folders. 
+ // + The addition of the Folder must not cause the total number of Folders + // under its parent to exceed 100. + // + // If the operation fails due to a folder constraint violation, + // a PreconditionFailure explaining the violation will be returned. + // If the failure occurs synchronously then the PreconditionFailure + // will be returned via the Status.details field and if it occurs + // asynchronously then the PreconditionFailure will be returned + // via the Operation.error field. + // + // The caller must have `resourcemanager.folders.create` permission on the + // identified parent. + CreateFolder(context.Context, *CreateFolderRequest) (*longrunning.Operation, error) + // Updates a Folder, changing its display_name. + // Changes to the folder display_name will be rejected if they violate either + // the display_name formatting rules or naming constraints described in + // the [CreateFolder] documentation. + // + The Folder's display name must start and end with a letter or digit, + // may contain letters, digits, spaces, hyphens and underscores and can be + // no longer than 30 characters. This is captured by the regular expression: + // [\p{L}\p{N}]({\p{L}\p{N}_- ]{0,28}[\p{L}\p{N}])?. + // The caller must have `resourcemanager.folders.update` permission on the + // identified folder. + // + // If the update fails due to the unique name constraint then a + // PreconditionFailure explaining this violation will be returned + // in the Status.details field. + UpdateFolder(context.Context, *UpdateFolderRequest) (*Folder, error) + // Moves a Folder under a new resource parent. + // Returns an Operation which can be used to track the progress of the + // folder move workflow. + // Upon success the Operation.response field will be populated with the + // moved Folder. + // Upon failure, a FolderOperationError categorizing the failure cause will + // be returned - if the failure occurs synchronously then the + // FolderOperationError will be returned via the Status.details field + // and if it occurs asynchronously then the FolderOperation will be returned + // via the the Operation.error field. + // In addition, the Operation.metadata field will be populated with a + // FolderOperation message as an aid to stateless clients. + // Folder moves will be rejected if they violate either the naming, height + // or fanout constraints described in the [CreateFolder] documentation. + // The caller must have `resourcemanager.folders.move` permission on the + // folder's current and proposed new parent. + MoveFolder(context.Context, *MoveFolderRequest) (*longrunning.Operation, error) + // Requests deletion of a Folder. The Folder is moved into the + // [DELETE_REQUESTED] state immediately, and is deleted approximately 30 days + // later. This method may only be called on an empty Folder in the [ACTIVE] + // state, where a Folder is empty if it doesn't contain any Folders or + // Projects in the [ACTIVE] state. + // The caller must have `resourcemanager.folders.delete` permission on the + // identified folder. + DeleteFolder(context.Context, *DeleteFolderRequest) (*Folder, error) + // Cancels the deletion request for a Folder. This method may only be + // called on a Folder in the [DELETE_REQUESTED] state. + // In order to succeed, the Folder's parent must be in the [ACTIVE] state. + // In addition, reintroducing the folder into the tree must not violate + // folder naming, height and fanout constraints described in the + // [CreateFolder] documentation. 
+ // The caller must have `resourcemanager.folders.undelete` permission on the + // identified folder. + UndeleteFolder(context.Context, *UndeleteFolderRequest) (*Folder, error) + // Gets the access control policy for a Folder. The returned policy may be + // empty if no such policy or resource exists. The `resource` field should + // be the Folder's resource name, e.g. "folders/1234". + // The caller must have `resourcemanager.folders.getIamPolicy` permission + // on the identified folder. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the access control policy on a Folder, replacing any existing policy. + // The `resource` field should be the Folder's resource name, e.g. + // "folders/1234". + // The caller must have `resourcemanager.folders.setIamPolicy` permission + // on the identified folder. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on the specified Folder. + // The `resource` field should be the Folder's resource name, + // e.g. "folders/1234". + // + // There are no permissions required for making this API call. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterFoldersServer(s *grpc.Server, srv FoldersServer) { + s.RegisterService(&_Folders_serviceDesc, srv) +} + +func _Folders_ListFolders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFoldersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).ListFolders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/ListFolders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).ListFolders(ctx, req.(*ListFoldersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_SearchFolders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchFoldersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).SearchFolders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/SearchFolders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).SearchFolders(ctx, req.(*SearchFoldersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_GetFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).GetFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/GetFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).GetFolder(ctx, req.(*GetFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_CreateFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(CreateFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).CreateFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/CreateFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).CreateFolder(ctx, req.(*CreateFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_UpdateFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).UpdateFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/UpdateFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).UpdateFolder(ctx, req.(*UpdateFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_MoveFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MoveFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).MoveFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/MoveFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).MoveFolder(ctx, req.(*MoveFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_DeleteFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).DeleteFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/DeleteFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).DeleteFolder(ctx, req.(*DeleteFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_UndeleteFolder_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeleteFolderRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).UndeleteFolder(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/UndeleteFolder", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).UndeleteFolder(ctx, req.(*UndeleteFolderRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + 
FullMethod: "/google.cloud.resourcemanager.v2.Folders/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Folders_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FoldersServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.resourcemanager.v2.Folders/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FoldersServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Folders_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.resourcemanager.v2.Folders", + HandlerType: (*FoldersServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListFolders", + Handler: _Folders_ListFolders_Handler, + }, + { + MethodName: "SearchFolders", + Handler: _Folders_SearchFolders_Handler, + }, + { + MethodName: "GetFolder", + Handler: _Folders_GetFolder_Handler, + }, + { + MethodName: "CreateFolder", + Handler: _Folders_CreateFolder_Handler, + }, + { + MethodName: "UpdateFolder", + Handler: _Folders_UpdateFolder_Handler, + }, + { + MethodName: "MoveFolder", + Handler: _Folders_MoveFolder_Handler, + }, + { + MethodName: "DeleteFolder", + Handler: _Folders_DeleteFolder_Handler, + }, + { + MethodName: "UndeleteFolder", + Handler: _Folders_UndeleteFolder_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _Folders_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _Folders_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _Folders_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/resourcemanager/v2/folders.proto", +} + +func init() { + proto.RegisterFile("google/cloud/resourcemanager/v2/folders.proto", fileDescriptor_folders_5147156560d8dbeb) +} + +var fileDescriptor_folders_5147156560d8dbeb = []byte{ + // 1235 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0x5d, 0x6f, 0xdb, 0xd4, + 0x1b, 0xff, 0x9f, 0xac, 0xcb, 0xba, 0x27, 0x2f, 0xcd, 0x4e, 0xdb, 0xfd, 0x4b, 0xba, 0xbe, 0x70, + 0xca, 0x46, 0xd6, 0x6a, 0xce, 0x9a, 0x41, 0x2f, 0x3a, 0x01, 0xea, 0x52, 0xb7, 0x8a, 0xd4, 0x97, + 0xe0, 0xb8, 0x95, 0x86, 0x2a, 0x59, 0x5e, 0x72, 0x9a, 0x5a, 0x75, 0x6c, 0xcf, 0x76, 0x02, 0xd9, + 0x84, 0x34, 0x4d, 0x9a, 0xb8, 0x98, 0xb8, 0xda, 0x1d, 0x88, 0x0b, 0x6e, 0xb9, 0x45, 
0xe2, 0x3b, + 0xc0, 0x2d, 0x5f, 0x81, 0x0f, 0x82, 0x7c, 0x7c, 0x9c, 0xda, 0x4e, 0x3a, 0xa7, 0x08, 0x71, 0x55, + 0xfb, 0x79, 0xfd, 0x3d, 0x6f, 0xbf, 0x3a, 0xf0, 0xa0, 0x6d, 0x9a, 0x6d, 0x9d, 0x96, 0x9b, 0xba, + 0xd9, 0x6d, 0x95, 0x6d, 0xea, 0x98, 0x5d, 0xbb, 0x49, 0x3b, 0xaa, 0xa1, 0xb6, 0xa9, 0x5d, 0xee, + 0x55, 0xca, 0xa7, 0xa6, 0xde, 0xa2, 0xb6, 0x23, 0x58, 0xb6, 0xe9, 0x9a, 0x78, 0xc9, 0x37, 0x17, + 0x98, 0xb9, 0x10, 0x33, 0x17, 0x7a, 0x95, 0xe2, 0x1d, 0x1e, 0x4f, 0xb5, 0xb4, 0xb2, 0x6a, 0x18, + 0xa6, 0xab, 0xba, 0x9a, 0x69, 0x70, 0xf7, 0xe2, 0x22, 0xd7, 0x6a, 0x6a, 0xa7, 0xdc, 0x5b, 0xf7, + 0xfe, 0x28, 0x96, 0xa9, 0x6b, 0xcd, 0x3e, 0xd7, 0x17, 0xa3, 0xfa, 0x88, 0x6e, 0x85, 0xeb, 0x74, + 0xd3, 0x68, 0xdb, 0x5d, 0xc3, 0xd0, 0x8c, 0x76, 0xd9, 0xb4, 0xa8, 0x1d, 0x49, 0xb0, 0xcc, 0x8d, + 0xd8, 0xdb, 0xb3, 0xee, 0x69, 0xf9, 0x54, 0xa3, 0x7a, 0x4b, 0xe9, 0xa8, 0xce, 0x39, 0xb7, 0x58, + 0x8a, 0x5b, 0xb8, 0x5a, 0x87, 0x3a, 0xae, 0xda, 0xb1, 0x7c, 0x03, 0xf2, 0xdd, 0x35, 0x48, 0xef, + 0xb0, 0xa2, 0x31, 0x86, 0x09, 0x43, 0xed, 0xd0, 0x39, 0xb4, 0x8c, 0x4a, 0x37, 0x25, 0xf6, 0x8c, + 0x6f, 0x43, 0xda, 0x52, 0x6d, 0x6a, 0xb8, 0x73, 0x29, 0x26, 0xe5, 0x6f, 0xf8, 0x43, 0xc8, 0xb6, + 0x34, 0xc7, 0xd2, 0xd5, 0xbe, 0xc2, 0x7c, 0xae, 0x31, 0x6d, 0x86, 0xcb, 0x0e, 0x3c, 0x57, 0x05, + 0xa6, 0x74, 0xed, 0x94, 0x36, 0xfb, 0x4d, 0x9d, 0x2a, 0x8e, 0xab, 0xba, 0x74, 0x6e, 0x62, 0x19, + 0x95, 0xf2, 0x95, 0x0d, 0x21, 0xa1, 0xad, 0x82, 0x0f, 0x48, 0xd8, 0x0b, 0xdc, 0x1b, 0x9e, 0xb7, + 0x94, 0xd7, 0x23, 0xef, 0xf8, 0x31, 0x64, 0x9a, 0x36, 0x55, 0x5d, 0xaa, 0x78, 0x45, 0xcd, 0x5d, + 0x5f, 0x46, 0xa5, 0x4c, 0xa5, 0x18, 0x04, 0x0f, 0x2a, 0x16, 0xe4, 0xa0, 0x62, 0x09, 0x7c, 0x73, + 0x4f, 0xe0, 0x39, 0x77, 0xad, 0xd6, 0xc0, 0x39, 0x9d, 0xec, 0xec, 0x9b, 0x7b, 0x02, 0xd2, 0x80, + 0x7c, 0x14, 0x1b, 0x5e, 0x82, 0xf9, 0xbd, 0xda, 0x8e, 0x58, 0x7d, 0x5a, 0xdd, 0x13, 0x95, 0x86, + 0xbc, 0x25, 0x8b, 0xca, 0xd1, 0x41, 0xa3, 0x2e, 0x56, 0x6b, 0x3b, 0x35, 0x71, 0xbb, 0xf0, 0x3f, + 0x0c, 0x90, 0xde, 0xaa, 0xca, 0xb5, 0x63, 0xb1, 0x80, 0xf0, 0x0c, 0x14, 0xb6, 0xc5, 0x3d, 0x51, + 0x16, 0x15, 0x49, 0xfc, 0xf2, 0x48, 0x6c, 0xc8, 0xe2, 0x76, 0x21, 0x45, 0xde, 0x22, 0xc0, 0x7b, + 0x9a, 0xe3, 0xfa, 0xc5, 0x3b, 0x12, 0x7d, 0xde, 0xa5, 0x8e, 0x1b, 0x9a, 0x00, 0x8a, 0x4c, 0x60, + 0x1e, 0x6e, 0x5a, 0x6a, 0x9b, 0x2a, 0x8e, 0xf6, 0x82, 0xb2, 0xe1, 0x5c, 0x97, 0x26, 0x3d, 0x41, + 0x43, 0x7b, 0x41, 0xf1, 0x02, 0x00, 0x53, 0xba, 0xe6, 0x39, 0x35, 0xf8, 0x70, 0x98, 0xb9, 0xec, + 0x09, 0xbc, 0xe9, 0x39, 0x67, 0xe6, 0xd7, 0x4a, 0x8b, 0xea, 0xd4, 0xa5, 0x2d, 0x36, 0x97, 0x49, + 0x29, 0xe3, 0xc9, 0xb6, 0x7d, 0x11, 0x79, 0x85, 0x60, 0x3a, 0x82, 0xc6, 0xb1, 0x4c, 0xc3, 0xa1, + 0x78, 0x0b, 0x6e, 0xf0, 0x1b, 0x99, 0x43, 0xcb, 0xd7, 0x4a, 0x99, 0xca, 0xc7, 0x63, 0x4e, 0x53, + 0x0a, 0xfc, 0xf0, 0x3d, 0x98, 0x32, 0xe8, 0x37, 0xae, 0x12, 0x42, 0xe8, 0x2f, 0x57, 0xce, 0x13, + 0xd7, 0x03, 0x94, 0xe4, 0x0c, 0x66, 0x1a, 0x54, 0xb5, 0x9b, 0x67, 0xb1, 0x8e, 0x44, 0x2a, 0x47, + 0xef, 0xad, 0x3c, 0x15, 0xaf, 0x7c, 0x06, 0xae, 0x3f, 0xef, 0x52, 0xbb, 0xcf, 0x7b, 0xe2, 0xbf, + 0x90, 0xd7, 0x08, 0x66, 0x63, 0xa9, 0xfe, 0xfb, 0x72, 0xef, 0x41, 0x61, 0x97, 0xf2, 0x7e, 0x07, + 0xa5, 0x8e, 0x38, 0x49, 0x62, 0xc0, 0x74, 0x95, 0xed, 0x71, 0xd4, 0xf4, 0xb2, 0x3d, 0xf9, 0x02, + 0xd2, 0x3e, 0x12, 0x96, 0xf5, 0x0a, 0x05, 0x70, 0x37, 0x72, 0x0c, 0xb7, 0xf6, 0xcd, 0x1e, 0x4d, + 0x04, 0x86, 0x1f, 0x00, 0x6e, 0x51, 0xc7, 0xd5, 0x0c, 0xc6, 0x51, 0x4a, 0x84, 0x37, 0x6e, 0x85, + 0x34, 0x75, 0xa6, 0x20, 0xef, 0x10, 0x4c, 0x1f, 0xb1, 0x9b, 0x8a, 0x86, 0xbe, 0x00, 0x8c, 0xfe, + 0x11, 0xe0, 
0xd0, 0x69, 0x7b, 0x44, 0xc8, 0xcb, 0x1e, 0x3e, 0xed, 0x1d, 0x8f, 0x2b, 0xf7, 0x55, + 0xe7, 0x3c, 0x38, 0x6d, 0xef, 0x99, 0xc8, 0x30, 0xed, 0x9f, 0x40, 0x72, 0xbd, 0xf7, 0xa1, 0x60, + 0xd3, 0x66, 0xd7, 0x76, 0xb4, 0x1e, 0xe5, 0xa7, 0xc4, 0x92, 0x4d, 0x4a, 0x53, 0x03, 0xb9, 0x1f, + 0x8b, 0xac, 0xc1, 0xec, 0x91, 0xd1, 0x1a, 0x2f, 0x2e, 0xf9, 0x2d, 0x05, 0x53, 0xbe, 0xd5, 0x61, + 0x40, 0xf8, 0x43, 0x7c, 0x8b, 0x86, 0xf9, 0x96, 0x42, 0x7e, 0xf0, 0x0f, 0x42, 0x71, 0xfb, 0x96, + 0x0f, 0x26, 0x5f, 0xf9, 0x7c, 0xcc, 0xfe, 0x0d, 0x92, 0x09, 0x83, 0x27, 0xb9, 0x6f, 0x51, 0x29, + 0x67, 0x86, 0x5f, 0xf1, 0x0a, 0xe4, 0xfc, 0x00, 0xc1, 0x80, 0xfd, 0x4b, 0xca, 0xfa, 0x42, 0x7f, + 0xb6, 0x97, 0xac, 0xc2, 0xc4, 0x65, 0xab, 0x20, 0x42, 0x2e, 0x92, 0x13, 0x2f, 0x42, 0xf1, 0xb0, + 0x2e, 0x4a, 0x5b, 0x72, 0xed, 0xf0, 0x40, 0x91, 0x9f, 0xd6, 0x47, 0xb0, 0x69, 0x55, 0x12, 0xb7, + 0x64, 0x8f, 0x4d, 0x27, 0x61, 0x62, 0xff, 0xf0, 0x58, 0x2c, 0xa4, 0x2a, 0xbf, 0x66, 0xe1, 0x06, + 0x3f, 0x60, 0xfc, 0x3d, 0x82, 0x4c, 0x88, 0xbf, 0xf0, 0xa3, 0xc4, 0x2e, 0x0c, 0x73, 0x6f, 0xf1, + 0x93, 0xab, 0x39, 0xf9, 0x9c, 0x41, 0xa6, 0x5f, 0xff, 0xf9, 0xd7, 0xbb, 0x54, 0x0e, 0x67, 0x42, + 0x1f, 0x14, 0xf8, 0x27, 0x04, 0xb9, 0x08, 0xc5, 0xe0, 0x4f, 0x13, 0x83, 0x8f, 0x62, 0xbf, 0xe2, + 0xc6, 0x55, 0xdd, 0x38, 0xaa, 0x05, 0x86, 0xea, 0xff, 0x04, 0x87, 0x50, 0x6d, 0x3a, 0xcc, 0x74, + 0x13, 0xad, 0xe2, 0x37, 0x08, 0x6e, 0x0e, 0xe8, 0x07, 0xaf, 0x27, 0x26, 0x89, 0x53, 0x55, 0x71, + 0xdc, 0x33, 0x25, 0x77, 0x18, 0x90, 0xdb, 0x78, 0xc6, 0x03, 0xf2, 0xd2, 0xdb, 0xe6, 0xcf, 0x38, + 0x9c, 0xf2, 0xea, 0xb7, 0xf8, 0x15, 0x82, 0x6c, 0x98, 0xde, 0x70, 0xf2, 0x0c, 0x46, 0xb0, 0x61, + 0x71, 0x21, 0xf0, 0x0a, 0x7d, 0x3f, 0x5d, 0xec, 0x35, 0x99, 0x67, 0x18, 0x66, 0x49, 0x78, 0x44, + 0x9b, 0x01, 0x7f, 0xfc, 0x80, 0x20, 0x1b, 0x26, 0xa6, 0x31, 0x20, 0x8c, 0xe0, 0xb1, 0xf1, 0x1b, + 0xb2, 0xc6, 0xc0, 0xdc, 0xad, 0xcc, 0xb3, 0x86, 0xf8, 0x20, 0x84, 0x58, 0x5f, 0x06, 0xe0, 0xde, + 0x20, 0x80, 0x0b, 0x3a, 0xc6, 0x95, 0xc4, 0x24, 0x43, 0xdc, 0x9d, 0xd4, 0x9b, 0x8f, 0x18, 0x9c, + 0x45, 0xf2, 0xc1, 0xa8, 0xf9, 0x6c, 0x76, 0xcc, 0x1e, 0xf5, 0xf6, 0xe5, 0x2d, 0x82, 0x6c, 0x98, + 0x28, 0xc7, 0x68, 0xd2, 0x08, 0x5e, 0xbd, 0xf2, 0xd6, 0xac, 0x8e, 0xde, 0x9a, 0x1f, 0x11, 0xe4, + 0xa3, 0x04, 0x8b, 0x93, 0xef, 0x64, 0x24, 0x23, 0x8f, 0x8f, 0xa8, 0xc4, 0x10, 0x11, 0xb2, 0x30, + 0xb2, 0x4f, 0x5d, 0x1e, 0xdc, 0xeb, 0xd5, 0x4b, 0xc8, 0xee, 0x52, 0xb7, 0xa6, 0x76, 0xea, 0xec, + 0x0b, 0x1f, 0x93, 0x20, 0x85, 0xa6, 0x76, 0x84, 0xde, 0xba, 0x10, 0x56, 0x06, 0x30, 0x66, 0x63, + 0x36, 0xbe, 0x96, 0x3c, 0x64, 0x49, 0x57, 0xc9, 0x5d, 0x96, 0x34, 0x00, 0x17, 0x4e, 0xdc, 0x0e, + 0x05, 0xe3, 0xc9, 0x1b, 0xef, 0x4b, 0xde, 0xf8, 0x37, 0x93, 0x3b, 0xb1, 0xe4, 0x3f, 0x23, 0xc0, + 0x32, 0x75, 0x98, 0x90, 0xda, 0x1d, 0xcd, 0x71, 0xbc, 0x5f, 0x2f, 0xb8, 0x14, 0x8b, 0x3f, 0x6c, + 0x12, 0x20, 0xb9, 0x3f, 0x86, 0x25, 0x27, 0xb8, 0x0d, 0x86, 0xee, 0x21, 0x59, 0xbb, 0x14, 0x9d, + 0x3b, 0xe4, 0xbc, 0x89, 0x56, 0x9f, 0xfc, 0x8e, 0x60, 0xa5, 0x69, 0x76, 0x92, 0xc6, 0xfe, 0x24, + 0xcb, 0x19, 0xb5, 0xee, 0x7d, 0x40, 0xd4, 0xd1, 0x57, 0x07, 0xdc, 0xa1, 0x6d, 0xea, 0xaa, 0xd1, + 0x16, 0x4c, 0xbb, 0x5d, 0x6e, 0x53, 0x83, 0x7d, 0x5e, 0x94, 0x7d, 0x95, 0x6a, 0x69, 0xce, 0xa5, + 0x3f, 0x35, 0x1f, 0xc7, 0x44, 0xbf, 0xa4, 0x96, 0x76, 0xfd, 0x80, 0x55, 0x86, 0x40, 0xe2, 0xea, + 0x7d, 0x8e, 0xe0, 0xb8, 0xf2, 0x47, 0x60, 0x71, 0xc2, 0x2c, 0x4e, 0x62, 0x16, 0x27, 0xc7, 0x95, + 0x67, 0x69, 0x96, 0xfe, 0xd1, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xda, 0x9b, 0x45, 0x8f, 0xe8, + 0x0e, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/resources.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/resources.pb.go new file mode 100644 index 0000000..774d008 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/resources.pb.go @@ -0,0 +1,677 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/runtimeconfig/v1beta1/resources.proto + +package runtimeconfig // import "google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The `VariableState` describes the last known state of the variable and is +// used during a `variables().watch` call to distinguish the state of the +// variable. +type VariableState int32 + +const ( + // Default variable state. + VariableState_VARIABLE_STATE_UNSPECIFIED VariableState = 0 + // The variable was updated, while `variables().watch` was executing. + VariableState_UPDATED VariableState = 1 + // The variable was deleted, while `variables().watch` was executing. + VariableState_DELETED VariableState = 2 +) + +var VariableState_name = map[int32]string{ + 0: "VARIABLE_STATE_UNSPECIFIED", + 1: "UPDATED", + 2: "DELETED", +} +var VariableState_value = map[string]int32{ + "VARIABLE_STATE_UNSPECIFIED": 0, + "UPDATED": 1, + "DELETED": 2, +} + +func (x VariableState) String() string { + return proto.EnumName(VariableState_name, int32(x)) +} +func (VariableState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{0} +} + +// A RuntimeConfig resource is the primary resource in the Cloud RuntimeConfig +// service. A RuntimeConfig resource consists of metadata and a hierarchy of +// variables. +type RuntimeConfig struct { + // The resource name of a runtime config. The name must have the format: + // + // projects/[PROJECT_ID]/configs/[CONFIG_NAME] + // + // The `[PROJECT_ID]` must be a valid project ID, and `[CONFIG_NAME]` is an + // arbitrary name that matches RFC 1035 segment specification. The length of + // `[CONFIG_NAME]` must be less than 64 bytes. + // + // You pick the RuntimeConfig resource name, but the server will validate that + // the name adheres to this format. After you create the resource, you cannot + // change the resource's name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional description of the RuntimeConfig object. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RuntimeConfig) Reset() { *m = RuntimeConfig{} } +func (m *RuntimeConfig) String() string { return proto.CompactTextString(m) } +func (*RuntimeConfig) ProtoMessage() {} +func (*RuntimeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{0} +} +func (m *RuntimeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RuntimeConfig.Unmarshal(m, b) +} +func (m *RuntimeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RuntimeConfig.Marshal(b, m, deterministic) +} +func (dst *RuntimeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RuntimeConfig.Merge(dst, src) +} +func (m *RuntimeConfig) XXX_Size() int { + return xxx_messageInfo_RuntimeConfig.Size(m) +} +func (m *RuntimeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RuntimeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RuntimeConfig proto.InternalMessageInfo + +func (m *RuntimeConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RuntimeConfig) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Describes a single variable within a RuntimeConfig resource. +// The name denotes the hierarchical variable name. For example, +// `ports/serving_port` is a valid variable name. The variable value is an +// opaque string and only leaf variables can have values (that is, variables +// that do not have any child variables). +type Variable struct { + // The name of the variable resource, in the format: + // + // projects/[PROJECT_ID]/configs/[CONFIG_NAME]/variables/[VARIABLE_NAME] + // + // The `[PROJECT_ID]` must be a valid project ID, `[CONFIG_NAME]` must be a + // valid RuntimeConfig reource and `[VARIABLE_NAME]` follows Unix file system + // file path naming. + // + // The `[VARIABLE_NAME]` can contain ASCII letters, numbers, slashes and + // dashes. Slashes are used as path element separators and are not part of the + // `[VARIABLE_NAME]` itself, so `[VARIABLE_NAME]` must contain at least one + // non-slash character. Multiple slashes are coalesced into single slash + // character. Each path segment should follow RFC 1035 segment specification. + // The length of a `[VARIABLE_NAME]` must be less than 256 bytes. + // + // Once you create a variable, you cannot change the variable name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The value of the variable. It can be either a binary or a string + // value. You must specify one of either `value` or `text`. Specifying both + // will cause the server to return an error. + // + // Types that are valid to be assigned to Contents: + // *Variable_Value + // *Variable_Text + Contents isVariable_Contents `protobuf_oneof:"contents"` + // [Output Only] The time of the last variable update. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // [Ouput only] The current state of the variable. The variable state + // indicates the outcome of the `variables().watch` call and is visible + // through the `get` and `list` calls. 
+ State VariableState `protobuf:"varint,4,opt,name=state,proto3,enum=google.cloud.runtimeconfig.v1beta1.VariableState" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Variable) Reset() { *m = Variable{} } +func (m *Variable) String() string { return proto.CompactTextString(m) } +func (*Variable) ProtoMessage() {} +func (*Variable) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{1} +} +func (m *Variable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Variable.Unmarshal(m, b) +} +func (m *Variable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Variable.Marshal(b, m, deterministic) +} +func (dst *Variable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Variable.Merge(dst, src) +} +func (m *Variable) XXX_Size() int { + return xxx_messageInfo_Variable.Size(m) +} +func (m *Variable) XXX_DiscardUnknown() { + xxx_messageInfo_Variable.DiscardUnknown(m) +} + +var xxx_messageInfo_Variable proto.InternalMessageInfo + +func (m *Variable) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isVariable_Contents interface { + isVariable_Contents() +} + +type Variable_Value struct { + Value []byte `protobuf:"bytes,2,opt,name=value,proto3,oneof"` +} + +type Variable_Text struct { + Text string `protobuf:"bytes,5,opt,name=text,proto3,oneof"` +} + +func (*Variable_Value) isVariable_Contents() {} + +func (*Variable_Text) isVariable_Contents() {} + +func (m *Variable) GetContents() isVariable_Contents { + if m != nil { + return m.Contents + } + return nil +} + +func (m *Variable) GetValue() []byte { + if x, ok := m.GetContents().(*Variable_Value); ok { + return x.Value + } + return nil +} + +func (m *Variable) GetText() string { + if x, ok := m.GetContents().(*Variable_Text); ok { + return x.Text + } + return "" +} + +func (m *Variable) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Variable) GetState() VariableState { + if m != nil { + return m.State + } + return VariableState_VARIABLE_STATE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Variable) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Variable_OneofMarshaler, _Variable_OneofUnmarshaler, _Variable_OneofSizer, []interface{}{ + (*Variable_Value)(nil), + (*Variable_Text)(nil), + } +} + +func _Variable_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Variable) + // contents + switch x := m.Contents.(type) { + case *Variable_Value: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Value) + case *Variable_Text: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case nil: + default: + return fmt.Errorf("Variable.Contents has unexpected type %T", x) + } + return nil +} + +func _Variable_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Variable) + switch tag { + case 2: // contents.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Contents = &Variable_Value{x} + return true, err + case 5: // contents.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Contents = &Variable_Text{x} + return true, err + default: + return false, nil + } +} + +func _Variable_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Variable) + // contents + switch x := m.Contents.(type) { + case *Variable_Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Value))) + n += len(x.Value) + case *Variable_Text: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The condition that a Waiter resource is waiting for. +type EndCondition struct { + // The condition oneof holds the available condition types for this + // EndCondition. Currently, the only available type is Cardinality. 
+ // + // Types that are valid to be assigned to Condition: + // *EndCondition_Cardinality_ + Condition isEndCondition_Condition `protobuf_oneof:"condition"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EndCondition) Reset() { *m = EndCondition{} } +func (m *EndCondition) String() string { return proto.CompactTextString(m) } +func (*EndCondition) ProtoMessage() {} +func (*EndCondition) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{2} +} +func (m *EndCondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EndCondition.Unmarshal(m, b) +} +func (m *EndCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EndCondition.Marshal(b, m, deterministic) +} +func (dst *EndCondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_EndCondition.Merge(dst, src) +} +func (m *EndCondition) XXX_Size() int { + return xxx_messageInfo_EndCondition.Size(m) +} +func (m *EndCondition) XXX_DiscardUnknown() { + xxx_messageInfo_EndCondition.DiscardUnknown(m) +} + +var xxx_messageInfo_EndCondition proto.InternalMessageInfo + +type isEndCondition_Condition interface { + isEndCondition_Condition() +} + +type EndCondition_Cardinality_ struct { + Cardinality *EndCondition_Cardinality `protobuf:"bytes,1,opt,name=cardinality,proto3,oneof"` +} + +func (*EndCondition_Cardinality_) isEndCondition_Condition() {} + +func (m *EndCondition) GetCondition() isEndCondition_Condition { + if m != nil { + return m.Condition + } + return nil +} + +func (m *EndCondition) GetCardinality() *EndCondition_Cardinality { + if x, ok := m.GetCondition().(*EndCondition_Cardinality_); ok { + return x.Cardinality + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*EndCondition) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _EndCondition_OneofMarshaler, _EndCondition_OneofUnmarshaler, _EndCondition_OneofSizer, []interface{}{ + (*EndCondition_Cardinality_)(nil), + } +} + +func _EndCondition_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*EndCondition) + // condition + switch x := m.Condition.(type) { + case *EndCondition_Cardinality_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Cardinality); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("EndCondition.Condition has unexpected type %T", x) + } + return nil +} + +func _EndCondition_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*EndCondition) + switch tag { + case 1: // condition.cardinality + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EndCondition_Cardinality) + err := b.DecodeMessage(msg) + m.Condition = &EndCondition_Cardinality_{msg} + return true, err + default: + return false, nil + } +} + +func _EndCondition_OneofSizer(msg proto.Message) (n int) { + m := msg.(*EndCondition) + // condition + switch x := m.Condition.(type) { + case *EndCondition_Cardinality_: + s := proto.Size(x.Cardinality) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Cardinality condition for the Waiter resource. 
A cardinality condition is +// met when the number of variables under a specified path prefix reaches a +// predefined number. For example, if you set a Cardinality condition where +// the `path` is set to `/foo` and the number of paths is set to 2, the +// following variables would meet the condition in a RuntimeConfig resource: +// +// + `/foo/variable1 = "value1"` +// + `/foo/variable2 = "value2"` +// + `/bar/variable3 = "value3"` +// +// It would not would not satisify the same condition with the `number` set to +// 3, however, because there is only 2 paths that start with `/foo`. +// Cardinality conditions are recursive; all subtrees under the specific +// path prefix are counted. +type EndCondition_Cardinality struct { + // The root of the variable subtree to monitor. For example, `/foo`. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // The number variables under the `path` that must exist to meet this + // condition. Defaults to 1 if not specified. + Number int32 `protobuf:"varint,2,opt,name=number,proto3" json:"number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EndCondition_Cardinality) Reset() { *m = EndCondition_Cardinality{} } +func (m *EndCondition_Cardinality) String() string { return proto.CompactTextString(m) } +func (*EndCondition_Cardinality) ProtoMessage() {} +func (*EndCondition_Cardinality) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{2, 0} +} +func (m *EndCondition_Cardinality) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EndCondition_Cardinality.Unmarshal(m, b) +} +func (m *EndCondition_Cardinality) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EndCondition_Cardinality.Marshal(b, m, deterministic) +} +func (dst *EndCondition_Cardinality) XXX_Merge(src proto.Message) { + xxx_messageInfo_EndCondition_Cardinality.Merge(dst, src) +} +func (m *EndCondition_Cardinality) XXX_Size() int { + return xxx_messageInfo_EndCondition_Cardinality.Size(m) +} +func (m *EndCondition_Cardinality) XXX_DiscardUnknown() { + xxx_messageInfo_EndCondition_Cardinality.DiscardUnknown(m) +} + +var xxx_messageInfo_EndCondition_Cardinality proto.InternalMessageInfo + +func (m *EndCondition_Cardinality) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *EndCondition_Cardinality) GetNumber() int32 { + if m != nil { + return m.Number + } + return 0 +} + +// A Waiter resource waits for some end condition within a RuntimeConfig +// resource to be met before it returns. For example, assume you have a +// distributed system where each node writes to a Variable resource indidicating +// the node's readiness as part of the startup process. +// +// You then configure a Waiter resource with the success condition set to wait +// until some number of nodes have checked in. Afterwards, your application +// runs some arbitrary code after the condition has been met and the waiter +// returns successfully. +// +// Once created, a Waiter resource is immutable. +// +// To learn more about using waiters, read the +// [Creating a +// Waiter](/deployment-manager/runtime-configurator/creating-a-waiter) +// documentation. 
+type Waiter struct { + // The name of the Waiter resource, in the format: + // + // projects/[PROJECT_ID]/configs/[CONFIG_NAME]/waiters/[WAITER_NAME] + // + // The `[PROJECT_ID]` must be a valid Google Cloud project ID, + // the `[CONFIG_NAME]` must be a valid RuntimeConfig resource, the + // `[WAITER_NAME]` must match RFC 1035 segment specification, and the length + // of `[WAITER_NAME]` must be less than 64 bytes. + // + // After you create a Waiter resource, you cannot change the resource name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // [Required] Specifies the timeout of the waiter in seconds, beginning from + // the instant that `waiters().create` method is called. If this time elapses + // before the success or failure conditions are met, the waiter fails and sets + // the `error` code to `DEADLINE_EXCEEDED`. + Timeout *duration.Duration `protobuf:"bytes,2,opt,name=timeout,proto3" json:"timeout,omitempty"` + // [Optional] The failure condition of this waiter. If this condition is met, + // `done` will be set to `true` and the `error` code will be set to `ABORTED`. + // The failure condition takes precedence over the success condition. If both + // conditions are met, a failure will be indicated. This value is optional; if + // no failure condition is set, the only failure scenario will be a timeout. + Failure *EndCondition `protobuf:"bytes,3,opt,name=failure,proto3" json:"failure,omitempty"` + // [Required] The success condition. If this condition is met, `done` will be + // set to `true` and the `error` value will remain unset. The failure + // condition takes precedence over the success condition. If both conditions + // are met, a failure will be indicated. + Success *EndCondition `protobuf:"bytes,4,opt,name=success,proto3" json:"success,omitempty"` + // [Output Only] The instant at which this Waiter resource was created. Adding + // the value of `timeout` to this instant yields the timeout deadline for the + // waiter. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // [Output Only] If the value is `false`, it means the waiter is still waiting + // for one of its conditions to be met. + // + // If true, the waiter has finished. If the waiter finished due to a timeout + // or failure, `error` will be set. + Done bool `protobuf:"varint,6,opt,name=done,proto3" json:"done,omitempty"` + // [Output Only] If the waiter ended due to a failure or timeout, this value + // will be set. 
+ Error *status.Status `protobuf:"bytes,7,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Waiter) Reset() { *m = Waiter{} } +func (m *Waiter) String() string { return proto.CompactTextString(m) } +func (*Waiter) ProtoMessage() {} +func (*Waiter) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_6ae4fdb8583f1f59, []int{3} +} +func (m *Waiter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Waiter.Unmarshal(m, b) +} +func (m *Waiter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Waiter.Marshal(b, m, deterministic) +} +func (dst *Waiter) XXX_Merge(src proto.Message) { + xxx_messageInfo_Waiter.Merge(dst, src) +} +func (m *Waiter) XXX_Size() int { + return xxx_messageInfo_Waiter.Size(m) +} +func (m *Waiter) XXX_DiscardUnknown() { + xxx_messageInfo_Waiter.DiscardUnknown(m) +} + +var xxx_messageInfo_Waiter proto.InternalMessageInfo + +func (m *Waiter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Waiter) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *Waiter) GetFailure() *EndCondition { + if m != nil { + return m.Failure + } + return nil +} + +func (m *Waiter) GetSuccess() *EndCondition { + if m != nil { + return m.Success + } + return nil +} + +func (m *Waiter) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Waiter) GetDone() bool { + if m != nil { + return m.Done + } + return false +} + +func (m *Waiter) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func init() { + proto.RegisterType((*RuntimeConfig)(nil), "google.cloud.runtimeconfig.v1beta1.RuntimeConfig") + proto.RegisterType((*Variable)(nil), "google.cloud.runtimeconfig.v1beta1.Variable") + proto.RegisterType((*EndCondition)(nil), "google.cloud.runtimeconfig.v1beta1.EndCondition") + proto.RegisterType((*EndCondition_Cardinality)(nil), "google.cloud.runtimeconfig.v1beta1.EndCondition.Cardinality") + proto.RegisterType((*Waiter)(nil), "google.cloud.runtimeconfig.v1beta1.Waiter") + proto.RegisterEnum("google.cloud.runtimeconfig.v1beta1.VariableState", VariableState_name, VariableState_value) +} + +func init() { + proto.RegisterFile("google/cloud/runtimeconfig/v1beta1/resources.proto", fileDescriptor_resources_6ae4fdb8583f1f59) +} + +var fileDescriptor_resources_6ae4fdb8583f1f59 = []byte{ + // 628 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x94, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0xc7, 0x9b, 0xd2, 0x8f, 0xed, 0x64, 0x43, 0x93, 0x85, 0x46, 0xa8, 0xd0, 0xa8, 0x7a, 0x81, + 0x2a, 0x2e, 0x12, 0xda, 0x5d, 0xa1, 0x71, 0xd3, 0x8f, 0xb0, 0x15, 0x4d, 0x30, 0xa5, 0x5d, 0x91, + 0xd0, 0xa4, 0xe1, 0x3a, 0x6e, 0x88, 0x94, 0xda, 0x91, 0xe3, 0x4c, 0xf0, 0x4a, 0x3c, 0x01, 0x2f, + 0xc0, 0x0d, 0x0f, 0xc1, 0x15, 0x0f, 0x82, 0xec, 0x38, 0xd0, 0xc2, 0xc4, 0x06, 0x77, 0x3e, 0x3e, + 0xff, 0xf3, 0x3b, 0x1f, 0x3e, 0x09, 0xf4, 0x23, 0xce, 0xa3, 0x84, 0x7a, 0x24, 0xe1, 0x79, 0xe8, + 0x89, 0x9c, 0xc9, 0x78, 0x45, 0x09, 0x67, 0xcb, 0x38, 0xf2, 0xae, 0x7a, 0x0b, 0x2a, 0x71, 0xcf, + 0x13, 0x34, 0xe3, 0xb9, 0x20, 0x34, 0x73, 0x53, 0xc1, 0x25, 0x47, 0x9d, 0x22, 0xc6, 0xd5, 0x31, + 0xee, 0x46, 0x8c, 0x6b, 0x62, 0x5a, 0x0f, 0x0d, 0x17, 0xa7, 0xb1, 0x87, 0x19, 0xe3, 0x12, 0xcb, + 0x98, 0x33, 0x43, 0x68, 0x1d, 0x18, 0xaf, 0xb6, 0x16, 0xf9, 0xd2, 0x0b, 
0x73, 0xa1, 0x05, 0xc6, + 0xff, 0xe8, 0x77, 0xbf, 0xca, 0x90, 0x49, 0xbc, 0x4a, 0x8d, 0xe0, 0xbe, 0x11, 0x88, 0x94, 0x78, + 0x99, 0xc4, 0x32, 0x37, 0xe4, 0x8e, 0x0f, 0xbb, 0x41, 0x51, 0xd0, 0x48, 0x17, 0x84, 0x10, 0xd4, + 0x18, 0x5e, 0x51, 0xc7, 0x6a, 0x5b, 0xdd, 0xed, 0x40, 0x9f, 0x51, 0x1b, 0xec, 0x90, 0x66, 0x44, + 0xc4, 0xa9, 0xca, 0xe9, 0x54, 0xb5, 0x6b, 0xfd, 0xaa, 0xf3, 0xcd, 0x82, 0xad, 0x39, 0x16, 0x31, + 0x5e, 0x24, 0xf4, 0x5a, 0xc4, 0x3e, 0xd4, 0xaf, 0x70, 0x92, 0x53, 0x1d, 0xbc, 0x73, 0x52, 0x09, + 0x0a, 0x13, 0xdd, 0x83, 0x9a, 0xa4, 0x1f, 0xa4, 0x53, 0x57, 0xda, 0x93, 0x4a, 0xa0, 0x2d, 0x74, + 0x04, 0x76, 0x9e, 0x86, 0x58, 0xd2, 0x4b, 0x55, 0x99, 0x73, 0xa7, 0x6d, 0x75, 0xed, 0x7e, 0xcb, + 0x35, 0x73, 0x2c, 0xbb, 0x74, 0x67, 0x65, 0x97, 0x01, 0x14, 0x72, 0x75, 0x81, 0x8e, 0xa1, 0xae, + 0x5a, 0xa4, 0x4e, 0xad, 0x6d, 0x75, 0xef, 0xf6, 0x7b, 0xee, 0xcd, 0xe3, 0x77, 0xcb, 0xda, 0xa7, + 0x2a, 0x30, 0x28, 0xe2, 0x87, 0x00, 0x5b, 0x84, 0x33, 0x49, 0x99, 0xcc, 0x3a, 0x9f, 0x2d, 0xd8, + 0xf1, 0x59, 0x38, 0xe2, 0x2c, 0x8c, 0x55, 0xc7, 0xe8, 0x1d, 0xd8, 0x04, 0x8b, 0x30, 0x66, 0x38, + 0x89, 0xe5, 0x47, 0xdd, 0xab, 0xdd, 0x7f, 0x7e, 0x9b, 0x5c, 0xeb, 0x18, 0x77, 0xf4, 0x8b, 0x71, + 0x52, 0x09, 0xd6, 0x91, 0xad, 0x67, 0x60, 0xaf, 0x79, 0xd5, 0x54, 0x53, 0x2c, 0xdf, 0x97, 0x53, + 0x55, 0x67, 0xb4, 0x0f, 0x0d, 0x96, 0xaf, 0x16, 0x54, 0xe8, 0xb1, 0xd6, 0x03, 0x63, 0x0d, 0x6d, + 0xd8, 0x26, 0x65, 0x8a, 0xce, 0xf7, 0x2a, 0x34, 0xde, 0xe0, 0x58, 0x52, 0x71, 0xed, 0xcb, 0x1c, + 0x42, 0x53, 0x15, 0xc9, 0x73, 0xa9, 0x21, 0x76, 0xff, 0xc1, 0x1f, 0x73, 0x1e, 0x9b, 0x6d, 0x0b, + 0x4a, 0x25, 0x7a, 0x09, 0xcd, 0x25, 0x8e, 0x93, 0x5c, 0x94, 0x8f, 0xf3, 0xf4, 0x5f, 0x3b, 0x0f, + 0x4a, 0x80, 0x62, 0x65, 0x39, 0x21, 0x34, 0xcb, 0xf4, 0x8b, 0xfd, 0x17, 0xcb, 0x00, 0xd4, 0xe2, + 0x10, 0x41, 0x7f, 0x2e, 0x4e, 0xfd, 0xe6, 0xc5, 0x29, 0xe4, 0x7a, 0x71, 0x10, 0xd4, 0x42, 0xce, + 0xa8, 0xd3, 0x68, 0x5b, 0xdd, 0xad, 0x40, 0x9f, 0x51, 0x17, 0xea, 0x54, 0x08, 0x2e, 0x9c, 0xa6, + 0x46, 0xa1, 0x12, 0x25, 0x52, 0xe2, 0x4e, 0xf5, 0x87, 0x14, 0x14, 0x82, 0x27, 0x13, 0xd8, 0xdd, + 0xd8, 0x22, 0x74, 0x00, 0xad, 0xf9, 0x20, 0x98, 0x0c, 0x86, 0xa7, 0xfe, 0xe5, 0x74, 0x36, 0x98, + 0xf9, 0x97, 0xe7, 0xaf, 0xa6, 0x67, 0xfe, 0x68, 0xf2, 0x62, 0xe2, 0x8f, 0xf7, 0x2a, 0xc8, 0x86, + 0xe6, 0xf9, 0xd9, 0x78, 0x30, 0xf3, 0xc7, 0x7b, 0x96, 0x32, 0xc6, 0xfe, 0xa9, 0xaf, 0x8c, 0xea, + 0xf0, 0x8b, 0x05, 0x8f, 0x09, 0x5f, 0xdd, 0x62, 0x0c, 0x67, 0xd6, 0xdb, 0xd7, 0x46, 0x15, 0xf1, + 0x04, 0xb3, 0xc8, 0xe5, 0x22, 0xf2, 0x22, 0xca, 0x74, 0xab, 0x5e, 0xe1, 0xc2, 0x69, 0x9c, 0xfd, + 0xed, 0x87, 0x75, 0xb4, 0x71, 0xfb, 0xa9, 0xda, 0x39, 0x2e, 0x88, 0x23, 0x9d, 0x77, 0xe3, 0xf7, + 0xe0, 0xce, 0x7b, 0x43, 0x15, 0xf2, 0xb5, 0x14, 0x5d, 0x68, 0xd1, 0xc5, 0x86, 0xe8, 0x62, 0x5e, + 0x70, 0x17, 0x0d, 0x5d, 0xc5, 0xe1, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x1a, 0xc9, 0x60, 0x90, + 0x35, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/runtimeconfig.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/runtimeconfig.pb.go new file mode 100644 index 0000000..f7c238c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1/runtimeconfig.pb.go @@ -0,0 +1,1764 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto + +package runtimeconfig // import "google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for the `ListConfigs()` method. +type ListConfigsRequest struct { + // The [project + // ID](https://support.google.com/cloud/answer/6158840?hl=en&ref_topic=6158848) + // for this request, in the format `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Specifies the number of results to return per page. If there are fewer + // elements than the specified number, returns all elements. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Specifies a page token to use. Set `pageToken` to a `nextPageToken` + // returned by a previous list request to get the next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfigsRequest) Reset() { *m = ListConfigsRequest{} } +func (m *ListConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListConfigsRequest) ProtoMessage() {} +func (*ListConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{0} +} +func (m *ListConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfigsRequest.Unmarshal(m, b) +} +func (m *ListConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfigsRequest.Merge(dst, src) +} +func (m *ListConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListConfigsRequest.Size(m) +} +func (m *ListConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfigsRequest proto.InternalMessageInfo + +func (m *ListConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// `ListConfigs()` returns the following response. The order of returned +// objects is arbitrary; that is, it is not ordered in any particular way. 
+type ListConfigsResponse struct { + // A list of the configurations in the project. The order of returned + // objects is arbitrary; that is, it is not ordered in any particular way. + Configs []*RuntimeConfig `protobuf:"bytes,1,rep,name=configs,proto3" json:"configs,omitempty"` + // This token allows you to get the next page of results for list requests. + // If the number of results is larger than `pageSize`, use the `nextPageToken` + // as a value for the query parameter `pageToken` in the next list request. + // Subsequent list requests will have their own `nextPageToken` to continue + // paging through the results + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfigsResponse) Reset() { *m = ListConfigsResponse{} } +func (m *ListConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListConfigsResponse) ProtoMessage() {} +func (*ListConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{1} +} +func (m *ListConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfigsResponse.Unmarshal(m, b) +} +func (m *ListConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfigsResponse.Merge(dst, src) +} +func (m *ListConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListConfigsResponse.Size(m) +} +func (m *ListConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfigsResponse proto.InternalMessageInfo + +func (m *ListConfigsResponse) GetConfigs() []*RuntimeConfig { + if m != nil { + return m.Configs + } + return nil +} + +func (m *ListConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Gets a RuntimeConfig resource. 
+type GetConfigRequest struct { + // The name of the RuntimeConfig resource to retrieve, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConfigRequest) Reset() { *m = GetConfigRequest{} } +func (m *GetConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetConfigRequest) ProtoMessage() {} +func (*GetConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{2} +} +func (m *GetConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConfigRequest.Unmarshal(m, b) +} +func (m *GetConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConfigRequest.Merge(dst, src) +} +func (m *GetConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetConfigRequest.Size(m) +} +func (m *GetConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConfigRequest proto.InternalMessageInfo + +func (m *GetConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Creates a RuntimeConfig resource. +type CreateConfigRequest struct { + // The [project + // ID](https://support.google.com/cloud/answer/6158840?hl=en&ref_topic=6158848) + // for this request, in the format `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The RuntimeConfig to create. + Config *RuntimeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // An optional but recommended unique `request_id`. If the server + // receives two `create()` requests with the same + // `request_id`, then the second request will be ignored and the + // first resource created and stored in the backend is returned. + // Empty `request_id` fields are ignored. + // + // It is responsibility of the client to ensure uniqueness of the + // `request_id` strings. + // + // `request_id` strings are limited to 64 characters. 
+ RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateConfigRequest) Reset() { *m = CreateConfigRequest{} } +func (m *CreateConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateConfigRequest) ProtoMessage() {} +func (*CreateConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{3} +} +func (m *CreateConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateConfigRequest.Unmarshal(m, b) +} +func (m *CreateConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateConfigRequest.Merge(dst, src) +} +func (m *CreateConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateConfigRequest.Size(m) +} +func (m *CreateConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateConfigRequest proto.InternalMessageInfo + +func (m *CreateConfigRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateConfigRequest) GetConfig() *RuntimeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *CreateConfigRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Request message for `UpdateConfig()` method. +type UpdateConfigRequest struct { + // The name of the RuntimeConfig resource to update, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The config resource to update. + Config *RuntimeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateConfigRequest) Reset() { *m = UpdateConfigRequest{} } +func (m *UpdateConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateConfigRequest) ProtoMessage() {} +func (*UpdateConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{4} +} +func (m *UpdateConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateConfigRequest.Unmarshal(m, b) +} +func (m *UpdateConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateConfigRequest.Merge(dst, src) +} +func (m *UpdateConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateConfigRequest.Size(m) +} +func (m *UpdateConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateConfigRequest proto.InternalMessageInfo + +func (m *UpdateConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateConfigRequest) GetConfig() *RuntimeConfig { + if m != nil { + return m.Config + } + return nil +} + +// Request for the `DeleteConfig()` method. 
+type DeleteConfigRequest struct { + // The RuntimeConfig resource to delete, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteConfigRequest) Reset() { *m = DeleteConfigRequest{} } +func (m *DeleteConfigRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteConfigRequest) ProtoMessage() {} +func (*DeleteConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{5} +} +func (m *DeleteConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteConfigRequest.Unmarshal(m, b) +} +func (m *DeleteConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteConfigRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteConfigRequest.Merge(dst, src) +} +func (m *DeleteConfigRequest) XXX_Size() int { + return xxx_messageInfo_DeleteConfigRequest.Size(m) +} +func (m *DeleteConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteConfigRequest proto.InternalMessageInfo + +func (m *DeleteConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListVariables()` method. +type ListVariablesRequest struct { + // The path to the RuntimeConfig resource for which you want to list + // variables. The configuration must exist beforehand; the path must by in the + // format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Filters variables by matching the specified filter. For example: + // + // `projects/example-project/config/[CONFIG_NAME]/variables/example-variable`. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Specifies the number of results to return per page. If there are fewer + // elements than the specified number, returns all elements. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Specifies a page token to use. Set `pageToken` to a `nextPageToken` + // returned by a previous list request to get the next page of results. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The flag indicates whether the user wants to return values of variables. + // If true, then only those variables that user has IAM GetVariable permission + // will be returned along with their values. 
+ ReturnValues bool `protobuf:"varint,5,opt,name=return_values,json=returnValues,proto3" json:"return_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVariablesRequest) Reset() { *m = ListVariablesRequest{} } +func (m *ListVariablesRequest) String() string { return proto.CompactTextString(m) } +func (*ListVariablesRequest) ProtoMessage() {} +func (*ListVariablesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{6} +} +func (m *ListVariablesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVariablesRequest.Unmarshal(m, b) +} +func (m *ListVariablesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVariablesRequest.Marshal(b, m, deterministic) +} +func (dst *ListVariablesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVariablesRequest.Merge(dst, src) +} +func (m *ListVariablesRequest) XXX_Size() int { + return xxx_messageInfo_ListVariablesRequest.Size(m) +} +func (m *ListVariablesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListVariablesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVariablesRequest proto.InternalMessageInfo + +func (m *ListVariablesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListVariablesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListVariablesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListVariablesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListVariablesRequest) GetReturnValues() bool { + if m != nil { + return m.ReturnValues + } + return false +} + +// Response for the `ListVariables()` method. +type ListVariablesResponse struct { + // A list of variables and their values. The order of returned variable + // objects is arbitrary. + Variables []*Variable `protobuf:"bytes,1,rep,name=variables,proto3" json:"variables,omitempty"` + // This token allows you to get the next page of results for list requests. + // If the number of results is larger than `pageSize`, use the `nextPageToken` + // as a value for the query parameter `pageToken` in the next list request. 
+ // Subsequent list requests will have their own `nextPageToken` to continue + // paging through the results + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVariablesResponse) Reset() { *m = ListVariablesResponse{} } +func (m *ListVariablesResponse) String() string { return proto.CompactTextString(m) } +func (*ListVariablesResponse) ProtoMessage() {} +func (*ListVariablesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{7} +} +func (m *ListVariablesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVariablesResponse.Unmarshal(m, b) +} +func (m *ListVariablesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVariablesResponse.Marshal(b, m, deterministic) +} +func (dst *ListVariablesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVariablesResponse.Merge(dst, src) +} +func (m *ListVariablesResponse) XXX_Size() int { + return xxx_messageInfo_ListVariablesResponse.Size(m) +} +func (m *ListVariablesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListVariablesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVariablesResponse proto.InternalMessageInfo + +func (m *ListVariablesResponse) GetVariables() []*Variable { + if m != nil { + return m.Variables + } + return nil +} + +func (m *ListVariablesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `WatchVariable()` method. +type WatchVariableRequest struct { + // The name of the variable to watch, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // If specified, checks the current timestamp of the variable and if the + // current timestamp is newer than `newerThan` timestamp, the method returns + // immediately. + // + // If not specified or the variable has an older timestamp, the watcher waits + // for a the value to change before returning. 
+ NewerThan *timestamp.Timestamp `protobuf:"bytes,4,opt,name=newer_than,json=newerThan,proto3" json:"newer_than,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WatchVariableRequest) Reset() { *m = WatchVariableRequest{} } +func (m *WatchVariableRequest) String() string { return proto.CompactTextString(m) } +func (*WatchVariableRequest) ProtoMessage() {} +func (*WatchVariableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{8} +} +func (m *WatchVariableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WatchVariableRequest.Unmarshal(m, b) +} +func (m *WatchVariableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WatchVariableRequest.Marshal(b, m, deterministic) +} +func (dst *WatchVariableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WatchVariableRequest.Merge(dst, src) +} +func (m *WatchVariableRequest) XXX_Size() int { + return xxx_messageInfo_WatchVariableRequest.Size(m) +} +func (m *WatchVariableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WatchVariableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WatchVariableRequest proto.InternalMessageInfo + +func (m *WatchVariableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WatchVariableRequest) GetNewerThan() *timestamp.Timestamp { + if m != nil { + return m.NewerThan + } + return nil +} + +// Request for the `GetVariable()` method. +type GetVariableRequest struct { + // The name of the variable to return, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]/variables/[VARIBLE_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVariableRequest) Reset() { *m = GetVariableRequest{} } +func (m *GetVariableRequest) String() string { return proto.CompactTextString(m) } +func (*GetVariableRequest) ProtoMessage() {} +func (*GetVariableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{9} +} +func (m *GetVariableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVariableRequest.Unmarshal(m, b) +} +func (m *GetVariableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVariableRequest.Marshal(b, m, deterministic) +} +func (dst *GetVariableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVariableRequest.Merge(dst, src) +} +func (m *GetVariableRequest) XXX_Size() int { + return xxx_messageInfo_GetVariableRequest.Size(m) +} +func (m *GetVariableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVariableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVariableRequest proto.InternalMessageInfo + +func (m *GetVariableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `CreateVariable()` method. +type CreateVariableRequest struct { + // The path to the RutimeConfig resource that this variable should belong to. + // The configuration must exist beforehand; the path must by in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The variable to create. 
+ Variable *Variable `protobuf:"bytes,2,opt,name=variable,proto3" json:"variable,omitempty"` + // An optional but recommended unique `request_id`. If the server + // receives two `create()` requests with the same + // `request_id`, then the second request will be ignored and the + // first resource created and stored in the backend is returned. + // Empty `request_id` fields are ignored. + // + // It is responsibility of the client to ensure uniqueness of the + // `request_id` strings. + // + // `request_id` strings are limited to 64 characters. + RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVariableRequest) Reset() { *m = CreateVariableRequest{} } +func (m *CreateVariableRequest) String() string { return proto.CompactTextString(m) } +func (*CreateVariableRequest) ProtoMessage() {} +func (*CreateVariableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{10} +} +func (m *CreateVariableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVariableRequest.Unmarshal(m, b) +} +func (m *CreateVariableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVariableRequest.Marshal(b, m, deterministic) +} +func (dst *CreateVariableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVariableRequest.Merge(dst, src) +} +func (m *CreateVariableRequest) XXX_Size() int { + return xxx_messageInfo_CreateVariableRequest.Size(m) +} +func (m *CreateVariableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVariableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVariableRequest proto.InternalMessageInfo + +func (m *CreateVariableRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateVariableRequest) GetVariable() *Variable { + if m != nil { + return m.Variable + } + return nil +} + +func (m *CreateVariableRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Request for the `UpdateVariable()` method. +type UpdateVariableRequest struct { + // The name of the variable to update, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]/variables/[VARIABLE_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The variable to update. 
+ Variable *Variable `protobuf:"bytes,2,opt,name=variable,proto3" json:"variable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateVariableRequest) Reset() { *m = UpdateVariableRequest{} } +func (m *UpdateVariableRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateVariableRequest) ProtoMessage() {} +func (*UpdateVariableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{11} +} +func (m *UpdateVariableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateVariableRequest.Unmarshal(m, b) +} +func (m *UpdateVariableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateVariableRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateVariableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateVariableRequest.Merge(dst, src) +} +func (m *UpdateVariableRequest) XXX_Size() int { + return xxx_messageInfo_UpdateVariableRequest.Size(m) +} +func (m *UpdateVariableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateVariableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateVariableRequest proto.InternalMessageInfo + +func (m *UpdateVariableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateVariableRequest) GetVariable() *Variable { + if m != nil { + return m.Variable + } + return nil +} + +// Request for the `DeleteVariable()` method. +type DeleteVariableRequest struct { + // The name of the variable to delete, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]/variables/[VARIABLE_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Set to `true` to recursively delete multiple variables with the same + // prefix. + Recursive bool `protobuf:"varint,2,opt,name=recursive,proto3" json:"recursive,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteVariableRequest) Reset() { *m = DeleteVariableRequest{} } +func (m *DeleteVariableRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteVariableRequest) ProtoMessage() {} +func (*DeleteVariableRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{12} +} +func (m *DeleteVariableRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteVariableRequest.Unmarshal(m, b) +} +func (m *DeleteVariableRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteVariableRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteVariableRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteVariableRequest.Merge(dst, src) +} +func (m *DeleteVariableRequest) XXX_Size() int { + return xxx_messageInfo_DeleteVariableRequest.Size(m) +} +func (m *DeleteVariableRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteVariableRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteVariableRequest proto.InternalMessageInfo + +func (m *DeleteVariableRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteVariableRequest) GetRecursive() bool { + if m != nil { + return m.Recursive + } + return false +} + +// Request for the `ListWaiters()` method. +type ListWaitersRequest struct { + // The path to the configuration for which you want to get a list of waiters. 
+ // The configuration must exist beforehand; the path must by in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Specifies the number of results to return per page. If there are fewer + // elements than the specified number, returns all elements. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Specifies a page token to use. Set `pageToken` to a `nextPageToken` + // returned by a previous list request to get the next page of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWaitersRequest) Reset() { *m = ListWaitersRequest{} } +func (m *ListWaitersRequest) String() string { return proto.CompactTextString(m) } +func (*ListWaitersRequest) ProtoMessage() {} +func (*ListWaitersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{13} +} +func (m *ListWaitersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWaitersRequest.Unmarshal(m, b) +} +func (m *ListWaitersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWaitersRequest.Marshal(b, m, deterministic) +} +func (dst *ListWaitersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWaitersRequest.Merge(dst, src) +} +func (m *ListWaitersRequest) XXX_Size() int { + return xxx_messageInfo_ListWaitersRequest.Size(m) +} +func (m *ListWaitersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListWaitersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWaitersRequest proto.InternalMessageInfo + +func (m *ListWaitersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListWaitersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListWaitersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListWaiters()` method. +// Order of returned waiter objects is arbitrary. +type ListWaitersResponse struct { + // Found waiters in the project. + Waiters []*Waiter `protobuf:"bytes,1,rep,name=waiters,proto3" json:"waiters,omitempty"` + // This token allows you to get the next page of results for list requests. + // If the number of results is larger than `pageSize`, use the `nextPageToken` + // as a value for the query parameter `pageToken` in the next list request. 
+ // Subsequent list requests will have their own `nextPageToken` to continue + // paging through the results + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListWaitersResponse) Reset() { *m = ListWaitersResponse{} } +func (m *ListWaitersResponse) String() string { return proto.CompactTextString(m) } +func (*ListWaitersResponse) ProtoMessage() {} +func (*ListWaitersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{14} +} +func (m *ListWaitersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListWaitersResponse.Unmarshal(m, b) +} +func (m *ListWaitersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListWaitersResponse.Marshal(b, m, deterministic) +} +func (dst *ListWaitersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListWaitersResponse.Merge(dst, src) +} +func (m *ListWaitersResponse) XXX_Size() int { + return xxx_messageInfo_ListWaitersResponse.Size(m) +} +func (m *ListWaitersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListWaitersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListWaitersResponse proto.InternalMessageInfo + +func (m *ListWaitersResponse) GetWaiters() []*Waiter { + if m != nil { + return m.Waiters + } + return nil +} + +func (m *ListWaitersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `GetWaiter()` method. +type GetWaiterRequest struct { + // The fully-qualified name of the Waiter resource object to retrieve, in the + // format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]/waiters/[WAITER_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWaiterRequest) Reset() { *m = GetWaiterRequest{} } +func (m *GetWaiterRequest) String() string { return proto.CompactTextString(m) } +func (*GetWaiterRequest) ProtoMessage() {} +func (*GetWaiterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{15} +} +func (m *GetWaiterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWaiterRequest.Unmarshal(m, b) +} +func (m *GetWaiterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWaiterRequest.Marshal(b, m, deterministic) +} +func (dst *GetWaiterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWaiterRequest.Merge(dst, src) +} +func (m *GetWaiterRequest) XXX_Size() int { + return xxx_messageInfo_GetWaiterRequest.Size(m) +} +func (m *GetWaiterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWaiterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWaiterRequest proto.InternalMessageInfo + +func (m *GetWaiterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `CreateWaiter()` method. +type CreateWaiterRequest struct { + // The path to the configuration that will own the waiter. + // The configuration must exist beforehand; the path must by in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The Waiter resource to create. 
+ Waiter *Waiter `protobuf:"bytes,2,opt,name=waiter,proto3" json:"waiter,omitempty"` + // An optional but recommended unique `request_id`. If the server + // receives two `create()` requests with the same + // `request_id`, then the second request will be ignored and the + // first resource created and stored in the backend is returned. + // Empty `request_id` fields are ignored. + // + // It is responsibility of the client to ensure uniqueness of the + // `request_id` strings. + // + // `request_id` strings are limited to 64 characters. + RequestId string `protobuf:"bytes,3,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWaiterRequest) Reset() { *m = CreateWaiterRequest{} } +func (m *CreateWaiterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateWaiterRequest) ProtoMessage() {} +func (*CreateWaiterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{16} +} +func (m *CreateWaiterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWaiterRequest.Unmarshal(m, b) +} +func (m *CreateWaiterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWaiterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateWaiterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWaiterRequest.Merge(dst, src) +} +func (m *CreateWaiterRequest) XXX_Size() int { + return xxx_messageInfo_CreateWaiterRequest.Size(m) +} +func (m *CreateWaiterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWaiterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWaiterRequest proto.InternalMessageInfo + +func (m *CreateWaiterRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateWaiterRequest) GetWaiter() *Waiter { + if m != nil { + return m.Waiter + } + return nil +} + +func (m *CreateWaiterRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Request for the `DeleteWaiter()` method. 
+type DeleteWaiterRequest struct { + // The Waiter resource to delete, in the format: + // + // `projects/[PROJECT_ID]/configs/[CONFIG_NAME]/waiters/[WAITER_NAME]` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteWaiterRequest) Reset() { *m = DeleteWaiterRequest{} } +func (m *DeleteWaiterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteWaiterRequest) ProtoMessage() {} +func (*DeleteWaiterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_runtimeconfig_eced5b419d927b84, []int{17} +} +func (m *DeleteWaiterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteWaiterRequest.Unmarshal(m, b) +} +func (m *DeleteWaiterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteWaiterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteWaiterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteWaiterRequest.Merge(dst, src) +} +func (m *DeleteWaiterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteWaiterRequest.Size(m) +} +func (m *DeleteWaiterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteWaiterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteWaiterRequest proto.InternalMessageInfo + +func (m *DeleteWaiterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*ListConfigsRequest)(nil), "google.cloud.runtimeconfig.v1beta1.ListConfigsRequest") + proto.RegisterType((*ListConfigsResponse)(nil), "google.cloud.runtimeconfig.v1beta1.ListConfigsResponse") + proto.RegisterType((*GetConfigRequest)(nil), "google.cloud.runtimeconfig.v1beta1.GetConfigRequest") + proto.RegisterType((*CreateConfigRequest)(nil), "google.cloud.runtimeconfig.v1beta1.CreateConfigRequest") + proto.RegisterType((*UpdateConfigRequest)(nil), "google.cloud.runtimeconfig.v1beta1.UpdateConfigRequest") + proto.RegisterType((*DeleteConfigRequest)(nil), "google.cloud.runtimeconfig.v1beta1.DeleteConfigRequest") + proto.RegisterType((*ListVariablesRequest)(nil), "google.cloud.runtimeconfig.v1beta1.ListVariablesRequest") + proto.RegisterType((*ListVariablesResponse)(nil), "google.cloud.runtimeconfig.v1beta1.ListVariablesResponse") + proto.RegisterType((*WatchVariableRequest)(nil), "google.cloud.runtimeconfig.v1beta1.WatchVariableRequest") + proto.RegisterType((*GetVariableRequest)(nil), "google.cloud.runtimeconfig.v1beta1.GetVariableRequest") + proto.RegisterType((*CreateVariableRequest)(nil), "google.cloud.runtimeconfig.v1beta1.CreateVariableRequest") + proto.RegisterType((*UpdateVariableRequest)(nil), "google.cloud.runtimeconfig.v1beta1.UpdateVariableRequest") + proto.RegisterType((*DeleteVariableRequest)(nil), "google.cloud.runtimeconfig.v1beta1.DeleteVariableRequest") + proto.RegisterType((*ListWaitersRequest)(nil), "google.cloud.runtimeconfig.v1beta1.ListWaitersRequest") + proto.RegisterType((*ListWaitersResponse)(nil), "google.cloud.runtimeconfig.v1beta1.ListWaitersResponse") + proto.RegisterType((*GetWaiterRequest)(nil), "google.cloud.runtimeconfig.v1beta1.GetWaiterRequest") + proto.RegisterType((*CreateWaiterRequest)(nil), "google.cloud.runtimeconfig.v1beta1.CreateWaiterRequest") + proto.RegisterType((*DeleteWaiterRequest)(nil), "google.cloud.runtimeconfig.v1beta1.DeleteWaiterRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
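
The generated Create* request messages above document an optional `request_id` that acts as an idempotency key: if the service sees two `create()` calls with the same value, the second is ignored and the originally stored resource is returned, and the string is limited to 64 characters. The sketch below is an illustration only, not part of the vendored file; the import alias and resource paths are stand-ins, and it assumes the `Variable` message generated earlier in this file exposes a `Name` field, as its path-style documentation suggests.

    // requestid_sketch.go: populate an idempotent CreateVariableRequest
    // using the message types generated in runtimeconfig.pb.go.
    package main

    import (
    	"crypto/rand"
    	"encoding/hex"
    	"fmt"

    	runtimeconfig "google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1"
    )

    // newRequestID returns a random 32-character hex string, well under the
    // documented 64-character limit for request_id.
    func newRequestID() string {
    	b := make([]byte, 16)
    	if _, err := rand.Read(b); err != nil {
    		panic(err) // sketch only
    	}
    	return hex.EncodeToString(b)
    }

    func main() {
    	req := &runtimeconfig.CreateVariableRequest{
    		Parent: "projects/my-project/configs/my-config", // hypothetical resource path
    		Variable: &runtimeconfig.Variable{
    			Name: "projects/my-project/configs/my-config/variables/example", // assumes Variable.Name
    		},
    		// Reuse the same request_id verbatim when retrying this RPC so the
    		// server can deduplicate the create.
    		RequestId: newRequestID(),
    	}
    	fmt.Println(req.GetRequestId())
    }
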
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// RuntimeConfigManagerClient is the client API for RuntimeConfigManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type RuntimeConfigManagerClient interface { + // Lists all the RuntimeConfig resources within project. + ListConfigs(ctx context.Context, in *ListConfigsRequest, opts ...grpc.CallOption) (*ListConfigsResponse, error) + // Gets information about a RuntimeConfig resource. + GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) + // Creates a new RuntimeConfig resource. The configuration name must be + // unique within project. + CreateConfig(ctx context.Context, in *CreateConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) + // Updates a RuntimeConfig resource. The configuration must exist beforehand. + UpdateConfig(ctx context.Context, in *UpdateConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) + // Deletes a RuntimeConfig resource. + DeleteConfig(ctx context.Context, in *DeleteConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists variables within given a configuration, matching any provided + // filters. This only lists variable names, not the values, unless + // `return_values` is true, in which case only variables that user has IAM + // permission to GetVariable will be returned. + ListVariables(ctx context.Context, in *ListVariablesRequest, opts ...grpc.CallOption) (*ListVariablesResponse, error) + // Gets information about a single variable. + GetVariable(ctx context.Context, in *GetVariableRequest, opts ...grpc.CallOption) (*Variable, error) + // Watches a specific variable and waits for a change in the variable's value. + // When there is a change, this method returns the new value or times out. + // + // If a variable is deleted while being watched, the `variableState` state is + // set to `DELETED` and the method returns the last known variable `value`. + // + // If you set the deadline for watching to a larger value than internal + // timeout (60 seconds), the current variable value is returned and the + // `variableState` will be `VARIABLE_STATE_UNSPECIFIED`. + // + // To learn more about creating a watcher, read the + // [Watching a Variable for + // Changes](/deployment-manager/runtime-configurator/watching-a-variable) + // documentation. + WatchVariable(ctx context.Context, in *WatchVariableRequest, opts ...grpc.CallOption) (*Variable, error) + // Creates a variable within the given configuration. You cannot create + // a variable with a name that is a prefix of an existing variable name, or a + // name that has an existing variable name as a prefix. + // + // To learn more about creating a variable, read the + // [Setting and Getting + // Data](/deployment-manager/runtime-configurator/set-and-get-variables) + // documentation. + CreateVariable(ctx context.Context, in *CreateVariableRequest, opts ...grpc.CallOption) (*Variable, error) + // Updates an existing variable with a new value. + UpdateVariable(ctx context.Context, in *UpdateVariableRequest, opts ...grpc.CallOption) (*Variable, error) + // Deletes a variable or multiple variables. + // + // If you specify a variable name, then that variable is deleted. 
If you + // specify a prefix and `recursive` is true, then all variables with that + // prefix are deleted. You must set a `recursive` to true if you delete + // variables by prefix. + DeleteVariable(ctx context.Context, in *DeleteVariableRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // List waiters within the given configuration. + ListWaiters(ctx context.Context, in *ListWaitersRequest, opts ...grpc.CallOption) (*ListWaitersResponse, error) + // Gets information about a single waiter. + GetWaiter(ctx context.Context, in *GetWaiterRequest, opts ...grpc.CallOption) (*Waiter, error) + // Creates a Waiter resource. This operation returns a long-running Operation + // resource which can be polled for completion. However, a waiter with the + // given name will exist (and can be retrieved) prior to the operation + // completing. If the operation fails, the failed Waiter resource will + // still exist and must be deleted prior to subsequent creation attempts. + CreateWaiter(ctx context.Context, in *CreateWaiterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes the waiter with the specified name. + DeleteWaiter(ctx context.Context, in *DeleteWaiterRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type runtimeConfigManagerClient struct { + cc *grpc.ClientConn +} + +func NewRuntimeConfigManagerClient(cc *grpc.ClientConn) RuntimeConfigManagerClient { + return &runtimeConfigManagerClient{cc} +} + +func (c *runtimeConfigManagerClient) ListConfigs(ctx context.Context, in *ListConfigsRequest, opts ...grpc.CallOption) (*ListConfigsResponse, error) { + out := new(ListConfigsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) GetConfig(ctx context.Context, in *GetConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) { + out := new(RuntimeConfig) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) CreateConfig(ctx context.Context, in *CreateConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) { + out := new(RuntimeConfig) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) UpdateConfig(ctx context.Context, in *UpdateConfigRequest, opts ...grpc.CallOption) (*RuntimeConfig, error) { + out := new(RuntimeConfig) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/UpdateConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) DeleteConfig(ctx context.Context, in *DeleteConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) ListVariables(ctx context.Context, in *ListVariablesRequest, opts ...grpc.CallOption) (*ListVariablesResponse, error) { + out := new(ListVariablesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListVariables", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) GetVariable(ctx context.Context, in *GetVariableRequest, opts ...grpc.CallOption) (*Variable, error) { + out := new(Variable) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetVariable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) WatchVariable(ctx context.Context, in *WatchVariableRequest, opts ...grpc.CallOption) (*Variable, error) { + out := new(Variable) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/WatchVariable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) CreateVariable(ctx context.Context, in *CreateVariableRequest, opts ...grpc.CallOption) (*Variable, error) { + out := new(Variable) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateVariable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) UpdateVariable(ctx context.Context, in *UpdateVariableRequest, opts ...grpc.CallOption) (*Variable, error) { + out := new(Variable) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/UpdateVariable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) DeleteVariable(ctx context.Context, in *DeleteVariableRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteVariable", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) ListWaiters(ctx context.Context, in *ListWaitersRequest, opts ...grpc.CallOption) (*ListWaitersResponse, error) { + out := new(ListWaitersResponse) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListWaiters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) GetWaiter(ctx context.Context, in *GetWaiterRequest, opts ...grpc.CallOption) (*Waiter, error) { + out := new(Waiter) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetWaiter", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) CreateWaiter(ctx context.Context, in *CreateWaiterRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateWaiter", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *runtimeConfigManagerClient) DeleteWaiter(ctx context.Context, in *DeleteWaiterRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteWaiter", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// RuntimeConfigManagerServer is the server API for RuntimeConfigManager service. +type RuntimeConfigManagerServer interface { + // Lists all the RuntimeConfig resources within project. 
+ ListConfigs(context.Context, *ListConfigsRequest) (*ListConfigsResponse, error) + // Gets information about a RuntimeConfig resource. + GetConfig(context.Context, *GetConfigRequest) (*RuntimeConfig, error) + // Creates a new RuntimeConfig resource. The configuration name must be + // unique within project. + CreateConfig(context.Context, *CreateConfigRequest) (*RuntimeConfig, error) + // Updates a RuntimeConfig resource. The configuration must exist beforehand. + UpdateConfig(context.Context, *UpdateConfigRequest) (*RuntimeConfig, error) + // Deletes a RuntimeConfig resource. + DeleteConfig(context.Context, *DeleteConfigRequest) (*empty.Empty, error) + // Lists variables within given a configuration, matching any provided + // filters. This only lists variable names, not the values, unless + // `return_values` is true, in which case only variables that user has IAM + // permission to GetVariable will be returned. + ListVariables(context.Context, *ListVariablesRequest) (*ListVariablesResponse, error) + // Gets information about a single variable. + GetVariable(context.Context, *GetVariableRequest) (*Variable, error) + // Watches a specific variable and waits for a change in the variable's value. + // When there is a change, this method returns the new value or times out. + // + // If a variable is deleted while being watched, the `variableState` state is + // set to `DELETED` and the method returns the last known variable `value`. + // + // If you set the deadline for watching to a larger value than internal + // timeout (60 seconds), the current variable value is returned and the + // `variableState` will be `VARIABLE_STATE_UNSPECIFIED`. + // + // To learn more about creating a watcher, read the + // [Watching a Variable for + // Changes](/deployment-manager/runtime-configurator/watching-a-variable) + // documentation. + WatchVariable(context.Context, *WatchVariableRequest) (*Variable, error) + // Creates a variable within the given configuration. You cannot create + // a variable with a name that is a prefix of an existing variable name, or a + // name that has an existing variable name as a prefix. + // + // To learn more about creating a variable, read the + // [Setting and Getting + // Data](/deployment-manager/runtime-configurator/set-and-get-variables) + // documentation. + CreateVariable(context.Context, *CreateVariableRequest) (*Variable, error) + // Updates an existing variable with a new value. + UpdateVariable(context.Context, *UpdateVariableRequest) (*Variable, error) + // Deletes a variable or multiple variables. + // + // If you specify a variable name, then that variable is deleted. If you + // specify a prefix and `recursive` is true, then all variables with that + // prefix are deleted. You must set a `recursive` to true if you delete + // variables by prefix. + DeleteVariable(context.Context, *DeleteVariableRequest) (*empty.Empty, error) + // List waiters within the given configuration. + ListWaiters(context.Context, *ListWaitersRequest) (*ListWaitersResponse, error) + // Gets information about a single waiter. + GetWaiter(context.Context, *GetWaiterRequest) (*Waiter, error) + // Creates a Waiter resource. This operation returns a long-running Operation + // resource which can be polled for completion. However, a waiter with the + // given name will exist (and can be retrieved) prior to the operation + // completing. If the operation fails, the failed Waiter resource will + // still exist and must be deleted prior to subsequent creation attempts. 
+ CreateWaiter(context.Context, *CreateWaiterRequest) (*longrunning.Operation, error) + // Deletes the waiter with the specified name. + DeleteWaiter(context.Context, *DeleteWaiterRequest) (*empty.Empty, error) +} + +func RegisterRuntimeConfigManagerServer(s *grpc.Server, srv RuntimeConfigManagerServer) { + s.RegisterService(&_RuntimeConfigManager_serviceDesc, srv) +} + +func _RuntimeConfigManager_ListConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).ListConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).ListConfigs(ctx, req.(*ListConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_GetConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).GetConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).GetConfig(ctx, req.(*GetConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_CreateConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).CreateConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).CreateConfig(ctx, req.(*CreateConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_UpdateConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).UpdateConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/UpdateConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).UpdateConfig(ctx, req.(*UpdateConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_DeleteConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(RuntimeConfigManagerServer).DeleteConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).DeleteConfig(ctx, req.(*DeleteConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_ListVariables_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListVariablesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).ListVariables(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListVariables", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).ListVariables(ctx, req.(*ListVariablesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_GetVariable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVariableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).GetVariable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetVariable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).GetVariable(ctx, req.(*GetVariableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_WatchVariable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WatchVariableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).WatchVariable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/WatchVariable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).WatchVariable(ctx, req.(*WatchVariableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_CreateVariable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVariableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).CreateVariable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateVariable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).CreateVariable(ctx, req.(*CreateVariableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_UpdateVariable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateVariableRequest) + if err 
:= dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).UpdateVariable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/UpdateVariable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).UpdateVariable(ctx, req.(*UpdateVariableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_DeleteVariable_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteVariableRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).DeleteVariable(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteVariable", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).DeleteVariable(ctx, req.(*DeleteVariableRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_ListWaiters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListWaitersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).ListWaiters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/ListWaiters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).ListWaiters(ctx, req.(*ListWaitersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_GetWaiter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWaiterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).GetWaiter(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/GetWaiter", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).GetWaiter(ctx, req.(*GetWaiterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_CreateWaiter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateWaiterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).CreateWaiter(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/CreateWaiter", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).CreateWaiter(ctx, req.(*CreateWaiterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _RuntimeConfigManager_DeleteWaiter_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) 
(interface{}, error) { + in := new(DeleteWaiterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RuntimeConfigManagerServer).DeleteWaiter(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager/DeleteWaiter", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RuntimeConfigManagerServer).DeleteWaiter(ctx, req.(*DeleteWaiterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _RuntimeConfigManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.runtimeconfig.v1beta1.RuntimeConfigManager", + HandlerType: (*RuntimeConfigManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListConfigs", + Handler: _RuntimeConfigManager_ListConfigs_Handler, + }, + { + MethodName: "GetConfig", + Handler: _RuntimeConfigManager_GetConfig_Handler, + }, + { + MethodName: "CreateConfig", + Handler: _RuntimeConfigManager_CreateConfig_Handler, + }, + { + MethodName: "UpdateConfig", + Handler: _RuntimeConfigManager_UpdateConfig_Handler, + }, + { + MethodName: "DeleteConfig", + Handler: _RuntimeConfigManager_DeleteConfig_Handler, + }, + { + MethodName: "ListVariables", + Handler: _RuntimeConfigManager_ListVariables_Handler, + }, + { + MethodName: "GetVariable", + Handler: _RuntimeConfigManager_GetVariable_Handler, + }, + { + MethodName: "WatchVariable", + Handler: _RuntimeConfigManager_WatchVariable_Handler, + }, + { + MethodName: "CreateVariable", + Handler: _RuntimeConfigManager_CreateVariable_Handler, + }, + { + MethodName: "UpdateVariable", + Handler: _RuntimeConfigManager_UpdateVariable_Handler, + }, + { + MethodName: "DeleteVariable", + Handler: _RuntimeConfigManager_DeleteVariable_Handler, + }, + { + MethodName: "ListWaiters", + Handler: _RuntimeConfigManager_ListWaiters_Handler, + }, + { + MethodName: "GetWaiter", + Handler: _RuntimeConfigManager_GetWaiter_Handler, + }, + { + MethodName: "CreateWaiter", + Handler: _RuntimeConfigManager_CreateWaiter_Handler, + }, + { + MethodName: "DeleteWaiter", + Handler: _RuntimeConfigManager_DeleteWaiter_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto", +} + +func init() { + proto.RegisterFile("google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto", fileDescriptor_runtimeconfig_eced5b419d927b84) +} + +var fileDescriptor_runtimeconfig_eced5b419d927b84 = []byte{ + // 1158 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x98, 0x4b, 0x6f, 0xdd, 0x44, + 0x14, 0xc7, 0x35, 0x49, 0x9b, 0xe6, 0x9e, 0x3c, 0x40, 0x93, 0x87, 0x22, 0xb7, 0x15, 0x91, 0x8b, + 0xa2, 0x70, 0x55, 0xd9, 0x4d, 0x5a, 0xa5, 0x49, 0xa0, 0x2c, 0x92, 0xa2, 0x10, 0x1e, 0x6a, 0x65, + 0x42, 0x2a, 0xa1, 0x48, 0xd1, 0xe4, 0x66, 0xe2, 0x18, 0x6e, 0xc6, 0xc6, 0x1e, 0x27, 0x50, 0x94, + 0x0d, 0xec, 0x40, 0x48, 0x48, 0x2c, 0xca, 0x8a, 0x05, 0x12, 0x20, 0x21, 0x84, 0x58, 0xb1, 0x41, + 0x74, 0xc7, 0x86, 0x2d, 0x12, 0x9f, 0x80, 0x0f, 0x82, 0x3c, 0x0f, 0x5f, 0xfb, 0xe6, 0x3e, 0xc6, + 0x21, 0xdd, 0x25, 0xe3, 0xf3, 0xf8, 0xcd, 0x99, 0x33, 0xf3, 0x3f, 0xba, 0xb0, 0xe4, 0x87, 0xa1, + 0xdf, 0xa4, 0x6e, 0xa3, 0x19, 0xa6, 0xfb, 0x6e, 0x9c, 0x32, 0x1e, 0x1c, 0xd1, 0x46, 0xc8, 0x0e, + 0x02, 0xdf, 0x3d, 0x5e, 0xd8, 0xa3, 0x9c, 0x2c, 0x94, 0x57, 0x9d, 0x28, 0x0e, 0x79, 0x88, 0x6d, + 0xe9, 0xe7, 0x08, 0x3f, 0xa7, 0x6c, 0xa1, 0xfc, 0xac, 0x6b, 0x2a, 0x36, 0x89, 0x02, 0x97, 0x30, + 0x16, 0x72, 
0xc2, 0x83, 0x90, 0x25, 0x32, 0x82, 0xb5, 0x68, 0x92, 0x99, 0x26, 0x61, 0x1a, 0x37, + 0xa8, 0xf6, 0xb9, 0xa1, 0x7c, 0x9a, 0x21, 0xf3, 0xe3, 0x94, 0xb1, 0x80, 0xf9, 0x6e, 0x18, 0xd1, + 0xb8, 0x14, 0xf8, 0xaa, 0x32, 0x12, 0xff, 0xed, 0xa5, 0x07, 0x2e, 0x3d, 0x8a, 0xf8, 0xc7, 0xea, + 0xe3, 0x0b, 0xed, 0x1f, 0xb3, 0xac, 0x09, 0x27, 0x47, 0x91, 0x34, 0xb0, 0x0f, 0x01, 0xbf, 0x15, + 0x24, 0x7c, 0x5d, 0x80, 0x24, 0x1e, 0xfd, 0x30, 0xa5, 0x09, 0xc7, 0xd3, 0x30, 0x14, 0x91, 0x98, + 0x32, 0x3e, 0x83, 0x66, 0xd1, 0x7c, 0xcd, 0x53, 0xff, 0xe1, 0xab, 0x50, 0x8b, 0x88, 0x4f, 0x77, + 0x93, 0xe0, 0x31, 0x9d, 0x19, 0x98, 0x45, 0xf3, 0x97, 0xbd, 0xe1, 0x6c, 0xe1, 0x9d, 0xe0, 0x31, + 0xc5, 0xd7, 0x01, 0xc4, 0x47, 0x1e, 0x7e, 0x40, 0xd9, 0xcc, 0xa0, 0x70, 0x14, 0xe6, 0x5b, 0xd9, + 0x82, 0xfd, 0x39, 0x82, 0x89, 0x52, 0xaa, 0x24, 0x0a, 0x59, 0x42, 0xf1, 0x9b, 0x70, 0x45, 0x96, + 0x21, 0x99, 0x41, 0xb3, 0x83, 0xf3, 0x23, 0x8b, 0x0b, 0x4e, 0xff, 0x62, 0x3b, 0x9e, 0x5c, 0x95, + 0xc1, 0x3c, 0x1d, 0x01, 0xcf, 0xc1, 0x73, 0x8c, 0x7e, 0xc4, 0x77, 0x0b, 0x20, 0x03, 0x02, 0x64, + 0x2c, 0x5b, 0x7e, 0x98, 0xc3, 0xcc, 0xc1, 0xf3, 0x1b, 0x54, 0xa1, 0xe8, 0x4d, 0x63, 0xb8, 0xc4, + 0xc8, 0x11, 0x55, 0x0e, 0xe2, 0x6f, 0xfb, 0x09, 0x82, 0x89, 0xf5, 0x98, 0x12, 0x4e, 0xcb, 0xb6, + 0xdd, 0x0a, 0xb4, 0x09, 0x43, 0x12, 0x45, 0x44, 0x39, 0xd7, 0x5e, 0x54, 0x80, 0xac, 0x9c, 0xb1, + 0xcc, 0xb6, 0x1b, 0xec, 0xeb, 0x72, 0xaa, 0x95, 0xcd, 0x7d, 0x9b, 0xc3, 0xc4, 0xbb, 0xd1, 0xfe, + 0x19, 0x30, 0xbd, 0x09, 0xd4, 0xda, 0xc4, 0x05, 0x42, 0xd9, 0x2f, 0xc1, 0xc4, 0x7d, 0xda, 0xa4, + 0x06, 0x59, 0xed, 0x1f, 0x11, 0x4c, 0x66, 0xe7, 0xbd, 0x4d, 0xe2, 0x80, 0xec, 0x35, 0x69, 0xdf, + 0xe6, 0x9a, 0x86, 0xa1, 0x83, 0xa0, 0xc9, 0x69, 0xac, 0x4e, 0x40, 0xfd, 0x57, 0x6e, 0xba, 0xc1, + 0x9e, 0x4d, 0x77, 0xa9, 0xad, 0xe9, 0xf0, 0x0d, 0x18, 0x8b, 0x29, 0x4f, 0x63, 0xb6, 0x7b, 0x4c, + 0x9a, 0x29, 0x4d, 0x66, 0x2e, 0xcf, 0xa2, 0xf9, 0x61, 0x6f, 0x54, 0x2e, 0x6e, 0x8b, 0x35, 0xfb, + 0x0b, 0x04, 0x53, 0x6d, 0xa4, 0xaa, 0x37, 0xdf, 0x80, 0xda, 0xb1, 0x5e, 0x54, 0xdd, 0x79, 0xd3, + 0xa4, 0x78, 0x3a, 0x92, 0xd7, 0x72, 0x37, 0x6e, 0x4d, 0x0a, 0x93, 0x8f, 0x08, 0x6f, 0x1c, 0xe6, + 0x31, 0x7a, 0x9c, 0xec, 0x0a, 0x00, 0xa3, 0x27, 0x34, 0xde, 0xe5, 0x87, 0x44, 0xee, 0x7e, 0x64, + 0xd1, 0xd2, 0x80, 0xfa, 0xce, 0x3b, 0x5b, 0xfa, 0xce, 0x7b, 0x35, 0x61, 0xbd, 0x75, 0x48, 0x98, + 0x3d, 0x0f, 0x78, 0x83, 0x72, 0x83, 0x24, 0xf6, 0x37, 0x08, 0xa6, 0xe4, 0x1d, 0x68, 0xb7, 0xee, + 0x76, 0x92, 0xaf, 0xc3, 0xb0, 0xde, 0xb7, 0x6a, 0xb9, 0x6a, 0x55, 0xcb, 0xbd, 0xfb, 0x5d, 0x82, + 0x14, 0xa6, 0xe4, 0x25, 0x30, 0x29, 0xd6, 0x85, 0x51, 0xd9, 0x9b, 0x30, 0x25, 0x6f, 0x81, 0x49, + 0xda, 0x6b, 0x50, 0x8b, 0x69, 0x23, 0x8d, 0x93, 0xe0, 0x58, 0xe6, 0x1d, 0xf6, 0x5a, 0x0b, 0xfa, + 0xfd, 0x7d, 0x44, 0x02, 0x4e, 0xe3, 0x67, 0xfa, 0xfe, 0x7e, 0xa6, 0xde, 0xdf, 0x3c, 0x95, 0xea, + 0xf1, 0xfb, 0x70, 0xe5, 0x44, 0x2e, 0xa9, 0x0e, 0xaf, 0x9b, 0x54, 0x45, 0x46, 0xf1, 0xb4, 0x6b, + 0xc5, 0x87, 0x57, 0x79, 0xf7, 0x68, 0xba, 0xaf, 0xf2, 0x87, 0xb7, 0x6c, 0xdb, 0xad, 0x32, 0x6b, + 0x30, 0x24, 0x51, 0xd4, 0xd1, 0x56, 0xd9, 0x84, 0xf2, 0xec, 0xd7, 0x6c, 0xf9, 0xdb, 0xd7, 0x97, + 0x7e, 0xf1, 0x9f, 0x49, 0x98, 0x2c, 0x3d, 0xa0, 0x6f, 0x13, 0x46, 0x7c, 0x1a, 0xe3, 0x9f, 0x11, + 0x8c, 0x14, 0x44, 0x10, 0x2f, 0x99, 0x60, 0x9e, 0x15, 0x68, 0xeb, 0x6e, 0x65, 0x3f, 0x79, 0xda, + 0xf6, 0xcd, 0x4f, 0xff, 0xfe, 0xf7, 0xeb, 0x81, 0x39, 0xfc, 0x62, 0x3e, 0x74, 0x7c, 0x22, 0x2b, + 0x78, 0x2f, 0x8a, 0xc3, 0xf7, 0x69, 0x83, 0x27, 0x6e, 0xfd, 0xd4, 0xd5, 0x72, 0xfa, 0x1d, 0x82, + 0x5a, 0xae, 0x93, 0xf8, 0x8e, 0x49, 
0xd2, 0x76, 0x59, 0xb5, 0xaa, 0xab, 0x4d, 0x27, 0xc8, 0xac, + 0xac, 0x05, 0x44, 0x4d, 0xe8, 0xd6, 0x4f, 0xf1, 0xaf, 0x08, 0x46, 0x8b, 0x1a, 0x8d, 0x8d, 0x8a, + 0xd3, 0x41, 0xd5, 0xcf, 0x83, 0x7a, 0x47, 0xa0, 0x3a, 0xb6, 0x51, 0x3d, 0x57, 0xb5, 0xb6, 0x67, + 0xc8, 0x45, 0xf5, 0x36, 0x43, 0xee, 0xa0, 0xf7, 0xff, 0x03, 0xd9, 0x32, 0xaa, 0x6e, 0x8e, 0xfc, + 0x25, 0x82, 0xd1, 0xa2, 0xf4, 0x9b, 0x21, 0x77, 0x18, 0x16, 0xac, 0xe9, 0x33, 0x02, 0xf5, 0x5a, + 0x36, 0xb1, 0xea, 0x53, 0xaf, 0x9b, 0x9d, 0xfa, 0x53, 0x04, 0x63, 0x25, 0xd1, 0xc6, 0xcb, 0xa6, + 0x77, 0xa2, 0x7d, 0x22, 0xb1, 0x56, 0xce, 0xe1, 0xa9, 0xee, 0xd3, 0xb2, 0x80, 0x5e, 0xc4, 0xb7, + 0x7a, 0x9c, 0x7f, 0x01, 0xdb, 0x6d, 0xcd, 0x03, 0xbf, 0x20, 0x18, 0x29, 0x28, 0xb0, 0xd9, 0x53, + 0x70, 0x56, 0xb2, 0xad, 0x4a, 0x22, 0x66, 0xaf, 0x08, 0xde, 0xdb, 0x78, 0xc1, 0xa0, 0xc8, 0x2d, + 0x58, 0xb7, 0x5e, 0x3f, 0xc5, 0xbf, 0x23, 0x18, 0x2b, 0x4d, 0x26, 0x66, 0x15, 0xef, 0x34, 0xcc, + 0x54, 0x84, 0x5e, 0x13, 0xd0, 0xaf, 0xd8, 0x77, 0x2b, 0x43, 0xaf, 0x9e, 0x64, 0xd9, 0x57, 0x51, + 0x1d, 0xff, 0x81, 0x60, 0xbc, 0x3c, 0xc5, 0xe0, 0x15, 0xf3, 0x77, 0xe2, 0x62, 0xf8, 0x2b, 0x37, + 0xc9, 0x6a, 0x6b, 0x12, 0x7a, 0x8a, 0x60, 0xbc, 0x3c, 0xeb, 0x98, 0xf1, 0x77, 0x9c, 0x8f, 0x2a, + 0xf2, 0xaf, 0x0b, 0xfe, 0x7b, 0x56, 0xf5, 0xa6, 0x29, 0x6c, 0xe0, 0x5b, 0x04, 0xe3, 0xe5, 0xa9, + 0xc9, 0x6c, 0x03, 0x1d, 0x27, 0xad, 0xae, 0x8f, 0x88, 0xea, 0xef, 0xfa, 0x39, 0xfa, 0xfb, 0x37, + 0xa5, 0xcd, 0x6a, 0x40, 0x32, 0xd7, 0xe6, 0xf2, 0xf0, 0x66, 0xae, 0xcd, 0x6d, 0x93, 0x98, 0xbd, + 0x24, 0xd8, 0x6f, 0x61, 0xc7, 0xb0, 0x4d, 0xf4, 0xec, 0xf5, 0xbd, 0x54, 0x69, 0x19, 0xce, 0x58, + 0xa5, 0x4b, 0x53, 0x8c, 0x55, 0x61, 0x5e, 0xea, 0xc4, 0xd9, 0xbd, 0xc6, 0x0a, 0x32, 0x7b, 0xb2, + 0x7f, 0xc8, 0x85, 0x5a, 0xa1, 0x56, 0x10, 0xea, 0x32, 0xed, 0x75, 0xed, 0x58, 0xf8, 0x65, 0xc4, + 0x79, 0xa0, 0x7f, 0x19, 0xb1, 0x5f, 0x15, 0x80, 0xcb, 0x76, 0xc5, 0x42, 0xae, 0xea, 0x41, 0xf0, + 0x49, 0xae, 0x75, 0x55, 0x40, 0x3b, 0x0c, 0x87, 0x5d, 0xdb, 0x54, 0x95, 0xb0, 0x5e, 0xb1, 0x84, + 0x6b, 0x7f, 0x22, 0x98, 0x6b, 0x84, 0x47, 0x06, 0x38, 0x0f, 0xd1, 0x7b, 0x0f, 0x94, 0x95, 0x1f, + 0x36, 0x09, 0xf3, 0x9d, 0x30, 0xf6, 0x5d, 0x9f, 0x32, 0x41, 0xe2, 0xca, 0x4f, 0x24, 0x0a, 0x92, + 0x5e, 0xbf, 0x48, 0xbd, 0x5c, 0x5a, 0xfd, 0x69, 0xc0, 0xde, 0x90, 0x11, 0xd7, 0x45, 0xde, 0xd2, + 0x58, 0xe1, 0x6c, 0x2f, 0xac, 0x65, 0x2e, 0x7f, 0x69, 0xa3, 0x1d, 0x61, 0xb4, 0x53, 0x32, 0xda, + 0xd9, 0x96, 0x71, 0xf7, 0x86, 0x04, 0xc5, 0xed, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x9f, 0x36, + 0x17, 0x5a, 0x90, 0x13, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/cloudscheduler.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/cloudscheduler.pb.go new file mode 100644 index 0000000..81b7f87 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/cloudscheduler.pb.go @@ -0,0 +1,911 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/scheduler/v1/cloudscheduler.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
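
Before the scheduler messages below, a brief usage sketch for the RuntimeConfigManager client generated above: it dials a connection, builds the client with `NewRuntimeConfigManagerClient`, and follows `nextPageToken` until `ListWaiters` is exhausted. This is an illustration only, not part of the patch; the endpoint, parent path, and insecure dial option are hypothetical stand-ins for a properly authenticated connection to the real service.

    // listwaiters_sketch.go: page through waiters with the generated client.
    package main

    import (
    	"context"
    	"fmt"
    	"log"

    	runtimeconfig "google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1"
    	"google.golang.org/grpc"
    )

    func main() {
    	// Hypothetical endpoint; a real caller would dial the Google endpoint
    	// with credentials rather than an insecure local connection.
    	conn, err := grpc.Dial("localhost:8080", grpc.WithInsecure())
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close()

    	client := runtimeconfig.NewRuntimeConfigManagerClient(conn)
    	req := &runtimeconfig.ListWaitersRequest{
    		Parent:   "projects/my-project/configs/my-config", // hypothetical
    		PageSize: 100,
    	}
    	for {
    		resp, err := client.ListWaiters(context.Background(), req)
    		if err != nil {
    			log.Fatal(err)
    		}
    		for _, w := range resp.GetWaiters() {
    			fmt.Println(w)
    		}
    		// An empty next_page_token means there are no further pages.
    		if resp.GetNextPageToken() == "" {
    			break
    		}
    		req.PageToken = resp.GetNextPageToken()
    	}
    }
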
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. +type ListJobsRequest struct { + // Required. + // + // The location name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Requested page size. + // + // The maximum page size is 500. If unspecified, the page size will + // be the maximum. Fewer jobs than requested might be returned, + // even if more jobs exist; use next_page_token to determine if more + // jobs exist. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results the server will return. To + // request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.scheduler.v1.ListJobsResponse.next_page_token] returned from + // the previous call to [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. It is an error to + // switch the value of [filter][google.cloud.scheduler.v1.ListJobsRequest.filter] or + // [order_by][google.cloud.scheduler.v1.ListJobsRequest.order_by] while iterating through pages. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{0} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for listing jobs using [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. +type ListJobsResponse struct { + // The list of jobs. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // A token to retrieve next page of results. 
Pass this value in the + // [page_token][google.cloud.scheduler.v1.ListJobsRequest.page_token] field in the subsequent call to + // [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs] to retrieve the next page of results. + // If this is empty it indicates that there are no more results + // through which to paginate. + // + // The page token is valid for only 2 hours. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{1} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. +type GetJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{2} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. +type CreateJobRequest struct { + // Required. + // + // The location name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The job to add. The user can optionally specify a name for the + // job in [name][google.cloud.scheduler.v1.Job.name]. [name][google.cloud.scheduler.v1.Job.name] cannot be the same as an + // existing job. If a name is not specified then the system will + // generate a random unique name that will be returned + // ([name][google.cloud.scheduler.v1.Job.name]) in the response. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateJobRequest) Reset() { *m = CreateJobRequest{} } +func (m *CreateJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateJobRequest) ProtoMessage() {} +func (*CreateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{3} +} +func (m *CreateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateJobRequest.Unmarshal(m, b) +} +func (m *CreateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateJobRequest.Merge(dst, src) +} +func (m *CreateJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateJobRequest.Size(m) +} +func (m *CreateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateJobRequest proto.InternalMessageInfo + +func (m *CreateJobRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Request message for [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. +type UpdateJobRequest struct { + // Required. + // + // The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. + // + // Output only fields cannot be modified using UpdateJob. + // Any value specified for an output only field will be ignored. + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + // A mask used to specify which fields of the job are being updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} } +func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobRequest) ProtoMessage() {} +func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{4} +} +func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b) +} +func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobRequest.Merge(dst, src) +} +func (m *UpdateJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobRequest.Size(m) +} +func (m *UpdateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo + +func (m *UpdateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for deleting a job using +// [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. +type DeleteJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } +func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobRequest) ProtoMessage() {} +func (*DeleteJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{5} +} +func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) +} +func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobRequest.Merge(dst, src) +} +func (m *DeleteJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobRequest.Size(m) +} +func (m *DeleteJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo + +func (m *DeleteJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. +type PauseJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseJobRequest) Reset() { *m = PauseJobRequest{} } +func (m *PauseJobRequest) String() string { return proto.CompactTextString(m) } +func (*PauseJobRequest) ProtoMessage() {} +func (*PauseJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{6} +} +func (m *PauseJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseJobRequest.Unmarshal(m, b) +} +func (m *PauseJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseJobRequest.Marshal(b, m, deterministic) +} +func (dst *PauseJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseJobRequest.Merge(dst, src) +} +func (m *PauseJobRequest) XXX_Size() int { + return xxx_messageInfo_PauseJobRequest.Size(m) +} +func (m *PauseJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseJobRequest proto.InternalMessageInfo + +func (m *PauseJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. +type ResumeJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeJobRequest) Reset() { *m = ResumeJobRequest{} } +func (m *ResumeJobRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeJobRequest) ProtoMessage() {} +func (*ResumeJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{7} +} +func (m *ResumeJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeJobRequest.Unmarshal(m, b) +} +func (m *ResumeJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeJobRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeJobRequest.Merge(dst, src) +} +func (m *ResumeJobRequest) XXX_Size() int { + return xxx_messageInfo_ResumeJobRequest.Size(m) +} +func (m *ResumeJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeJobRequest proto.InternalMessageInfo + +func (m *ResumeJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for forcing a job to run now using +// [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. +type RunJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunJobRequest) Reset() { *m = RunJobRequest{} } +func (m *RunJobRequest) String() string { return proto.CompactTextString(m) } +func (*RunJobRequest) ProtoMessage() {} +func (*RunJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_0488ab591dd1e45d, []int{8} +} +func (m *RunJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunJobRequest.Unmarshal(m, b) +} +func (m *RunJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunJobRequest.Marshal(b, m, deterministic) +} +func (dst *RunJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunJobRequest.Merge(dst, src) +} +func (m *RunJobRequest) XXX_Size() int { + return xxx_messageInfo_RunJobRequest.Size(m) +} +func (m *RunJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunJobRequest proto.InternalMessageInfo + +func (m *RunJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.scheduler.v1.ListJobsRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.scheduler.v1.ListJobsResponse") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.scheduler.v1.GetJobRequest") + proto.RegisterType((*CreateJobRequest)(nil), "google.cloud.scheduler.v1.CreateJobRequest") + proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.scheduler.v1.UpdateJobRequest") + proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.scheduler.v1.DeleteJobRequest") + proto.RegisterType((*PauseJobRequest)(nil), "google.cloud.scheduler.v1.PauseJobRequest") + proto.RegisterType((*ResumeJobRequest)(nil), "google.cloud.scheduler.v1.ResumeJobRequest") + proto.RegisterType((*RunJobRequest)(nil), "google.cloud.scheduler.v1.RunJobRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudSchedulerClient is the client API for CloudScheduler service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudSchedulerClient interface { + // Lists jobs. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Gets a job. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Creates a job. + CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Updates a job. + // + // If successful, the updated [Job][google.cloud.scheduler.v1.Job] is returned. If the job does + // not exist, `NOT_FOUND` is returned. + // + // If UpdateJob does not successfully return, it is possible for the + // job to be in an [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] state. A job in this state may + // not be executed. If this happens, retry the UpdateJob request + // until a successful response is received. 
+ UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Deletes a job. + DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Pauses a job. + // + // If a job is paused then the system will stop executing the job + // until it is re-enabled via [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. The + // state of the job is stored in [state][google.cloud.scheduler.v1.Job.state]; if paused it + // will be set to [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. A job must be in [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + // to be paused. + PauseJob(ctx context.Context, in *PauseJobRequest, opts ...grpc.CallOption) (*Job, error) + // Resume a job. + // + // This method reenables a job after it has been [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. The + // state of a job is stored in [Job.state][google.cloud.scheduler.v1.Job.state]; after calling this method it + // will be set to [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED]. A job must be in + // [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED] to be resumed. + ResumeJob(ctx context.Context, in *ResumeJobRequest, opts ...grpc.CallOption) (*Job, error) + // Forces a job to run now. + // + // When this method is called, Cloud Scheduler will dispatch the job, even + // if the job is already running. + RunJob(ctx context.Context, in *RunJobRequest, opts ...grpc.CallOption) (*Job, error) +} + +type cloudSchedulerClient struct { + cc *grpc.ClientConn +} + +func NewCloudSchedulerClient(cc *grpc.ClientConn) CloudSchedulerClient { + return &cloudSchedulerClient{cc} +} + +func (c *cloudSchedulerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/CreateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/UpdateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/DeleteJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) PauseJob(ctx context.Context, in *PauseJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/PauseJob", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) ResumeJob(ctx context.Context, in *ResumeJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/ResumeJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) RunJob(ctx context.Context, in *RunJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1.CloudScheduler/RunJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudSchedulerServer is the server API for CloudScheduler service. +type CloudSchedulerServer interface { + // Lists jobs. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Gets a job. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Creates a job. + CreateJob(context.Context, *CreateJobRequest) (*Job, error) + // Updates a job. + // + // If successful, the updated [Job][google.cloud.scheduler.v1.Job] is returned. If the job does + // not exist, `NOT_FOUND` is returned. + // + // If UpdateJob does not successfully return, it is possible for the + // job to be in an [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1.Job.State.UPDATE_FAILED] state. A job in this state may + // not be executed. If this happens, retry the UpdateJob request + // until a successful response is received. + UpdateJob(context.Context, *UpdateJobRequest) (*Job, error) + // Deletes a job. + DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error) + // Pauses a job. + // + // If a job is paused then the system will stop executing the job + // until it is re-enabled via [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. The + // state of the job is stored in [state][google.cloud.scheduler.v1.Job.state]; if paused it + // will be set to [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. A job must be in [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED] + // to be paused. + PauseJob(context.Context, *PauseJobRequest) (*Job, error) + // Resume a job. + // + // This method reenables a job after it has been [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED]. The + // state of a job is stored in [Job.state][google.cloud.scheduler.v1.Job.state]; after calling this method it + // will be set to [Job.State.ENABLED][google.cloud.scheduler.v1.Job.State.ENABLED]. A job must be in + // [Job.State.PAUSED][google.cloud.scheduler.v1.Job.State.PAUSED] to be resumed. + ResumeJob(context.Context, *ResumeJobRequest) (*Job, error) + // Forces a job to run now. + // + // When this method is called, Cloud Scheduler will dispatch the job, even + // if the job is already running. 
+ RunJob(context.Context, *RunJobRequest) (*Job, error) +} + +func RegisterCloudSchedulerServer(s *grpc.Server, srv CloudSchedulerServer) { + s.RegisterService(&_CloudScheduler_serviceDesc, srv) +} + +func _CloudScheduler_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).CreateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/CreateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).CreateJob(ctx, req.(*CreateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).UpdateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/UpdateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).UpdateJob(ctx, req.(*UpdateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/DeleteJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_CloudScheduler_PauseJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).PauseJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/PauseJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).PauseJob(ctx, req.(*PauseJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_ResumeJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).ResumeJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/ResumeJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).ResumeJob(ctx, req.(*ResumeJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_RunJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).RunJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1.CloudScheduler/RunJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).RunJob(ctx, req.(*RunJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudScheduler_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.scheduler.v1.CloudScheduler", + HandlerType: (*CloudSchedulerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListJobs", + Handler: _CloudScheduler_ListJobs_Handler, + }, + { + MethodName: "GetJob", + Handler: _CloudScheduler_GetJob_Handler, + }, + { + MethodName: "CreateJob", + Handler: _CloudScheduler_CreateJob_Handler, + }, + { + MethodName: "UpdateJob", + Handler: _CloudScheduler_UpdateJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _CloudScheduler_DeleteJob_Handler, + }, + { + MethodName: "PauseJob", + Handler: _CloudScheduler_PauseJob_Handler, + }, + { + MethodName: "ResumeJob", + Handler: _CloudScheduler_ResumeJob_Handler, + }, + { + MethodName: "RunJob", + Handler: _CloudScheduler_RunJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/scheduler/v1/cloudscheduler.proto", +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1/cloudscheduler.proto", fileDescriptor_cloudscheduler_0488ab591dd1e45d) +} + +var fileDescriptor_cloudscheduler_0488ab591dd1e45d = []byte{ + // 719 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0xcb, 0x4e, 0xdb, 0x4c, + 0x14, 0xc7, 0x35, 0x5c, 0x22, 0x7c, 0x10, 0x10, 0xcd, 0x02, 0x85, 0xf0, 0xf1, 0x29, 0x32, 0x2a, + 0x8a, 0x5c, 0xe4, 0x81, 0xd0, 0xaa, 0x6a, 0x50, 0x37, 0x5c, 0xda, 0x0a, 0x51, 0x29, 0x32, 0x65, + 0x53, 0x55, 0x42, 0x4e, 0x32, 0xb8, 0x0e, 0x8e, 0xc7, 0xf5, 0xd8, 0xa8, 0xa5, 0x62, 0x01, 
0xea, + 0xae, 0x8b, 0xaa, 0xa2, 0xbc, 0x40, 0x5f, 0xa9, 0xaf, 0xd0, 0x07, 0xa9, 0x66, 0x7c, 0x81, 0xb8, + 0xc4, 0xf6, 0x2e, 0x9e, 0xf3, 0x9f, 0x39, 0xbf, 0x39, 0xe7, 0x7f, 0x26, 0xa0, 0x5b, 0x8c, 0x59, + 0x0e, 0x25, 0x3d, 0x87, 0x85, 0x7d, 0xc2, 0x7b, 0x1f, 0x68, 0x3f, 0x74, 0xa8, 0x4f, 0xce, 0x37, + 0xa3, 0xa5, 0x74, 0x45, 0xf7, 0x7c, 0x16, 0x30, 0xbc, 0x14, 0xe9, 0x75, 0x19, 0xd4, 0xef, 0xa2, + 0xe7, 0x9b, 0xf5, 0xff, 0xe2, 0xa3, 0x4c, 0xcf, 0x26, 0xa6, 0xeb, 0xb2, 0xc0, 0x0c, 0x6c, 0xe6, + 0xf2, 0x68, 0x63, 0x7d, 0xe9, 0x5e, 0xd4, 0xa7, 0x9c, 0x85, 0x7e, 0x8f, 0xc6, 0xa1, 0xd5, 0xf1, + 0x0c, 0x03, 0xd6, 0x8d, 0x45, 0xcb, 0xb1, 0x48, 0x7e, 0x75, 0xc3, 0x53, 0x42, 0x87, 0x5e, 0xf0, + 0x39, 0x0e, 0x36, 0xb2, 0xc1, 0x53, 0x9b, 0x3a, 0xfd, 0x93, 0xa1, 0xc9, 0xcf, 0x22, 0x85, 0x4a, + 0x61, 0xe1, 0xd0, 0xe6, 0xc1, 0x01, 0xeb, 0x72, 0x83, 0x7e, 0x0c, 0x29, 0x0f, 0xf0, 0x22, 0x54, + 0x3c, 0xd3, 0xa7, 0x6e, 0x50, 0x43, 0x0d, 0xd4, 0x54, 0x8c, 0xf8, 0x0b, 0x2f, 0x83, 0xe2, 0x99, + 0x16, 0x3d, 0xe1, 0xf6, 0x05, 0xad, 0x4d, 0x37, 0x50, 0x73, 0xda, 0x98, 0x11, 0x0b, 0x47, 0xf6, + 0x05, 0xc5, 0x2b, 0x00, 0x32, 0x18, 0xb0, 0x33, 0xea, 0xd6, 0x2a, 0x72, 0xa3, 0x94, 0xbf, 0x15, + 0x0b, 0xaa, 0x0b, 0xd5, 0xbb, 0x34, 0xdc, 0x63, 0x2e, 0xa7, 0xb8, 0x05, 0x53, 0x03, 0xd6, 0xe5, + 0x35, 0xd4, 0x98, 0x6c, 0xce, 0xb6, 0xfe, 0xd7, 0xc7, 0x56, 0x50, 0x3f, 0x60, 0x5d, 0x43, 0x6a, + 0xf1, 0x1a, 0x2c, 0xb8, 0xf4, 0x53, 0x70, 0x72, 0x2f, 0xd7, 0x84, 0xcc, 0x35, 0x27, 0x96, 0x3b, + 0x69, 0xbe, 0x55, 0x98, 0x7b, 0x45, 0x45, 0xba, 0xe4, 0x52, 0x18, 0xa6, 0x5c, 0x73, 0x48, 0xe3, + 0x2b, 0xc9, 0xdf, 0xea, 0x7b, 0xa8, 0xee, 0xfa, 0xd4, 0x0c, 0xe8, 0x3d, 0xdd, 0xb8, 0xcb, 0x6f, + 0xc0, 0xe4, 0x80, 0x75, 0x65, 0xb2, 0x62, 0x56, 0x21, 0x55, 0xaf, 0x10, 0x54, 0x8f, 0xbd, 0xfe, + 0xe8, 0xf1, 0xf1, 0x31, 0xa8, 0xf4, 0x31, 0x78, 0x1b, 0x66, 0x43, 0x79, 0x8a, 0xec, 0x5a, 0x0c, + 0x50, 0x4f, 0x76, 0x26, 0x8d, 0xd5, 0x5f, 0x8a, 0xc6, 0xbe, 0x31, 0xf9, 0x99, 0x01, 0x91, 0x5c, + 0xfc, 0x56, 0xd7, 0xa0, 0xba, 0x47, 0x1d, 0x3a, 0x82, 0xf0, 0x50, 0x25, 0x1e, 0xc1, 0x42, 0xc7, + 0x0c, 0x79, 0x91, 0x6c, 0x0d, 0xaa, 0x06, 0xe5, 0xe1, 0xb0, 0x48, 0xb7, 0x0a, 0x73, 0x46, 0xe8, + 0xe6, 0x8b, 0x5a, 0xb7, 0x0a, 0xcc, 0xef, 0x8a, 0x8b, 0x1f, 0x25, 0xf7, 0xc6, 0xb7, 0x08, 0x66, + 0x12, 0x9b, 0x60, 0x2d, 0xa7, 0x3a, 0x19, 0xcb, 0xd6, 0x1f, 0x97, 0xd2, 0x46, 0xbe, 0x53, 0x37, + 0xae, 0x7f, 0xff, 0xb9, 0x99, 0xd0, 0x70, 0x53, 0x0c, 0xd2, 0x97, 0xa8, 0xbf, 0x2f, 0x3c, 0x9f, + 0x0d, 0x68, 0x2f, 0xe0, 0x44, 0x23, 0x0e, 0xeb, 0x45, 0x13, 0x4a, 0xb4, 0x4b, 0x22, 0x5d, 0xf7, + 0x15, 0x41, 0x25, 0xb2, 0x13, 0x6e, 0xe6, 0x64, 0x1a, 0x71, 0x5c, 0xbd, 0xa0, 0xbb, 0x19, 0x0c, + 0x51, 0x92, 0x31, 0x10, 0x92, 0x81, 0x68, 0x97, 0xf8, 0x3b, 0x02, 0x25, 0x35, 0x2c, 0xce, 0xbb, + 0x73, 0xd6, 0xd6, 0x85, 0x30, 0x4f, 0x25, 0x0c, 0x51, 0x4b, 0xd7, 0xa4, 0x2d, 0xcd, 0x79, 0x83, + 0x40, 0x49, 0x3d, 0x9e, 0x4b, 0x94, 0x9d, 0x84, 0x42, 0xa2, 0xe7, 0x92, 0x68, 0xab, 0xb5, 0x2e, + 0x89, 0xc4, 0x7b, 0x57, 0xa2, 0x44, 0x11, 0xd5, 0x35, 0x02, 0x25, 0xb5, 0x7d, 0x2e, 0x55, 0x76, + 0x38, 0xea, 0x8b, 0xff, 0x0c, 0xd6, 0xbe, 0x78, 0x4e, 0x93, 0x66, 0x69, 0xe5, 0x9b, 0xf5, 0x03, + 0xc1, 0x4c, 0x32, 0x53, 0xb9, 0x5e, 0xce, 0x0c, 0x5e, 0xd9, 0xc2, 0xa8, 0x7a, 0x59, 0x94, 0xb6, + 0x27, 0x32, 0xb4, 0x91, 0x86, 0x7f, 0x22, 0x50, 0xd2, 0x01, 0xce, 0x2d, 0x4c, 0x76, 0xcc, 0x0b, + 0xa9, 0xda, 0x92, 0xea, 0x89, 0x4a, 0x4a, 0x53, 0xf9, 0x32, 0x85, 0xc0, 0xfa, 0x86, 0xa0, 0x12, + 0xbd, 0x17, 0xb9, 0xe3, 0x35, 0xf2, 0xa4, 0x14, 0x02, 0x3d, 0x93, 0x40, 0x9b, 0xea, 0x7a, 0x79, + 0xa0, 0xd0, 0x6d, 
0x23, 0x6d, 0xe7, 0x0a, 0xc1, 0x4a, 0x8f, 0x0d, 0xc7, 0x1f, 0xbf, 0x33, 0x9f, + 0xbe, 0x58, 0x1d, 0xe1, 0x92, 0x0e, 0x7a, 0xb7, 0x13, 0x8b, 0x2d, 0xe6, 0x98, 0xae, 0xa5, 0x33, + 0xdf, 0x22, 0x16, 0x75, 0xa5, 0x87, 0x48, 0x14, 0x32, 0x3d, 0x9b, 0x3f, 0xf0, 0x47, 0xbe, 0x9d, + 0x7e, 0xfc, 0x9a, 0x50, 0x8e, 0x76, 0x5f, 0xef, 0xef, 0x1d, 0x1f, 0xee, 0x1b, 0xdd, 0x8a, 0xdc, + 0xba, 0xf5, 0x37, 0x00, 0x00, 0xff, 0xff, 0x1e, 0x36, 0xc7, 0x33, 0x86, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/job.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/job.pb.go new file mode 100644 index 0000000..43ef3dd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/job.pb.go @@ -0,0 +1,595 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/scheduler/v1/job.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the job. +type Job_State int32 + +const ( + // Unspecified state. + Job_STATE_UNSPECIFIED Job_State = 0 + // The job is executing normally. + Job_ENABLED Job_State = 1 + // The job is paused by the user. It will not execute. A user can + // intentionally pause the job using + // [PauseJobRequest][google.cloud.scheduler.v1.PauseJobRequest]. + Job_PAUSED Job_State = 2 + // The job is disabled by the system due to error. The user + // cannot directly set a job to be disabled. + Job_DISABLED Job_State = 3 + // The job state resulting from a failed [CloudScheduler.UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob] + // operation. To recover a job from this state, retry + // [CloudScheduler.UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob] until a successful response is received. + Job_UPDATE_FAILED Job_State = 4 +) + +var Job_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "ENABLED", + 2: "PAUSED", + 3: "DISABLED", + 4: "UPDATE_FAILED", +} +var Job_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "ENABLED": 1, + "PAUSED": 2, + "DISABLED": 3, + "UPDATE_FAILED": 4, +} + +func (x Job_State) String() string { + return proto.EnumName(Job_State_name, int32(x)) +} +func (Job_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_61d7bc76ec491cf9, []int{0, 0} +} + +// Configuration for a job. +// The maximum allowed size for a job is 100KB. +type Job struct { + // Optionally caller-specified in [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob], after + // which it becomes output only. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
+ // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the job's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), or underscores (_). The maximum length is 500 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optionally caller-specified in [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob] or + // [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + // + // A human-readable description for the job. This string must not contain + // more than 500 characters. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Required. + // + // Delivery settings containing destination and parameters. + // + // Types that are valid to be assigned to Target: + // *Job_PubsubTarget + // *Job_AppEngineHttpTarget + // *Job_HttpTarget + Target isJob_Target `protobuf_oneof:"target"` + // Required, except when used with [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. + // + // Describes the schedule on which the job will be executed. + // + // The schedule can be either of the following types: + // + // * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) + // * English-like + // [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) + // + // As a general rule, execution `n + 1` of a job will not begin + // until execution `n` has finished. Cloud Scheduler will never + // allow two simultaneously outstanding executions. For example, + // this implies that if the `n+1`th execution is scheduled to run at + // 16:00 but the `n`th execution takes until 16:15, the `n+1`th + // execution will not start until `16:15`. + // A scheduled start time will be delayed if the previous + // execution has not ended when its scheduled time occurs. + // + // If [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] > 0 and a job attempt fails, + // the job will be tried a total of [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] + // times, with exponential backoff, until the next scheduled start + // time. + Schedule string `protobuf:"bytes,20,opt,name=schedule,proto3" json:"schedule,omitempty"` + // Specifies the time zone to be used in interpreting + // [schedule][google.cloud.scheduler.v1.Job.schedule]. The value of this field must be a time + // zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). + // + // Note that some time zones include a provision for + // daylight savings time. The rules for daylight saving time are + // determined by the chosen tz. For UTC use the string "utc". If a + // time zone is not specified, the default will be in UTC (also known + // as GMT). + TimeZone string `protobuf:"bytes,21,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Output only. The creation time of the job. + UserUpdateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=user_update_time,json=userUpdateTime,proto3" json:"user_update_time,omitempty"` + // Output only. 
State of the job. + State Job_State `protobuf:"varint,10,opt,name=state,proto3,enum=google.cloud.scheduler.v1.Job_State" json:"state,omitempty"` + // Output only. The response from the target for the last attempted execution. + Status *status.Status `protobuf:"bytes,11,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The next time the job is scheduled. Note that this may be a + // retry of a previously failed attempt or the next execution time + // according to the schedule. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,17,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time the last job attempt started. + LastAttemptTime *timestamp.Timestamp `protobuf:"bytes,18,opt,name=last_attempt_time,json=lastAttemptTime,proto3" json:"last_attempt_time,omitempty"` + // Settings that determine the retry behavior. + RetryConfig *RetryConfig `protobuf:"bytes,19,opt,name=retry_config,json=retryConfig,proto3" json:"retry_config,omitempty"` + // The deadline for job attempts. If the request handler does not respond by + // this deadline then the request is cancelled and the attempt is marked as a + // `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in + // execution logs. Cloud Scheduler will retry the job according + // to the [RetryConfig][google.cloud.scheduler.v1.RetryConfig]. + // + // The allowed duration for this deadline is: + // * For [HTTP targets][google.cloud.scheduler.v1.Job.http_target], between 15 seconds and 30 minutes. + // * For [App Engine HTTP targets][google.cloud.scheduler.v1.Job.app_engine_http_target], between 15 + // seconds and 24 hours. + AttemptDeadline *duration.Duration `protobuf:"bytes,22,opt,name=attempt_deadline,json=attemptDeadline,proto3" json:"attempt_deadline,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_job_61d7bc76ec491cf9, []int{0} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Job) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isJob_Target interface { + isJob_Target() +} + +type Job_PubsubTarget struct { + PubsubTarget *PubsubTarget `protobuf:"bytes,4,opt,name=pubsub_target,json=pubsubTarget,proto3,oneof"` +} + +type Job_AppEngineHttpTarget struct { + AppEngineHttpTarget *AppEngineHttpTarget `protobuf:"bytes,5,opt,name=app_engine_http_target,json=appEngineHttpTarget,proto3,oneof"` +} + +type Job_HttpTarget struct { + HttpTarget *HttpTarget `protobuf:"bytes,6,opt,name=http_target,json=httpTarget,proto3,oneof"` +} + +func (*Job_PubsubTarget) isJob_Target() {} + +func (*Job_AppEngineHttpTarget) isJob_Target() {} + +func (*Job_HttpTarget) isJob_Target() {} + +func (m *Job) 
GetTarget() isJob_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *Job) GetPubsubTarget() *PubsubTarget { + if x, ok := m.GetTarget().(*Job_PubsubTarget); ok { + return x.PubsubTarget + } + return nil +} + +func (m *Job) GetAppEngineHttpTarget() *AppEngineHttpTarget { + if x, ok := m.GetTarget().(*Job_AppEngineHttpTarget); ok { + return x.AppEngineHttpTarget + } + return nil +} + +func (m *Job) GetHttpTarget() *HttpTarget { + if x, ok := m.GetTarget().(*Job_HttpTarget); ok { + return x.HttpTarget + } + return nil +} + +func (m *Job) GetSchedule() string { + if m != nil { + return m.Schedule + } + return "" +} + +func (m *Job) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *Job) GetUserUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UserUpdateTime + } + return nil +} + +func (m *Job) GetState() Job_State { + if m != nil { + return m.State + } + return Job_STATE_UNSPECIFIED +} + +func (m *Job) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *Job) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Job) GetLastAttemptTime() *timestamp.Timestamp { + if m != nil { + return m.LastAttemptTime + } + return nil +} + +func (m *Job) GetRetryConfig() *RetryConfig { + if m != nil { + return m.RetryConfig + } + return nil +} + +func (m *Job) GetAttemptDeadline() *duration.Duration { + if m != nil { + return m.AttemptDeadline + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{ + (*Job_PubsubTarget)(nil), + (*Job_AppEngineHttpTarget)(nil), + (*Job_HttpTarget)(nil), + } +} + +func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Job) + // target + switch x := m.Target.(type) { + case *Job_PubsubTarget: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PubsubTarget); err != nil { + return err + } + case *Job_AppEngineHttpTarget: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpTarget); err != nil { + return err + } + case *Job_HttpTarget: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpTarget); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.Target has unexpected type %T", x) + } + return nil +} + +func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Job) + switch tag { + case 4: // target.pubsub_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PubsubTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_PubsubTarget{msg} + return true, err + case 5: // target.app_engine_http_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_AppEngineHttpTarget{msg} + return true, err + case 6: // target.http_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HttpTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_HttpTarget{msg} + return true, err + default: + return false, nil + } +} + +func _Job_OneofSizer(msg 
proto.Message) (n int) { + m := msg.(*Job) + // target + switch x := m.Target.(type) { + case *Job_PubsubTarget: + s := proto.Size(x.PubsubTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_AppEngineHttpTarget: + s := proto.Size(x.AppEngineHttpTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_HttpTarget: + s := proto.Size(x.HttpTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Settings that determine the retry behavior. +// +// By default, if a job does not complete successfully (meaning that +// an acknowledgement is not received from the handler, then it will be retried +// with exponential backoff according to the settings in [RetryConfig][google.cloud.scheduler.v1.RetryConfig]. +type RetryConfig struct { + // The number of attempts that the system will make to run a job using the + // exponential backoff procedure described by + // [max_doublings][google.cloud.scheduler.v1.RetryConfig.max_doublings]. + // + // The default value of retry_count is zero. + // + // If retry_count is zero, a job attempt will *not* be retried if + // it fails. Instead the Cloud Scheduler system will wait for the + // next scheduled execution time. + // + // If retry_count is set to a non-zero number then Cloud Scheduler + // will retry failed attempts, using exponential backoff, + // retry_count times, or until the next scheduled execution time, + // whichever comes first. + // + // Values greater than 5 and negative values are not allowed. + RetryCount int32 `protobuf:"varint,1,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + // The time limit for retrying a failed job, measured from time when an + // execution was first attempted. If specified with + // [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count], the job will be retried until both + // limits are reached. + // + // The default value for max_retry_duration is zero, which means retry + // duration is unlimited. + MaxRetryDuration *duration.Duration `protobuf:"bytes,2,opt,name=max_retry_duration,json=maxRetryDuration,proto3" json:"max_retry_duration,omitempty"` + // The minimum amount of time to wait before retrying a job after + // it fails. + // + // The default value of this field is 5 seconds. + MinBackoffDuration *duration.Duration `protobuf:"bytes,3,opt,name=min_backoff_duration,json=minBackoffDuration,proto3" json:"min_backoff_duration,omitempty"` + // The maximum amount of time to wait before retrying a job after + // it fails. + // + // The default value of this field is 1 hour. + MaxBackoffDuration *duration.Duration `protobuf:"bytes,4,opt,name=max_backoff_duration,json=maxBackoffDuration,proto3" json:"max_backoff_duration,omitempty"` + // The time between retries will double `max_doublings` times. + // + // A job's retry interval starts at + // [min_backoff_duration][google.cloud.scheduler.v1.RetryConfig.min_backoff_duration], then doubles + // `max_doublings` times, then increases linearly, and finally + // retries retries at intervals of + // [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] up to + // [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] times. 
+ // + // For example, if [min_backoff_duration][google.cloud.scheduler.v1.RetryConfig.min_backoff_duration] is + // 10s, [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] is 300s, and + // `max_doublings` is 3, then the a job will first be retried in 10s. The + // retry interval will double three times, and then increase linearly by + // 2^3 * 10s. Finally, the job will retry at intervals of + // [max_backoff_duration][google.cloud.scheduler.v1.RetryConfig.max_backoff_duration] until the job has + // been attempted [retry_count][google.cloud.scheduler.v1.RetryConfig.retry_count] times. Thus, the + // requests will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, .... + // + // The default value of this field is 5. + MaxDoublings int32 `protobuf:"varint,5,opt,name=max_doublings,json=maxDoublings,proto3" json:"max_doublings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryConfig) Reset() { *m = RetryConfig{} } +func (m *RetryConfig) String() string { return proto.CompactTextString(m) } +func (*RetryConfig) ProtoMessage() {} +func (*RetryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_job_61d7bc76ec491cf9, []int{1} +} +func (m *RetryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryConfig.Unmarshal(m, b) +} +func (m *RetryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryConfig.Marshal(b, m, deterministic) +} +func (dst *RetryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryConfig.Merge(dst, src) +} +func (m *RetryConfig) XXX_Size() int { + return xxx_messageInfo_RetryConfig.Size(m) +} +func (m *RetryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RetryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryConfig proto.InternalMessageInfo + +func (m *RetryConfig) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *RetryConfig) GetMaxRetryDuration() *duration.Duration { + if m != nil { + return m.MaxRetryDuration + } + return nil +} + +func (m *RetryConfig) GetMinBackoffDuration() *duration.Duration { + if m != nil { + return m.MinBackoffDuration + } + return nil +} + +func (m *RetryConfig) GetMaxBackoffDuration() *duration.Duration { + if m != nil { + return m.MaxBackoffDuration + } + return nil +} + +func (m *RetryConfig) GetMaxDoublings() int32 { + if m != nil { + return m.MaxDoublings + } + return 0 +} + +func init() { + proto.RegisterType((*Job)(nil), "google.cloud.scheduler.v1.Job") + proto.RegisterType((*RetryConfig)(nil), "google.cloud.scheduler.v1.RetryConfig") + proto.RegisterEnum("google.cloud.scheduler.v1.Job_State", Job_State_name, Job_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1/job.proto", fileDescriptor_job_61d7bc76ec491cf9) +} + +var fileDescriptor_job_61d7bc76ec491cf9 = []byte{ + // 725 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0x5f, 0x4f, 0xdb, 0x3c, + 0x14, 0xc6, 0x29, 0xb4, 0xa5, 0x9c, 0xb4, 0xd0, 0x9a, 0x3f, 0x6f, 0xe8, 0xbb, 0x8d, 0x0a, 0x36, + 0x86, 0x76, 0x91, 0x08, 0x76, 0xb7, 0x5d, 0x4c, 0x2d, 0x29, 0xa3, 0x6c, 0x42, 0x55, 0xda, 0x4a, + 0x13, 0x37, 0x91, 0x93, 0xb8, 0x21, 0x5b, 0x63, 0x5b, 0x89, 0x83, 0xba, 0x7d, 0x9b, 0x7d, 0x89, + 0x7d, 0xbe, 0x29, 0x4e, 0x52, 0x0a, 0x8c, 0xf6, 0x2e, 0x3e, 0xe7, 0x79, 0x7e, 0xe7, 0xd8, 0x27, + 0x36, 0x1c, 0x79, 0x8c, 0x79, 0x13, 0xa2, 0x3b, 0x13, 0x16, 0xbb, 
0x7a, 0xe4, 0xdc, 0x12, 0x37, + 0x9e, 0x90, 0x50, 0xbf, 0x3b, 0xd5, 0xbf, 0x33, 0x5b, 0xe3, 0x21, 0x13, 0x0c, 0xed, 0xa7, 0x22, + 0x4d, 0x8a, 0xb4, 0x99, 0x48, 0xbb, 0x3b, 0x6d, 0xbe, 0xc8, 0xfc, 0x98, 0xfb, 0x3a, 0xa6, 0x94, + 0x09, 0x2c, 0x7c, 0x46, 0xa3, 0xd4, 0xd8, 0xdc, 0x9f, 0xcb, 0x86, 0x24, 0x62, 0x71, 0xe8, 0x90, + 0x2c, 0x75, 0xfc, 0x7c, 0x61, 0x81, 0x43, 0x8f, 0x88, 0x4c, 0xf7, 0x2a, 0xd3, 0xc9, 0x95, 0x1d, + 0x8f, 0x75, 0x37, 0x0e, 0x65, 0x8d, 0x2c, 0x7f, 0xf0, 0x38, 0x2f, 0xfc, 0x80, 0x44, 0x02, 0x07, + 0x3c, 0x13, 0xfc, 0x97, 0x09, 0x42, 0xee, 0xe8, 0x91, 0xc0, 0x22, 0xce, 0x9a, 0x3b, 0xfc, 0xbd, + 0x0e, 0x6b, 0x57, 0xcc, 0x46, 0x08, 0x8a, 0x14, 0x07, 0x44, 0x2d, 0xb4, 0x0a, 0x27, 0x1b, 0xa6, + 0xfc, 0x46, 0x2d, 0x50, 0x5c, 0x12, 0x39, 0xa1, 0xcf, 0x93, 0x52, 0xea, 0xaa, 0x4c, 0xcd, 0x87, + 0xd0, 0x35, 0xd4, 0x78, 0x6c, 0x47, 0xb1, 0x6d, 0xa5, 0xed, 0xaa, 0xc5, 0x56, 0xe1, 0x44, 0x39, + 0x7b, 0xab, 0x3d, 0x7b, 0x56, 0x5a, 0x5f, 0xea, 0x87, 0x52, 0x7e, 0xb9, 0x62, 0x56, 0xf9, 0xdc, + 0x1a, 0x11, 0xd8, 0xc3, 0x9c, 0x5b, 0x84, 0x7a, 0x3e, 0x25, 0xd6, 0xad, 0x10, 0x3c, 0x07, 0x97, + 0x24, 0x58, 0x5b, 0x00, 0x6e, 0x73, 0xde, 0x95, 0xbe, 0x4b, 0x21, 0xf8, 0x8c, 0xbf, 0x8d, 0x9f, + 0x86, 0xd1, 0x25, 0x28, 0xf3, 0xec, 0xb2, 0x64, 0xbf, 0x59, 0xc0, 0x7e, 0x80, 0x84, 0xdb, 0x7b, + 0x52, 0x13, 0x2a, 0xb9, 0x50, 0xdd, 0x91, 0xe7, 0x33, 0x5b, 0xa3, 0xff, 0x61, 0x23, 0x19, 0x83, + 0xf5, 0x8b, 0x51, 0xa2, 0xee, 0xa6, 0xc9, 0x24, 0x70, 0xc3, 0x28, 0x41, 0x06, 0xd4, 0xe3, 0x88, + 0x84, 0x56, 0xcc, 0x5d, 0x2c, 0x88, 0x95, 0xc4, 0xd5, 0x0d, 0xd9, 0x47, 0x33, 0xef, 0x23, 0x1f, + 0xa6, 0x36, 0xcc, 0x87, 0x69, 0x6e, 0x26, 0x9e, 0x91, 0xb4, 0x24, 0x41, 0xf4, 0x01, 0x4a, 0xc9, + 0x34, 0x89, 0x0a, 0xad, 0xc2, 0xc9, 0xe6, 0xd9, 0xeb, 0x05, 0x5b, 0xb8, 0x62, 0xb6, 0x36, 0x48, + 0xb4, 0x66, 0x6a, 0x41, 0xef, 0xa0, 0x9c, 0xfe, 0x09, 0xaa, 0x22, 0xeb, 0xa2, 0xdc, 0x1c, 0x72, + 0x47, 0x2a, 0xe3, 0xc8, 0xcc, 0x14, 0xe8, 0x13, 0xd4, 0x72, 0x58, 0xda, 0x6a, 0x63, 0x69, 0xab, + 0xd5, 0xdc, 0x20, 0x1b, 0xbd, 0x80, 0xc6, 0x04, 0x47, 0xc2, 0xc2, 0x42, 0x90, 0x80, 0x8b, 0x14, + 0x82, 0x96, 0x42, 0xb6, 0x12, 0x53, 0x3b, 0xf5, 0x48, 0x4e, 0x0f, 0xaa, 0x21, 0x11, 0xe1, 0x4f, + 0xcb, 0x61, 0x74, 0xec, 0x7b, 0xea, 0xb6, 0x44, 0x1c, 0x2f, 0xd8, 0xb7, 0x99, 0xc8, 0xcf, 0xa5, + 0xda, 0x54, 0xc2, 0xfb, 0x45, 0x32, 0x81, 0xbc, 0x1b, 0x97, 0x60, 0x77, 0xe2, 0x53, 0xa2, 0xee, + 0x49, 0xdc, 0xfe, 0x93, 0x8e, 0x8c, 0xec, 0xba, 0x99, 0x5b, 0x99, 0xc5, 0xc8, 0x1c, 0x87, 0xdf, + 0xa0, 0x24, 0x4f, 0x15, 0xed, 0x42, 0x63, 0x30, 0x6c, 0x0f, 0xbb, 0xd6, 0xe8, 0x7a, 0xd0, 0xef, + 0x9e, 0xf7, 0x2e, 0x7a, 0x5d, 0xa3, 0xbe, 0x82, 0x14, 0x58, 0xef, 0x5e, 0xb7, 0x3b, 0x5f, 0xbb, + 0x46, 0xbd, 0x80, 0x00, 0xca, 0xfd, 0xf6, 0x68, 0xd0, 0x35, 0xea, 0xab, 0xa8, 0x0a, 0x15, 0xa3, + 0x37, 0x48, 0x33, 0x6b, 0xa8, 0x01, 0xb5, 0x51, 0xdf, 0x48, 0xec, 0x17, 0xed, 0x5e, 0x12, 0x2a, + 0x76, 0x2a, 0x50, 0x4e, 0xff, 0xcf, 0xc3, 0x3f, 0xab, 0xa0, 0xcc, 0x6d, 0x03, 0x1d, 0x80, 0x92, + 0x1f, 0x42, 0x4c, 0x85, 0xbc, 0xb2, 0x25, 0x13, 0xb2, 0xbd, 0xc5, 0x54, 0xa0, 0xcf, 0x80, 0x02, + 0x3c, 0xb5, 0x52, 0x51, 0xfe, 0x54, 0xc8, 0xfb, 0xbb, 0x70, 0x73, 0xf5, 0x00, 0x4f, 0x65, 0x9d, + 0x3c, 0x82, 0xbe, 0xc0, 0x4e, 0xe0, 0x53, 0xcb, 0xc6, 0xce, 0x0f, 0x36, 0x1e, 0xdf, 0xa3, 0xd6, + 0x96, 0xa1, 0x50, 0xe0, 0xd3, 0x4e, 0xea, 0x7a, 0x00, 0xc3, 0xd3, 0xa7, 0xb0, 0xe2, 0x72, 0x18, + 0x9e, 0x3e, 0x86, 0x1d, 0x41, 0x2d, 0x81, 0xb9, 0x2c, 0xb6, 0x27, 0x3e, 0xf5, 0x22, 0xf9, 0x40, + 0x94, 0xcc, 0x6a, 0x80, 0xa7, 0x46, 0x1e, 0xeb, 0x30, 0x78, 0xe9, 0xb0, 0xe0, 0xf9, 0x9f, 
0xa3, + 0x53, 0xb9, 0x62, 0x76, 0x3f, 0xa9, 0xd7, 0x2f, 0xdc, 0x74, 0x32, 0x99, 0xc7, 0x26, 0x98, 0x7a, + 0x1a, 0x0b, 0x3d, 0xdd, 0x23, 0x54, 0x76, 0xa3, 0xa7, 0x29, 0xcc, 0xfd, 0xe8, 0x1f, 0x4f, 0xf5, + 0xc7, 0xd9, 0xc2, 0x2e, 0x4b, 0xf9, 0xfb, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x58, 0xd1, 0xed, + 0xc5, 0x51, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/target.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/target.pb.go new file mode 100644 index 0000000..77e3ce6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1/target.pb.go @@ -0,0 +1,818 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/scheduler/v1/target.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The HTTP method used to execute the job. +type HttpMethod int32 + +const ( + // HTTP method unspecified. Defaults to POST. + HttpMethod_HTTP_METHOD_UNSPECIFIED HttpMethod = 0 + // HTTP POST + HttpMethod_POST HttpMethod = 1 + // HTTP GET + HttpMethod_GET HttpMethod = 2 + // HTTP HEAD + HttpMethod_HEAD HttpMethod = 3 + // HTTP PUT + HttpMethod_PUT HttpMethod = 4 + // HTTP DELETE + HttpMethod_DELETE HttpMethod = 5 + // HTTP PATCH + HttpMethod_PATCH HttpMethod = 6 + // HTTP OPTIONS + HttpMethod_OPTIONS HttpMethod = 7 +) + +var HttpMethod_name = map[int32]string{ + 0: "HTTP_METHOD_UNSPECIFIED", + 1: "POST", + 2: "GET", + 3: "HEAD", + 4: "PUT", + 5: "DELETE", + 6: "PATCH", + 7: "OPTIONS", +} +var HttpMethod_value = map[string]int32{ + "HTTP_METHOD_UNSPECIFIED": 0, + "POST": 1, + "GET": 2, + "HEAD": 3, + "PUT": 4, + "DELETE": 5, + "PATCH": 6, + "OPTIONS": 7, +} + +func (x HttpMethod) String() string { + return proto.EnumName(HttpMethod_name, int32(x)) +} +func (HttpMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{0} +} + +// Http target. The job will be pushed to the job handler by means of +// an HTTP request via an [http_method][google.cloud.scheduler.v1.HttpTarget.http_method] such as HTTP +// POST, HTTP GET, etc. The job is acknowledged by means of an HTTP +// response code in the range [200 - 299]. A failure to receive a response +// constitutes a failed execution. For a redirected request, the response +// returned by the redirected request is considered. +type HttpTarget struct { + // Required. + // + // The full URI path that the request will be sent to. This string + // must begin with either "http://" or "https://". Some examples of + // valid values for [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: + // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will + // encode some characters for safety and compatibility. The maximum allowed + // URL length is 2083 characters after encoding. 
+ Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // Which HTTP method to use for the request. + HttpMethod HttpMethod `protobuf:"varint,2,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.scheduler.v1.HttpMethod" json:"http_method,omitempty"` + // The user can specify HTTP request headers to send with the job's + // HTTP request. This map contains the header field names and + // values. Repeated headers are not supported, but a header value can + // contain commas. These headers represent a subset of the headers + // that will accompany the job's HTTP request. Some HTTP request + // headers will be ignored or replaced. A partial list of headers that + // will be ignored or replaced is below: + // - Host: This will be computed by Cloud Scheduler and derived from + // [uri][google.cloud.scheduler.v1.HttpTarget.uri]. + // * `Content-Length`: This will be computed by Cloud Scheduler. + // * `User-Agent`: This will be set to `"Google-Cloud-Scheduler"`. + // * `X-Google-*`: Google internal use only. + // * `X-AppEngine-*`: Google internal use only. + // + // The total size of headers must be less than 80KB. + Headers map[string]string `protobuf:"bytes,3,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // HTTP request body. A request body is allowed only if the HTTP + // method is POST, PUT, or PATCH. It is an error to set body on a job with an + // incompatible [HttpMethod][google.cloud.scheduler.v1.HttpMethod]. + Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` + // The mode for generating an `Authorization` header for HTTP requests. + // + // If specified, all `Authorization` headers in the [HttpTarget.headers][google.cloud.scheduler.v1.HttpTarget.headers] + // field will be overridden. 
+ // + // Types that are valid to be assigned to AuthorizationHeader: + // *HttpTarget_OauthToken + // *HttpTarget_OidcToken + AuthorizationHeader isHttpTarget_AuthorizationHeader `protobuf_oneof:"authorization_header"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpTarget) Reset() { *m = HttpTarget{} } +func (m *HttpTarget) String() string { return proto.CompactTextString(m) } +func (*HttpTarget) ProtoMessage() {} +func (*HttpTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{0} +} +func (m *HttpTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpTarget.Unmarshal(m, b) +} +func (m *HttpTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpTarget.Marshal(b, m, deterministic) +} +func (dst *HttpTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpTarget.Merge(dst, src) +} +func (m *HttpTarget) XXX_Size() int { + return xxx_messageInfo_HttpTarget.Size(m) +} +func (m *HttpTarget) XXX_DiscardUnknown() { + xxx_messageInfo_HttpTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpTarget proto.InternalMessageInfo + +func (m *HttpTarget) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *HttpTarget) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *HttpTarget) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *HttpTarget) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +type isHttpTarget_AuthorizationHeader interface { + isHttpTarget_AuthorizationHeader() +} + +type HttpTarget_OauthToken struct { + OauthToken *OAuthToken `protobuf:"bytes,5,opt,name=oauth_token,json=oauthToken,proto3,oneof"` +} + +type HttpTarget_OidcToken struct { + OidcToken *OidcToken `protobuf:"bytes,6,opt,name=oidc_token,json=oidcToken,proto3,oneof"` +} + +func (*HttpTarget_OauthToken) isHttpTarget_AuthorizationHeader() {} + +func (*HttpTarget_OidcToken) isHttpTarget_AuthorizationHeader() {} + +func (m *HttpTarget) GetAuthorizationHeader() isHttpTarget_AuthorizationHeader { + if m != nil { + return m.AuthorizationHeader + } + return nil +} + +func (m *HttpTarget) GetOauthToken() *OAuthToken { + if x, ok := m.GetAuthorizationHeader().(*HttpTarget_OauthToken); ok { + return x.OauthToken + } + return nil +} + +func (m *HttpTarget) GetOidcToken() *OidcToken { + if x, ok := m.GetAuthorizationHeader().(*HttpTarget_OidcToken); ok { + return x.OidcToken + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
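For orientation only (not part of the vendored file): a minimal sketch of how a caller might populate the HttpTarget message declared above. The endpoint URI, body, and service-account email are hypothetical placeholders; the authorization_header oneof is assigned through its generated wrapper type.

// Illustrative sketch only; values are hypothetical.
// import scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1"
func exampleHTTPTarget() *scheduler.HttpTarget {
	return &scheduler.HttpTarget{
		Uri:        "https://example.com/tasks/run", // must start with "http://" or "https://"
		HttpMethod: scheduler.HttpMethod_POST,
		Headers:    map[string]string{"Content-Type": "application/json"},
		Body:       []byte(`{"task":"nightly"}`),
		// The oneof is set via its wrapper type; an OIDC token is used here,
		// which the comments above recommend for non-GCP endpoints.
		AuthorizationHeader: &scheduler.HttpTarget_OidcToken{
			OidcToken: &scheduler.OidcToken{
				ServiceAccountEmail: "scheduler-runner@my-project.iam.gserviceaccount.com", // hypothetical
			},
		},
	}
}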
+func (*HttpTarget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HttpTarget_OneofMarshaler, _HttpTarget_OneofUnmarshaler, _HttpTarget_OneofSizer, []interface{}{ + (*HttpTarget_OauthToken)(nil), + (*HttpTarget_OidcToken)(nil), + } +} + +func _HttpTarget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HttpTarget) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpTarget_OauthToken: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OauthToken); err != nil { + return err + } + case *HttpTarget_OidcToken: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OidcToken); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HttpTarget.AuthorizationHeader has unexpected type %T", x) + } + return nil +} + +func _HttpTarget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HttpTarget) + switch tag { + case 5: // authorization_header.oauth_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OAuthToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpTarget_OauthToken{msg} + return true, err + case 6: // authorization_header.oidc_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OidcToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpTarget_OidcToken{msg} + return true, err + default: + return false, nil + } +} + +func _HttpTarget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HttpTarget) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpTarget_OauthToken: + s := proto.Size(x.OauthToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HttpTarget_OidcToken: + s := proto.Size(x.OidcToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// App Engine target. The job will be pushed to a job handler by means +// of an HTTP request via an [http_method][google.cloud.scheduler.v1.AppEngineHttpTarget.http_method] such +// as HTTP POST, HTTP GET, etc. The job is acknowledged by means of an +// HTTP response code in the range [200 - 299]. Error 503 is +// considered an App Engine system error instead of an application +// error. Requests returning error 503 will be retried regardless of +// retry configuration and not counted against retry counts. Any other +// response code, or a failure to receive a response before the +// deadline, constitutes a failed attempt. +type AppEngineHttpTarget struct { + // The HTTP method to use for the request. PATCH and OPTIONS are not + // permitted. + HttpMethod HttpMethod `protobuf:"varint,1,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.scheduler.v1.HttpMethod" json:"http_method,omitempty"` + // App Engine Routing setting for the job. + AppEngineRouting *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing,json=appEngineRouting,proto3" json:"app_engine_routing,omitempty"` + // The relative URI. + // + // The relative URL must begin with "/" and must be a valid HTTP relative URL. + // It can contain a path, query string arguments, and `#` fragments. + // If the relative URL is empty, then the root path "/" will be used. 
+ // No spaces are allowed, and the maximum length allowed is 2083 characters. + RelativeUri string `protobuf:"bytes,3,opt,name=relative_uri,json=relativeUri,proto3" json:"relative_uri,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. Headers can be set + // when the job is created. + // + // Cloud Scheduler sets some headers to default values: + // + // * `User-Agent`: By default, this header is + // `"AppEngine-Google; (+http://code.google.com/appengine)"`. + // This header can be modified, but Cloud Scheduler will append + // `"AppEngine-Google; (+http://code.google.com/appengine)"` to the + // modified `User-Agent`. + // * `X-CloudScheduler`: This header will be set to true. + // + // If the job has a [body][google.cloud.scheduler.v1.AppEngineHttpTarget.body], Cloud Scheduler sets + // the following headers: + // + // * `Content-Type`: By default, the `Content-Type` header is set to + // `"application/octet-stream"`. The default can be overridden by explicitly + // setting `Content-Type` to a particular media type when the job is + // created. + // For example, `Content-Type` can be set to `"application/json"`. + // * `Content-Length`: This is computed by Cloud Scheduler. This value is + // output only. It cannot be changed. + // + // The headers below are output only. They cannot be set or overridden: + // + // * `X-Google-*`: For Google internal use only. + // * `X-AppEngine-*`: For Google internal use only. + // + // In addition, some App Engine headers, which contain + // job-specific information, are also sent to the job handler. + Headers map[string]string `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Body. + // + // HTTP request body. A request body is allowed only if the HTTP method is + // POST or PUT. It will result in an invalid argument error to set a body on a + // job with an incompatible [HttpMethod][google.cloud.scheduler.v1.HttpMethod].
+ Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpTarget) Reset() { *m = AppEngineHttpTarget{} } +func (m *AppEngineHttpTarget) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpTarget) ProtoMessage() {} +func (*AppEngineHttpTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{1} +} +func (m *AppEngineHttpTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpTarget.Unmarshal(m, b) +} +func (m *AppEngineHttpTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpTarget.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpTarget.Merge(dst, src) +} +func (m *AppEngineHttpTarget) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpTarget.Size(m) +} +func (m *AppEngineHttpTarget) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpTarget proto.InternalMessageInfo + +func (m *AppEngineHttpTarget) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *AppEngineHttpTarget) GetAppEngineRouting() *AppEngineRouting { + if m != nil { + return m.AppEngineRouting + } + return nil +} + +func (m *AppEngineHttpTarget) GetRelativeUri() string { + if m != nil { + return m.RelativeUri + } + return "" +} + +func (m *AppEngineHttpTarget) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *AppEngineHttpTarget) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// Pub/Sub target. The job will be delivered by publishing a message to +// the given Pub/Sub topic. +type PubsubTarget struct { + // Required. + // + // The name of the Cloud Pub/Sub topic to which messages will + // be published when a job is delivered. The topic name must be in the + // same format as required by PubSub's + // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), + // for example `projects/PROJECT_ID/topics/TOPIC_ID`. + // + // The topic must be in the same project as the Cloud Scheduler job. + TopicName string `protobuf:"bytes,1,opt,name=topic_name,json=topicName,proto3" json:"topic_name,omitempty"` + // The message payload for PubsubMessage. + // + // Pubsub message must contain either non-empty data, or at least one + // attribute. + Data []byte `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` + // Attributes for PubsubMessage. + // + // Pubsub message must contain either non-empty data, or at least one + // attribute. 
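As an illustrative aside (not part of the vendored file): a PubsubTarget literal built from the fields documented above. The topic name is a hypothetical placeholder and, per the comment, must live in the same project as the job.

// Illustrative sketch only; values are hypothetical.
// import scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1"
func examplePubsubTarget() *scheduler.PubsubTarget {
	return &scheduler.PubsubTarget{
		TopicName:  "projects/my-project/topics/scheduled-work", // hypothetical topic in the job's project
		Data:       []byte("tick"),                              // non-empty data or at least one attribute is required
		Attributes: map[string]string{"origin": "cloud-scheduler"},
	}
}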
+ Attributes map[string]string `protobuf:"bytes,4,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PubsubTarget) Reset() { *m = PubsubTarget{} } +func (m *PubsubTarget) String() string { return proto.CompactTextString(m) } +func (*PubsubTarget) ProtoMessage() {} +func (*PubsubTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{2} +} +func (m *PubsubTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PubsubTarget.Unmarshal(m, b) +} +func (m *PubsubTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PubsubTarget.Marshal(b, m, deterministic) +} +func (dst *PubsubTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_PubsubTarget.Merge(dst, src) +} +func (m *PubsubTarget) XXX_Size() int { + return xxx_messageInfo_PubsubTarget.Size(m) +} +func (m *PubsubTarget) XXX_DiscardUnknown() { + xxx_messageInfo_PubsubTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_PubsubTarget proto.InternalMessageInfo + +func (m *PubsubTarget) GetTopicName() string { + if m != nil { + return m.TopicName + } + return "" +} + +func (m *PubsubTarget) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *PubsubTarget) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +// App Engine Routing. +// +// For more information about services, versions, and instances see +// [An Overview of App +// Engine](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine), +// [Microservices Architecture on Google App +// Engine](https://cloud.google.com/appengine/docs/python/microservices-on-app-engine), +// [App Engine Standard request +// routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed), +// and [App Engine Flex request +// routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). +type AppEngineRouting struct { + // App service. + // + // By default, the job is sent to the service which is the default + // service when the job is attempted. + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + // App version. + // + // By default, the job is sent to the version which is the default + // version when the job is attempted. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // App instance. + // + // By default, the job is sent to an instance which is available when + // the job is attempted. + // + // Requests can only be sent to a specific instance if + // [manual scaling is used in App Engine + // Standard](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes). + // App Engine Flex does not support instances. For more information, see + // [App Engine Standard request + // routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) + // and [App Engine Flex request + // routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). + Instance string `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // Output only. The host that the job is sent to. 
+ // + // For more information about how App Engine requests are routed, see + // [here](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed). + // + // The host is constructed as: + // + // + // * `host = [application_domain_name]`
+ // `| [service] + '.' + [application_domain_name]`
+ // `| [version] + '.' + [application_domain_name]`
+ // `| [version_dot_service] + '.' + [application_domain_name]`
+ // `| [instance] + '.' + [application_domain_name]`
+ // `| [instance_dot_service] + '.' + [application_domain_name]`
+ // `| [instance_dot_version] + '.' + [application_domain_name]`
+ // `| [instance_dot_version_dot_service] + '.' + [application_domain_name]` + // + // * `application_domain_name` = The domain name of the app, for + // example .appspot.com, which is associated with the + // job's project ID. + // + // * `service =` [service][google.cloud.scheduler.v1.AppEngineRouting.service] + // + // * `version =` [version][google.cloud.scheduler.v1.AppEngineRouting.version] + // + // * `version_dot_service =` + // [version][google.cloud.scheduler.v1.AppEngineRouting.version] `+ '.' +` + // [service][google.cloud.scheduler.v1.AppEngineRouting.service] + // + // * `instance =` [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] + // + // * `instance_dot_service =` + // [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] `+ '.' +` + // [service][google.cloud.scheduler.v1.AppEngineRouting.service] + // + // * `instance_dot_version =` + // [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] `+ '.' +` + // [version][google.cloud.scheduler.v1.AppEngineRouting.version] + // + // * `instance_dot_version_dot_service =` + // [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] `+ '.' +` + // [version][google.cloud.scheduler.v1.AppEngineRouting.version] `+ '.' +` + // [service][google.cloud.scheduler.v1.AppEngineRouting.service] + // + // + // If [service][google.cloud.scheduler.v1.AppEngineRouting.service] is empty, then the job will be sent + // to the service which is the default service when the job is attempted. + // + // If [version][google.cloud.scheduler.v1.AppEngineRouting.version] is empty, then the job will be sent + // to the version which is the default version when the job is attempted. + // + // If [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] is empty, then the job will be + // sent to an instance which is available when the job is attempted. + // + // If [service][google.cloud.scheduler.v1.AppEngineRouting.service], + // [version][google.cloud.scheduler.v1.AppEngineRouting.version], or + // [instance][google.cloud.scheduler.v1.AppEngineRouting.instance] is invalid, then the job will be sent + // to the default version of the default service when the job is attempted. 
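To make the routing fields concrete (illustrative only, not part of the vendored file): a sketch that pins a job to a particular App Engine service and version. The service, version, and relative URI are hypothetical.

// Illustrative sketch only; values are hypothetical.
// import scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1"
func exampleAppEngineTarget() *scheduler.AppEngineHttpTarget {
	return &scheduler.AppEngineHttpTarget{
		HttpMethod:  scheduler.HttpMethod_POST, // PATCH and OPTIONS are not permitted here
		RelativeUri: "/tasks/nightly-report",   // must begin with "/"
		AppEngineRouting: &scheduler.AppEngineRouting{
			Service: "worker", // empty would mean the default service
			Version: "v2",     // empty would mean the default version
		},
	}
}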
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineRouting) Reset() { *m = AppEngineRouting{} } +func (m *AppEngineRouting) String() string { return proto.CompactTextString(m) } +func (*AppEngineRouting) ProtoMessage() {} +func (*AppEngineRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{3} +} +func (m *AppEngineRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineRouting.Unmarshal(m, b) +} +func (m *AppEngineRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineRouting.Marshal(b, m, deterministic) +} +func (dst *AppEngineRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineRouting.Merge(dst, src) +} +func (m *AppEngineRouting) XXX_Size() int { + return xxx_messageInfo_AppEngineRouting.Size(m) +} +func (m *AppEngineRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineRouting proto.InternalMessageInfo + +func (m *AppEngineRouting) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AppEngineRouting) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *AppEngineRouting) GetInstance() string { + if m != nil { + return m.Instance + } + return "" +} + +func (m *AppEngineRouting) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +// Contains information needed for generating an +// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +// This type of authorization should be used when sending requests to a GCP +// endpoint. +type OAuthToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OAuth token. + // The service account must be within the same project as the job. The caller + // must have iam.serviceAccounts.actAs permission for the service account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // OAuth scope to be used for generating OAuth access token. + // If not specified, "https://www.googleapis.com/auth/cloud-platform" + // will be used. 
+ Scope string `protobuf:"bytes,2,opt,name=scope,proto3" json:"scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OAuthToken) Reset() { *m = OAuthToken{} } +func (m *OAuthToken) String() string { return proto.CompactTextString(m) } +func (*OAuthToken) ProtoMessage() {} +func (*OAuthToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{4} +} +func (m *OAuthToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OAuthToken.Unmarshal(m, b) +} +func (m *OAuthToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OAuthToken.Marshal(b, m, deterministic) +} +func (dst *OAuthToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuthToken.Merge(dst, src) +} +func (m *OAuthToken) XXX_Size() int { + return xxx_messageInfo_OAuthToken.Size(m) +} +func (m *OAuthToken) XXX_DiscardUnknown() { + xxx_messageInfo_OAuthToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuthToken proto.InternalMessageInfo + +func (m *OAuthToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OAuthToken) GetScope() string { + if m != nil { + return m.Scope + } + return "" +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). This +// type of authorization should be used when sending requests to third party +// endpoints or Cloud Run. +type OidcToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OIDC token. + // The service account must be within the same project as the job. The caller + // must have iam.serviceAccounts.actAs permission for the service account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // Audience to be used when generating OIDC token. If not specified, the URI + // specified in target will be used. 
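For contrast with the OIDC case (illustrative only, not part of the vendored file): the OAuth variant of the authorization_header oneof, which the comments above recommend for requests to GCP endpoints. The service-account email is hypothetical; the scope shown is the documented default when none is set.

// Illustrative sketch only; values are hypothetical.
// import scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1"
func exampleOAuthHeader() *scheduler.HttpTarget_OauthToken {
	return &scheduler.HttpTarget_OauthToken{
		OauthToken: &scheduler.OAuthToken{
			ServiceAccountEmail: "scheduler-runner@my-project.iam.gserviceaccount.com", // hypothetical
			Scope:               "https://www.googleapis.com/auth/cloud-platform",      // documented default when unset
		},
	}
}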
+ Audience string `protobuf:"bytes,2,opt,name=audience,proto3" json:"audience,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OidcToken) Reset() { *m = OidcToken{} } +func (m *OidcToken) String() string { return proto.CompactTextString(m) } +func (*OidcToken) ProtoMessage() {} +func (*OidcToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_21f966c3f7a2a854, []int{5} +} +func (m *OidcToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OidcToken.Unmarshal(m, b) +} +func (m *OidcToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OidcToken.Marshal(b, m, deterministic) +} +func (dst *OidcToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OidcToken.Merge(dst, src) +} +func (m *OidcToken) XXX_Size() int { + return xxx_messageInfo_OidcToken.Size(m) +} +func (m *OidcToken) XXX_DiscardUnknown() { + xxx_messageInfo_OidcToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OidcToken proto.InternalMessageInfo + +func (m *OidcToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OidcToken) GetAudience() string { + if m != nil { + return m.Audience + } + return "" +} + +func init() { + proto.RegisterType((*HttpTarget)(nil), "google.cloud.scheduler.v1.HttpTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1.HttpTarget.HeadersEntry") + proto.RegisterType((*AppEngineHttpTarget)(nil), "google.cloud.scheduler.v1.AppEngineHttpTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1.AppEngineHttpTarget.HeadersEntry") + proto.RegisterType((*PubsubTarget)(nil), "google.cloud.scheduler.v1.PubsubTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1.PubsubTarget.AttributesEntry") + proto.RegisterType((*AppEngineRouting)(nil), "google.cloud.scheduler.v1.AppEngineRouting") + proto.RegisterType((*OAuthToken)(nil), "google.cloud.scheduler.v1.OAuthToken") + proto.RegisterType((*OidcToken)(nil), "google.cloud.scheduler.v1.OidcToken") + proto.RegisterEnum("google.cloud.scheduler.v1.HttpMethod", HttpMethod_name, HttpMethod_value) +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1/target.proto", fileDescriptor_target_21f966c3f7a2a854) +} + +var fileDescriptor_target_21f966c3f7a2a854 = []byte{ + // 750 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdd, 0x6e, 0xeb, 0x44, + 0x10, 0x3e, 0xae, 0xf3, 0xd3, 0x8c, 0x23, 0xb0, 0xf6, 0x1c, 0x20, 0x04, 0x8e, 0x14, 0x22, 0x40, + 0x11, 0x48, 0xb6, 0x4e, 0xb8, 0x00, 0x9d, 0x23, 0x2e, 0x92, 0xc6, 0xc5, 0x95, 0xda, 0xc6, 0x72, + 0x1d, 0x10, 0x70, 0x61, 0x6d, 0xec, 0xc5, 0x5e, 0x35, 0xd9, 0xb5, 0xec, 0x75, 0xa4, 0x70, 0xc9, + 0xfb, 0xf0, 0x40, 0xbc, 0x0d, 0xf2, 0xfa, 0xa7, 0x51, 0x44, 0x43, 0x29, 0x77, 0xfb, 0xed, 0xcc, + 0x7c, 0x33, 0xdf, 0xcc, 0xac, 0x0d, 0x5f, 0x46, 0x9c, 0x47, 0x1b, 0x62, 0x06, 0x1b, 0x9e, 0x87, + 0x66, 0x16, 0xc4, 0x24, 0xcc, 0x37, 0x24, 0x35, 0x77, 0x6f, 0x4c, 0x81, 0xd3, 0x88, 0x08, 0x23, + 0x49, 0xb9, 0xe0, 0xe8, 0xe3, 0xd2, 0xcf, 0x90, 0x7e, 0x46, 0xe3, 0x67, 0xec, 0xde, 0x0c, 0x3f, + 0xad, 0x28, 0x70, 0x42, 0x4d, 0xcc, 0x18, 0x17, 0x58, 0x50, 0xce, 0xb2, 0x32, 0x70, 0x58, 0x05, + 0x9a, 0x12, 0xad, 0xf3, 0xdf, 0x4c, 0xcc, 0xf6, 0xa5, 0x69, 0xfc, 0xa7, 0x0a, 0x60, 0x0b, 0x91, + 0x78, 0x32, 0x11, 0xd2, 0x41, 0xcd, 0x53, 0x3a, 0x50, 0x46, 0xca, 0xa4, 0xe7, 0x16, 0x47, 
0x74, + 0x09, 0x5a, 0x2c, 0x44, 0xe2, 0x6f, 0x89, 0x88, 0x79, 0x38, 0x38, 0x1b, 0x29, 0x93, 0xf7, 0xa6, + 0x5f, 0x18, 0x8f, 0x96, 0x62, 0x14, 0x6c, 0x37, 0xd2, 0xd9, 0x85, 0xb8, 0x39, 0xa3, 0x6b, 0xe8, + 0xc6, 0x04, 0x87, 0x24, 0xcd, 0x06, 0xea, 0x48, 0x9d, 0x68, 0xd3, 0xe9, 0xbf, 0x70, 0x94, 0x15, + 0x19, 0x76, 0x19, 0x64, 0x31, 0x91, 0xee, 0xdd, 0x9a, 0x02, 0x21, 0x68, 0xad, 0x79, 0xb8, 0x1f, + 0xb4, 0x46, 0xca, 0xa4, 0xef, 0xca, 0x33, 0xb2, 0x41, 0xe3, 0x38, 0x17, 0xb1, 0x2f, 0xf8, 0x3d, + 0x61, 0x83, 0xf6, 0x48, 0x99, 0x68, 0x27, 0x2b, 0x5d, 0xce, 0x72, 0x11, 0x7b, 0x85, 0xb3, 0xfd, + 0xc2, 0x05, 0x19, 0x2b, 0x11, 0xb2, 0x00, 0x38, 0x0d, 0x83, 0x8a, 0xa8, 0x23, 0x89, 0x3e, 0x3f, + 0x45, 0x44, 0xc3, 0xa0, 0xe6, 0xe9, 0xf1, 0x1a, 0x0c, 0xdf, 0x42, 0xff, 0xb0, 0xfa, 0xa2, 0xb9, + 0xf7, 0x64, 0x5f, 0x37, 0xf7, 0x9e, 0xec, 0xd1, 0x2b, 0x68, 0xef, 0xf0, 0x26, 0x27, 0xb2, 0xad, + 0x3d, 0xb7, 0x04, 0x6f, 0xcf, 0xbe, 0x53, 0xe6, 0x1f, 0xc2, 0xab, 0xa2, 0x1e, 0x9e, 0xd2, 0xdf, + 0xe5, 0x28, 0xfd, 0x52, 0xf9, 0xf8, 0x0f, 0x15, 0x5e, 0xce, 0x92, 0xc4, 0x62, 0x11, 0x65, 0xe4, + 0x60, 0x70, 0x47, 0x63, 0x52, 0x9e, 0x3b, 0xa6, 0x9f, 0x01, 0xe1, 0x24, 0xf1, 0x89, 0xe4, 0xf7, + 0x53, 0x9e, 0x0b, 0xca, 0x22, 0x59, 0x9e, 0x36, 0xfd, 0xfa, 0x04, 0x5d, 0x53, 0x93, 0x5b, 0x86, + 0xb8, 0x3a, 0x3e, 0xba, 0x41, 0x9f, 0x41, 0x3f, 0x25, 0x1b, 0x2c, 0xe8, 0x8e, 0xf8, 0xc5, 0x92, + 0xa9, 0x52, 0xb3, 0x56, 0xdf, 0xad, 0x52, 0x8a, 0x56, 0x0f, 0x4b, 0xd2, 0x92, 0x4b, 0xf2, 0xee, + 0x29, 0x29, 0x9f, 0xbc, 0x2d, 0xed, 0x87, 0x6d, 0xf9, 0x3f, 0xc3, 0x19, 0xff, 0xa5, 0x40, 0xdf, + 0xc9, 0xd7, 0x59, 0xbe, 0xae, 0xba, 0xff, 0x1a, 0x40, 0xf0, 0x84, 0x06, 0x3e, 0xc3, 0x5b, 0x52, + 0x71, 0xf4, 0xe4, 0xcd, 0x2d, 0xde, 0x92, 0x22, 0x7f, 0x88, 0x05, 0x96, 0x8a, 0xfb, 0xae, 0x3c, + 0xa3, 0x9f, 0x00, 0xb0, 0x10, 0x29, 0x5d, 0xe7, 0x82, 0xd4, 0x6a, 0xbf, 0x3d, 0xa1, 0xf6, 0x30, + 0x9f, 0x31, 0x6b, 0x22, 0x4b, 0xa5, 0x07, 0x54, 0xc3, 0xef, 0xe1, 0xfd, 0x23, 0xf3, 0x7f, 0xd2, + 0xb6, 0x03, 0xfd, 0x78, 0x96, 0x68, 0x00, 0xdd, 0x8c, 0xa4, 0x3b, 0x1a, 0xd4, 0xda, 0x6a, 0x58, + 0x58, 0x76, 0x24, 0xcd, 0x28, 0x67, 0x15, 0x53, 0x0d, 0xd1, 0x10, 0xce, 0x29, 0xcb, 0x04, 0x66, + 0x01, 0xa9, 0x26, 0xdd, 0xe0, 0xa2, 0x1f, 0x31, 0xcf, 0x84, 0x7c, 0xbd, 0x3d, 0x57, 0x9e, 0xc7, + 0x3f, 0x02, 0x3c, 0xbc, 0x47, 0x34, 0x85, 0x0f, 0xaa, 0x14, 0x3e, 0x0e, 0x02, 0x9e, 0x33, 0xe1, + 0x93, 0x2d, 0xa6, 0x9b, 0x2a, 0xff, 0xcb, 0xca, 0x38, 0x2b, 0x6d, 0x56, 0x61, 0x2a, 0x34, 0x65, + 0x01, 0x4f, 0x1a, 0x4d, 0x12, 0x8c, 0x7f, 0x85, 0x5e, 0xf3, 0x3c, 0x9f, 0x45, 0x3b, 0x84, 0x73, + 0x9c, 0x87, 0x94, 0x14, 0x42, 0x4a, 0xe6, 0x06, 0x7f, 0x95, 0x95, 0x1f, 0xcf, 0xea, 0xed, 0x7c, + 0x02, 0x1f, 0xd9, 0x9e, 0xe7, 0xf8, 0x37, 0x96, 0x67, 0x2f, 0x17, 0xfe, 0xea, 0xf6, 0xce, 0xb1, + 0x2e, 0xae, 0x2e, 0xaf, 0xac, 0x85, 0xfe, 0x02, 0x9d, 0x43, 0xcb, 0x59, 0xde, 0x79, 0xba, 0x82, + 0xba, 0xa0, 0xfe, 0x60, 0x79, 0xfa, 0x59, 0x71, 0x65, 0x5b, 0xb3, 0x85, 0xae, 0x16, 0x57, 0xce, + 0xca, 0xd3, 0x5b, 0x08, 0xa0, 0xb3, 0xb0, 0xae, 0x2d, 0xcf, 0xd2, 0xdb, 0xa8, 0x07, 0x6d, 0x67, + 0xe6, 0x5d, 0xd8, 0x7a, 0x07, 0x69, 0xd0, 0x5d, 0x3a, 0xde, 0xd5, 0xf2, 0xf6, 0x4e, 0xef, 0xce, + 0x53, 0x78, 0x1d, 0xf0, 0xed, 0xe3, 0xab, 0x32, 0xd7, 0xca, 0x2d, 0x71, 0x8a, 0x0f, 0xbc, 0xa3, + 0xfc, 0x32, 0xaf, 0x3c, 0x23, 0xbe, 0xc1, 0x2c, 0x32, 0x78, 0x1a, 0x99, 0x11, 0x61, 0xf2, 0xf3, + 0x6f, 0x96, 0x26, 0x9c, 0xd0, 0xec, 0x1f, 0xfe, 0x3e, 0xef, 0x1a, 0xb0, 0xee, 0x48, 0xf7, 0x6f, + 0xfe, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x4a, 0x84, 0x17, 0xab, 0x06, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/cloudscheduler.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/cloudscheduler.pb.go new file mode 100644 index 0000000..8ee5ef9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/cloudscheduler.pb.go @@ -0,0 +1,912 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/scheduler/v1beta1/cloudscheduler.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. +type ListJobsRequest struct { + // Required. + // + // The location name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Requested page size. + // + // The maximum page size is 500. If unspecified, the page size will + // be the maximum. Fewer jobs than requested might be returned, + // even if more jobs exist; use next_page_token to determine if more + // jobs exist. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results the server will return. To + // request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.scheduler.v1beta1.ListJobsResponse.next_page_token] returned from + // the previous call to [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. It is an error to + // switch the value of [filter][google.cloud.scheduler.v1beta1.ListJobsRequest.filter] or + // [order_by][google.cloud.scheduler.v1beta1.ListJobsRequest.order_by] while iterating through pages. 
+ PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{0} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for listing jobs using [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. +type ListJobsResponse struct { + // The list of jobs. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // A token to retrieve next page of results. Pass this value in the + // [page_token][google.cloud.scheduler.v1beta1.ListJobsRequest.page_token] field in the subsequent call to + // [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs] to retrieve the next page of results. + // If this is empty it indicates that there are no more results + // through which to paginate. + // + // The page token is valid for only 2 hours. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{1} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. +type GetJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{2} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. +type CreateJobRequest struct { + // Required. + // + // The location name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The job to add. The user can optionally specify a name for the + // job in [name][google.cloud.scheduler.v1beta1.Job.name]. [name][google.cloud.scheduler.v1beta1.Job.name] cannot be the same as an + // existing job. 
If a name is not specified then the system will + // generate a random unique name that will be returned + // ([name][google.cloud.scheduler.v1beta1.Job.name]) in the response. + Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateJobRequest) Reset() { *m = CreateJobRequest{} } +func (m *CreateJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateJobRequest) ProtoMessage() {} +func (*CreateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{3} +} +func (m *CreateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateJobRequest.Unmarshal(m, b) +} +func (m *CreateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateJobRequest.Merge(dst, src) +} +func (m *CreateJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateJobRequest.Size(m) +} +func (m *CreateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateJobRequest proto.InternalMessageInfo + +func (m *CreateJobRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Request message for [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. +type UpdateJobRequest struct { + // Required. + // + // The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. + // + // Output only fields cannot be modified using UpdateJob. + // Any value specified for an output only field will be ignored. + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + // A mask used to specify which fields of the job are being updated. 
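A sketch of the field-mask pattern described above (illustrative only, not part of the vendored file). It assumes that Job, generated in job.pb.go of this same package, exposes Name and Schedule fields; the job name and cron schedule are hypothetical.

// Illustrative sketch only; values are hypothetical and the Job fields are assumed.
// import (
//     scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1"
//     field_mask "google.golang.org/genproto/protobuf/field_mask"
// )
func exampleUpdateRequest() *scheduler.UpdateJobRequest {
	return &scheduler.UpdateJobRequest{
		Job: &scheduler.Job{
			Name:     "projects/my-project/locations/us-central1/jobs/nightly", // hypothetical
			Schedule: "0 3 * * *",                                              // hypothetical cron spec
		},
		// Only the fields named in the mask are updated; output-only fields are ignored.
		UpdateMask: &field_mask.FieldMask{Paths: []string{"schedule"}},
	}
}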
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} } +func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobRequest) ProtoMessage() {} +func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{4} +} +func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b) +} +func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobRequest.Merge(dst, src) +} +func (m *UpdateJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobRequest.Size(m) +} +func (m *UpdateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo + +func (m *UpdateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for deleting a job using +// [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. +type DeleteJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } +func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobRequest) ProtoMessage() {} +func (*DeleteJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{5} +} +func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) +} +func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobRequest.Merge(dst, src) +} +func (m *DeleteJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobRequest.Size(m) +} +func (m *DeleteJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo + +func (m *DeleteJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. +type PauseJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseJobRequest) Reset() { *m = PauseJobRequest{} } +func (m *PauseJobRequest) String() string { return proto.CompactTextString(m) } +func (*PauseJobRequest) ProtoMessage() {} +func (*PauseJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{6} +} +func (m *PauseJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseJobRequest.Unmarshal(m, b) +} +func (m *PauseJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseJobRequest.Marshal(b, m, deterministic) +} +func (dst *PauseJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseJobRequest.Merge(dst, src) +} +func (m *PauseJobRequest) XXX_Size() int { + return xxx_messageInfo_PauseJobRequest.Size(m) +} +func (m *PauseJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseJobRequest proto.InternalMessageInfo + +func (m *PauseJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. +type ResumeJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeJobRequest) Reset() { *m = ResumeJobRequest{} } +func (m *ResumeJobRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeJobRequest) ProtoMessage() {} +func (*ResumeJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{7} +} +func (m *ResumeJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeJobRequest.Unmarshal(m, b) +} +func (m *ResumeJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeJobRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeJobRequest.Merge(dst, src) +} +func (m *ResumeJobRequest) XXX_Size() int { + return xxx_messageInfo_ResumeJobRequest.Size(m) +} +func (m *ResumeJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeJobRequest proto.InternalMessageInfo + +func (m *ResumeJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for forcing a job to run now using +// [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. +type RunJobRequest struct { + // Required. + // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunJobRequest) Reset() { *m = RunJobRequest{} } +func (m *RunJobRequest) String() string { return proto.CompactTextString(m) } +func (*RunJobRequest) ProtoMessage() {} +func (*RunJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudscheduler_9fba0107986f2580, []int{8} +} +func (m *RunJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunJobRequest.Unmarshal(m, b) +} +func (m *RunJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunJobRequest.Marshal(b, m, deterministic) +} +func (dst *RunJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunJobRequest.Merge(dst, src) +} +func (m *RunJobRequest) XXX_Size() int { + return xxx_messageInfo_RunJobRequest.Size(m) +} +func (m *RunJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunJobRequest proto.InternalMessageInfo + +func (m *RunJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.scheduler.v1beta1.ListJobsRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.scheduler.v1beta1.ListJobsResponse") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.scheduler.v1beta1.GetJobRequest") + proto.RegisterType((*CreateJobRequest)(nil), "google.cloud.scheduler.v1beta1.CreateJobRequest") + proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.scheduler.v1beta1.UpdateJobRequest") + proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.scheduler.v1beta1.DeleteJobRequest") + proto.RegisterType((*PauseJobRequest)(nil), "google.cloud.scheduler.v1beta1.PauseJobRequest") + proto.RegisterType((*ResumeJobRequest)(nil), "google.cloud.scheduler.v1beta1.ResumeJobRequest") + proto.RegisterType((*RunJobRequest)(nil), "google.cloud.scheduler.v1beta1.RunJobRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudSchedulerClient is the client API for CloudScheduler service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudSchedulerClient interface { + // Lists jobs. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Gets a job. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Creates a job. + CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Updates a job. + // + // If successful, the updated [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the job does + // not exist, `NOT_FOUND` is returned. + // + // If UpdateJob does not successfully return, it is possible for the + // job to be in an [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] state. A job in this state may + // not be executed. If this happens, retry the UpdateJob request + // until a successful response is received. 
+ UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Deletes a job. + DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Pauses a job. + // + // If a job is paused then the system will stop executing the job + // until it is re-enabled via [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. The + // state of the job is stored in [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + // will be set to [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. A job must be in [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + // to be paused. + PauseJob(ctx context.Context, in *PauseJobRequest, opts ...grpc.CallOption) (*Job, error) + // Resume a job. + // + // This method reenables a job after it has been [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. The + // state of a job is stored in [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after calling this method it + // will be set to [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED]. A job must be in + // [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED] to be resumed. + ResumeJob(ctx context.Context, in *ResumeJobRequest, opts ...grpc.CallOption) (*Job, error) + // Forces a job to run now. + // + // When this method is called, Cloud Scheduler will dispatch the job, even + // if the job is already running. + RunJob(ctx context.Context, in *RunJobRequest, opts ...grpc.CallOption) (*Job, error) +} + +type cloudSchedulerClient struct { + cc *grpc.ClientConn +} + +func NewCloudSchedulerClient(cc *grpc.ClientConn) CloudSchedulerClient { + return &cloudSchedulerClient{cc} +} + +func (c *cloudSchedulerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/CreateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/UpdateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/DeleteJob", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) PauseJob(ctx context.Context, in *PauseJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/PauseJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) ResumeJob(ctx context.Context, in *ResumeJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/ResumeJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudSchedulerClient) RunJob(ctx context.Context, in *RunJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.scheduler.v1beta1.CloudScheduler/RunJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudSchedulerServer is the server API for CloudScheduler service. +type CloudSchedulerServer interface { + // Lists jobs. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Gets a job. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Creates a job. + CreateJob(context.Context, *CreateJobRequest) (*Job, error) + // Updates a job. + // + // If successful, the updated [Job][google.cloud.scheduler.v1beta1.Job] is returned. If the job does + // not exist, `NOT_FOUND` is returned. + // + // If UpdateJob does not successfully return, it is possible for the + // job to be in an [Job.State.UPDATE_FAILED][google.cloud.scheduler.v1beta1.Job.State.UPDATE_FAILED] state. A job in this state may + // not be executed. If this happens, retry the UpdateJob request + // until a successful response is received. + UpdateJob(context.Context, *UpdateJobRequest) (*Job, error) + // Deletes a job. + DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error) + // Pauses a job. + // + // If a job is paused then the system will stop executing the job + // until it is re-enabled via [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. The + // state of the job is stored in [state][google.cloud.scheduler.v1beta1.Job.state]; if paused it + // will be set to [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. A job must be in [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED] + // to be paused. + PauseJob(context.Context, *PauseJobRequest) (*Job, error) + // Resume a job. + // + // This method reenables a job after it has been [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED]. The + // state of a job is stored in [Job.state][google.cloud.scheduler.v1beta1.Job.state]; after calling this method it + // will be set to [Job.State.ENABLED][google.cloud.scheduler.v1beta1.Job.State.ENABLED]. A job must be in + // [Job.State.PAUSED][google.cloud.scheduler.v1beta1.Job.State.PAUSED] to be resumed. + ResumeJob(context.Context, *ResumeJobRequest) (*Job, error) + // Forces a job to run now. + // + // When this method is called, Cloud Scheduler will dispatch the job, even + // if the job is already running. 
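Putting the generated client together (illustrative only, not part of the vendored file): listing jobs over an already dialed *grpc.ClientConn. The parent value is hypothetical, and a real connection also needs transport security and credentials.

// Illustrative sketch only; values are hypothetical.
// import (
//     "golang.org/x/net/context"
//     scheduler "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1"
//     "google.golang.org/grpc"
// )
func exampleListJobs(ctx context.Context, conn *grpc.ClientConn) error {
	client := scheduler.NewCloudSchedulerClient(conn)
	resp, err := client.ListJobs(ctx, &scheduler.ListJobsRequest{
		Parent:   "projects/my-project/locations/us-central1", // hypothetical
		PageSize: 100,
	})
	if err != nil {
		return err
	}
	for _, job := range resp.GetJobs() {
		_ = job // each entry is a *scheduler.Job; paginate with resp.GetNextPageToken()
	}
	return nil
}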
+ RunJob(context.Context, *RunJobRequest) (*Job, error) +} + +func RegisterCloudSchedulerServer(s *grpc.Server, srv CloudSchedulerServer) { + s.RegisterService(&_CloudScheduler_serviceDesc, srv) +} + +func _CloudScheduler_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).CreateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/CreateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).CreateJob(ctx, req.(*CreateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).UpdateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/UpdateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).UpdateJob(ctx, req.(*UpdateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/DeleteJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _CloudScheduler_PauseJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).PauseJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/PauseJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).PauseJob(ctx, req.(*PauseJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_ResumeJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).ResumeJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/ResumeJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).ResumeJob(ctx, req.(*ResumeJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudScheduler_RunJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSchedulerServer).RunJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.scheduler.v1beta1.CloudScheduler/RunJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSchedulerServer).RunJob(ctx, req.(*RunJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudScheduler_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.scheduler.v1beta1.CloudScheduler", + HandlerType: (*CloudSchedulerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListJobs", + Handler: _CloudScheduler_ListJobs_Handler, + }, + { + MethodName: "GetJob", + Handler: _CloudScheduler_GetJob_Handler, + }, + { + MethodName: "CreateJob", + Handler: _CloudScheduler_CreateJob_Handler, + }, + { + MethodName: "UpdateJob", + Handler: _CloudScheduler_UpdateJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _CloudScheduler_DeleteJob_Handler, + }, + { + MethodName: "PauseJob", + Handler: _CloudScheduler_PauseJob_Handler, + }, + { + MethodName: "ResumeJob", + Handler: _CloudScheduler_ResumeJob_Handler, + }, + { + MethodName: "RunJob", + Handler: _CloudScheduler_RunJob_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/scheduler/v1beta1/cloudscheduler.proto", +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1beta1/cloudscheduler.proto", fileDescriptor_cloudscheduler_9fba0107986f2580) +} + +var fileDescriptor_cloudscheduler_9fba0107986f2580 = []byte{ + // 731 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x96, 0x4b, 0x4f, 0xdb, 0x4a, + 0x14, 0xc7, 0x35, 0x3c, 0x22, 0x72, 0x10, 0x10, 0xcd, 0x02, 0xe5, 0x86, 0x7b, 0xaf, 0x22, 0xa3, + 0x8b, 0xa2, 0x48, 0x64, 0x6e, 0x78, 0xf4, 0x11, 0xfa, 0x90, 0x78, 0x94, 0x0a, 0x51, 0x29, 0x32, + 0x65, 0xd3, 0x0d, 0x1a, 0x27, 0x83, 0xeb, 
0xe0, 0x78, 0x5c, 0x8f, 0x5d, 0xb5, 0x54, 0x6c, 0xba, + 0xa8, 0x2a, 0xb5, 0x3b, 0x76, 0x5d, 0x54, 0x15, 0xed, 0x37, 0xea, 0x57, 0xe8, 0x07, 0xa9, 0x66, + 0xfc, 0x80, 0xb8, 0x80, 0xed, 0x5d, 0x3c, 0xe7, 0x9c, 0x39, 0xbf, 0x39, 0xe7, 0xfc, 0x8f, 0x02, + 0xab, 0x26, 0xe7, 0xa6, 0xcd, 0x48, 0xcf, 0xe6, 0x41, 0x9f, 0x88, 0xde, 0x4b, 0xd6, 0x0f, 0x6c, + 0xe6, 0x91, 0xd7, 0x6d, 0x83, 0xf9, 0xb4, 0x1d, 0x9e, 0x27, 0xc7, 0x2d, 0xd7, 0xe3, 0x3e, 0xc7, + 0xff, 0x86, 0x41, 0x2d, 0x65, 0x6c, 0x5d, 0x5a, 0xa3, 0xa0, 0xda, 0xdf, 0xd1, 0xa5, 0xd4, 0xb5, + 0x08, 0x75, 0x1c, 0xee, 0x53, 0xdf, 0xe2, 0x8e, 0x08, 0xa3, 0x6b, 0x7f, 0x5d, 0xb1, 0x7a, 0x4c, + 0xf0, 0xc0, 0xeb, 0xb1, 0xc8, 0xd4, 0xc8, 0xa0, 0x19, 0x70, 0x23, 0xf2, 0x5c, 0x88, 0x3c, 0xd5, + 0x97, 0x11, 0x1c, 0x13, 0x36, 0x74, 0xfd, 0xb7, 0x91, 0xb1, 0x9e, 0x36, 0x1e, 0x5b, 0xcc, 0xee, + 0x1f, 0x0d, 0xa9, 0x38, 0x09, 0x3d, 0x34, 0x06, 0x73, 0xfb, 0x96, 0xf0, 0xf7, 0xb8, 0x21, 0x74, + 0xf6, 0x2a, 0x60, 0xc2, 0xc7, 0xf3, 0x50, 0x72, 0xa9, 0xc7, 0x1c, 0xbf, 0x8a, 0xea, 0xa8, 0x51, + 0xd6, 0xa3, 0x2f, 0xbc, 0x00, 0x65, 0x97, 0x9a, 0xec, 0x48, 0x58, 0xa7, 0xac, 0x3a, 0x59, 0x47, + 0x8d, 0x49, 0x7d, 0x4a, 0x1e, 0x1c, 0x58, 0xa7, 0x0c, 0xff, 0x03, 0xa0, 0x8c, 0x3e, 0x3f, 0x61, + 0x4e, 0xb5, 0xa4, 0x02, 0x95, 0xfb, 0x73, 0x79, 0xa0, 0x09, 0xa8, 0x5c, 0xa6, 0x11, 0x2e, 0x77, + 0x04, 0xc3, 0x77, 0x61, 0x62, 0xc0, 0x0d, 0x51, 0x45, 0xf5, 0xf1, 0xc6, 0xf4, 0xca, 0x62, 0xeb, + 0xf6, 0x5a, 0xb6, 0xf6, 0xb8, 0xa1, 0xab, 0x00, 0xbc, 0x04, 0x73, 0x0e, 0x7b, 0xe3, 0x1f, 0x5d, + 0x49, 0x38, 0xa6, 0x12, 0xce, 0xc8, 0xe3, 0x6e, 0x92, 0x74, 0x11, 0x66, 0x76, 0x99, 0xcc, 0x19, + 0xbf, 0x0c, 0xc3, 0x84, 0x43, 0x87, 0x2c, 0x7a, 0x97, 0xfa, 0xad, 0x51, 0xa8, 0x6c, 0x79, 0x8c, + 0xfa, 0xec, 0x8a, 0xdf, 0x4d, 0x15, 0x58, 0x87, 0xf1, 0x01, 0x37, 0x54, 0xb2, 0x9c, 0xc0, 0xd2, + 0x5f, 0xfb, 0x80, 0xa0, 0x72, 0xe8, 0xf6, 0x47, 0x73, 0x44, 0x77, 0xa1, 0x62, 0x77, 0xe1, 0x0d, + 0x98, 0x0e, 0xd4, 0x55, 0xaa, 0x89, 0x11, 0x4a, 0x2d, 0x0e, 0x8f, 0xfb, 0xdc, 0x7a, 0x22, 0xfb, + 0xfc, 0x8c, 0x8a, 0x13, 0x1d, 0x42, 0x77, 0xf9, 0x5b, 0x5b, 0x82, 0xca, 0x36, 0xb3, 0xd9, 0x08, + 0xc7, 0x75, 0x35, 0xf9, 0x0f, 0xe6, 0xba, 0x34, 0x10, 0x59, 0x6e, 0x4b, 0x50, 0xd1, 0x99, 0x08, + 0x86, 0x59, 0x7e, 0x8b, 0x30, 0xa3, 0x07, 0xce, 0xed, 0x4e, 0x2b, 0x1f, 0x01, 0x66, 0xb7, 0xe4, + 0xeb, 0x0f, 0xe2, 0xc7, 0xe3, 0x1f, 0x08, 0xa6, 0xe2, 0xa9, 0xc1, 0x24, 0xab, 0x44, 0xa9, 0x31, + 0xae, 0xfd, 0x9f, 0x3f, 0x20, 0x1c, 0x48, 0x6d, 0xfd, 0xfd, 0xcf, 0x5f, 0xe7, 0x63, 0x04, 0x2f, + 0x27, 0x32, 0x7b, 0x17, 0x36, 0xfe, 0xa1, 0xeb, 0xf1, 0x01, 0xeb, 0xf9, 0x82, 0x34, 0x89, 0xcd, + 0x7b, 0xa1, 0x88, 0x49, 0xf3, 0x8c, 0xa8, 0x71, 0x3c, 0x47, 0x50, 0x0a, 0xe7, 0x0c, 0x2f, 0x67, + 0xe5, 0x1c, 0x99, 0xc7, 0x5a, 0x9e, 0xb6, 0x5f, 0x47, 0x25, 0x0b, 0x76, 0x03, 0x93, 0x42, 0x22, + 0xcd, 0x33, 0xfc, 0x15, 0x41, 0x39, 0x19, 0x6c, 0x9c, 0x59, 0x8c, 0xb4, 0x06, 0xf2, 0xb1, 0x75, + 0x14, 0xdb, 0x9a, 0x56, 0xac, 0x62, 0x1d, 0x35, 0xc9, 0x17, 0x08, 0xca, 0x89, 0x2a, 0xb2, 0x01, + 0xd3, 0x02, 0xca, 0x07, 0xf8, 0x48, 0x01, 0xde, 0x5b, 0x69, 0x5f, 0x02, 0xca, 0xd5, 0x99, 0xa3, + 0x80, 0x21, 0xe4, 0x67, 0x04, 0xe5, 0x44, 0x32, 0xd9, 0x90, 0x69, 0x75, 0xd5, 0xe6, 0xff, 0x50, + 0xe6, 0x8e, 0x5c, 0xcf, 0x71, 0x53, 0x9b, 0x05, 0x9b, 0xfa, 0x0d, 0xc1, 0x54, 0xac, 0xcc, 0x6c, + 0x45, 0xa4, 0x34, 0x5c, 0xa8, 0x62, 0xda, 0x6a, 0x21, 0xb2, 0x8e, 0x2b, 0x73, 0x75, 0x50, 0x13, + 0x7f, 0x47, 0x50, 0x4e, 0xb6, 0x42, 0x76, 0xc5, 0xd2, 0x0b, 0x24, 0x1f, 0xe4, 0x63, 0x05, 0x79, + 0x5f, 0x5b, 0x2b, 0x06, 0xe9, 0xa9, 0x64, 0x92, 0xf2, 0x0b, 0x82, 
0x52, 0xb8, 0x93, 0xb2, 0x35, + 0x3b, 0xb2, 0xbb, 0xf2, 0xf1, 0x3d, 0x50, 0x7c, 0x77, 0xb4, 0x76, 0x41, 0xbe, 0xc0, 0xe9, 0xa0, + 0xe6, 0xe6, 0x27, 0x04, 0x5a, 0x8f, 0x0f, 0x33, 0x12, 0x6d, 0xce, 0x26, 0x9b, 0xb2, 0x2b, 0x87, + 0xab, 0x8b, 0x5e, 0xec, 0x46, 0x11, 0x26, 0xb7, 0xa9, 0x63, 0xb6, 0xb8, 0x67, 0x12, 0x93, 0x39, + 0x6a, 0xf4, 0x48, 0x68, 0xa2, 0xae, 0x25, 0x6e, 0xfa, 0x53, 0xb1, 0x91, 0x9c, 0x5c, 0x8c, 0x95, + 0x0f, 0xb6, 0x9e, 0xee, 0x6c, 0x1f, 0xee, 0xef, 0xe8, 0x46, 0x49, 0xc5, 0xaf, 0xfe, 0x0e, 0x00, + 0x00, 0xff, 0xff, 0x42, 0xed, 0xf2, 0xae, 0x21, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/job.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/job.pb.go new file mode 100644 index 0000000..c6cee09 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/job.pb.go @@ -0,0 +1,597 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/scheduler/v1beta1/job.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the job. +type Job_State int32 + +const ( + // Unspecified state. + Job_STATE_UNSPECIFIED Job_State = 0 + // The job is executing normally. + Job_ENABLED Job_State = 1 + // The job is paused by the user. It will not execute. A user can + // intentionally pause the job using + // [PauseJobRequest][google.cloud.scheduler.v1beta1.PauseJobRequest]. + Job_PAUSED Job_State = 2 + // The job is disabled by the system due to error. The user + // cannot directly set a job to be disabled. + Job_DISABLED Job_State = 3 + // The job state resulting from a failed [CloudScheduler.UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob] + // operation. To recover a job from this state, retry + // [CloudScheduler.UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob] until a successful response is received. + Job_UPDATE_FAILED Job_State = 4 +) + +var Job_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "ENABLED", + 2: "PAUSED", + 3: "DISABLED", + 4: "UPDATE_FAILED", +} +var Job_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "ENABLED": 1, + "PAUSED": 2, + "DISABLED": 3, + "UPDATE_FAILED": 4, +} + +func (x Job_State) String() string { + return proto.EnumName(Job_State_name, int32(x)) +} +func (Job_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_4dcb53c8b6d297f5, []int{0, 0} +} + +// Configuration for a job. +// The maximum allowed size for a job is 100KB. +type Job struct { + // Optionally caller-specified in [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob], after + // which it becomes output only. 
+ // + // The job name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the job's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `JOB_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), or underscores (_). The maximum length is 500 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optionally caller-specified in [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob] or + // [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + // + // A human-readable description for the job. This string must not contain + // more than 500 characters. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Required. + // + // Delivery settings containing destination and parameters. + // + // Types that are valid to be assigned to Target: + // *Job_PubsubTarget + // *Job_AppEngineHttpTarget + // *Job_HttpTarget + Target isJob_Target `protobuf_oneof:"target"` + // Required, except when used with [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. + // + // Describes the schedule on which the job will be executed. + // + // The schedule can be either of the following types: + // + // * [Crontab](http://en.wikipedia.org/wiki/Cron#Overview) + // * English-like + // [schedule](https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules) + // + // As a general rule, execution `n + 1` of a job will not begin + // until execution `n` has finished. Cloud Scheduler will never + // allow two simultaneously outstanding executions. For example, + // this implies that if the `n+1`th execution is scheduled to run at + // 16:00 but the `n`th execution takes until 16:15, the `n+1`th + // execution will not start until `16:15`. + // A scheduled start time will be delayed if the previous + // execution has not ended when its scheduled time occurs. + // + // If [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] > 0 and a job attempt fails, + // the job will be tried a total of [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] + // times, with exponential backoff, until the next scheduled start + // time. + Schedule string `protobuf:"bytes,20,opt,name=schedule,proto3" json:"schedule,omitempty"` + // Specifies the time zone to be used in interpreting + // [schedule][google.cloud.scheduler.v1beta1.Job.schedule]. The value of this field must be a time + // zone name from the [tz database](http://en.wikipedia.org/wiki/Tz_database). + // + // Note that some time zones include a provision for + // daylight savings time. The rules for daylight saving time are + // determined by the chosen tz. For UTC use the string "utc". If a + // time zone is not specified, the default will be in UTC (also known + // as GMT). + TimeZone string `protobuf:"bytes,21,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + // Output only. The creation time of the job. 
+ UserUpdateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=user_update_time,json=userUpdateTime,proto3" json:"user_update_time,omitempty"` + // Output only. State of the job. + State Job_State `protobuf:"varint,10,opt,name=state,proto3,enum=google.cloud.scheduler.v1beta1.Job_State" json:"state,omitempty"` + // Output only. The response from the target for the last attempted execution. + Status *status.Status `protobuf:"bytes,11,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The next time the job is scheduled. Note that this may be a + // retry of a previously failed attempt or the next execution time + // according to the schedule. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,17,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time the last job attempt started. + LastAttemptTime *timestamp.Timestamp `protobuf:"bytes,18,opt,name=last_attempt_time,json=lastAttemptTime,proto3" json:"last_attempt_time,omitempty"` + // Settings that determine the retry behavior. + RetryConfig *RetryConfig `protobuf:"bytes,19,opt,name=retry_config,json=retryConfig,proto3" json:"retry_config,omitempty"` + // The deadline for job attempts. If the request handler does not respond by + // this deadline then the request is cancelled and the attempt is marked as a + // `DEADLINE_EXCEEDED` failure. The failed attempt can be viewed in + // execution logs. Cloud Scheduler will retry the job according + // to the [RetryConfig][google.cloud.scheduler.v1beta1.RetryConfig]. + // + // The allowed duration for this deadline is: + // + // * For [HTTP targets][google.cloud.scheduler.v1beta1.Job.http_target], between 15 seconds and 30 minutes. + // * For [App Engine HTTP targets][google.cloud.scheduler.v1beta1.Job.app_engine_http_target], between 15 + // seconds and 24 hours. + // * For [PubSub targets][google.cloud.scheduler.v1beta1.Job.pubsub_target], this field is ignored. 
+ AttemptDeadline *duration.Duration `protobuf:"bytes,22,opt,name=attempt_deadline,json=attemptDeadline,proto3" json:"attempt_deadline,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_job_4dcb53c8b6d297f5, []int{0} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Job) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isJob_Target interface { + isJob_Target() +} + +type Job_PubsubTarget struct { + PubsubTarget *PubsubTarget `protobuf:"bytes,4,opt,name=pubsub_target,json=pubsubTarget,proto3,oneof"` +} + +type Job_AppEngineHttpTarget struct { + AppEngineHttpTarget *AppEngineHttpTarget `protobuf:"bytes,5,opt,name=app_engine_http_target,json=appEngineHttpTarget,proto3,oneof"` +} + +type Job_HttpTarget struct { + HttpTarget *HttpTarget `protobuf:"bytes,6,opt,name=http_target,json=httpTarget,proto3,oneof"` +} + +func (*Job_PubsubTarget) isJob_Target() {} + +func (*Job_AppEngineHttpTarget) isJob_Target() {} + +func (*Job_HttpTarget) isJob_Target() {} + +func (m *Job) GetTarget() isJob_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *Job) GetPubsubTarget() *PubsubTarget { + if x, ok := m.GetTarget().(*Job_PubsubTarget); ok { + return x.PubsubTarget + } + return nil +} + +func (m *Job) GetAppEngineHttpTarget() *AppEngineHttpTarget { + if x, ok := m.GetTarget().(*Job_AppEngineHttpTarget); ok { + return x.AppEngineHttpTarget + } + return nil +} + +func (m *Job) GetHttpTarget() *HttpTarget { + if x, ok := m.GetTarget().(*Job_HttpTarget); ok { + return x.HttpTarget + } + return nil +} + +func (m *Job) GetSchedule() string { + if m != nil { + return m.Schedule + } + return "" +} + +func (m *Job) GetTimeZone() string { + if m != nil { + return m.TimeZone + } + return "" +} + +func (m *Job) GetUserUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UserUpdateTime + } + return nil +} + +func (m *Job) GetState() Job_State { + if m != nil { + return m.State + } + return Job_STATE_UNSPECIFIED +} + +func (m *Job) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *Job) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Job) GetLastAttemptTime() *timestamp.Timestamp { + if m != nil { + return m.LastAttemptTime + } + return nil +} + +func (m *Job) GetRetryConfig() *RetryConfig { + if m != nil { + return m.RetryConfig + } + return nil +} + +func (m *Job) GetAttemptDeadline() *duration.Duration { + if m != nil { + return m.AttemptDeadline + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{ + (*Job_PubsubTarget)(nil), + (*Job_AppEngineHttpTarget)(nil), + (*Job_HttpTarget)(nil), + } +} + +func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Job) + // target + switch x := m.Target.(type) { + case *Job_PubsubTarget: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PubsubTarget); err != nil { + return err + } + case *Job_AppEngineHttpTarget: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpTarget); err != nil { + return err + } + case *Job_HttpTarget: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpTarget); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Job.Target has unexpected type %T", x) + } + return nil +} + +func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Job) + switch tag { + case 4: // target.pubsub_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PubsubTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_PubsubTarget{msg} + return true, err + case 5: // target.app_engine_http_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_AppEngineHttpTarget{msg} + return true, err + case 6: // target.http_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HttpTarget) + err := b.DecodeMessage(msg) + m.Target = &Job_HttpTarget{msg} + return true, err + default: + return false, nil + } +} + +func _Job_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Job) + // target + switch x := m.Target.(type) { + case *Job_PubsubTarget: + s := proto.Size(x.PubsubTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_AppEngineHttpTarget: + s := proto.Size(x.AppEngineHttpTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Job_HttpTarget: + s := proto.Size(x.HttpTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Settings that determine the retry behavior. +// +// By default, if a job does not complete successfully (meaning that +// an acknowledgement is not received from the handler, then it will be retried +// with exponential backoff according to the settings in [RetryConfig][google.cloud.scheduler.v1beta1.RetryConfig]. +type RetryConfig struct { + // The number of attempts that the system will make to run a job using the + // exponential backoff procedure described by + // [max_doublings][google.cloud.scheduler.v1beta1.RetryConfig.max_doublings]. + // + // The default value of retry_count is zero. + // + // If retry_count is zero, a job attempt will *not* be retried if + // it fails. Instead the Cloud Scheduler system will wait for the + // next scheduled execution time. + // + // If retry_count is set to a non-zero number then Cloud Scheduler + // will retry failed attempts, using exponential backoff, + // retry_count times, or until the next scheduled execution time, + // whichever comes first. 
+ // + // Values greater than 5 and negative values are not allowed. + RetryCount int32 `protobuf:"varint,1,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + // The time limit for retrying a failed job, measured from the time when an + // execution was first attempted. If specified with + // [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count], the job will be retried until both + // limits are reached. + // + // The default value for max_retry_duration is zero, which means retry + // duration is unlimited. + MaxRetryDuration *duration.Duration `protobuf:"bytes,2,opt,name=max_retry_duration,json=maxRetryDuration,proto3" json:"max_retry_duration,omitempty"` + // The minimum amount of time to wait before retrying a job after + // it fails. + // + // The default value of this field is 5 seconds. + MinBackoffDuration *duration.Duration `protobuf:"bytes,3,opt,name=min_backoff_duration,json=minBackoffDuration,proto3" json:"min_backoff_duration,omitempty"` + // The maximum amount of time to wait before retrying a job after + // it fails. + // + // The default value of this field is 1 hour. + MaxBackoffDuration *duration.Duration `protobuf:"bytes,4,opt,name=max_backoff_duration,json=maxBackoffDuration,proto3" json:"max_backoff_duration,omitempty"` + // The time between retries will double `max_doublings` times. + // + // A job's retry interval starts at + // [min_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.min_backoff_duration], then doubles + // `max_doublings` times, then increases linearly, and finally + // retries at intervals of + // [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration] up to + // [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] times. + // + // For example, if [min_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.min_backoff_duration] is + // 10s, [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration] is 300s, and + // `max_doublings` is 3, then a job will first be retried in 10s. The + // retry interval will double three times, and then increase linearly by + // 2^3 * 10s. Finally, the job will retry at intervals of + // [max_backoff_duration][google.cloud.scheduler.v1beta1.RetryConfig.max_backoff_duration] until the job has + // been attempted [retry_count][google.cloud.scheduler.v1beta1.RetryConfig.retry_count] times. Thus, the + // requests will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, .... + // + // The default value of this field is 5.
+ MaxDoublings int32 `protobuf:"varint,5,opt,name=max_doublings,json=maxDoublings,proto3" json:"max_doublings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryConfig) Reset() { *m = RetryConfig{} } +func (m *RetryConfig) String() string { return proto.CompactTextString(m) } +func (*RetryConfig) ProtoMessage() {} +func (*RetryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_job_4dcb53c8b6d297f5, []int{1} +} +func (m *RetryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryConfig.Unmarshal(m, b) +} +func (m *RetryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryConfig.Marshal(b, m, deterministic) +} +func (dst *RetryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryConfig.Merge(dst, src) +} +func (m *RetryConfig) XXX_Size() int { + return xxx_messageInfo_RetryConfig.Size(m) +} +func (m *RetryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RetryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryConfig proto.InternalMessageInfo + +func (m *RetryConfig) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *RetryConfig) GetMaxRetryDuration() *duration.Duration { + if m != nil { + return m.MaxRetryDuration + } + return nil +} + +func (m *RetryConfig) GetMinBackoffDuration() *duration.Duration { + if m != nil { + return m.MinBackoffDuration + } + return nil +} + +func (m *RetryConfig) GetMaxBackoffDuration() *duration.Duration { + if m != nil { + return m.MaxBackoffDuration + } + return nil +} + +func (m *RetryConfig) GetMaxDoublings() int32 { + if m != nil { + return m.MaxDoublings + } + return 0 +} + +func init() { + proto.RegisterType((*Job)(nil), "google.cloud.scheduler.v1beta1.Job") + proto.RegisterType((*RetryConfig)(nil), "google.cloud.scheduler.v1beta1.RetryConfig") + proto.RegisterEnum("google.cloud.scheduler.v1beta1.Job_State", Job_State_name, Job_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1beta1/job.proto", fileDescriptor_job_4dcb53c8b6d297f5) +} + +var fileDescriptor_job_4dcb53c8b6d297f5 = []byte{ + // 728 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0x4f, 0x4f, 0xeb, 0x46, + 0x14, 0xc5, 0x09, 0x24, 0x21, 0x5c, 0x27, 0x90, 0x0c, 0x7f, 0x6a, 0xd2, 0xaa, 0x44, 0xe9, 0x26, + 0x85, 0xca, 0x16, 0xb0, 0xec, 0x02, 0x25, 0x38, 0x40, 0x68, 0x8b, 0x22, 0x27, 0x91, 0x2a, 0x36, + 0xd6, 0xd8, 0x9e, 0x18, 0xd3, 0x78, 0x66, 0x64, 0x8f, 0xab, 0xf0, 0x3e, 0xd8, 0x5b, 0xbc, 0x4f, + 0xf7, 0xe4, 0xb1, 0x1d, 0x02, 0xe8, 0xe1, 0x9d, 0xe7, 0xde, 0x73, 0x7e, 0xf7, 0xcc, 0x8c, 0x6d, + 0xe8, 0x79, 0x8c, 0x79, 0x0b, 0xa2, 0x3b, 0x0b, 0x16, 0xbb, 0x7a, 0xe4, 0x3c, 0x11, 0x37, 0x5e, + 0x90, 0x50, 0xff, 0xff, 0xdc, 0x26, 0x02, 0x9f, 0xeb, 0xcf, 0xcc, 0xd6, 0x78, 0xc8, 0x04, 0x43, + 0xbf, 0xa6, 0x4a, 0x4d, 0x2a, 0xb5, 0x95, 0x52, 0xcb, 0x94, 0xed, 0x5f, 0x32, 0x12, 0xe6, 0xbe, + 0x8e, 0x29, 0x65, 0x02, 0x0b, 0x9f, 0xd1, 0x28, 0x75, 0xb7, 0x8f, 0xd7, 0xba, 0x21, 0x89, 0x58, + 0x1c, 0x3a, 0x24, 0x6b, 0x9d, 0x15, 0x44, 0x10, 0x38, 0xf4, 0x88, 0xc8, 0xc4, 0x59, 0x0a, 0x5d, + 0xae, 0xec, 0x78, 0xae, 0xbb, 0x71, 0x28, 0x07, 0x65, 0xfd, 0x93, 0xf7, 0x7d, 0xe1, 0x07, 0x24, + 0x12, 0x38, 0xe0, 0x99, 0xe0, 0xa7, 0x4c, 0x10, 0x72, 0x47, 0x8f, 0x04, 0x16, 0x71, 0x96, 0xb0, + 0xfb, 0x6d, 0x1b, 0xb6, 0xee, 0x99, 0x8d, 0x10, 0x94, 0x29, 0x0e, 0x88, 0x5a, 0xea, 0x94, 0x7a, + 0x3b, 0xa6, 0x7c, 0x46, 
0x1d, 0x50, 0x5c, 0x12, 0x39, 0xa1, 0xcf, 0x93, 0x51, 0xea, 0xa6, 0x6c, + 0xad, 0x97, 0xd0, 0x04, 0x1a, 0x3c, 0xb6, 0xa3, 0xd8, 0xb6, 0xd2, 0xb8, 0x6a, 0xb9, 0x53, 0xea, + 0x29, 0x17, 0x7f, 0x68, 0x9f, 0x9f, 0x9a, 0x36, 0x96, 0xa6, 0xa9, 0xf4, 0xdc, 0x6d, 0x98, 0x75, + 0xbe, 0xb6, 0x46, 0xcf, 0x70, 0x84, 0x39, 0xb7, 0x08, 0xf5, 0x7c, 0x4a, 0xac, 0x27, 0x21, 0x78, + 0x4e, 0xaf, 0x48, 0xfa, 0x65, 0x11, 0xbd, 0xcf, 0xf9, 0x50, 0x9a, 0xef, 0x84, 0xe0, 0xab, 0x21, + 0xfb, 0xf8, 0x63, 0x19, 0xfd, 0x03, 0xca, 0xfa, 0x80, 0xaa, 0x1c, 0x70, 0x5a, 0x34, 0xe0, 0x0d, + 0x17, 0x9e, 0x5e, 0x71, 0x6d, 0xa8, 0xe5, 0x6a, 0xf5, 0x40, 0x1e, 0xd7, 0x6a, 0x8d, 0x7e, 0x86, + 0x9d, 0xe4, 0x56, 0xac, 0x2f, 0x8c, 0x12, 0xf5, 0x30, 0x6d, 0x26, 0x85, 0x47, 0x46, 0x09, 0x32, + 0xa0, 0x19, 0x47, 0x24, 0xb4, 0x62, 0xee, 0x62, 0x41, 0xac, 0xa4, 0xae, 0xee, 0xc8, 0x30, 0xed, + 0x3c, 0x4c, 0x7e, 0xb7, 0xda, 0x34, 0xbf, 0x5b, 0x73, 0x37, 0xf1, 0xcc, 0xa4, 0x25, 0x29, 0xa2, + 0x2b, 0xa8, 0x24, 0x97, 0x4b, 0x54, 0xe8, 0x94, 0x7a, 0xbb, 0x17, 0xbf, 0x17, 0xed, 0xe3, 0x9e, + 0xd9, 0xda, 0x24, 0x31, 0x98, 0xa9, 0x0f, 0x9d, 0x42, 0x35, 0x7d, 0x3b, 0x54, 0x45, 0x0e, 0x47, + 0x39, 0x21, 0xe4, 0x8e, 0x54, 0xc6, 0x91, 0x99, 0x29, 0xd0, 0x15, 0x34, 0x72, 0x62, 0x9a, 0xb7, + 0x55, 0x98, 0xb7, 0x9e, 0x1b, 0x64, 0xda, 0x1b, 0x68, 0x2d, 0x70, 0x24, 0x2c, 0x2c, 0x04, 0x09, + 0xb8, 0x48, 0x21, 0xa8, 0x10, 0xb2, 0x97, 0x98, 0xfa, 0xa9, 0x47, 0x72, 0x1e, 0xa0, 0x1e, 0x12, + 0x11, 0xbe, 0x58, 0x0e, 0xa3, 0x73, 0xdf, 0x53, 0xf7, 0x25, 0xe2, 0xac, 0x68, 0xf3, 0x66, 0xe2, + 0xb9, 0x96, 0x16, 0x53, 0x09, 0x5f, 0x17, 0xc9, 0x5d, 0xe4, 0x91, 0x5c, 0x82, 0xdd, 0x85, 0x4f, + 0x89, 0x7a, 0x24, 0x99, 0xc7, 0x1f, 0x62, 0x19, 0xd9, 0x77, 0x68, 0xee, 0x65, 0x16, 0x23, 0x73, + 0x74, 0xff, 0x85, 0x8a, 0x3c, 0x5a, 0x74, 0x08, 0xad, 0xc9, 0xb4, 0x3f, 0x1d, 0x5a, 0xb3, 0x87, + 0xc9, 0x78, 0x78, 0x3d, 0xba, 0x19, 0x0d, 0x8d, 0xe6, 0x06, 0x52, 0x60, 0x7b, 0xf8, 0xd0, 0x1f, + 0xfc, 0x3d, 0x34, 0x9a, 0x25, 0x04, 0x50, 0x1d, 0xf7, 0x67, 0x93, 0xa1, 0xd1, 0xdc, 0x44, 0x75, + 0xa8, 0x19, 0xa3, 0x49, 0xda, 0xd9, 0x42, 0x2d, 0x68, 0xcc, 0xc6, 0x46, 0x62, 0xbf, 0xe9, 0x8f, + 0x92, 0x52, 0x79, 0x50, 0x83, 0x6a, 0xfa, 0xba, 0x76, 0xbf, 0x6e, 0x82, 0xb2, 0xb6, 0x0d, 0x74, + 0x02, 0x4a, 0x7e, 0x12, 0x31, 0x15, 0xf2, 0x5b, 0xae, 0x98, 0x90, 0xed, 0x2d, 0xa6, 0x02, 0xdd, + 0x02, 0x0a, 0xf0, 0xd2, 0x4a, 0x45, 0xf9, 0x3f, 0x44, 0x7e, 0xd8, 0x9f, 0x6e, 0xae, 0x19, 0xe0, + 0xa5, 0x9c, 0x93, 0x57, 0xd0, 0x5f, 0x70, 0x10, 0xf8, 0xd4, 0xb2, 0xb1, 0xf3, 0x1f, 0x9b, 0xcf, + 0x5f, 0x51, 0x5b, 0x45, 0x28, 0x14, 0xf8, 0x74, 0x90, 0xba, 0xde, 0xc0, 0xf0, 0xf2, 0x23, 0xac, + 0x5c, 0x0c, 0xc3, 0xcb, 0xf7, 0xb0, 0xdf, 0xa0, 0x91, 0xc0, 0x5c, 0x16, 0xdb, 0x0b, 0x9f, 0x7a, + 0x91, 0xfc, 0x69, 0x54, 0xcc, 0x7a, 0x80, 0x97, 0x46, 0x5e, 0x1b, 0xbc, 0x40, 0xd7, 0x61, 0x41, + 0xc1, 0x1b, 0x32, 0xa8, 0xdd, 0x33, 0x7b, 0x9c, 0x0c, 0x1d, 0x97, 0x1e, 0x6f, 0x33, 0xad, 0xc7, + 0x16, 0x98, 0x7a, 0x1a, 0x0b, 0x3d, 0xdd, 0x23, 0x54, 0x46, 0xd2, 0xd3, 0x16, 0xe6, 0x7e, 0xf4, + 0xa3, 0xbf, 0xf9, 0x9f, 0xab, 0x8a, 0x5d, 0x95, 0x9e, 0xcb, 0xef, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x9d, 0xf8, 0x5c, 0x97, 0x83, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/target.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/target.pb.go new file mode 100644 index 0000000..23bb3e2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1/target.pb.go @@ -0,0 +1,819 @@ +// Code generated by protoc-gen-go. 
DO NOT EDIT. +// source: google/cloud/scheduler/v1beta1/target.proto + +package scheduler // import "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The HTTP method used to execute the job. +type HttpMethod int32 + +const ( + // HTTP method unspecified. Defaults to POST. + HttpMethod_HTTP_METHOD_UNSPECIFIED HttpMethod = 0 + // HTTP POST + HttpMethod_POST HttpMethod = 1 + // HTTP GET + HttpMethod_GET HttpMethod = 2 + // HTTP HEAD + HttpMethod_HEAD HttpMethod = 3 + // HTTP PUT + HttpMethod_PUT HttpMethod = 4 + // HTTP DELETE + HttpMethod_DELETE HttpMethod = 5 + // HTTP PATCH + HttpMethod_PATCH HttpMethod = 6 + // HTTP OPTIONS + HttpMethod_OPTIONS HttpMethod = 7 +) + +var HttpMethod_name = map[int32]string{ + 0: "HTTP_METHOD_UNSPECIFIED", + 1: "POST", + 2: "GET", + 3: "HEAD", + 4: "PUT", + 5: "DELETE", + 6: "PATCH", + 7: "OPTIONS", +} +var HttpMethod_value = map[string]int32{ + "HTTP_METHOD_UNSPECIFIED": 0, + "POST": 1, + "GET": 2, + "HEAD": 3, + "PUT": 4, + "DELETE": 5, + "PATCH": 6, + "OPTIONS": 7, +} + +func (x HttpMethod) String() string { + return proto.EnumName(HttpMethod_name, int32(x)) +} +func (HttpMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{0} +} + +// Http target. The job will be pushed to the job handler by means of +// an HTTP request via an [http_method][google.cloud.scheduler.v1beta1.HttpTarget.http_method] such as HTTP +// POST, HTTP GET, etc. The job is acknowledged by means of an HTTP +// response code in the range [200 - 299]. A failure to receive a response +// constitutes a failed execution. For a redirected request, the response +// returned by the redirected request is considered. +type HttpTarget struct { + // Required. + // + // The full URI path that the request will be sent to. This string + // must begin with either "http://" or "https://". Some examples of + // valid values for [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: + // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will + // encode some characters for safety and compatibility. The maximum allowed + // URL length is 2083 characters after encoding. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // Which HTTP method to use for the request. + HttpMethod HttpMethod `protobuf:"varint,2,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.scheduler.v1beta1.HttpMethod" json:"http_method,omitempty"` + // The user can specify HTTP request headers to send with the job's + // HTTP request. This map contains the header field names and + // values. Repeated headers are not supported, but a header value can + // contain commas. These headers represent a subset of the headers + // that will accompany the job's HTTP request. Some HTTP request + // headers will be ignored or replaced. 
A partial list of headers that + // will be ignored or replaced is below: + // - Host: This will be computed by Cloud Scheduler and derived from + // [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri]. + // * `Content-Length`: This will be computed by Cloud Scheduler. + // * `User-Agent`: This will be set to `"Google-Cloud-Scheduler"`. + // * `X-Google-*`: Google internal use only. + // * `X-AppEngine-*`: Google internal use only. + // + // The total size of headers must be less than 80KB. + Headers map[string]string `protobuf:"bytes,3,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // HTTP request body. A request body is allowed only if the HTTP + // method is POST, PUT, or PATCH. It is an error to set body on a job with an + // incompatible [HttpMethod][google.cloud.scheduler.v1beta1.HttpMethod]. + Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` + // The mode for generating an `Authorization` header for HTTP requests. + // + // If specified, all `Authorization` headers in the [HttpTarget.headers][google.cloud.scheduler.v1beta1.HttpTarget.headers] + // field will be overridden. + // + // Types that are valid to be assigned to AuthorizationHeader: + // *HttpTarget_OauthToken + // *HttpTarget_OidcToken + AuthorizationHeader isHttpTarget_AuthorizationHeader `protobuf_oneof:"authorization_header"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpTarget) Reset() { *m = HttpTarget{} } +func (m *HttpTarget) String() string { return proto.CompactTextString(m) } +func (*HttpTarget) ProtoMessage() {} +func (*HttpTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{0} +} +func (m *HttpTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpTarget.Unmarshal(m, b) +} +func (m *HttpTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpTarget.Marshal(b, m, deterministic) +} +func (dst *HttpTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpTarget.Merge(dst, src) +} +func (m *HttpTarget) XXX_Size() int { + return xxx_messageInfo_HttpTarget.Size(m) +} +func (m *HttpTarget) XXX_DiscardUnknown() { + xxx_messageInfo_HttpTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpTarget proto.InternalMessageInfo + +func (m *HttpTarget) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *HttpTarget) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *HttpTarget) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *HttpTarget) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +type isHttpTarget_AuthorizationHeader interface { + isHttpTarget_AuthorizationHeader() +} + +type HttpTarget_OauthToken struct { + OauthToken *OAuthToken `protobuf:"bytes,5,opt,name=oauth_token,json=oauthToken,proto3,oneof"` +} + +type HttpTarget_OidcToken struct { + OidcToken *OidcToken `protobuf:"bytes,6,opt,name=oidc_token,json=oidcToken,proto3,oneof"` +} + +func (*HttpTarget_OauthToken) isHttpTarget_AuthorizationHeader() {} + +func (*HttpTarget_OidcToken) isHttpTarget_AuthorizationHeader() {} + +func (m *HttpTarget) GetAuthorizationHeader() isHttpTarget_AuthorizationHeader { + if m != nil { + return m.AuthorizationHeader + } + return nil +} + +func (m 
*HttpTarget) GetOauthToken() *OAuthToken { + if x, ok := m.GetAuthorizationHeader().(*HttpTarget_OauthToken); ok { + return x.OauthToken + } + return nil +} + +func (m *HttpTarget) GetOidcToken() *OidcToken { + if x, ok := m.GetAuthorizationHeader().(*HttpTarget_OidcToken); ok { + return x.OidcToken + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*HttpTarget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HttpTarget_OneofMarshaler, _HttpTarget_OneofUnmarshaler, _HttpTarget_OneofSizer, []interface{}{ + (*HttpTarget_OauthToken)(nil), + (*HttpTarget_OidcToken)(nil), + } +} + +func _HttpTarget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HttpTarget) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpTarget_OauthToken: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OauthToken); err != nil { + return err + } + case *HttpTarget_OidcToken: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OidcToken); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HttpTarget.AuthorizationHeader has unexpected type %T", x) + } + return nil +} + +func _HttpTarget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HttpTarget) + switch tag { + case 5: // authorization_header.oauth_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OAuthToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpTarget_OauthToken{msg} + return true, err + case 6: // authorization_header.oidc_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OidcToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpTarget_OidcToken{msg} + return true, err + default: + return false, nil + } +} + +func _HttpTarget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HttpTarget) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpTarget_OauthToken: + s := proto.Size(x.OauthToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HttpTarget_OidcToken: + s := proto.Size(x.OidcToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// App Engine target. The job will be pushed to a job handler by means +// of an HTTP request via an [http_method][google.cloud.scheduler.v1beta1.AppEngineHttpTarget.http_method] such +// as HTTP POST, HTTP GET, etc. The job is acknowledged by means of an +// HTTP response code in the range [200 - 299]. Error 503 is +// considered an App Engine system error instead of an application +// error. Requests returning error 503 will be retried regardless of +// retry configuration and not counted against retry counts. Any other +// response code, or a failure to receive a response before the +// deadline, constitutes a failed attempt. +type AppEngineHttpTarget struct { + // The HTTP method to use for the request. PATCH and OPTIONS are not + // permitted. + HttpMethod HttpMethod `protobuf:"varint,1,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.scheduler.v1beta1.HttpMethod" json:"http_method,omitempty"` + // App Engine Routing setting for the job. 
+ AppEngineRouting *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing,json=appEngineRouting,proto3" json:"app_engine_routing,omitempty"` + // The relative URI. + // + // The relative URL must begin with "/" and must be a valid HTTP relative URL. + // It can contain a path, query string arguments, and `#` fragments. + // If the relative URL is empty, then the root path "/" will be used. + // No spaces are allowed, and the maximum length allowed is 2083 characters. + RelativeUri string `protobuf:"bytes,3,opt,name=relative_uri,json=relativeUri,proto3" json:"relative_uri,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. Headers can be set + // when the job is created. + // + // Cloud Scheduler sets some headers to default values: + // + // * `User-Agent`: By default, this header is + // `"AppEngine-Google; (+http://code.google.com/appengine)"`. + // This header can be modified, but Cloud Scheduler will append + // `"AppEngine-Google; (+http://code.google.com/appengine)"` to the + // modified `User-Agent`. + // * `X-CloudScheduler`: This header will be set to true. + // + // If the job has a [body][google.cloud.scheduler.v1beta1.AppEngineHttpTarget.body], Cloud Scheduler sets + // the following headers: + // + // * `Content-Type`: By default, the `Content-Type` header is set to + // `"application/octet-stream"`. The default can be overridden by explicitly + // setting `Content-Type` to a particular media type when the job is + // created. + // For example, `Content-Type` can be set to `"application/json"`. + // * `Content-Length`: This is computed by Cloud Scheduler. This value is + // output only. It cannot be changed. + // + // The headers below are output only. They cannot be set or overridden: + // + // * `X-Google-*`: For Google internal use only. + // * `X-AppEngine-*`: For Google internal use only. + // + // In addition, some App Engine headers, which contain + // job-specific information, are also sent to the job handler. + Headers map[string]string `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Body. + // + // HTTP request body. A request body is allowed only if the HTTP method is + // POST or PUT. It will result in an invalid argument error to set a body on a + // job with an incompatible [HttpMethod][google.cloud.scheduler.v1beta1.HttpMethod].
+ Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpTarget) Reset() { *m = AppEngineHttpTarget{} } +func (m *AppEngineHttpTarget) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpTarget) ProtoMessage() {} +func (*AppEngineHttpTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{1} +} +func (m *AppEngineHttpTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpTarget.Unmarshal(m, b) +} +func (m *AppEngineHttpTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpTarget.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpTarget.Merge(dst, src) +} +func (m *AppEngineHttpTarget) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpTarget.Size(m) +} +func (m *AppEngineHttpTarget) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpTarget proto.InternalMessageInfo + +func (m *AppEngineHttpTarget) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *AppEngineHttpTarget) GetAppEngineRouting() *AppEngineRouting { + if m != nil { + return m.AppEngineRouting + } + return nil +} + +func (m *AppEngineHttpTarget) GetRelativeUri() string { + if m != nil { + return m.RelativeUri + } + return "" +} + +func (m *AppEngineHttpTarget) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *AppEngineHttpTarget) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// Pub/Sub target. The job will be delivered by publishing a message to +// the given Pub/Sub topic. +type PubsubTarget struct { + // Required. + // + // The name of the Cloud Pub/Sub topic to which messages will + // be published when a job is delivered. The topic name must be in the + // same format as required by PubSub's + // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), + // for example `projects/PROJECT_ID/topics/TOPIC_ID`. + // + // The topic must be in the same project as the Cloud Scheduler job. + TopicName string `protobuf:"bytes,1,opt,name=topic_name,json=topicName,proto3" json:"topic_name,omitempty"` + // The message payload for PubsubMessage. + // + // Pubsub message must contain either non-empty data, or at least one + // attribute. + Data []byte `protobuf:"bytes,3,opt,name=data,proto3" json:"data,omitempty"` + // Attributes for PubsubMessage. + // + // Pubsub message must contain either non-empty data, or at least one + // attribute. 
+ Attributes map[string]string `protobuf:"bytes,4,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PubsubTarget) Reset() { *m = PubsubTarget{} } +func (m *PubsubTarget) String() string { return proto.CompactTextString(m) } +func (*PubsubTarget) ProtoMessage() {} +func (*PubsubTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{2} +} +func (m *PubsubTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PubsubTarget.Unmarshal(m, b) +} +func (m *PubsubTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PubsubTarget.Marshal(b, m, deterministic) +} +func (dst *PubsubTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_PubsubTarget.Merge(dst, src) +} +func (m *PubsubTarget) XXX_Size() int { + return xxx_messageInfo_PubsubTarget.Size(m) +} +func (m *PubsubTarget) XXX_DiscardUnknown() { + xxx_messageInfo_PubsubTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_PubsubTarget proto.InternalMessageInfo + +func (m *PubsubTarget) GetTopicName() string { + if m != nil { + return m.TopicName + } + return "" +} + +func (m *PubsubTarget) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *PubsubTarget) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +// App Engine Routing. +// +// For more information about services, versions, and instances see +// [An Overview of App +// Engine](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine), +// [Microservices Architecture on Google App +// Engine](https://cloud.google.com/appengine/docs/python/microservices-on-app-engine), +// [App Engine Standard request +// routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed), +// and [App Engine Flex request +// routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). +type AppEngineRouting struct { + // App service. + // + // By default, the job is sent to the service which is the default + // service when the job is attempted. + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + // App version. + // + // By default, the job is sent to the version which is the default + // version when the job is attempted. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // App instance. + // + // By default, the job is sent to an instance which is available when + // the job is attempted. + // + // Requests can only be sent to a specific instance if + // [manual scaling is used in App Engine + // Standard](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes). + // App Engine Flex does not support instances. For more information, see + // [App Engine Standard request + // routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) + // and [App Engine Flex request + // routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). + Instance string `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // Output only. The host that the job is sent to. 
+ // + // For more information about how App Engine requests are routed, see + // [here](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed). + // + // The host is constructed as: + // + // + // * `host = [application_domain_name]`
+ // `| [service] + '.' + [application_domain_name]`
+ // `| [version] + '.' + [application_domain_name]`
+ // `| [version_dot_service]+ '.' + [application_domain_name]`
+ // `| [instance] + '.' + [application_domain_name]`
+ // `| [instance_dot_service] + '.' + [application_domain_name]`
+ // `| [instance_dot_version] + '.' + [application_domain_name]`
+ // `| [instance_dot_version_dot_service] + '.' + [application_domain_name]` + // + // * `application_domain_name` = The domain name of the app, for + // example .appspot.com, which is associated with the + // job's project ID. + // + // * `service =` [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + // + // * `version =` [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + // + // * `version_dot_service =` + // [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] `+ '.' +` + // [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + // + // * `instance =` [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] + // + // * `instance_dot_service =` + // [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] `+ '.' +` + // [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + // + // * `instance_dot_version =` + // [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] `+ '.' +` + // [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] + // + // * `instance_dot_version_dot_service =` + // [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] `+ '.' +` + // [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] `+ '.' +` + // [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] + // + // + // If [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service] is empty, then the job will be sent + // to the service which is the default service when the job is attempted. + // + // If [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version] is empty, then the job will be sent + // to the version which is the default version when the job is attempted. + // + // If [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] is empty, then the job will be + // sent to an instance which is available when the job is attempted. + // + // If [service][google.cloud.scheduler.v1beta1.AppEngineRouting.service], + // [version][google.cloud.scheduler.v1beta1.AppEngineRouting.version], or + // [instance][google.cloud.scheduler.v1beta1.AppEngineRouting.instance] is invalid, then the job will be sent + // to the default version of the default service when the job is attempted. 
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineRouting) Reset() { *m = AppEngineRouting{} } +func (m *AppEngineRouting) String() string { return proto.CompactTextString(m) } +func (*AppEngineRouting) ProtoMessage() {} +func (*AppEngineRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{3} +} +func (m *AppEngineRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineRouting.Unmarshal(m, b) +} +func (m *AppEngineRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineRouting.Marshal(b, m, deterministic) +} +func (dst *AppEngineRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineRouting.Merge(dst, src) +} +func (m *AppEngineRouting) XXX_Size() int { + return xxx_messageInfo_AppEngineRouting.Size(m) +} +func (m *AppEngineRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineRouting proto.InternalMessageInfo + +func (m *AppEngineRouting) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AppEngineRouting) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *AppEngineRouting) GetInstance() string { + if m != nil { + return m.Instance + } + return "" +} + +func (m *AppEngineRouting) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +// Contains information needed for generating an +// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +// This type of authorization should be used when sending requests to a GCP +// endpoint. +type OAuthToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OAuth token. + // The service account must be within the same project as the job. The caller + // must have iam.serviceAccounts.actAs permission for the service account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // OAuth scope to be used for generating OAuth access token. + // If not specified, "https://www.googleapis.com/auth/cloud-platform" + // will be used. 
+ Scope string `protobuf:"bytes,2,opt,name=scope,proto3" json:"scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OAuthToken) Reset() { *m = OAuthToken{} } +func (m *OAuthToken) String() string { return proto.CompactTextString(m) } +func (*OAuthToken) ProtoMessage() {} +func (*OAuthToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{4} +} +func (m *OAuthToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OAuthToken.Unmarshal(m, b) +} +func (m *OAuthToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OAuthToken.Marshal(b, m, deterministic) +} +func (dst *OAuthToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuthToken.Merge(dst, src) +} +func (m *OAuthToken) XXX_Size() int { + return xxx_messageInfo_OAuthToken.Size(m) +} +func (m *OAuthToken) XXX_DiscardUnknown() { + xxx_messageInfo_OAuthToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuthToken proto.InternalMessageInfo + +func (m *OAuthToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OAuthToken) GetScope() string { + if m != nil { + return m.Scope + } + return "" +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). This +// type of authorization should be used when sending requests to third party +// endpoints. +type OidcToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OIDC token. + // The service account must be within the same project as the job. The caller + // must have iam.serviceAccounts.actAs permission for the service account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // Audience to be used when generating OIDC token. If not specified, the URI + // specified in target will be used. 
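+ //
+ // For illustration (hypothetical URL): if the job targets
+ // "https://example.com/run" and no audience is set, the OIDC token is
+ // generated with that URI as its audience.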
+ Audience string `protobuf:"bytes,2,opt,name=audience,proto3" json:"audience,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OidcToken) Reset() { *m = OidcToken{} } +func (m *OidcToken) String() string { return proto.CompactTextString(m) } +func (*OidcToken) ProtoMessage() {} +func (*OidcToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_d943b6e907724d6e, []int{5} +} +func (m *OidcToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OidcToken.Unmarshal(m, b) +} +func (m *OidcToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OidcToken.Marshal(b, m, deterministic) +} +func (dst *OidcToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OidcToken.Merge(dst, src) +} +func (m *OidcToken) XXX_Size() int { + return xxx_messageInfo_OidcToken.Size(m) +} +func (m *OidcToken) XXX_DiscardUnknown() { + xxx_messageInfo_OidcToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OidcToken proto.InternalMessageInfo + +func (m *OidcToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OidcToken) GetAudience() string { + if m != nil { + return m.Audience + } + return "" +} + +func init() { + proto.RegisterType((*HttpTarget)(nil), "google.cloud.scheduler.v1beta1.HttpTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1beta1.HttpTarget.HeadersEntry") + proto.RegisterType((*AppEngineHttpTarget)(nil), "google.cloud.scheduler.v1beta1.AppEngineHttpTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1beta1.AppEngineHttpTarget.HeadersEntry") + proto.RegisterType((*PubsubTarget)(nil), "google.cloud.scheduler.v1beta1.PubsubTarget") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.scheduler.v1beta1.PubsubTarget.AttributesEntry") + proto.RegisterType((*AppEngineRouting)(nil), "google.cloud.scheduler.v1beta1.AppEngineRouting") + proto.RegisterType((*OAuthToken)(nil), "google.cloud.scheduler.v1beta1.OAuthToken") + proto.RegisterType((*OidcToken)(nil), "google.cloud.scheduler.v1beta1.OidcToken") + proto.RegisterEnum("google.cloud.scheduler.v1beta1.HttpMethod", HttpMethod_name, HttpMethod_value) +} + +func init() { + proto.RegisterFile("google/cloud/scheduler/v1beta1/target.proto", fileDescriptor_target_d943b6e907724d6e) +} + +var fileDescriptor_target_d943b6e907724d6e = []byte{ + // 754 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdb, 0x6e, 0xf3, 0x44, + 0x10, 0xae, 0xeb, 0x1c, 0x9a, 0x71, 0x04, 0xd6, 0xb6, 0x40, 0x08, 0x07, 0x85, 0x5c, 0x85, 0x22, + 0xd9, 0x34, 0x5c, 0x80, 0x0a, 0x48, 0xa4, 0x8d, 0x69, 0x0a, 0xb4, 0x31, 0xae, 0xcb, 0x45, 0x41, + 0x58, 0x1b, 0x7b, 0xb1, 0x57, 0x4d, 0x76, 0x2d, 0x7b, 0x1d, 0x29, 0x88, 0x97, 0xe0, 0x91, 0x78, + 0x11, 0x9e, 0x05, 0x79, 0x7d, 0x68, 0x14, 0x89, 0x3f, 0xfd, 0xff, 0xde, 0xcd, 0xf1, 0x9b, 0xf9, + 0x66, 0x66, 0x6d, 0xf8, 0x2c, 0xe4, 0x3c, 0x5c, 0x12, 0xd3, 0x5f, 0xf2, 0x2c, 0x30, 0x53, 0x3f, + 0x22, 0x41, 0xb6, 0x24, 0x89, 0xb9, 0x3e, 0x5b, 0x10, 0x81, 0xcf, 0x4c, 0x81, 0x93, 0x90, 0x08, + 0x23, 0x4e, 0xb8, 0xe0, 0xe8, 0xe3, 0x22, 0xd8, 0x90, 0xc1, 0x46, 0x1d, 0x6c, 0x94, 0xc1, 0xfd, + 0x0f, 0x4b, 0x30, 0x1c, 0x53, 0x13, 0x33, 0xc6, 0x05, 0x16, 0x94, 0xb3, 0xb4, 0xc8, 0xee, 0xbf, + 0x5f, 0x7a, 0xa5, 0xb6, 0xc8, 0xfe, 0x30, 0x31, 0xdb, 0x14, 0xae, 0xe1, 0x3f, 0x2a, 0xc0, 0x4c, + 0x88, 0xd8, 0x95, 0xd5, 0x90, 0x0e, 
0x6a, 0x96, 0xd0, 0x9e, 0x32, 0x50, 0x46, 0x1d, 0x27, 0x17, + 0xd1, 0x8f, 0xa0, 0x45, 0x42, 0xc4, 0xde, 0x8a, 0x88, 0x88, 0x07, 0xbd, 0xc3, 0x81, 0x32, 0x7a, + 0x6b, 0x7c, 0x6a, 0xbc, 0xba, 0x1f, 0x23, 0x87, 0xbc, 0x91, 0x19, 0x0e, 0x44, 0xb5, 0x8c, 0x7e, + 0x86, 0x76, 0x44, 0x70, 0x40, 0x92, 0xb4, 0xa7, 0x0e, 0xd4, 0x91, 0x36, 0xfe, 0xf2, 0x39, 0x40, + 0x45, 0x6f, 0xc6, 0xac, 0xc8, 0xb4, 0x98, 0x48, 0x36, 0x4e, 0x85, 0x83, 0x10, 0x34, 0x16, 0x3c, + 0xd8, 0xf4, 0x1a, 0x03, 0x65, 0xd4, 0x75, 0xa4, 0x8c, 0x6e, 0x40, 0xe3, 0x38, 0x13, 0x91, 0x27, + 0xf8, 0x23, 0x61, 0xbd, 0xe6, 0x40, 0x19, 0x69, 0xfb, 0x7b, 0x9e, 0x4f, 0x32, 0x11, 0xb9, 0x79, + 0xc6, 0xec, 0xc0, 0x01, 0x09, 0x20, 0x35, 0xf4, 0x03, 0x00, 0xa7, 0x81, 0x5f, 0xa2, 0xb5, 0x24, + 0xda, 0xa7, 0x7b, 0xd1, 0x68, 0xe0, 0x57, 0x60, 0x1d, 0x5e, 0x29, 0xfd, 0x73, 0xe8, 0x6e, 0xf3, + 0xc8, 0x07, 0xfe, 0x48, 0x36, 0xd5, 0xc0, 0x1f, 0xc9, 0x06, 0x9d, 0x40, 0x73, 0x8d, 0x97, 0x19, + 0x91, 0xa3, 0xee, 0x38, 0x85, 0x72, 0x7e, 0xf8, 0x95, 0x72, 0xf1, 0x2e, 0x9c, 0xe4, 0x4d, 0xf1, + 0x84, 0xfe, 0x29, 0xd7, 0xeb, 0x15, 0x33, 0x18, 0xfe, 0xad, 0xc2, 0xf1, 0x24, 0x8e, 0x2d, 0x16, + 0x52, 0x46, 0xb6, 0x96, 0xb9, 0xb3, 0x3a, 0xe5, 0x45, 0xab, 0xfb, 0x1d, 0x10, 0x8e, 0x63, 0x8f, + 0xc8, 0x22, 0x5e, 0xc2, 0x33, 0x41, 0x59, 0x28, 0x7b, 0xd4, 0xc6, 0x9f, 0xef, 0xc3, 0xac, 0xbb, + 0x73, 0x8a, 0x3c, 0x47, 0xc7, 0x3b, 0x16, 0xf4, 0x09, 0x74, 0x13, 0xb2, 0xc4, 0x82, 0xae, 0x89, + 0x97, 0x9f, 0xa0, 0x2a, 0xd9, 0x6b, 0x95, 0xed, 0x3e, 0xa1, 0xe8, 0xe1, 0xe9, 0x7a, 0x1a, 0xf2, + 0x7a, 0xbe, 0x7b, 0x76, 0xdd, 0x67, 0x9f, 0x51, 0xf3, 0xe9, 0x8c, 0x5e, 0xb2, 0xab, 0xe1, 0xbf, + 0x0a, 0x74, 0xed, 0x6c, 0x91, 0x66, 0x8b, 0x72, 0x19, 0x1f, 0x01, 0x08, 0x1e, 0x53, 0xdf, 0x63, + 0x78, 0x45, 0x4a, 0x8c, 0x8e, 0xb4, 0xdc, 0xe2, 0x15, 0xc9, 0xeb, 0x07, 0x58, 0x60, 0x49, 0xbb, + 0xeb, 0x48, 0x19, 0xfd, 0x06, 0x80, 0x85, 0x48, 0xe8, 0x22, 0x13, 0xa4, 0xa2, 0xfc, 0xcd, 0x3e, + 0xca, 0xdb, 0x45, 0x8d, 0x49, 0x9d, 0x5e, 0xd0, 0xdd, 0xc2, 0xeb, 0x7f, 0x0b, 0x6f, 0xef, 0xb8, + 0x5f, 0x8b, 0xe0, 0x1a, 0xf4, 0xdd, 0xad, 0xa2, 0x1e, 0xb4, 0x53, 0x92, 0xac, 0xa9, 0x5f, 0x11, + 0xac, 0xd4, 0xdc, 0xb3, 0x26, 0x49, 0x4a, 0x39, 0x2b, 0x91, 0x2a, 0x15, 0xf5, 0xe1, 0x88, 0xb2, + 0x54, 0x60, 0xe6, 0x93, 0x72, 0xe7, 0xb5, 0x9e, 0x0f, 0x25, 0xe2, 0xa9, 0x90, 0x6f, 0xbb, 0xe3, + 0x48, 0x79, 0xf8, 0x0b, 0xc0, 0xd3, 0x43, 0x45, 0x63, 0x78, 0xa7, 0x2c, 0xe1, 0x61, 0xdf, 0xe7, + 0x19, 0x13, 0x1e, 0x59, 0x61, 0xba, 0x2c, 0xeb, 0x1f, 0x97, 0xce, 0x49, 0xe1, 0xb3, 0x72, 0x57, + 0xce, 0x29, 0xf5, 0x79, 0x5c, 0x73, 0x92, 0xca, 0xf0, 0x57, 0xe8, 0xd4, 0x4f, 0xf6, 0x8d, 0x60, + 0xfb, 0x70, 0x84, 0xb3, 0x80, 0x92, 0x9c, 0x48, 0x81, 0x5c, 0xeb, 0xa7, 0x69, 0xf1, 0x91, 0x2d, + 0x9f, 0xd2, 0x07, 0xf0, 0xde, 0xcc, 0x75, 0x6d, 0xef, 0xc6, 0x72, 0x67, 0xf3, 0xa9, 0x77, 0x7f, + 0x7b, 0x67, 0x5b, 0x97, 0xd7, 0xdf, 0x5f, 0x5b, 0x53, 0xfd, 0x00, 0x1d, 0x41, 0xc3, 0x9e, 0xdf, + 0xb9, 0xba, 0x82, 0xda, 0xa0, 0x5e, 0x59, 0xae, 0x7e, 0x98, 0x9b, 0x66, 0xd6, 0x64, 0xaa, 0xab, + 0xb9, 0xc9, 0xbe, 0x77, 0xf5, 0x06, 0x02, 0x68, 0x4d, 0xad, 0x9f, 0x2c, 0xd7, 0xd2, 0x9b, 0xa8, + 0x03, 0x4d, 0x7b, 0xe2, 0x5e, 0xce, 0xf4, 0x16, 0xd2, 0xa0, 0x3d, 0xb7, 0xdd, 0xeb, 0xf9, 0xed, + 0x9d, 0xde, 0xbe, 0xf8, 0x0b, 0x86, 0x3e, 0x5f, 0xed, 0xb9, 0x97, 0x0b, 0xad, 0x38, 0x15, 0x3b, + 0xff, 0x1b, 0xd8, 0xca, 0xc3, 0x55, 0x19, 0x1e, 0xf2, 0x25, 0x66, 0xa1, 0xc1, 0x93, 0xd0, 0x0c, + 0x09, 0x93, 0xff, 0x0a, 0xb3, 0x70, 0xe1, 0x98, 0xa6, 0xff, 0xf7, 0xd3, 0xfa, 0xba, 0xb6, 0x2c, + 0x5a, 0x32, 0xe7, 0x8b, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 
0x69, 0x77, 0x8b, 0x53, 0xe7, 0x06, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/asset.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/asset.pb.go new file mode 100644 index 0000000..8ac7976 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/asset.pb.go @@ -0,0 +1,310 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1/asset.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Cloud Security Command Center's (Cloud SCC) representation of a Google Cloud +// Platform (GCP) resource. +// +// The Asset is a Cloud SCC resource that captures information about a single +// GCP resource. All modifications to an Asset are only within the context of +// Cloud SCC and don't affect the referenced GCP resource. +type Asset struct { + // The relative resource name of this asset. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/assets/456". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Cloud SCC managed properties. These properties are managed by + // Cloud SCC and cannot be modified by the user. + SecurityCenterProperties *Asset_SecurityCenterProperties `protobuf:"bytes,2,opt,name=security_center_properties,json=securityCenterProperties,proto3" json:"security_center_properties,omitempty"` + // Resource managed properties. These properties are managed and defined by + // the GCP resource and cannot be modified by the user. + ResourceProperties map[string]*_struct.Value `protobuf:"bytes,7,rep,name=resource_properties,json=resourceProperties,proto3" json:"resource_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // User specified security marks. These marks are entirely managed by the user + // and come from the SecurityMarks resource that belongs to the asset. + SecurityMarks *SecurityMarks `protobuf:"bytes,8,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The time at which the asset was created in Cloud SCC. + CreateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time at which the asset was last updated, added, or deleted in Cloud + // SCC. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // IAM Policy information associated with the GCP resource described by the + // Cloud SCC asset. This information is managed and defined by the GCP + // resource and cannot be modified by the user. 
+ IamPolicy *Asset_IamPolicy `protobuf:"bytes,11,opt,name=iam_policy,json=iamPolicy,proto3" json:"iam_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset) Reset() { *m = Asset{} } +func (m *Asset) String() string { return proto.CompactTextString(m) } +func (*Asset) ProtoMessage() {} +func (*Asset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_450623263cfcc7ba, []int{0} +} +func (m *Asset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset.Unmarshal(m, b) +} +func (m *Asset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset.Marshal(b, m, deterministic) +} +func (dst *Asset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset.Merge(dst, src) +} +func (m *Asset) XXX_Size() int { + return xxx_messageInfo_Asset.Size(m) +} +func (m *Asset) XXX_DiscardUnknown() { + xxx_messageInfo_Asset.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset proto.InternalMessageInfo + +func (m *Asset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Asset) GetSecurityCenterProperties() *Asset_SecurityCenterProperties { + if m != nil { + return m.SecurityCenterProperties + } + return nil +} + +func (m *Asset) GetResourceProperties() map[string]*_struct.Value { + if m != nil { + return m.ResourceProperties + } + return nil +} + +func (m *Asset) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *Asset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Asset) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Asset) GetIamPolicy() *Asset_IamPolicy { + if m != nil { + return m.IamPolicy + } + return nil +} + +// Cloud SCC managed properties. These properties are managed by Cloud SCC and +// cannot be modified by the user. +type Asset_SecurityCenterProperties struct { + // The full resource name of the GCP resource this asset + // represents. This field is immutable after create time. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The type of the GCP resource. Examples include: APPLICATION, + // PROJECT, and ORGANIZATION. This is a case insensitive field defined by + // Cloud SCC and/or the producer of the resource and is immutable + // after create time. + ResourceType string `protobuf:"bytes,2,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + // The full resource name of the immediate parent of the resource. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceParent string `protobuf:"bytes,3,opt,name=resource_parent,json=resourceParent,proto3" json:"resource_parent,omitempty"` + // The full resource name of the project the resource belongs to. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceProject string `protobuf:"bytes,4,opt,name=resource_project,json=resourceProject,proto3" json:"resource_project,omitempty"` + // Owners of the Google Cloud resource. 
+ ResourceOwners []string `protobuf:"bytes,5,rep,name=resource_owners,json=resourceOwners,proto3" json:"resource_owners,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset_SecurityCenterProperties) Reset() { *m = Asset_SecurityCenterProperties{} } +func (m *Asset_SecurityCenterProperties) String() string { return proto.CompactTextString(m) } +func (*Asset_SecurityCenterProperties) ProtoMessage() {} +func (*Asset_SecurityCenterProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_450623263cfcc7ba, []int{0, 0} +} +func (m *Asset_SecurityCenterProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset_SecurityCenterProperties.Unmarshal(m, b) +} +func (m *Asset_SecurityCenterProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset_SecurityCenterProperties.Marshal(b, m, deterministic) +} +func (dst *Asset_SecurityCenterProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset_SecurityCenterProperties.Merge(dst, src) +} +func (m *Asset_SecurityCenterProperties) XXX_Size() int { + return xxx_messageInfo_Asset_SecurityCenterProperties.Size(m) +} +func (m *Asset_SecurityCenterProperties) XXX_DiscardUnknown() { + xxx_messageInfo_Asset_SecurityCenterProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset_SecurityCenterProperties proto.InternalMessageInfo + +func (m *Asset_SecurityCenterProperties) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceType() string { + if m != nil { + return m.ResourceType + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceParent() string { + if m != nil { + return m.ResourceParent + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceProject() string { + if m != nil { + return m.ResourceProject + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceOwners() []string { + if m != nil { + return m.ResourceOwners + } + return nil +} + +// IAM Policy information associated with the GCP resource described by the +// Cloud SCC asset. This information is managed and defined by the GCP +// resource and cannot be modified by the user. +type Asset_IamPolicy struct { + // The JSON representation of the Policy associated with the asset. + // See https://cloud.google.com/iam/reference/rest/v1/Policy for format + // details. 
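+ //
+ // Illustrative value (hypothetical member): a blob such as
+ // `{"bindings":[{"role":"roles/viewer","members":["user:alice@example.com"]}]}`.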
+ PolicyBlob string `protobuf:"bytes,1,opt,name=policy_blob,json=policyBlob,proto3" json:"policy_blob,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset_IamPolicy) Reset() { *m = Asset_IamPolicy{} } +func (m *Asset_IamPolicy) String() string { return proto.CompactTextString(m) } +func (*Asset_IamPolicy) ProtoMessage() {} +func (*Asset_IamPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_450623263cfcc7ba, []int{0, 1} +} +func (m *Asset_IamPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset_IamPolicy.Unmarshal(m, b) +} +func (m *Asset_IamPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset_IamPolicy.Marshal(b, m, deterministic) +} +func (dst *Asset_IamPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset_IamPolicy.Merge(dst, src) +} +func (m *Asset_IamPolicy) XXX_Size() int { + return xxx_messageInfo_Asset_IamPolicy.Size(m) +} +func (m *Asset_IamPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_Asset_IamPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset_IamPolicy proto.InternalMessageInfo + +func (m *Asset_IamPolicy) GetPolicyBlob() string { + if m != nil { + return m.PolicyBlob + } + return "" +} + +func init() { + proto.RegisterType((*Asset)(nil), "google.cloud.securitycenter.v1.Asset") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1.Asset.ResourcePropertiesEntry") + proto.RegisterType((*Asset_SecurityCenterProperties)(nil), "google.cloud.securitycenter.v1.Asset.SecurityCenterProperties") + proto.RegisterType((*Asset_IamPolicy)(nil), "google.cloud.securitycenter.v1.Asset.IamPolicy") +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/asset.proto", fileDescriptor_asset_450623263cfcc7ba) +} + +var fileDescriptor_asset_450623263cfcc7ba = []byte{ + // 581 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0x95, 0x93, 0xe6, 0xeb, 0x97, 0x1b, 0xfa, 0xa3, 0x41, 0x02, 0xcb, 0x42, 0x6d, 0x28, 0x0b, + 0x02, 0x2a, 0xb6, 0xd2, 0x6e, 0x90, 0x2b, 0x90, 0x68, 0x85, 0x10, 0x12, 0x94, 0xc8, 0x44, 0x59, + 0xa0, 0xa0, 0x68, 0xec, 0x0e, 0x96, 0xa9, 0x3d, 0x63, 0xcd, 0x8c, 0x83, 0x2c, 0xf1, 0x24, 0x3c, + 0x02, 0x8f, 0xc2, 0x23, 0xb0, 0x63, 0xcb, 0x53, 0x20, 0xcf, 0xd8, 0xae, 0x4d, 0x15, 0xd2, 0xdd, + 0xf8, 0xdc, 0x73, 0xce, 0x9d, 0x3b, 0xe7, 0x26, 0xf0, 0x38, 0x64, 0x2c, 0x8c, 0x89, 0x13, 0xc4, + 0x2c, 0xbb, 0x70, 0x04, 0x09, 0x32, 0x1e, 0xc9, 0x3c, 0x20, 0x54, 0x12, 0xee, 0x2c, 0xc7, 0x0e, + 0x16, 0x82, 0x48, 0x3b, 0xe5, 0x4c, 0x32, 0xb4, 0xa7, 0xb9, 0xb6, 0xe2, 0xda, 0x6d, 0xae, 0xbd, + 0x1c, 0x5b, 0xf7, 0x4a, 0x2f, 0x9c, 0x46, 0x0e, 0xa6, 0x94, 0x49, 0x2c, 0x23, 0x46, 0x85, 0x56, + 0x5b, 0xc7, 0x6b, 0x3a, 0x55, 0xc8, 0x22, 0xc1, 0xfc, 0xb2, 0x12, 0x55, 0x96, 0xea, 0xcb, 0xcf, + 0x3e, 0x39, 0x42, 0xf2, 0x2c, 0x28, 0x2f, 0x64, 0xed, 0xff, 0x5d, 0x95, 0x51, 0x42, 0x84, 0xc4, + 0x49, 0xaa, 0x09, 0x07, 0xdf, 0x36, 0xa1, 0xf7, 0xa2, 0x98, 0x00, 0x21, 0xd8, 0xa0, 0x38, 0x21, + 0xa6, 0x31, 0x34, 0x46, 0x7d, 0x4f, 0x9d, 0xd1, 0x57, 0xb0, 0xea, 0xa6, 0xfa, 0x1e, 0x8b, 0x94, + 0xb3, 0x94, 0x70, 0x19, 0x11, 0x61, 0x76, 0x86, 0xc6, 0x68, 0x70, 0xf4, 0xdc, 0xfe, 0xf7, 0xd0, + 0xb6, 0xb2, 0xb7, 0xdf, 0x97, 0xf8, 0x99, 0xc2, 0x27, 0xb5, 0x8b, 0x67, 0x8a, 0x15, 0x15, 0x44, + 0xe1, 0x36, 0x27, 0x82, 0x65, 0x3c, 0x20, 0xcd, 0xb6, 0x9b, 0xc3, 0xee, 0x68, 0x70, 0xf4, 0xec, + 0x66, 
0x6d, 0xbd, 0xd2, 0xe0, 0xca, 0xf6, 0x25, 0x95, 0x3c, 0xf7, 0x10, 0xbf, 0x56, 0x40, 0x53, + 0xd8, 0x6e, 0x3f, 0xb1, 0xf9, 0xbf, 0x9a, 0xf0, 0xc9, 0xba, 0x56, 0xd5, 0x6c, 0x6f, 0x0b, 0x91, + 0xb7, 0x25, 0x9a, 0x9f, 0xe8, 0x04, 0x06, 0x01, 0x27, 0x58, 0x92, 0x45, 0xf1, 0xf6, 0x66, 0x5f, + 0x59, 0x5a, 0x95, 0x65, 0x15, 0x8c, 0x3d, 0xad, 0x82, 0xf1, 0x40, 0xd3, 0x0b, 0xa0, 0x10, 0x67, + 0xe9, 0x45, 0x2d, 0x86, 0xf5, 0x62, 0x4d, 0x57, 0xe2, 0x73, 0x80, 0x08, 0x27, 0x8b, 0x94, 0xc5, + 0x51, 0x90, 0x9b, 0x03, 0xa5, 0x75, 0x6e, 0xf6, 0x6c, 0xaf, 0x71, 0x32, 0x51, 0x32, 0xaf, 0x1f, + 0x55, 0x47, 0xeb, 0x97, 0x01, 0xe6, 0xaa, 0x18, 0xd1, 0x03, 0xd8, 0xaa, 0xc3, 0x6a, 0xec, 0xd1, + 0xad, 0x0a, 0x3c, 0x2f, 0xf6, 0xa9, 0x49, 0x92, 0x79, 0x4a, 0xd4, 0x0a, 0x35, 0x48, 0xd3, 0x3c, + 0x25, 0xe8, 0x21, 0xec, 0x5c, 0xc5, 0x8e, 0x39, 0xa1, 0xd2, 0xec, 0x2a, 0xda, 0x76, 0x9d, 0x99, + 0x42, 0xd1, 0x23, 0xd8, 0x6d, 0xee, 0xc7, 0x67, 0x12, 0x48, 0x73, 0x43, 0x31, 0x77, 0x1a, 0xe9, + 0x16, 0x70, 0xcb, 0x93, 0x7d, 0xa1, 0x84, 0x0b, 0xb3, 0x37, 0xec, 0x36, 0x3d, 0xdf, 0x29, 0xd4, + 0x3a, 0x84, 0x7e, 0x3d, 0x3b, 0xda, 0x87, 0x81, 0x7e, 0xbc, 0x85, 0x1f, 0x33, 0xbf, 0x9c, 0x08, + 0x34, 0x74, 0x1a, 0x33, 0xdf, 0xfa, 0x08, 0x77, 0x57, 0x2c, 0x18, 0xda, 0x85, 0xee, 0x25, 0xc9, + 0x4b, 0x4d, 0x71, 0x44, 0x87, 0xd0, 0x5b, 0xe2, 0x38, 0x23, 0xe5, 0xef, 0xe6, 0xce, 0xb5, 0x14, + 0x67, 0x45, 0xd5, 0xd3, 0x24, 0xb7, 0xf3, 0xd4, 0x38, 0xfd, 0x69, 0xc0, 0x41, 0xc0, 0x92, 0x35, + 0x91, 0x4d, 0x8c, 0x0f, 0x6f, 0x4a, 0x46, 0xc8, 0x62, 0x4c, 0x43, 0x9b, 0xf1, 0xd0, 0x09, 0x09, + 0x55, 0xc6, 0x8e, 0x2e, 0xe1, 0x34, 0x12, 0xab, 0xfe, 0x58, 0x4e, 0xda, 0xc8, 0xf7, 0xce, 0xde, + 0x2b, 0x6d, 0x77, 0xa6, 0x1a, 0xb6, 0x23, 0xb7, 0x67, 0xe3, 0x1f, 0x15, 0x61, 0xae, 0x08, 0xf3, + 0x36, 0x61, 0x3e, 0x1b, 0xff, 0xee, 0xdc, 0xd7, 0x04, 0xd7, 0x55, 0x0c, 0xd7, 0x6d, 0x53, 0x5c, + 0x77, 0x36, 0xf6, 0xff, 0x53, 0xd7, 0x3b, 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0x31, 0xef, 0x3d, + 0x00, 0x60, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/finding.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/finding.pb.go new file mode 100644 index 0000000..a742337 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/finding.pb.go @@ -0,0 +1,249 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1/finding.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The state of the finding. +type Finding_State int32 + +const ( + // Unspecified state. + Finding_STATE_UNSPECIFIED Finding_State = 0 + // The finding requires attention and has not been addressed yet. 
+ Finding_ACTIVE Finding_State = 1 + // The finding has been fixed, triaged as a non-issue or otherwise addressed + // and is no longer active. + Finding_INACTIVE Finding_State = 2 +) + +var Finding_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "ACTIVE", + 2: "INACTIVE", +} +var Finding_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "ACTIVE": 1, + "INACTIVE": 2, +} + +func (x Finding_State) String() string { + return proto.EnumName(Finding_State_name, int32(x)) +} +func (Finding_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_finding_12e0037571a78005, []int{0, 0} +} + +// Cloud Security Command Center (Cloud SCC) finding. +// +// A finding is a record of assessment data (security, risk, health or privacy) +// ingested into Cloud SCC for presentation, notification, analysis, +// policy testing, and enforcement. For example, an XSS vulnerability in an +// App Engine application is a finding. +type Finding struct { + // The relative resource name of this finding. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456/findings/789" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The relative resource name of the source the finding belongs to. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // This field is immutable after creation time. + // For example: + // "organizations/123/sources/456" + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The full resource name of the Google Cloud Platform (GCP) resource this + // finding is for. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + // This field is immutable after creation time. + ResourceName string `protobuf:"bytes,3,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The state of the finding. + State Finding_State `protobuf:"varint,4,opt,name=state,proto3,enum=google.cloud.securitycenter.v1.Finding_State" json:"state,omitempty"` + // The additional taxonomy group within findings from a given source. + // This field is immutable after creation time. + // Example: "XSS_FLASH_INJECTION" + Category string `protobuf:"bytes,5,opt,name=category,proto3" json:"category,omitempty"` + // The URI that, if available, points to a web page outside of Cloud SCC + // where additional information about the finding can be found. This field is + // guaranteed to be either empty or a well formed URL. + ExternalUri string `protobuf:"bytes,6,opt,name=external_uri,json=externalUri,proto3" json:"external_uri,omitempty"` + // Source specific properties. These properties are managed by the source + // that writes the finding. The key names in the source_properties map must be + // between 1 and 255 characters, and must start with a letter and contain + // alphanumeric characters or underscores only. + SourceProperties map[string]*_struct.Value `protobuf:"bytes,7,rep,name=source_properties,json=sourceProperties,proto3" json:"source_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. User specified security marks. These marks are entirely + // managed by the user and come from the SecurityMarks resource that belongs + // to the finding. 
+ SecurityMarks *SecurityMarks `protobuf:"bytes,8,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The time at which the event took place. For example, if the finding + // represents an open firewall it would capture the time the open firewall was + // detected. + EventTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` + // The time at which the finding was created in Cloud SCC. + CreateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Finding) Reset() { *m = Finding{} } +func (m *Finding) String() string { return proto.CompactTextString(m) } +func (*Finding) ProtoMessage() {} +func (*Finding) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_12e0037571a78005, []int{0} +} +func (m *Finding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Finding.Unmarshal(m, b) +} +func (m *Finding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Finding.Marshal(b, m, deterministic) +} +func (dst *Finding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Finding.Merge(dst, src) +} +func (m *Finding) XXX_Size() int { + return xxx_messageInfo_Finding.Size(m) +} +func (m *Finding) XXX_DiscardUnknown() { + xxx_messageInfo_Finding.DiscardUnknown(m) +} + +var xxx_messageInfo_Finding proto.InternalMessageInfo + +func (m *Finding) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Finding) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Finding) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Finding) GetState() Finding_State { + if m != nil { + return m.State + } + return Finding_STATE_UNSPECIFIED +} + +func (m *Finding) GetCategory() string { + if m != nil { + return m.Category + } + return "" +} + +func (m *Finding) GetExternalUri() string { + if m != nil { + return m.ExternalUri + } + return "" +} + +func (m *Finding) GetSourceProperties() map[string]*_struct.Value { + if m != nil { + return m.SourceProperties + } + return nil +} + +func (m *Finding) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *Finding) GetEventTime() *timestamp.Timestamp { + if m != nil { + return m.EventTime + } + return nil +} + +func (m *Finding) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func init() { + proto.RegisterType((*Finding)(nil), "google.cloud.securitycenter.v1.Finding") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1.Finding.SourcePropertiesEntry") + proto.RegisterEnum("google.cloud.securitycenter.v1.Finding_State", Finding_State_name, Finding_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/finding.proto", fileDescriptor_finding_12e0037571a78005) +} + +var fileDescriptor_finding_12e0037571a78005 = []byte{ + // 555 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcf, 0x6e, 0xda, 0x40, + 0x10, 0xc6, 0x6b, 0x08, 0x04, 0x06, 0x12, 0x91, 0x95, 0x12, 0x59, 0xa8, 0x4a, 0x09, 0xbd, 0x70, + 0x48, 0x6d, 0x41, 0x2e, 0xa9, 0xa3, 0x1e, 0x52, 0x4a, 0x2a, 0xa4, 0x16, 0x21, 0x43, 0x38, 0xb4, + 0x48, 0x68, 
0xe3, 0x4c, 0x2c, 0x37, 0xb0, 0x6b, 0xad, 0xd7, 0xa8, 0xbc, 0x52, 0x2f, 0x7d, 0x8f, + 0x3e, 0x42, 0x1f, 0xa1, 0x4f, 0x51, 0x79, 0xd7, 0x8e, 0xea, 0xf4, 0x0f, 0xbd, 0x79, 0xbe, 0xf9, + 0x7d, 0x33, 0xb3, 0x3b, 0x6b, 0x38, 0xf5, 0x39, 0xf7, 0x97, 0x68, 0x7b, 0x4b, 0x1e, 0xdf, 0xda, + 0x11, 0x7a, 0xb1, 0x08, 0xe4, 0xc6, 0x43, 0x26, 0x51, 0xd8, 0xeb, 0xae, 0x7d, 0x17, 0xb0, 0xdb, + 0x80, 0xf9, 0x56, 0x28, 0xb8, 0xe4, 0xe4, 0x58, 0xd3, 0x96, 0xa2, 0xad, 0x3c, 0x6d, 0xad, 0xbb, + 0xcd, 0xa7, 0x69, 0x35, 0x1a, 0x06, 0x36, 0x65, 0x8c, 0x4b, 0x2a, 0x03, 0xce, 0x22, 0xed, 0x6e, + 0x9e, 0x6d, 0xe9, 0x95, 0x29, 0x8b, 0x15, 0x15, 0xf7, 0x99, 0x29, 0x2b, 0xa9, 0xa2, 0x9b, 0xf8, + 0xce, 0x8e, 0xa4, 0x88, 0x3d, 0x99, 0x66, 0x9f, 0x3d, 0xce, 0xca, 0x60, 0x85, 0x91, 0xa4, 0xab, + 0x50, 0x03, 0xed, 0xaf, 0x25, 0xd8, 0xbd, 0xd2, 0x67, 0x20, 0x04, 0x76, 0x18, 0x5d, 0xa1, 0x69, + 0xb4, 0x8c, 0x4e, 0xd5, 0x55, 0xdf, 0xe4, 0x08, 0xca, 0x21, 0x15, 0xc8, 0xa4, 0x59, 0x50, 0x6a, + 0x1a, 0x91, 0xe7, 0xb0, 0x27, 0x30, 0xe2, 0xb1, 0xf0, 0x70, 0xa1, 0x4c, 0x45, 0x95, 0xae, 0x67, + 0xe2, 0x28, 0x31, 0xf7, 0xa1, 0x14, 0x49, 0x2a, 0xd1, 0xdc, 0x69, 0x19, 0x9d, 0xfd, 0xde, 0x0b, + 0xeb, 0xdf, 0xd7, 0x63, 0xa5, 0x83, 0x58, 0x93, 0xc4, 0xe4, 0x6a, 0x2f, 0x69, 0x42, 0xc5, 0xa3, + 0x12, 0x7d, 0x2e, 0x36, 0x66, 0x49, 0x35, 0x79, 0x88, 0xc9, 0x09, 0xd4, 0xf1, 0xb3, 0x44, 0xc1, + 0xe8, 0x72, 0x11, 0x8b, 0xc0, 0x2c, 0xab, 0x7c, 0x2d, 0xd3, 0xae, 0x45, 0x40, 0x3e, 0xc1, 0x41, + 0x3a, 0x66, 0x28, 0x78, 0x88, 0x42, 0x06, 0x18, 0x99, 0xbb, 0xad, 0x62, 0xa7, 0xd6, 0x7b, 0xf5, + 0xdf, 0xf3, 0xa8, 0x02, 0xe3, 0x07, 0xff, 0x80, 0x49, 0xb1, 0x71, 0x1b, 0xd1, 0x23, 0x99, 0x4c, + 0x61, 0x3f, 0xbf, 0x23, 0xb3, 0xd2, 0x32, 0x3a, 0xb5, 0xed, 0x07, 0x9f, 0xa4, 0xca, 0xfb, 0xc4, + 0xe4, 0xee, 0x45, 0xbf, 0x86, 0xe4, 0x25, 0x00, 0xae, 0x91, 0xc9, 0x45, 0xb2, 0x3b, 0xb3, 0xaa, + 0x2a, 0x36, 0xb3, 0x8a, 0xd9, 0x62, 0xad, 0x69, 0xb6, 0x58, 0xb7, 0xaa, 0xe8, 0x24, 0x26, 0x17, + 0x50, 0xf3, 0x04, 0x52, 0x89, 0xda, 0x0b, 0x5b, 0xbd, 0xa0, 0xf1, 0x44, 0x68, 0x7e, 0x84, 0xc3, + 0x3f, 0x1e, 0x9c, 0x34, 0xa0, 0x78, 0x8f, 0x9b, 0xf4, 0x99, 0x24, 0x9f, 0xe4, 0x14, 0x4a, 0x6b, + 0xba, 0x8c, 0x51, 0x3d, 0x92, 0x5a, 0xef, 0xe8, 0xb7, 0x0e, 0xb3, 0x24, 0xeb, 0x6a, 0xc8, 0x29, + 0x9c, 0x1b, 0xed, 0x73, 0x28, 0xa9, 0x2d, 0x93, 0x43, 0x38, 0x98, 0x4c, 0x2f, 0xa7, 0x83, 0xc5, + 0xf5, 0x68, 0x32, 0x1e, 0xf4, 0x87, 0x57, 0xc3, 0xc1, 0x9b, 0xc6, 0x13, 0x02, 0x50, 0xbe, 0xec, + 0x4f, 0x87, 0xb3, 0x41, 0xc3, 0x20, 0x75, 0xa8, 0x0c, 0x47, 0x69, 0x54, 0x78, 0xfd, 0xdd, 0x80, + 0xb6, 0xc7, 0x57, 0x5b, 0xae, 0x74, 0x6c, 0x7c, 0x78, 0x97, 0x12, 0x3e, 0x5f, 0x52, 0xe6, 0x5b, + 0x5c, 0xf8, 0xb6, 0x8f, 0x4c, 0x8d, 0x64, 0xeb, 0x14, 0x0d, 0x83, 0xe8, 0x6f, 0x7f, 0xdb, 0x45, + 0x5e, 0xf9, 0x52, 0x38, 0x7e, 0xab, 0xcb, 0xf5, 0x55, 0xc3, 0x6c, 0x63, 0x7d, 0xdd, 0x70, 0xd6, + 0xfd, 0x96, 0x01, 0x73, 0x05, 0xcc, 0xf3, 0xc0, 0x7c, 0xd6, 0xfd, 0x51, 0x38, 0xd1, 0x80, 0xe3, + 0x28, 0xc2, 0x71, 0xf2, 0x88, 0xe3, 0xcc, 0xba, 0x37, 0x65, 0x35, 0xde, 0xd9, 0xcf, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x39, 0x58, 0x82, 0xa6, 0x77, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/organization_settings.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/organization_settings.pb.go new file mode 100644 index 0000000..daaee04 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/organization_settings.pb.go @@ -0,0 +1,219 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/securitycenter/v1/organization_settings.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The mode of inclusion when running Asset Discovery. +// Asset discovery can be limited by explicitly identifying projects to be +// included or excluded. If INCLUDE_ONLY is set, then only those projects +// within the organization and their children are discovered during asset +// discovery. If EXCLUDE is set, then projects that don't match those +// projects are discovered during asset discovery. If neither are set, then +// all projects within the organization are discovered during asset +// discovery. +type OrganizationSettings_AssetDiscoveryConfig_InclusionMode int32 + +const ( + // Unspecified. Setting the mode with this value will disable + // inclusion/exclusion filtering for Asset Discovery. + OrganizationSettings_AssetDiscoveryConfig_INCLUSION_MODE_UNSPECIFIED OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 0 + // Asset Discovery will capture only the resources within the projects + // specified. All other resources will be ignored. + OrganizationSettings_AssetDiscoveryConfig_INCLUDE_ONLY OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 1 + // Asset Discovery will ignore all resources under the projects specified. + // All other resources will be retrieved. + OrganizationSettings_AssetDiscoveryConfig_EXCLUDE OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 2 +) + +var OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name = map[int32]string{ + 0: "INCLUSION_MODE_UNSPECIFIED", + 1: "INCLUDE_ONLY", + 2: "EXCLUDE", +} +var OrganizationSettings_AssetDiscoveryConfig_InclusionMode_value = map[string]int32{ + "INCLUSION_MODE_UNSPECIFIED": 0, + "INCLUDE_ONLY": 1, + "EXCLUDE": 2, +} + +func (x OrganizationSettings_AssetDiscoveryConfig_InclusionMode) String() string { + return proto.EnumName(OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name, int32(x)) +} +func (OrganizationSettings_AssetDiscoveryConfig_InclusionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_f518d42165f0c7c2, []int{0, 0, 0} +} + +// User specified settings that are attached to the Cloud Security Command +// Center (Cloud SCC) organization. +type OrganizationSettings struct { + // The relative resource name of the settings. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/organizationSettings". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A flag that indicates if Asset Discovery should be enabled. If the flag is + // set to `true`, then discovery of assets will occur. If it is set to `false, + // all historical assets will remain, but discovery of future assets will not + // occur. 
+ EnableAssetDiscovery bool `protobuf:"varint,2,opt,name=enable_asset_discovery,json=enableAssetDiscovery,proto3" json:"enable_asset_discovery,omitempty"` + // The configuration used for Asset Discovery runs. + AssetDiscoveryConfig *OrganizationSettings_AssetDiscoveryConfig `protobuf:"bytes,3,opt,name=asset_discovery_config,json=assetDiscoveryConfig,proto3" json:"asset_discovery_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrganizationSettings) Reset() { *m = OrganizationSettings{} } +func (m *OrganizationSettings) String() string { return proto.CompactTextString(m) } +func (*OrganizationSettings) ProtoMessage() {} +func (*OrganizationSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_f518d42165f0c7c2, []int{0} +} +func (m *OrganizationSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrganizationSettings.Unmarshal(m, b) +} +func (m *OrganizationSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrganizationSettings.Marshal(b, m, deterministic) +} +func (dst *OrganizationSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrganizationSettings.Merge(dst, src) +} +func (m *OrganizationSettings) XXX_Size() int { + return xxx_messageInfo_OrganizationSettings.Size(m) +} +func (m *OrganizationSettings) XXX_DiscardUnknown() { + xxx_messageInfo_OrganizationSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_OrganizationSettings proto.InternalMessageInfo + +func (m *OrganizationSettings) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *OrganizationSettings) GetEnableAssetDiscovery() bool { + if m != nil { + return m.EnableAssetDiscovery + } + return false +} + +func (m *OrganizationSettings) GetAssetDiscoveryConfig() *OrganizationSettings_AssetDiscoveryConfig { + if m != nil { + return m.AssetDiscoveryConfig + } + return nil +} + +// The configuration used for Asset Discovery runs. +type OrganizationSettings_AssetDiscoveryConfig struct { + // The project ids to use for filtering asset discovery. + ProjectIds []string `protobuf:"bytes,1,rep,name=project_ids,json=projectIds,proto3" json:"project_ids,omitempty"` + // The mode to use for filtering asset discovery. 
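+ //
+ // For illustration (hypothetical project id): with `project_ids =
+ // ["project-one"]`, INCLUDE_ONLY limits discovery to "project-one", while
+ // EXCLUDE skips "project-one" and discovers every other project in the
+ // organization.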
+ InclusionMode OrganizationSettings_AssetDiscoveryConfig_InclusionMode `protobuf:"varint,2,opt,name=inclusion_mode,json=inclusionMode,proto3,enum=google.cloud.securitycenter.v1.OrganizationSettings_AssetDiscoveryConfig_InclusionMode" json:"inclusion_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrganizationSettings_AssetDiscoveryConfig) Reset() { + *m = OrganizationSettings_AssetDiscoveryConfig{} +} +func (m *OrganizationSettings_AssetDiscoveryConfig) String() string { return proto.CompactTextString(m) } +func (*OrganizationSettings_AssetDiscoveryConfig) ProtoMessage() {} +func (*OrganizationSettings_AssetDiscoveryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_f518d42165f0c7c2, []int{0, 0} +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Unmarshal(m, b) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Marshal(b, m, deterministic) +} +func (dst *OrganizationSettings_AssetDiscoveryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Merge(dst, src) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Size() int { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Size(m) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig proto.InternalMessageInfo + +func (m *OrganizationSettings_AssetDiscoveryConfig) GetProjectIds() []string { + if m != nil { + return m.ProjectIds + } + return nil +} + +func (m *OrganizationSettings_AssetDiscoveryConfig) GetInclusionMode() OrganizationSettings_AssetDiscoveryConfig_InclusionMode { + if m != nil { + return m.InclusionMode + } + return OrganizationSettings_AssetDiscoveryConfig_INCLUSION_MODE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*OrganizationSettings)(nil), "google.cloud.securitycenter.v1.OrganizationSettings") + proto.RegisterType((*OrganizationSettings_AssetDiscoveryConfig)(nil), "google.cloud.securitycenter.v1.OrganizationSettings.AssetDiscoveryConfig") + proto.RegisterEnum("google.cloud.securitycenter.v1.OrganizationSettings_AssetDiscoveryConfig_InclusionMode", OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name, OrganizationSettings_AssetDiscoveryConfig_InclusionMode_value) +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/organization_settings.proto", fileDescriptor_organization_settings_f518d42165f0c7c2) +} + +var fileDescriptor_organization_settings_f518d42165f0c7c2 = []byte{ + // 454 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xdf, 0x6e, 0xd3, 0x30, + 0x14, 0xc6, 0x49, 0xda, 0x8d, 0xce, 0x65, 0x53, 0x64, 0x55, 0x53, 0x55, 0xa1, 0x52, 0x7a, 0xd5, + 0x2b, 0x47, 0x19, 0x5c, 0x85, 0x2b, 0x48, 0x03, 0xca, 0xd4, 0xa5, 0x53, 0xaa, 0x95, 0x3f, 0xaa, + 0x14, 0x79, 0x8e, 0xb1, 0x8c, 0x52, 0x3b, 0x8a, 0xdd, 0x4a, 0xe3, 0x02, 0x2e, 0x79, 0x17, 0x1e, + 0x85, 0x47, 0xe0, 0x11, 0x78, 0x01, 0x6e, 0xd1, 0x9c, 0x54, 0x5a, 0x50, 0x81, 0x1b, 0xee, 0x92, + 0xf3, 0xfd, 0xbe, 0xef, 0x9c, 0x63, 0x1d, 0xe0, 0x33, 0x29, 0x59, 0x4e, 0x5d, 0x92, 0xcb, 
0x4d, + 0xe6, 0x2a, 0x4a, 0x36, 0x25, 0xd7, 0x37, 0x84, 0x0a, 0x4d, 0x4b, 0x77, 0xeb, 0xb9, 0xb2, 0x64, + 0x58, 0xf0, 0x8f, 0x58, 0x73, 0x29, 0x52, 0x45, 0xb5, 0xe6, 0x82, 0x29, 0x54, 0x94, 0x52, 0x4b, + 0x38, 0xac, 0xbc, 0xc8, 0x78, 0x51, 0xd3, 0x8b, 0xb6, 0xde, 0xe0, 0x61, 0x9d, 0x8d, 0x0b, 0xee, + 0x62, 0x21, 0xa4, 0x36, 0x29, 0xb5, 0x7b, 0xfc, 0xb3, 0x05, 0x7a, 0xf3, 0x3b, 0xe9, 0x8b, 0x3a, + 0x1c, 0x42, 0xd0, 0x16, 0x78, 0x4d, 0xfb, 0xd6, 0xc8, 0x9a, 0x1c, 0x25, 0xe6, 0x1b, 0x3e, 0x05, + 0xa7, 0x54, 0xe0, 0xeb, 0x9c, 0xa6, 0x58, 0x29, 0xaa, 0xd3, 0x8c, 0x2b, 0x22, 0xb7, 0xb4, 0xbc, + 0xe9, 0xdb, 0x23, 0x6b, 0xd2, 0x49, 0x7a, 0x95, 0xfa, 0xfc, 0x56, 0x9c, 0xee, 0x34, 0xf8, 0x19, + 0x9c, 0xfe, 0x86, 0xa7, 0x44, 0x8a, 0xf7, 0x9c, 0xf5, 0x5b, 0x23, 0x6b, 0xd2, 0x3d, 0x8b, 0xd0, + 0xdf, 0x37, 0x40, 0xfb, 0xe6, 0x43, 0xcd, 0x26, 0x81, 0x09, 0x4c, 0x7a, 0x78, 0x4f, 0x75, 0xf0, + 0xc5, 0x06, 0xbd, 0x7d, 0x38, 0x7c, 0x04, 0xba, 0x45, 0x29, 0x3f, 0x50, 0xa2, 0x53, 0x9e, 0xa9, + 0xbe, 0x35, 0x6a, 0x4d, 0x8e, 0x12, 0x50, 0x97, 0xa2, 0x4c, 0xc1, 0x4f, 0xe0, 0x84, 0x0b, 0x92, + 0x6f, 0xd4, 0xed, 0xbb, 0xaf, 0x65, 0x46, 0xcd, 0xa2, 0x27, 0x67, 0xaf, 0xff, 0xdb, 0xc8, 0x28, + 0xda, 0xe5, 0x5f, 0xc8, 0x8c, 0x26, 0xc7, 0xfc, 0xee, 0xef, 0x38, 0x06, 0xc7, 0x0d, 0x1d, 0x0e, + 0xc1, 0x20, 0x8a, 0x83, 0xd9, 0xd5, 0x22, 0x9a, 0xc7, 0xe9, 0xc5, 0x7c, 0x1a, 0xa6, 0x57, 0xf1, + 0xe2, 0x32, 0x0c, 0xa2, 0x97, 0x51, 0x38, 0x75, 0xee, 0x41, 0x07, 0x3c, 0x30, 0xfa, 0x34, 0x4c, + 0xe7, 0xf1, 0xec, 0xad, 0x63, 0xc1, 0x2e, 0xb8, 0x1f, 0xbe, 0x31, 0x15, 0xc7, 0x3e, 0x6f, 0x77, + 0xda, 0xce, 0xc1, 0x79, 0xbb, 0x73, 0xe0, 0x1c, 0xbe, 0xf8, 0x6e, 0x81, 0x31, 0x91, 0xeb, 0x7f, + 0x6c, 0x72, 0x69, 0xbd, 0x9b, 0xd5, 0x04, 0x93, 0x39, 0x16, 0x0c, 0xc9, 0x92, 0xb9, 0x8c, 0x0a, + 0x73, 0x3e, 0x6e, 0x25, 0xe1, 0x82, 0xab, 0x3f, 0xdd, 0xee, 0xb3, 0x66, 0xe5, 0xab, 0x3d, 0x7c, + 0x55, 0xc5, 0x05, 0xa6, 0xe1, 0xa2, 0x56, 0x83, 0xaa, 0xe1, 0xd2, 0xfb, 0xb6, 0x03, 0x56, 0x06, + 0x58, 0x35, 0x81, 0xd5, 0xd2, 0xfb, 0x61, 0x3f, 0xae, 0x00, 0xdf, 0x37, 0x84, 0xef, 0x37, 0x11, + 0xdf, 0x5f, 0x7a, 0xd7, 0x87, 0x66, 0xbc, 0x27, 0xbf, 0x02, 0x00, 0x00, 0xff, 0xff, 0x8a, 0x57, + 0x11, 0xc7, 0x59, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/run_asset_discovery_response.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/run_asset_discovery_response.pb.go new file mode 100644 index 0000000..978bf2c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/run_asset_discovery_response.pb.go @@ -0,0 +1,143 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1/run_asset_discovery_response.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The state of an asset discovery run. +type RunAssetDiscoveryResponse_State int32 + +const ( + // Asset discovery run state was unspecified. + RunAssetDiscoveryResponse_STATE_UNSPECIFIED RunAssetDiscoveryResponse_State = 0 + // Asset discovery run completed successfully. + RunAssetDiscoveryResponse_COMPLETED RunAssetDiscoveryResponse_State = 1 + // Asset discovery run was cancelled with tasks still pending, as another + // run for the same organization was started with a higher priority. + RunAssetDiscoveryResponse_SUPERSEDED RunAssetDiscoveryResponse_State = 2 + // Asset discovery run was killed and terminated. + RunAssetDiscoveryResponse_TERMINATED RunAssetDiscoveryResponse_State = 3 +) + +var RunAssetDiscoveryResponse_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "COMPLETED", + 2: "SUPERSEDED", + 3: "TERMINATED", +} +var RunAssetDiscoveryResponse_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "COMPLETED": 1, + "SUPERSEDED": 2, + "TERMINATED": 3, +} + +func (x RunAssetDiscoveryResponse_State) String() string { + return proto.EnumName(RunAssetDiscoveryResponse_State_name, int32(x)) +} +func (RunAssetDiscoveryResponse_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_run_asset_discovery_response_3522292b1e727515, []int{0, 0} +} + +// Response of asset discovery run +type RunAssetDiscoveryResponse struct { + // The state of an asset discovery run. + State RunAssetDiscoveryResponse_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.securitycenter.v1.RunAssetDiscoveryResponse_State" json:"state,omitempty"` + // The duration between asset discovery run start and end + Duration *duration.Duration `protobuf:"bytes,2,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunAssetDiscoveryResponse) Reset() { *m = RunAssetDiscoveryResponse{} } +func (m *RunAssetDiscoveryResponse) String() string { return proto.CompactTextString(m) } +func (*RunAssetDiscoveryResponse) ProtoMessage() {} +func (*RunAssetDiscoveryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_run_asset_discovery_response_3522292b1e727515, []int{0} +} +func (m *RunAssetDiscoveryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunAssetDiscoveryResponse.Unmarshal(m, b) +} +func (m *RunAssetDiscoveryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunAssetDiscoveryResponse.Marshal(b, m, deterministic) +} +func (dst *RunAssetDiscoveryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunAssetDiscoveryResponse.Merge(dst, src) +} +func (m *RunAssetDiscoveryResponse) XXX_Size() int { + return xxx_messageInfo_RunAssetDiscoveryResponse.Size(m) +} +func (m *RunAssetDiscoveryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RunAssetDiscoveryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RunAssetDiscoveryResponse proto.InternalMessageInfo + +func (m *RunAssetDiscoveryResponse) GetState() RunAssetDiscoveryResponse_State { + if m != nil { + return m.State + } + return RunAssetDiscoveryResponse_STATE_UNSPECIFIED +} + +func (m *RunAssetDiscoveryResponse) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +func init() { + proto.RegisterType((*RunAssetDiscoveryResponse)(nil), "google.cloud.securitycenter.v1.RunAssetDiscoveryResponse") + 
proto.RegisterEnum("google.cloud.securitycenter.v1.RunAssetDiscoveryResponse_State", RunAssetDiscoveryResponse_State_name, RunAssetDiscoveryResponse_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/run_asset_discovery_response.proto", fileDescriptor_run_asset_discovery_response_3522292b1e727515) +} + +var fileDescriptor_run_asset_discovery_response_3522292b1e727515 = []byte{ + // 377 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xd1, 0xaa, 0xda, 0x30, + 0x18, 0xc7, 0xd7, 0x0e, 0xc7, 0x96, 0x31, 0x71, 0x85, 0x81, 0xca, 0x70, 0xce, 0x2b, 0xaf, 0x12, + 0xea, 0xd8, 0x4d, 0x77, 0x31, 0xba, 0x36, 0x1b, 0x82, 0xba, 0xd2, 0x56, 0x2f, 0x86, 0x50, 0x62, + 0xcd, 0x4a, 0x41, 0x93, 0x92, 0xa4, 0x82, 0x8f, 0x76, 0x5e, 0xe1, 0x3c, 0xca, 0x79, 0x82, 0x73, + 0x79, 0x68, 0xda, 0x1e, 0xf0, 0x80, 0xe7, 0x5c, 0x26, 0xff, 0xdf, 0xf7, 0xcb, 0xff, 0x83, 0x00, + 0x37, 0xe3, 0x3c, 0x3b, 0x50, 0x94, 0x1e, 0x78, 0xb9, 0x47, 0x92, 0xa6, 0xa5, 0xc8, 0xd5, 0x39, + 0xa5, 0x4c, 0x51, 0x81, 0x4e, 0x36, 0x12, 0x25, 0x4b, 0x88, 0x94, 0x54, 0x25, 0xfb, 0x5c, 0xa6, + 0xfc, 0x44, 0xc5, 0x39, 0x11, 0x54, 0x16, 0x9c, 0x49, 0x0a, 0x0b, 0xc1, 0x15, 0xb7, 0x46, 0xb5, + 0x02, 0x6a, 0x05, 0xbc, 0x54, 0xc0, 0x93, 0x3d, 0x6c, 0x72, 0xa4, 0xe9, 0x5d, 0xf9, 0x1f, 0xed, + 0x4b, 0x41, 0x54, 0xce, 0x59, 0x3d, 0x3f, 0xfc, 0xf2, 0x34, 0x57, 0xf9, 0x91, 0x4a, 0x45, 0x8e, + 0x45, 0x03, 0x7c, 0x6e, 0x00, 0x52, 0xe4, 0x88, 0x30, 0xc6, 0x95, 0x9e, 0x96, 0x75, 0x3a, 0xb9, + 0x37, 0xc0, 0x20, 0x2c, 0x99, 0x5b, 0x95, 0xf4, 0xdb, 0x8e, 0x61, 0x53, 0xd1, 0x5a, 0x83, 0x8e, + 0x54, 0x44, 0xd1, 0xbe, 0x31, 0x36, 0xa6, 0xdd, 0xd9, 0x4f, 0xf8, 0x7c, 0x59, 0x78, 0xd5, 0x04, + 0xa3, 0x4a, 0x13, 0xd6, 0x36, 0xeb, 0x3b, 0x78, 0xdb, 0x6e, 0xd1, 0x37, 0xc7, 0xc6, 0xf4, 0xfd, + 0x6c, 0xd0, 0x9a, 0xdb, 0x35, 0xa0, 0xdf, 0x00, 0xe1, 0x23, 0x3a, 0x59, 0x82, 0x8e, 0xd6, 0x58, + 0x9f, 0xc0, 0xc7, 0x28, 0x76, 0x63, 0x9c, 0xac, 0x57, 0x51, 0x80, 0xbd, 0xf9, 0xef, 0x39, 0xf6, + 0x7b, 0xaf, 0xac, 0x0f, 0xe0, 0x9d, 0xf7, 0x77, 0x19, 0x2c, 0x70, 0x8c, 0xfd, 0x9e, 0x61, 0x75, + 0x01, 0x88, 0xd6, 0x01, 0x0e, 0x23, 0xec, 0x63, 0xbf, 0x67, 0x56, 0xe7, 0x18, 0x87, 0xcb, 0xf9, + 0xca, 0xad, 0xf2, 0xd7, 0xbf, 0x6e, 0x0c, 0x30, 0x49, 0xf9, 0xf1, 0x85, 0x9d, 0x02, 0xe3, 0xdf, + 0xa2, 0x21, 0x32, 0x7e, 0x20, 0x2c, 0x83, 0x5c, 0x64, 0x28, 0xa3, 0x4c, 0x37, 0x45, 0x75, 0x44, + 0x8a, 0x5c, 0x5e, 0xfb, 0x04, 0x3f, 0x2e, 0x6f, 0x6e, 0xcd, 0xd1, 0x1f, 0x3d, 0xb3, 0xf5, 0x2a, + 0x7e, 0x1b, 0x35, 0xa9, 0xa7, 0xd3, 0xed, 0xc6, 0xbe, 0x33, 0xbf, 0xd6, 0x80, 0xe3, 0x68, 0xc2, + 0x71, 0x2e, 0x11, 0xc7, 0xd9, 0xd8, 0xbb, 0x37, 0xfa, 0xf5, 0x6f, 0x0f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x2c, 0x18, 0x12, 0xe5, 0x81, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/security_marks.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/security_marks.pb.go new file mode 100644 index 0000000..ca6ff31 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/security_marks.pb.go @@ -0,0 +1,114 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/securitycenter/v1/security_marks.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// User specified security marks that are attached to the parent Cloud Security +// Command Center (Cloud SCC) resource. Security marks are scoped within a Cloud +// SCC organization -- they can be modified and viewed by all users who have +// proper permissions on the organization. +type SecurityMarks struct { + // The relative resource name of the SecurityMarks. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Examples: + // "organizations/123/assets/456/securityMarks" + // "organizations/123/sources/456/findings/789/securityMarks". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mutable user specified security marks belonging to the parent resource. + // Constraints are as follows: + // - Keys and values are treated as case insensitive + // - Keys must be between 1 - 256 characters (inclusive) + // - Keys must be letters, numbers, underscores, or dashes + // - Values have leading and trailing whitespace trimmed, remaining + // characters must be between 1 - 4096 characters (inclusive) + Marks map[string]string `protobuf:"bytes,2,rep,name=marks,proto3" json:"marks,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SecurityMarks) Reset() { *m = SecurityMarks{} } +func (m *SecurityMarks) String() string { return proto.CompactTextString(m) } +func (*SecurityMarks) ProtoMessage() {} +func (*SecurityMarks) Descriptor() ([]byte, []int) { + return fileDescriptor_security_marks_00c042d4d9bf7f29, []int{0} +} +func (m *SecurityMarks) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SecurityMarks.Unmarshal(m, b) +} +func (m *SecurityMarks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SecurityMarks.Marshal(b, m, deterministic) +} +func (dst *SecurityMarks) XXX_Merge(src proto.Message) { + xxx_messageInfo_SecurityMarks.Merge(dst, src) +} +func (m *SecurityMarks) XXX_Size() int { + return xxx_messageInfo_SecurityMarks.Size(m) +} +func (m *SecurityMarks) XXX_DiscardUnknown() { + xxx_messageInfo_SecurityMarks.DiscardUnknown(m) +} + +var xxx_messageInfo_SecurityMarks proto.InternalMessageInfo + +func (m *SecurityMarks) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SecurityMarks) GetMarks() map[string]string { + if m != nil { + return m.Marks + } + return nil +} + +func init() { + proto.RegisterType((*SecurityMarks)(nil), "google.cloud.securitycenter.v1.SecurityMarks") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.securitycenter.v1.SecurityMarks.MarksEntry") +} + +func init() { + 
proto.RegisterFile("google/cloud/securitycenter/v1/security_marks.proto", fileDescriptor_security_marks_00c042d4d9bf7f29) +} + +var fileDescriptor_security_marks_00c042d4d9bf7f29 = []byte{ + // 295 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x32, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0xce, 0xc9, 0x2f, 0x4d, 0xd1, 0x2f, 0x4e, 0x4d, 0x2e, 0x2d, 0xca, 0x2c, + 0xa9, 0x4c, 0x4e, 0xcd, 0x2b, 0x49, 0x2d, 0xd2, 0x2f, 0x33, 0x84, 0x8b, 0xc4, 0xe7, 0x26, 0x16, + 0x65, 0x17, 0xeb, 0x15, 0x14, 0xe5, 0x97, 0xe4, 0x0b, 0xc9, 0x41, 0x34, 0xe9, 0x81, 0x35, 0xe9, + 0xa1, 0x6a, 0xd2, 0x2b, 0x33, 0x94, 0x92, 0x81, 0x1a, 0x9a, 0x58, 0x90, 0xa9, 0x9f, 0x98, 0x97, + 0x97, 0x5f, 0x92, 0x58, 0x92, 0x99, 0x9f, 0x07, 0xd5, 0xad, 0xb4, 0x96, 0x91, 0x8b, 0x37, 0x18, + 0xaa, 0xc7, 0x17, 0x64, 0xaa, 0x90, 0x10, 0x17, 0x4b, 0x5e, 0x62, 0x6e, 0xaa, 0x04, 0xa3, 0x02, + 0xa3, 0x06, 0x67, 0x10, 0x98, 0x2d, 0xe4, 0xc7, 0xc5, 0x0a, 0xb6, 0x52, 0x82, 0x49, 0x81, 0x59, + 0x83, 0xdb, 0xc8, 0x42, 0x0f, 0xbf, 0x9d, 0x7a, 0x28, 0x26, 0xea, 0x81, 0x49, 0xd7, 0xbc, 0x92, + 0xa2, 0xca, 0x20, 0x88, 0x31, 0x52, 0x16, 0x5c, 0x5c, 0x08, 0x41, 0x21, 0x01, 0x2e, 0xe6, 0xec, + 0xd4, 0x4a, 0xa8, 0x85, 0x20, 0xa6, 0x90, 0x08, 0x17, 0x6b, 0x59, 0x62, 0x4e, 0x69, 0xaa, 0x04, + 0x13, 0x58, 0x0c, 0xc2, 0xb1, 0x62, 0xb2, 0x60, 0x74, 0xba, 0xc5, 0xc8, 0xa5, 0x94, 0x9c, 0x9f, + 0x4b, 0xc0, 0x01, 0x01, 0x8c, 0x51, 0x3e, 0x50, 0x15, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, + 0xf9, 0x45, 0xe9, 0xfa, 0xe9, 0xa9, 0x79, 0x60, 0x4f, 0xeb, 0x43, 0xa4, 0x12, 0x0b, 0x32, 0x8b, + 0x71, 0x05, 0xb5, 0x35, 0xaa, 0xc8, 0x2a, 0x26, 0x39, 0x77, 0x88, 0x71, 0xce, 0x60, 0x0b, 0x61, + 0xfe, 0x73, 0x86, 0x58, 0x18, 0x66, 0x78, 0x0a, 0xa6, 0x20, 0x06, 0xac, 0x20, 0x06, 0x55, 0x41, + 0x4c, 0x98, 0xe1, 0x2b, 0x26, 0x45, 0x88, 0x02, 0x2b, 0x2b, 0xb0, 0x0a, 0x2b, 0x2b, 0x54, 0x25, + 0x56, 0x56, 0x61, 0x86, 0x49, 0x6c, 0x60, 0xe7, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0x57, + 0x70, 0x68, 0x8e, 0x08, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/securitycenter_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/securitycenter_service.pb.go new file mode 100644 index 0000000..9d6b21b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/securitycenter_service.pb.go @@ -0,0 +1,2777 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1/securitycenter_service.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The change in state of the asset. +// +// When querying across two points in time this describes +// the change between the two points: ADDED, REMOVED, or ACTIVE. +// If there was no compare_duration supplied in the request the state change +// will be: UNUSED +type ListAssetsResponse_ListAssetsResult_StateChange int32 + +const ( + // State change is unused, this is the canonical default for this enum. + ListAssetsResponse_ListAssetsResult_UNUSED ListAssetsResponse_ListAssetsResult_StateChange = 0 + // Asset was added between the points in time. + ListAssetsResponse_ListAssetsResult_ADDED ListAssetsResponse_ListAssetsResult_StateChange = 1 + // Asset was removed between the points in time. + ListAssetsResponse_ListAssetsResult_REMOVED ListAssetsResponse_ListAssetsResult_StateChange = 2 + // Asset was present at both point(s) in time. + ListAssetsResponse_ListAssetsResult_ACTIVE ListAssetsResponse_ListAssetsResult_StateChange = 3 +) + +var ListAssetsResponse_ListAssetsResult_StateChange_name = map[int32]string{ + 0: "UNUSED", + 1: "ADDED", + 2: "REMOVED", + 3: "ACTIVE", +} +var ListAssetsResponse_ListAssetsResult_StateChange_value = map[string]int32{ + "UNUSED": 0, + "ADDED": 1, + "REMOVED": 2, + "ACTIVE": 3, +} + +func (x ListAssetsResponse_ListAssetsResult_StateChange) String() string { + return proto.EnumName(ListAssetsResponse_ListAssetsResult_StateChange_name, int32(x)) +} +func (ListAssetsResponse_ListAssetsResult_StateChange) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{12, 0, 0} +} + +// The change in state of the finding. +// +// When querying across two points in time this describes +// the change in the finding between the two points: CHANGED, UNCHANGED, +// ADDED, or REMOVED. Findings can not be deleted, so REMOVED implies that +// the finding at timestamp does not match the filter specified, but it did +// at timestamp - compare_duration. If there was no compare_duration +// supplied in the request the state change will be: UNUSED +type ListFindingsResponse_ListFindingsResult_StateChange int32 + +const ( + // State change is unused, this is the canonical default for this enum. + ListFindingsResponse_ListFindingsResult_UNUSED ListFindingsResponse_ListFindingsResult_StateChange = 0 + // The finding has changed state in some way between the points in time + // and existed at both points. + ListFindingsResponse_ListFindingsResult_CHANGED ListFindingsResponse_ListFindingsResult_StateChange = 1 + // The finding has not changed state between the points in time and + // existed at both points. + ListFindingsResponse_ListFindingsResult_UNCHANGED ListFindingsResponse_ListFindingsResult_StateChange = 2 + // The finding was created between the points in time. + ListFindingsResponse_ListFindingsResult_ADDED ListFindingsResponse_ListFindingsResult_StateChange = 3 + // The finding at timestamp does not match the filter specified, but it + // did at timestamp - compare_duration. 
+ ListFindingsResponse_ListFindingsResult_REMOVED ListFindingsResponse_ListFindingsResult_StateChange = 4 +) + +var ListFindingsResponse_ListFindingsResult_StateChange_name = map[int32]string{ + 0: "UNUSED", + 1: "CHANGED", + 2: "UNCHANGED", + 3: "ADDED", + 4: "REMOVED", +} +var ListFindingsResponse_ListFindingsResult_StateChange_value = map[string]int32{ + "UNUSED": 0, + "CHANGED": 1, + "UNCHANGED": 2, + "ADDED": 3, + "REMOVED": 4, +} + +func (x ListFindingsResponse_ListFindingsResult_StateChange) String() string { + return proto.EnumName(ListFindingsResponse_ListFindingsResult_StateChange_name, int32(x)) +} +func (ListFindingsResponse_ListFindingsResult_StateChange) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{14, 0, 0} +} + +// Request message for creating a finding. +type CreateFindingRequest struct { + // Resource name of the new finding's parent. Its format should be + // "organizations/[organization_id]/sources/[source_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Unique identifier provided by the client within the parent scope. + // It must be alphanumeric and less than or equal to 32 characters and + // greater than 0 characters in length. + FindingId string `protobuf:"bytes,2,opt,name=finding_id,json=findingId,proto3" json:"finding_id,omitempty"` + // The Finding being created. The name and security_marks will be ignored as + // they are both output only fields on this resource. + Finding *Finding `protobuf:"bytes,3,opt,name=finding,proto3" json:"finding,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateFindingRequest) Reset() { *m = CreateFindingRequest{} } +func (m *CreateFindingRequest) String() string { return proto.CompactTextString(m) } +func (*CreateFindingRequest) ProtoMessage() {} +func (*CreateFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{0} +} +func (m *CreateFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateFindingRequest.Unmarshal(m, b) +} +func (m *CreateFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateFindingRequest.Marshal(b, m, deterministic) +} +func (dst *CreateFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateFindingRequest.Merge(dst, src) +} +func (m *CreateFindingRequest) XXX_Size() int { + return xxx_messageInfo_CreateFindingRequest.Size(m) +} +func (m *CreateFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateFindingRequest proto.InternalMessageInfo + +func (m *CreateFindingRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateFindingRequest) GetFindingId() string { + if m != nil { + return m.FindingId + } + return "" +} + +func (m *CreateFindingRequest) GetFinding() *Finding { + if m != nil { + return m.Finding + } + return nil +} + +// Request message for creating a source. +type CreateSourceRequest struct { + // Resource name of the new source's parent. Its format should be + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The Source being created, only the display_name and description will be + // used. All other fields will be ignored. 
+ Source *Source `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSourceRequest) Reset() { *m = CreateSourceRequest{} } +func (m *CreateSourceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSourceRequest) ProtoMessage() {} +func (*CreateSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{1} +} +func (m *CreateSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSourceRequest.Unmarshal(m, b) +} +func (m *CreateSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSourceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSourceRequest.Merge(dst, src) +} +func (m *CreateSourceRequest) XXX_Size() int { + return xxx_messageInfo_CreateSourceRequest.Size(m) +} +func (m *CreateSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSourceRequest proto.InternalMessageInfo + +func (m *CreateSourceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSourceRequest) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +// Request message for getting organization settings. +type GetOrganizationSettingsRequest struct { + // Name of the organization to get organization settings for. Its format is + // "organizations/[organization_id]/organizationSettings". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOrganizationSettingsRequest) Reset() { *m = GetOrganizationSettingsRequest{} } +func (m *GetOrganizationSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*GetOrganizationSettingsRequest) ProtoMessage() {} +func (*GetOrganizationSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{2} +} +func (m *GetOrganizationSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOrganizationSettingsRequest.Unmarshal(m, b) +} +func (m *GetOrganizationSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOrganizationSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *GetOrganizationSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOrganizationSettingsRequest.Merge(dst, src) +} +func (m *GetOrganizationSettingsRequest) XXX_Size() int { + return xxx_messageInfo_GetOrganizationSettingsRequest.Size(m) +} +func (m *GetOrganizationSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOrganizationSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOrganizationSettingsRequest proto.InternalMessageInfo + +func (m *GetOrganizationSettingsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for getting a source. +type GetSourceRequest struct { + // Relative resource name of the source. Its format is + // "organizations/[organization_id]/source/[source_id]". 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSourceRequest) Reset() { *m = GetSourceRequest{} } +func (m *GetSourceRequest) String() string { return proto.CompactTextString(m) } +func (*GetSourceRequest) ProtoMessage() {} +func (*GetSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{3} +} +func (m *GetSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSourceRequest.Unmarshal(m, b) +} +func (m *GetSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSourceRequest.Marshal(b, m, deterministic) +} +func (dst *GetSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSourceRequest.Merge(dst, src) +} +func (m *GetSourceRequest) XXX_Size() int { + return xxx_messageInfo_GetSourceRequest.Size(m) +} +func (m *GetSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSourceRequest proto.InternalMessageInfo + +func (m *GetSourceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for grouping by assets. +type GroupAssetsRequest struct { + // Name of the organization to groupBy. Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across assets. + // The expression is a list of zero or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form ` ` and may have a `-` + // character in front of them to indicate negation. The fields map to those + // defined in the Asset resource. Examples include: + // + // * name + // * security_center_properties.resource_name + // * resource_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // The following field and operator combinations are supported: + // name | '=' + // update_time | '>', '<', '>=', '<=', '=' + // create_time | '>', '<', '>=', '<=', '=' + // iam_policy.policy_blob | '=', ':' + // resource_properties | '=', ':', '>', '<', '>=', '<=' + // security_marks | '=', ':' + // security_center_properties.resource_name | '=', ':' + // security_center_properties.resource_type | '=', ':' + // security_center_properties.resource_parent | '=', ':' + // security_center_properties.resource_project | '=', ':' + // security_center_properties.resource_owners | '=', ':' + // + // For example, `resource_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what assets fields to use for grouping. The string + // value should follow SQL syntax: comma separated list of fields. For + // example: + // "security_center_properties.resource_project,security_center_properties.project". 
+ // + // The following fields are supported when compare_duration is not set: + // + // * security_center_properties.resource_project + // * security_center_properties.resource_type + // * security_center_properties.resource_parent + // + // The following fields are supported when compare_duration is set: + // + // * security_center_properties.resource_type + GroupBy string `protobuf:"bytes,3,opt,name=group_by,json=groupBy,proto3" json:"group_by,omitempty"` + // When compare_duration is set, the GroupResult's "state_change" property is + // updated to indicate whether the asset was added, removed, or remained + // present during the compare_duration period of time that precedes the + // read_time. This is the time between (read_time - compare_duration) and + // read_time. + // + // The state change value is derived based on the presence of the asset at the + // two points in time. Intermediate state changes between the two times don't + // affect the result. For example, the results aren't affected if the asset is + // removed and re-created again. + // + // Possible "state_change" values when compare_duration is specified: + // + // * "ADDED": indicates that the asset was not present at the start of + // compare_duration, but present at reference_time. + // * "REMOVED": indicates that the asset was present at the start of + // compare_duration, but not present at reference_time. + // * "ACTIVE": indicates that the asset was present at both the + // start and the end of the time period defined by + // compare_duration and reference_time. + // + // If compare_duration is not specified, then the only possible state_change + // is "UNUSED", which will be the state_change set for all assets present at + // read_time. + // + // If this field is set then `state_change` must be a specified field in + // `group_by`. + CompareDuration *duration.Duration `protobuf:"bytes,4,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // Time used as a reference point when filtering assets. The filter is limited + // to assets existing at the supplied time and their values are those at that + // specific time. Absence of this field will default to the API's version of + // NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // The value returned by the last `GroupAssetsResponse`; indicates + // that this is a continuation of a prior `GroupAssets` call, and that the + // system should return the next page of data. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
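+	// Illustrative sketch (editorial note, not generated output): one way a
+	// caller might populate this request to group assets by resource type and
+	// state change over the preceding day. The organization ID is a placeholder,
+	// and the qualified names (securitycenter, duration) assume a caller that
+	// imports this package and the duration package imported by this file.
+	//
+	//	req := &securitycenter.GroupAssetsRequest{
+	//		Parent:          "organizations/123",
+	//		Filter:          `security_center_properties.resource_type = "google.compute.Instance"`,
+	//		GroupBy:         "security_center_properties.resource_type,state_change",
+	//		CompareDuration: &duration.Duration{Seconds: 86400},
+	//		PageSize:        100,
+	//	}
+	//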
+ PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupAssetsRequest) Reset() { *m = GroupAssetsRequest{} } +func (m *GroupAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*GroupAssetsRequest) ProtoMessage() {} +func (*GroupAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{4} +} +func (m *GroupAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupAssetsRequest.Unmarshal(m, b) +} +func (m *GroupAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *GroupAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupAssetsRequest.Merge(dst, src) +} +func (m *GroupAssetsRequest) XXX_Size() int { + return xxx_messageInfo_GroupAssetsRequest.Size(m) +} +func (m *GroupAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GroupAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupAssetsRequest proto.InternalMessageInfo + +func (m *GroupAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GroupAssetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *GroupAssetsRequest) GetGroupBy() string { + if m != nil { + return m.GroupBy + } + return "" +} + +func (m *GroupAssetsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *GroupAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupAssetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *GroupAssetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for grouping by assets. +type GroupAssetsResponse struct { + // Group results. There exists an element for each existing unique + // combination of property/values. The element contains a count for the number + // of times those specific property/values appear. + GroupByResults []*GroupResult `protobuf:"bytes,1,rep,name=group_by_results,json=groupByResults,proto3" json:"group_by_results,omitempty"` + // Time used for executing the groupBy request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of results matching the query. 
+ TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupAssetsResponse) Reset() { *m = GroupAssetsResponse{} } +func (m *GroupAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*GroupAssetsResponse) ProtoMessage() {} +func (*GroupAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{5} +} +func (m *GroupAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupAssetsResponse.Unmarshal(m, b) +} +func (m *GroupAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *GroupAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupAssetsResponse.Merge(dst, src) +} +func (m *GroupAssetsResponse) XXX_Size() int { + return xxx_messageInfo_GroupAssetsResponse.Size(m) +} +func (m *GroupAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GroupAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupAssetsResponse proto.InternalMessageInfo + +func (m *GroupAssetsResponse) GetGroupByResults() []*GroupResult { + if m != nil { + return m.GroupByResults + } + return nil +} + +func (m *GroupAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupAssetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *GroupAssetsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Request message for grouping by findings. +type GroupFindingsRequest struct { + // Name of the source to groupBy. Its format is + // "organizations/[organization_id]/sources/[source_id]". To groupBy across + // all sources provide a source_id of `-`. For example: + // organizations/123/sources/- + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across findings. + // The expression is a list of one or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form ` ` and may have a `-` + // character in front of them to indicate negation. Examples include: + // + // * name + // * source_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // The following field and operator combinations are supported: + // name | `=` + // parent | '=', ':' + // resource_name | '=', ':' + // state | '=', ':' + // category | '=', ':' + // external_uri | '=', ':' + // event_time | `>`, `<`, `>=`, `<=` + // security_marks | '=', ':' + // source_properties | '=', ':', `>`, `<`, `>=`, `<=` + // + // For example, `source_properties.size = 100` is a valid filter string. 
+ Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what assets fields to use for grouping (including + // `state_change`). The string value should follow SQL syntax: comma separated + // list of fields. For example: "parent,resource_name". + // + // The following fields are supported: + // + // * resource_name + // * category + // * state + // * parent + // + // The following fields are supported when compare_duration is set: + // + // * state_change + GroupBy string `protobuf:"bytes,3,opt,name=group_by,json=groupBy,proto3" json:"group_by,omitempty"` + // Time used as a reference point when filtering findings. The filter is + // limited to findings existing at the supplied time and their values are + // those at that specific time. Absence of this field will default to the + // API's version of NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // When compare_duration is set, the GroupResult's "state_change" attribute is + // updated to indicate whether the finding had its state changed, the + // finding's state remained unchanged, or if the finding was added during the + // compare_duration period of time that precedes the read_time. This is the + // time between (read_time - compare_duration) and read_time. + // + // The state_change value is derived based on the presence and state of the + // finding at the two points in time. Intermediate state changes between the + // two times don't affect the result. For example, the results aren't affected + // if the finding is made inactive and then active again. + // + // Possible "state_change" values when compare_duration is specified: + // + // * "CHANGED": indicates that the finding was present at the start of + // compare_duration, but changed its state at read_time. + // * "UNCHANGED": indicates that the finding was present at the start of + // compare_duration and did not change state at read_time. + // * "ADDED": indicates that the finding was not present at the start + // of compare_duration, but was present at read_time. + // + // If compare_duration is not specified, then the only possible state_change + // is "UNUSED", which will be the state_change set for all findings present + // at read_time. + // + // If this field is set then `state_change` must be a specified field in + // `group_by`. + CompareDuration *duration.Duration `protobuf:"bytes,5,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // The value returned by the last `GroupFindingsResponse`; indicates + // that this is a continuation of a prior `GroupFindings` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
+ PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupFindingsRequest) Reset() { *m = GroupFindingsRequest{} } +func (m *GroupFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*GroupFindingsRequest) ProtoMessage() {} +func (*GroupFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{6} +} +func (m *GroupFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupFindingsRequest.Unmarshal(m, b) +} +func (m *GroupFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *GroupFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupFindingsRequest.Merge(dst, src) +} +func (m *GroupFindingsRequest) XXX_Size() int { + return xxx_messageInfo_GroupFindingsRequest.Size(m) +} +func (m *GroupFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GroupFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupFindingsRequest proto.InternalMessageInfo + +func (m *GroupFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GroupFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *GroupFindingsRequest) GetGroupBy() string { + if m != nil { + return m.GroupBy + } + return "" +} + +func (m *GroupFindingsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupFindingsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *GroupFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *GroupFindingsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for group by findings. +type GroupFindingsResponse struct { + // Group results. There exists an element for each existing unique + // combination of property/values. The element contains a count for the number + // of times those specific property/values appear. + GroupByResults []*GroupResult `protobuf:"bytes,1,rep,name=group_by_results,json=groupByResults,proto3" json:"group_by_results,omitempty"` + // Time used for executing the groupBy request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of results matching the query. 
+ TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupFindingsResponse) Reset() { *m = GroupFindingsResponse{} } +func (m *GroupFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*GroupFindingsResponse) ProtoMessage() {} +func (*GroupFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{7} +} +func (m *GroupFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupFindingsResponse.Unmarshal(m, b) +} +func (m *GroupFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *GroupFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupFindingsResponse.Merge(dst, src) +} +func (m *GroupFindingsResponse) XXX_Size() int { + return xxx_messageInfo_GroupFindingsResponse.Size(m) +} +func (m *GroupFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GroupFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupFindingsResponse proto.InternalMessageInfo + +func (m *GroupFindingsResponse) GetGroupByResults() []*GroupResult { + if m != nil { + return m.GroupByResults + } + return nil +} + +func (m *GroupFindingsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *GroupFindingsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Result containing the properties and count of a groupBy request. +type GroupResult struct { + // Properties matching the groupBy fields in the request. + Properties map[string]*_struct.Value `protobuf:"bytes,1,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Total count of resources for the given properties. 
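+	// Illustrative sketch (editorial note, not generated output): consuming
+	// grouped results from a GroupAssetsResponse. "res" is a hypothetical
+	// response value; property values are *_struct.Value, so string-typed
+	// properties are read with GetStringValue, and the map keys are expected
+	// to match the group_by field names from the request.
+	//
+	//	for _, gr := range res.GetGroupByResults() {
+	//		rt := gr.GetProperties()["security_center_properties.resource_type"].GetStringValue()
+	//		fmt.Printf("%s: %d\n", rt, gr.GetCount())
+	//	}
+	//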
+ Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupResult) Reset() { *m = GroupResult{} } +func (m *GroupResult) String() string { return proto.CompactTextString(m) } +func (*GroupResult) ProtoMessage() {} +func (*GroupResult) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{8} +} +func (m *GroupResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupResult.Unmarshal(m, b) +} +func (m *GroupResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupResult.Marshal(b, m, deterministic) +} +func (dst *GroupResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupResult.Merge(dst, src) +} +func (m *GroupResult) XXX_Size() int { + return xxx_messageInfo_GroupResult.Size(m) +} +func (m *GroupResult) XXX_DiscardUnknown() { + xxx_messageInfo_GroupResult.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupResult proto.InternalMessageInfo + +func (m *GroupResult) GetProperties() map[string]*_struct.Value { + if m != nil { + return m.Properties + } + return nil +} + +func (m *GroupResult) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// Request message for listing sources. +type ListSourcesRequest struct { + // Resource name of the parent of sources to list. Its format should be + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The value returned by the last `ListSourcesResponse`; indicates + // that this is a continuation of a prior `ListSources` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
+ PageSize int32 `protobuf:"varint,7,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSourcesRequest) Reset() { *m = ListSourcesRequest{} } +func (m *ListSourcesRequest) String() string { return proto.CompactTextString(m) } +func (*ListSourcesRequest) ProtoMessage() {} +func (*ListSourcesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{9} +} +func (m *ListSourcesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSourcesRequest.Unmarshal(m, b) +} +func (m *ListSourcesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSourcesRequest.Marshal(b, m, deterministic) +} +func (dst *ListSourcesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSourcesRequest.Merge(dst, src) +} +func (m *ListSourcesRequest) XXX_Size() int { + return xxx_messageInfo_ListSourcesRequest.Size(m) +} +func (m *ListSourcesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSourcesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSourcesRequest proto.InternalMessageInfo + +func (m *ListSourcesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSourcesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListSourcesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing sources. +type ListSourcesResponse struct { + // Sources belonging to the requested parent. + Sources []*Source `protobuf:"bytes,1,rep,name=sources,proto3" json:"sources,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSourcesResponse) Reset() { *m = ListSourcesResponse{} } +func (m *ListSourcesResponse) String() string { return proto.CompactTextString(m) } +func (*ListSourcesResponse) ProtoMessage() {} +func (*ListSourcesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{10} +} +func (m *ListSourcesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSourcesResponse.Unmarshal(m, b) +} +func (m *ListSourcesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSourcesResponse.Marshal(b, m, deterministic) +} +func (dst *ListSourcesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSourcesResponse.Merge(dst, src) +} +func (m *ListSourcesResponse) XXX_Size() int { + return xxx_messageInfo_ListSourcesResponse.Size(m) +} +func (m *ListSourcesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSourcesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSourcesResponse proto.InternalMessageInfo + +func (m *ListSourcesResponse) GetSources() []*Source { + if m != nil { + return m.Sources + } + return nil +} + +func (m *ListSourcesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for listing assets. +type ListAssetsRequest struct { + // Name of the organization assets should belong to. 
Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across assets. + // The expression is a list of zero or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form ` ` and may have a `-` + // character in front of them to indicate negation. The fields map to those + // defined in the Asset resource. Examples include: + // + // * name + // * security_center_properties.resource_name + // * resource_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // The following are the allowed field and operator combinations: + // name | `=` + // update_time | `>`, `<`, `>=`, `<=` + // iam_policy.policy_blob | '=', ':' + // resource_properties | '=', ':', `>`, `<`, `>=`, `<=` + // security_marks | '=', ':' + // security_center_properties.resource_name | '=', ':' + // security_center_properties.resource_type | '=', ':' + // security_center_properties.resource_parent | '=', ':' + // security_center_properties.resource_project | '=', ':' + // security_center_properties.resource_owners | '=', ':' + // + // For example, `resource_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what fields and order to use for sorting. The + // string value should follow SQL syntax: comma separated list of fields. For + // example: "name,resource_properties.a_property". The default sorting order + // is ascending. To specify descending order for a field, a suffix " desc" + // should be appended to the field name. For example: "name + // desc,resource_properties.a_property". Redundant space characters in the + // syntax are insignificant. "name desc,resource_properties.a_property" and " + // name desc , resource_properties.a_property " are equivalent. + // + // The following fields are supported: + // name + // update_time + // resource_properties + // security_marks + // security_center_properties.resource_name + // security_center_properties.resource_parent + // security_center_properties.resource_project + // security_center_properties.resource_type + OrderBy string `protobuf:"bytes,3,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Time used as a reference point when filtering assets. The filter is limited + // to assets existing at the supplied time and their values are those at that + // specific time. Absence of this field will default to the API's version of + // NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // When compare_duration is set, the ListAssetsResult's "state_change" + // attribute is updated to indicate whether the asset was added, removed, or + // remained present during the compare_duration period of time that precedes + // the read_time. This is the time between (read_time - compare_duration) and + // read_time. 
+ // + // The state_change value is derived based on the presence of the asset at the + // two points in time. Intermediate state changes between the two times don't + // affect the result. For example, the results aren't affected if the asset is + // removed and re-created again. + // + // Possible "state_change" values when compare_duration is specified: + // + // * "ADDED": indicates that the asset was not present at the start of + // compare_duration, but present at read_time. + // * "REMOVED": indicates that the asset was present at the start of + // compare_duration, but not present at read_time. + // * "ACTIVE": indicates that the asset was present at both the + // start and the end of the time period defined by + // compare_duration and read_time. + // + // If compare_duration is not specified, then the only possible state_change + // is "UNUSED", which will be the state_change set for all assets present at + // read_time. + CompareDuration *duration.Duration `protobuf:"bytes,5,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // Optional. + // + // A field mask to specify the ListAssetsResult fields to be listed in the + // response. + // An empty field mask will list all fields. + FieldMask *field_mask.FieldMask `protobuf:"bytes,7,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // The value returned by the last `ListAssetsResponse`; indicates + // that this is a continuation of a prior `ListAssets` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,8,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
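+	// Illustrative sketch (editorial note, not generated output): a request that
+	// orders assets by name, flags week-old changes via compare_duration, and
+	// trims the response with a field mask. The organization ID and the field
+	// mask paths are placeholders chosen for the example.
+	//
+	//	req := &securitycenter.ListAssetsRequest{
+	//		Parent:          "organizations/123",
+	//		OrderBy:         "name",
+	//		CompareDuration: &duration.Duration{Seconds: 7 * 24 * 3600},
+	//		FieldMask:       &field_mask.FieldMask{Paths: []string{"asset.name", "state_change"}},
+	//		PageSize:        1000,
+	//	}
+	//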
+ PageSize int32 `protobuf:"varint,9,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsRequest) Reset() { *m = ListAssetsRequest{} } +func (m *ListAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAssetsRequest) ProtoMessage() {} +func (*ListAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{11} +} +func (m *ListAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsRequest.Unmarshal(m, b) +} +func (m *ListAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsRequest.Merge(dst, src) +} +func (m *ListAssetsRequest) XXX_Size() int { + return xxx_messageInfo_ListAssetsRequest.Size(m) +} +func (m *ListAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsRequest proto.InternalMessageInfo + +func (m *ListAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAssetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListAssetsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListAssetsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *ListAssetsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListAssetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListAssetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing assets. +type ListAssetsResponse struct { + // Assets matching the list request. + ListAssetsResults []*ListAssetsResponse_ListAssetsResult `protobuf:"bytes,1,rep,name=list_assets_results,json=listAssetsResults,proto3" json:"list_assets_results,omitempty"` + // Time used for executing the list request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of assets matching the query. 
+ TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsResponse) Reset() { *m = ListAssetsResponse{} } +func (m *ListAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAssetsResponse) ProtoMessage() {} +func (*ListAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{12} +} +func (m *ListAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsResponse.Unmarshal(m, b) +} +func (m *ListAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsResponse.Merge(dst, src) +} +func (m *ListAssetsResponse) XXX_Size() int { + return xxx_messageInfo_ListAssetsResponse.Size(m) +} +func (m *ListAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsResponse proto.InternalMessageInfo + +func (m *ListAssetsResponse) GetListAssetsResults() []*ListAssetsResponse_ListAssetsResult { + if m != nil { + return m.ListAssetsResults + } + return nil +} + +func (m *ListAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListAssetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListAssetsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Result containing the Asset and its State. +type ListAssetsResponse_ListAssetsResult struct { + // Asset matching the search request. + Asset *Asset `protobuf:"bytes,1,opt,name=asset,proto3" json:"asset,omitempty"` + // State change of the asset between the points in time. 
+ StateChange ListAssetsResponse_ListAssetsResult_StateChange `protobuf:"varint,2,opt,name=state_change,json=stateChange,proto3,enum=google.cloud.securitycenter.v1.ListAssetsResponse_ListAssetsResult_StateChange" json:"state_change,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsResponse_ListAssetsResult) Reset() { *m = ListAssetsResponse_ListAssetsResult{} } +func (m *ListAssetsResponse_ListAssetsResult) String() string { return proto.CompactTextString(m) } +func (*ListAssetsResponse_ListAssetsResult) ProtoMessage() {} +func (*ListAssetsResponse_ListAssetsResult) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{12, 0} +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Unmarshal(m, b) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Marshal(b, m, deterministic) +} +func (dst *ListAssetsResponse_ListAssetsResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Merge(dst, src) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Size() int { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Size(m) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsResponse_ListAssetsResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsResponse_ListAssetsResult proto.InternalMessageInfo + +func (m *ListAssetsResponse_ListAssetsResult) GetAsset() *Asset { + if m != nil { + return m.Asset + } + return nil +} + +func (m *ListAssetsResponse_ListAssetsResult) GetStateChange() ListAssetsResponse_ListAssetsResult_StateChange { + if m != nil { + return m.StateChange + } + return ListAssetsResponse_ListAssetsResult_UNUSED +} + +// Request message for listing findings. +type ListFindingsRequest struct { + // Name of the source the findings belong to. Its format is + // "organizations/[organization_id]/sources/[source_id]". To list across all + // sources provide a source_id of `-`. For example: + // organizations/123/sources/- + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across findings. + // The expression is a list of one or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form ` ` and may have a `-` + // character in front of them to indicate negation. Examples include: + // + // * name + // * source_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. 
+ // + // The following field and operator combinations are supported: + // name | `=` + // parent | '=', ':' + // resource_name | '=', ':' + // state | '=', ':' + // category | '=', ':' + // external_uri | '=', ':' + // event_time | `>`, `<`, `>=`, `<=` + // security_marks | '=', ':' + // source_properties | '=', ':', `>`, `<`, `>=`, `<=` + // + // For example, `source_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what fields and order to use for sorting. The + // string value should follow SQL syntax: comma separated list of fields. For + // example: "name,resource_properties.a_property". The default sorting order + // is ascending. To specify descending order for a field, a suffix " desc" + // should be appended to the field name. For example: "name + // desc,source_properties.a_property". Redundant space characters in the + // syntax are insignificant. "name desc,source_properties.a_property" and " + // name desc , source_properties.a_property " are equivalent. + // + // The following fields are supported: + // name + // parent + // state + // category + // resource_name + // event_time + // source_properties + // security_marks + OrderBy string `protobuf:"bytes,3,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Time used as a reference point when filtering findings. The filter is + // limited to findings existing at the supplied time and their values are + // those at that specific time. Absence of this field will default to the + // API's version of NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // When compare_duration is set, the ListFindingsResult's "state_change" + // attribute is updated to indicate whether the finding had its state changed, + // the finding's state remained unchanged, or if the finding was added in any + // state during the compare_duration period of time that precedes the + // read_time. This is the time between (read_time - compare_duration) and + // read_time. + // + // The state_change value is derived based on the presence and state of the + // finding at the two points in time. Intermediate state changes between the + // two times don't affect the result. For example, the results aren't affected + // if the finding is made inactive and then active again. + // + // Possible "state_change" values when compare_duration is specified: + // + // * "CHANGED": indicates that the finding was present at the start of + // compare_duration, but changed its state at read_time. + // * "UNCHANGED": indicates that the finding was present at the start of + // compare_duration and did not change state at read_time. + // * "ADDED": indicates that the finding was not present at the start + // of compare_duration, but was present at read_time. + // + // If compare_duration is not specified, then the only possible state_change + // is "UNUSED", which will be the state_change set for all findings present at + // read_time. + CompareDuration *duration.Duration `protobuf:"bytes,5,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // Optional. + // + // A field mask to specify the Finding fields to be listed in the response. + // An empty field mask will list all fields. 
+ FieldMask *field_mask.FieldMask `protobuf:"bytes,7,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // The value returned by the last `ListFindingsResponse`; indicates + // that this is a continuation of a prior `ListFindings` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,8,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. + PageSize int32 `protobuf:"varint,9,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsRequest) Reset() { *m = ListFindingsRequest{} } +func (m *ListFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingsRequest) ProtoMessage() {} +func (*ListFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{13} +} +func (m *ListFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsRequest.Unmarshal(m, b) +} +func (m *ListFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsRequest.Merge(dst, src) +} +func (m *ListFindingsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingsRequest.Size(m) +} +func (m *ListFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsRequest proto.InternalMessageInfo + +func (m *ListFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFindingsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListFindingsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListFindingsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *ListFindingsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListFindingsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing findings. +type ListFindingsResponse struct { + // Findings matching the list request. + ListFindingsResults []*ListFindingsResponse_ListFindingsResult `protobuf:"bytes,1,rep,name=list_findings_results,json=listFindingsResults,proto3" json:"list_findings_results,omitempty"` + // Time used for executing the list request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of findings matching the query. 
+ TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsResponse) Reset() { *m = ListFindingsResponse{} } +func (m *ListFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingsResponse) ProtoMessage() {} +func (*ListFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{14} +} +func (m *ListFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsResponse.Unmarshal(m, b) +} +func (m *ListFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsResponse.Merge(dst, src) +} +func (m *ListFindingsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingsResponse.Size(m) +} +func (m *ListFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsResponse proto.InternalMessageInfo + +func (m *ListFindingsResponse) GetListFindingsResults() []*ListFindingsResponse_ListFindingsResult { + if m != nil { + return m.ListFindingsResults + } + return nil +} + +func (m *ListFindingsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListFindingsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Result containing the Finding and its StateChange. +type ListFindingsResponse_ListFindingsResult struct { + // Finding matching the search request. + Finding *Finding `protobuf:"bytes,1,opt,name=finding,proto3" json:"finding,omitempty"` + // State change of the finding between the points in time. 
+ StateChange ListFindingsResponse_ListFindingsResult_StateChange `protobuf:"varint,2,opt,name=state_change,json=stateChange,proto3,enum=google.cloud.securitycenter.v1.ListFindingsResponse_ListFindingsResult_StateChange" json:"state_change,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsResponse_ListFindingsResult) Reset() { + *m = ListFindingsResponse_ListFindingsResult{} +} +func (m *ListFindingsResponse_ListFindingsResult) String() string { return proto.CompactTextString(m) } +func (*ListFindingsResponse_ListFindingsResult) ProtoMessage() {} +func (*ListFindingsResponse_ListFindingsResult) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{14, 0} +} +func (m *ListFindingsResponse_ListFindingsResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsResponse_ListFindingsResult.Unmarshal(m, b) +} +func (m *ListFindingsResponse_ListFindingsResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsResponse_ListFindingsResult.Marshal(b, m, deterministic) +} +func (dst *ListFindingsResponse_ListFindingsResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsResponse_ListFindingsResult.Merge(dst, src) +} +func (m *ListFindingsResponse_ListFindingsResult) XXX_Size() int { + return xxx_messageInfo_ListFindingsResponse_ListFindingsResult.Size(m) +} +func (m *ListFindingsResponse_ListFindingsResult) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsResponse_ListFindingsResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsResponse_ListFindingsResult proto.InternalMessageInfo + +func (m *ListFindingsResponse_ListFindingsResult) GetFinding() *Finding { + if m != nil { + return m.Finding + } + return nil +} + +func (m *ListFindingsResponse_ListFindingsResult) GetStateChange() ListFindingsResponse_ListFindingsResult_StateChange { + if m != nil { + return m.StateChange + } + return ListFindingsResponse_ListFindingsResult_UNUSED +} + +// Request message for updating a finding's state. +type SetFindingStateRequest struct { + // The relative resource name of the finding. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456/finding/789". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The desired State of the finding. + State Finding_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.securitycenter.v1.Finding_State" json:"state,omitempty"` + // The time at which the updated state takes effect. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetFindingStateRequest) Reset() { *m = SetFindingStateRequest{} } +func (m *SetFindingStateRequest) String() string { return proto.CompactTextString(m) } +func (*SetFindingStateRequest) ProtoMessage() {} +func (*SetFindingStateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{15} +} +func (m *SetFindingStateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetFindingStateRequest.Unmarshal(m, b) +} +func (m *SetFindingStateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetFindingStateRequest.Marshal(b, m, deterministic) +} +func (dst *SetFindingStateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetFindingStateRequest.Merge(dst, src) +} +func (m *SetFindingStateRequest) XXX_Size() int { + return xxx_messageInfo_SetFindingStateRequest.Size(m) +} +func (m *SetFindingStateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetFindingStateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetFindingStateRequest proto.InternalMessageInfo + +func (m *SetFindingStateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SetFindingStateRequest) GetState() Finding_State { + if m != nil { + return m.State + } + return Finding_STATE_UNSPECIFIED +} + +func (m *SetFindingStateRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +// Request message for running asset discovery for an organization. +type RunAssetDiscoveryRequest struct { + // Name of the organization to run asset discovery for. Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunAssetDiscoveryRequest) Reset() { *m = RunAssetDiscoveryRequest{} } +func (m *RunAssetDiscoveryRequest) String() string { return proto.CompactTextString(m) } +func (*RunAssetDiscoveryRequest) ProtoMessage() {} +func (*RunAssetDiscoveryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{16} +} +func (m *RunAssetDiscoveryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunAssetDiscoveryRequest.Unmarshal(m, b) +} +func (m *RunAssetDiscoveryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunAssetDiscoveryRequest.Marshal(b, m, deterministic) +} +func (dst *RunAssetDiscoveryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunAssetDiscoveryRequest.Merge(dst, src) +} +func (m *RunAssetDiscoveryRequest) XXX_Size() int { + return xxx_messageInfo_RunAssetDiscoveryRequest.Size(m) +} +func (m *RunAssetDiscoveryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunAssetDiscoveryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunAssetDiscoveryRequest proto.InternalMessageInfo + +func (m *RunAssetDiscoveryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// Request message for updating or creating a finding. +type UpdateFindingRequest struct { + // The finding resource to update or create if it does not already exist. 
+ // parent, security_marks, and update_time will be ignored. + // + // In the case of creation, the finding id portion of the name must be + // alphanumeric and less than or equal to 32 characters and greater than 0 + // characters in length. + Finding *Finding `protobuf:"bytes,1,opt,name=finding,proto3" json:"finding,omitempty"` + // The FieldMask to use when updating the finding resource. This field should + // not be specified when creating a finding. + // + // When updating a finding, an empty mask is treated as updating all mutable + // fields and replacing source_properties. Individual source_properties can + // be added/updated by using "source_properties." in the field + // mask. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFindingRequest) Reset() { *m = UpdateFindingRequest{} } +func (m *UpdateFindingRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFindingRequest) ProtoMessage() {} +func (*UpdateFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{17} +} +func (m *UpdateFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFindingRequest.Unmarshal(m, b) +} +func (m *UpdateFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFindingRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFindingRequest.Merge(dst, src) +} +func (m *UpdateFindingRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFindingRequest.Size(m) +} +func (m *UpdateFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFindingRequest proto.InternalMessageInfo + +func (m *UpdateFindingRequest) GetFinding() *Finding { + if m != nil { + return m.Finding + } + return nil +} + +func (m *UpdateFindingRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating an organization's settings. +type UpdateOrganizationSettingsRequest struct { + // The organization settings resource to update. + OrganizationSettings *OrganizationSettings `protobuf:"bytes,1,opt,name=organization_settings,json=organizationSettings,proto3" json:"organization_settings,omitempty"` + // The FieldMask to use when updating the settings resource. + // + // If empty all mutable fields will be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateOrganizationSettingsRequest) Reset() { *m = UpdateOrganizationSettingsRequest{} } +func (m *UpdateOrganizationSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOrganizationSettingsRequest) ProtoMessage() {} +func (*UpdateOrganizationSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{18} +} +func (m *UpdateOrganizationSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Unmarshal(m, b) +} +func (m *UpdateOrganizationSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOrganizationSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOrganizationSettingsRequest.Merge(dst, src) +} +func (m *UpdateOrganizationSettingsRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Size(m) +} +func (m *UpdateOrganizationSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOrganizationSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOrganizationSettingsRequest proto.InternalMessageInfo + +func (m *UpdateOrganizationSettingsRequest) GetOrganizationSettings() *OrganizationSettings { + if m != nil { + return m.OrganizationSettings + } + return nil +} + +func (m *UpdateOrganizationSettingsRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating a source. +type UpdateSourceRequest struct { + // The source resource to update. + Source *Source `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // The FieldMask to use when updating the source resource. + // + // If empty all mutable fields will be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSourceRequest) Reset() { *m = UpdateSourceRequest{} } +func (m *UpdateSourceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSourceRequest) ProtoMessage() {} +func (*UpdateSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{19} +} +func (m *UpdateSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSourceRequest.Unmarshal(m, b) +} +func (m *UpdateSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSourceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSourceRequest.Merge(dst, src) +} +func (m *UpdateSourceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSourceRequest.Size(m) +} +func (m *UpdateSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSourceRequest proto.InternalMessageInfo + +func (m *UpdateSourceRequest) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *UpdateSourceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating a SecurityMarks resource. +type UpdateSecurityMarksRequest struct { + // The security marks resource to update. + SecurityMarks *SecurityMarks `protobuf:"bytes,1,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The FieldMask to use when updating the security marks resource. + // + // The field mask must not contain duplicate fields. + // If empty or set to "marks", all marks will be replaced. Individual + // marks can be updated using "marks.". + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The time at which the updated SecurityMarks take effect. + // If not set uses current server time. Updates will be applied to the + // SecurityMarks that are active immediately preceding this time. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSecurityMarksRequest) Reset() { *m = UpdateSecurityMarksRequest{} } +func (m *UpdateSecurityMarksRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSecurityMarksRequest) ProtoMessage() {} +func (*UpdateSecurityMarksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_0a41679950f5c36f, []int{20} +} +func (m *UpdateSecurityMarksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSecurityMarksRequest.Unmarshal(m, b) +} +func (m *UpdateSecurityMarksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSecurityMarksRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSecurityMarksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSecurityMarksRequest.Merge(dst, src) +} +func (m *UpdateSecurityMarksRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSecurityMarksRequest.Size(m) +} +func (m *UpdateSecurityMarksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSecurityMarksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSecurityMarksRequest proto.InternalMessageInfo + +func (m *UpdateSecurityMarksRequest) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *UpdateSecurityMarksRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateSecurityMarksRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func init() { + proto.RegisterType((*CreateFindingRequest)(nil), "google.cloud.securitycenter.v1.CreateFindingRequest") + proto.RegisterType((*CreateSourceRequest)(nil), "google.cloud.securitycenter.v1.CreateSourceRequest") + proto.RegisterType((*GetOrganizationSettingsRequest)(nil), "google.cloud.securitycenter.v1.GetOrganizationSettingsRequest") + proto.RegisterType((*GetSourceRequest)(nil), "google.cloud.securitycenter.v1.GetSourceRequest") + proto.RegisterType((*GroupAssetsRequest)(nil), "google.cloud.securitycenter.v1.GroupAssetsRequest") + proto.RegisterType((*GroupAssetsResponse)(nil), "google.cloud.securitycenter.v1.GroupAssetsResponse") + proto.RegisterType((*GroupFindingsRequest)(nil), "google.cloud.securitycenter.v1.GroupFindingsRequest") + proto.RegisterType((*GroupFindingsResponse)(nil), "google.cloud.securitycenter.v1.GroupFindingsResponse") + proto.RegisterType((*GroupResult)(nil), "google.cloud.securitycenter.v1.GroupResult") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1.GroupResult.PropertiesEntry") + proto.RegisterType((*ListSourcesRequest)(nil), "google.cloud.securitycenter.v1.ListSourcesRequest") + proto.RegisterType((*ListSourcesResponse)(nil), "google.cloud.securitycenter.v1.ListSourcesResponse") + proto.RegisterType((*ListAssetsRequest)(nil), "google.cloud.securitycenter.v1.ListAssetsRequest") + proto.RegisterType((*ListAssetsResponse)(nil), "google.cloud.securitycenter.v1.ListAssetsResponse") + proto.RegisterType((*ListAssetsResponse_ListAssetsResult)(nil), "google.cloud.securitycenter.v1.ListAssetsResponse.ListAssetsResult") + proto.RegisterType((*ListFindingsRequest)(nil), "google.cloud.securitycenter.v1.ListFindingsRequest") + 
proto.RegisterType((*ListFindingsResponse)(nil), "google.cloud.securitycenter.v1.ListFindingsResponse") + proto.RegisterType((*ListFindingsResponse_ListFindingsResult)(nil), "google.cloud.securitycenter.v1.ListFindingsResponse.ListFindingsResult") + proto.RegisterType((*SetFindingStateRequest)(nil), "google.cloud.securitycenter.v1.SetFindingStateRequest") + proto.RegisterType((*RunAssetDiscoveryRequest)(nil), "google.cloud.securitycenter.v1.RunAssetDiscoveryRequest") + proto.RegisterType((*UpdateFindingRequest)(nil), "google.cloud.securitycenter.v1.UpdateFindingRequest") + proto.RegisterType((*UpdateOrganizationSettingsRequest)(nil), "google.cloud.securitycenter.v1.UpdateOrganizationSettingsRequest") + proto.RegisterType((*UpdateSourceRequest)(nil), "google.cloud.securitycenter.v1.UpdateSourceRequest") + proto.RegisterType((*UpdateSecurityMarksRequest)(nil), "google.cloud.securitycenter.v1.UpdateSecurityMarksRequest") + proto.RegisterEnum("google.cloud.securitycenter.v1.ListAssetsResponse_ListAssetsResult_StateChange", ListAssetsResponse_ListAssetsResult_StateChange_name, ListAssetsResponse_ListAssetsResult_StateChange_value) + proto.RegisterEnum("google.cloud.securitycenter.v1.ListFindingsResponse_ListFindingsResult_StateChange", ListFindingsResponse_ListFindingsResult_StateChange_name, ListFindingsResponse_ListFindingsResult_StateChange_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SecurityCenterClient is the client API for SecurityCenter service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SecurityCenterClient interface { + // Creates a source. + CreateSource(ctx context.Context, in *CreateSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Creates a finding. The corresponding source must exist for finding creation + // to succeed. + CreateFinding(ctx context.Context, in *CreateFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // Gets the access control policy on the specified Source. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the settings for an organization. + GetOrganizationSettings(ctx context.Context, in *GetOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) + // Gets a source. + GetSource(ctx context.Context, in *GetSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Filters an organization's assets and groups them by their specified + // properties. + GroupAssets(ctx context.Context, in *GroupAssetsRequest, opts ...grpc.CallOption) (*GroupAssetsResponse, error) + // Filters an organization or source's findings and groups them by their + // specified properties. + // + // To group across all sources provide a `-` as the source id. + // Example: /v1/organizations/123/sources/-/findings + GroupFindings(ctx context.Context, in *GroupFindingsRequest, opts ...grpc.CallOption) (*GroupFindingsResponse, error) + // Lists an organization's assets. + ListAssets(ctx context.Context, in *ListAssetsRequest, opts ...grpc.CallOption) (*ListAssetsResponse, error) + // Lists an organization or source's findings. 
+ // + // To list across all sources provide a `-` as the source id. + // Example: /v1/organizations/123/sources/-/findings + ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) + // Lists all sources belonging to an organization. + ListSources(ctx context.Context, in *ListSourcesRequest, opts ...grpc.CallOption) (*ListSourcesResponse, error) + // Runs asset discovery. The discovery is tracked with a long-running + // operation. + // + // This API can only be called with limited frequency for an organization. If + // it is called too frequently the caller will receive a TOO_MANY_REQUESTS + // error. + RunAssetDiscovery(ctx context.Context, in *RunAssetDiscoveryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the state of a finding. + SetFindingState(ctx context.Context, in *SetFindingStateRequest, opts ...grpc.CallOption) (*Finding, error) + // Sets the access control policy on the specified Source. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified source. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Creates or updates a finding. The corresponding source must exist for a + // finding creation to succeed. + UpdateFinding(ctx context.Context, in *UpdateFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // Updates an organization's settings. + UpdateOrganizationSettings(ctx context.Context, in *UpdateOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) + // Updates a source. + UpdateSource(ctx context.Context, in *UpdateSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Updates security marks. + UpdateSecurityMarks(ctx context.Context, in *UpdateSecurityMarksRequest, opts ...grpc.CallOption) (*SecurityMarks, error) +} + +type securityCenterClient struct { + cc *grpc.ClientConn +} + +func NewSecurityCenterClient(cc *grpc.ClientConn) SecurityCenterClient { + return &securityCenterClient{cc} +} + +func (c *securityCenterClient) CreateSource(ctx context.Context, in *CreateSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/CreateSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) CreateFinding(ctx context.Context, in *CreateFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/CreateFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetOrganizationSettings(ctx context.Context, in *GetOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) { + out := new(OrganizationSettings) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/GetOrganizationSettings", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetSource(ctx context.Context, in *GetSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/GetSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GroupAssets(ctx context.Context, in *GroupAssetsRequest, opts ...grpc.CallOption) (*GroupAssetsResponse, error) { + out := new(GroupAssetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/GroupAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GroupFindings(ctx context.Context, in *GroupFindingsRequest, opts ...grpc.CallOption) (*GroupFindingsResponse, error) { + out := new(GroupFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/GroupFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListAssets(ctx context.Context, in *ListAssetsRequest, opts ...grpc.CallOption) (*ListAssetsResponse, error) { + out := new(ListAssetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/ListAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) { + out := new(ListFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/ListFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListSources(ctx context.Context, in *ListSourcesRequest, opts ...grpc.CallOption) (*ListSourcesResponse, error) { + out := new(ListSourcesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/ListSources", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) RunAssetDiscovery(ctx context.Context, in *RunAssetDiscoveryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/RunAssetDiscovery", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) SetFindingState(ctx context.Context, in *SetFindingStateRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/SetFindingState", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/TestIamPermissions", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateFinding(ctx context.Context, in *UpdateFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/UpdateFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateOrganizationSettings(ctx context.Context, in *UpdateOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) { + out := new(OrganizationSettings) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/UpdateOrganizationSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateSource(ctx context.Context, in *UpdateSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/UpdateSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateSecurityMarks(ctx context.Context, in *UpdateSecurityMarksRequest, opts ...grpc.CallOption) (*SecurityMarks, error) { + out := new(SecurityMarks) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1.SecurityCenter/UpdateSecurityMarks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SecurityCenterServer is the server API for SecurityCenter service. +type SecurityCenterServer interface { + // Creates a source. + CreateSource(context.Context, *CreateSourceRequest) (*Source, error) + // Creates a finding. The corresponding source must exist for finding creation + // to succeed. + CreateFinding(context.Context, *CreateFindingRequest) (*Finding, error) + // Gets the access control policy on the specified Source. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Gets the settings for an organization. + GetOrganizationSettings(context.Context, *GetOrganizationSettingsRequest) (*OrganizationSettings, error) + // Gets a source. + GetSource(context.Context, *GetSourceRequest) (*Source, error) + // Filters an organization's assets and groups them by their specified + // properties. + GroupAssets(context.Context, *GroupAssetsRequest) (*GroupAssetsResponse, error) + // Filters an organization or source's findings and groups them by their + // specified properties. + // + // To group across all sources provide a `-` as the source id. + // Example: /v1/organizations/123/sources/-/findings + GroupFindings(context.Context, *GroupFindingsRequest) (*GroupFindingsResponse, error) + // Lists an organization's assets. + ListAssets(context.Context, *ListAssetsRequest) (*ListAssetsResponse, error) + // Lists an organization or source's findings. + // + // To list across all sources provide a `-` as the source id. + // Example: /v1/organizations/123/sources/-/findings + ListFindings(context.Context, *ListFindingsRequest) (*ListFindingsResponse, error) + // Lists all sources belonging to an organization. + ListSources(context.Context, *ListSourcesRequest) (*ListSourcesResponse, error) + // Runs asset discovery. The discovery is tracked with a long-running + // operation. + // + // This API can only be called with limited frequency for an organization. If + // it is called too frequently the caller will receive a TOO_MANY_REQUESTS + // error. 
+ RunAssetDiscovery(context.Context, *RunAssetDiscoveryRequest) (*longrunning.Operation, error) + // Updates the state of a finding. + SetFindingState(context.Context, *SetFindingStateRequest) (*Finding, error) + // Sets the access control policy on the specified Source. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified source. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Creates or updates a finding. The corresponding source must exist for a + // finding creation to succeed. + UpdateFinding(context.Context, *UpdateFindingRequest) (*Finding, error) + // Updates an organization's settings. + UpdateOrganizationSettings(context.Context, *UpdateOrganizationSettingsRequest) (*OrganizationSettings, error) + // Updates a source. + UpdateSource(context.Context, *UpdateSourceRequest) (*Source, error) + // Updates security marks. + UpdateSecurityMarks(context.Context, *UpdateSecurityMarksRequest) (*SecurityMarks, error) +} + +func RegisterSecurityCenterServer(s *grpc.Server, srv SecurityCenterServer) { + s.RegisterService(&_SecurityCenter_serviceDesc, srv) +} + +func _SecurityCenter_CreateSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).CreateSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/CreateSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).CreateSource(ctx, req.(*CreateSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_CreateFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).CreateFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/CreateFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).CreateFinding(ctx, req.(*CreateFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetOrganizationSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOrganizationSettingsRequest) 
+ if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetOrganizationSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/GetOrganizationSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetOrganizationSettings(ctx, req.(*GetOrganizationSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/GetSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetSource(ctx, req.(*GetSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GroupAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GroupAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GroupAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/GroupAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GroupAssets(ctx, req.(*GroupAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GroupFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GroupFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GroupFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/GroupFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GroupFindings(ctx, req.(*GroupFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).ListAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/ListAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListAssets(ctx, req.(*ListAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(SecurityCenterServer).ListFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/ListFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListFindings(ctx, req.(*ListFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListSources_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSourcesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).ListSources(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/ListSources", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListSources(ctx, req.(*ListSourcesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_RunAssetDiscovery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunAssetDiscoveryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).RunAssetDiscovery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/RunAssetDiscovery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).RunAssetDiscovery(ctx, req.(*RunAssetDiscoveryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_SetFindingState_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetFindingStateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).SetFindingState(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/SetFindingState", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).SetFindingState(ctx, req.(*SetFindingStateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == 
nil { + return srv.(SecurityCenterServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/UpdateFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateFinding(ctx, req.(*UpdateFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateOrganizationSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOrganizationSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateOrganizationSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/UpdateOrganizationSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateOrganizationSettings(ctx, req.(*UpdateOrganizationSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/UpdateSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateSource(ctx, req.(*UpdateSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateSecurityMarks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSecurityMarksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateSecurityMarks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1.SecurityCenter/UpdateSecurityMarks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateSecurityMarks(ctx, req.(*UpdateSecurityMarksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SecurityCenter_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.securitycenter.v1.SecurityCenter", + HandlerType: (*SecurityCenterServer)(nil), + Methods: []grpc.MethodDesc{ + { + 
MethodName: "CreateSource", + Handler: _SecurityCenter_CreateSource_Handler, + }, + { + MethodName: "CreateFinding", + Handler: _SecurityCenter_CreateFinding_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _SecurityCenter_GetIamPolicy_Handler, + }, + { + MethodName: "GetOrganizationSettings", + Handler: _SecurityCenter_GetOrganizationSettings_Handler, + }, + { + MethodName: "GetSource", + Handler: _SecurityCenter_GetSource_Handler, + }, + { + MethodName: "GroupAssets", + Handler: _SecurityCenter_GroupAssets_Handler, + }, + { + MethodName: "GroupFindings", + Handler: _SecurityCenter_GroupFindings_Handler, + }, + { + MethodName: "ListAssets", + Handler: _SecurityCenter_ListAssets_Handler, + }, + { + MethodName: "ListFindings", + Handler: _SecurityCenter_ListFindings_Handler, + }, + { + MethodName: "ListSources", + Handler: _SecurityCenter_ListSources_Handler, + }, + { + MethodName: "RunAssetDiscovery", + Handler: _SecurityCenter_RunAssetDiscovery_Handler, + }, + { + MethodName: "SetFindingState", + Handler: _SecurityCenter_SetFindingState_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _SecurityCenter_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _SecurityCenter_TestIamPermissions_Handler, + }, + { + MethodName: "UpdateFinding", + Handler: _SecurityCenter_UpdateFinding_Handler, + }, + { + MethodName: "UpdateOrganizationSettings", + Handler: _SecurityCenter_UpdateOrganizationSettings_Handler, + }, + { + MethodName: "UpdateSource", + Handler: _SecurityCenter_UpdateSource_Handler, + }, + { + MethodName: "UpdateSecurityMarks", + Handler: _SecurityCenter_UpdateSecurityMarks_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/securitycenter/v1/securitycenter_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/securitycenter_service.proto", fileDescriptor_securitycenter_service_0a41679950f5c36f) +} + +var fileDescriptor_securitycenter_service_0a41679950f5c36f = []byte{ + // 1987 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x5a, 0xcd, 0x6f, 0x1b, 0xc7, + 0x15, 0xef, 0xac, 0x3e, 0xf9, 0x68, 0xd9, 0xf4, 0x48, 0x76, 0x99, 0x4d, 0xec, 0x2a, 0x9b, 0xc4, + 0x55, 0x6c, 0x97, 0x8c, 0x28, 0xbb, 0x76, 0x57, 0x70, 0x60, 0x99, 0x92, 0x15, 0x17, 0xfe, 0x02, + 0x29, 0xe9, 0x90, 0x1a, 0x21, 0xd6, 0xe4, 0x98, 0x59, 0x88, 0xdc, 0x65, 0x77, 0x86, 0x42, 0xe5, + 0xc0, 0x28, 0x90, 0x6b, 0x81, 0x22, 0x68, 0x80, 0x1e, 0x8b, 0xb4, 0xe8, 0xa1, 0x2d, 0x72, 0x68, + 0x83, 0x02, 0x6d, 0x81, 0xa2, 0xe7, 0xa2, 0x3d, 0xf6, 0x50, 0x20, 0xe8, 0xb1, 0xa7, 0xa0, 0xe8, + 0x3f, 0xd0, 0x4b, 0x31, 0x5f, 0xd4, 0xee, 0x72, 0xc5, 0x5d, 0x5a, 0x0a, 0x0c, 0x14, 0xbd, 0x71, + 0x66, 0xde, 0x7b, 0xf3, 0x7b, 0x6f, 0xde, 0x7b, 0xf3, 0xde, 0x2c, 0x61, 0xb5, 0xed, 0xfb, 0xed, + 0x0e, 0x29, 0x37, 0x3b, 0x7e, 0xbf, 0x55, 0xa6, 0xa4, 0xd9, 0x0f, 0x5c, 0xb6, 0xdf, 0x24, 0x1e, + 0x23, 0x41, 0x79, 0x6f, 0x39, 0x36, 0xd3, 0xa0, 0x24, 0xd8, 0x73, 0x9b, 0xa4, 0xd4, 0x0b, 0x7c, + 0xe6, 0xe3, 0xf3, 0x92, 0xb9, 0x24, 0x98, 0x4b, 0x51, 0xd2, 0xd2, 0xde, 0xb2, 0xf9, 0x8a, 0x12, + 0xee, 0xf4, 0xdc, 0xb2, 0xe3, 0x79, 0x3e, 0x73, 0x98, 0xeb, 0x7b, 0x54, 0x72, 0x9b, 0x17, 0x53, + 0xb6, 0x76, 0x28, 0x25, 0x4c, 0xd1, 0x5e, 0x4e, 0xa1, 0x7d, 0xe2, 0x7a, 0x2d, 0xd7, 0x6b, 0x2b, + 0x6a, 0x3b, 0x85, 0xda, 0x0f, 0xda, 0x8e, 0xe7, 0x3e, 0x15, 0x68, 0x1a, 0x94, 0x30, 0xe6, 0x7a, + 0x6d, 0x8d, 0x6a, 0x25, 0xa3, 0x41, 0x1a, 0x5d, 0x27, 0xd8, 0xd5, 0x4c, 0x97, 0xd2, 0x98, 0xfc, + 0x7e, 0xa0, 0xad, 0x66, 
0x2a, 0xab, 0x95, 0x5d, 0xa7, 0xcb, 0xd7, 0x5c, 0xa7, 0xdb, 0xe8, 0xf9, + 0x1d, 0xb7, 0xb9, 0xaf, 0xd6, 0xcd, 0xe8, 0x7a, 0x64, 0xed, 0x35, 0xb5, 0xd6, 0xf1, 0xbd, 0x76, + 0xd0, 0xf7, 0x3c, 0xd7, 0x6b, 0x97, 0xfd, 0x1e, 0x09, 0x22, 0x86, 0xd5, 0x1b, 0x88, 0xd1, 0xe3, + 0xfe, 0x93, 0x72, 0xab, 0x2f, 0x09, 0xd4, 0xfa, 0xcb, 0xf1, 0x75, 0xd2, 0xed, 0x31, 0xbd, 0xc3, + 0x62, 0x7c, 0xf1, 0x89, 0x4b, 0x3a, 0xad, 0x46, 0xd7, 0xa1, 0xbb, 0x8a, 0xe2, 0x95, 0x38, 0x05, + 0x65, 0x41, 0xbf, 0xa9, 0x4f, 0xea, 0x6b, 0xf1, 0x55, 0xe6, 0x76, 0x09, 0x65, 0x4e, 0xb7, 0x27, + 0x09, 0xac, 0x8f, 0x10, 0x2c, 0x54, 0x03, 0xe2, 0x30, 0x72, 0x5b, 0x1e, 0x5a, 0x8d, 0x7c, 0xb7, + 0x4f, 0x28, 0xc3, 0x67, 0x61, 0xba, 0xe7, 0x04, 0xc4, 0x63, 0x45, 0xb4, 0x88, 0x96, 0x72, 0x35, + 0x35, 0xc2, 0xe7, 0x00, 0xd4, 0xf1, 0x36, 0xdc, 0x56, 0xd1, 0x10, 0x6b, 0x39, 0x35, 0x73, 0xa7, + 0x85, 0xd7, 0x60, 0x46, 0x0d, 0x8a, 0x13, 0x8b, 0x68, 0x29, 0x5f, 0xf9, 0x7a, 0x69, 0xb4, 0x5b, + 0x96, 0xf4, 0xbe, 0x9a, 0xcf, 0xea, 0xc2, 0xbc, 0x44, 0x54, 0x17, 0xe7, 0x94, 0x06, 0xe8, 0x6d, + 0x98, 0x96, 0x07, 0x2a, 0xc0, 0xe4, 0x2b, 0x17, 0xd2, 0x36, 0x54, 0x62, 0x15, 0x97, 0x75, 0x05, + 0xce, 0x6f, 0x12, 0xf6, 0x20, 0xe4, 0x84, 0x75, 0xe5, 0x83, 0x7a, 0x67, 0x0c, 0x93, 0x9e, 0xd3, + 0x25, 0x6a, 0x5f, 0xf1, 0xdb, 0xba, 0x00, 0x85, 0x4d, 0xc2, 0xa2, 0x08, 0x93, 0xe8, 0x3e, 0x31, + 0x00, 0x6f, 0x06, 0x7e, 0xbf, 0xb7, 0xc6, 0xe3, 0x87, 0xa6, 0x29, 0x73, 0x16, 0xa6, 0x9f, 0xb8, + 0x1d, 0x46, 0x02, 0x65, 0x59, 0x35, 0xc2, 0x2f, 0xc1, 0x6c, 0x9b, 0x4b, 0x69, 0x3c, 0xde, 0x17, + 0x76, 0xcd, 0xd5, 0x66, 0xc4, 0xf8, 0xd6, 0x3e, 0x5e, 0x87, 0x42, 0xd3, 0xef, 0x72, 0xfe, 0x86, + 0xf6, 0xac, 0xe2, 0xa4, 0xb0, 0xc4, 0x4b, 0xda, 0x12, 0xfa, 0xf4, 0x4b, 0xeb, 0x8a, 0xa0, 0x76, + 0x4a, 0xb1, 0xe8, 0x09, 0x7c, 0x0d, 0x72, 0x01, 0x71, 0x5a, 0x0d, 0xee, 0x1f, 0xc5, 0x29, 0xc1, + 0x6e, 0x0e, 0xb1, 0x6f, 0x69, 0xe7, 0xa9, 0xcd, 0x72, 0x62, 0x3e, 0xe4, 0xfe, 0xd0, 0x73, 0xda, + 0xa4, 0xc1, 0xfc, 0x5d, 0xe2, 0x15, 0x67, 0xa4, 0x3f, 0xf0, 0x99, 0x2d, 0x3e, 0x81, 0x5f, 0x06, + 0x31, 0x68, 0x50, 0xf7, 0x29, 0x29, 0xce, 0x2e, 0xa2, 0xa5, 0xa9, 0xda, 0x2c, 0x9f, 0xa8, 0xbb, + 0x4f, 0xc9, 0xb7, 0x27, 0x67, 0xa7, 0x0b, 0x33, 0xd6, 0xbf, 0x10, 0xcc, 0x47, 0x4c, 0x44, 0x7b, + 0xbe, 0x47, 0x09, 0xde, 0x86, 0x82, 0xd6, 0xb9, 0x11, 0x10, 0xda, 0xef, 0x30, 0x5a, 0x44, 0x8b, + 0x13, 0x4b, 0xf9, 0xca, 0xa5, 0xb4, 0x23, 0x16, 0xe2, 0x6a, 0x82, 0xa7, 0x76, 0x52, 0x19, 0x4a, + 0x0e, 0x69, 0x54, 0x53, 0x63, 0x0c, 0x4d, 0x2f, 0xc0, 0x29, 0x8f, 0x7c, 0x8f, 0x35, 0x42, 0xea, + 0xca, 0xa3, 0x98, 0xe3, 0xd3, 0x0f, 0x07, 0x2a, 0x9f, 0x03, 0x60, 0x3e, 0x73, 0x3a, 0x52, 0xe7, + 0x49, 0xa1, 0x73, 0x4e, 0xcc, 0x70, 0xa5, 0xad, 0x9f, 0x19, 0xb0, 0x20, 0xf0, 0x29, 0xc7, 0xff, + 0x32, 0x7c, 0x22, 0xa2, 0xe3, 0xe4, 0x18, 0x3a, 0x26, 0x39, 0xd3, 0xd4, 0xd8, 0xce, 0x74, 0x74, + 0x9f, 0xf8, 0x37, 0x82, 0x33, 0x31, 0x23, 0xfd, 0x6f, 0x7b, 0xc5, 0xdf, 0x11, 0xe4, 0x43, 0xf8, + 0xf0, 0x77, 0x00, 0x7a, 0x01, 0xbf, 0x4b, 0x98, 0x4b, 0xb4, 0x82, 0xab, 0x63, 0x28, 0x58, 0x7a, + 0x38, 0xe0, 0xde, 0xf0, 0x58, 0xb0, 0x5f, 0x0b, 0x89, 0xc3, 0x0b, 0x30, 0xd5, 0xf4, 0xfb, 0x1e, + 0x13, 0x8a, 0x4e, 0xd4, 0xe4, 0xc0, 0xdc, 0x86, 0x53, 0x31, 0x26, 0x5c, 0x80, 0x89, 0x5d, 0xb2, + 0xaf, 0xfc, 0x91, 0xff, 0xc4, 0x97, 0x61, 0x6a, 0xcf, 0xe9, 0xf4, 0xb5, 0x8d, 0xce, 0x0e, 0xd9, + 0x68, 0x87, 0xaf, 0xd6, 0x24, 0x91, 0x6d, 0x5c, 0x47, 0xd6, 0xfb, 0x80, 0xef, 0xba, 0x54, 0xa5, + 0x4a, 0x9a, 0xe1, 0x7a, 0x09, 0x59, 0xd2, 0x18, 0xe9, 0x3a, 0x33, 0x51, 0xd7, 0xb1, 0xbe, 0x0f, + 0xf3, 0x91, 0x9d, 0x94, 0xc7, 0xdc, 0x84, 0x19, 
0x99, 0xea, 0xb5, 0x1d, 0xb3, 0xde, 0x10, 0x9a, + 0x2d, 0xe9, 0x8c, 0x8d, 0x84, 0x33, 0xb6, 0x3e, 0x37, 0xe0, 0x34, 0x47, 0x70, 0xe4, 0x5c, 0xef, + 0x07, 0x2d, 0x12, 0x84, 0xe2, 0x5a, 0x8c, 0x5f, 0x7c, 0x5c, 0x7f, 0x8b, 0xdf, 0xfd, 0xba, 0xfe, + 0x10, 0xe6, 0x4f, 0xda, 0xff, 0x36, 0x27, 0xb9, 0xe7, 0xd0, 0x5d, 0x5e, 0x17, 0xa8, 0x9f, 0xb1, + 0x73, 0x9d, 0x1d, 0x79, 0xae, 0xb9, 0xc4, 0x94, 0xf0, 0xc3, 0x49, 0xe9, 0x48, 0xb1, 0x5b, 0x82, + 0xc2, 0x7c, 0xc7, 0xa5, 0xac, 0x21, 0xea, 0x53, 0x1a, 0x4b, 0x09, 0xd5, 0xb4, 0x93, 0x1e, 0x16, + 0x18, 0x9d, 0xe2, 0xa9, 0xe2, 0x74, 0x27, 0x36, 0xf3, 0xc2, 0xb3, 0x85, 0xf9, 0xa1, 0x01, 0x85, + 0x38, 0x4e, 0xbc, 0x0a, 0x53, 0xc2, 0x08, 0xc2, 0xcd, 0xf2, 0x95, 0x37, 0xd2, 0x74, 0x17, 0xcc, + 0x35, 0xc9, 0x83, 0x03, 0x38, 0x41, 0x99, 0xc3, 0x48, 0xa3, 0xf9, 0xbe, 0xe3, 0xb5, 0xa5, 0x52, + 0x27, 0x2b, 0x0f, 0x8e, 0xc1, 0x7e, 0xa5, 0x3a, 0x97, 0x5b, 0x15, 0x62, 0x6b, 0x79, 0x7a, 0x30, + 0xb0, 0x6e, 0x40, 0x3e, 0xb4, 0x86, 0x01, 0xa6, 0xb7, 0xef, 0x6f, 0xd7, 0x37, 0xd6, 0x0b, 0x5f, + 0xc1, 0x39, 0x98, 0x5a, 0x5b, 0x5f, 0xdf, 0x58, 0x2f, 0x20, 0x9c, 0x87, 0x99, 0xda, 0xc6, 0xbd, + 0x07, 0x3b, 0x1b, 0xeb, 0x05, 0x83, 0xd3, 0xac, 0x55, 0xb7, 0xee, 0xec, 0x6c, 0x14, 0x26, 0xac, + 0x7f, 0x18, 0x32, 0xde, 0x8f, 0xe1, 0x1e, 0xfd, 0x7f, 0xbc, 0x0d, 0xc7, 0xdb, 0x6f, 0x27, 0x61, + 0x21, 0x6a, 0x5e, 0x15, 0x71, 0x1f, 0xc0, 0x19, 0x11, 0x71, 0xaa, 0x5e, 0x8f, 0xc7, 0xdc, 0x66, + 0x16, 0x9f, 0x89, 0x0b, 0x8d, 0x4f, 0xf2, 0xb8, 0x13, 0x71, 0x1d, 0x9d, 0x7b, 0xf1, 0x91, 0xf7, + 0x13, 0x43, 0x66, 0xa1, 0x28, 0xae, 0x70, 0xdb, 0x83, 0x9e, 0xaf, 0xed, 0xc1, 0x7b, 0x89, 0x11, + 0x58, 0x3f, 0x26, 0x6b, 0x1e, 0x1e, 0x85, 0xf7, 0x0e, 0x8f, 0xc2, 0x3c, 0xcc, 0x54, 0xdf, 0x59, + 0xbb, 0xbf, 0x29, 0xe2, 0x70, 0x0e, 0x72, 0xdb, 0xf7, 0xf5, 0xd0, 0x38, 0x88, 0xd0, 0x89, 0x70, + 0x84, 0x4e, 0x5a, 0x9f, 0x22, 0x38, 0x5b, 0x27, 0x7a, 0x77, 0x21, 0x79, 0x44, 0x7f, 0x84, 0xab, + 0x30, 0x25, 0xc0, 0x28, 0x75, 0xbf, 0x91, 0xd1, 0x6c, 0x52, 0x9d, 0x9a, 0xe4, 0xe5, 0x71, 0x42, + 0x99, 0x13, 0x30, 0xe9, 0x15, 0x13, 0xa9, 0x5e, 0x91, 0x13, 0xd4, 0x7c, 0x6c, 0x55, 0xa0, 0x58, + 0xeb, 0x7b, 0x22, 0x5f, 0xad, 0xbb, 0xb4, 0xe9, 0xef, 0x91, 0x60, 0x3f, 0x25, 0x91, 0x58, 0x3f, + 0x46, 0xb0, 0xb0, 0xdd, 0x6b, 0x0d, 0xf7, 0xcc, 0xc7, 0xe0, 0x05, 0xab, 0x90, 0xef, 0x0b, 0xd1, + 0x32, 0xe6, 0x8d, 0xd4, 0x98, 0x07, 0x49, 0xce, 0x7f, 0x5b, 0x7f, 0x45, 0xf0, 0xaa, 0x04, 0x36, + 0xaa, 0x9d, 0x75, 0xe1, 0x4c, 0xe2, 0x93, 0x8b, 0xc2, 0x7c, 0x25, 0x0d, 0x73, 0xa2, 0xec, 0x05, + 0x3f, 0x61, 0xf6, 0x68, 0xda, 0xfc, 0x08, 0xc1, 0xbc, 0xd4, 0x26, 0xda, 0x66, 0x1f, 0x34, 0xfc, + 0xe8, 0x79, 0x1a, 0xfe, 0xa3, 0x81, 0xfa, 0x02, 0x81, 0xa9, 0x40, 0xa9, 0x7d, 0xee, 0x39, 0xc1, + 0xee, 0xc0, 0xb6, 0x5b, 0x70, 0x32, 0xfa, 0x24, 0xa5, 0x30, 0xa6, 0xfa, 0x75, 0x54, 0xda, 0x1c, + 0x0d, 0x0f, 0x8f, 0x84, 0xf8, 0x08, 0xc1, 0x51, 0xf9, 0xc8, 0x84, 0x93, 0x1a, 0x58, 0x55, 0x40, + 0xc5, 0x3f, 0x45, 0x70, 0x22, 0xfc, 0x3a, 0x83, 0x57, 0xd2, 0x34, 0x4b, 0x78, 0xcb, 0x31, 0x33, + 0x1e, 0x99, 0x75, 0xe5, 0xc3, 0xbf, 0xfd, 0xf3, 0x63, 0xa3, 0x64, 0xbd, 0x5e, 0xde, 0x5b, 0x2e, + 0x7f, 0x20, 0xc3, 0xef, 0x46, 0xd8, 0xd3, 0x68, 0xf9, 0xe2, 0x33, 0xf5, 0x9e, 0x47, 0x6d, 0x7d, + 0xc0, 0xbf, 0x46, 0x30, 0x17, 0x79, 0xd3, 0xc2, 0x57, 0xb2, 0x81, 0x8c, 0x86, 0xb3, 0x99, 0x35, + 0x7a, 0xad, 0x9b, 0x02, 0xa6, 0x6d, 0x95, 0x47, 0xc0, 0xd4, 0x28, 0x39, 0x60, 0x7d, 0x73, 0xda, + 0x83, 0xb0, 0xff, 0x01, 0x82, 0x13, 0x9b, 0x84, 0xdd, 0x71, 0xba, 0x0f, 0xc5, 0x03, 0x23, 0xb6, + 0xf4, 0xde, 0xae, 0xd3, 0x15, 0xad, 0x5d, 0x68, 0x51, 0xe3, 0x3b, 0x13, 
0xa3, 0x91, 0xab, 0x03, + 0x34, 0x57, 0x05, 0x9a, 0x80, 0xc8, 0x8d, 0x47, 0xe0, 0xb1, 0xdb, 0x21, 0xe1, 0x36, 0xba, 0x88, + 0xff, 0x8c, 0xe0, 0xab, 0x87, 0xbc, 0x89, 0xe1, 0xb7, 0x53, 0x9b, 0xd0, 0x91, 0x8f, 0x69, 0xe6, + 0x73, 0xa5, 0x17, 0xeb, 0x9a, 0xd0, 0x69, 0x19, 0x4b, 0x0b, 0xf3, 0x9b, 0x63, 0x48, 0x9f, 0xa4, + 0x04, 0xf4, 0x0c, 0x7f, 0x8c, 0x20, 0x37, 0x78, 0xa8, 0xc3, 0x6f, 0x65, 0x00, 0xff, 0x7c, 0x9e, + 0x7a, 0x59, 0x00, 0xbc, 0x80, 0x5f, 0x3f, 0x1c, 0xe0, 0x81, 0xc1, 0xf1, 0xa7, 0xba, 0xdb, 0x97, + 0x85, 0x32, 0xae, 0x64, 0xea, 0xec, 0x23, 0x6d, 0xa5, 0xb9, 0x32, 0x16, 0x8f, 0x2c, 0x0c, 0x06, + 0x01, 0xf5, 0xe6, 0xc8, 0x80, 0x92, 0xbd, 0x94, 0x2d, 0x9e, 0x48, 0xb8, 0x3f, 0xfc, 0x09, 0xc1, + 0x5c, 0xe4, 0x35, 0x26, 0x3d, 0xa0, 0x92, 0x5e, 0xb8, 0xcc, 0xab, 0x63, 0x72, 0x29, 0xd0, 0x51, + 0x87, 0x1e, 0x23, 0xbc, 0x06, 0x0a, 0x7c, 0x82, 0x00, 0x0e, 0xfa, 0x12, 0xbc, 0x3c, 0x4e, 0x5b, + 0x23, 0xa1, 0x57, 0xc6, 0xef, 0x84, 0xac, 0x4b, 0x02, 0xf7, 0x1b, 0xf8, 0xb5, 0x0c, 0xc6, 0xc6, + 0x9f, 0x21, 0x38, 0x11, 0x2e, 0xdb, 0xd2, 0x13, 0x6b, 0x42, 0xef, 0x93, 0x1e, 0x5d, 0x49, 0xe5, + 0x62, 0x2c, 0xba, 0xb2, 0x1b, 0x18, 0xff, 0x1c, 0x41, 0x3e, 0xf4, 0xe4, 0x82, 0x33, 0x59, 0x29, + 0xfa, 0x12, 0x64, 0xae, 0x8c, 0xc5, 0xa3, 0x10, 0x47, 0xc3, 0x2d, 0xe5, 0x62, 0xc0, 0xbf, 0x44, + 0x70, 0x7a, 0xa8, 0xca, 0xc3, 0xd7, 0xd3, 0x36, 0x3e, 0xac, 0x30, 0x34, 0xcf, 0x69, 0xce, 0xd0, + 0x87, 0x9f, 0xd2, 0x03, 0xfd, 0xe1, 0xc7, 0x5a, 0x15, 0xe0, 0xae, 0x5a, 0x6f, 0x65, 0x09, 0xb2, + 0xa0, 0xef, 0x0d, 0xe4, 0x73, 0x57, 0xfd, 0x1d, 0x82, 0x53, 0xb1, 0xfa, 0x19, 0x7f, 0x33, 0xbd, + 0x7a, 0x48, 0x2a, 0xb8, 0xb3, 0x5f, 0x60, 0x55, 0x81, 0xf8, 0x86, 0x75, 0x3d, 0x43, 0xf6, 0x1a, + 0x9c, 0x3e, 0xbf, 0x39, 0x28, 0x61, 0x62, 0x47, 0x8e, 0x9c, 0xdf, 0x61, 0xf5, 0x51, 0x77, 0x58, + 0xfd, 0xcb, 0xbc, 0xc3, 0x68, 0xec, 0x0e, 0xfb, 0x0d, 0x02, 0xbc, 0x45, 0xa8, 0x98, 0x24, 0x41, + 0xd7, 0xa5, 0x94, 0xf3, 0xe0, 0xa5, 0xd8, 0x7e, 0xc3, 0x24, 0x1a, 0xd9, 0x9b, 0x19, 0x28, 0x95, + 0x37, 0xde, 0x16, 0x68, 0x6f, 0x5a, 0xab, 0x99, 0xd1, 0xb2, 0x21, 0x61, 0x1c, 0xf3, 0xef, 0x11, + 0xcc, 0x45, 0x1a, 0x8b, 0xf4, 0x3c, 0x9b, 0xd4, 0x87, 0x64, 0x3f, 0xf7, 0x77, 0x04, 0xf0, 0x5b, + 0x95, 0x6b, 0x02, 0xb8, 0xfe, 0x6c, 0x3b, 0xc6, 0xf9, 0x0f, 0x0a, 0x98, 0xff, 0x0c, 0xea, 0xe2, + 0xc4, 0xaa, 0x61, 0x2d, 0x9b, 0x1e, 0xc7, 0x5f, 0x38, 0xb4, 0x84, 0x86, 0xef, 0x55, 0x36, 0x84, + 0x86, 0xc9, 0x9f, 0x9a, 0xb3, 0x97, 0x13, 0x76, 0x72, 0xe7, 0x84, 0x7f, 0x81, 0xe0, 0x44, 0xb8, + 0x55, 0x49, 0x4f, 0xde, 0x09, 0x8d, 0x4d, 0xe6, 0x5a, 0x43, 0xe5, 0x97, 0xca, 0x25, 0xa1, 0x93, + 0xfa, 0x9a, 0x9d, 0x52, 0x72, 0x0c, 0x8a, 0xe3, 0x3f, 0x1a, 0x83, 0xae, 0x2a, 0xd2, 0x63, 0xd8, + 0x19, 0x11, 0x27, 0x74, 0x3d, 0xe6, 0x78, 0xdd, 0x8d, 0xf5, 0x07, 0x24, 0x14, 0xf8, 0x0c, 0x55, + 0x6e, 0x4a, 0x0d, 0xa2, 0x1f, 0xf1, 0x13, 0x35, 0x91, 0xd9, 0x92, 0xab, 0x14, 0x16, 0xf5, 0xcc, + 0x8e, 0xb5, 0x5b, 0xef, 0xbe, 0x57, 0xb9, 0x9b, 0x59, 0x66, 0x92, 0x4b, 0xa7, 0xc8, 0xbf, 0xf5, + 0x39, 0x02, 0xab, 0xe9, 0x77, 0x53, 0x14, 0x7e, 0x88, 0xde, 0xbd, 0xab, 0x28, 0xda, 0x7e, 0xc7, + 0xf1, 0xda, 0x25, 0x3f, 0x68, 0x97, 0xdb, 0xc4, 0x13, 0xfd, 0x56, 0x59, 0x2e, 0x39, 0x3d, 0x97, + 0x1e, 0xf6, 0x1f, 0x85, 0xd5, 0xe8, 0xcc, 0xaf, 0x8c, 0xf3, 0x9b, 0x52, 0x5c, 0x55, 0x6c, 0x18, + 0x6d, 0xca, 0x4a, 0x3b, 0xcb, 0x7f, 0xd1, 0x04, 0x8f, 0x04, 0xc1, 0xa3, 0x28, 0xc1, 0xa3, 0x9d, + 0xe5, 0x2f, 0x8c, 0x57, 0x25, 0x81, 0x6d, 0x0b, 0x0a, 0xdb, 0x8e, 0x92, 0xd8, 0xf6, 0xce, 0xf2, + 0xe3, 0x69, 0x01, 0x6f, 0xe5, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x95, 0xc1, 0xf5, 0xe5, 0x87, + 
0x22, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/source.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/source.pb.go new file mode 100644 index 0000000..d3fc083 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1/source.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1/source.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Cloud Security Command Center's (Cloud SCC) finding source. A finding source +// is an entity or a mechanism that can produce a finding. A source is like a +// container of findings that come from the same scanner, logger, monitor, etc. +type Source struct { + // The relative resource name of this source. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The source’s display name. + // A source’s display name must be unique amongst its siblings, for example, + // two sources with the same parent can't share the same display name. + // The display name must start and end with a letter or digit, may contain + // letters, digits, spaces, hyphens, and underscores, and can be no longer + // than 32 characters. This is captured by the regular expression: + // [\p{L}\p{N}]({\p{L}\p{N}_- ]{0,30}[\p{L}\p{N}])?. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The description of the source (max of 1024 characters). + // Example: + // "Cloud Security Scanner is a web security scanner for common + // vulnerabilities in App Engine applications. It can automatically + // scan and detect four common vulnerabilities, including cross-site-scripting + // (XSS), Flash injection, mixed content (HTTP in HTTPS), and + // outdated/insecure libraries." 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_source_9f533006806efa3c, []int{0} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func (m *Source) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Source) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Source) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*Source)(nil), "google.cloud.securitycenter.v1.Source") +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1/source.proto", fileDescriptor_source_9f533006806efa3c) +} + +var fileDescriptor_source_9f533006806efa3c = []byte{ + // 272 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xd0, 0x41, 0x4a, 0xf4, 0x30, + 0x14, 0x07, 0x70, 0xda, 0xaf, 0xdf, 0xa0, 0x19, 0x17, 0xd2, 0xd5, 0x20, 0x32, 0xcc, 0xcc, 0x4a, + 0x10, 0x12, 0x8a, 0xbb, 0xb8, 0x73, 0x16, 0x82, 0x88, 0x88, 0x03, 0x5d, 0x48, 0x41, 0x62, 0x1a, + 0x42, 0xa0, 0x93, 0x57, 0x92, 0x74, 0x60, 0xae, 0xe4, 0x51, 0x3c, 0x82, 0x47, 0xf0, 0x14, 0x32, + 0x2f, 0x11, 0xe9, 0x42, 0x77, 0xe5, 0xfd, 0x7f, 0xfd, 0xbf, 0x47, 0xc8, 0xa5, 0x06, 0xd0, 0x9d, + 0x62, 0xb2, 0x83, 0xa1, 0x65, 0x5e, 0xc9, 0xc1, 0x99, 0xb0, 0x97, 0xca, 0x06, 0xe5, 0xd8, 0xae, + 0x62, 0x1e, 0x06, 0x27, 0x15, 0xed, 0x1d, 0x04, 0x28, 0xe7, 0x11, 0x53, 0xc4, 0x74, 0x8c, 0xe9, + 0xae, 0x3a, 0x3b, 0x4f, 0x65, 0xa2, 0x37, 0x4c, 0x58, 0x0b, 0x41, 0x04, 0x03, 0xd6, 0xc7, 0xbf, + 0x57, 0x9a, 0x4c, 0x36, 0xd8, 0x56, 0x96, 0xa4, 0xb0, 0x62, 0xab, 0x66, 0xd9, 0x22, 0xbb, 0x38, + 0x7e, 0xc2, 0xef, 0x72, 0x49, 0x4e, 0x5a, 0xe3, 0xfb, 0x4e, 0xec, 0x5f, 0x30, 0xcb, 0x31, 0x9b, + 0xa6, 0xd9, 0xc3, 0x81, 0x2c, 0xc8, 0xb4, 0x55, 0x5e, 0x3a, 0xd3, 0x1f, 0x6a, 0x67, 0xff, 0x92, + 0xf8, 0x19, 0xdd, 0x15, 0x47, 0xc5, 0xe9, 0xff, 0x9b, 0x8f, 0x8c, 0xac, 0x24, 0x6c, 0xe9, 0xdf, + 0xd7, 0x3e, 0x66, 0xcf, 0xf7, 0x49, 0x68, 0xe8, 0x84, 0xd5, 0x14, 0x9c, 0x66, 0x5a, 0x59, 0xbc, + 0x96, 0xc5, 0x48, 0xf4, 0xc6, 0xff, 0xf6, 0x36, 0xd7, 0xe3, 0xc9, 0x5b, 0x3e, 0xbf, 0x8d, 0x75, + 0x6b, 0x5c, 0xb8, 0x49, 0xe9, 0x3a, 0x2e, 0xac, 0xab, 0xf7, 0x6f, 0xd0, 0x20, 0x68, 0xc6, 0xa0, + 0xa9, 0xab, 0xcf, 0x7c, 0x19, 0x01, 0xe7, 0x28, 0x38, 0x1f, 0x13, 0xce, 0xeb, 0xea, 0x75, 0x82, + 0xe7, 0x5d, 0x7d, 0x05, 0x00, 0x00, 0xff, 0xff, 0xa6, 0x8f, 0xba, 0x1a, 0xb9, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/asset.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/asset.pb.go new file mode 100644 index 
0000000..ca28810 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/asset.pb.go @@ -0,0 +1,248 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1beta1/asset.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Cloud Security Command Center's (Cloud SCC) representation of a Google Cloud +// Platform (GCP) resource. +// +// The Asset is a Cloud SCC resource that captures information about a single +// GCP resource. All modifications to an Asset are only within the context of +// Cloud SCC and don't affect the referenced GCP resource. +type Asset struct { + // The relative resource name of this asset. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/assets/456". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Cloud SCC managed properties. These properties are managed by + // Cloud SCC and cannot be modified by the user. + SecurityCenterProperties *Asset_SecurityCenterProperties `protobuf:"bytes,2,opt,name=security_center_properties,json=securityCenterProperties,proto3" json:"security_center_properties,omitempty"` + // Resource managed properties. These properties are managed and defined by + // the GCP resource and cannot be modified by the user. + ResourceProperties map[string]*_struct.Value `protobuf:"bytes,7,rep,name=resource_properties,json=resourceProperties,proto3" json:"resource_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // User specified security marks. These marks are entirely managed by the user + // and come from the SecurityMarks resource that belongs to the asset. + SecurityMarks *SecurityMarks `protobuf:"bytes,8,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The time at which the asset was created in Cloud SCC. + CreateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time at which the asset was last updated, added, or deleted in Cloud + // SCC. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset) Reset() { *m = Asset{} } +func (m *Asset) String() string { return proto.CompactTextString(m) } +func (*Asset) ProtoMessage() {} +func (*Asset) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_96fa06a748c89784, []int{0} +} +func (m *Asset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset.Unmarshal(m, b) +} +func (m *Asset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset.Marshal(b, m, deterministic) +} +func (dst *Asset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset.Merge(dst, src) +} +func (m *Asset) XXX_Size() int { + return xxx_messageInfo_Asset.Size(m) +} +func (m *Asset) XXX_DiscardUnknown() { + xxx_messageInfo_Asset.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset proto.InternalMessageInfo + +func (m *Asset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Asset) GetSecurityCenterProperties() *Asset_SecurityCenterProperties { + if m != nil { + return m.SecurityCenterProperties + } + return nil +} + +func (m *Asset) GetResourceProperties() map[string]*_struct.Value { + if m != nil { + return m.ResourceProperties + } + return nil +} + +func (m *Asset) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *Asset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Asset) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Cloud SCC managed properties. These properties are managed by Cloud SCC and +// cannot be modified by the user. +type Asset_SecurityCenterProperties struct { + // The full resource name of the GCP resource this asset + // represents. This field is immutable after create time. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The type of the GCP resource. Examples include: APPLICATION, + // PROJECT, and ORGANIZATION. This is a case insensitive field defined by + // Cloud SCC and/or the producer of the resource and is immutable + // after create time. + ResourceType string `protobuf:"bytes,2,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + // The full resource name of the immediate parent of the resource. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceParent string `protobuf:"bytes,3,opt,name=resource_parent,json=resourceParent,proto3" json:"resource_parent,omitempty"` + // The full resource name of the project the resource belongs to. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + ResourceProject string `protobuf:"bytes,4,opt,name=resource_project,json=resourceProject,proto3" json:"resource_project,omitempty"` + // Owners of the Google Cloud resource. 
+ ResourceOwners []string `protobuf:"bytes,5,rep,name=resource_owners,json=resourceOwners,proto3" json:"resource_owners,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Asset_SecurityCenterProperties) Reset() { *m = Asset_SecurityCenterProperties{} } +func (m *Asset_SecurityCenterProperties) String() string { return proto.CompactTextString(m) } +func (*Asset_SecurityCenterProperties) ProtoMessage() {} +func (*Asset_SecurityCenterProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_asset_96fa06a748c89784, []int{0, 0} +} +func (m *Asset_SecurityCenterProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Asset_SecurityCenterProperties.Unmarshal(m, b) +} +func (m *Asset_SecurityCenterProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Asset_SecurityCenterProperties.Marshal(b, m, deterministic) +} +func (dst *Asset_SecurityCenterProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_Asset_SecurityCenterProperties.Merge(dst, src) +} +func (m *Asset_SecurityCenterProperties) XXX_Size() int { + return xxx_messageInfo_Asset_SecurityCenterProperties.Size(m) +} +func (m *Asset_SecurityCenterProperties) XXX_DiscardUnknown() { + xxx_messageInfo_Asset_SecurityCenterProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_Asset_SecurityCenterProperties proto.InternalMessageInfo + +func (m *Asset_SecurityCenterProperties) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceType() string { + if m != nil { + return m.ResourceType + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceParent() string { + if m != nil { + return m.ResourceParent + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceProject() string { + if m != nil { + return m.ResourceProject + } + return "" +} + +func (m *Asset_SecurityCenterProperties) GetResourceOwners() []string { + if m != nil { + return m.ResourceOwners + } + return nil +} + +func init() { + proto.RegisterType((*Asset)(nil), "google.cloud.securitycenter.v1beta1.Asset") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1beta1.Asset.ResourcePropertiesEntry") + proto.RegisterType((*Asset_SecurityCenterProperties)(nil), "google.cloud.securitycenter.v1beta1.Asset.SecurityCenterProperties") +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/asset.proto", fileDescriptor_asset_96fa06a748c89784) +} + +var fileDescriptor_asset_96fa06a748c89784 = []byte{ + // 490 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x5d, 0x6b, 0xd4, 0x40, + 0x14, 0x25, 0xdd, 0x6e, 0xb5, 0x77, 0xed, 0x07, 0x23, 0xe8, 0x10, 0x04, 0x17, 0xfb, 0xd0, 0x15, + 0x64, 0x42, 0xd7, 0x97, 0x62, 0x9f, 0x6c, 0xf1, 0x51, 0xad, 0xb1, 0x08, 0x0a, 0xb2, 0xcc, 0xa6, + 0xd7, 0x10, 0xbb, 0x99, 0x19, 0x66, 0x26, 0x95, 0xbc, 0x08, 0xfe, 0x1f, 0xff, 0x94, 0xff, 0x44, + 0x66, 0x32, 0x49, 0xb3, 0x96, 0xd5, 0xf5, 0x2d, 0x39, 0xf7, 0x9c, 0x73, 0x73, 0xef, 0xb9, 0xbb, + 0x90, 0xe4, 0x52, 0xe6, 0x0b, 0x4c, 0xb2, 0x85, 0xac, 0x2e, 0x13, 0x83, 0x59, 0xa5, 0x0b, 0x5b, + 0x67, 0x28, 0x2c, 0xea, 0xe4, 0xfa, 0x68, 0x8e, 0x96, 0x1f, 0x25, 0xdc, 0x18, 0xb4, 0x4c, 0x69, + 0x69, 0x25, 0x39, 0x68, 0x04, 0xcc, 0x0b, 0xd8, 0xb2, 0x80, 0x05, 0x41, 0xfc, 0x28, 0xb8, 0x72, + 0x55, 0x24, 0x5c, 0x08, 0x69, 0xb9, 0x2d, 0xa4, 0x30, 
0x8d, 0x45, 0x7c, 0xbc, 0x4e, 0xcf, 0x16, + 0x9e, 0x95, 0x5c, 0x5f, 0xb5, 0xca, 0xd6, 0xd7, 0xbf, 0xcd, 0xab, 0x2f, 0x89, 0xb1, 0xba, 0xca, + 0xc2, 0xa7, 0xc5, 0x8f, 0xff, 0xac, 0xda, 0xa2, 0x44, 0x63, 0x79, 0xa9, 0x1a, 0xc2, 0x93, 0x9f, + 0x5b, 0x30, 0x7c, 0xe9, 0x66, 0x21, 0x04, 0x36, 0x05, 0x2f, 0x91, 0x46, 0xe3, 0x68, 0xb2, 0x9d, + 0xfa, 0x67, 0xf2, 0x23, 0x82, 0xb8, 0xeb, 0xda, 0x7c, 0xcd, 0x4c, 0x69, 0xa9, 0x50, 0xdb, 0x02, + 0x0d, 0xdd, 0x18, 0x47, 0x93, 0xd1, 0xf4, 0x8c, 0xad, 0x31, 0x3f, 0xf3, 0x4d, 0xd8, 0xfb, 0x50, + 0x3c, 0xf3, 0xc5, 0xf3, 0xce, 0x2a, 0xa5, 0x66, 0x45, 0x85, 0x18, 0xb8, 0xaf, 0xd1, 0xc8, 0x4a, + 0x67, 0xd8, 0xef, 0x7d, 0x67, 0x3c, 0x98, 0x8c, 0xa6, 0xa7, 0xff, 0xd1, 0x3b, 0x0d, 0x2e, 0x37, + 0xde, 0xaf, 0x84, 0xd5, 0x75, 0x4a, 0xf4, 0xad, 0x02, 0xf9, 0x08, 0xbb, 0xcb, 0xdb, 0xa6, 0x77, + 0xfd, 0xac, 0xd3, 0xb5, 0xfa, 0xb5, 0x53, 0xbe, 0x76, 0xca, 0x74, 0xc7, 0xf4, 0x5f, 0xc9, 0x09, + 0x8c, 0x32, 0x8d, 0xdc, 0xe2, 0xcc, 0x65, 0x41, 0xb7, 0xbd, 0x6f, 0xdc, 0xfa, 0xb6, 0x41, 0xb1, + 0x8b, 0x36, 0xa8, 0x14, 0x1a, 0xba, 0x03, 0x9c, 0xb8, 0x52, 0x97, 0x9d, 0x18, 0xfe, 0x2d, 0x6e, + 0xe8, 0x0e, 0x88, 0x7f, 0x45, 0x40, 0x57, 0x05, 0x40, 0x0e, 0x60, 0xa7, 0x5b, 0x73, 0xef, 0x0e, + 0xee, 0xb5, 0xe0, 0x1b, 0x77, 0x0f, 0x7d, 0x92, 0xad, 0x15, 0xfa, 0x0b, 0xe8, 0x91, 0x2e, 0x6a, + 0x85, 0xe4, 0x10, 0xf6, 0x6e, 0x02, 0xe3, 0x1a, 0x85, 0xa5, 0x03, 0x4f, 0xdb, 0xed, 0x16, 0xed, + 0x51, 0xf2, 0x14, 0xf6, 0xfb, 0xc9, 0x7e, 0xc5, 0xcc, 0xd2, 0x4d, 0xcf, 0xdc, 0xeb, 0x45, 0xe2, + 0xe0, 0x25, 0x4f, 0xf9, 0x4d, 0xa0, 0x36, 0x74, 0x38, 0x1e, 0xf4, 0x3d, 0xdf, 0x7a, 0x34, 0xfe, + 0x0c, 0x0f, 0x57, 0xe4, 0x4c, 0xf6, 0x61, 0x70, 0x85, 0x75, 0x98, 0xcb, 0x3d, 0x92, 0x67, 0x30, + 0xbc, 0xe6, 0x8b, 0x0a, 0xc3, 0x21, 0x3f, 0xb8, 0xb5, 0xc7, 0x0f, 0xae, 0x9a, 0x36, 0xa4, 0x17, + 0x1b, 0xc7, 0xd1, 0xe9, 0x77, 0x38, 0xcc, 0x64, 0xb9, 0xce, 0x11, 0x9c, 0x47, 0x9f, 0xde, 0x05, + 0x5a, 0x2e, 0x17, 0x5c, 0xe4, 0x4c, 0xea, 0x3c, 0xc9, 0x51, 0x78, 0xf3, 0xf0, 0x1f, 0xc3, 0x55, + 0x61, 0xfe, 0xfa, 0x9b, 0x3f, 0x59, 0x86, 0xe7, 0x5b, 0x5e, 0xfd, 0xfc, 0x77, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xd3, 0x05, 0x13, 0x94, 0xa4, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/finding.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/finding.pb.go new file mode 100644 index 0000000..dad444c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/finding.pb.go @@ -0,0 +1,246 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1beta1/finding.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The state of the finding. 
+type Finding_State int32 + +const ( + // Unspecified state. + Finding_STATE_UNSPECIFIED Finding_State = 0 + // The finding requires attention and has not been addressed yet. + Finding_ACTIVE Finding_State = 1 + // The finding has been fixed, triaged as a non-issue or otherwise addressed + // and is no longer active. + Finding_INACTIVE Finding_State = 2 +) + +var Finding_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "ACTIVE", + 2: "INACTIVE", +} +var Finding_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "ACTIVE": 1, + "INACTIVE": 2, +} + +func (x Finding_State) String() string { + return proto.EnumName(Finding_State_name, int32(x)) +} +func (Finding_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_finding_136f8b994030e0de, []int{0, 0} +} + +// Cloud Security Command Center (Cloud SCC) finding. +// +// A finding is a record of assessment data (security, risk, health or privacy) +// ingested into Cloud SCC for presentation, notification, analysis, +// policy testing, and enforcement. For example, an XSS vulnerability in an +// App Engine application is a finding. +type Finding struct { + // The relative resource name of this finding. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456/findings/789" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The relative resource name of the source the finding belongs to. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // This field is immutable after creation time. + // For example: + // "organizations/123/sources/456" + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The full resource name of the Google Cloud Platform (GCP) resource this + // finding is for. See: + // https://cloud.google.com/apis/design/resource_names#full_resource_name + // This field is immutable after creation time. + ResourceName string `protobuf:"bytes,3,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The state of the finding. + State Finding_State `protobuf:"varint,4,opt,name=state,proto3,enum=google.cloud.securitycenter.v1beta1.Finding_State" json:"state,omitempty"` + // The additional taxonomy group within findings from a given source. + // This field is immutable after creation time. + // Example: "XSS_FLASH_INJECTION" + Category string `protobuf:"bytes,5,opt,name=category,proto3" json:"category,omitempty"` + // The URI that, if available, points to a web page outside of Cloud SCC + // where additional information about the finding can be found. This field is + // guaranteed to be either empty or a well formed URL. + ExternalUri string `protobuf:"bytes,6,opt,name=external_uri,json=externalUri,proto3" json:"external_uri,omitempty"` + // Source specific properties. These properties are managed by the source + // that writes the finding. The key names in the source_properties map must be + // between 1 and 255 characters, and must start with a letter and contain + // alphanumeric characters or underscores only. + SourceProperties map[string]*_struct.Value `protobuf:"bytes,7,rep,name=source_properties,json=sourceProperties,proto3" json:"source_properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. User specified security marks. 
These marks are entirely + // managed by the user and come from the SecurityMarks resource that belongs + // to the finding. + SecurityMarks *SecurityMarks `protobuf:"bytes,8,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The time at which the event took place. For example, if the finding + // represents an open firewall it would capture the time the open firewall was + // detected. + EventTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` + // The time at which the finding was created in Cloud SCC. + CreateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Finding) Reset() { *m = Finding{} } +func (m *Finding) String() string { return proto.CompactTextString(m) } +func (*Finding) ProtoMessage() {} +func (*Finding) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_136f8b994030e0de, []int{0} +} +func (m *Finding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Finding.Unmarshal(m, b) +} +func (m *Finding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Finding.Marshal(b, m, deterministic) +} +func (dst *Finding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Finding.Merge(dst, src) +} +func (m *Finding) XXX_Size() int { + return xxx_messageInfo_Finding.Size(m) +} +func (m *Finding) XXX_DiscardUnknown() { + xxx_messageInfo_Finding.DiscardUnknown(m) +} + +var xxx_messageInfo_Finding proto.InternalMessageInfo + +func (m *Finding) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Finding) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *Finding) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *Finding) GetState() Finding_State { + if m != nil { + return m.State + } + return Finding_STATE_UNSPECIFIED +} + +func (m *Finding) GetCategory() string { + if m != nil { + return m.Category + } + return "" +} + +func (m *Finding) GetExternalUri() string { + if m != nil { + return m.ExternalUri + } + return "" +} + +func (m *Finding) GetSourceProperties() map[string]*_struct.Value { + if m != nil { + return m.SourceProperties + } + return nil +} + +func (m *Finding) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *Finding) GetEventTime() *timestamp.Timestamp { + if m != nil { + return m.EventTime + } + return nil +} + +func (m *Finding) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func init() { + proto.RegisterType((*Finding)(nil), "google.cloud.securitycenter.v1beta1.Finding") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1beta1.Finding.SourcePropertiesEntry") + proto.RegisterEnum("google.cloud.securitycenter.v1beta1.Finding_State", Finding_State_name, Finding_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/finding.proto", fileDescriptor_finding_136f8b994030e0de) +} + +var fileDescriptor_finding_136f8b994030e0de = []byte{ + // 510 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0x41, 0x6f, 0xda, 0x30, + 0x14, 0xc7, 0x17, 0x28, 0x14, 0x1e, 0xb4, 0xa2, 0x96, 0x5a, 0x45, 
0x68, 0xd2, 0x58, 0x7b, 0x18, + 0x87, 0x29, 0x11, 0xec, 0xc2, 0xd6, 0x53, 0xdb, 0x51, 0x8d, 0xc3, 0x10, 0x0b, 0xb4, 0xd2, 0xb6, + 0x03, 0x32, 0xe9, 0x6b, 0x64, 0x35, 0xb1, 0x23, 0xdb, 0x41, 0xe3, 0xb2, 0x8f, 0xb6, 0xcf, 0x36, + 0xc5, 0x71, 0xaa, 0xd1, 0x4d, 0x1b, 0xbb, 0xe5, 0xfd, 0xdf, 0xfb, 0xfd, 0xfd, 0xfc, 0x9e, 0x03, + 0x83, 0x48, 0x88, 0x28, 0x46, 0x3f, 0x8c, 0x45, 0x76, 0xe7, 0x2b, 0x0c, 0x33, 0xc9, 0xf4, 0x26, + 0x44, 0xae, 0x51, 0xfa, 0xeb, 0xc1, 0x0a, 0x35, 0x1d, 0xf8, 0xf7, 0x8c, 0xdf, 0x31, 0x1e, 0x79, + 0xa9, 0x14, 0x5a, 0x90, 0xb3, 0x02, 0xf1, 0x0c, 0xe2, 0x6d, 0x23, 0x9e, 0x45, 0xba, 0xcf, 0xad, + 0x2f, 0x4d, 0x99, 0x4f, 0x39, 0x17, 0x9a, 0x6a, 0x26, 0xb8, 0x2a, 0x2c, 0xba, 0xa3, 0x5d, 0x4e, + 0x2d, 0xe5, 0x65, 0x42, 0xe5, 0x43, 0x49, 0x96, 0xbe, 0x26, 0x5a, 0x65, 0xf7, 0xbe, 0xd2, 0x32, + 0x0b, 0xb5, 0xcd, 0xbe, 0x78, 0x9a, 0xd5, 0x2c, 0x41, 0xa5, 0x69, 0x92, 0x16, 0x05, 0xa7, 0x3f, + 0x6a, 0xb0, 0x7f, 0x5d, 0xdc, 0x86, 0x10, 0xd8, 0xe3, 0x34, 0x41, 0xd7, 0xe9, 0x39, 0xfd, 0x66, + 0x60, 0xbe, 0xc9, 0x09, 0xd4, 0x53, 0x2a, 0x91, 0x6b, 0xb7, 0x62, 0x54, 0x1b, 0x91, 0x33, 0x38, + 0x90, 0xa8, 0x44, 0x26, 0x43, 0x5c, 0x1a, 0xa8, 0x6a, 0xd2, 0xed, 0x52, 0x9c, 0xe6, 0xf0, 0x07, + 0xa8, 0x29, 0x4d, 0x35, 0xba, 0x7b, 0x3d, 0xa7, 0x7f, 0x38, 0x1c, 0x7a, 0x3b, 0x0c, 0xca, 0xb3, + 0xdd, 0x78, 0xf3, 0x9c, 0x0c, 0x0a, 0x03, 0xd2, 0x85, 0x46, 0x48, 0x35, 0x46, 0x42, 0x6e, 0xdc, + 0x9a, 0x39, 0xe9, 0x31, 0x26, 0x2f, 0xa1, 0x8d, 0xdf, 0x34, 0x4a, 0x4e, 0xe3, 0x65, 0x26, 0x99, + 0x5b, 0x37, 0xf9, 0x56, 0xa9, 0xdd, 0x48, 0x46, 0x04, 0x1c, 0xd9, 0x5e, 0x53, 0x29, 0x52, 0x94, + 0x9a, 0xa1, 0x72, 0xf7, 0x7b, 0xd5, 0x7e, 0x6b, 0x78, 0xf9, 0x7f, 0x4d, 0x19, 0x97, 0xd9, 0xa3, + 0xc9, 0x98, 0x6b, 0xb9, 0x09, 0x3a, 0xea, 0x89, 0x4c, 0x3e, 0xc3, 0xe1, 0xf6, 0xb6, 0xdc, 0x46, + 0xcf, 0xe9, 0xb7, 0x76, 0x1c, 0xc1, 0xdc, 0xca, 0x1f, 0x73, 0x32, 0x38, 0x50, 0xbf, 0x86, 0xe4, + 0x2d, 0x00, 0xae, 0x91, 0xeb, 0x65, 0xbe, 0x4a, 0xb7, 0x69, 0x6c, 0xbb, 0xa5, 0x6d, 0xb9, 0x67, + 0x6f, 0x51, 0xee, 0x39, 0x68, 0x9a, 0xea, 0x3c, 0x26, 0xe7, 0xd0, 0x0a, 0x25, 0x52, 0x8d, 0x05, + 0x0b, 0xff, 0x64, 0xa1, 0x28, 0xcf, 0x85, 0xee, 0x57, 0x38, 0xfe, 0xe3, 0xed, 0x49, 0x07, 0xaa, + 0x0f, 0xb8, 0xb1, 0xaf, 0x26, 0xff, 0x24, 0xaf, 0xa1, 0xb6, 0xa6, 0x71, 0x86, 0xe6, 0xcd, 0xb4, + 0x86, 0x27, 0xbf, 0x9d, 0x70, 0x9b, 0x67, 0x83, 0xa2, 0xe8, 0x5d, 0x65, 0xe4, 0x9c, 0x8e, 0xa0, + 0x66, 0xf6, 0x4d, 0x8e, 0xe1, 0x68, 0xbe, 0xb8, 0x58, 0x8c, 0x97, 0x37, 0xd3, 0xf9, 0x6c, 0x7c, + 0x35, 0xb9, 0x9e, 0x8c, 0xdf, 0x77, 0x9e, 0x11, 0x80, 0xfa, 0xc5, 0xd5, 0x62, 0x72, 0x3b, 0xee, + 0x38, 0xa4, 0x0d, 0x8d, 0xc9, 0xd4, 0x46, 0x95, 0xcb, 0xef, 0xf0, 0x2a, 0x14, 0xc9, 0x2e, 0x63, + 0x9d, 0x39, 0x5f, 0x3e, 0xd9, 0xb2, 0x48, 0xc4, 0x94, 0x47, 0x9e, 0x90, 0x91, 0x1f, 0x21, 0x37, + 0x6d, 0xf9, 0x45, 0x8a, 0xa6, 0x4c, 0xfd, 0xf5, 0x2f, 0x3c, 0xdf, 0x96, 0x57, 0x75, 0x43, 0xbf, + 0xf9, 0x19, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xda, 0xfd, 0x34, 0x38, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/organization_settings.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/organization_settings.pb.go new file mode 100644 index 0000000..abbf72b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/organization_settings.pb.go @@ -0,0 +1,215 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/securitycenter/v1beta1/organization_settings.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The mode of inclusion when running Asset Discovery. +// Asset discovery can be limited by explicitly identifying projects to be +// included or excluded. If INCLUDE_ONLY is set, then only those projects +// within the organization and their children are discovered during asset +// discovery. If EXCLUDE is set, then projects that don't match those +// projects are discovered during asset discovery. If neither are set, then +// all projects within the organization are discovered during asset +// discovery. +type OrganizationSettings_AssetDiscoveryConfig_InclusionMode int32 + +const ( + // Unspecified. Setting the mode with this value will disable + // inclusion/exclusion filtering for Asset Discovery. + OrganizationSettings_AssetDiscoveryConfig_INCLUSION_MODE_UNSPECIFIED OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 0 + // Asset Discovery will capture only the resources within the projects + // specified. All other resources will be ignored. + OrganizationSettings_AssetDiscoveryConfig_INCLUDE_ONLY OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 1 + // Asset Discovery will ignore all resources under the projects specified. + // All other resources will be retrieved. + OrganizationSettings_AssetDiscoveryConfig_EXCLUDE OrganizationSettings_AssetDiscoveryConfig_InclusionMode = 2 +) + +var OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name = map[int32]string{ + 0: "INCLUSION_MODE_UNSPECIFIED", + 1: "INCLUDE_ONLY", + 2: "EXCLUDE", +} +var OrganizationSettings_AssetDiscoveryConfig_InclusionMode_value = map[string]int32{ + "INCLUSION_MODE_UNSPECIFIED": 0, + "INCLUDE_ONLY": 1, + "EXCLUDE": 2, +} + +func (x OrganizationSettings_AssetDiscoveryConfig_InclusionMode) String() string { + return proto.EnumName(OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name, int32(x)) +} +func (OrganizationSettings_AssetDiscoveryConfig_InclusionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_8916391f4b80a5cd, []int{0, 0, 0} +} + +// User specified settings that are attached to the Cloud Security Command +// Center (Cloud SCC) organization. +type OrganizationSettings struct { + // The relative resource name of the settings. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/organizationSettings". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A flag that indicates if Asset Discovery should be enabled. If the flag is + // set to `true`, then discovery of assets will occur. If it is set to `false, + // all historical assets will remain, but discovery of future assets will not + // occur. 
+ EnableAssetDiscovery bool `protobuf:"varint,2,opt,name=enable_asset_discovery,json=enableAssetDiscovery,proto3" json:"enable_asset_discovery,omitempty"` + // The configuration used for Asset Discovery runs. + AssetDiscoveryConfig *OrganizationSettings_AssetDiscoveryConfig `protobuf:"bytes,3,opt,name=asset_discovery_config,json=assetDiscoveryConfig,proto3" json:"asset_discovery_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrganizationSettings) Reset() { *m = OrganizationSettings{} } +func (m *OrganizationSettings) String() string { return proto.CompactTextString(m) } +func (*OrganizationSettings) ProtoMessage() {} +func (*OrganizationSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_8916391f4b80a5cd, []int{0} +} +func (m *OrganizationSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrganizationSettings.Unmarshal(m, b) +} +func (m *OrganizationSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrganizationSettings.Marshal(b, m, deterministic) +} +func (dst *OrganizationSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrganizationSettings.Merge(dst, src) +} +func (m *OrganizationSettings) XXX_Size() int { + return xxx_messageInfo_OrganizationSettings.Size(m) +} +func (m *OrganizationSettings) XXX_DiscardUnknown() { + xxx_messageInfo_OrganizationSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_OrganizationSettings proto.InternalMessageInfo + +func (m *OrganizationSettings) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *OrganizationSettings) GetEnableAssetDiscovery() bool { + if m != nil { + return m.EnableAssetDiscovery + } + return false +} + +func (m *OrganizationSettings) GetAssetDiscoveryConfig() *OrganizationSettings_AssetDiscoveryConfig { + if m != nil { + return m.AssetDiscoveryConfig + } + return nil +} + +// The configuration used for Asset Discovery runs. +type OrganizationSettings_AssetDiscoveryConfig struct { + // The project ids to use for filtering asset discovery. + ProjectIds []string `protobuf:"bytes,1,rep,name=project_ids,json=projectIds,proto3" json:"project_ids,omitempty"` + // The mode to use for filtering asset discovery. 
+ InclusionMode OrganizationSettings_AssetDiscoveryConfig_InclusionMode `protobuf:"varint,2,opt,name=inclusion_mode,json=inclusionMode,proto3,enum=google.cloud.securitycenter.v1beta1.OrganizationSettings_AssetDiscoveryConfig_InclusionMode" json:"inclusion_mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrganizationSettings_AssetDiscoveryConfig) Reset() { + *m = OrganizationSettings_AssetDiscoveryConfig{} +} +func (m *OrganizationSettings_AssetDiscoveryConfig) String() string { return proto.CompactTextString(m) } +func (*OrganizationSettings_AssetDiscoveryConfig) ProtoMessage() {} +func (*OrganizationSettings_AssetDiscoveryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_organization_settings_8916391f4b80a5cd, []int{0, 0} +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Unmarshal(m, b) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Marshal(b, m, deterministic) +} +func (dst *OrganizationSettings_AssetDiscoveryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Merge(dst, src) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_Size() int { + return xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.Size(m) +} +func (m *OrganizationSettings_AssetDiscoveryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OrganizationSettings_AssetDiscoveryConfig proto.InternalMessageInfo + +func (m *OrganizationSettings_AssetDiscoveryConfig) GetProjectIds() []string { + if m != nil { + return m.ProjectIds + } + return nil +} + +func (m *OrganizationSettings_AssetDiscoveryConfig) GetInclusionMode() OrganizationSettings_AssetDiscoveryConfig_InclusionMode { + if m != nil { + return m.InclusionMode + } + return OrganizationSettings_AssetDiscoveryConfig_INCLUSION_MODE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*OrganizationSettings)(nil), "google.cloud.securitycenter.v1beta1.OrganizationSettings") + proto.RegisterType((*OrganizationSettings_AssetDiscoveryConfig)(nil), "google.cloud.securitycenter.v1beta1.OrganizationSettings.AssetDiscoveryConfig") + proto.RegisterEnum("google.cloud.securitycenter.v1beta1.OrganizationSettings_AssetDiscoveryConfig_InclusionMode", OrganizationSettings_AssetDiscoveryConfig_InclusionMode_name, OrganizationSettings_AssetDiscoveryConfig_InclusionMode_value) +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/organization_settings.proto", fileDescriptor_organization_settings_8916391f4b80a5cd) +} + +var fileDescriptor_organization_settings_8916391f4b80a5cd = []byte{ + // 399 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xc1, 0x6b, 0x14, 0x31, + 0x14, 0xc6, 0xcd, 0xae, 0xa8, 0xcd, 0xda, 0xb2, 0x84, 0xa1, 0x0c, 0x8b, 0xe8, 0x50, 0x0f, 0xce, + 0x29, 0x43, 0xab, 0x37, 0x0f, 0xa2, 0x3b, 0x23, 0x0c, 0xb4, 0x33, 0x75, 0x96, 0x82, 0x8a, 0x10, + 0xb2, 0x99, 0x18, 0x22, 0xb3, 0x79, 0xc3, 0x24, 0x5b, 0xa8, 0x07, 0x2f, 0x7a, 0xf5, 0xef, 0xf5, + 0x2a, 0xcd, 0x4e, 0xa1, 0x23, 0x8b, 0xec, 0xa1, 0xb7, 0xe4, 0xfb, 0x5e, 0x7e, 0xdf, 0x7b, 0xe4, + 0xe1, 0x37, 0x0a, 0x40, 0x35, 0x32, 0x11, 0x0d, 0xac, 0xeb, 0xc4, 
0x4a, 0xb1, 0xee, 0xb4, 0xbb, + 0x12, 0xd2, 0x38, 0xd9, 0x25, 0x97, 0xc7, 0x4b, 0xe9, 0xf8, 0x71, 0x02, 0x9d, 0xe2, 0x46, 0x7f, + 0xe7, 0x4e, 0x83, 0x61, 0x56, 0x3a, 0xa7, 0x8d, 0xb2, 0xb4, 0xed, 0xc0, 0x01, 0x79, 0xbe, 0x01, + 0x50, 0x0f, 0xa0, 0x43, 0x00, 0xed, 0x01, 0xb3, 0x27, 0x7d, 0x0a, 0x6f, 0x75, 0xc2, 0x8d, 0x01, + 0xe7, 0x51, 0x3d, 0xe2, 0xe8, 0xcf, 0x18, 0x07, 0xe5, 0xad, 0x88, 0x45, 0x9f, 0x40, 0x08, 0xbe, + 0x6f, 0xf8, 0x4a, 0x86, 0x28, 0x42, 0xf1, 0x5e, 0xe5, 0xcf, 0xe4, 0x15, 0x3e, 0x94, 0x86, 0x2f, + 0x1b, 0xc9, 0xb8, 0xb5, 0xd2, 0xb1, 0x5a, 0x5b, 0x01, 0x97, 0xb2, 0xbb, 0x0a, 0x47, 0x11, 0x8a, + 0x1f, 0x55, 0xc1, 0xc6, 0x7d, 0x7b, 0x6d, 0xa6, 0x37, 0x1e, 0xf9, 0x85, 0xf0, 0xe1, 0x3f, 0xf5, + 0x4c, 0x80, 0xf9, 0xaa, 0x55, 0x38, 0x8e, 0x50, 0x3c, 0x39, 0x29, 0xe8, 0x0e, 0x73, 0xd0, 0x6d, + 0x5d, 0xd2, 0x61, 0xd4, 0xdc, 0x53, 0xab, 0x80, 0x6f, 0x51, 0x67, 0xbf, 0x47, 0x38, 0xd8, 0x56, + 0x4e, 0x9e, 0xe1, 0x49, 0xdb, 0xc1, 0x37, 0x29, 0x1c, 0xd3, 0xb5, 0x0d, 0x51, 0x34, 0x8e, 0xf7, + 0x2a, 0xdc, 0x4b, 0x79, 0x6d, 0xc9, 0x4f, 0x84, 0x0f, 0xb4, 0x11, 0xcd, 0xda, 0x5e, 0xff, 0xc1, + 0x0a, 0x6a, 0xe9, 0xe7, 0x3d, 0x38, 0xf9, 0x72, 0xb7, 0x8d, 0xd3, 0xfc, 0x26, 0xe4, 0x0c, 0x6a, + 0x59, 0xed, 0xeb, 0xdb, 0xd7, 0xa3, 0x02, 0xef, 0x0f, 0x7c, 0xf2, 0x14, 0xcf, 0xf2, 0x62, 0x7e, + 0x7a, 0xb1, 0xc8, 0xcb, 0x82, 0x9d, 0x95, 0x69, 0xc6, 0x2e, 0x8a, 0xc5, 0x79, 0x36, 0xcf, 0xdf, + 0xe7, 0x59, 0x3a, 0xbd, 0x47, 0xa6, 0xf8, 0xb1, 0xf7, 0xd3, 0x8c, 0x95, 0xc5, 0xe9, 0xa7, 0x29, + 0x22, 0x13, 0xfc, 0x30, 0xfb, 0xe8, 0x95, 0xe9, 0xe8, 0xdd, 0x0f, 0xfc, 0x42, 0xc0, 0x6a, 0x97, + 0x09, 0xce, 0xd1, 0xe7, 0x0f, 0x7d, 0x99, 0x82, 0x86, 0x1b, 0x45, 0xa1, 0x53, 0x89, 0x92, 0xc6, + 0xaf, 0x50, 0xb2, 0xb1, 0x78, 0xab, 0xed, 0x7f, 0x37, 0xf9, 0xf5, 0x50, 0x5e, 0x3e, 0xf0, 0xaf, + 0x5f, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x07, 0x96, 0xfd, 0x8f, 0x06, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/security_marks.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/security_marks.pb.go new file mode 100644 index 0000000..df41303 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/security_marks.pb.go @@ -0,0 +1,110 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1beta1/security_marks.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// User specified security marks that are attached to the parent Cloud Security +// Command Center (Cloud SCC) resource. Security marks are scoped within a Cloud +// SCC organization -- they can be modified and viewed by all users who have +// proper permissions on the organization. +type SecurityMarks struct { + // The relative resource name of the SecurityMarks. 
See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Examples: + // "organizations/123/assets/456/securityMarks" + // "organizations/123/sources/456/findings/789/securityMarks". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Mutable user specified security marks belonging to the parent resource. + // Constraints are as follows: + // - Keys and values are treated as case insensitive + // - Keys must be alphanumeric and between 1 - 256 characters (inclusive) + // - Values have leading and trailing whitespace trimmed, remaining + // characters must be between 1 - 4096 characters (inclusive) + Marks map[string]string `protobuf:"bytes,2,rep,name=marks,proto3" json:"marks,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SecurityMarks) Reset() { *m = SecurityMarks{} } +func (m *SecurityMarks) String() string { return proto.CompactTextString(m) } +func (*SecurityMarks) ProtoMessage() {} +func (*SecurityMarks) Descriptor() ([]byte, []int) { + return fileDescriptor_security_marks_89566b31649909af, []int{0} +} +func (m *SecurityMarks) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SecurityMarks.Unmarshal(m, b) +} +func (m *SecurityMarks) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SecurityMarks.Marshal(b, m, deterministic) +} +func (dst *SecurityMarks) XXX_Merge(src proto.Message) { + xxx_messageInfo_SecurityMarks.Merge(dst, src) +} +func (m *SecurityMarks) XXX_Size() int { + return xxx_messageInfo_SecurityMarks.Size(m) +} +func (m *SecurityMarks) XXX_DiscardUnknown() { + xxx_messageInfo_SecurityMarks.DiscardUnknown(m) +} + +var xxx_messageInfo_SecurityMarks proto.InternalMessageInfo + +func (m *SecurityMarks) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SecurityMarks) GetMarks() map[string]string { + if m != nil { + return m.Marks + } + return nil +} + +func init() { + proto.RegisterType((*SecurityMarks)(nil), "google.cloud.securitycenter.v1beta1.SecurityMarks") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.securitycenter.v1beta1.SecurityMarks.MarksEntry") +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/security_marks.proto", fileDescriptor_security_marks_89566b31649909af) +} + +var fileDescriptor_security_marks_89566b31649909af = []byte{ + // 252 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x41, 0x4b, 0xc3, 0x30, + 0x14, 0xc7, 0x49, 0xe7, 0x04, 0x9f, 0x08, 0x12, 0x3c, 0x94, 0xe1, 0x61, 0xe8, 0xc1, 0x9d, 0x12, + 0xa6, 0x97, 0xa2, 0x78, 0x11, 0x3c, 0x0a, 0xba, 0xdd, 0xbc, 0xc8, 0x5b, 0x7d, 0x84, 0xb2, 0x36, + 0xaf, 0xa4, 0xe9, 0xa0, 0x17, 0x3f, 0x94, 0x9f, 0x50, 0x96, 0x04, 0xa5, 0x17, 0xd9, 0xa5, 0xbc, + 0xfe, 0x93, 0xdf, 0xef, 0x9f, 0x04, 0x0a, 0xc3, 0x6c, 0x6a, 0xd2, 0x65, 0xcd, 0xfd, 0xa7, 0xee, + 0xa8, 0xec, 0x5d, 0xe5, 0x87, 0x92, 0xac, 0x27, 0xa7, 0x77, 0xcb, 0x0d, 0x79, 0x5c, 0xfe, 0xc6, + 0x1f, 0x0d, 0xba, 0x6d, 0xa7, 0x5a, 0xc7, 0x9e, 0xe5, 0x75, 0x24, 0x55, 0x20, 0xd5, 0x98, 0x54, + 0x89, 0x9c, 0x5d, 0x26, 0x3d, 0xb6, 0x95, 0x46, 0x6b, 0xd9, 0xa3, 0xaf, 0xd8, 0x26, 0xc5, 0xd5, + 0xb7, 0x80, 0xb3, 0x75, 0x02, 0x5f, 0xf6, 0x6a, 0x29, 0xe1, 0xc8, 0x62, 0x43, 0xb9, 0x98, 0x8b, + 0xc5, 0xc9, 0x2a, 0xcc, 0x72, 0x0d, 0xd3, 0xd0, 0x9b, 0x67, 0xf3, 
0xc9, 0xe2, 0xf4, 0xf6, 0x51, + 0x1d, 0x50, 0xac, 0x46, 0x5a, 0x15, 0xbe, 0xcf, 0xd6, 0xbb, 0x61, 0x15, 0x5d, 0xb3, 0x02, 0xe0, + 0x2f, 0x94, 0xe7, 0x30, 0xd9, 0xd2, 0x90, 0x5a, 0xf7, 0xa3, 0xbc, 0x80, 0xe9, 0x0e, 0xeb, 0x9e, + 0xf2, 0x2c, 0x64, 0xf1, 0xe7, 0x3e, 0x2b, 0xc4, 0xd3, 0x17, 0xdc, 0x94, 0xdc, 0x1c, 0x72, 0x88, + 0x57, 0xf1, 0xfe, 0x96, 0xb6, 0x19, 0xae, 0xd1, 0x1a, 0xc5, 0xce, 0x68, 0x43, 0x36, 0xdc, 0x5e, + 0xc7, 0x25, 0x6c, 0xab, 0xee, 0xdf, 0xd7, 0x7f, 0x18, 0xc7, 0x9b, 0xe3, 0x40, 0xdf, 0xfd, 0x04, + 0x00, 0x00, 0xff, 0xff, 0x50, 0x3d, 0x8c, 0xac, 0xba, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/securitycenter_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/securitycenter_service.pb.go new file mode 100644 index 0000000..f61e9d6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/securitycenter_service.pb.go @@ -0,0 +1,2498 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1beta1/securitycenter_service.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the asset. +// +// When querying across two points in time this describes +// the change between the two points: ADDED, REMOVED, or ACTIVE. +// If there was no compare_duration supplied in the request the state should +// be: UNUSED +type ListAssetsResponse_ListAssetsResult_State int32 + +const ( + // Unspecified state. + ListAssetsResponse_ListAssetsResult_STATE_UNSPECIFIED ListAssetsResponse_ListAssetsResult_State = 0 + // Request did not specify use of this field in the result. + ListAssetsResponse_ListAssetsResult_UNUSED ListAssetsResponse_ListAssetsResult_State = 1 + // Asset was added between the points in time. + ListAssetsResponse_ListAssetsResult_ADDED ListAssetsResponse_ListAssetsResult_State = 2 + // Asset was removed between the points in time. + ListAssetsResponse_ListAssetsResult_REMOVED ListAssetsResponse_ListAssetsResult_State = 3 + // Asset was active at both point(s) in time. 
+ ListAssetsResponse_ListAssetsResult_ACTIVE ListAssetsResponse_ListAssetsResult_State = 4 +) + +var ListAssetsResponse_ListAssetsResult_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "UNUSED", + 2: "ADDED", + 3: "REMOVED", + 4: "ACTIVE", +} +var ListAssetsResponse_ListAssetsResult_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "UNUSED": 1, + "ADDED": 2, + "REMOVED": 3, + "ACTIVE": 4, +} + +func (x ListAssetsResponse_ListAssetsResult_State) String() string { + return proto.EnumName(ListAssetsResponse_ListAssetsResult_State_name, int32(x)) +} +func (ListAssetsResponse_ListAssetsResult_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{12, 0, 0} +} + +// Request message for creating a finding. +type CreateFindingRequest struct { + // Resource name of the new finding's parent. Its format should be + // "organizations/[organization_id]/sources/[source_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Unique identifier provided by the client within the parent scope. + // It must be alphanumeric and less than or equal to 32 characters and + // greater than 0 characters in length. + FindingId string `protobuf:"bytes,2,opt,name=finding_id,json=findingId,proto3" json:"finding_id,omitempty"` + // The Finding being created. The name and security_marks will be ignored as + // they are both output only fields on this resource. + Finding *Finding `protobuf:"bytes,3,opt,name=finding,proto3" json:"finding,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateFindingRequest) Reset() { *m = CreateFindingRequest{} } +func (m *CreateFindingRequest) String() string { return proto.CompactTextString(m) } +func (*CreateFindingRequest) ProtoMessage() {} +func (*CreateFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{0} +} +func (m *CreateFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateFindingRequest.Unmarshal(m, b) +} +func (m *CreateFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateFindingRequest.Marshal(b, m, deterministic) +} +func (dst *CreateFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateFindingRequest.Merge(dst, src) +} +func (m *CreateFindingRequest) XXX_Size() int { + return xxx_messageInfo_CreateFindingRequest.Size(m) +} +func (m *CreateFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateFindingRequest proto.InternalMessageInfo + +func (m *CreateFindingRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateFindingRequest) GetFindingId() string { + if m != nil { + return m.FindingId + } + return "" +} + +func (m *CreateFindingRequest) GetFinding() *Finding { + if m != nil { + return m.Finding + } + return nil +} + +// Request message for creating a source. +type CreateSourceRequest struct { + // Resource name of the new source's parent. Its format should be + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The Source being created, only the display_name and description will be + // used. All other fields will be ignored. 
+ Source *Source `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSourceRequest) Reset() { *m = CreateSourceRequest{} } +func (m *CreateSourceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSourceRequest) ProtoMessage() {} +func (*CreateSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{1} +} +func (m *CreateSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSourceRequest.Unmarshal(m, b) +} +func (m *CreateSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSourceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSourceRequest.Merge(dst, src) +} +func (m *CreateSourceRequest) XXX_Size() int { + return xxx_messageInfo_CreateSourceRequest.Size(m) +} +func (m *CreateSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSourceRequest proto.InternalMessageInfo + +func (m *CreateSourceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSourceRequest) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +// Request message for getting organization settings. +type GetOrganizationSettingsRequest struct { + // Name of the organization to get organization settings for. Its format is + // "organizations/[organization_id]/organizationSettings". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOrganizationSettingsRequest) Reset() { *m = GetOrganizationSettingsRequest{} } +func (m *GetOrganizationSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*GetOrganizationSettingsRequest) ProtoMessage() {} +func (*GetOrganizationSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{2} +} +func (m *GetOrganizationSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOrganizationSettingsRequest.Unmarshal(m, b) +} +func (m *GetOrganizationSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOrganizationSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *GetOrganizationSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOrganizationSettingsRequest.Merge(dst, src) +} +func (m *GetOrganizationSettingsRequest) XXX_Size() int { + return xxx_messageInfo_GetOrganizationSettingsRequest.Size(m) +} +func (m *GetOrganizationSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOrganizationSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOrganizationSettingsRequest proto.InternalMessageInfo + +func (m *GetOrganizationSettingsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for getting a source. +type GetSourceRequest struct { + // Relative resource name of the source. Its format is + // "organizations/[organization_id]/source/[source_id]". 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSourceRequest) Reset() { *m = GetSourceRequest{} } +func (m *GetSourceRequest) String() string { return proto.CompactTextString(m) } +func (*GetSourceRequest) ProtoMessage() {} +func (*GetSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{3} +} +func (m *GetSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSourceRequest.Unmarshal(m, b) +} +func (m *GetSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSourceRequest.Marshal(b, m, deterministic) +} +func (dst *GetSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSourceRequest.Merge(dst, src) +} +func (m *GetSourceRequest) XXX_Size() int { + return xxx_messageInfo_GetSourceRequest.Size(m) +} +func (m *GetSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSourceRequest proto.InternalMessageInfo + +func (m *GetSourceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for grouping by assets. +type GroupAssetsRequest struct { + // Name of the organization to groupBy. Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across assets. + // The expression is a list of zero or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are not supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form `<field> <operator> <value>` and may have a `-` + // character in front of them to indicate negation. The fields map to those + // defined in the Asset resource. Examples include: + // + // * name + // * security_center_properties.resource_name + // * resource_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // For example, `resource_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what assets fields to use for grouping. The string + // value should follow SQL syntax: comma separated list of fields. For + // example: + // "security_center_properties.resource_project,security_center_properties.project".
+ // + // The following fields are supported when compare_duration is not set: + // + // * security_center_properties.resource_project + // * security_center_properties.resource_type + // * security_center_properties.resource_parent + // + // The following fields are supported when compare_duration is set: + // + // * security_center_properties.resource_type + GroupBy string `protobuf:"bytes,3,opt,name=group_by,json=groupBy,proto3" json:"group_by,omitempty"` + // When compare_duration is set, the Asset's "state" property is updated to + // indicate whether the asset was added, removed, or remained present during + // the compare_duration period of time that precedes the read_time. This is + // the time between (read_time - compare_duration) and read_time. + // + // The state value is derived based on the presence of the asset at the two + // points in time. Intermediate state changes between the two times don't + // affect the result. For example, the results aren't affected if the asset is + // removed and re-created again. + // + // Possible "state" values when compare_duration is specified: + // + // * "ADDED": indicates that the asset was not present before + // compare_duration, but present at reference_time. + // * "REMOVED": indicates that the asset was present at the start of + // compare_duration, but not present at reference_time. + // * "ACTIVE": indicates that the asset was present at both the + // start and the end of the time period defined by + // compare_duration and reference_time. + // + // This field is ignored if `state` is not a field in `group_by`. + CompareDuration *duration.Duration `protobuf:"bytes,4,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // Time used as a reference point when filtering assets. The filter is limited + // to assets existing at the supplied time and their values are those at that + // specific time. Absence of this field will default to the API's version of + // NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // The value returned by the last `GroupAssetsResponse`; indicates + // that this is a continuation of a prior `GroupAssets` call, and that the + // system should return the next page of data. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
+ PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupAssetsRequest) Reset() { *m = GroupAssetsRequest{} } +func (m *GroupAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*GroupAssetsRequest) ProtoMessage() {} +func (*GroupAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{4} +} +func (m *GroupAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupAssetsRequest.Unmarshal(m, b) +} +func (m *GroupAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *GroupAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupAssetsRequest.Merge(dst, src) +} +func (m *GroupAssetsRequest) XXX_Size() int { + return xxx_messageInfo_GroupAssetsRequest.Size(m) +} +func (m *GroupAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GroupAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupAssetsRequest proto.InternalMessageInfo + +func (m *GroupAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GroupAssetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *GroupAssetsRequest) GetGroupBy() string { + if m != nil { + return m.GroupBy + } + return "" +} + +func (m *GroupAssetsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *GroupAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupAssetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *GroupAssetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for grouping by assets. +type GroupAssetsResponse struct { + // Group results. There exists an element for each existing unique + // combination of property/values. The element contains a count for the number + // of times those specific property/values appear. + GroupByResults []*GroupResult `protobuf:"bytes,1,rep,name=group_by_results,json=groupByResults,proto3" json:"group_by_results,omitempty"` + // Time used for executing the groupBy request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupAssetsResponse) Reset() { *m = GroupAssetsResponse{} } +func (m *GroupAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*GroupAssetsResponse) ProtoMessage() {} +func (*GroupAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{5} +} +func (m *GroupAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupAssetsResponse.Unmarshal(m, b) +} +func (m *GroupAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *GroupAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupAssetsResponse.Merge(dst, src) +} +func (m *GroupAssetsResponse) XXX_Size() int { + return xxx_messageInfo_GroupAssetsResponse.Size(m) +} +func (m *GroupAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GroupAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupAssetsResponse proto.InternalMessageInfo + +func (m *GroupAssetsResponse) GetGroupByResults() []*GroupResult { + if m != nil { + return m.GroupByResults + } + return nil +} + +func (m *GroupAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupAssetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for grouping by findings. +type GroupFindingsRequest struct { + // Name of the source to groupBy. Its format is + // "organizations/[organization_id]/sources/[source_id]". To groupBy across + // all sources provide a source_id of `-`. For example: + // organizations/123/sources/- + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across findings. + // The expression is a list of one or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are not supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form `<field> <operator> <value>` and may have a `-` + // character in front of them to indicate negation. Examples include: + // + // * name + // * source_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // For example, `source_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what assets fields to use for grouping (including + // `state`). The string value should follow SQL syntax: comma separated list + // of fields. For example: + // "parent,resource_name". + // + // The following fields are supported: + // + // * resource_name + // * category + // * state + // * parent + GroupBy string `protobuf:"bytes,3,opt,name=group_by,json=groupBy,proto3" json:"group_by,omitempty"` + // Time used as a reference point when filtering findings.
The filter is + // limited to findings existing at the supplied time and their values are + // those at that specific time. Absence of this field will default to the + // API's version of NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // The value returned by the last `GroupFindingsResponse`; indicates + // that this is a continuation of a prior `GroupFindings` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. + PageSize int32 `protobuf:"varint,6,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupFindingsRequest) Reset() { *m = GroupFindingsRequest{} } +func (m *GroupFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*GroupFindingsRequest) ProtoMessage() {} +func (*GroupFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{6} +} +func (m *GroupFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupFindingsRequest.Unmarshal(m, b) +} +func (m *GroupFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *GroupFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupFindingsRequest.Merge(dst, src) +} +func (m *GroupFindingsRequest) XXX_Size() int { + return xxx_messageInfo_GroupFindingsRequest.Size(m) +} +func (m *GroupFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GroupFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupFindingsRequest proto.InternalMessageInfo + +func (m *GroupFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GroupFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *GroupFindingsRequest) GetGroupBy() string { + if m != nil { + return m.GroupBy + } + return "" +} + +func (m *GroupFindingsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *GroupFindingsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for group by findings. +type GroupFindingsResponse struct { + // Group results. There exists an element for each existing unique + // combination of property/values. The element contains a count for the number + // of times those specific property/values appear. + GroupByResults []*GroupResult `protobuf:"bytes,1,rep,name=group_by_results,json=groupByResults,proto3" json:"group_by_results,omitempty"` + // Time used for executing the groupBy request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupFindingsResponse) Reset() { *m = GroupFindingsResponse{} } +func (m *GroupFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*GroupFindingsResponse) ProtoMessage() {} +func (*GroupFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{7} +} +func (m *GroupFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupFindingsResponse.Unmarshal(m, b) +} +func (m *GroupFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *GroupFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupFindingsResponse.Merge(dst, src) +} +func (m *GroupFindingsResponse) XXX_Size() int { + return xxx_messageInfo_GroupFindingsResponse.Size(m) +} +func (m *GroupFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GroupFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupFindingsResponse proto.InternalMessageInfo + +func (m *GroupFindingsResponse) GetGroupByResults() []*GroupResult { + if m != nil { + return m.GroupByResults + } + return nil +} + +func (m *GroupFindingsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *GroupFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Result containing the properties and count of a groupBy request. +type GroupResult struct { + // Properties matching the groupBy fields in the request. + Properties map[string]*_struct.Value `protobuf:"bytes,1,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Total count of resources for the given properties. + Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GroupResult) Reset() { *m = GroupResult{} } +func (m *GroupResult) String() string { return proto.CompactTextString(m) } +func (*GroupResult) ProtoMessage() {} +func (*GroupResult) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{8} +} +func (m *GroupResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GroupResult.Unmarshal(m, b) +} +func (m *GroupResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GroupResult.Marshal(b, m, deterministic) +} +func (dst *GroupResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_GroupResult.Merge(dst, src) +} +func (m *GroupResult) XXX_Size() int { + return xxx_messageInfo_GroupResult.Size(m) +} +func (m *GroupResult) XXX_DiscardUnknown() { + xxx_messageInfo_GroupResult.DiscardUnknown(m) +} + +var xxx_messageInfo_GroupResult proto.InternalMessageInfo + +func (m *GroupResult) GetProperties() map[string]*_struct.Value { + if m != nil { + return m.Properties + } + return nil +} + +func (m *GroupResult) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// Request message for listing sources. 
+type ListSourcesRequest struct { + // Resource name of the parent of sources to list. Its format should be + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The value returned by the last `ListSourcesResponse`; indicates + // that this is a continuation of a prior `ListSources` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. + PageSize int32 `protobuf:"varint,7,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSourcesRequest) Reset() { *m = ListSourcesRequest{} } +func (m *ListSourcesRequest) String() string { return proto.CompactTextString(m) } +func (*ListSourcesRequest) ProtoMessage() {} +func (*ListSourcesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{9} +} +func (m *ListSourcesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSourcesRequest.Unmarshal(m, b) +} +func (m *ListSourcesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSourcesRequest.Marshal(b, m, deterministic) +} +func (dst *ListSourcesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSourcesRequest.Merge(dst, src) +} +func (m *ListSourcesRequest) XXX_Size() int { + return xxx_messageInfo_ListSourcesRequest.Size(m) +} +func (m *ListSourcesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSourcesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSourcesRequest proto.InternalMessageInfo + +func (m *ListSourcesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSourcesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListSourcesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing sources. +type ListSourcesResponse struct { + // Sources belonging to the requested parent. + Sources []*Source `protobuf:"bytes,1,rep,name=sources,proto3" json:"sources,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSourcesResponse) Reset() { *m = ListSourcesResponse{} } +func (m *ListSourcesResponse) String() string { return proto.CompactTextString(m) } +func (*ListSourcesResponse) ProtoMessage() {} +func (*ListSourcesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{10} +} +func (m *ListSourcesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSourcesResponse.Unmarshal(m, b) +} +func (m *ListSourcesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSourcesResponse.Marshal(b, m, deterministic) +} +func (dst *ListSourcesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSourcesResponse.Merge(dst, src) +} +func (m *ListSourcesResponse) XXX_Size() int { + return xxx_messageInfo_ListSourcesResponse.Size(m) +} +func (m *ListSourcesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSourcesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSourcesResponse proto.InternalMessageInfo + +func (m *ListSourcesResponse) GetSources() []*Source { + if m != nil { + return m.Sources + } + return nil +} + +func (m *ListSourcesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for listing assets. +type ListAssetsRequest struct { + // Name of the organization assets should belong to. Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across assets. + // The expression is a list of zero or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are not supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form `<field> <operator> <value>` and may have a `-` + // character in front of them to indicate negation. The fields map to those + // defined in the Asset resource. Examples include: + // + // * name + // * security_center_properties.resource_name + // * resource_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // For example, `resource_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what fields and order to use for sorting. The + // string value should follow SQL syntax: comma separated list of fields. For + // example: "name,resource_properties.a_property". The default sorting order + // is ascending. To specify descending order for a field, a suffix " desc" + // should be appended to the field name. For example: "name + // desc,resource_properties.a_property". Redundant space characters in the + // syntax are insignificant. "name desc,resource_properties.a_property" and " + // name desc , resource_properties.a_property " are equivalent.
+ OrderBy string `protobuf:"bytes,3,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Time used as a reference point when filtering assets. The filter is limited + // to assets existing at the supplied time and their values are those at that + // specific time. Absence of this field will default to the API's version of + // NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // When compare_duration is set, the ListAssetResult's "state" attribute is + // updated to indicate whether the asset was added, removed, or remained + // present during the compare_duration period of time that precedes the + // read_time. This is the time between (read_time - + // compare_duration) and read_time. + // + // The state value is derived based on the presence of the asset at the two + // points in time. Intermediate state changes between the two times don't + // affect the result. For example, the results aren't affected if the asset is + // removed and re-created again. + // + // Possible "state" values when compare_duration is specified: + // + // * "ADDED": indicates that the asset was not present before + // compare_duration, but present at read_time. + // * "REMOVED": indicates that the asset was present at the start of + // compare_duration, but not present at read_time. + // * "ACTIVE": indicates that the asset was present at both the + // start and the end of the time period defined by + // compare_duration and read_time. + // + // If compare_duration is not specified, then the only possible state is + // "UNUSED", which indicates that the asset is present at read_time. + CompareDuration *duration.Duration `protobuf:"bytes,5,opt,name=compare_duration,json=compareDuration,proto3" json:"compare_duration,omitempty"` + // Optional. + // + // A field mask to specify the ListAssetsResult fields to be listed in the + // response. + // An empty field mask will list all fields. + FieldMask *field_mask.FieldMask `protobuf:"bytes,7,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // The value returned by the last `ListAssetsResponse`; indicates + // that this is a continuation of a prior `ListAssets` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,8,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. 
+ PageSize int32 `protobuf:"varint,9,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsRequest) Reset() { *m = ListAssetsRequest{} } +func (m *ListAssetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListAssetsRequest) ProtoMessage() {} +func (*ListAssetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{11} +} +func (m *ListAssetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsRequest.Unmarshal(m, b) +} +func (m *ListAssetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListAssetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsRequest.Merge(dst, src) +} +func (m *ListAssetsRequest) XXX_Size() int { + return xxx_messageInfo_ListAssetsRequest.Size(m) +} +func (m *ListAssetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsRequest proto.InternalMessageInfo + +func (m *ListAssetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListAssetsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListAssetsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListAssetsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListAssetsRequest) GetCompareDuration() *duration.Duration { + if m != nil { + return m.CompareDuration + } + return nil +} + +func (m *ListAssetsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListAssetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListAssetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing assets. +type ListAssetsResponse struct { + // Assets matching the list request. + ListAssetsResults []*ListAssetsResponse_ListAssetsResult `protobuf:"bytes,1,rep,name=list_assets_results,json=listAssetsResults,proto3" json:"list_assets_results,omitempty"` + // Time used for executing the list request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of assets matching the query. 
+ TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsResponse) Reset() { *m = ListAssetsResponse{} } +func (m *ListAssetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListAssetsResponse) ProtoMessage() {} +func (*ListAssetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{12} +} +func (m *ListAssetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsResponse.Unmarshal(m, b) +} +func (m *ListAssetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListAssetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsResponse.Merge(dst, src) +} +func (m *ListAssetsResponse) XXX_Size() int { + return xxx_messageInfo_ListAssetsResponse.Size(m) +} +func (m *ListAssetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsResponse proto.InternalMessageInfo + +func (m *ListAssetsResponse) GetListAssetsResults() []*ListAssetsResponse_ListAssetsResult { + if m != nil { + return m.ListAssetsResults + } + return nil +} + +func (m *ListAssetsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListAssetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListAssetsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Result containing the Asset and its State. +type ListAssetsResponse_ListAssetsResult struct { + // Asset matching the search request. + Asset *Asset `protobuf:"bytes,1,opt,name=asset,proto3" json:"asset,omitempty"` + // State of the asset. 
+ State ListAssetsResponse_ListAssetsResult_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.securitycenter.v1beta1.ListAssetsResponse_ListAssetsResult_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAssetsResponse_ListAssetsResult) Reset() { *m = ListAssetsResponse_ListAssetsResult{} } +func (m *ListAssetsResponse_ListAssetsResult) String() string { return proto.CompactTextString(m) } +func (*ListAssetsResponse_ListAssetsResult) ProtoMessage() {} +func (*ListAssetsResponse_ListAssetsResult) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{12, 0} +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Unmarshal(m, b) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Marshal(b, m, deterministic) +} +func (dst *ListAssetsResponse_ListAssetsResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Merge(dst, src) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_Size() int { + return xxx_messageInfo_ListAssetsResponse_ListAssetsResult.Size(m) +} +func (m *ListAssetsResponse_ListAssetsResult) XXX_DiscardUnknown() { + xxx_messageInfo_ListAssetsResponse_ListAssetsResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAssetsResponse_ListAssetsResult proto.InternalMessageInfo + +func (m *ListAssetsResponse_ListAssetsResult) GetAsset() *Asset { + if m != nil { + return m.Asset + } + return nil +} + +func (m *ListAssetsResponse_ListAssetsResult) GetState() ListAssetsResponse_ListAssetsResult_State { + if m != nil { + return m.State + } + return ListAssetsResponse_ListAssetsResult_STATE_UNSPECIFIED +} + +// Request message for listing findings. +type ListFindingsRequest struct { + // Name of the source the findings belong to. Its format is + // "organizations/[organization_id]/sources/[source_id]". To list across all + // sources provide a source_id of `-`. For example: + // organizations/123/sources/- + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Expression that defines the filter to apply across findings. + // The expression is a list of one or more restrictions combined via logical + // operators `AND` and `OR`. + // Parentheses are not supported, and `OR` has higher precedence than `AND`. + // + // Restrictions have the form `<field> <operator> <value>` and may have a `-` + // character in front of them to indicate negation. Examples include: + // + // * name + // * source_properties.a_property + // * security_marks.marks.marka + // + // The supported operators are: + // + // * `=` for all value types. + // * `>`, `<`, `>=`, `<=` for integer values. + // * `:`, meaning substring matching, for strings. + // + // The supported value types are: + // + // * string literals in quotes. + // * integer literals without quotes. + // * boolean literals `true` and `false` without quotes. + // + // For example, `source_properties.size = 100` is a valid filter string. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Expression that defines what fields and order to use for sorting. The + // string value should follow SQL syntax: comma separated list of fields. For + // example: "name,resource_properties.a_property".
The default sorting order + // is ascending. To specify descending order for a field, a suffix " desc" + // should be appended to the field name. For example: "name + // desc,source_properties.a_property". Redundant space characters in the + // syntax are insignificant. "name desc,source_properties.a_property" and " + // name desc , source_properties.a_property " are equivalent. + OrderBy string `protobuf:"bytes,3,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Time used as a reference point when filtering findings. The filter is + // limited to findings existing at the supplied time and their values are + // those at that specific time. Absence of this field will default to the + // API's version of NOW. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Optional. + // + // A field mask to specify the Finding fields to be listed in the response. + // An empty field mask will list all fields. + FieldMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + // The value returned by the last `ListFindingsResponse`; indicates + // that this is a continuation of a prior `ListFindings` call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single response. Default is + // 10, minimum is 1, maximum is 1000. + PageSize int32 `protobuf:"varint,7,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsRequest) Reset() { *m = ListFindingsRequest{} } +func (m *ListFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingsRequest) ProtoMessage() {} +func (*ListFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{13} +} +func (m *ListFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsRequest.Unmarshal(m, b) +} +func (m *ListFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsRequest.Merge(dst, src) +} +func (m *ListFindingsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingsRequest.Size(m) +} +func (m *ListFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsRequest proto.InternalMessageInfo + +func (m *ListFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFindingsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListFindingsRequest) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListFindingsRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +func (m *ListFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListFindingsRequest) 
GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response message for listing findings. +type ListFindingsResponse struct { + // Findings matching the list request. + Findings []*Finding `protobuf:"bytes,1,rep,name=findings,proto3" json:"findings,omitempty"` + // Time used for executing the list request. + ReadTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of findings matching the query. + TotalSize int32 `protobuf:"varint,4,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsResponse) Reset() { *m = ListFindingsResponse{} } +func (m *ListFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingsResponse) ProtoMessage() {} +func (*ListFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{14} +} +func (m *ListFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsResponse.Unmarshal(m, b) +} +func (m *ListFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsResponse.Merge(dst, src) +} +func (m *ListFindingsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingsResponse.Size(m) +} +func (m *ListFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsResponse proto.InternalMessageInfo + +func (m *ListFindingsResponse) GetFindings() []*Finding { + if m != nil { + return m.Findings + } + return nil +} + +func (m *ListFindingsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *ListFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListFindingsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// Request message for updating a finding's state. +type SetFindingStateRequest struct { + // The relative resource name of the finding. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456/finding/789". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The desired State of the finding. + State Finding_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.securitycenter.v1beta1.Finding_State" json:"state,omitempty"` + // The time at which the updated state takes effect. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetFindingStateRequest) Reset() { *m = SetFindingStateRequest{} } +func (m *SetFindingStateRequest) String() string { return proto.CompactTextString(m) } +func (*SetFindingStateRequest) ProtoMessage() {} +func (*SetFindingStateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{15} +} +func (m *SetFindingStateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetFindingStateRequest.Unmarshal(m, b) +} +func (m *SetFindingStateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetFindingStateRequest.Marshal(b, m, deterministic) +} +func (dst *SetFindingStateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetFindingStateRequest.Merge(dst, src) +} +func (m *SetFindingStateRequest) XXX_Size() int { + return xxx_messageInfo_SetFindingStateRequest.Size(m) +} +func (m *SetFindingStateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetFindingStateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetFindingStateRequest proto.InternalMessageInfo + +func (m *SetFindingStateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SetFindingStateRequest) GetState() Finding_State { + if m != nil { + return m.State + } + return Finding_STATE_UNSPECIFIED +} + +func (m *SetFindingStateRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +// Request message for running asset discovery for an organization. +type RunAssetDiscoveryRequest struct { + // Name of the organization to run asset discovery for. Its format is + // "organizations/[organization_id]". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunAssetDiscoveryRequest) Reset() { *m = RunAssetDiscoveryRequest{} } +func (m *RunAssetDiscoveryRequest) String() string { return proto.CompactTextString(m) } +func (*RunAssetDiscoveryRequest) ProtoMessage() {} +func (*RunAssetDiscoveryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{16} +} +func (m *RunAssetDiscoveryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunAssetDiscoveryRequest.Unmarshal(m, b) +} +func (m *RunAssetDiscoveryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunAssetDiscoveryRequest.Marshal(b, m, deterministic) +} +func (dst *RunAssetDiscoveryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunAssetDiscoveryRequest.Merge(dst, src) +} +func (m *RunAssetDiscoveryRequest) XXX_Size() int { + return xxx_messageInfo_RunAssetDiscoveryRequest.Size(m) +} +func (m *RunAssetDiscoveryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunAssetDiscoveryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunAssetDiscoveryRequest proto.InternalMessageInfo + +func (m *RunAssetDiscoveryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// Request message for updating or creating a finding. +type UpdateFindingRequest struct { + // The finding resource to update or create if it does not already exist. 
+ // parent, security_marks, and update_time will be ignored. + // + // In the case of creation, the finding id portion of the name must be + // alphanumeric and less than or equal to 32 characters and greater than 0 + // characters in length. + Finding *Finding `protobuf:"bytes,1,opt,name=finding,proto3" json:"finding,omitempty"` + // The FieldMask to use when updating the finding resource. This field should + // not be specified when creating a finding. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFindingRequest) Reset() { *m = UpdateFindingRequest{} } +func (m *UpdateFindingRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFindingRequest) ProtoMessage() {} +func (*UpdateFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{17} +} +func (m *UpdateFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFindingRequest.Unmarshal(m, b) +} +func (m *UpdateFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFindingRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFindingRequest.Merge(dst, src) +} +func (m *UpdateFindingRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFindingRequest.Size(m) +} +func (m *UpdateFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFindingRequest proto.InternalMessageInfo + +func (m *UpdateFindingRequest) GetFinding() *Finding { + if m != nil { + return m.Finding + } + return nil +} + +func (m *UpdateFindingRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating an organization's settings. +type UpdateOrganizationSettingsRequest struct { + // The organization settings resource to update. + OrganizationSettings *OrganizationSettings `protobuf:"bytes,1,opt,name=organization_settings,json=organizationSettings,proto3" json:"organization_settings,omitempty"` + // The FieldMask to use when updating the settings resource. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateOrganizationSettingsRequest) Reset() { *m = UpdateOrganizationSettingsRequest{} } +func (m *UpdateOrganizationSettingsRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOrganizationSettingsRequest) ProtoMessage() {} +func (*UpdateOrganizationSettingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{18} +} +func (m *UpdateOrganizationSettingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Unmarshal(m, b) +} +func (m *UpdateOrganizationSettingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOrganizationSettingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOrganizationSettingsRequest.Merge(dst, src) +} +func (m *UpdateOrganizationSettingsRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOrganizationSettingsRequest.Size(m) +} +func (m *UpdateOrganizationSettingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOrganizationSettingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOrganizationSettingsRequest proto.InternalMessageInfo + +func (m *UpdateOrganizationSettingsRequest) GetOrganizationSettings() *OrganizationSettings { + if m != nil { + return m.OrganizationSettings + } + return nil +} + +func (m *UpdateOrganizationSettingsRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating a source. +type UpdateSourceRequest struct { + // The source resource to update. + Source *Source `protobuf:"bytes,1,opt,name=source,proto3" json:"source,omitempty"` + // The FieldMask to use when updating the source resource. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSourceRequest) Reset() { *m = UpdateSourceRequest{} } +func (m *UpdateSourceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSourceRequest) ProtoMessage() {} +func (*UpdateSourceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{19} +} +func (m *UpdateSourceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSourceRequest.Unmarshal(m, b) +} +func (m *UpdateSourceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSourceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSourceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSourceRequest.Merge(dst, src) +} +func (m *UpdateSourceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSourceRequest.Size(m) +} +func (m *UpdateSourceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSourceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSourceRequest proto.InternalMessageInfo + +func (m *UpdateSourceRequest) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *UpdateSourceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for updating a SecurityMarks resource. +type UpdateSecurityMarksRequest struct { + // The security marks resource to update. + SecurityMarks *SecurityMarks `protobuf:"bytes,1,opt,name=security_marks,json=securityMarks,proto3" json:"security_marks,omitempty"` + // The FieldMask to use when updating the security marks resource. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The time at which the updated SecurityMarks take effect. 
+ StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSecurityMarksRequest) Reset() { *m = UpdateSecurityMarksRequest{} } +func (m *UpdateSecurityMarksRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSecurityMarksRequest) ProtoMessage() {} +func (*UpdateSecurityMarksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_securitycenter_service_32527bc15746ec09, []int{20} +} +func (m *UpdateSecurityMarksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSecurityMarksRequest.Unmarshal(m, b) +} +func (m *UpdateSecurityMarksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSecurityMarksRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSecurityMarksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSecurityMarksRequest.Merge(dst, src) +} +func (m *UpdateSecurityMarksRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSecurityMarksRequest.Size(m) +} +func (m *UpdateSecurityMarksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSecurityMarksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSecurityMarksRequest proto.InternalMessageInfo + +func (m *UpdateSecurityMarksRequest) GetSecurityMarks() *SecurityMarks { + if m != nil { + return m.SecurityMarks + } + return nil +} + +func (m *UpdateSecurityMarksRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateSecurityMarksRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func init() { + proto.RegisterType((*CreateFindingRequest)(nil), "google.cloud.securitycenter.v1beta1.CreateFindingRequest") + proto.RegisterType((*CreateSourceRequest)(nil), "google.cloud.securitycenter.v1beta1.CreateSourceRequest") + proto.RegisterType((*GetOrganizationSettingsRequest)(nil), "google.cloud.securitycenter.v1beta1.GetOrganizationSettingsRequest") + proto.RegisterType((*GetSourceRequest)(nil), "google.cloud.securitycenter.v1beta1.GetSourceRequest") + proto.RegisterType((*GroupAssetsRequest)(nil), "google.cloud.securitycenter.v1beta1.GroupAssetsRequest") + proto.RegisterType((*GroupAssetsResponse)(nil), "google.cloud.securitycenter.v1beta1.GroupAssetsResponse") + proto.RegisterType((*GroupFindingsRequest)(nil), "google.cloud.securitycenter.v1beta1.GroupFindingsRequest") + proto.RegisterType((*GroupFindingsResponse)(nil), "google.cloud.securitycenter.v1beta1.GroupFindingsResponse") + proto.RegisterType((*GroupResult)(nil), "google.cloud.securitycenter.v1beta1.GroupResult") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "google.cloud.securitycenter.v1beta1.GroupResult.PropertiesEntry") + proto.RegisterType((*ListSourcesRequest)(nil), "google.cloud.securitycenter.v1beta1.ListSourcesRequest") + proto.RegisterType((*ListSourcesResponse)(nil), "google.cloud.securitycenter.v1beta1.ListSourcesResponse") + proto.RegisterType((*ListAssetsRequest)(nil), "google.cloud.securitycenter.v1beta1.ListAssetsRequest") + proto.RegisterType((*ListAssetsResponse)(nil), "google.cloud.securitycenter.v1beta1.ListAssetsResponse") + proto.RegisterType((*ListAssetsResponse_ListAssetsResult)(nil), "google.cloud.securitycenter.v1beta1.ListAssetsResponse.ListAssetsResult") + proto.RegisterType((*ListFindingsRequest)(nil), 
"google.cloud.securitycenter.v1beta1.ListFindingsRequest") + proto.RegisterType((*ListFindingsResponse)(nil), "google.cloud.securitycenter.v1beta1.ListFindingsResponse") + proto.RegisterType((*SetFindingStateRequest)(nil), "google.cloud.securitycenter.v1beta1.SetFindingStateRequest") + proto.RegisterType((*RunAssetDiscoveryRequest)(nil), "google.cloud.securitycenter.v1beta1.RunAssetDiscoveryRequest") + proto.RegisterType((*UpdateFindingRequest)(nil), "google.cloud.securitycenter.v1beta1.UpdateFindingRequest") + proto.RegisterType((*UpdateOrganizationSettingsRequest)(nil), "google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest") + proto.RegisterType((*UpdateSourceRequest)(nil), "google.cloud.securitycenter.v1beta1.UpdateSourceRequest") + proto.RegisterType((*UpdateSecurityMarksRequest)(nil), "google.cloud.securitycenter.v1beta1.UpdateSecurityMarksRequest") + proto.RegisterEnum("google.cloud.securitycenter.v1beta1.ListAssetsResponse_ListAssetsResult_State", ListAssetsResponse_ListAssetsResult_State_name, ListAssetsResponse_ListAssetsResult_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SecurityCenterClient is the client API for SecurityCenter service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SecurityCenterClient interface { + // Creates a source. + CreateSource(ctx context.Context, in *CreateSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Creates a finding. The corresponding source must exist for finding creation + // to succeed. + CreateFinding(ctx context.Context, in *CreateFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // Gets the access control policy on the specified Source. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the settings for an organization. + GetOrganizationSettings(ctx context.Context, in *GetOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) + // Gets a source. + GetSource(ctx context.Context, in *GetSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Filters an organization's assets and groups them by their specified + // properties. + GroupAssets(ctx context.Context, in *GroupAssetsRequest, opts ...grpc.CallOption) (*GroupAssetsResponse, error) + // Filters an organization or source's findings and groups them by their + // specified properties. + // + // To group across all sources provide a `-` as the source id. + // Example: /v1beta1/organizations/123/sources/-/findings + GroupFindings(ctx context.Context, in *GroupFindingsRequest, opts ...grpc.CallOption) (*GroupFindingsResponse, error) + // Lists an organization's assets. + ListAssets(ctx context.Context, in *ListAssetsRequest, opts ...grpc.CallOption) (*ListAssetsResponse, error) + // Lists an organization or source's findings. + // + // To list across all sources provide a `-` as the source id. + // Example: /v1beta1/organizations/123/sources/-/findings + ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) + // Lists all sources belonging to an organization. 
+ ListSources(ctx context.Context, in *ListSourcesRequest, opts ...grpc.CallOption) (*ListSourcesResponse, error) + // Runs asset discovery. The discovery is tracked with a long-running + // operation. + // + // This API can only be called with limited frequency for an organization. If + // it is called too frequently the caller will receive a TOO_MANY_REQUESTS + // error. + RunAssetDiscovery(ctx context.Context, in *RunAssetDiscoveryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates the state of a finding. + SetFindingState(ctx context.Context, in *SetFindingStateRequest, opts ...grpc.CallOption) (*Finding, error) + // Sets the access control policy on the specified Source. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified source. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Creates or updates a finding. The corresponding source must exist for a + // finding creation to succeed. + UpdateFinding(ctx context.Context, in *UpdateFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // Updates an organization's settings. + UpdateOrganizationSettings(ctx context.Context, in *UpdateOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) + // Updates a source. + UpdateSource(ctx context.Context, in *UpdateSourceRequest, opts ...grpc.CallOption) (*Source, error) + // Updates security marks. + UpdateSecurityMarks(ctx context.Context, in *UpdateSecurityMarksRequest, opts ...grpc.CallOption) (*SecurityMarks, error) +} + +type securityCenterClient struct { + cc *grpc.ClientConn +} + +func NewSecurityCenterClient(cc *grpc.ClientConn) SecurityCenterClient { + return &securityCenterClient{cc} +} + +func (c *securityCenterClient) CreateSource(ctx context.Context, in *CreateSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) CreateFinding(ctx context.Context, in *CreateFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetOrganizationSettings(ctx context.Context, in *GetOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) { + out := new(OrganizationSettings) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetOrganizationSettings", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GetSource(ctx context.Context, in *GetSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GroupAssets(ctx context.Context, in *GroupAssetsRequest, opts ...grpc.CallOption) (*GroupAssetsResponse, error) { + out := new(GroupAssetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) GroupFindings(ctx context.Context, in *GroupFindingsRequest, opts ...grpc.CallOption) (*GroupFindingsResponse, error) { + out := new(GroupFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListAssets(ctx context.Context, in *ListAssetsRequest, opts ...grpc.CallOption) (*ListAssetsResponse, error) { + out := new(ListAssetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListAssets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) { + out := new(ListFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) ListSources(ctx context.Context, in *ListSourcesRequest, opts ...grpc.CallOption) (*ListSourcesResponse, error) { + out := new(ListSourcesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListSources", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) RunAssetDiscovery(ctx context.Context, in *RunAssetDiscoveryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/RunAssetDiscovery", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) SetFindingState(ctx context.Context, in *SetFindingStateRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/SetFindingState", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/SetIamPolicy", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateFinding(ctx context.Context, in *UpdateFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateOrganizationSettings(ctx context.Context, in *UpdateOrganizationSettingsRequest, opts ...grpc.CallOption) (*OrganizationSettings, error) { + out := new(OrganizationSettings) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateOrganizationSettings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateSource(ctx context.Context, in *UpdateSourceRequest, opts ...grpc.CallOption) (*Source, error) { + out := new(Source) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSource", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *securityCenterClient) UpdateSecurityMarks(ctx context.Context, in *UpdateSecurityMarksRequest, opts ...grpc.CallOption) (*SecurityMarks, error) { + out := new(SecurityMarks) + err := c.cc.Invoke(ctx, "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSecurityMarks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SecurityCenterServer is the server API for SecurityCenter service. +type SecurityCenterServer interface { + // Creates a source. + CreateSource(context.Context, *CreateSourceRequest) (*Source, error) + // Creates a finding. The corresponding source must exist for finding creation + // to succeed. + CreateFinding(context.Context, *CreateFindingRequest) (*Finding, error) + // Gets the access control policy on the specified Source. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Gets the settings for an organization. + GetOrganizationSettings(context.Context, *GetOrganizationSettingsRequest) (*OrganizationSettings, error) + // Gets a source. + GetSource(context.Context, *GetSourceRequest) (*Source, error) + // Filters an organization's assets and groups them by their specified + // properties. + GroupAssets(context.Context, *GroupAssetsRequest) (*GroupAssetsResponse, error) + // Filters an organization or source's findings and groups them by their + // specified properties. + // + // To group across all sources provide a `-` as the source id. + // Example: /v1beta1/organizations/123/sources/-/findings + GroupFindings(context.Context, *GroupFindingsRequest) (*GroupFindingsResponse, error) + // Lists an organization's assets. + ListAssets(context.Context, *ListAssetsRequest) (*ListAssetsResponse, error) + // Lists an organization or source's findings. + // + // To list across all sources provide a `-` as the source id. 
+ // Example: /v1beta1/organizations/123/sources/-/findings + ListFindings(context.Context, *ListFindingsRequest) (*ListFindingsResponse, error) + // Lists all sources belonging to an organization. + ListSources(context.Context, *ListSourcesRequest) (*ListSourcesResponse, error) + // Runs asset discovery. The discovery is tracked with a long-running + // operation. + // + // This API can only be called with limited frequency for an organization. If + // it is called too frequently the caller will receive a TOO_MANY_REQUESTS + // error. + RunAssetDiscovery(context.Context, *RunAssetDiscoveryRequest) (*longrunning.Operation, error) + // Updates the state of a finding. + SetFindingState(context.Context, *SetFindingStateRequest) (*Finding, error) + // Sets the access control policy on the specified Source. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified source. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Creates or updates a finding. The corresponding source must exist for a + // finding creation to succeed. + UpdateFinding(context.Context, *UpdateFindingRequest) (*Finding, error) + // Updates an organization's settings. + UpdateOrganizationSettings(context.Context, *UpdateOrganizationSettingsRequest) (*OrganizationSettings, error) + // Updates a source. + UpdateSource(context.Context, *UpdateSourceRequest) (*Source, error) + // Updates security marks. + UpdateSecurityMarks(context.Context, *UpdateSecurityMarksRequest) (*SecurityMarks, error) +} + +func RegisterSecurityCenterServer(s *grpc.Server, srv SecurityCenterServer) { + s.RegisterService(&_SecurityCenter_serviceDesc, srv) +} + +func _SecurityCenter_CreateSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).CreateSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).CreateSource(ctx, req.(*CreateSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_CreateFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).CreateFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/CreateFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).CreateFinding(ctx, req.(*CreateFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetIamPolicy(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetOrganizationSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOrganizationSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetOrganizationSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetOrganizationSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetOrganizationSettings(ctx, req.(*GetOrganizationSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GetSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GetSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/GetSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GetSource(ctx, req.(*GetSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GroupAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GroupAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GroupAssets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GroupAssets(ctx, req.(*GroupAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_GroupFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GroupFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).GroupFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/GroupFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).GroupFindings(ctx, req.(*GroupFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListAssets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAssetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).ListAssets(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListAssets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListAssets(ctx, req.(*ListAssetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).ListFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListFindings(ctx, req.(*ListFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_ListSources_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSourcesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).ListSources(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/ListSources", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).ListSources(ctx, req.(*ListSourcesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_RunAssetDiscovery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunAssetDiscoveryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).RunAssetDiscovery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/RunAssetDiscovery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).RunAssetDiscovery(ctx, req.(*RunAssetDiscoveryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_SetFindingState_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetFindingStateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).SetFindingState(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/SetFindingState", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).SetFindingState(ctx, req.(*SetFindingStateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).SetIamPolicy(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateFinding(ctx, req.(*UpdateFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateOrganizationSettings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOrganizationSettingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateOrganizationSettings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateOrganizationSettings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateOrganizationSettings(ctx, req.(*UpdateOrganizationSettingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateSource_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSourceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SecurityCenterServer).UpdateSource(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSource", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateSource(ctx, req.(*UpdateSourceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SecurityCenter_UpdateSecurityMarks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSecurityMarksRequest) + if err := dec(in); err != nil { + return nil, err + } + 
if interceptor == nil { + return srv.(SecurityCenterServer).UpdateSecurityMarks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.securitycenter.v1beta1.SecurityCenter/UpdateSecurityMarks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SecurityCenterServer).UpdateSecurityMarks(ctx, req.(*UpdateSecurityMarksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SecurityCenter_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.securitycenter.v1beta1.SecurityCenter", + HandlerType: (*SecurityCenterServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateSource", + Handler: _SecurityCenter_CreateSource_Handler, + }, + { + MethodName: "CreateFinding", + Handler: _SecurityCenter_CreateFinding_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _SecurityCenter_GetIamPolicy_Handler, + }, + { + MethodName: "GetOrganizationSettings", + Handler: _SecurityCenter_GetOrganizationSettings_Handler, + }, + { + MethodName: "GetSource", + Handler: _SecurityCenter_GetSource_Handler, + }, + { + MethodName: "GroupAssets", + Handler: _SecurityCenter_GroupAssets_Handler, + }, + { + MethodName: "GroupFindings", + Handler: _SecurityCenter_GroupFindings_Handler, + }, + { + MethodName: "ListAssets", + Handler: _SecurityCenter_ListAssets_Handler, + }, + { + MethodName: "ListFindings", + Handler: _SecurityCenter_ListFindings_Handler, + }, + { + MethodName: "ListSources", + Handler: _SecurityCenter_ListSources_Handler, + }, + { + MethodName: "RunAssetDiscovery", + Handler: _SecurityCenter_RunAssetDiscovery_Handler, + }, + { + MethodName: "SetFindingState", + Handler: _SecurityCenter_SetFindingState_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _SecurityCenter_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _SecurityCenter_TestIamPermissions_Handler, + }, + { + MethodName: "UpdateFinding", + Handler: _SecurityCenter_UpdateFinding_Handler, + }, + { + MethodName: "UpdateOrganizationSettings", + Handler: _SecurityCenter_UpdateOrganizationSettings_Handler, + }, + { + MethodName: "UpdateSource", + Handler: _SecurityCenter_UpdateSource_Handler, + }, + { + MethodName: "UpdateSecurityMarks", + Handler: _SecurityCenter_UpdateSecurityMarks_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/securitycenter/v1beta1/securitycenter_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/securitycenter_service.proto", fileDescriptor_securitycenter_service_32527bc15746ec09) +} + +var fileDescriptor_securitycenter_service_32527bc15746ec09 = []byte{ + // 1902 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x5a, 0xdb, 0x6f, 0x1b, 0x59, + 0x19, 0xe7, 0x38, 0x71, 0x12, 0x7f, 0x6e, 0x5a, 0xf7, 0x24, 0x2d, 0xde, 0xd9, 0xee, 0x12, 0x06, + 0x69, 0x49, 0xbb, 0x2b, 0xbb, 0xf1, 0x5e, 0x9a, 0xba, 0x5b, 0x35, 0x6d, 0xec, 0x5c, 0x80, 0xa4, + 0x59, 0x3b, 0xa9, 0x44, 0x5f, 0xcc, 0xc4, 0x3e, 0x31, 0xa3, 0xd8, 0x33, 0x66, 0xce, 0x71, 0xb4, + 0x29, 0x2a, 0x42, 0x88, 0xa7, 0x7d, 0x5a, 0x09, 0x21, 0x24, 0xf6, 0x8d, 0x7f, 0x00, 0x10, 0x12, + 0x3c, 0x80, 0x84, 0x10, 0x12, 0x3c, 0x21, 0xad, 0xa0, 0x82, 0x17, 0x1e, 0x78, 0xe0, 0x9d, 0x7f, + 0x01, 0x9d, 0x9b, 0x33, 0x33, 0x9e, 0xda, 0x33, 0x71, 0x23, 0x24, 0x9e, 0xe2, 0x73, 0xf9, 0xce, + 0xf9, 0xfd, 0xbe, 0xcb, 0x39, 0xdf, 0x77, 0x26, 0xb0, 0xd6, 0x76, 0xdd, 0x76, 0x87, 0x14, 0x9b, + 0x1d, 0xb7, 0xdf, 0x2a, 0x52, 
0xd2, 0xec, 0x7b, 0x36, 0x3b, 0x6d, 0x12, 0x87, 0x11, 0xaf, 0x78, + 0xb2, 0x72, 0x48, 0x98, 0xb5, 0x12, 0xea, 0x6e, 0x50, 0xe2, 0x9d, 0xd8, 0x4d, 0x52, 0xe8, 0x79, + 0x2e, 0x73, 0xf1, 0x57, 0xe4, 0x0a, 0x05, 0xb1, 0x42, 0x21, 0x38, 0xb5, 0xa0, 0x56, 0x30, 0x6e, + 0xa8, 0x6d, 0xac, 0x9e, 0x5d, 0xb4, 0x1c, 0xc7, 0x65, 0x16, 0xb3, 0x5d, 0x87, 0xca, 0x25, 0x8c, + 0x62, 0x1c, 0x10, 0x16, 0xa5, 0x84, 0x29, 0x81, 0x95, 0x38, 0x02, 0x47, 0xb6, 0xd3, 0xb2, 0x9d, + 0xb6, 0x12, 0x79, 0x10, 0x47, 0xc4, 0xf5, 0xda, 0x96, 0x63, 0x3f, 0x13, 0xe0, 0x1a, 0x94, 0x30, + 0x66, 0x3b, 0x6d, 0x0d, 0x72, 0x35, 0x89, 0xa6, 0x1a, 0x5d, 0xcb, 0x3b, 0xd6, 0x92, 0xb7, 0x63, + 0x49, 0xba, 0x7d, 0x4f, 0xeb, 0xd4, 0x78, 0x53, 0x49, 0xd8, 0x56, 0xb7, 0x78, 0xb2, 0xc2, 0xff, + 0x34, 0x7a, 0x6e, 0xc7, 0x6e, 0x9e, 0xaa, 0x71, 0x23, 0x38, 0x1e, 0x18, 0x53, 0xf6, 0x28, 0x76, + 0x5c, 0xa7, 0xed, 0xf5, 0x1d, 0xc7, 0x76, 0xda, 0x45, 0xb7, 0x47, 0xbc, 0x80, 0xc6, 0xf5, 0x06, + 0xa2, 0x75, 0xd8, 0x3f, 0x2a, 0xb6, 0xfa, 0x72, 0x82, 0x1a, 0x7f, 0x3d, 0x3c, 0x4e, 0xba, 0x3d, + 0xa6, 0x77, 0x58, 0x0a, 0x0f, 0x1e, 0xd9, 0xa4, 0xd3, 0x6a, 0x74, 0x2d, 0x7a, 0xac, 0x66, 0xdc, + 0x08, 0xcf, 0xa0, 0xcc, 0xeb, 0x37, 0xb5, 0xf5, 0xbe, 0x14, 0x1e, 0x65, 0x76, 0x97, 0x50, 0x66, + 0x75, 0x7b, 0x72, 0x82, 0xf9, 0x63, 0x04, 0x8b, 0xeb, 0x1e, 0xb1, 0x18, 0xd9, 0x90, 0x36, 0xac, + 0x91, 0xef, 0xf4, 0x09, 0x65, 0xf8, 0x3a, 0xcc, 0xf4, 0x2c, 0x8f, 0x38, 0x2c, 0x8f, 0x96, 0xd0, + 0x72, 0xa6, 0xa6, 0x5a, 0xf8, 0x0d, 0x00, 0x65, 0xed, 0x86, 0xdd, 0xca, 0xa7, 0xc4, 0x58, 0x46, + 0xf5, 0x6c, 0xb7, 0xf0, 0x06, 0xcc, 0xaa, 0x46, 0x7e, 0x6a, 0x09, 0x2d, 0x67, 0x4b, 0xef, 0x14, + 0x62, 0x38, 0x6d, 0x41, 0x6f, 0xae, 0x85, 0x4d, 0x0f, 0x16, 0x24, 0xac, 0xba, 0x30, 0xd6, 0x38, + 0x54, 0xeb, 0x30, 0x23, 0xad, 0x2a, 0x10, 0x65, 0x4b, 0x6f, 0xc7, 0xda, 0x55, 0xad, 0xad, 0x44, + 0xcd, 0xf7, 0xe0, 0xcd, 0x4d, 0xc2, 0x1e, 0xfb, 0x1c, 0xb3, 0xae, 0xfc, 0x52, 0x6f, 0x8f, 0x61, + 0xda, 0xb1, 0xba, 0x44, 0x6d, 0x2e, 0x7e, 0x9b, 0x6f, 0x41, 0x6e, 0x93, 0xb0, 0x20, 0xcc, 0xa8, + 0x79, 0x3f, 0x4d, 0x01, 0xde, 0xf4, 0xdc, 0x7e, 0xef, 0x21, 0x8f, 0x2e, 0x3a, 0x8e, 0xd1, 0x75, + 0x98, 0x39, 0xb2, 0x3b, 0x8c, 0x78, 0x4a, 0xc7, 0xaa, 0x85, 0x5f, 0x83, 0xb9, 0x36, 0x5f, 0xa5, + 0x71, 0x78, 0x2a, 0x34, 0x9c, 0xa9, 0xcd, 0x8a, 0xf6, 0xa3, 0x53, 0x5c, 0x81, 0x5c, 0xd3, 0xed, + 0x72, 0xf9, 0x86, 0xf6, 0xb1, 0xfc, 0xb4, 0x50, 0xc7, 0x6b, 0x5a, 0x1d, 0xda, 0x0f, 0x0a, 0x15, + 0x35, 0xa1, 0x76, 0x45, 0x89, 0xe8, 0x0e, 0x7c, 0x07, 0x32, 0x1e, 0xb1, 0x5a, 0x0d, 0xee, 0x29, + 0xf9, 0xb4, 0x10, 0x37, 0x86, 0xc4, 0xf7, 0xb5, 0x1b, 0xd5, 0xe6, 0xf8, 0x64, 0xde, 0xe4, 0x9e, + 0xd1, 0xb3, 0xda, 0xa4, 0xc1, 0xdc, 0x63, 0xe2, 0xe4, 0x67, 0xa5, 0x67, 0xf0, 0x9e, 0x7d, 0xde, + 0x81, 0x5f, 0x07, 0xd1, 0x68, 0x50, 0xfb, 0x19, 0xc9, 0xcf, 0x2d, 0xa1, 0xe5, 0x74, 0x6d, 0x8e, + 0x77, 0xd4, 0xed, 0x67, 0xc4, 0xfc, 0x2b, 0x82, 0x85, 0x80, 0x72, 0x68, 0xcf, 0x75, 0x28, 0xc1, + 0x4f, 0x21, 0xa7, 0xd9, 0x36, 0x3c, 0x42, 0xfb, 0x1d, 0x46, 0xf3, 0x68, 0x69, 0x6a, 0x39, 0x5b, + 0xba, 0x1d, 0xcb, 0xc2, 0x62, 0xcd, 0x9a, 0x10, 0xac, 0x5d, 0x56, 0x7a, 0x92, 0x4d, 0x1a, 0x24, + 0x9a, 0x4a, 0x40, 0xf4, 0x2d, 0xb8, 0xe2, 0x90, 0x8f, 0x59, 0xc3, 0xc7, 0x56, 0x5a, 0x62, 0x9e, + 0x77, 0xef, 0x69, 0xc6, 0xe6, 0xdf, 0x11, 0x2c, 0x0a, 0x00, 0xca, 0xbb, 0x2f, 0xc2, 0xe6, 0x01, + 0x12, 0xd3, 0xe7, 0xb6, 0x56, 0x7a, 0xa4, 0xb5, 0x66, 0x42, 0xd6, 0x7a, 0x81, 0xe0, 0x5a, 0x88, + 0xd8, 0xff, 0x83, 0xbd, 0xfe, 0x89, 0x20, 0xeb, 0x03, 0x80, 0xbf, 0x05, 0xd0, 0xf3, 0xf8, 0x79, + 0xce, 0x6c, 0xa2, 0x69, 0xac, 0x25, 0xa5, 0x51, 0xd8, 
0x1b, 0x2c, 0x51, 0x75, 0x98, 0x77, 0x5a, + 0xf3, 0xad, 0x89, 0x17, 0x21, 0xdd, 0x74, 0xfb, 0x0e, 0x13, 0x74, 0xa6, 0x6a, 0xb2, 0x61, 0x1c, + 0xc0, 0x95, 0x90, 0x10, 0xce, 0xc1, 0xd4, 0x31, 0x39, 0x55, 0xee, 0xc2, 0x7f, 0xe2, 0x77, 0x20, + 0x7d, 0x62, 0x75, 0xfa, 0x5a, 0x13, 0xd7, 0x87, 0x34, 0xf1, 0x84, 0x8f, 0xd6, 0xe4, 0xa4, 0x72, + 0x6a, 0x15, 0x99, 0xdf, 0x06, 0xfc, 0x0d, 0x9b, 0xaa, 0x93, 0x8a, 0xc6, 0x38, 0xe7, 0x7d, 0xfa, + 0x4a, 0x8d, 0xf4, 0x8f, 0xd9, 0x90, 0x7f, 0xfc, 0x10, 0xc1, 0x42, 0x60, 0x2b, 0xe5, 0x1d, 0x55, + 0x98, 0x95, 0x47, 0xad, 0xd6, 0x66, 0xa2, 0x63, 0x5a, 0xcb, 0x46, 0xd9, 0x33, 0x15, 0x65, 0xcf, + 0x17, 0x29, 0xb8, 0xca, 0x61, 0x4c, 0x7c, 0xe0, 0xba, 0x5e, 0x8b, 0x78, 0xbe, 0xe0, 0x13, 0xed, + 0x49, 0x82, 0x2f, 0xea, 0xa4, 0x4e, 0x27, 0x3e, 0xa9, 0xef, 0xf2, 0xab, 0x58, 0xa7, 0x03, 0xc2, + 0x08, 0x51, 0xfb, 0x6f, 0xf0, 0x29, 0x3b, 0x16, 0x3d, 0xe6, 0xd7, 0xb4, 0xfa, 0x19, 0xb2, 0xee, + 0xdc, 0x48, 0xeb, 0x66, 0x42, 0xd6, 0xfd, 0x6c, 0x5a, 0x3a, 0x52, 0xe8, 0xa8, 0xfe, 0x18, 0x16, + 0x3a, 0x36, 0x65, 0x0d, 0x91, 0x3c, 0xd2, 0x50, 0xf4, 0x6f, 0xc5, 0x32, 0xf4, 0xf0, 0xaa, 0xc1, + 0x2e, 0x7e, 0x2a, 0x5c, 0xed, 0x84, 0x7a, 0x2e, 0xfe, 0x60, 0xe0, 0xda, 0x62, 0x2e, 0xb3, 0x3a, + 0x52, 0x1f, 0xd3, 0x42, 0x1f, 0x19, 0xd1, 0xc3, 0x15, 0x62, 0x7c, 0x92, 0x82, 0x5c, 0x18, 0x27, + 0x5e, 0x83, 0xb4, 0xd0, 0x84, 0xf0, 0xb2, 0x6c, 0xe9, 0x56, 0x2c, 0x05, 0x88, 0x15, 0x6a, 0x52, + 0x10, 0xb7, 0x20, 0x4d, 0x99, 0xc5, 0x24, 0xa5, 0xcb, 0xa5, 0xdd, 0x57, 0xa5, 0xc2, 0x42, 0x9d, + 0xaf, 0x5a, 0x93, 0x8b, 0x9b, 0xbb, 0x90, 0x16, 0x6d, 0x7c, 0x0d, 0xae, 0xd6, 0xf7, 0x1f, 0xee, + 0x57, 0x1b, 0x07, 0xbb, 0xf5, 0xbd, 0xea, 0xfa, 0xf6, 0xc6, 0x76, 0xb5, 0x92, 0xfb, 0x02, 0x06, + 0x98, 0x39, 0xd8, 0x3d, 0xa8, 0x57, 0x2b, 0x39, 0x84, 0x33, 0x90, 0x7e, 0x58, 0xa9, 0x54, 0x2b, + 0xb9, 0x14, 0xce, 0xc2, 0x6c, 0xad, 0xba, 0xf3, 0xf8, 0x49, 0xb5, 0x92, 0x9b, 0xe2, 0x73, 0x1e, + 0xae, 0xef, 0x6f, 0x3f, 0xa9, 0xe6, 0xa6, 0xcd, 0x4f, 0x53, 0x32, 0xf6, 0x5f, 0xc1, 0x9d, 0xf7, + 0xca, 0xc3, 0x2e, 0x18, 0x30, 0xe9, 0xf3, 0x07, 0xcc, 0x4c, 0xa2, 0xe3, 0xf0, 0x5f, 0x08, 0x16, + 0x83, 0x2a, 0x51, 0x21, 0xb3, 0x05, 0x73, 0x2a, 0xdf, 0xd5, 0x71, 0x92, 0x2c, 0x5b, 0x1e, 0x48, + 0xff, 0xaf, 0x43, 0xc0, 0xfc, 0x25, 0x82, 0xeb, 0x75, 0xa2, 0x19, 0x4a, 0x07, 0x7b, 0x79, 0x2e, + 0x8c, 0xb7, 0x82, 0xae, 0x5d, 0x4a, 0xc2, 0x3a, 0xe0, 0xbe, 0xdc, 0xa4, 0x94, 0x59, 0x1e, 0x93, + 0xcc, 0xa7, 0xc6, 0x32, 0xcf, 0x88, 0xd9, 0xbc, 0x6d, 0x96, 0x20, 0x5f, 0xeb, 0x3b, 0x22, 0x32, + 0x2a, 0x36, 0x6d, 0xba, 0x27, 0xc4, 0x3b, 0x1d, 0xe3, 0xad, 0xe6, 0x67, 0x08, 0x16, 0x0f, 0x7a, + 0xad, 0xe1, 0x72, 0xc9, 0x57, 0xf7, 0xa0, 0x09, 0xea, 0x1e, 0x7c, 0x0f, 0xb2, 0x7d, 0xb1, 0xbe, + 0xf4, 0xd1, 0xd4, 0x58, 0x1f, 0x05, 0x39, 0x9d, 0xff, 0x36, 0x3f, 0x47, 0xf0, 0x65, 0x89, 0x6e, + 0x54, 0x11, 0xe3, 0xc0, 0xb5, 0xc8, 0xe2, 0x5b, 0x01, 0xbf, 0x1b, 0x0b, 0x78, 0xe4, 0x06, 0x8b, + 0x6e, 0x44, 0xef, 0x64, 0x94, 0x7e, 0x82, 0x60, 0x41, 0x52, 0x0a, 0x56, 0x58, 0x67, 0x05, 0x1f, + 0x3a, 0x77, 0xc1, 0x37, 0x19, 0xb2, 0xff, 0x20, 0x30, 0x14, 0x32, 0xb5, 0xd9, 0x8e, 0xe5, 0x1d, + 0x0f, 0xb4, 0xfc, 0x4d, 0xb8, 0x1c, 0x7c, 0xa1, 0x50, 0x40, 0xe3, 0xf9, 0x7a, 0x70, 0xc9, 0x79, + 0xea, 0x6f, 0x4e, 0x04, 0x7b, 0x82, 0x80, 0x29, 0x7d, 0xff, 0x06, 0x5c, 0xd6, 0xc0, 0xd6, 0x05, + 0x5e, 0xfc, 0x0b, 0x04, 0x97, 0xfc, 0x75, 0x3a, 0x5e, 0x8d, 0x45, 0x2f, 0xa2, 0xb4, 0x37, 0x92, + 0x58, 0xd0, 0xbc, 0xfb, 0x83, 0xbf, 0xfd, 0xfb, 0x47, 0xa9, 0x77, 0xcd, 0x9b, 0x83, 0x47, 0x9d, + 0xef, 0xca, 0x08, 0xbd, 0xef, 0x77, 0x41, 0x5a, 0xbc, 0xf5, 0x5c, 0xbd, 0xf6, 
0xd0, 0xb2, 0x36, + 0xfa, 0xef, 0x11, 0xcc, 0x07, 0x5e, 0x3c, 0xf0, 0xdd, 0x04, 0x98, 0x83, 0x61, 0x6f, 0x24, 0x8a, + 0x72, 0xb3, 0x2a, 0x50, 0x3f, 0x30, 0xdf, 0x1b, 0x87, 0x5a, 0x83, 0xe6, 0xf8, 0xf5, 0x19, 0x5f, + 0x1e, 0x9c, 0x11, 0x9f, 0x22, 0xb8, 0xb4, 0x49, 0xd8, 0xb6, 0xd5, 0xdd, 0x13, 0xaf, 0x51, 0xd8, + 0xd4, 0x28, 0x6c, 0xab, 0x5b, 0x38, 0x59, 0x29, 0xf8, 0x07, 0x35, 0xd2, 0x6b, 0xa1, 0x39, 0x72, + 0x74, 0x00, 0xa9, 0x7c, 0x06, 0xc9, 0x23, 0x72, 0xf7, 0x11, 0xa0, 0xca, 0x6d, 0xdf, 0x0e, 0x65, + 0x74, 0x0b, 0xff, 0x03, 0xc1, 0x17, 0x5f, 0xf2, 0x76, 0x82, 0xd7, 0xe3, 0x95, 0x4c, 0x23, 0x5f, + 0x5e, 0x8c, 0xf3, 0x9f, 0x4a, 0xe6, 0x87, 0x82, 0xe2, 0x07, 0xd8, 0xa7, 0x75, 0x7e, 0x09, 0x0d, + 0xd1, 0x8b, 0x3a, 0xbc, 0x9e, 0xe3, 0x9f, 0x21, 0xc8, 0x0c, 0xde, 0x77, 0xf0, 0xfb, 0x71, 0xb9, + 0x4c, 0xe0, 0xdb, 0x2b, 0x02, 0xef, 0xdb, 0xf8, 0xe6, 0x18, 0xbc, 0x67, 0xe6, 0xc0, 0xbf, 0xd5, + 0x95, 0xab, 0xcc, 0xf3, 0xf0, 0x9d, 0xf8, 0x55, 0x6a, 0xa0, 0x38, 0x32, 0x56, 0x93, 0x0b, 0xca, + 0x5c, 0x66, 0x10, 0x91, 0x85, 0xf1, 0x11, 0x29, 0x2b, 0x84, 0xb2, 0x28, 0xef, 0xb9, 0xf3, 0xfc, + 0x05, 0xc1, 0x7c, 0xe0, 0x39, 0x21, 0x66, 0x44, 0x46, 0xbd, 0xad, 0x18, 0xe5, 0xf3, 0x88, 0x2a, + 0x0e, 0x11, 0xc1, 0x90, 0x20, 0x3e, 0x07, 0x7c, 0x7e, 0x8e, 0x00, 0xce, 0x92, 0x6e, 0xfc, 0x41, + 0xe2, 0xc4, 0x5d, 0x32, 0xb9, 0x73, 0xce, 0x84, 0xdf, 0xbc, 0x2d, 0x68, 0xdc, 0xc2, 0xcb, 0x71, + 0x4d, 0x81, 0xff, 0x80, 0xe0, 0x92, 0x3f, 0x43, 0x8d, 0x79, 0x8c, 0x47, 0xe4, 0xf9, 0x31, 0x03, + 0x35, 0x2a, 0x1d, 0x8e, 0x0a, 0xd4, 0xf8, 0xea, 0xc7, 0xbf, 0x42, 0x90, 0xf5, 0x3d, 0x3a, 0xe0, + 0xf8, 0xea, 0x0b, 0xbe, 0x88, 0x18, 0xab, 0xc9, 0x05, 0x15, 0x81, 0x88, 0xc8, 0x1d, 0x73, 0x2b, + 0x71, 0xd4, 0x57, 0x87, 0xb2, 0x50, 0x7c, 0x3f, 0x16, 0x84, 0x97, 0x65, 0xaf, 0xc6, 0x1b, 0x5a, + 0xdc, 0xf7, 0x61, 0xa2, 0xf0, 0x58, 0x7f, 0x98, 0x30, 0xd7, 0x04, 0xcc, 0xb2, 0xf9, 0x7e, 0xec, + 0x50, 0xf5, 0xfa, 0xce, 0x60, 0x13, 0xee, 0xe1, 0x7f, 0x42, 0x70, 0x25, 0x94, 0xee, 0xe3, 0x7b, + 0x31, 0x13, 0x9b, 0xa8, 0x22, 0x21, 0xe1, 0x3d, 0xba, 0x25, 0x08, 0x3c, 0x32, 0xef, 0xc7, 0x3d, + 0x21, 0x07, 0x5e, 0xc2, 0xef, 0x2e, 0x4a, 0x98, 0xd8, 0x9b, 0x13, 0xe1, 0x57, 0x69, 0x7d, 0xd4, + 0x55, 0x5a, 0xbf, 0xf0, 0xab, 0x94, 0x86, 0xae, 0xd2, 0x5f, 0x23, 0xc0, 0xfb, 0x84, 0x8a, 0x4e, + 0xe2, 0x75, 0x6d, 0x4a, 0xb9, 0x0c, 0x5e, 0x0e, 0x6d, 0x3a, 0x3c, 0x45, 0xc3, 0xbb, 0x19, 0x63, + 0xa6, 0x72, 0xd8, 0xaf, 0x0b, 0xc8, 0x55, 0x73, 0x2d, 0x19, 0x64, 0x36, 0xb4, 0x22, 0x07, 0xfe, + 0x67, 0x04, 0xf3, 0x81, 0xda, 0x28, 0xe6, 0x31, 0x1e, 0x55, 0x4f, 0x25, 0x74, 0x88, 0x1d, 0xc1, + 0x63, 0xb3, 0xf4, 0xe1, 0x19, 0x0f, 0xfd, 0x49, 0x32, 0x81, 0x63, 0x0c, 0x12, 0xac, 0x4f, 0x52, + 0x3a, 0xb5, 0x8f, 0x4c, 0x68, 0x36, 0x12, 0xd0, 0xba, 0xa0, 0x9c, 0xc6, 0x16, 0x84, 0x9b, 0xa5, + 0xaf, 0x9d, 0x11, 0x8e, 0xfe, 0xa0, 0x1a, 0x3f, 0xd3, 0x29, 0x47, 0x57, 0x85, 0xf8, 0x37, 0x08, + 0x2e, 0xf9, 0x2b, 0xb0, 0x98, 0x77, 0x43, 0x44, 0xd1, 0x96, 0x2c, 0x0d, 0x52, 0xa7, 0x54, 0xe9, + 0xf6, 0x19, 0x45, 0xf5, 0xe1, 0x76, 0x4c, 0x36, 0x34, 0xc8, 0xf4, 0x3f, 0x4f, 0x0d, 0x6a, 0xc7, + 0x40, 0xfd, 0xf4, 0x20, 0x09, 0x81, 0x88, 0xda, 0xce, 0x38, 0x47, 0x0d, 0x67, 0xfe, 0x11, 0x09, + 0x3e, 0xbf, 0x43, 0xa5, 0xaa, 0x8f, 0x50, 0xf0, 0x1b, 0x76, 0x24, 0x31, 0x79, 0x04, 0x73, 0x86, + 0xfe, 0xf5, 0x9e, 0x97, 0x43, 0xe5, 0xe5, 0xd3, 0x66, 0xe9, 0xa3, 0x64, 0x0b, 0x47, 0x05, 0xc1, + 0x98, 0x4d, 0x1e, 0x7d, 0x0f, 0xbe, 0xda, 0x74, 0xbb, 0x71, 0xe8, 0xef, 0xa1, 0xa7, 0x1f, 0xa9, + 0x69, 0x6d, 0xb7, 0x63, 0x39, 0xed, 0x82, 0xeb, 0xb5, 0x8b, 0x6d, 0xe2, 0x88, 0x1a, 0x53, 0xfd, + 0x1b, 
0x82, 0xd5, 0xb3, 0xe9, 0xc8, 0x6f, 0xf5, 0xf7, 0x82, 0xdd, 0x87, 0x33, 0x42, 0xfa, 0xdd, + 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x70, 0x64, 0x0e, 0x6a, 0x4c, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/source.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/source.pb.go new file mode 100644 index 0000000..267147e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1/source.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/securitycenter/v1beta1/source.proto + +package securitycenter // import "google.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Cloud Security Command Center's (Cloud SCC) finding source. A finding source +// is an entity or a mechanism that can produce a finding. A source is like a +// container of findings that come from the same scanner, logger, monitor, etc. +type Source struct { + // The relative resource name of this source. See: + // https://cloud.google.com/apis/design/resource_names#relative_resource_name + // Example: + // "organizations/123/sources/456" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The source's display name. + // A source's display name must be unique amongst its siblings, for example, + // two sources with the same parent can't share the same display name. + // The display name must start and end with a letter or digit, may contain + // letters, digits, spaces, hyphens, and underscores, and can be no longer + // than 32 characters. This is captured by the regular expression: + // [\p{L}\p{N}]({\p{L}\p{N}_- ]{0,30}[\p{L}\p{N}])?. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The description of the source (max of 1024 characters). + // Example: + // "Cloud Security Scanner is a web security scanner for common + // vulnerabilities in App Engine applications. It can automatically + // scan and detect four common vulnerabilities, including cross-site-scripting + // (XSS), Flash injection, mixed content (HTTP in HTTPS), and + // outdated/insecure libraries." 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_source_8609a031d711b388, []int{0} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func (m *Source) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Source) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Source) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*Source)(nil), "google.cloud.securitycenter.v1beta1.Source") +} + +func init() { + proto.RegisterFile("google/cloud/securitycenter/v1beta1/source.proto", fileDescriptor_source_8609a031d711b388) +} + +var fileDescriptor_source_8609a031d711b388 = []byte{ + // 217 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x3f, 0x4b, 0xc5, 0x30, + 0x14, 0xc5, 0xa9, 0x4a, 0xc1, 0xd4, 0x29, 0x53, 0x11, 0x87, 0xaa, 0x83, 0x4e, 0x89, 0xc5, 0xd1, + 0xcd, 0x0f, 0x20, 0xfe, 0xd9, 0x5c, 0xe4, 0x36, 0xbd, 0x84, 0x40, 0x9a, 0x1b, 0x92, 0x54, 0xe8, + 0xe2, 0x67, 0x97, 0xa6, 0x81, 0x47, 0x97, 0xc7, 0xdb, 0xc2, 0x39, 0xe7, 0xf7, 0xe3, 0x12, 0xf6, + 0xa4, 0x89, 0xb4, 0x45, 0xa9, 0x2c, 0xcd, 0xa3, 0x8c, 0xa8, 0xe6, 0x60, 0xd2, 0xa2, 0xd0, 0x25, + 0x0c, 0xf2, 0xb7, 0x1f, 0x30, 0x41, 0x2f, 0x23, 0xcd, 0x41, 0xa1, 0xf0, 0x81, 0x12, 0xf1, 0xfb, + 0x8d, 0x10, 0x99, 0x10, 0x7b, 0x42, 0x14, 0xe2, 0xfa, 0xa6, 0x68, 0xc1, 0x1b, 0x09, 0xce, 0x51, + 0x82, 0x64, 0xc8, 0xc5, 0x4d, 0x71, 0x07, 0xac, 0xfe, 0xca, 0x4a, 0xce, 0xd9, 0x85, 0x83, 0x09, + 0xdb, 0xaa, 0xab, 0x1e, 0x2f, 0x3f, 0xf3, 0x9b, 0xdf, 0xb2, 0xab, 0xd1, 0x44, 0x6f, 0x61, 0xf9, + 0xc9, 0xdd, 0x59, 0xee, 0x9a, 0x92, 0xbd, 0xad, 0x93, 0x8e, 0x35, 0x23, 0x46, 0x15, 0x8c, 0x5f, + 0xb5, 0xed, 0x79, 0x59, 0x1c, 0xa2, 0xd7, 0x3f, 0xf6, 0xa0, 0x68, 0x12, 0x27, 0xdc, 0xfa, 0x5e, + 0x7d, 0x7f, 0x94, 0x99, 0x26, 0x0b, 0x4e, 0x0b, 0x0a, 0x5a, 0x6a, 0x74, 0xf9, 0x56, 0xb9, 0x55, + 0xe0, 0x4d, 0x3c, 0xfa, 0x47, 0x2f, 0xfb, 0x78, 0xa8, 0x33, 0xfd, 0xfc, 0x1f, 0x00, 0x00, 0xff, + 0xff, 0x52, 0x75, 0xca, 0xb1, 0x60, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1/cloud_speech.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1/cloud_speech.pb.go new file mode 100644 index 0000000..2ecdd1b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1/cloud_speech.pb.go @@ -0,0 +1,1731 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/speech/v1/cloud_speech.proto + +package speech // import "google.golang.org/genproto/googleapis/cloud/speech/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The encoding of the audio data sent in the request. +// +// All encodings support only 1 channel (mono) audio. +// +// For best results, the audio source should be captured and transmitted using +// a lossless encoding (`FLAC` or `LINEAR16`). The accuracy of the speech +// recognition can be reduced if lossy codecs are used to capture or transmit +// audio, particularly if background noise is present. Lossy codecs include +// `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, and `SPEEX_WITH_HEADER_BYTE`. +// +// The `FLAC` and `WAV` audio file formats include a header that describes the +// included audio content. You can request recognition for `WAV` files that +// contain either `LINEAR16` or `MULAW` encoded audio. +// If you send `FLAC` or `WAV` audio file format in +// your request, you do not need to specify an `AudioEncoding`; the audio +// encoding format is determined from the file header. If you specify +// an `AudioEncoding` when you send send `FLAC` or `WAV` audio, the +// encoding configuration must match the encoding described in the audio +// header; otherwise the request returns an +// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error +// code. +type RecognitionConfig_AudioEncoding int32 + +const ( + // Not specified. + RecognitionConfig_ENCODING_UNSPECIFIED RecognitionConfig_AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + RecognitionConfig_LINEAR16 RecognitionConfig_AudioEncoding = 1 + // `FLAC` (Free Lossless Audio + // Codec) is the recommended encoding because it is + // lossless--therefore recognition is not compromised--and + // requires only about half the bandwidth of `LINEAR16`. `FLAC` stream + // encoding supports 16-bit and 24-bit samples, however, not all fields in + // `STREAMINFO` are supported. + RecognitionConfig_FLAC RecognitionConfig_AudioEncoding = 2 + // 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + RecognitionConfig_MULAW RecognitionConfig_AudioEncoding = 3 + // Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz` must be 8000. + RecognitionConfig_AMR RecognitionConfig_AudioEncoding = 4 + // Adaptive Multi-Rate Wideband codec. `sample_rate_hertz` must be 16000. 
+ RecognitionConfig_AMR_WB RecognitionConfig_AudioEncoding = 5 + // Opus encoded audio frames in Ogg container + // ([OggOpus](https://wiki.xiph.org/OggOpus)). + // `sample_rate_hertz` must be one of 8000, 12000, 16000, 24000, or 48000. + RecognitionConfig_OGG_OPUS RecognitionConfig_AudioEncoding = 6 + // Although the use of lossy encodings is not recommended, if a very low + // bitrate encoding is required, `OGG_OPUS` is highly preferred over + // Speex encoding. The [Speex](https://speex.org/) encoding supported by + // Cloud Speech API has a header byte in each block, as in MIME type + // `audio/x-speex-with-header-byte`. + // It is a variant of the RTP Speex encoding defined in + // [RFC 5574](https://tools.ietf.org/html/rfc5574). + // The stream is a sequence of blocks, one block per RTP packet. Each block + // starts with a byte containing the length of the block, in bytes, followed + // by one or more frames of Speex data, padded to an integral number of + // bytes (octets) as specified in RFC 5574. In other words, each RTP header + // is replaced with a single byte containing the block length. Only Speex + // wideband is supported. `sample_rate_hertz` must be 16000. + RecognitionConfig_SPEEX_WITH_HEADER_BYTE RecognitionConfig_AudioEncoding = 7 +) + +var RecognitionConfig_AudioEncoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "FLAC", + 3: "MULAW", + 4: "AMR", + 5: "AMR_WB", + 6: "OGG_OPUS", + 7: "SPEEX_WITH_HEADER_BYTE", +} +var RecognitionConfig_AudioEncoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "FLAC": 2, + "MULAW": 3, + "AMR": 4, + "AMR_WB": 5, + "OGG_OPUS": 6, + "SPEEX_WITH_HEADER_BYTE": 7, +} + +func (x RecognitionConfig_AudioEncoding) String() string { + return proto.EnumName(RecognitionConfig_AudioEncoding_name, int32(x)) +} +func (RecognitionConfig_AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{4, 0} +} + +// Indicates the type of speech event. +type StreamingRecognizeResponse_SpeechEventType int32 + +const ( + // No speech event specified. + StreamingRecognizeResponse_SPEECH_EVENT_UNSPECIFIED StreamingRecognizeResponse_SpeechEventType = 0 + // This event indicates that the server has detected the end of the user's + // speech utterance and expects no additional speech. Therefore, the server + // will not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. This event is only sent if + // `single_utterance` was set to `true`, and is not used otherwise. + StreamingRecognizeResponse_END_OF_SINGLE_UTTERANCE StreamingRecognizeResponse_SpeechEventType = 1 +) + +var StreamingRecognizeResponse_SpeechEventType_name = map[int32]string{ + 0: "SPEECH_EVENT_UNSPECIFIED", + 1: "END_OF_SINGLE_UTTERANCE", +} +var StreamingRecognizeResponse_SpeechEventType_value = map[string]int32{ + "SPEECH_EVENT_UNSPECIFIED": 0, + "END_OF_SINGLE_UTTERANCE": 1, +} + +func (x StreamingRecognizeResponse_SpeechEventType) String() string { + return proto.EnumName(StreamingRecognizeResponse_SpeechEventType_name, int32(x)) +} +func (StreamingRecognizeResponse_SpeechEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{10, 0} +} + +// The top-level message sent by the client for the `Recognize` method. 
+type RecognizeRequest struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Required* The audio data to be recognized. + Audio *RecognitionAudio `protobuf:"bytes,2,opt,name=audio,proto3" json:"audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognizeRequest) Reset() { *m = RecognizeRequest{} } +func (m *RecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*RecognizeRequest) ProtoMessage() {} +func (*RecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{0} +} +func (m *RecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognizeRequest.Unmarshal(m, b) +} +func (m *RecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *RecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognizeRequest.Merge(dst, src) +} +func (m *RecognizeRequest) XXX_Size() int { + return xxx_messageInfo_RecognizeRequest.Size(m) +} +func (m *RecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognizeRequest proto.InternalMessageInfo + +func (m *RecognizeRequest) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *RecognizeRequest) GetAudio() *RecognitionAudio { + if m != nil { + return m.Audio + } + return nil +} + +// The top-level message sent by the client for the `LongRunningRecognize` +// method. +type LongRunningRecognizeRequest struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Required* The audio data to be recognized. 
+ Audio *RecognitionAudio `protobuf:"bytes,2,opt,name=audio,proto3" json:"audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeRequest) Reset() { *m = LongRunningRecognizeRequest{} } +func (m *LongRunningRecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeRequest) ProtoMessage() {} +func (*LongRunningRecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{1} +} +func (m *LongRunningRecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeRequest.Unmarshal(m, b) +} +func (m *LongRunningRecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeRequest.Merge(dst, src) +} +func (m *LongRunningRecognizeRequest) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeRequest.Size(m) +} +func (m *LongRunningRecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeRequest proto.InternalMessageInfo + +func (m *LongRunningRecognizeRequest) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *LongRunningRecognizeRequest) GetAudio() *RecognitionAudio { + if m != nil { + return m.Audio + } + return nil +} + +// The top-level message sent by the client for the `StreamingRecognize` method. +// Multiple `StreamingRecognizeRequest` messages are sent. The first message +// must contain a `streaming_config` message and must not contain `audio` data. +// All subsequent messages must contain `audio` data and must not contain a +// `streaming_config` message. +type StreamingRecognizeRequest struct { + // The streaming request, which is either a streaming config or audio content. 
+ // + // Types that are valid to be assigned to StreamingRequest: + // *StreamingRecognizeRequest_StreamingConfig + // *StreamingRecognizeRequest_AudioContent + StreamingRequest isStreamingRecognizeRequest_StreamingRequest `protobuf_oneof:"streaming_request"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognizeRequest) Reset() { *m = StreamingRecognizeRequest{} } +func (m *StreamingRecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognizeRequest) ProtoMessage() {} +func (*StreamingRecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{2} +} +func (m *StreamingRecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognizeRequest.Unmarshal(m, b) +} +func (m *StreamingRecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognizeRequest.Merge(dst, src) +} +func (m *StreamingRecognizeRequest) XXX_Size() int { + return xxx_messageInfo_StreamingRecognizeRequest.Size(m) +} +func (m *StreamingRecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognizeRequest proto.InternalMessageInfo + +type isStreamingRecognizeRequest_StreamingRequest interface { + isStreamingRecognizeRequest_StreamingRequest() +} + +type StreamingRecognizeRequest_StreamingConfig struct { + StreamingConfig *StreamingRecognitionConfig `protobuf:"bytes,1,opt,name=streaming_config,json=streamingConfig,proto3,oneof"` +} + +type StreamingRecognizeRequest_AudioContent struct { + AudioContent []byte `protobuf:"bytes,2,opt,name=audio_content,json=audioContent,proto3,oneof"` +} + +func (*StreamingRecognizeRequest_StreamingConfig) isStreamingRecognizeRequest_StreamingRequest() {} + +func (*StreamingRecognizeRequest_AudioContent) isStreamingRecognizeRequest_StreamingRequest() {} + +func (m *StreamingRecognizeRequest) GetStreamingRequest() isStreamingRecognizeRequest_StreamingRequest { + if m != nil { + return m.StreamingRequest + } + return nil +} + +func (m *StreamingRecognizeRequest) GetStreamingConfig() *StreamingRecognitionConfig { + if x, ok := m.GetStreamingRequest().(*StreamingRecognizeRequest_StreamingConfig); ok { + return x.StreamingConfig + } + return nil +} + +func (m *StreamingRecognizeRequest) GetAudioContent() []byte { + if x, ok := m.GetStreamingRequest().(*StreamingRecognizeRequest_AudioContent); ok { + return x.AudioContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StreamingRecognizeRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StreamingRecognizeRequest_OneofMarshaler, _StreamingRecognizeRequest_OneofUnmarshaler, _StreamingRecognizeRequest_OneofSizer, []interface{}{ + (*StreamingRecognizeRequest_StreamingConfig)(nil), + (*StreamingRecognizeRequest_AudioContent)(nil), + } +} + +func _StreamingRecognizeRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StreamingRecognizeRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingRecognizeRequest_StreamingConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StreamingConfig); err != nil { + return err + } + case *StreamingRecognizeRequest_AudioContent: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AudioContent) + case nil: + default: + return fmt.Errorf("StreamingRecognizeRequest.StreamingRequest has unexpected type %T", x) + } + return nil +} + +func _StreamingRecognizeRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StreamingRecognizeRequest) + switch tag { + case 1: // streaming_request.streaming_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingRecognitionConfig) + err := b.DecodeMessage(msg) + m.StreamingRequest = &StreamingRecognizeRequest_StreamingConfig{msg} + return true, err + case 2: // streaming_request.audio_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StreamingRequest = &StreamingRecognizeRequest_AudioContent{x} + return true, err + default: + return false, nil + } +} + +func _StreamingRecognizeRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StreamingRecognizeRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingRecognizeRequest_StreamingConfig: + s := proto.Size(x.StreamingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingRecognizeRequest_AudioContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AudioContent))) + n += len(x.AudioContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Provides information to the recognizer that specifies how to process the +// request. +type StreamingRecognitionConfig struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Optional* If `false` or omitted, the recognizer will perform continuous + // recognition (continuing to wait for and process audio even if the user + // pauses speaking) until the client closes the input stream (gRPC API) or + // until the maximum time limit has been reached. May return multiple + // `StreamingRecognitionResult`s with the `is_final` flag set to `true`. + // + // If `true`, the recognizer will detect a single spoken utterance. When it + // detects that the user has paused or stopped speaking, it will return an + // `END_OF_SINGLE_UTTERANCE` event and cease recognition. It will return no + // more than one `StreamingRecognitionResult` with the `is_final` flag set to + // `true`. 
+ SingleUtterance bool `protobuf:"varint,2,opt,name=single_utterance,json=singleUtterance,proto3" json:"single_utterance,omitempty"` + // *Optional* If `true`, interim results (tentative hypotheses) may be + // returned as they become available (these interim results are indicated with + // the `is_final=false` flag). + // If `false` or omitted, only `is_final=true` result(s) are returned. + InterimResults bool `protobuf:"varint,3,opt,name=interim_results,json=interimResults,proto3" json:"interim_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionConfig) Reset() { *m = StreamingRecognitionConfig{} } +func (m *StreamingRecognitionConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionConfig) ProtoMessage() {} +func (*StreamingRecognitionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{3} +} +func (m *StreamingRecognitionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionConfig.Unmarshal(m, b) +} +func (m *StreamingRecognitionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionConfig.Merge(dst, src) +} +func (m *StreamingRecognitionConfig) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionConfig.Size(m) +} +func (m *StreamingRecognitionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionConfig proto.InternalMessageInfo + +func (m *StreamingRecognitionConfig) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *StreamingRecognitionConfig) GetSingleUtterance() bool { + if m != nil { + return m.SingleUtterance + } + return false +} + +func (m *StreamingRecognitionConfig) GetInterimResults() bool { + if m != nil { + return m.InterimResults + } + return false +} + +// Provides information to the recognizer that specifies how to process the +// request. +type RecognitionConfig struct { + // Encoding of audio data sent in all `RecognitionAudio` messages. + // This field is optional for `FLAC` and `WAV` audio files and required + // for all other audio formats. For details, see + // [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. + Encoding RecognitionConfig_AudioEncoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.cloud.speech.v1.RecognitionConfig_AudioEncoding" json:"encoding,omitempty"` + // Sample rate in Hertz of the audio data sent in all + // `RecognitionAudio` messages. Valid values are: 8000-48000. + // 16000 is optimal. For best results, set the sampling rate of the audio + // source to 16000 Hz. If that's not possible, use the native sample rate of + // the audio source (instead of re-sampling). + // This field is optional for `FLAC` and `WAV` audio files and required + // for all other audio formats. For details, see + // [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // *Optional* The number of channels in the input audio data. + // ONLY set this for MULTI-CHANNEL recognition. 
+ // Valid values for LINEAR16 and FLAC are `1`-`8`. + // Valid values for OGG_OPUS are '1'-'254'. + // Valid value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. + // If `0` or omitted, defaults to one channel (mono). + // Note: We only recognize the first channel by default. + // To perform independent recognition on each channel set + // `enable_separate_recognition_per_channel` to 'true'. + AudioChannelCount int32 `protobuf:"varint,7,opt,name=audio_channel_count,json=audioChannelCount,proto3" json:"audio_channel_count,omitempty"` + // This needs to be set to `true` explicitly and `audio_channel_count` > 1 + // to get each channel recognized separately. The recognition result will + // contain a `channel_tag` field to state which channel that result belongs + // to. If this is not true, we will only recognize the first channel. The + // request is billed cumulatively for all channels recognized: + // `audio_channel_count` multiplied by the length of the audio. + EnableSeparateRecognitionPerChannel bool `protobuf:"varint,12,opt,name=enable_separate_recognition_per_channel,json=enableSeparateRecognitionPerChannel,proto3" json:"enable_separate_recognition_per_channel,omitempty"` + // *Required* The language of the supplied audio as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. + // Example: "en-US". + // See [Language Support](/speech-to-text/docs/languages) + // for a list of the currently supported language codes. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // *Optional* Maximum number of recognition hypotheses to be returned. + // Specifically, the maximum number of `SpeechRecognitionAlternative` messages + // within each `SpeechRecognitionResult`. + // The server may return fewer than `max_alternatives`. + // Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of + // one. If omitted, will return a maximum of one. + MaxAlternatives int32 `protobuf:"varint,4,opt,name=max_alternatives,json=maxAlternatives,proto3" json:"max_alternatives,omitempty"` + // *Optional* If set to `true`, the server will attempt to filter out + // profanities, replacing all but the initial character in each filtered word + // with asterisks, e.g. "f***". If set to `false` or omitted, profanities + // won't be filtered out. + ProfanityFilter bool `protobuf:"varint,5,opt,name=profanity_filter,json=profanityFilter,proto3" json:"profanity_filter,omitempty"` + // *Optional* array of [SpeechContext][google.cloud.speech.v1.SpeechContext]. + // A means to provide context to assist the speech recognition. For more + // information, see [Phrase Hints](/speech-to-text/docs/basics#phrase-hints). + SpeechContexts []*SpeechContext `protobuf:"bytes,6,rep,name=speech_contexts,json=speechContexts,proto3" json:"speech_contexts,omitempty"` + // *Optional* If `true`, the top result includes a list of words and + // the start and end time offsets (timestamps) for those words. If + // `false`, no word-level time offset information is returned. The default is + // `false`. + EnableWordTimeOffsets bool `protobuf:"varint,8,opt,name=enable_word_time_offsets,json=enableWordTimeOffsets,proto3" json:"enable_word_time_offsets,omitempty"` + // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // This feature is only available in select languages. Setting this for + // requests in other languages has no effect at all. 
+ // The default 'false' value does not add punctuation to result hypotheses. + // Note: This is currently offered as an experimental service, complimentary + // to all users. In the future this may be exclusively available as a + // premium feature. + EnableAutomaticPunctuation bool `protobuf:"varint,11,opt,name=enable_automatic_punctuation,json=enableAutomaticPunctuation,proto3" json:"enable_automatic_punctuation,omitempty"` + // *Optional* Which model to select for the given request. Select the model + // best suited to your domain to get best results. If a model is not + // explicitly specified, then we auto-select a model based on the parameters + // in the RecognitionConfig. + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + //
+ // <table>
+ //   <tr>
+ //     <td><b>Model</b></td>
+ //     <td><b>Description</b></td>
+ //   </tr>
+ //   <tr>
+ //     <td><code>command_and_search</code></td>
+ //     <td>Best for short queries such as voice commands or voice search.</td>
+ //   </tr>
+ //   <tr>
+ //     <td><code>phone_call</code></td>
+ //     <td>Best for audio that originated from a phone call (typically
+ //     recorded at an 8khz sampling rate).</td>
+ //   </tr>
+ //   <tr>
+ //     <td><code>video</code></td>
+ //     <td>Best for audio that originated from video or includes multiple
+ //     speakers. Ideally the audio is recorded at a 16khz or greater
+ //     sampling rate. This is a premium model that costs more than the
+ //     standard rate.</td>
+ //   </tr>
+ //   <tr>
+ //     <td><code>default</code></td>
+ //     <td>Best for audio that is not one of the specific audio models.
+ //     For example, long-form audio. Ideally the audio is high-fidelity,
+ //     recorded at a 16khz or greater sampling rate.</td>
+ //   </tr>
+ // </table>
+ Model string `protobuf:"bytes,13,opt,name=model,proto3" json:"model,omitempty"` + // *Optional* Set to true to use an enhanced model for speech recognition. + // If `use_enhanced` is set to true and the `model` field is not set, then + // an appropriate enhanced model is chosen if: + // 1. project is eligible for requesting enhanced models + // 2. an enhanced model exists for the audio + // + // If `use_enhanced` is true and an enhanced version of the specified model + // does not exist, then the speech is recognized using the standard version + // of the specified model. + // + // Enhanced speech models require that you opt-in to data logging using + // instructions in the + // [documentation](/speech-to-text/docs/enable-data-logging). If you set + // `use_enhanced` to true and you have not enabled audio logging, then you + // will receive an error. + UseEnhanced bool `protobuf:"varint,14,opt,name=use_enhanced,json=useEnhanced,proto3" json:"use_enhanced,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognitionConfig) Reset() { *m = RecognitionConfig{} } +func (m *RecognitionConfig) String() string { return proto.CompactTextString(m) } +func (*RecognitionConfig) ProtoMessage() {} +func (*RecognitionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{4} +} +func (m *RecognitionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognitionConfig.Unmarshal(m, b) +} +func (m *RecognitionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognitionConfig.Marshal(b, m, deterministic) +} +func (dst *RecognitionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognitionConfig.Merge(dst, src) +} +func (m *RecognitionConfig) XXX_Size() int { + return xxx_messageInfo_RecognitionConfig.Size(m) +} +func (m *RecognitionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RecognitionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognitionConfig proto.InternalMessageInfo + +func (m *RecognitionConfig) GetEncoding() RecognitionConfig_AudioEncoding { + if m != nil { + return m.Encoding + } + return RecognitionConfig_ENCODING_UNSPECIFIED +} + +func (m *RecognitionConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *RecognitionConfig) GetAudioChannelCount() int32 { + if m != nil { + return m.AudioChannelCount + } + return 0 +} + +func (m *RecognitionConfig) GetEnableSeparateRecognitionPerChannel() bool { + if m != nil { + return m.EnableSeparateRecognitionPerChannel + } + return false +} + +func (m *RecognitionConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *RecognitionConfig) GetMaxAlternatives() int32 { + if m != nil { + return m.MaxAlternatives + } + return 0 +} + +func (m *RecognitionConfig) GetProfanityFilter() bool { + if m != nil { + return m.ProfanityFilter + } + return false +} + +func (m *RecognitionConfig) GetSpeechContexts() []*SpeechContext { + if m != nil { + return m.SpeechContexts + } + return nil +} + +func (m *RecognitionConfig) GetEnableWordTimeOffsets() bool { + if m != nil { + return m.EnableWordTimeOffsets + } + return false +} + +func (m *RecognitionConfig) GetEnableAutomaticPunctuation() bool { + if m != nil { + return m.EnableAutomaticPunctuation + } + return false +} + +func (m *RecognitionConfig) GetModel() string { + if m != nil { + return m.Model + } + return 
"" +} + +func (m *RecognitionConfig) GetUseEnhanced() bool { + if m != nil { + return m.UseEnhanced + } + return false +} + +// Provides "hints" to the speech recognizer to favor specific words and phrases +// in the results. +type SpeechContext struct { + // *Optional* A list of strings containing words and phrases "hints" so that + // the speech recognition is more likely to recognize them. This can be used + // to improve the accuracy for specific words and phrases, for example, if + // specific commands are typically spoken by the user. This can also be used + // to add additional words to the vocabulary of the recognizer. See + // [usage limits](/speech-to-text/quotas#content). + Phrases []string `protobuf:"bytes,1,rep,name=phrases,proto3" json:"phrases,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechContext) Reset() { *m = SpeechContext{} } +func (m *SpeechContext) String() string { return proto.CompactTextString(m) } +func (*SpeechContext) ProtoMessage() {} +func (*SpeechContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{5} +} +func (m *SpeechContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechContext.Unmarshal(m, b) +} +func (m *SpeechContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechContext.Marshal(b, m, deterministic) +} +func (dst *SpeechContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechContext.Merge(dst, src) +} +func (m *SpeechContext) XXX_Size() int { + return xxx_messageInfo_SpeechContext.Size(m) +} +func (m *SpeechContext) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechContext proto.InternalMessageInfo + +func (m *SpeechContext) GetPhrases() []string { + if m != nil { + return m.Phrases + } + return nil +} + +// Contains audio data in the encoding specified in the `RecognitionConfig`. +// Either `content` or `uri` must be supplied. Supplying both or neither +// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. +// See [content limits](/speech-to-text/quotas#content). +type RecognitionAudio struct { + // The audio source, which is either inline content or a Google Cloud + // Storage uri. 
+ // + // Types that are valid to be assigned to AudioSource: + // *RecognitionAudio_Content + // *RecognitionAudio_Uri + AudioSource isRecognitionAudio_AudioSource `protobuf_oneof:"audio_source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognitionAudio) Reset() { *m = RecognitionAudio{} } +func (m *RecognitionAudio) String() string { return proto.CompactTextString(m) } +func (*RecognitionAudio) ProtoMessage() {} +func (*RecognitionAudio) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{6} +} +func (m *RecognitionAudio) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognitionAudio.Unmarshal(m, b) +} +func (m *RecognitionAudio) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognitionAudio.Marshal(b, m, deterministic) +} +func (dst *RecognitionAudio) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognitionAudio.Merge(dst, src) +} +func (m *RecognitionAudio) XXX_Size() int { + return xxx_messageInfo_RecognitionAudio.Size(m) +} +func (m *RecognitionAudio) XXX_DiscardUnknown() { + xxx_messageInfo_RecognitionAudio.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognitionAudio proto.InternalMessageInfo + +type isRecognitionAudio_AudioSource interface { + isRecognitionAudio_AudioSource() +} + +type RecognitionAudio_Content struct { + Content []byte `protobuf:"bytes,1,opt,name=content,proto3,oneof"` +} + +type RecognitionAudio_Uri struct { + Uri string `protobuf:"bytes,2,opt,name=uri,proto3,oneof"` +} + +func (*RecognitionAudio_Content) isRecognitionAudio_AudioSource() {} + +func (*RecognitionAudio_Uri) isRecognitionAudio_AudioSource() {} + +func (m *RecognitionAudio) GetAudioSource() isRecognitionAudio_AudioSource { + if m != nil { + return m.AudioSource + } + return nil +} + +func (m *RecognitionAudio) GetContent() []byte { + if x, ok := m.GetAudioSource().(*RecognitionAudio_Content); ok { + return x.Content + } + return nil +} + +func (m *RecognitionAudio) GetUri() string { + if x, ok := m.GetAudioSource().(*RecognitionAudio_Uri); ok { + return x.Uri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RecognitionAudio) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RecognitionAudio_OneofMarshaler, _RecognitionAudio_OneofUnmarshaler, _RecognitionAudio_OneofSizer, []interface{}{ + (*RecognitionAudio_Content)(nil), + (*RecognitionAudio_Uri)(nil), + } +} + +func _RecognitionAudio_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RecognitionAudio) + // audio_source + switch x := m.AudioSource.(type) { + case *RecognitionAudio_Content: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Content) + case *RecognitionAudio_Uri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uri) + case nil: + default: + return fmt.Errorf("RecognitionAudio.AudioSource has unexpected type %T", x) + } + return nil +} + +func _RecognitionAudio_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RecognitionAudio) + switch tag { + case 1: // audio_source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.AudioSource = &RecognitionAudio_Content{x} + return true, err + case 2: // audio_source.uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.AudioSource = &RecognitionAudio_Uri{x} + return true, err + default: + return false, nil + } +} + +func _RecognitionAudio_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RecognitionAudio) + // audio_source + switch x := m.AudioSource.(type) { + case *RecognitionAudio_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *RecognitionAudio_Uri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uri))) + n += len(x.Uri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The only message returned to the client by the `Recognize` method. It +// contains the result as zero or more sequential `SpeechRecognitionResult` +// messages. +type RecognizeResponse struct { + // Output only. Sequential list of transcription results corresponding to + // sequential portions of audio. 
+ Results []*SpeechRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognizeResponse) Reset() { *m = RecognizeResponse{} } +func (m *RecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*RecognizeResponse) ProtoMessage() {} +func (*RecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{7} +} +func (m *RecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognizeResponse.Unmarshal(m, b) +} +func (m *RecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *RecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognizeResponse.Merge(dst, src) +} +func (m *RecognizeResponse) XXX_Size() int { + return xxx_messageInfo_RecognizeResponse.Size(m) +} +func (m *RecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognizeResponse proto.InternalMessageInfo + +func (m *RecognizeResponse) GetResults() []*SpeechRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +// The only message returned to the client by the `LongRunningRecognize` method. +// It contains the result as zero or more sequential `SpeechRecognitionResult` +// messages. It is included in the `result.response` field of the `Operation` +// returned by the `GetOperation` call of the `google::longrunning::Operations` +// service. +type LongRunningRecognizeResponse struct { + // Output only. Sequential list of transcription results corresponding to + // sequential portions of audio. + Results []*SpeechRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeResponse) Reset() { *m = LongRunningRecognizeResponse{} } +func (m *LongRunningRecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeResponse) ProtoMessage() {} +func (*LongRunningRecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{8} +} +func (m *LongRunningRecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeResponse.Unmarshal(m, b) +} +func (m *LongRunningRecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeResponse.Merge(dst, src) +} +func (m *LongRunningRecognizeResponse) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeResponse.Size(m) +} +func (m *LongRunningRecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeResponse proto.InternalMessageInfo + +func (m *LongRunningRecognizeResponse) GetResults() []*SpeechRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +// Describes the progress of a long-running `LongRunningRecognize` call. 
It is +// included in the `metadata` field of the `Operation` returned by the +// `GetOperation` call of the `google::longrunning::Operations` service. +type LongRunningRecognizeMetadata struct { + // Approximate percentage of audio processed thus far. Guaranteed to be 100 + // when the audio is fully processed and the results are available. + ProgressPercent int32 `protobuf:"varint,1,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent processing update. + LastUpdateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=last_update_time,json=lastUpdateTime,proto3" json:"last_update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeMetadata) Reset() { *m = LongRunningRecognizeMetadata{} } +func (m *LongRunningRecognizeMetadata) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeMetadata) ProtoMessage() {} +func (*LongRunningRecognizeMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{9} +} +func (m *LongRunningRecognizeMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeMetadata.Unmarshal(m, b) +} +func (m *LongRunningRecognizeMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeMetadata.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeMetadata.Merge(dst, src) +} +func (m *LongRunningRecognizeMetadata) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeMetadata.Size(m) +} +func (m *LongRunningRecognizeMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeMetadata proto.InternalMessageInfo + +func (m *LongRunningRecognizeMetadata) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *LongRunningRecognizeMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *LongRunningRecognizeMetadata) GetLastUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.LastUpdateTime + } + return nil +} + +// `StreamingRecognizeResponse` is the only message returned to the client by +// `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse` +// messages are streamed back to the client. If there is no recognizable +// audio, and `single_utterance` is set to false, then no messages are streamed +// back to the client. +// +// Here's an example of a series of ten `StreamingRecognizeResponse`s that might +// be returned while processing audio: +// +// 1. results { alternatives { transcript: "tube" } stability: 0.01 } +// +// 2. results { alternatives { transcript: "to be a" } stability: 0.01 } +// +// 3. results { alternatives { transcript: "to be" } stability: 0.9 } +// results { alternatives { transcript: " or not to be" } stability: 0.01 } +// +// 4. results { alternatives { transcript: "to be or not to be" +// confidence: 0.92 } +// alternatives { transcript: "to bee or not to bee" } +// is_final: true } +// +// 5. 
results { alternatives { transcript: " that's" } stability: 0.01 } +// +// 6. results { alternatives { transcript: " that is" } stability: 0.9 } +// results { alternatives { transcript: " the question" } stability: 0.01 } +// +// 7. results { alternatives { transcript: " that is the question" +// confidence: 0.98 } +// alternatives { transcript: " that was the question" } +// is_final: true } +// +// Notes: +// +// - Only two of the above responses #4 and #7 contain final results; they are +// indicated by `is_final: true`. Concatenating these together generates the +// full transcript: "to be or not to be that is the question". +// +// - The others contain interim `results`. #3 and #6 contain two interim +// `results`: the first portion has a high stability and is less likely to +// change; the second portion has a low stability and is very likely to +// change. A UI designer might choose to show only high stability `results`. +// +// - The specific `stability` and `confidence` values shown above are only for +// illustrative purposes. Actual values may vary. +// +// - In each response, only one of these fields will be set: +// `error`, +// `speech_event_type`, or +// one or more (repeated) `results`. +type StreamingRecognizeResponse struct { + // Output only. If set, returns a [google.rpc.Status][google.rpc.Status] + // message that specifies the error for the operation. + Error *status.Status `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + // Output only. This repeated list contains zero or more results that + // correspond to consecutive portions of the audio currently being processed. + // It contains zero or one `is_final=true` result (the newly settled portion), + // followed by zero or more `is_final=false` results (the interim results). + Results []*StreamingRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + // Output only. Indicates the type of speech event. 
+ SpeechEventType StreamingRecognizeResponse_SpeechEventType `protobuf:"varint,4,opt,name=speech_event_type,json=speechEventType,proto3,enum=google.cloud.speech.v1.StreamingRecognizeResponse_SpeechEventType" json:"speech_event_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognizeResponse) Reset() { *m = StreamingRecognizeResponse{} } +func (m *StreamingRecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognizeResponse) ProtoMessage() {} +func (*StreamingRecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{10} +} +func (m *StreamingRecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognizeResponse.Unmarshal(m, b) +} +func (m *StreamingRecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognizeResponse.Merge(dst, src) +} +func (m *StreamingRecognizeResponse) XXX_Size() int { + return xxx_messageInfo_StreamingRecognizeResponse.Size(m) +} +func (m *StreamingRecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognizeResponse proto.InternalMessageInfo + +func (m *StreamingRecognizeResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *StreamingRecognizeResponse) GetResults() []*StreamingRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *StreamingRecognizeResponse) GetSpeechEventType() StreamingRecognizeResponse_SpeechEventType { + if m != nil { + return m.SpeechEventType + } + return StreamingRecognizeResponse_SPEECH_EVENT_UNSPECIFIED +} + +// A streaming speech recognition result corresponding to a portion of the audio +// that is currently being processed. +type StreamingRecognitionResult struct { + // Output only. May contain one or more recognition hypotheses (up to the + // maximum specified in `max_alternatives`). + // These alternatives are ordered in terms of accuracy, with the top (first) + // alternative being the most probable, as ranked by the recognizer. + Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + // Output only. If `false`, this `StreamingRecognitionResult` represents an + // interim result that may change. If `true`, this is the final time the + // speech service will return this particular `StreamingRecognitionResult`, + // the recognizer will not return any further hypotheses for this portion of + // the transcript and corresponding audio. + IsFinal bool `protobuf:"varint,2,opt,name=is_final,json=isFinal,proto3" json:"is_final,omitempty"` + // Output only. An estimate of the likelihood that the recognizer will not + // change its guess about this interim result. Values range from 0.0 + // (completely unstable) to 1.0 (completely stable). + // This field is only provided for interim results (`is_final=false`). + // The default of 0.0 is a sentinel value indicating `stability` was not set. 
+ Stability float32 `protobuf:"fixed32,3,opt,name=stability,proto3" json:"stability,omitempty"` + // For multi-channel audio, this is the channel number corresponding to the + // recognized result for the audio from that channel. + // For audio_channel_count = N, its output values can range from '1' to 'N'. + ChannelTag int32 `protobuf:"varint,5,opt,name=channel_tag,json=channelTag,proto3" json:"channel_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionResult) Reset() { *m = StreamingRecognitionResult{} } +func (m *StreamingRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionResult) ProtoMessage() {} +func (*StreamingRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{11} +} +func (m *StreamingRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionResult.Unmarshal(m, b) +} +func (m *StreamingRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionResult.Merge(dst, src) +} +func (m *StreamingRecognitionResult) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionResult.Size(m) +} +func (m *StreamingRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionResult proto.InternalMessageInfo + +func (m *StreamingRecognitionResult) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +func (m *StreamingRecognitionResult) GetIsFinal() bool { + if m != nil { + return m.IsFinal + } + return false +} + +func (m *StreamingRecognitionResult) GetStability() float32 { + if m != nil { + return m.Stability + } + return 0 +} + +func (m *StreamingRecognitionResult) GetChannelTag() int32 { + if m != nil { + return m.ChannelTag + } + return 0 +} + +// A speech recognition result corresponding to a portion of the audio. +type SpeechRecognitionResult struct { + // Output only. May contain one or more recognition hypotheses (up to the + // maximum specified in `max_alternatives`). + // These alternatives are ordered in terms of accuracy, with the top (first) + // alternative being the most probable, as ranked by the recognizer. + Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + // For multi-channel audio, this is the channel number corresponding to the + // recognized result for the audio from that channel. + // For audio_channel_count = N, its output values can range from '1' to 'N'. 
+ ChannelTag int32 `protobuf:"varint,2,opt,name=channel_tag,json=channelTag,proto3" json:"channel_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionResult) Reset() { *m = SpeechRecognitionResult{} } +func (m *SpeechRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionResult) ProtoMessage() {} +func (*SpeechRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{12} +} +func (m *SpeechRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionResult.Unmarshal(m, b) +} +func (m *SpeechRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionResult.Merge(dst, src) +} +func (m *SpeechRecognitionResult) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionResult.Size(m) +} +func (m *SpeechRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionResult proto.InternalMessageInfo + +func (m *SpeechRecognitionResult) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +func (m *SpeechRecognitionResult) GetChannelTag() int32 { + if m != nil { + return m.ChannelTag + } + return 0 +} + +// Alternative hypotheses (a.k.a. n-best list). +type SpeechRecognitionAlternative struct { + // Output only. Transcript text representing the words that the user spoke. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. This field is set only for the top alternative of a non-streaming + // result or, of a streaming result where `is_final=true`. + // This field is not guaranteed to be accurate and users should not rely on it + // to be always provided. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Output only. A list of word-specific information for each recognized word. + // Note: When `enable_speaker_diarization` is true, you will see all the words + // from the beginning of the audio. 
+ Words []*WordInfo `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionAlternative) Reset() { *m = SpeechRecognitionAlternative{} } +func (m *SpeechRecognitionAlternative) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionAlternative) ProtoMessage() {} +func (*SpeechRecognitionAlternative) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{13} +} +func (m *SpeechRecognitionAlternative) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionAlternative.Unmarshal(m, b) +} +func (m *SpeechRecognitionAlternative) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionAlternative.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionAlternative) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionAlternative.Merge(dst, src) +} +func (m *SpeechRecognitionAlternative) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionAlternative.Size(m) +} +func (m *SpeechRecognitionAlternative) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionAlternative.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionAlternative proto.InternalMessageInfo + +func (m *SpeechRecognitionAlternative) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *SpeechRecognitionAlternative) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *SpeechRecognitionAlternative) GetWords() []*WordInfo { + if m != nil { + return m.Words + } + return nil +} + +// Word-specific information for recognized words. +type WordInfo struct { + // Output only. Time offset relative to the beginning of the audio, + // and corresponding to the start of the spoken word. + // This field is only set if `enable_word_time_offsets=true` and only + // in the top hypothesis. + // This is an experimental feature and the accuracy of the time offset can + // vary. + StartTime *duration.Duration `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time offset relative to the beginning of the audio, + // and corresponding to the end of the spoken word. + // This field is only set if `enable_word_time_offsets=true` and only + // in the top hypothesis. + // This is an experimental feature and the accuracy of the time offset can + // vary. + EndTime *duration.Duration `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The word corresponding to this set of information. 
+ Word string `protobuf:"bytes,3,opt,name=word,proto3" json:"word,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WordInfo) Reset() { *m = WordInfo{} } +func (m *WordInfo) String() string { return proto.CompactTextString(m) } +func (*WordInfo) ProtoMessage() {} +func (*WordInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_10c398bf6dc739ce, []int{14} +} +func (m *WordInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WordInfo.Unmarshal(m, b) +} +func (m *WordInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WordInfo.Marshal(b, m, deterministic) +} +func (dst *WordInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WordInfo.Merge(dst, src) +} +func (m *WordInfo) XXX_Size() int { + return xxx_messageInfo_WordInfo.Size(m) +} +func (m *WordInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WordInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WordInfo proto.InternalMessageInfo + +func (m *WordInfo) GetStartTime() *duration.Duration { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WordInfo) GetEndTime() *duration.Duration { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *WordInfo) GetWord() string { + if m != nil { + return m.Word + } + return "" +} + +func init() { + proto.RegisterType((*RecognizeRequest)(nil), "google.cloud.speech.v1.RecognizeRequest") + proto.RegisterType((*LongRunningRecognizeRequest)(nil), "google.cloud.speech.v1.LongRunningRecognizeRequest") + proto.RegisterType((*StreamingRecognizeRequest)(nil), "google.cloud.speech.v1.StreamingRecognizeRequest") + proto.RegisterType((*StreamingRecognitionConfig)(nil), "google.cloud.speech.v1.StreamingRecognitionConfig") + proto.RegisterType((*RecognitionConfig)(nil), "google.cloud.speech.v1.RecognitionConfig") + proto.RegisterType((*SpeechContext)(nil), "google.cloud.speech.v1.SpeechContext") + proto.RegisterType((*RecognitionAudio)(nil), "google.cloud.speech.v1.RecognitionAudio") + proto.RegisterType((*RecognizeResponse)(nil), "google.cloud.speech.v1.RecognizeResponse") + proto.RegisterType((*LongRunningRecognizeResponse)(nil), "google.cloud.speech.v1.LongRunningRecognizeResponse") + proto.RegisterType((*LongRunningRecognizeMetadata)(nil), "google.cloud.speech.v1.LongRunningRecognizeMetadata") + proto.RegisterType((*StreamingRecognizeResponse)(nil), "google.cloud.speech.v1.StreamingRecognizeResponse") + proto.RegisterType((*StreamingRecognitionResult)(nil), "google.cloud.speech.v1.StreamingRecognitionResult") + proto.RegisterType((*SpeechRecognitionResult)(nil), "google.cloud.speech.v1.SpeechRecognitionResult") + proto.RegisterType((*SpeechRecognitionAlternative)(nil), "google.cloud.speech.v1.SpeechRecognitionAlternative") + proto.RegisterType((*WordInfo)(nil), "google.cloud.speech.v1.WordInfo") + proto.RegisterEnum("google.cloud.speech.v1.RecognitionConfig_AudioEncoding", RecognitionConfig_AudioEncoding_name, RecognitionConfig_AudioEncoding_value) + proto.RegisterEnum("google.cloud.speech.v1.StreamingRecognizeResponse_SpeechEventType", StreamingRecognizeResponse_SpeechEventType_name, StreamingRecognizeResponse_SpeechEventType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// SpeechClient is the client API for Speech service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SpeechClient interface { + // Performs synchronous speech recognition: receive results after all audio + // has been sent and processed. + Recognize(ctx context.Context, in *RecognizeRequest, opts ...grpc.CallOption) (*RecognizeResponse, error) + // Performs asynchronous speech recognition: receive results via the + // google.longrunning.Operations interface. Returns either an + // `Operation.error` or an `Operation.response` which contains + // a `LongRunningRecognizeResponse` message. + LongRunningRecognize(ctx context.Context, in *LongRunningRecognizeRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Performs bidirectional streaming speech recognition: receive results while + // sending audio. This method is only available via the gRPC API (not REST). + StreamingRecognize(ctx context.Context, opts ...grpc.CallOption) (Speech_StreamingRecognizeClient, error) +} + +type speechClient struct { + cc *grpc.ClientConn +} + +func NewSpeechClient(cc *grpc.ClientConn) SpeechClient { + return &speechClient{cc} +} + +func (c *speechClient) Recognize(ctx context.Context, in *RecognizeRequest, opts ...grpc.CallOption) (*RecognizeResponse, error) { + out := new(RecognizeResponse) + err := c.cc.Invoke(ctx, "/google.cloud.speech.v1.Speech/Recognize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *speechClient) LongRunningRecognize(ctx context.Context, in *LongRunningRecognizeRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.speech.v1.Speech/LongRunningRecognize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *speechClient) StreamingRecognize(ctx context.Context, opts ...grpc.CallOption) (Speech_StreamingRecognizeClient, error) { + stream, err := c.cc.NewStream(ctx, &_Speech_serviceDesc.Streams[0], "/google.cloud.speech.v1.Speech/StreamingRecognize", opts...) + if err != nil { + return nil, err + } + x := &speechStreamingRecognizeClient{stream} + return x, nil +} + +type Speech_StreamingRecognizeClient interface { + Send(*StreamingRecognizeRequest) error + Recv() (*StreamingRecognizeResponse, error) + grpc.ClientStream +} + +type speechStreamingRecognizeClient struct { + grpc.ClientStream +} + +func (x *speechStreamingRecognizeClient) Send(m *StreamingRecognizeRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *speechStreamingRecognizeClient) Recv() (*StreamingRecognizeResponse, error) { + m := new(StreamingRecognizeResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SpeechServer is the server API for Speech service. +type SpeechServer interface { + // Performs synchronous speech recognition: receive results after all audio + // has been sent and processed. + Recognize(context.Context, *RecognizeRequest) (*RecognizeResponse, error) + // Performs asynchronous speech recognition: receive results via the + // google.longrunning.Operations interface. Returns either an + // `Operation.error` or an `Operation.response` which contains + // a `LongRunningRecognizeResponse` message. 
+ LongRunningRecognize(context.Context, *LongRunningRecognizeRequest) (*longrunning.Operation, error) + // Performs bidirectional streaming speech recognition: receive results while + // sending audio. This method is only available via the gRPC API (not REST). + StreamingRecognize(Speech_StreamingRecognizeServer) error +} + +func RegisterSpeechServer(s *grpc.Server, srv SpeechServer) { + s.RegisterService(&_Speech_serviceDesc, srv) +} + +func _Speech_Recognize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RecognizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpeechServer).Recognize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.speech.v1.Speech/Recognize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpeechServer).Recognize(ctx, req.(*RecognizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Speech_LongRunningRecognize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LongRunningRecognizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpeechServer).LongRunningRecognize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.speech.v1.Speech/LongRunningRecognize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpeechServer).LongRunningRecognize(ctx, req.(*LongRunningRecognizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Speech_StreamingRecognize_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SpeechServer).StreamingRecognize(&speechStreamingRecognizeServer{stream}) +} + +type Speech_StreamingRecognizeServer interface { + Send(*StreamingRecognizeResponse) error + Recv() (*StreamingRecognizeRequest, error) + grpc.ServerStream +} + +type speechStreamingRecognizeServer struct { + grpc.ServerStream +} + +func (x *speechStreamingRecognizeServer) Send(m *StreamingRecognizeResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *speechStreamingRecognizeServer) Recv() (*StreamingRecognizeRequest, error) { + m := new(StreamingRecognizeRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Speech_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.speech.v1.Speech", + HandlerType: (*SpeechServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Recognize", + Handler: _Speech_Recognize_Handler, + }, + { + MethodName: "LongRunningRecognize", + Handler: _Speech_LongRunningRecognize_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingRecognize", + Handler: _Speech_StreamingRecognize_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/cloud/speech/v1/cloud_speech.proto", +} + +func init() { + proto.RegisterFile("google/cloud/speech/v1/cloud_speech.proto", fileDescriptor_cloud_speech_10c398bf6dc739ce) +} + +var fileDescriptor_cloud_speech_10c398bf6dc739ce = []byte{ + // 1479 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0x4f, 0x53, 0x1b, 0xc9, + 0x15, 0x67, 0x24, 0x84, 0xe0, 0xf1, 0x4f, 0xb4, 0x89, 0x3d, 0xc8, 0xc4, 0x90, 0x71, 0x5c, 0x06, + 
0x1f, 0xa4, 0x80, 0x5d, 0x76, 0xe2, 0xa4, 0x52, 0x11, 0x62, 0x00, 0x55, 0x81, 0x50, 0xb5, 0x44, + 0x70, 0x72, 0xc8, 0x54, 0x33, 0x6a, 0x0d, 0x53, 0x25, 0xf5, 0x4c, 0xa6, 0x7b, 0x88, 0xf1, 0xcd, + 0xb9, 0xa6, 0x2a, 0x97, 0x24, 0x3e, 0xe7, 0x96, 0xca, 0x79, 0x2f, 0xfb, 0x19, 0xf6, 0xb4, 0xb5, + 0xfb, 0x15, 0xf6, 0x43, 0xec, 0x71, 0xab, 0xbb, 0x67, 0x84, 0x24, 0x10, 0xc6, 0x55, 0xde, 0xaa, + 0xbd, 0xa9, 0xdf, 0xfb, 0xbd, 0x37, 0xbf, 0x7e, 0xdd, 0xef, 0xf7, 0x5a, 0xb0, 0xe9, 0x05, 0x81, + 0xd7, 0xa5, 0x65, 0xb7, 0x1b, 0xc4, 0xed, 0x32, 0x0f, 0x29, 0x75, 0xcf, 0xcb, 0x17, 0x5b, 0x7a, + 0xed, 0xe8, 0x75, 0x29, 0x8c, 0x02, 0x11, 0xa0, 0xfb, 0x1a, 0x5a, 0x52, 0xae, 0x52, 0xe2, 0xba, + 0xd8, 0x2a, 0xae, 0x26, 0x29, 0x48, 0xe8, 0x97, 0x09, 0x63, 0x81, 0x20, 0xc2, 0x0f, 0x18, 0xd7, + 0x51, 0xc5, 0xc7, 0x89, 0xb7, 0x1b, 0x30, 0x2f, 0x8a, 0x19, 0xf3, 0x99, 0x57, 0x0e, 0x42, 0x1a, + 0x0d, 0x81, 0x56, 0x12, 0x90, 0x5a, 0x9d, 0xc5, 0x9d, 0x32, 0x61, 0x97, 0x89, 0xeb, 0xd1, 0xa8, + 0xab, 0x1d, 0xeb, 0xd8, 0xc4, 0xff, 0x70, 0xd4, 0x4f, 0x7b, 0xa1, 0x48, 0x83, 0xd7, 0x46, 0x9d, + 0xc2, 0xef, 0x51, 0x2e, 0x48, 0x2f, 0x4c, 0x00, 0x0f, 0x12, 0x40, 0x14, 0xba, 0x65, 0x2e, 0x88, + 0x88, 0x13, 0x46, 0xd6, 0x7f, 0x0c, 0x28, 0x60, 0xea, 0x06, 0x1e, 0xf3, 0xdf, 0x51, 0x4c, 0xff, + 0x1a, 0x53, 0x2e, 0x50, 0x05, 0xa6, 0xdc, 0x80, 0x75, 0x7c, 0xcf, 0x34, 0xd6, 0x8d, 0x8d, 0xd9, + 0xed, 0xcd, 0xd2, 0xcd, 0x25, 0x29, 0x25, 0x91, 0x92, 0x66, 0x55, 0x05, 0xe0, 0x24, 0x10, 0xfd, + 0x1e, 0x72, 0x24, 0x6e, 0xfb, 0x81, 0x99, 0x51, 0x19, 0x36, 0xee, 0x90, 0xa1, 0x22, 0xf1, 0x58, + 0x87, 0x59, 0xff, 0x35, 0xe0, 0xe1, 0x61, 0xc0, 0x3c, 0xac, 0x4b, 0xf9, 0x53, 0xa4, 0xf8, 0xa5, + 0x01, 0x2b, 0x4d, 0x11, 0x51, 0xd2, 0xbb, 0x89, 0xa0, 0x03, 0x05, 0x9e, 0x3a, 0x9d, 0x21, 0xaa, + 0xdb, 0xe3, 0x3e, 0x34, 0x9a, 0xec, 0x8a, 0xf3, 0xc1, 0x04, 0x5e, 0xec, 0x67, 0xd3, 0x26, 0xf4, + 0x04, 0xe6, 0x15, 0x0f, 0x99, 0x5c, 0x50, 0x26, 0xd4, 0x36, 0xe6, 0x0e, 0x26, 0xf0, 0x9c, 0x32, + 0x57, 0xb5, 0x75, 0xe7, 0x1e, 0x2c, 0x5d, 0xf1, 0x88, 0x34, 0x39, 0xeb, 0x0b, 0x03, 0x8a, 0xe3, + 0xbf, 0xf6, 0x39, 0x8a, 0xbb, 0x09, 0x05, 0xee, 0x33, 0xaf, 0x4b, 0x9d, 0x58, 0x08, 0x1a, 0x11, + 0xe6, 0x52, 0x45, 0x70, 0x1a, 0x2f, 0x6a, 0xfb, 0x49, 0x6a, 0x46, 0x4f, 0x61, 0xd1, 0x67, 0x82, + 0x46, 0x7e, 0xcf, 0x89, 0x28, 0x8f, 0xbb, 0x82, 0x9b, 0x59, 0x85, 0x5c, 0x48, 0xcc, 0x58, 0x5b, + 0xad, 0xff, 0x4d, 0xc1, 0xd2, 0x75, 0xb2, 0x4d, 0x98, 0xa6, 0xcc, 0x0d, 0xda, 0x3e, 0xd3, 0x74, + 0x17, 0xb6, 0x5f, 0xdd, 0x99, 0x6e, 0x49, 0x1d, 0xa8, 0x9d, 0x84, 0xe3, 0x7e, 0x22, 0xf4, 0x0c, + 0x96, 0x38, 0xe9, 0x85, 0x5d, 0xea, 0x44, 0x44, 0x50, 0xe7, 0x9c, 0x46, 0xe2, 0x9d, 0xe2, 0x9f, + 0xc3, 0x8b, 0xda, 0x81, 0x89, 0xa0, 0x07, 0xd2, 0x8c, 0x4a, 0x70, 0x2f, 0x39, 0x88, 0x73, 0xc2, + 0x18, 0xed, 0x3a, 0x6e, 0x10, 0x33, 0x61, 0xe6, 0x15, 0x7a, 0x49, 0x1f, 0x86, 0xf6, 0x54, 0xa5, + 0x03, 0xb5, 0xe0, 0x29, 0x65, 0xe4, 0xac, 0x4b, 0x1d, 0x4e, 0x43, 0xa2, 0xf2, 0x47, 0x57, 0xc4, + 0x9c, 0x90, 0x46, 0x69, 0x26, 0x73, 0x4e, 0xd5, 0xe1, 0xb1, 0x86, 0x37, 0x13, 0xf4, 0xc0, 0x2e, + 0x1a, 0x34, 0x4a, 0x52, 0xa3, 0xc7, 0x30, 0xdf, 0x25, 0xcc, 0x8b, 0x89, 0x47, 0x1d, 0x37, 0x68, + 0x53, 0x55, 0xc3, 0x19, 0x3c, 0x97, 0x1a, 0xab, 0x41, 0x9b, 0xca, 0x53, 0xe9, 0x91, 0xb7, 0x0e, + 0xe9, 0x0a, 0x1a, 0x31, 0x22, 0xfc, 0x0b, 0xca, 0xcd, 0x49, 0xbd, 0xab, 0x1e, 0x79, 0x5b, 0x19, + 0x30, 0x4b, 0x68, 0x18, 0x05, 0x1d, 0xc2, 0x7c, 0x71, 0xe9, 0x74, 0x7c, 0xe9, 0x32, 0x73, 0xfa, + 0x00, 0xfb, 0xf6, 0x3d, 0x65, 0x46, 0x75, 0x58, 0xd4, 0x35, 0xd6, 0x57, 0xf1, 0xad, 0xe0, 0xe6, + 0xd4, 0x7a, 0x76, 0x63, 
0x76, 0xfb, 0xc9, 0xd8, 0x9b, 0xae, 0x7e, 0x55, 0x35, 0x1a, 0x2f, 0xf0, + 0xc1, 0x25, 0x47, 0xaf, 0xc0, 0x4c, 0x0a, 0xf4, 0xb7, 0x20, 0x6a, 0x3b, 0x52, 0xcb, 0x9c, 0xa0, + 0xd3, 0xe1, 0x54, 0x70, 0x73, 0x5a, 0x51, 0xf8, 0x99, 0xf6, 0x9f, 0x06, 0x51, 0xbb, 0xe5, 0xf7, + 0xe8, 0xb1, 0x76, 0xa2, 0x3f, 0xc0, 0x6a, 0x12, 0x48, 0x62, 0x11, 0xf4, 0x88, 0xf0, 0x5d, 0x27, + 0x8c, 0x99, 0x2b, 0x62, 0xa5, 0xa4, 0xe6, 0xac, 0x0a, 0x2e, 0x6a, 0x4c, 0x25, 0x85, 0x34, 0xae, + 0x10, 0x68, 0x19, 0x72, 0xbd, 0xa0, 0x4d, 0xbb, 0xe6, 0xbc, 0xaa, 0x9e, 0x5e, 0xa0, 0x5f, 0xc0, + 0x5c, 0xcc, 0xa9, 0x43, 0xd9, 0xb9, 0xbc, 0xb0, 0x6d, 0x73, 0x41, 0xe5, 0x99, 0x8d, 0x39, 0xb5, + 0x13, 0x93, 0xf5, 0x0f, 0x03, 0xe6, 0x87, 0x2e, 0x13, 0x32, 0x61, 0xd9, 0xae, 0x57, 0x8f, 0x77, + 0x6b, 0xf5, 0x7d, 0xe7, 0xa4, 0xde, 0x6c, 0xd8, 0xd5, 0xda, 0x5e, 0xcd, 0xde, 0x2d, 0x4c, 0xa0, + 0x39, 0x98, 0x3e, 0xac, 0xd5, 0xed, 0x0a, 0xde, 0x7a, 0x59, 0x30, 0xd0, 0x34, 0x4c, 0xee, 0x1d, + 0x56, 0xaa, 0x85, 0x0c, 0x9a, 0x81, 0xdc, 0xd1, 0xc9, 0x61, 0xe5, 0xb4, 0x90, 0x45, 0x79, 0xc8, + 0x56, 0x8e, 0x70, 0x61, 0x12, 0x01, 0x4c, 0x55, 0x8e, 0xb0, 0x73, 0xba, 0x53, 0xc8, 0xc9, 0xb8, + 0xe3, 0xfd, 0x7d, 0xe7, 0xb8, 0x71, 0xd2, 0x2c, 0x4c, 0xa1, 0x22, 0xdc, 0x6f, 0x36, 0x6c, 0xfb, + 0x8d, 0x73, 0x5a, 0x6b, 0x1d, 0x38, 0x07, 0x76, 0x65, 0xd7, 0xc6, 0xce, 0xce, 0x9f, 0x5a, 0x76, + 0x21, 0x6f, 0x6d, 0xc2, 0xfc, 0x50, 0x89, 0x91, 0x09, 0xf9, 0xf0, 0x3c, 0x22, 0x9c, 0x72, 0xd3, + 0x58, 0xcf, 0x6e, 0xcc, 0xe0, 0x74, 0x69, 0xe1, 0xbe, 0xfe, 0xf7, 0x05, 0x0e, 0x15, 0x21, 0x9f, + 0x8a, 0x8a, 0x91, 0x88, 0x4a, 0x6a, 0x40, 0x08, 0xb2, 0x71, 0xe4, 0xab, 0x5e, 0x98, 0x39, 0x98, + 0xc0, 0x72, 0xb1, 0xb3, 0x00, 0x5a, 0x73, 0x1c, 0x1e, 0xc4, 0x91, 0x4b, 0xad, 0xbf, 0xf4, 0xfb, + 0x54, 0xea, 0x21, 0x0f, 0x03, 0xc6, 0x29, 0xaa, 0x41, 0x3e, 0x6d, 0xef, 0x8c, 0xba, 0x1d, 0xe5, + 0xdb, 0x6f, 0xc7, 0x00, 0x2b, 0x2d, 0x00, 0x38, 0x8d, 0xb7, 0x7c, 0x58, 0xbd, 0x79, 0x36, 0x7c, + 0xfe, 0x4f, 0x7d, 0x65, 0xdc, 0xfc, 0xad, 0x23, 0x2a, 0x48, 0x9b, 0x08, 0x92, 0xf4, 0x89, 0x17, + 0x51, 0xce, 0x65, 0xeb, 0xba, 0x69, 0xd1, 0x72, 0xaa, 0x4f, 0x94, 0xbd, 0xa1, 0xcd, 0xe8, 0x37, + 0x00, 0x5c, 0x90, 0x48, 0xa8, 0x1b, 0x9d, 0x4c, 0x9d, 0x62, 0xca, 0x2c, 0x1d, 0xdd, 0xa5, 0x56, + 0x3a, 0xba, 0xf1, 0x8c, 0x42, 0xcb, 0x35, 0xda, 0x85, 0x42, 0x97, 0x70, 0xe1, 0xc4, 0x61, 0x5b, + 0xea, 0x85, 0x4a, 0x90, 0xfd, 0x68, 0x82, 0x05, 0x19, 0x73, 0xa2, 0x42, 0xa4, 0xd1, 0xfa, 0x26, + 0x73, 0x5d, 0xf6, 0x07, 0xca, 0xb6, 0x01, 0x39, 0x1a, 0x45, 0x41, 0x94, 0xa8, 0x3e, 0x4a, 0x33, + 0x47, 0xa1, 0x5b, 0x6a, 0xaa, 0x47, 0x03, 0xd6, 0x00, 0x74, 0x38, 0x5a, 0xe0, 0x4f, 0x9a, 0x69, + 0x23, 0x35, 0x46, 0x0c, 0x96, 0x12, 0xfd, 0xa0, 0x17, 0x94, 0x09, 0x47, 0x5c, 0x86, 0x54, 0xc9, + 0xd2, 0xc2, 0xf6, 0xce, 0x5d, 0xf3, 0x5e, 0x6d, 0x23, 0x39, 0x53, 0x5b, 0xa6, 0x6a, 0x5d, 0x86, + 0x14, 0x27, 0xe2, 0xd4, 0x37, 0x58, 0x87, 0xb0, 0x38, 0x82, 0x41, 0xab, 0x60, 0xca, 0x66, 0xaa, + 0x1e, 0x38, 0xf6, 0x1f, 0xed, 0x7a, 0x6b, 0xa4, 0x61, 0x1f, 0xc2, 0x03, 0xbb, 0xbe, 0xeb, 0x1c, + 0xef, 0x39, 0xcd, 0x5a, 0x7d, 0xff, 0xd0, 0x76, 0x4e, 0x5a, 0x2d, 0x1b, 0x57, 0xea, 0x55, 0xbb, + 0x60, 0x58, 0x5f, 0x8f, 0x99, 0xa5, 0x7a, 0x97, 0xe8, 0x0d, 0xcc, 0x0d, 0xc9, 0xad, 0xa1, 0xea, + 0xf5, 0xe2, 0xce, 0x17, 0x72, 0x40, 0x94, 0xf1, 0x50, 0x26, 0xb4, 0x02, 0xd3, 0x3e, 0x77, 0x3a, + 0x3e, 0x23, 0xdd, 0x64, 0xb4, 0xe6, 0x7d, 0xbe, 0x27, 0x97, 0x68, 0x15, 0xe4, 0xdd, 0x39, 0xf3, + 0xbb, 0xbe, 0xb8, 0x54, 0xf7, 0x24, 0x83, 0xaf, 0x0c, 0x68, 0x0d, 0x66, 0xd3, 0x51, 0x25, 0x88, + 0xa7, 0x54, 0x3d, 0x87, 0x21, 0x31, 0xb5, 0x88, 
0x67, 0xfd, 0xdb, 0x80, 0x07, 0x63, 0x3a, 0xe3, + 0x47, 0xdc, 0xcf, 0x08, 0xad, 0xcc, 0x35, 0x5a, 0x1f, 0x0c, 0x58, 0xbd, 0x2d, 0x1f, 0x7a, 0x04, + 0x20, 0x22, 0xc2, 0xb8, 0x1b, 0xf9, 0xa1, 0xee, 0xc2, 0x19, 0x3c, 0x60, 0x91, 0x7e, 0xf5, 0x3c, + 0x69, 0xd3, 0xf4, 0x39, 0x92, 0xc1, 0x03, 0x16, 0xf4, 0x12, 0x72, 0x72, 0xe2, 0xc8, 0xf7, 0x87, + 0xdc, 0xd4, 0xfa, 0xb8, 0x4d, 0xc9, 0xb9, 0x53, 0x63, 0x9d, 0x00, 0x6b, 0xb8, 0xf5, 0x4f, 0x03, + 0xa6, 0x53, 0x1b, 0xfa, 0xf5, 0x50, 0x97, 0xeb, 0x56, 0x5a, 0xb9, 0xd6, 0xa4, 0xbb, 0xc9, 0xeb, + 0x7e, 0xb0, 0xc9, 0x5f, 0xc8, 0x97, 0x4c, 0x7b, 0x50, 0x1d, 0x6e, 0x89, 0xcb, 0x53, 0xa6, 0x66, + 0x1f, 0x42, 0x30, 0x29, 0x59, 0x24, 0xf3, 0x5e, 0xfd, 0xde, 0xfe, 0x7f, 0x16, 0xa6, 0x74, 0xa5, + 0xd0, 0x7b, 0x03, 0x66, 0xfa, 0x3d, 0x82, 0x3e, 0xf6, 0xc8, 0xed, 0xbf, 0x5f, 0x8b, 0x9b, 0x77, + 0x40, 0xea, 0x86, 0xb3, 0xd6, 0xfe, 0xfe, 0xed, 0x77, 0xff, 0xca, 0xac, 0x58, 0xcb, 0xf2, 0x0f, + 0x95, 0x06, 0xbe, 0x8e, 0x52, 0xd4, 0x6b, 0xe3, 0x19, 0xfa, 0x60, 0xc0, 0xf2, 0x4d, 0x22, 0x8a, + 0x9e, 0x8f, 0xfb, 0xc8, 0x2d, 0x4f, 0xff, 0xe2, 0xcf, 0xd3, 0xa0, 0x81, 0xbf, 0x5a, 0xa5, 0xe3, + 0xf4, 0xaf, 0x96, 0xf5, 0x4c, 0xb1, 0xf9, 0xa5, 0xb5, 0x36, 0xc0, 0x66, 0x00, 0x39, 0x44, 0xec, + 0xbd, 0x01, 0xe8, 0xba, 0x92, 0xa0, 0xad, 0x4f, 0x51, 0x1d, 0x4d, 0x6a, 0xfb, 0xd3, 0x85, 0xca, + 0x9a, 0xd8, 0x30, 0x7e, 0x65, 0xec, 0x74, 0xa1, 0xe8, 0x06, 0xbd, 0x31, 0xe1, 0x3b, 0xb3, 0xfa, + 0x18, 0x1b, 0xf2, 0xfc, 0x1b, 0xc6, 0x9f, 0x7f, 0x97, 0xc0, 0xbc, 0x40, 0xbe, 0xeb, 0x4a, 0x41, + 0xe4, 0x95, 0x3d, 0xca, 0xd4, 0xed, 0x28, 0x6b, 0x17, 0x09, 0x7d, 0x3e, 0xfa, 0x2f, 0xf7, 0xb7, + 0xfa, 0xd7, 0xf7, 0x86, 0x71, 0x36, 0xa5, 0xb0, 0xcf, 0x7f, 0x08, 0x00, 0x00, 0xff, 0xff, 0x37, + 0x96, 0x14, 0x87, 0x10, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1/cloud_speech.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1/cloud_speech.pb.go new file mode 100644 index 0000000..3b7bd60 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1/cloud_speech.pb.go @@ -0,0 +1,2208 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/speech/v1p1beta1/cloud_speech.proto + +package speech // import "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The encoding of the audio data sent in the request. +// +// All encodings support only 1 channel (mono) audio. 
+// +// For best results, the audio source should be captured and transmitted using +// a lossless encoding (`FLAC` or `LINEAR16`). The accuracy of the speech +// recognition can be reduced if lossy codecs are used to capture or transmit +// audio, particularly if background noise is present. Lossy codecs include +// `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, and `SPEEX_WITH_HEADER_BYTE`. +// +// The `FLAC` and `WAV` audio file formats include a header that describes the +// included audio content. You can request recognition for `WAV` files that +// contain either `LINEAR16` or `MULAW` encoded audio. +// If you send `FLAC` or `WAV` audio file format in +// your request, you do not need to specify an `AudioEncoding`; the audio +// encoding format is determined from the file header. If you specify +// an `AudioEncoding` when you send send `FLAC` or `WAV` audio, the +// encoding configuration must match the encoding described in the audio +// header; otherwise the request returns an +// [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error +// code. +type RecognitionConfig_AudioEncoding int32 + +const ( + // Not specified. + RecognitionConfig_ENCODING_UNSPECIFIED RecognitionConfig_AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + RecognitionConfig_LINEAR16 RecognitionConfig_AudioEncoding = 1 + // `FLAC` (Free Lossless Audio + // Codec) is the recommended encoding because it is + // lossless--therefore recognition is not compromised--and + // requires only about half the bandwidth of `LINEAR16`. `FLAC` stream + // encoding supports 16-bit and 24-bit samples, however, not all fields in + // `STREAMINFO` are supported. + RecognitionConfig_FLAC RecognitionConfig_AudioEncoding = 2 + // 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + RecognitionConfig_MULAW RecognitionConfig_AudioEncoding = 3 + // Adaptive Multi-Rate Narrowband codec. `sample_rate_hertz` must be 8000. + RecognitionConfig_AMR RecognitionConfig_AudioEncoding = 4 + // Adaptive Multi-Rate Wideband codec. `sample_rate_hertz` must be 16000. + RecognitionConfig_AMR_WB RecognitionConfig_AudioEncoding = 5 + // Opus encoded audio frames in Ogg container + // ([OggOpus](https://wiki.xiph.org/OggOpus)). + // `sample_rate_hertz` must be one of 8000, 12000, 16000, 24000, or 48000. + RecognitionConfig_OGG_OPUS RecognitionConfig_AudioEncoding = 6 + // Although the use of lossy encodings is not recommended, if a very low + // bitrate encoding is required, `OGG_OPUS` is highly preferred over + // Speex encoding. The [Speex](https://speex.org/) encoding supported by + // Cloud Speech API has a header byte in each block, as in MIME type + // `audio/x-speex-with-header-byte`. + // It is a variant of the RTP Speex encoding defined in + // [RFC 5574](https://tools.ietf.org/html/rfc5574). + // The stream is a sequence of blocks, one block per RTP packet. Each block + // starts with a byte containing the length of the block, in bytes, followed + // by one or more frames of Speex data, padded to an integral number of + // bytes (octets) as specified in RFC 5574. In other words, each RTP header + // is replaced with a single byte containing the block length. Only Speex + // wideband is supported. `sample_rate_hertz` must be 16000. 
+ RecognitionConfig_SPEEX_WITH_HEADER_BYTE RecognitionConfig_AudioEncoding = 7 +) + +var RecognitionConfig_AudioEncoding_name = map[int32]string{ + 0: "ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "FLAC", + 3: "MULAW", + 4: "AMR", + 5: "AMR_WB", + 6: "OGG_OPUS", + 7: "SPEEX_WITH_HEADER_BYTE", +} +var RecognitionConfig_AudioEncoding_value = map[string]int32{ + "ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "FLAC": 2, + "MULAW": 3, + "AMR": 4, + "AMR_WB": 5, + "OGG_OPUS": 6, + "SPEEX_WITH_HEADER_BYTE": 7, +} + +func (x RecognitionConfig_AudioEncoding) String() string { + return proto.EnumName(RecognitionConfig_AudioEncoding_name, int32(x)) +} +func (RecognitionConfig_AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{4, 0} +} + +// Use case categories that the audio recognition request can be described +// by. +type RecognitionMetadata_InteractionType int32 + +const ( + // Use case is either unknown or is something other than one of the other + // values below. + RecognitionMetadata_INTERACTION_TYPE_UNSPECIFIED RecognitionMetadata_InteractionType = 0 + // Multiple people in a conversation or discussion. For example in a + // meeting with two or more people actively participating. Typically + // all the primary people speaking would be in the same room (if not, + // see PHONE_CALL) + RecognitionMetadata_DISCUSSION RecognitionMetadata_InteractionType = 1 + // One or more persons lecturing or presenting to others, mostly + // uninterrupted. + RecognitionMetadata_PRESENTATION RecognitionMetadata_InteractionType = 2 + // A phone-call or video-conference in which two or more people, who are + // not in the same room, are actively participating. + RecognitionMetadata_PHONE_CALL RecognitionMetadata_InteractionType = 3 + // A recorded message intended for another person to listen to. + RecognitionMetadata_VOICEMAIL RecognitionMetadata_InteractionType = 4 + // Professionally produced audio (eg. TV Show, Podcast). + RecognitionMetadata_PROFESSIONALLY_PRODUCED RecognitionMetadata_InteractionType = 5 + // Transcribe spoken questions and queries into text. + RecognitionMetadata_VOICE_SEARCH RecognitionMetadata_InteractionType = 6 + // Transcribe voice commands, such as for controlling a device. + RecognitionMetadata_VOICE_COMMAND RecognitionMetadata_InteractionType = 7 + // Transcribe speech to text to create a written document, such as a + // text-message, email or report. + RecognitionMetadata_DICTATION RecognitionMetadata_InteractionType = 8 +) + +var RecognitionMetadata_InteractionType_name = map[int32]string{ + 0: "INTERACTION_TYPE_UNSPECIFIED", + 1: "DISCUSSION", + 2: "PRESENTATION", + 3: "PHONE_CALL", + 4: "VOICEMAIL", + 5: "PROFESSIONALLY_PRODUCED", + 6: "VOICE_SEARCH", + 7: "VOICE_COMMAND", + 8: "DICTATION", +} +var RecognitionMetadata_InteractionType_value = map[string]int32{ + "INTERACTION_TYPE_UNSPECIFIED": 0, + "DISCUSSION": 1, + "PRESENTATION": 2, + "PHONE_CALL": 3, + "VOICEMAIL": 4, + "PROFESSIONALLY_PRODUCED": 5, + "VOICE_SEARCH": 6, + "VOICE_COMMAND": 7, + "DICTATION": 8, +} + +func (x RecognitionMetadata_InteractionType) String() string { + return proto.EnumName(RecognitionMetadata_InteractionType_name, int32(x)) +} +func (RecognitionMetadata_InteractionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{5, 0} +} + +// Enumerates the types of capture settings describing an audio file. +type RecognitionMetadata_MicrophoneDistance int32 + +const ( + // Audio type is not known. 
+ RecognitionMetadata_MICROPHONE_DISTANCE_UNSPECIFIED RecognitionMetadata_MicrophoneDistance = 0 + // The audio was captured from a closely placed microphone. Eg. phone, + // dictaphone, or handheld microphone. Generally if there speaker is within + // 1 meter of the microphone. + RecognitionMetadata_NEARFIELD RecognitionMetadata_MicrophoneDistance = 1 + // The speaker if within 3 meters of the microphone. + RecognitionMetadata_MIDFIELD RecognitionMetadata_MicrophoneDistance = 2 + // The speaker is more than 3 meters away from the microphone. + RecognitionMetadata_FARFIELD RecognitionMetadata_MicrophoneDistance = 3 +) + +var RecognitionMetadata_MicrophoneDistance_name = map[int32]string{ + 0: "MICROPHONE_DISTANCE_UNSPECIFIED", + 1: "NEARFIELD", + 2: "MIDFIELD", + 3: "FARFIELD", +} +var RecognitionMetadata_MicrophoneDistance_value = map[string]int32{ + "MICROPHONE_DISTANCE_UNSPECIFIED": 0, + "NEARFIELD": 1, + "MIDFIELD": 2, + "FARFIELD": 3, +} + +func (x RecognitionMetadata_MicrophoneDistance) String() string { + return proto.EnumName(RecognitionMetadata_MicrophoneDistance_name, int32(x)) +} +func (RecognitionMetadata_MicrophoneDistance) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{5, 1} +} + +// The original media the speech was recorded on. +type RecognitionMetadata_OriginalMediaType int32 + +const ( + // Unknown original media type. + RecognitionMetadata_ORIGINAL_MEDIA_TYPE_UNSPECIFIED RecognitionMetadata_OriginalMediaType = 0 + // The speech data is an audio recording. + RecognitionMetadata_AUDIO RecognitionMetadata_OriginalMediaType = 1 + // The speech data originally recorded on a video. + RecognitionMetadata_VIDEO RecognitionMetadata_OriginalMediaType = 2 +) + +var RecognitionMetadata_OriginalMediaType_name = map[int32]string{ + 0: "ORIGINAL_MEDIA_TYPE_UNSPECIFIED", + 1: "AUDIO", + 2: "VIDEO", +} +var RecognitionMetadata_OriginalMediaType_value = map[string]int32{ + "ORIGINAL_MEDIA_TYPE_UNSPECIFIED": 0, + "AUDIO": 1, + "VIDEO": 2, +} + +func (x RecognitionMetadata_OriginalMediaType) String() string { + return proto.EnumName(RecognitionMetadata_OriginalMediaType_name, int32(x)) +} +func (RecognitionMetadata_OriginalMediaType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{5, 2} +} + +// The type of device the speech was recorded with. +type RecognitionMetadata_RecordingDeviceType int32 + +const ( + // The recording device is unknown. + RecognitionMetadata_RECORDING_DEVICE_TYPE_UNSPECIFIED RecognitionMetadata_RecordingDeviceType = 0 + // Speech was recorded on a smartphone. + RecognitionMetadata_SMARTPHONE RecognitionMetadata_RecordingDeviceType = 1 + // Speech was recorded using a personal computer or tablet. + RecognitionMetadata_PC RecognitionMetadata_RecordingDeviceType = 2 + // Speech was recorded over a phone line. + RecognitionMetadata_PHONE_LINE RecognitionMetadata_RecordingDeviceType = 3 + // Speech was recorded in a vehicle. + RecognitionMetadata_VEHICLE RecognitionMetadata_RecordingDeviceType = 4 + // Speech was recorded outdoors. + RecognitionMetadata_OTHER_OUTDOOR_DEVICE RecognitionMetadata_RecordingDeviceType = 5 + // Speech was recorded indoors. 
+ RecognitionMetadata_OTHER_INDOOR_DEVICE RecognitionMetadata_RecordingDeviceType = 6 +) + +var RecognitionMetadata_RecordingDeviceType_name = map[int32]string{ + 0: "RECORDING_DEVICE_TYPE_UNSPECIFIED", + 1: "SMARTPHONE", + 2: "PC", + 3: "PHONE_LINE", + 4: "VEHICLE", + 5: "OTHER_OUTDOOR_DEVICE", + 6: "OTHER_INDOOR_DEVICE", +} +var RecognitionMetadata_RecordingDeviceType_value = map[string]int32{ + "RECORDING_DEVICE_TYPE_UNSPECIFIED": 0, + "SMARTPHONE": 1, + "PC": 2, + "PHONE_LINE": 3, + "VEHICLE": 4, + "OTHER_OUTDOOR_DEVICE": 5, + "OTHER_INDOOR_DEVICE": 6, +} + +func (x RecognitionMetadata_RecordingDeviceType) String() string { + return proto.EnumName(RecognitionMetadata_RecordingDeviceType_name, int32(x)) +} +func (RecognitionMetadata_RecordingDeviceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{5, 3} +} + +// Indicates the type of speech event. +type StreamingRecognizeResponse_SpeechEventType int32 + +const ( + // No speech event specified. + StreamingRecognizeResponse_SPEECH_EVENT_UNSPECIFIED StreamingRecognizeResponse_SpeechEventType = 0 + // This event indicates that the server has detected the end of the user's + // speech utterance and expects no additional speech. Therefore, the server + // will not process additional audio (although it may subsequently return + // additional results). The client should stop sending additional audio + // data, half-close the gRPC connection, and wait for any additional results + // until the server closes the gRPC connection. This event is only sent if + // `single_utterance` was set to `true`, and is not used otherwise. + StreamingRecognizeResponse_END_OF_SINGLE_UTTERANCE StreamingRecognizeResponse_SpeechEventType = 1 +) + +var StreamingRecognizeResponse_SpeechEventType_name = map[int32]string{ + 0: "SPEECH_EVENT_UNSPECIFIED", + 1: "END_OF_SINGLE_UTTERANCE", +} +var StreamingRecognizeResponse_SpeechEventType_value = map[string]int32{ + "SPEECH_EVENT_UNSPECIFIED": 0, + "END_OF_SINGLE_UTTERANCE": 1, +} + +func (x StreamingRecognizeResponse_SpeechEventType) String() string { + return proto.EnumName(StreamingRecognizeResponse_SpeechEventType_name, int32(x)) +} +func (StreamingRecognizeResponse_SpeechEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{11, 0} +} + +// The top-level message sent by the client for the `Recognize` method. +type RecognizeRequest struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Required* The audio data to be recognized. 
+ Audio *RecognitionAudio `protobuf:"bytes,2,opt,name=audio,proto3" json:"audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognizeRequest) Reset() { *m = RecognizeRequest{} } +func (m *RecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*RecognizeRequest) ProtoMessage() {} +func (*RecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{0} +} +func (m *RecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognizeRequest.Unmarshal(m, b) +} +func (m *RecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *RecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognizeRequest.Merge(dst, src) +} +func (m *RecognizeRequest) XXX_Size() int { + return xxx_messageInfo_RecognizeRequest.Size(m) +} +func (m *RecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognizeRequest proto.InternalMessageInfo + +func (m *RecognizeRequest) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *RecognizeRequest) GetAudio() *RecognitionAudio { + if m != nil { + return m.Audio + } + return nil +} + +// The top-level message sent by the client for the `LongRunningRecognize` +// method. +type LongRunningRecognizeRequest struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Required* The audio data to be recognized. + Audio *RecognitionAudio `protobuf:"bytes,2,opt,name=audio,proto3" json:"audio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeRequest) Reset() { *m = LongRunningRecognizeRequest{} } +func (m *LongRunningRecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeRequest) ProtoMessage() {} +func (*LongRunningRecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{1} +} +func (m *LongRunningRecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeRequest.Unmarshal(m, b) +} +func (m *LongRunningRecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeRequest.Merge(dst, src) +} +func (m *LongRunningRecognizeRequest) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeRequest.Size(m) +} +func (m *LongRunningRecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeRequest proto.InternalMessageInfo + +func (m *LongRunningRecognizeRequest) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *LongRunningRecognizeRequest) GetAudio() *RecognitionAudio { + if m != nil { + return m.Audio + } + return nil +} + +// The top-level message sent by the client for the `StreamingRecognize` method. 
+// Multiple `StreamingRecognizeRequest` messages are sent. The first message +// must contain a `streaming_config` message and must not contain `audio` data. +// All subsequent messages must contain `audio` data and must not contain a +// `streaming_config` message. +type StreamingRecognizeRequest struct { + // The streaming request, which is either a streaming config or audio content. + // + // Types that are valid to be assigned to StreamingRequest: + // *StreamingRecognizeRequest_StreamingConfig + // *StreamingRecognizeRequest_AudioContent + StreamingRequest isStreamingRecognizeRequest_StreamingRequest `protobuf_oneof:"streaming_request"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognizeRequest) Reset() { *m = StreamingRecognizeRequest{} } +func (m *StreamingRecognizeRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognizeRequest) ProtoMessage() {} +func (*StreamingRecognizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{2} +} +func (m *StreamingRecognizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognizeRequest.Unmarshal(m, b) +} +func (m *StreamingRecognizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognizeRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognizeRequest.Merge(dst, src) +} +func (m *StreamingRecognizeRequest) XXX_Size() int { + return xxx_messageInfo_StreamingRecognizeRequest.Size(m) +} +func (m *StreamingRecognizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognizeRequest proto.InternalMessageInfo + +type isStreamingRecognizeRequest_StreamingRequest interface { + isStreamingRecognizeRequest_StreamingRequest() +} + +type StreamingRecognizeRequest_StreamingConfig struct { + StreamingConfig *StreamingRecognitionConfig `protobuf:"bytes,1,opt,name=streaming_config,json=streamingConfig,proto3,oneof"` +} + +type StreamingRecognizeRequest_AudioContent struct { + AudioContent []byte `protobuf:"bytes,2,opt,name=audio_content,json=audioContent,proto3,oneof"` +} + +func (*StreamingRecognizeRequest_StreamingConfig) isStreamingRecognizeRequest_StreamingRequest() {} + +func (*StreamingRecognizeRequest_AudioContent) isStreamingRecognizeRequest_StreamingRequest() {} + +func (m *StreamingRecognizeRequest) GetStreamingRequest() isStreamingRecognizeRequest_StreamingRequest { + if m != nil { + return m.StreamingRequest + } + return nil +} + +func (m *StreamingRecognizeRequest) GetStreamingConfig() *StreamingRecognitionConfig { + if x, ok := m.GetStreamingRequest().(*StreamingRecognizeRequest_StreamingConfig); ok { + return x.StreamingConfig + } + return nil +} + +func (m *StreamingRecognizeRequest) GetAudioContent() []byte { + if x, ok := m.GetStreamingRequest().(*StreamingRecognizeRequest_AudioContent); ok { + return x.AudioContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StreamingRecognizeRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StreamingRecognizeRequest_OneofMarshaler, _StreamingRecognizeRequest_OneofUnmarshaler, _StreamingRecognizeRequest_OneofSizer, []interface{}{ + (*StreamingRecognizeRequest_StreamingConfig)(nil), + (*StreamingRecognizeRequest_AudioContent)(nil), + } +} + +func _StreamingRecognizeRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StreamingRecognizeRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingRecognizeRequest_StreamingConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StreamingConfig); err != nil { + return err + } + case *StreamingRecognizeRequest_AudioContent: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.AudioContent) + case nil: + default: + return fmt.Errorf("StreamingRecognizeRequest.StreamingRequest has unexpected type %T", x) + } + return nil +} + +func _StreamingRecognizeRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StreamingRecognizeRequest) + switch tag { + case 1: // streaming_request.streaming_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingRecognitionConfig) + err := b.DecodeMessage(msg) + m.StreamingRequest = &StreamingRecognizeRequest_StreamingConfig{msg} + return true, err + case 2: // streaming_request.audio_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StreamingRequest = &StreamingRecognizeRequest_AudioContent{x} + return true, err + default: + return false, nil + } +} + +func _StreamingRecognizeRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StreamingRecognizeRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingRecognizeRequest_StreamingConfig: + s := proto.Size(x.StreamingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingRecognizeRequest_AudioContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AudioContent))) + n += len(x.AudioContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Provides information to the recognizer that specifies how to process the +// request. +type StreamingRecognitionConfig struct { + // *Required* Provides information to the recognizer that specifies how to + // process the request. + Config *RecognitionConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // *Optional* If `false` or omitted, the recognizer will perform continuous + // recognition (continuing to wait for and process audio even if the user + // pauses speaking) until the client closes the input stream (gRPC API) or + // until the maximum time limit has been reached. May return multiple + // `StreamingRecognitionResult`s with the `is_final` flag set to `true`. + // + // If `true`, the recognizer will detect a single spoken utterance. When it + // detects that the user has paused or stopped speaking, it will return an + // `END_OF_SINGLE_UTTERANCE` event and cease recognition. It will return no + // more than one `StreamingRecognitionResult` with the `is_final` flag set to + // `true`. 
+ SingleUtterance bool `protobuf:"varint,2,opt,name=single_utterance,json=singleUtterance,proto3" json:"single_utterance,omitempty"` + // *Optional* If `true`, interim results (tentative hypotheses) may be + // returned as they become available (these interim results are indicated with + // the `is_final=false` flag). + // If `false` or omitted, only `is_final=true` result(s) are returned. + InterimResults bool `protobuf:"varint,3,opt,name=interim_results,json=interimResults,proto3" json:"interim_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionConfig) Reset() { *m = StreamingRecognitionConfig{} } +func (m *StreamingRecognitionConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionConfig) ProtoMessage() {} +func (*StreamingRecognitionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{3} +} +func (m *StreamingRecognitionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionConfig.Unmarshal(m, b) +} +func (m *StreamingRecognitionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionConfig.Merge(dst, src) +} +func (m *StreamingRecognitionConfig) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionConfig.Size(m) +} +func (m *StreamingRecognitionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionConfig proto.InternalMessageInfo + +func (m *StreamingRecognitionConfig) GetConfig() *RecognitionConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *StreamingRecognitionConfig) GetSingleUtterance() bool { + if m != nil { + return m.SingleUtterance + } + return false +} + +func (m *StreamingRecognitionConfig) GetInterimResults() bool { + if m != nil { + return m.InterimResults + } + return false +} + +// Provides information to the recognizer that specifies how to process the +// request. +type RecognitionConfig struct { + // Encoding of audio data sent in all `RecognitionAudio` messages. + // This field is optional for `FLAC` and `WAV` audio files and required + // for all other audio formats. For details, see + // [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. + Encoding RecognitionConfig_AudioEncoding `protobuf:"varint,1,opt,name=encoding,proto3,enum=google.cloud.speech.v1p1beta1.RecognitionConfig_AudioEncoding" json:"encoding,omitempty"` + // Sample rate in Hertz of the audio data sent in all + // `RecognitionAudio` messages. Valid values are: 8000-48000. + // 16000 is optimal. For best results, set the sampling rate of the audio + // source to 16000 Hz. If that's not possible, use the native sample rate of + // the audio source (instead of re-sampling). + // This field is optional for `FLAC` and `WAV` audio files and required + // for all other audio formats. For details, see + // [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. + SampleRateHertz int32 `protobuf:"varint,2,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // *Optional* The number of channels in the input audio data. + // ONLY set this for MULTI-CHANNEL recognition. 
+ // Valid values for LINEAR16 and FLAC are `1`-`8`. + // Valid values for OGG_OPUS are '1'-'254'. + // Valid value for MULAW, AMR, AMR_WB and SPEEX_WITH_HEADER_BYTE is only `1`. + // If `0` or omitted, defaults to one channel (mono). + // Note: We only recognize the first channel by default. + // To perform independent recognition on each channel set + // `enable_separate_recognition_per_channel` to 'true'. + AudioChannelCount int32 `protobuf:"varint,7,opt,name=audio_channel_count,json=audioChannelCount,proto3" json:"audio_channel_count,omitempty"` + // This needs to be set to ‘true’ explicitly and `audio_channel_count` > 1 + // to get each channel recognized separately. The recognition result will + // contain a `channel_tag` field to state which channel that result belongs + // to. If this is not true, we will only recognize the first channel. The + // request is billed cumulatively for all channels recognized: + // `audio_channel_count` multiplied by the length of the audio. + EnableSeparateRecognitionPerChannel bool `protobuf:"varint,12,opt,name=enable_separate_recognition_per_channel,json=enableSeparateRecognitionPerChannel,proto3" json:"enable_separate_recognition_per_channel,omitempty"` + // *Required* The language of the supplied audio as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. + // Example: "en-US". + // See [Language Support](/speech-to-text/docs/languages) + // for a list of the currently supported language codes. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // *Optional* A list of up to 3 additional + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags, + // listing possible alternative languages of the supplied audio. + // See [Language Support](/speech-to-text/docs/languages) + // for a list of the currently supported language codes. + // If alternative languages are listed, recognition result will contain + // recognition in the most likely language detected including the main + // language_code. The recognition result will include the language tag + // of the language detected in the audio. + // Note: This feature is only supported for Voice Command and Voice Search + // use cases and performance may vary for other use cases (e.g., phone call + // transcription). + AlternativeLanguageCodes []string `protobuf:"bytes,18,rep,name=alternative_language_codes,json=alternativeLanguageCodes,proto3" json:"alternative_language_codes,omitempty"` + // *Optional* Maximum number of recognition hypotheses to be returned. + // Specifically, the maximum number of `SpeechRecognitionAlternative` messages + // within each `SpeechRecognitionResult`. + // The server may return fewer than `max_alternatives`. + // Valid values are `0`-`30`. A value of `0` or `1` will return a maximum of + // one. If omitted, will return a maximum of one. + MaxAlternatives int32 `protobuf:"varint,4,opt,name=max_alternatives,json=maxAlternatives,proto3" json:"max_alternatives,omitempty"` + // *Optional* If set to `true`, the server will attempt to filter out + // profanities, replacing all but the initial character in each filtered word + // with asterisks, e.g. "f***". If set to `false` or omitted, profanities + // won't be filtered out. + ProfanityFilter bool `protobuf:"varint,5,opt,name=profanity_filter,json=profanityFilter,proto3" json:"profanity_filter,omitempty"` + // *Optional* array of + // [SpeechContext][google.cloud.speech.v1p1beta1.SpeechContext]. 
A means to + // provide context to assist the speech recognition. For more information, see + // [Phrase Hints](/speech-to-text/docs/basics#phrase-hints). + SpeechContexts []*SpeechContext `protobuf:"bytes,6,rep,name=speech_contexts,json=speechContexts,proto3" json:"speech_contexts,omitempty"` + // *Optional* If `true`, the top result includes a list of words and + // the start and end time offsets (timestamps) for those words. If + // `false`, no word-level time offset information is returned. The default is + // `false`. + EnableWordTimeOffsets bool `protobuf:"varint,8,opt,name=enable_word_time_offsets,json=enableWordTimeOffsets,proto3" json:"enable_word_time_offsets,omitempty"` + // *Optional* If `true`, the top result includes a list of words and the + // confidence for those words. If `false`, no word-level confidence + // information is returned. The default is `false`. + EnableWordConfidence bool `protobuf:"varint,15,opt,name=enable_word_confidence,json=enableWordConfidence,proto3" json:"enable_word_confidence,omitempty"` + // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // This feature is only available in select languages. Setting this for + // requests in other languages has no effect at all. + // The default 'false' value does not add punctuation to result hypotheses. + // Note: This is currently offered as an experimental service, complimentary + // to all users. In the future this may be exclusively available as a + // premium feature. + EnableAutomaticPunctuation bool `protobuf:"varint,11,opt,name=enable_automatic_punctuation,json=enableAutomaticPunctuation,proto3" json:"enable_automatic_punctuation,omitempty"` + // *Optional* If 'true', enables speaker detection for each recognized word in + // the top alternative of the recognition result using a speaker_tag provided + // in the WordInfo. + // Note: When this is true, we send all the words from the beginning of the + // audio for the top alternative in every consecutive STREAMING responses. + // This is done in order to improve our speaker tags as our models learn to + // identify the speakers in the conversation over time. + // For non-streaming requests, the diarization results will be provided only + // in the top alternative of the FINAL SpeechRecognitionResult. + EnableSpeakerDiarization bool `protobuf:"varint,16,opt,name=enable_speaker_diarization,json=enableSpeakerDiarization,proto3" json:"enable_speaker_diarization,omitempty"` + // *Optional* + // If set, specifies the estimated number of speakers in the conversation. + // If not set, defaults to '2'. + // Ignored unless enable_speaker_diarization is set to true." + DiarizationSpeakerCount int32 `protobuf:"varint,17,opt,name=diarization_speaker_count,json=diarizationSpeakerCount,proto3" json:"diarization_speaker_count,omitempty"` + // *Optional* Metadata regarding this request. + Metadata *RecognitionMetadata `protobuf:"bytes,9,opt,name=metadata,proto3" json:"metadata,omitempty"` + // *Optional* Which model to select for the given request. Select the model + // best suited to your domain to get best results. If a model is not + // explicitly specified, then we auto-select a model based on the parameters + // in the RecognitionConfig. + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + // + //
+ //   Model               | Description
+ //   ------------------- | ---------------------------------------------------------------
+ //   command_and_search  | Best for short queries such as voice commands or voice search.
+ //   phone_call          | Best for audio that originated from a phone call (typically
+ //                       | recorded at an 8khz sampling rate).
+ //   video               | Best for audio that originated from video or includes multiple
+ //                       | speakers. Ideally the audio is recorded at a 16khz or greater
+ //                       | sampling rate. This is a premium model that costs more than the
+ //                       | standard rate.
+ //   default             | Best for audio that is not one of the specific audio models.
+ //                       | For example, long-form audio. Ideally the audio is high-fidelity,
+ //                       | recorded at a 16khz or greater sampling rate.
+ Model string `protobuf:"bytes,13,opt,name=model,proto3" json:"model,omitempty"` + // *Optional* Set to true to use an enhanced model for speech recognition. + // If `use_enhanced` is set to true and the `model` field is not set, then + // an appropriate enhanced model is chosen if: + // 1. project is eligible for requesting enhanced models + // 2. an enhanced model exists for the audio + // + // If `use_enhanced` is true and an enhanced version of the specified model + // does not exist, then the speech is recognized using the standard version + // of the specified model. + // + // Enhanced speech models require that you opt-in to data logging using + // instructions in the + // [documentation](/speech-to-text/docs/enable-data-logging). If you set + // `use_enhanced` to true and you have not enabled audio logging, then you + // will receive an error. + UseEnhanced bool `protobuf:"varint,14,opt,name=use_enhanced,json=useEnhanced,proto3" json:"use_enhanced,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognitionConfig) Reset() { *m = RecognitionConfig{} } +func (m *RecognitionConfig) String() string { return proto.CompactTextString(m) } +func (*RecognitionConfig) ProtoMessage() {} +func (*RecognitionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{4} +} +func (m *RecognitionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognitionConfig.Unmarshal(m, b) +} +func (m *RecognitionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognitionConfig.Marshal(b, m, deterministic) +} +func (dst *RecognitionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognitionConfig.Merge(dst, src) +} +func (m *RecognitionConfig) XXX_Size() int { + return xxx_messageInfo_RecognitionConfig.Size(m) +} +func (m *RecognitionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RecognitionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognitionConfig proto.InternalMessageInfo + +func (m *RecognitionConfig) GetEncoding() RecognitionConfig_AudioEncoding { + if m != nil { + return m.Encoding + } + return RecognitionConfig_ENCODING_UNSPECIFIED +} + +func (m *RecognitionConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *RecognitionConfig) GetAudioChannelCount() int32 { + if m != nil { + return m.AudioChannelCount + } + return 0 +} + +func (m *RecognitionConfig) GetEnableSeparateRecognitionPerChannel() bool { + if m != nil { + return m.EnableSeparateRecognitionPerChannel + } + return false +} + +func (m *RecognitionConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *RecognitionConfig) GetAlternativeLanguageCodes() []string { + if m != nil { + return m.AlternativeLanguageCodes + } + return nil +} + +func (m *RecognitionConfig) GetMaxAlternatives() int32 { + if m != nil { + return m.MaxAlternatives + } + return 0 +} + +func (m *RecognitionConfig) GetProfanityFilter() bool { + if m != nil { + return m.ProfanityFilter + } + return false +} + +func (m *RecognitionConfig) GetSpeechContexts() []*SpeechContext { + if m != nil { + return m.SpeechContexts + } + return nil +} + +func (m *RecognitionConfig) GetEnableWordTimeOffsets() bool { + if m != nil { + return m.EnableWordTimeOffsets + } + return false +} + +func (m *RecognitionConfig) GetEnableWordConfidence() bool { + if m != nil { + return 
m.EnableWordConfidence + } + return false +} + +func (m *RecognitionConfig) GetEnableAutomaticPunctuation() bool { + if m != nil { + return m.EnableAutomaticPunctuation + } + return false +} + +func (m *RecognitionConfig) GetEnableSpeakerDiarization() bool { + if m != nil { + return m.EnableSpeakerDiarization + } + return false +} + +func (m *RecognitionConfig) GetDiarizationSpeakerCount() int32 { + if m != nil { + return m.DiarizationSpeakerCount + } + return 0 +} + +func (m *RecognitionConfig) GetMetadata() *RecognitionMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *RecognitionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *RecognitionConfig) GetUseEnhanced() bool { + if m != nil { + return m.UseEnhanced + } + return false +} + +// Description of audio data to be recognized. +type RecognitionMetadata struct { + // The use case most closely describing the audio content to be recognized. + InteractionType RecognitionMetadata_InteractionType `protobuf:"varint,1,opt,name=interaction_type,json=interactionType,proto3,enum=google.cloud.speech.v1p1beta1.RecognitionMetadata_InteractionType" json:"interaction_type,omitempty"` + // The industry vertical to which this speech recognition request most + // closely applies. This is most indicative of the topics contained + // in the audio. Use the 6-digit NAICS code to identify the industry + // vertical - see https://www.naics.com/search/. + IndustryNaicsCodeOfAudio uint32 `protobuf:"varint,3,opt,name=industry_naics_code_of_audio,json=industryNaicsCodeOfAudio,proto3" json:"industry_naics_code_of_audio,omitempty"` + // The audio type that most closely describes the audio being recognized. + MicrophoneDistance RecognitionMetadata_MicrophoneDistance `protobuf:"varint,4,opt,name=microphone_distance,json=microphoneDistance,proto3,enum=google.cloud.speech.v1p1beta1.RecognitionMetadata_MicrophoneDistance" json:"microphone_distance,omitempty"` + // The original media the speech was recorded on. + OriginalMediaType RecognitionMetadata_OriginalMediaType `protobuf:"varint,5,opt,name=original_media_type,json=originalMediaType,proto3,enum=google.cloud.speech.v1p1beta1.RecognitionMetadata_OriginalMediaType" json:"original_media_type,omitempty"` + // The type of device the speech was recorded with. + RecordingDeviceType RecognitionMetadata_RecordingDeviceType `protobuf:"varint,6,opt,name=recording_device_type,json=recordingDeviceType,proto3,enum=google.cloud.speech.v1p1beta1.RecognitionMetadata_RecordingDeviceType" json:"recording_device_type,omitempty"` + // The device used to make the recording. Examples 'Nexus 5X' or + // 'Polycom SoundStation IP 6000' or 'POTS' or 'VoIP' or + // 'Cardioid Microphone'. + RecordingDeviceName string `protobuf:"bytes,7,opt,name=recording_device_name,json=recordingDeviceName,proto3" json:"recording_device_name,omitempty"` + // Mime type of the original audio file. For example `audio/m4a`, + // `audio/x-alaw-basic`, `audio/mp3`, `audio/3gpp`. + // A list of possible audio mime types is maintained at + // http://www.iana.org/assignments/media-types/media-types.xhtml#audio + OriginalMimeType string `protobuf:"bytes,8,opt,name=original_mime_type,json=originalMimeType,proto3" json:"original_mime_type,omitempty"` + // Obfuscated (privacy-protected) ID of the user, to identify number of + // unique users using the service. 
+ ObfuscatedId int64 `protobuf:"varint,9,opt,name=obfuscated_id,json=obfuscatedId,proto3" json:"obfuscated_id,omitempty"` + // Description of the content. Eg. "Recordings of federal supreme court + // hearings from 2012". + AudioTopic string `protobuf:"bytes,10,opt,name=audio_topic,json=audioTopic,proto3" json:"audio_topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognitionMetadata) Reset() { *m = RecognitionMetadata{} } +func (m *RecognitionMetadata) String() string { return proto.CompactTextString(m) } +func (*RecognitionMetadata) ProtoMessage() {} +func (*RecognitionMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{5} +} +func (m *RecognitionMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognitionMetadata.Unmarshal(m, b) +} +func (m *RecognitionMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognitionMetadata.Marshal(b, m, deterministic) +} +func (dst *RecognitionMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognitionMetadata.Merge(dst, src) +} +func (m *RecognitionMetadata) XXX_Size() int { + return xxx_messageInfo_RecognitionMetadata.Size(m) +} +func (m *RecognitionMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_RecognitionMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognitionMetadata proto.InternalMessageInfo + +func (m *RecognitionMetadata) GetInteractionType() RecognitionMetadata_InteractionType { + if m != nil { + return m.InteractionType + } + return RecognitionMetadata_INTERACTION_TYPE_UNSPECIFIED +} + +func (m *RecognitionMetadata) GetIndustryNaicsCodeOfAudio() uint32 { + if m != nil { + return m.IndustryNaicsCodeOfAudio + } + return 0 +} + +func (m *RecognitionMetadata) GetMicrophoneDistance() RecognitionMetadata_MicrophoneDistance { + if m != nil { + return m.MicrophoneDistance + } + return RecognitionMetadata_MICROPHONE_DISTANCE_UNSPECIFIED +} + +func (m *RecognitionMetadata) GetOriginalMediaType() RecognitionMetadata_OriginalMediaType { + if m != nil { + return m.OriginalMediaType + } + return RecognitionMetadata_ORIGINAL_MEDIA_TYPE_UNSPECIFIED +} + +func (m *RecognitionMetadata) GetRecordingDeviceType() RecognitionMetadata_RecordingDeviceType { + if m != nil { + return m.RecordingDeviceType + } + return RecognitionMetadata_RECORDING_DEVICE_TYPE_UNSPECIFIED +} + +func (m *RecognitionMetadata) GetRecordingDeviceName() string { + if m != nil { + return m.RecordingDeviceName + } + return "" +} + +func (m *RecognitionMetadata) GetOriginalMimeType() string { + if m != nil { + return m.OriginalMimeType + } + return "" +} + +func (m *RecognitionMetadata) GetObfuscatedId() int64 { + if m != nil { + return m.ObfuscatedId + } + return 0 +} + +func (m *RecognitionMetadata) GetAudioTopic() string { + if m != nil { + return m.AudioTopic + } + return "" +} + +// Provides "hints" to the speech recognizer to favor specific words and phrases +// in the results. +type SpeechContext struct { + // *Optional* A list of strings containing words and phrases "hints" so that + // the speech recognition is more likely to recognize them. This can be used + // to improve the accuracy for specific words and phrases, for example, if + // specific commands are typically spoken by the user. This can also be used + // to add additional words to the vocabulary of the recognizer. See + // [usage limits](/speech-to-text/quotas#content). 
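Editorial aside: the phrase-hint mechanism documented above is wired into `RecognitionConfig` through its `SpeechContexts` field (see `GetSpeechContexts` earlier in this file). A minimal sketch, assuming this generated package is imported under the alias `speechpb`; the alias and import path are my choice and are inferred from the vendor layout, not shown in this hunk:

package main

import (
	"fmt"

	speechpb "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1"
)

func main() {
	// Bias recognition toward easily confused words via SpeechContext hints.
	cfg := &speechpb.RecognitionConfig{
		LanguageCode:    "en-US",
		SampleRateHertz: 16000,
		SpeechContexts: []*speechpb.SpeechContext{
			{Phrases: []string{"fair", "fare"}},
		},
	}
	fmt.Println(cfg.GetSpeechContexts()[0].GetPhrases())
}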
+ Phrases []string `protobuf:"bytes,1,rep,name=phrases,proto3" json:"phrases,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechContext) Reset() { *m = SpeechContext{} } +func (m *SpeechContext) String() string { return proto.CompactTextString(m) } +func (*SpeechContext) ProtoMessage() {} +func (*SpeechContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{6} +} +func (m *SpeechContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechContext.Unmarshal(m, b) +} +func (m *SpeechContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechContext.Marshal(b, m, deterministic) +} +func (dst *SpeechContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechContext.Merge(dst, src) +} +func (m *SpeechContext) XXX_Size() int { + return xxx_messageInfo_SpeechContext.Size(m) +} +func (m *SpeechContext) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechContext proto.InternalMessageInfo + +func (m *SpeechContext) GetPhrases() []string { + if m != nil { + return m.Phrases + } + return nil +} + +// Contains audio data in the encoding specified in the `RecognitionConfig`. +// Either `content` or `uri` must be supplied. Supplying both or neither +// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. +// See [content limits](/speech-to-text/quotas#content). +type RecognitionAudio struct { + // The audio source, which is either inline content or a Google Cloud + // Storage uri. + // + // Types that are valid to be assigned to AudioSource: + // *RecognitionAudio_Content + // *RecognitionAudio_Uri + AudioSource isRecognitionAudio_AudioSource `protobuf_oneof:"audio_source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognitionAudio) Reset() { *m = RecognitionAudio{} } +func (m *RecognitionAudio) String() string { return proto.CompactTextString(m) } +func (*RecognitionAudio) ProtoMessage() {} +func (*RecognitionAudio) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{7} +} +func (m *RecognitionAudio) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognitionAudio.Unmarshal(m, b) +} +func (m *RecognitionAudio) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognitionAudio.Marshal(b, m, deterministic) +} +func (dst *RecognitionAudio) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognitionAudio.Merge(dst, src) +} +func (m *RecognitionAudio) XXX_Size() int { + return xxx_messageInfo_RecognitionAudio.Size(m) +} +func (m *RecognitionAudio) XXX_DiscardUnknown() { + xxx_messageInfo_RecognitionAudio.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognitionAudio proto.InternalMessageInfo + +type isRecognitionAudio_AudioSource interface { + isRecognitionAudio_AudioSource() +} + +type RecognitionAudio_Content struct { + Content []byte `protobuf:"bytes,1,opt,name=content,proto3,oneof"` +} + +type RecognitionAudio_Uri struct { + Uri string `protobuf:"bytes,2,opt,name=uri,proto3,oneof"` +} + +func (*RecognitionAudio_Content) isRecognitionAudio_AudioSource() {} + +func (*RecognitionAudio_Uri) isRecognitionAudio_AudioSource() {} + +func (m *RecognitionAudio) GetAudioSource() isRecognitionAudio_AudioSource { + if m != nil { + return m.AudioSource + } + return nil +} + +func (m 
*RecognitionAudio) GetContent() []byte { + if x, ok := m.GetAudioSource().(*RecognitionAudio_Content); ok { + return x.Content + } + return nil +} + +func (m *RecognitionAudio) GetUri() string { + if x, ok := m.GetAudioSource().(*RecognitionAudio_Uri); ok { + return x.Uri + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*RecognitionAudio) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RecognitionAudio_OneofMarshaler, _RecognitionAudio_OneofUnmarshaler, _RecognitionAudio_OneofSizer, []interface{}{ + (*RecognitionAudio_Content)(nil), + (*RecognitionAudio_Uri)(nil), + } +} + +func _RecognitionAudio_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RecognitionAudio) + // audio_source + switch x := m.AudioSource.(type) { + case *RecognitionAudio_Content: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Content) + case *RecognitionAudio_Uri: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uri) + case nil: + default: + return fmt.Errorf("RecognitionAudio.AudioSource has unexpected type %T", x) + } + return nil +} + +func _RecognitionAudio_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RecognitionAudio) + switch tag { + case 1: // audio_source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.AudioSource = &RecognitionAudio_Content{x} + return true, err + case 2: // audio_source.uri + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.AudioSource = &RecognitionAudio_Uri{x} + return true, err + default: + return false, nil + } +} + +func _RecognitionAudio_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RecognitionAudio) + // audio_source + switch x := m.AudioSource.(type) { + case *RecognitionAudio_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case *RecognitionAudio_Uri: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uri))) + n += len(x.Uri) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The only message returned to the client by the `Recognize` method. It +// contains the result as zero or more sequential `SpeechRecognitionResult` +// messages. +type RecognizeResponse struct { + // Output only. Sequential list of transcription results corresponding to + // sequential portions of audio. 
+ Results []*SpeechRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecognizeResponse) Reset() { *m = RecognizeResponse{} } +func (m *RecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*RecognizeResponse) ProtoMessage() {} +func (*RecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{8} +} +func (m *RecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecognizeResponse.Unmarshal(m, b) +} +func (m *RecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *RecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecognizeResponse.Merge(dst, src) +} +func (m *RecognizeResponse) XXX_Size() int { + return xxx_messageInfo_RecognizeResponse.Size(m) +} +func (m *RecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RecognizeResponse proto.InternalMessageInfo + +func (m *RecognizeResponse) GetResults() []*SpeechRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +// The only message returned to the client by the `LongRunningRecognize` method. +// It contains the result as zero or more sequential `SpeechRecognitionResult` +// messages. It is included in the `result.response` field of the `Operation` +// returned by the `GetOperation` call of the `google::longrunning::Operations` +// service. +type LongRunningRecognizeResponse struct { + // Output only. Sequential list of transcription results corresponding to + // sequential portions of audio. + Results []*SpeechRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeResponse) Reset() { *m = LongRunningRecognizeResponse{} } +func (m *LongRunningRecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeResponse) ProtoMessage() {} +func (*LongRunningRecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{9} +} +func (m *LongRunningRecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeResponse.Unmarshal(m, b) +} +func (m *LongRunningRecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeResponse.Merge(dst, src) +} +func (m *LongRunningRecognizeResponse) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeResponse.Size(m) +} +func (m *LongRunningRecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeResponse proto.InternalMessageInfo + +func (m *LongRunningRecognizeResponse) GetResults() []*SpeechRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +// Describes the progress of a long-running `LongRunningRecognize` call. 
It is +// included in the `metadata` field of the `Operation` returned by the +// `GetOperation` call of the `google::longrunning::Operations` service. +type LongRunningRecognizeMetadata struct { + // Approximate percentage of audio processed thus far. Guaranteed to be 100 + // when the audio is fully processed and the results are available. + ProgressPercent int32 `protobuf:"varint,1,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent processing update. + LastUpdateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=last_update_time,json=lastUpdateTime,proto3" json:"last_update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LongRunningRecognizeMetadata) Reset() { *m = LongRunningRecognizeMetadata{} } +func (m *LongRunningRecognizeMetadata) String() string { return proto.CompactTextString(m) } +func (*LongRunningRecognizeMetadata) ProtoMessage() {} +func (*LongRunningRecognizeMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{10} +} +func (m *LongRunningRecognizeMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LongRunningRecognizeMetadata.Unmarshal(m, b) +} +func (m *LongRunningRecognizeMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LongRunningRecognizeMetadata.Marshal(b, m, deterministic) +} +func (dst *LongRunningRecognizeMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LongRunningRecognizeMetadata.Merge(dst, src) +} +func (m *LongRunningRecognizeMetadata) XXX_Size() int { + return xxx_messageInfo_LongRunningRecognizeMetadata.Size(m) +} +func (m *LongRunningRecognizeMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LongRunningRecognizeMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LongRunningRecognizeMetadata proto.InternalMessageInfo + +func (m *LongRunningRecognizeMetadata) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *LongRunningRecognizeMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *LongRunningRecognizeMetadata) GetLastUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.LastUpdateTime + } + return nil +} + +// `StreamingRecognizeResponse` is the only message returned to the client by +// `StreamingRecognize`. A series of zero or more `StreamingRecognizeResponse` +// messages are streamed back to the client. If there is no recognizable +// audio, and `single_utterance` is set to false, then no messages are streamed +// back to the client. +// +// Here's an example of a series of ten `StreamingRecognizeResponse`s that might +// be returned while processing audio: +// +// 1. results { alternatives { transcript: "tube" } stability: 0.01 } +// +// 2. results { alternatives { transcript: "to be a" } stability: 0.01 } +// +// 3. results { alternatives { transcript: "to be" } stability: 0.9 } +// results { alternatives { transcript: " or not to be" } stability: 0.01 } +// +// 4. results { alternatives { transcript: "to be or not to be" +// confidence: 0.92 } +// alternatives { transcript: "to bee or not to bee" } +// is_final: true } +// +// 5. 
results { alternatives { transcript: " that's" } stability: 0.01 } +// +// 6. results { alternatives { transcript: " that is" } stability: 0.9 } +// results { alternatives { transcript: " the question" } stability: 0.01 } +// +// 7. results { alternatives { transcript: " that is the question" +// confidence: 0.98 } +// alternatives { transcript: " that was the question" } +// is_final: true } +// +// Notes: +// +// - Only two of the above responses #4 and #7 contain final results; they are +// indicated by `is_final: true`. Concatenating these together generates the +// full transcript: "to be or not to be that is the question". +// +// - The others contain interim `results`. #3 and #6 contain two interim +// `results`: the first portion has a high stability and is less likely to +// change; the second portion has a low stability and is very likely to +// change. A UI designer might choose to show only high stability `results`. +// +// - The specific `stability` and `confidence` values shown above are only for +// illustrative purposes. Actual values may vary. +// +// - In each response, only one of these fields will be set: +// `error`, +// `speech_event_type`, or +// one or more (repeated) `results`. +type StreamingRecognizeResponse struct { + // Output only. If set, returns a [google.rpc.Status][google.rpc.Status] + // message that specifies the error for the operation. + Error *status.Status `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + // Output only. This repeated list contains zero or more results that + // correspond to consecutive portions of the audio currently being processed. + // It contains zero or one `is_final=true` result (the newly settled portion), + // followed by zero or more `is_final=false` results (the interim results). + Results []*StreamingRecognitionResult `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + // Output only. Indicates the type of speech event. 
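To make the interim/final behaviour described in the comment above concrete, here is a small, hedged sketch of a client-side receive loop. It assumes a `Speech_StreamingRecognizeClient` that has already been opened and fed audio elsewhere, and that this generated package is imported as `speechpb` (alias and import path are mine):

package example

import (
	"fmt"
	"io"
	"log"

	speechpb "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1"
)

// drainResponses prints interim and final hypotheses until the stream closes.
func drainResponses(stream speechpb.Speech_StreamingRecognizeClient) {
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return // server closed the stream
		}
		if err != nil {
			log.Fatalf("recv: %v", err)
		}
		if resp.GetError() != nil {
			log.Fatalf("recognition error: %v", resp.GetError())
		}
		for _, result := range resp.GetResults() {
			if len(result.GetAlternatives()) == 0 {
				continue
			}
			top := result.GetAlternatives()[0]
			if result.GetIsFinal() {
				fmt.Printf("final: %q (confidence %.2f)\n", top.GetTranscript(), top.GetConfidence())
			} else {
				fmt.Printf("interim (stability %.2f): %q\n", result.GetStability(), top.GetTranscript())
			}
		}
	}
}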
+ SpeechEventType StreamingRecognizeResponse_SpeechEventType `protobuf:"varint,4,opt,name=speech_event_type,json=speechEventType,proto3,enum=google.cloud.speech.v1p1beta1.StreamingRecognizeResponse_SpeechEventType" json:"speech_event_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognizeResponse) Reset() { *m = StreamingRecognizeResponse{} } +func (m *StreamingRecognizeResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognizeResponse) ProtoMessage() {} +func (*StreamingRecognizeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{11} +} +func (m *StreamingRecognizeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognizeResponse.Unmarshal(m, b) +} +func (m *StreamingRecognizeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognizeResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognizeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognizeResponse.Merge(dst, src) +} +func (m *StreamingRecognizeResponse) XXX_Size() int { + return xxx_messageInfo_StreamingRecognizeResponse.Size(m) +} +func (m *StreamingRecognizeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognizeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognizeResponse proto.InternalMessageInfo + +func (m *StreamingRecognizeResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *StreamingRecognizeResponse) GetResults() []*StreamingRecognitionResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *StreamingRecognizeResponse) GetSpeechEventType() StreamingRecognizeResponse_SpeechEventType { + if m != nil { + return m.SpeechEventType + } + return StreamingRecognizeResponse_SPEECH_EVENT_UNSPECIFIED +} + +// A streaming speech recognition result corresponding to a portion of the audio +// that is currently being processed. +type StreamingRecognitionResult struct { + // Output only. May contain one or more recognition hypotheses (up to the + // maximum specified in `max_alternatives`). + // These alternatives are ordered in terms of accuracy, with the top (first) + // alternative being the most probable, as ranked by the recognizer. + Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + // Output only. If `false`, this `StreamingRecognitionResult` represents an + // interim result that may change. If `true`, this is the final time the + // speech service will return this particular `StreamingRecognitionResult`, + // the recognizer will not return any further hypotheses for this portion of + // the transcript and corresponding audio. + IsFinal bool `protobuf:"varint,2,opt,name=is_final,json=isFinal,proto3" json:"is_final,omitempty"` + // Output only. An estimate of the likelihood that the recognizer will not + // change its guess about this interim result. Values range from 0.0 + // (completely unstable) to 1.0 (completely stable). + // This field is only provided for interim results (`is_final=false`). + // The default of 0.0 is a sentinel value indicating `stability` was not set. + Stability float32 `protobuf:"fixed32,3,opt,name=stability,proto3" json:"stability,omitempty"` + // Output only. 
Time offset of the end of this result relative to the + // beginning of the audio. + ResultEndTime *duration.Duration `protobuf:"bytes,4,opt,name=result_end_time,json=resultEndTime,proto3" json:"result_end_time,omitempty"` + // For multi-channel audio, this is the channel number corresponding to the + // recognized result for the audio from that channel. + // For audio_channel_count = N, its output values can range from '1' to 'N'. + ChannelTag int32 `protobuf:"varint,5,opt,name=channel_tag,json=channelTag,proto3" json:"channel_tag,omitempty"` + // Output only. The + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + // language in this result. This language code was detected to have the most + // likelihood of being spoken in the audio. + LanguageCode string `protobuf:"bytes,6,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingRecognitionResult) Reset() { *m = StreamingRecognitionResult{} } +func (m *StreamingRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*StreamingRecognitionResult) ProtoMessage() {} +func (*StreamingRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{12} +} +func (m *StreamingRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingRecognitionResult.Unmarshal(m, b) +} +func (m *StreamingRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *StreamingRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingRecognitionResult.Merge(dst, src) +} +func (m *StreamingRecognitionResult) XXX_Size() int { + return xxx_messageInfo_StreamingRecognitionResult.Size(m) +} +func (m *StreamingRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingRecognitionResult proto.InternalMessageInfo + +func (m *StreamingRecognitionResult) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +func (m *StreamingRecognitionResult) GetIsFinal() bool { + if m != nil { + return m.IsFinal + } + return false +} + +func (m *StreamingRecognitionResult) GetStability() float32 { + if m != nil { + return m.Stability + } + return 0 +} + +func (m *StreamingRecognitionResult) GetResultEndTime() *duration.Duration { + if m != nil { + return m.ResultEndTime + } + return nil +} + +func (m *StreamingRecognitionResult) GetChannelTag() int32 { + if m != nil { + return m.ChannelTag + } + return 0 +} + +func (m *StreamingRecognitionResult) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// A speech recognition result corresponding to a portion of the audio. +type SpeechRecognitionResult struct { + // Output only. May contain one or more recognition hypotheses (up to the + // maximum specified in `max_alternatives`). + // These alternatives are ordered in terms of accuracy, with the top (first) + // alternative being the most probable, as ranked by the recognizer. 
+ Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + // For multi-channel audio, this is the channel number corresponding to the + // recognized result for the audio from that channel. + // For audio_channel_count = N, its output values can range from '1' to 'N'. + ChannelTag int32 `protobuf:"varint,2,opt,name=channel_tag,json=channelTag,proto3" json:"channel_tag,omitempty"` + // Output only. The + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + // language in this result. This language code was detected to have the most + // likelihood of being spoken in the audio. + LanguageCode string `protobuf:"bytes,5,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionResult) Reset() { *m = SpeechRecognitionResult{} } +func (m *SpeechRecognitionResult) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionResult) ProtoMessage() {} +func (*SpeechRecognitionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{13} +} +func (m *SpeechRecognitionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionResult.Unmarshal(m, b) +} +func (m *SpeechRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionResult.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionResult.Merge(dst, src) +} +func (m *SpeechRecognitionResult) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionResult.Size(m) +} +func (m *SpeechRecognitionResult) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionResult proto.InternalMessageInfo + +func (m *SpeechRecognitionResult) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +func (m *SpeechRecognitionResult) GetChannelTag() int32 { + if m != nil { + return m.ChannelTag + } + return 0 +} + +func (m *SpeechRecognitionResult) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Alternative hypotheses (a.k.a. n-best list). +type SpeechRecognitionAlternative struct { + // Output only. Transcript text representing the words that the user spoke. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. This field is set only for the top alternative of a non-streaming + // result or, of a streaming result where `is_final=true`. + // This field is not guaranteed to be accurate and users should not rely on it + // to be always provided. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Output only. A list of word-specific information for each recognized word. + // Note: When `enable_speaker_diarization` is true, you will see all the words + // from the beginning of the audio. 
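A short sketch of how the per-word data below might be consumed, assuming `enable_word_time_offsets` and `enable_speaker_diarization` were requested in the `RecognitionConfig`; the `speechpb` alias and import path are assumptions on my part:

package example

import (
	"fmt"

	speechpb "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1"
)

// printWords lists each recognized word with its time offsets and speaker tag.
// Per the field comments, StartTime/EndTime are only populated when
// enable_word_time_offsets=true, and SpeakerTag only when
// enable_speaker_diarization=true.
func printWords(alt *speechpb.SpeechRecognitionAlternative) {
	for _, w := range alt.GetWords() {
		fmt.Printf("%s\t%v-%v\tspeaker %d\n",
			w.GetWord(), w.GetStartTime(), w.GetEndTime(), w.GetSpeakerTag())
	}
}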
+ Words []*WordInfo `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionAlternative) Reset() { *m = SpeechRecognitionAlternative{} } +func (m *SpeechRecognitionAlternative) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionAlternative) ProtoMessage() {} +func (*SpeechRecognitionAlternative) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{14} +} +func (m *SpeechRecognitionAlternative) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionAlternative.Unmarshal(m, b) +} +func (m *SpeechRecognitionAlternative) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionAlternative.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionAlternative) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionAlternative.Merge(dst, src) +} +func (m *SpeechRecognitionAlternative) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionAlternative.Size(m) +} +func (m *SpeechRecognitionAlternative) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionAlternative.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionAlternative proto.InternalMessageInfo + +func (m *SpeechRecognitionAlternative) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *SpeechRecognitionAlternative) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *SpeechRecognitionAlternative) GetWords() []*WordInfo { + if m != nil { + return m.Words + } + return nil +} + +// Word-specific information for recognized words. +type WordInfo struct { + // Output only. Time offset relative to the beginning of the audio, + // and corresponding to the start of the spoken word. + // This field is only set if `enable_word_time_offsets=true` and only + // in the top hypothesis. + // This is an experimental feature and the accuracy of the time offset can + // vary. + StartTime *duration.Duration `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time offset relative to the beginning of the audio, + // and corresponding to the end of the spoken word. + // This field is only set if `enable_word_time_offsets=true` and only + // in the top hypothesis. + // This is an experimental feature and the accuracy of the time offset can + // vary. + EndTime *duration.Duration `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The word corresponding to this set of information. + Word string `protobuf:"bytes,3,opt,name=word,proto3" json:"word,omitempty"` + // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. This field is set only for the top alternative of a non-streaming + // result or, of a streaming result where `is_final=true`. + // This field is not guaranteed to be accurate and users should not rely on it + // to be always provided. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Output only. A distinct integer value is assigned for every speaker within + // the audio. 
This field specifies which one of those speakers was detected to + // have spoken this word. Value ranges from '1' to diarization_speaker_count. + // speaker_tag is set if enable_speaker_diarization = 'true' and only in the + // top alternative. + SpeakerTag int32 `protobuf:"varint,5,opt,name=speaker_tag,json=speakerTag,proto3" json:"speaker_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WordInfo) Reset() { *m = WordInfo{} } +func (m *WordInfo) String() string { return proto.CompactTextString(m) } +func (*WordInfo) ProtoMessage() {} +func (*WordInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_speech_71c4a621c1d5e720, []int{15} +} +func (m *WordInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WordInfo.Unmarshal(m, b) +} +func (m *WordInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WordInfo.Marshal(b, m, deterministic) +} +func (dst *WordInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WordInfo.Merge(dst, src) +} +func (m *WordInfo) XXX_Size() int { + return xxx_messageInfo_WordInfo.Size(m) +} +func (m *WordInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WordInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WordInfo proto.InternalMessageInfo + +func (m *WordInfo) GetStartTime() *duration.Duration { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WordInfo) GetEndTime() *duration.Duration { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *WordInfo) GetWord() string { + if m != nil { + return m.Word + } + return "" +} + +func (m *WordInfo) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *WordInfo) GetSpeakerTag() int32 { + if m != nil { + return m.SpeakerTag + } + return 0 +} + +func init() { + proto.RegisterType((*RecognizeRequest)(nil), "google.cloud.speech.v1p1beta1.RecognizeRequest") + proto.RegisterType((*LongRunningRecognizeRequest)(nil), "google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest") + proto.RegisterType((*StreamingRecognizeRequest)(nil), "google.cloud.speech.v1p1beta1.StreamingRecognizeRequest") + proto.RegisterType((*StreamingRecognitionConfig)(nil), "google.cloud.speech.v1p1beta1.StreamingRecognitionConfig") + proto.RegisterType((*RecognitionConfig)(nil), "google.cloud.speech.v1p1beta1.RecognitionConfig") + proto.RegisterType((*RecognitionMetadata)(nil), "google.cloud.speech.v1p1beta1.RecognitionMetadata") + proto.RegisterType((*SpeechContext)(nil), "google.cloud.speech.v1p1beta1.SpeechContext") + proto.RegisterType((*RecognitionAudio)(nil), "google.cloud.speech.v1p1beta1.RecognitionAudio") + proto.RegisterType((*RecognizeResponse)(nil), "google.cloud.speech.v1p1beta1.RecognizeResponse") + proto.RegisterType((*LongRunningRecognizeResponse)(nil), "google.cloud.speech.v1p1beta1.LongRunningRecognizeResponse") + proto.RegisterType((*LongRunningRecognizeMetadata)(nil), "google.cloud.speech.v1p1beta1.LongRunningRecognizeMetadata") + proto.RegisterType((*StreamingRecognizeResponse)(nil), "google.cloud.speech.v1p1beta1.StreamingRecognizeResponse") + proto.RegisterType((*StreamingRecognitionResult)(nil), "google.cloud.speech.v1p1beta1.StreamingRecognitionResult") + proto.RegisterType((*SpeechRecognitionResult)(nil), "google.cloud.speech.v1p1beta1.SpeechRecognitionResult") + proto.RegisterType((*SpeechRecognitionAlternative)(nil), "google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative") + 
proto.RegisterType((*WordInfo)(nil), "google.cloud.speech.v1p1beta1.WordInfo") + proto.RegisterEnum("google.cloud.speech.v1p1beta1.RecognitionConfig_AudioEncoding", RecognitionConfig_AudioEncoding_name, RecognitionConfig_AudioEncoding_value) + proto.RegisterEnum("google.cloud.speech.v1p1beta1.RecognitionMetadata_InteractionType", RecognitionMetadata_InteractionType_name, RecognitionMetadata_InteractionType_value) + proto.RegisterEnum("google.cloud.speech.v1p1beta1.RecognitionMetadata_MicrophoneDistance", RecognitionMetadata_MicrophoneDistance_name, RecognitionMetadata_MicrophoneDistance_value) + proto.RegisterEnum("google.cloud.speech.v1p1beta1.RecognitionMetadata_OriginalMediaType", RecognitionMetadata_OriginalMediaType_name, RecognitionMetadata_OriginalMediaType_value) + proto.RegisterEnum("google.cloud.speech.v1p1beta1.RecognitionMetadata_RecordingDeviceType", RecognitionMetadata_RecordingDeviceType_name, RecognitionMetadata_RecordingDeviceType_value) + proto.RegisterEnum("google.cloud.speech.v1p1beta1.StreamingRecognizeResponse_SpeechEventType", StreamingRecognizeResponse_SpeechEventType_name, StreamingRecognizeResponse_SpeechEventType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SpeechClient is the client API for Speech service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SpeechClient interface { + // Performs synchronous speech recognition: receive results after all audio + // has been sent and processed. + Recognize(ctx context.Context, in *RecognizeRequest, opts ...grpc.CallOption) (*RecognizeResponse, error) + // Performs asynchronous speech recognition: receive results via the + // google.longrunning.Operations interface. Returns either an + // `Operation.error` or an `Operation.response` which contains + // a `LongRunningRecognizeResponse` message. + LongRunningRecognize(ctx context.Context, in *LongRunningRecognizeRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Performs bidirectional streaming speech recognition: receive results while + // sending audio. This method is only available via the gRPC API (not REST). + StreamingRecognize(ctx context.Context, opts ...grpc.CallOption) (Speech_StreamingRecognizeClient, error) +} + +type speechClient struct { + cc *grpc.ClientConn +} + +func NewSpeechClient(cc *grpc.ClientConn) SpeechClient { + return &speechClient{cc} +} + +func (c *speechClient) Recognize(ctx context.Context, in *RecognizeRequest, opts ...grpc.CallOption) (*RecognizeResponse, error) { + out := new(RecognizeResponse) + err := c.cc.Invoke(ctx, "/google.cloud.speech.v1p1beta1.Speech/Recognize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *speechClient) LongRunningRecognize(ctx context.Context, in *LongRunningRecognizeRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.speech.v1p1beta1.Speech/LongRunningRecognize", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *speechClient) StreamingRecognize(ctx context.Context, opts ...grpc.CallOption) (Speech_StreamingRecognizeClient, error) { + stream, err := c.cc.NewStream(ctx, &_Speech_serviceDesc.Streams[0], "/google.cloud.speech.v1p1beta1.Speech/StreamingRecognize", opts...) + if err != nil { + return nil, err + } + x := &speechStreamingRecognizeClient{stream} + return x, nil +} + +type Speech_StreamingRecognizeClient interface { + Send(*StreamingRecognizeRequest) error + Recv() (*StreamingRecognizeResponse, error) + grpc.ClientStream +} + +type speechStreamingRecognizeClient struct { + grpc.ClientStream +} + +func (x *speechStreamingRecognizeClient) Send(m *StreamingRecognizeRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *speechStreamingRecognizeClient) Recv() (*StreamingRecognizeResponse, error) { + m := new(StreamingRecognizeResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SpeechServer is the server API for Speech service. +type SpeechServer interface { + // Performs synchronous speech recognition: receive results after all audio + // has been sent and processed. + Recognize(context.Context, *RecognizeRequest) (*RecognizeResponse, error) + // Performs asynchronous speech recognition: receive results via the + // google.longrunning.Operations interface. Returns either an + // `Operation.error` or an `Operation.response` which contains + // a `LongRunningRecognizeResponse` message. + LongRunningRecognize(context.Context, *LongRunningRecognizeRequest) (*longrunning.Operation, error) + // Performs bidirectional streaming speech recognition: receive results while + // sending audio. This method is only available via the gRPC API (not REST). 
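For context, a complete but hedged sketch of calling this generated client directly over gRPC. The `RecognizeRequest` field names (`Config`, `Audio`) come from earlier in this file and are assumed here, as are the import path, the `speechpb` alias, and the placeholder gs:// URI; a real call against speech.googleapis.com would additionally need OAuth per-RPC credentials, which this sketch omits:

package main

import (
	"context"
	"fmt"
	"log"

	speechpb "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"
)

func main() {
	// TLS transport only; production calls also need OAuth per-RPC credentials.
	conn, err := grpc.Dial("speech.googleapis.com:443",
		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")))
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := speechpb.NewSpeechClient(conn)
	req := &speechpb.RecognizeRequest{
		// Config and Audio are the request fields defined earlier in this file;
		// Encoding and other optional fields are omitted for brevity.
		Config: &speechpb.RecognitionConfig{
			LanguageCode:    "en-US",
			SampleRateHertz: 16000,
		},
		Audio: &speechpb.RecognitionAudio{
			// The audio source is a oneof: either inline Content or a storage Uri.
			AudioSource: &speechpb.RecognitionAudio_Uri{Uri: "gs://my-bucket/audio.flac"},
		},
	}

	resp, err := client.Recognize(context.Background(), req)
	if err != nil {
		log.Fatalf("recognize: %v", err)
	}
	for _, result := range resp.GetResults() {
		for _, alt := range result.GetAlternatives() {
			fmt.Printf("%q (confidence %.2f)\n", alt.GetTranscript(), alt.GetConfidence())
		}
	}
}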
+ StreamingRecognize(Speech_StreamingRecognizeServer) error +} + +func RegisterSpeechServer(s *grpc.Server, srv SpeechServer) { + s.RegisterService(&_Speech_serviceDesc, srv) +} + +func _Speech_Recognize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RecognizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpeechServer).Recognize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.speech.v1p1beta1.Speech/Recognize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpeechServer).Recognize(ctx, req.(*RecognizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Speech_LongRunningRecognize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LongRunningRecognizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpeechServer).LongRunningRecognize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.speech.v1p1beta1.Speech/LongRunningRecognize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpeechServer).LongRunningRecognize(ctx, req.(*LongRunningRecognizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Speech_StreamingRecognize_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SpeechServer).StreamingRecognize(&speechStreamingRecognizeServer{stream}) +} + +type Speech_StreamingRecognizeServer interface { + Send(*StreamingRecognizeResponse) error + Recv() (*StreamingRecognizeRequest, error) + grpc.ServerStream +} + +type speechStreamingRecognizeServer struct { + grpc.ServerStream +} + +func (x *speechStreamingRecognizeServer) Send(m *StreamingRecognizeResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *speechStreamingRecognizeServer) Recv() (*StreamingRecognizeRequest, error) { + m := new(StreamingRecognizeRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Speech_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.speech.v1p1beta1.Speech", + HandlerType: (*SpeechServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Recognize", + Handler: _Speech_Recognize_Handler, + }, + { + MethodName: "LongRunningRecognize", + Handler: _Speech_LongRunningRecognize_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingRecognize", + Handler: _Speech_StreamingRecognize_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/cloud/speech/v1p1beta1/cloud_speech.proto", +} + +func init() { + proto.RegisterFile("google/cloud/speech/v1p1beta1/cloud_speech.proto", fileDescriptor_cloud_speech_71c4a621c1d5e720) +} + +var fileDescriptor_cloud_speech_71c4a621c1d5e720 = []byte{ + // 2178 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x58, 0xbf, 0x73, 0xdb, 0xc8, + 0xf5, 0x37, 0x48, 0x51, 0x12, 0x9f, 0x7e, 0x41, 0x2b, 0xdf, 0x89, 0x96, 0x75, 0x67, 0x1b, 0x9e, + 0x3b, 0xfb, 0xee, 0x7b, 0x43, 0xd9, 0xfa, 0xde, 0x5c, 0xce, 0xbe, 0xe4, 0x26, 0x14, 0x00, 0x99, + 0x98, 0x21, 0x09, 0xce, 0x92, 0xb2, 0xe3, 0x6b, 0x76, 0x56, 0xc4, 0x92, 0xc2, 0x84, 0x04, 0x10, + 0x60, 0xe1, 0x58, 0x2e, 0xd3, 
0xa6, 0x48, 0x91, 0x99, 0x74, 0xa9, 0x72, 0x75, 0xfe, 0x80, 0x34, + 0x97, 0x26, 0x4d, 0x9a, 0x14, 0xe9, 0x52, 0xa5, 0xc8, 0x7f, 0x90, 0x26, 0x33, 0x69, 0x32, 0xbb, + 0x0b, 0x50, 0x10, 0x29, 0x5b, 0xb6, 0x26, 0x37, 0x93, 0x0e, 0xfb, 0x79, 0x3f, 0xf6, 0xbd, 0xb7, + 0x6f, 0xdf, 0xbe, 0x07, 0x78, 0x30, 0x0a, 0xc3, 0xd1, 0x98, 0xed, 0x0d, 0xc6, 0x61, 0xea, 0xed, + 0x25, 0x11, 0x63, 0x83, 0x93, 0xbd, 0x17, 0x0f, 0xa3, 0x87, 0xc7, 0x8c, 0xd3, 0x87, 0x0a, 0x26, + 0x0a, 0xae, 0x47, 0x71, 0xc8, 0x43, 0xf4, 0x81, 0x92, 0xa8, 0x4b, 0x52, 0x3d, 0x23, 0x4d, 0x25, + 0x76, 0x76, 0x33, 0x85, 0x34, 0xf2, 0xf7, 0x68, 0x10, 0x84, 0x9c, 0x72, 0x3f, 0x0c, 0x12, 0x25, + 0xbc, 0x73, 0x37, 0xa3, 0x8e, 0xc3, 0x60, 0x14, 0xa7, 0x41, 0xe0, 0x07, 0xa3, 0xbd, 0x30, 0x62, + 0xf1, 0x39, 0xa6, 0x1b, 0x19, 0x93, 0x5c, 0x1d, 0xa7, 0xc3, 0x3d, 0x1a, 0x9c, 0x66, 0xa4, 0x0f, + 0x67, 0x49, 0x5e, 0xaa, 0x64, 0x33, 0xfa, 0xcd, 0x59, 0x3a, 0x9b, 0x44, 0x3c, 0x17, 0xbe, 0x35, + 0x4b, 0xe4, 0xfe, 0x84, 0x25, 0x9c, 0x4e, 0xa2, 0x8c, 0x61, 0x3b, 0x63, 0x88, 0xa3, 0xc1, 0x5e, + 0xc2, 0x29, 0x4f, 0x33, 0x8b, 0x8c, 0xdf, 0x69, 0xa0, 0x63, 0x36, 0x08, 0x47, 0x81, 0xff, 0x8a, + 0x61, 0xf6, 0xb3, 0x94, 0x25, 0x1c, 0x35, 0x61, 0x71, 0x10, 0x06, 0x43, 0x7f, 0x54, 0xd3, 0x6e, + 0x6b, 0xf7, 0x57, 0xf6, 0x1f, 0xd4, 0xdf, 0x18, 0x99, 0x7a, 0xa6, 0x40, 0x58, 0x6b, 0x4a, 0x39, + 0x9c, 0xc9, 0x23, 0x1b, 0x2a, 0x34, 0xf5, 0xfc, 0xb0, 0x56, 0x92, 0x8a, 0xf6, 0xde, 0x5e, 0x51, + 0x43, 0x88, 0x61, 0x25, 0x6d, 0xfc, 0x5e, 0x83, 0x9b, 0xad, 0x30, 0x18, 0x61, 0x15, 0xd8, 0xff, + 0x7d, 0x83, 0xbf, 0xd3, 0xe0, 0x46, 0x8f, 0xc7, 0x8c, 0x4e, 0x2e, 0x32, 0x77, 0x08, 0x7a, 0x92, + 0x13, 0xc9, 0x39, 0xc3, 0x1f, 0x5d, 0xb2, 0xdf, 0xac, 0xce, 0x33, 0x0f, 0x9a, 0xd7, 0xf0, 0xc6, + 0x54, 0xa9, 0x82, 0xd0, 0x47, 0xb0, 0x26, 0xcd, 0x11, 0x7b, 0x70, 0x16, 0x70, 0xe9, 0xd4, 0x6a, + 0xf3, 0x1a, 0x5e, 0x95, 0xb0, 0xa9, 0xd0, 0x83, 0x2d, 0xd8, 0x3c, 0x33, 0x27, 0x56, 0x36, 0x1a, + 0x7f, 0xd0, 0x60, 0xe7, 0xf5, 0xbb, 0xfd, 0x17, 0x23, 0xfe, 0x09, 0xe8, 0x89, 0x1f, 0x8c, 0xc6, + 0x8c, 0xa4, 0x9c, 0xb3, 0x98, 0x06, 0x03, 0x26, 0xed, 0x5c, 0xc6, 0x1b, 0x0a, 0x3f, 0xca, 0x61, + 0x74, 0x0f, 0x36, 0xfc, 0x80, 0xb3, 0xd8, 0x9f, 0x90, 0x98, 0x25, 0xe9, 0x98, 0x27, 0xb5, 0xb2, + 0xe4, 0x5c, 0xcf, 0x60, 0xac, 0x50, 0xe3, 0x9f, 0xcb, 0xb0, 0x39, 0x6f, 0xf3, 0x37, 0xb0, 0xcc, + 0x82, 0x41, 0xe8, 0xf9, 0x81, 0xb2, 0x7a, 0x7d, 0xff, 0xeb, 0x77, 0xb5, 0xba, 0x2e, 0x4f, 0xd9, + 0xce, 0xb4, 0xe0, 0xa9, 0x3e, 0xf4, 0x29, 0x6c, 0x26, 0x74, 0x12, 0x8d, 0x19, 0x89, 0x29, 0x67, + 0xe4, 0x84, 0xc5, 0xfc, 0x95, 0x74, 0xa3, 0x82, 0x37, 0x14, 0x01, 0x53, 0xce, 0x9a, 0x02, 0x46, + 0x75, 0xd8, 0xca, 0x8e, 0xe5, 0x84, 0x06, 0x01, 0x1b, 0x93, 0x41, 0x98, 0x06, 0xbc, 0xb6, 0x24, + 0xb9, 0x37, 0xd5, 0xd1, 0x28, 0x8a, 0x29, 0x08, 0xa8, 0x0f, 0xf7, 0x58, 0x40, 0x8f, 0xc7, 0x8c, + 0x24, 0x2c, 0xa2, 0x52, 0x7f, 0x7c, 0x66, 0x18, 0x89, 0x58, 0x9c, 0x6b, 0xaa, 0xad, 0xca, 0x70, + 0xdc, 0x55, 0xec, 0xbd, 0x8c, 0xbb, 0xe0, 0x45, 0x97, 0xc5, 0x99, 0x6a, 0x74, 0x17, 0xd6, 0xc6, + 0x34, 0x18, 0xa5, 0x74, 0xc4, 0xc8, 0x20, 0xf4, 0x98, 0x0c, 0x65, 0x15, 0xaf, 0xe6, 0xa0, 0x19, + 0x7a, 0x0c, 0xfd, 0x10, 0x76, 0xe8, 0x98, 0xb3, 0x38, 0xa0, 0xdc, 0x7f, 0xc1, 0xc8, 0x39, 0x81, + 0xa4, 0x86, 0x6e, 0x97, 0xef, 0x57, 0x71, 0xad, 0xc0, 0xd1, 0x2a, 0x08, 0x27, 0xe2, 0x68, 0x27, + 0xf4, 0x25, 0x29, 0xd0, 0x93, 0xda, 0x82, 0x8a, 0xc9, 0x84, 0xbe, 0x6c, 0x14, 0x60, 0xc1, 0x1a, + 0xc5, 0xe1, 0x90, 0x06, 0x3e, 0x3f, 0x25, 0x43, 0x5f, 0x90, 0x6a, 0x15, 0x95, 0x05, 0x53, 0xfc, + 0x50, 0xc2, 0xe8, 0x08, 0x36, 0xd4, 0x41, 0xa9, 0xb4, 
0x7e, 0xc9, 0x93, 0xda, 0xe2, 0xed, 0xf2, + 0xfd, 0x95, 0xfd, 0xcf, 0x2e, 0xbb, 0x3c, 0x12, 0x30, 0x95, 0x10, 0x5e, 0x4f, 0x8a, 0xcb, 0x04, + 0xfd, 0x00, 0x6a, 0x59, 0x94, 0x7f, 0x1e, 0xc6, 0x1e, 0x11, 0x15, 0x94, 0x84, 0xc3, 0x61, 0xc2, + 0x78, 0x52, 0x5b, 0x96, 0x96, 0xbc, 0xa7, 0xe8, 0xcf, 0xc2, 0xd8, 0xeb, 0xfb, 0x13, 0xe6, 0x2a, + 0x22, 0xfa, 0x1c, 0xde, 0x2f, 0x0a, 0xca, 0xb4, 0xf6, 0x98, 0x48, 0xe3, 0x0d, 0x29, 0x76, 0xfd, + 0x4c, 0xcc, 0x9c, 0xd2, 0xd0, 0x8f, 0x61, 0x37, 0x93, 0xa2, 0x29, 0x0f, 0x27, 0x94, 0xfb, 0x03, + 0x12, 0xa5, 0xc1, 0x80, 0xa7, 0xb2, 0xea, 0xd7, 0x56, 0xa4, 0xec, 0x8e, 0xe2, 0x69, 0xe4, 0x2c, + 0xdd, 0x33, 0x0e, 0x71, 0x36, 0x79, 0x5a, 0x44, 0x8c, 0xfe, 0x94, 0xc5, 0xc4, 0xf3, 0x69, 0xec, + 0xbf, 0x52, 0xf2, 0xba, 0x94, 0xcf, 0x5c, 0xea, 0x29, 0x06, 0xeb, 0x8c, 0x8e, 0x1e, 0xc3, 0x8d, + 0x02, 0xfb, 0x54, 0x85, 0x4a, 0xc5, 0x4d, 0x79, 0x48, 0xdb, 0x05, 0x86, 0x4c, 0x83, 0x4a, 0xc8, + 0x0e, 0x2c, 0x4f, 0x18, 0xa7, 0x1e, 0xe5, 0xb4, 0x56, 0x95, 0xd7, 0x7f, 0xff, 0xed, 0x2f, 0x52, + 0x3b, 0x93, 0xc4, 0x53, 0x1d, 0xe8, 0x3a, 0x54, 0x26, 0xa1, 0xc7, 0xc6, 0xb5, 0x35, 0x99, 0x82, + 0x6a, 0x81, 0xee, 0xc0, 0x6a, 0x9a, 0x30, 0xc2, 0x82, 0x13, 0x71, 0xf9, 0xbd, 0xda, 0xba, 0xf4, + 0x68, 0x25, 0x4d, 0x98, 0x9d, 0x41, 0xc6, 0x2f, 0x35, 0x58, 0x3b, 0x77, 0x23, 0x51, 0x0d, 0xae, + 0xdb, 0x1d, 0xd3, 0xb5, 0x9c, 0xce, 0x13, 0x72, 0xd4, 0xe9, 0x75, 0x6d, 0xd3, 0x39, 0x74, 0x6c, + 0x4b, 0xbf, 0x86, 0x56, 0x61, 0xb9, 0xe5, 0x74, 0xec, 0x06, 0x7e, 0xf8, 0x85, 0xae, 0xa1, 0x65, + 0x58, 0x38, 0x6c, 0x35, 0x4c, 0xbd, 0x84, 0xaa, 0x50, 0x69, 0x1f, 0xb5, 0x1a, 0xcf, 0xf4, 0x32, + 0x5a, 0x82, 0x72, 0xa3, 0x8d, 0xf5, 0x05, 0x04, 0xb0, 0xd8, 0x68, 0x63, 0xf2, 0xec, 0x40, 0xaf, + 0x08, 0x39, 0xf7, 0xc9, 0x13, 0xe2, 0x76, 0x8f, 0x7a, 0xfa, 0x22, 0xda, 0x81, 0xf7, 0x7b, 0x5d, + 0xdb, 0xfe, 0x09, 0x79, 0xe6, 0xf4, 0x9b, 0xa4, 0x69, 0x37, 0x2c, 0x1b, 0x93, 0x83, 0xe7, 0x7d, + 0x5b, 0x5f, 0x32, 0xfe, 0x5d, 0x85, 0xad, 0x0b, 0x1c, 0x45, 0x13, 0xd0, 0x65, 0x7d, 0xa2, 0x03, + 0x19, 0x6a, 0x7e, 0x1a, 0xb1, 0xac, 0xfe, 0x1c, 0xbc, 0x7b, 0xd8, 0xea, 0xce, 0x99, 0xaa, 0xfe, + 0x69, 0xc4, 0xf0, 0x86, 0x7f, 0x1e, 0x40, 0x5f, 0xc3, 0xae, 0x1f, 0x78, 0x69, 0xc2, 0xe3, 0x53, + 0x12, 0x50, 0x7f, 0x90, 0xc8, 0xdb, 0x4a, 0xc2, 0x21, 0x51, 0x2f, 0x9b, 0xb8, 0xe7, 0x6b, 0xb8, + 0x96, 0xf3, 0x74, 0x04, 0x8b, 0xb8, 0xaf, 0xee, 0x50, 0x86, 0x12, 0xbd, 0x80, 0xad, 0x89, 0x3f, + 0x88, 0xc3, 0xe8, 0x24, 0x0c, 0x18, 0xf1, 0xfc, 0x84, 0xcb, 0x9a, 0xbc, 0x20, 0x2d, 0xb6, 0xaf, + 0x60, 0x71, 0x7b, 0xaa, 0xcd, 0xca, 0x94, 0x61, 0x34, 0x99, 0xc3, 0x10, 0x87, 0xad, 0x30, 0xf6, + 0x47, 0x7e, 0x40, 0xc7, 0x64, 0xc2, 0x3c, 0x9f, 0xaa, 0x48, 0x55, 0xe4, 0xbe, 0xd6, 0x15, 0xf6, + 0x75, 0x33, 0x6d, 0x6d, 0xa1, 0x4c, 0xc6, 0x6a, 0x33, 0x9c, 0x85, 0xd0, 0x2b, 0x78, 0x4f, 0x14, + 0xd3, 0x58, 0x64, 0x0f, 0xf1, 0xd8, 0x0b, 0x7f, 0xc0, 0xd4, 0xbe, 0x8b, 0x72, 0xdf, 0xc3, 0x2b, + 0xec, 0x8b, 0x73, 0x7d, 0x96, 0x54, 0x27, 0x77, 0xde, 0x8a, 0xe7, 0x41, 0xb4, 0x7f, 0xc1, 0xde, + 0x01, 0x9d, 0x30, 0xf9, 0x14, 0x54, 0xe7, 0x64, 0x3a, 0x74, 0xc2, 0xd0, 0x67, 0x80, 0xce, 0xa2, + 0x24, 0x6a, 0x94, 0x34, 0x76, 0x59, 0x0a, 0xe8, 0x53, 0xf7, 0xfc, 0x89, 0xda, 0xe1, 0x2e, 0xac, + 0x85, 0xc7, 0xc3, 0x34, 0x19, 0x50, 0xce, 0x3c, 0xe2, 0x7b, 0xf2, 0xba, 0x96, 0xf1, 0xea, 0x19, + 0xe8, 0x78, 0xe8, 0x16, 0xac, 0xa8, 0xf7, 0x88, 0x87, 0x91, 0x3f, 0xa8, 0x81, 0xd4, 0x05, 0x12, + 0xea, 0x0b, 0xc4, 0xf8, 0x93, 0x06, 0x1b, 0x33, 0x69, 0x87, 0x6e, 0xc3, 0xae, 0xd3, 0xe9, 0xdb, + 0xb8, 0x61, 0xf6, 0x1d, 0xb7, 0x43, 0xfa, 0xcf, 0xbb, 0xf6, 0xcc, 0x85, 0x5b, 
0x07, 0xb0, 0x9c, + 0x9e, 0x79, 0xd4, 0xeb, 0x39, 0x6e, 0x47, 0xd7, 0x90, 0x0e, 0xab, 0x5d, 0x6c, 0xf7, 0xec, 0x4e, + 0xbf, 0x21, 0x44, 0xf4, 0x92, 0xe0, 0xe8, 0x36, 0xdd, 0x8e, 0x4d, 0xcc, 0x46, 0xab, 0xa5, 0x97, + 0xd1, 0x1a, 0x54, 0x9f, 0xba, 0x8e, 0x69, 0xb7, 0x1b, 0x4e, 0x4b, 0x5f, 0x40, 0x37, 0x61, 0xbb, + 0x8b, 0xdd, 0x43, 0x5b, 0x2a, 0x68, 0xb4, 0x5a, 0xcf, 0x49, 0x17, 0xbb, 0xd6, 0x91, 0x69, 0x5b, + 0x7a, 0x45, 0x68, 0x93, 0xbc, 0xa4, 0x67, 0x37, 0xb0, 0xd9, 0xd4, 0x17, 0xd1, 0x26, 0xac, 0x29, + 0xc4, 0x74, 0xdb, 0xed, 0x46, 0xc7, 0xd2, 0x97, 0x84, 0x42, 0xcb, 0x31, 0xb3, 0xfd, 0x96, 0x0d, + 0x0f, 0xd0, 0x7c, 0x2e, 0xa2, 0xbb, 0x70, 0xab, 0xed, 0x98, 0xd8, 0x55, 0xa6, 0x58, 0x4e, 0xaf, + 0xdf, 0xe8, 0x98, 0xb3, 0xce, 0xac, 0x41, 0x55, 0xd4, 0x8e, 0x43, 0xc7, 0x6e, 0x59, 0xba, 0x26, + 0x8a, 0x42, 0xdb, 0xb1, 0xd4, 0xaa, 0x24, 0x56, 0x87, 0x39, 0xad, 0x6c, 0x74, 0x60, 0x73, 0x2e, + 0xf3, 0xc4, 0x26, 0x2e, 0x76, 0x9e, 0x38, 0x9d, 0x46, 0x8b, 0xb4, 0x6d, 0xcb, 0x69, 0x5c, 0x14, + 0xb1, 0x2a, 0x54, 0x1a, 0x47, 0x96, 0xe3, 0xea, 0x9a, 0xf8, 0x7c, 0xea, 0x58, 0xb6, 0xab, 0x97, + 0x8c, 0x6f, 0x35, 0x55, 0x56, 0x66, 0xb3, 0xe7, 0x23, 0xb8, 0x83, 0x6d, 0xd3, 0xc5, 0xb2, 0xd6, + 0x59, 0xf6, 0x53, 0xe1, 0xfa, 0xc5, 0xc7, 0xd0, 0x6b, 0x37, 0x70, 0x5f, 0xba, 0xa7, 0x6b, 0x68, + 0x11, 0x4a, 0x5d, 0xb3, 0x18, 0x7c, 0x51, 0x15, 0xf5, 0x32, 0x5a, 0x81, 0xa5, 0xa7, 0x76, 0xd3, + 0x31, 0x5b, 0xb6, 0xbe, 0x20, 0xca, 0xa8, 0xdb, 0x6f, 0xda, 0x98, 0xb8, 0x47, 0x7d, 0xcb, 0x75, + 0x71, 0xa6, 0x5f, 0xaf, 0xa0, 0x6d, 0xd8, 0x52, 0x14, 0xa7, 0x53, 0x24, 0x2c, 0x1a, 0x9f, 0xc0, + 0xda, 0xb9, 0x07, 0x16, 0xd5, 0x60, 0x29, 0x3a, 0x89, 0x69, 0xc2, 0x92, 0x9a, 0x26, 0x1b, 0x85, + 0x7c, 0x69, 0xe0, 0xe9, 0xcc, 0x31, 0x6d, 0x9c, 0xd1, 0x0e, 0x2c, 0xe5, 0x5d, 0xaa, 0x96, 0x75, + 0xa9, 0x39, 0x80, 0x10, 0x94, 0xd3, 0xd8, 0x97, 0xed, 0x54, 0xb5, 0x79, 0x0d, 0x8b, 0xc5, 0xc1, + 0x3a, 0xa8, 0x26, 0x96, 0x24, 0x61, 0x1a, 0x0f, 0x98, 0xc1, 0xa6, 0x1d, 0x9f, 0xe8, 0xb3, 0x93, + 0x28, 0x0c, 0x12, 0x86, 0xba, 0xb0, 0x94, 0x37, 0x8a, 0x25, 0xd9, 0x22, 0x7c, 0xf1, 0x56, 0x2d, + 0x42, 0xc1, 0x38, 0xd5, 0x51, 0xe2, 0x5c, 0x8d, 0x11, 0xc1, 0xee, 0xc5, 0x83, 0xc8, 0xf7, 0xb6, + 0xe3, 0x9f, 0xb5, 0x8b, 0xb7, 0x9c, 0x3e, 0x2f, 0xaa, 0x75, 0x1a, 0xc5, 0x2c, 0x49, 0x44, 0x2f, + 0x38, 0xc8, 0x43, 0x58, 0x91, 0xad, 0x93, 0xc4, 0xbb, 0x0a, 0x46, 0x8f, 0x00, 0x12, 0x4e, 0x63, + 0x2e, 0xbb, 0x9b, 0x6c, 0xc4, 0xd9, 0xc9, 0x0d, 0xcc, 0x87, 0xc7, 0x7a, 0x3f, 0x1f, 0x1e, 0x71, + 0x55, 0x72, 0x8b, 0x35, 0xb2, 0x40, 0x1f, 0xd3, 0x84, 0x93, 0x34, 0xf2, 0x44, 0x03, 0x2a, 0x15, + 0x94, 0x2f, 0x55, 0xb0, 0x2e, 0x64, 0x8e, 0xa4, 0x88, 0x00, 0x8d, 0xbf, 0x97, 0xe6, 0xa7, 0x8a, + 0x42, 0xf4, 0xee, 0x43, 0x85, 0xc5, 0x71, 0x18, 0x67, 0x43, 0x05, 0xca, 0x35, 0xc7, 0xd1, 0xa0, + 0xde, 0x93, 0x63, 0x2b, 0x56, 0x0c, 0xa8, 0x37, 0x1b, 0xe7, 0xab, 0x4c, 0x4e, 0x33, 0xa1, 0x46, + 0x29, 0x6c, 0x66, 0x9d, 0x25, 0x7b, 0xc1, 0x02, 0xae, 0x4a, 0xab, 0x7a, 0xf7, 0x9c, 0x77, 0x54, + 0x7f, 0xe6, 0x54, 0x76, 0xc2, 0xb6, 0xd0, 0xa8, 0x1e, 0xec, 0xe4, 0x3c, 0x60, 0xb4, 0x60, 0x63, + 0x86, 0x07, 0xed, 0x42, 0x4d, 0xb4, 0x19, 0x66, 0x93, 0xd8, 0x4f, 0xed, 0x4e, 0x7f, 0xe6, 0x4a, + 0xdf, 0x84, 0x6d, 0xbb, 0x63, 0x11, 0xf7, 0x90, 0xf4, 0x9c, 0xce, 0x93, 0x96, 0x4d, 0x8e, 0xfa, + 0xa2, 0x12, 0x77, 0x4c, 0x5b, 0xd7, 0x8c, 0xef, 0x4a, 0x17, 0x0f, 0x6e, 0xca, 0x59, 0x44, 0x60, + 0xf5, 0x5c, 0x3f, 0xae, 0xc9, 0xe8, 0x7d, 0xf5, 0xae, 0x59, 0x5a, 0x68, 0xde, 0xf1, 0x39, 0x85, + 0xe8, 0x06, 0x2c, 0xfb, 0x09, 0x19, 0x8a, 0xf2, 0x97, 0xcd, 0x71, 0x4b, 0x7e, 0x72, 0x28, 0x96, + 0x68, 
0x17, 0x44, 0x42, 0x1d, 0xfb, 0x63, 0x9f, 0x9f, 0xca, 0xe4, 0x29, 0xe1, 0x33, 0x00, 0x35, + 0x60, 0x43, 0x1d, 0x04, 0x61, 0x81, 0xea, 0xbf, 0x65, 0xec, 0x57, 0xf6, 0x6f, 0xcc, 0x25, 0x98, + 0x95, 0xfd, 0x1b, 0xc1, 0x6b, 0x4a, 0xc2, 0x0e, 0x64, 0x47, 0x2e, 0x5e, 0xb2, 0x7c, 0xa6, 0xe2, + 0x74, 0x24, 0x5b, 0x87, 0x0a, 0x86, 0x0c, 0xea, 0xd3, 0xd1, 0xfc, 0xd0, 0xb3, 0x38, 0x3f, 0xf4, + 0x18, 0x7f, 0xd4, 0x60, 0xfb, 0x35, 0xd7, 0xf2, 0xfb, 0x0f, 0xdf, 0x8c, 0x0b, 0xa5, 0xcb, 0x5d, + 0xa8, 0x5c, 0xe0, 0xc2, 0x6f, 0x35, 0xd8, 0x7d, 0xd3, 0xa6, 0xe8, 0x43, 0x00, 0x1e, 0xd3, 0x20, + 0x19, 0xc4, 0x7e, 0xa4, 0xca, 0x45, 0x15, 0x17, 0x10, 0x41, 0x2f, 0x0c, 0x32, 0x25, 0x79, 0x56, + 0x05, 0x04, 0xfd, 0x08, 0x2a, 0x62, 0xda, 0x11, 0x03, 0xb8, 0x08, 0xc0, 0xbd, 0x4b, 0x02, 0x20, + 0x86, 0x1f, 0x27, 0x18, 0x86, 0x58, 0x49, 0x19, 0x7f, 0xd1, 0x60, 0x39, 0xc7, 0xd0, 0x97, 0xe7, + 0xaa, 0x92, 0x76, 0xd9, 0x99, 0x17, 0x8a, 0xd2, 0xe7, 0x62, 0xa2, 0xf7, 0x8a, 0xd5, 0xec, 0x0d, + 0x72, 0x4b, 0x2c, 0xcb, 0x12, 0x04, 0x0b, 0xc2, 0x8a, 0x6c, 0xe0, 0x95, 0xdf, 0x33, 0xfe, 0x2e, + 0xcc, 0xf9, 0x7b, 0x0b, 0x56, 0xf2, 0x11, 0xa9, 0x90, 0x59, 0x19, 0xd4, 0xa7, 0xa3, 0xfd, 0xbf, + 0x95, 0x61, 0x51, 0x45, 0x1c, 0xfd, 0x46, 0x83, 0xea, 0xb4, 0x0c, 0xa0, 0xb7, 0xfc, 0x85, 0x34, + 0xfd, 0x3b, 0xb4, 0xf3, 0xe0, 0xed, 0x05, 0x54, 0x85, 0x31, 0x3e, 0xfe, 0xc5, 0x5f, 0xff, 0xf1, + 0xeb, 0xd2, 0x6d, 0xe3, 0x66, 0xe1, 0xff, 0xa6, 0x12, 0x7b, 0x1c, 0xe7, 0xcc, 0x8f, 0xb5, 0x4f, + 0xd1, 0xb7, 0x1a, 0x5c, 0xbf, 0xe8, 0x29, 0x41, 0x8f, 0x2f, 0xd9, 0xf2, 0x0d, 0xff, 0xde, 0x76, + 0x3e, 0xc8, 0x65, 0x0b, 0x7f, 0x3e, 0xeb, 0x6e, 0xfe, 0xe7, 0xd3, 0x78, 0x28, 0x6d, 0xfb, 0x3f, + 0xe3, 0xe3, 0x79, 0xdb, 0x0a, 0x02, 0xe7, 0xcc, 0xfc, 0x95, 0x06, 0x68, 0xbe, 0x9e, 0xa2, 0x2f, + 0xaf, 0x50, 0x82, 0x95, 0x89, 0x8f, 0xae, 0x5c, 0xbc, 0x8d, 0x6b, 0xf7, 0xb5, 0x07, 0xda, 0xc1, + 0x2b, 0xb8, 0x33, 0x08, 0x27, 0x6f, 0xd6, 0x72, 0xb0, 0xa2, 0x8e, 0xbf, 0x2b, 0x12, 0xaf, 0xab, + 0x7d, 0x63, 0x66, 0xdc, 0xa3, 0x50, 0xdc, 0xcc, 0x7a, 0x18, 0x8f, 0xf6, 0x46, 0x2c, 0x90, 0x69, + 0xb9, 0xa7, 0x48, 0x34, 0xf2, 0x93, 0xd7, 0xfc, 0x9e, 0xfe, 0x4a, 0x01, 0xff, 0xd2, 0xb4, 0xe3, + 0x45, 0x29, 0xf2, 0xff, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x29, 0xdf, 0xd7, 0xd0, 0x16, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/support/common/common.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/support/common/common.pb.go new file mode 100644 index 0000000..f658f34 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/support/common/common.pb.go @@ -0,0 +1,1040 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/support/common.proto + +package common // import "google.golang.org/genproto/googleapis/cloud/support/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The current state of this SupportAccount. 
+type SupportAccount_State int32 + +const ( + // Account is in an unknown state. + SupportAccount_STATE_UNSPECIFIED SupportAccount_State = 0 + // Account is in an active state. + SupportAccount_ACTIVE SupportAccount_State = 1 + // Account has been created but is being provisioned in support systems. + SupportAccount_PENDING SupportAccount_State = 2 + // Account deletion has been requested by the user. + SupportAccount_PENDING_DELETION SupportAccount_State = 3 +) + +var SupportAccount_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "ACTIVE", + 2: "PENDING", + 3: "PENDING_DELETION", +} +var SupportAccount_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "ACTIVE": 1, + "PENDING": 2, + "PENDING_DELETION": 3, +} + +func (x SupportAccount_State) String() string { + return proto.EnumName(SupportAccount_State_name, int32(x)) +} +func (SupportAccount_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{0, 0} +} + +// Pricing model applicable to this support account. +type SupportAccount_PricingModel int32 + +const ( + // This account is subscribed to an unknown pricing model. + SupportAccount_PRICING_MODEL_UNKNOWN SupportAccount_PricingModel = 0 + // Package based pricing (Platinum, Gold, Silver, Bronze). + SupportAccount_PACKAGES SupportAccount_PricingModel = 1 + // Support charges are calculated based on user seats a.k.a, + // "Pick Your Team" model. + SupportAccount_USER_ROLES SupportAccount_PricingModel = 2 +) + +var SupportAccount_PricingModel_name = map[int32]string{ + 0: "PRICING_MODEL_UNKNOWN", + 1: "PACKAGES", + 2: "USER_ROLES", +} +var SupportAccount_PricingModel_value = map[string]int32{ + "PRICING_MODEL_UNKNOWN": 0, + "PACKAGES": 1, + "USER_ROLES": 2, +} + +func (x SupportAccount_PricingModel) String() string { + return proto.EnumName(SupportAccount_PricingModel_name, int32(x)) +} +func (SupportAccount_PricingModel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{0, 1} +} + +// The case priority with P0 being the most urgent and P4 the least. +type Case_Priority int32 + +const ( + // Priority is undefined or has not been set yet. + Case_PRIORITY_UNSPECIFIED Case_Priority = 0 + // Extreme impact on a production service - Service is hard down. + Case_P0 Case_Priority = 1 + // Critical impact on a production service - Service is currently unusable. + Case_P1 Case_Priority = 2 + // Severe impact on a production service - Service is usable but greatly + // impaired. + Case_P2 Case_Priority = 3 + // Medium impact on a production service - Service is available, but + // moderately impaired. + Case_P3 Case_Priority = 4 + // General questions or minor issues - Production service is fully + // available. + Case_P4 Case_Priority = 5 +) + +var Case_Priority_name = map[int32]string{ + 0: "PRIORITY_UNSPECIFIED", + 1: "P0", + 2: "P1", + 3: "P2", + 4: "P3", + 5: "P4", +} +var Case_Priority_value = map[string]int32{ + "PRIORITY_UNSPECIFIED": 0, + "P0": 1, + "P1": 2, + "P2": 3, + "P3": 4, + "P4": 5, +} + +func (x Case_Priority) String() string { + return proto.EnumName(Case_Priority_name, int32(x)) +} +func (Case_Priority) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{1, 0} +} + +// The state of a case. +type Case_State int32 + +const ( + // Case is in an unknown state. + Case_STATE_UNSPECIFIED Case_State = 0 + // Case has been created but no one is assigned to work on it yet. + Case_NEW Case_State = 1 + // Case has been assigned to a support agent. 
+ Case_ASSIGNED Case_State = 2 + // A support agent is currently investigating the case. + Case_IN_PROGRESS_GOOGLE_SUPPORT Case_State = 3 + // Case has been forwarded to product team for further investigation. + Case_IN_PROGRESS_GOOGLE_ENG Case_State = 4 + // Case is under investigation and relates to a known issue. + Case_IN_PROGRESS_KNOWN_ISSUE Case_State = 5 + // Case is waiting for a response from the customer. + Case_WAITING_FOR_CUSTOMER_RESPONSE Case_State = 6 + // A solution has been offered for the case but it isn't closed yet. + Case_SOLUTION_OFFERED Case_State = 7 + // Cases has been fully resolved and is in a closed state. + Case_CLOSED Case_State = 8 +) + +var Case_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "NEW", + 2: "ASSIGNED", + 3: "IN_PROGRESS_GOOGLE_SUPPORT", + 4: "IN_PROGRESS_GOOGLE_ENG", + 5: "IN_PROGRESS_KNOWN_ISSUE", + 6: "WAITING_FOR_CUSTOMER_RESPONSE", + 7: "SOLUTION_OFFERED", + 8: "CLOSED", +} +var Case_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "NEW": 1, + "ASSIGNED": 2, + "IN_PROGRESS_GOOGLE_SUPPORT": 3, + "IN_PROGRESS_GOOGLE_ENG": 4, + "IN_PROGRESS_KNOWN_ISSUE": 5, + "WAITING_FOR_CUSTOMER_RESPONSE": 6, + "SOLUTION_OFFERED": 7, + "CLOSED": 8, +} + +func (x Case_State) String() string { + return proto.EnumName(Case_State_name, int32(x)) +} +func (Case_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{1, 1} +} + +// The status of a customer issue. +type CustomerIssue_IssueState int32 + +const ( + // Issue in an unknown state. + CustomerIssue_ISSUE_STATE_UNSPECIFIED CustomerIssue_IssueState = 0 + // Issue is currently open but the work on it has not been started. + CustomerIssue_OPEN CustomerIssue_IssueState = 1 + // Issue is currently being worked on. + CustomerIssue_IN_PROGRESS CustomerIssue_IssueState = 2 + // Issue is fixed. + CustomerIssue_FIXED CustomerIssue_IssueState = 3 + // Issue has been marked as invalid. + CustomerIssue_WONT_FIX CustomerIssue_IssueState = 4 + // Issue verified and in production. + CustomerIssue_VERIFIED CustomerIssue_IssueState = 5 +) + +var CustomerIssue_IssueState_name = map[int32]string{ + 0: "ISSUE_STATE_UNSPECIFIED", + 1: "OPEN", + 2: "IN_PROGRESS", + 3: "FIXED", + 4: "WONT_FIX", + 5: "VERIFIED", +} +var CustomerIssue_IssueState_value = map[string]int32{ + "ISSUE_STATE_UNSPECIFIED": 0, + "OPEN": 1, + "IN_PROGRESS": 2, + "FIXED": 3, + "WONT_FIX": 4, + "VERIFIED": 5, +} + +func (x CustomerIssue_IssueState) String() string { + return proto.EnumName(CustomerIssue_IssueState_name, int32(x)) +} +func (CustomerIssue_IssueState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{2, 0} +} + +// A role which determines the support resources and features a user might +// get access to. +type SupportRole_Role int32 + +const ( + // An unknown role. + SupportRole_ROLE_UNSPECIFIED SupportRole_Role = 0 + // The basic support role. + SupportRole_BASIC SupportRole_Role = 1 + // The developer role. + SupportRole_DEVELOPER SupportRole_Role = 2 + // The operation role. + SupportRole_OPERATION SupportRole_Role = 3 + // The site reliability role. 
+ SupportRole_SITE_RELIABILITY SupportRole_Role = 4 +) + +var SupportRole_Role_name = map[int32]string{ + 0: "ROLE_UNSPECIFIED", + 1: "BASIC", + 2: "DEVELOPER", + 3: "OPERATION", + 4: "SITE_RELIABILITY", +} +var SupportRole_Role_value = map[string]int32{ + "ROLE_UNSPECIFIED": 0, + "BASIC": 1, + "DEVELOPER": 2, + "OPERATION": 3, + "SITE_RELIABILITY": 4, +} + +func (x SupportRole_Role) String() string { + return proto.EnumName(SupportRole_Role_name, int32(x)) +} +func (SupportRole_Role) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{3, 0} +} + +// A Google Cloud Platform account that identifies support eligibility for a +// Cloud resource. Currently the Cloud resource can only be an Organization +// but this might change in future. +type SupportAccount struct { + // The resource name for a support account in format + // `supportAccounts/{account_id}`. + // Output only. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Identifier for this entity that gets persisted in storage system. The + // resource name is populated using this field in format + // `supportAccounts/{account_id}`. + AccountId string `protobuf:"bytes,2,opt,name=account_id,json=accountId,proto3" json:"account_id,omitempty"` + // The Cloud resource with which this support account is associated. + CloudResource string `protobuf:"bytes,3,opt,name=cloud_resource,json=cloudResource,proto3" json:"cloud_resource,omitempty"` + // A user friendly display name assigned to this support account. + DisplayName string `protobuf:"bytes,4,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Indicates the current state of an account. + State SupportAccount_State `protobuf:"varint,5,opt,name=state,proto3,enum=google.cloud.support.common.SupportAccount_State" json:"state,omitempty"` + // Time when this account was created. + // Output only. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The resource name of a billing account associated with this support + // account. For example, `billingAccounts/ABCDEF-012345-567890`. + BillingAccountName string `protobuf:"bytes,7,opt,name=billing_account_name,json=billingAccountName,proto3" json:"billing_account_name,omitempty"` + UnifyAccountId string `protobuf:"bytes,8,opt,name=unify_account_id,json=unifyAccountId,proto3" json:"unify_account_id,omitempty"` + // The PricingModel applicable to this support account. 
+ PricingModel SupportAccount_PricingModel `protobuf:"varint,9,opt,name=pricing_model,json=pricingModel,proto3,enum=google.cloud.support.common.SupportAccount_PricingModel" json:"pricing_model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SupportAccount) Reset() { *m = SupportAccount{} } +func (m *SupportAccount) String() string { return proto.CompactTextString(m) } +func (*SupportAccount) ProtoMessage() {} +func (*SupportAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{0} +} +func (m *SupportAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SupportAccount.Unmarshal(m, b) +} +func (m *SupportAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SupportAccount.Marshal(b, m, deterministic) +} +func (dst *SupportAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_SupportAccount.Merge(dst, src) +} +func (m *SupportAccount) XXX_Size() int { + return xxx_messageInfo_SupportAccount.Size(m) +} +func (m *SupportAccount) XXX_DiscardUnknown() { + xxx_messageInfo_SupportAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_SupportAccount proto.InternalMessageInfo + +func (m *SupportAccount) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SupportAccount) GetAccountId() string { + if m != nil { + return m.AccountId + } + return "" +} + +func (m *SupportAccount) GetCloudResource() string { + if m != nil { + return m.CloudResource + } + return "" +} + +func (m *SupportAccount) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *SupportAccount) GetState() SupportAccount_State { + if m != nil { + return m.State + } + return SupportAccount_STATE_UNSPECIFIED +} + +func (m *SupportAccount) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *SupportAccount) GetBillingAccountName() string { + if m != nil { + return m.BillingAccountName + } + return "" +} + +func (m *SupportAccount) GetUnifyAccountId() string { + if m != nil { + return m.UnifyAccountId + } + return "" +} + +func (m *SupportAccount) GetPricingModel() SupportAccount_PricingModel { + if m != nil { + return m.PricingModel + } + return SupportAccount_PRICING_MODEL_UNKNOWN +} + +// A support case created by the user. +type Case struct { + // The resource name for the Case in format + // `supportAccounts/{account_id}/cases/{case_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The short summary of the issue reported in this case. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The board description of issue provided with initial summary. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The product component for which this Case is reported. + Component string `protobuf:"bytes,4,opt,name=component,proto3" json:"component,omitempty"` + // The product subcomponent for which this Case is reported. + Subcomponent string `protobuf:"bytes,5,opt,name=subcomponent,proto3" json:"subcomponent,omitempty"` + // Timezone the client sending this request is in. + // It should be in a format IANA recognizes: https://www.iana.org/time-zone + // There is no additional validation done by the API. 
+ ClientTimezone string `protobuf:"bytes,6,opt,name=client_timezone,json=clientTimezone,proto3" json:"client_timezone,omitempty"` + // The email addresses that can be copied to receive updates on this case. + // Users can specify a maximum of 10 email addresses. + CcAddresses []string `protobuf:"bytes,7,rep,name=cc_addresses,json=ccAddresses,proto3" json:"cc_addresses,omitempty"` + // The Google Cloud Platform project ID for which this case is created. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // List of customer issues associated with this case. + Issues []*CustomerIssue `protobuf:"bytes,10,rep,name=issues,proto3" json:"issues,omitempty"` + // The current priority of this case. + Priority Case_Priority `protobuf:"varint,11,opt,name=priority,proto3,enum=google.cloud.support.common.Case_Priority" json:"priority,omitempty"` + // The current state of this case. + State Case_State `protobuf:"varint,12,opt,name=state,proto3,enum=google.cloud.support.common.Case_State" json:"state,omitempty"` + // Time when this case was created. + // Output only. + CreateTime *timestamp.Timestamp `protobuf:"bytes,13,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time when this case was last updated. + // Output only. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,14,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Email address of user who created this case. + // Output only. It is inferred from credentials supplied during case creation. + CreatorEmail string `protobuf:"bytes,15,opt,name=creator_email,json=creatorEmail,proto3" json:"creator_email,omitempty"` + // The issue category applicable to this case. + Category string `protobuf:"bytes,16,opt,name=category,proto3" json:"category,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Case) Reset() { *m = Case{} } +func (m *Case) String() string { return proto.CompactTextString(m) } +func (*Case) ProtoMessage() {} +func (*Case) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{1} +} +func (m *Case) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Case.Unmarshal(m, b) +} +func (m *Case) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Case.Marshal(b, m, deterministic) +} +func (dst *Case) XXX_Merge(src proto.Message) { + xxx_messageInfo_Case.Merge(dst, src) +} +func (m *Case) XXX_Size() int { + return xxx_messageInfo_Case.Size(m) +} +func (m *Case) XXX_DiscardUnknown() { + xxx_messageInfo_Case.DiscardUnknown(m) +} + +var xxx_messageInfo_Case proto.InternalMessageInfo + +func (m *Case) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Case) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Case) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Case) GetComponent() string { + if m != nil { + return m.Component + } + return "" +} + +func (m *Case) GetSubcomponent() string { + if m != nil { + return m.Subcomponent + } + return "" +} + +func (m *Case) GetClientTimezone() string { + if m != nil { + return m.ClientTimezone + } + return "" +} + +func (m *Case) GetCcAddresses() []string { + if m != nil { + return m.CcAddresses + } + return nil +} + +func (m *Case) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + 
+func (m *Case) GetIssues() []*CustomerIssue { + if m != nil { + return m.Issues + } + return nil +} + +func (m *Case) GetPriority() Case_Priority { + if m != nil { + return m.Priority + } + return Case_PRIORITY_UNSPECIFIED +} + +func (m *Case) GetState() Case_State { + if m != nil { + return m.State + } + return Case_STATE_UNSPECIFIED +} + +func (m *Case) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Case) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Case) GetCreatorEmail() string { + if m != nil { + return m.CreatorEmail + } + return "" +} + +func (m *Case) GetCategory() string { + if m != nil { + return m.Category + } + return "" +} + +// Reference to a Google internal ticket used for investigating a support case. +// Not every support case will have an internal ticket associated with it. +// A support case can have multiple tickets linked to it. +type CustomerIssue struct { + // Unique identifier for the internal issue. + // Output only. + IssueId string `protobuf:"bytes,1,opt,name=issue_id,json=issueId,proto3" json:"issue_id,omitempty"` + // Represents current status of the internal ticket. + // Output only. + State CustomerIssue_IssueState `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.support.common.CustomerIssue_IssueState" json:"state,omitempty"` + // Time when the internal issue was created. + // Output only. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time when the internal issue was marked as resolved. + // Output only. + ResolveTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=resolve_time,json=resolveTime,proto3" json:"resolve_time,omitempty"` + // Time when the internal issue was last updated. + // Output only. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomerIssue) Reset() { *m = CustomerIssue{} } +func (m *CustomerIssue) String() string { return proto.CompactTextString(m) } +func (*CustomerIssue) ProtoMessage() {} +func (*CustomerIssue) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{2} +} +func (m *CustomerIssue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomerIssue.Unmarshal(m, b) +} +func (m *CustomerIssue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomerIssue.Marshal(b, m, deterministic) +} +func (dst *CustomerIssue) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomerIssue.Merge(dst, src) +} +func (m *CustomerIssue) XXX_Size() int { + return xxx_messageInfo_CustomerIssue.Size(m) +} +func (m *CustomerIssue) XXX_DiscardUnknown() { + xxx_messageInfo_CustomerIssue.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomerIssue proto.InternalMessageInfo + +func (m *CustomerIssue) GetIssueId() string { + if m != nil { + return m.IssueId + } + return "" +} + +func (m *CustomerIssue) GetState() CustomerIssue_IssueState { + if m != nil { + return m.State + } + return CustomerIssue_ISSUE_STATE_UNSPECIFIED +} + +func (m *CustomerIssue) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *CustomerIssue) GetResolveTime() *timestamp.Timestamp { + if m != nil { + return m.ResolveTime + } + return nil +} + +func (m *CustomerIssue) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// A message that contains mapping of a user and their role under a support +// account. +type SupportRole struct { + // Email address of user being added through this Role. + Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email,omitempty"` + // The type of role assigned to user. + Role SupportRole_Role `protobuf:"varint,2,opt,name=role,proto3,enum=google.cloud.support.common.SupportRole_Role" json:"role,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SupportRole) Reset() { *m = SupportRole{} } +func (m *SupportRole) String() string { return proto.CompactTextString(m) } +func (*SupportRole) ProtoMessage() {} +func (*SupportRole) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{3} +} +func (m *SupportRole) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SupportRole.Unmarshal(m, b) +} +func (m *SupportRole) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SupportRole.Marshal(b, m, deterministic) +} +func (dst *SupportRole) XXX_Merge(src proto.Message) { + xxx_messageInfo_SupportRole.Merge(dst, src) +} +func (m *SupportRole) XXX_Size() int { + return xxx_messageInfo_SupportRole.Size(m) +} +func (m *SupportRole) XXX_DiscardUnknown() { + xxx_messageInfo_SupportRole.DiscardUnknown(m) +} + +var xxx_messageInfo_SupportRole proto.InternalMessageInfo + +func (m *SupportRole) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +func (m *SupportRole) GetRole() SupportRole_Role { + if m != nil { + return m.Role + } + return SupportRole_ROLE_UNSPECIFIED +} + +// The comment text associated with a `Case`. 
+type Comment struct { + // Text containing a maximum of 3000 characters. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // Time when this update was created. + // Output only. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The email address/name of user who created this comment. + // Output only. + Author string `protobuf:"bytes,3,opt,name=author,proto3" json:"author,omitempty"` + // The resource name for this comment in format + // `supportAccounts/{account_id}/cases/{case_id}/{comment_id}`. + // Output only. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Comment) Reset() { *m = Comment{} } +func (m *Comment) String() string { return proto.CompactTextString(m) } +func (*Comment) ProtoMessage() {} +func (*Comment) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{4} +} +func (m *Comment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Comment.Unmarshal(m, b) +} +func (m *Comment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Comment.Marshal(b, m, deterministic) +} +func (dst *Comment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Comment.Merge(dst, src) +} +func (m *Comment) XXX_Size() int { + return xxx_messageInfo_Comment.Size(m) +} +func (m *Comment) XXX_DiscardUnknown() { + xxx_messageInfo_Comment.DiscardUnknown(m) +} + +var xxx_messageInfo_Comment proto.InternalMessageInfo + +func (m *Comment) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Comment) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Comment) GetAuthor() string { + if m != nil { + return m.Author + } + return "" +} + +func (m *Comment) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Represents the product component taxonomy that is to be used while creating +// or updating a `Case`. A client should obtain the list of issue categories, +// component/subcomponent from this object and specify it in `Case.category`, +// `Case.component` and `Case.subcomponent` fields respectively. +type IssueTaxonomy struct { + // Map of available categories. 
+ Categories map[string]*IssueTaxonomy_Category `protobuf:"bytes,1,rep,name=categories,proto3" json:"categories,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IssueTaxonomy) Reset() { *m = IssueTaxonomy{} } +func (m *IssueTaxonomy) String() string { return proto.CompactTextString(m) } +func (*IssueTaxonomy) ProtoMessage() {} +func (*IssueTaxonomy) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{5} +} +func (m *IssueTaxonomy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IssueTaxonomy.Unmarshal(m, b) +} +func (m *IssueTaxonomy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IssueTaxonomy.Marshal(b, m, deterministic) +} +func (dst *IssueTaxonomy) XXX_Merge(src proto.Message) { + xxx_messageInfo_IssueTaxonomy.Merge(dst, src) +} +func (m *IssueTaxonomy) XXX_Size() int { + return xxx_messageInfo_IssueTaxonomy.Size(m) +} +func (m *IssueTaxonomy) XXX_DiscardUnknown() { + xxx_messageInfo_IssueTaxonomy.DiscardUnknown(m) +} + +var xxx_messageInfo_IssueTaxonomy proto.InternalMessageInfo + +func (m *IssueTaxonomy) GetCategories() map[string]*IssueTaxonomy_Category { + if m != nil { + return m.Categories + } + return nil +} + +// The representation of a product component. It is composed of a canonical +// name for the product (e.g., Google App Engine), languages in which a +// support ticket can be created under this component, a template that +// provides hints on important details to be filled out before submitting a +// case. It also contains an embedded list of product subcomponents that have +// similar attributes as top-level components. +// (e.g., Google App Engine > Memcache). +type IssueTaxonomy_Component struct { + // User friendly name of this component. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // List of languages in which a support case can be created under this + // component. Represented by language codes in ISO_639-1 standard. + Languages []string `protobuf:"bytes,2,rep,name=languages,proto3" json:"languages,omitempty"` + // Template to be used while filling the description of a support case. + Template string `protobuf:"bytes,3,opt,name=template,proto3" json:"template,omitempty"` + // List of subcomponents under this component. 
+ Subcomponents []*IssueTaxonomy_Component `protobuf:"bytes,4,rep,name=subcomponents,proto3" json:"subcomponents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IssueTaxonomy_Component) Reset() { *m = IssueTaxonomy_Component{} } +func (m *IssueTaxonomy_Component) String() string { return proto.CompactTextString(m) } +func (*IssueTaxonomy_Component) ProtoMessage() {} +func (*IssueTaxonomy_Component) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{5, 0} +} +func (m *IssueTaxonomy_Component) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IssueTaxonomy_Component.Unmarshal(m, b) +} +func (m *IssueTaxonomy_Component) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IssueTaxonomy_Component.Marshal(b, m, deterministic) +} +func (dst *IssueTaxonomy_Component) XXX_Merge(src proto.Message) { + xxx_messageInfo_IssueTaxonomy_Component.Merge(dst, src) +} +func (m *IssueTaxonomy_Component) XXX_Size() int { + return xxx_messageInfo_IssueTaxonomy_Component.Size(m) +} +func (m *IssueTaxonomy_Component) XXX_DiscardUnknown() { + xxx_messageInfo_IssueTaxonomy_Component.DiscardUnknown(m) +} + +var xxx_messageInfo_IssueTaxonomy_Component proto.InternalMessageInfo + +func (m *IssueTaxonomy_Component) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *IssueTaxonomy_Component) GetLanguages() []string { + if m != nil { + return m.Languages + } + return nil +} + +func (m *IssueTaxonomy_Component) GetTemplate() string { + if m != nil { + return m.Template + } + return "" +} + +func (m *IssueTaxonomy_Component) GetSubcomponents() []*IssueTaxonomy_Component { + if m != nil { + return m.Subcomponents + } + return nil +} + +// Represents the category of issue (Technical or Non-Technical) +// reported through a support case. +type IssueTaxonomy_Category struct { + // User friendly name of this category. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Map of product components under this category. 
+ Components map[string]*IssueTaxonomy_Component `protobuf:"bytes,2,rep,name=components,proto3" json:"components,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IssueTaxonomy_Category) Reset() { *m = IssueTaxonomy_Category{} } +func (m *IssueTaxonomy_Category) String() string { return proto.CompactTextString(m) } +func (*IssueTaxonomy_Category) ProtoMessage() {} +func (*IssueTaxonomy_Category) Descriptor() ([]byte, []int) { + return fileDescriptor_common_2a1414d9284edc87, []int{5, 1} +} +func (m *IssueTaxonomy_Category) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IssueTaxonomy_Category.Unmarshal(m, b) +} +func (m *IssueTaxonomy_Category) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IssueTaxonomy_Category.Marshal(b, m, deterministic) +} +func (dst *IssueTaxonomy_Category) XXX_Merge(src proto.Message) { + xxx_messageInfo_IssueTaxonomy_Category.Merge(dst, src) +} +func (m *IssueTaxonomy_Category) XXX_Size() int { + return xxx_messageInfo_IssueTaxonomy_Category.Size(m) +} +func (m *IssueTaxonomy_Category) XXX_DiscardUnknown() { + xxx_messageInfo_IssueTaxonomy_Category.DiscardUnknown(m) +} + +var xxx_messageInfo_IssueTaxonomy_Category proto.InternalMessageInfo + +func (m *IssueTaxonomy_Category) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *IssueTaxonomy_Category) GetComponents() map[string]*IssueTaxonomy_Component { + if m != nil { + return m.Components + } + return nil +} + +func init() { + proto.RegisterType((*SupportAccount)(nil), "google.cloud.support.common.SupportAccount") + proto.RegisterType((*Case)(nil), "google.cloud.support.common.Case") + proto.RegisterType((*CustomerIssue)(nil), "google.cloud.support.common.CustomerIssue") + proto.RegisterType((*SupportRole)(nil), "google.cloud.support.common.SupportRole") + proto.RegisterType((*Comment)(nil), "google.cloud.support.common.Comment") + proto.RegisterType((*IssueTaxonomy)(nil), "google.cloud.support.common.IssueTaxonomy") + proto.RegisterMapType((map[string]*IssueTaxonomy_Category)(nil), "google.cloud.support.common.IssueTaxonomy.CategoriesEntry") + proto.RegisterType((*IssueTaxonomy_Component)(nil), "google.cloud.support.common.IssueTaxonomy.Component") + proto.RegisterType((*IssueTaxonomy_Category)(nil), "google.cloud.support.common.IssueTaxonomy.Category") + proto.RegisterMapType((map[string]*IssueTaxonomy_Component)(nil), "google.cloud.support.common.IssueTaxonomy.Category.ComponentsEntry") + proto.RegisterEnum("google.cloud.support.common.SupportAccount_State", SupportAccount_State_name, SupportAccount_State_value) + proto.RegisterEnum("google.cloud.support.common.SupportAccount_PricingModel", SupportAccount_PricingModel_name, SupportAccount_PricingModel_value) + proto.RegisterEnum("google.cloud.support.common.Case_Priority", Case_Priority_name, Case_Priority_value) + proto.RegisterEnum("google.cloud.support.common.Case_State", Case_State_name, Case_State_value) + proto.RegisterEnum("google.cloud.support.common.CustomerIssue_IssueState", CustomerIssue_IssueState_name, CustomerIssue_IssueState_value) + proto.RegisterEnum("google.cloud.support.common.SupportRole_Role", SupportRole_Role_name, SupportRole_Role_value) +} + +func init() { + proto.RegisterFile("google/cloud/support/common.proto", fileDescriptor_common_2a1414d9284edc87) +} + +var 
fileDescriptor_common_2a1414d9284edc87 = []byte{ + // 1336 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x57, 0x61, 0x6e, 0xdb, 0xc6, + 0x12, 0x0e, 0x29, 0xc9, 0x92, 0x46, 0x96, 0xbd, 0x59, 0x38, 0x79, 0x8c, 0x92, 0xbc, 0x38, 0x7a, + 0x78, 0x88, 0x51, 0xa0, 0x72, 0xe2, 0xa4, 0x40, 0x90, 0x20, 0x3f, 0x64, 0x6a, 0x2d, 0xb0, 0x91, + 0x49, 0x82, 0xa4, 0xe3, 0x24, 0x45, 0x41, 0xd0, 0xd4, 0x46, 0x65, 0x43, 0x72, 0x09, 0x92, 0x4a, + 0xa3, 0x1e, 0xa0, 0x3d, 0x45, 0xef, 0xd0, 0x5f, 0xbd, 0x40, 0x7b, 0x83, 0xde, 0xa1, 0xe7, 0x28, + 0x76, 0x49, 0xc9, 0xb2, 0x63, 0xd8, 0x51, 0xfe, 0x68, 0x39, 0xb3, 0x33, 0xb3, 0x33, 0xb3, 0xdf, + 0xb7, 0x63, 0xc3, 0xfd, 0x09, 0x63, 0x93, 0x90, 0xee, 0xfa, 0x21, 0x9b, 0x8e, 0x77, 0xb3, 0x69, + 0x92, 0xb0, 0x34, 0xdf, 0xf5, 0x59, 0x14, 0xb1, 0xb8, 0x97, 0xa4, 0x2c, 0x67, 0xf8, 0x76, 0x61, + 0xd2, 0x13, 0x26, 0xbd, 0xd2, 0xa4, 0x57, 0x98, 0x74, 0xee, 0x94, 0xfe, 0x5e, 0x12, 0xec, 0x7a, + 0x71, 0xcc, 0x72, 0x2f, 0x0f, 0x58, 0x9c, 0x15, 0xae, 0x9d, 0x7b, 0xe5, 0xae, 0x90, 0x4e, 0xa6, + 0xef, 0x76, 0xf3, 0x20, 0xa2, 0x59, 0xee, 0x45, 0x49, 0x61, 0xd0, 0xfd, 0xa7, 0x0a, 0x1b, 0x76, + 0x11, 0xb1, 0xef, 0xfb, 0x6c, 0x1a, 0xe7, 0x18, 0x43, 0x35, 0xf6, 0x22, 0xaa, 0x48, 0xdb, 0xd2, + 0x4e, 0xd3, 0x12, 0xdf, 0xf8, 0x2e, 0x80, 0x57, 0x6c, 0xbb, 0xc1, 0x58, 0x91, 0xc5, 0x4e, 0xb3, + 0xd4, 0x68, 0x63, 0xfc, 0x7f, 0xd8, 0x10, 0xc9, 0xb9, 0x29, 0xcd, 0xd8, 0x34, 0xf5, 0xa9, 0x52, + 0x11, 0x26, 0x6d, 0xa1, 0xb5, 0x4a, 0x25, 0xbe, 0x0f, 0xeb, 0xe3, 0x20, 0x4b, 0x42, 0x6f, 0xe6, + 0x8a, 0x13, 0xaa, 0xc2, 0xa8, 0x55, 0xea, 0x74, 0x7e, 0xd0, 0x10, 0x6a, 0x59, 0xee, 0xe5, 0x54, + 0xa9, 0x6d, 0x4b, 0x3b, 0x1b, 0x7b, 0x8f, 0x7a, 0x97, 0xd4, 0xde, 0x3b, 0x9b, 0x78, 0xcf, 0xe6, + 0x8e, 0x56, 0xe1, 0x8f, 0x9f, 0x43, 0xcb, 0x4f, 0xa9, 0x97, 0x53, 0x97, 0x97, 0xac, 0xac, 0x6d, + 0x4b, 0x3b, 0xad, 0xbd, 0xce, 0x3c, 0xdc, 0xbc, 0x1f, 0x3d, 0x67, 0xde, 0x0f, 0x0b, 0x0a, 0x73, + 0xae, 0xc0, 0x0f, 0x61, 0xeb, 0x24, 0x08, 0xc3, 0x20, 0x9e, 0xb8, 0xf3, 0xb2, 0x45, 0xc2, 0x75, + 0x91, 0x30, 0x2e, 0xf7, 0xca, 0x73, 0x45, 0xde, 0x3b, 0x80, 0xa6, 0x71, 0xf0, 0x6e, 0xe6, 0x2e, + 0xb5, 0xa9, 0x21, 0xac, 0x37, 0x84, 0xbe, 0xbf, 0xe8, 0xd5, 0xf7, 0xd0, 0x4e, 0xd2, 0xc0, 0xe7, + 0xb1, 0x23, 0x36, 0xa6, 0xa1, 0xd2, 0x14, 0x95, 0x3e, 0x5d, 0xa5, 0x52, 0xb3, 0x08, 0x70, 0xc8, + 0xfd, 0xad, 0xf5, 0x64, 0x49, 0xea, 0x1e, 0x42, 0x4d, 0xf4, 0x01, 0xdf, 0x80, 0xeb, 0xb6, 0xd3, + 0x77, 0x88, 0x7b, 0xa4, 0xdb, 0x26, 0x51, 0xb5, 0x03, 0x8d, 0x0c, 0xd0, 0x35, 0x0c, 0xb0, 0xd6, + 0x57, 0x1d, 0xed, 0x15, 0x41, 0x12, 0x6e, 0x41, 0xdd, 0x24, 0xfa, 0x40, 0xd3, 0x87, 0x48, 0xc6, + 0x5b, 0x80, 0x4a, 0xc1, 0x1d, 0x90, 0x11, 0x71, 0x34, 0x43, 0x47, 0x95, 0xee, 0x10, 0xd6, 0x97, + 0x0f, 0xc3, 0xb7, 0xe0, 0x86, 0x69, 0x69, 0x2a, 0xb7, 0x3a, 0x34, 0x06, 0x64, 0xe4, 0x1e, 0xe9, + 0x2f, 0x75, 0xe3, 0x58, 0x47, 0xd7, 0xf0, 0x3a, 0x34, 0xcc, 0xbe, 0xfa, 0xb2, 0x3f, 0x24, 0x36, + 0x92, 0xf0, 0x06, 0xc0, 0x91, 0x4d, 0x2c, 0xd7, 0x32, 0x46, 0xc4, 0x46, 0x72, 0xf7, 0x8f, 0x3a, + 0x54, 0x55, 0x2f, 0xa3, 0x17, 0xc2, 0xeb, 0x3c, 0x30, 0xe4, 0x4f, 0x81, 0xb1, 0x0d, 0xad, 0x31, + 0xcd, 0xfc, 0x34, 0x48, 0x38, 0xbe, 0x4b, 0x7c, 0x2d, 0xab, 0xf0, 0x1d, 0x68, 0xfa, 0x2c, 0x4a, + 0x58, 0x4c, 0xe3, 0xbc, 0x84, 0xd6, 0xa9, 0x02, 0x77, 0x61, 0x3d, 0x9b, 0x9e, 0x9c, 0x1a, 0xd4, + 0x84, 0xc1, 0x19, 0x1d, 0x7e, 0x00, 0x9b, 0x7e, 0x18, 0xd0, 0x38, 0x17, 0x98, 0xf9, 0x99, 0xc5, + 0x05, 0x6e, 0x9a, 0xd6, 0x46, 0xa1, 0x76, 0x4a, 0x2d, 0xcf, 0xd7, 0xf7, 0x5d, 0x6f, 0x3c, 0x4e, + 0x69, 0x96, 0xd1, 0x4c, 
0xa9, 0x6f, 0x57, 0x78, 0x36, 0xbe, 0xdf, 0x9f, 0xab, 0x38, 0x63, 0x92, + 0x94, 0xfd, 0x48, 0xfd, 0x25, 0x28, 0x34, 0x4b, 0x8d, 0x36, 0xc6, 0xfb, 0xb0, 0x16, 0x64, 0xd9, + 0x94, 0x66, 0x0a, 0x6c, 0x57, 0x76, 0x5a, 0x7b, 0x5f, 0x5d, 0x7a, 0xfd, 0xea, 0x34, 0xcb, 0x59, + 0x44, 0x53, 0x8d, 0xbb, 0x58, 0xa5, 0x27, 0x3e, 0x80, 0x46, 0x92, 0x06, 0x2c, 0x0d, 0xf2, 0x99, + 0xd2, 0x12, 0x20, 0xba, 0x22, 0x8a, 0x97, 0x51, 0x0e, 0x1d, 0xe1, 0x61, 0x2d, 0x7c, 0xf1, 0x8b, + 0x39, 0xe7, 0xd6, 0x45, 0x90, 0x07, 0x57, 0x07, 0xb9, 0x8c, 0x69, 0xed, 0x95, 0x98, 0xf6, 0x1c, + 0x5a, 0xd3, 0x64, 0xbc, 0x70, 0xde, 0xb8, 0xda, 0xb9, 0x30, 0x17, 0xce, 0xff, 0x83, 0xb6, 0x08, + 0xc5, 0x52, 0x97, 0x46, 0x5e, 0x10, 0x2a, 0x9b, 0xc5, 0xa5, 0x96, 0x4a, 0xc2, 0x75, 0xb8, 0x03, + 0x0d, 0xdf, 0xcb, 0xe9, 0x84, 0xa5, 0x33, 0x05, 0x89, 0xfd, 0x85, 0xdc, 0x1d, 0x41, 0x63, 0xde, + 0x0f, 0xac, 0xc0, 0x96, 0x69, 0x69, 0x86, 0xa5, 0x39, 0x6f, 0xce, 0x51, 0x66, 0x0d, 0x64, 0xf3, + 0x21, 0x92, 0xc4, 0xfa, 0x08, 0xc9, 0x62, 0xdd, 0x43, 0x15, 0xb1, 0x3e, 0x46, 0x55, 0xb1, 0x3e, + 0x41, 0xb5, 0xee, 0xdf, 0xd2, 0x15, 0xdc, 0xab, 0x43, 0x45, 0x27, 0xc7, 0x48, 0xe2, 0x54, 0xe9, + 0xdb, 0xb6, 0x36, 0xd4, 0xc9, 0x00, 0xc9, 0xf8, 0xbf, 0xd0, 0xd1, 0x74, 0xd7, 0xb4, 0x8c, 0xa1, + 0x45, 0x6c, 0xdb, 0x1d, 0x1a, 0xc6, 0x70, 0x44, 0x5c, 0xfb, 0xc8, 0x34, 0x0d, 0xcb, 0x41, 0x15, + 0xdc, 0x81, 0x9b, 0x17, 0xec, 0x13, 0x7d, 0x88, 0xaa, 0xf8, 0x36, 0xfc, 0x67, 0x79, 0x4f, 0x70, + 0xd1, 0xd5, 0x6c, 0xfb, 0x88, 0xa0, 0x1a, 0xbe, 0x0f, 0x77, 0x8f, 0xfb, 0x9a, 0xc3, 0xc9, 0x7a, + 0x60, 0x58, 0xae, 0x7a, 0x64, 0x3b, 0xc6, 0x21, 0xe7, 0x24, 0xb1, 0x4d, 0x43, 0xb7, 0x09, 0x5a, + 0xe3, 0xac, 0xb7, 0x8d, 0xd1, 0x11, 0x67, 0xbb, 0x6b, 0x1c, 0x1c, 0x10, 0x8b, 0x0c, 0x50, 0x9d, + 0x3f, 0x12, 0xea, 0xc8, 0xb0, 0xc9, 0x00, 0x35, 0xba, 0xbf, 0x55, 0xa0, 0x7d, 0x06, 0x7f, 0xf8, + 0x16, 0x34, 0x04, 0x02, 0x39, 0xb0, 0x0b, 0x16, 0xd7, 0x85, 0xac, 0x8d, 0xf1, 0xcb, 0x39, 0x94, + 0x64, 0x01, 0xa5, 0x6f, 0x3e, 0x1f, 0xd5, 0x3d, 0xf1, 0x7b, 0x19, 0xb0, 0x2a, 0x2b, 0x01, 0xeb, + 0x05, 0xac, 0xf3, 0x61, 0x14, 0x7e, 0x28, 0xbd, 0xab, 0x57, 0x7a, 0xb7, 0x4a, 0xfb, 0x8b, 0x70, + 0x59, 0x5b, 0x05, 0x97, 0xdd, 0xf7, 0x00, 0xa7, 0xd5, 0x88, 0x2b, 0xe2, 0x17, 0xe2, 0x5e, 0x04, + 0x89, 0x06, 0x54, 0x0d, 0x93, 0xe8, 0x48, 0xc2, 0x9b, 0xd0, 0x5a, 0xba, 0x49, 0x24, 0xe3, 0x26, + 0xd4, 0x0e, 0xb4, 0xd7, 0x64, 0x80, 0x2a, 0x1c, 0x2f, 0xc7, 0x86, 0xee, 0xb8, 0x07, 0xda, 0x6b, + 0x54, 0xe5, 0xd2, 0x2b, 0x62, 0x15, 0x11, 0x6a, 0xdd, 0x3f, 0x25, 0x68, 0x95, 0xe3, 0xc1, 0x62, + 0x21, 0xc5, 0x5b, 0x50, 0x2b, 0xc8, 0x50, 0x5c, 0x4d, 0x21, 0xe0, 0x3e, 0x54, 0x53, 0x16, 0xce, + 0xef, 0xe5, 0xeb, 0xcf, 0x19, 0x36, 0x3c, 0x5a, 0x8f, 0xff, 0x58, 0xc2, 0xb5, 0xfb, 0x1d, 0x54, + 0xcb, 0x03, 0x10, 0x7f, 0xd4, 0xcf, 0x15, 0xd2, 0x84, 0xda, 0x7e, 0xdf, 0xd6, 0x54, 0x24, 0xe1, + 0x36, 0x34, 0x07, 0xe4, 0x15, 0x19, 0x19, 0x26, 0xb1, 0x90, 0xcc, 0x45, 0xfe, 0xd5, 0x2f, 0x26, + 0x8a, 0x40, 0x9c, 0xe6, 0x10, 0xd7, 0x22, 0x23, 0xad, 0xbf, 0xaf, 0x8d, 0x34, 0xe7, 0x0d, 0xaa, + 0x76, 0x7f, 0x91, 0xa0, 0xae, 0xb2, 0x28, 0xa2, 0xc5, 0x1f, 0x20, 0x39, 0xfd, 0x98, 0xcf, 0x27, + 0x04, 0xff, 0x3e, 0x8f, 0x05, 0x79, 0x25, 0x2c, 0xdc, 0x84, 0x35, 0x6f, 0x9a, 0xff, 0xc0, 0xd2, + 0x72, 0x6c, 0x94, 0xd2, 0x62, 0x14, 0x55, 0x4f, 0x47, 0x51, 0xf7, 0xf7, 0x1a, 0xb4, 0xc5, 0xe5, + 0x39, 0xde, 0x47, 0x16, 0xb3, 0x68, 0x86, 0xdf, 0x02, 0x94, 0x0f, 0x46, 0x40, 0x33, 0x45, 0x12, + 0xcf, 0xf5, 0xb3, 0x4b, 0x1b, 0x78, 0xc6, 0xbf, 0xa7, 0x2e, 0x9c, 0x49, 0x9c, 0xa7, 0x33, 0x6b, + 0x29, 0x5a, 0xe7, 0x2f, 0x09, 0x9a, 0xea, 0x62, 
0xfe, 0x9c, 0x1f, 0x83, 0xd2, 0xa7, 0x63, 0xf0, + 0x0e, 0x34, 0x43, 0x2f, 0x9e, 0x4c, 0xbd, 0x09, 0xcd, 0x14, 0x59, 0x8c, 0x9d, 0x53, 0x05, 0x7f, + 0xeb, 0x72, 0x1a, 0x25, 0x21, 0x67, 0x60, 0x51, 0xea, 0x42, 0xc6, 0x6f, 0xa1, 0xbd, 0x3c, 0xec, + 0x32, 0xa5, 0x2a, 0x2a, 0x79, 0xb2, 0x4a, 0x25, 0x73, 0x67, 0xeb, 0x6c, 0xa8, 0xce, 0xaf, 0x32, + 0x34, 0xca, 0x32, 0x67, 0x9f, 0x53, 0x85, 0x0f, 0xb0, 0x94, 0x88, 0x2c, 0x12, 0x51, 0x57, 0x6f, + 0xe9, 0x52, 0x46, 0x8b, 0xde, 0x9e, 0x26, 0x95, 0xc1, 0xe6, 0xb9, 0x6d, 0x8c, 0xa0, 0xf2, 0x9e, + 0xce, 0xca, 0x8c, 0xf8, 0x27, 0xfe, 0x16, 0x6a, 0x1f, 0xbc, 0x70, 0x3a, 0x47, 0xd4, 0x97, 0x75, + 0xa3, 0x08, 0xf1, 0x4c, 0x7e, 0x2a, 0x75, 0x52, 0xd8, 0x3c, 0x77, 0xdf, 0x17, 0x1c, 0xaa, 0x9d, + 0x3d, 0xf4, 0xf1, 0x17, 0x54, 0xbe, 0x74, 0xe6, 0xfe, 0x4f, 0x70, 0xcf, 0x67, 0xd1, 0x65, 0x41, + 0xf6, 0xaf, 0xab, 0x5c, 0x5b, 0x12, 0xdb, 0xe4, 0x6c, 0x79, 0xdb, 0x2f, 0xed, 0x27, 0x8c, 0xe3, + 0xa7, 0xc7, 0xd2, 0xc9, 0xee, 0x84, 0xc6, 0x82, 0x49, 0xbb, 0xc5, 0x96, 0x97, 0x04, 0xd9, 0x85, + 0xff, 0x97, 0x3c, 0x2f, 0x96, 0x93, 0x35, 0x61, 0xfd, 0xf8, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x0c, 0xd0, 0x7b, 0x46, 0xc4, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/support/v1alpha1/cloud_support.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/support/v1alpha1/cloud_support.pb.go new file mode 100644 index 0000000..34782d7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/support/v1alpha1/cloud_support.pb.go @@ -0,0 +1,1052 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/support/v1alpha1/cloud_support.proto + +package support // import "google.golang.org/genproto/googleapis/cloud/support/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import common "google.golang.org/genproto/googleapis/cloud/support/common" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message for `GetSupportAccount`. +type GetSupportAccountRequest struct { + // The resource name of the support accounts. For example: + // `supportAccounts/accountA`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSupportAccountRequest) Reset() { *m = GetSupportAccountRequest{} } +func (m *GetSupportAccountRequest) String() string { return proto.CompactTextString(m) } +func (*GetSupportAccountRequest) ProtoMessage() {} +func (*GetSupportAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{0} +} +func (m *GetSupportAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSupportAccountRequest.Unmarshal(m, b) +} +func (m *GetSupportAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSupportAccountRequest.Marshal(b, m, deterministic) +} +func (dst *GetSupportAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSupportAccountRequest.Merge(dst, src) +} +func (m *GetSupportAccountRequest) XXX_Size() int { + return xxx_messageInfo_GetSupportAccountRequest.Size(m) +} +func (m *GetSupportAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSupportAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSupportAccountRequest proto.InternalMessageInfo + +func (m *GetSupportAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for `ListSupportAccount`. +type ListSupportAccountsRequest struct { + // The filter applied to search results. It only supports filtering a support + // account list by a cloud_resource. For example, to filter results by support + // accounts associated with an Organization, its value should be: + // "cloud_resource:organizations/" + Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + // Maximum number of accounts fetched with each request. + PageSize int64 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. If unspecified, the + // first page is retrieved. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSupportAccountsRequest) Reset() { *m = ListSupportAccountsRequest{} } +func (m *ListSupportAccountsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSupportAccountsRequest) ProtoMessage() {} +func (*ListSupportAccountsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{1} +} +func (m *ListSupportAccountsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSupportAccountsRequest.Unmarshal(m, b) +} +func (m *ListSupportAccountsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSupportAccountsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSupportAccountsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSupportAccountsRequest.Merge(dst, src) +} +func (m *ListSupportAccountsRequest) XXX_Size() int { + return xxx_messageInfo_ListSupportAccountsRequest.Size(m) +} +func (m *ListSupportAccountsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSupportAccountsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSupportAccountsRequest proto.InternalMessageInfo + +func (m *ListSupportAccountsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListSupportAccountsRequest) GetPageSize() int64 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSupportAccountsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for `ListSupportAccount`. +type ListSupportAccountsResponse struct { + // A list of support accounts. + Accounts []*common.SupportAccount `protobuf:"bytes,1,rep,name=accounts,proto3" json:"accounts,omitempty"` + // A token to retrieve the next page of results. This should be passed on in + // `page_token` field of `ListSupportAccountRequest` for next request. If + // unspecified, there are no more results to retrieve. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSupportAccountsResponse) Reset() { *m = ListSupportAccountsResponse{} } +func (m *ListSupportAccountsResponse) String() string { return proto.CompactTextString(m) } +func (*ListSupportAccountsResponse) ProtoMessage() {} +func (*ListSupportAccountsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{2} +} +func (m *ListSupportAccountsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSupportAccountsResponse.Unmarshal(m, b) +} +func (m *ListSupportAccountsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSupportAccountsResponse.Marshal(b, m, deterministic) +} +func (dst *ListSupportAccountsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSupportAccountsResponse.Merge(dst, src) +} +func (m *ListSupportAccountsResponse) XXX_Size() int { + return xxx_messageInfo_ListSupportAccountsResponse.Size(m) +} +func (m *ListSupportAccountsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSupportAccountsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSupportAccountsResponse proto.InternalMessageInfo + +func (m *ListSupportAccountsResponse) GetAccounts() []*common.SupportAccount { + if m != nil { + return m.Accounts + } + return nil +} + +func (m *ListSupportAccountsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for `GetCase` method. +type GetCaseRequest struct { + // Name of case resource requested. + // For example: "supportAccounts/accountA/cases/123" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCaseRequest) Reset() { *m = GetCaseRequest{} } +func (m *GetCaseRequest) String() string { return proto.CompactTextString(m) } +func (*GetCaseRequest) ProtoMessage() {} +func (*GetCaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{3} +} +func (m *GetCaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCaseRequest.Unmarshal(m, b) +} +func (m *GetCaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCaseRequest.Marshal(b, m, deterministic) +} +func (dst *GetCaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCaseRequest.Merge(dst, src) +} +func (m *GetCaseRequest) XXX_Size() int { + return xxx_messageInfo_GetCaseRequest.Size(m) +} +func (m *GetCaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCaseRequest proto.InternalMessageInfo + +func (m *GetCaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for `ListCase` method. +type ListCasesRequest struct { + // Name of the account resource for which cases are requested. For example: + // "supportAccounts/accountA" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The filter applied to the search results. Currently it only accepts "OPEN" + // or "CLOSED" strings, filtering out cases that are open or resolved. 
+ Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Maximum number of cases fetched with each request. + PageSize int64 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. If unspecified, the + // first page is retrieved. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCasesRequest) Reset() { *m = ListCasesRequest{} } +func (m *ListCasesRequest) String() string { return proto.CompactTextString(m) } +func (*ListCasesRequest) ProtoMessage() {} +func (*ListCasesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{4} +} +func (m *ListCasesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCasesRequest.Unmarshal(m, b) +} +func (m *ListCasesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCasesRequest.Marshal(b, m, deterministic) +} +func (dst *ListCasesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCasesRequest.Merge(dst, src) +} +func (m *ListCasesRequest) XXX_Size() int { + return xxx_messageInfo_ListCasesRequest.Size(m) +} +func (m *ListCasesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCasesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCasesRequest proto.InternalMessageInfo + +func (m *ListCasesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListCasesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListCasesRequest) GetPageSize() int64 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListCasesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for `ListCase` method. +type ListCasesResponse struct { + // A list of cases. + Cases []*common.Case `protobuf:"bytes,1,rep,name=cases,proto3" json:"cases,omitempty"` + // A token to retrieve the next page of results. This should be passed on in + // `page_token` field of `ListCaseRequest` for next request. If unspecified, + // there are no more results to retrieve. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCasesResponse) Reset() { *m = ListCasesResponse{} } +func (m *ListCasesResponse) String() string { return proto.CompactTextString(m) } +func (*ListCasesResponse) ProtoMessage() {} +func (*ListCasesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{5} +} +func (m *ListCasesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCasesResponse.Unmarshal(m, b) +} +func (m *ListCasesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCasesResponse.Marshal(b, m, deterministic) +} +func (dst *ListCasesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCasesResponse.Merge(dst, src) +} +func (m *ListCasesResponse) XXX_Size() int { + return xxx_messageInfo_ListCasesResponse.Size(m) +} +func (m *ListCasesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCasesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCasesResponse proto.InternalMessageInfo + +func (m *ListCasesResponse) GetCases() []*common.Case { + if m != nil { + return m.Cases + } + return nil +} + +func (m *ListCasesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for `ListComments` method. +type ListCommentsRequest struct { + // The resource name of case for which comments should be listed. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCommentsRequest) Reset() { *m = ListCommentsRequest{} } +func (m *ListCommentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCommentsRequest) ProtoMessage() {} +func (*ListCommentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{6} +} +func (m *ListCommentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCommentsRequest.Unmarshal(m, b) +} +func (m *ListCommentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCommentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCommentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCommentsRequest.Merge(dst, src) +} +func (m *ListCommentsRequest) XXX_Size() int { + return xxx_messageInfo_ListCommentsRequest.Size(m) +} +func (m *ListCommentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCommentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCommentsRequest proto.InternalMessageInfo + +func (m *ListCommentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The response message for `ListComments` method. +type ListCommentsResponse struct { + // A list of comments. 
+ Comments []*common.Comment `protobuf:"bytes,1,rep,name=comments,proto3" json:"comments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCommentsResponse) Reset() { *m = ListCommentsResponse{} } +func (m *ListCommentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCommentsResponse) ProtoMessage() {} +func (*ListCommentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{7} +} +func (m *ListCommentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCommentsResponse.Unmarshal(m, b) +} +func (m *ListCommentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCommentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCommentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCommentsResponse.Merge(dst, src) +} +func (m *ListCommentsResponse) XXX_Size() int { + return xxx_messageInfo_ListCommentsResponse.Size(m) +} +func (m *ListCommentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCommentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCommentsResponse proto.InternalMessageInfo + +func (m *ListCommentsResponse) GetComments() []*common.Comment { + if m != nil { + return m.Comments + } + return nil +} + +// The request message for `CreateCase` method. +type CreateCaseRequest struct { + // The resource name for `SupportAccount` under which this case is created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The case resource to create. + Case *common.Case `protobuf:"bytes,2,opt,name=case,proto3" json:"case,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCaseRequest) Reset() { *m = CreateCaseRequest{} } +func (m *CreateCaseRequest) String() string { return proto.CompactTextString(m) } +func (*CreateCaseRequest) ProtoMessage() {} +func (*CreateCaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{8} +} +func (m *CreateCaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCaseRequest.Unmarshal(m, b) +} +func (m *CreateCaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCaseRequest.Marshal(b, m, deterministic) +} +func (dst *CreateCaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCaseRequest.Merge(dst, src) +} +func (m *CreateCaseRequest) XXX_Size() int { + return xxx_messageInfo_CreateCaseRequest.Size(m) +} +func (m *CreateCaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCaseRequest proto.InternalMessageInfo + +func (m *CreateCaseRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateCaseRequest) GetCase() *common.Case { + if m != nil { + return m.Case + } + return nil +} + +// The request message for `UpdateCase` method. +type UpdateCaseRequest struct { + // The case resource to update. + Case *common.Case `protobuf:"bytes,1,opt,name=case,proto3" json:"case,omitempty"` + // A field that represents attributes of a Case object that should be updated + // as part of this request. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateCaseRequest) Reset() { *m = UpdateCaseRequest{} } +func (m *UpdateCaseRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateCaseRequest) ProtoMessage() {} +func (*UpdateCaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{9} +} +func (m *UpdateCaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateCaseRequest.Unmarshal(m, b) +} +func (m *UpdateCaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateCaseRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateCaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateCaseRequest.Merge(dst, src) +} +func (m *UpdateCaseRequest) XXX_Size() int { + return xxx_messageInfo_UpdateCaseRequest.Size(m) +} +func (m *UpdateCaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateCaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateCaseRequest proto.InternalMessageInfo + +func (m *UpdateCaseRequest) GetCase() *common.Case { + if m != nil { + return m.Case + } + return nil +} + +func (m *UpdateCaseRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request message for `CreateComment` method. +type CreateCommentRequest struct { + // The resource name of case to which this comment should be added. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The `Comment` to be added to this case. + Comment *common.Comment `protobuf:"bytes,2,opt,name=comment,proto3" json:"comment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCommentRequest) Reset() { *m = CreateCommentRequest{} } +func (m *CreateCommentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateCommentRequest) ProtoMessage() {} +func (*CreateCommentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{10} +} +func (m *CreateCommentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCommentRequest.Unmarshal(m, b) +} +func (m *CreateCommentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCommentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateCommentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCommentRequest.Merge(dst, src) +} +func (m *CreateCommentRequest) XXX_Size() int { + return xxx_messageInfo_CreateCommentRequest.Size(m) +} +func (m *CreateCommentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCommentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCommentRequest proto.InternalMessageInfo + +func (m *CreateCommentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateCommentRequest) GetComment() *common.Comment { + if m != nil { + return m.Comment + } + return nil +} + +// The request message for `GetIssueTaxonomy` method. 
+type GetIssueTaxonomyRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIssueTaxonomyRequest) Reset() { *m = GetIssueTaxonomyRequest{} } +func (m *GetIssueTaxonomyRequest) String() string { return proto.CompactTextString(m) } +func (*GetIssueTaxonomyRequest) ProtoMessage() {} +func (*GetIssueTaxonomyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_support_255f2e0c522c4db6, []int{11} +} +func (m *GetIssueTaxonomyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIssueTaxonomyRequest.Unmarshal(m, b) +} +func (m *GetIssueTaxonomyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIssueTaxonomyRequest.Marshal(b, m, deterministic) +} +func (dst *GetIssueTaxonomyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIssueTaxonomyRequest.Merge(dst, src) +} +func (m *GetIssueTaxonomyRequest) XXX_Size() int { + return xxx_messageInfo_GetIssueTaxonomyRequest.Size(m) +} +func (m *GetIssueTaxonomyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIssueTaxonomyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIssueTaxonomyRequest proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GetSupportAccountRequest)(nil), "google.cloud.support.v1alpha1.GetSupportAccountRequest") + proto.RegisterType((*ListSupportAccountsRequest)(nil), "google.cloud.support.v1alpha1.ListSupportAccountsRequest") + proto.RegisterType((*ListSupportAccountsResponse)(nil), "google.cloud.support.v1alpha1.ListSupportAccountsResponse") + proto.RegisterType((*GetCaseRequest)(nil), "google.cloud.support.v1alpha1.GetCaseRequest") + proto.RegisterType((*ListCasesRequest)(nil), "google.cloud.support.v1alpha1.ListCasesRequest") + proto.RegisterType((*ListCasesResponse)(nil), "google.cloud.support.v1alpha1.ListCasesResponse") + proto.RegisterType((*ListCommentsRequest)(nil), "google.cloud.support.v1alpha1.ListCommentsRequest") + proto.RegisterType((*ListCommentsResponse)(nil), "google.cloud.support.v1alpha1.ListCommentsResponse") + proto.RegisterType((*CreateCaseRequest)(nil), "google.cloud.support.v1alpha1.CreateCaseRequest") + proto.RegisterType((*UpdateCaseRequest)(nil), "google.cloud.support.v1alpha1.UpdateCaseRequest") + proto.RegisterType((*CreateCommentRequest)(nil), "google.cloud.support.v1alpha1.CreateCommentRequest") + proto.RegisterType((*GetIssueTaxonomyRequest)(nil), "google.cloud.support.v1alpha1.GetIssueTaxonomyRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudSupportClient is the client API for CloudSupport service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudSupportClient interface { + // Retrieves the support account details given an account identifier. + // The authenticated user calling this method must be the account owner. + GetSupportAccount(ctx context.Context, in *GetSupportAccountRequest, opts ...grpc.CallOption) (*common.SupportAccount, error) + // Retrieves the list of accounts the current authenticated user has access + // to. 
+ ListSupportAccounts(ctx context.Context, in *ListSupportAccountsRequest, opts ...grpc.CallOption) (*ListSupportAccountsResponse, error)
+ // Retrieves the details for a support case. The current authenticated user
+ // calling this method must have permissions to view this case.
+ GetCase(ctx context.Context, in *GetCaseRequest, opts ...grpc.CallOption) (*common.Case, error)
+ // Retrieves the list of support cases associated with an account. The current
+ // authenticated user must have the permission to list and view these cases.
+ ListCases(ctx context.Context, in *ListCasesRequest, opts ...grpc.CallOption) (*ListCasesResponse, error)
+ // Lists all comments from a case.
+ ListComments(ctx context.Context, in *ListCommentsRequest, opts ...grpc.CallOption) (*ListCommentsResponse, error)
+ // Creates a case and associates it with a
+ // [SupportAccount][google.cloud.support.v1alpha2.SupportAcccount]. The
+ // authenticated user attempting this action must have permissions to create a
+ // `Case` under that [SupportAccount].
+ CreateCase(ctx context.Context, in *CreateCaseRequest, opts ...grpc.CallOption) (*common.Case, error)
+ // Updates a support case. Only a small set of details (priority, subject and
+ // cc_address) can be updated after a case is created.
+ UpdateCase(ctx context.Context, in *UpdateCaseRequest, opts ...grpc.CallOption) (*common.Case, error)
+ // Adds a new comment to a case.
+ CreateComment(ctx context.Context, in *CreateCommentRequest, opts ...grpc.CallOption) (*common.Comment, error)
+ // Retrieves the taxonomy of product categories and components to be used
+ // while creating a support case.
+ GetIssueTaxonomy(ctx context.Context, in *GetIssueTaxonomyRequest, opts ...grpc.CallOption) (*common.IssueTaxonomy, error)
+}
+
+type cloudSupportClient struct {
+ cc *grpc.ClientConn
+}
+
+func NewCloudSupportClient(cc *grpc.ClientConn) CloudSupportClient {
+ return &cloudSupportClient{cc}
+}
+
+func (c *cloudSupportClient) GetSupportAccount(ctx context.Context, in *GetSupportAccountRequest, opts ...grpc.CallOption) (*common.SupportAccount, error) {
+ out := new(common.SupportAccount)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/GetSupportAccount", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) ListSupportAccounts(ctx context.Context, in *ListSupportAccountsRequest, opts ...grpc.CallOption) (*ListSupportAccountsResponse, error) {
+ out := new(ListSupportAccountsResponse)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/ListSupportAccounts", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) GetCase(ctx context.Context, in *GetCaseRequest, opts ...grpc.CallOption) (*common.Case, error) {
+ out := new(common.Case)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/GetCase", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) ListCases(ctx context.Context, in *ListCasesRequest, opts ...grpc.CallOption) (*ListCasesResponse, error) {
+ out := new(ListCasesResponse)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/ListCases", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) ListComments(ctx context.Context, in *ListCommentsRequest, opts ...grpc.CallOption) (*ListCommentsResponse, error) {
+ out := new(ListCommentsResponse)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/ListComments", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) CreateCase(ctx context.Context, in *CreateCaseRequest, opts ...grpc.CallOption) (*common.Case, error) {
+ out := new(common.Case)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/CreateCase", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) UpdateCase(ctx context.Context, in *UpdateCaseRequest, opts ...grpc.CallOption) (*common.Case, error) {
+ out := new(common.Case)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/UpdateCase", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) CreateComment(ctx context.Context, in *CreateCommentRequest, opts ...grpc.CallOption) (*common.Comment, error) {
+ out := new(common.Comment)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/CreateComment", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *cloudSupportClient) GetIssueTaxonomy(ctx context.Context, in *GetIssueTaxonomyRequest, opts ...grpc.CallOption) (*common.IssueTaxonomy, error) {
+ out := new(common.IssueTaxonomy)
+ err := c.cc.Invoke(ctx, "/google.cloud.support.v1alpha1.CloudSupport/GetIssueTaxonomy", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// CloudSupportServer is the server API for CloudSupport service.
+type CloudSupportServer interface {
+ // Retrieves the support account details given an account identifier.
+ // The authenticated user calling this method must be the account owner.
+ GetSupportAccount(context.Context, *GetSupportAccountRequest) (*common.SupportAccount, error)
+ // Retrieves the list of accounts the current authenticated user has access
+ // to.
+ ListSupportAccounts(context.Context, *ListSupportAccountsRequest) (*ListSupportAccountsResponse, error)
+ // Retrieves the details for a support case. The current authenticated user
+ // calling this method must have permissions to view this case.
+ GetCase(context.Context, *GetCaseRequest) (*common.Case, error)
+ // Retrieves the list of support cases associated with an account. The current
+ // authenticated user must have the permission to list and view these cases.
+ ListCases(context.Context, *ListCasesRequest) (*ListCasesResponse, error)
+ // Lists all comments from a case.
+ ListComments(context.Context, *ListCommentsRequest) (*ListCommentsResponse, error)
+ // Creates a case and associates it with a
+ // [SupportAccount][google.cloud.support.v1alpha2.SupportAcccount]. The
+ // authenticated user attempting this action must have permissions to create a
+ // `Case` under that [SupportAccount].
+ CreateCase(context.Context, *CreateCaseRequest) (*common.Case, error)
+ // Updates a support case. Only a small set of details (priority, subject and
+ // cc_address) can be updated after a case is created.
+ UpdateCase(context.Context, *UpdateCaseRequest) (*common.Case, error)
+ // Adds a new comment to a case.
+ CreateComment(context.Context, *CreateCommentRequest) (*common.Comment, error) + // Retrieves the taxonomy of product categories and components to be used + // while creating a support case. + GetIssueTaxonomy(context.Context, *GetIssueTaxonomyRequest) (*common.IssueTaxonomy, error) +} + +func RegisterCloudSupportServer(s *grpc.Server, srv CloudSupportServer) { + s.RegisterService(&_CloudSupport_serviceDesc, srv) +} + +func _CloudSupport_GetSupportAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSupportAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).GetSupportAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/GetSupportAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).GetSupportAccount(ctx, req.(*GetSupportAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_ListSupportAccounts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSupportAccountsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).ListSupportAccounts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/ListSupportAccounts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).ListSupportAccounts(ctx, req.(*ListSupportAccountsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_GetCase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).GetCase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/GetCase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).GetCase(ctx, req.(*GetCaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_ListCases_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCasesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).ListCases(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/ListCases", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).ListCases(ctx, req.(*ListCasesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_ListComments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCommentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).ListComments(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/ListComments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).ListComments(ctx, req.(*ListCommentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_CreateCase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).CreateCase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/CreateCase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).CreateCase(ctx, req.(*CreateCaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_UpdateCase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateCaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).UpdateCase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/UpdateCase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).UpdateCase(ctx, req.(*UpdateCaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_CreateComment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCommentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).CreateComment(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/CreateComment", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).CreateComment(ctx, req.(*CreateCommentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudSupport_GetIssueTaxonomy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIssueTaxonomyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudSupportServer).GetIssueTaxonomy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.support.v1alpha1.CloudSupport/GetIssueTaxonomy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudSupportServer).GetIssueTaxonomy(ctx, req.(*GetIssueTaxonomyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudSupport_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.support.v1alpha1.CloudSupport", + HandlerType: (*CloudSupportServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetSupportAccount", + Handler: _CloudSupport_GetSupportAccount_Handler, + }, + { + MethodName: "ListSupportAccounts", + Handler: _CloudSupport_ListSupportAccounts_Handler, + }, + { + MethodName: "GetCase", + Handler: _CloudSupport_GetCase_Handler, + }, + { + MethodName: 
"ListCases", + Handler: _CloudSupport_ListCases_Handler, + }, + { + MethodName: "ListComments", + Handler: _CloudSupport_ListComments_Handler, + }, + { + MethodName: "CreateCase", + Handler: _CloudSupport_CreateCase_Handler, + }, + { + MethodName: "UpdateCase", + Handler: _CloudSupport_UpdateCase_Handler, + }, + { + MethodName: "CreateComment", + Handler: _CloudSupport_CreateComment_Handler, + }, + { + MethodName: "GetIssueTaxonomy", + Handler: _CloudSupport_GetIssueTaxonomy_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/support/v1alpha1/cloud_support.proto", +} + +func init() { + proto.RegisterFile("google/cloud/support/v1alpha1/cloud_support.proto", fileDescriptor_cloud_support_255f2e0c522c4db6) +} + +var fileDescriptor_cloud_support_255f2e0c522c4db6 = []byte{ + // 863 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0x41, 0x4f, 0x33, 0x45, + 0x18, 0xce, 0xb4, 0xc8, 0x07, 0x2f, 0xdf, 0xa7, 0x5f, 0x47, 0x82, 0x65, 0x0b, 0x49, 0x3b, 0x21, + 0xa6, 0x56, 0xdd, 0x85, 0x36, 0x88, 0x96, 0x40, 0x14, 0x88, 0x8d, 0x89, 0x26, 0xa4, 0x60, 0x62, + 0xbc, 0x34, 0x43, 0x19, 0xd6, 0x95, 0xee, 0xce, 0xda, 0x99, 0x1a, 0x40, 0xbd, 0x78, 0xf1, 0xa6, + 0x07, 0x6f, 0x7a, 0xe1, 0xe2, 0x59, 0x0f, 0xfe, 0x13, 0xff, 0x82, 0xfe, 0x0f, 0xb3, 0xb3, 0xb3, + 0xed, 0x76, 0x69, 0x77, 0x17, 0x6e, 0xdd, 0x77, 0xde, 0xe7, 0x7d, 0x9f, 0x79, 0xe6, 0x9d, 0x67, + 0x0a, 0x3b, 0x36, 0xe7, 0xf6, 0x80, 0x59, 0xfd, 0x01, 0x1f, 0x5d, 0x5a, 0x62, 0xe4, 0xfb, 0x7c, + 0x28, 0xad, 0x6f, 0x77, 0xe8, 0xc0, 0xff, 0x8a, 0xee, 0x84, 0xe1, 0x9e, 0x0e, 0x9b, 0xfe, 0x90, + 0x4b, 0x8e, 0x37, 0x43, 0x88, 0xa9, 0xd6, 0xcc, 0x68, 0x2d, 0x82, 0x18, 0x1b, 0xba, 0x22, 0xf5, + 0x1d, 0x8b, 0x7a, 0x1e, 0x97, 0x54, 0x3a, 0xdc, 0x13, 0x21, 0xd8, 0xa8, 0xcd, 0xec, 0xd7, 0xe7, + 0xae, 0xcb, 0x3d, 0x9d, 0x52, 0xd1, 0x29, 0xea, 0xeb, 0x62, 0x74, 0x65, 0x31, 0xd7, 0x97, 0xb7, + 0x7a, 0xb1, 0x9a, 0x5c, 0xbc, 0x72, 0xd8, 0xe0, 0xb2, 0xe7, 0x52, 0x71, 0x1d, 0x66, 0x10, 0x13, + 0xca, 0x1d, 0x26, 0xcf, 0xc2, 0xca, 0x1f, 0xf5, 0xfb, 0x7c, 0xe4, 0xc9, 0x2e, 0xfb, 0x66, 0xc4, + 0x84, 0xc4, 0x18, 0x16, 0x3c, 0xea, 0xb2, 0x32, 0xaa, 0xa2, 0xfa, 0x72, 0x57, 0xfd, 0x26, 0x3e, + 0x18, 0x9f, 0x3a, 0x22, 0x01, 0x10, 0x11, 0x62, 0x0d, 0x16, 0xaf, 0x9c, 0x81, 0x64, 0x43, 0x8d, + 0xd1, 0x5f, 0xb8, 0x02, 0xcb, 0x3e, 0xb5, 0x59, 0x4f, 0x38, 0x77, 0xac, 0x5c, 0xa8, 0xa2, 0x7a, + 0xb1, 0xbb, 0x14, 0x04, 0xce, 0x9c, 0x3b, 0x86, 0x37, 0x01, 0xd4, 0xa2, 0xe4, 0xd7, 0xcc, 0x2b, + 0x17, 0x15, 0x50, 0xa5, 0x9f, 0x07, 0x01, 0xf2, 0x33, 0x82, 0xca, 0xcc, 0x96, 0xc2, 0xe7, 0x9e, + 0x60, 0xb8, 0x03, 0x4b, 0x54, 0xc7, 0xca, 0xa8, 0x5a, 0xac, 0xaf, 0x34, 0xdf, 0x36, 0x67, 0x6a, + 0xae, 0x65, 0x4b, 0xec, 0x75, 0x0c, 0xc6, 0x6f, 0xc2, 0x6b, 0x1e, 0xbb, 0x91, 0xbd, 0x18, 0x99, + 0x82, 0x22, 0xf3, 0x22, 0x08, 0x9f, 0x8e, 0x09, 0x6d, 0xc1, 0xab, 0x1d, 0x26, 0x8f, 0xa9, 0x60, + 0x69, 0x42, 0xdd, 0xc1, 0xcb, 0x80, 0x75, 0x90, 0x26, 0x52, 0xf2, 0x62, 0x92, 0x15, 0xe6, 0x4b, + 0x56, 0x4c, 0x95, 0x6c, 0x21, 0x29, 0x99, 0x84, 0x52, 0xac, 0xb7, 0xd6, 0x69, 0x0f, 0x5e, 0xe9, + 0x07, 0x01, 0x2d, 0x52, 0x2d, 0x55, 0x24, 0xb5, 0xbb, 0x30, 0x3f, 0xb7, 0x2e, 0x6f, 0xc1, 0xeb, + 0xaa, 0x2b, 0x77, 0x5d, 0x16, 0x9b, 0x89, 0x59, 0xe2, 0x7c, 0x01, 0xab, 0xd3, 0xa9, 0x9a, 0xe3, + 0x87, 0xb0, 0xd4, 0xd7, 0x31, 0x4d, 0x73, 0x2b, 0x9d, 0x66, 0x98, 0xdc, 0x1d, 0xa3, 0xc8, 0x05, + 0x94, 0x8e, 0x87, 0x8c, 0x4a, 0x16, 0x3f, 0x9f, 0x35, 0x58, 0xf4, 0xe9, 0x90, 0x79, 0x32, 0x1a, + 0xcb, 0xf0, 0x0b, 0xef, 0xc2, 0x42, 0xb0, 0x45, 0xb5, 
0x9d, 0x5c, 0x8a, 0xa8, 0x74, 0xf2, 0x13, + 0x82, 0xd2, 0xe7, 0xfe, 0x65, 0xa2, 0x49, 0x54, 0x0c, 0x3d, 0xaa, 0x18, 0xde, 0x87, 0x95, 0x91, + 0xaa, 0xa5, 0x6e, 0xa5, 0xa6, 0x62, 0x44, 0xe8, 0xe8, 0xe2, 0x9a, 0x1f, 0x07, 0x17, 0xf7, 0x33, + 0x2a, 0xae, 0xbb, 0x10, 0xa6, 0x07, 0xbf, 0xc9, 0xd7, 0xb0, 0xaa, 0x77, 0xab, 0x85, 0x48, 0x19, + 0xb4, 0x43, 0x78, 0xa6, 0x55, 0xd2, 0x4d, 0xf2, 0x49, 0x1b, 0x81, 0xc8, 0x3a, 0xbc, 0xd1, 0x61, + 0xf2, 0x13, 0x21, 0x46, 0xec, 0x9c, 0xde, 0x70, 0x8f, 0xbb, 0xb7, 0xba, 0x5d, 0xf3, 0xbf, 0x15, + 0x78, 0x7e, 0x1c, 0x14, 0xd1, 0x77, 0x0b, 0xff, 0x81, 0xa0, 0xf4, 0xc0, 0x56, 0xf0, 0x9e, 0x99, + 0xea, 0x85, 0xe6, 0x3c, 0x23, 0x32, 0x1e, 0x73, 0xa1, 0x49, 0xe3, 0xc7, 0x7f, 0xfe, 0xfd, 0xb5, + 0xb0, 0x85, 0xc9, 0xc4, 0x97, 0xbf, 0x0b, 0x04, 0x38, 0x10, 0xd3, 0x06, 0x62, 0x35, 0x7e, 0xc0, + 0x7f, 0xa2, 0x70, 0x66, 0x13, 0xde, 0x82, 0x3f, 0xc8, 0x60, 0x3a, 0xdf, 0x02, 0x8d, 0xf6, 0x53, + 0xa0, 0xe1, 0xf8, 0x93, 0x9a, 0xa2, 0x5e, 0xc1, 0xeb, 0x13, 0xea, 0x09, 0xd2, 0xf8, 0x17, 0x04, + 0xcf, 0xb4, 0xfb, 0xe0, 0x77, 0xb3, 0xf5, 0x8c, 0x0d, 0xa8, 0x91, 0x3d, 0x92, 0xa4, 0xa9, 0x08, + 0xbc, 0x83, 0x1b, 0x99, 0xda, 0x59, 0xca, 0x1b, 0x02, 0x0d, 0xef, 0x11, 0x2c, 0x8f, 0xdd, 0x06, + 0x5b, 0x39, 0xb6, 0x1f, 0xf7, 0x44, 0x63, 0x3b, 0x3f, 0x40, 0xab, 0xb4, 0xad, 0x48, 0x36, 0x70, + 0x3d, 0xfb, 0x80, 0x43, 0x96, 0xf8, 0x6f, 0x04, 0xcf, 0xe3, 0x7e, 0x83, 0x9b, 0x79, 0x9a, 0x4e, + 0xfb, 0x98, 0xd1, 0x7a, 0x14, 0x46, 0x73, 0xdd, 0x57, 0x5c, 0x77, 0x71, 0x2b, 0xbf, 0xa0, 0x56, + 0xe4, 0x65, 0xf8, 0x77, 0x04, 0x30, 0x31, 0x33, 0x9c, 0xa5, 0xd4, 0x03, 0xdf, 0xcb, 0x73, 0xe2, + 0xef, 0x2b, 0x82, 0x4d, 0x12, 0x3f, 0xf1, 0xd0, 0x1d, 0xe7, 0xca, 0xd9, 0x0e, 0x8d, 0xeb, 0x1e, + 0x01, 0x4c, 0x5c, 0x30, 0x93, 0xdd, 0x03, 0xc3, 0xcc, 0xc3, 0xee, 0x40, 0xb1, 0xdb, 0x6b, 0x5a, + 0x31, 0x76, 0x41, 0x73, 0x33, 0x43, 0x43, 0x4d, 0xf1, 0x2f, 0x04, 0x2f, 0xa6, 0xfc, 0x11, 0xb7, + 0xf2, 0x69, 0x38, 0xe5, 0xa6, 0x46, 0x2e, 0xa3, 0x24, 0x27, 0x8a, 0xeb, 0x21, 0x79, 0xca, 0x51, + 0xb7, 0x23, 0x97, 0xc5, 0xbf, 0x21, 0x78, 0x99, 0xb4, 0x59, 0xfc, 0x5e, 0xf6, 0x45, 0x9f, 0xe5, + 0xcb, 0x46, 0x23, 0x95, 0xf8, 0x14, 0x84, 0x10, 0x45, 0x7f, 0x03, 0x1b, 0x63, 0xfa, 0x6d, 0x3b, + 0x51, 0xf6, 0xe8, 0x7b, 0xa8, 0xf5, 0xb9, 0x9b, 0x4e, 0xe6, 0xa8, 0x14, 0x7f, 0x09, 0x4e, 0x83, + 0xf7, 0xeb, 0xcb, 0x13, 0x8d, 0xb0, 0xf9, 0x80, 0x7a, 0xb6, 0xc9, 0x87, 0xb6, 0x65, 0x33, 0x4f, + 0xbd, 0x6d, 0x56, 0xb8, 0x44, 0x7d, 0x47, 0xcc, 0xf9, 0x57, 0xbd, 0xaf, 0x03, 0x17, 0x8b, 0x0a, + 0xd0, 0xfa, 0x3f, 0x00, 0x00, 0xff, 0xff, 0xad, 0xe1, 0xf2, 0x57, 0x85, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application.pb.go new file mode 100644 index 0000000..c4cf4e3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application.pb.go @@ -0,0 +1,418 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/application.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import date "google.golang.org/genproto/googleapis/type/date" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// Enum that represents the application status.
+type Application_ApplicationState int32
+
+const (
+ // Default value.
+ Application_APPLICATION_STATE_UNSPECIFIED Application_ApplicationState = 0
+ // The current stage is in progress or pending, for example, interviews in
+ // progress.
+ Application_IN_PROGRESS Application_ApplicationState = 1
+ // The current stage was terminated by a candidate decision.
+ Application_CANDIDATE_WITHDREW Application_ApplicationState = 2
+ // The current stage was terminated by an employer or agency decision.
+ Application_EMPLOYER_WITHDREW Application_ApplicationState = 3
+ // The current stage is successfully completed, but the next stage (if
+ // applicable) has not begun.
+ Application_COMPLETED Application_ApplicationState = 4
+ // The current stage was closed without an exception, or terminated for
+ // reasons unrelated to the candidate.
+ Application_CLOSED Application_ApplicationState = 5
+)
+
+var Application_ApplicationState_name = map[int32]string{
+ 0: "APPLICATION_STATE_UNSPECIFIED",
+ 1: "IN_PROGRESS",
+ 2: "CANDIDATE_WITHDREW",
+ 3: "EMPLOYER_WITHDREW",
+ 4: "COMPLETED",
+ 5: "CLOSED",
+}
+var Application_ApplicationState_value = map[string]int32{
+ "APPLICATION_STATE_UNSPECIFIED": 0,
+ "IN_PROGRESS": 1,
+ "CANDIDATE_WITHDREW": 2,
+ "EMPLOYER_WITHDREW": 3,
+ "COMPLETED": 4,
+ "CLOSED": 5,
+}
+
+func (x Application_ApplicationState) String() string {
+ return proto.EnumName(Application_ApplicationState_name, int32(x))
+}
+func (Application_ApplicationState) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_application_dbba57710742dc0d, []int{0, 0}
+}
+
+// The stage of the application.
+type Application_ApplicationStage int32
+
+const (
+ // Default value.
+ Application_APPLICATION_STAGE_UNSPECIFIED Application_ApplicationStage = 0
+ // Candidate has applied or a recruiter put candidate into consideration but
+ // candidate is not yet screened / no decision has been made to move or not
+ // move the candidate to the next stage.
+ Application_NEW Application_ApplicationStage = 1
+ // A recruiter decided to screen the candidate for this role.
+ Application_SCREEN Application_ApplicationStage = 2
+ // Candidate is being / was sent to the customer / hiring manager for
+ // detailed review.
+ Application_HIRING_MANAGER_REVIEW Application_ApplicationStage = 3
+ // Candidate was approved by the client / hiring manager and is being / was
+ // interviewed for the role.
+ Application_INTERVIEW Application_ApplicationStage = 4
+ // Candidate will be / has been given an offer of employment.
+ Application_OFFER_EXTENDED Application_ApplicationStage = 5
+ // Candidate has accepted their offer of employment.
+ Application_OFFER_ACCEPTED Application_ApplicationStage = 6
+ // Candidate has begun (or completed) their employment or assignment with
+ // the employer.
+ Application_STARTED Application_ApplicationStage = 7 +) + +var Application_ApplicationStage_name = map[int32]string{ + 0: "APPLICATION_STAGE_UNSPECIFIED", + 1: "NEW", + 2: "SCREEN", + 3: "HIRING_MANAGER_REVIEW", + 4: "INTERVIEW", + 5: "OFFER_EXTENDED", + 6: "OFFER_ACCEPTED", + 7: "STARTED", +} +var Application_ApplicationStage_value = map[string]int32{ + "APPLICATION_STAGE_UNSPECIFIED": 0, + "NEW": 1, + "SCREEN": 2, + "HIRING_MANAGER_REVIEW": 3, + "INTERVIEW": 4, + "OFFER_EXTENDED": 5, + "OFFER_ACCEPTED": 6, + "STARTED": 7, +} + +func (x Application_ApplicationStage) String() string { + return proto.EnumName(Application_ApplicationStage_name, int32(x)) +} +func (Application_ApplicationStage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_application_dbba57710742dc0d, []int{0, 1} +} + +// Resource that represents a job application record of a candidate. +type Application struct { + // Required during application update. + // + // Resource name assigned to an application by the API. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}", + // for example, + // "projects/api-test-project/tenants/foo/profiles/bar/applications/baz". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // Client side application identifier, used to uniquely identify the + // application. + // + // The maximum number of allowed characters is 255. + ExternalId string `protobuf:"bytes,31,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"` + // Output only. Resource name of the candidate of this application. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", + // for example, "projects/api-test-project/tenants/foo/profiles/bar". + Profile string `protobuf:"bytes,2,opt,name=profile,proto3" json:"profile,omitempty"` + // One of either a job or a company is required. + // + // Resource name of the job which the candidate applied for. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", + // for example, "projects/api-test-project/tenants/foo/jobs/bar". + Job string `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"` + // One of either a job or a company is required. + // + // Resource name of the company which the candidate applied for. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", + // for example, "projects/api-test-project/tenants/foo/companies/bar". + Company string `protobuf:"bytes,5,opt,name=company,proto3" json:"company,omitempty"` + // Optional. + // + // The application date. + ApplicationDate *date.Date `protobuf:"bytes,7,opt,name=application_date,json=applicationDate,proto3" json:"application_date,omitempty"` + // Required. + // + // What is the most recent stage of the application (that is, new, screen, + // send cv, hired, finished work)? This field is intentionally not + // comprehensive of every possible status, but instead, represents statuses + // that would be used to indicate to the ML models good / bad matches. + Stage Application_ApplicationStage `protobuf:"varint,11,opt,name=stage,proto3,enum=google.cloud.talent.v4beta1.Application_ApplicationStage" json:"stage,omitempty"` + // Optional. + // + // The application state. + State Application_ApplicationState `protobuf:"varint,13,opt,name=state,proto3,enum=google.cloud.talent.v4beta1.Application_ApplicationState" json:"state,omitempty"` + // Optional. 
+ //
+ // All interviews (screen, onsite, and so on) conducted as part of this
+ // application (includes details such as user conducting the interview,
+ // timestamp, feedback, and so on).
+ Interviews []*Interview `protobuf:"bytes,16,rep,name=interviews,proto3" json:"interviews,omitempty"`
+ // Optional.
+ //
+ // If the candidate is referred by an employee.
+ Referral *wrappers.BoolValue `protobuf:"bytes,18,opt,name=referral,proto3" json:"referral,omitempty"`
+ // Required.
+ //
+ // Reflects the time that the application was created.
+ CreateTime *timestamp.Timestamp `protobuf:"bytes,19,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
+ // Optional.
+ //
+ // The last update timestamp.
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,20,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"`
+ // Optional.
+ //
+ // Free text reason behind the recruitment outcome (for example, reason for
+ // withdraw / reject, reason for an unsuccessful finish, and so on).
+ //
+ // Number of characters allowed is 100.
+ OutcomeNotes string `protobuf:"bytes,21,opt,name=outcome_notes,json=outcomeNotes,proto3" json:"outcome_notes,omitempty"`
+ // Optional.
+ //
+ // Outcome positiveness shows how positive the outcome is.
+ Outcome Outcome `protobuf:"varint,22,opt,name=outcome,proto3,enum=google.cloud.talent.v4beta1.Outcome" json:"outcome,omitempty"`
+ // Output only. Indicates whether this job application is a match to
+ // application related filters. This value is only applicable in profile
+ // search response.
+ IsMatch *wrappers.BoolValue `protobuf:"bytes,28,opt,name=is_match,json=isMatch,proto3" json:"is_match,omitempty"`
+ // Output only. Job title snippet shows how the job title is related to a
+ // search query. It's empty if the job title isn't related to the search
+ // query.
+ JobTitleSnippet string `protobuf:"bytes,29,opt,name=job_title_snippet,json=jobTitleSnippet,proto3" json:"job_title_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Application) Reset() { *m = Application{} } +func (m *Application) String() string { return proto.CompactTextString(m) } +func (*Application) ProtoMessage() {} +func (*Application) Descriptor() ([]byte, []int) { + return fileDescriptor_application_dbba57710742dc0d, []int{0} +} +func (m *Application) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Application.Unmarshal(m, b) +} +func (m *Application) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Application.Marshal(b, m, deterministic) +} +func (dst *Application) XXX_Merge(src proto.Message) { + xxx_messageInfo_Application.Merge(dst, src) +} +func (m *Application) XXX_Size() int { + return xxx_messageInfo_Application.Size(m) +} +func (m *Application) XXX_DiscardUnknown() { + xxx_messageInfo_Application.DiscardUnknown(m) +} + +var xxx_messageInfo_Application proto.InternalMessageInfo + +func (m *Application) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Application) GetExternalId() string { + if m != nil { + return m.ExternalId + } + return "" +} + +func (m *Application) GetProfile() string { + if m != nil { + return m.Profile + } + return "" +} + +func (m *Application) GetJob() string { + if m != nil { + return m.Job + } + return "" +} + +func (m *Application) GetCompany() string { + if m != nil { + return m.Company + } + return "" +} + +func (m *Application) GetApplicationDate() *date.Date { + if m != nil { + return m.ApplicationDate + } + return nil +} + +func (m *Application) GetStage() Application_ApplicationStage { + if m != nil { + return m.Stage + } + return Application_APPLICATION_STAGE_UNSPECIFIED +} + +func (m *Application) GetState() Application_ApplicationState { + if m != nil { + return m.State + } + return Application_APPLICATION_STATE_UNSPECIFIED +} + +func (m *Application) GetInterviews() []*Interview { + if m != nil { + return m.Interviews + } + return nil +} + +func (m *Application) GetReferral() *wrappers.BoolValue { + if m != nil { + return m.Referral + } + return nil +} + +func (m *Application) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Application) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Application) GetOutcomeNotes() string { + if m != nil { + return m.OutcomeNotes + } + return "" +} + +func (m *Application) GetOutcome() Outcome { + if m != nil { + return m.Outcome + } + return Outcome_OUTCOME_UNSPECIFIED +} + +func (m *Application) GetIsMatch() *wrappers.BoolValue { + if m != nil { + return m.IsMatch + } + return nil +} + +func (m *Application) GetJobTitleSnippet() string { + if m != nil { + return m.JobTitleSnippet + } + return "" +} + +func init() { + proto.RegisterType((*Application)(nil), "google.cloud.talent.v4beta1.Application") + proto.RegisterEnum("google.cloud.talent.v4beta1.Application_ApplicationState", Application_ApplicationState_name, Application_ApplicationState_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.Application_ApplicationStage", Application_ApplicationStage_name, Application_ApplicationStage_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/application.proto", 
fileDescriptor_application_dbba57710742dc0d) +} + +var fileDescriptor_application_dbba57710742dc0d = []byte{ + // 764 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0xc7, 0x71, 0xd3, 0x36, 0xdd, 0x31, 0xdd, 0xba, 0x03, 0xad, 0x86, 0xb2, 0x4b, 0x42, 0xf9, + 0x50, 0x84, 0x84, 0x2d, 0xca, 0x87, 0x84, 0x16, 0x21, 0xb9, 0xf6, 0x24, 0x6b, 0x29, 0xb1, 0xad, + 0xb1, 0xd9, 0x02, 0x37, 0xd6, 0xc4, 0x99, 0x1a, 0x47, 0xb6, 0xc7, 0xb2, 0x27, 0xbb, 0xec, 0x0b, + 0x70, 0xcd, 0x33, 0x70, 0xc7, 0xa3, 0xf1, 0x16, 0x68, 0xfc, 0xb1, 0x8d, 0xca, 0x92, 0xbd, 0xd8, + 0x3b, 0x9f, 0xff, 0xfc, 0xfe, 0x7f, 0x9d, 0x33, 0x67, 0x64, 0xf0, 0x65, 0xc2, 0x79, 0x92, 0x31, + 0x23, 0xce, 0xf8, 0x66, 0x65, 0x08, 0x9a, 0xb1, 0x42, 0x18, 0xcf, 0xbf, 0x59, 0x32, 0x41, 0xbf, + 0x32, 0x68, 0x59, 0x66, 0x69, 0x4c, 0x45, 0xca, 0x0b, 0xbd, 0xac, 0xb8, 0xe0, 0xf0, 0xc3, 0x16, + 0xd7, 0x1b, 0x5c, 0x6f, 0x71, 0xbd, 0xc3, 0x2f, 0x26, 0xbb, 0xb2, 0x62, 0x9e, 0xe7, 0x7d, 0xcc, + 0xc5, 0x67, 0xbb, 0xc8, 0x35, 0x5f, 0x76, 0xd8, 0xa8, 0xc3, 0x9a, 0x6a, 0xb9, 0xb9, 0x35, 0x44, + 0x9a, 0xb3, 0x5a, 0xd0, 0xbc, 0xec, 0x80, 0x8f, 0xee, 0x03, 0x2f, 0x2a, 0x5a, 0x96, 0xac, 0xaa, + 0xbb, 0xf3, 0xf3, 0xee, 0x5c, 0xbc, 0x2c, 0x99, 0xb1, 0xa2, 0x82, 0x75, 0xfa, 0xa3, 0x4e, 0xa7, + 0x65, 0x6a, 0xd0, 0xa2, 0xe0, 0xa2, 0x99, 0xb1, 0x73, 0x5d, 0xfe, 0x73, 0x04, 0x54, 0xf3, 0x6e, + 0x74, 0x08, 0xc1, 0x7e, 0x41, 0x73, 0x86, 0x94, 0xb1, 0x32, 0x79, 0x40, 0x9a, 0x6f, 0x38, 0x02, + 0x2a, 0xfb, 0x5d, 0xb0, 0xaa, 0xa0, 0x59, 0x94, 0xae, 0xd0, 0xa8, 0x39, 0x02, 0xbd, 0xe4, 0xac, + 0x20, 0x02, 0xc3, 0xb2, 0xe2, 0xb7, 0x69, 0xc6, 0xd0, 0x5e, 0x73, 0xd8, 0x97, 0x50, 0x03, 0x83, + 0x35, 0x5f, 0xa2, 0xfd, 0x46, 0x95, 0x9f, 0x92, 0x8d, 0x79, 0x5e, 0xd2, 0xe2, 0x25, 0x3a, 0x68, + 0xd9, 0xae, 0x84, 0x3f, 0x00, 0x6d, 0x6b, 0x09, 0x91, 0x1c, 0x01, 0x0d, 0xc7, 0xca, 0x44, 0xbd, + 0x3a, 0xd5, 0xbb, 0x55, 0xc8, 0xd9, 0x74, 0x9b, 0x0a, 0x46, 0x4e, 0xb6, 0x50, 0x29, 0x40, 0x0f, + 0x1c, 0xd4, 0x82, 0x26, 0x0c, 0xa9, 0x63, 0x65, 0xf2, 0xf0, 0xea, 0x7b, 0x7d, 0xc7, 0xf6, 0xf4, + 0xad, 0x89, 0xb7, 0xbf, 0x03, 0x19, 0x40, 0xda, 0x9c, 0x2e, 0x50, 0x30, 0x74, 0xfc, 0x56, 0x81, + 0xa2, 0x0d, 0x14, 0x0c, 0x4e, 0x01, 0x48, 0x0b, 0xc1, 0xaa, 0xe7, 0x29, 0x7b, 0x51, 0x23, 0x6d, + 0x3c, 0x98, 0xa8, 0x57, 0x9f, 0xef, 0x4c, 0x75, 0x7a, 0x9c, 0x6c, 0x39, 0xe1, 0x77, 0xe0, 0xa8, + 0x62, 0xb7, 0xac, 0xaa, 0x68, 0x86, 0x60, 0x73, 0x3f, 0x17, 0x7d, 0x4a, 0xff, 0x36, 0xf4, 0x6b, + 0xce, 0xb3, 0x67, 0x34, 0xdb, 0x30, 0xf2, 0x8a, 0x85, 0x4f, 0x80, 0x1a, 0x57, 0x8c, 0x0a, 0x16, + 0xc9, 0xa7, 0x85, 0xde, 0xfb, 0x1f, 0x6b, 0xd8, 0xbf, 0x3b, 0x02, 0x5a, 0x5c, 0x0a, 0xd2, 0xbc, + 0x29, 0x57, 0xaf, 0xcc, 0xef, 0xbf, 0xd9, 0xdc, 0xe2, 0x8d, 0xf9, 0x13, 0x70, 0xcc, 0x37, 0x22, + 0xe6, 0x39, 0x8b, 0x0a, 0x2e, 0x58, 0x8d, 0xce, 0x9a, 0xcd, 0xbf, 0xdb, 0x89, 0xae, 0xd4, 0xe0, + 0x8f, 0x60, 0xd8, 0xd5, 0xe8, 0xbc, 0xb9, 0xf1, 0x4f, 0x77, 0xde, 0x8d, 0xd7, 0xb2, 0xa4, 0x37, + 0xc1, 0x6f, 0xc1, 0x51, 0x5a, 0x47, 0x39, 0x15, 0xf1, 0x6f, 0xe8, 0xd1, 0x1b, 0xaf, 0x65, 0x98, + 0xd6, 0x0b, 0x89, 0xc2, 0x2f, 0xc0, 0xe9, 0x9a, 0x2f, 0x23, 0x91, 0x8a, 0x8c, 0x45, 0x75, 0x91, + 0x96, 0x25, 0x13, 0xe8, 0x71, 0xd3, 0xdf, 0xc9, 0x9a, 0x2f, 0x43, 0xa9, 0x07, 0xad, 0x7c, 0xf9, + 0xa7, 0x02, 0xb4, 0xfb, 0xdb, 0x85, 0x1f, 0x83, 0xc7, 0xa6, 0xef, 0xcf, 0x1d, 0xcb, 0x0c, 0x1d, + 0xcf, 0x8d, 0x82, 0xd0, 0x0c, 0x71, 0xf4, 0x93, 0x1b, 0xf8, 0xd8, 0x72, 0xa6, 0x0e, 0xb6, 0xb5, + 0x77, 0xe0, 0x09, 0x50, 0x1d, 0x37, 0xf2, 0x89, 0x37, 0x23, 
0x38, 0x08, 0x34, 0x05, 0x9e, 0x03, + 0x68, 0x99, 0xae, 0xed, 0xd8, 0x92, 0xbd, 0x71, 0xc2, 0xa7, 0x36, 0xc1, 0x37, 0xda, 0x1e, 0x3c, + 0x03, 0xa7, 0x78, 0xe1, 0xcf, 0xbd, 0x5f, 0x30, 0xb9, 0x93, 0x07, 0xf0, 0x18, 0x3c, 0xb0, 0xbc, + 0x85, 0x3f, 0xc7, 0x21, 0xb6, 0xb5, 0x7d, 0x08, 0xc0, 0xa1, 0x35, 0xf7, 0x02, 0x6c, 0x6b, 0x07, + 0x97, 0x7f, 0xff, 0xa7, 0xa5, 0xe4, 0x75, 0x2d, 0xcd, 0xee, 0xb7, 0x34, 0x04, 0x03, 0x17, 0xdf, + 0x68, 0x8a, 0x0c, 0x0b, 0x2c, 0x82, 0xb1, 0xab, 0xed, 0xc1, 0x0f, 0xc0, 0xd9, 0x53, 0x87, 0x38, + 0xee, 0x2c, 0x5a, 0x98, 0xae, 0x39, 0xc3, 0x24, 0x22, 0xf8, 0x99, 0xd3, 0xb7, 0xe0, 0xb8, 0x21, + 0x26, 0x4d, 0xb9, 0x0f, 0x21, 0x78, 0xe8, 0x4d, 0xa7, 0x98, 0x44, 0xf8, 0xe7, 0x10, 0xbb, 0xb6, + 0x6c, 0xe5, 0x4e, 0x33, 0x2d, 0x0b, 0xfb, 0xb2, 0xd5, 0x43, 0xa8, 0x82, 0x61, 0x10, 0x9a, 0x44, + 0x16, 0xc3, 0xeb, 0x3f, 0x14, 0x30, 0x8a, 0x79, 0xbe, 0x6b, 0xad, 0xd7, 0x68, 0x6b, 0x18, 0xc2, + 0x6a, 0xbe, 0xa9, 0x62, 0xe6, 0xcb, 0xf5, 0xf9, 0xca, 0xaf, 0x66, 0x67, 0x4c, 0x78, 0x46, 0x8b, + 0x44, 0xe7, 0x55, 0x62, 0x24, 0xac, 0x68, 0x96, 0x6b, 0xb4, 0x47, 0xb4, 0x4c, 0xeb, 0xd7, 0xfe, + 0x68, 0x9f, 0xb4, 0xe5, 0x5f, 0x7b, 0x03, 0x2b, 0x0c, 0x96, 0x87, 0x8d, 0xe7, 0xeb, 0x7f, 0x03, + 0x00, 0x00, 0xff, 0xff, 0x72, 0x18, 0xca, 0xff, 0x11, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application_service.pb.go new file mode 100644 index 0000000..c0845d0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/application_service.pb.go @@ -0,0 +1,639 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/application_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The Request of the CreateApplication method. +type CreateApplicationRequest struct { + // Required. + // + // Resource name of the profile under which the application is created. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for + // example, "projects/test-project/tenants/test-tenant/profiles/test-profile". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The application to be created. 
+ Application *Application `protobuf:"bytes,2,opt,name=application,proto3" json:"application,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CreateApplicationRequest) Reset() { *m = CreateApplicationRequest{} }
+func (m *CreateApplicationRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateApplicationRequest) ProtoMessage() {}
+func (*CreateApplicationRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_application_service_ffccd846cf58538c, []int{0}
+}
+func (m *CreateApplicationRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CreateApplicationRequest.Unmarshal(m, b)
+}
+func (m *CreateApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CreateApplicationRequest.Marshal(b, m, deterministic)
+}
+func (dst *CreateApplicationRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CreateApplicationRequest.Merge(dst, src)
+}
+func (m *CreateApplicationRequest) XXX_Size() int {
+ return xxx_messageInfo_CreateApplicationRequest.Size(m)
+}
+func (m *CreateApplicationRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_CreateApplicationRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateApplicationRequest proto.InternalMessageInfo
+
+func (m *CreateApplicationRequest) GetParent() string {
+ if m != nil {
+ return m.Parent
+ }
+ return ""
+}
+
+func (m *CreateApplicationRequest) GetApplication() *Application {
+ if m != nil {
+ return m.Application
+ }
+ return nil
+}
+
+// Request for getting an application by name.
+type GetApplicationRequest struct {
+ // Required.
+ //
+ // The resource name of the application to be retrieved.
+ //
+ // The format is
+ // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}",
+ // for example,
+ // "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application".
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetApplicationRequest) Reset() { *m = GetApplicationRequest{} }
+func (m *GetApplicationRequest) String() string { return proto.CompactTextString(m) }
+func (*GetApplicationRequest) ProtoMessage() {}
+func (*GetApplicationRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_application_service_ffccd846cf58538c, []int{1}
+}
+func (m *GetApplicationRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetApplicationRequest.Unmarshal(m, b)
+}
+func (m *GetApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetApplicationRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetApplicationRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetApplicationRequest.Merge(dst, src)
+}
+func (m *GetApplicationRequest) XXX_Size() int {
+ return xxx_messageInfo_GetApplicationRequest.Size(m)
+}
+func (m *GetApplicationRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetApplicationRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetApplicationRequest proto.InternalMessageInfo
+
+func (m *GetApplicationRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// Request for updating a specified application.
+type UpdateApplicationRequest struct {
+ // Required.
+ //
+ // The application resource to replace the current resource in the system.
+ Application *Application `protobuf:"bytes,1,opt,name=application,proto3" json:"application,omitempty"`
+ // Optional but strongly recommended for the best service
+ // experience.
+ //
+ // If [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] is provided, only the specified fields in
+ // [application][google.cloud.talent.v4beta1.UpdateApplicationRequest.application] are updated. Otherwise all the fields are updated.
+ //
+ // A field mask to specify the application fields to be updated. Only
+ // top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported.
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *UpdateApplicationRequest) Reset() { *m = UpdateApplicationRequest{} }
+func (m *UpdateApplicationRequest) String() string { return proto.CompactTextString(m) }
+func (*UpdateApplicationRequest) ProtoMessage() {}
+func (*UpdateApplicationRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_application_service_ffccd846cf58538c, []int{2}
+}
+func (m *UpdateApplicationRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_UpdateApplicationRequest.Unmarshal(m, b)
+}
+func (m *UpdateApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_UpdateApplicationRequest.Marshal(b, m, deterministic)
+}
+func (dst *UpdateApplicationRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_UpdateApplicationRequest.Merge(dst, src)
+}
+func (m *UpdateApplicationRequest) XXX_Size() int {
+ return xxx_messageInfo_UpdateApplicationRequest.Size(m)
+}
+func (m *UpdateApplicationRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_UpdateApplicationRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_UpdateApplicationRequest proto.InternalMessageInfo
+
+func (m *UpdateApplicationRequest) GetApplication() *Application {
+ if m != nil {
+ return m.Application
+ }
+ return nil
+}
+
+func (m *UpdateApplicationRequest) GetUpdateMask() *field_mask.FieldMask {
+ if m != nil {
+ return m.UpdateMask
+ }
+ return nil
+}
+
+// Request to delete an application.
+type DeleteApplicationRequest struct {
+ // Required.
+ //
+ // The resource name of the application to be deleted.
+ //
+ // The format is
+ // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}",
+ // for example,
+ // "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application".
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteApplicationRequest) Reset() { *m = DeleteApplicationRequest{} } +func (m *DeleteApplicationRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteApplicationRequest) ProtoMessage() {} +func (*DeleteApplicationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_application_service_ffccd846cf58538c, []int{3} +} +func (m *DeleteApplicationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteApplicationRequest.Unmarshal(m, b) +} +func (m *DeleteApplicationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteApplicationRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteApplicationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteApplicationRequest.Merge(dst, src) +} +func (m *DeleteApplicationRequest) XXX_Size() int { + return xxx_messageInfo_DeleteApplicationRequest.Size(m) +} +func (m *DeleteApplicationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteApplicationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteApplicationRequest proto.InternalMessageInfo + +func (m *DeleteApplicationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// List applications for which the client has ACL visibility. +type ListApplicationsRequest struct { + // Required. + // + // Resource name of the profile under which the application is created. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for + // example, "projects/test-project/tenants/test-tenant/profiles/test-profile". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. + // + // The starting indicator from which to return results. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The maximum number of applications to be returned, at most 100. + // Default is 100 if a non-positive number is provided. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListApplicationsRequest) Reset() { *m = ListApplicationsRequest{} } +func (m *ListApplicationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListApplicationsRequest) ProtoMessage() {} +func (*ListApplicationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_application_service_ffccd846cf58538c, []int{4} +} +func (m *ListApplicationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListApplicationsRequest.Unmarshal(m, b) +} +func (m *ListApplicationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListApplicationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListApplicationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListApplicationsRequest.Merge(dst, src) +} +func (m *ListApplicationsRequest) XXX_Size() int { + return xxx_messageInfo_ListApplicationsRequest.Size(m) +} +func (m *ListApplicationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListApplicationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListApplicationsRequest proto.InternalMessageInfo + +func (m *ListApplicationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListApplicationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListApplicationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Output only. +// +// The List applications response object. +type ListApplicationsResponse struct { + // Applications for the current client. + Applications []*Application `protobuf:"bytes,1,rep,name=applications,proto3" json:"applications,omitempty"` + // A token to retrieve the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. 
+ Metadata *ResponseMetadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListApplicationsResponse) Reset() { *m = ListApplicationsResponse{} } +func (m *ListApplicationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListApplicationsResponse) ProtoMessage() {} +func (*ListApplicationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_application_service_ffccd846cf58538c, []int{5} +} +func (m *ListApplicationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListApplicationsResponse.Unmarshal(m, b) +} +func (m *ListApplicationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListApplicationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListApplicationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListApplicationsResponse.Merge(dst, src) +} +func (m *ListApplicationsResponse) XXX_Size() int { + return xxx_messageInfo_ListApplicationsResponse.Size(m) +} +func (m *ListApplicationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListApplicationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListApplicationsResponse proto.InternalMessageInfo + +func (m *ListApplicationsResponse) GetApplications() []*Application { + if m != nil { + return m.Applications + } + return nil +} + +func (m *ListApplicationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListApplicationsResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func init() { + proto.RegisterType((*CreateApplicationRequest)(nil), "google.cloud.talent.v4beta1.CreateApplicationRequest") + proto.RegisterType((*GetApplicationRequest)(nil), "google.cloud.talent.v4beta1.GetApplicationRequest") + proto.RegisterType((*UpdateApplicationRequest)(nil), "google.cloud.talent.v4beta1.UpdateApplicationRequest") + proto.RegisterType((*DeleteApplicationRequest)(nil), "google.cloud.talent.v4beta1.DeleteApplicationRequest") + proto.RegisterType((*ListApplicationsRequest)(nil), "google.cloud.talent.v4beta1.ListApplicationsRequest") + proto.RegisterType((*ListApplicationsResponse)(nil), "google.cloud.talent.v4beta1.ListApplicationsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ApplicationServiceClient is the client API for ApplicationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ApplicationServiceClient interface { + // Creates a new application entity. + CreateApplication(ctx context.Context, in *CreateApplicationRequest, opts ...grpc.CallOption) (*Application, error) + // Retrieves specified application. + GetApplication(ctx context.Context, in *GetApplicationRequest, opts ...grpc.CallOption) (*Application, error) + // Updates specified application. + UpdateApplication(ctx context.Context, in *UpdateApplicationRequest, opts ...grpc.CallOption) (*Application, error) + // Deletes specified application. 
+ DeleteApplication(ctx context.Context, in *DeleteApplicationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists all applications associated with the profile. + ListApplications(ctx context.Context, in *ListApplicationsRequest, opts ...grpc.CallOption) (*ListApplicationsResponse, error) +} + +type applicationServiceClient struct { + cc *grpc.ClientConn +} + +func NewApplicationServiceClient(cc *grpc.ClientConn) ApplicationServiceClient { + return &applicationServiceClient{cc} +} + +func (c *applicationServiceClient) CreateApplication(ctx context.Context, in *CreateApplicationRequest, opts ...grpc.CallOption) (*Application, error) { + out := new(Application) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ApplicationService/CreateApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *applicationServiceClient) GetApplication(ctx context.Context, in *GetApplicationRequest, opts ...grpc.CallOption) (*Application, error) { + out := new(Application) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ApplicationService/GetApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *applicationServiceClient) UpdateApplication(ctx context.Context, in *UpdateApplicationRequest, opts ...grpc.CallOption) (*Application, error) { + out := new(Application) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ApplicationService/UpdateApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *applicationServiceClient) DeleteApplication(ctx context.Context, in *DeleteApplicationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ApplicationService/DeleteApplication", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *applicationServiceClient) ListApplications(ctx context.Context, in *ListApplicationsRequest, opts ...grpc.CallOption) (*ListApplicationsResponse, error) { + out := new(ListApplicationsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ApplicationService/ListApplications", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ApplicationServiceServer is the server API for ApplicationService service. +type ApplicationServiceServer interface { + // Creates a new application entity. + CreateApplication(context.Context, *CreateApplicationRequest) (*Application, error) + // Retrieves specified application. + GetApplication(context.Context, *GetApplicationRequest) (*Application, error) + // Updates specified application. + UpdateApplication(context.Context, *UpdateApplicationRequest) (*Application, error) + // Deletes specified application. + DeleteApplication(context.Context, *DeleteApplicationRequest) (*empty.Empty, error) + // Lists all applications associated with the profile. 
+ ListApplications(context.Context, *ListApplicationsRequest) (*ListApplicationsResponse, error) +} + +func RegisterApplicationServiceServer(s *grpc.Server, srv ApplicationServiceServer) { + s.RegisterService(&_ApplicationService_serviceDesc, srv) +} + +func _ApplicationService_CreateApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationServiceServer).CreateApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ApplicationService/CreateApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationServiceServer).CreateApplication(ctx, req.(*CreateApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ApplicationService_GetApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationServiceServer).GetApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ApplicationService/GetApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationServiceServer).GetApplication(ctx, req.(*GetApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ApplicationService_UpdateApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationServiceServer).UpdateApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ApplicationService/UpdateApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationServiceServer).UpdateApplication(ctx, req.(*UpdateApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ApplicationService_DeleteApplication_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteApplicationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationServiceServer).DeleteApplication(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ApplicationService/DeleteApplication", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationServiceServer).DeleteApplication(ctx, req.(*DeleteApplicationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ApplicationService_ListApplications_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListApplicationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ApplicationServiceServer).ListApplications(ctx, in) 
+ } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ApplicationService/ListApplications", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ApplicationServiceServer).ListApplications(ctx, req.(*ListApplicationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ApplicationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.ApplicationService", + HandlerType: (*ApplicationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateApplication", + Handler: _ApplicationService_CreateApplication_Handler, + }, + { + MethodName: "GetApplication", + Handler: _ApplicationService_GetApplication_Handler, + }, + { + MethodName: "UpdateApplication", + Handler: _ApplicationService_UpdateApplication_Handler, + }, + { + MethodName: "DeleteApplication", + Handler: _ApplicationService_DeleteApplication_Handler, + }, + { + MethodName: "ListApplications", + Handler: _ApplicationService_ListApplications_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/application_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/application_service.proto", fileDescriptor_application_service_ffccd846cf58538c) +} + +var fileDescriptor_application_service_ffccd846cf58538c = []byte{ + // 665 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x95, 0x4f, 0x4f, 0xd4, 0x40, + 0x18, 0xc6, 0x33, 0xa0, 0x08, 0xef, 0xfa, 0x8f, 0x49, 0x84, 0x66, 0xd1, 0xb8, 0xe9, 0xc1, 0x6c, + 0xd6, 0xd0, 0x89, 0x2b, 0x5c, 0x24, 0x1a, 0x01, 0xc5, 0x40, 0x20, 0x92, 0x05, 0x2e, 0x5e, 0x36, + 0xc3, 0xee, 0x4b, 0x53, 0x69, 0x3b, 0xb5, 0x33, 0x4b, 0x14, 0x83, 0x07, 0x13, 0x3f, 0x81, 0xdf, + 0x40, 0x6f, 0x7e, 0x00, 0xaf, 0x1e, 0x3d, 0x78, 0xd4, 0x9b, 0x57, 0x3f, 0x88, 0xe9, 0xb4, 0xbb, + 0x14, 0xda, 0xed, 0xb2, 0x70, 0xeb, 0x4c, 0xe7, 0x99, 0xf7, 0x37, 0xcf, 0xf4, 0x79, 0x0b, 0xf3, + 0xb6, 0x10, 0xb6, 0x8b, 0xac, 0xe5, 0x8a, 0x4e, 0x9b, 0x29, 0xee, 0xa2, 0xaf, 0xd8, 0xc1, 0xdc, + 0x2e, 0x2a, 0xfe, 0x80, 0xf1, 0x20, 0x70, 0x9d, 0x16, 0x57, 0x8e, 0xf0, 0x9b, 0x12, 0xc3, 0x03, + 0xa7, 0x85, 0x56, 0x10, 0x0a, 0x25, 0xe8, 0x4c, 0x2c, 0xb3, 0xb4, 0xcc, 0x8a, 0x65, 0x56, 0x22, + 0x2b, 0xdf, 0x4e, 0xf6, 0xe4, 0x81, 0xc3, 0xb8, 0xef, 0x0b, 0xa5, 0x77, 0x90, 0xb1, 0xb4, 0x3c, + 0x7b, 0xc6, 0x8a, 0xc9, 0xf2, 0x6a, 0xd1, 0xf2, 0x96, 0xf0, 0xbc, 0xde, 0xca, 0x84, 0x89, 0xe9, + 0xd1, 0x6e, 0x67, 0x8f, 0xa1, 0x17, 0xa8, 0x77, 0xc9, 0xcb, 0xca, 0xe9, 0x97, 0x7b, 0x0e, 0xba, + 0xed, 0xa6, 0xc7, 0xe5, 0x7e, 0xbc, 0xc2, 0xfc, 0x00, 0xc6, 0x72, 0x88, 0x5c, 0xe1, 0xe2, 0x31, + 0x43, 0x03, 0xdf, 0x74, 0x50, 0x2a, 0x3a, 0x05, 0x63, 0x01, 0x0f, 0xd1, 0x57, 0x06, 0xa9, 0x90, + 0xea, 0x44, 0x23, 0x19, 0xd1, 0x35, 0x28, 0xa5, 0x88, 0x8d, 0x91, 0x0a, 0xa9, 0x96, 0xea, 0x55, + 0xab, 0xc0, 0x1c, 0x2b, 0xbd, 0x7b, 0x5a, 0x6c, 0xde, 0x87, 0x5b, 0x2f, 0x50, 0xe5, 0x14, 0xa7, + 0x70, 0xc9, 0xe7, 0x1e, 0x26, 0xa5, 0xf5, 0xb3, 0xf9, 0x95, 0x80, 0xb1, 0x13, 0xb4, 0xf3, 0x69, + 0x4f, 0x51, 0x91, 0x0b, 0x50, 0xd1, 0x05, 0x28, 0x75, 0x74, 0x1d, 0x6d, 0x55, 0x72, 0xc2, 0x72, + 0x77, 0xaf, 0xae, 0x9b, 0xd6, 0x4a, 0xe4, 0xe6, 0x06, 0x97, 0xfb, 0x0d, 0x88, 0x97, 0x47, 0xcf, + 0xa6, 0x05, 0xc6, 0x33, 0x74, 0x31, 0x17, 0x32, 0xef, 0x54, 0x1e, 0x4c, 0xaf, 0x3b, 0x32, 0xed, + 0x81, 0x1c, 0x74, 0x03, 0x77, 0x00, 0x02, 0x6e, 0x63, 0x53, 0x89, 0x7d, 0x8c, 0x2f, 0x60, 0xa2, + 0x31, 0x11, 0xcd, 0x6c, 0x47, 0x13, 0x74, 0x06, 
0xf4, 0xa0, 0x29, 0x9d, 0x43, 0x34, 0x46, 0x2b, + 0xa4, 0x7a, 0xb9, 0x31, 0x1e, 0x4d, 0x6c, 0x39, 0x87, 0x68, 0xfe, 0x25, 0x60, 0x64, 0xeb, 0xc9, + 0x40, 0xf8, 0x12, 0xe9, 0x3a, 0x5c, 0x4d, 0xf9, 0x20, 0x0d, 0x52, 0x19, 0x1d, 0xca, 0xc5, 0x13, + 0x6a, 0x7a, 0x0f, 0x6e, 0xf8, 0xf8, 0x56, 0x35, 0x33, 0xac, 0xd7, 0xa2, 0xe9, 0xcd, 0x1e, 0xef, + 0x2a, 0x8c, 0x7b, 0xa8, 0x78, 0x9b, 0x2b, 0xae, 0x71, 0x4b, 0xf5, 0xd9, 0xc2, 0x8a, 0x5d, 0xdc, + 0x8d, 0x44, 0xd4, 0xe8, 0xc9, 0xeb, 0x7f, 0xae, 0x00, 0x4d, 0x01, 0x6d, 0xc5, 0xf9, 0xa5, 0x3f, + 0x08, 0x4c, 0x66, 0xbe, 0x73, 0x3a, 0x5f, 0x58, 0xa5, 0x5f, 0x2e, 0xca, 0x67, 0xb6, 0xc3, 0x5c, + 0xfd, 0xf8, 0xfb, 0xdf, 0xe7, 0x91, 0x65, 0xf3, 0x49, 0x2f, 0xbb, 0xef, 0xe3, 0x1b, 0x7c, 0x1c, + 0x84, 0xe2, 0x35, 0xb6, 0x94, 0x64, 0x35, 0xa6, 0xd0, 0xe7, 0xbe, 0x7e, 0x0a, 0x42, 0xb1, 0xe7, + 0xb8, 0x28, 0x59, 0xed, 0x28, 0xdd, 0x13, 0xe4, 0x23, 0x52, 0xa3, 0xdf, 0x09, 0x5c, 0x3f, 0x99, + 0x14, 0x5a, 0x2f, 0xe4, 0xc8, 0x8d, 0xd5, 0x10, 0xec, 0x2b, 0x9a, 0xfd, 0x29, 0x4d, 0xb1, 0x47, + 0x9f, 0xeb, 0x20, 0xf2, 0x13, 0xe0, 0xac, 0x76, 0x44, 0x7f, 0x11, 0x98, 0xcc, 0x84, 0x76, 0x80, + 0xf5, 0xfd, 0x42, 0x3e, 0x04, 0xfe, 0x8e, 0xc6, 0x7f, 0x59, 0x5f, 0x3b, 0xc6, 0x4f, 0xb7, 0xd9, + 0xf3, 0x1c, 0x25, 0xba, 0x86, 0x6f, 0x04, 0x26, 0x33, 0xe9, 0x1e, 0x70, 0x9a, 0x7e, 0xdd, 0xa0, + 0x3c, 0x95, 0xe9, 0x28, 0xcf, 0xa3, 0xe6, 0xdd, 0xb5, 0xbe, 0x76, 0x51, 0xeb, 0x7f, 0x12, 0xb8, + 0x79, 0x3a, 0xea, 0x74, 0xae, 0x90, 0xb5, 0x4f, 0x27, 0x2a, 0xcf, 0x0f, 0xa9, 0x8a, 0x03, 0x9a, + 0xf7, 0x11, 0x9d, 0x27, 0x00, 0x4b, 0x9f, 0x08, 0xdc, 0x6d, 0x09, 0xaf, 0x08, 0x62, 0x69, 0x3a, + 0x9b, 0xfb, 0xcd, 0xc8, 0xd7, 0x4d, 0xf2, 0x6a, 0x31, 0xd1, 0xd9, 0xc2, 0xe5, 0xbe, 0x6d, 0x89, + 0xd0, 0x66, 0x36, 0xfa, 0xda, 0x75, 0x16, 0xbf, 0xe2, 0x81, 0x23, 0x73, 0xff, 0xb6, 0x0b, 0xf1, + 0xf0, 0xcb, 0xc8, 0xe8, 0xf2, 0xf6, 0xd6, 0xee, 0x98, 0xd6, 0x3c, 0xfc, 0x1f, 0x00, 0x00, 0xff, + 0xff, 0x39, 0x45, 0xae, 0x62, 0x41, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/batch.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/batch.pb.go new file mode 100644 index 0000000..cf173cd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/batch.pb.go @@ -0,0 +1,43 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/batch.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/batch.proto", fileDescriptor_batch_7dbe1703aba922fd) +} + +var fileDescriptor_batch_7dbe1703aba922fd = []byte{ + // 200 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0xcf, 0xb1, 0x6a, 0xc3, 0x30, + 0x10, 0x06, 0x60, 0xda, 0x42, 0x07, 0x8f, 0x5d, 0x0a, 0x6e, 0xc1, 0x53, 0xe9, 0xa6, 0xa3, 0xb4, + 0x5b, 0xa7, 0xba, 0x2f, 0x60, 0x48, 0xa6, 0x6c, 0x27, 0x45, 0x51, 0x14, 0x64, 0x9d, 0xb0, 0xce, + 0x21, 0xcf, 0x93, 0x27, 0x0d, 0x96, 0x6e, 0x0c, 0x1e, 0x0f, 0x7d, 0x77, 0xff, 0xaf, 0xe6, 0xd3, + 0x11, 0xb9, 0x60, 0xc1, 0x04, 0x9a, 0xf7, 0xc0, 0x18, 0x6c, 0x64, 0x38, 0xff, 0x68, 0xcb, 0xf8, + 0x05, 0x1a, 0xd9, 0x1c, 0x55, 0x9a, 0x88, 0xe9, 0xe5, 0xad, 0x42, 0x55, 0xa0, 0xaa, 0x50, 0x09, + 0x6c, 0xdf, 0xe5, 0x0a, 0x26, 0x0f, 0x18, 0x23, 0x31, 0xb2, 0xa7, 0x98, 0xeb, 0x6a, 0xfb, 0xb1, + 0x96, 0x71, 0x22, 0x2d, 0xac, 0x13, 0x56, 0x26, 0x3d, 0x1f, 0x80, 0xfd, 0x68, 0x33, 0xe3, 0x98, + 0x04, 0xbc, 0x0a, 0x98, 0x92, 0x81, 0xcc, 0xc8, 0xb3, 0x04, 0xf4, 0x97, 0xa6, 0x33, 0x34, 0xaa, + 0x95, 0x86, 0x7d, 0xd3, 0x2f, 0x7f, 0x19, 0x16, 0x3e, 0x3c, 0xec, 0xfe, 0x84, 0x3a, 0x0a, 0x18, + 0x9d, 0xa2, 0xc9, 0x81, 0xb3, 0xb1, 0x1c, 0x83, 0xfa, 0x84, 0xc9, 0xe7, 0xbb, 0x85, 0x7f, 0xeb, + 0x78, 0x7d, 0x7c, 0xfa, 0xdf, 0x6e, 0xf4, 0x73, 0xd9, 0xf9, 0xbe, 0x05, 0x00, 0x00, 0xff, 0xff, + 0xc8, 0xd6, 0xac, 0x65, 0x47, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/common.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/common.pb.go new file mode 100644 index 0000000..3cb790c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/common.pb.go @@ -0,0 +1,2389 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/common.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import date "google.golang.org/genproto/googleapis/type/date" +import latlng "google.golang.org/genproto/googleapis/type/latlng" +import money "google.golang.org/genproto/googleapis/type/money" +import postaladdress "google.golang.org/genproto/googleapis/type/postaladdress" +import _ "google.golang.org/genproto/googleapis/type/timeofday" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An enum that represents the size of the company. +type CompanySize int32 + +const ( + // Default value if the size isn't specified. + CompanySize_COMPANY_SIZE_UNSPECIFIED CompanySize = 0 + // The company has less than 50 employees. + CompanySize_MINI CompanySize = 1 + // The company has between 50 and 99 employees. + CompanySize_SMALL CompanySize = 2 + // The company has between 100 and 499 employees. 
+ CompanySize_SMEDIUM CompanySize = 3 + // The company has between 500 and 999 employees. + CompanySize_MEDIUM CompanySize = 4 + // The company has between 1,000 and 4,999 employees. + CompanySize_BIG CompanySize = 5 + // The company has between 5,000 and 9,999 employees. + CompanySize_BIGGER CompanySize = 6 + // The company has 10,000 or more employees. + CompanySize_GIANT CompanySize = 7 +) + +var CompanySize_name = map[int32]string{ + 0: "COMPANY_SIZE_UNSPECIFIED", + 1: "MINI", + 2: "SMALL", + 3: "SMEDIUM", + 4: "MEDIUM", + 5: "BIG", + 6: "BIGGER", + 7: "GIANT", +} +var CompanySize_value = map[string]int32{ + "COMPANY_SIZE_UNSPECIFIED": 0, + "MINI": 1, + "SMALL": 2, + "SMEDIUM": 3, + "MEDIUM": 4, + "BIG": 5, + "BIGGER": 6, + "GIANT": 7, +} + +func (x CompanySize) String() string { + return proto.EnumName(CompanySize_name, int32(x)) +} +func (CompanySize) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{0} +} + +// An enum that represents employee benefits included with the job. +type JobBenefit int32 + +const ( + // Default value if the type isn't specified. + JobBenefit_JOB_BENEFIT_UNSPECIFIED JobBenefit = 0 + // The job includes access to programs that support child care, such + // as daycare. + JobBenefit_CHILD_CARE JobBenefit = 1 + // The job includes dental services covered by a dental + // insurance plan. + JobBenefit_DENTAL JobBenefit = 2 + // The job offers specific benefits to domestic partners. + JobBenefit_DOMESTIC_PARTNER JobBenefit = 3 + // The job allows for a flexible work schedule. + JobBenefit_FLEXIBLE_HOURS JobBenefit = 4 + // The job includes health services covered by a medical insurance plan. + JobBenefit_MEDICAL JobBenefit = 5 + // The job includes a life insurance plan provided by the employer or + // available for purchase by the employee. + JobBenefit_LIFE_INSURANCE JobBenefit = 6 + // The job allows for a leave of absence to a parent to care for a newborn + // child. + JobBenefit_PARENTAL_LEAVE JobBenefit = 7 + // The job includes a workplace retirement plan provided by the + // employer or available for purchase by the employee. + JobBenefit_RETIREMENT_PLAN JobBenefit = 8 + // The job allows for paid time off due to illness. + JobBenefit_SICK_DAYS JobBenefit = 9 + // The job includes paid time off for vacation. + JobBenefit_VACATION JobBenefit = 10 + // The job includes vision services covered by a vision + // insurance plan. + JobBenefit_VISION JobBenefit = 11 +) + +var JobBenefit_name = map[int32]string{ + 0: "JOB_BENEFIT_UNSPECIFIED", + 1: "CHILD_CARE", + 2: "DENTAL", + 3: "DOMESTIC_PARTNER", + 4: "FLEXIBLE_HOURS", + 5: "MEDICAL", + 6: "LIFE_INSURANCE", + 7: "PARENTAL_LEAVE", + 8: "RETIREMENT_PLAN", + 9: "SICK_DAYS", + 10: "VACATION", + 11: "VISION", +} +var JobBenefit_value = map[string]int32{ + "JOB_BENEFIT_UNSPECIFIED": 0, + "CHILD_CARE": 1, + "DENTAL": 2, + "DOMESTIC_PARTNER": 3, + "FLEXIBLE_HOURS": 4, + "MEDICAL": 5, + "LIFE_INSURANCE": 6, + "PARENTAL_LEAVE": 7, + "RETIREMENT_PLAN": 8, + "SICK_DAYS": 9, + "VACATION": 10, + "VISION": 11, +} + +func (x JobBenefit) String() string { + return proto.EnumName(JobBenefit_name, int32(x)) +} +func (JobBenefit) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{1} +} + +// Educational degree level defined in International Standard Classification +// of Education (ISCED). +type DegreeType int32 + +const ( + // Default value. Represents no degree, or early childhood education. + // Maps to ISCED code 0. 
+ // Ex) Kindergarten + DegreeType_DEGREE_TYPE_UNSPECIFIED DegreeType = 0 + // Primary education which is typically the first stage of compulsory + // education. ISCED code 1. + // Ex) Elementary school + DegreeType_PRIMARY_EDUCATION DegreeType = 1 + // Lower secondary education; First stage of secondary education building on + // primary education, typically with a more subject-oriented curriculum. + // ISCED code 2. + // Ex) Middle school + DegreeType_LOWER_SECONDARY_EDUCATION DegreeType = 2 + // Middle education; Second/final stage of secondary education preparing for + // tertiary education and/or providing skills relevant to employment. + // Usually with an increased range of subject options and streams. ISCED + // code 3. + // Ex) High school + DegreeType_UPPER_SECONDARY_EDUCATION DegreeType = 3 + // Adult Remedial Education; Programmes providing learning experiences that + // build on secondary education and prepare for labour market entry and/or + // tertiary education. The content is broader than secondary but not as + // complex as tertiary education. ISCED code 4. + DegreeType_ADULT_REMEDIAL_EDUCATION DegreeType = 4 + // Associate's or equivalent; Short first tertiary programmes that are + // typically practically-based, occupationally-specific and prepare for + // labour market entry. These programmes may also provide a pathway to other + // tertiary programmes. ISCED code 5. + DegreeType_ASSOCIATES_OR_EQUIVALENT DegreeType = 5 + // Bachelor's or equivalent; Programmes designed to provide intermediate + // academic and/or professional knowledge, skills and competencies leading + // to a first tertiary degree or equivalent qualification. ISCED code 6. + DegreeType_BACHELORS_OR_EQUIVALENT DegreeType = 6 + // Master's or equivalent; Programmes designed to provide advanced academic + // and/or professional knowledge, skills and competencies leading to a + // second tertiary degree or equivalent qualification. ISCED code 7. + DegreeType_MASTERS_OR_EQUIVALENT DegreeType = 7 + // Doctoral or equivalent; Programmes designed primarily to lead to an + // advanced research qualification, usually concluding with the submission + // and defense of a substantive dissertation of publishable quality based on + // original research. ISCED code 8. + DegreeType_DOCTORAL_OR_EQUIVALENT DegreeType = 8 +) + +var DegreeType_name = map[int32]string{ + 0: "DEGREE_TYPE_UNSPECIFIED", + 1: "PRIMARY_EDUCATION", + 2: "LOWER_SECONDARY_EDUCATION", + 3: "UPPER_SECONDARY_EDUCATION", + 4: "ADULT_REMEDIAL_EDUCATION", + 5: "ASSOCIATES_OR_EQUIVALENT", + 6: "BACHELORS_OR_EQUIVALENT", + 7: "MASTERS_OR_EQUIVALENT", + 8: "DOCTORAL_OR_EQUIVALENT", +} +var DegreeType_value = map[string]int32{ + "DEGREE_TYPE_UNSPECIFIED": 0, + "PRIMARY_EDUCATION": 1, + "LOWER_SECONDARY_EDUCATION": 2, + "UPPER_SECONDARY_EDUCATION": 3, + "ADULT_REMEDIAL_EDUCATION": 4, + "ASSOCIATES_OR_EQUIVALENT": 5, + "BACHELORS_OR_EQUIVALENT": 6, + "MASTERS_OR_EQUIVALENT": 7, + "DOCTORAL_OR_EQUIVALENT": 8, +} + +func (x DegreeType) String() string { + return proto.EnumName(DegreeType_name, int32(x)) +} +func (DegreeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{2} +} + +// An enum that represents the employment type of a job. +type EmploymentType int32 + +const ( + // The default value if the employment type isn't specified. 
+ EmploymentType_EMPLOYMENT_TYPE_UNSPECIFIED EmploymentType = 0
+ // The job requires working a number of hours that constitute full
+ // time employment, typically 40 or more hours per week.
+ EmploymentType_FULL_TIME EmploymentType = 1
+ // The job entails working fewer hours than a full time job,
+ // typically less than 40 hours a week.
+ EmploymentType_PART_TIME EmploymentType = 2
+ // The job is offered as a contracted, as opposed to a salaried employee,
+ // position.
+ EmploymentType_CONTRACTOR EmploymentType = 3
+ // The job is offered as a contracted position with the understanding
+ // that it's converted into a full-time position at the end of the
+ // contract. Jobs of this type are also returned by a search for
+ // [EmploymentType.CONTRACTOR][google.cloud.talent.v4beta1.EmploymentType.CONTRACTOR] jobs.
+ EmploymentType_CONTRACT_TO_HIRE EmploymentType = 4
+ // The job is offered as a temporary employment opportunity, usually
+ // a short-term engagement.
+ EmploymentType_TEMPORARY EmploymentType = 5
+ // The job is a fixed-term opportunity for students or entry-level job
+ // seekers to obtain on-the-job training, typically offered as a summer
+ // position.
+ EmploymentType_INTERN EmploymentType = 6
+ // This is an opportunity for an individual to volunteer, where there's no
+ // expectation of compensation for the provided services.
+ EmploymentType_VOLUNTEER EmploymentType = 7
+ // The job requires an employee to work on an as-needed basis with a
+ // flexible schedule.
+ EmploymentType_PER_DIEM EmploymentType = 8
+ // The job involves employing people in remote areas and flying them
+ // temporarily to the work site instead of relocating employees and their
+ // families permanently.
+ EmploymentType_FLY_IN_FLY_OUT EmploymentType = 9
+ // The job does not fit any of the other listed types.
+ EmploymentType_OTHER_EMPLOYMENT_TYPE EmploymentType = 10
+)
+
+var EmploymentType_name = map[int32]string{
+ 0: "EMPLOYMENT_TYPE_UNSPECIFIED",
+ 1: "FULL_TIME",
+ 2: "PART_TIME",
+ 3: "CONTRACTOR",
+ 4: "CONTRACT_TO_HIRE",
+ 5: "TEMPORARY",
+ 6: "INTERN",
+ 7: "VOLUNTEER",
+ 8: "PER_DIEM",
+ 9: "FLY_IN_FLY_OUT",
+ 10: "OTHER_EMPLOYMENT_TYPE",
+}
+var EmploymentType_value = map[string]int32{
+ "EMPLOYMENT_TYPE_UNSPECIFIED": 0,
+ "FULL_TIME": 1,
+ "PART_TIME": 2,
+ "CONTRACTOR": 3,
+ "CONTRACT_TO_HIRE": 4,
+ "TEMPORARY": 5,
+ "INTERN": 6,
+ "VOLUNTEER": 7,
+ "PER_DIEM": 8,
+ "FLY_IN_FLY_OUT": 9,
+ "OTHER_EMPLOYMENT_TYPE": 10,
+}
+
+func (x EmploymentType) String() string {
+ return proto.EnumName(EmploymentType_name, int32(x))
+}
+func (EmploymentType) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{3}
+}
+
+// An enum that represents the experience level required for the job.
+type JobLevel int32
+
+const (
+ // The default value if the level isn't specified.
+ JobLevel_JOB_LEVEL_UNSPECIFIED JobLevel = 0
+ // Entry-level individual contributors, typically with less than 2 years of
+ // experience in a similar role. Includes interns.
+ JobLevel_ENTRY_LEVEL JobLevel = 1
+ // Experienced individual contributors, typically with 2+ years of
+ // experience in a similar role.
+ JobLevel_EXPERIENCED JobLevel = 2
+ // Entry- to mid-level managers responsible for managing a team of people.
+ JobLevel_MANAGER JobLevel = 3
+ // Senior-level managers responsible for managing teams of managers.
+ JobLevel_DIRECTOR JobLevel = 4
+ // Executive-level managers and above, including C-level positions. 
+ JobLevel_EXECUTIVE JobLevel = 5
+)
+
+var JobLevel_name = map[int32]string{
+ 0: "JOB_LEVEL_UNSPECIFIED",
+ 1: "ENTRY_LEVEL",
+ 2: "EXPERIENCED",
+ 3: "MANAGER",
+ 4: "DIRECTOR",
+ 5: "EXECUTIVE",
+}
+var JobLevel_value = map[string]int32{
+ "JOB_LEVEL_UNSPECIFIED": 0,
+ "ENTRY_LEVEL": 1,
+ "EXPERIENCED": 2,
+ "MANAGER": 3,
+ "DIRECTOR": 4,
+ "EXECUTIVE": 5,
+}
+
+func (x JobLevel) String() string {
+ return proto.EnumName(JobLevel_name, int32(x))
+}
+func (JobLevel) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{4}
+}
+
+// An enum that represents the categorization or primary focus of a specific
+// role. This value is different than the "industry" associated with a role,
+// which is related to the categorization of the company listing the job.
+type JobCategory int32
+
+const (
+ // The default value if the category isn't specified.
+ JobCategory_JOB_CATEGORY_UNSPECIFIED JobCategory = 0
+ // An accounting and finance job, such as an Accountant.
+ JobCategory_ACCOUNTING_AND_FINANCE JobCategory = 1
+ // An administrative and office job, such as an Administrative Assistant.
+ JobCategory_ADMINISTRATIVE_AND_OFFICE JobCategory = 2
+ // An advertising and marketing job, such as Marketing Manager.
+ JobCategory_ADVERTISING_AND_MARKETING JobCategory = 3
+ // An animal care job, such as Veterinarian.
+ JobCategory_ANIMAL_CARE JobCategory = 4
+ // An art, fashion, or design job, such as Designer.
+ JobCategory_ART_FASHION_AND_DESIGN JobCategory = 5
+ // A business operations job, such as Business Operations Manager.
+ JobCategory_BUSINESS_OPERATIONS JobCategory = 6
+ // A cleaning and facilities job, such as Custodial Staff.
+ JobCategory_CLEANING_AND_FACILITIES JobCategory = 7
+ // A computer and IT job, such as Systems Administrator.
+ JobCategory_COMPUTER_AND_IT JobCategory = 8
+ // A construction job, such as General Laborer.
+ JobCategory_CONSTRUCTION JobCategory = 9
+ // A customer service job, such as Cashier.
+ JobCategory_CUSTOMER_SERVICE JobCategory = 10
+ // An education job, such as School Teacher.
+ JobCategory_EDUCATION JobCategory = 11
+ // An entertainment and travel job, such as Flight Attendant.
+ JobCategory_ENTERTAINMENT_AND_TRAVEL JobCategory = 12
+ // A farming or outdoor job, such as Park Ranger.
+ JobCategory_FARMING_AND_OUTDOORS JobCategory = 13
+ // A healthcare job, such as Registered Nurse.
+ JobCategory_HEALTHCARE JobCategory = 14
+ // A human resources job, such as Human Resources Director.
+ JobCategory_HUMAN_RESOURCES JobCategory = 15
+ // An installation, maintenance, or repair job, such as Electrician.
+ JobCategory_INSTALLATION_MAINTENANCE_AND_REPAIR JobCategory = 16
+ // A legal job, such as Law Clerk.
+ JobCategory_LEGAL JobCategory = 17
+ // A management job, often used in conjunction with another category,
+ // such as Store Manager.
+ JobCategory_MANAGEMENT JobCategory = 18
+ // A manufacturing or warehouse job, such as Assembly Technician.
+ JobCategory_MANUFACTURING_AND_WAREHOUSE JobCategory = 19
+ // A media, communications, or writing job, such as Media Relations.
+ JobCategory_MEDIA_COMMUNICATIONS_AND_WRITING JobCategory = 20
+ // An oil, gas or mining job, such as Offshore Driller.
+ JobCategory_OIL_GAS_AND_MINING JobCategory = 21
+ // A personal care and services job, such as Hair Stylist.
+ JobCategory_PERSONAL_CARE_AND_SERVICES JobCategory = 22
+ // A protective services job, such as Security Guard.
+ JobCategory_PROTECTIVE_SERVICES JobCategory = 23
+ // A real estate job, such as Buyer's Agent. 
+ JobCategory_REAL_ESTATE JobCategory = 24
+ // A restaurant and hospitality job, such as Restaurant Server.
+ JobCategory_RESTAURANT_AND_HOSPITALITY JobCategory = 25
+ // A sales and/or retail job, such as Sales Associate.
+ JobCategory_SALES_AND_RETAIL JobCategory = 26
+ // A science and engineering job, such as Lab Technician.
+ JobCategory_SCIENCE_AND_ENGINEERING JobCategory = 27
+ // A social services or non-profit job, such as Case Worker.
+ JobCategory_SOCIAL_SERVICES_AND_NON_PROFIT JobCategory = 28
+ // A sports, fitness, or recreation job, such as Personal Trainer.
+ JobCategory_SPORTS_FITNESS_AND_RECREATION JobCategory = 29
+ // A transportation or logistics job, such as Truck Driver.
+ JobCategory_TRANSPORTATION_AND_LOGISTICS JobCategory = 30
+)
+
+var JobCategory_name = map[int32]string{
+ 0: "JOB_CATEGORY_UNSPECIFIED",
+ 1: "ACCOUNTING_AND_FINANCE",
+ 2: "ADMINISTRATIVE_AND_OFFICE",
+ 3: "ADVERTISING_AND_MARKETING",
+ 4: "ANIMAL_CARE",
+ 5: "ART_FASHION_AND_DESIGN",
+ 6: "BUSINESS_OPERATIONS",
+ 7: "CLEANING_AND_FACILITIES",
+ 8: "COMPUTER_AND_IT",
+ 9: "CONSTRUCTION",
+ 10: "CUSTOMER_SERVICE",
+ 11: "EDUCATION",
+ 12: "ENTERTAINMENT_AND_TRAVEL",
+ 13: "FARMING_AND_OUTDOORS",
+ 14: "HEALTHCARE",
+ 15: "HUMAN_RESOURCES",
+ 16: "INSTALLATION_MAINTENANCE_AND_REPAIR",
+ 17: "LEGAL",
+ 18: "MANAGEMENT",
+ 19: "MANUFACTURING_AND_WAREHOUSE",
+ 20: "MEDIA_COMMUNICATIONS_AND_WRITING",
+ 21: "OIL_GAS_AND_MINING",
+ 22: "PERSONAL_CARE_AND_SERVICES",
+ 23: "PROTECTIVE_SERVICES",
+ 24: "REAL_ESTATE",
+ 25: "RESTAURANT_AND_HOSPITALITY",
+ 26: "SALES_AND_RETAIL",
+ 27: "SCIENCE_AND_ENGINEERING",
+ 28: "SOCIAL_SERVICES_AND_NON_PROFIT",
+ 29: "SPORTS_FITNESS_AND_RECREATION",
+ 30: "TRANSPORTATION_AND_LOGISTICS",
+}
+var JobCategory_value = map[string]int32{
+ "JOB_CATEGORY_UNSPECIFIED": 0,
+ "ACCOUNTING_AND_FINANCE": 1,
+ "ADMINISTRATIVE_AND_OFFICE": 2,
+ "ADVERTISING_AND_MARKETING": 3,
+ "ANIMAL_CARE": 4,
+ "ART_FASHION_AND_DESIGN": 5,
+ "BUSINESS_OPERATIONS": 6,
+ "CLEANING_AND_FACILITIES": 7,
+ "COMPUTER_AND_IT": 8,
+ "CONSTRUCTION": 9,
+ "CUSTOMER_SERVICE": 10,
+ "EDUCATION": 11,
+ "ENTERTAINMENT_AND_TRAVEL": 12,
+ "FARMING_AND_OUTDOORS": 13,
+ "HEALTHCARE": 14,
+ "HUMAN_RESOURCES": 15,
+ "INSTALLATION_MAINTENANCE_AND_REPAIR": 16,
+ "LEGAL": 17,
+ "MANAGEMENT": 18,
+ "MANUFACTURING_AND_WAREHOUSE": 19,
+ "MEDIA_COMMUNICATIONS_AND_WRITING": 20,
+ "OIL_GAS_AND_MINING": 21,
+ "PERSONAL_CARE_AND_SERVICES": 22,
+ "PROTECTIVE_SERVICES": 23,
+ "REAL_ESTATE": 24,
+ "RESTAURANT_AND_HOSPITALITY": 25,
+ "SALES_AND_RETAIL": 26,
+ "SCIENCE_AND_ENGINEERING": 27,
+ "SOCIAL_SERVICES_AND_NON_PROFIT": 28,
+ "SPORTS_FITNESS_AND_RECREATION": 29,
+ "TRANSPORTATION_AND_LOGISTICS": 30,
+}
+
+func (x JobCategory) String() string {
+ return proto.EnumName(JobCategory_name, int32(x))
+}
+func (JobCategory) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{5}
+}
+
+// An enum that represents the job posting region. In most cases, job postings
+// don't need to specify a region. If a region is given, jobs are
+// eligible for searches in the specified region.
+type PostingRegion int32
+
+const (
+ // If the region is unspecified, the job is only returned if it
+ // matches the [LocationFilter][google.cloud.talent.v4beta1.LocationFilter]. 
+ PostingRegion_POSTING_REGION_UNSPECIFIED PostingRegion = 0 + // In addition to exact location matching, job posting is returned when the + // [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] in the search query is in the same administrative area + // as the returned job posting. For example, if a `ADMINISTRATIVE_AREA` job + // is posted in "CA, USA", it's returned if [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] has + // "Mountain View". + // + // Administrative area refers to top-level administrative subdivision of this + // country. For example, US state, IT region, UK constituent nation and + // JP prefecture. + PostingRegion_ADMINISTRATIVE_AREA PostingRegion = 1 + // In addition to exact location matching, job is returned when + // [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] in search query is in the same country as this job. + // For example, if a `NATION_WIDE` job is posted in "USA", it's + // returned if [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] has 'Mountain View'. + PostingRegion_NATION PostingRegion = 2 + // Job allows employees to work remotely (telecommute). + // If [locations][] are provided with this value, the job is + // considered as having a location, but telecommuting is allowed. + PostingRegion_TELECOMMUTE PostingRegion = 3 +) + +var PostingRegion_name = map[int32]string{ + 0: "POSTING_REGION_UNSPECIFIED", + 1: "ADMINISTRATIVE_AREA", + 2: "NATION", + 3: "TELECOMMUTE", +} +var PostingRegion_value = map[string]int32{ + "POSTING_REGION_UNSPECIFIED": 0, + "ADMINISTRATIVE_AREA": 1, + "NATION": 2, + "TELECOMMUTE": 3, +} + +func (x PostingRegion) String() string { + return proto.EnumName(PostingRegion_name, int32(x)) +} +func (PostingRegion) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{6} +} + +// An enum that represents who has view access to the resource. +type Visibility int32 + +const ( + // Default value. + Visibility_VISIBILITY_UNSPECIFIED Visibility = 0 + // The resource is only visible to the GCP account who owns it. + Visibility_ACCOUNT_ONLY Visibility = 1 + // The resource is visible to the owner and may be visible to other + // applications and processes at Google. + Visibility_SHARED_WITH_GOOGLE Visibility = 2 + // The resource is visible to the owner and may be visible to all other API + // clients. + Visibility_SHARED_WITH_PUBLIC Visibility = 3 +) + +var Visibility_name = map[int32]string{ + 0: "VISIBILITY_UNSPECIFIED", + 1: "ACCOUNT_ONLY", + 2: "SHARED_WITH_GOOGLE", + 3: "SHARED_WITH_PUBLIC", +} +var Visibility_value = map[string]int32{ + "VISIBILITY_UNSPECIFIED": 0, + "ACCOUNT_ONLY": 1, + "SHARED_WITH_GOOGLE": 2, + "SHARED_WITH_PUBLIC": 3, +} + +func (x Visibility) String() string { + return proto.EnumName(Visibility_name, int32(x)) +} +func (Visibility) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7} +} + +// Enum that represents the usage of the contact information. +type ContactInfoUsage int32 + +const ( + // Default value. + ContactInfoUsage_CONTACT_INFO_USAGE_UNSPECIFIED ContactInfoUsage = 0 + // Personal use. + ContactInfoUsage_PERSONAL ContactInfoUsage = 1 + // Work use. + ContactInfoUsage_WORK ContactInfoUsage = 2 + // School use. 
+ ContactInfoUsage_SCHOOL ContactInfoUsage = 3 +) + +var ContactInfoUsage_name = map[int32]string{ + 0: "CONTACT_INFO_USAGE_UNSPECIFIED", + 1: "PERSONAL", + 2: "WORK", + 3: "SCHOOL", +} +var ContactInfoUsage_value = map[string]int32{ + "CONTACT_INFO_USAGE_UNSPECIFIED": 0, + "PERSONAL": 1, + "WORK": 2, + "SCHOOL": 3, +} + +func (x ContactInfoUsage) String() string { + return proto.EnumName(ContactInfoUsage_name, int32(x)) +} +func (ContactInfoUsage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{8} +} + +// Input only. +// +// Option for HTML content sanitization on user input fields, for example, job +// description. By setting this option, user can determine whether and how +// sanitization is performed on these fields. +type HtmlSanitization int32 + +const ( + // Default value. + HtmlSanitization_HTML_SANITIZATION_UNSPECIFIED HtmlSanitization = 0 + // Disables sanitization on HTML input. + HtmlSanitization_HTML_SANITIZATION_DISABLED HtmlSanitization = 1 + // Sanitizes HTML input, only accepts bold, italic, ordered list, and + // unordered list markup tags. + HtmlSanitization_SIMPLE_FORMATTING_ONLY HtmlSanitization = 2 +) + +var HtmlSanitization_name = map[int32]string{ + 0: "HTML_SANITIZATION_UNSPECIFIED", + 1: "HTML_SANITIZATION_DISABLED", + 2: "SIMPLE_FORMATTING_ONLY", +} +var HtmlSanitization_value = map[string]int32{ + "HTML_SANITIZATION_UNSPECIFIED": 0, + "HTML_SANITIZATION_DISABLED": 1, + "SIMPLE_FORMATTING_ONLY": 2, +} + +func (x HtmlSanitization) String() string { + return proto.EnumName(HtmlSanitization_name, int32(x)) +} +func (HtmlSanitization) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{9} +} + +// Method for commute. +type CommuteMethod int32 + +const ( + // Commute method isn't specified. + CommuteMethod_COMMUTE_METHOD_UNSPECIFIED CommuteMethod = 0 + // Commute time is calculated based on driving time. + CommuteMethod_DRIVING CommuteMethod = 1 + // Commute time is calculated based on public transit including bus, metro, + // subway, and so on. + CommuteMethod_TRANSIT CommuteMethod = 2 + // Commute time is calculated based on walking time. + CommuteMethod_WALKING CommuteMethod = 3 + // Commute time is calculated based on biking time. + CommuteMethod_CYCLING CommuteMethod = 4 +) + +var CommuteMethod_name = map[int32]string{ + 0: "COMMUTE_METHOD_UNSPECIFIED", + 1: "DRIVING", + 2: "TRANSIT", + 3: "WALKING", + 4: "CYCLING", +} +var CommuteMethod_value = map[string]int32{ + "COMMUTE_METHOD_UNSPECIFIED": 0, + "DRIVING": 1, + "TRANSIT": 2, + "WALKING": 3, + "CYCLING": 4, +} + +func (x CommuteMethod) String() string { + return proto.EnumName(CommuteMethod_name, int32(x)) +} +func (CommuteMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{10} +} + +// Enum that represents the skill proficiency level. +type SkillProficiencyLevel int32 + +const ( + // Default value. + SkillProficiencyLevel_SKILL_PROFICIENCY_LEVEL_UNSPECIFIED SkillProficiencyLevel = 0 + // Have a common knowledge or an understanding of basic techniques and + // concepts. + SkillProficiencyLevel_FUNDAMENTAL_AWARENESS SkillProficiencyLevel = 1 + // Have the level of experience gained in a classroom and/or experimental + // scenarios or as a trainee on-the-job. + SkillProficiencyLevel_NOVICE SkillProficiencyLevel = 2 + // Be able to successfully complete tasks in this skill as requested. 
Help
+ // from an expert may be required from time to time, but can usually perform
+ // skill independently.
+ SkillProficiencyLevel_INTERMEDIATE SkillProficiencyLevel = 3
+ // Can perform the actions associated with this skill without assistance.
+ SkillProficiencyLevel_ADVANCED SkillProficiencyLevel = 4
+ // Known as an expert in this area.
+ SkillProficiencyLevel_EXPERT SkillProficiencyLevel = 5
+)
+
+var SkillProficiencyLevel_name = map[int32]string{
+ 0: "SKILL_PROFICIENCY_LEVEL_UNSPECIFIED",
+ 1: "FUNDAMENTAL_AWARENESS",
+ 2: "NOVICE",
+ 3: "INTERMEDIATE",
+ 4: "ADVANCED",
+ 5: "EXPERT",
+}
+var SkillProficiencyLevel_value = map[string]int32{
+ "SKILL_PROFICIENCY_LEVEL_UNSPECIFIED": 0,
+ "FUNDAMENTAL_AWARENESS": 1,
+ "NOVICE": 2,
+ "INTERMEDIATE": 3,
+ "ADVANCED": 4,
+ "EXPERT": 5,
+}
+
+func (x SkillProficiencyLevel) String() string {
+ return proto.EnumName(SkillProficiencyLevel_name, int32(x))
+}
+func (SkillProficiencyLevel) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{11}
+}
+
+// The overall outcome / decision / result indicator.
+type Outcome int32
+
+const (
+ // Default value.
+ Outcome_OUTCOME_UNSPECIFIED Outcome = 0
+ // A positive outcome / passing indicator (for example, candidate was
+ // recommended for hiring or to be moved forward in the hiring process,
+ // candidate passed a test).
+ Outcome_POSITIVE Outcome = 1
+ // A neutral outcome / no clear indicator (for example, no strong
+ // recommendation either to move forward / not move forward, neutral score).
+ Outcome_NEUTRAL Outcome = 2
+ // A negative outcome / failing indicator (for example, candidate was
+ // recommended to NOT move forward in the hiring process, failed a test).
+ Outcome_NEGATIVE Outcome = 3
+ // The assessment outcome is not available or otherwise unknown (for example,
+ // candidate did not complete assessment).
+ Outcome_OUTCOME_NOT_AVAILABLE Outcome = 4
+)
+
+var Outcome_name = map[int32]string{
+ 0: "OUTCOME_UNSPECIFIED",
+ 1: "POSITIVE",
+ 2: "NEUTRAL",
+ 3: "NEGATIVE",
+ 4: "OUTCOME_NOT_AVAILABLE",
+}
+var Outcome_value = map[string]int32{
+ "OUTCOME_UNSPECIFIED": 0,
+ "POSITIVE": 1,
+ "NEUTRAL": 2,
+ "NEGATIVE": 3,
+ "OUTCOME_NOT_AVAILABLE": 4,
+}
+
+func (x Outcome) String() string {
+ return proto.EnumName(Outcome_name, int32(x))
+}
+func (Outcome) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{12}
+}
+
+// An enum which represents the type of a location.
+type Location_LocationType int32
+
+const (
+ // Default value if the type isn't specified.
+ Location_LOCATION_TYPE_UNSPECIFIED Location_LocationType = 0
+ // A country level location.
+ Location_COUNTRY Location_LocationType = 1
+ // A state or equivalent level location.
+ Location_ADMINISTRATIVE_AREA Location_LocationType = 2
+ // A county or equivalent level location.
+ Location_SUB_ADMINISTRATIVE_AREA Location_LocationType = 3
+ // A city or equivalent level location.
+ Location_LOCALITY Location_LocationType = 4
+ // A postal code level location.
+ Location_POSTAL_CODE Location_LocationType = 5
+ // A sublocality is a subdivision of a locality, for example a city borough,
+ // ward, or arrondissement. Sublocalities are usually recognized by a local
+ // political authority. For example, Manhattan and Brooklyn are recognized
+ // as boroughs by the City of New York, and are therefore modeled as
+ // sublocalities.
+ Location_SUB_LOCALITY Location_LocationType = 6
+ // A district or equivalent level location. 
+ Location_SUB_LOCALITY_1 Location_LocationType = 7
+ // A smaller district or equivalent level display.
+ Location_SUB_LOCALITY_2 Location_LocationType = 8
+ // A neighborhood level location.
+ Location_NEIGHBORHOOD Location_LocationType = 9
+ // A street address level location.
+ Location_STREET_ADDRESS Location_LocationType = 10
+)
+
+var Location_LocationType_name = map[int32]string{
+ 0: "LOCATION_TYPE_UNSPECIFIED",
+ 1: "COUNTRY",
+ 2: "ADMINISTRATIVE_AREA",
+ 3: "SUB_ADMINISTRATIVE_AREA",
+ 4: "LOCALITY",
+ 5: "POSTAL_CODE",
+ 6: "SUB_LOCALITY",
+ 7: "SUB_LOCALITY_1",
+ 8: "SUB_LOCALITY_2",
+ 9: "NEIGHBORHOOD",
+ 10: "STREET_ADDRESS",
+}
+var Location_LocationType_value = map[string]int32{
+ "LOCATION_TYPE_UNSPECIFIED": 0,
+ "COUNTRY": 1,
+ "ADMINISTRATIVE_AREA": 2,
+ "SUB_ADMINISTRATIVE_AREA": 3,
+ "LOCALITY": 4,
+ "POSTAL_CODE": 5,
+ "SUB_LOCALITY": 6,
+ "SUB_LOCALITY_1": 7,
+ "SUB_LOCALITY_2": 8,
+ "NEIGHBORHOOD": 9,
+ "STREET_ADDRESS": 10,
+}
+
+func (x Location_LocationType) String() string {
+ return proto.EnumName(Location_LocationType_name, int32(x))
+}
+func (Location_LocationType) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{1, 0}
+}
+
+// An enumeration describing an API access portal and exposure mechanism.
+type DeviceInfo_DeviceType int32
+
+const (
+ // The device type isn't specified.
+ DeviceInfo_DEVICE_TYPE_UNSPECIFIED DeviceInfo_DeviceType = 0
+ // A desktop web browser, such as Chrome, Firefox, Safari, or Internet
+ // Explorer.
+ DeviceInfo_WEB DeviceInfo_DeviceType = 1
+ // A mobile device web browser, such as a phone or tablet with a Chrome
+ // browser.
+ DeviceInfo_MOBILE_WEB DeviceInfo_DeviceType = 2
+ // An Android device native application.
+ DeviceInfo_ANDROID DeviceInfo_DeviceType = 3
+ // An iOS device native application.
+ DeviceInfo_IOS DeviceInfo_DeviceType = 4
+ // A bot, as opposed to a device operated by human beings, such as a web
+ // crawler.
+ DeviceInfo_BOT DeviceInfo_DeviceType = 5
+ // Other device types.
+ DeviceInfo_OTHER DeviceInfo_DeviceType = 6
+)
+
+var DeviceInfo_DeviceType_name = map[int32]string{
+ 0: "DEVICE_TYPE_UNSPECIFIED",
+ 1: "WEB",
+ 2: "MOBILE_WEB",
+ 3: "ANDROID",
+ 4: "IOS",
+ 5: "BOT",
+ 6: "OTHER",
+}
+var DeviceInfo_DeviceType_value = map[string]int32{
+ "DEVICE_TYPE_UNSPECIFIED": 0,
+ "WEB": 1,
+ "MOBILE_WEB": 2,
+ "ANDROID": 3,
+ "IOS": 4,
+ "BOT": 5,
+ "OTHER": 6,
+}
+
+func (x DeviceInfo_DeviceType) String() string {
+ return proto.EnumName(DeviceInfo_DeviceType_name, int32(x))
+}
+func (DeviceInfo_DeviceType) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_common_fee8e853138cd909, []int{4, 0}
+}
+
+// The type of compensation.
+//
+// For compensation amounts specified in non-monetary amounts,
+// describe the compensation scheme in the [CompensationEntry.description][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.description].
+//
+// For example, tipping format is described in
+// [CompensationEntry.description][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.description] (for example, "expect 15-20% tips based
+// on customer bill.") and an estimate of the tips provided in
+// [CompensationEntry.amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] or [CompensationEntry.range][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.range] ($10 per hour). 
+// +// For example, equity is described in [CompensationEntry.description][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.description] +// (for example, "1% - 2% equity vesting over 4 years, 1 year cliff") and +// value estimated in [CompensationEntry.amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] or +// [CompensationEntry.range][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.range]. If no value estimate is possible, units are +// [CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED][google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED] and then further +// clarified in [CompensationEntry.description][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.description] field. +type CompensationInfo_CompensationType int32 + +const ( + // Default value. + CompensationInfo_COMPENSATION_TYPE_UNSPECIFIED CompensationInfo_CompensationType = 0 + // Base compensation: Refers to the fixed amount of money paid to an + // employee by an employer in return for work performed. Base compensation + // does not include benefits, bonuses or any other potential compensation + // from an employer. + CompensationInfo_BASE CompensationInfo_CompensationType = 1 + // Bonus. + CompensationInfo_BONUS CompensationInfo_CompensationType = 2 + // Signing bonus. + CompensationInfo_SIGNING_BONUS CompensationInfo_CompensationType = 3 + // Equity. + CompensationInfo_EQUITY CompensationInfo_CompensationType = 4 + // Profit sharing. + CompensationInfo_PROFIT_SHARING CompensationInfo_CompensationType = 5 + // Commission. + CompensationInfo_COMMISSIONS CompensationInfo_CompensationType = 6 + // Tips. + CompensationInfo_TIPS CompensationInfo_CompensationType = 7 + // Other compensation type. + CompensationInfo_OTHER_COMPENSATION_TYPE CompensationInfo_CompensationType = 8 +) + +var CompensationInfo_CompensationType_name = map[int32]string{ + 0: "COMPENSATION_TYPE_UNSPECIFIED", + 1: "BASE", + 2: "BONUS", + 3: "SIGNING_BONUS", + 4: "EQUITY", + 5: "PROFIT_SHARING", + 6: "COMMISSIONS", + 7: "TIPS", + 8: "OTHER_COMPENSATION_TYPE", +} +var CompensationInfo_CompensationType_value = map[string]int32{ + "COMPENSATION_TYPE_UNSPECIFIED": 0, + "BASE": 1, + "BONUS": 2, + "SIGNING_BONUS": 3, + "EQUITY": 4, + "PROFIT_SHARING": 5, + "COMMISSIONS": 6, + "TIPS": 7, + "OTHER_COMPENSATION_TYPE": 8, +} + +func (x CompensationInfo_CompensationType) String() string { + return proto.EnumName(CompensationInfo_CompensationType_name, int32(x)) +} +func (CompensationInfo_CompensationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7, 0} +} + +// Pay frequency. +type CompensationInfo_CompensationUnit int32 + +const ( + // Default value. + CompensationInfo_COMPENSATION_UNIT_UNSPECIFIED CompensationInfo_CompensationUnit = 0 + // Hourly. + CompensationInfo_HOURLY CompensationInfo_CompensationUnit = 1 + // Daily. + CompensationInfo_DAILY CompensationInfo_CompensationUnit = 2 + // Weekly + CompensationInfo_WEEKLY CompensationInfo_CompensationUnit = 3 + // Monthly. + CompensationInfo_MONTHLY CompensationInfo_CompensationUnit = 4 + // Yearly. + CompensationInfo_YEARLY CompensationInfo_CompensationUnit = 5 + // One time. + CompensationInfo_ONE_TIME CompensationInfo_CompensationUnit = 6 + // Other compensation units. 
+ CompensationInfo_OTHER_COMPENSATION_UNIT CompensationInfo_CompensationUnit = 7 +) + +var CompensationInfo_CompensationUnit_name = map[int32]string{ + 0: "COMPENSATION_UNIT_UNSPECIFIED", + 1: "HOURLY", + 2: "DAILY", + 3: "WEEKLY", + 4: "MONTHLY", + 5: "YEARLY", + 6: "ONE_TIME", + 7: "OTHER_COMPENSATION_UNIT", +} +var CompensationInfo_CompensationUnit_value = map[string]int32{ + "COMPENSATION_UNIT_UNSPECIFIED": 0, + "HOURLY": 1, + "DAILY": 2, + "WEEKLY": 3, + "MONTHLY": 4, + "YEARLY": 5, + "ONE_TIME": 6, + "OTHER_COMPENSATION_UNIT": 7, +} + +func (x CompensationInfo_CompensationUnit) String() string { + return proto.EnumName(CompensationInfo_CompensationUnit_name, int32(x)) +} +func (CompensationInfo_CompensationUnit) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7, 1} +} + +// Message representing a period of time between two timestamps. +type TimestampRange struct { + // Begin of the period (inclusive). + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End of the period (exclusive). + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampRange) Reset() { *m = TimestampRange{} } +func (m *TimestampRange) String() string { return proto.CompactTextString(m) } +func (*TimestampRange) ProtoMessage() {} +func (*TimestampRange) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{0} +} +func (m *TimestampRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampRange.Unmarshal(m, b) +} +func (m *TimestampRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampRange.Marshal(b, m, deterministic) +} +func (dst *TimestampRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampRange.Merge(dst, src) +} +func (m *TimestampRange) XXX_Size() int { + return xxx_messageInfo_TimestampRange.Size(m) +} +func (m *TimestampRange) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampRange.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampRange proto.InternalMessageInfo + +func (m *TimestampRange) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimestampRange) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Output only. A resource that represents a location with full geographic +// information. +type Location struct { + // The type of a location, which corresponds to the address lines field of + // [PostalAddress][]. For example, "Downtown, Atlanta, GA, USA" has a type of + // [LocationType#NEIGHBORHOOD][], and "Kansas City, KS, USA" has a type of + // [LocationType#LOCALITY][]. + LocationType Location_LocationType `protobuf:"varint,1,opt,name=location_type,json=locationType,proto3,enum=google.cloud.talent.v4beta1.Location_LocationType" json:"location_type,omitempty"` + // Postal address of the location that includes human readable information, + // such as postal delivery and payments addresses. Given a postal address, + // a postal service can deliver items to a premises, P.O. Box, or other + // delivery location. 
+ PostalAddress *postaladdress.PostalAddress `protobuf:"bytes,2,opt,name=postal_address,json=postalAddress,proto3" json:"postal_address,omitempty"` + // An object representing a latitude/longitude pair. + LatLng *latlng.LatLng `protobuf:"bytes,3,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + // Radius in miles of the job location. This value is derived from the + // location bounding box in which a circle with the specified radius + // centered from [LatLng][] covers the area associated with the job location. + // For example, currently, "Mountain View, CA, USA" has a radius of + // 6.17 miles. + RadiusMiles float64 `protobuf:"fixed64,4,opt,name=radius_miles,json=radiusMiles,proto3" json:"radius_miles,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{1} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Location.Unmarshal(m, b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Location.Marshal(b, m, deterministic) +} +func (dst *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(dst, src) +} +func (m *Location) XXX_Size() int { + return xxx_messageInfo_Location.Size(m) +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetLocationType() Location_LocationType { + if m != nil { + return m.LocationType + } + return Location_LOCATION_TYPE_UNSPECIFIED +} + +func (m *Location) GetPostalAddress() *postaladdress.PostalAddress { + if m != nil { + return m.PostalAddress + } + return nil +} + +func (m *Location) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +func (m *Location) GetRadiusMiles() float64 { + if m != nil { + return m.RadiusMiles + } + return 0 +} + +// Input only. +// +// Meta information related to the job searcher or entity +// conducting the job search. This information is used to improve the +// performance of the service. +type RequestMetadata struct { + // Required if [allow_missing_ids][google.cloud.talent.v4beta1.RequestMetadata.allow_missing_ids] is unset or `false`. + // + // The client-defined scope or source of the service call, which typically + // is the domain on + // which the service has been implemented and is currently being run. + // + // For example, if the service is being run by client Foo, Inc., on + // job board www.foo.com and career site www.bar.com, then this field is + // set to "foo.com" for use on the job board, and "bar.com" for use on the + // career site. + // + // Note that any improvements to the model for a particular tenant site rely + // on this field being set correctly to a unique domain. + // + // The maximum number of allowed characters is 255. + Domain string `protobuf:"bytes,1,opt,name=domain,proto3" json:"domain,omitempty"` + // Required if [allow_missing_ids][google.cloud.talent.v4beta1.RequestMetadata.allow_missing_ids] is unset or `false`. + // + // A unique session identification string. A session is defined as the + // duration of an end user's interaction with the service over a certain + // period. 
+ // Obfuscate this field for privacy concerns before + // providing it to the service. + // + // Note that any improvements to the model for a particular tenant site rely + // on this field being set correctly to a unique session ID. + // + // The maximum number of allowed characters is 255. + SessionId string `protobuf:"bytes,2,opt,name=session_id,json=sessionId,proto3" json:"session_id,omitempty"` + // Required if [allow_missing_ids][google.cloud.talent.v4beta1.RequestMetadata.allow_missing_ids] is unset or `false`. + // + // A unique user identification string, as determined by the client. + // To have the strongest positive impact on search quality + // make sure the client-level is unique. + // Obfuscate this field for privacy concerns before + // providing it to the service. + // + // Note that any improvements to the model for a particular tenant site rely + // on this field being set correctly to a unique user ID. + // + // The maximum number of allowed characters is 255. + UserId string `protobuf:"bytes,3,opt,name=user_id,json=userId,proto3" json:"user_id,omitempty"` + // Optional. + // + // If set to `true`, [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], [session_id][google.cloud.talent.v4beta1.RequestMetadata.session_id] and [user_id][google.cloud.talent.v4beta1.RequestMetadata.user_id] are optional. + // Only set when any of these fields isn't available for some reason. It + // is highly recommended not to set this field and provide accurate + // [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], [session_id][google.cloud.talent.v4beta1.RequestMetadata.session_id] and [user_id][google.cloud.talent.v4beta1.RequestMetadata.user_id] for the best service experience. + AllowMissingIds bool `protobuf:"varint,4,opt,name=allow_missing_ids,json=allowMissingIds,proto3" json:"allow_missing_ids,omitempty"` + // Optional. + // + // The type of device used by the job seeker at the time of the call to the + // service. 
+ DeviceInfo *DeviceInfo `protobuf:"bytes,5,opt,name=device_info,json=deviceInfo,proto3" json:"device_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestMetadata) Reset() { *m = RequestMetadata{} } +func (m *RequestMetadata) String() string { return proto.CompactTextString(m) } +func (*RequestMetadata) ProtoMessage() {} +func (*RequestMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{2} +} +func (m *RequestMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestMetadata.Unmarshal(m, b) +} +func (m *RequestMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestMetadata.Marshal(b, m, deterministic) +} +func (dst *RequestMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestMetadata.Merge(dst, src) +} +func (m *RequestMetadata) XXX_Size() int { + return xxx_messageInfo_RequestMetadata.Size(m) +} +func (m *RequestMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_RequestMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestMetadata proto.InternalMessageInfo + +func (m *RequestMetadata) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *RequestMetadata) GetSessionId() string { + if m != nil { + return m.SessionId + } + return "" +} + +func (m *RequestMetadata) GetUserId() string { + if m != nil { + return m.UserId + } + return "" +} + +func (m *RequestMetadata) GetAllowMissingIds() bool { + if m != nil { + return m.AllowMissingIds + } + return false +} + +func (m *RequestMetadata) GetDeviceInfo() *DeviceInfo { + if m != nil { + return m.DeviceInfo + } + return nil +} + +// Output only. Additional information returned to client, such as debugging +// information. +type ResponseMetadata struct { + // A unique id associated with this call. + // This id is logged for tracking purposes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponseMetadata) Reset() { *m = ResponseMetadata{} } +func (m *ResponseMetadata) String() string { return proto.CompactTextString(m) } +func (*ResponseMetadata) ProtoMessage() {} +func (*ResponseMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{3} +} +func (m *ResponseMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponseMetadata.Unmarshal(m, b) +} +func (m *ResponseMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponseMetadata.Marshal(b, m, deterministic) +} +func (dst *ResponseMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponseMetadata.Merge(dst, src) +} +func (m *ResponseMetadata) XXX_Size() int { + return xxx_messageInfo_ResponseMetadata.Size(m) +} +func (m *ResponseMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ResponseMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponseMetadata proto.InternalMessageInfo + +func (m *ResponseMetadata) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Device information collected from the job seeker, candidate, or +// other entity conducting the job search. Providing this information improves +// the quality of the search results across devices. +type DeviceInfo struct { + // Optional. 
+ // + // Type of the device. + DeviceType DeviceInfo_DeviceType `protobuf:"varint,1,opt,name=device_type,json=deviceType,proto3,enum=google.cloud.talent.v4beta1.DeviceInfo_DeviceType" json:"device_type,omitempty"` + // Optional. + // + // A device-specific ID. The ID must be a unique identifier that + // distinguishes the device from other devices. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceInfo) Reset() { *m = DeviceInfo{} } +func (m *DeviceInfo) String() string { return proto.CompactTextString(m) } +func (*DeviceInfo) ProtoMessage() {} +func (*DeviceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{4} +} +func (m *DeviceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceInfo.Unmarshal(m, b) +} +func (m *DeviceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceInfo.Marshal(b, m, deterministic) +} +func (dst *DeviceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceInfo.Merge(dst, src) +} +func (m *DeviceInfo) XXX_Size() int { + return xxx_messageInfo_DeviceInfo.Size(m) +} +func (m *DeviceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceInfo proto.InternalMessageInfo + +func (m *DeviceInfo) GetDeviceType() DeviceInfo_DeviceType { + if m != nil { + return m.DeviceType + } + return DeviceInfo_DEVICE_TYPE_UNSPECIFIED +} + +func (m *DeviceInfo) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Custom attribute values that are either filterable or non-filterable. +type CustomAttribute struct { + // Optional but exactly one of [string_values][google.cloud.talent.v4beta1.CustomAttribute.string_values] or [long_values][google.cloud.talent.v4beta1.CustomAttribute.long_values] must + // be specified. + // + // This field is used to perform a string match (`CASE_SENSITIVE_MATCH` or + // `CASE_INSENSITIVE_MATCH`) search. + // For filterable `string_value`s, a maximum total number of 200 values + // is allowed, with each `string_value` has a byte size of no more than + // 255B. For unfilterable `string_values`, the maximum total byte size of + // unfilterable `string_values` is 50KB. + // + // Empty string isn't allowed. + StringValues []string `protobuf:"bytes,1,rep,name=string_values,json=stringValues,proto3" json:"string_values,omitempty"` + // Optional but exactly one of [string_values][google.cloud.talent.v4beta1.CustomAttribute.string_values] or [long_values][google.cloud.talent.v4beta1.CustomAttribute.long_values] must + // be specified. + // + // This field is used to perform number range search. + // (`EQ`, `GT`, `GE`, `LE`, `LT`) over filterable `long_value`. + // + // Currently at most 1 [long_values][google.cloud.talent.v4beta1.CustomAttribute.long_values] is supported. + LongValues []int64 `protobuf:"varint,2,rep,packed,name=long_values,json=longValues,proto3" json:"long_values,omitempty"` + // Optional. + // + // If the `filterable` flag is true, custom field values are searchable. + // If false, values are not searchable. + // + // Default is false. 
+ Filterable bool `protobuf:"varint,3,opt,name=filterable,proto3" json:"filterable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomAttribute) Reset() { *m = CustomAttribute{} } +func (m *CustomAttribute) String() string { return proto.CompactTextString(m) } +func (*CustomAttribute) ProtoMessage() {} +func (*CustomAttribute) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{5} +} +func (m *CustomAttribute) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomAttribute.Unmarshal(m, b) +} +func (m *CustomAttribute) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomAttribute.Marshal(b, m, deterministic) +} +func (dst *CustomAttribute) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomAttribute.Merge(dst, src) +} +func (m *CustomAttribute) XXX_Size() int { + return xxx_messageInfo_CustomAttribute.Size(m) +} +func (m *CustomAttribute) XXX_DiscardUnknown() { + xxx_messageInfo_CustomAttribute.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomAttribute proto.InternalMessageInfo + +func (m *CustomAttribute) GetStringValues() []string { + if m != nil { + return m.StringValues + } + return nil +} + +func (m *CustomAttribute) GetLongValues() []int64 { + if m != nil { + return m.LongValues + } + return nil +} + +func (m *CustomAttribute) GetFilterable() bool { + if m != nil { + return m.Filterable + } + return false +} + +// Output only. Spell check result. +type SpellingCorrection struct { + // Indicates if the query was corrected by the spell checker. + Corrected bool `protobuf:"varint,1,opt,name=corrected,proto3" json:"corrected,omitempty"` + // Correction output consisting of the corrected keyword string. + CorrectedText string `protobuf:"bytes,2,opt,name=corrected_text,json=correctedText,proto3" json:"corrected_text,omitempty"` + // Corrected output with html tags to highlight the corrected words. + // Corrected words are called out with the "..." html tags. + // + // For example, the user input query is "software enginear", where the second + // word, "enginear," is incorrect. It should be "engineer". When spelling + // correction is enabled, this value is + // "software engineer". 
+ CorrectedHtml string `protobuf:"bytes,3,opt,name=corrected_html,json=correctedHtml,proto3" json:"corrected_html,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpellingCorrection) Reset() { *m = SpellingCorrection{} } +func (m *SpellingCorrection) String() string { return proto.CompactTextString(m) } +func (*SpellingCorrection) ProtoMessage() {} +func (*SpellingCorrection) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{6} +} +func (m *SpellingCorrection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpellingCorrection.Unmarshal(m, b) +} +func (m *SpellingCorrection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpellingCorrection.Marshal(b, m, deterministic) +} +func (dst *SpellingCorrection) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpellingCorrection.Merge(dst, src) +} +func (m *SpellingCorrection) XXX_Size() int { + return xxx_messageInfo_SpellingCorrection.Size(m) +} +func (m *SpellingCorrection) XXX_DiscardUnknown() { + xxx_messageInfo_SpellingCorrection.DiscardUnknown(m) +} + +var xxx_messageInfo_SpellingCorrection proto.InternalMessageInfo + +func (m *SpellingCorrection) GetCorrected() bool { + if m != nil { + return m.Corrected + } + return false +} + +func (m *SpellingCorrection) GetCorrectedText() string { + if m != nil { + return m.CorrectedText + } + return "" +} + +func (m *SpellingCorrection) GetCorrectedHtml() string { + if m != nil { + return m.CorrectedHtml + } + return "" +} + +// Job compensation details. +type CompensationInfo struct { + // Optional. + // + // Job compensation information. + // + // At most one entry can be of type + // [CompensationInfo.CompensationType.BASE][google.cloud.talent.v4beta1.CompensationInfo.CompensationType.BASE], which is + // referred as ** base compensation entry ** for the job. + Entries []*CompensationInfo_CompensationEntry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + // Output only. Annualized base compensation range. Computed as + // base compensation entry's [CompensationEntry.compensation][] times + // [CompensationEntry.expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. + // + // See [CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] for explanation on compensation annualization. + AnnualizedBaseCompensationRange *CompensationInfo_CompensationRange `protobuf:"bytes,2,opt,name=annualized_base_compensation_range,json=annualizedBaseCompensationRange,proto3" json:"annualized_base_compensation_range,omitempty"` + // Output only. Annualized total compensation range. Computed as + // all compensation entries' [CompensationEntry.compensation][] times + // [CompensationEntry.expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. + // + // See [CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] for explanation on compensation annualization. 
+ AnnualizedTotalCompensationRange *CompensationInfo_CompensationRange `protobuf:"bytes,3,opt,name=annualized_total_compensation_range,json=annualizedTotalCompensationRange,proto3" json:"annualized_total_compensation_range,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompensationInfo) Reset() { *m = CompensationInfo{} } +func (m *CompensationInfo) String() string { return proto.CompactTextString(m) } +func (*CompensationInfo) ProtoMessage() {} +func (*CompensationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7} +} +func (m *CompensationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompensationInfo.Unmarshal(m, b) +} +func (m *CompensationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompensationInfo.Marshal(b, m, deterministic) +} +func (dst *CompensationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompensationInfo.Merge(dst, src) +} +func (m *CompensationInfo) XXX_Size() int { + return xxx_messageInfo_CompensationInfo.Size(m) +} +func (m *CompensationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_CompensationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_CompensationInfo proto.InternalMessageInfo + +func (m *CompensationInfo) GetEntries() []*CompensationInfo_CompensationEntry { + if m != nil { + return m.Entries + } + return nil +} + +func (m *CompensationInfo) GetAnnualizedBaseCompensationRange() *CompensationInfo_CompensationRange { + if m != nil { + return m.AnnualizedBaseCompensationRange + } + return nil +} + +func (m *CompensationInfo) GetAnnualizedTotalCompensationRange() *CompensationInfo_CompensationRange { + if m != nil { + return m.AnnualizedTotalCompensationRange + } + return nil +} + +// A compensation entry that represents one component of compensation, such +// as base pay, bonus, or other compensation type. +// +// Annualization: One compensation entry can be annualized if +// - it contains valid [amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] or [range][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.range]. +// - and its [expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year] is set or can be derived. +// Its annualized range is determined as ([amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] or [range][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.range]) times +// [expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. +type CompensationInfo_CompensationEntry struct { + // Optional. + // + // Compensation type. + // + // Default is [CompensationUnit.OTHER_COMPENSATION_TYPE][]. + Type CompensationInfo_CompensationType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.CompensationInfo_CompensationType" json:"type,omitempty"` + // Optional. + // + // Frequency of the specified amount. + // + // Default is [CompensationUnit.OTHER_COMPENSATION_UNIT][google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit.OTHER_COMPENSATION_UNIT]. + Unit CompensationInfo_CompensationUnit `protobuf:"varint,2,opt,name=unit,proto3,enum=google.cloud.talent.v4beta1.CompensationInfo_CompensationUnit" json:"unit,omitempty"` + // Optional. + // + // Compensation amount. It could be a fixed amount or a floating range. 
+ // + // Types that are valid to be assigned to CompensationAmount: + // *CompensationInfo_CompensationEntry_Amount + // *CompensationInfo_CompensationEntry_Range + CompensationAmount isCompensationInfo_CompensationEntry_CompensationAmount `protobuf_oneof:"compensation_amount"` + // Optional. + // + // Compensation description. For example, could + // indicate equity terms or provide additional context to an estimated + // bonus. + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + // Optional. + // + // Expected number of units paid each year. If not specified, when + // [Job.employment_types][google.cloud.talent.v4beta1.Job.employment_types] is FULLTIME, a default value is inferred + // based on [unit][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.unit]. Default values: + // - HOURLY: 2080 + // - DAILY: 260 + // - WEEKLY: 52 + // - MONTHLY: 12 + // - ANNUAL: 1 + ExpectedUnitsPerYear *wrappers.DoubleValue `protobuf:"bytes,6,opt,name=expected_units_per_year,json=expectedUnitsPerYear,proto3" json:"expected_units_per_year,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompensationInfo_CompensationEntry) Reset() { *m = CompensationInfo_CompensationEntry{} } +func (m *CompensationInfo_CompensationEntry) String() string { return proto.CompactTextString(m) } +func (*CompensationInfo_CompensationEntry) ProtoMessage() {} +func (*CompensationInfo_CompensationEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7, 0} +} +func (m *CompensationInfo_CompensationEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompensationInfo_CompensationEntry.Unmarshal(m, b) +} +func (m *CompensationInfo_CompensationEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompensationInfo_CompensationEntry.Marshal(b, m, deterministic) +} +func (dst *CompensationInfo_CompensationEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompensationInfo_CompensationEntry.Merge(dst, src) +} +func (m *CompensationInfo_CompensationEntry) XXX_Size() int { + return xxx_messageInfo_CompensationInfo_CompensationEntry.Size(m) +} +func (m *CompensationInfo_CompensationEntry) XXX_DiscardUnknown() { + xxx_messageInfo_CompensationInfo_CompensationEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_CompensationInfo_CompensationEntry proto.InternalMessageInfo + +func (m *CompensationInfo_CompensationEntry) GetType() CompensationInfo_CompensationType { + if m != nil { + return m.Type + } + return CompensationInfo_COMPENSATION_TYPE_UNSPECIFIED +} + +func (m *CompensationInfo_CompensationEntry) GetUnit() CompensationInfo_CompensationUnit { + if m != nil { + return m.Unit + } + return CompensationInfo_COMPENSATION_UNIT_UNSPECIFIED +} + +type isCompensationInfo_CompensationEntry_CompensationAmount interface { + isCompensationInfo_CompensationEntry_CompensationAmount() +} + +type CompensationInfo_CompensationEntry_Amount struct { + Amount *money.Money `protobuf:"bytes,3,opt,name=amount,proto3,oneof"` +} + +type CompensationInfo_CompensationEntry_Range struct { + Range *CompensationInfo_CompensationRange `protobuf:"bytes,4,opt,name=range,proto3,oneof"` +} + +func (*CompensationInfo_CompensationEntry_Amount) isCompensationInfo_CompensationEntry_CompensationAmount() { +} + +func (*CompensationInfo_CompensationEntry_Range) isCompensationInfo_CompensationEntry_CompensationAmount() { +} + +func (m 
*CompensationInfo_CompensationEntry) GetCompensationAmount() isCompensationInfo_CompensationEntry_CompensationAmount { + if m != nil { + return m.CompensationAmount + } + return nil +} + +func (m *CompensationInfo_CompensationEntry) GetAmount() *money.Money { + if x, ok := m.GetCompensationAmount().(*CompensationInfo_CompensationEntry_Amount); ok { + return x.Amount + } + return nil +} + +func (m *CompensationInfo_CompensationEntry) GetRange() *CompensationInfo_CompensationRange { + if x, ok := m.GetCompensationAmount().(*CompensationInfo_CompensationEntry_Range); ok { + return x.Range + } + return nil +} + +func (m *CompensationInfo_CompensationEntry) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *CompensationInfo_CompensationEntry) GetExpectedUnitsPerYear() *wrappers.DoubleValue { + if m != nil { + return m.ExpectedUnitsPerYear + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CompensationInfo_CompensationEntry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CompensationInfo_CompensationEntry_OneofMarshaler, _CompensationInfo_CompensationEntry_OneofUnmarshaler, _CompensationInfo_CompensationEntry_OneofSizer, []interface{}{ + (*CompensationInfo_CompensationEntry_Amount)(nil), + (*CompensationInfo_CompensationEntry_Range)(nil), + } +} + +func _CompensationInfo_CompensationEntry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CompensationInfo_CompensationEntry) + // compensation_amount + switch x := m.CompensationAmount.(type) { + case *CompensationInfo_CompensationEntry_Amount: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Amount); err != nil { + return err + } + case *CompensationInfo_CompensationEntry_Range: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Range); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CompensationInfo_CompensationEntry.CompensationAmount has unexpected type %T", x) + } + return nil +} + +func _CompensationInfo_CompensationEntry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CompensationInfo_CompensationEntry) + switch tag { + case 3: // compensation_amount.amount + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(money.Money) + err := b.DecodeMessage(msg) + m.CompensationAmount = &CompensationInfo_CompensationEntry_Amount{msg} + return true, err + case 4: // compensation_amount.range + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CompensationInfo_CompensationRange) + err := b.DecodeMessage(msg) + m.CompensationAmount = &CompensationInfo_CompensationEntry_Range{msg} + return true, err + default: + return false, nil + } +} + +func _CompensationInfo_CompensationEntry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CompensationInfo_CompensationEntry) + // compensation_amount + switch x := m.CompensationAmount.(type) { + case *CompensationInfo_CompensationEntry_Amount: + s := proto.Size(x.Amount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CompensationInfo_CompensationEntry_Range: + s := proto.Size(x.Range) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + 
} + return n +} + +// Compensation range. +type CompensationInfo_CompensationRange struct { + // Optional. + // + // The maximum amount of compensation. If left empty, the value is set + // to a maximal compensation value and the currency code is set to + // match the [currency code][google.type.Money.currency_code] of + // min_compensation. + MaxCompensation *money.Money `protobuf:"bytes,2,opt,name=max_compensation,json=maxCompensation,proto3" json:"max_compensation,omitempty"` + // Optional. + // + // The minimum amount of compensation. If left empty, the value is set + // to zero and the currency code is set to match the + // [currency code][google.type.Money.currency_code] of max_compensation. + MinCompensation *money.Money `protobuf:"bytes,1,opt,name=min_compensation,json=minCompensation,proto3" json:"min_compensation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompensationInfo_CompensationRange) Reset() { *m = CompensationInfo_CompensationRange{} } +func (m *CompensationInfo_CompensationRange) String() string { return proto.CompactTextString(m) } +func (*CompensationInfo_CompensationRange) ProtoMessage() {} +func (*CompensationInfo_CompensationRange) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{7, 1} +} +func (m *CompensationInfo_CompensationRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompensationInfo_CompensationRange.Unmarshal(m, b) +} +func (m *CompensationInfo_CompensationRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompensationInfo_CompensationRange.Marshal(b, m, deterministic) +} +func (dst *CompensationInfo_CompensationRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompensationInfo_CompensationRange.Merge(dst, src) +} +func (m *CompensationInfo_CompensationRange) XXX_Size() int { + return xxx_messageInfo_CompensationInfo_CompensationRange.Size(m) +} +func (m *CompensationInfo_CompensationRange) XXX_DiscardUnknown() { + xxx_messageInfo_CompensationInfo_CompensationRange.DiscardUnknown(m) +} + +var xxx_messageInfo_CompensationInfo_CompensationRange proto.InternalMessageInfo + +func (m *CompensationInfo_CompensationRange) GetMaxCompensation() *money.Money { + if m != nil { + return m.MaxCompensation + } + return nil +} + +func (m *CompensationInfo_CompensationRange) GetMinCompensation() *money.Money { + if m != nil { + return m.MinCompensation + } + return nil +} + +// Resource that represents a license or certification. +type Certification struct { + // Optional. + // + // Name of license or certification. + // + // Number of characters allowed is 100. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. + // + // Acquisition date or effective date of license or certification. + AcquireDate *date.Date `protobuf:"bytes,2,opt,name=acquire_date,json=acquireDate,proto3" json:"acquire_date,omitempty"` + // Optional. + // + // Expiration date of license of certification. + ExpireDate *date.Date `protobuf:"bytes,3,opt,name=expire_date,json=expireDate,proto3" json:"expire_date,omitempty"` + // Optional. + // + // Authority of license, such as government. + // + // Number of characters allowed is 100. + Authority string `protobuf:"bytes,4,opt,name=authority,proto3" json:"authority,omitempty"` + // Optional. + // + // Description of license or certification. 
+ // + // Number of characters allowed is 100,000. + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Certification) Reset() { *m = Certification{} } +func (m *Certification) String() string { return proto.CompactTextString(m) } +func (*Certification) ProtoMessage() {} +func (*Certification) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{8} +} +func (m *Certification) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Certification.Unmarshal(m, b) +} +func (m *Certification) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Certification.Marshal(b, m, deterministic) +} +func (dst *Certification) XXX_Merge(src proto.Message) { + xxx_messageInfo_Certification.Merge(dst, src) +} +func (m *Certification) XXX_Size() int { + return xxx_messageInfo_Certification.Size(m) +} +func (m *Certification) XXX_DiscardUnknown() { + xxx_messageInfo_Certification.DiscardUnknown(m) +} + +var xxx_messageInfo_Certification proto.InternalMessageInfo + +func (m *Certification) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Certification) GetAcquireDate() *date.Date { + if m != nil { + return m.AcquireDate + } + return nil +} + +func (m *Certification) GetExpireDate() *date.Date { + if m != nil { + return m.ExpireDate + } + return nil +} + +func (m *Certification) GetAuthority() string { + if m != nil { + return m.Authority + } + return "" +} + +func (m *Certification) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Resource that represents a skill of a candidate. +type Skill struct { + // Optional. + // + // Skill display name. + // + // For example, "Java", "Python". + // + // Number of characters allowed is 100. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. + // + // The last time this skill was used. + LastUsedDate *date.Date `protobuf:"bytes,2,opt,name=last_used_date,json=lastUsedDate,proto3" json:"last_used_date,omitempty"` + // Optional. + // + // Skill proficiency level which indicates how proficient the candidate is at + // this skill. + Level SkillProficiencyLevel `protobuf:"varint,3,opt,name=level,proto3,enum=google.cloud.talent.v4beta1.SkillProficiencyLevel" json:"level,omitempty"` + // Optional. + // + // A paragraph describes context of this skill. + // + // Number of characters allowed is 100,000. + Context string `protobuf:"bytes,4,opt,name=context,proto3" json:"context,omitempty"` + // Output only. Skill name snippet shows how the [display_name][google.cloud.talent.v4beta1.Skill.display_name] is related + // to a search query. It's empty if the [display_name][google.cloud.talent.v4beta1.Skill.display_name] isn't related to the + // search query. 
+ SkillNameSnippet string `protobuf:"bytes,5,opt,name=skill_name_snippet,json=skillNameSnippet,proto3" json:"skill_name_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Skill) Reset() { *m = Skill{} } +func (m *Skill) String() string { return proto.CompactTextString(m) } +func (*Skill) ProtoMessage() {} +func (*Skill) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{9} +} +func (m *Skill) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Skill.Unmarshal(m, b) +} +func (m *Skill) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Skill.Marshal(b, m, deterministic) +} +func (dst *Skill) XXX_Merge(src proto.Message) { + xxx_messageInfo_Skill.Merge(dst, src) +} +func (m *Skill) XXX_Size() int { + return xxx_messageInfo_Skill.Size(m) +} +func (m *Skill) XXX_DiscardUnknown() { + xxx_messageInfo_Skill.DiscardUnknown(m) +} + +var xxx_messageInfo_Skill proto.InternalMessageInfo + +func (m *Skill) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Skill) GetLastUsedDate() *date.Date { + if m != nil { + return m.LastUsedDate + } + return nil +} + +func (m *Skill) GetLevel() SkillProficiencyLevel { + if m != nil { + return m.Level + } + return SkillProficiencyLevel_SKILL_PROFICIENCY_LEVEL_UNSPECIFIED +} + +func (m *Skill) GetContext() string { + if m != nil { + return m.Context + } + return "" +} + +func (m *Skill) GetSkillNameSnippet() string { + if m != nil { + return m.SkillNameSnippet + } + return "" +} + +// Details of an interview. +type Interview struct { + // Optional. + // + // The rating on this interview. + Rating *Rating `protobuf:"bytes,6,opt,name=rating,proto3" json:"rating,omitempty"` + // Required. + // + // The overall decision resulting from this interview (positive, negative, + // nuetral). + Outcome Outcome `protobuf:"varint,7,opt,name=outcome,proto3,enum=google.cloud.talent.v4beta1.Outcome" json:"outcome,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Interview) Reset() { *m = Interview{} } +func (m *Interview) String() string { return proto.CompactTextString(m) } +func (*Interview) ProtoMessage() {} +func (*Interview) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{10} +} +func (m *Interview) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Interview.Unmarshal(m, b) +} +func (m *Interview) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Interview.Marshal(b, m, deterministic) +} +func (dst *Interview) XXX_Merge(src proto.Message) { + xxx_messageInfo_Interview.Merge(dst, src) +} +func (m *Interview) XXX_Size() int { + return xxx_messageInfo_Interview.Size(m) +} +func (m *Interview) XXX_DiscardUnknown() { + xxx_messageInfo_Interview.DiscardUnknown(m) +} + +var xxx_messageInfo_Interview proto.InternalMessageInfo + +func (m *Interview) GetRating() *Rating { + if m != nil { + return m.Rating + } + return nil +} + +func (m *Interview) GetOutcome() Outcome { + if m != nil { + return m.Outcome + } + return Outcome_OUTCOME_UNSPECIFIED +} + +// The details of the score received for an assessment or interview. +type Rating struct { + // Overall score. + Overall float64 `protobuf:"fixed64,1,opt,name=overall,proto3" json:"overall,omitempty"` + // The minimum value for the score. 
+ Min float64 `protobuf:"fixed64,2,opt,name=min,proto3" json:"min,omitempty"` + // The maximum value for the score. + Max float64 `protobuf:"fixed64,3,opt,name=max,proto3" json:"max,omitempty"` + // The steps within the score (for example, interval = 1 max = 5 + // min = 1 indicates that the score can be 1, 2, 3, 4, or 5) + Interval float64 `protobuf:"fixed64,4,opt,name=interval,proto3" json:"interval,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Rating) Reset() { *m = Rating{} } +func (m *Rating) String() string { return proto.CompactTextString(m) } +func (*Rating) ProtoMessage() {} +func (*Rating) Descriptor() ([]byte, []int) { + return fileDescriptor_common_fee8e853138cd909, []int{11} +} +func (m *Rating) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Rating.Unmarshal(m, b) +} +func (m *Rating) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Rating.Marshal(b, m, deterministic) +} +func (dst *Rating) XXX_Merge(src proto.Message) { + xxx_messageInfo_Rating.Merge(dst, src) +} +func (m *Rating) XXX_Size() int { + return xxx_messageInfo_Rating.Size(m) +} +func (m *Rating) XXX_DiscardUnknown() { + xxx_messageInfo_Rating.DiscardUnknown(m) +} + +var xxx_messageInfo_Rating proto.InternalMessageInfo + +func (m *Rating) GetOverall() float64 { + if m != nil { + return m.Overall + } + return 0 +} + +func (m *Rating) GetMin() float64 { + if m != nil { + return m.Min + } + return 0 +} + +func (m *Rating) GetMax() float64 { + if m != nil { + return m.Max + } + return 0 +} + +func (m *Rating) GetInterval() float64 { + if m != nil { + return m.Interval + } + return 0 +} + +func init() { + proto.RegisterType((*TimestampRange)(nil), "google.cloud.talent.v4beta1.TimestampRange") + proto.RegisterType((*Location)(nil), "google.cloud.talent.v4beta1.Location") + proto.RegisterType((*RequestMetadata)(nil), "google.cloud.talent.v4beta1.RequestMetadata") + proto.RegisterType((*ResponseMetadata)(nil), "google.cloud.talent.v4beta1.ResponseMetadata") + proto.RegisterType((*DeviceInfo)(nil), "google.cloud.talent.v4beta1.DeviceInfo") + proto.RegisterType((*CustomAttribute)(nil), "google.cloud.talent.v4beta1.CustomAttribute") + proto.RegisterType((*SpellingCorrection)(nil), "google.cloud.talent.v4beta1.SpellingCorrection") + proto.RegisterType((*CompensationInfo)(nil), "google.cloud.talent.v4beta1.CompensationInfo") + proto.RegisterType((*CompensationInfo_CompensationEntry)(nil), "google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry") + proto.RegisterType((*CompensationInfo_CompensationRange)(nil), "google.cloud.talent.v4beta1.CompensationInfo.CompensationRange") + proto.RegisterType((*Certification)(nil), "google.cloud.talent.v4beta1.Certification") + proto.RegisterType((*Skill)(nil), "google.cloud.talent.v4beta1.Skill") + proto.RegisterType((*Interview)(nil), "google.cloud.talent.v4beta1.Interview") + proto.RegisterType((*Rating)(nil), "google.cloud.talent.v4beta1.Rating") + proto.RegisterEnum("google.cloud.talent.v4beta1.CompanySize", CompanySize_name, CompanySize_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.JobBenefit", JobBenefit_name, JobBenefit_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.DegreeType", DegreeType_name, DegreeType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.EmploymentType", EmploymentType_name, EmploymentType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.JobLevel", JobLevel_name, JobLevel_value) + 
proto.RegisterEnum("google.cloud.talent.v4beta1.JobCategory", JobCategory_name, JobCategory_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.PostingRegion", PostingRegion_name, PostingRegion_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.Visibility", Visibility_name, Visibility_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.ContactInfoUsage", ContactInfoUsage_name, ContactInfoUsage_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.HtmlSanitization", HtmlSanitization_name, HtmlSanitization_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CommuteMethod", CommuteMethod_name, CommuteMethod_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.SkillProficiencyLevel", SkillProficiencyLevel_name, SkillProficiencyLevel_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.Outcome", Outcome_name, Outcome_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.Location_LocationType", Location_LocationType_name, Location_LocationType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.DeviceInfo_DeviceType", DeviceInfo_DeviceType_name, DeviceInfo_DeviceType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CompensationInfo_CompensationType", CompensationInfo_CompensationType_name, CompensationInfo_CompensationType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CompensationInfo_CompensationUnit", CompensationInfo_CompensationUnit_name, CompensationInfo_CompensationUnit_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/common.proto", fileDescriptor_common_fee8e853138cd909) +} + +var fileDescriptor_common_fee8e853138cd909 = []byte{ + // 3039 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x59, 0xcd, 0x6f, 0x23, 0x47, + 0x76, 0x37, 0x45, 0x89, 0x14, 0x9f, 0xbe, 0x6a, 0x7a, 0x3c, 0x23, 0x59, 0xf3, 0x25, 0xd3, 0x1b, + 0xec, 0x40, 0x30, 0x24, 0x78, 0xb2, 0x41, 0x10, 0x2c, 0xb2, 0x41, 0xb1, 0xbb, 0x44, 0x96, 0xa7, + 0x3f, 0x98, 0xea, 0x6a, 0xc9, 0xf4, 0xa5, 0xd1, 0x12, 0x4b, 0x72, 0x27, 0xcd, 0x6e, 0x9a, 0xdd, + 0x1a, 0x8f, 0x7c, 0x0a, 0x72, 0xca, 0x21, 0x58, 0x20, 0xc8, 0x35, 0xa7, 0xfc, 0x0f, 0xb9, 0xe4, + 0x5f, 0xc8, 0x25, 0xb7, 0x5c, 0x16, 0x08, 0x90, 0x5b, 0x4e, 0x39, 0xe5, 0x1e, 0xbc, 0xaa, 0xa6, + 0x44, 0x7d, 0xcc, 0x78, 0x91, 0xf5, 0x49, 0x5d, 0xef, 0xa3, 0xea, 0x7d, 0xfc, 0xde, 0xab, 0x57, + 0x14, 0xbc, 0xbe, 0x28, 0x8a, 0x8b, 0x4c, 0x1d, 0x9e, 0x65, 0xc5, 0xe5, 0xf8, 0xb0, 0x4a, 0x32, + 0x95, 0x57, 0x87, 0xef, 0x7e, 0x75, 0xaa, 0xaa, 0xe4, 0xab, 0xc3, 0xb3, 0x62, 0x32, 0x29, 0xf2, + 0x83, 0xe9, 0xac, 0xa8, 0x0a, 0xeb, 0x99, 0x91, 0x3c, 0xd0, 0x92, 0x07, 0x46, 0xf2, 0xa0, 0x96, + 0xdc, 0x7d, 0x5e, 0x6f, 0x93, 0x4c, 0xd3, 0xc3, 0x24, 0xcf, 0x8b, 0x2a, 0xa9, 0xd2, 0x22, 0x2f, + 0x8d, 0xea, 0xee, 0xab, 0x9a, 0xab, 0x57, 0xa7, 0x97, 0xe7, 0x87, 0x55, 0x3a, 0x51, 0x65, 0x95, + 0x4c, 0xa6, 0xb5, 0xc0, 0xcb, 0xbb, 0x02, 0x3f, 0xcc, 0x92, 0xe9, 0x54, 0xcd, 0xe6, 0x1b, 0x3c, + 0xad, 0xf9, 0xd5, 0xd5, 0x54, 0x1d, 0x8e, 0x93, 0x4a, 0xd5, 0xf4, 0x9d, 0x45, 0x7a, 0x96, 0x54, + 0x59, 0x7e, 0x51, 0x73, 0xb6, 0x17, 0x39, 0x93, 0x22, 0x57, 0x57, 0x35, 0x63, 0x6f, 0x91, 0x31, + 0x2d, 0xca, 0x2a, 0xc9, 0xe2, 0x64, 0x3c, 0x9e, 0xa9, 0x72, 0x7e, 0xd8, 0xb3, 0x45, 0x09, 0xb4, + 0xb4, 0x38, 0x1f, 0x27, 0xb5, 0x7a, 0xf7, 0x6f, 0x1b, 0xb0, 0x29, 0xe7, 0xd6, 0x8b, 0x24, 0xbf, + 0x50, 0xd6, 0x9f, 0x01, 0x94, 0x55, 0x32, 0xab, 0x62, 0x94, 0xdd, 0x69, 0xec, 0x35, 0x5e, 0xaf, + 0xbd, 0xd9, 0x3d, 0xa8, 0xa3, 0x35, 0xf7, 0xe8, 0xe0, 0x46, 0xa9, 0xa3, 0xa5, 0x71, 0x6d, 0xfd, 
+ 0x09, 0xac, 0xaa, 0x7c, 0x6c, 0x14, 0x97, 0x7e, 0x52, 0xb1, 0xad, 0xf2, 0x31, 0xae, 0xba, 0xff, + 0xdd, 0x84, 0x55, 0xb7, 0x38, 0xd3, 0x31, 0xb6, 0x4e, 0x60, 0x23, 0xab, 0xbf, 0x63, 0x34, 0x59, + 0x5b, 0xb0, 0xf9, 0xe6, 0xcd, 0xc1, 0x47, 0xf2, 0x75, 0x30, 0xd7, 0xbe, 0xfe, 0x90, 0x57, 0x53, + 0x25, 0xd6, 0xb3, 0x85, 0x95, 0x45, 0x61, 0xf3, 0x76, 0x7c, 0xee, 0x9a, 0x88, 0xa7, 0x1d, 0x0c, + 0xb5, 0x08, 0x35, 0x12, 0x62, 0x63, 0xba, 0xb8, 0xb4, 0xbe, 0x84, 0x76, 0x96, 0x54, 0x71, 0x96, + 0x5f, 0xec, 0x34, 0xb5, 0xee, 0xe3, 0x5b, 0xba, 0x6e, 0x52, 0xb9, 0xf9, 0x85, 0x68, 0x65, 0xfa, + 0xaf, 0xf5, 0x39, 0xac, 0xcf, 0x92, 0x71, 0x7a, 0x59, 0xc6, 0x93, 0x34, 0x53, 0xe5, 0xce, 0xf2, + 0x5e, 0xe3, 0x75, 0x43, 0xac, 0x19, 0x9a, 0x87, 0xa4, 0xee, 0xff, 0x34, 0x60, 0x7d, 0xd1, 0x64, + 0xeb, 0x05, 0x7c, 0xe6, 0x06, 0x36, 0x95, 0x3c, 0xf0, 0x63, 0x39, 0x1a, 0xb2, 0x38, 0xf2, 0xc3, + 0x21, 0xb3, 0xf9, 0x11, 0x67, 0x0e, 0xf9, 0xc4, 0x5a, 0x83, 0xb6, 0x1d, 0x44, 0xbe, 0x14, 0x23, + 0xd2, 0xb0, 0xb6, 0xe1, 0x31, 0x75, 0x3c, 0xee, 0xf3, 0x50, 0x0a, 0x2a, 0xf9, 0x31, 0x8b, 0xa9, + 0x60, 0x94, 0x2c, 0x59, 0xcf, 0x60, 0x3b, 0x8c, 0x7a, 0xf1, 0x43, 0xcc, 0xa6, 0xb5, 0x0e, 0xab, + 0x78, 0x82, 0xcb, 0xe5, 0x88, 0x2c, 0x5b, 0x5b, 0xb0, 0x36, 0x0c, 0x42, 0x49, 0xdd, 0xd8, 0x0e, + 0x1c, 0x46, 0x56, 0x2c, 0x02, 0xeb, 0xa8, 0x7b, 0x2d, 0xd2, 0xb2, 0x2c, 0xd8, 0x5c, 0xa4, 0xc4, + 0x5f, 0x91, 0xf6, 0x3d, 0xda, 0x1b, 0xb2, 0x8a, 0x9a, 0x3e, 0xe3, 0xfd, 0x41, 0x2f, 0x10, 0x83, + 0x20, 0x70, 0x48, 0x47, 0x4b, 0x49, 0xc1, 0x98, 0x8c, 0xa9, 0xe3, 0x08, 0x16, 0x86, 0x04, 0xba, + 0xff, 0xd1, 0x80, 0x2d, 0xa1, 0xbe, 0xbf, 0x54, 0x65, 0xe5, 0xa9, 0x2a, 0x19, 0x27, 0x55, 0x62, + 0x3d, 0x85, 0xd6, 0xb8, 0x98, 0x24, 0x69, 0xae, 0x73, 0xdd, 0x11, 0xf5, 0xca, 0x7a, 0x01, 0x50, + 0xaa, 0xb2, 0x44, 0x24, 0xa4, 0x63, 0x9d, 0xad, 0x8e, 0xe8, 0xd4, 0x14, 0x3e, 0xb6, 0xb6, 0xa1, + 0x7d, 0x59, 0xaa, 0x19, 0xf2, 0x9a, 0x46, 0x0f, 0x97, 0x7c, 0x6c, 0xed, 0xc3, 0xa3, 0x24, 0xcb, + 0x8a, 0x1f, 0xe2, 0x49, 0x5a, 0x96, 0x69, 0x7e, 0x11, 0xa7, 0x63, 0x13, 0xfd, 0x55, 0xb1, 0xa5, + 0x19, 0x9e, 0xa1, 0xf3, 0x71, 0x69, 0x0d, 0x60, 0x6d, 0xac, 0xde, 0xa5, 0x67, 0x2a, 0x4e, 0xf3, + 0xf3, 0x62, 0x67, 0x45, 0xa7, 0xf5, 0x97, 0x1f, 0x05, 0x9b, 0xa3, 0xe5, 0x79, 0x7e, 0x5e, 0x08, + 0x18, 0x5f, 0x7f, 0x77, 0xbf, 0x02, 0x22, 0x54, 0x39, 0x2d, 0xf2, 0x52, 0x5d, 0x7b, 0xf6, 0x02, + 0x60, 0x66, 0x9c, 0x45, 0x2b, 0x8d, 0x77, 0x9d, 0x9a, 0xc2, 0xc7, 0xdd, 0xff, 0x6c, 0x00, 0xdc, + 0xec, 0x66, 0x85, 0xd7, 0xb6, 0xfc, 0xde, 0xc0, 0xbf, 0xd1, 0xae, 0x3f, 0x35, 0xf0, 0x6b, 0xb3, + 0x34, 0xa2, 0x36, 0x61, 0xe9, 0x3a, 0x78, 0x4b, 0xe9, 0xb8, 0x9b, 0xcd, 0x8f, 0xd4, 0xdc, 0x67, + 0xb0, 0xed, 0xb0, 0x63, 0x6e, 0xb3, 0x87, 0xd0, 0xd6, 0x86, 0xe6, 0x09, 0xeb, 0x91, 0x86, 0xb5, + 0x09, 0xe0, 0x05, 0x3d, 0xee, 0xb2, 0x18, 0xd7, 0x4b, 0x08, 0x43, 0xea, 0x3b, 0x22, 0xe0, 0x0e, + 0x69, 0xa2, 0x14, 0x0f, 0x42, 0xb2, 0x8c, 0x1f, 0xbd, 0x40, 0x92, 0x15, 0xab, 0x03, 0x2b, 0x81, + 0x1c, 0x30, 0x41, 0x5a, 0xdd, 0x1f, 0x60, 0xcb, 0xbe, 0x2c, 0xab, 0x62, 0x42, 0xab, 0x6a, 0x96, + 0x9e, 0x5e, 0x56, 0xca, 0xfa, 0x02, 0x36, 0xca, 0x6a, 0x86, 0x69, 0x79, 0x97, 0x64, 0x97, 0xaa, + 0xdc, 0x69, 0xec, 0x35, 0x5f, 0x77, 0xc4, 0xba, 0x21, 0x1e, 0x6b, 0x9a, 0xf5, 0x0a, 0xd6, 0xb2, + 0xe2, 0x46, 0x64, 0x69, 0xaf, 0xf9, 0xba, 0x29, 0x00, 0x49, 0xb5, 0xc0, 0x4b, 0x80, 0xf3, 0x34, + 0xab, 0xd4, 0x2c, 0x39, 0xcd, 0x94, 0xce, 0xff, 0xaa, 0x58, 0xa0, 0x74, 0xff, 0xa6, 0x01, 0x56, + 0x38, 0x55, 0x59, 0x96, 0xe6, 0x17, 0x76, 0x31, 0x9b, 0xa9, 0x33, 0xdd, 0x5d, 0x9e, 0x43, 0xe7, + 0xcc, 0xac, 0x94, 0xc9, 
0xc7, 0xaa, 0xb8, 0x21, 0x58, 0x7f, 0x04, 0x9b, 0xd7, 0x8b, 0xb8, 0x52, + 0xef, 0xab, 0x3a, 0x6e, 0x1b, 0xd7, 0x54, 0xa9, 0xde, 0x57, 0xb7, 0xc5, 0xbe, 0xab, 0x26, 0x59, + 0x8d, 0xbf, 0x1b, 0xb1, 0x41, 0x35, 0xc9, 0xba, 0xff, 0x06, 0x40, 0xec, 0x62, 0x32, 0x55, 0x79, + 0xa9, 0x0b, 0x5c, 0xe7, 0x78, 0x04, 0x6d, 0x95, 0x57, 0xb3, 0xb4, 0xf6, 0x7b, 0xed, 0xcd, 0x5f, + 0x7c, 0x34, 0xbf, 0x77, 0xf5, 0x6f, 0x11, 0x58, 0x5e, 0xcd, 0xae, 0xc4, 0x7c, 0x3f, 0xeb, 0xef, + 0x1b, 0xd0, 0x4d, 0xf2, 0xfc, 0x32, 0xc9, 0xd2, 0x1f, 0xd5, 0x38, 0x3e, 0x4d, 0x4a, 0x15, 0x9f, + 0x2d, 0x88, 0xc7, 0x33, 0xec, 0xef, 0x75, 0xd7, 0xfb, 0x03, 0x8e, 0xd5, 0xd7, 0x84, 0x78, 0x75, + 0x73, 0x54, 0x2f, 0x29, 0xd5, 0x3d, 0x01, 0xeb, 0xb7, 0x0d, 0xf8, 0x62, 0xc1, 0x9c, 0xaa, 0xc0, + 0xd6, 0xfb, 0x80, 0x3d, 0xcd, 0x9f, 0xc7, 0x9e, 0xbd, 0x9b, 0xb3, 0x24, 0x1e, 0x75, 0x4f, 0x62, + 0xf7, 0xdf, 0x9b, 0xf0, 0xe8, 0x5e, 0xf8, 0x2c, 0x01, 0xcb, 0x0b, 0xd5, 0xf6, 0x9b, 0xff, 0xbf, + 0x19, 0xba, 0xf2, 0xf4, 0x5e, 0xb8, 0xe7, 0x65, 0x9e, 0x1a, 0xf4, 0xfc, 0x41, 0x7b, 0x46, 0x79, + 0x5a, 0x09, 0xbd, 0x97, 0xf5, 0x25, 0xb4, 0x92, 0x49, 0x71, 0x99, 0x57, 0x75, 0xc0, 0xac, 0x5b, + 0x57, 0x8f, 0x87, 0x23, 0xc1, 0xe0, 0x13, 0x51, 0xcb, 0x58, 0x27, 0xb0, 0x62, 0xa2, 0xbb, 0xfc, + 0xb3, 0x44, 0x77, 0xf0, 0x89, 0x30, 0xfb, 0x59, 0x7b, 0xd8, 0xa3, 0xca, 0xb3, 0x59, 0x3a, 0x45, + 0xa6, 0xee, 0x97, 0x1d, 0xb1, 0x48, 0xb2, 0x42, 0xd8, 0x56, 0xef, 0xa7, 0xa6, 0x38, 0xd0, 0xf2, + 0x32, 0x9e, 0xaa, 0x59, 0x7c, 0xa5, 0x92, 0xd9, 0x4e, 0x4b, 0x1b, 0xf3, 0xfc, 0xde, 0x4c, 0xe0, + 0x14, 0x97, 0xa7, 0x99, 0xd2, 0x95, 0x2d, 0x3e, 0x9d, 0x2b, 0xa3, 0xef, 0xe5, 0x50, 0xcd, 0x46, + 0x2a, 0x99, 0xf5, 0x9e, 0xc0, 0xe3, 0x5b, 0xd0, 0x31, 0x6e, 0xee, 0xfe, 0x43, 0xe3, 0x76, 0x4a, + 0x0d, 0xf2, 0xfe, 0x1c, 0xc8, 0x24, 0x79, 0x7f, 0x0b, 0x6b, 0x35, 0xea, 0x1f, 0x08, 0x9a, 0xd8, + 0x9a, 0x24, 0xef, 0x17, 0xf7, 0xd0, 0xea, 0x69, 0x7e, 0x5b, 0xbd, 0xf1, 0x11, 0xf5, 0x34, 0x5f, + 0x54, 0xef, 0xfe, 0x4b, 0xe3, 0x76, 0xd9, 0xeb, 0x3e, 0xfb, 0x39, 0xbc, 0xb0, 0x03, 0x6f, 0xc8, + 0xfc, 0xf0, 0x83, 0x77, 0xfb, 0x2a, 0x2c, 0xf7, 0x68, 0xc8, 0x48, 0x03, 0xfb, 0x67, 0x2f, 0xf0, + 0xa3, 0x90, 0x2c, 0x59, 0x8f, 0x60, 0x23, 0xe4, 0x7d, 0x9f, 0xfb, 0xfd, 0xd8, 0x90, 0x9a, 0x16, + 0x40, 0x8b, 0xfd, 0x65, 0x64, 0xae, 0x6f, 0x0b, 0x36, 0x87, 0x22, 0x38, 0xe2, 0x32, 0x0e, 0x07, + 0x54, 0x70, 0xbf, 0x4f, 0x56, 0xf0, 0x4a, 0xb7, 0x03, 0xcf, 0xe3, 0x61, 0xc8, 0x03, 0x3f, 0x24, + 0x2d, 0xdc, 0x58, 0xf2, 0x61, 0x48, 0xda, 0xd8, 0xed, 0x75, 0x63, 0x8e, 0xef, 0xd9, 0x42, 0x56, + 0xbb, 0xff, 0x74, 0xc7, 0x6e, 0x8c, 0xff, 0x3d, 0xbb, 0x23, 0x9f, 0xcb, 0x3b, 0x76, 0x03, 0xb4, + 0x06, 0x41, 0x24, 0xdc, 0x91, 0xb1, 0xdc, 0xa1, 0xdc, 0x1d, 0x91, 0x25, 0x24, 0x9f, 0x30, 0xf6, + 0xd6, 0x1d, 0x91, 0x26, 0xde, 0x17, 0x5e, 0xe0, 0xcb, 0x81, 0x8b, 0x36, 0x03, 0xb4, 0x46, 0x8c, + 0xa2, 0xfc, 0x0a, 0x0e, 0x23, 0x81, 0xcf, 0x62, 0xc9, 0x3d, 0x46, 0x5a, 0x1f, 0x30, 0x0f, 0x8f, + 0x24, 0xed, 0xee, 0xef, 0x1a, 0xb0, 0x61, 0xab, 0x59, 0x95, 0x9e, 0xa7, 0xf5, 0xa4, 0xf8, 0x39, + 0xac, 0x8f, 0xd3, 0x72, 0x9a, 0x25, 0x57, 0x71, 0x9e, 0xd4, 0xa3, 0x2a, 0x62, 0xd1, 0xd0, 0xfc, + 0x64, 0xa2, 0xac, 0x5f, 0xc1, 0x7a, 0x72, 0xf6, 0xfd, 0x65, 0x3a, 0x53, 0x31, 0x8e, 0xd9, 0x35, + 0x0a, 0x1e, 0xdd, 0x4a, 0xa3, 0x93, 0x54, 0x4a, 0xac, 0xd5, 0x62, 0xb8, 0xb0, 0xde, 0xc0, 0x9a, + 0x7a, 0x3f, 0xbd, 0x56, 0x6a, 0x7e, 0x48, 0x09, 0x8c, 0x94, 0xd6, 0x79, 0x0e, 0x9d, 0xe4, 0xb2, + 0xfa, 0xae, 0x98, 0xa5, 0xd5, 0x95, 0x2e, 0xba, 0x8e, 0xb8, 0x21, 0xfc, 0x74, 0xd5, 0x74, 0xff, + 0xb7, 0x01, 0x2b, 0xe1, 0x5f, 0xa7, 0x59, 0xf6, 
0xfb, 0xb8, 0xf5, 0xa7, 0xb0, 0x99, 0x25, 0x65, + 0x15, 0x5f, 0x96, 0x6a, 0xfc, 0x13, 0x8e, 0xad, 0xa3, 0x60, 0x54, 0xaa, 0xb1, 0xb6, 0x72, 0x00, + 0x2b, 0x99, 0x7a, 0xa7, 0xcc, 0x85, 0xf5, 0x53, 0xb3, 0x85, 0x36, 0x67, 0x38, 0x2b, 0xce, 0xd3, + 0xb3, 0x54, 0xe5, 0x67, 0x57, 0x2e, 0x6a, 0x0a, 0xb3, 0x81, 0xb5, 0x03, 0xed, 0xb3, 0x22, 0xd7, + 0x77, 0xa4, 0xf1, 0x76, 0xbe, 0xb4, 0xbe, 0x04, 0xab, 0x44, 0x4d, 0x6d, 0x7d, 0x5c, 0xe6, 0xe9, + 0x74, 0xaa, 0xaa, 0xda, 0x65, 0xa2, 0x39, 0xe8, 0x43, 0x68, 0xe8, 0xdd, 0xbf, 0x6b, 0x40, 0x87, + 0xe7, 0x95, 0x9a, 0xbd, 0x4b, 0xd5, 0x0f, 0xd6, 0xaf, 0xa1, 0x35, 0x4b, 0xaa, 0x34, 0xbf, 0xa8, + 0x5b, 0xc5, 0x17, 0x1f, 0x35, 0x50, 0x68, 0x51, 0x51, 0xab, 0x58, 0xbf, 0x81, 0x76, 0x71, 0x59, + 0x9d, 0x15, 0x13, 0xb5, 0xd3, 0xd6, 0xee, 0xfd, 0xe2, 0xa3, 0xda, 0x81, 0x91, 0x15, 0x73, 0xa5, + 0xee, 0x29, 0xb4, 0xcc, 0x8e, 0xe8, 0x5c, 0xf1, 0x4e, 0xcd, 0x92, 0x2c, 0xd3, 0xd1, 0x6f, 0x88, + 0xf9, 0xd2, 0x22, 0xd0, 0x9c, 0xa4, 0xa6, 0x9b, 0x34, 0x04, 0x7e, 0x6a, 0x4a, 0xf2, 0x5e, 0x07, + 0x14, 0x29, 0xc9, 0x7b, 0x6b, 0x17, 0x56, 0x53, 0xed, 0x51, 0x92, 0xd5, 0x33, 0xff, 0xf5, 0x7a, + 0xff, 0x0a, 0xd6, 0xb0, 0xc6, 0x92, 0xfc, 0x2a, 0x4c, 0x7f, 0x44, 0xd4, 0xec, 0x20, 0xd6, 0xa9, + 0x3f, 0x8a, 0x43, 0xfe, 0xed, 0x03, 0x1d, 0x01, 0x27, 0x78, 0x53, 0x57, 0xa1, 0x47, 0x5d, 0xd7, + 0xcc, 0x5e, 0xa1, 0xc7, 0x1c, 0x1e, 0x79, 0xa6, 0x17, 0xd4, 0xdf, 0x66, 0xfc, 0xe2, 0xd8, 0x00, + 0x00, 0x5a, 0x3d, 0xde, 0xef, 0xe3, 0xfc, 0x85, 0x8a, 0x7d, 0x4e, 0x7d, 0x49, 0xda, 0xfb, 0xff, + 0xd5, 0x00, 0xf8, 0xba, 0x38, 0xed, 0xa9, 0x5c, 0x9d, 0xa7, 0x15, 0x16, 0xdb, 0xd7, 0x41, 0x2f, + 0xee, 0x31, 0x9f, 0x1d, 0xdd, 0xab, 0xe9, 0x4d, 0x00, 0x7b, 0xc0, 0x5d, 0x27, 0xb6, 0xa9, 0xc0, + 0x8e, 0x04, 0xd0, 0x72, 0x98, 0x2f, 0x29, 0x1a, 0xf0, 0x29, 0x10, 0x27, 0xf0, 0x58, 0x28, 0xb9, + 0x1d, 0x0f, 0xa9, 0x90, 0x3e, 0x13, 0xa4, 0x89, 0x9d, 0xe8, 0xc8, 0x65, 0xdf, 0xf0, 0x9e, 0xcb, + 0x62, 0x6c, 0x07, 0x38, 0x10, 0x62, 0xd9, 0x33, 0x87, 0xdb, 0xd4, 0x25, 0x2b, 0x28, 0xe0, 0xf2, + 0x23, 0x16, 0x73, 0x3f, 0x8c, 0x04, 0xf5, 0x6d, 0x66, 0x9e, 0x16, 0x43, 0x2a, 0xf4, 0xc6, 0xb1, + 0xcb, 0xe8, 0x31, 0x23, 0x6d, 0xeb, 0x31, 0x6c, 0x09, 0x26, 0xb9, 0x60, 0x1e, 0xf3, 0x65, 0x3c, + 0x74, 0xa9, 0x4f, 0x56, 0xad, 0x0d, 0xe8, 0x84, 0xdc, 0x7e, 0x1b, 0x3b, 0x74, 0x14, 0x92, 0x0e, + 0xb6, 0x8d, 0x63, 0x6a, 0x5e, 0x49, 0x04, 0xd0, 0xb8, 0x63, 0x8e, 0xcd, 0x8e, 0xac, 0xed, 0xff, + 0x76, 0x09, 0xc7, 0xdb, 0x8b, 0x99, 0x5a, 0x18, 0x6f, 0xfb, 0x82, 0x3d, 0x38, 0xde, 0x3e, 0x81, + 0x47, 0x43, 0xc1, 0x3d, 0x2a, 0x46, 0x31, 0x73, 0xa2, 0x7a, 0xbb, 0x86, 0x79, 0x82, 0x9d, 0x30, + 0x11, 0x87, 0xcc, 0x0e, 0x7c, 0xe7, 0x36, 0x7b, 0x09, 0xd9, 0xd1, 0x70, 0xf8, 0x01, 0x76, 0x13, + 0x33, 0x4a, 0x9d, 0xc8, 0x95, 0x31, 0x3a, 0xe0, 0x70, 0xea, 0x2e, 0x70, 0x97, 0x35, 0x37, 0x0c, + 0x03, 0x9b, 0x53, 0xc9, 0xc2, 0x38, 0x10, 0x31, 0x76, 0xf2, 0x63, 0xea, 0x32, 0x1f, 0xe7, 0xe6, + 0x67, 0xb0, 0xdd, 0xa3, 0xf6, 0x80, 0xb9, 0x81, 0xb8, 0xcb, 0x6c, 0x59, 0x9f, 0xc1, 0x13, 0x8f, + 0x86, 0x92, 0xdd, 0x63, 0xb5, 0xad, 0x5d, 0x78, 0xea, 0x04, 0xb6, 0x0c, 0x04, 0x75, 0xef, 0xf0, + 0x56, 0xf7, 0x7f, 0xd7, 0x80, 0x4d, 0x36, 0x99, 0x66, 0xc5, 0xd5, 0x44, 0xe5, 0x95, 0x0e, 0xca, + 0x2b, 0x78, 0xc6, 0xbc, 0xa1, 0x1b, 0x8c, 0x74, 0x84, 0x1f, 0x08, 0xcc, 0x06, 0x74, 0x8e, 0x22, + 0xd7, 0x35, 0x6d, 0xb9, 0x81, 0x4b, 0xcc, 0xb3, 0x59, 0x2e, 0x69, 0x6c, 0x04, 0xbe, 0x14, 0x14, + 0x4f, 0x24, 0x4d, 0xc4, 0xc3, 0x7c, 0x1d, 0xcb, 0x20, 0x1e, 0x70, 0xc1, 0xc8, 0x32, 0x2a, 0x49, + 0xe6, 0x0d, 0x03, 0x41, 0xc5, 0xc8, 0x60, 0x92, 0xfb, 0x92, 0x09, 0x9f, 
0xb4, 0x90, 0x75, 0x1c, + 0xb8, 0x91, 0x2f, 0x19, 0x13, 0xa4, 0x8d, 0xc9, 0xc4, 0x70, 0x3a, 0x9c, 0x79, 0x64, 0xd5, 0xe0, + 0x68, 0x14, 0x73, 0x3f, 0xc6, 0x3f, 0x41, 0x24, 0x49, 0x07, 0x5d, 0x37, 0xf7, 0xc2, 0x1d, 0xb3, + 0x09, 0xec, 0x7f, 0x0f, 0xab, 0x5f, 0x17, 0xa7, 0xba, 0x33, 0xa1, 0x18, 0x22, 0xda, 0x65, 0xc7, + 0xcc, 0xbd, 0xe3, 0xd1, 0x16, 0xac, 0x31, 0x7c, 0x35, 0x1b, 0x26, 0x69, 0x68, 0xc2, 0x37, 0x43, + 0x26, 0x38, 0xf3, 0x6d, 0xe6, 0x98, 0xb2, 0xf2, 0xa8, 0x4f, 0xfb, 0x1a, 0xcc, 0xeb, 0xb0, 0xea, + 0x70, 0xc1, 0xb4, 0x83, 0xda, 0x15, 0xf6, 0x0d, 0xb3, 0x23, 0x7c, 0x45, 0x93, 0x95, 0xfd, 0x7f, + 0x6d, 0xc1, 0xda, 0xd7, 0xc5, 0xa9, 0x9d, 0x54, 0xea, 0xa2, 0x98, 0x5d, 0x61, 0x4e, 0xf1, 0x58, + 0x9b, 0x4a, 0xd6, 0x0f, 0xc4, 0xe8, 0xce, 0xc9, 0xbb, 0xf0, 0x94, 0xda, 0xfa, 0xcd, 0x8e, 0x77, + 0x38, 0xf5, 0x9d, 0xf8, 0x88, 0xfb, 0x1a, 0xfe, 0x1a, 0x69, 0x77, 0xdf, 0xe8, 0xbe, 0x13, 0x07, + 0x47, 0x47, 0xdc, 0x66, 0x06, 0x69, 0xd4, 0x39, 0x66, 0x42, 0xf2, 0x70, 0xae, 0xeb, 0x51, 0xf1, + 0x96, 0xe1, 0x4e, 0xa4, 0x89, 0x2e, 0x50, 0x9f, 0x7b, 0xf8, 0x74, 0xa7, 0x3a, 0xe4, 0x78, 0x94, + 0x90, 0xf1, 0x11, 0x0d, 0x07, 0x78, 0x6f, 0xa2, 0xbc, 0xc3, 0x70, 0x7a, 0x20, 0x2b, 0xd6, 0x36, + 0x3c, 0xee, 0x45, 0x21, 0xf7, 0x59, 0x18, 0xc6, 0xc1, 0x90, 0x09, 0x0d, 0xc8, 0xd0, 0xdc, 0xb9, + 0xb6, 0xcb, 0xa8, 0x7f, 0x6d, 0x1d, 0xb5, 0xb9, 0xcb, 0x25, 0x67, 0xa1, 0xa9, 0x45, 0x6c, 0x4f, + 0x91, 0x64, 0x42, 0x33, 0xb9, 0x34, 0xef, 0x7c, 0x3b, 0xf0, 0x43, 0x29, 0x22, 0x5b, 0xa3, 0xba, + 0xa3, 0x11, 0x10, 0x85, 0x32, 0xf0, 0x74, 0x55, 0x08, 0x7c, 0x4e, 0x12, 0xd0, 0x61, 0xbb, 0x86, + 0xfe, 0x1a, 0x86, 0x89, 0x21, 0x02, 0x24, 0xe5, 0xbe, 0xce, 0x20, 0x6e, 0x28, 0x05, 0xc5, 0x7c, + 0xac, 0x5b, 0x3b, 0xf0, 0xe9, 0x11, 0x15, 0xde, 0xdc, 0x8a, 0x20, 0x92, 0x4e, 0x10, 0x88, 0x90, + 0x6c, 0x20, 0xdc, 0x06, 0x8c, 0xba, 0x72, 0xa0, 0xbd, 0xdc, 0x44, 0x9b, 0x06, 0x91, 0x47, 0xfd, + 0x58, 0xb0, 0x30, 0x88, 0x84, 0xcd, 0x42, 0xb2, 0x65, 0xfd, 0x12, 0xbe, 0xe0, 0x7e, 0x28, 0xa9, + 0xeb, 0x9a, 0x99, 0xc1, 0xa3, 0x88, 0x36, 0x1d, 0x67, 0xbd, 0x9f, 0x60, 0x43, 0xca, 0x05, 0x21, + 0xd8, 0x0f, 0x5d, 0xd6, 0xa7, 0x2e, 0x79, 0xa4, 0x1f, 0xb5, 0x3a, 0xe3, 0x68, 0x0d, 0xb1, 0xb0, + 0x2c, 0x3c, 0xea, 0x47, 0x47, 0xd4, 0x96, 0x91, 0x98, 0x1b, 0x72, 0x42, 0x05, 0x1b, 0x04, 0x51, + 0xc8, 0xc8, 0x63, 0xeb, 0x17, 0xb0, 0xa7, 0x4b, 0x1a, 0xc7, 0x13, 0x2f, 0xf2, 0xb9, 0x71, 0x2d, + 0x34, 0x72, 0x82, 0xeb, 0xb4, 0x7c, 0x6a, 0x3d, 0x05, 0x2b, 0xe0, 0x6e, 0xdc, 0xa7, 0x86, 0x81, + 0xe9, 0xf5, 0xfb, 0xe4, 0x89, 0xf5, 0x12, 0x76, 0x87, 0x4c, 0x84, 0x81, 0x5f, 0x27, 0x4c, 0x73, + 0xeb, 0x68, 0x85, 0xe4, 0x29, 0x66, 0x68, 0x28, 0x02, 0xc9, 0x6c, 0x0d, 0x84, 0x6b, 0xc6, 0x36, + 0xe6, 0x59, 0x30, 0xec, 0x23, 0xa1, 0xa4, 0x92, 0x91, 0x1d, 0xdc, 0x49, 0xe0, 0x02, 0xdb, 0xa8, + 0x09, 0xe3, 0x20, 0x08, 0x87, 0x5c, 0x9a, 0x1f, 0x6c, 0x3e, 0xc3, 0x74, 0x84, 0xd4, 0x65, 0x61, + 0xed, 0xb9, 0xa4, 0xdc, 0x25, 0xbb, 0xfa, 0x47, 0x21, 0x5b, 0xc3, 0x5d, 0xd3, 0x99, 0xdf, 0xe7, + 0x3e, 0x63, 0x7a, 0x66, 0x7c, 0x66, 0x75, 0xe1, 0xa5, 0xee, 0x4a, 0xee, 0xf5, 0xc1, 0x5a, 0xc8, + 0x0f, 0xfc, 0xd8, 0xcc, 0x97, 0xe4, 0x39, 0x8e, 0x82, 0xe1, 0x30, 0x10, 0x32, 0x8c, 0x8f, 0xb8, + 0xd4, 0x40, 0x32, 0xfb, 0xdb, 0x82, 0x99, 0x1c, 0xbf, 0xb0, 0xf6, 0xe0, 0xb9, 0x14, 0xd4, 0xd7, + 0x62, 0x26, 0x11, 0x28, 0xe2, 0x06, 0x7d, 0x8e, 0xb7, 0x45, 0x48, 0x5e, 0xee, 0x2b, 0xd8, 0x18, + 0x16, 0xa5, 0xbe, 0xb6, 0xd5, 0x05, 0x0e, 0x71, 0x18, 0x96, 0x20, 0xd4, 0xc5, 0x21, 0x58, 0xdf, + 0xcc, 0x7b, 0x8b, 0xf5, 0xf3, 0x81, 0x1f, 0xb9, 0xf4, 0x95, 0xe4, 0xcf, 0x7b, 0xf2, 0x16, 0xac, + 
0x49, 0xe6, 0x32, 0x9d, 0x17, 0xc9, 0x48, 0x73, 0xff, 0xaf, 0x00, 0x8e, 0xd3, 0x32, 0x3d, 0x4d, + 0x33, 0x9c, 0xbe, 0x76, 0xe1, 0x29, 0x5e, 0x10, 0x3d, 0xae, 0x7f, 0xab, 0xba, 0xbd, 0x3f, 0x81, + 0xf5, 0xba, 0x3e, 0xe3, 0xc0, 0xd7, 0x33, 0xec, 0x53, 0xb0, 0x70, 0x98, 0x66, 0x4e, 0x7c, 0xc2, + 0xe5, 0x20, 0xee, 0x07, 0x41, 0xdf, 0xc5, 0x72, 0xbc, 0x43, 0x1f, 0x46, 0x3d, 0x97, 0xdb, 0xa4, + 0xb9, 0xff, 0x2d, 0x8e, 0xcd, 0x79, 0x95, 0x9c, 0x55, 0xf8, 0x76, 0x8a, 0xca, 0xe4, 0x42, 0x61, + 0x3c, 0xb1, 0x27, 0x62, 0x4b, 0xe4, 0xfe, 0x51, 0x10, 0x47, 0x21, 0xed, 0xdf, 0xed, 0xb2, 0xa6, + 0xef, 0x69, 0x40, 0x90, 0x06, 0xde, 0xf5, 0x27, 0x81, 0x78, 0x6b, 0x06, 0xe7, 0xd0, 0x1e, 0x04, + 0x81, 0x4b, 0x9a, 0xfb, 0xdf, 0x03, 0x19, 0x54, 0x93, 0x2c, 0x4c, 0xf2, 0xb4, 0x4a, 0x7f, 0x9c, + 0x8f, 0xbd, 0x2f, 0x06, 0xd2, 0x73, 0xe3, 0x90, 0xfa, 0x5c, 0xf2, 0x6f, 0xe7, 0x43, 0xf2, 0xe2, + 0xd6, 0x2f, 0x61, 0xf7, 0xbe, 0x88, 0xc3, 0x43, 0xda, 0x73, 0x99, 0x43, 0x1a, 0x18, 0x90, 0x90, + 0x7b, 0x43, 0x97, 0xc5, 0x47, 0x81, 0xf0, 0xa8, 0xd4, 0xe1, 0xd7, 0xee, 0x2f, 0xed, 0x9f, 0xc1, + 0x86, 0x5d, 0x4c, 0x26, 0x97, 0x95, 0xf2, 0x54, 0xf5, 0x5d, 0x31, 0xc6, 0xcd, 0xea, 0xc0, 0xc6, + 0x1e, 0x93, 0x83, 0xc0, 0xb9, 0xff, 0x9b, 0xa4, 0x23, 0xf8, 0x31, 0x02, 0xa9, 0x81, 0x0b, 0x8d, + 0x00, 0x2e, 0x4d, 0x4f, 0x3d, 0xa1, 0xee, 0x5b, 0xd3, 0xae, 0xd6, 0xa0, 0x6d, 0x8f, 0x6c, 0x17, + 0x17, 0xcb, 0xfb, 0xff, 0xd8, 0x80, 0x27, 0x0f, 0x8e, 0x97, 0x58, 0xc9, 0xe1, 0x5b, 0xee, 0xba, + 0x06, 0x77, 0x1a, 0xb0, 0xa3, 0x07, 0x5b, 0xfa, 0x67, 0xf0, 0xe4, 0x28, 0xf2, 0x1d, 0xea, 0x99, + 0xf1, 0x81, 0x62, 0xa5, 0x22, 0x2a, 0x6b, 0x68, 0x04, 0xc7, 0xa6, 0x89, 0x12, 0x58, 0xd7, 0x17, + 0x8f, 0xae, 0x5c, 0xc4, 0x06, 0xc6, 0x9d, 0x3a, 0xc7, 0x54, 0xf7, 0x7d, 0xfd, 0x1a, 0xd1, 0x17, + 0x81, 0x24, 0x2b, 0xfb, 0xe7, 0xd0, 0xae, 0x87, 0x42, 0x84, 0x5d, 0x10, 0x49, 0x3b, 0xf0, 0x1e, + 0xca, 0x5a, 0x10, 0x72, 0x7d, 0x17, 0x68, 0x77, 0x7d, 0x16, 0x49, 0xa1, 0x07, 0xa3, 0x75, 0x58, + 0xf5, 0x59, 0x5f, 0x83, 0x94, 0x34, 0xf5, 0xa5, 0x55, 0xef, 0xe0, 0x07, 0x32, 0xa6, 0xc7, 0x94, + 0xbb, 0x18, 0x7f, 0xb2, 0xdc, 0xbb, 0x82, 0x57, 0x67, 0xc5, 0xe4, 0x63, 0xc3, 0x69, 0x0f, 0xa7, + 0xc4, 0x49, 0x91, 0x0f, 0xf1, 0x85, 0x3c, 0x6c, 0x7c, 0x4b, 0x6b, 0xd9, 0x8b, 0x22, 0x4b, 0xf2, + 0x8b, 0x83, 0x62, 0x76, 0x71, 0x78, 0xa1, 0x72, 0xfd, 0x7e, 0x3e, 0x34, 0xac, 0x64, 0x9a, 0x96, + 0x0f, 0xfe, 0xd7, 0xe3, 0xd7, 0x66, 0xf9, 0xcf, 0x4b, 0x4d, 0x5b, 0x86, 0xa7, 0x2d, 0xad, 0xf3, + 0xc7, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0x66, 0xba, 0x70, 0xa9, 0x28, 0x19, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company.pb.go new file mode 100644 index 0000000..24f1b0b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company.pb.go @@ -0,0 +1,311 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/company.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Company resource represents a company in the service. A company is the +// entity that owns job postings, that is, the hiring entity responsible for +// employing applicants for the job position. +type Company struct { + // Required during company update. + // + // The resource name for a company. This is generated by the service when a + // company is created. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The display name of the company, for example, "Google, LLC". + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. + // + // Client side company identifier, used to uniquely identify the + // company. + // + // The maximum number of allowed characters is 255. + ExternalId string `protobuf:"bytes,3,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"` + // Optional. + // + // The employer's company size. + Size CompanySize `protobuf:"varint,4,opt,name=size,proto3,enum=google.cloud.talent.v4beta1.CompanySize" json:"size,omitempty"` + // Optional. + // + // The street address of the company's main headquarters, which may be + // different from the job location. The service attempts + // to geolocate the provided address, and populates a more specific + // location wherever possible in [DerivedInfo.headquarters_location][google.cloud.talent.v4beta1.Company.DerivedInfo.headquarters_location]. + HeadquartersAddress string `protobuf:"bytes,5,opt,name=headquarters_address,json=headquartersAddress,proto3" json:"headquarters_address,omitempty"` + // Optional. + // + // Set to true if it is the hiring agency that post jobs for other + // employers. + // + // Defaults to false if not provided. + HiringAgency bool `protobuf:"varint,6,opt,name=hiring_agency,json=hiringAgency,proto3" json:"hiring_agency,omitempty"` + // Optional. + // + // Equal Employment Opportunity legal disclaimer text to be + // associated with all jobs, and typically to be displayed in all + // roles. + // + // The maximum number of allowed characters is 500. + EeoText string `protobuf:"bytes,7,opt,name=eeo_text,json=eeoText,proto3" json:"eeo_text,omitempty"` + // Optional. + // + // The URI representing the company's primary web site or home page, + // for example, "https://www.google.com". + // + // The maximum number of allowed characters is 255. + WebsiteUri string `protobuf:"bytes,8,opt,name=website_uri,json=websiteUri,proto3" json:"website_uri,omitempty"` + // Optional. + // + // The URI to employer's career site or careers page on the employer's web + // site, for example, "https://careers.google.com". + CareerSiteUri string `protobuf:"bytes,9,opt,name=career_site_uri,json=careerSiteUri,proto3" json:"career_site_uri,omitempty"` + // Optional. + // + // A URI that hosts the employer's company logo. + ImageUri string `protobuf:"bytes,10,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // Optional. 
+ // + // A list of keys of filterable [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes], whose + // corresponding `string_values` are used in keyword searches. Jobs with + // `string_values` under these specified field keys are returned if any + // of the values match the search keyword. Custom field values with + // parenthesis, brackets and special symbols are not searchable as-is, + // and those keyword queries must be surrounded by quotes. + KeywordSearchableJobCustomAttributes []string `protobuf:"bytes,11,rep,name=keyword_searchable_job_custom_attributes,json=keywordSearchableJobCustomAttributes,proto3" json:"keyword_searchable_job_custom_attributes,omitempty"` + // Output only. Derived details about the company. + DerivedInfo *Company_DerivedInfo `protobuf:"bytes,12,opt,name=derived_info,json=derivedInfo,proto3" json:"derived_info,omitempty"` + // Output only. Indicates whether a company is flagged to be suspended from + // public availability by the service when job content appears suspicious, + // abusive, or spammy. + Suspended bool `protobuf:"varint,13,opt,name=suspended,proto3" json:"suspended,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Company) Reset() { *m = Company{} } +func (m *Company) String() string { return proto.CompactTextString(m) } +func (*Company) ProtoMessage() {} +func (*Company) Descriptor() ([]byte, []int) { + return fileDescriptor_company_b5be25b03cde4ac9, []int{0} +} +func (m *Company) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Company.Unmarshal(m, b) +} +func (m *Company) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Company.Marshal(b, m, deterministic) +} +func (dst *Company) XXX_Merge(src proto.Message) { + xxx_messageInfo_Company.Merge(dst, src) +} +func (m *Company) XXX_Size() int { + return xxx_messageInfo_Company.Size(m) +} +func (m *Company) XXX_DiscardUnknown() { + xxx_messageInfo_Company.DiscardUnknown(m) +} + +var xxx_messageInfo_Company proto.InternalMessageInfo + +func (m *Company) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Company) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Company) GetExternalId() string { + if m != nil { + return m.ExternalId + } + return "" +} + +func (m *Company) GetSize() CompanySize { + if m != nil { + return m.Size + } + return CompanySize_COMPANY_SIZE_UNSPECIFIED +} + +func (m *Company) GetHeadquartersAddress() string { + if m != nil { + return m.HeadquartersAddress + } + return "" +} + +func (m *Company) GetHiringAgency() bool { + if m != nil { + return m.HiringAgency + } + return false +} + +func (m *Company) GetEeoText() string { + if m != nil { + return m.EeoText + } + return "" +} + +func (m *Company) GetWebsiteUri() string { + if m != nil { + return m.WebsiteUri + } + return "" +} + +func (m *Company) GetCareerSiteUri() string { + if m != nil { + return m.CareerSiteUri + } + return "" +} + +func (m *Company) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *Company) GetKeywordSearchableJobCustomAttributes() []string { + if m != nil { + return m.KeywordSearchableJobCustomAttributes + } + return nil +} + +func (m *Company) GetDerivedInfo() *Company_DerivedInfo { + if m != nil { + return m.DerivedInfo + } + return nil +} + +func (m *Company) GetSuspended() bool { + if m != nil { + return m.Suspended + } + return false 
+} + +// Derived details about the company. +type Company_DerivedInfo struct { + // A structured headquarters location of the company, resolved from + // [Company.hq_location][] if provided. + HeadquartersLocation *Location `protobuf:"bytes,1,opt,name=headquarters_location,json=headquartersLocation,proto3" json:"headquarters_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Company_DerivedInfo) Reset() { *m = Company_DerivedInfo{} } +func (m *Company_DerivedInfo) String() string { return proto.CompactTextString(m) } +func (*Company_DerivedInfo) ProtoMessage() {} +func (*Company_DerivedInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_company_b5be25b03cde4ac9, []int{0, 0} +} +func (m *Company_DerivedInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Company_DerivedInfo.Unmarshal(m, b) +} +func (m *Company_DerivedInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Company_DerivedInfo.Marshal(b, m, deterministic) +} +func (dst *Company_DerivedInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Company_DerivedInfo.Merge(dst, src) +} +func (m *Company_DerivedInfo) XXX_Size() int { + return xxx_messageInfo_Company_DerivedInfo.Size(m) +} +func (m *Company_DerivedInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Company_DerivedInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Company_DerivedInfo proto.InternalMessageInfo + +func (m *Company_DerivedInfo) GetHeadquartersLocation() *Location { + if m != nil { + return m.HeadquartersLocation + } + return nil +} + +func init() { + proto.RegisterType((*Company)(nil), "google.cloud.talent.v4beta1.Company") + proto.RegisterType((*Company_DerivedInfo)(nil), "google.cloud.talent.v4beta1.Company.DerivedInfo") +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/company.proto", fileDescriptor_company_b5be25b03cde4ac9) +} + +var fileDescriptor_company_b5be25b03cde4ac9 = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x95, 0xb5, 0x5b, 0x5b, 0xa7, 0x05, 0xc9, 0x0c, 0x29, 0x74, 0x93, 0x56, 0xfe, 0x2a, + 0x5c, 0x12, 0x56, 0xb8, 0xc1, 0xa5, 0x2b, 0x97, 0x21, 0x84, 0xa6, 0x74, 0x70, 0xd8, 0xc5, 0x72, + 0xe2, 0x77, 0xa9, 0x21, 0xb1, 0x83, 0xed, 0x6c, 0xed, 0x8e, 0x7c, 0x14, 0x3e, 0x00, 0x9f, 0x11, + 0xd5, 0xce, 0xba, 0x4d, 0x42, 0x65, 0xb7, 0xe4, 0x79, 0x7e, 0xcf, 0x6b, 0xbf, 0xf6, 0x6b, 0xf4, + 0x3a, 0x97, 0x32, 0x2f, 0x20, 0xce, 0x0a, 0x59, 0xb3, 0xd8, 0xd0, 0x02, 0x84, 0x89, 0x2f, 0xde, + 0xa5, 0x60, 0xe8, 0x61, 0x9c, 0xc9, 0xb2, 0xa2, 0x62, 0x19, 0x55, 0x4a, 0x1a, 0x89, 0xf7, 0x1c, + 0x1a, 0x59, 0x34, 0x72, 0x68, 0xd4, 0xa0, 0xc3, 0xf0, 0x3f, 0x75, 0x4a, 0x29, 0x5c, 0x99, 0xe1, + 0x7e, 0x43, 0xd2, 0x8a, 0xc7, 0x54, 0x08, 0x69, 0xa8, 0xe1, 0x52, 0x68, 0xe7, 0x3e, 0xfb, 0xb3, + 0x8d, 0x3a, 0x53, 0xb7, 0x2c, 0xc6, 0xa8, 0x2d, 0x68, 0x09, 0x81, 0x37, 0xf2, 0xc2, 0x5e, 0x62, + 0xbf, 0xf1, 0x53, 0xd4, 0x67, 0x5c, 0x57, 0x05, 0x5d, 0x12, 0xeb, 0x6d, 0x59, 0xcf, 0x6f, 0xb4, + 0x2f, 0x2b, 0xe4, 0x00, 0xf9, 0xb0, 0x30, 0xa0, 0x04, 0x2d, 0x08, 0x67, 0x41, 0xcb, 0x12, 0xe8, + 0x5a, 0x3a, 0x66, 0xf8, 0x03, 0x6a, 0x6b, 0x7e, 0x05, 0x41, 0x7b, 0xe4, 0x85, 0x0f, 0xc6, 0x61, + 0xb4, 0xa1, 0xaf, 0xa8, 0xd9, 0xcb, 0x8c, 0x5f, 0x41, 0x62, 0x53, 0xf8, 0x10, 0xed, 0xce, 0x81, + 0xb2, 0x9f, 0x35, 0x55, 0x06, 0x94, 0x26, 0x94, 0x31, 0x05, 0x5a, 0x07, 0xdb, 0x76, 0x9d, 0x47, + 0xb7, 0xbd, 0x89, 
0xb3, 0xf0, 0x73, 0x34, 0x98, 0x73, 0xc5, 0x45, 0x4e, 0x68, 0x0e, 0x22, 0x5b, + 0x06, 0x3b, 0x23, 0x2f, 0xec, 0x26, 0x7d, 0x27, 0x4e, 0xac, 0x86, 0x9f, 0xa0, 0x2e, 0x80, 0x24, + 0x06, 0x16, 0x26, 0xe8, 0xd8, 0x5a, 0x1d, 0x00, 0x79, 0x0a, 0x0b, 0xb3, 0xea, 0xe8, 0x12, 0x52, + 0xcd, 0x0d, 0x90, 0x5a, 0xf1, 0xa0, 0xeb, 0x3a, 0x6a, 0xa4, 0xaf, 0x8a, 0xe3, 0x57, 0xe8, 0x61, + 0x46, 0x15, 0x80, 0x22, 0x6b, 0xa8, 0x67, 0xa1, 0x81, 0x93, 0x67, 0x0d, 0xb7, 0x87, 0x7a, 0xbc, + 0xa4, 0xb9, 0x23, 0x90, 0x25, 0xba, 0x56, 0x58, 0x99, 0xdf, 0x50, 0xf8, 0x03, 0x96, 0x97, 0x52, + 0x31, 0xa2, 0x81, 0xaa, 0x6c, 0x4e, 0xd3, 0x02, 0xc8, 0x77, 0x99, 0x92, 0xac, 0xd6, 0x46, 0x96, + 0x84, 0x1a, 0xa3, 0x78, 0x5a, 0x1b, 0xd0, 0x81, 0x3f, 0x6a, 0x85, 0xbd, 0xe4, 0x45, 0xc3, 0xcf, + 0xd6, 0xf8, 0x27, 0x99, 0x4e, 0x2d, 0x3c, 0x59, 0xb3, 0x78, 0x86, 0xfa, 0x0c, 0x14, 0xbf, 0x00, + 0x46, 0xb8, 0x38, 0x97, 0x41, 0x7f, 0xe4, 0x85, 0xfe, 0xf8, 0xcd, 0x7d, 0x8e, 0x3d, 0xfa, 0xe8, + 0x82, 0xc7, 0xe2, 0x5c, 0x26, 0x3e, 0xbb, 0xf9, 0xc1, 0xfb, 0xa8, 0xa7, 0x6b, 0x5d, 0x81, 0x60, + 0xc0, 0x82, 0x81, 0x3d, 0xce, 0x1b, 0x61, 0xc8, 0x91, 0x7f, 0x2b, 0x89, 0xcf, 0xd0, 0xe3, 0x3b, + 0x57, 0x56, 0xc8, 0xcc, 0x0e, 0x9d, 0x9d, 0x2c, 0x7f, 0xfc, 0x72, 0xe3, 0x56, 0x3e, 0x37, 0x70, + 0x72, 0xe7, 0xda, 0xaf, 0xd5, 0xa3, 0x5f, 0x1e, 0x3a, 0xc8, 0x64, 0xb9, 0xa9, 0xc4, 0xd1, 0x6e, + 0xd3, 0x4e, 0x02, 0x5a, 0xd6, 0x2a, 0x83, 0x93, 0xd5, 0xa8, 0x9f, 0x78, 0x67, 0x93, 0x26, 0x94, + 0xcb, 0x82, 0x8a, 0x3c, 0x92, 0x2a, 0x8f, 0x73, 0x10, 0xf6, 0x21, 0xc4, 0xce, 0xa2, 0x15, 0xd7, + 0xff, 0x7c, 0x53, 0xef, 0xdd, 0xef, 0xef, 0xad, 0xd6, 0xf4, 0x74, 0x96, 0xee, 0xd8, 0xcc, 0xdb, + 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x21, 0xcc, 0xa8, 0x22, 0xce, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company_service.pb.go new file mode 100644 index 0000000..e67cdf3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/company_service.pb.go @@ -0,0 +1,668 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/company_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The Request of the CreateCompany method. +type CreateCompanyRequest struct { + // Required. + // + // Resource name of the tenant under which the company is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and a default tenant is created if unspecified, for + // example, "projects/api-test-project". 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The company to be created. + Company *Company `protobuf:"bytes,2,opt,name=company,proto3" json:"company,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCompanyRequest) Reset() { *m = CreateCompanyRequest{} } +func (m *CreateCompanyRequest) String() string { return proto.CompactTextString(m) } +func (*CreateCompanyRequest) ProtoMessage() {} +func (*CreateCompanyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{0} +} +func (m *CreateCompanyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCompanyRequest.Unmarshal(m, b) +} +func (m *CreateCompanyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCompanyRequest.Marshal(b, m, deterministic) +} +func (dst *CreateCompanyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCompanyRequest.Merge(dst, src) +} +func (m *CreateCompanyRequest) XXX_Size() int { + return xxx_messageInfo_CreateCompanyRequest.Size(m) +} +func (m *CreateCompanyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCompanyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCompanyRequest proto.InternalMessageInfo + +func (m *CreateCompanyRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateCompanyRequest) GetCompany() *Company { + if m != nil { + return m.Company + } + return nil +} + +// Request for getting a company by name. +type GetCompanyRequest struct { + // Required. + // + // The resource name of the company to be retrieved. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCompanyRequest) Reset() { *m = GetCompanyRequest{} } +func (m *GetCompanyRequest) String() string { return proto.CompactTextString(m) } +func (*GetCompanyRequest) ProtoMessage() {} +func (*GetCompanyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{1} +} +func (m *GetCompanyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCompanyRequest.Unmarshal(m, b) +} +func (m *GetCompanyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCompanyRequest.Marshal(b, m, deterministic) +} +func (dst *GetCompanyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCompanyRequest.Merge(dst, src) +} +func (m *GetCompanyRequest) XXX_Size() int { + return xxx_messageInfo_GetCompanyRequest.Size(m) +} +func (m *GetCompanyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCompanyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCompanyRequest proto.InternalMessageInfo + +func (m *GetCompanyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for updating a specified company. +type UpdateCompanyRequest struct { + // Required. + // + // The company resource to replace the current resource in the system. 
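A minimal usage sketch for the CreateCompany/GetCompany request types generated above, assuming an already-dialed, authenticated *grpc.ClientConn and a placeholder tenant path (the client type itself is generated further down in this file); this is illustrative only and not part of the vendored patch:

package example

import (
	"context"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	"google.golang.org/grpc"
)

// createExampleCompany creates a company with the two required fields set;
// conn is assumed to be an authenticated connection to the Talent API and the
// parent path below is a made-up placeholder.
func createExampleCompany(ctx context.Context, conn *grpc.ClientConn) (*talent.Company, error) {
	client := talent.NewCompanyServiceClient(conn)
	return client.CreateCompany(ctx, &talent.CreateCompanyRequest{
		Parent: "projects/my-project/tenants/my-tenant", // hypothetical parent resource
		Company: &talent.Company{
			DisplayName: "Example Corp",   // required
			ExternalId:  "example-corp-1", // required client-side identifier
		},
	})
}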
+ Company *Company `protobuf:"bytes,1,opt,name=company,proto3" json:"company,omitempty"` + // Optional but strongly recommended for the best service + // experience. + // + // If [update_mask][google.cloud.talent.v4beta1.UpdateCompanyRequest.update_mask] is provided, only the specified fields in + // [company][google.cloud.talent.v4beta1.UpdateCompanyRequest.company] are updated. Otherwise all the fields are updated. + // + // A field mask to specify the company fields to be updated. Only + // top level fields of [Company][google.cloud.talent.v4beta1.Company] are supported. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateCompanyRequest) Reset() { *m = UpdateCompanyRequest{} } +func (m *UpdateCompanyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateCompanyRequest) ProtoMessage() {} +func (*UpdateCompanyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{2} +} +func (m *UpdateCompanyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateCompanyRequest.Unmarshal(m, b) +} +func (m *UpdateCompanyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateCompanyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateCompanyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateCompanyRequest.Merge(dst, src) +} +func (m *UpdateCompanyRequest) XXX_Size() int { + return xxx_messageInfo_UpdateCompanyRequest.Size(m) +} +func (m *UpdateCompanyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateCompanyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateCompanyRequest proto.InternalMessageInfo + +func (m *UpdateCompanyRequest) GetCompany() *Company { + if m != nil { + return m.Company + } + return nil +} + +func (m *UpdateCompanyRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to delete a company. +type DeleteCompanyRequest struct { + // Required. + // + // The resource name of the company to be deleted. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". 
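The update_mask semantics documented above are easiest to see in use. A hedged sketch (client, context and resource name are assumed, not provided by this patch) of restricting an UpdateCompany call to a single field:

package example

import (
	"context"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

// updateDisplayName changes only display_name; without the update mask every
// field of the Company in the request would overwrite the stored resource.
func updateDisplayName(ctx context.Context, client talent.CompanyServiceClient, name, displayName string) (*talent.Company, error) {
	return client.UpdateCompany(ctx, &talent.UpdateCompanyRequest{
		Company: &talent.Company{
			Name:        name, // e.g. "projects/.../tenants/.../companies/..."
			DisplayName: displayName,
		},
		UpdateMask: &field_mask.FieldMask{Paths: []string{"display_name"}},
	})
}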
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteCompanyRequest) Reset() { *m = DeleteCompanyRequest{} } +func (m *DeleteCompanyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteCompanyRequest) ProtoMessage() {} +func (*DeleteCompanyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{3} +} +func (m *DeleteCompanyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteCompanyRequest.Unmarshal(m, b) +} +func (m *DeleteCompanyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteCompanyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteCompanyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteCompanyRequest.Merge(dst, src) +} +func (m *DeleteCompanyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteCompanyRequest.Size(m) +} +func (m *DeleteCompanyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteCompanyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteCompanyRequest proto.InternalMessageInfo + +func (m *DeleteCompanyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// List companies for which the client has ACL visibility. +type ListCompaniesRequest struct { + // Required. + // + // Resource name of the tenant under which the company is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. + // + // The starting indicator from which to return results. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The maximum number of companies to be returned, at most 100. + // Default is 100 if a non-positive number is provided. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // Set to true if the companies requested must have open jobs. + // + // Defaults to false. + // + // If true, at most [page_size][google.cloud.talent.v4beta1.ListCompaniesRequest.page_size] of companies are fetched, among which + // only those with open jobs are returned. 
+ RequireOpenJobs bool `protobuf:"varint,4,opt,name=require_open_jobs,json=requireOpenJobs,proto3" json:"require_open_jobs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCompaniesRequest) Reset() { *m = ListCompaniesRequest{} } +func (m *ListCompaniesRequest) String() string { return proto.CompactTextString(m) } +func (*ListCompaniesRequest) ProtoMessage() {} +func (*ListCompaniesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{4} +} +func (m *ListCompaniesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCompaniesRequest.Unmarshal(m, b) +} +func (m *ListCompaniesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCompaniesRequest.Marshal(b, m, deterministic) +} +func (dst *ListCompaniesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCompaniesRequest.Merge(dst, src) +} +func (m *ListCompaniesRequest) XXX_Size() int { + return xxx_messageInfo_ListCompaniesRequest.Size(m) +} +func (m *ListCompaniesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCompaniesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCompaniesRequest proto.InternalMessageInfo + +func (m *ListCompaniesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListCompaniesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListCompaniesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListCompaniesRequest) GetRequireOpenJobs() bool { + if m != nil { + return m.RequireOpenJobs + } + return false +} + +// Output only. +// +// The List companies response object. +type ListCompaniesResponse struct { + // Companies for the current client. + Companies []*Company `protobuf:"bytes,1,rep,name=companies,proto3" json:"companies,omitempty"` + // A token to retrieve the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. 
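The page_size/page_token comments above describe ordinary list pagination; as a hedged sketch (client and parent path assumed), walking every page until next_page_token comes back empty looks like this:

package example

import (
	"context"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

// listAllCompanies collects every page of companies under the given parent,
// following next_page_token until the service returns an empty token.
func listAllCompanies(ctx context.Context, client talent.CompanyServiceClient, parent string) ([]*talent.Company, error) {
	var all []*talent.Company
	token := ""
	for {
		resp, err := client.ListCompanies(ctx, &talent.ListCompaniesRequest{
			Parent:    parent,
			PageSize:  100,   // the maximum documented page size
			PageToken: token, // empty on the first request
		})
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetCompanies()...)
		token = resp.GetNextPageToken()
		if token == "" {
			return all, nil
		}
	}
}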
+ Metadata *ResponseMetadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCompaniesResponse) Reset() { *m = ListCompaniesResponse{} } +func (m *ListCompaniesResponse) String() string { return proto.CompactTextString(m) } +func (*ListCompaniesResponse) ProtoMessage() {} +func (*ListCompaniesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_company_service_29fd10eb96c7a7f1, []int{5} +} +func (m *ListCompaniesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCompaniesResponse.Unmarshal(m, b) +} +func (m *ListCompaniesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCompaniesResponse.Marshal(b, m, deterministic) +} +func (dst *ListCompaniesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCompaniesResponse.Merge(dst, src) +} +func (m *ListCompaniesResponse) XXX_Size() int { + return xxx_messageInfo_ListCompaniesResponse.Size(m) +} +func (m *ListCompaniesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCompaniesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCompaniesResponse proto.InternalMessageInfo + +func (m *ListCompaniesResponse) GetCompanies() []*Company { + if m != nil { + return m.Companies + } + return nil +} + +func (m *ListCompaniesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListCompaniesResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func init() { + proto.RegisterType((*CreateCompanyRequest)(nil), "google.cloud.talent.v4beta1.CreateCompanyRequest") + proto.RegisterType((*GetCompanyRequest)(nil), "google.cloud.talent.v4beta1.GetCompanyRequest") + proto.RegisterType((*UpdateCompanyRequest)(nil), "google.cloud.talent.v4beta1.UpdateCompanyRequest") + proto.RegisterType((*DeleteCompanyRequest)(nil), "google.cloud.talent.v4beta1.DeleteCompanyRequest") + proto.RegisterType((*ListCompaniesRequest)(nil), "google.cloud.talent.v4beta1.ListCompaniesRequest") + proto.RegisterType((*ListCompaniesResponse)(nil), "google.cloud.talent.v4beta1.ListCompaniesResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CompanyServiceClient is the client API for CompanyService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CompanyServiceClient interface { + // Creates a new company entity. + CreateCompany(ctx context.Context, in *CreateCompanyRequest, opts ...grpc.CallOption) (*Company, error) + // Retrieves specified company. + GetCompany(ctx context.Context, in *GetCompanyRequest, opts ...grpc.CallOption) (*Company, error) + // Updates specified company. + UpdateCompany(ctx context.Context, in *UpdateCompanyRequest, opts ...grpc.CallOption) (*Company, error) + // Deletes specified company. + // Prerequisite: The company has no jobs associated with it. + DeleteCompany(ctx context.Context, in *DeleteCompanyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists all companies associated with the project. 
+ ListCompanies(ctx context.Context, in *ListCompaniesRequest, opts ...grpc.CallOption) (*ListCompaniesResponse, error) +} + +type companyServiceClient struct { + cc *grpc.ClientConn +} + +func NewCompanyServiceClient(cc *grpc.ClientConn) CompanyServiceClient { + return &companyServiceClient{cc} +} + +func (c *companyServiceClient) CreateCompany(ctx context.Context, in *CreateCompanyRequest, opts ...grpc.CallOption) (*Company, error) { + out := new(Company) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.CompanyService/CreateCompany", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *companyServiceClient) GetCompany(ctx context.Context, in *GetCompanyRequest, opts ...grpc.CallOption) (*Company, error) { + out := new(Company) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.CompanyService/GetCompany", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *companyServiceClient) UpdateCompany(ctx context.Context, in *UpdateCompanyRequest, opts ...grpc.CallOption) (*Company, error) { + out := new(Company) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.CompanyService/UpdateCompany", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *companyServiceClient) DeleteCompany(ctx context.Context, in *DeleteCompanyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.CompanyService/DeleteCompany", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *companyServiceClient) ListCompanies(ctx context.Context, in *ListCompaniesRequest, opts ...grpc.CallOption) (*ListCompaniesResponse, error) { + out := new(ListCompaniesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.CompanyService/ListCompanies", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CompanyServiceServer is the server API for CompanyService service. +type CompanyServiceServer interface { + // Creates a new company entity. + CreateCompany(context.Context, *CreateCompanyRequest) (*Company, error) + // Retrieves specified company. + GetCompany(context.Context, *GetCompanyRequest) (*Company, error) + // Updates specified company. + UpdateCompany(context.Context, *UpdateCompanyRequest) (*Company, error) + // Deletes specified company. + // Prerequisite: The company has no jobs associated with it. + DeleteCompany(context.Context, *DeleteCompanyRequest) (*empty.Empty, error) + // Lists all companies associated with the project. 
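The server interface mirrors the client one. A hedged sketch of wiring a stub implementation into a grpc.Server (the listener address and the placeholder behaviour are assumptions, not anything this patch provides):

package main

import (
	"context"
	"log"
	"net"

	"github.com/golang/protobuf/ptypes/empty"
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	"google.golang.org/grpc"
)

// stubCompanyService satisfies CompanyServiceServer with placeholder logic.
type stubCompanyService struct{}

func (s *stubCompanyService) CreateCompany(ctx context.Context, req *talent.CreateCompanyRequest) (*talent.Company, error) {
	return req.GetCompany(), nil
}

func (s *stubCompanyService) GetCompany(ctx context.Context, req *talent.GetCompanyRequest) (*talent.Company, error) {
	return &talent.Company{Name: req.GetName()}, nil
}

func (s *stubCompanyService) UpdateCompany(ctx context.Context, req *talent.UpdateCompanyRequest) (*talent.Company, error) {
	return req.GetCompany(), nil
}

func (s *stubCompanyService) DeleteCompany(ctx context.Context, req *talent.DeleteCompanyRequest) (*empty.Empty, error) {
	return &empty.Empty{}, nil
}

func (s *stubCompanyService) ListCompanies(ctx context.Context, req *talent.ListCompaniesRequest) (*talent.ListCompaniesResponse, error) {
	return &talent.ListCompaniesResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":50051") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	srv := grpc.NewServer()
	talent.RegisterCompanyServiceServer(srv, &stubCompanyService{})
	log.Fatal(srv.Serve(lis))
}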
+ ListCompanies(context.Context, *ListCompaniesRequest) (*ListCompaniesResponse, error) +} + +func RegisterCompanyServiceServer(s *grpc.Server, srv CompanyServiceServer) { + s.RegisterService(&_CompanyService_serviceDesc, srv) +} + +func _CompanyService_CreateCompany_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCompanyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompanyServiceServer).CreateCompany(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.CompanyService/CreateCompany", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CompanyServiceServer).CreateCompany(ctx, req.(*CreateCompanyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CompanyService_GetCompany_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCompanyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompanyServiceServer).GetCompany(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.CompanyService/GetCompany", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CompanyServiceServer).GetCompany(ctx, req.(*GetCompanyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CompanyService_UpdateCompany_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateCompanyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompanyServiceServer).UpdateCompany(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.CompanyService/UpdateCompany", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CompanyServiceServer).UpdateCompany(ctx, req.(*UpdateCompanyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CompanyService_DeleteCompany_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteCompanyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompanyServiceServer).DeleteCompany(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.CompanyService/DeleteCompany", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CompanyServiceServer).DeleteCompany(ctx, req.(*DeleteCompanyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CompanyService_ListCompanies_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCompaniesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompanyServiceServer).ListCompanies(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.CompanyService/ListCompanies", + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(CompanyServiceServer).ListCompanies(ctx, req.(*ListCompaniesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CompanyService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.CompanyService", + HandlerType: (*CompanyServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateCompany", + Handler: _CompanyService_CreateCompany_Handler, + }, + { + MethodName: "GetCompany", + Handler: _CompanyService_GetCompany_Handler, + }, + { + MethodName: "UpdateCompany", + Handler: _CompanyService_UpdateCompany_Handler, + }, + { + MethodName: "DeleteCompany", + Handler: _CompanyService_DeleteCompany_Handler, + }, + { + MethodName: "ListCompanies", + Handler: _CompanyService_ListCompanies_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/company_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/company_service.proto", fileDescriptor_company_service_29fd10eb96c7a7f1) +} + +var fileDescriptor_company_service_29fd10eb96c7a7f1 = []byte{ + // 720 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0x4f, 0x6f, 0xd3, 0x4a, + 0x10, 0xd7, 0xb6, 0x7d, 0x7d, 0xc9, 0x56, 0x79, 0x55, 0xf7, 0xe5, 0x55, 0x51, 0xfa, 0x10, 0x91, + 0x85, 0x4a, 0x08, 0xc2, 0xa6, 0x29, 0x07, 0x44, 0x05, 0x12, 0x2d, 0x7f, 0x04, 0xa2, 0xa2, 0x72, + 0xcb, 0xa5, 0x42, 0xb2, 0xd6, 0xc9, 0xd4, 0xb8, 0x8d, 0x77, 0x5d, 0xef, 0xa6, 0x2a, 0x45, 0x3d, + 0xc0, 0x57, 0xa8, 0xc4, 0x07, 0xe0, 0x1b, 0xf0, 0x05, 0x38, 0x72, 0xe4, 0x00, 0x37, 0x8e, 0x88, + 0x0f, 0x82, 0x6c, 0xaf, 0x93, 0x3a, 0x31, 0x8e, 0xe9, 0xcd, 0x3b, 0x3b, 0xbf, 0x99, 0xdf, 0x6f, + 0x66, 0x67, 0x8c, 0x57, 0x1c, 0xce, 0x9d, 0x1e, 0x18, 0x9d, 0x1e, 0xef, 0x77, 0x0d, 0x49, 0x7b, + 0xc0, 0xa4, 0x71, 0x74, 0xcb, 0x06, 0x49, 0x57, 0x8c, 0x0e, 0xf7, 0x7c, 0xca, 0x5e, 0x5b, 0x02, + 0x82, 0x23, 0xb7, 0x03, 0xba, 0x1f, 0x70, 0xc9, 0xc9, 0x52, 0x0c, 0xd1, 0x23, 0x88, 0x1e, 0x43, + 0x74, 0x05, 0xa9, 0xff, 0xaf, 0xe2, 0x51, 0xdf, 0x35, 0x28, 0x63, 0x5c, 0x52, 0xe9, 0x72, 0x26, + 0x62, 0x68, 0xbd, 0x39, 0x21, 0x9b, 0xc7, 0x99, 0xf2, 0xbc, 0x56, 0x80, 0x97, 0x72, 0x55, 0x7c, + 0x8c, 0xe8, 0x64, 0xf7, 0xf7, 0x0c, 0xf0, 0x7c, 0x99, 0x5c, 0x36, 0x46, 0x2f, 0xf7, 0x5c, 0xe8, + 0x75, 0x2d, 0x8f, 0x8a, 0x83, 0xd8, 0x43, 0x63, 0xb8, 0xba, 0x11, 0x00, 0x95, 0xb0, 0x11, 0x47, + 0x35, 0xe1, 0xb0, 0x0f, 0x42, 0x92, 0x45, 0x3c, 0xeb, 0xd3, 0x00, 0x98, 0xac, 0xa1, 0x06, 0x6a, + 0x96, 0x4d, 0x75, 0x22, 0xf7, 0xf0, 0xdf, 0x2a, 0x7f, 0x6d, 0xaa, 0x81, 0x9a, 0x73, 0xed, 0x2b, + 0x7a, 0x4e, 0x41, 0xf4, 0x24, 0x6a, 0x02, 0xd2, 0xae, 0xe2, 0x85, 0xc7, 0x20, 0x47, 0x92, 0x11, + 0x3c, 0xc3, 0xa8, 0x07, 0x2a, 0x55, 0xf4, 0xad, 0x9d, 0x21, 0x5c, 0x7d, 0xe1, 0x77, 0xc7, 0x99, + 0x9d, 0x63, 0x80, 0x2e, 0xc0, 0x80, 0xac, 0xe1, 0xb9, 0x7e, 0x14, 0x37, 0x2a, 0x83, 0x52, 0x51, + 0x4f, 0x62, 0x24, 0x95, 0xd2, 0x1f, 0x85, 0x95, 0xda, 0xa4, 0xe2, 0xc0, 0xc4, 0xb1, 0x7b, 0xf8, + 0xad, 0xb5, 0x70, 0xf5, 0x01, 0xf4, 0x60, 0x8c, 0x54, 0x96, 0x82, 0xf7, 0x08, 0x57, 0x9f, 0xb9, + 0x42, 0x89, 0x75, 0x41, 0x4c, 0xaa, 0xed, 0x25, 0x8c, 0x7d, 0xea, 0x80, 0x25, 0xf9, 0x01, 0xb0, + 0x88, 0x58, 0xd9, 0x2c, 0x87, 0x96, 0x9d, 0xd0, 0x40, 0x96, 0x70, 0x74, 0xb0, 0x84, 0x7b, 0x02, + 0xb5, 0xe9, 0x06, 0x6a, 0xfe, 0x65, 0x96, 0x42, 0xc3, 0xb6, 0x7b, 0x02, 0xa4, 0x85, 0x17, 0x02, + 0x38, 0xec, 0xbb, 0x01, 0x58, 0xdc, 0x07, 0x66, 0xed, 0x73, 0x5b, 0xd4, 0x66, 0x1a, 0xa8, 0x59, + 0x32, 0xe7, 0xd5, 0xc5, 0x73, 0x1f, 0xd8, 
0x53, 0x6e, 0x0b, 0xed, 0x0b, 0xc2, 0xff, 0x8d, 0x10, + 0x13, 0x3e, 0x67, 0x02, 0xc8, 0x3a, 0x2e, 0x77, 0x12, 0x63, 0x0d, 0x35, 0xa6, 0x0b, 0x57, 0x77, + 0x08, 0x23, 0xcb, 0x78, 0x9e, 0xc1, 0xb1, 0xb4, 0xc6, 0xa4, 0x54, 0x42, 0xf3, 0xd6, 0x40, 0xce, + 0x13, 0x5c, 0xf2, 0x40, 0xd2, 0x2e, 0x95, 0x34, 0x52, 0x33, 0xd7, 0xbe, 0x91, 0x9b, 0x2a, 0x21, + 0xb9, 0xa9, 0x40, 0xe6, 0x00, 0xde, 0xfe, 0x58, 0xc2, 0xff, 0x28, 0x26, 0xdb, 0xf1, 0xb0, 0x92, + 0xaf, 0x08, 0x57, 0x52, 0x0f, 0x9b, 0xac, 0xe4, 0x0b, 0xc9, 0x18, 0x82, 0x7a, 0x21, 0xed, 0xda, + 0xab, 0x77, 0xdf, 0x7e, 0x9e, 0x4d, 0xd9, 0xda, 0xcd, 0xc1, 0x84, 0xbe, 0x89, 0x1b, 0x7a, 0xd7, + 0x0f, 0xf8, 0x3e, 0x74, 0xa4, 0x30, 0x5a, 0x86, 0x04, 0x46, 0x59, 0xf8, 0x75, 0x6a, 0x0c, 0x4a, + 0x75, 0x07, 0xb5, 0x76, 0xaf, 0x6b, 0xcb, 0x39, 0xb0, 0xb4, 0x33, 0xf9, 0x8c, 0x30, 0x1e, 0x4e, + 0x0f, 0xd1, 0x73, 0xe9, 0x8d, 0x8d, 0x59, 0x41, 0x39, 0x76, 0x24, 0xe7, 0x25, 0x39, 0x27, 0x27, + 0x7c, 0xce, 0x99, 0x62, 0x86, 0xf4, 0x8c, 0xd6, 0xe9, 0x6e, 0x93, 0x2c, 0xff, 0x1e, 0x73, 0xde, + 0x93, 0xfc, 0x40, 0xb8, 0x92, 0x1a, 0xee, 0x09, 0xdd, 0xc9, 0x5a, 0x04, 0x05, 0xe5, 0x1c, 0x47, + 0x72, 0x82, 0xf6, 0xed, 0x21, 0xb5, 0x64, 0x81, 0x16, 0x93, 0x15, 0x76, 0x69, 0xb5, 0xad, 0x4f, + 0x86, 0x8f, 0x80, 0xc8, 0x27, 0x84, 0x2b, 0xa9, 0x65, 0x31, 0x41, 0x64, 0xd6, 0x62, 0xa9, 0x2f, + 0x8e, 0x2d, 0xa6, 0x87, 0xe1, 0x7e, 0x4f, 0xba, 0xd4, 0xba, 0x40, 0x97, 0x5a, 0x45, 0xbb, 0xf4, + 0x1d, 0xe1, 0x4a, 0x6a, 0x4f, 0x4c, 0x10, 0x90, 0xb5, 0xec, 0xea, 0xed, 0x3f, 0x81, 0xc4, 0x13, + 0x9e, 0xf5, 0x04, 0x8b, 0x4d, 0x54, 0xfa, 0x09, 0xe6, 0x8d, 0xd3, 0xfa, 0x5b, 0x84, 0x2f, 0x77, + 0xb8, 0x97, 0xc7, 0x6e, 0xfd, 0xdf, 0xf4, 0x52, 0xd9, 0x0a, 0x5b, 0xb0, 0x85, 0x76, 0xef, 0x2b, + 0x8c, 0xc3, 0x7b, 0x94, 0x39, 0x3a, 0x0f, 0x1c, 0xc3, 0x01, 0x16, 0x35, 0xc8, 0x88, 0xaf, 0xa8, + 0xef, 0x8a, 0xcc, 0x9f, 0xf7, 0x5a, 0x7c, 0xfc, 0x30, 0x35, 0xbd, 0xb1, 0xb3, 0x6d, 0xcf, 0x46, + 0x98, 0xd5, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xa5, 0x8c, 0xa4, 0x83, 0x87, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/completion_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/completion_service.pb.go new file mode 100644 index 0000000..b2024df --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/completion_service.pb.go @@ -0,0 +1,480 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/completion_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to specify the scope of completion. +type CompleteQueryRequest_CompletionScope int32 + +const ( + // Default value. 
+ CompleteQueryRequest_COMPLETION_SCOPE_UNSPECIFIED CompleteQueryRequest_CompletionScope = 0 + // Suggestions are based only on the data provided by the client. + CompleteQueryRequest_TENANT CompleteQueryRequest_CompletionScope = 1 + // Suggestions are based on all jobs data in the system that's visible to + // the client + CompleteQueryRequest_PUBLIC CompleteQueryRequest_CompletionScope = 2 +) + +var CompleteQueryRequest_CompletionScope_name = map[int32]string{ + 0: "COMPLETION_SCOPE_UNSPECIFIED", + 1: "TENANT", + 2: "PUBLIC", +} +var CompleteQueryRequest_CompletionScope_value = map[string]int32{ + "COMPLETION_SCOPE_UNSPECIFIED": 0, + "TENANT": 1, + "PUBLIC": 2, +} + +func (x CompleteQueryRequest_CompletionScope) String() string { + return proto.EnumName(CompleteQueryRequest_CompletionScope_name, int32(x)) +} +func (CompleteQueryRequest_CompletionScope) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_completion_service_14ea22e15a1c2a15, []int{0, 0} +} + +// Enum to specify auto-completion topics. +type CompleteQueryRequest_CompletionType int32 + +const ( + // Default value. + CompleteQueryRequest_COMPLETION_TYPE_UNSPECIFIED CompleteQueryRequest_CompletionType = 0 + // Only suggest job titles. + CompleteQueryRequest_JOB_TITLE CompleteQueryRequest_CompletionType = 1 + // Only suggest company names. + CompleteQueryRequest_COMPANY_NAME CompleteQueryRequest_CompletionType = 2 + // Suggest both job titles and company names. + CompleteQueryRequest_COMBINED CompleteQueryRequest_CompletionType = 3 +) + +var CompleteQueryRequest_CompletionType_name = map[int32]string{ + 0: "COMPLETION_TYPE_UNSPECIFIED", + 1: "JOB_TITLE", + 2: "COMPANY_NAME", + 3: "COMBINED", +} +var CompleteQueryRequest_CompletionType_value = map[string]int32{ + "COMPLETION_TYPE_UNSPECIFIED": 0, + "JOB_TITLE": 1, + "COMPANY_NAME": 2, + "COMBINED": 3, +} + +func (x CompleteQueryRequest_CompletionType) String() string { + return proto.EnumName(CompleteQueryRequest_CompletionType_name, int32(x)) +} +func (CompleteQueryRequest_CompletionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_completion_service_14ea22e15a1c2a15, []int{0, 1} +} + +// Input only. +// +// Auto-complete parameters. +type CompleteQueryRequest struct { + // Required. + // + // Resource name of tenant the completion is performed within. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The query used to generate suggestions. + // + // The maximum number of allowed characters is 255. + Query string `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + // Optional. + // + // The list of languages of the query. This is + // the BCP-47 language code, such as "en-US" or "sr-Latn". + // For more information, see + // [Tags for Identifying Languages](https://tools.ietf.org/html/bcp47). + // + // For [CompletionType.JOB_TITLE][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType.JOB_TITLE] type, only open jobs with the same + // [language_codes][google.cloud.talent.v4beta1.CompleteQueryRequest.language_codes] are returned. 
+ // + // For [CompletionType.COMPANY_NAME][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType.COMPANY_NAME] type, + // only companies having open jobs with the same [language_codes][google.cloud.talent.v4beta1.CompleteQueryRequest.language_codes] are + // returned. + // + // For [CompletionType.COMBINED][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType.COMBINED] type, only open jobs with the same + // [language_codes][google.cloud.talent.v4beta1.CompleteQueryRequest.language_codes] or companies having open jobs with the same + // [language_codes][google.cloud.talent.v4beta1.CompleteQueryRequest.language_codes] are returned. + // + // The maximum number of allowed characters is 255. + LanguageCodes []string `protobuf:"bytes,3,rep,name=language_codes,json=languageCodes,proto3" json:"language_codes,omitempty"` + // Required. + // + // Completion result count. + // + // The maximum allowed page size is 10. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // If provided, restricts completion to specified company. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". + Company string `protobuf:"bytes,5,opt,name=company,proto3" json:"company,omitempty"` + // Optional. + // + // The scope of the completion. The defaults is [CompletionScope.PUBLIC][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionScope.PUBLIC]. + Scope CompleteQueryRequest_CompletionScope `protobuf:"varint,6,opt,name=scope,proto3,enum=google.cloud.talent.v4beta1.CompleteQueryRequest_CompletionScope" json:"scope,omitempty"` + // Optional. + // + // The completion topic. The default is [CompletionType.COMBINED][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType.COMBINED]. 
+ Type CompleteQueryRequest_CompletionType `protobuf:"varint,7,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.CompleteQueryRequest_CompletionType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteQueryRequest) Reset() { *m = CompleteQueryRequest{} } +func (m *CompleteQueryRequest) String() string { return proto.CompactTextString(m) } +func (*CompleteQueryRequest) ProtoMessage() {} +func (*CompleteQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_completion_service_14ea22e15a1c2a15, []int{0} +} +func (m *CompleteQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteQueryRequest.Unmarshal(m, b) +} +func (m *CompleteQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteQueryRequest.Marshal(b, m, deterministic) +} +func (dst *CompleteQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteQueryRequest.Merge(dst, src) +} +func (m *CompleteQueryRequest) XXX_Size() int { + return xxx_messageInfo_CompleteQueryRequest.Size(m) +} +func (m *CompleteQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteQueryRequest proto.InternalMessageInfo + +func (m *CompleteQueryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CompleteQueryRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *CompleteQueryRequest) GetLanguageCodes() []string { + if m != nil { + return m.LanguageCodes + } + return nil +} + +func (m *CompleteQueryRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *CompleteQueryRequest) GetCompany() string { + if m != nil { + return m.Company + } + return "" +} + +func (m *CompleteQueryRequest) GetScope() CompleteQueryRequest_CompletionScope { + if m != nil { + return m.Scope + } + return CompleteQueryRequest_COMPLETION_SCOPE_UNSPECIFIED +} + +func (m *CompleteQueryRequest) GetType() CompleteQueryRequest_CompletionType { + if m != nil { + return m.Type + } + return CompleteQueryRequest_COMPLETION_TYPE_UNSPECIFIED +} + +// Output only. +// +// Response of auto-complete query. +type CompleteQueryResponse struct { + // Results of the matching job/company candidates. + CompletionResults []*CompleteQueryResponse_CompletionResult `protobuf:"bytes,1,rep,name=completion_results,json=completionResults,proto3" json:"completion_results,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. 
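The CompletionScope and CompletionType enums above control what CompleteQuery suggests. A hedged sketch of a request asking only for job-title suggestions within the caller's own tenant data (all values are examples):

package example

import (
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

// newTitleCompletionRequest builds a CompleteQueryRequest for job-title
// suggestions scoped to the client's own tenant data.
func newTitleCompletionRequest(parent, partialQuery string) *talent.CompleteQueryRequest {
	return &talent.CompleteQueryRequest{
		Parent:        parent,       // e.g. "projects/my-project/tenants/my-tenant"
		Query:         partialQuery, // what the user has typed so far
		LanguageCodes: []string{"en-US"},
		PageSize:      5, // the comments above cap this at 10
		Scope:         talent.CompleteQueryRequest_TENANT,    // only this client's data
		Type:          talent.CompleteQueryRequest_JOB_TITLE, // job titles only
	}
}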
+ Metadata *ResponseMetadata `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteQueryResponse) Reset() { *m = CompleteQueryResponse{} } +func (m *CompleteQueryResponse) String() string { return proto.CompactTextString(m) } +func (*CompleteQueryResponse) ProtoMessage() {} +func (*CompleteQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_completion_service_14ea22e15a1c2a15, []int{1} +} +func (m *CompleteQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteQueryResponse.Unmarshal(m, b) +} +func (m *CompleteQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteQueryResponse.Marshal(b, m, deterministic) +} +func (dst *CompleteQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteQueryResponse.Merge(dst, src) +} +func (m *CompleteQueryResponse) XXX_Size() int { + return xxx_messageInfo_CompleteQueryResponse.Size(m) +} +func (m *CompleteQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteQueryResponse proto.InternalMessageInfo + +func (m *CompleteQueryResponse) GetCompletionResults() []*CompleteQueryResponse_CompletionResult { + if m != nil { + return m.CompletionResults + } + return nil +} + +func (m *CompleteQueryResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +// Output only. +// +// Resource that represents completion results. +type CompleteQueryResponse_CompletionResult struct { + // The suggestion for the query. + Suggestion string `protobuf:"bytes,1,opt,name=suggestion,proto3" json:"suggestion,omitempty"` + // The completion topic. + Type CompleteQueryRequest_CompletionType `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.CompleteQueryRequest_CompletionType" json:"type,omitempty"` + // The URI of the company image for [CompletionType.COMPANY_NAME][]. 
+ ImageUri string `protobuf:"bytes,3,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteQueryResponse_CompletionResult) Reset() { + *m = CompleteQueryResponse_CompletionResult{} +} +func (m *CompleteQueryResponse_CompletionResult) String() string { return proto.CompactTextString(m) } +func (*CompleteQueryResponse_CompletionResult) ProtoMessage() {} +func (*CompleteQueryResponse_CompletionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_completion_service_14ea22e15a1c2a15, []int{1, 0} +} +func (m *CompleteQueryResponse_CompletionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteQueryResponse_CompletionResult.Unmarshal(m, b) +} +func (m *CompleteQueryResponse_CompletionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteQueryResponse_CompletionResult.Marshal(b, m, deterministic) +} +func (dst *CompleteQueryResponse_CompletionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteQueryResponse_CompletionResult.Merge(dst, src) +} +func (m *CompleteQueryResponse_CompletionResult) XXX_Size() int { + return xxx_messageInfo_CompleteQueryResponse_CompletionResult.Size(m) +} +func (m *CompleteQueryResponse_CompletionResult) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteQueryResponse_CompletionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteQueryResponse_CompletionResult proto.InternalMessageInfo + +func (m *CompleteQueryResponse_CompletionResult) GetSuggestion() string { + if m != nil { + return m.Suggestion + } + return "" +} + +func (m *CompleteQueryResponse_CompletionResult) GetType() CompleteQueryRequest_CompletionType { + if m != nil { + return m.Type + } + return CompleteQueryRequest_COMPLETION_TYPE_UNSPECIFIED +} + +func (m *CompleteQueryResponse_CompletionResult) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func init() { + proto.RegisterType((*CompleteQueryRequest)(nil), "google.cloud.talent.v4beta1.CompleteQueryRequest") + proto.RegisterType((*CompleteQueryResponse)(nil), "google.cloud.talent.v4beta1.CompleteQueryResponse") + proto.RegisterType((*CompleteQueryResponse_CompletionResult)(nil), "google.cloud.talent.v4beta1.CompleteQueryResponse.CompletionResult") + proto.RegisterEnum("google.cloud.talent.v4beta1.CompleteQueryRequest_CompletionScope", CompleteQueryRequest_CompletionScope_name, CompleteQueryRequest_CompletionScope_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CompleteQueryRequest_CompletionType", CompleteQueryRequest_CompletionType_name, CompleteQueryRequest_CompletionType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CompletionClient is the client API for Completion service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CompletionClient interface { + // Completes the specified prefix with keyword suggestions. + // Intended for use by a job search auto-complete search box. 
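Putting the Completion client to work for the auto-complete box described above, as a hedged sketch (an authenticated *grpc.ClientConn and a placeholder parent path are assumed):

package example

import (
	"context"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	"google.golang.org/grpc"
)

// suggestTitles returns the raw suggestion strings for a partially typed query.
func suggestTitles(ctx context.Context, conn *grpc.ClientConn, parent, partial string) ([]string, error) {
	client := talent.NewCompletionClient(conn)
	resp, err := client.CompleteQuery(ctx, &talent.CompleteQueryRequest{
		Parent:   parent,
		Query:    partial,
		PageSize: 5,
		Type:     talent.CompleteQueryRequest_JOB_TITLE,
	})
	if err != nil {
		return nil, err
	}
	var suggestions []string
	for _, r := range resp.GetCompletionResults() {
		suggestions = append(suggestions, r.GetSuggestion())
	}
	return suggestions, nil
}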
+ CompleteQuery(ctx context.Context, in *CompleteQueryRequest, opts ...grpc.CallOption) (*CompleteQueryResponse, error) +} + +type completionClient struct { + cc *grpc.ClientConn +} + +func NewCompletionClient(cc *grpc.ClientConn) CompletionClient { + return &completionClient{cc} +} + +func (c *completionClient) CompleteQuery(ctx context.Context, in *CompleteQueryRequest, opts ...grpc.CallOption) (*CompleteQueryResponse, error) { + out := new(CompleteQueryResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.Completion/CompleteQuery", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CompletionServer is the server API for Completion service. +type CompletionServer interface { + // Completes the specified prefix with keyword suggestions. + // Intended for use by a job search auto-complete search box. + CompleteQuery(context.Context, *CompleteQueryRequest) (*CompleteQueryResponse, error) +} + +func RegisterCompletionServer(s *grpc.Server, srv CompletionServer) { + s.RegisterService(&_Completion_serviceDesc, srv) +} + +func _Completion_CompleteQuery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CompleteQueryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CompletionServer).CompleteQuery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.Completion/CompleteQuery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CompletionServer).CompleteQuery(ctx, req.(*CompleteQueryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Completion_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.Completion", + HandlerType: (*CompletionServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CompleteQuery", + Handler: _Completion_CompleteQuery_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/completion_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/completion_service.proto", fileDescriptor_completion_service_14ea22e15a1c2a15) +} + +var fileDescriptor_completion_service_14ea22e15a1c2a15 = []byte{ + // 652 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0x6f, 0x6b, 0xd3, 0x5e, + 0x14, 0xc7, 0x7f, 0x49, 0xd7, 0xae, 0x3d, 0xfb, 0xf3, 0x8b, 0x97, 0x39, 0x42, 0x37, 0x5c, 0x29, + 0x0c, 0x8b, 0x60, 0xc2, 0xea, 0x1e, 0x29, 0x82, 0x6d, 0x16, 0x21, 0xba, 0xa6, 0x35, 0xcd, 0x90, + 0xed, 0x49, 0x77, 0x97, 0x5d, 0x42, 0xa4, 0xcd, 0xcd, 0x72, 0x6f, 0x06, 0x9d, 0x08, 0x22, 0xbe, + 0x03, 0xdf, 0x81, 0xe0, 0x2b, 0xf2, 0x89, 0x2f, 0x40, 0xf0, 0x5d, 0x88, 0xdc, 0x24, 0xdd, 0xba, + 0x3a, 0x2a, 0x43, 0x9f, 0xe5, 0x7c, 0x73, 0xbe, 0x9f, 0x73, 0x73, 0x72, 0xee, 0x81, 0x5d, 0x9f, + 0x52, 0x7f, 0x48, 0x74, 0x6f, 0x48, 0x93, 0x53, 0x9d, 0xe3, 0x21, 0x09, 0xb9, 0x7e, 0xbe, 0x7b, + 0x42, 0x38, 0xde, 0xd1, 0x3d, 0x3a, 0x8a, 0x86, 0x84, 0x07, 0x34, 0x1c, 0x30, 0x12, 0x9f, 0x07, + 0x1e, 0xd1, 0xa2, 0x98, 0x72, 0x8a, 0x36, 0x32, 0x97, 0x96, 0xba, 0xb4, 0xcc, 0xa5, 0xe5, 0xae, + 0xea, 0x66, 0x8e, 0xc4, 0x51, 0xa0, 0xe3, 0x30, 0xa4, 0x1c, 0x0b, 0x02, 0xcb, 0xac, 0xd5, 0xc6, + 0x1f, 0x0a, 0x8e, 0x68, 0x98, 0x65, 0xd6, 0xdf, 0x2f, 0xc0, 0x9a, 0x91, 0x9d, 0x80, 0xbc, 0x4a, + 0x48, 0x3c, 0x76, 0xc8, 0x59, 0x42, 0x18, 0x47, 0xeb, 0x50, 0x8a, 0x70, 0x4c, 
0x42, 0xae, 0x4a, + 0x35, 0xa9, 0x51, 0x71, 0xf2, 0x08, 0xad, 0x41, 0xf1, 0x4c, 0xe4, 0xa9, 0x72, 0x2a, 0x67, 0x01, + 0xda, 0x86, 0xd5, 0x21, 0x0e, 0xfd, 0x04, 0xfb, 0x64, 0xe0, 0xd1, 0x53, 0xc2, 0xd4, 0x42, 0xad, + 0xd0, 0xa8, 0x38, 0x2b, 0x13, 0xd5, 0x10, 0x22, 0xda, 0x80, 0x4a, 0x24, 0x52, 0x58, 0x70, 0x41, + 0xd4, 0x85, 0x9a, 0xd4, 0x28, 0x3a, 0x65, 0x21, 0xf4, 0x83, 0x0b, 0x82, 0x54, 0x58, 0x14, 0xbd, + 0xc0, 0xe1, 0x58, 0x2d, 0xa6, 0xec, 0x49, 0x88, 0x5e, 0x43, 0x91, 0x79, 0x34, 0x22, 0x6a, 0xa9, + 0x26, 0x35, 0x56, 0x9b, 0x2d, 0x6d, 0x4e, 0x67, 0xb4, 0x9b, 0xbe, 0x66, 0x22, 0x06, 0x34, 0xec, + 0x0b, 0x90, 0x93, 0xf1, 0x90, 0x0b, 0x0b, 0x7c, 0x1c, 0x11, 0x75, 0x31, 0xe5, 0x3e, 0xfb, 0x1b, + 0xae, 0x3b, 0x8e, 0x88, 0x93, 0xd2, 0xea, 0x2f, 0xe1, 0xff, 0x99, 0x7a, 0xa8, 0x06, 0x9b, 0x46, + 0xb7, 0xd3, 0xdb, 0x37, 0x5d, 0xab, 0x6b, 0x0f, 0xfa, 0x46, 0xb7, 0x67, 0x0e, 0x0e, 0xec, 0x7e, + 0xcf, 0x34, 0xac, 0xe7, 0x96, 0xb9, 0xa7, 0xfc, 0x87, 0x00, 0x4a, 0xae, 0x69, 0xb7, 0x6c, 0x57, + 0x91, 0xc4, 0x73, 0xef, 0xa0, 0xbd, 0x6f, 0x19, 0x8a, 0x5c, 0x3f, 0x86, 0xd5, 0xeb, 0x45, 0xd0, + 0x16, 0x6c, 0x4c, 0xb1, 0xdc, 0xc3, 0xdf, 0x50, 0x2b, 0x50, 0x79, 0xd1, 0x6d, 0x0f, 0x5c, 0xcb, + 0xdd, 0x37, 0x15, 0x09, 0x29, 0xb0, 0x2c, 0xf2, 0x5b, 0xf6, 0xe1, 0xc0, 0x6e, 0x75, 0x4c, 0x45, + 0x46, 0xcb, 0x50, 0x36, 0xba, 0x9d, 0xb6, 0x65, 0x9b, 0x7b, 0x4a, 0xa1, 0xfe, 0x53, 0x86, 0xbb, + 0x33, 0x1f, 0xc7, 0x22, 0x1a, 0x32, 0x82, 0x62, 0x40, 0x53, 0xd3, 0x19, 0x13, 0x96, 0x0c, 0x39, + 0x53, 0xa5, 0x5a, 0xa1, 0xb1, 0xd4, 0x34, 0x6e, 0xd3, 0xac, 0x8c, 0x37, 0xd5, 0x2d, 0x27, 0x65, + 0x39, 0x77, 0xbc, 0x19, 0x85, 0x21, 0x0b, 0xca, 0x23, 0xc2, 0xf1, 0x29, 0xe6, 0x38, 0x1d, 0xb1, + 0xa5, 0xe6, 0xc3, 0xb9, 0x95, 0x26, 0xf0, 0x4e, 0x6e, 0x72, 0x2e, 0xed, 0xd5, 0x2f, 0x12, 0x28, + 0xb3, 0x25, 0xd1, 0x3d, 0x00, 0x96, 0xf8, 0x3e, 0x61, 0x42, 0xcb, 0x67, 0x7b, 0x4a, 0xb9, 0x1c, + 0x09, 0xf9, 0x5f, 0x8e, 0x84, 0x18, 0xfc, 0x60, 0x24, 0x26, 0x3f, 0x89, 0x03, 0xb5, 0x90, 0x16, + 0x2d, 0xa7, 0xc2, 0x41, 0x1c, 0x34, 0x7f, 0x48, 0x00, 0x57, 0x2e, 0xf4, 0x4d, 0x82, 0x95, 0x6b, + 0x64, 0xb4, 0x73, 0xeb, 0x53, 0x54, 0x9b, 0xb7, 0xff, 0x3d, 0xf5, 0xe3, 0x0f, 0x5f, 0xbf, 0x7f, + 0x92, 0x8f, 0x90, 0x7e, 0xb9, 0x2a, 0xde, 0x66, 0x97, 0xfe, 0x69, 0x14, 0xd3, 0x37, 0xc4, 0xe3, + 0x4c, 0x7f, 0xa0, 0x73, 0x12, 0xe2, 0x50, 0x3c, 0xbd, 0x7b, 0x9c, 0xff, 0x3c, 0x72, 0x74, 0x1f, + 0x6d, 0xcf, 0xb1, 0x5c, 0x25, 0xb6, 0x3f, 0x4a, 0xb0, 0xe5, 0xd1, 0xd1, 0xbc, 0xb3, 0xb5, 0xd7, + 0xa7, 0xee, 0x4e, 0xb6, 0x0f, 0x7b, 0x62, 0x53, 0xf5, 0xa4, 0xa3, 0x56, 0x6e, 0xf3, 0xa9, 0xd8, + 0x2a, 0x1a, 0x8d, 0x7d, 0xdd, 0x27, 0x61, 0xba, 0xc7, 0xf4, 0xec, 0x15, 0x8e, 0x02, 0x76, 0xe3, + 0xd2, 0x7b, 0x92, 0x85, 0x9f, 0xe5, 0x82, 0xe1, 0xf6, 0x4f, 0x4a, 0xa9, 0xe7, 0xd1, 0xaf, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x6c, 0xb9, 0x64, 0x84, 0x98, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event.pb.go new file mode 100644 index 0000000..1f611c4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event.pb.go @@ -0,0 +1,571 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/talent/v4beta1/event.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An enumeration of an event attributed to the behavior of the end user, +// such as a job seeker. +type JobEvent_JobEventType int32 + +const ( + // The event is unspecified by other provided values. + JobEvent_JOB_EVENT_TYPE_UNSPECIFIED JobEvent_JobEventType = 0 + // The job seeker or other entity interacting with the service has + // had a job rendered in their view, such as in a list of search results in + // a compressed or clipped format. This event is typically associated with + // the viewing of a jobs list on a single page by a job seeker. + JobEvent_IMPRESSION JobEvent_JobEventType = 1 + // The job seeker, or other entity interacting with the service, has + // viewed the details of a job, including the full description. This + // event doesn't apply to the viewing a snippet of a job appearing as a + // part of the job search results. Viewing a snippet is associated with an + // [impression][google.cloud.talent.v4beta1.JobEvent.JobEventType.IMPRESSION]). + JobEvent_VIEW JobEvent_JobEventType = 2 + // The job seeker or other entity interacting with the service + // performed an action to view a job and was redirected to a different + // website for job. + JobEvent_VIEW_REDIRECT JobEvent_JobEventType = 3 + // The job seeker or other entity interacting with the service + // began the process or demonstrated the intention of applying for a job. + JobEvent_APPLICATION_START JobEvent_JobEventType = 4 + // The job seeker or other entity interacting with the service + // submitted an application for a job. + JobEvent_APPLICATION_FINISH JobEvent_JobEventType = 5 + // The job seeker or other entity interacting with the service + // submitted an application for a job with a single click without + // entering information. If a job seeker performs this action, send only + // this event to the service. Do not also send + // [JobEventType.APPLICATION_START][google.cloud.talent.v4beta1.JobEvent.JobEventType.APPLICATION_START] or [JobEventType.APPLICATION_FINISH][google.cloud.talent.v4beta1.JobEvent.JobEventType.APPLICATION_FINISH] + // events. + JobEvent_APPLICATION_QUICK_SUBMISSION JobEvent_JobEventType = 6 + // The job seeker or other entity interacting with the service + // performed an action to apply to a job and was redirected to a different + // website to complete the application. + JobEvent_APPLICATION_REDIRECT JobEvent_JobEventType = 7 + // The job seeker or other entity interacting with the service began the + // process or demonstrated the intention of applying for a job from the + // search results page without viewing the details of the job posting. + // If sending this event, JobEventType.VIEW event shouldn't be sent. 
+ JobEvent_APPLICATION_START_FROM_SEARCH JobEvent_JobEventType = 8 + // The job seeker, or other entity interacting with the service, performs an + // action with a single click from the search results page to apply to a job + // (without viewing the details of the job posting), and is redirected + // to a different website to complete the application. If a candidate + // performs this action, send only this event to the service. Do not also + // send [JobEventType.APPLICATION_START][google.cloud.talent.v4beta1.JobEvent.JobEventType.APPLICATION_START], + // [JobEventType.APPLICATION_FINISH][google.cloud.talent.v4beta1.JobEvent.JobEventType.APPLICATION_FINISH] or [JobEventType.VIEW][google.cloud.talent.v4beta1.JobEvent.JobEventType.VIEW] events. + JobEvent_APPLICATION_REDIRECT_FROM_SEARCH JobEvent_JobEventType = 9 + // This event should be used when a company submits an application + // on behalf of a job seeker. This event is intended for use by staffing + // agencies attempting to place candidates. + JobEvent_APPLICATION_COMPANY_SUBMIT JobEvent_JobEventType = 10 + // The job seeker or other entity interacting with the service demonstrated + // an interest in a job by bookmarking or saving it. + JobEvent_BOOKMARK JobEvent_JobEventType = 11 + // The job seeker or other entity interacting with the service was + // sent a notification, such as an email alert or device notification, + // containing one or more jobs listings generated by the service. + JobEvent_NOTIFICATION JobEvent_JobEventType = 12 + // The job seeker or other entity interacting with the service was + // employed by the hiring entity (employer). Send this event + // only if the job seeker was hired through an application that was + // initiated by a search conducted through the Cloud Talent Solution + // service. + JobEvent_HIRED JobEvent_JobEventType = 13 + // A recruiter or staffing agency submitted an application on behalf of the + // candidate after interacting with the service to identify a suitable job + // posting. + JobEvent_SENT_CV JobEvent_JobEventType = 14 + // The entity interacting with the service (for example, the job seeker), + // was granted an initial interview by the hiring entity (employer). This + // event should only be sent if the job seeker was granted an interview as + // part of an application that was initiated by a search conducted through / + // recommendation provided by the Cloud Talent Solution service. + JobEvent_INTERVIEW_GRANTED JobEvent_JobEventType = 15 + // The job seeker or other entity interacting with the service showed + // no interest in the job. 
+ JobEvent_NOT_INTERESTED JobEvent_JobEventType = 16 +) + +var JobEvent_JobEventType_name = map[int32]string{ + 0: "JOB_EVENT_TYPE_UNSPECIFIED", + 1: "IMPRESSION", + 2: "VIEW", + 3: "VIEW_REDIRECT", + 4: "APPLICATION_START", + 5: "APPLICATION_FINISH", + 6: "APPLICATION_QUICK_SUBMISSION", + 7: "APPLICATION_REDIRECT", + 8: "APPLICATION_START_FROM_SEARCH", + 9: "APPLICATION_REDIRECT_FROM_SEARCH", + 10: "APPLICATION_COMPANY_SUBMIT", + 11: "BOOKMARK", + 12: "NOTIFICATION", + 13: "HIRED", + 14: "SENT_CV", + 15: "INTERVIEW_GRANTED", + 16: "NOT_INTERESTED", +} +var JobEvent_JobEventType_value = map[string]int32{ + "JOB_EVENT_TYPE_UNSPECIFIED": 0, + "IMPRESSION": 1, + "VIEW": 2, + "VIEW_REDIRECT": 3, + "APPLICATION_START": 4, + "APPLICATION_FINISH": 5, + "APPLICATION_QUICK_SUBMISSION": 6, + "APPLICATION_REDIRECT": 7, + "APPLICATION_START_FROM_SEARCH": 8, + "APPLICATION_REDIRECT_FROM_SEARCH": 9, + "APPLICATION_COMPANY_SUBMIT": 10, + "BOOKMARK": 11, + "NOTIFICATION": 12, + "HIRED": 13, + "SENT_CV": 14, + "INTERVIEW_GRANTED": 15, + "NOT_INTERESTED": 16, +} + +func (x JobEvent_JobEventType) String() string { + return proto.EnumName(JobEvent_JobEventType_name, int32(x)) +} +func (JobEvent_JobEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_event_89811e87550a0ace, []int{1, 0} +} + +// The enum represents types of client events for a candidate profile. +type ProfileEvent_ProfileEventType int32 + +const ( + // Default value. + ProfileEvent_PROFILE_EVENT_TYPE_UNSPECIFIED ProfileEvent_ProfileEventType = 0 + // The profile is displayed. + ProfileEvent_IMPRESSION ProfileEvent_ProfileEventType = 1 + // The profile is viewed. + ProfileEvent_VIEW ProfileEvent_ProfileEventType = 2 + // The profile is bookmarked. + ProfileEvent_BOOKMARK ProfileEvent_ProfileEventType = 3 +) + +var ProfileEvent_ProfileEventType_name = map[int32]string{ + 0: "PROFILE_EVENT_TYPE_UNSPECIFIED", + 1: "IMPRESSION", + 2: "VIEW", + 3: "BOOKMARK", +} +var ProfileEvent_ProfileEventType_value = map[string]int32{ + "PROFILE_EVENT_TYPE_UNSPECIFIED": 0, + "IMPRESSION": 1, + "VIEW": 2, + "BOOKMARK": 3, +} + +func (x ProfileEvent_ProfileEventType) String() string { + return proto.EnumName(ProfileEvent_ProfileEventType_name, int32(x)) +} +func (ProfileEvent_ProfileEventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_event_89811e87550a0ace, []int{2, 0} +} + +// An event issued when an end user interacts with the application that +// implements Cloud Talent Solution. Providing this information improves the +// quality of search and recommendation for the API clients, enabling the +// service to perform optimally. The number of events sent must be consistent +// with other calls, such as job searches, issued to the service by the client. +type ClientEvent struct { + // Required. + // + // A unique ID generated in the API responses. It can be found in + // [ResponseMetadata.request_id][google.cloud.talent.v4beta1.ResponseMetadata.request_id]. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Required. + // + // A unique identifier, generated by the client application. + EventId string `protobuf:"bytes,2,opt,name=event_id,json=eventId,proto3" json:"event_id,omitempty"` + // Required. + // + // The timestamp of the event. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Required. + // + // The detail information of a specific event type. 
+ // + // Types that are valid to be assigned to Event: + // *ClientEvent_JobEvent + // *ClientEvent_ProfileEvent + Event isClientEvent_Event `protobuf_oneof:"event"` + // Optional. + // + // Notes about the event provided by recruiters or other users, for example, + // feedback on why a profile was bookmarked. + EventNotes string `protobuf:"bytes,9,opt,name=event_notes,json=eventNotes,proto3" json:"event_notes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientEvent) Reset() { *m = ClientEvent{} } +func (m *ClientEvent) String() string { return proto.CompactTextString(m) } +func (*ClientEvent) ProtoMessage() {} +func (*ClientEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_event_89811e87550a0ace, []int{0} +} +func (m *ClientEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientEvent.Unmarshal(m, b) +} +func (m *ClientEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientEvent.Marshal(b, m, deterministic) +} +func (dst *ClientEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientEvent.Merge(dst, src) +} +func (m *ClientEvent) XXX_Size() int { + return xxx_messageInfo_ClientEvent.Size(m) +} +func (m *ClientEvent) XXX_DiscardUnknown() { + xxx_messageInfo_ClientEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientEvent proto.InternalMessageInfo + +func (m *ClientEvent) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *ClientEvent) GetEventId() string { + if m != nil { + return m.EventId + } + return "" +} + +func (m *ClientEvent) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +type isClientEvent_Event interface { + isClientEvent_Event() +} + +type ClientEvent_JobEvent struct { + JobEvent *JobEvent `protobuf:"bytes,5,opt,name=job_event,json=jobEvent,proto3,oneof"` +} + +type ClientEvent_ProfileEvent struct { + ProfileEvent *ProfileEvent `protobuf:"bytes,6,opt,name=profile_event,json=profileEvent,proto3,oneof"` +} + +func (*ClientEvent_JobEvent) isClientEvent_Event() {} + +func (*ClientEvent_ProfileEvent) isClientEvent_Event() {} + +func (m *ClientEvent) GetEvent() isClientEvent_Event { + if m != nil { + return m.Event + } + return nil +} + +func (m *ClientEvent) GetJobEvent() *JobEvent { + if x, ok := m.GetEvent().(*ClientEvent_JobEvent); ok { + return x.JobEvent + } + return nil +} + +func (m *ClientEvent) GetProfileEvent() *ProfileEvent { + if x, ok := m.GetEvent().(*ClientEvent_ProfileEvent); ok { + return x.ProfileEvent + } + return nil +} + +func (m *ClientEvent) GetEventNotes() string { + if m != nil { + return m.EventNotes + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ClientEvent) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ClientEvent_OneofMarshaler, _ClientEvent_OneofUnmarshaler, _ClientEvent_OneofSizer, []interface{}{ + (*ClientEvent_JobEvent)(nil), + (*ClientEvent_ProfileEvent)(nil), + } +} + +func _ClientEvent_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ClientEvent) + // event + switch x := m.Event.(type) { + case *ClientEvent_JobEvent: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobEvent); err != nil { + return err + } + case *ClientEvent_ProfileEvent: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProfileEvent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ClientEvent.Event has unexpected type %T", x) + } + return nil +} + +func _ClientEvent_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ClientEvent) + switch tag { + case 5: // event.job_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(JobEvent) + err := b.DecodeMessage(msg) + m.Event = &ClientEvent_JobEvent{msg} + return true, err + case 6: // event.profile_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProfileEvent) + err := b.DecodeMessage(msg) + m.Event = &ClientEvent_ProfileEvent{msg} + return true, err + default: + return false, nil + } +} + +func _ClientEvent_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ClientEvent) + // event + switch x := m.Event.(type) { + case *ClientEvent_JobEvent: + s := proto.Size(x.JobEvent) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ClientEvent_ProfileEvent: + s := proto.Size(x.ProfileEvent) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An event issued when a job seeker interacts with the application that +// implements Cloud Talent Solution. +type JobEvent struct { + // Required. + // + // The type of the event (see [JobEventType][google.cloud.talent.v4beta1.JobEvent.JobEventType]). + Type JobEvent_JobEventType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.JobEvent_JobEventType" json:"type,omitempty"` + // Required. + // + // The [job name(s)][google.cloud.talent.v4beta1.Job.name] associated with this event. + // For example, if this is an [impression][google.cloud.talent.v4beta1.JobEvent.JobEventType.IMPRESSION] event, + // this field contains the identifiers of all jobs shown to the job seeker. + // If this was a [view][google.cloud.talent.v4beta1.JobEvent.JobEventType.VIEW] event, this field contains the + // identifier of the viewed job. 
+ Jobs []string `protobuf:"bytes,2,rep,name=jobs,proto3" json:"jobs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobEvent) Reset() { *m = JobEvent{} } +func (m *JobEvent) String() string { return proto.CompactTextString(m) } +func (*JobEvent) ProtoMessage() {} +func (*JobEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_event_89811e87550a0ace, []int{1} +} +func (m *JobEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobEvent.Unmarshal(m, b) +} +func (m *JobEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobEvent.Marshal(b, m, deterministic) +} +func (dst *JobEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobEvent.Merge(dst, src) +} +func (m *JobEvent) XXX_Size() int { + return xxx_messageInfo_JobEvent.Size(m) +} +func (m *JobEvent) XXX_DiscardUnknown() { + xxx_messageInfo_JobEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_JobEvent proto.InternalMessageInfo + +func (m *JobEvent) GetType() JobEvent_JobEventType { + if m != nil { + return m.Type + } + return JobEvent_JOB_EVENT_TYPE_UNSPECIFIED +} + +func (m *JobEvent) GetJobs() []string { + if m != nil { + return m.Jobs + } + return nil +} + +// An event issued when a profile searcher interacts with the application +// that implements Cloud Talent Solution. +type ProfileEvent struct { + // Required. + // + // Type of event. + Type ProfileEvent_ProfileEventType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.ProfileEvent_ProfileEventType" json:"type,omitempty"` + // Required. + // + // The [profile name(s)][google.cloud.talent.v4beta1.Profile.name] associated with this client event. + Profiles []string `protobuf:"bytes,2,rep,name=profiles,proto3" json:"profiles,omitempty"` + // Optional. + // + // The job ID associated with this client event if there is one. Leave it + // empty if the event isn't associated with a job. + // + // The job ID should be consistent with the + // [JobApplication.job.requisition_id][] in the profile. 
+ Jobs []string `protobuf:"bytes,6,rep,name=jobs,proto3" json:"jobs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProfileEvent) Reset() { *m = ProfileEvent{} } +func (m *ProfileEvent) String() string { return proto.CompactTextString(m) } +func (*ProfileEvent) ProtoMessage() {} +func (*ProfileEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_event_89811e87550a0ace, []int{2} +} +func (m *ProfileEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProfileEvent.Unmarshal(m, b) +} +func (m *ProfileEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProfileEvent.Marshal(b, m, deterministic) +} +func (dst *ProfileEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProfileEvent.Merge(dst, src) +} +func (m *ProfileEvent) XXX_Size() int { + return xxx_messageInfo_ProfileEvent.Size(m) +} +func (m *ProfileEvent) XXX_DiscardUnknown() { + xxx_messageInfo_ProfileEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_ProfileEvent proto.InternalMessageInfo + +func (m *ProfileEvent) GetType() ProfileEvent_ProfileEventType { + if m != nil { + return m.Type + } + return ProfileEvent_PROFILE_EVENT_TYPE_UNSPECIFIED +} + +func (m *ProfileEvent) GetProfiles() []string { + if m != nil { + return m.Profiles + } + return nil +} + +func (m *ProfileEvent) GetJobs() []string { + if m != nil { + return m.Jobs + } + return nil +} + +func init() { + proto.RegisterType((*ClientEvent)(nil), "google.cloud.talent.v4beta1.ClientEvent") + proto.RegisterType((*JobEvent)(nil), "google.cloud.talent.v4beta1.JobEvent") + proto.RegisterType((*ProfileEvent)(nil), "google.cloud.talent.v4beta1.ProfileEvent") + proto.RegisterEnum("google.cloud.talent.v4beta1.JobEvent_JobEventType", JobEvent_JobEventType_name, JobEvent_JobEventType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.ProfileEvent_ProfileEventType", ProfileEvent_ProfileEventType_name, ProfileEvent_ProfileEventType_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/event.proto", fileDescriptor_event_89811e87550a0ace) +} + +var fileDescriptor_event_89811e87550a0ace = []byte{ + // 682 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x54, 0xdd, 0x6e, 0xd3, 0x4c, + 0x10, 0x6d, 0xfe, 0x93, 0x49, 0x9a, 0x6f, 0xbb, 0xfa, 0x40, 0x21, 0xf4, 0x27, 0x44, 0x20, 0xca, + 0x8d, 0x2d, 0x0a, 0x57, 0xf4, 0xca, 0x71, 0x36, 0x64, 0xdb, 0xc6, 0x36, 0x6b, 0xb7, 0xa8, 0x5c, + 0x60, 0x39, 0xcd, 0x36, 0x72, 0x95, 0x7a, 0x4d, 0xe2, 0x56, 0xf4, 0x35, 0x78, 0x04, 0x1e, 0x86, + 0xc7, 0xe1, 0x05, 0xb8, 0x41, 0x5e, 0x3b, 0x91, 0x0b, 0x55, 0x55, 0x71, 0xb7, 0x33, 0x73, 0xe6, + 0xcc, 0x99, 0xd1, 0xb1, 0xe1, 0xe5, 0x54, 0x88, 0xe9, 0x8c, 0xab, 0x67, 0x33, 0x71, 0x35, 0x51, + 0x23, 0x6f, 0xc6, 0x83, 0x48, 0xbd, 0x7e, 0x3b, 0xe6, 0x91, 0xf7, 0x5a, 0xe5, 0xd7, 0x3c, 0x88, + 0x94, 0x70, 0x2e, 0x22, 0x81, 0x9f, 0x26, 0x40, 0x45, 0x02, 0x95, 0x04, 0xa8, 0xa4, 0xc0, 0xf6, + 0x66, 0xca, 0xe2, 0x85, 0xbe, 0xea, 0x05, 0x81, 0x88, 0xbc, 0xc8, 0x17, 0xc1, 0x22, 0x69, 0x6d, + 0xef, 0xa4, 0x55, 0x19, 0x8d, 0xaf, 0xce, 0xd5, 0xc8, 0xbf, 0xe4, 0x8b, 0xc8, 0xbb, 0x0c, 0x13, + 0x40, 0xf7, 0x47, 0x1e, 0xea, 0xfa, 0xcc, 0xe7, 0x41, 0x44, 0xe2, 0x89, 0x78, 0x0b, 0x60, 0xce, + 0xbf, 0x5c, 0xf1, 0x45, 0xe4, 0xfa, 0x93, 0x56, 0xae, 0x93, 0xdb, 0xad, 0xb1, 0x5a, 0x9a, 0xa1, + 0x13, 0xfc, 0x04, 0xaa, 0x52, 0x59, 0x5c, 0xcc, 0xcb, 0x62, 0x45, 0xc6, 0x74, 0x82, 0xf7, 0xa1, + 0x7e, 0x36, 0xe7, 
0x5e, 0xc4, 0xdd, 0x78, 0x46, 0xab, 0xd8, 0xc9, 0xed, 0xd6, 0xf7, 0xda, 0x4a, + 0xaa, 0x7d, 0x29, 0x40, 0x71, 0x96, 0x02, 0x18, 0x24, 0xf0, 0x38, 0x81, 0xfb, 0x50, 0xbb, 0x10, + 0x63, 0x57, 0x72, 0xb5, 0x4a, 0xb2, 0xf5, 0x85, 0x72, 0xcf, 0xda, 0xca, 0x81, 0x18, 0x4b, 0xc1, + 0xc3, 0x35, 0x56, 0xbd, 0x48, 0xdf, 0xd8, 0x82, 0xf5, 0x70, 0x2e, 0xce, 0xfd, 0x19, 0x4f, 0x99, + 0xca, 0x92, 0xe9, 0xd5, 0xbd, 0x4c, 0x56, 0xd2, 0xb1, 0x64, 0x6b, 0x84, 0x99, 0x18, 0xef, 0x40, + 0x3d, 0xd9, 0x37, 0x10, 0x11, 0x5f, 0xb4, 0x6a, 0x72, 0x65, 0x90, 0x29, 0x23, 0xce, 0xf4, 0x2a, + 0x50, 0x92, 0x51, 0xf7, 0x57, 0x01, 0xaa, 0x4b, 0x51, 0x78, 0x00, 0xc5, 0xe8, 0x26, 0xe4, 0xf2, + 0x7e, 0xcd, 0xbd, 0xbd, 0x07, 0x6d, 0xb2, 0x7a, 0x38, 0x37, 0x21, 0x67, 0xb2, 0x1f, 0x63, 0x28, + 0x5e, 0x88, 0xf1, 0xa2, 0x95, 0xef, 0x14, 0x76, 0x6b, 0x4c, 0xbe, 0xbb, 0xdf, 0x0a, 0xd0, 0xc8, + 0x42, 0xf1, 0x36, 0xb4, 0x0f, 0xcc, 0x9e, 0x4b, 0x4e, 0x88, 0xe1, 0xb8, 0xce, 0xa9, 0x45, 0xdc, + 0x63, 0xc3, 0xb6, 0x88, 0x4e, 0x07, 0x94, 0xf4, 0xd1, 0x1a, 0x6e, 0x02, 0xd0, 0x91, 0xc5, 0x88, + 0x6d, 0x53, 0xd3, 0x40, 0x39, 0x5c, 0x85, 0xe2, 0x09, 0x25, 0x1f, 0x51, 0x1e, 0x6f, 0xc0, 0x7a, + 0xfc, 0x72, 0x19, 0xe9, 0x53, 0x46, 0x74, 0x07, 0x15, 0xf0, 0x23, 0xd8, 0xd0, 0x2c, 0xeb, 0x88, + 0xea, 0x9a, 0x43, 0x4d, 0xc3, 0xb5, 0x1d, 0x8d, 0x39, 0xa8, 0x88, 0x1f, 0x03, 0xce, 0xa6, 0x07, + 0xd4, 0xa0, 0xf6, 0x10, 0x95, 0x70, 0x07, 0x36, 0xb3, 0xf9, 0x0f, 0xc7, 0x54, 0x3f, 0x74, 0xed, + 0xe3, 0xde, 0x88, 0x26, 0xd3, 0xca, 0xb8, 0x05, 0xff, 0x67, 0x11, 0xab, 0x51, 0x15, 0xfc, 0x0c, + 0xb6, 0xfe, 0x1a, 0xe5, 0x0e, 0x98, 0x39, 0x72, 0x6d, 0xa2, 0x31, 0x7d, 0x88, 0xaa, 0xf8, 0x39, + 0x74, 0xee, 0x6a, 0xbe, 0x85, 0xaa, 0xc5, 0x07, 0xc8, 0xa2, 0x74, 0x73, 0x64, 0x69, 0xc6, 0x69, + 0x22, 0xc3, 0x41, 0x80, 0x1b, 0x50, 0xed, 0x99, 0xe6, 0xe1, 0x48, 0x63, 0x87, 0xa8, 0x8e, 0x11, + 0x34, 0x0c, 0xd3, 0xa1, 0x83, 0x14, 0x8e, 0x1a, 0xb8, 0x06, 0xa5, 0x21, 0x65, 0xa4, 0x8f, 0xd6, + 0x71, 0x1d, 0x2a, 0x76, 0x7c, 0x46, 0xfd, 0x04, 0x35, 0xe3, 0x5b, 0x50, 0xc3, 0x21, 0x4c, 0xde, + 0xe8, 0x3d, 0xd3, 0x0c, 0x87, 0xf4, 0xd1, 0x7f, 0x18, 0x43, 0xd3, 0x30, 0x1d, 0x57, 0x96, 0x88, + 0x1d, 0xe7, 0x50, 0xf7, 0x67, 0x0e, 0x1a, 0x59, 0x23, 0x61, 0xe3, 0x96, 0x03, 0xde, 0x3d, 0xd8, + 0x81, 0xb7, 0x82, 0x8c, 0x13, 0xda, 0x50, 0x4d, 0x8d, 0xb9, 0x74, 0xc3, 0x2a, 0x5e, 0xb9, 0xa4, + 0x9c, 0x71, 0xc9, 0x67, 0x40, 0x7f, 0x32, 0xe1, 0x2e, 0x6c, 0x5b, 0xcc, 0x1c, 0xd0, 0x23, 0xf2, + 0x2f, 0x66, 0xc9, 0x5e, 0xb1, 0xd0, 0xfb, 0x0a, 0x3b, 0x67, 0xe2, 0xf2, 0xbe, 0xb5, 0x7a, 0x20, + 0x27, 0x5b, 0xf1, 0x87, 0x6f, 0xe5, 0x3e, 0x69, 0x29, 0x74, 0x2a, 0x66, 0x5e, 0x30, 0x55, 0xc4, + 0x7c, 0xaa, 0x4e, 0x79, 0x20, 0x7f, 0x0b, 0x6a, 0x52, 0xf2, 0x42, 0x7f, 0x71, 0xe7, 0xcf, 0x70, + 0x3f, 0x09, 0xbf, 0xe7, 0x0b, 0xba, 0x63, 0x8f, 0xcb, 0xb2, 0xe7, 0xcd, 0xef, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x6c, 0x1e, 0x6f, 0x92, 0x3f, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event_service.pb.go new file mode 100644 index 0000000..1c2b62b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/event_service.pb.go @@ -0,0 +1,204 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/talent/v4beta1/event_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The report event request. +type CreateClientEventRequest struct { + // Required. + // + // Resource name of the tenant under which the event is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and a default tenant is created if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // Events issued when end user interacts with customer's application that + // uses Cloud Talent Solution. + ClientEvent *ClientEvent `protobuf:"bytes,2,opt,name=client_event,json=clientEvent,proto3" json:"client_event,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClientEventRequest) Reset() { *m = CreateClientEventRequest{} } +func (m *CreateClientEventRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClientEventRequest) ProtoMessage() {} +func (*CreateClientEventRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_e4985594ba4916cb, []int{0} +} +func (m *CreateClientEventRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClientEventRequest.Unmarshal(m, b) +} +func (m *CreateClientEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClientEventRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClientEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClientEventRequest.Merge(dst, src) +} +func (m *CreateClientEventRequest) XXX_Size() int { + return xxx_messageInfo_CreateClientEventRequest.Size(m) +} +func (m *CreateClientEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClientEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClientEventRequest proto.InternalMessageInfo + +func (m *CreateClientEventRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateClientEventRequest) GetClientEvent() *ClientEvent { + if m != nil { + return m.ClientEvent + } + return nil +} + +func init() { + proto.RegisterType((*CreateClientEventRequest)(nil), "google.cloud.talent.v4beta1.CreateClientEventRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// EventServiceClient is the client API for EventService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EventServiceClient interface { + // Report events issued when end user interacts with customer's application + // that uses Cloud Talent Solution. You may inspect the created events in + // [self service + // tools](https://console.cloud.google.com/talent-solution/overview). + // [Learn + // more](https://cloud.google.com/talent-solution/docs/management-tools) + // about self service tools. + CreateClientEvent(ctx context.Context, in *CreateClientEventRequest, opts ...grpc.CallOption) (*ClientEvent, error) +} + +type eventServiceClient struct { + cc *grpc.ClientConn +} + +func NewEventServiceClient(cc *grpc.ClientConn) EventServiceClient { + return &eventServiceClient{cc} +} + +func (c *eventServiceClient) CreateClientEvent(ctx context.Context, in *CreateClientEventRequest, opts ...grpc.CallOption) (*ClientEvent, error) { + out := new(ClientEvent) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.EventService/CreateClientEvent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EventServiceServer is the server API for EventService service. +type EventServiceServer interface { + // Report events issued when end user interacts with customer's application + // that uses Cloud Talent Solution. You may inspect the created events in + // [self service + // tools](https://console.cloud.google.com/talent-solution/overview). + // [Learn + // more](https://cloud.google.com/talent-solution/docs/management-tools) + // about self service tools. + CreateClientEvent(context.Context, *CreateClientEventRequest) (*ClientEvent, error) +} + +func RegisterEventServiceServer(s *grpc.Server, srv EventServiceServer) { + s.RegisterService(&_EventService_serviceDesc, srv) +} + +func _EventService_CreateClientEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClientEventRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).CreateClientEvent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.EventService/CreateClientEvent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).CreateClientEvent(ctx, req.(*CreateClientEventRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EventService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.EventService", + HandlerType: (*EventServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateClientEvent", + Handler: _EventService_CreateClientEvent_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/event_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/event_service.proto", fileDescriptor_event_service_e4985594ba4916cb) +} + +var fileDescriptor_event_service_e4985594ba4916cb = []byte{ + // 327 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xb1, 0x4e, 0x3a, 0x41, + 0x10, 0xc6, 0xb3, 0xfc, 0x13, 0x92, 0xff, 0x42, 0xc3, 0x15, 0x86, 0xa0, 0x89, 0xe4, 0x1a, 0x91, + 0x62, 0x37, 0x82, 
0x36, 0x1a, 0x0b, 0x21, 0x56, 0x36, 0x04, 0xac, 0x68, 0xc8, 0x72, 0x4e, 0xce, + 0x33, 0xe7, 0xec, 0xb9, 0x3b, 0xd0, 0x18, 0x43, 0xe2, 0x2b, 0xf8, 0x06, 0xbe, 0x92, 0xaf, 0xc0, + 0x2b, 0xd8, 0x1b, 0x76, 0x2f, 0x7a, 0x09, 0x7a, 0xda, 0xcd, 0xe4, 0xe6, 0xf7, 0xdd, 0xf7, 0xcd, + 0x2c, 0x97, 0xb1, 0xd6, 0x71, 0x0a, 0x32, 0x4a, 0xf5, 0xe2, 0x46, 0x92, 0x4a, 0x01, 0x49, 0x2e, + 0x8f, 0xe7, 0x40, 0xea, 0x48, 0xc2, 0x12, 0x90, 0x66, 0x16, 0xcc, 0x32, 0x89, 0x40, 0x64, 0x46, + 0x93, 0x0e, 0x76, 0x3d, 0x20, 0x1c, 0x20, 0x3c, 0x20, 0x72, 0xa0, 0xb5, 0x97, 0xab, 0xa9, 0x2c, + 0x91, 0x0a, 0x51, 0x93, 0xa2, 0x44, 0xa3, 0xf5, 0x68, 0xeb, 0xe0, 0xd7, 0x7f, 0xf9, 0xc1, 0x70, + 0xc5, 0x9b, 0x43, 0x03, 0x8a, 0x60, 0x98, 0x26, 0x80, 0x74, 0xb9, 0xf9, 0x34, 0x86, 0x87, 0x05, + 0x58, 0x0a, 0x76, 0x78, 0x35, 0x53, 0x06, 0x90, 0x9a, 0xac, 0xcd, 0x3a, 0xff, 0xc7, 0x79, 0x17, + 0x5c, 0xf1, 0x7a, 0xe4, 0xa6, 0x67, 0x4e, 0xa9, 0x59, 0x69, 0xb3, 0x4e, 0xad, 0xd7, 0x11, 0x25, + 0x76, 0x45, 0x51, 0xbe, 0x16, 0x7d, 0x35, 0xbd, 0x77, 0xc6, 0xeb, 0xae, 0x9a, 0xf8, 0xec, 0xc1, + 0x9a, 0xf1, 0xc6, 0x96, 0xa5, 0xe0, 0xa4, 0x5c, 0xfd, 0x87, 0x08, 0xad, 0x3f, 0x9b, 0x0a, 0xf1, + 0xf9, 0x6d, 0xfd, 0x52, 0xb9, 0x0d, 0xfb, 0x9f, 0x6b, 0x7a, 0xf4, 0x71, 0xcf, 0x33, 0xa3, 0xef, + 0x20, 0x22, 0x2b, 0xbb, 0x92, 0x00, 0x15, 0x6e, 0xaa, 0x27, 0x59, 0xc8, 0x60, 0x4f, 0x59, 0x77, + 0x2a, 0xc2, 0xc3, 0x12, 0x72, 0x6b, 0x7e, 0xb0, 0xe2, 0xfb, 0x91, 0xbe, 0x2f, 0xb3, 0x37, 0x68, + 0x14, 0xf7, 0x32, 0xda, 0x9c, 0x6b, 0xc4, 0xa6, 0x17, 0x39, 0x11, 0xeb, 0x54, 0x61, 0x2c, 0xb4, + 0x89, 0x65, 0x0c, 0xe8, 0x8e, 0x99, 0x3f, 0x30, 0x95, 0x25, 0xf6, 0xdb, 0xc3, 0x9f, 0xf9, 0xf6, + 0xb5, 0xf2, 0x6f, 0x78, 0x3d, 0x99, 0x57, 0x1d, 0xd3, 0xff, 0x08, 0x00, 0x00, 0xff, 0xff, 0xb3, + 0x27, 0x4c, 0x92, 0x97, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/filters.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/filters.pb.go new file mode 100644 index 0000000..bae016d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/filters.pb.go @@ -0,0 +1,1886 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/filters.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import date "google.golang.org/genproto/googleapis/type/date" +import latlng "google.golang.org/genproto/googleapis/type/latlng" +import timeofday "google.golang.org/genproto/googleapis/type/timeofday" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specify whether including telecommute jobs. +type LocationFilter_TelecommutePreference int32 + +const ( + // Default value if the telecommute preference isn't specified. 
+ LocationFilter_TELECOMMUTE_PREFERENCE_UNSPECIFIED LocationFilter_TelecommutePreference = 0 + // Exclude telecommute jobs. + LocationFilter_TELECOMMUTE_EXCLUDED LocationFilter_TelecommutePreference = 1 + // Allow telecommute jobs. + LocationFilter_TELECOMMUTE_ALLOWED LocationFilter_TelecommutePreference = 2 +) + +var LocationFilter_TelecommutePreference_name = map[int32]string{ + 0: "TELECOMMUTE_PREFERENCE_UNSPECIFIED", + 1: "TELECOMMUTE_EXCLUDED", + 2: "TELECOMMUTE_ALLOWED", +} +var LocationFilter_TelecommutePreference_value = map[string]int32{ + "TELECOMMUTE_PREFERENCE_UNSPECIFIED": 0, + "TELECOMMUTE_EXCLUDED": 1, + "TELECOMMUTE_ALLOWED": 2, +} + +func (x LocationFilter_TelecommutePreference) String() string { + return proto.EnumName(LocationFilter_TelecommutePreference_name, int32(x)) +} +func (LocationFilter_TelecommutePreference) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{2, 0} +} + +// Specify the type of filtering. +type CompensationFilter_FilterType int32 + +const ( + // Filter type unspecified. Position holder, INVALID, should never be used. + CompensationFilter_FILTER_TYPE_UNSPECIFIED CompensationFilter_FilterType = 0 + // Filter by `base compensation entry's` unit. A job is a match if and + // only if the job contains a base CompensationEntry and the base + // CompensationEntry's unit matches provided [units][google.cloud.talent.v4beta1.CompensationFilter.units]. + // Populate one or more [units][google.cloud.talent.v4beta1.CompensationFilter.units]. + // + // See [CompensationInfo.CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] for definition of + // base compensation entry. + CompensationFilter_UNIT_ONLY CompensationFilter_FilterType = 1 + // Filter by `base compensation entry's` unit and amount / range. A job + // is a match if and only if the job contains a base CompensationEntry, and + // the base entry's unit matches provided [compensation_units][] and amount + // or range overlaps with provided [compensation_range][]. + // + // See [CompensationInfo.CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] for definition of + // base compensation entry. + // + // Set exactly one [units][google.cloud.talent.v4beta1.CompensationFilter.units] and populate [range][google.cloud.talent.v4beta1.CompensationFilter.range]. + CompensationFilter_UNIT_AND_AMOUNT CompensationFilter_FilterType = 2 + // Filter by annualized base compensation amount and `base compensation + // entry's` unit. Populate [range][google.cloud.talent.v4beta1.CompensationFilter.range] and zero or more [units][google.cloud.talent.v4beta1.CompensationFilter.units]. + CompensationFilter_ANNUALIZED_BASE_AMOUNT CompensationFilter_FilterType = 3 + // Filter by annualized total compensation amount and `base compensation + // entry's` unit . Populate [range][google.cloud.talent.v4beta1.CompensationFilter.range] and zero or more [units][google.cloud.talent.v4beta1.CompensationFilter.units]. 
+ CompensationFilter_ANNUALIZED_TOTAL_AMOUNT CompensationFilter_FilterType = 4 +) + +var CompensationFilter_FilterType_name = map[int32]string{ + 0: "FILTER_TYPE_UNSPECIFIED", + 1: "UNIT_ONLY", + 2: "UNIT_AND_AMOUNT", + 3: "ANNUALIZED_BASE_AMOUNT", + 4: "ANNUALIZED_TOTAL_AMOUNT", +} +var CompensationFilter_FilterType_value = map[string]int32{ + "FILTER_TYPE_UNSPECIFIED": 0, + "UNIT_ONLY": 1, + "UNIT_AND_AMOUNT": 2, + "ANNUALIZED_BASE_AMOUNT": 3, + "ANNUALIZED_TOTAL_AMOUNT": 4, +} + +func (x CompensationFilter_FilterType) String() string { + return proto.EnumName(CompensationFilter_FilterType_name, int32(x)) +} +func (CompensationFilter_FilterType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{3, 0} +} + +// The traffic density to use when calculating commute time. +type CommuteFilter_RoadTraffic int32 + +const ( + // Road traffic situation isn't specified. + CommuteFilter_ROAD_TRAFFIC_UNSPECIFIED CommuteFilter_RoadTraffic = 0 + // Optimal commute time without considering any traffic impact. + CommuteFilter_TRAFFIC_FREE CommuteFilter_RoadTraffic = 1 + // Commute time calculation takes in account the peak traffic impact. + CommuteFilter_BUSY_HOUR CommuteFilter_RoadTraffic = 2 +) + +var CommuteFilter_RoadTraffic_name = map[int32]string{ + 0: "ROAD_TRAFFIC_UNSPECIFIED", + 1: "TRAFFIC_FREE", + 2: "BUSY_HOUR", +} +var CommuteFilter_RoadTraffic_value = map[string]int32{ + "ROAD_TRAFFIC_UNSPECIFIED": 0, + "TRAFFIC_FREE": 1, + "BUSY_HOUR": 2, +} + +func (x CommuteFilter_RoadTraffic) String() string { + return proto.EnumName(CommuteFilter_RoadTraffic_name, int32(x)) +} +func (CommuteFilter_RoadTraffic) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{4, 0} +} + +// Enum indicating which set of [Profile.employment_records][google.cloud.talent.v4beta1.Profile.employment_records] to search +// against. +type EmployerFilter_EmployerFilterMode int32 + +const ( + // Default value. + EmployerFilter_EMPLOYER_FILTER_MODE_UNSPECIFIED EmployerFilter_EmployerFilterMode = 0 + // Apply to all employers in [Profile.employment_records][google.cloud.talent.v4beta1.Profile.employment_records]. + EmployerFilter_ALL_EMPLOYMENT_RECORDS EmployerFilter_EmployerFilterMode = 1 + // Apply only to current employer in [Profile.employment_records][google.cloud.talent.v4beta1.Profile.employment_records]. + EmployerFilter_CURRENT_EMPLOYMENT_RECORDS_ONLY EmployerFilter_EmployerFilterMode = 2 + // Apply only to past (not current) employers in + // [Profile.employment_records][google.cloud.talent.v4beta1.Profile.employment_records]. + EmployerFilter_PAST_EMPLOYMENT_RECORDS_ONLY EmployerFilter_EmployerFilterMode = 3 +) + +var EmployerFilter_EmployerFilterMode_name = map[int32]string{ + 0: "EMPLOYER_FILTER_MODE_UNSPECIFIED", + 1: "ALL_EMPLOYMENT_RECORDS", + 2: "CURRENT_EMPLOYMENT_RECORDS_ONLY", + 3: "PAST_EMPLOYMENT_RECORDS_ONLY", +} +var EmployerFilter_EmployerFilterMode_value = map[string]int32{ + "EMPLOYER_FILTER_MODE_UNSPECIFIED": 0, + "ALL_EMPLOYMENT_RECORDS": 1, + "CURRENT_EMPLOYMENT_RECORDS_ONLY": 2, + "PAST_EMPLOYMENT_RECORDS_ONLY": 3, +} + +func (x EmployerFilter_EmployerFilterMode) String() string { + return proto.EnumName(EmployerFilter_EmployerFilterMode_name, int32(x)) +} +func (EmployerFilter_EmployerFilterMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{7, 0} +} + +// Time fields can be used in TimeFilter. +type TimeFilter_TimeField int32 + +const ( + // Default value. 
+ TimeFilter_TIME_FIELD_UNSPECIFIED TimeFilter_TimeField = 0 + // Earliest profile create time. + TimeFilter_CREATE_TIME TimeFilter_TimeField = 1 + // Latest profile update time. + TimeFilter_UPDATE_TIME TimeFilter_TimeField = 2 +) + +var TimeFilter_TimeField_name = map[int32]string{ + 0: "TIME_FIELD_UNSPECIFIED", + 1: "CREATE_TIME", + 2: "UPDATE_TIME", +} +var TimeFilter_TimeField_value = map[string]int32{ + "TIME_FIELD_UNSPECIFIED": 0, + "CREATE_TIME": 1, + "UPDATE_TIME": 2, +} + +func (x TimeFilter_TimeField) String() string { + return proto.EnumName(TimeFilter_TimeField_name, int32(x)) +} +func (TimeFilter_TimeField) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{13, 0} +} + +// Input only. +// +// The query required to perform a search query. +type JobQuery struct { + // Optional. + // + // The query string that matches against the job title, description, and + // location fields. + // + // The maximum number of allowed characters is 255. + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + // Optional. + // + // This filter specifies the company entities to search against. + // + // If a value isn't specified, jobs are searched for against all + // companies. + // + // If multiple values are specified, jobs are searched against the + // companies specified. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". + // + // At most 20 company filters are allowed. + Companies []string `protobuf:"bytes,2,rep,name=companies,proto3" json:"companies,omitempty"` + // Optional. + // + // The location filter specifies geo-regions containing the jobs to + // search against. See [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] for more information. + // + // If a location value isn't specified, jobs fitting the other search + // criteria are retrieved regardless of where they're located. + // + // If multiple values are specified, jobs are retrieved from any of the + // specified locations. If different values are specified for the + // [LocationFilter.distance_in_miles][google.cloud.talent.v4beta1.LocationFilter.distance_in_miles] parameter, the maximum provided + // distance is used for all locations. + // + // At most 5 location filters are allowed. + LocationFilters []*LocationFilter `protobuf:"bytes,3,rep,name=location_filters,json=locationFilters,proto3" json:"location_filters,omitempty"` + // Optional. + // + // The category filter specifies the categories of jobs to search against. + // See [Category][] for more information. + // + // If a value isn't specified, jobs from any category are searched against. + // + // If multiple values are specified, jobs from any of the specified + // categories are searched against. + JobCategories []JobCategory `protobuf:"varint,4,rep,packed,name=job_categories,json=jobCategories,proto3,enum=google.cloud.talent.v4beta1.JobCategory" json:"job_categories,omitempty"` + // Optional. + // + // Allows filtering jobs by commute time with different travel methods (for + // example, driving or public transit). Note: This only works with [COMMUTE + // MODE][Mode#COMMUTE]. When specified, [JobQuery.location_filters] is + // ignored. + // + // Currently we don't support sorting by commute time. 
+ CommuteFilter *CommuteFilter `protobuf:"bytes,5,opt,name=commute_filter,json=commuteFilter,proto3" json:"commute_filter,omitempty"` + // Optional. + // + // This filter specifies the exact [company display + // name][Company.display_name] of the jobs to search against. + // + // If a value isn't specified, jobs within the search results are + // associated with any company. + // + // If multiple values are specified, jobs within the search results may be + // associated with any of the specified companies. + // + // At most 20 company display name filters are allowed. + CompanyDisplayNames []string `protobuf:"bytes,6,rep,name=company_display_names,json=companyDisplayNames,proto3" json:"company_display_names,omitempty"` + // Optional. + // + // This search filter is applied only to + // [Job.compensation_info][google.cloud.talent.v4beta1.Job.compensation_info]. For example, if the filter is specified + // as "Hourly job with per-hour compensation > $15", only jobs meeting + // these criteria are searched. If a filter isn't defined, all open jobs + // are searched. + CompensationFilter *CompensationFilter `protobuf:"bytes,7,opt,name=compensation_filter,json=compensationFilter,proto3" json:"compensation_filter,omitempty"` + // Optional. + // + // This filter specifies a structured syntax to match against the + // [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes] marked as `filterable`. + // + // The syntax for this expression is a subset of SQL syntax. + // + // Supported operators are: `=`, `!=`, `<`, `<=`, `>`, and `>=` where the + // left of the operator is a custom field key and the right of the operator + // is a number or a quoted string. You must escape backslash (\\) and + // quote (\") characters. + // + // Supported functions are `LOWER([field_name])` to + // perform a case insensitive match and `EMPTY([field_name])` to filter on the + // existence of a key. + // + // Boolean expressions (AND/OR/NOT) are supported up to 3 levels of + // nesting (for example, "((A AND B AND C) OR NOT D) AND E"), a maximum of 100 + // comparisons or functions are allowed in the expression. The expression + // must be < 3000 bytes in length. + // + // Sample Query: + // `(LOWER(driving_license)="class \"a\"" OR EMPTY(driving_license)) AND + // driving_years > 10` + CustomAttributeFilter string `protobuf:"bytes,8,opt,name=custom_attribute_filter,json=customAttributeFilter,proto3" json:"custom_attribute_filter,omitempty"` + // Optional. + // + // This flag controls the spell-check feature. If false, the + // service attempts to correct a misspelled query, + // for example, "enginee" is corrected to "engineer". + // + // Defaults to false: a spell check is performed. + DisableSpellCheck bool `protobuf:"varint,9,opt,name=disable_spell_check,json=disableSpellCheck,proto3" json:"disable_spell_check,omitempty"` + // Optional. + // + // The employment type filter specifies the employment type of jobs to + // search against, such as [EmploymentType.FULL_TIME][google.cloud.talent.v4beta1.EmploymentType.FULL_TIME]. + // + // If a value isn't specified, jobs in the search results includes any + // employment type. + // + // If multiple values are specified, jobs in the search results include + // any of the specified employment types. + EmploymentTypes []EmploymentType `protobuf:"varint,10,rep,packed,name=employment_types,json=employmentTypes,proto3,enum=google.cloud.talent.v4beta1.EmploymentType" json:"employment_types,omitempty"` + // Optional. 
+ // + // This filter specifies the locale of jobs to search against, + // for example, "en-US". + // + // If a value isn't specified, the search results can contain jobs in any + // locale. + // + // + // Language codes should be in BCP-47 format, such as "en-US" or "sr-Latn". + // For more information, see + // [Tags for Identifying Languages](https://tools.ietf.org/html/bcp47). + // + // At most 10 language code filters are allowed. + LanguageCodes []string `protobuf:"bytes,11,rep,name=language_codes,json=languageCodes,proto3" json:"language_codes,omitempty"` + // Optional. + // + // Jobs published within a range specified by this filter are searched + // against. + PublishTimeRange *TimestampRange `protobuf:"bytes,12,opt,name=publish_time_range,json=publishTimeRange,proto3" json:"publish_time_range,omitempty"` + // Optional. + // + // This filter specifies a list of job names to be excluded during search. + // + // At most 400 excluded job names are allowed. + ExcludedJobs []string `protobuf:"bytes,13,rep,name=excluded_jobs,json=excludedJobs,proto3" json:"excluded_jobs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobQuery) Reset() { *m = JobQuery{} } +func (m *JobQuery) String() string { return proto.CompactTextString(m) } +func (*JobQuery) ProtoMessage() {} +func (*JobQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{0} +} +func (m *JobQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobQuery.Unmarshal(m, b) +} +func (m *JobQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobQuery.Marshal(b, m, deterministic) +} +func (dst *JobQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobQuery.Merge(dst, src) +} +func (m *JobQuery) XXX_Size() int { + return xxx_messageInfo_JobQuery.Size(m) +} +func (m *JobQuery) XXX_DiscardUnknown() { + xxx_messageInfo_JobQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_JobQuery proto.InternalMessageInfo + +func (m *JobQuery) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *JobQuery) GetCompanies() []string { + if m != nil { + return m.Companies + } + return nil +} + +func (m *JobQuery) GetLocationFilters() []*LocationFilter { + if m != nil { + return m.LocationFilters + } + return nil +} + +func (m *JobQuery) GetJobCategories() []JobCategory { + if m != nil { + return m.JobCategories + } + return nil +} + +func (m *JobQuery) GetCommuteFilter() *CommuteFilter { + if m != nil { + return m.CommuteFilter + } + return nil +} + +func (m *JobQuery) GetCompanyDisplayNames() []string { + if m != nil { + return m.CompanyDisplayNames + } + return nil +} + +func (m *JobQuery) GetCompensationFilter() *CompensationFilter { + if m != nil { + return m.CompensationFilter + } + return nil +} + +func (m *JobQuery) GetCustomAttributeFilter() string { + if m != nil { + return m.CustomAttributeFilter + } + return "" +} + +func (m *JobQuery) GetDisableSpellCheck() bool { + if m != nil { + return m.DisableSpellCheck + } + return false +} + +func (m *JobQuery) GetEmploymentTypes() []EmploymentType { + if m != nil { + return m.EmploymentTypes + } + return nil +} + +func (m *JobQuery) GetLanguageCodes() []string { + if m != nil { + return m.LanguageCodes + } + return nil +} + +func (m *JobQuery) GetPublishTimeRange() *TimestampRange { + if m != nil { + return m.PublishTimeRange + } + return nil +} + +func (m *JobQuery) GetExcludedJobs() []string { + 
if m != nil { + return m.ExcludedJobs + } + return nil +} + +// Filters to apply when performing the search query. +type ProfileQuery struct { + // Optional. + // + // Keywords to match any text fields of profiles. + // + // For example, "software engineer in Palo Alto". + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + // Optional. + // + // The location filter specifies geo-regions containing the profiles to + // search against. + // + // If a location filter isn't specified, profiles fitting the other search + // criteria are retrieved regardless of where they're located. + // + // If [LocationFilter.negated][google.cloud.talent.v4beta1.LocationFilter.negated] is specified, the result doesn't contain + // profiles from that location. + // + // For example, search for profiles with addresses in "New York City". + LocationFilters []*LocationFilter `protobuf:"bytes,2,rep,name=location_filters,json=locationFilters,proto3" json:"location_filters,omitempty"` + // Optional. + // + // Job title filter specifies job titles of profiles to match on. + // + // If a job title isn't specified, profiles with any titles are retrieved. + // + // If multiple values are specified, profiles are retrieved with any of the + // specified job titles. + // + // If [JobTitleFilter.negated][google.cloud.talent.v4beta1.JobTitleFilter.negated] is specified, the result won't contain + // profiles with the job titles. + // + // For example, search for profiles with a job title "Product Manager". + JobTitleFilters []*JobTitleFilter `protobuf:"bytes,3,rep,name=job_title_filters,json=jobTitleFilters,proto3" json:"job_title_filters,omitempty"` + // Optional. + // + // Employer filter specifies employers of profiles to match on. + // + // If an employer filter isn't specified, profiles with any employers are + // retrieved. + // + // If multiple employer filters are specified, profiles with any matching + // employers are retrieved. + // + // If [EmployerFilter.negated][google.cloud.talent.v4beta1.EmployerFilter.negated] is specified, the result won't contain + // profiles that match the employers. + // + // For example, search for profiles that have working experience at "Google + // LLC". + EmployerFilters []*EmployerFilter `protobuf:"bytes,4,rep,name=employer_filters,json=employerFilters,proto3" json:"employer_filters,omitempty"` + // Optional. + // + // Education filter specifies education of profiles to match on. + // + // If an education filter isn't specified, profiles with any education are + // retrieved. + // + // If multiple education filters are specified, profiles that match any + // education filters are retrieved. + // + // If [EducationFilter.negated][google.cloud.talent.v4beta1.EducationFilter.negated] is specified, the result won't contain + // profiles that match the educations. + // + // For example, search for profiles with a master degree. + EducationFilters []*EducationFilter `protobuf:"bytes,5,rep,name=education_filters,json=educationFilters,proto3" json:"education_filters,omitempty"` + // Optional. + // + // Skill filter specifies skill of profiles to match on. + // + // If a skill filter isn't specified, profiles with any skills are retrieved. + // + // If multiple skill filters are specified, profiles that match any skill + // filters are retrieved. + // + // If [SkillFilter.negated][google.cloud.talent.v4beta1.SkillFilter.negated] is specified, the result won't contain profiles + // that match the skills. 
+ // + // For example, search for profiles that have "Java" and "Python" in skill + // list. + SkillFilters []*SkillFilter `protobuf:"bytes,6,rep,name=skill_filters,json=skillFilters,proto3" json:"skill_filters,omitempty"` + // Optional. + // + // Work experience filter specifies the total working experience of profiles + // to match on. + // + // If a work experience filter isn't specified, profiles with any + // professional experience are retrieved. + // + // If multiple work experience filters are specified, profiles that match any + // work experience filters are retrieved. + // + // For example, search for profiles with 10 years of work experience. + WorkExperienceFilter []*WorkExperienceFilter `protobuf:"bytes,7,rep,name=work_experience_filter,json=workExperienceFilter,proto3" json:"work_experience_filter,omitempty"` + // Optional. + // + // Time filter specifies the create/update timestamp of the profiles to match + // on. + // + // For example, search for profiles created since "2018-1-1". + TimeFilters []*TimeFilter `protobuf:"bytes,8,rep,name=time_filters,json=timeFilters,proto3" json:"time_filters,omitempty"` + // Optional. + // + // The hirable filter specifies the profile's hirable status to match on. + HirableFilter *wrappers.BoolValue `protobuf:"bytes,9,opt,name=hirable_filter,json=hirableFilter,proto3" json:"hirable_filter,omitempty"` + // Optional. + // + // The application date filters specify application date ranges to match on. + ApplicationDateFilters []*ApplicationDateFilter `protobuf:"bytes,10,rep,name=application_date_filters,json=applicationDateFilters,proto3" json:"application_date_filters,omitempty"` + // Optional. + // + // The application outcome notes filters specify the notes for the outcome of + // the job application. + ApplicationOutcomeNotesFilters []*ApplicationOutcomeNotesFilter `protobuf:"bytes,11,rep,name=application_outcome_notes_filters,json=applicationOutcomeNotesFilters,proto3" json:"application_outcome_notes_filters,omitempty"` + // Optional. + // + // The application job filters specify the job applied for in the application. + ApplicationJobFilters []*ApplicationJobFilter `protobuf:"bytes,13,rep,name=application_job_filters,json=applicationJobFilters,proto3" json:"application_job_filters,omitempty"` + // Optional. + // + // This filter specifies a structured syntax to match against the + // [Profile.custom_attributes][google.cloud.talent.v4beta1.Profile.custom_attributes] that are marked as `filterable`. + // + // The syntax for this expression is a subset of Google SQL syntax. + // + // String custom attributes: supported operators are =, != where the left of + // the operator is a custom field key and the right of the operator is a + // string (surrounded by quotes) value. + // + // Numeric custom attributes: Supported operators are '>', '<' or '=' + // operators where the left of the operator is a custom field key and the + // right of the operator is a numeric value. + // + // Supported functions are LOWER() to + // perform case insensitive match and EMPTY() to filter on the + // existence of a key. + // + // Boolean expressions (AND/OR/NOT) are supported up to 3 levels of + // nesting (for example "((A AND B AND C) OR NOT D) AND E"), and there can be + // a maximum of 50 comparisons/functions in the expression. The expression + // must be < 2000 characters in length. 
+ // + // Sample Query: + // (key1 = "TEST" OR LOWER(key1)="test" OR NOT EMPTY(key1)) + CustomAttributeFilter string `protobuf:"bytes,15,opt,name=custom_attribute_filter,json=customAttributeFilter,proto3" json:"custom_attribute_filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProfileQuery) Reset() { *m = ProfileQuery{} } +func (m *ProfileQuery) String() string { return proto.CompactTextString(m) } +func (*ProfileQuery) ProtoMessage() {} +func (*ProfileQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{1} +} +func (m *ProfileQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProfileQuery.Unmarshal(m, b) +} +func (m *ProfileQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProfileQuery.Marshal(b, m, deterministic) +} +func (dst *ProfileQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProfileQuery.Merge(dst, src) +} +func (m *ProfileQuery) XXX_Size() int { + return xxx_messageInfo_ProfileQuery.Size(m) +} +func (m *ProfileQuery) XXX_DiscardUnknown() { + xxx_messageInfo_ProfileQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_ProfileQuery proto.InternalMessageInfo + +func (m *ProfileQuery) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *ProfileQuery) GetLocationFilters() []*LocationFilter { + if m != nil { + return m.LocationFilters + } + return nil +} + +func (m *ProfileQuery) GetJobTitleFilters() []*JobTitleFilter { + if m != nil { + return m.JobTitleFilters + } + return nil +} + +func (m *ProfileQuery) GetEmployerFilters() []*EmployerFilter { + if m != nil { + return m.EmployerFilters + } + return nil +} + +func (m *ProfileQuery) GetEducationFilters() []*EducationFilter { + if m != nil { + return m.EducationFilters + } + return nil +} + +func (m *ProfileQuery) GetSkillFilters() []*SkillFilter { + if m != nil { + return m.SkillFilters + } + return nil +} + +func (m *ProfileQuery) GetWorkExperienceFilter() []*WorkExperienceFilter { + if m != nil { + return m.WorkExperienceFilter + } + return nil +} + +func (m *ProfileQuery) GetTimeFilters() []*TimeFilter { + if m != nil { + return m.TimeFilters + } + return nil +} + +func (m *ProfileQuery) GetHirableFilter() *wrappers.BoolValue { + if m != nil { + return m.HirableFilter + } + return nil +} + +func (m *ProfileQuery) GetApplicationDateFilters() []*ApplicationDateFilter { + if m != nil { + return m.ApplicationDateFilters + } + return nil +} + +func (m *ProfileQuery) GetApplicationOutcomeNotesFilters() []*ApplicationOutcomeNotesFilter { + if m != nil { + return m.ApplicationOutcomeNotesFilters + } + return nil +} + +func (m *ProfileQuery) GetApplicationJobFilters() []*ApplicationJobFilter { + if m != nil { + return m.ApplicationJobFilters + } + return nil +} + +func (m *ProfileQuery) GetCustomAttributeFilter() string { + if m != nil { + return m.CustomAttributeFilter + } + return "" +} + +// Input only. +// +// Geographic region of the search. +type LocationFilter struct { + // Optional. + // + // The address name, such as "Mountain View" or "Bay Area". + Address string `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` + // Optional. + // + // CLDR region code of the country/region of the address. This is used + // to address ambiguity of the user-input location, for example, "Liverpool" + // against "Liverpool, NY, US" or "Liverpool, UK". 
+ //
+ // Set this field if all the jobs to search against are from the same region,
+ // or jobs are world-wide, but the job seeker is from a specific region.
+ //
+ // See http://cldr.unicode.org/ and
+ // http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html
+ // for details. Example: "CH" for Switzerland.
+ RegionCode string `protobuf:"bytes,2,opt,name=region_code,json=regionCode,proto3" json:"region_code,omitempty"`
+ // Optional.
+ //
+ // The latitude and longitude of the geographic center from which to
+ // search. This field is ignored if `address` is provided.
+ LatLng *latlng.LatLng `protobuf:"bytes,3,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"`
+ // Optional.
+ //
+ //
+ // The distance_in_miles is applied when the location being searched for is
+ // identified as a city or smaller. When the location being searched for is a
+ // state or larger, this field is ignored.
+ DistanceInMiles float64 `protobuf:"fixed64,4,opt,name=distance_in_miles,json=distanceInMiles,proto3" json:"distance_in_miles,omitempty"`
+ // Optional.
+ //
+ // Allows the client to return jobs without a
+ // set location, specifically, telecommuting jobs (telecommuting is considered
+ // by the service as a special location).
+ // [Job.posting_region][google.cloud.talent.v4beta1.Job.posting_region] indicates if a job permits telecommuting.
+ // If this field is set to [TelecommutePreference.TELECOMMUTE_ALLOWED][google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference.TELECOMMUTE_ALLOWED],
+ // telecommuting jobs are searched, and [address][google.cloud.talent.v4beta1.LocationFilter.address] and [lat_lng][google.cloud.talent.v4beta1.LocationFilter.lat_lng] are
+ // ignored. If not set or set to
+ // [TelecommutePreference.TELECOMMUTE_EXCLUDED][google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference.TELECOMMUTE_EXCLUDED], telecommute jobs are not
+ // searched.
+ //
+ // This filter can be used by itself to search exclusively for telecommuting
+ // jobs, or it can be combined with another location
+ // filter to search for a combination of job locations,
+ // such as "Mountain View" or "telecommuting" jobs. However, when used in
+ // combination with other location filters, telecommuting jobs can be
+ // treated as less relevant than other jobs in the search response.
+ TelecommutePreference LocationFilter_TelecommutePreference `protobuf:"varint,5,opt,name=telecommute_preference,json=telecommutePreference,proto3,enum=google.cloud.talent.v4beta1.LocationFilter_TelecommutePreference" json:"telecommute_preference,omitempty"`
+ // Optional.
+ //
+ // Whether to apply negation to the filter so profiles matching the filter
+ // are excluded.
+ //
+ // Currently only supported in profile search.
+ Negated bool `protobuf:"varint,6,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationFilter) Reset() { *m = LocationFilter{} } +func (m *LocationFilter) String() string { return proto.CompactTextString(m) } +func (*LocationFilter) ProtoMessage() {} +func (*LocationFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{2} +} +func (m *LocationFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationFilter.Unmarshal(m, b) +} +func (m *LocationFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationFilter.Marshal(b, m, deterministic) +} +func (dst *LocationFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationFilter.Merge(dst, src) +} +func (m *LocationFilter) XXX_Size() int { + return xxx_messageInfo_LocationFilter.Size(m) +} +func (m *LocationFilter) XXX_DiscardUnknown() { + xxx_messageInfo_LocationFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationFilter proto.InternalMessageInfo + +func (m *LocationFilter) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *LocationFilter) GetRegionCode() string { + if m != nil { + return m.RegionCode + } + return "" +} + +func (m *LocationFilter) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +func (m *LocationFilter) GetDistanceInMiles() float64 { + if m != nil { + return m.DistanceInMiles + } + return 0 +} + +func (m *LocationFilter) GetTelecommutePreference() LocationFilter_TelecommutePreference { + if m != nil { + return m.TelecommutePreference + } + return LocationFilter_TELECOMMUTE_PREFERENCE_UNSPECIFIED +} + +func (m *LocationFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Filter on job compensation type and amount. +type CompensationFilter struct { + // Required. + // + // Type of filter. + Type CompensationFilter_FilterType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.CompensationFilter_FilterType" json:"type,omitempty"` + // Required. + // + // Specify desired `base compensation entry's` + // [CompensationInfo.CompensationUnit][google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit]. + Units []CompensationInfo_CompensationUnit `protobuf:"varint,2,rep,packed,name=units,proto3,enum=google.cloud.talent.v4beta1.CompensationInfo_CompensationUnit" json:"units,omitempty"` + // Optional. + // + // Compensation range. + Range *CompensationInfo_CompensationRange `protobuf:"bytes,3,opt,name=range,proto3" json:"range,omitempty"` + // Optional. + // + // Whether to include jobs whose compensation range is unspecified. 
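To make the relationship between the LocationFilter fields above concrete, here is an illustrative sketch (not taken from the vendored file) that asks for jobs within 25 miles of a named address while still admitting telecommuting postings. The LocationFilter_TELECOMMUTE_ALLOWED constant name is assumed from the LocationFilter_<VALUE> pattern this generated code uses; only the _UNSPECIFIED value appears verbatim in this file.

package example

import (
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

// nearMountainView filters for jobs within 25 miles of the given address and
// also admits telecommuting postings. LatLng is omitted because, per the field
// comment above, it is ignored whenever Address is set.
func nearMountainView() *talent.LocationFilter {
	return &talent.LocationFilter{
		Address:               "Mountain View, CA",
		RegionCode:            "US",
		DistanceInMiles:       25,
		TelecommutePreference: talent.LocationFilter_TELECOMMUTE_ALLOWED, // assumed constant name
	}
}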
+ IncludeJobsWithUnspecifiedCompensationRange bool `protobuf:"varint,4,opt,name=include_jobs_with_unspecified_compensation_range,json=includeJobsWithUnspecifiedCompensationRange,proto3" json:"include_jobs_with_unspecified_compensation_range,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompensationFilter) Reset() { *m = CompensationFilter{} } +func (m *CompensationFilter) String() string { return proto.CompactTextString(m) } +func (*CompensationFilter) ProtoMessage() {} +func (*CompensationFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{3} +} +func (m *CompensationFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompensationFilter.Unmarshal(m, b) +} +func (m *CompensationFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompensationFilter.Marshal(b, m, deterministic) +} +func (dst *CompensationFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompensationFilter.Merge(dst, src) +} +func (m *CompensationFilter) XXX_Size() int { + return xxx_messageInfo_CompensationFilter.Size(m) +} +func (m *CompensationFilter) XXX_DiscardUnknown() { + xxx_messageInfo_CompensationFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_CompensationFilter proto.InternalMessageInfo + +func (m *CompensationFilter) GetType() CompensationFilter_FilterType { + if m != nil { + return m.Type + } + return CompensationFilter_FILTER_TYPE_UNSPECIFIED +} + +func (m *CompensationFilter) GetUnits() []CompensationInfo_CompensationUnit { + if m != nil { + return m.Units + } + return nil +} + +func (m *CompensationFilter) GetRange() *CompensationInfo_CompensationRange { + if m != nil { + return m.Range + } + return nil +} + +func (m *CompensationFilter) GetIncludeJobsWithUnspecifiedCompensationRange() bool { + if m != nil { + return m.IncludeJobsWithUnspecifiedCompensationRange + } + return false +} + +// Input only. +// +// Parameters needed for commute search. +type CommuteFilter struct { + // Required. + // + // The method of transportation for which to calculate the commute time. + CommuteMethod CommuteMethod `protobuf:"varint,1,opt,name=commute_method,json=commuteMethod,proto3,enum=google.cloud.talent.v4beta1.CommuteMethod" json:"commute_method,omitempty"` + // Required. + // + // The latitude and longitude of the location from which to calculate the + // commute time. + StartCoordinates *latlng.LatLng `protobuf:"bytes,2,opt,name=start_coordinates,json=startCoordinates,proto3" json:"start_coordinates,omitempty"` + // Required. + // + // The maximum travel time in seconds. The maximum allowed value is `3600s` + // (one hour). Format is `123s`. + TravelDuration *duration.Duration `protobuf:"bytes,3,opt,name=travel_duration,json=travelDuration,proto3" json:"travel_duration,omitempty"` + // Optional. + // If `true`, jobs without street level addresses may also be returned. + // For city level addresses, the city center is used. For state and coarser + // level addresses, text matching is used. + // If this field is set to `false` or isn't specified, only jobs that include + // street level addresses will be returned by commute search. + AllowImpreciseAddresses bool `protobuf:"varint,4,opt,name=allow_imprecise_addresses,json=allowImpreciseAddresses,proto3" json:"allow_imprecise_addresses,omitempty"` + // Optional. + // + // Traffic factor to take into account while searching by commute. 
+ // + // Types that are valid to be assigned to TrafficOption: + // *CommuteFilter_RoadTraffic_ + // *CommuteFilter_DepartureTime + TrafficOption isCommuteFilter_TrafficOption `protobuf_oneof:"traffic_option"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommuteFilter) Reset() { *m = CommuteFilter{} } +func (m *CommuteFilter) String() string { return proto.CompactTextString(m) } +func (*CommuteFilter) ProtoMessage() {} +func (*CommuteFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{4} +} +func (m *CommuteFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommuteFilter.Unmarshal(m, b) +} +func (m *CommuteFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommuteFilter.Marshal(b, m, deterministic) +} +func (dst *CommuteFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommuteFilter.Merge(dst, src) +} +func (m *CommuteFilter) XXX_Size() int { + return xxx_messageInfo_CommuteFilter.Size(m) +} +func (m *CommuteFilter) XXX_DiscardUnknown() { + xxx_messageInfo_CommuteFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_CommuteFilter proto.InternalMessageInfo + +func (m *CommuteFilter) GetCommuteMethod() CommuteMethod { + if m != nil { + return m.CommuteMethod + } + return CommuteMethod_COMMUTE_METHOD_UNSPECIFIED +} + +func (m *CommuteFilter) GetStartCoordinates() *latlng.LatLng { + if m != nil { + return m.StartCoordinates + } + return nil +} + +func (m *CommuteFilter) GetTravelDuration() *duration.Duration { + if m != nil { + return m.TravelDuration + } + return nil +} + +func (m *CommuteFilter) GetAllowImpreciseAddresses() bool { + if m != nil { + return m.AllowImpreciseAddresses + } + return false +} + +type isCommuteFilter_TrafficOption interface { + isCommuteFilter_TrafficOption() +} + +type CommuteFilter_RoadTraffic_ struct { + RoadTraffic CommuteFilter_RoadTraffic `protobuf:"varint,5,opt,name=road_traffic,json=roadTraffic,proto3,enum=google.cloud.talent.v4beta1.CommuteFilter_RoadTraffic,oneof"` +} + +type CommuteFilter_DepartureTime struct { + DepartureTime *timeofday.TimeOfDay `protobuf:"bytes,6,opt,name=departure_time,json=departureTime,proto3,oneof"` +} + +func (*CommuteFilter_RoadTraffic_) isCommuteFilter_TrafficOption() {} + +func (*CommuteFilter_DepartureTime) isCommuteFilter_TrafficOption() {} + +func (m *CommuteFilter) GetTrafficOption() isCommuteFilter_TrafficOption { + if m != nil { + return m.TrafficOption + } + return nil +} + +func (m *CommuteFilter) GetRoadTraffic() CommuteFilter_RoadTraffic { + if x, ok := m.GetTrafficOption().(*CommuteFilter_RoadTraffic_); ok { + return x.RoadTraffic + } + return CommuteFilter_ROAD_TRAFFIC_UNSPECIFIED +} + +func (m *CommuteFilter) GetDepartureTime() *timeofday.TimeOfDay { + if x, ok := m.GetTrafficOption().(*CommuteFilter_DepartureTime); ok { + return x.DepartureTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
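TrafficOption is a oneof, so a caller supplies exactly one of the wrapper structs above and reads the value back through the typed getters. The following sketch is illustrative only: CommuteMethod_DRIVING is assumed to exist alongside the _UNSPECIFIED constant shown above, and the conventional genproto/ptypes import paths are assumed.

package example

import (
	"github.com/golang/protobuf/ptypes/duration"
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	"google.golang.org/genproto/googleapis/type/latlng"
	"google.golang.org/genproto/googleapis/type/timeofday"
)

// drivingCommute limits results to jobs within a 30-minute drive of the given
// coordinates, evaluated for an 08:30 departure. The departure time travels in
// the TrafficOption oneof via its CommuteFilter_DepartureTime wrapper.
func drivingCommute(lat, lng float64) *talent.CommuteFilter {
	return &talent.CommuteFilter{
		CommuteMethod:    talent.CommuteMethod_DRIVING, // assumed enum value
		StartCoordinates: &latlng.LatLng{Latitude: lat, Longitude: lng},
		TravelDuration:   &duration.Duration{Seconds: 1800},
		TrafficOption: &talent.CommuteFilter_DepartureTime{
			DepartureTime: &timeofday.TimeOfDay{Hours: 8, Minutes: 30},
		},
	}
}

// departureOf reads the oneof back through the generated getter; it returns
// nil when a road-traffic option (or nothing) was set instead.
func departureOf(f *talent.CommuteFilter) *timeofday.TimeOfDay {
	return f.GetDepartureTime()
}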
+func (*CommuteFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommuteFilter_OneofMarshaler, _CommuteFilter_OneofUnmarshaler, _CommuteFilter_OneofSizer, []interface{}{ + (*CommuteFilter_RoadTraffic_)(nil), + (*CommuteFilter_DepartureTime)(nil), + } +} + +func _CommuteFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommuteFilter) + // traffic_option + switch x := m.TrafficOption.(type) { + case *CommuteFilter_RoadTraffic_: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RoadTraffic)) + case *CommuteFilter_DepartureTime: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DepartureTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CommuteFilter.TrafficOption has unexpected type %T", x) + } + return nil +} + +func _CommuteFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommuteFilter) + switch tag { + case 5: // traffic_option.road_traffic + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TrafficOption = &CommuteFilter_RoadTraffic_{CommuteFilter_RoadTraffic(x)} + return true, err + case 6: // traffic_option.departure_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timeofday.TimeOfDay) + err := b.DecodeMessage(msg) + m.TrafficOption = &CommuteFilter_DepartureTime{msg} + return true, err + default: + return false, nil + } +} + +func _CommuteFilter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommuteFilter) + // traffic_option + switch x := m.TrafficOption.(type) { + case *CommuteFilter_RoadTraffic_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RoadTraffic)) + case *CommuteFilter_DepartureTime: + s := proto.Size(x.DepartureTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Input only. +// +// Job title of the search. +type JobTitleFilter struct { + // Required. + // + // The job title, for example, "Software engineer", or "Product manager". + JobTitle string `protobuf:"bytes,1,opt,name=job_title,json=jobTitle,proto3" json:"job_title,omitempty"` + // Optional. + // + // Whether to apply negation to the filter so profiles matching the filter + // are excluded. 
+ Negated bool `protobuf:"varint,2,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobTitleFilter) Reset() { *m = JobTitleFilter{} } +func (m *JobTitleFilter) String() string { return proto.CompactTextString(m) } +func (*JobTitleFilter) ProtoMessage() {} +func (*JobTitleFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{5} +} +func (m *JobTitleFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobTitleFilter.Unmarshal(m, b) +} +func (m *JobTitleFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobTitleFilter.Marshal(b, m, deterministic) +} +func (dst *JobTitleFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobTitleFilter.Merge(dst, src) +} +func (m *JobTitleFilter) XXX_Size() int { + return xxx_messageInfo_JobTitleFilter.Size(m) +} +func (m *JobTitleFilter) XXX_DiscardUnknown() { + xxx_messageInfo_JobTitleFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_JobTitleFilter proto.InternalMessageInfo + +func (m *JobTitleFilter) GetJobTitle() string { + if m != nil { + return m.JobTitle + } + return "" +} + +func (m *JobTitleFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Skill filter of the search. +type SkillFilter struct { + // Required. + // + // The skill name. For example, "java", "j2ee", and so on. + Skill string `protobuf:"bytes,1,opt,name=skill,proto3" json:"skill,omitempty"` + // Optional. + // + // Whether to apply negation to the filter so profiles matching the filter + // are excluded. + Negated bool `protobuf:"varint,2,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkillFilter) Reset() { *m = SkillFilter{} } +func (m *SkillFilter) String() string { return proto.CompactTextString(m) } +func (*SkillFilter) ProtoMessage() {} +func (*SkillFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{6} +} +func (m *SkillFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkillFilter.Unmarshal(m, b) +} +func (m *SkillFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkillFilter.Marshal(b, m, deterministic) +} +func (dst *SkillFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkillFilter.Merge(dst, src) +} +func (m *SkillFilter) XXX_Size() int { + return xxx_messageInfo_SkillFilter.Size(m) +} +func (m *SkillFilter) XXX_DiscardUnknown() { + xxx_messageInfo_SkillFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_SkillFilter proto.InternalMessageInfo + +func (m *SkillFilter) GetSkill() string { + if m != nil { + return m.Skill + } + return "" +} + +func (m *SkillFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Employer filter of the search. +type EmployerFilter struct { + // Required. + // + // The name of the employer, for example "Google", "Alphabet". + Employer string `protobuf:"bytes,1,opt,name=employer,proto3" json:"employer,omitempty"` + // Optional. + // + // Define set of [EmploymentRecord][google.cloud.talent.v4beta1.EmploymentRecord]s to search against. 
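Combined with the ProfileQuery message defined earlier in this file, the title and skill filters compose as in the sketch below (illustrative, not part of the generated code): it retrieves profiles titled "Product Manager" that do not list "PHP" as a skill, using the Negated flag documented above.

package example

import (
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

// productManagersWithoutPHP combines a job-title filter with a negated skill
// filter. Both fields are repeated, so further filters can simply be appended.
func productManagersWithoutPHP() *talent.ProfileQuery {
	return &talent.ProfileQuery{
		JobTitleFilters: []*talent.JobTitleFilter{
			{JobTitle: "Product Manager"},
		},
		SkillFilters: []*talent.SkillFilter{
			{Skill: "PHP", Negated: true},
		},
	}
}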
+ // + // Defaults to [EmployerFilterMode.ALL_EMPLOYMENT_RECORDS][google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode.ALL_EMPLOYMENT_RECORDS]. + Mode EmployerFilter_EmployerFilterMode `protobuf:"varint,2,opt,name=mode,proto3,enum=google.cloud.talent.v4beta1.EmployerFilter_EmployerFilterMode" json:"mode,omitempty"` + // Optional. + // + // Whether to apply negation to the filter so profiles matching the filter + // is excluded. + Negated bool `protobuf:"varint,3,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EmployerFilter) Reset() { *m = EmployerFilter{} } +func (m *EmployerFilter) String() string { return proto.CompactTextString(m) } +func (*EmployerFilter) ProtoMessage() {} +func (*EmployerFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{7} +} +func (m *EmployerFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EmployerFilter.Unmarshal(m, b) +} +func (m *EmployerFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EmployerFilter.Marshal(b, m, deterministic) +} +func (dst *EmployerFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_EmployerFilter.Merge(dst, src) +} +func (m *EmployerFilter) XXX_Size() int { + return xxx_messageInfo_EmployerFilter.Size(m) +} +func (m *EmployerFilter) XXX_DiscardUnknown() { + xxx_messageInfo_EmployerFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_EmployerFilter proto.InternalMessageInfo + +func (m *EmployerFilter) GetEmployer() string { + if m != nil { + return m.Employer + } + return "" +} + +func (m *EmployerFilter) GetMode() EmployerFilter_EmployerFilterMode { + if m != nil { + return m.Mode + } + return EmployerFilter_EMPLOYER_FILTER_MODE_UNSPECIFIED +} + +func (m *EmployerFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Education filter of the search. +type EducationFilter struct { + // Optional. + // + // The school name. For example "MIT", "University of California, Berkeley". + School string `protobuf:"bytes,1,opt,name=school,proto3" json:"school,omitempty"` + // Optional. + // + // The field of study. This is to search against value provided in + // [Degree.fields_of_study][google.cloud.talent.v4beta1.Degree.fields_of_study]. + // For example "Computer Science", "Mathematics". + FieldOfStudy string `protobuf:"bytes,2,opt,name=field_of_study,json=fieldOfStudy,proto3" json:"field_of_study,omitempty"` + // Optional. + // + // Education degree in ISCED code. Each value in degree covers a specific + // level of education, without any expansion to upper nor lower levels of + // education degree. + DegreeType DegreeType `protobuf:"varint,3,opt,name=degree_type,json=degreeType,proto3,enum=google.cloud.talent.v4beta1.DegreeType" json:"degree_type,omitempty"` + // Optional. + // + // Whether to apply negation to the filter so profiles matching the filter + // is excluded. 
+ Negated bool `protobuf:"varint,6,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EducationFilter) Reset() { *m = EducationFilter{} } +func (m *EducationFilter) String() string { return proto.CompactTextString(m) } +func (*EducationFilter) ProtoMessage() {} +func (*EducationFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{8} +} +func (m *EducationFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EducationFilter.Unmarshal(m, b) +} +func (m *EducationFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EducationFilter.Marshal(b, m, deterministic) +} +func (dst *EducationFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_EducationFilter.Merge(dst, src) +} +func (m *EducationFilter) XXX_Size() int { + return xxx_messageInfo_EducationFilter.Size(m) +} +func (m *EducationFilter) XXX_DiscardUnknown() { + xxx_messageInfo_EducationFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_EducationFilter proto.InternalMessageInfo + +func (m *EducationFilter) GetSchool() string { + if m != nil { + return m.School + } + return "" +} + +func (m *EducationFilter) GetFieldOfStudy() string { + if m != nil { + return m.FieldOfStudy + } + return "" +} + +func (m *EducationFilter) GetDegreeType() DegreeType { + if m != nil { + return m.DegreeType + } + return DegreeType_DEGREE_TYPE_UNSPECIFIED +} + +func (m *EducationFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Work experience filter. +// +// This filter is used to search for profiles with working experience length +// between [min_experience][google.cloud.talent.v4beta1.WorkExperienceFilter.min_experience] and [max_experience][google.cloud.talent.v4beta1.WorkExperienceFilter.max_experience]. +type WorkExperienceFilter struct { + // Optional. + // + // The minimum duration of the work experience (inclusive). + MinExperience *duration.Duration `protobuf:"bytes,1,opt,name=min_experience,json=minExperience,proto3" json:"min_experience,omitempty"` + // Optional. + // + // The maximum duration of the work experience (exclusive). 
+ MaxExperience *duration.Duration `protobuf:"bytes,2,opt,name=max_experience,json=maxExperience,proto3" json:"max_experience,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkExperienceFilter) Reset() { *m = WorkExperienceFilter{} } +func (m *WorkExperienceFilter) String() string { return proto.CompactTextString(m) } +func (*WorkExperienceFilter) ProtoMessage() {} +func (*WorkExperienceFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{9} +} +func (m *WorkExperienceFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkExperienceFilter.Unmarshal(m, b) +} +func (m *WorkExperienceFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkExperienceFilter.Marshal(b, m, deterministic) +} +func (dst *WorkExperienceFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkExperienceFilter.Merge(dst, src) +} +func (m *WorkExperienceFilter) XXX_Size() int { + return xxx_messageInfo_WorkExperienceFilter.Size(m) +} +func (m *WorkExperienceFilter) XXX_DiscardUnknown() { + xxx_messageInfo_WorkExperienceFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkExperienceFilter proto.InternalMessageInfo + +func (m *WorkExperienceFilter) GetMinExperience() *duration.Duration { + if m != nil { + return m.MinExperience + } + return nil +} + +func (m *WorkExperienceFilter) GetMaxExperience() *duration.Duration { + if m != nil { + return m.MaxExperience + } + return nil +} + +// Input only. +// +// Application Date Range Filter. +// +// The API matches profiles with [Application.application_date][google.cloud.talent.v4beta1.Application.application_date] between +// start date and end date (both boundaries are inclusive). The filter is +// ignored if both [start_date][google.cloud.talent.v4beta1.ApplicationDateFilter.start_date] and [end_date][google.cloud.talent.v4beta1.ApplicationDateFilter.end_date] are missing. +type ApplicationDateFilter struct { + // Optional. + // + // Start date. If it's missing, The API matches profiles with application date + // not after the end date. + StartDate *date.Date `protobuf:"bytes,1,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Optional. + // + // End date. If it's missing, The API matches profiles with application date + // not before the start date. 
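Both experience bounds are protobuf Durations rather than year counts, so a caller has to convert. A sketch follows (illustrative only; the seconds-per-year constant is a rough conversion, not something defined by this package):

package example

import (
	"github.com/golang/protobuf/ptypes/duration"
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

const secondsPerYear = 365 * 24 * 60 * 60 // rough conversion, ignores leap days

// fiveToTenYears matches profiles with between five (inclusive) and ten
// (exclusive) years of total work experience, expressed as Durations.
func fiveToTenYears() *talent.WorkExperienceFilter {
	return &talent.WorkExperienceFilter{
		MinExperience: &duration.Duration{Seconds: 5 * secondsPerYear},
		MaxExperience: &duration.Duration{Seconds: 10 * secondsPerYear},
	}
}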
+ EndDate *date.Date `protobuf:"bytes,2,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplicationDateFilter) Reset() { *m = ApplicationDateFilter{} } +func (m *ApplicationDateFilter) String() string { return proto.CompactTextString(m) } +func (*ApplicationDateFilter) ProtoMessage() {} +func (*ApplicationDateFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{10} +} +func (m *ApplicationDateFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplicationDateFilter.Unmarshal(m, b) +} +func (m *ApplicationDateFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplicationDateFilter.Marshal(b, m, deterministic) +} +func (dst *ApplicationDateFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplicationDateFilter.Merge(dst, src) +} +func (m *ApplicationDateFilter) XXX_Size() int { + return xxx_messageInfo_ApplicationDateFilter.Size(m) +} +func (m *ApplicationDateFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ApplicationDateFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplicationDateFilter proto.InternalMessageInfo + +func (m *ApplicationDateFilter) GetStartDate() *date.Date { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *ApplicationDateFilter) GetEndDate() *date.Date { + if m != nil { + return m.EndDate + } + return nil +} + +// Input only. +// +// Outcome Notes Filter. +type ApplicationOutcomeNotesFilter struct { + // Required. + // + // User entered or selected outcome reason. The API does an exact match on the + // [Application.outcome_notes][google.cloud.talent.v4beta1.Application.outcome_notes] in profiles. + OutcomeNotes string `protobuf:"bytes,1,opt,name=outcome_notes,json=outcomeNotes,proto3" json:"outcome_notes,omitempty"` + // Optional. + // + // If true, The API excludes all candidates with any + // [Application.outcome_notes][google.cloud.talent.v4beta1.Application.outcome_notes] matching the outcome reason specified in + // the filter. 
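Because either boundary of the application date range may be omitted, an open-ended range is expressed by setting only one of the two dates. An illustrative sketch, assuming the conventional genproto import path for the Date type:

package example

import (
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
	"google.golang.org/genproto/googleapis/type/date"
)

// appliedSince2018 matches profiles whose application date is on or after
// January 1, 2018; EndDate is left unset, so no upper bound is applied.
func appliedSince2018() *talent.ApplicationDateFilter {
	return &talent.ApplicationDateFilter{
		StartDate: &date.Date{Year: 2018, Month: 1, Day: 1},
	}
}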
+ Negated bool `protobuf:"varint,2,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplicationOutcomeNotesFilter) Reset() { *m = ApplicationOutcomeNotesFilter{} } +func (m *ApplicationOutcomeNotesFilter) String() string { return proto.CompactTextString(m) } +func (*ApplicationOutcomeNotesFilter) ProtoMessage() {} +func (*ApplicationOutcomeNotesFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{11} +} +func (m *ApplicationOutcomeNotesFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplicationOutcomeNotesFilter.Unmarshal(m, b) +} +func (m *ApplicationOutcomeNotesFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplicationOutcomeNotesFilter.Marshal(b, m, deterministic) +} +func (dst *ApplicationOutcomeNotesFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplicationOutcomeNotesFilter.Merge(dst, src) +} +func (m *ApplicationOutcomeNotesFilter) XXX_Size() int { + return xxx_messageInfo_ApplicationOutcomeNotesFilter.Size(m) +} +func (m *ApplicationOutcomeNotesFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ApplicationOutcomeNotesFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplicationOutcomeNotesFilter proto.InternalMessageInfo + +func (m *ApplicationOutcomeNotesFilter) GetOutcomeNotes() string { + if m != nil { + return m.OutcomeNotes + } + return "" +} + +func (m *ApplicationOutcomeNotesFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Filter on the job information of Application. +type ApplicationJobFilter struct { + // Optional. + // + // The job requisition id in the application. The API does an exact match on + // the [Job.requisistion_id][] of [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. + JobRequisitionId string `protobuf:"bytes,2,opt,name=job_requisition_id,json=jobRequisitionId,proto3" json:"job_requisition_id,omitempty"` + // Optional. + // + // The job title in the application. The API does an exact match on the + // [Job.title][google.cloud.talent.v4beta1.Job.title] of [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. + JobTitle string `protobuf:"bytes,3,opt,name=job_title,json=jobTitle,proto3" json:"job_title,omitempty"` + // Optional. + // + // If true, the API excludes all profiles with any [Application.job][google.cloud.talent.v4beta1.Application.job] + // matching the filters. 
+ Negated bool `protobuf:"varint,4,opt,name=negated,proto3" json:"negated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApplicationJobFilter) Reset() { *m = ApplicationJobFilter{} } +func (m *ApplicationJobFilter) String() string { return proto.CompactTextString(m) } +func (*ApplicationJobFilter) ProtoMessage() {} +func (*ApplicationJobFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{12} +} +func (m *ApplicationJobFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApplicationJobFilter.Unmarshal(m, b) +} +func (m *ApplicationJobFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApplicationJobFilter.Marshal(b, m, deterministic) +} +func (dst *ApplicationJobFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApplicationJobFilter.Merge(dst, src) +} +func (m *ApplicationJobFilter) XXX_Size() int { + return xxx_messageInfo_ApplicationJobFilter.Size(m) +} +func (m *ApplicationJobFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ApplicationJobFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ApplicationJobFilter proto.InternalMessageInfo + +func (m *ApplicationJobFilter) GetJobRequisitionId() string { + if m != nil { + return m.JobRequisitionId + } + return "" +} + +func (m *ApplicationJobFilter) GetJobTitle() string { + if m != nil { + return m.JobTitle + } + return "" +} + +func (m *ApplicationJobFilter) GetNegated() bool { + if m != nil { + return m.Negated + } + return false +} + +// Input only. +// +// Filter on create timestamp or update timestamp of profiles. +type TimeFilter struct { + // Optional. + // + // Start timestamp, matching profiles with the start time. If this field + // missing, The API matches profiles with create / update timestamp before the + // end timestamp. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Optional. + // + // End timestamp, matching profiles with the end time. If this field + // missing, The API matches profiles with create / update timestamp after the + // start timestamp. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Optional. + // + // Specifies which time field to filter profiles. + // + // Defaults to [TimeField.CREATE_TIME][google.cloud.talent.v4beta1.TimeFilter.TimeField.CREATE_TIME]. 
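A sketch of typical open-ended usage of TimeFilter (illustrative, not from the generated file): only StartTime is set, and the time field is left at its zero value, which the comment above says defaults to CREATE_TIME. The ptypes timestamp import path is the conventional one.

package example

import (
	"time"

	"github.com/golang/protobuf/ptypes/timestamp"
	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

// lastThirtyDays matches profiles created or updated in the last 30 days.
// EndTime is omitted, so there is no upper bound on the range.
func lastThirtyDays() *talent.TimeFilter {
	start := time.Now().AddDate(0, 0, -30)
	return &talent.TimeFilter{
		StartTime: &timestamp.Timestamp{Seconds: start.Unix()},
	}
}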
+ TimeField TimeFilter_TimeField `protobuf:"varint,3,opt,name=time_field,json=timeField,proto3,enum=google.cloud.talent.v4beta1.TimeFilter_TimeField" json:"time_field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeFilter) Reset() { *m = TimeFilter{} } +func (m *TimeFilter) String() string { return proto.CompactTextString(m) } +func (*TimeFilter) ProtoMessage() {} +func (*TimeFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_filters_4fe13b05da0a66b7, []int{13} +} +func (m *TimeFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeFilter.Unmarshal(m, b) +} +func (m *TimeFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeFilter.Marshal(b, m, deterministic) +} +func (dst *TimeFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeFilter.Merge(dst, src) +} +func (m *TimeFilter) XXX_Size() int { + return xxx_messageInfo_TimeFilter.Size(m) +} +func (m *TimeFilter) XXX_DiscardUnknown() { + xxx_messageInfo_TimeFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeFilter proto.InternalMessageInfo + +func (m *TimeFilter) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimeFilter) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *TimeFilter) GetTimeField() TimeFilter_TimeField { + if m != nil { + return m.TimeField + } + return TimeFilter_TIME_FIELD_UNSPECIFIED +} + +func init() { + proto.RegisterType((*JobQuery)(nil), "google.cloud.talent.v4beta1.JobQuery") + proto.RegisterType((*ProfileQuery)(nil), "google.cloud.talent.v4beta1.ProfileQuery") + proto.RegisterType((*LocationFilter)(nil), "google.cloud.talent.v4beta1.LocationFilter") + proto.RegisterType((*CompensationFilter)(nil), "google.cloud.talent.v4beta1.CompensationFilter") + proto.RegisterType((*CommuteFilter)(nil), "google.cloud.talent.v4beta1.CommuteFilter") + proto.RegisterType((*JobTitleFilter)(nil), "google.cloud.talent.v4beta1.JobTitleFilter") + proto.RegisterType((*SkillFilter)(nil), "google.cloud.talent.v4beta1.SkillFilter") + proto.RegisterType((*EmployerFilter)(nil), "google.cloud.talent.v4beta1.EmployerFilter") + proto.RegisterType((*EducationFilter)(nil), "google.cloud.talent.v4beta1.EducationFilter") + proto.RegisterType((*WorkExperienceFilter)(nil), "google.cloud.talent.v4beta1.WorkExperienceFilter") + proto.RegisterType((*ApplicationDateFilter)(nil), "google.cloud.talent.v4beta1.ApplicationDateFilter") + proto.RegisterType((*ApplicationOutcomeNotesFilter)(nil), "google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter") + proto.RegisterType((*ApplicationJobFilter)(nil), "google.cloud.talent.v4beta1.ApplicationJobFilter") + proto.RegisterType((*TimeFilter)(nil), "google.cloud.talent.v4beta1.TimeFilter") + proto.RegisterEnum("google.cloud.talent.v4beta1.LocationFilter_TelecommutePreference", LocationFilter_TelecommutePreference_name, LocationFilter_TelecommutePreference_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CompensationFilter_FilterType", CompensationFilter_FilterType_name, CompensationFilter_FilterType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.CommuteFilter_RoadTraffic", CommuteFilter_RoadTraffic_name, CommuteFilter_RoadTraffic_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.EmployerFilter_EmployerFilterMode", EmployerFilter_EmployerFilterMode_name, EmployerFilter_EmployerFilterMode_value) + 
proto.RegisterEnum("google.cloud.talent.v4beta1.TimeFilter_TimeField", TimeFilter_TimeField_name, TimeFilter_TimeField_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/filters.proto", fileDescriptor_filters_4fe13b05da0a66b7) +} + +var fileDescriptor_filters_4fe13b05da0a66b7 = []byte{ + // 1993 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0x5b, 0x73, 0x23, 0x47, + 0x15, 0x5e, 0x49, 0x5e, 0xaf, 0x75, 0x74, 0xb1, 0xb6, 0x7d, 0x53, 0xbc, 0x4b, 0xd6, 0x28, 0x09, + 0x98, 0x64, 0x91, 0xb2, 0x06, 0x52, 0x45, 0x28, 0x48, 0x64, 0x69, 0xcc, 0xca, 0xa5, 0x5b, 0xc6, + 0x52, 0x16, 0x87, 0x2a, 0x9a, 0xd6, 0x4c, 0x4b, 0x1e, 0xef, 0x68, 0x7a, 0x32, 0xd3, 0x8a, 0x2d, + 0x78, 0xe2, 0x01, 0x7e, 0x04, 0x14, 0x2f, 0x79, 0xe7, 0x89, 0x07, 0xaa, 0xf8, 0x45, 0xfc, 0x0c, + 0xaa, 0x7b, 0x7a, 0xa4, 0x19, 0x5b, 0x1e, 0x3b, 0x14, 0x4f, 0x9a, 0x73, 0xfb, 0xfa, 0xcc, 0x39, + 0x7d, 0x2e, 0x23, 0xf8, 0xd1, 0x84, 0xb1, 0x89, 0x4d, 0x6b, 0x86, 0xcd, 0x66, 0x66, 0x8d, 0x13, + 0x9b, 0x3a, 0xbc, 0xf6, 0xcd, 0x4f, 0x47, 0x94, 0x93, 0x57, 0xb5, 0xb1, 0x65, 0x73, 0xea, 0xf9, + 0x55, 0xd7, 0x63, 0x9c, 0xa1, 0x67, 0x81, 0x6a, 0x55, 0xaa, 0x56, 0x03, 0xd5, 0xaa, 0x52, 0xdd, + 0xff, 0x71, 0x12, 0x0e, 0x71, 0x5d, 0xdb, 0x32, 0x08, 0xb7, 0x98, 0x13, 0x60, 0xed, 0x1f, 0x26, + 0xa9, 0x1b, 0x6c, 0x3a, 0x5d, 0x68, 0x7e, 0x90, 0xa4, 0x79, 0xc9, 0x46, 0x4a, 0x2d, 0xf1, 0x3d, + 0x5c, 0x8f, 0x8d, 0x2d, 0x9b, 0x2a, 0xd5, 0x77, 0x95, 0xaa, 0xa4, 0x46, 0xb3, 0x71, 0xcd, 0x9c, + 0x79, 0x51, 0xdf, 0x5e, 0xdc, 0x94, 0x73, 0x6b, 0x4a, 0x7d, 0x4e, 0xa6, 0xee, 0x5d, 0x00, 0x57, + 0x1e, 0x71, 0xdd, 0x45, 0xa0, 0xf6, 0x77, 0x95, 0x9c, 0xcf, 0x5d, 0x5a, 0x33, 0x09, 0x0f, 0x0f, + 0x2e, 0x47, 0xf9, 0x36, 0xe1, 0xb6, 0x33, 0x51, 0x92, 0x67, 0x51, 0x89, 0x38, 0x8e, 0x8d, 0x4d, + 0x32, 0x57, 0xc2, 0xe7, 0x4a, 0x48, 0x5c, 0xab, 0x46, 0x1c, 0x87, 0x71, 0xe9, 0xac, 0x3a, 0xac, + 0xf2, 0xaf, 0x75, 0xd8, 0x38, 0x65, 0xa3, 0x2f, 0x66, 0xd4, 0x9b, 0xa3, 0x6d, 0x78, 0xfc, 0xb5, + 0x78, 0x28, 0xa7, 0x0e, 0x52, 0x87, 0x59, 0x3d, 0x20, 0xd0, 0x73, 0xc8, 0x1a, 0x6c, 0xea, 0x12, + 0xc7, 0xa2, 0x7e, 0x39, 0x7d, 0x90, 0x39, 0xcc, 0xea, 0x4b, 0x06, 0xfa, 0x12, 0x4a, 0x36, 0x0b, + 0x92, 0x83, 0x55, 0xc2, 0xcb, 0x99, 0x83, 0xcc, 0x61, 0xee, 0xe8, 0xa3, 0x6a, 0x42, 0xc6, 0xab, + 0x6d, 0x65, 0x74, 0x22, 0x6d, 0xf4, 0x4d, 0x3b, 0x46, 0xfb, 0xa8, 0x07, 0xc5, 0x4b, 0x36, 0xc2, + 0x06, 0xe1, 0x74, 0xc2, 0x3c, 0x71, 0xf4, 0xda, 0x41, 0xe6, 0xb0, 0x78, 0x74, 0x98, 0x88, 0x7a, + 0xca, 0x46, 0x8d, 0xc0, 0x62, 0xae, 0x17, 0x2e, 0x17, 0x84, 0x70, 0xf4, 0x0b, 0x28, 0x8a, 0x9b, + 0x31, 0xe3, 0x54, 0xf9, 0x59, 0x7e, 0x7c, 0x90, 0x3a, 0xcc, 0x1d, 0x7d, 0x98, 0x08, 0xd8, 0x08, + 0x4c, 0x94, 0x97, 0x05, 0x23, 0x4a, 0xa2, 0x23, 0xd8, 0x09, 0x02, 0x31, 0xc7, 0xa6, 0xe5, 0xbb, + 0x36, 0x99, 0x63, 0x87, 0x4c, 0xa9, 0x5f, 0x5e, 0x97, 0x51, 0xda, 0x52, 0xc2, 0x66, 0x20, 0xeb, + 0x0a, 0x11, 0xfa, 0x3d, 0x48, 0x36, 0x75, 0xfc, 0x68, 0xcc, 0xca, 0x4f, 0xa4, 0x2f, 0xb5, 0xfb, + 0x7c, 0x59, 0xd8, 0x29, 0x87, 0x90, 0x71, 0x8b, 0x87, 0x3e, 0x81, 0x3d, 0x63, 0xe6, 0x73, 0x36, + 0xc5, 0x84, 0x73, 0xcf, 0x1a, 0x45, 0xde, 0x78, 0x43, 0xe6, 0x75, 0x27, 0x10, 0xd7, 0x43, 0xa9, + 0xb2, 0xab, 0xc2, 0x96, 0x69, 0xf9, 0x64, 0x64, 0x53, 0xec, 0xbb, 0xd4, 0xb6, 0xb1, 0x71, 0x41, + 0x8d, 0xb7, 0xe5, 0xec, 0x41, 0xea, 0x70, 0x43, 0x7f, 0xaa, 0x44, 0x67, 0x42, 0xd2, 0x10, 0x02, + 0x91, 0x79, 0x3a, 0x75, 0x6d, 0x36, 0x9f, 0x52, 0x87, 0x63, 0x71, 0xf7, 0xfc, 0x32, 0xc8, 0x1c, + 0x25, 0x67, 0x5e, 0x5b, 0x18, 0x0d, 0xe6, 0x2e, 0xd5, 
0x37, 0x69, 0x8c, 0xf6, 0xd1, 0x07, 0x50, + 0xb4, 0x89, 0x33, 0x99, 0x91, 0x09, 0xc5, 0x06, 0x33, 0xa9, 0x5f, 0xce, 0xc9, 0x70, 0x16, 0x42, + 0x6e, 0x43, 0x30, 0xd1, 0x39, 0x20, 0x77, 0x36, 0xb2, 0x2d, 0xff, 0x02, 0x8b, 0x2b, 0x8f, 0x3d, + 0xe2, 0x4c, 0x68, 0x39, 0x2f, 0xe3, 0x98, 0xec, 0xc0, 0x20, 0x2c, 0x48, 0x5d, 0x98, 0xe8, 0x25, + 0x05, 0x23, 0xd8, 0x92, 0x83, 0xde, 0x83, 0x02, 0xbd, 0x36, 0xec, 0x99, 0x49, 0x4d, 0x7c, 0xc9, + 0x46, 0x7e, 0xb9, 0x20, 0x1d, 0xc8, 0x87, 0xcc, 0x53, 0x36, 0xf2, 0x2b, 0xff, 0xd9, 0x80, 0x7c, + 0x3f, 0xe8, 0x0c, 0x49, 0xd5, 0xb3, 0xaa, 0x3e, 0xd2, 0xff, 0x87, 0xfa, 0x78, 0x03, 0x4f, 0x45, + 0x7d, 0x70, 0x8b, 0xdb, 0xf4, 0x3b, 0x15, 0xde, 0x29, 0x1b, 0x0d, 0x84, 0x51, 0x08, 0x7c, 0x19, + 0xa3, 0xfd, 0x65, 0x5a, 0xa9, 0xb7, 0xc0, 0x5d, 0x7b, 0x00, 0xae, 0xa6, 0x8c, 0x42, 0x5c, 0x1a, + 0xa3, 0x45, 0xbe, 0x9e, 0x52, 0x73, 0x76, 0x23, 0x12, 0x8f, 0x25, 0xf0, 0xcb, 0x64, 0xe0, 0xd0, + 0x4a, 0x21, 0x97, 0x68, 0x9c, 0xe1, 0xa3, 0x0e, 0x14, 0xfc, 0xb7, 0x96, 0x6d, 0x2f, 0x60, 0xd7, + 0x25, 0x6c, 0x72, 0xab, 0x38, 0x13, 0x16, 0x0a, 0x32, 0xef, 0x2f, 0x09, 0x1f, 0x4d, 0x60, 0xf7, + 0x8a, 0x79, 0x6f, 0x31, 0xbd, 0x76, 0xa9, 0x67, 0x51, 0xc7, 0xa0, 0xcb, 0x2a, 0x15, 0xb8, 0xaf, + 0x12, 0x71, 0xdf, 0x30, 0xef, 0xad, 0xb6, 0xb0, 0x54, 0x07, 0x6c, 0x5f, 0xad, 0xe0, 0xa2, 0x53, + 0xc8, 0xcb, 0xab, 0x1b, 0xba, 0xbd, 0x21, 0xe1, 0x7f, 0x78, 0xef, 0xe5, 0x55, 0xa0, 0x39, 0xbe, + 0x78, 0xf6, 0x51, 0x1d, 0x8a, 0x17, 0x96, 0x27, 0xab, 0x57, 0x39, 0x9b, 0x95, 0xa5, 0xb0, 0x1f, + 0xa2, 0x85, 0xe3, 0xa6, 0x7a, 0xcc, 0x98, 0xfd, 0x25, 0xb1, 0x67, 0x54, 0x2f, 0x28, 0x0b, 0xe5, + 0x8e, 0x0d, 0xe5, 0xc8, 0xa8, 0xc5, 0x62, 0xf4, 0x2c, 0x5c, 0x03, 0xe9, 0xda, 0x51, 0xa2, 0x6b, + 0xf5, 0xa5, 0x71, 0x93, 0x2c, 0x7a, 0xe6, 0x2e, 0x59, 0xc5, 0xf6, 0xd1, 0x9f, 0x53, 0xf0, 0xfd, + 0xe8, 0x71, 0x6c, 0xc6, 0x0d, 0x36, 0xa5, 0xd8, 0x61, 0x9c, 0xfa, 0x8b, 0x73, 0x73, 0xf2, 0xdc, + 0x4f, 0x1f, 0x7a, 0x6e, 0x2f, 0x00, 0xe9, 0x0a, 0x0c, 0x75, 0xfe, 0xbb, 0x24, 0x49, 0xec, 0x23, + 0x0b, 0xf6, 0xa2, 0x6e, 0x88, 0xa2, 0x0a, 0x0f, 0x2f, 0x3c, 0x20, 0xdd, 0x91, 0xc3, 0x4f, 0xd9, + 0x48, 0x9d, 0xb9, 0x43, 0x56, 0x70, 0xfd, 0xa4, 0xce, 0xbc, 0x99, 0xd0, 0x99, 0x2b, 0xff, 0xc8, + 0x40, 0x31, 0xde, 0x0f, 0x50, 0x19, 0x9e, 0x10, 0xd3, 0xf4, 0xa8, 0xef, 0xab, 0x76, 0x13, 0x92, + 0xe8, 0x05, 0xe4, 0x3c, 0x3a, 0x11, 0xaf, 0x22, 0x9a, 0x67, 0x39, 0x2d, 0xa5, 0x10, 0xb0, 0x44, + 0xe7, 0x44, 0x2f, 0xe1, 0x89, 0x4d, 0x38, 0xb6, 0x9d, 0x49, 0x39, 0x23, 0xaf, 0xc8, 0x56, 0xf8, + 0x82, 0xa2, 0x87, 0x57, 0xdb, 0x84, 0xb7, 0x9d, 0x89, 0xbe, 0x6e, 0xcb, 0x5f, 0xf4, 0x21, 0x88, + 0xd6, 0xcf, 0x89, 0xa8, 0x02, 0xcb, 0xc1, 0x53, 0xcb, 0x96, 0xa3, 0x38, 0x75, 0x98, 0xd2, 0x37, + 0x43, 0x41, 0xcb, 0xe9, 0x08, 0x36, 0xba, 0x86, 0x5d, 0x4e, 0x6d, 0x1a, 0x8e, 0x59, 0xd7, 0xa3, + 0x63, 0xea, 0x89, 0xfb, 0x2e, 0x47, 0x6d, 0xf1, 0xa8, 0xfe, 0x1d, 0x3a, 0x5e, 0x75, 0xb0, 0x44, + 0xea, 0x2f, 0x80, 0xf4, 0x1d, 0xbe, 0x8a, 0x2d, 0xc2, 0xe1, 0xd0, 0x09, 0xe1, 0xd4, 0x2c, 0xaf, + 0xcb, 0x79, 0x15, 0x92, 0x15, 0x0f, 0x76, 0x56, 0x22, 0xa1, 0x1f, 0x40, 0x65, 0xa0, 0xb5, 0xb5, + 0x46, 0xaf, 0xd3, 0x19, 0x0e, 0x34, 0xdc, 0xd7, 0xb5, 0x13, 0x4d, 0xd7, 0xba, 0x0d, 0x0d, 0x0f, + 0xbb, 0x67, 0x7d, 0xad, 0xd1, 0x3a, 0x69, 0x69, 0xcd, 0xd2, 0x23, 0x54, 0x86, 0xed, 0xa8, 0x9e, + 0xf6, 0x9b, 0x46, 0x7b, 0xd8, 0xd4, 0x9a, 0xa5, 0x14, 0xda, 0x83, 0xad, 0xa8, 0xa4, 0xde, 0x6e, + 0xf7, 0xde, 0x68, 0xcd, 0x52, 0xba, 0xf2, 0xa7, 0x35, 0x40, 0xb7, 0x87, 0x35, 0xea, 0xc2, 0x9a, + 0x88, 0xb0, 0x4c, 0x58, 0xf1, 0x9e, 0x3b, 0x7d, 0xdb, 0xbc, 0x1a, 0xfc, 0xc8, 
0x99, 0x29, 0x71, + 0xd0, 0x00, 0x1e, 0xcf, 0x1c, 0x8b, 0x07, 0xf3, 0xa4, 0x78, 0xf4, 0xab, 0x07, 0x03, 0xb6, 0x9c, + 0x31, 0x8b, 0x31, 0x86, 0x8e, 0xc5, 0xf5, 0x00, 0x0c, 0x0d, 0xe1, 0x71, 0x30, 0x4a, 0x83, 0xcb, + 0xf1, 0xd9, 0xff, 0x8e, 0x1a, 0x8c, 0xd7, 0x00, 0x0d, 0x51, 0xf8, 0xd8, 0x72, 0xe4, 0xf8, 0x94, + 0x23, 0x15, 0x5f, 0x59, 0xfc, 0x02, 0xcf, 0x1c, 0xdf, 0xa5, 0x86, 0x35, 0xb6, 0xa8, 0x89, 0x63, + 0x5b, 0x51, 0xe0, 0xc1, 0x9a, 0x4c, 0xe5, 0x47, 0xca, 0x4e, 0x4c, 0xdd, 0x37, 0x16, 0xbf, 0x18, + 0x2e, 0x8d, 0x6e, 0x9d, 0x56, 0xf9, 0x4b, 0x0a, 0x60, 0x19, 0x28, 0xf4, 0x0c, 0xf6, 0x4e, 0x5a, + 0xed, 0x81, 0xa6, 0xe3, 0xc1, 0x79, 0xff, 0x66, 0x66, 0x0b, 0x90, 0x1d, 0x76, 0x5b, 0x03, 0xdc, + 0xeb, 0xb6, 0xcf, 0x4b, 0x29, 0xb4, 0x05, 0x9b, 0x92, 0xac, 0x77, 0x9b, 0xb8, 0xde, 0xe9, 0x0d, + 0xbb, 0x83, 0x52, 0x1a, 0xed, 0xc3, 0x6e, 0xbd, 0xdb, 0x1d, 0xd6, 0xdb, 0xad, 0xaf, 0xb4, 0x26, + 0x3e, 0xae, 0x9f, 0x69, 0xa1, 0x2c, 0x23, 0xc0, 0x23, 0xb2, 0x41, 0x6f, 0x50, 0x6f, 0x87, 0xc2, + 0xb5, 0xca, 0xb7, 0x6b, 0x50, 0x88, 0x2d, 0x8f, 0xd1, 0x05, 0x74, 0x4a, 0xf9, 0x05, 0x33, 0xd5, + 0x45, 0x78, 0xd0, 0x02, 0xda, 0x91, 0x16, 0x8b, 0x05, 0x34, 0x20, 0xd1, 0xe7, 0xf0, 0xd4, 0xe7, + 0xc4, 0xe3, 0xd8, 0x60, 0xcc, 0x33, 0x2d, 0x87, 0x70, 0xb9, 0xa2, 0xdf, 0x59, 0xd4, 0x25, 0xa9, + 0xdd, 0x58, 0x2a, 0xa3, 0x63, 0xd8, 0xe4, 0x1e, 0xf9, 0x86, 0xda, 0x38, 0xfc, 0x8c, 0x51, 0x79, + 0x7f, 0xe7, 0xd6, 0xdc, 0x68, 0x2a, 0x05, 0xbd, 0x18, 0x58, 0x84, 0x34, 0xfa, 0x14, 0xde, 0x21, + 0xb6, 0xcd, 0xae, 0xb0, 0x35, 0x75, 0x3d, 0x6a, 0x58, 0x3e, 0xc5, 0xaa, 0x19, 0xa9, 0x56, 0xb1, + 0xa1, 0xef, 0x49, 0x85, 0x56, 0x28, 0xaf, 0x87, 0x62, 0xf4, 0x5b, 0xc8, 0x7b, 0x8c, 0x98, 0x98, + 0x7b, 0x64, 0x3c, 0xb6, 0x0c, 0xd5, 0x28, 0x3e, 0x79, 0xf8, 0x4e, 0x5e, 0xd5, 0x19, 0x31, 0x07, + 0x81, 0xf5, 0xeb, 0x47, 0x7a, 0xce, 0x5b, 0x92, 0xe8, 0x33, 0x28, 0x9a, 0xd4, 0x25, 0x1e, 0x9f, + 0x79, 0x54, 0x2e, 0x89, 0xb2, 0x39, 0xe4, 0x8e, 0x76, 0x63, 0xb1, 0x11, 0x13, 0xb5, 0x37, 0x6e, + 0x92, 0xf9, 0xeb, 0x47, 0x7a, 0x61, 0xa1, 0x2f, 0xb8, 0x95, 0x36, 0xe4, 0x22, 0xf0, 0xe8, 0x39, + 0x94, 0xf5, 0x5e, 0xbd, 0x89, 0x07, 0x7a, 0xfd, 0xe4, 0xa4, 0xd5, 0xb8, 0x71, 0x9d, 0x4a, 0x90, + 0x0f, 0x05, 0x27, 0xba, 0xa6, 0x95, 0x52, 0xe2, 0x82, 0x1d, 0x0f, 0xcf, 0xce, 0xf1, 0xeb, 0xde, + 0x50, 0x2f, 0xa5, 0x8f, 0x4b, 0x50, 0x54, 0xaf, 0x89, 0x99, 0x2b, 0x22, 0x57, 0xf9, 0x35, 0x14, + 0xe3, 0xeb, 0x18, 0x7a, 0x06, 0xd9, 0xc5, 0x5a, 0xa7, 0x3a, 0xfb, 0x46, 0xb8, 0xa1, 0x45, 0xbb, + 0x5c, 0x3a, 0xde, 0xe5, 0x7e, 0x09, 0xb9, 0xc8, 0x3e, 0x23, 0x56, 0x51, 0xb9, 0xd1, 0x84, 0xab, + 0xa8, 0x24, 0x12, 0xcc, 0xff, 0x9d, 0x86, 0x62, 0x7c, 0x7f, 0x43, 0xfb, 0xb0, 0x11, 0x6e, 0x70, + 0xa1, 0x1f, 0x21, 0x8d, 0x74, 0x58, 0x9b, 0x86, 0xb3, 0xe5, 0xbe, 0xbe, 0x13, 0x87, 0xbd, 0x41, + 0x76, 0x98, 0x49, 0x75, 0x89, 0x15, 0x75, 0x2e, 0x13, 0x77, 0xee, 0xef, 0x29, 0x40, 0xb7, 0xcd, + 0xd0, 0xfb, 0x70, 0xa0, 0x75, 0xfa, 0xed, 0xde, 0xb9, 0xa6, 0x63, 0x55, 0xe3, 0x9d, 0x5e, 0xf3, + 0x66, 0x8d, 0x8b, 0xfa, 0x6d, 0xb7, 0x71, 0xa0, 0xd9, 0xd1, 0xba, 0x03, 0xac, 0x6b, 0x8d, 0x9e, + 0xde, 0x3c, 0x2b, 0xa5, 0xd0, 0x7b, 0xf0, 0xa2, 0x31, 0xd4, 0x75, 0xc1, 0xbc, 0x2d, 0x0f, 0xba, + 0x42, 0x1a, 0x1d, 0xc0, 0xf3, 0x7e, 0xfd, 0xec, 0x6e, 0x8d, 0x4c, 0xe5, 0x9f, 0x29, 0xd8, 0xbc, + 0xb1, 0xa3, 0xa2, 0x5d, 0x58, 0xf7, 0x8d, 0x0b, 0xc6, 0xc2, 0x0c, 0x28, 0x0a, 0xbd, 0x0f, 0xc5, + 0xb1, 0x45, 0x6d, 0x13, 0xb3, 0x31, 0xf6, 0xf9, 0xcc, 0x9c, 0xab, 0xf9, 0x9c, 0x97, 0xdc, 0xde, + 0xf8, 0x4c, 0xf0, 0xd0, 0x6b, 0xc8, 0x99, 0x74, 0xe2, 0x51, 0x2a, 0xbf, 0xaa, 0x64, 0x3c, 0x8a, + 0xf7, 
0xac, 0x85, 0x4d, 0xa9, 0x2f, 0x87, 0x03, 0x98, 0x8b, 0xe7, 0x84, 0xb9, 0xf8, 0xd7, 0x14, + 0x6c, 0xaf, 0x5a, 0x55, 0xd1, 0xe7, 0x50, 0x9c, 0x5a, 0x4e, 0x64, 0xf9, 0x95, 0xaf, 0x90, 0xd8, + 0x10, 0x0a, 0x53, 0xcb, 0x59, 0xe2, 0x48, 0x04, 0x72, 0x1d, 0x45, 0x48, 0xdf, 0x8f, 0x40, 0xae, + 0x97, 0x08, 0x95, 0x2b, 0xd8, 0x59, 0xb9, 0x4c, 0xa2, 0x8f, 0x01, 0x82, 0x86, 0x27, 0x96, 0x53, + 0xe5, 0xd8, 0xd3, 0x58, 0x35, 0x0b, 0x65, 0x3d, 0x2b, 0x95, 0xc4, 0x23, 0x7a, 0x09, 0x1b, 0xd4, + 0x31, 0x03, 0xfd, 0xf4, 0x5d, 0xfa, 0x4f, 0xa8, 0x63, 0x8a, 0x87, 0xca, 0xef, 0xe0, 0x7b, 0x89, + 0xdb, 0xa4, 0xf8, 0x34, 0x8c, 0x2d, 0xaa, 0x2a, 0xbf, 0x79, 0x16, 0x51, 0x4d, 0x28, 0xb4, 0x3f, + 0xc2, 0xf6, 0xaa, 0x85, 0x11, 0xbd, 0x04, 0x24, 0xca, 0xde, 0xa3, 0x5f, 0xcf, 0x2c, 0xdf, 0x92, + 0x23, 0xd0, 0x32, 0xd5, 0xdd, 0x28, 0x5d, 0xb2, 0x91, 0xbe, 0x14, 0xb4, 0xcc, 0x78, 0x93, 0xc8, + 0xdc, 0xdd, 0x24, 0xd6, 0xe2, 0x87, 0xff, 0x2d, 0x0d, 0xb0, 0xfc, 0x7c, 0x40, 0x3f, 0x0f, 0x63, + 0x29, 0x3b, 0x63, 0xea, 0x8e, 0xaf, 0x85, 0xe5, 0xc7, 0x72, 0x10, 0x54, 0x41, 0xa3, 0x9f, 0x05, + 0x41, 0x95, 0x86, 0xe9, 0x7b, 0x0d, 0x45, 0x74, 0xa5, 0x59, 0x1f, 0x40, 0x7d, 0xef, 0x50, 0xdb, + 0x54, 0xd7, 0xfa, 0xd5, 0x03, 0xbf, 0x76, 0xd4, 0x23, 0xb5, 0x4d, 0x3d, 0xcb, 0xc3, 0xc7, 0x4a, + 0x0b, 0xb2, 0x0b, 0xbe, 0xa8, 0xf5, 0x41, 0xab, 0xa3, 0xe1, 0x93, 0x96, 0xd6, 0x6e, 0xde, 0xe8, + 0x03, 0x9b, 0x90, 0x6b, 0xe8, 0x5a, 0x7d, 0xa0, 0x61, 0xa1, 0x52, 0x4a, 0x09, 0xc6, 0xb0, 0xdf, + 0x5c, 0x30, 0xd2, 0xc7, 0x7f, 0x80, 0x17, 0x06, 0x9b, 0x26, 0x79, 0x73, 0x9c, 0x57, 0x8b, 0x7c, + 0x5f, 0xbc, 0x63, 0x3f, 0xf5, 0x55, 0x5d, 0x29, 0x4f, 0x98, 0x4d, 0x9c, 0x49, 0x95, 0x79, 0x93, + 0xda, 0x84, 0x3a, 0x32, 0x02, 0xb5, 0x40, 0x44, 0x5c, 0xcb, 0x5f, 0xf9, 0xa7, 0xe2, 0x2f, 0x02, + 0xf2, 0xdb, 0x74, 0xa6, 0x31, 0x38, 0x1b, 0xad, 0x4b, 0x9b, 0x9f, 0xfc, 0x37, 0x00, 0x00, 0xff, + 0xff, 0x02, 0x42, 0x8a, 0x1f, 0x4f, 0x15, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/histogram.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/histogram.pb.go new file mode 100644 index 0000000..2f73c16 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/histogram.pb.go @@ -0,0 +1,159 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/histogram.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Input Only. +// +// The histogram request. +type HistogramQuery struct { + // An expression specifies a histogram request against matching resources + // (for example, jobs, profiles) for searches. 
+ // + // See [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries] and + // [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries] for details about syntax. + HistogramQuery string `protobuf:"bytes,1,opt,name=histogram_query,json=histogramQuery,proto3" json:"histogram_query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HistogramQuery) Reset() { *m = HistogramQuery{} } +func (m *HistogramQuery) String() string { return proto.CompactTextString(m) } +func (*HistogramQuery) ProtoMessage() {} +func (*HistogramQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_histogram_6fd84e257fb563ef, []int{0} +} +func (m *HistogramQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HistogramQuery.Unmarshal(m, b) +} +func (m *HistogramQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HistogramQuery.Marshal(b, m, deterministic) +} +func (dst *HistogramQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_HistogramQuery.Merge(dst, src) +} +func (m *HistogramQuery) XXX_Size() int { + return xxx_messageInfo_HistogramQuery.Size(m) +} +func (m *HistogramQuery) XXX_DiscardUnknown() { + xxx_messageInfo_HistogramQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_HistogramQuery proto.InternalMessageInfo + +func (m *HistogramQuery) GetHistogramQuery() string { + if m != nil { + return m.HistogramQuery + } + return "" +} + +// Output only. +// +// Histogram result that matches [HistogramSpec][] specified in searches. +type HistogramQueryResult struct { + // Requested histogram expression. + HistogramQuery string `protobuf:"bytes,1,opt,name=histogram_query,json=histogramQuery,proto3" json:"histogram_query,omitempty"` + // A map from the values of the facet associated with distinct values to the + // number of matching entries with corresponding value. + // + // The key format is: + // * (for string histogram) string values stored in the field. + // * (for named numeric bucket) name specified in `bucket()` function, like + // for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`. + // * (for anonymous numeric bucket) range formatted as `-`, for + // example, `0-1000`, `MIN-0`, and `0-MAX`. 
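A minimal usage sketch (not part of the generated file): it shows how a caller might build a HistogramQuery and read bucket counts back out of a HistogramQueryResult, using only the two message types defined here. The expression string and bucket keys are placeholder assumptions following the key-format rules described above.

package main

import (
	"fmt"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

func main() {
	// Hypothetical histogram expression; see SearchJobsRequest.histogram_queries for the syntax.
	q := &talent.HistogramQuery{
		HistogramQuery: `count(base_compensation, [bucket(0, 100000), bucket(100000, MAX)])`,
	}

	// A result echoes the requested expression and maps bucket keys
	// (for example "0-100000" or a named bucket) to matching-entry counts.
	res := &talent.HistogramQueryResult{
		HistogramQuery: q.GetHistogramQuery(),
		Histogram:      map[string]int64{"0-100000": 12, "100000-MAX": 3},
	}
	for key, count := range res.GetHistogram() {
		fmt.Printf("%s: %d\n", key, count)
	}
}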
+ Histogram map[string]int64 `protobuf:"bytes,2,rep,name=histogram,proto3" json:"histogram,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HistogramQueryResult) Reset() { *m = HistogramQueryResult{} } +func (m *HistogramQueryResult) String() string { return proto.CompactTextString(m) } +func (*HistogramQueryResult) ProtoMessage() {} +func (*HistogramQueryResult) Descriptor() ([]byte, []int) { + return fileDescriptor_histogram_6fd84e257fb563ef, []int{1} +} +func (m *HistogramQueryResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HistogramQueryResult.Unmarshal(m, b) +} +func (m *HistogramQueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HistogramQueryResult.Marshal(b, m, deterministic) +} +func (dst *HistogramQueryResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_HistogramQueryResult.Merge(dst, src) +} +func (m *HistogramQueryResult) XXX_Size() int { + return xxx_messageInfo_HistogramQueryResult.Size(m) +} +func (m *HistogramQueryResult) XXX_DiscardUnknown() { + xxx_messageInfo_HistogramQueryResult.DiscardUnknown(m) +} + +var xxx_messageInfo_HistogramQueryResult proto.InternalMessageInfo + +func (m *HistogramQueryResult) GetHistogramQuery() string { + if m != nil { + return m.HistogramQuery + } + return "" +} + +func (m *HistogramQueryResult) GetHistogram() map[string]int64 { + if m != nil { + return m.Histogram + } + return nil +} + +func init() { + proto.RegisterType((*HistogramQuery)(nil), "google.cloud.talent.v4beta1.HistogramQuery") + proto.RegisterType((*HistogramQueryResult)(nil), "google.cloud.talent.v4beta1.HistogramQueryResult") + proto.RegisterMapType((map[string]int64)(nil), "google.cloud.talent.v4beta1.HistogramQueryResult.HistogramEntry") +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/histogram.proto", fileDescriptor_histogram_6fd84e257fb563ef) +} + +var fileDescriptor_histogram_6fd84e257fb563ef = []byte{ + // 301 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0xce, 0xc9, 0x2f, 0x4d, 0xd1, 0x2f, 0x49, 0xcc, 0x49, 0xcd, 0x2b, 0xd1, + 0x2f, 0x33, 0x49, 0x4a, 0x2d, 0x49, 0x34, 0xd4, 0xcf, 0xc8, 0x2c, 0x2e, 0xc9, 0x4f, 0x2f, 0x4a, + 0xcc, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x86, 0x28, 0xd6, 0x03, 0x2b, 0xd6, 0x83, + 0x28, 0xd6, 0x83, 0x2a, 0x96, 0x92, 0x83, 0x9a, 0x04, 0x56, 0x9a, 0x54, 0x9a, 0xa6, 0x5f, 0x5e, + 0x94, 0x58, 0x50, 0x90, 0x5a, 0x54, 0x0c, 0xd1, 0x2c, 0x25, 0x03, 0x95, 0x4f, 0x2c, 0xc8, 0xd4, + 0x4f, 0xcc, 0xcb, 0xcb, 0x2f, 0x49, 0x2c, 0xc9, 0xcc, 0xcf, 0x83, 0xca, 0x2a, 0x59, 0x72, 0xf1, + 0x79, 0xc0, 0x6c, 0x0b, 0x2c, 0x4d, 0x2d, 0xaa, 0x14, 0x52, 0xe7, 0xe2, 0x87, 0xdb, 0x1f, 0x5f, + 0x08, 0x12, 0x92, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0c, 0xe2, 0xcb, 0x40, 0x51, 0xa8, 0x74, 0x97, + 0x91, 0x4b, 0x04, 0x55, 0x6f, 0x50, 0x6a, 0x71, 0x69, 0x4e, 0x09, 0xd1, 0x26, 0x08, 0xc5, 0x71, + 0x71, 0xc2, 0x45, 0x24, 0x98, 0x14, 0x98, 0x35, 0xb8, 0x8d, 0x1c, 0xf4, 0xf0, 0xf8, 0x55, 0x0f, + 0x9b, 0x75, 0x08, 0x41, 0xd7, 0xbc, 0x92, 0xa2, 0xca, 0x20, 0x84, 0x91, 0x52, 0x36, 0x48, 0x9e, + 0x03, 0x4b, 0x0a, 0x09, 0x70, 0x31, 0x67, 0xa7, 0xc2, 0x9c, 0x03, 0x62, 0x0a, 0x89, 0x70, 0xb1, + 0x96, 0x25, 0xe6, 0x94, 0xa6, 0x4a, 0x30, 0x29, 0x30, 0x6a, 0x30, 0x07, 0x41, 0x38, 0x56, 0x4c, + 0x16, 
0x8c, 0x4e, 0x35, 0x5c, 0xf2, 0xc9, 0xf9, 0xb9, 0xf8, 0xdc, 0xe3, 0x84, 0x30, 0x3e, 0x00, + 0x14, 0x9a, 0x01, 0x8c, 0x51, 0x8e, 0x50, 0xe5, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, + 0x45, 0xe9, 0xfa, 0xe9, 0xa9, 0x79, 0xe0, 0xb0, 0xd6, 0x87, 0x48, 0x25, 0x16, 0x64, 0x16, 0x63, + 0x8d, 0x76, 0x6b, 0x08, 0x77, 0x11, 0x13, 0xb3, 0x73, 0x48, 0x70, 0x12, 0x1b, 0x58, 0x8f, 0x31, + 0x20, 0x00, 0x00, 0xff, 0xff, 0x9f, 0x60, 0x65, 0xa2, 0x29, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job.pb.go new file mode 100644 index 0000000..c2c9e45 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job.pb.go @@ -0,0 +1,801 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/job.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/postaladdress" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Job resource represents a job posting (also referred to as a "job listing" +// or "job requisition"). A job belongs to a [Company][google.cloud.talent.v4beta1.Company], which is the hiring +// entity responsible for the job. +type Job struct { + // Required during job update. + // + // The resource name for the job. This is generated by the service when a + // job is created. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for + // example, "projects/api-test-project/tenants/foo/jobs/1234". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/jobs/1234". + // + // Use of this field in job queries and API calls is preferred over the use of + // [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] since this value is unique. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The resource name of the company listing the job. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for + // example, "projects/api-test-project/tenants/foo/companies/bar". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/companies/bar". + Company string `protobuf:"bytes,2,opt,name=company,proto3" json:"company,omitempty"` + // Required. + // + // The requisition ID, also referred to as the posting ID, is assigned by the + // client to identify a job. This field is intended to be used by clients + // for client identification and tracking of postings. 
A job isn't allowed + // to be created if there is another job with the same [company][google.cloud.talent.v4beta1.Job.name], + // [language_code][google.cloud.talent.v4beta1.Job.language_code] and [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id]. + // + // The maximum number of allowed characters is 255. + RequisitionId string `protobuf:"bytes,3,opt,name=requisition_id,json=requisitionId,proto3" json:"requisition_id,omitempty"` + // Required. + // + // The title of the job, such as "Software Engineer" + // + // The maximum number of allowed characters is 500. + Title string `protobuf:"bytes,4,opt,name=title,proto3" json:"title,omitempty"` + // Required. + // + // The description of the job, which typically includes a multi-paragraph + // description of the company and related information. Separate fields are + // provided on the job object for [responsibilities][google.cloud.talent.v4beta1.Job.responsibilities], + // [qualifications][google.cloud.talent.v4beta1.Job.qualifications], and other job characteristics. Use of + // these separate job fields is recommended. + // + // This field accepts and sanitizes HTML input, and also accepts + // bold, italic, ordered list, and unordered list markup tags. + // + // The maximum number of allowed characters is 100,000. + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + // Optional but strongly recommended for the best service experience. + // + // Location(s) where the employer is looking to hire for this job posting. + // + // Specifying the full street address(es) of the hiring location enables + // better API results, especially job searches by commute time. + // + // At most 50 locations are allowed for best search performance. If a job has + // more locations, it is suggested to split it into multiple jobs with unique + // [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id]s (e.g. 'ReqA' becomes 'ReqA-1', 'ReqA-2', and so on.) as + // multiple jobs with the same [company][google.cloud.talent.v4beta1.Job.name][], [language_code][] and + // [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] are not allowed. If the original [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] must + // be preserved, a custom field should be used for storage. It is also + // suggested to group the locations that close to each other in the same job + // for better search experience. + // + // The maximum number of allowed characters is 500. + Addresses []string `protobuf:"bytes,6,rep,name=addresses,proto3" json:"addresses,omitempty"` + // Optional. + // + // Job application information. + ApplicationInfo *Job_ApplicationInfo `protobuf:"bytes,7,opt,name=application_info,json=applicationInfo,proto3" json:"application_info,omitempty"` + // Optional. + // + // The benefits included with the job. + JobBenefits []JobBenefit `protobuf:"varint,8,rep,packed,name=job_benefits,json=jobBenefits,proto3,enum=google.cloud.talent.v4beta1.JobBenefit" json:"job_benefits,omitempty"` + // Optional. + // + // Job compensation information (a.k.a. "pay rate") i.e., the compensation + // that will paid to the employee. + CompensationInfo *CompensationInfo `protobuf:"bytes,9,opt,name=compensation_info,json=compensationInfo,proto3" json:"compensation_info,omitempty"` + // Optional. + // + // A map of fields to hold both filterable and non-filterable custom job + // attributes that are not covered by the provided structured fields. 
+ // + // The keys of the map are strings up to 64 bytes and must match the + // pattern: [a-zA-Z][a-zA-Z0-9_]*. For example, key0LikeThis or + // KEY_1_LIKE_THIS. + // + // At most 100 filterable and at most 100 unfilterable keys are supported. + // For filterable `string_values`, across all keys at most 200 values are + // allowed, with each string no more than 255 characters. For unfilterable + // `string_values`, the maximum total size of `string_values` across all keys + // is 50KB. + CustomAttributes map[string]*CustomAttribute `protobuf:"bytes,10,rep,name=custom_attributes,json=customAttributes,proto3" json:"custom_attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. + // + // The desired education degrees for the job, such as Bachelors, Masters. + DegreeTypes []DegreeType `protobuf:"varint,11,rep,packed,name=degree_types,json=degreeTypes,proto3,enum=google.cloud.talent.v4beta1.DegreeType" json:"degree_types,omitempty"` + // Optional. + // + // The department or functional area within the company with the open + // position. + // + // The maximum number of allowed characters is 255. + Department string `protobuf:"bytes,12,opt,name=department,proto3" json:"department,omitempty"` + // Optional. + // + // The employment type(s) of a job, for example, + // [full time][google.cloud.talent.v4beta1.EmploymentType.FULL_TIME] or + // [part time][google.cloud.talent.v4beta1.EmploymentType.PART_TIME]. + EmploymentTypes []EmploymentType `protobuf:"varint,13,rep,packed,name=employment_types,json=employmentTypes,proto3,enum=google.cloud.talent.v4beta1.EmploymentType" json:"employment_types,omitempty"` + // Optional. + // + // A description of bonus, commission, and other compensation + // incentives associated with the job not including salary or pay. + // + // The maximum number of allowed characters is 10,000. + Incentives string `protobuf:"bytes,14,opt,name=incentives,proto3" json:"incentives,omitempty"` + // Optional. + // + // The language of the posting. This field is distinct from + // any requirements for fluency that are associated with the job. + // + // Language codes must be in BCP-47 format, such as "en-US" or "sr-Latn". + // For more information, see + // [Tags for Identifying Languages](https://tools.ietf.org/html/bcp47){: + // class="external" target="_blank" }. + // + // If this field is unspecified and [Job.description][google.cloud.talent.v4beta1.Job.description] is present, detected + // language code based on [Job.description][google.cloud.talent.v4beta1.Job.description] is assigned, otherwise + // defaults to 'en_US'. + LanguageCode string `protobuf:"bytes,15,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. + // + // The experience level associated with the job, such as "Entry Level". + JobLevel JobLevel `protobuf:"varint,16,opt,name=job_level,json=jobLevel,proto3,enum=google.cloud.talent.v4beta1.JobLevel" json:"job_level,omitempty"` + // Optional. + // + // A promotion value of the job, as determined by the client. + // The value determines the sort order of the jobs returned when searching for + // jobs using the featured jobs search call, with higher promotional values + // being returned first and ties being resolved by relevance sort. Only the + // jobs with a promotionValue >0 are returned in a FEATURED_JOB_SEARCH. + // + // Default value is 0, and negative values are treated as 0. 
+ PromotionValue int32 `protobuf:"varint,17,opt,name=promotion_value,json=promotionValue,proto3" json:"promotion_value,omitempty"` + // Optional. + // + // A description of the qualifications required to perform the + // job. The use of this field is recommended + // as an alternative to using the more general [description][google.cloud.talent.v4beta1.Job.description] field. + // + // This field accepts and sanitizes HTML input, and also accepts + // bold, italic, ordered list, and unordered list markup tags. + // + // The maximum number of allowed characters is 10,000. + Qualifications string `protobuf:"bytes,18,opt,name=qualifications,proto3" json:"qualifications,omitempty"` + // Optional. + // + // A description of job responsibilities. The use of this field is + // recommended as an alternative to using the more general [description][google.cloud.talent.v4beta1.Job.description] + // field. + // + // This field accepts and sanitizes HTML input, and also accepts + // bold, italic, ordered list, and unordered list markup tags. + // + // The maximum number of allowed characters is 10,000. + Responsibilities string `protobuf:"bytes,19,opt,name=responsibilities,proto3" json:"responsibilities,omitempty"` + // Optional. + // + // The job [PostingRegion][google.cloud.talent.v4beta1.PostingRegion] (for example, state, country) throughout + // which the job is available. If this field is set, a [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] + // in a search query within the job region finds this job posting if an + // exact location match isn't specified. If this field is set to + // [PostingRegion.NATION][google.cloud.talent.v4beta1.PostingRegion.NATION] or [PostingRegion.ADMINISTRATIVE_AREA][google.cloud.talent.v4beta1.PostingRegion.ADMINISTRATIVE_AREA], + // setting job [Job.addresses][google.cloud.talent.v4beta1.Job.addresses] to the same location level as this field + // is strongly recommended. + PostingRegion PostingRegion `protobuf:"varint,20,opt,name=posting_region,json=postingRegion,proto3,enum=google.cloud.talent.v4beta1.PostingRegion" json:"posting_region,omitempty"` + // Optional. + // + // The visibility of the job. + // + // Defaults to [Visibility.ACCOUNT_ONLY][google.cloud.talent.v4beta1.Visibility.ACCOUNT_ONLY] if not specified. + Visibility Visibility `protobuf:"varint,21,opt,name=visibility,proto3,enum=google.cloud.talent.v4beta1.Visibility" json:"visibility,omitempty"` + // Optional. + // + // The start timestamp of the job in UTC time zone. Typically this field + // is used for contracting engagements. Invalid timestamps are ignored. + JobStartTime *timestamp.Timestamp `protobuf:"bytes,22,opt,name=job_start_time,json=jobStartTime,proto3" json:"job_start_time,omitempty"` + // Optional. + // + // The end timestamp of the job. Typically this field is used for contracting + // engagements. Invalid timestamps are ignored. + JobEndTime *timestamp.Timestamp `protobuf:"bytes,23,opt,name=job_end_time,json=jobEndTime,proto3" json:"job_end_time,omitempty"` + // Optional. + // + // The timestamp this job posting was most recently published. The default + // value is the time the request arrives at the server. Invalid timestamps are + // ignored. + PostingPublishTime *timestamp.Timestamp `protobuf:"bytes,24,opt,name=posting_publish_time,json=postingPublishTime,proto3" json:"posting_publish_time,omitempty"` + // Optional but strongly recommended for the best service + // experience. + // + // The expiration timestamp of the job. 
After this timestamp, the + // job is marked as expired, and it no longer appears in search results. The + // expired job can't be deleted or listed by the [DeleteJob][] and + // [ListJobs][] APIs, but it can be retrieved with the [GetJob][] API or + // updated with the [UpdateJob][] API. An expired job can be updated and + // opened again by using a future expiration timestamp. Updating an expired + // job fails if there is another existing open job with same + // [company][google.cloud.talent.v4beta1.Job.name][], [language_code][] and [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id]. + // + // The expired jobs are retained in our system for 90 days. However, the + // overall expired job count cannot exceed 3 times the maximum of open jobs + // count over the past week, otherwise jobs with earlier expire time are + // cleaned first. Expired jobs are no longer accessible after they are cleaned + // out. + // + // Invalid timestamps are ignored, and treated as expire time not provided. + // + // Timestamp before the instant request is made is considered valid, the job + // will be treated as expired immediately. + // + // If this value isn't provided at the time of job creation or is invalid, + // the job posting expires after 30 days from the job's creation time. For + // example, if the job was created on 2017/01/01 13:00AM UTC with an + // unspecified expiration date, the job expires after 2017/01/31 13:00AM UTC. + // + // If this value isn't provided on job update, it depends on the field masks + // set by [UpdateJobRequest.update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask]. If the field masks include + // [expiry_time][], or the masks are empty meaning that every field is + // updated, the job posting expires after 30 days from the job's last + // update time. Otherwise the expiration date isn't updated. + PostingExpireTime *timestamp.Timestamp `protobuf:"bytes,25,opt,name=posting_expire_time,json=postingExpireTime,proto3" json:"posting_expire_time,omitempty"` + // Output only. The timestamp when this job posting was created. + PostingCreateTime *timestamp.Timestamp `protobuf:"bytes,26,opt,name=posting_create_time,json=postingCreateTime,proto3" json:"posting_create_time,omitempty"` + // Output only. The timestamp when this job posting was last updated. + PostingUpdateTime *timestamp.Timestamp `protobuf:"bytes,27,opt,name=posting_update_time,json=postingUpdateTime,proto3" json:"posting_update_time,omitempty"` + // Output only. Display name of the company listing the job. + CompanyDisplayName string `protobuf:"bytes,28,opt,name=company_display_name,json=companyDisplayName,proto3" json:"company_display_name,omitempty"` + // Output only. Derived details about the job posting. + DerivedInfo *Job_DerivedInfo `protobuf:"bytes,29,opt,name=derived_info,json=derivedInfo,proto3" json:"derived_info,omitempty"` + // Optional. + // + // Options for job processing. 
+ ProcessingOptions *Job_ProcessingOptions `protobuf:"bytes,30,opt,name=processing_options,json=processingOptions,proto3" json:"processing_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job) Reset() { *m = Job{} } +func (m *Job) String() string { return proto.CompactTextString(m) } +func (*Job) ProtoMessage() {} +func (*Job) Descriptor() ([]byte, []int) { + return fileDescriptor_job_1bcb1f20b417ba30, []int{0} +} +func (m *Job) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job.Unmarshal(m, b) +} +func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job.Marshal(b, m, deterministic) +} +func (dst *Job) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job.Merge(dst, src) +} +func (m *Job) XXX_Size() int { + return xxx_messageInfo_Job.Size(m) +} +func (m *Job) XXX_DiscardUnknown() { + xxx_messageInfo_Job.DiscardUnknown(m) +} + +var xxx_messageInfo_Job proto.InternalMessageInfo + +func (m *Job) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Job) GetCompany() string { + if m != nil { + return m.Company + } + return "" +} + +func (m *Job) GetRequisitionId() string { + if m != nil { + return m.RequisitionId + } + return "" +} + +func (m *Job) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Job) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Job) GetAddresses() []string { + if m != nil { + return m.Addresses + } + return nil +} + +func (m *Job) GetApplicationInfo() *Job_ApplicationInfo { + if m != nil { + return m.ApplicationInfo + } + return nil +} + +func (m *Job) GetJobBenefits() []JobBenefit { + if m != nil { + return m.JobBenefits + } + return nil +} + +func (m *Job) GetCompensationInfo() *CompensationInfo { + if m != nil { + return m.CompensationInfo + } + return nil +} + +func (m *Job) GetCustomAttributes() map[string]*CustomAttribute { + if m != nil { + return m.CustomAttributes + } + return nil +} + +func (m *Job) GetDegreeTypes() []DegreeType { + if m != nil { + return m.DegreeTypes + } + return nil +} + +func (m *Job) GetDepartment() string { + if m != nil { + return m.Department + } + return "" +} + +func (m *Job) GetEmploymentTypes() []EmploymentType { + if m != nil { + return m.EmploymentTypes + } + return nil +} + +func (m *Job) GetIncentives() string { + if m != nil { + return m.Incentives + } + return "" +} + +func (m *Job) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *Job) GetJobLevel() JobLevel { + if m != nil { + return m.JobLevel + } + return JobLevel_JOB_LEVEL_UNSPECIFIED +} + +func (m *Job) GetPromotionValue() int32 { + if m != nil { + return m.PromotionValue + } + return 0 +} + +func (m *Job) GetQualifications() string { + if m != nil { + return m.Qualifications + } + return "" +} + +func (m *Job) GetResponsibilities() string { + if m != nil { + return m.Responsibilities + } + return "" +} + +func (m *Job) GetPostingRegion() PostingRegion { + if m != nil { + return m.PostingRegion + } + return PostingRegion_POSTING_REGION_UNSPECIFIED +} + +func (m *Job) GetVisibility() Visibility { + if m != nil { + return m.Visibility + } + return Visibility_VISIBILITY_UNSPECIFIED +} + +func (m *Job) GetJobStartTime() *timestamp.Timestamp { + if m != nil { + return m.JobStartTime + } + return nil +} + +func (m *Job) GetJobEndTime() *timestamp.Timestamp { + if m != nil 
{ + return m.JobEndTime + } + return nil +} + +func (m *Job) GetPostingPublishTime() *timestamp.Timestamp { + if m != nil { + return m.PostingPublishTime + } + return nil +} + +func (m *Job) GetPostingExpireTime() *timestamp.Timestamp { + if m != nil { + return m.PostingExpireTime + } + return nil +} + +func (m *Job) GetPostingCreateTime() *timestamp.Timestamp { + if m != nil { + return m.PostingCreateTime + } + return nil +} + +func (m *Job) GetPostingUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.PostingUpdateTime + } + return nil +} + +func (m *Job) GetCompanyDisplayName() string { + if m != nil { + return m.CompanyDisplayName + } + return "" +} + +func (m *Job) GetDerivedInfo() *Job_DerivedInfo { + if m != nil { + return m.DerivedInfo + } + return nil +} + +func (m *Job) GetProcessingOptions() *Job_ProcessingOptions { + if m != nil { + return m.ProcessingOptions + } + return nil +} + +// Application related details of a job posting. +type Job_ApplicationInfo struct { + // Optional. + // + // Use this field to specify email address(es) to which resumes or + // applications can be sent. + // + // The maximum number of allowed characters for each entry is 255. + Emails []string `protobuf:"bytes,1,rep,name=emails,proto3" json:"emails,omitempty"` + // Optional. + // + // Use this field to provide instructions, such as "Mail your application + // to ...", that a candidate can follow to apply for the job. + // + // This field accepts and sanitizes HTML input, and also accepts + // bold, italic, ordered list, and unordered list markup tags. + // + // The maximum number of allowed characters is 3,000. + Instruction string `protobuf:"bytes,2,opt,name=instruction,proto3" json:"instruction,omitempty"` + // Optional. + // + // Use this URI field to direct an applicant to a website, for example to + // link to an online application form. + // + // The maximum number of allowed characters for each entry is 2,000. + Uris []string `protobuf:"bytes,3,rep,name=uris,proto3" json:"uris,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job_ApplicationInfo) Reset() { *m = Job_ApplicationInfo{} } +func (m *Job_ApplicationInfo) String() string { return proto.CompactTextString(m) } +func (*Job_ApplicationInfo) ProtoMessage() {} +func (*Job_ApplicationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_job_1bcb1f20b417ba30, []int{0, 0} +} +func (m *Job_ApplicationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job_ApplicationInfo.Unmarshal(m, b) +} +func (m *Job_ApplicationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job_ApplicationInfo.Marshal(b, m, deterministic) +} +func (dst *Job_ApplicationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job_ApplicationInfo.Merge(dst, src) +} +func (m *Job_ApplicationInfo) XXX_Size() int { + return xxx_messageInfo_Job_ApplicationInfo.Size(m) +} +func (m *Job_ApplicationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Job_ApplicationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Job_ApplicationInfo proto.InternalMessageInfo + +func (m *Job_ApplicationInfo) GetEmails() []string { + if m != nil { + return m.Emails + } + return nil +} + +func (m *Job_ApplicationInfo) GetInstruction() string { + if m != nil { + return m.Instruction + } + return "" +} + +func (m *Job_ApplicationInfo) GetUris() []string { + if m != nil { + return m.Uris + } + return nil +} + +// Output only. 
+// +// Derived details about the job posting. +type Job_DerivedInfo struct { + // Structured locations of the job, resolved from [Job.addresses][google.cloud.talent.v4beta1.Job.addresses]. + // + // [locations][google.cloud.talent.v4beta1.Job.DerivedInfo.locations] are exactly matched to [Job.addresses][google.cloud.talent.v4beta1.Job.addresses] in the same + // order. + Locations []*Location `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty"` + // Job categories derived from [Job.title][google.cloud.talent.v4beta1.Job.title] and [Job.description][google.cloud.talent.v4beta1.Job.description]. + JobCategories []JobCategory `protobuf:"varint,3,rep,packed,name=job_categories,json=jobCategories,proto3,enum=google.cloud.talent.v4beta1.JobCategory" json:"job_categories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job_DerivedInfo) Reset() { *m = Job_DerivedInfo{} } +func (m *Job_DerivedInfo) String() string { return proto.CompactTextString(m) } +func (*Job_DerivedInfo) ProtoMessage() {} +func (*Job_DerivedInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_job_1bcb1f20b417ba30, []int{0, 1} +} +func (m *Job_DerivedInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job_DerivedInfo.Unmarshal(m, b) +} +func (m *Job_DerivedInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job_DerivedInfo.Marshal(b, m, deterministic) +} +func (dst *Job_DerivedInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job_DerivedInfo.Merge(dst, src) +} +func (m *Job_DerivedInfo) XXX_Size() int { + return xxx_messageInfo_Job_DerivedInfo.Size(m) +} +func (m *Job_DerivedInfo) XXX_DiscardUnknown() { + xxx_messageInfo_Job_DerivedInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_Job_DerivedInfo proto.InternalMessageInfo + +func (m *Job_DerivedInfo) GetLocations() []*Location { + if m != nil { + return m.Locations + } + return nil +} + +func (m *Job_DerivedInfo) GetJobCategories() []JobCategory { + if m != nil { + return m.JobCategories + } + return nil +} + +// Input only. +// +// Options for job processing. +type Job_ProcessingOptions struct { + // Optional. + // + // If set to `true`, the service does not attempt to resolve a + // more precise address for the job. + DisableStreetAddressResolution bool `protobuf:"varint,1,opt,name=disable_street_address_resolution,json=disableStreetAddressResolution,proto3" json:"disable_street_address_resolution,omitempty"` + // Optional. + // + // Option for job HTML content sanitization. Applied fields are: + // + // * description + // * applicationInfo.instruction + // * incentives + // * qualifications + // * responsibilities + // + // HTML tags in these fields may be stripped if sanitiazation isn't + // disabled. + // + // Defaults to [HtmlSanitization.SIMPLE_FORMATTING_ONLY][google.cloud.talent.v4beta1.HtmlSanitization.SIMPLE_FORMATTING_ONLY]. 
+ HtmlSanitization HtmlSanitization `protobuf:"varint,2,opt,name=html_sanitization,json=htmlSanitization,proto3,enum=google.cloud.talent.v4beta1.HtmlSanitization" json:"html_sanitization,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Job_ProcessingOptions) Reset() { *m = Job_ProcessingOptions{} } +func (m *Job_ProcessingOptions) String() string { return proto.CompactTextString(m) } +func (*Job_ProcessingOptions) ProtoMessage() {} +func (*Job_ProcessingOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_job_1bcb1f20b417ba30, []int{0, 2} +} +func (m *Job_ProcessingOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Job_ProcessingOptions.Unmarshal(m, b) +} +func (m *Job_ProcessingOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Job_ProcessingOptions.Marshal(b, m, deterministic) +} +func (dst *Job_ProcessingOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Job_ProcessingOptions.Merge(dst, src) +} +func (m *Job_ProcessingOptions) XXX_Size() int { + return xxx_messageInfo_Job_ProcessingOptions.Size(m) +} +func (m *Job_ProcessingOptions) XXX_DiscardUnknown() { + xxx_messageInfo_Job_ProcessingOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_Job_ProcessingOptions proto.InternalMessageInfo + +func (m *Job_ProcessingOptions) GetDisableStreetAddressResolution() bool { + if m != nil { + return m.DisableStreetAddressResolution + } + return false +} + +func (m *Job_ProcessingOptions) GetHtmlSanitization() HtmlSanitization { + if m != nil { + return m.HtmlSanitization + } + return HtmlSanitization_HTML_SANITIZATION_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Job)(nil), "google.cloud.talent.v4beta1.Job") + proto.RegisterMapType((map[string]*CustomAttribute)(nil), "google.cloud.talent.v4beta1.Job.CustomAttributesEntry") + proto.RegisterType((*Job_ApplicationInfo)(nil), "google.cloud.talent.v4beta1.Job.ApplicationInfo") + proto.RegisterType((*Job_DerivedInfo)(nil), "google.cloud.talent.v4beta1.Job.DerivedInfo") + proto.RegisterType((*Job_ProcessingOptions)(nil), "google.cloud.talent.v4beta1.Job.ProcessingOptions") +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/job.proto", fileDescriptor_job_1bcb1f20b417ba30) +} + +var fileDescriptor_job_1bcb1f20b417ba30 = []byte{ + // 1102 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x56, 0xd1, 0x72, 0x1b, 0x35, + 0x17, 0x1e, 0xc7, 0x4d, 0x1b, 0xcb, 0x89, 0x63, 0xab, 0x69, 0x7f, 0xfd, 0x6e, 0x48, 0x0d, 0x4c, + 0xa8, 0xa7, 0x80, 0x5d, 0x02, 0xc3, 0x30, 0xc0, 0x05, 0x89, 0x93, 0x81, 0x64, 0x32, 0x24, 0x6c, + 0x42, 0x2e, 0xca, 0xc5, 0x8e, 0x76, 0xf7, 0xc4, 0x91, 0xd9, 0x95, 0x14, 0x49, 0x6b, 0x30, 0x17, + 0x3c, 0x0c, 0xbc, 0x05, 0x2f, 0xc1, 0x2b, 0x31, 0x92, 0x76, 0x1d, 0xd7, 0xed, 0xd8, 0xb9, 0xd3, + 0xf9, 0xf4, 0x7d, 0x9f, 0x76, 0xcf, 0xea, 0x9c, 0xb3, 0x68, 0x77, 0x28, 0xc4, 0x30, 0x85, 0x7e, + 0x9c, 0x8a, 0x3c, 0xe9, 0x1b, 0x9a, 0x02, 0x37, 0xfd, 0xf1, 0x17, 0x11, 0x18, 0xfa, 0x59, 0x7f, + 0x24, 0xa2, 0x9e, 0x54, 0xc2, 0x08, 0xfc, 0xcc, 0xd3, 0x7a, 0x8e, 0xd6, 0xf3, 0xb4, 0x5e, 0x41, + 0x6b, 0x77, 0x17, 0x79, 0xc4, 0x22, 0xcb, 0x04, 0xf7, 0x36, 0xed, 0xe7, 0x05, 0xd3, 0x45, 0x51, + 0x7e, 0xdd, 0x37, 0x2c, 0x03, 0x6d, 0x68, 0x26, 0x0b, 0xc2, 0xce, 0x3c, 0xe1, 0x37, 0x45, 0xa5, + 0x04, 0xa5, 0x8b, 0xfd, 0x4e, 0xb1, 0x6f, 0x26, 0x12, 0xfa, 0x52, 0x68, 0x43, 0xd3, 0x90, 0x26, + 0x89, 0x02, 0x5d, 0x32, 0xb6, 
0x0b, 0x06, 0x95, 0xac, 0x4f, 0x39, 0x17, 0x86, 0x1a, 0x26, 0x78, + 0xb1, 0xfb, 0xc1, 0xbf, 0x18, 0x55, 0x4f, 0x44, 0x84, 0x31, 0x7a, 0xc0, 0x69, 0x06, 0xa4, 0xd2, + 0xa9, 0x74, 0x6b, 0x81, 0x5b, 0x63, 0x82, 0x1e, 0xc5, 0x22, 0x93, 0x94, 0x4f, 0xc8, 0x8a, 0x83, + 0xcb, 0x10, 0xef, 0xa2, 0x86, 0x82, 0xdb, 0x9c, 0x69, 0x66, 0xbd, 0x42, 0x96, 0x90, 0xaa, 0x23, + 0x6c, 0xcc, 0xa0, 0xc7, 0x09, 0xde, 0x42, 0xab, 0x86, 0x99, 0x14, 0xc8, 0x03, 0xb7, 0xeb, 0x03, + 0xdc, 0x41, 0xf5, 0x04, 0x74, 0xac, 0x98, 0xb4, 0x34, 0xb2, 0xea, 0xf6, 0x66, 0x21, 0xbc, 0x8d, + 0x6a, 0xc5, 0x3b, 0x80, 0x26, 0x0f, 0x3b, 0xd5, 0x6e, 0x2d, 0xb8, 0x03, 0xf0, 0x2f, 0xa8, 0x49, + 0xa5, 0x4c, 0x59, 0x4c, 0xfd, 0xe1, 0xfc, 0x5a, 0x90, 0x47, 0x9d, 0x4a, 0xb7, 0xbe, 0xf7, 0xaa, + 0xb7, 0xe0, 0xab, 0xf4, 0x4e, 0x44, 0xd4, 0xdb, 0xbf, 0x13, 0x1e, 0xf3, 0x6b, 0x11, 0x6c, 0xd2, + 0x37, 0x01, 0x7c, 0x82, 0xd6, 0x47, 0x22, 0x0a, 0x23, 0xe0, 0x70, 0xcd, 0x8c, 0x26, 0x6b, 0x9d, + 0x6a, 0xb7, 0xb1, 0xf7, 0x62, 0x99, 0xf1, 0x81, 0xe7, 0x07, 0xf5, 0xd1, 0x74, 0xad, 0xf1, 0x6b, + 0xd4, 0xb2, 0x09, 0x03, 0xae, 0x67, 0x9e, 0xb4, 0xe6, 0x9e, 0xf4, 0xd3, 0x85, 0x86, 0x83, 0x19, + 0x95, 0x7b, 0xcc, 0x66, 0x3c, 0x87, 0xe0, 0x18, 0xb5, 0xe2, 0x5c, 0x1b, 0x91, 0x85, 0xd4, 0x18, + 0xc5, 0xa2, 0xdc, 0x80, 0x26, 0xa8, 0x53, 0xed, 0xd6, 0xf7, 0xbe, 0x5c, 0x9a, 0x85, 0x81, 0x53, + 0xee, 0x4f, 0x85, 0x47, 0xdc, 0xa8, 0x49, 0xd0, 0x8c, 0xe7, 0x60, 0x9b, 0x8c, 0x04, 0x86, 0x0a, + 0x20, 0xb4, 0xd7, 0x4b, 0x93, 0xfa, 0x3d, 0x92, 0x71, 0xe8, 0x04, 0x97, 0x13, 0x09, 0xf6, 0x9b, + 0x96, 0x6b, 0x8d, 0x77, 0x10, 0x4a, 0x40, 0x52, 0x65, 0x32, 0xe0, 0x86, 0xac, 0xbb, 0x8f, 0x3e, + 0x83, 0xe0, 0x2b, 0xd4, 0x84, 0x4c, 0xa6, 0x62, 0x62, 0xa3, 0xe2, 0xbc, 0x0d, 0x77, 0xde, 0xc7, + 0x0b, 0xcf, 0x3b, 0x9a, 0x8a, 0xdc, 0x99, 0x9b, 0xf0, 0x46, 0xec, 0xce, 0x65, 0x3c, 0x06, 0x6e, + 0xd8, 0x18, 0x34, 0x69, 0xf8, 0x73, 0xef, 0x10, 0xfc, 0x21, 0xda, 0x48, 0x29, 0x1f, 0xe6, 0x74, + 0x08, 0x61, 0x2c, 0x12, 0x20, 0x9b, 0x8e, 0xb2, 0x5e, 0x82, 0x03, 0x91, 0x00, 0x3e, 0x40, 0x35, + 0x7b, 0x2b, 0x52, 0x18, 0x43, 0x4a, 0x9a, 0x9d, 0x4a, 0xb7, 0xb1, 0xb7, 0xbb, 0x2c, 0xcb, 0xa7, + 0x96, 0x1c, 0xac, 0x8d, 0x8a, 0x15, 0x7e, 0x81, 0x36, 0xa5, 0x12, 0x99, 0x70, 0x57, 0x61, 0x4c, + 0xd3, 0x1c, 0x48, 0xab, 0x53, 0xe9, 0xae, 0x06, 0x8d, 0x29, 0x7c, 0x65, 0x51, 0xfc, 0x11, 0x6a, + 0xdc, 0xe6, 0x34, 0x65, 0xd7, 0xc5, 0xbd, 0xd4, 0x04, 0xbb, 0x47, 0x9a, 0x43, 0xf1, 0x4b, 0xd4, + 0x54, 0xa0, 0xa5, 0xe0, 0x9a, 0x45, 0x2c, 0x65, 0x86, 0x81, 0x26, 0x8f, 0x1d, 0xf3, 0x2d, 0x1c, + 0xff, 0x84, 0x1a, 0xb6, 0x39, 0x30, 0x3e, 0x0c, 0x15, 0x0c, 0x6d, 0xd9, 0x6d, 0xb9, 0xb7, 0x78, + 0xb9, 0xf0, 0x2d, 0xce, 0xbd, 0x24, 0x70, 0x8a, 0x60, 0x43, 0xce, 0x86, 0xf8, 0x7b, 0x84, 0xc6, + 0xac, 0x38, 0x62, 0x42, 0x9e, 0x38, 0xbb, 0xc5, 0x57, 0xe3, 0x6a, 0x4a, 0x0f, 0x66, 0xa4, 0xf8, + 0x3b, 0xd4, 0xb0, 0xc9, 0xd5, 0x86, 0x2a, 0x13, 0xda, 0xfe, 0x47, 0x9e, 0xba, 0x1a, 0x69, 0x97, + 0x66, 0x65, 0xef, 0xeb, 0x5d, 0x96, 0xcd, 0x31, 0xb0, 0x45, 0x7a, 0x61, 0x05, 0x16, 0xc2, 0xdf, + 0xfa, 0xa2, 0x05, 0x9e, 0x78, 0xfd, 0xff, 0x96, 0xea, 0xd1, 0x48, 0x44, 0x47, 0x3c, 0x71, 0xea, + 0x53, 0xb4, 0x55, 0xe6, 0x46, 0xe6, 0x51, 0xca, 0xf4, 0x8d, 0x77, 0x21, 0x4b, 0x5d, 0x70, 0xa1, + 0x3b, 0xf7, 0x32, 0xe7, 0x76, 0x82, 0x1e, 0x97, 0x6e, 0xf0, 0xbb, 0x64, 0x0a, 0xbc, 0xd9, 0xff, + 0x97, 0x9a, 0xb5, 0x0a, 0xd9, 0x91, 0x53, 0xcd, 0x7b, 0xc5, 0x0a, 0xa8, 0x29, 0xbc, 0xda, 0xf7, + 0xf6, 0x1a, 0x38, 0xd5, 0xbc, 0x57, 0x2e, 0x93, 0xa9, 0xd7, 0xb3, 0x7b, 0x7b, 0xfd, 0xec, 0x54, + 0xce, 0xeb, 0x15, 0xda, 0x2a, 0x26, 0x41, 0x98, 0x30, 
0x2d, 0x53, 0x3a, 0x09, 0xdd, 0xf0, 0xd8, + 0x76, 0xb7, 0x0f, 0x17, 0x7b, 0x87, 0x7e, 0xeb, 0x47, 0x3b, 0x4a, 0xce, 0x6c, 0x27, 0x51, 0x6c, + 0x0c, 0x89, 0xef, 0x82, 0xef, 0xb9, 0x63, 0x3f, 0x59, 0xda, 0xa9, 0x0e, 0xbd, 0xc8, 0x35, 0xc1, + 0x7a, 0x72, 0x17, 0x60, 0x8a, 0xb0, 0x54, 0x22, 0x06, 0xad, 0xed, 0x1b, 0x09, 0xe9, 0x0b, 0x65, + 0xc7, 0xd9, 0xee, 0x2d, 0xb5, 0x3d, 0x9f, 0x4a, 0xcf, 0xbc, 0x32, 0x68, 0xc9, 0x79, 0xa8, 0x1d, + 0xa2, 0xcd, 0xb9, 0x71, 0x81, 0x9f, 0xa2, 0x87, 0x90, 0x51, 0x96, 0x6a, 0x52, 0x71, 0x53, 0xa9, + 0x88, 0xec, 0x48, 0x63, 0x5c, 0x1b, 0x95, 0xc7, 0x6e, 0xa4, 0xf9, 0x69, 0x39, 0x0b, 0xd9, 0xf9, + 0x9a, 0x2b, 0xa6, 0x49, 0xd5, 0xe9, 0xdc, 0xba, 0xfd, 0x77, 0x05, 0xd5, 0x67, 0x5e, 0x10, 0x0f, + 0x50, 0x2d, 0x15, 0x65, 0xcd, 0x57, 0x5c, 0x2f, 0x5f, 0xdc, 0x65, 0x4e, 0x0b, 0x76, 0x70, 0xa7, + 0xc3, 0x67, 0xbe, 0x9a, 0x62, 0x6a, 0x60, 0x28, 0x94, 0xed, 0x09, 0x55, 0xd7, 0x45, 0xbb, 0xcb, + 0x92, 0x32, 0xf0, 0x8a, 0x49, 0xb0, 0x31, 0x9a, 0x06, 0x0c, 0x74, 0xfb, 0x9f, 0x0a, 0x6a, 0xbd, + 0x95, 0x2f, 0x7c, 0x8c, 0xde, 0x4f, 0x98, 0xa6, 0x51, 0x0a, 0xa1, 0x36, 0x0a, 0xc0, 0x94, 0x7f, + 0x1d, 0xa1, 0x02, 0x2d, 0xd2, 0xdc, 0xe5, 0xc1, 0xfe, 0x4c, 0xac, 0x05, 0x3b, 0x05, 0xf1, 0xc2, + 0xf1, 0xf6, 0x3d, 0x2d, 0x98, 0xb2, 0xec, 0x98, 0xbc, 0x31, 0x59, 0x1a, 0x6a, 0xca, 0x99, 0x61, + 0x7f, 0xd0, 0x69, 0x0a, 0x1b, 0x4b, 0xc6, 0xe4, 0x0f, 0x26, 0x4b, 0x2f, 0x66, 0x44, 0x41, 0xf3, + 0x66, 0x0e, 0x69, 0xdf, 0xa2, 0x27, 0xef, 0x1c, 0x76, 0xb8, 0x89, 0xaa, 0xbf, 0xc2, 0xa4, 0xf8, + 0xdd, 0xb1, 0x4b, 0x7c, 0x80, 0x56, 0x7d, 0x57, 0x5e, 0xb9, 0xc7, 0xdd, 0x9c, 0x33, 0x0d, 0xbc, + 0xf4, 0xeb, 0x95, 0xaf, 0x2a, 0x07, 0x7f, 0xa2, 0xe7, 0xb1, 0xc8, 0x16, 0xa9, 0x0f, 0x9a, 0x27, + 0x22, 0xb2, 0x09, 0xc8, 0x55, 0x0c, 0xe7, 0xb6, 0xe2, 0xce, 0x2b, 0xaf, 0xf7, 0x0b, 0xc1, 0x50, + 0xd8, 0xc9, 0xd3, 0x13, 0x6a, 0xd8, 0x1f, 0x02, 0x77, 0xf5, 0xd8, 0xf7, 0x5b, 0x54, 0x32, 0xfd, + 0xce, 0x5f, 0xca, 0x6f, 0x7c, 0xf8, 0xd7, 0x4a, 0x75, 0x70, 0x79, 0x11, 0x3d, 0x74, 0x9a, 0xcf, + 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x8b, 0xe6, 0x66, 0x6a, 0xc9, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job_service.pb.go new file mode 100644 index 0000000..6f5cefd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/job_service.pb.go @@ -0,0 +1,1928 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/job_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An enum that specifies the job attributes that are returned in the +// [MatchingJob.Job][] in [SearchJobsResponse][google.cloud.talent.v4beta1.SearchJobsResponse] or [Job][google.cloud.talent.v4beta1.Job] objects in +// [ListJobsResponse][google.cloud.talent.v4beta1.ListJobsResponse]. +type JobView int32 + +const ( + // Default value. + JobView_JOB_VIEW_UNSPECIFIED JobView = 0 + // A ID only view of job, with following attributes: + // [Job.name][google.cloud.talent.v4beta1.Job.name], [Job.requisition_id][google.cloud.talent.v4beta1.Job.requisition_id], [Job.language_code][google.cloud.talent.v4beta1.Job.language_code]. + JobView_JOB_VIEW_ID_ONLY JobView = 1 + // A minimal view of the job, with the following attributes: + // [Job.name][google.cloud.talent.v4beta1.Job.name], [Job.requisition_id][google.cloud.talent.v4beta1.Job.requisition_id], [Job.job_title][], + // [Job.company][google.cloud.talent.v4beta1.Job.company], [Job.DerivedInfo.locations][google.cloud.talent.v4beta1.Job.DerivedInfo.locations], [Job.language_code][google.cloud.talent.v4beta1.Job.language_code]. + JobView_JOB_VIEW_MINIMAL JobView = 2 + // A small view of the job, with the following attributes in the search + // results: [Job.name][google.cloud.talent.v4beta1.Job.name], [Job.requisition_id][google.cloud.talent.v4beta1.Job.requisition_id], [Job.job_title][], + // [Job.company][google.cloud.talent.v4beta1.Job.company], [Job.DerivedInfo.locations][google.cloud.talent.v4beta1.Job.DerivedInfo.locations], [Job.visibility][google.cloud.talent.v4beta1.Job.visibility], + // [Job.language_code][google.cloud.talent.v4beta1.Job.language_code], [Job.description][google.cloud.talent.v4beta1.Job.description]. + JobView_JOB_VIEW_SMALL JobView = 3 + // All available attributes are included in the search results. + JobView_JOB_VIEW_FULL JobView = 4 +) + +var JobView_name = map[int32]string{ + 0: "JOB_VIEW_UNSPECIFIED", + 1: "JOB_VIEW_ID_ONLY", + 2: "JOB_VIEW_MINIMAL", + 3: "JOB_VIEW_SMALL", + 4: "JOB_VIEW_FULL", +} +var JobView_value = map[string]int32{ + "JOB_VIEW_UNSPECIFIED": 0, + "JOB_VIEW_ID_ONLY": 1, + "JOB_VIEW_MINIMAL": 2, + "JOB_VIEW_SMALL": 3, + "JOB_VIEW_FULL": 4, +} + +func (x JobView) String() string { + return proto.EnumName(JobView_name, int32(x)) +} +func (JobView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{0} +} + +// A string-represented enumeration of the job search mode. The service +// operate differently for different modes of service. +type SearchJobsRequest_SearchMode int32 + +const ( + // The mode of the search method isn't specified. + SearchJobsRequest_SEARCH_MODE_UNSPECIFIED SearchJobsRequest_SearchMode = 0 + // The job search matches against all jobs, and featured jobs + // (jobs with promotionValue > 0) are not specially handled. + SearchJobsRequest_JOB_SEARCH SearchJobsRequest_SearchMode = 1 + // The job search matches only against featured jobs (jobs with a + // promotionValue > 0). This method doesn't return any jobs having a + // promotionValue <= 0. The search results order is determined by the + // promotionValue (jobs with a higher promotionValue are returned higher up + // in the search results), with relevance being used as a tiebreaker. 
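A small sketch (not part of the generated file) of how these generated enums behave: they are plain int32 types whose String() methods resolve names through the registered *_name maps, so callers can compare and log them directly. The two constants used are the ones defined in this file.

package main

import (
	"fmt"

	talent "google.golang.org/genproto/googleapis/cloud/talent/v4beta1"
)

func main() {
	mode := talent.SearchJobsRequest_FEATURED_JOB_SEARCH // matches only jobs with promotionValue > 0
	view := talent.JobView_JOB_VIEW_SMALL                // trimmed set of Job attributes in results

	fmt.Println(mode.String(), int32(mode)) // prints: FEATURED_JOB_SEARCH 2
	fmt.Println(view.String(), int32(view)) // prints: JOB_VIEW_SMALL 3
}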
+ SearchJobsRequest_FEATURED_JOB_SEARCH SearchJobsRequest_SearchMode = 2 +) + +var SearchJobsRequest_SearchMode_name = map[int32]string{ + 0: "SEARCH_MODE_UNSPECIFIED", + 1: "JOB_SEARCH", + 2: "FEATURED_JOB_SEARCH", +} +var SearchJobsRequest_SearchMode_value = map[string]int32{ + "SEARCH_MODE_UNSPECIFIED": 0, + "JOB_SEARCH": 1, + "FEATURED_JOB_SEARCH": 2, +} + +func (x SearchJobsRequest_SearchMode) String() string { + return proto.EnumName(SearchJobsRequest_SearchMode_name, int32(x)) +} +func (SearchJobsRequest_SearchMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{7, 0} +} + +// Controls whether highly similar jobs are returned next to each other in +// the search results. Jobs are identified as highly similar based on +// their titles, job categories, and locations. Highly similar results are +// clustered so that only one representative job of the cluster is +// displayed to the job seeker higher up in the results, with the other jobs +// being displayed lower down in the results. +type SearchJobsRequest_DiversificationLevel int32 + +const ( + // The diversification level isn't specified. + SearchJobsRequest_DIVERSIFICATION_LEVEL_UNSPECIFIED SearchJobsRequest_DiversificationLevel = 0 + // Disables diversification. Jobs that would normally be pushed to the last + // page would not have their positions altered. This may result in highly + // similar jobs appearing in sequence in the search results. + SearchJobsRequest_DISABLED SearchJobsRequest_DiversificationLevel = 1 + // Default diversifying behavior. The result list is ordered so that + // highly similar results are pushed to the end of the last page of search + // results. + SearchJobsRequest_SIMPLE SearchJobsRequest_DiversificationLevel = 2 +) + +var SearchJobsRequest_DiversificationLevel_name = map[int32]string{ + 0: "DIVERSIFICATION_LEVEL_UNSPECIFIED", + 1: "DISABLED", + 2: "SIMPLE", +} +var SearchJobsRequest_DiversificationLevel_value = map[string]int32{ + "DIVERSIFICATION_LEVEL_UNSPECIFIED": 0, + "DISABLED": 1, + "SIMPLE": 2, +} + +func (x SearchJobsRequest_DiversificationLevel) String() string { + return proto.EnumName(SearchJobsRequest_DiversificationLevel_name, int32(x)) +} +func (SearchJobsRequest_DiversificationLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{7, 1} +} + +// The importance level for [CustomRankingInfo.ranking_expression][google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo.ranking_expression]. +type SearchJobsRequest_CustomRankingInfo_ImportanceLevel int32 + +const ( + // Default value if the importance level isn't specified. + SearchJobsRequest_CustomRankingInfo_IMPORTANCE_LEVEL_UNSPECIFIED SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 0 + // The given ranking expression is of None importance, existing relevance + // score (determined by API algorithm) dominates job's final ranking + // position. + SearchJobsRequest_CustomRankingInfo_NONE SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 1 + // The given ranking expression is of Low importance in terms of job's + // final ranking position compared to existing relevance + // score (determined by API algorithm). + SearchJobsRequest_CustomRankingInfo_LOW SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 2 + // The given ranking expression is of Mild importance in terms of job's + // final ranking position compared to existing relevance + // score (determined by API algorithm). 
+ SearchJobsRequest_CustomRankingInfo_MILD SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 3 + // The given ranking expression is of Medium importance in terms of job's + // final ranking position compared to existing relevance + // score (determined by API algorithm). + SearchJobsRequest_CustomRankingInfo_MEDIUM SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 4 + // The given ranking expression is of High importance in terms of job's + // final ranking position compared to existing relevance + // score (determined by API algorithm). + SearchJobsRequest_CustomRankingInfo_HIGH SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 5 + // The given ranking expression is of Extreme importance, and dominates + // job's final ranking position with existing relevance + // score (determined by API algorithm) ignored. + SearchJobsRequest_CustomRankingInfo_EXTREME SearchJobsRequest_CustomRankingInfo_ImportanceLevel = 6 +) + +var SearchJobsRequest_CustomRankingInfo_ImportanceLevel_name = map[int32]string{ + 0: "IMPORTANCE_LEVEL_UNSPECIFIED", + 1: "NONE", + 2: "LOW", + 3: "MILD", + 4: "MEDIUM", + 5: "HIGH", + 6: "EXTREME", +} +var SearchJobsRequest_CustomRankingInfo_ImportanceLevel_value = map[string]int32{ + "IMPORTANCE_LEVEL_UNSPECIFIED": 0, + "NONE": 1, + "LOW": 2, + "MILD": 3, + "MEDIUM": 4, + "HIGH": 5, + "EXTREME": 6, +} + +func (x SearchJobsRequest_CustomRankingInfo_ImportanceLevel) String() string { + return proto.EnumName(SearchJobsRequest_CustomRankingInfo_ImportanceLevel_name, int32(x)) +} +func (SearchJobsRequest_CustomRankingInfo_ImportanceLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{7, 0, 0} +} + +// Input only. +// +// Create job request. +type CreateJobRequest struct { + // Required. + // + // The resource name of the tenant under which the job is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and a default tenant is created if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The Job to be created. 
+ Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateJobRequest) Reset() { *m = CreateJobRequest{} } +func (m *CreateJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateJobRequest) ProtoMessage() {} +func (*CreateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{0} +} +func (m *CreateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateJobRequest.Unmarshal(m, b) +} +func (m *CreateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateJobRequest.Merge(dst, src) +} +func (m *CreateJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateJobRequest.Size(m) +} +func (m *CreateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateJobRequest proto.InternalMessageInfo + +func (m *CreateJobRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +// Input only. +// +// Get job request. +type GetJobRequest struct { + // Required. + // + // The resource name of the job to retrieve. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for + // example, "projects/api-test-project/tenants/foo/jobs/1234". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/jobs/1234". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobRequest) Reset() { *m = GetJobRequest{} } +func (m *GetJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobRequest) ProtoMessage() {} +func (*GetJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{1} +} +func (m *GetJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobRequest.Unmarshal(m, b) +} +func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobRequest.Merge(dst, src) +} +func (m *GetJobRequest) XXX_Size() int { + return xxx_messageInfo_GetJobRequest.Size(m) +} +func (m *GetJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo + +func (m *GetJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Input only. +// +// Update job request. +type UpdateJobRequest struct { + // Required. + // + // The Job to be updated. + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + // Optional but strongly recommended to be provided for the best service + // experience. + // + // If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] is provided, only the specified fields in + // [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. 
Otherwise all the fields are updated. + // + // A field mask to restrict the fields that are updated. Only + // top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} } +func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobRequest) ProtoMessage() {} +func (*UpdateJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{2} +} +func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b) +} +func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobRequest.Merge(dst, src) +} +func (m *UpdateJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobRequest.Size(m) +} +func (m *UpdateJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo + +func (m *UpdateJobRequest) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Input only. +// +// Delete job request. +type DeleteJobRequest struct { + // Required. + // + // The resource name of the job to be deleted. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for + // example, "projects/api-test-project/tenants/foo/jobs/1234". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project/jobs/1234". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} } +func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobRequest) ProtoMessage() {} +func (*DeleteJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{3} +} +func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b) +} +func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobRequest.Merge(dst, src) +} +func (m *DeleteJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobRequest.Size(m) +} +func (m *DeleteJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo + +func (m *DeleteJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Input only. +// +// Batch delete jobs request. +type BatchDeleteJobsRequest struct { + // Required. + // + // The resource name of the tenant under which the job is created. 
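// A minimal sketch of the update-mask pattern described above: only the listed
// top-level Job fields are written, everything else on the stored job is left
// untouched. The helper name and the chosen paths ("title", "description",
// assumed to be valid top-level Job fields) are placeholders.
func newExampleUpdate(job *Job) *UpdateJobRequest {
	return &UpdateJobRequest{
		Job: job,
		UpdateMask: &field_mask.FieldMask{
			// Restrict the update to these top-level fields only.
			Paths: []string{"title", "description"},
		},
	}
}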
+ // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The filter string specifies the jobs to be deleted. + // + // Supported operator: =, AND + // + // The fields eligible for filtering are: + // + // * `companyName` (Required) + // * `requisitionId` (Required) + // + // Sample Query: companyName = "projects/api-test-project/companies/123" AND + // requisitionId = "req-1" + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeleteJobsRequest) Reset() { *m = BatchDeleteJobsRequest{} } +func (m *BatchDeleteJobsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeleteJobsRequest) ProtoMessage() {} +func (*BatchDeleteJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{4} +} +func (m *BatchDeleteJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeleteJobsRequest.Unmarshal(m, b) +} +func (m *BatchDeleteJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeleteJobsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeleteJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeleteJobsRequest.Merge(dst, src) +} +func (m *BatchDeleteJobsRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeleteJobsRequest.Size(m) +} +func (m *BatchDeleteJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeleteJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeleteJobsRequest proto.InternalMessageInfo + +func (m *BatchDeleteJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchDeleteJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Input only. +// +// List jobs request. +type ListJobsRequest struct { + // Required. + // + // The resource name of the tenant under which the job is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The filter string specifies the jobs to be enumerated. + // + // Supported operator: =, AND + // + // The fields eligible for filtering are: + // + // * `companyName` (Required) + // * `requisitionId` (Optional) + // * `status` (Optional) Available values: OPEN, EXPIRED, ALL. Defaults to + // OPEN if no value is specified. + // + // Sample Query: + // + // * companyName = "projects/api-test-project/tenants/foo/companies/bar" + // * companyName = "projects/api-test-project/tenants/foo/companies/bar" AND + // requisitionId = "req-1" + // * companyName = "projects/api-test-project/tenants/foo/companies/bar" AND + // status = "EXPIRED" + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. + // + // The starting point of a query result. 
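// A minimal sketch of the batch-delete filter syntax described above: the
// required companyName and requisitionId terms joined with the supported AND
// operator. The resource names are placeholders.
func newExampleBatchDelete() *BatchDeleteJobsRequest {
	return &BatchDeleteJobsRequest{
		Parent: "projects/my-project/tenants/my-tenant",
		Filter: `companyName = "projects/my-project/companies/123" AND requisitionId = "req-1"`,
	}
}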
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The maximum number of jobs to be returned per page of results. + // + // If [job_view][google.cloud.talent.v4beta1.ListJobsRequest.job_view] is set to [JobView.JOB_VIEW_ID_ONLY][google.cloud.talent.v4beta1.JobView.JOB_VIEW_ID_ONLY], the maximum allowed + // page size is 1000. Otherwise, the maximum allowed page size is 100. + // + // Default is 100 if empty or a number < 1 is specified. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // The desired job attributes returned for jobs in the + // search response. Defaults to [JobView.JOB_VIEW_FULL][google.cloud.talent.v4beta1.JobView.JOB_VIEW_FULL] if no value is + // specified. + JobView JobView `protobuf:"varint,5,opt,name=job_view,json=jobView,proto3,enum=google.cloud.talent.v4beta1.JobView" json:"job_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} } +func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobsRequest) ProtoMessage() {} +func (*ListJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{5} +} +func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b) +} +func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsRequest.Merge(dst, src) +} +func (m *ListJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListJobsRequest.Size(m) +} +func (m *ListJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo + +func (m *ListJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobsRequest) GetJobView() JobView { + if m != nil { + return m.JobView + } + return JobView_JOB_VIEW_UNSPECIFIED +} + +// Output only. +// +// List jobs response. +type ListJobsResponse struct { + // The Jobs for a given company. + // + // The maximum number of items returned is based on the limit field + // provided in the request. + Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // A token to retrieve the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. 
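// A minimal sketch of paging through ListJobs results: the NextPageToken from
// each response is fed back in as PageToken until it comes back empty. Assumes
// a JobServiceClient (declared further down in this file); listAllJobs, the
// parent, and the filter are placeholder names.
func listAllJobs(ctx context.Context, c JobServiceClient, parent, filter string) ([]*Job, error) {
	req := &ListJobsRequest{
		Parent:   parent,
		Filter:   filter,
		PageSize: 100, // the maximum unless JobView_JOB_VIEW_ID_ONLY is requested
		JobView:  JobView_JOB_VIEW_FULL,
	}
	var all []*Job
	for {
		resp, err := c.ListJobs(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetJobs()...)
		if resp.GetNextPageToken() == "" {
			// No further pages.
			return all, nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}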
+ Metadata *ResponseMetadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} } +func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobsResponse) ProtoMessage() {} +func (*ListJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{6} +} +func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b) +} +func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobsResponse.Merge(dst, src) +} +func (m *ListJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListJobsResponse.Size(m) +} +func (m *ListJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo + +func (m *ListJobsResponse) GetJobs() []*Job { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListJobsResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +// Input only. +// +// The Request body of the `SearchJobs` call. +type SearchJobsRequest struct { + // Required. + // + // The resource name of the tenant to search within. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenant/foo". + // + // Tenant id is optional and the default tenant is used if unspecified, for + // example, "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. + // + // Mode of a search. + // + // Defaults to [SearchMode.JOB_SEARCH][google.cloud.talent.v4beta1.SearchJobsRequest.SearchMode.JOB_SEARCH]. + SearchMode SearchJobsRequest_SearchMode `protobuf:"varint,2,opt,name=search_mode,json=searchMode,proto3,enum=google.cloud.talent.v4beta1.SearchJobsRequest_SearchMode" json:"search_mode,omitempty"` + // Required. + // + // The meta information collected about the job searcher, used to improve the + // search quality of the service.. The identifiers, (such as `user_id`) are + // provided by users, and must be unique and consistent. + RequestMetadata *RequestMetadata `protobuf:"bytes,3,opt,name=request_metadata,json=requestMetadata,proto3" json:"request_metadata,omitempty"` + // Optional. + // + // Query used to search against jobs, such as keyword, location filters, etc. + JobQuery *JobQuery `protobuf:"bytes,4,opt,name=job_query,json=jobQuery,proto3" json:"job_query,omitempty"` + // Optional. + // + // Controls whether to broaden the search when it produces sparse results. + // Broadened queries append results to the end of the matching results + // list. + // + // Defaults to false. + EnableBroadening bool `protobuf:"varint,5,opt,name=enable_broadening,json=enableBroadening,proto3" json:"enable_broadening,omitempty"` + // Optional. + // + // Controls if the search job request requires the return of a precise + // count of the first 300 results. 
Setting this to `true` ensures + // consistency in the number of results per page. Best practice is to set this + // value to true if a client allows users to jump directly to a + // non-sequential search results page. + // + // Enabling this flag may adversely impact performance. + // + // Defaults to false. + RequirePreciseResultSize bool `protobuf:"varint,6,opt,name=require_precise_result_size,json=requirePreciseResultSize,proto3" json:"require_precise_result_size,omitempty"` + // Optional. + // + // An expression specifies a histogram request against matching jobs. + // + // Expression syntax is an aggregation function call with histogram facets and + // other options. + // + // Available aggregation function calls are: + // * `count(string_histogram_facet)`: Count the number of matching entities, + // for each distinct attribute value. + // * `count(numeric_histogram_facet, list of buckets)`: Count the number of + // matching entities within each bucket. + // + // Data types: + // + // * Histogram facet: facet names with format [a-zA-Z][a-zA-Z0-9_]+. + // * String: string like "any string with backslash escape for quote(\")." + // * Number: whole number and floating point number like 10, -1 and -0.01. + // * List: list of elements with comma(,) separator surrounded by square + // brackets, for example, [1, 2, 3] and ["one", "two", "three"]. + // + // Built-in constants: + // + // * MIN (minimum number similar to java Double.MIN_VALUE) + // * MAX (maximum number similar to java Double.MAX_VALUE) + // + // Built-in functions: + // + // * bucket(start, end[, label]): bucket built-in function creates a bucket + // with range of [start, end). Note that the end is exclusive, for example, + // bucket(1, MAX, "positive number") or bucket(1, 10). + // + // Job histogram facets: + // + // * company_id: histogram by [Job.distributor_company_id][]. + // * company_display_name: histogram by [Job.company_display_name][google.cloud.talent.v4beta1.Job.company_display_name]. + // * employment_type: histogram by [Job.employment_types][google.cloud.talent.v4beta1.Job.employment_types], for example, + // "FULL_TIME", "PART_TIME". + // * company_size: histogram by [CompanySize][google.cloud.talent.v4beta1.CompanySize], for example, "SMALL", + // "MEDIUM", "BIG". + // * publish_time_in_month: histogram by the [Job.publish_time][] in months. + // Must specify list of numeric buckets in spec. + // * publish_time_in_year: histogram by the [Job.publish_time][] in years. + // Must specify list of numeric buckets in spec. + // * degree_type: histogram by the [Job.degree_type][], for example, + // "Bachelors", "Masters". + // * job_level: histogram by the [Job.job_level][google.cloud.talent.v4beta1.Job.job_level], for example, "Entry + // Level". + // * country: histogram by the country code of jobs, for example, "US", "FR". + // * admin1: histogram by the admin1 code of jobs, which is a global + // placeholder referring to the state, province, or the particular term a + // country uses to define the geographic structure below the country level, + // for example, "CA", "IL". + // * city: histogram by a combination of the "city name, admin1 code". For + // example, "Mountain View, CA", "New York, NY". + // * admin1_country: histogram by a combination of the "admin1 code, country", + // for example, "CA, US", "IL, US". + // * city_coordinate: histogram by the city center's GPS coordinates (latitude + // and longitude), for example, 37.4038522,-122.0987765. 
Since the coordinates + // of a city center can change, customers may need to refresh them + // periodically. + // * locale: histogram by the [Job.language_code][google.cloud.talent.v4beta1.Job.language_code], for example, "en-US", + // "fr-FR". + // * language: histogram by the language subtag of the [Job.language_code][google.cloud.talent.v4beta1.Job.language_code], + // for example, "en", "fr". + // * category: histogram by the [JobCategory][google.cloud.talent.v4beta1.JobCategory], for example, + // "COMPUTER_AND_IT", "HEALTHCARE". + // * base_compensation_unit: histogram by the [CompensationUnit][] of base + // salary, for example, "WEEKLY", "MONTHLY". + // * base_compensation: histogram by the base salary. Must specify list of + // numeric buckets to group results by. + // * annualized_base_compensation: histogram by the base annualized salary. + // Must specify list of numeric buckets to group results by. + // * annualized_total_compensation: histogram by the total annualized salary. + // Must specify list of numeric buckets to group results by. + // * string_custom_attribute: histogram by string [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes]. + // Values can be accessed via square bracket notations like + // string_custom_attribute["key1"]. + // * numeric_custom_attribute: histogram by numeric [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes]. + // Values can be accessed via square bracket notations like + // numeric_custom_attribute["key1"]. Must specify list of numeric buckets to + // group results by. + // + // Example expressions: + // * count(admin1) + // * count(base_compensation, [bucket(1000, 10000), bucket(10000, 100000), + // bucket(100000, MAX)]) + // * count(string_custom_attribute["some-string-custom-attribute"]) + // * count(numeric_custom_attribute["some-numeric-custom-attribute"], + // [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative"]) + HistogramQueries []*HistogramQuery `protobuf:"bytes,7,rep,name=histogram_queries,json=histogramQueries,proto3" json:"histogram_queries,omitempty"` + // Optional. + // + // The desired job attributes returned for jobs in the + // search response. Defaults to [JobView.SMALL][] if no value is specified. + JobView JobView `protobuf:"varint,8,opt,name=job_view,json=jobView,proto3,enum=google.cloud.talent.v4beta1.JobView" json:"job_view,omitempty"` + // Optional. + // + // An integer that specifies the current offset (that is, starting result + // location, amongst the jobs deemed by the API as relevant) in search + // results. This field is only considered if [page_token][google.cloud.talent.v4beta1.SearchJobsRequest.page_token] is unset. + // + // For example, 0 means to return results starting from the first matching + // job, and 10 means to return from the 11th job. This can be used for + // pagination, (for example, pageSize = 10 and offset = 10 means to return + // from the second page). + Offset int32 `protobuf:"varint,9,opt,name=offset,proto3" json:"offset,omitempty"` + // Optional. + // + // A limit on the number of jobs returned in the search results. + // Increasing this value above the default value of 10 can increase search + // response time. The value can be between 1 and 100. + PageSize int32 `protobuf:"varint,10,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // The token specifying the current offset within + // search results. 
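// A minimal sketch of the histogram expression syntax documented above,
// expressed as values for SearchJobsRequest.HistogramQueries. It assumes the
// HistogramQuery message (defined elsewhere in this package) carries the
// expression in its HistogramQuery string field; the custom-attribute key is
// a placeholder.
func exampleHistogramQueries() []*HistogramQuery {
	return []*HistogramQuery{
		// One bucket per distinct company display name.
		{HistogramQuery: `count(company_display_name)`},
		// Numeric facet with explicit buckets; bucket(start, end[, label]) is
		// end-exclusive and MAX is the built-in maximum constant.
		{HistogramQuery: `count(base_compensation, [bucket(1000, 10000), bucket(10000, 100000), bucket(100000, MAX)])`},
		// String custom attributes are addressed with bracket notation.
		{HistogramQuery: `count(string_custom_attribute["some-string-custom-attribute"])`},
	}
}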
See [SearchJobsResponse.next_page_token][google.cloud.talent.v4beta1.SearchJobsResponse.next_page_token] for + // an explanation of how to obtain the next set of query results. + PageToken string `protobuf:"bytes,11,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The criteria determining how search results are sorted. Default is + // "relevance desc". + // + // Supported options are: + // + // * "relevance desc": By relevance descending, as determined by the API + // algorithms. Relevance thresholding of query results is only available + // with this ordering. + // * "posting`_`publish`_`time desc": By [Job.posting_publish_time][google.cloud.talent.v4beta1.Job.posting_publish_time] + // descending. + // * "posting`_`update`_`time desc": By [Job.posting_update_time][google.cloud.talent.v4beta1.Job.posting_update_time] + // descending. + // * "title": By [Job.title][google.cloud.talent.v4beta1.Job.title] ascending. + // * "title desc": By [Job.title][google.cloud.talent.v4beta1.Job.title] descending. + // * "annualized`_`base`_`compensation": By job's + // [CompensationInfo.annualized_base_compensation_range][google.cloud.talent.v4beta1.CompensationInfo.annualized_base_compensation_range] ascending. Jobs + // whose annualized base compensation is unspecified are put at the end of + // search results. + // * "annualized`_`base`_`compensation desc": By job's + // [CompensationInfo.annualized_base_compensation_range][google.cloud.talent.v4beta1.CompensationInfo.annualized_base_compensation_range] descending. Jobs + // whose annualized base compensation is unspecified are put at the end of + // search results. + // * "annualized`_`total`_`compensation": By job's + // [CompensationInfo.annualized_total_compensation_range][google.cloud.talent.v4beta1.CompensationInfo.annualized_total_compensation_range] ascending. Jobs + // whose annualized base compensation is unspecified are put at the end of + // search results. + // * "annualized`_`total`_`compensation desc": By job's + // [CompensationInfo.annualized_total_compensation_range][google.cloud.talent.v4beta1.CompensationInfo.annualized_total_compensation_range] descending. Jobs + // whose annualized base compensation is unspecified are put at the end of + // search results. + // * "custom`_`ranking desc": By the relevance score adjusted to the + // [SearchJobsRequest.custom_ranking_info.ranking_expression][] with weight + // factor assigned by + // [SearchJobsRequest.custom_ranking_info.importance_level][] in descending + // order. + // * "location`_`distance": By the distance between the location on jobs and + // locations specified in the + // [SearchJobsRequest.job_query.location_filters][]. + // When this order is selected, the + // [SearchJobsRequest.job_query.location_filters][] must not be empty. When + // a job has multiple locations, the location closest to one of the locations + // specified in the location filter will be used to calculate location + // distance. Distance is calculated by the distance between two lat/long + // coordinates, with a precision of 10e-4 degrees (11.3 meters). + // Jobs that don't have locations specified will be ranked below jobs having + // locations. + // Diversification strategy is still applied unless explicitly disabled in + // [SearchJobsRequest.diversification_level][google.cloud.talent.v4beta1.SearchJobsRequest.diversification_level]. + OrderBy string `protobuf:"bytes,12,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Optional. 
+ // + // Controls whether highly similar jobs are returned next to each other in + // the search results. Jobs are identified as highly similar based on + // their titles, job categories, and locations. Highly similar results are + // clustered so that only one representative job of the cluster is + // displayed to the job seeker higher up in the results, with the other jobs + // being displayed lower down in the results. + // + // Defaults to [DiversificationLevel.SIMPLE][google.cloud.talent.v4beta1.SearchJobsRequest.DiversificationLevel.SIMPLE] if no value + // is specified. + DiversificationLevel SearchJobsRequest_DiversificationLevel `protobuf:"varint,13,opt,name=diversification_level,json=diversificationLevel,proto3,enum=google.cloud.talent.v4beta1.SearchJobsRequest_DiversificationLevel" json:"diversification_level,omitempty"` + // Optional. + // + // Controls over how job documents get ranked on top of existing relevance + // score (determined by API algorithm). + CustomRankingInfo *SearchJobsRequest_CustomRankingInfo `protobuf:"bytes,14,opt,name=custom_ranking_info,json=customRankingInfo,proto3" json:"custom_ranking_info,omitempty"` + // Optional. + // + // Controls whether to disable exact keyword match on [Job.job_title][], + // [Job.description][google.cloud.talent.v4beta1.Job.description], [Job.company_display_name][google.cloud.talent.v4beta1.Job.company_display_name], [Job.locations][0], + // [Job.qualifications][google.cloud.talent.v4beta1.Job.qualifications]. When disable keyword match is turned off, a + // keyword match returns jobs that do not match given category filters when + // there are matching keywords. For example, for the query "program manager," + // a result is returned even if the job posting has the title "software + // developer," which doesn't fall into "program manager" ontology, but does + // have "program manager" appearing in its description. + // + // For queries like "cloud" that don't contain title or + // location specific ontology, jobs with "cloud" keyword matches are returned + // regardless of this flag's value. + // + // Please use [Company.keyword_searchable_custom_fields][] or + // [Company.keyword_searchable_custom_attributes][] if company specific + // globally matched custom field/attribute string values is needed. Enabling + // keyword match improves recall of subsequent search requests. + // + // Defaults to false. 
+ DisableKeywordMatch bool `protobuf:"varint,16,opt,name=disable_keyword_match,json=disableKeywordMatch,proto3" json:"disable_keyword_match,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchJobsRequest) Reset() { *m = SearchJobsRequest{} } +func (m *SearchJobsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchJobsRequest) ProtoMessage() {} +func (*SearchJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{7} +} +func (m *SearchJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchJobsRequest.Unmarshal(m, b) +} +func (m *SearchJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchJobsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchJobsRequest.Merge(dst, src) +} +func (m *SearchJobsRequest) XXX_Size() int { + return xxx_messageInfo_SearchJobsRequest.Size(m) +} +func (m *SearchJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchJobsRequest proto.InternalMessageInfo + +func (m *SearchJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *SearchJobsRequest) GetSearchMode() SearchJobsRequest_SearchMode { + if m != nil { + return m.SearchMode + } + return SearchJobsRequest_SEARCH_MODE_UNSPECIFIED +} + +func (m *SearchJobsRequest) GetRequestMetadata() *RequestMetadata { + if m != nil { + return m.RequestMetadata + } + return nil +} + +func (m *SearchJobsRequest) GetJobQuery() *JobQuery { + if m != nil { + return m.JobQuery + } + return nil +} + +func (m *SearchJobsRequest) GetEnableBroadening() bool { + if m != nil { + return m.EnableBroadening + } + return false +} + +func (m *SearchJobsRequest) GetRequirePreciseResultSize() bool { + if m != nil { + return m.RequirePreciseResultSize + } + return false +} + +func (m *SearchJobsRequest) GetHistogramQueries() []*HistogramQuery { + if m != nil { + return m.HistogramQueries + } + return nil +} + +func (m *SearchJobsRequest) GetJobView() JobView { + if m != nil { + return m.JobView + } + return JobView_JOB_VIEW_UNSPECIFIED +} + +func (m *SearchJobsRequest) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *SearchJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchJobsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *SearchJobsRequest) GetDiversificationLevel() SearchJobsRequest_DiversificationLevel { + if m != nil { + return m.DiversificationLevel + } + return SearchJobsRequest_DIVERSIFICATION_LEVEL_UNSPECIFIED +} + +func (m *SearchJobsRequest) GetCustomRankingInfo() *SearchJobsRequest_CustomRankingInfo { + if m != nil { + return m.CustomRankingInfo + } + return nil +} + +func (m *SearchJobsRequest) GetDisableKeywordMatch() bool { + if m != nil { + return m.DisableKeywordMatch + } + return false +} + +// Input only. +// +// Custom ranking information for [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. +type SearchJobsRequest_CustomRankingInfo struct { + // Required. 
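// A minimal sketch pulling the SearchJobsRequest fields documented above into
// one request. RequestMetadata and JobQuery come from this package, but their
// field names (UserId, SessionId, Domain, Query) are assumptions based on the
// v4beta1 proto; the parent, ids, and query text are placeholders.
func newExampleSearch(parent string) *SearchJobsRequest {
	return &SearchJobsRequest{
		Parent:     parent,
		SearchMode: SearchJobsRequest_JOB_SEARCH,
		RequestMetadata: &RequestMetadata{
			// Unique, consistent identifiers improve ranking quality over time.
			UserId:    "user-1",
			SessionId: "session-1",
			Domain:    "www.example.com",
		},
		JobQuery:             &JobQuery{Query: "software engineer"},
		EnableBroadening:     true,
		JobView:              JobView_JOB_VIEW_SMALL,
		PageSize:             10,
		OrderBy:              "posting_publish_time desc",
		DiversificationLevel: SearchJobsRequest_SIMPLE,
	}
}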
+ // + // Controls over how important the score of + // [CustomRankingInfo.ranking_expression][google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo.ranking_expression] gets applied to job's final + // ranking position. + // + // An error is thrown if not specified. + ImportanceLevel SearchJobsRequest_CustomRankingInfo_ImportanceLevel `protobuf:"varint,1,opt,name=importance_level,json=importanceLevel,proto3,enum=google.cloud.talent.v4beta1.SearchJobsRequest_CustomRankingInfo_ImportanceLevel" json:"importance_level,omitempty"` + // Required. + // + // Controls over how job documents get ranked on top of existing relevance + // score (determined by API algorithm). The product of ranking expression + // and relevance score is used to determine job's final ranking position. + // + // The syntax for this expression is a subset of Google SQL syntax. + // + // Supported operators are: +, -, *, /, where the left and right side of + // the operator is either a numeric [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes] key, + // integer/double value or an expression that can be evaluated to a number. + // + // Parenthesis are supported to adjust calculation precedence. The + // expression must be < 100 characters in length. + // + // Sample ranking expression + // (year + 25) * 0.25 - (freshness / 0.5) + RankingExpression string `protobuf:"bytes,2,opt,name=ranking_expression,json=rankingExpression,proto3" json:"ranking_expression,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchJobsRequest_CustomRankingInfo) Reset() { *m = SearchJobsRequest_CustomRankingInfo{} } +func (m *SearchJobsRequest_CustomRankingInfo) String() string { return proto.CompactTextString(m) } +func (*SearchJobsRequest_CustomRankingInfo) ProtoMessage() {} +func (*SearchJobsRequest_CustomRankingInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{7, 0} +} +func (m *SearchJobsRequest_CustomRankingInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchJobsRequest_CustomRankingInfo.Unmarshal(m, b) +} +func (m *SearchJobsRequest_CustomRankingInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchJobsRequest_CustomRankingInfo.Marshal(b, m, deterministic) +} +func (dst *SearchJobsRequest_CustomRankingInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchJobsRequest_CustomRankingInfo.Merge(dst, src) +} +func (m *SearchJobsRequest_CustomRankingInfo) XXX_Size() int { + return xxx_messageInfo_SearchJobsRequest_CustomRankingInfo.Size(m) +} +func (m *SearchJobsRequest_CustomRankingInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SearchJobsRequest_CustomRankingInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchJobsRequest_CustomRankingInfo proto.InternalMessageInfo + +func (m *SearchJobsRequest_CustomRankingInfo) GetImportanceLevel() SearchJobsRequest_CustomRankingInfo_ImportanceLevel { + if m != nil { + return m.ImportanceLevel + } + return SearchJobsRequest_CustomRankingInfo_IMPORTANCE_LEVEL_UNSPECIFIED +} + +func (m *SearchJobsRequest_CustomRankingInfo) GetRankingExpression() string { + if m != nil { + return m.RankingExpression + } + return "" +} + +// Output only. +// +// Response for SearchJob method. +type SearchJobsResponse struct { + // The Job entities that match the specified [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. 
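// A minimal sketch of the custom ranking controls defined just above. The
// expression reuses the sample from the comment; "year" and "freshness" stand
// for numeric custom attribute keys on the indexed jobs and are placeholders.
func newExampleCustomRanking() *SearchJobsRequest_CustomRankingInfo {
	return &SearchJobsRequest_CustomRankingInfo{
		// HIGH weights the expression heavily against the built-in relevance
		// score; EXTREME would ignore the relevance score entirely.
		ImportanceLevel:   SearchJobsRequest_CustomRankingInfo_HIGH,
		RankingExpression: "(year + 25) * 0.25 - (freshness / 0.5)",
	}
}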
+ MatchingJobs []*SearchJobsResponse_MatchingJob `protobuf:"bytes,1,rep,name=matching_jobs,json=matchingJobs,proto3" json:"matching_jobs,omitempty"` + // The histogram results that match with specified + // [SearchJobsRequest.histogram_queries][google.cloud.talent.v4beta1.SearchJobsRequest.histogram_queries]. + HistogramQueryResults []*HistogramQueryResult `protobuf:"bytes,2,rep,name=histogram_query_results,json=histogramQueryResults,proto3" json:"histogram_query_results,omitempty"` + // The token that specifies the starting position of the next page of results. + // This field is empty if there are no more results. + NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The location filters that the service applied to the specified query. If + // any filters are lat-lng based, the [JobLocation.location_type][] is + // [JobLocation.LocationType#LOCATION_TYPE_UNSPECIFIED][]. + LocationFilters []*Location `protobuf:"bytes,4,rep,name=location_filters,json=locationFilters,proto3" json:"location_filters,omitempty"` + // An estimation of the number of jobs that match the specified query. + // + // This number isn't guaranteed to be accurate. For accurate results, + // see [enable_precise_result_size][]. + EstimatedTotalSize int32 `protobuf:"varint,5,opt,name=estimated_total_size,json=estimatedTotalSize,proto3" json:"estimated_total_size,omitempty"` + // The precise result count, which is available only if the client set + // [enable_precise_result_size][] to `true`, or if the response + // is the last page of results. Otherwise, the value is `-1`. + TotalSize int32 `protobuf:"varint,6,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. + Metadata *ResponseMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` + // If query broadening is enabled, we may append additional results from the + // broadened query. This number indicates how many of the jobs returned in the + // jobs field are from the broadened query. These results are always at the + // end of the jobs list. In particular, a value of 0, or if the field isn't + // set, all the jobs in the jobs list are from the original + // (without broadening) query. If this field is non-zero, subsequent requests + // with offset after this result set should contain all broadened results. + BroadenedQueryJobsCount int32 `protobuf:"varint,8,opt,name=broadened_query_jobs_count,json=broadenedQueryJobsCount,proto3" json:"broadened_query_jobs_count,omitempty"` + // The spell checking result, and correction. 
+ SpellCorrection *SpellingCorrection `protobuf:"bytes,9,opt,name=spell_correction,json=spellCorrection,proto3" json:"spell_correction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchJobsResponse) Reset() { *m = SearchJobsResponse{} } +func (m *SearchJobsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchJobsResponse) ProtoMessage() {} +func (*SearchJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{8} +} +func (m *SearchJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchJobsResponse.Unmarshal(m, b) +} +func (m *SearchJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchJobsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchJobsResponse.Merge(dst, src) +} +func (m *SearchJobsResponse) XXX_Size() int { + return xxx_messageInfo_SearchJobsResponse.Size(m) +} +func (m *SearchJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchJobsResponse proto.InternalMessageInfo + +func (m *SearchJobsResponse) GetMatchingJobs() []*SearchJobsResponse_MatchingJob { + if m != nil { + return m.MatchingJobs + } + return nil +} + +func (m *SearchJobsResponse) GetHistogramQueryResults() []*HistogramQueryResult { + if m != nil { + return m.HistogramQueryResults + } + return nil +} + +func (m *SearchJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *SearchJobsResponse) GetLocationFilters() []*Location { + if m != nil { + return m.LocationFilters + } + return nil +} + +func (m *SearchJobsResponse) GetEstimatedTotalSize() int32 { + if m != nil { + return m.EstimatedTotalSize + } + return 0 +} + +func (m *SearchJobsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +func (m *SearchJobsResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *SearchJobsResponse) GetBroadenedQueryJobsCount() int32 { + if m != nil { + return m.BroadenedQueryJobsCount + } + return 0 +} + +func (m *SearchJobsResponse) GetSpellCorrection() *SpellingCorrection { + if m != nil { + return m.SpellCorrection + } + return nil +} + +// Output only. +// +// Job entry with metadata inside [SearchJobsResponse][google.cloud.talent.v4beta1.SearchJobsResponse]. +type SearchJobsResponse_MatchingJob struct { + // Job resource that matches the specified [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. + Job *Job `protobuf:"bytes,1,opt,name=job,proto3" json:"job,omitempty"` + // A summary of the job with core information that's displayed on the search + // results listing page. + JobSummary string `protobuf:"bytes,2,opt,name=job_summary,json=jobSummary,proto3" json:"job_summary,omitempty"` + // Contains snippets of text from the [Job.job_title][] field most + // closely matching a search query's keywords, if available. The matching + // query keywords are enclosed in HTML bold tags. 
+ JobTitleSnippet string `protobuf:"bytes,3,opt,name=job_title_snippet,json=jobTitleSnippet,proto3" json:"job_title_snippet,omitempty"` + // Contains snippets of text from the [Job.description][google.cloud.talent.v4beta1.Job.description] and similar + // fields that most closely match a search query's keywords, if available. + // All HTML tags in the original fields are stripped when returned in this + // field, and matching query keywords are enclosed in HTML bold tags. + SearchTextSnippet string `protobuf:"bytes,4,opt,name=search_text_snippet,json=searchTextSnippet,proto3" json:"search_text_snippet,omitempty"` + // Commute information which is generated based on specified + // [CommuteFilter][google.cloud.talent.v4beta1.CommuteFilter]. + CommuteInfo *SearchJobsResponse_CommuteInfo `protobuf:"bytes,5,opt,name=commute_info,json=commuteInfo,proto3" json:"commute_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchJobsResponse_MatchingJob) Reset() { *m = SearchJobsResponse_MatchingJob{} } +func (m *SearchJobsResponse_MatchingJob) String() string { return proto.CompactTextString(m) } +func (*SearchJobsResponse_MatchingJob) ProtoMessage() {} +func (*SearchJobsResponse_MatchingJob) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{8, 0} +} +func (m *SearchJobsResponse_MatchingJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchJobsResponse_MatchingJob.Unmarshal(m, b) +} +func (m *SearchJobsResponse_MatchingJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchJobsResponse_MatchingJob.Marshal(b, m, deterministic) +} +func (dst *SearchJobsResponse_MatchingJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchJobsResponse_MatchingJob.Merge(dst, src) +} +func (m *SearchJobsResponse_MatchingJob) XXX_Size() int { + return xxx_messageInfo_SearchJobsResponse_MatchingJob.Size(m) +} +func (m *SearchJobsResponse_MatchingJob) XXX_DiscardUnknown() { + xxx_messageInfo_SearchJobsResponse_MatchingJob.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchJobsResponse_MatchingJob proto.InternalMessageInfo + +func (m *SearchJobsResponse_MatchingJob) GetJob() *Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *SearchJobsResponse_MatchingJob) GetJobSummary() string { + if m != nil { + return m.JobSummary + } + return "" +} + +func (m *SearchJobsResponse_MatchingJob) GetJobTitleSnippet() string { + if m != nil { + return m.JobTitleSnippet + } + return "" +} + +func (m *SearchJobsResponse_MatchingJob) GetSearchTextSnippet() string { + if m != nil { + return m.SearchTextSnippet + } + return "" +} + +func (m *SearchJobsResponse_MatchingJob) GetCommuteInfo() *SearchJobsResponse_CommuteInfo { + if m != nil { + return m.CommuteInfo + } + return nil +} + +// Output only. +// +// Commute details related to this job. +type SearchJobsResponse_CommuteInfo struct { + // Location used as the destination in the commute calculation. + JobLocation *Location `protobuf:"bytes,1,opt,name=job_location,json=jobLocation,proto3" json:"job_location,omitempty"` + // The number of seconds required to travel to the job location from the + // query location. A duration of 0 seconds indicates that the job isn't + // reachable within the requested duration, but was returned as part of an + // expanded query. 
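// A minimal sketch of walking a SearchJobsResponse with the nil-safe getters
// generated in this file; collectMatches is a placeholder helper name.
func collectMatches(resp *SearchJobsResponse) (jobs []*Job, commuteSeconds []int64, nextPage string) {
	for _, m := range resp.GetMatchingJobs() {
		jobs = append(jobs, m.GetJob())
		// CommuteInfo is populated when the query used a CommuteFilter; a zero
		// duration means the job wasn't reachable within the requested window
		// and was returned as part of an expanded query.
		if ci := m.GetCommuteInfo(); ci != nil {
			commuteSeconds = append(commuteSeconds, ci.GetTravelDuration().GetSeconds())
		}
	}
	// An empty next page token means this was the last page of results.
	return jobs, commuteSeconds, resp.GetNextPageToken()
}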
+ TravelDuration *duration.Duration `protobuf:"bytes,2,opt,name=travel_duration,json=travelDuration,proto3" json:"travel_duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchJobsResponse_CommuteInfo) Reset() { *m = SearchJobsResponse_CommuteInfo{} } +func (m *SearchJobsResponse_CommuteInfo) String() string { return proto.CompactTextString(m) } +func (*SearchJobsResponse_CommuteInfo) ProtoMessage() {} +func (*SearchJobsResponse_CommuteInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_job_service_461eee9b2ad19a52, []int{8, 1} +} +func (m *SearchJobsResponse_CommuteInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchJobsResponse_CommuteInfo.Unmarshal(m, b) +} +func (m *SearchJobsResponse_CommuteInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchJobsResponse_CommuteInfo.Marshal(b, m, deterministic) +} +func (dst *SearchJobsResponse_CommuteInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchJobsResponse_CommuteInfo.Merge(dst, src) +} +func (m *SearchJobsResponse_CommuteInfo) XXX_Size() int { + return xxx_messageInfo_SearchJobsResponse_CommuteInfo.Size(m) +} +func (m *SearchJobsResponse_CommuteInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SearchJobsResponse_CommuteInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchJobsResponse_CommuteInfo proto.InternalMessageInfo + +func (m *SearchJobsResponse_CommuteInfo) GetJobLocation() *Location { + if m != nil { + return m.JobLocation + } + return nil +} + +func (m *SearchJobsResponse_CommuteInfo) GetTravelDuration() *duration.Duration { + if m != nil { + return m.TravelDuration + } + return nil +} + +func init() { + proto.RegisterType((*CreateJobRequest)(nil), "google.cloud.talent.v4beta1.CreateJobRequest") + proto.RegisterType((*GetJobRequest)(nil), "google.cloud.talent.v4beta1.GetJobRequest") + proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.talent.v4beta1.UpdateJobRequest") + proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.talent.v4beta1.DeleteJobRequest") + proto.RegisterType((*BatchDeleteJobsRequest)(nil), "google.cloud.talent.v4beta1.BatchDeleteJobsRequest") + proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.talent.v4beta1.ListJobsRequest") + proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.talent.v4beta1.ListJobsResponse") + proto.RegisterType((*SearchJobsRequest)(nil), "google.cloud.talent.v4beta1.SearchJobsRequest") + proto.RegisterType((*SearchJobsRequest_CustomRankingInfo)(nil), "google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo") + proto.RegisterType((*SearchJobsResponse)(nil), "google.cloud.talent.v4beta1.SearchJobsResponse") + proto.RegisterType((*SearchJobsResponse_MatchingJob)(nil), "google.cloud.talent.v4beta1.SearchJobsResponse.MatchingJob") + proto.RegisterType((*SearchJobsResponse_CommuteInfo)(nil), "google.cloud.talent.v4beta1.SearchJobsResponse.CommuteInfo") + proto.RegisterEnum("google.cloud.talent.v4beta1.JobView", JobView_name, JobView_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.SearchJobsRequest_SearchMode", SearchJobsRequest_SearchMode_name, SearchJobsRequest_SearchMode_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.SearchJobsRequest_DiversificationLevel", SearchJobsRequest_DiversificationLevel_name, SearchJobsRequest_DiversificationLevel_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.SearchJobsRequest_CustomRankingInfo_ImportanceLevel", 
SearchJobsRequest_CustomRankingInfo_ImportanceLevel_name, SearchJobsRequest_CustomRankingInfo_ImportanceLevel_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// JobServiceClient is the client API for JobService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type JobServiceClient interface { + // Creates a new job. + // + // Typically, the job becomes searchable within 10 seconds, but it may take + // up to 5 minutes. + CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Retrieves the specified job, whose status is OPEN or recently EXPIRED + // within the last 90 days. + GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) + // Updates specified job. + // + // Typically, updated contents become visible in search results within 10 + // seconds, but it may take up to 5 minutes. + UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) + // Deletes the specified job. + // + // Typically, the job becomes unsearchable within 10 seconds, but it may take + // up to 5 minutes. + DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists jobs by filter. + ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) + // Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by filter. + BatchDeleteJobs(ctx context.Context, in *BatchDeleteJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Searches for jobs using the provided [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. + // + // This call constrains the [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs + // present in the database, and only returns jobs that the caller has + // permission to search against. + SearchJobs(ctx context.Context, in *SearchJobsRequest, opts ...grpc.CallOption) (*SearchJobsResponse, error) + // Searches for jobs using the provided [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. + // + // This API call is intended for the use case of targeting passive job + // seekers (for example, job seekers who have signed up to receive email + // alerts about potential job opportunities), and has different algorithmic + // adjustments that are targeted to passive job seekers. + // + // This call constrains the [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs + // present in the database, and only returns jobs the caller has + // permission to search against. + SearchJobsForAlert(ctx context.Context, in *SearchJobsRequest, opts ...grpc.CallOption) (*SearchJobsResponse, error) +} + +type jobServiceClient struct { + cc *grpc.ClientConn +} + +func NewJobServiceClient(cc *grpc.ClientConn) JobServiceClient { + return &jobServiceClient{cc} +} + +func (c *jobServiceClient) CreateJob(ctx context.Context, in *CreateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/CreateJob", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/GetJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) { + out := new(Job) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/UpdateJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/DeleteJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) { + out := new(ListJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/ListJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) BatchDeleteJobs(ctx context.Context, in *BatchDeleteJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/BatchDeleteJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) SearchJobs(ctx context.Context, in *SearchJobsRequest, opts ...grpc.CallOption) (*SearchJobsResponse, error) { + out := new(SearchJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/SearchJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *jobServiceClient) SearchJobsForAlert(ctx context.Context, in *SearchJobsRequest, opts ...grpc.CallOption) (*SearchJobsResponse, error) { + out := new(SearchJobsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.JobService/SearchJobsForAlert", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// JobServiceServer is the server API for JobService service. +type JobServiceServer interface { + // Creates a new job. + // + // Typically, the job becomes searchable within 10 seconds, but it may take + // up to 5 minutes. + CreateJob(context.Context, *CreateJobRequest) (*Job, error) + // Retrieves the specified job, whose status is OPEN or recently EXPIRED + // within the last 90 days. + GetJob(context.Context, *GetJobRequest) (*Job, error) + // Updates specified job. + // + // Typically, updated contents become visible in search results within 10 + // seconds, but it may take up to 5 minutes. + UpdateJob(context.Context, *UpdateJobRequest) (*Job, error) + // Deletes the specified job. + // + // Typically, the job becomes unsearchable within 10 seconds, but it may take + // up to 5 minutes. + DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error) + // Lists jobs by filter. + ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error) + // Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by filter. + BatchDeleteJobs(context.Context, *BatchDeleteJobsRequest) (*empty.Empty, error) + // Searches for jobs using the provided [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. 
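// A minimal sketch of calling the service through the generated client. The
// endpoint and dial options are placeholders only: a real connection to
// jobs.googleapis.com needs TLS and OAuth2 credentials, which are omitted
// here; searchOnce is a hypothetical helper name.
func searchOnce(ctx context.Context, req *SearchJobsRequest) (*SearchJobsResponse, error) {
	conn, err := grpc.Dial("jobs.googleapis.com:443", grpc.WithInsecure()) // placeholder dial options
	if err != nil {
		return nil, err
	}
	defer conn.Close()
	return NewJobServiceClient(conn).SearchJobs(ctx, req)
}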
+ // + // This call constrains the [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs + // present in the database, and only returns jobs that the caller has + // permission to search against. + SearchJobs(context.Context, *SearchJobsRequest) (*SearchJobsResponse, error) + // Searches for jobs using the provided [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. + // + // This API call is intended for the use case of targeting passive job + // seekers (for example, job seekers who have signed up to receive email + // alerts about potential job opportunities), and has different algorithmic + // adjustments that are targeted to passive job seekers. + // + // This call constrains the [visibility][google.cloud.talent.v4beta1.Job.visibility] of jobs + // present in the database, and only returns jobs the caller has + // permission to search against. + SearchJobsForAlert(context.Context, *SearchJobsRequest) (*SearchJobsResponse, error) +} + +func RegisterJobServiceServer(s *grpc.Server, srv JobServiceServer) { + s.RegisterService(&_JobService_serviceDesc, srv) +} + +func _JobService_CreateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).CreateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/CreateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).CreateJob(ctx, req.(*CreateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).GetJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/GetJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).GetJob(ctx, req.(*GetJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).UpdateJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/UpdateJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).UpdateJob(ctx, req.(*UpdateJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).DeleteJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/DeleteJob", + } + handler := func(ctx context.Context, 
req interface{}) (interface{}, error) { + return srv.(JobServiceServer).DeleteJob(ctx, req.(*DeleteJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).ListJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/ListJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).ListJobs(ctx, req.(*ListJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_BatchDeleteJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeleteJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).BatchDeleteJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/BatchDeleteJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).BatchDeleteJobs(ctx, req.(*BatchDeleteJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_SearchJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).SearchJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/SearchJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).SearchJobs(ctx, req.(*SearchJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _JobService_SearchJobsForAlert_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(JobServiceServer).SearchJobsForAlert(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.JobService/SearchJobsForAlert", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(JobServiceServer).SearchJobsForAlert(ctx, req.(*SearchJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _JobService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.JobService", + HandlerType: (*JobServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateJob", + Handler: _JobService_CreateJob_Handler, + }, + { + MethodName: "GetJob", + Handler: _JobService_GetJob_Handler, + }, + { + MethodName: "UpdateJob", + Handler: _JobService_UpdateJob_Handler, + }, + { + MethodName: "DeleteJob", + Handler: _JobService_DeleteJob_Handler, + }, + { + MethodName: "ListJobs", + Handler: _JobService_ListJobs_Handler, + }, + { + MethodName: "BatchDeleteJobs", + Handler: _JobService_BatchDeleteJobs_Handler, + }, + { + MethodName: "SearchJobs", + 
Handler: _JobService_SearchJobs_Handler, + }, + { + MethodName: "SearchJobsForAlert", + Handler: _JobService_SearchJobsForAlert_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/job_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/job_service.proto", fileDescriptor_job_service_461eee9b2ad19a52) +} + +var fileDescriptor_job_service_461eee9b2ad19a52 = []byte{ + // 1861 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x4b, 0x73, 0x1b, 0xc7, + 0x11, 0xce, 0x82, 0x2f, 0xa0, 0x41, 0x12, 0xcb, 0x11, 0x2d, 0xc1, 0xa0, 0x13, 0x53, 0xab, 0x48, + 0xc5, 0x40, 0x21, 0x60, 0xc3, 0x4e, 0xa5, 0x62, 0x95, 0x2b, 0xc1, 0x63, 0x69, 0xae, 0x02, 0x90, + 0xc8, 0x02, 0x94, 0x14, 0x1e, 0xb4, 0x5e, 0x00, 0x03, 0x70, 0xc9, 0xc5, 0xce, 0x7a, 0x77, 0x40, + 0x89, 0x76, 0x74, 0x49, 0x7e, 0x42, 0x8e, 0xa9, 0xf2, 0x21, 0xe7, 0x1c, 0x72, 0xc8, 0x2d, 0xa9, + 0x54, 0xa5, 0x2a, 0xf7, 0x1c, 0x7c, 0xcc, 0x2d, 0x95, 0x7f, 0x90, 0x3f, 0x90, 0x9a, 0xd9, 0xd9, + 0xc5, 0x43, 0x10, 0x1e, 0x56, 0xf9, 0xb6, 0xd3, 0xfd, 0x75, 0x4f, 0x77, 0x4f, 0x4f, 0xf7, 0xf4, + 0xc2, 0x61, 0x8f, 0x90, 0x9e, 0x8d, 0xf3, 0x6d, 0x9b, 0x0c, 0x3a, 0x79, 0x6a, 0xda, 0xd8, 0xa1, + 0xf9, 0xeb, 0x8f, 0x5b, 0x98, 0x9a, 0x1f, 0xe6, 0x2f, 0x49, 0xcb, 0xf0, 0xb1, 0x77, 0x6d, 0xb5, + 0x71, 0xce, 0xf5, 0x08, 0x25, 0x68, 0x2f, 0x80, 0xe7, 0x38, 0x3c, 0x17, 0xc0, 0x73, 0x02, 0x9e, + 0x79, 0x4f, 0xe8, 0x32, 0x5d, 0x2b, 0x6f, 0x3a, 0x0e, 0xa1, 0x26, 0xb5, 0x88, 0xe3, 0x07, 0xa2, + 0x99, 0x83, 0x59, 0x3b, 0xb5, 0x49, 0xbf, 0x4f, 0x1c, 0x81, 0xfc, 0xd1, 0x2c, 0x64, 0xd7, 0xb2, + 0x29, 0xf6, 0x42, 0xa5, 0x0f, 0x67, 0x41, 0x2f, 0x2c, 0x9f, 0x92, 0x9e, 0x67, 0xf6, 0x05, 0xf8, + 0xfe, 0x1c, 0x5f, 0x05, 0xec, 0x9e, 0x80, 0xd9, 0xc4, 0xe9, 0x79, 0x03, 0xc7, 0xb1, 0x9c, 0x5e, + 0x9e, 0xb8, 0xd8, 0x1b, 0xf3, 0xe6, 0x07, 0x02, 0xc4, 0x57, 0xad, 0x41, 0x37, 0xdf, 0x19, 0x04, + 0x00, 0xc1, 0xdf, 0x9b, 0xe4, 0xe3, 0xbe, 0x4b, 0x6f, 0x04, 0x73, 0x7f, 0x92, 0xd9, 0xb5, 0xb0, + 0xdd, 0x31, 0xfa, 0xa6, 0x7f, 0x15, 0x20, 0x94, 0xe7, 0x20, 0x97, 0x3d, 0x6c, 0x52, 0xfc, 0x98, + 0xb4, 0x74, 0xfc, 0xc5, 0x00, 0xfb, 0x14, 0xdd, 0x86, 0x75, 0xd7, 0xf4, 0xb0, 0x43, 0xd3, 0xd2, + 0xbe, 0x74, 0x90, 0xd0, 0xc5, 0x0a, 0x15, 0x60, 0xe5, 0x92, 0xb4, 0xd2, 0xb1, 0x7d, 0xe9, 0x20, + 0x59, 0xd8, 0xcf, 0xcd, 0x38, 0xa1, 0x1c, 0xd3, 0xc6, 0xc0, 0xca, 0x3d, 0xd8, 0xfa, 0x0c, 0xd3, + 0x11, 0xe5, 0x08, 0x56, 0x1d, 0xb3, 0x8f, 0x85, 0x6a, 0xfe, 0xad, 0xfc, 0x4e, 0x02, 0xf9, 0xcc, + 0xed, 0x8c, 0x5b, 0x21, 0x76, 0x93, 0x96, 0xd8, 0x0d, 0x3d, 0x82, 0xe4, 0x80, 0xeb, 0xe1, 0x2e, + 0x0a, 0x4b, 0x33, 0xa1, 0x6c, 0x18, 0x85, 0xdc, 0x11, 0x8b, 0x42, 0xcd, 0xf4, 0xaf, 0x74, 0x08, + 0xe0, 0xec, 0x5b, 0x79, 0x00, 0x72, 0x05, 0xdb, 0x78, 0xcc, 0x88, 0x69, 0xd6, 0x1e, 0xc3, 0xed, + 0x92, 0x49, 0xdb, 0x17, 0x11, 0xd8, 0x9f, 0x17, 0xb8, 0xdb, 0xb0, 0x1e, 0x64, 0x13, 0xb7, 0x28, + 0xa1, 0x8b, 0x95, 0xf2, 0x77, 0x09, 0x52, 0x55, 0xcb, 0xa7, 0x6f, 0xa1, 0x03, 0x7d, 0x1f, 0xc0, + 0x35, 0x7b, 0xd8, 0xa0, 0xe4, 0x0a, 0x3b, 0xe9, 0x15, 0xce, 0x4b, 0x30, 0x4a, 0x93, 0x11, 0xd0, + 0x1e, 0xf0, 0x85, 0xe1, 0x5b, 0x5f, 0xe2, 0xf4, 0xea, 0xbe, 0x74, 0xb0, 0xa6, 0xc7, 0x19, 0xa1, + 0x61, 0x7d, 0x89, 0xd1, 0xcf, 0x21, 0xce, 0x6e, 0xde, 0xb5, 0x85, 0x5f, 0xa4, 0xd7, 0xf6, 0xa5, + 0x83, 0xed, 0xc2, 0x0f, 0xe7, 0xc5, 0xf9, 0x89, 0x85, 0x5f, 0xe8, 0x1b, 0x97, 0xc1, 0x87, 0xf2, + 0x57, 0x09, 0xe4, 0xa1, 0x03, 0xbe, 0x4b, 0x1c, 0x1f, 0xa3, 0x8f, 0x61, 0xf5, 0x92, 0xb4, 0xfc, + 0xb4, 0xb4, 0xbf, 0xb2, 
0xd0, 0xc9, 0x71, 0x34, 0x7a, 0x00, 0x29, 0x07, 0xbf, 0xa4, 0xc6, 0x88, + 0x33, 0x81, 0xa3, 0x5b, 0x8c, 0x5c, 0x8f, 0x1c, 0xd2, 0x20, 0xde, 0xc7, 0xd4, 0xec, 0x98, 0xd4, + 0xe4, 0xde, 0x26, 0x0b, 0x87, 0x33, 0x77, 0x08, 0xcd, 0xaa, 0x09, 0x21, 0x3d, 0x12, 0x57, 0xbe, + 0x4e, 0xc2, 0x4e, 0x03, 0x9b, 0x5e, 0xfb, 0x62, 0x91, 0x03, 0x38, 0x87, 0xa4, 0xcf, 0xc1, 0x46, + 0x9f, 0x74, 0x30, 0x37, 0x6e, 0xbb, 0xf0, 0xb3, 0x99, 0x7b, 0xbf, 0xa6, 0x5c, 0x50, 0x6a, 0xa4, + 0x83, 0x75, 0xf0, 0xa3, 0x6f, 0xf4, 0x14, 0x64, 0x2f, 0x40, 0x18, 0x13, 0xce, 0xfd, 0x78, 0x8e, + 0x73, 0x5c, 0x28, 0xf2, 0x2d, 0xe5, 0x8d, 0x13, 0x50, 0x09, 0x12, 0xec, 0x84, 0xbf, 0x18, 0x60, + 0xef, 0x86, 0x1f, 0x7f, 0xb2, 0x70, 0x7f, 0xde, 0x81, 0xfc, 0x8a, 0x81, 0x75, 0x96, 0x19, 0xfc, + 0x0b, 0x3d, 0x84, 0x1d, 0xec, 0x98, 0x2d, 0x1b, 0x1b, 0x2d, 0x8f, 0x98, 0x1d, 0xcc, 0xea, 0x14, + 0x4f, 0x97, 0xb8, 0x2e, 0x07, 0x8c, 0x52, 0x44, 0x47, 0x9f, 0xc2, 0x1e, 0xb3, 0xc1, 0xf2, 0xb0, + 0xe1, 0x7a, 0xb8, 0x6d, 0xf9, 0xd8, 0xf0, 0xb0, 0x3f, 0xb0, 0x69, 0x90, 0x81, 0xeb, 0x5c, 0x2c, + 0x2d, 0x20, 0xf5, 0x00, 0xa1, 0x73, 0x00, 0xcf, 0xc8, 0x67, 0xb0, 0x13, 0x15, 0x53, 0x6e, 0xb5, + 0x85, 0xfd, 0xf4, 0x06, 0x4f, 0xa4, 0x87, 0x33, 0xed, 0x3e, 0x0e, 0xa5, 0x02, 0xeb, 0xe5, 0x8b, + 0xd1, 0xb5, 0x85, 0xfd, 0xb1, 0x5c, 0x8f, 0x7f, 0x8b, 0x5c, 0x67, 0x79, 0x41, 0xba, 0x5d, 0x1f, + 0xd3, 0x74, 0x82, 0x5f, 0x23, 0xb1, 0x1a, 0xbf, 0x61, 0x30, 0x71, 0xc3, 0xc6, 0x6f, 0x67, 0x72, + 0xf2, 0x76, 0xbe, 0x0b, 0x71, 0xe2, 0x75, 0xb0, 0x67, 0xb4, 0x6e, 0xd2, 0x9b, 0x9c, 0xb9, 0xc1, + 0xd7, 0xa5, 0x1b, 0xf4, 0x12, 0xde, 0xe9, 0x58, 0xd7, 0xd8, 0xf3, 0xad, 0xae, 0xd5, 0xe6, 0x05, + 0xdf, 0xb0, 0xf1, 0x35, 0xb6, 0xd3, 0x5b, 0xdc, 0xf8, 0xf2, 0x92, 0x89, 0x57, 0x19, 0xd7, 0x55, + 0x65, 0xaa, 0xf4, 0xdd, 0xce, 0x14, 0x2a, 0x72, 0xe1, 0x56, 0x7b, 0xe0, 0x53, 0xd2, 0x37, 0x3c, + 0xd3, 0xb9, 0xb2, 0x9c, 0x9e, 0x61, 0x39, 0x5d, 0x92, 0xde, 0xe6, 0xd9, 0xf3, 0x8b, 0x25, 0xf7, + 0x2d, 0x73, 0x4d, 0x7a, 0xa0, 0x48, 0x73, 0xba, 0x44, 0xdf, 0x69, 0x4f, 0x92, 0x50, 0x81, 0xf9, + 0xea, 0xf3, 0x14, 0xbb, 0xc2, 0x37, 0x2f, 0x88, 0xc7, 0x5a, 0x14, 0x6d, 0x5f, 0xa4, 0x65, 0x9e, + 0x2e, 0xb7, 0x04, 0xf3, 0x97, 0x01, 0xaf, 0xc6, 0x58, 0x99, 0xbf, 0xc4, 0x60, 0xe7, 0x35, 0xe5, + 0xe8, 0x2b, 0x90, 0xad, 0xbe, 0x4b, 0x3c, 0x6a, 0x3a, 0x6d, 0x2c, 0x02, 0x26, 0xf1, 0x80, 0xd5, + 0xdf, 0xd6, 0xf0, 0x9c, 0x16, 0x29, 0x0e, 0xa2, 0x97, 0xb2, 0xc6, 0x09, 0xe8, 0x10, 0x50, 0x18, + 0x31, 0xfc, 0xd2, 0xf5, 0xb0, 0xef, 0x5b, 0x24, 0xac, 0x62, 0x3b, 0x82, 0xa3, 0x46, 0x0c, 0xc5, + 0x87, 0xd4, 0x84, 0x4a, 0xb4, 0x0f, 0xef, 0x69, 0xb5, 0xfa, 0xa9, 0xde, 0x2c, 0x9e, 0x94, 0x55, + 0xa3, 0xaa, 0x3e, 0x51, 0xab, 0xc6, 0xd9, 0x49, 0xa3, 0xae, 0x96, 0xb5, 0x23, 0x4d, 0xad, 0xc8, + 0xdf, 0x43, 0x71, 0x58, 0x3d, 0x39, 0x3d, 0x51, 0x65, 0x09, 0x6d, 0xc0, 0x4a, 0xf5, 0xf4, 0xa9, + 0x1c, 0x63, 0xa4, 0x9a, 0x56, 0xad, 0xc8, 0x2b, 0x08, 0x60, 0xbd, 0xa6, 0x56, 0xb4, 0xb3, 0x9a, + 0xbc, 0xca, 0xa8, 0xc7, 0xda, 0x67, 0xc7, 0xf2, 0x1a, 0x4a, 0xc2, 0x86, 0xfa, 0xac, 0xa9, 0xab, + 0x35, 0x55, 0x5e, 0x57, 0x74, 0x80, 0x61, 0x0d, 0x42, 0x7b, 0x70, 0xa7, 0xa1, 0x16, 0xf5, 0xf2, + 0xb1, 0x51, 0x3b, 0xad, 0xa8, 0x13, 0x5b, 0x6d, 0x03, 0x3c, 0x3e, 0x2d, 0x19, 0x01, 0x40, 0x96, + 0xd0, 0x1d, 0xb8, 0x75, 0xa4, 0x16, 0x9b, 0x67, 0xba, 0x5a, 0x31, 0x46, 0x18, 0x31, 0xe5, 0x29, + 0xec, 0x4e, 0x4b, 0x2f, 0x74, 0x1f, 0xee, 0x56, 0xb4, 0x27, 0xaa, 0xde, 0xd0, 0x8e, 0xb4, 0x72, + 0xb1, 0xa9, 0x9d, 0x9e, 0x4c, 0x75, 0x69, 0x13, 0xe2, 0x15, 0xad, 0x51, 0x2c, 0x55, 0xd5, 0x8a, + 0x2c, 0x31, 0x1f, 0x1a, 0x5a, 0xad, 0x5e, 0x55, 
0xe5, 0x98, 0xf2, 0xaf, 0x38, 0xa0, 0xd1, 0x93, + 0x11, 0x0d, 0xe6, 0x73, 0xd8, 0xe2, 0xe9, 0xc1, 0x02, 0x3d, 0xd2, 0x69, 0x1e, 0x2d, 0x7c, 0xc2, + 0x81, 0x9e, 0x5c, 0x4d, 0x28, 0x61, 0x4d, 0x68, 0xb3, 0x3f, 0x5c, 0xf8, 0xc8, 0x82, 0x3b, 0xe3, + 0x65, 0xe8, 0x46, 0x54, 0x31, 0x3f, 0x1d, 0xe3, 0x7b, 0x7d, 0xb8, 0x4c, 0x31, 0xe2, 0x92, 0xfa, + 0x3b, 0x17, 0x53, 0xa8, 0x53, 0xfb, 0xde, 0xca, 0xb4, 0xbe, 0x57, 0x07, 0xd9, 0x26, 0xa2, 0x10, + 0x88, 0xa7, 0x69, 0x7a, 0x95, 0xdb, 0x32, 0xbb, 0xa0, 0x57, 0x85, 0x90, 0x9e, 0x0a, 0xc5, 0x8f, + 0x02, 0x69, 0xf4, 0x01, 0xec, 0x62, 0x9f, 0x5a, 0x7d, 0x93, 0xe2, 0x8e, 0x41, 0x09, 0x35, 0xed, + 0xa0, 0x86, 0xad, 0xf1, 0x1a, 0x86, 0x22, 0x5e, 0x93, 0xb1, 0xc2, 0x6a, 0x36, 0x82, 0x5b, 0xe7, + 0xb8, 0x04, 0x8d, 0xd8, 0xa3, 0xad, 0x79, 0xe3, 0xad, 0x5a, 0x33, 0x7a, 0x04, 0x19, 0xd1, 0x6c, + 0x70, 0x47, 0x1c, 0x00, 0x3b, 0x69, 0xa3, 0x4d, 0x06, 0x0e, 0xe5, 0xf5, 0x7b, 0x4d, 0xbf, 0x13, + 0x21, 0x78, 0x40, 0xd9, 0xc1, 0x95, 0x19, 0x1b, 0x9d, 0x83, 0xec, 0xbb, 0xd8, 0xb6, 0x8d, 0x36, + 0xf1, 0x3c, 0xdc, 0x66, 0x3e, 0xf3, 0x9a, 0x9d, 0x2c, 0xe4, 0x67, 0xa7, 0x08, 0x13, 0xb2, 0x9c, + 0x5e, 0x39, 0x12, 0xd3, 0x53, 0x5c, 0xd1, 0x90, 0x90, 0xf9, 0x3a, 0x06, 0xc9, 0x91, 0xbc, 0xf9, + 0x56, 0xaf, 0xd4, 0xf7, 0x21, 0xc9, 0x07, 0x9e, 0x41, 0xbf, 0x6f, 0x7a, 0x37, 0xa2, 0x40, 0xc0, + 0x25, 0x69, 0x35, 0x02, 0x0a, 0xca, 0xc2, 0x0e, 0x03, 0x50, 0x8b, 0xda, 0xd8, 0xf0, 0x1d, 0xcb, + 0x75, 0x31, 0x15, 0x59, 0x91, 0xba, 0x24, 0xad, 0x26, 0xa3, 0x37, 0x02, 0x32, 0xca, 0xc1, 0x2d, + 0xf1, 0x2c, 0xa1, 0x2c, 0x8d, 0x42, 0xf4, 0x6a, 0x50, 0x75, 0x02, 0x56, 0x13, 0xbf, 0xa4, 0x21, + 0xfe, 0x39, 0x6c, 0xb2, 0x19, 0x68, 0x40, 0x71, 0x50, 0xd6, 0xd7, 0xb8, 0xe5, 0x4b, 0xdf, 0x9d, + 0x72, 0xa0, 0x83, 0x57, 0xf4, 0x64, 0x7b, 0xb8, 0xc8, 0xfc, 0x41, 0x82, 0xe4, 0x08, 0x13, 0x1d, + 0xc3, 0x26, 0xf3, 0x25, 0x4c, 0x3e, 0x11, 0xa9, 0x05, 0x73, 0x96, 0xc5, 0x29, 0x5c, 0xa0, 0x12, + 0xa4, 0xa8, 0x67, 0x5e, 0x63, 0xdb, 0x08, 0x47, 0x20, 0xf1, 0xc0, 0x7f, 0xf7, 0xb5, 0x07, 0x7e, + 0x45, 0x00, 0xf4, 0xed, 0x40, 0x22, 0x5c, 0x67, 0xaf, 0x61, 0x43, 0x34, 0x76, 0x94, 0x86, 0x5d, + 0x56, 0xc5, 0x9e, 0x68, 0xea, 0xd3, 0x89, 0x82, 0xb4, 0x0b, 0x72, 0xc4, 0xd1, 0x2a, 0xc6, 0xe9, + 0x49, 0xf5, 0xd7, 0xb2, 0x34, 0x46, 0xad, 0x69, 0x27, 0x5a, 0xad, 0x58, 0x95, 0x63, 0x08, 0xc1, + 0x76, 0x44, 0x6d, 0xd4, 0x8a, 0xd5, 0xaa, 0xbc, 0x82, 0x76, 0x60, 0x2b, 0xa2, 0x1d, 0x9d, 0x55, + 0xab, 0xf2, 0x6a, 0xe1, 0x3f, 0x9b, 0x00, 0x8f, 0x49, 0xab, 0x11, 0xcc, 0xb8, 0xe8, 0x6f, 0x12, + 0x24, 0xa2, 0xb1, 0x0b, 0xcd, 0xbe, 0x25, 0x93, 0xe3, 0x59, 0x66, 0x6e, 0x96, 0x29, 0xcf, 0x7f, + 0xfb, 0xcd, 0x7f, 0x7f, 0x1f, 0x7b, 0xa6, 0x3c, 0x8c, 0x86, 0xce, 0xaf, 0x82, 0x47, 0xec, 0xa7, + 0xae, 0x47, 0x2e, 0x71, 0x9b, 0xfa, 0xf9, 0x6c, 0x9e, 0x62, 0xc7, 0x74, 0xd8, 0xd7, 0x2b, 0x36, + 0x92, 0xfa, 0x9f, 0x48, 0xd9, 0xf3, 0x07, 0xca, 0xdd, 0x19, 0x12, 0x11, 0x0e, 0xfd, 0x59, 0x82, + 0xf5, 0x60, 0xaa, 0x43, 0xd9, 0x99, 0xc6, 0x8c, 0x8d, 0x7e, 0x0b, 0x18, 0xfe, 0x8c, 0x1b, 0xae, + 0xa3, 0x11, 0xc3, 0xd9, 0xc8, 0x35, 0xd5, 0x6c, 0x6e, 0x4d, 0x3e, 0xfb, 0xea, 0xfc, 0x1e, 0xba, + 0xfb, 0x66, 0xb8, 0x00, 0xa1, 0x7f, 0x4a, 0x90, 0x88, 0x46, 0xcc, 0x39, 0x11, 0x9f, 0x1c, 0x45, + 0x17, 0x30, 0xbc, 0xcb, 0x0d, 0xff, 0xbc, 0x90, 0x1f, 0x5a, 0xc2, 0xe6, 0xfc, 0xb9, 0xc6, 0xb3, + 0xa8, 0x67, 0x0b, 0xf7, 0x67, 0x4b, 0x0d, 0xb1, 0xe8, 0x4f, 0x12, 0x24, 0xa2, 0xb9, 0x73, 0x8e, + 0x1b, 0x93, 0xc3, 0x6c, 0xe6, 0xf6, 0x6b, 0xf7, 0x44, 0xed, 0xbb, 0xf4, 0x26, 0x8c, 0x7a, 0x76, + 0xb9, 0xa8, 0x67, 0x17, 0x88, 0xfa, 0x3f, 0x24, 0x88, 0x87, 0xf3, 0x21, 
0x9a, 0x3d, 0xca, 0x4c, + 0xcc, 0xc1, 0x99, 0xc3, 0x05, 0xd1, 0x41, 0x3d, 0x9a, 0x96, 0x39, 0x73, 0x53, 0x7e, 0x3c, 0x73, + 0xde, 0x90, 0xef, 0xe8, 0x1b, 0x09, 0x52, 0x13, 0xf3, 0x3e, 0xfa, 0x68, 0xa6, 0x71, 0xd3, 0xff, + 0x0e, 0xbc, 0x31, 0xfc, 0x3e, 0x37, 0xbd, 0xaf, 0xfc, 0x74, 0x99, 0xdb, 0xda, 0x1a, 0xee, 0xc1, + 0x72, 0x28, 0xaf, 0x1c, 0xce, 0xbf, 0xb9, 0x23, 0x32, 0xe8, 0xdf, 0x52, 0xf8, 0x10, 0xe4, 0x0e, + 0xe5, 0x96, 0x7b, 0x1d, 0x67, 0xf2, 0x4b, 0xf6, 0x0b, 0xc5, 0xe6, 0x4e, 0x76, 0x95, 0xc2, 0x32, + 0x4e, 0x06, 0xdd, 0x8b, 0xf9, 0x77, 0xa8, 0x1c, 0xcc, 0xf7, 0x2f, 0x82, 0xa3, 0xff, 0x49, 0xa3, + 0x0f, 0xc7, 0x23, 0xe2, 0x15, 0x6d, 0xec, 0xd1, 0xef, 0xde, 0xcb, 0xdf, 0x70, 0x2f, 0xaf, 0x95, + 0x4f, 0x96, 0xf7, 0x32, 0x34, 0x92, 0x79, 0xfb, 0x13, 0xe5, 0x83, 0x45, 0xbd, 0x1d, 0x11, 0x2b, + 0xbd, 0x82, 0xf7, 0xdb, 0xa4, 0x3f, 0xcb, 0xe6, 0x52, 0x6a, 0xd8, 0x84, 0xea, 0x2c, 0x0b, 0xeb, + 0xd2, 0x79, 0x51, 0xe0, 0x7b, 0xc4, 0x36, 0x9d, 0x5e, 0x8e, 0x78, 0xbd, 0x7c, 0x0f, 0x3b, 0x3c, + 0x47, 0xf3, 0x01, 0xcb, 0x74, 0x2d, 0x7f, 0xea, 0xbf, 0xcc, 0x47, 0xc1, 0xf2, 0x8f, 0xb1, 0x95, + 0x72, 0xb3, 0xd1, 0x5a, 0xe7, 0x32, 0x1f, 0xfd, 0x3f, 0x00, 0x00, 0xff, 0xff, 0x2c, 0xe3, 0x18, + 0x53, 0xea, 0x15, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile.pb.go new file mode 100644 index 0000000..e1de286 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile.pb.go @@ -0,0 +1,2388 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/profile.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import date "google.golang.org/genproto/googleapis/type/date" +import postaladdress "google.golang.org/genproto/googleapis/type/postaladdress" +import _ "google.golang.org/genproto/protobuf/field_mask" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The format of a structured resume. +type Resume_ResumeType int32 + +const ( + // Default value. + Resume_RESUME_TYPE_UNSPECIFIED Resume_ResumeType = 0 + // The profile contents in HR-XML format. + // See http://schemas.liquid-technologies.com/hr-xml/2007-04-15/ for more + // information about Human Resources XML. + Resume_HRXML Resume_ResumeType = 1 + // Resume type not specified. 
+ Resume_OTHER_RESUME_TYPE Resume_ResumeType = 2 +) + +var Resume_ResumeType_name = map[int32]string{ + 0: "RESUME_TYPE_UNSPECIFIED", + 1: "HRXML", + 2: "OTHER_RESUME_TYPE", +} +var Resume_ResumeType_value = map[string]int32{ + "RESUME_TYPE_UNSPECIFIED": 0, + "HRXML": 1, + "OTHER_RESUME_TYPE": 2, +} + +func (x Resume_ResumeType) String() string { + return proto.EnumName(Resume_ResumeType_name, int32(x)) +} +func (Resume_ResumeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{1, 0} +} + +// Enum that represents the type of the telephone. +type Phone_PhoneType int32 + +const ( + // Default value. + Phone_PHONE_TYPE_UNSPECIFIED Phone_PhoneType = 0 + // A landline. + Phone_LANDLINE Phone_PhoneType = 1 + // A mobile. + Phone_MOBILE Phone_PhoneType = 2 + // A fax. + Phone_FAX Phone_PhoneType = 3 + // A pager. + Phone_PAGER Phone_PhoneType = 4 + // A TTY (test telephone) or TDD (telecommunication device for the deaf). + Phone_TTY_OR_TDD Phone_PhoneType = 5 + // A voicemail. + Phone_VOICEMAIL Phone_PhoneType = 6 + // A virtual telephone number is a number that can be routed to another + // number and managed by the user via Web, SMS, IVR, and so on. It is + // associated with a particular person, and may be routed to either a MOBILE + // or LANDLINE number. The phone usage (see ContactInfoUsage above) should + // be set to PERSONAL for these phone types. Some more information can be + // found here: http://en.wikipedia.org/wiki/Personal_Numbers + Phone_VIRTUAL Phone_PhoneType = 7 + // Voice over IP numbers. This includes TSoIP (Telephony Service over IP). + Phone_VOIP Phone_PhoneType = 8 + // In some regions (e.g. the USA), it is impossible to distinguish between + // fixed-line and mobile numbers by looking at the phone number itself. + Phone_MOBILE_OR_LANDLINE Phone_PhoneType = 9 +) + +var Phone_PhoneType_name = map[int32]string{ + 0: "PHONE_TYPE_UNSPECIFIED", + 1: "LANDLINE", + 2: "MOBILE", + 3: "FAX", + 4: "PAGER", + 5: "TTY_OR_TDD", + 6: "VOICEMAIL", + 7: "VIRTUAL", + 8: "VOIP", + 9: "MOBILE_OR_LANDLINE", +} +var Phone_PhoneType_value = map[string]int32{ + "PHONE_TYPE_UNSPECIFIED": 0, + "LANDLINE": 1, + "MOBILE": 2, + "FAX": 3, + "PAGER": 4, + "TTY_OR_TDD": 5, + "VOICEMAIL": 6, + "VIRTUAL": 7, + "VOIP": 8, + "MOBILE_OR_LANDLINE": 9, +} + +func (x Phone_PhoneType) String() string { + return proto.EnumName(Phone_PhoneType_name, int32(x)) +} +func (Phone_PhoneType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{5, 0} +} + +// A resource that represents the profile for a job candidate (also referred to +// as a "single-source profile"). A profile belongs to a [Company][google.cloud.talent.v4beta1.Company], which is +// the company/organization that owns the profile. +type Profile struct { + // Required during profile update. + // + // Resource name assigned to a profile by the API. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", + // for example, "projects/api-test-project/tenants/foo/profiles/bar". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. + // + // Profile's id in client system if available. + // + // The maximum number of bytes allowed is 100. + ExternalId string `protobuf:"bytes,2,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"` + // Optional. + // + // The source description indicating where the profile is acquired. 
+ // + // For example, if a candidate profile is acquired from a resume, the user can + // input "resume" here to indicate the source. + // + // The maximum number of bytes allowed is 100. + Source string `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` + // Optional. + // + // The URI set by clients that links to this profile's client-side copy. + // + // The maximum number of bytes allowed is 4000. + Uri string `protobuf:"bytes,4,opt,name=uri,proto3" json:"uri,omitempty"` + // Optional. + // + // The cluster id of the profile to associate with other profile(s) for the + // same candidate. + // + // This field should be generated by the customer. If a value is not provided, + // a random UUI is assigned to this field of the profile. + // + // This is used to link multiple profiles to the same candidate. For example, + // a client has a candidate with two profiles, where one was created recently + // and the other one was created 5 years ago. These two profiles may be very + // different. The clients can create the first profile and get a generated + // [group_id][google.cloud.talent.v4beta1.Profile.group_id], and assign it when the second profile is created, + // indicating these two profiles are referring to the same candidate. + GroupId string `protobuf:"bytes,5,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + // Optional. + // + // Indicates the hirable status of the candidate. + IsHirable *wrappers.BoolValue `protobuf:"bytes,6,opt,name=is_hirable,json=isHirable,proto3" json:"is_hirable,omitempty"` + // Optional. + // + // The timestamp when the profile was first created at this source. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Optional. + // + // The timestamp when the profile was last updated at this source. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Optional. + // + // The resume representing this profile. + Resume *Resume `protobuf:"bytes,53,opt,name=resume,proto3" json:"resume,omitempty"` + // Optional. + // + // The names of the candidate this profile references. + // + // Currently only one person name is supported. + PersonNames []*PersonName `protobuf:"bytes,11,rep,name=person_names,json=personNames,proto3" json:"person_names,omitempty"` + // Optional. + // + // The candidate's postal addresses. + Addresses []*Address `protobuf:"bytes,12,rep,name=addresses,proto3" json:"addresses,omitempty"` + // Optional. + // + // The candidate's email addresses. + EmailAddresses []*Email `protobuf:"bytes,13,rep,name=email_addresses,json=emailAddresses,proto3" json:"email_addresses,omitempty"` + // Optional. + // + // The candidate's phone number(s). + PhoneNumbers []*Phone `protobuf:"bytes,14,rep,name=phone_numbers,json=phoneNumbers,proto3" json:"phone_numbers,omitempty"` + // Optional. + // + // The candidate's personal URIs. + PersonalUris []*PersonalUri `protobuf:"bytes,15,rep,name=personal_uris,json=personalUris,proto3" json:"personal_uris,omitempty"` + // Optional. + // + // Available contact information besides [addresses][google.cloud.talent.v4beta1.Profile.addresses], [email_addresses][google.cloud.talent.v4beta1.Profile.email_addresses], + // [phone_numbers][google.cloud.talent.v4beta1.Profile.phone_numbers] and [personal_uris][google.cloud.talent.v4beta1.Profile.personal_uris]. For example, Hang-out, Skype. 
+ AdditionalContactInfo []*AdditionalContactInfo `protobuf:"bytes,16,rep,name=additional_contact_info,json=additionalContactInfo,proto3" json:"additional_contact_info,omitempty"` + // Optional. + // + // The employment history records of the candidate. It's highly recommended + // to input this information as accurately as possible to help improve search + // quality. Here are some recommendations: + // + // * Specify the start and end dates of the employment records. + // * List different employment types separately, no matter how minor the + // change is. + // For example, only job title is changed from "software engineer" to "senior + // software engineer". + // * Provide [EmploymentRecord.is_current][google.cloud.talent.v4beta1.EmploymentRecord.is_current] for the current employment if + // possible. If not, it's inferred from user inputs. + EmploymentRecords []*EmploymentRecord `protobuf:"bytes,17,rep,name=employment_records,json=employmentRecords,proto3" json:"employment_records,omitempty"` + // Optional. + // + // The education history record of the candidate. It's highly recommended to + // input this information as accurately as possible to help improve search + // quality. Here are some recommendations: + // + // * Specify the start and end dates of the education records. + // * List each education type separately, no matter how minor the change is. + // For example, the profile contains the education experience from the same + // school but different degrees. + // * Provide [EducationRecord.is_current][google.cloud.talent.v4beta1.EducationRecord.is_current] for the current education if + // possible. If not, it's inferred from user inputs. + EducationRecords []*EducationRecord `protobuf:"bytes,18,rep,name=education_records,json=educationRecords,proto3" json:"education_records,omitempty"` + // Optional. + // + // The skill set of the candidate. It's highly recommended to provide as + // much information as possible to help improve the search quality. + Skills []*Skill `protobuf:"bytes,19,rep,name=skills,proto3" json:"skills,omitempty"` + // Optional. + // + // The individual or collaborative activities which the candidate has + // participated in, for example, open-source projects, class assignments that + // aren't listed in [employment_records][google.cloud.talent.v4beta1.Profile.employment_records]. + Activities []*Activity `protobuf:"bytes,20,rep,name=activities,proto3" json:"activities,omitempty"` + // Optional. + // + // The publications published by the candidate. + Publications []*Publication `protobuf:"bytes,21,rep,name=publications,proto3" json:"publications,omitempty"` + // Optional. + // + // The patents acquired by the candidate. + Patents []*Patent `protobuf:"bytes,22,rep,name=patents,proto3" json:"patents,omitempty"` + // Optional. + // + // The certifications acquired by the candidate. + Certifications []*Certification `protobuf:"bytes,23,rep,name=certifications,proto3" json:"certifications,omitempty"` + // Output only. The resource names of the candidate's applications. + Applications []string `protobuf:"bytes,47,rep,name=applications,proto3" json:"applications,omitempty"` + // Output only. The resource names of the candidate's assignments. + Assignments []string `protobuf:"bytes,48,rep,name=assignments,proto3" json:"assignments,omitempty"` + // Optional. + // + // A map of fields to hold both filterable and non-filterable custom profile + // attributes that aren't covered by the provided structured fields. 
See + // [CustomAttribute][google.cloud.talent.v4beta1.CustomAttribute] for more details. + // + // At most 100 filterable and at most 100 unfilterable keys are supported. If + // limit is exceeded, an error is thrown. Custom attributes are `unfilterable` + // by default. These are filterable when the `filterable` flag is set to + // `true`. + // + // Numeric custom attributes: each key can only map to one numeric value, + // otherwise an error is thrown. Client can also filter on numeric custom + // attributes using '>', '<' or '=' operators. + // + // String custom attributes: each key can map up to 50 string values. For + // filterable string value, each value has a byte size of no more than 256B. + // For unfilterable string values, the maximum byte size of a single key is + // 64B. An error is thrown for any request exceeding the limit. + // The maximum total byte size is 10KB. + CustomAttributes map[string]*CustomAttribute `protobuf:"bytes,26,rep,name=custom_attributes,json=customAttributes,proto3" json:"custom_attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Indicates if the profile is fully processed and searchable. + Processed bool `protobuf:"varint,27,opt,name=processed,proto3" json:"processed,omitempty"` + // Output only. Keyword snippet shows how the search result is related to a + // search query. This is only returned in [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse]. + KeywordSnippet string `protobuf:"bytes,28,opt,name=keyword_snippet,json=keywordSnippet,proto3" json:"keyword_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Profile) Reset() { *m = Profile{} } +func (m *Profile) String() string { return proto.CompactTextString(m) } +func (*Profile) ProtoMessage() {} +func (*Profile) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{0} +} +func (m *Profile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Profile.Unmarshal(m, b) +} +func (m *Profile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Profile.Marshal(b, m, deterministic) +} +func (dst *Profile) XXX_Merge(src proto.Message) { + xxx_messageInfo_Profile.Merge(dst, src) +} +func (m *Profile) XXX_Size() int { + return xxx_messageInfo_Profile.Size(m) +} +func (m *Profile) XXX_DiscardUnknown() { + xxx_messageInfo_Profile.DiscardUnknown(m) +} + +var xxx_messageInfo_Profile proto.InternalMessageInfo + +func (m *Profile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Profile) GetExternalId() string { + if m != nil { + return m.ExternalId + } + return "" +} + +func (m *Profile) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *Profile) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *Profile) GetGroupId() string { + if m != nil { + return m.GroupId + } + return "" +} + +func (m *Profile) GetIsHirable() *wrappers.BoolValue { + if m != nil { + return m.IsHirable + } + return nil +} + +func (m *Profile) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Profile) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Profile) GetResume() *Resume { + if m != nil { + return m.Resume + } + return nil +} + +func (m *Profile) 
GetPersonNames() []*PersonName { + if m != nil { + return m.PersonNames + } + return nil +} + +func (m *Profile) GetAddresses() []*Address { + if m != nil { + return m.Addresses + } + return nil +} + +func (m *Profile) GetEmailAddresses() []*Email { + if m != nil { + return m.EmailAddresses + } + return nil +} + +func (m *Profile) GetPhoneNumbers() []*Phone { + if m != nil { + return m.PhoneNumbers + } + return nil +} + +func (m *Profile) GetPersonalUris() []*PersonalUri { + if m != nil { + return m.PersonalUris + } + return nil +} + +func (m *Profile) GetAdditionalContactInfo() []*AdditionalContactInfo { + if m != nil { + return m.AdditionalContactInfo + } + return nil +} + +func (m *Profile) GetEmploymentRecords() []*EmploymentRecord { + if m != nil { + return m.EmploymentRecords + } + return nil +} + +func (m *Profile) GetEducationRecords() []*EducationRecord { + if m != nil { + return m.EducationRecords + } + return nil +} + +func (m *Profile) GetSkills() []*Skill { + if m != nil { + return m.Skills + } + return nil +} + +func (m *Profile) GetActivities() []*Activity { + if m != nil { + return m.Activities + } + return nil +} + +func (m *Profile) GetPublications() []*Publication { + if m != nil { + return m.Publications + } + return nil +} + +func (m *Profile) GetPatents() []*Patent { + if m != nil { + return m.Patents + } + return nil +} + +func (m *Profile) GetCertifications() []*Certification { + if m != nil { + return m.Certifications + } + return nil +} + +func (m *Profile) GetApplications() []string { + if m != nil { + return m.Applications + } + return nil +} + +func (m *Profile) GetAssignments() []string { + if m != nil { + return m.Assignments + } + return nil +} + +func (m *Profile) GetCustomAttributes() map[string]*CustomAttribute { + if m != nil { + return m.CustomAttributes + } + return nil +} + +func (m *Profile) GetProcessed() bool { + if m != nil { + return m.Processed + } + return false +} + +func (m *Profile) GetKeywordSnippet() string { + if m != nil { + return m.KeywordSnippet + } + return "" +} + +// Resource that represents a resume. +type Resume struct { + // Optional. + // + // Users can create a profile with only this field field, if [resume_type][google.cloud.talent.v4beta1.Resume.resume_type] + // is [HRXML][]. For example, the API parses this field and creates a profile + // with all structured fields populated, for example. [EmploymentRecord][google.cloud.talent.v4beta1.EmploymentRecord], + // [EducationRecord][google.cloud.talent.v4beta1.EducationRecord], and so on. An error is thrown if this field cannot be + // parsed. + // + // If this field is provided during profile creation or update, + // any other structured data provided in the profile is ignored. The + // API populates these fields by parsing this field. + StructuredResume string `protobuf:"bytes,1,opt,name=structured_resume,json=structuredResume,proto3" json:"structured_resume,omitempty"` + // Optional. + // + // The format of [structured_resume][google.cloud.talent.v4beta1.Resume.structured_resume]. 
+ ResumeType Resume_ResumeType `protobuf:"varint,2,opt,name=resume_type,json=resumeType,proto3,enum=google.cloud.talent.v4beta1.Resume_ResumeType" json:"resume_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resume) Reset() { *m = Resume{} } +func (m *Resume) String() string { return proto.CompactTextString(m) } +func (*Resume) ProtoMessage() {} +func (*Resume) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{1} +} +func (m *Resume) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resume.Unmarshal(m, b) +} +func (m *Resume) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resume.Marshal(b, m, deterministic) +} +func (dst *Resume) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resume.Merge(dst, src) +} +func (m *Resume) XXX_Size() int { + return xxx_messageInfo_Resume.Size(m) +} +func (m *Resume) XXX_DiscardUnknown() { + xxx_messageInfo_Resume.DiscardUnknown(m) +} + +var xxx_messageInfo_Resume proto.InternalMessageInfo + +func (m *Resume) GetStructuredResume() string { + if m != nil { + return m.StructuredResume + } + return "" +} + +func (m *Resume) GetResumeType() Resume_ResumeType { + if m != nil { + return m.ResumeType + } + return Resume_RESUME_TYPE_UNSPECIFIED +} + +// Resource that represents the name of a person. +type PersonName struct { + // The name of a person. It can be one of [formatted_name][] or + // [structured_name][]. + // + // Types that are valid to be assigned to PersonName: + // *PersonName_FormattedName + // *PersonName_StructuredName + PersonName isPersonName_PersonName `protobuf_oneof:"person_name"` + // Optional. + // + // Preferred name for the person. This field is ignored if [structured_name][google.cloud.talent.v4beta1.PersonName.structured_name] + // is provided. + // + // Number of characters allowed is 100. 
+ PreferredName string `protobuf:"bytes,3,opt,name=preferred_name,json=preferredName,proto3" json:"preferred_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PersonName) Reset() { *m = PersonName{} } +func (m *PersonName) String() string { return proto.CompactTextString(m) } +func (*PersonName) ProtoMessage() {} +func (*PersonName) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{2} +} +func (m *PersonName) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PersonName.Unmarshal(m, b) +} +func (m *PersonName) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PersonName.Marshal(b, m, deterministic) +} +func (dst *PersonName) XXX_Merge(src proto.Message) { + xxx_messageInfo_PersonName.Merge(dst, src) +} +func (m *PersonName) XXX_Size() int { + return xxx_messageInfo_PersonName.Size(m) +} +func (m *PersonName) XXX_DiscardUnknown() { + xxx_messageInfo_PersonName.DiscardUnknown(m) +} + +var xxx_messageInfo_PersonName proto.InternalMessageInfo + +type isPersonName_PersonName interface { + isPersonName_PersonName() +} + +type PersonName_FormattedName struct { + FormattedName string `protobuf:"bytes,1,opt,name=formatted_name,json=formattedName,proto3,oneof"` +} + +type PersonName_StructuredName struct { + StructuredName *PersonName_PersonStructuredName `protobuf:"bytes,2,opt,name=structured_name,json=structuredName,proto3,oneof"` +} + +func (*PersonName_FormattedName) isPersonName_PersonName() {} + +func (*PersonName_StructuredName) isPersonName_PersonName() {} + +func (m *PersonName) GetPersonName() isPersonName_PersonName { + if m != nil { + return m.PersonName + } + return nil +} + +func (m *PersonName) GetFormattedName() string { + if x, ok := m.GetPersonName().(*PersonName_FormattedName); ok { + return x.FormattedName + } + return "" +} + +func (m *PersonName) GetStructuredName() *PersonName_PersonStructuredName { + if x, ok := m.GetPersonName().(*PersonName_StructuredName); ok { + return x.StructuredName + } + return nil +} + +func (m *PersonName) GetPreferredName() string { + if m != nil { + return m.PreferredName + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PersonName) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PersonName_OneofMarshaler, _PersonName_OneofUnmarshaler, _PersonName_OneofSizer, []interface{}{ + (*PersonName_FormattedName)(nil), + (*PersonName_StructuredName)(nil), + } +} + +func _PersonName_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PersonName) + // person_name + switch x := m.PersonName.(type) { + case *PersonName_FormattedName: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FormattedName) + case *PersonName_StructuredName: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredName); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PersonName.PersonName has unexpected type %T", x) + } + return nil +} + +func _PersonName_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PersonName) + switch tag { + case 1: // person_name.formatted_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PersonName = &PersonName_FormattedName{x} + return true, err + case 2: // person_name.structured_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PersonName_PersonStructuredName) + err := b.DecodeMessage(msg) + m.PersonName = &PersonName_StructuredName{msg} + return true, err + default: + return false, nil + } +} + +func _PersonName_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PersonName) + // person_name + switch x := m.PersonName.(type) { + case *PersonName_FormattedName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FormattedName))) + n += len(x.FormattedName) + case *PersonName_StructuredName: + s := proto.Size(x.StructuredName) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Resource that represents a person's structured name. +type PersonName_PersonStructuredName struct { + // Optional. + // + // Given/first name. + // + // It's derived from [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] if not provided. + // + // Number of characters allowed is 100. + GivenName string `protobuf:"bytes,1,opt,name=given_name,json=givenName,proto3" json:"given_name,omitempty"` + // Optional. + // + // Preferred given/first name or nickname. + // + // Number of characters allowed is 100. + PreferredName string `protobuf:"bytes,6,opt,name=preferred_name,json=preferredName,proto3" json:"preferred_name,omitempty"` + // Optional. + // + // Middle initial. + // + // It's derived from [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] if not provided. + // + // Number of characters allowed is 20. + MiddleInitial string `protobuf:"bytes,2,opt,name=middle_initial,json=middleInitial,proto3" json:"middle_initial,omitempty"` + // Optional. + // + // Family/last name. + // + // It's derived from [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] if not provided. + // + // Number of characters allowed is 100. + FamilyName string `protobuf:"bytes,3,opt,name=family_name,json=familyName,proto3" json:"family_name,omitempty"` + // Optional. + // + // Suffixes. + // + // Number of characters allowed is 20. 
+ Suffixes []string `protobuf:"bytes,4,rep,name=suffixes,proto3" json:"suffixes,omitempty"` + // Optional. + // + // Prefixes. + // + // Number of characters allowed is 20. + Prefixes []string `protobuf:"bytes,5,rep,name=prefixes,proto3" json:"prefixes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PersonName_PersonStructuredName) Reset() { *m = PersonName_PersonStructuredName{} } +func (m *PersonName_PersonStructuredName) String() string { return proto.CompactTextString(m) } +func (*PersonName_PersonStructuredName) ProtoMessage() {} +func (*PersonName_PersonStructuredName) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{2, 0} +} +func (m *PersonName_PersonStructuredName) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PersonName_PersonStructuredName.Unmarshal(m, b) +} +func (m *PersonName_PersonStructuredName) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PersonName_PersonStructuredName.Marshal(b, m, deterministic) +} +func (dst *PersonName_PersonStructuredName) XXX_Merge(src proto.Message) { + xxx_messageInfo_PersonName_PersonStructuredName.Merge(dst, src) +} +func (m *PersonName_PersonStructuredName) XXX_Size() int { + return xxx_messageInfo_PersonName_PersonStructuredName.Size(m) +} +func (m *PersonName_PersonStructuredName) XXX_DiscardUnknown() { + xxx_messageInfo_PersonName_PersonStructuredName.DiscardUnknown(m) +} + +var xxx_messageInfo_PersonName_PersonStructuredName proto.InternalMessageInfo + +func (m *PersonName_PersonStructuredName) GetGivenName() string { + if m != nil { + return m.GivenName + } + return "" +} + +func (m *PersonName_PersonStructuredName) GetPreferredName() string { + if m != nil { + return m.PreferredName + } + return "" +} + +func (m *PersonName_PersonStructuredName) GetMiddleInitial() string { + if m != nil { + return m.MiddleInitial + } + return "" +} + +func (m *PersonName_PersonStructuredName) GetFamilyName() string { + if m != nil { + return m.FamilyName + } + return "" +} + +func (m *PersonName_PersonStructuredName) GetSuffixes() []string { + if m != nil { + return m.Suffixes + } + return nil +} + +func (m *PersonName_PersonStructuredName) GetPrefixes() []string { + if m != nil { + return m.Prefixes + } + return nil +} + +// Resource that represents a address. +type Address struct { + // Optional. + // + // The usage of the address. For example, SCHOOL, WORK, PERSONAL. + Usage ContactInfoUsage `protobuf:"varint,1,opt,name=usage,proto3,enum=google.cloud.talent.v4beta1.ContactInfoUsage" json:"usage,omitempty"` + // The address of a person. It can be one of [unstructured_address][] or + // [structured_address][]. + // + // Types that are valid to be assigned to Address: + // *Address_UnstructuredAddress + // *Address_StructuredAddress + Address isAddress_Address `protobuf_oneof:"address"` + // Optional. + // + // Indicates if it's the person's current address. 
+ Current *wrappers.BoolValue `protobuf:"bytes,4,opt,name=current,proto3" json:"current,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address) Reset() { *m = Address{} } +func (m *Address) String() string { return proto.CompactTextString(m) } +func (*Address) ProtoMessage() {} +func (*Address) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{3} +} +func (m *Address) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address.Unmarshal(m, b) +} +func (m *Address) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address.Marshal(b, m, deterministic) +} +func (dst *Address) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address.Merge(dst, src) +} +func (m *Address) XXX_Size() int { + return xxx_messageInfo_Address.Size(m) +} +func (m *Address) XXX_DiscardUnknown() { + xxx_messageInfo_Address.DiscardUnknown(m) +} + +var xxx_messageInfo_Address proto.InternalMessageInfo + +func (m *Address) GetUsage() ContactInfoUsage { + if m != nil { + return m.Usage + } + return ContactInfoUsage_CONTACT_INFO_USAGE_UNSPECIFIED +} + +type isAddress_Address interface { + isAddress_Address() +} + +type Address_UnstructuredAddress struct { + UnstructuredAddress string `protobuf:"bytes,2,opt,name=unstructured_address,json=unstructuredAddress,proto3,oneof"` +} + +type Address_StructuredAddress struct { + StructuredAddress *postaladdress.PostalAddress `protobuf:"bytes,3,opt,name=structured_address,json=structuredAddress,proto3,oneof"` +} + +func (*Address_UnstructuredAddress) isAddress_Address() {} + +func (*Address_StructuredAddress) isAddress_Address() {} + +func (m *Address) GetAddress() isAddress_Address { + if m != nil { + return m.Address + } + return nil +} + +func (m *Address) GetUnstructuredAddress() string { + if x, ok := m.GetAddress().(*Address_UnstructuredAddress); ok { + return x.UnstructuredAddress + } + return "" +} + +func (m *Address) GetStructuredAddress() *postaladdress.PostalAddress { + if x, ok := m.GetAddress().(*Address_StructuredAddress); ok { + return x.StructuredAddress + } + return nil +} + +func (m *Address) GetCurrent() *wrappers.BoolValue { + if m != nil { + return m.Current + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Address) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Address_OneofMarshaler, _Address_OneofUnmarshaler, _Address_OneofSizer, []interface{}{ + (*Address_UnstructuredAddress)(nil), + (*Address_StructuredAddress)(nil), + } +} + +func _Address_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_UnstructuredAddress: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.UnstructuredAddress) + case *Address_StructuredAddress: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredAddress); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Address.Address has unexpected type %T", x) + } + return nil +} + +func _Address_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Address) + switch tag { + case 2: // address.unstructured_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Address = &Address_UnstructuredAddress{x} + return true, err + case 3: // address.structured_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(postaladdress.PostalAddress) + err := b.DecodeMessage(msg) + m.Address = &Address_StructuredAddress{msg} + return true, err + default: + return false, nil + } +} + +func _Address_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_UnstructuredAddress: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.UnstructuredAddress))) + n += len(x.UnstructuredAddress) + case *Address_StructuredAddress: + s := proto.Size(x.StructuredAddress) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Resource that represents a person's email address. +type Email struct { + // Optional. + // + // The usage of the email address. For example, SCHOOL, WORK, PERSONAL. + Usage ContactInfoUsage `protobuf:"varint,1,opt,name=usage,proto3,enum=google.cloud.talent.v4beta1.ContactInfoUsage" json:"usage,omitempty"` + // Optional. + // + // Email address. + // + // Number of characters allowed is 4,000. 
+ EmailAddress string `protobuf:"bytes,2,opt,name=email_address,json=emailAddress,proto3" json:"email_address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Email) Reset() { *m = Email{} } +func (m *Email) String() string { return proto.CompactTextString(m) } +func (*Email) ProtoMessage() {} +func (*Email) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{4} +} +func (m *Email) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Email.Unmarshal(m, b) +} +func (m *Email) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Email.Marshal(b, m, deterministic) +} +func (dst *Email) XXX_Merge(src proto.Message) { + xxx_messageInfo_Email.Merge(dst, src) +} +func (m *Email) XXX_Size() int { + return xxx_messageInfo_Email.Size(m) +} +func (m *Email) XXX_DiscardUnknown() { + xxx_messageInfo_Email.DiscardUnknown(m) +} + +var xxx_messageInfo_Email proto.InternalMessageInfo + +func (m *Email) GetUsage() ContactInfoUsage { + if m != nil { + return m.Usage + } + return ContactInfoUsage_CONTACT_INFO_USAGE_UNSPECIFIED +} + +func (m *Email) GetEmailAddress() string { + if m != nil { + return m.EmailAddress + } + return "" +} + +// Resource that represents a person's telephone number. +type Phone struct { + // Optional. + // + // The usage of the phone. For example, SCHOOL, WORK, PERSONAL. + Usage ContactInfoUsage `protobuf:"varint,1,opt,name=usage,proto3,enum=google.cloud.talent.v4beta1.ContactInfoUsage" json:"usage,omitempty"` + // Optional. + // + // The phone type. For example, LANDLINE, MOBILE, FAX. + Type Phone_PhoneType `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.talent.v4beta1.Phone_PhoneType" json:"type,omitempty"` + // Optional. + // + // Phone number. + // + // Any phone formats are supported and only exact matches are performed on + // searches. For example, if a phone number in profile is provided in the + // format of "(xxx)xxx-xxxx", in profile searches the same phone format + // has to be provided. + // + // Number of characters allowed is 20. + Number string `protobuf:"bytes,3,opt,name=number,proto3" json:"number,omitempty"` + // Optional. + // + // When this number is available. Any descriptive string is expected. + // + // Number of characters allowed is 100. 
+ WhenAvailable string `protobuf:"bytes,4,opt,name=when_available,json=whenAvailable,proto3" json:"when_available,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Phone) Reset() { *m = Phone{} } +func (m *Phone) String() string { return proto.CompactTextString(m) } +func (*Phone) ProtoMessage() {} +func (*Phone) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{5} +} +func (m *Phone) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Phone.Unmarshal(m, b) +} +func (m *Phone) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Phone.Marshal(b, m, deterministic) +} +func (dst *Phone) XXX_Merge(src proto.Message) { + xxx_messageInfo_Phone.Merge(dst, src) +} +func (m *Phone) XXX_Size() int { + return xxx_messageInfo_Phone.Size(m) +} +func (m *Phone) XXX_DiscardUnknown() { + xxx_messageInfo_Phone.DiscardUnknown(m) +} + +var xxx_messageInfo_Phone proto.InternalMessageInfo + +func (m *Phone) GetUsage() ContactInfoUsage { + if m != nil { + return m.Usage + } + return ContactInfoUsage_CONTACT_INFO_USAGE_UNSPECIFIED +} + +func (m *Phone) GetType() Phone_PhoneType { + if m != nil { + return m.Type + } + return Phone_PHONE_TYPE_UNSPECIFIED +} + +func (m *Phone) GetNumber() string { + if m != nil { + return m.Number + } + return "" +} + +func (m *Phone) GetWhenAvailable() string { + if m != nil { + return m.WhenAvailable + } + return "" +} + +// Resource that represents a valid URI for a personal use. +type PersonalUri struct { + // Optional. + // + // The personal URI. + // + // Number of characters allowed is 4,000. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PersonalUri) Reset() { *m = PersonalUri{} } +func (m *PersonalUri) String() string { return proto.CompactTextString(m) } +func (*PersonalUri) ProtoMessage() {} +func (*PersonalUri) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{6} +} +func (m *PersonalUri) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PersonalUri.Unmarshal(m, b) +} +func (m *PersonalUri) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PersonalUri.Marshal(b, m, deterministic) +} +func (dst *PersonalUri) XXX_Merge(src proto.Message) { + xxx_messageInfo_PersonalUri.Merge(dst, src) +} +func (m *PersonalUri) XXX_Size() int { + return xxx_messageInfo_PersonalUri.Size(m) +} +func (m *PersonalUri) XXX_DiscardUnknown() { + xxx_messageInfo_PersonalUri.DiscardUnknown(m) +} + +var xxx_messageInfo_PersonalUri proto.InternalMessageInfo + +func (m *PersonalUri) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// Resource that represents contact information other than phone, email, +// URI and addresses. +type AdditionalContactInfo struct { + // Optional. + // + // The usage of this contact method. For example, SCHOOL, WORK, PERSONAL. + Usage ContactInfoUsage `protobuf:"varint,1,opt,name=usage,proto3,enum=google.cloud.talent.v4beta1.ContactInfoUsage" json:"usage,omitempty"` + // Optional. + // + // The name of the contact method. + // + // For example, "hangout", "skype". + // + // Number of characters allowed is 100. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Optional. + // + // The contact id. 
+ // + // Number of characters allowed is 100. + ContactId string `protobuf:"bytes,3,opt,name=contact_id,json=contactId,proto3" json:"contact_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdditionalContactInfo) Reset() { *m = AdditionalContactInfo{} } +func (m *AdditionalContactInfo) String() string { return proto.CompactTextString(m) } +func (*AdditionalContactInfo) ProtoMessage() {} +func (*AdditionalContactInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{7} +} +func (m *AdditionalContactInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdditionalContactInfo.Unmarshal(m, b) +} +func (m *AdditionalContactInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdditionalContactInfo.Marshal(b, m, deterministic) +} +func (dst *AdditionalContactInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdditionalContactInfo.Merge(dst, src) +} +func (m *AdditionalContactInfo) XXX_Size() int { + return xxx_messageInfo_AdditionalContactInfo.Size(m) +} +func (m *AdditionalContactInfo) XXX_DiscardUnknown() { + xxx_messageInfo_AdditionalContactInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_AdditionalContactInfo proto.InternalMessageInfo + +func (m *AdditionalContactInfo) GetUsage() ContactInfoUsage { + if m != nil { + return m.Usage + } + return ContactInfoUsage_CONTACT_INFO_USAGE_UNSPECIFIED +} + +func (m *AdditionalContactInfo) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AdditionalContactInfo) GetContactId() string { + if m != nil { + return m.ContactId + } + return "" +} + +// Resource that represents an employment record of a candidate. +type EmploymentRecord struct { + // Optional. + // + // Start date of the employment. + StartDate *date.Date `protobuf:"bytes,1,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Optional. + // + // End date of the employment. + EndDate *date.Date `protobuf:"bytes,2,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // Optional. + // + // The name of the employer company/organization. + // + // For example, "Google", "Alphabet", and so on. + // + // Number of characters allowed is 100. + EmployerName string `protobuf:"bytes,3,opt,name=employer_name,json=employerName,proto3" json:"employer_name,omitempty"` + // Optional. + // + // The division name of the employment. + // + // For example, division, department, client, and so on. + // + // Number of characters allowed is 100. + DivisionName string `protobuf:"bytes,4,opt,name=division_name,json=divisionName,proto3" json:"division_name,omitempty"` + // Optional. + // + // The physical address of the employer. + Address *Address `protobuf:"bytes,5,opt,name=address,proto3" json:"address,omitempty"` + // Optional. + // + // The job title of the employment. + // + // For example, "Software Engineer", "Data Scientist", and so on. + // + // Number of characters allowed is 100. + JobTitle string `protobuf:"bytes,6,opt,name=job_title,json=jobTitle,proto3" json:"job_title,omitempty"` + // Optional. + // + // The description of job content. + // + // Number of characters allowed is 100,000. + JobDescription string `protobuf:"bytes,7,opt,name=job_description,json=jobDescription,proto3" json:"job_description,omitempty"` + // Optional. + // + // If the jobs is a supervisor position. 
+ IsSupervisor *wrappers.BoolValue `protobuf:"bytes,8,opt,name=is_supervisor,json=isSupervisor,proto3" json:"is_supervisor,omitempty"` + // Optional. + // + // If this employment is self-employed. + IsSelfEmployed *wrappers.BoolValue `protobuf:"bytes,9,opt,name=is_self_employed,json=isSelfEmployed,proto3" json:"is_self_employed,omitempty"` + // Optional. + // + // If this employment is current. + IsCurrent *wrappers.BoolValue `protobuf:"bytes,10,opt,name=is_current,json=isCurrent,proto3" json:"is_current,omitempty"` + // Output only. The job title snippet shows how the [job_title][google.cloud.talent.v4beta1.EmploymentRecord.job_title] is related + // to a search query. It's empty if the [job_title][google.cloud.talent.v4beta1.EmploymentRecord.job_title] isn't related to the + // search query. + JobTitleSnippet string `protobuf:"bytes,11,opt,name=job_title_snippet,json=jobTitleSnippet,proto3" json:"job_title_snippet,omitempty"` + // Output only. The job description snippet shows how the [job_description][google.cloud.talent.v4beta1.EmploymentRecord.job_description] + // is related to a search query. It's empty if the [job_description][google.cloud.talent.v4beta1.EmploymentRecord.job_description] isn't + // related to the search query. + JobDescriptionSnippet string `protobuf:"bytes,12,opt,name=job_description_snippet,json=jobDescriptionSnippet,proto3" json:"job_description_snippet,omitempty"` + // Output only. The employer name snippet shows how the [employer_name][google.cloud.talent.v4beta1.EmploymentRecord.employer_name] is + // related to a search query. It's empty if the [employer_name][google.cloud.talent.v4beta1.EmploymentRecord.employer_name] isn't + // related to the search query. + EmployerNameSnippet string `protobuf:"bytes,13,opt,name=employer_name_snippet,json=employerNameSnippet,proto3" json:"employer_name_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EmploymentRecord) Reset() { *m = EmploymentRecord{} } +func (m *EmploymentRecord) String() string { return proto.CompactTextString(m) } +func (*EmploymentRecord) ProtoMessage() {} +func (*EmploymentRecord) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{8} +} +func (m *EmploymentRecord) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EmploymentRecord.Unmarshal(m, b) +} +func (m *EmploymentRecord) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EmploymentRecord.Marshal(b, m, deterministic) +} +func (dst *EmploymentRecord) XXX_Merge(src proto.Message) { + xxx_messageInfo_EmploymentRecord.Merge(dst, src) +} +func (m *EmploymentRecord) XXX_Size() int { + return xxx_messageInfo_EmploymentRecord.Size(m) +} +func (m *EmploymentRecord) XXX_DiscardUnknown() { + xxx_messageInfo_EmploymentRecord.DiscardUnknown(m) +} + +var xxx_messageInfo_EmploymentRecord proto.InternalMessageInfo + +func (m *EmploymentRecord) GetStartDate() *date.Date { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *EmploymentRecord) GetEndDate() *date.Date { + if m != nil { + return m.EndDate + } + return nil +} + +func (m *EmploymentRecord) GetEmployerName() string { + if m != nil { + return m.EmployerName + } + return "" +} + +func (m *EmploymentRecord) GetDivisionName() string { + if m != nil { + return m.DivisionName + } + return "" +} + +func (m *EmploymentRecord) GetAddress() *Address { + if m != nil { + return m.Address + } + return nil +} + +func (m 
*EmploymentRecord) GetJobTitle() string { + if m != nil { + return m.JobTitle + } + return "" +} + +func (m *EmploymentRecord) GetJobDescription() string { + if m != nil { + return m.JobDescription + } + return "" +} + +func (m *EmploymentRecord) GetIsSupervisor() *wrappers.BoolValue { + if m != nil { + return m.IsSupervisor + } + return nil +} + +func (m *EmploymentRecord) GetIsSelfEmployed() *wrappers.BoolValue { + if m != nil { + return m.IsSelfEmployed + } + return nil +} + +func (m *EmploymentRecord) GetIsCurrent() *wrappers.BoolValue { + if m != nil { + return m.IsCurrent + } + return nil +} + +func (m *EmploymentRecord) GetJobTitleSnippet() string { + if m != nil { + return m.JobTitleSnippet + } + return "" +} + +func (m *EmploymentRecord) GetJobDescriptionSnippet() string { + if m != nil { + return m.JobDescriptionSnippet + } + return "" +} + +func (m *EmploymentRecord) GetEmployerNameSnippet() string { + if m != nil { + return m.EmployerNameSnippet + } + return "" +} + +// Resource that represents an education record of a candidate. +type EducationRecord struct { + // Optional. + // + // The start date of the education. + StartDate *date.Date `protobuf:"bytes,1,opt,name=start_date,json=startDate,proto3" json:"start_date,omitempty"` + // Optional. + // + // The end date of the education. + EndDate *date.Date `protobuf:"bytes,2,opt,name=end_date,json=endDate,proto3" json:"end_date,omitempty"` + // Optional. + // + // The expected graduation date if currently pursuing a degree. + ExpectedGraduationDate *date.Date `protobuf:"bytes,3,opt,name=expected_graduation_date,json=expectedGraduationDate,proto3" json:"expected_graduation_date,omitempty"` + // Optional. + // + // The name of the school or institution. + // + // For example, "Stanford University", "UC Berkeley", and so on. + // + // Number of characters allowed is 100. + SchoolName string `protobuf:"bytes,4,opt,name=school_name,json=schoolName,proto3" json:"school_name,omitempty"` + // Optional. + // + // The physical address of the education institution. + Address *Address `protobuf:"bytes,5,opt,name=address,proto3" json:"address,omitempty"` + // The degree information. It can be one of [degree_description][] or + // [structured_degree][]. + // + // Types that are valid to be assigned to Degree: + // *EducationRecord_DegreeDescription + // *EducationRecord_StructuredDegree + Degree isEducationRecord_Degree `protobuf_oneof:"degree"` + // Optional. + // + // The description of the education. + // + // Number of characters allowed is 100,000. + Description string `protobuf:"bytes,8,opt,name=description,proto3" json:"description,omitempty"` + // Optional. + // + // If this education is current. + IsCurrent *wrappers.BoolValue `protobuf:"bytes,9,opt,name=is_current,json=isCurrent,proto3" json:"is_current,omitempty"` + // Output only. The school name snippet shows how the [school_name][google.cloud.talent.v4beta1.EducationRecord.school_name] is + // related to a search query in search result. It's empty if the + // [school_name][google.cloud.talent.v4beta1.EducationRecord.school_name] isn't related to the search query. + SchoolNameSnippet string `protobuf:"bytes,10,opt,name=school_name_snippet,json=schoolNameSnippet,proto3" json:"school_name_snippet,omitempty"` + // Output only. The job description snippet shows how the [degree][google.cloud.talent.v4beta1.degree] is + // related to a search query in search result. It's empty if the [degree][google.cloud.talent.v4beta1.degree] + // isn't related to the search query. 
+ DegreeSnippet string `protobuf:"bytes,11,opt,name=degree_snippet,json=degreeSnippet,proto3" json:"degree_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EducationRecord) Reset() { *m = EducationRecord{} } +func (m *EducationRecord) String() string { return proto.CompactTextString(m) } +func (*EducationRecord) ProtoMessage() {} +func (*EducationRecord) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{9} +} +func (m *EducationRecord) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EducationRecord.Unmarshal(m, b) +} +func (m *EducationRecord) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EducationRecord.Marshal(b, m, deterministic) +} +func (dst *EducationRecord) XXX_Merge(src proto.Message) { + xxx_messageInfo_EducationRecord.Merge(dst, src) +} +func (m *EducationRecord) XXX_Size() int { + return xxx_messageInfo_EducationRecord.Size(m) +} +func (m *EducationRecord) XXX_DiscardUnknown() { + xxx_messageInfo_EducationRecord.DiscardUnknown(m) +} + +var xxx_messageInfo_EducationRecord proto.InternalMessageInfo + +func (m *EducationRecord) GetStartDate() *date.Date { + if m != nil { + return m.StartDate + } + return nil +} + +func (m *EducationRecord) GetEndDate() *date.Date { + if m != nil { + return m.EndDate + } + return nil +} + +func (m *EducationRecord) GetExpectedGraduationDate() *date.Date { + if m != nil { + return m.ExpectedGraduationDate + } + return nil +} + +func (m *EducationRecord) GetSchoolName() string { + if m != nil { + return m.SchoolName + } + return "" +} + +func (m *EducationRecord) GetAddress() *Address { + if m != nil { + return m.Address + } + return nil +} + +type isEducationRecord_Degree interface { + isEducationRecord_Degree() +} + +type EducationRecord_DegreeDescription struct { + DegreeDescription string `protobuf:"bytes,6,opt,name=degree_description,json=degreeDescription,proto3,oneof"` +} + +type EducationRecord_StructuredDegree struct { + StructuredDegree *Degree `protobuf:"bytes,7,opt,name=structured_degree,json=structuredDegree,proto3,oneof"` +} + +func (*EducationRecord_DegreeDescription) isEducationRecord_Degree() {} + +func (*EducationRecord_StructuredDegree) isEducationRecord_Degree() {} + +func (m *EducationRecord) GetDegree() isEducationRecord_Degree { + if m != nil { + return m.Degree + } + return nil +} + +func (m *EducationRecord) GetDegreeDescription() string { + if x, ok := m.GetDegree().(*EducationRecord_DegreeDescription); ok { + return x.DegreeDescription + } + return "" +} + +func (m *EducationRecord) GetStructuredDegree() *Degree { + if x, ok := m.GetDegree().(*EducationRecord_StructuredDegree); ok { + return x.StructuredDegree + } + return nil +} + +func (m *EducationRecord) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EducationRecord) GetIsCurrent() *wrappers.BoolValue { + if m != nil { + return m.IsCurrent + } + return nil +} + +func (m *EducationRecord) GetSchoolNameSnippet() string { + if m != nil { + return m.SchoolNameSnippet + } + return "" +} + +func (m *EducationRecord) GetDegreeSnippet() string { + if m != nil { + return m.DegreeSnippet + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*EducationRecord) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _EducationRecord_OneofMarshaler, _EducationRecord_OneofUnmarshaler, _EducationRecord_OneofSizer, []interface{}{ + (*EducationRecord_DegreeDescription)(nil), + (*EducationRecord_StructuredDegree)(nil), + } +} + +func _EducationRecord_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*EducationRecord) + // degree + switch x := m.Degree.(type) { + case *EducationRecord_DegreeDescription: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.DegreeDescription) + case *EducationRecord_StructuredDegree: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredDegree); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("EducationRecord.Degree has unexpected type %T", x) + } + return nil +} + +func _EducationRecord_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*EducationRecord) + switch tag { + case 6: // degree.degree_description + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Degree = &EducationRecord_DegreeDescription{x} + return true, err + case 7: // degree.structured_degree + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Degree) + err := b.DecodeMessage(msg) + m.Degree = &EducationRecord_StructuredDegree{msg} + return true, err + default: + return false, nil + } +} + +func _EducationRecord_OneofSizer(msg proto.Message) (n int) { + m := msg.(*EducationRecord) + // degree + switch x := m.Degree.(type) { + case *EducationRecord_DegreeDescription: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.DegreeDescription))) + n += len(x.DegreeDescription) + case *EducationRecord_StructuredDegree: + s := proto.Size(x.StructuredDegree) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Resource that represents a degree pursuing or acquired by a candidate. +type Degree struct { + // Optional. + // + // ISCED degree type. + DegreeType DegreeType `protobuf:"varint,1,opt,name=degree_type,json=degreeType,proto3,enum=google.cloud.talent.v4beta1.DegreeType" json:"degree_type,omitempty"` + // Optional. + // + // Full Degree name. + // + // For example, "B.S.", "Master of Arts", and so on. + // + // Number of characters allowed is 100. + DegreeName string `protobuf:"bytes,2,opt,name=degree_name,json=degreeName,proto3" json:"degree_name,omitempty"` + // Optional. + // + // Fields of study for the degree. + // + // For example, "Computer science", "engineering". + // + // Number of characters allowed is 100. 
+ FieldsOfStudy []string `protobuf:"bytes,3,rep,name=fields_of_study,json=fieldsOfStudy,proto3" json:"fields_of_study,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Degree) Reset() { *m = Degree{} } +func (m *Degree) String() string { return proto.CompactTextString(m) } +func (*Degree) ProtoMessage() {} +func (*Degree) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{10} +} +func (m *Degree) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Degree.Unmarshal(m, b) +} +func (m *Degree) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Degree.Marshal(b, m, deterministic) +} +func (dst *Degree) XXX_Merge(src proto.Message) { + xxx_messageInfo_Degree.Merge(dst, src) +} +func (m *Degree) XXX_Size() int { + return xxx_messageInfo_Degree.Size(m) +} +func (m *Degree) XXX_DiscardUnknown() { + xxx_messageInfo_Degree.DiscardUnknown(m) +} + +var xxx_messageInfo_Degree proto.InternalMessageInfo + +func (m *Degree) GetDegreeType() DegreeType { + if m != nil { + return m.DegreeType + } + return DegreeType_DEGREE_TYPE_UNSPECIFIED +} + +func (m *Degree) GetDegreeName() string { + if m != nil { + return m.DegreeName + } + return "" +} + +func (m *Degree) GetFieldsOfStudy() []string { + if m != nil { + return m.FieldsOfStudy + } + return nil +} + +// Resource that represents an individual or collaborative activity participated +// in by a candidate, for example, an open-source project, a class assignment, +// and so on. +type Activity struct { + // Optional. + // + // Activity display name. + // + // Number of characters allowed is 100. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. + // + // Activity description. + // + // Number of characters allowed is 100,000. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Optional. + // + // Activity URI. + // + // Number of characters allowed is 4,000. + Uri string `protobuf:"bytes,3,opt,name=uri,proto3" json:"uri,omitempty"` + // Optional. + // + // The first creation date of the activity. + CreateDate *date.Date `protobuf:"bytes,4,opt,name=create_date,json=createDate,proto3" json:"create_date,omitempty"` + // Optional. + // + // The last update date of the activity. + UpdateDate *date.Date `protobuf:"bytes,5,opt,name=update_date,json=updateDate,proto3" json:"update_date,omitempty"` + // Optional. + // + // A list of team members involved in this activity. + // + // Number of characters allowed is 100. + TeamMembers []string `protobuf:"bytes,6,rep,name=team_members,json=teamMembers,proto3" json:"team_members,omitempty"` + // Optional. + // + // A list of skills used in this activity. + SkillsUsed []*Skill `protobuf:"bytes,7,rep,name=skills_used,json=skillsUsed,proto3" json:"skills_used,omitempty"` + // Output only. Activity name snippet shows how the [display_name][google.cloud.talent.v4beta1.Activity.display_name] is + // related to a search query. It's empty if the [display_name][google.cloud.talent.v4beta1.Activity.display_name] isn't related + // to the search query. + ActivityNameSnippet string `protobuf:"bytes,8,opt,name=activity_name_snippet,json=activityNameSnippet,proto3" json:"activity_name_snippet,omitempty"` + // Output only. 
Activity description snippet shows how the + // [description][google.cloud.talent.v4beta1.Activity.description] is related to a search query. It's empty if the + // [description][google.cloud.talent.v4beta1.Activity.description] isn't related to the search query. + ActivityDescriptionSnippet string `protobuf:"bytes,9,opt,name=activity_description_snippet,json=activityDescriptionSnippet,proto3" json:"activity_description_snippet,omitempty"` + // Output only. Skill used snippet shows how the corresponding + // [skills_used][google.cloud.talent.v4beta1.Activity.skills_used] are related to a search query. It's empty if the + // corresponding [skills_used][google.cloud.talent.v4beta1.Activity.skills_used] are not related to the search query. + SkillsUsedSnippet []string `protobuf:"bytes,10,rep,name=skills_used_snippet,json=skillsUsedSnippet,proto3" json:"skills_used_snippet,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Activity) Reset() { *m = Activity{} } +func (m *Activity) String() string { return proto.CompactTextString(m) } +func (*Activity) ProtoMessage() {} +func (*Activity) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{11} +} +func (m *Activity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Activity.Unmarshal(m, b) +} +func (m *Activity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Activity.Marshal(b, m, deterministic) +} +func (dst *Activity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Activity.Merge(dst, src) +} +func (m *Activity) XXX_Size() int { + return xxx_messageInfo_Activity.Size(m) +} +func (m *Activity) XXX_DiscardUnknown() { + xxx_messageInfo_Activity.DiscardUnknown(m) +} + +var xxx_messageInfo_Activity proto.InternalMessageInfo + +func (m *Activity) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Activity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Activity) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *Activity) GetCreateDate() *date.Date { + if m != nil { + return m.CreateDate + } + return nil +} + +func (m *Activity) GetUpdateDate() *date.Date { + if m != nil { + return m.UpdateDate + } + return nil +} + +func (m *Activity) GetTeamMembers() []string { + if m != nil { + return m.TeamMembers + } + return nil +} + +func (m *Activity) GetSkillsUsed() []*Skill { + if m != nil { + return m.SkillsUsed + } + return nil +} + +func (m *Activity) GetActivityNameSnippet() string { + if m != nil { + return m.ActivityNameSnippet + } + return "" +} + +func (m *Activity) GetActivityDescriptionSnippet() string { + if m != nil { + return m.ActivityDescriptionSnippet + } + return "" +} + +func (m *Activity) GetSkillsUsedSnippet() []string { + if m != nil { + return m.SkillsUsedSnippet + } + return nil +} + +// Resource that represents a publication resource of a candidate. +type Publication struct { + // Optional. + // + // A list of author names. + // + // Number of characters allowed is 100. + Authors []string `protobuf:"bytes,1,rep,name=authors,proto3" json:"authors,omitempty"` + // Optional. + // + // The title of the publication. + // + // Number of characters allowed is 100. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. + // + // The description of the publication. + // + // Number of characters allowed is 100,000. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Optional. + // + // The journal name of the publication. + // + // Number of characters allowed is 100. + Journal string `protobuf:"bytes,4,opt,name=journal,proto3" json:"journal,omitempty"` + // Optional. + // + // Volume number. + // + // Number of characters allowed is 100. + Volume string `protobuf:"bytes,5,opt,name=volume,proto3" json:"volume,omitempty"` + // Optional. + // + // The publisher of the journal. + // + // Number of characters allowed is 100. + Publisher string `protobuf:"bytes,6,opt,name=publisher,proto3" json:"publisher,omitempty"` + // Optional. + // + // The publication date. + PublicationDate *date.Date `protobuf:"bytes,7,opt,name=publication_date,json=publicationDate,proto3" json:"publication_date,omitempty"` + // Optional. + // + // The publication type. + // + // Number of characters allowed is 100. + PublicationType string `protobuf:"bytes,8,opt,name=publication_type,json=publicationType,proto3" json:"publication_type,omitempty"` + // Optional. + // + // ISBN number. + // + // Number of characters allowed is 100. + Isbn string `protobuf:"bytes,9,opt,name=isbn,proto3" json:"isbn,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Publication) Reset() { *m = Publication{} } +func (m *Publication) String() string { return proto.CompactTextString(m) } +func (*Publication) ProtoMessage() {} +func (*Publication) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{12} +} +func (m *Publication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Publication.Unmarshal(m, b) +} +func (m *Publication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Publication.Marshal(b, m, deterministic) +} +func (dst *Publication) XXX_Merge(src proto.Message) { + xxx_messageInfo_Publication.Merge(dst, src) +} +func (m *Publication) XXX_Size() int { + return xxx_messageInfo_Publication.Size(m) +} +func (m *Publication) XXX_DiscardUnknown() { + xxx_messageInfo_Publication.DiscardUnknown(m) +} + +var xxx_messageInfo_Publication proto.InternalMessageInfo + +func (m *Publication) GetAuthors() []string { + if m != nil { + return m.Authors + } + return nil +} + +func (m *Publication) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Publication) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Publication) GetJournal() string { + if m != nil { + return m.Journal + } + return "" +} + +func (m *Publication) GetVolume() string { + if m != nil { + return m.Volume + } + return "" +} + +func (m *Publication) GetPublisher() string { + if m != nil { + return m.Publisher + } + return "" +} + +func (m *Publication) GetPublicationDate() *date.Date { + if m != nil { + return m.PublicationDate + } + return nil +} + +func (m *Publication) GetPublicationType() string { + if m != nil { + return m.PublicationType + } + return "" +} + +func (m *Publication) GetIsbn() string { + if m != nil { + return m.Isbn + } + return "" +} + +// Resource that represents the patent acquired by a candidate. +type Patent struct { + // Optional. + // + // Name of the patent. + // + // Number of characters allowed is 100. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Optional. + // + // A list of inventors' names. 
+ // + // Number of characters allowed for each is 100. + Inventors []string `protobuf:"bytes,2,rep,name=inventors,proto3" json:"inventors,omitempty"` + // Optional. + // + // The status of the patent. + // + // Number of characters allowed is 100. + PatentStatus string `protobuf:"bytes,3,opt,name=patent_status,json=patentStatus,proto3" json:"patent_status,omitempty"` + // Optional. + // + // The date the last time the status of the patent was checked. + PatentStatusDate *date.Date `protobuf:"bytes,4,opt,name=patent_status_date,json=patentStatusDate,proto3" json:"patent_status_date,omitempty"` + // Optional. + // + // The date that the patent was filed. + PatentFilingDate *date.Date `protobuf:"bytes,5,opt,name=patent_filing_date,json=patentFilingDate,proto3" json:"patent_filing_date,omitempty"` + // Optional. + // + // The name of the patent office. + // + // Number of characters allowed is 100. + PatentOffice string `protobuf:"bytes,6,opt,name=patent_office,json=patentOffice,proto3" json:"patent_office,omitempty"` + // Optional. + // + // The number of the patent. + // + // Number of characters allowed is 100. + PatentNumber string `protobuf:"bytes,7,opt,name=patent_number,json=patentNumber,proto3" json:"patent_number,omitempty"` + // Optional. + // + // The description of the patent. + // + // Number of characters allowed is 100,000. + PatentDescription string `protobuf:"bytes,8,opt,name=patent_description,json=patentDescription,proto3" json:"patent_description,omitempty"` + // Optional. + // + // The skills used in this patent. + SkillsUsed []*Skill `protobuf:"bytes,9,rep,name=skills_used,json=skillsUsed,proto3" json:"skills_used,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Patent) Reset() { *m = Patent{} } +func (m *Patent) String() string { return proto.CompactTextString(m) } +func (*Patent) ProtoMessage() {} +func (*Patent) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_1ba3c938ea40ebf9, []int{13} +} +func (m *Patent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Patent.Unmarshal(m, b) +} +func (m *Patent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Patent.Marshal(b, m, deterministic) +} +func (dst *Patent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Patent.Merge(dst, src) +} +func (m *Patent) XXX_Size() int { + return xxx_messageInfo_Patent.Size(m) +} +func (m *Patent) XXX_DiscardUnknown() { + xxx_messageInfo_Patent.DiscardUnknown(m) +} + +var xxx_messageInfo_Patent proto.InternalMessageInfo + +func (m *Patent) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Patent) GetInventors() []string { + if m != nil { + return m.Inventors + } + return nil +} + +func (m *Patent) GetPatentStatus() string { + if m != nil { + return m.PatentStatus + } + return "" +} + +func (m *Patent) GetPatentStatusDate() *date.Date { + if m != nil { + return m.PatentStatusDate + } + return nil +} + +func (m *Patent) GetPatentFilingDate() *date.Date { + if m != nil { + return m.PatentFilingDate + } + return nil +} + +func (m *Patent) GetPatentOffice() string { + if m != nil { + return m.PatentOffice + } + return "" +} + +func (m *Patent) GetPatentNumber() string { + if m != nil { + return m.PatentNumber + } + return "" +} + +func (m *Patent) GetPatentDescription() string { + if m != nil { + return m.PatentDescription + } + return "" +} + +func (m *Patent) GetSkillsUsed() []*Skill { + if m != nil { + 
return m.SkillsUsed + } + return nil +} + +func init() { + proto.RegisterType((*Profile)(nil), "google.cloud.talent.v4beta1.Profile") + proto.RegisterMapType((map[string]*CustomAttribute)(nil), "google.cloud.talent.v4beta1.Profile.CustomAttributesEntry") + proto.RegisterType((*Resume)(nil), "google.cloud.talent.v4beta1.Resume") + proto.RegisterType((*PersonName)(nil), "google.cloud.talent.v4beta1.PersonName") + proto.RegisterType((*PersonName_PersonStructuredName)(nil), "google.cloud.talent.v4beta1.PersonName.PersonStructuredName") + proto.RegisterType((*Address)(nil), "google.cloud.talent.v4beta1.Address") + proto.RegisterType((*Email)(nil), "google.cloud.talent.v4beta1.Email") + proto.RegisterType((*Phone)(nil), "google.cloud.talent.v4beta1.Phone") + proto.RegisterType((*PersonalUri)(nil), "google.cloud.talent.v4beta1.PersonalUri") + proto.RegisterType((*AdditionalContactInfo)(nil), "google.cloud.talent.v4beta1.AdditionalContactInfo") + proto.RegisterType((*EmploymentRecord)(nil), "google.cloud.talent.v4beta1.EmploymentRecord") + proto.RegisterType((*EducationRecord)(nil), "google.cloud.talent.v4beta1.EducationRecord") + proto.RegisterType((*Degree)(nil), "google.cloud.talent.v4beta1.Degree") + proto.RegisterType((*Activity)(nil), "google.cloud.talent.v4beta1.Activity") + proto.RegisterType((*Publication)(nil), "google.cloud.talent.v4beta1.Publication") + proto.RegisterType((*Patent)(nil), "google.cloud.talent.v4beta1.Patent") + proto.RegisterEnum("google.cloud.talent.v4beta1.Resume_ResumeType", Resume_ResumeType_name, Resume_ResumeType_value) + proto.RegisterEnum("google.cloud.talent.v4beta1.Phone_PhoneType", Phone_PhoneType_name, Phone_PhoneType_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/profile.proto", fileDescriptor_profile_1ba3c938ea40ebf9) +} + +var fileDescriptor_profile_1ba3c938ea40ebf9 = []byte{ + // 2288 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x59, 0xcd, 0x72, 0x1b, 0xb9, + 0xf1, 0x17, 0x49, 0x89, 0x1f, 0xcd, 0x0f, 0x91, 0xb0, 0x25, 0xcf, 0x5f, 0xf6, 0xfe, 0xad, 0xe5, + 0xc6, 0xb1, 0x76, 0xe3, 0xa5, 0x36, 0xda, 0x4d, 0x2a, 0xf1, 0x6e, 0xb2, 0xab, 0x0f, 0xda, 0x62, + 0xac, 0x0f, 0xd6, 0x48, 0x72, 0xad, 0x53, 0xa9, 0x9a, 0x02, 0x39, 0x20, 0x05, 0x79, 0x38, 0x33, + 0x3b, 0x98, 0x91, 0xad, 0x6b, 0x5e, 0x20, 0x95, 0x73, 0x72, 0x4a, 0xae, 0xa9, 0x5c, 0xf2, 0x28, + 0x39, 0xe4, 0xb2, 0xa7, 0xbc, 0x42, 0x5e, 0x20, 0x85, 0x06, 0x66, 0x38, 0xa4, 0x25, 0x8a, 0x49, + 0x39, 0x17, 0x69, 0xd0, 0xf8, 0xfd, 0x1a, 0x40, 0xa3, 0xd1, 0xe8, 0x06, 0xe1, 0xe3, 0xa1, 0xe7, + 0x0d, 0x1d, 0xb6, 0xd9, 0x77, 0xbc, 0xc8, 0xde, 0x0c, 0xa9, 0xc3, 0xdc, 0x70, 0xf3, 0xf2, 0x8b, + 0x1e, 0x0b, 0xe9, 0x8f, 0x37, 0xfd, 0xc0, 0x1b, 0x70, 0x87, 0xb5, 0xfc, 0xc0, 0x0b, 0x3d, 0x72, + 0x5f, 0x41, 0x5b, 0x08, 0x6d, 0x29, 0x68, 0x4b, 0x43, 0xd7, 0x1e, 0x68, 0x3d, 0xd4, 0xe7, 0x9b, + 0xd4, 0x75, 0xbd, 0x90, 0x86, 0xdc, 0x73, 0x85, 0xa2, 0xae, 0x6d, 0xcc, 0x1a, 0xa5, 0xef, 0x8d, + 0x46, 0x9e, 0xab, 0x91, 0x8f, 0x66, 0x21, 0x2f, 0xbc, 0x9e, 0x86, 0xfd, 0xbf, 0x86, 0x61, 0xab, + 0x17, 0x0d, 0x36, 0xed, 0x28, 0xc0, 0x11, 0x75, 0xff, 0xfa, 0x74, 0xff, 0x80, 0x33, 0xc7, 0xb6, + 0x46, 0x54, 0xbc, 0xd6, 0x88, 0x87, 0xd3, 0x88, 0x90, 0x8f, 0x98, 0x08, 0xe9, 0xc8, 0xbf, 0x69, + 0x88, 0x37, 0x01, 0xf5, 0x7d, 0x16, 0xc4, 0x6b, 0x5a, 0xd5, 0xfd, 0xe1, 0x95, 0xcf, 0x36, 0x6d, + 0x1a, 0xb2, 0xa9, 0xa1, 0x51, 0xee, 0x7b, 0x22, 0xa4, 0x8e, 0x45, 0x6d, 0x3b, 0x60, 0x42, 0x33, + 0x9b, 0xdf, 0x57, 0xa1, 0xd0, 0x55, 0xa6, 0x25, 0x04, 0x16, 
0x5d, 0x3a, 0x62, 0x46, 0x66, 0x3d, + 0xb3, 0x51, 0x32, 0xf1, 0x9b, 0x3c, 0x84, 0x32, 0x7b, 0x1b, 0xb2, 0xc0, 0xa5, 0x8e, 0xc5, 0x6d, + 0x23, 0x8b, 0x5d, 0x10, 0x8b, 0x3a, 0x36, 0x59, 0x85, 0xbc, 0xf0, 0xa2, 0xa0, 0xcf, 0x8c, 0x1c, + 0xf6, 0xe9, 0x16, 0xa9, 0x43, 0x2e, 0x0a, 0xb8, 0xb1, 0x88, 0x42, 0xf9, 0x49, 0xfe, 0x0f, 0x8a, + 0xc3, 0xc0, 0x8b, 0x7c, 0xa9, 0x67, 0x09, 0xc5, 0x05, 0x6c, 0x77, 0x6c, 0xf2, 0x73, 0x00, 0x2e, + 0xac, 0x73, 0x1e, 0xd0, 0x9e, 0xc3, 0x8c, 0xfc, 0x7a, 0x66, 0xa3, 0xbc, 0xb5, 0xd6, 0xd2, 0x7b, + 0x1c, 0x2f, 0xba, 0xb5, 0xe3, 0x79, 0xce, 0x4b, 0xea, 0x44, 0xcc, 0x2c, 0x71, 0xb1, 0xaf, 0xc0, + 0xe4, 0x4b, 0x28, 0xf7, 0x03, 0x46, 0x43, 0x66, 0x49, 0xa3, 0x19, 0x85, 0x1b, 0xb8, 0xa7, 0xb1, + 0x45, 0x4d, 0x50, 0x70, 0x29, 0x90, 0xe4, 0xc8, 0xb7, 0x13, 0x72, 0xf1, 0x76, 0xb2, 0x82, 0x6b, + 0x72, 0x3e, 0x60, 0x22, 0x1a, 0x31, 0xe3, 0x27, 0xc8, 0xfb, 0xa8, 0x35, 0xc3, 0x29, 0x5b, 0x26, + 0x42, 0x4d, 0x4d, 0x21, 0xbf, 0x82, 0x8a, 0xdc, 0x3f, 0xcf, 0xb5, 0xa4, 0x99, 0x85, 0x51, 0x5e, + 0xcf, 0x6d, 0x94, 0xb7, 0x1e, 0xcf, 0x54, 0xd1, 0x45, 0xc2, 0x11, 0x1d, 0x31, 0xb3, 0xec, 0x27, + 0xdf, 0x82, 0xec, 0x40, 0x49, 0x6f, 0x2a, 0x13, 0x46, 0x05, 0x15, 0xfd, 0x60, 0xa6, 0xa2, 0x6d, + 0x85, 0x36, 0xc7, 0x34, 0xf2, 0x02, 0x96, 0xd9, 0x88, 0xf2, 0xc4, 0x3d, 0x98, 0x30, 0xaa, 0xa8, + 0xa9, 0x39, 0x53, 0x53, 0x5b, 0x72, 0xcc, 0x1a, 0x52, 0xb7, 0x13, 0x65, 0xcf, 0xa1, 0xea, 0x9f, + 0x7b, 0x2e, 0xb3, 0xdc, 0x68, 0xd4, 0x63, 0x81, 0x30, 0x6a, 0x73, 0xa8, 0xea, 0x4a, 0x86, 0x59, + 0x41, 0xe2, 0x91, 0xe2, 0x91, 0x43, 0xa8, 0xaa, 0x85, 0x52, 0xc7, 0x8a, 0x02, 0x2e, 0x8c, 0x65, + 0x54, 0xb4, 0x31, 0x87, 0x99, 0xa8, 0x73, 0x16, 0x70, 0xb3, 0xe2, 0x8f, 0x1b, 0x82, 0x5c, 0xc0, + 0x3d, 0x6a, 0xdb, 0x5c, 0x9e, 0x4d, 0xea, 0x58, 0x7d, 0xcf, 0x0d, 0x69, 0x3f, 0xb4, 0xb8, 0x3b, + 0xf0, 0x8c, 0x3a, 0x2a, 0xde, 0xba, 0xcd, 0x6c, 0x9a, 0xbb, 0xab, 0xa8, 0x1d, 0x77, 0xe0, 0x99, + 0x2b, 0xf4, 0x3a, 0x31, 0xf9, 0x0d, 0x10, 0x36, 0xf2, 0x1d, 0xef, 0x6a, 0xc4, 0xdc, 0xd0, 0x0a, + 0x58, 0xdf, 0x0b, 0x6c, 0x61, 0x34, 0x70, 0x98, 0x4f, 0x6f, 0xb1, 0x69, 0x4c, 0x33, 0x91, 0x65, + 0x36, 0xd8, 0x94, 0x44, 0x90, 0x57, 0xd0, 0x60, 0x76, 0xd4, 0xc7, 0x30, 0x93, 0x28, 0x27, 0xa8, + 0xfc, 0xc9, 0x6c, 0xe5, 0x31, 0x4b, 0xeb, 0xae, 0xb3, 0x49, 0x81, 0x20, 0x4f, 0x21, 0x2f, 0x5e, + 0x73, 0xc7, 0x11, 0xc6, 0x9d, 0x39, 0x76, 0xed, 0x44, 0x42, 0x4d, 0xcd, 0x20, 0x6d, 0x00, 0xda, + 0x0f, 0xf9, 0x25, 0x0f, 0x39, 0x13, 0xc6, 0x5d, 0xe4, 0x3f, 0x9a, 0x6d, 0x53, 0x05, 0xbf, 0x32, + 0x53, 0x44, 0x72, 0x00, 0x15, 0x3f, 0xea, 0x39, 0x5c, 0x4d, 0x4c, 0x18, 0x2b, 0xf3, 0xec, 0xfa, + 0x98, 0x60, 0x4e, 0xb0, 0xc9, 0x2f, 0xa0, 0xe0, 0xd3, 0x90, 0xb9, 0xa1, 0x30, 0x56, 0x51, 0xd1, + 0xec, 0x83, 0xda, 0x45, 0xac, 0x19, 0x73, 0x88, 0x09, 0xb5, 0x3e, 0x0b, 0x42, 0x3e, 0x48, 0xa6, + 0x73, 0x0f, 0xb5, 0x7c, 0x32, 0x53, 0xcb, 0x6e, 0x9a, 0x62, 0x4e, 0x69, 0x20, 0x4d, 0xa8, 0x50, + 0xdf, 0x1f, 0x2f, 0x70, 0x73, 0x3d, 0xb7, 0x51, 0x32, 0x27, 0x64, 0x64, 0x1d, 0xca, 0x54, 0x08, + 0x3e, 0x74, 0x47, 0x38, 0xf5, 0xcf, 0x10, 0x92, 0x16, 0x91, 0x21, 0x34, 0xfa, 0x91, 0x08, 0xbd, + 0x91, 0x45, 0xc3, 0x30, 0xe0, 0xbd, 0x28, 0x64, 0xc2, 0x58, 0xc3, 0xc9, 0x3d, 0x9d, 0xbd, 0x44, + 0x7d, 0x97, 0xee, 0x22, 0x7b, 0x3b, 0x21, 0xb7, 0xdd, 0x30, 0xb8, 0x32, 0xeb, 0xfd, 0x29, 0x31, + 0x79, 0x00, 0x25, 0x3f, 0xf0, 0xfa, 0xf2, 0x70, 0xdb, 0xc6, 0xfd, 0xf5, 0xcc, 0x46, 0xd1, 0x1c, + 0x0b, 0xc8, 0x63, 0x58, 0x7e, 0xcd, 0xae, 0xde, 0x78, 0x81, 0x6d, 0x09, 0x97, 0xfb, 0x3e, 0x0b, + 0x8d, 0x07, 0x18, 0xde, 0x6b, 0x5a, 0x7c, 0xa2, 0xa4, 0x6b, 0xdf, 0xc1, 0xca, 0xb5, 
0x23, 0xca, + 0xbb, 0xe2, 0x35, 0xbb, 0xd2, 0xf7, 0x8e, 0xfc, 0x24, 0x3b, 0xb0, 0x74, 0x29, 0x23, 0x3d, 0x5e, + 0x38, 0xb7, 0xf9, 0xf4, 0x94, 0x52, 0x53, 0x51, 0x9f, 0x66, 0x7f, 0x96, 0x69, 0xfe, 0x3d, 0x03, + 0x79, 0x15, 0x79, 0xc9, 0x8f, 0xa0, 0x21, 0xc2, 0x20, 0xea, 0x87, 0x51, 0xc0, 0x6c, 0x4b, 0x47, + 0x6e, 0x35, 0x64, 0x7d, 0xdc, 0xa1, 0xc1, 0xc7, 0x50, 0x56, 0x08, 0x4b, 0x5e, 0x9d, 0x38, 0x8b, + 0xda, 0x56, 0x6b, 0x8e, 0x00, 0xaf, 0xff, 0x9d, 0x5e, 0xf9, 0xcc, 0x84, 0x20, 0xf9, 0x6e, 0xbe, + 0x00, 0x18, 0xf7, 0x90, 0xfb, 0x70, 0xcf, 0x6c, 0x9f, 0x9c, 0x1d, 0xb6, 0xad, 0xd3, 0x57, 0xdd, + 0xb6, 0x75, 0x76, 0x74, 0xd2, 0x6d, 0xef, 0x76, 0x9e, 0x75, 0xda, 0x7b, 0xf5, 0x05, 0x52, 0x82, + 0xa5, 0x7d, 0xf3, 0xdb, 0xc3, 0x83, 0x7a, 0x86, 0xac, 0x40, 0xe3, 0xf8, 0x74, 0xbf, 0x6d, 0x5a, + 0x29, 0x74, 0x3d, 0xdb, 0xfc, 0x5b, 0x0e, 0x60, 0x7c, 0x19, 0x90, 0xc7, 0x50, 0x1b, 0x78, 0xc1, + 0x88, 0x86, 0x21, 0xb3, 0xad, 0xf1, 0x0d, 0xbe, 0xbf, 0x60, 0x56, 0x13, 0x39, 0x02, 0x87, 0xb0, + 0x9c, 0x32, 0x01, 0x22, 0x95, 0x7d, 0xbf, 0x9a, 0xf3, 0xde, 0xd1, 0x9f, 0x27, 0x89, 0x12, 0x29, + 0xdc, 0x5f, 0x30, 0x6b, 0x62, 0x42, 0x42, 0x1e, 0x41, 0xcd, 0x0f, 0xd8, 0x80, 0x05, 0xc9, 0x38, + 0x2a, 0x39, 0xa8, 0x26, 0x52, 0x09, 0x5b, 0xfb, 0x3e, 0x03, 0x77, 0xaf, 0xd3, 0x48, 0x3e, 0x00, + 0x18, 0xf2, 0x4b, 0xe6, 0xa6, 0x56, 0x63, 0x96, 0x50, 0x72, 0x83, 0xfa, 0xfc, 0x35, 0xea, 0x25, + 0x6c, 0xc4, 0x6d, 0xdb, 0x61, 0x16, 0x77, 0x79, 0xc8, 0xa9, 0xa3, 0xd3, 0x97, 0xaa, 0x92, 0x76, + 0x94, 0x50, 0xa6, 0x38, 0x03, 0x3a, 0xe2, 0xce, 0x55, 0x7a, 0xa6, 0xa0, 0x44, 0xa8, 0x67, 0x0d, + 0x8a, 0x22, 0x1a, 0x0c, 0xf8, 0x5b, 0x26, 0x8c, 0x45, 0x3c, 0x86, 0x49, 0x5b, 0xf6, 0xc9, 0x41, + 0xb1, 0x6f, 0x49, 0xf5, 0xc5, 0xed, 0x9d, 0x2a, 0x94, 0x53, 0x77, 0x7c, 0xf3, 0xf7, 0x59, 0x28, + 0xe8, 0x3b, 0x92, 0xec, 0xc2, 0x52, 0x24, 0xe8, 0x50, 0xad, 0xad, 0x76, 0xcb, 0x85, 0x90, 0xba, + 0x56, 0xce, 0x24, 0xc9, 0x54, 0x5c, 0xf2, 0x39, 0xdc, 0x8d, 0xdc, 0xd4, 0x86, 0xea, 0xab, 0x5b, + 0xad, 0x72, 0x7f, 0xc1, 0xbc, 0x93, 0xee, 0x8d, 0x47, 0x7e, 0x01, 0xe4, 0x1a, 0x4a, 0x6e, 0x32, + 0xf3, 0x91, 0x4e, 0xdf, 0xea, 0x62, 0xbe, 0xa8, 0x79, 0xfb, 0x0b, 0x66, 0xe3, 0x5d, 0x65, 0x5f, + 0x40, 0xa1, 0x1f, 0x05, 0x01, 0x73, 0x43, 0x4c, 0xf4, 0x66, 0x27, 0x6d, 0x31, 0x74, 0xa7, 0x04, + 0x05, 0x3d, 0x6e, 0xf3, 0x3b, 0x58, 0xc2, 0x14, 0xe2, 0xfd, 0x18, 0xe4, 0x23, 0xa8, 0x4e, 0x24, + 0x31, 0x7a, 0xbf, 0x2b, 0xe9, 0xf4, 0xa4, 0xf9, 0xaf, 0x2c, 0x2c, 0x61, 0xae, 0xf1, 0x7e, 0xc6, + 0xfc, 0x06, 0x16, 0x53, 0x21, 0xe2, 0xc9, 0xed, 0x29, 0x8e, 0xfa, 0x8b, 0x01, 0x02, 0x99, 0x32, + 0x83, 0x56, 0x79, 0x52, 0x9c, 0x41, 0xab, 0x96, 0x74, 0xdf, 0x37, 0xe7, 0xcc, 0xb5, 0xe8, 0x25, + 0xe5, 0x0e, 0x26, 0xc6, 0x2a, 0x99, 0xae, 0x4a, 0xe9, 0x76, 0x2c, 0x6c, 0xfe, 0x39, 0x03, 0xa5, + 0x44, 0x25, 0x59, 0x83, 0xd5, 0xee, 0xfe, 0xf1, 0xd1, 0xb5, 0x81, 0xa5, 0x02, 0xc5, 0x83, 0xed, + 0xa3, 0xbd, 0x83, 0xce, 0x51, 0xbb, 0x9e, 0x21, 0x00, 0xf9, 0xc3, 0xe3, 0x9d, 0xce, 0x41, 0xbb, + 0x9e, 0x25, 0x05, 0xc8, 0x3d, 0xdb, 0xfe, 0xb6, 0x9e, 0x93, 0xb1, 0xa7, 0xbb, 0xfd, 0xbc, 0x6d, + 0xd6, 0x17, 0x49, 0x0d, 0xe0, 0xf4, 0xf4, 0x95, 0x75, 0x6c, 0x5a, 0xa7, 0x7b, 0x7b, 0xf5, 0x25, + 0x52, 0x85, 0xd2, 0xcb, 0xe3, 0xce, 0x6e, 0xfb, 0x70, 0xbb, 0x73, 0x50, 0xcf, 0x93, 0x32, 0x14, + 0x5e, 0x76, 0xcc, 0xd3, 0xb3, 0xed, 0x83, 0x7a, 0x81, 0x14, 0x61, 0xf1, 0xe5, 0x71, 0xa7, 0x5b, + 0x2f, 0x92, 0x55, 0x20, 0x4a, 0xab, 0x24, 0x26, 0xa3, 0x95, 0x9a, 0x0f, 0xa1, 0x9c, 0xca, 0xcb, + 0xe2, 0xea, 0x20, 0x93, 0x54, 0x07, 0xcd, 0xdf, 0x65, 0x60, 0xe5, 0xda, 0x04, 0xeb, 0xfd, 0x6c, + 0x53, 0x5c, 
0xdb, 0x64, 0x53, 0xb5, 0xcd, 0x07, 0x00, 0x49, 0x0e, 0x68, 0x6b, 0xe3, 0x97, 0xb4, + 0xa4, 0x63, 0x37, 0xff, 0xb8, 0x04, 0xf5, 0xe9, 0x5c, 0x8c, 0x7c, 0x06, 0x20, 0x42, 0x1a, 0x84, + 0x96, 0x2c, 0x03, 0x70, 0x46, 0xe5, 0xad, 0xc6, 0xc4, 0xb1, 0xd9, 0xa3, 0x21, 0x33, 0x4b, 0x08, + 0x92, 0x9f, 0xe4, 0x09, 0x14, 0x99, 0x6b, 0x2b, 0x7c, 0xf6, 0x26, 0x7c, 0x81, 0xb9, 0x36, 0xa2, + 0xd1, 0x85, 0xe5, 0x98, 0x2c, 0x48, 0x87, 0xa3, 0x4a, 0x2c, 0xc4, 0x80, 0xf4, 0x11, 0x54, 0x6d, + 0x7e, 0xc9, 0x05, 0xd7, 0xa1, 0x45, 0x3b, 0x46, 0x25, 0x16, 0x22, 0xe8, 0x97, 0xc9, 0x29, 0xc3, + 0x6a, 0x6b, 0xde, 0x9a, 0x20, 0x26, 0x91, 0xfb, 0x50, 0xba, 0xf0, 0x7a, 0x56, 0xc8, 0x43, 0x27, + 0x8e, 0xaf, 0xc5, 0x0b, 0xaf, 0x77, 0x2a, 0xdb, 0xf2, 0xce, 0x97, 0x9d, 0x36, 0x13, 0xfd, 0x80, + 0xfb, 0x72, 0xcf, 0xb0, 0xf2, 0x2a, 0x99, 0xb5, 0x0b, 0xaf, 0xb7, 0x37, 0x96, 0x92, 0xaf, 0xa1, + 0xca, 0x85, 0x25, 0x22, 0x9f, 0x05, 0x97, 0x5c, 0x78, 0xc1, 0x8d, 0x35, 0xd6, 0x38, 0x4e, 0x54, + 0xb8, 0x38, 0x49, 0xf0, 0x64, 0x0f, 0xea, 0x52, 0x01, 0x73, 0x06, 0x96, 0xb6, 0x81, 0x6d, 0x94, + 0x6e, 0xd5, 0x51, 0xe3, 0xe2, 0x84, 0x39, 0x83, 0xb6, 0x66, 0xe8, 0x02, 0x33, 0x8e, 0x55, 0x30, + 0x4f, 0x81, 0xb9, 0xab, 0xc0, 0xe4, 0x13, 0x68, 0x24, 0x76, 0x48, 0x12, 0x9c, 0x32, 0x2e, 0x76, + 0x39, 0xb6, 0x87, 0xce, 0x70, 0xc8, 0x4f, 0xe1, 0xde, 0x94, 0x59, 0x12, 0x46, 0x05, 0x19, 0x2b, + 0x93, 0xe6, 0x89, 0x79, 0x5b, 0xb0, 0x32, 0xb1, 0xeb, 0x09, 0xab, 0x8a, 0xac, 0x3b, 0xe9, 0xdd, + 0xd7, 0x9c, 0xe6, 0x3f, 0x17, 0x61, 0x79, 0x2a, 0x9b, 0xff, 0x9f, 0x7b, 0xe7, 0x0b, 0x30, 0xd8, + 0x5b, 0x9f, 0xf5, 0x65, 0xa2, 0x31, 0x0c, 0xa8, 0x1d, 0xa9, 0x02, 0x04, 0xd9, 0xb9, 0x9b, 0xd8, + 0xab, 0x31, 0xe5, 0x79, 0xc2, 0x40, 0x65, 0x0f, 0xa1, 0x2c, 0xfa, 0xe7, 0x9e, 0xe7, 0xa4, 0x7d, + 0x18, 0x94, 0xe8, 0xbd, 0x78, 0xf0, 0x26, 0x10, 0x9b, 0x0d, 0x03, 0xc6, 0x26, 0xfc, 0x34, 0xaf, + 0x6f, 0xc7, 0x86, 0xea, 0x4b, 0x3b, 0xab, 0x39, 0x91, 0x22, 0xaa, 0x7e, 0xfd, 0xa2, 0x30, 0xbb, + 0x66, 0xd8, 0x43, 0xe8, 0xfe, 0x42, 0x3a, 0x93, 0x54, 0x32, 0x99, 0xc6, 0xa7, 0x47, 0x2f, 0xe2, + 0x2a, 0xd3, 0xa2, 0x29, 0xdf, 0x2c, 0xfd, 0x27, 0xbe, 0xd9, 0x82, 0x3b, 0x29, 0x13, 0x26, 0x5e, + 0x03, 0x38, 0x48, 0x63, 0x6c, 0xca, 0xd8, 0xcf, 0x1e, 0x41, 0x4d, 0x5b, 0x64, 0xd2, 0x91, 0xab, + 0x4a, 0xaa, 0x61, 0x3b, 0x45, 0xc8, 0x2b, 0x41, 0xf3, 0x0f, 0x19, 0xc8, 0xeb, 0x85, 0xec, 0xcb, + 0x85, 0x20, 0x17, 0xef, 0x3b, 0x15, 0x8c, 0x1f, 0xcf, 0x61, 0x16, 0x95, 0x0b, 0xdb, 0xc9, 0xb7, + 0xdc, 0x78, 0xad, 0x29, 0x15, 0x92, 0x35, 0x00, 0x37, 0xfe, 0x87, 0xb0, 0x8c, 0x6f, 0x64, 0xc2, + 0xf2, 0x06, 0x96, 0x08, 0x23, 0xfb, 0xca, 0xc8, 0x61, 0x6e, 0x55, 0x55, 0xe2, 0xe3, 0xc1, 0x89, + 0x14, 0x36, 0xff, 0x91, 0x83, 0x62, 0x5c, 0x40, 0x92, 0x0f, 0xa1, 0x62, 0x73, 0xe1, 0x3b, 0xf4, + 0x2a, 0x9d, 0x35, 0x96, 0xb5, 0x0c, 0xf5, 0x4e, 0xed, 0x45, 0xf6, 0xdd, 0xbd, 0xd0, 0xf7, 0x52, + 0x6e, 0xfc, 0x6a, 0xb5, 0x95, 0xbc, 0x2f, 0xa1, 0x97, 0x2f, 0xde, 0xe4, 0xe5, 0xfa, 0x59, 0x09, + 0x3d, 0x7b, 0x2b, 0x79, 0x56, 0x42, 0xce, 0xd2, 0x8d, 0x1c, 0x85, 0x42, 0xce, 0x87, 0x50, 0x09, + 0x19, 0x1d, 0x59, 0x23, 0xa6, 0x9e, 0x4c, 0xf2, 0xaa, 0xde, 0x93, 0xb2, 0x43, 0x25, 0x22, 0xbb, + 0x50, 0x56, 0x75, 0xb6, 0x15, 0xc9, 0x42, 0xac, 0x30, 0x77, 0x79, 0x0e, 0x8a, 0x76, 0x26, 0xab, + 0xb5, 0x2d, 0x58, 0xd1, 0x95, 0xf6, 0xd5, 0xa4, 0xd3, 0x28, 0xcf, 0xbc, 0x13, 0x77, 0xa6, 0xdd, + 0xe6, 0x1b, 0x78, 0x90, 0x70, 0xae, 0x8b, 0x6d, 0x25, 0xa4, 0xae, 0xc5, 0x98, 0x6b, 0x02, 0x9c, + 0x74, 0xd4, 0xf1, 0xd4, 0x53, 0x8e, 0x9a, 0x43, 0x47, 0x4d, 0xa6, 0x17, 0x07, 0xb7, 0xbf, 0x64, + 0xa1, 0x9c, 0xaa, 0xe8, 0x89, 0x01, 
0x05, 0x1a, 0x85, 0xe7, 0x5e, 0x20, 0x8c, 0x0c, 0x72, 0xe2, + 0x26, 0xb9, 0x0b, 0x4b, 0xea, 0x8a, 0x52, 0xbb, 0xa9, 0x1a, 0xd3, 0x3b, 0x9d, 0x7b, 0x77, 0xa7, + 0x0d, 0x28, 0x5c, 0x78, 0x51, 0xe0, 0x52, 0x47, 0x47, 0x9e, 0xb8, 0x29, 0xf3, 0xb1, 0x4b, 0xcf, + 0x91, 0xd5, 0xa1, 0x7a, 0xa5, 0xd4, 0x2d, 0xac, 0x82, 0xe5, 0x94, 0xc4, 0x39, 0x0b, 0xf4, 0x85, + 0x38, 0x16, 0x90, 0xaf, 0xa0, 0x9e, 0x7a, 0x75, 0x50, 0x1b, 0x5f, 0xb8, 0x69, 0xe3, 0x97, 0x53, + 0x50, 0xdc, 0xfd, 0x8f, 0x27, 0xd9, 0x78, 0xc2, 0xd4, 0x86, 0xa4, 0xa1, 0x78, 0x7a, 0x08, 0x2c, + 0x72, 0xd1, 0x73, 0xb5, 0xd1, 0xf1, 0xbb, 0xf9, 0xd7, 0x1c, 0xe4, 0xd5, 0xbb, 0xc5, 0x3c, 0xc7, + 0xe0, 0x01, 0x94, 0xb8, 0x7b, 0xc9, 0xdc, 0x50, 0x9a, 0x33, 0x8b, 0xe6, 0x1c, 0x0b, 0x64, 0x72, + 0xa1, 0x9e, 0x3e, 0x2c, 0x11, 0xd2, 0x30, 0x12, 0x71, 0x06, 0xa2, 0x84, 0x27, 0x28, 0x23, 0x5f, + 0x03, 0x99, 0x00, 0xdd, 0x72, 0x38, 0xea, 0x69, 0x32, 0x2e, 0x78, 0xac, 0x60, 0xc0, 0x1d, 0xee, + 0x0e, 0x6f, 0x39, 0x29, 0x5a, 0xc1, 0x33, 0xc4, 0xc6, 0x89, 0x92, 0x56, 0xe0, 0x0d, 0x06, 0xbc, + 0x1f, 0xa7, 0x28, 0x7a, 0x9a, 0xc7, 0x28, 0x4b, 0x81, 0x74, 0x86, 0x5d, 0x48, 0x83, 0xd4, 0x2b, + 0x23, 0xf9, 0x34, 0x99, 0xca, 0xbb, 0x81, 0xba, 0xa1, 0x7a, 0xd2, 0x97, 0xc4, 0xd4, 0x29, 0x2c, + 0xfd, 0x37, 0xa7, 0x70, 0xe7, 0xb7, 0x19, 0x78, 0xd8, 0xf7, 0x46, 0xb3, 0x58, 0x3b, 0x77, 0xf5, + 0x33, 0x8d, 0xc9, 0xd4, 0x93, 0x7a, 0x57, 0x5e, 0x05, 0xdd, 0xcc, 0xaf, 0xb7, 0x35, 0x69, 0xe8, + 0x39, 0xd4, 0x1d, 0xb6, 0xbc, 0x60, 0xb8, 0x39, 0x64, 0x2e, 0x5e, 0x14, 0x9b, 0xaa, 0x8b, 0xfa, + 0x5c, 0x5c, 0xfb, 0xab, 0xc5, 0x97, 0xaa, 0xf9, 0xa7, 0x6c, 0x6e, 0xf7, 0xf4, 0xa4, 0x97, 0x47, + 0xce, 0xe7, 0xff, 0x0e, 0x00, 0x00, 0xff, 0xff, 0xbe, 0x13, 0x82, 0x47, 0x78, 0x19, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile_service.pb.go new file mode 100644 index 0000000..5689f40 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/profile_service.pb.go @@ -0,0 +1,1156 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/profile_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// List profiles request. +type ListProfilesRequest struct { + // Required. + // + // The resource name of the tenant under which the job is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. 
+ // + // The token that specifies the current offset (that is, starting result). + // + // Please set the value to [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] to + // continue the list. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The maximum number of profiles to be returned, at most 100. + // + // Default is 100 unless a positive number smaller than 100 is specified. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // A field mask to specify the profile fields to be listed in response. + // All fields are listed if it is unset. + // + // Valid values are: + // + // * name + ReadMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=read_mask,json=readMask,proto3" json:"read_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProfilesRequest) Reset() { *m = ListProfilesRequest{} } +func (m *ListProfilesRequest) String() string { return proto.CompactTextString(m) } +func (*ListProfilesRequest) ProtoMessage() {} +func (*ListProfilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{0} +} +func (m *ListProfilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProfilesRequest.Unmarshal(m, b) +} +func (m *ListProfilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProfilesRequest.Marshal(b, m, deterministic) +} +func (dst *ListProfilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProfilesRequest.Merge(dst, src) +} +func (m *ListProfilesRequest) XXX_Size() int { + return xxx_messageInfo_ListProfilesRequest.Size(m) +} +func (m *ListProfilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProfilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProfilesRequest proto.InternalMessageInfo + +func (m *ListProfilesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProfilesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListProfilesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProfilesRequest) GetReadMask() *field_mask.FieldMask { + if m != nil { + return m.ReadMask + } + return nil +} + +// The List profiles response object. +type ListProfilesResponse struct { + // Profiles for the specific tenant. + Profiles []*Profile `protobuf:"bytes,1,rep,name=profiles,proto3" json:"profiles,omitempty"` + // A token to retrieve the next page of results. This is empty if there are no + // more results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProfilesResponse) Reset() { *m = ListProfilesResponse{} } +func (m *ListProfilesResponse) String() string { return proto.CompactTextString(m) } +func (*ListProfilesResponse) ProtoMessage() {} +func (*ListProfilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{1} +} +func (m *ListProfilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProfilesResponse.Unmarshal(m, b) +} +func (m *ListProfilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProfilesResponse.Marshal(b, m, deterministic) +} +func (dst *ListProfilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProfilesResponse.Merge(dst, src) +} +func (m *ListProfilesResponse) XXX_Size() int { + return xxx_messageInfo_ListProfilesResponse.Size(m) +} +func (m *ListProfilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProfilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProfilesResponse proto.InternalMessageInfo + +func (m *ListProfilesResponse) GetProfiles() []*Profile { + if m != nil { + return m.Profiles + } + return nil +} + +func (m *ListProfilesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Create profile request. +type CreateProfileRequest struct { + // Required. + // + // The name of the tenant this profile belongs to. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The profile to be created. + Profile *Profile `protobuf:"bytes,2,opt,name=profile,proto3" json:"profile,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProfileRequest) Reset() { *m = CreateProfileRequest{} } +func (m *CreateProfileRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProfileRequest) ProtoMessage() {} +func (*CreateProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{2} +} +func (m *CreateProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProfileRequest.Unmarshal(m, b) +} +func (m *CreateProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProfileRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProfileRequest.Merge(dst, src) +} +func (m *CreateProfileRequest) XXX_Size() int { + return xxx_messageInfo_CreateProfileRequest.Size(m) +} +func (m *CreateProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProfileRequest proto.InternalMessageInfo + +func (m *CreateProfileRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProfileRequest) GetProfile() *Profile { + if m != nil { + return m.Profile + } + return nil +} + +// Get profile request. +type GetProfileRequest struct { + // Required. + // + // Resource name of the profile to get. 
+ // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", + // for example, "projects/api-test-project/tenants/foo/profiles/bar". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProfileRequest) Reset() { *m = GetProfileRequest{} } +func (m *GetProfileRequest) String() string { return proto.CompactTextString(m) } +func (*GetProfileRequest) ProtoMessage() {} +func (*GetProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{3} +} +func (m *GetProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProfileRequest.Unmarshal(m, b) +} +func (m *GetProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProfileRequest.Marshal(b, m, deterministic) +} +func (dst *GetProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProfileRequest.Merge(dst, src) +} +func (m *GetProfileRequest) XXX_Size() int { + return xxx_messageInfo_GetProfileRequest.Size(m) +} +func (m *GetProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProfileRequest proto.InternalMessageInfo + +func (m *GetProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Update profile request +type UpdateProfileRequest struct { + // Required. + // + // Profile to be updated. + Profile *Profile `protobuf:"bytes,1,opt,name=profile,proto3" json:"profile,omitempty"` + // Optional. + // + // A field mask to specify the profile fields to update. + // + // A full update is performed if it is unset. 
+ // + // Valid values are: + // + // * externalId + // * source + // * uri + // * isHirable + // * createTime + // * updateTime + // * resumeHrxml + // * personNames + // * addresses + // * emailAddresses + // * phoneNumbers + // * personalUris + // * additionalContactInfo + // * employmentRecords + // * educationRecords + // * skills + // * projects + // * publications + // * patents + // * certifications + // * recruitingNotes + // * customAttributes + // * groupId + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProfileRequest) Reset() { *m = UpdateProfileRequest{} } +func (m *UpdateProfileRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProfileRequest) ProtoMessage() {} +func (*UpdateProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{4} +} +func (m *UpdateProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProfileRequest.Unmarshal(m, b) +} +func (m *UpdateProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProfileRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProfileRequest.Merge(dst, src) +} +func (m *UpdateProfileRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProfileRequest.Size(m) +} +func (m *UpdateProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProfileRequest proto.InternalMessageInfo + +func (m *UpdateProfileRequest) GetProfile() *Profile { + if m != nil { + return m.Profile + } + return nil +} + +func (m *UpdateProfileRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Delete profile request. +type DeleteProfileRequest struct { + // Required. + // + // Resource name of the profile to be deleted. + // + // The format is + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", + // for example, "projects/api-test-project/tenants/foo/profiles/bar". 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProfileRequest) Reset() { *m = DeleteProfileRequest{} } +func (m *DeleteProfileRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProfileRequest) ProtoMessage() {} +func (*DeleteProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{5} +} +func (m *DeleteProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProfileRequest.Unmarshal(m, b) +} +func (m *DeleteProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProfileRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProfileRequest.Merge(dst, src) +} +func (m *DeleteProfileRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProfileRequest.Size(m) +} +func (m *DeleteProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProfileRequest proto.InternalMessageInfo + +func (m *DeleteProfileRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request body of the `SearchProfiles` call. +type SearchProfilesRequest struct { + // Required. + // + // The resource name of the tenant to search within. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The meta information collected about the profile search user. This is used + // to improve the search quality of the service. These values are provided by + // users, and must be precise and consistent. + RequestMetadata *RequestMetadata `protobuf:"bytes,2,opt,name=request_metadata,json=requestMetadata,proto3" json:"request_metadata,omitempty"` + // Optional. + // + // Search query to execute. See [ProfileQuery][google.cloud.talent.v4beta1.ProfileQuery] for more details. + ProfileQuery *ProfileQuery `protobuf:"bytes,3,opt,name=profile_query,json=profileQuery,proto3" json:"profile_query,omitempty"` + // Optional. + // + // A limit on the number of profiles returned in the search results. + // A value above the default value 10 can increase search response time. + // + // The maximum value allowed is 100. Otherwise an error is thrown. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. + // + // The pageToken, similar to offset enables users of the API to paginate + // through the search results. To retrieve the first page of results, set the + // pageToken to empty. The search response includes a + // [nextPageToken][google.cloud.talent.v4beta1.SearchProfilesResponse.next_page_token] field that can be + // used to populate the pageToken field for the next page of results. Using + // pageToken instead of offset increases the performance of the API, + // especially compared to larger offset values. + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // An integer that specifies the current offset (that is, starting result) in + // search results. 
This field is only considered if [page_token][google.cloud.talent.v4beta1.SearchProfilesRequest.page_token] is unset. + // + // The maximum allowed value is 5000. Otherwise an error is thrown. + // + // For example, 0 means to search from the first profile, and 10 means to + // search from the 11th profile. This can be used for pagination, for example + // pageSize = 10 and offset = 10 means to search from the second page. + Offset int32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` + // Optional. + // + // This flag controls the spell-check feature. If `false`, the + // service attempts to correct a misspelled query. + // + // For example, "enginee" is corrected to "engineer". + DisableSpellCheck bool `protobuf:"varint,7,opt,name=disable_spell_check,json=disableSpellCheck,proto3" json:"disable_spell_check,omitempty"` + // Optional. + // + // The criteria that determine how search results are sorted. + // Default is "relevance desc" if no value is specified. + // + // Supported options are: + // + // * "relevance desc": By descending relevance, as determined by the API + // algorithms. + // * "update_date desc": Sort by [Profile.update_date][] in descending order + // (recently updated profiles first). + // * "create_date desc": Sort by [Profile.create_date][] in descending order + // (recently created profiles first). + // * "first_name": Sort by [PersonStructuredName.given_name][] in ascending + // order. + // * "first_name desc": Sort by [PersonStructuredName.given_name][] in + // descending order. + // * "last_name": Sort by [PersonStructuredName.family_name][] in ascending + // order. + // * "last_name desc": Sort by [PersonStructuredName.family_name][] in + // descending order. + OrderBy string `protobuf:"bytes,8,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Optional. + // + // When the sort-by field is based on alphabetical order, sort values case + // sensitively (based on ASCII) when the value is set to true. The default + // is a case-insensitive sort (false). + CaseSensitiveSort bool `protobuf:"varint,9,opt,name=case_sensitive_sort,json=caseSensitiveSort,proto3" json:"case_sensitive_sort,omitempty"` + // Optional. + // + // A list of expressions that specify histogram requests against matching + // profiles for [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest]. + // + // The expression syntax looks like a function definition with optional + // parameters. + // + // Function syntax: function_name(histogram_facet[, list of buckets]) + // + // Data types: + // + // * Histogram facet: facet names with format [a-zA-Z][a-zA-Z0-9_]+. + // * String: string like "any string with backslash escape for quote(\")." + // * Number: whole numbers and floating point numbers like 10, -1 and -0.01. + // * List: list of elements with a comma (,) separator, surrounded by square + // brackets. For example, [1, 2, 3] and ["one", "two", "three"]. + // + // Built-in constants: + // + // * MIN (minimum number, similar to Java Double.MIN_VALUE) + // * MAX (maximum number, similar to Java Double.MAX_VALUE) + // + // Built-in functions: + // + // * bucket(start, end[, label]) + // The bucket built-in function creates a bucket with the range [start, end). Note + // that the end is exclusive. + // For example, bucket(1, MAX, "positive number") or bucket(1, 10).
+ // + // Histogram Facets: + // + // * admin1: Admin1 is a global placeholder for referring to state, province, + // or the particular term a country uses to define the geographic structure + // below the country level. Examples include state codes such as "CA", "IL", + // "NY", and provinces, such as "BC". + // * locality: Locality is a global placeholder for referring to city, town, + // or the particular term a country uses to define the geographic structure + // below the admin1 level. Examples include city names such as + // "Mountain View" and "New York". + // * extended_locality: Extended locality is a concatenated version of admin1 + // and locality with a comma separator. For example, "Mountain View, CA" and + // "New York, NY". + // * postal_code: Postal code of the profile, which follows the locale code. + // * country: Country code (ISO-3166-1 alpha-2 code) of the profile, such as US, + // JP, GB. + // * job_title: Normalized job titles specified in EmploymentHistory. + // * company_name: Normalized company name of profiles to match on. + // * institution: The school name. For example, "MIT", + // "University of California, Berkeley". + // * degree: Highest education degree in ISCED code. Each value in degree + // covers a specific level of education, without any expansion to upper or + // lower levels of education degree. + // * experience_in_months: Experience in months. 0 means 0 months to 1 month + // (exclusive). + // * application_date: The application date specifies application start dates. + // See [ApplicationDateFilter][google.cloud.talent.v4beta1.ApplicationDateFilter] for more details. + // * application_outcome_notes: The application outcome reason specifies the + // reasons behind the outcome of the job application. + // See [ApplicationOutcomeNotesFilter][google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter] for more details. + // * application_job_title: The application job title specifies the job + // applied for in the application. + // See [ApplicationJobFilter][google.cloud.talent.v4beta1.ApplicationJobFilter] for more details. + // * hirable_status: Hirable status specifies the profile's hirable status. + // * string_custom_attribute: String custom attributes. Values can be accessed + // via square bracket notation like string_custom_attribute["key1"]. + // * numeric_custom_attribute: Numeric custom attributes. Values can be + // accessed via square bracket notation like numeric_custom_attribute["key1"].
+ // + // Example expressions: + // + // * count(admin1) + // * count(experience_in_months, [bucket(0, 12, "1 year"), + // bucket(12, 36, "1-3 years"), bucket(36, MAX, "3+ years")]) + // * count(string_custom_attribute["assigned_recruiter"]) + // * count(numeric_custom_attribute["favorite_number"], + // [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative")]) + HistogramQueries []*HistogramQuery `protobuf:"bytes,10,rep,name=histogram_queries,json=histogramQueries,proto3" json:"histogram_queries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchProfilesRequest) Reset() { *m = SearchProfilesRequest{} } +func (m *SearchProfilesRequest) String() string { return proto.CompactTextString(m) } +func (*SearchProfilesRequest) ProtoMessage() {} +func (*SearchProfilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{6} +} +func (m *SearchProfilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchProfilesRequest.Unmarshal(m, b) +} +func (m *SearchProfilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchProfilesRequest.Marshal(b, m, deterministic) +} +func (dst *SearchProfilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchProfilesRequest.Merge(dst, src) +} +func (m *SearchProfilesRequest) XXX_Size() int { + return xxx_messageInfo_SearchProfilesRequest.Size(m) +} +func (m *SearchProfilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchProfilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchProfilesRequest proto.InternalMessageInfo + +func (m *SearchProfilesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *SearchProfilesRequest) GetRequestMetadata() *RequestMetadata { + if m != nil { + return m.RequestMetadata + } + return nil +} + +func (m *SearchProfilesRequest) GetProfileQuery() *ProfileQuery { + if m != nil { + return m.ProfileQuery + } + return nil +} + +func (m *SearchProfilesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchProfilesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchProfilesRequest) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *SearchProfilesRequest) GetDisableSpellCheck() bool { + if m != nil { + return m.DisableSpellCheck + } + return false +} + +func (m *SearchProfilesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *SearchProfilesRequest) GetCaseSensitiveSort() bool { + if m != nil { + return m.CaseSensitiveSort + } + return false +} + +func (m *SearchProfilesRequest) GetHistogramQueries() []*HistogramQuery { + if m != nil { + return m.HistogramQueries + } + return nil +} + +// Response of SearchProfiles method. +type SearchProfilesResponse struct { + // An estimation of the number of profiles that match the specified query. + // + // This number isn't guaranteed to be accurate. + EstimatedTotalSize int64 `protobuf:"varint,1,opt,name=estimated_total_size,json=estimatedTotalSize,proto3" json:"estimated_total_size,omitempty"` + // The spell checking result, and correction. 
+ SpellCorrection *SpellingCorrection `protobuf:"bytes,2,opt,name=spell_correction,json=spellCorrection,proto3" json:"spell_correction,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. + Metadata *ResponseMetadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + // A token to retrieve the next page of results. This is empty if there are no + // more results. + NextPageToken string `protobuf:"bytes,4,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The histogram results that match with specified + // [SearchProfilesRequest.histogram_queries][google.cloud.talent.v4beta1.SearchProfilesRequest.histogram_queries]. + HistogramQueryResults []*HistogramQueryResult `protobuf:"bytes,5,rep,name=histogram_query_results,json=histogramQueryResults,proto3" json:"histogram_query_results,omitempty"` + // The profile entities that match the specified [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest]. + SummarizedProfiles []*SummarizedProfile `protobuf:"bytes,6,rep,name=summarized_profiles,json=summarizedProfiles,proto3" json:"summarized_profiles,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchProfilesResponse) Reset() { *m = SearchProfilesResponse{} } +func (m *SearchProfilesResponse) String() string { return proto.CompactTextString(m) } +func (*SearchProfilesResponse) ProtoMessage() {} +func (*SearchProfilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{7} +} +func (m *SearchProfilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchProfilesResponse.Unmarshal(m, b) +} +func (m *SearchProfilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchProfilesResponse.Marshal(b, m, deterministic) +} +func (dst *SearchProfilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchProfilesResponse.Merge(dst, src) +} +func (m *SearchProfilesResponse) XXX_Size() int { + return xxx_messageInfo_SearchProfilesResponse.Size(m) +} +func (m *SearchProfilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchProfilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchProfilesResponse proto.InternalMessageInfo + +func (m *SearchProfilesResponse) GetEstimatedTotalSize() int64 { + if m != nil { + return m.EstimatedTotalSize + } + return 0 +} + +func (m *SearchProfilesResponse) GetSpellCorrection() *SpellingCorrection { + if m != nil { + return m.SpellCorrection + } + return nil +} + +func (m *SearchProfilesResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *SearchProfilesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *SearchProfilesResponse) GetHistogramQueryResults() []*HistogramQueryResult { + if m != nil { + return m.HistogramQueryResults + } + return nil +} + +func (m *SearchProfilesResponse) GetSummarizedProfiles() []*SummarizedProfile { + if m != nil { + return m.SummarizedProfiles + } + return nil +} + +// Output only. +// +// Profile entry with metadata inside [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse]. +type SummarizedProfile struct { + // A list of profiles that are linked by [Profile.cluster_id][]. 
+ Profiles []*Profile `protobuf:"bytes,1,rep,name=profiles,proto3" json:"profiles,omitempty"` + // A profile summary shows the profile summary and how the profile matches the + // search query. + // + // In profile summary, the profiles with the same [Profile.cluster_id][] are + // merged together. Among profiles, same education/employment records may be + // slightly different but they are merged into one with best efforts. + // + // For example, in one profile the school name is "UC Berkeley" and the field + // study is "Computer Science" and in another one the school name is + // "University of California at Berkeley" and the field study is "CS". The API + // merges these two inputs into one and selects one value for each field. For + // example, the school name in summary is set to "University of California at + // Berkeley" and the field of study is set to "Computer Science". + Summary *Profile `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SummarizedProfile) Reset() { *m = SummarizedProfile{} } +func (m *SummarizedProfile) String() string { return proto.CompactTextString(m) } +func (*SummarizedProfile) ProtoMessage() {} +func (*SummarizedProfile) Descriptor() ([]byte, []int) { + return fileDescriptor_profile_service_7ea8377f2d7ce92e, []int{8} +} +func (m *SummarizedProfile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SummarizedProfile.Unmarshal(m, b) +} +func (m *SummarizedProfile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SummarizedProfile.Marshal(b, m, deterministic) +} +func (dst *SummarizedProfile) XXX_Merge(src proto.Message) { + xxx_messageInfo_SummarizedProfile.Merge(dst, src) +} +func (m *SummarizedProfile) XXX_Size() int { + return xxx_messageInfo_SummarizedProfile.Size(m) +} +func (m *SummarizedProfile) XXX_DiscardUnknown() { + xxx_messageInfo_SummarizedProfile.DiscardUnknown(m) +} + +var xxx_messageInfo_SummarizedProfile proto.InternalMessageInfo + +func (m *SummarizedProfile) GetProfiles() []*Profile { + if m != nil { + return m.Profiles + } + return nil +} + +func (m *SummarizedProfile) GetSummary() *Profile { + if m != nil { + return m.Summary + } + return nil +} + +func init() { + proto.RegisterType((*ListProfilesRequest)(nil), "google.cloud.talent.v4beta1.ListProfilesRequest") + proto.RegisterType((*ListProfilesResponse)(nil), "google.cloud.talent.v4beta1.ListProfilesResponse") + proto.RegisterType((*CreateProfileRequest)(nil), "google.cloud.talent.v4beta1.CreateProfileRequest") + proto.RegisterType((*GetProfileRequest)(nil), "google.cloud.talent.v4beta1.GetProfileRequest") + proto.RegisterType((*UpdateProfileRequest)(nil), "google.cloud.talent.v4beta1.UpdateProfileRequest") + proto.RegisterType((*DeleteProfileRequest)(nil), "google.cloud.talent.v4beta1.DeleteProfileRequest") + proto.RegisterType((*SearchProfilesRequest)(nil), "google.cloud.talent.v4beta1.SearchProfilesRequest") + proto.RegisterType((*SearchProfilesResponse)(nil), "google.cloud.talent.v4beta1.SearchProfilesResponse") + proto.RegisterType((*SummarizedProfile)(nil), "google.cloud.talent.v4beta1.SummarizedProfile") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// ProfileServiceClient is the client API for ProfileService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProfileServiceClient interface { + // Lists profiles by filter. The order is unspecified. + ListProfiles(ctx context.Context, in *ListProfilesRequest, opts ...grpc.CallOption) (*ListProfilesResponse, error) + // Creates and returns a new profile. + CreateProfile(ctx context.Context, in *CreateProfileRequest, opts ...grpc.CallOption) (*Profile, error) + // Gets the specified profile. + GetProfile(ctx context.Context, in *GetProfileRequest, opts ...grpc.CallOption) (*Profile, error) + // Updates the specified profile and returns the updated result. + UpdateProfile(ctx context.Context, in *UpdateProfileRequest, opts ...grpc.CallOption) (*Profile, error) + // Deletes the specified profile. + // Prerequisite: The profile has no associated applications or assignments + // associated. + DeleteProfile(ctx context.Context, in *DeleteProfileRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Searches for profiles within a tenant. + // + // For example, search by raw queries "software engineer in Mountain View" or + // search by structured filters (location filter, education filter, etc.). + // + // See [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest] for more information. + SearchProfiles(ctx context.Context, in *SearchProfilesRequest, opts ...grpc.CallOption) (*SearchProfilesResponse, error) +} + +type profileServiceClient struct { + cc *grpc.ClientConn +} + +func NewProfileServiceClient(cc *grpc.ClientConn) ProfileServiceClient { + return &profileServiceClient{cc} +} + +func (c *profileServiceClient) ListProfiles(ctx context.Context, in *ListProfilesRequest, opts ...grpc.CallOption) (*ListProfilesResponse, error) { + out := new(ListProfilesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/ListProfiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profileServiceClient) CreateProfile(ctx context.Context, in *CreateProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/CreateProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profileServiceClient) GetProfile(ctx context.Context, in *GetProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/GetProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profileServiceClient) UpdateProfile(ctx context.Context, in *UpdateProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/UpdateProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profileServiceClient) DeleteProfile(ctx context.Context, in *DeleteProfileRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/DeleteProfile", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *profileServiceClient) SearchProfiles(ctx context.Context, in *SearchProfilesRequest, opts ...grpc.CallOption) (*SearchProfilesResponse, error) { + out := new(SearchProfilesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.ProfileService/SearchProfiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProfileServiceServer is the server API for ProfileService service. +type ProfileServiceServer interface { + // Lists profiles by filter. The order is unspecified. + ListProfiles(context.Context, *ListProfilesRequest) (*ListProfilesResponse, error) + // Creates and returns a new profile. + CreateProfile(context.Context, *CreateProfileRequest) (*Profile, error) + // Gets the specified profile. + GetProfile(context.Context, *GetProfileRequest) (*Profile, error) + // Updates the specified profile and returns the updated result. + UpdateProfile(context.Context, *UpdateProfileRequest) (*Profile, error) + // Deletes the specified profile. + // Prerequisite: The profile has no associated applications or assignments + // associated. + DeleteProfile(context.Context, *DeleteProfileRequest) (*empty.Empty, error) + // Searches for profiles within a tenant. + // + // For example, search by raw queries "software engineer in Mountain View" or + // search by structured filters (location filter, education filter, etc.). + // + // See [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest] for more information. + SearchProfiles(context.Context, *SearchProfilesRequest) (*SearchProfilesResponse, error) +} + +func RegisterProfileServiceServer(s *grpc.Server, srv ProfileServiceServer) { + s.RegisterService(&_ProfileService_serviceDesc, srv) +} + +func _ProfileService_ListProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProfilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).ListProfiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/ListProfiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).ListProfiles(ctx, req.(*ListProfilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfileService_CreateProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).CreateProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/CreateProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).CreateProfile(ctx, req.(*CreateProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfileService_GetProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).GetProfile(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/GetProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).GetProfile(ctx, req.(*GetProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfileService_UpdateProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).UpdateProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/UpdateProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).UpdateProfile(ctx, req.(*UpdateProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfileService_DeleteProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).DeleteProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/DeleteProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).DeleteProfile(ctx, req.(*DeleteProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfileService_SearchProfiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchProfilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfileServiceServer).SearchProfiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.ProfileService/SearchProfiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfileServiceServer).SearchProfiles(ctx, req.(*SearchProfilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProfileService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.ProfileService", + HandlerType: (*ProfileServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListProfiles", + Handler: _ProfileService_ListProfiles_Handler, + }, + { + MethodName: "CreateProfile", + Handler: _ProfileService_CreateProfile_Handler, + }, + { + MethodName: "GetProfile", + Handler: _ProfileService_GetProfile_Handler, + }, + { + MethodName: "UpdateProfile", + Handler: _ProfileService_UpdateProfile_Handler, + }, + { + MethodName: "DeleteProfile", + Handler: _ProfileService_DeleteProfile_Handler, + }, + { + MethodName: "SearchProfiles", + Handler: _ProfileService_SearchProfiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/profile_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/profile_service.proto", fileDescriptor_profile_service_7ea8377f2d7ce92e) +} + +var fileDescriptor_profile_service_7ea8377f2d7ce92e = []byte{ + // 1059 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0xac, 0x56, 0xdf, 0x6e, 0x1b, 0xc5, + 0x17, 0xd6, 0xe6, 0xaf, 0x73, 0xd2, 0x34, 0xc9, 0x24, 0xcd, 0x6f, 0x7f, 0x2e, 0x08, 0x6b, 0x85, + 0xc0, 0x75, 0xe9, 0x6e, 0xe3, 0x56, 0x0a, 0x4a, 0xa1, 0x82, 0x84, 0xbf, 0x12, 0x45, 0x61, 0x1d, + 0x04, 0xea, 0xcd, 0x6a, 0x62, 0x1f, 0x3b, 0x4b, 0x76, 0x77, 0xb6, 0x33, 0xe3, 0x08, 0x07, 0x55, + 0xa2, 0xbc, 0x42, 0xc4, 0x15, 0x77, 0x15, 0x97, 0xdc, 0xf1, 0x28, 0xbc, 0x02, 0x3c, 0x07, 0x68, + 0x66, 0xc7, 0xdb, 0xd8, 0x5e, 0x6d, 0x6c, 0xc4, 0xdd, 0xce, 0x9c, 0xf3, 0x9d, 0xf3, 0xcd, 0x37, + 0x67, 0xce, 0x59, 0xd8, 0xed, 0x31, 0xd6, 0x8b, 0xd0, 0x6b, 0x47, 0xac, 0xdf, 0xf1, 0x24, 0x8d, + 0x30, 0x91, 0xde, 0xf9, 0xc3, 0x13, 0x94, 0x74, 0xd7, 0x4b, 0x39, 0xeb, 0x86, 0x11, 0x06, 0x02, + 0xf9, 0x79, 0xd8, 0x46, 0x37, 0xe5, 0x4c, 0x32, 0x72, 0x3b, 0x83, 0xb8, 0x1a, 0xe2, 0x66, 0x10, + 0xd7, 0x40, 0xaa, 0xaf, 0x99, 0x78, 0x34, 0x0d, 0x3d, 0x9a, 0x24, 0x4c, 0x52, 0x19, 0xb2, 0x44, + 0x64, 0xd0, 0x6a, 0xbd, 0x2c, 0x5b, 0x9b, 0xc5, 0x31, 0x4b, 0x8c, 0xe7, 0x9d, 0x32, 0xcf, 0x6e, + 0x18, 0x49, 0xe4, 0xc3, 0xa0, 0x77, 0xcb, 0x5c, 0x4f, 0x43, 0x21, 0x59, 0x8f, 0xd3, 0x78, 0x9a, + 0xb8, 0xe6, 0xbc, 0xc6, 0xd5, 0x9c, 0xd3, 0xd3, 0xab, 0x93, 0x7e, 0xd7, 0xc3, 0x38, 0x95, 0x03, + 0x63, 0xac, 0x8d, 0x1b, 0xbb, 0x21, 0x46, 0x9d, 0x20, 0xa6, 0xe2, 0x2c, 0xf3, 0x70, 0x5e, 0x5a, + 0xb0, 0xf5, 0x45, 0x28, 0xe4, 0x51, 0x16, 0x54, 0xf8, 0xf8, 0xac, 0x8f, 0x42, 0x92, 0x1d, 0x58, + 0x4a, 0x29, 0xc7, 0x44, 0xda, 0x56, 0xcd, 0xaa, 0xaf, 0xf8, 0x66, 0x45, 0x5e, 0x07, 0x48, 0x69, + 0x0f, 0x03, 0xc9, 0xce, 0x30, 0xb1, 0xe7, 0xb4, 0x6d, 0x45, 0xed, 0x1c, 0xab, 0x0d, 0x72, 0x1b, + 0xf4, 0x22, 0x10, 0xe1, 0x05, 0xda, 0xf3, 0x35, 0xab, 0xbe, 0xe8, 0x57, 0xd4, 0x46, 0x2b, 0xbc, + 0x40, 0xb2, 0x07, 0x2b, 0x1c, 0x69, 0x96, 0xde, 0x5e, 0xa8, 0x59, 0xf5, 0xd5, 0x66, 0xd5, 0x35, + 0xd7, 0x34, 0x64, 0xe8, 0x7e, 0xa2, 0x18, 0x3e, 0xa1, 0xe2, 0xcc, 0xaf, 0x28, 0x67, 0xf5, 0xe5, + 0xfc, 0x68, 0xc1, 0xf6, 0x28, 0x49, 0x91, 0xb2, 0x44, 0x20, 0xf9, 0x00, 0x2a, 0x46, 0x0d, 0x61, + 0x5b, 0xb5, 0xf9, 0xfa, 0x6a, 0xf3, 0x4d, 0xb7, 0xe4, 0xde, 0x5d, 0x13, 0xc0, 0xcf, 0x51, 0xe4, + 0x2d, 0x58, 0x4f, 0xf0, 0x7b, 0x19, 0x4c, 0x1c, 0x6a, 0x4d, 0x6d, 0x1f, 0x0d, 0x0f, 0xe6, 0x24, + 0xb0, 0x7d, 0xc8, 0x91, 0x4a, 0x1c, 0x86, 0xb8, 0x46, 0xa7, 0xc7, 0xb0, 0x6c, 0x72, 0xe8, 0x78, + 0xd3, 0x12, 0x1b, 0x82, 0x9c, 0xb7, 0x61, 0xf3, 0x53, 0x94, 0x63, 0xc9, 0x08, 0x2c, 0x24, 0x34, + 0x46, 0x93, 0x4a, 0x7f, 0x3b, 0x97, 0x16, 0x6c, 0x7f, 0x9d, 0x76, 0x26, 0x99, 0x5d, 0x61, 0x60, + 0xfd, 0x0b, 0x06, 0xe4, 0x11, 0xac, 0xf6, 0x75, 0xdc, 0xec, 0xbe, 0xe6, 0xae, 0xbd, 0x2f, 0xc8, + 0xdc, 0xf5, 0x8d, 0x35, 0x60, 0xfb, 0x23, 0x8c, 0x70, 0x82, 0x54, 0xd1, 0x09, 0x5e, 0x2c, 0xc0, + 0xad, 0x16, 0x52, 0xde, 0x3e, 0x9d, 0xb6, 0x08, 0xbf, 0x81, 0x0d, 0x9e, 0xb9, 0x04, 0x31, 0x4a, + 0xda, 0xa1, 0x92, 0x1a, 0x7e, 0xef, 0x94, 0x9e, 0xd1, 0xc4, 0x7d, 0x62, 0x30, 0xfe, 0x3a, 0x1f, + 0xdd, 0x20, 0x5f, 0xc2, 0xda, 0xb0, 0x9b, 0x3c, 0xeb, 0x23, 0x1f, 0xe8, 0x12, 0x5e, 0x6d, 0xde, + 0x99, 0x46, 0xb9, 0xaf, 0x14, 0xc0, 0xbf, 0x91, 0x5e, 0x59, 0x8d, 0x3e, 0x87, 0x85, 0xb1, 0xe7, + 0x30, 0xfa, 0x94, 0x16, 0xc7, 0x9f, 0xd2, 0x0e, 0x2c, 0xb1, 0x6e, 0x57, 0xa0, 0xb4, 0x97, 0x34, + 0xd0, 0xac, 0x88, 0x0b, 0x5b, 0x9d, 0x50, 0xd0, 0x13, 0xd5, 0xf1, 0x52, 0x8c, 0xa2, 0xa0, 0x7d, + 0x8a, 0xed, 0x33, 0x7b, 0xb9, 0x66, 0xd5, 0x2b, 0xfe, 0xa6, 0x31, 0xb5, 0x94, 0xe5, 0x50, 0x19, + 0xc8, 0xff, 0xa1, 0xc2, 0x78, 0x07, 0x79, 0x70, 0x32, 0xb0, 0x2b, 0x3a, 0xc9, 0xb2, 0x5e, 0x1f, + 0x0c, 0x54, 0xa8, 0x36, 0x15, 0xaa, 0x73, 0x26, 0x22, 0x94, 0xe1, 
0x39, 0x06, 0x82, 0x71, 0x69, + 0xaf, 0x64, 0xa1, 0x94, 0xa9, 0x35, 0xb4, 0xb4, 0x18, 0x97, 0xe4, 0x5b, 0xd8, 0xcc, 0x3b, 0x95, + 0x16, 0x28, 0x44, 0x61, 0x83, 0x7e, 0x77, 0x77, 0x4b, 0x25, 0xfa, 0x6c, 0x88, 0xca, 0x44, 0xda, + 0x38, 0xbd, 0xba, 0x0e, 0x51, 0x38, 0x7f, 0xcd, 0xc3, 0xce, 0x78, 0x0d, 0x98, 0x37, 0x7e, 0x1f, + 0xb6, 0x51, 0xc8, 0x30, 0xa6, 0x12, 0x3b, 0x81, 0x64, 0x92, 0x46, 0x99, 0x9c, 0xaa, 0x24, 0xe6, + 0x7d, 0x92, 0xdb, 0x8e, 0x95, 0x49, 0x0b, 0xfb, 0x14, 0x36, 0x8c, 0x32, 0x8c, 0x73, 0x6c, 0xab, + 0xd6, 0x6e, 0xca, 0xc3, 0x2b, 0x65, 0xa9, 0x45, 0x0b, 0x93, 0xde, 0x61, 0x0e, 0xf3, 0xd7, 0x75, + 0xa0, 0x57, 0x1b, 0xe4, 0x73, 0xa8, 0xe4, 0x25, 0x97, 0x15, 0xc7, 0xbd, 0x6b, 0x4a, 0x2e, 0x3b, + 0x46, 0x5e, 0x73, 0x39, 0xbc, 0xa8, 0xf5, 0x2c, 0x14, 0xb4, 0x1e, 0x12, 0xc2, 0xff, 0x46, 0x55, + 0x1f, 0x04, 0x1c, 0x45, 0x3f, 0x92, 0xc2, 0x5e, 0xd4, 0xda, 0xef, 0xce, 0xa2, 0xbd, 0x46, 0xfa, + 0xb7, 0x4e, 0x0b, 0x76, 0x05, 0x09, 0x60, 0x4b, 0xf4, 0xe3, 0x98, 0xf2, 0xf0, 0x02, 0x3b, 0x41, + 0xde, 0x5a, 0x97, 0x74, 0x1a, 0xb7, 0x5c, 0xbc, 0x1c, 0x37, 0x7c, 0xf2, 0x44, 0x8c, 0x6f, 0x09, + 0xe7, 0x67, 0x0b, 0x36, 0x27, 0x3c, 0xff, 0x83, 0x36, 0xfe, 0x18, 0x96, 0xb3, 0x6c, 0x83, 0xd9, + 0xda, 0xad, 0x01, 0x35, 0xff, 0x5e, 0x86, 0x9b, 0x66, 0xb3, 0x95, 0xfd, 0x46, 0x90, 0xdf, 0x2c, + 0xb8, 0x71, 0x75, 0xe8, 0x90, 0xfb, 0xa5, 0x21, 0x0b, 0x86, 0x68, 0x75, 0x77, 0x06, 0x44, 0x56, + 0x26, 0xce, 0xde, 0x4f, 0x7f, 0xfc, 0x79, 0x39, 0xb7, 0x4b, 0xbc, 0x7c, 0xdc, 0xff, 0x90, 0x35, + 0xbd, 0xf7, 0x53, 0xce, 0xbe, 0xc3, 0xb6, 0x14, 0x5e, 0xc3, 0x93, 0x98, 0xd0, 0x44, 0x7d, 0x3d, + 0xf7, 0x72, 0x05, 0x7e, 0xb5, 0x60, 0x6d, 0x64, 0x42, 0x91, 0xf2, 0xec, 0x45, 0xd3, 0xac, 0x3a, + 0x95, 0x6a, 0xce, 0xbe, 0xe6, 0xf8, 0xd0, 0x99, 0x95, 0xe3, 0xbe, 0xd5, 0x20, 0xbf, 0x58, 0x00, + 0xaf, 0x06, 0x1b, 0x29, 0xaf, 0xa9, 0x89, 0x09, 0x38, 0x25, 0xc1, 0x02, 0x11, 0xd5, 0xa4, 0x29, + 0xa4, 0x97, 0xb3, 0xf3, 0x1a, 0xcf, 0xd5, 0x9d, 0xaf, 0x8d, 0x0c, 0xd3, 0x6b, 0x44, 0x2c, 0x1a, + 0xbc, 0x53, 0x72, 0x3c, 0xd0, 0x1c, 0xdf, 0x6b, 0xee, 0x5d, 0x11, 0xd1, 0xfc, 0xd8, 0x4d, 0xc5, + 0x55, 0x89, 0x79, 0x69, 0xc1, 0xda, 0xc8, 0x98, 0xbd, 0x86, 0x6e, 0xd1, 0x48, 0xae, 0xee, 0x4c, + 0x8c, 0xf4, 0x8f, 0xd5, 0x1f, 0xe4, 0x50, 0xc4, 0xc6, 0xcc, 0x22, 0xfe, 0x6e, 0xc1, 0xcd, 0xd1, + 0x5e, 0x4e, 0x9a, 0xe5, 0xad, 0xa3, 0x68, 0xf8, 0x57, 0x1f, 0xcc, 0x84, 0x31, 0xcf, 0xe7, 0x5d, + 0x4d, 0xba, 0xe9, 0xdc, 0x9b, 0xae, 0x34, 0xf7, 0x85, 0x0e, 0xb3, 0x6f, 0x35, 0x0e, 0x5e, 0x58, + 0xf0, 0x46, 0x9b, 0xc5, 0x65, 0x49, 0x0f, 0xb6, 0x46, 0x5b, 0xc4, 0x91, 0x12, 0xec, 0xc8, 0x7a, + 0xfa, 0xa1, 0xc1, 0xf4, 0x58, 0x44, 0x93, 0x9e, 0xcb, 0x78, 0xcf, 0xeb, 0x61, 0xa2, 0xe5, 0xf4, + 0x32, 0x13, 0x4d, 0x43, 0x51, 0xf8, 0x33, 0xff, 0x28, 0x5b, 0xbe, 0x9c, 0x9b, 0x3f, 0x3c, 0x6e, + 0x9d, 0x2c, 0x69, 0xcc, 0x83, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x81, 0xa9, 0x37, 0xfb, 0xef, + 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant.pb.go new file mode 100644 index 0000000..643d024 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant.pb.go @@ -0,0 +1,182 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/talent/v4beta1/tenant.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum that represents how user data owned by the tenant is used. +type Tenant_DataUsageType int32 + +const ( + // Default value. + Tenant_DATA_USAGE_TYPE_UNSPECIFIED Tenant_DataUsageType = 0 + // Data owned by this tenant is used to improve search/recommendation + // quality across tenants. + Tenant_AGGREGATED Tenant_DataUsageType = 1 + // Data owned by this tenant is used to improve search/recommendation + // quality for this tenant only. + Tenant_ISOLATED Tenant_DataUsageType = 2 +) + +var Tenant_DataUsageType_name = map[int32]string{ + 0: "DATA_USAGE_TYPE_UNSPECIFIED", + 1: "AGGREGATED", + 2: "ISOLATED", +} +var Tenant_DataUsageType_value = map[string]int32{ + "DATA_USAGE_TYPE_UNSPECIFIED": 0, + "AGGREGATED": 1, + "ISOLATED": 2, +} + +func (x Tenant_DataUsageType) String() string { + return proto.EnumName(Tenant_DataUsageType_name, int32(x)) +} +func (Tenant_DataUsageType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_tenant_6085026e17b3c245, []int{0, 0} +} + +// A Tenant resource represents a tenant in the service. A tenant is a group or +// entity that shares common access with specific privileges for resources like +// profiles. Customers may create multiple tenants to provide data isolation for +// different groups. +type Tenant struct { + // Required during tenant update. + // + // The resource name for a tenant. This is generated by the service when a + // tenant is created. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // Client-side tenant identifier, used to uniquely identify the tenant. + // + // The maximum number of allowed characters is 255. + ExternalId string `protobuf:"bytes,2,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"` + // Optional. + // + // Indicates whether data owned by this tenant may be used to provide product + // improvements across other tenants. + // + // Default behavior is [DataUsageType.ISOLATED][google.cloud.talent.v4beta1.Tenant.DataUsageType.ISOLATED] if it's unset. + UsageType Tenant_DataUsageType `protobuf:"varint,3,opt,name=usage_type,json=usageType,proto3,enum=google.cloud.talent.v4beta1.Tenant_DataUsageType" json:"usage_type,omitempty"` + // Optional. + // + // A list of keys of filterable [Profile.custom_attributes][google.cloud.talent.v4beta1.Profile.custom_attributes], whose + // corresponding `string_values` are used in keyword searches. Profiles with + // `string_values` under these specified field keys are returned if any + // of the values match the search keyword.
Custom field values with + // parenthesis, brackets and special symbols are not searchable as-is, + // and must be surrounded by quotes. + KeywordSearchableProfileCustomAttributes []string `protobuf:"bytes,4,rep,name=keyword_searchable_profile_custom_attributes,json=keywordSearchableProfileCustomAttributes,proto3" json:"keyword_searchable_profile_custom_attributes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Tenant) Reset() { *m = Tenant{} } +func (m *Tenant) String() string { return proto.CompactTextString(m) } +func (*Tenant) ProtoMessage() {} +func (*Tenant) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_6085026e17b3c245, []int{0} +} +func (m *Tenant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Tenant.Unmarshal(m, b) +} +func (m *Tenant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Tenant.Marshal(b, m, deterministic) +} +func (dst *Tenant) XXX_Merge(src proto.Message) { + xxx_messageInfo_Tenant.Merge(dst, src) +} +func (m *Tenant) XXX_Size() int { + return xxx_messageInfo_Tenant.Size(m) +} +func (m *Tenant) XXX_DiscardUnknown() { + xxx_messageInfo_Tenant.DiscardUnknown(m) +} + +var xxx_messageInfo_Tenant proto.InternalMessageInfo + +func (m *Tenant) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Tenant) GetExternalId() string { + if m != nil { + return m.ExternalId + } + return "" +} + +func (m *Tenant) GetUsageType() Tenant_DataUsageType { + if m != nil { + return m.UsageType + } + return Tenant_DATA_USAGE_TYPE_UNSPECIFIED +} + +func (m *Tenant) GetKeywordSearchableProfileCustomAttributes() []string { + if m != nil { + return m.KeywordSearchableProfileCustomAttributes + } + return nil +} + +func init() { + proto.RegisterType((*Tenant)(nil), "google.cloud.talent.v4beta1.Tenant") + proto.RegisterEnum("google.cloud.talent.v4beta1.Tenant_DataUsageType", Tenant_DataUsageType_name, Tenant_DataUsageType_value) +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/tenant.proto", fileDescriptor_tenant_6085026e17b3c245) +} + +var fileDescriptor_tenant_6085026e17b3c245 = []byte{ + // 386 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0x80, 0x71, 0x52, 0x55, 0x64, 0x81, 0xaa, 0x5a, 0x2e, 0x51, 0x8b, 0x94, 0xa8, 0x27, 0x1f, + 0xd0, 0x5a, 0x05, 0x6e, 0x9c, 0xdc, 0xc4, 0x44, 0x91, 0x50, 0xb1, 0x6c, 0xe7, 0x00, 0x07, 0x56, + 0x13, 0x67, 0x30, 0x16, 0xf6, 0x8e, 0xb5, 0x1e, 0x03, 0x39, 0xf2, 0x2a, 0xbc, 0x08, 0xaf, 0x86, + 0xba, 0x9b, 0x56, 0x42, 0x42, 0xb9, 0xed, 0xec, 0x7c, 0xdf, 0xfc, 0x68, 0x44, 0x58, 0x11, 0x55, + 0x0d, 0x46, 0x65, 0x43, 0xc3, 0x2e, 0x62, 0x68, 0xd0, 0x70, 0xf4, 0xfd, 0xcd, 0x16, 0x19, 0xae, + 0x23, 0x46, 0x03, 0x86, 0x55, 0x67, 0x89, 0x49, 0x5e, 0x7a, 0x52, 0x39, 0x52, 0x79, 0x52, 0x1d, + 0xc8, 0x8b, 0xa3, 0x65, 0x4a, 0x6a, 0x5b, 0x32, 0xbe, 0xcc, 0xc5, 0x8b, 0x03, 0x09, 0x5d, 0x1d, + 0x81, 0x31, 0xc4, 0xc0, 0x35, 0x99, 0xde, 0x67, 0xaf, 0xfe, 0x8c, 0xc4, 0x69, 0xe1, 0xba, 0x4a, + 0x29, 0x4e, 0x0c, 0xb4, 0x38, 0x0d, 0xe6, 0x41, 0x38, 0xc9, 0xdc, 0x5b, 0xce, 0xc4, 0x13, 0xfc, + 0xc9, 0x68, 0x0d, 0x34, 0xba, 0xde, 0x4d, 0x47, 0x2e, 0x25, 0xee, 0xbf, 0xd6, 0x3b, 0x99, 0x0a, + 0x31, 0xf4, 0x50, 0xa1, 0xe6, 0x7d, 0x87, 0xd3, 0xf1, 0x3c, 0x08, 0xcf, 0x5e, 0x5d, 0xab, 0x23, + 0x93, 0x2b, 0xdf, 0x4d, 0x2d, 0x81, 0x61, 0x73, 0x67, 0x16, 0xfb, 0x0e, 0xb3, 0xc9, 0x70, 0xff, + 0x94, 
0x9f, 0xc5, 0xcb, 0x6f, 0xb8, 0xff, 0x41, 0x76, 0xa7, 0x7b, 0x04, 0x5b, 0x7e, 0x85, 0x6d, + 0x83, 0xba, 0xb3, 0xf4, 0xa5, 0x6e, 0x50, 0x97, 0x43, 0xcf, 0xd4, 0x6a, 0x60, 0xb6, 0xf5, 0x76, + 0x60, 0xec, 0xa7, 0x27, 0xf3, 0x71, 0x38, 0xc9, 0xc2, 0x83, 0x93, 0x3f, 0x28, 0xa9, 0x37, 0x16, + 0x4e, 0x88, 0x1f, 0xf8, 0xab, 0x5b, 0xf1, 0xec, 0x9f, 0xde, 0x72, 0x26, 0x2e, 0x97, 0x71, 0x11, + 0xeb, 0x4d, 0x1e, 0xaf, 0x12, 0x5d, 0x7c, 0x4c, 0x13, 0xbd, 0xb9, 0xcd, 0xd3, 0x64, 0xb1, 0x7e, + 0xb7, 0x4e, 0x96, 0xe7, 0x8f, 0xe4, 0x99, 0x10, 0xf1, 0x6a, 0x95, 0x25, 0xab, 0xb8, 0x48, 0x96, + 0xe7, 0x81, 0x7c, 0x2a, 0x1e, 0xaf, 0xf3, 0x0f, 0xef, 0x5d, 0x34, 0xba, 0xf9, 0x15, 0x88, 0x59, + 0x49, 0xed, 0xb1, 0x9d, 0x6f, 0x9e, 0xfb, 0xa5, 0x33, 0xec, 0x69, 0xb0, 0xe5, 0xdd, 0x68, 0x4c, + 0x69, 0xf0, 0x29, 0x3e, 0x38, 0x15, 0x35, 0x60, 0x2a, 0x45, 0xb6, 0x8a, 0x2a, 0x34, 0xee, 0x30, + 0x91, 0x4f, 0x41, 0x57, 0xf7, 0xff, 0xbd, 0xf1, 0x5b, 0x1f, 0xfe, 0x1e, 0x8d, 0x17, 0x45, 0xbe, + 0x3d, 0x75, 0xce, 0xeb, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x27, 0xbe, 0xcd, 0xd1, 0x5d, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant_service.pb.go new file mode 100644 index 0000000..0f56aed --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/talent/v4beta1/tenant_service.pb.go @@ -0,0 +1,632 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/talent/v4beta1/tenant_service.proto + +package talent // import "google.golang.org/genproto/googleapis/cloud/talent/v4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The Request of the CreateTenant method. +type CreateTenantRequest struct { + // Required. + // + // Resource name of the project under which the tenant is created. + // + // The format is "projects/{project_id}", for example, + // "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The tenant to be created. 
+ Tenant *Tenant `protobuf:"bytes,2,opt,name=tenant,proto3" json:"tenant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTenantRequest) Reset() { *m = CreateTenantRequest{} } +func (m *CreateTenantRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTenantRequest) ProtoMessage() {} +func (*CreateTenantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{0} +} +func (m *CreateTenantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTenantRequest.Unmarshal(m, b) +} +func (m *CreateTenantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTenantRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTenantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTenantRequest.Merge(dst, src) +} +func (m *CreateTenantRequest) XXX_Size() int { + return xxx_messageInfo_CreateTenantRequest.Size(m) +} +func (m *CreateTenantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTenantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTenantRequest proto.InternalMessageInfo + +func (m *CreateTenantRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTenantRequest) GetTenant() *Tenant { + if m != nil { + return m.Tenant + } + return nil +} + +// Request for getting a tenant by name. +type GetTenantRequest struct { + // Required. + // + // The resource name of the tenant to be retrieved. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTenantRequest) Reset() { *m = GetTenantRequest{} } +func (m *GetTenantRequest) String() string { return proto.CompactTextString(m) } +func (*GetTenantRequest) ProtoMessage() {} +func (*GetTenantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{1} +} +func (m *GetTenantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTenantRequest.Unmarshal(m, b) +} +func (m *GetTenantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTenantRequest.Marshal(b, m, deterministic) +} +func (dst *GetTenantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTenantRequest.Merge(dst, src) +} +func (m *GetTenantRequest) XXX_Size() int { + return xxx_messageInfo_GetTenantRequest.Size(m) +} +func (m *GetTenantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTenantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTenantRequest proto.InternalMessageInfo + +func (m *GetTenantRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for updating a specified tenant. +type UpdateTenantRequest struct { + // Required. + // + // The tenant resource to replace the current resource in the system. + Tenant *Tenant `protobuf:"bytes,1,opt,name=tenant,proto3" json:"tenant,omitempty"` + // Optional but strongly recommended for the best service + // experience. 
+ // + // If [update_mask][google.cloud.talent.v4beta1.UpdateTenantRequest.update_mask] is provided, only the specified fields in + // [tenant][google.cloud.talent.v4beta1.UpdateTenantRequest.tenant] are updated. Otherwise all the fields are updated. + // + // A field mask to specify the tenant fields to be updated. Only + // top level fields of [Tenant][google.cloud.talent.v4beta1.Tenant] are supported. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTenantRequest) Reset() { *m = UpdateTenantRequest{} } +func (m *UpdateTenantRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTenantRequest) ProtoMessage() {} +func (*UpdateTenantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{2} +} +func (m *UpdateTenantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTenantRequest.Unmarshal(m, b) +} +func (m *UpdateTenantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTenantRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTenantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTenantRequest.Merge(dst, src) +} +func (m *UpdateTenantRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTenantRequest.Size(m) +} +func (m *UpdateTenantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTenantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTenantRequest proto.InternalMessageInfo + +func (m *UpdateTenantRequest) GetTenant() *Tenant { + if m != nil { + return m.Tenant + } + return nil +} + +func (m *UpdateTenantRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to delete a tenant. +type DeleteTenantRequest struct { + // Required. + // + // The resource name of the tenant to be deleted. + // + // The format is "projects/{project_id}/tenants/{tenant_id}", for example, + // "projects/api-test-project/tenants/foo". 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTenantRequest) Reset() { *m = DeleteTenantRequest{} } +func (m *DeleteTenantRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTenantRequest) ProtoMessage() {} +func (*DeleteTenantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{3} +} +func (m *DeleteTenantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTenantRequest.Unmarshal(m, b) +} +func (m *DeleteTenantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTenantRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTenantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTenantRequest.Merge(dst, src) +} +func (m *DeleteTenantRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTenantRequest.Size(m) +} +func (m *DeleteTenantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTenantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTenantRequest proto.InternalMessageInfo + +func (m *DeleteTenantRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// List tenants for which the client has ACL visibility. +type ListTenantsRequest struct { + // Required. + // + // Resource name of the project under which the tenant is created. + // + // The format is "projects/{project_id}", for example, + // "projects/api-test-project". + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. + // + // The starting indicator from which to return results. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. + // + // The maximum number of tenants to be returned, at most 100. + // Default is 100 if a non-positive number is provided. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTenantsRequest) Reset() { *m = ListTenantsRequest{} } +func (m *ListTenantsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTenantsRequest) ProtoMessage() {} +func (*ListTenantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{4} +} +func (m *ListTenantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTenantsRequest.Unmarshal(m, b) +} +func (m *ListTenantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTenantsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTenantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTenantsRequest.Merge(dst, src) +} +func (m *ListTenantsRequest) XXX_Size() int { + return xxx_messageInfo_ListTenantsRequest.Size(m) +} +func (m *ListTenantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTenantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTenantsRequest proto.InternalMessageInfo + +func (m *ListTenantsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTenantsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListTenantsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Output only. +// +// The List tenants response object. +type ListTenantsResponse struct { + // Tenants for the current client. + Tenants []*Tenant `protobuf:"bytes,1,rep,name=tenants,proto3" json:"tenants,omitempty"` + // A token to retrieve the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Additional information for the API invocation, such as the request + // tracking id. 
+ Metadata *ResponseMetadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTenantsResponse) Reset() { *m = ListTenantsResponse{} } +func (m *ListTenantsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTenantsResponse) ProtoMessage() {} +func (*ListTenantsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_tenant_service_b3e8de717c6e6008, []int{5} +} +func (m *ListTenantsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTenantsResponse.Unmarshal(m, b) +} +func (m *ListTenantsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTenantsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTenantsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTenantsResponse.Merge(dst, src) +} +func (m *ListTenantsResponse) XXX_Size() int { + return xxx_messageInfo_ListTenantsResponse.Size(m) +} +func (m *ListTenantsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTenantsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTenantsResponse proto.InternalMessageInfo + +func (m *ListTenantsResponse) GetTenants() []*Tenant { + if m != nil { + return m.Tenants + } + return nil +} + +func (m *ListTenantsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListTenantsResponse) GetMetadata() *ResponseMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func init() { + proto.RegisterType((*CreateTenantRequest)(nil), "google.cloud.talent.v4beta1.CreateTenantRequest") + proto.RegisterType((*GetTenantRequest)(nil), "google.cloud.talent.v4beta1.GetTenantRequest") + proto.RegisterType((*UpdateTenantRequest)(nil), "google.cloud.talent.v4beta1.UpdateTenantRequest") + proto.RegisterType((*DeleteTenantRequest)(nil), "google.cloud.talent.v4beta1.DeleteTenantRequest") + proto.RegisterType((*ListTenantsRequest)(nil), "google.cloud.talent.v4beta1.ListTenantsRequest") + proto.RegisterType((*ListTenantsResponse)(nil), "google.cloud.talent.v4beta1.ListTenantsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TenantServiceClient is the client API for TenantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TenantServiceClient interface { + // Creates a new tenant entity. + CreateTenant(ctx context.Context, in *CreateTenantRequest, opts ...grpc.CallOption) (*Tenant, error) + // Retrieves specified tenant. + GetTenant(ctx context.Context, in *GetTenantRequest, opts ...grpc.CallOption) (*Tenant, error) + // Updates specified tenant. + UpdateTenant(ctx context.Context, in *UpdateTenantRequest, opts ...grpc.CallOption) (*Tenant, error) + // Deletes specified tenant. + DeleteTenant(ctx context.Context, in *DeleteTenantRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists all tenants associated with the project. 
+ ListTenants(ctx context.Context, in *ListTenantsRequest, opts ...grpc.CallOption) (*ListTenantsResponse, error) +} + +type tenantServiceClient struct { + cc *grpc.ClientConn +} + +func NewTenantServiceClient(cc *grpc.ClientConn) TenantServiceClient { + return &tenantServiceClient{cc} +} + +func (c *tenantServiceClient) CreateTenant(ctx context.Context, in *CreateTenantRequest, opts ...grpc.CallOption) (*Tenant, error) { + out := new(Tenant) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.TenantService/CreateTenant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tenantServiceClient) GetTenant(ctx context.Context, in *GetTenantRequest, opts ...grpc.CallOption) (*Tenant, error) { + out := new(Tenant) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.TenantService/GetTenant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tenantServiceClient) UpdateTenant(ctx context.Context, in *UpdateTenantRequest, opts ...grpc.CallOption) (*Tenant, error) { + out := new(Tenant) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.TenantService/UpdateTenant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tenantServiceClient) DeleteTenant(ctx context.Context, in *DeleteTenantRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.TenantService/DeleteTenant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tenantServiceClient) ListTenants(ctx context.Context, in *ListTenantsRequest, opts ...grpc.CallOption) (*ListTenantsResponse, error) { + out := new(ListTenantsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.talent.v4beta1.TenantService/ListTenants", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TenantServiceServer is the server API for TenantService service. +type TenantServiceServer interface { + // Creates a new tenant entity. + CreateTenant(context.Context, *CreateTenantRequest) (*Tenant, error) + // Retrieves specified tenant. + GetTenant(context.Context, *GetTenantRequest) (*Tenant, error) + // Updates specified tenant. + UpdateTenant(context.Context, *UpdateTenantRequest) (*Tenant, error) + // Deletes specified tenant. + DeleteTenant(context.Context, *DeleteTenantRequest) (*empty.Empty, error) + // Lists all tenants associated with the project. 
+ ListTenants(context.Context, *ListTenantsRequest) (*ListTenantsResponse, error) +} + +func RegisterTenantServiceServer(s *grpc.Server, srv TenantServiceServer) { + s.RegisterService(&_TenantService_serviceDesc, srv) +} + +func _TenantService_CreateTenant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTenantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TenantServiceServer).CreateTenant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.TenantService/CreateTenant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TenantServiceServer).CreateTenant(ctx, req.(*CreateTenantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TenantService_GetTenant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTenantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TenantServiceServer).GetTenant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.TenantService/GetTenant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TenantServiceServer).GetTenant(ctx, req.(*GetTenantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TenantService_UpdateTenant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTenantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TenantServiceServer).UpdateTenant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.TenantService/UpdateTenant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TenantServiceServer).UpdateTenant(ctx, req.(*UpdateTenantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TenantService_DeleteTenant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTenantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TenantServiceServer).DeleteTenant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.TenantService/DeleteTenant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TenantServiceServer).DeleteTenant(ctx, req.(*DeleteTenantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TenantService_ListTenants_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTenantsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TenantServiceServer).ListTenants(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.talent.v4beta1.TenantService/ListTenants", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(TenantServiceServer).ListTenants(ctx, req.(*ListTenantsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TenantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.talent.v4beta1.TenantService", + HandlerType: (*TenantServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTenant", + Handler: _TenantService_CreateTenant_Handler, + }, + { + MethodName: "GetTenant", + Handler: _TenantService_GetTenant_Handler, + }, + { + MethodName: "UpdateTenant", + Handler: _TenantService_UpdateTenant_Handler, + }, + { + MethodName: "DeleteTenant", + Handler: _TenantService_DeleteTenant_Handler, + }, + { + MethodName: "ListTenants", + Handler: _TenantService_ListTenants_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/talent/v4beta1/tenant_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/talent/v4beta1/tenant_service.proto", fileDescriptor_tenant_service_b3e8de717c6e6008) +} + +var fileDescriptor_tenant_service_b3e8de717c6e6008 = []byte{ + // 641 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x95, 0x4f, 0x4f, 0x13, 0x41, + 0x18, 0xc6, 0x33, 0xa0, 0x95, 0xbe, 0x85, 0x68, 0xa6, 0x09, 0x69, 0x8a, 0xc6, 0x66, 0x35, 0xa4, + 0x54, 0xdd, 0xa9, 0xd5, 0x78, 0x90, 0x70, 0x10, 0xfc, 0x13, 0x13, 0x49, 0x48, 0xc1, 0x8b, 0x97, + 0x66, 0x68, 0x5f, 0xd6, 0x85, 0xee, 0xcc, 0xda, 0x99, 0x12, 0xc5, 0x90, 0x18, 0x0e, 0xde, 0x8c, + 0x07, 0x13, 0x6f, 0x7a, 0xf0, 0xcb, 0xf8, 0x01, 0xfc, 0x0a, 0x7e, 0x10, 0xb3, 0x33, 0xd3, 0x52, + 0x68, 0xd9, 0xae, 0xb7, 0xce, 0xec, 0x3c, 0xef, 0xfb, 0x9b, 0x67, 0xdf, 0x67, 0x0b, 0xf5, 0x40, + 0xca, 0xa0, 0x8b, 0xac, 0xdd, 0x95, 0xfd, 0x0e, 0xd3, 0xbc, 0x8b, 0x42, 0xb3, 0xc3, 0x87, 0xbb, + 0xa8, 0xf9, 0x7d, 0xa6, 0x51, 0x70, 0xa1, 0x5b, 0x0a, 0x7b, 0x87, 0x61, 0x1b, 0xfd, 0xb8, 0x27, + 0xb5, 0xa4, 0x4b, 0x56, 0xe1, 0x1b, 0x85, 0x6f, 0x15, 0xbe, 0x53, 0x94, 0xaf, 0xbb, 0x72, 0x3c, + 0x0e, 0x19, 0x17, 0x42, 0x6a, 0xae, 0x43, 0x29, 0x94, 0x95, 0x96, 0xab, 0x69, 0xcd, 0xda, 0x32, + 0x8a, 0xa4, 0xc8, 0x72, 0xd2, 0x62, 0xb9, 0x93, 0x0e, 0x87, 0x99, 0xd5, 0x6e, 0x7f, 0x8f, 0x61, + 0x14, 0xeb, 0x0f, 0xee, 0x61, 0xe5, 0xfc, 0xc3, 0xbd, 0x10, 0xbb, 0x9d, 0x56, 0xc4, 0xd5, 0x81, + 0x3d, 0xe1, 0xed, 0x43, 0x71, 0xa3, 0x87, 0x5c, 0xe3, 0x8e, 0x29, 0xda, 0xc4, 0x77, 0x7d, 0x54, + 0x9a, 0x2e, 0x42, 0x2e, 0xe6, 0x3d, 0x14, 0xba, 0x44, 0x2a, 0xa4, 0x9a, 0x6f, 0xba, 0x15, 0x5d, + 0x85, 0x9c, 0xed, 0x5e, 0x9a, 0xa9, 0x90, 0x6a, 0xa1, 0x71, 0xcb, 0x4f, 0x71, 0xc3, 0x77, 0x35, + 0x9d, 0xc4, 0x5b, 0x86, 0x6b, 0x2f, 0x50, 0x9f, 0x6d, 0x44, 0xe1, 0x92, 0xe0, 0x11, 0xba, 0x36, + 0xe6, 0xb7, 0xf7, 0x95, 0x40, 0xf1, 0x75, 0xdc, 0x19, 0x83, 0x3a, 0x6d, 0x4e, 0xfe, 0xbb, 0x39, + 0x5d, 0x85, 0x42, 0xdf, 0xd4, 0x34, 0xb7, 0x77, 0xf8, 0xe5, 0x41, 0x85, 0x81, 0x41, 0xfe, 0xf3, + 0xc4, 0xa0, 0x4d, 0xae, 0x0e, 0x9a, 0x60, 0x8f, 0x27, 0xbf, 0xbd, 0x15, 0x28, 0x3e, 0xc5, 0x2e, + 0x9e, 0x07, 0x9a, 0x04, 0xff, 0x16, 0xe8, 0xab, 0x50, 0xb9, 0x5b, 0xaa, 0x69, 0x7e, 0xde, 0x00, + 0x88, 0x79, 0x80, 0x2d, 0x2d, 0x0f, 0x50, 0x18, 0xa8, 0x7c, 0x33, 0x9f, 0xec, 0xec, 0x24, 0x1b, + 0x74, 0x09, 0xcc, 0xa2, 0xa5, 0xc2, 0x23, 0x2c, 0xcd, 0x56, 0x48, 0xf5, 0x72, 0x73, 0x2e, 0xd9, + 0xd8, 0x0e, 0x8f, 0xd0, 0xfb, 0x4d, 0xa0, 0x78, 0xa6, 0x95, 0x8a, 0xa5, 0x50, 0x48, 0xd7, 0xe0, + 0x8a, 0xbd, 0xb3, 0x2a, 0x91, 0xca, 0x6c, 0x56, 0x9f, 0x06, 0x1a, 0xba, 0x0c, 0x57, 0x05, 0xbe, + 0xd7, 0xad, 0x31, 0xae, 0x85, 0x64, 0x7b, 0x6b, 0xc8, 0xf6, 0x12, 0xe6, 0x22, 0xd4, 0xbc, 0xc3, + 0x35, 
0x37, 0x68, 0x85, 0xc6, 0xbd, 0xd4, 0x3e, 0x03, 0xbe, 0x4d, 0x27, 0x6a, 0x0e, 0xe5, 0x8d, + 0x93, 0x1c, 0x2c, 0x58, 0x8c, 0x6d, 0x1b, 0x35, 0xfa, 0x9d, 0xc0, 0xfc, 0xe8, 0x5c, 0xd2, 0x7a, + 0x6a, 0xed, 0x09, 0x23, 0x5c, 0xce, 0x72, 0x6b, 0x8f, 0x9d, 0xfc, 0xf9, 0xfb, 0x6d, 0x66, 0xc5, + 0xbb, 0x3d, 0x0c, 0xd7, 0x47, 0xfb, 0x66, 0xd6, 0xe2, 0x9e, 0xdc, 0xc7, 0xb6, 0x56, 0xac, 0x76, + 0xec, 0x02, 0xa7, 0x1e, 0x93, 0x1a, 0xfd, 0x42, 0x20, 0x3f, 0x1c, 0x62, 0x9a, 0x7e, 0xe3, 0xf3, + 0xc3, 0x9e, 0x0d, 0xe9, 0xae, 0x41, 0x5a, 0xa6, 0x23, 0x48, 0xc9, 0x60, 0x8d, 0x00, 0x0d, 0x78, + 0x58, 0xed, 0x98, 0xfe, 0x20, 0x30, 0x3f, 0x9a, 0x95, 0x29, 0x46, 0x4d, 0x88, 0x55, 0x36, 0xaa, + 0x47, 0x86, 0xaa, 0xde, 0xb8, 0x73, 0x4a, 0xe5, 0x3e, 0x43, 0x17, 0xc3, 0x25, 0x7e, 0x7d, 0x26, + 0x30, 0x3f, 0x1a, 0x9d, 0x29, 0x7c, 0x13, 0x52, 0x56, 0x5e, 0x1c, 0x0b, 0xe9, 0xb3, 0xe4, 0x13, + 0x37, 0x30, 0xaa, 0x96, 0xcd, 0xa8, 0x9f, 0x04, 0x0a, 0x23, 0x69, 0xa1, 0x2c, 0x95, 0x63, 0x3c, + 0xc2, 0xe5, 0x7a, 0x76, 0x81, 0x1d, 0xf4, 0x49, 0x6f, 0xf2, 0xe2, 0xe1, 0x5a, 0xff, 0x44, 0xe0, + 0x66, 0x5b, 0x46, 0x69, 0x5d, 0xd6, 0xe9, 0x99, 0x94, 0x6c, 0x25, 0x7e, 0x6c, 0x91, 0x37, 0x4f, + 0x9c, 0x24, 0x90, 0x5d, 0x2e, 0x02, 0x5f, 0xf6, 0x02, 0x16, 0xa0, 0x30, 0x6e, 0x31, 0xfb, 0x88, + 0xc7, 0xa1, 0x9a, 0xf8, 0x5f, 0xb2, 0x6a, 0x97, 0xbf, 0x66, 0x66, 0x37, 0x76, 0xb6, 0x77, 0x73, + 0x46, 0xf3, 0xe0, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8a, 0x45, 0x0b, 0xcb, 0x15, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/cloudtasks.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/cloudtasks.pb.go new file mode 100644 index 0000000..088e85f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/cloudtasks.pb.go @@ -0,0 +1,1879 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2/cloudtasks.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import _ "google.golang.org/genproto/googleapis/rpc/code" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. +type ListQueuesRequest struct { + // Required. + // + // The location name. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // `filter` can be used to specify a subset of queues. Any [Queue][google.cloud.tasks.v2.Queue] + // field can be used as a filter and several operators as supported. + // For example: `<=, <, >=, >, !=, =, :`. 
The filter syntax is the same as + // described in + // [Stackdriver's Advanced Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). + // + // Sample filter "state: PAUSED". + // + // Note that using filters might cause fewer queues than the + // requested page_size to be returned. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. + // + // The maximum page size is 9800. If unspecified, the page size will + // be the maximum. Fewer queues than requested might be returned, + // even if more queues exist; use the + // [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] in the + // response to determine if more queues exist. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2.ListQueuesResponse.next_page_token] returned + // from the previous call to [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] + // method. It is an error to switch the value of the + // [filter][google.cloud.tasks.v2.ListQueuesRequest.filter] while iterating through pages. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesRequest) Reset() { *m = ListQueuesRequest{} } +func (m *ListQueuesRequest) String() string { return proto.CompactTextString(m) } +func (*ListQueuesRequest) ProtoMessage() {} +func (*ListQueuesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{0} +} +func (m *ListQueuesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesRequest.Unmarshal(m, b) +} +func (m *ListQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesRequest.Marshal(b, m, deterministic) +} +func (dst *ListQueuesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesRequest.Merge(dst, src) +} +func (m *ListQueuesRequest) XXX_Size() int { + return xxx_messageInfo_ListQueuesRequest.Size(m) +} +func (m *ListQueuesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesRequest proto.InternalMessageInfo + +func (m *ListQueuesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListQueuesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListQueuesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListQueuesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues]. +type ListQueuesResponse struct { + // The list of queues. + Queues []*Queue `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListQueues][google.cloud.tasks.v2.CloudTasks.ListQueues] with this value as the + // [page_token][google.cloud.tasks.v2.ListQueuesRequest.page_token]. 
+ // + // If the next_page_token is empty, there are no more results. + // + // The page token is valid for only 2 hours. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesResponse) Reset() { *m = ListQueuesResponse{} } +func (m *ListQueuesResponse) String() string { return proto.CompactTextString(m) } +func (*ListQueuesResponse) ProtoMessage() {} +func (*ListQueuesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{1} +} +func (m *ListQueuesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesResponse.Unmarshal(m, b) +} +func (m *ListQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesResponse.Marshal(b, m, deterministic) +} +func (dst *ListQueuesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesResponse.Merge(dst, src) +} +func (m *ListQueuesResponse) XXX_Size() int { + return xxx_messageInfo_ListQueuesResponse.Size(m) +} +func (m *ListQueuesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesResponse proto.InternalMessageInfo + +func (m *ListQueuesResponse) GetQueues() []*Queue { + if m != nil { + return m.Queues + } + return nil +} + +func (m *ListQueuesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [GetQueue][google.cloud.tasks.v2.CloudTasks.GetQueue]. +type GetQueueRequest struct { + // Required. + // + // The resource name of the queue. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueRequest) Reset() { *m = GetQueueRequest{} } +func (m *GetQueueRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueRequest) ProtoMessage() {} +func (*GetQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{2} +} +func (m *GetQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueRequest.Unmarshal(m, b) +} +func (m *GetQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueRequest.Merge(dst, src) +} +func (m *GetQueueRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueRequest.Size(m) +} +func (m *GetQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueRequest proto.InternalMessageInfo + +func (m *GetQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue]. +type CreateQueueRequest struct { + // Required. + // + // The location name in which the queue will be created. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + // + // The list of allowed locations can be obtained by calling Cloud + // Tasks' implementation of + // [ListLocations][google.cloud.location.Locations.ListLocations]. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The queue to create. + // + // [Queue's name][google.cloud.tasks.v2.Queue.name] cannot be the same as an existing queue. + Queue *Queue `protobuf:"bytes,2,opt,name=queue,proto3" json:"queue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateQueueRequest) Reset() { *m = CreateQueueRequest{} } +func (m *CreateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*CreateQueueRequest) ProtoMessage() {} +func (*CreateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{3} +} +func (m *CreateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateQueueRequest.Unmarshal(m, b) +} +func (m *CreateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *CreateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateQueueRequest.Merge(dst, src) +} +func (m *CreateQueueRequest) XXX_Size() int { + return xxx_messageInfo_CreateQueueRequest.Size(m) +} +func (m *CreateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateQueueRequest proto.InternalMessageInfo + +func (m *CreateQueueRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +// Request message for [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue]. +type UpdateQueueRequest struct { + // Required. + // + // The queue to create or update. + // + // The queue's [name][google.cloud.tasks.v2.Queue.name] must be specified. + // + // Output only fields cannot be modified using UpdateQueue. + // Any value specified for an output only field will be ignored. + // The queue's [name][google.cloud.tasks.v2.Queue.name] cannot be changed. + Queue *Queue `protobuf:"bytes,1,opt,name=queue,proto3" json:"queue,omitempty"` + // A mask used to specify which fields of the queue are being updated. + // + // If empty, then all fields will be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateQueueRequest) Reset() { *m = UpdateQueueRequest{} } +func (m *UpdateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateQueueRequest) ProtoMessage() {} +func (*UpdateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{4} +} +func (m *UpdateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateQueueRequest.Unmarshal(m, b) +} +func (m *UpdateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateQueueRequest.Merge(dst, src) +} +func (m *UpdateQueueRequest) XXX_Size() int { + return xxx_messageInfo_UpdateQueueRequest.Size(m) +} +func (m *UpdateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateQueueRequest proto.InternalMessageInfo + +func (m *UpdateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +func (m *UpdateQueueRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. +type DeleteQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteQueueRequest) Reset() { *m = DeleteQueueRequest{} } +func (m *DeleteQueueRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteQueueRequest) ProtoMessage() {} +func (*DeleteQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{5} +} +func (m *DeleteQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteQueueRequest.Unmarshal(m, b) +} +func (m *DeleteQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteQueueRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteQueueRequest.Merge(dst, src) +} +func (m *DeleteQueueRequest) XXX_Size() int { + return xxx_messageInfo_DeleteQueueRequest.Size(m) +} +func (m *DeleteQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteQueueRequest proto.InternalMessageInfo + +func (m *DeleteQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue]. +type PurgeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PurgeQueueRequest) Reset() { *m = PurgeQueueRequest{} } +func (m *PurgeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PurgeQueueRequest) ProtoMessage() {} +func (*PurgeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{6} +} +func (m *PurgeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PurgeQueueRequest.Unmarshal(m, b) +} +func (m *PurgeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PurgeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PurgeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PurgeQueueRequest.Merge(dst, src) +} +func (m *PurgeQueueRequest) XXX_Size() int { + return xxx_messageInfo_PurgeQueueRequest.Size(m) +} +func (m *PurgeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PurgeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PurgeQueueRequest proto.InternalMessageInfo + +func (m *PurgeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue]. +type PauseQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseQueueRequest) Reset() { *m = PauseQueueRequest{} } +func (m *PauseQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PauseQueueRequest) ProtoMessage() {} +func (*PauseQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{7} +} +func (m *PauseQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseQueueRequest.Unmarshal(m, b) +} +func (m *PauseQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PauseQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseQueueRequest.Merge(dst, src) +} +func (m *PauseQueueRequest) XXX_Size() int { + return xxx_messageInfo_PauseQueueRequest.Size(m) +} +func (m *PauseQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseQueueRequest proto.InternalMessageInfo + +func (m *PauseQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. +type ResumeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeQueueRequest) Reset() { *m = ResumeQueueRequest{} } +func (m *ResumeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeQueueRequest) ProtoMessage() {} +func (*ResumeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{8} +} +func (m *ResumeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeQueueRequest.Unmarshal(m, b) +} +func (m *ResumeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeQueueRequest.Merge(dst, src) +} +func (m *ResumeQueueRequest) XXX_Size() int { + return xxx_messageInfo_ResumeQueueRequest.Size(m) +} +func (m *ResumeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeQueueRequest proto.InternalMessageInfo + +func (m *ResumeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for listing tasks using [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. +type ListTasksRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2.Task] resource. + ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2.Task_View" json:"response_view,omitempty"` + // Requested page size. Fewer tasks than requested might be returned. + // + // The maximum page size is 1000. If unspecified, the page size will + // be the maximum. Fewer tasks than requested might be returned, + // even if more tasks exist; use + // [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] in the + // response to determine if more tasks exist. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2.ListTasksResponse.next_page_token] returned + // from the previous call to [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] + // method. + // + // The page token is valid for only 2 hours. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksRequest) Reset() { *m = ListTasksRequest{} } +func (m *ListTasksRequest) String() string { return proto.CompactTextString(m) } +func (*ListTasksRequest) ProtoMessage() {} +func (*ListTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{9} +} +func (m *ListTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksRequest.Unmarshal(m, b) +} +func (m *ListTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksRequest.Marshal(b, m, deterministic) +} +func (dst *ListTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksRequest.Merge(dst, src) +} +func (m *ListTasksRequest) XXX_Size() int { + return xxx_messageInfo_ListTasksRequest.Size(m) +} +func (m *ListTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksRequest proto.InternalMessageInfo + +func (m *ListTasksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTasksRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func (m *ListTasksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTasksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for listing tasks using [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks]. +type ListTasksResponse struct { + // The list of tasks. + Tasks []*Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListTasks][google.cloud.tasks.v2.CloudTasks.ListTasks] with this value as the + // [page_token][google.cloud.tasks.v2.ListTasksRequest.page_token]. + // + // If the next_page_token is empty, there are no more results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksResponse) Reset() { *m = ListTasksResponse{} } +func (m *ListTasksResponse) String() string { return proto.CompactTextString(m) } +func (*ListTasksResponse) ProtoMessage() {} +func (*ListTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{10} +} +func (m *ListTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksResponse.Unmarshal(m, b) +} +func (m *ListTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksResponse.Marshal(b, m, deterministic) +} +func (dst *ListTasksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksResponse.Merge(dst, src) +} +func (m *ListTasksResponse) XXX_Size() int { + return xxx_messageInfo_ListTasksResponse.Size(m) +} +func (m *ListTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksResponse proto.InternalMessageInfo + +func (m *ListTasksResponse) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *ListTasksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for getting a task using [GetTask][google.cloud.tasks.v2.CloudTasks.GetTask]. +type GetTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{11} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. +type CreateTaskRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // The queue must already exist. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The task to add. + // + // Task names have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`. + // The user can optionally specify a task [name][google.cloud.tasks.v2.Task.name]. If a + // name is not specified then the system will generate a random + // unique task id, which will be set in the task returned in the + // [response][google.cloud.tasks.v2.Task.name]. + // + // If [schedule_time][google.cloud.tasks.v2.Task.schedule_time] is not set or is in the + // past then Cloud Tasks will set it to the current time. + // + // Task De-duplication: + // + // Explicitly specifying a task ID enables task de-duplication. If + // a task's ID is identical to that of an existing task or a task + // that was deleted or executed recently then the call will fail + // with [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. + // If the task's queue was created using Cloud Tasks, then another task with + // the same name can't be created for ~1hour after the original task was + // deleted or executed. If the task's queue was created using queue.yaml or + // queue.xml, then another task with the same name can't be created + // for ~9days after the original task was deleted or executed. + // + // Because there is an extra lookup cost to identify duplicate task + // names, these [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] calls have significantly + // increased latency. Using hashed strings for the task id or for + // the prefix of the task id is recommended. 
Choosing task ids that + // are sequential or have sequential prefixes, for example using a + // timestamp, causes an increase in latency and error rates in all + // task commands. The infrastructure relies on an approximately + // uniform distribution of task ids to store and serve tasks + // efficiently. + Task *Task `protobuf:"bytes,2,opt,name=task,proto3" json:"task,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2.Task] resource. + ResponseView Task_View `protobuf:"varint,3,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTaskRequest) Reset() { *m = CreateTaskRequest{} } +func (m *CreateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTaskRequest) ProtoMessage() {} +func (*CreateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{12} +} +func (m *CreateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTaskRequest.Unmarshal(m, b) +} +func (m *CreateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTaskRequest.Merge(dst, src) +} +func (m *CreateTaskRequest) XXX_Size() int { + return xxx_messageInfo_CreateTaskRequest.Size(m) +} +func (m *CreateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTaskRequest proto.InternalMessageInfo + +func (m *CreateTaskRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTaskRequest) GetTask() *Task { + if m != nil { + return m.Task + } + return nil +} + +func (m *CreateTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for deleting a task using +// [DeleteTask][google.cloud.tasks.v2.CloudTasks.DeleteTask]. +type DeleteTaskRequest struct { + // Required. + // + // The task name. 
For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } +func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskRequest) ProtoMessage() {} +func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{13} +} +func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) +} +func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) +} +func (m *DeleteTaskRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTaskRequest.Size(m) +} +func (m *DeleteTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo + +func (m *DeleteTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for forcing a task to run now using +// [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask]. +type RunTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunTaskRequest) Reset() { *m = RunTaskRequest{} } +func (m *RunTaskRequest) String() string { return proto.CompactTextString(m) } +func (*RunTaskRequest) ProtoMessage() {} +func (*RunTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_7027f62ef3ad9740, []int{14} +} +func (m *RunTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunTaskRequest.Unmarshal(m, b) +} +func (m *RunTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunTaskRequest.Marshal(b, m, deterministic) +} +func (dst *RunTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunTaskRequest.Merge(dst, src) +} +func (m *RunTaskRequest) XXX_Size() int { + return xxx_messageInfo_RunTaskRequest.Size(m) +} +func (m *RunTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunTaskRequest proto.InternalMessageInfo + +func (m *RunTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RunTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func init() { + proto.RegisterType((*ListQueuesRequest)(nil), "google.cloud.tasks.v2.ListQueuesRequest") + proto.RegisterType((*ListQueuesResponse)(nil), "google.cloud.tasks.v2.ListQueuesResponse") + proto.RegisterType((*GetQueueRequest)(nil), "google.cloud.tasks.v2.GetQueueRequest") + proto.RegisterType((*CreateQueueRequest)(nil), "google.cloud.tasks.v2.CreateQueueRequest") + proto.RegisterType((*UpdateQueueRequest)(nil), "google.cloud.tasks.v2.UpdateQueueRequest") + proto.RegisterType((*DeleteQueueRequest)(nil), "google.cloud.tasks.v2.DeleteQueueRequest") + proto.RegisterType((*PurgeQueueRequest)(nil), "google.cloud.tasks.v2.PurgeQueueRequest") + proto.RegisterType((*PauseQueueRequest)(nil), "google.cloud.tasks.v2.PauseQueueRequest") + proto.RegisterType((*ResumeQueueRequest)(nil), "google.cloud.tasks.v2.ResumeQueueRequest") + proto.RegisterType((*ListTasksRequest)(nil), "google.cloud.tasks.v2.ListTasksRequest") + proto.RegisterType((*ListTasksResponse)(nil), "google.cloud.tasks.v2.ListTasksResponse") + proto.RegisterType((*GetTaskRequest)(nil), "google.cloud.tasks.v2.GetTaskRequest") + proto.RegisterType((*CreateTaskRequest)(nil), "google.cloud.tasks.v2.CreateTaskRequest") + proto.RegisterType((*DeleteTaskRequest)(nil), "google.cloud.tasks.v2.DeleteTaskRequest") + proto.RegisterType((*RunTaskRequest)(nil), "google.cloud.tasks.v2.RunTaskRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudTasksClient is the client API for CloudTasks service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudTasksClient interface { + // Lists queues. + // + // Queues are returned in lexicographical order. 
+ ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. Tasks can still be added + // when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2.Queue.state] is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The state of a queue is stored + // in the queue's [state][google.cloud.tasks.v2.Queue.state]; after calling this method it + // will be set to [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. 
If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Gets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. + // Returns an empty policy if the resource exists and does not have a policy + // set. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + // policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2.Queue]. + // If the resource does not exist, this will return an empty set of + // permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + // due to performance considerations; + // [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] controls the + // subset of information which is returned. + // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) + // Gets a task. + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2.AppEngineHttpQueue], the maximum task size is + // 100KB. + CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has executed successfully or permanently + // failed. + DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Forces a task to run now. + // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2.RateLimits] or + // is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + // + // This command is meant to be used for manual debugging. 
For + // example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can be used to retry a failed + // task after a fix has been made or to manually force a task to be + // dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][Task.status] after the task is dispatched but + // before the task is received by its target. + // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will be reset to the time that + // [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called plus the retry delay specified + // in the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) +} + +type cloudTasksClient struct { + cc *grpc.ClientConn +} + +func NewCloudTasksClient(cc *grpc.ClientConn) CloudTasksClient { + return &cloudTasksClient{cc} +} + +func (c *cloudTasksClient) ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) { + out := new(ListQueuesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/ListQueues", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/GetQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/CreateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/PauseQueue", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { + out := new(ListTasksResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/ListTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/CreateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/DeleteTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2.CloudTasks/RunTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudTasksServer is the server API for CloudTasks service. +type CloudTasksServer interface { + // Lists queues. + // + // Queues are returned in lexicographical order. + ListQueues(context.Context, *ListQueuesRequest) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(context.Context, *GetQueueRequest) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. 
+ // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + CreateQueue(context.Context, *CreateQueueRequest) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + UpdateQueue(context.Context, *UpdateQueueRequest) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + DeleteQueue(context.Context, *DeleteQueueRequest) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(context.Context, *PurgeQueueRequest) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue]. Tasks can still be added + // when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2.Queue.state] is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + PauseQueue(context.Context, *PauseQueueRequest) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2.Queue.State.DISABLED]. The state of a queue is stored + // in the queue's [state][google.cloud.tasks.v2.Queue.state]; after calling this method it + // will be set to [RUNNING][google.cloud.tasks.v2.Queue.State.RUNNING]. + // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(context.Context, *ResumeQueueRequest) (*Queue, error) + // Gets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. + // Returns an empty policy if the resource exists and does not have a policy + // set. 
+ // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the access control policy for a [Queue][google.cloud.tasks.v2.Queue]. Replaces any existing + // policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2.Queue]. + // If the resource does not exist, this will return an empty set of + // permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2.Task.View.BASIC] view is retrieved + // due to performance considerations; + // [response_view][google.cloud.tasks.v2.ListTasksRequest.response_view] controls the + // subset of information which is returned. + // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(context.Context, *ListTasksRequest) (*ListTasksResponse, error) + // Gets a task. + GetTask(context.Context, *GetTaskRequest) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2.AppEngineHttpQueue], the maximum task size is + // 100KB. + CreateTask(context.Context, *CreateTaskRequest) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has executed successfully or permanently + // failed. + DeleteTask(context.Context, *DeleteTaskRequest) (*empty.Empty, error) + // Forces a task to run now. + // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2.RateLimits] or + // is [PAUSED][google.cloud.tasks.v2.Queue.State.PAUSED]. + // + // This command is meant to be used for manual debugging. For + // example, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] can be used to retry a failed + // task after a fix has been made or to manually force a task to be + // dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][Task.status] after the task is dispatched but + // before the task is received by its target. 
+ // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2.Task.schedule_time] will be reset to the time that + // [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] was called plus the retry delay specified + // in the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + RunTask(context.Context, *RunTaskRequest) (*Task, error) +} + +func RegisterCloudTasksServer(s *grpc.Server, srv CloudTasksServer) { + s.RegisterService(&_CloudTasks_serviceDesc, srv) +} + +func _CloudTasks_ListQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListQueuesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListQueues(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/ListQueues", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListQueues(ctx, req.(*ListQueuesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/GetQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetQueue(ctx, req.(*GetQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/CreateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateQueue(ctx, req.(*CreateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_UpdateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).UpdateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/UpdateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).UpdateQueue(ctx, req.(*UpdateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/DeleteQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).DeleteQueue(ctx, req.(*DeleteQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PurgeQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PurgeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PurgeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/PurgeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PurgeQueue(ctx, req.(*PurgeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PauseQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PauseQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/PauseQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PauseQueue(ctx, req.(*PauseQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ResumeQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ResumeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/ResumeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ResumeQueue(ctx, req.(*ResumeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).SetIamPolicy(ctx, in) + } + 
info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ListTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/ListTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListTasks(ctx, req.(*ListTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/CreateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateTask(ctx, req.(*CreateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/DeleteTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(CloudTasksServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_RunTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).RunTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2.CloudTasks/RunTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).RunTask(ctx, req.(*RunTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudTasks_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.tasks.v2.CloudTasks", + HandlerType: (*CloudTasksServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListQueues", + Handler: _CloudTasks_ListQueues_Handler, + }, + { + MethodName: "GetQueue", + Handler: _CloudTasks_GetQueue_Handler, + }, + { + MethodName: "CreateQueue", + Handler: _CloudTasks_CreateQueue_Handler, + }, + { + MethodName: "UpdateQueue", + Handler: _CloudTasks_UpdateQueue_Handler, + }, + { + MethodName: "DeleteQueue", + Handler: _CloudTasks_DeleteQueue_Handler, + }, + { + MethodName: "PurgeQueue", + Handler: _CloudTasks_PurgeQueue_Handler, + }, + { + MethodName: "PauseQueue", + Handler: _CloudTasks_PauseQueue_Handler, + }, + { + MethodName: "ResumeQueue", + Handler: _CloudTasks_ResumeQueue_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _CloudTasks_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _CloudTasks_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _CloudTasks_TestIamPermissions_Handler, + }, + { + MethodName: "ListTasks", + Handler: _CloudTasks_ListTasks_Handler, + }, + { + MethodName: "GetTask", + Handler: _CloudTasks_GetTask_Handler, + }, + { + MethodName: "CreateTask", + Handler: _CloudTasks_CreateTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _CloudTasks_DeleteTask_Handler, + }, + { + MethodName: "RunTask", + Handler: _CloudTasks_RunTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/tasks/v2/cloudtasks.proto", +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2/cloudtasks.proto", fileDescriptor_cloudtasks_7027f62ef3ad9740) +} + +var fileDescriptor_cloudtasks_7027f62ef3ad9740 = []byte{ + // 1096 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x97, 0x4b, 0x8f, 0xdb, 0x54, + 0x14, 0xc7, 0x75, 0xe7, 0xd5, 0xce, 0x49, 0xdb, 0x61, 0xae, 0xd4, 0x2a, 0xcd, 0x14, 0x14, 0x2c, + 0x75, 0x26, 0x58, 0xc8, 0xee, 0xb8, 0xa5, 0xa8, 0x09, 0x15, 0x4c, 0x4b, 0x67, 0x84, 0x28, 0x52, + 0x70, 0x06, 0x16, 0x6c, 0x82, 0x9b, 0xb9, 0x63, 0x99, 0xc4, 0x8f, 0xfa, 0xda, 0x29, 0x14, 0x8d, + 0x90, 0x50, 0x59, 0xc0, 0x8a, 0xa7, 0x90, 0x58, 0xc2, 0x9a, 0x35, 0x1f, 0x84, 0x0f, 0xc0, 0x86, + 0x0f, 0x82, 0xee, 0xc3, 0x63, 0x8f, 0x3d, 0xb6, 0x93, 0x81, 0xb2, 0x4a, 0x7c, 0xcf, 0xff, 0xde, + 0xf3, 0xf3, 0xb9, 0x27, 0xe7, 0x9c, 0xc0, 0xa6, 0xed, 0xfb, 0xf6, 0x84, 0xe8, 0xa3, 0x89, 0x1f, + 0x1f, 0xe8, 0x91, 0x45, 0xc7, 0x54, 0x9f, 0x1a, 0xe2, 0x91, 0x3f, 0x69, 0x41, 0xe8, 0x47, 0x3e, + 0xbe, 0x2c, 0x74, 0x1a, 0x37, 0x68, 0xc2, 0x32, 0x35, 0x5a, 0xd7, 0xe4, 0x76, 0x2b, 0x70, 0x74, + 0xcb, 0xf3, 0xfc, 0xc8, 0x8a, 0x1c, 0xdf, 0x93, 0x9b, 0x5a, 0x57, 0x33, 0xd6, 0x90, 0x50, 0x3f, + 0x0e, 0x47, 0x44, 
0x9a, 0x36, 0xa4, 0x89, 0x3f, 0x3d, 0x8a, 0x0f, 0x75, 0xe2, 0x06, 0xd1, 0x67, + 0xd2, 0xd8, 0xce, 0x1b, 0x0f, 0x1d, 0x32, 0x39, 0x18, 0xba, 0x16, 0x1d, 0xe7, 0x14, 0x39, 0xec, + 0x28, 0x55, 0xbc, 0x7c, 0xba, 0xe2, 0x71, 0x4c, 0xe2, 0x84, 0xe1, 0x25, 0x29, 0x71, 0x2c, 0x57, + 0x9f, 0x6e, 0xb3, 0x8f, 0x61, 0xe0, 0x4f, 0x9c, 0x51, 0x82, 0xd1, 0x3a, 0x69, 0x3f, 0x61, 0x93, + 0xf1, 0xd0, 0xc3, 0x60, 0xa4, 0x8f, 0xfc, 0x03, 0x79, 0xa4, 0xf2, 0x05, 0xac, 0x3f, 0x74, 0x68, + 0xf4, 0x3e, 0xf3, 0x42, 0x4d, 0xf2, 0x38, 0x26, 0x34, 0xc2, 0x57, 0x60, 0x25, 0xb0, 0x42, 0xe2, + 0x45, 0x4d, 0xd4, 0x46, 0x9d, 0x55, 0x53, 0x3e, 0xb1, 0xf5, 0x43, 0x67, 0x12, 0x91, 0xb0, 0xb9, + 0x20, 0xd6, 0xc5, 0x13, 0xde, 0x80, 0xd5, 0xc0, 0xb2, 0xc9, 0x90, 0x3a, 0x4f, 0x49, 0x73, 0xb1, + 0x8d, 0x3a, 0xcb, 0xe6, 0x79, 0xb6, 0x30, 0x70, 0x9e, 0x12, 0xfc, 0x22, 0x00, 0x37, 0x46, 0xfe, + 0x98, 0x78, 0xcd, 0x25, 0xbe, 0x91, 0xcb, 0xf7, 0xd9, 0x82, 0x12, 0x02, 0xce, 0x02, 0xd0, 0xc0, + 0xf7, 0x28, 0xc1, 0xb7, 0x60, 0x85, 0xbf, 0x38, 0x6d, 0xa2, 0xf6, 0x62, 0xa7, 0x61, 0x5c, 0xd3, + 0x4e, 0xbd, 0x4e, 0x8d, 0x6f, 0x33, 0xa5, 0x16, 0x6f, 0xc2, 0x9a, 0x47, 0x3e, 0x8d, 0x86, 0x19, + 0x7f, 0x02, 0xf4, 0x22, 0x5b, 0xee, 0x1f, 0xfb, 0xbc, 0x0e, 0x6b, 0x7b, 0x44, 0xb8, 0x4c, 0x5e, + 0x19, 0xc3, 0x92, 0x67, 0xb9, 0x44, 0xbe, 0x30, 0xff, 0xae, 0x7c, 0x0c, 0xf8, 0x7e, 0x48, 0xac, + 0x88, 0x9c, 0x50, 0x96, 0x05, 0xc7, 0x80, 0x65, 0x8e, 0xc1, 0x5d, 0xd6, 0x11, 0x0b, 0xa9, 0xf2, + 0x15, 0x02, 0xfc, 0x41, 0x70, 0x90, 0x77, 0x71, 0x7c, 0x14, 0x9a, 0xf9, 0x28, 0xdc, 0x83, 0x46, + 0xcc, 0x4f, 0xe2, 0x59, 0x27, 0x21, 0x5a, 0xc9, 0xce, 0x24, 0x31, 0xb5, 0x5d, 0x96, 0x98, 0xef, + 0x59, 0x74, 0x6c, 0x82, 0x90, 0xb3, 0xef, 0x4a, 0x07, 0xf0, 0xdb, 0x64, 0x42, 0x72, 0x18, 0xa7, + 0xc5, 0x64, 0x0b, 0xd6, 0xfb, 0x71, 0x68, 0xcf, 0x26, 0xb4, 0x62, 0x5a, 0x2f, 0xec, 0x00, 0x36, + 0x09, 0x8d, 0xdd, 0x7a, 0xe5, 0xef, 0x08, 0x5e, 0x60, 0xb9, 0xb2, 0xcf, 0x02, 0x50, 0x77, 0x1d, + 0x0f, 0xe0, 0x62, 0x28, 0xb3, 0x69, 0x38, 0x75, 0xc8, 0x13, 0x1e, 0x91, 0x4b, 0x46, 0xbb, 0x24, + 0x96, 0xec, 0x4c, 0xed, 0x43, 0x87, 0x3c, 0x31, 0x2f, 0x24, 0xdb, 0xd8, 0xd3, 0xbf, 0x4a, 0x6d, + 0x4f, 0xfc, 0xb6, 0x24, 0xae, 0xcc, 0xec, 0x6d, 0x58, 0xe6, 0x4e, 0x65, 0x62, 0x6f, 0x54, 0xf0, + 0x98, 0x42, 0x39, 0x73, 0x5a, 0x8f, 0xe1, 0xd2, 0x1e, 0xe1, 0xee, 0x2a, 0xa2, 0xf8, 0x1f, 0x05, + 0x46, 0xf9, 0x0d, 0xc1, 0xba, 0xf8, 0x75, 0x64, 0x1d, 0x96, 0xdd, 0x86, 0x0e, 0x4b, 0x51, 0x9a, + 0x96, 0x95, 0x2f, 0xcd, 0x85, 0x45, 0xca, 0xc5, 0x33, 0x51, 0x6e, 0xc1, 0xba, 0x48, 0xec, 0x9a, + 0xa8, 0xb0, 0xd8, 0x99, 0xb1, 0xf7, 0xff, 0xc4, 0xce, 0xf8, 0x0b, 0x03, 0xdc, 0x67, 0x52, 0x9e, + 0x1a, 0xf8, 0x27, 0x04, 0x90, 0xd6, 0x40, 0xdc, 0x29, 0x39, 0xad, 0x50, 0xa7, 0x5b, 0xaf, 0xcc, + 0xa0, 0x14, 0x6e, 0x15, 0xe3, 0xcb, 0x3f, 0xff, 0xfe, 0x61, 0xe1, 0x55, 0xac, 0xb2, 0x9e, 0xf2, + 0xb9, 0xb8, 0x95, 0xbb, 0x41, 0xe8, 0x7f, 0x42, 0x46, 0x11, 0xd5, 0x55, 0x7d, 0xe2, 0x8f, 0x44, + 0x27, 0xd4, 0xd5, 0x23, 0x5d, 0x96, 0xd3, 0x67, 0x08, 0xce, 0x27, 0x75, 0x12, 0x6f, 0x96, 0xf8, + 0xca, 0x15, 0xd2, 0x56, 0x65, 0xb1, 0xca, 0x61, 0xb0, 0x98, 0x96, 0x40, 0x48, 0x06, 0x5d, 0x3d, + 0xc2, 0xdf, 0x21, 0x68, 0x64, 0xea, 0x30, 0x2e, 0x7b, 0xeb, 0x62, 0xad, 0xae, 0x81, 0xb9, 0xc3, + 0x61, 0x6e, 0x2a, 0x73, 0xc4, 0xa4, 0x2b, 0xab, 0xed, 0xcf, 0x08, 0x1a, 0x99, 0xc2, 0x5d, 0xca, + 0x54, 0x2c, 0xee, 0x35, 0x4c, 0x6f, 0x72, 0xa6, 0x3b, 0xc6, 0x0d, 0xce, 0x24, 0x9a, 0xff, 0x4c, + 0x61, 0x4a, 0xc8, 0x9e, 0x21, 0x68, 0x64, 0x6a, 0x79, 0x29, 0x59, 0xb1, 0xde, 0xb7, 0xae, 0x14, + 0xba, 0xc5, 0x03, 0x36, 0xe3, 0x24, 0x97, 
0xa6, 0xce, 0x79, 0x69, 0x90, 0x36, 0x8a, 0xd2, 0x9c, + 0x2e, 0xf4, 0x92, 0x9a, 0xf0, 0xf4, 0x38, 0xca, 0x6b, 0xca, 0x8d, 0xd9, 0x51, 0xba, 0x01, 0xf3, + 0xd1, 0x45, 0xaa, 0x60, 0x3a, 0xee, 0x49, 0xe5, 0x4c, 0xf9, 0xb6, 0xf5, 0x3c, 0x98, 0x98, 0x0f, + 0xc6, 0xf4, 0x23, 0x82, 0x46, 0xa6, 0xfd, 0x95, 0x5e, 0x57, 0xb1, 0x45, 0xd6, 0x50, 0xbd, 0xc1, + 0xa9, 0x6e, 0x2b, 0xdb, 0x73, 0x50, 0x85, 0xdc, 0x89, 0x0c, 0xd5, 0x85, 0x3d, 0x12, 0xbd, 0x63, + 0xb9, 0x7d, 0x3e, 0x44, 0x62, 0x25, 0x71, 0xe6, 0x58, 0xae, 0x36, 0xdd, 0xd6, 0xb2, 0xc6, 0x04, + 0xe8, 0x72, 0x4e, 0x23, 0xac, 0xca, 0x2e, 0x27, 0x79, 0x4b, 0xe9, 0x71, 0x92, 0x64, 0xac, 0xae, + 0xa7, 0xb1, 0x33, 0x2e, 0x12, 0xa6, 0x41, 0x15, 0xd3, 0xe0, 0xf9, 0x33, 0xd1, 0x1c, 0xd3, 0x1f, + 0x08, 0xf0, 0x3e, 0xa1, 0x7c, 0x91, 0x84, 0xae, 0x43, 0x29, 0xdb, 0x92, 0xa6, 0x96, 0xf4, 0x5a, + 0x94, 0x14, 0x4a, 0x78, 0x85, 0x52, 0x96, 0xf0, 0x87, 0x9c, 0x79, 0x57, 0xd9, 0x99, 0x93, 0x39, + 0x2a, 0x1c, 0xc9, 0xc8, 0x7f, 0x41, 0xb0, 0x7a, 0x3c, 0x9d, 0xe0, 0xad, 0x8a, 0x4e, 0x92, 0x1d, + 0xb7, 0x5a, 0x9d, 0x7a, 0xa1, 0xc4, 0xed, 0x72, 0xdc, 0x5b, 0xd8, 0xa8, 0xaf, 0xae, 0x29, 0xac, + 0xf8, 0xdf, 0x83, 0xbf, 0x46, 0x70, 0x4e, 0x8e, 0x32, 0xf8, 0x7a, 0x79, 0xe3, 0xc9, 0xb4, 0xeb, + 0x56, 0xd5, 0x4c, 0x91, 0x63, 0x99, 0xe5, 0xc7, 0x20, 0xff, 0x80, 0xa9, 0x47, 0xf8, 0x7b, 0x04, + 0x90, 0x0e, 0x3a, 0xa5, 0x55, 0xa3, 0x30, 0x0b, 0x55, 0x13, 0xdd, 0xe5, 0x44, 0xaf, 0x2b, 0x67, + 0x88, 0x0e, 0xbb, 0xbd, 0x6f, 0x10, 0x40, 0x3a, 0xd8, 0x94, 0x42, 0x15, 0x66, 0x9f, 0xd2, 0x1a, + 0x2f, 0x23, 0xa4, 0x9e, 0x25, 0x42, 0xdf, 0x22, 0x38, 0x27, 0x87, 0xa7, 0xd2, 0xdb, 0x3a, 0x39, + 0x5c, 0x55, 0xc7, 0x66, 0x87, 0xb3, 0xf4, 0x94, 0xdb, 0xf3, 0xb3, 0x74, 0xc3, 0xd8, 0xeb, 0x22, + 0xf5, 0x5e, 0x08, 0x57, 0x47, 0xbe, 0x7b, 0xba, 0x93, 0x7b, 0x6b, 0xe9, 0xec, 0xd5, 0x67, 0x51, + 0xe8, 0xa3, 0x8f, 0xba, 0x52, 0x69, 0xfb, 0x13, 0xcb, 0xb3, 0x35, 0x3f, 0xb4, 0x75, 0x9b, 0x78, + 0x3c, 0x46, 0xba, 0x30, 0x59, 0x81, 0x43, 0x73, 0xff, 0xcd, 0x7b, 0xfc, 0xcb, 0xaf, 0x0b, 0xcb, + 0xfb, 0x3b, 0x83, 0x77, 0x07, 0x8f, 0x56, 0xb8, 0xfc, 0xe6, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0xa7, 0xfa, 0xc3, 0xfb, 0xa2, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/queue.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/queue.pb.go new file mode 100644 index 0000000..72c1c34 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/queue.pb.go @@ -0,0 +1,577 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2/queue.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the queue. +type Queue_State int32 + +const ( + // Unspecified state. + Queue_STATE_UNSPECIFIED Queue_State = 0 + // The queue is running. 
Tasks can be dispatched. + // + // If the queue was created using Cloud Tasks and the queue has + // had no activity (method calls or task dispatches) for 30 days, + // the queue may take a few minutes to re-activate. Some method + // calls may return [NOT_FOUND][google.rpc.Code.NOT_FOUND] and + // tasks may not be dispatched for a few minutes until the queue + // has been re-activated. + Queue_RUNNING Queue_State = 1 + // Tasks are paused by the user. If the queue is paused then Cloud + // Tasks will stop delivering tasks from it, but more tasks can + // still be added to it by the user. + Queue_PAUSED Queue_State = 2 + // The queue is disabled. + // + // A queue becomes `DISABLED` when + // [queue.yaml](https://cloud.google.com/appengine/docs/python/config/queueref) + // or + // [queue.xml](https://cloud.google.com/appengine/docs/standard/java/config/queueref) + // is uploaded which does not contain the queue. You cannot directly disable + // a queue. + // + // When a queue is disabled, tasks can still be added to a queue + // but the tasks are not dispatched. + // + // To permanently delete this queue and all of its tasks, call + // [DeleteQueue][google.cloud.tasks.v2.CloudTasks.DeleteQueue]. + Queue_DISABLED Queue_State = 3 +) + +var Queue_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "PAUSED", + 3: "DISABLED", +} +var Queue_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "PAUSED": 2, + "DISABLED": 3, +} + +func (x Queue_State) String() string { + return proto.EnumName(Queue_State_name, int32(x)) +} +func (Queue_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_queue_a02b454f63d79936, []int{0, 0} +} + +// A queue is a container of related tasks. Queues are configured to manage +// how those tasks are dispatched. Configurable properties include rate limits, +// retry options, queue types, and others. +type Queue struct { + // Caller-specified and required in [CreateQueue][google.cloud.tasks.v2.CloudTasks.CreateQueue], + // after which it becomes output only. + // + // The queue name. + // + // The queue name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the queue's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Overrides for + // [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. + // These settings apply only to + // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this queue. + // + // If set, `app_engine_routing_override` is used for all + // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the queue, no matter what the + // setting is for the + // [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. 
+ AppEngineRoutingOverride *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing_override,json=appEngineRoutingOverride,proto3" json:"app_engine_routing_override,omitempty"` + // Rate limits for task dispatches. + // + // [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] and [retry_config][google.cloud.tasks.v2.Queue.retry_config] are + // related because they both control task attempts. However, they control task + // attempts in different ways: + // + // * [rate_limits][google.cloud.tasks.v2.Queue.rate_limits] controls the total rate of + // dispatches from a queue (i.e. all traffic dispatched from the + // queue, regardless of whether the dispatch is from a first + // attempt or a retry). + // * [retry_config][google.cloud.tasks.v2.Queue.retry_config] controls what happens to + // a particular task after its first attempt fails. That is, + // [retry_config][google.cloud.tasks.v2.Queue.retry_config] controls task retries (the + // second attempt, third attempt, etc). + // + // The queue's actual dispatch rate is the result of: + // + // * Number of tasks in the queue + // * User-specified throttling: [rate_limits][google.cloud.tasks.v2.Queue.rate_limits], + // [retry_config][google.cloud.tasks.v2.Queue.retry_config], and the + // [queue's state][google.cloud.tasks.v2.Queue.state]. + // * System throttling due to `429` (Too Many Requests) or `503` (Service + // Unavailable) responses from the worker, high error rates, or to smooth + // sudden large traffic spikes. + RateLimits *RateLimits `protobuf:"bytes,3,opt,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` + // Settings that determine the retry behavior. + // + // * For tasks created using Cloud Tasks: the queue-level retry settings + // apply to all tasks in the queue that were created using Cloud Tasks. + // Retry settings cannot be set on individual tasks. + // * For tasks created using the App Engine SDK: the queue-level retry + // settings apply to all tasks in the queue which do not have retry settings + // explicitly set on the task and were created by the App Engine SDK. See + // [App Engine + // documentation](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/retrying-tasks). + RetryConfig *RetryConfig `protobuf:"bytes,4,opt,name=retry_config,json=retryConfig,proto3" json:"retry_config,omitempty"` + // Output only. The state of the queue. + // + // `state` can only be changed by calling + // [PauseQueue][google.cloud.tasks.v2.CloudTasks.PauseQueue], + // [ResumeQueue][google.cloud.tasks.v2.CloudTasks.ResumeQueue], or uploading + // [queue.yaml/xml](https://cloud.google.com/appengine/docs/python/config/queueref). + // [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] cannot be used to change `state`. + State Queue_State `protobuf:"varint,5,opt,name=state,proto3,enum=google.cloud.tasks.v2.Queue_State" json:"state,omitempty"` + // Output only. The last time this queue was purged. + // + // All tasks that were [created][google.cloud.tasks.v2.Task.create_time] before this time + // were purged. + // + // A queue can be purged using [PurgeQueue][google.cloud.tasks.v2.CloudTasks.PurgeQueue], the + // [App Engine Task Queue SDK, or the Cloud + // Console](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/deleting-tasks-and-queues#purging_all_tasks_from_a_queue). + // + // Purge time will be truncated to the nearest microsecond. Purge + // time will be unset if the queue has never been purged.
+ PurgeTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=purge_time,json=purgeTime,proto3" json:"purge_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Queue) Reset() { *m = Queue{} } +func (m *Queue) String() string { return proto.CompactTextString(m) } +func (*Queue) ProtoMessage() {} +func (*Queue) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a02b454f63d79936, []int{0} +} +func (m *Queue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Queue.Unmarshal(m, b) +} +func (m *Queue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Queue.Marshal(b, m, deterministic) +} +func (dst *Queue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Queue.Merge(dst, src) +} +func (m *Queue) XXX_Size() int { + return xxx_messageInfo_Queue.Size(m) +} +func (m *Queue) XXX_DiscardUnknown() { + xxx_messageInfo_Queue.DiscardUnknown(m) +} + +var xxx_messageInfo_Queue proto.InternalMessageInfo + +func (m *Queue) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Queue) GetAppEngineRoutingOverride() *AppEngineRouting { + if m != nil { + return m.AppEngineRoutingOverride + } + return nil +} + +func (m *Queue) GetRateLimits() *RateLimits { + if m != nil { + return m.RateLimits + } + return nil +} + +func (m *Queue) GetRetryConfig() *RetryConfig { + if m != nil { + return m.RetryConfig + } + return nil +} + +func (m *Queue) GetState() Queue_State { + if m != nil { + return m.State + } + return Queue_STATE_UNSPECIFIED +} + +func (m *Queue) GetPurgeTime() *timestamp.Timestamp { + if m != nil { + return m.PurgeTime + } + return nil +} + +// Rate limits. +// +// This message determines the maximum rate that tasks can be dispatched by a +// queue, regardless of whether the dispatch is a first task attempt or a retry. +// +// Note: The debugging command, [RunTask][google.cloud.tasks.v2.CloudTasks.RunTask], will run a task +// even if the queue has reached its [RateLimits][google.cloud.tasks.v2.RateLimits]. +type RateLimits struct { + // The maximum rate at which tasks are dispatched from this queue. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // * For [App Engine queues][google.cloud.tasks.v2.AppEngineHttpQueue], the maximum allowed value + // is 500. + // + // + // This field has the same meaning as + // [rate in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate). + MaxDispatchesPerSecond float64 `protobuf:"fixed64,1,opt,name=max_dispatches_per_second,json=maxDispatchesPerSecond,proto3" json:"max_dispatches_per_second,omitempty"` + // Output only. The max burst size. + // + // Max burst size limits how fast tasks in queue are processed when + // many tasks are in the queue and the rate is high. This field + // allows the queue to have a high rate so processing starts shortly + // after a task is enqueued, but still limits resource usage when + // many tasks are enqueued in a short period of time. + // + // The [token bucket](https://wikipedia.org/wiki/Token_Bucket) + // algorithm is used to control the rate of task dispatches. Each + // queue has a token bucket that holds tokens, up to the maximum + // specified by `max_burst_size`. Each time a task is dispatched, a + // token is removed from the bucket. Tasks will be dispatched until + // the queue's bucket runs out of tokens. 
The bucket will be + // continuously refilled with new tokens based on + // [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + // + // Cloud Tasks will pick the value of `max_burst_size` based on the + // value of + // [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second]. + // + // For App Engine queues that were created or updated using + // `queue.yaml/xml`, `max_burst_size` is equal to + // [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size). + // Since `max_burst_size` is output only, if + // [UpdateQueue][google.cloud.tasks.v2.CloudTasks.UpdateQueue] is called on a queue + // created by `queue.yaml/xml`, `max_burst_size` will be reset based + // on the value of + // [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second], + // regardless of whether + // [max_dispatches_per_second][google.cloud.tasks.v2.RateLimits.max_dispatches_per_second] + // is updated. + // + MaxBurstSize int32 `protobuf:"varint,2,opt,name=max_burst_size,json=maxBurstSize,proto3" json:"max_burst_size,omitempty"` + // The maximum number of concurrent tasks that Cloud Tasks allows + // to be dispatched for this queue. After this threshold has been + // reached, Cloud Tasks stops dispatching tasks until the number of + // concurrent requests decreases. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // The maximum allowed value is 5,000. + // + // + // This field has the same meaning as + // [max_concurrent_requests in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests). + MaxConcurrentDispatches int32 `protobuf:"varint,3,opt,name=max_concurrent_dispatches,json=maxConcurrentDispatches,proto3" json:"max_concurrent_dispatches,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RateLimits) Reset() { *m = RateLimits{} } +func (m *RateLimits) String() string { return proto.CompactTextString(m) } +func (*RateLimits) ProtoMessage() {} +func (*RateLimits) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a02b454f63d79936, []int{1} +} +func (m *RateLimits) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RateLimits.Unmarshal(m, b) +} +func (m *RateLimits) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RateLimits.Marshal(b, m, deterministic) +} +func (dst *RateLimits) XXX_Merge(src proto.Message) { + xxx_messageInfo_RateLimits.Merge(dst, src) +} +func (m *RateLimits) XXX_Size() int { + return xxx_messageInfo_RateLimits.Size(m) +} +func (m *RateLimits) XXX_DiscardUnknown() { + xxx_messageInfo_RateLimits.DiscardUnknown(m) +} + +var xxx_messageInfo_RateLimits proto.InternalMessageInfo + +func (m *RateLimits) GetMaxDispatchesPerSecond() float64 { + if m != nil { + return m.MaxDispatchesPerSecond + } + return 0 +} + +func (m *RateLimits) GetMaxBurstSize() int32 { + if m != nil { + return m.MaxBurstSize + } + return 0 +} + +func (m *RateLimits) GetMaxConcurrentDispatches() int32 { + if m != nil { + return m.MaxConcurrentDispatches + } + return 0 +} + +// Retry config. +// +// These settings determine when a failed task attempt is retried. +type RetryConfig struct { + // Number of attempts per task. 
+ // + // Cloud Tasks will attempt the task `max_attempts` times (that is, if the + // first attempt fails, then there will be `max_attempts - 1` retries). Must + // be >= -1. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // -1 indicates unlimited attempts. + // + // This field has the same meaning as + // [task_retry_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxAttempts int32 `protobuf:"varint,1,opt,name=max_attempts,json=maxAttempts,proto3" json:"max_attempts,omitempty"` + // If positive, `max_retry_duration` specifies the time limit for + // retrying a failed task, measured from when the task was first + // attempted. Once `max_retry_duration` time has passed *and* the + // task has been attempted [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + // times, no further attempts will be made and the task will be + // deleted. + // + // If zero, then the task age is unlimited. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `max_retry_duration` will be truncated to the nearest second. + // + // This field has the same meaning as + // [task_age_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxRetryDuration *duration.Duration `protobuf:"bytes,2,opt,name=max_retry_duration,json=maxRetryDuration,proto3" json:"max_retry_duration,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2.Task.schedule_time] for retry between + // [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] duration after it fails, + // if the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies that the task should be + // retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `min_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [min_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MinBackoff *duration.Duration `protobuf:"bytes,3,opt,name=min_backoff,json=minBackoff,proto3" json:"min_backoff,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2.Task.schedule_time] for retry between + // [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] duration after it fails, + // if the queue's [RetryConfig][google.cloud.tasks.v2.RetryConfig] specifies that the task should be + // retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `max_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [max_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxBackoff *duration.Duration `protobuf:"bytes,4,opt,name=max_backoff,json=maxBackoff,proto3" json:"max_backoff,omitempty"` + // The time between retries will double `max_doublings` times. 
+ // + // A task's retry interval starts at + // [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff], then doubles + // `max_doublings` times, then increases linearly, and finally + // retries at intervals of + // [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] up to + // [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] times. + // + // For example, if [min_backoff][google.cloud.tasks.v2.RetryConfig.min_backoff] is 10s, + // [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] is 300s, and + // `max_doublings` is 3, then the task will first be retried in + // 10s. The retry interval will double three times, and then + // increase linearly by 2^3 * 10s. Finally, the task will retry at + // intervals of [max_backoff][google.cloud.tasks.v2.RetryConfig.max_backoff] until the + // task has been attempted [max_attempts][google.cloud.tasks.v2.RetryConfig.max_attempts] + // times. Thus, the requests will retry at 10s, 20s, 40s, 80s, 160s, + // 240s, 300s, 300s, .... + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // This field has the same meaning as + // [max_doublings in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxDoublings int32 `protobuf:"varint,5,opt,name=max_doublings,json=maxDoublings,proto3" json:"max_doublings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryConfig) Reset() { *m = RetryConfig{} } +func (m *RetryConfig) String() string { return proto.CompactTextString(m) } +func (*RetryConfig) ProtoMessage() {} +func (*RetryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a02b454f63d79936, []int{2} +} +func (m *RetryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryConfig.Unmarshal(m, b) +} +func (m *RetryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryConfig.Marshal(b, m, deterministic) +} +func (dst *RetryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryConfig.Merge(dst, src) +} +func (m *RetryConfig) XXX_Size() int { + return xxx_messageInfo_RetryConfig.Size(m) +} +func (m *RetryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RetryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryConfig proto.InternalMessageInfo + +func (m *RetryConfig) GetMaxAttempts() int32 { + if m != nil { + return m.MaxAttempts + } + return 0 +} + +func (m *RetryConfig) GetMaxRetryDuration() *duration.Duration { + if m != nil { + return m.MaxRetryDuration + } + return nil +} + +func (m *RetryConfig) GetMinBackoff() *duration.Duration { + if m != nil { + return m.MinBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxBackoff() *duration.Duration { + if m != nil { + return m.MaxBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxDoublings() int32 { + if m != nil { + return m.MaxDoublings + } + return 0 +} + +func init() { + proto.RegisterType((*Queue)(nil), "google.cloud.tasks.v2.Queue") + proto.RegisterType((*RateLimits)(nil), "google.cloud.tasks.v2.RateLimits") + proto.RegisterType((*RetryConfig)(nil), "google.cloud.tasks.v2.RetryConfig") + proto.RegisterEnum("google.cloud.tasks.v2.Queue_State", Queue_State_name, Queue_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2/queue.proto", fileDescriptor_queue_a02b454f63d79936) +} + +var fileDescriptor_queue_a02b454f63d79936 = []byte{ + // 643 bytes
of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0xd1, 0x4e, 0xdc, 0x3a, + 0x10, 0x86, 0x4f, 0x80, 0x70, 0x0e, 0x13, 0x0e, 0xda, 0x63, 0x89, 0xd3, 0xec, 0xb6, 0x6a, 0x61, + 0x5b, 0xa9, 0x5c, 0x25, 0x12, 0xbd, 0x29, 0xf4, 0x6a, 0x97, 0xa4, 0x68, 0x25, 0xb4, 0xdd, 0x3a, + 0x70, 0xd3, 0x9b, 0xc8, 0x9b, 0xf5, 0xa6, 0x16, 0x6b, 0x3b, 0xb5, 0x1d, 0x94, 0xf2, 0x20, 0x7d, + 0x87, 0xbe, 0x4a, 0x9f, 0xaa, 0x8a, 0x93, 0x00, 0xa2, 0xd0, 0xde, 0xd9, 0x33, 0xdf, 0xff, 0x4f, + 0x3c, 0x33, 0x0a, 0xec, 0xe7, 0x52, 0xe6, 0x2b, 0x1a, 0x66, 0x2b, 0x59, 0x2e, 0x42, 0x43, 0xf4, + 0xa5, 0x0e, 0xaf, 0x0e, 0xc3, 0x2f, 0x25, 0x2d, 0x69, 0x50, 0x28, 0x69, 0x24, 0xda, 0x6d, 0x90, + 0xc0, 0x22, 0x81, 0x45, 0x82, 0xab, 0xc3, 0xc1, 0xb3, 0x56, 0x49, 0x0a, 0x16, 0x12, 0x21, 0xa4, + 0x21, 0x86, 0x49, 0xa1, 0x1b, 0xd1, 0xa0, 0x7f, 0x27, 0xab, 0xa8, 0x96, 0xa5, 0xca, 0x5a, 0xbf, + 0xc1, 0xf0, 0xe1, 0x92, 0x86, 0xa8, 0x9c, 0x9a, 0x96, 0x79, 0xde, 0x32, 0xf6, 0x36, 0x2f, 0x97, + 0xe1, 0xa2, 0x54, 0xd6, 0xbf, 0xcd, 0xbf, 0xb8, 0x9f, 0x37, 0x8c, 0x53, 0x6d, 0x08, 0x2f, 0x1a, + 0x60, 0xf8, 0x63, 0x1d, 0xdc, 0x8f, 0xf5, 0x23, 0x10, 0x82, 0x0d, 0x41, 0x38, 0xf5, 0x9d, 0x3d, + 0xe7, 0x60, 0x0b, 0xdb, 0x33, 0x5a, 0xc2, 0x53, 0x52, 0x14, 0x29, 0x15, 0x39, 0x13, 0x34, 0x55, + 0xb2, 0x34, 0x4c, 0xe4, 0xa9, 0xbc, 0xa2, 0x4a, 0xb1, 0x05, 0xf5, 0xd7, 0xf6, 0x9c, 0x03, 0xef, + 0xf0, 0x75, 0xf0, 0xe0, 0xc3, 0x83, 0x51, 0x51, 0xc4, 0x56, 0x88, 0x1b, 0x1d, 0xf6, 0xc9, 0xbd, + 0xc8, 0x87, 0xd6, 0x08, 0x8d, 0xc1, 0x53, 0xc4, 0xd0, 0x74, 0xc5, 0x38, 0x33, 0xda, 0x5f, 0xb7, + 0xbe, 0xfb, 0x8f, 0xf8, 0x62, 0x62, 0xe8, 0x99, 0x05, 0x31, 0xa8, 0x9b, 0x33, 0x8a, 0x61, 0x5b, + 0x51, 0xa3, 0xbe, 0xa6, 0x99, 0x14, 0x4b, 0x96, 0xfb, 0x1b, 0xd6, 0x64, 0xf8, 0x98, 0x49, 0x8d, + 0x9e, 0x58, 0x12, 0x7b, 0xea, 0xf6, 0x82, 0xde, 0x82, 0xab, 0x0d, 0x31, 0xd4, 0x77, 0xf7, 0x9c, + 0x83, 0x9d, 0x47, 0xf5, 0xb6, 0x67, 0x41, 0x52, 0x93, 0xb8, 0x11, 0xa0, 0x23, 0x80, 0xa2, 0x54, + 0x39, 0x4d, 0xeb, 0x1e, 0xfb, 0x9b, 0xb6, 0xfc, 0xa0, 0x93, 0x77, 0x03, 0x08, 0xce, 0xbb, 0x01, + 0xe0, 0x2d, 0x4b, 0xd7, 0xf7, 0x61, 0x0c, 0xae, 0xb5, 0x42, 0xbb, 0xf0, 0x5f, 0x72, 0x3e, 0x3a, + 0x8f, 0xd3, 0x8b, 0x69, 0x32, 0x8b, 0x4f, 0x26, 0xef, 0x27, 0x71, 0xd4, 0xfb, 0x0b, 0x79, 0xf0, + 0x37, 0xbe, 0x98, 0x4e, 0x27, 0xd3, 0xd3, 0x9e, 0x83, 0x00, 0x36, 0x67, 0xa3, 0x8b, 0x24, 0x8e, + 0x7a, 0x6b, 0x68, 0x1b, 0xfe, 0x89, 0x26, 0xc9, 0x68, 0x7c, 0x16, 0x47, 0xbd, 0xf5, 0xe1, 0x77, + 0x07, 0xe0, 0xb6, 0x3b, 0xe8, 0x08, 0xfa, 0x9c, 0x54, 0xe9, 0x82, 0xe9, 0x82, 0x98, 0xec, 0x33, + 0xd5, 0x69, 0x41, 0x55, 0xaa, 0x69, 0x26, 0xc5, 0xc2, 0x8e, 0xd9, 0xc1, 0xff, 0x73, 0x52, 0x45, + 0x37, 0xf9, 0x19, 0x55, 0x89, 0xcd, 0xa2, 0x57, 0xb0, 0x53, 0x4b, 0xe7, 0xa5, 0xd2, 0x26, 0xd5, + 0xec, 0xba, 0x99, 0xb5, 0x8b, 0xb7, 0x39, 0xa9, 0xc6, 0x75, 0x30, 0x61, 0xd7, 0x14, 0x1d, 0x37, + 0x05, 0x32, 0x29, 0xb2, 0x52, 0x29, 0x2a, 0xcc, 0x9d, 0x5a, 0x76, 0x88, 0x2e, 0x7e, 0xc2, 0x49, + 0x75, 0x72, 0x93, 0xbf, 0x2d, 0x35, 0xfc, 0xb6, 0x06, 0xde, 0x9d, 0x21, 0xa0, 0x7d, 0xa8, 0xbd, + 0x53, 0x62, 0x0c, 0xe5, 0x85, 0xd1, 0xf6, 0xfb, 0x5c, 0xec, 0x71, 0x52, 0x8d, 0xda, 0x10, 0x3a, + 0x05, 0x54, 0x23, 0xcd, 0x94, 0xbb, 0x45, 0x6f, 0x97, 0xb0, 0xff, 0x4b, 0xa3, 0xa3, 0x16, 0xc0, + 0x3d, 0x4e, 0x2a, 0x5b, 0xa9, 0x8b, 0xa0, 0x63, 0xf0, 0x38, 0x13, 0xe9, 0x9c, 0x64, 0x97, 0x72, + 0xb9, 0x6c, 0xd7, 0xed, 0x37, 0x0e, 0xc0, 0x99, 0x18, 0x37, 0xb0, 0xd5, 0xd6, 0x9d, 0x69, 0xb5, + 0x1b, 0x7f, 0xd6, 0x92, 0xaa, 0xd3, 0xbe, 0x84, 0x7f, 0xed, 0x40, 0x64, 0x39, 0x5f, 0x31, 
0x91, + 0x6b, 0xbb, 0x63, 0x4d, 0x53, 0xa3, 0x2e, 0x36, 0xa6, 0xd0, 0xcf, 0x24, 0x7f, 0x78, 0xed, 0xc6, + 0x60, 0xf7, 0x6e, 0x56, 0x57, 0x99, 0x39, 0x9f, 0x8e, 0x5b, 0x28, 0x97, 0x2b, 0x22, 0xf2, 0x40, + 0xaa, 0x3c, 0xcc, 0xa9, 0xb0, 0xdf, 0x10, 0x36, 0x29, 0x52, 0x30, 0x7d, 0xef, 0x07, 0xf2, 0xce, + 0x1e, 0xe6, 0x9b, 0x16, 0x7b, 0xf3, 0x33, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x78, 0xca, 0x5c, 0xd9, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/target.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/target.pb.go new file mode 100644 index 0000000..3329054 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/target.pb.go @@ -0,0 +1,447 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2/target.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The HTTP method used to deliver the task. +type HttpMethod int32 + +const ( + // HTTP method unspecified + HttpMethod_HTTP_METHOD_UNSPECIFIED HttpMethod = 0 + // HTTP POST + HttpMethod_POST HttpMethod = 1 + // HTTP GET + HttpMethod_GET HttpMethod = 2 + // HTTP HEAD + HttpMethod_HEAD HttpMethod = 3 + // HTTP PUT + HttpMethod_PUT HttpMethod = 4 + // HTTP DELETE + HttpMethod_DELETE HttpMethod = 5 + // HTTP PATCH + HttpMethod_PATCH HttpMethod = 6 + // HTTP OPTIONS + HttpMethod_OPTIONS HttpMethod = 7 +) + +var HttpMethod_name = map[int32]string{ + 0: "HTTP_METHOD_UNSPECIFIED", + 1: "POST", + 2: "GET", + 3: "HEAD", + 4: "PUT", + 5: "DELETE", + 6: "PATCH", + 7: "OPTIONS", +} +var HttpMethod_value = map[string]int32{ + "HTTP_METHOD_UNSPECIFIED": 0, + "POST": 1, + "GET": 2, + "HEAD": 3, + "PUT": 4, + "DELETE": 5, + "PATCH": 6, + "OPTIONS": 7, +} + +func (x HttpMethod) String() string { + return proto.EnumName(HttpMethod_name, int32(x)) +} +func (HttpMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_83a965c577901646, []int{0} +} + +// App Engine HTTP request. +// +// The message defines the HTTP request that is sent to an App Engine app when +// the task is dispatched. +// +// This proto can only be used for tasks in a queue which has +// [app_engine_http_queue][Queue.app_engine_http_queue] set. +// +// Using [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] requires +// [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) +// Google IAM permission for the project +// and the following scope: +// +// `https://www.googleapis.com/auth/cloud-platform` +// +// The task will be delivered to the App Engine app which belongs to the same +// project as the queue. 
For more information, see +// [How Requests are +// Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) +// and how routing is affected by +// [dispatch +// files](https://cloud.google.com/appengine/docs/python/config/dispatchref). +// Traffic is encrypted during transport and never leaves Google datacenters. +// Because this traffic is carried over a communication mechanism internal to +// Google, you cannot explicitly set the protocol (for example, HTTP or HTTPS). +// The request to the handler, however, will appear to have used the HTTP +// protocol. +// +// The [AppEngineRouting][google.cloud.tasks.v2.AppEngineRouting] used to construct the URL that the task is +// delivered to can be set at the queue-level or task-level: +// +// * If set, +// [app_engine_routing_override][google.cloud.tasks.v2.AppEngineHttpQueue.app_engine_routing_override] +// is used for all tasks in the queue, no matter what the setting +// is for the +// [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. +// +// +// The `url` that the task will be sent to is: +// +// * `url =` [host][google.cloud.tasks.v2.AppEngineRouting.host] `+` +// [relative_uri][google.cloud.tasks.v2.AppEngineHttpRequest.relative_uri] +// +// Tasks can be dispatched to secure app handlers, unsecure app handlers, and +// URIs restricted with +// [`login: +// admin`](https://cloud.google.com/appengine/docs/standard/python/config/appref). +// Because tasks are not run as any user, they cannot be dispatched to URIs +// restricted with +// [`login: +// required`](https://cloud.google.com/appengine/docs/standard/python/config/appref) +// Task dispatches also do not follow redirects. +// +// The task attempt has succeeded if the app's request handler returns +// an HTTP response code in the range [`200` - `299`]. `503` is +// considered an App Engine system error instead of an application +// error. Requests returning error `503` will be retried regardless of +// retry configuration and not counted against retry counts. +// Any other response code or a failure to receive a response before the +// deadline is a failed attempt. +type AppEngineHttpRequest struct { + // The HTTP method to use for the request. The default is POST. + // + // The app's request handler for the task's target URL must be able to handle + // HTTP requests with this http_method, otherwise the task attempt will fail + // with error code 405 (Method Not Allowed). See + // [Writing a push task request + // handler](https://cloud.google.com/appengine/docs/java/taskqueue/push/creating-handlers#writing_a_push_task_request_handler) + // and the documentation for the request handlers in the language your app is + // written in e.g. + // [Python Request + // Handler](https://cloud.google.com/appengine/docs/python/tools/webapp/requesthandlerclass). + HttpMethod HttpMethod `protobuf:"varint,1,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.tasks.v2.HttpMethod" json:"http_method,omitempty"` + // Task-level setting for App Engine routing. + // + // If set, + // [app_engine_routing_override][google.cloud.tasks.v2.AppEngineHttpQueue.app_engine_routing_override] + // is used for all tasks in the queue, no matter what the setting is for the + // [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. 
+ AppEngineRouting *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing,json=appEngineRouting,proto3" json:"app_engine_routing,omitempty"` + // The relative URI. + // + // The relative URI must begin with "/" and must be a valid HTTP relative URI. + // It can contain a path and query string arguments. + // If the relative URI is empty, then the root path "/" will be used. + // No spaces are allowed, and the maximum length allowed is 2083 characters. + RelativeUri string `protobuf:"bytes,3,opt,name=relative_uri,json=relativeUri,proto3" json:"relative_uri,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + // Repeated headers are not supported but a header value can contain commas. + // + // Cloud Tasks sets some headers to default values: + // + // * `User-Agent`: By default, this header is + // `"AppEngine-Google; (+http://code.google.com/appengine)"`. + // This header can be modified, but Cloud Tasks will append + // `"AppEngine-Google; (+http://code.google.com/appengine)"` to the + // modified `User-Agent`. + // + // If the task has a [body][google.cloud.tasks.v2.AppEngineHttpRequest.body], Cloud + // Tasks sets the following headers: + // + // * `Content-Type`: By default, the `Content-Type` header is set to + // `"application/octet-stream"`. The default can be overridden by explicitly + // setting `Content-Type` to a particular media type when the + // [task is created][google.cloud.tasks.v2.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/json"`. + // * `Content-Length`: This is computed by Cloud Tasks. This value is + // output only. It cannot be changed. + // + // The headers below cannot be set or overridden: + // + // * `Host` + // * `X-Google-*` + // * `X-AppEngine-*` + // + // In addition, Cloud Tasks sets some headers when the task is dispatched, + // such as headers containing information about the task; see + // [request + // headers](https://cloud.google.com/appengine/docs/python/taskqueue/push/creating-handlers#reading_request_headers). + // These headers are set only when the task is dispatched, so they are not + // visible when the task is returned in a Cloud Tasks response. + // + // Although there is no specific limit for the maximum number of headers or + // the size, there is a limit on the maximum size of the [Task][google.cloud.tasks.v2.Task]. For more + // information, see the [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask] documentation. + Headers map[string]string `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // HTTP request body. + // + // A request body is allowed only if the HTTP method is POST or PUT. It is + // an error to set a body on a task with an incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. 
+ Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpRequest) Reset() { *m = AppEngineHttpRequest{} } +func (m *AppEngineHttpRequest) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpRequest) ProtoMessage() {} +func (*AppEngineHttpRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_target_83a965c577901646, []int{0} +} +func (m *AppEngineHttpRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpRequest.Unmarshal(m, b) +} +func (m *AppEngineHttpRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpRequest.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpRequest.Merge(dst, src) +} +func (m *AppEngineHttpRequest) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpRequest.Size(m) +} +func (m *AppEngineHttpRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpRequest proto.InternalMessageInfo + +func (m *AppEngineHttpRequest) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *AppEngineHttpRequest) GetAppEngineRouting() *AppEngineRouting { + if m != nil { + return m.AppEngineRouting + } + return nil +} + +func (m *AppEngineHttpRequest) GetRelativeUri() string { + if m != nil { + return m.RelativeUri + } + return "" +} + +func (m *AppEngineHttpRequest) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *AppEngineHttpRequest) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// App Engine Routing. +// +// Defines routing characteristics specific to App Engine - service, version, +// and instance. +// +// For more information about services, versions, and instances see +// [An Overview of App +// Engine](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine), +// [Microservices Architecture on Google App +// Engine](https://cloud.google.com/appengine/docs/python/microservices-on-app-engine), +// [App Engine Standard request +// routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed), +// and [App Engine Flex request +// routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). +type AppEngineRouting struct { + // App service. + // + // By default, the task is sent to the service which is the default + // service when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2.AppEngineRouting.host] is not parsable + // into [service][google.cloud.tasks.v2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. For example, some tasks + // which were created using the App Engine SDK use a custom domain + // name; custom domains are not parsed by Cloud Tasks. 
If + // [host][google.cloud.tasks.v2.AppEngineRouting.host] is not parsable, then + // [service][google.cloud.tasks.v2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2.AppEngineRouting.instance] are the empty string. + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + // App version. + // + // By default, the task is sent to the version which is the default + // version when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2.AppEngineRouting.host] is not parsable + // into [service][google.cloud.tasks.v2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. For example, some tasks + // which were created using the App Engine SDK use a custom domain + // name; custom domains are not parsed by Cloud Tasks. If + // [host][google.cloud.tasks.v2.AppEngineRouting.host] is not parsable, then + // [service][google.cloud.tasks.v2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2.AppEngineRouting.instance] are the empty string. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // App instance. + // + // By default, the task is sent to an instance which is available when + // the task is attempted. + // + // Requests can only be sent to a specific instance if + // [manual scaling is used in App Engine + // Standard](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes). + // App Engine Flex does not support instances. For more information, see + // [App Engine Standard request + // routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) + // and [App Engine Flex request + // routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). + Instance string `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // Output only. The host that the task is sent to. + // + // The host is constructed from the domain name of the app associated with + // the queue's project ID (for example .appspot.com), and the + // [service][google.cloud.tasks.v2.AppEngineRouting.service], [version][google.cloud.tasks.v2.AppEngineRouting.version], + // and [instance][google.cloud.tasks.v2.AppEngineRouting.instance]. Tasks which were created using + // the App Engine SDK might have a custom domain name. + // + // For more information, see + // [How Requests are + // Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed). 
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineRouting) Reset() { *m = AppEngineRouting{} } +func (m *AppEngineRouting) String() string { return proto.CompactTextString(m) } +func (*AppEngineRouting) ProtoMessage() {} +func (*AppEngineRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_target_83a965c577901646, []int{1} +} +func (m *AppEngineRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineRouting.Unmarshal(m, b) +} +func (m *AppEngineRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineRouting.Marshal(b, m, deterministic) +} +func (dst *AppEngineRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineRouting.Merge(dst, src) +} +func (m *AppEngineRouting) XXX_Size() int { + return xxx_messageInfo_AppEngineRouting.Size(m) +} +func (m *AppEngineRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineRouting proto.InternalMessageInfo + +func (m *AppEngineRouting) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AppEngineRouting) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *AppEngineRouting) GetInstance() string { + if m != nil { + return m.Instance + } + return "" +} + +func (m *AppEngineRouting) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func init() { + proto.RegisterType((*AppEngineHttpRequest)(nil), "google.cloud.tasks.v2.AppEngineHttpRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry") + proto.RegisterType((*AppEngineRouting)(nil), "google.cloud.tasks.v2.AppEngineRouting") + proto.RegisterEnum("google.cloud.tasks.v2.HttpMethod", HttpMethod_name, HttpMethod_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2/target.proto", fileDescriptor_target_83a965c577901646) +} + +var fileDescriptor_target_83a965c577901646 = []byte{ + // 490 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xcf, 0x6f, 0xda, 0x30, + 0x18, 0x5d, 0xf8, 0x51, 0xca, 0x17, 0x34, 0x59, 0x56, 0xa7, 0x65, 0x6c, 0x07, 0xca, 0x65, 0x68, + 0x87, 0x44, 0x62, 0x97, 0x8a, 0x9d, 0xa0, 0x64, 0x0b, 0xd2, 0x5a, 0xa2, 0x60, 0x2e, 0xbb, 0x44, + 0x2e, 0x78, 0xc1, 0x2a, 0xb5, 0x33, 0xdb, 0x44, 0xe2, 0x3f, 0xdf, 0x71, 0x8a, 0x03, 0xac, 0x42, + 0x9d, 0x76, 0x7b, 0xdf, 0xf3, 0x7b, 0xcf, 0xfe, 0xec, 0xcf, 0xd0, 0xcf, 0xa4, 0xcc, 0xb6, 0x2c, + 0x58, 0x6d, 0xe5, 0x6e, 0x1d, 0x18, 0xaa, 0x1f, 0x75, 0x50, 0x0c, 0x03, 0x43, 0x55, 0xc6, 0x8c, + 0x9f, 0x2b, 0x69, 0x24, 0x7e, 0x53, 0x69, 0x7c, 0xab, 0xf1, 0xad, 0xc6, 0x2f, 0x86, 0xdd, 0x0f, + 0x07, 0x2b, 0xcd, 0x79, 0x40, 0x85, 0x90, 0x86, 0x1a, 0x2e, 0x85, 0xae, 0x4c, 0xfd, 0xdf, 0x35, + 0xb8, 0x1a, 0xe7, 0x79, 0x28, 0x32, 0x2e, 0x58, 0x64, 0x4c, 0x9e, 0xb0, 0x5f, 0x3b, 0xa6, 0x0d, + 0x9e, 0x80, 0xbb, 0x31, 0x26, 0x4f, 0x9f, 0x98, 0xd9, 0xc8, 0xb5, 0xe7, 0xf4, 0x9c, 0xc1, 0xeb, + 0xe1, 0xb5, 0xff, 0xe2, 0x1e, 0x7e, 0x69, 0xbc, 0xb3, 0xc2, 0x04, 0x36, 0x27, 0x8c, 0x97, 0x80, + 0x69, 0x9e, 0xa7, 0xcc, 0x86, 0xa7, 0x4a, 0xee, 0x0c, 0x17, 0x99, 0x57, 0xeb, 0x39, 0x03, 0x77, + 0xf8, 0xf1, 0x1f, 0x51, 0xa7, 0xc3, 0x24, 0x95, 0x3c, 0x41, 0xf4, 0x8c, 0xc1, 0xd7, 0xd0, 0x51, + 0x6c, 0x4b, 0x0d, 0x2f, 0x58, 0xba, 0x53, 0xdc, 0xab, 
0xf7, 0x9c, 0x41, 0x3b, 0x71, 0x8f, 0xdc, + 0x52, 0x71, 0x9c, 0x40, 0x6b, 0xc3, 0xe8, 0x9a, 0x29, 0xed, 0x35, 0x7a, 0xf5, 0x81, 0x3b, 0xbc, + 0xf9, 0xdf, 0x76, 0xcf, 0x7a, 0xf7, 0xa3, 0xca, 0x1a, 0x0a, 0xa3, 0xf6, 0xc9, 0x31, 0x08, 0x63, + 0x68, 0x3c, 0xc8, 0xf5, 0xde, 0x6b, 0xf6, 0x9c, 0x41, 0x27, 0xb1, 0xb8, 0x3b, 0x82, 0xce, 0x73, + 0x31, 0x46, 0x50, 0x7f, 0x64, 0x7b, 0x7b, 0x5b, 0xed, 0xa4, 0x84, 0xf8, 0x0a, 0x9a, 0x05, 0xdd, + 0xee, 0x98, 0x6d, 0xbb, 0x9d, 0x54, 0xc5, 0xa8, 0x76, 0xe3, 0xf4, 0x0b, 0x40, 0xe7, 0xcd, 0x62, + 0x0f, 0x5a, 0x9a, 0xa9, 0x82, 0xaf, 0xd8, 0x21, 0xe3, 0x58, 0x96, 0x2b, 0x05, 0x53, 0x9a, 0x4b, + 0x71, 0x48, 0x3a, 0x96, 0xb8, 0x0b, 0x97, 0x5c, 0x68, 0x43, 0xc5, 0x8a, 0x1d, 0xae, 0xe2, 0x54, + 0x97, 0x67, 0xde, 0x48, 0x6d, 0xbc, 0x86, 0xe5, 0x2d, 0xfe, 0xa4, 0x01, 0xfe, 0xbe, 0x17, 0x7e, + 0x0f, 0x6f, 0x23, 0x42, 0xe2, 0xf4, 0x2e, 0x24, 0xd1, 0x7c, 0x9a, 0x2e, 0xef, 0x17, 0x71, 0x78, + 0x3b, 0xfb, 0x3a, 0x0b, 0xa7, 0xe8, 0x15, 0xbe, 0x84, 0x46, 0x3c, 0x5f, 0x10, 0xe4, 0xe0, 0x16, + 0xd4, 0xbf, 0x85, 0x04, 0xd5, 0x4a, 0x2a, 0x0a, 0xc7, 0x53, 0x54, 0x2f, 0xa9, 0x78, 0x49, 0x50, + 0x03, 0x03, 0x5c, 0x4c, 0xc3, 0xef, 0x21, 0x09, 0x51, 0x13, 0xb7, 0xa1, 0x19, 0x8f, 0xc9, 0x6d, + 0x84, 0x2e, 0xb0, 0x0b, 0xad, 0x79, 0x4c, 0x66, 0xf3, 0xfb, 0x05, 0x6a, 0x4d, 0x7e, 0xc2, 0xbb, + 0x95, 0x7c, 0x7a, 0xf9, 0x11, 0x26, 0x2e, 0xb1, 0x73, 0x1c, 0x97, 0x13, 0x19, 0x3b, 0x3f, 0x46, + 0x07, 0x55, 0x26, 0xb7, 0x54, 0x64, 0xbe, 0x54, 0x59, 0x90, 0x31, 0x61, 0xe7, 0x35, 0xa8, 0x96, + 0x68, 0xce, 0xf5, 0xd9, 0x5f, 0xf8, 0x62, 0xc1, 0xc3, 0x85, 0x95, 0x7d, 0xfe, 0x13, 0x00, 0x00, + 0xff, 0xff, 0xb4, 0x23, 0x16, 0x6a, 0x31, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/task.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/task.pb.go new file mode 100644 index 0000000..a0a7926 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2/task.pb.go @@ -0,0 +1,453 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2/task.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The view specifies a subset of [Task][google.cloud.tasks.v2.Task] data. +// +// When a task is returned in a response, not all +// information is retrieved by default because some data, such as +// payloads, might be desirable to return only when needed because +// of its large size or because of the sensitivity of data that it +// contains. +type Task_View int32 + +const ( + // Unspecified. Defaults to BASIC. + Task_VIEW_UNSPECIFIED Task_View = 0 + // The basic view omits fields which can be large or can contain + // sensitive data. 
+ // + // This view does not include the + // [body in AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest.body]. + // Bodies are desirable to return only when needed, because they + // can be large and because of the sensitivity of the data that you + // choose to store in it. + Task_BASIC Task_View = 1 + // All information is returned. + // + // Authorization for [FULL][google.cloud.tasks.v2.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Queue][google.cloud.tasks.v2.Queue] resource. + Task_FULL Task_View = 2 +) + +var Task_View_name = map[int32]string{ + 0: "VIEW_UNSPECIFIED", + 1: "BASIC", + 2: "FULL", +} +var Task_View_value = map[string]int32{ + "VIEW_UNSPECIFIED": 0, + "BASIC": 1, + "FULL": 2, +} + +func (x Task_View) String() string { + return proto.EnumName(Task_View_name, int32(x)) +} +func (Task_View) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_task_798b1eb2476e0fcb, []int{0, 0} +} + +// A unit of scheduled work. +type Task struct { + // Optionally caller-specified in [CreateTask][google.cloud.tasks.v2.CloudTasks.CreateTask]. + // + // The task name. + // + // The task name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the task's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. + // * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), or underscores (_). The maximum length is 500 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The message to send to the worker. + // + // Types that are valid to be assigned to MessageType: + // *Task_AppEngineHttpRequest + MessageType isTask_MessageType `protobuf_oneof:"message_type"` + // The time when the task is scheduled to be attempted. + // + // For App Engine queues, this is when the task will be attempted or retried. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that the task was created. + // + // `create_time` will be truncated to the nearest second. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The deadline for requests sent to the worker. If the worker does not + // respond by this deadline then the request is cancelled and the attempt + // is marked as a `DEADLINE_EXCEEDED` failure. Cloud Tasks will retry the + // task according to the [RetryConfig][google.cloud.tasks.v2.RetryConfig]. + // + // Note that when the request is cancelled, Cloud Tasks will stop listing for + // the response, but whether the worker stops processing depends on the + // worker. 
For example, if the worker is stuck, it may not react to cancelled + // requests. + // + // The default and maximum values depend on the type of request: + // + // + // * For [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 indicates that the + // request has the default deadline. The default deadline depends on the + // [scaling + // type](https://cloud.google.com/appengine/docs/standard/go/how-instances-are-managed#instance_scaling) + // of the service: 10 minutes for standard apps with automatic scaling, 24 + // hours for standard apps with manual and basic scaling, and 60 minutes for + // flex apps. If the request deadline is set, it must be in the interval [15 + // seconds, 24 hours 15 seconds]. Regardless of the task's + // `dispatch_deadline`, the app handler will not run for longer than than + // the service's timeout. We recommend setting the `dispatch_deadline` to + // at most a few seconds more than the app handler's timeout. For more + // information see + // [Timeouts](https://cloud.google.com/tasks/docs/creating-appengine-handlers#timeouts). + // + // `dispatch_deadline` will be truncated to the nearest millisecond. The + // deadline is an approximate deadline. + DispatchDeadline *duration.Duration `protobuf:"bytes,6,opt,name=dispatch_deadline,json=dispatchDeadline,proto3" json:"dispatch_deadline,omitempty"` + // Output only. The number of attempts dispatched. + // + // This count includes attempts which have been dispatched but haven't + // received a response. + DispatchCount int32 `protobuf:"varint,7,opt,name=dispatch_count,json=dispatchCount,proto3" json:"dispatch_count,omitempty"` + // Output only. The number of attempts which have received a response. + ResponseCount int32 `protobuf:"varint,8,opt,name=response_count,json=responseCount,proto3" json:"response_count,omitempty"` + // Output only. The status of the task's first attempt. + // + // Only [dispatch_time][google.cloud.tasks.v2.Attempt.dispatch_time] will be set. + // The other [Attempt][google.cloud.tasks.v2.Attempt] information is not retained by Cloud Tasks. + FirstAttempt *Attempt `protobuf:"bytes,9,opt,name=first_attempt,json=firstAttempt,proto3" json:"first_attempt,omitempty"` + // Output only. The status of the task's last attempt. + LastAttempt *Attempt `protobuf:"bytes,10,opt,name=last_attempt,json=lastAttempt,proto3" json:"last_attempt,omitempty"` + // Output only. The view specifies which subset of the [Task][google.cloud.tasks.v2.Task] has + // been returned. 
+ View Task_View `protobuf:"varint,11,opt,name=view,proto3,enum=google.cloud.tasks.v2.Task_View" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_task_798b1eb2476e0fcb, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} +func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isTask_MessageType interface { + isTask_MessageType() +} + +type Task_AppEngineHttpRequest struct { + AppEngineHttpRequest *AppEngineHttpRequest `protobuf:"bytes,2,opt,name=app_engine_http_request,json=appEngineHttpRequest,proto3,oneof"` +} + +func (*Task_AppEngineHttpRequest) isTask_MessageType() {} + +func (m *Task) GetMessageType() isTask_MessageType { + if m != nil { + return m.MessageType + } + return nil +} + +func (m *Task) GetAppEngineHttpRequest() *AppEngineHttpRequest { + if x, ok := m.GetMessageType().(*Task_AppEngineHttpRequest); ok { + return x.AppEngineHttpRequest + } + return nil +} + +func (m *Task) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Task) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Task) GetDispatchDeadline() *duration.Duration { + if m != nil { + return m.DispatchDeadline + } + return nil +} + +func (m *Task) GetDispatchCount() int32 { + if m != nil { + return m.DispatchCount + } + return 0 +} + +func (m *Task) GetResponseCount() int32 { + if m != nil { + return m.ResponseCount + } + return 0 +} + +func (m *Task) GetFirstAttempt() *Attempt { + if m != nil { + return m.FirstAttempt + } + return nil +} + +func (m *Task) GetLastAttempt() *Attempt { + if m != nil { + return m.LastAttempt + } + return nil +} + +func (m *Task) GetView() Task_View { + if m != nil { + return m.View + } + return Task_VIEW_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Task) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Task_OneofMarshaler, _Task_OneofUnmarshaler, _Task_OneofSizer, []interface{}{ + (*Task_AppEngineHttpRequest)(nil), + } +} + +func _Task_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Task) + // message_type + switch x := m.MessageType.(type) { + case *Task_AppEngineHttpRequest: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpRequest); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Task.MessageType has unexpected type %T", x) + } + return nil +} + +func _Task_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Task) + switch tag { + case 2: // message_type.app_engine_http_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpRequest) + err := b.DecodeMessage(msg) + m.MessageType = &Task_AppEngineHttpRequest{msg} + return true, err + default: + return false, nil + } +} + +func _Task_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Task) + // message_type + switch x := m.MessageType.(type) { + case *Task_AppEngineHttpRequest: + s := proto.Size(x.AppEngineHttpRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The status of a task attempt. +type Attempt struct { + // Output only. The time that this attempt was scheduled. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that this attempt was dispatched. + // + // `dispatch_time` will be truncated to the nearest microsecond. + DispatchTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=dispatch_time,json=dispatchTime,proto3" json:"dispatch_time,omitempty"` + // Output only. The time that this attempt response was received. + // + // `response_time` will be truncated to the nearest microsecond. + ResponseTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=response_time,json=responseTime,proto3" json:"response_time,omitempty"` + // Output only. The response from the worker for this attempt. + // + // If `response_time` is unset, then the task has not been attempted or is + // currently running and the `response_status` field is meaningless. 
+ ResponseStatus *status.Status `protobuf:"bytes,4,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Attempt) Reset() { *m = Attempt{} } +func (m *Attempt) String() string { return proto.CompactTextString(m) } +func (*Attempt) ProtoMessage() {} +func (*Attempt) Descriptor() ([]byte, []int) { + return fileDescriptor_task_798b1eb2476e0fcb, []int{1} +} +func (m *Attempt) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Attempt.Unmarshal(m, b) +} +func (m *Attempt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Attempt.Marshal(b, m, deterministic) +} +func (dst *Attempt) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attempt.Merge(dst, src) +} +func (m *Attempt) XXX_Size() int { + return xxx_messageInfo_Attempt.Size(m) +} +func (m *Attempt) XXX_DiscardUnknown() { + xxx_messageInfo_Attempt.DiscardUnknown(m) +} + +var xxx_messageInfo_Attempt proto.InternalMessageInfo + +func (m *Attempt) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Attempt) GetDispatchTime() *timestamp.Timestamp { + if m != nil { + return m.DispatchTime + } + return nil +} + +func (m *Attempt) GetResponseTime() *timestamp.Timestamp { + if m != nil { + return m.ResponseTime + } + return nil +} + +func (m *Attempt) GetResponseStatus() *status.Status { + if m != nil { + return m.ResponseStatus + } + return nil +} + +func init() { + proto.RegisterType((*Task)(nil), "google.cloud.tasks.v2.Task") + proto.RegisterType((*Attempt)(nil), "google.cloud.tasks.v2.Attempt") + proto.RegisterEnum("google.cloud.tasks.v2.Task_View", Task_View_name, Task_View_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2/task.proto", fileDescriptor_task_798b1eb2476e0fcb) +} + +var fileDescriptor_task_798b1eb2476e0fcb = []byte{ + // 579 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x51, 0x6b, 0xdb, 0x30, + 0x10, 0xc7, 0xeb, 0xcc, 0x69, 0x1b, 0x25, 0xed, 0x32, 0xd1, 0x51, 0xb7, 0x8c, 0xce, 0x04, 0x06, + 0x81, 0x81, 0xcd, 0xb2, 0x3d, 0xad, 0x0f, 0xa5, 0x49, 0x53, 0x1a, 0x28, 0xa3, 0x38, 0x6d, 0x07, + 0x7b, 0x31, 0xaa, 0x7d, 0x75, 0x4c, 0x6d, 0x4b, 0x93, 0xe4, 0x94, 0x7d, 0x84, 0x7d, 0x80, 0x7d, + 0xdf, 0x61, 0x59, 0x0a, 0xa5, 0x4d, 0xc8, 0xf6, 0xa6, 0xbb, 0xfb, 0xff, 0x7f, 0x3e, 0x9d, 0x2e, + 0x41, 0x6e, 0x42, 0x69, 0x92, 0x81, 0x1f, 0x65, 0xb4, 0x8c, 0x7d, 0x49, 0xc4, 0x83, 0xf0, 0xe7, + 0x03, 0x75, 0xf0, 0x18, 0xa7, 0x92, 0xe2, 0xb7, 0xb5, 0xc2, 0x53, 0x0a, 0x4f, 0x29, 0xbc, 0xf9, + 0xe0, 0xf0, 0x9d, 0x36, 0x12, 0x96, 0xfa, 0xa4, 0x28, 0xa8, 0x24, 0x32, 0xa5, 0x85, 0xa8, 0x4d, + 0x87, 0x07, 0x4f, 0xaa, 0x1c, 0x04, 0x2d, 0x79, 0x04, 0xba, 0xd4, 0x5b, 0xf5, 0x45, 0x9e, 0x80, + 0xd4, 0x9a, 0x23, 0xad, 0x51, 0xd1, 0x5d, 0x79, 0xef, 0xc7, 0x25, 0x57, 0x7c, 0x5d, 0x7f, 0xff, + 0xbc, 0x2e, 0xd3, 0x1c, 0x84, 0x24, 0x39, 0xd3, 0x82, 0x7d, 0x2d, 0xe0, 0x2c, 0xf2, 0x85, 0x24, + 0xb2, 0xd4, 0x8d, 0xf5, 0xfe, 0x34, 0x91, 0x7d, 0x4d, 0xc4, 0x03, 0xc6, 0xc8, 0x2e, 0x48, 0x0e, + 0x8e, 0xe5, 0x5a, 0xfd, 0x56, 0xa0, 0xce, 0x38, 0x46, 0xfb, 0x84, 0xb1, 0x10, 0x8a, 0x24, 0x2d, + 0x20, 0x9c, 0x49, 0xc9, 0x42, 0x0e, 0x3f, 0x4b, 0x10, 0xd2, 0x69, 0xb8, 0x56, 0xbf, 0x3d, 0xf8, + 0xe8, 0x2d, 0x1d, 0x86, 0x77, 0xca, 0xd8, 0x58, 0x99, 0x2e, 0xa4, 0x64, 0x41, 0x6d, 0xb9, 0xd8, + 0x08, 0xf6, 0xc8, 0x92, 0x3c, 0x3e, 0x41, 0x3b, 
0x22, 0x9a, 0x41, 0x5c, 0x66, 0x10, 0x56, 0x7d, + 0x3b, 0xb6, 0x62, 0x1f, 0x1a, 0xb6, 0xb9, 0x94, 0x77, 0x6d, 0x2e, 0x15, 0x74, 0x8c, 0xa1, 0x4a, + 0xe1, 0x63, 0xd4, 0x8e, 0x38, 0x10, 0xa9, 0xed, 0xcd, 0xb5, 0x76, 0x54, 0xcb, 0x95, 0xf9, 0x1c, + 0xbd, 0x89, 0x53, 0xc1, 0x88, 0x8c, 0x66, 0x61, 0x0c, 0x24, 0xce, 0xd2, 0x02, 0x9c, 0x4d, 0x85, + 0x38, 0x78, 0x81, 0x38, 0xd3, 0x63, 0x0f, 0xba, 0xc6, 0x73, 0xa6, 0x2d, 0xf8, 0x03, 0xda, 0x5d, + 0x70, 0x22, 0x5a, 0x16, 0xd2, 0xd9, 0x72, 0xad, 0x7e, 0x33, 0xd8, 0x31, 0xd9, 0x51, 0x95, 0xac, + 0x64, 0x1c, 0x04, 0xa3, 0x85, 0x00, 0x2d, 0xdb, 0xae, 0x65, 0x26, 0x5b, 0xcb, 0x46, 0x68, 0xe7, + 0x3e, 0xe5, 0x42, 0x86, 0x44, 0x4a, 0xc8, 0x99, 0x74, 0x5a, 0xaa, 0xa3, 0xa3, 0x55, 0xf3, 0xae, + 0x55, 0x41, 0x47, 0x99, 0x74, 0x84, 0x4f, 0x51, 0x27, 0x23, 0x4f, 0x18, 0xe8, 0x9f, 0x18, 0xed, + 0xca, 0x63, 0x10, 0x5f, 0x90, 0x3d, 0x4f, 0xe1, 0xd1, 0x69, 0xbb, 0x56, 0x7f, 0x77, 0xe0, 0xae, + 0xb0, 0x56, 0x0b, 0xe4, 0xdd, 0xa6, 0xf0, 0x18, 0x28, 0x75, 0xef, 0x13, 0xb2, 0xab, 0x08, 0xef, + 0xa1, 0xee, 0xed, 0x64, 0xfc, 0x3d, 0xbc, 0xf9, 0x36, 0xbd, 0x1a, 0x8f, 0x26, 0xe7, 0x93, 0xf1, + 0x59, 0x77, 0x03, 0xb7, 0x50, 0x73, 0x78, 0x3a, 0x9d, 0x8c, 0xba, 0x16, 0xde, 0x46, 0xf6, 0xf9, + 0xcd, 0xe5, 0x65, 0xb7, 0x31, 0xdc, 0x45, 0x9d, 0x1c, 0x84, 0x20, 0x09, 0x84, 0xf2, 0x17, 0x83, + 0xde, 0xef, 0x06, 0xda, 0x32, 0x4d, 0xbc, 0x58, 0x10, 0xeb, 0x3f, 0x17, 0xe4, 0x04, 0x2d, 0x5e, + 0xa1, 0x06, 0x34, 0xd6, 0x03, 0x8c, 0xc1, 0x00, 0x16, 0xaf, 0xa6, 0x00, 0xaf, 0xd6, 0x03, 0x8c, + 0x41, 0xaf, 0xe8, 0xeb, 0x05, 0xa0, 0xfe, 0xfd, 0xe9, 0x2d, 0xc7, 0x06, 0xc1, 0x59, 0xe4, 0x4d, + 0x55, 0x25, 0x58, 0x6c, 0x48, 0x1d, 0x0f, 0x63, 0x74, 0x10, 0xd1, 0x7c, 0xf9, 0xec, 0x87, 0xad, + 0x6a, 0xf8, 0x57, 0xd5, 0xf7, 0xaf, 0xac, 0x1f, 0x5f, 0xb5, 0x26, 0xa1, 0x19, 0x29, 0x12, 0x8f, + 0xf2, 0xc4, 0x4f, 0xa0, 0x50, 0xdd, 0xf9, 0x75, 0x89, 0xb0, 0x54, 0x3c, 0xfb, 0xab, 0x39, 0x56, + 0x87, 0xbb, 0x4d, 0x25, 0xfb, 0xfc, 0x37, 0x00, 0x00, 0xff, 0xff, 0xac, 0xc4, 0x10, 0xa0, 0x02, + 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/cloudtasks.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/cloudtasks.pb.go new file mode 100644 index 0000000..4518ecc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/cloudtasks.pb.go @@ -0,0 +1,2604 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta2/cloudtasks.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import _ "google.golang.org/genproto/googleapis/rpc/code" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for +// [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. +type ListQueuesRequest struct { + // Required. + // + // The location name. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // `filter` can be used to specify a subset of queues. Any + // [Queue][google.cloud.tasks.v2beta2.Queue] field can be used as a filter and + // several operators as supported. For example: `<=, <, >=, >, !=, =, :`. The + // filter syntax is the same as described in [Stackdriver's Advanced Logs + // Filters](https://cloud.google.com/logging/docs/view/advanced_filters). + // + // Sample filter "app_engine_http_target: *". + // + // Note that using filters might cause fewer queues than the + // requested_page size to be returned. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. + // + // The maximum page size is 9800. If unspecified, the page size will + // be the maximum. Fewer queues than requested might be returned, + // even if more queues exist; use the + // [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + // in the response to determine if more queues exist. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token] + // returned from the previous call to + // [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] method. It + // is an error to switch the value of the + // [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter] while + // iterating through pages. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesRequest) Reset() { *m = ListQueuesRequest{} } +func (m *ListQueuesRequest) String() string { return proto.CompactTextString(m) } +func (*ListQueuesRequest) ProtoMessage() {} +func (*ListQueuesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{0} +} +func (m *ListQueuesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesRequest.Unmarshal(m, b) +} +func (m *ListQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesRequest.Marshal(b, m, deterministic) +} +func (dst *ListQueuesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesRequest.Merge(dst, src) +} +func (m *ListQueuesRequest) XXX_Size() int { + return xxx_messageInfo_ListQueuesRequest.Size(m) +} +func (m *ListQueuesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesRequest proto.InternalMessageInfo + +func (m *ListQueuesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListQueuesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListQueuesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListQueuesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for +// [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]. +type ListQueuesResponse struct { + // The list of queues. + Queues []*Queue `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues] with this + // value as the + // [page_token][google.cloud.tasks.v2beta2.ListQueuesRequest.page_token]. + // + // If the next_page_token is empty, there are no more results. + // + // The page token is valid for only 2 hours. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesResponse) Reset() { *m = ListQueuesResponse{} } +func (m *ListQueuesResponse) String() string { return proto.CompactTextString(m) } +func (*ListQueuesResponse) ProtoMessage() {} +func (*ListQueuesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{1} +} +func (m *ListQueuesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesResponse.Unmarshal(m, b) +} +func (m *ListQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesResponse.Marshal(b, m, deterministic) +} +func (dst *ListQueuesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesResponse.Merge(dst, src) +} +func (m *ListQueuesResponse) XXX_Size() int { + return xxx_messageInfo_ListQueuesResponse.Size(m) +} +func (m *ListQueuesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesResponse proto.InternalMessageInfo + +func (m *ListQueuesResponse) GetQueues() []*Queue { + if m != nil { + return m.Queues + } + return nil +} + +func (m *ListQueuesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for +// [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue]. +type GetQueueRequest struct { + // Required. + // + // The resource name of the queue. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueRequest) Reset() { *m = GetQueueRequest{} } +func (m *GetQueueRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueRequest) ProtoMessage() {} +func (*GetQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{2} +} +func (m *GetQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueRequest.Unmarshal(m, b) +} +func (m *GetQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueRequest.Merge(dst, src) +} +func (m *GetQueueRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueRequest.Size(m) +} +func (m *GetQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueRequest proto.InternalMessageInfo + +func (m *GetQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for +// [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue]. +type CreateQueueRequest struct { + // Required. + // + // The location name in which the queue will be created. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + // + // The list of allowed locations can be obtained by calling Cloud + // Tasks' implementation of + // [ListLocations][google.cloud.location.Locations.ListLocations]. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. 
+ // + // The queue to create. + // + // [Queue's name][google.cloud.tasks.v2beta2.Queue.name] cannot be the same as + // an existing queue. + Queue *Queue `protobuf:"bytes,2,opt,name=queue,proto3" json:"queue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateQueueRequest) Reset() { *m = CreateQueueRequest{} } +func (m *CreateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*CreateQueueRequest) ProtoMessage() {} +func (*CreateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{3} +} +func (m *CreateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateQueueRequest.Unmarshal(m, b) +} +func (m *CreateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *CreateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateQueueRequest.Merge(dst, src) +} +func (m *CreateQueueRequest) XXX_Size() int { + return xxx_messageInfo_CreateQueueRequest.Size(m) +} +func (m *CreateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateQueueRequest proto.InternalMessageInfo + +func (m *CreateQueueRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +// Request message for +// [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue]. +type UpdateQueueRequest struct { + // Required. + // + // The queue to create or update. + // + // The queue's [name][google.cloud.tasks.v2beta2.Queue.name] must be + // specified. + // + // Output only fields cannot be modified using UpdateQueue. + // Any value specified for an output only field will be ignored. + // The queue's [name][google.cloud.tasks.v2beta2.Queue.name] cannot be + // changed. + Queue *Queue `protobuf:"bytes,1,opt,name=queue,proto3" json:"queue,omitempty"` + // A mask used to specify which fields of the queue are being updated. + // + // If empty, then all fields will be updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateQueueRequest) Reset() { *m = UpdateQueueRequest{} } +func (m *UpdateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateQueueRequest) ProtoMessage() {} +func (*UpdateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{4} +} +func (m *UpdateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateQueueRequest.Unmarshal(m, b) +} +func (m *UpdateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateQueueRequest.Merge(dst, src) +} +func (m *UpdateQueueRequest) XXX_Size() int { + return xxx_messageInfo_UpdateQueueRequest.Size(m) +} +func (m *UpdateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateQueueRequest proto.InternalMessageInfo + +func (m *UpdateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +func (m *UpdateQueueRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for +// [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. +type DeleteQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteQueueRequest) Reset() { *m = DeleteQueueRequest{} } +func (m *DeleteQueueRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteQueueRequest) ProtoMessage() {} +func (*DeleteQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{5} +} +func (m *DeleteQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteQueueRequest.Unmarshal(m, b) +} +func (m *DeleteQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteQueueRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteQueueRequest.Merge(dst, src) +} +func (m *DeleteQueueRequest) XXX_Size() int { + return xxx_messageInfo_DeleteQueueRequest.Size(m) +} +func (m *DeleteQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteQueueRequest proto.InternalMessageInfo + +func (m *DeleteQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for +// [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue]. +type PurgeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PurgeQueueRequest) Reset() { *m = PurgeQueueRequest{} } +func (m *PurgeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PurgeQueueRequest) ProtoMessage() {} +func (*PurgeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{6} +} +func (m *PurgeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PurgeQueueRequest.Unmarshal(m, b) +} +func (m *PurgeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PurgeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PurgeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PurgeQueueRequest.Merge(dst, src) +} +func (m *PurgeQueueRequest) XXX_Size() int { + return xxx_messageInfo_PurgeQueueRequest.Size(m) +} +func (m *PurgeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PurgeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PurgeQueueRequest proto.InternalMessageInfo + +func (m *PurgeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for +// [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue]. +type PauseQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseQueueRequest) Reset() { *m = PauseQueueRequest{} } +func (m *PauseQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PauseQueueRequest) ProtoMessage() {} +func (*PauseQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{7} +} +func (m *PauseQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseQueueRequest.Unmarshal(m, b) +} +func (m *PauseQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PauseQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseQueueRequest.Merge(dst, src) +} +func (m *PauseQueueRequest) XXX_Size() int { + return xxx_messageInfo_PauseQueueRequest.Size(m) +} +func (m *PauseQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseQueueRequest proto.InternalMessageInfo + +func (m *PauseQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for +// [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. +type ResumeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeQueueRequest) Reset() { *m = ResumeQueueRequest{} } +func (m *ResumeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeQueueRequest) ProtoMessage() {} +func (*ResumeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{8} +} +func (m *ResumeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeQueueRequest.Unmarshal(m, b) +} +func (m *ResumeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeQueueRequest.Merge(dst, src) +} +func (m *ResumeQueueRequest) XXX_Size() int { + return xxx_messageInfo_ResumeQueueRequest.Size(m) +} +func (m *ResumeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeQueueRequest proto.InternalMessageInfo + +func (m *ResumeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for listing tasks using +// [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. +type ListTasksRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. + ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + // Requested page size. Fewer tasks than requested might be returned. + // + // The maximum page size is 1000. If unspecified, the page size will + // be the maximum. Fewer tasks than requested might be returned, + // even if more tasks exist; use + // [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] + // in the response to determine if more tasks exist. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token] + // returned from the previous call to + // [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] method. + // + // The page token is valid for only 2 hours. 
+ PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksRequest) Reset() { *m = ListTasksRequest{} } +func (m *ListTasksRequest) String() string { return proto.CompactTextString(m) } +func (*ListTasksRequest) ProtoMessage() {} +func (*ListTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{9} +} +func (m *ListTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksRequest.Unmarshal(m, b) +} +func (m *ListTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksRequest.Marshal(b, m, deterministic) +} +func (dst *ListTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksRequest.Merge(dst, src) +} +func (m *ListTasksRequest) XXX_Size() int { + return xxx_messageInfo_ListTasksRequest.Size(m) +} +func (m *ListTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksRequest proto.InternalMessageInfo + +func (m *ListTasksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTasksRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func (m *ListTasksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTasksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for listing tasks using +// [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. +type ListTasksResponse struct { + // The list of tasks. + Tasks []*Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks] with this + // value as the + // [page_token][google.cloud.tasks.v2beta2.ListTasksRequest.page_token]. + // + // If the next_page_token is empty, there are no more results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksResponse) Reset() { *m = ListTasksResponse{} } +func (m *ListTasksResponse) String() string { return proto.CompactTextString(m) } +func (*ListTasksResponse) ProtoMessage() {} +func (*ListTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{10} +} +func (m *ListTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksResponse.Unmarshal(m, b) +} +func (m *ListTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksResponse.Marshal(b, m, deterministic) +} +func (dst *ListTasksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksResponse.Merge(dst, src) +} +func (m *ListTasksResponse) XXX_Size() int { + return xxx_messageInfo_ListTasksResponse.Size(m) +} +func (m *ListTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksResponse proto.InternalMessageInfo + +func (m *ListTasksResponse) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *ListTasksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for getting a task using +// [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask]. +type GetTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{11} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for +// [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. +type CreateTaskRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // The queue must already exist. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The task to add. + // + // Task names have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`. + // The user can optionally specify a task + // [name][google.cloud.tasks.v2beta2.Task.name]. If a name is not specified + // then the system will generate a random unique task id, which will be set in + // the task returned in the [response][google.cloud.tasks.v2beta2.Task.name]. + // + // If [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] is not + // set or is in the past then Cloud Tasks will set it to the current time. + // + // Task De-duplication: + // + // Explicitly specifying a task ID enables task de-duplication. If + // a task's ID is identical to that of an existing task or a task + // that was deleted or completed recently then the call will fail + // with [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. + // If the task's queue was created using Cloud Tasks, then another task with + // the same name can't be created for ~1hour after the original task was + // deleted or completed. If the task's queue was created using queue.yaml or + // queue.xml, then another task with the same name can't be created + // for ~9days after the original task was deleted or completed. + // + // Because there is an extra lookup cost to identify duplicate task + // names, these [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + // calls have significantly increased latency. Using hashed strings for the + // task id or for the prefix of the task id is recommended. 
Choosing task ids + // that are sequential or have sequential prefixes, for example using a + // timestamp, causes an increase in latency and error rates in all + // task commands. The infrastructure relies on an approximately + // uniform distribution of task ids to store and serve tasks + // efficiently. + Task *Task `protobuf:"bytes,2,opt,name=task,proto3" json:"task,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. + ResponseView Task_View `protobuf:"varint,3,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTaskRequest) Reset() { *m = CreateTaskRequest{} } +func (m *CreateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTaskRequest) ProtoMessage() {} +func (*CreateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{12} +} +func (m *CreateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTaskRequest.Unmarshal(m, b) +} +func (m *CreateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTaskRequest.Merge(dst, src) +} +func (m *CreateTaskRequest) XXX_Size() int { + return xxx_messageInfo_CreateTaskRequest.Size(m) +} +func (m *CreateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTaskRequest proto.InternalMessageInfo + +func (m *CreateTaskRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTaskRequest) GetTask() *Task { + if m != nil { + return m.Task + } + return nil +} + +func (m *CreateTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for deleting a task using +// [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask]. +type DeleteTaskRequest struct { + // Required. + // + // The task name. 
For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } +func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskRequest) ProtoMessage() {} +func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{13} +} +func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) +} +func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) +} +func (m *DeleteTaskRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTaskRequest.Size(m) +} +func (m *DeleteTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo + +func (m *DeleteTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for leasing tasks using +// [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. +type LeaseTasksRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of tasks to lease. + // + // The system will make a best effort to return as close to as + // `max_tasks` as possible. + // + // The largest that `max_tasks` can be is 1000. + MaxTasks int32 `protobuf:"varint,2,opt,name=max_tasks,json=maxTasks,proto3" json:"max_tasks,omitempty"` + // + // After the worker has successfully finished the work associated + // with the task, the worker must call via + // [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + // before the [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + // Otherwise the task will be returned to a later + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] call so that + // another worker can retry it. + // + // The maximum lease duration is 1 week. + // `lease_duration` will be truncated to the nearest second. + LeaseDuration *duration.Duration `protobuf:"bytes,3,opt,name=lease_duration,json=leaseDuration,proto3" json:"lease_duration,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,4,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + // `filter` can be used to specify a subset of tasks to lease. + // + // When `filter` is set to `tag=` then the + // [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] will contain only + // tasks whose [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal to + // ``. `` must be less than 500 characters. + // + // When `filter` is set to `tag_function=oldest_tag()`, only tasks which have + // the same tag as the task with the oldest + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] will be + // returned. + // + // Grammar Syntax: + // + // * `filter = "tag=" tag | "tag_function=" function` + // + // * `tag = string` + // + // * `function = "oldest_tag()"` + // + // The `oldest_tag()` function returns tasks which have the same tag as the + // oldest task (ordered by schedule time). + // + // SDK compatibility: Although the SDK allows tags to be either + // string or + // [bytes](https://cloud.google.com/appengine/docs/standard/java/javadoc/com/google/appengine/api/taskqueue/TaskOptions.html#tag-byte:A-), + // only UTF-8 encoded tags can be used in Cloud Tasks. Tag which + // aren't UTF-8 encoded can't be used in the + // [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter] and the + // task's [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be displayed + // as empty in Cloud Tasks. + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LeaseTasksRequest) Reset() { *m = LeaseTasksRequest{} } +func (m *LeaseTasksRequest) String() string { return proto.CompactTextString(m) } +func (*LeaseTasksRequest) ProtoMessage() {} +func (*LeaseTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{14} +} +func (m *LeaseTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LeaseTasksRequest.Unmarshal(m, b) +} +func (m *LeaseTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LeaseTasksRequest.Marshal(b, m, deterministic) +} +func (dst *LeaseTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LeaseTasksRequest.Merge(dst, src) +} +func (m *LeaseTasksRequest) XXX_Size() int { + return xxx_messageInfo_LeaseTasksRequest.Size(m) +} +func (m *LeaseTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LeaseTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LeaseTasksRequest proto.InternalMessageInfo + +func (m *LeaseTasksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *LeaseTasksRequest) GetMaxTasks() int32 { + if m != nil { + return m.MaxTasks + } + return 0 +} + +func (m *LeaseTasksRequest) GetLeaseDuration() *duration.Duration { + if m != nil { + return m.LeaseDuration + } + return nil +} + +func (m *LeaseTasksRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func (m *LeaseTasksRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response message for leasing tasks using +// [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. +type LeaseTasksResponse struct { + // The leased tasks. 
+ Tasks []*Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LeaseTasksResponse) Reset() { *m = LeaseTasksResponse{} } +func (m *LeaseTasksResponse) String() string { return proto.CompactTextString(m) } +func (*LeaseTasksResponse) ProtoMessage() {} +func (*LeaseTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{15} +} +func (m *LeaseTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LeaseTasksResponse.Unmarshal(m, b) +} +func (m *LeaseTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LeaseTasksResponse.Marshal(b, m, deterministic) +} +func (dst *LeaseTasksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LeaseTasksResponse.Merge(dst, src) +} +func (m *LeaseTasksResponse) XXX_Size() int { + return xxx_messageInfo_LeaseTasksResponse.Size(m) +} +func (m *LeaseTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LeaseTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LeaseTasksResponse proto.InternalMessageInfo + +func (m *LeaseTasksResponse) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +// Request message for acknowledging a task using +// [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask]. +type AcknowledgeTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The task's current schedule time, available in the + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or + // [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. + // This restriction is to ensure that your worker currently holds the lease. 
+ ScheduleTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcknowledgeTaskRequest) Reset() { *m = AcknowledgeTaskRequest{} } +func (m *AcknowledgeTaskRequest) String() string { return proto.CompactTextString(m) } +func (*AcknowledgeTaskRequest) ProtoMessage() {} +func (*AcknowledgeTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{16} +} +func (m *AcknowledgeTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcknowledgeTaskRequest.Unmarshal(m, b) +} +func (m *AcknowledgeTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcknowledgeTaskRequest.Marshal(b, m, deterministic) +} +func (dst *AcknowledgeTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcknowledgeTaskRequest.Merge(dst, src) +} +func (m *AcknowledgeTaskRequest) XXX_Size() int { + return xxx_messageInfo_AcknowledgeTaskRequest.Size(m) +} +func (m *AcknowledgeTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AcknowledgeTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AcknowledgeTaskRequest proto.InternalMessageInfo + +func (m *AcknowledgeTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AcknowledgeTaskRequest) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +// Request message for renewing a lease using +// [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]. +type RenewLeaseRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The task's current schedule time, available in the + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or + // [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. + // This restriction is to ensure that your worker currently holds the lease. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Required. + // + // The desired new lease duration, starting from now. + // + // + // The maximum lease duration is 1 week. + // `lease_duration` will be truncated to the nearest second. + LeaseDuration *duration.Duration `protobuf:"bytes,3,opt,name=lease_duration,json=leaseDuration,proto3" json:"lease_duration,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,4,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RenewLeaseRequest) Reset() { *m = RenewLeaseRequest{} } +func (m *RenewLeaseRequest) String() string { return proto.CompactTextString(m) } +func (*RenewLeaseRequest) ProtoMessage() {} +func (*RenewLeaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{17} +} +func (m *RenewLeaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RenewLeaseRequest.Unmarshal(m, b) +} +func (m *RenewLeaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RenewLeaseRequest.Marshal(b, m, deterministic) +} +func (dst *RenewLeaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RenewLeaseRequest.Merge(dst, src) +} +func (m *RenewLeaseRequest) XXX_Size() int { + return xxx_messageInfo_RenewLeaseRequest.Size(m) +} +func (m *RenewLeaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RenewLeaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RenewLeaseRequest proto.InternalMessageInfo + +func (m *RenewLeaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RenewLeaseRequest) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *RenewLeaseRequest) GetLeaseDuration() *duration.Duration { + if m != nil { + return m.LeaseDuration + } + return nil +} + +func (m *RenewLeaseRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for canceling a lease using +// [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. +type CancelLeaseRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The task's current schedule time, available in the + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] returned by + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] response or + // [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] response. + // This restriction is to ensure that your worker currently holds the lease. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,3,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelLeaseRequest) Reset() { *m = CancelLeaseRequest{} } +func (m *CancelLeaseRequest) String() string { return proto.CompactTextString(m) } +func (*CancelLeaseRequest) ProtoMessage() {} +func (*CancelLeaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{18} +} +func (m *CancelLeaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelLeaseRequest.Unmarshal(m, b) +} +func (m *CancelLeaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelLeaseRequest.Marshal(b, m, deterministic) +} +func (dst *CancelLeaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelLeaseRequest.Merge(dst, src) +} +func (m *CancelLeaseRequest) XXX_Size() int { + return xxx_messageInfo_CancelLeaseRequest.Size(m) +} +func (m *CancelLeaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelLeaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelLeaseRequest proto.InternalMessageInfo + +func (m *CancelLeaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CancelLeaseRequest) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *CancelLeaseRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for forcing a task to run now using +// [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. +type RunTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] will be returned. + // + // By default response_view is + // [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all information is + // retrieved by default because some data, such as payloads, might be + // desirable to return only when needed because of its large size or because + // of the sensitivity of data that it contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Task][google.cloud.tasks.v2beta2.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunTaskRequest) Reset() { *m = RunTaskRequest{} } +func (m *RunTaskRequest) String() string { return proto.CompactTextString(m) } +func (*RunTaskRequest) ProtoMessage() {} +func (*RunTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_a80c09c0b658f30d, []int{19} +} +func (m *RunTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunTaskRequest.Unmarshal(m, b) +} +func (m *RunTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunTaskRequest.Marshal(b, m, deterministic) +} +func (dst *RunTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunTaskRequest.Merge(dst, src) +} +func (m *RunTaskRequest) XXX_Size() int { + return xxx_messageInfo_RunTaskRequest.Size(m) +} +func (m *RunTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunTaskRequest proto.InternalMessageInfo + +func (m *RunTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RunTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func init() { + proto.RegisterType((*ListQueuesRequest)(nil), "google.cloud.tasks.v2beta2.ListQueuesRequest") + proto.RegisterType((*ListQueuesResponse)(nil), "google.cloud.tasks.v2beta2.ListQueuesResponse") + proto.RegisterType((*GetQueueRequest)(nil), "google.cloud.tasks.v2beta2.GetQueueRequest") + proto.RegisterType((*CreateQueueRequest)(nil), "google.cloud.tasks.v2beta2.CreateQueueRequest") + proto.RegisterType((*UpdateQueueRequest)(nil), "google.cloud.tasks.v2beta2.UpdateQueueRequest") + proto.RegisterType((*DeleteQueueRequest)(nil), "google.cloud.tasks.v2beta2.DeleteQueueRequest") + proto.RegisterType((*PurgeQueueRequest)(nil), "google.cloud.tasks.v2beta2.PurgeQueueRequest") + proto.RegisterType((*PauseQueueRequest)(nil), "google.cloud.tasks.v2beta2.PauseQueueRequest") + proto.RegisterType((*ResumeQueueRequest)(nil), "google.cloud.tasks.v2beta2.ResumeQueueRequest") + proto.RegisterType((*ListTasksRequest)(nil), "google.cloud.tasks.v2beta2.ListTasksRequest") + proto.RegisterType((*ListTasksResponse)(nil), "google.cloud.tasks.v2beta2.ListTasksResponse") + proto.RegisterType((*GetTaskRequest)(nil), "google.cloud.tasks.v2beta2.GetTaskRequest") + proto.RegisterType((*CreateTaskRequest)(nil), "google.cloud.tasks.v2beta2.CreateTaskRequest") + proto.RegisterType((*DeleteTaskRequest)(nil), "google.cloud.tasks.v2beta2.DeleteTaskRequest") + proto.RegisterType((*LeaseTasksRequest)(nil), "google.cloud.tasks.v2beta2.LeaseTasksRequest") + proto.RegisterType((*LeaseTasksResponse)(nil), "google.cloud.tasks.v2beta2.LeaseTasksResponse") + proto.RegisterType((*AcknowledgeTaskRequest)(nil), "google.cloud.tasks.v2beta2.AcknowledgeTaskRequest") + proto.RegisterType((*RenewLeaseRequest)(nil), "google.cloud.tasks.v2beta2.RenewLeaseRequest") + proto.RegisterType((*CancelLeaseRequest)(nil), "google.cloud.tasks.v2beta2.CancelLeaseRequest") + proto.RegisterType((*RunTaskRequest)(nil), "google.cloud.tasks.v2beta2.RunTaskRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudTasksClient is the client API for CloudTasks service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudTasksClient interface { + // Lists queues. + // + // Queues are returned in lexicographical order. + ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of + // whether it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. + CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of + // whether it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. + UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. + DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. Tasks can + // still be added when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2beta2.Queue.state] is + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. 
+ PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The state of a + // queue is stored in the queue's + // [state][google.cloud.tasks.v2beta2.Queue.state]; after calling this method + // it will be set to + // [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling + // Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Gets the access control policy for a + // [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty policy if the + // resource exists and does not have a policy set. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the access control policy for a + // [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on a + // [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does not exist, + // this will return an empty set of permissions, not a + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] + // view is retrieved due to performance considerations; + // [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + // controls the subset of information which is returned. + // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) + // Gets a task. + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + // the maximum task size is + // 100KB. + // * For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the maximum + // task size is 1MB. 
+ CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has completed successfully or permanently + // failed. + DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Leases tasks from a pull queue for + // [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + // + // This method is invoked by the worker to obtain a lease. The + // worker must acknowledge the task via + // [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + // after they have performed the work associated with the task. + // + // The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is intended + // to store data that the worker needs to perform the work associated with the + // task. To return the payloads in the + // [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + // [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + // to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + // + // A maximum of 10 qps of + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests are + // allowed per queue. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // is returned when this limit is + // exceeded. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // is also returned when + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + // is exceeded. + LeaseTasks(ctx context.Context, in *LeaseTasksRequest, opts ...grpc.CallOption) (*LeaseTasksResponse, error) + // Acknowledges a pull task. + // + // The worker, that is, the entity that + // [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this task must + // call this method to indicate that the work associated with the task has + // finished. + // + // The worker must acknowledge a task within the + // [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + // or the lease will expire and the task will become available to be leased + // again. After the task is acknowledged, it will not be returned + // by a later [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + // [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + // [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + AcknowledgeTask(ctx context.Context, in *AcknowledgeTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Renew the current lease of a pull task. + // + // The worker can use this method to extend the lease by a new + // duration, starting from now. The new task lease will be + // returned in the task's + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + RenewLease(ctx context.Context, in *RenewLeaseRequest, opts ...grpc.CallOption) (*Task, error) + // Cancel a pull task's lease. + // + // The worker can use this method to cancel a task's lease by + // setting its [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + // to now. This will make the task available to be leased to the next caller + // of [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + CancelLease(ctx context.Context, in *CancelLeaseRequest, opts ...grpc.CallOption) (*Task, error) + // Forces a task to run now. 
+ // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its + // [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + // + // This command is meant to be used for manual debugging. For + // example, [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + // used to retry a failed task after a fix has been made or to manually force + // a task to be dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][google.cloud.tasks.v2beta2.Task.status] after the + // task is dispatched but before the task is received by its target. + // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] will be + // reset to the time that + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was called plus + // the retry delay specified in the queue's + // [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + // + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot be called + // on a [pull task][google.cloud.tasks.v2beta2.PullMessage]. + RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) +} + +type cloudTasksClient struct { + cc *grpc.ClientConn +} + +func NewCloudTasksClient(cc *grpc.ClientConn) CloudTasksClient { + return &cloudTasksClient{cc} +} + +func (c *cloudTasksClient) ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) { + out := new(ListQueuesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { + out := new(ListTasksResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) LeaseTasks(ctx context.Context, in *LeaseTasksRequest, opts ...grpc.CallOption) (*LeaseTasksResponse, error) { + out := new(LeaseTasksResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) AcknowledgeTask(ctx context.Context, in *AcknowledgeTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) RenewLease(ctx context.Context, in *RenewLeaseRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CancelLease(ctx context.Context, in *CancelLeaseRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudTasksServer is the server API for CloudTasks service. +type CloudTasksServer interface { + // Lists queues. + // + // Queues are returned in lexicographical order. + ListQueues(context.Context, *ListQueuesRequest) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(context.Context, *GetQueueRequest) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of + // whether it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. + CreateQueue(context.Context, *CreateQueueRequest) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of + // whether it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. + UpdateQueue(context.Context, *UpdateQueueRequest) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and + // queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) before using + // this method. 
+ DeleteQueue(context.Context, *DeleteQueueRequest) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(context.Context, *PurgeQueueRequest) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. Tasks can + // still be added when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2beta2.Queue.state] is + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + PauseQueue(context.Context, *PauseQueueRequest) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The state of a + // queue is stored in the queue's + // [state][google.cloud.tasks.v2beta2.Queue.state]; after calling this method + // it will be set to + // [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling + // Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(context.Context, *ResumeQueueRequest) (*Queue, error) + // Gets the access control policy for a + // [Queue][google.cloud.tasks.v2beta2.Queue]. Returns an empty policy if the + // resource exists and does not have a policy set. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the access control policy for a + // [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on a + // [Queue][google.cloud.tasks.v2beta2.Queue]. If the resource does not exist, + // this will return an empty set of permissions, not a + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] + // view is retrieved due to performance considerations; + // [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] + // controls the subset of information which is returned. 
+ // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(context.Context, *ListTasksRequest) (*ListTasksResponse, error) + // Gets a task. + GetTask(context.Context, *GetTaskRequest) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + // the maximum task size is + // 100KB. + // * For [pull queues][google.cloud.tasks.v2beta2.PullTarget], the maximum + // task size is 1MB. + CreateTask(context.Context, *CreateTaskRequest) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has completed successfully or permanently + // failed. + DeleteTask(context.Context, *DeleteTaskRequest) (*empty.Empty, error) + // Leases tasks from a pull queue for + // [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + // + // This method is invoked by the worker to obtain a lease. The + // worker must acknowledge the task via + // [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] + // after they have performed the work associated with the task. + // + // The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is intended + // to store data that the worker needs to perform the work associated with the + // task. To return the payloads in the + // [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + // [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] + // to [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + // + // A maximum of 10 qps of + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests are + // allowed per queue. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // is returned when this limit is + // exceeded. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // is also returned when + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + // is exceeded. + LeaseTasks(context.Context, *LeaseTasksRequest) (*LeaseTasksResponse, error) + // Acknowledges a pull task. + // + // The worker, that is, the entity that + // [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this task must + // call this method to indicate that the work associated with the task has + // finished. + // + // The worker must acknowledge a task within the + // [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] + // or the lease will expire and the task will become available to be leased + // again. After the task is acknowledged, it will not be returned + // by a later [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + // [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + // [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + AcknowledgeTask(context.Context, *AcknowledgeTaskRequest) (*empty.Empty, error) + // Renew the current lease of a pull task. + // + // The worker can use this method to extend the lease by a new + // duration, starting from now. The new task lease will be + // returned in the task's + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + RenewLease(context.Context, *RenewLeaseRequest) (*Task, error) + // Cancel a pull task's lease. 
+ // + // The worker can use this method to cancel a task's lease by + // setting its [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] + // to now. This will make the task available to be leased to the next caller + // of [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + CancelLease(context.Context, *CancelLeaseRequest) (*Task, error) + // Forces a task to run now. + // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its + // [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or is + // [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + // + // This command is meant to be used for manual debugging. For + // example, [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be + // used to retry a failed task after a fix has been made or to manually force + // a task to be dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][google.cloud.tasks.v2beta2.Task.status] after the + // task is dispatched but before the task is received by its target. + // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] will be + // reset to the time that + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was called plus + // the retry delay specified in the queue's + // [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + // + // [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot be called + // on a [pull task][google.cloud.tasks.v2beta2.PullMessage]. 
+ RunTask(context.Context, *RunTaskRequest) (*Task, error) +} + +func RegisterCloudTasksServer(s *grpc.Server, srv CloudTasksServer) { + s.RegisterService(&_CloudTasks_serviceDesc, srv) +} + +func _CloudTasks_ListQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListQueuesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListQueues(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/ListQueues", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListQueues(ctx, req.(*ListQueuesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/GetQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetQueue(ctx, req.(*GetQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateQueue(ctx, req.(*CreateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_UpdateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).UpdateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).UpdateQueue(ctx, req.(*UpdateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).DeleteQueue(ctx, req.(*DeleteQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PurgeQueue_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PurgeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PurgeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PurgeQueue(ctx, req.(*PurgeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PauseQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PauseQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PauseQueue(ctx, req.(*PauseQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ResumeQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ResumeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ResumeQueue(ctx, req.(*ResumeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); 
err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ListTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/ListTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListTasks(ctx, req.(*ListTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/CreateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateTask(ctx, req.(*CreateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_LeaseTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LeaseTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).LeaseTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).LeaseTasks(ctx, req.(*LeaseTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_AcknowledgeTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AcknowledgeTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).AcknowledgeTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).AcknowledgeTask(ctx, req.(*AcknowledgeTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_RenewLease_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RenewLeaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).RenewLease(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/RenewLease", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).RenewLease(ctx, req.(*RenewLeaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CancelLease_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelLeaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CancelLease(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/CancelLease", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CancelLease(ctx, req.(*CancelLeaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_RunTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).RunTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta2.CloudTasks/RunTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).RunTask(ctx, req.(*RunTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudTasks_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.tasks.v2beta2.CloudTasks", + HandlerType: (*CloudTasksServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListQueues", + Handler: _CloudTasks_ListQueues_Handler, + }, + { + MethodName: "GetQueue", + Handler: _CloudTasks_GetQueue_Handler, + }, + { + MethodName: "CreateQueue", + Handler: _CloudTasks_CreateQueue_Handler, + }, + { + MethodName: "UpdateQueue", + Handler: _CloudTasks_UpdateQueue_Handler, + }, + { + MethodName: "DeleteQueue", + Handler: _CloudTasks_DeleteQueue_Handler, + }, + { + MethodName: "PurgeQueue", + Handler: 
_CloudTasks_PurgeQueue_Handler, + }, + { + MethodName: "PauseQueue", + Handler: _CloudTasks_PauseQueue_Handler, + }, + { + MethodName: "ResumeQueue", + Handler: _CloudTasks_ResumeQueue_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _CloudTasks_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _CloudTasks_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _CloudTasks_TestIamPermissions_Handler, + }, + { + MethodName: "ListTasks", + Handler: _CloudTasks_ListTasks_Handler, + }, + { + MethodName: "GetTask", + Handler: _CloudTasks_GetTask_Handler, + }, + { + MethodName: "CreateTask", + Handler: _CloudTasks_CreateTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _CloudTasks_DeleteTask_Handler, + }, + { + MethodName: "LeaseTasks", + Handler: _CloudTasks_LeaseTasks_Handler, + }, + { + MethodName: "AcknowledgeTask", + Handler: _CloudTasks_AcknowledgeTask_Handler, + }, + { + MethodName: "RenewLease", + Handler: _CloudTasks_RenewLease_Handler, + }, + { + MethodName: "CancelLease", + Handler: _CloudTasks_CancelLease_Handler, + }, + { + MethodName: "RunTask", + Handler: _CloudTasks_RunTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/tasks/v2beta2/cloudtasks.proto", +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta2/cloudtasks.proto", fileDescriptor_cloudtasks_a80c09c0b658f30d) +} + +var fileDescriptor_cloudtasks_a80c09c0b658f30d = []byte{ + // 1382 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0xdf, 0x6f, 0x14, 0x55, + 0x14, 0xc7, 0x73, 0x4b, 0x17, 0xe8, 0x59, 0xda, 0xba, 0x37, 0x81, 0x94, 0x45, 0xb1, 0xde, 0x04, + 0x58, 0x17, 0xd9, 0x09, 0xab, 0x80, 0x6c, 0x05, 0x4a, 0x0b, 0x14, 0x05, 0x4c, 0x99, 0x56, 0x1f, + 0x7c, 0xd9, 0x0c, 0xb3, 0x97, 0x75, 0xec, 0xfc, 0x62, 0x7e, 0xb4, 0x15, 0x6d, 0x4c, 0x7c, 0xf4, + 0xc1, 0x17, 0x8c, 0xf1, 0xcd, 0x9f, 0x31, 0x31, 0x91, 0x98, 0x98, 0xf0, 0xe6, 0x7f, 0xe1, 0xbf, + 0xe0, 0xb3, 0xf1, 0xc9, 0x67, 0x73, 0x7f, 0xcc, 0xce, 0x74, 0xa6, 0x3b, 0x33, 0x5d, 0x28, 0xf1, + 0xa9, 0x9d, 0x7b, 0xcf, 0xbd, 0xe7, 0x73, 0xcf, 0x3d, 0xf7, 0xde, 0xef, 0x59, 0x38, 0xdd, 0x77, + 0x9c, 0xbe, 0x49, 0x15, 0xdd, 0x74, 0xc2, 0x9e, 0x12, 0x68, 0xfe, 0x9a, 0xaf, 0xac, 0xb7, 0xef, + 0xd1, 0x40, 0x6b, 0x8b, 0x36, 0xde, 0xd4, 0x72, 0x3d, 0x27, 0x70, 0x70, 0x5d, 0x18, 0xb7, 0x78, + 0x47, 0x4b, 0xf4, 0x48, 0xe3, 0xfa, 0x8b, 0x72, 0x22, 0xcd, 0x35, 0x14, 0xcd, 0xb6, 0x9d, 0x40, + 0x0b, 0x0c, 0xc7, 0x96, 0x23, 0xeb, 0x27, 0x73, 0xdc, 0x3c, 0x08, 0x69, 0x48, 0xa5, 0xdd, 0x89, + 0x1c, 0x3b, 0xf6, 0x25, 0xcd, 0x8e, 0x4b, 0x33, 0x43, 0xb3, 0x94, 0xf5, 0xb3, 0xec, 0x4f, 0xd7, + 0x75, 0x4c, 0x43, 0xff, 0x58, 0xf6, 0xd7, 0xb7, 0xf7, 0x6f, 0xeb, 0x8b, 0xc6, 0xf2, 0xaf, 0x7b, + 0xe1, 0x7d, 0xa5, 0x17, 0x7a, 0x9c, 0x55, 0xf6, 0x1f, 0x4b, 0xf7, 0x53, 0xcb, 0x0d, 0xa2, 0xc1, + 0xb3, 0xe9, 0xce, 0xfb, 0x06, 0x35, 0x7b, 0x5d, 0x2b, 0x46, 0x7b, 0x39, 0x6d, 0x11, 0x18, 0x16, + 0xf5, 0x03, 0xcd, 0x72, 0xa5, 0xc1, 0x61, 0x69, 0xe0, 0xb9, 0xba, 0xa2, 0x3b, 0x3d, 0xb9, 0x72, + 0xf2, 0x19, 0xd4, 0x6e, 0x1b, 0x7e, 0x70, 0x97, 0x05, 0xc3, 0x57, 0xe9, 0x83, 0x90, 0xfa, 0x01, + 0x3e, 0x02, 0xfb, 0x5d, 0xcd, 0xa3, 0x76, 0x30, 0x83, 0x66, 0x51, 0x63, 0x42, 0x95, 0x5f, 0xac, + 0xfd, 0xbe, 0x61, 0x06, 0xd4, 0x9b, 0x19, 0x13, 0xed, 0xe2, 0x0b, 0x1f, 0x83, 0x09, 0x57, 0xeb, + 0xd3, 0xae, 0x6f, 0x3c, 0xa4, 0x33, 0xfb, 0x66, 0x51, 0xa3, 0xa2, 0x1e, 0x64, 0x0d, 0x2b, 0xc6, + 0x43, 0x8a, 0x5f, 0x02, 0xe0, 0x9d, 0x81, 0xb3, 0x46, 0xed, 0x99, 0x71, 
0x3e, 0x90, 0x9b, 0xaf, + 0xb2, 0x06, 0xb2, 0x01, 0x38, 0x09, 0xe0, 0xbb, 0x8e, 0xed, 0x53, 0x7c, 0x11, 0xf6, 0xf3, 0xfd, + 0xf1, 0x67, 0xd0, 0xec, 0xbe, 0x46, 0xb5, 0xfd, 0x4a, 0x6b, 0x78, 0x0e, 0xb4, 0xf8, 0x58, 0x55, + 0x0e, 0xc0, 0x27, 0x61, 0xda, 0xa6, 0x9b, 0x41, 0x37, 0xe1, 0x54, 0xd0, 0x4e, 0xb2, 0xe6, 0xe5, + 0x81, 0xe3, 0x13, 0x30, 0xbd, 0x44, 0x85, 0xdf, 0x68, 0xdd, 0x18, 0xc6, 0x6d, 0xcd, 0xa2, 0x72, + 0xd5, 0xfc, 0x7f, 0x42, 0x01, 0x2f, 0x7a, 0x54, 0x0b, 0xe8, 0x36, 0xcb, 0x61, 0x11, 0xba, 0x00, + 0x15, 0x8e, 0xc1, 0x5d, 0x96, 0xc2, 0x16, 0xf6, 0xe4, 0x0b, 0x04, 0xf8, 0x3d, 0xb7, 0x97, 0xf6, + 0x33, 0x98, 0x0f, 0xed, 0x6e, 0x3e, 0x3c, 0x07, 0xd5, 0x90, 0x4f, 0xc7, 0x93, 0x44, 0xe2, 0xd4, + 0xa3, 0xe1, 0x51, 0x96, 0xb4, 0x6e, 0xb0, 0x3c, 0xba, 0xa3, 0xf9, 0x6b, 0x2a, 0x08, 0x73, 0xf6, + 0x3f, 0x69, 0x00, 0xbe, 0x46, 0x4d, 0x9a, 0x62, 0xd9, 0x29, 0x3a, 0xa7, 0xa0, 0xb6, 0x1c, 0x7a, + 0xfd, 0x72, 0x86, 0x5a, 0xe8, 0x17, 0x1b, 0x36, 0x00, 0xab, 0xd4, 0x0f, 0xad, 0x62, 0xcb, 0xdf, + 0x11, 0xbc, 0xc0, 0x52, 0x67, 0x95, 0x45, 0xa1, 0x68, 0x63, 0xde, 0x81, 0x49, 0x4f, 0x26, 0x57, + 0x77, 0xdd, 0xa0, 0x1b, 0x3c, 0x22, 0x53, 0xed, 0x13, 0x79, 0x01, 0x65, 0x13, 0xb7, 0xde, 0x37, + 0xe8, 0x86, 0x7a, 0x28, 0x1a, 0xcb, 0xbe, 0xb6, 0xa7, 0xfb, 0x78, 0x6e, 0xba, 0x57, 0xd2, 0xe9, + 0xee, 0x8b, 0xf3, 0x26, 0x99, 0x65, 0xb6, 0x9f, 0x87, 0x0a, 0xf7, 0x2c, 0x93, 0x7d, 0xb6, 0x08, + 0x4a, 0x15, 0xe6, 0xa5, 0x53, 0xdd, 0x85, 0xa9, 0x25, 0xca, 0x7d, 0xe6, 0xc4, 0xf3, 0x59, 0x86, + 0x88, 0x3c, 0x46, 0x50, 0x13, 0xc7, 0x26, 0xe9, 0x75, 0xd8, 0xe6, 0xbc, 0x01, 0xe3, 0x41, 0x9c, + 0xa5, 0xc5, 0xcb, 0xe7, 0xd6, 0x59, 0xde, 0x7d, 0xa3, 0xf3, 0x9e, 0x82, 0x9a, 0xc8, 0xf8, 0x82, + 0x20, 0x91, 0xbf, 0x11, 0xd4, 0x6e, 0x53, 0xcd, 0xa7, 0xa5, 0xb2, 0xee, 0x18, 0x4c, 0x58, 0xda, + 0x66, 0x57, 0x6c, 0xee, 0x98, 0xc8, 0x14, 0x4b, 0xdb, 0xe4, 0x63, 0xf1, 0x3c, 0x4c, 0x99, 0x6c, + 0xa6, 0x6e, 0xf4, 0x12, 0xf0, 0x05, 0x54, 0xdb, 0x47, 0x33, 0xa7, 0xf4, 0x9a, 0x34, 0x50, 0x27, + 0xf9, 0x80, 0xe8, 0x33, 0x1b, 0x81, 0xf1, 0xd1, 0x93, 0x3a, 0xbe, 0xdb, 0x2b, 0xc9, 0xbb, 0x9d, + 0xdc, 0x06, 0x9c, 0x5c, 0xef, 0xd3, 0x65, 0x2c, 0xb1, 0xe0, 0xc8, 0x55, 0x7d, 0xcd, 0x76, 0x36, + 0x4c, 0xda, 0xeb, 0x17, 0x05, 0x1b, 0x5f, 0x81, 0x49, 0x5f, 0xff, 0x90, 0xf6, 0x42, 0x93, 0x76, + 0xd9, 0x7b, 0x36, 0xf4, 0x1a, 0x5b, 0x8d, 0x1e, 0x3b, 0xf5, 0x50, 0x34, 0x80, 0x35, 0x91, 0x7f, + 0x11, 0xd4, 0x54, 0x6a, 0xd3, 0x0d, 0xbe, 0x84, 0xbd, 0x74, 0xf5, 0xff, 0xda, 0x4d, 0xf2, 0x04, + 0x01, 0x5e, 0xd4, 0x6c, 0x9d, 0x9a, 0x7b, 0xbf, 0xf2, 0x67, 0x79, 0x0e, 0x5d, 0x98, 0x52, 0x43, + 0xfb, 0x39, 0xde, 0x54, 0xed, 0x7f, 0x8e, 0x02, 0x2c, 0x32, 0x7b, 0x71, 0x28, 0x7f, 0x42, 0x00, + 0xb1, 0x1e, 0xc1, 0x67, 0xf2, 0xa6, 0xcc, 0x08, 0xa7, 0x7a, 0xab, 0xac, 0xb9, 0x00, 0x20, 0x17, + 0x3e, 0xff, 0xf3, 0xaf, 0x47, 0x63, 0x67, 0xb1, 0x32, 0x50, 0x9b, 0x9f, 0x88, 0x9b, 0xe3, 0x92, + 0xeb, 0x39, 0x1f, 0x51, 0x3d, 0xf0, 0x95, 0xa6, 0x62, 0x3a, 0xba, 0x10, 0xb5, 0x4a, 0x73, 0x4b, + 0x91, 0x22, 0xe7, 0x11, 0x82, 0x83, 0x91, 0x7a, 0xc1, 0xa7, 0xf3, 0xbc, 0xa6, 0x34, 0x4e, 0xbd, + 0x58, 0x42, 0xec, 0x44, 0xc5, 0xc2, 0x3e, 0x84, 0x49, 0x22, 0x29, 0xcd, 0x2d, 0xfc, 0x1d, 0x82, + 0x6a, 0x42, 0x2c, 0xe1, 0xdc, 0x70, 0x64, 0x55, 0x55, 0x19, 0xb6, 0xcb, 0x9c, 0xed, 0x4d, 0xb2, + 0xdb, 0x88, 0x75, 0xa4, 0x2e, 0xfa, 0x19, 0x41, 0x35, 0xa1, 0xb3, 0xf2, 0x11, 0xb3, 0x82, 0xac, + 0x0c, 0xe2, 0x35, 0x8e, 0x78, 0xb9, 0x7d, 0x2e, 0x46, 0x14, 0xb5, 0x46, 0xa9, 0x20, 0x46, 0xa0, + 0x5f, 0x22, 0xa8, 0x26, 0x44, 0x58, 0x3e, 0x68, 0x56, 0xad, 0xd5, 0x8f, 0x64, 0xce, 0xef, 0x75, + 
0x56, 0x50, 0x44, 0x9b, 0xdb, 0x1c, 0x65, 0x73, 0x21, 0xd6, 0x7a, 0xf9, 0x27, 0x23, 0xa3, 0x09, + 0xcb, 0xc4, 0x6d, 0x9e, 0x93, 0x75, 0xc8, 0xb9, 0x5d, 0x92, 0x75, 0x5c, 0xe6, 0xad, 0x83, 0x9a, + 0x02, 0x71, 0xa0, 0x32, 0x0b, 0x10, 0xd3, 0x6a, 0x74, 0x6f, 0x11, 0x99, 0x37, 0x86, 0xf8, 0x23, + 0x82, 0x6a, 0x42, 0xdf, 0xe6, 0x6f, 0x6b, 0x56, 0x08, 0x97, 0x81, 0xbc, 0xca, 0x21, 0xe7, 0xc8, + 0xf9, 0xdd, 0x42, 0x7a, 0xdc, 0x1d, 0xa3, 0xfc, 0x1a, 0xc1, 0xa1, 0x25, 0x1a, 0xbc, 0xad, 0x59, + 0xcb, 0xbc, 0x86, 0xc5, 0x24, 0x72, 0x6b, 0x68, 0x56, 0x6b, 0xfd, 0x6c, 0x2b, 0xd9, 0x19, 0xa1, + 0x1d, 0x4e, 0xd9, 0x88, 0x5e, 0x72, 0x8b, 0xe3, 0x5c, 0x27, 0xf3, 0x31, 0x8e, 0x47, 0x7d, 0x27, + 0xf4, 0xf4, 0x12, 0x48, 0xfd, 0x84, 0x9f, 0x08, 0x6c, 0x25, 0x0f, 0x6c, 0xe5, 0x39, 0x81, 0xf9, + 0x29, 0xb0, 0x3f, 0x10, 0xe0, 0x55, 0xea, 0xf3, 0x46, 0xea, 0x59, 0x86, 0xef, 0xb3, 0x21, 0xb8, + 0x91, 0x72, 0x9d, 0x35, 0x89, 0x20, 0x5f, 0x2d, 0x61, 0x29, 0x5f, 0x8d, 0xbb, 0x1c, 0xfc, 0x16, + 0xb9, 0x31, 0x0a, 0x78, 0x90, 0x99, 0x97, 0xe1, 0xff, 0x82, 0x60, 0x62, 0x50, 0x97, 0xe0, 0xd7, + 0x8a, 0x9e, 0xb1, 0xa4, 0xf8, 0xad, 0x9f, 0x29, 0x69, 0x2d, 0xe9, 0xaf, 0x70, 0xfa, 0x8b, 0xf8, + 0x42, 0xc9, 0x1b, 0x3c, 0x66, 0x17, 0x3f, 0xcc, 0xe0, 0x6f, 0x10, 0x1c, 0x90, 0xe5, 0x0c, 0x6e, + 0x16, 0x3c, 0x7d, 0x09, 0x25, 0x51, 0x2f, 0x14, 0xa9, 0x3b, 0xa1, 0x95, 0x39, 0x39, 0xf2, 0x07, + 0xa3, 0xe6, 0x16, 0xfe, 0x1e, 0x01, 0xc4, 0x65, 0x4f, 0xfe, 0x05, 0x94, 0x29, 0x8f, 0x4a, 0x00, + 0x2e, 0x70, 0xc0, 0xb7, 0xc8, 0xa8, 0xb1, 0x63, 0x5b, 0xfd, 0x15, 0x02, 0x88, 0x6b, 0x9d, 0x7c, + 0xc6, 0x4c, 0x4d, 0x34, 0xf4, 0x59, 0x91, 0xa1, 0x6b, 0x8e, 0x1c, 0xba, 0x27, 0x4c, 0x78, 0x0d, + 0x0a, 0x8d, 0x02, 0xe1, 0x95, 0x2e, 0xc0, 0x0a, 0x84, 0x57, 0xa6, 0x7e, 0x21, 0x37, 0x39, 0xee, + 0x02, 0xb9, 0x34, 0x6a, 0x20, 0xb9, 0x74, 0x67, 0xe1, 0x7c, 0x8c, 0x60, 0x3a, 0x55, 0xd2, 0xe0, + 0x76, 0x1e, 0xcd, 0xce, 0xf5, 0xcf, 0xd0, 0xc0, 0xbe, 0xcb, 0x49, 0x6f, 0x92, 0xc5, 0x11, 0x03, + 0xdb, 0xd1, 0x62, 0x7f, 0x8c, 0xf7, 0x57, 0x04, 0x10, 0x97, 0x44, 0xf9, 0x71, 0xce, 0x94, 0x4e, + 0x25, 0x52, 0xf4, 0x0e, 0xe7, 0x5d, 0x22, 0x0b, 0xa3, 0xf2, 0x7a, 0x03, 0xa7, 0x0c, 0xf7, 0x37, + 0x26, 0x29, 0xe3, 0x42, 0xa6, 0x40, 0x52, 0x66, 0x2a, 0x9e, 0x12, 0xc0, 0x4f, 0x1d, 0x60, 0x3d, + 0xf6, 0xca, 0x88, 0xbf, 0x45, 0x70, 0x40, 0xd6, 0x30, 0xf9, 0xd7, 0xd3, 0xf6, 0x42, 0xa7, 0x04, + 0xe9, 0x0d, 0x4e, 0x3a, 0x4f, 0xe6, 0x46, 0x0e, 0x6d, 0x68, 0x77, 0x50, 0x73, 0xe1, 0x53, 0x38, + 0xae, 0x3b, 0x56, 0x8e, 0xbb, 0x85, 0xe9, 0xb8, 0x22, 0x5a, 0x66, 0xe9, 0xb8, 0x8c, 0x3e, 0xb8, + 0x22, 0xcd, 0xfb, 0x8e, 0xa9, 0xd9, 0xfd, 0x96, 0xe3, 0xf5, 0x95, 0x3e, 0xb5, 0x79, 0xb2, 0x2a, + 0xa2, 0x4b, 0x73, 0x0d, 0x7f, 0xa7, 0x5f, 0xd0, 0xe7, 0xf8, 0xd7, 0x0f, 0x63, 0x95, 0xd5, 0xab, + 0x2b, 0xb7, 0x56, 0xee, 0xed, 0xe7, 0x63, 0x5e, 0xff, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x01, 0x6f, + 0x4f, 0xe1, 0x03, 0x18, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/queue.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/queue.pb.go new file mode 100644 index 0000000..9419a26 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/queue.pb.go @@ -0,0 +1,817 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/tasks/v2beta2/queue.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the queue. +type Queue_State int32 + +const ( + // Unspecified state. + Queue_STATE_UNSPECIFIED Queue_State = 0 + // The queue is running. Tasks can be dispatched. + // + // If the queue was created using Cloud Tasks and the queue has + // had no activity (method calls or task dispatches) for 30 days, + // the queue may take a few minutes to re-activate. Some method + // calls may return [NOT_FOUND][google.rpc.Code.NOT_FOUND] and + // tasks may not be dispatched for a few minutes until the queue + // has been re-activated. + Queue_RUNNING Queue_State = 1 + // Tasks are paused by the user. If the queue is paused then Cloud + // Tasks will stop delivering tasks from it, but more tasks can + // still be added to it by the user. When a pull queue is paused, + // all [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] calls + // will return a [FAILED_PRECONDITION][google.rpc.Code.FAILED_PRECONDITION]. + Queue_PAUSED Queue_State = 2 + // The queue is disabled. + // + // A queue becomes `DISABLED` when + // [queue.yaml](https://cloud.google.com/appengine/docs/python/config/queueref) + // or + // [queue.xml](https://cloud.google.com/appengine/docs/standard/java/config/queueref) + // is uploaded which does not contain the queue. You cannot directly disable + // a queue. + // + // When a queue is disabled, tasks can still be added to a queue + // but the tasks are not dispatched and + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] calls + // return a `FAILED_PRECONDITION` error. + // + // To permanently delete this queue and all of its tasks, call + // [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue]. + Queue_DISABLED Queue_State = 3 +) + +var Queue_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "PAUSED", + 3: "DISABLED", +} +var Queue_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "PAUSED": 2, + "DISABLED": 3, +} + +func (x Queue_State) String() string { + return proto.EnumName(Queue_State_name, int32(x)) +} +func (Queue_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_queue_a4c8471e5288c61a, []int{0, 0} +} + +// A queue is a container of related tasks. Queues are configured to manage +// how those tasks are dispatched. Configurable properties include rate limits, +// retry options, target types, and others. +type Queue struct { + // Caller-specified and required in + // [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue], after + // which it becomes output only. + // + // The queue name. 
+ // + // The queue name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the queue's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Caller-specified and required in + // [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue][], after + // which the queue config type becomes output only, though fields within the + // config are mutable. + // + // The queue's target. + // + // The target applies to all tasks in the queue. + // + // Types that are valid to be assigned to TargetType: + // *Queue_AppEngineHttpTarget + // *Queue_PullTarget + TargetType isQueue_TargetType `protobuf_oneof:"target_type"` + // Rate limits for task dispatches. + // + // [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] and + // [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] are related + // because they both control task attempts however they control how tasks are + // attempted in different ways: + // + // * [rate_limits][google.cloud.tasks.v2beta2.Queue.rate_limits] controls the + // total rate of + // dispatches from a queue (i.e. all traffic dispatched from the + // queue, regardless of whether the dispatch is from a first + // attempt or a retry). + // * [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] controls + // what happens to + // particular a task after its first attempt fails. That is, + // [retry_config][google.cloud.tasks.v2beta2.Queue.retry_config] controls + // task retries (the second attempt, third attempt, etc). + RateLimits *RateLimits `protobuf:"bytes,5,opt,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` + // Settings that determine the retry behavior. + // + // * For tasks created using Cloud Tasks: the queue-level retry settings + // apply to all tasks in the queue that were created using Cloud Tasks. + // Retry settings cannot be set on individual tasks. + // * For tasks created using the App Engine SDK: the queue-level retry + // settings apply to all tasks in the queue which do not have retry settings + // explicitly set on the task and were created by the App Engine SDK. See + // [App Engine + // documentation](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/retrying-tasks). + RetryConfig *RetryConfig `protobuf:"bytes,6,opt,name=retry_config,json=retryConfig,proto3" json:"retry_config,omitempty"` + // Output only. The state of the queue. + // + // `state` can only be changed by called + // [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue], + // [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue], or + // uploading + // [queue.yaml/xml](https://cloud.google.com/appengine/docs/python/config/queueref). + // [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] cannot be + // used to change `state`. 
+ State Queue_State `protobuf:"varint,7,opt,name=state,proto3,enum=google.cloud.tasks.v2beta2.Queue_State" json:"state,omitempty"` + // Output only. The last time this queue was purged. + // + // All tasks that were [created][google.cloud.tasks.v2beta2.Task.create_time] + // before this time were purged. + // + // A queue can be purged using + // [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], the [App + // Engine Task Queue SDK, or the Cloud + // Console](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/deleting-tasks-and-queues#purging_all_tasks_from_a_queue). + // + // Purge time will be truncated to the nearest microsecond. Purge + // time will be unset if the queue has never been purged. + PurgeTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=purge_time,json=purgeTime,proto3" json:"purge_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Queue) Reset() { *m = Queue{} } +func (m *Queue) String() string { return proto.CompactTextString(m) } +func (*Queue) ProtoMessage() {} +func (*Queue) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a4c8471e5288c61a, []int{0} +} +func (m *Queue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Queue.Unmarshal(m, b) +} +func (m *Queue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Queue.Marshal(b, m, deterministic) +} +func (dst *Queue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Queue.Merge(dst, src) +} +func (m *Queue) XXX_Size() int { + return xxx_messageInfo_Queue.Size(m) +} +func (m *Queue) XXX_DiscardUnknown() { + xxx_messageInfo_Queue.DiscardUnknown(m) +} + +var xxx_messageInfo_Queue proto.InternalMessageInfo + +func (m *Queue) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isQueue_TargetType interface { + isQueue_TargetType() +} + +type Queue_AppEngineHttpTarget struct { + AppEngineHttpTarget *AppEngineHttpTarget `protobuf:"bytes,3,opt,name=app_engine_http_target,json=appEngineHttpTarget,proto3,oneof"` +} + +type Queue_PullTarget struct { + PullTarget *PullTarget `protobuf:"bytes,4,opt,name=pull_target,json=pullTarget,proto3,oneof"` +} + +func (*Queue_AppEngineHttpTarget) isQueue_TargetType() {} + +func (*Queue_PullTarget) isQueue_TargetType() {} + +func (m *Queue) GetTargetType() isQueue_TargetType { + if m != nil { + return m.TargetType + } + return nil +} + +func (m *Queue) GetAppEngineHttpTarget() *AppEngineHttpTarget { + if x, ok := m.GetTargetType().(*Queue_AppEngineHttpTarget); ok { + return x.AppEngineHttpTarget + } + return nil +} + +func (m *Queue) GetPullTarget() *PullTarget { + if x, ok := m.GetTargetType().(*Queue_PullTarget); ok { + return x.PullTarget + } + return nil +} + +func (m *Queue) GetRateLimits() *RateLimits { + if m != nil { + return m.RateLimits + } + return nil +} + +func (m *Queue) GetRetryConfig() *RetryConfig { + if m != nil { + return m.RetryConfig + } + return nil +} + +func (m *Queue) GetState() Queue_State { + if m != nil { + return m.State + } + return Queue_STATE_UNSPECIFIED +} + +func (m *Queue) GetPurgeTime() *timestamp.Timestamp { + if m != nil { + return m.PurgeTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
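[Editor's note: a quick illustration of how the Queue message and its target_type oneof fit together. This is a sketch under assumptions, not part of the vendored file; the package, function, parent and queue names are placeholders, and the client is the CloudTasksClient generated earlier in this file.]

package tasksexample

import (
	"context"
	"fmt"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2"
)

// createPullQueue creates a queue whose target_type oneof is a PullTarget,
// then branches on the oneof wrapper returned by the server.
// Illustrative sketch only; not part of the generated/vendored code.
func createPullQueue(ctx context.Context, c taskspb.CloudTasksClient, parent string) error {
	q, err := c.CreateQueue(ctx, &taskspb.CreateQueueRequest{
		Parent: parent, // e.g. "projects/<project>/locations/<location>" (placeholder)
		Queue: &taskspb.Queue{
			Name:       parent + "/queues/my-pull-queue", // placeholder QUEUE_ID
			TargetType: &taskspb.Queue_PullTarget{PullTarget: &taskspb.PullTarget{}},
		},
	})
	if err != nil {
		return err
	}
	switch q.GetTargetType().(type) {
	case *taskspb.Queue_PullTarget:
		fmt.Println("pull queue:", q.GetName(), "state:", q.GetState())
	case *taskspb.Queue_AppEngineHttpTarget:
		fmt.Println("App Engine queue:", q.GetName())
	}
	return nil
}
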
+func (*Queue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Queue_OneofMarshaler, _Queue_OneofUnmarshaler, _Queue_OneofSizer, []interface{}{ + (*Queue_AppEngineHttpTarget)(nil), + (*Queue_PullTarget)(nil), + } +} + +func _Queue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Queue) + // target_type + switch x := m.TargetType.(type) { + case *Queue_AppEngineHttpTarget: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpTarget); err != nil { + return err + } + case *Queue_PullTarget: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PullTarget); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Queue.TargetType has unexpected type %T", x) + } + return nil +} + +func _Queue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Queue) + switch tag { + case 3: // target_type.app_engine_http_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Queue_AppEngineHttpTarget{msg} + return true, err + case 4: // target_type.pull_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PullTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Queue_PullTarget{msg} + return true, err + default: + return false, nil + } +} + +func _Queue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Queue) + // target_type + switch x := m.TargetType.(type) { + case *Queue_AppEngineHttpTarget: + s := proto.Size(x.AppEngineHttpTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Queue_PullTarget: + s := proto.Size(x.PullTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Rate limits. +// +// This message determines the maximum rate that tasks can be dispatched by a +// queue, regardless of whether the dispatch is a first task attempt or a retry. +// +// Note: The debugging command, +// [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a task +// even if the queue has reached its +// [RateLimits][google.cloud.tasks.v2beta2.RateLimits]. +type RateLimits struct { + // The maximum rate at which tasks are dispatched from this queue. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget], + // the maximum allowed value + // is 500. + // * This field is output only for [pull + // queues][google.cloud.tasks.v2beta2.PullTarget]. In addition to the + // `max_tasks_dispatched_per_second` limit, a maximum of 10 QPS of + // [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests + // are allowed per pull queue. + // + // + // This field has the same meaning as + // [rate in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate). + MaxTasksDispatchedPerSecond float64 `protobuf:"fixed64,1,opt,name=max_tasks_dispatched_per_second,json=maxTasksDispatchedPerSecond,proto3" json:"max_tasks_dispatched_per_second,omitempty"` + // Output only. The max burst size. 
+ // + // Max burst size limits how fast tasks in queue are processed when + // many tasks are in the queue and the rate is high. This field + // allows the queue to have a high rate so processing starts shortly + // after a task is enqueued, but still limits resource usage when + // many tasks are enqueued in a short period of time. + // + // The [token bucket](https://wikipedia.org/wiki/Token_Bucket) + // algorithm is used to control the rate of task dispatches. Each + // queue has a token bucket that holds tokens, up to the maximum + // specified by `max_burst_size`. Each time a task is dispatched, a + // token is removed from the bucket. Tasks will be dispatched until + // the queue's bucket runs out of tokens. The bucket will be + // continuously refilled with new tokens based on + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second]. + // + // Cloud Tasks will pick the value of `max_burst_size` based on the + // value of + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second]. + // + // For App Engine queues that were created or updated using + // `queue.yaml/xml`, `max_burst_size` is equal to + // [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size). + // Since `max_burst_size` is output only, if + // [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is called + // on a queue created by `queue.yaml/xml`, `max_burst_size` will be reset + // based on the value of + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second], + // regardless of whether + // [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + // is updated. + // + MaxBurstSize int32 `protobuf:"varint,2,opt,name=max_burst_size,json=maxBurstSize,proto3" json:"max_burst_size,omitempty"` + // The maximum number of concurrent tasks that Cloud Tasks allows + // to be dispatched for this queue. After this threshold has been + // reached, Cloud Tasks stops dispatching tasks until the number of + // concurrent requests decreases. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // The maximum allowed value is 5,000. + // + // This field is output only for + // [pull queues][google.cloud.tasks.v2beta2.PullTarget] and always -1, which + // indicates no limit. No other queue types can have `max_concurrent_tasks` + // set to -1. + // + // + // This field has the same meaning as + // [max_concurrent_requests in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests). 
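[Editor's note: to make the relationship between these limits concrete, here is a minimal, hypothetical sketch (not part of the vendored code) that caps the dispatch rate and concurrency of an existing queue via UpdateQueue. max_burst_size is left unset because it is output only; the package, function and queue names are assumptions.]

package tasksexample

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2"
)

// throttleQueue caps a queue at 5 dispatches per second and 10 concurrent
// tasks. Illustrative sketch only; not part of the generated/vendored code.
func throttleQueue(ctx context.Context, c taskspb.CloudTasksClient, name string) (*taskspb.Queue, error) {
	return c.UpdateQueue(ctx, &taskspb.UpdateQueueRequest{
		Queue: &taskspb.Queue{
			Name: name, // "projects/<project>/locations/<location>/queues/<queue>" (placeholder)
			RateLimits: &taskspb.RateLimits{
				MaxTasksDispatchedPerSecond: 5,
				MaxConcurrentTasks:          10,
			},
		},
		// An update_mask can restrict which fields are modified; it is
		// omitted here for brevity.
	})
}
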
+ MaxConcurrentTasks int32 `protobuf:"varint,3,opt,name=max_concurrent_tasks,json=maxConcurrentTasks,proto3" json:"max_concurrent_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RateLimits) Reset() { *m = RateLimits{} } +func (m *RateLimits) String() string { return proto.CompactTextString(m) } +func (*RateLimits) ProtoMessage() {} +func (*RateLimits) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a4c8471e5288c61a, []int{1} +} +func (m *RateLimits) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RateLimits.Unmarshal(m, b) +} +func (m *RateLimits) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RateLimits.Marshal(b, m, deterministic) +} +func (dst *RateLimits) XXX_Merge(src proto.Message) { + xxx_messageInfo_RateLimits.Merge(dst, src) +} +func (m *RateLimits) XXX_Size() int { + return xxx_messageInfo_RateLimits.Size(m) +} +func (m *RateLimits) XXX_DiscardUnknown() { + xxx_messageInfo_RateLimits.DiscardUnknown(m) +} + +var xxx_messageInfo_RateLimits proto.InternalMessageInfo + +func (m *RateLimits) GetMaxTasksDispatchedPerSecond() float64 { + if m != nil { + return m.MaxTasksDispatchedPerSecond + } + return 0 +} + +func (m *RateLimits) GetMaxBurstSize() int32 { + if m != nil { + return m.MaxBurstSize + } + return 0 +} + +func (m *RateLimits) GetMaxConcurrentTasks() int32 { + if m != nil { + return m.MaxConcurrentTasks + } + return 0 +} + +// Retry config. +// +// These settings determine how a failed task attempt is retried. +type RetryConfig struct { + // Number of attempts per task. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // + // This field has the same meaning as + // [task_retry_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + // + // Types that are valid to be assigned to NumAttempts: + // *RetryConfig_MaxAttempts + // *RetryConfig_UnlimitedAttempts + NumAttempts isRetryConfig_NumAttempts `protobuf_oneof:"num_attempts"` + // If positive, `max_retry_duration` specifies the time limit for + // retrying a failed task, measured from when the task was first + // attempted. Once `max_retry_duration` time has passed *and* the + // task has been attempted + // [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] times, + // no further attempts will be made and the task will be deleted. + // + // If zero, then the task age is unlimited. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // This field is output only for [pull + // queues][google.cloud.tasks.v2beta2.PullTarget]. + // + // + // `max_retry_duration` will be truncated to the nearest second. + // + // This field has the same meaning as + // [task_age_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). 
+ MaxRetryDuration *duration.Duration `protobuf:"bytes,3,opt,name=max_retry_duration,json=maxRetryDuration,proto3" json:"max_retry_duration,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + // for retry between + // [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] duration + // after it fails, if the queue's + // [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] specifies that the + // task should be retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // This field is output only for [pull + // queues][google.cloud.tasks.v2beta2.PullTarget]. + // + // + // `min_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [min_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MinBackoff *duration.Duration `protobuf:"bytes,4,opt,name=min_backoff,json=minBackoff,proto3" json:"min_backoff,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2beta2.Task.schedule_time] + // for retry between + // [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] duration + // after it fails, if the queue's + // [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig] specifies that the + // task should be retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // This field is output only for [pull + // queues][google.cloud.tasks.v2beta2.PullTarget]. + // + // + // `max_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [max_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxBackoff *duration.Duration `protobuf:"bytes,5,opt,name=max_backoff,json=maxBackoff,proto3" json:"max_backoff,omitempty"` + // The time between retries will double `max_doublings` times. + // + // A task's retry interval starts at + // [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff], then + // doubles `max_doublings` times, then increases linearly, and finally retries + // retries at intervals of + // [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] up to + // [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] times. + // + // For example, if + // [min_backoff][google.cloud.tasks.v2beta2.RetryConfig.min_backoff] is 10s, + // [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] is 300s, + // and `max_doublings` is 3, then the a task will first be retried in 10s. The + // retry interval will double three times, and then increase linearly by 2^3 * + // 10s. Finally, the task will retry at intervals of + // [max_backoff][google.cloud.tasks.v2beta2.RetryConfig.max_backoff] until the + // task has been attempted + // [max_attempts][google.cloud.tasks.v2beta2.RetryConfig.max_attempts] times. + // Thus, the requests will retry at 10s, 20s, 40s, 80s, 160s, 240s, 300s, + // 300s, .... + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // This field is output only for [pull + // queues][google.cloud.tasks.v2beta2.PullTarget]. 
+ // + // + // This field has the same meaning as + // [max_doublings in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxDoublings int32 `protobuf:"varint,6,opt,name=max_doublings,json=maxDoublings,proto3" json:"max_doublings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryConfig) Reset() { *m = RetryConfig{} } +func (m *RetryConfig) String() string { return proto.CompactTextString(m) } +func (*RetryConfig) ProtoMessage() {} +func (*RetryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_a4c8471e5288c61a, []int{2} +} +func (m *RetryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryConfig.Unmarshal(m, b) +} +func (m *RetryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryConfig.Marshal(b, m, deterministic) +} +func (dst *RetryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryConfig.Merge(dst, src) +} +func (m *RetryConfig) XXX_Size() int { + return xxx_messageInfo_RetryConfig.Size(m) +} +func (m *RetryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RetryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryConfig proto.InternalMessageInfo + +type isRetryConfig_NumAttempts interface { + isRetryConfig_NumAttempts() +} + +type RetryConfig_MaxAttempts struct { + MaxAttempts int32 `protobuf:"varint,1,opt,name=max_attempts,json=maxAttempts,proto3,oneof"` +} + +type RetryConfig_UnlimitedAttempts struct { + UnlimitedAttempts bool `protobuf:"varint,2,opt,name=unlimited_attempts,json=unlimitedAttempts,proto3,oneof"` +} + +func (*RetryConfig_MaxAttempts) isRetryConfig_NumAttempts() {} + +func (*RetryConfig_UnlimitedAttempts) isRetryConfig_NumAttempts() {} + +func (m *RetryConfig) GetNumAttempts() isRetryConfig_NumAttempts { + if m != nil { + return m.NumAttempts + } + return nil +} + +func (m *RetryConfig) GetMaxAttempts() int32 { + if x, ok := m.GetNumAttempts().(*RetryConfig_MaxAttempts); ok { + return x.MaxAttempts + } + return 0 +} + +func (m *RetryConfig) GetUnlimitedAttempts() bool { + if x, ok := m.GetNumAttempts().(*RetryConfig_UnlimitedAttempts); ok { + return x.UnlimitedAttempts + } + return false +} + +func (m *RetryConfig) GetMaxRetryDuration() *duration.Duration { + if m != nil { + return m.MaxRetryDuration + } + return nil +} + +func (m *RetryConfig) GetMinBackoff() *duration.Duration { + if m != nil { + return m.MinBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxBackoff() *duration.Duration { + if m != nil { + return m.MaxBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxDoublings() int32 { + if m != nil { + return m.MaxDoublings + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
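[Editor's note: the doubling-then-linear schedule described above is easy to misread, so here is a small, self-contained sketch (an illustration, not library code) that reproduces the documented 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, ... sequence. The package and function names are hypothetical.]

package tasksexample

import "time"

// retryDelay returns the backoff before retry number `attempt` (0-based):
// the delay starts at minBackoff, doubles maxDoublings times, then grows
// linearly in steps of minBackoff*2^maxDoublings, and is capped at maxBackoff.
// Illustrative reconstruction of the schedule in the comments above.
func retryDelay(attempt, maxDoublings int, minBackoff, maxBackoff time.Duration) time.Duration {
	var d time.Duration
	if attempt <= maxDoublings {
		d = minBackoff << uint(attempt)
	} else {
		step := minBackoff << uint(maxDoublings)
		d = step * time.Duration(attempt-maxDoublings+1)
	}
	if d > maxBackoff {
		d = maxBackoff
	}
	return d
}

// With minBackoff=10s, maxBackoff=300s and maxDoublings=3 this yields
// 10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, ... matching the example above.
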
+func (*RetryConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RetryConfig_OneofMarshaler, _RetryConfig_OneofUnmarshaler, _RetryConfig_OneofSizer, []interface{}{ + (*RetryConfig_MaxAttempts)(nil), + (*RetryConfig_UnlimitedAttempts)(nil), + } +} + +func _RetryConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RetryConfig) + // num_attempts + switch x := m.NumAttempts.(type) { + case *RetryConfig_MaxAttempts: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.MaxAttempts)) + case *RetryConfig_UnlimitedAttempts: + t := uint64(0) + if x.UnlimitedAttempts { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("RetryConfig.NumAttempts has unexpected type %T", x) + } + return nil +} + +func _RetryConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RetryConfig) + switch tag { + case 1: // num_attempts.max_attempts + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.NumAttempts = &RetryConfig_MaxAttempts{int32(x)} + return true, err + case 2: // num_attempts.unlimited_attempts + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.NumAttempts = &RetryConfig_UnlimitedAttempts{x != 0} + return true, err + default: + return false, nil + } +} + +func _RetryConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RetryConfig) + // num_attempts + switch x := m.NumAttempts.(type) { + case *RetryConfig_MaxAttempts: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.MaxAttempts)) + case *RetryConfig_UnlimitedAttempts: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Queue)(nil), "google.cloud.tasks.v2beta2.Queue") + proto.RegisterType((*RateLimits)(nil), "google.cloud.tasks.v2beta2.RateLimits") + proto.RegisterType((*RetryConfig)(nil), "google.cloud.tasks.v2beta2.RetryConfig") + proto.RegisterEnum("google.cloud.tasks.v2beta2.Queue_State", Queue_State_name, Queue_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta2/queue.proto", fileDescriptor_queue_a4c8471e5288c61a) +} + +var fileDescriptor_queue_a4c8471e5288c61a = []byte{ + // 702 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0xcb, 0x6e, 0xdb, 0x3a, + 0x10, 0x86, 0xad, 0x24, 0xca, 0x65, 0xe4, 0x04, 0x0e, 0xcf, 0x05, 0x3e, 0x3e, 0x45, 0x62, 0x38, + 0x45, 0xe2, 0x95, 0x54, 0xa4, 0xab, 0xb6, 0x28, 0x0a, 0x3b, 0x72, 0x63, 0x17, 0x81, 0xe1, 0xca, + 0xce, 0xa6, 0x1b, 0x82, 0x96, 0x69, 0x45, 0x88, 0x44, 0xb2, 0x12, 0x55, 0x38, 0x79, 0xa4, 0x6e, + 0xfb, 0x34, 0x7d, 0x9b, 0x82, 0xd4, 0x25, 0x45, 0xd3, 0x3a, 0x3b, 0x73, 0xe6, 0xfb, 0xff, 0x19, + 0x71, 0x86, 0x86, 0xd3, 0x80, 0xf3, 0x20, 0xa2, 0x8e, 0x1f, 0xf1, 0x6c, 0xe1, 0x48, 0x92, 0xde, + 0xa6, 0xce, 0x97, 0xf3, 0x39, 0x95, 0xe4, 0xdc, 0xf9, 0x9c, 0xd1, 0x8c, 0xda, 0x22, 0xe1, 0x92, + 0xa3, 0x56, 0xce, 0xd9, 0x9a, 0xb3, 0x35, 0x67, 0x17, 0x5c, 0xeb, 0x59, 0xe1, 0x41, 0x44, 0xe8, + 0x10, 0xc6, 0xb8, 0x24, 0x32, 0xe4, 0x2c, 0xcd, 0x95, 0xad, 0xb3, 0x35, 0x15, 0x24, 0x49, 0x02, + 0x2a, 0x0b, 0xf0, 0xa8, 0x00, 0xf5, 0x69, 0x9e, 0x2d, 0x9d, 0x45, 0x96, 0x68, 
0xa7, 0x22, 0x7f, + 0xfc, 0x6b, 0x5e, 0x86, 0x31, 0x4d, 0x25, 0x89, 0x45, 0x0e, 0x74, 0xbe, 0x6d, 0x81, 0xf9, 0x51, + 0xf5, 0x8c, 0x10, 0x6c, 0x31, 0x12, 0xd3, 0xa6, 0xd1, 0x36, 0xba, 0x7b, 0x9e, 0xfe, 0x8d, 0x96, + 0xf0, 0x2f, 0x11, 0x02, 0x53, 0x16, 0x84, 0x8c, 0xe2, 0x1b, 0x29, 0x05, 0xce, 0xcb, 0x37, 0x37, + 0xdb, 0x46, 0xd7, 0x3a, 0x77, 0xec, 0x3f, 0x7f, 0xa2, 0xdd, 0x13, 0x62, 0xa0, 0x85, 0x43, 0x29, + 0xc5, 0x4c, 0xcb, 0x86, 0x35, 0xef, 0x2f, 0xf2, 0x38, 0x8c, 0x46, 0x60, 0x89, 0x2c, 0x8a, 0x4a, + 0xf3, 0x2d, 0x6d, 0x7e, 0xba, 0xce, 0x7c, 0x92, 0x45, 0x51, 0xe5, 0x09, 0xa2, 0x3a, 0xa1, 0x4b, + 0xb0, 0x12, 0x22, 0x29, 0x8e, 0xc2, 0x38, 0x94, 0x69, 0xd3, 0x7c, 0xda, 0xca, 0x23, 0x92, 0x5e, + 0x69, 0xda, 0x83, 0xa4, 0xfa, 0x8d, 0x3e, 0x40, 0x3d, 0xa1, 0x32, 0xb9, 0xc3, 0x3e, 0x67, 0xcb, + 0x30, 0x68, 0x6e, 0x6b, 0xa7, 0xb3, 0xb5, 0x4e, 0x8a, 0xbf, 0xd0, 0xb8, 0x67, 0x25, 0x0f, 0x07, + 0xf4, 0x16, 0xcc, 0x54, 0x12, 0x49, 0x9b, 0x3b, 0x6d, 0xa3, 0x7b, 0xb0, 0xde, 0x44, 0x4f, 0xc3, + 0x9e, 0x2a, 0xdc, 0xcb, 0x55, 0xe8, 0x15, 0x80, 0xc8, 0x92, 0x80, 0x62, 0x35, 0xbd, 0xe6, 0xae, + 0x6e, 0xa4, 0x55, 0x7a, 0x94, 0xa3, 0xb5, 0x67, 0xe5, 0x68, 0xbd, 0x3d, 0x4d, 0xab, 0x73, 0x67, + 0x00, 0xa6, 0xb6, 0x42, 0xff, 0xc0, 0xe1, 0x74, 0xd6, 0x9b, 0x0d, 0xf0, 0xf5, 0x78, 0x3a, 0x19, + 0x5c, 0x8c, 0xde, 0x8f, 0x06, 0x6e, 0xa3, 0x86, 0x2c, 0xd8, 0xf1, 0xae, 0xc7, 0xe3, 0xd1, 0xf8, + 0xb2, 0x61, 0x20, 0x80, 0xed, 0x49, 0xef, 0x7a, 0x3a, 0x70, 0x1b, 0x1b, 0xa8, 0x0e, 0xbb, 0xee, + 0x68, 0xda, 0xeb, 0x5f, 0x0d, 0xdc, 0xc6, 0x66, 0x7f, 0x1f, 0xac, 0x7c, 0x36, 0x58, 0xde, 0x09, + 0xda, 0xf9, 0x6a, 0x00, 0x3c, 0x5c, 0x1b, 0x72, 0xe1, 0x38, 0x26, 0x2b, 0xac, 0xbf, 0x03, 0x2f, + 0xc2, 0x54, 0x10, 0xe9, 0xdf, 0xd0, 0x05, 0x16, 0x34, 0xc1, 0x29, 0xf5, 0x39, 0x5b, 0xe8, 0xad, + 0x32, 0xbc, 0xff, 0x63, 0xb2, 0x9a, 0x29, 0xca, 0xad, 0xa0, 0x09, 0x4d, 0xa6, 0x1a, 0x41, 0xcf, + 0xe1, 0x40, 0xb9, 0xcc, 0xb3, 0x24, 0x95, 0x38, 0x0d, 0xef, 0x69, 0x73, 0xa3, 0x6d, 0x74, 0x4d, + 0xaf, 0x1e, 0x93, 0x55, 0x5f, 0x05, 0xa7, 0xe1, 0x3d, 0x45, 0x2f, 0xe0, 0x6f, 0x45, 0xf9, 0x9c, + 0xf9, 0x59, 0x92, 0x50, 0x26, 0xf3, 0xb2, 0x7a, 0x21, 0x4d, 0x0f, 0xc5, 0x64, 0x75, 0x51, 0xa5, + 0x74, 0xa9, 0xce, 0xf7, 0x0d, 0xb0, 0x7e, 0x9a, 0x0c, 0x3a, 0x01, 0xe5, 0x88, 0x89, 0x94, 0x34, + 0x16, 0x32, 0xd5, 0xad, 0x99, 0xc3, 0x9a, 0x67, 0xc5, 0x64, 0xd5, 0x2b, 0x82, 0xc8, 0x01, 0x94, + 0x31, 0xbd, 0x43, 0x74, 0xf1, 0x80, 0xaa, 0x86, 0x76, 0x87, 0x35, 0xef, 0xb0, 0xca, 0x55, 0x82, + 0x4b, 0x50, 0xb5, 0x71, 0xbe, 0x32, 0xe5, 0x2b, 0x2c, 0x9e, 0xc9, 0x7f, 0x8f, 0x66, 0xe5, 0x16, + 0x80, 0xd7, 0x88, 0xc9, 0x4a, 0x37, 0x57, 0x46, 0xd0, 0x6b, 0xb0, 0xe2, 0x90, 0xe1, 0x39, 0xf1, + 0x6f, 0xf9, 0x72, 0x59, 0xbc, 0x85, 0x35, 0x0e, 0x10, 0x87, 0xac, 0x9f, 0xc3, 0x5a, 0xab, 0xae, + 0xb0, 0xd0, 0x9a, 0x4f, 0x6b, 0xc9, 0xaa, 0xd4, 0x9e, 0xc0, 0xbe, 0xd2, 0x2e, 0x78, 0x36, 0x8f, + 0x42, 0x16, 0xa4, 0x7a, 0xe1, 0xf3, 0xdb, 0x77, 0xcb, 0x58, 0xff, 0x00, 0xea, 0x2c, 0x8b, 0xab, + 0x0b, 0xe9, 0x73, 0x38, 0xf2, 0x79, 0xbc, 0x66, 0x9d, 0xfb, 0xa0, 0xf7, 0x79, 0xa2, 0x4a, 0x4f, + 0x8c, 0x4f, 0xef, 0x0a, 0x32, 0xe0, 0x11, 0x61, 0x81, 0xcd, 0x93, 0xc0, 0x09, 0x28, 0xd3, 0x8d, + 0x39, 0x79, 0x8a, 0x88, 0x30, 0xfd, 0xdd, 0xff, 0xde, 0x1b, 0x7d, 0x9a, 0x6f, 0x6b, 0xf6, 0xe5, + 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd6, 0xd1, 0xaa, 0xde, 0x84, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/target.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/target.pb.go new file mode 100644 index 
0000000..26ed26d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/target.pb.go @@ -0,0 +1,665 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta2/target.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The HTTP method used to execute the task. +type HttpMethod int32 + +const ( + // HTTP method unspecified + HttpMethod_HTTP_METHOD_UNSPECIFIED HttpMethod = 0 + // HTTP POST + HttpMethod_POST HttpMethod = 1 + // HTTP GET + HttpMethod_GET HttpMethod = 2 + // HTTP HEAD + HttpMethod_HEAD HttpMethod = 3 + // HTTP PUT + HttpMethod_PUT HttpMethod = 4 + // HTTP DELETE + HttpMethod_DELETE HttpMethod = 5 +) + +var HttpMethod_name = map[int32]string{ + 0: "HTTP_METHOD_UNSPECIFIED", + 1: "POST", + 2: "GET", + 3: "HEAD", + 4: "PUT", + 5: "DELETE", +} +var HttpMethod_value = map[string]int32{ + "HTTP_METHOD_UNSPECIFIED": 0, + "POST": 1, + "GET": 2, + "HEAD": 3, + "PUT": 4, + "DELETE": 5, +} + +func (x HttpMethod) String() string { + return proto.EnumName(HttpMethod_name, int32(x)) +} +func (HttpMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{0} +} + +// Pull target. +type PullTarget struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullTarget) Reset() { *m = PullTarget{} } +func (m *PullTarget) String() string { return proto.CompactTextString(m) } +func (*PullTarget) ProtoMessage() {} +func (*PullTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{0} +} +func (m *PullTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullTarget.Unmarshal(m, b) +} +func (m *PullTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullTarget.Marshal(b, m, deterministic) +} +func (dst *PullTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullTarget.Merge(dst, src) +} +func (m *PullTarget) XXX_Size() int { + return xxx_messageInfo_PullTarget.Size(m) +} +func (m *PullTarget) XXX_DiscardUnknown() { + xxx_messageInfo_PullTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_PullTarget proto.InternalMessageInfo + +// The pull message contains data that can be used by the caller of +// [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to process the +// task. +// +// This proto can only be used for tasks in a queue which has +// [pull_target][google.cloud.tasks.v2beta2.Queue.pull_target] set. +type PullMessage struct { + // A data payload consumed by the worker to execute the task. + Payload []byte `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + // The task's tag. + // + // Tags allow similar tasks to be processed in a batch. 
If you label + // tasks with a tag, your worker can + // [lease tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] with the + // same tag using + // [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. For example, + // if you want to aggregate the events associated with a specific user once a + // day, you could tag tasks with the user ID. + // + // The task's tag can only be set when the + // [task is created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + // + // The tag must be less than 500 characters. + // + // SDK compatibility: Although the SDK allows tags to be either + // string or + // [bytes](https://cloud.google.com/appengine/docs/standard/java/javadoc/com/google/appengine/api/taskqueue/TaskOptions.html#tag-byte:A-), + // only UTF-8 encoded tags can be used in Cloud Tasks. If a tag isn't UTF-8 + // encoded, the tag will be empty when the task is returned by Cloud Tasks. + Tag string `protobuf:"bytes,2,opt,name=tag,proto3" json:"tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullMessage) Reset() { *m = PullMessage{} } +func (m *PullMessage) String() string { return proto.CompactTextString(m) } +func (*PullMessage) ProtoMessage() {} +func (*PullMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{1} +} +func (m *PullMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullMessage.Unmarshal(m, b) +} +func (m *PullMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullMessage.Marshal(b, m, deterministic) +} +func (dst *PullMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullMessage.Merge(dst, src) +} +func (m *PullMessage) XXX_Size() int { + return xxx_messageInfo_PullMessage.Size(m) +} +func (m *PullMessage) XXX_DiscardUnknown() { + xxx_messageInfo_PullMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_PullMessage proto.InternalMessageInfo + +func (m *PullMessage) GetPayload() []byte { + if m != nil { + return m.Payload + } + return nil +} + +func (m *PullMessage) GetTag() string { + if m != nil { + return m.Tag + } + return "" +} + +// App Engine HTTP target. +// +// The task will be delivered to the App Engine application hostname +// specified by its +// [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] and +// [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. The +// documentation for +// [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] +// explains how the task's host URL is constructed. +// +// Using [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] +// requires +// [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) +// Google IAM permission for the project +// and the following scope: +// +// `https://www.googleapis.com/auth/cloud-platform` +type AppEngineHttpTarget struct { + // Overrides for the + // [task-level + // app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + // + // If set, `app_engine_routing_override` is used for all tasks in + // the queue, no matter what the setting is for the + // [task-level + // app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. 
+ AppEngineRoutingOverride *AppEngineRouting `protobuf:"bytes,1,opt,name=app_engine_routing_override,json=appEngineRoutingOverride,proto3" json:"app_engine_routing_override,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpTarget) Reset() { *m = AppEngineHttpTarget{} } +func (m *AppEngineHttpTarget) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpTarget) ProtoMessage() {} +func (*AppEngineHttpTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{2} +} +func (m *AppEngineHttpTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpTarget.Unmarshal(m, b) +} +func (m *AppEngineHttpTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpTarget.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpTarget.Merge(dst, src) +} +func (m *AppEngineHttpTarget) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpTarget.Size(m) +} +func (m *AppEngineHttpTarget) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpTarget proto.InternalMessageInfo + +func (m *AppEngineHttpTarget) GetAppEngineRoutingOverride() *AppEngineRouting { + if m != nil { + return m.AppEngineRoutingOverride + } + return nil +} + +// App Engine HTTP request. +// +// The message defines the HTTP request that is sent to an App Engine app when +// the task is dispatched. +// +// This proto can only be used for tasks in a queue which has +// [app_engine_http_target][google.cloud.tasks.v2beta2.Queue.app_engine_http_target] +// set. +// +// Using [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] +// requires +// [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) +// Google IAM permission for the project +// and the following scope: +// +// `https://www.googleapis.com/auth/cloud-platform` +// +// The task will be delivered to the App Engine app which belongs to the same +// project as the queue. For more information, see +// [How Requests are +// Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) +// and how routing is affected by +// [dispatch +// files](https://cloud.google.com/appengine/docs/python/config/dispatchref). +// +// The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] used to +// construct the URL that the task is delivered to can be set at the queue-level +// or task-level: +// +// * If set, +// [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] +// is used for all tasks in the queue, no matter what the setting +// is for the +// [task-level +// app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. +// +// +// The `url` that the task will be sent to is: +// +// * `url =` [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] `+` +// [relative_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url] +// +// The task attempt has succeeded if the app's request handler returns +// an HTTP response code in the range [`200` - `299`]. `503` is +// considered an App Engine system error instead of an application +// error. 
Requests returning error `503` will be retried regardless of +// retry configuration and not counted against retry counts. +// Any other response code or a failure to receive a response before the +// deadline is a failed attempt. +type AppEngineHttpRequest struct { + // The HTTP method to use for the request. The default is POST. + // + // The app's request handler for the task's target URL must be able to handle + // HTTP requests with this http_method, otherwise the task attempt will fail + // with error code 405 (Method Not Allowed). See + // [Writing a push task request + // handler](https://cloud.google.com/appengine/docs/java/taskqueue/push/creating-handlers#writing_a_push_task_request_handler) + // and the documentation for the request handlers in the language your app is + // written in e.g. + // [Python Request + // Handler](https://cloud.google.com/appengine/docs/python/tools/webapp/requesthandlerclass). + HttpMethod HttpMethod `protobuf:"varint,1,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.tasks.v2beta2.HttpMethod" json:"http_method,omitempty"` + // Task-level setting for App Engine routing. + // + // If set, + // [app_engine_routing_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override] + // is used for all tasks in the queue, no matter what the setting is for the + // [task-level + // app_engine_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing]. + AppEngineRouting *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing,json=appEngineRouting,proto3" json:"app_engine_routing,omitempty"` + // The relative URL. + // + // The relative URL must begin with "/" and must be a valid HTTP relative URL. + // It can contain a path and query string arguments. + // If the relative URL is empty, then the root path "/" will be used. + // No spaces are allowed, and the maximum length allowed is 2083 characters. + RelativeUrl string `protobuf:"bytes,3,opt,name=relative_url,json=relativeUrl,proto3" json:"relative_url,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + // Repeated headers are not supported but a header value can contain commas. + // + // Cloud Tasks sets some headers to default values: + // + // * `User-Agent`: By default, this header is + // `"AppEngine-Google; (+http://code.google.com/appengine)"`. + // This header can be modified, but Cloud Tasks will append + // `"AppEngine-Google; (+http://code.google.com/appengine)"` to the + // modified `User-Agent`. + // + // If the task has a + // [payload][google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], Cloud + // Tasks sets the following headers: + // + // * `Content-Type`: By default, the `Content-Type` header is set to + // `"application/octet-stream"`. The default can be overridden by explicitly + // setting `Content-Type` to a particular media type when the + // [task is created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/json"`. + // * `Content-Length`: This is computed by Cloud Tasks. This value is + // output only. It cannot be changed. 
+ // + // The headers below cannot be set or overridden: + // + // * `Host` + // * `X-Google-*` + // * `X-AppEngine-*` + // + // In addition, Cloud Tasks sets some headers when the task is dispatched, + // such as headers containing information about the task; see + // [request + // headers](https://cloud.google.com/appengine/docs/python/taskqueue/push/creating-handlers#reading_request_headers). + // These headers are set only when the task is dispatched, so they are not + // visible when the task is returned in a Cloud Tasks response. + // + // Although there is no specific limit for the maximum number of headers or + // the size, there is a limit on the maximum size of the + // [Task][google.cloud.tasks.v2beta2.Task]. For more information, see the + // [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + // documentation. + Headers map[string]string `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Payload. + // + // The payload will be sent as the HTTP message body. A message + // body, and thus a payload, is allowed only if the HTTP method is + // POST or PUT. It is an error to set a data payload on a task with + // an incompatible [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + Payload []byte `protobuf:"bytes,5,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpRequest) Reset() { *m = AppEngineHttpRequest{} } +func (m *AppEngineHttpRequest) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpRequest) ProtoMessage() {} +func (*AppEngineHttpRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{3} +} +func (m *AppEngineHttpRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpRequest.Unmarshal(m, b) +} +func (m *AppEngineHttpRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpRequest.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpRequest.Merge(dst, src) +} +func (m *AppEngineHttpRequest) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpRequest.Size(m) +} +func (m *AppEngineHttpRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpRequest proto.InternalMessageInfo + +func (m *AppEngineHttpRequest) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *AppEngineHttpRequest) GetAppEngineRouting() *AppEngineRouting { + if m != nil { + return m.AppEngineRouting + } + return nil +} + +func (m *AppEngineHttpRequest) GetRelativeUrl() string { + if m != nil { + return m.RelativeUrl + } + return "" +} + +func (m *AppEngineHttpRequest) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *AppEngineHttpRequest) GetPayload() []byte { + if m != nil { + return m.Payload + } + return nil +} + +// App Engine Routing. 
+// +// For more information about services, versions, and instances see +// [An Overview of App +// Engine](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine), +// [Microservices Architecture on Google App +// Engine](https://cloud.google.com/appengine/docs/python/microservices-on-app-engine), +// [App Engine Standard request +// routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed), +// and [App Engine Flex request +// routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). +type AppEngineRouting struct { + // App service. + // + // By default, the task is sent to the service which is the default + // service when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + // not parsable into + // [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. For + // example, some tasks which were created using the App Engine SDK use a + // custom domain name; custom domains are not parsed by Cloud Tasks. If + // [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is not parsable, + // then [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] are the + // empty string. + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + // App version. + // + // By default, the task is sent to the version which is the default + // version when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + // not parsable into + // [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance]. For + // example, some tasks which were created using the App Engine SDK use a + // custom domain name; custom domains are not parsed by Cloud Tasks. If + // [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is not parsable, + // then [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] are the + // empty string. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // App instance. + // + // By default, the task is sent to an instance which is available when + // the task is attempted. + // + // Requests can only be sent to a specific instance if + // [manual scaling is used in App Engine + // Standard](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes). + // App Engine Flex does not support instances. For more information, see + // [App Engine Standard request + // routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) + // and [App Engine Flex request + // routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). 
+ Instance string `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // Output only. The host that the task is sent to. + // + // For more information, see + // [How Requests are + // Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed). + // + // The host is constructed as: + // + // + // * `host = [application_domain_name]`
+ // `| [service] + '.' + [application_domain_name]`
+ // `| [version] + '.' + [application_domain_name]`
+ // `| [version_dot_service]+ '.' + [application_domain_name]`
+ // `| [instance] + '.' + [application_domain_name]`
+ // `| [instance_dot_service] + '.' + [application_domain_name]`
+ // `| [instance_dot_version] + '.' + [application_domain_name]`
+ // `| [instance_dot_version_dot_service] + '.' + [application_domain_name]` + // + // * `application_domain_name` = The domain name of the app, for + // example .appspot.com, which is associated with the + // queue's project ID. Some tasks which were created using the App Engine + // SDK use a custom domain name. + // + // * `service =` + // [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + // + // * `version =` + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + // + // * `version_dot_service =` + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] `+ '.' +` + // [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + // + // * `instance =` + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + // + // * `instance_dot_service =` + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] `+ '.' + // +` [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + // + // * `instance_dot_version =` + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] `+ '.' + // +` [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + // + // * `instance_dot_version_dot_service =` + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] `+ '.' + // +` [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] `+ '.' + // +` [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + // + // If [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] is empty, + // then the task will be sent to the service which is the default service when + // the task is attempted. + // + // If [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] is empty, + // then the task will be sent to the version which is the default version when + // the task is attempted. + // + // If [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] is + // empty, then the task will be sent to an instance which is available when + // the task is attempted. + // + // If [service][google.cloud.tasks.v2beta2.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta2.AppEngineRouting.version], or + // [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] is + // invalid, then the task will be sent to the default version of the default + // service when the task is attempted. 
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineRouting) Reset() { *m = AppEngineRouting{} } +func (m *AppEngineRouting) String() string { return proto.CompactTextString(m) } +func (*AppEngineRouting) ProtoMessage() {} +func (*AppEngineRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_target_94aeace9d01cd65d, []int{4} +} +func (m *AppEngineRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineRouting.Unmarshal(m, b) +} +func (m *AppEngineRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineRouting.Marshal(b, m, deterministic) +} +func (dst *AppEngineRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineRouting.Merge(dst, src) +} +func (m *AppEngineRouting) XXX_Size() int { + return xxx_messageInfo_AppEngineRouting.Size(m) +} +func (m *AppEngineRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineRouting proto.InternalMessageInfo + +func (m *AppEngineRouting) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AppEngineRouting) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *AppEngineRouting) GetInstance() string { + if m != nil { + return m.Instance + } + return "" +} + +func (m *AppEngineRouting) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func init() { + proto.RegisterType((*PullTarget)(nil), "google.cloud.tasks.v2beta2.PullTarget") + proto.RegisterType((*PullMessage)(nil), "google.cloud.tasks.v2beta2.PullMessage") + proto.RegisterType((*AppEngineHttpTarget)(nil), "google.cloud.tasks.v2beta2.AppEngineHttpTarget") + proto.RegisterType((*AppEngineHttpRequest)(nil), "google.cloud.tasks.v2beta2.AppEngineHttpRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry") + proto.RegisterType((*AppEngineRouting)(nil), "google.cloud.tasks.v2beta2.AppEngineRouting") + proto.RegisterEnum("google.cloud.tasks.v2beta2.HttpMethod", HttpMethod_name, HttpMethod_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta2/target.proto", fileDescriptor_target_94aeace9d01cd65d) +} + +var fileDescriptor_target_94aeace9d01cd65d = []byte{ + // 557 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcf, 0x6f, 0xd3, 0x4c, + 0x10, 0xfd, 0x5c, 0xa7, 0xbf, 0xc6, 0xd1, 0x27, 0x6b, 0xa9, 0x84, 0x95, 0xa2, 0xaa, 0xe4, 0x00, + 0x15, 0x42, 0xb6, 0x14, 0x2e, 0x50, 0x84, 0x50, 0x4b, 0x4c, 0x53, 0x89, 0x12, 0xcb, 0x75, 0x84, + 0x54, 0x0e, 0xd6, 0x26, 0x19, 0x1c, 0x2b, 0xee, 0xae, 0xd9, 0x5d, 0x5b, 0xca, 0x95, 0x3b, 0xff, + 0x33, 0xf2, 0xda, 0x09, 0x69, 0x80, 0x0a, 0x6e, 0xf3, 0x66, 0xde, 0xbc, 0xc9, 0xbc, 0xf1, 0x06, + 0x9e, 0x26, 0x9c, 0x27, 0x19, 0x7a, 0x93, 0x8c, 0x17, 0x53, 0x4f, 0x51, 0x39, 0x97, 0x5e, 0xd9, + 0x1b, 0xa3, 0xa2, 0x3d, 0x4f, 0x51, 0x91, 0xa0, 0x72, 0x73, 0xc1, 0x15, 0x27, 0x9d, 0x9a, 0xe8, + 0x6a, 0xa2, 0xab, 0x89, 0x6e, 0x43, 0xec, 0x3c, 0x6a, 0x44, 0x68, 0x9e, 0x7a, 0x94, 0x31, 0xae, + 0xa8, 0x4a, 0x39, 0x93, 0x75, 0x67, 0xe7, 0xa8, 0xa9, 0x6a, 0x34, 0x2e, 0xbe, 0x78, 0xd3, 0x42, + 0x68, 0x42, 0x5d, 0xef, 0xb6, 0x01, 0x82, 0x22, 0xcb, 0x22, 0x3d, 0xad, 0xfb, 0x0a, 0xac, 0x0a, + 0x5d, 0xa1, 0x94, 0x34, 0x41, 0xe2, 0xc0, 0x6e, 0x4e, 
0x17, 0x19, 0xa7, 0x53, 0xc7, 0x38, 0x36, + 0x4e, 0xda, 0xe1, 0x12, 0x12, 0x1b, 0x4c, 0x45, 0x13, 0x67, 0xeb, 0xd8, 0x38, 0xd9, 0x0f, 0xab, + 0xb0, 0xfb, 0xcd, 0x80, 0x07, 0x67, 0x79, 0xee, 0xb3, 0x24, 0x65, 0x38, 0x50, 0x2a, 0xaf, 0x25, + 0xc9, 0x1c, 0x0e, 0x69, 0x9e, 0xc7, 0xa8, 0xf3, 0xb1, 0xe0, 0x85, 0x4a, 0x59, 0x12, 0xf3, 0x12, + 0x85, 0x48, 0xa7, 0xa8, 0x75, 0xad, 0xde, 0x73, 0xf7, 0xcf, 0x0b, 0xba, 0x2b, 0xd5, 0xb0, 0x6e, + 0x0e, 0x1d, 0xba, 0x91, 0x19, 0x36, 0x6a, 0xdd, 0xef, 0x26, 0x1c, 0xdc, 0xf9, 0x11, 0x21, 0x7e, + 0x2d, 0x50, 0x2a, 0x72, 0x01, 0xd6, 0x4c, 0xa9, 0x3c, 0xbe, 0x45, 0x35, 0xe3, 0xf5, 0x36, 0xff, + 0xf7, 0x9e, 0xdc, 0x37, 0xb5, 0xea, 0xbe, 0xd2, 0xec, 0x10, 0x66, 0xab, 0x98, 0xdc, 0x00, 0xf9, + 0x75, 0x1d, 0xed, 0xc3, 0xbf, 0x6e, 0x61, 0x6f, 0x6e, 0x41, 0x1e, 0x43, 0x5b, 0x60, 0x46, 0x55, + 0x5a, 0x62, 0x5c, 0x88, 0xcc, 0x31, 0xb5, 0xbb, 0xd6, 0x32, 0x37, 0x12, 0x19, 0xf9, 0x04, 0xbb, + 0x33, 0xa4, 0x53, 0x14, 0xd2, 0x69, 0x1d, 0x9b, 0x27, 0x56, 0xef, 0xcd, 0x5f, 0xcd, 0x5c, 0xb3, + 0xc2, 0x1d, 0xd4, 0xfd, 0x3e, 0x53, 0x62, 0x11, 0x2e, 0xd5, 0xd6, 0x4f, 0xbd, 0x7d, 0xe7, 0xd4, + 0x9d, 0x53, 0x68, 0xaf, 0xb7, 0x54, 0xa7, 0x9f, 0xe3, 0x42, 0x5b, 0xb8, 0x1f, 0x56, 0x21, 0x39, + 0x80, 0xed, 0x92, 0x66, 0x05, 0x36, 0x9f, 0x43, 0x0d, 0x4e, 0xb7, 0x5e, 0x1a, 0xdd, 0x12, 0xec, + 0xcd, 0xbd, 0xab, 0x49, 0x12, 0x45, 0x99, 0x4e, 0xb0, 0xd1, 0x58, 0xc2, 0xaa, 0x52, 0xa2, 0x90, + 0x29, 0x67, 0x8d, 0xd2, 0x12, 0x92, 0x0e, 0xec, 0xa5, 0x4c, 0x2a, 0xca, 0x26, 0xd8, 0xb8, 0xb2, + 0xc2, 0x84, 0x40, 0x6b, 0xc6, 0xa5, 0x72, 0x5a, 0x3a, 0xaf, 0xe3, 0x67, 0x9f, 0x01, 0x7e, 0xde, + 0x8f, 0x1c, 0xc2, 0xc3, 0x41, 0x14, 0x05, 0xf1, 0x95, 0x1f, 0x0d, 0x86, 0xfd, 0x78, 0xf4, 0xf1, + 0x3a, 0xf0, 0xdf, 0x5d, 0xbe, 0xbf, 0xf4, 0xfb, 0xf6, 0x7f, 0x64, 0x0f, 0x5a, 0xc1, 0xf0, 0x3a, + 0xb2, 0x0d, 0xb2, 0x0b, 0xe6, 0x85, 0x1f, 0xd9, 0x5b, 0x55, 0x6a, 0xe0, 0x9f, 0xf5, 0x6d, 0xb3, + 0x4a, 0x05, 0xa3, 0xc8, 0x6e, 0x11, 0x80, 0x9d, 0xbe, 0xff, 0xc1, 0x8f, 0x7c, 0x7b, 0xfb, 0x3c, + 0x87, 0xa3, 0x09, 0xbf, 0xbd, 0xc7, 0xf7, 0x73, 0xab, 0xfe, 0xf6, 0x83, 0xea, 0x85, 0x05, 0xc6, + 0xcd, 0xdb, 0x86, 0x9a, 0xf0, 0x8c, 0xb2, 0xc4, 0xe5, 0x22, 0xf1, 0x12, 0x64, 0xfa, 0xfd, 0x79, + 0x75, 0x89, 0xe6, 0xa9, 0xfc, 0xdd, 0xbf, 0xc0, 0x6b, 0x8d, 0xc6, 0x3b, 0x9a, 0xfb, 0xe2, 0x47, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xbb, 0x5d, 0x40, 0x26, 0x30, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/task.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/task.pb.go new file mode 100644 index 0000000..626c861 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta2/task.pb.go @@ -0,0 +1,517 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta2/task.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The view specifies a subset of [Task][google.cloud.tasks.v2beta2.Task] +// data. +// +// When a task is returned in a response, not all +// information is retrieved by default because some data, such as +// payloads, might be desirable to return only when needed because +// of its large size or because of the sensitivity of data that it +// contains. +type Task_View int32 + +const ( + // Unspecified. Defaults to BASIC. + Task_VIEW_UNSPECIFIED Task_View = 0 + // The basic view omits fields which can be large or can contain + // sensitive data. + // + // This view does not include the + // ([payload in + // AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + // and [payload in + // PullMessage][google.cloud.tasks.v2beta2.PullMessage.payload]). These + // payloads are desirable to return only when needed, because they can be + // large and because of the sensitivity of the data that you choose to store + // in it. + Task_BASIC Task_View = 1 + // All information is returned. + // + // Authorization for [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] + // requires `cloudtasks.tasks.fullView` [Google + // IAM](https://cloud.google.com/iam/) permission on the + // [Queue][google.cloud.tasks.v2beta2.Queue] resource. + Task_FULL Task_View = 2 +) + +var Task_View_name = map[int32]string{ + 0: "VIEW_UNSPECIFIED", + 1: "BASIC", + 2: "FULL", +} +var Task_View_value = map[string]int32{ + "VIEW_UNSPECIFIED": 0, + "BASIC": 1, + "FULL": 2, +} + +func (x Task_View) String() string { + return proto.EnumName(Task_View_name, int32(x)) +} +func (Task_View) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_task_bf54980d4c1d27a8, []int{0, 0} +} + +// A unit of scheduled work. +type Task struct { + // Optionally caller-specified in + // [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + // + // The task name. + // + // The task name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the task's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. + // * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), or underscores (_). The maximum length is 500 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // + // The task's payload is used by the task's target to process the task. + // A payload is valid only if it is compatible with the queue's target. + // + // Types that are valid to be assigned to PayloadType: + // *Task_AppEngineHttpRequest + // *Task_PullMessage + PayloadType isTask_PayloadType `protobuf_oneof:"payload_type"` + // The time when the task is scheduled to be attempted. 
+ // + // For App Engine queues, this is when the task will be attempted or retried. + // + // For pull queues, this is the time when the task is available to + // be leased; if a task is currently leased, this is the time when + // the current lease expires, that is, the time that the task was + // leased plus the + // [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that the task was created. + // + // `create_time` will be truncated to the nearest second. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The task status. + Status *TaskStatus `protobuf:"bytes,7,opt,name=status,proto3" json:"status,omitempty"` + // Output only. The view specifies which subset of the + // [Task][google.cloud.tasks.v2beta2.Task] has been returned. + View Task_View `protobuf:"varint,8,opt,name=view,proto3,enum=google.cloud.tasks.v2beta2.Task_View" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_task_bf54980d4c1d27a8, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} +func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isTask_PayloadType interface { + isTask_PayloadType() +} + +type Task_AppEngineHttpRequest struct { + AppEngineHttpRequest *AppEngineHttpRequest `protobuf:"bytes,3,opt,name=app_engine_http_request,json=appEngineHttpRequest,proto3,oneof"` +} + +type Task_PullMessage struct { + PullMessage *PullMessage `protobuf:"bytes,4,opt,name=pull_message,json=pullMessage,proto3,oneof"` +} + +func (*Task_AppEngineHttpRequest) isTask_PayloadType() {} + +func (*Task_PullMessage) isTask_PayloadType() {} + +func (m *Task) GetPayloadType() isTask_PayloadType { + if m != nil { + return m.PayloadType + } + return nil +} + +func (m *Task) GetAppEngineHttpRequest() *AppEngineHttpRequest { + if x, ok := m.GetPayloadType().(*Task_AppEngineHttpRequest); ok { + return x.AppEngineHttpRequest + } + return nil +} + +func (m *Task) GetPullMessage() *PullMessage { + if x, ok := m.GetPayloadType().(*Task_PullMessage); ok { + return x.PullMessage + } + return nil +} + +func (m *Task) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Task) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Task) GetStatus() *TaskStatus { + if m != nil { + return m.Status + } + return nil +} + +func (m *Task) GetView() Task_View { + if m != nil 
{ + return m.View + } + return Task_VIEW_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Task) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Task_OneofMarshaler, _Task_OneofUnmarshaler, _Task_OneofSizer, []interface{}{ + (*Task_AppEngineHttpRequest)(nil), + (*Task_PullMessage)(nil), + } +} + +func _Task_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Task) + // payload_type + switch x := m.PayloadType.(type) { + case *Task_AppEngineHttpRequest: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpRequest); err != nil { + return err + } + case *Task_PullMessage: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PullMessage); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Task.PayloadType has unexpected type %T", x) + } + return nil +} + +func _Task_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Task) + switch tag { + case 3: // payload_type.app_engine_http_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpRequest) + err := b.DecodeMessage(msg) + m.PayloadType = &Task_AppEngineHttpRequest{msg} + return true, err + case 4: // payload_type.pull_message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PullMessage) + err := b.DecodeMessage(msg) + m.PayloadType = &Task_PullMessage{msg} + return true, err + default: + return false, nil + } +} + +func _Task_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Task) + // payload_type + switch x := m.PayloadType.(type) { + case *Task_AppEngineHttpRequest: + s := proto.Size(x.AppEngineHttpRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Task_PullMessage: + s := proto.Size(x.PullMessage) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Status of the task. +type TaskStatus struct { + // Output only. The number of attempts dispatched. + // + // This count includes tasks which have been dispatched but haven't + // received a response. + AttemptDispatchCount int32 `protobuf:"varint,1,opt,name=attempt_dispatch_count,json=attemptDispatchCount,proto3" json:"attempt_dispatch_count,omitempty"` + // Output only. The number of attempts which have received a response. + // + // This field is not calculated for [pull + // tasks][google.cloud.tasks.v2beta2.PullMessage]. + AttemptResponseCount int32 `protobuf:"varint,2,opt,name=attempt_response_count,json=attemptResponseCount,proto3" json:"attempt_response_count,omitempty"` + // Output only. The status of the task's first attempt. + // + // Only + // [dispatch_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time] + // will be set. The other + // [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] information is + // not retained by Cloud Tasks. + // + // This field is not calculated for [pull + // tasks][google.cloud.tasks.v2beta2.PullMessage]. + FirstAttemptStatus *AttemptStatus `protobuf:"bytes,3,opt,name=first_attempt_status,json=firstAttemptStatus,proto3" json:"first_attempt_status,omitempty"` + // Output only. The status of the task's last attempt. 
+ // + // This field is not calculated for [pull + // tasks][google.cloud.tasks.v2beta2.PullMessage]. + LastAttemptStatus *AttemptStatus `protobuf:"bytes,4,opt,name=last_attempt_status,json=lastAttemptStatus,proto3" json:"last_attempt_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskStatus) Reset() { *m = TaskStatus{} } +func (m *TaskStatus) String() string { return proto.CompactTextString(m) } +func (*TaskStatus) ProtoMessage() {} +func (*TaskStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_task_bf54980d4c1d27a8, []int{1} +} +func (m *TaskStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskStatus.Unmarshal(m, b) +} +func (m *TaskStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskStatus.Marshal(b, m, deterministic) +} +func (dst *TaskStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskStatus.Merge(dst, src) +} +func (m *TaskStatus) XXX_Size() int { + return xxx_messageInfo_TaskStatus.Size(m) +} +func (m *TaskStatus) XXX_DiscardUnknown() { + xxx_messageInfo_TaskStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskStatus proto.InternalMessageInfo + +func (m *TaskStatus) GetAttemptDispatchCount() int32 { + if m != nil { + return m.AttemptDispatchCount + } + return 0 +} + +func (m *TaskStatus) GetAttemptResponseCount() int32 { + if m != nil { + return m.AttemptResponseCount + } + return 0 +} + +func (m *TaskStatus) GetFirstAttemptStatus() *AttemptStatus { + if m != nil { + return m.FirstAttemptStatus + } + return nil +} + +func (m *TaskStatus) GetLastAttemptStatus() *AttemptStatus { + if m != nil { + return m.LastAttemptStatus + } + return nil +} + +// The status of a task attempt. +type AttemptStatus struct { + // Output only. The time that this attempt was scheduled. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that this attempt was dispatched. + // + // `dispatch_time` will be truncated to the nearest microsecond. + DispatchTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=dispatch_time,json=dispatchTime,proto3" json:"dispatch_time,omitempty"` + // Output only. The time that this attempt response was received. + // + // `response_time` will be truncated to the nearest microsecond. + ResponseTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=response_time,json=responseTime,proto3" json:"response_time,omitempty"` + // Output only. The response from the target for this attempt. + // + // If the task has not been attempted or the task is currently running + // then the response status is unset. 
+ ResponseStatus *status.Status `protobuf:"bytes,4,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttemptStatus) Reset() { *m = AttemptStatus{} } +func (m *AttemptStatus) String() string { return proto.CompactTextString(m) } +func (*AttemptStatus) ProtoMessage() {} +func (*AttemptStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_task_bf54980d4c1d27a8, []int{2} +} +func (m *AttemptStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttemptStatus.Unmarshal(m, b) +} +func (m *AttemptStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttemptStatus.Marshal(b, m, deterministic) +} +func (dst *AttemptStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttemptStatus.Merge(dst, src) +} +func (m *AttemptStatus) XXX_Size() int { + return xxx_messageInfo_AttemptStatus.Size(m) +} +func (m *AttemptStatus) XXX_DiscardUnknown() { + xxx_messageInfo_AttemptStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_AttemptStatus proto.InternalMessageInfo + +func (m *AttemptStatus) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *AttemptStatus) GetDispatchTime() *timestamp.Timestamp { + if m != nil { + return m.DispatchTime + } + return nil +} + +func (m *AttemptStatus) GetResponseTime() *timestamp.Timestamp { + if m != nil { + return m.ResponseTime + } + return nil +} + +func (m *AttemptStatus) GetResponseStatus() *status.Status { + if m != nil { + return m.ResponseStatus + } + return nil +} + +func init() { + proto.RegisterType((*Task)(nil), "google.cloud.tasks.v2beta2.Task") + proto.RegisterType((*TaskStatus)(nil), "google.cloud.tasks.v2beta2.TaskStatus") + proto.RegisterType((*AttemptStatus)(nil), "google.cloud.tasks.v2beta2.AttemptStatus") + proto.RegisterEnum("google.cloud.tasks.v2beta2.Task_View", Task_View_name, Task_View_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta2/task.proto", fileDescriptor_task_bf54980d4c1d27a8) +} + +var fileDescriptor_task_bf54980d4c1d27a8 = []byte{ + // 601 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x5f, 0x6f, 0xda, 0x3c, + 0x14, 0xc6, 0x1b, 0x9a, 0xf6, 0x2d, 0xe6, 0xcf, 0xcb, 0x3c, 0xb4, 0x46, 0x68, 0xda, 0x10, 0x52, + 0x57, 0x76, 0x93, 0x6c, 0x6c, 0x37, 0x53, 0xa5, 0xa1, 0x42, 0xa9, 0x40, 0x62, 0x13, 0x0a, 0x6d, + 0xa7, 0x6d, 0x17, 0x91, 0x09, 0x6e, 0x88, 0x9a, 0xc4, 0x5e, 0x7c, 0xd2, 0xaa, 0x9f, 0xa2, 0x5f, + 0x65, 0x1f, 0x71, 0x8a, 0xe3, 0xd0, 0xa1, 0x76, 0xa0, 0xde, 0x71, 0xce, 0x79, 0x7e, 0x8f, 0xed, + 0x47, 0x87, 0xa0, 0x03, 0x8f, 0x31, 0x2f, 0xa0, 0x96, 0x1b, 0xb0, 0x64, 0x6e, 0x01, 0x11, 0x57, + 0xc2, 0xba, 0xee, 0xcc, 0x28, 0x90, 0x8e, 0xac, 0x4c, 0x1e, 0x33, 0x60, 0xb8, 0x91, 0xc9, 0x4c, + 0x29, 0x33, 0xa5, 0xcc, 0x54, 0xb2, 0xc6, 0x4b, 0x65, 0x41, 0xb8, 0x6f, 0x91, 0x28, 0x62, 0x40, + 0xc0, 0x67, 0x91, 0xc8, 0xc8, 0xc6, 0xe1, 0xda, 0x03, 0x62, 0x8f, 0x82, 0x12, 0xbe, 0x56, 0x42, + 0x59, 0xcd, 0x92, 0x4b, 0x0b, 0xfc, 0x90, 0x0a, 0x20, 0x21, 0x57, 0x82, 0x7d, 0x25, 0x88, 0xb9, + 0x6b, 0x09, 0x20, 0x90, 0xa8, 0x23, 0x5a, 0x77, 0x3a, 0xd2, 0xcf, 0x88, 0xb8, 0xc2, 0x18, 0xe9, + 0x11, 0x09, 0xa9, 0xa1, 0x35, 0xb5, 0x76, 0xd1, 0x96, 0xbf, 0xb1, 0x8f, 0xf6, 0x09, 0xe7, 0x0e, + 0x8d, 0x3c, 0x3f, 0xa2, 0xce, 0x02, 0x80, 0x3b, 0x31, 0xfd, 0x95, 0x50, 0x01, 0xc6, 0x76, 0x53, + 0x6b, 
0x97, 0x3a, 0xef, 0xcc, 0x7f, 0xbf, 0xcd, 0x3c, 0xe6, 0x7c, 0x20, 0xc9, 0x21, 0x00, 0xb7, + 0x33, 0x6e, 0xb8, 0x65, 0xd7, 0xc9, 0x23, 0x7d, 0x3c, 0x46, 0x65, 0x9e, 0x04, 0x81, 0x13, 0x52, + 0x21, 0x88, 0x47, 0x0d, 0x5d, 0xfa, 0x1f, 0xae, 0xf3, 0x9f, 0x24, 0x41, 0xf0, 0x25, 0x93, 0x0f, + 0xb7, 0xec, 0x12, 0xbf, 0x2f, 0x71, 0x17, 0x55, 0x84, 0xbb, 0xa0, 0xf3, 0x24, 0xa0, 0x4e, 0x1a, + 0x85, 0xb1, 0x23, 0xed, 0x1a, 0xb9, 0x5d, 0x9e, 0x93, 0x79, 0x96, 0xe7, 0x64, 0x97, 0x73, 0x20, + 0x6d, 0xe1, 0x23, 0x54, 0x72, 0x63, 0x4a, 0x40, 0xe1, 0xbb, 0x1b, 0x71, 0x94, 0xc9, 0x25, 0xfc, + 0x19, 0xed, 0x66, 0x19, 0x1b, 0xff, 0x49, 0xee, 0xcd, 0xba, 0x57, 0xa4, 0xe1, 0x4f, 0xa5, 0xda, + 0x56, 0x14, 0xfe, 0x84, 0xf4, 0x6b, 0x9f, 0xde, 0x18, 0x7b, 0x4d, 0xad, 0x5d, 0xed, 0x1c, 0x6c, + 0xa2, 0xcd, 0x0b, 0x9f, 0xde, 0xd8, 0x12, 0x69, 0xbd, 0x47, 0x7a, 0x5a, 0xe1, 0x3a, 0xaa, 0x5d, + 0x8c, 0x06, 0xdf, 0x9c, 0xf3, 0xaf, 0xd3, 0xc9, 0xa0, 0x3f, 0x3a, 0x1d, 0x0d, 0x4e, 0x6a, 0x5b, + 0xb8, 0x88, 0x76, 0x7a, 0xc7, 0xd3, 0x51, 0xbf, 0xa6, 0xe1, 0x3d, 0xa4, 0x9f, 0x9e, 0x8f, 0xc7, + 0xb5, 0x42, 0xaf, 0x8a, 0xca, 0x9c, 0xdc, 0x06, 0x8c, 0xcc, 0x1d, 0xb8, 0xe5, 0xb4, 0xf5, 0xbb, + 0x80, 0xd0, 0xfd, 0xa5, 0xf0, 0x47, 0xf4, 0x82, 0x00, 0xd0, 0x90, 0x83, 0x33, 0xf7, 0x05, 0x27, + 0xe0, 0x2e, 0x1c, 0x97, 0x25, 0x11, 0xc8, 0x4d, 0xd9, 0xb1, 0xeb, 0x6a, 0x7a, 0xa2, 0x86, 0xfd, + 0x74, 0xf6, 0x37, 0x15, 0x53, 0xc1, 0x59, 0x24, 0xa8, 0xa2, 0x0a, 0x2b, 0x94, 0xad, 0x86, 0x19, + 0xf5, 0x13, 0xd5, 0x2f, 0xfd, 0x58, 0x80, 0x93, 0xb3, 0x2a, 0xc6, 0x6c, 0xd9, 0xde, 0xae, 0x5d, + 0xb6, 0x8c, 0x50, 0x49, 0x62, 0x69, 0xb3, 0xd2, 0xc3, 0xdf, 0xd1, 0xf3, 0x80, 0x3c, 0xf4, 0xd6, + 0x9f, 0xea, 0xfd, 0x2c, 0x75, 0x59, 0x69, 0xb5, 0xee, 0x0a, 0xa8, 0xb2, 0x7a, 0xd8, 0x83, 0x05, + 0xd4, 0x9e, 0xb8, 0x80, 0x5d, 0x54, 0x59, 0xc6, 0x2d, 0x0d, 0x0a, 0x9b, 0x0d, 0x72, 0x20, 0x37, + 0x58, 0x26, 0x2f, 0x0d, 0xb6, 0x37, 0x1b, 0xe4, 0x80, 0xfa, 0x0b, 0xfc, 0xbf, 0x34, 0x58, 0xc9, + 0x0a, 0xe7, 0x16, 0x31, 0x77, 0x4d, 0x15, 0x4a, 0x35, 0x97, 0x66, 0x75, 0x2f, 0x42, 0xaf, 0x5c, + 0x16, 0xae, 0x09, 0xb5, 0x57, 0x4c, 0x77, 0x6c, 0x92, 0x5e, 0x62, 0xa2, 0xfd, 0xe8, 0x2a, 0xa1, + 0xc7, 0x02, 0x12, 0x79, 0x26, 0x8b, 0x3d, 0xcb, 0xa3, 0x91, 0xbc, 0xa2, 0x95, 0x8d, 0x08, 0xf7, + 0xc5, 0x63, 0xdf, 0xc1, 0x23, 0x59, 0xcd, 0x76, 0xa5, 0xf6, 0xc3, 0x9f, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x67, 0x07, 0xb1, 0x59, 0x93, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/cloudtasks.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/cloudtasks.pb.go new file mode 100644 index 0000000..1ecae9b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/cloudtasks.pb.go @@ -0,0 +1,1880 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta3/cloudtasks.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import _ "google.golang.org/genproto/googleapis/rpc/code" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. +type ListQueuesRequest struct { + // Required. + // + // The location name. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // `filter` can be used to specify a subset of queues. Any [Queue][google.cloud.tasks.v2beta3.Queue] + // field can be used as a filter and several operators as supported. + // For example: `<=, <, >=, >, !=, =, :`. The filter syntax is the same as + // described in + // [Stackdriver's Advanced Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). + // + // Sample filter "state: PAUSED". + // + // Note that using filters might cause fewer queues than the + // requested page_size to be returned. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Requested page size. + // + // The maximum page size is 9800. If unspecified, the page size will + // be the maximum. Fewer queues than requested might be returned, + // even if more queues exist; use the + // [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] in the + // response to determine if more queues exist. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2beta3.ListQueuesResponse.next_page_token] returned + // from the previous call to [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] + // method. It is an error to switch the value of the + // [filter][google.cloud.tasks.v2beta3.ListQueuesRequest.filter] while iterating through pages. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesRequest) Reset() { *m = ListQueuesRequest{} } +func (m *ListQueuesRequest) String() string { return proto.CompactTextString(m) } +func (*ListQueuesRequest) ProtoMessage() {} +func (*ListQueuesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{0} +} +func (m *ListQueuesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesRequest.Unmarshal(m, b) +} +func (m *ListQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesRequest.Marshal(b, m, deterministic) +} +func (dst *ListQueuesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesRequest.Merge(dst, src) +} +func (m *ListQueuesRequest) XXX_Size() int { + return xxx_messageInfo_ListQueuesRequest.Size(m) +} +func (m *ListQueuesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesRequest proto.InternalMessageInfo + +func (m *ListQueuesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListQueuesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListQueuesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListQueuesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues]. +type ListQueuesResponse struct { + // The list of queues. + Queues []*Queue `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListQueues][google.cloud.tasks.v2beta3.CloudTasks.ListQueues] with this value as the + // [page_token][google.cloud.tasks.v2beta3.ListQueuesRequest.page_token]. + // + // If the next_page_token is empty, there are no more results. + // + // The page token is valid for only 2 hours. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListQueuesResponse) Reset() { *m = ListQueuesResponse{} } +func (m *ListQueuesResponse) String() string { return proto.CompactTextString(m) } +func (*ListQueuesResponse) ProtoMessage() {} +func (*ListQueuesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{1} +} +func (m *ListQueuesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListQueuesResponse.Unmarshal(m, b) +} +func (m *ListQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListQueuesResponse.Marshal(b, m, deterministic) +} +func (dst *ListQueuesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListQueuesResponse.Merge(dst, src) +} +func (m *ListQueuesResponse) XXX_Size() int { + return xxx_messageInfo_ListQueuesResponse.Size(m) +} +func (m *ListQueuesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListQueuesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListQueuesResponse proto.InternalMessageInfo + +func (m *ListQueuesResponse) GetQueues() []*Queue { + if m != nil { + return m.Queues + } + return nil +} + +func (m *ListQueuesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for [GetQueue][google.cloud.tasks.v2beta3.CloudTasks.GetQueue]. +type GetQueueRequest struct { + // Required. + // + // The resource name of the queue. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueRequest) Reset() { *m = GetQueueRequest{} } +func (m *GetQueueRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueRequest) ProtoMessage() {} +func (*GetQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{2} +} +func (m *GetQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueRequest.Unmarshal(m, b) +} +func (m *GetQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueRequest.Merge(dst, src) +} +func (m *GetQueueRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueRequest.Size(m) +} +func (m *GetQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueRequest proto.InternalMessageInfo + +func (m *GetQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue]. +type CreateQueueRequest struct { + // Required. + // + // The location name in which the queue will be created. + // For example: `projects/PROJECT_ID/locations/LOCATION_ID` + // + // The list of allowed locations can be obtained by calling Cloud + // Tasks' implementation of + // [ListLocations][google.cloud.location.Locations.ListLocations]. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. 
+ // + // The queue to create. + // + // [Queue's name][google.cloud.tasks.v2beta3.Queue.name] cannot be the same as an existing queue. + Queue *Queue `protobuf:"bytes,2,opt,name=queue,proto3" json:"queue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateQueueRequest) Reset() { *m = CreateQueueRequest{} } +func (m *CreateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*CreateQueueRequest) ProtoMessage() {} +func (*CreateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{3} +} +func (m *CreateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateQueueRequest.Unmarshal(m, b) +} +func (m *CreateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *CreateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateQueueRequest.Merge(dst, src) +} +func (m *CreateQueueRequest) XXX_Size() int { + return xxx_messageInfo_CreateQueueRequest.Size(m) +} +func (m *CreateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateQueueRequest proto.InternalMessageInfo + +func (m *CreateQueueRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +// Request message for [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue]. +type UpdateQueueRequest struct { + // Required. + // + // The queue to create or update. + // + // The queue's [name][google.cloud.tasks.v2beta3.Queue.name] must be specified. + // + // Output only fields cannot be modified using UpdateQueue. + // Any value specified for an output only field will be ignored. + // The queue's [name][google.cloud.tasks.v2beta3.Queue.name] cannot be changed. + Queue *Queue `protobuf:"bytes,1,opt,name=queue,proto3" json:"queue,omitempty"` + // A mask used to specify which fields of the queue are being updated. + // + // If empty, then all fields will be updated. 
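A hedged sketch of building the UpdateQueueRequest just described so that only selected fields are touched; the import paths, helper name, and the "rate_limits" mask path are illustrative assumptions, not part of the generated file.

package example

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/genproto/protobuf/field_mask"
	"google.golang.org/grpc"
)

// updateQueueMasked updates only the fields named in the mask; leaving the
// mask empty would update every field of the supplied queue.
func updateQueueMasked(ctx context.Context, conn *grpc.ClientConn, queueName string) (*taskspb.Queue, error) {
	client := taskspb.NewCloudTasksClient(conn)
	req := &taskspb.UpdateQueueRequest{
		Queue:      &taskspb.Queue{Name: queueName}, // plus whatever fields are being changed
		UpdateMask: &field_mask.FieldMask{Paths: []string{"rate_limits"}}, // example path
	}
	return client.UpdateQueue(ctx, req)
}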
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateQueueRequest) Reset() { *m = UpdateQueueRequest{} } +func (m *UpdateQueueRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateQueueRequest) ProtoMessage() {} +func (*UpdateQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{4} +} +func (m *UpdateQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateQueueRequest.Unmarshal(m, b) +} +func (m *UpdateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateQueueRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateQueueRequest.Merge(dst, src) +} +func (m *UpdateQueueRequest) XXX_Size() int { + return xxx_messageInfo_UpdateQueueRequest.Size(m) +} +func (m *UpdateQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateQueueRequest proto.InternalMessageInfo + +func (m *UpdateQueueRequest) GetQueue() *Queue { + if m != nil { + return m.Queue + } + return nil +} + +func (m *UpdateQueueRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. +type DeleteQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteQueueRequest) Reset() { *m = DeleteQueueRequest{} } +func (m *DeleteQueueRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteQueueRequest) ProtoMessage() {} +func (*DeleteQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{5} +} +func (m *DeleteQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteQueueRequest.Unmarshal(m, b) +} +func (m *DeleteQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteQueueRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteQueueRequest.Merge(dst, src) +} +func (m *DeleteQueueRequest) XXX_Size() int { + return xxx_messageInfo_DeleteQueueRequest.Size(m) +} +func (m *DeleteQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteQueueRequest proto.InternalMessageInfo + +func (m *DeleteQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue]. +type PurgeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PurgeQueueRequest) Reset() { *m = PurgeQueueRequest{} } +func (m *PurgeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PurgeQueueRequest) ProtoMessage() {} +func (*PurgeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{6} +} +func (m *PurgeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PurgeQueueRequest.Unmarshal(m, b) +} +func (m *PurgeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PurgeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PurgeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PurgeQueueRequest.Merge(dst, src) +} +func (m *PurgeQueueRequest) XXX_Size() int { + return xxx_messageInfo_PurgeQueueRequest.Size(m) +} +func (m *PurgeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PurgeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PurgeQueueRequest proto.InternalMessageInfo + +func (m *PurgeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue]. +type PauseQueueRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseQueueRequest) Reset() { *m = PauseQueueRequest{} } +func (m *PauseQueueRequest) String() string { return proto.CompactTextString(m) } +func (*PauseQueueRequest) ProtoMessage() {} +func (*PauseQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{7} +} +func (m *PauseQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseQueueRequest.Unmarshal(m, b) +} +func (m *PauseQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseQueueRequest.Marshal(b, m, deterministic) +} +func (dst *PauseQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseQueueRequest.Merge(dst, src) +} +func (m *PauseQueueRequest) XXX_Size() int { + return xxx_messageInfo_PauseQueueRequest.Size(m) +} +func (m *PauseQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseQueueRequest proto.InternalMessageInfo + +func (m *PauseQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. +type ResumeQueueRequest struct { + // Required. + // + // The queue name. 
For example: + // `projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeQueueRequest) Reset() { *m = ResumeQueueRequest{} } +func (m *ResumeQueueRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeQueueRequest) ProtoMessage() {} +func (*ResumeQueueRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{8} +} +func (m *ResumeQueueRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeQueueRequest.Unmarshal(m, b) +} +func (m *ResumeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeQueueRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeQueueRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeQueueRequest.Merge(dst, src) +} +func (m *ResumeQueueRequest) XXX_Size() int { + return xxx_messageInfo_ResumeQueueRequest.Size(m) +} +func (m *ResumeQueueRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeQueueRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeQueueRequest proto.InternalMessageInfo + +func (m *ResumeQueueRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for listing tasks using [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. +type ListTasksRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2beta3.Task] resource. + ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta3.Task_View" json:"response_view,omitempty"` + // Requested page size. Fewer tasks than requested might be returned. + // + // The maximum page size is 1000. If unspecified, the page size will + // be the maximum. Fewer tasks than requested might be returned, + // even if more tasks exist; use + // [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] in the + // response to determine if more tasks exist. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying the page of results to return. + // + // To request the first page results, page_token must be empty. To + // request the next page of results, page_token must be the value of + // [next_page_token][google.cloud.tasks.v2beta3.ListTasksResponse.next_page_token] returned + // from the previous call to [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] + // method. + // + // The page token is valid for only 2 hours. 
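A hedged sketch of issuing the ListTasksRequest described above with the default BASIC view; Task_BASIC is the constant protoc-gen-go is expected to emit for that enum value, and the import path is assumed.

package example

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/grpc"
)

// firstTaskPage fetches a single page of tasks in BASIC view; FULL view would
// additionally require the cloudtasks.tasks.fullView IAM permission.
func firstTaskPage(ctx context.Context, conn *grpc.ClientConn, queueName string) (*taskspb.ListTasksResponse, error) {
	client := taskspb.NewCloudTasksClient(conn)
	return client.ListTasks(ctx, &taskspb.ListTasksRequest{
		Parent:       queueName,
		ResponseView: taskspb.Task_BASIC, // assumed generated constant for View BASIC
		PageSize:     50,
	})
}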
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksRequest) Reset() { *m = ListTasksRequest{} } +func (m *ListTasksRequest) String() string { return proto.CompactTextString(m) } +func (*ListTasksRequest) ProtoMessage() {} +func (*ListTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{9} +} +func (m *ListTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksRequest.Unmarshal(m, b) +} +func (m *ListTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksRequest.Marshal(b, m, deterministic) +} +func (dst *ListTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksRequest.Merge(dst, src) +} +func (m *ListTasksRequest) XXX_Size() int { + return xxx_messageInfo_ListTasksRequest.Size(m) +} +func (m *ListTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksRequest proto.InternalMessageInfo + +func (m *ListTasksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTasksRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func (m *ListTasksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTasksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for listing tasks using [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks]. +type ListTasksResponse struct { + // The list of tasks. + Tasks []*Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + // A token to retrieve next page of results. + // + // To return the next page of results, call + // [ListTasks][google.cloud.tasks.v2beta3.CloudTasks.ListTasks] with this value as the + // [page_token][google.cloud.tasks.v2beta3.ListTasksRequest.page_token]. + // + // If the next_page_token is empty, there are no more results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTasksResponse) Reset() { *m = ListTasksResponse{} } +func (m *ListTasksResponse) String() string { return proto.CompactTextString(m) } +func (*ListTasksResponse) ProtoMessage() {} +func (*ListTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{10} +} +func (m *ListTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTasksResponse.Unmarshal(m, b) +} +func (m *ListTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTasksResponse.Marshal(b, m, deterministic) +} +func (dst *ListTasksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTasksResponse.Merge(dst, src) +} +func (m *ListTasksResponse) XXX_Size() int { + return xxx_messageInfo_ListTasksResponse.Size(m) +} +func (m *ListTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTasksResponse proto.InternalMessageInfo + +func (m *ListTasksResponse) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *ListTasksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for getting a task using [GetTask][google.cloud.tasks.v2beta3.CloudTasks.GetTask]. +type GetTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2beta3.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta3.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{11} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. +type CreateTaskRequest struct { + // Required. + // + // The queue name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // The queue must already exist. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // + // The task to add. + // + // Task names have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`. + // The user can optionally specify a task [name][google.cloud.tasks.v2beta3.Task.name]. If a + // name is not specified then the system will generate a random + // unique task id, which will be set in the task returned in the + // [response][google.cloud.tasks.v2beta3.Task.name]. + // + // If [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] is not set or is in the + // past then Cloud Tasks will set it to the current time. + // + // Task De-duplication: + // + // Explicitly specifying a task ID enables task de-duplication. If + // a task's ID is identical to that of an existing task or a task + // that was deleted or executed recently then the call will fail + // with [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS]. + // If the task's queue was created using Cloud Tasks, then another task with + // the same name can't be created for ~1hour after the original task was + // deleted or executed. If the task's queue was created using queue.yaml or + // queue.xml, then another task with the same name can't be created + // for ~9days after the original task was deleted or executed. + // + // Because there is an extra lookup cost to identify duplicate task + // names, these [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] calls have significantly + // increased latency. Using hashed strings for the task id or for + // the prefix of the task id is recommended. 
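A sketch of the hashed task-id pattern recommended above (the generated comment continues below); the helper name and import path are assumptions, and a real Task would also carry an HTTP or App Engine payload, omitted here.

package example

import (
	"context"
	"crypto/sha256"
	"fmt"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/grpc"
)

// createDedupedTask derives the task id from a hash of a caller-supplied key,
// so retrying the same logical work yields ALREADY_EXISTS rather than a
// duplicate task, while keeping ids non-sequential.
func createDedupedTask(ctx context.Context, conn *grpc.ClientConn, queueName, dedupKey string) (*taskspb.Task, error) {
	client := taskspb.NewCloudTasksClient(conn)
	taskID := fmt.Sprintf("%x", sha256.Sum256([]byte(dedupKey)))
	return client.CreateTask(ctx, &taskspb.CreateTaskRequest{
		Parent: queueName,
		Task:   &taskspb.Task{Name: queueName + "/tasks/" + taskID},
	})
}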
Choosing task ids that + // are sequential or have sequential prefixes, for example using a + // timestamp, causes an increase in latency and error rates in all + // task commands. The infrastructure relies on an approximately + // uniform distribution of task ids to store and serve tasks + // efficiently. + Task *Task `protobuf:"bytes,2,opt,name=task,proto3" json:"task,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2beta3.Task] resource. + ResponseView Task_View `protobuf:"varint,3,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta3.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTaskRequest) Reset() { *m = CreateTaskRequest{} } +func (m *CreateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTaskRequest) ProtoMessage() {} +func (*CreateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{12} +} +func (m *CreateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTaskRequest.Unmarshal(m, b) +} +func (m *CreateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTaskRequest.Merge(dst, src) +} +func (m *CreateTaskRequest) XXX_Size() int { + return xxx_messageInfo_CreateTaskRequest.Size(m) +} +func (m *CreateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTaskRequest proto.InternalMessageInfo + +func (m *CreateTaskRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTaskRequest) GetTask() *Task { + if m != nil { + return m.Task + } + return nil +} + +func (m *CreateTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +// Request message for deleting a task using +// [DeleteTask][google.cloud.tasks.v2beta3.CloudTasks.DeleteTask]. +type DeleteTaskRequest struct { + // Required. + // + // The task name. 
For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } +func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskRequest) ProtoMessage() {} +func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{13} +} +func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) +} +func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) +} +func (m *DeleteTaskRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTaskRequest.Size(m) +} +func (m *DeleteTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo + +func (m *DeleteTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for forcing a task to run now using +// [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask]. +type RunTaskRequest struct { + // Required. + // + // The task name. For example: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The response_view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] will be + // returned. + // + // By default response_view is [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC]; not all + // information is retrieved by default because some data, such as + // payloads, might be desirable to return only when needed because + // of its large size or because of the sensitivity of data that it + // contains. + // + // Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Task][google.cloud.tasks.v2beta3.Task] resource. 
+ ResponseView Task_View `protobuf:"varint,2,opt,name=response_view,json=responseView,proto3,enum=google.cloud.tasks.v2beta3.Task_View" json:"response_view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunTaskRequest) Reset() { *m = RunTaskRequest{} } +func (m *RunTaskRequest) String() string { return proto.CompactTextString(m) } +func (*RunTaskRequest) ProtoMessage() {} +func (*RunTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudtasks_6396912137772a30, []int{14} +} +func (m *RunTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunTaskRequest.Unmarshal(m, b) +} +func (m *RunTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunTaskRequest.Marshal(b, m, deterministic) +} +func (dst *RunTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunTaskRequest.Merge(dst, src) +} +func (m *RunTaskRequest) XXX_Size() int { + return xxx_messageInfo_RunTaskRequest.Size(m) +} +func (m *RunTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunTaskRequest proto.InternalMessageInfo + +func (m *RunTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RunTaskRequest) GetResponseView() Task_View { + if m != nil { + return m.ResponseView + } + return Task_VIEW_UNSPECIFIED +} + +func init() { + proto.RegisterType((*ListQueuesRequest)(nil), "google.cloud.tasks.v2beta3.ListQueuesRequest") + proto.RegisterType((*ListQueuesResponse)(nil), "google.cloud.tasks.v2beta3.ListQueuesResponse") + proto.RegisterType((*GetQueueRequest)(nil), "google.cloud.tasks.v2beta3.GetQueueRequest") + proto.RegisterType((*CreateQueueRequest)(nil), "google.cloud.tasks.v2beta3.CreateQueueRequest") + proto.RegisterType((*UpdateQueueRequest)(nil), "google.cloud.tasks.v2beta3.UpdateQueueRequest") + proto.RegisterType((*DeleteQueueRequest)(nil), "google.cloud.tasks.v2beta3.DeleteQueueRequest") + proto.RegisterType((*PurgeQueueRequest)(nil), "google.cloud.tasks.v2beta3.PurgeQueueRequest") + proto.RegisterType((*PauseQueueRequest)(nil), "google.cloud.tasks.v2beta3.PauseQueueRequest") + proto.RegisterType((*ResumeQueueRequest)(nil), "google.cloud.tasks.v2beta3.ResumeQueueRequest") + proto.RegisterType((*ListTasksRequest)(nil), "google.cloud.tasks.v2beta3.ListTasksRequest") + proto.RegisterType((*ListTasksResponse)(nil), "google.cloud.tasks.v2beta3.ListTasksResponse") + proto.RegisterType((*GetTaskRequest)(nil), "google.cloud.tasks.v2beta3.GetTaskRequest") + proto.RegisterType((*CreateTaskRequest)(nil), "google.cloud.tasks.v2beta3.CreateTaskRequest") + proto.RegisterType((*DeleteTaskRequest)(nil), "google.cloud.tasks.v2beta3.DeleteTaskRequest") + proto.RegisterType((*RunTaskRequest)(nil), "google.cloud.tasks.v2beta3.RunTaskRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudTasksClient is the client API for CloudTasks service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudTasksClient interface { + // Lists queues. + // + // Queues are returned in lexicographical order. 
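Before the individual methods, a sketch of how a caller might obtain a CloudTasksClient; the endpoint, TLS setup, and import path are assumptions, and per-RPC OAuth credentials (omitted here) are also needed against the real service.

package example

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"
)

// dialCloudTasks dials the public endpoint with system-root TLS and wraps the
// connection in the generated client.
func dialCloudTasks(ctx context.Context) (taskspb.CloudTasksClient, error) {
	conn, err := grpc.DialContext(ctx, "cloudtasks.googleapis.com:443",
		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")))
	if err != nil {
		return nil, err
	}
	return taskspb.NewCloudTasksClient(conn), nil
}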
+ ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. Tasks can still be added + // when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2beta3.Queue.state] is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The state of a queue is stored + // in the queue's [state][google.cloud.tasks.v2beta3.Queue.state]; after calling this method it + // will be set to [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. 
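A sketch of the pause/resume flow described above; the import path and helper name are assumptions.

package example

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/grpc"
)

// pauseThenResume stops dispatch on a queue and later resumes it; tasks can
// still be added while the queue is paused.
func pauseThenResume(ctx context.Context, conn *grpc.ClientConn, queueName string) error {
	client := taskspb.NewCloudTasksClient(conn)
	if _, err := client.PauseQueue(ctx, &taskspb.PauseQueueRequest{Name: queueName}); err != nil {
		return err
	}
	// ... maintenance window ...
	_, err := client.ResumeQueue(ctx, &taskspb.ResumeQueueRequest{Name: queueName})
	return err
}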
+ // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) + // Gets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. + // Returns an empty policy if the resource exists and does not have a policy + // set. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + // policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2beta3.Queue]. + // If the resource does not exist, this will return an empty set of + // permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is retrieved + // due to performance considerations; + // [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] controls the + // subset of information which is returned. + // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) + // Gets a task. + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], the maximum task size is + // 100KB. + CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has executed successfully or permanently + // failed. + DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Forces a task to run now. + // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or + // is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. 
+ // + // This command is meant to be used for manual debugging. For + // example, [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be used to retry a failed + // task after a fix has been made or to manually force a task to be + // dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][Task.status] after the task is dispatched but + // before the task is received by its target. + // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] will be reset to the time that + // [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was called plus the retry delay specified + // in the queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) +} + +type cloudTasksClient struct { + cc *grpc.ClientConn +} + +func NewCloudTasksClient(cc *grpc.ClientConn) CloudTasksClient { + return &cloudTasksClient{cc} +} + +func (c *cloudTasksClient) ListQueues(ctx context.Context, in *ListQueuesRequest, opts ...grpc.CallOption) (*ListQueuesResponse, error) { + out := new(ListQueuesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetQueue(ctx context.Context, in *GetQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateQueue(ctx context.Context, in *CreateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) UpdateQueue(ctx context.Context, in *UpdateQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteQueue(ctx context.Context, in *DeleteQueueRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PurgeQueue(ctx context.Context, in *PurgeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) PauseQueue(ctx context.Context, in *PauseQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ResumeQueue(ctx context.Context, in *ResumeQueueRequest, opts ...grpc.CallOption) (*Queue, error) { + out := new(Queue) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) ListTasks(ctx context.Context, in *ListTasksRequest, opts ...grpc.CallOption) (*ListTasksResponse, error) { + out := new(ListTasksResponse) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) CreateTask(ctx context.Context, in *CreateTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudTasksClient) RunTask(ctx context.Context, in *RunTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudTasksServer is the server API for CloudTasks service. +type CloudTasksServer interface { + // Lists queues. + // + // Queues are returned in lexicographical order. + ListQueues(context.Context, *ListQueuesRequest) (*ListQueuesResponse, error) + // Gets a queue. + GetQueue(context.Context, *GetQueueRequest) (*Queue, error) + // Creates a queue. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. 
+ // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + CreateQueue(context.Context, *CreateQueueRequest) (*Queue, error) + // Updates a queue. + // + // This method creates the queue if it does not exist and updates + // the queue if it does exist. + // + // Queues created with this method allow tasks to live for a maximum of 31 + // days. After a task is 31 days old, the task will be deleted regardless of whether + // it was dispatched or not. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + UpdateQueue(context.Context, *UpdateQueueRequest) (*Queue, error) + // Deletes a queue. + // + // This command will delete the queue even if it has tasks in it. + // + // Note: If you delete a queue, a queue with the same name can't be created + // for 7 days. + // + // WARNING: Using this method may have unintended side effects if you are + // using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + // Read + // [Overview of Queue Management and queue.yaml](https://cloud.google.com/tasks/docs/queue-yaml) + // before using this method. + DeleteQueue(context.Context, *DeleteQueueRequest) (*empty.Empty, error) + // Purges a queue by deleting all of its tasks. + // + // All tasks created before this method is called are permanently deleted. + // + // Purge operations can take up to one minute to take effect. Tasks + // might be dispatched before the purge takes effect. A purge is irreversible. + PurgeQueue(context.Context, *PurgeQueueRequest) (*Queue, error) + // Pauses the queue. + // + // If a queue is paused then the system will stop dispatching tasks + // until the queue is resumed via + // [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue]. Tasks can still be added + // when the queue is paused. A queue is paused if its + // [state][google.cloud.tasks.v2beta3.Queue.state] is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + PauseQueue(context.Context, *PauseQueueRequest) (*Queue, error) + // Resume a queue. + // + // This method resumes a queue after it has been + // [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED] or + // [DISABLED][google.cloud.tasks.v2beta3.Queue.State.DISABLED]. The state of a queue is stored + // in the queue's [state][google.cloud.tasks.v2beta3.Queue.state]; after calling this method it + // will be set to [RUNNING][google.cloud.tasks.v2beta3.Queue.State.RUNNING]. + // + // WARNING: Resuming many high-QPS queues at the same time can + // lead to target overloading. If you are resuming high-QPS + // queues, follow the 500/50/5 pattern described in + // [Managing Cloud Tasks Scaling Risks](https://cloud.google.com/tasks/docs/manage-cloud-task-scaling). + ResumeQueue(context.Context, *ResumeQueueRequest) (*Queue, error) + // Gets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. + // Returns an empty policy if the resource exists and does not have a policy + // set. 
+ // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.getIamPolicy` + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the access control policy for a [Queue][google.cloud.tasks.v2beta3.Queue]. Replaces any existing + // policy. + // + // Note: The Cloud Console does not check queue-level IAM permissions yet. + // Project-level permissions are required to use the Cloud Console. + // + // Authorization requires the following + // [Google IAM](https://cloud.google.com/iam) permission on the specified + // resource parent: + // + // * `cloudtasks.queues.setIamPolicy` + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2beta3.Queue]. + // If the resource does not exist, this will return an empty set of + // permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Lists the tasks in a queue. + // + // By default, only the [BASIC][google.cloud.tasks.v2beta3.Task.View.BASIC] view is retrieved + // due to performance considerations; + // [response_view][google.cloud.tasks.v2beta3.ListTasksRequest.response_view] controls the + // subset of information which is returned. + // + // The tasks may be returned in any order. The ordering may change at any + // time. + ListTasks(context.Context, *ListTasksRequest) (*ListTasksResponse, error) + // Gets a task. + GetTask(context.Context, *GetTaskRequest) (*Task, error) + // Creates a task and adds it to a queue. + // + // Tasks cannot be updated after creation; there is no UpdateTask command. + // + // * For [App Engine queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], the maximum task size is + // 100KB. + CreateTask(context.Context, *CreateTaskRequest) (*Task, error) + // Deletes a task. + // + // A task can be deleted if it is scheduled or dispatched. A task + // cannot be deleted if it has executed successfully or permanently + // failed. + DeleteTask(context.Context, *DeleteTaskRequest) (*empty.Empty, error) + // Forces a task to run now. + // + // When this method is called, Cloud Tasks will dispatch the task, even if + // the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2beta3.RateLimits] or + // is [PAUSED][google.cloud.tasks.v2beta3.Queue.State.PAUSED]. + // + // This command is meant to be used for manual debugging. For + // example, [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] can be used to retry a failed + // task after a fix has been made or to manually force a task to be + // dispatched now. + // + // The dispatched task is returned. That is, the task that is returned + // contains the [status][Task.status] after the task is dispatched but + // before the task is received by its target. 
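A client-side sketch of the RunTask behaviour described here (the description continues below); the import path, helper name, and the BASIC view constant are assumptions.

package example

import (
	"context"

	taskspb "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" // assumed import path
	"google.golang.org/grpc"
)

// forceRun dispatches a task immediately, e.g. to retry a failed task after a
// fix has shipped; NOT_FOUND is returned for tasks that have already
// succeeded or permanently failed.
func forceRun(ctx context.Context, conn *grpc.ClientConn, taskName string) (*taskspb.Task, error) {
	client := taskspb.NewCloudTasksClient(conn)
	return client.RunTask(ctx, &taskspb.RunTaskRequest{
		Name:         taskName,
		ResponseView: taskspb.Task_BASIC, // assumed generated constant for View BASIC
	})
}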
+ // + // If Cloud Tasks receives a successful response from the task's + // target, then the task will be deleted; otherwise the task's + // [schedule_time][google.cloud.tasks.v2beta3.Task.schedule_time] will be reset to the time that + // [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] was called plus the retry delay specified + // in the queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + // + // [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask] returns + // [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + // task that has already succeeded or permanently failed. + RunTask(context.Context, *RunTaskRequest) (*Task, error) +} + +func RegisterCloudTasksServer(s *grpc.Server, srv CloudTasksServer) { + s.RegisterService(&_CloudTasks_serviceDesc, srv) +} + +func _CloudTasks_ListQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListQueuesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListQueues(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/ListQueues", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListQueues(ctx, req.(*ListQueuesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/GetQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetQueue(ctx, req.(*GetQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/CreateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateQueue(ctx, req.(*CreateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_UpdateQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).UpdateQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/UpdateQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).UpdateQueue(ctx, req.(*UpdateQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteQueue_Handler(srv interface{}, ctx context.Context, dec 
func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/DeleteQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).DeleteQueue(ctx, req.(*DeleteQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PurgeQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PurgeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PurgeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/PurgeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PurgeQueue(ctx, req.(*PurgeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_PauseQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).PauseQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/PauseQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).PauseQueue(ctx, req.(*PauseQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ResumeQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeQueueRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ResumeQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/ResumeQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ResumeQueue(ctx, req.(*ResumeQueueRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + 
return srv.(CloudTasksServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_ListTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).ListTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/ListTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).ListTasks(ctx, req.(*ListTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_CreateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).CreateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/CreateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).CreateTask(ctx, req.(*CreateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/DeleteTask", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudTasks_RunTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudTasksServer).RunTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.tasks.v2beta3.CloudTasks/RunTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudTasksServer).RunTask(ctx, req.(*RunTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudTasks_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.tasks.v2beta3.CloudTasks", + HandlerType: (*CloudTasksServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListQueues", + Handler: _CloudTasks_ListQueues_Handler, + }, + { + MethodName: "GetQueue", + Handler: _CloudTasks_GetQueue_Handler, + }, + { + MethodName: "CreateQueue", + Handler: _CloudTasks_CreateQueue_Handler, + }, + { + MethodName: "UpdateQueue", + Handler: _CloudTasks_UpdateQueue_Handler, + }, + { + MethodName: "DeleteQueue", + Handler: _CloudTasks_DeleteQueue_Handler, + }, + { + MethodName: "PurgeQueue", + Handler: _CloudTasks_PurgeQueue_Handler, + }, + { + MethodName: "PauseQueue", + Handler: _CloudTasks_PauseQueue_Handler, + }, + { + MethodName: "ResumeQueue", + Handler: _CloudTasks_ResumeQueue_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _CloudTasks_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _CloudTasks_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _CloudTasks_TestIamPermissions_Handler, + }, + { + MethodName: "ListTasks", + Handler: _CloudTasks_ListTasks_Handler, + }, + { + MethodName: "GetTask", + Handler: _CloudTasks_GetTask_Handler, + }, + { + MethodName: "CreateTask", + Handler: _CloudTasks_CreateTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _CloudTasks_DeleteTask_Handler, + }, + { + MethodName: "RunTask", + Handler: _CloudTasks_RunTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/tasks/v2beta3/cloudtasks.proto", +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta3/cloudtasks.proto", fileDescriptor_cloudtasks_6396912137772a30) +} + +var fileDescriptor_cloudtasks_6396912137772a30 = []byte{ + // 1113 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x98, 0x5d, 0x6f, 0xdc, 0x44, + 0x17, 0xc7, 0x35, 0x79, 0x6b, 0x73, 0xb6, 0x6d, 0x9e, 0x8c, 0x9e, 0x56, 0x8b, 0x0b, 0xd5, 0x62, + 0x29, 0xed, 0xb2, 0xa5, 0xb6, 0xb2, 0xa5, 0x0d, 0xdd, 0x40, 0x9b, 0xa6, 0x6d, 0x22, 0x08, 0x48, + 0xdb, 0xdd, 0xc0, 0x05, 0x37, 0x2b, 0x67, 0x33, 0xb1, 0x4c, 0xd6, 0x2f, 0xf5, 0x8c, 0x13, 0x28, + 0x44, 0x48, 0x5c, 0x72, 0xc1, 0x4d, 0x11, 0xe2, 0x8e, 0x57, 0x81, 0xb8, 0xe0, 0x86, 0x6b, 0xbe, + 0x05, 0x5f, 0x81, 0x0f, 0x82, 0xe6, 0xc5, 0x6b, 0x67, 0x9d, 0xb5, 0x9d, 0x05, 0x7a, 0x95, 0xf5, + 0x9c, 0xff, 0xcc, 0xf9, 0xcd, 0x99, 0xe3, 0x39, 0xc7, 0x81, 0xeb, 0xb6, 0xef, 0xdb, 0x03, 0x62, + 0xf6, 0x07, 0x7e, 0xb4, 0x6b, 0x32, 0x8b, 0xee, 0x53, 0xf3, 0xa0, 0xb9, 0x43, 0x98, 0x75, 0x53, + 0x8e, 0x89, 0x21, 0x23, 0x08, 0x7d, 0xe6, 0x63, 0x4d, 0x8a, 0x0d, 0x61, 0x30, 0xa4, 0x45, 0x89, + 0xb5, 0x17, 0xd5, 0x42, 0x56, 
0xe0, 0x98, 0x96, 0xe7, 0xf9, 0xcc, 0x62, 0x8e, 0xef, 0xa9, 0x99, + 0xda, 0x0b, 0x29, 0x6b, 0x48, 0xa8, 0x1f, 0x85, 0x7d, 0xa2, 0x4c, 0x97, 0x95, 0x49, 0x3c, 0xed, + 0x44, 0x7b, 0x26, 0x71, 0x03, 0xf6, 0xb1, 0x32, 0xd6, 0x46, 0x8d, 0x7b, 0x0e, 0x19, 0xec, 0xf6, + 0x5c, 0x8b, 0xee, 0x2b, 0xc5, 0x52, 0xce, 0x06, 0x58, 0x22, 0xbb, 0x9a, 0x23, 0x7b, 0x12, 0x91, + 0x28, 0xa6, 0xb9, 0xa2, 0x74, 0x8e, 0xe5, 0x9a, 0x07, 0xcb, 0xfc, 0x4f, 0x2f, 0xf0, 0x07, 0x4e, + 0x3f, 0x06, 0xd2, 0x8e, 0xdb, 0x8f, 0xd9, 0x2e, 0x2a, 0x5b, 0x18, 0xf4, 0xcd, 0xbe, 0xbf, 0xab, + 0x96, 0xd4, 0x3f, 0x83, 0xc5, 0x77, 0x1c, 0xca, 0x1e, 0x73, 0x2f, 0xb4, 0x43, 0x9e, 0x44, 0x84, + 0x32, 0x7c, 0x09, 0xe6, 0x02, 0x2b, 0x24, 0x1e, 0xab, 0xa2, 0x1a, 0xaa, 0xcf, 0x77, 0xd4, 0x13, + 0x1f, 0xdf, 0x73, 0x06, 0x8c, 0x84, 0xd5, 0x29, 0x39, 0x2e, 0x9f, 0xf0, 0x65, 0x98, 0x0f, 0x2c, + 0x9b, 0xf4, 0xa8, 0xf3, 0x94, 0x54, 0xa7, 0x6b, 0xa8, 0x3e, 0xdb, 0x39, 0xcb, 0x07, 0xba, 0xce, + 0x53, 0x82, 0x5f, 0x02, 0x10, 0x46, 0xe6, 0xef, 0x13, 0xaf, 0x3a, 0x23, 0x26, 0x0a, 0xf9, 0x36, + 0x1f, 0xd0, 0x0f, 0x01, 0xa7, 0x01, 0x68, 0xe0, 0x7b, 0x94, 0xe0, 0x3b, 0x30, 0x27, 0x36, 0x4e, + 0xab, 0xa8, 0x36, 0x5d, 0xaf, 0x34, 0x5f, 0x36, 0xc6, 0x9f, 0xae, 0x21, 0xe6, 0x76, 0xd4, 0x04, + 0x7c, 0x15, 0x16, 0x3c, 0xf2, 0x11, 0xeb, 0xa5, 0x9c, 0x4a, 0xda, 0xf3, 0x7c, 0xb8, 0x3d, 0x74, + 0xbc, 0x04, 0x0b, 0x9b, 0x44, 0xfa, 0x8d, 0xf7, 0x8d, 0x61, 0xc6, 0xb3, 0x5c, 0xa2, 0x76, 0x2d, + 0x7e, 0xeb, 0x04, 0xf0, 0x83, 0x90, 0x58, 0x8c, 0x1c, 0x53, 0x8e, 0x8b, 0xd0, 0x0a, 0xcc, 0x0a, + 0x0c, 0xe1, 0xb2, 0x14, 0xb6, 0xd4, 0xeb, 0x5f, 0x20, 0xc0, 0xef, 0x05, 0xbb, 0xa3, 0x7e, 0x86, + 0xeb, 0xa1, 0xd3, 0xad, 0x87, 0x57, 0xa1, 0x12, 0x89, 0xe5, 0x44, 0x3a, 0x2a, 0x1c, 0x2d, 0x9e, + 0x1e, 0x67, 0xac, 0xb1, 0xc1, 0x33, 0xf6, 0x5d, 0x8b, 0xee, 0x77, 0x40, 0xca, 0xf9, 0x6f, 0xbd, + 0x0e, 0xf8, 0x21, 0x19, 0x90, 0x11, 0x96, 0x93, 0xa2, 0x73, 0x0d, 0x16, 0xdb, 0x51, 0x68, 0x97, + 0x13, 0x5a, 0x11, 0x2d, 0x16, 0xd6, 0x01, 0x77, 0x08, 0x8d, 0xdc, 0x62, 0xe5, 0xef, 0x08, 0xfe, + 0xc7, 0x53, 0x67, 0x9b, 0x47, 0xa1, 0xe8, 0x60, 0xde, 0x86, 0xf3, 0xa1, 0x4a, 0xae, 0xde, 0x81, + 0x43, 0x0e, 0x45, 0x44, 0x2e, 0x34, 0x97, 0xf2, 0x02, 0xca, 0x17, 0x36, 0xde, 0x77, 0xc8, 0x61, + 0xe7, 0x5c, 0x3c, 0x97, 0x3f, 0xfd, 0xa3, 0x74, 0xa7, 0xf2, 0x7d, 0x53, 0xcc, 0x2a, 0xdb, 0x6f, + 0xc3, 0xac, 0xf0, 0xac, 0x92, 0xbd, 0x56, 0x04, 0xd5, 0x91, 0xf2, 0xd2, 0xa9, 0x1e, 0xc0, 0x85, + 0x4d, 0x22, 0x7c, 0xe6, 0xc4, 0xf3, 0xdf, 0x0c, 0x91, 0xfe, 0x1b, 0x82, 0x45, 0xf9, 0xda, 0xa4, + 0xbd, 0x8e, 0x3b, 0x9c, 0xd7, 0x60, 0x86, 0x25, 0x59, 0x5a, 0xbc, 0x7d, 0xa1, 0xce, 0xf2, 0x4e, + 0x4f, 0xce, 0x7b, 0x0d, 0x16, 0x65, 0xc6, 0x17, 0x04, 0x89, 0x87, 0xb2, 0x13, 0x79, 0xcf, 0x31, + 0x94, 0xcd, 0x5f, 0xfe, 0x0f, 0xf0, 0x80, 0xeb, 0x45, 0xce, 0xe0, 0x9f, 0x10, 0x40, 0x72, 0x61, + 0xe2, 0x1b, 0x79, 0x4b, 0x66, 0x6e, 0x76, 0xcd, 0x28, 0x2b, 0x97, 0x00, 0xfa, 0xca, 0xe7, 0x7f, + 0xfe, 0xf5, 0x6c, 0x6a, 0x19, 0x9b, 0xc3, 0x7a, 0xf4, 0x89, 0x3c, 0xb3, 0x37, 0x83, 0xd0, 0xff, + 0x90, 0xf4, 0x19, 0x35, 0x1b, 0xe6, 0xc0, 0xef, 0xcb, 0x7a, 0x6a, 0x36, 0x8e, 0x4c, 0x75, 0x0b, + 0x3f, 0x43, 0x70, 0x36, 0xbe, 0x5e, 0xf1, 0xf5, 0x3c, 0xaf, 0x23, 0x97, 0xb0, 0x56, 0x7c, 0xc7, + 0x9d, 0x44, 0xc5, 0xc3, 0x3e, 0x86, 0x49, 0x21, 0x99, 0x8d, 0x23, 0xfc, 0x1d, 0x82, 0x4a, 0xea, + 0x36, 0xc7, 0xb9, 0xe1, 0xc8, 0x5e, 0xfb, 0x65, 0xd8, 0xee, 0x0a, 0xb6, 0xd7, 0xf5, 0xd3, 0x46, + 0xac, 0xa5, 0x2e, 0xee, 0x9f, 0x11, 0x54, 0x52, 0x85, 0x20, 0x1f, 0x31, 0x5b, 0x31, 0xca, 0x20, + 0x3e, 0x14, 0x88, 0x77, 0x9b, 0xb7, 0x12, 0x44, 0xd9, 
0x65, 0x94, 0x0a, 0x62, 0x0c, 0xfa, 0x25, + 0x82, 0x4a, 0xaa, 0x4a, 0xe4, 0x83, 0x66, 0xcb, 0x89, 0x76, 0x29, 0x53, 0x8c, 0x1e, 0xf1, 0xde, + 0x2a, 0x3e, 0xdc, 0xc6, 0x24, 0x87, 0x0b, 0x49, 0x31, 0xca, 0x7f, 0x33, 0x32, 0x45, 0xab, 0x4c, + 0xdc, 0xd6, 0x04, 0x59, 0x4b, 0xbf, 0x75, 0x4a, 0xb2, 0x56, 0xc0, 0xbd, 0xb5, 0x50, 0x43, 0x22, + 0x0e, 0xcb, 0x60, 0x01, 0xe2, 0x68, 0xb9, 0xfc, 0x6f, 0x11, 0xb9, 0x37, 0x8e, 0xf8, 0x23, 0x82, + 0x4a, 0xaa, 0x00, 0xe7, 0x1f, 0x6b, 0xb6, 0x52, 0x97, 0x81, 0xbc, 0x2f, 0x20, 0x57, 0xf5, 0xdb, + 0xa7, 0x85, 0x0c, 0x85, 0x3b, 0x4e, 0xf9, 0x35, 0x82, 0x73, 0x9b, 0x84, 0xbd, 0x65, 0xb9, 0x6d, + 0xd1, 0xe4, 0x62, 0x3d, 0x76, 0xeb, 0x58, 0xae, 0x71, 0xb0, 0x6c, 0xa4, 0x8d, 0x31, 0xda, 0xc5, + 0x11, 0x8d, 0xb4, 0xea, 0x5b, 0x02, 0xe7, 0x91, 0xbe, 0x96, 0xe0, 0xc4, 0x5f, 0x01, 0xc5, 0x48, + 0x76, 0xca, 0x4f, 0x0c, 0xd6, 0xcd, 0x03, 0xeb, 0x3e, 0x27, 0x30, 0x3a, 0x02, 0xf6, 0x07, 0x02, + 0xbc, 0x4d, 0xa8, 0x18, 0x24, 0xa1, 0xeb, 0x50, 0xca, 0xa7, 0xe0, 0xfa, 0x88, 0xeb, 0xac, 0x24, + 0x86, 0x7c, 0xa5, 0x84, 0x52, 0x55, 0x8d, 0xc7, 0x02, 0x7c, 0x4b, 0xdf, 0x98, 0x04, 0x9c, 0x65, + 0xd6, 0xe5, 0xf8, 0xbf, 0x22, 0x98, 0x1f, 0x36, 0x4e, 0xf8, 0xd5, 0xa2, 0x32, 0x96, 0xee, 0x09, + 0xb5, 0x1b, 0x25, 0xd5, 0x8a, 0xfe, 0x9e, 0xa0, 0xbf, 0x83, 0x57, 0x4a, 0xde, 0xe0, 0x09, 0xbb, + 0xfc, 0x74, 0xc3, 0xdf, 0x20, 0x38, 0xa3, 0xfa, 0x2d, 0xdc, 0x28, 0x28, 0x7d, 0xa9, 0x4e, 0x42, + 0x2b, 0x6c, 0x7c, 0x4e, 0x42, 0x2b, 0xf3, 0xe6, 0xa8, 0x4f, 0xca, 0xc6, 0x11, 0xfe, 0x1e, 0x01, + 0x24, 0x7d, 0x59, 0xfe, 0x05, 0x94, 0xe9, 0xdf, 0x4a, 0x00, 0xae, 0x0b, 0xc0, 0x37, 0xf4, 0x49, + 0x63, 0xc7, 0x8f, 0xfa, 0x2b, 0x04, 0x90, 0x34, 0x63, 0xf9, 0x8c, 0x99, 0xa6, 0x6d, 0x6c, 0x59, + 0x51, 0xa1, 0x6b, 0x4c, 0x1c, 0xba, 0x6f, 0x11, 0x9c, 0x51, 0xad, 0x5f, 0xfe, 0xa9, 0x1e, 0xef, + 0x0f, 0x4b, 0x04, 0x6d, 0x43, 0xa0, 0xad, 0xe9, 0xab, 0x13, 0xa2, 0xb5, 0xc2, 0xc8, 0x6b, 0xa1, + 0xc6, 0xfa, 0xa7, 0x70, 0xa5, 0xef, 0xbb, 0x39, 0xee, 0xd6, 0x17, 0x92, 0x46, 0xb2, 0xcd, 0xc3, + 0xd3, 0x46, 0x1f, 0xdc, 0x53, 0x72, 0xdb, 0x1f, 0x58, 0x9e, 0x6d, 0xf8, 0xa1, 0x6d, 0xda, 0xc4, + 0x13, 0xc1, 0x33, 0xa5, 0xc9, 0x0a, 0x1c, 0x7a, 0xd2, 0xbf, 0x26, 0x56, 0xc5, 0xd3, 0x0f, 0x53, + 0xb3, 0xdb, 0xf7, 0xbb, 0x5b, 0xdd, 0x9d, 0x39, 0x31, 0xe7, 0xe6, 0xdf, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x34, 0x8b, 0xca, 0x54, 0xb5, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/queue.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/queue.pb.go new file mode 100644 index 0000000..25c2a47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/queue.pb.go @@ -0,0 +1,656 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta3/queue.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the queue. +type Queue_State int32 + +const ( + // Unspecified state. + Queue_STATE_UNSPECIFIED Queue_State = 0 + // The queue is running. Tasks can be dispatched. + // + // If the queue was created using Cloud Tasks and the queue has + // had no activity (method calls or task dispatches) for 30 days, + // the queue may take a few minutes to re-activate. Some method + // calls may return [NOT_FOUND][google.rpc.Code.NOT_FOUND] and + // tasks may not be dispatched for a few minutes until the queue + // has been re-activated. + Queue_RUNNING Queue_State = 1 + // Tasks are paused by the user. If the queue is paused then Cloud + // Tasks will stop delivering tasks from it, but more tasks can + // still be added to it by the user. + Queue_PAUSED Queue_State = 2 + // The queue is disabled. + // + // A queue becomes `DISABLED` when + // [queue.yaml](https://cloud.google.com/appengine/docs/python/config/queueref) + // or + // [queue.xml](https://cloud.google.com/appengine/docs/standard/java/config/queueref) + // is uploaded which does not contain the queue. You cannot directly disable + // a queue. + // + // When a queue is disabled, tasks can still be added to a queue + // but the tasks are not dispatched. + // + // To permanently delete this queue and all of its tasks, call + // [DeleteQueue][google.cloud.tasks.v2beta3.CloudTasks.DeleteQueue]. + Queue_DISABLED Queue_State = 3 +) + +var Queue_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "PAUSED", + 3: "DISABLED", +} +var Queue_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "PAUSED": 2, + "DISABLED": 3, +} + +func (x Queue_State) String() string { + return proto.EnumName(Queue_State_name, int32(x)) +} +func (Queue_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_queue_728c565dd0b3b59c, []int{0, 0} +} + +// A queue is a container of related tasks. Queues are configured to manage +// how those tasks are dispatched. Configurable properties include rate limits, +// retry options, queue types, and others. +type Queue struct { + // Caller-specified and required in [CreateQueue][google.cloud.tasks.v2beta3.CloudTasks.CreateQueue], + // after which it becomes output only. + // + // The queue name. + // + // The queue name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the queue's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to QueueType: + // *Queue_AppEngineHttpQueue + QueueType isQueue_QueueType `protobuf_oneof:"queue_type"` + // Rate limits for task dispatches. 
+ // + // [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] and [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] are + // related because they both control task attempts. However they control task + // attempts in different ways: + // + // * [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits] controls the total rate of + // dispatches from a queue (i.e. all traffic dispatched from the + // queue, regardless of whether the dispatch is from a first + // attempt or a retry). + // * [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] controls what happens to + // particular a task after its first attempt fails. That is, + // [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config] controls task retries (the + // second attempt, third attempt, etc). + // + // The queue's actual dispatch rate is the result of: + // + // * Number of tasks in the queue + // * User-specified throttling: [rate_limits][google.cloud.tasks.v2beta3.Queue.rate_limits], + // [retry_config][google.cloud.tasks.v2beta3.Queue.retry_config], and the + // [queue's state][google.cloud.tasks.v2beta3.Queue.state]. + // * System throttling due to `429` (Too Many Requests) or `503` (Service + // Unavailable) responses from the worker, high error rates, or to smooth + // sudden large traffic spikes. + RateLimits *RateLimits `protobuf:"bytes,4,opt,name=rate_limits,json=rateLimits,proto3" json:"rate_limits,omitempty"` + // Settings that determine the retry behavior. + // + // * For tasks created using Cloud Tasks: the queue-level retry settings + // apply to all tasks in the queue that were created using Cloud Tasks. + // Retry settings cannot be set on individual tasks. + // * For tasks created using the App Engine SDK: the queue-level retry + // settings apply to all tasks in the queue which do not have retry settings + // explicitly set on the task and were created by the App Engine SDK. See + // [App Engine + // documentation](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/retrying-tasks). + RetryConfig *RetryConfig `protobuf:"bytes,5,opt,name=retry_config,json=retryConfig,proto3" json:"retry_config,omitempty"` + // Output only. The state of the queue. + // + // `state` can only be changed by called + // [PauseQueue][google.cloud.tasks.v2beta3.CloudTasks.PauseQueue], + // [ResumeQueue][google.cloud.tasks.v2beta3.CloudTasks.ResumeQueue], or uploading + // [queue.yaml/xml](https://cloud.google.com/appengine/docs/python/config/queueref). + // [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] cannot be used to change `state`. + State Queue_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.cloud.tasks.v2beta3.Queue_State" json:"state,omitempty"` + // Output only. The last time this queue was purged. + // + // All tasks that were [created][google.cloud.tasks.v2beta3.Task.create_time] before this time + // were purged. + // + // A queue can be purged using [PurgeQueue][google.cloud.tasks.v2beta3.CloudTasks.PurgeQueue], the + // [App Engine Task Queue SDK, or the Cloud + // Console](https://cloud.google.com/appengine/docs/standard/python/taskqueue/push/deleting-tasks-and-queues#purging_all_tasks_from_a_queue). + // + // Purge time will be truncated to the nearest microsecond. Purge + // time will be unset if the queue has never been purged. 
+ PurgeTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=purge_time,json=purgeTime,proto3" json:"purge_time,omitempty"` + // Specifies the fraction of operations to write to + // [Stackdriver Logging](https://cloud.google.com/logging/docs/). + // This field may contain any value between 0.0 and 1.0, inclusive. + // 0.0 is the default and means that no operations are logged. + LogSamplingRatio float64 `protobuf:"fixed64,10,opt,name=log_sampling_ratio,json=logSamplingRatio,proto3" json:"log_sampling_ratio,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Queue) Reset() { *m = Queue{} } +func (m *Queue) String() string { return proto.CompactTextString(m) } +func (*Queue) ProtoMessage() {} +func (*Queue) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_728c565dd0b3b59c, []int{0} +} +func (m *Queue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Queue.Unmarshal(m, b) +} +func (m *Queue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Queue.Marshal(b, m, deterministic) +} +func (dst *Queue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Queue.Merge(dst, src) +} +func (m *Queue) XXX_Size() int { + return xxx_messageInfo_Queue.Size(m) +} +func (m *Queue) XXX_DiscardUnknown() { + xxx_messageInfo_Queue.DiscardUnknown(m) +} + +var xxx_messageInfo_Queue proto.InternalMessageInfo + +func (m *Queue) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isQueue_QueueType interface { + isQueue_QueueType() +} + +type Queue_AppEngineHttpQueue struct { + AppEngineHttpQueue *AppEngineHttpQueue `protobuf:"bytes,3,opt,name=app_engine_http_queue,json=appEngineHttpQueue,proto3,oneof"` +} + +func (*Queue_AppEngineHttpQueue) isQueue_QueueType() {} + +func (m *Queue) GetQueueType() isQueue_QueueType { + if m != nil { + return m.QueueType + } + return nil +} + +func (m *Queue) GetAppEngineHttpQueue() *AppEngineHttpQueue { + if x, ok := m.GetQueueType().(*Queue_AppEngineHttpQueue); ok { + return x.AppEngineHttpQueue + } + return nil +} + +func (m *Queue) GetRateLimits() *RateLimits { + if m != nil { + return m.RateLimits + } + return nil +} + +func (m *Queue) GetRetryConfig() *RetryConfig { + if m != nil { + return m.RetryConfig + } + return nil +} + +func (m *Queue) GetState() Queue_State { + if m != nil { + return m.State + } + return Queue_STATE_UNSPECIFIED +} + +func (m *Queue) GetPurgeTime() *timestamp.Timestamp { + if m != nil { + return m.PurgeTime + } + return nil +} + +func (m *Queue) GetLogSamplingRatio() float64 { + if m != nil { + return m.LogSamplingRatio + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Queue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Queue_OneofMarshaler, _Queue_OneofUnmarshaler, _Queue_OneofSizer, []interface{}{ + (*Queue_AppEngineHttpQueue)(nil), + } +} + +func _Queue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Queue) + // queue_type + switch x := m.QueueType.(type) { + case *Queue_AppEngineHttpQueue: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpQueue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Queue.QueueType has unexpected type %T", x) + } + return nil +} + +func _Queue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Queue) + switch tag { + case 3: // queue_type.app_engine_http_queue + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpQueue) + err := b.DecodeMessage(msg) + m.QueueType = &Queue_AppEngineHttpQueue{msg} + return true, err + default: + return false, nil + } +} + +func _Queue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Queue) + // queue_type + switch x := m.QueueType.(type) { + case *Queue_AppEngineHttpQueue: + s := proto.Size(x.AppEngineHttpQueue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Rate limits. +// +// This message determines the maximum rate that tasks can be dispatched by a +// queue, regardless of whether the dispatch is a first task attempt or a retry. +// +// Note: The debugging command, [RunTask][google.cloud.tasks.v2beta3.CloudTasks.RunTask], will run a task +// even if the queue has reached its [RateLimits][google.cloud.tasks.v2beta3.RateLimits]. +type RateLimits struct { + // The maximum rate at which tasks are dispatched from this queue. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // * For [App Engine queues][google.cloud.tasks.v2beta3.AppEngineHttpQueue], the maximum allowed value + // is 500. + // + // + // This field has the same meaning as + // [rate in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate). + MaxDispatchesPerSecond float64 `protobuf:"fixed64,1,opt,name=max_dispatches_per_second,json=maxDispatchesPerSecond,proto3" json:"max_dispatches_per_second,omitempty"` + // Output only. The max burst size. + // + // Max burst size limits how fast tasks in queue are processed when + // many tasks are in the queue and the rate is high. This field + // allows the queue to have a high rate so processing starts shortly + // after a task is enqueued, but still limits resource usage when + // many tasks are enqueued in a short period of time. + // + // The [token bucket](https://wikipedia.org/wiki/Token_Bucket) + // algorithm is used to control the rate of task dispatches. Each + // queue has a token bucket that holds tokens, up to the maximum + // specified by `max_burst_size`. Each time a task is dispatched, a + // token is removed from the bucket. Tasks will be dispatched until + // the queue's bucket runs out of tokens. The bucket will be + // continuously refilled with new tokens based on + // [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. 
+ // + // Cloud Tasks will pick the value of `max_burst_size` based on the + // value of + // [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second]. + // + // For App Engine queues that were created or updated using + // `queue.yaml/xml`, `max_burst_size` is equal to + // [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size). + // Since `max_burst_size` is output only, if + // [UpdateQueue][google.cloud.tasks.v2beta3.CloudTasks.UpdateQueue] is called on a queue + // created by `queue.yaml/xml`, `max_burst_size` will be reset based + // on the value of + // [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second], + // regardless of whether + // [max_dispatches_per_second][google.cloud.tasks.v2beta3.RateLimits.max_dispatches_per_second] + // is updated. + // + MaxBurstSize int32 `protobuf:"varint,2,opt,name=max_burst_size,json=maxBurstSize,proto3" json:"max_burst_size,omitempty"` + // The maximum number of concurrent tasks that Cloud Tasks allows + // to be dispatched for this queue. After this threshold has been + // reached, Cloud Tasks stops dispatching tasks until the number of + // concurrent requests decreases. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // The maximum allowed value is 5,000. + // + // + // This field has the same meaning as + // [max_concurrent_requests in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests). + MaxConcurrentDispatches int32 `protobuf:"varint,3,opt,name=max_concurrent_dispatches,json=maxConcurrentDispatches,proto3" json:"max_concurrent_dispatches,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RateLimits) Reset() { *m = RateLimits{} } +func (m *RateLimits) String() string { return proto.CompactTextString(m) } +func (*RateLimits) ProtoMessage() {} +func (*RateLimits) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_728c565dd0b3b59c, []int{1} +} +func (m *RateLimits) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RateLimits.Unmarshal(m, b) +} +func (m *RateLimits) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RateLimits.Marshal(b, m, deterministic) +} +func (dst *RateLimits) XXX_Merge(src proto.Message) { + xxx_messageInfo_RateLimits.Merge(dst, src) +} +func (m *RateLimits) XXX_Size() int { + return xxx_messageInfo_RateLimits.Size(m) +} +func (m *RateLimits) XXX_DiscardUnknown() { + xxx_messageInfo_RateLimits.DiscardUnknown(m) +} + +var xxx_messageInfo_RateLimits proto.InternalMessageInfo + +func (m *RateLimits) GetMaxDispatchesPerSecond() float64 { + if m != nil { + return m.MaxDispatchesPerSecond + } + return 0 +} + +func (m *RateLimits) GetMaxBurstSize() int32 { + if m != nil { + return m.MaxBurstSize + } + return 0 +} + +func (m *RateLimits) GetMaxConcurrentDispatches() int32 { + if m != nil { + return m.MaxConcurrentDispatches + } + return 0 +} + +// Retry config. +// +// These settings determine when a failed task attempt is retried. +type RetryConfig struct { + // Number of attempts per task. + // + // Cloud Tasks will attempt the task `max_attempts` times (that is, if the + // first attempt fails, then there will be `max_attempts - 1` retries). Must + // be >= -1. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. 
+ // + // -1 indicates unlimited attempts. + // + // This field has the same meaning as + // [task_retry_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxAttempts int32 `protobuf:"varint,1,opt,name=max_attempts,json=maxAttempts,proto3" json:"max_attempts,omitempty"` + // If positive, `max_retry_duration` specifies the time limit for + // retrying a failed task, measured from when the task was first + // attempted. Once `max_retry_duration` time has passed *and* the + // task has been attempted [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + // times, no further attempts will be made and the task will be + // deleted. + // + // If zero, then the task age is unlimited. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `max_retry_duration` will be truncated to the nearest second. + // + // This field has the same meaning as + // [task_age_limit in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxRetryDuration *duration.Duration `protobuf:"bytes,2,opt,name=max_retry_duration,json=maxRetryDuration,proto3" json:"max_retry_duration,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] for retry between + // [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] duration after it fails, + // if the queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] specifies that the task should be + // retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `min_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [min_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MinBackoff *duration.Duration `protobuf:"bytes,3,opt,name=min_backoff,json=minBackoff,proto3" json:"min_backoff,omitempty"` + // A task will be [scheduled][google.cloud.tasks.v2beta3.Task.schedule_time] for retry between + // [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] and + // [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] duration after it fails, + // if the queue's [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig] specifies that the task should be + // retried. + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // `max_backoff` will be truncated to the nearest second. + // + // This field has the same meaning as + // [max_backoff_seconds in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxBackoff *duration.Duration `protobuf:"bytes,4,opt,name=max_backoff,json=maxBackoff,proto3" json:"max_backoff,omitempty"` + // The time between retries will double `max_doublings` times. + // + // A task's retry interval starts at + // [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff], then doubles + // `max_doublings` times, then increases linearly, and finally + // retries retries at intervals of + // [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] up to + // [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] times. 
+ // + // For example, if [min_backoff][google.cloud.tasks.v2beta3.RetryConfig.min_backoff] is 10s, + // [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] is 300s, and + // `max_doublings` is 3, then the a task will first be retried in + // 10s. The retry interval will double three times, and then + // increase linearly by 2^3 * 10s. Finally, the task will retry at + // intervals of [max_backoff][google.cloud.tasks.v2beta3.RetryConfig.max_backoff] until the + // task has been attempted [max_attempts][google.cloud.tasks.v2beta3.RetryConfig.max_attempts] + // times. Thus, the requests will retry at 10s, 20s, 40s, 80s, 160s, + // 240s, 300s, 300s, .... + // + // If unspecified when the queue is created, Cloud Tasks will pick the + // default. + // + // + // This field has the same meaning as + // [max_doublings in + // queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#retry_parameters). + MaxDoublings int32 `protobuf:"varint,5,opt,name=max_doublings,json=maxDoublings,proto3" json:"max_doublings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryConfig) Reset() { *m = RetryConfig{} } +func (m *RetryConfig) String() string { return proto.CompactTextString(m) } +func (*RetryConfig) ProtoMessage() {} +func (*RetryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_queue_728c565dd0b3b59c, []int{2} +} +func (m *RetryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryConfig.Unmarshal(m, b) +} +func (m *RetryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryConfig.Marshal(b, m, deterministic) +} +func (dst *RetryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryConfig.Merge(dst, src) +} +func (m *RetryConfig) XXX_Size() int { + return xxx_messageInfo_RetryConfig.Size(m) +} +func (m *RetryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RetryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryConfig proto.InternalMessageInfo + +func (m *RetryConfig) GetMaxAttempts() int32 { + if m != nil { + return m.MaxAttempts + } + return 0 +} + +func (m *RetryConfig) GetMaxRetryDuration() *duration.Duration { + if m != nil { + return m.MaxRetryDuration + } + return nil +} + +func (m *RetryConfig) GetMinBackoff() *duration.Duration { + if m != nil { + return m.MinBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxBackoff() *duration.Duration { + if m != nil { + return m.MaxBackoff + } + return nil +} + +func (m *RetryConfig) GetMaxDoublings() int32 { + if m != nil { + return m.MaxDoublings + } + return 0 +} + +func init() { + proto.RegisterType((*Queue)(nil), "google.cloud.tasks.v2beta3.Queue") + proto.RegisterType((*RateLimits)(nil), "google.cloud.tasks.v2beta3.RateLimits") + proto.RegisterType((*RetryConfig)(nil), "google.cloud.tasks.v2beta3.RetryConfig") + proto.RegisterEnum("google.cloud.tasks.v2beta3.Queue_State", Queue_State_name, Queue_State_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta3/queue.proto", fileDescriptor_queue_728c565dd0b3b59c) +} + +var fileDescriptor_queue_728c565dd0b3b59c = []byte{ + // 683 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0x51, 0x4f, 0xdb, 0x30, + 0x10, 0xc7, 0x09, 0xb4, 0x30, 0x2e, 0x1d, 0xea, 0x2c, 0xb1, 0x85, 0x6a, 0x62, 0x5d, 0x37, 0x41, + 0x1f, 0xa6, 0x44, 0x82, 0x27, 0x98, 0xa6, 0xa9, 0xa5, 0x1d, 0x74, 0x42, 0x55, 0x97, 0xc0, 0xcb, + 
0x5e, 0x2c, 0x37, 0x75, 0x43, 0x44, 0x13, 0x7b, 0xb6, 0x33, 0x15, 0x3e, 0xc6, 0x1e, 0xf6, 0x1d, + 0xf6, 0x2d, 0xa7, 0x5c, 0x52, 0x40, 0x63, 0xeb, 0xde, 0xe2, 0xbb, 0xdf, 0xfd, 0xff, 0xb6, 0xef, + 0x1c, 0xd8, 0x8b, 0x84, 0x88, 0x66, 0xdc, 0x0b, 0x67, 0x22, 0x9b, 0x78, 0x86, 0xe9, 0x6b, 0xed, + 0x7d, 0x3f, 0x18, 0x73, 0xc3, 0x0e, 0xbd, 0x6f, 0x19, 0xcf, 0xb8, 0x2b, 0x95, 0x30, 0x82, 0x34, + 0x0a, 0xce, 0x45, 0xce, 0x45, 0xce, 0x2d, 0xb9, 0xc6, 0xcb, 0x52, 0x83, 0xc9, 0xd8, 0x63, 0x69, + 0x2a, 0x0c, 0x33, 0xb1, 0x48, 0x75, 0x51, 0xd9, 0xd8, 0x79, 0x90, 0x55, 0x5c, 0x8b, 0x4c, 0x85, + 0xa5, 0x68, 0x63, 0x7f, 0x89, 0xb9, 0x61, 0x2a, 0xe2, 0xa6, 0x04, 0x77, 0x4b, 0x10, 0x57, 0xe3, + 0x6c, 0xea, 0x4d, 0x32, 0x85, 0x26, 0x65, 0xfe, 0xd5, 0x9f, 0x79, 0x13, 0x27, 0x5c, 0x1b, 0x96, + 0xc8, 0x02, 0x68, 0xfd, 0xa8, 0x40, 0xf5, 0x4b, 0x7e, 0x1c, 0x42, 0xa0, 0x92, 0xb2, 0x84, 0x3b, + 0x56, 0xd3, 0x6a, 0x6f, 0xfa, 0xf8, 0x4d, 0x42, 0xd8, 0x66, 0x52, 0x52, 0x9e, 0x46, 0x71, 0xca, + 0xe9, 0x95, 0x31, 0x92, 0xe2, 0xd9, 0x9d, 0xb5, 0xa6, 0xd5, 0xb6, 0x0f, 0x5c, 0xf7, 0xdf, 0x87, + 0x77, 0x3b, 0x52, 0xf6, 0xb1, 0xee, 0xcc, 0x18, 0x89, 0x16, 0x67, 0x2b, 0x3e, 0x61, 0x8f, 0xa2, + 0xe4, 0x14, 0x6c, 0xc5, 0x0c, 0xa7, 0xb3, 0x38, 0x89, 0x8d, 0x76, 0x2a, 0x28, 0xbd, 0xb7, 0x4c, + 0xda, 0x67, 0x86, 0x9f, 0x23, 0xed, 0x83, 0xba, 0xfb, 0x26, 0x9f, 0xa1, 0xa6, 0xb8, 0x51, 0x37, + 0x34, 0x14, 0xe9, 0x34, 0x8e, 0x9c, 0x2a, 0x2a, 0xed, 0x2f, 0x55, 0xca, 0xf9, 0x13, 0xc4, 0x7d, + 0x5b, 0xdd, 0x2f, 0xc8, 0x07, 0xa8, 0x6a, 0xc3, 0x0c, 0x77, 0xd6, 0x9b, 0x56, 0x7b, 0x6b, 0xb9, + 0x08, 0x1e, 0xc3, 0x0d, 0x72, 0xdc, 0x2f, 0xaa, 0xc8, 0x11, 0x80, 0xcc, 0x54, 0xc4, 0x69, 0x7e, + 0xdf, 0xce, 0x06, 0x6e, 0xa4, 0xb1, 0xd0, 0x58, 0x34, 0xc3, 0xbd, 0x58, 0x34, 0xc3, 0xdf, 0x44, + 0x3a, 0x5f, 0x93, 0x77, 0x40, 0x66, 0x22, 0xa2, 0x9a, 0x25, 0x72, 0x16, 0xa7, 0x11, 0xc5, 0x7e, + 0x3a, 0xd0, 0xb4, 0xda, 0x96, 0x5f, 0x9f, 0x89, 0x28, 0x28, 0x13, 0x7e, 0x1e, 0x6f, 0xf5, 0xa1, + 0x8a, 0xc6, 0x64, 0x1b, 0x9e, 0x05, 0x17, 0x9d, 0x8b, 0x3e, 0xbd, 0x1c, 0x06, 0xa3, 0xfe, 0xc9, + 0xe0, 0xd3, 0xa0, 0xdf, 0xab, 0xaf, 0x10, 0x1b, 0x36, 0xfc, 0xcb, 0xe1, 0x70, 0x30, 0x3c, 0xad, + 0x5b, 0x04, 0x60, 0x7d, 0xd4, 0xb9, 0x0c, 0xfa, 0xbd, 0xfa, 0x2a, 0xa9, 0xc1, 0x93, 0xde, 0x20, + 0xe8, 0x74, 0xcf, 0xfb, 0xbd, 0xfa, 0x5a, 0xb7, 0x06, 0x80, 0x8d, 0xa5, 0xe6, 0x46, 0xf2, 0xd6, + 0x2f, 0x0b, 0xe0, 0xfe, 0x8e, 0xc9, 0x11, 0xec, 0x24, 0x6c, 0x4e, 0x27, 0xb1, 0x96, 0xcc, 0x84, + 0x57, 0x5c, 0x53, 0xc9, 0x15, 0xd5, 0x3c, 0x14, 0xe9, 0x04, 0xc7, 0xc5, 0xf2, 0x9f, 0x27, 0x6c, + 0xde, 0xbb, 0xcb, 0x8f, 0xb8, 0x0a, 0x30, 0x4b, 0xde, 0xc2, 0x56, 0x5e, 0x3a, 0xce, 0x94, 0x36, + 0x54, 0xc7, 0xb7, 0xdc, 0x59, 0x6d, 0x5a, 0xed, 0xaa, 0x5f, 0x4b, 0xd8, 0xbc, 0x9b, 0x07, 0x83, + 0xf8, 0x96, 0x93, 0xe3, 0xc2, 0x20, 0x14, 0x69, 0x98, 0x29, 0xc5, 0x53, 0xf3, 0xc0, 0x0b, 0x47, + 0xad, 0xea, 0xbf, 0x48, 0xd8, 0xfc, 0xe4, 0x2e, 0x7f, 0x6f, 0xd5, 0xfa, 0xb9, 0x0a, 0xf6, 0x83, + 0x2e, 0x92, 0xd7, 0x90, 0x6b, 0x53, 0x66, 0x0c, 0x4f, 0xa4, 0xd1, 0xb8, 0xbf, 0xaa, 0x6f, 0x27, + 0x6c, 0xde, 0x29, 0x43, 0xe4, 0x14, 0x48, 0x8e, 0x14, 0xb3, 0xb2, 0x78, 0x30, 0xb8, 0x31, 0xfb, + 0x60, 0xe7, 0x51, 0x93, 0x7a, 0x25, 0xe0, 0xd7, 0x13, 0x36, 0x47, 0xa7, 0x45, 0x84, 0x1c, 0x83, + 0x9d, 0xc4, 0x29, 0x1d, 0xb3, 0xf0, 0x5a, 0x4c, 0xa7, 0xe5, 0xa3, 0x58, 0xa2, 0x00, 0x49, 0x9c, + 0x76, 0x0b, 0x18, 0x6b, 0xf3, 0x9b, 0x29, 0x6b, 0x2b, 0xff, 0xaf, 0x65, 0xf3, 0x45, 0xed, 0x1b, + 0x78, 0x8a, 0x0d, 0x11, 0xd9, 0x38, 0x9f, 0x04, 0x8d, 0x93, 0x5e, 0x5c, 0x6a, 0x6f, 0x11, 0xeb, + 0x0a, 0xd8, 0x0d, 0x45, 
0xb2, 0x64, 0x6e, 0xbb, 0x80, 0x83, 0x3b, 0xca, 0xad, 0x46, 0xd6, 0xd7, + 0x8f, 0x25, 0x19, 0x89, 0x19, 0x4b, 0x23, 0x57, 0xa8, 0xc8, 0x8b, 0x78, 0x8a, 0x1b, 0xf1, 0x8a, + 0x14, 0x93, 0xb1, 0xfe, 0xdb, 0x2f, 0xe9, 0x3d, 0xae, 0xc6, 0xeb, 0xc8, 0x1e, 0xfe, 0x0e, 0x00, + 0x00, 0xff, 0xff, 0xa5, 0xce, 0xef, 0x87, 0x3a, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/target.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/target.pb.go new file mode 100644 index 0000000..c9a10a7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/target.pb.go @@ -0,0 +1,896 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/tasks/v2beta3/target.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The HTTP method used to execute the task. +type HttpMethod int32 + +const ( + // HTTP method unspecified + HttpMethod_HTTP_METHOD_UNSPECIFIED HttpMethod = 0 + // HTTP POST + HttpMethod_POST HttpMethod = 1 + // HTTP GET + HttpMethod_GET HttpMethod = 2 + // HTTP HEAD + HttpMethod_HEAD HttpMethod = 3 + // HTTP PUT + HttpMethod_PUT HttpMethod = 4 + // HTTP DELETE + HttpMethod_DELETE HttpMethod = 5 + // HTTP PATCH + HttpMethod_PATCH HttpMethod = 6 + // HTTP OPTIONS + HttpMethod_OPTIONS HttpMethod = 7 +) + +var HttpMethod_name = map[int32]string{ + 0: "HTTP_METHOD_UNSPECIFIED", + 1: "POST", + 2: "GET", + 3: "HEAD", + 4: "PUT", + 5: "DELETE", + 6: "PATCH", + 7: "OPTIONS", +} +var HttpMethod_value = map[string]int32{ + "HTTP_METHOD_UNSPECIFIED": 0, + "POST": 1, + "GET": 2, + "HEAD": 3, + "PUT": 4, + "DELETE": 5, + "PATCH": 6, + "OPTIONS": 7, +} + +func (x HttpMethod) String() string { + return proto.EnumName(HttpMethod_name, int32(x)) +} +func (HttpMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{0} +} + +// HTTP request. +// +// Warning: This is an [alpha](https://cloud.google.com/terms/launch-stages) +// feature. If you haven't already joined, you can [use this form to sign +// up](https://docs.google.com/forms/d/e/1FAIpQLSfc4uEy9CBHKYUSdnY1hdhKDCX7julVZHy3imOiR-XrU7bUNQ/viewform). +// +// The task will be pushed to the worker as an HTTP request. If the worker +// or the redirected worker acknowledges the task by returning a successful HTTP +// response code ([`200` - `299`]), the task will removed from the queue. If +// any other HTTP response code is returned or no response is received, the +// task will be retried according to the following: +// +// * User-specified throttling: [retry configuration][Queue.RetryConfig], +// [rate limits][Queue.RateLimits], and the [queue's state][google.cloud.tasks.v2beta3.Queue.state]. +// +// * System throttling: To prevent the worker from overloading, Cloud Tasks may +// temporarily reduce the queue's effective rate. 
User-specified settings +// will not be changed. +// +// System throttling happens because: +// +// * Cloud Tasks backoffs on all errors. Normally the backoff specified in +// [rate limits][Queue.RateLimits] will be used. But if the worker returns +// `429` (Too Many Requests), `503` (Service Unavailable), or the rate of +// errors is high, Cloud Tasks will use a higher backoff rate. The retry +// specified in the `Retry-After` HTTP response header is considered. +// +// * To prevent traffic spikes and to smooth sudden large traffic spikes, +// dispatches ramp up slowly when the queue is newly created or idle and +// if large numbers of tasks suddenly become available to dispatch (due to +// spikes in create task rates, the queue being unpaused, or many tasks +// that are scheduled at the same time). +type HttpRequest struct { + // Required. The full url path that the request will be sent to. + // + // This string must begin with either "http://" or "https://". Some examples + // are: `http://acme.com` and `https://acme.com/sales:8080`. Cloud Tasks will + // encode some characters for safety and compatibility. The maximum allowed + // URL length is 2083 characters after encoding. + // + // The `Location` header response from a redirect response [`300` - `399`] + // may be followed. The redirect is not counted as a separate attempt. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // The HTTP method to use for the request. The default is POST. + HttpMethod HttpMethod `protobuf:"varint,2,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.tasks.v2beta3.HttpMethod" json:"http_method,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // + // These headers represent a subset of the headers that will accompany the + // task's HTTP request. Some HTTP request headers will be ignored or replaced. + // + // A partial list of headers that will be ignored or replaced is: + // + // * Host: This will be computed by Cloud Tasks and derived from + // [HttpRequest.url][google.cloud.tasks.v2beta3.HttpRequest.url]. + // * Content-Length: This will be computed by Cloud Tasks. + // * User-Agent: This will be set to `"Google-Cloud-Tasks"`. + // * X-Google-*: Google use only. + // * X-AppEngine-*: Google use only. + // + // `Content-Type` won't be set by Cloud Tasks. You can explicitly set + // `Content-Type` to a media type when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/octet-stream"` or + // `"application/json"`. + // + // Headers which can have multiple values (according to RFC2616) can be + // specified using comma-separated values. + // + // The size of the headers must be less than 80KB. + Headers map[string]string `protobuf:"bytes,3,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // HTTP request body. + // + // A request body is allowed only if the + // [HTTP method][google.cloud.tasks.v2beta3.HttpRequest.http_method] is POST, PUT, or PATCH. It is an + // error to set body on a task with an incompatible [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. + Body []byte `protobuf:"bytes,4,opt,name=body,proto3" json:"body,omitempty"` + // The mode for generating an `Authorization` header for HTTP requests. 
+ // + // If specified, all `Authorization` headers in the [HttpTarget.headers][] + // field will be overridden. + // + // Types that are valid to be assigned to AuthorizationHeader: + // *HttpRequest_OauthToken + // *HttpRequest_OidcToken + AuthorizationHeader isHttpRequest_AuthorizationHeader `protobuf_oneof:"authorization_header"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpRequest) Reset() { *m = HttpRequest{} } +func (m *HttpRequest) String() string { return proto.CompactTextString(m) } +func (*HttpRequest) ProtoMessage() {} +func (*HttpRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{0} +} +func (m *HttpRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpRequest.Unmarshal(m, b) +} +func (m *HttpRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpRequest.Marshal(b, m, deterministic) +} +func (dst *HttpRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpRequest.Merge(dst, src) +} +func (m *HttpRequest) XXX_Size() int { + return xxx_messageInfo_HttpRequest.Size(m) +} +func (m *HttpRequest) XXX_DiscardUnknown() { + xxx_messageInfo_HttpRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpRequest proto.InternalMessageInfo + +func (m *HttpRequest) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *HttpRequest) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *HttpRequest) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *HttpRequest) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +type isHttpRequest_AuthorizationHeader interface { + isHttpRequest_AuthorizationHeader() +} + +type HttpRequest_OauthToken struct { + OauthToken *OAuthToken `protobuf:"bytes,5,opt,name=oauth_token,json=oauthToken,proto3,oneof"` +} + +type HttpRequest_OidcToken struct { + OidcToken *OidcToken `protobuf:"bytes,6,opt,name=oidc_token,json=oidcToken,proto3,oneof"` +} + +func (*HttpRequest_OauthToken) isHttpRequest_AuthorizationHeader() {} + +func (*HttpRequest_OidcToken) isHttpRequest_AuthorizationHeader() {} + +func (m *HttpRequest) GetAuthorizationHeader() isHttpRequest_AuthorizationHeader { + if m != nil { + return m.AuthorizationHeader + } + return nil +} + +func (m *HttpRequest) GetOauthToken() *OAuthToken { + if x, ok := m.GetAuthorizationHeader().(*HttpRequest_OauthToken); ok { + return x.OauthToken + } + return nil +} + +func (m *HttpRequest) GetOidcToken() *OidcToken { + if x, ok := m.GetAuthorizationHeader().(*HttpRequest_OidcToken); ok { + return x.OidcToken + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*HttpRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HttpRequest_OneofMarshaler, _HttpRequest_OneofUnmarshaler, _HttpRequest_OneofSizer, []interface{}{ + (*HttpRequest_OauthToken)(nil), + (*HttpRequest_OidcToken)(nil), + } +} + +func _HttpRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HttpRequest) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpRequest_OauthToken: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OauthToken); err != nil { + return err + } + case *HttpRequest_OidcToken: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OidcToken); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HttpRequest.AuthorizationHeader has unexpected type %T", x) + } + return nil +} + +func _HttpRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HttpRequest) + switch tag { + case 5: // authorization_header.oauth_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OAuthToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpRequest_OauthToken{msg} + return true, err + case 6: // authorization_header.oidc_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(OidcToken) + err := b.DecodeMessage(msg) + m.AuthorizationHeader = &HttpRequest_OidcToken{msg} + return true, err + default: + return false, nil + } +} + +func _HttpRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HttpRequest) + // authorization_header + switch x := m.AuthorizationHeader.(type) { + case *HttpRequest_OauthToken: + s := proto.Size(x.OauthToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HttpRequest_OidcToken: + s := proto.Size(x.OidcToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// App Engine HTTP queue. +// +// The task will be delivered to the App Engine application hostname +// specified by its [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] and [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest]. +// The documentation for [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] explains how the +// task's host URL is constructed. +// +// Using [AppEngineHttpQueue][google.cloud.tasks.v2beta3.AppEngineHttpQueue] requires +// [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) +// Google IAM permission for the project +// and the following scope: +// +// `https://www.googleapis.com/auth/cloud-platform` +type AppEngineHttpQueue struct { + // Overrides for the + // [task-level app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + // + // If set, `app_engine_routing_override` is used for all tasks in + // the queue, no matter what the setting is for the + // [task-level app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. 
+ AppEngineRoutingOverride *AppEngineRouting `protobuf:"bytes,1,opt,name=app_engine_routing_override,json=appEngineRoutingOverride,proto3" json:"app_engine_routing_override,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpQueue) Reset() { *m = AppEngineHttpQueue{} } +func (m *AppEngineHttpQueue) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpQueue) ProtoMessage() {} +func (*AppEngineHttpQueue) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{1} +} +func (m *AppEngineHttpQueue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpQueue.Unmarshal(m, b) +} +func (m *AppEngineHttpQueue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpQueue.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpQueue) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpQueue.Merge(dst, src) +} +func (m *AppEngineHttpQueue) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpQueue.Size(m) +} +func (m *AppEngineHttpQueue) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpQueue.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpQueue proto.InternalMessageInfo + +func (m *AppEngineHttpQueue) GetAppEngineRoutingOverride() *AppEngineRouting { + if m != nil { + return m.AppEngineRoutingOverride + } + return nil +} + +// App Engine HTTP request. +// +// The message defines the HTTP request that is sent to an App Engine app when +// the task is dispatched. +// +// This proto can only be used for tasks in a queue which has +// [app_engine_http_queue][google.cloud.tasks.v2beta3.Queue.app_engine_http_queue] set. +// +// Using [AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest] requires +// [`appengine.applications.get`](https://cloud.google.com/appengine/docs/admin-api/access-control) +// Google IAM permission for the project +// and the following scope: +// +// `https://www.googleapis.com/auth/cloud-platform` +// +// The task will be delivered to the App Engine app which belongs to the same +// project as the queue. For more information, see +// [How Requests are +// Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) +// and how routing is affected by +// [dispatch +// files](https://cloud.google.com/appengine/docs/python/config/dispatchref). +// Traffic is encrypted during transport and never leaves Google datacenters. +// Because this traffic is carried over a communication mechanism internal to +// Google, you cannot explicitly set the protocol (for example, HTTP or HTTPS). +// The request to the handler, however, will appear to have used the HTTP +// protocol. +// +// The [AppEngineRouting][google.cloud.tasks.v2beta3.AppEngineRouting] used to construct the URL that the task is +// delivered to can be set at the queue-level or task-level: +// +// * If set, +// [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] +// is used for all tasks in the queue, no matter what the setting +// is for the +// [task-level app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. 
+// +// +// The `url` that the task will be sent to is: +// +// * `url =` [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] `+` +// [relative_uri][google.cloud.tasks.v2beta3.AppEngineHttpRequest.relative_uri] +// +// Tasks can be dispatched to secure app handlers, unsecure app handlers, and +// URIs restricted with +// [`login: +// admin`](https://cloud.google.com/appengine/docs/standard/python/config/appref). +// Because tasks are not run as any user, they cannot be dispatched to URIs +// restricted with +// [`login: +// required`](https://cloud.google.com/appengine/docs/standard/python/config/appref) +// Task dispatches also do not follow redirects. +// +// The task attempt has succeeded if the app's request handler returns +// an HTTP response code in the range [`200` - `299`]. `503` is +// considered an App Engine system error instead of an application +// error. Requests returning error `503` will be retried regardless of +// retry configuration and not counted against retry counts. +// Any other response code or a failure to receive a response before the +// deadline is a failed attempt. +type AppEngineHttpRequest struct { + // The HTTP method to use for the request. The default is POST. + // + // The app's request handler for the task's target URL must be able to handle + // HTTP requests with this http_method, otherwise the task attempt will fail + // with error code 405 (Method Not Allowed). See + // [Writing a push task request + // handler](https://cloud.google.com/appengine/docs/java/taskqueue/push/creating-handlers#writing_a_push_task_request_handler) + // and the documentation for the request handlers in the language your app is + // written in e.g. + // [Python Request + // Handler](https://cloud.google.com/appengine/docs/python/tools/webapp/requesthandlerclass). + HttpMethod HttpMethod `protobuf:"varint,1,opt,name=http_method,json=httpMethod,proto3,enum=google.cloud.tasks.v2beta3.HttpMethod" json:"http_method,omitempty"` + // Task-level setting for App Engine routing. + // + // If set, + // [app_engine_routing_override][google.cloud.tasks.v2beta3.AppEngineHttpQueue.app_engine_routing_override] + // is used for all tasks in the queue, no matter what the setting is for the + // [task-level app_engine_routing][google.cloud.tasks.v2beta3.AppEngineHttpRequest.app_engine_routing]. + AppEngineRouting *AppEngineRouting `protobuf:"bytes,2,opt,name=app_engine_routing,json=appEngineRouting,proto3" json:"app_engine_routing,omitempty"` + // The relative URI. + // + // The relative URI must begin with "/" and must be a valid HTTP relative URI. + // It can contain a path and query string arguments. + // If the relative URI is empty, then the root path "/" will be used. + // No spaces are allowed, and the maximum length allowed is 2083 characters. + RelativeUri string `protobuf:"bytes,3,opt,name=relative_uri,json=relativeUri,proto3" json:"relative_uri,omitempty"` + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // Repeated headers are not supported but a header value can contain commas. + // + // Cloud Tasks sets some headers to default values: + // + // * `User-Agent`: By default, this header is + // `"AppEngine-Google; (+http://code.google.com/appengine)"`. + // This header can be modified, but Cloud Tasks will append + // `"AppEngine-Google; (+http://code.google.com/appengine)"` to the + // modified `User-Agent`. 
+ // + // If the task has a [body][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body], Cloud + // Tasks sets the following headers: + // + // * `Content-Type`: By default, the `Content-Type` header is set to + // `"application/octet-stream"`. The default can be overridden by explicitly + // setting `Content-Type` to a particular media type when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/json"`. + // * `Content-Length`: This is computed by Cloud Tasks. This value is + // output only. It cannot be changed. + // + // The headers below cannot be set or overridden: + // + // * `Host` + // * `X-Google-*` + // * `X-AppEngine-*` + // + // In addition, Cloud Tasks sets some headers when the task is dispatched, + // such as headers containing information about the task; see + // [request + // headers](https://cloud.google.com/appengine/docs/python/taskqueue/push/creating-handlers#reading_request_headers). + // These headers are set only when the task is dispatched, so they are not + // visible when the task is returned in a Cloud Tasks response. + // + // Although there is no specific limit for the maximum number of headers or + // the size, there is a limit on the maximum size of the [Task][google.cloud.tasks.v2beta3.Task]. For more + // information, see the [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask] documentation. + Headers map[string]string `protobuf:"bytes,4,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // HTTP request body. + // + // A request body is allowed only if the HTTP method is POST or PUT. It is + // an error to set a body on a task with an incompatible [HttpMethod][google.cloud.tasks.v2beta3.HttpMethod]. 
+ Body []byte `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineHttpRequest) Reset() { *m = AppEngineHttpRequest{} } +func (m *AppEngineHttpRequest) String() string { return proto.CompactTextString(m) } +func (*AppEngineHttpRequest) ProtoMessage() {} +func (*AppEngineHttpRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{2} +} +func (m *AppEngineHttpRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineHttpRequest.Unmarshal(m, b) +} +func (m *AppEngineHttpRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineHttpRequest.Marshal(b, m, deterministic) +} +func (dst *AppEngineHttpRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineHttpRequest.Merge(dst, src) +} +func (m *AppEngineHttpRequest) XXX_Size() int { + return xxx_messageInfo_AppEngineHttpRequest.Size(m) +} +func (m *AppEngineHttpRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineHttpRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineHttpRequest proto.InternalMessageInfo + +func (m *AppEngineHttpRequest) GetHttpMethod() HttpMethod { + if m != nil { + return m.HttpMethod + } + return HttpMethod_HTTP_METHOD_UNSPECIFIED +} + +func (m *AppEngineHttpRequest) GetAppEngineRouting() *AppEngineRouting { + if m != nil { + return m.AppEngineRouting + } + return nil +} + +func (m *AppEngineHttpRequest) GetRelativeUri() string { + if m != nil { + return m.RelativeUri + } + return "" +} + +func (m *AppEngineHttpRequest) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +func (m *AppEngineHttpRequest) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// App Engine Routing. +// +// Defines routing characteristics specific to App Engine - service, version, +// and instance. +// +// For more information about services, versions, and instances see +// [An Overview of App +// Engine](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine), +// [Microservices Architecture on Google App +// Engine](https://cloud.google.com/appengine/docs/python/microservices-on-app-engine), +// [App Engine Standard request +// routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed), +// and [App Engine Flex request +// routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). +type AppEngineRouting struct { + // App service. + // + // By default, the task is sent to the service which is the default + // service when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is not parsable + // into [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. For example, some tasks + // which were created using the App Engine SDK use a custom domain + // name; custom domains are not parsed by Cloud Tasks. 
If + // [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is not parsable, then + // [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] are the empty string. + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + // App version. + // + // By default, the task is sent to the version which is the default + // version when the task is attempted. + // + // For some queues or tasks which were created using the App Engine + // Task Queue API, [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is not parsable + // into [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. For example, some tasks + // which were created using the App Engine SDK use a custom domain + // name; custom domains are not parsed by Cloud Tasks. If + // [host][google.cloud.tasks.v2beta3.AppEngineRouting.host] is not parsable, then + // [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], + // [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], and + // [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance] are the empty string. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // App instance. + // + // By default, the task is sent to an instance which is available when + // the task is attempted. + // + // Requests can only be sent to a specific instance if + // [manual scaling is used in App Engine + // Standard](https://cloud.google.com/appengine/docs/python/an-overview-of-app-engine?hl=en_US#scaling_types_and_instance_classes). + // App Engine Flex does not support instances. For more information, see + // [App Engine Standard request + // routing](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed) + // and [App Engine Flex request + // routing](https://cloud.google.com/appengine/docs/flexible/python/how-requests-are-routed). + Instance string `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + // Output only. The host that the task is sent to. + // + // The host is constructed from the domain name of the app associated with + // the queue's project ID (for example .appspot.com), and the + // [service][google.cloud.tasks.v2beta3.AppEngineRouting.service], [version][google.cloud.tasks.v2beta3.AppEngineRouting.version], + // and [instance][google.cloud.tasks.v2beta3.AppEngineRouting.instance]. Tasks which were created using + // the App Engine SDK might have a custom domain name. + // + // For more information, see + // [How Requests are + // Routed](https://cloud.google.com/appengine/docs/standard/python/how-requests-are-routed). 
+ Host string `protobuf:"bytes,4,opt,name=host,proto3" json:"host,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AppEngineRouting) Reset() { *m = AppEngineRouting{} } +func (m *AppEngineRouting) String() string { return proto.CompactTextString(m) } +func (*AppEngineRouting) ProtoMessage() {} +func (*AppEngineRouting) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{3} +} +func (m *AppEngineRouting) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AppEngineRouting.Unmarshal(m, b) +} +func (m *AppEngineRouting) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AppEngineRouting.Marshal(b, m, deterministic) +} +func (dst *AppEngineRouting) XXX_Merge(src proto.Message) { + xxx_messageInfo_AppEngineRouting.Merge(dst, src) +} +func (m *AppEngineRouting) XXX_Size() int { + return xxx_messageInfo_AppEngineRouting.Size(m) +} +func (m *AppEngineRouting) XXX_DiscardUnknown() { + xxx_messageInfo_AppEngineRouting.DiscardUnknown(m) +} + +var xxx_messageInfo_AppEngineRouting proto.InternalMessageInfo + +func (m *AppEngineRouting) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AppEngineRouting) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *AppEngineRouting) GetInstance() string { + if m != nil { + return m.Instance + } + return "" +} + +func (m *AppEngineRouting) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +// Contains information needed for generating an +// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +// This type of authorization should be used when sending requests to a GCP +// endpoint. +type OAuthToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OAuth token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // OAuth scope to be used for generating OAuth access token. + // If not specified, "https://www.googleapis.com/auth/cloud-platform" + // will be used. 
+ Scope string `protobuf:"bytes,2,opt,name=scope,proto3" json:"scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OAuthToken) Reset() { *m = OAuthToken{} } +func (m *OAuthToken) String() string { return proto.CompactTextString(m) } +func (*OAuthToken) ProtoMessage() {} +func (*OAuthToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{4} +} +func (m *OAuthToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OAuthToken.Unmarshal(m, b) +} +func (m *OAuthToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OAuthToken.Marshal(b, m, deterministic) +} +func (dst *OAuthToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OAuthToken.Merge(dst, src) +} +func (m *OAuthToken) XXX_Size() int { + return xxx_messageInfo_OAuthToken.Size(m) +} +func (m *OAuthToken) XXX_DiscardUnknown() { + xxx_messageInfo_OAuthToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OAuthToken proto.InternalMessageInfo + +func (m *OAuthToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OAuthToken) GetScope() string { + if m != nil { + return m.Scope + } + return "" +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). This +// type of authorization should be used when sending requests to third party +// endpoints. +type OidcToken struct { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OIDC token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // Audience to be used when generating OIDC token. If not specified, the URI + // specified in target will be used. 
+ Audience string `protobuf:"bytes,2,opt,name=audience,proto3" json:"audience,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OidcToken) Reset() { *m = OidcToken{} } +func (m *OidcToken) String() string { return proto.CompactTextString(m) } +func (*OidcToken) ProtoMessage() {} +func (*OidcToken) Descriptor() ([]byte, []int) { + return fileDescriptor_target_6c51f433dca06861, []int{5} +} +func (m *OidcToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OidcToken.Unmarshal(m, b) +} +func (m *OidcToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OidcToken.Marshal(b, m, deterministic) +} +func (dst *OidcToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_OidcToken.Merge(dst, src) +} +func (m *OidcToken) XXX_Size() int { + return xxx_messageInfo_OidcToken.Size(m) +} +func (m *OidcToken) XXX_DiscardUnknown() { + xxx_messageInfo_OidcToken.DiscardUnknown(m) +} + +var xxx_messageInfo_OidcToken proto.InternalMessageInfo + +func (m *OidcToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *OidcToken) GetAudience() string { + if m != nil { + return m.Audience + } + return "" +} + +func init() { + proto.RegisterType((*HttpRequest)(nil), "google.cloud.tasks.v2beta3.HttpRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.tasks.v2beta3.HttpRequest.HeadersEntry") + proto.RegisterType((*AppEngineHttpQueue)(nil), "google.cloud.tasks.v2beta3.AppEngineHttpQueue") + proto.RegisterType((*AppEngineHttpRequest)(nil), "google.cloud.tasks.v2beta3.AppEngineHttpRequest") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.tasks.v2beta3.AppEngineHttpRequest.HeadersEntry") + proto.RegisterType((*AppEngineRouting)(nil), "google.cloud.tasks.v2beta3.AppEngineRouting") + proto.RegisterType((*OAuthToken)(nil), "google.cloud.tasks.v2beta3.OAuthToken") + proto.RegisterType((*OidcToken)(nil), "google.cloud.tasks.v2beta3.OidcToken") + proto.RegisterEnum("google.cloud.tasks.v2beta3.HttpMethod", HttpMethod_name, HttpMethod_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta3/target.proto", fileDescriptor_target_6c51f433dca06861) +} + +var fileDescriptor_target_6c51f433dca06861 = []byte{ + // 707 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdf, 0x6f, 0xd3, 0x3a, + 0x14, 0x5e, 0x96, 0xfe, 0x58, 0x4f, 0xa6, 0xab, 0xc8, 0x77, 0xf7, 0xde, 0xa8, 0xbb, 0x42, 0xa5, + 0x12, 0x50, 0x21, 0x94, 0x4a, 0x1d, 0x0f, 0x68, 0x08, 0xa1, 0x6e, 0xcd, 0xd6, 0x4a, 0x6c, 0x0d, + 0x59, 0x06, 0xd2, 0x78, 0x88, 0xbc, 0xc4, 0x4a, 0xad, 0x76, 0x71, 0x70, 0x9c, 0x4a, 0xe5, 0x8d, + 0x27, 0xfe, 0x20, 0xfe, 0x41, 0x14, 0x27, 0x69, 0xa1, 0x40, 0x19, 0xe3, 0xcd, 0xdf, 0x39, 0xc7, + 0x9f, 0xcf, 0x77, 0xf2, 0xd9, 0x81, 0x47, 0x21, 0x63, 0xe1, 0x8c, 0x74, 0xfd, 0x19, 0x4b, 0x83, + 0xae, 0xc0, 0xc9, 0x34, 0xe9, 0xce, 0x7b, 0xd7, 0x44, 0xe0, 0x83, 0xae, 0xc0, 0x3c, 0x24, 0xc2, + 0x8c, 0x39, 0x13, 0x0c, 0x35, 0xf3, 0x42, 0x53, 0x16, 0x9a, 0xb2, 0xd0, 0x2c, 0x0a, 0x9b, 0xff, + 0x17, 0x24, 0x38, 0xa6, 0x5d, 0x1c, 0x45, 0x4c, 0x60, 0x41, 0x59, 0x94, 0xe4, 0x3b, 0xdb, 0x9f, + 0x55, 0xd0, 0x86, 0x42, 0xc4, 0x0e, 0x79, 0x9f, 0x92, 0x44, 0x20, 0x1d, 0xd4, 0x94, 0xcf, 0x0c, + 0xa5, 0xa5, 0x74, 0x1a, 0x4e, 0xb6, 0x44, 0xa7, 0xa0, 0x4d, 0x84, 0x88, 0xbd, 0x1b, 0x22, 0x26, + 0x2c, 0x30, 0xb6, 0x5b, 0x4a, 0xe7, 0xaf, 0xde, 0x43, 0xf3, 0xe7, 0x27, 
0x9a, 0x19, 0xdf, 0x99, + 0xac, 0x76, 0x60, 0xb2, 0x5c, 0xa3, 0x73, 0xa8, 0x4f, 0x08, 0x0e, 0x08, 0x4f, 0x0c, 0xb5, 0xa5, + 0x76, 0xb4, 0xde, 0xd3, 0x5f, 0x91, 0x14, 0x4d, 0x99, 0xc3, 0x7c, 0x9b, 0x15, 0x09, 0xbe, 0x70, + 0x4a, 0x12, 0x84, 0xa0, 0x72, 0xcd, 0x82, 0x85, 0x51, 0x69, 0x29, 0x9d, 0x5d, 0x47, 0xae, 0xd1, + 0x08, 0x34, 0x86, 0x53, 0x31, 0xf1, 0x04, 0x9b, 0x92, 0xc8, 0xa8, 0xb6, 0x94, 0x8e, 0xb6, 0xb9, + 0xd9, 0x71, 0x3f, 0x15, 0x13, 0x37, 0xab, 0x1e, 0x6e, 0x39, 0x20, 0x37, 0x4b, 0x84, 0x4e, 0x00, + 0x18, 0x0d, 0xfc, 0x82, 0xa9, 0x26, 0x99, 0x1e, 0x6c, 0x64, 0xa2, 0x81, 0x5f, 0x12, 0x35, 0x58, + 0x09, 0x9a, 0x87, 0xb0, 0xfb, 0x75, 0xff, 0xd9, 0x84, 0xa7, 0x64, 0x51, 0x4e, 0x78, 0x4a, 0x16, + 0x68, 0x0f, 0xaa, 0x73, 0x3c, 0x4b, 0x89, 0x9c, 0x6d, 0xc3, 0xc9, 0xc1, 0xe1, 0xf6, 0x33, 0xe5, + 0xe8, 0x5f, 0xd8, 0xcb, 0x1a, 0x62, 0x9c, 0x7e, 0x90, 0x5f, 0xcd, 0xcb, 0xb5, 0xb7, 0x3f, 0x2a, + 0x80, 0xfa, 0x71, 0x6c, 0x45, 0x21, 0x8d, 0x48, 0x36, 0xa9, 0xd7, 0x29, 0x49, 0x09, 0x9a, 0xc2, + 0x3e, 0x8e, 0x63, 0x8f, 0xc8, 0xb0, 0xc7, 0x59, 0x2a, 0x68, 0x14, 0x7a, 0x6c, 0x4e, 0x38, 0xa7, + 0x01, 0x91, 0x47, 0x6a, 0xbd, 0x27, 0x9b, 0x34, 0x2c, 0x49, 0x9d, 0x7c, 0xb3, 0x63, 0xe0, 0xb5, + 0xc8, 0xb8, 0x60, 0x6b, 0x7f, 0x52, 0x61, 0xef, 0x9b, 0x1e, 0x4a, 0x0b, 0xad, 0x19, 0x46, 0xb9, + 0xb3, 0x61, 0xae, 0x00, 0x7d, 0x2f, 0x47, 0x0e, 0xe9, 0x77, 0x55, 0xe8, 0xeb, 0x2a, 0xd0, 0x7d, + 0xd8, 0xe5, 0x64, 0x86, 0x05, 0x9d, 0x13, 0x2f, 0xe5, 0xd4, 0x50, 0xe5, 0xe8, 0xb5, 0x32, 0x76, + 0xc9, 0x29, 0x7a, 0xbb, 0xf2, 0x6b, 0x45, 0xfa, 0xf5, 0xc5, 0xad, 0xce, 0xbc, 0xbd, 0x71, 0xab, + 0x2b, 0xe3, 0xfe, 0x89, 0x4b, 0xda, 0x73, 0xd0, 0xd7, 0x15, 0x23, 0x03, 0xea, 0x09, 0xe1, 0x73, + 0xea, 0x93, 0x82, 0xa3, 0x84, 0x59, 0x66, 0x4e, 0x78, 0x42, 0x59, 0x54, 0x30, 0x95, 0x10, 0x35, + 0x61, 0x87, 0x46, 0x89, 0xc0, 0x91, 0x4f, 0x8a, 0x79, 0x2c, 0x71, 0xd6, 0xf3, 0x84, 0x25, 0x42, + 0x5e, 0xb6, 0x86, 0x23, 0xd7, 0xed, 0x37, 0x00, 0xab, 0xdb, 0x83, 0x7a, 0xf0, 0x4f, 0x71, 0x84, + 0x87, 0x7d, 0x9f, 0xa5, 0x91, 0xf0, 0xc8, 0x0d, 0xa6, 0xe5, 0x5b, 0xf2, 0x77, 0x91, 0xec, 0xe7, + 0x39, 0x2b, 0x4b, 0x65, 0x9a, 0x12, 0x9f, 0xc5, 0x4b, 0x4d, 0x12, 0xb4, 0xdf, 0x41, 0x63, 0x79, + 0x97, 0xee, 0x44, 0xdb, 0x84, 0x1d, 0x9c, 0x06, 0x94, 0x64, 0x42, 0x72, 0xe6, 0x25, 0x7e, 0x9c, + 0x00, 0xac, 0xec, 0x86, 0xf6, 0xe1, 0xbf, 0xa1, 0xeb, 0xda, 0xde, 0x99, 0xe5, 0x0e, 0xc7, 0x03, + 0xef, 0xf2, 0xfc, 0xc2, 0xb6, 0x8e, 0x47, 0x27, 0x23, 0x6b, 0xa0, 0x6f, 0xa1, 0x1d, 0xa8, 0xd8, + 0xe3, 0x0b, 0x57, 0x57, 0x50, 0x1d, 0xd4, 0x53, 0xcb, 0xd5, 0xb7, 0xb3, 0xd0, 0xd0, 0xea, 0x0f, + 0x74, 0x35, 0x0b, 0xd9, 0x97, 0xae, 0x5e, 0x41, 0x00, 0xb5, 0x81, 0xf5, 0xca, 0x72, 0x2d, 0xbd, + 0x8a, 0x1a, 0x50, 0xb5, 0xfb, 0xee, 0xf1, 0x50, 0xaf, 0x21, 0x0d, 0xea, 0x63, 0xdb, 0x1d, 0x8d, + 0xcf, 0x2f, 0xf4, 0xfa, 0x51, 0x0c, 0xf7, 0x7c, 0x76, 0xb3, 0xc1, 0x3e, 0x47, 0x9a, 0x2b, 0xdf, + 0x73, 0x3b, 0x7b, 0x94, 0x6d, 0xe5, 0xea, 0x65, 0x51, 0x1a, 0xb2, 0x19, 0x8e, 0x42, 0x93, 0xf1, + 0xb0, 0x1b, 0x92, 0x48, 0x3e, 0xd9, 0xdd, 0x3c, 0x85, 0x63, 0x9a, 0xfc, 0xe8, 0xc7, 0xf0, 0x5c, + 0xa2, 0xeb, 0x9a, 0xac, 0x3d, 0xf8, 0x12, 0x00, 0x00, 0xff, 0xff, 0xf1, 0xca, 0x64, 0x96, 0x43, + 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/task.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/task.pb.go new file mode 100644 index 0000000..b153f61 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/tasks/v2beta3/task.pb.go @@ -0,0 +1,489 @@ +// Code generated by protoc-gen-go. 
DO NOT EDIT. +// source: google/cloud/tasks/v2beta3/task.proto + +package tasks // import "google.golang.org/genproto/googleapis/cloud/tasks/v2beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The view specifies a subset of [Task][google.cloud.tasks.v2beta3.Task] data. +// +// When a task is returned in a response, not all +// information is retrieved by default because some data, such as +// payloads, might be desirable to return only when needed because +// of its large size or because of the sensitivity of data that it +// contains. +type Task_View int32 + +const ( + // Unspecified. Defaults to BASIC. + Task_VIEW_UNSPECIFIED Task_View = 0 + // The basic view omits fields which can be large or can contain + // sensitive data. + // + // This view does not include the + // [body in AppEngineHttpRequest][google.cloud.tasks.v2beta3.AppEngineHttpRequest.body]. + // Bodies are desirable to return only when needed, because they + // can be large and because of the sensitivity of the data that you + // choose to store in it. + Task_BASIC Task_View = 1 + // All information is returned. + // + // Authorization for [FULL][google.cloud.tasks.v2beta3.Task.View.FULL] requires + // `cloudtasks.tasks.fullView` [Google IAM](https://cloud.google.com/iam/) + // permission on the [Queue][google.cloud.tasks.v2beta3.Queue] resource. + Task_FULL Task_View = 2 +) + +var Task_View_name = map[int32]string{ + 0: "VIEW_UNSPECIFIED", + 1: "BASIC", + 2: "FULL", +} +var Task_View_value = map[string]int32{ + "VIEW_UNSPECIFIED": 0, + "BASIC": 1, + "FULL": 2, +} + +func (x Task_View) String() string { + return proto.EnumName(Task_View_name, int32(x)) +} +func (Task_View) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_task_9659896bd52bfaac, []int{0, 0} +} + +// A unit of scheduled work. +type Task struct { + // Optionally caller-specified in [CreateTask][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // + // The task name. + // + // The task name must have the following format: + // `projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID` + // + // * `PROJECT_ID` can contain letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), colons (:), or periods (.). + // For more information, see + // [Identifying + // projects](https://cloud.google.com/resource-manager/docs/creating-managing-projects#identifying_projects) + // * `LOCATION_ID` is the canonical ID for the task's location. + // The list of available locations can be obtained by calling + // [ListLocations][google.cloud.location.Locations.ListLocations]. + // For more information, see https://cloud.google.com/about/locations/. + // * `QUEUE_ID` can contain letters ([A-Za-z]), numbers ([0-9]), or + // hyphens (-). The maximum length is 100 characters. 
+ // * `TASK_ID` can contain only letters ([A-Za-z]), numbers ([0-9]), + // hyphens (-), or underscores (_). The maximum length is 500 characters. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The message to send to the worker. + // + // Types that are valid to be assigned to PayloadType: + // *Task_AppEngineHttpRequest + // *Task_HttpRequest + PayloadType isTask_PayloadType `protobuf_oneof:"payload_type"` + // The time when the task is scheduled to be attempted. + // + // For App Engine queues, this is when the task will be attempted or retried. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that the task was created. + // + // `create_time` will be truncated to the nearest second. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The deadline for requests sent to the worker. If the worker does not + // respond by this deadline then the request is cancelled and the attempt + // is marked as a `DEADLINE_EXCEEDED` failure. Cloud Tasks will retry the + // task according to the [RetryConfig][google.cloud.tasks.v2beta3.RetryConfig]. + // + // Note that when the request is cancelled, Cloud Tasks will stop listing for + // the response, but whether the worker stops processing depends on the + // worker. For example, if the worker is stuck, it may not react to cancelled + // requests. + // + // The default and maximum values depend on the type of request: + // + // * For [HTTP tasks][google.cloud.tasks.v2beta3.HttpRequest], the default is 10 minutes. The deadline + // must be in the interval [15 seconds, 30 minutes]. + // + // * For [App Engine tasks][google.cloud.tasks.v2beta3.AppEngineHttpRequest], 0 indicates that the + // request has the default deadline. The default deadline depends on the + // [scaling + // type](https://cloud.google.com/appengine/docs/standard/go/how-instances-are-managed#instance_scaling) + // of the service: 10 minutes for standard apps with automatic scaling, 24 + // hours for standard apps with manual and basic scaling, and 60 minutes for + // flex apps. If the request deadline is set, it must be in the interval [15 + // seconds, 24 hours 15 seconds]. Regardless of the task's + // `dispatch_deadline`, the app handler will not run for longer than than + // the service's timeout. We recommend setting the `dispatch_deadline` to + // at most a few seconds more than the app handler's timeout. For more + // information see + // [Timeouts](https://cloud.google.com/tasks/docs/creating-appengine-handlers#timeouts). + // + // `dispatch_deadline` will be truncated to the nearest millisecond. The + // deadline is an approximate deadline. + DispatchDeadline *duration.Duration `protobuf:"bytes,12,opt,name=dispatch_deadline,json=dispatchDeadline,proto3" json:"dispatch_deadline,omitempty"` + // Output only. The number of attempts dispatched. + // + // This count includes attempts which have been dispatched but haven't + // received a response. + DispatchCount int32 `protobuf:"varint,6,opt,name=dispatch_count,json=dispatchCount,proto3" json:"dispatch_count,omitempty"` + // Output only. The number of attempts which have received a response. 
+ ResponseCount int32 `protobuf:"varint,7,opt,name=response_count,json=responseCount,proto3" json:"response_count,omitempty"` + // Output only. The status of the task's first attempt. + // + // Only [dispatch_time][google.cloud.tasks.v2beta3.Attempt.dispatch_time] will be set. + // The other [Attempt][google.cloud.tasks.v2beta3.Attempt] information is not retained by Cloud Tasks. + FirstAttempt *Attempt `protobuf:"bytes,8,opt,name=first_attempt,json=firstAttempt,proto3" json:"first_attempt,omitempty"` + // Output only. The status of the task's last attempt. + LastAttempt *Attempt `protobuf:"bytes,9,opt,name=last_attempt,json=lastAttempt,proto3" json:"last_attempt,omitempty"` + // Output only. The view specifies which subset of the [Task][google.cloud.tasks.v2beta3.Task] has + // been returned. + View Task_View `protobuf:"varint,10,opt,name=view,proto3,enum=google.cloud.tasks.v2beta3.Task_View" json:"view,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_task_9659896bd52bfaac, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} +func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isTask_PayloadType interface { + isTask_PayloadType() +} + +type Task_AppEngineHttpRequest struct { + AppEngineHttpRequest *AppEngineHttpRequest `protobuf:"bytes,3,opt,name=app_engine_http_request,json=appEngineHttpRequest,proto3,oneof"` +} + +type Task_HttpRequest struct { + HttpRequest *HttpRequest `protobuf:"bytes,11,opt,name=http_request,json=httpRequest,proto3,oneof"` +} + +func (*Task_AppEngineHttpRequest) isTask_PayloadType() {} + +func (*Task_HttpRequest) isTask_PayloadType() {} + +func (m *Task) GetPayloadType() isTask_PayloadType { + if m != nil { + return m.PayloadType + } + return nil +} + +func (m *Task) GetAppEngineHttpRequest() *AppEngineHttpRequest { + if x, ok := m.GetPayloadType().(*Task_AppEngineHttpRequest); ok { + return x.AppEngineHttpRequest + } + return nil +} + +func (m *Task) GetHttpRequest() *HttpRequest { + if x, ok := m.GetPayloadType().(*Task_HttpRequest); ok { + return x.HttpRequest + } + return nil +} + +func (m *Task) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Task) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Task) GetDispatchDeadline() *duration.Duration { + if m != nil { + return m.DispatchDeadline + } + return nil +} + +func (m *Task) GetDispatchCount() int32 { + if m != nil { + return m.DispatchCount + } + return 0 +} + +func (m *Task) GetResponseCount() int32 { + if m != nil { + return m.ResponseCount + } + return 0 +} + +func (m *Task) GetFirstAttempt() *Attempt { + if m != nil { + return m.FirstAttempt + } + return nil +} + 
+func (m *Task) GetLastAttempt() *Attempt { + if m != nil { + return m.LastAttempt + } + return nil +} + +func (m *Task) GetView() Task_View { + if m != nil { + return m.View + } + return Task_VIEW_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Task) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Task_OneofMarshaler, _Task_OneofUnmarshaler, _Task_OneofSizer, []interface{}{ + (*Task_AppEngineHttpRequest)(nil), + (*Task_HttpRequest)(nil), + } +} + +func _Task_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Task) + // payload_type + switch x := m.PayloadType.(type) { + case *Task_AppEngineHttpRequest: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppEngineHttpRequest); err != nil { + return err + } + case *Task_HttpRequest: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpRequest); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Task.PayloadType has unexpected type %T", x) + } + return nil +} + +func _Task_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Task) + switch tag { + case 3: // payload_type.app_engine_http_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AppEngineHttpRequest) + err := b.DecodeMessage(msg) + m.PayloadType = &Task_AppEngineHttpRequest{msg} + return true, err + case 11: // payload_type.http_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HttpRequest) + err := b.DecodeMessage(msg) + m.PayloadType = &Task_HttpRequest{msg} + return true, err + default: + return false, nil + } +} + +func _Task_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Task) + // payload_type + switch x := m.PayloadType.(type) { + case *Task_AppEngineHttpRequest: + s := proto.Size(x.AppEngineHttpRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Task_HttpRequest: + s := proto.Size(x.HttpRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The status of a task attempt. +type Attempt struct { + // Output only. The time that this attempt was scheduled. + // + // `schedule_time` will be truncated to the nearest microsecond. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Output only. The time that this attempt was dispatched. + // + // `dispatch_time` will be truncated to the nearest microsecond. + DispatchTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=dispatch_time,json=dispatchTime,proto3" json:"dispatch_time,omitempty"` + // Output only. The time that this attempt response was received. + // + // `response_time` will be truncated to the nearest microsecond. + ResponseTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=response_time,json=responseTime,proto3" json:"response_time,omitempty"` + // Output only. The response from the worker for this attempt. + // + // If `response_time` is unset, then the task has not been attempted or is + // currently running and the `response_status` field is meaningless. 
+ ResponseStatus *status.Status `protobuf:"bytes,4,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Attempt) Reset() { *m = Attempt{} } +func (m *Attempt) String() string { return proto.CompactTextString(m) } +func (*Attempt) ProtoMessage() {} +func (*Attempt) Descriptor() ([]byte, []int) { + return fileDescriptor_task_9659896bd52bfaac, []int{1} +} +func (m *Attempt) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Attempt.Unmarshal(m, b) +} +func (m *Attempt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Attempt.Marshal(b, m, deterministic) +} +func (dst *Attempt) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attempt.Merge(dst, src) +} +func (m *Attempt) XXX_Size() int { + return xxx_messageInfo_Attempt.Size(m) +} +func (m *Attempt) XXX_DiscardUnknown() { + xxx_messageInfo_Attempt.DiscardUnknown(m) +} + +var xxx_messageInfo_Attempt proto.InternalMessageInfo + +func (m *Attempt) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *Attempt) GetDispatchTime() *timestamp.Timestamp { + if m != nil { + return m.DispatchTime + } + return nil +} + +func (m *Attempt) GetResponseTime() *timestamp.Timestamp { + if m != nil { + return m.ResponseTime + } + return nil +} + +func (m *Attempt) GetResponseStatus() *status.Status { + if m != nil { + return m.ResponseStatus + } + return nil +} + +func init() { + proto.RegisterType((*Task)(nil), "google.cloud.tasks.v2beta3.Task") + proto.RegisterType((*Attempt)(nil), "google.cloud.tasks.v2beta3.Attempt") + proto.RegisterEnum("google.cloud.tasks.v2beta3.Task_View", Task_View_name, Task_View_value) +} + +func init() { + proto.RegisterFile("google/cloud/tasks/v2beta3/task.proto", fileDescriptor_task_9659896bd52bfaac) +} + +var fileDescriptor_task_9659896bd52bfaac = []byte{ + // 599 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x5f, 0x4f, 0xdb, 0x3c, + 0x14, 0xc6, 0x09, 0xa4, 0x40, 0xdd, 0xc0, 0xdb, 0xd7, 0x42, 0x22, 0x54, 0x13, 0xab, 0x98, 0x10, + 0xbd, 0x4a, 0x36, 0xb8, 0x9a, 0xb8, 0x40, 0xf4, 0x9f, 0x5a, 0xa9, 0x9a, 0xaa, 0x14, 0x98, 0xb4, + 0x9b, 0xc8, 0x4d, 0x4c, 0x1a, 0x91, 0xda, 0x9e, 0xed, 0x80, 0xf8, 0x08, 0xfb, 0xc4, 0xbb, 0x9d, + 0xe2, 0xd8, 0x55, 0x19, 0xac, 0xdd, 0xee, 0x7a, 0xce, 0x79, 0x9e, 0x9f, 0x8f, 0xce, 0x39, 0x0d, + 0x38, 0x4d, 0x28, 0x4d, 0x32, 0xec, 0x47, 0x19, 0xcd, 0x63, 0x5f, 0x22, 0xf1, 0x20, 0xfc, 0xc7, + 0xf3, 0x29, 0x96, 0xe8, 0x42, 0x45, 0x1e, 0xe3, 0x54, 0x52, 0xd8, 0x28, 0x65, 0x9e, 0x92, 0x79, + 0x4a, 0xe6, 0x69, 0x59, 0xe3, 0x9d, 0x46, 0x20, 0x96, 0xfa, 0x88, 0x10, 0x2a, 0x91, 0x4c, 0x29, + 0x11, 0xa5, 0xb3, 0x71, 0xb4, 0x54, 0xe5, 0x58, 0xd0, 0x9c, 0x47, 0x58, 0x97, 0xce, 0x56, 0xbe, + 0xcd, 0x13, 0x2c, 0xb5, 0xf0, 0x58, 0x0b, 0x55, 0x34, 0xcd, 0xef, 0xfd, 0x38, 0xe7, 0xea, 0x11, + 0x5d, 0x7f, 0xff, 0x7b, 0x5d, 0xa6, 0x73, 0x2c, 0x24, 0x9a, 0x33, 0x2d, 0x38, 0xd4, 0x02, 0xce, + 0x22, 0x5f, 0x48, 0x24, 0x73, 0xdd, 0xdd, 0xc9, 0xcf, 0x0a, 0xb0, 0x6f, 0x90, 0x78, 0x80, 0x10, + 0xd8, 0x04, 0xcd, 0xb1, 0x6b, 0x35, 0xad, 0x56, 0x35, 0x50, 0xbf, 0x61, 0x0a, 0x0e, 0x11, 0x63, + 0x21, 0x26, 0x49, 0x4a, 0x70, 0x38, 0x93, 0x92, 0x85, 0x1c, 0x7f, 0xcf, 0xb1, 0x90, 0xee, 0x56, + 0xd3, 0x6a, 0xd5, 0xce, 0x3f, 0x7a, 0x7f, 0x1e, 0x8b, 0x77, 0xcd, 0x58, 0x4f, 0x39, 0x07, 0x52, + 0xb2, 0xa0, 0xf4, 0x0d, 
0x36, 0x82, 0x03, 0xf4, 0x46, 0x1e, 0x8e, 0x80, 0xf3, 0x82, 0x5f, 0x53, + 0xfc, 0xb3, 0x55, 0xfc, 0x97, 0xd8, 0xda, 0x6c, 0x89, 0x76, 0x05, 0xf6, 0x44, 0x34, 0xc3, 0x71, + 0x9e, 0xe1, 0xb0, 0x18, 0x85, 0x6b, 0x2b, 0x5c, 0xc3, 0xe0, 0xcc, 0x9c, 0xbc, 0x1b, 0x33, 0xa7, + 0xc0, 0x31, 0x86, 0x22, 0x05, 0x2f, 0x41, 0x2d, 0xe2, 0x18, 0x49, 0x6d, 0xaf, 0xac, 0xb5, 0x83, + 0x52, 0xae, 0xcc, 0x7d, 0xf0, 0x7f, 0x9c, 0x0a, 0x86, 0x64, 0x34, 0x0b, 0x63, 0x8c, 0xe2, 0x2c, + 0x25, 0xd8, 0x75, 0x14, 0xe2, 0xe8, 0x15, 0xa2, 0xab, 0x37, 0x19, 0xd4, 0x8d, 0xa7, 0xab, 0x2d, + 0xf0, 0x14, 0xec, 0x2f, 0x38, 0x11, 0xcd, 0x89, 0x74, 0xb7, 0x9b, 0x56, 0xab, 0x12, 0xec, 0x99, + 0x6c, 0xa7, 0x48, 0x16, 0x32, 0x8e, 0x05, 0xa3, 0x44, 0x60, 0x2d, 0xdb, 0x29, 0x65, 0x26, 0x5b, + 0xca, 0x06, 0x60, 0xef, 0x3e, 0xe5, 0x42, 0x86, 0x48, 0x4a, 0x3c, 0x67, 0xd2, 0xdd, 0x55, 0x1d, + 0x7d, 0x58, 0xb9, 0xc2, 0x52, 0x1a, 0x38, 0xca, 0xa9, 0x23, 0xd8, 0x07, 0x4e, 0x86, 0x96, 0x40, + 0xd5, 0xbf, 0x07, 0xd5, 0x0a, 0xa3, 0xe1, 0x7c, 0x06, 0xf6, 0x63, 0x8a, 0x9f, 0x5c, 0xd0, 0xb4, + 0x5a, 0xfb, 0xe7, 0xa7, 0xab, 0xfc, 0xc5, 0x89, 0x7a, 0x77, 0x29, 0x7e, 0x0a, 0x94, 0xe5, 0xe4, + 0x13, 0xb0, 0x8b, 0x08, 0x1e, 0x80, 0xfa, 0xdd, 0xb0, 0xf7, 0x35, 0xbc, 0xfd, 0x32, 0x19, 0xf7, + 0x3a, 0xc3, 0xfe, 0xb0, 0xd7, 0xad, 0x6f, 0xc0, 0x2a, 0xa8, 0xb4, 0xaf, 0x27, 0xc3, 0x4e, 0xdd, + 0x82, 0xbb, 0xc0, 0xee, 0xdf, 0x8e, 0x46, 0xf5, 0xcd, 0xf6, 0x3e, 0x70, 0x18, 0x7a, 0xce, 0x28, + 0x8a, 0x43, 0xf9, 0xcc, 0xf0, 0xc9, 0x8f, 0x4d, 0xb0, 0x63, 0x3a, 0x79, 0x75, 0x2f, 0xd6, 0x3f, + 0xde, 0xcb, 0x15, 0x58, 0x2c, 0xa5, 0x04, 0x6c, 0xae, 0x07, 0x18, 0x83, 0x01, 0x2c, 0x96, 0xa8, + 0x00, 0x5b, 0xeb, 0x01, 0xc6, 0xa0, 0x2f, 0xf6, 0xbf, 0x05, 0xa0, 0xfc, 0x87, 0xeb, 0xa3, 0x87, + 0x06, 0xc1, 0x59, 0xe4, 0x4d, 0x54, 0x25, 0x58, 0x1c, 0x4c, 0x19, 0xb7, 0x09, 0x38, 0x8e, 0xe8, + 0x7c, 0xc5, 0x02, 0xda, 0xd5, 0x62, 0x03, 0xe3, 0xa2, 0x89, 0xb1, 0xf5, 0xed, 0x4a, 0x0b, 0x13, + 0x9a, 0x21, 0x92, 0x78, 0x94, 0x27, 0x7e, 0x82, 0x89, 0x6a, 0xd1, 0x2f, 0x4b, 0x88, 0xa5, 0xe2, + 0xad, 0xcf, 0xda, 0xa5, 0x8a, 0xa6, 0xdb, 0x4a, 0x7b, 0xf1, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x9c, + 0x28, 0xef, 0x11, 0x7d, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1/cloud_tts.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1/cloud_tts.pb.go new file mode 100644 index 0000000..6674c81 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1/cloud_tts.pb.go @@ -0,0 +1,874 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/texttospeech/v1/cloud_tts.proto + +package texttospeech // import "google.golang.org/genproto/googleapis/cloud/texttospeech/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). 
+type SsmlVoiceGender int32 + +const ( + // An unspecified gender. + // In VoiceSelectionParams, this means that the client doesn't care which + // gender the selected voice will have. In the Voice field of + // ListVoicesResponse, this may mean that the voice doesn't fit any of the + // other categories in this enum, or that the gender of the voice isn't known. + SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED SsmlVoiceGender = 0 + // A male voice. + SsmlVoiceGender_MALE SsmlVoiceGender = 1 + // A female voice. + SsmlVoiceGender_FEMALE SsmlVoiceGender = 2 + // A gender-neutral voice. + SsmlVoiceGender_NEUTRAL SsmlVoiceGender = 3 +) + +var SsmlVoiceGender_name = map[int32]string{ + 0: "SSML_VOICE_GENDER_UNSPECIFIED", + 1: "MALE", + 2: "FEMALE", + 3: "NEUTRAL", +} +var SsmlVoiceGender_value = map[string]int32{ + "SSML_VOICE_GENDER_UNSPECIFIED": 0, + "MALE": 1, + "FEMALE": 2, + "NEUTRAL": 3, +} + +func (x SsmlVoiceGender) String() string { + return proto.EnumName(SsmlVoiceGender_name, int32(x)) +} +func (SsmlVoiceGender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{0} +} + +// Configuration to set up audio encoder. The encoding determines the output +// audio format that we'd like. +type AudioEncoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioEncoding_AUDIO_ENCODING_UNSPECIFIED AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + AudioEncoding_LINEAR16 AudioEncoding = 1 + // MP3 audio. + AudioEncoding_MP3 AudioEncoding = 2 + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + AudioEncoding_OGG_OPUS AudioEncoding = 3 +) + +var AudioEncoding_name = map[int32]string{ + 0: "AUDIO_ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "MP3", + 3: "OGG_OPUS", +} +var AudioEncoding_value = map[string]int32{ + "AUDIO_ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "MP3": 2, + "OGG_OPUS": 3, +} + +func (x AudioEncoding) String() string { + return proto.EnumName(AudioEncoding_name, int32(x)) +} +func (AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{1} +} + +// The top-level message sent by the client for the `ListVoices` method. +type ListVoicesRequest struct { + // Optional (but recommended) + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. If + // specified, the ListVoices call will only return voices that can be used to + // synthesize this language_code. E.g. when specifying "en-NZ", you will get + // supported "en-*" voices; when specifying "no", you will get supported + // "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices; specifying "zh" + // will also get supported "cmn-*" voices; specifying "zh-hk" will also get + // supported "yue-*" voices. 
+ LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVoicesRequest) Reset() { *m = ListVoicesRequest{} } +func (m *ListVoicesRequest) String() string { return proto.CompactTextString(m) } +func (*ListVoicesRequest) ProtoMessage() {} +func (*ListVoicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{0} +} +func (m *ListVoicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVoicesRequest.Unmarshal(m, b) +} +func (m *ListVoicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVoicesRequest.Marshal(b, m, deterministic) +} +func (dst *ListVoicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVoicesRequest.Merge(dst, src) +} +func (m *ListVoicesRequest) XXX_Size() int { + return xxx_messageInfo_ListVoicesRequest.Size(m) +} +func (m *ListVoicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListVoicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVoicesRequest proto.InternalMessageInfo + +func (m *ListVoicesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The message returned to the client by the `ListVoices` method. +type ListVoicesResponse struct { + // The list of voices. + Voices []*Voice `protobuf:"bytes,1,rep,name=voices,proto3" json:"voices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVoicesResponse) Reset() { *m = ListVoicesResponse{} } +func (m *ListVoicesResponse) String() string { return proto.CompactTextString(m) } +func (*ListVoicesResponse) ProtoMessage() {} +func (*ListVoicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{1} +} +func (m *ListVoicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVoicesResponse.Unmarshal(m, b) +} +func (m *ListVoicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVoicesResponse.Marshal(b, m, deterministic) +} +func (dst *ListVoicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVoicesResponse.Merge(dst, src) +} +func (m *ListVoicesResponse) XXX_Size() int { + return xxx_messageInfo_ListVoicesResponse.Size(m) +} +func (m *ListVoicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListVoicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVoicesResponse proto.InternalMessageInfo + +func (m *ListVoicesResponse) GetVoices() []*Voice { + if m != nil { + return m.Voices + } + return nil +} + +// Description of a voice supported by the TTS service. +type Voice struct { + // The languages that this voice supports, expressed as + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. + // "en-US", "es-419", "cmn-tw"). + LanguageCodes []string `protobuf:"bytes,1,rep,name=language_codes,json=languageCodes,proto3" json:"language_codes,omitempty"` + // The name of this voice. Each distinct voice has a unique name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The gender of this voice. 
+ SsmlGender SsmlVoiceGender `protobuf:"varint,3,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.texttospeech.v1.SsmlVoiceGender" json:"ssml_gender,omitempty"` + // The natural sample rate (in hertz) for this voice. + NaturalSampleRateHertz int32 `protobuf:"varint,4,opt,name=natural_sample_rate_hertz,json=naturalSampleRateHertz,proto3" json:"natural_sample_rate_hertz,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Voice) Reset() { *m = Voice{} } +func (m *Voice) String() string { return proto.CompactTextString(m) } +func (*Voice) ProtoMessage() {} +func (*Voice) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{2} +} +func (m *Voice) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Voice.Unmarshal(m, b) +} +func (m *Voice) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Voice.Marshal(b, m, deterministic) +} +func (dst *Voice) XXX_Merge(src proto.Message) { + xxx_messageInfo_Voice.Merge(dst, src) +} +func (m *Voice) XXX_Size() int { + return xxx_messageInfo_Voice.Size(m) +} +func (m *Voice) XXX_DiscardUnknown() { + xxx_messageInfo_Voice.DiscardUnknown(m) +} + +var xxx_messageInfo_Voice proto.InternalMessageInfo + +func (m *Voice) GetLanguageCodes() []string { + if m != nil { + return m.LanguageCodes + } + return nil +} + +func (m *Voice) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Voice) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +func (m *Voice) GetNaturalSampleRateHertz() int32 { + if m != nil { + return m.NaturalSampleRateHertz + } + return 0 +} + +// The top-level message sent by the client for the `SynthesizeSpeech` method. +type SynthesizeSpeechRequest struct { + // Required. The Synthesizer requires either plain text or SSML as input. + Input *SynthesisInput `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` + // Required. The desired voice of the synthesized audio. + Voice *VoiceSelectionParams `protobuf:"bytes,2,opt,name=voice,proto3" json:"voice,omitempty"` + // Required. The configuration of the synthesized audio. 
+ AudioConfig *AudioConfig `protobuf:"bytes,3,opt,name=audio_config,json=audioConfig,proto3" json:"audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechRequest) Reset() { *m = SynthesizeSpeechRequest{} } +func (m *SynthesizeSpeechRequest) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechRequest) ProtoMessage() {} +func (*SynthesizeSpeechRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{3} +} +func (m *SynthesizeSpeechRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechRequest.Unmarshal(m, b) +} +func (m *SynthesizeSpeechRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechRequest.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechRequest.Merge(dst, src) +} +func (m *SynthesizeSpeechRequest) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechRequest.Size(m) +} +func (m *SynthesizeSpeechRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechRequest proto.InternalMessageInfo + +func (m *SynthesizeSpeechRequest) GetInput() *SynthesisInput { + if m != nil { + return m.Input + } + return nil +} + +func (m *SynthesizeSpeechRequest) GetVoice() *VoiceSelectionParams { + if m != nil { + return m.Voice + } + return nil +} + +func (m *SynthesizeSpeechRequest) GetAudioConfig() *AudioConfig { + if m != nil { + return m.AudioConfig + } + return nil +} + +// Contains text input to be synthesized. Either `text` or `ssml` must be +// supplied. Supplying both or neither returns +// [google.rpc.Code.INVALID_ARGUMENT][]. The input size is limited to 5000 +// characters. +type SynthesisInput struct { + // The input source, which is either plain text or SSML. 
+ // + // Types that are valid to be assigned to InputSource: + // *SynthesisInput_Text + // *SynthesisInput_Ssml + InputSource isSynthesisInput_InputSource `protobuf_oneof:"input_source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesisInput) Reset() { *m = SynthesisInput{} } +func (m *SynthesisInput) String() string { return proto.CompactTextString(m) } +func (*SynthesisInput) ProtoMessage() {} +func (*SynthesisInput) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{4} +} +func (m *SynthesisInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesisInput.Unmarshal(m, b) +} +func (m *SynthesisInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesisInput.Marshal(b, m, deterministic) +} +func (dst *SynthesisInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesisInput.Merge(dst, src) +} +func (m *SynthesisInput) XXX_Size() int { + return xxx_messageInfo_SynthesisInput.Size(m) +} +func (m *SynthesisInput) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesisInput.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesisInput proto.InternalMessageInfo + +type isSynthesisInput_InputSource interface { + isSynthesisInput_InputSource() +} + +type SynthesisInput_Text struct { + Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type SynthesisInput_Ssml struct { + Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3,oneof"` +} + +func (*SynthesisInput_Text) isSynthesisInput_InputSource() {} + +func (*SynthesisInput_Ssml) isSynthesisInput_InputSource() {} + +func (m *SynthesisInput) GetInputSource() isSynthesisInput_InputSource { + if m != nil { + return m.InputSource + } + return nil +} + +func (m *SynthesisInput) GetText() string { + if x, ok := m.GetInputSource().(*SynthesisInput_Text); ok { + return x.Text + } + return "" +} + +func (m *SynthesisInput) GetSsml() string { + if x, ok := m.GetInputSource().(*SynthesisInput_Ssml); ok { + return x.Ssml + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SynthesisInput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SynthesisInput_OneofMarshaler, _SynthesisInput_OneofUnmarshaler, _SynthesisInput_OneofSizer, []interface{}{ + (*SynthesisInput_Text)(nil), + (*SynthesisInput_Ssml)(nil), + } +} + +func _SynthesisInput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SynthesisInput) + // input_source + switch x := m.InputSource.(type) { + case *SynthesisInput_Text: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case *SynthesisInput_Ssml: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Ssml) + case nil: + default: + return fmt.Errorf("SynthesisInput.InputSource has unexpected type %T", x) + } + return nil +} + +func _SynthesisInput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SynthesisInput) + switch tag { + case 1: // input_source.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.InputSource = &SynthesisInput_Text{x} + return true, err + case 2: // input_source.ssml + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.InputSource = &SynthesisInput_Ssml{x} + return true, err + default: + return false, nil + } +} + +func _SynthesisInput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SynthesisInput) + // input_source + switch x := m.InputSource.(type) { + case *SynthesisInput_Text: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case *SynthesisInput_Ssml: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Ssml))) + n += len(x.Ssml) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Description of which voice to use for a synthesis request. +type VoiceSelectionParams struct { + // The language (and optionally also the region) of the voice expressed as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag, e.g. + // "en-US". Required. This should not include a script tag (e.g. use + // "cmn-cn" rather than "cmn-Hant-cn"), because the script will be inferred + // from the input provided in the SynthesisInput. The TTS service + // will use this parameter to help choose an appropriate voice. Note that + // the TTS service may choose a voice with a slightly different language code + // than the one selected; it may substitute a different region + // (e.g. using en-US rather than en-CA if there isn't a Canadian voice + // available), or even a different language, e.g. using "nb" (Norwegian + // Bokmal) instead of "no" (Norwegian)". + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The name of the voice. Optional; if not set, the service will choose a + // voice based on the other parameters such as language_code and gender. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The preferred gender of the voice. Optional; if not set, the service will + // choose a voice based on the other parameters such as language_code and + // name. 
Note that this is only a preference, not requirement; if a + // voice of the appropriate gender is not available, the synthesizer should + // substitute a voice with a different gender rather than failing the request. + SsmlGender SsmlVoiceGender `protobuf:"varint,3,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.texttospeech.v1.SsmlVoiceGender" json:"ssml_gender,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VoiceSelectionParams) Reset() { *m = VoiceSelectionParams{} } +func (m *VoiceSelectionParams) String() string { return proto.CompactTextString(m) } +func (*VoiceSelectionParams) ProtoMessage() {} +func (*VoiceSelectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{5} +} +func (m *VoiceSelectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VoiceSelectionParams.Unmarshal(m, b) +} +func (m *VoiceSelectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VoiceSelectionParams.Marshal(b, m, deterministic) +} +func (dst *VoiceSelectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_VoiceSelectionParams.Merge(dst, src) +} +func (m *VoiceSelectionParams) XXX_Size() int { + return xxx_messageInfo_VoiceSelectionParams.Size(m) +} +func (m *VoiceSelectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_VoiceSelectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_VoiceSelectionParams proto.InternalMessageInfo + +func (m *VoiceSelectionParams) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *VoiceSelectionParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VoiceSelectionParams) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +// Description of audio data to be synthesized. +type AudioConfig struct { + // Required. The format of the requested audio byte stream. + AudioEncoding AudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.texttospeech.v1.AudioEncoding" json:"audio_encoding,omitempty"` + // Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal + // native speed supported by the specific voice. 2.0 is twice as fast, and + // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any + // other values < 0.25 or > 4.0 will return an error. + SpeakingRate float64 `protobuf:"fixed64,2,opt,name=speaking_rate,json=speakingRate,proto3" json:"speaking_rate,omitempty"` + // Optional speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 + // semitones from the original pitch. -20 means decrease 20 semitones from the + // original pitch. + Pitch float64 `protobuf:"fixed64,3,opt,name=pitch,proto3" json:"pitch,omitempty"` + // Optional volume gain (in dB) of the normal native volume supported by the + // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of + // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) + // will play at approximately half the amplitude of the normal native signal + // amplitude. A value of +6.0 (dB) will play at approximately twice the + // amplitude of the normal native signal amplitude. Strongly recommend not to + // exceed +10 (dB) as there's usually no effective increase in loudness for + // any value greater than that. 
+ VolumeGainDb float64 `protobuf:"fixed64,4,opt,name=volume_gain_db,json=volumeGainDb,proto3" json:"volume_gain_db,omitempty"` + // The synthesis sample rate (in hertz) for this audio. Optional. If this is + // different from the voice's natural sample rate, then the synthesizer will + // honor this request by converting to the desired sample rate (which might + // result in worse audio quality), unless the specified sample rate is not + // supported for the encoding chosen, in which case it will fail the request + // and return [google.rpc.Code.INVALID_ARGUMENT][]. + SampleRateHertz int32 `protobuf:"varint,5,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // An identifier which selects 'audio effects' profiles that are applied on + // (post synthesized) text to speech. + // Effects are applied on top of each other in the order they are given. + // See + // + // [audio-profiles](https: + // //cloud.google.com/text-to-speech/docs/audio-profiles) + // for current supported profile ids. + EffectsProfileId []string `protobuf:"bytes,6,rep,name=effects_profile_id,json=effectsProfileId,proto3" json:"effects_profile_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioConfig) Reset() { *m = AudioConfig{} } +func (m *AudioConfig) String() string { return proto.CompactTextString(m) } +func (*AudioConfig) ProtoMessage() {} +func (*AudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{6} +} +func (m *AudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioConfig.Unmarshal(m, b) +} +func (m *AudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioConfig.Marshal(b, m, deterministic) +} +func (dst *AudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioConfig.Merge(dst, src) +} +func (m *AudioConfig) XXX_Size() int { + return xxx_messageInfo_AudioConfig.Size(m) +} +func (m *AudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioConfig proto.InternalMessageInfo + +func (m *AudioConfig) GetAudioEncoding() AudioEncoding { + if m != nil { + return m.AudioEncoding + } + return AudioEncoding_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *AudioConfig) GetSpeakingRate() float64 { + if m != nil { + return m.SpeakingRate + } + return 0 +} + +func (m *AudioConfig) GetPitch() float64 { + if m != nil { + return m.Pitch + } + return 0 +} + +func (m *AudioConfig) GetVolumeGainDb() float64 { + if m != nil { + return m.VolumeGainDb + } + return 0 +} + +func (m *AudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *AudioConfig) GetEffectsProfileId() []string { + if m != nil { + return m.EffectsProfileId + } + return nil +} + +// The message returned to the client by the `SynthesizeSpeech` method. +type SynthesizeSpeechResponse struct { + // The audio data bytes encoded as specified in the request, including the + // header (For LINEAR16 audio, we include the WAV header). Note: as + // with all bytes fields, protobuffers use a pure binary representation, + // whereas JSON representations use base64. 
+ AudioContent []byte `protobuf:"bytes,1,opt,name=audio_content,json=audioContent,proto3" json:"audio_content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechResponse) Reset() { *m = SynthesizeSpeechResponse{} } +func (m *SynthesizeSpeechResponse) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechResponse) ProtoMessage() {} +func (*SynthesizeSpeechResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_aab68875a18aec90, []int{7} +} +func (m *SynthesizeSpeechResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechResponse.Unmarshal(m, b) +} +func (m *SynthesizeSpeechResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechResponse.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechResponse.Merge(dst, src) +} +func (m *SynthesizeSpeechResponse) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechResponse.Size(m) +} +func (m *SynthesizeSpeechResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechResponse proto.InternalMessageInfo + +func (m *SynthesizeSpeechResponse) GetAudioContent() []byte { + if m != nil { + return m.AudioContent + } + return nil +} + +func init() { + proto.RegisterType((*ListVoicesRequest)(nil), "google.cloud.texttospeech.v1.ListVoicesRequest") + proto.RegisterType((*ListVoicesResponse)(nil), "google.cloud.texttospeech.v1.ListVoicesResponse") + proto.RegisterType((*Voice)(nil), "google.cloud.texttospeech.v1.Voice") + proto.RegisterType((*SynthesizeSpeechRequest)(nil), "google.cloud.texttospeech.v1.SynthesizeSpeechRequest") + proto.RegisterType((*SynthesisInput)(nil), "google.cloud.texttospeech.v1.SynthesisInput") + proto.RegisterType((*VoiceSelectionParams)(nil), "google.cloud.texttospeech.v1.VoiceSelectionParams") + proto.RegisterType((*AudioConfig)(nil), "google.cloud.texttospeech.v1.AudioConfig") + proto.RegisterType((*SynthesizeSpeechResponse)(nil), "google.cloud.texttospeech.v1.SynthesizeSpeechResponse") + proto.RegisterEnum("google.cloud.texttospeech.v1.SsmlVoiceGender", SsmlVoiceGender_name, SsmlVoiceGender_value) + proto.RegisterEnum("google.cloud.texttospeech.v1.AudioEncoding", AudioEncoding_name, AudioEncoding_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TextToSpeechClient is the client API for TextToSpeech service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TextToSpeechClient interface { + // Returns a list of Voice supported for synthesis. + ListVoices(ctx context.Context, in *ListVoicesRequest, opts ...grpc.CallOption) (*ListVoicesResponse, error) + // Synthesizes speech synchronously: receive results after all text input + // has been processed. 
+ SynthesizeSpeech(ctx context.Context, in *SynthesizeSpeechRequest, opts ...grpc.CallOption) (*SynthesizeSpeechResponse, error) +} + +type textToSpeechClient struct { + cc *grpc.ClientConn +} + +func NewTextToSpeechClient(cc *grpc.ClientConn) TextToSpeechClient { + return &textToSpeechClient{cc} +} + +func (c *textToSpeechClient) ListVoices(ctx context.Context, in *ListVoicesRequest, opts ...grpc.CallOption) (*ListVoicesResponse, error) { + out := new(ListVoicesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.texttospeech.v1.TextToSpeech/ListVoices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *textToSpeechClient) SynthesizeSpeech(ctx context.Context, in *SynthesizeSpeechRequest, opts ...grpc.CallOption) (*SynthesizeSpeechResponse, error) { + out := new(SynthesizeSpeechResponse) + err := c.cc.Invoke(ctx, "/google.cloud.texttospeech.v1.TextToSpeech/SynthesizeSpeech", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TextToSpeechServer is the server API for TextToSpeech service. +type TextToSpeechServer interface { + // Returns a list of Voice supported for synthesis. + ListVoices(context.Context, *ListVoicesRequest) (*ListVoicesResponse, error) + // Synthesizes speech synchronously: receive results after all text input + // has been processed. + SynthesizeSpeech(context.Context, *SynthesizeSpeechRequest) (*SynthesizeSpeechResponse, error) +} + +func RegisterTextToSpeechServer(s *grpc.Server, srv TextToSpeechServer) { + s.RegisterService(&_TextToSpeech_serviceDesc, srv) +} + +func _TextToSpeech_ListVoices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListVoicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TextToSpeechServer).ListVoices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.texttospeech.v1.TextToSpeech/ListVoices", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TextToSpeechServer).ListVoices(ctx, req.(*ListVoicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TextToSpeech_SynthesizeSpeech_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SynthesizeSpeechRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TextToSpeechServer).SynthesizeSpeech(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.texttospeech.v1.TextToSpeech/SynthesizeSpeech", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TextToSpeechServer).SynthesizeSpeech(ctx, req.(*SynthesizeSpeechRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TextToSpeech_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.texttospeech.v1.TextToSpeech", + HandlerType: (*TextToSpeechServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListVoices", + Handler: _TextToSpeech_ListVoices_Handler, + }, + { + MethodName: "SynthesizeSpeech", + Handler: _TextToSpeech_SynthesizeSpeech_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/texttospeech/v1/cloud_tts.proto", +} + +func init() { + proto.RegisterFile("google/cloud/texttospeech/v1/cloud_tts.proto", fileDescriptor_cloud_tts_aab68875a18aec90) 
+} + +var fileDescriptor_cloud_tts_aab68875a18aec90 = []byte{ + // 897 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0x41, 0x6f, 0x1b, 0x45, + 0x14, 0xee, 0xd8, 0x71, 0xda, 0x3e, 0xaf, 0x5d, 0x67, 0x88, 0xc0, 0x44, 0xa5, 0x32, 0x1b, 0x90, + 0x42, 0x08, 0x36, 0x76, 0x45, 0x05, 0xed, 0x01, 0x39, 0xf6, 0xd6, 0xb1, 0xe4, 0xd8, 0x66, 0x9c, + 0x04, 0x09, 0x45, 0x5a, 0x4d, 0xd6, 0x93, 0xcd, 0x8a, 0xf5, 0xcc, 0xe2, 0x19, 0x47, 0xa5, 0x47, + 0xf8, 0x09, 0x5c, 0x91, 0x10, 0x57, 0x7e, 0x01, 0x67, 0x8e, 0x48, 0x9c, 0xf8, 0x0b, 0xfc, 0x05, + 0x24, 0x8e, 0x68, 0x66, 0xd6, 0xad, 0x93, 0x54, 0xc6, 0x5c, 0x7a, 0xdb, 0x79, 0x6f, 0xbe, 0x37, + 0xdf, 0xfb, 0xf6, 0x9b, 0x37, 0xb0, 0x17, 0x0a, 0x11, 0xc6, 0xac, 0x16, 0xc4, 0x62, 0x36, 0xae, + 0x29, 0xf6, 0x4c, 0x29, 0x21, 0x13, 0xc6, 0x82, 0x8b, 0xda, 0x65, 0xdd, 0x46, 0x7d, 0xa5, 0x64, + 0x35, 0x99, 0x0a, 0x25, 0xf0, 0x7d, 0xbb, 0xbb, 0x6a, 0xe2, 0xd5, 0xc5, 0xdd, 0xd5, 0xcb, 0xfa, + 0x56, 0x9a, 0xad, 0xd1, 0x24, 0xaa, 0x51, 0xce, 0x85, 0xa2, 0x2a, 0x12, 0x3c, 0xc5, 0xba, 0x9f, + 0xc2, 0x46, 0x2f, 0x92, 0xea, 0x44, 0x44, 0x01, 0x93, 0x84, 0x7d, 0x33, 0x63, 0x52, 0xe1, 0x6d, + 0x28, 0xc4, 0x94, 0x87, 0x33, 0x1a, 0x32, 0x3f, 0x10, 0x63, 0x56, 0x46, 0x15, 0xb4, 0x73, 0x97, + 0x38, 0xf3, 0x60, 0x4b, 0x8c, 0x99, 0xfb, 0x05, 0xe0, 0x45, 0xa4, 0x4c, 0x04, 0x97, 0x0c, 0x3f, + 0x81, 0xf5, 0x4b, 0x13, 0x29, 0xa3, 0x4a, 0x76, 0x27, 0xdf, 0xd8, 0xae, 0x2e, 0x23, 0x57, 0x35, + 0x68, 0x92, 0x42, 0xdc, 0x3f, 0x10, 0xe4, 0x4c, 0x04, 0xbf, 0x0f, 0xc5, 0x2b, 0x0c, 0x6c, 0xb9, + 0xbb, 0xa4, 0xb0, 0x48, 0x41, 0x62, 0x0c, 0x6b, 0x9c, 0x4e, 0x58, 0x39, 0x63, 0xf8, 0x99, 0x6f, + 0xdc, 0x87, 0xbc, 0x94, 0x93, 0xd8, 0x0f, 0x19, 0x1f, 0xb3, 0x69, 0x39, 0x5b, 0x41, 0x3b, 0xc5, + 0xc6, 0x47, 0xcb, 0x69, 0x8c, 0xe4, 0x24, 0x36, 0x07, 0x77, 0x0c, 0x88, 0x80, 0xae, 0x60, 0xbf, + 0xf1, 0x67, 0xf0, 0x36, 0xa7, 0x6a, 0x36, 0xa5, 0xb1, 0x2f, 0xe9, 0x24, 0x89, 0x99, 0x3f, 0xa5, + 0x8a, 0xf9, 0x17, 0x6c, 0xaa, 0x9e, 0x97, 0xd7, 0x2a, 0x68, 0x27, 0x47, 0xde, 0x4c, 0x37, 0x8c, + 0x4c, 0x9e, 0x50, 0xc5, 0x0e, 0x74, 0xd6, 0xfd, 0x1b, 0xc1, 0x5b, 0xa3, 0x6f, 0xb9, 0xba, 0x60, + 0x32, 0x7a, 0xce, 0x46, 0xe6, 0xb8, 0xb9, 0xc6, 0xfb, 0x90, 0x8b, 0x78, 0x32, 0x53, 0x46, 0xdb, + 0x7c, 0x63, 0xef, 0x3f, 0x08, 0xa6, 0x55, 0x64, 0x57, 0x63, 0x88, 0x85, 0xe2, 0x03, 0xc8, 0x19, + 0xe5, 0x4c, 0xff, 0xf9, 0x46, 0x63, 0x05, 0xad, 0x47, 0x2c, 0x66, 0x81, 0x36, 0xc0, 0x90, 0x4e, + 0xe9, 0x44, 0x12, 0x5b, 0x00, 0xf7, 0xc0, 0xa1, 0xb3, 0x71, 0x24, 0xfc, 0x40, 0xf0, 0xf3, 0x28, + 0x34, 0xaa, 0xe5, 0x1b, 0x1f, 0x2c, 0x2f, 0xd8, 0xd4, 0x88, 0x96, 0x01, 0x90, 0x3c, 0x7d, 0xb9, + 0x70, 0x7b, 0x50, 0xbc, 0x4a, 0x18, 0x6f, 0xc2, 0x9a, 0x46, 0x5b, 0x23, 0x1d, 0xdc, 0x22, 0x66, + 0xa5, 0xa3, 0x5a, 0x68, 0xfb, 0xfb, 0x74, 0x54, 0xaf, 0xf6, 0x8b, 0xe0, 0x98, 0xf6, 0x7c, 0x29, + 0x66, 0xd3, 0x80, 0xb9, 0x3f, 0x21, 0xd8, 0x7c, 0x15, 0xf7, 0x95, 0x6c, 0xfa, 0x3a, 0x2c, 0xe2, + 0xfe, 0x98, 0x81, 0xfc, 0x82, 0x18, 0x98, 0x40, 0xd1, 0xaa, 0xc9, 0x78, 0x20, 0xc6, 0x11, 0x0f, + 0x0d, 0xb3, 0x62, 0xe3, 0xc3, 0x15, 0xf4, 0xf4, 0x52, 0x08, 0x29, 0xd0, 0xc5, 0xa5, 0x6e, 0x56, + 0x26, 0x8c, 0x7e, 0x1d, 0xf1, 0xd0, 0x18, 0xd0, 0x34, 0x84, 0x88, 0x33, 0x0f, 0x6a, 0xd7, 0xe1, + 0x4d, 0xc8, 0x25, 0x91, 0x0a, 0x2e, 0x4c, 0x4b, 0x88, 0xd8, 0x05, 0x7e, 0x0f, 0x8a, 0x97, 0x22, + 0x9e, 0x4d, 0x98, 0x1f, 0xd2, 0x88, 0xfb, 0xe3, 0x33, 0x63, 0x5b, 0x44, 0x1c, 0x1b, 0xed, 0xd0, + 0x88, 0xb7, 0xcf, 0xf0, 0x2e, 0x6c, 0xdc, 0xf4, 0x77, 0xce, 0xf8, 0xfb, 0x9e, 0xbc, 0x6a, 0x6c, + 0xbc, 0x07, 
0x98, 0x9d, 0x9f, 0xb3, 0x40, 0x49, 0x3f, 0x99, 0x8a, 0xf3, 0x28, 0x66, 0x7e, 0x34, + 0x2e, 0xaf, 0x9b, 0x2b, 0x5a, 0x4a, 0x33, 0x43, 0x9b, 0xe8, 0x8e, 0xdd, 0xcf, 0xa1, 0x7c, 0xf3, + 0x16, 0xa4, 0xf3, 0x62, 0x1b, 0x0a, 0x2f, 0x8c, 0xa7, 0x18, 0xb7, 0x0e, 0x71, 0x88, 0x33, 0xb7, + 0x93, 0x8e, 0xed, 0x7e, 0x09, 0xf7, 0xae, 0xc9, 0x8f, 0xdf, 0x85, 0x77, 0x46, 0xa3, 0xc3, 0x9e, + 0x7f, 0x32, 0xe8, 0xb6, 0x3c, 0xbf, 0xe3, 0xf5, 0xdb, 0x1e, 0xf1, 0x8f, 0xfb, 0xa3, 0xa1, 0xd7, + 0xea, 0x3e, 0xed, 0x7a, 0xed, 0xd2, 0x2d, 0x7c, 0x07, 0xd6, 0x0e, 0x9b, 0x3d, 0xaf, 0x84, 0x30, + 0xc0, 0xfa, 0x53, 0xcf, 0x7c, 0x67, 0x70, 0x1e, 0x6e, 0xf7, 0xbd, 0xe3, 0x23, 0xd2, 0xec, 0x95, + 0xb2, 0xbb, 0x47, 0x50, 0xb8, 0x22, 0x3a, 0x7e, 0x00, 0x5b, 0xcd, 0xe3, 0x76, 0x77, 0xe0, 0x7b, + 0xfd, 0xd6, 0xa0, 0xdd, 0xed, 0x77, 0xae, 0xd5, 0x74, 0xe0, 0x4e, 0xaf, 0xdb, 0xf7, 0x9a, 0xa4, + 0xfe, 0xa8, 0x84, 0xf0, 0x6d, 0xc8, 0x1e, 0x0e, 0x1f, 0x96, 0x32, 0x3a, 0x3c, 0xe8, 0x74, 0xfc, + 0xc1, 0xf0, 0x78, 0x54, 0xca, 0x36, 0x7e, 0xcd, 0x80, 0x73, 0xc4, 0x9e, 0xa9, 0x23, 0x61, 0x9b, + 0xc5, 0xdf, 0x23, 0x80, 0x97, 0xb3, 0x12, 0xd7, 0x96, 0xdb, 0xe0, 0xc6, 0x3c, 0xde, 0xfa, 0x78, + 0x75, 0x80, 0x95, 0xd5, 0xc5, 0xdf, 0xfd, 0xf9, 0xd7, 0x0f, 0x19, 0x07, 0x83, 0x7e, 0x2f, 0xec, + 0x74, 0xc5, 0x3f, 0x23, 0x28, 0x5d, 0xff, 0x0f, 0xf8, 0x93, 0xd5, 0xe6, 0xce, 0xb5, 0xe9, 0xb5, + 0xf5, 0xe8, 0xff, 0xc2, 0x52, 0x5e, 0x0f, 0x0c, 0xaf, 0xb2, 0xfb, 0x86, 0xe6, 0xa5, 0x51, 0x8f, + 0xe5, 0x8b, 0xad, 0x8f, 0xd1, 0xee, 0xfe, 0x6f, 0x08, 0x2a, 0x81, 0x98, 0x2c, 0xad, 0xbe, 0xbf, + 0xb1, 0x28, 0xee, 0x50, 0x3f, 0x63, 0x43, 0xf4, 0xd5, 0x41, 0x0a, 0x09, 0x85, 0xbe, 0xfe, 0x55, + 0x31, 0x0d, 0x6b, 0x21, 0xe3, 0xe6, 0x91, 0xab, 0xd9, 0x14, 0x4d, 0x22, 0xf9, 0xea, 0x17, 0xf5, + 0xc9, 0xe2, 0xfa, 0x1f, 0x84, 0x7e, 0xc9, 0xdc, 0xef, 0xd8, 0x6a, 0x2d, 0x43, 0x60, 0xf1, 0xbc, + 0xea, 0x49, 0xfd, 0xf7, 0x79, 0xfa, 0xd4, 0xa4, 0x4f, 0x17, 0xd3, 0xa7, 0x27, 0xf5, 0xb3, 0x75, + 0x73, 0xea, 0xc3, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x05, 0xf0, 0x24, 0x64, 0xc6, 0x07, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1/cloud_tts.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1/cloud_tts.pb.go new file mode 100644 index 0000000..98d30df --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1/cloud_tts.pb.go @@ -0,0 +1,869 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/texttospeech/v1beta1/cloud_tts.proto + +package texttospeech // import "google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). +type SsmlVoiceGender int32 + +const ( + // An unspecified gender. 
+ // In VoiceSelectionParams, this means that the client doesn't care which + // gender the selected voice will have. In the Voice field of + // ListVoicesResponse, this may mean that the voice doesn't fit any of the + // other categories in this enum, or that the gender of the voice isn't known. + SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED SsmlVoiceGender = 0 + // A male voice. + SsmlVoiceGender_MALE SsmlVoiceGender = 1 + // A female voice. + SsmlVoiceGender_FEMALE SsmlVoiceGender = 2 + // A gender-neutral voice. + SsmlVoiceGender_NEUTRAL SsmlVoiceGender = 3 +) + +var SsmlVoiceGender_name = map[int32]string{ + 0: "SSML_VOICE_GENDER_UNSPECIFIED", + 1: "MALE", + 2: "FEMALE", + 3: "NEUTRAL", +} +var SsmlVoiceGender_value = map[string]int32{ + "SSML_VOICE_GENDER_UNSPECIFIED": 0, + "MALE": 1, + "FEMALE": 2, + "NEUTRAL": 3, +} + +func (x SsmlVoiceGender) String() string { + return proto.EnumName(SsmlVoiceGender_name, int32(x)) +} +func (SsmlVoiceGender) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{0} +} + +// Configuration to set up audio encoder. The encoding determines the output +// audio format that we'd like. +type AudioEncoding int32 + +const ( + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AudioEncoding_AUDIO_ENCODING_UNSPECIFIED AudioEncoding = 0 + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + AudioEncoding_LINEAR16 AudioEncoding = 1 + // MP3 audio. + AudioEncoding_MP3 AudioEncoding = 2 + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + AudioEncoding_OGG_OPUS AudioEncoding = 3 +) + +var AudioEncoding_name = map[int32]string{ + 0: "AUDIO_ENCODING_UNSPECIFIED", + 1: "LINEAR16", + 2: "MP3", + 3: "OGG_OPUS", +} +var AudioEncoding_value = map[string]int32{ + "AUDIO_ENCODING_UNSPECIFIED": 0, + "LINEAR16": 1, + "MP3": 2, + "OGG_OPUS": 3, +} + +func (x AudioEncoding) String() string { + return proto.EnumName(AudioEncoding_name, int32(x)) +} +func (AudioEncoding) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{1} +} + +// The top-level message sent by the client for the `ListVoices` method. +type ListVoicesRequest struct { + // Optional (but recommended) + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. If + // specified, the ListVoices call will only return voices that can be used to + // synthesize this language_code. E.g. when specifying "en-NZ", you will get + // supported "en-*" voices; when specifying "no", you will get supported + // "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices; specifying "zh" + // will also get supported "cmn-*" voices; specifying "zh-hk" will also get + // supported "yue-*" voices. 
+ LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVoicesRequest) Reset() { *m = ListVoicesRequest{} } +func (m *ListVoicesRequest) String() string { return proto.CompactTextString(m) } +func (*ListVoicesRequest) ProtoMessage() {} +func (*ListVoicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{0} +} +func (m *ListVoicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVoicesRequest.Unmarshal(m, b) +} +func (m *ListVoicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVoicesRequest.Marshal(b, m, deterministic) +} +func (dst *ListVoicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVoicesRequest.Merge(dst, src) +} +func (m *ListVoicesRequest) XXX_Size() int { + return xxx_messageInfo_ListVoicesRequest.Size(m) +} +func (m *ListVoicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListVoicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVoicesRequest proto.InternalMessageInfo + +func (m *ListVoicesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// The message returned to the client by the `ListVoices` method. +type ListVoicesResponse struct { + // The list of voices. + Voices []*Voice `protobuf:"bytes,1,rep,name=voices,proto3" json:"voices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListVoicesResponse) Reset() { *m = ListVoicesResponse{} } +func (m *ListVoicesResponse) String() string { return proto.CompactTextString(m) } +func (*ListVoicesResponse) ProtoMessage() {} +func (*ListVoicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{1} +} +func (m *ListVoicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListVoicesResponse.Unmarshal(m, b) +} +func (m *ListVoicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListVoicesResponse.Marshal(b, m, deterministic) +} +func (dst *ListVoicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListVoicesResponse.Merge(dst, src) +} +func (m *ListVoicesResponse) XXX_Size() int { + return xxx_messageInfo_ListVoicesResponse.Size(m) +} +func (m *ListVoicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListVoicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListVoicesResponse proto.InternalMessageInfo + +func (m *ListVoicesResponse) GetVoices() []*Voice { + if m != nil { + return m.Voices + } + return nil +} + +// Description of a voice supported by the TTS service. +type Voice struct { + // The languages that this voice supports, expressed as + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags (e.g. + // "en-US", "es-419", "cmn-tw"). + LanguageCodes []string `protobuf:"bytes,1,rep,name=language_codes,json=languageCodes,proto3" json:"language_codes,omitempty"` + // The name of this voice. Each distinct voice has a unique name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The gender of this voice. 
+ SsmlGender SsmlVoiceGender `protobuf:"varint,3,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.texttospeech.v1beta1.SsmlVoiceGender" json:"ssml_gender,omitempty"` + // The natural sample rate (in hertz) for this voice. + NaturalSampleRateHertz int32 `protobuf:"varint,4,opt,name=natural_sample_rate_hertz,json=naturalSampleRateHertz,proto3" json:"natural_sample_rate_hertz,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Voice) Reset() { *m = Voice{} } +func (m *Voice) String() string { return proto.CompactTextString(m) } +func (*Voice) ProtoMessage() {} +func (*Voice) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{2} +} +func (m *Voice) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Voice.Unmarshal(m, b) +} +func (m *Voice) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Voice.Marshal(b, m, deterministic) +} +func (dst *Voice) XXX_Merge(src proto.Message) { + xxx_messageInfo_Voice.Merge(dst, src) +} +func (m *Voice) XXX_Size() int { + return xxx_messageInfo_Voice.Size(m) +} +func (m *Voice) XXX_DiscardUnknown() { + xxx_messageInfo_Voice.DiscardUnknown(m) +} + +var xxx_messageInfo_Voice proto.InternalMessageInfo + +func (m *Voice) GetLanguageCodes() []string { + if m != nil { + return m.LanguageCodes + } + return nil +} + +func (m *Voice) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Voice) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +func (m *Voice) GetNaturalSampleRateHertz() int32 { + if m != nil { + return m.NaturalSampleRateHertz + } + return 0 +} + +// The top-level message sent by the client for the `SynthesizeSpeech` method. +type SynthesizeSpeechRequest struct { + // Required. The Synthesizer requires either plain text or SSML as input. + Input *SynthesisInput `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` + // Required. The desired voice of the synthesized audio. + Voice *VoiceSelectionParams `protobuf:"bytes,2,opt,name=voice,proto3" json:"voice,omitempty"` + // Required. The configuration of the synthesized audio. 
+ AudioConfig *AudioConfig `protobuf:"bytes,3,opt,name=audio_config,json=audioConfig,proto3" json:"audio_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechRequest) Reset() { *m = SynthesizeSpeechRequest{} } +func (m *SynthesizeSpeechRequest) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechRequest) ProtoMessage() {} +func (*SynthesizeSpeechRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{3} +} +func (m *SynthesizeSpeechRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechRequest.Unmarshal(m, b) +} +func (m *SynthesizeSpeechRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechRequest.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechRequest.Merge(dst, src) +} +func (m *SynthesizeSpeechRequest) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechRequest.Size(m) +} +func (m *SynthesizeSpeechRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechRequest proto.InternalMessageInfo + +func (m *SynthesizeSpeechRequest) GetInput() *SynthesisInput { + if m != nil { + return m.Input + } + return nil +} + +func (m *SynthesizeSpeechRequest) GetVoice() *VoiceSelectionParams { + if m != nil { + return m.Voice + } + return nil +} + +func (m *SynthesizeSpeechRequest) GetAudioConfig() *AudioConfig { + if m != nil { + return m.AudioConfig + } + return nil +} + +// Contains text input to be synthesized. Either `text` or `ssml` must be +// supplied. Supplying both or neither returns +// [google.rpc.Code.INVALID_ARGUMENT][]. The input size is limited to 5000 +// characters. +type SynthesisInput struct { + // The input source, which is either plain text or SSML. 
+ // + // Types that are valid to be assigned to InputSource: + // *SynthesisInput_Text + // *SynthesisInput_Ssml + InputSource isSynthesisInput_InputSource `protobuf_oneof:"input_source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesisInput) Reset() { *m = SynthesisInput{} } +func (m *SynthesisInput) String() string { return proto.CompactTextString(m) } +func (*SynthesisInput) ProtoMessage() {} +func (*SynthesisInput) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{4} +} +func (m *SynthesisInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesisInput.Unmarshal(m, b) +} +func (m *SynthesisInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesisInput.Marshal(b, m, deterministic) +} +func (dst *SynthesisInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesisInput.Merge(dst, src) +} +func (m *SynthesisInput) XXX_Size() int { + return xxx_messageInfo_SynthesisInput.Size(m) +} +func (m *SynthesisInput) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesisInput.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesisInput proto.InternalMessageInfo + +type isSynthesisInput_InputSource interface { + isSynthesisInput_InputSource() +} + +type SynthesisInput_Text struct { + Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type SynthesisInput_Ssml struct { + Ssml string `protobuf:"bytes,2,opt,name=ssml,proto3,oneof"` +} + +func (*SynthesisInput_Text) isSynthesisInput_InputSource() {} + +func (*SynthesisInput_Ssml) isSynthesisInput_InputSource() {} + +func (m *SynthesisInput) GetInputSource() isSynthesisInput_InputSource { + if m != nil { + return m.InputSource + } + return nil +} + +func (m *SynthesisInput) GetText() string { + if x, ok := m.GetInputSource().(*SynthesisInput_Text); ok { + return x.Text + } + return "" +} + +func (m *SynthesisInput) GetSsml() string { + if x, ok := m.GetInputSource().(*SynthesisInput_Ssml); ok { + return x.Ssml + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SynthesisInput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SynthesisInput_OneofMarshaler, _SynthesisInput_OneofUnmarshaler, _SynthesisInput_OneofSizer, []interface{}{ + (*SynthesisInput_Text)(nil), + (*SynthesisInput_Ssml)(nil), + } +} + +func _SynthesisInput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SynthesisInput) + // input_source + switch x := m.InputSource.(type) { + case *SynthesisInput_Text: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case *SynthesisInput_Ssml: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Ssml) + case nil: + default: + return fmt.Errorf("SynthesisInput.InputSource has unexpected type %T", x) + } + return nil +} + +func _SynthesisInput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SynthesisInput) + switch tag { + case 1: // input_source.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.InputSource = &SynthesisInput_Text{x} + return true, err + case 2: // input_source.ssml + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.InputSource = &SynthesisInput_Ssml{x} + return true, err + default: + return false, nil + } +} + +func _SynthesisInput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SynthesisInput) + // input_source + switch x := m.InputSource.(type) { + case *SynthesisInput_Text: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case *SynthesisInput_Ssml: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Ssml))) + n += len(x.Ssml) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Description of which voice to use for a synthesis request. +type VoiceSelectionParams struct { + // The language (and optionally also the region) of the voice expressed as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag, e.g. + // "en-US". Required. This should not include a script tag (e.g. use + // "cmn-cn" rather than "cmn-Hant-cn"), because the script will be inferred + // from the input provided in the SynthesisInput. The TTS service + // will use this parameter to help choose an appropriate voice. Note that + // the TTS service may choose a voice with a slightly different language code + // than the one selected; it may substitute a different region + // (e.g. using en-US rather than en-CA if there isn't a Canadian voice + // available), or even a different language, e.g. using "nb" (Norwegian + // Bokmal) instead of "no" (Norwegian)". + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The name of the voice. Optional; if not set, the service will choose a + // voice based on the other parameters such as language_code and gender. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The preferred gender of the voice. Optional; if not set, the service will + // choose a voice based on the other parameters such as language_code and + // name. 
Note that this is only a preference, not requirement; if a + // voice of the appropriate gender is not available, the synthesizer should + // substitute a voice with a different gender rather than failing the request. + SsmlGender SsmlVoiceGender `protobuf:"varint,3,opt,name=ssml_gender,json=ssmlGender,proto3,enum=google.cloud.texttospeech.v1beta1.SsmlVoiceGender" json:"ssml_gender,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VoiceSelectionParams) Reset() { *m = VoiceSelectionParams{} } +func (m *VoiceSelectionParams) String() string { return proto.CompactTextString(m) } +func (*VoiceSelectionParams) ProtoMessage() {} +func (*VoiceSelectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{5} +} +func (m *VoiceSelectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VoiceSelectionParams.Unmarshal(m, b) +} +func (m *VoiceSelectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VoiceSelectionParams.Marshal(b, m, deterministic) +} +func (dst *VoiceSelectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_VoiceSelectionParams.Merge(dst, src) +} +func (m *VoiceSelectionParams) XXX_Size() int { + return xxx_messageInfo_VoiceSelectionParams.Size(m) +} +func (m *VoiceSelectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_VoiceSelectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_VoiceSelectionParams proto.InternalMessageInfo + +func (m *VoiceSelectionParams) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *VoiceSelectionParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VoiceSelectionParams) GetSsmlGender() SsmlVoiceGender { + if m != nil { + return m.SsmlGender + } + return SsmlVoiceGender_SSML_VOICE_GENDER_UNSPECIFIED +} + +// Description of audio data to be synthesized. +type AudioConfig struct { + // Required. The format of the requested audio byte stream. + AudioEncoding AudioEncoding `protobuf:"varint,1,opt,name=audio_encoding,json=audioEncoding,proto3,enum=google.cloud.texttospeech.v1beta1.AudioEncoding" json:"audio_encoding,omitempty"` + // Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal + // native speed supported by the specific voice. 2.0 is twice as fast, and + // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any + // other values < 0.25 or > 4.0 will return an error. + SpeakingRate float64 `protobuf:"fixed64,2,opt,name=speaking_rate,json=speakingRate,proto3" json:"speaking_rate,omitempty"` + // Optional speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 + // semitones from the original pitch. -20 means decrease 20 semitones from the + // original pitch. + Pitch float64 `protobuf:"fixed64,3,opt,name=pitch,proto3" json:"pitch,omitempty"` + // Optional volume gain (in dB) of the normal native volume supported by the + // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of + // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) + // will play at approximately half the amplitude of the normal native signal + // amplitude. A value of +6.0 (dB) will play at approximately twice the + // amplitude of the normal native signal amplitude. Strongly recommend not to + // exceed +10 (dB) as there's usually no effective increase in loudness for + // any value greater than that. 
+ VolumeGainDb float64 `protobuf:"fixed64,4,opt,name=volume_gain_db,json=volumeGainDb,proto3" json:"volume_gain_db,omitempty"` + // The synthesis sample rate (in hertz) for this audio. Optional. If this is + // different from the voice's natural sample rate, then the synthesizer will + // honor this request by converting to the desired sample rate (which might + // result in worse audio quality), unless the specified sample rate is not + // supported for the encoding chosen, in which case it will fail the request + // and return [google.rpc.Code.INVALID_ARGUMENT][]. + SampleRateHertz int32 `protobuf:"varint,5,opt,name=sample_rate_hertz,json=sampleRateHertz,proto3" json:"sample_rate_hertz,omitempty"` + // An identifier which selects 'audio effects' profiles that are applied on + // (post synthesized) text to speech. + // Effects are applied on top of each other in the order they are given. + EffectsProfileId []string `protobuf:"bytes,6,rep,name=effects_profile_id,json=effectsProfileId,proto3" json:"effects_profile_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AudioConfig) Reset() { *m = AudioConfig{} } +func (m *AudioConfig) String() string { return proto.CompactTextString(m) } +func (*AudioConfig) ProtoMessage() {} +func (*AudioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{6} +} +func (m *AudioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AudioConfig.Unmarshal(m, b) +} +func (m *AudioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AudioConfig.Marshal(b, m, deterministic) +} +func (dst *AudioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AudioConfig.Merge(dst, src) +} +func (m *AudioConfig) XXX_Size() int { + return xxx_messageInfo_AudioConfig.Size(m) +} +func (m *AudioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AudioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AudioConfig proto.InternalMessageInfo + +func (m *AudioConfig) GetAudioEncoding() AudioEncoding { + if m != nil { + return m.AudioEncoding + } + return AudioEncoding_AUDIO_ENCODING_UNSPECIFIED +} + +func (m *AudioConfig) GetSpeakingRate() float64 { + if m != nil { + return m.SpeakingRate + } + return 0 +} + +func (m *AudioConfig) GetPitch() float64 { + if m != nil { + return m.Pitch + } + return 0 +} + +func (m *AudioConfig) GetVolumeGainDb() float64 { + if m != nil { + return m.VolumeGainDb + } + return 0 +} + +func (m *AudioConfig) GetSampleRateHertz() int32 { + if m != nil { + return m.SampleRateHertz + } + return 0 +} + +func (m *AudioConfig) GetEffectsProfileId() []string { + if m != nil { + return m.EffectsProfileId + } + return nil +} + +// The message returned to the client by the `SynthesizeSpeech` method. +type SynthesizeSpeechResponse struct { + // The audio data bytes encoded as specified in the request, including the + // header (For LINEAR16 audio, we include the WAV header). Note: as + // with all bytes fields, protobuffers use a pure binary representation, + // whereas JSON representations use base64. 
+ AudioContent []byte `protobuf:"bytes,1,opt,name=audio_content,json=audioContent,proto3" json:"audio_content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SynthesizeSpeechResponse) Reset() { *m = SynthesizeSpeechResponse{} } +func (m *SynthesizeSpeechResponse) String() string { return proto.CompactTextString(m) } +func (*SynthesizeSpeechResponse) ProtoMessage() {} +func (*SynthesizeSpeechResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloud_tts_3ab83d835c3b5bf2, []int{7} +} +func (m *SynthesizeSpeechResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SynthesizeSpeechResponse.Unmarshal(m, b) +} +func (m *SynthesizeSpeechResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SynthesizeSpeechResponse.Marshal(b, m, deterministic) +} +func (dst *SynthesizeSpeechResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SynthesizeSpeechResponse.Merge(dst, src) +} +func (m *SynthesizeSpeechResponse) XXX_Size() int { + return xxx_messageInfo_SynthesizeSpeechResponse.Size(m) +} +func (m *SynthesizeSpeechResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SynthesizeSpeechResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SynthesizeSpeechResponse proto.InternalMessageInfo + +func (m *SynthesizeSpeechResponse) GetAudioContent() []byte { + if m != nil { + return m.AudioContent + } + return nil +} + +func init() { + proto.RegisterType((*ListVoicesRequest)(nil), "google.cloud.texttospeech.v1beta1.ListVoicesRequest") + proto.RegisterType((*ListVoicesResponse)(nil), "google.cloud.texttospeech.v1beta1.ListVoicesResponse") + proto.RegisterType((*Voice)(nil), "google.cloud.texttospeech.v1beta1.Voice") + proto.RegisterType((*SynthesizeSpeechRequest)(nil), "google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest") + proto.RegisterType((*SynthesisInput)(nil), "google.cloud.texttospeech.v1beta1.SynthesisInput") + proto.RegisterType((*VoiceSelectionParams)(nil), "google.cloud.texttospeech.v1beta1.VoiceSelectionParams") + proto.RegisterType((*AudioConfig)(nil), "google.cloud.texttospeech.v1beta1.AudioConfig") + proto.RegisterType((*SynthesizeSpeechResponse)(nil), "google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse") + proto.RegisterEnum("google.cloud.texttospeech.v1beta1.SsmlVoiceGender", SsmlVoiceGender_name, SsmlVoiceGender_value) + proto.RegisterEnum("google.cloud.texttospeech.v1beta1.AudioEncoding", AudioEncoding_name, AudioEncoding_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TextToSpeechClient is the client API for TextToSpeech service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TextToSpeechClient interface { + // Returns a list of [Voice][google.cloud.texttospeech.v1beta1.Voice] + // supported for synthesis. + ListVoices(ctx context.Context, in *ListVoicesRequest, opts ...grpc.CallOption) (*ListVoicesResponse, error) + // Synthesizes speech synchronously: receive results after all text input + // has been processed. 
+ SynthesizeSpeech(ctx context.Context, in *SynthesizeSpeechRequest, opts ...grpc.CallOption) (*SynthesizeSpeechResponse, error) +} + +type textToSpeechClient struct { + cc *grpc.ClientConn +} + +func NewTextToSpeechClient(cc *grpc.ClientConn) TextToSpeechClient { + return &textToSpeechClient{cc} +} + +func (c *textToSpeechClient) ListVoices(ctx context.Context, in *ListVoicesRequest, opts ...grpc.CallOption) (*ListVoicesResponse, error) { + out := new(ListVoicesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.texttospeech.v1beta1.TextToSpeech/ListVoices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *textToSpeechClient) SynthesizeSpeech(ctx context.Context, in *SynthesizeSpeechRequest, opts ...grpc.CallOption) (*SynthesizeSpeechResponse, error) { + out := new(SynthesizeSpeechResponse) + err := c.cc.Invoke(ctx, "/google.cloud.texttospeech.v1beta1.TextToSpeech/SynthesizeSpeech", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TextToSpeechServer is the server API for TextToSpeech service. +type TextToSpeechServer interface { + // Returns a list of [Voice][google.cloud.texttospeech.v1beta1.Voice] + // supported for synthesis. + ListVoices(context.Context, *ListVoicesRequest) (*ListVoicesResponse, error) + // Synthesizes speech synchronously: receive results after all text input + // has been processed. + SynthesizeSpeech(context.Context, *SynthesizeSpeechRequest) (*SynthesizeSpeechResponse, error) +} + +func RegisterTextToSpeechServer(s *grpc.Server, srv TextToSpeechServer) { + s.RegisterService(&_TextToSpeech_serviceDesc, srv) +} + +func _TextToSpeech_ListVoices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListVoicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TextToSpeechServer).ListVoices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.texttospeech.v1beta1.TextToSpeech/ListVoices", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TextToSpeechServer).ListVoices(ctx, req.(*ListVoicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TextToSpeech_SynthesizeSpeech_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SynthesizeSpeechRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TextToSpeechServer).SynthesizeSpeech(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.texttospeech.v1beta1.TextToSpeech/SynthesizeSpeech", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TextToSpeechServer).SynthesizeSpeech(ctx, req.(*SynthesizeSpeechRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TextToSpeech_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.texttospeech.v1beta1.TextToSpeech", + HandlerType: (*TextToSpeechServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListVoices", + Handler: _TextToSpeech_ListVoices_Handler, + }, + { + MethodName: "SynthesizeSpeech", + Handler: _TextToSpeech_SynthesizeSpeech_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/texttospeech/v1beta1/cloud_tts.proto", +} + +func init() { + 
proto.RegisterFile("google/cloud/texttospeech/v1beta1/cloud_tts.proto", fileDescriptor_cloud_tts_3ab83d835c3b5bf2) +} + +var fileDescriptor_cloud_tts_3ab83d835c3b5bf2 = []byte{ + // 875 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0xee, 0xd8, 0x71, 0xda, 0x1e, 0xaf, 0x1d, 0x67, 0x14, 0x51, 0x13, 0x51, 0x94, 0x6e, 0xa8, + 0x64, 0x45, 0xc8, 0xc6, 0x2e, 0xbf, 0xe9, 0x05, 0x38, 0xf6, 0xd6, 0xb5, 0xe4, 0x3f, 0x66, 0x93, + 0x54, 0xe2, 0x66, 0x35, 0xd9, 0x9d, 0x6c, 0x56, 0xac, 0x67, 0x96, 0x9d, 0x71, 0x54, 0x7a, 0x89, + 0xb8, 0xe6, 0x02, 0x9e, 0x00, 0x89, 0x07, 0xe0, 0x59, 0x40, 0xe2, 0x09, 0x78, 0x08, 0x2e, 0xd1, + 0xce, 0x6c, 0x52, 0x27, 0x41, 0xd4, 0xe1, 0x82, 0xbb, 0x99, 0x73, 0xf6, 0x3b, 0xf3, 0x9d, 0x6f, + 0xbe, 0x3d, 0x03, 0xed, 0x50, 0x88, 0x30, 0x66, 0x2d, 0x3f, 0x16, 0x8b, 0xa0, 0xa5, 0xd8, 0x4b, + 0xa5, 0x84, 0x4c, 0x18, 0xf3, 0xcf, 0x5a, 0xe7, 0xed, 0x13, 0xa6, 0x68, 0xdb, 0xa4, 0x3c, 0xa5, + 0x64, 0x33, 0x49, 0x85, 0x12, 0xf8, 0x91, 0x81, 0x34, 0x75, 0xbc, 0xb9, 0x0c, 0x69, 0xe6, 0x90, + 0xed, 0x77, 0xf2, 0xaa, 0x34, 0x89, 0x5a, 0x94, 0x73, 0xa1, 0xa8, 0x8a, 0x04, 0xcf, 0x0b, 0xd8, + 0x9f, 0xc2, 0xe6, 0x28, 0x92, 0xea, 0x58, 0x44, 0x3e, 0x93, 0x84, 0x7d, 0xb3, 0x60, 0x52, 0xe1, + 0x5d, 0xa8, 0xc4, 0x94, 0x87, 0x0b, 0x1a, 0x32, 0xcf, 0x17, 0x01, 0xab, 0xa3, 0x1d, 0xd4, 0xb8, + 0x4f, 0xac, 0x8b, 0x60, 0x4f, 0x04, 0xcc, 0x3e, 0x06, 0xbc, 0x8c, 0x94, 0x89, 0xe0, 0x92, 0xe1, + 0x2f, 0x60, 0xfd, 0x5c, 0x47, 0xea, 0x68, 0xa7, 0xd8, 0x28, 0x77, 0x1a, 0xcd, 0x37, 0x32, 0x6c, + 0xea, 0x12, 0x24, 0xc7, 0xd9, 0xbf, 0x21, 0x28, 0xe9, 0x08, 0x7e, 0x0c, 0xd5, 0x2b, 0x34, 0x4c, + 0xcd, 0xfb, 0xa4, 0xb2, 0xcc, 0x43, 0x62, 0x0c, 0x6b, 0x9c, 0xce, 0x59, 0xbd, 0xa0, 0x49, 0xea, + 0x35, 0x76, 0xa1, 0x2c, 0xe5, 0x3c, 0xf6, 0x42, 0xc6, 0x03, 0x96, 0xd6, 0x8b, 0x3b, 0xa8, 0x51, + 0xed, 0x74, 0x56, 0xe0, 0xe2, 0xca, 0x79, 0xac, 0x4f, 0x1f, 0x68, 0x24, 0x81, 0xac, 0x8c, 0x59, + 0xe3, 0xcf, 0xe0, 0x6d, 0x4e, 0xd5, 0x22, 0xa5, 0xb1, 0x27, 0xe9, 0x3c, 0x89, 0x99, 0x97, 0x52, + 0xc5, 0xbc, 0x33, 0x96, 0xaa, 0x57, 0xf5, 0xb5, 0x1d, 0xd4, 0x28, 0x91, 0xb7, 0xf2, 0x0f, 0x5c, + 0x9d, 0x27, 0x54, 0xb1, 0xe7, 0x59, 0xd6, 0xfe, 0xbe, 0x00, 0x0f, 0xdc, 0x6f, 0xb9, 0x3a, 0x63, + 0x32, 0x7a, 0xc5, 0x5c, 0x7d, 0xe6, 0x85, 0xda, 0x03, 0x28, 0x45, 0x3c, 0x59, 0x28, 0xad, 0x72, + 0xb9, 0xd3, 0x5e, 0x85, 0x65, 0x5e, 0x4a, 0x0e, 0x33, 0x20, 0x31, 0x78, 0x3c, 0x86, 0x92, 0xd6, + 0x50, 0x2b, 0x51, 0xee, 0x7c, 0xb2, 0xaa, 0xf4, 0x2e, 0x8b, 0x99, 0x9f, 0x99, 0x62, 0x46, 0x53, + 0x3a, 0x97, 0xc4, 0x54, 0xc1, 0x5f, 0x82, 0x45, 0x17, 0x41, 0x24, 0x3c, 0x5f, 0xf0, 0xd3, 0x28, + 0xd4, 0x22, 0x96, 0x3b, 0xcd, 0x15, 0xaa, 0x76, 0x33, 0x58, 0x4f, 0xa3, 0x48, 0x99, 0xbe, 0xde, + 0xd8, 0x23, 0xa8, 0x5e, 0xa5, 0x8e, 0xb7, 0x60, 0x2d, 0x2b, 0x61, 0x1c, 0xf6, 0xfc, 0x0e, 0xd1, + 0xbb, 0x2c, 0x9a, 0xe9, 0x6e, 0xae, 0x34, 0x8b, 0x66, 0xbb, 0x83, 0x2a, 0x58, 0xba, 0x51, 0x4f, + 0x8a, 0x45, 0xea, 0x33, 0xfb, 0x17, 0x04, 0x5b, 0xff, 0xd4, 0xc0, 0x4a, 0xfe, 0xfd, 0xdf, 0x6c, + 0x63, 0xff, 0x5c, 0x80, 0xf2, 0x92, 0x22, 0xf8, 0x05, 0x54, 0x8d, 0xae, 0x8c, 0xfb, 0x22, 0x88, + 0x78, 0xa8, 0xe9, 0x55, 0x3b, 0x1f, 0xac, 0xaa, 0xac, 0x93, 0xe3, 0x48, 0x85, 0x2e, 0x6f, 0xb3, + 0xb6, 0x65, 0xc2, 0xe8, 0xd7, 0x11, 0x0f, 0xb5, 0x33, 0x75, 0x6b, 0x88, 0x58, 0x17, 0xc1, 0xcc, + 0x8e, 0x78, 0x0b, 0x4a, 0x49, 0xa4, 0xfc, 0x33, 0xdd, 0x1c, 0x22, 0x66, 0x83, 0xdf, 0x83, 0xea, + 0xb9, 0x88, 0x17, 0x73, 0xe6, 0x85, 0x34, 0xe2, 0x5e, 0x70, 0xa2, 0xfd, 0x8c, 0x88, 0x65, 
0xa2, + 0x03, 0x1a, 0xf1, 0xfe, 0x09, 0xde, 0x83, 0xcd, 0x9b, 0xc6, 0x2f, 0x69, 0xe3, 0x6f, 0xc8, 0xab, + 0x8e, 0xc7, 0xef, 0x03, 0x66, 0xa7, 0xa7, 0xcc, 0x57, 0xd2, 0x4b, 0x52, 0x71, 0x1a, 0xc5, 0xcc, + 0x8b, 0x82, 0xfa, 0xba, 0xfe, 0x81, 0x6b, 0x79, 0x66, 0x66, 0x12, 0xc3, 0xc0, 0xfe, 0x1c, 0xea, + 0x37, 0x7f, 0x8f, 0x7c, 0xa4, 0xec, 0x42, 0xe5, 0xd2, 0x87, 0x8a, 0x71, 0xe3, 0x15, 0x8b, 0x58, + 0x17, 0xc6, 0xca, 0x62, 0x7b, 0x2f, 0x60, 0xe3, 0xda, 0x1d, 0xe0, 0x47, 0xf0, 0xd0, 0x75, 0xc7, + 0x23, 0xef, 0x78, 0x3a, 0xec, 0x39, 0xde, 0xc0, 0x99, 0xf4, 0x1d, 0xe2, 0x1d, 0x4d, 0xdc, 0x99, + 0xd3, 0x1b, 0x3e, 0x1b, 0x3a, 0xfd, 0xda, 0x1d, 0x7c, 0x0f, 0xd6, 0xc6, 0xdd, 0x91, 0x53, 0x43, + 0x18, 0x60, 0xfd, 0x99, 0xa3, 0xd7, 0x05, 0x5c, 0x86, 0xbb, 0x13, 0xe7, 0xe8, 0x90, 0x74, 0x47, + 0xb5, 0xe2, 0xde, 0x21, 0x54, 0xae, 0x88, 0x8e, 0xdf, 0x85, 0xed, 0xee, 0x51, 0x7f, 0x38, 0xf5, + 0x9c, 0x49, 0x6f, 0xda, 0x1f, 0x4e, 0x06, 0xd7, 0x6a, 0x5a, 0x70, 0x6f, 0x34, 0x9c, 0x38, 0x5d, + 0xd2, 0xfe, 0xb8, 0x86, 0xf0, 0x5d, 0x28, 0x8e, 0x67, 0x4f, 0x6a, 0x85, 0x2c, 0x3c, 0x1d, 0x0c, + 0xbc, 0xe9, 0xec, 0xc8, 0xad, 0x15, 0x3b, 0x7f, 0x14, 0xc0, 0x3a, 0x64, 0x2f, 0xd5, 0xa1, 0x30, + 0xcd, 0xe2, 0x1f, 0x11, 0xc0, 0xeb, 0x71, 0x8a, 0x3f, 0x5c, 0xc1, 0x0b, 0x37, 0xe6, 0xf6, 0xf6, + 0x47, 0xb7, 0x44, 0x19, 0x81, 0xed, 0x07, 0xdf, 0xfd, 0xfe, 0xe7, 0x4f, 0x85, 0x4d, 0xbc, 0x71, + 0xf9, 0xcc, 0x98, 0x51, 0x8c, 0x7f, 0x45, 0x50, 0xbb, 0x7e, 0x2d, 0x78, 0xff, 0x16, 0xf3, 0xe9, + 0xda, 0xa8, 0xdb, 0x7e, 0xfa, 0x9f, 0xb0, 0x39, 0xcd, 0x5d, 0x4d, 0xf3, 0xa1, 0x5d, 0xbf, 0xa4, + 0x99, 0xe1, 0xf7, 0xe5, 0xe5, 0xf7, 0xfb, 0x68, 0xef, 0xe0, 0x07, 0x04, 0x8f, 0x7d, 0x31, 0x7f, + 0xf3, 0x39, 0x07, 0x9b, 0xcb, 0xfa, 0xcf, 0xb2, 0xc7, 0x70, 0x86, 0xbe, 0x1a, 0xe7, 0xb8, 0x50, + 0x64, 0xb3, 0xa2, 0x29, 0xd2, 0xb0, 0x15, 0x32, 0xae, 0x9f, 0xca, 0x96, 0x49, 0xd1, 0x24, 0x92, + 0xff, 0xf2, 0x42, 0x3f, 0x5d, 0x0e, 0xfe, 0x85, 0xd0, 0xc9, 0xba, 0x06, 0x3f, 0xf9, 0x3b, 0x00, + 0x00, 0xff, 0xff, 0x26, 0x28, 0xff, 0x42, 0xdd, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/translate/v3beta1/translation_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/translate/v3beta1/translation_service.pb.go new file mode 100644 index 0000000..f23cb3a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/translate/v3beta1/translation_service.pb.go @@ -0,0 +1,2820 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/translate/v3beta1/translation_service.proto + +package translate // import "google.golang.org/genproto/googleapis/cloud/translate/v3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// State of the job. 
+type BatchTranslateMetadata_State int32 + +const ( + // Invalid. + BatchTranslateMetadata_STATE_UNSPECIFIED BatchTranslateMetadata_State = 0 + // Request is being processed. + BatchTranslateMetadata_RUNNING BatchTranslateMetadata_State = 1 + // The batch is processed, and at least one item has been successfully + // processed. + BatchTranslateMetadata_SUCCEEDED BatchTranslateMetadata_State = 2 + // The batch is done and no item has been successfully processed. + BatchTranslateMetadata_FAILED BatchTranslateMetadata_State = 3 + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + BatchTranslateMetadata_CANCELLING BatchTranslateMetadata_State = 4 + // The batch is done after the user has called the + // longrunning.Operations.CancelOperation. Any records processed before the + // cancel command are output as specified in the request. + BatchTranslateMetadata_CANCELLED BatchTranslateMetadata_State = 5 +) + +var BatchTranslateMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "SUCCEEDED", + 3: "FAILED", + 4: "CANCELLING", + 5: "CANCELLED", +} +var BatchTranslateMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "SUCCEEDED": 2, + "FAILED": 3, + "CANCELLING": 4, + "CANCELLED": 5, +} + +func (x BatchTranslateMetadata_State) String() string { + return proto.EnumName(BatchTranslateMetadata_State_name, int32(x)) +} +func (BatchTranslateMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{15, 0} +} + +// Enumerates the possible states that the creation request can be in. +type CreateGlossaryMetadata_State int32 + +const ( + // Invalid. + CreateGlossaryMetadata_STATE_UNSPECIFIED CreateGlossaryMetadata_State = 0 + // Request is being processed. + CreateGlossaryMetadata_RUNNING CreateGlossaryMetadata_State = 1 + // The glossary has been successfully created. + CreateGlossaryMetadata_SUCCEEDED CreateGlossaryMetadata_State = 2 + // Failed to create the glossary. + CreateGlossaryMetadata_FAILED CreateGlossaryMetadata_State = 3 + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CreateGlossaryMetadata_CANCELLING CreateGlossaryMetadata_State = 4 + // The glossary creation request has been successfully canceled. + CreateGlossaryMetadata_CANCELLED CreateGlossaryMetadata_State = 5 +) + +var CreateGlossaryMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "SUCCEEDED", + 3: "FAILED", + 4: "CANCELLING", + 5: "CANCELLED", +} +var CreateGlossaryMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "SUCCEEDED": 2, + "FAILED": 3, + "CANCELLING": 4, + "CANCELLED": 5, +} + +func (x CreateGlossaryMetadata_State) String() string { + return proto.EnumName(CreateGlossaryMetadata_State_name, int32(x)) +} +func (CreateGlossaryMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{24, 0} +} + +// Enumerates the possible states that the creation request can be in. +type DeleteGlossaryMetadata_State int32 + +const ( + // Invalid. + DeleteGlossaryMetadata_STATE_UNSPECIFIED DeleteGlossaryMetadata_State = 0 + // Request is being processed. + DeleteGlossaryMetadata_RUNNING DeleteGlossaryMetadata_State = 1 + // The glossary was successfully deleted. 
+ DeleteGlossaryMetadata_SUCCEEDED DeleteGlossaryMetadata_State = 2 + // Failed to delete the glossary. + DeleteGlossaryMetadata_FAILED DeleteGlossaryMetadata_State = 3 + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + DeleteGlossaryMetadata_CANCELLING DeleteGlossaryMetadata_State = 4 + // The glossary deletion request has been successfully canceled. + DeleteGlossaryMetadata_CANCELLED DeleteGlossaryMetadata_State = 5 +) + +var DeleteGlossaryMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "RUNNING", + 2: "SUCCEEDED", + 3: "FAILED", + 4: "CANCELLING", + 5: "CANCELLED", +} +var DeleteGlossaryMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "RUNNING": 1, + "SUCCEEDED": 2, + "FAILED": 3, + "CANCELLING": 4, + "CANCELLED": 5, +} + +func (x DeleteGlossaryMetadata_State) String() string { + return proto.EnumName(DeleteGlossaryMetadata_State_name, int32(x)) +} +func (DeleteGlossaryMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{25, 0} +} + +// Configures which glossary should be used for a specific target language, +// and defines options for applying that glossary. +type TranslateTextGlossaryConfig struct { + // Required. Specifies the glossary used for this translation. Use + // this format: projects/*/locations/*/glossaries/* + Glossary string `protobuf:"bytes,1,opt,name=glossary,proto3" json:"glossary,omitempty"` + // Optional. Indicates whether we should do a case-insensitive match. + // Default value is false if missing. + IgnoreCase bool `protobuf:"varint,2,opt,name=ignore_case,json=ignoreCase,proto3" json:"ignore_case,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslateTextGlossaryConfig) Reset() { *m = TranslateTextGlossaryConfig{} } +func (m *TranslateTextGlossaryConfig) String() string { return proto.CompactTextString(m) } +func (*TranslateTextGlossaryConfig) ProtoMessage() {} +func (*TranslateTextGlossaryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{0} +} +func (m *TranslateTextGlossaryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslateTextGlossaryConfig.Unmarshal(m, b) +} +func (m *TranslateTextGlossaryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslateTextGlossaryConfig.Marshal(b, m, deterministic) +} +func (dst *TranslateTextGlossaryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslateTextGlossaryConfig.Merge(dst, src) +} +func (m *TranslateTextGlossaryConfig) XXX_Size() int { + return xxx_messageInfo_TranslateTextGlossaryConfig.Size(m) +} +func (m *TranslateTextGlossaryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TranslateTextGlossaryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslateTextGlossaryConfig proto.InternalMessageInfo + +func (m *TranslateTextGlossaryConfig) GetGlossary() string { + if m != nil { + return m.Glossary + } + return "" +} + +func (m *TranslateTextGlossaryConfig) GetIgnoreCase() bool { + if m != nil { + return m.IgnoreCase + } + return false +} + +// The request message for synchronous translation. +type TranslateTextRequest struct { + // Required. The content of the input in string format. + // We recommend the total contents to be less than 30k codepoints. + // Please use BatchTranslateText for larger text. 
+ Contents []string `protobuf:"bytes,1,rep,name=contents,proto3" json:"contents,omitempty"` + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type is assumed to be "text/html". + MimeType string `protobuf:"bytes,3,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Optional. The BCP-47 language code of the input text if + // known, for example, "en-US" or "sr-Latn". Supported language codes are + // listed in Language Support. If the source language isn't specified, the API + // attempts to identify the source language automatically and returns the + // the source language within the response. + SourceLanguageCode string `protobuf:"bytes,4,opt,name=source_language_code,json=sourceLanguageCode,proto3" json:"source_language_code,omitempty"` + // Required. The BCP-47 language code to use for translation of the input + // text, set to one of the language codes listed in Language Support. + TargetLanguageCode string `protobuf:"bytes,5,opt,name=target_language_code,json=targetLanguageCode,proto3" json:"target_language_code,omitempty"` + // Optional. Only used when making regionalized call. + // Format: + // projects/{project-id}/locations/{location-id}. + // + // Only custom model/glossary within the same location-id can be used. + // Otherwise 400 is returned. + Parent string `protobuf:"bytes,8,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The `model` type requested for this translation. + // + // The format depends on model type: + // 1. Custom models: + // projects/{project-id}/locations/{location-id}/models/{model-id}. + // 2. General (built-in) models: + // projects/{project-id}/locations/{location-id}/models/general/nmt + // projects/{project-id}/locations/{location-id}/models/general/base + // + // For global (non-regionalized) requests, use {location-id} 'global'. + // For example, + // projects/{project-id}/locations/global/models/general/nmt + // + // If missing, the system decides which google base model to use. + Model string `protobuf:"bytes,6,opt,name=model,proto3" json:"model,omitempty"` + // Optional. Glossary to be applied. The glossary needs to be in the same + // region as the model, otherwise an INVALID_ARGUMENT error is returned. 
+ GlossaryConfig *TranslateTextGlossaryConfig `protobuf:"bytes,7,opt,name=glossary_config,json=glossaryConfig,proto3" json:"glossary_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslateTextRequest) Reset() { *m = TranslateTextRequest{} } +func (m *TranslateTextRequest) String() string { return proto.CompactTextString(m) } +func (*TranslateTextRequest) ProtoMessage() {} +func (*TranslateTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{1} +} +func (m *TranslateTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslateTextRequest.Unmarshal(m, b) +} +func (m *TranslateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslateTextRequest.Marshal(b, m, deterministic) +} +func (dst *TranslateTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslateTextRequest.Merge(dst, src) +} +func (m *TranslateTextRequest) XXX_Size() int { + return xxx_messageInfo_TranslateTextRequest.Size(m) +} +func (m *TranslateTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TranslateTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslateTextRequest proto.InternalMessageInfo + +func (m *TranslateTextRequest) GetContents() []string { + if m != nil { + return m.Contents + } + return nil +} + +func (m *TranslateTextRequest) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +func (m *TranslateTextRequest) GetSourceLanguageCode() string { + if m != nil { + return m.SourceLanguageCode + } + return "" +} + +func (m *TranslateTextRequest) GetTargetLanguageCode() string { + if m != nil { + return m.TargetLanguageCode + } + return "" +} + +func (m *TranslateTextRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *TranslateTextRequest) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *TranslateTextRequest) GetGlossaryConfig() *TranslateTextGlossaryConfig { + if m != nil { + return m.GlossaryConfig + } + return nil +} + +// The main language translation response message. +type TranslateTextResponse struct { + // Text translation responses with no glossary applied. + // This field has the same length as `contents` in TranslateTextRequest. + Translations []*Translation `protobuf:"bytes,1,rep,name=translations,proto3" json:"translations,omitempty"` + // Text translation responses if a glossary is provided in the request. + // This could be the same as 'translation' above if no terms apply. + // This field has the same length as `contents` in TranslateTextRequest. 
+ GlossaryTranslations []*Translation `protobuf:"bytes,3,rep,name=glossary_translations,json=glossaryTranslations,proto3" json:"glossary_translations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TranslateTextResponse) Reset() { *m = TranslateTextResponse{} } +func (m *TranslateTextResponse) String() string { return proto.CompactTextString(m) } +func (*TranslateTextResponse) ProtoMessage() {} +func (*TranslateTextResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{2} +} +func (m *TranslateTextResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TranslateTextResponse.Unmarshal(m, b) +} +func (m *TranslateTextResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TranslateTextResponse.Marshal(b, m, deterministic) +} +func (dst *TranslateTextResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TranslateTextResponse.Merge(dst, src) +} +func (m *TranslateTextResponse) XXX_Size() int { + return xxx_messageInfo_TranslateTextResponse.Size(m) +} +func (m *TranslateTextResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TranslateTextResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TranslateTextResponse proto.InternalMessageInfo + +func (m *TranslateTextResponse) GetTranslations() []*Translation { + if m != nil { + return m.Translations + } + return nil +} + +func (m *TranslateTextResponse) GetGlossaryTranslations() []*Translation { + if m != nil { + return m.GlossaryTranslations + } + return nil +} + +// A single translation response. +type Translation struct { + // Text translated into the target language. + TranslatedText string `protobuf:"bytes,1,opt,name=translated_text,json=translatedText,proto3" json:"translated_text,omitempty"` + // Only present when `model` is present in the request. + // This is same as `model` provided in the request. + Model string `protobuf:"bytes,2,opt,name=model,proto3" json:"model,omitempty"` + // The BCP-47 language code of source text in the initial request, detected + // automatically, if no source language was passed within the initial + // request. If the source language was passed, auto-detection of the language + // does not occur and this field will be empty. + DetectedLanguageCode string `protobuf:"bytes,4,opt,name=detected_language_code,json=detectedLanguageCode,proto3" json:"detected_language_code,omitempty"` + // The `glossary_config` used for this translation. 
+ GlossaryConfig *TranslateTextGlossaryConfig `protobuf:"bytes,3,opt,name=glossary_config,json=glossaryConfig,proto3" json:"glossary_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Translation) Reset() { *m = Translation{} } +func (m *Translation) String() string { return proto.CompactTextString(m) } +func (*Translation) ProtoMessage() {} +func (*Translation) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{3} +} +func (m *Translation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Translation.Unmarshal(m, b) +} +func (m *Translation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Translation.Marshal(b, m, deterministic) +} +func (dst *Translation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Translation.Merge(dst, src) +} +func (m *Translation) XXX_Size() int { + return xxx_messageInfo_Translation.Size(m) +} +func (m *Translation) XXX_DiscardUnknown() { + xxx_messageInfo_Translation.DiscardUnknown(m) +} + +var xxx_messageInfo_Translation proto.InternalMessageInfo + +func (m *Translation) GetTranslatedText() string { + if m != nil { + return m.TranslatedText + } + return "" +} + +func (m *Translation) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *Translation) GetDetectedLanguageCode() string { + if m != nil { + return m.DetectedLanguageCode + } + return "" +} + +func (m *Translation) GetGlossaryConfig() *TranslateTextGlossaryConfig { + if m != nil { + return m.GlossaryConfig + } + return nil +} + +// The request message for language detection. +type DetectLanguageRequest struct { + // Optional. Only used when making regionalized call. + // Format: + // projects/{project-id}/locations/{location-id}. + // + // Only custom model within the same location-id can be used. + // Otherwise 400 is returned. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language detection model to be used. + // projects/{project-id}/locations/{location-id}/models/language-detection/{model-id} + // If not specified, default will be used. + Model string `protobuf:"bytes,4,opt,name=model,proto3" json:"model,omitempty"` + // Required. The source of the document from which to detect the language. + // + // Types that are valid to be assigned to Source: + // *DetectLanguageRequest_Content + Source isDetectLanguageRequest_Source `protobuf_oneof:"source"` + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type is assumed to be "text/html". 
+ MimeType string `protobuf:"bytes,3,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectLanguageRequest) Reset() { *m = DetectLanguageRequest{} } +func (m *DetectLanguageRequest) String() string { return proto.CompactTextString(m) } +func (*DetectLanguageRequest) ProtoMessage() {} +func (*DetectLanguageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{4} +} +func (m *DetectLanguageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectLanguageRequest.Unmarshal(m, b) +} +func (m *DetectLanguageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectLanguageRequest.Marshal(b, m, deterministic) +} +func (dst *DetectLanguageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectLanguageRequest.Merge(dst, src) +} +func (m *DetectLanguageRequest) XXX_Size() int { + return xxx_messageInfo_DetectLanguageRequest.Size(m) +} +func (m *DetectLanguageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DetectLanguageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectLanguageRequest proto.InternalMessageInfo + +func (m *DetectLanguageRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *DetectLanguageRequest) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +type isDetectLanguageRequest_Source interface { + isDetectLanguageRequest_Source() +} + +type DetectLanguageRequest_Content struct { + Content string `protobuf:"bytes,1,opt,name=content,proto3,oneof"` +} + +func (*DetectLanguageRequest_Content) isDetectLanguageRequest_Source() {} + +func (m *DetectLanguageRequest) GetSource() isDetectLanguageRequest_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *DetectLanguageRequest) GetContent() string { + if x, ok := m.GetSource().(*DetectLanguageRequest_Content); ok { + return x.Content + } + return "" +} + +func (m *DetectLanguageRequest) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DetectLanguageRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DetectLanguageRequest_OneofMarshaler, _DetectLanguageRequest_OneofUnmarshaler, _DetectLanguageRequest_OneofSizer, []interface{}{ + (*DetectLanguageRequest_Content)(nil), + } +} + +func _DetectLanguageRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DetectLanguageRequest) + // source + switch x := m.Source.(type) { + case *DetectLanguageRequest_Content: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Content) + case nil: + default: + return fmt.Errorf("DetectLanguageRequest.Source has unexpected type %T", x) + } + return nil +} + +func _DetectLanguageRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DetectLanguageRequest) + switch tag { + case 1: // source.content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Source = &DetectLanguageRequest_Content{x} + return true, err + default: + return false, nil + } +} + +func _DetectLanguageRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DetectLanguageRequest) + // source + switch x := m.Source.(type) { + case *DetectLanguageRequest_Content: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Content))) + n += len(x.Content) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response message for language detection. +type DetectedLanguage struct { + // The BCP-47 language code of source content in the request, detected + // automatically. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // The confidence of the detection result for this language. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectedLanguage) Reset() { *m = DetectedLanguage{} } +func (m *DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*DetectedLanguage) ProtoMessage() {} +func (*DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{5} +} +func (m *DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectedLanguage.Unmarshal(m, b) +} +func (m *DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectedLanguage.Merge(dst, src) +} +func (m *DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_DetectedLanguage.Size(m) +} +func (m *DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectedLanguage proto.InternalMessageInfo + +func (m *DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// The response message for language detection. 
+type DetectLanguageResponse struct { + // A list of detected languages sorted by detection confidence in descending + // order. The most probable language first. + Languages []*DetectedLanguage `protobuf:"bytes,1,rep,name=languages,proto3" json:"languages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DetectLanguageResponse) Reset() { *m = DetectLanguageResponse{} } +func (m *DetectLanguageResponse) String() string { return proto.CompactTextString(m) } +func (*DetectLanguageResponse) ProtoMessage() {} +func (*DetectLanguageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{6} +} +func (m *DetectLanguageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DetectLanguageResponse.Unmarshal(m, b) +} +func (m *DetectLanguageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DetectLanguageResponse.Marshal(b, m, deterministic) +} +func (dst *DetectLanguageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DetectLanguageResponse.Merge(dst, src) +} +func (m *DetectLanguageResponse) XXX_Size() int { + return xxx_messageInfo_DetectLanguageResponse.Size(m) +} +func (m *DetectLanguageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DetectLanguageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DetectLanguageResponse proto.InternalMessageInfo + +func (m *DetectLanguageResponse) GetLanguages() []*DetectedLanguage { + if m != nil { + return m.Languages + } + return nil +} + +// The request message for discovering supported languages. +type GetSupportedLanguagesRequest struct { + // Optional. Used for making regionalized calls. + // Format: projects/{project-id}/locations/{location-id}. + // For global calls, use projects/{project-id}/locations/global. + // If missing, the call is treated as a global call. + // + // Only custom model within the same location-id can be used. + // Otherwise 400 is returned. + Parent string `protobuf:"bytes,3,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The language to use to return localized, human readable names + // of supported languages. If missing, default language is ENGLISH. + DisplayLanguageCode string `protobuf:"bytes,1,opt,name=display_language_code,json=displayLanguageCode,proto3" json:"display_language_code,omitempty"` + // Optional. Get supported languages of this model. + // The format depends on model type: + // 1. Custom models: + // projects/{project-id}/locations/{location-id}/models/{model-id}. + // 2. General (built-in) models: + // projects/{project-id}/locations/{location-id}/models/general/nmt + // projects/{project-id}/locations/{location-id}/models/general/base + // Returns languages supported by the specified model. + // If missing, we get supported languages of Google general NMT model. 
+ Model string `protobuf:"bytes,2,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSupportedLanguagesRequest) Reset() { *m = GetSupportedLanguagesRequest{} } +func (m *GetSupportedLanguagesRequest) String() string { return proto.CompactTextString(m) } +func (*GetSupportedLanguagesRequest) ProtoMessage() {} +func (*GetSupportedLanguagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{7} +} +func (m *GetSupportedLanguagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSupportedLanguagesRequest.Unmarshal(m, b) +} +func (m *GetSupportedLanguagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSupportedLanguagesRequest.Marshal(b, m, deterministic) +} +func (dst *GetSupportedLanguagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSupportedLanguagesRequest.Merge(dst, src) +} +func (m *GetSupportedLanguagesRequest) XXX_Size() int { + return xxx_messageInfo_GetSupportedLanguagesRequest.Size(m) +} +func (m *GetSupportedLanguagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSupportedLanguagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSupportedLanguagesRequest proto.InternalMessageInfo + +func (m *GetSupportedLanguagesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GetSupportedLanguagesRequest) GetDisplayLanguageCode() string { + if m != nil { + return m.DisplayLanguageCode + } + return "" +} + +func (m *GetSupportedLanguagesRequest) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// The response message for discovering supported languages. +type SupportedLanguages struct { + // A list of supported language responses. This list contains an entry + // for each language the Translation API supports. + Languages []*SupportedLanguage `protobuf:"bytes,1,rep,name=languages,proto3" json:"languages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SupportedLanguages) Reset() { *m = SupportedLanguages{} } +func (m *SupportedLanguages) String() string { return proto.CompactTextString(m) } +func (*SupportedLanguages) ProtoMessage() {} +func (*SupportedLanguages) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{8} +} +func (m *SupportedLanguages) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SupportedLanguages.Unmarshal(m, b) +} +func (m *SupportedLanguages) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SupportedLanguages.Marshal(b, m, deterministic) +} +func (dst *SupportedLanguages) XXX_Merge(src proto.Message) { + xxx_messageInfo_SupportedLanguages.Merge(dst, src) +} +func (m *SupportedLanguages) XXX_Size() int { + return xxx_messageInfo_SupportedLanguages.Size(m) +} +func (m *SupportedLanguages) XXX_DiscardUnknown() { + xxx_messageInfo_SupportedLanguages.DiscardUnknown(m) +} + +var xxx_messageInfo_SupportedLanguages proto.InternalMessageInfo + +func (m *SupportedLanguages) GetLanguages() []*SupportedLanguage { + if m != nil { + return m.Languages + } + return nil +} + +// A single supported language response corresponds to information related +// to one supported language. 
+type SupportedLanguage struct { + // Supported language code, generally consisting of its ISO 639-1 + // identifier, for example, 'en', 'ja'. In certain cases, BCP-47 codes + // including language and region identifiers are returned (for example, + // 'zh-TW' and 'zh-CN') + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Human readable name of the language localized in the display language + // specified in the request. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Can be used as source language. + SupportSource bool `protobuf:"varint,3,opt,name=support_source,json=supportSource,proto3" json:"support_source,omitempty"` + // Can be used as target language. + SupportTarget bool `protobuf:"varint,4,opt,name=support_target,json=supportTarget,proto3" json:"support_target,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SupportedLanguage) Reset() { *m = SupportedLanguage{} } +func (m *SupportedLanguage) String() string { return proto.CompactTextString(m) } +func (*SupportedLanguage) ProtoMessage() {} +func (*SupportedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{9} +} +func (m *SupportedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SupportedLanguage.Unmarshal(m, b) +} +func (m *SupportedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SupportedLanguage.Marshal(b, m, deterministic) +} +func (dst *SupportedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_SupportedLanguage.Merge(dst, src) +} +func (m *SupportedLanguage) XXX_Size() int { + return xxx_messageInfo_SupportedLanguage.Size(m) +} +func (m *SupportedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_SupportedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_SupportedLanguage proto.InternalMessageInfo + +func (m *SupportedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *SupportedLanguage) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *SupportedLanguage) GetSupportSource() bool { + if m != nil { + return m.SupportSource + } + return false +} + +func (m *SupportedLanguage) GetSupportTarget() bool { + if m != nil { + return m.SupportTarget + } + return false +} + +// The GCS location for the input content. +type GcsSource struct { + // Required. Source data URI. For example, `gs://my_bucket/my_object`. 
+ InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{10} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +// Input configuration. +type InputConfig struct { + // Optional. Can be "text/plain" or "text/html". + // For `.tsv`, "text/html" is used if mime_type is missing. + // For `.html`, this field must be "text/html" or empty. + // For `.txt`, this field must be "text/plain" or empty. + MimeType string `protobuf:"bytes,1,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + // Required. Specify the input. + // + // Types that are valid to be assigned to Source: + // *InputConfig_GcsSource + Source isInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{11} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +func (m *InputConfig) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +type isInputConfig_Source interface { + isInputConfig_Source() +} + +type InputConfig_GcsSource struct { + GcsSource *GcsSource `protobuf:"bytes,2,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*InputConfig_GcsSource) isInputConfig_Source() {} + +func (m *InputConfig) GetSource() isInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *InputConfig) GetGcsSource() *GcsSource { + if x, ok := m.GetSource().(*InputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*InputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _InputConfig_OneofMarshaler, _InputConfig_OneofUnmarshaler, _InputConfig_OneofSizer, []interface{}{ + (*InputConfig_GcsSource)(nil), + } +} + +func _InputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("InputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _InputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*InputConfig) + switch tag { + case 2: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsSource) + err := b.DecodeMessage(msg) + m.Source = &InputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _InputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*InputConfig) + // source + switch x := m.Source.(type) { + case *InputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The GCS location for the output content +type GcsDestination struct { + // Required. There must be no files under 'output_uri_prefix'. + // 'output_uri_prefix' must end with "/". Otherwise error 400 is returned. + OutputUriPrefix string `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{12} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Output configuration. +type OutputConfig struct { + // Required. The destination of output. 
+ // + // Types that are valid to be assigned to Destination: + // *OutputConfig_GcsDestination + Destination isOutputConfig_Destination `protobuf_oneof:"destination"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{13} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +type isOutputConfig_Destination interface { + isOutputConfig_Destination() +} + +type OutputConfig_GcsDestination struct { + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3,oneof"` +} + +func (*OutputConfig_GcsDestination) isOutputConfig_Destination() {} + +func (m *OutputConfig) GetDestination() isOutputConfig_Destination { + if m != nil { + return m.Destination + } + return nil +} + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if x, ok := m.GetDestination().(*OutputConfig_GcsDestination); ok { + return x.GcsDestination + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*OutputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputConfig_OneofMarshaler, _OutputConfig_OneofUnmarshaler, _OutputConfig_OneofSizer, []interface{}{ + (*OutputConfig_GcsDestination)(nil), + } +} + +func _OutputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDestination); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputConfig.Destination has unexpected type %T", x) + } + return nil +} + +func _OutputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputConfig) + switch tag { + case 1: // destination.gcs_destination + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsDestination) + err := b.DecodeMessage(msg) + m.Destination = &OutputConfig_GcsDestination{msg} + return true, err + default: + return false, nil + } +} + +func _OutputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputConfig) + // destination + switch x := m.Destination.(type) { + case *OutputConfig_GcsDestination: + s := proto.Size(x.GcsDestination) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The batch translation request. +type BatchTranslateTextRequest struct { + // Optional. Only used when making regionalized call. + // Format: + // projects/{project-id}/locations/{location-id}. + // + // Only custom models/glossaries within the same location-id can be used. + // Otherwise 400 is returned. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. Source language code. + SourceLanguageCode string `protobuf:"bytes,2,opt,name=source_language_code,json=sourceLanguageCode,proto3" json:"source_language_code,omitempty"` + // Required. Specify up to 10 language codes here. + TargetLanguageCodes []string `protobuf:"bytes,3,rep,name=target_language_codes,json=targetLanguageCodes,proto3" json:"target_language_codes,omitempty"` + // Optional. The models to use for translation. Map's key is target language + // code. Map's value is model name. Value can be a built-in general model, + // or a custom model built by AutoML. + // + // The value format depends on model type: + // 1. Custom models: + // projects/{project-id}/locations/{location-id}/models/{model-id}. + // 2. General (built-in) models: + // projects/{project-id}/locations/{location-id}/models/general/nmt + // projects/{project-id}/locations/{location-id}/models/general/base + // + // If the map is empty or a specific model is + // not requested for a language pair, then default google model is used. + Models map[string]string `protobuf:"bytes,4,rep,name=models,proto3" json:"models,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. Input configurations. + // The total number of files matched should be <= 1000. + // The total content size should be <= 100M Unicode codepoints. + // The files must use UTF-8 encoding. 
+ InputConfigs []*InputConfig `protobuf:"bytes,5,rep,name=input_configs,json=inputConfigs,proto3" json:"input_configs,omitempty"` + // Required. Output configuration. + // If 2 input configs match to the same file (that is, same input path), + // we don't generate output for duplicate inputs. + OutputConfig *OutputConfig `protobuf:"bytes,6,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + // Optional. Glossaries to be applied for translation. + // It's keyed by target language code. + Glossaries map[string]*TranslateTextGlossaryConfig `protobuf:"bytes,7,rep,name=glossaries,proto3" json:"glossaries,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchTranslateTextRequest) Reset() { *m = BatchTranslateTextRequest{} } +func (m *BatchTranslateTextRequest) String() string { return proto.CompactTextString(m) } +func (*BatchTranslateTextRequest) ProtoMessage() {} +func (*BatchTranslateTextRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{14} +} +func (m *BatchTranslateTextRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchTranslateTextRequest.Unmarshal(m, b) +} +func (m *BatchTranslateTextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchTranslateTextRequest.Marshal(b, m, deterministic) +} +func (dst *BatchTranslateTextRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchTranslateTextRequest.Merge(dst, src) +} +func (m *BatchTranslateTextRequest) XXX_Size() int { + return xxx_messageInfo_BatchTranslateTextRequest.Size(m) +} +func (m *BatchTranslateTextRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchTranslateTextRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchTranslateTextRequest proto.InternalMessageInfo + +func (m *BatchTranslateTextRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchTranslateTextRequest) GetSourceLanguageCode() string { + if m != nil { + return m.SourceLanguageCode + } + return "" +} + +func (m *BatchTranslateTextRequest) GetTargetLanguageCodes() []string { + if m != nil { + return m.TargetLanguageCodes + } + return nil +} + +func (m *BatchTranslateTextRequest) GetModels() map[string]string { + if m != nil { + return m.Models + } + return nil +} + +func (m *BatchTranslateTextRequest) GetInputConfigs() []*InputConfig { + if m != nil { + return m.InputConfigs + } + return nil +} + +func (m *BatchTranslateTextRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +func (m *BatchTranslateTextRequest) GetGlossaries() map[string]*TranslateTextGlossaryConfig { + if m != nil { + return m.Glossaries + } + return nil +} + +// State metadata for the batch translation operation. +type BatchTranslateMetadata struct { + // The state of the operation. + State BatchTranslateMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.translation.v3beta1.BatchTranslateMetadata_State" json:"state,omitempty"` + // Number of successfully translated characters so far (Unicode codepoints). + TranslatedCharacters int64 `protobuf:"varint,2,opt,name=translated_characters,json=translatedCharacters,proto3" json:"translated_characters,omitempty"` + // Number of characters that have failed to process so far (Unicode + // codepoints). 
+ FailedCharacters int64 `protobuf:"varint,3,opt,name=failed_characters,json=failedCharacters,proto3" json:"failed_characters,omitempty"` + // Total number of characters (Unicode codepoints). + // This is the total number of codepoints from input files times the number of + // target languages. It appears here shortly after the call is submitted. + TotalCharacters int64 `protobuf:"varint,4,opt,name=total_characters,json=totalCharacters,proto3" json:"total_characters,omitempty"` + // Time when the operation was submitted. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchTranslateMetadata) Reset() { *m = BatchTranslateMetadata{} } +func (m *BatchTranslateMetadata) String() string { return proto.CompactTextString(m) } +func (*BatchTranslateMetadata) ProtoMessage() {} +func (*BatchTranslateMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{15} +} +func (m *BatchTranslateMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchTranslateMetadata.Unmarshal(m, b) +} +func (m *BatchTranslateMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchTranslateMetadata.Marshal(b, m, deterministic) +} +func (dst *BatchTranslateMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchTranslateMetadata.Merge(dst, src) +} +func (m *BatchTranslateMetadata) XXX_Size() int { + return xxx_messageInfo_BatchTranslateMetadata.Size(m) +} +func (m *BatchTranslateMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BatchTranslateMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchTranslateMetadata proto.InternalMessageInfo + +func (m *BatchTranslateMetadata) GetState() BatchTranslateMetadata_State { + if m != nil { + return m.State + } + return BatchTranslateMetadata_STATE_UNSPECIFIED +} + +func (m *BatchTranslateMetadata) GetTranslatedCharacters() int64 { + if m != nil { + return m.TranslatedCharacters + } + return 0 +} + +func (m *BatchTranslateMetadata) GetFailedCharacters() int64 { + if m != nil { + return m.FailedCharacters + } + return 0 +} + +func (m *BatchTranslateMetadata) GetTotalCharacters() int64 { + if m != nil { + return m.TotalCharacters + } + return 0 +} + +func (m *BatchTranslateMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +// Stored in the [google.longrunning.Operation.response][google.longrunning.Operation.response] field returned by +// BatchTranslateText if at least one sentence is translated successfully. +type BatchTranslateResponse struct { + // Total number of characters (Unicode codepoints). + TotalCharacters int64 `protobuf:"varint,1,opt,name=total_characters,json=totalCharacters,proto3" json:"total_characters,omitempty"` + // Number of successfully translated characters (Unicode codepoints). + TranslatedCharacters int64 `protobuf:"varint,2,opt,name=translated_characters,json=translatedCharacters,proto3" json:"translated_characters,omitempty"` + // Number of characters that have failed to process (Unicode codepoints). + FailedCharacters int64 `protobuf:"varint,3,opt,name=failed_characters,json=failedCharacters,proto3" json:"failed_characters,omitempty"` + // Time when the operation was submitted. 
+ SubmitTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // The time when the operation is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is set to true. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchTranslateResponse) Reset() { *m = BatchTranslateResponse{} } +func (m *BatchTranslateResponse) String() string { return proto.CompactTextString(m) } +func (*BatchTranslateResponse) ProtoMessage() {} +func (*BatchTranslateResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{16} +} +func (m *BatchTranslateResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchTranslateResponse.Unmarshal(m, b) +} +func (m *BatchTranslateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchTranslateResponse.Marshal(b, m, deterministic) +} +func (dst *BatchTranslateResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchTranslateResponse.Merge(dst, src) +} +func (m *BatchTranslateResponse) XXX_Size() int { + return xxx_messageInfo_BatchTranslateResponse.Size(m) +} +func (m *BatchTranslateResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchTranslateResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchTranslateResponse proto.InternalMessageInfo + +func (m *BatchTranslateResponse) GetTotalCharacters() int64 { + if m != nil { + return m.TotalCharacters + } + return 0 +} + +func (m *BatchTranslateResponse) GetTranslatedCharacters() int64 { + if m != nil { + return m.TranslatedCharacters + } + return 0 +} + +func (m *BatchTranslateResponse) GetFailedCharacters() int64 { + if m != nil { + return m.FailedCharacters + } + return 0 +} + +func (m *BatchTranslateResponse) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *BatchTranslateResponse) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Input configuration for glossaries. +type GlossaryInputConfig struct { + // Required. Specify the input. 
+ // + // Types that are valid to be assigned to Source: + // *GlossaryInputConfig_GcsSource + Source isGlossaryInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GlossaryInputConfig) Reset() { *m = GlossaryInputConfig{} } +func (m *GlossaryInputConfig) String() string { return proto.CompactTextString(m) } +func (*GlossaryInputConfig) ProtoMessage() {} +func (*GlossaryInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{17} +} +func (m *GlossaryInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GlossaryInputConfig.Unmarshal(m, b) +} +func (m *GlossaryInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GlossaryInputConfig.Marshal(b, m, deterministic) +} +func (dst *GlossaryInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_GlossaryInputConfig.Merge(dst, src) +} +func (m *GlossaryInputConfig) XXX_Size() int { + return xxx_messageInfo_GlossaryInputConfig.Size(m) +} +func (m *GlossaryInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_GlossaryInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_GlossaryInputConfig proto.InternalMessageInfo + +type isGlossaryInputConfig_Source interface { + isGlossaryInputConfig_Source() +} + +type GlossaryInputConfig_GcsSource struct { + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*GlossaryInputConfig_GcsSource) isGlossaryInputConfig_Source() {} + +func (m *GlossaryInputConfig) GetSource() isGlossaryInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *GlossaryInputConfig) GetGcsSource() *GcsSource { + if x, ok := m.GetSource().(*GlossaryInputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GlossaryInputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GlossaryInputConfig_OneofMarshaler, _GlossaryInputConfig_OneofUnmarshaler, _GlossaryInputConfig_OneofSizer, []interface{}{ + (*GlossaryInputConfig_GcsSource)(nil), + } +} + +func _GlossaryInputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GlossaryInputConfig) + // source + switch x := m.Source.(type) { + case *GlossaryInputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GlossaryInputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _GlossaryInputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GlossaryInputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsSource) + err := b.DecodeMessage(msg) + m.Source = &GlossaryInputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _GlossaryInputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GlossaryInputConfig) + // source + switch x := m.Source.(type) { + case *GlossaryInputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a glossary built from user provided data. +type Glossary struct { + // Required. The resource name of the glossary. Glossary names have the form + // `projects/{project-id}/locations/{location-id}/glossaries/{glossary-id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Languages supported by the glossary. + // + // Types that are valid to be assigned to Languages: + // *Glossary_LanguagePair + // *Glossary_LanguageCodesSet_ + Languages isGlossary_Languages `protobuf_oneof:"languages"` + // Required. Provides examples to build the glossary from. + // Total glossary must not exceed 10M Unicode codepoints. + InputConfig *GlossaryInputConfig `protobuf:"bytes,5,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Output only. The number of entries defined in the glossary. + EntryCount int32 `protobuf:"varint,6,opt,name=entry_count,json=entryCount,proto3" json:"entry_count,omitempty"` + // Output only. When CreateGlossary was called. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // Output only. When the glossary creation was finished. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Glossary) Reset() { *m = Glossary{} } +func (m *Glossary) String() string { return proto.CompactTextString(m) } +func (*Glossary) ProtoMessage() {} +func (*Glossary) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{18} +} +func (m *Glossary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Glossary.Unmarshal(m, b) +} +func (m *Glossary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Glossary.Marshal(b, m, deterministic) +} +func (dst *Glossary) XXX_Merge(src proto.Message) { + xxx_messageInfo_Glossary.Merge(dst, src) +} +func (m *Glossary) XXX_Size() int { + return xxx_messageInfo_Glossary.Size(m) +} +func (m *Glossary) XXX_DiscardUnknown() { + xxx_messageInfo_Glossary.DiscardUnknown(m) +} + +var xxx_messageInfo_Glossary proto.InternalMessageInfo + +func (m *Glossary) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isGlossary_Languages interface { + isGlossary_Languages() +} + +type Glossary_LanguagePair struct { + LanguagePair *Glossary_LanguageCodePair `protobuf:"bytes,3,opt,name=language_pair,json=languagePair,proto3,oneof"` +} + +type Glossary_LanguageCodesSet_ struct { + LanguageCodesSet *Glossary_LanguageCodesSet `protobuf:"bytes,4,opt,name=language_codes_set,json=languageCodesSet,proto3,oneof"` +} + +func (*Glossary_LanguagePair) isGlossary_Languages() {} + +func (*Glossary_LanguageCodesSet_) isGlossary_Languages() {} + +func (m *Glossary) GetLanguages() isGlossary_Languages { + if m != nil { + return m.Languages + } + return nil +} + +func (m *Glossary) GetLanguagePair() *Glossary_LanguageCodePair { + if x, ok := m.GetLanguages().(*Glossary_LanguagePair); ok { + return x.LanguagePair + } + return nil +} + +func (m *Glossary) GetLanguageCodesSet() *Glossary_LanguageCodesSet { + if x, ok := m.GetLanguages().(*Glossary_LanguageCodesSet_); ok { + return x.LanguageCodesSet + } + return nil +} + +func (m *Glossary) GetInputConfig() *GlossaryInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *Glossary) GetEntryCount() int32 { + if m != nil { + return m.EntryCount + } + return 0 +} + +func (m *Glossary) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *Glossary) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Glossary) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Glossary_OneofMarshaler, _Glossary_OneofUnmarshaler, _Glossary_OneofSizer, []interface{}{ + (*Glossary_LanguagePair)(nil), + (*Glossary_LanguageCodesSet_)(nil), + } +} + +func _Glossary_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Glossary) + // languages + switch x := m.Languages.(type) { + case *Glossary_LanguagePair: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LanguagePair); err != nil { + return err + } + case *Glossary_LanguageCodesSet_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LanguageCodesSet); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Glossary.Languages has unexpected type %T", x) + } + return nil +} + +func _Glossary_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Glossary) + switch tag { + case 3: // languages.language_pair + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Glossary_LanguageCodePair) + err := b.DecodeMessage(msg) + m.Languages = &Glossary_LanguagePair{msg} + return true, err + case 4: // languages.language_codes_set + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Glossary_LanguageCodesSet) + err := b.DecodeMessage(msg) + m.Languages = &Glossary_LanguageCodesSet_{msg} + return true, err + default: + return false, nil + } +} + +func _Glossary_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Glossary) + // languages + switch x := m.Languages.(type) { + case *Glossary_LanguagePair: + s := proto.Size(x.LanguagePair) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Glossary_LanguageCodesSet_: + s := proto.Size(x.LanguageCodesSet) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Used with UNIDIRECTIONAL. +type Glossary_LanguageCodePair struct { + // Required. The BCP-47 language code of the input text, for example, + // "en-US". Expected to be an exact match for GlossaryTerm.language_code. + SourceLanguageCode string `protobuf:"bytes,1,opt,name=source_language_code,json=sourceLanguageCode,proto3" json:"source_language_code,omitempty"` + // Required. The BCP-47 language code for translation output, for example, + // "zh-CN". Expected to be an exact match for GlossaryTerm.language_code. 
+ TargetLanguageCode string `protobuf:"bytes,2,opt,name=target_language_code,json=targetLanguageCode,proto3" json:"target_language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Glossary_LanguageCodePair) Reset() { *m = Glossary_LanguageCodePair{} } +func (m *Glossary_LanguageCodePair) String() string { return proto.CompactTextString(m) } +func (*Glossary_LanguageCodePair) ProtoMessage() {} +func (*Glossary_LanguageCodePair) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{18, 0} +} +func (m *Glossary_LanguageCodePair) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Glossary_LanguageCodePair.Unmarshal(m, b) +} +func (m *Glossary_LanguageCodePair) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Glossary_LanguageCodePair.Marshal(b, m, deterministic) +} +func (dst *Glossary_LanguageCodePair) XXX_Merge(src proto.Message) { + xxx_messageInfo_Glossary_LanguageCodePair.Merge(dst, src) +} +func (m *Glossary_LanguageCodePair) XXX_Size() int { + return xxx_messageInfo_Glossary_LanguageCodePair.Size(m) +} +func (m *Glossary_LanguageCodePair) XXX_DiscardUnknown() { + xxx_messageInfo_Glossary_LanguageCodePair.DiscardUnknown(m) +} + +var xxx_messageInfo_Glossary_LanguageCodePair proto.InternalMessageInfo + +func (m *Glossary_LanguageCodePair) GetSourceLanguageCode() string { + if m != nil { + return m.SourceLanguageCode + } + return "" +} + +func (m *Glossary_LanguageCodePair) GetTargetLanguageCode() string { + if m != nil { + return m.TargetLanguageCode + } + return "" +} + +// Used with EQUIVALENT_TERMS_SET. +type Glossary_LanguageCodesSet struct { + // The BCP-47 language code(s) for terms defined in the glossary. + // All entries are unique. The list contains at least two entries. + // Expected to be an exact match for GlossaryTerm.language_code. + LanguageCodes []string `protobuf:"bytes,1,rep,name=language_codes,json=languageCodes,proto3" json:"language_codes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Glossary_LanguageCodesSet) Reset() { *m = Glossary_LanguageCodesSet{} } +func (m *Glossary_LanguageCodesSet) String() string { return proto.CompactTextString(m) } +func (*Glossary_LanguageCodesSet) ProtoMessage() {} +func (*Glossary_LanguageCodesSet) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{18, 1} +} +func (m *Glossary_LanguageCodesSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Glossary_LanguageCodesSet.Unmarshal(m, b) +} +func (m *Glossary_LanguageCodesSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Glossary_LanguageCodesSet.Marshal(b, m, deterministic) +} +func (dst *Glossary_LanguageCodesSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_Glossary_LanguageCodesSet.Merge(dst, src) +} +func (m *Glossary_LanguageCodesSet) XXX_Size() int { + return xxx_messageInfo_Glossary_LanguageCodesSet.Size(m) +} +func (m *Glossary_LanguageCodesSet) XXX_DiscardUnknown() { + xxx_messageInfo_Glossary_LanguageCodesSet.DiscardUnknown(m) +} + +var xxx_messageInfo_Glossary_LanguageCodesSet proto.InternalMessageInfo + +func (m *Glossary_LanguageCodesSet) GetLanguageCodes() []string { + if m != nil { + return m.LanguageCodes + } + return nil +} + +// Request message for CreateGlossary. 
+type CreateGlossaryRequest struct { + // Required. The project name. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The glossary to create. + Glossary *Glossary `protobuf:"bytes,2,opt,name=glossary,proto3" json:"glossary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateGlossaryRequest) Reset() { *m = CreateGlossaryRequest{} } +func (m *CreateGlossaryRequest) String() string { return proto.CompactTextString(m) } +func (*CreateGlossaryRequest) ProtoMessage() {} +func (*CreateGlossaryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{19} +} +func (m *CreateGlossaryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateGlossaryRequest.Unmarshal(m, b) +} +func (m *CreateGlossaryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateGlossaryRequest.Marshal(b, m, deterministic) +} +func (dst *CreateGlossaryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateGlossaryRequest.Merge(dst, src) +} +func (m *CreateGlossaryRequest) XXX_Size() int { + return xxx_messageInfo_CreateGlossaryRequest.Size(m) +} +func (m *CreateGlossaryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateGlossaryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateGlossaryRequest proto.InternalMessageInfo + +func (m *CreateGlossaryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateGlossaryRequest) GetGlossary() *Glossary { + if m != nil { + return m.Glossary + } + return nil +} + +// Request message for GetGlossary. +type GetGlossaryRequest struct { + // Required. The name of the glossary to retrieve. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGlossaryRequest) Reset() { *m = GetGlossaryRequest{} } +func (m *GetGlossaryRequest) String() string { return proto.CompactTextString(m) } +func (*GetGlossaryRequest) ProtoMessage() {} +func (*GetGlossaryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{20} +} +func (m *GetGlossaryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGlossaryRequest.Unmarshal(m, b) +} +func (m *GetGlossaryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGlossaryRequest.Marshal(b, m, deterministic) +} +func (dst *GetGlossaryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGlossaryRequest.Merge(dst, src) +} +func (m *GetGlossaryRequest) XXX_Size() int { + return xxx_messageInfo_GetGlossaryRequest.Size(m) +} +func (m *GetGlossaryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGlossaryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGlossaryRequest proto.InternalMessageInfo + +func (m *GetGlossaryRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for DeleteGlossary. +type DeleteGlossaryRequest struct { + // Required. The name of the glossary to delete. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteGlossaryRequest) Reset() { *m = DeleteGlossaryRequest{} } +func (m *DeleteGlossaryRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteGlossaryRequest) ProtoMessage() {} +func (*DeleteGlossaryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{21} +} +func (m *DeleteGlossaryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteGlossaryRequest.Unmarshal(m, b) +} +func (m *DeleteGlossaryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteGlossaryRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteGlossaryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteGlossaryRequest.Merge(dst, src) +} +func (m *DeleteGlossaryRequest) XXX_Size() int { + return xxx_messageInfo_DeleteGlossaryRequest.Size(m) +} +func (m *DeleteGlossaryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteGlossaryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteGlossaryRequest proto.InternalMessageInfo + +func (m *DeleteGlossaryRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListGlossaries. +type ListGlossariesRequest struct { + // Required. The name of the project from which to list all of the glossaries. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Requested page size. The server may return fewer glossaries than + // requested. If unspecified, the server picks an appropriate default. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. A token identifying a page of results the server should return. + // Typically, this is the value of [ListGlossariesResponse.next_page_token] + // returned from the previous call to `ListGlossaries` method. + // The first page is returned if `page_token`is empty or missing. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. Filter specifying constraints of a list operation. + // For example, `tags.glossary_name="products*"`. + // If missing, no filtering is performed. 
+ Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGlossariesRequest) Reset() { *m = ListGlossariesRequest{} } +func (m *ListGlossariesRequest) String() string { return proto.CompactTextString(m) } +func (*ListGlossariesRequest) ProtoMessage() {} +func (*ListGlossariesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{22} +} +func (m *ListGlossariesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGlossariesRequest.Unmarshal(m, b) +} +func (m *ListGlossariesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGlossariesRequest.Marshal(b, m, deterministic) +} +func (dst *ListGlossariesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGlossariesRequest.Merge(dst, src) +} +func (m *ListGlossariesRequest) XXX_Size() int { + return xxx_messageInfo_ListGlossariesRequest.Size(m) +} +func (m *ListGlossariesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListGlossariesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGlossariesRequest proto.InternalMessageInfo + +func (m *ListGlossariesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListGlossariesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListGlossariesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListGlossariesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response message for ListGlossaries. +type ListGlossariesResponse struct { + // The list of glossaries for a project. + Glossaries []*Glossary `protobuf:"bytes,1,rep,name=glossaries,proto3" json:"glossaries,omitempty"` + // A token to retrieve a page of results. Pass this value in the + // [ListGlossariesRequest.page_token] field in the subsequent call to + // `ListGlossaries` method to retrieve the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGlossariesResponse) Reset() { *m = ListGlossariesResponse{} } +func (m *ListGlossariesResponse) String() string { return proto.CompactTextString(m) } +func (*ListGlossariesResponse) ProtoMessage() {} +func (*ListGlossariesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{23} +} +func (m *ListGlossariesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGlossariesResponse.Unmarshal(m, b) +} +func (m *ListGlossariesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGlossariesResponse.Marshal(b, m, deterministic) +} +func (dst *ListGlossariesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGlossariesResponse.Merge(dst, src) +} +func (m *ListGlossariesResponse) XXX_Size() int { + return xxx_messageInfo_ListGlossariesResponse.Size(m) +} +func (m *ListGlossariesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListGlossariesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGlossariesResponse proto.InternalMessageInfo + +func (m *ListGlossariesResponse) GetGlossaries() []*Glossary { + if m != nil { + return m.Glossaries + } + return nil +} + +func (m *ListGlossariesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] field returned by +// CreateGlossary. +type CreateGlossaryMetadata struct { + // The name of the glossary that is being created. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The current state of the glossary creation operation. + State CreateGlossaryMetadata_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.translation.v3beta1.CreateGlossaryMetadata_State" json:"state,omitempty"` + // The time when the operation was submitted to the server. 
+ SubmitTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateGlossaryMetadata) Reset() { *m = CreateGlossaryMetadata{} } +func (m *CreateGlossaryMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateGlossaryMetadata) ProtoMessage() {} +func (*CreateGlossaryMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{24} +} +func (m *CreateGlossaryMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateGlossaryMetadata.Unmarshal(m, b) +} +func (m *CreateGlossaryMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateGlossaryMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateGlossaryMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateGlossaryMetadata.Merge(dst, src) +} +func (m *CreateGlossaryMetadata) XXX_Size() int { + return xxx_messageInfo_CreateGlossaryMetadata.Size(m) +} +func (m *CreateGlossaryMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateGlossaryMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateGlossaryMetadata proto.InternalMessageInfo + +func (m *CreateGlossaryMetadata) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateGlossaryMetadata) GetState() CreateGlossaryMetadata_State { + if m != nil { + return m.State + } + return CreateGlossaryMetadata_STATE_UNSPECIFIED +} + +func (m *CreateGlossaryMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +// Stored in the [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] field returned by +// DeleteGlossary. +type DeleteGlossaryMetadata struct { + // The name of the glossary that is being deleted. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The current state of the glossary deletion operation. + State DeleteGlossaryMetadata_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.translation.v3beta1.DeleteGlossaryMetadata_State" json:"state,omitempty"` + // The time when the operation was submitted to the server. 
+ SubmitTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteGlossaryMetadata) Reset() { *m = DeleteGlossaryMetadata{} } +func (m *DeleteGlossaryMetadata) String() string { return proto.CompactTextString(m) } +func (*DeleteGlossaryMetadata) ProtoMessage() {} +func (*DeleteGlossaryMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{25} +} +func (m *DeleteGlossaryMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteGlossaryMetadata.Unmarshal(m, b) +} +func (m *DeleteGlossaryMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteGlossaryMetadata.Marshal(b, m, deterministic) +} +func (dst *DeleteGlossaryMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteGlossaryMetadata.Merge(dst, src) +} +func (m *DeleteGlossaryMetadata) XXX_Size() int { + return xxx_messageInfo_DeleteGlossaryMetadata.Size(m) +} +func (m *DeleteGlossaryMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteGlossaryMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteGlossaryMetadata proto.InternalMessageInfo + +func (m *DeleteGlossaryMetadata) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteGlossaryMetadata) GetState() DeleteGlossaryMetadata_State { + if m != nil { + return m.State + } + return DeleteGlossaryMetadata_STATE_UNSPECIFIED +} + +func (m *DeleteGlossaryMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +// Stored in the [google.longrunning.Operation.response][google.longrunning.Operation.response] field returned by +// DeleteGlossary. +type DeleteGlossaryResponse struct { + // The name of the deleted glossary. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The time when the operation was submitted to the server. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // The time when the glossary deletion is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is set to true. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteGlossaryResponse) Reset() { *m = DeleteGlossaryResponse{} } +func (m *DeleteGlossaryResponse) String() string { return proto.CompactTextString(m) } +func (*DeleteGlossaryResponse) ProtoMessage() {} +func (*DeleteGlossaryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_translation_service_7ab926c836a43926, []int{26} +} +func (m *DeleteGlossaryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteGlossaryResponse.Unmarshal(m, b) +} +func (m *DeleteGlossaryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteGlossaryResponse.Marshal(b, m, deterministic) +} +func (dst *DeleteGlossaryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteGlossaryResponse.Merge(dst, src) +} +func (m *DeleteGlossaryResponse) XXX_Size() int { + return xxx_messageInfo_DeleteGlossaryResponse.Size(m) +} +func (m *DeleteGlossaryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteGlossaryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteGlossaryResponse proto.InternalMessageInfo + +func (m *DeleteGlossaryResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteGlossaryResponse) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *DeleteGlossaryResponse) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func init() { + proto.RegisterType((*TranslateTextGlossaryConfig)(nil), "google.cloud.translation.v3beta1.TranslateTextGlossaryConfig") + proto.RegisterType((*TranslateTextRequest)(nil), "google.cloud.translation.v3beta1.TranslateTextRequest") + proto.RegisterType((*TranslateTextResponse)(nil), "google.cloud.translation.v3beta1.TranslateTextResponse") + proto.RegisterType((*Translation)(nil), "google.cloud.translation.v3beta1.Translation") + proto.RegisterType((*DetectLanguageRequest)(nil), "google.cloud.translation.v3beta1.DetectLanguageRequest") + proto.RegisterType((*DetectedLanguage)(nil), "google.cloud.translation.v3beta1.DetectedLanguage") + proto.RegisterType((*DetectLanguageResponse)(nil), "google.cloud.translation.v3beta1.DetectLanguageResponse") + proto.RegisterType((*GetSupportedLanguagesRequest)(nil), "google.cloud.translation.v3beta1.GetSupportedLanguagesRequest") + proto.RegisterType((*SupportedLanguages)(nil), "google.cloud.translation.v3beta1.SupportedLanguages") + proto.RegisterType((*SupportedLanguage)(nil), "google.cloud.translation.v3beta1.SupportedLanguage") + proto.RegisterType((*GcsSource)(nil), "google.cloud.translation.v3beta1.GcsSource") + proto.RegisterType((*InputConfig)(nil), "google.cloud.translation.v3beta1.InputConfig") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.translation.v3beta1.GcsDestination") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.translation.v3beta1.OutputConfig") + proto.RegisterType((*BatchTranslateTextRequest)(nil), "google.cloud.translation.v3beta1.BatchTranslateTextRequest") + proto.RegisterMapType((map[string]*TranslateTextGlossaryConfig)(nil), "google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.cloud.translation.v3beta1.BatchTranslateTextRequest.ModelsEntry") + 
proto.RegisterType((*BatchTranslateMetadata)(nil), "google.cloud.translation.v3beta1.BatchTranslateMetadata") + proto.RegisterType((*BatchTranslateResponse)(nil), "google.cloud.translation.v3beta1.BatchTranslateResponse") + proto.RegisterType((*GlossaryInputConfig)(nil), "google.cloud.translation.v3beta1.GlossaryInputConfig") + proto.RegisterType((*Glossary)(nil), "google.cloud.translation.v3beta1.Glossary") + proto.RegisterType((*Glossary_LanguageCodePair)(nil), "google.cloud.translation.v3beta1.Glossary.LanguageCodePair") + proto.RegisterType((*Glossary_LanguageCodesSet)(nil), "google.cloud.translation.v3beta1.Glossary.LanguageCodesSet") + proto.RegisterType((*CreateGlossaryRequest)(nil), "google.cloud.translation.v3beta1.CreateGlossaryRequest") + proto.RegisterType((*GetGlossaryRequest)(nil), "google.cloud.translation.v3beta1.GetGlossaryRequest") + proto.RegisterType((*DeleteGlossaryRequest)(nil), "google.cloud.translation.v3beta1.DeleteGlossaryRequest") + proto.RegisterType((*ListGlossariesRequest)(nil), "google.cloud.translation.v3beta1.ListGlossariesRequest") + proto.RegisterType((*ListGlossariesResponse)(nil), "google.cloud.translation.v3beta1.ListGlossariesResponse") + proto.RegisterType((*CreateGlossaryMetadata)(nil), "google.cloud.translation.v3beta1.CreateGlossaryMetadata") + proto.RegisterType((*DeleteGlossaryMetadata)(nil), "google.cloud.translation.v3beta1.DeleteGlossaryMetadata") + proto.RegisterType((*DeleteGlossaryResponse)(nil), "google.cloud.translation.v3beta1.DeleteGlossaryResponse") + proto.RegisterEnum("google.cloud.translation.v3beta1.BatchTranslateMetadata_State", BatchTranslateMetadata_State_name, BatchTranslateMetadata_State_value) + proto.RegisterEnum("google.cloud.translation.v3beta1.CreateGlossaryMetadata_State", CreateGlossaryMetadata_State_name, CreateGlossaryMetadata_State_value) + proto.RegisterEnum("google.cloud.translation.v3beta1.DeleteGlossaryMetadata_State", DeleteGlossaryMetadata_State_name, DeleteGlossaryMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TranslationServiceClient is the client API for TranslationService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TranslationServiceClient interface { + // Translates input text and returns translated text. + TranslateText(ctx context.Context, in *TranslateTextRequest, opts ...grpc.CallOption) (*TranslateTextResponse, error) + // Detects the language of text within a request. + DetectLanguage(ctx context.Context, in *DetectLanguageRequest, opts ...grpc.CallOption) (*DetectLanguageResponse, error) + // Returns a list of supported languages for translation. + GetSupportedLanguages(ctx context.Context, in *GetSupportedLanguagesRequest, opts ...grpc.CallOption) (*SupportedLanguages, error) + // Translates a large volume of text in asynchronous batch mode. + // This function provides real-time output as the inputs are being processed. + // If caller cancels a request, the partial results (for an input file, it's + // all or nothing) may still be available on the specified output location. 
+ // + // This call returns immediately and you can + // use google.longrunning.Operation.name to poll the status of the call. + BatchTranslateText(ctx context.Context, in *BatchTranslateTextRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates a glossary and returns the long-running operation. Returns + // NOT_FOUND, if the project doesn't exist. + CreateGlossary(ctx context.Context, in *CreateGlossaryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + // exist. + ListGlossaries(ctx context.Context, in *ListGlossariesRequest, opts ...grpc.CallOption) (*ListGlossariesResponse, error) + // Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + // exist. + GetGlossary(ctx context.Context, in *GetGlossaryRequest, opts ...grpc.CallOption) (*Glossary, error) + // Deletes a glossary, or cancels glossary construction + // if the glossary isn't created yet. + // Returns NOT_FOUND, if the glossary doesn't exist. + DeleteGlossary(ctx context.Context, in *DeleteGlossaryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type translationServiceClient struct { + cc *grpc.ClientConn +} + +func NewTranslationServiceClient(cc *grpc.ClientConn) TranslationServiceClient { + return &translationServiceClient{cc} +} + +func (c *translationServiceClient) TranslateText(ctx context.Context, in *TranslateTextRequest, opts ...grpc.CallOption) (*TranslateTextResponse, error) { + out := new(TranslateTextResponse) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/TranslateText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) DetectLanguage(ctx context.Context, in *DetectLanguageRequest, opts ...grpc.CallOption) (*DetectLanguageResponse, error) { + out := new(DetectLanguageResponse) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/DetectLanguage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) GetSupportedLanguages(ctx context.Context, in *GetSupportedLanguagesRequest, opts ...grpc.CallOption) (*SupportedLanguages, error) { + out := new(SupportedLanguages) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/GetSupportedLanguages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) BatchTranslateText(ctx context.Context, in *BatchTranslateTextRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/BatchTranslateText", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) CreateGlossary(ctx context.Context, in *CreateGlossaryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/CreateGlossary", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) ListGlossaries(ctx context.Context, in *ListGlossariesRequest, opts ...grpc.CallOption) (*ListGlossariesResponse, error) { + out := new(ListGlossariesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/ListGlossaries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) GetGlossary(ctx context.Context, in *GetGlossaryRequest, opts ...grpc.CallOption) (*Glossary, error) { + out := new(Glossary) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/GetGlossary", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *translationServiceClient) DeleteGlossary(ctx context.Context, in *DeleteGlossaryRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.translation.v3beta1.TranslationService/DeleteGlossary", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TranslationServiceServer is the server API for TranslationService service. +type TranslationServiceServer interface { + // Translates input text and returns translated text. + TranslateText(context.Context, *TranslateTextRequest) (*TranslateTextResponse, error) + // Detects the language of text within a request. + DetectLanguage(context.Context, *DetectLanguageRequest) (*DetectLanguageResponse, error) + // Returns a list of supported languages for translation. + GetSupportedLanguages(context.Context, *GetSupportedLanguagesRequest) (*SupportedLanguages, error) + // Translates a large volume of text in asynchronous batch mode. + // This function provides real-time output as the inputs are being processed. + // If caller cancels a request, the partial results (for an input file, it's + // all or nothing) may still be available on the specified output location. + // + // This call returns immediately and you can + // use google.longrunning.Operation.name to poll the status of the call. + BatchTranslateText(context.Context, *BatchTranslateTextRequest) (*longrunning.Operation, error) + // Creates a glossary and returns the long-running operation. Returns + // NOT_FOUND, if the project doesn't exist. + CreateGlossary(context.Context, *CreateGlossaryRequest) (*longrunning.Operation, error) + // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + // exist. + ListGlossaries(context.Context, *ListGlossariesRequest) (*ListGlossariesResponse, error) + // Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + // exist. + GetGlossary(context.Context, *GetGlossaryRequest) (*Glossary, error) + // Deletes a glossary, or cancels glossary construction + // if the glossary isn't created yet. + // Returns NOT_FOUND, if the glossary doesn't exist. 
+ DeleteGlossary(context.Context, *DeleteGlossaryRequest) (*longrunning.Operation, error) +} + +func RegisterTranslationServiceServer(s *grpc.Server, srv TranslationServiceServer) { + s.RegisterService(&_TranslationService_serviceDesc, srv) +} + +func _TranslationService_TranslateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TranslateTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).TranslateText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/TranslateText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).TranslateText(ctx, req.(*TranslateTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_DetectLanguage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DetectLanguageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).DetectLanguage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/DetectLanguage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).DetectLanguage(ctx, req.(*DetectLanguageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_GetSupportedLanguages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSupportedLanguagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).GetSupportedLanguages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/GetSupportedLanguages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).GetSupportedLanguages(ctx, req.(*GetSupportedLanguagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_BatchTranslateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchTranslateTextRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).BatchTranslateText(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/BatchTranslateText", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).BatchTranslateText(ctx, req.(*BatchTranslateTextRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_CreateGlossary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateGlossaryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(TranslationServiceServer).CreateGlossary(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/CreateGlossary", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).CreateGlossary(ctx, req.(*CreateGlossaryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_ListGlossaries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListGlossariesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).ListGlossaries(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/ListGlossaries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).ListGlossaries(ctx, req.(*ListGlossariesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_GetGlossary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGlossaryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).GetGlossary(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/GetGlossary", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).GetGlossary(ctx, req.(*GetGlossaryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TranslationService_DeleteGlossary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteGlossaryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TranslationServiceServer).DeleteGlossary(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.translation.v3beta1.TranslationService/DeleteGlossary", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TranslationServiceServer).DeleteGlossary(ctx, req.(*DeleteGlossaryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TranslationService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.translation.v3beta1.TranslationService", + HandlerType: (*TranslationServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "TranslateText", + Handler: _TranslationService_TranslateText_Handler, + }, + { + MethodName: "DetectLanguage", + Handler: _TranslationService_DetectLanguage_Handler, + }, + { + MethodName: "GetSupportedLanguages", + Handler: _TranslationService_GetSupportedLanguages_Handler, + }, + { + MethodName: "BatchTranslateText", + Handler: _TranslationService_BatchTranslateText_Handler, + }, + { + MethodName: "CreateGlossary", + Handler: _TranslationService_CreateGlossary_Handler, + }, + { + MethodName: "ListGlossaries", + Handler: _TranslationService_ListGlossaries_Handler, + }, + { + MethodName: "GetGlossary", + Handler: _TranslationService_GetGlossary_Handler, + }, + { + MethodName: "DeleteGlossary", + Handler: 
_TranslationService_DeleteGlossary_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/translate/v3beta1/translation_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/translate/v3beta1/translation_service.proto", fileDescriptor_translation_service_7ab926c836a43926) +} + +var fileDescriptor_translation_service_7ab926c836a43926 = []byte{ + // 1986 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x59, 0xcf, 0x73, 0x23, 0x47, + 0xf5, 0xf7, 0x48, 0x96, 0x2d, 0x3d, 0x59, 0xb2, 0xb6, 0xd7, 0xf2, 0x57, 0x5f, 0x6d, 0x7e, 0x38, + 0x93, 0x0a, 0x18, 0x6f, 0x21, 0x05, 0x79, 0x37, 0xbb, 0x91, 0xb3, 0xc0, 0x5a, 0xd6, 0xca, 0x06, + 0xc7, 0x71, 0x46, 0x72, 0xa0, 0xc2, 0x56, 0xa9, 0xc6, 0xa3, 0xf6, 0x64, 0xb2, 0xd2, 0xcc, 0x30, + 0xdd, 0x5a, 0xd6, 0x0b, 0xa9, 0x4a, 0x85, 0x0b, 0x07, 0x6e, 0x9c, 0xb8, 0x00, 0x17, 0xa8, 0x82, + 0xa2, 0x8a, 0x2b, 0x47, 0xaa, 0x38, 0x50, 0xe1, 0xc0, 0x01, 0xfe, 0x05, 0x4e, 0xb9, 0x71, 0xe3, + 0x48, 0x4d, 0x77, 0x8f, 0xd4, 0x33, 0x1a, 0xaf, 0xc6, 0xde, 0xa4, 0x0a, 0x6e, 0xea, 0xd7, 0xef, + 0xbd, 0x7e, 0xef, 0xf3, 0x5e, 0xbf, 0xf7, 0xa6, 0x05, 0x77, 0x4d, 0xc7, 0x31, 0x87, 0xb8, 0x6e, + 0x0c, 0x9d, 0xf1, 0xa0, 0x4e, 0x3d, 0xdd, 0x26, 0x43, 0x9d, 0xe2, 0xfa, 0xe3, 0xed, 0x53, 0x4c, + 0xf5, 0xaf, 0x4d, 0x28, 0x96, 0x63, 0xf7, 0x09, 0xf6, 0x1e, 0x5b, 0x06, 0xae, 0xb9, 0x9e, 0x43, + 0x1d, 0xb4, 0xc1, 0x25, 0x6b, 0x4c, 0xb2, 0x26, 0xf1, 0xd5, 0x84, 0x6c, 0xf5, 0x05, 0xa1, 0x5b, + 0x77, 0xad, 0xba, 0x6e, 0xdb, 0x0e, 0x65, 0xdb, 0x84, 0xcb, 0x57, 0x5f, 0x15, 0xbb, 0x43, 0xc7, + 0x36, 0xbd, 0xb1, 0x6d, 0x5b, 0xb6, 0x59, 0x77, 0x5c, 0xec, 0x85, 0x98, 0x5e, 0x16, 0x4c, 0x6c, + 0x75, 0x3a, 0x3e, 0xab, 0x53, 0x6b, 0x84, 0x09, 0xd5, 0x47, 0x2e, 0x67, 0x50, 0xdf, 0x87, 0x1b, + 0xbd, 0xc0, 0xe8, 0x1e, 0x7e, 0x42, 0x3b, 0x43, 0x87, 0x10, 0xdd, 0x3b, 0x6f, 0x39, 0xf6, 0x99, + 0x65, 0xa2, 0x2a, 0x64, 0x4d, 0x41, 0xa9, 0x28, 0x1b, 0xca, 0x66, 0x4e, 0x9b, 0xac, 0xd1, 0xcb, + 0x90, 0xb7, 0x4c, 0xdb, 0xf1, 0x70, 0xdf, 0xd0, 0x09, 0xae, 0xa4, 0x36, 0x94, 0xcd, 0xac, 0x06, + 0x9c, 0xd4, 0xd2, 0x09, 0x56, 0x3f, 0x4d, 0xc1, 0x5a, 0x48, 0xb9, 0x86, 0xbf, 0x3f, 0xc6, 0x84, + 0xfa, 0x5a, 0x0d, 0xc7, 0xa6, 0xd8, 0xa6, 0xa4, 0xa2, 0x6c, 0xa4, 0x7d, 0xad, 0xc1, 0x1a, 0xdd, + 0x80, 0xdc, 0xc8, 0x1a, 0xe1, 0x3e, 0x3d, 0x77, 0x71, 0x25, 0xcd, 0x8f, 0xf4, 0x09, 0xbd, 0x73, + 0x17, 0xa3, 0xd7, 0x61, 0x8d, 0x38, 0x63, 0xcf, 0xc0, 0xfd, 0xa1, 0x6e, 0x9b, 0x63, 0xdd, 0xc4, + 0x7d, 0xc3, 0x19, 0xe0, 0xca, 0x22, 0xe3, 0x43, 0x7c, 0xef, 0x50, 0x6c, 0xb5, 0x9c, 0x01, 0x93, + 0xa0, 0xba, 0x67, 0x62, 0x1a, 0x91, 0xc8, 0x70, 0x09, 0xbe, 0x17, 0x92, 0x58, 0x87, 0x25, 0x57, + 0xf7, 0xb0, 0x4d, 0x2b, 0x59, 0xc6, 0x23, 0x56, 0x68, 0x0d, 0x32, 0x23, 0x67, 0x80, 0x87, 0x95, + 0x25, 0x46, 0xe6, 0x0b, 0x74, 0x06, 0xab, 0x01, 0x20, 0x7d, 0x83, 0x61, 0x56, 0x59, 0xde, 0x50, + 0x36, 0xf3, 0x8d, 0x7b, 0xb5, 0x79, 0xf1, 0xad, 0x3d, 0x03, 0x78, 0xad, 0x68, 0x86, 0xd6, 0xea, + 0xdf, 0x14, 0x28, 0x47, 0xb0, 0x24, 0xae, 0x63, 0x13, 0x8c, 0xde, 0x85, 0x15, 0x49, 0x39, 0x07, + 0x34, 0xdf, 0xf8, 0x6a, 0xf2, 0xe3, 0x2d, 0xc7, 0xd6, 0x42, 0x2a, 0xd0, 0x29, 0x94, 0x27, 0x4e, + 0x85, 0x74, 0xa7, 0xaf, 0xa2, 0x7b, 0x2d, 0xd0, 0x25, 0x11, 0x89, 0xfa, 0x99, 0x02, 0x79, 0x89, + 0x80, 0xbe, 0x0c, 0xab, 0x93, 0xdb, 0x33, 0xe8, 0x53, 0xfc, 0x84, 0x8a, 0x84, 0x2b, 0x4e, 0xc9, + 0xbe, 0xdf, 0xd3, 0x38, 0xa4, 0xe4, 0x38, 0xdc, 0x82, 0xf5, 0x01, 0xa6, 0xd8, 0xf0, 0x85, 0xe3, + 0x72, 0x63, 0x2d, 0xd8, 0x0d, 0xc5, 0x3a, 0x26, 0x7a, 0xe9, 0x2f, 0x22, 0x7a, 
0x3f, 0x51, 0xa0, + 0xbc, 0xc7, 0x0c, 0x08, 0x8e, 0x0f, 0xae, 0xc2, 0x34, 0xdb, 0x32, 0xf1, 0xd9, 0xb6, 0x28, 0x7b, + 0x59, 0x85, 0x65, 0x71, 0x51, 0x38, 0x38, 0xfb, 0x0b, 0x5a, 0x40, 0x78, 0xe6, 0xc5, 0xd9, 0xcd, + 0xc2, 0x12, 0xbf, 0x1c, 0xea, 0x77, 0xa0, 0xb4, 0x17, 0x81, 0x02, 0xbd, 0x0a, 0x85, 0x30, 0x66, + 0x1c, 0xf9, 0x95, 0xa1, 0x8c, 0xd5, 0x4b, 0x00, 0x0c, 0xa2, 0x01, 0xb6, 0x0d, 0x7e, 0xdb, 0x53, + 0x9a, 0x44, 0x51, 0x3f, 0x84, 0xf5, 0xa8, 0x8b, 0x22, 0x43, 0x8f, 0x21, 0x17, 0x68, 0x0a, 0xd2, + 0xb3, 0x31, 0x1f, 0xdf, 0xa8, 0x95, 0xda, 0x54, 0x89, 0xfa, 0xb1, 0x02, 0x2f, 0x74, 0x30, 0xed, + 0x8e, 0x5d, 0xd7, 0xf1, 0x24, 0x1e, 0x32, 0x0b, 0x6b, 0x3a, 0x04, 0x6b, 0x03, 0xca, 0x03, 0x8b, + 0xb8, 0x43, 0xfd, 0xbc, 0x1f, 0xe7, 0xf1, 0x75, 0xb1, 0x19, 0x4a, 0x92, 0xd8, 0x84, 0x53, 0x4d, + 0x40, 0xb3, 0xc7, 0xa3, 0x77, 0x67, 0x5d, 0xdd, 0x9e, 0xef, 0xea, 0x8c, 0x22, 0xd9, 0xd7, 0xdf, + 0x2a, 0x70, 0x6d, 0x86, 0x21, 0x59, 0xc8, 0x5e, 0x81, 0x95, 0xc0, 0x5b, 0x5b, 0x1f, 0x61, 0xe1, + 0x40, 0x5e, 0xd0, 0x8e, 0xf4, 0x11, 0x46, 0xaf, 0x41, 0x91, 0x70, 0xe5, 0x7d, 0x9e, 0x20, 0x0c, + 0xb0, 0xac, 0x56, 0x10, 0xd4, 0x2e, 0x23, 0xca, 0x6c, 0xbc, 0x64, 0xb2, 0xbc, 0x9c, 0xb2, 0xf5, + 0x18, 0x51, 0xdd, 0x84, 0x5c, 0xc7, 0x20, 0x42, 0xe6, 0x06, 0xe4, 0x2c, 0xdb, 0x1d, 0xd3, 0xfe, + 0xd8, 0xb3, 0x82, 0xe6, 0xc1, 0x08, 0x27, 0x9e, 0xa5, 0x7e, 0xa2, 0x40, 0xfe, 0xc0, 0x5f, 0x88, + 0x46, 0x13, 0xca, 0x5e, 0x25, 0x52, 0xf6, 0x0f, 0x01, 0x4c, 0x83, 0x04, 0x06, 0xa6, 0xd8, 0x0d, + 0xbd, 0x39, 0x1f, 0xd6, 0x89, 0x29, 0xfb, 0x0b, 0x5a, 0xce, 0x0c, 0x16, 0xd2, 0x5d, 0x78, 0x0b, + 0x8a, 0x1d, 0x83, 0xec, 0x61, 0x42, 0x2d, 0x9b, 0x57, 0xa1, 0x2d, 0xb8, 0xe6, 0x8c, 0xa9, 0x30, + 0xba, 0xef, 0x7a, 0xf8, 0xcc, 0x7a, 0x22, 0xcc, 0x59, 0xe5, 0x1b, 0x27, 0x9e, 0x75, 0xcc, 0xc8, + 0xea, 0x53, 0x58, 0x79, 0x87, 0x91, 0x84, 0x0b, 0xdf, 0x83, 0x55, 0xdf, 0xca, 0xc1, 0x54, 0x1d, + 0x93, 0xcc, 0x37, 0x5e, 0x4f, 0x64, 0xaa, 0x64, 0xc6, 0xfe, 0x82, 0x56, 0x34, 0x43, 0x94, 0xdd, + 0x02, 0xe4, 0x25, 0xc5, 0xea, 0xa7, 0x19, 0xf8, 0xff, 0x5d, 0x9d, 0x1a, 0x1f, 0xc4, 0xf6, 0xd7, + 0x69, 0xf6, 0x2b, 0xa1, 0xec, 0xbf, 0xa8, 0x7d, 0xa6, 0x2e, 0x6c, 0x9f, 0x0d, 0x28, 0xc7, 0xb5, + 0x4f, 0xde, 0x09, 0x72, 0xda, 0xf5, 0xd9, 0xfe, 0x49, 0x50, 0x1f, 0x96, 0xd8, 0x15, 0x21, 0x95, + 0x45, 0x76, 0x01, 0x3a, 0xf3, 0xdd, 0xbf, 0xd0, 0x95, 0xda, 0xdb, 0x4c, 0x53, 0xdb, 0xa6, 0xde, + 0xb9, 0x26, 0xd4, 0x22, 0x0d, 0x0a, 0x3c, 0xb1, 0x78, 0xc9, 0x26, 0x95, 0x4c, 0xd2, 0xb6, 0x24, + 0x65, 0x9c, 0xb6, 0x62, 0x4d, 0x17, 0x04, 0x75, 0xa1, 0x20, 0x02, 0x2f, 0xfa, 0xc0, 0x12, 0x0b, + 0x5d, 0x6d, 0xbe, 0x4e, 0x39, 0x07, 0xb4, 0x15, 0x47, 0xce, 0x88, 0x47, 0x00, 0xa2, 0x11, 0x58, + 0x98, 0x54, 0x96, 0x99, 0x95, 0xdf, 0x7e, 0x1e, 0x34, 0x3a, 0x13, 0x6d, 0x1c, 0x11, 0x49, 0x7d, + 0xf5, 0x4d, 0xc8, 0x4b, 0x60, 0xa1, 0x12, 0xa4, 0x1f, 0xe1, 0x60, 0x68, 0xf3, 0x7f, 0xfa, 0x75, + 0xec, 0xb1, 0x3e, 0x1c, 0x07, 0xe1, 0xe6, 0x8b, 0x66, 0xea, 0xae, 0x52, 0xfd, 0x11, 0xac, 0x46, + 0x34, 0xc7, 0x88, 0x77, 0x65, 0xf1, 0xe7, 0xee, 0x90, 0xd3, 0xd3, 0xd5, 0x5f, 0xa6, 0x61, 0x3d, + 0xec, 0xf2, 0xdb, 0x98, 0xea, 0x03, 0x9d, 0xea, 0xa8, 0x07, 0x19, 0x42, 0x75, 0xca, 0x2b, 0x42, + 0xb1, 0xf1, 0xf5, 0xcb, 0x62, 0x17, 0x28, 0xaa, 0x75, 0x7d, 0x2d, 0x1a, 0x57, 0x86, 0xb6, 0xa1, + 0x2c, 0x8d, 0x1a, 0xc6, 0x07, 0xba, 0xa7, 0x1b, 0x14, 0x7b, 0x84, 0x79, 0x96, 0xd6, 0xd6, 0xa6, + 0x9b, 0xad, 0xc9, 0x1e, 0xba, 0x09, 0xd7, 0xce, 0x74, 0x6b, 0x18, 0x16, 0x48, 0x33, 0x81, 0x12, + 0xdf, 0x90, 0x98, 0xbf, 0x02, 0x25, 0xea, 0x50, 0x7d, 0x28, 0xf3, 0x2e, 0x32, 0xde, 0x55, 0x46, + 0x97, 
0x58, 0x77, 0x20, 0x4f, 0xc6, 0xa7, 0x23, 0x8b, 0xf6, 0xfd, 0xd1, 0x9c, 0x4d, 0x01, 0xf9, + 0x46, 0x35, 0x70, 0x34, 0x98, 0xdb, 0x6b, 0xbd, 0x60, 0x6e, 0xd7, 0x80, 0xb3, 0xfb, 0x04, 0x15, + 0x43, 0x86, 0x79, 0x86, 0xca, 0x70, 0xad, 0xdb, 0xbb, 0xdf, 0x6b, 0xf7, 0x4f, 0x8e, 0xba, 0xc7, + 0xed, 0xd6, 0xc1, 0x83, 0x83, 0xf6, 0x5e, 0x69, 0x01, 0xe5, 0x61, 0x59, 0x3b, 0x39, 0x3a, 0x3a, + 0x38, 0xea, 0x94, 0x14, 0x54, 0x80, 0x5c, 0xf7, 0xa4, 0xd5, 0x6a, 0xb7, 0xf7, 0xda, 0x7b, 0xa5, + 0x14, 0x02, 0x58, 0x7a, 0x70, 0xff, 0xe0, 0xb0, 0xbd, 0x57, 0x4a, 0xa3, 0x22, 0x40, 0xeb, 0xfe, + 0x51, 0xab, 0x7d, 0x78, 0xe8, 0xb3, 0x2e, 0xfa, 0xac, 0x62, 0xdd, 0xde, 0x2b, 0x65, 0xd4, 0x9f, + 0xa7, 0xa2, 0x11, 0x9a, 0xf4, 0xf6, 0x38, 0x4f, 0x95, 0x78, 0x4f, 0xbf, 0x78, 0xd8, 0x23, 0x58, + 0x2e, 0x5e, 0x06, 0x4b, 0x74, 0x1b, 0xb2, 0xd8, 0x1e, 0x24, 0x8d, 0xc2, 0x32, 0xb6, 0x07, 0x2c, + 0x04, 0x23, 0xb8, 0x1e, 0xa4, 0xb6, 0xdc, 0xcf, 0xc2, 0x2d, 0x4b, 0xf9, 0xdc, 0x5a, 0xd6, 0x2f, + 0x32, 0x90, 0x0d, 0xce, 0x43, 0x08, 0x16, 0x59, 0x5f, 0xe7, 0xb7, 0x94, 0xfd, 0x46, 0xa7, 0xd2, + 0x60, 0xe0, 0xea, 0x96, 0x27, 0x06, 0xda, 0x9d, 0x04, 0x67, 0x0b, 0xb5, 0x35, 0xb9, 0x9c, 0x1f, + 0xeb, 0x96, 0xb7, 0xbf, 0x30, 0x9d, 0x2b, 0xfc, 0x35, 0x7a, 0x04, 0x28, 0xdc, 0x0e, 0xfa, 0x44, + 0x4c, 0x04, 0x57, 0x3f, 0x88, 0x74, 0x31, 0xdd, 0x5f, 0xd0, 0x4a, 0xc3, 0x08, 0x0d, 0x7d, 0x17, + 0x56, 0xe4, 0x6a, 0x2f, 0x62, 0x73, 0x3b, 0xf9, 0x31, 0x72, 0xd1, 0xcf, 0x4b, 0x45, 0xdf, 0xff, + 0x80, 0xc5, 0x7e, 0xb1, 0xeb, 0x1b, 0xce, 0xd8, 0xa6, 0xac, 0xe2, 0x67, 0x34, 0x60, 0xa4, 0x96, + 0x4f, 0x89, 0xe6, 0xd3, 0xf2, 0x95, 0xf3, 0x29, 0x9b, 0x38, 0x9f, 0xaa, 0x8f, 0xa1, 0x14, 0xc5, + 0xff, 0xc2, 0xbe, 0xad, 0x5c, 0xfa, 0xb3, 0x37, 0x75, 0xd1, 0x67, 0x6f, 0xf5, 0xcd, 0xf0, 0xb9, + 0x0c, 0xfa, 0xd7, 0xa0, 0x18, 0x69, 0xfb, 0xfc, 0x6b, 0xbd, 0x10, 0x0a, 0xd2, 0x6e, 0x5e, 0x1a, + 0x7a, 0xd5, 0x1f, 0x40, 0xb9, 0xe5, 0x61, 0x9d, 0xe2, 0x00, 0xfe, 0x79, 0x43, 0xc9, 0x03, 0xe9, + 0x89, 0x81, 0xb7, 0x96, 0xad, 0xe4, 0xb1, 0x9d, 0x3e, 0x47, 0xa8, 0x9b, 0x80, 0x3a, 0x98, 0x46, + 0x4f, 0x8d, 0xb9, 0x22, 0xea, 0x4d, 0xff, 0x63, 0x6c, 0x88, 0x67, 0x4d, 0x8c, 0x63, 0xfe, 0xb1, + 0x02, 0xe5, 0x43, 0x8b, 0xd0, 0x69, 0x83, 0x9c, 0xe7, 0xd0, 0x0d, 0xc8, 0xb9, 0x3e, 0x62, 0xc4, + 0x7a, 0xca, 0x01, 0xcf, 0x68, 0x59, 0x9f, 0xd0, 0xb5, 0x9e, 0x62, 0xf4, 0x22, 0x00, 0xdb, 0xa4, + 0xce, 0x23, 0x6c, 0x8b, 0x8f, 0x13, 0xc6, 0xde, 0xf3, 0x09, 0xbe, 0xce, 0x33, 0x6b, 0x48, 0xb1, + 0x27, 0xbe, 0xfb, 0xc4, 0x4a, 0xfd, 0xa9, 0x02, 0xeb, 0x51, 0x2b, 0x44, 0x05, 0xfe, 0x56, 0x68, + 0xc8, 0xe0, 0xdf, 0x1c, 0x97, 0x41, 0x50, 0x92, 0x46, 0x5f, 0x82, 0x55, 0x1b, 0x3f, 0xa1, 0x7d, + 0xc9, 0x44, 0x9e, 0x31, 0x05, 0x9f, 0x7c, 0x1c, 0x98, 0xa9, 0xfe, 0x26, 0x05, 0xeb, 0xe1, 0x28, + 0x4f, 0x5a, 0x76, 0x5c, 0x4d, 0x9a, 0xb4, 0xf1, 0x54, 0xd2, 0x36, 0x1e, 0xaf, 0x3c, 0xdc, 0xc6, + 0x23, 0xb7, 0x33, 0xfd, 0xdf, 0xd8, 0x39, 0x7d, 0xa0, 0xc2, 0xb9, 0xf6, 0x39, 0x03, 0x15, 0xaf, + 0xfc, 0x7f, 0x0f, 0xa8, 0x5f, 0x29, 0x51, 0xa0, 0x26, 0x09, 0x1e, 0x07, 0x54, 0xc4, 0xa5, 0xd4, + 0x95, 0x2b, 0x73, 0x3a, 0x71, 0x65, 0x6e, 0x7c, 0xbc, 0x02, 0x48, 0x7a, 0xb1, 0xea, 0xf2, 0xd7, + 0x5c, 0xf4, 0x67, 0x05, 0x0a, 0xa1, 0x49, 0x17, 0xbd, 0x71, 0xc9, 0xd1, 0x58, 0x14, 0x94, 0xea, + 0x9d, 0x4b, 0xcb, 0x71, 0x84, 0xd4, 0xfb, 0x9f, 0xfc, 0xe3, 0x9f, 0x3f, 0x4b, 0xed, 0xa8, 0x6f, + 0x4c, 0x9e, 0x9d, 0x7f, 0xc8, 0x6b, 0xd1, 0x3d, 0xd7, 0x73, 0x3e, 0xc4, 0x06, 0x25, 0xf5, 0xad, + 0xfa, 0xd0, 0x31, 0xf8, 0xd3, 0x5b, 0x7d, 0xeb, 0xa3, 0x26, 0x95, 0xf5, 0x34, 0x95, 0x2d, 0xf4, + 0x17, 0x05, 0x8a, 0xe1, 0xe7, 
0x1b, 0x74, 0x27, 0xe9, 0x1b, 0x4d, 0xe4, 0x4d, 0xab, 0x7a, 0xf7, + 0xf2, 0x82, 0xc2, 0x91, 0x5d, 0xe6, 0xc8, 0x5b, 0xea, 0x9d, 0xc4, 0x8e, 0x0c, 0x42, 0x8a, 0x7c, + 0x4f, 0xfe, 0xae, 0x40, 0x39, 0xf6, 0x6d, 0x08, 0x25, 0xb8, 0x4e, 0xcf, 0x7a, 0x54, 0xaa, 0xde, + 0xba, 0xc2, 0x4b, 0x0e, 0x51, 0x5b, 0xcc, 0xa7, 0x7b, 0x68, 0x27, 0xa9, 0x4f, 0x75, 0x32, 0x6b, + 0xfa, 0x1f, 0x15, 0x40, 0xb3, 0x9f, 0x85, 0x68, 0xe7, 0x39, 0x3e, 0x26, 0xab, 0x2f, 0x06, 0xc2, + 0xd2, 0x3f, 0x08, 0xb5, 0x77, 0x82, 0x7f, 0x10, 0xd4, 0x07, 0xcc, 0xee, 0x6f, 0xaa, 0x89, 0xed, + 0x6e, 0x9e, 0xce, 0x1c, 0xe5, 0xc7, 0xe3, 0x0f, 0x0a, 0x14, 0xc3, 0xe5, 0x3c, 0x49, 0x66, 0xc5, + 0xce, 0x10, 0xf3, 0x4c, 0x6e, 0x33, 0x93, 0xbf, 0xa1, 0x6e, 0x27, 0x86, 0x7a, 0xda, 0xfb, 0x9a, + 0xd3, 0x3f, 0x36, 0xfe, 0xa4, 0x40, 0x31, 0xdc, 0x6c, 0x93, 0x58, 0x1c, 0x3b, 0x24, 0x24, 0xb9, + 0x0b, 0xf1, 0x7d, 0x5d, 0xdd, 0x61, 0xce, 0xdc, 0x46, 0x57, 0x71, 0x06, 0xfd, 0x5e, 0x81, 0xbc, + 0x34, 0x0d, 0xa1, 0x5b, 0x89, 0x52, 0x3f, 0x0a, 0xf7, 0x25, 0xc6, 0x88, 0x38, 0x73, 0xfd, 0x4a, + 0x7d, 0x81, 0xb1, 0x92, 0xad, 0xf5, 0xad, 0x8f, 0xd0, 0xaf, 0x59, 0xf5, 0x91, 0xab, 0x7f, 0xb2, + 0xea, 0x13, 0x33, 0xc4, 0xcd, 0xcb, 0x11, 0x61, 0xe7, 0xd6, 0x55, 0xec, 0xdc, 0xfd, 0x97, 0x02, + 0xaa, 0xe1, 0x8c, 0xe2, 0x4d, 0xc3, 0x81, 0x61, 0xbb, 0xff, 0x37, 0xdb, 0x26, 0x8e, 0xfd, 0xc6, + 0x72, 0xac, 0xbc, 0xdf, 0x11, 0xa2, 0xa6, 0xe3, 0x8f, 0xcc, 0x35, 0xc7, 0x33, 0xeb, 0x26, 0xb6, + 0x59, 0xdb, 0xa9, 0xf3, 0x2d, 0xdd, 0xb5, 0xc8, 0x45, 0x7f, 0x27, 0xee, 0x4c, 0x28, 0xff, 0x56, + 0x94, 0xdf, 0xa5, 0x5e, 0xea, 0x70, 0x65, 0x2d, 0x66, 0xc7, 0xe4, 0xe6, 0xd5, 0xde, 0xdb, 0xde, + 0xf5, 0xb9, 0xff, 0x1a, 0x30, 0x3c, 0x64, 0x0c, 0x0f, 0x27, 0x0c, 0x0f, 0xdf, 0xe3, 0xea, 0x3e, + 0x4b, 0xbd, 0xc2, 0x19, 0x9a, 0x4d, 0xc6, 0xd1, 0x6c, 0x4e, 0x58, 0x9a, 0x4d, 0xc1, 0x73, 0xba, + 0xc4, 0x8c, 0xdb, 0xfe, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5d, 0xe5, 0x9e, 0x13, 0xea, 0x1c, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1/video_intelligence.pb.go new file mode 100644 index 0000000..117b208 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1/video_intelligence.pb.go @@ -0,0 +1,2648 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/videointelligence/v1/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 2 + // Explicit content detection. + Feature_EXPLICIT_CONTENT_DETECTION Feature = 3 + // Human face detection and tracking. + Feature_FACE_DETECTION Feature = 4 + // Speech transcription. + Feature_SPEECH_TRANSCRIPTION Feature = 6 + // OCR text detection and tracking. + Feature_TEXT_DETECTION Feature = 7 + // Object detection and tracking. + Feature_OBJECT_TRACKING Feature = 9 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "SHOT_CHANGE_DETECTION", + 3: "EXPLICIT_CONTENT_DETECTION", + 4: "FACE_DETECTION", + 6: "SPEECH_TRANSCRIPTION", + 7: "TEXT_DETECTION", + 9: "OBJECT_TRACKING", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "SHOT_CHANGE_DETECTION": 2, + "EXPLICIT_CONTENT_DETECTION": 3, + "FACE_DETECTION": 4, + "SPEECH_TRANSCRIPTION": 6, + "TEXT_DETECTION": 7, + "OBJECT_TRACKING": 9, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{0} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. + LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{1} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unspecified likelihood. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Very unlikely. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{2} +} + +// Video annotation request. 
+type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. + // If unset, the input video(s) should be specified via `input_uri`. + // If set, `input_uri` should be unset. + InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. + VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. 
+ LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() []byte { + if m != nil { + return m.InputContent + } + return nil +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. If unspecified, each video is + // treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // Config for LABEL_DETECTION. + LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig,proto3" json:"label_detection_config,omitempty"` + // Config for SHOT_CHANGE_DETECTION. + ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3" json:"shot_change_detection_config,omitempty"` + // Config for EXPLICIT_CONTENT_DETECTION. + ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3" json:"explicit_content_detection_config,omitempty"` + // Config for FACE_DETECTION. + FaceDetectionConfig *FaceDetectionConfig `protobuf:"bytes,5,opt,name=face_detection_config,json=faceDetectionConfig,proto3" json:"face_detection_config,omitempty"` + // Config for SPEECH_TRANSCRIPTION. + SpeechTranscriptionConfig *SpeechTranscriptionConfig `protobuf:"bytes,6,opt,name=speech_transcription_config,json=speechTranscriptionConfig,proto3" json:"speech_transcription_config,omitempty"` + // Config for TEXT_DETECTION. 
+ TextDetectionConfig *TextDetectionConfig `protobuf:"bytes,8,opt,name=text_detection_config,json=textDetectionConfig,proto3" json:"text_detection_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig { + if m != nil { + return m.LabelDetectionConfig + } + return nil +} + +func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig { + if m != nil { + return m.ShotChangeDetectionConfig + } + return nil +} + +func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig { + if m != nil { + return m.ExplicitContentDetectionConfig + } + return nil +} + +func (m *VideoContext) GetFaceDetectionConfig() *FaceDetectionConfig { + if m != nil { + return m.FaceDetectionConfig + } + return nil +} + +func (m *VideoContext) GetSpeechTranscriptionConfig() *SpeechTranscriptionConfig { + if m != nil { + return m.SpeechTranscriptionConfig + } + return nil +} + +func (m *VideoContext) GetTextDetectionConfig() *TextDetectionConfig { + if m != nil { + return m.TextDetectionConfig + } + return nil +} + +// Config for LABEL_DETECTION. +type LabelDetectionConfig struct { + // What labels should be detected with LABEL_DETECTION, in addition to + // video-level labels or segment-level labels. + // If unspecified, defaults to `SHOT_MODE`. + LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + // Should be used with `SHOT_AND_FRAME_MODE` enabled. + StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". 
+ Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} } +func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*LabelDetectionConfig) ProtoMessage() {} +func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{2} +} +func (m *LabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDetectionConfig.Unmarshal(m, b) +} +func (m *LabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *LabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDetectionConfig.Merge(dst, src) +} +func (m *LabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_LabelDetectionConfig.Size(m) +} +func (m *LabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDetectionConfig proto.InternalMessageInfo + +func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *LabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *LabelDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for SHOT_CHANGE_DETECTION. +type ShotChangeDetectionConfig struct { + // Model to use for shot change detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} } +func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ShotChangeDetectionConfig) ProtoMessage() {} +func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{3} +} +func (m *ShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *ShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShotChangeDetectionConfig.Merge(dst, src) +} +func (m *ShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ShotChangeDetectionConfig.Size(m) +} +func (m *ShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ShotChangeDetectionConfig proto.InternalMessageInfo + +func (m *ShotChangeDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION. +type ExplicitContentDetectionConfig struct { + // Model to use for explicit content detection. 
+ // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} } +func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentDetectionConfig) ProtoMessage() {} +func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{4} +} +func (m *ExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *ExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *ExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ExplicitContentDetectionConfig.Size(m) +} +func (m *ExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentDetectionConfig proto.InternalMessageInfo + +func (m *ExplicitContentDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for FACE_DETECTION. +type FaceDetectionConfig struct { + // Model to use for face detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + // Whether bounding boxes be included in the face annotation output. + IncludeBoundingBoxes bool `protobuf:"varint,2,opt,name=include_bounding_boxes,json=includeBoundingBoxes,proto3" json:"include_bounding_boxes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceDetectionConfig) Reset() { *m = FaceDetectionConfig{} } +func (m *FaceDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*FaceDetectionConfig) ProtoMessage() {} +func (*FaceDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{5} +} +func (m *FaceDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceDetectionConfig.Unmarshal(m, b) +} +func (m *FaceDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *FaceDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceDetectionConfig.Merge(dst, src) +} +func (m *FaceDetectionConfig) XXX_Size() int { + return xxx_messageInfo_FaceDetectionConfig.Size(m) +} +func (m *FaceDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_FaceDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceDetectionConfig proto.InternalMessageInfo + +func (m *FaceDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *FaceDetectionConfig) GetIncludeBoundingBoxes() bool { + if m != nil { + return m.IncludeBoundingBoxes + } + return false +} + +// Config for TEXT_DETECTION. 
+type TextDetectionConfig struct { + // Language hint can be specified if the language to be detected is known a + // priori. It can increase the accuracy of the detection. Language hint must + // be language code in BCP-47 format. + // + // Automatic language detection is performed if no hint is provided. + LanguageHints []string `protobuf:"bytes,1,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextDetectionConfig) Reset() { *m = TextDetectionConfig{} } +func (m *TextDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*TextDetectionConfig) ProtoMessage() {} +func (*TextDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{6} +} +func (m *TextDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextDetectionConfig.Unmarshal(m, b) +} +func (m *TextDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *TextDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextDetectionConfig.Merge(dst, src) +} +func (m *TextDetectionConfig) XXX_Size() int { + return xxx_messageInfo_TextDetectionConfig.Size(m) +} +func (m *TextDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TextDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TextDetectionConfig proto.InternalMessageInfo + +func (m *TextDetectionConfig) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +// Video segment. +type VideoSegment struct { + // Time-offset, relative to the beginning of the video, + // corresponding to the start of the segment (inclusive). + StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the end of the segment (inclusive). 
+ EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{7} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *VideoSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video segment level annotation results for label detection. +type LabelSegment struct { + // Video segment where a label was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelSegment) Reset() { *m = LabelSegment{} } +func (m *LabelSegment) String() string { return proto.CompactTextString(m) } +func (*LabelSegment) ProtoMessage() {} +func (*LabelSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{8} +} +func (m *LabelSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelSegment.Unmarshal(m, b) +} +func (m *LabelSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelSegment.Marshal(b, m, deterministic) +} +func (dst *LabelSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelSegment.Merge(dst, src) +} +func (m *LabelSegment) XXX_Size() int { + return xxx_messageInfo_LabelSegment.Size(m) +} +func (m *LabelSegment) XXX_DiscardUnknown() { + xxx_messageInfo_LabelSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelSegment proto.InternalMessageInfo + +func (m *LabelSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Video frame level annotation results for label detection. +type LabelFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelFrame) Reset() { *m = LabelFrame{} } +func (m *LabelFrame) String() string { return proto.CompactTextString(m) } +func (*LabelFrame) ProtoMessage() {} +func (*LabelFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{9} +} +func (m *LabelFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelFrame.Unmarshal(m, b) +} +func (m *LabelFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelFrame.Marshal(b, m, deterministic) +} +func (dst *LabelFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelFrame.Merge(dst, src) +} +func (m *LabelFrame) XXX_Size() int { + return xxx_messageInfo_LabelFrame.Size(m) +} +func (m *LabelFrame) XXX_DiscardUnknown() { + xxx_messageInfo_LabelFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelFrame proto.InternalMessageInfo + +func (m *LabelFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *LabelFrame) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected entity from video analysis. +type Entity struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Textual description, e.g. `Fixed-gear bicycle`. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{10} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *Entity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entity) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Label annotation. +type LabelAnnotation struct { + // Detected entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Common categories for the detected entity. + // E.g. when the label is `Terrier` the category is likely `dog`. 
And in some + // cases there might be more than one categories e.g. `Terrier` could also be + // a `pet`. + CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities,proto3" json:"category_entities,omitempty"` + // All video segments where a label was detected. + Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a label was detected. + Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{11} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *LabelAnnotation) GetCategoryEntities() []*Entity { + if m != nil { + return m.CategoryEntities + } + return nil +} + +func (m *LabelAnnotation) GetSegments() []*LabelSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *LabelAnnotation) GetFrames() []*LabelFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for explicit content. +type ExplicitContentFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Likelihood of the pornography content.. 
+ PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,proto3,enum=google.cloud.videointelligence.v1.Likelihood" json:"pornography_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} } +func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentFrame) ProtoMessage() {} +func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{12} +} +func (m *ExplicitContentFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentFrame.Unmarshal(m, b) +} +func (m *ExplicitContentFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentFrame.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentFrame.Merge(dst, src) +} +func (m *ExplicitContentFrame) XXX_Size() int { + return xxx_messageInfo_ExplicitContentFrame.Size(m) +} +func (m *ExplicitContentFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentFrame proto.InternalMessageInfo + +func (m *ExplicitContentFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood { + if m != nil { + return m.PornographyLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +// Explicit content annotation (based on per-frame visual signals only). +// If no explicit content has been detected in a frame, no annotations are +// present for that frame. +type ExplicitContentAnnotation struct { + // All video frames where explicit content was detected. + Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} } +func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentAnnotation) ProtoMessage() {} +func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{13} +} +func (m *ExplicitContentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentAnnotation.Unmarshal(m, b) +} +func (m *ExplicitContentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentAnnotation.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentAnnotation.Merge(dst, src) +} +func (m *ExplicitContentAnnotation) XXX_Size() int { + return xxx_messageInfo_ExplicitContentAnnotation.Size(m) +} +func (m *ExplicitContentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentAnnotation proto.InternalMessageInfo + +func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Normalized bounding box. 
+// The normalized vertex coordinates are relative to the original image. +// Range: [0, 1]. +type NormalizedBoundingBox struct { + // Left X coordinate. + Left float32 `protobuf:"fixed32,1,opt,name=left,proto3" json:"left,omitempty"` + // Top Y coordinate. + Top float32 `protobuf:"fixed32,2,opt,name=top,proto3" json:"top,omitempty"` + // Right X coordinate. + Right float32 `protobuf:"fixed32,3,opt,name=right,proto3" json:"right,omitempty"` + // Bottom Y coordinate. + Bottom float32 `protobuf:"fixed32,4,opt,name=bottom,proto3" json:"bottom,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingBox) Reset() { *m = NormalizedBoundingBox{} } +func (m *NormalizedBoundingBox) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingBox) ProtoMessage() {} +func (*NormalizedBoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{14} +} +func (m *NormalizedBoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingBox.Unmarshal(m, b) +} +func (m *NormalizedBoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingBox.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingBox.Merge(dst, src) +} +func (m *NormalizedBoundingBox) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingBox.Size(m) +} +func (m *NormalizedBoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingBox proto.InternalMessageInfo + +func (m *NormalizedBoundingBox) GetLeft() float32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *NormalizedBoundingBox) GetTop() float32 { + if m != nil { + return m.Top + } + return 0 +} + +func (m *NormalizedBoundingBox) GetRight() float32 { + if m != nil { + return m.Right + } + return 0 +} + +func (m *NormalizedBoundingBox) GetBottom() float32 { + if m != nil { + return m.Bottom + } + return 0 +} + +// Video segment level annotation results for face detection. +type FaceSegment struct { + // Video segment where a face was detected. 
+ Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceSegment) Reset() { *m = FaceSegment{} } +func (m *FaceSegment) String() string { return proto.CompactTextString(m) } +func (*FaceSegment) ProtoMessage() {} +func (*FaceSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{15} +} +func (m *FaceSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceSegment.Unmarshal(m, b) +} +func (m *FaceSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceSegment.Marshal(b, m, deterministic) +} +func (dst *FaceSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceSegment.Merge(dst, src) +} +func (m *FaceSegment) XXX_Size() int { + return xxx_messageInfo_FaceSegment.Size(m) +} +func (m *FaceSegment) XXX_DiscardUnknown() { + xxx_messageInfo_FaceSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceSegment proto.InternalMessageInfo + +func (m *FaceSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +// Video frame level annotation results for face detection. +type FaceFrame struct { + // Normalized Bounding boxes in a frame. + // There can be more than one boxes if the same face is detected in multiple + // locations within the current frame. + NormalizedBoundingBoxes []*NormalizedBoundingBox `protobuf:"bytes,1,rep,name=normalized_bounding_boxes,json=normalizedBoundingBoxes,proto3" json:"normalized_bounding_boxes,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceFrame) Reset() { *m = FaceFrame{} } +func (m *FaceFrame) String() string { return proto.CompactTextString(m) } +func (*FaceFrame) ProtoMessage() {} +func (*FaceFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{16} +} +func (m *FaceFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceFrame.Unmarshal(m, b) +} +func (m *FaceFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceFrame.Marshal(b, m, deterministic) +} +func (dst *FaceFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceFrame.Merge(dst, src) +} +func (m *FaceFrame) XXX_Size() int { + return xxx_messageInfo_FaceFrame.Size(m) +} +func (m *FaceFrame) XXX_DiscardUnknown() { + xxx_messageInfo_FaceFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceFrame proto.InternalMessageInfo + +func (m *FaceFrame) GetNormalizedBoundingBoxes() []*NormalizedBoundingBox { + if m != nil { + return m.NormalizedBoundingBoxes + } + return nil +} + +func (m *FaceFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Face annotation. +type FaceAnnotation struct { + // Thumbnail of a representative face view (in JPEG format). + Thumbnail []byte `protobuf:"bytes,1,opt,name=thumbnail,proto3" json:"thumbnail,omitempty"` + // All video segments where a face was detected. 
+ Segments []*FaceSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a face was detected. + Frames []*FaceFrame `protobuf:"bytes,3,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{17} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetThumbnail() []byte { + if m != nil { + return m.Thumbnail + } + return nil +} + +func (m *FaceAnnotation) GetSegments() []*FaceSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *FaceAnnotation) GetFrames() []*FaceFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Annotation results for a single video. +type VideoAnnotationResults struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. + SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations,proto3" json:"segment_label_annotations,omitempty"` + // Label annotations on shot level. + // There is exactly one element for each unique label. + ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations,proto3" json:"shot_label_annotations,omitempty"` + // Label annotations on frame level. + // There is exactly one element for each unique label. + FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations,proto3" json:"frame_label_annotations,omitempty"` + // Face annotations. There is exactly one element for each unique face. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,5,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Explicit content annotation. + ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"` + // Speech transcription. 
+ SpeechTranscriptions []*SpeechTranscription `protobuf:"bytes,11,rep,name=speech_transcriptions,json=speechTranscriptions,proto3" json:"speech_transcriptions,omitempty"` + // OCR text detection and tracking. + // Annotations for list of detected text snippets. Each will have list of + // frame information associated with it. + TextAnnotations []*TextAnnotation `protobuf:"bytes,12,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // Annotations for list of objects detected and tracked in video. + ObjectAnnotations []*ObjectTrackingAnnotation `protobuf:"bytes,14,rep,name=object_annotations,json=objectAnnotations,proto3" json:"object_annotations,omitempty"` + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` + // some videos may succeed and some may fail. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{18} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.SegmentLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.ShotLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.FrameLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *VideoAnnotationResults) GetSpeechTranscriptions() []*SpeechTranscription { + if m != nil { + return m.SpeechTranscriptions + } + return nil +} + +func (m *VideoAnnotationResults) GetTextAnnotations() []*TextAnnotation { + if m != nil { + return m.TextAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetObjectAnnotations() []*ObjectTrackingAnnotation { + if m != nil { + return 
m.ObjectAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. + AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{19} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Approximate percentage processed thus far. Guaranteed to be + // 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{20} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{21} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +// Config for SPEECH_TRANSCRIPTION. +type SpeechTranscriptionConfig struct { + // *Required* The language of the supplied audio as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. + // Example: "en-US". + // See [Language Support](https://cloud.google.com/speech/docs/languages) + // for a list of the currently supported language codes. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // *Optional* Maximum number of recognition hypotheses to be returned. + // Specifically, the maximum number of `SpeechRecognitionAlternative` messages + // within each `SpeechTranscription`. The server may return fewer than + // `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will + // return a maximum of one. If omitted, will return a maximum of one. + MaxAlternatives int32 `protobuf:"varint,2,opt,name=max_alternatives,json=maxAlternatives,proto3" json:"max_alternatives,omitempty"` + // *Optional* If set to `true`, the server will attempt to filter out + // profanities, replacing all but the initial character in each filtered word + // with asterisks, e.g. "f***". If set to `false` or omitted, profanities + // won't be filtered out. + FilterProfanity bool `protobuf:"varint,3,opt,name=filter_profanity,json=filterProfanity,proto3" json:"filter_profanity,omitempty"` + // *Optional* A means to provide context to assist the speech recognition. + SpeechContexts []*SpeechContext `protobuf:"bytes,4,rep,name=speech_contexts,json=speechContexts,proto3" json:"speech_contexts,omitempty"` + // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // This feature is only available in select languages. Setting this for + // requests in other languages has no effect at all. The default 'false' value + // does not add punctuation to result hypotheses. NOTE: "This is currently + // offered as an experimental service, complimentary to all users. 
In the + // future this may be exclusively available as a premium feature." + EnableAutomaticPunctuation bool `protobuf:"varint,5,opt,name=enable_automatic_punctuation,json=enableAutomaticPunctuation,proto3" json:"enable_automatic_punctuation,omitempty"` + // *Optional* For file formats, such as MXF or MKV, supporting multiple audio + // tracks, specify up to two tracks. Default: track 0. + AudioTracks []int32 `protobuf:"varint,6,rep,packed,name=audio_tracks,json=audioTracks,proto3" json:"audio_tracks,omitempty"` + // *Optional* If 'true', enables speaker detection for each recognized word in + // the top alternative of the recognition result using a speaker_tag provided + // in the WordInfo. + // Note: When this is true, we send all the words from the beginning of the + // audio for the top alternative in every consecutive responses. + // This is done in order to improve our speaker tags as our models learn to + // identify the speakers in the conversation over time. + EnableSpeakerDiarization bool `protobuf:"varint,7,opt,name=enable_speaker_diarization,json=enableSpeakerDiarization,proto3" json:"enable_speaker_diarization,omitempty"` + // *Optional* + // If set, specifies the estimated number of speakers in the conversation. + // If not set, defaults to '2'. + // Ignored unless enable_speaker_diarization is set to true. + DiarizationSpeakerCount int32 `protobuf:"varint,8,opt,name=diarization_speaker_count,json=diarizationSpeakerCount,proto3" json:"diarization_speaker_count,omitempty"` + // *Optional* If `true`, the top result includes a list of words and the + // confidence for those words. If `false`, no word-level confidence + // information is returned. The default is `false`. + EnableWordConfidence bool `protobuf:"varint,9,opt,name=enable_word_confidence,json=enableWordConfidence,proto3" json:"enable_word_confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechTranscriptionConfig) Reset() { *m = SpeechTranscriptionConfig{} } +func (m *SpeechTranscriptionConfig) String() string { return proto.CompactTextString(m) } +func (*SpeechTranscriptionConfig) ProtoMessage() {} +func (*SpeechTranscriptionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{22} +} +func (m *SpeechTranscriptionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechTranscriptionConfig.Unmarshal(m, b) +} +func (m *SpeechTranscriptionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechTranscriptionConfig.Marshal(b, m, deterministic) +} +func (dst *SpeechTranscriptionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechTranscriptionConfig.Merge(dst, src) +} +func (m *SpeechTranscriptionConfig) XXX_Size() int { + return xxx_messageInfo_SpeechTranscriptionConfig.Size(m) +} +func (m *SpeechTranscriptionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechTranscriptionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechTranscriptionConfig proto.InternalMessageInfo + +func (m *SpeechTranscriptionConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *SpeechTranscriptionConfig) GetMaxAlternatives() int32 { + if m != nil { + return m.MaxAlternatives + } + return 0 +} + +func (m *SpeechTranscriptionConfig) GetFilterProfanity() bool { + if m != nil { + return m.FilterProfanity + } + return false +} + +func (m *SpeechTranscriptionConfig) 
GetSpeechContexts() []*SpeechContext { + if m != nil { + return m.SpeechContexts + } + return nil +} + +func (m *SpeechTranscriptionConfig) GetEnableAutomaticPunctuation() bool { + if m != nil { + return m.EnableAutomaticPunctuation + } + return false +} + +func (m *SpeechTranscriptionConfig) GetAudioTracks() []int32 { + if m != nil { + return m.AudioTracks + } + return nil +} + +func (m *SpeechTranscriptionConfig) GetEnableSpeakerDiarization() bool { + if m != nil { + return m.EnableSpeakerDiarization + } + return false +} + +func (m *SpeechTranscriptionConfig) GetDiarizationSpeakerCount() int32 { + if m != nil { + return m.DiarizationSpeakerCount + } + return 0 +} + +func (m *SpeechTranscriptionConfig) GetEnableWordConfidence() bool { + if m != nil { + return m.EnableWordConfidence + } + return false +} + +// Provides "hints" to the speech recognizer to favor specific words and phrases +// in the results. +type SpeechContext struct { + // *Optional* A list of strings containing words and phrases "hints" so that + // the speech recognition is more likely to recognize them. This can be used + // to improve the accuracy for specific words and phrases, for example, if + // specific commands are typically spoken by the user. This can also be used + // to add additional words to the vocabulary of the recognizer. See + // [usage limits](https://cloud.google.com/speech/limits#content). + Phrases []string `protobuf:"bytes,1,rep,name=phrases,proto3" json:"phrases,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechContext) Reset() { *m = SpeechContext{} } +func (m *SpeechContext) String() string { return proto.CompactTextString(m) } +func (*SpeechContext) ProtoMessage() {} +func (*SpeechContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{23} +} +func (m *SpeechContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechContext.Unmarshal(m, b) +} +func (m *SpeechContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechContext.Marshal(b, m, deterministic) +} +func (dst *SpeechContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechContext.Merge(dst, src) +} +func (m *SpeechContext) XXX_Size() int { + return xxx_messageInfo_SpeechContext.Size(m) +} +func (m *SpeechContext) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechContext proto.InternalMessageInfo + +func (m *SpeechContext) GetPhrases() []string { + if m != nil { + return m.Phrases + } + return nil +} + +// A speech recognition result corresponding to a portion of the audio. +type SpeechTranscription struct { + // May contain one or more recognition hypotheses (up to the maximum specified + // in `max_alternatives`). These alternatives are ordered in terms of + // accuracy, with the top (first) alternative being the most probable, as + // ranked by the recognizer. + Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + // Output only. The + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the + // language in this result. This language code was detected to have the most + // likelihood of being spoken in the audio. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechTranscription) Reset() { *m = SpeechTranscription{} } +func (m *SpeechTranscription) String() string { return proto.CompactTextString(m) } +func (*SpeechTranscription) ProtoMessage() {} +func (*SpeechTranscription) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{24} +} +func (m *SpeechTranscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechTranscription.Unmarshal(m, b) +} +func (m *SpeechTranscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechTranscription.Marshal(b, m, deterministic) +} +func (dst *SpeechTranscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechTranscription.Merge(dst, src) +} +func (m *SpeechTranscription) XXX_Size() int { + return xxx_messageInfo_SpeechTranscription.Size(m) +} +func (m *SpeechTranscription) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechTranscription.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechTranscription proto.InternalMessageInfo + +func (m *SpeechTranscription) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +func (m *SpeechTranscription) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Alternative hypotheses (a.k.a. n-best list). +type SpeechRecognitionAlternative struct { + // Transcript text representing the words that the user spoke. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. This field is typically provided only for the top hypothesis, and + // only for `is_final=true` results. Clients should not rely on the + // `confidence` field as it is not guaranteed to be accurate or consistent. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // A list of word-specific information for each recognized word. 
+ Words []*WordInfo `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionAlternative) Reset() { *m = SpeechRecognitionAlternative{} } +func (m *SpeechRecognitionAlternative) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionAlternative) ProtoMessage() {} +func (*SpeechRecognitionAlternative) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{25} +} +func (m *SpeechRecognitionAlternative) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionAlternative.Unmarshal(m, b) +} +func (m *SpeechRecognitionAlternative) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionAlternative.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionAlternative) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionAlternative.Merge(dst, src) +} +func (m *SpeechRecognitionAlternative) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionAlternative.Size(m) +} +func (m *SpeechRecognitionAlternative) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionAlternative.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionAlternative proto.InternalMessageInfo + +func (m *SpeechRecognitionAlternative) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *SpeechRecognitionAlternative) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *SpeechRecognitionAlternative) GetWords() []*WordInfo { + if m != nil { + return m.Words + } + return nil +} + +// Word-specific information for recognized words. Word information is only +// included in the response when certain request parameters are set, such +// as `enable_word_time_offsets`. +type WordInfo struct { + // Time offset relative to the beginning of the audio, and + // corresponding to the start of the spoken word. This field is only set if + // `enable_word_time_offsets=true` and only in the top hypothesis. This is an + // experimental feature and the accuracy of the time offset can vary. + StartTime *duration.Duration `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time offset relative to the beginning of the audio, and + // corresponding to the end of the spoken word. This field is only set if + // `enable_word_time_offsets=true` and only in the top hypothesis. This is an + // experimental feature and the accuracy of the time offset can vary. + EndTime *duration.Duration `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The word corresponding to this set of information. + Word string `protobuf:"bytes,3,opt,name=word,proto3" json:"word,omitempty"` + // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. This field is set only for the top alternative. + // This field is not guaranteed to be accurate and users should not rely on it + // to be always provided. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Output only. A distinct integer value is assigned for every speaker within + // the audio. 
This field specifies which one of those speakers was detected to + // have spoken this word. Value ranges from 1 up to diarization_speaker_count, + // and is only set if speaker diarization is enabled. + SpeakerTag int32 `protobuf:"varint,5,opt,name=speaker_tag,json=speakerTag,proto3" json:"speaker_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WordInfo) Reset() { *m = WordInfo{} } +func (m *WordInfo) String() string { return proto.CompactTextString(m) } +func (*WordInfo) ProtoMessage() {} +func (*WordInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{26} +} +func (m *WordInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WordInfo.Unmarshal(m, b) +} +func (m *WordInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WordInfo.Marshal(b, m, deterministic) +} +func (dst *WordInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WordInfo.Merge(dst, src) +} +func (m *WordInfo) XXX_Size() int { + return xxx_messageInfo_WordInfo.Size(m) +} +func (m *WordInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WordInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WordInfo proto.InternalMessageInfo + +func (m *WordInfo) GetStartTime() *duration.Duration { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WordInfo) GetEndTime() *duration.Duration { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *WordInfo) GetWord() string { + if m != nil { + return m.Word + } + return "" +} + +func (m *WordInfo) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *WordInfo) GetSpeakerTag() int32 { + if m != nil { + return m.SpeakerTag + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. 
+	Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *NormalizedVertex) Reset()         { *m = NormalizedVertex{} }
+func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) }
+func (*NormalizedVertex) ProtoMessage()    {}
+func (*NormalizedVertex) Descriptor() ([]byte, []int) {
+	return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{27}
+}
+func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b)
+}
+func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic)
+}
+func (dst *NormalizedVertex) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_NormalizedVertex.Merge(dst, src)
+}
+func (m *NormalizedVertex) XXX_Size() int {
+	return xxx_messageInfo_NormalizedVertex.Size(m)
+}
+func (m *NormalizedVertex) XXX_DiscardUnknown() {
+	xxx_messageInfo_NormalizedVertex.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo
+
+func (m *NormalizedVertex) GetX() float32 {
+	if m != nil {
+		return m.X
+	}
+	return 0
+}
+
+func (m *NormalizedVertex) GetY() float32 {
+	if m != nil {
+		return m.Y
+	}
+	return 0
+}
+
+// Normalized bounding polygon for text (that might not be aligned with axis).
+// Contains list of the corner points in clockwise order starting from
+// top-left corner. For example, for a rectangular bounding box:
+// When the text is horizontal it might look like:
+//         0----1
+//         |    |
+//         3----2
+//
+// When it's clockwise rotated 180 degrees around the top-left corner it
+// becomes:
+//         2----3
+//         |    |
+//         1----0
+//
+// and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+// than 0, or greater than 1 due to trigonometric calculations for location of
+// the box.
+type NormalizedBoundingPoly struct {
+	// Normalized vertices of the bounding polygon.
+ Vertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingPoly) Reset() { *m = NormalizedBoundingPoly{} } +func (m *NormalizedBoundingPoly) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingPoly) ProtoMessage() {} +func (*NormalizedBoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{28} +} +func (m *NormalizedBoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingPoly.Unmarshal(m, b) +} +func (m *NormalizedBoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingPoly.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingPoly.Merge(dst, src) +} +func (m *NormalizedBoundingPoly) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingPoly.Size(m) +} +func (m *NormalizedBoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingPoly proto.InternalMessageInfo + +func (m *NormalizedBoundingPoly) GetVertices() []*NormalizedVertex { + if m != nil { + return m.Vertices + } + return nil +} + +// Video segment level annotation results for text detection. +type TextSegment struct { + // Video segment where a text snippet was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence for the track of detected text. It is calculated as the highest + // over all frames where OCR detected text appears. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Information related to the frames where OCR detected text appears. + Frames []*TextFrame `protobuf:"bytes,3,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSegment) Reset() { *m = TextSegment{} } +func (m *TextSegment) String() string { return proto.CompactTextString(m) } +func (*TextSegment) ProtoMessage() {} +func (*TextSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{29} +} +func (m *TextSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSegment.Unmarshal(m, b) +} +func (m *TextSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSegment.Marshal(b, m, deterministic) +} +func (dst *TextSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSegment.Merge(dst, src) +} +func (m *TextSegment) XXX_Size() int { + return xxx_messageInfo_TextSegment.Size(m) +} +func (m *TextSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TextSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSegment proto.InternalMessageInfo + +func (m *TextSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *TextSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *TextSegment) GetFrames() []*TextFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for text annotation (OCR). 
+// Contains information regarding timestamp and bounding box locations for the +// frames containing detected OCR text snippets. +type TextFrame struct { + // Bounding polygon of the detected text for this frame. + RotatedBoundingBox *NormalizedBoundingPoly `protobuf:"bytes,1,opt,name=rotated_bounding_box,json=rotatedBoundingBox,proto3" json:"rotated_bounding_box,omitempty"` + // Timestamp of this frame. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextFrame) Reset() { *m = TextFrame{} } +func (m *TextFrame) String() string { return proto.CompactTextString(m) } +func (*TextFrame) ProtoMessage() {} +func (*TextFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{30} +} +func (m *TextFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextFrame.Unmarshal(m, b) +} +func (m *TextFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextFrame.Marshal(b, m, deterministic) +} +func (dst *TextFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextFrame.Merge(dst, src) +} +func (m *TextFrame) XXX_Size() int { + return xxx_messageInfo_TextFrame.Size(m) +} +func (m *TextFrame) XXX_DiscardUnknown() { + xxx_messageInfo_TextFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_TextFrame proto.InternalMessageInfo + +func (m *TextFrame) GetRotatedBoundingBox() *NormalizedBoundingPoly { + if m != nil { + return m.RotatedBoundingBox + } + return nil +} + +func (m *TextFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations related to one detected OCR text snippet. This will contain the +// corresponding text, confidence value, and frame level information for each +// detection. +type TextAnnotation struct { + // The detected text. + Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // All video segments where OCR detected text appears. 
+ Segments []*TextSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{31} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *TextAnnotation) GetSegments() []*TextSegment { + if m != nil { + return m.Segments + } + return nil +} + +// Video frame level annotations for object detection and tracking. This field +// stores per frame location, time offset, and confidence. +type ObjectTrackingFrame struct { + // The normalized bounding box location of this object track for the frame. + NormalizedBoundingBox *NormalizedBoundingBox `protobuf:"bytes,1,opt,name=normalized_bounding_box,json=normalizedBoundingBox,proto3" json:"normalized_bounding_box,omitempty"` + // The timestamp of the frame in microseconds. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingFrame) Reset() { *m = ObjectTrackingFrame{} } +func (m *ObjectTrackingFrame) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingFrame) ProtoMessage() {} +func (*ObjectTrackingFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{32} +} +func (m *ObjectTrackingFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingFrame.Unmarshal(m, b) +} +func (m *ObjectTrackingFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingFrame.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingFrame.Merge(dst, src) +} +func (m *ObjectTrackingFrame) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingFrame.Size(m) +} +func (m *ObjectTrackingFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingFrame proto.InternalMessageInfo + +func (m *ObjectTrackingFrame) GetNormalizedBoundingBox() *NormalizedBoundingBox { + if m != nil { + return m.NormalizedBoundingBox + } + return nil +} + +func (m *ObjectTrackingFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations corresponding to one tracked object. 
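
// Illustrative sketch, not part of the protoc-gen-go output above: it shows
// how the OCR result types defined in this file nest together
// (TextAnnotation -> TextSegment -> TextFrame). Only the helper name
// printTextAnnotations is invented here; the getters it calls and the fmt
// import already exist in this file. Because the generated getters are
// nil-safe, the walk is safe even for partially populated messages.
func printTextAnnotations(annotations []*TextAnnotation) {
	for _, a := range annotations {
		// GetText returns "" when the annotation or the field is nil.
		fmt.Printf("text: %q\n", a.GetText())
		for _, s := range a.GetSegments() {
			// Segment-level confidence is the highest confidence over all
			// frames in which this text was detected.
			fmt.Printf("  segment confidence: %.2f\n", s.GetConfidence())
			for _, f := range s.GetFrames() {
				// Frame-level detail: time offset into the video plus the
				// rotated bounding polygon of the detected text.
				fmt.Printf("    frame at %v with %d vertices\n",
					f.GetTimeOffset(), len(f.GetRotatedBoundingBox().GetVertices()))
			}
		}
	}
}
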
+type ObjectTrackingAnnotation struct { + // Different representation of tracking info in non-streaming batch + // and streaming modes. + // + // Types that are valid to be assigned to TrackInfo: + // *ObjectTrackingAnnotation_Segment + // *ObjectTrackingAnnotation_TrackId + TrackInfo isObjectTrackingAnnotation_TrackInfo `protobuf_oneof:"track_info"` + // Entity to specify the object category that this track is labeled as. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Object category's labeling confidence of this track. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Information corresponding to all frames where this object track appears. + // Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame + // messages in frames. + // Streaming mode: it can only be one ObjectTrackingFrame message in frames. + Frames []*ObjectTrackingFrame `protobuf:"bytes,2,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingAnnotation) Reset() { *m = ObjectTrackingAnnotation{} } +func (m *ObjectTrackingAnnotation) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingAnnotation) ProtoMessage() {} +func (*ObjectTrackingAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_c8b048fd49d4d016, []int{33} +} +func (m *ObjectTrackingAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingAnnotation.Unmarshal(m, b) +} +func (m *ObjectTrackingAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingAnnotation.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingAnnotation.Merge(dst, src) +} +func (m *ObjectTrackingAnnotation) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingAnnotation.Size(m) +} +func (m *ObjectTrackingAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingAnnotation proto.InternalMessageInfo + +type isObjectTrackingAnnotation_TrackInfo interface { + isObjectTrackingAnnotation_TrackInfo() +} + +type ObjectTrackingAnnotation_Segment struct { + Segment *VideoSegment `protobuf:"bytes,3,opt,name=segment,proto3,oneof"` +} + +type ObjectTrackingAnnotation_TrackId struct { + TrackId int64 `protobuf:"varint,5,opt,name=track_id,json=trackId,proto3,oneof"` +} + +func (*ObjectTrackingAnnotation_Segment) isObjectTrackingAnnotation_TrackInfo() {} + +func (*ObjectTrackingAnnotation_TrackId) isObjectTrackingAnnotation_TrackInfo() {} + +func (m *ObjectTrackingAnnotation) GetTrackInfo() isObjectTrackingAnnotation_TrackInfo { + if m != nil { + return m.TrackInfo + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetSegment() *VideoSegment { + if x, ok := m.GetTrackInfo().(*ObjectTrackingAnnotation_Segment); ok { + return x.Segment + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetTrackId() int64 { + if x, ok := m.GetTrackInfo().(*ObjectTrackingAnnotation_TrackId); ok { + return x.TrackId + } + return 0 +} + +func (m *ObjectTrackingAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m 
*ObjectTrackingAnnotation) GetFrames() []*ObjectTrackingFrame { + if m != nil { + return m.Frames + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ObjectTrackingAnnotation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ObjectTrackingAnnotation_OneofMarshaler, _ObjectTrackingAnnotation_OneofUnmarshaler, _ObjectTrackingAnnotation_OneofSizer, []interface{}{ + (*ObjectTrackingAnnotation_Segment)(nil), + (*ObjectTrackingAnnotation_TrackId)(nil), + } +} + +func _ObjectTrackingAnnotation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ObjectTrackingAnnotation) + // track_info + switch x := m.TrackInfo.(type) { + case *ObjectTrackingAnnotation_Segment: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Segment); err != nil { + return err + } + case *ObjectTrackingAnnotation_TrackId: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.TrackId)) + case nil: + default: + return fmt.Errorf("ObjectTrackingAnnotation.TrackInfo has unexpected type %T", x) + } + return nil +} + +func _ObjectTrackingAnnotation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ObjectTrackingAnnotation) + switch tag { + case 3: // track_info.segment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoSegment) + err := b.DecodeMessage(msg) + m.TrackInfo = &ObjectTrackingAnnotation_Segment{msg} + return true, err + case 5: // track_info.track_id + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TrackInfo = &ObjectTrackingAnnotation_TrackId{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ObjectTrackingAnnotation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ObjectTrackingAnnotation) + // track_info + switch x := m.TrackInfo.(type) { + case *ObjectTrackingAnnotation_Segment: + s := proto.Size(x.Segment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ObjectTrackingAnnotation_TrackId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.TrackId)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1.VideoContext") + proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1.LabelDetectionConfig") + proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1.ShotChangeDetectionConfig") + proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1.ExplicitContentDetectionConfig") + proto.RegisterType((*FaceDetectionConfig)(nil), "google.cloud.videointelligence.v1.FaceDetectionConfig") + proto.RegisterType((*TextDetectionConfig)(nil), "google.cloud.videointelligence.v1.TextDetectionConfig") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1.VideoSegment") + proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1.LabelSegment") + proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1.LabelFrame") + proto.RegisterType((*Entity)(nil), 
"google.cloud.videointelligence.v1.Entity") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1.LabelAnnotation") + proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1.ExplicitContentFrame") + proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1.ExplicitContentAnnotation") + proto.RegisterType((*NormalizedBoundingBox)(nil), "google.cloud.videointelligence.v1.NormalizedBoundingBox") + proto.RegisterType((*FaceSegment)(nil), "google.cloud.videointelligence.v1.FaceSegment") + proto.RegisterType((*FaceFrame)(nil), "google.cloud.videointelligence.v1.FaceFrame") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.videointelligence.v1.FaceAnnotation") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1.VideoAnnotationResults") + proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1.AnnotateVideoProgress") + proto.RegisterType((*SpeechTranscriptionConfig)(nil), "google.cloud.videointelligence.v1.SpeechTranscriptionConfig") + proto.RegisterType((*SpeechContext)(nil), "google.cloud.videointelligence.v1.SpeechContext") + proto.RegisterType((*SpeechTranscription)(nil), "google.cloud.videointelligence.v1.SpeechTranscription") + proto.RegisterType((*SpeechRecognitionAlternative)(nil), "google.cloud.videointelligence.v1.SpeechRecognitionAlternative") + proto.RegisterType((*WordInfo)(nil), "google.cloud.videointelligence.v1.WordInfo") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.videointelligence.v1.NormalizedVertex") + proto.RegisterType((*NormalizedBoundingPoly)(nil), "google.cloud.videointelligence.v1.NormalizedBoundingPoly") + proto.RegisterType((*TextSegment)(nil), "google.cloud.videointelligence.v1.TextSegment") + proto.RegisterType((*TextFrame)(nil), "google.cloud.videointelligence.v1.TextFrame") + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.videointelligence.v1.TextAnnotation") + proto.RegisterType((*ObjectTrackingFrame)(nil), "google.cloud.videointelligence.v1.ObjectTrackingFrame") + proto.RegisterType((*ObjectTrackingAnnotation)(nil), "google.cloud.videointelligence.v1.ObjectTrackingAnnotation") + proto.RegisterEnum("google.cloud.videointelligence.v1.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1.Likelihood", Likelihood_name, Likelihood_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. 
+ // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1/video_intelligence.proto", fileDescriptor_video_intelligence_c8b048fd49d4d016) +} + +var fileDescriptor_video_intelligence_c8b048fd49d4d016 = []byte{ + // 2462 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcb, 0x6f, 0x1b, 0xc9, + 0xd1, 0xdf, 0x21, 0x45, 0x89, 0x2c, 0x52, 0x12, 0xdd, 0x7a, 0x51, 0xb2, 0x2d, 0xcb, 0xb3, 0xdf, + 0x02, 0xb6, 0xf7, 0x0b, 0x15, 0x69, 0x37, 0xde, 0xb5, 0x6c, 0x20, 0xa1, 0x28, 0xca, 0xe2, 0x5a, + 0x16, 0x89, 0x16, 0xad, 0xd8, 0x0b, 0x03, 0x83, 0xd1, 0x4c, 0x93, 0x1c, 0x6b, 0x38, 0xcd, 0xcc, + 0x0c, 0x15, 0x69, 0x91, 0x1c, 0x12, 0xe4, 0x71, 0xc9, 0x2d, 0x97, 0xdc, 0x82, 0x00, 0x39, 0xe5, + 0x90, 0x53, 0x10, 0xe4, 0x92, 0x60, 0x91, 0x20, 0x87, 
0xe4, 0x92, 0x43, 0x4e, 0xb9, 0xef, 0x1f, + 0x12, 0xf4, 0x63, 0xc8, 0x21, 0x87, 0xb2, 0x46, 0xf2, 0x22, 0x37, 0x76, 0x75, 0xd5, 0xaf, 0x1e, + 0x5d, 0x5d, 0x5d, 0x35, 0x84, 0xad, 0x16, 0xa5, 0x2d, 0x9b, 0xac, 0x1b, 0x36, 0xed, 0x99, 0xeb, + 0xa7, 0x96, 0x49, 0xa8, 0xe5, 0xf8, 0xc4, 0xb6, 0xad, 0x16, 0x71, 0x0c, 0xb2, 0x7e, 0xba, 0x21, + 0x88, 0x5a, 0x98, 0x5a, 0xec, 0xba, 0xd4, 0xa7, 0xe8, 0xae, 0x90, 0x2d, 0x72, 0xd9, 0x62, 0x44, + 0xb6, 0x78, 0xba, 0xb1, 0x72, 0x4b, 0xc2, 0xeb, 0x5d, 0x6b, 0x5d, 0x77, 0x1c, 0xea, 0xeb, 0xbe, + 0x45, 0x1d, 0x4f, 0x00, 0xac, 0xbc, 0x2f, 0x77, 0x6d, 0xea, 0xb4, 0xdc, 0x9e, 0xe3, 0x58, 0x4e, + 0x6b, 0x9d, 0x76, 0x89, 0x3b, 0xc4, 0xb4, 0x2a, 0x99, 0xf8, 0xea, 0xb8, 0xd7, 0x5c, 0x37, 0x7b, + 0x82, 0x41, 0xee, 0xdf, 0x19, 0xdd, 0xf7, 0xad, 0x0e, 0xf1, 0x7c, 0xbd, 0xd3, 0x95, 0x0c, 0x4b, + 0x92, 0xc1, 0xed, 0x1a, 0xeb, 0x9e, 0xaf, 0xfb, 0x3d, 0x89, 0xac, 0xfe, 0x31, 0x01, 0xf3, 0x25, + 0x61, 0x14, 0x39, 0x62, 0xd6, 0x63, 0xf2, 0xbd, 0x1e, 0xf1, 0x7c, 0x74, 0x13, 0x32, 0x96, 0xd3, + 0xed, 0xf9, 0x5a, 0xcf, 0xb5, 0x0a, 0xca, 0x9a, 0x72, 0x2f, 0x83, 0xd3, 0x9c, 0xf0, 0xc2, 0xb5, + 0xd0, 0xfb, 0x30, 0x2d, 0x36, 0x0d, 0xea, 0xf8, 0xc4, 0xf1, 0x0b, 0x93, 0x6b, 0xca, 0xbd, 0x1c, + 0xce, 0x71, 0x62, 0x59, 0xd0, 0xd0, 0x2e, 0xa4, 0x9b, 0x44, 0xf7, 0x7b, 0x2e, 0xf1, 0x0a, 0x89, + 0xb5, 0xe4, 0xbd, 0x99, 0xcd, 0x07, 0xc5, 0x4b, 0xa3, 0x55, 0xdc, 0x15, 0x22, 0xb8, 0x2f, 0x8b, + 0x1a, 0x30, 0x2d, 0xc2, 0xcf, 0x95, 0x9d, 0xf9, 0x85, 0xe4, 0x9a, 0x72, 0x2f, 0xbb, 0xb9, 0x1e, + 0x03, 0x8c, 0x7b, 0x54, 0x16, 0x62, 0x38, 0x77, 0x1a, 0x5a, 0xa1, 0xdb, 0x00, 0xb4, 0xe7, 0x07, + 0x0e, 0x4e, 0x70, 0x07, 0x33, 0x82, 0xc2, 0x3c, 0xbc, 0x03, 0x59, 0x9b, 0x1a, 0x3c, 0xc6, 0x9a, + 0x65, 0x16, 0x52, 0x7c, 0x1f, 0x02, 0x52, 0xd5, 0x54, 0x7f, 0x3f, 0x09, 0xb9, 0x30, 0x3c, 0x7a, + 0x06, 0x69, 0x8f, 0xb4, 0x3a, 0xc4, 0xf1, 0xbd, 0x82, 0xb2, 0x96, 0xbc, 0x8a, 0x85, 0x87, 0x42, + 0x0e, 0xf7, 0x01, 0x50, 0x07, 0x16, 0x6d, 0xfd, 0x98, 0xd8, 0x9a, 0x49, 0x7c, 0x62, 0x70, 0x2b, + 0x0c, 0xea, 0x34, 0xad, 0x56, 0x21, 0xc1, 0x9d, 0xff, 0x24, 0x06, 0xf4, 0x3e, 0x03, 0xd8, 0x09, + 0xe4, 0xcb, 0x5c, 0x1c, 0xcf, 0xdb, 0x63, 0xa8, 0xe8, 0x87, 0x70, 0xcb, 0x6b, 0x53, 0x5f, 0x33, + 0xda, 0xba, 0xd3, 0x22, 0x51, 0xa5, 0x22, 0xe2, 0x4f, 0x62, 0x28, 0x3d, 0x6c, 0x53, 0xbf, 0xcc, + 0x51, 0x46, 0x35, 0x2f, 0x7b, 0x17, 0x6d, 0xa1, 0x5f, 0x28, 0x70, 0x97, 0x9c, 0x75, 0x6d, 0xcb, + 0xb0, 0xfa, 0x29, 0x15, 0x35, 0x62, 0x82, 0x1b, 0x51, 0x8a, 0x61, 0x44, 0x45, 0x62, 0xc9, 0x4c, + 0x1c, 0xb5, 0x64, 0x95, 0xbc, 0x75, 0x1f, 0xbd, 0x81, 0x85, 0xa6, 0x6e, 0x8c, 0x09, 0x43, 0x8a, + 0x5b, 0xf0, 0x30, 0x4e, 0x16, 0xeb, 0x46, 0x24, 0x00, 0x73, 0xcd, 0x28, 0x11, 0xfd, 0x00, 0x6e, + 0x7a, 0x5d, 0x42, 0x8c, 0xb6, 0xe6, 0xbb, 0xba, 0xe3, 0x19, 0xae, 0xd5, 0x0d, 0x6b, 0x9c, 0x8c, + 0x1f, 0x78, 0x8e, 0xd2, 0x08, 0x83, 0xf4, 0x03, 0x7f, 0xd1, 0x16, 0xf3, 0x94, 0xe5, 0x6e, 0xd4, + 0xd3, 0x74, 0x6c, 0x4f, 0x1b, 0xe4, 0x2c, 0x12, 0xe0, 0x39, 0x3f, 0x4a, 0x54, 0xff, 0xaa, 0xc0, + 0xfc, 0xb8, 0x94, 0x44, 0x2d, 0x98, 0x1f, 0xcd, 0xf5, 0x0e, 0x35, 0x09, 0x2f, 0x3a, 0x33, 0x9b, + 0xdf, 0xba, 0x72, 0xa6, 0x3f, 0xa7, 0x26, 0xc1, 0xc8, 0x8e, 0xd0, 0xd0, 0x87, 0x70, 0xc3, 0x13, + 0xc5, 0x57, 0x77, 0xcf, 0x35, 0x43, 0xef, 0x10, 0x57, 0xe7, 0xf7, 0x29, 0x8d, 0xf3, 0x83, 0x8d, + 0x32, 0xa7, 0xa3, 0x79, 0x48, 0x31, 0x2b, 0x6c, 0x9e, 0xfb, 0x19, 0x2c, 0x16, 0xea, 0x06, 0x2c, + 0x5f, 0x98, 0xe1, 0x03, 0x11, 0x25, 0x2c, 0xf2, 0x10, 0x56, 0xdf, 0x9e, 0x8f, 0x17, 0xc8, 0xe9, + 0x30, 0x37, 0x26, 0x8b, 0xc6, 0x33, 0xa3, 0x8f, 0x61, 0xd1, 0x72, 0x0c, 0xbb, 
0x67, 0x12, 0xed, + 0x98, 0xf6, 0x1c, 0xd3, 0x72, 0x5a, 0xda, 0x31, 0x3d, 0xe3, 0x95, 0x97, 0xf9, 0x37, 0x2f, 0x77, + 0xb7, 0xe5, 0xe6, 0x36, 0xdb, 0x53, 0x9f, 0xc0, 0xdc, 0x98, 0xe3, 0x43, 0x1f, 0xc0, 0x8c, 0xad, + 0x3b, 0xad, 0x9e, 0xde, 0x22, 0x5a, 0xdb, 0x0a, 0xea, 0x59, 0x06, 0x4f, 0x07, 0xd4, 0x3d, 0x46, + 0x54, 0x7f, 0xa5, 0xc8, 0x0a, 0x28, 0xcb, 0x17, 0xaa, 0xf0, 0xf8, 0xba, 0xbe, 0xc6, 0x5e, 0x1f, + 0x8d, 0x36, 0x9b, 0x1e, 0xf1, 0xb9, 0x99, 0xd9, 0xcd, 0xe5, 0xe0, 0x14, 0x83, 0x17, 0xaa, 0xb8, + 0x23, 0x5f, 0x30, 0x3c, 0xcb, 0x65, 0x1a, 0x56, 0x87, 0xd4, 0xb8, 0x04, 0x2a, 0xc1, 0x2c, 0x71, + 0xcc, 0x21, 0x90, 0xc4, 0x65, 0x20, 0xd3, 0xc4, 0x31, 0x07, 0x10, 0xea, 0x39, 0xe4, 0x78, 0x4e, + 0x04, 0x96, 0x55, 0x61, 0x4a, 0x96, 0x56, 0x69, 0xcf, 0x95, 0x4b, 0x73, 0x20, 0x8f, 0x56, 0x01, + 0xf8, 0x1d, 0x31, 0x19, 0x1b, 0x37, 0x2c, 0x81, 0x43, 0x14, 0xb5, 0x0d, 0xc0, 0x55, 0xef, 0xba, + 0x7a, 0x87, 0xa0, 0x2d, 0xc8, 0x5e, 0x29, 0x18, 0xe0, 0x0f, 0xe2, 0x70, 0x99, 0x26, 0x1b, 0x26, + 0x2b, 0x8e, 0x6f, 0xf9, 0xe7, 0xec, 0xad, 0x26, 0xfc, 0x17, 0x7b, 0xaa, 0xe4, 0x5b, 0x2d, 0x08, + 0x55, 0x13, 0xad, 0x41, 0xd6, 0x24, 0xfd, 0x8b, 0xcf, 0x71, 0x32, 0x38, 0x4c, 0x62, 0xaf, 0x79, + 0xff, 0xbc, 0x0d, 0x76, 0xf3, 0x44, 0xca, 0xe7, 0x02, 0x62, 0x99, 0x9a, 0x44, 0xfd, 0x32, 0x01, + 0xb3, 0xdc, 0xb1, 0x52, 0xbf, 0x85, 0x41, 0x25, 0x98, 0x14, 0x6a, 0xa4, 0x63, 0xf7, 0xe3, 0xd4, + 0x66, 0x2e, 0x80, 0xa5, 0x20, 0x3a, 0x82, 0x1b, 0x86, 0xee, 0x93, 0x16, 0x75, 0xcf, 0x35, 0x4e, + 0xb2, 0x64, 0xb7, 0x70, 0x25, 0xb4, 0x7c, 0x80, 0x51, 0x91, 0x10, 0x43, 0xaf, 0x71, 0x32, 0xf6, + 0x6b, 0x1c, 0x4e, 0x9a, 0xd0, 0x6b, 0x5c, 0x81, 0xc9, 0x26, 0x3b, 0x4e, 0xaf, 0x30, 0xc1, 0xa1, + 0xbe, 0x11, 0x17, 0x8a, 0x27, 0x01, 0x96, 0xc2, 0xea, 0x9f, 0x14, 0x98, 0x1f, 0x29, 0x05, 0xef, + 0x9e, 0x25, 0x26, 0x2c, 0x76, 0xa9, 0xeb, 0xd0, 0x96, 0xab, 0x77, 0xdb, 0xe7, 0x9a, 0x6d, 0x9d, + 0x10, 0xdb, 0x6a, 0x53, 0x6a, 0xf2, 0x93, 0x9e, 0x89, 0x67, 0x6b, 0x5f, 0x08, 0x2f, 0x84, 0xc0, + 0x06, 0x64, 0xd5, 0x86, 0xe5, 0x11, 0xcb, 0x43, 0x69, 0x50, 0xeb, 0x87, 0x47, 0xf4, 0x3d, 0x9f, + 0x5c, 0xfd, 0x89, 0x1e, 0x0e, 0xd4, 0x09, 0x2c, 0x1c, 0x50, 0xb7, 0xa3, 0xdb, 0xd6, 0x17, 0xc4, + 0x0c, 0x95, 0x2c, 0x84, 0x60, 0xc2, 0x26, 0x4d, 0x11, 0xa1, 0x04, 0xe6, 0xbf, 0x51, 0x1e, 0x92, + 0x3e, 0xed, 0xca, 0xfb, 0xc1, 0x7e, 0xb2, 0x12, 0xe9, 0x5a, 0xad, 0xb6, 0x68, 0x14, 0x13, 0x58, + 0x2c, 0xd0, 0x22, 0x4c, 0x1e, 0x53, 0xdf, 0xa7, 0x1d, 0xde, 0x48, 0x24, 0xb0, 0x5c, 0xa9, 0x2f, + 0x21, 0xcb, 0xea, 0xec, 0xd7, 0x5f, 0x2a, 0xd4, 0x3f, 0x2b, 0x90, 0x61, 0xd0, 0xe2, 0x90, 0x7d, + 0x58, 0x76, 0xfa, 0x4e, 0x8d, 0x56, 0x69, 0x11, 0xb8, 0x4f, 0x63, 0xa8, 0x1a, 0x1b, 0x18, 0xbc, + 0xe4, 0x8c, 0x23, 0x13, 0x6f, 0x34, 0xb5, 0x12, 0x57, 0x48, 0x2d, 0xf5, 0x4b, 0x05, 0x66, 0x98, + 0xfd, 0xa1, 0xa3, 0xbe, 0x05, 0x19, 0xbf, 0xdd, 0xeb, 0x1c, 0x3b, 0xba, 0x25, 0x5e, 0xa0, 0x1c, + 0x1e, 0x10, 0xd0, 0x67, 0xa1, 0x4b, 0x27, 0xee, 0x70, 0x31, 0x66, 0xaf, 0x14, 0xbd, 0x73, 0x3b, + 0xfd, 0xa4, 0x12, 0xd7, 0xf7, 0xff, 0x63, 0x22, 0x0d, 0x67, 0xd2, 0x4f, 0xd3, 0xb0, 0xc8, 0x0f, + 0x67, 0xe0, 0x03, 0x26, 0x5e, 0xcf, 0xf6, 0xbd, 0xb7, 0x0f, 0x38, 0x0e, 0x2c, 0x4b, 0x4b, 0x34, + 0xd1, 0x9b, 0x84, 0x06, 0x37, 0xe9, 0xda, 0x66, 0xdc, 0x22, 0x10, 0x52, 0xbd, 0x24, 0x41, 0x47, + 0xe8, 0x1e, 0x6a, 0xc3, 0x22, 0x6f, 0xc0, 0xa3, 0xca, 0x92, 0xd7, 0x56, 0x36, 0xcf, 0x10, 0x23, + 0x9a, 0xde, 0xc0, 0x12, 0x8f, 0xcd, 0x18, 0x55, 0x13, 0xd7, 0x56, 0xb5, 0xc0, 0x21, 0x23, 0xba, + 0x5e, 0x43, 0x9e, 0x37, 0xd2, 0x61, 0x25, 0x29, 0xae, 0x64, 0x23, 0xe6, 0x69, 0x86, 0x74, 0xcc, + 0x36, 
0x87, 0xd6, 0x1e, 0xfa, 0x1c, 0xf2, 0x3c, 0x66, 0x61, 0xf4, 0xc9, 0xeb, 0x0d, 0x5e, 0xb3, + 0x0c, 0x28, 0x8c, 0xdd, 0x81, 0xb9, 0xfe, 0x40, 0x32, 0xc0, 0x2f, 0x4c, 0xc5, 0x6e, 0xc7, 0x2f, + 0xac, 0x96, 0x18, 0x05, 0xc0, 0xa1, 0x6b, 0x75, 0x02, 0x0b, 0xe3, 0xa6, 0x00, 0xaf, 0x90, 0xe5, + 0xfe, 0x3c, 0xbc, 0x5e, 0xff, 0x8f, 0xe7, 0xc7, 0x74, 0xfe, 0xfc, 0x54, 0x78, 0xd3, 0x1f, 0x8e, + 0x5b, 0x2e, 0xf6, 0xa9, 0xb0, 0x86, 0x31, 0x7c, 0x2a, 0xfe, 0xd0, 0x9a, 0xe5, 0x17, 0xa2, 0xc7, + 0x6f, 0x88, 0x31, 0x8c, 0x3f, 0xc3, 0xf1, 0x1f, 0xc7, 0xc0, 0xaf, 0x71, 0xe1, 0x86, 0xab, 0x1b, + 0x27, 0x96, 0xd3, 0x0a, 0x69, 0xba, 0x21, 0x60, 0xc3, 0xba, 0xee, 0x41, 0x8a, 0xb8, 0x2e, 0x75, + 0x0b, 0x19, 0x7e, 0x2e, 0x28, 0x80, 0x77, 0xbb, 0x46, 0xf1, 0x90, 0x7f, 0xe5, 0xc0, 0x82, 0x41, + 0xfd, 0x91, 0x02, 0x0b, 0x23, 0x9f, 0x39, 0xbc, 0x2e, 0x75, 0x3c, 0x82, 0xda, 0x80, 0x06, 0x86, + 0x6a, 0xae, 0x28, 0x0e, 0xb2, 0x1e, 0x3f, 0x8a, 0x9b, 0x47, 0x91, 0xea, 0x82, 0x6f, 0xe8, 0xa3, + 0x24, 0xf5, 0x3f, 0x0a, 0x2c, 0x8d, 0x70, 0xd7, 0x5d, 0xda, 0x72, 0x89, 0x77, 0x49, 0x31, 0xba, + 0x0f, 0xf9, 0xae, 0x64, 0xd4, 0xba, 0xc4, 0x35, 0xd8, 0xdb, 0xc4, 0x0a, 0x79, 0x0a, 0xcf, 0x06, + 0xf4, 0xba, 0x20, 0xa3, 0x47, 0x00, 0x83, 0x16, 0x5c, 0x8e, 0xed, 0x2b, 0x91, 0x6a, 0xdf, 0x08, + 0xbe, 0x0e, 0xe1, 0x4c, 0xbf, 0xf9, 0x46, 0x8f, 0x21, 0xdb, 0xeb, 0x9a, 0xba, 0x4f, 0x84, 0xec, + 0xc4, 0xa5, 0xb2, 0x20, 0xd8, 0x19, 0x41, 0xfd, 0xc9, 0x68, 0x7c, 0xfb, 0x9e, 0x9d, 0xc0, 0x5c, + 0x28, 0xbe, 0x81, 0xbd, 0x32, 0xc0, 0x5b, 0x57, 0x0f, 0x70, 0x00, 0x8c, 0x43, 0xc7, 0x16, 0xd0, + 0xd4, 0x9f, 0x4d, 0xc0, 0xf2, 0x85, 0x83, 0x70, 0xb4, 0xcf, 0x55, 0xa2, 0x7d, 0x2e, 0x0b, 0x76, + 0x47, 0x3f, 0xd3, 0x74, 0xdb, 0x27, 0xae, 0xa3, 0xfb, 0xd6, 0xa9, 0x9c, 0xa1, 0x52, 0x78, 0xb6, + 0xa3, 0x9f, 0x95, 0x42, 0x64, 0xc6, 0xda, 0xb4, 0x18, 0x81, 0xb9, 0xd5, 0xd4, 0x1d, 0xd6, 0x08, + 0x27, 0xf9, 0xb8, 0x35, 0x2b, 0xe8, 0xf5, 0x80, 0x8c, 0x5e, 0xc1, 0xac, 0xbc, 0xe0, 0xf2, 0x23, + 0x56, 0x50, 0x6d, 0xbf, 0x19, 0xfb, 0x6a, 0x07, 0x9f, 0xb1, 0x66, 0xbc, 0xf0, 0xd2, 0x43, 0xdf, + 0x81, 0x5b, 0xc4, 0xd1, 0x8f, 0x6d, 0xa2, 0xe9, 0x3d, 0x9f, 0x76, 0x74, 0xdf, 0x32, 0xb4, 0x6e, + 0xcf, 0x31, 0xfc, 0x9e, 0xa8, 0x59, 0x29, 0x6e, 0xd1, 0x8a, 0xe0, 0x29, 0x05, 0x2c, 0xf5, 0x01, + 0x07, 0xba, 0x0b, 0x39, 0xbd, 0x67, 0x5a, 0x94, 0x15, 0x1f, 0xe3, 0x44, 0x14, 0xd1, 0x14, 0xce, + 0x72, 0x1a, 0xbf, 0x88, 0x1e, 0x7a, 0x02, 0x12, 0x40, 0xf3, 0xba, 0x44, 0x3f, 0x21, 0xae, 0x66, + 0x5a, 0xba, 0x6b, 0x7d, 0x31, 0x28, 0x8b, 0x69, 0x5c, 0x10, 0x1c, 0x87, 0x82, 0x61, 0x67, 0xb0, + 0x8f, 0xb6, 0x60, 0x39, 0xc4, 0xde, 0x87, 0x30, 0x68, 0xcf, 0xf1, 0xf9, 0xa7, 0x86, 0x14, 0x5e, + 0x0a, 0x31, 0x48, 0x84, 0x32, 0xdb, 0x66, 0x93, 0xad, 0xd4, 0xfc, 0x7d, 0xea, 0x9a, 0x5a, 0x68, + 0x22, 0xca, 0x88, 0xc9, 0x56, 0xec, 0x7e, 0x97, 0xba, 0x66, 0x79, 0x30, 0x1b, 0xdd, 0x87, 0xe9, + 0xa1, 0xa8, 0xa1, 0x02, 0x4c, 0x75, 0xdb, 0xae, 0xee, 0x91, 0x60, 0x98, 0x0d, 0x96, 0xea, 0xaf, + 0x15, 0x98, 0x1b, 0x93, 0x33, 0xc8, 0x80, 0xdc, 0x50, 0x12, 0x88, 0x8c, 0xfd, 0x76, 0xec, 0xf3, + 0xc2, 0xc4, 0xa0, 0x2d, 0xc7, 0x62, 0x58, 0xa1, 0xac, 0xc1, 0x43, 0xa0, 0xd1, 0x94, 0x4c, 0x8c, + 0x19, 0xbd, 0x7e, 0xa3, 0xc0, 0xad, 0xb7, 0x61, 0xb2, 0x49, 0x71, 0xf0, 0x6e, 0xc8, 0xac, 0x0e, + 0x51, 0x2e, 0x9b, 0x24, 0x51, 0x09, 0x52, 0x2c, 0xb8, 0x41, 0xb3, 0xf1, 0x61, 0x0c, 0x1f, 0x59, + 0xbc, 0xab, 0x4e, 0x93, 0x62, 0x21, 0xa9, 0xfe, 0x4b, 0x81, 0x74, 0x40, 0x43, 0x9f, 0x0e, 0x55, + 0xa1, 0x4b, 0xc7, 0x99, 0x50, 0x11, 0xfa, 0x18, 0xd2, 0xc1, 0xec, 0x7f, 0x79, 0xaf, 0x3a, 0x25, + 0x87, 0x7e, 0x36, 0x16, 0x30, 
0x2b, 0xe4, 0xdc, 0xca, 0x7f, 0x8f, 0xf8, 0x3c, 0x11, 0xf1, 0xf9, + 0x0e, 0x64, 0x83, 0x3c, 0xf4, 0x75, 0xf1, 0x69, 0x2f, 0x85, 0x41, 0x92, 0x1a, 0x7a, 0x4b, 0x2d, + 0x42, 0x7e, 0xd0, 0x6b, 0x1f, 0x11, 0xd7, 0x27, 0x67, 0x28, 0x07, 0xca, 0x99, 0x1c, 0x3e, 0x14, + 0xbe, 0x3a, 0x97, 0xd1, 0x54, 0xce, 0x55, 0x0b, 0x16, 0xa3, 0xbd, 0x79, 0x9d, 0xda, 0xe7, 0xa8, + 0x06, 0xe9, 0x53, 0xe2, 0xfa, 0x96, 0xd1, 0xcf, 0xa2, 0x8f, 0xae, 0xd4, 0xe8, 0x0b, 0xe5, 0xb8, + 0x0f, 0xa2, 0xfe, 0x45, 0x81, 0x2c, 0x7b, 0x87, 0xff, 0xf7, 0x9f, 0x37, 0xae, 0xd5, 0x96, 0x33, + 0x53, 0x87, 0xdb, 0xf2, 0x3f, 0x28, 0x90, 0xe9, 0x53, 0xd1, 0x09, 0xcc, 0xbb, 0xfc, 0xe5, 0x18, + 0x1e, 0x8b, 0xa4, 0x2f, 0x8f, 0xae, 0x35, 0x14, 0xb1, 0xc0, 0x63, 0x24, 0x61, 0xc3, 0x23, 0xe4, + 0xbb, 0x0c, 0x44, 0x5d, 0x98, 0x19, 0x6e, 0x7f, 0x58, 0xe6, 0xf1, 0xbf, 0x24, 0xc4, 0x9d, 0xe3, + 0xbf, 0xaf, 0x39, 0x05, 0x85, 0xce, 0x73, 0x30, 0x05, 0xa9, 0x7f, 0x53, 0x60, 0x6e, 0xb8, 0x23, + 0x12, 0x21, 0xeb, 0xc2, 0xd2, 0x05, 0xc3, 0xa4, 0x8c, 0xda, 0xf5, 0x47, 0xc9, 0x85, 0xb1, 0xa3, + 0xe4, 0x3b, 0xc5, 0xed, 0xef, 0x09, 0x28, 0x5c, 0xd4, 0xd7, 0xa1, 0x67, 0x83, 0xe4, 0x4d, 0x5e, + 0x2b, 0x79, 0xf7, 0xde, 0x1b, 0xa4, 0xef, 0x4d, 0x48, 0xf3, 0x47, 0x2c, 0xf8, 0xcf, 0x26, 0xc9, + 0x36, 0x39, 0xa5, 0x6a, 0x7e, 0x1d, 0x9f, 0xab, 0x2e, 0xab, 0x2a, 0x07, 0xfd, 0xeb, 0x91, 0x88, + 0xdd, 0xb9, 0x8f, 0x39, 0xdf, 0xe0, 0xa2, 0x6c, 0xe7, 0x78, 0x65, 0x67, 0xfe, 0x38, 0x4d, 0xfa, + 0xe0, 0x1f, 0x0a, 0x4c, 0xc9, 0xff, 0xc7, 0xd0, 0x12, 0xcc, 0xed, 0x56, 0x4a, 0x8d, 0x17, 0xb8, + 0xa2, 0xbd, 0x38, 0x38, 0xac, 0x57, 0xca, 0xd5, 0xdd, 0x6a, 0x65, 0x27, 0xff, 0x1e, 0x9a, 0x83, + 0xd9, 0xfd, 0xd2, 0x76, 0x65, 0x5f, 0xdb, 0xa9, 0x34, 0x2a, 0xe5, 0x46, 0xb5, 0x76, 0x90, 0x57, + 0xd0, 0x32, 0x2c, 0x1c, 0xee, 0xd5, 0x1a, 0x5a, 0x79, 0xaf, 0x74, 0xf0, 0xb4, 0x12, 0xda, 0x4a, + 0xa0, 0x55, 0x58, 0xa9, 0xbc, 0xac, 0xef, 0x57, 0xcb, 0xd5, 0x86, 0x56, 0xae, 0x1d, 0x34, 0x2a, + 0x07, 0x8d, 0xd0, 0x7e, 0x12, 0x21, 0x98, 0xd9, 0x2d, 0x95, 0xc3, 0x32, 0x13, 0xa8, 0x00, 0xf3, + 0x87, 0xf5, 0x4a, 0xa5, 0xbc, 0xa7, 0x35, 0x70, 0xe9, 0xe0, 0xb0, 0x8c, 0xab, 0x75, 0xbe, 0x33, + 0xc9, 0xb8, 0x1b, 0x95, 0x97, 0x61, 0x84, 0x29, 0x66, 0x51, 0x6d, 0xfb, 0xb3, 0x4a, 0xb9, 0xc1, + 0xb8, 0xcb, 0xcf, 0xaa, 0x07, 0x4f, 0xf3, 0x99, 0x07, 0x2e, 0xa0, 0xe8, 0x67, 0x7b, 0xf4, 0x7f, + 0xb0, 0x36, 0x62, 0xbc, 0xf6, 0xbc, 0xb6, 0x33, 0xea, 0xe2, 0x34, 0x64, 0xb8, 0x37, 0x6c, 0x2b, + 0xaf, 0xa0, 0x19, 0x80, 0x5d, 0x5c, 0x7a, 0x5e, 0x11, 0xeb, 0x04, 0x0b, 0x0d, 0xdf, 0x2e, 0x1d, + 0xec, 0x68, 0xa1, 0x8d, 0xe4, 0x03, 0x1f, 0x60, 0xf0, 0x4d, 0x0b, 0xad, 0xc0, 0xe2, 0x7e, 0xf5, + 0x59, 0x65, 0xbf, 0xba, 0x57, 0xab, 0xed, 0x8c, 0x68, 0xb8, 0x01, 0xd3, 0x47, 0x15, 0xfc, 0x4a, + 0x7b, 0x71, 0xc0, 0x59, 0x5e, 0xe5, 0x15, 0x94, 0x83, 0x74, 0x7f, 0x95, 0x60, 0xab, 0x7a, 0xed, + 0xf0, 0xb0, 0xba, 0xbd, 0x5f, 0xc9, 0x27, 0x11, 0xc0, 0xa4, 0xdc, 0x99, 0x40, 0xb3, 0x90, 0xe5, + 0xa2, 0x92, 0x90, 0xda, 0xfc, 0xad, 0x02, 0x05, 0x9e, 0xaf, 0xd5, 0xd0, 0xc1, 0x1f, 0x12, 0xf7, + 0xd4, 0x32, 0x08, 0xfa, 0xb9, 0x02, 0xd3, 0x43, 0x8d, 0x33, 0x8a, 0xf3, 0xf5, 0x6c, 0xdc, 0x3f, + 0xb6, 0x2b, 0xb7, 0x03, 0xc1, 0xd0, 0x5f, 0xc9, 0xc5, 0x5a, 0xf0, 0x57, 0xb2, 0xba, 0xfa, 0xe3, + 0x7f, 0x7f, 0xf5, 0xcb, 0x44, 0x41, 0x9d, 0xeb, 0xff, 0x9f, 0xed, 0x6d, 0xc9, 0x16, 0x9a, 0x6c, + 0x29, 0x0f, 0xb6, 0xbf, 0x52, 0xe0, 0x03, 0x83, 0x76, 0x2e, 0xd7, 0xbe, 0x7d, 0xfb, 0x22, 0x6f, + 0xea, 0xac, 0x0a, 0xd4, 0x95, 0xcf, 0xb1, 0xc4, 0x68, 0x51, 0xd6, 0xc7, 0x14, 0xa9, 0xdb, 0x5a, + 0x6f, 0x11, 0x87, 0xd7, 0x88, 0x75, 0xb1, 0xa5, 0x77, 
0x2d, 0xef, 0x2d, 0xff, 0xb7, 0x3f, 0x8e, + 0x10, 0x7f, 0x97, 0xb8, 0xfb, 0x54, 0x80, 0x96, 0xb9, 0x61, 0x11, 0x13, 0x8a, 0x47, 0x1b, 0xff, + 0x0c, 0x78, 0x5e, 0x73, 0x9e, 0xd7, 0x11, 0x9e, 0xd7, 0x47, 0x1b, 0xc7, 0x93, 0xdc, 0x8c, 0x8f, + 0xfe, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x74, 0x61, 0x19, 0xe7, 0xf5, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1/video_intelligence.pb.go new file mode 100644 index 0000000..cb2a0a3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1/video_intelligence.pb.go @@ -0,0 +1,1263 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/videointelligence/v1beta1/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Human face detection and tracking. + Feature_FACE_DETECTION Feature = 2 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 3 + // Safe search detection. + Feature_SAFE_SEARCH_DETECTION Feature = 4 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "FACE_DETECTION", + 3: "SHOT_CHANGE_DETECTION", + 4: "SAFE_SEARCH_DETECTION", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "FACE_DETECTION": 2, + "SHOT_CHANGE_DETECTION": 3, + "SAFE_SEARCH_DETECTION": 4, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{0} +} + +// Label level (scope). +type LabelLevel int32 + +const ( + // Unspecified. + LabelLevel_LABEL_LEVEL_UNSPECIFIED LabelLevel = 0 + // Video-level. Corresponds to the whole video. + LabelLevel_VIDEO_LEVEL LabelLevel = 1 + // Segment-level. Corresponds to one of `AnnotateSpec.segments`. + LabelLevel_SEGMENT_LEVEL LabelLevel = 2 + // Shot-level. Corresponds to a single shot (i.e. a series of frames + // without a major camera position or background change). + LabelLevel_SHOT_LEVEL LabelLevel = 3 + // Frame-level. Corresponds to a single video frame. 
+ LabelLevel_FRAME_LEVEL LabelLevel = 4 +) + +var LabelLevel_name = map[int32]string{ + 0: "LABEL_LEVEL_UNSPECIFIED", + 1: "VIDEO_LEVEL", + 2: "SEGMENT_LEVEL", + 3: "SHOT_LEVEL", + 4: "FRAME_LEVEL", +} +var LabelLevel_value = map[string]int32{ + "LABEL_LEVEL_UNSPECIFIED": 0, + "VIDEO_LEVEL": 1, + "SEGMENT_LEVEL": 2, + "SHOT_LEVEL": 3, + "FRAME_LEVEL": 4, +} + +func (x LabelLevel) String() string { + return proto.EnumName(LabelLevel_name, int32(x)) +} +func (LabelLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{1} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. + LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{2} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // Very unlikely. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{3} +} + +// Video annotation request. +type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. Encoding: base64. If unset, the input video(s) + // should be specified via `input_uri`. 
If set, `input_uri` should be unset. + InputContent string `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1beta1.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. + VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. + LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() string { + if m != nil { + return m.InputContent + } + return "" +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. 
If unspecified, each video + // is treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // If label detection has been requested, what labels should be detected + // in addition to video-level labels or segment-level labels. If unspecified, + // defaults to `SHOT_MODE`. + LabelDetectionMode LabelDetectionMode `protobuf:"varint,2,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1beta1.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + StationaryCamera bool `protobuf:"varint,3,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "latest" and "stable" (the default). + LabelDetectionModel string `protobuf:"bytes,4,opt,name=label_detection_model,json=labelDetectionModel,proto3" json:"label_detection_model,omitempty"` + // Model to use for face detection. + // Supported values: "latest" and "stable" (the default). + FaceDetectionModel string `protobuf:"bytes,5,opt,name=face_detection_model,json=faceDetectionModel,proto3" json:"face_detection_model,omitempty"` + // Model to use for shot change detection. + // Supported values: "latest" and "stable" (the default). + ShotChangeDetectionModel string `protobuf:"bytes,6,opt,name=shot_change_detection_model,json=shotChangeDetectionModel,proto3" json:"shot_change_detection_model,omitempty"` + // Model to use for safe search detection. + // Supported values: "latest" and "stable" (the default). + SafeSearchDetectionModel string `protobuf:"bytes,7,opt,name=safe_search_detection_model,json=safeSearchDetectionModel,proto3" json:"safe_search_detection_model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *VideoContext) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *VideoContext) GetLabelDetectionModel() string { + if m != nil { + return m.LabelDetectionModel + } + return "" +} + +func (m *VideoContext) 
GetFaceDetectionModel() string { + if m != nil { + return m.FaceDetectionModel + } + return "" +} + +func (m *VideoContext) GetShotChangeDetectionModel() string { + if m != nil { + return m.ShotChangeDetectionModel + } + return "" +} + +func (m *VideoContext) GetSafeSearchDetectionModel() string { + if m != nil { + return m.SafeSearchDetectionModel + } + return "" +} + +// Video segment. +type VideoSegment struct { + // Start offset in microseconds (inclusive). Unset means 0. + StartTimeOffset int64 `protobuf:"varint,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // End offset in microseconds (inclusive). Unset means 0. + EndTimeOffset int64 `protobuf:"varint,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{2} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() int64 { + if m != nil { + return m.StartTimeOffset + } + return 0 +} + +func (m *VideoSegment) GetEndTimeOffset() int64 { + if m != nil { + return m.EndTimeOffset + } + return 0 +} + +// Label location. +type LabelLocation struct { + // Video segment. Set to [-1, -1] for video-level labels. + // Set to [timestamp, timestamp] for frame-level labels. + // Otherwise, corresponds to one of `AnnotateSpec.segments` + // (if specified) or to shot boundaries (if requested). + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Label level. 
+ Level LabelLevel `protobuf:"varint,3,opt,name=level,proto3,enum=google.cloud.videointelligence.v1beta1.LabelLevel" json:"level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelLocation) Reset() { *m = LabelLocation{} } +func (m *LabelLocation) String() string { return proto.CompactTextString(m) } +func (*LabelLocation) ProtoMessage() {} +func (*LabelLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{3} +} +func (m *LabelLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelLocation.Unmarshal(m, b) +} +func (m *LabelLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelLocation.Marshal(b, m, deterministic) +} +func (dst *LabelLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelLocation.Merge(dst, src) +} +func (m *LabelLocation) XXX_Size() int { + return xxx_messageInfo_LabelLocation.Size(m) +} +func (m *LabelLocation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelLocation proto.InternalMessageInfo + +func (m *LabelLocation) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelLocation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *LabelLocation) GetLevel() LabelLevel { + if m != nil { + return m.Level + } + return LabelLevel_LABEL_LEVEL_UNSPECIFIED +} + +// Label annotation. +type LabelAnnotation struct { + // Textual description, e.g. `Fixed-gear bicycle`. + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Where the label was detected and with what confidence. 
+ Locations []*LabelLocation `protobuf:"bytes,3,rep,name=locations,proto3" json:"locations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{4} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *LabelAnnotation) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *LabelAnnotation) GetLocations() []*LabelLocation { + if m != nil { + return m.Locations + } + return nil +} + +// Safe search annotation (based on per-frame visual signals only). +// If no unsafe content has been detected in a frame, no annotations +// are present for that frame. If only some types of unsafe content +// have been detected in a frame, the likelihood is set to `UNKNOWN` +// for all other types of unsafe content. +type SafeSearchAnnotation struct { + // Likelihood of adult content. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.videointelligence.v1beta1.Likelihood" json:"adult,omitempty"` + // Likelihood that an obvious modification was made to the original + // version to make it appear funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.videointelligence.v1beta1.Likelihood" json:"spoof,omitempty"` + // Likelihood of medical content. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.videointelligence.v1beta1.Likelihood" json:"medical,omitempty"` + // Likelihood of violent content. + Violent Likelihood `protobuf:"varint,4,opt,name=violent,proto3,enum=google.cloud.videointelligence.v1beta1.Likelihood" json:"violent,omitempty"` + // Likelihood of racy content. + Racy Likelihood `protobuf:"varint,5,opt,name=racy,proto3,enum=google.cloud.videointelligence.v1beta1.Likelihood" json:"racy,omitempty"` + // Video time offset in microseconds. 
+ TimeOffset int64 `protobuf:"varint,6,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{5} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolent() Likelihood { + if m != nil { + return m.Violent + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetTimeOffset() int64 { + if m != nil { + return m.TimeOffset + } + return 0 +} + +// Bounding box. +type BoundingBox struct { + // Left X coordinate. + Left int32 `protobuf:"varint,1,opt,name=left,proto3" json:"left,omitempty"` + // Right X coordinate. + Right int32 `protobuf:"varint,2,opt,name=right,proto3" json:"right,omitempty"` + // Bottom Y coordinate. + Bottom int32 `protobuf:"varint,3,opt,name=bottom,proto3" json:"bottom,omitempty"` + // Top Y coordinate. 
+ Top int32 `protobuf:"varint,4,opt,name=top,proto3" json:"top,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingBox) Reset() { *m = BoundingBox{} } +func (m *BoundingBox) String() string { return proto.CompactTextString(m) } +func (*BoundingBox) ProtoMessage() {} +func (*BoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{6} +} +func (m *BoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingBox.Unmarshal(m, b) +} +func (m *BoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingBox.Marshal(b, m, deterministic) +} +func (dst *BoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingBox.Merge(dst, src) +} +func (m *BoundingBox) XXX_Size() int { + return xxx_messageInfo_BoundingBox.Size(m) +} +func (m *BoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingBox proto.InternalMessageInfo + +func (m *BoundingBox) GetLeft() int32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *BoundingBox) GetRight() int32 { + if m != nil { + return m.Right + } + return 0 +} + +func (m *BoundingBox) GetBottom() int32 { + if m != nil { + return m.Bottom + } + return 0 +} + +func (m *BoundingBox) GetTop() int32 { + if m != nil { + return m.Top + } + return 0 +} + +// Face location. +type FaceLocation struct { + // Bounding box in a frame. + BoundingBox *BoundingBox `protobuf:"bytes,1,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // Video time offset in microseconds. + TimeOffset int64 `protobuf:"varint,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceLocation) Reset() { *m = FaceLocation{} } +func (m *FaceLocation) String() string { return proto.CompactTextString(m) } +func (*FaceLocation) ProtoMessage() {} +func (*FaceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{7} +} +func (m *FaceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceLocation.Unmarshal(m, b) +} +func (m *FaceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceLocation.Marshal(b, m, deterministic) +} +func (dst *FaceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceLocation.Merge(dst, src) +} +func (m *FaceLocation) XXX_Size() int { + return xxx_messageInfo_FaceLocation.Size(m) +} +func (m *FaceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceLocation proto.InternalMessageInfo + +func (m *FaceLocation) GetBoundingBox() *BoundingBox { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *FaceLocation) GetTimeOffset() int64 { + if m != nil { + return m.TimeOffset + } + return 0 +} + +// Face annotation. +type FaceAnnotation struct { + // Thumbnail of a representative face view (in JPEG format). Encoding: base64. + Thumbnail string `protobuf:"bytes,1,opt,name=thumbnail,proto3" json:"thumbnail,omitempty"` + // All locations where a face was detected. + // Faces are detected and tracked on a per-video basis + // (as opposed to across multiple videos). 
+ Segments []*VideoSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + // Face locations at one frame per second. + Locations []*FaceLocation `protobuf:"bytes,3,rep,name=locations,proto3" json:"locations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{8} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetThumbnail() string { + if m != nil { + return m.Thumbnail + } + return "" +} + +func (m *FaceAnnotation) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *FaceAnnotation) GetLocations() []*FaceLocation { + if m != nil { + return m.Locations + } + return nil +} + +// Annotation results for a single video. +type VideoAnnotationResults struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations. There is exactly one element for each unique label. + LabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // Face annotations. There is exactly one element for each unique face. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,3,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,4,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Safe search annotations. + SafeSearchAnnotations []*SafeSearchAnnotation `protobuf:"bytes,6,rep,name=safe_search_annotations,json=safeSearchAnnotations,proto3" json:"safe_search_annotations,omitempty"` + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` + // some videos may succeed and some may fail. 
+ Error *status.Status `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{9} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetSafeSearchAnnotations() []*SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{10} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Approximate percentage processed thus far. + // Guaranteed to be 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{11} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_3999319c2d69fb16, []int{12} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1beta1.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1beta1.VideoContext") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1beta1.VideoSegment") + proto.RegisterType((*LabelLocation)(nil), "google.cloud.videointelligence.v1beta1.LabelLocation") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1beta1.LabelAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.videointelligence.v1beta1.SafeSearchAnnotation") + proto.RegisterType((*BoundingBox)(nil), "google.cloud.videointelligence.v1beta1.BoundingBox") + proto.RegisterType((*FaceLocation)(nil), "google.cloud.videointelligence.v1beta1.FaceLocation") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.videointelligence.v1beta1.FaceAnnotation") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1beta1.VideoAnnotationResults") + proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1beta1.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1beta1.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1beta1.AnnotateVideoProgress") + proto.RegisterEnum("google.cloud.videointelligence.v1beta1.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1beta1.LabelLevel", LabelLevel_name, LabelLevel_value) + proto.RegisterEnum("google.cloud.videointelligence.v1beta1.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1beta1.Likelihood", Likelihood_name, Likelihood_value) +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1beta1.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). 
+ AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1beta1.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1beta1.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1beta1/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1beta1/video_intelligence.proto", fileDescriptor_video_intelligence_3999319c2d69fb16) +} + +var fileDescriptor_video_intelligence_3999319c2d69fb16 = []byte{ + // 1520 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0xcb, 0x6f, 0x1b, 0xd5, + 0x1a, 0xef, 0xf8, 0x91, 0xc4, 0x9f, 0x93, 0xd8, 0x39, 0x49, 0x1a, 0xdf, 0xb4, 0xb9, 0x8d, 0xdc, + 0xab, 0x2a, 0x37, 0x57, 0xb2, 0x6f, 0x5d, 0x1e, 0xa2, 0x05, 0x2a, 0xc7, 0x19, 0x37, 0x56, 0x1d, + 0x3b, 0x1a, 0x27, 0xa9, 0x8a, 0x2a, 0x8d, 0xc6, 0x33, 0xc7, 0xce, 0x88, 0xf1, 0x9c, 0x61, 0xe6, + 0x4c, 0xd4, 0x2e, 0x61, 0x01, 0x62, 0x89, 0xf8, 0x2f, 0x90, 0x80, 0x7f, 0x81, 0x2d, 0x6c, 0x61, + 0xc3, 0x8a, 0x0d, 0x7f, 0x04, 0x3b, 0xd0, 0x79, 0x8c, 0x3d, 0xb6, 0x03, 0xb5, 0x03, 0x3b, 0x9f, + 0xef, 0xf1, 0xfb, 0xde, 0xe7, 0x7c, 0x63, 0x78, 0xdc, 0x27, 0xa4, 0xef, 0xe0, 0xb2, 0xe9, 0x90, + 0xd0, 0x2a, 0x5f, 0xda, 0x16, 0x26, 0xb6, 0x4b, 0xb1, 0xe3, 0xd8, 0x7d, 0xec, 0x9a, 0xb8, 0x7c, + 0x79, 0xbf, 0x8b, 0xa9, 0x71, 0x5f, 0x70, 0xf4, 0x38, 0xab, 0xe4, 0xf9, 0x84, 0x12, 0x74, 0x4f, + 0x00, 0x94, 0x38, 0x40, 0x69, 0x0a, 0xa0, 0x24, 0x01, 0xb6, 0x6f, 0x4b, 0x43, 0x86, 0x67, 0x97, + 0x0d, 0xd7, 0x25, 0xd4, 0xa0, 0x36, 0x71, 0x03, 0x81, 0xb2, 0x7d, 0x57, 0x72, 0x1d, 0xe2, 0xf6, + 0xfd, 0xd0, 0x75, 0x6d, 0xb7, 0x5f, 0x26, 0x1e, 0xf6, 0xc7, 0x84, 0xee, 0x48, 0x21, 0x7e, 0xea, + 0x86, 0xbd, 0x32, 0xb5, 0x07, 0x38, 0xa0, 0xc6, 0xc0, 0x93, 0x02, 0x5b, 0x52, 0xc0, 0xf7, 0xcc, + 0x72, 0x40, 0x0d, 0x1a, 0x4a, 0xcd, 0xe2, 0x77, 0x09, 0xd8, 0xa8, 0x0a, 0xa3, 0xf8, 0x9c, 0xb9, + 0xa8, 0xe1, 0x8f, 0x42, 0x1c, 0x50, 0x74, 0x0b, 0x32, 0xb6, 0xeb, 0x85, 0x54, 0x0f, 0x7d, 0xbb, + 0xa0, 0xec, 0x2a, 0x7b, 0x19, 0x6d, 0x89, 0x13, 0xce, 0x7c, 0x1b, 0xdd, 0x85, 0x15, 0xc1, 0x34, + 0x89, 0x4b, 0xb1, 0x4b, 0x0b, 0x0b, 0x5c, 0x60, 0x99, 0x13, 0x6b, 0x82, 0x86, 0x9e, 0xc2, 0x52, + 0x0f, 0x1b, 0x34, 0xf4, 0x71, 0x50, 0x48, 0xec, 0x26, 0xf7, 0x56, 0x2b, 0xe5, 0xd2, 0x6c, 0x29, + 0x29, 0xd5, 0x85, 0x9e, 0x36, 0x04, 0x40, 0xcf, 
0x61, 0x45, 0x24, 0x9a, 0x5b, 0x7c, 0x49, 0x0b, + 0xc9, 0x5d, 0x65, 0x2f, 0x5b, 0x79, 0x63, 0x56, 0x44, 0x1e, 0x5b, 0x4d, 0xe8, 0x6a, 0xcb, 0x97, + 0xb1, 0x13, 0xda, 0x01, 0x20, 0x21, 0x8d, 0x42, 0x4d, 0xf1, 0x48, 0x32, 0x82, 0xc2, 0x62, 0xbd, + 0x03, 0x59, 0x87, 0x98, 0x3c, 0xdd, 0xba, 0x6d, 0x15, 0xd2, 0x9c, 0x0f, 0x11, 0xa9, 0x61, 0x15, + 0x7f, 0x49, 0xc2, 0x72, 0x1c, 0x1e, 0x9d, 0xc0, 0x52, 0x80, 0xfb, 0x03, 0xec, 0xd2, 0xa0, 0xa0, + 0xec, 0x26, 0xe7, 0x76, 0xb3, 0x23, 0x94, 0xb5, 0x21, 0x0a, 0x72, 0x60, 0xc3, 0x31, 0xba, 0xd8, + 0xd1, 0x2d, 0x4c, 0xb1, 0xc9, 0x5d, 0x19, 0x10, 0x0b, 0x17, 0x12, 0xbb, 0xca, 0xde, 0x6a, 0xe5, + 0xe1, 0xac, 0xe8, 0x4d, 0x86, 0x71, 0x18, 0x41, 0x1c, 0x13, 0x0b, 0x6b, 0xc8, 0x99, 0xa2, 0xa1, + 0xff, 0xc1, 0x5a, 0x20, 0x9a, 0xd0, 0xf0, 0x5f, 0xe9, 0xa6, 0x31, 0xc0, 0xbe, 0xc1, 0xf3, 0xbd, + 0xa4, 0xe5, 0x47, 0x8c, 0x1a, 0xa7, 0xa3, 0x0a, 0x6c, 0x5e, 0xe5, 0x9a, 0x23, 0x13, 0xb9, 0x3e, + 0x8d, 0xef, 0xa0, 0xff, 0xc3, 0x46, 0xcf, 0x30, 0xf1, 0x94, 0x8a, 0xc8, 0x2d, 0x62, 0xbc, 0x09, + 0x8d, 0xf7, 0xe0, 0x56, 0x70, 0x41, 0xa8, 0x6e, 0x5e, 0x18, 0x6e, 0x7f, 0x5a, 0x51, 0xb4, 0x5f, + 0x81, 0x89, 0xd4, 0xb8, 0xc4, 0x15, 0xea, 0x46, 0x0f, 0xeb, 0x01, 0x36, 0x7c, 0xf3, 0x62, 0x4a, + 0x7d, 0x51, 0xaa, 0x1b, 0x3d, 0xdc, 0xe1, 0x12, 0xe3, 0xea, 0xc5, 0xae, 0x2c, 0xb0, 0x2c, 0x0c, + 0xda, 0xe7, 0x09, 0xf2, 0xa9, 0xce, 0xc6, 0x4c, 0x27, 0xbd, 0x5e, 0x80, 0x29, 0x9f, 0x91, 0xa4, + 0x96, 0xe3, 0x8c, 0x53, 0x7b, 0x80, 0xdb, 0x9c, 0x8c, 0xee, 0x41, 0x0e, 0xbb, 0xd6, 0x98, 0x64, + 0x82, 0x4b, 0xae, 0x60, 0xd7, 0x1a, 0xc9, 0x15, 0xbf, 0x57, 0x60, 0x85, 0xd7, 0xa7, 0x29, 0x3b, + 0x0b, 0xb5, 0x60, 0x51, 0x36, 0x00, 0xc7, 0xbe, 0x6e, 0x17, 0x45, 0x20, 0xe8, 0xdf, 0x00, 0x26, + 0x71, 0x7b, 0xb6, 0xc5, 0x64, 0xb9, 0x13, 0x09, 0x2d, 0x46, 0x41, 0x47, 0x90, 0x76, 0xf0, 0x25, + 0x76, 0x78, 0xa9, 0x57, 0x2b, 0x95, 0xb9, 0xba, 0xaa, 0xc9, 0x34, 0x35, 0x01, 0x50, 0xfc, 0x5a, + 0x81, 0x1c, 0xa7, 0x56, 0x87, 0xd7, 0x19, 0xda, 0x85, 0xac, 0x85, 0x03, 0xd3, 0xb7, 0x3d, 0x76, + 0x94, 0x37, 0x4a, 0x9c, 0xc4, 0x2e, 0x15, 0xc7, 0x70, 0xfb, 0xa1, 0xd1, 0xc7, 0xba, 0x19, 0x75, + 0x77, 0x46, 0x5b, 0x8e, 0x88, 0x35, 0xd6, 0x9b, 0x1d, 0xc8, 0x44, 0xa3, 0x17, 0x14, 0x92, 0x7c, + 0xb8, 0xde, 0x9c, 0xcf, 0x51, 0xa9, 0xad, 0x8d, 0x70, 0x8a, 0xdf, 0x26, 0x61, 0xa3, 0x33, 0x2c, + 0x7e, 0xcc, 0xe9, 0x23, 0x48, 0x1b, 0x56, 0xe8, 0x88, 0x02, 0xcc, 0x93, 0x12, 0xfb, 0x43, 0xec, + 0xd8, 0x17, 0x84, 0x58, 0x9a, 0x00, 0x60, 0x48, 0x81, 0x47, 0x48, 0x4f, 0x8e, 0xec, 0xb5, 0x90, + 0x38, 0x00, 0x6a, 0xc2, 0xe2, 0x00, 0x5b, 0xb6, 0x69, 0xcc, 0x5f, 0xa8, 0x11, 0x56, 0x04, 0xc1, + 0xd0, 0x2e, 0x6d, 0xe2, 0xb0, 0x26, 0x4b, 0x5d, 0x1f, 0x4d, 0x42, 0xa0, 0x3a, 0xa4, 0x7c, 0xc3, + 0x7c, 0xc5, 0x07, 0xf9, 0x7a, 0x50, 0x5c, 0x9f, 0xdd, 0xb9, 0xf1, 0x81, 0x59, 0xe0, 0x03, 0x03, + 0x74, 0x34, 0x2d, 0x06, 0x64, 0x0f, 0x48, 0xe8, 0x5a, 0xb6, 0xdb, 0x3f, 0x20, 0x2f, 0x11, 0x82, + 0x94, 0x83, 0x7b, 0xa2, 0x4c, 0x69, 0x8d, 0xff, 0x46, 0x1b, 0x90, 0xf6, 0xed, 0xfe, 0x85, 0x18, + 0xb7, 0xb4, 0x26, 0x0e, 0xe8, 0x26, 0x2c, 0x74, 0x09, 0xa5, 0x64, 0xc0, 0x93, 0x97, 0xd6, 0xe4, + 0x09, 0xe5, 0x21, 0x49, 0x89, 0xc7, 0x73, 0x90, 0xd6, 0xd8, 0xcf, 0xe2, 0x67, 0x0a, 0x2c, 0xd7, + 0x0d, 0x13, 0x0f, 0xe7, 0xf1, 0x1c, 0x96, 0xbb, 0xd2, 0xa6, 0xde, 0x25, 0x2f, 0xe5, 0x50, 0x3e, + 0x98, 0x35, 0xc8, 0x98, 0xbf, 0x5a, 0xb6, 0x1b, 0x73, 0x7e, 0x22, 0xd8, 0xc4, 0x54, 0xb0, 0x3f, + 0x29, 0xb0, 0xca, 0x3c, 0x89, 0x35, 0xe6, 0x6d, 0xc8, 0xd0, 0x8b, 0x70, 0xd0, 0x75, 0x0d, 0xdb, + 0x91, 0xb3, 0x34, 0x22, 0x8c, 0x3d, 0x40, 0x89, 0x7f, 0xe4, 0x01, 0xd2, 
0xa6, 0xc7, 0x6e, 0x66, + 0xc8, 0x78, 0x12, 0xe3, 0x53, 0xf7, 0x7b, 0x12, 0x6e, 0x72, 0x73, 0xa3, 0xb8, 0x34, 0x1c, 0x84, + 0x0e, 0x0d, 0xfe, 0x7a, 0xf9, 0xb0, 0x60, 0x4d, 0xbc, 0x38, 0xb1, 0x65, 0x49, 0x86, 0xf9, 0xf6, + 0x5c, 0x57, 0x41, 0xcc, 0x6e, 0xde, 0x19, 0x27, 0x04, 0xc8, 0x80, 0x3c, 0x7f, 0xa3, 0xe2, 0x46, + 0x44, 0xe0, 0x6f, 0xcd, 0x13, 0x78, 0xcc, 0x46, 0xae, 0x37, 0x76, 0x0e, 0x90, 0x0e, 0x79, 0xfe, + 0xa8, 0xc5, 0x4d, 0xa4, 0xfe, 0x46, 0xb9, 0x72, 0x0c, 0x2d, 0x6e, 0x80, 0xc2, 0x56, 0xfc, 0xd9, + 0x8b, 0xdb, 0x59, 0xe0, 0x76, 0xde, 0x9d, 0xd5, 0xce, 0x55, 0xb7, 0xa3, 0xb6, 0x19, 0x5c, 0x41, + 0x0d, 0xd0, 0x1e, 0xa4, 0xb1, 0xef, 0x13, 0x9f, 0xdf, 0x02, 0xd9, 0x0a, 0x8a, 0x6c, 0xf8, 0x9e, + 0x59, 0xea, 0xf0, 0xdd, 0x53, 0x13, 0x02, 0xc5, 0x4f, 0x15, 0xd8, 0x9c, 0x58, 0x3e, 0x03, 0x8f, + 0xb8, 0x01, 0x46, 0x03, 0x40, 0x23, 0x6f, 0x75, 0x5f, 0xb4, 0x85, 0x5c, 0xa6, 0xde, 0x9f, 0x2b, + 0x39, 0x53, 0xcd, 0xa5, 0xad, 0x19, 0x93, 0xa4, 0xe2, 0xcf, 0x0a, 0x6c, 0x4d, 0x48, 0x9f, 0xf8, + 0xa4, 0xef, 0xe3, 0xe0, 0x35, 0xbd, 0xf8, 0x5f, 0xc8, 0x7b, 0x52, 0x50, 0xf7, 0xb0, 0x6f, 0xb2, + 0x7b, 0x54, 0xdc, 0x37, 0xb9, 0x88, 0x7e, 0x22, 0xc8, 0xe8, 0x1d, 0x80, 0xd1, 0xd2, 0x20, 0xd7, + 0xd7, 0xed, 0x28, 0x94, 0x68, 0x71, 0x2f, 0x9d, 0x46, 0x8b, 0xbb, 0x96, 0x19, 0x6e, 0x12, 0xe8, + 0x11, 0x64, 0x43, 0xcf, 0x32, 0x28, 0x16, 0xba, 0xa9, 0xd7, 0xea, 0x82, 0x10, 0x67, 0x84, 0xe2, + 0xe7, 0x93, 0x49, 0x1e, 0x46, 0xe6, 0xc1, 0x7a, 0x2c, 0xc9, 0x91, 0xbf, 0x32, 0xcb, 0x8f, 0xaf, + 0x99, 0xe5, 0x08, 0x5d, 0x8b, 0x15, 0x30, 0xa2, 0xed, 0x7f, 0xac, 0xc0, 0xa2, 0xdc, 0xed, 0xd1, + 0x16, 0xac, 0xd7, 0xd5, 0xea, 0xe9, 0x99, 0xa6, 0xea, 0x67, 0xad, 0xce, 0x89, 0x5a, 0x6b, 0xd4, + 0x1b, 0xea, 0x61, 0xfe, 0x06, 0x5a, 0x87, 0x5c, 0xb3, 0x7a, 0xa0, 0x36, 0xf5, 0x43, 0xf5, 0x54, + 0xad, 0x9d, 0x36, 0xda, 0xad, 0xbc, 0x82, 0x10, 0xac, 0xd6, 0xab, 0x35, 0x35, 0x46, 0x4b, 0xa0, + 0x7f, 0xc1, 0x66, 0xe7, 0xa8, 0x7d, 0xaa, 0xd7, 0x8e, 0xaa, 0xad, 0x27, 0x71, 0x56, 0x92, 0xb3, + 0xaa, 0x75, 0x55, 0xef, 0xa8, 0x55, 0xad, 0x76, 0x14, 0x63, 0xa5, 0xf6, 0x5d, 0x80, 0xd1, 0xc6, + 0x82, 0x6e, 0xc1, 0x96, 0x30, 0xd6, 0x54, 0xcf, 0xd5, 0xe6, 0x84, 0x27, 0x39, 0xc8, 0x9e, 0x37, + 0x0e, 0xd5, 0xb6, 0x60, 0xe6, 0x15, 0xb4, 0x06, 0x2b, 0x1d, 0xf5, 0xc9, 0xb1, 0xda, 0x3a, 0x95, + 0xa4, 0x04, 0x5a, 0x05, 0xe0, 0x4e, 0x88, 0x73, 0x92, 0xe9, 0xd4, 0xb5, 0xea, 0xb1, 0x2a, 0x09, + 0xa9, 0x7d, 0x1f, 0xd0, 0xf4, 0xde, 0x8d, 0xfe, 0x03, 0xbb, 0x13, 0x41, 0xea, 0xc7, 0xed, 0xc3, + 0xc9, 0x54, 0xac, 0x40, 0x86, 0x83, 0x33, 0x56, 0x5e, 0x61, 0xb6, 0x04, 0x36, 0x3f, 0x27, 0x58, + 0x0a, 0x39, 0xbb, 0xda, 0x3a, 0xd4, 0x63, 0x8c, 0xe4, 0x3e, 0x06, 0x18, 0xbd, 0xa9, 0x28, 0x0b, + 0x8b, 0x67, 0xad, 0xa7, 0xad, 0xf6, 0xb3, 0x56, 0xfe, 0x06, 0x0b, 0xe1, 0x5c, 0xd5, 0x9e, 0xeb, + 0x67, 0xad, 0x66, 0xe3, 0xa9, 0xda, 0x7c, 0x9e, 0x57, 0xd0, 0x32, 0x2c, 0x0d, 0x4f, 0x09, 0x76, + 0x3a, 0x69, 0x77, 0x3a, 0x8d, 0x83, 0xa6, 0x9a, 0x4f, 0x22, 0x80, 0x05, 0xc9, 0x49, 0xf1, 0x74, + 0x30, 0x55, 0x49, 0x48, 0x57, 0xbe, 0x51, 0xa0, 0xc0, 0xcb, 0xdf, 0x88, 0x35, 0x46, 0x07, 0xfb, + 0x97, 0xb6, 0x89, 0xd1, 0x17, 0x0a, 0xac, 0x8c, 0xf5, 0x1d, 0x9a, 0xf9, 0xb6, 0xb9, 0xea, 0x83, + 0x74, 0x7b, 0x27, 0xd2, 0x8e, 0x7d, 0x09, 0x97, 0xda, 0xd1, 0x97, 0x70, 0xf1, 0xee, 0x27, 0x3f, + 0xfe, 0xfa, 0x65, 0x62, 0xa7, 0x58, 0x18, 0xff, 0x30, 0x0f, 0x1e, 0xca, 0x36, 0xc4, 0x0f, 0x95, + 0xfd, 0x83, 0xdf, 0x14, 0xd8, 0x37, 0xc9, 0x60, 0x46, 0x3f, 0x0e, 0x76, 0xfe, 0x2c, 0xb8, 0x13, + 0x36, 0x72, 0x27, 0xca, 0x07, 0xcf, 0x24, 0x50, 0x9f, 0xb0, 0x25, 0xb5, 0x44, 0xfc, 0x7e, 0xb9, + 
0x8f, 0x5d, 0x3e, 0x90, 0x65, 0xc1, 0x32, 0x3c, 0x3b, 0x78, 0xdd, 0x5f, 0x08, 0x8f, 0xa6, 0x38, + 0x5f, 0x25, 0xee, 0x3d, 0x11, 0xc8, 0x35, 0xee, 0xe2, 0x94, 0x1f, 0xa5, 0xf3, 0xfb, 0x07, 0x4c, + 0xf5, 0x87, 0x48, 0xf0, 0x05, 0x17, 0x7c, 0x31, 0x25, 0xf8, 0xe2, 0x5c, 0xd8, 0xe8, 0x2e, 0x70, + 0xaf, 0x1e, 0xfc, 0x11, 0x00, 0x00, 0xff, 0xff, 0xba, 0xc2, 0xb0, 0xa0, 0xd7, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2/video_intelligence.pb.go new file mode 100644 index 0000000..cf26251 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2/video_intelligence.pb.go @@ -0,0 +1,1598 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/videointelligence/v1beta2/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 2 + // Explicit content detection. + Feature_EXPLICIT_CONTENT_DETECTION Feature = 3 + // Human face detection and tracking. + Feature_FACE_DETECTION Feature = 4 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "SHOT_CHANGE_DETECTION", + 3: "EXPLICIT_CONTENT_DETECTION", + 4: "FACE_DETECTION", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "SHOT_CHANGE_DETECTION": 2, + "EXPLICIT_CONTENT_DETECTION": 3, + "FACE_DETECTION": 4, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{0} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. 
+ LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{1} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unspecified likelihood. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Very unlikely. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{2} +} + +// Video annotation request. +type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. + // If unset, the input video(s) should be specified via `input_uri`. + // If set, `input_uri` should be unset. + InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1beta2.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. + VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). 
For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. + LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() []byte { + if m != nil { + return m.InputContent + } + return nil +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. If unspecified, each video + // is treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // Config for LABEL_DETECTION. + LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig,proto3" json:"label_detection_config,omitempty"` + // Config for SHOT_CHANGE_DETECTION. + ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3" json:"shot_change_detection_config,omitempty"` + // Config for EXPLICIT_CONTENT_DETECTION. + ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3" json:"explicit_content_detection_config,omitempty"` + // Config for FACE_DETECTION. 
+ FaceDetectionConfig *FaceDetectionConfig `protobuf:"bytes,5,opt,name=face_detection_config,json=faceDetectionConfig,proto3" json:"face_detection_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig { + if m != nil { + return m.LabelDetectionConfig + } + return nil +} + +func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig { + if m != nil { + return m.ShotChangeDetectionConfig + } + return nil +} + +func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig { + if m != nil { + return m.ExplicitContentDetectionConfig + } + return nil +} + +func (m *VideoContext) GetFaceDetectionConfig() *FaceDetectionConfig { + if m != nil { + return m.FaceDetectionConfig + } + return nil +} + +// Config for LABEL_DETECTION. +type LabelDetectionConfig struct { + // What labels should be detected with LABEL_DETECTION, in addition to + // video-level labels or segment-level labels. + // If unspecified, defaults to `SHOT_MODE`. + LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1beta2.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + // Should be used with `SHOT_AND_FRAME_MODE` enabled. + StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". 
+ Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} } +func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*LabelDetectionConfig) ProtoMessage() {} +func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{2} +} +func (m *LabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDetectionConfig.Unmarshal(m, b) +} +func (m *LabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *LabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDetectionConfig.Merge(dst, src) +} +func (m *LabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_LabelDetectionConfig.Size(m) +} +func (m *LabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDetectionConfig proto.InternalMessageInfo + +func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *LabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *LabelDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for SHOT_CHANGE_DETECTION. +type ShotChangeDetectionConfig struct { + // Model to use for shot change detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} } +func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ShotChangeDetectionConfig) ProtoMessage() {} +func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{3} +} +func (m *ShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *ShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShotChangeDetectionConfig.Merge(dst, src) +} +func (m *ShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ShotChangeDetectionConfig.Size(m) +} +func (m *ShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ShotChangeDetectionConfig proto.InternalMessageInfo + +func (m *ShotChangeDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION. +type ExplicitContentDetectionConfig struct { + // Model to use for explicit content detection. 
+ // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} } +func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentDetectionConfig) ProtoMessage() {} +func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{4} +} +func (m *ExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *ExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *ExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ExplicitContentDetectionConfig.Size(m) +} +func (m *ExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentDetectionConfig proto.InternalMessageInfo + +func (m *ExplicitContentDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for FACE_DETECTION. +type FaceDetectionConfig struct { + // Model to use for face detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + // Whether bounding boxes be included in the face annotation output. + IncludeBoundingBoxes bool `protobuf:"varint,2,opt,name=include_bounding_boxes,json=includeBoundingBoxes,proto3" json:"include_bounding_boxes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceDetectionConfig) Reset() { *m = FaceDetectionConfig{} } +func (m *FaceDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*FaceDetectionConfig) ProtoMessage() {} +func (*FaceDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{5} +} +func (m *FaceDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceDetectionConfig.Unmarshal(m, b) +} +func (m *FaceDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *FaceDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceDetectionConfig.Merge(dst, src) +} +func (m *FaceDetectionConfig) XXX_Size() int { + return xxx_messageInfo_FaceDetectionConfig.Size(m) +} +func (m *FaceDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_FaceDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceDetectionConfig proto.InternalMessageInfo + +func (m *FaceDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *FaceDetectionConfig) GetIncludeBoundingBoxes() bool { + if m != nil { + return m.IncludeBoundingBoxes + } + return false +} + +// Video segment. 
+type VideoSegment struct { + // Time-offset, relative to the beginning of the video, + // corresponding to the start of the segment (inclusive). + StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the end of the segment (inclusive). + EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{6} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *VideoSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video segment level annotation results for label detection. +type LabelSegment struct { + // Video segment where a label was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelSegment) Reset() { *m = LabelSegment{} } +func (m *LabelSegment) String() string { return proto.CompactTextString(m) } +func (*LabelSegment) ProtoMessage() {} +func (*LabelSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{7} +} +func (m *LabelSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelSegment.Unmarshal(m, b) +} +func (m *LabelSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelSegment.Marshal(b, m, deterministic) +} +func (dst *LabelSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelSegment.Merge(dst, src) +} +func (m *LabelSegment) XXX_Size() int { + return xxx_messageInfo_LabelSegment.Size(m) +} +func (m *LabelSegment) XXX_DiscardUnknown() { + xxx_messageInfo_LabelSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelSegment proto.InternalMessageInfo + +func (m *LabelSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Video frame level annotation results for label detection. +type LabelFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelFrame) Reset() { *m = LabelFrame{} } +func (m *LabelFrame) String() string { return proto.CompactTextString(m) } +func (*LabelFrame) ProtoMessage() {} +func (*LabelFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{8} +} +func (m *LabelFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelFrame.Unmarshal(m, b) +} +func (m *LabelFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelFrame.Marshal(b, m, deterministic) +} +func (dst *LabelFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelFrame.Merge(dst, src) +} +func (m *LabelFrame) XXX_Size() int { + return xxx_messageInfo_LabelFrame.Size(m) +} +func (m *LabelFrame) XXX_DiscardUnknown() { + xxx_messageInfo_LabelFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelFrame proto.InternalMessageInfo + +func (m *LabelFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *LabelFrame) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected entity from video analysis. +type Entity struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Textual description, e.g. `Fixed-gear bicycle`. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{9} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *Entity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entity) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Label annotation. +type LabelAnnotation struct { + // Detected entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Common categories for the detected entity. + // E.g. when the label is `Terrier` the category is likely `dog`. And in some + // cases there might be more than one categories e.g. `Terrier` could also be + // a `pet`. + CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities,proto3" json:"category_entities,omitempty"` + // All video segments where a label was detected. + Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a label was detected. 
+ Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{10} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *LabelAnnotation) GetCategoryEntities() []*Entity { + if m != nil { + return m.CategoryEntities + } + return nil +} + +func (m *LabelAnnotation) GetSegments() []*LabelSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *LabelAnnotation) GetFrames() []*LabelFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for explicit content. +type ExplicitContentFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Likelihood of the pornography content.. 
+ PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,proto3,enum=google.cloud.videointelligence.v1beta2.Likelihood" json:"pornography_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} } +func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentFrame) ProtoMessage() {} +func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{11} +} +func (m *ExplicitContentFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentFrame.Unmarshal(m, b) +} +func (m *ExplicitContentFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentFrame.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentFrame.Merge(dst, src) +} +func (m *ExplicitContentFrame) XXX_Size() int { + return xxx_messageInfo_ExplicitContentFrame.Size(m) +} +func (m *ExplicitContentFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentFrame proto.InternalMessageInfo + +func (m *ExplicitContentFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood { + if m != nil { + return m.PornographyLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +// Explicit content annotation (based on per-frame visual signals only). +// If no explicit content has been detected in a frame, no annotations are +// present for that frame. +type ExplicitContentAnnotation struct { + // All video frames where explicit content was detected. + Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} } +func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentAnnotation) ProtoMessage() {} +func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{12} +} +func (m *ExplicitContentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentAnnotation.Unmarshal(m, b) +} +func (m *ExplicitContentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentAnnotation.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentAnnotation.Merge(dst, src) +} +func (m *ExplicitContentAnnotation) XXX_Size() int { + return xxx_messageInfo_ExplicitContentAnnotation.Size(m) +} +func (m *ExplicitContentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentAnnotation proto.InternalMessageInfo + +func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Normalized bounding box. 
+// The normalized vertex coordinates are relative to the original image. +// Range: [0, 1]. +type NormalizedBoundingBox struct { + // Left X coordinate. + Left float32 `protobuf:"fixed32,1,opt,name=left,proto3" json:"left,omitempty"` + // Top Y coordinate. + Top float32 `protobuf:"fixed32,2,opt,name=top,proto3" json:"top,omitempty"` + // Right X coordinate. + Right float32 `protobuf:"fixed32,3,opt,name=right,proto3" json:"right,omitempty"` + // Bottom Y coordinate. + Bottom float32 `protobuf:"fixed32,4,opt,name=bottom,proto3" json:"bottom,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingBox) Reset() { *m = NormalizedBoundingBox{} } +func (m *NormalizedBoundingBox) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingBox) ProtoMessage() {} +func (*NormalizedBoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{13} +} +func (m *NormalizedBoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingBox.Unmarshal(m, b) +} +func (m *NormalizedBoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingBox.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingBox.Merge(dst, src) +} +func (m *NormalizedBoundingBox) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingBox.Size(m) +} +func (m *NormalizedBoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingBox proto.InternalMessageInfo + +func (m *NormalizedBoundingBox) GetLeft() float32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *NormalizedBoundingBox) GetTop() float32 { + if m != nil { + return m.Top + } + return 0 +} + +func (m *NormalizedBoundingBox) GetRight() float32 { + if m != nil { + return m.Right + } + return 0 +} + +func (m *NormalizedBoundingBox) GetBottom() float32 { + if m != nil { + return m.Bottom + } + return 0 +} + +// Video segment level annotation results for face detection. +type FaceSegment struct { + // Video segment where a face was detected. 
+ Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceSegment) Reset() { *m = FaceSegment{} } +func (m *FaceSegment) String() string { return proto.CompactTextString(m) } +func (*FaceSegment) ProtoMessage() {} +func (*FaceSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{14} +} +func (m *FaceSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceSegment.Unmarshal(m, b) +} +func (m *FaceSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceSegment.Marshal(b, m, deterministic) +} +func (dst *FaceSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceSegment.Merge(dst, src) +} +func (m *FaceSegment) XXX_Size() int { + return xxx_messageInfo_FaceSegment.Size(m) +} +func (m *FaceSegment) XXX_DiscardUnknown() { + xxx_messageInfo_FaceSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceSegment proto.InternalMessageInfo + +func (m *FaceSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +// Video frame level annotation results for face detection. +type FaceFrame struct { + // Normalized Bounding boxes in a frame. + // There can be more than one boxes if the same face is detected in multiple + // locations within the current frame. + NormalizedBoundingBoxes []*NormalizedBoundingBox `protobuf:"bytes,1,rep,name=normalized_bounding_boxes,json=normalizedBoundingBoxes,proto3" json:"normalized_bounding_boxes,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceFrame) Reset() { *m = FaceFrame{} } +func (m *FaceFrame) String() string { return proto.CompactTextString(m) } +func (*FaceFrame) ProtoMessage() {} +func (*FaceFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{15} +} +func (m *FaceFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceFrame.Unmarshal(m, b) +} +func (m *FaceFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceFrame.Marshal(b, m, deterministic) +} +func (dst *FaceFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceFrame.Merge(dst, src) +} +func (m *FaceFrame) XXX_Size() int { + return xxx_messageInfo_FaceFrame.Size(m) +} +func (m *FaceFrame) XXX_DiscardUnknown() { + xxx_messageInfo_FaceFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceFrame proto.InternalMessageInfo + +func (m *FaceFrame) GetNormalizedBoundingBoxes() []*NormalizedBoundingBox { + if m != nil { + return m.NormalizedBoundingBoxes + } + return nil +} + +func (m *FaceFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Face annotation. +type FaceAnnotation struct { + // Thumbnail of a representative face view (in JPEG format). + Thumbnail []byte `protobuf:"bytes,1,opt,name=thumbnail,proto3" json:"thumbnail,omitempty"` + // All video segments where a face was detected. 
+ Segments []*FaceSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a face was detected. + Frames []*FaceFrame `protobuf:"bytes,3,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{16} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetThumbnail() []byte { + if m != nil { + return m.Thumbnail + } + return nil +} + +func (m *FaceAnnotation) GetSegments() []*FaceSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *FaceAnnotation) GetFrames() []*FaceFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Annotation results for a single video. +type VideoAnnotationResults struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. + SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations,proto3" json:"segment_label_annotations,omitempty"` + // Label annotations on shot level. + // There is exactly one element for each unique label. + ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations,proto3" json:"shot_label_annotations,omitempty"` + // Label annotations on frame level. + // There is exactly one element for each unique label. + FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations,proto3" json:"frame_label_annotations,omitempty"` + // Face annotations. There is exactly one element for each unique face. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,5,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Explicit content annotation. + ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"` + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` + // some videos may succeed and some may fail. 
+ Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{17} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.SegmentLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.ShotLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.FrameLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{18} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Approximate percentage processed thus far. + // Guaranteed to be 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{19} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_4625d60d88ccf591, []int{20} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1beta2.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1beta2.VideoContext") + proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.LabelDetectionConfig") + proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.ShotChangeDetectionConfig") + proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentDetectionConfig") + proto.RegisterType((*FaceDetectionConfig)(nil), "google.cloud.videointelligence.v1beta2.FaceDetectionConfig") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1beta2.VideoSegment") + proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1beta2.LabelSegment") + proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1beta2.LabelFrame") + proto.RegisterType((*Entity)(nil), "google.cloud.videointelligence.v1beta2.Entity") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1beta2.LabelAnnotation") + proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentFrame") + proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1beta2.ExplicitContentAnnotation") + proto.RegisterType((*NormalizedBoundingBox)(nil), "google.cloud.videointelligence.v1beta2.NormalizedBoundingBox") + proto.RegisterType((*FaceSegment)(nil), "google.cloud.videointelligence.v1beta2.FaceSegment") + proto.RegisterType((*FaceFrame)(nil), "google.cloud.videointelligence.v1beta2.FaceFrame") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.videointelligence.v1beta2.FaceAnnotation") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1beta2.VideoAnnotationResults") + proto.RegisterType((*AnnotateVideoResponse)(nil), 
"google.cloud.videointelligence.v1beta2.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1beta2.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1beta2.AnnotateVideoProgress") + proto.RegisterEnum("google.cloud.videointelligence.v1beta2.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1beta2.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1beta2.Likelihood", Likelihood_name, Likelihood_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). 
+ AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1beta2.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1beta2.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1beta2/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1beta2/video_intelligence.proto", fileDescriptor_video_intelligence_4625d60d88ccf591) +} + +var fileDescriptor_video_intelligence_4625d60d88ccf591 = []byte{ + // 1718 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x4f, 0x6f, 0xdb, 0xc8, + 0x15, 0x2f, 0x25, 0xd9, 0xb1, 0x9e, 0xff, 0x48, 0x19, 0xcb, 0xb6, 0xec, 0x26, 0x5e, 0x97, 0x29, + 0x16, 0xae, 0x0b, 0x48, 0x88, 0x77, 0xb1, 0x45, 0x93, 0x6d, 0x17, 0xb2, 0x4c, 0x6d, 0xd4, 0x75, + 0x24, 0x81, 0x52, 0xd2, 0xa6, 0x4d, 0x41, 0x50, 0xe4, 0x88, 0x22, 0x96, 0xe2, 0x70, 0xc9, 0x61, + 0x10, 0xf7, 0xd0, 0xc3, 0x1e, 0x16, 0xe8, 0xb1, 0xe8, 0xa5, 0x9f, 0xa1, 0x87, 0x7e, 0x83, 0x02, + 0x45, 0x2f, 0x05, 0x72, 0x6d, 0x2f, 0xbd, 0xf4, 0xd4, 0x63, 0x3f, 0x40, 0x8f, 0x05, 0x67, 0x86, + 0x12, 0x45, 0xca, 0xb1, 0x94, 0x60, 0x6f, 0x9c, 0xf7, 0xe6, 0xfd, 0xde, 0xff, 0x37, 0x33, 0x84, + 0xcf, 0x2c, 0x42, 0x2c, 0x07, 0xd7, 0x0d, 0x87, 0x84, 0x66, 0xfd, 0x95, 0x6d, 0x62, 0x62, 0xbb, + 0x14, 0x3b, 0x8e, 0x6d, 0x61, 0xd7, 0xc0, 0xf5, 0x57, 0x0f, 0x87, 0x98, 0xea, 0xe7, 0x9c, 0xa3, + 0x25, 0x59, 0x35, 0xcf, 0x27, 0x94, 0xa0, 0x0f, 0x39, 0x40, 0x8d, 0x01, 0xd4, 0x32, 0x00, 0x35, + 0x01, 0x70, 0x74, 0x4f, 0x28, 0xd2, 0x3d, 0xbb, 0xae, 0xbb, 0x2e, 0xa1, 0x3a, 0xb5, 0x89, 0x1b, + 0x70, 0x94, 0xa3, 0x07, 0x82, 0xeb, 0x10, 0xd7, 0xf2, 0x43, 0xd7, 0xb5, 0x5d, 0xab, 0x4e, 0x3c, + 0xec, 0xcf, 0x6d, 0x3a, 0x16, 0x9b, 0xd8, 0x6a, 0x18, 0x8e, 0xea, 0x66, 0xc8, 0x37, 0x08, 0xfe, + 0x07, 0x69, 0x3e, 0xb5, 0x27, 0x38, 0xa0, 0xfa, 0xc4, 0x13, 0x1b, 0x0e, 0xc4, 0x06, 0xdf, 0x33, + 0xea, 0x01, 0xd5, 0x69, 0x28, 0x90, 0xe5, 0xbf, 0xe6, 0xa0, 0xd2, 0xe0, 0x46, 0xe1, 0xe7, 0x91, + 0x0b, 0x2a, 0xfe, 0x2a, 0xc4, 0x01, 0x45, 0xdf, 0x85, 0xa2, 0xed, 0x7a, 0x21, 0xd5, 0x42, 0xdf, + 0xae, 0x4a, 0x27, 0xd2, 0x69, 0x51, 0xdd, 0x60, 0x84, 0x67, 0xbe, 0x8d, 0x1e, 0xc0, 0x36, 0x67, + 0x1a, 0xc4, 0xa5, 0xd8, 0xa5, 0xd5, 0xf5, 0x13, 0xe9, 0x74, 0x4b, 0xdd, 0x62, 0xc4, 0x26, 0xa7, + 0xa1, 0x2f, 0x60, 0x63, 0x84, 0x75, 0x1a, 0xfa, 
0x38, 0xa8, 0xe6, 0x4e, 0xf2, 0xa7, 0x3b, 0xe7, + 0xf5, 0xda, 0x72, 0x21, 0xab, 0xb5, 0xb8, 0x9c, 0x3a, 0x05, 0x40, 0x2f, 0x60, 0x9b, 0x27, 0x82, + 0x69, 0x7c, 0x4d, 0xab, 0xf9, 0x13, 0xe9, 0x74, 0xf3, 0xfc, 0xe3, 0x65, 0x11, 0x99, 0x6f, 0x4d, + 0x2e, 0xab, 0x6e, 0xbd, 0x4a, 0xac, 0xd0, 0x7d, 0x00, 0x12, 0xd2, 0xd8, 0xd5, 0x02, 0x73, 0xb5, + 0xc8, 0x29, 0x91, 0xaf, 0x1f, 0xc0, 0xa6, 0x43, 0x0c, 0x16, 0x6d, 0xcd, 0x36, 0xab, 0x6b, 0x8c, + 0x0f, 0x31, 0xa9, 0x6d, 0xca, 0xff, 0x2e, 0xc0, 0x56, 0x12, 0x1e, 0xf5, 0x60, 0x23, 0xc0, 0xd6, + 0x04, 0xbb, 0x34, 0xa8, 0x4a, 0x27, 0xf9, 0x95, 0xcd, 0xec, 0x73, 0x61, 0x75, 0x8a, 0x82, 0x7c, + 0xd8, 0x77, 0xf4, 0x21, 0x76, 0x34, 0x13, 0x53, 0x6c, 0x30, 0x53, 0x0c, 0xe2, 0x8e, 0x6c, 0xab, + 0x9a, 0x63, 0x61, 0xf8, 0x74, 0x59, 0xfc, 0xab, 0x08, 0xe5, 0x32, 0x06, 0x69, 0x32, 0x0c, 0xb5, + 0xe2, 0x2c, 0xa0, 0xa2, 0xaf, 0x25, 0xb8, 0x17, 0x8c, 0x09, 0xd5, 0x8c, 0xb1, 0xee, 0x5a, 0x38, + 0xab, 0x9a, 0x67, 0xa0, 0xb1, 0xac, 0xea, 0xfe, 0x98, 0xd0, 0x26, 0x83, 0x4a, 0xeb, 0x3f, 0x0c, + 0x6e, 0x62, 0xa1, 0xdf, 0x4b, 0xf0, 0x3d, 0xfc, 0xda, 0x73, 0x6c, 0xc3, 0x9e, 0x16, 0x5b, 0xd6, + 0x92, 0x02, 0xb3, 0xa4, 0xb5, 0xac, 0x25, 0x8a, 0x00, 0x14, 0x85, 0x9a, 0x36, 0xe7, 0x18, 0xbf, + 0x95, 0x8f, 0x08, 0xec, 0x8d, 0x74, 0x63, 0x41, 0x40, 0xd6, 0x98, 0x19, 0x8f, 0x97, 0x2e, 0x72, + 0xdd, 0xc8, 0x84, 0x62, 0x77, 0x94, 0x25, 0xca, 0x7f, 0x97, 0xa0, 0xb2, 0x28, 0x71, 0xc8, 0x81, + 0x4a, 0xba, 0x2c, 0x26, 0xc4, 0xc4, 0xac, 0x5d, 0x77, 0xce, 0x1f, 0xbd, 0x5b, 0x51, 0x3c, 0x25, + 0x26, 0x56, 0x91, 0x93, 0xa1, 0xa1, 0x1f, 0xc2, 0xdd, 0x80, 0xcf, 0x2e, 0xdd, 0xbf, 0xd6, 0x0c, + 0x7d, 0x82, 0x7d, 0x9d, 0xd5, 0xdf, 0x86, 0x5a, 0x9e, 0x31, 0x9a, 0x8c, 0x8e, 0x2a, 0xb0, 0x16, + 0x99, 0xe2, 0xb0, 0x2a, 0x29, 0xaa, 0x7c, 0x21, 0x3f, 0x84, 0xc3, 0x1b, 0xcb, 0x60, 0x26, 0x22, + 0x25, 0x45, 0x3e, 0x81, 0xe3, 0xb7, 0xe7, 0xeb, 0x06, 0x39, 0x1d, 0x76, 0x17, 0x04, 0x78, 0xf1, + 0x66, 0xf4, 0x31, 0xec, 0xdb, 0xae, 0xe1, 0x84, 0x26, 0xd6, 0x86, 0x24, 0x74, 0x4d, 0xdb, 0xb5, + 0xb4, 0x21, 0x79, 0xcd, 0x06, 0x57, 0xe4, 0x5f, 0x45, 0x70, 0x2f, 0x04, 0xf3, 0x22, 0xe2, 0xc9, + 0x7f, 0x94, 0x44, 0xe3, 0x8b, 0x86, 0x45, 0x0a, 0x8b, 0x90, 0x4f, 0xb5, 0x68, 0xfc, 0x6a, 0x64, + 0x34, 0x0a, 0x30, 0x65, 0x8a, 0x36, 0xcf, 0x0f, 0xe3, 0x64, 0xc4, 0x23, 0xba, 0x76, 0x29, 0x46, + 0xb8, 0x5a, 0x62, 0x32, 0x03, 0x7b, 0x82, 0xbb, 0x4c, 0x02, 0x35, 0xa0, 0x84, 0x5d, 0x73, 0x0e, + 0x24, 0x77, 0x1b, 0xc8, 0x36, 0x76, 0xcd, 0x19, 0x84, 0xfc, 0x5b, 0xd8, 0x62, 0x59, 0x8d, 0x2d, + 0xeb, 0xc0, 0x1d, 0x31, 0x4c, 0x84, 0x3d, 0xef, 0x36, 0x91, 0x62, 0x10, 0x74, 0x0c, 0xc0, 0x8a, + 0xde, 0x8c, 0xf6, 0x32, 0xeb, 0x72, 0x6a, 0x82, 0x22, 0x8f, 0x01, 0x98, 0xfe, 0x96, 0xaf, 0x4f, + 0x30, 0x7a, 0x04, 0x9b, 0x2b, 0x45, 0x04, 0xe8, 0x2c, 0x18, 0xb7, 0x69, 0x72, 0x60, 0x5d, 0x71, + 0xa9, 0x4d, 0xaf, 0xa3, 0x13, 0x0b, 0xb3, 0xaf, 0x68, 0x4c, 0x8b, 0x13, 0x8b, 0x13, 0xda, 0x26, + 0x3a, 0x81, 0x4d, 0x13, 0x07, 0x86, 0x6f, 0x7b, 0x91, 0x06, 0x86, 0x53, 0x54, 0x93, 0xa4, 0xe8, + 0x4c, 0x73, 0x74, 0xd7, 0x0a, 0x75, 0x0b, 0x6b, 0x46, 0xd4, 0x45, 0xbc, 0x72, 0xb7, 0x62, 0x62, + 0x93, 0x98, 0x58, 0xfe, 0x67, 0x0e, 0x4a, 0xcc, 0xb1, 0xc6, 0xf4, 0x20, 0x47, 0x2d, 0x58, 0xe7, + 0x6a, 0x84, 0x63, 0xb5, 0xa5, 0xe7, 0x10, 0x93, 0x52, 0x85, 0x34, 0xfa, 0x15, 0xdc, 0x35, 0x74, + 0x8a, 0x2d, 0xe2, 0x5f, 0x6b, 0x8c, 0x64, 0x8b, 0x83, 0x73, 0x75, 0xc8, 0x72, 0x0c, 0xa4, 0x08, + 0x9c, 0xb9, 0x33, 0x29, 0xbf, 0xda, 0x99, 0x94, 0x2c, 0xa4, 0xc4, 0x99, 0xf4, 0x33, 0x58, 0x1f, + 0x45, 0xd9, 0x0d, 0xaa, 0x05, 0x86, 0x77, 0xbe, 0x12, 0x1e, 0x2b, 0x0c, 
0x55, 0x20, 0xc8, 0x7f, + 0x91, 0xa0, 0x92, 0xea, 0xf2, 0xf7, 0xaf, 0x1c, 0x1b, 0xf6, 0x3d, 0xe2, 0xbb, 0xc4, 0xf2, 0x75, + 0x6f, 0x7c, 0xad, 0x39, 0xf6, 0x97, 0xd8, 0xb1, 0xc7, 0x84, 0x98, 0x2c, 0xfb, 0x3b, 0x2b, 0x18, + 0x3c, 0x95, 0x54, 0xf7, 0x12, 0x88, 0x33, 0xb2, 0xfc, 0x15, 0x1c, 0xa6, 0xcc, 0x4f, 0xd4, 0xc7, + 0x60, 0x1a, 0x28, 0x7e, 0x19, 0xf8, 0xf4, 0x1d, 0xcf, 0xa9, 0xf9, 0x90, 0x7d, 0x09, 0x7b, 0x1d, + 0xe2, 0x4f, 0x74, 0xc7, 0xfe, 0x0d, 0x36, 0x13, 0x73, 0x09, 0x21, 0x28, 0x38, 0x78, 0xc4, 0x63, + 0x95, 0x53, 0xd9, 0x37, 0x2a, 0x43, 0x9e, 0x12, 0x4f, 0x74, 0x4f, 0xf4, 0x19, 0xcd, 0x41, 0xdf, + 0xb6, 0xc6, 0xfc, 0x1e, 0x95, 0x53, 0xf9, 0x02, 0xed, 0xc3, 0xfa, 0x90, 0x50, 0x4a, 0x26, 0xec, + 0x48, 0xcd, 0xa9, 0x62, 0x25, 0xff, 0x1a, 0x36, 0xa3, 0x61, 0xfa, 0x2d, 0x4d, 0x13, 0xf9, 0x6f, + 0x12, 0x14, 0x23, 0x7c, 0x9e, 0xf3, 0x6b, 0x38, 0x74, 0xa7, 0x9e, 0xa5, 0xe7, 0x31, 0x0f, 0xe1, + 0x4f, 0x96, 0xd5, 0xb7, 0x30, 0x44, 0xea, 0x81, 0xbb, 0x88, 0x8c, 0x83, 0x74, 0xb9, 0xe5, 0x56, + 0x28, 0x37, 0xf9, 0x8d, 0x04, 0x3b, 0x91, 0x13, 0x89, 0xcc, 0xdf, 0x83, 0x22, 0x1d, 0x87, 0x93, + 0xa1, 0xab, 0xdb, 0xfc, 0xc0, 0xd9, 0x52, 0x67, 0x04, 0xd4, 0x4d, 0xb4, 0x24, 0x6f, 0xf3, 0x8f, + 0x56, 0xb9, 0x3a, 0x64, 0x3b, 0xb2, 0x3d, 0x2d, 0x34, 0xde, 0xe1, 0x0f, 0x57, 0x81, 0x9b, 0xaf, + 0xae, 0xff, 0xae, 0xc1, 0x3e, 0xcb, 0xd5, 0xcc, 0x1b, 0x15, 0x07, 0xa1, 0x43, 0x83, 0xb7, 0x3f, + 0x0c, 0x02, 0x38, 0x14, 0xe6, 0x68, 0xfc, 0x66, 0x92, 0x78, 0xf0, 0x08, 0x27, 0x7f, 0xb4, 0xd2, + 0x9c, 0x48, 0xe8, 0x3f, 0x10, 0xc8, 0x29, 0x7a, 0x80, 0x26, 0xb0, 0xcf, 0x2e, 0xaa, 0x59, 0x8d, + 0xf9, 0xf7, 0xd3, 0x58, 0x89, 0x60, 0x33, 0xea, 0x08, 0x1c, 0xb0, 0x28, 0x2d, 0xd0, 0x57, 0x78, + 0x3f, 0x7d, 0x7b, 0x0c, 0x37, 0xa3, 0x50, 0x87, 0x32, 0xbb, 0x70, 0x26, 0x35, 0xad, 0x31, 0x4d, + 0x9f, 0xac, 0x92, 0xe1, 0x84, 0xa2, 0xd2, 0x68, 0x6e, 0x1d, 0x20, 0x0d, 0xca, 0x2c, 0x84, 0x49, + 0x15, 0xeb, 0xef, 0xf1, 0x74, 0x29, 0x45, 0x68, 0x49, 0x05, 0x3e, 0xec, 0x4e, 0xef, 0xf1, 0x33, + 0x25, 0xd5, 0x3b, 0xab, 0xbd, 0x21, 0x6e, 0x1c, 0xb2, 0x2a, 0x8a, 0xd1, 0x13, 0xed, 0x77, 0x0a, + 0x6b, 0xd8, 0xf7, 0x89, 0x5f, 0x2d, 0x32, 0x2d, 0x28, 0xd6, 0xe2, 0x7b, 0x46, 0xad, 0xcf, 0x1e, + 0xc1, 0x2a, 0xdf, 0x20, 0x7f, 0x23, 0xc1, 0x5e, 0xea, 0x15, 0x1c, 0x78, 0xc4, 0x0d, 0x30, 0x9a, + 0x00, 0x9a, 0x99, 0xab, 0xf9, 0xbc, 0x07, 0xc4, 0x14, 0xfa, 0xe9, 0x4a, 0xa1, 0xc9, 0x74, 0x92, + 0x7a, 0x57, 0x4f, 0x93, 0xe4, 0x7f, 0x49, 0x70, 0x90, 0xda, 0xdd, 0xf3, 0x89, 0xe5, 0xe3, 0xe0, + 0x96, 0xc6, 0xfb, 0x01, 0x94, 0x3d, 0xb1, 0x51, 0xf3, 0xb0, 0x6f, 0x44, 0xb3, 0x39, 0x1a, 0x5f, + 0x6b, 0x6a, 0x29, 0xa6, 0xf7, 0x38, 0x19, 0xfd, 0x18, 0x60, 0x76, 0x4b, 0x15, 0xaf, 0xb8, 0xa3, + 0xcc, 0x8c, 0x1b, 0xc4, 0x7f, 0x10, 0xd4, 0xe2, 0xf4, 0x7e, 0x8a, 0x1e, 0xc3, 0x66, 0xe8, 0x99, + 0x3a, 0xc5, 0x5c, 0xb6, 0x70, 0xab, 0x2c, 0xf0, 0xed, 0x11, 0x41, 0xfe, 0x5d, 0x3a, 0xc8, 0x53, + 0xcf, 0x3c, 0xd8, 0x4d, 0x04, 0x39, 0xb6, 0x57, 0x44, 0xf9, 0xb3, 0x77, 0x8c, 0x72, 0x8c, 0xae, + 0x26, 0x12, 0x18, 0xd3, 0xce, 0xbe, 0x91, 0xe0, 0x8e, 0xf8, 0xc9, 0x80, 0x0e, 0x60, 0xb7, 0xa5, + 0x34, 0x06, 0xcf, 0x54, 0x45, 0x7b, 0xd6, 0xe9, 0xf7, 0x94, 0x66, 0xbb, 0xd5, 0x56, 0x2e, 0xcb, + 0xdf, 0x41, 0xbb, 0x50, 0xba, 0x6a, 0x5c, 0x28, 0x57, 0xda, 0xa5, 0x32, 0x50, 0x9a, 0x83, 0x76, + 0xb7, 0x53, 0x96, 0xd0, 0x21, 0xec, 0xf5, 0x9f, 0x74, 0x07, 0x5a, 0xf3, 0x49, 0xa3, 0xf3, 0xb9, + 0x92, 0x60, 0xe5, 0xd0, 0x31, 0x1c, 0x29, 0xbf, 0xe8, 0x5d, 0xb5, 0x9b, 0xed, 0x81, 0xd6, 0xec, + 0x76, 0x06, 0x4a, 0x67, 0x90, 0xe0, 0xe7, 0x11, 0x82, 0x9d, 0x56, 0xa3, 0x99, 0x94, 0x29, 0x9c, + 
0xf9, 0x80, 0xb2, 0xcf, 0x2f, 0xf4, 0x7d, 0x38, 0x49, 0x69, 0xd6, 0x9e, 0x76, 0x2f, 0xd3, 0xf6, + 0x6d, 0x43, 0x91, 0x99, 0x12, 0xb1, 0xca, 0x12, 0xda, 0x01, 0x68, 0xa9, 0x8d, 0xa7, 0x0a, 0x5f, + 0xe7, 0x22, 0xbf, 0x18, 0xbb, 0xd1, 0xb9, 0xd4, 0x12, 0x8c, 0xfc, 0x19, 0x05, 0x98, 0xdd, 0x5d, + 0xd0, 0x11, 0xec, 0x5f, 0xb5, 0xbf, 0x50, 0xae, 0xda, 0x4f, 0xba, 0xdd, 0xcb, 0x94, 0x86, 0xbb, + 0xb0, 0xfd, 0x5c, 0x51, 0x5f, 0x68, 0xcf, 0x3a, 0x6c, 0xcb, 0x8b, 0xb2, 0x84, 0xb6, 0x60, 0x63, + 0xba, 0xca, 0x45, 0xab, 0x5e, 0xb7, 0xdf, 0x6f, 0x5f, 0x5c, 0x29, 0xe5, 0x3c, 0x02, 0x58, 0x17, + 0x9c, 0x02, 0x2a, 0xc1, 0x26, 0x13, 0x15, 0x84, 0xb5, 0xf3, 0x3f, 0x4b, 0x50, 0x65, 0x29, 0x6a, + 0x27, 0x92, 0xd7, 0xc7, 0xfe, 0x2b, 0xdb, 0xc0, 0xd1, 0x3b, 0x7f, 0x7b, 0xae, 0x36, 0xd0, 0xd2, + 0xb7, 0xa4, 0x45, 0x7f, 0xaf, 0x8e, 0xee, 0xc7, 0xd2, 0x89, 0xdf, 0x6a, 0xb5, 0x6e, 0xfc, 0x5b, + 0x4d, 0x7e, 0xf0, 0xf5, 0x3f, 0xfe, 0xf3, 0x87, 0xdc, 0x7d, 0xb9, 0x3a, 0xff, 0x97, 0x2f, 0x78, + 0x24, 0x4a, 0x05, 0x3f, 0x92, 0xce, 0x2e, 0xfe, 0x27, 0xc1, 0x99, 0x41, 0x26, 0x4b, 0xda, 0x71, + 0x71, 0xff, 0x26, 0xe7, 0x7a, 0x51, 0x5b, 0xf4, 0xa4, 0x5f, 0xfe, 0x5c, 0x00, 0x59, 0x24, 0x7a, + 0x52, 0xd4, 0x88, 0x6f, 0xd5, 0x2d, 0xec, 0xb2, 0xa6, 0xa9, 0x73, 0x96, 0xee, 0xd9, 0xc1, 0x6d, + 0xff, 0x23, 0x1f, 0x67, 0x38, 0x7f, 0xca, 0x7d, 0xf8, 0x39, 0x47, 0x6e, 0x32, 0x13, 0x33, 0x76, + 0xd4, 0x9e, 0x3f, 0xbc, 0x88, 0x44, 0xdf, 0xc4, 0x1b, 0x5f, 0xb2, 0x8d, 0x2f, 0x33, 0x1b, 0x5f, + 0x3e, 0xe7, 0x3a, 0x86, 0xeb, 0xcc, 0xaa, 0x8f, 0xfe, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x1e, 0x8a, + 0xa0, 0x1c, 0x24, 0x15, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1/video_intelligence.pb.go new file mode 100644 index 0000000..afbae07 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1/video_intelligence.pb.go @@ -0,0 +1,1653 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/videointelligence/v1p1beta1/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 2 + // Explicit content detection. 
+ Feature_EXPLICIT_CONTENT_DETECTION Feature = 3 + // Speech transcription. + Feature_SPEECH_TRANSCRIPTION Feature = 6 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "SHOT_CHANGE_DETECTION", + 3: "EXPLICIT_CONTENT_DETECTION", + 6: "SPEECH_TRANSCRIPTION", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "SHOT_CHANGE_DETECTION": 2, + "EXPLICIT_CONTENT_DETECTION": 3, + "SPEECH_TRANSCRIPTION": 6, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{0} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. + LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{1} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unspecified likelihood. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Very unlikely. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{2} +} + +// Video annotation request. +type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. 
+ InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. + // If unset, the input video(s) should be specified via `input_uri`. + // If set, `input_uri` should be unset. + InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1p1beta1.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. + VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. + LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() []byte { + if m != nil { + return m.InputContent + } + return nil +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video 
context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. If unspecified, each video is + // treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // Config for LABEL_DETECTION. + LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig,proto3" json:"label_detection_config,omitempty"` + // Config for SHOT_CHANGE_DETECTION. + ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3" json:"shot_change_detection_config,omitempty"` + // Config for EXPLICIT_CONTENT_DETECTION. + ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3" json:"explicit_content_detection_config,omitempty"` + // Config for SPEECH_TRANSCRIPTION. + SpeechTranscriptionConfig *SpeechTranscriptionConfig `protobuf:"bytes,6,opt,name=speech_transcription_config,json=speechTranscriptionConfig,proto3" json:"speech_transcription_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig { + if m != nil { + return m.LabelDetectionConfig + } + return nil +} + +func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig { + if m != nil { + return m.ShotChangeDetectionConfig + } + return nil +} + +func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig { + if m != nil { + return m.ExplicitContentDetectionConfig + } + return nil +} + +func (m *VideoContext) GetSpeechTranscriptionConfig() *SpeechTranscriptionConfig { + if m != nil { + return m.SpeechTranscriptionConfig + } + return nil +} + +// Config for LABEL_DETECTION. +type LabelDetectionConfig struct { + // What labels should be detected with LABEL_DETECTION, in addition to + // video-level labels or segment-level labels. + // If unspecified, defaults to `SHOT_MODE`. 
+ LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1p1beta1.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + // Should be used with `SHOT_AND_FRAME_MODE` enabled. + StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} } +func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*LabelDetectionConfig) ProtoMessage() {} +func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{2} +} +func (m *LabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDetectionConfig.Unmarshal(m, b) +} +func (m *LabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *LabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDetectionConfig.Merge(dst, src) +} +func (m *LabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_LabelDetectionConfig.Size(m) +} +func (m *LabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDetectionConfig proto.InternalMessageInfo + +func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *LabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *LabelDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for SHOT_CHANGE_DETECTION. +type ShotChangeDetectionConfig struct { + // Model to use for shot change detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". 
+ Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} } +func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ShotChangeDetectionConfig) ProtoMessage() {} +func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{3} +} +func (m *ShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *ShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShotChangeDetectionConfig.Merge(dst, src) +} +func (m *ShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ShotChangeDetectionConfig.Size(m) +} +func (m *ShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ShotChangeDetectionConfig proto.InternalMessageInfo + +func (m *ShotChangeDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION. +type ExplicitContentDetectionConfig struct { + // Model to use for explicit content detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} } +func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentDetectionConfig) ProtoMessage() {} +func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{4} +} +func (m *ExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *ExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *ExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ExplicitContentDetectionConfig.Size(m) +} +func (m *ExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentDetectionConfig proto.InternalMessageInfo + +func (m *ExplicitContentDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Video segment. +type VideoSegment struct { + // Time-offset, relative to the beginning of the video, + // corresponding to the start of the segment (inclusive). 
+ StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the end of the segment (inclusive). + EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{5} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *VideoSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video segment level annotation results for label detection. +type LabelSegment struct { + // Video segment where a label was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelSegment) Reset() { *m = LabelSegment{} } +func (m *LabelSegment) String() string { return proto.CompactTextString(m) } +func (*LabelSegment) ProtoMessage() {} +func (*LabelSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{6} +} +func (m *LabelSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelSegment.Unmarshal(m, b) +} +func (m *LabelSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelSegment.Marshal(b, m, deterministic) +} +func (dst *LabelSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelSegment.Merge(dst, src) +} +func (m *LabelSegment) XXX_Size() int { + return xxx_messageInfo_LabelSegment.Size(m) +} +func (m *LabelSegment) XXX_DiscardUnknown() { + xxx_messageInfo_LabelSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelSegment proto.InternalMessageInfo + +func (m *LabelSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Video frame level annotation results for label detection. 
+type LabelFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelFrame) Reset() { *m = LabelFrame{} } +func (m *LabelFrame) String() string { return proto.CompactTextString(m) } +func (*LabelFrame) ProtoMessage() {} +func (*LabelFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{7} +} +func (m *LabelFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelFrame.Unmarshal(m, b) +} +func (m *LabelFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelFrame.Marshal(b, m, deterministic) +} +func (dst *LabelFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelFrame.Merge(dst, src) +} +func (m *LabelFrame) XXX_Size() int { + return xxx_messageInfo_LabelFrame.Size(m) +} +func (m *LabelFrame) XXX_DiscardUnknown() { + xxx_messageInfo_LabelFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelFrame proto.InternalMessageInfo + +func (m *LabelFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *LabelFrame) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected entity from video analysis. +type Entity struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Textual description, e.g. `Fixed-gear bicycle`. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{8} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *Entity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entity) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Label annotation. +type LabelAnnotation struct { + // Detected entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Common categories for the detected entity. + // E.g. when the label is `Terrier` the category is likely `dog`. And in some + // cases there might be more than one categories e.g. `Terrier` could also be + // a `pet`. + CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities,proto3" json:"category_entities,omitempty"` + // All video segments where a label was detected. + Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a label was detected. 
+ Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{9} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *LabelAnnotation) GetCategoryEntities() []*Entity { + if m != nil { + return m.CategoryEntities + } + return nil +} + +func (m *LabelAnnotation) GetSegments() []*LabelSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *LabelAnnotation) GetFrames() []*LabelFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for explicit content. +type ExplicitContentFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Likelihood of the pornography content.. 
+ PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,proto3,enum=google.cloud.videointelligence.v1p1beta1.Likelihood" json:"pornography_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} } +func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentFrame) ProtoMessage() {} +func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{10} +} +func (m *ExplicitContentFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentFrame.Unmarshal(m, b) +} +func (m *ExplicitContentFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentFrame.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentFrame.Merge(dst, src) +} +func (m *ExplicitContentFrame) XXX_Size() int { + return xxx_messageInfo_ExplicitContentFrame.Size(m) +} +func (m *ExplicitContentFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentFrame proto.InternalMessageInfo + +func (m *ExplicitContentFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood { + if m != nil { + return m.PornographyLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +// Explicit content annotation (based on per-frame visual signals only). +// If no explicit content has been detected in a frame, no annotations are +// present for that frame. +type ExplicitContentAnnotation struct { + // All video frames where explicit content was detected. + Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} } +func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentAnnotation) ProtoMessage() {} +func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{11} +} +func (m *ExplicitContentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentAnnotation.Unmarshal(m, b) +} +func (m *ExplicitContentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentAnnotation.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentAnnotation.Merge(dst, src) +} +func (m *ExplicitContentAnnotation) XXX_Size() int { + return xxx_messageInfo_ExplicitContentAnnotation.Size(m) +} +func (m *ExplicitContentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentAnnotation proto.InternalMessageInfo + +func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Annotation results for a single video. 
+type VideoAnnotationResults struct { + // Output only. Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. + SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations,proto3" json:"segment_label_annotations,omitempty"` + // Label annotations on shot level. + // There is exactly one element for each unique label. + ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations,proto3" json:"shot_label_annotations,omitempty"` + // Label annotations on frame level. + // There is exactly one element for each unique label. + FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations,proto3" json:"frame_label_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Explicit content annotation. + ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"` + // Speech transcription. + SpeechTranscriptions []*SpeechTranscription `protobuf:"bytes,11,rep,name=speech_transcriptions,json=speechTranscriptions,proto3" json:"speech_transcriptions,omitempty"` + // Output only. If set, indicates an error. Note that for a single + // `AnnotateVideoRequest` some videos may succeed and some may fail. 
+ Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{12} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.SegmentLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.ShotLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.FrameLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *VideoAnnotationResults) GetSpeechTranscriptions() []*SpeechTranscription { + if m != nil { + return m.SpeechTranscriptions + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{13} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Output only. Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Output only. Approximate percentage processed thus far. Guaranteed to be + // 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Output only. Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{14} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{15} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +// Config for SPEECH_TRANSCRIPTION. +type SpeechTranscriptionConfig struct { + // *Required* The language of the supplied audio as a + // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. + // Example: "en-US". + // See [Language Support](https://cloud.google.com/speech/docs/languages) + // for a list of the currently supported language codes. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // *Optional* Maximum number of recognition hypotheses to be returned. + // Specifically, the maximum number of `SpeechRecognitionAlternative` messages + // within each `SpeechRecognitionResult`. The server may return fewer than + // `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will + // return a maximum of one. If omitted, will return a maximum of one. + MaxAlternatives int32 `protobuf:"varint,2,opt,name=max_alternatives,json=maxAlternatives,proto3" json:"max_alternatives,omitempty"` + // *Optional* If set to `true`, the server will attempt to filter out + // profanities, replacing all but the initial character in each filtered word + // with asterisks, e.g. "f***". If set to `false` or omitted, profanities + // won't be filtered out. + FilterProfanity bool `protobuf:"varint,3,opt,name=filter_profanity,json=filterProfanity,proto3" json:"filter_profanity,omitempty"` + // *Optional* A means to provide context to assist the speech recognition. + SpeechContexts []*SpeechContext `protobuf:"bytes,4,rep,name=speech_contexts,json=speechContexts,proto3" json:"speech_contexts,omitempty"` + // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // This feature is only available in select languages. Setting this for + // requests in other languages has no effect at all. The default 'false' value + // does not add punctuation to result hypotheses. NOTE: "This is currently + // offered as an experimental service, complimentary to all users. 
In the + // future this may be exclusively available as a premium feature." + EnableAutomaticPunctuation bool `protobuf:"varint,5,opt,name=enable_automatic_punctuation,json=enableAutomaticPunctuation,proto3" json:"enable_automatic_punctuation,omitempty"` + // *Optional* For file formats, such as MXF or MKV, supporting multiple audio + // tracks, specify up to two tracks. Default: track 0. + AudioTracks []int32 `protobuf:"varint,6,rep,packed,name=audio_tracks,json=audioTracks,proto3" json:"audio_tracks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechTranscriptionConfig) Reset() { *m = SpeechTranscriptionConfig{} } +func (m *SpeechTranscriptionConfig) String() string { return proto.CompactTextString(m) } +func (*SpeechTranscriptionConfig) ProtoMessage() {} +func (*SpeechTranscriptionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{16} +} +func (m *SpeechTranscriptionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechTranscriptionConfig.Unmarshal(m, b) +} +func (m *SpeechTranscriptionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechTranscriptionConfig.Marshal(b, m, deterministic) +} +func (dst *SpeechTranscriptionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechTranscriptionConfig.Merge(dst, src) +} +func (m *SpeechTranscriptionConfig) XXX_Size() int { + return xxx_messageInfo_SpeechTranscriptionConfig.Size(m) +} +func (m *SpeechTranscriptionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechTranscriptionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechTranscriptionConfig proto.InternalMessageInfo + +func (m *SpeechTranscriptionConfig) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *SpeechTranscriptionConfig) GetMaxAlternatives() int32 { + if m != nil { + return m.MaxAlternatives + } + return 0 +} + +func (m *SpeechTranscriptionConfig) GetFilterProfanity() bool { + if m != nil { + return m.FilterProfanity + } + return false +} + +func (m *SpeechTranscriptionConfig) GetSpeechContexts() []*SpeechContext { + if m != nil { + return m.SpeechContexts + } + return nil +} + +func (m *SpeechTranscriptionConfig) GetEnableAutomaticPunctuation() bool { + if m != nil { + return m.EnableAutomaticPunctuation + } + return false +} + +func (m *SpeechTranscriptionConfig) GetAudioTracks() []int32 { + if m != nil { + return m.AudioTracks + } + return nil +} + +// Provides "hints" to the speech recognizer to favor specific words and phrases +// in the results. +type SpeechContext struct { + // *Optional* A list of strings containing words and phrases "hints" so that + // the speech recognition is more likely to recognize them. This can be used + // to improve the accuracy for specific words and phrases, for example, if + // specific commands are typically spoken by the user. This can also be used + // to add additional words to the vocabulary of the recognizer. See + // [usage limits](https://cloud.google.com/speech/limits#content). 
+ Phrases []string `protobuf:"bytes,1,rep,name=phrases,proto3" json:"phrases,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechContext) Reset() { *m = SpeechContext{} } +func (m *SpeechContext) String() string { return proto.CompactTextString(m) } +func (*SpeechContext) ProtoMessage() {} +func (*SpeechContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{17} +} +func (m *SpeechContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechContext.Unmarshal(m, b) +} +func (m *SpeechContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechContext.Marshal(b, m, deterministic) +} +func (dst *SpeechContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechContext.Merge(dst, src) +} +func (m *SpeechContext) XXX_Size() int { + return xxx_messageInfo_SpeechContext.Size(m) +} +func (m *SpeechContext) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechContext proto.InternalMessageInfo + +func (m *SpeechContext) GetPhrases() []string { + if m != nil { + return m.Phrases + } + return nil +} + +// A speech recognition result corresponding to a portion of the audio. +type SpeechTranscription struct { + // Output only. May contain one or more recognition hypotheses (up to the + // maximum specified in `max_alternatives`). + // These alternatives are ordered in terms of accuracy, with the top (first) + // alternative being the most probable, as ranked by the recognizer. + Alternatives []*SpeechRecognitionAlternative `protobuf:"bytes,1,rep,name=alternatives,proto3" json:"alternatives,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechTranscription) Reset() { *m = SpeechTranscription{} } +func (m *SpeechTranscription) String() string { return proto.CompactTextString(m) } +func (*SpeechTranscription) ProtoMessage() {} +func (*SpeechTranscription) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{18} +} +func (m *SpeechTranscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechTranscription.Unmarshal(m, b) +} +func (m *SpeechTranscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechTranscription.Marshal(b, m, deterministic) +} +func (dst *SpeechTranscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechTranscription.Merge(dst, src) +} +func (m *SpeechTranscription) XXX_Size() int { + return xxx_messageInfo_SpeechTranscription.Size(m) +} +func (m *SpeechTranscription) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechTranscription.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechTranscription proto.InternalMessageInfo + +func (m *SpeechTranscription) GetAlternatives() []*SpeechRecognitionAlternative { + if m != nil { + return m.Alternatives + } + return nil +} + +// Alternative hypotheses (a.k.a. n-best list). +type SpeechRecognitionAlternative struct { + // Output only. Transcript text representing the words that the user spoke. + Transcript string `protobuf:"bytes,1,opt,name=transcript,proto3" json:"transcript,omitempty"` + // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // indicates an estimated greater likelihood that the recognized words are + // correct. 
This field is typically provided only for the top hypothesis, and + // only for `is_final=true` results. Clients should not rely on the + // `confidence` field as it is not guaranteed to be accurate or consistent. + // The default of 0.0 is a sentinel value indicating `confidence` was not set. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Output only. A list of word-specific information for each recognized word. + Words []*WordInfo `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpeechRecognitionAlternative) Reset() { *m = SpeechRecognitionAlternative{} } +func (m *SpeechRecognitionAlternative) String() string { return proto.CompactTextString(m) } +func (*SpeechRecognitionAlternative) ProtoMessage() {} +func (*SpeechRecognitionAlternative) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{19} +} +func (m *SpeechRecognitionAlternative) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpeechRecognitionAlternative.Unmarshal(m, b) +} +func (m *SpeechRecognitionAlternative) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpeechRecognitionAlternative.Marshal(b, m, deterministic) +} +func (dst *SpeechRecognitionAlternative) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpeechRecognitionAlternative.Merge(dst, src) +} +func (m *SpeechRecognitionAlternative) XXX_Size() int { + return xxx_messageInfo_SpeechRecognitionAlternative.Size(m) +} +func (m *SpeechRecognitionAlternative) XXX_DiscardUnknown() { + xxx_messageInfo_SpeechRecognitionAlternative.DiscardUnknown(m) +} + +var xxx_messageInfo_SpeechRecognitionAlternative proto.InternalMessageInfo + +func (m *SpeechRecognitionAlternative) GetTranscript() string { + if m != nil { + return m.Transcript + } + return "" +} + +func (m *SpeechRecognitionAlternative) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *SpeechRecognitionAlternative) GetWords() []*WordInfo { + if m != nil { + return m.Words + } + return nil +} + +// Word-specific information for recognized words. Word information is only +// included in the response when certain request parameters are set, such +// as `enable_word_time_offsets`. +type WordInfo struct { + // Output only. Time offset relative to the beginning of the audio, and + // corresponding to the start of the spoken word. This field is only set if + // `enable_word_time_offsets=true` and only in the top hypothesis. This is an + // experimental feature and the accuracy of the time offset can vary. + StartTime *duration.Duration `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time offset relative to the beginning of the audio, and + // corresponding to the end of the spoken word. This field is only set if + // `enable_word_time_offsets=true` and only in the top hypothesis. This is an + // experimental feature and the accuracy of the time offset can vary. + EndTime *duration.Duration `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. The word corresponding to this set of information. 
+ Word string `protobuf:"bytes,3,opt,name=word,proto3" json:"word,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WordInfo) Reset() { *m = WordInfo{} } +func (m *WordInfo) String() string { return proto.CompactTextString(m) } +func (*WordInfo) ProtoMessage() {} +func (*WordInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_d0764d772f800fc8, []int{20} +} +func (m *WordInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WordInfo.Unmarshal(m, b) +} +func (m *WordInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WordInfo.Marshal(b, m, deterministic) +} +func (dst *WordInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WordInfo.Merge(dst, src) +} +func (m *WordInfo) XXX_Size() int { + return xxx_messageInfo_WordInfo.Size(m) +} +func (m *WordInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WordInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WordInfo proto.InternalMessageInfo + +func (m *WordInfo) GetStartTime() *duration.Duration { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *WordInfo) GetEndTime() *duration.Duration { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *WordInfo) GetWord() string { + if m != nil { + return m.Word + } + return "" +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1p1beta1.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1p1beta1.VideoContext") + proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1p1beta1.LabelDetectionConfig") + proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1p1beta1.ShotChangeDetectionConfig") + proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1p1beta1.ExplicitContentDetectionConfig") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1p1beta1.VideoSegment") + proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1p1beta1.LabelSegment") + proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1p1beta1.LabelFrame") + proto.RegisterType((*Entity)(nil), "google.cloud.videointelligence.v1p1beta1.Entity") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1p1beta1.LabelAnnotation") + proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1p1beta1.ExplicitContentFrame") + proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1p1beta1.ExplicitContentAnnotation") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1p1beta1.VideoAnnotationResults") + proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1p1beta1.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1p1beta1.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1p1beta1.AnnotateVideoProgress") + proto.RegisterType((*SpeechTranscriptionConfig)(nil), "google.cloud.videointelligence.v1p1beta1.SpeechTranscriptionConfig") + proto.RegisterType((*SpeechContext)(nil), "google.cloud.videointelligence.v1p1beta1.SpeechContext") + proto.RegisterType((*SpeechTranscription)(nil), "google.cloud.videointelligence.v1p1beta1.SpeechTranscription") + 
proto.RegisterType((*SpeechRecognitionAlternative)(nil), "google.cloud.videointelligence.v1p1beta1.SpeechRecognitionAlternative") + proto.RegisterType((*WordInfo)(nil), "google.cloud.videointelligence.v1p1beta1.WordInfo") + proto.RegisterEnum("google.cloud.videointelligence.v1p1beta1.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p1beta1.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p1beta1.Likelihood", Likelihood_name, Likelihood_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). 
+ AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1p1beta1.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1p1beta1/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1p1beta1/video_intelligence.proto", fileDescriptor_video_intelligence_d0764d772f800fc8) +} + +var fileDescriptor_video_intelligence_d0764d772f800fc8 = []byte{ + // 1807 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xcd, 0x6f, 0x23, 0x49, + 0x15, 0xa7, 0xed, 0xc4, 0x89, 0x9f, 0x9d, 0xd8, 0x53, 0x71, 0x12, 0x27, 0x3b, 0x93, 0xc9, 0xf6, + 0x82, 0x94, 0x1d, 0x90, 0x4d, 0xc2, 0x6a, 0x61, 0x67, 0x61, 0xb5, 0x8e, 0xd3, 0xd9, 0x58, 0x64, + 0x62, 0xab, 0xec, 0x09, 0x0c, 0x0c, 0x6a, 0x3a, 0xdd, 0xe5, 0x4e, 0x33, 0xed, 0xae, 0xde, 0xee, + 0xea, 0x30, 0x73, 0x5b, 0x3e, 0x24, 0x90, 0x40, 0x5c, 0x46, 0x48, 0xfc, 0x09, 0x88, 0x23, 0xff, + 0x00, 0x17, 0x2e, 0x70, 0xe5, 0x84, 0x04, 0x37, 0xfe, 0x0b, 0x2e, 0xab, 0xae, 0xaa, 0xb6, 0xdb, + 0x76, 0xbe, 0x9c, 0xdc, 0xdc, 0xef, 0xe3, 0xf7, 0x3e, 0xea, 0xd5, 0x7b, 0xaf, 0x0c, 0x0d, 0x9b, + 0x52, 0xdb, 0x25, 0x75, 0xd3, 0xa5, 0x91, 0x55, 0xbf, 0x70, 0x2c, 0x42, 0x1d, 0x8f, 0x11, 0xd7, + 0x75, 0x6c, 0xe2, 0x99, 0xa4, 0x7e, 0xb1, 0xeb, 0xef, 0x9e, 0x11, 0x66, 0xec, 0x0a, 0x9e, 0x9e, + 0x66, 0xd6, 0xfc, 0x80, 0x32, 0x8a, 0x76, 0x04, 0x44, 0x8d, 0x43, 0xd4, 0xa6, 0x20, 0x6a, 0x43, + 0x88, 0xcd, 0x87, 0xd2, 0x98, 0xe1, 0x3b, 0x75, 0xc3, 0xf3, 0x28, 0x33, 0x98, 0x43, 0xbd, 0x50, + 0xe0, 0x6c, 0xbe, 0x27, 0xb9, 0x2e, 0xf5, 0xec, 0x20, 0xf2, 0x3c, 0xc7, 0xb3, 0xeb, 0xd4, 0x27, + 0xc1, 0x98, 0xd0, 0x96, 0x14, 0xe2, 0x5f, 0x67, 0x51, 0xbf, 0x6e, 0x45, 0x42, 0x40, 0xf2, 0x1f, + 0x4f, 0xf2, 0x99, 0x33, 0x20, 0x21, 0x33, 0x06, 0xbe, 0x14, 0x58, 0x97, 0x02, 0x81, 0x6f, 0xd6, + 0x43, 0x66, 0xb0, 0x48, 0x22, 0xab, 0x7f, 0xcf, 0x40, 0xa5, 0x21, 0x9c, 0x22, 0xa7, 0x71, 0x10, + 0x98, 0x7c, 0x1e, 0x91, 0x90, 0xa1, 0x77, 0x20, 0xef, 0x78, 0x7e, 0xc4, 0xf4, 0x28, 0x70, 0xaa, + 0xca, 0xb6, 0xb2, 0x93, 0xc7, 0x8b, 0x9c, 0xf0, 0x3c, 0x70, 0xd0, 0x7b, 0xb0, 0x24, 0x98, 0x26, + 0xf5, 0x18, 0xf1, 0x58, 0x35, 0xb7, 0xad, 0xec, 0x14, 0x71, 0x91, 0x13, 0x9b, 0x82, 0x86, 0x9e, + 0xc1, 0x62, 0x9f, 0x18, 0x2c, 0x0a, 0x48, 
0x58, 0xcd, 0x6c, 0x67, 0x77, 0x96, 0xf7, 0x76, 0x6b, + 0xb7, 0x4d, 0x5a, 0xed, 0x50, 0x68, 0xe2, 0x21, 0x04, 0xfa, 0x31, 0x2c, 0x89, 0xc3, 0xe0, 0x36, + 0x5f, 0xb3, 0x6a, 0x76, 0x5b, 0xd9, 0x29, 0xec, 0x7d, 0x78, 0x7b, 0x4c, 0x1e, 0x5f, 0x53, 0x68, + 0xe3, 0xe2, 0x45, 0xea, 0x0b, 0x3d, 0x02, 0xa0, 0x11, 0x4b, 0xc2, 0x9d, 0xe3, 0xe1, 0xe6, 0x05, + 0x25, 0x8e, 0xf7, 0x31, 0x14, 0x5c, 0x6a, 0xf2, 0x8c, 0xeb, 0x8e, 0x55, 0x9d, 0xe7, 0x7c, 0x48, + 0x48, 0x2d, 0x4b, 0xfd, 0xff, 0x1c, 0x14, 0xd3, 0xf0, 0x08, 0xc3, 0x62, 0x48, 0xec, 0x01, 0xf1, + 0x58, 0x58, 0x55, 0xb6, 0xb3, 0x77, 0x70, 0xb4, 0x2b, 0xd4, 0xf1, 0x10, 0x07, 0x31, 0x58, 0x73, + 0x8d, 0x33, 0xe2, 0xea, 0x16, 0x61, 0xc4, 0xe4, 0xce, 0x98, 0xd4, 0xeb, 0x3b, 0x76, 0x35, 0xc3, + 0x53, 0xf1, 0xc9, 0xed, 0x2d, 0x1c, 0xc7, 0x38, 0x07, 0x09, 0x4c, 0x93, 0xa3, 0xe0, 0x8a, 0x7b, + 0x09, 0x15, 0xfd, 0x5a, 0x81, 0x87, 0xe1, 0x39, 0x65, 0xba, 0x79, 0x6e, 0x78, 0x36, 0x99, 0x36, + 0x2e, 0xce, 0xa1, 0x79, 0x7b, 0xe3, 0xdd, 0x73, 0xca, 0x9a, 0x1c, 0x6c, 0xd2, 0x83, 0x8d, 0xf0, + 0x2a, 0x16, 0x7a, 0xab, 0xc0, 0xbb, 0xe4, 0xb5, 0xef, 0x3a, 0xa6, 0x33, 0x2c, 0xbb, 0x69, 0x5f, + 0xe6, 0xb8, 0x2f, 0x47, 0xb7, 0xf7, 0x45, 0x93, 0x90, 0xb2, 0x68, 0x27, 0x1d, 0xda, 0x22, 0xd7, + 0xf2, 0xd1, 0xaf, 0x14, 0x78, 0x27, 0xf4, 0x09, 0x31, 0xcf, 0x75, 0x16, 0x18, 0x5e, 0x68, 0x06, + 0x8e, 0x9f, 0xf6, 0x27, 0x37, 0x73, 0x6e, 0x38, 0x58, 0x2f, 0x8d, 0x35, 0xcc, 0xcd, 0x55, 0x2c, + 0xf5, 0x1f, 0x0a, 0x54, 0x2e, 0x3b, 0x51, 0xe4, 0x41, 0x65, 0xb2, 0x62, 0x06, 0xd4, 0x22, 0xfc, + 0x3e, 0x2f, 0xef, 0x7d, 0xf7, 0xae, 0xf5, 0xf2, 0x8c, 0x5a, 0x04, 0x23, 0x77, 0x8a, 0x86, 0xbe, + 0x0e, 0x0f, 0x42, 0xd1, 0xde, 0x8c, 0xe0, 0x8d, 0x6e, 0x1a, 0x03, 0x12, 0x18, 0xbc, 0x38, 0x17, + 0x71, 0x79, 0xc4, 0x68, 0x72, 0x3a, 0xaa, 0xc0, 0x7c, 0xec, 0x8c, 0xcb, 0x0b, 0x28, 0x8f, 0xc5, + 0x87, 0xba, 0x0b, 0x1b, 0x57, 0xd6, 0xc7, 0x48, 0x45, 0x49, 0xab, 0x7c, 0x08, 0x5b, 0xd7, 0x1f, + 0xe3, 0x15, 0x7a, 0x7f, 0x52, 0xe4, 0xa5, 0x95, 0x57, 0x0d, 0x69, 0xdc, 0xfd, 0x80, 0xe9, 0x71, + 0xfb, 0xd4, 0x69, 0xbf, 0x1f, 0x12, 0xc6, 0x55, 0x0a, 0x7b, 0x1b, 0x49, 0xae, 0x92, 0x16, 0x5b, + 0x3b, 0x90, 0x2d, 0x18, 0x97, 0xb8, 0x4e, 0xcf, 0x19, 0x90, 0x36, 0xd7, 0x40, 0x0d, 0x28, 0x11, + 0xcf, 0x1a, 0x03, 0xc9, 0xdc, 0x04, 0xb2, 0x44, 0x3c, 0x6b, 0x04, 0xa1, 0x7e, 0xa1, 0x40, 0x91, + 0xe7, 0x3c, 0x71, 0xad, 0x03, 0x0b, 0xb2, 0x0f, 0x48, 0x87, 0xee, 0xda, 0x4e, 0x12, 0x18, 0xb4, + 0x05, 0xc0, 0x8b, 0xd4, 0x8a, 0xa5, 0xb9, 0x83, 0x19, 0x9c, 0xa2, 0xa8, 0xe7, 0x00, 0xdc, 0x83, + 0xc3, 0xc0, 0x18, 0x10, 0xf4, 0x14, 0x0a, 0x33, 0x25, 0x05, 0xd8, 0x28, 0x1f, 0x37, 0x59, 0x72, + 0x21, 0xa7, 0x79, 0xcc, 0x61, 0x6f, 0xe2, 0xa1, 0x43, 0xf8, 0xaf, 0xb8, 0xcb, 0xca, 0xa1, 0x23, + 0x08, 0x2d, 0x0b, 0x6d, 0x43, 0xc1, 0x22, 0xc3, 0xd2, 0xe7, 0x38, 0x79, 0x9c, 0x26, 0xc5, 0x63, + 0xc9, 0x35, 0x3c, 0x3b, 0x32, 0x6c, 0xa2, 0x9b, 0x71, 0x9d, 0x8b, 0xca, 0x2a, 0x26, 0xc4, 0x26, + 0xb5, 0x88, 0xfa, 0x9f, 0x0c, 0x94, 0x78, 0x60, 0x8d, 0xe1, 0x2c, 0x46, 0x47, 0x90, 0x13, 0x66, + 0x64, 0x60, 0xdf, 0x9c, 0xa1, 0x81, 0x70, 0x3d, 0x2c, 0xf5, 0xd1, 0x4f, 0xe0, 0x81, 0x69, 0x30, + 0x62, 0xd3, 0xe0, 0x8d, 0xce, 0x49, 0x8e, 0x9c, 0x7e, 0x77, 0x01, 0x2d, 0x27, 0x50, 0x9a, 0x44, + 0x1a, 0x1b, 0x2b, 0xd9, 0x59, 0xc7, 0x4a, 0xba, 0xa0, 0x52, 0x63, 0xe5, 0x18, 0x72, 0xfd, 0xf8, + 0x8c, 0xc3, 0xea, 0x1c, 0x47, 0xfc, 0x60, 0x46, 0x44, 0x5e, 0x20, 0x58, 0x62, 0xa8, 0x7f, 0x53, + 0xa0, 0x32, 0x71, 0x1b, 0xef, 0x5f, 0x41, 0xaf, 0x60, 0xcd, 0xa7, 0x81, 0x47, 0xed, 0xc0, 0xf0, + 0xcf, 0xdf, 0xe8, 0xae, 0xf3, 0x8a, 0xb8, 0xce, 0x39, 0xa5, 0x16, 
0xaf, 0x82, 0xe5, 0x99, 0x5c, + 0x1e, 0xea, 0xe2, 0xd5, 0x14, 0xe6, 0x88, 0xac, 0x86, 0xb0, 0x31, 0x11, 0x40, 0xaa, 0x52, 0x4e, + 0x87, 0xc9, 0x12, 0x53, 0xfd, 0x93, 0x3b, 0x8f, 0x9a, 0xf1, 0xb4, 0xfd, 0x26, 0x07, 0x6b, 0xfc, + 0x9e, 0x8e, 0x6c, 0x61, 0x12, 0x46, 0x2e, 0x0b, 0xaf, 0xdf, 0xc4, 0x22, 0xd8, 0x90, 0x07, 0xa9, + 0x8b, 0x4e, 0x9f, 0xda, 0x30, 0x65, 0xdd, 0x7d, 0x34, 0xe3, 0x79, 0xa6, 0x3c, 0x58, 0x97, 0xd8, + 0x13, 0xf4, 0x10, 0x51, 0x58, 0xe3, 0x3b, 0xc1, 0xb4, 0xcd, 0xec, 0x7d, 0x6d, 0x56, 0x62, 0xe0, + 0x29, 0x83, 0x9f, 0xc3, 0x3a, 0xcf, 0xd4, 0x25, 0x16, 0xe7, 0xee, 0x6b, 0x71, 0x95, 0x23, 0x4f, + 0x99, 0x34, 0xa0, 0xcc, 0x63, 0x4c, 0xdb, 0xca, 0xdd, 0x6b, 0x95, 0x2b, 0xc5, 0x78, 0x69, 0x13, + 0x0c, 0x56, 0x86, 0x3b, 0xcd, 0xc8, 0x4c, 0x75, 0x61, 0xd6, 0xad, 0xe1, 0xca, 0x7a, 0xc5, 0x28, + 0xc1, 0x4f, 0xd5, 0x70, 0x00, 0xab, 0x97, 0xed, 0x2c, 0x61, 0xb5, 0xc0, 0xa3, 0xfb, 0xde, 0xbd, + 0xb6, 0x15, 0x5c, 0xb9, 0x64, 0x4f, 0x09, 0xd1, 0x0e, 0xcc, 0x93, 0x20, 0xa0, 0x41, 0x35, 0xcf, + 0x63, 0x43, 0x89, 0x8d, 0xc0, 0x37, 0x6b, 0x5d, 0xfe, 0x20, 0xc1, 0x42, 0x40, 0xfd, 0xad, 0x02, + 0xab, 0x13, 0x2f, 0x92, 0xd0, 0xa7, 0x5e, 0x48, 0x10, 0x05, 0x34, 0x4a, 0x92, 0x1e, 0x88, 0xeb, + 0x21, 0xef, 0xe1, 0xa7, 0x33, 0x1e, 0xc9, 0xd4, 0x35, 0xc3, 0x0f, 0x8c, 0x49, 0x92, 0xfa, 0x6f, + 0x05, 0xd6, 0x27, 0xa4, 0x3b, 0x01, 0xb5, 0x03, 0x12, 0xde, 0x70, 0x2b, 0xdf, 0x87, 0xb2, 0x2f, + 0x05, 0x75, 0x9f, 0x04, 0x66, 0x3c, 0xb6, 0xe3, 0x4e, 0x35, 0x8f, 0x4b, 0x09, 0xbd, 0x23, 0xc8, + 0xe8, 0x23, 0x80, 0xd1, 0xce, 0x21, 0x77, 0xe9, 0xcd, 0xa9, 0xae, 0xd8, 0x4b, 0xde, 0x73, 0x38, + 0x3f, 0xdc, 0x36, 0xd0, 0xc7, 0x50, 0x88, 0x7c, 0xcb, 0x60, 0x44, 0xe8, 0xce, 0xdd, 0xa8, 0x0b, + 0x42, 0x3c, 0x26, 0xa8, 0xbf, 0x9b, 0x4c, 0xf3, 0x30, 0xb2, 0x00, 0x56, 0x52, 0x69, 0x4e, 0xfc, + 0x95, 0x79, 0x6e, 0xdc, 0x39, 0xcf, 0x09, 0x3e, 0x4e, 0x1d, 0x62, 0x42, 0x53, 0xff, 0x9b, 0x81, + 0x8d, 0x2b, 0x57, 0xdf, 0xe9, 0xb9, 0xae, 0x4c, 0xcf, 0xf5, 0x38, 0xe7, 0x03, 0xe3, 0xb5, 0x6e, + 0xb8, 0x8c, 0x04, 0x9e, 0xc1, 0x9c, 0x0b, 0x3e, 0x78, 0x79, 0xce, 0x07, 0xc6, 0xeb, 0x46, 0x8a, + 0x1c, 0x8b, 0xf6, 0x9d, 0x98, 0x10, 0x47, 0xd7, 0x37, 0xbc, 0x78, 0xf0, 0x67, 0xf9, 0x96, 0x5a, + 0x12, 0xf4, 0x4e, 0x42, 0x46, 0x3f, 0x85, 0x92, 0xbc, 0x2b, 0xf2, 0xd9, 0x99, 0xf4, 0x9b, 0x6f, + 0xcf, 0x7a, 0x4b, 0x92, 0x87, 0xe7, 0x72, 0x98, 0xfe, 0x0c, 0xd1, 0xa7, 0xf0, 0x90, 0x78, 0xc6, + 0x99, 0x4b, 0x74, 0x23, 0x62, 0x74, 0x60, 0x30, 0xc7, 0xd4, 0xfd, 0xc8, 0x33, 0x59, 0x24, 0x9a, + 0xc1, 0x3c, 0x77, 0x6c, 0x53, 0xc8, 0x34, 0x12, 0x91, 0xce, 0x48, 0x02, 0xbd, 0x0b, 0x45, 0x23, + 0xb2, 0x1c, 0x1a, 0x5f, 0x67, 0xf3, 0x95, 0x68, 0x52, 0xf3, 0xb8, 0xc0, 0x69, 0x3d, 0x4e, 0x52, + 0xdf, 0x87, 0xa5, 0x31, 0x2f, 0x50, 0x15, 0x16, 0xfc, 0xf3, 0xc0, 0x08, 0xe5, 0x20, 0xcb, 0xe3, + 0xe4, 0x53, 0xfd, 0x85, 0x02, 0x2b, 0x97, 0x1c, 0x05, 0xfa, 0x19, 0x14, 0xc7, 0x72, 0x2b, 0xea, + 0xe1, 0x70, 0xd6, 0x34, 0x60, 0x62, 0x52, 0xdb, 0x73, 0x62, 0xc8, 0xd4, 0x99, 0xe0, 0x31, 0x6c, + 0xf5, 0xcf, 0x0a, 0x3c, 0xbc, 0x4e, 0x3c, 0x5e, 0x29, 0x47, 0xbd, 0x4b, 0x96, 0x43, 0x8a, 0x72, + 0xd3, 0xca, 0x89, 0x8e, 0x60, 0xfe, 0xe7, 0x34, 0xb0, 0x92, 0x71, 0xb5, 0x77, 0xfb, 0x28, 0x7e, + 0x40, 0x03, 0xab, 0xe5, 0xf5, 0x29, 0x16, 0x00, 0xea, 0x1f, 0x14, 0x58, 0x4c, 0x68, 0xe8, 0x3b, + 0x63, 0x97, 0xf9, 0xc6, 0x15, 0x27, 0x75, 0x97, 0x3f, 0x80, 0xc5, 0xe4, 0xcd, 0x70, 0xf3, 0x63, + 0x61, 0x41, 0x3e, 0x16, 0x10, 0x82, 0xb9, 0xd8, 0x0b, 0xb9, 0xe7, 0xf2, 0xdf, 0x4f, 0x7e, 0xaf, + 0xc0, 0x82, 0xfc, 0xf7, 0x04, 0xad, 0xc3, 0xca, 0xa1, 0xd6, 0xe8, 0x3d, 0xc7, 0x9a, 0xfe, 
0xfc, + 0xa4, 0xdb, 0xd1, 0x9a, 0xad, 0xc3, 0x96, 0x76, 0x50, 0xfe, 0x0a, 0x5a, 0x81, 0xd2, 0x71, 0x63, + 0x5f, 0x3b, 0xd6, 0x0f, 0xb4, 0x9e, 0xd6, 0xec, 0xb5, 0xda, 0x27, 0x65, 0x05, 0x6d, 0xc0, 0x6a, + 0xf7, 0xa8, 0xdd, 0xd3, 0x9b, 0x47, 0x8d, 0x93, 0xcf, 0xb4, 0x14, 0x2b, 0x83, 0xb6, 0x60, 0x53, + 0xfb, 0x61, 0xe7, 0xb8, 0xd5, 0x6c, 0xf5, 0xf4, 0x66, 0xfb, 0xa4, 0xa7, 0x9d, 0xf4, 0x52, 0xfc, + 0x2c, 0xaa, 0x42, 0xa5, 0xdb, 0xd1, 0xb4, 0xe6, 0x91, 0xde, 0xc3, 0x8d, 0x93, 0x6e, 0x13, 0xb7, + 0x3a, 0x9c, 0x93, 0x7b, 0x12, 0x00, 0x9a, 0x7e, 0x3c, 0xa2, 0xaf, 0xc2, 0xf6, 0x84, 0x7d, 0xfd, + 0x59, 0xfb, 0x60, 0xd2, 0xcb, 0x25, 0xc8, 0x73, 0x87, 0x62, 0x56, 0x59, 0x41, 0xcb, 0x00, 0x87, + 0xb8, 0xf1, 0x4c, 0x13, 0xdf, 0x99, 0x38, 0x3a, 0xce, 0x6e, 0x9c, 0x1c, 0xe8, 0x29, 0x46, 0xf6, + 0x09, 0x03, 0x18, 0xed, 0x73, 0x68, 0x13, 0xd6, 0x8e, 0x5b, 0xdf, 0xd7, 0x8e, 0x5b, 0x47, 0xed, + 0xf6, 0xc1, 0x84, 0x85, 0x07, 0xb0, 0x74, 0xaa, 0xe1, 0x17, 0xfa, 0xf3, 0x13, 0x2e, 0xf2, 0xa2, + 0xac, 0xa0, 0x22, 0x2c, 0x0e, 0xbf, 0x32, 0xf1, 0x57, 0xa7, 0xdd, 0xed, 0xb6, 0xf6, 0x8f, 0xb5, + 0x72, 0x16, 0x01, 0xe4, 0x24, 0x67, 0x0e, 0x95, 0xa0, 0xc0, 0x55, 0x25, 0x61, 0x7e, 0xef, 0xaf, + 0x0a, 0x54, 0x79, 0xcf, 0x6b, 0xa5, 0x2a, 0xa7, 0x4b, 0x82, 0x0b, 0xc7, 0x24, 0xe8, 0x8f, 0x0a, + 0x2c, 0x8d, 0xb5, 0x5b, 0x34, 0xc3, 0xe6, 0x78, 0xd9, 0x1f, 0x74, 0x9b, 0x8f, 0x12, 0xfd, 0xd4, + 0x3f, 0x87, 0xb5, 0x76, 0xf2, 0xcf, 0xa1, 0xfa, 0xb5, 0x5f, 0xfe, 0xeb, 0x7f, 0x6f, 0x33, 0x8f, + 0xd5, 0xcd, 0xc9, 0x3f, 0x33, 0xc3, 0xa7, 0xb2, 0xff, 0x92, 0xa7, 0xca, 0x93, 0xfd, 0x2f, 0x32, + 0xf0, 0x0d, 0x93, 0x0e, 0x6e, 0xed, 0xcb, 0xfe, 0xa3, 0xab, 0x42, 0xec, 0xc4, 0x65, 0xda, 0x51, + 0x7e, 0xf4, 0x42, 0x42, 0xd9, 0x34, 0x6e, 0xcf, 0x35, 0x1a, 0xd8, 0x75, 0x9b, 0x78, 0xbc, 0x88, + 0xeb, 0x82, 0x65, 0xf8, 0x4e, 0x78, 0xf3, 0x5f, 0xaf, 0x1f, 0x4f, 0xf1, 0xfe, 0x92, 0xd9, 0xf9, + 0x4c, 0x60, 0x37, 0xb9, 0x9b, 0x53, 0x9e, 0xd4, 0x4e, 0x77, 0x3b, 0xbb, 0xfb, 0xb1, 0xf2, 0x3f, + 0x13, 0xd1, 0x97, 0x5c, 0xf4, 0xe5, 0x94, 0xe8, 0xcb, 0xd3, 0xc4, 0xce, 0x59, 0x8e, 0xfb, 0xf6, + 0xad, 0x2f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x82, 0x99, 0xe4, 0x9b, 0x15, 0x16, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1/video_intelligence.pb.go new file mode 100644 index 0000000..baaf489 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1/video_intelligence.pb.go @@ -0,0 +1,1855 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 2 + // Explicit content detection. + Feature_EXPLICIT_CONTENT_DETECTION Feature = 3 + // OCR text detection and tracking. + Feature_TEXT_DETECTION Feature = 7 + // Object detection and tracking. + Feature_OBJECT_TRACKING Feature = 9 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "SHOT_CHANGE_DETECTION", + 3: "EXPLICIT_CONTENT_DETECTION", + 7: "TEXT_DETECTION", + 9: "OBJECT_TRACKING", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "SHOT_CHANGE_DETECTION": 2, + "EXPLICIT_CONTENT_DETECTION": 3, + "TEXT_DETECTION": 7, + "OBJECT_TRACKING": 9, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{0} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. + LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{1} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unspecified likelihood. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Very unlikely. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. 
+ Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{2} +} + +// Video annotation request. +type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. + // If unset, the input video(s) should be specified via `input_uri`. + // If set, `input_uri` should be unset. + InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1p2beta1.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. + VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. 
+ LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() []byte { + if m != nil { + return m.InputContent + } + return nil +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. If unspecified, each video is + // treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // Config for LABEL_DETECTION. + LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig,proto3" json:"label_detection_config,omitempty"` + // Config for SHOT_CHANGE_DETECTION. + ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3" json:"shot_change_detection_config,omitempty"` + // Config for EXPLICIT_CONTENT_DETECTION. + ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3" json:"explicit_content_detection_config,omitempty"` + // Config for TEXT_DETECTION. 
+ TextDetectionConfig *TextDetectionConfig `protobuf:"bytes,8,opt,name=text_detection_config,json=textDetectionConfig,proto3" json:"text_detection_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig { + if m != nil { + return m.LabelDetectionConfig + } + return nil +} + +func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig { + if m != nil { + return m.ShotChangeDetectionConfig + } + return nil +} + +func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig { + if m != nil { + return m.ExplicitContentDetectionConfig + } + return nil +} + +func (m *VideoContext) GetTextDetectionConfig() *TextDetectionConfig { + if m != nil { + return m.TextDetectionConfig + } + return nil +} + +// Config for LABEL_DETECTION. +type LabelDetectionConfig struct { + // What labels should be detected with LABEL_DETECTION, in addition to + // video-level labels or segment-level labels. + // If unspecified, defaults to `SHOT_MODE`. + LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1p2beta1.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + // Should be used with `SHOT_AND_FRAME_MODE` enabled. + StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". 
+ Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} } +func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*LabelDetectionConfig) ProtoMessage() {} +func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{2} +} +func (m *LabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDetectionConfig.Unmarshal(m, b) +} +func (m *LabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *LabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDetectionConfig.Merge(dst, src) +} +func (m *LabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_LabelDetectionConfig.Size(m) +} +func (m *LabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDetectionConfig proto.InternalMessageInfo + +func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *LabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *LabelDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for SHOT_CHANGE_DETECTION. +type ShotChangeDetectionConfig struct { + // Model to use for shot change detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} } +func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ShotChangeDetectionConfig) ProtoMessage() {} +func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{3} +} +func (m *ShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *ShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShotChangeDetectionConfig.Merge(dst, src) +} +func (m *ShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ShotChangeDetectionConfig.Size(m) +} +func (m *ShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ShotChangeDetectionConfig proto.InternalMessageInfo + +func (m *ShotChangeDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION. +type ExplicitContentDetectionConfig struct { + // Model to use for explicit content detection. 
+ // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} } +func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentDetectionConfig) ProtoMessage() {} +func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{4} +} +func (m *ExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *ExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *ExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ExplicitContentDetectionConfig.Size(m) +} +func (m *ExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentDetectionConfig proto.InternalMessageInfo + +func (m *ExplicitContentDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for TEXT_DETECTION. +type TextDetectionConfig struct { + // Language hint can be specified if the language to be detected is known a + // priori. It can increase the accuracy of the detection. Language hint must + // be language code in BCP-47 format. + // + // Automatic language detection is performed if no hint is provided. + LanguageHints []string `protobuf:"bytes,1,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextDetectionConfig) Reset() { *m = TextDetectionConfig{} } +func (m *TextDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*TextDetectionConfig) ProtoMessage() {} +func (*TextDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{5} +} +func (m *TextDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextDetectionConfig.Unmarshal(m, b) +} +func (m *TextDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *TextDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextDetectionConfig.Merge(dst, src) +} +func (m *TextDetectionConfig) XXX_Size() int { + return xxx_messageInfo_TextDetectionConfig.Size(m) +} +func (m *TextDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TextDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TextDetectionConfig proto.InternalMessageInfo + +func (m *TextDetectionConfig) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +// Video segment. +type VideoSegment struct { + // Time-offset, relative to the beginning of the video, + // corresponding to the start of the segment (inclusive). 
+ StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the end of the segment (inclusive). + EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{6} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *VideoSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video segment level annotation results for label detection. +type LabelSegment struct { + // Video segment where a label was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelSegment) Reset() { *m = LabelSegment{} } +func (m *LabelSegment) String() string { return proto.CompactTextString(m) } +func (*LabelSegment) ProtoMessage() {} +func (*LabelSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{7} +} +func (m *LabelSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelSegment.Unmarshal(m, b) +} +func (m *LabelSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelSegment.Marshal(b, m, deterministic) +} +func (dst *LabelSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelSegment.Merge(dst, src) +} +func (m *LabelSegment) XXX_Size() int { + return xxx_messageInfo_LabelSegment.Size(m) +} +func (m *LabelSegment) XXX_DiscardUnknown() { + xxx_messageInfo_LabelSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelSegment proto.InternalMessageInfo + +func (m *LabelSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Video frame level annotation results for label detection. 
+type LabelFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelFrame) Reset() { *m = LabelFrame{} } +func (m *LabelFrame) String() string { return proto.CompactTextString(m) } +func (*LabelFrame) ProtoMessage() {} +func (*LabelFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{8} +} +func (m *LabelFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelFrame.Unmarshal(m, b) +} +func (m *LabelFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelFrame.Marshal(b, m, deterministic) +} +func (dst *LabelFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelFrame.Merge(dst, src) +} +func (m *LabelFrame) XXX_Size() int { + return xxx_messageInfo_LabelFrame.Size(m) +} +func (m *LabelFrame) XXX_DiscardUnknown() { + xxx_messageInfo_LabelFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelFrame proto.InternalMessageInfo + +func (m *LabelFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *LabelFrame) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected entity from video analysis. +type Entity struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Textual description, e.g. `Fixed-gear bicycle`. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{9} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *Entity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entity) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Label annotation. +type LabelAnnotation struct { + // Detected entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Common categories for the detected entity. + // E.g. when the label is `Terrier` the category is likely `dog`. And in some + // cases there might be more than one categories e.g. `Terrier` could also be + // a `pet`. + CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities,proto3" json:"category_entities,omitempty"` + // All video segments where a label was detected. + Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a label was detected. 
+ Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{10} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *LabelAnnotation) GetCategoryEntities() []*Entity { + if m != nil { + return m.CategoryEntities + } + return nil +} + +func (m *LabelAnnotation) GetSegments() []*LabelSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *LabelAnnotation) GetFrames() []*LabelFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for explicit content. +type ExplicitContentFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Likelihood of the pornography content.. 
+ PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,proto3,enum=google.cloud.videointelligence.v1p2beta1.Likelihood" json:"pornography_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} } +func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentFrame) ProtoMessage() {} +func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{11} +} +func (m *ExplicitContentFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentFrame.Unmarshal(m, b) +} +func (m *ExplicitContentFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentFrame.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentFrame.Merge(dst, src) +} +func (m *ExplicitContentFrame) XXX_Size() int { + return xxx_messageInfo_ExplicitContentFrame.Size(m) +} +func (m *ExplicitContentFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentFrame proto.InternalMessageInfo + +func (m *ExplicitContentFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood { + if m != nil { + return m.PornographyLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +// Explicit content annotation (based on per-frame visual signals only). +// If no explicit content has been detected in a frame, no annotations are +// present for that frame. +type ExplicitContentAnnotation struct { + // All video frames where explicit content was detected. + Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} } +func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentAnnotation) ProtoMessage() {} +func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{12} +} +func (m *ExplicitContentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentAnnotation.Unmarshal(m, b) +} +func (m *ExplicitContentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentAnnotation.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentAnnotation.Merge(dst, src) +} +func (m *ExplicitContentAnnotation) XXX_Size() int { + return xxx_messageInfo_ExplicitContentAnnotation.Size(m) +} +func (m *ExplicitContentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentAnnotation proto.InternalMessageInfo + +func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Normalized bounding box. 
+// The normalized vertex coordinates are relative to the original image. +// Range: [0, 1]. +type NormalizedBoundingBox struct { + // Left X coordinate. + Left float32 `protobuf:"fixed32,1,opt,name=left,proto3" json:"left,omitempty"` + // Top Y coordinate. + Top float32 `protobuf:"fixed32,2,opt,name=top,proto3" json:"top,omitempty"` + // Right X coordinate. + Right float32 `protobuf:"fixed32,3,opt,name=right,proto3" json:"right,omitempty"` + // Bottom Y coordinate. + Bottom float32 `protobuf:"fixed32,4,opt,name=bottom,proto3" json:"bottom,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingBox) Reset() { *m = NormalizedBoundingBox{} } +func (m *NormalizedBoundingBox) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingBox) ProtoMessage() {} +func (*NormalizedBoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{13} +} +func (m *NormalizedBoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingBox.Unmarshal(m, b) +} +func (m *NormalizedBoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingBox.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingBox.Merge(dst, src) +} +func (m *NormalizedBoundingBox) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingBox.Size(m) +} +func (m *NormalizedBoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingBox proto.InternalMessageInfo + +func (m *NormalizedBoundingBox) GetLeft() float32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *NormalizedBoundingBox) GetTop() float32 { + if m != nil { + return m.Top + } + return 0 +} + +func (m *NormalizedBoundingBox) GetRight() float32 { + if m != nil { + return m.Right + } + return 0 +} + +func (m *NormalizedBoundingBox) GetBottom() float32 { + if m != nil { + return m.Bottom + } + return 0 +} + +// Annotation results for a single video. +type VideoAnnotationResults struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. + SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations,proto3" json:"segment_label_annotations,omitempty"` + // Label annotations on shot level. + // There is exactly one element for each unique label. + ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations,proto3" json:"shot_label_annotations,omitempty"` + // Label annotations on frame level. + // There is exactly one element for each unique label. + FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations,proto3" json:"frame_label_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Explicit content annotation. 
+ ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"` + // OCR text detection and tracking. + // Annotations for list of detected text snippets. Each will have list of + // frame information associated with it. + TextAnnotations []*TextAnnotation `protobuf:"bytes,12,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // Annotations for list of objects detected and tracked in video. + ObjectAnnotations []*ObjectTrackingAnnotation `protobuf:"bytes,14,rep,name=object_annotations,json=objectAnnotations,proto3" json:"object_annotations,omitempty"` + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` + // some videos may succeed and some may fail. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{14} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.SegmentLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.ShotLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.FrameLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *VideoAnnotationResults) GetTextAnnotations() []*TextAnnotation { + if m != nil { + return m.TextAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetObjectAnnotations() []*ObjectTrackingAnnotation { + if m != nil { + return m.ObjectAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. 
Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. + AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{15} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Approximate percentage processed thus far. Guaranteed to be + // 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{16} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{17} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{18} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// Normalized bounding polygon for text (that might not be aligned with axis). +// Contains list of the corner points in clockwise order starting from +// top-left corner. 
For example, for a rectangular bounding box: +// When the text is horizontal it might look like: +// 0----1 +// | | +// 3----2 +// +// When it's clockwise rotated 180 degrees around the top-left corner it +// becomes: +// 2----3 +// | | +// 1----0 +// +// and the vertex order will still be (0, 1, 2, 3). Note that values can be less +// than 0, or greater than 1 due to trignometric calculations for location of +// the box. +type NormalizedBoundingPoly struct { + // Normalized vertices of the bounding polygon. + Vertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingPoly) Reset() { *m = NormalizedBoundingPoly{} } +func (m *NormalizedBoundingPoly) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingPoly) ProtoMessage() {} +func (*NormalizedBoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{19} +} +func (m *NormalizedBoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingPoly.Unmarshal(m, b) +} +func (m *NormalizedBoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingPoly.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingPoly.Merge(dst, src) +} +func (m *NormalizedBoundingPoly) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingPoly.Size(m) +} +func (m *NormalizedBoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingPoly proto.InternalMessageInfo + +func (m *NormalizedBoundingPoly) GetVertices() []*NormalizedVertex { + if m != nil { + return m.Vertices + } + return nil +} + +// Video segment level annotation results for text detection. +type TextSegment struct { + // Video segment where a text snippet was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence for the track of detected text. It is calculated as the highest + // over all frames where OCR detected text appears. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Information related to the frames where OCR detected text appears. 
+ Frames []*TextFrame `protobuf:"bytes,3,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSegment) Reset() { *m = TextSegment{} } +func (m *TextSegment) String() string { return proto.CompactTextString(m) } +func (*TextSegment) ProtoMessage() {} +func (*TextSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{20} +} +func (m *TextSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSegment.Unmarshal(m, b) +} +func (m *TextSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSegment.Marshal(b, m, deterministic) +} +func (dst *TextSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSegment.Merge(dst, src) +} +func (m *TextSegment) XXX_Size() int { + return xxx_messageInfo_TextSegment.Size(m) +} +func (m *TextSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TextSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSegment proto.InternalMessageInfo + +func (m *TextSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *TextSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *TextSegment) GetFrames() []*TextFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for text annotation (OCR). +// Contains information regarding timestamp and bounding box locations for the +// frames containing detected OCR text snippets. +type TextFrame struct { + // Bounding polygon of the detected text for this frame. + RotatedBoundingBox *NormalizedBoundingPoly `protobuf:"bytes,1,opt,name=rotated_bounding_box,json=rotatedBoundingBox,proto3" json:"rotated_bounding_box,omitempty"` + // Timestamp of this frame. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextFrame) Reset() { *m = TextFrame{} } +func (m *TextFrame) String() string { return proto.CompactTextString(m) } +func (*TextFrame) ProtoMessage() {} +func (*TextFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{21} +} +func (m *TextFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextFrame.Unmarshal(m, b) +} +func (m *TextFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextFrame.Marshal(b, m, deterministic) +} +func (dst *TextFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextFrame.Merge(dst, src) +} +func (m *TextFrame) XXX_Size() int { + return xxx_messageInfo_TextFrame.Size(m) +} +func (m *TextFrame) XXX_DiscardUnknown() { + xxx_messageInfo_TextFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_TextFrame proto.InternalMessageInfo + +func (m *TextFrame) GetRotatedBoundingBox() *NormalizedBoundingPoly { + if m != nil { + return m.RotatedBoundingBox + } + return nil +} + +func (m *TextFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations related to one detected OCR text snippet. This will contain the +// corresponding text, confidence value, and frame level information for each +// detection. +type TextAnnotation struct { + // The detected text. 
+ Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // All video segments where OCR detected text appears. + Segments []*TextSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{22} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *TextAnnotation) GetSegments() []*TextSegment { + if m != nil { + return m.Segments + } + return nil +} + +// Video frame level annotations for object detection and tracking. This field +// stores per frame location, time offset, and confidence. +type ObjectTrackingFrame struct { + // The normalized bounding box location of this object track for the frame. + NormalizedBoundingBox *NormalizedBoundingBox `protobuf:"bytes,1,opt,name=normalized_bounding_box,json=normalizedBoundingBox,proto3" json:"normalized_bounding_box,omitempty"` + // The timestamp of the frame in microseconds. 
+ TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingFrame) Reset() { *m = ObjectTrackingFrame{} } +func (m *ObjectTrackingFrame) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingFrame) ProtoMessage() {} +func (*ObjectTrackingFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{23} +} +func (m *ObjectTrackingFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingFrame.Unmarshal(m, b) +} +func (m *ObjectTrackingFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingFrame.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingFrame.Merge(dst, src) +} +func (m *ObjectTrackingFrame) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingFrame.Size(m) +} +func (m *ObjectTrackingFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingFrame proto.InternalMessageInfo + +func (m *ObjectTrackingFrame) GetNormalizedBoundingBox() *NormalizedBoundingBox { + if m != nil { + return m.NormalizedBoundingBox + } + return nil +} + +func (m *ObjectTrackingFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations corresponding to one tracked object. +type ObjectTrackingAnnotation struct { + // Entity to specify the object category that this track is labeled as. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Object category's labeling confidence of this track. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Information corresponding to all frames where this object track appears. + Frames []*ObjectTrackingFrame `protobuf:"bytes,2,rep,name=frames,proto3" json:"frames,omitempty"` + // Each object track corresponds to one video segment where it appears. 
+ Segment *VideoSegment `protobuf:"bytes,3,opt,name=segment,proto3" json:"segment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingAnnotation) Reset() { *m = ObjectTrackingAnnotation{} } +func (m *ObjectTrackingAnnotation) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingAnnotation) ProtoMessage() {} +func (*ObjectTrackingAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_2747a56faf436e7b, []int{24} +} +func (m *ObjectTrackingAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingAnnotation.Unmarshal(m, b) +} +func (m *ObjectTrackingAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingAnnotation.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingAnnotation.Merge(dst, src) +} +func (m *ObjectTrackingAnnotation) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingAnnotation.Size(m) +} +func (m *ObjectTrackingAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingAnnotation proto.InternalMessageInfo + +func (m *ObjectTrackingAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *ObjectTrackingAnnotation) GetFrames() []*ObjectTrackingFrame { + if m != nil { + return m.Frames + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1p2beta1.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1p2beta1.VideoContext") + proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1p2beta1.LabelDetectionConfig") + proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1p2beta1.ShotChangeDetectionConfig") + proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1p2beta1.ExplicitContentDetectionConfig") + proto.RegisterType((*TextDetectionConfig)(nil), "google.cloud.videointelligence.v1p2beta1.TextDetectionConfig") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1p2beta1.VideoSegment") + proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1p2beta1.LabelSegment") + proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1p2beta1.LabelFrame") + proto.RegisterType((*Entity)(nil), "google.cloud.videointelligence.v1p2beta1.Entity") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1p2beta1.LabelAnnotation") + proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1p2beta1.ExplicitContentFrame") + proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1p2beta1.ExplicitContentAnnotation") + proto.RegisterType((*NormalizedBoundingBox)(nil), "google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1p2beta1.VideoAnnotationResults") + 
proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1p2beta1.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1p2beta1.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1p2beta1.AnnotateVideoProgress") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.videointelligence.v1p2beta1.NormalizedVertex") + proto.RegisterType((*NormalizedBoundingPoly)(nil), "google.cloud.videointelligence.v1p2beta1.NormalizedBoundingPoly") + proto.RegisterType((*TextSegment)(nil), "google.cloud.videointelligence.v1p2beta1.TextSegment") + proto.RegisterType((*TextFrame)(nil), "google.cloud.videointelligence.v1p2beta1.TextFrame") + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.videointelligence.v1p2beta1.TextAnnotation") + proto.RegisterType((*ObjectTrackingFrame)(nil), "google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame") + proto.RegisterType((*ObjectTrackingAnnotation)(nil), "google.cloud.videointelligence.v1p2beta1.ObjectTrackingAnnotation") + proto.RegisterEnum("google.cloud.videointelligence.v1p2beta1.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p2beta1.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p2beta1.Likelihood", Likelihood_name, Likelihood_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). 
+ AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1p2beta1/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1p2beta1/video_intelligence.proto", fileDescriptor_video_intelligence_2747a56faf436e7b) +} + +var fileDescriptor_video_intelligence_2747a56faf436e7b = []byte{ + // 1882 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x58, 0xcb, 0x6f, 0x23, 0x49, + 0x19, 0xa7, 0xed, 0x24, 0x13, 0x7f, 0x79, 0x39, 0x95, 0x97, 0x13, 0x76, 0x32, 0xa1, 0x97, 0x95, + 0xc2, 0x80, 0x6c, 0x92, 0x85, 0x15, 0x3b, 0xfb, 0x60, 0x6d, 0xa7, 0x33, 0x31, 0x93, 0xb1, 0x4d, + 0xc5, 0x89, 0x76, 0x60, 0x50, 0xab, 0xd3, 0x5d, 0xe9, 0x34, 0xd3, 0xee, 0xea, 0xe9, 0x2e, 0x67, + 0x13, 0x4e, 0x2b, 0xc4, 0x01, 0x89, 0xe3, 0x0a, 0x09, 0x89, 0xff, 0x00, 0x89, 0x0b, 0x57, 0x0e, + 0x5c, 0xb8, 0xb0, 0xe2, 0xc6, 0x09, 0x89, 0x23, 0x07, 0x24, 0xfe, 0x09, 0xd4, 0x55, 0xd5, 0xed, + 0xb6, 0x3b, 0x99, 0xd8, 0xc9, 0x4a, 0xdc, 0xba, 0xbe, 0xaf, 0xea, 0xf7, 0x3d, 0xea, 0x7b, 0x75, + 0x41, 0xd5, 0xa6, 0xd4, 0x76, 0x49, 0xc5, 0x74, 0x69, 0xcf, 0xaa, 0x5c, 0x38, 0x16, 0xa1, 0x8e, + 0xc7, 0x88, 0xeb, 0x3a, 0x36, 0xf1, 0x4c, 0x52, 0xb9, 0xd8, 0xf1, 0x77, 0x4f, 0x09, 0x33, 0x76, + 0x04, 0x4f, 0x4f, 0x33, 0xcb, 0x7e, 0x40, 0x19, 0x45, 0xdb, 0x02, 0xa2, 0xcc, 0x21, 0xca, 0x19, + 0x88, 0x72, 0x02, 0xb1, 0xf1, 0x96, 0x14, 0x66, 0xf8, 0x4e, 0xc5, 0xf0, 0x3c, 0xca, 0x0c, 0xe6, + 0x50, 0x2f, 0x14, 0x38, 0x1b, 0x6f, 0x4b, 0xae, 0x4b, 0x3d, 0x3b, 0xe8, 0x79, 0x9e, 0xe3, 0xd9, + 0x15, 0xea, 0x93, 0x60, 0x60, 0xd3, 0xa6, 0xdc, 0xc4, 0x57, 0xa7, 0xbd, 0xb3, 0x8a, 0xd5, 0x13, + 0x1b, 0x24, 0xff, 0xd1, 0x30, 0x9f, 0x39, 0x5d, 0x12, 0x32, 0xa3, 0xeb, 0xcb, 0x0d, 0x6b, 0x72, + 0x43, 0xe0, 0x9b, 0x95, 0x90, 0x19, 0xac, 0x27, 0x91, 0xd5, 0xbf, 0xe6, 0x60, 0xb9, 0x2a, 0x94, + 0x22, 0x27, 0x91, 0x11, 0x98, 0xbc, 0xee, 0x91, 0x90, 0xa1, 0xaf, 0x43, 0xc1, 0xf1, 0xfc, 0x1e, + 0xd3, 0x7b, 0x81, 0x53, 0x52, 0xb6, 0x94, 0xed, 0x02, 0x9e, 0xe6, 0x84, 0xe3, 0xc0, 0x41, 0x6f, + 0xc3, 0x9c, 0x60, 0x9a, 0xd4, 0x63, 0xc4, 0x63, 0xa5, 0xa9, 0x2d, 0x65, 0x7b, 0x16, 0xcf, 0x72, + 0x62, 0x5d, 0xd0, 0xd0, 0x73, 0x98, 0x3e, 
0x23, 0x06, 0xeb, 0x05, 0x24, 0x2c, 0xe5, 0xb6, 0xf2, + 0xdb, 0xf3, 0xbb, 0x3b, 0xe5, 0x51, 0x9d, 0x56, 0xde, 0x17, 0x27, 0x71, 0x02, 0x81, 0x7e, 0x0a, + 0x73, 0xe2, 0x32, 0xb8, 0xcc, 0x4b, 0x56, 0xca, 0x6f, 0x29, 0xdb, 0x33, 0xbb, 0xef, 0x8d, 0x8e, + 0xc9, 0xed, 0xab, 0x8b, 0xd3, 0x78, 0xf6, 0x22, 0xb5, 0x42, 0x0f, 0x01, 0x68, 0x8f, 0xc5, 0xe6, + 0x4e, 0x70, 0x73, 0x0b, 0x82, 0x12, 0xd9, 0xfb, 0x08, 0x66, 0x5c, 0x6a, 0x72, 0x8f, 0xeb, 0x8e, + 0x55, 0x9a, 0xe4, 0x7c, 0x88, 0x49, 0x0d, 0x4b, 0xfd, 0xcf, 0x04, 0xcc, 0xa6, 0xe1, 0x11, 0x86, + 0xe9, 0x90, 0xd8, 0x5d, 0xe2, 0xb1, 0xb0, 0xa4, 0x6c, 0xe5, 0xef, 0xa0, 0xe8, 0x91, 0x38, 0x8e, + 0x13, 0x1c, 0xc4, 0x60, 0xd5, 0x35, 0x4e, 0x89, 0xab, 0x5b, 0x84, 0x11, 0x93, 0x2b, 0x63, 0x52, + 0xef, 0xcc, 0xb1, 0x4b, 0x39, 0xee, 0x8a, 0x8f, 0x47, 0x97, 0x70, 0x18, 0xe1, 0xec, 0xc5, 0x30, + 0x75, 0x8e, 0x82, 0x97, 0xdd, 0x6b, 0xa8, 0xe8, 0x57, 0x0a, 0xbc, 0x15, 0x9e, 0x53, 0xa6, 0x9b, + 0xe7, 0x86, 0x67, 0x93, 0xac, 0x70, 0x71, 0x0f, 0xf5, 0xd1, 0x85, 0x1f, 0x9d, 0x53, 0x56, 0xe7, + 0x60, 0xc3, 0x1a, 0xac, 0x87, 0x37, 0xb1, 0xd0, 0x17, 0x0a, 0x7c, 0x83, 0x5c, 0xfa, 0xae, 0x63, + 0x3a, 0x49, 0xd8, 0x65, 0x75, 0x99, 0xe0, 0xba, 0x1c, 0x8c, 0xae, 0x8b, 0x26, 0x21, 0x65, 0xd0, + 0x0e, 0x2b, 0xb4, 0x49, 0xde, 0xc8, 0x47, 0xaf, 0x61, 0x25, 0xba, 0xee, 0xac, 0x22, 0xd3, 0x5c, + 0x91, 0x8f, 0x46, 0x57, 0xa4, 0x43, 0x2e, 0x33, 0xd2, 0x97, 0x58, 0x96, 0xa8, 0xfe, 0x4d, 0x81, + 0xe5, 0xeb, 0xae, 0x0f, 0x79, 0xb0, 0x3c, 0x1c, 0x1e, 0x5d, 0x6a, 0x11, 0x9e, 0xbc, 0xf3, 0xbb, + 0x1f, 0xde, 0x35, 0x38, 0x9e, 0x53, 0x8b, 0x60, 0xe4, 0x66, 0x68, 0xe8, 0xdb, 0xb0, 0x18, 0x8a, + 0x5a, 0x66, 0x04, 0x57, 0xba, 0x69, 0x74, 0x49, 0x60, 0xf0, 0x48, 0x9c, 0xc6, 0xc5, 0x3e, 0xa3, + 0xce, 0xe9, 0x68, 0x19, 0x26, 0x23, 0x65, 0x5c, 0x1e, 0x2d, 0x05, 0x2c, 0x16, 0xea, 0x0e, 0xac, + 0xdf, 0x18, 0x0c, 0xfd, 0x23, 0x4a, 0xfa, 0xc8, 0x7b, 0xb0, 0xf9, 0xe6, 0x3b, 0xbb, 0xe1, 0xdc, + 0x87, 0xb0, 0x74, 0x8d, 0x8b, 0xd1, 0x3b, 0x30, 0xef, 0x1a, 0x9e, 0xdd, 0x33, 0x6c, 0xa2, 0x9f, + 0x3b, 0x71, 0xb6, 0x16, 0xf0, 0x5c, 0x4c, 0x3d, 0x88, 0x88, 0xea, 0xef, 0x14, 0x99, 0xdf, 0x32, + 0x2b, 0x91, 0xc6, 0x8d, 0x0f, 0x98, 0x1e, 0x55, 0x5a, 0x9d, 0x9e, 0x9d, 0x85, 0x84, 0x71, 0x81, + 0x33, 0xbb, 0xeb, 0xb1, 0xa7, 0xe3, 0x6a, 0x5c, 0xde, 0x93, 0xd5, 0x1a, 0x2f, 0xf0, 0x33, 0x1d, + 0xa7, 0x4b, 0x5a, 0xfc, 0x04, 0xaa, 0xc2, 0x02, 0xf1, 0xac, 0x01, 0x90, 0xdc, 0x6d, 0x20, 0x73, + 0xc4, 0xb3, 0xfa, 0x10, 0xea, 0xe7, 0x0a, 0xcc, 0xf2, 0x1b, 0x8b, 0x55, 0x6b, 0xc3, 0x03, 0x59, + 0x32, 0xa4, 0x42, 0x77, 0xad, 0x3c, 0x31, 0x0c, 0xda, 0x04, 0xe0, 0x61, 0x6d, 0x45, 0xbb, 0xb9, + 0x82, 0x39, 0x9c, 0xa2, 0xa8, 0xe7, 0x00, 0x5c, 0x83, 0xfd, 0xc0, 0xe8, 0x12, 0xf4, 0x04, 0x66, + 0xc6, 0x72, 0x0a, 0xb0, 0xbe, 0x3f, 0x6e, 0x93, 0xe4, 0xc2, 0x94, 0xe6, 0x31, 0x87, 0x5d, 0x45, + 0xfd, 0x89, 0xf0, 0xaf, 0xa8, 0x20, 0xcb, 0xfe, 0x24, 0x08, 0x0d, 0x0b, 0x6d, 0xc1, 0x8c, 0x45, + 0x42, 0x33, 0x70, 0xfc, 0x48, 0x02, 0xc7, 0x29, 0xe0, 0x34, 0x29, 0xea, 0x60, 0xc9, 0xbd, 0x9b, + 0x51, 0x96, 0x88, 0xb8, 0x9c, 0x8d, 0x89, 0x75, 0x6a, 0x11, 0xf5, 0x5f, 0x39, 0x58, 0xe0, 0x86, + 0x55, 0x93, 0xb6, 0x8d, 0x0e, 0x60, 0x4a, 0x88, 0x91, 0x86, 0x7d, 0x77, 0x8c, 0x5a, 0xc3, 0xcf, + 0x61, 0x79, 0x1e, 0xfd, 0x0c, 0x16, 0x4d, 0x83, 0x11, 0x9b, 0x06, 0x57, 0x3a, 0x27, 0x39, 0xb2, + 0x51, 0xde, 0x05, 0xb4, 0x18, 0x43, 0x69, 0x12, 0x69, 0xa0, 0x03, 0xe5, 0xc7, 0xed, 0x40, 0xe9, + 0x80, 0x4a, 0x75, 0xa0, 0x43, 0x98, 0x3a, 0x8b, 0xee, 0x38, 0x2c, 0x4d, 0x70, 0xc4, 0xef, 0x8d, + 0x89, 0xc8, 0x03, 0x04, 0x4b, 0x0c, 0xf5, 0x2f, 0x0a, 0x2c, 0x0f, 
0xe5, 0xf2, 0xfd, 0x23, 0xe8, + 0x15, 0xac, 0xfa, 0x34, 0xf0, 0xa8, 0x1d, 0x18, 0xfe, 0xf9, 0x95, 0xee, 0x3a, 0xaf, 0x88, 0xeb, + 0x9c, 0x53, 0x6a, 0xf1, 0x28, 0x98, 0x1f, 0x4b, 0xe5, 0xe4, 0x2c, 0x5e, 0x49, 0x61, 0xf6, 0xc9, + 0x6a, 0x08, 0xeb, 0x43, 0x06, 0xa4, 0x22, 0xe5, 0x24, 0x71, 0x96, 0x18, 0x00, 0x3e, 0xbe, 0x73, + 0x57, 0x1a, 0x74, 0xdb, 0x2b, 0x58, 0x69, 0xd2, 0xa0, 0x6b, 0xb8, 0xce, 0x2f, 0x88, 0x55, 0xa3, + 0x3d, 0xcf, 0x72, 0x3c, 0xbb, 0x46, 0x2f, 0x11, 0x82, 0x09, 0x97, 0x9c, 0x09, 0x7f, 0xe5, 0x30, + 0xff, 0x46, 0x45, 0xc8, 0x33, 0xea, 0xcb, 0x4c, 0x8a, 0x3e, 0xa3, 0xf2, 0x18, 0x38, 0xf6, 0xb9, + 0x98, 0x9f, 0x72, 0x58, 0x2c, 0xd0, 0x2a, 0x4c, 0x9d, 0x52, 0xc6, 0x68, 0x97, 0xb7, 0xd0, 0x1c, + 0x96, 0x2b, 0xf5, 0xbf, 0x53, 0xb0, 0xca, 0x8b, 0x42, 0xdf, 0x30, 0x4c, 0xc2, 0x9e, 0xcb, 0xc2, + 0x37, 0x4f, 0x88, 0x3d, 0x58, 0x97, 0x51, 0xa3, 0x8b, 0xa6, 0x94, 0x9a, 0x7c, 0x65, 0x90, 0xbf, + 0x3f, 0x66, 0xf0, 0xa4, 0x34, 0x58, 0x93, 0xd8, 0x43, 0xf4, 0x10, 0x51, 0x58, 0xe5, 0xb3, 0x4a, + 0x56, 0x66, 0xfe, 0xbe, 0x32, 0x97, 0x23, 0xe0, 0x8c, 0xc0, 0xd7, 0xb0, 0xc6, 0xaf, 0xe5, 0x1a, + 0x89, 0x13, 0xf7, 0x95, 0xb8, 0xc2, 0x91, 0x33, 0x22, 0x0d, 0x28, 0x72, 0x1b, 0xd3, 0xb2, 0xa6, + 0xee, 0x35, 0x62, 0x2e, 0x44, 0x78, 0x69, 0x11, 0x0c, 0x96, 0x92, 0x59, 0xab, 0x2f, 0xa6, 0xf4, + 0x60, 0xdc, 0x49, 0xef, 0xc6, 0xe4, 0xc0, 0x28, 0xc6, 0x4f, 0x25, 0x8c, 0x09, 0x45, 0x3e, 0x4c, + 0xa5, 0x0d, 0x9b, 0xe5, 0x86, 0xfd, 0x60, 0xbc, 0x39, 0x2a, 0x25, 0x67, 0x81, 0x0d, 0xac, 0xa3, + 0x0b, 0x43, 0xf4, 0xf4, 0xe7, 0xc4, 0x1c, 0x14, 0x33, 0xcf, 0xc5, 0xd4, 0x46, 0x17, 0xd3, 0xe2, + 0x18, 0x9d, 0xc0, 0x30, 0x5f, 0x39, 0x9e, 0x9d, 0x12, 0xb8, 0x28, 0xd0, 0xd3, 0x22, 0xb7, 0x61, + 0x92, 0x04, 0x01, 0x0d, 0x4a, 0x05, 0xee, 0x3f, 0x14, 0x4b, 0x09, 0x7c, 0xb3, 0x7c, 0xc4, 0x7f, + 0xc6, 0xb0, 0xd8, 0xa0, 0xfe, 0x5a, 0x81, 0x95, 0xa1, 0xbf, 0xb1, 0xd0, 0xa7, 0x5e, 0x48, 0x10, + 0x05, 0xd4, 0xd7, 0x57, 0x0f, 0x44, 0x0a, 0xca, 0xc2, 0xf2, 0xc9, 0x98, 0xd7, 0x9e, 0x49, 0x65, + 0xbc, 0x68, 0x0c, 0x93, 0xd4, 0x7f, 0x2a, 0xb0, 0x36, 0xb4, 0xbb, 0x1d, 0x50, 0x3b, 0x20, 0xe1, + 0x2d, 0x99, 0xff, 0x2d, 0x28, 0xfa, 0x72, 0xa3, 0xee, 0x93, 0xc0, 0x8c, 0xe6, 0x90, 0xa8, 0xfc, + 0x4c, 0xe2, 0x85, 0x98, 0xde, 0x16, 0x64, 0xf4, 0x3e, 0x40, 0x7f, 0x88, 0x92, 0xff, 0x11, 0x1b, + 0x99, 0x32, 0xdf, 0x89, 0xff, 0x65, 0x71, 0x21, 0x19, 0x9f, 0xd0, 0x07, 0x30, 0xd3, 0xf3, 0x2d, + 0x83, 0x11, 0x71, 0x76, 0xe2, 0xd6, 0xb3, 0x20, 0xb6, 0x47, 0x04, 0xf5, 0x37, 0xc3, 0x6e, 0x4e, + 0x2c, 0x0b, 0x60, 0x29, 0xe5, 0xe6, 0x58, 0x5f, 0xe9, 0xe7, 0xea, 0x9d, 0xfd, 0x1c, 0xe3, 0xe3, + 0xd4, 0x25, 0xc6, 0x34, 0xb5, 0x0c, 0xc5, 0x7e, 0x3d, 0x3f, 0x21, 0x01, 0x23, 0x97, 0x68, 0x16, + 0x94, 0x4b, 0x59, 0xc7, 0x15, 0xbe, 0xba, 0x92, 0x25, 0x5c, 0xb9, 0x52, 0x7d, 0x58, 0xcd, 0xd6, + 0xff, 0x36, 0x75, 0xaf, 0xd0, 0x09, 0x4c, 0x5f, 0x90, 0x80, 0x39, 0x66, 0xd2, 0x73, 0x9e, 0x8c, + 0xae, 0xf2, 0xb0, 0x0e, 0x38, 0xc1, 0x52, 0xff, 0xae, 0xc0, 0x4c, 0x94, 0x57, 0xff, 0xb7, 0x09, + 0x13, 0x3d, 0x4b, 0x7a, 0xa9, 0xa8, 0xe3, 0xef, 0x8e, 0x57, 0x10, 0x06, 0x1b, 0xe8, 0x9f, 0x15, + 0x28, 0x24, 0x54, 0x14, 0xc0, 0x72, 0xc0, 0x23, 0xc1, 0xd2, 0x4f, 0xa5, 0x33, 0xf5, 0x53, 0x7a, + 0x29, 0x2d, 0xfb, 0xe4, 0x2e, 0x0e, 0x4c, 0x5f, 0x0a, 0x46, 0x12, 0x3d, 0xdd, 0xa9, 0x87, 0x06, + 0x9c, 0xdc, 0x18, 0x03, 0x8e, 0xfa, 0x19, 0xcc, 0x0f, 0xd6, 0xb8, 0xa8, 0xef, 0xf3, 0x07, 0x11, + 0x91, 0x89, 0xfc, 0x1b, 0xfd, 0x38, 0x35, 0xfd, 0x89, 0x76, 0xfb, 0xfd, 0xf1, 0x5c, 0x96, 0x19, + 0xfe, 0xd4, 0x2f, 0x15, 0x58, 0x1a, 0x2c, 0x7b, 0xc2, 0x81, 0x9f, 0xc1, 0x9a, 0x97, 0x98, 
0x7e, + 0x9d, 0x0f, 0x7f, 0x78, 0x1f, 0x1f, 0xd6, 0xe8, 0x25, 0x5e, 0xf1, 0xae, 0x9d, 0x77, 0xee, 0xe3, + 0xc5, 0x3f, 0xe6, 0xa0, 0x74, 0x53, 0x0d, 0xff, 0x0a, 0x67, 0xfc, 0xc1, 0xb8, 0x9e, 0xc8, 0xc4, + 0xf5, 0x71, 0x12, 0xd7, 0xe2, 0x92, 0x3e, 0xba, 0x6b, 0x07, 0x1a, 0x88, 0xf0, 0x74, 0x82, 0xe6, + 0xbf, 0x92, 0x04, 0x7d, 0xfc, 0x7b, 0x05, 0x1e, 0xc8, 0x37, 0x39, 0xb4, 0x06, 0x4b, 0xfb, 0x5a, + 0xb5, 0x73, 0x8c, 0x35, 0xfd, 0xb8, 0x79, 0xd4, 0xd6, 0xea, 0x8d, 0xfd, 0x86, 0xb6, 0x57, 0xfc, + 0x1a, 0x5a, 0x82, 0x85, 0xc3, 0x6a, 0x4d, 0x3b, 0xd4, 0xf7, 0xb4, 0x8e, 0x56, 0xef, 0x34, 0x5a, + 0xcd, 0xa2, 0x82, 0xd6, 0x61, 0xe5, 0xe8, 0xa0, 0xd5, 0xd1, 0xeb, 0x07, 0xd5, 0xe6, 0x53, 0x2d, + 0xc5, 0xca, 0xa1, 0x4d, 0xd8, 0xd0, 0x3e, 0x6d, 0x1f, 0x36, 0xea, 0x8d, 0x8e, 0x5e, 0x6f, 0x35, + 0x3b, 0x5a, 0xb3, 0x93, 0xe2, 0xe7, 0x11, 0x82, 0xf9, 0x8e, 0xf6, 0x69, 0x9a, 0xf6, 0x20, 0x92, + 0xd1, 0xaa, 0xfd, 0x48, 0xab, 0x77, 0xf4, 0x0e, 0xae, 0xd6, 0x9f, 0x35, 0x9a, 0x4f, 0x8b, 0x85, + 0xc7, 0x01, 0xa0, 0xec, 0xa3, 0x05, 0xfa, 0x26, 0x6c, 0x0d, 0xa9, 0xa3, 0x3f, 0x6f, 0xed, 0x0d, + 0x2b, 0x3d, 0x07, 0x05, 0xae, 0x5f, 0xc4, 0x2a, 0x2a, 0x68, 0x1e, 0x60, 0x1f, 0x57, 0x9f, 0x6b, + 0x62, 0x9d, 0x8b, 0x8c, 0xe5, 0xec, 0x6a, 0x73, 0x4f, 0x4f, 0x31, 0xf2, 0x8f, 0x19, 0x40, 0xff, + 0x4f, 0x00, 0x6d, 0xc0, 0xea, 0x61, 0xe3, 0x99, 0x76, 0xd8, 0x38, 0x68, 0xb5, 0xf6, 0x86, 0x24, + 0x2c, 0xc2, 0xdc, 0x89, 0x86, 0x5f, 0xe8, 0xc7, 0x4d, 0xbe, 0xe5, 0x45, 0x51, 0x41, 0xb3, 0x30, + 0x9d, 0xac, 0x72, 0xd1, 0xaa, 0xdd, 0x3a, 0x3a, 0x6a, 0xd4, 0x0e, 0xb5, 0x62, 0x1e, 0x01, 0x4c, + 0x49, 0xce, 0x04, 0x5a, 0x80, 0x19, 0x7e, 0x54, 0x12, 0x26, 0x77, 0xff, 0xa4, 0x40, 0x89, 0xdf, + 0x50, 0x23, 0x75, 0x7b, 0x47, 0x24, 0xb8, 0x70, 0x4c, 0x82, 0x7e, 0xab, 0xc0, 0xdc, 0x40, 0x5f, + 0x43, 0x63, 0xfc, 0x73, 0x5c, 0xf7, 0x0a, 0xbc, 0xf1, 0x30, 0x3e, 0x9f, 0x7a, 0x9e, 0x2e, 0xb7, + 0xe2, 0xe7, 0x69, 0xf5, 0x9d, 0x5f, 0xfe, 0xe3, 0xdf, 0x5f, 0xe4, 0x1e, 0xa9, 0x1b, 0xc3, 0x2f, + 0xe6, 0xe1, 0x13, 0xd9, 0xe8, 0xc8, 0x13, 0xe5, 0x71, 0xed, 0xf3, 0x1c, 0x7c, 0xc7, 0xa4, 0xdd, + 0x91, 0x75, 0xa9, 0x3d, 0xbc, 0xc9, 0xc4, 0x76, 0x94, 0xd4, 0x6d, 0xe5, 0x27, 0x2f, 0x24, 0x94, + 0x4d, 0xa3, 0x1f, 0xf6, 0x32, 0x0d, 0xec, 0x8a, 0x4d, 0x3c, 0x9e, 0xf2, 0x15, 0xc1, 0x32, 0x7c, + 0x27, 0xbc, 0xfd, 0x7d, 0xff, 0x83, 0x0c, 0xef, 0x0f, 0xb9, 0xed, 0xa7, 0x02, 0xbb, 0xce, 0xd5, + 0xcc, 0x68, 0x52, 0x3e, 0xd9, 0x69, 0xef, 0xd6, 0xa2, 0xc3, 0x5f, 0xc6, 0x5b, 0x5f, 0xf2, 0xad, + 0x2f, 0x33, 0x5b, 0x5f, 0x9e, 0xc4, 0x72, 0x4e, 0xa7, 0xb8, 0x6e, 0xef, 0xfe, 0x2f, 0x00, 0x00, + 0xff, 0xff, 0x28, 0x2e, 0x0c, 0x2f, 0x7a, 0x18, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1/video_intelligence.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1/video_intelligence.pb.go new file mode 100644 index 0000000..1f903b0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1/video_intelligence.pb.go @@ -0,0 +1,2852 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto + +package videointelligence // import "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Video annotation feature. +type Feature int32 + +const ( + // Unspecified. + Feature_FEATURE_UNSPECIFIED Feature = 0 + // Label detection. Detect objects, such as dog or flower. + Feature_LABEL_DETECTION Feature = 1 + // Shot change detection. + Feature_SHOT_CHANGE_DETECTION Feature = 2 + // Explicit content detection. + Feature_EXPLICIT_CONTENT_DETECTION Feature = 3 + // OCR text detection and tracking. + Feature_TEXT_DETECTION Feature = 7 + // Object detection and tracking. + Feature_OBJECT_TRACKING Feature = 9 +) + +var Feature_name = map[int32]string{ + 0: "FEATURE_UNSPECIFIED", + 1: "LABEL_DETECTION", + 2: "SHOT_CHANGE_DETECTION", + 3: "EXPLICIT_CONTENT_DETECTION", + 7: "TEXT_DETECTION", + 9: "OBJECT_TRACKING", +} +var Feature_value = map[string]int32{ + "FEATURE_UNSPECIFIED": 0, + "LABEL_DETECTION": 1, + "SHOT_CHANGE_DETECTION": 2, + "EXPLICIT_CONTENT_DETECTION": 3, + "TEXT_DETECTION": 7, + "OBJECT_TRACKING": 9, +} + +func (x Feature) String() string { + return proto.EnumName(Feature_name, int32(x)) +} +func (Feature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{0} +} + +// Label detection mode. +type LabelDetectionMode int32 + +const ( + // Unspecified. + LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED LabelDetectionMode = 0 + // Detect shot-level labels. + LabelDetectionMode_SHOT_MODE LabelDetectionMode = 1 + // Detect frame-level labels. + LabelDetectionMode_FRAME_MODE LabelDetectionMode = 2 + // Detect both shot-level and frame-level labels. + LabelDetectionMode_SHOT_AND_FRAME_MODE LabelDetectionMode = 3 +) + +var LabelDetectionMode_name = map[int32]string{ + 0: "LABEL_DETECTION_MODE_UNSPECIFIED", + 1: "SHOT_MODE", + 2: "FRAME_MODE", + 3: "SHOT_AND_FRAME_MODE", +} +var LabelDetectionMode_value = map[string]int32{ + "LABEL_DETECTION_MODE_UNSPECIFIED": 0, + "SHOT_MODE": 1, + "FRAME_MODE": 2, + "SHOT_AND_FRAME_MODE": 3, +} + +func (x LabelDetectionMode) String() string { + return proto.EnumName(LabelDetectionMode_name, int32(x)) +} +func (LabelDetectionMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{1} +} + +// Bucketized representation of likelihood. +type Likelihood int32 + +const ( + // Unspecified likelihood. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Very unlikely. 
+ Likelihood_VERY_UNLIKELY Likelihood = 1 + // Unlikely. + Likelihood_UNLIKELY Likelihood = 2 + // Possible. + Likelihood_POSSIBLE Likelihood = 3 + // Likely. + Likelihood_LIKELY Likelihood = 4 + // Very likely. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{2} +} + +// Streaming video annotation feature. +type StreamingFeature int32 + +const ( + // Unspecified. + StreamingFeature_STREAMING_FEATURE_UNSPECIFIED StreamingFeature = 0 + // Label detection. Detect objects, such as dog or flower. + StreamingFeature_STREAMING_LABEL_DETECTION StreamingFeature = 1 + // Shot change detection. + StreamingFeature_STREAMING_SHOT_CHANGE_DETECTION StreamingFeature = 2 + // Explicit content detection. + StreamingFeature_STREAMING_EXPLICIT_CONTENT_DETECTION StreamingFeature = 3 + // Object detection and tracking. + StreamingFeature_STREAMING_OBJECT_TRACKING StreamingFeature = 4 +) + +var StreamingFeature_name = map[int32]string{ + 0: "STREAMING_FEATURE_UNSPECIFIED", + 1: "STREAMING_LABEL_DETECTION", + 2: "STREAMING_SHOT_CHANGE_DETECTION", + 3: "STREAMING_EXPLICIT_CONTENT_DETECTION", + 4: "STREAMING_OBJECT_TRACKING", +} +var StreamingFeature_value = map[string]int32{ + "STREAMING_FEATURE_UNSPECIFIED": 0, + "STREAMING_LABEL_DETECTION": 1, + "STREAMING_SHOT_CHANGE_DETECTION": 2, + "STREAMING_EXPLICIT_CONTENT_DETECTION": 3, + "STREAMING_OBJECT_TRACKING": 4, +} + +func (x StreamingFeature) String() string { + return proto.EnumName(StreamingFeature_name, int32(x)) +} +func (StreamingFeature) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{3} +} + +// Video annotation request. +type AnnotateVideoRequest struct { + // Input video location. Currently, only + // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are + // supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). A video + // URI may include wildcards in `object-id`, and thus identify multiple + // videos. Supported wildcards: '*' to match 0 or more characters; + // '?' to match 1 character. If unset, the input video should be embedded + // in the request as `input_content`. If set, `input_content` should be unset. + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // The video data bytes. + // If unset, the input video(s) should be specified via `input_uri`. + // If set, `input_uri` should be unset. + InputContent []byte `protobuf:"bytes,6,opt,name=input_content,json=inputContent,proto3" json:"input_content,omitempty"` + // Requested video annotation features. + Features []Feature `protobuf:"varint,2,rep,packed,name=features,proto3,enum=google.cloud.videointelligence.v1p3beta1.Feature" json:"features,omitempty"` + // Additional video context and/or feature-specific parameters. 
+ VideoContext *VideoContext `protobuf:"bytes,3,opt,name=video_context,json=videoContext,proto3" json:"video_context,omitempty"` + // Optional location where the output (in JSON format) should be stored. + // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) + // URIs are supported, which must be specified in the following format: + // `gs://bucket-id/object-id` (other URI formats return + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For + // more information, see [Request URIs](/storage/docs/reference-uris). + OutputUri string `protobuf:"bytes,4,opt,name=output_uri,json=outputUri,proto3" json:"output_uri,omitempty"` + // Optional cloud region where annotation should take place. Supported cloud + // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region + // is specified, a region will be determined based on video file location. + LocationId string `protobuf:"bytes,5,opt,name=location_id,json=locationId,proto3" json:"location_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoRequest) Reset() { *m = AnnotateVideoRequest{} } +func (m *AnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoRequest) ProtoMessage() {} +func (*AnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{0} +} +func (m *AnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoRequest.Unmarshal(m, b) +} +func (m *AnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoRequest.Merge(dst, src) +} +func (m *AnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoRequest.Size(m) +} +func (m *AnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoRequest proto.InternalMessageInfo + +func (m *AnnotateVideoRequest) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetInputContent() []byte { + if m != nil { + return m.InputContent + } + return nil +} + +func (m *AnnotateVideoRequest) GetFeatures() []Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateVideoRequest) GetVideoContext() *VideoContext { + if m != nil { + return m.VideoContext + } + return nil +} + +func (m *AnnotateVideoRequest) GetOutputUri() string { + if m != nil { + return m.OutputUri + } + return "" +} + +func (m *AnnotateVideoRequest) GetLocationId() string { + if m != nil { + return m.LocationId + } + return "" +} + +// Video context and/or feature-specific parameters. +type VideoContext struct { + // Video segments to annotate. The segments may overlap and are not required + // to be contiguous or span the whole video. If unspecified, each video is + // treated as a single segment. + Segments []*VideoSegment `protobuf:"bytes,1,rep,name=segments,proto3" json:"segments,omitempty"` + // Config for LABEL_DETECTION. + LabelDetectionConfig *LabelDetectionConfig `protobuf:"bytes,2,opt,name=label_detection_config,json=labelDetectionConfig,proto3" json:"label_detection_config,omitempty"` + // Config for SHOT_CHANGE_DETECTION. 
+ ShotChangeDetectionConfig *ShotChangeDetectionConfig `protobuf:"bytes,3,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3" json:"shot_change_detection_config,omitempty"` + // Config for EXPLICIT_CONTENT_DETECTION. + ExplicitContentDetectionConfig *ExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3" json:"explicit_content_detection_config,omitempty"` + // Config for TEXT_DETECTION. + TextDetectionConfig *TextDetectionConfig `protobuf:"bytes,8,opt,name=text_detection_config,json=textDetectionConfig,proto3" json:"text_detection_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoContext) Reset() { *m = VideoContext{} } +func (m *VideoContext) String() string { return proto.CompactTextString(m) } +func (*VideoContext) ProtoMessage() {} +func (*VideoContext) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{1} +} +func (m *VideoContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoContext.Unmarshal(m, b) +} +func (m *VideoContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoContext.Marshal(b, m, deterministic) +} +func (dst *VideoContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoContext.Merge(dst, src) +} +func (m *VideoContext) XXX_Size() int { + return xxx_messageInfo_VideoContext.Size(m) +} +func (m *VideoContext) XXX_DiscardUnknown() { + xxx_messageInfo_VideoContext.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoContext proto.InternalMessageInfo + +func (m *VideoContext) GetSegments() []*VideoSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *VideoContext) GetLabelDetectionConfig() *LabelDetectionConfig { + if m != nil { + return m.LabelDetectionConfig + } + return nil +} + +func (m *VideoContext) GetShotChangeDetectionConfig() *ShotChangeDetectionConfig { + if m != nil { + return m.ShotChangeDetectionConfig + } + return nil +} + +func (m *VideoContext) GetExplicitContentDetectionConfig() *ExplicitContentDetectionConfig { + if m != nil { + return m.ExplicitContentDetectionConfig + } + return nil +} + +func (m *VideoContext) GetTextDetectionConfig() *TextDetectionConfig { + if m != nil { + return m.TextDetectionConfig + } + return nil +} + +// Config for LABEL_DETECTION. +type LabelDetectionConfig struct { + // What labels should be detected with LABEL_DETECTION, in addition to + // video-level labels or segment-level labels. + // If unspecified, defaults to `SHOT_MODE`. + LabelDetectionMode LabelDetectionMode `protobuf:"varint,1,opt,name=label_detection_mode,json=labelDetectionMode,proto3,enum=google.cloud.videointelligence.v1p3beta1.LabelDetectionMode" json:"label_detection_mode,omitempty"` + // Whether the video has been shot from a stationary (i.e. non-moving) camera. + // When set to true, might improve detection accuracy for moving objects. + // Should be used with `SHOT_AND_FRAME_MODE` enabled. + StationaryCamera bool `protobuf:"varint,2,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + // Model to use for label detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". 
+ Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelDetectionConfig) Reset() { *m = LabelDetectionConfig{} } +func (m *LabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*LabelDetectionConfig) ProtoMessage() {} +func (*LabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{2} +} +func (m *LabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelDetectionConfig.Unmarshal(m, b) +} +func (m *LabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *LabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelDetectionConfig.Merge(dst, src) +} +func (m *LabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_LabelDetectionConfig.Size(m) +} +func (m *LabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelDetectionConfig proto.InternalMessageInfo + +func (m *LabelDetectionConfig) GetLabelDetectionMode() LabelDetectionMode { + if m != nil { + return m.LabelDetectionMode + } + return LabelDetectionMode_LABEL_DETECTION_MODE_UNSPECIFIED +} + +func (m *LabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +func (m *LabelDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for SHOT_CHANGE_DETECTION. +type ShotChangeDetectionConfig struct { + // Model to use for shot change detection. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ShotChangeDetectionConfig) Reset() { *m = ShotChangeDetectionConfig{} } +func (m *ShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ShotChangeDetectionConfig) ProtoMessage() {} +func (*ShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{3} +} +func (m *ShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *ShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ShotChangeDetectionConfig.Merge(dst, src) +} +func (m *ShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ShotChangeDetectionConfig.Size(m) +} +func (m *ShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ShotChangeDetectionConfig proto.InternalMessageInfo + +func (m *ShotChangeDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION. +type ExplicitContentDetectionConfig struct { + // Model to use for explicit content detection. 
+ // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentDetectionConfig) Reset() { *m = ExplicitContentDetectionConfig{} } +func (m *ExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentDetectionConfig) ProtoMessage() {} +func (*ExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{4} +} +func (m *ExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *ExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *ExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_ExplicitContentDetectionConfig.Size(m) +} +func (m *ExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentDetectionConfig proto.InternalMessageInfo + +func (m *ExplicitContentDetectionConfig) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// Config for TEXT_DETECTION. +type TextDetectionConfig struct { + // Language hint can be specified if the language to be detected is known a + // priori. It can increase the accuracy of the detection. Language hint must + // be language code in BCP-47 format. + // + // Automatic language detection is performed if no hint is provided. + LanguageHints []string `protobuf:"bytes,1,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextDetectionConfig) Reset() { *m = TextDetectionConfig{} } +func (m *TextDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*TextDetectionConfig) ProtoMessage() {} +func (*TextDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{5} +} +func (m *TextDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextDetectionConfig.Unmarshal(m, b) +} +func (m *TextDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *TextDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextDetectionConfig.Merge(dst, src) +} +func (m *TextDetectionConfig) XXX_Size() int { + return xxx_messageInfo_TextDetectionConfig.Size(m) +} +func (m *TextDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TextDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TextDetectionConfig proto.InternalMessageInfo + +func (m *TextDetectionConfig) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +// Video segment. +type VideoSegment struct { + // Time-offset, relative to the beginning of the video, + // corresponding to the start of the segment (inclusive). 
+ StartTimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=start_time_offset,json=startTimeOffset,proto3" json:"start_time_offset,omitempty"` + // Time-offset, relative to the beginning of the video, + // corresponding to the end of the segment (inclusive). + EndTimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=end_time_offset,json=endTimeOffset,proto3" json:"end_time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoSegment) Reset() { *m = VideoSegment{} } +func (m *VideoSegment) String() string { return proto.CompactTextString(m) } +func (*VideoSegment) ProtoMessage() {} +func (*VideoSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{6} +} +func (m *VideoSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoSegment.Unmarshal(m, b) +} +func (m *VideoSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoSegment.Marshal(b, m, deterministic) +} +func (dst *VideoSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoSegment.Merge(dst, src) +} +func (m *VideoSegment) XXX_Size() int { + return xxx_messageInfo_VideoSegment.Size(m) +} +func (m *VideoSegment) XXX_DiscardUnknown() { + xxx_messageInfo_VideoSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoSegment proto.InternalMessageInfo + +func (m *VideoSegment) GetStartTimeOffset() *duration.Duration { + if m != nil { + return m.StartTimeOffset + } + return nil +} + +func (m *VideoSegment) GetEndTimeOffset() *duration.Duration { + if m != nil { + return m.EndTimeOffset + } + return nil +} + +// Video segment level annotation results for label detection. +type LabelSegment struct { + // Video segment where a label was detected. + Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelSegment) Reset() { *m = LabelSegment{} } +func (m *LabelSegment) String() string { return proto.CompactTextString(m) } +func (*LabelSegment) ProtoMessage() {} +func (*LabelSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{7} +} +func (m *LabelSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelSegment.Unmarshal(m, b) +} +func (m *LabelSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelSegment.Marshal(b, m, deterministic) +} +func (dst *LabelSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelSegment.Merge(dst, src) +} +func (m *LabelSegment) XXX_Size() int { + return xxx_messageInfo_LabelSegment.Size(m) +} +func (m *LabelSegment) XXX_DiscardUnknown() { + xxx_messageInfo_LabelSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelSegment proto.InternalMessageInfo + +func (m *LabelSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *LabelSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Video frame level annotation results for label detection. 
+type LabelFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Confidence that the label is accurate. Range: [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelFrame) Reset() { *m = LabelFrame{} } +func (m *LabelFrame) String() string { return proto.CompactTextString(m) } +func (*LabelFrame) ProtoMessage() {} +func (*LabelFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{8} +} +func (m *LabelFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelFrame.Unmarshal(m, b) +} +func (m *LabelFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelFrame.Marshal(b, m, deterministic) +} +func (dst *LabelFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelFrame.Merge(dst, src) +} +func (m *LabelFrame) XXX_Size() int { + return xxx_messageInfo_LabelFrame.Size(m) +} +func (m *LabelFrame) XXX_DiscardUnknown() { + xxx_messageInfo_LabelFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelFrame proto.InternalMessageInfo + +func (m *LabelFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *LabelFrame) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected entity from video analysis. +type Entity struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Textual description, e.g. `Fixed-gear bicycle`. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Language code for `description` in BCP-47 format. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{9} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *Entity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Entity) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Label annotation. +type LabelAnnotation struct { + // Detected entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Common categories for the detected entity. + // E.g. when the label is `Terrier` the category is likely `dog`. And in some + // cases there might be more than one categories e.g. `Terrier` could also be + // a `pet`. + CategoryEntities []*Entity `protobuf:"bytes,2,rep,name=category_entities,json=categoryEntities,proto3" json:"category_entities,omitempty"` + // All video segments where a label was detected. + Segments []*LabelSegment `protobuf:"bytes,3,rep,name=segments,proto3" json:"segments,omitempty"` + // All video frames where a label was detected. 
+ Frames []*LabelFrame `protobuf:"bytes,4,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LabelAnnotation) Reset() { *m = LabelAnnotation{} } +func (m *LabelAnnotation) String() string { return proto.CompactTextString(m) } +func (*LabelAnnotation) ProtoMessage() {} +func (*LabelAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{10} +} +func (m *LabelAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LabelAnnotation.Unmarshal(m, b) +} +func (m *LabelAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LabelAnnotation.Marshal(b, m, deterministic) +} +func (dst *LabelAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LabelAnnotation.Merge(dst, src) +} +func (m *LabelAnnotation) XXX_Size() int { + return xxx_messageInfo_LabelAnnotation.Size(m) +} +func (m *LabelAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LabelAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LabelAnnotation proto.InternalMessageInfo + +func (m *LabelAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *LabelAnnotation) GetCategoryEntities() []*Entity { + if m != nil { + return m.CategoryEntities + } + return nil +} + +func (m *LabelAnnotation) GetSegments() []*LabelSegment { + if m != nil { + return m.Segments + } + return nil +} + +func (m *LabelAnnotation) GetFrames() []*LabelFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for explicit content. +type ExplicitContentFrame struct { + // Time-offset, relative to the beginning of the video, corresponding to the + // video frame for this location. + TimeOffset *duration.Duration `protobuf:"bytes,1,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + // Likelihood of the pornography content.. 
+ PornographyLikelihood Likelihood `protobuf:"varint,2,opt,name=pornography_likelihood,json=pornographyLikelihood,proto3,enum=google.cloud.videointelligence.v1p3beta1.Likelihood" json:"pornography_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentFrame) Reset() { *m = ExplicitContentFrame{} } +func (m *ExplicitContentFrame) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentFrame) ProtoMessage() {} +func (*ExplicitContentFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{11} +} +func (m *ExplicitContentFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentFrame.Unmarshal(m, b) +} +func (m *ExplicitContentFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentFrame.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentFrame.Merge(dst, src) +} +func (m *ExplicitContentFrame) XXX_Size() int { + return xxx_messageInfo_ExplicitContentFrame.Size(m) +} +func (m *ExplicitContentFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentFrame proto.InternalMessageInfo + +func (m *ExplicitContentFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +func (m *ExplicitContentFrame) GetPornographyLikelihood() Likelihood { + if m != nil { + return m.PornographyLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +// Explicit content annotation (based on per-frame visual signals only). +// If no explicit content has been detected in a frame, no annotations are +// present for that frame. +type ExplicitContentAnnotation struct { + // All video frames where explicit content was detected. + Frames []*ExplicitContentFrame `protobuf:"bytes,1,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExplicitContentAnnotation) Reset() { *m = ExplicitContentAnnotation{} } +func (m *ExplicitContentAnnotation) String() string { return proto.CompactTextString(m) } +func (*ExplicitContentAnnotation) ProtoMessage() {} +func (*ExplicitContentAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{12} +} +func (m *ExplicitContentAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExplicitContentAnnotation.Unmarshal(m, b) +} +func (m *ExplicitContentAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExplicitContentAnnotation.Marshal(b, m, deterministic) +} +func (dst *ExplicitContentAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExplicitContentAnnotation.Merge(dst, src) +} +func (m *ExplicitContentAnnotation) XXX_Size() int { + return xxx_messageInfo_ExplicitContentAnnotation.Size(m) +} +func (m *ExplicitContentAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ExplicitContentAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ExplicitContentAnnotation proto.InternalMessageInfo + +func (m *ExplicitContentAnnotation) GetFrames() []*ExplicitContentFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Normalized bounding box. 
+// The normalized vertex coordinates are relative to the original image. +// Range: [0, 1]. +type NormalizedBoundingBox struct { + // Left X coordinate. + Left float32 `protobuf:"fixed32,1,opt,name=left,proto3" json:"left,omitempty"` + // Top Y coordinate. + Top float32 `protobuf:"fixed32,2,opt,name=top,proto3" json:"top,omitempty"` + // Right X coordinate. + Right float32 `protobuf:"fixed32,3,opt,name=right,proto3" json:"right,omitempty"` + // Bottom Y coordinate. + Bottom float32 `protobuf:"fixed32,4,opt,name=bottom,proto3" json:"bottom,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingBox) Reset() { *m = NormalizedBoundingBox{} } +func (m *NormalizedBoundingBox) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingBox) ProtoMessage() {} +func (*NormalizedBoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{13} +} +func (m *NormalizedBoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingBox.Unmarshal(m, b) +} +func (m *NormalizedBoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingBox.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingBox.Merge(dst, src) +} +func (m *NormalizedBoundingBox) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingBox.Size(m) +} +func (m *NormalizedBoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingBox proto.InternalMessageInfo + +func (m *NormalizedBoundingBox) GetLeft() float32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *NormalizedBoundingBox) GetTop() float32 { + if m != nil { + return m.Top + } + return 0 +} + +func (m *NormalizedBoundingBox) GetRight() float32 { + if m != nil { + return m.Right + } + return 0 +} + +func (m *NormalizedBoundingBox) GetBottom() float32 { + if m != nil { + return m.Bottom + } + return 0 +} + +// Annotation results for a single video. +type VideoAnnotationResults struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. + SegmentLabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=segment_label_annotations,json=segmentLabelAnnotations,proto3" json:"segment_label_annotations,omitempty"` + // Label annotations on shot level. + // There is exactly one element for each unique label. + ShotLabelAnnotations []*LabelAnnotation `protobuf:"bytes,3,rep,name=shot_label_annotations,json=shotLabelAnnotations,proto3" json:"shot_label_annotations,omitempty"` + // Label annotations on frame level. + // There is exactly one element for each unique label. + FrameLabelAnnotations []*LabelAnnotation `protobuf:"bytes,4,rep,name=frame_label_annotations,json=frameLabelAnnotations,proto3" json:"frame_label_annotations,omitempty"` + // Shot annotations. Each shot is represented as a video segment. + ShotAnnotations []*VideoSegment `protobuf:"bytes,6,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"` + // Explicit content annotation. 
+ ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,7,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"` + // OCR text detection and tracking. + // Annotations for list of detected text snippets. Each will have list of + // frame information associated with it. + TextAnnotations []*TextAnnotation `protobuf:"bytes,12,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // Annotations for list of objects detected and tracked in video. + ObjectAnnotations []*ObjectTrackingAnnotation `protobuf:"bytes,14,rep,name=object_annotations,json=objectAnnotations,proto3" json:"object_annotations,omitempty"` + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` + // some videos may succeed and some may fail. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationResults) Reset() { *m = VideoAnnotationResults{} } +func (m *VideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationResults) ProtoMessage() {} +func (*VideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{14} +} +func (m *VideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationResults.Unmarshal(m, b) +} +func (m *VideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationResults.Merge(dst, src) +} +func (m *VideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationResults.Size(m) +} +func (m *VideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationResults proto.InternalMessageInfo + +func (m *VideoAnnotationResults) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationResults) GetSegmentLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.SegmentLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.ShotLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetFrameLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.FrameLabelAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *VideoAnnotationResults) GetTextAnnotations() []*TextAnnotation { + if m != nil { + return m.TextAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetObjectAnnotations() []*ObjectTrackingAnnotation { + if m != nil { + return m.ObjectAnnotations + } + return nil +} + +func (m *VideoAnnotationResults) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +// Video annotation response. 
Included in the `response` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoResponse struct { + // Annotation results for all videos specified in `AnnotateVideoRequest`. + AnnotationResults []*VideoAnnotationResults `protobuf:"bytes,1,rep,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoResponse) Reset() { *m = AnnotateVideoResponse{} } +func (m *AnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoResponse) ProtoMessage() {} +func (*AnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{15} +} +func (m *AnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoResponse.Unmarshal(m, b) +} +func (m *AnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoResponse.Merge(dst, src) +} +func (m *AnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoResponse.Size(m) +} +func (m *AnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoResponse proto.InternalMessageInfo + +func (m *AnnotateVideoResponse) GetAnnotationResults() []*VideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +// Annotation progress for a single video. +type VideoAnnotationProgress struct { + // Video file location in + // [Google Cloud Storage](https://cloud.google.com/storage/). + InputUri string `protobuf:"bytes,1,opt,name=input_uri,json=inputUri,proto3" json:"input_uri,omitempty"` + // Approximate percentage processed thus far. Guaranteed to be + // 100 when fully processed. + ProgressPercent int32 `protobuf:"varint,2,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Time when the request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time of the most recent update. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VideoAnnotationProgress) Reset() { *m = VideoAnnotationProgress{} } +func (m *VideoAnnotationProgress) String() string { return proto.CompactTextString(m) } +func (*VideoAnnotationProgress) ProtoMessage() {} +func (*VideoAnnotationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{16} +} +func (m *VideoAnnotationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VideoAnnotationProgress.Unmarshal(m, b) +} +func (m *VideoAnnotationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VideoAnnotationProgress.Marshal(b, m, deterministic) +} +func (dst *VideoAnnotationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_VideoAnnotationProgress.Merge(dst, src) +} +func (m *VideoAnnotationProgress) XXX_Size() int { + return xxx_messageInfo_VideoAnnotationProgress.Size(m) +} +func (m *VideoAnnotationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_VideoAnnotationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_VideoAnnotationProgress proto.InternalMessageInfo + +func (m *VideoAnnotationProgress) GetInputUri() string { + if m != nil { + return m.InputUri + } + return "" +} + +func (m *VideoAnnotationProgress) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *VideoAnnotationProgress) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *VideoAnnotationProgress) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Video annotation progress. Included in the `metadata` +// field of the `Operation` returned by the `GetOperation` +// call of the `google::longrunning::Operations` service. +type AnnotateVideoProgress struct { + // Progress metadata for all videos specified in `AnnotateVideoRequest`. 
+ AnnotationProgress []*VideoAnnotationProgress `protobuf:"bytes,1,rep,name=annotation_progress,json=annotationProgress,proto3" json:"annotation_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateVideoProgress) Reset() { *m = AnnotateVideoProgress{} } +func (m *AnnotateVideoProgress) String() string { return proto.CompactTextString(m) } +func (*AnnotateVideoProgress) ProtoMessage() {} +func (*AnnotateVideoProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{17} +} +func (m *AnnotateVideoProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateVideoProgress.Unmarshal(m, b) +} +func (m *AnnotateVideoProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateVideoProgress.Marshal(b, m, deterministic) +} +func (dst *AnnotateVideoProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateVideoProgress.Merge(dst, src) +} +func (m *AnnotateVideoProgress) XXX_Size() int { + return xxx_messageInfo_AnnotateVideoProgress.Size(m) +} +func (m *AnnotateVideoProgress) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateVideoProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateVideoProgress proto.InternalMessageInfo + +func (m *AnnotateVideoProgress) GetAnnotationProgress() []*VideoAnnotationProgress { + if m != nil { + return m.AnnotationProgress + } + return nil +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{18} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// Normalized bounding polygon for text (that might not be aligned with axis). +// Contains list of the corner points in clockwise order starting from +// top-left corner. 
For example, for a rectangular bounding box:
+// When the text is horizontal it might look like:
+//         0----1
+//         |    |
+//         3----2
+//
+// When it's clockwise rotated 180 degrees around the top-left corner it
+// becomes:
+//         2----3
+//         |    |
+//         1----0
+//
+// and the vertex order will still be (0, 1, 2, 3). Note that values can be less
+// than 0, or greater than 1 due to trigonometric calculations for location of
+// the box.
+type NormalizedBoundingPoly struct {
+ // Normalized vertices of the bounding polygon.
+ Vertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *NormalizedBoundingPoly) Reset() { *m = NormalizedBoundingPoly{} }
+func (m *NormalizedBoundingPoly) String() string { return proto.CompactTextString(m) }
+func (*NormalizedBoundingPoly) ProtoMessage() {}
+func (*NormalizedBoundingPoly) Descriptor() ([]byte, []int) {
+ return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{19}
+}
+func (m *NormalizedBoundingPoly) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_NormalizedBoundingPoly.Unmarshal(m, b)
+}
+func (m *NormalizedBoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_NormalizedBoundingPoly.Marshal(b, m, deterministic)
+}
+func (dst *NormalizedBoundingPoly) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_NormalizedBoundingPoly.Merge(dst, src)
+}
+func (m *NormalizedBoundingPoly) XXX_Size() int {
+ return xxx_messageInfo_NormalizedBoundingPoly.Size(m)
+}
+func (m *NormalizedBoundingPoly) XXX_DiscardUnknown() {
+ xxx_messageInfo_NormalizedBoundingPoly.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_NormalizedBoundingPoly proto.InternalMessageInfo
+
+func (m *NormalizedBoundingPoly) GetVertices() []*NormalizedVertex {
+ if m != nil {
+ return m.Vertices
+ }
+ return nil
+}
+
+// Video segment level annotation results for text detection.
+type TextSegment struct {
+ // Video segment where a text snippet was detected.
+ Segment *VideoSegment `protobuf:"bytes,1,opt,name=segment,proto3" json:"segment,omitempty"`
+ // Confidence for the track of detected text. It is calculated as the highest
+ // over all frames where OCR detected text appears.
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"`
+ // Information related to the frames where OCR detected text appears.
+ Frames []*TextFrame `protobuf:"bytes,3,rep,name=frames,proto3" json:"frames,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextSegment) Reset() { *m = TextSegment{} } +func (m *TextSegment) String() string { return proto.CompactTextString(m) } +func (*TextSegment) ProtoMessage() {} +func (*TextSegment) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{20} +} +func (m *TextSegment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextSegment.Unmarshal(m, b) +} +func (m *TextSegment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextSegment.Marshal(b, m, deterministic) +} +func (dst *TextSegment) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextSegment.Merge(dst, src) +} +func (m *TextSegment) XXX_Size() int { + return xxx_messageInfo_TextSegment.Size(m) +} +func (m *TextSegment) XXX_DiscardUnknown() { + xxx_messageInfo_TextSegment.DiscardUnknown(m) +} + +var xxx_messageInfo_TextSegment proto.InternalMessageInfo + +func (m *TextSegment) GetSegment() *VideoSegment { + if m != nil { + return m.Segment + } + return nil +} + +func (m *TextSegment) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *TextSegment) GetFrames() []*TextFrame { + if m != nil { + return m.Frames + } + return nil +} + +// Video frame level annotation results for text annotation (OCR). +// Contains information regarding timestamp and bounding box locations for the +// frames containing detected OCR text snippets. +type TextFrame struct { + // Bounding polygon of the detected text for this frame. + RotatedBoundingBox *NormalizedBoundingPoly `protobuf:"bytes,1,opt,name=rotated_bounding_box,json=rotatedBoundingBox,proto3" json:"rotated_bounding_box,omitempty"` + // Timestamp of this frame. + TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextFrame) Reset() { *m = TextFrame{} } +func (m *TextFrame) String() string { return proto.CompactTextString(m) } +func (*TextFrame) ProtoMessage() {} +func (*TextFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{21} +} +func (m *TextFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextFrame.Unmarshal(m, b) +} +func (m *TextFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextFrame.Marshal(b, m, deterministic) +} +func (dst *TextFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextFrame.Merge(dst, src) +} +func (m *TextFrame) XXX_Size() int { + return xxx_messageInfo_TextFrame.Size(m) +} +func (m *TextFrame) XXX_DiscardUnknown() { + xxx_messageInfo_TextFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_TextFrame proto.InternalMessageInfo + +func (m *TextFrame) GetRotatedBoundingBox() *NormalizedBoundingPoly { + if m != nil { + return m.RotatedBoundingBox + } + return nil +} + +func (m *TextFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations related to one detected OCR text snippet. This will contain the +// corresponding text, confidence value, and frame level information for each +// detection. +type TextAnnotation struct { + // The detected text. 
+ Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"` + // All video segments where OCR detected text appears. + Segments []*TextSegment `protobuf:"bytes,2,rep,name=segments,proto3" json:"segments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{22} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *TextAnnotation) GetSegments() []*TextSegment { + if m != nil { + return m.Segments + } + return nil +} + +// Video frame level annotations for object detection and tracking. This field +// stores per frame location, time offset, and confidence. +type ObjectTrackingFrame struct { + // The normalized bounding box location of this object track for the frame. + NormalizedBoundingBox *NormalizedBoundingBox `protobuf:"bytes,1,opt,name=normalized_bounding_box,json=normalizedBoundingBox,proto3" json:"normalized_bounding_box,omitempty"` + // The timestamp of the frame in microseconds. 
+ TimeOffset *duration.Duration `protobuf:"bytes,2,opt,name=time_offset,json=timeOffset,proto3" json:"time_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingFrame) Reset() { *m = ObjectTrackingFrame{} } +func (m *ObjectTrackingFrame) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingFrame) ProtoMessage() {} +func (*ObjectTrackingFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{23} +} +func (m *ObjectTrackingFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingFrame.Unmarshal(m, b) +} +func (m *ObjectTrackingFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingFrame.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingFrame.Merge(dst, src) +} +func (m *ObjectTrackingFrame) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingFrame.Size(m) +} +func (m *ObjectTrackingFrame) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingFrame proto.InternalMessageInfo + +func (m *ObjectTrackingFrame) GetNormalizedBoundingBox() *NormalizedBoundingBox { + if m != nil { + return m.NormalizedBoundingBox + } + return nil +} + +func (m *ObjectTrackingFrame) GetTimeOffset() *duration.Duration { + if m != nil { + return m.TimeOffset + } + return nil +} + +// Annotations corresponding to one tracked object. +type ObjectTrackingAnnotation struct { + // Entity to specify the object category that this track is labeled as. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // Object category's labeling confidence of this track. + Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Information corresponding to all frames where this object track appears. + // Non-streaming batch mode: it may be one or multiple ObjectTrackingFrame + // messages in frames. + // Streaming mode: it can only be one ObjectTrackingFrame message in frames. + Frames []*ObjectTrackingFrame `protobuf:"bytes,2,rep,name=frames,proto3" json:"frames,omitempty"` + // Different representation of tracking info in non-streaming batch + // and streaming modes. 
+ // + // Types that are valid to be assigned to TrackInfo: + // *ObjectTrackingAnnotation_Segment + // *ObjectTrackingAnnotation_TrackId + TrackInfo isObjectTrackingAnnotation_TrackInfo `protobuf_oneof:"track_info"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectTrackingAnnotation) Reset() { *m = ObjectTrackingAnnotation{} } +func (m *ObjectTrackingAnnotation) String() string { return proto.CompactTextString(m) } +func (*ObjectTrackingAnnotation) ProtoMessage() {} +func (*ObjectTrackingAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{24} +} +func (m *ObjectTrackingAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectTrackingAnnotation.Unmarshal(m, b) +} +func (m *ObjectTrackingAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectTrackingAnnotation.Marshal(b, m, deterministic) +} +func (dst *ObjectTrackingAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectTrackingAnnotation.Merge(dst, src) +} +func (m *ObjectTrackingAnnotation) XXX_Size() int { + return xxx_messageInfo_ObjectTrackingAnnotation.Size(m) +} +func (m *ObjectTrackingAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectTrackingAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectTrackingAnnotation proto.InternalMessageInfo + +func (m *ObjectTrackingAnnotation) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *ObjectTrackingAnnotation) GetFrames() []*ObjectTrackingFrame { + if m != nil { + return m.Frames + } + return nil +} + +type isObjectTrackingAnnotation_TrackInfo interface { + isObjectTrackingAnnotation_TrackInfo() +} + +type ObjectTrackingAnnotation_Segment struct { + Segment *VideoSegment `protobuf:"bytes,3,opt,name=segment,proto3,oneof"` +} + +type ObjectTrackingAnnotation_TrackId struct { + TrackId int64 `protobuf:"varint,5,opt,name=track_id,json=trackId,proto3,oneof"` +} + +func (*ObjectTrackingAnnotation_Segment) isObjectTrackingAnnotation_TrackInfo() {} + +func (*ObjectTrackingAnnotation_TrackId) isObjectTrackingAnnotation_TrackInfo() {} + +func (m *ObjectTrackingAnnotation) GetTrackInfo() isObjectTrackingAnnotation_TrackInfo { + if m != nil { + return m.TrackInfo + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetSegment() *VideoSegment { + if x, ok := m.GetTrackInfo().(*ObjectTrackingAnnotation_Segment); ok { + return x.Segment + } + return nil +} + +func (m *ObjectTrackingAnnotation) GetTrackId() int64 { + if x, ok := m.GetTrackInfo().(*ObjectTrackingAnnotation_TrackId); ok { + return x.TrackId + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ObjectTrackingAnnotation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ObjectTrackingAnnotation_OneofMarshaler, _ObjectTrackingAnnotation_OneofUnmarshaler, _ObjectTrackingAnnotation_OneofSizer, []interface{}{ + (*ObjectTrackingAnnotation_Segment)(nil), + (*ObjectTrackingAnnotation_TrackId)(nil), + } +} + +func _ObjectTrackingAnnotation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ObjectTrackingAnnotation) + // track_info + switch x := m.TrackInfo.(type) { + case *ObjectTrackingAnnotation_Segment: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Segment); err != nil { + return err + } + case *ObjectTrackingAnnotation_TrackId: + b.EncodeVarint(5<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.TrackId)) + case nil: + default: + return fmt.Errorf("ObjectTrackingAnnotation.TrackInfo has unexpected type %T", x) + } + return nil +} + +func _ObjectTrackingAnnotation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ObjectTrackingAnnotation) + switch tag { + case 3: // track_info.segment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VideoSegment) + err := b.DecodeMessage(msg) + m.TrackInfo = &ObjectTrackingAnnotation_Segment{msg} + return true, err + case 5: // track_info.track_id + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TrackInfo = &ObjectTrackingAnnotation_TrackId{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ObjectTrackingAnnotation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ObjectTrackingAnnotation) + // track_info + switch x := m.TrackInfo.(type) { + case *ObjectTrackingAnnotation_Segment: + s := proto.Size(x.Segment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ObjectTrackingAnnotation_TrackId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.TrackId)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The top-level message sent by the client for the `StreamingAnnotateVideo` +// method. Multiple `StreamingAnnotateVideoRequest` messages are sent. +// The first message must only contain a `StreamingVideoConfig` message. +// All subsequent messages must only contain `input_content` data. +type StreamingAnnotateVideoRequest struct { + // *Required* The streaming request, which is either a streaming config or + // video content. 
+ // + // Types that are valid to be assigned to StreamingRequest: + // *StreamingAnnotateVideoRequest_VideoConfig + // *StreamingAnnotateVideoRequest_InputContent + StreamingRequest isStreamingAnnotateVideoRequest_StreamingRequest `protobuf_oneof:"streaming_request"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingAnnotateVideoRequest) Reset() { *m = StreamingAnnotateVideoRequest{} } +func (m *StreamingAnnotateVideoRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingAnnotateVideoRequest) ProtoMessage() {} +func (*StreamingAnnotateVideoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{25} +} +func (m *StreamingAnnotateVideoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingAnnotateVideoRequest.Unmarshal(m, b) +} +func (m *StreamingAnnotateVideoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingAnnotateVideoRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingAnnotateVideoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingAnnotateVideoRequest.Merge(dst, src) +} +func (m *StreamingAnnotateVideoRequest) XXX_Size() int { + return xxx_messageInfo_StreamingAnnotateVideoRequest.Size(m) +} +func (m *StreamingAnnotateVideoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingAnnotateVideoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingAnnotateVideoRequest proto.InternalMessageInfo + +type isStreamingAnnotateVideoRequest_StreamingRequest interface { + isStreamingAnnotateVideoRequest_StreamingRequest() +} + +type StreamingAnnotateVideoRequest_VideoConfig struct { + VideoConfig *StreamingVideoConfig `protobuf:"bytes,1,opt,name=video_config,json=videoConfig,proto3,oneof"` +} + +type StreamingAnnotateVideoRequest_InputContent struct { + InputContent []byte `protobuf:"bytes,2,opt,name=input_content,json=inputContent,proto3,oneof"` +} + +func (*StreamingAnnotateVideoRequest_VideoConfig) isStreamingAnnotateVideoRequest_StreamingRequest() {} + +func (*StreamingAnnotateVideoRequest_InputContent) isStreamingAnnotateVideoRequest_StreamingRequest() { +} + +func (m *StreamingAnnotateVideoRequest) GetStreamingRequest() isStreamingAnnotateVideoRequest_StreamingRequest { + if m != nil { + return m.StreamingRequest + } + return nil +} + +func (m *StreamingAnnotateVideoRequest) GetVideoConfig() *StreamingVideoConfig { + if x, ok := m.GetStreamingRequest().(*StreamingAnnotateVideoRequest_VideoConfig); ok { + return x.VideoConfig + } + return nil +} + +func (m *StreamingAnnotateVideoRequest) GetInputContent() []byte { + if x, ok := m.GetStreamingRequest().(*StreamingAnnotateVideoRequest_InputContent); ok { + return x.InputContent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StreamingAnnotateVideoRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StreamingAnnotateVideoRequest_OneofMarshaler, _StreamingAnnotateVideoRequest_OneofUnmarshaler, _StreamingAnnotateVideoRequest_OneofSizer, []interface{}{ + (*StreamingAnnotateVideoRequest_VideoConfig)(nil), + (*StreamingAnnotateVideoRequest_InputContent)(nil), + } +} + +func _StreamingAnnotateVideoRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StreamingAnnotateVideoRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingAnnotateVideoRequest_VideoConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VideoConfig); err != nil { + return err + } + case *StreamingAnnotateVideoRequest_InputContent: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.InputContent) + case nil: + default: + return fmt.Errorf("StreamingAnnotateVideoRequest.StreamingRequest has unexpected type %T", x) + } + return nil +} + +func _StreamingAnnotateVideoRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StreamingAnnotateVideoRequest) + switch tag { + case 1: // streaming_request.video_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingVideoConfig) + err := b.DecodeMessage(msg) + m.StreamingRequest = &StreamingAnnotateVideoRequest_VideoConfig{msg} + return true, err + case 2: // streaming_request.input_content + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.StreamingRequest = &StreamingAnnotateVideoRequest_InputContent{x} + return true, err + default: + return false, nil + } +} + +func _StreamingAnnotateVideoRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StreamingAnnotateVideoRequest) + // streaming_request + switch x := m.StreamingRequest.(type) { + case *StreamingAnnotateVideoRequest_VideoConfig: + s := proto.Size(x.VideoConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingAnnotateVideoRequest_InputContent: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.InputContent))) + n += len(x.InputContent) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// `StreamingAnnotateVideoResponse` is the only message returned to the client +// by `StreamingAnnotateVideo`. A series of zero or more +// `StreamingAnnotateVideoResponse` messages are streamed back to the client. +type StreamingAnnotateVideoResponse struct { + // If set, returns a [google.rpc.Status][] message that + // specifies the error for the operation. + Error *status.Status `protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` + // Streaming annotation results. + AnnotationResults *StreamingVideoAnnotationResults `protobuf:"bytes,2,opt,name=annotation_results,json=annotationResults,proto3" json:"annotation_results,omitempty"` + // GCS URI that stores annotation results of one streaming session. + // It is a directory that can hold multiple files in JSON format. 
+ // Example uri format: + // gs://bucket_id/object_id/cloud_project_name-session_id + AnnotationResultsUri string `protobuf:"bytes,3,opt,name=annotation_results_uri,json=annotationResultsUri,proto3" json:"annotation_results_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingAnnotateVideoResponse) Reset() { *m = StreamingAnnotateVideoResponse{} } +func (m *StreamingAnnotateVideoResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingAnnotateVideoResponse) ProtoMessage() {} +func (*StreamingAnnotateVideoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{26} +} +func (m *StreamingAnnotateVideoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingAnnotateVideoResponse.Unmarshal(m, b) +} +func (m *StreamingAnnotateVideoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingAnnotateVideoResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingAnnotateVideoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingAnnotateVideoResponse.Merge(dst, src) +} +func (m *StreamingAnnotateVideoResponse) XXX_Size() int { + return xxx_messageInfo_StreamingAnnotateVideoResponse.Size(m) +} +func (m *StreamingAnnotateVideoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingAnnotateVideoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingAnnotateVideoResponse proto.InternalMessageInfo + +func (m *StreamingAnnotateVideoResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *StreamingAnnotateVideoResponse) GetAnnotationResults() *StreamingVideoAnnotationResults { + if m != nil { + return m.AnnotationResults + } + return nil +} + +func (m *StreamingAnnotateVideoResponse) GetAnnotationResultsUri() string { + if m != nil { + return m.AnnotationResultsUri + } + return "" +} + +// Config for EXPLICIT_CONTENT_DETECTION in streaming mode. 
+type StreamingExplicitContentDetectionConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingExplicitContentDetectionConfig) Reset() { + *m = StreamingExplicitContentDetectionConfig{} +} +func (m *StreamingExplicitContentDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingExplicitContentDetectionConfig) ProtoMessage() {} +func (*StreamingExplicitContentDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{27} +} +func (m *StreamingExplicitContentDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingExplicitContentDetectionConfig.Unmarshal(m, b) +} +func (m *StreamingExplicitContentDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingExplicitContentDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingExplicitContentDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingExplicitContentDetectionConfig.Merge(dst, src) +} +func (m *StreamingExplicitContentDetectionConfig) XXX_Size() int { + return xxx_messageInfo_StreamingExplicitContentDetectionConfig.Size(m) +} +func (m *StreamingExplicitContentDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingExplicitContentDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingExplicitContentDetectionConfig proto.InternalMessageInfo + +// Config for LABEL_DETECTION in streaming mode. +type StreamingLabelDetectionConfig struct { + // Whether the video has been captured from a stationary (i.e. non-moving) + // camera. When set to true, might improve detection accuracy for moving + // objects. Default: false. + StationaryCamera bool `protobuf:"varint,1,opt,name=stationary_camera,json=stationaryCamera,proto3" json:"stationary_camera,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingLabelDetectionConfig) Reset() { *m = StreamingLabelDetectionConfig{} } +func (m *StreamingLabelDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingLabelDetectionConfig) ProtoMessage() {} +func (*StreamingLabelDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{28} +} +func (m *StreamingLabelDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingLabelDetectionConfig.Unmarshal(m, b) +} +func (m *StreamingLabelDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingLabelDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingLabelDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingLabelDetectionConfig.Merge(dst, src) +} +func (m *StreamingLabelDetectionConfig) XXX_Size() int { + return xxx_messageInfo_StreamingLabelDetectionConfig.Size(m) +} +func (m *StreamingLabelDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingLabelDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingLabelDetectionConfig proto.InternalMessageInfo + +func (m *StreamingLabelDetectionConfig) GetStationaryCamera() bool { + if m != nil { + return m.StationaryCamera + } + return false +} + +// Config for STREAMING_OBJECT_TRACKING. 
+type StreamingObjectTrackingConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingObjectTrackingConfig) Reset() { *m = StreamingObjectTrackingConfig{} } +func (m *StreamingObjectTrackingConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingObjectTrackingConfig) ProtoMessage() {} +func (*StreamingObjectTrackingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{29} +} +func (m *StreamingObjectTrackingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingObjectTrackingConfig.Unmarshal(m, b) +} +func (m *StreamingObjectTrackingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingObjectTrackingConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingObjectTrackingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingObjectTrackingConfig.Merge(dst, src) +} +func (m *StreamingObjectTrackingConfig) XXX_Size() int { + return xxx_messageInfo_StreamingObjectTrackingConfig.Size(m) +} +func (m *StreamingObjectTrackingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingObjectTrackingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingObjectTrackingConfig proto.InternalMessageInfo + +// Config for SHOT_CHANGE_DETECTION in streaming mode. +type StreamingShotChangeDetectionConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingShotChangeDetectionConfig) Reset() { *m = StreamingShotChangeDetectionConfig{} } +func (m *StreamingShotChangeDetectionConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingShotChangeDetectionConfig) ProtoMessage() {} +func (*StreamingShotChangeDetectionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{30} +} +func (m *StreamingShotChangeDetectionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingShotChangeDetectionConfig.Unmarshal(m, b) +} +func (m *StreamingShotChangeDetectionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingShotChangeDetectionConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingShotChangeDetectionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingShotChangeDetectionConfig.Merge(dst, src) +} +func (m *StreamingShotChangeDetectionConfig) XXX_Size() int { + return xxx_messageInfo_StreamingShotChangeDetectionConfig.Size(m) +} +func (m *StreamingShotChangeDetectionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingShotChangeDetectionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingShotChangeDetectionConfig proto.InternalMessageInfo + +// Config for streaming storage option. +type StreamingStorageConfig struct { + // Enable streaming storage. Default: false. + EnableStorageAnnotationResult bool `protobuf:"varint,1,opt,name=enable_storage_annotation_result,json=enableStorageAnnotationResult,proto3" json:"enable_storage_annotation_result,omitempty"` + // GCS URI to store all annotation results for one client. Client should + // specify this field as the top-level storage directory. Annotation results + // of different sessions will be put into different sub-directories denoted + // by project_name and session_id. 
All sub-directories will be auto-generated
+ // by the program and will be made accessible to the client in the response proto.
+ // URIs must be specified in the following format: `gs://bucket-id/object-id`
+ // `bucket-id` should be a valid GCS bucket created by the client, and bucket
+ // permissions must also be configured properly. `object-id` can be an arbitrary
+ // string that makes sense to the client. Other URI formats will return an error
+ // and cause a GCS write failure.
+ AnnotationResultStorageDirectory string `protobuf:"bytes,3,opt,name=annotation_result_storage_directory,json=annotationResultStorageDirectory,proto3" json:"annotation_result_storage_directory,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *StreamingStorageConfig) Reset() { *m = StreamingStorageConfig{} }
+func (m *StreamingStorageConfig) String() string { return proto.CompactTextString(m) }
+func (*StreamingStorageConfig) ProtoMessage() {}
+func (*StreamingStorageConfig) Descriptor() ([]byte, []int) {
+ return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{31}
+}
+func (m *StreamingStorageConfig) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_StreamingStorageConfig.Unmarshal(m, b)
+}
+func (m *StreamingStorageConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_StreamingStorageConfig.Marshal(b, m, deterministic)
+}
+func (dst *StreamingStorageConfig) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_StreamingStorageConfig.Merge(dst, src)
+}
+func (m *StreamingStorageConfig) XXX_Size() int {
+ return xxx_messageInfo_StreamingStorageConfig.Size(m)
+}
+func (m *StreamingStorageConfig) XXX_DiscardUnknown() {
+ xxx_messageInfo_StreamingStorageConfig.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StreamingStorageConfig proto.InternalMessageInfo
+
+func (m *StreamingStorageConfig) GetEnableStorageAnnotationResult() bool {
+ if m != nil {
+ return m.EnableStorageAnnotationResult
+ }
+ return false
+}
+
+func (m *StreamingStorageConfig) GetAnnotationResultStorageDirectory() string {
+ if m != nil {
+ return m.AnnotationResultStorageDirectory
+ }
+ return ""
+}
+
+// Streaming annotation results corresponding to a portion of the video
+// that is currently being processed.
+type StreamingVideoAnnotationResults struct {
+ // Shot annotation results. Each shot is represented as a video segment.
+ ShotAnnotations []*VideoSegment `protobuf:"bytes,1,rep,name=shot_annotations,json=shotAnnotations,proto3" json:"shot_annotations,omitempty"`
+ // Label annotation results.
+ LabelAnnotations []*LabelAnnotation `protobuf:"bytes,2,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"`
+ // Explicit content detection results.
+ ExplicitAnnotation *ExplicitContentAnnotation `protobuf:"bytes,3,opt,name=explicit_annotation,json=explicitAnnotation,proto3" json:"explicit_annotation,omitempty"`
+ // Object tracking results.
+ ObjectAnnotations []*ObjectTrackingAnnotation `protobuf:"bytes,4,rep,name=object_annotations,json=objectAnnotations,proto3" json:"object_annotations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingVideoAnnotationResults) Reset() { *m = StreamingVideoAnnotationResults{} } +func (m *StreamingVideoAnnotationResults) String() string { return proto.CompactTextString(m) } +func (*StreamingVideoAnnotationResults) ProtoMessage() {} +func (*StreamingVideoAnnotationResults) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{32} +} +func (m *StreamingVideoAnnotationResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingVideoAnnotationResults.Unmarshal(m, b) +} +func (m *StreamingVideoAnnotationResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingVideoAnnotationResults.Marshal(b, m, deterministic) +} +func (dst *StreamingVideoAnnotationResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingVideoAnnotationResults.Merge(dst, src) +} +func (m *StreamingVideoAnnotationResults) XXX_Size() int { + return xxx_messageInfo_StreamingVideoAnnotationResults.Size(m) +} +func (m *StreamingVideoAnnotationResults) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingVideoAnnotationResults.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingVideoAnnotationResults proto.InternalMessageInfo + +func (m *StreamingVideoAnnotationResults) GetShotAnnotations() []*VideoSegment { + if m != nil { + return m.ShotAnnotations + } + return nil +} + +func (m *StreamingVideoAnnotationResults) GetLabelAnnotations() []*LabelAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *StreamingVideoAnnotationResults) GetExplicitAnnotation() *ExplicitContentAnnotation { + if m != nil { + return m.ExplicitAnnotation + } + return nil +} + +func (m *StreamingVideoAnnotationResults) GetObjectAnnotations() []*ObjectTrackingAnnotation { + if m != nil { + return m.ObjectAnnotations + } + return nil +} + +// Provides information to the annotator that specifies how to process the +// request. +type StreamingVideoConfig struct { + // Requested annotation feature. + Feature StreamingFeature `protobuf:"varint,1,opt,name=feature,proto3,enum=google.cloud.videointelligence.v1p3beta1.StreamingFeature" json:"feature,omitempty"` + // Config for requested annotation feature. + // + // Types that are valid to be assigned to StreamingConfig: + // *StreamingVideoConfig_ShotChangeDetectionConfig + // *StreamingVideoConfig_LabelDetectionConfig + // *StreamingVideoConfig_ExplicitContentDetectionConfig + // *StreamingVideoConfig_ObjectTrackingConfig + StreamingConfig isStreamingVideoConfig_StreamingConfig `protobuf_oneof:"streaming_config"` + // Streaming storage option. By default: storage is disabled. 
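As a quick illustration of the storage options documented above, a minimal Go fragment might look like the following; the bucket path is a placeholder and the field names are the ones from the generated StreamingStorageConfig struct earlier in this file.

storageCfg := &StreamingStorageConfig{
	// Turn on result storage and point it at a caller-owned location in the
	// required `gs://bucket-id/object-id` form (placeholder values here).
	EnableStorageAnnotationResult:    true,
	AnnotationResultStorageDirectory: "gs://example-bucket/annotation-results",
}
// storageCfg would then be assigned to the StorageConfig field of a
// StreamingVideoConfig, declared immediately below.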
+ StorageConfig *StreamingStorageConfig `protobuf:"bytes,30,opt,name=storage_config,json=storageConfig,proto3" json:"storage_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingVideoConfig) Reset() { *m = StreamingVideoConfig{} } +func (m *StreamingVideoConfig) String() string { return proto.CompactTextString(m) } +func (*StreamingVideoConfig) ProtoMessage() {} +func (*StreamingVideoConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_video_intelligence_26bfd7f2669c7d48, []int{33} +} +func (m *StreamingVideoConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingVideoConfig.Unmarshal(m, b) +} +func (m *StreamingVideoConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingVideoConfig.Marshal(b, m, deterministic) +} +func (dst *StreamingVideoConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingVideoConfig.Merge(dst, src) +} +func (m *StreamingVideoConfig) XXX_Size() int { + return xxx_messageInfo_StreamingVideoConfig.Size(m) +} +func (m *StreamingVideoConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingVideoConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingVideoConfig proto.InternalMessageInfo + +func (m *StreamingVideoConfig) GetFeature() StreamingFeature { + if m != nil { + return m.Feature + } + return StreamingFeature_STREAMING_FEATURE_UNSPECIFIED +} + +type isStreamingVideoConfig_StreamingConfig interface { + isStreamingVideoConfig_StreamingConfig() +} + +type StreamingVideoConfig_ShotChangeDetectionConfig struct { + ShotChangeDetectionConfig *StreamingShotChangeDetectionConfig `protobuf:"bytes,2,opt,name=shot_change_detection_config,json=shotChangeDetectionConfig,proto3,oneof"` +} + +type StreamingVideoConfig_LabelDetectionConfig struct { + LabelDetectionConfig *StreamingLabelDetectionConfig `protobuf:"bytes,3,opt,name=label_detection_config,json=labelDetectionConfig,proto3,oneof"` +} + +type StreamingVideoConfig_ExplicitContentDetectionConfig struct { + ExplicitContentDetectionConfig *StreamingExplicitContentDetectionConfig `protobuf:"bytes,4,opt,name=explicit_content_detection_config,json=explicitContentDetectionConfig,proto3,oneof"` +} + +type StreamingVideoConfig_ObjectTrackingConfig struct { + ObjectTrackingConfig *StreamingObjectTrackingConfig `protobuf:"bytes,5,opt,name=object_tracking_config,json=objectTrackingConfig,proto3,oneof"` +} + +func (*StreamingVideoConfig_ShotChangeDetectionConfig) isStreamingVideoConfig_StreamingConfig() {} + +func (*StreamingVideoConfig_LabelDetectionConfig) isStreamingVideoConfig_StreamingConfig() {} + +func (*StreamingVideoConfig_ExplicitContentDetectionConfig) isStreamingVideoConfig_StreamingConfig() {} + +func (*StreamingVideoConfig_ObjectTrackingConfig) isStreamingVideoConfig_StreamingConfig() {} + +func (m *StreamingVideoConfig) GetStreamingConfig() isStreamingVideoConfig_StreamingConfig { + if m != nil { + return m.StreamingConfig + } + return nil +} + +func (m *StreamingVideoConfig) GetShotChangeDetectionConfig() *StreamingShotChangeDetectionConfig { + if x, ok := m.GetStreamingConfig().(*StreamingVideoConfig_ShotChangeDetectionConfig); ok { + return x.ShotChangeDetectionConfig + } + return nil +} + +func (m *StreamingVideoConfig) GetLabelDetectionConfig() *StreamingLabelDetectionConfig { + if x, ok := m.GetStreamingConfig().(*StreamingVideoConfig_LabelDetectionConfig); ok { + return x.LabelDetectionConfig + } + return nil +} + +func (m 
*StreamingVideoConfig) GetExplicitContentDetectionConfig() *StreamingExplicitContentDetectionConfig { + if x, ok := m.GetStreamingConfig().(*StreamingVideoConfig_ExplicitContentDetectionConfig); ok { + return x.ExplicitContentDetectionConfig + } + return nil +} + +func (m *StreamingVideoConfig) GetObjectTrackingConfig() *StreamingObjectTrackingConfig { + if x, ok := m.GetStreamingConfig().(*StreamingVideoConfig_ObjectTrackingConfig); ok { + return x.ObjectTrackingConfig + } + return nil +} + +func (m *StreamingVideoConfig) GetStorageConfig() *StreamingStorageConfig { + if m != nil { + return m.StorageConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*StreamingVideoConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StreamingVideoConfig_OneofMarshaler, _StreamingVideoConfig_OneofUnmarshaler, _StreamingVideoConfig_OneofSizer, []interface{}{ + (*StreamingVideoConfig_ShotChangeDetectionConfig)(nil), + (*StreamingVideoConfig_LabelDetectionConfig)(nil), + (*StreamingVideoConfig_ExplicitContentDetectionConfig)(nil), + (*StreamingVideoConfig_ObjectTrackingConfig)(nil), + } +} + +func _StreamingVideoConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StreamingVideoConfig) + // streaming_config + switch x := m.StreamingConfig.(type) { + case *StreamingVideoConfig_ShotChangeDetectionConfig: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ShotChangeDetectionConfig); err != nil { + return err + } + case *StreamingVideoConfig_LabelDetectionConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LabelDetectionConfig); err != nil { + return err + } + case *StreamingVideoConfig_ExplicitContentDetectionConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExplicitContentDetectionConfig); err != nil { + return err + } + case *StreamingVideoConfig_ObjectTrackingConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ObjectTrackingConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StreamingVideoConfig.StreamingConfig has unexpected type %T", x) + } + return nil +} + +func _StreamingVideoConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StreamingVideoConfig) + switch tag { + case 2: // streaming_config.shot_change_detection_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingShotChangeDetectionConfig) + err := b.DecodeMessage(msg) + m.StreamingConfig = &StreamingVideoConfig_ShotChangeDetectionConfig{msg} + return true, err + case 3: // streaming_config.label_detection_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingLabelDetectionConfig) + err := b.DecodeMessage(msg) + m.StreamingConfig = &StreamingVideoConfig_LabelDetectionConfig{msg} + return true, err + case 4: // streaming_config.explicit_content_detection_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StreamingExplicitContentDetectionConfig) + err := b.DecodeMessage(msg) + m.StreamingConfig = &StreamingVideoConfig_ExplicitContentDetectionConfig{msg} + return true, err + case 5: // streaming_config.object_tracking_config + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + msg := new(StreamingObjectTrackingConfig) + err := b.DecodeMessage(msg) + m.StreamingConfig = &StreamingVideoConfig_ObjectTrackingConfig{msg} + return true, err + default: + return false, nil + } +} + +func _StreamingVideoConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StreamingVideoConfig) + // streaming_config + switch x := m.StreamingConfig.(type) { + case *StreamingVideoConfig_ShotChangeDetectionConfig: + s := proto.Size(x.ShotChangeDetectionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingVideoConfig_LabelDetectionConfig: + s := proto.Size(x.LabelDetectionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingVideoConfig_ExplicitContentDetectionConfig: + s := proto.Size(x.ExplicitContentDetectionConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StreamingVideoConfig_ObjectTrackingConfig: + s := proto.Size(x.ObjectTrackingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*AnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest") + proto.RegisterType((*VideoContext)(nil), "google.cloud.videointelligence.v1p3beta1.VideoContext") + proto.RegisterType((*LabelDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.LabelDetectionConfig") + proto.RegisterType((*ShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.ShotChangeDetectionConfig") + proto.RegisterType((*ExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.ExplicitContentDetectionConfig") + proto.RegisterType((*TextDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.TextDetectionConfig") + proto.RegisterType((*VideoSegment)(nil), "google.cloud.videointelligence.v1p3beta1.VideoSegment") + proto.RegisterType((*LabelSegment)(nil), "google.cloud.videointelligence.v1p3beta1.LabelSegment") + proto.RegisterType((*LabelFrame)(nil), "google.cloud.videointelligence.v1p3beta1.LabelFrame") + proto.RegisterType((*Entity)(nil), "google.cloud.videointelligence.v1p3beta1.Entity") + proto.RegisterType((*LabelAnnotation)(nil), "google.cloud.videointelligence.v1p3beta1.LabelAnnotation") + proto.RegisterType((*ExplicitContentFrame)(nil), "google.cloud.videointelligence.v1p3beta1.ExplicitContentFrame") + proto.RegisterType((*ExplicitContentAnnotation)(nil), "google.cloud.videointelligence.v1p3beta1.ExplicitContentAnnotation") + proto.RegisterType((*NormalizedBoundingBox)(nil), "google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox") + proto.RegisterType((*VideoAnnotationResults)(nil), "google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults") + proto.RegisterType((*AnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1p3beta1.AnnotateVideoResponse") + proto.RegisterType((*VideoAnnotationProgress)(nil), "google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress") + proto.RegisterType((*AnnotateVideoProgress)(nil), "google.cloud.videointelligence.v1p3beta1.AnnotateVideoProgress") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.videointelligence.v1p3beta1.NormalizedVertex") + proto.RegisterType((*NormalizedBoundingPoly)(nil), "google.cloud.videointelligence.v1p3beta1.NormalizedBoundingPoly") + proto.RegisterType((*TextSegment)(nil), 
"google.cloud.videointelligence.v1p3beta1.TextSegment") + proto.RegisterType((*TextFrame)(nil), "google.cloud.videointelligence.v1p3beta1.TextFrame") + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.videointelligence.v1p3beta1.TextAnnotation") + proto.RegisterType((*ObjectTrackingFrame)(nil), "google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame") + proto.RegisterType((*ObjectTrackingAnnotation)(nil), "google.cloud.videointelligence.v1p3beta1.ObjectTrackingAnnotation") + proto.RegisterType((*StreamingAnnotateVideoRequest)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest") + proto.RegisterType((*StreamingAnnotateVideoResponse)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoResponse") + proto.RegisterType((*StreamingExplicitContentDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingExplicitContentDetectionConfig") + proto.RegisterType((*StreamingLabelDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingLabelDetectionConfig") + proto.RegisterType((*StreamingObjectTrackingConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingObjectTrackingConfig") + proto.RegisterType((*StreamingShotChangeDetectionConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingShotChangeDetectionConfig") + proto.RegisterType((*StreamingStorageConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingStorageConfig") + proto.RegisterType((*StreamingVideoAnnotationResults)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingVideoAnnotationResults") + proto.RegisterType((*StreamingVideoConfig)(nil), "google.cloud.videointelligence.v1p3beta1.StreamingVideoConfig") + proto.RegisterEnum("google.cloud.videointelligence.v1p3beta1.Feature", Feature_name, Feature_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p3beta1.LabelDetectionMode", LabelDetectionMode_name, LabelDetectionMode_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p3beta1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.videointelligence.v1p3beta1.StreamingFeature", StreamingFeature_name, StreamingFeature_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// VideoIntelligenceServiceClient is the client API for VideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VideoIntelligenceServiceClient interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). 
+ AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type videoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewVideoIntelligenceServiceClient(cc *grpc.ClientConn) VideoIntelligenceServiceClient { + return &videoIntelligenceServiceClient{cc} +} + +func (c *videoIntelligenceServiceClient) AnnotateVideo(ctx context.Context, in *AnnotateVideoRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService/AnnotateVideo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VideoIntelligenceServiceServer is the server API for VideoIntelligenceService service. +type VideoIntelligenceServiceServer interface { + // Performs asynchronous video annotation. Progress and results can be + // retrieved through the `google.longrunning.Operations` interface. + // `Operation.metadata` contains `AnnotateVideoProgress` (progress). + // `Operation.response` contains `AnnotateVideoResponse` (results). + AnnotateVideo(context.Context, *AnnotateVideoRequest) (*longrunning.Operation, error) +} + +func RegisterVideoIntelligenceServiceServer(s *grpc.Server, srv VideoIntelligenceServiceServer) { + s.RegisterService(&_VideoIntelligenceService_serviceDesc, srv) +} + +func _VideoIntelligenceService_AnnotateVideo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AnnotateVideoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService/AnnotateVideo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VideoIntelligenceServiceServer).AnnotateVideo(ctx, req.(*AnnotateVideoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService", + HandlerType: (*VideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AnnotateVideo", + Handler: _VideoIntelligenceService_AnnotateVideo_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/videointelligence/v1p3beta1/video_intelligence.proto", +} + +// StreamingVideoIntelligenceServiceClient is the client API for StreamingVideoIntelligenceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type StreamingVideoIntelligenceServiceClient interface { + // Performs video annotation with bidirectional streaming: emitting results + // while sending video/audio bytes. + // This method is only available via the gRPC API (not REST). 
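For orientation, a minimal sketch of how the unary client generated above might be called. The endpoint and dial options are placeholders (a real call needs TLS and OAuth credentials), the import path follows the vendored package layout, and the InputUri/Features request fields are assumed from the AnnotateVideoRequest message defined earlier in this generated file rather than shown in this hunk.

package main

import (
	"context"
	"log"

	videopb "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1"
	"google.golang.org/grpc"
)

func main() {
	// Placeholder, insecure connection for illustration only.
	conn, err := grpc.Dial("localhost:10000", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := videopb.NewVideoIntelligenceServiceClient(conn)

	// AnnotateVideo returns a google.longrunning.Operation; callers would
	// normally poll it (or use a higher-level wrapper) to obtain results.
	op, err := client.AnnotateVideo(context.Background(), &videopb.AnnotateVideoRequest{
		InputUri: "gs://example-bucket/example-video.mp4",            // placeholder input
		Features: []videopb.Feature{videopb.Feature_LABEL_DETECTION}, // assumed enum value
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("started long-running operation %q", op.GetName())
}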
+ StreamingAnnotateVideo(ctx context.Context, opts ...grpc.CallOption) (StreamingVideoIntelligenceService_StreamingAnnotateVideoClient, error) +} + +type streamingVideoIntelligenceServiceClient struct { + cc *grpc.ClientConn +} + +func NewStreamingVideoIntelligenceServiceClient(cc *grpc.ClientConn) StreamingVideoIntelligenceServiceClient { + return &streamingVideoIntelligenceServiceClient{cc} +} + +func (c *streamingVideoIntelligenceServiceClient) StreamingAnnotateVideo(ctx context.Context, opts ...grpc.CallOption) (StreamingVideoIntelligenceService_StreamingAnnotateVideoClient, error) { + stream, err := c.cc.NewStream(ctx, &_StreamingVideoIntelligenceService_serviceDesc.Streams[0], "/google.cloud.videointelligence.v1p3beta1.StreamingVideoIntelligenceService/StreamingAnnotateVideo", opts...) + if err != nil { + return nil, err + } + x := &streamingVideoIntelligenceServiceStreamingAnnotateVideoClient{stream} + return x, nil +} + +type StreamingVideoIntelligenceService_StreamingAnnotateVideoClient interface { + Send(*StreamingAnnotateVideoRequest) error + Recv() (*StreamingAnnotateVideoResponse, error) + grpc.ClientStream +} + +type streamingVideoIntelligenceServiceStreamingAnnotateVideoClient struct { + grpc.ClientStream +} + +func (x *streamingVideoIntelligenceServiceStreamingAnnotateVideoClient) Send(m *StreamingAnnotateVideoRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *streamingVideoIntelligenceServiceStreamingAnnotateVideoClient) Recv() (*StreamingAnnotateVideoResponse, error) { + m := new(StreamingAnnotateVideoResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// StreamingVideoIntelligenceServiceServer is the server API for StreamingVideoIntelligenceService service. +type StreamingVideoIntelligenceServiceServer interface { + // Performs video annotation with bidirectional streaming: emitting results + // while sending video/audio bytes. + // This method is only available via the gRPC API (not REST). 
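Similarly, a hedged sketch of the bidirectional streaming flow: the first Send carries a StreamingVideoConfig built with the oneof wrapper types shown above, later Sends would carry video bytes, and Recv yields streaming responses. The STREAMING_SHOT_CHANGE_DETECTION enum value and the StreamingAnnotateVideoRequest_VideoConfig request wrapper are assumed from the proto definitions earlier in this generated file; the endpoint is again a placeholder.

package main

import (
	"context"
	"io"
	"log"

	videopb "google.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:10000", grpc.WithInsecure()) // placeholder endpoint
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := videopb.NewStreamingVideoIntelligenceServiceClient(conn)
	stream, err := client.StreamingAnnotateVideo(context.Background())
	if err != nil {
		log.Fatal(err)
	}

	// The first request configures the session via the streaming_config oneof.
	cfg := &videopb.StreamingVideoConfig{
		Feature: videopb.StreamingFeature_STREAMING_SHOT_CHANGE_DETECTION, // assumed enum value
		StreamingConfig: &videopb.StreamingVideoConfig_ShotChangeDetectionConfig{
			ShotChangeDetectionConfig: &videopb.StreamingShotChangeDetectionConfig{},
		},
	}
	first := &videopb.StreamingAnnotateVideoRequest{
		// StreamingRequest / VideoConfig are the assumed names of the generated
		// oneof wrapper defined earlier in this file.
		StreamingRequest: &videopb.StreamingAnnotateVideoRequest_VideoConfig{VideoConfig: cfg},
	}
	if err := stream.Send(first); err != nil {
		log.Fatal(err)
	}
	// Follow-up Send calls would carry chunks of encoded video; omitted here.
	if err := stream.CloseSend(); err != nil {
		log.Fatal(err)
	}

	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("streaming response: %v", resp)
	}
}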
+ StreamingAnnotateVideo(StreamingVideoIntelligenceService_StreamingAnnotateVideoServer) error +} + +func RegisterStreamingVideoIntelligenceServiceServer(s *grpc.Server, srv StreamingVideoIntelligenceServiceServer) { + s.RegisterService(&_StreamingVideoIntelligenceService_serviceDesc, srv) +} + +func _StreamingVideoIntelligenceService_StreamingAnnotateVideo_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(StreamingVideoIntelligenceServiceServer).StreamingAnnotateVideo(&streamingVideoIntelligenceServiceStreamingAnnotateVideoServer{stream}) +} + +type StreamingVideoIntelligenceService_StreamingAnnotateVideoServer interface { + Send(*StreamingAnnotateVideoResponse) error + Recv() (*StreamingAnnotateVideoRequest, error) + grpc.ServerStream +} + +type streamingVideoIntelligenceServiceStreamingAnnotateVideoServer struct { + grpc.ServerStream +} + +func (x *streamingVideoIntelligenceServiceStreamingAnnotateVideoServer) Send(m *StreamingAnnotateVideoResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *streamingVideoIntelligenceServiceStreamingAnnotateVideoServer) Recv() (*StreamingAnnotateVideoRequest, error) { + m := new(StreamingAnnotateVideoRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _StreamingVideoIntelligenceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.videointelligence.v1p3beta1.StreamingVideoIntelligenceService", + HandlerType: (*StreamingVideoIntelligenceServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingAnnotateVideo", + Handler: _StreamingVideoIntelligenceService_StreamingAnnotateVideo_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/cloud/videointelligence/v1p3beta1/video_intelligence.proto", +} + +func init() { + proto.RegisterFile("google/cloud/videointelligence/v1p3beta1/video_intelligence.proto", fileDescriptor_video_intelligence_26bfd7f2669c7d48) +} + +var fileDescriptor_video_intelligence_26bfd7f2669c7d48 = []byte{ + // 2371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x5a, 0xcf, 0x6f, 0xe3, 0xc6, + 0xf5, 0x37, 0x25, 0xdb, 0x6b, 0x3d, 0xcb, 0xb6, 0x3c, 0xfe, 0x25, 0x3b, 0xf1, 0x8f, 0x70, 0xb3, + 0xf8, 0x3a, 0xfb, 0x2d, 0xe4, 0xac, 0x37, 0x0d, 0x9a, 0xcd, 0x8f, 0x46, 0x92, 0xb5, 0x96, 0xba, + 0xb6, 0xe5, 0xd0, 0xb2, 0x91, 0x6d, 0xb7, 0x20, 0x68, 0x6a, 0x44, 0xb3, 0x4b, 0x71, 0xb4, 0xe4, + 0xc8, 0xb1, 0x7b, 0x49, 0x50, 0xf4, 0x50, 0xa0, 0x87, 0x1e, 0x82, 0x02, 0x05, 0x0a, 0xe4, 0x0f, + 0xe8, 0xa5, 0x45, 0xaf, 0x3d, 0xb4, 0x28, 0x7a, 0x69, 0xd0, 0x5b, 0x91, 0x43, 0x8b, 0x1e, 0x7b, + 0x28, 0xd0, 0xff, 0xa0, 0xa7, 0x82, 0x33, 0x43, 0x8a, 0x22, 0x25, 0xdb, 0x92, 0xb3, 0xe8, 0x4d, + 0xf3, 0x66, 0xe6, 0xf3, 0x7e, 0xce, 0x7b, 0x6f, 0x86, 0x82, 0xbc, 0x41, 0x88, 0x61, 0xe1, 0x2d, + 0xdd, 0x22, 0xed, 0xfa, 0xd6, 0xb9, 0x59, 0xc7, 0xc4, 0xb4, 0x29, 0xb6, 0x2c, 0xd3, 0xc0, 0xb6, + 0x8e, 0xb7, 0xce, 0x1f, 0xb4, 0x1e, 0x9e, 0x62, 0xaa, 0x3d, 0xe0, 0x73, 0x6a, 0x78, 0x32, 0xd7, + 0x72, 0x08, 0x25, 0x68, 0x93, 0x43, 0xe4, 0x18, 0x44, 0x2e, 0x06, 0x91, 0x0b, 0x20, 0x56, 0x5e, + 0x15, 0xcc, 0xb4, 0x96, 0xb9, 0xa5, 0xd9, 0x36, 0xa1, 0x1a, 0x35, 0x89, 0xed, 0x72, 0x9c, 0x95, + 0xbb, 0x62, 0xd6, 0x22, 0xb6, 0xe1, 0xb4, 0x6d, 0xdb, 0xb4, 0x8d, 0x2d, 0xd2, 0xc2, 0x4e, 0xd7, + 0xa2, 0x35, 0xb1, 0x88, 0x8d, 0x4e, 0xdb, 0x8d, 0xad, 0x7a, 0x9b, 0x2f, 0x10, 0xf3, 0xeb, 0xd1, + 0x79, 0x6a, 0x36, 0xb1, 0x4b, 0xb5, 0x66, 0x4b, 
0x2c, 0x58, 0x12, 0x0b, 0x9c, 0x96, 0xbe, 0xe5, + 0x52, 0x8d, 0xb6, 0x05, 0xb2, 0xfc, 0xa7, 0x04, 0xcc, 0xe7, 0xb9, 0x50, 0xf8, 0xc4, 0x53, 0x42, + 0xc1, 0x2f, 0xda, 0xd8, 0xa5, 0xe8, 0x15, 0x48, 0x99, 0x76, 0xab, 0x4d, 0xd5, 0xb6, 0x63, 0x66, + 0xa5, 0x0d, 0x69, 0x33, 0xa5, 0x4c, 0x30, 0xc2, 0xb1, 0x63, 0xa2, 0xbb, 0x30, 0xc5, 0x27, 0x75, + 0x62, 0x53, 0x6c, 0xd3, 0xec, 0xf8, 0x86, 0xb4, 0x99, 0x56, 0xd2, 0x8c, 0x58, 0xe4, 0x34, 0xb4, + 0x0f, 0x13, 0x0d, 0xac, 0xd1, 0xb6, 0x83, 0xdd, 0x6c, 0x62, 0x23, 0xb9, 0x39, 0xbd, 0xfd, 0x20, + 0x77, 0x53, 0xa3, 0xe5, 0x1e, 0xf3, 0x9d, 0x4a, 0x00, 0x81, 0xbe, 0x07, 0x53, 0xdc, 0x19, 0x8c, + 0xe7, 0x05, 0xcd, 0x26, 0x37, 0xa4, 0xcd, 0xc9, 0xed, 0xb7, 0x6f, 0x8e, 0xc9, 0xf4, 0x2b, 0xf2, + 0xdd, 0x4a, 0xfa, 0x3c, 0x34, 0x42, 0xab, 0x00, 0xa4, 0x4d, 0x7d, 0x75, 0x47, 0x99, 0xba, 0x29, + 0x4e, 0xf1, 0xf4, 0x5d, 0x87, 0x49, 0x8b, 0xe8, 0xcc, 0xe2, 0xaa, 0x59, 0xcf, 0x8e, 0xb1, 0x79, + 0xf0, 0x49, 0x95, 0xba, 0xfc, 0xaf, 0x51, 0x48, 0x87, 0xe1, 0x91, 0x02, 0x13, 0x2e, 0x36, 0x9a, + 0xd8, 0xa6, 0x6e, 0x56, 0xda, 0x48, 0x0e, 0x21, 0xe8, 0x11, 0xdf, 0xae, 0x04, 0x38, 0x88, 0xc2, + 0xa2, 0xa5, 0x9d, 0x62, 0x4b, 0xad, 0x63, 0x8a, 0x75, 0x26, 0x8c, 0x4e, 0xec, 0x86, 0x69, 0x64, + 0x13, 0xcc, 0x14, 0x1f, 0xdc, 0x9c, 0xc3, 0x9e, 0x87, 0xb3, 0xe3, 0xc3, 0x14, 0x19, 0x8a, 0x32, + 0x6f, 0xf5, 0xa0, 0xa2, 0x1f, 0x4b, 0xf0, 0xaa, 0x7b, 0x46, 0xa8, 0xaa, 0x9f, 0x69, 0xb6, 0x81, + 0xe3, 0xcc, 0xb9, 0x1f, 0x8a, 0x37, 0x67, 0x7e, 0x74, 0x46, 0x68, 0x91, 0x81, 0x45, 0x25, 0x58, + 0x76, 0xfb, 0x4d, 0xa1, 0xcf, 0x25, 0x78, 0x0d, 0x5f, 0xb4, 0x2c, 0x53, 0x37, 0x83, 0xb0, 0x8b, + 0xcb, 0x32, 0xca, 0x64, 0x29, 0xdf, 0x5c, 0x96, 0x92, 0x80, 0x14, 0x41, 0x1b, 0x15, 0x68, 0x0d, + 0x5f, 0x39, 0x8f, 0x5e, 0xc0, 0x82, 0xe7, 0xee, 0xb8, 0x20, 0x13, 0x4c, 0x90, 0xf7, 0x6f, 0x2e, + 0x48, 0x0d, 0x5f, 0xc4, 0xb8, 0xcf, 0xd1, 0x38, 0x51, 0xfe, 0xb3, 0x04, 0xf3, 0xbd, 0xdc, 0x87, + 0x6c, 0x98, 0x8f, 0x86, 0x47, 0x93, 0xd4, 0x31, 0x3b, 0xbc, 0xd3, 0xdb, 0xef, 0x0d, 0x1b, 0x1c, + 0xfb, 0xa4, 0x8e, 0x15, 0x64, 0xc5, 0x68, 0xe8, 0xff, 0x61, 0xd6, 0xe5, 0xb9, 0x4c, 0x73, 0x2e, + 0x55, 0x5d, 0x6b, 0x62, 0x47, 0x63, 0x91, 0x38, 0xa1, 0x64, 0x3a, 0x13, 0x45, 0x46, 0x47, 0xf3, + 0x30, 0xe6, 0x09, 0x63, 0xb1, 0x68, 0x49, 0x29, 0x7c, 0x20, 0x3f, 0x80, 0xe5, 0xbe, 0xc1, 0xd0, + 0xd9, 0x22, 0x85, 0xb7, 0xbc, 0x0d, 0x6b, 0x57, 0xfb, 0xac, 0xcf, 0xbe, 0xf7, 0x60, 0xae, 0x87, + 0x89, 0xd1, 0x3d, 0x98, 0xb6, 0x34, 0xdb, 0x68, 0x6b, 0x06, 0x56, 0xcf, 0x4c, 0xff, 0xb4, 0xa6, + 0x94, 0x29, 0x9f, 0x5a, 0xf6, 0x88, 0xf2, 0x2f, 0x24, 0x71, 0xbe, 0xc5, 0xa9, 0x44, 0x25, 0xa6, + 0xbc, 0x43, 0x55, 0x2f, 0xd3, 0xaa, 0xa4, 0xd1, 0x70, 0x31, 0x65, 0x0c, 0x27, 0xb7, 0x97, 0x7d, + 0x4b, 0xfb, 0xd9, 0x38, 0xb7, 0x23, 0xb2, 0xb5, 0x32, 0xc3, 0xf6, 0xd4, 0xcc, 0x26, 0xae, 0xb2, + 0x1d, 0x28, 0x0f, 0x33, 0xd8, 0xae, 0x77, 0x81, 0x24, 0xae, 0x03, 0x99, 0xc2, 0x76, 0xbd, 0x03, + 0x21, 0x7f, 0x26, 0x41, 0x9a, 0x79, 0xcc, 0x17, 0xed, 0x10, 0xee, 0x88, 0x94, 0x21, 0x04, 0x1a, + 0x36, 0xf3, 0xf8, 0x30, 0x68, 0x0d, 0x80, 0x85, 0x75, 0xdd, 0x5b, 0xcd, 0x04, 0x4c, 0x28, 0x21, + 0x8a, 0x7c, 0x06, 0xc0, 0x24, 0x78, 0xec, 0x68, 0x4d, 0x8c, 0x1e, 0xc1, 0xe4, 0x40, 0x46, 0x01, + 0xda, 0xb1, 0xc7, 0x75, 0x9c, 0x2c, 0x18, 0x2f, 0xd9, 0xd4, 0xa4, 0x97, 0x5e, 0x7d, 0xc2, 0xec, + 0x97, 0x97, 0x90, 0x45, 0x7d, 0xe2, 0x84, 0x4a, 0x1d, 0x6d, 0xc0, 0x64, 0x1d, 0xbb, 0xba, 0x63, + 0xb6, 0x3c, 0x0e, 0x0c, 0x27, 0xa5, 0x84, 0x49, 0x5e, 0x05, 0x0b, 0xfc, 0xae, 0x7b, 0xa7, 0x84, + 0xc7, 0x65, 0xda, 0x27, 0x16, 0x49, 0x1d, 0xcb, 0xff, 0x48, 0xc0, 0x0c, 
0x53, 0x2c, 0x1f, 0x94, + 0x6d, 0x54, 0x86, 0x71, 0xce, 0x46, 0x28, 0xf6, 0xe6, 0x00, 0xb9, 0x86, 0xed, 0x53, 0xc4, 0x7e, + 0xf4, 0x7d, 0x98, 0xd5, 0x35, 0x8a, 0x0d, 0xe2, 0x5c, 0xaa, 0x8c, 0x64, 0x8a, 0x42, 0x39, 0x0c, + 0x68, 0xc6, 0x87, 0x2a, 0x09, 0xa4, 0xae, 0x0a, 0x94, 0x1c, 0xb4, 0x02, 0x85, 0x03, 0x2a, 0x54, + 0x81, 0xf6, 0x60, 0xbc, 0xe1, 0xf9, 0xd8, 0xcd, 0x8e, 0x32, 0xc4, 0xb7, 0x06, 0x44, 0x64, 0x01, + 0xa2, 0x08, 0x0c, 0xf9, 0xf7, 0x12, 0xcc, 0x47, 0xce, 0xf2, 0xed, 0x23, 0xe8, 0x39, 0x2c, 0xb6, + 0x88, 0x63, 0x13, 0xc3, 0xd1, 0x5a, 0x67, 0x97, 0xaa, 0x65, 0x3e, 0xc7, 0x96, 0x79, 0x46, 0x48, + 0x9d, 0x45, 0xc1, 0xf4, 0x40, 0x22, 0x07, 0x7b, 0x95, 0x85, 0x10, 0x66, 0x87, 0x2c, 0xbb, 0xb0, + 0x1c, 0x51, 0x20, 0x14, 0x29, 0x27, 0x81, 0xb1, 0x78, 0x03, 0xf0, 0xc1, 0xd0, 0x55, 0xa9, 0xdb, + 0x6c, 0xcf, 0x61, 0xe1, 0x80, 0x38, 0x4d, 0xcd, 0x32, 0x7f, 0x88, 0xeb, 0x05, 0xd2, 0xb6, 0xeb, + 0xa6, 0x6d, 0x14, 0xc8, 0x05, 0x42, 0x30, 0x6a, 0xe1, 0x06, 0xb7, 0x57, 0x42, 0x61, 0xbf, 0x51, + 0x06, 0x92, 0x94, 0xb4, 0xc4, 0x49, 0xf2, 0x7e, 0x7a, 0xe9, 0xd1, 0x31, 0x8d, 0x33, 0xde, 0x3f, + 0x25, 0x14, 0x3e, 0x40, 0x8b, 0x30, 0x7e, 0x4a, 0x28, 0x25, 0x4d, 0x56, 0x42, 0x13, 0x8a, 0x18, + 0xc9, 0xff, 0x1e, 0x87, 0x45, 0x96, 0x14, 0x3a, 0x8a, 0x29, 0xd8, 0x6d, 0x5b, 0xd4, 0xbd, 0xba, + 0x43, 0x6c, 0xc3, 0xb2, 0x88, 0x1a, 0x95, 0x17, 0xa5, 0x50, 0xe7, 0x2b, 0x82, 0xfc, 0x9d, 0x01, + 0x83, 0x27, 0x24, 0xc1, 0x92, 0xc0, 0x8e, 0xd0, 0x5d, 0x44, 0x60, 0x91, 0xf5, 0x2a, 0x71, 0x9e, + 0xc9, 0xdb, 0xf2, 0x9c, 0xf7, 0x80, 0x63, 0x0c, 0x5f, 0xc0, 0x12, 0x73, 0x4b, 0x0f, 0x8e, 0xa3, + 0xb7, 0xe5, 0xb8, 0xc0, 0x90, 0x63, 0x2c, 0x35, 0xc8, 0x30, 0x1d, 0xc3, 0xbc, 0xc6, 0x6f, 0xd5, + 0x62, 0xce, 0x78, 0x78, 0x61, 0x16, 0x14, 0xe6, 0x82, 0x5e, 0xab, 0xc3, 0x26, 0x7b, 0x67, 0xd0, + 0x4e, 0xaf, 0xef, 0xe1, 0x50, 0x90, 0x8f, 0x1f, 0x3a, 0x30, 0x3a, 0x64, 0x58, 0x33, 0x15, 0x56, + 0x2c, 0xcd, 0x14, 0xfb, 0xd6, 0x60, 0x7d, 0x54, 0x88, 0xcf, 0x0c, 0xed, 0x1a, 0x7b, 0x0e, 0x43, + 0xe4, 0xf4, 0x07, 0x58, 0xef, 0x66, 0x33, 0xcd, 0xd8, 0x14, 0x6e, 0xce, 0xa6, 0xca, 0x30, 0x6a, + 0x8e, 0xa6, 0x3f, 0x37, 0x6d, 0x23, 0xc4, 0x70, 0x96, 0xa3, 0x87, 0x59, 0x6e, 0xc2, 0x18, 0x76, + 0x1c, 0xe2, 0x64, 0x53, 0xcc, 0x7e, 0xc8, 0xe7, 0xe2, 0xb4, 0xf4, 0xdc, 0x11, 0xbb, 0x8c, 0x29, + 0x7c, 0x81, 0xfc, 0x13, 0x09, 0x16, 0x22, 0xb7, 0x31, 0xb7, 0x45, 0x6c, 0x17, 0x23, 0x02, 0xa8, + 0x23, 0xaf, 0xea, 0xf0, 0x23, 0x28, 0x12, 0xcb, 0x87, 0x03, 0xba, 0x3d, 0x76, 0x94, 0x95, 0x59, + 0x2d, 0x4a, 0x92, 0xff, 0x26, 0xc1, 0x52, 0x64, 0xf5, 0xa1, 0x43, 0x0c, 0x07, 0xbb, 0xd7, 0x9c, + 0xfc, 0x37, 0x20, 0xd3, 0x12, 0x0b, 0xd5, 0x16, 0x76, 0x74, 0xaf, 0x0f, 0xf1, 0xd2, 0xcf, 0x98, + 0x32, 0xe3, 0xd3, 0x0f, 0x39, 0x19, 0xbd, 0x03, 0xd0, 0x69, 0xa2, 0xc4, 0x3d, 0x62, 0x25, 0x96, + 0xe6, 0x6b, 0xfe, 0x5d, 0x56, 0x49, 0x05, 0xed, 0x13, 0x7a, 0x17, 0x26, 0xdb, 0xad, 0xba, 0x46, + 0x31, 0xdf, 0x3b, 0x7a, 0xed, 0x5e, 0xe0, 0xcb, 0x3d, 0x82, 0xfc, 0xd3, 0xa8, 0x99, 0x03, 0xcd, + 0x1c, 0x98, 0x0b, 0x99, 0xd9, 0x97, 0x57, 0xd8, 0x39, 0x3f, 0xb4, 0x9d, 0x7d, 0x7c, 0x25, 0xe4, + 0x44, 0x9f, 0x26, 0xe7, 0x20, 0xd3, 0xc9, 0xe7, 0x27, 0xd8, 0xa1, 0xf8, 0x02, 0xa5, 0x41, 0xba, + 0x10, 0x79, 0x5c, 0x62, 0xa3, 0x4b, 0x91, 0xc2, 0xa5, 0x4b, 0xb9, 0x05, 0x8b, 0xf1, 0xfc, 0x7f, + 0x48, 0xac, 0x4b, 0x74, 0x02, 0x13, 0xe7, 0xd8, 0xa1, 0xa6, 0x1e, 0xd4, 0x9c, 0x47, 0x37, 0x17, + 0x39, 0x2a, 0x83, 0x12, 0x60, 0xc9, 0x7f, 0x91, 0x60, 0xd2, 0x3b, 0x57, 0xff, 0xb3, 0x0e, 0x13, + 0x3d, 0x09, 0x6a, 0x29, 0xcf, 0xe3, 0x0f, 0x07, 0x4b, 0x08, 0xdd, 0x05, 0xf4, 0x77, 0x12, 0xa4, + 
0x02, 0x2a, 0x72, 0x60, 0xde, 0x61, 0x91, 0x50, 0x57, 0x4f, 0x85, 0x31, 0xd5, 0x53, 0x72, 0x21, + 0x34, 0xfb, 0x70, 0x18, 0x03, 0x86, 0x9d, 0xa2, 0x20, 0x81, 0x1e, 0xae, 0xd4, 0x91, 0x06, 0x27, + 0x31, 0x40, 0x83, 0x23, 0x7f, 0x02, 0xd3, 0xdd, 0x39, 0xce, 0xab, 0xfb, 0xec, 0x41, 0x84, 0x9f, + 0x44, 0xf6, 0x1b, 0x7d, 0x14, 0xea, 0xfe, 0x78, 0xb9, 0xfd, 0xe6, 0x60, 0x26, 0x8b, 0x35, 0x7f, + 0xf2, 0x97, 0x12, 0xcc, 0x75, 0xa7, 0x3d, 0x6e, 0xc0, 0x4f, 0x60, 0xc9, 0x0e, 0x54, 0xef, 0x65, + 0xc3, 0x6f, 0xdf, 0xc6, 0x86, 0x05, 0x72, 0xa1, 0x2c, 0xd8, 0x3d, 0xfb, 0x9d, 0xdb, 0x58, 0xf1, + 0xef, 0x09, 0xc8, 0xf6, 0xcb, 0xe1, 0x5f, 0x63, 0x8f, 0xdf, 0x1d, 0xd7, 0xa3, 0xb1, 0xb8, 0x3e, + 0x0e, 0xe2, 0x9a, 0x3b, 0xe9, 0xfd, 0x61, 0x2b, 0x50, 0x57, 0x84, 0x23, 0xa5, 0x73, 0x40, 0x93, + 0xb7, 0x39, 0xa0, 0xe5, 0x91, 0xce, 0x11, 0x7d, 0x05, 0x26, 0xa8, 0xc7, 0xcc, 0x7f, 0x00, 0x4b, + 0x7a, 0x93, 0x8c, 0x52, 0xa9, 0x17, 0xd2, 0x00, 0x62, 0xd2, 0x6e, 0x10, 0xf9, 0x0f, 0x12, 0xac, + 0x1e, 0x51, 0x07, 0x6b, 0xcd, 0x8e, 0x5d, 0xbb, 0x5f, 0x17, 0x75, 0x48, 0x07, 0x8f, 0x79, 0x0d, + 0xd3, 0x10, 0x76, 0x1e, 0xa0, 0x43, 0x0e, 0xe0, 0xfd, 0x57, 0xb7, 0x86, 0x69, 0x94, 0x47, 0x94, + 0xc9, 0xf3, 0xce, 0x10, 0xdd, 0x8b, 0xbe, 0x52, 0x7a, 0x11, 0x92, 0x2e, 0x8f, 0x74, 0xbf, 0x53, + 0x16, 0xe6, 0xbc, 0xab, 0xbc, 0x40, 0x53, 0x1d, 0x2e, 0xa0, 0xfc, 0x1f, 0x09, 0xd6, 0xfa, 0xa9, + 0x20, 0x4a, 0x72, 0x50, 0xd6, 0xa5, 0x6b, 0xca, 0x3a, 0xba, 0xe8, 0x59, 0xbc, 0x79, 0xbc, 0x56, + 0x86, 0xd5, 0xf9, 0x26, 0x55, 0x1c, 0xbd, 0x05, 0x8b, 0x71, 0xce, 0xac, 0x6c, 0xf3, 0xfb, 0xee, + 0x7c, 0x6c, 0xcb, 0xb1, 0x63, 0xca, 0x6f, 0xc0, 0xff, 0x05, 0xbc, 0xae, 0x7e, 0x6c, 0x91, 0xf7, + 0x42, 0x9e, 0xee, 0xf9, 0x2a, 0xd5, 0xf3, 0x95, 0x48, 0xea, 0xfd, 0x4a, 0x24, 0xaf, 0x87, 0xd0, + 0xba, 0xe3, 0x5b, 0xb0, 0x7b, 0x1d, 0xe4, 0x60, 0x41, 0xdf, 0x97, 0x23, 0xf9, 0x37, 0x12, 0x2c, + 0x76, 0x96, 0x51, 0xe2, 0xb0, 0x0b, 0x3d, 0x13, 0x67, 0x17, 0x36, 0xb0, 0xad, 0x9d, 0x5a, 0x58, + 0x75, 0x39, 0x5d, 0x8d, 0xd9, 0x47, 0x48, 0xb7, 0xca, 0xd7, 0x89, 0xed, 0x51, 0x6b, 0xa3, 0x7d, + 0xb8, 0x1b, 0xdb, 0x19, 0x60, 0xd6, 0x4d, 0x07, 0xeb, 0x94, 0x38, 0x97, 0xc2, 0xcc, 0x1b, 0x51, + 0x33, 0x0b, 0xd4, 0x1d, 0x7f, 0x9d, 0xfc, 0x55, 0x12, 0xd6, 0xaf, 0xf1, 0x6f, 0xcf, 0xc6, 0x5f, + 0xfa, 0x7a, 0x1b, 0xff, 0x06, 0xcc, 0xbe, 0x84, 0xeb, 0x5a, 0xc6, 0x8a, 0xde, 0x61, 0xfa, 0x5c, + 0x30, 0x92, 0x2f, 0xf7, 0x82, 0xd1, 0xbb, 0xf7, 0x1f, 0x7d, 0x89, 0xbd, 0xbf, 0xfc, 0xc5, 0x38, + 0xcc, 0xf7, 0xca, 0x55, 0xa8, 0x06, 0x77, 0xc4, 0xa7, 0x0d, 0xf1, 0x40, 0xfb, 0x68, 0x88, 0x44, + 0xe0, 0x7f, 0x25, 0xf1, 0xa1, 0xd0, 0xcf, 0xae, 0x7b, 0xac, 0xe7, 0x49, 0x67, 0x6f, 0x08, 0x5e, + 0x7d, 0x8f, 0x5b, 0x79, 0xe4, 0xaa, 0x77, 0xfb, 0x4f, 0xfb, 0x7e, 0xb4, 0xe0, 0xce, 0xde, 0x1d, + 0x42, 0x94, 0x5e, 0x89, 0xa6, 0x3c, 0xd2, 0xe7, 0xfb, 0xc5, 0x17, 0x03, 0x7c, 0x38, 0xf8, 0x68, + 0x08, 0x61, 0xae, 0x4e, 0x90, 0xe5, 0x91, 0x6b, 0xbf, 0x21, 0x7c, 0x0a, 0x8b, 0x22, 0x2a, 0xa9, + 0x08, 0x29, 0x5f, 0xa8, 0xb1, 0xa1, 0x2d, 0xd4, 0x2b, 0x79, 0x7a, 0x16, 0x22, 0x3d, 0xe8, 0xc8, + 0x80, 0x69, 0x3f, 0x71, 0x09, 0xc6, 0x6b, 0x83, 0xf6, 0xbe, 0xbd, 0xb3, 0xad, 0x32, 0xe5, 0x86, + 0x87, 0x05, 0x04, 0x99, 0x4e, 0xa5, 0xe5, 0xac, 0xee, 0xff, 0x52, 0x82, 0x3b, 0x22, 0x8c, 0xd1, + 0x12, 0xcc, 0x3d, 0x2e, 0xe5, 0x6b, 0xc7, 0x4a, 0x49, 0x3d, 0x3e, 0x38, 0x3a, 0x2c, 0x15, 0x2b, + 0x8f, 0x2b, 0xa5, 0x9d, 0xcc, 0x08, 0x9a, 0x83, 0x99, 0xbd, 0x7c, 0xa1, 0xb4, 0xa7, 0xee, 0x94, + 0x6a, 0xa5, 0x62, 0xad, 0x52, 0x3d, 0xc8, 0x48, 0x68, 0x19, 0x16, 0x8e, 0xca, 0xd5, 0x9a, 0x5a, + 0x2c, 0xe7, 0x0f, 0x76, 
0x4b, 0xa1, 0xa9, 0x04, 0x5a, 0x83, 0x95, 0xd2, 0xc7, 0x87, 0x7b, 0x95, + 0x62, 0xa5, 0xa6, 0x16, 0xab, 0x07, 0xb5, 0xd2, 0x41, 0x2d, 0x34, 0x9f, 0x44, 0x08, 0xa6, 0x6b, + 0xa5, 0x8f, 0xc3, 0xb4, 0x3b, 0x1e, 0x8f, 0x6a, 0xe1, 0x3b, 0xa5, 0x62, 0x4d, 0xad, 0x29, 0xf9, + 0xe2, 0x93, 0xca, 0xc1, 0x6e, 0x26, 0x75, 0xdf, 0x01, 0x14, 0xff, 0x1a, 0x82, 0x5e, 0x87, 0x8d, + 0x88, 0x38, 0xea, 0x7e, 0x75, 0x27, 0x2a, 0xf4, 0x14, 0xa4, 0x98, 0x7c, 0xde, 0x54, 0x46, 0x42, + 0xd3, 0x00, 0x8f, 0x95, 0xfc, 0x7e, 0x89, 0x8f, 0x13, 0x9e, 0xb2, 0x6c, 0x3a, 0x7f, 0xb0, 0xa3, + 0x86, 0x26, 0x92, 0xf7, 0x29, 0x40, 0xe7, 0x89, 0x11, 0xad, 0xc0, 0xe2, 0x5e, 0xe5, 0x49, 0x69, + 0xaf, 0x52, 0xae, 0x56, 0x77, 0x22, 0x1c, 0x66, 0x61, 0xea, 0xa4, 0xa4, 0x3c, 0x55, 0x8f, 0x0f, + 0xd8, 0x92, 0xa7, 0x19, 0x09, 0xa5, 0x61, 0x22, 0x18, 0x25, 0xbc, 0xd1, 0x61, 0xf5, 0xe8, 0xa8, + 0x52, 0xd8, 0x2b, 0x65, 0x92, 0x08, 0x60, 0x5c, 0xcc, 0x8c, 0xa2, 0x19, 0x98, 0x64, 0x5b, 0x05, + 0x61, 0xec, 0xfe, 0x1f, 0x25, 0xc8, 0x44, 0xf3, 0x0a, 0x7a, 0x0d, 0x56, 0x8f, 0x6a, 0x4a, 0x29, + 0xbf, 0x5f, 0x39, 0xd8, 0x55, 0x7b, 0xbb, 0x66, 0x15, 0x96, 0x3b, 0x4b, 0xe2, 0x4e, 0xba, 0x0b, + 0xeb, 0x9d, 0xe9, 0x7e, 0xee, 0xda, 0x84, 0xd7, 0x3b, 0x8b, 0xae, 0x74, 0x5c, 0x17, 0xb7, 0xa8, + 0xbb, 0x46, 0xb7, 0x7f, 0x2b, 0x41, 0x96, 0x25, 0xd9, 0x4a, 0x28, 0x4c, 0x8f, 0xb0, 0x73, 0x6e, + 0xea, 0x18, 0xfd, 0x5c, 0x82, 0xa9, 0xae, 0x4e, 0x0e, 0x0d, 0xd0, 0x6f, 0xf6, 0xea, 0x62, 0x57, + 0x56, 0xfd, 0xfd, 0xa1, 0x8f, 0xf7, 0xb9, 0xaa, 0xff, 0xf1, 0x5e, 0xbe, 0xf7, 0xa3, 0xbf, 0xfe, + 0xf3, 0xf3, 0xc4, 0xba, 0xbc, 0x12, 0xfd, 0x3f, 0x81, 0xfb, 0x48, 0xd4, 0x1f, 0xfc, 0x48, 0xba, + 0xbf, 0xfd, 0x95, 0x04, 0xaf, 0x75, 0x97, 0x88, 0x5e, 0xd2, 0xff, 0x3a, 0xdc, 0xd3, 0x74, 0xab, + 0x31, 0x4c, 0x82, 0xe8, 0xa9, 0x4f, 0xf9, 0xf6, 0x40, 0xbc, 0x37, 0xde, 0x94, 0xde, 0x94, 0x0a, + 0x9f, 0x25, 0xe0, 0x1b, 0x3a, 0x69, 0xde, 0x18, 0xb3, 0xb0, 0xda, 0x4f, 0xf9, 0x43, 0xef, 0x2a, + 0x77, 0x28, 0x7d, 0xf7, 0xa9, 0x80, 0x32, 0x88, 0xa5, 0xd9, 0x46, 0x8e, 0x38, 0xc6, 0x96, 0x81, + 0x6d, 0x76, 0xd1, 0xdb, 0xe2, 0x53, 0x5a, 0xcb, 0x74, 0xaf, 0xff, 0x57, 0xc7, 0xbb, 0xb1, 0xb9, + 0x5f, 0x25, 0x36, 0x77, 0x39, 0x76, 0x91, 0x89, 0x19, 0x93, 0x24, 0x77, 0xf2, 0xe0, 0xf0, 0x61, + 0xc1, 0xdb, 0xfc, 0xa5, 0xbf, 0xf4, 0x19, 0x5b, 0xfa, 0x2c, 0xb6, 0xf4, 0xd9, 0x89, 0xcf, 0xe7, + 0x74, 0x9c, 0xc9, 0xf6, 0xf0, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x6b, 0xcb, 0x43, 0xd9, 0x70, + 0x22, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/geometry.pb.go new file mode 100644 index 0000000..2c8383c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/geometry.pb.go @@ -0,0 +1,264 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/geometry.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_e8a7714abee99ced, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_e8a7714abee99ced, []int{1} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon for the detected image annotation. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + // The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,2,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_e8a7714abee99ced, []int{2} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +func (m *BoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// A 3D position in the image, used primarily for Face detection landmarks. +// A valid Position must have both x and y coordinates. +// The position coordinates are in the same scale as the original image. +type Position struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + // Z coordinate (or depth). 
+ Z float32 `protobuf:"fixed32,3,opt,name=z,proto3" json:"z,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_e8a7714abee99ced, []int{3} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Position) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Position) GetZ() float32 { + if m != nil { + return m.Z + } + return 0 +} + +func init() { + proto.RegisterType((*Vertex)(nil), "google.cloud.vision.v1.Vertex") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.vision.v1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.vision.v1.BoundingPoly") + proto.RegisterType((*Position)(nil), "google.cloud.vision.v1.Position") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/geometry.proto", fileDescriptor_geometry_e8a7714abee99ced) +} + +var fileDescriptor_geometry_e8a7714abee99ced = []byte{ + // 300 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xc1, 0x4b, 0xc3, 0x30, + 0x14, 0xc6, 0x49, 0xa7, 0x63, 0xc4, 0x09, 0x52, 0x41, 0xca, 0x10, 0x19, 0x43, 0xa1, 0xa7, 0x84, + 0xa9, 0x27, 0xf5, 0x54, 0x0f, 0xbb, 0x8d, 0xd2, 0xc3, 0x40, 0x2f, 0x12, 0xdb, 0x10, 0x02, 0x6d, + 0x5e, 0x49, 0xd3, 0xb2, 0xf6, 0xcf, 0x11, 0xfc, 0x1f, 0x3d, 0x4a, 0x9b, 0x52, 0x58, 0x71, 0xde, + 0xf2, 0x25, 0xbf, 0xf7, 0xbd, 0x97, 0xef, 0xe1, 0x3b, 0x01, 0x20, 0x52, 0x4e, 0xe3, 0x14, 0xca, + 0x84, 0x56, 0xb2, 0x90, 0xa0, 0x68, 0xb5, 0xa6, 0x82, 0x43, 0xc6, 0x8d, 0xae, 0x49, 0xae, 0xc1, + 0x80, 0x7b, 0x65, 0x31, 0xd2, 0x61, 0xc4, 0x62, 0xa4, 0x5a, 0x2f, 0xae, 0xfb, 0x72, 0x96, 0x4b, + 0xca, 0x94, 0x02, 0xc3, 0x8c, 0x04, 0x55, 0xd8, 0xaa, 0xd5, 0x2d, 0x9e, 0xee, 0xb8, 0x36, 0x7c, + 0xef, 0xce, 0x31, 0xda, 0x7b, 0x68, 0x89, 0xfc, 0xd3, 0x08, 0x75, 0xaa, 0xf6, 0x1c, 0xab, 0xea, + 0x15, 0xc1, 0x17, 0x5b, 0xd0, 0x19, 0x4b, 0x65, 0xc3, 0x93, 0x31, 0xef, 0x1c, 0xf0, 0x4e, 0xcb, + 0x7f, 0x23, 0x3c, 0x0f, 0xa0, 0x54, 0x89, 0x54, 0x22, 0x84, 0xb4, 0x76, 0x9f, 0xf0, 0xac, 0xe2, + 0xda, 0xc8, 0x98, 0x17, 0x1e, 0x5a, 0x4e, 0xfc, 0xb3, 0xfb, 0x1b, 0xf2, 0xf7, 0xbc, 0xc4, 0xda, + 0x47, 0x03, 0xef, 0xbe, 0xe1, 0x4b, 0x35, 0x34, 0xff, 0x18, 0x6c, 0x9c, 0xce, 0xc6, 0x3f, 0x66, + 0x33, 0x9e, 0x37, 0x72, 0xd5, 0xc1, 0x4d, 0xeb, 0xb1, 0x7a, 0xc4, 0xb3, 0x10, 0x0a, 0xd9, 0x06, + 0xf2, 0xdf, 0x7f, 0x5a, 0xd5, 0x78, 0x13, 0xab, 0x9a, 0xa0, 0xc4, 0x8b, 0x18, 0xb2, 0x23, 0x8d, + 0x83, 0xf3, 0x4d, 0xbf, 0x97, 0xb0, 0x0d, 0x38, 0x44, 0xef, 0x2f, 0x3d, 0x28, 0x20, 0x65, 0x4a, + 0x10, 0xd0, 0x82, 0x0a, 0xae, 0xba, 0xf8, 0xa9, 0x7d, 0x62, 0xb9, 0x2c, 0xc6, 0xeb, 0x7d, 0xb6, + 0xa7, 0x1f, 0x84, 
0xbe, 0x9c, 0x93, 0xcd, 0xeb, 0x6e, 0xfb, 0x39, 0xed, 0x4a, 0x1e, 0x7e, 0x03, + 0x00, 0x00, 0xff, 0xff, 0x5b, 0xbb, 0x2b, 0x5f, 0x10, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/image_annotator.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/image_annotator.pb.go new file mode 100644 index 0000000..eaeb3c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/image_annotator.pb.go @@ -0,0 +1,3223 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/image_annotator.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import color "google.golang.org/genproto/googleapis/type/color" +import latlng "google.golang.org/genproto/googleapis/type/latlng" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // It is very unlikely that the image belongs to the specified vertical. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // It is unlikely that the image belongs to the specified vertical. + Likelihood_UNLIKELY Likelihood = 2 + // It is possible that the image belongs to the specified vertical. + Likelihood_POSSIBLE Likelihood = 3 + // It is likely that the image belongs to the specified vertical. + Likelihood_LIKELY Likelihood = 4 + // It is very likely that the image belongs to the specified vertical. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{0} +} + +// Type of Google Cloud Vision API feature to be extracted. +type Feature_Type int32 + +const ( + // Unspecified feature type. + Feature_TYPE_UNSPECIFIED Feature_Type = 0 + // Run face detection. + Feature_FACE_DETECTION Feature_Type = 1 + // Run landmark detection. + Feature_LANDMARK_DETECTION Feature_Type = 2 + // Run logo detection. + Feature_LOGO_DETECTION Feature_Type = 3 + // Run label detection. 
+ Feature_LABEL_DETECTION Feature_Type = 4 + // Run text detection / optical character recognition (OCR). Text detection + // is optimized for areas of text within a larger image; if the image is + // a document, use `DOCUMENT_TEXT_DETECTION` instead. + Feature_TEXT_DETECTION Feature_Type = 5 + // Run dense text document OCR. Takes precedence when both + // `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. + Feature_DOCUMENT_TEXT_DETECTION Feature_Type = 11 + // Run Safe Search to detect potentially unsafe + // or undesirable content. + Feature_SAFE_SEARCH_DETECTION Feature_Type = 6 + // Compute a set of image properties, such as the + // image's dominant colors. + Feature_IMAGE_PROPERTIES Feature_Type = 7 + // Run crop hints. + Feature_CROP_HINTS Feature_Type = 9 + // Run web detection. + Feature_WEB_DETECTION Feature_Type = 10 + // Run Product Search. + Feature_PRODUCT_SEARCH Feature_Type = 12 + // Run localizer for object detection. + Feature_OBJECT_LOCALIZATION Feature_Type = 19 +) + +var Feature_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "FACE_DETECTION", + 2: "LANDMARK_DETECTION", + 3: "LOGO_DETECTION", + 4: "LABEL_DETECTION", + 5: "TEXT_DETECTION", + 11: "DOCUMENT_TEXT_DETECTION", + 6: "SAFE_SEARCH_DETECTION", + 7: "IMAGE_PROPERTIES", + 9: "CROP_HINTS", + 10: "WEB_DETECTION", + 12: "PRODUCT_SEARCH", + 19: "OBJECT_LOCALIZATION", +} +var Feature_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "FACE_DETECTION": 1, + "LANDMARK_DETECTION": 2, + "LOGO_DETECTION": 3, + "LABEL_DETECTION": 4, + "TEXT_DETECTION": 5, + "DOCUMENT_TEXT_DETECTION": 11, + "SAFE_SEARCH_DETECTION": 6, + "IMAGE_PROPERTIES": 7, + "CROP_HINTS": 9, + "WEB_DETECTION": 10, + "PRODUCT_SEARCH": 12, + "OBJECT_LOCALIZATION": 19, +} + +func (x Feature_Type) String() string { + return proto.EnumName(Feature_Type_name, int32(x)) +} +func (Feature_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{0, 0} +} + +// Face landmark (feature) type. +// Left and right are defined from the vantage of the viewer of the image +// without considering mirror projections typical of photos. So, `LEFT_EYE`, +// typically, is the person's right eye. +type FaceAnnotation_Landmark_Type int32 + +const ( + // Unknown face landmark detected. Should not be filled. + FaceAnnotation_Landmark_UNKNOWN_LANDMARK FaceAnnotation_Landmark_Type = 0 + // Left eye. + FaceAnnotation_Landmark_LEFT_EYE FaceAnnotation_Landmark_Type = 1 + // Right eye. + FaceAnnotation_Landmark_RIGHT_EYE FaceAnnotation_Landmark_Type = 2 + // Left of left eyebrow. + FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 3 + // Right of left eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 4 + // Left of right eyebrow. + FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 5 + // Right of right eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 6 + // Midpoint between eyes. + FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES FaceAnnotation_Landmark_Type = 7 + // Nose tip. + FaceAnnotation_Landmark_NOSE_TIP FaceAnnotation_Landmark_Type = 8 + // Upper lip. + FaceAnnotation_Landmark_UPPER_LIP FaceAnnotation_Landmark_Type = 9 + // Lower lip. + FaceAnnotation_Landmark_LOWER_LIP FaceAnnotation_Landmark_Type = 10 + // Mouth left. + FaceAnnotation_Landmark_MOUTH_LEFT FaceAnnotation_Landmark_Type = 11 + // Mouth right. 
+ FaceAnnotation_Landmark_MOUTH_RIGHT FaceAnnotation_Landmark_Type = 12 + // Mouth center. + FaceAnnotation_Landmark_MOUTH_CENTER FaceAnnotation_Landmark_Type = 13 + // Nose, bottom right. + FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT FaceAnnotation_Landmark_Type = 14 + // Nose, bottom left. + FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT FaceAnnotation_Landmark_Type = 15 + // Nose, bottom center. + FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER FaceAnnotation_Landmark_Type = 16 + // Left eye, top boundary. + FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 17 + // Left eye, right corner. + FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 18 + // Left eye, bottom boundary. + FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 19 + // Left eye, left corner. + FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 20 + // Right eye, top boundary. + FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 21 + // Right eye, right corner. + FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 22 + // Right eye, bottom boundary. + FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 23 + // Right eye, left corner. + FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 24 + // Left eyebrow, upper midpoint. + FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 25 + // Right eyebrow, upper midpoint. + FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 26 + // Left ear tragion. + FaceAnnotation_Landmark_LEFT_EAR_TRAGION FaceAnnotation_Landmark_Type = 27 + // Right ear tragion. + FaceAnnotation_Landmark_RIGHT_EAR_TRAGION FaceAnnotation_Landmark_Type = 28 + // Left eye pupil. + FaceAnnotation_Landmark_LEFT_EYE_PUPIL FaceAnnotation_Landmark_Type = 29 + // Right eye pupil. + FaceAnnotation_Landmark_RIGHT_EYE_PUPIL FaceAnnotation_Landmark_Type = 30 + // Forehead glabella. + FaceAnnotation_Landmark_FOREHEAD_GLABELLA FaceAnnotation_Landmark_Type = 31 + // Chin gnathion. + FaceAnnotation_Landmark_CHIN_GNATHION FaceAnnotation_Landmark_Type = 32 + // Chin left gonion. + FaceAnnotation_Landmark_CHIN_LEFT_GONION FaceAnnotation_Landmark_Type = 33 + // Chin right gonion. 
+ FaceAnnotation_Landmark_CHIN_RIGHT_GONION FaceAnnotation_Landmark_Type = 34 +) + +var FaceAnnotation_Landmark_Type_name = map[int32]string{ + 0: "UNKNOWN_LANDMARK", + 1: "LEFT_EYE", + 2: "RIGHT_EYE", + 3: "LEFT_OF_LEFT_EYEBROW", + 4: "RIGHT_OF_LEFT_EYEBROW", + 5: "LEFT_OF_RIGHT_EYEBROW", + 6: "RIGHT_OF_RIGHT_EYEBROW", + 7: "MIDPOINT_BETWEEN_EYES", + 8: "NOSE_TIP", + 9: "UPPER_LIP", + 10: "LOWER_LIP", + 11: "MOUTH_LEFT", + 12: "MOUTH_RIGHT", + 13: "MOUTH_CENTER", + 14: "NOSE_BOTTOM_RIGHT", + 15: "NOSE_BOTTOM_LEFT", + 16: "NOSE_BOTTOM_CENTER", + 17: "LEFT_EYE_TOP_BOUNDARY", + 18: "LEFT_EYE_RIGHT_CORNER", + 19: "LEFT_EYE_BOTTOM_BOUNDARY", + 20: "LEFT_EYE_LEFT_CORNER", + 21: "RIGHT_EYE_TOP_BOUNDARY", + 22: "RIGHT_EYE_RIGHT_CORNER", + 23: "RIGHT_EYE_BOTTOM_BOUNDARY", + 24: "RIGHT_EYE_LEFT_CORNER", + 25: "LEFT_EYEBROW_UPPER_MIDPOINT", + 26: "RIGHT_EYEBROW_UPPER_MIDPOINT", + 27: "LEFT_EAR_TRAGION", + 28: "RIGHT_EAR_TRAGION", + 29: "LEFT_EYE_PUPIL", + 30: "RIGHT_EYE_PUPIL", + 31: "FOREHEAD_GLABELLA", + 32: "CHIN_GNATHION", + 33: "CHIN_LEFT_GONION", + 34: "CHIN_RIGHT_GONION", +} +var FaceAnnotation_Landmark_Type_value = map[string]int32{ + "UNKNOWN_LANDMARK": 0, + "LEFT_EYE": 1, + "RIGHT_EYE": 2, + "LEFT_OF_LEFT_EYEBROW": 3, + "RIGHT_OF_LEFT_EYEBROW": 4, + "LEFT_OF_RIGHT_EYEBROW": 5, + "RIGHT_OF_RIGHT_EYEBROW": 6, + "MIDPOINT_BETWEEN_EYES": 7, + "NOSE_TIP": 8, + "UPPER_LIP": 9, + "LOWER_LIP": 10, + "MOUTH_LEFT": 11, + "MOUTH_RIGHT": 12, + "MOUTH_CENTER": 13, + "NOSE_BOTTOM_RIGHT": 14, + "NOSE_BOTTOM_LEFT": 15, + "NOSE_BOTTOM_CENTER": 16, + "LEFT_EYE_TOP_BOUNDARY": 17, + "LEFT_EYE_RIGHT_CORNER": 18, + "LEFT_EYE_BOTTOM_BOUNDARY": 19, + "LEFT_EYE_LEFT_CORNER": 20, + "RIGHT_EYE_TOP_BOUNDARY": 21, + "RIGHT_EYE_RIGHT_CORNER": 22, + "RIGHT_EYE_BOTTOM_BOUNDARY": 23, + "RIGHT_EYE_LEFT_CORNER": 24, + "LEFT_EYEBROW_UPPER_MIDPOINT": 25, + "RIGHT_EYEBROW_UPPER_MIDPOINT": 26, + "LEFT_EAR_TRAGION": 27, + "RIGHT_EAR_TRAGION": 28, + "LEFT_EYE_PUPIL": 29, + "RIGHT_EYE_PUPIL": 30, + "FOREHEAD_GLABELLA": 31, + "CHIN_GNATHION": 32, + "CHIN_LEFT_GONION": 33, + "CHIN_RIGHT_GONION": 34, +} + +func (x FaceAnnotation_Landmark_Type) String() string { + return proto.EnumName(FaceAnnotation_Landmark_Type_name, int32(x)) +} +func (FaceAnnotation_Landmark_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{3, 0, 0} +} + +// Batch operation states. +type OperationMetadata_State int32 + +const ( + // Invalid. + OperationMetadata_STATE_UNSPECIFIED OperationMetadata_State = 0 + // Request is received. + OperationMetadata_CREATED OperationMetadata_State = 1 + // Request is actively being processed. + OperationMetadata_RUNNING OperationMetadata_State = 2 + // The batch processing is done. + OperationMetadata_DONE OperationMetadata_State = 3 + // The batch processing was cancelled. 
+ OperationMetadata_CANCELLED OperationMetadata_State = 4 +) + +var OperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATED", + 2: "RUNNING", + 3: "DONE", + 4: "CANCELLED", +} +var OperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATED": 1, + "RUNNING": 2, + "DONE": 3, + "CANCELLED": 4, +} + +func (x OperationMetadata_State) String() string { + return proto.EnumName(OperationMetadata_State_name, int32(x)) +} +func (OperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{37, 0} +} + +// The type of Google Cloud Vision API detection to perform, and the maximum +// number of results to return for that type. Multiple `Feature` objects can +// be specified in the `features` list. +type Feature struct { + // The feature type. + Type Feature_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1.Feature_Type" json:"type,omitempty"` + // Maximum number of results of this type. Does not apply to + // `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`. + MaxResults int32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Model to use for the feature. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{0} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetType() Feature_Type { + if m != nil { + return m.Type + } + return Feature_TYPE_UNSPECIFIED +} + +func (m *Feature) GetMaxResults() int32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *Feature) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// External image source (Google Cloud Storage or web URL image location). +type ImageSource struct { + // **Use `image_uri` instead.** + // + // The Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more info. + GcsImageUri string `protobuf:"bytes,1,opt,name=gcs_image_uri,json=gcsImageUri,proto3" json:"gcs_image_uri,omitempty"` + // The URI of the source image. Can be either: + // + // 1. A Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more + // info. + // + // 2. 
A publicly-accessible image HTTP/HTTPS URL. When fetching images from + // HTTP/HTTPS URLs, Google cannot guarantee that the request will be + // completed. Your request may fail if the specified host denies the + // request (e.g. due to request throttling or DOS prevention), or if Google + // throttles requests to the site for abuse prevention. You should not + // depend on externally-hosted images for production applications. + // + // When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes + // precedence. + ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSource) Reset() { *m = ImageSource{} } +func (m *ImageSource) String() string { return proto.CompactTextString(m) } +func (*ImageSource) ProtoMessage() {} +func (*ImageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{1} +} +func (m *ImageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSource.Unmarshal(m, b) +} +func (m *ImageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSource.Marshal(b, m, deterministic) +} +func (dst *ImageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSource.Merge(dst, src) +} +func (m *ImageSource) XXX_Size() int { + return xxx_messageInfo_ImageSource.Size(m) +} +func (m *ImageSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSource proto.InternalMessageInfo + +func (m *ImageSource) GetGcsImageUri() string { + if m != nil { + return m.GcsImageUri + } + return "" +} + +func (m *ImageSource) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Client image to perform Google Cloud Vision API tasks over. +type Image struct { + // Image content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // Google Cloud Storage image location, or publicly-accessible image + // URL. If both `content` and `source` are provided for an image, `content` + // takes precedence and is used to perform the image annotation request. 
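To make the precedence rules described in the ImageSource and Image comments concrete, here is a hedged sketch using only types generated in this file; the pb alias is the same assumed import as above and the bucket/object name is a placeholder:

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func exampleImages(jpegBytes []byte) (*pb.Image, *pb.Image) {
	// By URI: when both ImageUri and GcsImageUri are set, ImageUri takes precedence.
	byURI := &pb.Image{
		Source: &pb.ImageSource{ImageUri: "gs://example-bucket/photo.jpg"}, // placeholder URI
	}
	// Inline bytes: when both Content and Source are set, Content takes precedence.
	byBytes := &pb.Image{Content: jpegBytes}
	return byURI, byBytes
}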
+ Source *ImageSource `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{2} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +func (m *Image) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *Image) GetSource() *ImageSource { + if m != nil { + return m.Source + } + return nil +} + +// A face annotation object contains the results of face detection. +type FaceAnnotation struct { + // The bounding polygon around the face. The coordinates of the bounding box + // are in the original image's scale. + // The bounding box is computed to "frame" the face in accordance with human + // expectations. It is based on the landmarker results. + // Note that one or more x and/or y coordinates may not be generated in the + // `BoundingPoly` (the polygon will be unbounded) if only a partial face + // appears in the image to be annotated. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The `fd_bounding_poly` bounding polygon is tighter than the + // `boundingPoly`, and encloses only the skin part of the face. Typically, it + // is used to eliminate the face from any image analysis that detects the + // "amount of skin" visible in an image. It is not based on the + // landmarker results, only on the initial face detection, hence + // the fd (face detection) prefix. + FdBoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=fd_bounding_poly,json=fdBoundingPoly,proto3" json:"fd_bounding_poly,omitempty"` + // Detected face landmarks. + Landmarks []*FaceAnnotation_Landmark `protobuf:"bytes,3,rep,name=landmarks,proto3" json:"landmarks,omitempty"` + // Roll angle, which indicates the amount of clockwise/anti-clockwise rotation + // of the face relative to the image vertical about the axis perpendicular to + // the face. Range [-180,180]. + RollAngle float32 `protobuf:"fixed32,4,opt,name=roll_angle,json=rollAngle,proto3" json:"roll_angle,omitempty"` + // Yaw angle, which indicates the leftward/rightward angle that the face is + // pointing relative to the vertical plane perpendicular to the image. Range + // [-180,180]. + PanAngle float32 `protobuf:"fixed32,5,opt,name=pan_angle,json=panAngle,proto3" json:"pan_angle,omitempty"` + // Pitch angle, which indicates the upwards/downwards angle that the face is + // pointing relative to the image's horizontal plane. Range [-180,180]. + TiltAngle float32 `protobuf:"fixed32,6,opt,name=tilt_angle,json=tiltAngle,proto3" json:"tilt_angle,omitempty"` + // Detection confidence. Range [0, 1]. 
+ DetectionConfidence float32 `protobuf:"fixed32,7,opt,name=detection_confidence,json=detectionConfidence,proto3" json:"detection_confidence,omitempty"` + // Face landmarking confidence. Range [0, 1]. + LandmarkingConfidence float32 `protobuf:"fixed32,8,opt,name=landmarking_confidence,json=landmarkingConfidence,proto3" json:"landmarking_confidence,omitempty"` + // Joy likelihood. + JoyLikelihood Likelihood `protobuf:"varint,9,opt,name=joy_likelihood,json=joyLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"joy_likelihood,omitempty"` + // Sorrow likelihood. + SorrowLikelihood Likelihood `protobuf:"varint,10,opt,name=sorrow_likelihood,json=sorrowLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"sorrow_likelihood,omitempty"` + // Anger likelihood. + AngerLikelihood Likelihood `protobuf:"varint,11,opt,name=anger_likelihood,json=angerLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"anger_likelihood,omitempty"` + // Surprise likelihood. + SurpriseLikelihood Likelihood `protobuf:"varint,12,opt,name=surprise_likelihood,json=surpriseLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"surprise_likelihood,omitempty"` + // Under-exposed likelihood. + UnderExposedLikelihood Likelihood `protobuf:"varint,13,opt,name=under_exposed_likelihood,json=underExposedLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"under_exposed_likelihood,omitempty"` + // Blurred likelihood. + BlurredLikelihood Likelihood `protobuf:"varint,14,opt,name=blurred_likelihood,json=blurredLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"blurred_likelihood,omitempty"` + // Headwear likelihood. + HeadwearLikelihood Likelihood `protobuf:"varint,15,opt,name=headwear_likelihood,json=headwearLikelihood,proto3,enum=google.cloud.vision.v1.Likelihood" json:"headwear_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{3} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetFdBoundingPoly() *BoundingPoly { + if m != nil { + return m.FdBoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetLandmarks() []*FaceAnnotation_Landmark { + if m != nil { + return m.Landmarks + } + return nil +} + +func (m *FaceAnnotation) GetRollAngle() float32 { + if m != nil { + return m.RollAngle + } + return 0 +} + +func (m *FaceAnnotation) GetPanAngle() float32 { + if m != nil { + return m.PanAngle + } + return 0 +} + +func (m *FaceAnnotation) GetTiltAngle() 
float32 { + if m != nil { + return m.TiltAngle + } + return 0 +} + +func (m *FaceAnnotation) GetDetectionConfidence() float32 { + if m != nil { + return m.DetectionConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetLandmarkingConfidence() float32 { + if m != nil { + return m.LandmarkingConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetJoyLikelihood() Likelihood { + if m != nil { + return m.JoyLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSorrowLikelihood() Likelihood { + if m != nil { + return m.SorrowLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetAngerLikelihood() Likelihood { + if m != nil { + return m.AngerLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSurpriseLikelihood() Likelihood { + if m != nil { + return m.SurpriseLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetUnderExposedLikelihood() Likelihood { + if m != nil { + return m.UnderExposedLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetBlurredLikelihood() Likelihood { + if m != nil { + return m.BlurredLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetHeadwearLikelihood() Likelihood { + if m != nil { + return m.HeadwearLikelihood + } + return Likelihood_UNKNOWN +} + +// A face-specific landmark (for example, a face feature). +type FaceAnnotation_Landmark struct { + // Face landmark type. + Type FaceAnnotation_Landmark_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.cloud.vision.v1.FaceAnnotation_Landmark_Type" json:"type,omitempty"` + // Face landmark position. + Position *Position `protobuf:"bytes,4,opt,name=position,proto3" json:"position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation_Landmark) Reset() { *m = FaceAnnotation_Landmark{} } +func (m *FaceAnnotation_Landmark) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation_Landmark) ProtoMessage() {} +func (*FaceAnnotation_Landmark) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{3, 0} +} +func (m *FaceAnnotation_Landmark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation_Landmark.Unmarshal(m, b) +} +func (m *FaceAnnotation_Landmark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation_Landmark.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation_Landmark) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation_Landmark.Merge(dst, src) +} +func (m *FaceAnnotation_Landmark) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation_Landmark.Size(m) +} +func (m *FaceAnnotation_Landmark) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation_Landmark.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation_Landmark proto.InternalMessageInfo + +func (m *FaceAnnotation_Landmark) GetType() FaceAnnotation_Landmark_Type { + if m != nil { + return m.Type + } + return FaceAnnotation_Landmark_UNKNOWN_LANDMARK +} + +func (m *FaceAnnotation_Landmark) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +// Detected entity location information. +type LocationInfo struct { + // lat/long location coordinates. 
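The FaceAnnotation getters above all guard against a nil receiver, which is what makes it safe to read a face result without checking every pointer first. A small sketch of reading one annotation, under the same assumed pb import plus "fmt":

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func describeFace(fa *pb.FaceAnnotation) {
	// The generated getters are nil-safe: a nil *FaceAnnotation yields zero
	// values such as Likelihood_UNKNOWN rather than panicking.
	fmt.Printf("joy=%v detection=%.2f roll=%.1f\n",
		fa.GetJoyLikelihood(), fa.GetDetectionConfidence(), fa.GetRollAngle())
	for _, lm := range fa.GetLandmarks() {
		fmt.Println(lm.GetType()) // e.g. LEFT_EYE, NOSE_TIP
	}
}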
+ LatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +// A `Property` consists of a user-supplied name/value pair. +type Property struct { + // Name of the property. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value of the property. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Value of numeric properties. + Uint64Value uint64 `protobuf:"varint,3,opt,name=uint64_value,json=uint64Value,proto3" json:"uint64_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{5} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Property) GetUint64Value() uint64 { + if m != nil { + return m.Uint64Value + } + return 0 +} + +// Set of detected entity features. +type EntityAnnotation struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The language code for the locale in which the entity textual + // `description` is expressed. + Locale string `protobuf:"bytes,2,opt,name=locale,proto3" json:"locale,omitempty"` + // Entity textual description, expressed in its `locale` language. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Overall score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // **Deprecated. Use `score` instead.** + // The accuracy of the entity detection in an image. + // For example, for an image in which the "Eiffel Tower" entity is detected, + // this field represents the confidence that there is a tower in the query + // image. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` // Deprecated: Do not use. + // The relevancy of the ICA (Image Content Annotation) label to the + // image. For example, the relevancy of "tower" is likely higher to an image + // containing the detected "Eiffel Tower" than to an image containing a + // detected distant towering building, even though the confidence that + // there is a tower in each image may be the same. Range [0, 1]. + Topicality float32 `protobuf:"fixed32,6,opt,name=topicality,proto3" json:"topicality,omitempty"` + // Image region to which this entity belongs. Not produced + // for `LABEL_DETECTION` features. + BoundingPoly *BoundingPoly `protobuf:"bytes,7,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The location information for the detected entity. Multiple + // `LocationInfo` elements can be present because one location may + // indicate the location of the scene in the image, and another location + // may indicate the location of the place where the image was taken. + // Location information is usually present for landmarks. + Locations []*LocationInfo `protobuf:"bytes,8,rep,name=locations,proto3" json:"locations,omitempty"` + // Some entities may have optional user-supplied `Property` (name/value) + // fields, such a score or string that qualifies the entity. 
+ Properties []*Property `protobuf:"bytes,9,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityAnnotation) Reset() { *m = EntityAnnotation{} } +func (m *EntityAnnotation) String() string { return proto.CompactTextString(m) } +func (*EntityAnnotation) ProtoMessage() {} +func (*EntityAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{6} +} +func (m *EntityAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityAnnotation.Unmarshal(m, b) +} +func (m *EntityAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityAnnotation.Marshal(b, m, deterministic) +} +func (dst *EntityAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityAnnotation.Merge(dst, src) +} +func (m *EntityAnnotation) XXX_Size() int { + return xxx_messageInfo_EntityAnnotation.Size(m) +} +func (m *EntityAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_EntityAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityAnnotation proto.InternalMessageInfo + +func (m *EntityAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *EntityAnnotation) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *EntityAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EntityAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Deprecated: Do not use. +func (m *EntityAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *EntityAnnotation) GetTopicality() float32 { + if m != nil { + return m.Topicality + } + return 0 +} + +func (m *EntityAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *EntityAnnotation) GetLocations() []*LocationInfo { + if m != nil { + return m.Locations + } + return nil +} + +func (m *EntityAnnotation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Set of detected objects with bounding boxes. +type LocalizedObjectAnnotation struct { + // Object ID that should align with EntityAnnotation mid. + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Object name, expressed in its `language_code` language. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // Image region to which this object belongs. This must be populated. 
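Since the EntityAnnotation comments above mark Confidence as deprecated in favor of Score, a caller ranking label results would lean on Score. A hedged sketch with the same assumed pb import:

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func topLabel(labels []*pb.EntityAnnotation) string {
	best, bestScore := "", float32(-1)
	for _, e := range labels {
		// Rank on Score; Confidence is deprecated per the field comment above.
		if s := e.GetScore(); s > bestScore {
			best, bestScore = e.GetDescription(), s
		}
	}
	return best
}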
+ BoundingPoly *BoundingPoly `protobuf:"bytes,5,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalizedObjectAnnotation) Reset() { *m = LocalizedObjectAnnotation{} } +func (m *LocalizedObjectAnnotation) String() string { return proto.CompactTextString(m) } +func (*LocalizedObjectAnnotation) ProtoMessage() {} +func (*LocalizedObjectAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{7} +} +func (m *LocalizedObjectAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalizedObjectAnnotation.Unmarshal(m, b) +} +func (m *LocalizedObjectAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalizedObjectAnnotation.Marshal(b, m, deterministic) +} +func (dst *LocalizedObjectAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalizedObjectAnnotation.Merge(dst, src) +} +func (m *LocalizedObjectAnnotation) XXX_Size() int { + return xxx_messageInfo_LocalizedObjectAnnotation.Size(m) +} +func (m *LocalizedObjectAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LocalizedObjectAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalizedObjectAnnotation proto.InternalMessageInfo + +func (m *LocalizedObjectAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *LocalizedObjectAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +// Set of features pertaining to the image, computed by computer vision +// methods over safe-search verticals (for example, adult, spoof, medical, +// violence). +type SafeSearchAnnotation struct { + // Represents the adult content likelihood for the image. Adult content may + // contain elements such as nudity, pornographic images or cartoons, or + // sexual activities. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.vision.v1.Likelihood" json:"adult,omitempty"` + // Spoof likelihood. The likelihood that an modification + // was made to the image's canonical version to make it appear + // funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.vision.v1.Likelihood" json:"spoof,omitempty"` + // Likelihood that this is a medical image. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.vision.v1.Likelihood" json:"medical,omitempty"` + // Likelihood that this image contains violent content. + Violence Likelihood `protobuf:"varint,4,opt,name=violence,proto3,enum=google.cloud.vision.v1.Likelihood" json:"violence,omitempty"` + // Likelihood that the request image contains racy content. Racy content may + // include (but is not limited to) skimpy or sheer clothing, strategically + // covered nudity, lewd or provocative poses, or close-ups of sensitive + // body areas. 
+ Racy Likelihood `protobuf:"varint,9,opt,name=racy,proto3,enum=google.cloud.vision.v1.Likelihood" json:"racy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{8} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolence() Likelihood { + if m != nil { + return m.Violence + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +// Rectangle determined by min and max `LatLng` pairs. +type LatLongRect struct { + // Min lat/long pair. + MinLatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=min_lat_lng,json=minLatLng,proto3" json:"min_lat_lng,omitempty"` + // Max lat/long pair. 
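One common way to consume a SafeSearchAnnotation is to threshold its Likelihood fields. A sketch under the same assumed pb import; it relies on the Likelihood enum (UNKNOWN through VERY_LIKELY), which is generated elsewhere in this file, being numerically ordered:

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func flagUnsafe(ss *pb.SafeSearchAnnotation) bool {
	// Relies on the Likelihood enum's numeric ordering
	// (UNKNOWN=0 ... VERY_LIKELY=5), defined elsewhere in this file.
	return ss.GetAdult() >= pb.Likelihood_LIKELY ||
		ss.GetRacy() >= pb.Likelihood_LIKELY ||
		ss.GetViolence() >= pb.Likelihood_LIKELY
}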
+ MaxLatLng *latlng.LatLng `protobuf:"bytes,2,opt,name=max_lat_lng,json=maxLatLng,proto3" json:"max_lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLongRect) Reset() { *m = LatLongRect{} } +func (m *LatLongRect) String() string { return proto.CompactTextString(m) } +func (*LatLongRect) ProtoMessage() {} +func (*LatLongRect) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{9} +} +func (m *LatLongRect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLongRect.Unmarshal(m, b) +} +func (m *LatLongRect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLongRect.Marshal(b, m, deterministic) +} +func (dst *LatLongRect) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLongRect.Merge(dst, src) +} +func (m *LatLongRect) XXX_Size() int { + return xxx_messageInfo_LatLongRect.Size(m) +} +func (m *LatLongRect) XXX_DiscardUnknown() { + xxx_messageInfo_LatLongRect.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLongRect proto.InternalMessageInfo + +func (m *LatLongRect) GetMinLatLng() *latlng.LatLng { + if m != nil { + return m.MinLatLng + } + return nil +} + +func (m *LatLongRect) GetMaxLatLng() *latlng.LatLng { + if m != nil { + return m.MaxLatLng + } + return nil +} + +// Color information consists of RGB channels, score, and the fraction of +// the image that the color occupies in the image. +type ColorInfo struct { + // RGB components of the color. + Color *color.Color `protobuf:"bytes,1,opt,name=color,proto3" json:"color,omitempty"` + // Image-specific score for this color. Value in range [0, 1]. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The fraction of pixels the color occupies in the image. + // Value in range [0, 1]. + PixelFraction float32 `protobuf:"fixed32,3,opt,name=pixel_fraction,json=pixelFraction,proto3" json:"pixel_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColorInfo) Reset() { *m = ColorInfo{} } +func (m *ColorInfo) String() string { return proto.CompactTextString(m) } +func (*ColorInfo) ProtoMessage() {} +func (*ColorInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{10} +} +func (m *ColorInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColorInfo.Unmarshal(m, b) +} +func (m *ColorInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColorInfo.Marshal(b, m, deterministic) +} +func (dst *ColorInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColorInfo.Merge(dst, src) +} +func (m *ColorInfo) XXX_Size() int { + return xxx_messageInfo_ColorInfo.Size(m) +} +func (m *ColorInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ColorInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ColorInfo proto.InternalMessageInfo + +func (m *ColorInfo) GetColor() *color.Color { + if m != nil { + return m.Color + } + return nil +} + +func (m *ColorInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ColorInfo) GetPixelFraction() float32 { + if m != nil { + return m.PixelFraction + } + return 0 +} + +// Set of dominant colors and their corresponding scores. +type DominantColorsAnnotation struct { + // RGB color values with their score and pixel fraction. 
+ Colors []*ColorInfo `protobuf:"bytes,1,rep,name=colors,proto3" json:"colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DominantColorsAnnotation) Reset() { *m = DominantColorsAnnotation{} } +func (m *DominantColorsAnnotation) String() string { return proto.CompactTextString(m) } +func (*DominantColorsAnnotation) ProtoMessage() {} +func (*DominantColorsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{11} +} +func (m *DominantColorsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DominantColorsAnnotation.Unmarshal(m, b) +} +func (m *DominantColorsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DominantColorsAnnotation.Marshal(b, m, deterministic) +} +func (dst *DominantColorsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DominantColorsAnnotation.Merge(dst, src) +} +func (m *DominantColorsAnnotation) XXX_Size() int { + return xxx_messageInfo_DominantColorsAnnotation.Size(m) +} +func (m *DominantColorsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_DominantColorsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_DominantColorsAnnotation proto.InternalMessageInfo + +func (m *DominantColorsAnnotation) GetColors() []*ColorInfo { + if m != nil { + return m.Colors + } + return nil +} + +// Stores image properties, such as dominant colors. +type ImageProperties struct { + // If present, dominant colors completed successfully. + DominantColors *DominantColorsAnnotation `protobuf:"bytes,1,opt,name=dominant_colors,json=dominantColors,proto3" json:"dominant_colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageProperties) Reset() { *m = ImageProperties{} } +func (m *ImageProperties) String() string { return proto.CompactTextString(m) } +func (*ImageProperties) ProtoMessage() {} +func (*ImageProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{12} +} +func (m *ImageProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageProperties.Unmarshal(m, b) +} +func (m *ImageProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageProperties.Marshal(b, m, deterministic) +} +func (dst *ImageProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageProperties.Merge(dst, src) +} +func (m *ImageProperties) XXX_Size() int { + return xxx_messageInfo_ImageProperties.Size(m) +} +func (m *ImageProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ImageProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageProperties proto.InternalMessageInfo + +func (m *ImageProperties) GetDominantColors() *DominantColorsAnnotation { + if m != nil { + return m.DominantColors + } + return nil +} + +// Single crop hint that is used to generate a new crop when serving an image. +type CropHint struct { + // The bounding polygon for the crop region. The coordinates of the bounding + // box are in the original image's scale. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Confidence of this being a salient region. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Fraction of importance of this salient region with respect to the original + // image. + ImportanceFraction float32 `protobuf:"fixed32,3,opt,name=importance_fraction,json=importanceFraction,proto3" json:"importance_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHint) Reset() { *m = CropHint{} } +func (m *CropHint) String() string { return proto.CompactTextString(m) } +func (*CropHint) ProtoMessage() {} +func (*CropHint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{13} +} +func (m *CropHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHint.Unmarshal(m, b) +} +func (m *CropHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHint.Marshal(b, m, deterministic) +} +func (dst *CropHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHint.Merge(dst, src) +} +func (m *CropHint) XXX_Size() int { + return xxx_messageInfo_CropHint.Size(m) +} +func (m *CropHint) XXX_DiscardUnknown() { + xxx_messageInfo_CropHint.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHint proto.InternalMessageInfo + +func (m *CropHint) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *CropHint) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *CropHint) GetImportanceFraction() float32 { + if m != nil { + return m.ImportanceFraction + } + return 0 +} + +// Set of crop hints that are used to generate new crops when serving images. +type CropHintsAnnotation struct { + // Crop hint results. + CropHints []*CropHint `protobuf:"bytes,1,rep,name=crop_hints,json=cropHints,proto3" json:"crop_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsAnnotation) Reset() { *m = CropHintsAnnotation{} } +func (m *CropHintsAnnotation) String() string { return proto.CompactTextString(m) } +func (*CropHintsAnnotation) ProtoMessage() {} +func (*CropHintsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{14} +} +func (m *CropHintsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsAnnotation.Unmarshal(m, b) +} +func (m *CropHintsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsAnnotation.Marshal(b, m, deterministic) +} +func (dst *CropHintsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsAnnotation.Merge(dst, src) +} +func (m *CropHintsAnnotation) XXX_Size() int { + return xxx_messageInfo_CropHintsAnnotation.Size(m) +} +func (m *CropHintsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsAnnotation proto.InternalMessageInfo + +func (m *CropHintsAnnotation) GetCropHints() []*CropHint { + if m != nil { + return m.CropHints + } + return nil +} + +// Parameters for crop hints annotation request. +type CropHintsParams struct { + // Aspect ratios in floats, representing the ratio of the width to the height + // of the image. For example, if the desired aspect ratio is 4/3, the + // corresponding float value should be 1.33333. If not specified, the + // best possible crop is returned. 
The number of provided aspect ratios is + // limited to a maximum of 16; any aspect ratios provided after the 16th are + // ignored. + AspectRatios []float32 `protobuf:"fixed32,1,rep,packed,name=aspect_ratios,json=aspectRatios,proto3" json:"aspect_ratios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsParams) Reset() { *m = CropHintsParams{} } +func (m *CropHintsParams) String() string { return proto.CompactTextString(m) } +func (*CropHintsParams) ProtoMessage() {} +func (*CropHintsParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{15} +} +func (m *CropHintsParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsParams.Unmarshal(m, b) +} +func (m *CropHintsParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsParams.Marshal(b, m, deterministic) +} +func (dst *CropHintsParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsParams.Merge(dst, src) +} +func (m *CropHintsParams) XXX_Size() int { + return xxx_messageInfo_CropHintsParams.Size(m) +} +func (m *CropHintsParams) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsParams.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsParams proto.InternalMessageInfo + +func (m *CropHintsParams) GetAspectRatios() []float32 { + if m != nil { + return m.AspectRatios + } + return nil +} + +// Parameters for web detection request. +type WebDetectionParams struct { + // Whether to include results derived from the geo information in the image. + IncludeGeoResults bool `protobuf:"varint,2,opt,name=include_geo_results,json=includeGeoResults,proto3" json:"include_geo_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetectionParams) Reset() { *m = WebDetectionParams{} } +func (m *WebDetectionParams) String() string { return proto.CompactTextString(m) } +func (*WebDetectionParams) ProtoMessage() {} +func (*WebDetectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{16} +} +func (m *WebDetectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetectionParams.Unmarshal(m, b) +} +func (m *WebDetectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetectionParams.Marshal(b, m, deterministic) +} +func (dst *WebDetectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetectionParams.Merge(dst, src) +} +func (m *WebDetectionParams) XXX_Size() int { + return xxx_messageInfo_WebDetectionParams.Size(m) +} +func (m *WebDetectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetectionParams proto.InternalMessageInfo + +func (m *WebDetectionParams) GetIncludeGeoResults() bool { + if m != nil { + return m.IncludeGeoResults + } + return false +} + +// Image context and/or feature-specific parameters. +type ImageContext struct { + // Not used. + LatLongRect *LatLongRect `protobuf:"bytes,1,opt,name=lat_long_rect,json=latLongRect,proto3" json:"lat_long_rect,omitempty"` + // List of languages to use for TEXT_DETECTION. In most cases, an empty value + // yields the best results since it enables automatic language detection. For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. 
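The CropHintsParams comment above expresses aspect ratios as width divided by height, with anything past the 16th ratio ignored. A short sketch building crop-hint and web-detection parameters, with the same assumed pb import:

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func exampleCropAndWebParams() (*pb.CropHintsParams, *pb.WebDetectionParams) {
	// Ratios are width/height: 4:3 -> 1.33333, 16:9 -> 1.77778.
	// Only the first 16 ratios are considered, per the comment above.
	crop := &pb.CropHintsParams{AspectRatios: []float32{1.33333, 1.77778}}
	web := &pb.WebDetectionParams{IncludeGeoResults: true}
	return crop, web
}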
In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Text detection returns an + // error if one or more of the specified languages is not one of the + // [supported languages](/vision/docs/languages). + LanguageHints []string `protobuf:"bytes,2,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + // Parameters for crop hints annotation request. + CropHintsParams *CropHintsParams `protobuf:"bytes,4,opt,name=crop_hints_params,json=cropHintsParams,proto3" json:"crop_hints_params,omitempty"` + // Parameters for product search. + ProductSearchParams *ProductSearchParams `protobuf:"bytes,5,opt,name=product_search_params,json=productSearchParams,proto3" json:"product_search_params,omitempty"` + // Parameters for web detection. + WebDetectionParams *WebDetectionParams `protobuf:"bytes,6,opt,name=web_detection_params,json=webDetectionParams,proto3" json:"web_detection_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageContext) Reset() { *m = ImageContext{} } +func (m *ImageContext) String() string { return proto.CompactTextString(m) } +func (*ImageContext) ProtoMessage() {} +func (*ImageContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{17} +} +func (m *ImageContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageContext.Unmarshal(m, b) +} +func (m *ImageContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageContext.Marshal(b, m, deterministic) +} +func (dst *ImageContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageContext.Merge(dst, src) +} +func (m *ImageContext) XXX_Size() int { + return xxx_messageInfo_ImageContext.Size(m) +} +func (m *ImageContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageContext proto.InternalMessageInfo + +func (m *ImageContext) GetLatLongRect() *LatLongRect { + if m != nil { + return m.LatLongRect + } + return nil +} + +func (m *ImageContext) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +func (m *ImageContext) GetCropHintsParams() *CropHintsParams { + if m != nil { + return m.CropHintsParams + } + return nil +} + +func (m *ImageContext) GetProductSearchParams() *ProductSearchParams { + if m != nil { + return m.ProductSearchParams + } + return nil +} + +func (m *ImageContext) GetWebDetectionParams() *WebDetectionParams { + if m != nil { + return m.WebDetectionParams + } + return nil +} + +// Request for performing Google Cloud Vision API tasks over a user-provided +// image, with user-requested features, and with context information. +type AnnotateImageRequest struct { + // The image to be processed. + Image *Image `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + // Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image. 
+ ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageRequest) Reset() { *m = AnnotateImageRequest{} } +func (m *AnnotateImageRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageRequest) ProtoMessage() {} +func (*AnnotateImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{18} +} +func (m *AnnotateImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageRequest.Unmarshal(m, b) +} +func (m *AnnotateImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageRequest.Merge(dst, src) +} +func (m *AnnotateImageRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateImageRequest.Size(m) +} +func (m *AnnotateImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageRequest proto.InternalMessageInfo + +func (m *AnnotateImageRequest) GetImage() *Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *AnnotateImageRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateImageRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +// If an image was produced from a file (e.g. a PDF), this message gives +// information about the source of that image. +type ImageAnnotationContext struct { + // The URI of the file used to produce the image. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // If the file was a PDF or TIFF, this field gives the page number within + // the file used to produce the image. 
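Putting the pieces together, an AnnotateImageRequest bundles one Image with the requested Features and an optional ImageContext. A hedged sketch, again assuming the pb import alias used above:

// Sketch; assumes pb "google.golang.org/genproto/googleapis/cloud/vision/v1".
func exampleRequest(img *pb.Image) *pb.AnnotateImageRequest {
	return &pb.AnnotateImageRequest{
		Image: img,
		Features: []*pb.Feature{
			{Type: pb.Feature_LABEL_DETECTION, MaxResults: 10},
			{Type: pb.Feature_TEXT_DETECTION}, // MaxResults does not apply to TEXT_DETECTION
		},
		ImageContext: &pb.ImageContext{
			// Usually unnecessary for Latin-alphabet text; see the LanguageHints comment above.
			LanguageHints: []string{"en"},
		},
	}
}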
+ PageNumber int32 `protobuf:"varint,2,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAnnotationContext) Reset() { *m = ImageAnnotationContext{} } +func (m *ImageAnnotationContext) String() string { return proto.CompactTextString(m) } +func (*ImageAnnotationContext) ProtoMessage() {} +func (*ImageAnnotationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{19} +} +func (m *ImageAnnotationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAnnotationContext.Unmarshal(m, b) +} +func (m *ImageAnnotationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAnnotationContext.Marshal(b, m, deterministic) +} +func (dst *ImageAnnotationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAnnotationContext.Merge(dst, src) +} +func (m *ImageAnnotationContext) XXX_Size() int { + return xxx_messageInfo_ImageAnnotationContext.Size(m) +} +func (m *ImageAnnotationContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAnnotationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAnnotationContext proto.InternalMessageInfo + +func (m *ImageAnnotationContext) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ImageAnnotationContext) GetPageNumber() int32 { + if m != nil { + return m.PageNumber + } + return 0 +} + +// Response to an image annotation request. +type AnnotateImageResponse struct { + // If present, face detection has completed successfully. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,1,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // If present, landmark detection has completed successfully. + LandmarkAnnotations []*EntityAnnotation `protobuf:"bytes,2,rep,name=landmark_annotations,json=landmarkAnnotations,proto3" json:"landmark_annotations,omitempty"` + // If present, logo detection has completed successfully. + LogoAnnotations []*EntityAnnotation `protobuf:"bytes,3,rep,name=logo_annotations,json=logoAnnotations,proto3" json:"logo_annotations,omitempty"` + // If present, label detection has completed successfully. + LabelAnnotations []*EntityAnnotation `protobuf:"bytes,4,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // If present, localized object detection has completed successfully. + // This will be sorted descending by confidence score. + LocalizedObjectAnnotations []*LocalizedObjectAnnotation `protobuf:"bytes,22,rep,name=localized_object_annotations,json=localizedObjectAnnotations,proto3" json:"localized_object_annotations,omitempty"` + // If present, text (OCR) detection has completed successfully. + TextAnnotations []*EntityAnnotation `protobuf:"bytes,5,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // If present, text (OCR) detection or document (OCR) text detection has + // completed successfully. + // This annotation provides the structural hierarchy for the OCR detected + // text. + FullTextAnnotation *TextAnnotation `protobuf:"bytes,12,opt,name=full_text_annotation,json=fullTextAnnotation,proto3" json:"full_text_annotation,omitempty"` + // If present, safe-search annotation has completed successfully. 
+ SafeSearchAnnotation *SafeSearchAnnotation `protobuf:"bytes,6,opt,name=safe_search_annotation,json=safeSearchAnnotation,proto3" json:"safe_search_annotation,omitempty"` + // If present, image properties were extracted successfully. + ImagePropertiesAnnotation *ImageProperties `protobuf:"bytes,8,opt,name=image_properties_annotation,json=imagePropertiesAnnotation,proto3" json:"image_properties_annotation,omitempty"` + // If present, crop hints have completed successfully. + CropHintsAnnotation *CropHintsAnnotation `protobuf:"bytes,11,opt,name=crop_hints_annotation,json=cropHintsAnnotation,proto3" json:"crop_hints_annotation,omitempty"` + // If present, web detection has completed successfully. + WebDetection *WebDetection `protobuf:"bytes,13,opt,name=web_detection,json=webDetection,proto3" json:"web_detection,omitempty"` + // If present, product search has completed successfully. + ProductSearchResults *ProductSearchResults `protobuf:"bytes,14,opt,name=product_search_results,json=productSearchResults,proto3" json:"product_search_results,omitempty"` + // If set, represents the error message for the operation. + // Note that filled-in image annotations are guaranteed to be + // correct, even when `error` is set. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + // If present, contextual information is needed to understand where this image + // comes from. + Context *ImageAnnotationContext `protobuf:"bytes,21,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageResponse) Reset() { *m = AnnotateImageResponse{} } +func (m *AnnotateImageResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageResponse) ProtoMessage() {} +func (*AnnotateImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{20} +} +func (m *AnnotateImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageResponse.Unmarshal(m, b) +} +func (m *AnnotateImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageResponse.Merge(dst, src) +} +func (m *AnnotateImageResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateImageResponse.Size(m) +} +func (m *AnnotateImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageResponse proto.InternalMessageInfo + +func (m *AnnotateImageResponse) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLandmarkAnnotations() []*EntityAnnotation { + if m != nil { + return m.LandmarkAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLogoAnnotations() []*EntityAnnotation { + if m != nil { + return m.LogoAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLabelAnnotations() []*EntityAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLocalizedObjectAnnotations() []*LocalizedObjectAnnotation { + if m != nil { + return m.LocalizedObjectAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetTextAnnotations() []*EntityAnnotation { + if m != nil { + return 
m.TextAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetFullTextAnnotation() *TextAnnotation { + if m != nil { + return m.FullTextAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetSafeSearchAnnotation() *SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetImagePropertiesAnnotation() *ImageProperties { + if m != nil { + return m.ImagePropertiesAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetCropHintsAnnotation() *CropHintsAnnotation { + if m != nil { + return m.CropHintsAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetWebDetection() *WebDetection { + if m != nil { + return m.WebDetection + } + return nil +} + +func (m *AnnotateImageResponse) GetProductSearchResults() *ProductSearchResults { + if m != nil { + return m.ProductSearchResults + } + return nil +} + +func (m *AnnotateImageResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *AnnotateImageResponse) GetContext() *ImageAnnotationContext { + if m != nil { + return m.Context + } + return nil +} + +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +type AnnotateFileResponse struct { + // Information about the file for which this response is generated. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Individual responses to images found within the file. + Responses []*AnnotateImageResponse `protobuf:"bytes,2,rep,name=responses,proto3" json:"responses,omitempty"` + // This field gives the total number of pages in the file. + TotalPages int32 `protobuf:"varint,3,opt,name=total_pages,json=totalPages,proto3" json:"total_pages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileResponse) Reset() { *m = AnnotateFileResponse{} } +func (m *AnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileResponse) ProtoMessage() {} +func (*AnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{21} +} +func (m *AnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileResponse.Unmarshal(m, b) +} +func (m *AnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileResponse.Merge(dst, src) +} +func (m *AnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateFileResponse.Size(m) +} +func (m *AnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileResponse proto.InternalMessageInfo + +func (m *AnnotateFileResponse) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +func (m *AnnotateFileResponse) GetTotalPages() int32 { + if m != nil { + return m.TotalPages + } + return 0 +} + +// Multiple image annotation requests are batched into a single service call. 
+type BatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. + Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesRequest) Reset() { *m = BatchAnnotateImagesRequest{} } +func (m *BatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesRequest) ProtoMessage() {} +func (*BatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{22} +} +func (m *BatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *BatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesRequest.Size(m) +} +func (m *BatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to a batch image annotation request. +type BatchAnnotateImagesResponse struct { + // Individual responses to image annotation requests within the batch. + Responses []*AnnotateImageResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesResponse) Reset() { *m = BatchAnnotateImagesResponse{} } +func (m *BatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesResponse) ProtoMessage() {} +func (*BatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{23} +} +func (m *BatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *BatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesResponse.Size(m) +} +func (m *BatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateImagesResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// A request to annotate one single file, e.g. a PDF, TIFF or GIF file. +type AnnotateFileRequest struct { + // Required. Information about the input file. 
+ InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Pages of the file to perform image annotation. + // + // Pages starts from 1, we assume the first page of the file is page 1. + // At most 5 pages are supported per request. Pages can be negative. + // + // Page 1 means the first page. + // Page 2 means the second page. + // Page -1 means the last page. + // Page -2 means the second to the last page. + // + // If the file is GIF instead of PDF or TIFF, page refers to GIF frames. + // + // If this field is empty, by default the service performs image annotation + // for the first 5 pages of the file. + Pages []int32 `protobuf:"varint,4,rep,packed,name=pages,proto3" json:"pages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileRequest) Reset() { *m = AnnotateFileRequest{} } +func (m *AnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileRequest) ProtoMessage() {} +func (*AnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{24} +} +func (m *AnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileRequest.Unmarshal(m, b) +} +func (m *AnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileRequest.Merge(dst, src) +} +func (m *AnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateFileRequest.Size(m) +} +func (m *AnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileRequest proto.InternalMessageInfo + +func (m *AnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AnnotateFileRequest) GetPages() []int32 { + if m != nil { + return m.Pages + } + return nil +} + +// A list of requests to annotate files using the BatchAnnotateFiles API. +type BatchAnnotateFilesRequest struct { + // The list of file annotation requests. Right now we support only one + // AnnotateFileRequest in BatchAnnotateFilesRequest. 
+ Requests []*AnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateFilesRequest) Reset() { *m = BatchAnnotateFilesRequest{} } +func (m *BatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateFilesRequest) ProtoMessage() {} +func (*BatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{25} +} +func (m *BatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *BatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateFilesRequest.Size(m) +} +func (m *BatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateFilesRequest) GetRequests() []*AnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// A list of file annotation responses. +type BatchAnnotateFilesResponse struct { + // The list of file annotation responses, each response corresponding to each + // AnnotateFileRequest in BatchAnnotateFilesRequest. + Responses []*AnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateFilesResponse) Reset() { *m = BatchAnnotateFilesResponse{} } +func (m *BatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateFilesResponse) ProtoMessage() {} +func (*BatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{26} +} +func (m *BatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *BatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateFilesResponse.Size(m) +} +func (m *BatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateFilesResponse) GetResponses() []*AnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// An offline file annotation request. +type AsyncAnnotateFileRequest struct { + // Required. Information about the input file. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. 
+ Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Required. The desired output location and metadata (e.g. format). + OutputConfig *OutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileRequest) Reset() { *m = AsyncAnnotateFileRequest{} } +func (m *AsyncAnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileRequest) ProtoMessage() {} +func (*AsyncAnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{27} +} +func (m *AsyncAnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileRequest.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileRequest.Merge(dst, src) +} +func (m *AsyncAnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileRequest.Size(m) +} +func (m *AsyncAnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileRequest proto.InternalMessageInfo + +func (m *AsyncAnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The response for a single offline file annotation request. +type AsyncAnnotateFileResponse struct { + // The output location and metadata from AsyncAnnotateFileRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileResponse) Reset() { *m = AsyncAnnotateFileResponse{} } +func (m *AsyncAnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileResponse) ProtoMessage() {} +func (*AsyncAnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{28} +} +func (m *AsyncAnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileResponse.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileResponse.Merge(dst, src) +} +func (m *AsyncAnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileResponse.Size(m) +} +func (m *AsyncAnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileResponse proto.InternalMessageInfo + +func (m *AsyncAnnotateFileResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request for async image annotation for a list of images. +type AsyncBatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. + Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + // Required. The desired output location and metadata (e.g. format). 
+ OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateImagesRequest) Reset() { *m = AsyncBatchAnnotateImagesRequest{} } +func (m *AsyncBatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateImagesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{29} +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Size(m) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +func (m *AsyncBatchAnnotateImagesRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Response to an async batch image annotation request. +type AsyncBatchAnnotateImagesResponse struct { + // The output location and metadata from AsyncBatchAnnotateImagesRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateImagesResponse) Reset() { *m = AsyncBatchAnnotateImagesResponse{} } +func (m *AsyncBatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateImagesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{30} +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Size(m) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateImagesResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Multiple async file annotation requests are batched into a single service +// call. +type AsyncBatchAnnotateFilesRequest struct { + // Individual async file annotation requests for this batch. + Requests []*AsyncAnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesRequest) Reset() { *m = AsyncBatchAnnotateFilesRequest{} } +func (m *AsyncBatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{31} +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Size(m) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesRequest) GetRequests() []*AsyncAnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to an async batch file annotation request. 
+type AsyncBatchAnnotateFilesResponse struct { + // The list of file annotation responses, one for each request in + // AsyncBatchAnnotateFilesRequest. + Responses []*AsyncAnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesResponse) Reset() { *m = AsyncBatchAnnotateFilesResponse{} } +func (m *AsyncBatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{32} +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Size(m) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesResponse) GetResponses() []*AsyncAnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// The desired input location and metadata. +type InputConfig struct { + // The Google Cloud Storage location to read the input from. + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"` + // File content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + // + // Currently, this field only works for BatchAnnotateFiles requests. It does + // not work for AsyncBatchAnnotateFiles requests. + Content []byte `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + // The type of the file. Currently only "application/pdf" and "image/tiff" + // are supported. Wildcards are not supported. 
+ MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{33} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +func (m *InputConfig) GetGcsSource() *GcsSource { + if m != nil { + return m.GcsSource + } + return nil +} + +func (m *InputConfig) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *InputConfig) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// The desired output location and metadata. +type OutputConfig struct { + // The Google Cloud Storage location to write the output(s) to. + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3" json:"gcs_destination,omitempty"` + // The max number of response protos to put into each output JSON file on + // Google Cloud Storage. + // The valid range is [1, 100]. If not specified, the default value is 20. + // + // For example, for one pdf file with 100 pages, 100 response protos will + // be generated. If `batch_size` = 20, then 5 json files each + // containing 20 response protos will be written under the prefix + // `gcs_destination`.`uri`. + // + // Currently, batch_size only applies to GcsDestination, with potential future + // support for other output configurations. 
+ BatchSize int32 `protobuf:"varint,2,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{34} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if m != nil { + return m.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetBatchSize() int32 { + if m != nil { + return m.BatchSize + } + return 0 +} + +// The Google Cloud Storage location where the input will be read from. +type GcsSource struct { + // Google Cloud Storage URI for the input file. This must only be a + // Google Cloud Storage object. Wildcards are not currently supported. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{35} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The Google Cloud Storage location where the output will be written to. +type GcsDestination struct { + // Google Cloud Storage URI prefix where the results will be stored. Results + // will be in JSON format and preceded by its corresponding input URI prefix. + // This field can either represent a gcs file prefix or gcs directory. In + // either case, the uri should be unique because in order to get all of the + // output files, you will need to do a wildcard gcs search on the uri prefix + // you provide. + // + // Examples: + // + // * File Prefix: gs://bucket-name/here/filenameprefix The output files + // will be created in gs://bucket-name/here/ and the names of the + // output files will begin with "filenameprefix". 
+ // + // * Directory Prefix: gs://bucket-name/some/location/ The output files + // will be created in gs://bucket-name/some/location/ and the names of the + // output files could be anything because there was no filename prefix + // specified. + // + // If multiple outputs, each response is still AnnotateFileResponse, each of + // which contains some subset of the full list of AnnotateImageResponse. + // Multiple outputs can happen if, for example, the output JSON is too large + // and overflows into multiple sharded files. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{36} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// Contains metadata for the BatchAnnotateImages operation. +type OperationMetadata struct { + // Current state of the batch operation. + State OperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1.OperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was received. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time when the operation result was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_faae31fa1d905783, []int{37} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetState() OperationMetadata_State { + if m != nil { + return m.State + } + return OperationMetadata_STATE_UNSPECIFIED +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func init() { + proto.RegisterType((*Feature)(nil), "google.cloud.vision.v1.Feature") + proto.RegisterType((*ImageSource)(nil), "google.cloud.vision.v1.ImageSource") + proto.RegisterType((*Image)(nil), "google.cloud.vision.v1.Image") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.vision.v1.FaceAnnotation") + proto.RegisterType((*FaceAnnotation_Landmark)(nil), "google.cloud.vision.v1.FaceAnnotation.Landmark") + proto.RegisterType((*LocationInfo)(nil), "google.cloud.vision.v1.LocationInfo") + proto.RegisterType((*Property)(nil), "google.cloud.vision.v1.Property") + proto.RegisterType((*EntityAnnotation)(nil), "google.cloud.vision.v1.EntityAnnotation") + proto.RegisterType((*LocalizedObjectAnnotation)(nil), "google.cloud.vision.v1.LocalizedObjectAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.vision.v1.SafeSearchAnnotation") + proto.RegisterType((*LatLongRect)(nil), "google.cloud.vision.v1.LatLongRect") + proto.RegisterType((*ColorInfo)(nil), "google.cloud.vision.v1.ColorInfo") + proto.RegisterType((*DominantColorsAnnotation)(nil), "google.cloud.vision.v1.DominantColorsAnnotation") + proto.RegisterType((*ImageProperties)(nil), "google.cloud.vision.v1.ImageProperties") + proto.RegisterType((*CropHint)(nil), "google.cloud.vision.v1.CropHint") + proto.RegisterType((*CropHintsAnnotation)(nil), "google.cloud.vision.v1.CropHintsAnnotation") + proto.RegisterType((*CropHintsParams)(nil), "google.cloud.vision.v1.CropHintsParams") + proto.RegisterType((*WebDetectionParams)(nil), "google.cloud.vision.v1.WebDetectionParams") + proto.RegisterType((*ImageContext)(nil), "google.cloud.vision.v1.ImageContext") + proto.RegisterType((*AnnotateImageRequest)(nil), "google.cloud.vision.v1.AnnotateImageRequest") + proto.RegisterType((*ImageAnnotationContext)(nil), "google.cloud.vision.v1.ImageAnnotationContext") + 
proto.RegisterType((*AnnotateImageResponse)(nil), "google.cloud.vision.v1.AnnotateImageResponse") + proto.RegisterType((*AnnotateFileResponse)(nil), "google.cloud.vision.v1.AnnotateFileResponse") + proto.RegisterType((*BatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1.BatchAnnotateImagesRequest") + proto.RegisterType((*BatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1.BatchAnnotateImagesResponse") + proto.RegisterType((*AnnotateFileRequest)(nil), "google.cloud.vision.v1.AnnotateFileRequest") + proto.RegisterType((*BatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1.BatchAnnotateFilesRequest") + proto.RegisterType((*BatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1.BatchAnnotateFilesResponse") + proto.RegisterType((*AsyncAnnotateFileRequest)(nil), "google.cloud.vision.v1.AsyncAnnotateFileRequest") + proto.RegisterType((*AsyncAnnotateFileResponse)(nil), "google.cloud.vision.v1.AsyncAnnotateFileResponse") + proto.RegisterType((*AsyncBatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1.AsyncBatchAnnotateImagesRequest") + proto.RegisterType((*AsyncBatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1.AsyncBatchAnnotateImagesResponse") + proto.RegisterType((*AsyncBatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1.AsyncBatchAnnotateFilesRequest") + proto.RegisterType((*AsyncBatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1.AsyncBatchAnnotateFilesResponse") + proto.RegisterType((*InputConfig)(nil), "google.cloud.vision.v1.InputConfig") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.vision.v1.OutputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.vision.v1.GcsSource") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.vision.v1.GcsDestination") + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.vision.v1.OperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.vision.v1.Feature_Type", Feature_Type_name, Feature_Type_value) + proto.RegisterEnum("google.cloud.vision.v1.FaceAnnotation_Landmark_Type", FaceAnnotation_Landmark_Type_name, FaceAnnotation_Landmark_Type_value) + proto.RegisterEnum("google.cloud.vision.v1.OperationMetadata_State", OperationMetadata_State_name, OperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ImageAnnotatorClient is the client API for ImageAnnotator service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ImageAnnotatorClient interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) + // Service that performs image detection and annotation for a batch of files. + // Now only "application/pdf", "image/tiff" and "image/gif" are supported. + // + // This service will extract at most 5 (customers can specify which 5 in + // AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each + // file provided and perform detection and annotation for each image + // extracted. 
+ BatchAnnotateFiles(ctx context.Context, in *BatchAnnotateFilesRequest, opts ...grpc.CallOption) (*BatchAnnotateFilesResponse, error) + // Run asynchronous image detection and annotation for a list of images. + // + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results). + // + // This service will write image annotation outputs to json files in customer + // GCS bucket, each json file containing BatchAnnotateImagesResponse proto. + AsyncBatchAnnotateImages(ctx context.Context, in *AsyncBatchAnnotateImagesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). + AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type imageAnnotatorClient struct { + cc *grpc.ClientConn +} + +func NewImageAnnotatorClient(cc *grpc.ClientConn) ImageAnnotatorClient { + return &imageAnnotatorClient{cc} +} + +func (c *imageAnnotatorClient) BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) { + out := new(BatchAnnotateImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ImageAnnotator/BatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) BatchAnnotateFiles(ctx context.Context, in *BatchAnnotateFilesRequest, opts ...grpc.CallOption) (*BatchAnnotateFilesResponse, error) { + out := new(BatchAnnotateFilesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ImageAnnotator/BatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateImages(ctx context.Context, in *AsyncBatchAnnotateImagesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ImageAnnotator/AsyncBatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ImageAnnotator/AsyncBatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ImageAnnotatorServer is the server API for ImageAnnotator service. +type ImageAnnotatorServer interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(context.Context, *BatchAnnotateImagesRequest) (*BatchAnnotateImagesResponse, error) + // Service that performs image detection and annotation for a batch of files. + // Now only "application/pdf", "image/tiff" and "image/gif" are supported. 
+ // + // This service will extract at most 5 (customers can specify which 5 in + // AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each + // file provided and perform detection and annotation for each image + // extracted. + BatchAnnotateFiles(context.Context, *BatchAnnotateFilesRequest) (*BatchAnnotateFilesResponse, error) + // Run asynchronous image detection and annotation for a list of images. + // + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results). + // + // This service will write image annotation outputs to json files in customer + // GCS bucket, each json file containing BatchAnnotateImagesResponse proto. + AsyncBatchAnnotateImages(context.Context, *AsyncBatchAnnotateImagesRequest) (*longrunning.Operation, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). + AsyncBatchAnnotateFiles(context.Context, *AsyncBatchAnnotateFilesRequest) (*longrunning.Operation, error) +} + +func RegisterImageAnnotatorServer(s *grpc.Server, srv ImageAnnotatorServer) { + s.RegisterService(&_ImageAnnotator_serviceDesc, srv) +} + +func _ImageAnnotator_BatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ImageAnnotator/BatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, req.(*BatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_BatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ImageAnnotator/BatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateFiles(ctx, req.(*BatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.cloud.vision.v1.ImageAnnotator/AsyncBatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateImages(ctx, req.(*AsyncBatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ImageAnnotator/AsyncBatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, req.(*AsyncBatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ImageAnnotator_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1.ImageAnnotator", + HandlerType: (*ImageAnnotatorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchAnnotateImages", + Handler: _ImageAnnotator_BatchAnnotateImages_Handler, + }, + { + MethodName: "BatchAnnotateFiles", + Handler: _ImageAnnotator_BatchAnnotateFiles_Handler, + }, + { + MethodName: "AsyncBatchAnnotateImages", + Handler: _ImageAnnotator_AsyncBatchAnnotateImages_Handler, + }, + { + MethodName: "AsyncBatchAnnotateFiles", + Handler: _ImageAnnotator_AsyncBatchAnnotateFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1/image_annotator.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/image_annotator.proto", fileDescriptor_image_annotator_faae31fa1d905783) +} + +var fileDescriptor_image_annotator_faae31fa1d905783 = []byte{ + // 3240 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x5a, 0x5f, 0x6f, 0x1b, 0x59, + 0x15, 0x5f, 0xdb, 0x71, 0x62, 0x1f, 0x3b, 0xc9, 0xe4, 0x3a, 0x49, 0xdd, 0xb4, 0x69, 0xd3, 0x59, + 0x96, 0x8d, 0xba, 0x25, 0xa1, 0xd9, 0xa5, 0x2c, 0xdb, 0x0a, 0xd6, 0x71, 0x26, 0x89, 0xb7, 0x8e, + 0xed, 0xbd, 0x76, 0xda, 0xed, 0xb2, 0x62, 0x34, 0x19, 0x5f, 0xbb, 0xd3, 0x1d, 0xcf, 0x0c, 0x33, + 0xe3, 0x36, 0x59, 0x24, 0x1e, 0x78, 0x40, 0xda, 0x57, 0x78, 0x02, 0x21, 0x84, 0x40, 0x82, 0x2f, + 0x00, 0x3c, 0xf3, 0x86, 0x90, 0x78, 0x41, 0xfb, 0x15, 0x78, 0xe0, 0x23, 0x20, 0xf1, 0x82, 0xee, + 0x9d, 0x3b, 0xe3, 0x3b, 0xfe, 0x57, 0xa7, 0xac, 0x10, 0x12, 0x4f, 0x9e, 0x7b, 0xfe, 0xfc, 0xee, + 0x99, 0x73, 0xee, 0x39, 0xf7, 0xdc, 0x3b, 0x86, 0x3b, 0x5d, 0xdb, 0xee, 0x9a, 0x64, 0x57, 0x37, + 0xed, 0x7e, 0x7b, 0xf7, 0xb9, 0xe1, 0x19, 0xb6, 0xb5, 0xfb, 0xfc, 0xee, 0xae, 0xd1, 0xd3, 0xba, + 0x44, 0xd5, 0x2c, 0xcb, 0xf6, 0x35, 0xdf, 0x76, 0x77, 0x1c, 0xd7, 0xf6, 0x6d, 0xb4, 0x1e, 0x48, + 0xef, 0x30, 0xe9, 0x9d, 0x40, 0x7a, 0xe7, 0xf9, 0xdd, 0x8d, 0xeb, 0x1c, 0x45, 0x73, 0x8c, 0x5d, + 0xae, 0x63, 0xd8, 0x96, 0x17, 0x68, 0x6d, 0xbc, 0x31, 0x61, 0x8e, 0x2e, 0xb1, 0x7b, 0xc4, 0x77, + 0x2f, 0xb8, 0xd8, 0x5b, 0x13, 0xc4, 0x1c, 0xd7, 0x6e, 0xf7, 0x75, 0x5f, 0xf5, 0x88, 0xe6, 0xea, + 0x4f, 0xb9, 0xf0, 0x24, 0xbb, 0x7d, 0x72, 0xee, 0xab, 0x03, 0x13, 0xb8, 0xf4, 0xed, 0x09, 0xd2, + 0x2f, 0xc8, 0x99, 0xda, 0x26, 0x3e, 0xd1, 0x05, 0xd9, 0xd7, 0xb9, 0xac, 0x69, 0x5b, 0x5d, 0xb7, + 0x6f, 0x59, 0x86, 0xd5, 0xdd, 0xb5, 0x1d, 0xe2, 0xc6, 0x5e, 
0x69, 0x8b, 0x0b, 0xb1, 0xd1, 0x59, + 0xbf, 0xb3, 0xdb, 0x31, 0x88, 0xd9, 0x56, 0x7b, 0x9a, 0xf7, 0x29, 0x97, 0xb8, 0x39, 0x2c, 0xe1, + 0x1b, 0x3d, 0xe2, 0xf9, 0x5a, 0xcf, 0xe1, 0x02, 0x57, 0xb8, 0x80, 0xeb, 0xe8, 0xbb, 0x9e, 0xaf, + 0xf9, 0x7d, 0x6f, 0x88, 0xe1, 0x5f, 0x38, 0x64, 0x57, 0xb7, 0xcd, 0xd0, 0xfb, 0x1b, 0x45, 0x91, + 0x61, 0x6a, 0xbe, 0x69, 0x75, 0x03, 0x8e, 0xfc, 0xab, 0x14, 0x2c, 0x1c, 0x12, 0xcd, 0xef, 0xbb, + 0x04, 0xbd, 0x0b, 0x73, 0x54, 0xa0, 0x98, 0xd8, 0x4a, 0x6c, 0x2f, 0xed, 0x7d, 0x65, 0x67, 0x7c, + 0xc8, 0x76, 0xb8, 0xf8, 0x4e, 0xeb, 0xc2, 0x21, 0x98, 0x69, 0xa0, 0x9b, 0x90, 0xeb, 0x69, 0xe7, + 0xaa, 0x4b, 0xbc, 0xbe, 0xe9, 0x7b, 0xc5, 0xe4, 0x56, 0x62, 0x3b, 0x8d, 0xa1, 0xa7, 0x9d, 0xe3, + 0x80, 0x82, 0x56, 0x21, 0xdd, 0xb3, 0xdb, 0xc4, 0x2c, 0xa6, 0xb6, 0x12, 0xdb, 0x59, 0x1c, 0x0c, + 0xe4, 0xdf, 0x24, 0x61, 0x8e, 0xa2, 0xa0, 0x55, 0x90, 0x5a, 0x4f, 0x1a, 0x8a, 0x7a, 0x5a, 0x6b, + 0x36, 0x94, 0x72, 0xe5, 0xb0, 0xa2, 0x1c, 0x48, 0xaf, 0x21, 0x04, 0x4b, 0x87, 0xa5, 0xb2, 0xa2, + 0x1e, 0x28, 0x2d, 0xa5, 0xdc, 0xaa, 0xd4, 0x6b, 0x52, 0x02, 0xad, 0x03, 0xaa, 0x96, 0x6a, 0x07, + 0x27, 0x25, 0xfc, 0x50, 0xa0, 0x27, 0xa9, 0x6c, 0xb5, 0x7e, 0x54, 0x17, 0x68, 0x29, 0x54, 0x80, + 0xe5, 0x6a, 0x69, 0x5f, 0xa9, 0x0a, 0xc4, 0x39, 0x2a, 0xd8, 0x52, 0x3e, 0x6a, 0x09, 0xb4, 0x34, + 0xba, 0x06, 0x57, 0x0e, 0xea, 0xe5, 0xd3, 0x13, 0xa5, 0xd6, 0x52, 0x87, 0x98, 0x39, 0x74, 0x15, + 0xd6, 0x9a, 0xa5, 0x43, 0x45, 0x6d, 0x2a, 0x25, 0x5c, 0x3e, 0x16, 0x58, 0xf3, 0xd4, 0xec, 0xca, + 0x49, 0xe9, 0x48, 0x51, 0x1b, 0xb8, 0xde, 0x50, 0x70, 0xab, 0xa2, 0x34, 0xa5, 0x05, 0xb4, 0x04, + 0x50, 0xc6, 0xf5, 0x86, 0x7a, 0x5c, 0xa9, 0xb5, 0x9a, 0x52, 0x16, 0xad, 0xc0, 0xe2, 0x63, 0x65, + 0x5f, 0x50, 0x04, 0x6a, 0x44, 0x03, 0xd7, 0x0f, 0x4e, 0xcb, 0x2d, 0x0e, 0x2b, 0xe5, 0xd1, 0x15, + 0x28, 0xd4, 0xf7, 0x3f, 0x50, 0xca, 0x2d, 0xb5, 0x5a, 0x2f, 0x97, 0xaa, 0x95, 0x8f, 0x4b, 0x4c, + 0xb8, 0x20, 0xd7, 0x20, 0x57, 0xa1, 0x39, 0xd5, 0xb4, 0xfb, 0xae, 0x4e, 0x90, 0x0c, 0x8b, 0x5d, + 0xdd, 0x53, 0x83, 0x34, 0xeb, 0xbb, 0x06, 0x0b, 0x57, 0x16, 0xe7, 0xba, 0xba, 0xc7, 0xc4, 0x4e, + 0x5d, 0x03, 0x5d, 0x83, 0xec, 0x80, 0x9f, 0x64, 0xfc, 0x8c, 0xc1, 0x99, 0xf2, 0xf7, 0x20, 0xcd, + 0x04, 0x51, 0x11, 0x16, 0x74, 0xdb, 0xf2, 0x89, 0xe5, 0x33, 0x8c, 0x3c, 0x0e, 0x87, 0xe8, 0x3e, + 0xcc, 0x7b, 0x6c, 0x36, 0xa6, 0x9c, 0xdb, 0x7b, 0x7d, 0xd2, 0x5a, 0x10, 0x0c, 0xc3, 0x5c, 0x45, + 0xfe, 0xdb, 0x32, 0x2c, 0x1d, 0x6a, 0x3a, 0x29, 0x45, 0xb9, 0x84, 0x2a, 0xb0, 0x78, 0x66, 0xf7, + 0xad, 0xb6, 0x61, 0x75, 0x55, 0xc7, 0x36, 0x2f, 0xd8, 0x7c, 0xb9, 0xc9, 0x4b, 0x6c, 0x9f, 0x0b, + 0x37, 0x6c, 0xf3, 0x02, 0xe7, 0xcf, 0x84, 0x11, 0xaa, 0x81, 0xd4, 0x69, 0xab, 0x71, 0xb4, 0xe4, + 0x25, 0xd0, 0x96, 0x3a, 0x6d, 0x71, 0x8c, 0x4e, 0x20, 0x6b, 0x6a, 0x56, 0xbb, 0xa7, 0xb9, 0x9f, + 0x7a, 0xc5, 0xd4, 0x56, 0x6a, 0x3b, 0xb7, 0xb7, 0x3b, 0x71, 0xe5, 0xc7, 0xde, 0x6a, 0xa7, 0xca, + 0xf5, 0xf0, 0x00, 0x01, 0x6d, 0x02, 0xb8, 0xb6, 0x69, 0xaa, 0x9a, 0xd5, 0x35, 0x49, 0x71, 0x6e, + 0x2b, 0xb1, 0x9d, 0xc4, 0x59, 0x4a, 0x29, 0x51, 0x02, 0x0d, 0x8c, 0xa3, 0x59, 0x9c, 0x9b, 0x66, + 0xdc, 0x8c, 0xa3, 0x59, 0x01, 0x73, 0x13, 0xc0, 0x37, 0x4c, 0x9f, 0x73, 0xe7, 0x03, 0x5d, 0x4a, + 0x09, 0xd8, 0x77, 0x61, 0x35, 0xaa, 0x38, 0xaa, 0x6e, 0x5b, 0x1d, 0xa3, 0x4d, 0x2c, 0x9d, 0x14, + 0x17, 0x98, 0x60, 0x21, 0xe2, 0x95, 0x23, 0x16, 0xfa, 0x06, 0xac, 0x87, 0xa6, 0x51, 0x67, 0x09, + 0x4a, 0x19, 0xa6, 0xb4, 0x26, 0x70, 0x05, 0xb5, 0x0a, 0x2c, 0x3d, 0xb3, 0x2f, 0x54, 0xd3, 0xf8, + 0x94, 0x98, 0xc6, 0x53, 0xdb, 0x6e, 0x17, 0xb3, 0xac, 0x24, 0xc8, 0x93, 0x1c, 0x53, 
0x8d, 0x24, + 0xf1, 0xe2, 0x33, 0xfb, 0x62, 0x30, 0x44, 0x75, 0x58, 0xf1, 0x6c, 0xd7, 0xb5, 0x5f, 0x88, 0x68, + 0x30, 0x33, 0x9a, 0x14, 0x28, 0x0b, 0x80, 0x27, 0x20, 0x69, 0x56, 0x97, 0xb8, 0x22, 0x5e, 0x6e, + 0x66, 0xbc, 0x65, 0xa6, 0x2b, 0xc0, 0x35, 0xa1, 0xe0, 0xf5, 0x5d, 0xc7, 0x35, 0x3c, 0x22, 0x22, + 0xe6, 0x67, 0x46, 0x44, 0xa1, 0xba, 0x00, 0xfa, 0x09, 0x14, 0xfb, 0x56, 0x9b, 0xb8, 0x2a, 0x39, + 0x77, 0x6c, 0x8f, 0xb4, 0x45, 0xe4, 0xc5, 0x99, 0x91, 0xd7, 0x19, 0x86, 0x12, 0x40, 0x08, 0xe8, + 0x1f, 0x02, 0x3a, 0x33, 0xfb, 0xae, 0x1b, 0xc7, 0x5d, 0x9a, 0x19, 0x77, 0x85, 0x6b, 0xc7, 0xbd, + 0xf0, 0x94, 0x68, 0xed, 0x17, 0x44, 0x8b, 0xf9, 0x75, 0x79, 0x76, 0x2f, 0x84, 0xea, 0x03, 0xda, + 0xc6, 0x5f, 0x16, 0x20, 0x13, 0xa6, 0x08, 0x3a, 0xe6, 0x7b, 0x4b, 0x8a, 0x41, 0xbe, 0x73, 0xc9, + 0x0c, 0x13, 0xf7, 0x9a, 0x07, 0x90, 0x71, 0x6c, 0xcf, 0xa0, 0x7c, 0x96, 0x5f, 0xb9, 0xbd, 0xad, + 0x49, 0x68, 0x0d, 0x2e, 0x87, 0x23, 0x0d, 0xf9, 0xf7, 0xf3, 0x83, 0x2d, 0xe7, 0xb4, 0xf6, 0xb0, + 0x56, 0x7f, 0x5c, 0x53, 0xc3, 0x0d, 0x45, 0x7a, 0x0d, 0xe5, 0x21, 0x53, 0x55, 0x0e, 0x5b, 0xaa, + 0xf2, 0x44, 0x91, 0x12, 0x68, 0x11, 0xb2, 0xb8, 0x72, 0x74, 0x1c, 0x0c, 0x93, 0xa8, 0x08, 0xab, + 0x8c, 0x59, 0x3f, 0x54, 0x43, 0xa1, 0x7d, 0x5c, 0x7f, 0x2c, 0xa5, 0xe8, 0x1e, 0x11, 0x08, 0x0e, + 0xb3, 0xe6, 0x28, 0x2b, 0x54, 0x8a, 0xb0, 0x18, 0x2b, 0x8d, 0x36, 0x60, 0x3d, 0xd2, 0x8a, 0xf3, + 0xe6, 0xa9, 0xda, 0x49, 0xe5, 0xa0, 0x51, 0xaf, 0xd4, 0x5a, 0xea, 0xbe, 0xd2, 0x7a, 0xac, 0x28, + 0x35, 0xca, 0xa5, 0xfb, 0x4b, 0x1e, 0x32, 0xb5, 0x7a, 0x53, 0x51, 0x5b, 0x95, 0x86, 0x94, 0xa1, + 0x36, 0x9e, 0x36, 0x1a, 0x0a, 0x56, 0xab, 0x95, 0x86, 0x94, 0xa5, 0xc3, 0x6a, 0xfd, 0x31, 0x1f, + 0x02, 0xdd, 0x8b, 0x4e, 0xea, 0xa7, 0xad, 0x63, 0x66, 0x95, 0x94, 0x43, 0xcb, 0x90, 0x0b, 0xc6, + 0x6c, 0x3e, 0x29, 0x8f, 0x24, 0xc8, 0x07, 0x84, 0xb2, 0x52, 0x6b, 0x29, 0x58, 0x5a, 0x44, 0x6b, + 0xb0, 0xc2, 0xe0, 0xf7, 0xeb, 0xad, 0x56, 0xfd, 0x84, 0x0b, 0x2e, 0x51, 0x7f, 0x89, 0x64, 0x86, + 0xb7, 0x4c, 0xb7, 0x63, 0x91, 0xca, 0x41, 0xa4, 0xe8, 0xad, 0x95, 0x27, 0x8a, 0xda, 0xaa, 0x37, + 0xd4, 0xfd, 0xfa, 0x69, 0xed, 0xa0, 0x84, 0x9f, 0x48, 0x2b, 0x31, 0x56, 0xf0, 0xd6, 0xe5, 0x3a, + 0xae, 0x29, 0x58, 0x42, 0xe8, 0x3a, 0x14, 0x23, 0x16, 0x47, 0x8c, 0x14, 0x0b, 0x91, 0xfb, 0x29, + 0x97, 0x3d, 0x70, 0xbd, 0xd5, 0x81, 0x23, 0x47, 0xa6, 0x5b, 0x8b, 0xf3, 0x62, 0xf3, 0xad, 0xa3, + 0x4d, 0xb8, 0x3a, 0xe0, 0x0d, 0x4f, 0x78, 0x65, 0x10, 0xd5, 0xe1, 0x19, 0x8b, 0xe8, 0x26, 0x5c, + 0x13, 0xe3, 0xac, 0x06, 0x21, 0x08, 0x23, 0x26, 0x5d, 0x45, 0x5b, 0x70, 0x3d, 0x16, 0xd2, 0x61, + 0x89, 0x0d, 0xea, 0xd0, 0x00, 0xa2, 0x84, 0xd5, 0x16, 0x2e, 0x1d, 0xd1, 0xcd, 0xfe, 0x1a, 0xf5, + 0x3e, 0xd7, 0x13, 0xc8, 0xd7, 0x59, 0x7b, 0x13, 0xbe, 0x7b, 0xe3, 0xb4, 0x51, 0xa9, 0x4a, 0x9b, + 0xb4, 0xbd, 0x19, 0x98, 0x17, 0x10, 0x6f, 0x50, 0xfd, 0xc3, 0x3a, 0x56, 0x8e, 0x95, 0xd2, 0x81, + 0x7a, 0xc4, 0xba, 0x9f, 0x6a, 0x49, 0xba, 0x49, 0x7b, 0x90, 0xf2, 0x71, 0xa5, 0xa6, 0x1e, 0xd5, + 0x4a, 0xad, 0x63, 0x0a, 0xb9, 0x45, 0xe7, 0x67, 0x24, 0x86, 0x7b, 0x54, 0xaf, 0x51, 0xea, 0x2d, + 0xaa, 0xcf, 0xa8, 0x01, 0x32, 0x27, 0xcb, 0xf2, 0x03, 0xc8, 0x57, 0x6d, 0x9d, 0x25, 0x65, 0xc5, + 0xea, 0xd8, 0xe8, 0x0e, 0x2c, 0x98, 0x9a, 0xaf, 0x9a, 0x56, 0x97, 0x6f, 0xe5, 0x85, 0x30, 0x07, + 0x69, 0x8e, 0xee, 0x54, 0x35, 0xbf, 0x6a, 0x75, 0xf1, 0xbc, 0xc9, 0x7e, 0xe5, 0xc7, 0x90, 0x69, + 0xb8, 0xb4, 0x13, 0xf6, 0x2f, 0x10, 0x82, 0x39, 0x4b, 0xeb, 0x11, 0xde, 0xb5, 0xb0, 0x67, 0xda, + 0x1d, 0x3e, 0xd7, 0xcc, 0x3e, 0xe1, 0xad, 0x4a, 0x30, 0x40, 0xb7, 0x20, 0xdf, 0x37, 0x2c, 0xff, + 0xde, 0x3b, 
0x6a, 0xc0, 0xa4, 0xa5, 0x63, 0x0e, 0xe7, 0x02, 0xda, 0x23, 0x4a, 0x92, 0x3f, 0x4f, + 0x81, 0xa4, 0x58, 0xbe, 0xe1, 0x5f, 0x08, 0xcd, 0x86, 0x04, 0xa9, 0x9e, 0xd1, 0xe6, 0x13, 0xd0, + 0x47, 0xb4, 0x0e, 0xf3, 0xa6, 0xad, 0x6b, 0x66, 0x38, 0x01, 0x1f, 0xa1, 0x2d, 0xc8, 0xb5, 0x89, + 0xa7, 0xbb, 0x86, 0xc3, 0xaa, 0x49, 0xd0, 0x9b, 0x8a, 0x24, 0x6a, 0x99, 0xa7, 0xdb, 0x6e, 0xb8, + 0x93, 0x07, 0x03, 0x24, 0x03, 0x08, 0x5b, 0x29, 0xdb, 0xc6, 0xf7, 0x93, 0xc5, 0x04, 0x16, 0xa8, + 0xe8, 0x06, 0x80, 0x6f, 0x3b, 0x86, 0xae, 0x99, 0x86, 0x7f, 0xc1, 0x37, 0x73, 0x81, 0x32, 0xda, + 0x12, 0x2d, 0xbc, 0x72, 0x4b, 0xb4, 0x0f, 0x59, 0x93, 0x07, 0xc7, 0x2b, 0x66, 0x58, 0x0b, 0x33, + 0x11, 0x46, 0x8c, 0x22, 0x1e, 0xa8, 0xa1, 0xf7, 0x01, 0x9c, 0x20, 0x44, 0x06, 0xf1, 0x8a, 0x59, + 0x06, 0x32, 0xb9, 0xae, 0xf2, 0x60, 0x62, 0x41, 0x47, 0xfe, 0x73, 0x02, 0xae, 0x52, 0x74, 0xd3, + 0xf8, 0x8c, 0xb4, 0xeb, 0x67, 0xcf, 0x88, 0xee, 0x4f, 0x0d, 0xca, 0xeb, 0xb0, 0x68, 0x6a, 0x56, + 0xb7, 0x4f, 0xdb, 0x54, 0xdd, 0x6e, 0x87, 0xb1, 0xc9, 0x87, 0xc4, 0xb2, 0xdd, 0x26, 0xd1, 0x6a, + 0x49, 0xc5, 0x57, 0xcb, 0x98, 0x98, 0x8c, 0xf8, 0x33, 0xfd, 0xaa, 0xfe, 0x94, 0xff, 0x94, 0x84, + 0xd5, 0xa6, 0xd6, 0x21, 0x4d, 0x76, 0x6c, 0x14, 0x5e, 0xe2, 0x5d, 0x48, 0x6b, 0xed, 0xbe, 0xe9, + 0xf3, 0x13, 0xd2, 0x2c, 0x1b, 0x63, 0xa0, 0x40, 0x35, 0x3d, 0xc7, 0xb6, 0x3b, 0xec, 0x25, 0x67, + 0xd4, 0x64, 0x0a, 0xe8, 0x01, 0x2c, 0xf4, 0x48, 0x9b, 0xae, 0x1a, 0xbe, 0x77, 0xce, 0xa2, 0x1b, + 0xaa, 0xa0, 0x6f, 0x43, 0xe6, 0xb9, 0x61, 0x9b, 0x6c, 0x9d, 0xce, 0xcd, 0xac, 0x1e, 0xe9, 0xa0, + 0x7b, 0x30, 0xe7, 0x6a, 0xfa, 0xc5, 0x25, 0xfa, 0x3f, 0x26, 0x2f, 0xbf, 0x80, 0x1c, 0xad, 0x01, + 0xb6, 0xd5, 0xc5, 0x44, 0xf7, 0xd1, 0xdb, 0x90, 0xeb, 0x19, 0x96, 0x3a, 0x43, 0xc9, 0xc8, 0xf6, + 0x0c, 0x2b, 0x78, 0x64, 0x4a, 0xda, 0x79, 0xa4, 0x94, 0x9c, 0xa6, 0xa4, 0x9d, 0x07, 0x8f, 0xb2, + 0x0b, 0xd9, 0x32, 0x3d, 0xf8, 0xb2, 0x2a, 0xb5, 0x0d, 0x69, 0x76, 0x0a, 0xe6, 0x13, 0xa2, 0x98, + 0x2e, 0x13, 0xc3, 0x81, 0xc0, 0x60, 0x4d, 0x25, 0xc5, 0x35, 0xf5, 0x06, 0x2c, 0x39, 0xc6, 0x39, + 0x31, 0xd5, 0x8e, 0xab, 0xe9, 0x51, 0x89, 0x48, 0xe2, 0x45, 0x46, 0x3d, 0xe4, 0x44, 0xf9, 0x14, + 0x8a, 0x07, 0x76, 0xcf, 0xb0, 0x34, 0xcb, 0x67, 0xa0, 0x9e, 0xb0, 0x64, 0xbe, 0x05, 0xf3, 0x6c, + 0x06, 0xaf, 0x98, 0x60, 0x39, 0x75, 0x6b, 0x92, 0x0b, 0x23, 0xab, 0x31, 0x57, 0x90, 0x4d, 0x58, + 0x66, 0xc7, 0xab, 0x46, 0x94, 0x63, 0xe8, 0x09, 0x2c, 0xb7, 0xf9, 0x4c, 0x6a, 0x04, 0x4b, 0x5f, + 0xed, 0xeb, 0x93, 0x60, 0x27, 0x19, 0x86, 0x97, 0xda, 0x31, 0x8e, 0xfc, 0xdb, 0x04, 0x64, 0xca, + 0xae, 0xed, 0x1c, 0x1b, 0x96, 0xff, 0x65, 0x9e, 0xd7, 0x6e, 0xc4, 0x6a, 0x65, 0xe0, 0x5e, 0xb1, + 0x4e, 0xee, 0x42, 0xc1, 0xe8, 0x39, 0xb6, 0xeb, 0x6b, 0x96, 0x4e, 0x86, 0x1d, 0x8d, 0x06, 0xac, + 0xc8, 0xdb, 0x8f, 0xa0, 0x10, 0xda, 0x29, 0x3a, 0xfa, 0x3b, 0x00, 0xba, 0x6b, 0x3b, 0xea, 0x53, + 0x4a, 0xe7, 0xce, 0x9e, 0x58, 0xc0, 0x42, 0x00, 0x9c, 0xd5, 0x43, 0x28, 0xf9, 0x1e, 0x2c, 0x47, + 0xb8, 0x0d, 0xcd, 0xd5, 0x7a, 0x1e, 0x2d, 0x51, 0x9a, 0xe7, 0x10, 0xdd, 0x57, 0xd9, 0x1d, 0x4e, + 0x00, 0x9b, 0xc4, 0xf9, 0x80, 0x88, 0x19, 0x4d, 0x3e, 0x00, 0xf4, 0x98, 0x9c, 0x1d, 0x84, 0xa7, + 0x2f, 0xae, 0xba, 0x03, 0x05, 0xc3, 0xd2, 0xcd, 0x7e, 0x9b, 0xa8, 0x5d, 0x62, 0xc7, 0x6e, 0x46, + 0x32, 0x78, 0x85, 0xb3, 0x8e, 0x88, 0xcd, 0x2f, 0x48, 0xe4, 0x9f, 0xa4, 0x20, 0xcf, 0xa2, 0x5d, + 0xa6, 0x47, 0xf0, 0x73, 0x1f, 0x1d, 0xd1, 0xf2, 0xe8, 0xab, 0xa6, 0x6d, 0x75, 0x55, 0x97, 0xe8, + 0x3e, 0x0f, 0xc1, 0xc4, 0x93, 0xb8, 0x90, 0x6e, 0x38, 0x67, 0x0a, 0xb9, 0xf7, 0x06, 0x2c, 0x45, + 0x75, 0x36, 0x70, 0x4e, 0x72, 0x2b, 
0xb5, 0x9d, 0xc5, 0x51, 0xf5, 0x65, 0x6f, 0x8c, 0x9a, 0xb0, + 0x32, 0xf0, 0x9f, 0xea, 0xb0, 0xb7, 0xe0, 0xfd, 0xf5, 0x9b, 0x2f, 0x73, 0x23, 0xf7, 0x17, 0x5e, + 0xd6, 0x87, 0x1c, 0xa8, 0xc2, 0x5a, 0xfc, 0x0e, 0x2e, 0x04, 0x0e, 0x8a, 0xf3, 0x5b, 0x53, 0x36, + 0x18, 0xaa, 0x14, 0x14, 0x60, 0x0e, 0x5e, 0x70, 0x46, 0x89, 0xe8, 0x13, 0x58, 0x8d, 0xdd, 0xc4, + 0x85, 0xf8, 0xf3, 0x0c, 0xff, 0xf6, 0x24, 0xfc, 0xd1, 0x80, 0x61, 0xf4, 0x62, 0x84, 0x26, 0x7f, + 0x91, 0x80, 0x55, 0xbe, 0xc4, 0x08, 0x0b, 0x0e, 0x26, 0xdf, 0xef, 0x13, 0x8f, 0xd6, 0xb3, 0x34, + 0xbb, 0x4e, 0xe1, 0x41, 0xd9, 0x9c, 0x7a, 0x3d, 0x82, 0x03, 0x59, 0x74, 0x1f, 0x32, 0x9d, 0xe0, + 0xea, 0x2c, 0x08, 0x41, 0x6e, 0xef, 0xe6, 0x4b, 0xae, 0xd8, 0x70, 0xa4, 0x40, 0x33, 0x32, 0xb8, + 0xd1, 0xd1, 0x83, 0xf5, 0xc1, 0x12, 0x64, 0x4a, 0x46, 0x8a, 0x6b, 0x09, 0xe7, 0x0d, 0x61, 0x24, + 0x3f, 0x84, 0x75, 0xc6, 0x1d, 0x24, 0x4f, 0xb8, 0xe6, 0x24, 0x48, 0x0d, 0x2e, 0x94, 0xe8, 0x23, + 0xba, 0x09, 0x39, 0x87, 0xce, 0x6a, 0xf5, 0x7b, 0x67, 0xc4, 0x0d, 0x2f, 0xf6, 0x28, 0xa9, 0xc6, + 0x28, 0xf2, 0x3f, 0xb2, 0xb0, 0x36, 0xe4, 0x22, 0xcf, 0xb1, 0x2d, 0x8f, 0xa0, 0x0f, 0x41, 0xea, + 0x68, 0x3a, 0x11, 0xae, 0x54, 0xc3, 0xb4, 0xfc, 0xea, 0x6c, 0xa7, 0x3f, 0xbc, 0xdc, 0x89, 0x8d, + 0x3d, 0xf4, 0x5d, 0x58, 0x0d, 0x2f, 0x2c, 0x62, 0xb0, 0x81, 0x37, 0xb7, 0x27, 0xc1, 0x0e, 0x77, + 0x88, 0xb8, 0x10, 0xa2, 0x88, 0xe0, 0x4d, 0x90, 0x4c, 0xbb, 0x6b, 0xc7, 0x80, 0x53, 0x97, 0x04, + 0x5e, 0xa6, 0x08, 0x22, 0xe8, 0x29, 0xac, 0x98, 0xda, 0x19, 0x31, 0x63, 0xa8, 0x73, 0x97, 0x44, + 0x95, 0x18, 0x84, 0x08, 0xeb, 0xc1, 0x75, 0x33, 0x6c, 0xb5, 0x54, 0x9b, 0xf5, 0x5a, 0xb1, 0x19, + 0xd6, 0xd9, 0x0c, 0x77, 0xa7, 0x35, 0x81, 0x63, 0xdb, 0x34, 0xbc, 0x61, 0x4e, 0x62, 0x31, 0x07, + 0x0d, 0xdd, 0x91, 0xd3, 0x3c, 0xbe, 0xa4, 0x83, 0x28, 0x82, 0x08, 0xfa, 0x11, 0xac, 0x76, 0xfa, + 0xa6, 0xa9, 0x0e, 0x21, 0xb3, 0x0b, 0x98, 0x29, 0x2b, 0xa5, 0x15, 0x83, 0xc1, 0x88, 0x62, 0xc4, + 0x69, 0xe8, 0x0c, 0xd6, 0x3d, 0xad, 0x43, 0xc2, 0xc2, 0x23, 0x60, 0x07, 0xc5, 0xe1, 0xce, 0x24, + 0xec, 0x71, 0xad, 0x1f, 0x5e, 0xf5, 0xc6, 0x35, 0x84, 0x5d, 0xb8, 0x16, 0x64, 0xe5, 0xa0, 0x0f, + 0x16, 0x27, 0xca, 0x4c, 0x2f, 0x9f, 0x43, 0xbb, 0x3b, 0xbe, 0x6a, 0xc4, 0x09, 0xc2, 0x44, 0x2a, + 0xac, 0x09, 0xd5, 0x59, 0x98, 0x22, 0x37, 0xbd, 0x90, 0x8e, 0xd9, 0x29, 0x71, 0x41, 0x1f, 0xb3, + 0x7d, 0x56, 0x60, 0x31, 0x56, 0x48, 0xd9, 0x3d, 0xd5, 0x94, 0xfa, 0x22, 0x56, 0x50, 0x9c, 0x17, + 0x6b, 0x27, 0x75, 0xfc, 0x50, 0xd1, 0x0f, 0x77, 0xbf, 0xa5, 0xe9, 0x8e, 0x8f, 0x55, 0x7d, 0xbe, + 0x31, 0xe2, 0x55, 0x67, 0x0c, 0x95, 0x76, 0x76, 0xc4, 0x75, 0x6d, 0x97, 0x35, 0xa6, 0x42, 0x67, + 0xe7, 0x3a, 0xfa, 0x4e, 0x93, 0x7d, 0x12, 0xc1, 0x81, 0x00, 0x3a, 0xe6, 0x97, 0xdc, 0xe7, 0x7e, + 0x71, 0x8d, 0xc9, 0xee, 0x4c, 0x0d, 0xc7, 0x48, 0x51, 0xc4, 0xa1, 0xba, 0xfc, 0x57, 0x61, 0x37, + 0x38, 0x34, 0xcc, 0x41, 0xa5, 0x3b, 0x84, 0xbc, 0x61, 0x39, 0x7d, 0x3f, 0xb8, 0x5f, 0xed, 0xbe, + 0x6c, 0xa7, 0xae, 0x50, 0x59, 0x76, 0xdb, 0xda, 0xc5, 0x39, 0x63, 0x30, 0x40, 0x0f, 0x21, 0xeb, + 0x72, 0xcc, 0xb0, 0xa6, 0x7d, 0x6d, 0x12, 0xc8, 0xd8, 0x9a, 0x8b, 0x07, 0xfa, 0xb4, 0x72, 0xfb, + 0xb6, 0xaf, 0x99, 0x2a, 0x2d, 0xd6, 0x1e, 0xdb, 0x2e, 0xd2, 0xf4, 0x00, 0xea, 0x6b, 0x66, 0x83, + 0x52, 0xe4, 0x0e, 0x6c, 0xec, 0x6b, 0x7e, 0xb4, 0x9c, 0x03, 0x24, 0x2f, 0xdc, 0xe1, 0x8e, 0x21, + 0xe3, 0x06, 0x8f, 0x61, 0xd5, 0xbe, 0x33, 0xa3, 0x29, 0x4c, 0x09, 0x47, 0xda, 0xf2, 0x33, 0xb8, + 0x36, 0x76, 0x1e, 0xee, 0xbc, 0xd8, 0x4b, 0x27, 0xfe, 0xb3, 0x97, 0x96, 0xff, 0x95, 0x80, 0x42, + 0x3c, 0x44, 0xc1, 0xdb, 0x7c, 0x59, 0x11, 0xfa, 0x1f, 0xd9, 
0xc2, 0xe9, 0x71, 0x25, 0x08, 0x2b, + 0xdd, 0x4a, 0xd2, 0x38, 0x18, 0xc8, 0x6d, 0xb8, 0x1a, 0xf3, 0x34, 0xf5, 0x40, 0x14, 0xd0, 0xa3, + 0x91, 0x80, 0xbe, 0xf5, 0x32, 0x37, 0x0b, 0x1e, 0x14, 0xe2, 0xf9, 0x74, 0x68, 0xdd, 0xf0, 0x59, + 0x78, 0x38, 0x3f, 0x18, 0x0d, 0xe7, 0x9d, 0xd9, 0xe6, 0x19, 0x8d, 0xe6, 0x1f, 0x92, 0x50, 0x2c, + 0x79, 0x17, 0x96, 0xfe, 0xff, 0x10, 0xd2, 0x0a, 0x2c, 0xda, 0x7d, 0x5f, 0x78, 0xa1, 0xb9, 0xe9, + 0x50, 0x75, 0x26, 0xcc, 0xdf, 0x28, 0x6f, 0x0b, 0x23, 0xb9, 0x03, 0x57, 0xc7, 0xb8, 0x8d, 0x07, + 0x68, 0x64, 0x9e, 0xc4, 0x2b, 0xcf, 0xf3, 0xc7, 0x04, 0xdc, 0x64, 0x13, 0xfd, 0x37, 0xea, 0xc8, + 0xa8, 0xe1, 0xc9, 0x57, 0x36, 0xbc, 0x07, 0x5b, 0x93, 0xed, 0xfe, 0xf2, 0xfd, 0x64, 0xc1, 0x8d, + 0xd1, 0xe9, 0x62, 0xc9, 0x59, 0x1d, 0xf1, 0xd2, 0xc4, 0x03, 0xfd, 0xa4, 0x84, 0x10, 0x32, 0xd4, + 0x1d, 0x17, 0x96, 0x78, 0x9a, 0xd6, 0x47, 0xd3, 0xf4, 0xee, 0x25, 0x66, 0x1c, 0xcd, 0xd5, 0x1f, + 0x27, 0x20, 0x27, 0xe4, 0x18, 0x7a, 0x1f, 0xa0, 0xab, 0x7b, 0x2a, 0xff, 0x8a, 0x1c, 0xf8, 0x6e, + 0xe2, 0xdd, 0xc7, 0x91, 0xee, 0xf1, 0x6f, 0xc8, 0xd9, 0x6e, 0xf8, 0x28, 0x7e, 0x9d, 0x4e, 0xc5, + 0xbf, 0x4e, 0x5f, 0x83, 0x6c, 0xcf, 0xe8, 0x11, 0x95, 0x7d, 0x50, 0xe2, 0x5f, 0xb7, 0x29, 0xa1, + 0x75, 0xe1, 0x10, 0xf9, 0x87, 0x90, 0x17, 0x43, 0x81, 0xea, 0xb0, 0x4c, 0x0d, 0x69, 0x13, 0xcf, + 0x37, 0xac, 0xa0, 0x67, 0x4a, 0x4c, 0xef, 0x2d, 0x8f, 0x74, 0xef, 0x60, 0x20, 0x8d, 0x97, 0xba, + 0xb1, 0x31, 0xda, 0x04, 0x38, 0xa3, 0x8e, 0x55, 0x3d, 0xe3, 0x33, 0xc2, 0x4f, 0x44, 0x59, 0x46, + 0x69, 0x1a, 0x9f, 0x11, 0x79, 0x13, 0xb2, 0xd1, 0xeb, 0x8c, 0x1e, 0xa8, 0x64, 0x19, 0x96, 0xe2, + 0xf8, 0x63, 0x64, 0x7e, 0x97, 0x84, 0x95, 0x7a, 0xf8, 0xbf, 0x91, 0x13, 0xe2, 0x6b, 0x6d, 0xcd, + 0xd7, 0x90, 0x02, 0x69, 0x8f, 0x46, 0x80, 0x5f, 0x3e, 0x4e, 0xfc, 0x48, 0x3d, 0xa2, 0xc9, 0x1a, + 0x22, 0x82, 0x03, 0x6d, 0x74, 0x1f, 0x72, 0xba, 0x4b, 0x34, 0x9f, 0xa8, 0xbe, 0xd1, 0x23, 0xfc, + 0x20, 0xbe, 0x11, 0x82, 0x85, 0xff, 0x39, 0xd9, 0x69, 0x85, 0xff, 0x39, 0xc1, 0x10, 0x88, 0x53, + 0x02, 0x55, 0xee, 0x3b, 0xed, 0x48, 0x79, 0xfe, 0xe5, 0xca, 0x81, 0x38, 0x25, 0xc8, 0x1f, 0x42, + 0x9a, 0x59, 0x82, 0xd6, 0x60, 0xa5, 0xd9, 0x2a, 0xb5, 0x86, 0xff, 0xee, 0x91, 0x83, 0x85, 0x32, + 0x56, 0x4a, 0x2d, 0xe5, 0x40, 0x4a, 0xd0, 0x01, 0x3e, 0xad, 0xd5, 0x2a, 0xb5, 0x23, 0x29, 0x89, + 0x32, 0x30, 0x77, 0x50, 0xaf, 0x29, 0x52, 0x0a, 0x2d, 0x42, 0xb6, 0x5c, 0xaa, 0x95, 0x95, 0x6a, + 0x55, 0x39, 0x90, 0xe6, 0x6e, 0x13, 0x00, 0xe1, 0x2b, 0x66, 0x0e, 0x16, 0xf8, 0x27, 0x3d, 0xe9, + 0x35, 0xb4, 0x02, 0x8b, 0x8f, 0x14, 0xfc, 0x44, 0x3d, 0xad, 0x55, 0x2b, 0x0f, 0x95, 0xea, 0x13, + 0x29, 0x81, 0xf2, 0x90, 0x89, 0x46, 0x49, 0x3a, 0x6a, 0xd4, 0x9b, 0xcd, 0xca, 0x7e, 0x95, 0x02, + 0x03, 0xcc, 0x73, 0xce, 0x1c, 0x5a, 0x86, 0x1c, 0x53, 0xe5, 0x84, 0xf4, 0xde, 0xe7, 0x69, 0x58, + 0x12, 0xbb, 0x43, 0xdb, 0x45, 0xbf, 0x4c, 0x40, 0x61, 0x4c, 0xf9, 0x40, 0x7b, 0x13, 0xaf, 0xc8, + 0x26, 0xd6, 0xc8, 0x8d, 0xb7, 0x2f, 0xa5, 0x13, 0x64, 0x9c, 0x7c, 0xe3, 0x47, 0x5f, 0xfc, 0xfd, + 0xa7, 0xc9, 0xa2, 0x5c, 0x88, 0xfe, 0x73, 0xe5, 0xbd, 0xc7, 0xcf, 0x05, 0xe4, 0xbd, 0xc4, 0x6d, + 0xf4, 0xf3, 0x04, 0xa0, 0xd1, 0x02, 0x80, 0xee, 0xce, 0x34, 0x97, 0x58, 0x9c, 0x36, 0xf6, 0x2e, + 0xa3, 0xc2, 0xad, 0xdb, 0x64, 0xd6, 0x5d, 0x91, 0x11, 0xb5, 0xae, 0x43, 0x59, 0x31, 0xe3, 0x7e, + 0x91, 0xe0, 0x3b, 0xfb, 0x38, 0x17, 0x7e, 0x73, 0x6a, 0x21, 0x9a, 0xe2, 0xc7, 0xe8, 0x1a, 0x46, + 0xf8, 0x03, 0xd6, 0x20, 0x1d, 0xe4, 0x6d, 0x66, 0x93, 0x2c, 0x6f, 0x8a, 0x1e, 0x1b, 0x81, 0xa4, + 0xe6, 0xfd, 0x2c, 0x01, 0x57, 0x26, 0x54, 0x50, 0x74, 0x6f, 0x76, 0xeb, 0x62, 0x5e, 
0x7c, 0x89, + 0x71, 0x6f, 0x32, 0xe3, 0x6e, 0xc9, 0xd7, 0x05, 0x87, 0x8d, 0xb3, 0x6d, 0xff, 0x07, 0xb0, 0xa1, + 0xdb, 0xbd, 0x09, 0x46, 0xec, 0x17, 0xe2, 0xcb, 0xb4, 0x41, 0x33, 0xb2, 0x91, 0xf8, 0xf8, 0x01, + 0x17, 0xef, 0xda, 0xa6, 0x66, 0x75, 0x77, 0x6c, 0xb7, 0xbb, 0xdb, 0x25, 0x16, 0xcb, 0xd7, 0xdd, + 0x80, 0xa5, 0x39, 0x86, 0x37, 0xfc, 0x27, 0xb7, 0xfb, 0xc1, 0xd3, 0x3f, 0x13, 0x89, 0x5f, 0x27, + 0xe7, 0x8e, 0xca, 0x8f, 0x6a, 0x67, 0xf3, 0x4c, 0xe5, 0xed, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, + 0x88, 0xec, 0xbf, 0xf1, 0xfc, 0x27, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search.pb.go new file mode 100644 index 0000000..d8fadc4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search.pb.go @@ -0,0 +1,321 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/product_search.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Parameters for a product search request. +type ProductSearchParams struct { + // The bounding polygon around the area of interest in the image. + // Optional. If it is not specified, system discretion will be applied. + BoundingPoly *BoundingPoly `protobuf:"bytes,9,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The resource name of a [ProductSet][google.cloud.vision.v1.ProductSet] to + // be searched for similar images. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + ProductSet string `protobuf:"bytes,6,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The list of product categories to search in. Currently, we only consider + // the first category, and either "homegoods", "apparel", or "toys" should be + // specified. + ProductCategories []string `protobuf:"bytes,7,rep,name=product_categories,json=productCategories,proto3" json:"product_categories,omitempty"` + // The filtering expression. This can be used to restrict search results based + // on Product labels. We currently support an AND of OR of key-value + // expressions, where each expression within an OR must have the same key. An + // '=' should be used to connect the key and value. + // + // For example, "(color = red OR color = blue) AND brand = Google" is + // acceptable, but "(color = red OR brand = Google)" is not acceptable. + // "color: red" is not acceptable because it uses a ':' instead of an '='. 
+ Filter string `protobuf:"bytes,8,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchParams) Reset() { *m = ProductSearchParams{} } +func (m *ProductSearchParams) String() string { return proto.CompactTextString(m) } +func (*ProductSearchParams) ProtoMessage() {} +func (*ProductSearchParams) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_541754fa22b35e11, []int{0} +} +func (m *ProductSearchParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchParams.Unmarshal(m, b) +} +func (m *ProductSearchParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchParams.Marshal(b, m, deterministic) +} +func (dst *ProductSearchParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchParams.Merge(dst, src) +} +func (m *ProductSearchParams) XXX_Size() int { + return xxx_messageInfo_ProductSearchParams.Size(m) +} +func (m *ProductSearchParams) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchParams proto.InternalMessageInfo + +func (m *ProductSearchParams) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *ProductSearchParams) GetProductSet() string { + if m != nil { + return m.ProductSet + } + return "" +} + +func (m *ProductSearchParams) GetProductCategories() []string { + if m != nil { + return m.ProductCategories + } + return nil +} + +func (m *ProductSearchParams) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Results for a product search request. +type ProductSearchResults struct { + // Timestamp of the index which provided these results. Products added to the + // product set and products removed from the product set after this time are + // not reflected in the current results. + IndexTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // List of results, one for each product match. + Results []*ProductSearchResults_Result `protobuf:"bytes,5,rep,name=results,proto3" json:"results,omitempty"` + // List of results grouped by products detected in the query image. Each entry + // corresponds to one bounding polygon in the query image, and contains the + // matching products specific to that region. There may be duplicate product + // matches in the union of all the per-product results. 
+ ProductGroupedResults []*ProductSearchResults_GroupedResult `protobuf:"bytes,6,rep,name=product_grouped_results,json=productGroupedResults,proto3" json:"product_grouped_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults) Reset() { *m = ProductSearchResults{} } +func (m *ProductSearchResults) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults) ProtoMessage() {} +func (*ProductSearchResults) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_541754fa22b35e11, []int{1} +} +func (m *ProductSearchResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults.Unmarshal(m, b) +} +func (m *ProductSearchResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults.Merge(dst, src) +} +func (m *ProductSearchResults) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults.Size(m) +} +func (m *ProductSearchResults) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults proto.InternalMessageInfo + +func (m *ProductSearchResults) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSearchResults) GetResults() []*ProductSearchResults_Result { + if m != nil { + return m.Results + } + return nil +} + +func (m *ProductSearchResults) GetProductGroupedResults() []*ProductSearchResults_GroupedResult { + if m != nil { + return m.ProductGroupedResults + } + return nil +} + +// Information about a product. +type ProductSearchResults_Result struct { + // The Product. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // A confidence level on the match, ranging from 0 (no confidence) to + // 1 (full confidence). + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The resource name of the image from the product that is the closest match + // to the query. 
+ Image string `protobuf:"bytes,3,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_Result) Reset() { *m = ProductSearchResults_Result{} } +func (m *ProductSearchResults_Result) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_Result) ProtoMessage() {} +func (*ProductSearchResults_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_541754fa22b35e11, []int{1, 0} +} +func (m *ProductSearchResults_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_Result.Unmarshal(m, b) +} +func (m *ProductSearchResults_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_Result.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_Result.Merge(dst, src) +} +func (m *ProductSearchResults_Result) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_Result.Size(m) +} +func (m *ProductSearchResults_Result) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_Result proto.InternalMessageInfo + +func (m *ProductSearchResults_Result) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *ProductSearchResults_Result) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ProductSearchResults_Result) GetImage() string { + if m != nil { + return m.Image + } + return "" +} + +// Information about the products similar to a single product in a query +// image. +type ProductSearchResults_GroupedResult struct { + // The bounding polygon around the product detected in the query image. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // List of results, one for each product match. 
+ Results []*ProductSearchResults_Result `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_GroupedResult) Reset() { *m = ProductSearchResults_GroupedResult{} } +func (m *ProductSearchResults_GroupedResult) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_GroupedResult) ProtoMessage() {} +func (*ProductSearchResults_GroupedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_541754fa22b35e11, []int{1, 1} +} +func (m *ProductSearchResults_GroupedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Unmarshal(m, b) +} +func (m *ProductSearchResults_GroupedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_GroupedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_GroupedResult.Merge(dst, src) +} +func (m *ProductSearchResults_GroupedResult) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Size(m) +} +func (m *ProductSearchResults_GroupedResult) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_GroupedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_GroupedResult proto.InternalMessageInfo + +func (m *ProductSearchResults_GroupedResult) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *ProductSearchResults_GroupedResult) GetResults() []*ProductSearchResults_Result { + if m != nil { + return m.Results + } + return nil +} + +func init() { + proto.RegisterType((*ProductSearchParams)(nil), "google.cloud.vision.v1.ProductSearchParams") + proto.RegisterType((*ProductSearchResults)(nil), "google.cloud.vision.v1.ProductSearchResults") + proto.RegisterType((*ProductSearchResults_Result)(nil), "google.cloud.vision.v1.ProductSearchResults.Result") + proto.RegisterType((*ProductSearchResults_GroupedResult)(nil), "google.cloud.vision.v1.ProductSearchResults.GroupedResult") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/product_search.proto", fileDescriptor_product_search_541754fa22b35e11) +} + +var fileDescriptor_product_search_541754fa22b35e11 = []byte{ + // 485 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0xcf, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0x95, 0x76, 0x4b, 0xa9, 0xcb, 0x0e, 0x98, 0x31, 0xa2, 0x08, 0xa9, 0xd5, 0x04, 0x52, + 0x25, 0x84, 0xa3, 0xad, 0xa7, 0x01, 0xa7, 0xee, 0x30, 0x71, 0x00, 0x55, 0x01, 0x71, 0xe0, 0x12, + 0xb9, 0x89, 0x67, 0x2c, 0x25, 0x7e, 0x91, 0xed, 0x54, 0x94, 0x3f, 0x87, 0x1b, 0x47, 0xfe, 0x0b, + 0xfe, 0x24, 0x8e, 0xa8, 0xfe, 0x01, 0x2b, 0xac, 0xe2, 0xc7, 0x4e, 0xc9, 0xb3, 0xbf, 0xef, 0xf3, + 0xfc, 0x7d, 0x7e, 0x46, 0x8f, 0x39, 0x00, 0xaf, 0x59, 0x56, 0xd6, 0xd0, 0x55, 0xd9, 0x4a, 0x68, + 0x01, 0x32, 0x5b, 0x9d, 0x64, 0xad, 0x82, 0xaa, 0x2b, 0x4d, 0xa1, 0x19, 0x55, 0xe5, 0x7b, 0xd2, + 0x2a, 0x30, 0x80, 0x8f, 0x9c, 0x98, 0x58, 0x31, 0x71, 0x62, 0xb2, 0x3a, 0x49, 0x1f, 0x78, 0x08, + 0x6d, 0x45, 0x46, 0xa5, 0x04, 0x43, 0x8d, 0x00, 0xa9, 0x5d, 0x56, 0xfa, 0x68, 0x47, 0x09, 0xce, + 0xa0, 0x61, 0x46, 0xad, 0xbd, 0x6c, 0xf6, 0x57, 0x27, 0x29, 0x34, 0x53, 0x2b, 0x51, 0x32, 0x9f, + 0x34, 0xf6, 0x49, 0x36, 0x5a, 0x76, 0x97, 0x99, 0x11, 0x0d, 0xd3, 0x86, 
0x36, 0xad, 0x13, 0x1c, + 0x7f, 0x8d, 0xd0, 0xdd, 0x85, 0x23, 0xbc, 0xb6, 0x80, 0x05, 0x55, 0xb4, 0xd1, 0xf8, 0x05, 0x3a, + 0x58, 0x42, 0x27, 0x2b, 0x21, 0x79, 0xd1, 0x42, 0xbd, 0x4e, 0x86, 0x93, 0x68, 0x3a, 0x3a, 0x7d, + 0x48, 0xae, 0xb7, 0x48, 0xe6, 0x5e, 0xbc, 0x80, 0x7a, 0x9d, 0xdf, 0x5e, 0x5e, 0x89, 0xf0, 0x18, + 0x8d, 0x7e, 0x9e, 0xd1, 0x24, 0xf1, 0x24, 0x9a, 0x0e, 0x73, 0xd4, 0x86, 0xa2, 0x06, 0x3f, 0x41, + 0x38, 0x08, 0x4a, 0x6a, 0x18, 0x07, 0x25, 0x98, 0x4e, 0x06, 0x93, 0xfe, 0x74, 0x98, 0xdf, 0xf1, + 0x3b, 0xe7, 0x3f, 0x36, 0xf0, 0x11, 0x8a, 0x2f, 0x45, 0x6d, 0x98, 0x4a, 0x6e, 0x59, 0x94, 0x8f, + 0x8e, 0xbf, 0xec, 0xa1, 0xc3, 0x2d, 0x2b, 0x39, 0xd3, 0x5d, 0x6d, 0x34, 0x3e, 0x43, 0x48, 0xc8, + 0x8a, 0x7d, 0x28, 0x36, 0xe6, 0x93, 0x9e, 0x35, 0x92, 0x06, 0x23, 0xa1, 0x33, 0xe4, 0x4d, 0xe8, + 0x4c, 0x3e, 0xb4, 0xea, 0x4d, 0x8c, 0x5f, 0xa2, 0x81, 0x72, 0x94, 0x64, 0x7f, 0xd2, 0x9f, 0x8e, + 0x4e, 0x67, 0xbb, 0x1a, 0x70, 0x5d, 0x65, 0xe2, 0xbe, 0x79, 0x60, 0x60, 0x85, 0xee, 0x07, 0xa7, + 0x5c, 0x41, 0xd7, 0xb2, 0xaa, 0x08, 0xf8, 0xd8, 0xe2, 0x9f, 0xfe, 0x13, 0xfe, 0xc2, 0x31, 0x7c, + 0x95, 0x7b, 0x1e, 0xbd, 0xb5, 0xaa, 0x53, 0x40, 0xb1, 0xfb, 0xc5, 0x67, 0x68, 0xe0, 0x25, 0x49, + 0x64, 0x9b, 0x30, 0xfe, 0x43, 0xb5, 0x3c, 0xe8, 0xf1, 0x21, 0xda, 0xd7, 0x25, 0x28, 0xd7, 0xbd, + 0x5e, 0xee, 0x82, 0xcd, 0xaa, 0x68, 0x28, 0x67, 0x49, 0xdf, 0x5e, 0x84, 0x0b, 0xd2, 0xcf, 0x11, + 0x3a, 0xd8, 0x3a, 0xc3, 0xef, 0xc3, 0x14, 0xfd, 0xf7, 0x30, 0x5d, 0xb9, 0x90, 0xde, 0xcd, 0x2f, + 0x64, 0xfe, 0x11, 0xa5, 0x25, 0x34, 0x3b, 0x10, 0x73, 0xbc, 0xfd, 0x32, 0x36, 0x93, 0xb2, 0x88, + 0xde, 0x3d, 0xf7, 0x6a, 0x0e, 0x35, 0x95, 0x9c, 0x80, 0xe2, 0x19, 0x67, 0xd2, 0xce, 0x51, 0xe6, + 0xb6, 0x68, 0x2b, 0xf4, 0xaf, 0xef, 0xf4, 0x99, 0xfb, 0xfb, 0x16, 0x45, 0x9f, 0x7a, 0x7b, 0x17, + 0xe7, 0x6f, 0x5f, 0x2d, 0x63, 0x9b, 0x32, 0xfb, 0x1e, 0x00, 0x00, 0xff, 0xff, 0x93, 0xfe, 0xaa, + 0xbb, 0x63, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search_service.pb.go new file mode 100644 index 0000000..2927b73 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/product_search_service.pb.go @@ -0,0 +1,2944 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/product_search_service.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates the possible states that the batch request can be in. +type BatchOperationMetadata_State int32 + +const ( + // Invalid. + BatchOperationMetadata_STATE_UNSPECIFIED BatchOperationMetadata_State = 0 + // Request is actively being processed. + BatchOperationMetadata_PROCESSING BatchOperationMetadata_State = 1 + // The request is done and at least one item has been successfully + // processed. + BatchOperationMetadata_SUCCESSFUL BatchOperationMetadata_State = 2 + // The request is done and no item has been successfully processed. + BatchOperationMetadata_FAILED BatchOperationMetadata_State = 3 + // The request is done after the longrunning.Operations.CancelOperation has + // been called by the user. Any records that were processed before the + // cancel command are output as specified in the request. + BatchOperationMetadata_CANCELLED BatchOperationMetadata_State = 4 +) + +var BatchOperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PROCESSING", + 2: "SUCCESSFUL", + 3: "FAILED", + 4: "CANCELLED", +} +var BatchOperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PROCESSING": 1, + "SUCCESSFUL": 2, + "FAILED": 3, + "CANCELLED": 4, +} + +func (x BatchOperationMetadata_State) String() string { + return proto.EnumName(BatchOperationMetadata_State_name, int32(x)) +} +func (BatchOperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{28, 0} +} + +// A Product contains ReferenceImages. +type Product struct { + // The resource name of the product. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + // + // This field is ignored when creating a product. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this Product. Must not be empty. Must be at most + // 4096 characters long. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User-provided metadata to be stored with this product. Must be at most 4096 + // characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The category for the product identified by the reference image. This should + // be either "homegoods", "apparel", or "toys". + // + // This field is immutable. + ProductCategory string `protobuf:"bytes,4,opt,name=product_category,json=productCategory,proto3" json:"product_category,omitempty"` + // Key-value pairs that can be attached to a product. At query time, + // constraints can be specified based on the product_labels. + // + // Note that integer values can be provided as strings, e.g. "1199". Only + // strings with integer values can match a range-based restriction which is + // to be supported soon. + // + // Multiple values can be assigned to the same key. One product may have up to + // 100 product_labels. 
+ ProductLabels []*Product_KeyValue `protobuf:"bytes,5,rep,name=product_labels,json=productLabels,proto3" json:"product_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product) Reset() { *m = Product{} } +func (m *Product) String() string { return proto.CompactTextString(m) } +func (*Product) ProtoMessage() {} +func (*Product) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{0} +} +func (m *Product) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product.Unmarshal(m, b) +} +func (m *Product) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product.Marshal(b, m, deterministic) +} +func (dst *Product) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product.Merge(dst, src) +} +func (m *Product) XXX_Size() int { + return xxx_messageInfo_Product.Size(m) +} +func (m *Product) XXX_DiscardUnknown() { + xxx_messageInfo_Product.DiscardUnknown(m) +} + +var xxx_messageInfo_Product proto.InternalMessageInfo + +func (m *Product) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Product) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Product) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Product) GetProductCategory() string { + if m != nil { + return m.ProductCategory + } + return "" +} + +func (m *Product) GetProductLabels() []*Product_KeyValue { + if m != nil { + return m.ProductLabels + } + return nil +} + +// A product label represented as a key-value pair. +type Product_KeyValue struct { + // The key of the label attached to the product. Cannot be empty and cannot + // exceed 128 bytes. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value of the label attached to the product. Cannot be empty and + // cannot exceed 128 bytes. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product_KeyValue) Reset() { *m = Product_KeyValue{} } +func (m *Product_KeyValue) String() string { return proto.CompactTextString(m) } +func (*Product_KeyValue) ProtoMessage() {} +func (*Product_KeyValue) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{0, 0} +} +func (m *Product_KeyValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product_KeyValue.Unmarshal(m, b) +} +func (m *Product_KeyValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product_KeyValue.Marshal(b, m, deterministic) +} +func (dst *Product_KeyValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product_KeyValue.Merge(dst, src) +} +func (m *Product_KeyValue) XXX_Size() int { + return xxx_messageInfo_Product_KeyValue.Size(m) +} +func (m *Product_KeyValue) XXX_DiscardUnknown() { + xxx_messageInfo_Product_KeyValue.DiscardUnknown(m) +} + +var xxx_messageInfo_Product_KeyValue proto.InternalMessageInfo + +func (m *Product_KeyValue) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Product_KeyValue) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A ProductSet contains Products. A ProductSet can contain a maximum of 1 +// million reference images. 
If the limit is exceeded, periodic indexing will +// fail. +type ProductSet struct { + // The resource name of the ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + // + // This field is ignored when creating a ProductSet. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this ProductSet. Must not be empty. Must be at + // most 4096 characters long. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The time at which this ProductSet was last indexed. Query + // results will reflect all updates before this time. If this ProductSet has + // never been indexed, this timestamp is the default value + // "1970-01-01T00:00:00Z". + // + // This field is ignored when creating a ProductSet. + IndexTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // Output only. If there was an error with indexing the product set, the field + // is populated. + // + // This field is ignored when creating a ProductSet. + IndexError *status.Status `protobuf:"bytes,4,opt,name=index_error,json=indexError,proto3" json:"index_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSet) Reset() { *m = ProductSet{} } +func (m *ProductSet) String() string { return proto.CompactTextString(m) } +func (*ProductSet) ProtoMessage() {} +func (*ProductSet) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{1} +} +func (m *ProductSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSet.Unmarshal(m, b) +} +func (m *ProductSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSet.Marshal(b, m, deterministic) +} +func (dst *ProductSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSet.Merge(dst, src) +} +func (m *ProductSet) XXX_Size() int { + return xxx_messageInfo_ProductSet.Size(m) +} +func (m *ProductSet) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSet proto.InternalMessageInfo + +func (m *ProductSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ProductSet) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ProductSet) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSet) GetIndexError() *status.Status { + if m != nil { + return m.IndexError + } + return nil +} + +// A `ReferenceImage` represents a product image and its associated metadata, +// such as bounding boxes. +type ReferenceImage struct { + // The resource name of the reference image. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. + // + // This field is ignored when creating a reference image. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The Google Cloud Storage URI of the reference image. + // + // The URI must start with `gs://`. + // + // Required. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // Bounding polygons around the areas of interest in the reference image. + // Optional. 
If this field is empty, the system will try to detect regions of + // interest. At most 10 bounding polygons will be used. + // + // The provided shape is converted into a non-rotated rectangle. Once + // converted, the small edge of the rectangle must be greater than or equal + // to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 + // is not). + BoundingPolys []*BoundingPoly `protobuf:"bytes,3,rep,name=bounding_polys,json=boundingPolys,proto3" json:"bounding_polys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReferenceImage) Reset() { *m = ReferenceImage{} } +func (m *ReferenceImage) String() string { return proto.CompactTextString(m) } +func (*ReferenceImage) ProtoMessage() {} +func (*ReferenceImage) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{2} +} +func (m *ReferenceImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReferenceImage.Unmarshal(m, b) +} +func (m *ReferenceImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReferenceImage.Marshal(b, m, deterministic) +} +func (dst *ReferenceImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReferenceImage.Merge(dst, src) +} +func (m *ReferenceImage) XXX_Size() int { + return xxx_messageInfo_ReferenceImage.Size(m) +} +func (m *ReferenceImage) XXX_DiscardUnknown() { + xxx_messageInfo_ReferenceImage.DiscardUnknown(m) +} + +var xxx_messageInfo_ReferenceImage proto.InternalMessageInfo + +func (m *ReferenceImage) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReferenceImage) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ReferenceImage) GetBoundingPolys() []*BoundingPoly { + if m != nil { + return m.BoundingPolys + } + return nil +} + +// Request message for the `CreateProduct` method. +type CreateProductRequest struct { + // The project in which the Product should be created. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The product to create. + Product *Product `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + // A user-supplied resource id for this Product. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. 
+ ProductId string `protobuf:"bytes,3,opt,name=product_id,json=productId,proto3" json:"product_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductRequest) Reset() { *m = CreateProductRequest{} } +func (m *CreateProductRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductRequest) ProtoMessage() {} +func (*CreateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{3} +} +func (m *CreateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductRequest.Unmarshal(m, b) +} +func (m *CreateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductRequest.Merge(dst, src) +} +func (m *CreateProductRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductRequest.Size(m) +} +func (m *CreateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductRequest proto.InternalMessageInfo + +func (m *CreateProductRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *CreateProductRequest) GetProductId() string { + if m != nil { + return m.ProductId + } + return "" +} + +// Request message for the `ListProducts` method. +type ListProductsRequest struct { + // The project OR ProductSet from which Products should be listed. + // + // Format: + // `projects/PROJECT_ID/locations/LOC_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsRequest) Reset() { *m = ListProductsRequest{} } +func (m *ListProductsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsRequest) ProtoMessage() {} +func (*ListProductsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{4} +} +func (m *ListProductsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsRequest.Unmarshal(m, b) +} +func (m *ListProductsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsRequest.Merge(dst, src) +} +func (m *ListProductsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsRequest.Size(m) +} +func (m *ListProductsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsRequest proto.InternalMessageInfo + +func (m *ListProductsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProducts` method. +type ListProductsResponse struct { + // List of products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsResponse) Reset() { *m = ListProductsResponse{} } +func (m *ListProductsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsResponse) ProtoMessage() {} +func (*ListProductsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{5} +} +func (m *ListProductsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsResponse.Unmarshal(m, b) +} +func (m *ListProductsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsResponse.Merge(dst, src) +} +func (m *ListProductsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsResponse.Size(m) +} +func (m *ListProductsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsResponse proto.InternalMessageInfo + +func (m *ListProductsResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProduct` method. 
+type GetProductRequest struct { + // Resource name of the Product to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductRequest) Reset() { *m = GetProductRequest{} } +func (m *GetProductRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductRequest) ProtoMessage() {} +func (*GetProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{6} +} +func (m *GetProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductRequest.Unmarshal(m, b) +} +func (m *GetProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductRequest.Merge(dst, src) +} +func (m *GetProductRequest) XXX_Size() int { + return xxx_messageInfo_GetProductRequest.Size(m) +} +func (m *GetProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductRequest proto.InternalMessageInfo + +func (m *GetProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProduct` method. +type UpdateProductRequest struct { + // The Product resource which replaces the one on the server. + // product.name is immutable. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields + // to update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask paths include `product_labels`, `display_name`, and + // `description`. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductRequest) Reset() { *m = UpdateProductRequest{} } +func (m *UpdateProductRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductRequest) ProtoMessage() {} +func (*UpdateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{7} +} +func (m *UpdateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductRequest.Unmarshal(m, b) +} +func (m *UpdateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductRequest.Merge(dst, src) +} +func (m *UpdateProductRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductRequest.Size(m) +} +func (m *UpdateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductRequest proto.InternalMessageInfo + +func (m *UpdateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *UpdateProductRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProduct` method. +type DeleteProductRequest struct { + // Resource name of product to delete. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductRequest) Reset() { *m = DeleteProductRequest{} } +func (m *DeleteProductRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductRequest) ProtoMessage() {} +func (*DeleteProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{8} +} +func (m *DeleteProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductRequest.Unmarshal(m, b) +} +func (m *DeleteProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductRequest.Merge(dst, src) +} +func (m *DeleteProductRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductRequest.Size(m) +} +func (m *DeleteProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductRequest proto.InternalMessageInfo + +func (m *DeleteProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateProductSet` method. +type CreateProductSetRequest struct { + // The project in which the ProductSet should be created. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ProductSet to create. 
+ ProductSet *ProductSet `protobuf:"bytes,2,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // A user-supplied resource id for this ProductSet. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. + ProductSetId string `protobuf:"bytes,3,opt,name=product_set_id,json=productSetId,proto3" json:"product_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductSetRequest) Reset() { *m = CreateProductSetRequest{} } +func (m *CreateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductSetRequest) ProtoMessage() {} +func (*CreateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{9} +} +func (m *CreateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductSetRequest.Unmarshal(m, b) +} +func (m *CreateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductSetRequest.Merge(dst, src) +} +func (m *CreateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductSetRequest.Size(m) +} +func (m *CreateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductSetRequest proto.InternalMessageInfo + +func (m *CreateProductSetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *CreateProductSetRequest) GetProductSetId() string { + if m != nil { + return m.ProductSetId + } + return "" +} + +// Request message for the `ListProductSets` method. +type ListProductSetsRequest struct { + // The project from which ProductSets should be listed. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsRequest) Reset() { *m = ListProductSetsRequest{} } +func (m *ListProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsRequest) ProtoMessage() {} +func (*ListProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{10} +} +func (m *ListProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsRequest.Unmarshal(m, b) +} +func (m *ListProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsRequest.Merge(dst, src) +} +func (m *ListProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductSetsRequest.Size(m) +} +func (m *ListProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsRequest proto.InternalMessageInfo + +func (m *ListProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductSets` method. +type ListProductSetsResponse struct { + // List of ProductSets. + ProductSets []*ProductSet `protobuf:"bytes,1,rep,name=product_sets,json=productSets,proto3" json:"product_sets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsResponse) Reset() { *m = ListProductSetsResponse{} } +func (m *ListProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsResponse) ProtoMessage() {} +func (*ListProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{11} +} +func (m *ListProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsResponse.Unmarshal(m, b) +} +func (m *ListProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsResponse.Merge(dst, src) +} +func (m *ListProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductSetsResponse.Size(m) +} +func (m *ListProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsResponse proto.InternalMessageInfo + +func (m *ListProductSetsResponse) GetProductSets() []*ProductSet { + if m != nil { + return m.ProductSets + } + return nil +} + +func (m *ListProductSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProductSet` method. +type GetProductSetRequest struct { + // Resource name of the ProductSet to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductSetRequest) Reset() { *m = GetProductSetRequest{} } +func (m *GetProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductSetRequest) ProtoMessage() {} +func (*GetProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{12} +} +func (m *GetProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductSetRequest.Unmarshal(m, b) +} +func (m *GetProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductSetRequest.Merge(dst, src) +} +func (m *GetProductSetRequest) XXX_Size() int { + return xxx_messageInfo_GetProductSetRequest.Size(m) +} +func (m *GetProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductSetRequest proto.InternalMessageInfo + +func (m *GetProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProductSet` method. +type UpdateProductSetRequest struct { + // The ProductSet resource which replaces the one on the server. 
+ ProductSet *ProductSet `protobuf:"bytes,1,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields to + // update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask path is `display_name`. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductSetRequest) Reset() { *m = UpdateProductSetRequest{} } +func (m *UpdateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductSetRequest) ProtoMessage() {} +func (*UpdateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{13} +} +func (m *UpdateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductSetRequest.Unmarshal(m, b) +} +func (m *UpdateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductSetRequest.Merge(dst, src) +} +func (m *UpdateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductSetRequest.Size(m) +} +func (m *UpdateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductSetRequest proto.InternalMessageInfo + +func (m *UpdateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *UpdateProductSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProductSet` method. +type DeleteProductSetRequest struct { + // Resource name of the ProductSet to delete. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductSetRequest) Reset() { *m = DeleteProductSetRequest{} } +func (m *DeleteProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductSetRequest) ProtoMessage() {} +func (*DeleteProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{14} +} +func (m *DeleteProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductSetRequest.Unmarshal(m, b) +} +func (m *DeleteProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductSetRequest.Merge(dst, src) +} +func (m *DeleteProductSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductSetRequest.Size(m) +} +func (m *DeleteProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductSetRequest proto.InternalMessageInfo + +func (m *DeleteProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateReferenceImage` method. +type CreateReferenceImageRequest struct { + // Resource name of the product in which to create the reference image. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The reference image to create. + // If an image ID is specified, it is ignored. + ReferenceImage *ReferenceImage `protobuf:"bytes,2,opt,name=reference_image,json=referenceImage,proto3" json:"reference_image,omitempty"` + // A user-supplied resource id for the ReferenceImage to be added. If set, + // the server will attempt to use this value as the resource id. If it is + // already in use, an error is returned with code ALREADY_EXISTS. Must be at + // most 128 characters long. It cannot contain the character `/`. 
+ ReferenceImageId string `protobuf:"bytes,3,opt,name=reference_image_id,json=referenceImageId,proto3" json:"reference_image_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateReferenceImageRequest) Reset() { *m = CreateReferenceImageRequest{} } +func (m *CreateReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*CreateReferenceImageRequest) ProtoMessage() {} +func (*CreateReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{15} +} +func (m *CreateReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateReferenceImageRequest.Unmarshal(m, b) +} +func (m *CreateReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *CreateReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateReferenceImageRequest.Merge(dst, src) +} +func (m *CreateReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_CreateReferenceImageRequest.Size(m) +} +func (m *CreateReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateReferenceImageRequest proto.InternalMessageInfo + +func (m *CreateReferenceImageRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateReferenceImageRequest) GetReferenceImage() *ReferenceImage { + if m != nil { + return m.ReferenceImage + } + return nil +} + +func (m *CreateReferenceImageRequest) GetReferenceImageId() string { + if m != nil { + return m.ReferenceImageId + } + return "" +} + +// Request message for the `ListReferenceImages` method. +type ListReferenceImagesRequest struct { + // Resource name of the product containing the reference images. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results to be returned. This is the value + // of `nextPageToken` returned in a previous reference image list request. + // + // Defaults to the first page if not specified. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesRequest) Reset() { *m = ListReferenceImagesRequest{} } +func (m *ListReferenceImagesRequest) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesRequest) ProtoMessage() {} +func (*ListReferenceImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{16} +} +func (m *ListReferenceImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesRequest.Unmarshal(m, b) +} +func (m *ListReferenceImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesRequest.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesRequest.Merge(dst, src) +} +func (m *ListReferenceImagesRequest) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesRequest.Size(m) +} +func (m *ListReferenceImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesRequest proto.InternalMessageInfo + +func (m *ListReferenceImagesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListReferenceImagesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListReferenceImages` method. +type ListReferenceImagesResponse struct { + // The list of reference images. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesResponse) Reset() { *m = ListReferenceImagesResponse{} } +func (m *ListReferenceImagesResponse) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesResponse) ProtoMessage() {} +func (*ListReferenceImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{17} +} +func (m *ListReferenceImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesResponse.Unmarshal(m, b) +} +func (m *ListReferenceImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesResponse.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesResponse.Merge(dst, src) +} +func (m *ListReferenceImagesResponse) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesResponse.Size(m) +} +func (m *ListReferenceImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesResponse proto.InternalMessageInfo + +func (m *ListReferenceImagesResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ListReferenceImagesResponse) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetReferenceImage` method. +type GetReferenceImageRequest struct { + // The resource name of the ReferenceImage to get. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReferenceImageRequest) Reset() { *m = GetReferenceImageRequest{} } +func (m *GetReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*GetReferenceImageRequest) ProtoMessage() {} +func (*GetReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{18} +} +func (m *GetReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReferenceImageRequest.Unmarshal(m, b) +} +func (m *GetReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *GetReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReferenceImageRequest.Merge(dst, src) +} +func (m *GetReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_GetReferenceImageRequest.Size(m) +} +func (m *GetReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReferenceImageRequest proto.InternalMessageInfo + +func (m *GetReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `DeleteReferenceImage` method. +type DeleteReferenceImageRequest struct { + // The resource name of the reference image to delete. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteReferenceImageRequest) Reset() { *m = DeleteReferenceImageRequest{} } +func (m *DeleteReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteReferenceImageRequest) ProtoMessage() {} +func (*DeleteReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{19} +} +func (m *DeleteReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteReferenceImageRequest.Unmarshal(m, b) +} +func (m *DeleteReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteReferenceImageRequest.Merge(dst, src) +} +func (m *DeleteReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_DeleteReferenceImageRequest.Size(m) +} +func (m *DeleteReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteReferenceImageRequest proto.InternalMessageInfo + +func (m *DeleteReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `AddProductToProductSet` method. +type AddProductToProductSetRequest struct { + // The resource name for the ProductSet to modify. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be added to this ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddProductToProductSetRequest) Reset() { *m = AddProductToProductSetRequest{} } +func (m *AddProductToProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*AddProductToProductSetRequest) ProtoMessage() {} +func (*AddProductToProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{20} +} +func (m *AddProductToProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddProductToProductSetRequest.Unmarshal(m, b) +} +func (m *AddProductToProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddProductToProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *AddProductToProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddProductToProductSetRequest.Merge(dst, src) +} +func (m *AddProductToProductSetRequest) XXX_Size() int { + return xxx_messageInfo_AddProductToProductSetRequest.Size(m) +} +func (m *AddProductToProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddProductToProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddProductToProductSetRequest proto.InternalMessageInfo + +func (m *AddProductToProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AddProductToProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `RemoveProductFromProductSet` method. +type RemoveProductFromProductSetRequest struct { + // The resource name for the ProductSet to modify. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be removed from this ProductSet. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveProductFromProductSetRequest) Reset() { *m = RemoveProductFromProductSetRequest{} } +func (m *RemoveProductFromProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveProductFromProductSetRequest) ProtoMessage() {} +func (*RemoveProductFromProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{21} +} +func (m *RemoveProductFromProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Unmarshal(m, b) +} +func (m *RemoveProductFromProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveProductFromProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveProductFromProductSetRequest.Merge(dst, src) +} +func (m *RemoveProductFromProductSetRequest) XXX_Size() int { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Size(m) +} +func (m *RemoveProductFromProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveProductFromProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveProductFromProductSetRequest proto.InternalMessageInfo + +func (m *RemoveProductFromProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RemoveProductFromProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `ListProductsInProductSet` method. +type ListProductsInProductSetRequest struct { + // The ProductSet resource for which to retrieve Products. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetRequest) Reset() { *m = ListProductsInProductSetRequest{} } +func (m *ListProductsInProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetRequest) ProtoMessage() {} +func (*ListProductsInProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{22} +} +func (m *ListProductsInProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetRequest.Unmarshal(m, b) +} +func (m *ListProductsInProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetRequest.Merge(dst, src) +} +func (m *ListProductsInProductSetRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetRequest.Size(m) +} +func (m *ListProductsInProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetRequest proto.InternalMessageInfo + +func (m *ListProductsInProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListProductsInProductSetRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsInProductSetRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductsInProductSet` method. +type ListProductsInProductSetResponse struct { + // The list of Products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetResponse) Reset() { *m = ListProductsInProductSetResponse{} } +func (m *ListProductsInProductSetResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetResponse) ProtoMessage() {} +func (*ListProductsInProductSetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{23} +} +func (m *ListProductsInProductSetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetResponse.Unmarshal(m, b) +} +func (m *ListProductsInProductSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetResponse.Merge(dst, src) +} +func (m *ListProductsInProductSetResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetResponse.Size(m) +} +func (m *ListProductsInProductSetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetResponse proto.InternalMessageInfo + +func (m *ListProductsInProductSetResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsInProductSetResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The Google Cloud Storage location for a csv file which preserves a list of +// ImportProductSetRequests in each line. +type ImportProductSetsGcsSource struct { + // The Google Cloud Storage URI of the input csv file. + // + // The URI must start with `gs://`. + // + // The format of the input csv file should be one image per line. + // In each line, there are 8 columns. + // + // 1. image-uri + // 2. image-id + // 3. product-set-id + // 4. product-id + // 5. product-category + // 6. product-display-name + // 7. labels + // 8. bounding-poly + // + // The `image-uri`, `product-set-id`, `product-id`, and `product-category` + // columns are required. All other columns are optional. + // + // If the `ProductSet` or `Product` specified by the `product-set-id` and + // `product-id` values does not exist, then the system will create a new + // `ProductSet` or `Product` for the image. In this case, the + // `product-display-name` column refers to + // [display_name][google.cloud.vision.v1.Product.display_name], the + // `product-category` column refers to + // [product_category][google.cloud.vision.v1.Product.product_category], and + // the `labels` column refers to + // [product_labels][google.cloud.vision.v1.Product.product_labels]. + // + // The `image-id` column is optional but must be unique if provided. If it is + // empty, the system will automatically assign a unique id to the image. + // + // The `product-display-name` column is optional. If it is empty, the system + // sets the [display_name][google.cloud.vision.v1.Product.display_name] field + // for the product to a space (" "). You can update the `display_name` later + // by using the API. 
+ // + // If a `Product` with the specified `product-id` already exists, then the + // system ignores the `product-display-name`, `product-category`, and `labels` + // columns. + // + // The `labels` column (optional) is a line containing a list of + // comma-separated key-value pairs, in the following format: + // + // "key_1=value_1,key_2=value_2,...,key_n=value_n" + // + // The `bounding-poly` column (optional) identifies one region of + // interest from the image in the same manner as `CreateReferenceImage`. If + // you do not specify the `bounding-poly` column, then the system will try to + // detect regions of interest automatically. + // + // At most one `bounding-poly` column is allowed per line. If the image + // contains multiple regions of interest, add a line to the CSV file that + // includes the same product information, and the `bounding-poly` values for + // each region of interest. + // + // The `bounding-poly` column must contain an even number of comma-separated + // numbers, in the format "p1_x,p1_y,p2_x,p2_y,...,pn_x,pn_y". Use + // non-negative integers for absolute bounding polygons, and float values + // in [0, 1] for normalized bounding polygons. + // + // The system will resize the image if the image resolution is too + // large to process (larger than 20MP). + CsvFileUri string `protobuf:"bytes,1,opt,name=csv_file_uri,json=csvFileUri,proto3" json:"csv_file_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsGcsSource) Reset() { *m = ImportProductSetsGcsSource{} } +func (m *ImportProductSetsGcsSource) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsGcsSource) ProtoMessage() {} +func (*ImportProductSetsGcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{24} +} +func (m *ImportProductSetsGcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsGcsSource.Unmarshal(m, b) +} +func (m *ImportProductSetsGcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsGcsSource.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsGcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsGcsSource.Merge(dst, src) +} +func (m *ImportProductSetsGcsSource) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsGcsSource.Size(m) +} +func (m *ImportProductSetsGcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsGcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsGcsSource proto.InternalMessageInfo + +func (m *ImportProductSetsGcsSource) GetCsvFileUri() string { + if m != nil { + return m.CsvFileUri + } + return "" +} + +// The input content for the `ImportProductSets` method. +type ImportProductSetsInputConfig struct { + // The source of the input. 
+ // + // Types that are valid to be assigned to Source: + // *ImportProductSetsInputConfig_GcsSource + Source isImportProductSetsInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsInputConfig) Reset() { *m = ImportProductSetsInputConfig{} } +func (m *ImportProductSetsInputConfig) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsInputConfig) ProtoMessage() {} +func (*ImportProductSetsInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{25} +} +func (m *ImportProductSetsInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsInputConfig.Unmarshal(m, b) +} +func (m *ImportProductSetsInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsInputConfig.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsInputConfig.Merge(dst, src) +} +func (m *ImportProductSetsInputConfig) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsInputConfig.Size(m) +} +func (m *ImportProductSetsInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsInputConfig proto.InternalMessageInfo + +type isImportProductSetsInputConfig_Source interface { + isImportProductSetsInputConfig_Source() +} + +type ImportProductSetsInputConfig_GcsSource struct { + GcsSource *ImportProductSetsGcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*ImportProductSetsInputConfig_GcsSource) isImportProductSetsInputConfig_Source() {} + +func (m *ImportProductSetsInputConfig) GetSource() isImportProductSetsInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *ImportProductSetsInputConfig) GetGcsSource() *ImportProductSetsGcsSource { + if x, ok := m.GetSource().(*ImportProductSetsInputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImportProductSetsInputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImportProductSetsInputConfig_OneofMarshaler, _ImportProductSetsInputConfig_OneofUnmarshaler, _ImportProductSetsInputConfig_OneofSizer, []interface{}{ + (*ImportProductSetsInputConfig_GcsSource)(nil), + } +} + +func _ImportProductSetsInputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImportProductSetsInputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _ImportProductSetsInputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImportProductSetsInputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImportProductSetsGcsSource) + err := b.DecodeMessage(msg) + m.Source = &ImportProductSetsInputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _ImportProductSetsInputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for the `ImportProductSets` method. +type ImportProductSetsRequest struct { + // The project in which the ProductSets should be imported. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The input content for the list of requests. 
+ InputConfig *ImportProductSetsInputConfig `protobuf:"bytes,2,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsRequest) Reset() { *m = ImportProductSetsRequest{} } +func (m *ImportProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsRequest) ProtoMessage() {} +func (*ImportProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{26} +} +func (m *ImportProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsRequest.Unmarshal(m, b) +} +func (m *ImportProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsRequest.Merge(dst, src) +} +func (m *ImportProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsRequest.Size(m) +} +func (m *ImportProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsRequest proto.InternalMessageInfo + +func (m *ImportProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportProductSetsRequest) GetInputConfig() *ImportProductSetsInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// Response message for the `ImportProductSets` method. +// +// This message is returned by the +// [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] +// method in the returned +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field. +type ImportProductSetsResponse struct { + // The list of reference_images that are imported successfully. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The rpc status for each ImportProductSet request, including both successes + // and errors. + // + // The number of statuses here matches the number of lines in the csv file, + // and statuses[i] stores the success or failure status of processing the i-th + // line of the csv, starting from line 0. 
+ Statuses []*status.Status `protobuf:"bytes,2,rep,name=statuses,proto3" json:"statuses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsResponse) Reset() { *m = ImportProductSetsResponse{} } +func (m *ImportProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsResponse) ProtoMessage() {} +func (*ImportProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{27} +} +func (m *ImportProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsResponse.Unmarshal(m, b) +} +func (m *ImportProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsResponse.Merge(dst, src) +} +func (m *ImportProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsResponse.Size(m) +} +func (m *ImportProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsResponse proto.InternalMessageInfo + +func (m *ImportProductSetsResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ImportProductSetsResponse) GetStatuses() []*status.Status { + if m != nil { + return m.Statuses + } + return nil +} + +// Metadata for the batch operations such as the current state. +// +// This is included in the `metadata` field of the `Operation` returned by the +// `GetOperation` call of the `google::longrunning::Operations` service. +type BatchOperationMetadata struct { + // The current state of the batch operation. + State BatchOperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1.BatchOperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was submitted to the server. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // The time when the batch request is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchOperationMetadata) Reset() { *m = BatchOperationMetadata{} } +func (m *BatchOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*BatchOperationMetadata) ProtoMessage() {} +func (*BatchOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_914a807106064587, []int{28} +} +func (m *BatchOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchOperationMetadata.Unmarshal(m, b) +} +func (m *BatchOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *BatchOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchOperationMetadata.Merge(dst, src) +} +func (m *BatchOperationMetadata) XXX_Size() int { + return xxx_messageInfo_BatchOperationMetadata.Size(m) +} +func (m *BatchOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BatchOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchOperationMetadata proto.InternalMessageInfo + +func (m *BatchOperationMetadata) GetState() BatchOperationMetadata_State { + if m != nil { + return m.State + } + return BatchOperationMetadata_STATE_UNSPECIFIED +} + +func (m *BatchOperationMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *BatchOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func init() { + proto.RegisterType((*Product)(nil), "google.cloud.vision.v1.Product") + proto.RegisterType((*Product_KeyValue)(nil), "google.cloud.vision.v1.Product.KeyValue") + proto.RegisterType((*ProductSet)(nil), "google.cloud.vision.v1.ProductSet") + proto.RegisterType((*ReferenceImage)(nil), "google.cloud.vision.v1.ReferenceImage") + proto.RegisterType((*CreateProductRequest)(nil), "google.cloud.vision.v1.CreateProductRequest") + proto.RegisterType((*ListProductsRequest)(nil), "google.cloud.vision.v1.ListProductsRequest") + proto.RegisterType((*ListProductsResponse)(nil), "google.cloud.vision.v1.ListProductsResponse") + proto.RegisterType((*GetProductRequest)(nil), "google.cloud.vision.v1.GetProductRequest") + proto.RegisterType((*UpdateProductRequest)(nil), "google.cloud.vision.v1.UpdateProductRequest") + proto.RegisterType((*DeleteProductRequest)(nil), "google.cloud.vision.v1.DeleteProductRequest") + proto.RegisterType((*CreateProductSetRequest)(nil), "google.cloud.vision.v1.CreateProductSetRequest") + proto.RegisterType((*ListProductSetsRequest)(nil), "google.cloud.vision.v1.ListProductSetsRequest") + proto.RegisterType((*ListProductSetsResponse)(nil), "google.cloud.vision.v1.ListProductSetsResponse") + proto.RegisterType((*GetProductSetRequest)(nil), "google.cloud.vision.v1.GetProductSetRequest") + proto.RegisterType((*UpdateProductSetRequest)(nil), "google.cloud.vision.v1.UpdateProductSetRequest") + proto.RegisterType((*DeleteProductSetRequest)(nil), "google.cloud.vision.v1.DeleteProductSetRequest") + proto.RegisterType((*CreateReferenceImageRequest)(nil), "google.cloud.vision.v1.CreateReferenceImageRequest") + proto.RegisterType((*ListReferenceImagesRequest)(nil), "google.cloud.vision.v1.ListReferenceImagesRequest") + 
proto.RegisterType((*ListReferenceImagesResponse)(nil), "google.cloud.vision.v1.ListReferenceImagesResponse") + proto.RegisterType((*GetReferenceImageRequest)(nil), "google.cloud.vision.v1.GetReferenceImageRequest") + proto.RegisterType((*DeleteReferenceImageRequest)(nil), "google.cloud.vision.v1.DeleteReferenceImageRequest") + proto.RegisterType((*AddProductToProductSetRequest)(nil), "google.cloud.vision.v1.AddProductToProductSetRequest") + proto.RegisterType((*RemoveProductFromProductSetRequest)(nil), "google.cloud.vision.v1.RemoveProductFromProductSetRequest") + proto.RegisterType((*ListProductsInProductSetRequest)(nil), "google.cloud.vision.v1.ListProductsInProductSetRequest") + proto.RegisterType((*ListProductsInProductSetResponse)(nil), "google.cloud.vision.v1.ListProductsInProductSetResponse") + proto.RegisterType((*ImportProductSetsGcsSource)(nil), "google.cloud.vision.v1.ImportProductSetsGcsSource") + proto.RegisterType((*ImportProductSetsInputConfig)(nil), "google.cloud.vision.v1.ImportProductSetsInputConfig") + proto.RegisterType((*ImportProductSetsRequest)(nil), "google.cloud.vision.v1.ImportProductSetsRequest") + proto.RegisterType((*ImportProductSetsResponse)(nil), "google.cloud.vision.v1.ImportProductSetsResponse") + proto.RegisterType((*BatchOperationMetadata)(nil), "google.cloud.vision.v1.BatchOperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1.BatchOperationMetadata_State", BatchOperationMetadata_State_name, BatchOperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProductSearchClient is the client API for ProductSearch service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProductSearchClient interface { + // Creates and returns a new ProductSet resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. + UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Permanently deletes a ProductSet. Products and ReferenceImages in the + // ProductSet are not deleted. + // + // The actual image files are not deleted from Google Cloud Storage. 
+ DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) + // Makes changes to a Product resource. + // Only the `display_name`, `description`, and `labels` fields can be updated + // right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. + // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. + // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. + // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Permanently deletes a reference image. + // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. 
+ // + // The actual image files are not deleted from Google Cloud Storage. + DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. (results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1.ImportProductSetsGcsSource.csv_file_uri]. + ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type productSearchClient struct { + cc *grpc.ClientConn +} + +func NewProductSearchClient(cc *grpc.ClientConn) ProductSearchClient { + return &productSearchClient{cc} +} + +func (c *productSearchClient) CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/CreateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) { + out := new(ListProductSetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/ListProductSets", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/GetProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/UpdateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/DeleteProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/CreateProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) { + out := new(ListProductsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/ListProducts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/GetProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/UpdateProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/DeleteProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/CreateReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/DeleteReferenceImage", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) { + out := new(ListReferenceImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/ListReferenceImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/GetReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/AddProductToProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/RemoveProductFromProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) { + out := new(ListProductsInProductSetResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/ListProductsInProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1.ProductSearch/ImportProductSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProductSearchServer is the server API for ProductSearch service. +type ProductSearchServer interface { + // Creates and returns a new ProductSet resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(context.Context, *CreateProductSetRequest) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(context.Context, *ListProductSetsRequest) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(context.Context, *GetProductSetRequest) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. 
+ UpdateProductSet(context.Context, *UpdateProductSetRequest) (*ProductSet, error) + // Permanently deletes a ProductSet. Products and ReferenceImages in the + // ProductSet are not deleted. + // + // The actual image files are not deleted from Google Cloud Storage. + DeleteProductSet(context.Context, *DeleteProductSetRequest) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(context.Context, *CreateProductRequest) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(context.Context, *ListProductsRequest) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(context.Context, *GetProductRequest) (*Product, error) + // Makes changes to a Product resource. + // Only the `display_name`, `description`, and `labels` fields can be updated + // right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(context.Context, *UpdateProductRequest) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + DeleteProduct(context.Context, *DeleteProductRequest) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. + // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. + // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. + // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(context.Context, *CreateReferenceImageRequest) (*ReferenceImage, error) + // Permanently deletes a reference image. 
+ // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. + // + // The actual image files are not deleted from Google Cloud Storage. + DeleteReferenceImage(context.Context, *DeleteReferenceImageRequest) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(context.Context, *ListReferenceImagesRequest) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(context.Context, *GetReferenceImageRequest) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(context.Context, *AddProductToProductSetRequest) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + RemoveProductFromProductSet(context.Context, *RemoveProductFromProductSetRequest) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(context.Context, *ListProductsInProductSetRequest) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. (results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1.ImportProductSetsGcsSource.csv_file_uri]. 
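+ //
+ // Illustrative sketch (editorial addition, not part of the generated
+ // definition): assuming an established *grpc.ClientConn named conn and a
+ // context.Context named ctx, a caller could start an import through the
+ // generated client and then follow the returned long-running operation
+ // via the google.longrunning Operations API, roughly like this:
+ //
+ //    client := NewProductSearchClient(conn)
+ //    op, err := client.ImportProductSets(ctx, &ImportProductSetsRequest{
+ //        Parent:      "projects/my-project/locations/us-west1",
+ //        InputConfig: &ImportProductSetsInputConfig{/* CSV source in Google Cloud Storage */},
+ //    })
+ //    if err != nil {
+ //        // handle the RPC error
+ //    }
+ //    _ = op // poll op.Name with the Operations service for progress and results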
+ ImportProductSets(context.Context, *ImportProductSetsRequest) (*longrunning.Operation, error) +} + +func RegisterProductSearchServer(s *grpc.Server, srv ProductSearchServer) { + s.RegisterService(&_ProductSearch_serviceDesc, srv) +} + +func _ProductSearch_CreateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/CreateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProductSet(ctx, req.(*CreateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/ListProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductSets(ctx, req.(*ListProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/GetProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProductSet(ctx, req.(*GetProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/UpdateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProductSet(ctx, req.(*UpdateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/DeleteProductSet", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProductSet(ctx, req.(*DeleteProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/CreateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProduct(ctx, req.(*CreateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProducts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProducts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/ListProducts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProducts(ctx, req.(*ListProductsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/GetProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProduct(ctx, req.(*GetProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/UpdateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProduct(ctx, req.(*UpdateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/DeleteProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProduct(ctx, 
req.(*DeleteProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/CreateReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, req.(*CreateReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/DeleteReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, req.(*DeleteReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListReferenceImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListReferenceImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListReferenceImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/ListReferenceImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListReferenceImages(ctx, req.(*ListReferenceImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/GetReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetReferenceImage(ctx, req.(*GetReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_AddProductToProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddProductToProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/AddProductToProductSet", + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, req.(*AddProductToProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_RemoveProductFromProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveProductFromProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/RemoveProductFromProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, req.(*RemoveProductFromProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductsInProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsInProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/ListProductsInProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, req.(*ListProductsInProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ImportProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ImportProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1.ProductSearch/ImportProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ImportProductSets(ctx, req.(*ImportProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProductSearch_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1.ProductSearch", + HandlerType: (*ProductSearchServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateProductSet", + Handler: _ProductSearch_CreateProductSet_Handler, + }, + { + MethodName: "ListProductSets", + Handler: _ProductSearch_ListProductSets_Handler, + }, + { + MethodName: "GetProductSet", + Handler: _ProductSearch_GetProductSet_Handler, + }, + { + MethodName: "UpdateProductSet", + Handler: _ProductSearch_UpdateProductSet_Handler, + }, + { + MethodName: "DeleteProductSet", + Handler: _ProductSearch_DeleteProductSet_Handler, + }, + { + MethodName: "CreateProduct", + Handler: _ProductSearch_CreateProduct_Handler, + }, + { + MethodName: "ListProducts", + Handler: _ProductSearch_ListProducts_Handler, + }, + { + MethodName: "GetProduct", + Handler: _ProductSearch_GetProduct_Handler, + }, + { + MethodName: "UpdateProduct", + Handler: _ProductSearch_UpdateProduct_Handler, + }, + { + MethodName: "DeleteProduct", + Handler: _ProductSearch_DeleteProduct_Handler, + }, + { + MethodName: "CreateReferenceImage", + 
Handler: _ProductSearch_CreateReferenceImage_Handler, + }, + { + MethodName: "DeleteReferenceImage", + Handler: _ProductSearch_DeleteReferenceImage_Handler, + }, + { + MethodName: "ListReferenceImages", + Handler: _ProductSearch_ListReferenceImages_Handler, + }, + { + MethodName: "GetReferenceImage", + Handler: _ProductSearch_GetReferenceImage_Handler, + }, + { + MethodName: "AddProductToProductSet", + Handler: _ProductSearch_AddProductToProductSet_Handler, + }, + { + MethodName: "RemoveProductFromProductSet", + Handler: _ProductSearch_RemoveProductFromProductSet_Handler, + }, + { + MethodName: "ListProductsInProductSet", + Handler: _ProductSearch_ListProductsInProductSet_Handler, + }, + { + MethodName: "ImportProductSets", + Handler: _ProductSearch_ImportProductSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1/product_search_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/product_search_service.proto", fileDescriptor_product_search_service_914a807106064587) +} + +var fileDescriptor_product_search_service_914a807106064587 = []byte{ + // 1853 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0x4f, 0x73, 0xdb, 0xc6, + 0x15, 0xcf, 0x4a, 0x96, 0x2d, 0x3d, 0x8a, 0x12, 0xbd, 0x55, 0x25, 0x86, 0x8a, 0xc7, 0x2a, 0x9a, + 0xa6, 0x0e, 0xeb, 0x02, 0x35, 0xe5, 0xd4, 0x09, 0x55, 0xbb, 0x91, 0x68, 0x4a, 0x61, 0x22, 0xcb, + 0x0a, 0x28, 0x25, 0xd3, 0x5e, 0x38, 0x10, 0xb0, 0x42, 0x50, 0x81, 0x00, 0x82, 0x05, 0x35, 0x51, + 0x32, 0x3e, 0xb4, 0x3d, 0x34, 0x33, 0x99, 0xc9, 0xa1, 0xed, 0xa9, 0x7f, 0x67, 0x3a, 0xd3, 0xe9, + 0xa1, 0xd3, 0x5e, 0xd2, 0x1c, 0x3a, 0xbd, 0xf7, 0xd4, 0x19, 0x1f, 0xfa, 0x15, 0xfa, 0x21, 0x7a, + 0xec, 0xec, 0x62, 0x41, 0x02, 0x10, 0x20, 0x82, 0xd6, 0xb8, 0x37, 0x2c, 0xf0, 0xfe, 0xfc, 0xde, + 0x7b, 0xbf, 0xdd, 0x7d, 0x8f, 0x84, 0x75, 0xd3, 0x75, 0x4d, 0x9b, 0x28, 0xba, 0xed, 0x0e, 0x0c, + 0xe5, 0xd4, 0xa2, 0x96, 0xeb, 0x28, 0xa7, 0x77, 0x14, 0xcf, 0x77, 0x8d, 0x81, 0x1e, 0xf4, 0x28, + 0xd1, 0x7c, 0xfd, 0x83, 0x1e, 0x25, 0xfe, 0xa9, 0xa5, 0x13, 0xd9, 0xf3, 0xdd, 0xc0, 0xc5, 0xcb, + 0xa1, 0x92, 0xcc, 0x95, 0xe4, 0x50, 0x49, 0x3e, 0xbd, 0x53, 0x7b, 0x49, 0x18, 0xd3, 0x3c, 0x4b, + 0xd1, 0x1c, 0xc7, 0x0d, 0xb4, 0xc0, 0x72, 0x1d, 0x1a, 0x6a, 0xd5, 0xbe, 0x91, 0xe3, 0xca, 0x24, + 0x6e, 0x9f, 0x04, 0xfe, 0x99, 0x10, 0xfb, 0xba, 0x10, 0xb3, 0x5d, 0xc7, 0xf4, 0x07, 0x8e, 0x63, + 0x39, 0xa6, 0xe2, 0x7a, 0xc4, 0x4f, 0xd8, 0x5a, 0x15, 0x42, 0x7c, 0x75, 0x34, 0x38, 0x56, 0x48, + 0xdf, 0x0b, 0x22, 0x0b, 0x6b, 0xe9, 0x8f, 0xc7, 0x16, 0xb1, 0x8d, 0x5e, 0x5f, 0xa3, 0x27, 0x42, + 0xe2, 0x66, 0x5a, 0x22, 0xb0, 0xfa, 0x84, 0x06, 0x5a, 0xdf, 0x13, 0x02, 0x2b, 0x42, 0xc0, 0xf7, + 0x74, 0x85, 0x06, 0x5a, 0x30, 0x10, 0x8e, 0xa5, 0x9f, 0x4f, 0xc1, 0xb5, 0xfd, 0x30, 0x37, 0x18, + 0xc3, 0x15, 0x47, 0xeb, 0x93, 0x2a, 0x5a, 0x43, 0xb7, 0xe6, 0x54, 0xfe, 0x8c, 0xbf, 0x06, 0xf3, + 0x86, 0x45, 0x3d, 0x5b, 0x3b, 0xeb, 0xf1, 0x6f, 0x53, 0xfc, 0x5b, 0x49, 0xbc, 0xdb, 0x63, 0x22, + 0x6b, 0x50, 0x32, 0x08, 0xd5, 0x7d, 0xcb, 0x63, 0x11, 0x55, 0xa7, 0x85, 0xc4, 0xe8, 0x15, 0x7e, + 0x15, 0x2a, 0x51, 0xfe, 0x75, 0x2d, 0x20, 0xa6, 0xeb, 0x9f, 0x55, 0xaf, 0x70, 0xb1, 0x45, 0xf1, + 0xbe, 0x25, 0x5e, 0xe3, 0xc7, 0xb0, 0x10, 0x89, 0xda, 0xda, 0x11, 0xb1, 0x69, 0x75, 0x66, 0x6d, + 0xfa, 0x56, 0xa9, 0x71, 0x4b, 0xce, 0xae, 0x91, 0x2c, 0xc0, 0xcb, 0xef, 0x90, 0xb3, 0xf7, 0x34, + 0x7b, 0x40, 0xd4, 0xb2, 0xd0, 0xdf, 0xe5, 0xea, 0xb5, 0x06, 0xcc, 0x46, 0x9f, 0x70, 0x05, 0xa6, + 0x4f, 0xc8, 0x99, 0x88, 
0x8f, 0x3d, 0xe2, 0x25, 0x98, 0x39, 0x65, 0x9f, 0x44, 0x5c, 0xe1, 0x42, + 0xfa, 0x02, 0x01, 0x08, 0xbb, 0x5d, 0xf2, 0xcc, 0x79, 0x79, 0x03, 0xc0, 0x72, 0x0c, 0xf2, 0x51, + 0x8f, 0x15, 0x83, 0xa7, 0xa5, 0xd4, 0xa8, 0x45, 0x61, 0x44, 0x95, 0x92, 0x0f, 0xa2, 0x4a, 0xa9, + 0x73, 0x5c, 0x9a, 0xad, 0xf1, 0x3a, 0x94, 0x42, 0x55, 0xe2, 0xfb, 0xae, 0xcf, 0x73, 0x55, 0x6a, + 0xe0, 0x48, 0xd7, 0xf7, 0x74, 0xb9, 0xcb, 0x8b, 0xa8, 0x86, 0x1e, 0xda, 0x4c, 0x4a, 0xfa, 0x29, + 0x82, 0x05, 0x95, 0x1c, 0x13, 0x9f, 0x38, 0x3a, 0xe9, 0xf4, 0x35, 0x93, 0x64, 0x22, 0xaf, 0xc0, + 0xf4, 0xc0, 0xb7, 0x04, 0x60, 0xf6, 0x88, 0xdf, 0x81, 0x85, 0x23, 0x77, 0xe0, 0x18, 0x96, 0x63, + 0xf6, 0x3c, 0xd7, 0x3e, 0xa3, 0xd5, 0x69, 0x9e, 0xf3, 0x97, 0xf3, 0x72, 0xbe, 0x25, 0xa4, 0xf7, + 0x5d, 0xfb, 0x4c, 0x2d, 0x1f, 0xc5, 0x56, 0x54, 0xfa, 0x14, 0xc1, 0x52, 0xcb, 0x27, 0x5a, 0x40, + 0x44, 0x06, 0x55, 0xf2, 0xe1, 0x80, 0xd0, 0x00, 0x2f, 0xc3, 0x55, 0x4f, 0xf3, 0x89, 0x13, 0x08, + 0x34, 0x62, 0x85, 0xdf, 0x80, 0x6b, 0xa2, 0x62, 0x1c, 0x53, 0xa9, 0x71, 0x73, 0x4c, 0xa9, 0xd5, + 0x48, 0x1e, 0xdf, 0x00, 0x88, 0xc8, 0x62, 0x19, 0x82, 0x78, 0x73, 0xe2, 0x4d, 0xc7, 0x90, 0x2c, + 0xf8, 0xca, 0xae, 0x45, 0x03, 0xa1, 0x46, 0xc7, 0x01, 0x59, 0x85, 0x39, 0x4f, 0x33, 0x49, 0x8f, + 0x5a, 0x1f, 0x87, 0xf5, 0x9c, 0x51, 0x67, 0xd9, 0x8b, 0xae, 0xf5, 0x31, 0xe1, 0xae, 0xd8, 0xc7, + 0xc0, 0x3d, 0x21, 0xce, 0xd0, 0x95, 0x66, 0x92, 0x03, 0xf6, 0x42, 0xfa, 0x04, 0x96, 0x92, 0xae, + 0xa8, 0xe7, 0x3a, 0x94, 0xe0, 0x0d, 0x98, 0x15, 0x78, 0x68, 0x15, 0xf1, 0xa4, 0x8e, 0x8d, 0x6e, + 0xa8, 0x80, 0x5f, 0x81, 0x45, 0x87, 0x7c, 0x14, 0xf4, 0x62, 0x8e, 0xc3, 0xaa, 0x95, 0xd9, 0xeb, + 0xfd, 0xa1, 0xf3, 0x6f, 0xc2, 0xf5, 0x1d, 0x12, 0xa4, 0xd2, 0x9d, 0x51, 0x7a, 0xe9, 0x73, 0x04, + 0x4b, 0x87, 0x9e, 0x71, 0xbe, 0x36, 0xb1, 0x1a, 0xa0, 0x09, 0x6b, 0xb0, 0x01, 0xa5, 0x01, 0x37, + 0xc9, 0xcf, 0x23, 0x51, 0xc2, 0xf3, 0x34, 0xdf, 0x66, 0x47, 0xd6, 0x23, 0x8d, 0x9e, 0xa8, 0x10, + 0x8a, 0xb3, 0x67, 0xa9, 0x0e, 0x4b, 0x0f, 0x89, 0x4d, 0xce, 0xe1, 0xc9, 0x02, 0xff, 0x1b, 0x04, + 0x2b, 0x09, 0x62, 0x75, 0xc9, 0x58, 0x6e, 0xb5, 0xa0, 0x34, 0x3a, 0xf8, 0x23, 0x7e, 0x49, 0x63, + 0x62, 0x63, 0x76, 0x23, 0x5e, 0xb1, 0xed, 0xff, 0xf2, 0xe8, 0x48, 0xa2, 0x24, 0xc6, 0xb4, 0xf9, + 0x91, 0x4c, 0xc7, 0x90, 0x6c, 0x58, 0x8e, 0x31, 0xa0, 0x4b, 0x9e, 0x2f, 0xdf, 0x3e, 0x45, 0xb0, + 0x72, 0xce, 0x9d, 0xe0, 0x5c, 0x1b, 0xe6, 0x63, 0x78, 0x23, 0xde, 0x15, 0x89, 0xba, 0x34, 0x8a, + 0xa8, 0x38, 0xfb, 0xea, 0xb0, 0x34, 0x62, 0x5f, 0xac, 0x26, 0x59, 0x35, 0xfc, 0x35, 0x82, 0x95, + 0x04, 0x01, 0x63, 0xf2, 0xa9, 0x5a, 0xa1, 0x67, 0xaa, 0xd5, 0xa5, 0xd8, 0xf8, 0x6d, 0x58, 0x49, + 0xb0, 0x71, 0x4c, 0x30, 0x7f, 0x43, 0xb0, 0x1a, 0x12, 0x32, 0x79, 0xea, 0x8e, 0xab, 0xfb, 0x63, + 0x58, 0xf4, 0x23, 0x85, 0x9e, 0xc5, 0x34, 0x04, 0xce, 0x57, 0xf2, 0x82, 0x4d, 0xd9, 0x5f, 0xf0, + 0x93, 0xa7, 0xfc, 0x6d, 0xc0, 0x29, 0x83, 0x23, 0x92, 0x56, 0x92, 0xb2, 0x1d, 0x43, 0xf2, 0xa0, + 0xc6, 0x98, 0x93, 0xb4, 0xf9, 0x5c, 0xc9, 0xfa, 0x25, 0x82, 0xd5, 0x4c, 0x97, 0x82, 0xb0, 0xef, + 0x42, 0x25, 0x85, 0x3f, 0x22, 0x6d, 0xd1, 0x8c, 0x2c, 0x26, 0xa3, 0xa4, 0x17, 0xc3, 0xcd, 0x60, + 0xf6, 0x74, 0x16, 0xb3, 0x65, 0xa8, 0xee, 0x90, 0x20, 0xbb, 0xb8, 0x59, 0x84, 0xb8, 0x03, 0xab, + 0x21, 0x7f, 0x8a, 0xab, 0x3c, 0x82, 0x1b, 0x9b, 0x86, 0x21, 0xf8, 0x76, 0xe0, 0x16, 0x22, 0x1e, + 0xae, 0x26, 0x6f, 0xcc, 0xb9, 0xe1, 0x61, 0x2c, 0xa9, 0x20, 0xa9, 0xa4, 0xef, 0x9e, 0x46, 0x0c, + 0xde, 0xf6, 0xdd, 0xfe, 0x65, 0x6d, 0x7e, 0x08, 0x37, 0xe3, 0x57, 0x5b, 0xc7, 0x29, 0x66, 0xf0, + 0x32, 0x84, 0xf9, 0x19, 0x82, 0xb5, 0x7c, 0x9f, 
0xff, 0xcf, 0xab, 0xf5, 0x01, 0xd4, 0x3a, 0x7d, + 0xcf, 0xf5, 0xe3, 0x07, 0xed, 0x8e, 0x4e, 0xbb, 0xee, 0xc0, 0xd7, 0x59, 0xe7, 0x3b, 0xaf, 0xd3, + 0xd3, 0xde, 0xb1, 0x65, 0x93, 0x1e, 0xeb, 0xa9, 0xc2, 0xf8, 0x41, 0xa7, 0xa7, 0xdb, 0x96, 0x4d, + 0x0e, 0x7d, 0x4b, 0x7a, 0x02, 0x2f, 0x9d, 0xd3, 0xef, 0x38, 0xde, 0x20, 0x68, 0xb9, 0xce, 0xb1, + 0x65, 0xe2, 0x2e, 0x80, 0xa9, 0xd3, 0x1e, 0xe5, 0xf6, 0xc4, 0x99, 0xd7, 0xc8, 0x0b, 0x23, 0x1f, + 0xc9, 0x5b, 0x2f, 0xa8, 0x73, 0x66, 0xb4, 0xd8, 0x9a, 0x85, 0xab, 0xa1, 0x41, 0xe9, 0x33, 0x04, + 0xd5, 0x73, 0x5a, 0xe3, 0xb6, 0xfa, 0xfb, 0x30, 0x6f, 0x31, 0x88, 0x3d, 0x9d, 0x63, 0x14, 0x87, + 0xd3, 0xdd, 0xc2, 0xa8, 0x62, 0xf1, 0xa9, 0x25, 0x6b, 0xb4, 0x90, 0x7e, 0x87, 0xe0, 0xc5, 0x0c, + 0x34, 0xcf, 0xef, 0x14, 0x90, 0x61, 0x36, 0x1c, 0x76, 0x08, 0xad, 0x4e, 0x71, 0x53, 0x59, 0x3d, + 0xf4, 0x50, 0x46, 0xfa, 0x62, 0x0a, 0x96, 0xb7, 0xb4, 0x40, 0xff, 0xe0, 0x71, 0x34, 0x9f, 0x3d, + 0x22, 0x81, 0x66, 0x68, 0x81, 0x86, 0xdf, 0x86, 0x19, 0x26, 0x16, 0xd6, 0x68, 0x21, 0x3f, 0x1b, + 0xd9, 0xea, 0xdc, 0x15, 0x51, 0x43, 0x13, 0xec, 0x92, 0xa2, 0x83, 0xa3, 0xbe, 0x15, 0x84, 0x93, + 0xc1, 0xd4, 0xd8, 0xc9, 0x00, 0x42, 0x71, 0x3e, 0x1a, 0xbc, 0x06, 0xb3, 0xc4, 0x31, 0x8a, 0xce, + 0x14, 0xd7, 0x88, 0x63, 0xb0, 0x95, 0xf4, 0x03, 0x98, 0xe1, 0x18, 0xf0, 0x57, 0xe1, 0x7a, 0xf7, + 0x60, 0xf3, 0xa0, 0xdd, 0x3b, 0xdc, 0xeb, 0xee, 0xb7, 0x5b, 0x9d, 0xed, 0x4e, 0xfb, 0x61, 0xe5, + 0x05, 0xbc, 0x00, 0xb0, 0xaf, 0x3e, 0x6e, 0xb5, 0xbb, 0xdd, 0xce, 0xde, 0x4e, 0x05, 0xb1, 0x75, + 0xf7, 0xb0, 0xc5, 0xd6, 0xdb, 0x87, 0xbb, 0x95, 0x29, 0x0c, 0x70, 0x75, 0x7b, 0xb3, 0xb3, 0xdb, + 0x7e, 0x58, 0x99, 0xc6, 0x65, 0x98, 0x6b, 0x6d, 0xee, 0xb5, 0xda, 0xbb, 0x6c, 0x79, 0xa5, 0xf1, + 0xb4, 0x0a, 0xe5, 0x61, 0x41, 0xd9, 0x74, 0x8d, 0xff, 0x82, 0xa0, 0x92, 0x6e, 0xd5, 0xb0, 0x92, + 0x97, 0xb2, 0x9c, 0xa6, 0xae, 0x56, 0xe0, 0xee, 0x97, 0x1e, 0xfe, 0xe4, 0xdf, 0xff, 0xf9, 0xc5, + 0xd4, 0x03, 0x49, 0x61, 0x83, 0xf7, 0x27, 0x21, 0x81, 0xef, 0x7b, 0xbe, 0xfb, 0x23, 0xa2, 0x07, + 0x54, 0xa9, 0x2b, 0xb6, 0xab, 0x87, 0x73, 0xb6, 0x52, 0x7f, 0xa2, 0xc4, 0xba, 0x9b, 0x66, 0xbc, + 0xd7, 0xc0, 0x7f, 0x46, 0xb0, 0x98, 0xea, 0xa6, 0xb0, 0x9c, 0xe7, 0x3d, 0xbb, 0xcb, 0xab, 0x29, + 0x85, 0xe5, 0x43, 0xbe, 0x4b, 0xf7, 0x38, 0xf4, 0x3b, 0x78, 0x52, 0xe8, 0xf8, 0x57, 0x08, 0xca, + 0x89, 0x8e, 0x0b, 0xdf, 0xce, 0xf3, 0x9d, 0xd5, 0x98, 0x15, 0xca, 0x6b, 0x12, 0x1c, 0x3b, 0xd0, + 0x73, 0xa0, 0xc5, 0x91, 0x29, 0xf5, 0x27, 0xf8, 0xef, 0x08, 0x2a, 0xe9, 0x0e, 0x2f, 0xbf, 0xf4, + 0x39, 0xbd, 0x60, 0x21, 0x88, 0xfb, 0x1c, 0xe2, 0xdb, 0x8d, 0x8d, 0x30, 0x7f, 0xa3, 0x72, 0xca, + 0xc5, 0xe1, 0x26, 0x69, 0xf0, 0x4b, 0x04, 0x95, 0x74, 0x03, 0x98, 0x8f, 0x3d, 0xa7, 0x55, 0xac, + 0x2d, 0x9f, 0xdb, 0x8e, 0xed, 0xbe, 0x17, 0x9c, 0x45, 0x29, 0xad, 0x4f, 0x9c, 0xd2, 0xdf, 0x22, + 0x28, 0x27, 0xf6, 0x48, 0x7e, 0xbd, 0xb3, 0x06, 0xef, 0xda, 0xb8, 0x6b, 0x51, 0xba, 0xcf, 0x91, + 0xdd, 0x93, 0x6e, 0x4f, 0xc0, 0x44, 0xda, 0x1c, 0x4e, 0x80, 0xbf, 0x47, 0x30, 0x1f, 0xbf, 0xad, + 0xf1, 0xb7, 0x0a, 0x6c, 0x85, 0xe1, 0xbe, 0xb9, 0x5d, 0x4c, 0x58, 0x6c, 0x9a, 0xbb, 0x1c, 0xaa, + 0x8c, 0x27, 0x82, 0x8a, 0x3f, 0x47, 0x00, 0xa3, 0xad, 0x80, 0x5f, 0x1d, 0xbf, 0x5d, 0x0a, 0xe7, + 0x2e, 0x09, 0xa8, 0x40, 0x55, 0x79, 0x49, 0xff, 0x88, 0xa0, 0x9c, 0xe0, 0x7e, 0x7e, 0x49, 0xb3, + 0xe6, 0xf5, 0xf1, 0xb0, 0xda, 0x1c, 0xd6, 0xf7, 0x1b, 0x77, 0xe3, 0x9b, 0x43, 0x2e, 0x08, 0x6f, + 0x54, 0xda, 0xcf, 0x10, 0x94, 0x13, 0x3c, 0xcf, 0xc7, 0x99, 0x35, 0xc7, 0xe7, 0xee, 0x05, 0x91, + 0xb5, 0xfa, 0x64, 0x59, 0xfb, 0xd7, 0xf0, 0xa7, 0xa5, 0xd4, 0xcf, 0x5c, 
0xeb, 0x17, 0xef, 0x87, + 0xcc, 0x76, 0xbc, 0x56, 0xb0, 0xab, 0x90, 0xde, 0xe7, 0x58, 0xdf, 0x95, 0x1e, 0x8c, 0xa7, 0x5c, + 0x1c, 0xad, 0x92, 0x6a, 0x46, 0x9a, 0xe9, 0xa1, 0x8f, 0xdd, 0x3a, 0x4b, 0x59, 0xf3, 0x42, 0x7e, + 0x38, 0x17, 0x4c, 0x17, 0xb9, 0xa9, 0xde, 0xe6, 0xf0, 0xdf, 0xac, 0x3f, 0x98, 0x20, 0xd5, 0x69, + 0xec, 0x2c, 0xf9, 0xff, 0x44, 0xe1, 0xaf, 0x69, 0xa9, 0x21, 0x0e, 0x37, 0x2e, 0xda, 0xbf, 0xd9, + 0x43, 0x66, 0x6d, 0x7d, 0x22, 0x1d, 0xb1, 0xf5, 0x45, 0x20, 0xf8, 0x92, 0x75, 0xc0, 0x5f, 0x22, + 0xfe, 0x73, 0x59, 0x2a, 0xe7, 0xdf, 0xb9, 0xe0, 0x4c, 0xb8, 0x1c, 0x7f, 0x92, 0xb8, 0x9f, 0xbd, + 0x00, 0x7f, 0x45, 0xb0, 0x9c, 0x3d, 0x2b, 0xe2, 0xd7, 0xf2, 0xa0, 0x5c, 0x38, 0x5b, 0xe6, 0x52, + 0x46, 0x1c, 0x1e, 0x52, 0x73, 0xc2, 0x9b, 0xaa, 0xa9, 0x0d, 0xdd, 0x35, 0x51, 0x1d, 0xff, 0x03, + 0xc1, 0xea, 0x05, 0xd3, 0x28, 0x6e, 0xe6, 0x27, 0x70, 0xdc, 0x08, 0x9b, 0x0b, 0xfd, 0x2d, 0x0e, + 0x7d, 0x4b, 0xba, 0x3f, 0x29, 0x74, 0x3f, 0xee, 0x93, 0xa1, 0x7f, 0x8a, 0xa0, 0x9a, 0x37, 0x83, + 0xe2, 0x7b, 0x45, 0x2e, 0xad, 0x8c, 0x49, 0xb9, 0xf6, 0xfa, 0xe4, 0x8a, 0x82, 0xfe, 0x6f, 0xf2, + 0xc8, 0x9a, 0xf8, 0xf5, 0x09, 0x23, 0x1b, 0xdd, 0x82, 0x7f, 0x42, 0x70, 0xfd, 0xdc, 0xf8, 0x95, + 0x4f, 0xfc, 0xbc, 0xb9, 0xb1, 0x76, 0x23, 0xd2, 0x88, 0xfd, 0xa3, 0x25, 0x0f, 0x47, 0x1e, 0x69, + 0x93, 0x03, 0xdd, 0x90, 0xbe, 0x3b, 0x69, 0x4b, 0x6e, 0x71, 0x87, 0x4d, 0x54, 0xdf, 0xfa, 0x31, + 0x82, 0x9a, 0xee, 0xf6, 0x73, 0x90, 0x6d, 0xbd, 0x98, 0x98, 0x36, 0xba, 0xe1, 0x5f, 0x79, 0xfb, + 0x8c, 0x08, 0xfb, 0xe8, 0x87, 0xdf, 0x13, 0x4a, 0xa6, 0x6b, 0x6b, 0x8e, 0x29, 0xbb, 0xbe, 0xa9, + 0x98, 0xc4, 0xe1, 0x34, 0x51, 0xc2, 0x4f, 0x9a, 0x67, 0xd1, 0xf4, 0x9f, 0x76, 0x1b, 0xe1, 0xd3, + 0x7f, 0x11, 0xfa, 0xc3, 0xd4, 0x95, 0x9d, 0xd6, 0x7b, 0x7b, 0x47, 0x57, 0xb9, 0xca, 0xfa, 0xff, + 0x02, 0x00, 0x00, 0xff, 0xff, 0x1d, 0xa2, 0x18, 0x20, 0x51, 0x1c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/text_annotation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/text_annotation.pb.go new file mode 100644 index 0000000..4dc4594 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/text_annotation.pb.go @@ -0,0 +1,798 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/text_annotation.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to denote the type of break found. New line, space etc. +type TextAnnotation_DetectedBreak_BreakType int32 + +const ( + // Unknown break label type. + TextAnnotation_DetectedBreak_UNKNOWN TextAnnotation_DetectedBreak_BreakType = 0 + // Regular space. + TextAnnotation_DetectedBreak_SPACE TextAnnotation_DetectedBreak_BreakType = 1 + // Sure space (very wide). + TextAnnotation_DetectedBreak_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 2 + // Line-wrapping break. 
+ TextAnnotation_DetectedBreak_EOL_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 3 + // End-line hyphen that is not present in text; does not co-occur with + // `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + TextAnnotation_DetectedBreak_HYPHEN TextAnnotation_DetectedBreak_BreakType = 4 + // Line break that ends a paragraph. + TextAnnotation_DetectedBreak_LINE_BREAK TextAnnotation_DetectedBreak_BreakType = 5 +) + +var TextAnnotation_DetectedBreak_BreakType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SPACE", + 2: "SURE_SPACE", + 3: "EOL_SURE_SPACE", + 4: "HYPHEN", + 5: "LINE_BREAK", +} +var TextAnnotation_DetectedBreak_BreakType_value = map[string]int32{ + "UNKNOWN": 0, + "SPACE": 1, + "SURE_SPACE": 2, + "EOL_SURE_SPACE": 3, + "HYPHEN": 4, + "LINE_BREAK": 5, +} + +func (x TextAnnotation_DetectedBreak_BreakType) String() string { + return proto.EnumName(TextAnnotation_DetectedBreak_BreakType_name, int32(x)) +} +func (TextAnnotation_DetectedBreak_BreakType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{0, 1, 0} +} + +// Type of a block (text, image etc) as identified by OCR. +type Block_BlockType int32 + +const ( + // Unknown block type. + Block_UNKNOWN Block_BlockType = 0 + // Regular text block. + Block_TEXT Block_BlockType = 1 + // Table block. + Block_TABLE Block_BlockType = 2 + // Image block. + Block_PICTURE Block_BlockType = 3 + // Horizontal/vertical line box. + Block_RULER Block_BlockType = 4 + // Barcode block. + Block_BARCODE Block_BlockType = 5 +) + +var Block_BlockType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "TEXT", + 2: "TABLE", + 3: "PICTURE", + 4: "RULER", + 5: "BARCODE", +} +var Block_BlockType_value = map[string]int32{ + "UNKNOWN": 0, + "TEXT": 1, + "TABLE": 2, + "PICTURE": 3, + "RULER": 4, + "BARCODE": 5, +} + +func (x Block_BlockType) String() string { + return proto.EnumName(Block_BlockType_name, int32(x)) +} +func (Block_BlockType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{2, 0} +} + +// TextAnnotation contains a structured representation of OCR extracted text. +// The hierarchy of an OCR extracted text structure is like this: +// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol +// Each structural component, starting from Page, may further have their own +// properties. Properties describe detected languages, breaks etc.. Please refer +// to the +// [TextAnnotation.TextProperty][google.cloud.vision.v1.TextAnnotation.TextProperty] +// message definition below for more detail. +type TextAnnotation struct { + // List of pages detected by OCR. + Pages []*Page `protobuf:"bytes,1,rep,name=pages,proto3" json:"pages,omitempty"` + // UTF-8 text detected on the pages. 
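+ //
+ // Illustrative sketch (editorial addition, not generated code): this field
+ // is effectively the reading-order concatenation of the hierarchy described
+ // above. Assuming a *TextAnnotation value named ta, a comparable string
+ // (ignoring DetectedBreak whitespace) could be rebuilt like this:
+ //
+ //    var b strings.Builder
+ //    for _, page := range ta.GetPages() {
+ //        for _, block := range page.GetBlocks() {
+ //            for _, para := range block.GetParagraphs() {
+ //                for _, word := range para.GetWords() {
+ //                    for _, sym := range word.GetSymbols() {
+ //                        b.WriteString(sym.GetText())
+ //                    }
+ //                }
+ //            }
+ //        }
+ //    }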
+ Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{0} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +// Detected language for a structural component. +type TextAnnotation_DetectedLanguage struct { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Confidence of detected language. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedLanguage) Reset() { *m = TextAnnotation_DetectedLanguage{} } +func (m *TextAnnotation_DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedLanguage) ProtoMessage() {} +func (*TextAnnotation_DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{0, 0} +} +func (m *TextAnnotation_DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedLanguage.Merge(dst, src) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Size(m) +} +func (m *TextAnnotation_DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedLanguage proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *TextAnnotation_DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected start or end of a structural component. 
+type TextAnnotation_DetectedBreak struct { + // Detected break type. + Type TextAnnotation_DetectedBreak_BreakType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1.TextAnnotation_DetectedBreak_BreakType" json:"type,omitempty"` + // True if break prepends the element. + IsPrefix bool `protobuf:"varint,2,opt,name=is_prefix,json=isPrefix,proto3" json:"is_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedBreak) Reset() { *m = TextAnnotation_DetectedBreak{} } +func (m *TextAnnotation_DetectedBreak) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedBreak) ProtoMessage() {} +func (*TextAnnotation_DetectedBreak) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{0, 1} +} +func (m *TextAnnotation_DetectedBreak) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedBreak) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedBreak) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedBreak.Merge(dst, src) +} +func (m *TextAnnotation_DetectedBreak) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Size(m) +} +func (m *TextAnnotation_DetectedBreak) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedBreak.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedBreak proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedBreak) GetType() TextAnnotation_DetectedBreak_BreakType { + if m != nil { + return m.Type + } + return TextAnnotation_DetectedBreak_UNKNOWN +} + +func (m *TextAnnotation_DetectedBreak) GetIsPrefix() bool { + if m != nil { + return m.IsPrefix + } + return false +} + +// Additional information detected on the structural component. +type TextAnnotation_TextProperty struct { + // A list of detected languages together with confidence. + DetectedLanguages []*TextAnnotation_DetectedLanguage `protobuf:"bytes,1,rep,name=detected_languages,json=detectedLanguages,proto3" json:"detected_languages,omitempty"` + // Detected start or end of a text segment. 
+ DetectedBreak *TextAnnotation_DetectedBreak `protobuf:"bytes,2,opt,name=detected_break,json=detectedBreak,proto3" json:"detected_break,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_TextProperty) Reset() { *m = TextAnnotation_TextProperty{} } +func (m *TextAnnotation_TextProperty) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_TextProperty) ProtoMessage() {} +func (*TextAnnotation_TextProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{0, 2} +} +func (m *TextAnnotation_TextProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_TextProperty.Unmarshal(m, b) +} +func (m *TextAnnotation_TextProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_TextProperty.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_TextProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_TextProperty.Merge(dst, src) +} +func (m *TextAnnotation_TextProperty) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_TextProperty.Size(m) +} +func (m *TextAnnotation_TextProperty) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_TextProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_TextProperty proto.InternalMessageInfo + +func (m *TextAnnotation_TextProperty) GetDetectedLanguages() []*TextAnnotation_DetectedLanguage { + if m != nil { + return m.DetectedLanguages + } + return nil +} + +func (m *TextAnnotation_TextProperty) GetDetectedBreak() *TextAnnotation_DetectedBreak { + if m != nil { + return m.DetectedBreak + } + return nil +} + +// Detected page from OCR. +type Page struct { + // Additional information detected on the page. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Page width. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Width int32 `protobuf:"varint,2,opt,name=width,proto3" json:"width,omitempty"` + // Page height. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Height int32 `protobuf:"varint,3,opt,name=height,proto3" json:"height,omitempty"` + // List of blocks of text, images etc on this page. + Blocks []*Block `protobuf:"bytes,4,rep,name=blocks,proto3" json:"blocks,omitempty"` + // Confidence of the OCR results on the page. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{1} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Page) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Page) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *Page) GetBlocks() []*Block { + if m != nil { + return m.Blocks + } + return nil +} + +func (m *Page) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Logical element on the page. +type Block struct { + // Additional information detected for the block. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the block. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // + // * when the text is horizontal it might look like: + // + // 0----1 + // | | + // 3----2 + // + // * when it's rotated 180 degrees around the top-left corner it becomes: + // + // 2----3 + // | | + // 1----0 + // + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of paragraphs in this block (if this blocks is of type text). + Paragraphs []*Paragraph `protobuf:"bytes,3,rep,name=paragraphs,proto3" json:"paragraphs,omitempty"` + // Detected block type (text, image etc) for this block. + BlockType Block_BlockType `protobuf:"varint,4,opt,name=block_type,json=blockType,proto3,enum=google.cloud.vision.v1.Block_BlockType" json:"block_type,omitempty"` + // Confidence of the OCR results on the block. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Block) Reset() { *m = Block{} } +func (m *Block) String() string { return proto.CompactTextString(m) } +func (*Block) ProtoMessage() {} +func (*Block) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{2} +} +func (m *Block) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Block.Unmarshal(m, b) +} +func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Block.Marshal(b, m, deterministic) +} +func (dst *Block) XXX_Merge(src proto.Message) { + xxx_messageInfo_Block.Merge(dst, src) +} +func (m *Block) XXX_Size() int { + return xxx_messageInfo_Block.Size(m) +} +func (m *Block) XXX_DiscardUnknown() { + xxx_messageInfo_Block.DiscardUnknown(m) +} + +var xxx_messageInfo_Block proto.InternalMessageInfo + +func (m *Block) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Block) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Block) GetParagraphs() []*Paragraph { + if m != nil { + return m.Paragraphs + } + return nil +} + +func (m *Block) GetBlockType() Block_BlockType { + if m != nil { + return m.BlockType + } + return Block_UNKNOWN +} + +func (m *Block) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Structural unit of text representing a number of words in certain order. +type Paragraph struct { + // Additional information detected for the paragraph. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the paragraph. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of words in this paragraph. + Words []*Word `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + // Confidence of the OCR results for the paragraph. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Paragraph) Reset() { *m = Paragraph{} } +func (m *Paragraph) String() string { return proto.CompactTextString(m) } +func (*Paragraph) ProtoMessage() {} +func (*Paragraph) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{3} +} +func (m *Paragraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Paragraph.Unmarshal(m, b) +} +func (m *Paragraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Paragraph.Marshal(b, m, deterministic) +} +func (dst *Paragraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_Paragraph.Merge(dst, src) +} +func (m *Paragraph) XXX_Size() int { + return xxx_messageInfo_Paragraph.Size(m) +} +func (m *Paragraph) XXX_DiscardUnknown() { + xxx_messageInfo_Paragraph.DiscardUnknown(m) +} + +var xxx_messageInfo_Paragraph proto.InternalMessageInfo + +func (m *Paragraph) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Paragraph) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Paragraph) GetWords() []*Word { + if m != nil { + return m.Words + } + return nil +} + +func (m *Paragraph) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A word representation. +type Word struct { + // Additional information detected for the word. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the word. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of symbols in the word. + // The order of the symbols follows the natural reading order. + Symbols []*Symbol `protobuf:"bytes,3,rep,name=symbols,proto3" json:"symbols,omitempty"` + // Confidence of the OCR results for the word. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Word) Reset() { *m = Word{} } +func (m *Word) String() string { return proto.CompactTextString(m) } +func (*Word) ProtoMessage() {} +func (*Word) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{4} +} +func (m *Word) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Word.Unmarshal(m, b) +} +func (m *Word) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Word.Marshal(b, m, deterministic) +} +func (dst *Word) XXX_Merge(src proto.Message) { + xxx_messageInfo_Word.Merge(dst, src) +} +func (m *Word) XXX_Size() int { + return xxx_messageInfo_Word.Size(m) +} +func (m *Word) XXX_DiscardUnknown() { + xxx_messageInfo_Word.DiscardUnknown(m) +} + +var xxx_messageInfo_Word proto.InternalMessageInfo + +func (m *Word) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Word) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Word) GetSymbols() []*Symbol { + if m != nil { + return m.Symbols + } + return nil +} + +func (m *Word) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A single symbol representation. +type Symbol struct { + // Additional information detected for the symbol. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the symbol. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // The actual UTF-8 representation of the symbol. + Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` + // Confidence of the OCR results for the symbol. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Symbol) Reset() { *m = Symbol{} } +func (m *Symbol) String() string { return proto.CompactTextString(m) } +func (*Symbol) ProtoMessage() {} +func (*Symbol) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_29fd59b3d2ddfb72, []int{5} +} +func (m *Symbol) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Symbol.Unmarshal(m, b) +} +func (m *Symbol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Symbol.Marshal(b, m, deterministic) +} +func (dst *Symbol) XXX_Merge(src proto.Message) { + xxx_messageInfo_Symbol.Merge(dst, src) +} +func (m *Symbol) XXX_Size() int { + return xxx_messageInfo_Symbol.Size(m) +} +func (m *Symbol) XXX_DiscardUnknown() { + xxx_messageInfo_Symbol.DiscardUnknown(m) +} + +var xxx_messageInfo_Symbol proto.InternalMessageInfo + +func (m *Symbol) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Symbol) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Symbol) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Symbol) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func init() { + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.vision.v1.TextAnnotation") + proto.RegisterType((*TextAnnotation_DetectedLanguage)(nil), "google.cloud.vision.v1.TextAnnotation.DetectedLanguage") + proto.RegisterType((*TextAnnotation_DetectedBreak)(nil), "google.cloud.vision.v1.TextAnnotation.DetectedBreak") + proto.RegisterType((*TextAnnotation_TextProperty)(nil), "google.cloud.vision.v1.TextAnnotation.TextProperty") + proto.RegisterType((*Page)(nil), "google.cloud.vision.v1.Page") + proto.RegisterType((*Block)(nil), "google.cloud.vision.v1.Block") + proto.RegisterType((*Paragraph)(nil), "google.cloud.vision.v1.Paragraph") + proto.RegisterType((*Word)(nil), "google.cloud.vision.v1.Word") + proto.RegisterType((*Symbol)(nil), "google.cloud.vision.v1.Symbol") + proto.RegisterEnum("google.cloud.vision.v1.TextAnnotation_DetectedBreak_BreakType", TextAnnotation_DetectedBreak_BreakType_name, TextAnnotation_DetectedBreak_BreakType_value) + proto.RegisterEnum("google.cloud.vision.v1.Block_BlockType", Block_BlockType_name, Block_BlockType_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/text_annotation.proto", fileDescriptor_text_annotation_29fd59b3d2ddfb72) +} + +var fileDescriptor_text_annotation_29fd59b3d2ddfb72 = []byte{ + // 774 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0xcf, 0x6f, 0xd3, 0x58, + 0x10, 0x5e, 0x27, 0x76, 0x1a, 0x4f, 0xda, 0xc8, 0xfb, 0x76, 0x55, 0x45, 0xd9, 0x6e, 0xd5, 0xcd, + 0x82, 0xe8, 0x01, 0x39, 0x6a, 0x0a, 0x02, 0x09, 0x84, 0x14, 0xa7, 0xa6, 0xad, 0x1a, 0x25, 0xd6, + 0x6b, 0x42, 0xf9, 0x71, 0xb0, 0x1c, 0xfb, 0xd5, 0xb1, 0x9a, 0xfa, 0x59, 0xb6, 0xdb, 0x26, 0xe2, + 0x9f, 0x41, 0xfc, 0x4b, 0x48, 0x9c, 0xb8, 0x72, 0xe6, 0x0a, 0x9c, 0x90, 0x9f, 0xed, 0x34, 0x09, + 0x98, 0x02, 0xe2, 0xd0, 0x8b, 0xf5, 0x66, 0xf2, 0xcd, 0x37, 0xf3, 0xcd, 0x78, 0xf2, 0x0c, 0xb7, + 0x6d, 0x4a, 0xed, 0x11, 0xa9, 0x9b, 0x23, 0x7a, 0x66, 0xd5, 0xcf, 0x9d, 0xc0, 0xa1, 0x6e, 0xfd, + 0x7c, 0xab, 0x1e, 0x92, 0x71, 0xa8, 0x1b, 0xae, 0x4b, 0x43, 
0x23, 0x74, 0xa8, 0x2b, 0x7b, 0x3e, + 0x0d, 0x29, 0x5a, 0x8d, 0xd1, 0x32, 0x43, 0xcb, 0x31, 0x5a, 0x3e, 0xdf, 0xaa, 0xae, 0x25, 0x2c, + 0x86, 0xe7, 0xd4, 0x2f, 0x83, 0x82, 0x38, 0xaa, 0x7a, 0x33, 0x23, 0x87, 0x4d, 0xe8, 0x29, 0x09, + 0xfd, 0x49, 0x0c, 0xab, 0x7d, 0xe0, 0xa1, 0xdc, 0x23, 0xe3, 0xb0, 0x39, 0x25, 0x40, 0x0d, 0x10, + 0x3c, 0xc3, 0x26, 0x41, 0x85, 0xdb, 0xc8, 0x6f, 0x96, 0x1a, 0x6b, 0xf2, 0xb7, 0xf3, 0xcb, 0x9a, + 0x61, 0x13, 0x1c, 0x43, 0x11, 0x02, 0x3e, 0x2a, 0xbe, 0x92, 0xdb, 0xe0, 0x36, 0x45, 0xcc, 0xce, + 0xd5, 0x23, 0x90, 0x76, 0x48, 0x48, 0xcc, 0x90, 0x58, 0x6d, 0xc3, 0xb5, 0xcf, 0x0c, 0x9b, 0xa0, + 0xff, 0x61, 0x65, 0x94, 0x9c, 0x75, 0x93, 0x5a, 0xa4, 0xc2, 0xb1, 0x80, 0xe5, 0xd4, 0xd9, 0xa2, + 0x16, 0x41, 0xeb, 0x00, 0x26, 0x75, 0x8f, 0x1d, 0x8b, 0xb8, 0x26, 0x61, 0x94, 0x39, 0x3c, 0xe3, + 0xa9, 0xbe, 0xe7, 0x60, 0x25, 0x65, 0x56, 0x7c, 0x62, 0x9c, 0x20, 0x0c, 0x7c, 0x38, 0xf1, 0x62, + 0xb6, 0x72, 0xe3, 0x51, 0x56, 0xc5, 0xf3, 0x42, 0xe5, 0x39, 0x0e, 0x99, 0x3d, 0x7b, 0x13, 0x8f, + 0x60, 0xc6, 0x85, 0xfe, 0x01, 0xd1, 0x09, 0x74, 0xcf, 0x27, 0xc7, 0xce, 0x98, 0x15, 0x51, 0xc4, + 0x45, 0x27, 0xd0, 0x98, 0x5d, 0x33, 0x41, 0x9c, 0xe2, 0x51, 0x09, 0x96, 0xfa, 0x9d, 0x83, 0x4e, + 0xf7, 0xa8, 0x23, 0xfd, 0x81, 0x44, 0x10, 0x0e, 0xb5, 0x66, 0x4b, 0x95, 0x38, 0x54, 0x06, 0x38, + 0xec, 0x63, 0x55, 0x8f, 0xed, 0x1c, 0x42, 0x50, 0x56, 0xbb, 0x6d, 0x7d, 0xc6, 0x97, 0x47, 0x00, + 0x85, 0xbd, 0x67, 0xda, 0x9e, 0xda, 0x91, 0xf8, 0x08, 0xdf, 0xde, 0xef, 0xa8, 0xba, 0x82, 0xd5, + 0xe6, 0x81, 0x24, 0x54, 0xdf, 0x70, 0xb0, 0x1c, 0x95, 0xac, 0xf9, 0xd4, 0x23, 0x7e, 0x38, 0x41, + 0xc7, 0x80, 0xac, 0xa4, 0x66, 0x3d, 0xed, 0x58, 0x3a, 0xa6, 0x7b, 0x3f, 0x29, 0x3a, 0x1d, 0x09, + 0xfe, 0xd3, 0x5a, 0xf0, 0x04, 0xe8, 0x05, 0x94, 0xa7, 0x79, 0x06, 0x91, 0x4c, 0xa6, 0xbf, 0xd4, + 0xb8, 0xf3, 0x2b, 0x8d, 0xc5, 0x2b, 0xd6, 0xac, 0x59, 0x7b, 0xc7, 0x01, 0x1f, 0xbd, 0x3a, 0xa8, + 0x0b, 0x45, 0x2f, 0x51, 0xc6, 0x06, 0x57, 0x6a, 0x6c, 0xff, 0x20, 0xff, 0x6c, 0x53, 0xf0, 0x94, + 0x04, 0xfd, 0x0d, 0xc2, 0x85, 0x63, 0x85, 0x43, 0x56, 0xad, 0x80, 0x63, 0x03, 0xad, 0x42, 0x61, + 0x48, 0x1c, 0x7b, 0x18, 0x56, 0xf2, 0xcc, 0x9d, 0x58, 0xe8, 0x2e, 0x14, 0x06, 0x23, 0x6a, 0x9e, + 0x04, 0x15, 0x9e, 0x35, 0xf0, 0xdf, 0xac, 0xe4, 0x4a, 0x84, 0xc2, 0x09, 0x78, 0xe1, 0xe5, 0x14, + 0x16, 0x5f, 0xce, 0xda, 0xab, 0x3c, 0x08, 0x2c, 0xe2, 0xf7, 0xeb, 0xdb, 0x85, 0xe5, 0x01, 0x3d, + 0x73, 0x2d, 0xc7, 0xb5, 0xf5, 0x01, 0x1d, 0x27, 0x43, 0xb9, 0x91, 0x59, 0x77, 0x82, 0xd5, 0xe8, + 0x68, 0x82, 0x4b, 0x69, 0xa4, 0x42, 0xc7, 0xa8, 0x09, 0xe0, 0x19, 0xbe, 0x61, 0xfb, 0x86, 0x37, + 0x0c, 0x2a, 0x79, 0x26, 0xff, 0xbf, 0xec, 0x35, 0x4f, 0x90, 0x78, 0x26, 0x08, 0x3d, 0x06, 0x60, + 0x0d, 0xd1, 0xd9, 0xde, 0xf1, 0x6c, 0xef, 0x6e, 0x7d, 0xb7, 0x83, 0xf1, 0x93, 0x2d, 0x98, 0x38, + 0x48, 0x8f, 0x57, 0xb6, 0x13, 0x83, 0x38, 0x8d, 0x9b, 0x5f, 0xb4, 0x22, 0xf0, 0x3d, 0xf5, 0x69, + 0x4f, 0xe2, 0xa2, 0x95, 0xeb, 0x35, 0x95, 0x76, 0xb4, 0x62, 0x25, 0x58, 0xd2, 0xf6, 0x5b, 0xbd, + 0x3e, 0x8e, 0x76, 0x4b, 0x04, 0x01, 0xf7, 0xdb, 0x2a, 0x96, 0xf8, 0xc8, 0xaf, 0x34, 0x71, 0xab, + 0xbb, 0xa3, 0x4a, 0x42, 0xed, 0x13, 0x07, 0xe2, 0x54, 0xd5, 0x35, 0x1e, 0x53, 0x03, 0x84, 0x0b, + 0xea, 0x5b, 0xe9, 0x84, 0x32, 0xff, 0x88, 0x8f, 0xa8, 0x6f, 0xe1, 0x18, 0xba, 0xd0, 0x4f, 0xfe, + 0xab, 0x7e, 0x7e, 0xe6, 0x80, 0x8f, 0xf0, 0xd7, 0x58, 0xf6, 0x7d, 0x58, 0x0a, 0x26, 0xa7, 0x03, + 0x3a, 0x4a, 0x85, 0xaf, 0x67, 0x71, 0x1c, 0x32, 0x18, 0x4e, 0xe1, 0x57, 0x8a, 0x7f, 0xcb, 0x41, + 0x21, 0x8e, 0xb9, 0xc6, 0xf2, 0xd3, 0xab, 0x34, 0x7f, 0x79, 0x95, 0x5e, 0x25, 0x4c, 
0x79, 0x09, + 0x55, 0x93, 0x9e, 0x66, 0xe4, 0x52, 0xfe, 0x9a, 0x57, 0xa0, 0x45, 0x17, 0xbf, 0xc6, 0x3d, 0x7f, + 0x98, 0xc0, 0x6d, 0x1a, 0xdd, 0x25, 0x32, 0xf5, 0xed, 0xba, 0x4d, 0x5c, 0xf6, 0x59, 0x50, 0x8f, + 0x7f, 0x32, 0x3c, 0x27, 0x58, 0xfc, 0x80, 0x78, 0x10, 0x9f, 0x3e, 0x72, 0xdc, 0xeb, 0x1c, 0xbf, + 0xdb, 0x7a, 0xd2, 0x19, 0x14, 0x58, 0xc8, 0xf6, 0x97, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc4, 0x88, + 0x57, 0x38, 0xd6, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/web_detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/web_detection.pb.go new file mode 100644 index 0000000..9880f23 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1/web_detection.pb.go @@ -0,0 +1,396 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1/web_detection.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Relevant information for the image from the Internet. +type WebDetection struct { + // Deduced entities from similar images on the Internet. + WebEntities []*WebDetection_WebEntity `protobuf:"bytes,1,rep,name=web_entities,json=webEntities,proto3" json:"web_entities,omitempty"` + // Fully matching images from the Internet. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,2,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images from the Internet. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,3,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + // Web pages containing the matching images from the Internet. + PagesWithMatchingImages []*WebDetection_WebPage `protobuf:"bytes,4,rep,name=pages_with_matching_images,json=pagesWithMatchingImages,proto3" json:"pages_with_matching_images,omitempty"` + // The visually similar image results. + VisuallySimilarImages []*WebDetection_WebImage `protobuf:"bytes,6,rep,name=visually_similar_images,json=visuallySimilarImages,proto3" json:"visually_similar_images,omitempty"` + // The service's best guess as to the topic of the request image. + // Inferred from similar images on the open web. 
+ BestGuessLabels []*WebDetection_WebLabel `protobuf:"bytes,8,rep,name=best_guess_labels,json=bestGuessLabels,proto3" json:"best_guess_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection) Reset() { *m = WebDetection{} } +func (m *WebDetection) String() string { return proto.CompactTextString(m) } +func (*WebDetection) ProtoMessage() {} +func (*WebDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_bc054784537d86ab, []int{0} +} +func (m *WebDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection.Unmarshal(m, b) +} +func (m *WebDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection.Marshal(b, m, deterministic) +} +func (dst *WebDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection.Merge(dst, src) +} +func (m *WebDetection) XXX_Size() int { + return xxx_messageInfo_WebDetection.Size(m) +} +func (m *WebDetection) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection proto.InternalMessageInfo + +func (m *WebDetection) GetWebEntities() []*WebDetection_WebEntity { + if m != nil { + return m.WebEntities + } + return nil +} + +func (m *WebDetection) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func (m *WebDetection) GetPagesWithMatchingImages() []*WebDetection_WebPage { + if m != nil { + return m.PagesWithMatchingImages + } + return nil +} + +func (m *WebDetection) GetVisuallySimilarImages() []*WebDetection_WebImage { + if m != nil { + return m.VisuallySimilarImages + } + return nil +} + +func (m *WebDetection) GetBestGuessLabels() []*WebDetection_WebLabel { + if m != nil { + return m.BestGuessLabels + } + return nil +} + +// Entity deduced from similar images on the Internet. +type WebDetection_WebEntity struct { + // Opaque entity ID. + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Overall relevancy score for the entity. + // Not normalized and not comparable across different image queries. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Canonical description of the entity, in English. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebEntity) Reset() { *m = WebDetection_WebEntity{} } +func (m *WebDetection_WebEntity) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebEntity) ProtoMessage() {} +func (*WebDetection_WebEntity) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_bc054784537d86ab, []int{0, 0} +} +func (m *WebDetection_WebEntity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebEntity.Unmarshal(m, b) +} +func (m *WebDetection_WebEntity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebEntity.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebEntity) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebEntity.Merge(dst, src) +} +func (m *WebDetection_WebEntity) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebEntity.Size(m) +} +func (m *WebDetection_WebEntity) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebEntity.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebEntity proto.InternalMessageInfo + +func (m *WebDetection_WebEntity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *WebDetection_WebEntity) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebEntity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Metadata for online images. +type WebDetection_WebImage struct { + // The result image URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the image. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebImage) Reset() { *m = WebDetection_WebImage{} } +func (m *WebDetection_WebImage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebImage) ProtoMessage() {} +func (*WebDetection_WebImage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_bc054784537d86ab, []int{0, 1} +} +func (m *WebDetection_WebImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebImage.Unmarshal(m, b) +} +func (m *WebDetection_WebImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebImage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebImage.Merge(dst, src) +} +func (m *WebDetection_WebImage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebImage.Size(m) +} +func (m *WebDetection_WebImage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebImage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebImage proto.InternalMessageInfo + +func (m *WebDetection_WebImage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebImage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Label to provide extra metadata for the web detection. +type WebDetection_WebLabel struct { + // Label for extra metadata. 
+ Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebLabel) Reset() { *m = WebDetection_WebLabel{} } +func (m *WebDetection_WebLabel) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebLabel) ProtoMessage() {} +func (*WebDetection_WebLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_bc054784537d86ab, []int{0, 2} +} +func (m *WebDetection_WebLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebLabel.Unmarshal(m, b) +} +func (m *WebDetection_WebLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebLabel.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebLabel.Merge(dst, src) +} +func (m *WebDetection_WebLabel) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebLabel.Size(m) +} +func (m *WebDetection_WebLabel) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebLabel proto.InternalMessageInfo + +func (m *WebDetection_WebLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *WebDetection_WebLabel) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Metadata for web pages. +type WebDetection_WebPage struct { + // The result web page URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the web page. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Title for the web page, may contain HTML markups. + PageTitle string `protobuf:"bytes,3,opt,name=page_title,json=pageTitle,proto3" json:"page_title,omitempty"` + // Fully matching images on the page. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,4,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images on the page. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its + // crops. 
+ PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,5,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebPage) Reset() { *m = WebDetection_WebPage{} } +func (m *WebDetection_WebPage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebPage) ProtoMessage() {} +func (*WebDetection_WebPage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_bc054784537d86ab, []int{0, 3} +} +func (m *WebDetection_WebPage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebPage.Unmarshal(m, b) +} +func (m *WebDetection_WebPage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebPage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebPage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebPage.Merge(dst, src) +} +func (m *WebDetection_WebPage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebPage.Size(m) +} +func (m *WebDetection_WebPage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebPage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebPage proto.InternalMessageInfo + +func (m *WebDetection_WebPage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebPage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebPage) GetPageTitle() string { + if m != nil { + return m.PageTitle + } + return "" +} + +func (m *WebDetection_WebPage) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection_WebPage) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func init() { + proto.RegisterType((*WebDetection)(nil), "google.cloud.vision.v1.WebDetection") + proto.RegisterType((*WebDetection_WebEntity)(nil), "google.cloud.vision.v1.WebDetection.WebEntity") + proto.RegisterType((*WebDetection_WebImage)(nil), "google.cloud.vision.v1.WebDetection.WebImage") + proto.RegisterType((*WebDetection_WebLabel)(nil), "google.cloud.vision.v1.WebDetection.WebLabel") + proto.RegisterType((*WebDetection_WebPage)(nil), "google.cloud.vision.v1.WebDetection.WebPage") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1/web_detection.proto", fileDescriptor_web_detection_bc054784537d86ab) +} + +var fileDescriptor_web_detection_bc054784537d86ab = []byte{ + // 511 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0xdf, 0x6e, 0xd3, 0x30, + 0x14, 0xc6, 0x95, 0xb6, 0x1b, 0xad, 0x5b, 0x04, 0xb3, 0x06, 0x8b, 0x02, 0x48, 0x15, 0xdc, 0x54, + 0x08, 0x12, 0x6d, 0x5c, 0xc2, 0xd5, 0xc6, 0x34, 0x4d, 0x02, 0x54, 0x02, 0x62, 0x82, 0x1b, 0xe3, + 0x24, 0xc6, 0x3d, 0x92, 0x1b, 0x47, 0xb1, 0xd3, 0xaa, 0x6f, 0xc2, 0x35, 0x0f, 0xc4, 0xf3, 0x70, + 0x89, 0xfc, 0x27, 0xa8, 0x5a, 0x37, 0xa9, 0x4c, 0x88, 0xbb, 0x73, 0x4e, 0xcf, 0xf7, 0xfb, 0x7a, + 0x62, 0xfb, 0xa0, 0xa7, 0x5c, 0x4a, 0x2e, 0x58, 0x92, 0x0b, 0xd9, 0x14, 0xc9, 0x02, 0x14, 0xc8, + 0x32, 0x59, 0x1c, 0x26, 0x4b, 0x96, 0x91, 0x82, 0x69, 0x96, 0x6b, 0x90, 0x65, 0x5c, 0xd5, 0x52, + 0x4b, 0x7c, 0xdf, 0xf5, 0xc6, 0xb6, 0x37, 0x76, 0xbd, 0xf1, 0xe2, 0x30, 0x7a, 0xe8, 0x19, 0xb4, + 0x82, 0x84, 0x96, 0xa5, 0xd4, 0xd4, 0x88, 
0x94, 0x53, 0x3d, 0xfe, 0xd9, 0x47, 0xa3, 0x0b, 0x96, + 0xbd, 0x6e, 0x61, 0xf8, 0x3d, 0x1a, 0x19, 0x3a, 0x2b, 0x35, 0x68, 0x60, 0x2a, 0x0c, 0xc6, 0xdd, + 0xc9, 0xf0, 0x28, 0x8e, 0xaf, 0xa6, 0xc7, 0xeb, 0x5a, 0x93, 0x9c, 0x1a, 0xdd, 0x2a, 0x1d, 0x2e, + 0x7d, 0x08, 0x4c, 0x61, 0x82, 0xf6, 0xbf, 0x35, 0x42, 0x90, 0x39, 0xd5, 0xf9, 0x0c, 0x4a, 0x4e, + 0x60, 0x4e, 0x39, 0x53, 0x61, 0xc7, 0xa2, 0x9f, 0x6f, 0x8b, 0x3e, 0x37, 0xaa, 0x14, 0x1b, 0xd4, + 0x5b, 0x4f, 0xb2, 0x25, 0x85, 0x19, 0x3a, 0xa8, 0x68, 0xad, 0x81, 0x6e, 0x7a, 0x74, 0x6f, 0xe2, + 0x71, 0xcf, 0xd3, 0x2e, 0xd9, 0x00, 0x8a, 0x2a, 0x13, 0x90, 0x25, 0xe8, 0xd9, 0x86, 0x53, 0xcf, + 0x3a, 0x3d, 0xdb, 0xd6, 0x69, 0x6a, 0x8c, 0x0e, 0x2c, 0xef, 0x02, 0xf4, 0x6c, 0x73, 0xa2, 0x05, + 0xa8, 0x86, 0x0a, 0xb1, 0x22, 0x0a, 0xe6, 0x20, 0x68, 0xdd, 0xfa, 0xec, 0xde, 0x68, 0xa2, 0x96, + 0xf6, 0xc1, 0xc1, 0xbc, 0xcd, 0x67, 0xb4, 0x97, 0x31, 0xa5, 0x09, 0x6f, 0x98, 0x52, 0x44, 0xd0, + 0x8c, 0x09, 0x15, 0xf6, 0xff, 0xce, 0xe0, 0x8d, 0x51, 0xa5, 0x77, 0x0c, 0xe7, 0xcc, 0x60, 0x6c, + 0xae, 0xa2, 0xaf, 0x68, 0xf0, 0xe7, 0x3a, 0xe0, 0x07, 0x68, 0x60, 0x2f, 0xd4, 0x8a, 0x40, 0x11, + 0x06, 0xe3, 0x60, 0x32, 0x48, 0xfb, 0xae, 0x70, 0x5e, 0xe0, 0x7d, 0xb4, 0xa3, 0x72, 0x59, 0xb3, + 0xb0, 0x33, 0x0e, 0x26, 0x9d, 0xd4, 0x25, 0x78, 0x8c, 0x86, 0x05, 0x53, 0x79, 0x0d, 0x95, 0x31, + 0x0a, 0xbb, 0x56, 0xb4, 0x5e, 0x8a, 0x8e, 0x50, 0xbf, 0x9d, 0x0f, 0xdf, 0x45, 0xdd, 0xa6, 0x16, + 0x1e, 0x6d, 0xc2, 0xab, 0xa9, 0xd1, 0xa9, 0xd5, 0xd8, 0xbf, 0x68, 0x3a, 0xec, 0xc4, 0x5e, 0xe5, + 0x12, 0xfc, 0x04, 0xdd, 0x16, 0xb4, 0xe4, 0x0d, 0xe5, 0x8c, 0xe4, 0xb2, 0x70, 0xfa, 0x41, 0x3a, + 0x6a, 0x8b, 0x27, 0xb2, 0x60, 0xd1, 0xf7, 0x0e, 0xba, 0xe5, 0xcf, 0x70, 0x5b, 0x6b, 0xfc, 0x08, + 0x21, 0x73, 0xda, 0x44, 0x83, 0x16, 0xcc, 0xcf, 0x33, 0x30, 0x95, 0x8f, 0xa6, 0x70, 0xed, 0x23, + 0xe9, 0xfd, 0x87, 0x47, 0xb2, 0xf3, 0xef, 0x1e, 0xc9, 0xf1, 0x0a, 0x45, 0xb9, 0x9c, 0x5f, 0x83, + 0x3a, 0xde, 0x5b, 0x67, 0x4d, 0xcd, 0x06, 0x9a, 0x06, 0x5f, 0x5e, 0xf9, 0x66, 0x2e, 0xcd, 0x47, + 0x8e, 0x65, 0xcd, 0x13, 0xce, 0x4a, 0xbb, 0x9f, 0x12, 0xf7, 0x13, 0xad, 0x40, 0x5d, 0x5e, 0x82, + 0x2f, 0x5d, 0xf4, 0x2b, 0x08, 0x7e, 0x74, 0x7a, 0x67, 0x27, 0x9f, 0xde, 0x65, 0xbb, 0x56, 0xf2, + 0xe2, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x38, 0x42, 0x3a, 0x04, 0x36, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/geometry.pb.go new file mode 100644 index 0000000..c3ce08d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/geometry.pb.go @@ -0,0 +1,199 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p1beta1/geometry.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. 
+type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_b5cd86c78d12f2ad, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon for the detected image annotation. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_b5cd86c78d12f2ad, []int{1} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +// A 3D position in the image, used primarily for Face detection landmarks. +// A valid Position must have both x and y coordinates. +// The position coordinates are in the same scale as the original image. +type Position struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + // Z coordinate (or depth). 
+ Z float32 `protobuf:"fixed32,3,opt,name=z,proto3" json:"z,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_b5cd86c78d12f2ad, []int{2} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Position) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Position) GetZ() float32 { + if m != nil { + return m.Z + } + return 0 +} + +func init() { + proto.RegisterType((*Vertex)(nil), "google.cloud.vision.v1p1beta1.Vertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.vision.v1p1beta1.BoundingPoly") + proto.RegisterType((*Position)(nil), "google.cloud.vision.v1p1beta1.Position") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p1beta1/geometry.proto", fileDescriptor_geometry_b5cd86c78d12f2ad) +} + +var fileDescriptor_geometry_b5cd86c78d12f2ad = []byte{ + // 243 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xb1, 0x4b, 0xc3, 0x40, + 0x14, 0x87, 0x79, 0x29, 0x96, 0x72, 0xd6, 0x25, 0x53, 0x16, 0xa1, 0x06, 0x85, 0x0e, 0x72, 0x47, + 0xd4, 0xcd, 0xc9, 0x38, 0xb8, 0xc6, 0x0c, 0x0e, 0x6e, 0x69, 0xfa, 0x78, 0x1c, 0xa4, 0xf7, 0xc2, + 0xe5, 0x1a, 0x7a, 0xc5, 0x3f, 0xdc, 0x51, 0x7a, 0x57, 0x2a, 0x0e, 0x76, 0xfc, 0xdd, 0x7d, 0x8f, + 0x0f, 0x3e, 0x71, 0x4f, 0xcc, 0xd4, 0xa1, 0x6a, 0x3b, 0xde, 0xae, 0xd5, 0xa8, 0x07, 0xcd, 0x46, + 0x8d, 0x45, 0x5f, 0xac, 0xd0, 0x35, 0x85, 0x22, 0xe4, 0x0d, 0x3a, 0xeb, 0x65, 0x6f, 0xd9, 0x71, + 0x7a, 0x1d, 0x69, 0x19, 0x68, 0x19, 0x69, 0x79, 0xa2, 0xf3, 0x5b, 0x31, 0xfd, 0x40, 0xeb, 0x70, + 0x97, 0xce, 0x05, 0xec, 0x32, 0x58, 0xc0, 0xf2, 0xa2, 0x86, 0xb0, 0x7c, 0x96, 0xc4, 0xe5, 0xf3, + 0x77, 0x31, 0x2f, 0x79, 0x6b, 0xd6, 0xda, 0x50, 0xc5, 0x9d, 0x4f, 0x5f, 0xc4, 0x6c, 0x44, 0xeb, + 0x74, 0x8b, 0x43, 0x06, 0x8b, 0xc9, 0xf2, 0xf2, 0xe1, 0x4e, 0x9e, 0xf5, 0xc8, 0x28, 0xa9, 0x4f, + 0x67, 0xf9, 0x93, 0x98, 0x55, 0x3c, 0x68, 0xa7, 0xd9, 0xfc, 0xaa, 0x93, 0x3f, 0xea, 0xa4, 0x06, + 0x7f, 0x58, 0xfb, 0x6c, 0x12, 0xd7, 0xbe, 0xfc, 0x12, 0x37, 0x2d, 0x6f, 0xce, 0xbb, 0xca, 0xab, + 0xb7, 0x63, 0x82, 0xea, 0x50, 0xa0, 0x82, 0xcf, 0xd7, 0x23, 0x4f, 0xdc, 0x35, 0x86, 0x24, 0x5b, + 0x52, 0x84, 0x26, 0xf4, 0x51, 0xf1, 0xab, 0xe9, 0xf5, 0xf0, 0x4f, 0xd0, 0xe7, 0xf8, 0xf0, 0x0d, + 0xb0, 0x9a, 0x86, 0x93, 0xc7, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x91, 0xa5, 0x86, 0xce, 0x82, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/image_annotator.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/image_annotator.pb.go new file mode 100644 index 0000000..2bd5a88 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/image_annotator.pb.go @@ -0,0 +1,1998 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p1beta1/image_annotator.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import color "google.golang.org/genproto/googleapis/type/color" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // It is very unlikely that the image belongs to the specified vertical. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // It is unlikely that the image belongs to the specified vertical. + Likelihood_UNLIKELY Likelihood = 2 + // It is possible that the image belongs to the specified vertical. + Likelihood_POSSIBLE Likelihood = 3 + // It is likely that the image belongs to the specified vertical. + Likelihood_LIKELY Likelihood = 4 + // It is very likely that the image belongs to the specified vertical. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{0} +} + +// Type of image feature. +type Feature_Type int32 + +const ( + // Unspecified feature type. + Feature_TYPE_UNSPECIFIED Feature_Type = 0 + // Run face detection. + Feature_FACE_DETECTION Feature_Type = 1 + // Run landmark detection. + Feature_LANDMARK_DETECTION Feature_Type = 2 + // Run logo detection. + Feature_LOGO_DETECTION Feature_Type = 3 + // Run label detection. + Feature_LABEL_DETECTION Feature_Type = 4 + // Run OCR. + Feature_TEXT_DETECTION Feature_Type = 5 + // Run dense text document OCR. Takes precedence when both + // DOCUMENT_TEXT_DETECTION and TEXT_DETECTION are present. + Feature_DOCUMENT_TEXT_DETECTION Feature_Type = 11 + // Run computer vision models to compute image safe-search properties. + Feature_SAFE_SEARCH_DETECTION Feature_Type = 6 + // Compute a set of image properties, such as the image's dominant colors. + Feature_IMAGE_PROPERTIES Feature_Type = 7 + // Run crop hints. + Feature_CROP_HINTS Feature_Type = 9 + // Run web detection. 
+ Feature_WEB_DETECTION Feature_Type = 10 +) + +var Feature_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "FACE_DETECTION", + 2: "LANDMARK_DETECTION", + 3: "LOGO_DETECTION", + 4: "LABEL_DETECTION", + 5: "TEXT_DETECTION", + 11: "DOCUMENT_TEXT_DETECTION", + 6: "SAFE_SEARCH_DETECTION", + 7: "IMAGE_PROPERTIES", + 9: "CROP_HINTS", + 10: "WEB_DETECTION", +} +var Feature_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "FACE_DETECTION": 1, + "LANDMARK_DETECTION": 2, + "LOGO_DETECTION": 3, + "LABEL_DETECTION": 4, + "TEXT_DETECTION": 5, + "DOCUMENT_TEXT_DETECTION": 11, + "SAFE_SEARCH_DETECTION": 6, + "IMAGE_PROPERTIES": 7, + "CROP_HINTS": 9, + "WEB_DETECTION": 10, +} + +func (x Feature_Type) String() string { + return proto.EnumName(Feature_Type_name, int32(x)) +} +func (Feature_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{0, 0} +} + +// Face landmark (feature) type. +// Left and right are defined from the vantage of the viewer of the image +// without considering mirror projections typical of photos. So, `LEFT_EYE`, +// typically, is the person's right eye. +type FaceAnnotation_Landmark_Type int32 + +const ( + // Unknown face landmark detected. Should not be filled. + FaceAnnotation_Landmark_UNKNOWN_LANDMARK FaceAnnotation_Landmark_Type = 0 + // Left eye. + FaceAnnotation_Landmark_LEFT_EYE FaceAnnotation_Landmark_Type = 1 + // Right eye. + FaceAnnotation_Landmark_RIGHT_EYE FaceAnnotation_Landmark_Type = 2 + // Left of left eyebrow. + FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 3 + // Right of left eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 4 + // Left of right eyebrow. + FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 5 + // Right of right eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 6 + // Midpoint between eyes. + FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES FaceAnnotation_Landmark_Type = 7 + // Nose tip. + FaceAnnotation_Landmark_NOSE_TIP FaceAnnotation_Landmark_Type = 8 + // Upper lip. + FaceAnnotation_Landmark_UPPER_LIP FaceAnnotation_Landmark_Type = 9 + // Lower lip. + FaceAnnotation_Landmark_LOWER_LIP FaceAnnotation_Landmark_Type = 10 + // Mouth left. + FaceAnnotation_Landmark_MOUTH_LEFT FaceAnnotation_Landmark_Type = 11 + // Mouth right. + FaceAnnotation_Landmark_MOUTH_RIGHT FaceAnnotation_Landmark_Type = 12 + // Mouth center. + FaceAnnotation_Landmark_MOUTH_CENTER FaceAnnotation_Landmark_Type = 13 + // Nose, bottom right. + FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT FaceAnnotation_Landmark_Type = 14 + // Nose, bottom left. + FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT FaceAnnotation_Landmark_Type = 15 + // Nose, bottom center. + FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER FaceAnnotation_Landmark_Type = 16 + // Left eye, top boundary. + FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 17 + // Left eye, right corner. + FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 18 + // Left eye, bottom boundary. + FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 19 + // Left eye, left corner. + FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 20 + // Right eye, top boundary. + FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 21 + // Right eye, right corner. 
+ FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 22 + // Right eye, bottom boundary. + FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 23 + // Right eye, left corner. + FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 24 + // Left eyebrow, upper midpoint. + FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 25 + // Right eyebrow, upper midpoint. + FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 26 + // Left ear tragion. + FaceAnnotation_Landmark_LEFT_EAR_TRAGION FaceAnnotation_Landmark_Type = 27 + // Right ear tragion. + FaceAnnotation_Landmark_RIGHT_EAR_TRAGION FaceAnnotation_Landmark_Type = 28 + // Left eye pupil. + FaceAnnotation_Landmark_LEFT_EYE_PUPIL FaceAnnotation_Landmark_Type = 29 + // Right eye pupil. + FaceAnnotation_Landmark_RIGHT_EYE_PUPIL FaceAnnotation_Landmark_Type = 30 + // Forehead glabella. + FaceAnnotation_Landmark_FOREHEAD_GLABELLA FaceAnnotation_Landmark_Type = 31 + // Chin gnathion. + FaceAnnotation_Landmark_CHIN_GNATHION FaceAnnotation_Landmark_Type = 32 + // Chin left gonion. + FaceAnnotation_Landmark_CHIN_LEFT_GONION FaceAnnotation_Landmark_Type = 33 + // Chin right gonion. + FaceAnnotation_Landmark_CHIN_RIGHT_GONION FaceAnnotation_Landmark_Type = 34 +) + +var FaceAnnotation_Landmark_Type_name = map[int32]string{ + 0: "UNKNOWN_LANDMARK", + 1: "LEFT_EYE", + 2: "RIGHT_EYE", + 3: "LEFT_OF_LEFT_EYEBROW", + 4: "RIGHT_OF_LEFT_EYEBROW", + 5: "LEFT_OF_RIGHT_EYEBROW", + 6: "RIGHT_OF_RIGHT_EYEBROW", + 7: "MIDPOINT_BETWEEN_EYES", + 8: "NOSE_TIP", + 9: "UPPER_LIP", + 10: "LOWER_LIP", + 11: "MOUTH_LEFT", + 12: "MOUTH_RIGHT", + 13: "MOUTH_CENTER", + 14: "NOSE_BOTTOM_RIGHT", + 15: "NOSE_BOTTOM_LEFT", + 16: "NOSE_BOTTOM_CENTER", + 17: "LEFT_EYE_TOP_BOUNDARY", + 18: "LEFT_EYE_RIGHT_CORNER", + 19: "LEFT_EYE_BOTTOM_BOUNDARY", + 20: "LEFT_EYE_LEFT_CORNER", + 21: "RIGHT_EYE_TOP_BOUNDARY", + 22: "RIGHT_EYE_RIGHT_CORNER", + 23: "RIGHT_EYE_BOTTOM_BOUNDARY", + 24: "RIGHT_EYE_LEFT_CORNER", + 25: "LEFT_EYEBROW_UPPER_MIDPOINT", + 26: "RIGHT_EYEBROW_UPPER_MIDPOINT", + 27: "LEFT_EAR_TRAGION", + 28: "RIGHT_EAR_TRAGION", + 29: "LEFT_EYE_PUPIL", + 30: "RIGHT_EYE_PUPIL", + 31: "FOREHEAD_GLABELLA", + 32: "CHIN_GNATHION", + 33: "CHIN_LEFT_GONION", + 34: "CHIN_RIGHT_GONION", +} +var FaceAnnotation_Landmark_Type_value = map[string]int32{ + "UNKNOWN_LANDMARK": 0, + "LEFT_EYE": 1, + "RIGHT_EYE": 2, + "LEFT_OF_LEFT_EYEBROW": 3, + "RIGHT_OF_LEFT_EYEBROW": 4, + "LEFT_OF_RIGHT_EYEBROW": 5, + "RIGHT_OF_RIGHT_EYEBROW": 6, + "MIDPOINT_BETWEEN_EYES": 7, + "NOSE_TIP": 8, + "UPPER_LIP": 9, + "LOWER_LIP": 10, + "MOUTH_LEFT": 11, + "MOUTH_RIGHT": 12, + "MOUTH_CENTER": 13, + "NOSE_BOTTOM_RIGHT": 14, + "NOSE_BOTTOM_LEFT": 15, + "NOSE_BOTTOM_CENTER": 16, + "LEFT_EYE_TOP_BOUNDARY": 17, + "LEFT_EYE_RIGHT_CORNER": 18, + "LEFT_EYE_BOTTOM_BOUNDARY": 19, + "LEFT_EYE_LEFT_CORNER": 20, + "RIGHT_EYE_TOP_BOUNDARY": 21, + "RIGHT_EYE_RIGHT_CORNER": 22, + "RIGHT_EYE_BOTTOM_BOUNDARY": 23, + "RIGHT_EYE_LEFT_CORNER": 24, + "LEFT_EYEBROW_UPPER_MIDPOINT": 25, + "RIGHT_EYEBROW_UPPER_MIDPOINT": 26, + "LEFT_EAR_TRAGION": 27, + "RIGHT_EAR_TRAGION": 28, + "LEFT_EYE_PUPIL": 29, + "RIGHT_EYE_PUPIL": 30, + "FOREHEAD_GLABELLA": 31, + "CHIN_GNATHION": 32, + "CHIN_LEFT_GONION": 33, + "CHIN_RIGHT_GONION": 34, +} + +func (x FaceAnnotation_Landmark_Type) String() string { + return proto.EnumName(FaceAnnotation_Landmark_Type_name, int32(x)) +} +func (FaceAnnotation_Landmark_Type) 
EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{3, 0, 0} +} + +// Users describe the type of Google Cloud Vision API tasks to perform over +// images by using *Feature*s. Each Feature indicates a type of image +// detection task to perform. Features encode the Cloud Vision API +// vertical to operate on and the number of top-scoring results to return. +type Feature struct { + // The feature type. + Type Feature_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p1beta1.Feature_Type" json:"type,omitempty"` + // Maximum number of results of this type. + MaxResults int32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Model to use for the feature. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{0} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetType() Feature_Type { + if m != nil { + return m.Type + } + return Feature_TYPE_UNSPECIFIED +} + +func (m *Feature) GetMaxResults() int32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *Feature) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// External image source (Google Cloud Storage image location). +type ImageSource struct { + // NOTE: For new code `image_uri` below is preferred. + // Google Cloud Storage image URI, which must be in the following form: + // `gs://bucket_name/object_name` (for details, see + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris)). + // NOTE: Cloud Storage object versioning is not supported. + GcsImageUri string `protobuf:"bytes,1,opt,name=gcs_image_uri,json=gcsImageUri,proto3" json:"gcs_image_uri,omitempty"` + // Image URI which supports: + // 1) Google Cloud Storage image URI, which must be in the following form: + // `gs://bucket_name/object_name` (for details, see + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris)). + // NOTE: Cloud Storage object versioning is not supported. + // 2) Publicly accessible image HTTP/HTTPS URL. + // This is preferred over the legacy `gcs_image_uri` above. When both + // `gcs_image_uri` and `image_uri` are specified, `image_uri` takes + // precedence. 
+ ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSource) Reset() { *m = ImageSource{} } +func (m *ImageSource) String() string { return proto.CompactTextString(m) } +func (*ImageSource) ProtoMessage() {} +func (*ImageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{1} +} +func (m *ImageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSource.Unmarshal(m, b) +} +func (m *ImageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSource.Marshal(b, m, deterministic) +} +func (dst *ImageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSource.Merge(dst, src) +} +func (m *ImageSource) XXX_Size() int { + return xxx_messageInfo_ImageSource.Size(m) +} +func (m *ImageSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSource proto.InternalMessageInfo + +func (m *ImageSource) GetGcsImageUri() string { + if m != nil { + return m.GcsImageUri + } + return "" +} + +func (m *ImageSource) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Client image to perform Google Cloud Vision API tasks over. +type Image struct { + // Image content, represented as a stream of bytes. + // Note: as with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // Google Cloud Storage image location. If both `content` and `source` + // are provided for an image, `content` takes precedence and is + // used to perform the image annotation request. + Source *ImageSource `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{2} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +func (m *Image) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *Image) GetSource() *ImageSource { + if m != nil { + return m.Source + } + return nil +} + +// A face annotation object contains the results of face detection. +type FaceAnnotation struct { + // The bounding polygon around the face. The coordinates of the bounding box + // are in the original image's scale, as returned in `ImageParams`. + // The bounding box is computed to "frame" the face in accordance with human + // expectations. It is based on the landmarker results. 
+ // Note that one or more x and/or y coordinates may not be generated in the + // `BoundingPoly` (the polygon will be unbounded) if only a partial face + // appears in the image to be annotated. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The `fd_bounding_poly` bounding polygon is tighter than the + // `boundingPoly`, and encloses only the skin part of the face. Typically, it + // is used to eliminate the face from any image analysis that detects the + // "amount of skin" visible in an image. It is not based on the + // landmarker results, only on the initial face detection, hence + // the fd (face detection) prefix. + FdBoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=fd_bounding_poly,json=fdBoundingPoly,proto3" json:"fd_bounding_poly,omitempty"` + // Detected face landmarks. + Landmarks []*FaceAnnotation_Landmark `protobuf:"bytes,3,rep,name=landmarks,proto3" json:"landmarks,omitempty"` + // Roll angle, which indicates the amount of clockwise/anti-clockwise rotation + // of the face relative to the image vertical about the axis perpendicular to + // the face. Range [-180,180]. + RollAngle float32 `protobuf:"fixed32,4,opt,name=roll_angle,json=rollAngle,proto3" json:"roll_angle,omitempty"` + // Yaw angle, which indicates the leftward/rightward angle that the face is + // pointing relative to the vertical plane perpendicular to the image. Range + // [-180,180]. + PanAngle float32 `protobuf:"fixed32,5,opt,name=pan_angle,json=panAngle,proto3" json:"pan_angle,omitempty"` + // Pitch angle, which indicates the upwards/downwards angle that the face is + // pointing relative to the image's horizontal plane. Range [-180,180]. + TiltAngle float32 `protobuf:"fixed32,6,opt,name=tilt_angle,json=tiltAngle,proto3" json:"tilt_angle,omitempty"` + // Detection confidence. Range [0, 1]. + DetectionConfidence float32 `protobuf:"fixed32,7,opt,name=detection_confidence,json=detectionConfidence,proto3" json:"detection_confidence,omitempty"` + // Face landmarking confidence. Range [0, 1]. + LandmarkingConfidence float32 `protobuf:"fixed32,8,opt,name=landmarking_confidence,json=landmarkingConfidence,proto3" json:"landmarking_confidence,omitempty"` + // Joy likelihood. + JoyLikelihood Likelihood `protobuf:"varint,9,opt,name=joy_likelihood,json=joyLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"joy_likelihood,omitempty"` + // Sorrow likelihood. + SorrowLikelihood Likelihood `protobuf:"varint,10,opt,name=sorrow_likelihood,json=sorrowLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"sorrow_likelihood,omitempty"` + // Anger likelihood. + AngerLikelihood Likelihood `protobuf:"varint,11,opt,name=anger_likelihood,json=angerLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"anger_likelihood,omitempty"` + // Surprise likelihood. + SurpriseLikelihood Likelihood `protobuf:"varint,12,opt,name=surprise_likelihood,json=surpriseLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"surprise_likelihood,omitempty"` + // Under-exposed likelihood. + UnderExposedLikelihood Likelihood `protobuf:"varint,13,opt,name=under_exposed_likelihood,json=underExposedLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"under_exposed_likelihood,omitempty"` + // Blurred likelihood. 
+ BlurredLikelihood Likelihood `protobuf:"varint,14,opt,name=blurred_likelihood,json=blurredLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"blurred_likelihood,omitempty"` + // Headwear likelihood. + HeadwearLikelihood Likelihood `protobuf:"varint,15,opt,name=headwear_likelihood,json=headwearLikelihood,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"headwear_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{3} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetFdBoundingPoly() *BoundingPoly { + if m != nil { + return m.FdBoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetLandmarks() []*FaceAnnotation_Landmark { + if m != nil { + return m.Landmarks + } + return nil +} + +func (m *FaceAnnotation) GetRollAngle() float32 { + if m != nil { + return m.RollAngle + } + return 0 +} + +func (m *FaceAnnotation) GetPanAngle() float32 { + if m != nil { + return m.PanAngle + } + return 0 +} + +func (m *FaceAnnotation) GetTiltAngle() float32 { + if m != nil { + return m.TiltAngle + } + return 0 +} + +func (m *FaceAnnotation) GetDetectionConfidence() float32 { + if m != nil { + return m.DetectionConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetLandmarkingConfidence() float32 { + if m != nil { + return m.LandmarkingConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetJoyLikelihood() Likelihood { + if m != nil { + return m.JoyLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSorrowLikelihood() Likelihood { + if m != nil { + return m.SorrowLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetAngerLikelihood() Likelihood { + if m != nil { + return m.AngerLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSurpriseLikelihood() Likelihood { + if m != nil { + return m.SurpriseLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetUnderExposedLikelihood() Likelihood { + if m != nil { + return m.UnderExposedLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetBlurredLikelihood() Likelihood { + if m != nil { + return m.BlurredLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetHeadwearLikelihood() Likelihood { + if m != nil { + return m.HeadwearLikelihood + } + return Likelihood_UNKNOWN +} + +// A face-specific landmark (for example, a face feature). 
+type FaceAnnotation_Landmark struct { + // Face landmark type. + Type FaceAnnotation_Landmark_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.cloud.vision.v1p1beta1.FaceAnnotation_Landmark_Type" json:"type,omitempty"` + // Face landmark position. + Position *Position `protobuf:"bytes,4,opt,name=position,proto3" json:"position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation_Landmark) Reset() { *m = FaceAnnotation_Landmark{} } +func (m *FaceAnnotation_Landmark) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation_Landmark) ProtoMessage() {} +func (*FaceAnnotation_Landmark) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{3, 0} +} +func (m *FaceAnnotation_Landmark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation_Landmark.Unmarshal(m, b) +} +func (m *FaceAnnotation_Landmark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation_Landmark.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation_Landmark) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation_Landmark.Merge(dst, src) +} +func (m *FaceAnnotation_Landmark) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation_Landmark.Size(m) +} +func (m *FaceAnnotation_Landmark) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation_Landmark.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation_Landmark proto.InternalMessageInfo + +func (m *FaceAnnotation_Landmark) GetType() FaceAnnotation_Landmark_Type { + if m != nil { + return m.Type + } + return FaceAnnotation_Landmark_UNKNOWN_LANDMARK +} + +func (m *FaceAnnotation_Landmark) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +// Detected entity location information. +type LocationInfo struct { + // lat/long location coordinates. + LatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +// A `Property` consists of a user-supplied name/value pair. +type Property struct { + // Name of the property. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value of the property. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Value of numeric properties. 
+ Uint64Value uint64 `protobuf:"varint,3,opt,name=uint64_value,json=uint64Value,proto3" json:"uint64_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{5} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Property) GetUint64Value() uint64 { + if m != nil { + return m.Uint64Value + } + return 0 +} + +// Set of detected entity features. +type EntityAnnotation struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The language code for the locale in which the entity textual + // `description` is expressed. + Locale string `protobuf:"bytes,2,opt,name=locale,proto3" json:"locale,omitempty"` + // Entity textual description, expressed in its `locale` language. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Overall score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // The accuracy of the entity detection in an image. + // For example, for an image in which the "Eiffel Tower" entity is detected, + // this field represents the confidence that there is a tower in the query + // image. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + // The relevancy of the ICA (Image Content Annotation) label to the + // image. For example, the relevancy of "tower" is likely higher to an image + // containing the detected "Eiffel Tower" than to an image containing a + // detected distant towering building, even though the confidence that + // there is a tower in each image may be the same. Range [0, 1]. + Topicality float32 `protobuf:"fixed32,6,opt,name=topicality,proto3" json:"topicality,omitempty"` + // Image region to which this entity belongs. Not produced + // for `LABEL_DETECTION` features. + BoundingPoly *BoundingPoly `protobuf:"bytes,7,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The location information for the detected entity. Multiple + // `LocationInfo` elements can be present because one location may + // indicate the location of the scene in the image, and another location + // may indicate the location of the place where the image was taken. + // Location information is usually present for landmarks. 
+ Locations []*LocationInfo `protobuf:"bytes,8,rep,name=locations,proto3" json:"locations,omitempty"` + // Some entities may have optional user-supplied `Property` (name/value) + // fields, such a score or string that qualifies the entity. + Properties []*Property `protobuf:"bytes,9,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityAnnotation) Reset() { *m = EntityAnnotation{} } +func (m *EntityAnnotation) String() string { return proto.CompactTextString(m) } +func (*EntityAnnotation) ProtoMessage() {} +func (*EntityAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{6} +} +func (m *EntityAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityAnnotation.Unmarshal(m, b) +} +func (m *EntityAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityAnnotation.Marshal(b, m, deterministic) +} +func (dst *EntityAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityAnnotation.Merge(dst, src) +} +func (m *EntityAnnotation) XXX_Size() int { + return xxx_messageInfo_EntityAnnotation.Size(m) +} +func (m *EntityAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_EntityAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityAnnotation proto.InternalMessageInfo + +func (m *EntityAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *EntityAnnotation) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *EntityAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EntityAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *EntityAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *EntityAnnotation) GetTopicality() float32 { + if m != nil { + return m.Topicality + } + return 0 +} + +func (m *EntityAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *EntityAnnotation) GetLocations() []*LocationInfo { + if m != nil { + return m.Locations + } + return nil +} + +func (m *EntityAnnotation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Set of features pertaining to the image, computed by computer vision +// methods over safe-search verticals (for example, adult, spoof, medical, +// violence). +type SafeSearchAnnotation struct { + // Represents the adult content likelihood for the image. Adult content may + // contain elements such as nudity, pornographic images or cartoons, or + // sexual activities. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"adult,omitempty"` + // Spoof likelihood. The likelihood that an modification + // was made to the image's canonical version to make it appear + // funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"spoof,omitempty"` + // Likelihood that this is a medical image. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"medical,omitempty"` + // Likelihood that this image contains violent content. 
+ Violence Likelihood `protobuf:"varint,4,opt,name=violence,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"violence,omitempty"` + // Likelihood that the request image contains racy content. Racy content may + // include (but is not limited to) skimpy or sheer clothing, strategically + // covered nudity, lewd or provocative poses, or close-ups of sensitive + // body areas. + Racy Likelihood `protobuf:"varint,9,opt,name=racy,proto3,enum=google.cloud.vision.v1p1beta1.Likelihood" json:"racy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{7} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolence() Likelihood { + if m != nil { + return m.Violence + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +// Rectangle determined by min and max `LatLng` pairs. +type LatLongRect struct { + // Min lat/long pair. + MinLatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=min_lat_lng,json=minLatLng,proto3" json:"min_lat_lng,omitempty"` + // Max lat/long pair. 
+ MaxLatLng *latlng.LatLng `protobuf:"bytes,2,opt,name=max_lat_lng,json=maxLatLng,proto3" json:"max_lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLongRect) Reset() { *m = LatLongRect{} } +func (m *LatLongRect) String() string { return proto.CompactTextString(m) } +func (*LatLongRect) ProtoMessage() {} +func (*LatLongRect) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{8} +} +func (m *LatLongRect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLongRect.Unmarshal(m, b) +} +func (m *LatLongRect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLongRect.Marshal(b, m, deterministic) +} +func (dst *LatLongRect) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLongRect.Merge(dst, src) +} +func (m *LatLongRect) XXX_Size() int { + return xxx_messageInfo_LatLongRect.Size(m) +} +func (m *LatLongRect) XXX_DiscardUnknown() { + xxx_messageInfo_LatLongRect.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLongRect proto.InternalMessageInfo + +func (m *LatLongRect) GetMinLatLng() *latlng.LatLng { + if m != nil { + return m.MinLatLng + } + return nil +} + +func (m *LatLongRect) GetMaxLatLng() *latlng.LatLng { + if m != nil { + return m.MaxLatLng + } + return nil +} + +// Color information consists of RGB channels, score, and the fraction of +// the image that the color occupies in the image. +type ColorInfo struct { + // RGB components of the color. + Color *color.Color `protobuf:"bytes,1,opt,name=color,proto3" json:"color,omitempty"` + // Image-specific score for this color. Value in range [0, 1]. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The fraction of pixels the color occupies in the image. + // Value in range [0, 1]. + PixelFraction float32 `protobuf:"fixed32,3,opt,name=pixel_fraction,json=pixelFraction,proto3" json:"pixel_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColorInfo) Reset() { *m = ColorInfo{} } +func (m *ColorInfo) String() string { return proto.CompactTextString(m) } +func (*ColorInfo) ProtoMessage() {} +func (*ColorInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{9} +} +func (m *ColorInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColorInfo.Unmarshal(m, b) +} +func (m *ColorInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColorInfo.Marshal(b, m, deterministic) +} +func (dst *ColorInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColorInfo.Merge(dst, src) +} +func (m *ColorInfo) XXX_Size() int { + return xxx_messageInfo_ColorInfo.Size(m) +} +func (m *ColorInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ColorInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ColorInfo proto.InternalMessageInfo + +func (m *ColorInfo) GetColor() *color.Color { + if m != nil { + return m.Color + } + return nil +} + +func (m *ColorInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ColorInfo) GetPixelFraction() float32 { + if m != nil { + return m.PixelFraction + } + return 0 +} + +// Set of dominant colors and their corresponding scores. +type DominantColorsAnnotation struct { + // RGB color values with their score and pixel fraction. 
+ Colors []*ColorInfo `protobuf:"bytes,1,rep,name=colors,proto3" json:"colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DominantColorsAnnotation) Reset() { *m = DominantColorsAnnotation{} } +func (m *DominantColorsAnnotation) String() string { return proto.CompactTextString(m) } +func (*DominantColorsAnnotation) ProtoMessage() {} +func (*DominantColorsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{10} +} +func (m *DominantColorsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DominantColorsAnnotation.Unmarshal(m, b) +} +func (m *DominantColorsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DominantColorsAnnotation.Marshal(b, m, deterministic) +} +func (dst *DominantColorsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DominantColorsAnnotation.Merge(dst, src) +} +func (m *DominantColorsAnnotation) XXX_Size() int { + return xxx_messageInfo_DominantColorsAnnotation.Size(m) +} +func (m *DominantColorsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_DominantColorsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_DominantColorsAnnotation proto.InternalMessageInfo + +func (m *DominantColorsAnnotation) GetColors() []*ColorInfo { + if m != nil { + return m.Colors + } + return nil +} + +// Stores image properties, such as dominant colors. +type ImageProperties struct { + // If present, dominant colors completed successfully. + DominantColors *DominantColorsAnnotation `protobuf:"bytes,1,opt,name=dominant_colors,json=dominantColors,proto3" json:"dominant_colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageProperties) Reset() { *m = ImageProperties{} } +func (m *ImageProperties) String() string { return proto.CompactTextString(m) } +func (*ImageProperties) ProtoMessage() {} +func (*ImageProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{11} +} +func (m *ImageProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageProperties.Unmarshal(m, b) +} +func (m *ImageProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageProperties.Marshal(b, m, deterministic) +} +func (dst *ImageProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageProperties.Merge(dst, src) +} +func (m *ImageProperties) XXX_Size() int { + return xxx_messageInfo_ImageProperties.Size(m) +} +func (m *ImageProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ImageProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageProperties proto.InternalMessageInfo + +func (m *ImageProperties) GetDominantColors() *DominantColorsAnnotation { + if m != nil { + return m.DominantColors + } + return nil +} + +// Single crop hint that is used to generate a new crop when serving an image. +type CropHint struct { + // The bounding polygon for the crop region. The coordinates of the bounding + // box are in the original image's scale, as returned in `ImageParams`. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Confidence of this being a salient region. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Fraction of importance of this salient region with respect to the original + // image. + ImportanceFraction float32 `protobuf:"fixed32,3,opt,name=importance_fraction,json=importanceFraction,proto3" json:"importance_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHint) Reset() { *m = CropHint{} } +func (m *CropHint) String() string { return proto.CompactTextString(m) } +func (*CropHint) ProtoMessage() {} +func (*CropHint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{12} +} +func (m *CropHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHint.Unmarshal(m, b) +} +func (m *CropHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHint.Marshal(b, m, deterministic) +} +func (dst *CropHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHint.Merge(dst, src) +} +func (m *CropHint) XXX_Size() int { + return xxx_messageInfo_CropHint.Size(m) +} +func (m *CropHint) XXX_DiscardUnknown() { + xxx_messageInfo_CropHint.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHint proto.InternalMessageInfo + +func (m *CropHint) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *CropHint) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *CropHint) GetImportanceFraction() float32 { + if m != nil { + return m.ImportanceFraction + } + return 0 +} + +// Set of crop hints that are used to generate new crops when serving images. +type CropHintsAnnotation struct { + // Crop hint results. + CropHints []*CropHint `protobuf:"bytes,1,rep,name=crop_hints,json=cropHints,proto3" json:"crop_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsAnnotation) Reset() { *m = CropHintsAnnotation{} } +func (m *CropHintsAnnotation) String() string { return proto.CompactTextString(m) } +func (*CropHintsAnnotation) ProtoMessage() {} +func (*CropHintsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{13} +} +func (m *CropHintsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsAnnotation.Unmarshal(m, b) +} +func (m *CropHintsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsAnnotation.Marshal(b, m, deterministic) +} +func (dst *CropHintsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsAnnotation.Merge(dst, src) +} +func (m *CropHintsAnnotation) XXX_Size() int { + return xxx_messageInfo_CropHintsAnnotation.Size(m) +} +func (m *CropHintsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsAnnotation proto.InternalMessageInfo + +func (m *CropHintsAnnotation) GetCropHints() []*CropHint { + if m != nil { + return m.CropHints + } + return nil +} + +// Parameters for crop hints annotation request. +type CropHintsParams struct { + // Aspect ratios in floats, representing the ratio of the width to the height + // of the image. For example, if the desired aspect ratio is 4/3, the + // corresponding float value should be 1.33333. If not specified, the + // best possible crop is returned. 
The number of provided aspect ratios is + // limited to a maximum of 16; any aspect ratios provided after the 16th are + // ignored. + AspectRatios []float32 `protobuf:"fixed32,1,rep,packed,name=aspect_ratios,json=aspectRatios,proto3" json:"aspect_ratios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsParams) Reset() { *m = CropHintsParams{} } +func (m *CropHintsParams) String() string { return proto.CompactTextString(m) } +func (*CropHintsParams) ProtoMessage() {} +func (*CropHintsParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{14} +} +func (m *CropHintsParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsParams.Unmarshal(m, b) +} +func (m *CropHintsParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsParams.Marshal(b, m, deterministic) +} +func (dst *CropHintsParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsParams.Merge(dst, src) +} +func (m *CropHintsParams) XXX_Size() int { + return xxx_messageInfo_CropHintsParams.Size(m) +} +func (m *CropHintsParams) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsParams.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsParams proto.InternalMessageInfo + +func (m *CropHintsParams) GetAspectRatios() []float32 { + if m != nil { + return m.AspectRatios + } + return nil +} + +// Parameters for web detection request. +type WebDetectionParams struct { + // Whether to include results derived from the geo information in the image. + IncludeGeoResults bool `protobuf:"varint,2,opt,name=include_geo_results,json=includeGeoResults,proto3" json:"include_geo_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetectionParams) Reset() { *m = WebDetectionParams{} } +func (m *WebDetectionParams) String() string { return proto.CompactTextString(m) } +func (*WebDetectionParams) ProtoMessage() {} +func (*WebDetectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{15} +} +func (m *WebDetectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetectionParams.Unmarshal(m, b) +} +func (m *WebDetectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetectionParams.Marshal(b, m, deterministic) +} +func (dst *WebDetectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetectionParams.Merge(dst, src) +} +func (m *WebDetectionParams) XXX_Size() int { + return xxx_messageInfo_WebDetectionParams.Size(m) +} +func (m *WebDetectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetectionParams proto.InternalMessageInfo + +func (m *WebDetectionParams) GetIncludeGeoResults() bool { + if m != nil { + return m.IncludeGeoResults + } + return false +} + +// Image context and/or feature-specific parameters. +type ImageContext struct { + // lat/long rectangle that specifies the location of the image. + LatLongRect *LatLongRect `protobuf:"bytes,1,opt,name=lat_long_rect,json=latLongRect,proto3" json:"lat_long_rect,omitempty"` + // List of languages to use for TEXT_DETECTION. In most cases, an empty value + // yields the best results since it enables automatic language detection. 
For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Text detection returns an + // error if one or more of the specified languages is not one of the + // [supported languages](/vision/docs/languages). + LanguageHints []string `protobuf:"bytes,2,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + // Parameters for crop hints annotation request. + CropHintsParams *CropHintsParams `protobuf:"bytes,4,opt,name=crop_hints_params,json=cropHintsParams,proto3" json:"crop_hints_params,omitempty"` + // Parameters for web detection. + WebDetectionParams *WebDetectionParams `protobuf:"bytes,6,opt,name=web_detection_params,json=webDetectionParams,proto3" json:"web_detection_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageContext) Reset() { *m = ImageContext{} } +func (m *ImageContext) String() string { return proto.CompactTextString(m) } +func (*ImageContext) ProtoMessage() {} +func (*ImageContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{16} +} +func (m *ImageContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageContext.Unmarshal(m, b) +} +func (m *ImageContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageContext.Marshal(b, m, deterministic) +} +func (dst *ImageContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageContext.Merge(dst, src) +} +func (m *ImageContext) XXX_Size() int { + return xxx_messageInfo_ImageContext.Size(m) +} +func (m *ImageContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageContext proto.InternalMessageInfo + +func (m *ImageContext) GetLatLongRect() *LatLongRect { + if m != nil { + return m.LatLongRect + } + return nil +} + +func (m *ImageContext) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +func (m *ImageContext) GetCropHintsParams() *CropHintsParams { + if m != nil { + return m.CropHintsParams + } + return nil +} + +func (m *ImageContext) GetWebDetectionParams() *WebDetectionParams { + if m != nil { + return m.WebDetectionParams + } + return nil +} + +// Request for performing Google Cloud Vision API tasks over a user-provided +// image, with user-requested features. +type AnnotateImageRequest struct { + // The image to be processed. + Image *Image `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + // Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image. 
+ ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageRequest) Reset() { *m = AnnotateImageRequest{} } +func (m *AnnotateImageRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageRequest) ProtoMessage() {} +func (*AnnotateImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{17} +} +func (m *AnnotateImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageRequest.Unmarshal(m, b) +} +func (m *AnnotateImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageRequest.Merge(dst, src) +} +func (m *AnnotateImageRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateImageRequest.Size(m) +} +func (m *AnnotateImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageRequest proto.InternalMessageInfo + +func (m *AnnotateImageRequest) GetImage() *Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *AnnotateImageRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateImageRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +// Response to an image annotation request. +type AnnotateImageResponse struct { + // If present, face detection has completed successfully. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,1,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // If present, landmark detection has completed successfully. + LandmarkAnnotations []*EntityAnnotation `protobuf:"bytes,2,rep,name=landmark_annotations,json=landmarkAnnotations,proto3" json:"landmark_annotations,omitempty"` + // If present, logo detection has completed successfully. + LogoAnnotations []*EntityAnnotation `protobuf:"bytes,3,rep,name=logo_annotations,json=logoAnnotations,proto3" json:"logo_annotations,omitempty"` + // If present, label detection has completed successfully. + LabelAnnotations []*EntityAnnotation `protobuf:"bytes,4,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // If present, text (OCR) detection has completed successfully. + TextAnnotations []*EntityAnnotation `protobuf:"bytes,5,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // If present, text (OCR) detection or document (OCR) text detection has + // completed successfully. + // This annotation provides the structural hierarchy for the OCR detected + // text. + FullTextAnnotation *TextAnnotation `protobuf:"bytes,12,opt,name=full_text_annotation,json=fullTextAnnotation,proto3" json:"full_text_annotation,omitempty"` + // If present, safe-search annotation has completed successfully. + SafeSearchAnnotation *SafeSearchAnnotation `protobuf:"bytes,6,opt,name=safe_search_annotation,json=safeSearchAnnotation,proto3" json:"safe_search_annotation,omitempty"` + // If present, image properties were extracted successfully. 
+ ImagePropertiesAnnotation *ImageProperties `protobuf:"bytes,8,opt,name=image_properties_annotation,json=imagePropertiesAnnotation,proto3" json:"image_properties_annotation,omitempty"` + // If present, crop hints have completed successfully. + CropHintsAnnotation *CropHintsAnnotation `protobuf:"bytes,11,opt,name=crop_hints_annotation,json=cropHintsAnnotation,proto3" json:"crop_hints_annotation,omitempty"` + // If present, web detection has completed successfully. + WebDetection *WebDetection `protobuf:"bytes,13,opt,name=web_detection,json=webDetection,proto3" json:"web_detection,omitempty"` + // If set, represents the error message for the operation. + // Note that filled-in image annotations are guaranteed to be + // correct, even when `error` is set. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageResponse) Reset() { *m = AnnotateImageResponse{} } +func (m *AnnotateImageResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageResponse) ProtoMessage() {} +func (*AnnotateImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{18} +} +func (m *AnnotateImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageResponse.Unmarshal(m, b) +} +func (m *AnnotateImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageResponse.Merge(dst, src) +} +func (m *AnnotateImageResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateImageResponse.Size(m) +} +func (m *AnnotateImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageResponse proto.InternalMessageInfo + +func (m *AnnotateImageResponse) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLandmarkAnnotations() []*EntityAnnotation { + if m != nil { + return m.LandmarkAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLogoAnnotations() []*EntityAnnotation { + if m != nil { + return m.LogoAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLabelAnnotations() []*EntityAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetTextAnnotations() []*EntityAnnotation { + if m != nil { + return m.TextAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetFullTextAnnotation() *TextAnnotation { + if m != nil { + return m.FullTextAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetSafeSearchAnnotation() *SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetImagePropertiesAnnotation() *ImageProperties { + if m != nil { + return m.ImagePropertiesAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetCropHintsAnnotation() *CropHintsAnnotation { + if m != nil { + return m.CropHintsAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetWebDetection() *WebDetection { + if m != nil { + return m.WebDetection + } + return nil +} + +func (m *AnnotateImageResponse) GetError() *status.Status { + if m 
!= nil { + return m.Error + } + return nil +} + +// Multiple image annotation requests are batched into a single service call. +type BatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. + Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesRequest) Reset() { *m = BatchAnnotateImagesRequest{} } +func (m *BatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesRequest) ProtoMessage() {} +func (*BatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{19} +} +func (m *BatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *BatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesRequest.Size(m) +} +func (m *BatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to a batch image annotation request. +type BatchAnnotateImagesResponse struct { + // Individual responses to image annotation requests within the batch. 
+ Responses []*AnnotateImageResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesResponse) Reset() { *m = BatchAnnotateImagesResponse{} } +func (m *BatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesResponse) ProtoMessage() {} +func (*BatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_e65877f16ecf0354, []int{20} +} +func (m *BatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *BatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesResponse.Size(m) +} +func (m *BatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateImagesResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +func init() { + proto.RegisterType((*Feature)(nil), "google.cloud.vision.v1p1beta1.Feature") + proto.RegisterType((*ImageSource)(nil), "google.cloud.vision.v1p1beta1.ImageSource") + proto.RegisterType((*Image)(nil), "google.cloud.vision.v1p1beta1.Image") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.vision.v1p1beta1.FaceAnnotation") + proto.RegisterType((*FaceAnnotation_Landmark)(nil), "google.cloud.vision.v1p1beta1.FaceAnnotation.Landmark") + proto.RegisterType((*LocationInfo)(nil), "google.cloud.vision.v1p1beta1.LocationInfo") + proto.RegisterType((*Property)(nil), "google.cloud.vision.v1p1beta1.Property") + proto.RegisterType((*EntityAnnotation)(nil), "google.cloud.vision.v1p1beta1.EntityAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.vision.v1p1beta1.SafeSearchAnnotation") + proto.RegisterType((*LatLongRect)(nil), "google.cloud.vision.v1p1beta1.LatLongRect") + proto.RegisterType((*ColorInfo)(nil), "google.cloud.vision.v1p1beta1.ColorInfo") + proto.RegisterType((*DominantColorsAnnotation)(nil), "google.cloud.vision.v1p1beta1.DominantColorsAnnotation") + proto.RegisterType((*ImageProperties)(nil), "google.cloud.vision.v1p1beta1.ImageProperties") + proto.RegisterType((*CropHint)(nil), "google.cloud.vision.v1p1beta1.CropHint") + proto.RegisterType((*CropHintsAnnotation)(nil), "google.cloud.vision.v1p1beta1.CropHintsAnnotation") + proto.RegisterType((*CropHintsParams)(nil), "google.cloud.vision.v1p1beta1.CropHintsParams") + proto.RegisterType((*WebDetectionParams)(nil), "google.cloud.vision.v1p1beta1.WebDetectionParams") + proto.RegisterType((*ImageContext)(nil), "google.cloud.vision.v1p1beta1.ImageContext") + proto.RegisterType((*AnnotateImageRequest)(nil), "google.cloud.vision.v1p1beta1.AnnotateImageRequest") + proto.RegisterType((*AnnotateImageResponse)(nil), "google.cloud.vision.v1p1beta1.AnnotateImageResponse") + proto.RegisterType((*BatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1p1beta1.BatchAnnotateImagesRequest") + 
proto.RegisterType((*BatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1p1beta1.BatchAnnotateImagesResponse") + proto.RegisterEnum("google.cloud.vision.v1p1beta1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.vision.v1p1beta1.Feature_Type", Feature_Type_name, Feature_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p1beta1.FaceAnnotation_Landmark_Type", FaceAnnotation_Landmark_Type_name, FaceAnnotation_Landmark_Type_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ImageAnnotatorClient is the client API for ImageAnnotator service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ImageAnnotatorClient interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) +} + +type imageAnnotatorClient struct { + cc *grpc.ClientConn +} + +func NewImageAnnotatorClient(cc *grpc.ClientConn) ImageAnnotatorClient { + return &imageAnnotatorClient{cc} +} + +func (c *imageAnnotatorClient) BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) { + out := new(BatchAnnotateImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p1beta1.ImageAnnotator/BatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ImageAnnotatorServer is the server API for ImageAnnotator service. +type ImageAnnotatorServer interface { + // Run image detection and annotation for a batch of images. 
+ BatchAnnotateImages(context.Context, *BatchAnnotateImagesRequest) (*BatchAnnotateImagesResponse, error) +} + +func RegisterImageAnnotatorServer(s *grpc.Server, srv ImageAnnotatorServer) { + s.RegisterService(&_ImageAnnotator_serviceDesc, srv) +} + +func _ImageAnnotator_BatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p1beta1.ImageAnnotator/BatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, req.(*BatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ImageAnnotator_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p1beta1.ImageAnnotator", + HandlerType: (*ImageAnnotatorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchAnnotateImages", + Handler: _ImageAnnotator_BatchAnnotateImages_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p1beta1/image_annotator.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p1beta1/image_annotator.proto", fileDescriptor_image_annotator_e65877f16ecf0354) +} + +var fileDescriptor_image_annotator_e65877f16ecf0354 = []byte{ + // 2392 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x59, 0xcd, 0x72, 0xe3, 0xc6, + 0xf1, 0x37, 0xa9, 0x2f, 0xb2, 0x49, 0x91, 0xd0, 0x48, 0x2b, 0x73, 0xb5, 0xbb, 0x5e, 0x19, 0xff, + 0xbf, 0x13, 0xc5, 0x71, 0xa8, 0x5a, 0xad, 0xe3, 0x54, 0xd6, 0x49, 0x39, 0x24, 0x05, 0x49, 0x2c, + 0x73, 0x49, 0xd4, 0x90, 0xb2, 0xbc, 0x5b, 0x4e, 0x21, 0x10, 0x38, 0xe4, 0xc2, 0x06, 0x31, 0x30, + 0x00, 0xee, 0x4a, 0x57, 0x5f, 0x73, 0xcc, 0x2d, 0xf7, 0x1c, 0x73, 0x4a, 0x9e, 0xc1, 0x2f, 0x90, + 0x43, 0x1e, 0x20, 0x39, 0xe4, 0x09, 0x52, 0xa9, 0x9c, 0x52, 0xf3, 0x01, 0x70, 0xc0, 0xfd, 0xa0, + 0xb8, 0xa9, 0x9c, 0x88, 0xe9, 0x9e, 0xdf, 0xaf, 0x67, 0xba, 0xa7, 0x67, 0x7a, 0x86, 0xf0, 0x70, + 0x4c, 0xe9, 0xd8, 0x23, 0x87, 0x8e, 0x47, 0xa7, 0xc3, 0xc3, 0xe7, 0x6e, 0xe4, 0x52, 0xff, 0xf0, + 0xf9, 0x83, 0xe0, 0xc1, 0x25, 0x89, 0xed, 0x07, 0x87, 0xee, 0xc4, 0x1e, 0x13, 0xcb, 0xf6, 0x7d, + 0x1a, 0xdb, 0x31, 0x0d, 0xeb, 0x41, 0x48, 0x63, 0x8a, 0xee, 0x09, 0x50, 0x9d, 0x83, 0xea, 0x02, + 0x54, 0x4f, 0x41, 0x7b, 0x77, 0x25, 0xa7, 0x1d, 0xb8, 0x87, 0x12, 0xea, 0x52, 0x3f, 0x12, 0xe0, + 0xbd, 0x8f, 0xde, 0x6c, 0x71, 0x4c, 0xe8, 0x84, 0xc4, 0xe1, 0xb5, 0xec, 0xbd, 0x60, 0x7c, 0x31, + 0xb9, 0x8a, 0xad, 0x99, 0x0d, 0x09, 0x7a, 0xf0, 0x66, 0xd0, 0x0b, 0x72, 0x69, 0x0d, 0x49, 0x4c, + 0x1c, 0x05, 0xf2, 0xae, 0x84, 0x84, 0x81, 0x73, 0x18, 0xc5, 0x76, 0x3c, 0x8d, 0xe6, 0x14, 0xf1, + 0x75, 0x40, 0x0e, 0x1d, 0xea, 0x25, 0x4e, 0xd8, 0xab, 0xa9, 0x0a, 0xcf, 0x8e, 0x3d, 0x7f, 0x2c, + 0x34, 0xfa, 0xbf, 0xf3, 0xb0, 0x71, 0x42, 0xec, 0x78, 0x1a, 0x12, 0xf4, 0x19, 0xac, 0xb2, 0x0e, + 0xb5, 0xdc, 0x7e, 0xee, 0xa0, 0x72, 0xf4, 0xe3, 0xfa, 0x1b, 0x3d, 0x57, 0x97, 0xa8, 0xfa, 0xe0, + 0x3a, 0x20, 0x98, 0x03, 0xd1, 0x7d, 0x28, 0x4d, 0xec, 0x2b, 0x2b, 0x24, 0xd1, 0xd4, 0x8b, 0xa3, + 0x5a, 0x7e, 0x3f, 0x77, 0xb0, 0x86, 0x61, 0x62, 0x5f, 0x61, 0x21, 0x41, 0x3b, 0xb0, 0x36, 0xa1, + 0x43, 0xe2, 0xd5, 0x56, 0xf6, 0x73, 0x07, 
0x45, 0x2c, 0x1a, 0xfa, 0x3f, 0x73, 0xb0, 0xca, 0x58, + 0xd0, 0x0e, 0x68, 0x83, 0x27, 0xa6, 0x61, 0x9d, 0x77, 0xfb, 0xa6, 0xd1, 0x6a, 0x9f, 0xb4, 0x8d, + 0x63, 0xed, 0x1d, 0x84, 0xa0, 0x72, 0xd2, 0x68, 0x19, 0xd6, 0xb1, 0x31, 0x30, 0x5a, 0x83, 0x76, + 0xaf, 0xab, 0xe5, 0xd0, 0x2e, 0xa0, 0x4e, 0xa3, 0x7b, 0xfc, 0xb8, 0x81, 0x3f, 0x57, 0xe4, 0x79, + 0xd6, 0xb7, 0xd3, 0x3b, 0xed, 0x29, 0xb2, 0x15, 0xb4, 0x0d, 0xd5, 0x4e, 0xa3, 0x69, 0x74, 0x14, + 0xe1, 0x2a, 0xeb, 0x38, 0x30, 0xbe, 0x1c, 0x28, 0xb2, 0x35, 0x74, 0x07, 0xde, 0x3d, 0xee, 0xb5, + 0xce, 0x1f, 0x1b, 0xdd, 0x81, 0x35, 0xa7, 0x2c, 0xa1, 0xdb, 0x70, 0xab, 0xdf, 0x38, 0x31, 0xac, + 0xbe, 0xd1, 0xc0, 0xad, 0x33, 0x45, 0xb5, 0xce, 0x86, 0xdd, 0x7e, 0xdc, 0x38, 0x35, 0x2c, 0x13, + 0xf7, 0x4c, 0x03, 0x0f, 0xda, 0x46, 0x5f, 0xdb, 0x40, 0x15, 0x80, 0x16, 0xee, 0x99, 0xd6, 0x59, + 0xbb, 0x3b, 0xe8, 0x6b, 0x45, 0xb4, 0x05, 0x9b, 0x17, 0x46, 0x53, 0x01, 0x82, 0xde, 0x85, 0x52, + 0x9b, 0x2d, 0xda, 0x3e, 0x9d, 0x86, 0x0e, 0x41, 0x3a, 0x6c, 0x8e, 0x9d, 0xc8, 0x12, 0xeb, 0x78, + 0x1a, 0xba, 0x3c, 0x10, 0x45, 0x5c, 0x1a, 0x3b, 0x11, 0xef, 0x76, 0x1e, 0xba, 0xe8, 0x0e, 0x14, + 0x67, 0xfa, 0x3c, 0xd7, 0x17, 0x5c, 0xa9, 0xd4, 0x09, 0xac, 0xf1, 0x8e, 0xa8, 0x06, 0x1b, 0x0e, + 0xf5, 0x63, 0xe2, 0xc7, 0x9c, 0xa3, 0x8c, 0x93, 0x26, 0x6a, 0xc2, 0x7a, 0xc4, 0xad, 0x71, 0x70, + 0xe9, 0xe8, 0xc3, 0x05, 0x51, 0x56, 0xc6, 0x87, 0x25, 0x52, 0xff, 0x83, 0x06, 0x95, 0x13, 0xdb, + 0x21, 0x8d, 0x74, 0x2d, 0x23, 0x13, 0x36, 0x2f, 0xe9, 0xd4, 0x1f, 0xba, 0xfe, 0xd8, 0x0a, 0xa8, + 0x77, 0xcd, 0xcd, 0x96, 0x16, 0xae, 0xa1, 0xa6, 0xc4, 0x98, 0xd4, 0xbb, 0xc6, 0xe5, 0x4b, 0xa5, + 0x85, 0xce, 0x41, 0x1b, 0x0d, 0xad, 0x2c, 0x69, 0x7e, 0x79, 0xd2, 0xca, 0x68, 0xa8, 0xb6, 0xd1, + 0x00, 0x8a, 0x9e, 0xed, 0x0f, 0x27, 0x76, 0xf8, 0x4d, 0x54, 0x5b, 0xd9, 0x5f, 0x39, 0x28, 0x1d, + 0x7d, 0xb2, 0x68, 0xa1, 0x67, 0xa6, 0x5a, 0xef, 0x48, 0x38, 0x9e, 0x11, 0xa1, 0x7b, 0x00, 0x21, + 0xf5, 0x3c, 0xcb, 0xf6, 0xc7, 0x1e, 0xa9, 0xad, 0xee, 0xe7, 0x0e, 0xf2, 0xb8, 0xc8, 0x24, 0x0d, + 0x26, 0x60, 0x41, 0x0b, 0x6c, 0x5f, 0x6a, 0xd7, 0xb8, 0xb6, 0x10, 0xd8, 0xbe, 0x50, 0xde, 0x03, + 0x88, 0x5d, 0x2f, 0x96, 0xda, 0x75, 0x81, 0x65, 0x12, 0xa1, 0x7e, 0x00, 0x3b, 0x69, 0xfe, 0x5b, + 0x0e, 0xf5, 0x47, 0xee, 0x90, 0xf8, 0x0e, 0xa9, 0x6d, 0xf0, 0x8e, 0xdb, 0xa9, 0xae, 0x95, 0xaa, + 0xd0, 0x4f, 0x61, 0x37, 0x19, 0x1a, 0x73, 0x9d, 0x02, 0x2a, 0x70, 0xd0, 0x2d, 0x45, 0xab, 0xc0, + 0x4c, 0xa8, 0x7c, 0x4d, 0xaf, 0x2d, 0xcf, 0xfd, 0x86, 0x78, 0xee, 0x33, 0x4a, 0x87, 0xb5, 0x22, + 0xdf, 0x08, 0x7e, 0xb4, 0xc0, 0x3f, 0x9d, 0x14, 0x80, 0x37, 0xbf, 0xa6, 0xd7, 0xb3, 0x26, 0xfa, + 0x02, 0xb6, 0x22, 0x1a, 0x86, 0xf4, 0x85, 0x4a, 0x0a, 0xcb, 0x92, 0x6a, 0x82, 0x43, 0xe1, 0x1d, + 0x80, 0x66, 0xfb, 0x63, 0x12, 0xaa, 0xb4, 0xa5, 0x65, 0x69, 0xab, 0x9c, 0x42, 0x61, 0x7d, 0x0a, + 0xdb, 0xd1, 0x34, 0x0c, 0x42, 0x37, 0x22, 0x2a, 0x71, 0x79, 0x59, 0x62, 0x94, 0xb0, 0x28, 0xdc, + 0x0e, 0xd4, 0xa6, 0xfe, 0x90, 0x84, 0x16, 0xb9, 0x0a, 0x68, 0x44, 0x86, 0xaa, 0x81, 0xcd, 0x65, + 0x0d, 0xec, 0x72, 0x2a, 0x43, 0x30, 0x29, 0x46, 0xbe, 0x04, 0x74, 0xe9, 0x4d, 0xc3, 0x30, 0x4b, + 0x5f, 0x59, 0x96, 0x7e, 0x4b, 0x92, 0x64, 0x5d, 0xf3, 0x8c, 0xd8, 0xc3, 0x17, 0xc4, 0xce, 0xf8, + 0xbc, 0xba, 0xb4, 0x6b, 0x12, 0x96, 0x99, 0x6c, 0xef, 0xaf, 0x1b, 0x50, 0x48, 0x72, 0x0a, 0xf5, + 0xe4, 0x11, 0xb4, 0xc2, 0x99, 0x3f, 0x7d, 0xbb, 0xcc, 0x54, 0x8f, 0xa4, 0x16, 0x14, 0x02, 0x1a, + 0xb9, 0x4c, 0xcf, 0xf3, 0xb2, 0x74, 0xf4, 0xc3, 0x05, 0xa4, 0xa6, 0xec, 0x8e, 0x53, 0xa0, 0xfe, + 0xe7, 0xf5, 0xd9, 0x01, 0x75, 0xde, 0xfd, 0xbc, 0xdb, 0xbb, 0xe8, 
0x5a, 0xc9, 0xf1, 0xa3, 0xbd, + 0x83, 0xca, 0x50, 0xe8, 0x18, 0x27, 0x03, 0xcb, 0x78, 0x62, 0x68, 0x39, 0xb4, 0x09, 0x45, 0xdc, + 0x3e, 0x3d, 0x13, 0xcd, 0x3c, 0xaa, 0xc1, 0x0e, 0x57, 0xf6, 0x4e, 0xac, 0xa4, 0x53, 0x13, 0xf7, + 0x2e, 0xb4, 0x15, 0x76, 0xa2, 0x88, 0x8e, 0xf3, 0xaa, 0x55, 0xa6, 0x4a, 0x40, 0x29, 0x17, 0x57, + 0xad, 0xa1, 0x3d, 0xd8, 0x4d, 0x51, 0x59, 0xdd, 0x3a, 0x83, 0x3d, 0x6e, 0x1f, 0x9b, 0xbd, 0x76, + 0x77, 0x60, 0x35, 0x8d, 0xc1, 0x85, 0x61, 0x74, 0x99, 0x96, 0x9d, 0x46, 0x65, 0x28, 0x74, 0x7b, + 0x7d, 0xc3, 0x1a, 0xb4, 0x4d, 0xad, 0xc0, 0xc6, 0x78, 0x6e, 0x9a, 0x06, 0xb6, 0x3a, 0x6d, 0x53, + 0x2b, 0xb2, 0x66, 0xa7, 0x77, 0x21, 0x9b, 0xc0, 0x4e, 0xae, 0xc7, 0xbd, 0xf3, 0xc1, 0x19, 0x1f, + 0x95, 0x56, 0x42, 0x55, 0x28, 0x89, 0x36, 0xb7, 0xa7, 0x95, 0x91, 0x06, 0x65, 0x21, 0x68, 0x19, + 0xdd, 0x81, 0x81, 0xb5, 0x4d, 0x74, 0x0b, 0xb6, 0x38, 0x7d, 0xb3, 0x37, 0x18, 0xf4, 0x1e, 0xcb, + 0x8e, 0x15, 0xe6, 0x2f, 0x55, 0xcc, 0xf9, 0xaa, 0xec, 0xf0, 0x56, 0xa5, 0x92, 0x44, 0x4b, 0x67, + 0x6d, 0x3c, 0x31, 0xac, 0x41, 0xcf, 0xb4, 0x9a, 0xbd, 0xf3, 0xee, 0x71, 0x03, 0x3f, 0xd1, 0xb6, + 0x32, 0x2a, 0x31, 0xeb, 0x56, 0x0f, 0x77, 0x0d, 0xac, 0x21, 0x74, 0x17, 0x6a, 0xa9, 0x4a, 0x32, + 0xa6, 0xc0, 0xed, 0xd4, 0xfd, 0x4c, 0xcb, 0x3f, 0x24, 0x6e, 0x67, 0xe6, 0xc8, 0x97, 0xcc, 0xdd, + 0xca, 0xea, 0x32, 0xf6, 0x76, 0xd1, 0x3d, 0xb8, 0x3d, 0xd3, 0xcd, 0x1b, 0x7c, 0x77, 0x16, 0xd5, + 0x79, 0x8b, 0x35, 0x74, 0x1f, 0xee, 0xa8, 0x71, 0xb6, 0x44, 0x08, 0x92, 0x88, 0x69, 0xb7, 0xd1, + 0x3e, 0xdc, 0xcd, 0x84, 0x74, 0xbe, 0xc7, 0x1e, 0x73, 0xa8, 0xa0, 0x68, 0x60, 0x6b, 0x80, 0x1b, + 0xa7, 0xac, 0x8e, 0xb8, 0xc3, 0xbc, 0x2f, 0x71, 0x8a, 0xf8, 0x2e, 0x2f, 0x86, 0x92, 0xb9, 0x9b, + 0xe7, 0x66, 0xbb, 0xa3, 0xdd, 0x63, 0xc5, 0xd0, 0x6c, 0x78, 0x42, 0xf8, 0x1e, 0xc3, 0x9f, 0xf4, + 0xb0, 0x71, 0x66, 0x34, 0x8e, 0xad, 0x53, 0x5e, 0x2b, 0x75, 0x1a, 0xda, 0x7d, 0x56, 0xb1, 0xb4, + 0xce, 0xda, 0x5d, 0xeb, 0xb4, 0xdb, 0x18, 0x9c, 0x31, 0xca, 0x7d, 0x66, 0x9f, 0x8b, 0x38, 0xef, + 0x69, 0xaf, 0xcb, 0xa4, 0xef, 0x33, 0x3c, 0x97, 0x0a, 0x66, 0x29, 0xd6, 0xf5, 0x5f, 0x40, 0xb9, + 0x43, 0x1d, 0x9e, 0x9b, 0x6d, 0x7f, 0x44, 0xd1, 0x47, 0xb0, 0xe1, 0xd9, 0xb1, 0xe5, 0xf9, 0x63, + 0x59, 0x1e, 0x6c, 0x27, 0xa9, 0xc8, 0x52, 0xb5, 0xde, 0xb1, 0xe3, 0x8e, 0x3f, 0xc6, 0xeb, 0x1e, + 0xff, 0xd5, 0x2f, 0xa0, 0x60, 0x86, 0x34, 0x20, 0x61, 0x7c, 0x8d, 0x10, 0xac, 0xfa, 0xf6, 0x84, + 0xc8, 0x82, 0x88, 0x7f, 0xb3, 0x5a, 0xf2, 0xb9, 0xed, 0x4d, 0x89, 0xac, 0x82, 0x44, 0x03, 0xbd, + 0x0f, 0xe5, 0xa9, 0xeb, 0xc7, 0x9f, 0x7c, 0x6c, 0x09, 0x25, 0xdb, 0x48, 0x56, 0x71, 0x49, 0xc8, + 0xbe, 0x60, 0x22, 0xfd, 0xf7, 0x2b, 0xa0, 0x19, 0x7e, 0xec, 0xc6, 0xd7, 0x4a, 0x01, 0xa3, 0xc1, + 0xca, 0xc4, 0x1d, 0x4a, 0x03, 0xec, 0x13, 0xed, 0xc2, 0xba, 0x47, 0x1d, 0xdb, 0x4b, 0x0c, 0xc8, + 0x16, 0xda, 0x87, 0xd2, 0x90, 0x44, 0x4e, 0xe8, 0x06, 0x7c, 0x53, 0x11, 0x95, 0xac, 0x2a, 0x62, + 0x23, 0x8b, 0x1c, 0x1a, 0x26, 0x85, 0x80, 0x68, 0xa0, 0xf7, 0x00, 0x94, 0x93, 0x58, 0x54, 0x01, + 0x8a, 0x84, 0xe9, 0x63, 0x1a, 0xb8, 0x8e, 0xed, 0xb9, 0xf1, 0xb5, 0xac, 0x03, 0x14, 0xc9, 0xcb, + 0x25, 0xd6, 0xc6, 0x7f, 0x5b, 0x62, 0xb5, 0xa1, 0xe8, 0xc9, 0xf8, 0x44, 0xb5, 0x02, 0xaf, 0x85, + 0x16, 0xb1, 0xa9, 0xf1, 0xc4, 0x33, 0x34, 0x3a, 0x05, 0x08, 0x44, 0xb0, 0x5c, 0x12, 0xd5, 0x8a, + 0x9c, 0x6b, 0xe1, 0x46, 0x2b, 0xa3, 0x8b, 0x15, 0xa8, 0xfe, 0xb7, 0x3c, 0xec, 0xf4, 0xed, 0x11, + 0xe9, 0x13, 0x3b, 0x74, 0x9e, 0x29, 0x01, 0xfa, 0x0c, 0xd6, 0xec, 0xe1, 0xd4, 0x8b, 0xe5, 0xed, + 0x64, 0x89, 0x43, 0x47, 0xe0, 0x18, 0x41, 0x14, 0x50, 0x3a, 0xe2, 0xe1, 0x5c, 0x8e, 0x80, 
0xe3, + 0x50, 0x0b, 0x36, 0x26, 0x64, 0xc8, 0xc2, 0x21, 0x8f, 0xa7, 0x25, 0x28, 0x12, 0x24, 0x32, 0xa0, + 0xf0, 0xdc, 0xa5, 0x1e, 0x5f, 0x03, 0xab, 0xcb, 0xb2, 0xa4, 0x50, 0xf4, 0x4b, 0x58, 0x0d, 0x6d, + 0xe7, 0x7a, 0xf9, 0x0a, 0x8d, 0xc3, 0xf4, 0x17, 0x50, 0x62, 0xd9, 0x46, 0xfd, 0x31, 0x26, 0x4e, + 0x8c, 0x1e, 0x42, 0x69, 0xe2, 0xfa, 0xd6, 0x0d, 0x92, 0xb3, 0x38, 0x71, 0x7d, 0xf1, 0xc9, 0x41, + 0xf6, 0x55, 0x0a, 0xca, 0xbf, 0x09, 0x64, 0x5f, 0x89, 0x4f, 0x3d, 0x84, 0x62, 0x8b, 0xdd, 0x4b, + 0xf9, 0x7e, 0x70, 0x00, 0x6b, 0xfc, 0x92, 0x2a, 0x0d, 0xa2, 0x0c, 0x96, 0x77, 0xc3, 0xa2, 0xc3, + 0x2c, 0xa3, 0xf2, 0x6a, 0x46, 0x7d, 0x00, 0x95, 0xc0, 0xbd, 0x22, 0x9e, 0x35, 0x0a, 0x6d, 0x27, + 0x4d, 0xc6, 0x3c, 0xde, 0xe4, 0xd2, 0x13, 0x29, 0xd4, 0xbf, 0x82, 0xda, 0x31, 0x9d, 0xb8, 0xbe, + 0xed, 0xc7, 0x9c, 0x34, 0x52, 0x56, 0xd5, 0xaf, 0x60, 0x9d, 0x5b, 0x88, 0x6a, 0x39, 0xbe, 0x66, + 0x0f, 0x16, 0x78, 0x32, 0x1d, 0x3c, 0x96, 0x38, 0x3d, 0x82, 0x2a, 0xbf, 0x23, 0x99, 0xe9, 0x1a, + 0x46, 0xbf, 0x81, 0xea, 0x50, 0x1a, 0xb4, 0x52, 0x76, 0x36, 0xc3, 0x9f, 0x2d, 0x60, 0x7f, 0xdd, + 0x30, 0x71, 0x65, 0x98, 0xd1, 0xe8, 0x7f, 0xcc, 0x41, 0xa1, 0x15, 0xd2, 0xe0, 0xcc, 0xf5, 0xe3, + 0xff, 0xc1, 0xdd, 0x2b, 0xbb, 0x55, 0xe5, 0x5f, 0xda, 0xaa, 0x0e, 0x61, 0xdb, 0x9d, 0x04, 0x34, + 0x8c, 0x6d, 0xdf, 0x21, 0xf3, 0xde, 0x47, 0x33, 0x55, 0x1a, 0x82, 0x5f, 0xc3, 0x76, 0x32, 0x5c, + 0xd5, 0xfb, 0x27, 0x00, 0x4e, 0x48, 0x03, 0xeb, 0x19, 0x93, 0xcb, 0x08, 0x2c, 0xda, 0x35, 0x12, + 0x1e, 0x5c, 0x74, 0x12, 0x46, 0xfd, 0x13, 0xa8, 0xa6, 0xf4, 0xa6, 0x1d, 0xda, 0x93, 0x08, 0xfd, + 0x1f, 0x6c, 0xda, 0x51, 0x40, 0x9c, 0xd8, 0x0a, 0x99, 0x2d, 0xc1, 0x9e, 0xc7, 0x65, 0x21, 0xc4, + 0x5c, 0xa6, 0x1f, 0x03, 0xba, 0x20, 0x97, 0xc7, 0xc9, 0x15, 0x4a, 0x42, 0xeb, 0xb0, 0xed, 0xfa, + 0x8e, 0x37, 0x1d, 0x12, 0x6b, 0x4c, 0x68, 0xe6, 0x35, 0xa3, 0x80, 0xb7, 0xa4, 0xea, 0x94, 0x50, + 0xf9, 0xa8, 0xa1, 0x7f, 0x9f, 0x87, 0x32, 0x5f, 0x02, 0x2d, 0x76, 0xc7, 0xbe, 0x8a, 0x51, 0x17, + 0x36, 0x79, 0x56, 0x50, 0x7f, 0x6c, 0x85, 0xc4, 0x89, 0x65, 0x40, 0x16, 0x5d, 0xb5, 0x95, 0x8c, + 0xc4, 0x25, 0x4f, 0x49, 0xcf, 0x0f, 0xa0, 0xe2, 0xd9, 0xfe, 0x78, 0xca, 0xae, 0xfd, 0xc2, 0x55, + 0xf9, 0xfd, 0x95, 0x83, 0x22, 0xde, 0x4c, 0xa4, 0x7c, 0xe2, 0xe8, 0x29, 0x6c, 0xcd, 0xbc, 0x69, + 0x05, 0x7c, 0x32, 0xb2, 0xe6, 0xad, 0xdf, 0xd0, 0xa9, 0xd2, 0x7b, 0xb8, 0xea, 0xcc, 0xb9, 0xd3, + 0x81, 0x9d, 0xcc, 0x4b, 0x54, 0x42, 0xbf, 0xce, 0xe9, 0x1f, 0x2c, 0xa0, 0x7f, 0xd9, 0xc9, 0x18, + 0xbd, 0x78, 0x49, 0xa6, 0xff, 0x23, 0x07, 0x3b, 0x72, 0x75, 0x10, 0xee, 0x50, 0x4c, 0xbe, 0x9d, + 0x92, 0x28, 0x46, 0x8f, 0x60, 0x8d, 0xbf, 0x71, 0x48, 0x47, 0xfe, 0xff, 0x4d, 0xde, 0x2c, 0xb0, + 0x80, 0xa0, 0x26, 0x14, 0x46, 0xe2, 0xa5, 0x4a, 0xb8, 0xad, 0x74, 0xf4, 0x83, 0x9b, 0x3d, 0x6c, + 0xe1, 0x14, 0xc7, 0x32, 0x4c, 0x3c, 0xba, 0x38, 0x22, 0xc2, 0x7c, 0xa5, 0x2f, 0xce, 0x30, 0x75, + 0x51, 0xe0, 0xb2, 0xab, 0xb4, 0xf4, 0xdf, 0x16, 0xe0, 0xd6, 0xdc, 0x54, 0xa3, 0x80, 0xfa, 0x11, + 0x41, 0x5f, 0x82, 0x36, 0xb2, 0x1d, 0xa2, 0x3c, 0x14, 0x26, 0x99, 0xf1, 0x93, 0xa5, 0x6e, 0x43, + 0xb8, 0x3a, 0xca, 0xb4, 0x23, 0x74, 0x09, 0x3b, 0xc9, 0xc5, 0x3f, 0xc3, 0x2e, 0xbc, 0x72, 0xb8, + 0x80, 0x7d, 0xbe, 0x62, 0xc2, 0xdb, 0x09, 0x99, 0x6a, 0xe3, 0x29, 0x68, 0x1e, 0x1d, 0xd3, 0x0c, + 0xff, 0xca, 0xdb, 0xf1, 0x57, 0x19, 0x91, 0xca, 0xfd, 0x15, 0x6c, 0x79, 0xf6, 0x25, 0xf1, 0x32, + 0xe4, 0xab, 0x6f, 0x47, 0xae, 0x71, 0xa6, 0xb9, 0x91, 0xcf, 0x3d, 0xd0, 0x46, 0xb5, 0xb5, 0xb7, + 0x1c, 0x39, 0x23, 0x52, 0xb9, 0x2d, 0xd8, 0x19, 0x4d, 0x3d, 0xcf, 0x9a, 0x33, 0xc0, 0x9f, 0x16, + 0x16, 0xc7, 0x75, 
0x90, 0x61, 0xc3, 0x88, 0x51, 0x65, 0x65, 0xc8, 0x85, 0xdd, 0xc8, 0x1e, 0x11, + 0x2b, 0xe2, 0x55, 0x93, 0x6a, 0x42, 0x24, 0xe8, 0xc3, 0x05, 0x26, 0x5e, 0x55, 0x71, 0xe1, 0x9d, + 0xe8, 0x55, 0x75, 0x98, 0x0f, 0x77, 0x44, 0x2e, 0xcc, 0x8a, 0x36, 0xd5, 0x5e, 0xe1, 0x46, 0xfb, + 0xcd, 0xdc, 0x89, 0x89, 0x6f, 0xbb, 0x59, 0x81, 0x62, 0x6f, 0x04, 0xb7, 0x94, 0x5d, 0x4d, 0xb1, + 0x54, 0xe2, 0x96, 0x8e, 0x6e, 0xba, 0xb3, 0xa9, 0x2b, 0xd7, 0x79, 0xc5, 0x59, 0x64, 0xc2, 0x66, + 0x66, 0x87, 0xe3, 0xcf, 0x32, 0x8b, 0x73, 0x5c, 0xdd, 0xda, 0x70, 0x59, 0xdd, 0xd4, 0x58, 0x79, + 0x43, 0xc2, 0x90, 0x86, 0xbc, 0x48, 0x53, 0xca, 0x9b, 0x30, 0x70, 0xea, 0x7d, 0xfe, 0x6c, 0x8f, + 0x45, 0x07, 0x7d, 0x02, 0x7b, 0x4d, 0x3b, 0x4e, 0xdd, 0x2c, 0x76, 0x84, 0x28, 0xd9, 0xfd, 0x7a, + 0x50, 0x08, 0xc5, 0x67, 0xb2, 0x13, 0x2c, 0x0a, 0xe7, 0xab, 0x36, 0x51, 0x9c, 0x92, 0xe8, 0xdf, + 0xc2, 0x9d, 0x57, 0x9a, 0x93, 0x3b, 0x10, 0x86, 0x62, 0x28, 0xbf, 0x13, 0x83, 0x1f, 0x2f, 0x67, + 0x50, 0x80, 0xf1, 0x8c, 0xe6, 0x43, 0x02, 0xa0, 0x3c, 0x27, 0x95, 0x60, 0x43, 0x3e, 0xa3, 0x68, + 0xef, 0xb0, 0x5b, 0xe6, 0x17, 0x06, 0x7e, 0x62, 0x9d, 0x77, 0x3b, 0xed, 0xcf, 0x8d, 0xce, 0x13, + 0x2d, 0x87, 0xca, 0x50, 0x48, 0x5b, 0x79, 0xd6, 0x32, 0x7b, 0xfd, 0x7e, 0xbb, 0xd9, 0x31, 0xb4, + 0x15, 0x04, 0xb0, 0x2e, 0x35, 0xab, 0xa8, 0x0a, 0x25, 0x0e, 0x95, 0x82, 0xb5, 0xa3, 0xef, 0x73, + 0x50, 0xe1, 0x63, 0x68, 0x24, 0xff, 0x02, 0xa1, 0x3f, 0xe5, 0x60, 0xfb, 0x15, 0xb3, 0x45, 0x3f, + 0x5f, 0x54, 0x1e, 0xbd, 0x36, 0x20, 0x7b, 0x8f, 0xde, 0x06, 0x2a, 0x3c, 0xa1, 0x7f, 0xf0, 0xdd, + 0x5f, 0xfe, 0xfe, 0xbb, 0xfc, 0x7d, 0x7d, 0x6f, 0xfe, 0x8f, 0xab, 0xe8, 0x91, 0x5c, 0xdb, 0xe4, + 0x51, 0xee, 0xc3, 0xe6, 0x77, 0x39, 0x78, 0xdf, 0xa1, 0x93, 0x37, 0x1b, 0x6a, 0x6e, 0x67, 0xe7, + 0x6a, 0x86, 0x34, 0xa6, 0x66, 0xee, 0x69, 0x4b, 0xa2, 0xc6, 0x94, 0x15, 0x08, 0x75, 0x1a, 0x8e, + 0x0f, 0xc7, 0xc4, 0xe7, 0xff, 0xf7, 0x1c, 0x0a, 0x95, 0x1d, 0xb8, 0xd1, 0x6b, 0xfe, 0x71, 0xfa, + 0x54, 0x08, 0xfe, 0x95, 0xcb, 0x5d, 0xae, 0x73, 0xc8, 0xc3, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, + 0xb8, 0x10, 0xf5, 0x85, 0x78, 0x1b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/text_annotation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/text_annotation.pb.go new file mode 100644 index 0000000..168bccc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/text_annotation.pb.go @@ -0,0 +1,791 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p1beta1/text_annotation.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to denote the type of break found. New line, space etc. +type TextAnnotation_DetectedBreak_BreakType int32 + +const ( + // Unknown break label type. + TextAnnotation_DetectedBreak_UNKNOWN TextAnnotation_DetectedBreak_BreakType = 0 + // Regular space. 
+ TextAnnotation_DetectedBreak_SPACE TextAnnotation_DetectedBreak_BreakType = 1 + // Sure space (very wide). + TextAnnotation_DetectedBreak_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 2 + // Line-wrapping break. + TextAnnotation_DetectedBreak_EOL_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 3 + // End-line hyphen that is not present in text; does not co-occur with + // `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + TextAnnotation_DetectedBreak_HYPHEN TextAnnotation_DetectedBreak_BreakType = 4 + // Line break that ends a paragraph. + TextAnnotation_DetectedBreak_LINE_BREAK TextAnnotation_DetectedBreak_BreakType = 5 +) + +var TextAnnotation_DetectedBreak_BreakType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SPACE", + 2: "SURE_SPACE", + 3: "EOL_SURE_SPACE", + 4: "HYPHEN", + 5: "LINE_BREAK", +} +var TextAnnotation_DetectedBreak_BreakType_value = map[string]int32{ + "UNKNOWN": 0, + "SPACE": 1, + "SURE_SPACE": 2, + "EOL_SURE_SPACE": 3, + "HYPHEN": 4, + "LINE_BREAK": 5, +} + +func (x TextAnnotation_DetectedBreak_BreakType) String() string { + return proto.EnumName(TextAnnotation_DetectedBreak_BreakType_name, int32(x)) +} +func (TextAnnotation_DetectedBreak_BreakType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{0, 1, 0} +} + +// Type of a block (text, image etc) as identified by OCR. +type Block_BlockType int32 + +const ( + // Unknown block type. + Block_UNKNOWN Block_BlockType = 0 + // Regular text block. + Block_TEXT Block_BlockType = 1 + // Table block. + Block_TABLE Block_BlockType = 2 + // Image block. + Block_PICTURE Block_BlockType = 3 + // Horizontal/vertical line box. + Block_RULER Block_BlockType = 4 + // Barcode block. + Block_BARCODE Block_BlockType = 5 +) + +var Block_BlockType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "TEXT", + 2: "TABLE", + 3: "PICTURE", + 4: "RULER", + 5: "BARCODE", +} +var Block_BlockType_value = map[string]int32{ + "UNKNOWN": 0, + "TEXT": 1, + "TABLE": 2, + "PICTURE": 3, + "RULER": 4, + "BARCODE": 5, +} + +func (x Block_BlockType) String() string { + return proto.EnumName(Block_BlockType_name, int32(x)) +} +func (Block_BlockType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{2, 0} +} + +// TextAnnotation contains a structured representation of OCR extracted text. +// The hierarchy of an OCR extracted text structure is like this: +// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol +// Each structural component, starting from Page, may further have their own +// properties. Properties describe detected languages, breaks etc.. Please refer +// to the +// [TextAnnotation.TextProperty][google.cloud.vision.v1p1beta1.TextAnnotation.TextProperty] +// message definition below for more detail. +type TextAnnotation struct { + // List of pages detected by OCR. + Pages []*Page `protobuf:"bytes,1,rep,name=pages,proto3" json:"pages,omitempty"` + // UTF-8 text detected on the pages. 
+ Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{0} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +// Detected language for a structural component. +type TextAnnotation_DetectedLanguage struct { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Confidence of detected language. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedLanguage) Reset() { *m = TextAnnotation_DetectedLanguage{} } +func (m *TextAnnotation_DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedLanguage) ProtoMessage() {} +func (*TextAnnotation_DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{0, 0} +} +func (m *TextAnnotation_DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedLanguage.Merge(dst, src) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Size(m) +} +func (m *TextAnnotation_DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedLanguage proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *TextAnnotation_DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected start or end of a structural component. 
+type TextAnnotation_DetectedBreak struct { + // Detected break type. + Type TextAnnotation_DetectedBreak_BreakType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p1beta1.TextAnnotation_DetectedBreak_BreakType" json:"type,omitempty"` + // True if break prepends the element. + IsPrefix bool `protobuf:"varint,2,opt,name=is_prefix,json=isPrefix,proto3" json:"is_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedBreak) Reset() { *m = TextAnnotation_DetectedBreak{} } +func (m *TextAnnotation_DetectedBreak) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedBreak) ProtoMessage() {} +func (*TextAnnotation_DetectedBreak) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{0, 1} +} +func (m *TextAnnotation_DetectedBreak) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedBreak) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedBreak) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedBreak.Merge(dst, src) +} +func (m *TextAnnotation_DetectedBreak) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Size(m) +} +func (m *TextAnnotation_DetectedBreak) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedBreak.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedBreak proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedBreak) GetType() TextAnnotation_DetectedBreak_BreakType { + if m != nil { + return m.Type + } + return TextAnnotation_DetectedBreak_UNKNOWN +} + +func (m *TextAnnotation_DetectedBreak) GetIsPrefix() bool { + if m != nil { + return m.IsPrefix + } + return false +} + +// Additional information detected on the structural component. +type TextAnnotation_TextProperty struct { + // A list of detected languages together with confidence. + DetectedLanguages []*TextAnnotation_DetectedLanguage `protobuf:"bytes,1,rep,name=detected_languages,json=detectedLanguages,proto3" json:"detected_languages,omitempty"` + // Detected start or end of a text segment. 
+ DetectedBreak *TextAnnotation_DetectedBreak `protobuf:"bytes,2,opt,name=detected_break,json=detectedBreak,proto3" json:"detected_break,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_TextProperty) Reset() { *m = TextAnnotation_TextProperty{} } +func (m *TextAnnotation_TextProperty) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_TextProperty) ProtoMessage() {} +func (*TextAnnotation_TextProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{0, 2} +} +func (m *TextAnnotation_TextProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_TextProperty.Unmarshal(m, b) +} +func (m *TextAnnotation_TextProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_TextProperty.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_TextProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_TextProperty.Merge(dst, src) +} +func (m *TextAnnotation_TextProperty) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_TextProperty.Size(m) +} +func (m *TextAnnotation_TextProperty) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_TextProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_TextProperty proto.InternalMessageInfo + +func (m *TextAnnotation_TextProperty) GetDetectedLanguages() []*TextAnnotation_DetectedLanguage { + if m != nil { + return m.DetectedLanguages + } + return nil +} + +func (m *TextAnnotation_TextProperty) GetDetectedBreak() *TextAnnotation_DetectedBreak { + if m != nil { + return m.DetectedBreak + } + return nil +} + +// Detected page from OCR. +type Page struct { + // Additional information detected on the page. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Page width in pixels. + Width int32 `protobuf:"varint,2,opt,name=width,proto3" json:"width,omitempty"` + // Page height in pixels. + Height int32 `protobuf:"varint,3,opt,name=height,proto3" json:"height,omitempty"` + // List of blocks of text, images etc on this page. + Blocks []*Block `protobuf:"bytes,4,rep,name=blocks,proto3" json:"blocks,omitempty"` + // Confidence of the OCR results on the page. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{1} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Page) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Page) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *Page) GetBlocks() []*Block { + if m != nil { + return m.Blocks + } + return nil +} + +func (m *Page) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Logical element on the page. +type Block struct { + // Additional information detected for the block. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the block. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of paragraphs in this block (if this blocks is of type text). + Paragraphs []*Paragraph `protobuf:"bytes,3,rep,name=paragraphs,proto3" json:"paragraphs,omitempty"` + // Detected block type (text, image etc) for this block. + BlockType Block_BlockType `protobuf:"varint,4,opt,name=block_type,json=blockType,proto3,enum=google.cloud.vision.v1p1beta1.Block_BlockType" json:"block_type,omitempty"` + // Confidence of the OCR results on the block. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Block) Reset() { *m = Block{} } +func (m *Block) String() string { return proto.CompactTextString(m) } +func (*Block) ProtoMessage() {} +func (*Block) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{2} +} +func (m *Block) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Block.Unmarshal(m, b) +} +func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Block.Marshal(b, m, deterministic) +} +func (dst *Block) XXX_Merge(src proto.Message) { + xxx_messageInfo_Block.Merge(dst, src) +} +func (m *Block) XXX_Size() int { + return xxx_messageInfo_Block.Size(m) +} +func (m *Block) XXX_DiscardUnknown() { + xxx_messageInfo_Block.DiscardUnknown(m) +} + +var xxx_messageInfo_Block proto.InternalMessageInfo + +func (m *Block) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Block) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Block) GetParagraphs() []*Paragraph { + if m != nil { + return m.Paragraphs + } + return nil +} + +func (m *Block) GetBlockType() Block_BlockType { + if m != nil { + return m.BlockType + } + return Block_UNKNOWN +} + +func (m *Block) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Structural unit of text representing a number of words in certain order. +type Paragraph struct { + // Additional information detected for the paragraph. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the paragraph. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of words in this paragraph. + Words []*Word `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + // Confidence of the OCR results for the paragraph. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Paragraph) Reset() { *m = Paragraph{} } +func (m *Paragraph) String() string { return proto.CompactTextString(m) } +func (*Paragraph) ProtoMessage() {} +func (*Paragraph) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{3} +} +func (m *Paragraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Paragraph.Unmarshal(m, b) +} +func (m *Paragraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Paragraph.Marshal(b, m, deterministic) +} +func (dst *Paragraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_Paragraph.Merge(dst, src) +} +func (m *Paragraph) XXX_Size() int { + return xxx_messageInfo_Paragraph.Size(m) +} +func (m *Paragraph) XXX_DiscardUnknown() { + xxx_messageInfo_Paragraph.DiscardUnknown(m) +} + +var xxx_messageInfo_Paragraph proto.InternalMessageInfo + +func (m *Paragraph) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Paragraph) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Paragraph) GetWords() []*Word { + if m != nil { + return m.Words + } + return nil +} + +func (m *Paragraph) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A word representation. +type Word struct { + // Additional information detected for the word. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the word. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of symbols in the word. + // The order of the symbols follows the natural reading order. + Symbols []*Symbol `protobuf:"bytes,3,rep,name=symbols,proto3" json:"symbols,omitempty"` + // Confidence of the OCR results for the word. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Word) Reset() { *m = Word{} } +func (m *Word) String() string { return proto.CompactTextString(m) } +func (*Word) ProtoMessage() {} +func (*Word) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{4} +} +func (m *Word) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Word.Unmarshal(m, b) +} +func (m *Word) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Word.Marshal(b, m, deterministic) +} +func (dst *Word) XXX_Merge(src proto.Message) { + xxx_messageInfo_Word.Merge(dst, src) +} +func (m *Word) XXX_Size() int { + return xxx_messageInfo_Word.Size(m) +} +func (m *Word) XXX_DiscardUnknown() { + xxx_messageInfo_Word.DiscardUnknown(m) +} + +var xxx_messageInfo_Word proto.InternalMessageInfo + +func (m *Word) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Word) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Word) GetSymbols() []*Symbol { + if m != nil { + return m.Symbols + } + return nil +} + +func (m *Word) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A single symbol representation. +type Symbol struct { + // Additional information detected for the symbol. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the symbol. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // The actual UTF-8 representation of the symbol. + Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` + // Confidence of the OCR results for the symbol. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Symbol) Reset() { *m = Symbol{} } +func (m *Symbol) String() string { return proto.CompactTextString(m) } +func (*Symbol) ProtoMessage() {} +func (*Symbol) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_66d065b83e9aab34, []int{5} +} +func (m *Symbol) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Symbol.Unmarshal(m, b) +} +func (m *Symbol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Symbol.Marshal(b, m, deterministic) +} +func (dst *Symbol) XXX_Merge(src proto.Message) { + xxx_messageInfo_Symbol.Merge(dst, src) +} +func (m *Symbol) XXX_Size() int { + return xxx_messageInfo_Symbol.Size(m) +} +func (m *Symbol) XXX_DiscardUnknown() { + xxx_messageInfo_Symbol.DiscardUnknown(m) +} + +var xxx_messageInfo_Symbol proto.InternalMessageInfo + +func (m *Symbol) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Symbol) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Symbol) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Symbol) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func init() { + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.vision.v1p1beta1.TextAnnotation") + proto.RegisterType((*TextAnnotation_DetectedLanguage)(nil), "google.cloud.vision.v1p1beta1.TextAnnotation.DetectedLanguage") + proto.RegisterType((*TextAnnotation_DetectedBreak)(nil), "google.cloud.vision.v1p1beta1.TextAnnotation.DetectedBreak") + proto.RegisterType((*TextAnnotation_TextProperty)(nil), "google.cloud.vision.v1p1beta1.TextAnnotation.TextProperty") + proto.RegisterType((*Page)(nil), "google.cloud.vision.v1p1beta1.Page") + proto.RegisterType((*Block)(nil), "google.cloud.vision.v1p1beta1.Block") + proto.RegisterType((*Paragraph)(nil), "google.cloud.vision.v1p1beta1.Paragraph") + proto.RegisterType((*Word)(nil), "google.cloud.vision.v1p1beta1.Word") + proto.RegisterType((*Symbol)(nil), "google.cloud.vision.v1p1beta1.Symbol") + proto.RegisterEnum("google.cloud.vision.v1p1beta1.TextAnnotation_DetectedBreak_BreakType", TextAnnotation_DetectedBreak_BreakType_name, TextAnnotation_DetectedBreak_BreakType_value) + proto.RegisterEnum("google.cloud.vision.v1p1beta1.Block_BlockType", Block_BlockType_name, Block_BlockType_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p1beta1/text_annotation.proto", fileDescriptor_text_annotation_66d065b83e9aab34) +} + +var fileDescriptor_text_annotation_66d065b83e9aab34 = []byte{ + // 775 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0xd3, 0x48, + 0x14, 0x5f, 0x27, 0x76, 0x1a, 0xbf, 0xb4, 0x91, 0x77, 0x76, 0xb5, 0x8a, 0xb2, 0xbb, 0xa8, 0xa4, + 0x20, 0x55, 0x02, 0x39, 0x6a, 0x7a, 0x2a, 0x45, 0xa0, 0x38, 0xb5, 0xd4, 0xaa, 0x21, 0xb5, 0xa6, + 0x09, 0xa5, 0x5c, 0x2c, 0xff, 0x99, 0x3a, 0x56, 0x13, 0x8f, 0x65, 0xbb, 0x6d, 0x72, 0xe5, 0x8a, + 0x04, 0x5f, 0x88, 0x2f, 0x83, 0xc4, 0x09, 0xf1, 0x01, 0x38, 0x22, 0x8f, 0xed, 0x34, 0x09, 0xa2, + 0xe6, 0x8f, 0x38, 0xf4, 0x12, 0xcd, 0x7b, 0x79, 0xbf, 0x37, 0xef, 0xf7, 0x7b, 0xf3, 0x3c, 0x03, + 0xdb, 0x0e, 0xa5, 0xce, 0x88, 0x34, 0xad, 0x11, 0xbd, 0xb0, 0x9b, 0x97, 0x6e, 
0xe8, 0x52, 0xaf, + 0x79, 0xb9, 0xe5, 0x6f, 0x99, 0x24, 0x32, 0xb6, 0x9a, 0x11, 0x99, 0x44, 0xba, 0xe1, 0x79, 0x34, + 0x32, 0x22, 0x97, 0x7a, 0xb2, 0x1f, 0xd0, 0x88, 0xa2, 0xff, 0x13, 0x90, 0xcc, 0x40, 0x72, 0x02, + 0x92, 0x67, 0xa0, 0xfa, 0x7f, 0x69, 0x4e, 0xc3, 0x77, 0x9b, 0xd7, 0xd8, 0x30, 0x01, 0xd7, 0x1f, + 0xde, 0xbc, 0xa3, 0x43, 0xe8, 0x98, 0x44, 0xc1, 0x34, 0x89, 0x6e, 0xbc, 0x16, 0xa0, 0xda, 0x27, + 0x93, 0xa8, 0x3d, 0xcb, 0x83, 0x76, 0x40, 0xf0, 0x0d, 0x87, 0x84, 0x35, 0x6e, 0xbd, 0xb8, 0x59, + 0x69, 0x6d, 0xc8, 0x37, 0x56, 0x23, 0x6b, 0x86, 0x43, 0x70, 0x82, 0x40, 0x08, 0xf8, 0x98, 0x51, + 0xad, 0xb0, 0xce, 0x6d, 0x8a, 0x98, 0xad, 0xeb, 0x27, 0x20, 0xed, 0x91, 0x88, 0x58, 0x11, 0xb1, + 0xbb, 0x86, 0xe7, 0x5c, 0x18, 0x0e, 0x41, 0x1b, 0xb0, 0x36, 0x4a, 0xd7, 0xba, 0x45, 0x6d, 0x52, + 0xe3, 0x18, 0x60, 0x35, 0x73, 0x76, 0xa8, 0x4d, 0xd0, 0x1d, 0x00, 0x8b, 0x7a, 0x67, 0xae, 0x4d, + 0x3c, 0x8b, 0xb0, 0x94, 0x05, 0x3c, 0xe7, 0xa9, 0x7f, 0xe2, 0x60, 0x2d, 0xcb, 0xac, 0x04, 0xc4, + 0x38, 0x47, 0xa7, 0xc0, 0x47, 0x53, 0x3f, 0xc9, 0x56, 0x6d, 0xa9, 0x39, 0x85, 0x2f, 0xd2, 0x96, + 0x17, 0x52, 0xc9, 0xec, 0xb7, 0x3f, 0xf5, 0x09, 0x66, 0x29, 0xd1, 0xbf, 0x20, 0xba, 0xa1, 0xee, + 0x07, 0xe4, 0xcc, 0x9d, 0xb0, 0x5a, 0xca, 0xb8, 0xec, 0x86, 0x1a, 0xb3, 0x1b, 0x16, 0x88, 0xb3, + 0x78, 0x54, 0x81, 0x95, 0x41, 0xef, 0xb0, 0x77, 0x74, 0xd2, 0x93, 0xfe, 0x40, 0x22, 0x08, 0xc7, + 0x5a, 0xbb, 0xa3, 0x4a, 0x1c, 0xaa, 0x02, 0x1c, 0x0f, 0xb0, 0xaa, 0x27, 0x76, 0x01, 0x21, 0xa8, + 0xaa, 0x47, 0x5d, 0x7d, 0xce, 0x57, 0x44, 0x00, 0xa5, 0xfd, 0x53, 0x6d, 0x5f, 0xed, 0x49, 0x7c, + 0x1c, 0xdf, 0x3d, 0xe8, 0xa9, 0xba, 0x82, 0xd5, 0xf6, 0xa1, 0x24, 0xd4, 0xdf, 0x73, 0xb0, 0x1a, + 0x97, 0xac, 0x05, 0xd4, 0x27, 0x41, 0x34, 0x45, 0x63, 0x40, 0x76, 0x5a, 0xb3, 0x9e, 0x09, 0x97, + 0x35, 0xed, 0xc9, 0xcf, 0x71, 0xcf, 0x1a, 0x84, 0xff, 0xb4, 0x97, 0x3c, 0x21, 0x32, 0xa1, 0x3a, + 0xdb, 0xce, 0x8c, 0xd9, 0x32, 0x19, 0x2a, 0xad, 0xdd, 0x5f, 0x90, 0x19, 0xaf, 0xd9, 0xf3, 0x66, + 0xe3, 0x23, 0x07, 0x7c, 0x7c, 0x9e, 0xd0, 0x73, 0x28, 0xfb, 0x29, 0x4f, 0xd6, 0xcd, 0x4a, 0xeb, + 0xd1, 0x8f, 0x6d, 0x33, 0xaf, 0x14, 0x9e, 0xe5, 0x42, 0x7f, 0x83, 0x70, 0xe5, 0xda, 0xd1, 0x90, + 0xd5, 0x2e, 0xe0, 0xc4, 0x40, 0xff, 0x40, 0x69, 0x48, 0x5c, 0x67, 0x18, 0xd5, 0x8a, 0xcc, 0x9d, + 0x5a, 0xe8, 0x31, 0x94, 0xcc, 0x11, 0xb5, 0xce, 0xc3, 0x1a, 0xcf, 0x54, 0xbd, 0x97, 0x53, 0x83, + 0x12, 0x07, 0xe3, 0x14, 0xb3, 0x74, 0x7e, 0x85, 0xe5, 0xf3, 0xdb, 0x78, 0x57, 0x04, 0x81, 0x21, + 0x7e, 0x1b, 0xdb, 0x1e, 0xac, 0x9a, 0xf4, 0xc2, 0xb3, 0x5d, 0xcf, 0xd1, 0x4d, 0x3a, 0x49, 0x1b, + 0xf6, 0x20, 0x8f, 0x45, 0x0a, 0xd1, 0xe8, 0x68, 0x8a, 0x2b, 0x59, 0x02, 0x85, 0x4e, 0xd0, 0x3e, + 0x80, 0x6f, 0x04, 0x86, 0x13, 0x18, 0xfe, 0x30, 0xac, 0x15, 0x99, 0x26, 0x9b, 0xb9, 0x9f, 0x87, + 0x14, 0x80, 0xe7, 0xb0, 0xe8, 0x19, 0x00, 0x53, 0x49, 0x67, 0xf3, 0xca, 0xb3, 0x79, 0x95, 0xbf, + 0x47, 0xdd, 0xe4, 0x97, 0x0d, 0xa6, 0x68, 0x66, 0xcb, 0x5c, 0xa9, 0x31, 0x88, 0x33, 0xdc, 0xe2, + 0x80, 0x96, 0x81, 0xef, 0xab, 0x2f, 0xfa, 0x12, 0x17, 0x8f, 0x6a, 0xbf, 0xad, 0x74, 0xe3, 0xd1, + 0xac, 0xc0, 0x8a, 0x76, 0xd0, 0xe9, 0x0f, 0x70, 0x3c, 0x93, 0x22, 0x08, 0x78, 0xd0, 0x55, 0xb1, + 0xc4, 0xc7, 0x7e, 0xa5, 0x8d, 0x3b, 0x47, 0x7b, 0xaa, 0x24, 0x34, 0xde, 0x14, 0x40, 0x9c, 0x91, + 0xbb, 0x35, 0x2d, 0xdc, 0x01, 0xe1, 0x8a, 0x06, 0x76, 0xd6, 0xbd, 0xbc, 0x8f, 0xfb, 0x09, 0x0d, + 0x6c, 0x9c, 0x20, 0x96, 0x44, 0xe6, 0xbf, 0x12, 0xf9, 0x6d, 0x01, 0xf8, 0x38, 0xfe, 0xd6, 0x68, + 0xf1, 0x14, 0x56, 0xc2, 0xe9, 0xd8, 0xa4, 0xa3, 0x4c, 0x8d, 0xfb, 0x39, 0xa9, 0x8e, 0x59, 0x34, + 0xce, 
0x50, 0xb9, 0x8a, 0x7c, 0xe0, 0xa0, 0x94, 0x60, 0x6e, 0x8d, 0x26, 0xd9, 0x0d, 0x5e, 0xbc, + 0xbe, 0xc1, 0xf3, 0x68, 0x2a, 0xaf, 0x38, 0xb8, 0x6b, 0xd1, 0xf1, 0xcd, 0x7b, 0x2a, 0x7f, 0x2d, + 0x12, 0xd2, 0xe2, 0xe7, 0x87, 0xc6, 0xbd, 0xec, 0xa4, 0x28, 0x87, 0xc6, 0x77, 0x98, 0x4c, 0x03, + 0xa7, 0xe9, 0x10, 0x8f, 0x3d, 0x4e, 0x9a, 0xc9, 0x5f, 0x86, 0xef, 0x86, 0xdf, 0x78, 0xcd, 0xec, + 0x26, 0x8e, 0xcf, 0x1c, 0x67, 0x96, 0x18, 0x64, 0xfb, 0x4b, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb1, + 0xa1, 0x02, 0xbb, 0x71, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/web_detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/web_detection.pb.go new file mode 100644 index 0000000..3cf6181 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1/web_detection.pb.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p1beta1/web_detection.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Relevant information for the image from the Internet. +type WebDetection struct { + // Deduced entities from similar images on the Internet. + WebEntities []*WebDetection_WebEntity `protobuf:"bytes,1,rep,name=web_entities,json=webEntities,proto3" json:"web_entities,omitempty"` + // Fully matching images from the Internet. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,2,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images from the Internet. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,3,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + // Web pages containing the matching images from the Internet. + PagesWithMatchingImages []*WebDetection_WebPage `protobuf:"bytes,4,rep,name=pages_with_matching_images,json=pagesWithMatchingImages,proto3" json:"pages_with_matching_images,omitempty"` + // The visually similar image results. + VisuallySimilarImages []*WebDetection_WebImage `protobuf:"bytes,6,rep,name=visually_similar_images,json=visuallySimilarImages,proto3" json:"visually_similar_images,omitempty"` + // Best guess text labels for the request image. 
+ BestGuessLabels []*WebDetection_WebLabel `protobuf:"bytes,8,rep,name=best_guess_labels,json=bestGuessLabels,proto3" json:"best_guess_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection) Reset() { *m = WebDetection{} } +func (m *WebDetection) String() string { return proto.CompactTextString(m) } +func (*WebDetection) ProtoMessage() {} +func (*WebDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_fd6fa85ea390cf49, []int{0} +} +func (m *WebDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection.Unmarshal(m, b) +} +func (m *WebDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection.Marshal(b, m, deterministic) +} +func (dst *WebDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection.Merge(dst, src) +} +func (m *WebDetection) XXX_Size() int { + return xxx_messageInfo_WebDetection.Size(m) +} +func (m *WebDetection) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection proto.InternalMessageInfo + +func (m *WebDetection) GetWebEntities() []*WebDetection_WebEntity { + if m != nil { + return m.WebEntities + } + return nil +} + +func (m *WebDetection) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func (m *WebDetection) GetPagesWithMatchingImages() []*WebDetection_WebPage { + if m != nil { + return m.PagesWithMatchingImages + } + return nil +} + +func (m *WebDetection) GetVisuallySimilarImages() []*WebDetection_WebImage { + if m != nil { + return m.VisuallySimilarImages + } + return nil +} + +func (m *WebDetection) GetBestGuessLabels() []*WebDetection_WebLabel { + if m != nil { + return m.BestGuessLabels + } + return nil +} + +// Entity deduced from similar images on the Internet. +type WebDetection_WebEntity struct { + // Opaque entity ID. + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Overall relevancy score for the entity. + // Not normalized and not comparable across different image queries. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Canonical description of the entity, in English. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebEntity) Reset() { *m = WebDetection_WebEntity{} } +func (m *WebDetection_WebEntity) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebEntity) ProtoMessage() {} +func (*WebDetection_WebEntity) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_fd6fa85ea390cf49, []int{0, 0} +} +func (m *WebDetection_WebEntity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebEntity.Unmarshal(m, b) +} +func (m *WebDetection_WebEntity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebEntity.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebEntity) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebEntity.Merge(dst, src) +} +func (m *WebDetection_WebEntity) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebEntity.Size(m) +} +func (m *WebDetection_WebEntity) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebEntity.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebEntity proto.InternalMessageInfo + +func (m *WebDetection_WebEntity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *WebDetection_WebEntity) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebEntity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Metadata for online images. +type WebDetection_WebImage struct { + // The result image URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the image. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebImage) Reset() { *m = WebDetection_WebImage{} } +func (m *WebDetection_WebImage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebImage) ProtoMessage() {} +func (*WebDetection_WebImage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_fd6fa85ea390cf49, []int{0, 1} +} +func (m *WebDetection_WebImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebImage.Unmarshal(m, b) +} +func (m *WebDetection_WebImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebImage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebImage.Merge(dst, src) +} +func (m *WebDetection_WebImage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebImage.Size(m) +} +func (m *WebDetection_WebImage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebImage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebImage proto.InternalMessageInfo + +func (m *WebDetection_WebImage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebImage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Metadata for web pages. +type WebDetection_WebPage struct { + // The result web page URL. 
+ Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the web page. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Title for the web page, may contain HTML markups. + PageTitle string `protobuf:"bytes,3,opt,name=page_title,json=pageTitle,proto3" json:"page_title,omitempty"` + // Fully matching images on the page. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,4,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images on the page. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its + // crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,5,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebPage) Reset() { *m = WebDetection_WebPage{} } +func (m *WebDetection_WebPage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebPage) ProtoMessage() {} +func (*WebDetection_WebPage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_fd6fa85ea390cf49, []int{0, 2} +} +func (m *WebDetection_WebPage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebPage.Unmarshal(m, b) +} +func (m *WebDetection_WebPage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebPage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebPage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebPage.Merge(dst, src) +} +func (m *WebDetection_WebPage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebPage.Size(m) +} +func (m *WebDetection_WebPage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebPage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebPage proto.InternalMessageInfo + +func (m *WebDetection_WebPage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebPage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebPage) GetPageTitle() string { + if m != nil { + return m.PageTitle + } + return "" +} + +func (m *WebDetection_WebPage) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection_WebPage) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +// Label to provide extra metadata for the web detection. +type WebDetection_WebLabel struct { + // Label for extra metadata. + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebLabel) Reset() { *m = WebDetection_WebLabel{} } +func (m *WebDetection_WebLabel) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebLabel) ProtoMessage() {} +func (*WebDetection_WebLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_fd6fa85ea390cf49, []int{0, 3} +} +func (m *WebDetection_WebLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebLabel.Unmarshal(m, b) +} +func (m *WebDetection_WebLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebLabel.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebLabel.Merge(dst, src) +} +func (m *WebDetection_WebLabel) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebLabel.Size(m) +} +func (m *WebDetection_WebLabel) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebLabel proto.InternalMessageInfo + +func (m *WebDetection_WebLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *WebDetection_WebLabel) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func init() { + proto.RegisterType((*WebDetection)(nil), "google.cloud.vision.v1p1beta1.WebDetection") + proto.RegisterType((*WebDetection_WebEntity)(nil), "google.cloud.vision.v1p1beta1.WebDetection.WebEntity") + proto.RegisterType((*WebDetection_WebImage)(nil), "google.cloud.vision.v1p1beta1.WebDetection.WebImage") + proto.RegisterType((*WebDetection_WebPage)(nil), "google.cloud.vision.v1p1beta1.WebDetection.WebPage") + proto.RegisterType((*WebDetection_WebLabel)(nil), "google.cloud.vision.v1p1beta1.WebDetection.WebLabel") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p1beta1/web_detection.proto", fileDescriptor_web_detection_fd6fa85ea390cf49) +} + +var fileDescriptor_web_detection_fd6fa85ea390cf49 = []byte{ + // 511 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x95, 0x76, 0x1b, 0x8b, 0x5b, 0x04, 0xb3, 0x86, 0x16, 0x05, 0x26, 0x15, 0xb8, 0xf4, + 0x94, 0xa8, 0x1b, 0x9c, 0xb8, 0x6d, 0x4c, 0x68, 0x12, 0x48, 0x55, 0x40, 0x1a, 0xe2, 0x92, 0x39, + 0x89, 0x97, 0xbe, 0x92, 0x1b, 0x47, 0xb1, 0xd3, 0xaa, 0x37, 0x4e, 0x7c, 0x14, 0x3e, 0x23, 0x47, + 0xf4, 0xda, 0xce, 0x54, 0x51, 0x36, 0x31, 0x86, 0xb8, 0xf9, 0x7d, 0xac, 0xe7, 0xf9, 0xd9, 0xaf, + 0xff, 0x90, 0x49, 0x29, 0x65, 0x29, 0x78, 0x9c, 0x0b, 0xd9, 0x16, 0xf1, 0x02, 0x14, 0xc8, 0x2a, + 0x5e, 0x4c, 0xea, 0x49, 0xc6, 0x35, 0x9b, 0xc4, 0x4b, 0x9e, 0xa5, 0x05, 0xd7, 0x3c, 0xd7, 0x20, + 0xab, 0xa8, 0x6e, 0xa4, 0x96, 0xf4, 0xd0, 0x5a, 0x22, 0x63, 0x89, 0xac, 0x25, 0xba, 0xb6, 0x84, + 0xcf, 0x5c, 0x22, 0xab, 0x21, 0x66, 0x55, 0x25, 0x35, 0x43, 0xaf, 0xb2, 0xe6, 0x17, 0xdf, 0x7c, + 0x32, 0xbc, 0xe0, 0xd9, 0xdb, 0x2e, 0x93, 0x7e, 0x26, 0x43, 0x84, 0xf0, 0x4a, 0x83, 0x06, 0xae, + 0x02, 0x6f, 0xd4, 0x1f, 0x0f, 0x8e, 0x5e, 0x47, 0xb7, 0x42, 0xa2, 0xf5, 0x08, 0x2c, 0xce, 0xd0, + 0xbe, 0x4a, 0x06, 0x4b, 0x37, 0x04, 0xae, 0xe8, 0x15, 0xd9, 0xbf, 0x6a, 0x85, 0x48, 0xe7, 0x4c, + 0xe7, 0x33, 0xa8, 0xca, 0x14, 0xe6, 0xac, 0xe4, 
0x2a, 0xe8, 0x19, 0xc2, 0xab, 0x3b, 0x12, 0xce, + 0xd1, 0x9c, 0x50, 0x4c, 0xfc, 0xe0, 0x02, 0x8d, 0xa4, 0xa8, 0x20, 0x07, 0x35, 0x6b, 0x34, 0xb0, + 0x4d, 0x54, 0xff, 0x1e, 0xa8, 0x27, 0x2e, 0xf4, 0x17, 0x5a, 0x4d, 0xc2, 0x1a, 0x07, 0xe9, 0x12, + 0xf4, 0x6c, 0x03, 0xb8, 0x65, 0x80, 0xc7, 0x77, 0x04, 0x4e, 0x91, 0x77, 0x60, 0x62, 0x2f, 0x40, + 0xcf, 0x36, 0xf7, 0xb7, 0x00, 0xd5, 0x32, 0x21, 0x56, 0xa9, 0x82, 0x39, 0x08, 0xd6, 0x74, 0xb8, + 0x9d, 0xfb, 0xec, 0xaf, 0x0b, 0xfd, 0x68, 0x33, 0x1d, 0xed, 0x92, 0xec, 0x65, 0x5c, 0xe9, 0xb4, + 0x6c, 0xb9, 0x52, 0xa9, 0x60, 0x19, 0x17, 0x2a, 0xd8, 0xfd, 0x2b, 0xce, 0x7b, 0x34, 0x27, 0x8f, + 0x30, 0xee, 0x1d, 0xa6, 0x99, 0x5a, 0x85, 0x97, 0xc4, 0xbf, 0xbe, 0x31, 0xf4, 0x29, 0xf1, 0xcd, + 0xd5, 0x5b, 0xa5, 0x50, 0x04, 0xde, 0xc8, 0x1b, 0xfb, 0xc9, 0xae, 0x15, 0xce, 0x0b, 0xba, 0x4f, + 0xb6, 0x55, 0x2e, 0x1b, 0x1e, 0xf4, 0x46, 0xde, 0xb8, 0x97, 0xd8, 0x82, 0x8e, 0xc8, 0xa0, 0xe0, + 0x2a, 0x6f, 0xa0, 0x46, 0x50, 0xd0, 0x37, 0xa6, 0x75, 0x29, 0x3c, 0x22, 0xbb, 0xdd, 0x36, 0xe9, + 0x63, 0xd2, 0x6f, 0x1b, 0xe1, 0xa2, 0x71, 0xf8, 0xfb, 0xd4, 0xf0, 0x7b, 0x8f, 0x3c, 0x70, 0x47, + 0xf1, 0xa7, 0x1e, 0x7a, 0x48, 0x08, 0x1e, 0x5a, 0xaa, 0x41, 0x0b, 0xee, 0x16, 0xe2, 0xa3, 0xf2, + 0x09, 0x85, 0x1b, 0x1f, 0xc0, 0xd6, 0xff, 0x7b, 0x00, 0xdb, 0xff, 0xfc, 0x01, 0x84, 0x67, 0xa6, + 0xb9, 0xe6, 0x2c, 0xb1, 0x2d, 0xe6, 0x86, 0xb8, 0x56, 0xd9, 0x82, 0xbe, 0x24, 0x0f, 0x05, 0xab, + 0xca, 0x16, 0x5b, 0x93, 0xcb, 0xc2, 0x36, 0xcd, 0x4f, 0x86, 0x9d, 0x78, 0x2a, 0x0b, 0x7e, 0xf2, + 0xd5, 0x23, 0xcf, 0x73, 0x39, 0xbf, 0x7d, 0x65, 0x27, 0x7b, 0xeb, 0x4b, 0x9b, 0xe2, 0x0f, 0x36, + 0xf5, 0xbe, 0x9c, 0x3a, 0x4f, 0x29, 0x31, 0x31, 0x92, 0x4d, 0x19, 0x97, 0xbc, 0x32, 0xff, 0x5b, + 0x6c, 0xa7, 0x58, 0x0d, 0xea, 0x86, 0x2f, 0xf5, 0x8d, 0x15, 0x7e, 0x78, 0x5e, 0xb6, 0x63, 0x2c, + 0xc7, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xa2, 0x19, 0xa7, 0x1d, 0x84, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/geometry.pb.go new file mode 100644 index 0000000..c61a4ff --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/geometry.pb.go @@ -0,0 +1,262 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p2beta1/geometry.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. 
+ Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_9c5927b87470c5c2, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_9c5927b87470c5c2, []int{1} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon for the detected image annotation. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + // The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,2,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_9c5927b87470c5c2, []int{2} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +func (m *BoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// A 3D position in the image, used primarily for Face detection landmarks. +// A valid Position must have both x and y coordinates. +// The position coordinates are in the same scale as the original image. +type Position struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + // Z coordinate (or depth). 
+ Z float32 `protobuf:"fixed32,3,opt,name=z,proto3" json:"z,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_9c5927b87470c5c2, []int{3} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Position) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Position) GetZ() float32 { + if m != nil { + return m.Z + } + return 0 +} + +func init() { + proto.RegisterType((*Vertex)(nil), "google.cloud.vision.v1p2beta1.Vertex") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.vision.v1p2beta1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.vision.v1p2beta1.BoundingPoly") + proto.RegisterType((*Position)(nil), "google.cloud.vision.v1p2beta1.Position") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p2beta1/geometry.proto", fileDescriptor_geometry_9c5927b87470c5c2) +} + +var fileDescriptor_geometry_9c5927b87470c5c2 = []byte{ + // 283 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xc1, 0x4b, 0xc3, 0x30, + 0x14, 0xc6, 0x49, 0x87, 0x63, 0xc4, 0x09, 0x52, 0x2f, 0xbd, 0x08, 0xb3, 0x28, 0xec, 0x20, 0x09, + 0x9b, 0xde, 0x3c, 0x59, 0x0f, 0xde, 0xa4, 0xf4, 0xe0, 0xc1, 0x8b, 0x76, 0xed, 0x23, 0x04, 0xda, + 0xbc, 0x92, 0x66, 0x65, 0x2d, 0xfe, 0x57, 0xfe, 0x73, 0x1e, 0xa5, 0xc9, 0x28, 0x6c, 0x60, 0x77, + 0xfc, 0x5e, 0x7e, 0xef, 0x7b, 0x5f, 0xf8, 0xe8, 0xbd, 0x40, 0x14, 0x05, 0xf0, 0xac, 0xc0, 0x6d, + 0xce, 0x1b, 0x59, 0x4b, 0x54, 0xbc, 0x59, 0x55, 0xeb, 0x0d, 0x98, 0x74, 0xc5, 0x05, 0x60, 0x09, + 0x46, 0xb7, 0xac, 0xd2, 0x68, 0xd0, 0xbf, 0x76, 0x34, 0xb3, 0x34, 0x73, 0x34, 0x1b, 0xe8, 0xf0, + 0x96, 0x4e, 0xdf, 0x41, 0x1b, 0xd8, 0xf9, 0x73, 0x4a, 0x76, 0x01, 0x59, 0x90, 0xe5, 0x59, 0x42, + 0xac, 0x6a, 0x03, 0xcf, 0xa9, 0x36, 0x64, 0xf4, 0xf2, 0x0d, 0x75, 0x99, 0x16, 0xb2, 0x83, 0xfc, + 0x98, 0xf7, 0x0e, 0x78, 0xaf, 0xe7, 0x7f, 0x08, 0x9d, 0x47, 0xb8, 0x55, 0xb9, 0x54, 0x22, 0xc6, + 0xa2, 0xf5, 0x9f, 0xe9, 0xac, 0x01, 0x6d, 0x64, 0x06, 0x75, 0x40, 0x16, 0x93, 0xe5, 0xf9, 0xfa, + 0x8e, 0x8d, 0x06, 0x63, 0xee, 0x4a, 0x32, 0xac, 0xf9, 0x5f, 0xf4, 0x4a, 0x0d, 0x19, 0x3e, 0x07, + 0x37, 0xcf, 0xba, 0xf1, 0x13, 0x6e, 0xc7, 0xe9, 0x13, 0x5f, 0x1d, 0x4c, 0x7a, 0xab, 0xf0, 0x91, + 0xce, 0x62, 0xac, 0xa5, 0x91, 0xa8, 0xc6, 0x7e, 0xd7, 0xab, 0x2e, 0x98, 0x38, 0xd5, 0x45, 0xdf, + 0xf4, 0x26, 0xc3, 0x72, 0xfc, 0x7e, 0x74, 0xf1, 0xba, 0x6f, 0x25, 0xee, 0x4b, 0x89, 0xc9, 0xc7, + 0xcb, 0x9e, 0x17, 0x58, 0xa4, 0x4a, 0x30, 0xd4, 0x82, 0x0b, 0x50, 0xb6, 0x32, 0xee, 0x9e, 0xd2, + 0x4a, 0xd6, 0xff, 0x74, 0xfc, 0xe4, 0x06, 0xbf, 0x84, 0x6c, 0xa6, 0x76, 0xe5, 0xe1, 
0x2f, 0x00, + 0x00, 0xff, 0xff, 0x3d, 0xe4, 0x63, 0xcf, 0x15, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/image_annotator.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/image_annotator.pb.go new file mode 100644 index 0000000..b300cb6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/image_annotator.pb.go @@ -0,0 +1,2700 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p2beta1/image_annotator.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import color "google.golang.org/genproto/googleapis/type/color" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // It is very unlikely that the image belongs to the specified vertical. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // It is unlikely that the image belongs to the specified vertical. + Likelihood_UNLIKELY Likelihood = 2 + // It is possible that the image belongs to the specified vertical. + Likelihood_POSSIBLE Likelihood = 3 + // It is likely that the image belongs to the specified vertical. + Likelihood_LIKELY Likelihood = 4 + // It is very likely that the image belongs to the specified vertical. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{0} +} + +// Type of Google Cloud Vision API feature to be extracted. +type Feature_Type int32 + +const ( + // Unspecified feature type. + Feature_TYPE_UNSPECIFIED Feature_Type = 0 + // Run face detection. + Feature_FACE_DETECTION Feature_Type = 1 + // Run landmark detection. + Feature_LANDMARK_DETECTION Feature_Type = 2 + // Run logo detection. + Feature_LOGO_DETECTION Feature_Type = 3 + // Run label detection. + Feature_LABEL_DETECTION Feature_Type = 4 + // Run text detection / optical character recognition (OCR). 
Text detection + // is optimized for areas of text within a larger image; if the image is + // a document, use `DOCUMENT_TEXT_DETECTION` instead. + Feature_TEXT_DETECTION Feature_Type = 5 + // Run dense text document OCR. Takes precedence when both + // `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. + Feature_DOCUMENT_TEXT_DETECTION Feature_Type = 11 + // Run Safe Search to detect potentially unsafe + // or undesirable content. + Feature_SAFE_SEARCH_DETECTION Feature_Type = 6 + // Compute a set of image properties, such as the + // image's dominant colors. + Feature_IMAGE_PROPERTIES Feature_Type = 7 + // Run crop hints. + Feature_CROP_HINTS Feature_Type = 9 + // Run web detection. + Feature_WEB_DETECTION Feature_Type = 10 +) + +var Feature_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "FACE_DETECTION", + 2: "LANDMARK_DETECTION", + 3: "LOGO_DETECTION", + 4: "LABEL_DETECTION", + 5: "TEXT_DETECTION", + 11: "DOCUMENT_TEXT_DETECTION", + 6: "SAFE_SEARCH_DETECTION", + 7: "IMAGE_PROPERTIES", + 9: "CROP_HINTS", + 10: "WEB_DETECTION", +} +var Feature_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "FACE_DETECTION": 1, + "LANDMARK_DETECTION": 2, + "LOGO_DETECTION": 3, + "LABEL_DETECTION": 4, + "TEXT_DETECTION": 5, + "DOCUMENT_TEXT_DETECTION": 11, + "SAFE_SEARCH_DETECTION": 6, + "IMAGE_PROPERTIES": 7, + "CROP_HINTS": 9, + "WEB_DETECTION": 10, +} + +func (x Feature_Type) String() string { + return proto.EnumName(Feature_Type_name, int32(x)) +} +func (Feature_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{0, 0} +} + +// Face landmark (feature) type. +// Left and right are defined from the vantage of the viewer of the image +// without considering mirror projections typical of photos. So, `LEFT_EYE`, +// typically, is the person's right eye. +type FaceAnnotation_Landmark_Type int32 + +const ( + // Unknown face landmark detected. Should not be filled. + FaceAnnotation_Landmark_UNKNOWN_LANDMARK FaceAnnotation_Landmark_Type = 0 + // Left eye. + FaceAnnotation_Landmark_LEFT_EYE FaceAnnotation_Landmark_Type = 1 + // Right eye. + FaceAnnotation_Landmark_RIGHT_EYE FaceAnnotation_Landmark_Type = 2 + // Left of left eyebrow. + FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 3 + // Right of left eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 4 + // Left of right eyebrow. + FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 5 + // Right of right eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 6 + // Midpoint between eyes. + FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES FaceAnnotation_Landmark_Type = 7 + // Nose tip. + FaceAnnotation_Landmark_NOSE_TIP FaceAnnotation_Landmark_Type = 8 + // Upper lip. + FaceAnnotation_Landmark_UPPER_LIP FaceAnnotation_Landmark_Type = 9 + // Lower lip. + FaceAnnotation_Landmark_LOWER_LIP FaceAnnotation_Landmark_Type = 10 + // Mouth left. + FaceAnnotation_Landmark_MOUTH_LEFT FaceAnnotation_Landmark_Type = 11 + // Mouth right. + FaceAnnotation_Landmark_MOUTH_RIGHT FaceAnnotation_Landmark_Type = 12 + // Mouth center. + FaceAnnotation_Landmark_MOUTH_CENTER FaceAnnotation_Landmark_Type = 13 + // Nose, bottom right. + FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT FaceAnnotation_Landmark_Type = 14 + // Nose, bottom left. + FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT FaceAnnotation_Landmark_Type = 15 + // Nose, bottom center. 
+ FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER FaceAnnotation_Landmark_Type = 16 + // Left eye, top boundary. + FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 17 + // Left eye, right corner. + FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 18 + // Left eye, bottom boundary. + FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 19 + // Left eye, left corner. + FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 20 + // Right eye, top boundary. + FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 21 + // Right eye, right corner. + FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 22 + // Right eye, bottom boundary. + FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 23 + // Right eye, left corner. + FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 24 + // Left eyebrow, upper midpoint. + FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 25 + // Right eyebrow, upper midpoint. + FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 26 + // Left ear tragion. + FaceAnnotation_Landmark_LEFT_EAR_TRAGION FaceAnnotation_Landmark_Type = 27 + // Right ear tragion. + FaceAnnotation_Landmark_RIGHT_EAR_TRAGION FaceAnnotation_Landmark_Type = 28 + // Left eye pupil. + FaceAnnotation_Landmark_LEFT_EYE_PUPIL FaceAnnotation_Landmark_Type = 29 + // Right eye pupil. + FaceAnnotation_Landmark_RIGHT_EYE_PUPIL FaceAnnotation_Landmark_Type = 30 + // Forehead glabella. + FaceAnnotation_Landmark_FOREHEAD_GLABELLA FaceAnnotation_Landmark_Type = 31 + // Chin gnathion. + FaceAnnotation_Landmark_CHIN_GNATHION FaceAnnotation_Landmark_Type = 32 + // Chin left gonion. + FaceAnnotation_Landmark_CHIN_LEFT_GONION FaceAnnotation_Landmark_Type = 33 + // Chin right gonion. 
+ FaceAnnotation_Landmark_CHIN_RIGHT_GONION FaceAnnotation_Landmark_Type = 34 +) + +var FaceAnnotation_Landmark_Type_name = map[int32]string{ + 0: "UNKNOWN_LANDMARK", + 1: "LEFT_EYE", + 2: "RIGHT_EYE", + 3: "LEFT_OF_LEFT_EYEBROW", + 4: "RIGHT_OF_LEFT_EYEBROW", + 5: "LEFT_OF_RIGHT_EYEBROW", + 6: "RIGHT_OF_RIGHT_EYEBROW", + 7: "MIDPOINT_BETWEEN_EYES", + 8: "NOSE_TIP", + 9: "UPPER_LIP", + 10: "LOWER_LIP", + 11: "MOUTH_LEFT", + 12: "MOUTH_RIGHT", + 13: "MOUTH_CENTER", + 14: "NOSE_BOTTOM_RIGHT", + 15: "NOSE_BOTTOM_LEFT", + 16: "NOSE_BOTTOM_CENTER", + 17: "LEFT_EYE_TOP_BOUNDARY", + 18: "LEFT_EYE_RIGHT_CORNER", + 19: "LEFT_EYE_BOTTOM_BOUNDARY", + 20: "LEFT_EYE_LEFT_CORNER", + 21: "RIGHT_EYE_TOP_BOUNDARY", + 22: "RIGHT_EYE_RIGHT_CORNER", + 23: "RIGHT_EYE_BOTTOM_BOUNDARY", + 24: "RIGHT_EYE_LEFT_CORNER", + 25: "LEFT_EYEBROW_UPPER_MIDPOINT", + 26: "RIGHT_EYEBROW_UPPER_MIDPOINT", + 27: "LEFT_EAR_TRAGION", + 28: "RIGHT_EAR_TRAGION", + 29: "LEFT_EYE_PUPIL", + 30: "RIGHT_EYE_PUPIL", + 31: "FOREHEAD_GLABELLA", + 32: "CHIN_GNATHION", + 33: "CHIN_LEFT_GONION", + 34: "CHIN_RIGHT_GONION", +} +var FaceAnnotation_Landmark_Type_value = map[string]int32{ + "UNKNOWN_LANDMARK": 0, + "LEFT_EYE": 1, + "RIGHT_EYE": 2, + "LEFT_OF_LEFT_EYEBROW": 3, + "RIGHT_OF_LEFT_EYEBROW": 4, + "LEFT_OF_RIGHT_EYEBROW": 5, + "RIGHT_OF_RIGHT_EYEBROW": 6, + "MIDPOINT_BETWEEN_EYES": 7, + "NOSE_TIP": 8, + "UPPER_LIP": 9, + "LOWER_LIP": 10, + "MOUTH_LEFT": 11, + "MOUTH_RIGHT": 12, + "MOUTH_CENTER": 13, + "NOSE_BOTTOM_RIGHT": 14, + "NOSE_BOTTOM_LEFT": 15, + "NOSE_BOTTOM_CENTER": 16, + "LEFT_EYE_TOP_BOUNDARY": 17, + "LEFT_EYE_RIGHT_CORNER": 18, + "LEFT_EYE_BOTTOM_BOUNDARY": 19, + "LEFT_EYE_LEFT_CORNER": 20, + "RIGHT_EYE_TOP_BOUNDARY": 21, + "RIGHT_EYE_RIGHT_CORNER": 22, + "RIGHT_EYE_BOTTOM_BOUNDARY": 23, + "RIGHT_EYE_LEFT_CORNER": 24, + "LEFT_EYEBROW_UPPER_MIDPOINT": 25, + "RIGHT_EYEBROW_UPPER_MIDPOINT": 26, + "LEFT_EAR_TRAGION": 27, + "RIGHT_EAR_TRAGION": 28, + "LEFT_EYE_PUPIL": 29, + "RIGHT_EYE_PUPIL": 30, + "FOREHEAD_GLABELLA": 31, + "CHIN_GNATHION": 32, + "CHIN_LEFT_GONION": 33, + "CHIN_RIGHT_GONION": 34, +} + +func (x FaceAnnotation_Landmark_Type) String() string { + return proto.EnumName(FaceAnnotation_Landmark_Type_name, int32(x)) +} +func (FaceAnnotation_Landmark_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{3, 0, 0} +} + +// Batch operation states. +type OperationMetadata_State int32 + +const ( + // Invalid. + OperationMetadata_STATE_UNSPECIFIED OperationMetadata_State = 0 + // Request is received. + OperationMetadata_CREATED OperationMetadata_State = 1 + // Request is actively being processed. + OperationMetadata_RUNNING OperationMetadata_State = 2 + // The batch processing is done. + OperationMetadata_DONE OperationMetadata_State = 3 + // The batch processing was cancelled. 
+ OperationMetadata_CANCELLED OperationMetadata_State = 4 +) + +var OperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATED", + 2: "RUNNING", + 3: "DONE", + 4: "CANCELLED", +} +var OperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATED": 1, + "RUNNING": 2, + "DONE": 3, + "CANCELLED": 4, +} + +func (x OperationMetadata_State) String() string { + return proto.EnumName(OperationMetadata_State_name, int32(x)) +} +func (OperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{31, 0} +} + +// The type of Google Cloud Vision API detection to perform, and the maximum +// number of results to return for that type. Multiple `Feature` objects can +// be specified in the `features` list. +type Feature struct { + // The feature type. + Type Feature_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p2beta1.Feature_Type" json:"type,omitempty"` + // Maximum number of results of this type. Does not apply to + // `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`. + MaxResults int32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Model to use for the feature. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{0} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetType() Feature_Type { + if m != nil { + return m.Type + } + return Feature_TYPE_UNSPECIFIED +} + +func (m *Feature) GetMaxResults() int32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *Feature) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// External image source (Google Cloud Storage or web URL image location). +type ImageSource struct { + // **Use `image_uri` instead.** + // + // The Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more info. + GcsImageUri string `protobuf:"bytes,1,opt,name=gcs_image_uri,json=gcsImageUri,proto3" json:"gcs_image_uri,omitempty"` + // The URI of the source image. Can be either: + // + // 1. A Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more + // info. + // + // 2. 
A publicly-accessible image HTTP/HTTPS URL. When fetching images from + // HTTP/HTTPS URLs, Google cannot guarantee that the request will be + // completed. Your request may fail if the specified host denies the + // request (e.g. due to request throttling or DOS prevention), or if Google + // throttles requests to the site for abuse prevention. You should not + // depend on externally-hosted images for production applications. + // + // When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes + // precedence. + ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSource) Reset() { *m = ImageSource{} } +func (m *ImageSource) String() string { return proto.CompactTextString(m) } +func (*ImageSource) ProtoMessage() {} +func (*ImageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{1} +} +func (m *ImageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSource.Unmarshal(m, b) +} +func (m *ImageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSource.Marshal(b, m, deterministic) +} +func (dst *ImageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSource.Merge(dst, src) +} +func (m *ImageSource) XXX_Size() int { + return xxx_messageInfo_ImageSource.Size(m) +} +func (m *ImageSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSource proto.InternalMessageInfo + +func (m *ImageSource) GetGcsImageUri() string { + if m != nil { + return m.GcsImageUri + } + return "" +} + +func (m *ImageSource) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Client image to perform Google Cloud Vision API tasks over. +type Image struct { + // Image content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // Google Cloud Storage image location, or publicly-accessible image + // URL. If both `content` and `source` are provided for an image, `content` + // takes precedence and is used to perform the image annotation request. 
+ Source *ImageSource `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{2} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +func (m *Image) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *Image) GetSource() *ImageSource { + if m != nil { + return m.Source + } + return nil +} + +// A face annotation object contains the results of face detection. +type FaceAnnotation struct { + // The bounding polygon around the face. The coordinates of the bounding box + // are in the original image's scale, as returned in `ImageParams`. + // The bounding box is computed to "frame" the face in accordance with human + // expectations. It is based on the landmarker results. + // Note that one or more x and/or y coordinates may not be generated in the + // `BoundingPoly` (the polygon will be unbounded) if only a partial face + // appears in the image to be annotated. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The `fd_bounding_poly` bounding polygon is tighter than the + // `boundingPoly`, and encloses only the skin part of the face. Typically, it + // is used to eliminate the face from any image analysis that detects the + // "amount of skin" visible in an image. It is not based on the + // landmarker results, only on the initial face detection, hence + // the fd (face detection) prefix. + FdBoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=fd_bounding_poly,json=fdBoundingPoly,proto3" json:"fd_bounding_poly,omitempty"` + // Detected face landmarks. + Landmarks []*FaceAnnotation_Landmark `protobuf:"bytes,3,rep,name=landmarks,proto3" json:"landmarks,omitempty"` + // Roll angle, which indicates the amount of clockwise/anti-clockwise rotation + // of the face relative to the image vertical about the axis perpendicular to + // the face. Range [-180,180]. + RollAngle float32 `protobuf:"fixed32,4,opt,name=roll_angle,json=rollAngle,proto3" json:"roll_angle,omitempty"` + // Yaw angle, which indicates the leftward/rightward angle that the face is + // pointing relative to the vertical plane perpendicular to the image. Range + // [-180,180]. + PanAngle float32 `protobuf:"fixed32,5,opt,name=pan_angle,json=panAngle,proto3" json:"pan_angle,omitempty"` + // Pitch angle, which indicates the upwards/downwards angle that the face is + // pointing relative to the image's horizontal plane. Range [-180,180]. + TiltAngle float32 `protobuf:"fixed32,6,opt,name=tilt_angle,json=tiltAngle,proto3" json:"tilt_angle,omitempty"` + // Detection confidence. Range [0, 1]. 
+ DetectionConfidence float32 `protobuf:"fixed32,7,opt,name=detection_confidence,json=detectionConfidence,proto3" json:"detection_confidence,omitempty"` + // Face landmarking confidence. Range [0, 1]. + LandmarkingConfidence float32 `protobuf:"fixed32,8,opt,name=landmarking_confidence,json=landmarkingConfidence,proto3" json:"landmarking_confidence,omitempty"` + // Joy likelihood. + JoyLikelihood Likelihood `protobuf:"varint,9,opt,name=joy_likelihood,json=joyLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"joy_likelihood,omitempty"` + // Sorrow likelihood. + SorrowLikelihood Likelihood `protobuf:"varint,10,opt,name=sorrow_likelihood,json=sorrowLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"sorrow_likelihood,omitempty"` + // Anger likelihood. + AngerLikelihood Likelihood `protobuf:"varint,11,opt,name=anger_likelihood,json=angerLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"anger_likelihood,omitempty"` + // Surprise likelihood. + SurpriseLikelihood Likelihood `protobuf:"varint,12,opt,name=surprise_likelihood,json=surpriseLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"surprise_likelihood,omitempty"` + // Under-exposed likelihood. + UnderExposedLikelihood Likelihood `protobuf:"varint,13,opt,name=under_exposed_likelihood,json=underExposedLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"under_exposed_likelihood,omitempty"` + // Blurred likelihood. + BlurredLikelihood Likelihood `protobuf:"varint,14,opt,name=blurred_likelihood,json=blurredLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"blurred_likelihood,omitempty"` + // Headwear likelihood. + HeadwearLikelihood Likelihood `protobuf:"varint,15,opt,name=headwear_likelihood,json=headwearLikelihood,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"headwear_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{3} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetFdBoundingPoly() *BoundingPoly { + if m != nil { + return m.FdBoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetLandmarks() []*FaceAnnotation_Landmark { + if m != nil { + return m.Landmarks + } + return nil +} + +func (m *FaceAnnotation) GetRollAngle() float32 { + if m != nil { + return m.RollAngle + } + return 0 +} + +func (m *FaceAnnotation) GetPanAngle() float32 { + if m != nil { + return m.PanAngle + } + return 
0 +} + +func (m *FaceAnnotation) GetTiltAngle() float32 { + if m != nil { + return m.TiltAngle + } + return 0 +} + +func (m *FaceAnnotation) GetDetectionConfidence() float32 { + if m != nil { + return m.DetectionConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetLandmarkingConfidence() float32 { + if m != nil { + return m.LandmarkingConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetJoyLikelihood() Likelihood { + if m != nil { + return m.JoyLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSorrowLikelihood() Likelihood { + if m != nil { + return m.SorrowLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetAngerLikelihood() Likelihood { + if m != nil { + return m.AngerLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSurpriseLikelihood() Likelihood { + if m != nil { + return m.SurpriseLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetUnderExposedLikelihood() Likelihood { + if m != nil { + return m.UnderExposedLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetBlurredLikelihood() Likelihood { + if m != nil { + return m.BlurredLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetHeadwearLikelihood() Likelihood { + if m != nil { + return m.HeadwearLikelihood + } + return Likelihood_UNKNOWN +} + +// A face-specific landmark (for example, a face feature). +type FaceAnnotation_Landmark struct { + // Face landmark type. + Type FaceAnnotation_Landmark_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.cloud.vision.v1p2beta1.FaceAnnotation_Landmark_Type" json:"type,omitempty"` + // Face landmark position. + Position *Position `protobuf:"bytes,4,opt,name=position,proto3" json:"position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation_Landmark) Reset() { *m = FaceAnnotation_Landmark{} } +func (m *FaceAnnotation_Landmark) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation_Landmark) ProtoMessage() {} +func (*FaceAnnotation_Landmark) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{3, 0} +} +func (m *FaceAnnotation_Landmark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation_Landmark.Unmarshal(m, b) +} +func (m *FaceAnnotation_Landmark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation_Landmark.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation_Landmark) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation_Landmark.Merge(dst, src) +} +func (m *FaceAnnotation_Landmark) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation_Landmark.Size(m) +} +func (m *FaceAnnotation_Landmark) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation_Landmark.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation_Landmark proto.InternalMessageInfo + +func (m *FaceAnnotation_Landmark) GetType() FaceAnnotation_Landmark_Type { + if m != nil { + return m.Type + } + return FaceAnnotation_Landmark_UNKNOWN_LANDMARK +} + +func (m *FaceAnnotation_Landmark) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +// Detected entity location information. +type LocationInfo struct { + // lat/long location coordinates. 
+ LatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +// A `Property` consists of a user-supplied name/value pair. +type Property struct { + // Name of the property. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value of the property. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Value of numeric properties. + Uint64Value uint64 `protobuf:"varint,3,opt,name=uint64_value,json=uint64Value,proto3" json:"uint64_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{5} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Property) GetUint64Value() uint64 { + if m != nil { + return m.Uint64Value + } + return 0 +} + +// Set of detected entity features. +type EntityAnnotation struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The language code for the locale in which the entity textual + // `description` is expressed. + Locale string `protobuf:"bytes,2,opt,name=locale,proto3" json:"locale,omitempty"` + // Entity textual description, expressed in its `locale` language. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Overall score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // **Deprecated. Use `score` instead.** + // The accuracy of the entity detection in an image. + // For example, for an image in which the "Eiffel Tower" entity is detected, + // this field represents the confidence that there is a tower in the query + // image. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + // The relevancy of the ICA (Image Content Annotation) label to the + // image. For example, the relevancy of "tower" is likely higher to an image + // containing the detected "Eiffel Tower" than to an image containing a + // detected distant towering building, even though the confidence that + // there is a tower in each image may be the same. Range [0, 1]. + Topicality float32 `protobuf:"fixed32,6,opt,name=topicality,proto3" json:"topicality,omitempty"` + // Image region to which this entity belongs. Not produced + // for `LABEL_DETECTION` features. + BoundingPoly *BoundingPoly `protobuf:"bytes,7,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The location information for the detected entity. Multiple + // `LocationInfo` elements can be present because one location may + // indicate the location of the scene in the image, and another location + // may indicate the location of the place where the image was taken. + // Location information is usually present for landmarks. + Locations []*LocationInfo `protobuf:"bytes,8,rep,name=locations,proto3" json:"locations,omitempty"` + // Some entities may have optional user-supplied `Property` (name/value) + // fields, such a score or string that qualifies the entity. 
+ Properties []*Property `protobuf:"bytes,9,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityAnnotation) Reset() { *m = EntityAnnotation{} } +func (m *EntityAnnotation) String() string { return proto.CompactTextString(m) } +func (*EntityAnnotation) ProtoMessage() {} +func (*EntityAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{6} +} +func (m *EntityAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityAnnotation.Unmarshal(m, b) +} +func (m *EntityAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityAnnotation.Marshal(b, m, deterministic) +} +func (dst *EntityAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityAnnotation.Merge(dst, src) +} +func (m *EntityAnnotation) XXX_Size() int { + return xxx_messageInfo_EntityAnnotation.Size(m) +} +func (m *EntityAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_EntityAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityAnnotation proto.InternalMessageInfo + +func (m *EntityAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *EntityAnnotation) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *EntityAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EntityAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *EntityAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *EntityAnnotation) GetTopicality() float32 { + if m != nil { + return m.Topicality + } + return 0 +} + +func (m *EntityAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *EntityAnnotation) GetLocations() []*LocationInfo { + if m != nil { + return m.Locations + } + return nil +} + +func (m *EntityAnnotation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Set of features pertaining to the image, computed by computer vision +// methods over safe-search verticals (for example, adult, spoof, medical, +// violence). +type SafeSearchAnnotation struct { + // Represents the adult content likelihood for the image. Adult content may + // contain elements such as nudity, pornographic images or cartoons, or + // sexual activities. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"adult,omitempty"` + // Spoof likelihood. The likelihood that an modification + // was made to the image's canonical version to make it appear + // funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"spoof,omitempty"` + // Likelihood that this is a medical image. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"medical,omitempty"` + // Likelihood that this image contains violent content. + Violence Likelihood `protobuf:"varint,4,opt,name=violence,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"violence,omitempty"` + // Likelihood that the request image contains racy content. 
Racy content may + // include (but is not limited to) skimpy or sheer clothing, strategically + // covered nudity, lewd or provocative poses, or close-ups of sensitive + // body areas. + Racy Likelihood `protobuf:"varint,9,opt,name=racy,proto3,enum=google.cloud.vision.v1p2beta1.Likelihood" json:"racy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{7} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolence() Likelihood { + if m != nil { + return m.Violence + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +// Rectangle determined by min and max `LatLng` pairs. +type LatLongRect struct { + // Min lat/long pair. + MinLatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=min_lat_lng,json=minLatLng,proto3" json:"min_lat_lng,omitempty"` + // Max lat/long pair. 
+ MaxLatLng *latlng.LatLng `protobuf:"bytes,2,opt,name=max_lat_lng,json=maxLatLng,proto3" json:"max_lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLongRect) Reset() { *m = LatLongRect{} } +func (m *LatLongRect) String() string { return proto.CompactTextString(m) } +func (*LatLongRect) ProtoMessage() {} +func (*LatLongRect) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{8} +} +func (m *LatLongRect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLongRect.Unmarshal(m, b) +} +func (m *LatLongRect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLongRect.Marshal(b, m, deterministic) +} +func (dst *LatLongRect) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLongRect.Merge(dst, src) +} +func (m *LatLongRect) XXX_Size() int { + return xxx_messageInfo_LatLongRect.Size(m) +} +func (m *LatLongRect) XXX_DiscardUnknown() { + xxx_messageInfo_LatLongRect.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLongRect proto.InternalMessageInfo + +func (m *LatLongRect) GetMinLatLng() *latlng.LatLng { + if m != nil { + return m.MinLatLng + } + return nil +} + +func (m *LatLongRect) GetMaxLatLng() *latlng.LatLng { + if m != nil { + return m.MaxLatLng + } + return nil +} + +// Color information consists of RGB channels, score, and the fraction of +// the image that the color occupies in the image. +type ColorInfo struct { + // RGB components of the color. + Color *color.Color `protobuf:"bytes,1,opt,name=color,proto3" json:"color,omitempty"` + // Image-specific score for this color. Value in range [0, 1]. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The fraction of pixels the color occupies in the image. + // Value in range [0, 1]. + PixelFraction float32 `protobuf:"fixed32,3,opt,name=pixel_fraction,json=pixelFraction,proto3" json:"pixel_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColorInfo) Reset() { *m = ColorInfo{} } +func (m *ColorInfo) String() string { return proto.CompactTextString(m) } +func (*ColorInfo) ProtoMessage() {} +func (*ColorInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{9} +} +func (m *ColorInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColorInfo.Unmarshal(m, b) +} +func (m *ColorInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColorInfo.Marshal(b, m, deterministic) +} +func (dst *ColorInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColorInfo.Merge(dst, src) +} +func (m *ColorInfo) XXX_Size() int { + return xxx_messageInfo_ColorInfo.Size(m) +} +func (m *ColorInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ColorInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ColorInfo proto.InternalMessageInfo + +func (m *ColorInfo) GetColor() *color.Color { + if m != nil { + return m.Color + } + return nil +} + +func (m *ColorInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ColorInfo) GetPixelFraction() float32 { + if m != nil { + return m.PixelFraction + } + return 0 +} + +// Set of dominant colors and their corresponding scores. +type DominantColorsAnnotation struct { + // RGB color values with their score and pixel fraction. 
+ Colors []*ColorInfo `protobuf:"bytes,1,rep,name=colors,proto3" json:"colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DominantColorsAnnotation) Reset() { *m = DominantColorsAnnotation{} } +func (m *DominantColorsAnnotation) String() string { return proto.CompactTextString(m) } +func (*DominantColorsAnnotation) ProtoMessage() {} +func (*DominantColorsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{10} +} +func (m *DominantColorsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DominantColorsAnnotation.Unmarshal(m, b) +} +func (m *DominantColorsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DominantColorsAnnotation.Marshal(b, m, deterministic) +} +func (dst *DominantColorsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DominantColorsAnnotation.Merge(dst, src) +} +func (m *DominantColorsAnnotation) XXX_Size() int { + return xxx_messageInfo_DominantColorsAnnotation.Size(m) +} +func (m *DominantColorsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_DominantColorsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_DominantColorsAnnotation proto.InternalMessageInfo + +func (m *DominantColorsAnnotation) GetColors() []*ColorInfo { + if m != nil { + return m.Colors + } + return nil +} + +// Stores image properties, such as dominant colors. +type ImageProperties struct { + // If present, dominant colors completed successfully. + DominantColors *DominantColorsAnnotation `protobuf:"bytes,1,opt,name=dominant_colors,json=dominantColors,proto3" json:"dominant_colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageProperties) Reset() { *m = ImageProperties{} } +func (m *ImageProperties) String() string { return proto.CompactTextString(m) } +func (*ImageProperties) ProtoMessage() {} +func (*ImageProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{11} +} +func (m *ImageProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageProperties.Unmarshal(m, b) +} +func (m *ImageProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageProperties.Marshal(b, m, deterministic) +} +func (dst *ImageProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageProperties.Merge(dst, src) +} +func (m *ImageProperties) XXX_Size() int { + return xxx_messageInfo_ImageProperties.Size(m) +} +func (m *ImageProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ImageProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageProperties proto.InternalMessageInfo + +func (m *ImageProperties) GetDominantColors() *DominantColorsAnnotation { + if m != nil { + return m.DominantColors + } + return nil +} + +// Single crop hint that is used to generate a new crop when serving an image. +type CropHint struct { + // The bounding polygon for the crop region. The coordinates of the bounding + // box are in the original image's scale, as returned in `ImageParams`. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Confidence of this being a salient region. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Fraction of importance of this salient region with respect to the original + // image. + ImportanceFraction float32 `protobuf:"fixed32,3,opt,name=importance_fraction,json=importanceFraction,proto3" json:"importance_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHint) Reset() { *m = CropHint{} } +func (m *CropHint) String() string { return proto.CompactTextString(m) } +func (*CropHint) ProtoMessage() {} +func (*CropHint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{12} +} +func (m *CropHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHint.Unmarshal(m, b) +} +func (m *CropHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHint.Marshal(b, m, deterministic) +} +func (dst *CropHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHint.Merge(dst, src) +} +func (m *CropHint) XXX_Size() int { + return xxx_messageInfo_CropHint.Size(m) +} +func (m *CropHint) XXX_DiscardUnknown() { + xxx_messageInfo_CropHint.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHint proto.InternalMessageInfo + +func (m *CropHint) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *CropHint) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *CropHint) GetImportanceFraction() float32 { + if m != nil { + return m.ImportanceFraction + } + return 0 +} + +// Set of crop hints that are used to generate new crops when serving images. +type CropHintsAnnotation struct { + // Crop hint results. + CropHints []*CropHint `protobuf:"bytes,1,rep,name=crop_hints,json=cropHints,proto3" json:"crop_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsAnnotation) Reset() { *m = CropHintsAnnotation{} } +func (m *CropHintsAnnotation) String() string { return proto.CompactTextString(m) } +func (*CropHintsAnnotation) ProtoMessage() {} +func (*CropHintsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{13} +} +func (m *CropHintsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsAnnotation.Unmarshal(m, b) +} +func (m *CropHintsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsAnnotation.Marshal(b, m, deterministic) +} +func (dst *CropHintsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsAnnotation.Merge(dst, src) +} +func (m *CropHintsAnnotation) XXX_Size() int { + return xxx_messageInfo_CropHintsAnnotation.Size(m) +} +func (m *CropHintsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsAnnotation proto.InternalMessageInfo + +func (m *CropHintsAnnotation) GetCropHints() []*CropHint { + if m != nil { + return m.CropHints + } + return nil +} + +// Parameters for crop hints annotation request. +type CropHintsParams struct { + // Aspect ratios in floats, representing the ratio of the width to the height + // of the image. For example, if the desired aspect ratio is 4/3, the + // corresponding float value should be 1.33333. If not specified, the + // best possible crop is returned. 
The number of provided aspect ratios is + // limited to a maximum of 16; any aspect ratios provided after the 16th are + // ignored. + AspectRatios []float32 `protobuf:"fixed32,1,rep,packed,name=aspect_ratios,json=aspectRatios,proto3" json:"aspect_ratios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsParams) Reset() { *m = CropHintsParams{} } +func (m *CropHintsParams) String() string { return proto.CompactTextString(m) } +func (*CropHintsParams) ProtoMessage() {} +func (*CropHintsParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{14} +} +func (m *CropHintsParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsParams.Unmarshal(m, b) +} +func (m *CropHintsParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsParams.Marshal(b, m, deterministic) +} +func (dst *CropHintsParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsParams.Merge(dst, src) +} +func (m *CropHintsParams) XXX_Size() int { + return xxx_messageInfo_CropHintsParams.Size(m) +} +func (m *CropHintsParams) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsParams.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsParams proto.InternalMessageInfo + +func (m *CropHintsParams) GetAspectRatios() []float32 { + if m != nil { + return m.AspectRatios + } + return nil +} + +// Parameters for web detection request. +type WebDetectionParams struct { + // Whether to include results derived from the geo information in the image. + IncludeGeoResults bool `protobuf:"varint,2,opt,name=include_geo_results,json=includeGeoResults,proto3" json:"include_geo_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetectionParams) Reset() { *m = WebDetectionParams{} } +func (m *WebDetectionParams) String() string { return proto.CompactTextString(m) } +func (*WebDetectionParams) ProtoMessage() {} +func (*WebDetectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{15} +} +func (m *WebDetectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetectionParams.Unmarshal(m, b) +} +func (m *WebDetectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetectionParams.Marshal(b, m, deterministic) +} +func (dst *WebDetectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetectionParams.Merge(dst, src) +} +func (m *WebDetectionParams) XXX_Size() int { + return xxx_messageInfo_WebDetectionParams.Size(m) +} +func (m *WebDetectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetectionParams proto.InternalMessageInfo + +func (m *WebDetectionParams) GetIncludeGeoResults() bool { + if m != nil { + return m.IncludeGeoResults + } + return false +} + +// Image context and/or feature-specific parameters. +type ImageContext struct { + // Not used. + LatLongRect *LatLongRect `protobuf:"bytes,1,opt,name=lat_long_rect,json=latLongRect,proto3" json:"lat_long_rect,omitempty"` + // List of languages to use for TEXT_DETECTION. In most cases, an empty value + // yields the best results since it enables automatic language detection. For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. 
In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Text detection returns an + // error if one or more of the specified languages is not one of the + // [supported languages](/vision/docs/languages). + LanguageHints []string `protobuf:"bytes,2,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + // Parameters for crop hints annotation request. + CropHintsParams *CropHintsParams `protobuf:"bytes,4,opt,name=crop_hints_params,json=cropHintsParams,proto3" json:"crop_hints_params,omitempty"` + // Parameters for web detection. + WebDetectionParams *WebDetectionParams `protobuf:"bytes,6,opt,name=web_detection_params,json=webDetectionParams,proto3" json:"web_detection_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageContext) Reset() { *m = ImageContext{} } +func (m *ImageContext) String() string { return proto.CompactTextString(m) } +func (*ImageContext) ProtoMessage() {} +func (*ImageContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{16} +} +func (m *ImageContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageContext.Unmarshal(m, b) +} +func (m *ImageContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageContext.Marshal(b, m, deterministic) +} +func (dst *ImageContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageContext.Merge(dst, src) +} +func (m *ImageContext) XXX_Size() int { + return xxx_messageInfo_ImageContext.Size(m) +} +func (m *ImageContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageContext proto.InternalMessageInfo + +func (m *ImageContext) GetLatLongRect() *LatLongRect { + if m != nil { + return m.LatLongRect + } + return nil +} + +func (m *ImageContext) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +func (m *ImageContext) GetCropHintsParams() *CropHintsParams { + if m != nil { + return m.CropHintsParams + } + return nil +} + +func (m *ImageContext) GetWebDetectionParams() *WebDetectionParams { + if m != nil { + return m.WebDetectionParams + } + return nil +} + +// Request for performing Google Cloud Vision API tasks over a user-provided +// image, with user-requested features. +type AnnotateImageRequest struct { + // The image to be processed. + Image *Image `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + // Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image. 
+ ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageRequest) Reset() { *m = AnnotateImageRequest{} } +func (m *AnnotateImageRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageRequest) ProtoMessage() {} +func (*AnnotateImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{17} +} +func (m *AnnotateImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageRequest.Unmarshal(m, b) +} +func (m *AnnotateImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageRequest.Merge(dst, src) +} +func (m *AnnotateImageRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateImageRequest.Size(m) +} +func (m *AnnotateImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageRequest proto.InternalMessageInfo + +func (m *AnnotateImageRequest) GetImage() *Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *AnnotateImageRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateImageRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +// If an image was produced from a file (e.g. a PDF), this message gives +// information about the source of that image. +type ImageAnnotationContext struct { + // The URI of the file used to produce the image. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // If the file was a PDF or TIFF, this field gives the page number within + // the file used to produce the image. 
+ PageNumber int32 `protobuf:"varint,2,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAnnotationContext) Reset() { *m = ImageAnnotationContext{} } +func (m *ImageAnnotationContext) String() string { return proto.CompactTextString(m) } +func (*ImageAnnotationContext) ProtoMessage() {} +func (*ImageAnnotationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{18} +} +func (m *ImageAnnotationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAnnotationContext.Unmarshal(m, b) +} +func (m *ImageAnnotationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAnnotationContext.Marshal(b, m, deterministic) +} +func (dst *ImageAnnotationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAnnotationContext.Merge(dst, src) +} +func (m *ImageAnnotationContext) XXX_Size() int { + return xxx_messageInfo_ImageAnnotationContext.Size(m) +} +func (m *ImageAnnotationContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAnnotationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAnnotationContext proto.InternalMessageInfo + +func (m *ImageAnnotationContext) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ImageAnnotationContext) GetPageNumber() int32 { + if m != nil { + return m.PageNumber + } + return 0 +} + +// Response to an image annotation request. +type AnnotateImageResponse struct { + // If present, face detection has completed successfully. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,1,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // If present, landmark detection has completed successfully. + LandmarkAnnotations []*EntityAnnotation `protobuf:"bytes,2,rep,name=landmark_annotations,json=landmarkAnnotations,proto3" json:"landmark_annotations,omitempty"` + // If present, logo detection has completed successfully. + LogoAnnotations []*EntityAnnotation `protobuf:"bytes,3,rep,name=logo_annotations,json=logoAnnotations,proto3" json:"logo_annotations,omitempty"` + // If present, label detection has completed successfully. + LabelAnnotations []*EntityAnnotation `protobuf:"bytes,4,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // If present, text (OCR) detection has completed successfully. + TextAnnotations []*EntityAnnotation `protobuf:"bytes,5,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // If present, text (OCR) detection or document (OCR) text detection has + // completed successfully. + // This annotation provides the structural hierarchy for the OCR detected + // text. + FullTextAnnotation *TextAnnotation `protobuf:"bytes,12,opt,name=full_text_annotation,json=fullTextAnnotation,proto3" json:"full_text_annotation,omitempty"` + // If present, safe-search annotation has completed successfully. + SafeSearchAnnotation *SafeSearchAnnotation `protobuf:"bytes,6,opt,name=safe_search_annotation,json=safeSearchAnnotation,proto3" json:"safe_search_annotation,omitempty"` + // If present, image properties were extracted successfully. 
+ ImagePropertiesAnnotation *ImageProperties `protobuf:"bytes,8,opt,name=image_properties_annotation,json=imagePropertiesAnnotation,proto3" json:"image_properties_annotation,omitempty"` + // If present, crop hints have completed successfully. + CropHintsAnnotation *CropHintsAnnotation `protobuf:"bytes,11,opt,name=crop_hints_annotation,json=cropHintsAnnotation,proto3" json:"crop_hints_annotation,omitempty"` + // If present, web detection has completed successfully. + WebDetection *WebDetection `protobuf:"bytes,13,opt,name=web_detection,json=webDetection,proto3" json:"web_detection,omitempty"` + // If set, represents the error message for the operation. + // Note that filled-in image annotations are guaranteed to be + // correct, even when `error` is set. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + // If present, contextual information is needed to understand where this image + // comes from. + Context *ImageAnnotationContext `protobuf:"bytes,21,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageResponse) Reset() { *m = AnnotateImageResponse{} } +func (m *AnnotateImageResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageResponse) ProtoMessage() {} +func (*AnnotateImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{19} +} +func (m *AnnotateImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageResponse.Unmarshal(m, b) +} +func (m *AnnotateImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageResponse.Merge(dst, src) +} +func (m *AnnotateImageResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateImageResponse.Size(m) +} +func (m *AnnotateImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageResponse proto.InternalMessageInfo + +func (m *AnnotateImageResponse) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLandmarkAnnotations() []*EntityAnnotation { + if m != nil { + return m.LandmarkAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLogoAnnotations() []*EntityAnnotation { + if m != nil { + return m.LogoAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLabelAnnotations() []*EntityAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetTextAnnotations() []*EntityAnnotation { + if m != nil { + return m.TextAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetFullTextAnnotation() *TextAnnotation { + if m != nil { + return m.FullTextAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetSafeSearchAnnotation() *SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetImagePropertiesAnnotation() *ImageProperties { + if m != nil { + return m.ImagePropertiesAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetCropHintsAnnotation() *CropHintsAnnotation { + if m != nil { + return m.CropHintsAnnotation + } + return nil 
+} + +func (m *AnnotateImageResponse) GetWebDetection() *WebDetection { + if m != nil { + return m.WebDetection + } + return nil +} + +func (m *AnnotateImageResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *AnnotateImageResponse) GetContext() *ImageAnnotationContext { + if m != nil { + return m.Context + } + return nil +} + +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +type AnnotateFileResponse struct { + // Information about the file for which this response is generated. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Individual responses to images found within the file. + Responses []*AnnotateImageResponse `protobuf:"bytes,2,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileResponse) Reset() { *m = AnnotateFileResponse{} } +func (m *AnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileResponse) ProtoMessage() {} +func (*AnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{20} +} +func (m *AnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileResponse.Unmarshal(m, b) +} +func (m *AnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileResponse.Merge(dst, src) +} +func (m *AnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateFileResponse.Size(m) +} +func (m *AnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileResponse proto.InternalMessageInfo + +func (m *AnnotateFileResponse) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// Multiple image annotation requests are batched into a single service call. +type BatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. 
+ Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesRequest) Reset() { *m = BatchAnnotateImagesRequest{} } +func (m *BatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesRequest) ProtoMessage() {} +func (*BatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{21} +} +func (m *BatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *BatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesRequest.Size(m) +} +func (m *BatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to a batch image annotation request. +type BatchAnnotateImagesResponse struct { + // Individual responses to image annotation requests within the batch. + Responses []*AnnotateImageResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesResponse) Reset() { *m = BatchAnnotateImagesResponse{} } +func (m *BatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesResponse) ProtoMessage() {} +func (*BatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{22} +} +func (m *BatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *BatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesResponse.Size(m) +} +func (m *BatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateImagesResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// An offline file annotation request. +type AsyncAnnotateFileRequest struct { + // Required. Information about the input file. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. 
+ Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Required. The desired output location and metadata (e.g. format). + OutputConfig *OutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileRequest) Reset() { *m = AsyncAnnotateFileRequest{} } +func (m *AsyncAnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileRequest) ProtoMessage() {} +func (*AsyncAnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{23} +} +func (m *AsyncAnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileRequest.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileRequest.Merge(dst, src) +} +func (m *AsyncAnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileRequest.Size(m) +} +func (m *AsyncAnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileRequest proto.InternalMessageInfo + +func (m *AsyncAnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The response for a single offline file annotation request. +type AsyncAnnotateFileResponse struct { + // The output location and metadata from AsyncAnnotateFileRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileResponse) Reset() { *m = AsyncAnnotateFileResponse{} } +func (m *AsyncAnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileResponse) ProtoMessage() {} +func (*AsyncAnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{24} +} +func (m *AsyncAnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileResponse.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileResponse.Merge(dst, src) +} +func (m *AsyncAnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileResponse.Size(m) +} +func (m *AsyncAnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileResponse proto.InternalMessageInfo + +func (m *AsyncAnnotateFileResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Multiple async file annotation requests are batched into a single service +// call. +type AsyncBatchAnnotateFilesRequest struct { + // Individual async file annotation requests for this batch. + Requests []*AsyncAnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesRequest) Reset() { *m = AsyncBatchAnnotateFilesRequest{} } +func (m *AsyncBatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{25} +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Size(m) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesRequest) GetRequests() []*AsyncAnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to an async batch file annotation request. +type AsyncBatchAnnotateFilesResponse struct { + // The list of file annotation responses, one for each request in + // AsyncBatchAnnotateFilesRequest. 
+ Responses []*AsyncAnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesResponse) Reset() { *m = AsyncBatchAnnotateFilesResponse{} } +func (m *AsyncBatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{26} +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Size(m) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesResponse) GetResponses() []*AsyncAnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// The desired input location and metadata. +type InputConfig struct { + // The Google Cloud Storage location to read the input from. + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"` + // The type of the file. Currently only "application/pdf" and "image/tiff" + // are supported. Wildcards are not supported. + MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{27} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +func (m *InputConfig) GetGcsSource() *GcsSource { + if m != nil { + return m.GcsSource + } + return nil +} + +func (m *InputConfig) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// The desired output location and metadata. +type OutputConfig struct { + // The Google Cloud Storage location to write the output(s) to. 
+ GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3" json:"gcs_destination,omitempty"` + // The max number of response protos to put into each output JSON file on GCS. + // The valid range is [1, 100]. If not specified, the default value is 20. + // + // For example, for one pdf file with 100 pages, 100 response protos will + // be generated. If `batch_size` = 20, then 5 json files each + // containing 20 response protos will be written under the prefix + // `gcs_destination`.`uri`. + // + // Currently, batch_size only applies to GcsDestination, with potential future + // support for other output configurations. + BatchSize int32 `protobuf:"varint,2,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{28} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if m != nil { + return m.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetBatchSize() int32 { + if m != nil { + return m.BatchSize + } + return 0 +} + +// The Google Cloud Storage location where the input will be read from. +type GcsSource struct { + // Google Cloud Storage URI for the input file. This must only be a GCS + // object. Wildcards are not currently supported. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{29} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The Google Cloud Storage location where the output will be written to. 
+type GcsDestination struct { + // Google Cloud Storage URI where the results will be stored. Results will + // be in JSON format and preceded by its corresponding input URI. This field + // can either represent a single file, or a prefix for multiple outputs. + // Prefixes must end in a `/`. + // + // Examples: + // + // * File: gs://bucket-name/filename.json + // * Prefix: gs://bucket-name/prefix/here/ + // * File: gs://bucket-name/prefix/here + // + // If multiple outputs, each response is still AnnotateFileResponse, each of + // which contains some subset of the full list of AnnotateImageResponse. + // Multiple outputs can happen if, for example, the output JSON is too large + // and overflows into multiple sharded files. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{30} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// Contains metadata for the BatchAnnotateImages operation. +type OperationMetadata struct { + // Current state of the batch operation. + State OperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1p2beta1.OperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was received. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time when the operation result was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_3f9c4b0d06e54350, []int{31} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetState() OperationMetadata_State { + if m != nil { + return m.State + } + return OperationMetadata_STATE_UNSPECIFIED +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func init() { + proto.RegisterType((*Feature)(nil), "google.cloud.vision.v1p2beta1.Feature") + proto.RegisterType((*ImageSource)(nil), "google.cloud.vision.v1p2beta1.ImageSource") + proto.RegisterType((*Image)(nil), "google.cloud.vision.v1p2beta1.Image") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.vision.v1p2beta1.FaceAnnotation") + proto.RegisterType((*FaceAnnotation_Landmark)(nil), "google.cloud.vision.v1p2beta1.FaceAnnotation.Landmark") + proto.RegisterType((*LocationInfo)(nil), "google.cloud.vision.v1p2beta1.LocationInfo") + proto.RegisterType((*Property)(nil), "google.cloud.vision.v1p2beta1.Property") + proto.RegisterType((*EntityAnnotation)(nil), "google.cloud.vision.v1p2beta1.EntityAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.vision.v1p2beta1.SafeSearchAnnotation") + proto.RegisterType((*LatLongRect)(nil), "google.cloud.vision.v1p2beta1.LatLongRect") + proto.RegisterType((*ColorInfo)(nil), "google.cloud.vision.v1p2beta1.ColorInfo") + proto.RegisterType((*DominantColorsAnnotation)(nil), "google.cloud.vision.v1p2beta1.DominantColorsAnnotation") + proto.RegisterType((*ImageProperties)(nil), "google.cloud.vision.v1p2beta1.ImageProperties") + proto.RegisterType((*CropHint)(nil), "google.cloud.vision.v1p2beta1.CropHint") + proto.RegisterType((*CropHintsAnnotation)(nil), "google.cloud.vision.v1p2beta1.CropHintsAnnotation") + proto.RegisterType((*CropHintsParams)(nil), "google.cloud.vision.v1p2beta1.CropHintsParams") + proto.RegisterType((*WebDetectionParams)(nil), "google.cloud.vision.v1p2beta1.WebDetectionParams") + proto.RegisterType((*ImageContext)(nil), "google.cloud.vision.v1p2beta1.ImageContext") + proto.RegisterType((*AnnotateImageRequest)(nil), "google.cloud.vision.v1p2beta1.AnnotateImageRequest") + proto.RegisterType((*ImageAnnotationContext)(nil), "google.cloud.vision.v1p2beta1.ImageAnnotationContext") + 
proto.RegisterType((*AnnotateImageResponse)(nil), "google.cloud.vision.v1p2beta1.AnnotateImageResponse") + proto.RegisterType((*AnnotateFileResponse)(nil), "google.cloud.vision.v1p2beta1.AnnotateFileResponse") + proto.RegisterType((*BatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1p2beta1.BatchAnnotateImagesRequest") + proto.RegisterType((*BatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1p2beta1.BatchAnnotateImagesResponse") + proto.RegisterType((*AsyncAnnotateFileRequest)(nil), "google.cloud.vision.v1p2beta1.AsyncAnnotateFileRequest") + proto.RegisterType((*AsyncAnnotateFileResponse)(nil), "google.cloud.vision.v1p2beta1.AsyncAnnotateFileResponse") + proto.RegisterType((*AsyncBatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1p2beta1.AsyncBatchAnnotateFilesRequest") + proto.RegisterType((*AsyncBatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1p2beta1.AsyncBatchAnnotateFilesResponse") + proto.RegisterType((*InputConfig)(nil), "google.cloud.vision.v1p2beta1.InputConfig") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.vision.v1p2beta1.OutputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.vision.v1p2beta1.GcsSource") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.vision.v1p2beta1.GcsDestination") + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.vision.v1p2beta1.OperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1p2beta1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.vision.v1p2beta1.Feature_Type", Feature_Type_name, Feature_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p2beta1.FaceAnnotation_Landmark_Type", FaceAnnotation_Landmark_Type_name, FaceAnnotation_Landmark_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p2beta1.OperationMetadata_State", OperationMetadata_State_name, OperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ImageAnnotatorClient is the client API for ImageAnnotator service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ImageAnnotatorClient interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) + // Run async image detection and annotation for a list of generic files (e.g. + // PDF) which may contain multiple pages and multiple images per page. + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). 
+ AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type imageAnnotatorClient struct { + cc *grpc.ClientConn +} + +func NewImageAnnotatorClient(cc *grpc.ClientConn) ImageAnnotatorClient { + return &imageAnnotatorClient{cc} +} + +func (c *imageAnnotatorClient) BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) { + out := new(BatchAnnotateImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p2beta1.ImageAnnotator/BatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p2beta1.ImageAnnotator/AsyncBatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ImageAnnotatorServer is the server API for ImageAnnotator service. +type ImageAnnotatorServer interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(context.Context, *BatchAnnotateImagesRequest) (*BatchAnnotateImagesResponse, error) + // Run async image detection and annotation for a list of generic files (e.g. + // PDF) which may contain multiple pages and multiple images per page. + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). 
+ AsyncBatchAnnotateFiles(context.Context, *AsyncBatchAnnotateFilesRequest) (*longrunning.Operation, error) +} + +func RegisterImageAnnotatorServer(s *grpc.Server, srv ImageAnnotatorServer) { + s.RegisterService(&_ImageAnnotator_serviceDesc, srv) +} + +func _ImageAnnotator_BatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p2beta1.ImageAnnotator/BatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, req.(*BatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p2beta1.ImageAnnotator/AsyncBatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, req.(*AsyncBatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ImageAnnotator_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p2beta1.ImageAnnotator", + HandlerType: (*ImageAnnotatorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchAnnotateImages", + Handler: _ImageAnnotator_BatchAnnotateImages_Handler, + }, + { + MethodName: "AsyncBatchAnnotateFiles", + Handler: _ImageAnnotator_AsyncBatchAnnotateFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p2beta1/image_annotator.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p2beta1/image_annotator.proto", fileDescriptor_image_annotator_3f9c4b0d06e54350) +} + +var fileDescriptor_image_annotator_3f9c4b0d06e54350 = []byte{ + // 2899 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0xcf, 0x73, 0xdb, 0xc6, + 0xf5, 0x0f, 0xa9, 0x5f, 0xe4, 0x23, 0x25, 0x41, 0xab, 0x1f, 0xa6, 0x65, 0x2b, 0x56, 0x90, 0x6f, + 0xbe, 0x55, 0xdd, 0x94, 0x1a, 0xcb, 0x49, 0xda, 0x3a, 0xcd, 0xa4, 0x14, 0x09, 0x49, 0x1c, 0x53, + 0x24, 0xbb, 0x84, 0xec, 0xd8, 0x93, 0x0e, 0x0a, 0x81, 0x4b, 0x1a, 0x09, 0x08, 0x20, 0x00, 0x68, + 0x8b, 0x39, 0x66, 0xa6, 0x7f, 0x41, 0x6f, 0xbd, 0x77, 0x7a, 0x6a, 0x2f, 0xed, 0xa5, 0xff, 0x40, + 0xef, 0x9d, 0x1e, 0x7a, 0xe9, 0xad, 0x3d, 0xf4, 0xd8, 0x53, 0xa7, 0xd3, 0x53, 0x67, 0x7f, 0x00, + 0x5c, 0x50, 0xb2, 0x29, 0x3a, 0xe9, 0x4c, 0x4f, 0xe2, 0xbe, 0xb7, 0x9f, 0xcf, 0xdb, 0x7d, 0xfb, + 0xf6, 0xed, 0xdb, 0x85, 0xe0, 0x7e, 0xdf, 0xf3, 0xfa, 0x0e, 0xd9, 0xb7, 0x1c, 0x6f, 0xd8, 0xdd, + 0x7f, 0x6e, 0x87, 0xb6, 0xe7, 0xee, 0x3f, 0xbf, 0xe7, 0x1f, 0x9c, 0x93, 0xc8, 0xbc, 0xb7, 0x6f, + 0x0f, 0xcc, 0x3e, 0x31, 0x4c, 0xd7, 0xf5, 0x22, 0x33, 0xf2, 0x82, 0xb2, 0x1f, 0x78, 0x91, 0x87, + 0x76, 0x38, 0xa8, 0xcc, 0x40, 0x65, 0x0e, 0x2a, 0x27, 
0xa0, 0xed, 0xdb, 0x82, 0xd3, 0xf4, 0xed, + 0x7d, 0x01, 0xb5, 0x3d, 0x37, 0xe4, 0xe0, 0xed, 0x77, 0x5f, 0x6d, 0xb1, 0x4f, 0xbc, 0x01, 0x89, + 0x82, 0x91, 0xe8, 0x3d, 0x65, 0x7c, 0x11, 0xb9, 0x88, 0x8c, 0xb1, 0x0d, 0x01, 0xba, 0xf7, 0x6a, + 0xd0, 0x0b, 0x72, 0x6e, 0x74, 0x49, 0x44, 0x2c, 0x09, 0xf2, 0xb6, 0x80, 0x38, 0x9e, 0xdb, 0x0f, + 0x86, 0xae, 0x6b, 0xbb, 0xfd, 0x7d, 0xcf, 0x27, 0x41, 0x6a, 0xe8, 0x77, 0x44, 0x27, 0xd6, 0x3a, + 0x1f, 0xf6, 0xf6, 0x23, 0x7b, 0x40, 0xc2, 0xc8, 0x1c, 0xf8, 0xa2, 0xc3, 0x0d, 0xd1, 0x21, 0xf0, + 0xad, 0xfd, 0x30, 0x32, 0xa3, 0x61, 0x38, 0xa1, 0x88, 0x46, 0x3e, 0xd9, 0xb7, 0x3c, 0x27, 0x76, + 0xe5, 0x76, 0x49, 0x56, 0x38, 0x66, 0xe4, 0xb8, 0x7d, 0xae, 0x51, 0xff, 0x9d, 0x85, 0xa5, 0x23, + 0x62, 0x46, 0xc3, 0x80, 0xa0, 0x8f, 0x61, 0x9e, 0x76, 0x28, 0x65, 0x76, 0x33, 0x7b, 0x2b, 0x07, + 0xdf, 0x29, 0xbf, 0xd2, 0xff, 0x65, 0x81, 0x2a, 0xeb, 0x23, 0x9f, 0x60, 0x06, 0x44, 0x77, 0xa0, + 0x30, 0x30, 0x2f, 0x8c, 0x80, 0x84, 0x43, 0x27, 0x0a, 0x4b, 0xd9, 0xdd, 0xcc, 0xde, 0x02, 0x86, + 0x81, 0x79, 0x81, 0xb9, 0x04, 0x6d, 0xc0, 0xc2, 0xc0, 0xeb, 0x12, 0xa7, 0x34, 0xb7, 0x9b, 0xd9, + 0xcb, 0x63, 0xde, 0x50, 0xff, 0x99, 0x81, 0x79, 0xca, 0x82, 0x36, 0x40, 0xd1, 0x9f, 0xb4, 0x35, + 0xe3, 0xac, 0xd9, 0x69, 0x6b, 0xd5, 0xfa, 0x51, 0x5d, 0xab, 0x29, 0x6f, 0x20, 0x04, 0x2b, 0x47, + 0x95, 0xaa, 0x66, 0xd4, 0x34, 0x5d, 0xab, 0xea, 0xf5, 0x56, 0x53, 0xc9, 0xa0, 0x2d, 0x40, 0x8d, + 0x4a, 0xb3, 0x76, 0x5a, 0xc1, 0x0f, 0x25, 0x79, 0x96, 0xf6, 0x6d, 0xb4, 0x8e, 0x5b, 0x92, 0x6c, + 0x0e, 0xad, 0xc3, 0x6a, 0xa3, 0x72, 0xa8, 0x35, 0x24, 0xe1, 0x3c, 0xed, 0xa8, 0x6b, 0x9f, 0xe8, + 0x92, 0x6c, 0x01, 0xdd, 0x82, 0x1b, 0xb5, 0x56, 0xf5, 0xec, 0x54, 0x6b, 0xea, 0xc6, 0x84, 0xb2, + 0x80, 0x6e, 0xc2, 0x66, 0xa7, 0x72, 0xa4, 0x19, 0x1d, 0xad, 0x82, 0xab, 0x27, 0x92, 0x6a, 0x91, + 0x0e, 0xbb, 0x7e, 0x5a, 0x39, 0xd6, 0x8c, 0x36, 0x6e, 0xb5, 0x35, 0xac, 0xd7, 0xb5, 0x8e, 0xb2, + 0x84, 0x56, 0x00, 0xaa, 0xb8, 0xd5, 0x36, 0x4e, 0xea, 0x4d, 0xbd, 0xa3, 0xe4, 0xd1, 0x1a, 0x2c, + 0x3f, 0xd6, 0x0e, 0x25, 0x20, 0xa8, 0x4d, 0x28, 0xd4, 0x69, 0xe8, 0x77, 0xbc, 0x61, 0x60, 0x11, + 0xa4, 0xc2, 0x72, 0xdf, 0x0a, 0x0d, 0xbe, 0x1b, 0x86, 0x81, 0xcd, 0x16, 0x22, 0x8f, 0x0b, 0x7d, + 0x2b, 0x64, 0xdd, 0xce, 0x02, 0x1b, 0xdd, 0x82, 0xfc, 0x58, 0x9f, 0x65, 0xfa, 0x9c, 0x2d, 0x94, + 0x2a, 0x81, 0x05, 0xd6, 0x11, 0x95, 0x60, 0xc9, 0xf2, 0xdc, 0x88, 0xb8, 0x11, 0xe3, 0x28, 0xe2, + 0xb8, 0x89, 0x0e, 0x61, 0x31, 0x64, 0xd6, 0x18, 0xb8, 0x70, 0x70, 0x77, 0xca, 0x2a, 0x4b, 0xe3, + 0xc3, 0x02, 0xa9, 0xfe, 0x52, 0x81, 0x95, 0x23, 0xd3, 0x22, 0x95, 0x64, 0x47, 0xa0, 0x36, 0x2c, + 0x9f, 0x7b, 0x43, 0xb7, 0x6b, 0xbb, 0x7d, 0xc3, 0xf7, 0x9c, 0x11, 0x33, 0x5b, 0x98, 0x1a, 0x43, + 0x87, 0x02, 0xd3, 0xf6, 0x9c, 0x11, 0x2e, 0x9e, 0x4b, 0x2d, 0x74, 0x06, 0x4a, 0xaf, 0x6b, 0xa4, + 0x49, 0xb3, 0xb3, 0x93, 0xae, 0xf4, 0xba, 0x72, 0x1b, 0xe9, 0x90, 0x77, 0x4c, 0xb7, 0x3b, 0x30, + 0x83, 0xcf, 0xc3, 0xd2, 0xdc, 0xee, 0xdc, 0x5e, 0xe1, 0xe0, 0x83, 0x69, 0x81, 0x9e, 0x9a, 0x6a, + 0xb9, 0x21, 0xe0, 0x78, 0x4c, 0x84, 0x76, 0x00, 0x02, 0xcf, 0x71, 0x0c, 0xd3, 0xed, 0x3b, 0xa4, + 0x34, 0xbf, 0x9b, 0xd9, 0xcb, 0xe2, 0x3c, 0x95, 0x54, 0xa8, 0x80, 0x2e, 0x9a, 0x6f, 0xba, 0x42, + 0xbb, 0xc0, 0xb4, 0x39, 0xdf, 0x74, 0xb9, 0x72, 0x07, 0x20, 0xb2, 0x9d, 0x48, 0x68, 0x17, 0x39, + 0x96, 0x4a, 0xb8, 0xfa, 0x1e, 0x6c, 0x24, 0x59, 0xc4, 0xb0, 0x3c, 0xb7, 0x67, 0x77, 0x89, 0x6b, + 0x91, 0xd2, 0x12, 0xeb, 0xb8, 0x9e, 0xe8, 0xaa, 0x89, 0x0a, 0xbd, 0x0f, 0x5b, 0xf1, 0xd0, 0xa8, + 0xeb, 0x24, 0x50, 0x8e, 0x81, 0x36, 0x25, 0xad, 0x04, 0x6b, 0xc3, 0xca, 0x67, 
0xde, 0xc8, 0x70, + 0xec, 0xcf, 0x89, 0x63, 0x3f, 0xf3, 0xbc, 0x6e, 0x29, 0xcf, 0x12, 0xc1, 0xb7, 0xa7, 0xf8, 0xa7, + 0x91, 0x00, 0xf0, 0xf2, 0x67, 0xde, 0x68, 0xdc, 0x44, 0x8f, 0x60, 0x2d, 0xf4, 0x82, 0xc0, 0x7b, + 0x21, 0x93, 0xc2, 0xac, 0xa4, 0x0a, 0xe7, 0x90, 0x78, 0x75, 0x50, 0x4c, 0xb7, 0x4f, 0x02, 0x99, + 0xb6, 0x30, 0x2b, 0xed, 0x2a, 0xa3, 0x90, 0x58, 0x9f, 0xc2, 0x7a, 0x38, 0x0c, 0xfc, 0xc0, 0x0e, + 0x89, 0x4c, 0x5c, 0x9c, 0x95, 0x18, 0xc5, 0x2c, 0x12, 0xb7, 0x05, 0xa5, 0xa1, 0xdb, 0x25, 0x81, + 0x41, 0x2e, 0x7c, 0x2f, 0x24, 0x5d, 0xd9, 0xc0, 0xf2, 0xac, 0x06, 0xb6, 0x18, 0x95, 0xc6, 0x99, + 0x24, 0x23, 0x9f, 0x00, 0x3a, 0x77, 0x86, 0x41, 0x90, 0xa6, 0x5f, 0x99, 0x95, 0x7e, 0x4d, 0x90, + 0xa4, 0x5d, 0xf3, 0x8c, 0x98, 0xdd, 0x17, 0xc4, 0x4c, 0xf9, 0x7c, 0x75, 0x66, 0xd7, 0xc4, 0x2c, + 0x63, 0xd9, 0xf6, 0x9f, 0x97, 0x20, 0x17, 0xef, 0x29, 0xd4, 0x12, 0x47, 0xd0, 0x1c, 0x63, 0xfe, + 0xf0, 0xf5, 0x76, 0xa6, 0x7c, 0x24, 0x55, 0x21, 0xe7, 0x7b, 0xa1, 0x4d, 0xf5, 0x6c, 0x5f, 0x16, + 0x0e, 0xbe, 0x35, 0x85, 0xb4, 0x2d, 0xba, 0xe3, 0x04, 0xa8, 0xfe, 0x6e, 0x71, 0x7c, 0x40, 0x9d, + 0x35, 0x1f, 0x36, 0x5b, 0x8f, 0x9b, 0x46, 0x7c, 0xfc, 0x28, 0x6f, 0xa0, 0x22, 0xe4, 0x1a, 0xda, + 0x91, 0x6e, 0x68, 0x4f, 0x34, 0x25, 0x83, 0x96, 0x21, 0x8f, 0xeb, 0xc7, 0x27, 0xbc, 0x99, 0x45, + 0x25, 0xd8, 0x60, 0xca, 0xd6, 0x91, 0x11, 0x77, 0x3a, 0xc4, 0xad, 0xc7, 0xca, 0x1c, 0x3d, 0x51, + 0x78, 0xc7, 0x49, 0xd5, 0x3c, 0x55, 0xc5, 0xa0, 0x84, 0x8b, 0xa9, 0x16, 0xd0, 0x36, 0x6c, 0x25, + 0xa8, 0xb4, 0x6e, 0x91, 0xc2, 0x4e, 0xeb, 0xb5, 0x76, 0xab, 0xde, 0xd4, 0x8d, 0x43, 0x4d, 0x7f, + 0xac, 0x69, 0x4d, 0xaa, 0xa5, 0xa7, 0x51, 0x11, 0x72, 0xcd, 0x56, 0x47, 0x33, 0xf4, 0x7a, 0x5b, + 0xc9, 0xd1, 0x31, 0x9e, 0xb5, 0xdb, 0x1a, 0x36, 0x1a, 0xf5, 0xb6, 0x92, 0xa7, 0xcd, 0x46, 0xeb, + 0xb1, 0x68, 0x02, 0x3d, 0xb9, 0x4e, 0x5b, 0x67, 0xfa, 0x09, 0x1b, 0x95, 0x52, 0x40, 0xab, 0x50, + 0xe0, 0x6d, 0x66, 0x4f, 0x29, 0x22, 0x05, 0x8a, 0x5c, 0x50, 0xd5, 0x9a, 0xba, 0x86, 0x95, 0x65, + 0xb4, 0x09, 0x6b, 0x8c, 0xfe, 0xb0, 0xa5, 0xeb, 0xad, 0x53, 0xd1, 0x71, 0x85, 0xfa, 0x4b, 0x16, + 0x33, 0xbe, 0x55, 0x7a, 0x78, 0xcb, 0x52, 0x41, 0xa2, 0x24, 0xb3, 0xd6, 0x9e, 0x68, 0x86, 0xde, + 0x6a, 0x1b, 0x87, 0xad, 0xb3, 0x66, 0xad, 0x82, 0x9f, 0x28, 0x6b, 0x29, 0x15, 0x9f, 0x75, 0xb5, + 0x85, 0x9b, 0x1a, 0x56, 0x10, 0xba, 0x0d, 0xa5, 0x44, 0x25, 0x18, 0x13, 0xe0, 0x7a, 0xe2, 0x7e, + 0xaa, 0x65, 0x3f, 0x04, 0x6e, 0x63, 0xec, 0xc8, 0x4b, 0xe6, 0x36, 0xd3, 0xba, 0x94, 0xbd, 0x2d, + 0xb4, 0x03, 0x37, 0xc7, 0xba, 0x49, 0x83, 0x37, 0xc6, 0xab, 0x3a, 0x69, 0xb1, 0x84, 0xee, 0xc0, + 0x2d, 0x79, 0x9d, 0x0d, 0xbe, 0x04, 0xf1, 0x8a, 0x29, 0x37, 0xd1, 0x2e, 0xdc, 0x4e, 0x2d, 0xe9, + 0x64, 0x8f, 0x6d, 0xea, 0x50, 0x4e, 0x51, 0xc1, 0x86, 0x8e, 0x2b, 0xc7, 0xb4, 0x8e, 0xb8, 0x45, + 0xbd, 0x2f, 0x70, 0x92, 0xf8, 0x36, 0x2b, 0x86, 0xe2, 0xb9, 0xb7, 0xcf, 0xda, 0xf5, 0x86, 0xb2, + 0x43, 0x8b, 0xa1, 0xf1, 0xf0, 0xb8, 0xf0, 0x4d, 0x8a, 0x3f, 0x6a, 0x61, 0xed, 0x44, 0xab, 0xd4, + 0x8c, 0x63, 0x56, 0x2b, 0x35, 0x2a, 0xca, 0x1d, 0x5a, 0xb1, 0x54, 0x4f, 0xea, 0x4d, 0xe3, 0xb8, + 0x59, 0xd1, 0x4f, 0x28, 0xe5, 0x2e, 0xb5, 0xcf, 0x44, 0x8c, 0xf7, 0xb8, 0xd5, 0xa4, 0xd2, 0xb7, + 0x28, 0x9e, 0x49, 0x39, 0xb3, 0x10, 0xab, 0xea, 0x0f, 0xa1, 0xd8, 0xf0, 0x2c, 0xb6, 0x37, 0xeb, + 0x6e, 0xcf, 0x43, 0xef, 0xc2, 0x92, 0x63, 0x46, 0x86, 0xe3, 0xf6, 0x45, 0x79, 0xb0, 0x1e, 0x6f, + 0x45, 0xba, 0x55, 0xcb, 0x0d, 0x33, 0x6a, 0xb8, 0x7d, 0xbc, 0xe8, 0xb0, 0xbf, 0xea, 0x63, 0xc8, + 0xb5, 0x03, 0x5a, 0x1c, 0x47, 0x23, 0x84, 0x60, 0xde, 0x35, 0x07, 0x44, 0x14, 0x44, 0xec, 0x37, + 0xad, 
0x25, 0x9f, 0x9b, 0xce, 0x90, 0x88, 0x2a, 0x88, 0x37, 0xd0, 0x5b, 0x50, 0x1c, 0xda, 0x6e, + 0xf4, 0xc1, 0x7b, 0x06, 0x57, 0xd2, 0x44, 0x32, 0x8f, 0x0b, 0x5c, 0xf6, 0x88, 0x8a, 0xd4, 0x5f, + 0xcc, 0x81, 0xa2, 0xb9, 0x91, 0x1d, 0x8d, 0xa4, 0x02, 0x46, 0x81, 0xb9, 0x81, 0xdd, 0x15, 0x06, + 0xe8, 0x4f, 0xb4, 0x05, 0x8b, 0x8e, 0x67, 0x99, 0x4e, 0x6c, 0x40, 0xb4, 0xd0, 0x2e, 0x14, 0xba, + 0x24, 0xb4, 0x02, 0xdb, 0x67, 0x49, 0x85, 0x57, 0xb2, 0xb2, 0x88, 0x8e, 0x2c, 0xb4, 0xbc, 0x20, + 0x2e, 0x04, 0x78, 0x03, 0xbd, 0x09, 0x20, 0x9d, 0xc4, 0xbc, 0x0a, 0x90, 0x24, 0x54, 0x1f, 0x79, + 0xbe, 0x6d, 0x99, 0x8e, 0x1d, 0x8d, 0x44, 0x1d, 0x20, 0x49, 0x2e, 0x97, 0x58, 0x4b, 0x5f, 0xb7, + 0xc4, 0xaa, 0x43, 0xde, 0x11, 0xeb, 0x13, 0x96, 0x72, 0xac, 0x16, 0x9a, 0xc6, 0x26, 0xaf, 0x27, + 0x1e, 0xa3, 0xd1, 0x31, 0x80, 0xcf, 0x17, 0xcb, 0x26, 0x61, 0x29, 0xcf, 0xb8, 0xa6, 0x26, 0x5a, + 0xb1, 0xba, 0x58, 0x82, 0xaa, 0x7f, 0xcd, 0xc2, 0x46, 0xc7, 0xec, 0x91, 0x0e, 0x31, 0x03, 0xeb, + 0x99, 0xb4, 0x40, 0x1f, 0xc3, 0x82, 0xd9, 0x1d, 0x3a, 0x91, 0xb8, 0x9d, 0xcc, 0x70, 0xe8, 0x70, + 0x1c, 0x25, 0x08, 0x7d, 0xcf, 0xeb, 0xb1, 0xe5, 0x9c, 0x8d, 0x80, 0xe1, 0x50, 0x15, 0x96, 0x06, + 0xa4, 0x4b, 0x97, 0x43, 0x1c, 0x4f, 0x33, 0x50, 0xc4, 0x48, 0xa4, 0x41, 0xee, 0xb9, 0xed, 0x39, + 0x2c, 0x06, 0xe6, 0x67, 0x65, 0x49, 0xa0, 0xe8, 0x23, 0x98, 0x0f, 0x4c, 0x6b, 0x34, 0x7b, 0x85, + 0xc6, 0x60, 0xea, 0x0b, 0x28, 0xd0, 0xdd, 0xe6, 0xb9, 0x7d, 0x4c, 0xac, 0x08, 0xdd, 0x87, 0xc2, + 0xc0, 0x76, 0x8d, 0x6b, 0x6c, 0xce, 0xfc, 0xc0, 0x76, 0xf9, 0x4f, 0x06, 0x32, 0x2f, 0x12, 0x50, + 0xf6, 0x55, 0x20, 0xf3, 0x82, 0xff, 0x54, 0x03, 0xc8, 0x57, 0xe9, 0xbd, 0x94, 0xe5, 0x83, 0x3d, + 0x58, 0x60, 0x97, 0x54, 0x61, 0x10, 0xa5, 0xb0, 0xac, 0x1b, 0xe6, 0x1d, 0xc6, 0x3b, 0x2a, 0x2b, + 0xef, 0xa8, 0x77, 0x60, 0xc5, 0xb7, 0x2f, 0x88, 0x63, 0xf4, 0x02, 0xd3, 0x4a, 0x36, 0x63, 0x16, + 0x2f, 0x33, 0xe9, 0x91, 0x10, 0xaa, 0x9f, 0x42, 0xa9, 0xe6, 0x0d, 0x6c, 0xd7, 0x74, 0x23, 0x46, + 0x1a, 0x4a, 0x51, 0xf5, 0x23, 0x58, 0x64, 0x16, 0xc2, 0x52, 0x86, 0xc5, 0xec, 0xde, 0x14, 0x4f, + 0x26, 0x83, 0xc7, 0x02, 0xa7, 0x86, 0xb0, 0xca, 0xee, 0x48, 0xed, 0x24, 0x86, 0xd1, 0x4f, 0x61, + 0xb5, 0x2b, 0x0c, 0x1a, 0x09, 0x3b, 0x9d, 0xe1, 0xf7, 0xa6, 0xb0, 0xbf, 0x6c, 0x98, 0x78, 0xa5, + 0x9b, 0xd2, 0xa8, 0xbf, 0xce, 0x40, 0xae, 0x1a, 0x78, 0xfe, 0x89, 0xed, 0x46, 0xff, 0x85, 0xbb, + 0x57, 0x3a, 0x55, 0x65, 0x2f, 0xa5, 0xaa, 0x7d, 0x58, 0xb7, 0x07, 0xbe, 0x17, 0x44, 0xa6, 0x6b, + 0x91, 0x49, 0xef, 0xa3, 0xb1, 0x2a, 0x59, 0x82, 0x9f, 0xc0, 0x7a, 0x3c, 0x5c, 0xd9, 0xfb, 0x47, + 0x00, 0x56, 0xe0, 0xf9, 0xc6, 0x33, 0x2a, 0x17, 0x2b, 0x30, 0x2d, 0x6b, 0xc4, 0x3c, 0x38, 0x6f, + 0xc5, 0x8c, 0xea, 0x07, 0xb0, 0x9a, 0xd0, 0xb7, 0xcd, 0xc0, 0x1c, 0x84, 0xe8, 0x6d, 0x58, 0x36, + 0x43, 0x9f, 0x58, 0x91, 0xc1, 0x1e, 0x57, 0x38, 0x7b, 0x16, 0x17, 0xb9, 0x10, 0x33, 0x99, 0x5a, + 0x03, 0xf4, 0x98, 0x9c, 0xd7, 0xe2, 0x2b, 0x94, 0x80, 0x96, 0x61, 0xdd, 0x76, 0x2d, 0x67, 0xd8, + 0x25, 0x46, 0x9f, 0x78, 0xa9, 0xd7, 0x8c, 0x1c, 0x5e, 0x13, 0xaa, 0x63, 0xe2, 0x89, 0x47, 0x0d, + 0xf5, 0x0f, 0x59, 0x28, 0xb2, 0x10, 0xa8, 0xd2, 0x3b, 0xf6, 0x45, 0x84, 0x9a, 0xb0, 0xcc, 0x76, + 0x85, 0xe7, 0xf6, 0x8d, 0x80, 0x58, 0x91, 0x58, 0x90, 0x69, 0x57, 0x6d, 0x69, 0x47, 0xe2, 0x82, + 0x23, 0x6d, 0xcf, 0x77, 0x60, 0xc5, 0x31, 0xdd, 0xfe, 0x90, 0x5e, 0xfb, 0xb9, 0xab, 0xb2, 0xbb, + 0x73, 0x7b, 0x79, 0xbc, 0x1c, 0x4b, 0xd9, 0xc4, 0xd1, 0x53, 0x58, 0x1b, 0x7b, 0xd3, 0xf0, 0xd9, + 0x64, 0x44, 0xcd, 0x5b, 0xbe, 0xa6, 0x53, 0x85, 0xf7, 0xf0, 0xaa, 0x35, 0xe1, 0x4e, 0x0b, 0x36, + 0x52, 0xef, 0x59, 0x31, 0xfd, 
0x22, 0xa3, 0xbf, 0x37, 0x85, 0xfe, 0xb2, 0x93, 0x31, 0x7a, 0x71, + 0x49, 0xa6, 0xfe, 0x3d, 0x03, 0x1b, 0x22, 0x3a, 0x08, 0x73, 0x28, 0x26, 0x5f, 0x0c, 0x49, 0x18, + 0xa1, 0x07, 0xb0, 0xc0, 0xde, 0x38, 0x84, 0x23, 0xff, 0xef, 0x3a, 0x6f, 0x16, 0x98, 0x43, 0xd0, + 0x21, 0xe4, 0x7a, 0xfc, 0xa5, 0x8a, 0xbb, 0xad, 0x70, 0xf0, 0xff, 0xd7, 0x7b, 0xd8, 0xc2, 0x09, + 0x8e, 0xee, 0x30, 0xfe, 0xe8, 0x62, 0xf1, 0x15, 0x66, 0x91, 0x3e, 0x7d, 0x87, 0xc9, 0x41, 0x81, + 0x8b, 0xb6, 0xd4, 0x52, 0x1f, 0xc2, 0x16, 0xd3, 0x8e, 0x37, 0x43, 0x1c, 0x3c, 0x0a, 0xcc, 0x8d, + 0x9f, 0x7e, 0xe8, 0x4f, 0x74, 0x07, 0x0a, 0x3e, 0x35, 0xee, 0x0e, 0x07, 0xe7, 0x24, 0x88, 0x5f, + 0xd5, 0xa8, 0xa8, 0xc9, 0x24, 0xea, 0x5f, 0x72, 0xb0, 0x39, 0xe1, 0xb7, 0xd0, 0xf7, 0xdc, 0x90, + 0xa0, 0x4f, 0x40, 0xe9, 0x99, 0x16, 0x91, 0xde, 0x2e, 0xe3, 0x6d, 0xf6, 0xdd, 0x99, 0xae, 0x56, + 0x78, 0xb5, 0x97, 0x6a, 0x87, 0xe8, 0x1c, 0x36, 0xe2, 0x57, 0x84, 0x14, 0x3b, 0x77, 0xf1, 0xfe, + 0x14, 0xf6, 0xc9, 0xf2, 0x0b, 0xaf, 0xc7, 0x64, 0xb2, 0x8d, 0xa7, 0xa0, 0x38, 0x5e, 0xdf, 0x4b, + 0xf1, 0xcf, 0xbd, 0x1e, 0xff, 0x2a, 0x25, 0x92, 0xb9, 0x3f, 0x85, 0x35, 0xc7, 0x3c, 0x27, 0x4e, + 0x8a, 0x7c, 0xfe, 0xf5, 0xc8, 0x15, 0xc6, 0x34, 0x31, 0xf2, 0x89, 0x37, 0xe3, 0xb0, 0xb4, 0xf0, + 0x9a, 0x23, 0xa7, 0x44, 0x32, 0xb7, 0x01, 0x1b, 0xbd, 0xa1, 0xe3, 0x18, 0x13, 0x06, 0xd8, 0x3b, + 0xc5, 0xf4, 0x75, 0xd5, 0x53, 0x6c, 0x18, 0x51, 0xaa, 0xb4, 0x0c, 0xd9, 0xb0, 0x15, 0x9a, 0x3d, + 0x62, 0x84, 0xac, 0x04, 0x93, 0x4d, 0xf0, 0xdd, 0x7e, 0x7f, 0x8a, 0x89, 0xab, 0xca, 0x37, 0xbc, + 0x11, 0x5e, 0x55, 0xd4, 0xb9, 0x70, 0x8b, 0x6f, 0xac, 0x71, 0x05, 0x28, 0xdb, 0xcb, 0x5d, 0x2b, + 0x79, 0x4d, 0x1c, 0xbf, 0xf8, 0xa6, 0x9d, 0x16, 0x48, 0xf6, 0x7a, 0xb0, 0x29, 0xa5, 0x48, 0xc9, + 0x52, 0x81, 0x59, 0x3a, 0xb8, 0x6e, 0x9a, 0x94, 0x23, 0xd7, 0xba, 0xe2, 0x60, 0x6b, 0xc3, 0x72, + 0x2a, 0x5d, 0xb2, 0x37, 0x9e, 0xe9, 0x09, 0x43, 0xce, 0x93, 0xb8, 0x28, 0x67, 0x48, 0x5a, 0x2b, + 0x91, 0x20, 0xf0, 0x02, 0x56, 0xf1, 0x49, 0xb5, 0x52, 0xe0, 0x5b, 0xe5, 0x0e, 0xfb, 0x06, 0x80, + 0x79, 0x07, 0xd4, 0x12, 0x6f, 0xbf, 0x17, 0x51, 0x69, 0x93, 0xf5, 0x7d, 0xff, 0x3a, 0xfe, 0xbb, + 0x94, 0x88, 0x70, 0xcc, 0xa2, 0xfe, 0x5e, 0x4a, 0xcb, 0x47, 0xb6, 0x33, 0xce, 0x2e, 0xa7, 0x50, + 0xb4, 0x5d, 0x7f, 0x18, 0xf1, 0x17, 0xc6, 0xfe, 0x35, 0x8f, 0xb9, 0x3a, 0x85, 0xb0, 0x67, 0xc7, + 0x3e, 0x2e, 0xd8, 0xe3, 0x06, 0xc2, 0x90, 0x0f, 0x04, 0x75, 0x9c, 0x47, 0xde, 0x9b, 0xc2, 0x75, + 0x65, 0xd6, 0xc3, 0x63, 0x1a, 0x75, 0x00, 0xdb, 0x87, 0x66, 0x94, 0xc4, 0x1c, 0xef, 0x18, 0xc6, + 0xe7, 0x4a, 0x0b, 0x72, 0x01, 0xff, 0x19, 0xa7, 0xc5, 0xfb, 0xb3, 0x19, 0x64, 0x58, 0x9c, 0x90, + 0xa8, 0x5f, 0xc0, 0xad, 0x2b, 0xcd, 0x09, 0x87, 0xa5, 0x66, 0x98, 0xf9, 0x66, 0x66, 0xf8, 0xc7, + 0x2c, 0x94, 0x2a, 0xe1, 0xc8, 0xb5, 0xd2, 0x4b, 0xc4, 0x27, 0xf8, 0x0d, 0xaf, 0xd0, 0xff, 0xe4, + 0x59, 0x4a, 0x19, 0xbd, 0x61, 0x24, 0xcd, 0x72, 0xfe, 0x5a, 0x8c, 0x2d, 0x86, 0x11, 0xd3, 0x2c, + 0x7a, 0x52, 0x4b, 0x1d, 0xc0, 0xcd, 0x2b, 0x5c, 0x2a, 0x16, 0xf1, 0x92, 0xb9, 0xcc, 0xd7, 0x35, + 0x37, 0x84, 0x37, 0x99, 0xb9, 0x54, 0xe8, 0x50, 0x9b, 0x49, 0xa0, 0x76, 0x2e, 0x05, 0xea, 0xb4, + 0xab, 0xc4, 0xcb, 0x42, 0x42, 0x0a, 0xd6, 0x11, 0xdc, 0x79, 0xa9, 0x59, 0x31, 0xd7, 0x47, 0x97, + 0x03, 0xf6, 0xfb, 0xb3, 0x1b, 0xbe, 0x1c, 0xb4, 0x21, 0x14, 0xa4, 0x20, 0x43, 0xc7, 0x00, 0x7d, + 0x2b, 0x34, 0xc4, 0x87, 0x29, 0xee, 0xcf, 0x69, 0x37, 0xb1, 0x63, 0x2b, 0x14, 0x9f, 0xa5, 0xf2, + 0xfd, 0xf8, 0x27, 0xba, 0x05, 0xf9, 0x81, 0x3d, 0x20, 0x06, 0x7b, 0x43, 0x16, 0x5f, 0xc7, 0xa8, + 0x40, 0x1f, 0xf9, 0x44, 0xfd, 0x59, 0x06, 0x8a, 0xf2, 
0x2a, 0xa0, 0x47, 0xb0, 0x4a, 0xcd, 0x76, + 0x49, 0x18, 0xd9, 0x2e, 0x3f, 0x07, 0x32, 0xd7, 0x3a, 0x44, 0x8f, 0xad, 0xb0, 0x36, 0x06, 0xe1, + 0x95, 0x7e, 0xaa, 0x8d, 0x76, 0x00, 0xce, 0xa9, 0x4f, 0x8d, 0xd0, 0xfe, 0x92, 0x88, 0x7a, 0x2d, + 0xcf, 0x24, 0x1d, 0xfb, 0x4b, 0xa2, 0xee, 0x40, 0x3e, 0x19, 0xfc, 0xe5, 0x72, 0x4f, 0x55, 0x61, + 0x25, 0xcd, 0x7f, 0x45, 0x9f, 0xdf, 0x64, 0x61, 0xad, 0x15, 0x7f, 0x37, 0x3e, 0x25, 0x91, 0xd9, + 0x35, 0x23, 0x13, 0x35, 0x60, 0x21, 0xa4, 0x5e, 0x17, 0x4f, 0x24, 0xd3, 0xbe, 0x6b, 0x5d, 0x22, + 0x60, 0x07, 0x09, 0xc1, 0x9c, 0x04, 0x7d, 0x08, 0x05, 0x2b, 0x20, 0x66, 0x44, 0x8c, 0xc8, 0x1e, + 0xf0, 0x07, 0xab, 0xc2, 0xc1, 0x76, 0xcc, 0x19, 0x7f, 0x9c, 0x2e, 0xeb, 0xf1, 0xc7, 0x69, 0x0c, + 0xbc, 0x3b, 0x15, 0x50, 0xf0, 0xd0, 0xef, 0x26, 0xe0, 0xc5, 0xe9, 0x60, 0xde, 0x9d, 0x0a, 0xd4, + 0x1f, 0xc3, 0x02, 0x1b, 0x09, 0xda, 0x84, 0xb5, 0x8e, 0x5e, 0xd1, 0x27, 0x3f, 0x08, 0x17, 0x60, + 0xa9, 0x8a, 0xb5, 0x8a, 0xae, 0xd5, 0x94, 0x0c, 0x6d, 0xe0, 0xb3, 0x66, 0xb3, 0xde, 0x3c, 0x56, + 0xb2, 0x28, 0x07, 0xf3, 0xb5, 0x56, 0x53, 0x53, 0xe6, 0xd0, 0x32, 0xe4, 0xab, 0x95, 0x66, 0x55, + 0x6b, 0x34, 0xb4, 0x9a, 0x32, 0x7f, 0x97, 0x00, 0x48, 0x9f, 0x33, 0x0a, 0xb0, 0x24, 0x9e, 0xf1, + 0x95, 0x37, 0xd0, 0x1a, 0x2c, 0x3f, 0xd2, 0xf0, 0x13, 0xe3, 0xac, 0xd9, 0xa8, 0x3f, 0xd4, 0x1a, + 0x4f, 0x94, 0x0c, 0x2a, 0x42, 0x2e, 0x69, 0x65, 0x69, 0xab, 0xdd, 0xea, 0x74, 0xea, 0x87, 0x0d, + 0x4a, 0x0c, 0xb0, 0x28, 0x34, 0xf3, 0x68, 0x15, 0x0a, 0x0c, 0x2a, 0x04, 0x0b, 0x07, 0xff, 0xc8, + 0xc2, 0x8a, 0x7c, 0x9c, 0x7a, 0x01, 0xfa, 0x6d, 0x06, 0xd6, 0xaf, 0x38, 0x13, 0xd0, 0x0f, 0xa6, + 0x5d, 0xcf, 0x5f, 0x7a, 0x6c, 0x6d, 0x3f, 0x78, 0x1d, 0x28, 0xdf, 0x7a, 0xea, 0x3b, 0x5f, 0xfd, + 0xe9, 0x6f, 0x3f, 0xcf, 0xde, 0x51, 0xb7, 0x27, 0xff, 0xfd, 0x22, 0x7c, 0x20, 0xca, 0x21, 0xf2, + 0x20, 0x73, 0x17, 0xfd, 0x2a, 0x03, 0x37, 0x5e, 0x92, 0x1c, 0xd0, 0x47, 0xd7, 0xc9, 0x00, 0x2f, + 0xcd, 0x65, 0xdb, 0x3b, 0x31, 0x5c, 0xfa, 0x27, 0x88, 0x71, 0x2c, 0xaa, 0x65, 0x36, 0xc0, 0x3d, + 0xf5, 0x6d, 0x69, 0x80, 0x3d, 0x8a, 0x7f, 0x60, 0x5e, 0xe2, 0x7d, 0x90, 0xb9, 0x7b, 0xf8, 0x55, + 0x06, 0xde, 0xb2, 0xbc, 0xc1, 0xab, 0xc7, 0x74, 0xb8, 0x9e, 0x5e, 0x95, 0x36, 0x0d, 0xc0, 0x76, + 0xe6, 0x69, 0x55, 0xa0, 0xfa, 0x1e, 0xbd, 0x4a, 0x97, 0xbd, 0xa0, 0xbf, 0xdf, 0x27, 0x2e, 0x0b, + 0xcf, 0x7d, 0xae, 0x32, 0x7d, 0x3b, 0x7c, 0xc9, 0x7f, 0x78, 0x7c, 0xc8, 0x05, 0xff, 0xca, 0x64, + 0xce, 0x17, 0x19, 0xe4, 0xfe, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa5, 0xcd, 0x74, 0xeb, 0xe8, + 0x22, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/text_annotation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/text_annotation.pb.go new file mode 100644 index 0000000..99c9960 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/text_annotation.pb.go @@ -0,0 +1,798 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p2beta1/text_annotation.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to denote the type of break found. New line, space etc. +type TextAnnotation_DetectedBreak_BreakType int32 + +const ( + // Unknown break label type. + TextAnnotation_DetectedBreak_UNKNOWN TextAnnotation_DetectedBreak_BreakType = 0 + // Regular space. + TextAnnotation_DetectedBreak_SPACE TextAnnotation_DetectedBreak_BreakType = 1 + // Sure space (very wide). + TextAnnotation_DetectedBreak_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 2 + // Line-wrapping break. + TextAnnotation_DetectedBreak_EOL_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 3 + // End-line hyphen that is not present in text; does not co-occur with + // `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + TextAnnotation_DetectedBreak_HYPHEN TextAnnotation_DetectedBreak_BreakType = 4 + // Line break that ends a paragraph. + TextAnnotation_DetectedBreak_LINE_BREAK TextAnnotation_DetectedBreak_BreakType = 5 +) + +var TextAnnotation_DetectedBreak_BreakType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SPACE", + 2: "SURE_SPACE", + 3: "EOL_SURE_SPACE", + 4: "HYPHEN", + 5: "LINE_BREAK", +} +var TextAnnotation_DetectedBreak_BreakType_value = map[string]int32{ + "UNKNOWN": 0, + "SPACE": 1, + "SURE_SPACE": 2, + "EOL_SURE_SPACE": 3, + "HYPHEN": 4, + "LINE_BREAK": 5, +} + +func (x TextAnnotation_DetectedBreak_BreakType) String() string { + return proto.EnumName(TextAnnotation_DetectedBreak_BreakType_name, int32(x)) +} +func (TextAnnotation_DetectedBreak_BreakType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{0, 1, 0} +} + +// Type of a block (text, image etc) as identified by OCR. +type Block_BlockType int32 + +const ( + // Unknown block type. + Block_UNKNOWN Block_BlockType = 0 + // Regular text block. + Block_TEXT Block_BlockType = 1 + // Table block. + Block_TABLE Block_BlockType = 2 + // Image block. + Block_PICTURE Block_BlockType = 3 + // Horizontal/vertical line box. + Block_RULER Block_BlockType = 4 + // Barcode block. + Block_BARCODE Block_BlockType = 5 +) + +var Block_BlockType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "TEXT", + 2: "TABLE", + 3: "PICTURE", + 4: "RULER", + 5: "BARCODE", +} +var Block_BlockType_value = map[string]int32{ + "UNKNOWN": 0, + "TEXT": 1, + "TABLE": 2, + "PICTURE": 3, + "RULER": 4, + "BARCODE": 5, +} + +func (x Block_BlockType) String() string { + return proto.EnumName(Block_BlockType_name, int32(x)) +} +func (Block_BlockType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{2, 0} +} + +// TextAnnotation contains a structured representation of OCR extracted text. +// The hierarchy of an OCR extracted text structure is like this: +// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol +// Each structural component, starting from Page, may further have their own +// properties. Properties describe detected languages, breaks etc.. Please refer +// to the +// [TextAnnotation.TextProperty][google.cloud.vision.v1p2beta1.TextAnnotation.TextProperty] +// message definition below for more detail. +type TextAnnotation struct { + // List of pages detected by OCR. + Pages []*Page `protobuf:"bytes,1,rep,name=pages,proto3" json:"pages,omitempty"` + // UTF-8 text detected on the pages. 
+ Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{0} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +// Detected language for a structural component. +type TextAnnotation_DetectedLanguage struct { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Confidence of detected language. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedLanguage) Reset() { *m = TextAnnotation_DetectedLanguage{} } +func (m *TextAnnotation_DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedLanguage) ProtoMessage() {} +func (*TextAnnotation_DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{0, 0} +} +func (m *TextAnnotation_DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedLanguage.Merge(dst, src) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Size(m) +} +func (m *TextAnnotation_DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedLanguage proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *TextAnnotation_DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected start or end of a structural component. 
+type TextAnnotation_DetectedBreak struct { + // Detected break type. + Type TextAnnotation_DetectedBreak_BreakType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p2beta1.TextAnnotation_DetectedBreak_BreakType" json:"type,omitempty"` + // True if break prepends the element. + IsPrefix bool `protobuf:"varint,2,opt,name=is_prefix,json=isPrefix,proto3" json:"is_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedBreak) Reset() { *m = TextAnnotation_DetectedBreak{} } +func (m *TextAnnotation_DetectedBreak) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedBreak) ProtoMessage() {} +func (*TextAnnotation_DetectedBreak) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{0, 1} +} +func (m *TextAnnotation_DetectedBreak) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedBreak) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedBreak) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedBreak.Merge(dst, src) +} +func (m *TextAnnotation_DetectedBreak) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Size(m) +} +func (m *TextAnnotation_DetectedBreak) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedBreak.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedBreak proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedBreak) GetType() TextAnnotation_DetectedBreak_BreakType { + if m != nil { + return m.Type + } + return TextAnnotation_DetectedBreak_UNKNOWN +} + +func (m *TextAnnotation_DetectedBreak) GetIsPrefix() bool { + if m != nil { + return m.IsPrefix + } + return false +} + +// Additional information detected on the structural component. +type TextAnnotation_TextProperty struct { + // A list of detected languages together with confidence. + DetectedLanguages []*TextAnnotation_DetectedLanguage `protobuf:"bytes,1,rep,name=detected_languages,json=detectedLanguages,proto3" json:"detected_languages,omitempty"` + // Detected start or end of a text segment. 
+ DetectedBreak *TextAnnotation_DetectedBreak `protobuf:"bytes,2,opt,name=detected_break,json=detectedBreak,proto3" json:"detected_break,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_TextProperty) Reset() { *m = TextAnnotation_TextProperty{} } +func (m *TextAnnotation_TextProperty) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_TextProperty) ProtoMessage() {} +func (*TextAnnotation_TextProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{0, 2} +} +func (m *TextAnnotation_TextProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_TextProperty.Unmarshal(m, b) +} +func (m *TextAnnotation_TextProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_TextProperty.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_TextProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_TextProperty.Merge(dst, src) +} +func (m *TextAnnotation_TextProperty) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_TextProperty.Size(m) +} +func (m *TextAnnotation_TextProperty) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_TextProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_TextProperty proto.InternalMessageInfo + +func (m *TextAnnotation_TextProperty) GetDetectedLanguages() []*TextAnnotation_DetectedLanguage { + if m != nil { + return m.DetectedLanguages + } + return nil +} + +func (m *TextAnnotation_TextProperty) GetDetectedBreak() *TextAnnotation_DetectedBreak { + if m != nil { + return m.DetectedBreak + } + return nil +} + +// Detected page from OCR. +type Page struct { + // Additional information detected on the page. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Page width. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Width int32 `protobuf:"varint,2,opt,name=width,proto3" json:"width,omitempty"` + // Page height. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Height int32 `protobuf:"varint,3,opt,name=height,proto3" json:"height,omitempty"` + // List of blocks of text, images etc on this page. + Blocks []*Block `protobuf:"bytes,4,rep,name=blocks,proto3" json:"blocks,omitempty"` + // Confidence of the OCR results on the page. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{1} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Page) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Page) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *Page) GetBlocks() []*Block { + if m != nil { + return m.Blocks + } + return nil +} + +func (m *Page) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Logical element on the page. +type Block struct { + // Additional information detected for the block. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the block. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // + // * when the text is horizontal it might look like: + // + // 0----1 + // | | + // 3----2 + // + // * when it's rotated 180 degrees around the top-left corner it becomes: + // + // 2----3 + // | | + // 1----0 + // + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of paragraphs in this block (if this blocks is of type text). + Paragraphs []*Paragraph `protobuf:"bytes,3,rep,name=paragraphs,proto3" json:"paragraphs,omitempty"` + // Detected block type (text, image etc) for this block. + BlockType Block_BlockType `protobuf:"varint,4,opt,name=block_type,json=blockType,proto3,enum=google.cloud.vision.v1p2beta1.Block_BlockType" json:"block_type,omitempty"` + // Confidence of the OCR results on the block. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Block) Reset() { *m = Block{} } +func (m *Block) String() string { return proto.CompactTextString(m) } +func (*Block) ProtoMessage() {} +func (*Block) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{2} +} +func (m *Block) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Block.Unmarshal(m, b) +} +func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Block.Marshal(b, m, deterministic) +} +func (dst *Block) XXX_Merge(src proto.Message) { + xxx_messageInfo_Block.Merge(dst, src) +} +func (m *Block) XXX_Size() int { + return xxx_messageInfo_Block.Size(m) +} +func (m *Block) XXX_DiscardUnknown() { + xxx_messageInfo_Block.DiscardUnknown(m) +} + +var xxx_messageInfo_Block proto.InternalMessageInfo + +func (m *Block) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Block) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Block) GetParagraphs() []*Paragraph { + if m != nil { + return m.Paragraphs + } + return nil +} + +func (m *Block) GetBlockType() Block_BlockType { + if m != nil { + return m.BlockType + } + return Block_UNKNOWN +} + +func (m *Block) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Structural unit of text representing a number of words in certain order. +type Paragraph struct { + // Additional information detected for the paragraph. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the paragraph. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of words in this paragraph. + Words []*Word `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + // Confidence of the OCR results for the paragraph. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Paragraph) Reset() { *m = Paragraph{} } +func (m *Paragraph) String() string { return proto.CompactTextString(m) } +func (*Paragraph) ProtoMessage() {} +func (*Paragraph) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{3} +} +func (m *Paragraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Paragraph.Unmarshal(m, b) +} +func (m *Paragraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Paragraph.Marshal(b, m, deterministic) +} +func (dst *Paragraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_Paragraph.Merge(dst, src) +} +func (m *Paragraph) XXX_Size() int { + return xxx_messageInfo_Paragraph.Size(m) +} +func (m *Paragraph) XXX_DiscardUnknown() { + xxx_messageInfo_Paragraph.DiscardUnknown(m) +} + +var xxx_messageInfo_Paragraph proto.InternalMessageInfo + +func (m *Paragraph) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Paragraph) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Paragraph) GetWords() []*Word { + if m != nil { + return m.Words + } + return nil +} + +func (m *Paragraph) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A word representation. +type Word struct { + // Additional information detected for the word. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the word. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of symbols in the word. + // The order of the symbols follows the natural reading order. + Symbols []*Symbol `protobuf:"bytes,3,rep,name=symbols,proto3" json:"symbols,omitempty"` + // Confidence of the OCR results for the word. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Word) Reset() { *m = Word{} } +func (m *Word) String() string { return proto.CompactTextString(m) } +func (*Word) ProtoMessage() {} +func (*Word) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{4} +} +func (m *Word) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Word.Unmarshal(m, b) +} +func (m *Word) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Word.Marshal(b, m, deterministic) +} +func (dst *Word) XXX_Merge(src proto.Message) { + xxx_messageInfo_Word.Merge(dst, src) +} +func (m *Word) XXX_Size() int { + return xxx_messageInfo_Word.Size(m) +} +func (m *Word) XXX_DiscardUnknown() { + xxx_messageInfo_Word.DiscardUnknown(m) +} + +var xxx_messageInfo_Word proto.InternalMessageInfo + +func (m *Word) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Word) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Word) GetSymbols() []*Symbol { + if m != nil { + return m.Symbols + } + return nil +} + +func (m *Word) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A single symbol representation. +type Symbol struct { + // Additional information detected for the symbol. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the symbol. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // The actual UTF-8 representation of the symbol. + Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` + // Confidence of the OCR results for the symbol. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Symbol) Reset() { *m = Symbol{} } +func (m *Symbol) String() string { return proto.CompactTextString(m) } +func (*Symbol) ProtoMessage() {} +func (*Symbol) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_233617d1b400f277, []int{5} +} +func (m *Symbol) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Symbol.Unmarshal(m, b) +} +func (m *Symbol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Symbol.Marshal(b, m, deterministic) +} +func (dst *Symbol) XXX_Merge(src proto.Message) { + xxx_messageInfo_Symbol.Merge(dst, src) +} +func (m *Symbol) XXX_Size() int { + return xxx_messageInfo_Symbol.Size(m) +} +func (m *Symbol) XXX_DiscardUnknown() { + xxx_messageInfo_Symbol.DiscardUnknown(m) +} + +var xxx_messageInfo_Symbol proto.InternalMessageInfo + +func (m *Symbol) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Symbol) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Symbol) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Symbol) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func init() { + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.vision.v1p2beta1.TextAnnotation") + proto.RegisterType((*TextAnnotation_DetectedLanguage)(nil), "google.cloud.vision.v1p2beta1.TextAnnotation.DetectedLanguage") + proto.RegisterType((*TextAnnotation_DetectedBreak)(nil), "google.cloud.vision.v1p2beta1.TextAnnotation.DetectedBreak") + proto.RegisterType((*TextAnnotation_TextProperty)(nil), "google.cloud.vision.v1p2beta1.TextAnnotation.TextProperty") + proto.RegisterType((*Page)(nil), "google.cloud.vision.v1p2beta1.Page") + proto.RegisterType((*Block)(nil), "google.cloud.vision.v1p2beta1.Block") + proto.RegisterType((*Paragraph)(nil), "google.cloud.vision.v1p2beta1.Paragraph") + proto.RegisterType((*Word)(nil), "google.cloud.vision.v1p2beta1.Word") + proto.RegisterType((*Symbol)(nil), "google.cloud.vision.v1p2beta1.Symbol") + proto.RegisterEnum("google.cloud.vision.v1p2beta1.TextAnnotation_DetectedBreak_BreakType", TextAnnotation_DetectedBreak_BreakType_name, TextAnnotation_DetectedBreak_BreakType_value) + proto.RegisterEnum("google.cloud.vision.v1p2beta1.Block_BlockType", Block_BlockType_name, Block_BlockType_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p2beta1/text_annotation.proto", fileDescriptor_text_annotation_233617d1b400f277) +} + +var fileDescriptor_text_annotation_233617d1b400f277 = []byte{ + // 774 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0xd3, 0x48, + 0x14, 0x5f, 0x27, 0x76, 0x1a, 0xbf, 0xb4, 0x91, 0x77, 0x76, 0xb5, 0x8a, 0xb2, 0xbb, 0xa8, 0xa4, + 0x20, 0x55, 0x02, 0x39, 0x6a, 0x7a, 0x2a, 0x45, 0xa0, 0x38, 0xb5, 0xd4, 0xaa, 0x21, 0xb5, 0xa6, + 0x09, 0xa5, 0x5c, 0x2c, 0xff, 0x99, 0x3a, 0x56, 0x13, 0x8f, 0x65, 0xbb, 0x6d, 0x72, 0xe5, 0x8a, + 0x04, 0x5f, 0x88, 0x2f, 0x83, 0xc4, 0x09, 0xf1, 0x01, 0x38, 0x22, 0x8f, 0xed, 0x34, 0x09, 0xa2, + 0xe6, 0x8f, 0x38, 0xf4, 0x12, 0xcd, 0x7b, 0x79, 0xbf, 0x37, 0xef, 0xf7, 0x7b, 0xf3, 0x3c, 0x03, + 0xdb, 0x0e, 0xa5, 0xce, 0x88, 0x34, 0xad, 0x11, 0xbd, 0xb0, 0x9b, 0x97, 0x6e, 
0xe8, 0x52, 0xaf, + 0x79, 0xb9, 0xe5, 0xb7, 0x4c, 0x12, 0x19, 0x5b, 0xcd, 0x88, 0x4c, 0x22, 0xdd, 0xf0, 0x3c, 0x1a, + 0x19, 0x91, 0x4b, 0x3d, 0xd9, 0x0f, 0x68, 0x44, 0xd1, 0xff, 0x09, 0x48, 0x66, 0x20, 0x39, 0x01, + 0xc9, 0x33, 0x50, 0xfd, 0xbf, 0x34, 0xa7, 0xe1, 0xbb, 0xcd, 0x6b, 0x6c, 0x98, 0x80, 0xeb, 0x0f, + 0x6f, 0xde, 0xd1, 0x21, 0x74, 0x4c, 0xa2, 0x60, 0x9a, 0x44, 0x37, 0x5e, 0x0b, 0x50, 0xed, 0x93, + 0x49, 0xd4, 0x9e, 0xe5, 0x41, 0x3b, 0x20, 0xf8, 0x86, 0x43, 0xc2, 0x1a, 0xb7, 0x5e, 0xdc, 0xac, + 0xb4, 0x36, 0xe4, 0x1b, 0xab, 0x91, 0x35, 0xc3, 0x21, 0x38, 0x41, 0x20, 0x04, 0x7c, 0xcc, 0xa8, + 0x56, 0x58, 0xe7, 0x36, 0x45, 0xcc, 0xd6, 0xf5, 0x13, 0x90, 0xf6, 0x48, 0x44, 0xac, 0x88, 0xd8, + 0x5d, 0xc3, 0x73, 0x2e, 0x0c, 0x87, 0xa0, 0x0d, 0x58, 0x1b, 0xa5, 0x6b, 0xdd, 0xa2, 0x36, 0xa9, + 0x71, 0x0c, 0xb0, 0x9a, 0x39, 0x3b, 0xd4, 0x26, 0xe8, 0x0e, 0x80, 0x45, 0xbd, 0x33, 0xd7, 0x26, + 0x9e, 0x45, 0x58, 0xca, 0x02, 0x9e, 0xf3, 0xd4, 0x3f, 0x71, 0xb0, 0x96, 0x65, 0x56, 0x02, 0x62, + 0x9c, 0xa3, 0x53, 0xe0, 0xa3, 0xa9, 0x9f, 0x64, 0xab, 0xb6, 0xd4, 0x9c, 0xc2, 0x17, 0x69, 0xcb, + 0x0b, 0xa9, 0x64, 0xf6, 0xdb, 0x9f, 0xfa, 0x04, 0xb3, 0x94, 0xe8, 0x5f, 0x10, 0xdd, 0x50, 0xf7, + 0x03, 0x72, 0xe6, 0x4e, 0x58, 0x2d, 0x65, 0x5c, 0x76, 0x43, 0x8d, 0xd9, 0x0d, 0x0b, 0xc4, 0x59, + 0x3c, 0xaa, 0xc0, 0xca, 0xa0, 0x77, 0xd8, 0x3b, 0x3a, 0xe9, 0x49, 0x7f, 0x20, 0x11, 0x84, 0x63, + 0xad, 0xdd, 0x51, 0x25, 0x0e, 0x55, 0x01, 0x8e, 0x07, 0x58, 0xd5, 0x13, 0xbb, 0x80, 0x10, 0x54, + 0xd5, 0xa3, 0xae, 0x3e, 0xe7, 0x2b, 0x22, 0x80, 0xd2, 0xfe, 0xa9, 0xb6, 0xaf, 0xf6, 0x24, 0x3e, + 0x8e, 0xef, 0x1e, 0xf4, 0x54, 0x5d, 0xc1, 0x6a, 0xfb, 0x50, 0x12, 0xea, 0xef, 0x39, 0x58, 0x8d, + 0x4b, 0xd6, 0x02, 0xea, 0x93, 0x20, 0x9a, 0xa2, 0x31, 0x20, 0x3b, 0xad, 0x59, 0xcf, 0x84, 0xcb, + 0x9a, 0xf6, 0xe4, 0xe7, 0xb8, 0x67, 0x0d, 0xc2, 0x7f, 0xda, 0x4b, 0x9e, 0x10, 0x99, 0x50, 0x9d, + 0x6d, 0x67, 0xc6, 0x6c, 0x99, 0x0c, 0x95, 0xd6, 0xee, 0x2f, 0xc8, 0x8c, 0xd7, 0xec, 0x79, 0xb3, + 0xf1, 0x91, 0x03, 0x3e, 0x3e, 0x4f, 0xe8, 0x39, 0x94, 0xfd, 0x94, 0x27, 0xeb, 0x66, 0xa5, 0xf5, + 0xe8, 0xc7, 0xb6, 0x99, 0x57, 0x0a, 0xcf, 0x72, 0xa1, 0xbf, 0x41, 0xb8, 0x72, 0xed, 0x68, 0xc8, + 0x6a, 0x17, 0x70, 0x62, 0xa0, 0x7f, 0xa0, 0x34, 0x24, 0xae, 0x33, 0x8c, 0x6a, 0x45, 0xe6, 0x4e, + 0x2d, 0xf4, 0x18, 0x4a, 0xe6, 0x88, 0x5a, 0xe7, 0x61, 0x8d, 0x67, 0xaa, 0xde, 0xcb, 0xa9, 0x41, + 0x89, 0x83, 0x71, 0x8a, 0x59, 0x3a, 0xbf, 0xc2, 0xf2, 0xf9, 0x6d, 0xbc, 0x2b, 0x82, 0xc0, 0x10, + 0xbf, 0x8d, 0x6d, 0x0f, 0x56, 0x4d, 0x7a, 0xe1, 0xd9, 0xae, 0xe7, 0xe8, 0x26, 0x9d, 0xa4, 0x0d, + 0x7b, 0x90, 0xc7, 0x22, 0x85, 0x68, 0x74, 0x34, 0xc5, 0x95, 0x2c, 0x81, 0x42, 0x27, 0x68, 0x1f, + 0xc0, 0x37, 0x02, 0xc3, 0x09, 0x0c, 0x7f, 0x18, 0xd6, 0x8a, 0x4c, 0x93, 0xcd, 0xdc, 0xcf, 0x43, + 0x0a, 0xc0, 0x73, 0x58, 0xf4, 0x0c, 0x80, 0xa9, 0xa4, 0xb3, 0x79, 0xe5, 0xd9, 0xbc, 0xca, 0xdf, + 0xa3, 0x6e, 0xf2, 0xcb, 0x06, 0x53, 0x34, 0xb3, 0x65, 0xae, 0xd4, 0x18, 0xc4, 0x19, 0x6e, 0x71, + 0x40, 0xcb, 0xc0, 0xf7, 0xd5, 0x17, 0x7d, 0x89, 0x8b, 0x47, 0xb5, 0xdf, 0x56, 0xba, 0xf1, 0x68, + 0x56, 0x60, 0x45, 0x3b, 0xe8, 0xf4, 0x07, 0x38, 0x9e, 0x49, 0x11, 0x04, 0x3c, 0xe8, 0xaa, 0x58, + 0xe2, 0x63, 0xbf, 0xd2, 0xc6, 0x9d, 0xa3, 0x3d, 0x55, 0x12, 0x1a, 0x6f, 0x0a, 0x20, 0xce, 0xc8, + 0xdd, 0x9a, 0x16, 0xee, 0x80, 0x70, 0x45, 0x03, 0x3b, 0xeb, 0x5e, 0xde, 0xc7, 0xfd, 0x84, 0x06, + 0x36, 0x4e, 0x10, 0x4b, 0x22, 0xf3, 0x5f, 0x89, 0xfc, 0xb6, 0x00, 0x7c, 0x1c, 0x7f, 0x6b, 0xb4, + 0x78, 0x0a, 0x2b, 0xe1, 0x74, 0x6c, 0xd2, 0x51, 0xa6, 0xc6, 0xfd, 0x9c, 0x54, 0xc7, 0x2c, 0x1a, + 0x67, 
0xa8, 0x5c, 0x45, 0x3e, 0x70, 0x50, 0x4a, 0x30, 0xb7, 0x46, 0x93, 0xec, 0x06, 0x2f, 0x5e, + 0xdf, 0xe0, 0x79, 0x34, 0x95, 0x57, 0x1c, 0xdc, 0xb5, 0xe8, 0xf8, 0xe6, 0x3d, 0x95, 0xbf, 0x16, + 0x09, 0x69, 0xf1, 0xf3, 0x43, 0xe3, 0x5e, 0x76, 0x52, 0x94, 0x43, 0xe3, 0x3b, 0x4c, 0xa6, 0x81, + 0xd3, 0x74, 0x88, 0xc7, 0x1e, 0x27, 0xcd, 0xe4, 0x2f, 0xc3, 0x77, 0xc3, 0x6f, 0xbc, 0x66, 0x76, + 0x13, 0xc7, 0x67, 0x8e, 0x33, 0x4b, 0x0c, 0xb2, 0xfd, 0x25, 0x00, 0x00, 0xff, 0xff, 0xce, 0x91, + 0x71, 0x97, 0x71, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/web_detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/web_detection.pb.go new file mode 100644 index 0000000..15a9f14 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1/web_detection.pb.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p2beta1/web_detection.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Relevant information for the image from the Internet. +type WebDetection struct { + // Deduced entities from similar images on the Internet. + WebEntities []*WebDetection_WebEntity `protobuf:"bytes,1,rep,name=web_entities,json=webEntities,proto3" json:"web_entities,omitempty"` + // Fully matching images from the Internet. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,2,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images from the Internet. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,3,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + // Web pages containing the matching images from the Internet. + PagesWithMatchingImages []*WebDetection_WebPage `protobuf:"bytes,4,rep,name=pages_with_matching_images,json=pagesWithMatchingImages,proto3" json:"pages_with_matching_images,omitempty"` + // The visually similar image results. + VisuallySimilarImages []*WebDetection_WebImage `protobuf:"bytes,6,rep,name=visually_similar_images,json=visuallySimilarImages,proto3" json:"visually_similar_images,omitempty"` + // Best guess text labels for the request image. 
+ BestGuessLabels []*WebDetection_WebLabel `protobuf:"bytes,8,rep,name=best_guess_labels,json=bestGuessLabels,proto3" json:"best_guess_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection) Reset() { *m = WebDetection{} } +func (m *WebDetection) String() string { return proto.CompactTextString(m) } +func (*WebDetection) ProtoMessage() {} +func (*WebDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_1ca040388105f0cb, []int{0} +} +func (m *WebDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection.Unmarshal(m, b) +} +func (m *WebDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection.Marshal(b, m, deterministic) +} +func (dst *WebDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection.Merge(dst, src) +} +func (m *WebDetection) XXX_Size() int { + return xxx_messageInfo_WebDetection.Size(m) +} +func (m *WebDetection) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection proto.InternalMessageInfo + +func (m *WebDetection) GetWebEntities() []*WebDetection_WebEntity { + if m != nil { + return m.WebEntities + } + return nil +} + +func (m *WebDetection) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func (m *WebDetection) GetPagesWithMatchingImages() []*WebDetection_WebPage { + if m != nil { + return m.PagesWithMatchingImages + } + return nil +} + +func (m *WebDetection) GetVisuallySimilarImages() []*WebDetection_WebImage { + if m != nil { + return m.VisuallySimilarImages + } + return nil +} + +func (m *WebDetection) GetBestGuessLabels() []*WebDetection_WebLabel { + if m != nil { + return m.BestGuessLabels + } + return nil +} + +// Entity deduced from similar images on the Internet. +type WebDetection_WebEntity struct { + // Opaque entity ID. + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Overall relevancy score for the entity. + // Not normalized and not comparable across different image queries. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Canonical description of the entity, in English. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebEntity) Reset() { *m = WebDetection_WebEntity{} } +func (m *WebDetection_WebEntity) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebEntity) ProtoMessage() {} +func (*WebDetection_WebEntity) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_1ca040388105f0cb, []int{0, 0} +} +func (m *WebDetection_WebEntity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebEntity.Unmarshal(m, b) +} +func (m *WebDetection_WebEntity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebEntity.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebEntity) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebEntity.Merge(dst, src) +} +func (m *WebDetection_WebEntity) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebEntity.Size(m) +} +func (m *WebDetection_WebEntity) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebEntity.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebEntity proto.InternalMessageInfo + +func (m *WebDetection_WebEntity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *WebDetection_WebEntity) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebEntity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Metadata for online images. +type WebDetection_WebImage struct { + // The result image URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the image. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebImage) Reset() { *m = WebDetection_WebImage{} } +func (m *WebDetection_WebImage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebImage) ProtoMessage() {} +func (*WebDetection_WebImage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_1ca040388105f0cb, []int{0, 1} +} +func (m *WebDetection_WebImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebImage.Unmarshal(m, b) +} +func (m *WebDetection_WebImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebImage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebImage.Merge(dst, src) +} +func (m *WebDetection_WebImage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebImage.Size(m) +} +func (m *WebDetection_WebImage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebImage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebImage proto.InternalMessageInfo + +func (m *WebDetection_WebImage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebImage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Metadata for web pages. +type WebDetection_WebPage struct { + // The result web page URL. 
+ Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the web page. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Title for the web page, may contain HTML markups. + PageTitle string `protobuf:"bytes,3,opt,name=page_title,json=pageTitle,proto3" json:"page_title,omitempty"` + // Fully matching images on the page. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,4,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images on the page. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its + // crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,5,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebPage) Reset() { *m = WebDetection_WebPage{} } +func (m *WebDetection_WebPage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebPage) ProtoMessage() {} +func (*WebDetection_WebPage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_1ca040388105f0cb, []int{0, 2} +} +func (m *WebDetection_WebPage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebPage.Unmarshal(m, b) +} +func (m *WebDetection_WebPage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebPage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebPage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebPage.Merge(dst, src) +} +func (m *WebDetection_WebPage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebPage.Size(m) +} +func (m *WebDetection_WebPage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebPage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebPage proto.InternalMessageInfo + +func (m *WebDetection_WebPage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebPage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebPage) GetPageTitle() string { + if m != nil { + return m.PageTitle + } + return "" +} + +func (m *WebDetection_WebPage) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection_WebPage) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +// Label to provide extra metadata for the web detection. +type WebDetection_WebLabel struct { + // Label for extra metadata. + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebLabel) Reset() { *m = WebDetection_WebLabel{} } +func (m *WebDetection_WebLabel) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebLabel) ProtoMessage() {} +func (*WebDetection_WebLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_1ca040388105f0cb, []int{0, 3} +} +func (m *WebDetection_WebLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebLabel.Unmarshal(m, b) +} +func (m *WebDetection_WebLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebLabel.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebLabel.Merge(dst, src) +} +func (m *WebDetection_WebLabel) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebLabel.Size(m) +} +func (m *WebDetection_WebLabel) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebLabel proto.InternalMessageInfo + +func (m *WebDetection_WebLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *WebDetection_WebLabel) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func init() { + proto.RegisterType((*WebDetection)(nil), "google.cloud.vision.v1p2beta1.WebDetection") + proto.RegisterType((*WebDetection_WebEntity)(nil), "google.cloud.vision.v1p2beta1.WebDetection.WebEntity") + proto.RegisterType((*WebDetection_WebImage)(nil), "google.cloud.vision.v1p2beta1.WebDetection.WebImage") + proto.RegisterType((*WebDetection_WebPage)(nil), "google.cloud.vision.v1p2beta1.WebDetection.WebPage") + proto.RegisterType((*WebDetection_WebLabel)(nil), "google.cloud.vision.v1p2beta1.WebDetection.WebLabel") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p2beta1/web_detection.proto", fileDescriptor_web_detection_1ca040388105f0cb) +} + +var fileDescriptor_web_detection_1ca040388105f0cb = []byte{ + // 511 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x95, 0x76, 0x1b, 0x8d, 0x5b, 0x04, 0xb3, 0x86, 0x16, 0x05, 0x26, 0x15, 0xb8, 0xf4, + 0x94, 0xa8, 0x1d, 0x9c, 0xb8, 0x6d, 0x4c, 0x68, 0x12, 0x48, 0x55, 0x40, 0x1a, 0xe2, 0x92, 0x39, + 0x89, 0x97, 0xbe, 0x92, 0x1b, 0x47, 0xb1, 0xd3, 0xaa, 0x37, 0x4e, 0x7c, 0x14, 0x3e, 0x23, 0x47, + 0xf4, 0xda, 0xee, 0x54, 0x51, 0x36, 0x31, 0x86, 0xb8, 0xf9, 0x7d, 0xac, 0xe7, 0xf9, 0xd9, 0xaf, + 0xff, 0x90, 0x71, 0x29, 0x65, 0x29, 0x78, 0x9c, 0x0b, 0xd9, 0x16, 0xf1, 0x02, 0x14, 0xc8, 0x2a, + 0x5e, 0x8c, 0xeb, 0x49, 0xc6, 0x35, 0x1b, 0xc7, 0x4b, 0x9e, 0xa5, 0x05, 0xd7, 0x3c, 0xd7, 0x20, + 0xab, 0xa8, 0x6e, 0xa4, 0x96, 0xf4, 0xc8, 0x5a, 0x22, 0x63, 0x89, 0xac, 0x25, 0xba, 0xb6, 0x84, + 0xcf, 0x5c, 0x22, 0xab, 0x21, 0x66, 0x55, 0x25, 0x35, 0x43, 0xaf, 0xb2, 0xe6, 0x17, 0xdf, 0x7c, + 0x32, 0xb8, 0xe0, 0xd9, 0xdb, 0x75, 0x26, 0xfd, 0x4c, 0x06, 0x08, 0xe1, 0x95, 0x06, 0x0d, 0x5c, + 0x05, 0xde, 0xb0, 0x3b, 0xea, 0x4f, 0x5e, 0x47, 0xb7, 0x42, 0xa2, 0xcd, 0x08, 0x2c, 0xce, 0xd0, + 0xbe, 0x4a, 0xfa, 0x4b, 0x37, 0x04, 0xae, 0xe8, 0x15, 0x39, 0xb8, 0x6a, 0x85, 0x48, 0xe7, 0x4c, + 0xe7, 0x33, 0xa8, 0xca, 0x14, 0xe6, 0xac, 0xe4, 
0x2a, 0xe8, 0x18, 0xc2, 0xab, 0x3b, 0x12, 0xce, + 0xd1, 0x9c, 0x50, 0x4c, 0xfc, 0xe0, 0x02, 0x8d, 0xa4, 0xa8, 0x20, 0x87, 0x35, 0x6b, 0x34, 0xb0, + 0x6d, 0x54, 0xf7, 0x1e, 0xa8, 0x27, 0x2e, 0xf4, 0x17, 0x5a, 0x4d, 0xc2, 0x1a, 0x07, 0xe9, 0x12, + 0xf4, 0x6c, 0x0b, 0xb8, 0x63, 0x80, 0xc7, 0x77, 0x04, 0x4e, 0x91, 0x77, 0x68, 0x62, 0x2f, 0x40, + 0xcf, 0xb6, 0xf7, 0xb7, 0x00, 0xd5, 0x32, 0x21, 0x56, 0xa9, 0x82, 0x39, 0x08, 0xd6, 0xac, 0x71, + 0x7b, 0xf7, 0xd9, 0xdf, 0x3a, 0xf4, 0xa3, 0xcd, 0x74, 0xb4, 0x4b, 0xb2, 0x9f, 0x71, 0xa5, 0xd3, + 0xb2, 0xe5, 0x4a, 0xa5, 0x82, 0x65, 0x5c, 0xa8, 0xa0, 0xf7, 0x57, 0x9c, 0xf7, 0x68, 0x4e, 0x1e, + 0x61, 0xdc, 0x3b, 0x4c, 0x33, 0xb5, 0x0a, 0x2f, 0x89, 0x7f, 0x7d, 0x63, 0xe8, 0x53, 0xe2, 0x9b, + 0xab, 0xb7, 0x4a, 0xa1, 0x08, 0xbc, 0xa1, 0x37, 0xf2, 0x93, 0x9e, 0x15, 0xce, 0x0b, 0x7a, 0x40, + 0x76, 0x55, 0x2e, 0x1b, 0x1e, 0x74, 0x86, 0xde, 0xa8, 0x93, 0xd8, 0x82, 0x0e, 0x49, 0xbf, 0xe0, + 0x2a, 0x6f, 0xa0, 0x46, 0x50, 0xd0, 0x35, 0xa6, 0x4d, 0x29, 0x9c, 0x90, 0xde, 0x7a, 0x9b, 0xf4, + 0x31, 0xe9, 0xb6, 0x8d, 0x70, 0xd1, 0x38, 0xfc, 0x7d, 0x6a, 0xf8, 0xbd, 0x43, 0x1e, 0xb8, 0xa3, + 0xf8, 0x53, 0x0f, 0x3d, 0x22, 0x04, 0x0f, 0x2d, 0xd5, 0xa0, 0x05, 0x77, 0x0b, 0xf1, 0x51, 0xf9, + 0x84, 0xc2, 0x8d, 0x0f, 0x60, 0xe7, 0xff, 0x3d, 0x80, 0xdd, 0x7f, 0xfe, 0x00, 0xc2, 0x33, 0xd3, + 0x5c, 0x73, 0x96, 0xd8, 0x16, 0x73, 0x43, 0x5c, 0xab, 0x6c, 0x41, 0x5f, 0x92, 0x87, 0x82, 0x55, + 0x65, 0x8b, 0xad, 0xc9, 0x65, 0x61, 0x9b, 0xe6, 0x27, 0x83, 0xb5, 0x78, 0x2a, 0x0b, 0x7e, 0xf2, + 0xd5, 0x23, 0xcf, 0x73, 0x39, 0xbf, 0x7d, 0x65, 0x27, 0xfb, 0x9b, 0x4b, 0x9b, 0xe2, 0x0f, 0x36, + 0xf5, 0xbe, 0x9c, 0x3a, 0x4f, 0x29, 0x31, 0x31, 0x92, 0x4d, 0x19, 0x97, 0xbc, 0x32, 0xff, 0x5b, + 0x6c, 0xa7, 0x58, 0x0d, 0xea, 0x86, 0x2f, 0xf5, 0x8d, 0x15, 0x7e, 0x78, 0x5e, 0xb6, 0x67, 0x2c, + 0xc7, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x66, 0x62, 0xaa, 0xcd, 0x84, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/geometry.pb.go new file mode 100644 index 0000000..79e2d47 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/geometry.pb.go @@ -0,0 +1,306 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/geometry.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. 
+ Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_eec0e7cba47b2d57, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_eec0e7cba47b2d57, []int{1} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon for the detected image annotation. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + // The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,2,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_eec0e7cba47b2d57, []int{2} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +func (m *BoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// A normalized bounding polygon around a portion of an image. +type NormalizedBoundingPoly struct { + // Normalized vertices of the bounding polygon. + Vertices []*NormalizedVertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedBoundingPoly) Reset() { *m = NormalizedBoundingPoly{} } +func (m *NormalizedBoundingPoly) String() string { return proto.CompactTextString(m) } +func (*NormalizedBoundingPoly) ProtoMessage() {} +func (*NormalizedBoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_eec0e7cba47b2d57, []int{3} +} +func (m *NormalizedBoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedBoundingPoly.Unmarshal(m, b) +} +func (m *NormalizedBoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedBoundingPoly.Marshal(b, m, deterministic) +} +func (dst *NormalizedBoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedBoundingPoly.Merge(dst, src) +} +func (m *NormalizedBoundingPoly) XXX_Size() int { + return xxx_messageInfo_NormalizedBoundingPoly.Size(m) +} +func (m *NormalizedBoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedBoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedBoundingPoly proto.InternalMessageInfo + +func (m *NormalizedBoundingPoly) GetVertices() []*NormalizedVertex { + if m != nil { + return m.Vertices + } + return nil +} + +// A 3D position in the image, used primarily for Face detection landmarks. +// A valid Position must have both x and y coordinates. +// The position coordinates are in the same scale as the original image. +type Position struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + // Z coordinate (or depth). 
+ Z float32 `protobuf:"fixed32,3,opt,name=z,proto3" json:"z,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_eec0e7cba47b2d57, []int{4} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Position) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Position) GetZ() float32 { + if m != nil { + return m.Z + } + return 0 +} + +func init() { + proto.RegisterType((*Vertex)(nil), "google.cloud.vision.v1p3beta1.Vertex") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.vision.v1p3beta1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.vision.v1p3beta1.BoundingPoly") + proto.RegisterType((*NormalizedBoundingPoly)(nil), "google.cloud.vision.v1p3beta1.NormalizedBoundingPoly") + proto.RegisterType((*Position)(nil), "google.cloud.vision.v1p3beta1.Position") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/geometry.proto", fileDescriptor_geometry_eec0e7cba47b2d57) +} + +var fileDescriptor_geometry_eec0e7cba47b2d57 = []byte{ + // 316 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0xc1, 0x4b, 0xc3, 0x30, + 0x14, 0xc6, 0x49, 0x87, 0x63, 0xc4, 0x09, 0x52, 0x41, 0x8a, 0x28, 0xcc, 0xa2, 0xb0, 0x83, 0x24, + 0xcc, 0x79, 0xf3, 0xe4, 0x3c, 0x78, 0x10, 0xa4, 0xf4, 0xe0, 0xc1, 0x8b, 0x66, 0x6d, 0x08, 0x81, + 0x36, 0xaf, 0xa4, 0x59, 0x59, 0x8b, 0xff, 0x95, 0xff, 0x9c, 0x47, 0x69, 0x53, 0x2a, 0x9d, 0x58, + 0xf1, 0xf8, 0xbd, 0xfc, 0xde, 0xf7, 0x3e, 0xf2, 0x1e, 0xbe, 0x12, 0x00, 0x22, 0xe1, 0x34, 0x4a, + 0x60, 0x13, 0xd3, 0x42, 0xe6, 0x12, 0x14, 0x2d, 0x16, 0xd9, 0x72, 0xcd, 0x0d, 0x5b, 0x50, 0xc1, + 0x21, 0xe5, 0x46, 0x97, 0x24, 0xd3, 0x60, 0xc0, 0x3d, 0xb3, 0x34, 0x69, 0x68, 0x62, 0x69, 0xd2, + 0xd1, 0x27, 0xa7, 0xad, 0x19, 0xcb, 0x24, 0x65, 0x4a, 0x81, 0x61, 0x46, 0x82, 0xca, 0x6d, 0xb3, + 0x7f, 0x81, 0xc7, 0xcf, 0x5c, 0x1b, 0xbe, 0x75, 0xa7, 0x18, 0x6d, 0x3d, 0x34, 0x43, 0xf3, 0xbd, + 0x10, 0x35, 0xaa, 0xf4, 0x1c, 0xab, 0x4a, 0x9f, 0xe0, 0xc3, 0x27, 0xd0, 0x29, 0x4b, 0x64, 0xc5, + 0xe3, 0x5d, 0xde, 0xe9, 0xf1, 0x4e, 0xcd, 0x7f, 0x20, 0x3c, 0x5d, 0xc1, 0x46, 0xc5, 0x52, 0x89, + 0x00, 0x92, 0xd2, 0xbd, 0xc3, 0x93, 0x82, 0x6b, 0x23, 0x23, 0x9e, 0x7b, 0x68, 0x36, 0x9a, 0xef, + 0x5f, 0x5f, 0x92, 0xc1, 0xd8, 0xc4, 0x4e, 0x09, 0xbb, 0x36, 0xf7, 0x0d, 0x1f, 0xa9, 0x2e, 0xc3, + 0x6b, 0xe7, 0xe6, 0x34, 0x6e, 0xf4, 0x0f, 0xb7, 0xdd, 0xf4, 0xa1, 0xab, 0x7a, 0x95, 0xda, 0xca, + 0xe7, 0xf8, 0xf8, 0x9b, 0xeb, 0xc5, 0x7f, 0xfc, 0x11, 0xff, 0xdf, 0x03, 0x3b, 0x03, 0xff, 0x06, + 0x4f, 0x02, 0xc8, 0x65, 0xbd, 0x85, 0xa1, 0x4f, 0xac, 0x55, 0xe5, 0x8d, 
0xac, 0xaa, 0x56, 0xef, + 0xf8, 0x3c, 0x82, 0x74, 0x78, 0xea, 0xea, 0xe0, 0xa1, 0x3d, 0x8d, 0xa0, 0x5e, 0x6e, 0x80, 0x5e, + 0xee, 0x5b, 0x5e, 0x40, 0xc2, 0x94, 0x20, 0xa0, 0x05, 0x15, 0x5c, 0x35, 0xab, 0xa7, 0xf6, 0x89, + 0x65, 0x32, 0xff, 0xe5, 0xd0, 0x6e, 0x6d, 0xe1, 0x13, 0xa1, 0xf5, 0xb8, 0x69, 0x59, 0x7e, 0x05, + 0x00, 0x00, 0xff, 0xff, 0x79, 0x59, 0xbf, 0x39, 0x9a, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/image_annotator.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/image_annotator.pb.go new file mode 100644 index 0000000..1ca665a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/image_annotator.pb.go @@ -0,0 +1,2826 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/image_annotator.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import color "google.golang.org/genproto/googleapis/type/color" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // It is very unlikely that the image belongs to the specified vertical. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // It is unlikely that the image belongs to the specified vertical. + Likelihood_UNLIKELY Likelihood = 2 + // It is possible that the image belongs to the specified vertical. + Likelihood_POSSIBLE Likelihood = 3 + // It is likely that the image belongs to the specified vertical. + Likelihood_LIKELY Likelihood = 4 + // It is very likely that the image belongs to the specified vertical. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{0} +} + +// Type of Google Cloud Vision API feature to be extracted. +type Feature_Type int32 + +const ( + // Unspecified feature type. + Feature_TYPE_UNSPECIFIED Feature_Type = 0 + // Run face detection. 
+ Feature_FACE_DETECTION Feature_Type = 1 + // Run landmark detection. + Feature_LANDMARK_DETECTION Feature_Type = 2 + // Run logo detection. + Feature_LOGO_DETECTION Feature_Type = 3 + // Run label detection. + Feature_LABEL_DETECTION Feature_Type = 4 + // Run text detection / optical character recognition (OCR). Text detection + // is optimized for areas of text within a larger image; if the image is + // a document, use `DOCUMENT_TEXT_DETECTION` instead. + Feature_TEXT_DETECTION Feature_Type = 5 + // Run dense text document OCR. Takes precedence when both + // `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. + Feature_DOCUMENT_TEXT_DETECTION Feature_Type = 11 + // Run Safe Search to detect potentially unsafe + // or undesirable content. + Feature_SAFE_SEARCH_DETECTION Feature_Type = 6 + // Compute a set of image properties, such as the + // image's dominant colors. + Feature_IMAGE_PROPERTIES Feature_Type = 7 + // Run crop hints. + Feature_CROP_HINTS Feature_Type = 9 + // Run web detection. + Feature_WEB_DETECTION Feature_Type = 10 + // Run Product Search. + Feature_PRODUCT_SEARCH Feature_Type = 12 + // Run localizer for object detection. + Feature_OBJECT_LOCALIZATION Feature_Type = 19 +) + +var Feature_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "FACE_DETECTION", + 2: "LANDMARK_DETECTION", + 3: "LOGO_DETECTION", + 4: "LABEL_DETECTION", + 5: "TEXT_DETECTION", + 11: "DOCUMENT_TEXT_DETECTION", + 6: "SAFE_SEARCH_DETECTION", + 7: "IMAGE_PROPERTIES", + 9: "CROP_HINTS", + 10: "WEB_DETECTION", + 12: "PRODUCT_SEARCH", + 19: "OBJECT_LOCALIZATION", +} +var Feature_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "FACE_DETECTION": 1, + "LANDMARK_DETECTION": 2, + "LOGO_DETECTION": 3, + "LABEL_DETECTION": 4, + "TEXT_DETECTION": 5, + "DOCUMENT_TEXT_DETECTION": 11, + "SAFE_SEARCH_DETECTION": 6, + "IMAGE_PROPERTIES": 7, + "CROP_HINTS": 9, + "WEB_DETECTION": 10, + "PRODUCT_SEARCH": 12, + "OBJECT_LOCALIZATION": 19, +} + +func (x Feature_Type) String() string { + return proto.EnumName(Feature_Type_name, int32(x)) +} +func (Feature_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{0, 0} +} + +// Face landmark (feature) type. +// Left and right are defined from the vantage of the viewer of the image +// without considering mirror projections typical of photos. So, `LEFT_EYE`, +// typically, is the person's right eye. +type FaceAnnotation_Landmark_Type int32 + +const ( + // Unknown face landmark detected. Should not be filled. + FaceAnnotation_Landmark_UNKNOWN_LANDMARK FaceAnnotation_Landmark_Type = 0 + // Left eye. + FaceAnnotation_Landmark_LEFT_EYE FaceAnnotation_Landmark_Type = 1 + // Right eye. + FaceAnnotation_Landmark_RIGHT_EYE FaceAnnotation_Landmark_Type = 2 + // Left of left eyebrow. + FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 3 + // Right of left eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 4 + // Left of right eyebrow. + FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 5 + // Right of right eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 6 + // Midpoint between eyes. + FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES FaceAnnotation_Landmark_Type = 7 + // Nose tip. + FaceAnnotation_Landmark_NOSE_TIP FaceAnnotation_Landmark_Type = 8 + // Upper lip. + FaceAnnotation_Landmark_UPPER_LIP FaceAnnotation_Landmark_Type = 9 + // Lower lip. 
+ FaceAnnotation_Landmark_LOWER_LIP FaceAnnotation_Landmark_Type = 10 + // Mouth left. + FaceAnnotation_Landmark_MOUTH_LEFT FaceAnnotation_Landmark_Type = 11 + // Mouth right. + FaceAnnotation_Landmark_MOUTH_RIGHT FaceAnnotation_Landmark_Type = 12 + // Mouth center. + FaceAnnotation_Landmark_MOUTH_CENTER FaceAnnotation_Landmark_Type = 13 + // Nose, bottom right. + FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT FaceAnnotation_Landmark_Type = 14 + // Nose, bottom left. + FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT FaceAnnotation_Landmark_Type = 15 + // Nose, bottom center. + FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER FaceAnnotation_Landmark_Type = 16 + // Left eye, top boundary. + FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 17 + // Left eye, right corner. + FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 18 + // Left eye, bottom boundary. + FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 19 + // Left eye, left corner. + FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 20 + // Right eye, top boundary. + FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 21 + // Right eye, right corner. + FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 22 + // Right eye, bottom boundary. + FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 23 + // Right eye, left corner. + FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 24 + // Left eyebrow, upper midpoint. + FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 25 + // Right eyebrow, upper midpoint. + FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 26 + // Left ear tragion. + FaceAnnotation_Landmark_LEFT_EAR_TRAGION FaceAnnotation_Landmark_Type = 27 + // Right ear tragion. + FaceAnnotation_Landmark_RIGHT_EAR_TRAGION FaceAnnotation_Landmark_Type = 28 + // Left eye pupil. + FaceAnnotation_Landmark_LEFT_EYE_PUPIL FaceAnnotation_Landmark_Type = 29 + // Right eye pupil. + FaceAnnotation_Landmark_RIGHT_EYE_PUPIL FaceAnnotation_Landmark_Type = 30 + // Forehead glabella. + FaceAnnotation_Landmark_FOREHEAD_GLABELLA FaceAnnotation_Landmark_Type = 31 + // Chin gnathion. + FaceAnnotation_Landmark_CHIN_GNATHION FaceAnnotation_Landmark_Type = 32 + // Chin left gonion. + FaceAnnotation_Landmark_CHIN_LEFT_GONION FaceAnnotation_Landmark_Type = 33 + // Chin right gonion. 
+ FaceAnnotation_Landmark_CHIN_RIGHT_GONION FaceAnnotation_Landmark_Type = 34 +) + +var FaceAnnotation_Landmark_Type_name = map[int32]string{ + 0: "UNKNOWN_LANDMARK", + 1: "LEFT_EYE", + 2: "RIGHT_EYE", + 3: "LEFT_OF_LEFT_EYEBROW", + 4: "RIGHT_OF_LEFT_EYEBROW", + 5: "LEFT_OF_RIGHT_EYEBROW", + 6: "RIGHT_OF_RIGHT_EYEBROW", + 7: "MIDPOINT_BETWEEN_EYES", + 8: "NOSE_TIP", + 9: "UPPER_LIP", + 10: "LOWER_LIP", + 11: "MOUTH_LEFT", + 12: "MOUTH_RIGHT", + 13: "MOUTH_CENTER", + 14: "NOSE_BOTTOM_RIGHT", + 15: "NOSE_BOTTOM_LEFT", + 16: "NOSE_BOTTOM_CENTER", + 17: "LEFT_EYE_TOP_BOUNDARY", + 18: "LEFT_EYE_RIGHT_CORNER", + 19: "LEFT_EYE_BOTTOM_BOUNDARY", + 20: "LEFT_EYE_LEFT_CORNER", + 21: "RIGHT_EYE_TOP_BOUNDARY", + 22: "RIGHT_EYE_RIGHT_CORNER", + 23: "RIGHT_EYE_BOTTOM_BOUNDARY", + 24: "RIGHT_EYE_LEFT_CORNER", + 25: "LEFT_EYEBROW_UPPER_MIDPOINT", + 26: "RIGHT_EYEBROW_UPPER_MIDPOINT", + 27: "LEFT_EAR_TRAGION", + 28: "RIGHT_EAR_TRAGION", + 29: "LEFT_EYE_PUPIL", + 30: "RIGHT_EYE_PUPIL", + 31: "FOREHEAD_GLABELLA", + 32: "CHIN_GNATHION", + 33: "CHIN_LEFT_GONION", + 34: "CHIN_RIGHT_GONION", +} +var FaceAnnotation_Landmark_Type_value = map[string]int32{ + "UNKNOWN_LANDMARK": 0, + "LEFT_EYE": 1, + "RIGHT_EYE": 2, + "LEFT_OF_LEFT_EYEBROW": 3, + "RIGHT_OF_LEFT_EYEBROW": 4, + "LEFT_OF_RIGHT_EYEBROW": 5, + "RIGHT_OF_RIGHT_EYEBROW": 6, + "MIDPOINT_BETWEEN_EYES": 7, + "NOSE_TIP": 8, + "UPPER_LIP": 9, + "LOWER_LIP": 10, + "MOUTH_LEFT": 11, + "MOUTH_RIGHT": 12, + "MOUTH_CENTER": 13, + "NOSE_BOTTOM_RIGHT": 14, + "NOSE_BOTTOM_LEFT": 15, + "NOSE_BOTTOM_CENTER": 16, + "LEFT_EYE_TOP_BOUNDARY": 17, + "LEFT_EYE_RIGHT_CORNER": 18, + "LEFT_EYE_BOTTOM_BOUNDARY": 19, + "LEFT_EYE_LEFT_CORNER": 20, + "RIGHT_EYE_TOP_BOUNDARY": 21, + "RIGHT_EYE_RIGHT_CORNER": 22, + "RIGHT_EYE_BOTTOM_BOUNDARY": 23, + "RIGHT_EYE_LEFT_CORNER": 24, + "LEFT_EYEBROW_UPPER_MIDPOINT": 25, + "RIGHT_EYEBROW_UPPER_MIDPOINT": 26, + "LEFT_EAR_TRAGION": 27, + "RIGHT_EAR_TRAGION": 28, + "LEFT_EYE_PUPIL": 29, + "RIGHT_EYE_PUPIL": 30, + "FOREHEAD_GLABELLA": 31, + "CHIN_GNATHION": 32, + "CHIN_LEFT_GONION": 33, + "CHIN_RIGHT_GONION": 34, +} + +func (x FaceAnnotation_Landmark_Type) String() string { + return proto.EnumName(FaceAnnotation_Landmark_Type_name, int32(x)) +} +func (FaceAnnotation_Landmark_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{3, 0, 0} +} + +// Batch operation states. +type OperationMetadata_State int32 + +const ( + // Invalid. + OperationMetadata_STATE_UNSPECIFIED OperationMetadata_State = 0 + // Request is received. + OperationMetadata_CREATED OperationMetadata_State = 1 + // Request is actively being processed. + OperationMetadata_RUNNING OperationMetadata_State = 2 + // The batch processing is done. + OperationMetadata_DONE OperationMetadata_State = 3 + // The batch processing was cancelled. 
+ OperationMetadata_CANCELLED OperationMetadata_State = 4 +) + +var OperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATED", + 2: "RUNNING", + 3: "DONE", + 4: "CANCELLED", +} +var OperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATED": 1, + "RUNNING": 2, + "DONE": 3, + "CANCELLED": 4, +} + +func (x OperationMetadata_State) String() string { + return proto.EnumName(OperationMetadata_State_name, int32(x)) +} +func (OperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{32, 0} +} + +// The type of Google Cloud Vision API detection to perform, and the maximum +// number of results to return for that type. Multiple `Feature` objects can +// be specified in the `features` list. +type Feature struct { + // The feature type. + Type Feature_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p3beta1.Feature_Type" json:"type,omitempty"` + // Maximum number of results of this type. Does not apply to + // `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`. + MaxResults int32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Model to use for the feature. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{0} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetType() Feature_Type { + if m != nil { + return m.Type + } + return Feature_TYPE_UNSPECIFIED +} + +func (m *Feature) GetMaxResults() int32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *Feature) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// External image source (Google Cloud Storage or web URL image location). +type ImageSource struct { + // **Use `image_uri` instead.** + // + // The Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more info. + GcsImageUri string `protobuf:"bytes,1,opt,name=gcs_image_uri,json=gcsImageUri,proto3" json:"gcs_image_uri,omitempty"` + // The URI of the source image. Can be either: + // + // 1. A Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more + // info. + // + // 2. 
A publicly-accessible image HTTP/HTTPS URL. When fetching images from + // HTTP/HTTPS URLs, Google cannot guarantee that the request will be + // completed. Your request may fail if the specified host denies the + // request (e.g. due to request throttling or DOS prevention), or if Google + // throttles requests to the site for abuse prevention. You should not + // depend on externally-hosted images for production applications. + // + // When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes + // precedence. + ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSource) Reset() { *m = ImageSource{} } +func (m *ImageSource) String() string { return proto.CompactTextString(m) } +func (*ImageSource) ProtoMessage() {} +func (*ImageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{1} +} +func (m *ImageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSource.Unmarshal(m, b) +} +func (m *ImageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSource.Marshal(b, m, deterministic) +} +func (dst *ImageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSource.Merge(dst, src) +} +func (m *ImageSource) XXX_Size() int { + return xxx_messageInfo_ImageSource.Size(m) +} +func (m *ImageSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSource proto.InternalMessageInfo + +func (m *ImageSource) GetGcsImageUri() string { + if m != nil { + return m.GcsImageUri + } + return "" +} + +func (m *ImageSource) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Client image to perform Google Cloud Vision API tasks over. +type Image struct { + // Image content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // Google Cloud Storage image location, or publicly-accessible image + // URL. If both `content` and `source` are provided for an image, `content` + // takes precedence and is used to perform the image annotation request. 
+ Source *ImageSource `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{2} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +func (m *Image) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *Image) GetSource() *ImageSource { + if m != nil { + return m.Source + } + return nil +} + +// A face annotation object contains the results of face detection. +type FaceAnnotation struct { + // The bounding polygon around the face. The coordinates of the bounding box + // are in the original image's scale, as returned in `ImageParams`. + // The bounding box is computed to "frame" the face in accordance with human + // expectations. It is based on the landmarker results. + // Note that one or more x and/or y coordinates may not be generated in the + // `BoundingPoly` (the polygon will be unbounded) if only a partial face + // appears in the image to be annotated. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The `fd_bounding_poly` bounding polygon is tighter than the + // `boundingPoly`, and encloses only the skin part of the face. Typically, it + // is used to eliminate the face from any image analysis that detects the + // "amount of skin" visible in an image. It is not based on the + // landmarker results, only on the initial face detection, hence + // the fd (face detection) prefix. + FdBoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=fd_bounding_poly,json=fdBoundingPoly,proto3" json:"fd_bounding_poly,omitempty"` + // Detected face landmarks. + Landmarks []*FaceAnnotation_Landmark `protobuf:"bytes,3,rep,name=landmarks,proto3" json:"landmarks,omitempty"` + // Roll angle, which indicates the amount of clockwise/anti-clockwise rotation + // of the face relative to the image vertical about the axis perpendicular to + // the face. Range [-180,180]. + RollAngle float32 `protobuf:"fixed32,4,opt,name=roll_angle,json=rollAngle,proto3" json:"roll_angle,omitempty"` + // Yaw angle, which indicates the leftward/rightward angle that the face is + // pointing relative to the vertical plane perpendicular to the image. Range + // [-180,180]. + PanAngle float32 `protobuf:"fixed32,5,opt,name=pan_angle,json=panAngle,proto3" json:"pan_angle,omitempty"` + // Pitch angle, which indicates the upwards/downwards angle that the face is + // pointing relative to the image's horizontal plane. Range [-180,180]. + TiltAngle float32 `protobuf:"fixed32,6,opt,name=tilt_angle,json=tiltAngle,proto3" json:"tilt_angle,omitempty"` + // Detection confidence. Range [0, 1]. 
+ DetectionConfidence float32 `protobuf:"fixed32,7,opt,name=detection_confidence,json=detectionConfidence,proto3" json:"detection_confidence,omitempty"` + // Face landmarking confidence. Range [0, 1]. + LandmarkingConfidence float32 `protobuf:"fixed32,8,opt,name=landmarking_confidence,json=landmarkingConfidence,proto3" json:"landmarking_confidence,omitempty"` + // Joy likelihood. + JoyLikelihood Likelihood `protobuf:"varint,9,opt,name=joy_likelihood,json=joyLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"joy_likelihood,omitempty"` + // Sorrow likelihood. + SorrowLikelihood Likelihood `protobuf:"varint,10,opt,name=sorrow_likelihood,json=sorrowLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"sorrow_likelihood,omitempty"` + // Anger likelihood. + AngerLikelihood Likelihood `protobuf:"varint,11,opt,name=anger_likelihood,json=angerLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"anger_likelihood,omitempty"` + // Surprise likelihood. + SurpriseLikelihood Likelihood `protobuf:"varint,12,opt,name=surprise_likelihood,json=surpriseLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"surprise_likelihood,omitempty"` + // Under-exposed likelihood. + UnderExposedLikelihood Likelihood `protobuf:"varint,13,opt,name=under_exposed_likelihood,json=underExposedLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"under_exposed_likelihood,omitempty"` + // Blurred likelihood. + BlurredLikelihood Likelihood `protobuf:"varint,14,opt,name=blurred_likelihood,json=blurredLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"blurred_likelihood,omitempty"` + // Headwear likelihood. + HeadwearLikelihood Likelihood `protobuf:"varint,15,opt,name=headwear_likelihood,json=headwearLikelihood,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"headwear_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{3} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetFdBoundingPoly() *BoundingPoly { + if m != nil { + return m.FdBoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetLandmarks() []*FaceAnnotation_Landmark { + if m != nil { + return m.Landmarks + } + return nil +} + +func (m *FaceAnnotation) GetRollAngle() float32 { + if m != nil { + return m.RollAngle + } + return 0 +} + +func (m *FaceAnnotation) GetPanAngle() float32 { + if m != nil { + return m.PanAngle + } + return 
0 +} + +func (m *FaceAnnotation) GetTiltAngle() float32 { + if m != nil { + return m.TiltAngle + } + return 0 +} + +func (m *FaceAnnotation) GetDetectionConfidence() float32 { + if m != nil { + return m.DetectionConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetLandmarkingConfidence() float32 { + if m != nil { + return m.LandmarkingConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetJoyLikelihood() Likelihood { + if m != nil { + return m.JoyLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSorrowLikelihood() Likelihood { + if m != nil { + return m.SorrowLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetAngerLikelihood() Likelihood { + if m != nil { + return m.AngerLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSurpriseLikelihood() Likelihood { + if m != nil { + return m.SurpriseLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetUnderExposedLikelihood() Likelihood { + if m != nil { + return m.UnderExposedLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetBlurredLikelihood() Likelihood { + if m != nil { + return m.BlurredLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetHeadwearLikelihood() Likelihood { + if m != nil { + return m.HeadwearLikelihood + } + return Likelihood_UNKNOWN +} + +// A face-specific landmark (for example, a face feature). +type FaceAnnotation_Landmark struct { + // Face landmark type. + Type FaceAnnotation_Landmark_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.cloud.vision.v1p3beta1.FaceAnnotation_Landmark_Type" json:"type,omitempty"` + // Face landmark position. + Position *Position `protobuf:"bytes,4,opt,name=position,proto3" json:"position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation_Landmark) Reset() { *m = FaceAnnotation_Landmark{} } +func (m *FaceAnnotation_Landmark) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation_Landmark) ProtoMessage() {} +func (*FaceAnnotation_Landmark) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{3, 0} +} +func (m *FaceAnnotation_Landmark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation_Landmark.Unmarshal(m, b) +} +func (m *FaceAnnotation_Landmark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation_Landmark.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation_Landmark) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation_Landmark.Merge(dst, src) +} +func (m *FaceAnnotation_Landmark) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation_Landmark.Size(m) +} +func (m *FaceAnnotation_Landmark) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation_Landmark.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation_Landmark proto.InternalMessageInfo + +func (m *FaceAnnotation_Landmark) GetType() FaceAnnotation_Landmark_Type { + if m != nil { + return m.Type + } + return FaceAnnotation_Landmark_UNKNOWN_LANDMARK +} + +func (m *FaceAnnotation_Landmark) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +// Detected entity location information. +type LocationInfo struct { + // lat/long location coordinates. 
+ LatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +// A `Property` consists of a user-supplied name/value pair. +type Property struct { + // Name of the property. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value of the property. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Value of numeric properties. + Uint64Value uint64 `protobuf:"varint,3,opt,name=uint64_value,json=uint64Value,proto3" json:"uint64_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{5} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Property) GetUint64Value() uint64 { + if m != nil { + return m.Uint64Value + } + return 0 +} + +// Set of detected entity features. +type EntityAnnotation struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The language code for the locale in which the entity textual + // `description` is expressed. + Locale string `protobuf:"bytes,2,opt,name=locale,proto3" json:"locale,omitempty"` + // Entity textual description, expressed in its `locale` language. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Overall score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // **Deprecated. Use `score` instead.** + // The accuracy of the entity detection in an image. + // For example, for an image in which the "Eiffel Tower" entity is detected, + // this field represents the confidence that there is a tower in the query + // image. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + // The relevancy of the ICA (Image Content Annotation) label to the + // image. For example, the relevancy of "tower" is likely higher to an image + // containing the detected "Eiffel Tower" than to an image containing a + // detected distant towering building, even though the confidence that + // there is a tower in each image may be the same. Range [0, 1]. + Topicality float32 `protobuf:"fixed32,6,opt,name=topicality,proto3" json:"topicality,omitempty"` + // Image region to which this entity belongs. Not produced + // for `LABEL_DETECTION` features. + BoundingPoly *BoundingPoly `protobuf:"bytes,7,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The location information for the detected entity. Multiple + // `LocationInfo` elements can be present because one location may + // indicate the location of the scene in the image, and another location + // may indicate the location of the place where the image was taken. + // Location information is usually present for landmarks. + Locations []*LocationInfo `protobuf:"bytes,8,rep,name=locations,proto3" json:"locations,omitempty"` + // Some entities may have optional user-supplied `Property` (name/value) + // fields, such a score or string that qualifies the entity. 
+ Properties []*Property `protobuf:"bytes,9,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityAnnotation) Reset() { *m = EntityAnnotation{} } +func (m *EntityAnnotation) String() string { return proto.CompactTextString(m) } +func (*EntityAnnotation) ProtoMessage() {} +func (*EntityAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{6} +} +func (m *EntityAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityAnnotation.Unmarshal(m, b) +} +func (m *EntityAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityAnnotation.Marshal(b, m, deterministic) +} +func (dst *EntityAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityAnnotation.Merge(dst, src) +} +func (m *EntityAnnotation) XXX_Size() int { + return xxx_messageInfo_EntityAnnotation.Size(m) +} +func (m *EntityAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_EntityAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityAnnotation proto.InternalMessageInfo + +func (m *EntityAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *EntityAnnotation) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *EntityAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EntityAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *EntityAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *EntityAnnotation) GetTopicality() float32 { + if m != nil { + return m.Topicality + } + return 0 +} + +func (m *EntityAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *EntityAnnotation) GetLocations() []*LocationInfo { + if m != nil { + return m.Locations + } + return nil +} + +func (m *EntityAnnotation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Set of detected objects with bounding boxes. +type LocalizedObjectAnnotation struct { + // Object ID that should align with EntityAnnotation mid. + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Object name, expressed in its `language_code` language. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // Image region to which this object belongs. This must be populated. 
+ BoundingPoly *BoundingPoly `protobuf:"bytes,5,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalizedObjectAnnotation) Reset() { *m = LocalizedObjectAnnotation{} } +func (m *LocalizedObjectAnnotation) String() string { return proto.CompactTextString(m) } +func (*LocalizedObjectAnnotation) ProtoMessage() {} +func (*LocalizedObjectAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{7} +} +func (m *LocalizedObjectAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalizedObjectAnnotation.Unmarshal(m, b) +} +func (m *LocalizedObjectAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalizedObjectAnnotation.Marshal(b, m, deterministic) +} +func (dst *LocalizedObjectAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalizedObjectAnnotation.Merge(dst, src) +} +func (m *LocalizedObjectAnnotation) XXX_Size() int { + return xxx_messageInfo_LocalizedObjectAnnotation.Size(m) +} +func (m *LocalizedObjectAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LocalizedObjectAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalizedObjectAnnotation proto.InternalMessageInfo + +func (m *LocalizedObjectAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *LocalizedObjectAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +// Set of features pertaining to the image, computed by computer vision +// methods over safe-search verticals (for example, adult, spoof, medical, +// violence). +type SafeSearchAnnotation struct { + // Represents the adult content likelihood for the image. Adult content may + // contain elements such as nudity, pornographic images or cartoons, or + // sexual activities. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"adult,omitempty"` + // Spoof likelihood. The likelihood that an modification + // was made to the image's canonical version to make it appear + // funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"spoof,omitempty"` + // Likelihood that this is a medical image. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"medical,omitempty"` + // Likelihood that this image contains violent content. + Violence Likelihood `protobuf:"varint,4,opt,name=violence,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"violence,omitempty"` + // Likelihood that the request image contains racy content. Racy content may + // include (but is not limited to) skimpy or sheer clothing, strategically + // covered nudity, lewd or provocative poses, or close-ups of sensitive + // body areas. 
+ Racy Likelihood `protobuf:"varint,9,opt,name=racy,proto3,enum=google.cloud.vision.v1p3beta1.Likelihood" json:"racy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{8} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolence() Likelihood { + if m != nil { + return m.Violence + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +// Rectangle determined by min and max `LatLng` pairs. +type LatLongRect struct { + // Min lat/long pair. + MinLatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=min_lat_lng,json=minLatLng,proto3" json:"min_lat_lng,omitempty"` + // Max lat/long pair. 
+ MaxLatLng *latlng.LatLng `protobuf:"bytes,2,opt,name=max_lat_lng,json=maxLatLng,proto3" json:"max_lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLongRect) Reset() { *m = LatLongRect{} } +func (m *LatLongRect) String() string { return proto.CompactTextString(m) } +func (*LatLongRect) ProtoMessage() {} +func (*LatLongRect) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{9} +} +func (m *LatLongRect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLongRect.Unmarshal(m, b) +} +func (m *LatLongRect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLongRect.Marshal(b, m, deterministic) +} +func (dst *LatLongRect) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLongRect.Merge(dst, src) +} +func (m *LatLongRect) XXX_Size() int { + return xxx_messageInfo_LatLongRect.Size(m) +} +func (m *LatLongRect) XXX_DiscardUnknown() { + xxx_messageInfo_LatLongRect.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLongRect proto.InternalMessageInfo + +func (m *LatLongRect) GetMinLatLng() *latlng.LatLng { + if m != nil { + return m.MinLatLng + } + return nil +} + +func (m *LatLongRect) GetMaxLatLng() *latlng.LatLng { + if m != nil { + return m.MaxLatLng + } + return nil +} + +// Color information consists of RGB channels, score, and the fraction of +// the image that the color occupies in the image. +type ColorInfo struct { + // RGB components of the color. + Color *color.Color `protobuf:"bytes,1,opt,name=color,proto3" json:"color,omitempty"` + // Image-specific score for this color. Value in range [0, 1]. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The fraction of pixels the color occupies in the image. + // Value in range [0, 1]. + PixelFraction float32 `protobuf:"fixed32,3,opt,name=pixel_fraction,json=pixelFraction,proto3" json:"pixel_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColorInfo) Reset() { *m = ColorInfo{} } +func (m *ColorInfo) String() string { return proto.CompactTextString(m) } +func (*ColorInfo) ProtoMessage() {} +func (*ColorInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{10} +} +func (m *ColorInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColorInfo.Unmarshal(m, b) +} +func (m *ColorInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColorInfo.Marshal(b, m, deterministic) +} +func (dst *ColorInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColorInfo.Merge(dst, src) +} +func (m *ColorInfo) XXX_Size() int { + return xxx_messageInfo_ColorInfo.Size(m) +} +func (m *ColorInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ColorInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ColorInfo proto.InternalMessageInfo + +func (m *ColorInfo) GetColor() *color.Color { + if m != nil { + return m.Color + } + return nil +} + +func (m *ColorInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ColorInfo) GetPixelFraction() float32 { + if m != nil { + return m.PixelFraction + } + return 0 +} + +// Set of dominant colors and their corresponding scores. +type DominantColorsAnnotation struct { + // RGB color values with their score and pixel fraction. 
+ Colors []*ColorInfo `protobuf:"bytes,1,rep,name=colors,proto3" json:"colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DominantColorsAnnotation) Reset() { *m = DominantColorsAnnotation{} } +func (m *DominantColorsAnnotation) String() string { return proto.CompactTextString(m) } +func (*DominantColorsAnnotation) ProtoMessage() {} +func (*DominantColorsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{11} +} +func (m *DominantColorsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DominantColorsAnnotation.Unmarshal(m, b) +} +func (m *DominantColorsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DominantColorsAnnotation.Marshal(b, m, deterministic) +} +func (dst *DominantColorsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DominantColorsAnnotation.Merge(dst, src) +} +func (m *DominantColorsAnnotation) XXX_Size() int { + return xxx_messageInfo_DominantColorsAnnotation.Size(m) +} +func (m *DominantColorsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_DominantColorsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_DominantColorsAnnotation proto.InternalMessageInfo + +func (m *DominantColorsAnnotation) GetColors() []*ColorInfo { + if m != nil { + return m.Colors + } + return nil +} + +// Stores image properties, such as dominant colors. +type ImageProperties struct { + // If present, dominant colors completed successfully. + DominantColors *DominantColorsAnnotation `protobuf:"bytes,1,opt,name=dominant_colors,json=dominantColors,proto3" json:"dominant_colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageProperties) Reset() { *m = ImageProperties{} } +func (m *ImageProperties) String() string { return proto.CompactTextString(m) } +func (*ImageProperties) ProtoMessage() {} +func (*ImageProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{12} +} +func (m *ImageProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageProperties.Unmarshal(m, b) +} +func (m *ImageProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageProperties.Marshal(b, m, deterministic) +} +func (dst *ImageProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageProperties.Merge(dst, src) +} +func (m *ImageProperties) XXX_Size() int { + return xxx_messageInfo_ImageProperties.Size(m) +} +func (m *ImageProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ImageProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageProperties proto.InternalMessageInfo + +func (m *ImageProperties) GetDominantColors() *DominantColorsAnnotation { + if m != nil { + return m.DominantColors + } + return nil +} + +// Single crop hint that is used to generate a new crop when serving an image. +type CropHint struct { + // The bounding polygon for the crop region. The coordinates of the bounding + // box are in the original image's scale, as returned in `ImageParams`. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Confidence of this being a salient region. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Fraction of importance of this salient region with respect to the original + // image. + ImportanceFraction float32 `protobuf:"fixed32,3,opt,name=importance_fraction,json=importanceFraction,proto3" json:"importance_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHint) Reset() { *m = CropHint{} } +func (m *CropHint) String() string { return proto.CompactTextString(m) } +func (*CropHint) ProtoMessage() {} +func (*CropHint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{13} +} +func (m *CropHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHint.Unmarshal(m, b) +} +func (m *CropHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHint.Marshal(b, m, deterministic) +} +func (dst *CropHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHint.Merge(dst, src) +} +func (m *CropHint) XXX_Size() int { + return xxx_messageInfo_CropHint.Size(m) +} +func (m *CropHint) XXX_DiscardUnknown() { + xxx_messageInfo_CropHint.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHint proto.InternalMessageInfo + +func (m *CropHint) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *CropHint) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *CropHint) GetImportanceFraction() float32 { + if m != nil { + return m.ImportanceFraction + } + return 0 +} + +// Set of crop hints that are used to generate new crops when serving images. +type CropHintsAnnotation struct { + // Crop hint results. + CropHints []*CropHint `protobuf:"bytes,1,rep,name=crop_hints,json=cropHints,proto3" json:"crop_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsAnnotation) Reset() { *m = CropHintsAnnotation{} } +func (m *CropHintsAnnotation) String() string { return proto.CompactTextString(m) } +func (*CropHintsAnnotation) ProtoMessage() {} +func (*CropHintsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{14} +} +func (m *CropHintsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsAnnotation.Unmarshal(m, b) +} +func (m *CropHintsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsAnnotation.Marshal(b, m, deterministic) +} +func (dst *CropHintsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsAnnotation.Merge(dst, src) +} +func (m *CropHintsAnnotation) XXX_Size() int { + return xxx_messageInfo_CropHintsAnnotation.Size(m) +} +func (m *CropHintsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsAnnotation proto.InternalMessageInfo + +func (m *CropHintsAnnotation) GetCropHints() []*CropHint { + if m != nil { + return m.CropHints + } + return nil +} + +// Parameters for crop hints annotation request. +type CropHintsParams struct { + // Aspect ratios in floats, representing the ratio of the width to the height + // of the image. For example, if the desired aspect ratio is 4/3, the + // corresponding float value should be 1.33333. If not specified, the + // best possible crop is returned. 
The number of provided aspect ratios is + // limited to a maximum of 16; any aspect ratios provided after the 16th are + // ignored. + AspectRatios []float32 `protobuf:"fixed32,1,rep,packed,name=aspect_ratios,json=aspectRatios,proto3" json:"aspect_ratios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsParams) Reset() { *m = CropHintsParams{} } +func (m *CropHintsParams) String() string { return proto.CompactTextString(m) } +func (*CropHintsParams) ProtoMessage() {} +func (*CropHintsParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{15} +} +func (m *CropHintsParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsParams.Unmarshal(m, b) +} +func (m *CropHintsParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsParams.Marshal(b, m, deterministic) +} +func (dst *CropHintsParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsParams.Merge(dst, src) +} +func (m *CropHintsParams) XXX_Size() int { + return xxx_messageInfo_CropHintsParams.Size(m) +} +func (m *CropHintsParams) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsParams.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsParams proto.InternalMessageInfo + +func (m *CropHintsParams) GetAspectRatios() []float32 { + if m != nil { + return m.AspectRatios + } + return nil +} + +// Parameters for web detection request. +type WebDetectionParams struct { + // Whether to include results derived from the geo information in the image. + IncludeGeoResults bool `protobuf:"varint,2,opt,name=include_geo_results,json=includeGeoResults,proto3" json:"include_geo_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetectionParams) Reset() { *m = WebDetectionParams{} } +func (m *WebDetectionParams) String() string { return proto.CompactTextString(m) } +func (*WebDetectionParams) ProtoMessage() {} +func (*WebDetectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{16} +} +func (m *WebDetectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetectionParams.Unmarshal(m, b) +} +func (m *WebDetectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetectionParams.Marshal(b, m, deterministic) +} +func (dst *WebDetectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetectionParams.Merge(dst, src) +} +func (m *WebDetectionParams) XXX_Size() int { + return xxx_messageInfo_WebDetectionParams.Size(m) +} +func (m *WebDetectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetectionParams proto.InternalMessageInfo + +func (m *WebDetectionParams) GetIncludeGeoResults() bool { + if m != nil { + return m.IncludeGeoResults + } + return false +} + +// Image context and/or feature-specific parameters. +type ImageContext struct { + // Not used. + LatLongRect *LatLongRect `protobuf:"bytes,1,opt,name=lat_long_rect,json=latLongRect,proto3" json:"lat_long_rect,omitempty"` + // List of languages to use for TEXT_DETECTION. In most cases, an empty value + // yields the best results since it enables automatic language detection. For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. 
In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Text detection returns an + // error if one or more of the specified languages is not one of the + // [supported languages](/vision/docs/languages). + LanguageHints []string `protobuf:"bytes,2,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + // Parameters for crop hints annotation request. + CropHintsParams *CropHintsParams `protobuf:"bytes,4,opt,name=crop_hints_params,json=cropHintsParams,proto3" json:"crop_hints_params,omitempty"` + // Parameters for product search. + ProductSearchParams *ProductSearchParams `protobuf:"bytes,5,opt,name=product_search_params,json=productSearchParams,proto3" json:"product_search_params,omitempty"` + // Parameters for web detection. + WebDetectionParams *WebDetectionParams `protobuf:"bytes,6,opt,name=web_detection_params,json=webDetectionParams,proto3" json:"web_detection_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageContext) Reset() { *m = ImageContext{} } +func (m *ImageContext) String() string { return proto.CompactTextString(m) } +func (*ImageContext) ProtoMessage() {} +func (*ImageContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{17} +} +func (m *ImageContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageContext.Unmarshal(m, b) +} +func (m *ImageContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageContext.Marshal(b, m, deterministic) +} +func (dst *ImageContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageContext.Merge(dst, src) +} +func (m *ImageContext) XXX_Size() int { + return xxx_messageInfo_ImageContext.Size(m) +} +func (m *ImageContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageContext proto.InternalMessageInfo + +func (m *ImageContext) GetLatLongRect() *LatLongRect { + if m != nil { + return m.LatLongRect + } + return nil +} + +func (m *ImageContext) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +func (m *ImageContext) GetCropHintsParams() *CropHintsParams { + if m != nil { + return m.CropHintsParams + } + return nil +} + +func (m *ImageContext) GetProductSearchParams() *ProductSearchParams { + if m != nil { + return m.ProductSearchParams + } + return nil +} + +func (m *ImageContext) GetWebDetectionParams() *WebDetectionParams { + if m != nil { + return m.WebDetectionParams + } + return nil +} + +// Request for performing Google Cloud Vision API tasks over a user-provided +// image, with user-requested features. +type AnnotateImageRequest struct { + // The image to be processed. + Image *Image `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + // Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image. 
+ ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageRequest) Reset() { *m = AnnotateImageRequest{} } +func (m *AnnotateImageRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageRequest) ProtoMessage() {} +func (*AnnotateImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{18} +} +func (m *AnnotateImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageRequest.Unmarshal(m, b) +} +func (m *AnnotateImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageRequest.Merge(dst, src) +} +func (m *AnnotateImageRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateImageRequest.Size(m) +} +func (m *AnnotateImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageRequest proto.InternalMessageInfo + +func (m *AnnotateImageRequest) GetImage() *Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *AnnotateImageRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateImageRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +// If an image was produced from a file (e.g. a PDF), this message gives +// information about the source of that image. +type ImageAnnotationContext struct { + // The URI of the file used to produce the image. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // If the file was a PDF or TIFF, this field gives the page number within + // the file used to produce the image. 
+ PageNumber int32 `protobuf:"varint,2,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAnnotationContext) Reset() { *m = ImageAnnotationContext{} } +func (m *ImageAnnotationContext) String() string { return proto.CompactTextString(m) } +func (*ImageAnnotationContext) ProtoMessage() {} +func (*ImageAnnotationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{19} +} +func (m *ImageAnnotationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAnnotationContext.Unmarshal(m, b) +} +func (m *ImageAnnotationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAnnotationContext.Marshal(b, m, deterministic) +} +func (dst *ImageAnnotationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAnnotationContext.Merge(dst, src) +} +func (m *ImageAnnotationContext) XXX_Size() int { + return xxx_messageInfo_ImageAnnotationContext.Size(m) +} +func (m *ImageAnnotationContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAnnotationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAnnotationContext proto.InternalMessageInfo + +func (m *ImageAnnotationContext) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ImageAnnotationContext) GetPageNumber() int32 { + if m != nil { + return m.PageNumber + } + return 0 +} + +// Response to an image annotation request. +type AnnotateImageResponse struct { + // If present, face detection has completed successfully. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,1,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // If present, landmark detection has completed successfully. + LandmarkAnnotations []*EntityAnnotation `protobuf:"bytes,2,rep,name=landmark_annotations,json=landmarkAnnotations,proto3" json:"landmark_annotations,omitempty"` + // If present, logo detection has completed successfully. + LogoAnnotations []*EntityAnnotation `protobuf:"bytes,3,rep,name=logo_annotations,json=logoAnnotations,proto3" json:"logo_annotations,omitempty"` + // If present, label detection has completed successfully. + LabelAnnotations []*EntityAnnotation `protobuf:"bytes,4,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // If present, localized object detection has completed successfully. + // This will be sorted descending by confidence score. + LocalizedObjectAnnotations []*LocalizedObjectAnnotation `protobuf:"bytes,22,rep,name=localized_object_annotations,json=localizedObjectAnnotations,proto3" json:"localized_object_annotations,omitempty"` + // If present, text (OCR) detection has completed successfully. + TextAnnotations []*EntityAnnotation `protobuf:"bytes,5,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // If present, text (OCR) detection or document (OCR) text detection has + // completed successfully. + // This annotation provides the structural hierarchy for the OCR detected + // text. + FullTextAnnotation *TextAnnotation `protobuf:"bytes,12,opt,name=full_text_annotation,json=fullTextAnnotation,proto3" json:"full_text_annotation,omitempty"` + // If present, safe-search annotation has completed successfully. 
+ SafeSearchAnnotation *SafeSearchAnnotation `protobuf:"bytes,6,opt,name=safe_search_annotation,json=safeSearchAnnotation,proto3" json:"safe_search_annotation,omitempty"` + // If present, image properties were extracted successfully. + ImagePropertiesAnnotation *ImageProperties `protobuf:"bytes,8,opt,name=image_properties_annotation,json=imagePropertiesAnnotation,proto3" json:"image_properties_annotation,omitempty"` + // If present, crop hints have completed successfully. + CropHintsAnnotation *CropHintsAnnotation `protobuf:"bytes,11,opt,name=crop_hints_annotation,json=cropHintsAnnotation,proto3" json:"crop_hints_annotation,omitempty"` + // If present, web detection has completed successfully. + WebDetection *WebDetection `protobuf:"bytes,13,opt,name=web_detection,json=webDetection,proto3" json:"web_detection,omitempty"` + // If present, product search has completed successfully. + ProductSearchResults *ProductSearchResults `protobuf:"bytes,14,opt,name=product_search_results,json=productSearchResults,proto3" json:"product_search_results,omitempty"` + // If set, represents the error message for the operation. + // Note that filled-in image annotations are guaranteed to be + // correct, even when `error` is set. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + // If present, contextual information is needed to understand where this image + // comes from. + Context *ImageAnnotationContext `protobuf:"bytes,21,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageResponse) Reset() { *m = AnnotateImageResponse{} } +func (m *AnnotateImageResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageResponse) ProtoMessage() {} +func (*AnnotateImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{20} +} +func (m *AnnotateImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageResponse.Unmarshal(m, b) +} +func (m *AnnotateImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageResponse.Merge(dst, src) +} +func (m *AnnotateImageResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateImageResponse.Size(m) +} +func (m *AnnotateImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageResponse proto.InternalMessageInfo + +func (m *AnnotateImageResponse) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLandmarkAnnotations() []*EntityAnnotation { + if m != nil { + return m.LandmarkAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLogoAnnotations() []*EntityAnnotation { + if m != nil { + return m.LogoAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLabelAnnotations() []*EntityAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLocalizedObjectAnnotations() []*LocalizedObjectAnnotation { + if m != nil { + return m.LocalizedObjectAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetTextAnnotations() []*EntityAnnotation { + if m != nil { + return 
m.TextAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetFullTextAnnotation() *TextAnnotation { + if m != nil { + return m.FullTextAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetSafeSearchAnnotation() *SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetImagePropertiesAnnotation() *ImageProperties { + if m != nil { + return m.ImagePropertiesAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetCropHintsAnnotation() *CropHintsAnnotation { + if m != nil { + return m.CropHintsAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetWebDetection() *WebDetection { + if m != nil { + return m.WebDetection + } + return nil +} + +func (m *AnnotateImageResponse) GetProductSearchResults() *ProductSearchResults { + if m != nil { + return m.ProductSearchResults + } + return nil +} + +func (m *AnnotateImageResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *AnnotateImageResponse) GetContext() *ImageAnnotationContext { + if m != nil { + return m.Context + } + return nil +} + +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +type AnnotateFileResponse struct { + // Information about the file for which this response is generated. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Individual responses to images found within the file. + Responses []*AnnotateImageResponse `protobuf:"bytes,2,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileResponse) Reset() { *m = AnnotateFileResponse{} } +func (m *AnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileResponse) ProtoMessage() {} +func (*AnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{21} +} +func (m *AnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileResponse.Unmarshal(m, b) +} +func (m *AnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileResponse.Merge(dst, src) +} +func (m *AnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateFileResponse.Size(m) +} +func (m *AnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileResponse proto.InternalMessageInfo + +func (m *AnnotateFileResponse) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// Multiple image annotation requests are batched into a single service call. +type BatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. 
+ Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesRequest) Reset() { *m = BatchAnnotateImagesRequest{} } +func (m *BatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesRequest) ProtoMessage() {} +func (*BatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{22} +} +func (m *BatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *BatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesRequest.Size(m) +} +func (m *BatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to a batch image annotation request. +type BatchAnnotateImagesResponse struct { + // Individual responses to image annotation requests within the batch. + Responses []*AnnotateImageResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesResponse) Reset() { *m = BatchAnnotateImagesResponse{} } +func (m *BatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesResponse) ProtoMessage() {} +func (*BatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{23} +} +func (m *BatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *BatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesResponse.Size(m) +} +func (m *BatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateImagesResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// An offline file annotation request. +type AsyncAnnotateFileRequest struct { + // Required. Information about the input file. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. 
+ Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Required. The desired output location and metadata (e.g. format). + OutputConfig *OutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileRequest) Reset() { *m = AsyncAnnotateFileRequest{} } +func (m *AsyncAnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileRequest) ProtoMessage() {} +func (*AsyncAnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{24} +} +func (m *AsyncAnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileRequest.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileRequest.Merge(dst, src) +} +func (m *AsyncAnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileRequest.Size(m) +} +func (m *AsyncAnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileRequest proto.InternalMessageInfo + +func (m *AsyncAnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The response for a single offline file annotation request. +type AsyncAnnotateFileResponse struct { + // The output location and metadata from AsyncAnnotateFileRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileResponse) Reset() { *m = AsyncAnnotateFileResponse{} } +func (m *AsyncAnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileResponse) ProtoMessage() {} +func (*AsyncAnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{25} +} +func (m *AsyncAnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileResponse.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileResponse.Merge(dst, src) +} +func (m *AsyncAnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileResponse.Size(m) +} +func (m *AsyncAnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileResponse proto.InternalMessageInfo + +func (m *AsyncAnnotateFileResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Multiple async file annotation requests are batched into a single service +// call. +type AsyncBatchAnnotateFilesRequest struct { + // Individual async file annotation requests for this batch. + Requests []*AsyncAnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesRequest) Reset() { *m = AsyncBatchAnnotateFilesRequest{} } +func (m *AsyncBatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{26} +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Size(m) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesRequest) GetRequests() []*AsyncAnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to an async batch file annotation request. +type AsyncBatchAnnotateFilesResponse struct { + // The list of file annotation responses, one for each request in + // AsyncBatchAnnotateFilesRequest. 
+ Responses []*AsyncAnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesResponse) Reset() { *m = AsyncBatchAnnotateFilesResponse{} } +func (m *AsyncBatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{27} +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Size(m) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesResponse) GetResponses() []*AsyncAnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// The desired input location and metadata. +type InputConfig struct { + // The Google Cloud Storage location to read the input from. + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"` + // The type of the file. Currently only "application/pdf" and "image/tiff" + // are supported. Wildcards are not supported. + MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{28} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +func (m *InputConfig) GetGcsSource() *GcsSource { + if m != nil { + return m.GcsSource + } + return nil +} + +func (m *InputConfig) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// The desired output location and metadata. +type OutputConfig struct { + // The Google Cloud Storage location to write the output(s) to. 
+ GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3" json:"gcs_destination,omitempty"` + // The max number of response protos to put into each output JSON file on + // Google Cloud Storage. + // The valid range is [1, 100]. If not specified, the default value is 20. + // + // For example, for one pdf file with 100 pages, 100 response protos will + // be generated. If `batch_size` = 20, then 5 json files each + // containing 20 response protos will be written under the prefix + // `gcs_destination`.`uri`. + // + // Currently, batch_size only applies to GcsDestination, with potential future + // support for other output configurations. + BatchSize int32 `protobuf:"varint,2,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{29} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if m != nil { + return m.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetBatchSize() int32 { + if m != nil { + return m.BatchSize + } + return 0 +} + +// The Google Cloud Storage location where the input will be read from. +type GcsSource struct { + // Google Cloud Storage URI for the input file. This must only be a + // Google Cloud Storage object. Wildcards are not currently supported. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{30} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The Google Cloud Storage location where the output will be written to. 
+type GcsDestination struct { + // Google Cloud Storage URI where the results will be stored. Results will + // be in JSON format and preceded by its corresponding input URI. This field + // can either represent a single file, or a prefix for multiple outputs. + // Prefixes must end in a `/`. + // + // Examples: + // + // * File: gs://bucket-name/filename.json + // * Prefix: gs://bucket-name/prefix/here/ + // * File: gs://bucket-name/prefix/here + // + // If multiple outputs, each response is still AnnotateFileResponse, each of + // which contains some subset of the full list of AnnotateImageResponse. + // Multiple outputs can happen if, for example, the output JSON is too large + // and overflows into multiple sharded files. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{31} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// Contains metadata for the BatchAnnotateImages operation. +type OperationMetadata struct { + // Current state of the batch operation. + State OperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1p3beta1.OperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was received. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time when the operation result was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_741235179f821499, []int{32} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetState() OperationMetadata_State { + if m != nil { + return m.State + } + return OperationMetadata_STATE_UNSPECIFIED +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func init() { + proto.RegisterType((*Feature)(nil), "google.cloud.vision.v1p3beta1.Feature") + proto.RegisterType((*ImageSource)(nil), "google.cloud.vision.v1p3beta1.ImageSource") + proto.RegisterType((*Image)(nil), "google.cloud.vision.v1p3beta1.Image") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.vision.v1p3beta1.FaceAnnotation") + proto.RegisterType((*FaceAnnotation_Landmark)(nil), "google.cloud.vision.v1p3beta1.FaceAnnotation.Landmark") + proto.RegisterType((*LocationInfo)(nil), "google.cloud.vision.v1p3beta1.LocationInfo") + proto.RegisterType((*Property)(nil), "google.cloud.vision.v1p3beta1.Property") + proto.RegisterType((*EntityAnnotation)(nil), "google.cloud.vision.v1p3beta1.EntityAnnotation") + proto.RegisterType((*LocalizedObjectAnnotation)(nil), "google.cloud.vision.v1p3beta1.LocalizedObjectAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.vision.v1p3beta1.SafeSearchAnnotation") + proto.RegisterType((*LatLongRect)(nil), "google.cloud.vision.v1p3beta1.LatLongRect") + proto.RegisterType((*ColorInfo)(nil), "google.cloud.vision.v1p3beta1.ColorInfo") + proto.RegisterType((*DominantColorsAnnotation)(nil), "google.cloud.vision.v1p3beta1.DominantColorsAnnotation") + proto.RegisterType((*ImageProperties)(nil), "google.cloud.vision.v1p3beta1.ImageProperties") + proto.RegisterType((*CropHint)(nil), "google.cloud.vision.v1p3beta1.CropHint") + proto.RegisterType((*CropHintsAnnotation)(nil), "google.cloud.vision.v1p3beta1.CropHintsAnnotation") + proto.RegisterType((*CropHintsParams)(nil), "google.cloud.vision.v1p3beta1.CropHintsParams") + proto.RegisterType((*WebDetectionParams)(nil), "google.cloud.vision.v1p3beta1.WebDetectionParams") + proto.RegisterType((*ImageContext)(nil), "google.cloud.vision.v1p3beta1.ImageContext") + proto.RegisterType((*AnnotateImageRequest)(nil), 
"google.cloud.vision.v1p3beta1.AnnotateImageRequest") + proto.RegisterType((*ImageAnnotationContext)(nil), "google.cloud.vision.v1p3beta1.ImageAnnotationContext") + proto.RegisterType((*AnnotateImageResponse)(nil), "google.cloud.vision.v1p3beta1.AnnotateImageResponse") + proto.RegisterType((*AnnotateFileResponse)(nil), "google.cloud.vision.v1p3beta1.AnnotateFileResponse") + proto.RegisterType((*BatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1p3beta1.BatchAnnotateImagesRequest") + proto.RegisterType((*BatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1p3beta1.BatchAnnotateImagesResponse") + proto.RegisterType((*AsyncAnnotateFileRequest)(nil), "google.cloud.vision.v1p3beta1.AsyncAnnotateFileRequest") + proto.RegisterType((*AsyncAnnotateFileResponse)(nil), "google.cloud.vision.v1p3beta1.AsyncAnnotateFileResponse") + proto.RegisterType((*AsyncBatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesRequest") + proto.RegisterType((*AsyncBatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesResponse") + proto.RegisterType((*InputConfig)(nil), "google.cloud.vision.v1p3beta1.InputConfig") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.vision.v1p3beta1.OutputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.vision.v1p3beta1.GcsSource") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.vision.v1p3beta1.GcsDestination") + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.vision.v1p3beta1.OperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1p3beta1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.vision.v1p3beta1.Feature_Type", Feature_Type_name, Feature_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p3beta1.FaceAnnotation_Landmark_Type", FaceAnnotation_Landmark_Type_name, FaceAnnotation_Landmark_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p3beta1.OperationMetadata_State", OperationMetadata_State_name, OperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ImageAnnotatorClient is the client API for ImageAnnotator service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ImageAnnotatorClient interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). 
+ AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type imageAnnotatorClient struct { + cc *grpc.ClientConn +} + +func NewImageAnnotatorClient(cc *grpc.ClientConn) ImageAnnotatorClient { + return &imageAnnotatorClient{cc} +} + +func (c *imageAnnotatorClient) BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) { + out := new(BatchAnnotateImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ImageAnnotator/BatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ImageAnnotator/AsyncBatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ImageAnnotatorServer is the server API for ImageAnnotator service. +type ImageAnnotatorServer interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(context.Context, *BatchAnnotateImagesRequest) (*BatchAnnotateImagesResponse, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). 
+ AsyncBatchAnnotateFiles(context.Context, *AsyncBatchAnnotateFilesRequest) (*longrunning.Operation, error) +} + +func RegisterImageAnnotatorServer(s *grpc.Server, srv ImageAnnotatorServer) { + s.RegisterService(&_ImageAnnotator_serviceDesc, srv) +} + +func _ImageAnnotator_BatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ImageAnnotator/BatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, req.(*BatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ImageAnnotator/AsyncBatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, req.(*AsyncBatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ImageAnnotator_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p3beta1.ImageAnnotator", + HandlerType: (*ImageAnnotatorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchAnnotateImages", + Handler: _ImageAnnotator_BatchAnnotateImages_Handler, + }, + { + MethodName: "AsyncBatchAnnotateFiles", + Handler: _ImageAnnotator_AsyncBatchAnnotateFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p3beta1/image_annotator.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/image_annotator.proto", fileDescriptor_image_annotator_741235179f821499) +} + +var fileDescriptor_image_annotator_741235179f821499 = []byte{ + // 3065 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0xcf, 0x73, 0xdb, 0xc6, + 0xf5, 0x0f, 0xa9, 0x5f, 0xe4, 0x23, 0x25, 0x41, 0xab, 0x1f, 0xa6, 0x65, 0x2b, 0x56, 0x90, 0x6f, + 0xbe, 0x5f, 0x7d, 0xdd, 0x54, 0x1a, 0xcb, 0x49, 0x9a, 0x3a, 0xcd, 0xa4, 0x14, 0x05, 0x49, 0xac, + 0x29, 0x82, 0x5d, 0x41, 0x76, 0xec, 0x49, 0x07, 0x85, 0xc0, 0x25, 0x8d, 0x04, 0xc4, 0x22, 0x00, + 0x68, 0x4b, 0x3e, 0x66, 0xa6, 0x7f, 0x41, 0x6f, 0xbd, 0x76, 0x3a, 0x39, 0x35, 0x97, 0xf6, 0xd2, + 0x73, 0xef, 0x9d, 0xe9, 0xf4, 0xd0, 0x3f, 0xa0, 0x3d, 0xf4, 0xd8, 0x63, 0x8f, 0x9d, 0x5d, 0x2c, + 0xc0, 0x05, 0x29, 0x99, 0xa2, 0x93, 0xce, 0xf4, 0x44, 0xec, 0x7b, 0xfb, 0xf9, 0xbc, 0xdd, 0xf7, + 0x76, 0xdf, 0x3e, 0x2c, 0x08, 0xf7, 0xbb, 0x94, 0x76, 0x5d, 0xb2, 0x63, 0xbb, 0xb4, 0xdf, 0xde, + 0x79, 0xee, 0x84, 0x0e, 0xf5, 0x76, 0x9e, 0xdf, 0xf3, 0xef, 0x9f, 0x91, 0xc8, 0xba, 0xb7, 0xe3, + 0xf4, 0xac, 0x2e, 0x31, 0x2d, 0xcf, 0xa3, 0x91, 0x15, 0xd1, 0x60, 0xdb, 0x0f, 0x68, 0x44, 0xd1, + 0x46, 0x0c, 0xda, 0xe6, 0xa0, 0xed, 0x18, 0xb4, 0x9d, 
0x82, 0xd6, 0x6f, 0x0b, 0x4e, 0xcb, 0x77, + 0x76, 0x04, 0xd4, 0xa1, 0x5e, 0x18, 0x83, 0xd7, 0xdf, 0x7d, 0xb5, 0xc5, 0x2e, 0xa1, 0x3d, 0x12, + 0x05, 0x17, 0xa2, 0xf7, 0xee, 0xab, 0x7b, 0xfb, 0x01, 0x6d, 0xf7, 0xed, 0xc8, 0x0c, 0x89, 0x15, + 0xd8, 0xcf, 0x04, 0x66, 0xcc, 0x9c, 0x22, 0x72, 0x1e, 0x99, 0x83, 0x71, 0x09, 0xd0, 0xbd, 0x57, + 0x83, 0x5e, 0x90, 0x33, 0xb3, 0x4d, 0x22, 0x62, 0x4b, 0x90, 0xb7, 0x05, 0xc4, 0xa5, 0x5e, 0x37, + 0xe8, 0x7b, 0x9e, 0xe3, 0x75, 0x77, 0xa8, 0x4f, 0x82, 0xcc, 0x74, 0xef, 0x88, 0x4e, 0xbc, 0x75, + 0xd6, 0xef, 0xec, 0x44, 0x4e, 0x8f, 0x84, 0x91, 0xd5, 0xf3, 0x45, 0x87, 0x1b, 0xa2, 0x43, 0xe0, + 0xdb, 0x3b, 0x61, 0x64, 0x45, 0xfd, 0x70, 0x48, 0x11, 0x5d, 0xf8, 0x64, 0xc7, 0xa6, 0x6e, 0xe2, + 0xfe, 0xf5, 0x8a, 0xac, 0x70, 0xad, 0xc8, 0xf5, 0xba, 0xb1, 0x46, 0xfd, 0x7a, 0x0a, 0xe6, 0x0e, + 0x88, 0x15, 0xf5, 0x03, 0x82, 0x3e, 0x81, 0x69, 0xd6, 0xa1, 0x92, 0xdb, 0xcc, 0x6d, 0x2d, 0xec, + 0x7e, 0x6f, 0xfb, 0x95, 0x31, 0xdb, 0x16, 0xa8, 0x6d, 0xe3, 0xc2, 0x27, 0x98, 0x03, 0xd1, 0x1d, + 0x28, 0xf5, 0xac, 0x73, 0x33, 0x20, 0x61, 0xdf, 0x8d, 0xc2, 0x4a, 0x7e, 0x33, 0xb7, 0x35, 0x83, + 0xa1, 0x67, 0x9d, 0xe3, 0x58, 0x82, 0x56, 0x60, 0xa6, 0x47, 0xdb, 0xc4, 0xad, 0x4c, 0x6d, 0xe6, + 0xb6, 0x8a, 0x38, 0x6e, 0xa8, 0xbf, 0xce, 0xc3, 0x34, 0x63, 0x41, 0x2b, 0xa0, 0x18, 0x4f, 0x5a, + 0x9a, 0x79, 0xda, 0x3c, 0x69, 0x69, 0xb5, 0xfa, 0x41, 0x5d, 0xdb, 0x57, 0xde, 0x40, 0x08, 0x16, + 0x0e, 0xaa, 0x35, 0xcd, 0xdc, 0xd7, 0x0c, 0xad, 0x66, 0xd4, 0xf5, 0xa6, 0x92, 0x43, 0x6b, 0x80, + 0x1a, 0xd5, 0xe6, 0xfe, 0x71, 0x15, 0x3f, 0x94, 0xe4, 0x79, 0xd6, 0xb7, 0xa1, 0x1f, 0xea, 0x92, + 0x6c, 0x0a, 0x2d, 0xc3, 0x62, 0xa3, 0xba, 0xa7, 0x35, 0x24, 0xe1, 0x34, 0xeb, 0x68, 0x68, 0x9f, + 0x1a, 0x92, 0x6c, 0x06, 0xdd, 0x82, 0x1b, 0xfb, 0x7a, 0xed, 0xf4, 0x58, 0x6b, 0x1a, 0xe6, 0x90, + 0xb2, 0x84, 0x6e, 0xc2, 0xea, 0x49, 0xf5, 0x40, 0x33, 0x4f, 0xb4, 0x2a, 0xae, 0x1d, 0x49, 0xaa, + 0x59, 0x36, 0xec, 0xfa, 0x71, 0xf5, 0x50, 0x33, 0x5b, 0x58, 0x6f, 0x69, 0xd8, 0xa8, 0x6b, 0x27, + 0xca, 0x1c, 0x5a, 0x00, 0xa8, 0x61, 0xbd, 0x65, 0x1e, 0xd5, 0x9b, 0xc6, 0x89, 0x52, 0x44, 0x4b, + 0x30, 0xff, 0x58, 0xdb, 0x93, 0x80, 0xc0, 0x06, 0xd1, 0xc2, 0xfa, 0xfe, 0x69, 0xcd, 0x10, 0xb4, + 0x4a, 0x19, 0xdd, 0x80, 0x65, 0x7d, 0xef, 0x27, 0x5a, 0xcd, 0x30, 0x1b, 0x7a, 0xad, 0xda, 0xa8, + 0x3f, 0xad, 0xf2, 0xce, 0xcb, 0x6a, 0x13, 0x4a, 0x75, 0xb6, 0xb7, 0x4e, 0x68, 0x3f, 0xb0, 0x09, + 0x52, 0x61, 0xbe, 0x6b, 0x87, 0x66, 0xbc, 0xdd, 0xfa, 0x81, 0xc3, 0xa3, 0x56, 0xc4, 0xa5, 0xae, + 0x1d, 0xf2, 0x6e, 0xa7, 0x81, 0x83, 0x6e, 0x41, 0x71, 0xa0, 0xcf, 0x73, 0x7d, 0xc1, 0x11, 0x4a, + 0x95, 0xc0, 0x0c, 0xef, 0x88, 0x2a, 0x30, 0x67, 0x53, 0x2f, 0x22, 0x5e, 0xc4, 0x39, 0xca, 0x38, + 0x69, 0xa2, 0x3d, 0x98, 0x0d, 0xb9, 0x35, 0x0e, 0x2e, 0xed, 0xde, 0x1d, 0xb3, 0x24, 0xa4, 0xf1, + 0x61, 0x81, 0x54, 0x7f, 0xa3, 0xc0, 0xc2, 0x81, 0x65, 0x93, 0x6a, 0xba, 0x7d, 0x50, 0x0b, 0xe6, + 0xcf, 0x68, 0xdf, 0x6b, 0x3b, 0x5e, 0xd7, 0xf4, 0xa9, 0x7b, 0xc1, 0xcd, 0x96, 0xc6, 0x2e, 0xb8, + 0x3d, 0x81, 0x69, 0x51, 0xf7, 0x02, 0x97, 0xcf, 0xa4, 0x16, 0x3a, 0x05, 0xa5, 0xd3, 0x36, 0xb3, + 0xa4, 0xf9, 0xc9, 0x49, 0x17, 0x3a, 0x6d, 0xb9, 0x8d, 0x0c, 0x28, 0xba, 0x96, 0xd7, 0xee, 0x59, + 0xc1, 0x17, 0x61, 0x65, 0x6a, 0x73, 0x6a, 0xab, 0xb4, 0xfb, 0xc1, 0xb8, 0x5d, 0x91, 0x99, 0xea, + 0x76, 0x43, 0xc0, 0xf1, 0x80, 0x08, 0x6d, 0x00, 0x04, 0xd4, 0x75, 0x4d, 0xcb, 0xeb, 0xba, 0xa4, + 0x32, 0xbd, 0x99, 0xdb, 0xca, 0xe3, 0x22, 0x93, 0x54, 0x99, 0x80, 0x05, 0xcd, 0xb7, 0x3c, 0xa1, + 0x9d, 0xe1, 0xda, 0x82, 0x6f, 0x79, 0xb1, 0x72, 0x03, 0x20, 0x72, 0xdc, 0x48, 
0x68, 0x67, 0x63, + 0x2c, 0x93, 0xc4, 0xea, 0x7b, 0xb0, 0x92, 0xa6, 0x1c, 0xd3, 0xa6, 0x5e, 0xc7, 0x69, 0x13, 0xcf, + 0x26, 0x95, 0x39, 0xde, 0x71, 0x39, 0xd5, 0xd5, 0x52, 0x15, 0x7a, 0x1f, 0xd6, 0x92, 0xa1, 0x31, + 0xd7, 0x49, 0xa0, 0x02, 0x07, 0xad, 0x4a, 0x5a, 0x09, 0xd6, 0x82, 0x85, 0xcf, 0xe9, 0x85, 0xe9, + 0x3a, 0x5f, 0x10, 0xd7, 0x79, 0x46, 0x69, 0xbb, 0x52, 0xe4, 0x59, 0xe3, 0xff, 0xc7, 0xf8, 0xa7, + 0x91, 0x02, 0xf0, 0xfc, 0xe7, 0xf4, 0x62, 0xd0, 0x44, 0x8f, 0x60, 0x29, 0xa4, 0x41, 0x40, 0x5f, + 0xc8, 0xa4, 0x30, 0x29, 0xa9, 0x12, 0x73, 0x48, 0xbc, 0x06, 0x28, 0x96, 0xd7, 0x25, 0x81, 0x4c, + 0x5b, 0x9a, 0x94, 0x76, 0x91, 0x53, 0x48, 0xac, 0x4f, 0x61, 0x39, 0xec, 0x07, 0x7e, 0xe0, 0x84, + 0x44, 0x26, 0x2e, 0x4f, 0x4a, 0x8c, 0x12, 0x16, 0x89, 0xdb, 0x86, 0x4a, 0xdf, 0x6b, 0x93, 0xc0, + 0x24, 0xe7, 0x3e, 0x0d, 0x49, 0x5b, 0x36, 0x30, 0x3f, 0xa9, 0x81, 0x35, 0x4e, 0xa5, 0xc5, 0x4c, + 0x92, 0x91, 0x4f, 0x01, 0x9d, 0xb9, 0xfd, 0x20, 0xc8, 0xd2, 0x2f, 0x4c, 0x4a, 0xbf, 0x24, 0x48, + 0xb2, 0xae, 0x79, 0x46, 0xac, 0xf6, 0x0b, 0x62, 0x65, 0x7c, 0xbe, 0x38, 0xb1, 0x6b, 0x12, 0x96, + 0x81, 0x6c, 0xfd, 0xaf, 0x73, 0x50, 0x48, 0xf6, 0x14, 0xd2, 0xc5, 0x79, 0x35, 0xc5, 0x99, 0x3f, + 0x7a, 0xbd, 0x9d, 0x29, 0x9f, 0x5f, 0x35, 0x28, 0xf8, 0x34, 0x74, 0x98, 0x9e, 0xef, 0xcb, 0xd2, + 0xee, 0xff, 0x8d, 0x21, 0x6d, 0x89, 0xee, 0x38, 0x05, 0xaa, 0xbf, 0x9f, 0x1d, 0x9c, 0x66, 0xa7, + 0xcd, 0x87, 0x4d, 0xfd, 0x71, 0xd3, 0x4c, 0xce, 0x2a, 0xe5, 0x0d, 0x54, 0x86, 0x42, 0x43, 0x3b, + 0x30, 0x4c, 0xed, 0x89, 0xa6, 0xe4, 0xd0, 0x3c, 0x14, 0x71, 0xfd, 0xf0, 0x28, 0x6e, 0xe6, 0x51, + 0x05, 0x56, 0xb8, 0x52, 0x3f, 0x30, 0x93, 0x4e, 0x7b, 0x58, 0x7f, 0xac, 0x4c, 0xb1, 0xe3, 0x27, + 0xee, 0x38, 0xac, 0x9a, 0x66, 0xaa, 0x04, 0x94, 0x72, 0x71, 0xd5, 0x0c, 0x5a, 0x87, 0xb5, 0x14, + 0x95, 0xd5, 0xcd, 0x32, 0xd8, 0x71, 0x7d, 0xbf, 0xa5, 0xd7, 0x9b, 0x86, 0xb9, 0xa7, 0x19, 0x8f, + 0x35, 0xad, 0xc9, 0xb4, 0xec, 0xe8, 0x2a, 0x43, 0xa1, 0xa9, 0x9f, 0x68, 0xa6, 0x51, 0x6f, 0x29, + 0x05, 0x36, 0xc6, 0xd3, 0x56, 0x4b, 0xc3, 0x66, 0xa3, 0xde, 0x52, 0x8a, 0xac, 0xd9, 0xd0, 0x1f, + 0x8b, 0x26, 0xb0, 0x63, 0xee, 0x58, 0x3f, 0x35, 0x8e, 0xf8, 0xa8, 0x94, 0x12, 0x5a, 0x84, 0x52, + 0xdc, 0xe6, 0xf6, 0x94, 0x32, 0x52, 0xa0, 0x1c, 0x0b, 0x6a, 0x5a, 0xd3, 0xd0, 0xb0, 0x32, 0x8f, + 0x56, 0x61, 0x89, 0xd3, 0xef, 0xe9, 0x86, 0xa1, 0x1f, 0x8b, 0x8e, 0x0b, 0xcc, 0x5f, 0xb2, 0x98, + 0xf3, 0x2d, 0xb2, 0x93, 0x5e, 0x96, 0x0a, 0x12, 0x25, 0x9d, 0xb5, 0xf6, 0x44, 0x33, 0x0d, 0xbd, + 0x65, 0xee, 0xe9, 0xa7, 0xcd, 0xfd, 0x2a, 0x7e, 0xa2, 0x2c, 0x65, 0x54, 0xf1, 0xac, 0x6b, 0x3a, + 0x6e, 0x6a, 0x58, 0x41, 0xe8, 0x36, 0x54, 0x52, 0x95, 0x60, 0x4c, 0x81, 0xcb, 0xa9, 0xfb, 0x99, + 0x96, 0x3f, 0x08, 0xdc, 0xca, 0xc0, 0x91, 0x23, 0xe6, 0x56, 0xb3, 0xba, 0x8c, 0xbd, 0x35, 0xb4, + 0x01, 0x37, 0x07, 0xba, 0x61, 0x83, 0x37, 0x06, 0x51, 0x1d, 0xb6, 0x58, 0x41, 0x77, 0xe0, 0x96, + 0x1c, 0x67, 0x33, 0x0e, 0x41, 0x12, 0x31, 0xe5, 0x26, 0xda, 0x84, 0xdb, 0x99, 0x90, 0x0e, 0xf7, + 0x58, 0x67, 0x0e, 0x8d, 0x29, 0xaa, 0xd8, 0x34, 0x70, 0xf5, 0x90, 0xd5, 0x11, 0xb7, 0x98, 0xf7, + 0x05, 0x4e, 0x12, 0xdf, 0xe6, 0x95, 0x53, 0x32, 0xf7, 0xd6, 0x69, 0xab, 0xde, 0x50, 0x36, 0x58, + 0xe5, 0x34, 0x18, 0x5e, 0x2c, 0x7c, 0x93, 0xe1, 0x0f, 0x74, 0xac, 0x1d, 0x69, 0xd5, 0x7d, 0xf3, + 0x90, 0x17, 0x56, 0x8d, 0xaa, 0x72, 0x87, 0x95, 0x37, 0xb5, 0xa3, 0x7a, 0xd3, 0x3c, 0x6c, 0x56, + 0x8d, 0x23, 0x46, 0xb9, 0xc9, 0xec, 0x73, 0x11, 0xe7, 0x3d, 0xd4, 0x9b, 0x4c, 0xfa, 0x16, 0xc3, + 0x73, 0x69, 0xcc, 0x2c, 0xc4, 0xaa, 0xfa, 0x23, 0x28, 0x37, 0xa8, 0xcd, 0xf7, 0x66, 0xdd, 0xeb, + 0x50, 
0xf4, 0x2e, 0xcc, 0xb9, 0x56, 0x64, 0xba, 0x5e, 0x57, 0x94, 0x07, 0xcb, 0xc9, 0x56, 0x64, + 0x5b, 0x75, 0xbb, 0x61, 0x45, 0x0d, 0xaf, 0x8b, 0x67, 0x5d, 0xfe, 0xab, 0x3e, 0x86, 0x42, 0x2b, + 0x60, 0x95, 0x74, 0x74, 0x81, 0x10, 0x4c, 0x7b, 0x56, 0x8f, 0x88, 0x82, 0x88, 0x3f, 0xb3, 0xc2, + 0xf3, 0xb9, 0xe5, 0xf6, 0x89, 0xa8, 0x82, 0xe2, 0x06, 0x7a, 0x0b, 0xca, 0x7d, 0xc7, 0x8b, 0x3e, + 0x78, 0xcf, 0x8c, 0x95, 0x2c, 0x91, 0x4c, 0xe3, 0x52, 0x2c, 0x7b, 0xc4, 0x44, 0xea, 0xaf, 0xa6, + 0x40, 0xd1, 0xbc, 0xc8, 0x89, 0x2e, 0xa4, 0x02, 0x46, 0x81, 0xa9, 0x9e, 0xd3, 0x16, 0x06, 0xd8, + 0x23, 0x5a, 0x83, 0x59, 0x97, 0xda, 0x96, 0x9b, 0x18, 0x10, 0x2d, 0xb4, 0x09, 0xa5, 0x36, 0x09, + 0xed, 0xc0, 0xf1, 0x79, 0x52, 0x89, 0xcb, 0x5e, 0x59, 0xc4, 0x46, 0x16, 0xda, 0x34, 0x48, 0x0a, + 0x81, 0xb8, 0x81, 0xde, 0x04, 0x90, 0x4e, 0xe2, 0xb8, 0x0a, 0x90, 0x24, 0x4c, 0x1f, 0x51, 0xdf, + 0xb1, 0x2d, 0xd7, 0x89, 0x2e, 0x44, 0x1d, 0x20, 0x49, 0x46, 0x4b, 0xac, 0xb9, 0x6f, 0x5b, 0x62, + 0xd5, 0xa1, 0xe8, 0x8a, 0xf8, 0x84, 0x95, 0x02, 0xaf, 0x85, 0xc6, 0xb1, 0xc9, 0xf1, 0xc4, 0x03, + 0x34, 0x3a, 0x04, 0xf0, 0xe3, 0x60, 0x39, 0x24, 0xac, 0x14, 0x39, 0xd7, 0xd8, 0x44, 0x2b, 0xa2, + 0x8b, 0x25, 0xa8, 0xfa, 0xa7, 0x1c, 0xdc, 0x64, 0x46, 0x5c, 0xe7, 0x25, 0x69, 0xeb, 0x67, 0x9f, + 0x13, 0x3b, 0x7a, 0x65, 0x94, 0xde, 0x86, 0x79, 0xd7, 0xf2, 0xba, 0x7d, 0x56, 0x12, 0xdb, 0xb4, + 0x9d, 0x04, 0xab, 0x9c, 0x08, 0x6b, 0xb4, 0x4d, 0xd2, 0xe5, 0x33, 0x95, 0x5d, 0x3e, 0x97, 0x04, + 0x69, 0xc4, 0xc9, 0x33, 0xdf, 0xd2, 0xc9, 0xea, 0xdf, 0xf2, 0xb0, 0x72, 0x62, 0x75, 0xc8, 0x09, + 0x7f, 0x39, 0x95, 0xe6, 0xf2, 0x09, 0xcc, 0x58, 0xed, 0xbe, 0x1b, 0x89, 0x77, 0xb3, 0x09, 0x4e, + 0xd1, 0x18, 0xc7, 0x08, 0x42, 0x9f, 0xd2, 0x0e, 0x9f, 0xf2, 0x64, 0x04, 0x1c, 0x87, 0x6a, 0x30, + 0xd7, 0x23, 0x6d, 0xb6, 0xbe, 0xc4, 0x79, 0x3b, 0x01, 0x45, 0x82, 0x44, 0x1a, 0x14, 0x9e, 0x3b, + 0xd4, 0xe5, 0x8b, 0x7a, 0x7a, 0x52, 0x96, 0x14, 0x8a, 0x3e, 0x86, 0xe9, 0xc0, 0xb2, 0x2f, 0x26, + 0x2f, 0x39, 0x39, 0x4c, 0x7d, 0x01, 0x25, 0x96, 0x3e, 0xa8, 0xd7, 0xc5, 0xc4, 0x8e, 0xd0, 0x7d, + 0x28, 0xf5, 0x1c, 0xcf, 0xbc, 0x46, 0xb6, 0x29, 0xf6, 0x1c, 0x2f, 0x7e, 0xe4, 0x20, 0xeb, 0x3c, + 0x05, 0xe5, 0x5f, 0x05, 0xb2, 0xce, 0xe3, 0x47, 0x35, 0x80, 0x62, 0x8d, 0xbd, 0x95, 0xf3, 0x04, + 0xb7, 0x05, 0x33, 0xfc, 0x15, 0x5d, 0x18, 0x44, 0x19, 0x2c, 0xef, 0x86, 0xe3, 0x0e, 0x83, 0xd5, + 0x97, 0x97, 0x57, 0xdf, 0x3b, 0xb0, 0xe0, 0x3b, 0xe7, 0xc4, 0x35, 0x3b, 0x81, 0x65, 0xa7, 0xd9, + 0x25, 0x8f, 0xe7, 0xb9, 0xf4, 0x40, 0x08, 0xd5, 0xcf, 0xa0, 0xb2, 0x4f, 0x7b, 0x8e, 0x67, 0x79, + 0x11, 0x27, 0x0d, 0xa5, 0x55, 0xf5, 0x63, 0x98, 0xe5, 0x16, 0xc2, 0x4a, 0x8e, 0x6f, 0xc2, 0xad, + 0x31, 0x9e, 0x4c, 0x07, 0x8f, 0x05, 0x4e, 0x0d, 0x61, 0x91, 0xbf, 0xf4, 0xb5, 0xd2, 0x4d, 0x89, + 0x7e, 0x0e, 0x8b, 0x6d, 0x61, 0xd0, 0x4c, 0xd9, 0xd9, 0x0c, 0x7f, 0x30, 0x86, 0xfd, 0xaa, 0x61, + 0xe2, 0x85, 0x76, 0x46, 0xa3, 0xfe, 0x36, 0x07, 0x85, 0x5a, 0x40, 0xfd, 0x23, 0xc7, 0x8b, 0xfe, + 0x03, 0x2f, 0x93, 0xd9, 0xdc, 0x9b, 0x1f, 0xc9, 0xbd, 0x3b, 0xb0, 0xec, 0xf4, 0x7c, 0x1a, 0x44, + 0x96, 0x67, 0x93, 0x61, 0xef, 0xa3, 0x81, 0x2a, 0x0d, 0xc1, 0xcf, 0x60, 0x39, 0x19, 0xae, 0xec, + 0xfd, 0x03, 0x00, 0x3b, 0xa0, 0xbe, 0xf9, 0x8c, 0xc9, 0x45, 0x04, 0xc6, 0xa5, 0xc1, 0x84, 0x07, + 0x17, 0xed, 0x84, 0x51, 0xfd, 0x00, 0x16, 0x53, 0xfa, 0x96, 0x15, 0x58, 0xbd, 0x90, 0x25, 0x3a, + 0x2b, 0xf4, 0x89, 0x1d, 0x99, 0xfc, 0x6a, 0x29, 0x66, 0xcf, 0xe3, 0x72, 0x2c, 0xc4, 0x5c, 0xa6, + 0xee, 0x03, 0x7a, 0x4c, 0xce, 0xf6, 0x93, 0x77, 0x42, 0x01, 0xdd, 0x86, 0x65, 0xc7, 0xb3, 0xdd, + 0x7e, 0x9b, 0x98, 0x5d, 0x42, 
0x33, 0x77, 0x39, 0x05, 0xbc, 0x24, 0x54, 0x87, 0x84, 0x8a, 0x2b, + 0x1d, 0xf5, 0x9b, 0x29, 0x28, 0xf3, 0x25, 0x50, 0xa3, 0x5e, 0x44, 0xce, 0x23, 0xd4, 0x64, 0x49, + 0x36, 0x32, 0x5d, 0xea, 0x75, 0xcd, 0x80, 0xd8, 0x91, 0x08, 0xc8, 0xb8, 0xbb, 0x03, 0x69, 0x47, + 0xe2, 0x92, 0x2b, 0x6d, 0xcf, 0x77, 0x60, 0x21, 0x4d, 0xda, 0xb1, 0xab, 0xf2, 0x9b, 0x53, 0x5b, + 0x45, 0x9c, 0xa6, 0x72, 0x3e, 0x71, 0xf4, 0x14, 0x96, 0x06, 0xde, 0x34, 0x7d, 0x3e, 0x19, 0x51, + 0xc4, 0x6f, 0x5f, 0xd3, 0xa9, 0xc2, 0x7b, 0x78, 0xd1, 0x1e, 0x72, 0x67, 0x07, 0x56, 0xb3, 0xd7, + 0x86, 0x09, 0x7f, 0x9c, 0xf0, 0x77, 0xc7, 0x9f, 0x5d, 0x0c, 0x1b, 0x27, 0x75, 0x61, 0x63, 0xd9, + 0x1f, 0x15, 0x22, 0x1b, 0x56, 0x32, 0xb7, 0x86, 0x89, 0x99, 0x59, 0x6e, 0xe6, 0xde, 0x18, 0x33, + 0xa3, 0xc1, 0xc4, 0xe8, 0xc5, 0x88, 0x4c, 0xfd, 0x47, 0x0e, 0x56, 0xc4, 0x2a, 0x24, 0x3c, 0x70, + 0x98, 0x7c, 0xd9, 0x27, 0x61, 0x84, 0x1e, 0xc0, 0x0c, 0xbf, 0x1c, 0x12, 0x01, 0xfb, 0x9f, 0xeb, + 0x5c, 0xf6, 0xe0, 0x18, 0x82, 0xf6, 0xa0, 0xd0, 0x89, 0xef, 0x03, 0xe3, 0xf0, 0x94, 0x76, 0xff, + 0xf7, 0x7a, 0xd7, 0x87, 0x38, 0xc5, 0xb1, 0x9d, 0x1c, 0xdf, 0x56, 0xd9, 0xf1, 0x4a, 0xe2, 0x3b, + 0x6a, 0xfc, 0x4e, 0x96, 0x17, 0x1f, 0x2e, 0x3b, 0x52, 0x4b, 0x7d, 0x08, 0x6b, 0x5c, 0x3b, 0xd8, + 0x74, 0xc9, 0x22, 0x55, 0x60, 0x6a, 0x70, 0x67, 0xc6, 0x1e, 0xd1, 0x1d, 0x28, 0xf9, 0xcc, 0xb8, + 0xd7, 0xef, 0x9d, 0x91, 0x20, 0xb9, 0xbb, 0x64, 0xa2, 0x26, 0x97, 0xa8, 0x7f, 0x04, 0x58, 0x1d, + 0xf2, 0x5b, 0xe8, 0x53, 0x2f, 0x24, 0xe8, 0x53, 0x50, 0x3a, 0x96, 0x4d, 0xa4, 0x1b, 0xe2, 0x64, + 0x3b, 0x7f, 0x7f, 0xa2, 0x77, 0x52, 0xbc, 0xd8, 0xc9, 0xb4, 0x43, 0x74, 0x06, 0x2b, 0xc9, 0xf5, + 0x4b, 0x86, 0x3d, 0x76, 0xf1, 0xce, 0x18, 0xf6, 0xe1, 0xba, 0x15, 0x2f, 0x27, 0x64, 0xb2, 0x8d, + 0xa7, 0xa0, 0xb8, 0xb4, 0x4b, 0x33, 0xfc, 0x53, 0xaf, 0xc7, 0xbf, 0xc8, 0x88, 0x64, 0xee, 0xcf, + 0x60, 0xc9, 0xb5, 0xce, 0x88, 0x9b, 0x21, 0x9f, 0x7e, 0x3d, 0x72, 0x85, 0x33, 0xc9, 0xec, 0x2f, + 0xe1, 0xb6, 0x9b, 0x54, 0x7f, 0x26, 0xe5, 0xe5, 0x5f, 0xc6, 0xd0, 0x1a, 0x37, 0xf4, 0xe1, 0x35, + 0xaa, 0xd4, 0x4b, 0x0b, 0x48, 0xbc, 0xee, 0x5e, 0xa5, 0xe2, 0x5e, 0x1b, 0xfa, 0x2a, 0xc0, 0xb2, + 0xc1, 0xeb, 0x79, 0x8d, 0x11, 0xc9, 0xdc, 0x26, 0xac, 0x74, 0xfa, 0xae, 0x6b, 0x0e, 0x19, 0xe0, + 0x97, 0x4b, 0xe3, 0xd7, 0x94, 0x91, 0x61, 0xc3, 0x88, 0x51, 0x65, 0x65, 0xc8, 0x81, 0xb5, 0xd0, + 0xea, 0x90, 0x24, 0x99, 0x49, 0x26, 0xe2, 0x4c, 0x73, 0x7f, 0x8c, 0x89, 0xcb, 0x4a, 0x54, 0xbc, + 0x12, 0x5e, 0x56, 0xb8, 0x7a, 0x70, 0x2b, 0xde, 0xd4, 0x83, 0xb2, 0x5d, 0xb6, 0x57, 0xb8, 0x56, + 0x82, 0x1e, 0x2a, 0x31, 0xf0, 0x4d, 0x27, 0x2b, 0x90, 0xec, 0x75, 0x60, 0x55, 0x3a, 0x06, 0x24, + 0x4b, 0xa5, 0x6b, 0xa5, 0xea, 0x4b, 0xce, 0x69, 0xbc, 0x6c, 0x5f, 0x72, 0x78, 0xb7, 0x60, 0x3e, + 0x93, 0xaa, 0xf9, 0xc5, 0xdc, 0xf8, 0x64, 0x25, 0xe7, 0x68, 0x5c, 0x96, 0xb3, 0x33, 0x0b, 0xca, + 0xd0, 0x21, 0x93, 0x9c, 0xbd, 0x0b, 0xd7, 0x0a, 0x4a, 0xe6, 0x94, 0x11, 0xa7, 0x33, 0x5e, 0xf1, + 0x2f, 0x91, 0xb2, 0xd2, 0x93, 0x04, 0x01, 0x0d, 0x78, 0x01, 0x2d, 0x95, 0x9e, 0x81, 0x6f, 0x6f, + 0x9f, 0xf0, 0x0f, 0x4a, 0x38, 0xee, 0x80, 0x74, 0xf1, 0x6d, 0xe0, 0x3c, 0xaa, 0xac, 0xf2, 0xbe, + 0xef, 0x5f, 0x27, 0x54, 0x23, 0xf9, 0x16, 0x27, 0x2c, 0xea, 0x1f, 0xa4, 0xd3, 0xe7, 0xc0, 0x71, + 0x07, 0x49, 0xf4, 0x18, 0xca, 0x8e, 0xe7, 0xf7, 0xa3, 0xf8, 0x06, 0xba, 0x7b, 0xcd, 0xaa, 0xa1, + 0xce, 0x20, 0xfc, 0x5a, 0xba, 0x8b, 0x4b, 0xce, 0xa0, 0x81, 0x30, 0x14, 0x03, 0x41, 0x9d, 0xa4, + 0xcb, 0xf7, 0xc6, 0x70, 0x5d, 0x9a, 0xdc, 0xf1, 0x80, 0x46, 0xed, 0xc1, 0xfa, 0x9e, 0x15, 0xa5, + 0xcb, 0x3b, 0xee, 0x18, 0x26, 0xc7, 0xa7, 0x0e, 0x85, 
0x20, 0x7e, 0x4c, 0xb2, 0xff, 0xfd, 0xc9, + 0x0c, 0x72, 0x2c, 0x4e, 0x49, 0xd4, 0x2f, 0xe1, 0xd6, 0xa5, 0xe6, 0x84, 0xc3, 0x32, 0x33, 0xcc, + 0x7d, 0x37, 0x33, 0xfc, 0x73, 0x1e, 0x2a, 0xd5, 0xf0, 0xc2, 0xb3, 0xb3, 0x21, 0x8a, 0x27, 0xf8, + 0x1d, 0x47, 0xe8, 0xbf, 0xb2, 0x64, 0x60, 0x8c, 0xb4, 0x1f, 0x49, 0xb3, 0x9c, 0xbe, 0x16, 0xa3, + 0xce, 0x31, 0x62, 0x9a, 0x65, 0x2a, 0xb5, 0xd4, 0x1e, 0xdc, 0xbc, 0xc4, 0xa5, 0x22, 0x88, 0x23, + 0xe6, 0x72, 0xdf, 0xd6, 0x5c, 0x1f, 0xde, 0xe4, 0xe6, 0x32, 0x4b, 0x87, 0xd9, 0x4c, 0x17, 0xea, + 0xc9, 0xc8, 0x42, 0x1d, 0xf7, 0x66, 0x76, 0xd5, 0x92, 0x90, 0x16, 0xeb, 0x05, 0xdc, 0xb9, 0xd2, + 0xac, 0x98, 0xeb, 0xa3, 0xd1, 0x05, 0xfb, 0xe1, 0xe4, 0x86, 0x47, 0x17, 0x6d, 0x08, 0x25, 0x69, + 0x91, 0xa1, 0x43, 0x80, 0xae, 0x1d, 0x9a, 0xe2, 0xc3, 0x65, 0xec, 0xcf, 0x71, 0x2f, 0xb6, 0x87, + 0x76, 0x28, 0x3e, 0x5b, 0x16, 0xbb, 0xc9, 0x23, 0xba, 0x05, 0xc5, 0x9e, 0xd3, 0x23, 0x26, 0xff, + 0xc6, 0x20, 0xbe, 0x9e, 0x32, 0x81, 0x71, 0xe1, 0x13, 0xf5, 0x17, 0x39, 0x28, 0xcb, 0x51, 0x40, + 0x8f, 0x60, 0x91, 0x99, 0x6d, 0x93, 0x30, 0x72, 0xbc, 0xf8, 0xc8, 0xc9, 0x5d, 0xeb, 0xbc, 0x3e, + 0xb4, 0xc3, 0xfd, 0x01, 0x08, 0x2f, 0x74, 0x33, 0x6d, 0xb4, 0x01, 0x70, 0xc6, 0x7c, 0x6a, 0x86, + 0xce, 0x4b, 0x22, 0xca, 0xd2, 0x22, 0x97, 0x9c, 0x38, 0x2f, 0x89, 0xba, 0x01, 0xc5, 0x74, 0xf0, + 0xa3, 0x55, 0xad, 0xaa, 0xc2, 0x42, 0x96, 0xff, 0x92, 0x3e, 0xdf, 0xe4, 0x61, 0x49, 0x4f, 0xfe, + 0x84, 0x70, 0x4c, 0x22, 0xab, 0x6d, 0x45, 0x16, 0x6a, 0xc0, 0x4c, 0xc8, 0xbc, 0x2e, 0x6e, 0x9c, + 0xc6, 0x7d, 0xf7, 0x1c, 0x21, 0xe0, 0x07, 0x09, 0xc1, 0x31, 0x09, 0xfa, 0x08, 0x4a, 0x76, 0x40, + 0xac, 0x88, 0x98, 0x91, 0xd3, 0x23, 0xe2, 0xbd, 0x69, 0x3d, 0xe1, 0x4c, 0xfe, 0xe9, 0xb0, 0x6d, + 0x24, 0xff, 0x74, 0xc0, 0x10, 0x77, 0x67, 0x02, 0x06, 0xee, 0xfb, 0xed, 0x14, 0x3c, 0x3b, 0x1e, + 0x1c, 0x77, 0x67, 0x02, 0xf5, 0xa7, 0x30, 0xc3, 0x47, 0x82, 0x56, 0x61, 0xe9, 0xc4, 0xa8, 0x1a, + 0xc3, 0xff, 0x2e, 0x28, 0xc1, 0x5c, 0x0d, 0x6b, 0x55, 0x43, 0xdb, 0x57, 0x72, 0xac, 0x81, 0x4f, + 0x9b, 0xcd, 0x7a, 0xf3, 0x50, 0xc9, 0xa3, 0x02, 0x4c, 0xef, 0xeb, 0x4d, 0x4d, 0x99, 0x42, 0xf3, + 0x50, 0xac, 0x55, 0x9b, 0x35, 0xad, 0xd1, 0xd0, 0xf6, 0x95, 0xe9, 0xbb, 0x04, 0x40, 0xfa, 0xdc, + 0x55, 0x82, 0x39, 0xf1, 0x99, 0x47, 0x79, 0x03, 0x2d, 0xc1, 0xfc, 0x23, 0x0d, 0x3f, 0x31, 0x4f, + 0x9b, 0x8d, 0xfa, 0x43, 0xad, 0xf1, 0x44, 0xc9, 0xa1, 0x32, 0x14, 0xd2, 0x56, 0x9e, 0xb5, 0x5a, + 0xfa, 0xc9, 0x49, 0x7d, 0xaf, 0xc1, 0x88, 0x01, 0x66, 0x85, 0x66, 0x1a, 0x2d, 0x42, 0x89, 0x43, + 0x85, 0x60, 0x66, 0xf7, 0x9f, 0x79, 0x58, 0x90, 0x8f, 0x53, 0x1a, 0xa0, 0xdf, 0xe5, 0x60, 0xf9, + 0x92, 0x33, 0x01, 0xfd, 0x70, 0xdc, 0x6d, 0xc7, 0x95, 0xc7, 0xd6, 0xfa, 0x83, 0xd7, 0x81, 0xc6, + 0x5b, 0x4f, 0x7d, 0xe7, 0xab, 0xbf, 0xfc, 0xfd, 0x97, 0xf9, 0x3b, 0xea, 0xfa, 0xf0, 0xff, 0x7f, + 0xc2, 0x07, 0xa2, 0xf2, 0x22, 0x0f, 0x72, 0x77, 0xd1, 0xd7, 0x39, 0xb8, 0x71, 0x45, 0x72, 0x40, + 0x1f, 0x5f, 0x27, 0x03, 0x5c, 0x99, 0xcb, 0xd6, 0x37, 0x12, 0xb8, 0xf4, 0x8f, 0x9a, 0xc1, 0x5a, + 0x54, 0xb7, 0xf9, 0x00, 0xb7, 0xd4, 0xb7, 0xa5, 0x01, 0x76, 0x18, 0xfe, 0x81, 0x35, 0xc2, 0xfb, + 0x20, 0x77, 0x77, 0xef, 0xab, 0x1c, 0xbc, 0x65, 0xd3, 0xde, 0xab, 0xc7, 0xb4, 0xb7, 0x9c, 0x8d, + 0x4a, 0x8b, 0x2d, 0xc0, 0x56, 0xee, 0x69, 0x4d, 0xa0, 0xba, 0xd4, 0xb5, 0xbc, 0xee, 0x36, 0x0d, + 0xba, 0x3b, 0x5d, 0xe2, 0xf1, 0xe5, 0xb9, 0x13, 0xab, 0x2c, 0xdf, 0x09, 0xaf, 0xf8, 0xbb, 0xd0, + 0x47, 0xb1, 0xe0, 0x5f, 0xb9, 0xdc, 0xd9, 0x2c, 0x87, 0xdc, 0xff, 0x77, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xeb, 0x48, 0xba, 0xeb, 0x69, 0x25, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search.pb.go new file mode 100644 index 0000000..80bd29a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search.pb.go @@ -0,0 +1,483 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/product_search.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Supported product search categories. +type ProductSearchCategory int32 + +const ( + // Default value used when a category is not specified. + ProductSearchCategory_PRODUCT_SEARCH_CATEGORY_UNSPECIFIED ProductSearchCategory = 0 + // Shoes category. + ProductSearchCategory_SHOES ProductSearchCategory = 1 + // Bags category. + ProductSearchCategory_BAGS ProductSearchCategory = 2 +) + +var ProductSearchCategory_name = map[int32]string{ + 0: "PRODUCT_SEARCH_CATEGORY_UNSPECIFIED", + 1: "SHOES", + 2: "BAGS", +} +var ProductSearchCategory_value = map[string]int32{ + "PRODUCT_SEARCH_CATEGORY_UNSPECIFIED": 0, + "SHOES": 1, + "BAGS": 2, +} + +func (x ProductSearchCategory) String() string { + return proto.EnumName(ProductSearchCategory_name, int32(x)) +} +func (ProductSearchCategory) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{0} +} + +// Specifies the fields to include in product search results. +type ProductSearchResultsView int32 + +const ( + // Product search results contain only `product_category` and `product_id`. + // Default value. + ProductSearchResultsView_BASIC ProductSearchResultsView = 0 + // Product search results contain `product_category`, `product_id`, + // `image_uri`, and `score`. + ProductSearchResultsView_FULL ProductSearchResultsView = 1 +) + +var ProductSearchResultsView_name = map[int32]string{ + 0: "BASIC", + 1: "FULL", +} +var ProductSearchResultsView_value = map[string]int32{ + "BASIC": 0, + "FULL": 1, +} + +func (x ProductSearchResultsView) String() string { + return proto.EnumName(ProductSearchResultsView_name, int32(x)) +} +func (ProductSearchResultsView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{1} +} + +// Parameters for a product search request. +type ProductSearchParams struct { + // The resource name of the catalog to search. + // + // Format is: `productSearch/catalogs/CATALOG_NAME`. + CatalogName string `protobuf:"bytes,1,opt,name=catalog_name,json=catalogName,proto3" json:"catalog_name,omitempty"` + // The category to search in. + // Optional. It is inferred by the system if it is not specified. + // [Deprecated] Use `product_category`. 
+ Category ProductSearchCategory `protobuf:"varint,2,opt,name=category,proto3,enum=google.cloud.vision.v1p3beta1.ProductSearchCategory" json:"category,omitempty"` + // The product category to search in. + // Optional. It is inferred by the system if it is not specified. + // Supported values are `bag`, `shoe`, `sunglasses`, `dress`, `outerwear`, + // `skirt`, `top`, `shorts`, and `pants`. + ProductCategory string `protobuf:"bytes,5,opt,name=product_category,json=productCategory,proto3" json:"product_category,omitempty"` + // The bounding polygon around the area of interest in the image. + // Optional. If it is not specified, system discretion will be applied. + // [Deprecated] Use `bounding_poly`. + NormalizedBoundingPoly *NormalizedBoundingPoly `protobuf:"bytes,3,opt,name=normalized_bounding_poly,json=normalizedBoundingPoly,proto3" json:"normalized_bounding_poly,omitempty"` + // The bounding polygon around the area of interest in the image. + // Optional. If it is not specified, system discretion will be applied. + BoundingPoly *BoundingPoly `protobuf:"bytes,9,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Specifies the verbosity of the product search results. + // Optional. Defaults to `BASIC`. + View ProductSearchResultsView `protobuf:"varint,4,opt,name=view,proto3,enum=google.cloud.vision.v1p3beta1.ProductSearchResultsView" json:"view,omitempty"` + // The resource name of a + // [ProductSet][google.cloud.vision.v1p3beta1.ProductSet] to be searched for + // similar images. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + ProductSet string `protobuf:"bytes,6,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The list of product categories to search in. Currently, we only consider + // the first category, and either "homegoods" or "apparel" should be + // specified. + ProductCategories []string `protobuf:"bytes,7,rep,name=product_categories,json=productCategories,proto3" json:"product_categories,omitempty"` + // The filtering expression. This can be used to restrict search results based + // on Product labels. We currently support an AND of OR of key-value + // expressions, where each expression within an OR must have the same key. + // + // For example, "(color = red OR color = blue) AND brand = Google" is + // acceptable, but not "(color = red OR brand = Google)" or "color: red". 
+ Filter string `protobuf:"bytes,8,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchParams) Reset() { *m = ProductSearchParams{} } +func (m *ProductSearchParams) String() string { return proto.CompactTextString(m) } +func (*ProductSearchParams) ProtoMessage() {} +func (*ProductSearchParams) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{0} +} +func (m *ProductSearchParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchParams.Unmarshal(m, b) +} +func (m *ProductSearchParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchParams.Marshal(b, m, deterministic) +} +func (dst *ProductSearchParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchParams.Merge(dst, src) +} +func (m *ProductSearchParams) XXX_Size() int { + return xxx_messageInfo_ProductSearchParams.Size(m) +} +func (m *ProductSearchParams) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchParams proto.InternalMessageInfo + +func (m *ProductSearchParams) GetCatalogName() string { + if m != nil { + return m.CatalogName + } + return "" +} + +func (m *ProductSearchParams) GetCategory() ProductSearchCategory { + if m != nil { + return m.Category + } + return ProductSearchCategory_PRODUCT_SEARCH_CATEGORY_UNSPECIFIED +} + +func (m *ProductSearchParams) GetProductCategory() string { + if m != nil { + return m.ProductCategory + } + return "" +} + +func (m *ProductSearchParams) GetNormalizedBoundingPoly() *NormalizedBoundingPoly { + if m != nil { + return m.NormalizedBoundingPoly + } + return nil +} + +func (m *ProductSearchParams) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *ProductSearchParams) GetView() ProductSearchResultsView { + if m != nil { + return m.View + } + return ProductSearchResultsView_BASIC +} + +func (m *ProductSearchParams) GetProductSet() string { + if m != nil { + return m.ProductSet + } + return "" +} + +func (m *ProductSearchParams) GetProductCategories() []string { + if m != nil { + return m.ProductCategories + } + return nil +} + +func (m *ProductSearchParams) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Results for a product search request. +type ProductSearchResults struct { + // Product category. + // [Deprecated] Use `product_category`. + Category ProductSearchCategory `protobuf:"varint,1,opt,name=category,proto3,enum=google.cloud.vision.v1p3beta1.ProductSearchCategory" json:"category,omitempty"` + // Product category. + // Supported values are `bag` and `shoe`. + // [Deprecated] `product_category` is provided in each Product. + ProductCategory string `protobuf:"bytes,4,opt,name=product_category,json=productCategory,proto3" json:"product_category,omitempty"` + // Timestamp of the index which provided these results. Changes made after + // this time are not reflected in the current results. + IndexTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // List of detected products. + Products []*ProductSearchResults_ProductInfo `protobuf:"bytes,3,rep,name=products,proto3" json:"products,omitempty"` + // List of results, one for each product match. 
+ Results []*ProductSearchResults_Result `protobuf:"bytes,5,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults) Reset() { *m = ProductSearchResults{} } +func (m *ProductSearchResults) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults) ProtoMessage() {} +func (*ProductSearchResults) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{1} +} +func (m *ProductSearchResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults.Unmarshal(m, b) +} +func (m *ProductSearchResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults.Merge(dst, src) +} +func (m *ProductSearchResults) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults.Size(m) +} +func (m *ProductSearchResults) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults proto.InternalMessageInfo + +func (m *ProductSearchResults) GetCategory() ProductSearchCategory { + if m != nil { + return m.Category + } + return ProductSearchCategory_PRODUCT_SEARCH_CATEGORY_UNSPECIFIED +} + +func (m *ProductSearchResults) GetProductCategory() string { + if m != nil { + return m.ProductCategory + } + return "" +} + +func (m *ProductSearchResults) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSearchResults) GetProducts() []*ProductSearchResults_ProductInfo { + if m != nil { + return m.Products + } + return nil +} + +func (m *ProductSearchResults) GetResults() []*ProductSearchResults_Result { + if m != nil { + return m.Results + } + return nil +} + +// Information about a product. +type ProductSearchResults_ProductInfo struct { + // Product ID. + ProductId string `protobuf:"bytes,1,opt,name=product_id,json=productId,proto3" json:"product_id,omitempty"` + // The URI of the image which matched the query image. + // + // This field is returned only if `view` is set to `FULL` in + // the request. + ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + // A confidence level on the match, ranging from 0 (no confidence) to + // 1 (full confidence). + // + // This field is returned only if `view` is set to `FULL` in + // the request. 
+ Score float32 `protobuf:"fixed32,3,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_ProductInfo) Reset() { *m = ProductSearchResults_ProductInfo{} } +func (m *ProductSearchResults_ProductInfo) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_ProductInfo) ProtoMessage() {} +func (*ProductSearchResults_ProductInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{1, 0} +} +func (m *ProductSearchResults_ProductInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_ProductInfo.Unmarshal(m, b) +} +func (m *ProductSearchResults_ProductInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_ProductInfo.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_ProductInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_ProductInfo.Merge(dst, src) +} +func (m *ProductSearchResults_ProductInfo) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_ProductInfo.Size(m) +} +func (m *ProductSearchResults_ProductInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_ProductInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_ProductInfo proto.InternalMessageInfo + +func (m *ProductSearchResults_ProductInfo) GetProductId() string { + if m != nil { + return m.ProductId + } + return "" +} + +func (m *ProductSearchResults_ProductInfo) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +func (m *ProductSearchResults_ProductInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Information about a product. +type ProductSearchResults_Result struct { + // The Product. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // A confidence level on the match, ranging from 0 (no confidence) to + // 1 (full confidence). + // + // This field is returned only if `view` is set to `FULL` in + // the request. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The resource name of the image from the product that is the closest match + // to the query. 
+ Image string `protobuf:"bytes,3,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_Result) Reset() { *m = ProductSearchResults_Result{} } +func (m *ProductSearchResults_Result) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_Result) ProtoMessage() {} +func (*ProductSearchResults_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_16e9353648db1cfe, []int{1, 1} +} +func (m *ProductSearchResults_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_Result.Unmarshal(m, b) +} +func (m *ProductSearchResults_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_Result.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_Result.Merge(dst, src) +} +func (m *ProductSearchResults_Result) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_Result.Size(m) +} +func (m *ProductSearchResults_Result) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_Result proto.InternalMessageInfo + +func (m *ProductSearchResults_Result) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *ProductSearchResults_Result) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ProductSearchResults_Result) GetImage() string { + if m != nil { + return m.Image + } + return "" +} + +func init() { + proto.RegisterType((*ProductSearchParams)(nil), "google.cloud.vision.v1p3beta1.ProductSearchParams") + proto.RegisterType((*ProductSearchResults)(nil), "google.cloud.vision.v1p3beta1.ProductSearchResults") + proto.RegisterType((*ProductSearchResults_ProductInfo)(nil), "google.cloud.vision.v1p3beta1.ProductSearchResults.ProductInfo") + proto.RegisterType((*ProductSearchResults_Result)(nil), "google.cloud.vision.v1p3beta1.ProductSearchResults.Result") + proto.RegisterEnum("google.cloud.vision.v1p3beta1.ProductSearchCategory", ProductSearchCategory_name, ProductSearchCategory_value) + proto.RegisterEnum("google.cloud.vision.v1p3beta1.ProductSearchResultsView", ProductSearchResultsView_name, ProductSearchResultsView_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/product_search.proto", fileDescriptor_product_search_16e9353648db1cfe) +} + +var fileDescriptor_product_search_16e9353648db1cfe = []byte{ + // 698 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0xdf, 0x6e, 0xd3, 0x30, + 0x14, 0xc6, 0x97, 0xfe, 0x5b, 0x73, 0x3a, 0xa0, 0x98, 0x31, 0x45, 0x85, 0x69, 0xdd, 0x90, 0xa0, + 0x0c, 0x48, 0xb4, 0x0e, 0x84, 0x18, 0x17, 0xd0, 0x76, 0xdd, 0x56, 0x31, 0x6d, 0x91, 0xdb, 0x22, + 0x01, 0x17, 0x91, 0x9b, 0x78, 0xc1, 0x52, 0x12, 0x47, 0x49, 0xda, 0x51, 0xee, 0x78, 0x20, 0x5e, + 0x88, 0x27, 0xe1, 0x12, 0xd5, 0x49, 0xba, 0x15, 0x75, 0x1b, 0x9b, 0xc4, 0x55, 0x75, 0x4e, 0xfd, + 0xfb, 0x3e, 0xfb, 0xe4, 0xb3, 0xa1, 0x6e, 0x73, 0x6e, 0x3b, 0x54, 0x33, 0x1d, 0x3e, 0xb4, 0xb4, + 0x11, 0x0b, 0x19, 0xf7, 0xb4, 0xd1, 0x96, 0xbf, 0x3d, 0xa0, 0x11, 0xd9, 0xd2, 0xfc, 0x80, 0x5b, + 0x43, 0x33, 0x32, 0x42, 0x4a, 0x02, 0xf3, 0xab, 0xea, 0x07, 0x3c, 0xe2, 0x68, 0x35, 0x66, 0x54, + 0xc1, 0xa8, 0x31, 0xa3, 0x4e, 0x99, 0xca, 0xc3, 0x44, 
0x92, 0xf8, 0x4c, 0x23, 0x9e, 0xc7, 0x23, + 0x12, 0x31, 0xee, 0x85, 0x31, 0x5c, 0x79, 0x7e, 0xb9, 0xa1, 0x4d, 0xb9, 0x4b, 0xa3, 0x60, 0x9c, + 0xac, 0xde, 0xb9, 0xce, 0xf6, 0x8c, 0x90, 0x06, 0x23, 0x66, 0xd2, 0x84, 0x5d, 0x4b, 0x58, 0x51, + 0x0d, 0x86, 0x27, 0x5a, 0xc4, 0x5c, 0x1a, 0x46, 0xc4, 0xf5, 0xe3, 0x05, 0x1b, 0x3f, 0x73, 0x70, + 0x4f, 0x8f, 0x15, 0xba, 0x42, 0x40, 0x27, 0x01, 0x71, 0x43, 0xb4, 0x0e, 0x4b, 0x26, 0x89, 0x88, + 0xc3, 0x6d, 0xc3, 0x23, 0x2e, 0x55, 0xa4, 0xaa, 0x54, 0x93, 0x71, 0x29, 0xe9, 0x1d, 0x11, 0x97, + 0x22, 0x1d, 0x8a, 0x26, 0x89, 0xa8, 0xcd, 0x83, 0xb1, 0x92, 0xa9, 0x4a, 0xb5, 0xdb, 0xf5, 0x97, + 0xea, 0xa5, 0x53, 0x51, 0x67, 0x8c, 0x5a, 0x09, 0x8b, 0xa7, 0x2a, 0xe8, 0x29, 0x94, 0xd3, 0xd3, + 0x4c, 0x95, 0xf3, 0xc2, 0xf8, 0x4e, 0xd2, 0x4f, 0x21, 0xc4, 0x41, 0xf1, 0x78, 0xe0, 0x12, 0x87, + 0x7d, 0xa7, 0x96, 0x31, 0xe0, 0x43, 0xcf, 0x62, 0x9e, 0x6d, 0xf8, 0xdc, 0x19, 0x2b, 0xd9, 0xaa, + 0x54, 0x2b, 0xd5, 0x5f, 0x5d, 0xb1, 0x99, 0xa3, 0x29, 0xde, 0x4c, 0x68, 0x9d, 0x3b, 0x63, 0xbc, + 0xe2, 0xcd, 0xed, 0x23, 0x1d, 0x6e, 0xcd, 0xba, 0xc8, 0xc2, 0xe5, 0xd9, 0x15, 0x2e, 0x33, 0xda, + 0x4b, 0x83, 0xf3, 0x8a, 0x1f, 0x20, 0x37, 0x62, 0xf4, 0x54, 0xc9, 0x89, 0xd9, 0xbd, 0xbe, 0xce, + 0xec, 0x30, 0x0d, 0x87, 0x4e, 0x14, 0x7e, 0x64, 0xf4, 0x14, 0x0b, 0x11, 0xb4, 0x06, 0xa5, 0xb3, + 0x20, 0x44, 0x4a, 0x41, 0x4c, 0x0d, 0xfc, 0x14, 0x8a, 0xd0, 0x0b, 0x40, 0x7f, 0xcd, 0x96, 0xd1, + 0x50, 0x59, 0xac, 0x66, 0x6b, 0x32, 0xbe, 0x3b, 0x3b, 0x5d, 0x46, 0x43, 0xb4, 0x02, 0x85, 0x13, + 0xe6, 0x44, 0x34, 0x50, 0x8a, 0x42, 0x2a, 0xa9, 0x36, 0x7e, 0xe5, 0x60, 0x79, 0xde, 0x56, 0x66, + 0xd2, 0x20, 0xfd, 0xb7, 0x34, 0xe4, 0xe6, 0xa7, 0xe1, 0x0d, 0x00, 0xf3, 0x2c, 0xfa, 0xcd, 0x98, + 0xc4, 0x5b, 0x84, 0xb1, 0x54, 0xaf, 0xa4, 0xf6, 0x69, 0xf6, 0xd5, 0x5e, 0x9a, 0x7d, 0x2c, 0x8b, + 0xd5, 0x93, 0x1a, 0x7d, 0x81, 0x62, 0xa2, 0x16, 0x2a, 0xd9, 0x6a, 0xb6, 0x56, 0xaa, 0xbf, 0xbb, + 0xc1, 0x97, 0x48, 0x9b, 0x1d, 0xef, 0x84, 0xe3, 0xa9, 0x20, 0xea, 0xc1, 0x62, 0x10, 0x2f, 0x50, + 0xf2, 0x42, 0x7b, 0xe7, 0x26, 0xda, 0xf1, 0x2f, 0x4e, 0xa5, 0x2a, 0x06, 0x94, 0xce, 0xd9, 0xa1, + 0x55, 0x48, 0xbf, 0xb3, 0xc1, 0xac, 0xe4, 0xa2, 0xca, 0x49, 0xa7, 0x63, 0xa1, 0x07, 0x20, 0x33, + 0x97, 0xd8, 0xd4, 0x18, 0x06, 0x4c, 0x8c, 0x46, 0xc6, 0x45, 0xd1, 0xe8, 0x07, 0x0c, 0x2d, 0x43, + 0x3e, 0x34, 0x79, 0x40, 0xc5, 0x9d, 0xc9, 0xe0, 0xb8, 0xa8, 0x8c, 0xa0, 0x10, 0x7b, 0xa2, 0xf7, + 0xb0, 0x98, 0x28, 0x09, 0xe1, 0x52, 0xfd, 0xf1, 0xbf, 0x1d, 0x00, 0xa7, 0xd8, 0x99, 0x43, 0xe6, + 0x9c, 0xc3, 0xa4, 0x2b, 0xf6, 0x20, 0x7c, 0x65, 0x1c, 0x17, 0x9b, 0x7d, 0xb8, 0x3f, 0x37, 0x14, + 0xe8, 0x09, 0x3c, 0xd2, 0xf1, 0xf1, 0x6e, 0xbf, 0xd5, 0x33, 0xba, 0xed, 0x06, 0x6e, 0x1d, 0x18, + 0xad, 0x46, 0xaf, 0xbd, 0x7f, 0x8c, 0x3f, 0x19, 0xfd, 0xa3, 0xae, 0xde, 0x6e, 0x75, 0xf6, 0x3a, + 0xed, 0xdd, 0xf2, 0x02, 0x92, 0x21, 0xdf, 0x3d, 0x38, 0x6e, 0x77, 0xcb, 0x12, 0x2a, 0x42, 0xae, + 0xd9, 0xd8, 0xef, 0x96, 0x33, 0x9b, 0x1a, 0x28, 0x17, 0xdd, 0x9e, 0x09, 0xd0, 0x6c, 0x74, 0x3b, + 0xad, 0xf2, 0xc2, 0x04, 0xd8, 0xeb, 0x1f, 0x1e, 0x96, 0xa5, 0xe6, 0x0f, 0x09, 0xd6, 0x4d, 0xee, + 0x5e, 0x7e, 0xd4, 0x26, 0x9a, 0x7d, 0x37, 0x27, 0x29, 0xd3, 0xa5, 0xcf, 0xad, 0x04, 0xb2, 0xb9, + 0x43, 0x3c, 0x5b, 0xe5, 0x81, 0xad, 0xd9, 0xd4, 0x13, 0x19, 0xd4, 0xe2, 0xbf, 0x88, 0xcf, 0xc2, + 0x0b, 0x1e, 0xf3, 0xb7, 0x71, 0xe3, 0xb7, 0x24, 0x0d, 0x0a, 0x02, 0xd9, 0xfe, 0x13, 0x00, 0x00, + 0xff, 0xff, 0x4f, 0x50, 0xae, 0xbc, 0x9d, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search_service.pb.go 
b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search_service.pb.go new file mode 100644 index 0000000..48a4d52 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/product_search_service.pb.go @@ -0,0 +1,2968 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/product_search_service.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates the possible states that the batch request can be in. +type BatchOperationMetadata_State int32 + +const ( + // Invalid. + BatchOperationMetadata_STATE_UNSPECIFIED BatchOperationMetadata_State = 0 + // Request is actively being processed. + BatchOperationMetadata_PROCESSING BatchOperationMetadata_State = 1 + // The request is done and at least one item has been successfully + // processed. + BatchOperationMetadata_SUCCESSFUL BatchOperationMetadata_State = 2 + // The request is done and no item has been successfully processed. + BatchOperationMetadata_FAILED BatchOperationMetadata_State = 3 + // The request is done after the longrunning.Operations.CancelOperation has + // been called by the user. Any records that were processed before the + // cancel command are output as specified in the request. + BatchOperationMetadata_CANCELLED BatchOperationMetadata_State = 4 +) + +var BatchOperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PROCESSING", + 2: "SUCCESSFUL", + 3: "FAILED", + 4: "CANCELLED", +} +var BatchOperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PROCESSING": 1, + "SUCCESSFUL": 2, + "FAILED": 3, + "CANCELLED": 4, +} + +func (x BatchOperationMetadata_State) String() string { + return proto.EnumName(BatchOperationMetadata_State_name, int32(x)) +} +func (BatchOperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{28, 0} +} + +// A Product contains ReferenceImages. +type Product struct { + // The resource name of the product. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + // + // This field is ignored when creating a product. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this Product. Must not be empty. Must be at most + // 4096 characters long. 
+ DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User-provided metadata to be stored with this product. Must be at most 4096 + // characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The category for the product identified by the reference image. This should + // be either "homegoods" or "apparel". + // + // This field is immutable. + ProductCategory string `protobuf:"bytes,4,opt,name=product_category,json=productCategory,proto3" json:"product_category,omitempty"` + // Key-value pairs that can be attached to a product. At query time, + // constraints can be specified based on the product_labels. + // + // Note that integer values can be provided as strings, e.g. "1199". Only + // strings with integer values can match a range-based restriction which is + // to be supported soon. + // + // Multiple values can be assigned to the same key. One product may have up to + // 100 product_labels. + ProductLabels []*Product_KeyValue `protobuf:"bytes,5,rep,name=product_labels,json=productLabels,proto3" json:"product_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product) Reset() { *m = Product{} } +func (m *Product) String() string { return proto.CompactTextString(m) } +func (*Product) ProtoMessage() {} +func (*Product) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{0} +} +func (m *Product) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product.Unmarshal(m, b) +} +func (m *Product) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product.Marshal(b, m, deterministic) +} +func (dst *Product) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product.Merge(dst, src) +} +func (m *Product) XXX_Size() int { + return xxx_messageInfo_Product.Size(m) +} +func (m *Product) XXX_DiscardUnknown() { + xxx_messageInfo_Product.DiscardUnknown(m) +} + +var xxx_messageInfo_Product proto.InternalMessageInfo + +func (m *Product) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Product) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Product) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Product) GetProductCategory() string { + if m != nil { + return m.ProductCategory + } + return "" +} + +func (m *Product) GetProductLabels() []*Product_KeyValue { + if m != nil { + return m.ProductLabels + } + return nil +} + +// A product label represented as a key-value pair. +type Product_KeyValue struct { + // The key of the label attached to the product. Cannot be empty and cannot + // exceed 128 bytes. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value of the label attached to the product. Cannot be empty and + // cannot exceed 128 bytes. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product_KeyValue) Reset() { *m = Product_KeyValue{} } +func (m *Product_KeyValue) String() string { return proto.CompactTextString(m) } +func (*Product_KeyValue) ProtoMessage() {} +func (*Product_KeyValue) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{0, 0} +} +func (m *Product_KeyValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product_KeyValue.Unmarshal(m, b) +} +func (m *Product_KeyValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product_KeyValue.Marshal(b, m, deterministic) +} +func (dst *Product_KeyValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product_KeyValue.Merge(dst, src) +} +func (m *Product_KeyValue) XXX_Size() int { + return xxx_messageInfo_Product_KeyValue.Size(m) +} +func (m *Product_KeyValue) XXX_DiscardUnknown() { + xxx_messageInfo_Product_KeyValue.DiscardUnknown(m) +} + +var xxx_messageInfo_Product_KeyValue proto.InternalMessageInfo + +func (m *Product_KeyValue) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Product_KeyValue) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A ProductSet contains Products. A ProductSet can contain a maximum of 1 +// million reference images. If the limit is exceeded, periodic indexing will +// fail. +type ProductSet struct { + // The resource name of the ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + // + // This field is ignored when creating a ProductSet. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this ProductSet. Must not be empty. Must be at + // most 4096 characters long. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The time at which this ProductSet was last indexed. Query + // results will reflect all updates before this time. If this ProductSet has + // never been indexed, this field is 0. + // + // This field is ignored when creating a ProductSet. + IndexTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // Output only. If there was an error with indexing the product set, the field + // is populated. + // + // This field is ignored when creating a ProductSet. 
+ IndexError *status.Status `protobuf:"bytes,4,opt,name=index_error,json=indexError,proto3" json:"index_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSet) Reset() { *m = ProductSet{} } +func (m *ProductSet) String() string { return proto.CompactTextString(m) } +func (*ProductSet) ProtoMessage() {} +func (*ProductSet) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{1} +} +func (m *ProductSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSet.Unmarshal(m, b) +} +func (m *ProductSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSet.Marshal(b, m, deterministic) +} +func (dst *ProductSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSet.Merge(dst, src) +} +func (m *ProductSet) XXX_Size() int { + return xxx_messageInfo_ProductSet.Size(m) +} +func (m *ProductSet) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSet proto.InternalMessageInfo + +func (m *ProductSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ProductSet) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ProductSet) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSet) GetIndexError() *status.Status { + if m != nil { + return m.IndexError + } + return nil +} + +// A `ReferenceImage` represents a product image and its associated metadata, +// such as bounding boxes. +type ReferenceImage struct { + // The resource name of the reference image. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. + // + // This field is ignored when creating a reference image. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The Google Cloud Storage URI of the reference image. + // + // The URI must start with `gs://`. + // + // Required. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // Bounding polygons around the areas of interest in the reference image. + // Optional. If this field is empty, the system will try to detect regions of + // interest. At most 10 bounding polygons will be used. + // + // The provided shape is converted into a non-rotated rectangle. Once + // converted, the small edge of the rectangle must be greater than or equal + // to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 + // is not). 
+ BoundingPolys []*BoundingPoly `protobuf:"bytes,3,rep,name=bounding_polys,json=boundingPolys,proto3" json:"bounding_polys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReferenceImage) Reset() { *m = ReferenceImage{} } +func (m *ReferenceImage) String() string { return proto.CompactTextString(m) } +func (*ReferenceImage) ProtoMessage() {} +func (*ReferenceImage) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{2} +} +func (m *ReferenceImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReferenceImage.Unmarshal(m, b) +} +func (m *ReferenceImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReferenceImage.Marshal(b, m, deterministic) +} +func (dst *ReferenceImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReferenceImage.Merge(dst, src) +} +func (m *ReferenceImage) XXX_Size() int { + return xxx_messageInfo_ReferenceImage.Size(m) +} +func (m *ReferenceImage) XXX_DiscardUnknown() { + xxx_messageInfo_ReferenceImage.DiscardUnknown(m) +} + +var xxx_messageInfo_ReferenceImage proto.InternalMessageInfo + +func (m *ReferenceImage) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReferenceImage) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ReferenceImage) GetBoundingPolys() []*BoundingPoly { + if m != nil { + return m.BoundingPolys + } + return nil +} + +// Request message for the `CreateProduct` method. +type CreateProductRequest struct { + // The project in which the Product should be created. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The product to create. + Product *Product `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + // A user-supplied resource id for this Product. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. 
+ ProductId string `protobuf:"bytes,3,opt,name=product_id,json=productId,proto3" json:"product_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductRequest) Reset() { *m = CreateProductRequest{} } +func (m *CreateProductRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductRequest) ProtoMessage() {} +func (*CreateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{3} +} +func (m *CreateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductRequest.Unmarshal(m, b) +} +func (m *CreateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductRequest.Merge(dst, src) +} +func (m *CreateProductRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductRequest.Size(m) +} +func (m *CreateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductRequest proto.InternalMessageInfo + +func (m *CreateProductRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *CreateProductRequest) GetProductId() string { + if m != nil { + return m.ProductId + } + return "" +} + +// Request message for the `ListProducts` method. +type ListProductsRequest struct { + // The project OR ProductSet from which Products should be listed. + // + // Format: + // `projects/PROJECT_ID/locations/LOC_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsRequest) Reset() { *m = ListProductsRequest{} } +func (m *ListProductsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsRequest) ProtoMessage() {} +func (*ListProductsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{4} +} +func (m *ListProductsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsRequest.Unmarshal(m, b) +} +func (m *ListProductsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsRequest.Merge(dst, src) +} +func (m *ListProductsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsRequest.Size(m) +} +func (m *ListProductsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsRequest proto.InternalMessageInfo + +func (m *ListProductsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProducts` method. +type ListProductsResponse struct { + // List of products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsResponse) Reset() { *m = ListProductsResponse{} } +func (m *ListProductsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsResponse) ProtoMessage() {} +func (*ListProductsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{5} +} +func (m *ListProductsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsResponse.Unmarshal(m, b) +} +func (m *ListProductsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsResponse.Merge(dst, src) +} +func (m *ListProductsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsResponse.Size(m) +} +func (m *ListProductsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsResponse proto.InternalMessageInfo + +func (m *ListProductsResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProduct` method. 
+type GetProductRequest struct { + // Resource name of the Product to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductRequest) Reset() { *m = GetProductRequest{} } +func (m *GetProductRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductRequest) ProtoMessage() {} +func (*GetProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{6} +} +func (m *GetProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductRequest.Unmarshal(m, b) +} +func (m *GetProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductRequest.Merge(dst, src) +} +func (m *GetProductRequest) XXX_Size() int { + return xxx_messageInfo_GetProductRequest.Size(m) +} +func (m *GetProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductRequest proto.InternalMessageInfo + +func (m *GetProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProduct` method. +type UpdateProductRequest struct { + // The Product resource which replaces the one on the server. + // product.name is immutable. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields + // to update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask paths include `product_labels`, `display_name` and + // `description`. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductRequest) Reset() { *m = UpdateProductRequest{} } +func (m *UpdateProductRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductRequest) ProtoMessage() {} +func (*UpdateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{7} +} +func (m *UpdateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductRequest.Unmarshal(m, b) +} +func (m *UpdateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductRequest.Merge(dst, src) +} +func (m *UpdateProductRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductRequest.Size(m) +} +func (m *UpdateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductRequest proto.InternalMessageInfo + +func (m *UpdateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *UpdateProductRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProduct` method. +type DeleteProductRequest struct { + // Resource name of product to delete. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductRequest) Reset() { *m = DeleteProductRequest{} } +func (m *DeleteProductRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductRequest) ProtoMessage() {} +func (*DeleteProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{8} +} +func (m *DeleteProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductRequest.Unmarshal(m, b) +} +func (m *DeleteProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductRequest.Merge(dst, src) +} +func (m *DeleteProductRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductRequest.Size(m) +} +func (m *DeleteProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductRequest proto.InternalMessageInfo + +func (m *DeleteProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateProductSet` method. +type CreateProductSetRequest struct { + // The project in which the ProductSet should be created. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ProductSet to create. 
+ ProductSet *ProductSet `protobuf:"bytes,2,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // A user-supplied resource id for this ProductSet. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. + ProductSetId string `protobuf:"bytes,3,opt,name=product_set_id,json=productSetId,proto3" json:"product_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductSetRequest) Reset() { *m = CreateProductSetRequest{} } +func (m *CreateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductSetRequest) ProtoMessage() {} +func (*CreateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{9} +} +func (m *CreateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductSetRequest.Unmarshal(m, b) +} +func (m *CreateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductSetRequest.Merge(dst, src) +} +func (m *CreateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductSetRequest.Size(m) +} +func (m *CreateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductSetRequest proto.InternalMessageInfo + +func (m *CreateProductSetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *CreateProductSetRequest) GetProductSetId() string { + if m != nil { + return m.ProductSetId + } + return "" +} + +// Request message for the `ListProductSets` method. +type ListProductSetsRequest struct { + // The project from which ProductSets should be listed. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsRequest) Reset() { *m = ListProductSetsRequest{} } +func (m *ListProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsRequest) ProtoMessage() {} +func (*ListProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{10} +} +func (m *ListProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsRequest.Unmarshal(m, b) +} +func (m *ListProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsRequest.Merge(dst, src) +} +func (m *ListProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductSetsRequest.Size(m) +} +func (m *ListProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsRequest proto.InternalMessageInfo + +func (m *ListProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductSets` method. +type ListProductSetsResponse struct { + // List of ProductSets. + ProductSets []*ProductSet `protobuf:"bytes,1,rep,name=product_sets,json=productSets,proto3" json:"product_sets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsResponse) Reset() { *m = ListProductSetsResponse{} } +func (m *ListProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsResponse) ProtoMessage() {} +func (*ListProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{11} +} +func (m *ListProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsResponse.Unmarshal(m, b) +} +func (m *ListProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsResponse.Merge(dst, src) +} +func (m *ListProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductSetsResponse.Size(m) +} +func (m *ListProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsResponse proto.InternalMessageInfo + +func (m *ListProductSetsResponse) GetProductSets() []*ProductSet { + if m != nil { + return m.ProductSets + } + return nil +} + +func (m *ListProductSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProductSet` method. +type GetProductSetRequest struct { + // Resource name of the ProductSet to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductSetRequest) Reset() { *m = GetProductSetRequest{} } +func (m *GetProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductSetRequest) ProtoMessage() {} +func (*GetProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{12} +} +func (m *GetProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductSetRequest.Unmarshal(m, b) +} +func (m *GetProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductSetRequest.Merge(dst, src) +} +func (m *GetProductSetRequest) XXX_Size() int { + return xxx_messageInfo_GetProductSetRequest.Size(m) +} +func (m *GetProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductSetRequest proto.InternalMessageInfo + +func (m *GetProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProductSet` method. +type UpdateProductSetRequest struct { + // The ProductSet resource which replaces the one on the server. 
+ ProductSet *ProductSet `protobuf:"bytes,1,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields to + // update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask path is `display_name`. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductSetRequest) Reset() { *m = UpdateProductSetRequest{} } +func (m *UpdateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductSetRequest) ProtoMessage() {} +func (*UpdateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{13} +} +func (m *UpdateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductSetRequest.Unmarshal(m, b) +} +func (m *UpdateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductSetRequest.Merge(dst, src) +} +func (m *UpdateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductSetRequest.Size(m) +} +func (m *UpdateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductSetRequest proto.InternalMessageInfo + +func (m *UpdateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *UpdateProductSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProductSet` method. +type DeleteProductSetRequest struct { + // Resource name of the ProductSet to delete. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductSetRequest) Reset() { *m = DeleteProductSetRequest{} } +func (m *DeleteProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductSetRequest) ProtoMessage() {} +func (*DeleteProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{14} +} +func (m *DeleteProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductSetRequest.Unmarshal(m, b) +} +func (m *DeleteProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductSetRequest.Merge(dst, src) +} +func (m *DeleteProductSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductSetRequest.Size(m) +} +func (m *DeleteProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductSetRequest proto.InternalMessageInfo + +func (m *DeleteProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateReferenceImage` method. +type CreateReferenceImageRequest struct { + // Resource name of the product in which to create the reference image. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The reference image to create. + // If an image ID is specified, it is ignored. + ReferenceImage *ReferenceImage `protobuf:"bytes,2,opt,name=reference_image,json=referenceImage,proto3" json:"reference_image,omitempty"` + // A user-supplied resource id for the ReferenceImage to be added. If set, + // the server will attempt to use this value as the resource id. If it is + // already in use, an error is returned with code ALREADY_EXISTS. Must be at + // most 128 characters long. It cannot contain the character `/`. 
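+ //
+ // A rough sketch of building this request, assuming a ProductSearchClient
+ // `c`, a context `ctx`, hypothetical resource names, and the Uri field of the
+ // ReferenceImage message defined earlier in this file:
+ //
+ //   img, err := c.CreateReferenceImage(ctx, &CreateReferenceImageRequest{
+ //       Parent:           "projects/my-project/locations/us-west1/products/my-product-1",
+ //       ReferenceImage:   &ReferenceImage{Uri: "gs://my-bucket/images/shoe-01.jpg"},
+ //       ReferenceImageId: "shoe-01", // optional; at most 128 characters, no '/'
+ //   })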
+ ReferenceImageId string `protobuf:"bytes,3,opt,name=reference_image_id,json=referenceImageId,proto3" json:"reference_image_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateReferenceImageRequest) Reset() { *m = CreateReferenceImageRequest{} } +func (m *CreateReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*CreateReferenceImageRequest) ProtoMessage() {} +func (*CreateReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{15} +} +func (m *CreateReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateReferenceImageRequest.Unmarshal(m, b) +} +func (m *CreateReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *CreateReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateReferenceImageRequest.Merge(dst, src) +} +func (m *CreateReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_CreateReferenceImageRequest.Size(m) +} +func (m *CreateReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateReferenceImageRequest proto.InternalMessageInfo + +func (m *CreateReferenceImageRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateReferenceImageRequest) GetReferenceImage() *ReferenceImage { + if m != nil { + return m.ReferenceImage + } + return nil +} + +func (m *CreateReferenceImageRequest) GetReferenceImageId() string { + if m != nil { + return m.ReferenceImageId + } + return "" +} + +// Request message for the `ListReferenceImages` method. +type ListReferenceImagesRequest struct { + // Resource name of the product containing the reference images. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results to be returned. This is the value + // of `nextPageToken` returned in a previous reference image list request. + // + // Defaults to the first page if not specified. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesRequest) Reset() { *m = ListReferenceImagesRequest{} } +func (m *ListReferenceImagesRequest) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesRequest) ProtoMessage() {} +func (*ListReferenceImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{16} +} +func (m *ListReferenceImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesRequest.Unmarshal(m, b) +} +func (m *ListReferenceImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesRequest.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesRequest.Merge(dst, src) +} +func (m *ListReferenceImagesRequest) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesRequest.Size(m) +} +func (m *ListReferenceImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesRequest proto.InternalMessageInfo + +func (m *ListReferenceImagesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListReferenceImagesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListReferenceImages` method. +type ListReferenceImagesResponse struct { + // The list of reference images. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesResponse) Reset() { *m = ListReferenceImagesResponse{} } +func (m *ListReferenceImagesResponse) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesResponse) ProtoMessage() {} +func (*ListReferenceImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{17} +} +func (m *ListReferenceImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesResponse.Unmarshal(m, b) +} +func (m *ListReferenceImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesResponse.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesResponse.Merge(dst, src) +} +func (m *ListReferenceImagesResponse) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesResponse.Size(m) +} +func (m *ListReferenceImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesResponse proto.InternalMessageInfo + +func (m *ListReferenceImagesResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ListReferenceImagesResponse) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetReferenceImage` method. +type GetReferenceImageRequest struct { + // The resource name of the ReferenceImage to get. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReferenceImageRequest) Reset() { *m = GetReferenceImageRequest{} } +func (m *GetReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*GetReferenceImageRequest) ProtoMessage() {} +func (*GetReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{18} +} +func (m *GetReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReferenceImageRequest.Unmarshal(m, b) +} +func (m *GetReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *GetReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReferenceImageRequest.Merge(dst, src) +} +func (m *GetReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_GetReferenceImageRequest.Size(m) +} +func (m *GetReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReferenceImageRequest proto.InternalMessageInfo + +func (m *GetReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `DeleteReferenceImage` method. +type DeleteReferenceImageRequest struct { + // The resource name of the reference image to delete. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteReferenceImageRequest) Reset() { *m = DeleteReferenceImageRequest{} } +func (m *DeleteReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteReferenceImageRequest) ProtoMessage() {} +func (*DeleteReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{19} +} +func (m *DeleteReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteReferenceImageRequest.Unmarshal(m, b) +} +func (m *DeleteReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteReferenceImageRequest.Merge(dst, src) +} +func (m *DeleteReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_DeleteReferenceImageRequest.Size(m) +} +func (m *DeleteReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteReferenceImageRequest proto.InternalMessageInfo + +func (m *DeleteReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `AddProductToProductSet` method. +type AddProductToProductSetRequest struct { + // The resource name for the ProductSet to modify. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be added to this ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddProductToProductSetRequest) Reset() { *m = AddProductToProductSetRequest{} } +func (m *AddProductToProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*AddProductToProductSetRequest) ProtoMessage() {} +func (*AddProductToProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{20} +} +func (m *AddProductToProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddProductToProductSetRequest.Unmarshal(m, b) +} +func (m *AddProductToProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddProductToProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *AddProductToProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddProductToProductSetRequest.Merge(dst, src) +} +func (m *AddProductToProductSetRequest) XXX_Size() int { + return xxx_messageInfo_AddProductToProductSetRequest.Size(m) +} +func (m *AddProductToProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddProductToProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddProductToProductSetRequest proto.InternalMessageInfo + +func (m *AddProductToProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AddProductToProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `RemoveProductFromProductSet` method. +type RemoveProductFromProductSetRequest struct { + // The resource name for the ProductSet to modify. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be removed from this ProductSet. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveProductFromProductSetRequest) Reset() { *m = RemoveProductFromProductSetRequest{} } +func (m *RemoveProductFromProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveProductFromProductSetRequest) ProtoMessage() {} +func (*RemoveProductFromProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{21} +} +func (m *RemoveProductFromProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Unmarshal(m, b) +} +func (m *RemoveProductFromProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveProductFromProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveProductFromProductSetRequest.Merge(dst, src) +} +func (m *RemoveProductFromProductSetRequest) XXX_Size() int { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Size(m) +} +func (m *RemoveProductFromProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveProductFromProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveProductFromProductSetRequest proto.InternalMessageInfo + +func (m *RemoveProductFromProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RemoveProductFromProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `ListProductsInProductSet` method. +type ListProductsInProductSetRequest struct { + // The ProductSet resource for which to retrieve Products. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
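+ //
+ // A short sketch tying these requests together, assuming a ProductSearchClient
+ // `c`, a context `ctx`, and hypothetical resource names: first add a Product
+ // to a ProductSet, then list the Products in that set:
+ //
+ //   _, err := c.AddProductToProductSet(ctx, &AddProductToProductSetRequest{
+ //       Name:    "projects/my-project/locations/us-west1/productSets/my-product-set",
+ //       Product: "projects/my-project/locations/us-west1/products/my-product-1",
+ //   })
+ //   // ...
+ //   resp, err := c.ListProductsInProductSet(ctx, &ListProductsInProductSetRequest{
+ //       Name:     "projects/my-project/locations/us-west1/productSets/my-product-set",
+ //       PageSize: 10,
+ //   })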
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetRequest) Reset() { *m = ListProductsInProductSetRequest{} } +func (m *ListProductsInProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetRequest) ProtoMessage() {} +func (*ListProductsInProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{22} +} +func (m *ListProductsInProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetRequest.Unmarshal(m, b) +} +func (m *ListProductsInProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetRequest.Merge(dst, src) +} +func (m *ListProductsInProductSetRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetRequest.Size(m) +} +func (m *ListProductsInProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetRequest proto.InternalMessageInfo + +func (m *ListProductsInProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListProductsInProductSetRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsInProductSetRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductsInProductSet` method. +type ListProductsInProductSetResponse struct { + // The list of Products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetResponse) Reset() { *m = ListProductsInProductSetResponse{} } +func (m *ListProductsInProductSetResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetResponse) ProtoMessage() {} +func (*ListProductsInProductSetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{23} +} +func (m *ListProductsInProductSetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetResponse.Unmarshal(m, b) +} +func (m *ListProductsInProductSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetResponse.Merge(dst, src) +} +func (m *ListProductsInProductSetResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetResponse.Size(m) +} +func (m *ListProductsInProductSetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetResponse proto.InternalMessageInfo + +func (m *ListProductsInProductSetResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsInProductSetResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The Google Cloud Storage location for a csv file which preserves a list of +// ImportProductSetRequests in each line. +type ImportProductSetsGcsSource struct { + // The Google Cloud Storage URI of the input csv file. + // + // The URI must start with gs:// + // + // The format of the input csv file should be one image per line. + // In each line, there are 6 columns. + // 1. image_uri + // 2, image_id + // 3. product_set_id + // 4. product_id + // 5, product_category + // 6, product_display_name + // 7, labels + // 8. bounding_poly + // + // Columns 1, 3, 4, and 5 are required, other columns are optional. A new + // ProductSet/Product with the same id will be created on the fly + // if the ProductSet/Product specified by product_set_id/product_id does not + // exist. + // + // The image_id field is optional but has to be unique if provided. If it is + // empty, we will automatically assign an unique id to the image. + // + // The product_display_name field is optional. If it is empty, a space (" ") + // is used as the place holder for the product display_name, which can + // be updated later through the realtime API. + // + // If the Product with product_id already exists, the fields + // product_display_name, product_category and labels are ignored. + // + // If a Product doesn't exist and needs to be created on the fly, the + // product_display_name field refers to + // [Product.display_name][google.cloud.vision.v1p3beta1.Product.display_name], + // the product_category field refers to + // [Product.product_category][google.cloud.vision.v1p3beta1.Product.product_category], + // and the labels field refers to [Product.labels][]. 
+ // + // Labels (optional) should be a line containing a list of comma-separated + // key-value pairs, with the format + // "key_1=value_1,key_2=value_2,...,key_n=value_n". + // + // The bounding_poly (optional) field is used to identify one region of + // interest from the image in the same manner as CreateReferenceImage. If no + // bounding_poly is specified, the system will try to detect regions of + // interest automatically. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 20MP). + // + // Also note that at most one bounding_poly is allowed per line. If the image + // contains multiple regions of interest, the csv should contain one line per + // region of interest. + // + // The bounding_poly column should contain an even number of comma-separated + // numbers, with the format "p1_x,p1_y,p2_x,p2_y,...,pn_x,pn_y". Nonnegative + // integers should be used for absolute bounding polygons, and float values + // in [0, 1] should be used for normalized bounding polygons. + CsvFileUri string `protobuf:"bytes,1,opt,name=csv_file_uri,json=csvFileUri,proto3" json:"csv_file_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsGcsSource) Reset() { *m = ImportProductSetsGcsSource{} } +func (m *ImportProductSetsGcsSource) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsGcsSource) ProtoMessage() {} +func (*ImportProductSetsGcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{24} +} +func (m *ImportProductSetsGcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsGcsSource.Unmarshal(m, b) +} +func (m *ImportProductSetsGcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsGcsSource.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsGcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsGcsSource.Merge(dst, src) +} +func (m *ImportProductSetsGcsSource) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsGcsSource.Size(m) +} +func (m *ImportProductSetsGcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsGcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsGcsSource proto.InternalMessageInfo + +func (m *ImportProductSetsGcsSource) GetCsvFileUri() string { + if m != nil { + return m.CsvFileUri + } + return "" +} + +// The input content for the `ImportProductSets` method. +type ImportProductSetsInputConfig struct { + // The source of the input. 
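+ //
+ // A single csv line following the column layout described above for
+ // ImportProductSetsGcsSource.csv_file_uri might look like this (the bucket,
+ // object and ids are hypothetical; only the required columns image_uri,
+ // product_set_id, product_id and product_category are filled in, while the
+ // optional image_id, product_display_name, labels and bounding_poly columns
+ // are left empty):
+ //
+ //   gs://my-bucket/images/shoe-01.jpg,,my-product-set,my-product-1,apparel,,,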
+ // + // Types that are valid to be assigned to Source: + // *ImportProductSetsInputConfig_GcsSource + Source isImportProductSetsInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsInputConfig) Reset() { *m = ImportProductSetsInputConfig{} } +func (m *ImportProductSetsInputConfig) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsInputConfig) ProtoMessage() {} +func (*ImportProductSetsInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{25} +} +func (m *ImportProductSetsInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsInputConfig.Unmarshal(m, b) +} +func (m *ImportProductSetsInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsInputConfig.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsInputConfig.Merge(dst, src) +} +func (m *ImportProductSetsInputConfig) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsInputConfig.Size(m) +} +func (m *ImportProductSetsInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsInputConfig proto.InternalMessageInfo + +type isImportProductSetsInputConfig_Source interface { + isImportProductSetsInputConfig_Source() +} + +type ImportProductSetsInputConfig_GcsSource struct { + GcsSource *ImportProductSetsGcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*ImportProductSetsInputConfig_GcsSource) isImportProductSetsInputConfig_Source() {} + +func (m *ImportProductSetsInputConfig) GetSource() isImportProductSetsInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *ImportProductSetsInputConfig) GetGcsSource() *ImportProductSetsGcsSource { + if x, ok := m.GetSource().(*ImportProductSetsInputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImportProductSetsInputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImportProductSetsInputConfig_OneofMarshaler, _ImportProductSetsInputConfig_OneofUnmarshaler, _ImportProductSetsInputConfig_OneofSizer, []interface{}{ + (*ImportProductSetsInputConfig_GcsSource)(nil), + } +} + +func _ImportProductSetsInputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImportProductSetsInputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _ImportProductSetsInputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImportProductSetsInputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImportProductSetsGcsSource) + err := b.DecodeMessage(msg) + m.Source = &ImportProductSetsInputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _ImportProductSetsInputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for the `ImportProductSets` method. +type ImportProductSetsRequest struct { + // The project in which the ProductSets should be imported. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The input content for the list of requests. 
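+ //
+ // A minimal sketch of an import call, assuming a ProductSearchClient `c`, a
+ // context `ctx`, and hypothetical project, location and bucket names; the
+ // oneof Source is populated through the ImportProductSetsInputConfig_GcsSource
+ // wrapper, and the call returns a *longrunning.Operation to poll:
+ //
+ //   op, err := c.ImportProductSets(ctx, &ImportProductSetsRequest{
+ //       Parent: "projects/my-project/locations/us-west1",
+ //       InputConfig: &ImportProductSetsInputConfig{
+ //           Source: &ImportProductSetsInputConfig_GcsSource{
+ //               GcsSource: &ImportProductSetsGcsSource{
+ //                   CsvFileUri: "gs://my-bucket/product_sets.csv",
+ //               },
+ //           },
+ //       },
+ //   })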
+ InputConfig *ImportProductSetsInputConfig `protobuf:"bytes,2,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsRequest) Reset() { *m = ImportProductSetsRequest{} } +func (m *ImportProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsRequest) ProtoMessage() {} +func (*ImportProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{26} +} +func (m *ImportProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsRequest.Unmarshal(m, b) +} +func (m *ImportProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsRequest.Merge(dst, src) +} +func (m *ImportProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsRequest.Size(m) +} +func (m *ImportProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsRequest proto.InternalMessageInfo + +func (m *ImportProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportProductSetsRequest) GetInputConfig() *ImportProductSetsInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// Response message for the `ImportProductSets` method. +// +// This message is returned by the +// [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] +// method in the returned +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field. +type ImportProductSetsResponse struct { + // The list of reference_images that are imported successfully. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The rpc status for each ImportProductSet request, including both successes + // and errors. + // + // The number of statuses here matches the number of lines in the csv file, + // and statuses[i] stores the success or failure status of processing the i-th + // line of the csv, starting from line 0. 
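+ //
+ // A small sketch of checking the per-line results, assuming `resp` is an
+ // *ImportProductSetsResponse obtained from the finished long-running
+ // operation; a zero Code means the corresponding csv line was imported
+ // successfully:
+ //
+ //   for i, st := range resp.GetStatuses() {
+ //       if st.Code != 0 {
+ //           fmt.Printf("csv line %d failed: %s\n", i, st.Message)
+ //       }
+ //   }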
+ Statuses []*status.Status `protobuf:"bytes,2,rep,name=statuses,proto3" json:"statuses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsResponse) Reset() { *m = ImportProductSetsResponse{} } +func (m *ImportProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsResponse) ProtoMessage() {} +func (*ImportProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{27} +} +func (m *ImportProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsResponse.Unmarshal(m, b) +} +func (m *ImportProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsResponse.Merge(dst, src) +} +func (m *ImportProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsResponse.Size(m) +} +func (m *ImportProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsResponse proto.InternalMessageInfo + +func (m *ImportProductSetsResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ImportProductSetsResponse) GetStatuses() []*status.Status { + if m != nil { + return m.Statuses + } + return nil +} + +// Metadata for the batch operations such as the current state. +// +// This is included in the `metadata` field of the `Operation` returned by the +// `GetOperation` call of the `google::longrunning::Operations` service. +type BatchOperationMetadata struct { + // The current state of the batch operation. + State BatchOperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1p3beta1.BatchOperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was submitted to the server. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // The time when the batch request is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchOperationMetadata) Reset() { *m = BatchOperationMetadata{} } +func (m *BatchOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*BatchOperationMetadata) ProtoMessage() {} +func (*BatchOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_ad9db519f21bfe92, []int{28} +} +func (m *BatchOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchOperationMetadata.Unmarshal(m, b) +} +func (m *BatchOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *BatchOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchOperationMetadata.Merge(dst, src) +} +func (m *BatchOperationMetadata) XXX_Size() int { + return xxx_messageInfo_BatchOperationMetadata.Size(m) +} +func (m *BatchOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BatchOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchOperationMetadata proto.InternalMessageInfo + +func (m *BatchOperationMetadata) GetState() BatchOperationMetadata_State { + if m != nil { + return m.State + } + return BatchOperationMetadata_STATE_UNSPECIFIED +} + +func (m *BatchOperationMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *BatchOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func init() { + proto.RegisterType((*Product)(nil), "google.cloud.vision.v1p3beta1.Product") + proto.RegisterType((*Product_KeyValue)(nil), "google.cloud.vision.v1p3beta1.Product.KeyValue") + proto.RegisterType((*ProductSet)(nil), "google.cloud.vision.v1p3beta1.ProductSet") + proto.RegisterType((*ReferenceImage)(nil), "google.cloud.vision.v1p3beta1.ReferenceImage") + proto.RegisterType((*CreateProductRequest)(nil), "google.cloud.vision.v1p3beta1.CreateProductRequest") + proto.RegisterType((*ListProductsRequest)(nil), "google.cloud.vision.v1p3beta1.ListProductsRequest") + proto.RegisterType((*ListProductsResponse)(nil), "google.cloud.vision.v1p3beta1.ListProductsResponse") + proto.RegisterType((*GetProductRequest)(nil), "google.cloud.vision.v1p3beta1.GetProductRequest") + proto.RegisterType((*UpdateProductRequest)(nil), "google.cloud.vision.v1p3beta1.UpdateProductRequest") + proto.RegisterType((*DeleteProductRequest)(nil), "google.cloud.vision.v1p3beta1.DeleteProductRequest") + proto.RegisterType((*CreateProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.CreateProductSetRequest") + proto.RegisterType((*ListProductSetsRequest)(nil), "google.cloud.vision.v1p3beta1.ListProductSetsRequest") + proto.RegisterType((*ListProductSetsResponse)(nil), "google.cloud.vision.v1p3beta1.ListProductSetsResponse") + proto.RegisterType((*GetProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.GetProductSetRequest") + proto.RegisterType((*UpdateProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.UpdateProductSetRequest") + proto.RegisterType((*DeleteProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.DeleteProductSetRequest") + proto.RegisterType((*CreateReferenceImageRequest)(nil), "google.cloud.vision.v1p3beta1.CreateReferenceImageRequest") + 
proto.RegisterType((*ListReferenceImagesRequest)(nil), "google.cloud.vision.v1p3beta1.ListReferenceImagesRequest") + proto.RegisterType((*ListReferenceImagesResponse)(nil), "google.cloud.vision.v1p3beta1.ListReferenceImagesResponse") + proto.RegisterType((*GetReferenceImageRequest)(nil), "google.cloud.vision.v1p3beta1.GetReferenceImageRequest") + proto.RegisterType((*DeleteReferenceImageRequest)(nil), "google.cloud.vision.v1p3beta1.DeleteReferenceImageRequest") + proto.RegisterType((*AddProductToProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.AddProductToProductSetRequest") + proto.RegisterType((*RemoveProductFromProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.RemoveProductFromProductSetRequest") + proto.RegisterType((*ListProductsInProductSetRequest)(nil), "google.cloud.vision.v1p3beta1.ListProductsInProductSetRequest") + proto.RegisterType((*ListProductsInProductSetResponse)(nil), "google.cloud.vision.v1p3beta1.ListProductsInProductSetResponse") + proto.RegisterType((*ImportProductSetsGcsSource)(nil), "google.cloud.vision.v1p3beta1.ImportProductSetsGcsSource") + proto.RegisterType((*ImportProductSetsInputConfig)(nil), "google.cloud.vision.v1p3beta1.ImportProductSetsInputConfig") + proto.RegisterType((*ImportProductSetsRequest)(nil), "google.cloud.vision.v1p3beta1.ImportProductSetsRequest") + proto.RegisterType((*ImportProductSetsResponse)(nil), "google.cloud.vision.v1p3beta1.ImportProductSetsResponse") + proto.RegisterType((*BatchOperationMetadata)(nil), "google.cloud.vision.v1p3beta1.BatchOperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1p3beta1.BatchOperationMetadata_State", BatchOperationMetadata_State_name, BatchOperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProductSearchClient is the client API for ProductSearch service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProductSearchClient interface { + // Creates and returns a new ProductSet resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. 
+ UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Permanently deletes a ProductSet. All Products and ReferenceImages in the + // ProductSet will be deleted. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) + // Makes changes to a Product resource. + // Only display_name, description and labels can be updated right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + // + // Possible errors: + // + // * Returns NOT_FOUND if the product does not exist. + DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. + // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. + // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. 
+ // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Permanently deletes a reference image. + // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the reference image does not exist. + DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND If the Product is not found under the ProductSet. + RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. (results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1p3beta1.ImportProductSetsGcsSource.csv_file_uri]. 
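+ //
+ // A rough sketch of obtaining and using a client that implements this
+ // interface, assuming `conn` is a *grpc.ClientConn already dialed to the
+ // Vision API endpoint with appropriate credentials, `ctx` is a
+ // context.Context, and the parent resource name is hypothetical:
+ //
+ //   c := NewProductSearchClient(conn)
+ //   resp, err := c.ListProductSets(ctx, &ListProductSetsRequest{
+ //       Parent: "projects/my-project/locations/us-west1",
+ //   })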
+ ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type productSearchClient struct { + cc *grpc.ClientConn +} + +func NewProductSearchClient(cc *grpc.ClientConn) ProductSearchClient { + return &productSearchClient{cc} +} + +func (c *productSearchClient) CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/CreateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) { + out := new(ListProductSetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/ListProductSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/GetProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/CreateProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) { + out := new(ListProductsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/ListProducts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/GetProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProduct", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/CreateReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) { + out := new(ListReferenceImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/ListReferenceImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/GetReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/AddProductToProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/RemoveProductFromProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) { + out := new(ListProductsInProductSetResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/ListProductsInProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p3beta1.ProductSearch/ImportProductSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProductSearchServer is the server API for ProductSearch service. +type ProductSearchServer interface { + // Creates and returns a new ProductSet resource. 
+ // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(context.Context, *CreateProductSetRequest) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(context.Context, *ListProductSetsRequest) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(context.Context, *GetProductSetRequest) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. + UpdateProductSet(context.Context, *UpdateProductSetRequest) (*ProductSet, error) + // Permanently deletes a ProductSet. All Products and ReferenceImages in the + // ProductSet will be deleted. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + DeleteProductSet(context.Context, *DeleteProductSetRequest) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(context.Context, *CreateProductRequest) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(context.Context, *ListProductsRequest) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(context.Context, *GetProductRequest) (*Product, error) + // Makes changes to a Product resource. + // Only display_name, description and labels can be updated right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(context.Context, *UpdateProductRequest) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + // + // Possible errors: + // + // * Returns NOT_FOUND if the product does not exist. + DeleteProduct(context.Context, *DeleteProductRequest) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. 
+ // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. + // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. + // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(context.Context, *CreateReferenceImageRequest) (*ReferenceImage, error) + // Permanently deletes a reference image. + // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the reference image does not exist. + DeleteReferenceImage(context.Context, *DeleteReferenceImageRequest) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(context.Context, *ListReferenceImagesRequest) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(context.Context, *GetReferenceImageRequest) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(context.Context, *AddProductToProductSetRequest) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND If the Product is not found under the ProductSet. + RemoveProductFromProductSet(context.Context, *RemoveProductFromProductSetRequest) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(context.Context, *ListProductsInProductSetRequest) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. 
(results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1p3beta1.ImportProductSetsGcsSource.csv_file_uri]. + ImportProductSets(context.Context, *ImportProductSetsRequest) (*longrunning.Operation, error) +} + +func RegisterProductSearchServer(s *grpc.Server, srv ProductSearchServer) { + s.RegisterService(&_ProductSearch_serviceDesc, srv) +} + +func _ProductSearch_CreateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/CreateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProductSet(ctx, req.(*CreateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/ListProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductSets(ctx, req.(*ListProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/GetProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProductSet(ctx, req.(*GetProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProductSet(ctx, req.(*UpdateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductSetRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProductSet(ctx, req.(*DeleteProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/CreateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProduct(ctx, req.(*CreateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProducts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProducts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/ListProducts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProducts(ctx, req.(*ListProductsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/GetProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProduct(ctx, req.(*GetProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/UpdateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProduct(ctx, req.(*UpdateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ProductSearchServer).DeleteProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProduct(ctx, req.(*DeleteProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/CreateReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, req.(*CreateReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/DeleteReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, req.(*DeleteReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListReferenceImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListReferenceImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListReferenceImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/ListReferenceImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListReferenceImages(ctx, req.(*ListReferenceImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/GetReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetReferenceImage(ctx, req.(*GetReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_AddProductToProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddProductToProductSetRequest) + 
if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/AddProductToProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, req.(*AddProductToProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_RemoveProductFromProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveProductFromProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/RemoveProductFromProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, req.(*RemoveProductFromProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductsInProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsInProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/ListProductsInProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, req.(*ListProductsInProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ImportProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ImportProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p3beta1.ProductSearch/ImportProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ImportProductSets(ctx, req.(*ImportProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProductSearch_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p3beta1.ProductSearch", + HandlerType: (*ProductSearchServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateProductSet", + Handler: _ProductSearch_CreateProductSet_Handler, + }, + { + MethodName: "ListProductSets", + Handler: _ProductSearch_ListProductSets_Handler, + }, + { + MethodName: "GetProductSet", + Handler: _ProductSearch_GetProductSet_Handler, + }, + { + MethodName: "UpdateProductSet", + Handler: _ProductSearch_UpdateProductSet_Handler, + }, + { + MethodName: "DeleteProductSet", + Handler: _ProductSearch_DeleteProductSet_Handler, + }, + { + MethodName: "CreateProduct", + Handler: _ProductSearch_CreateProduct_Handler, + }, + { + MethodName: 
"ListProducts", + Handler: _ProductSearch_ListProducts_Handler, + }, + { + MethodName: "GetProduct", + Handler: _ProductSearch_GetProduct_Handler, + }, + { + MethodName: "UpdateProduct", + Handler: _ProductSearch_UpdateProduct_Handler, + }, + { + MethodName: "DeleteProduct", + Handler: _ProductSearch_DeleteProduct_Handler, + }, + { + MethodName: "CreateReferenceImage", + Handler: _ProductSearch_CreateReferenceImage_Handler, + }, + { + MethodName: "DeleteReferenceImage", + Handler: _ProductSearch_DeleteReferenceImage_Handler, + }, + { + MethodName: "ListReferenceImages", + Handler: _ProductSearch_ListReferenceImages_Handler, + }, + { + MethodName: "GetReferenceImage", + Handler: _ProductSearch_GetReferenceImage_Handler, + }, + { + MethodName: "AddProductToProductSet", + Handler: _ProductSearch_AddProductToProductSet_Handler, + }, + { + MethodName: "RemoveProductFromProductSet", + Handler: _ProductSearch_RemoveProductFromProductSet_Handler, + }, + { + MethodName: "ListProductsInProductSet", + Handler: _ProductSearch_ListProductsInProductSet_Handler, + }, + { + MethodName: "ImportProductSets", + Handler: _ProductSearch_ImportProductSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p3beta1/product_search_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/product_search_service.proto", fileDescriptor_product_search_service_ad9db519f21bfe92) +} + +var fileDescriptor_product_search_service_ad9db519f21bfe92 = []byte{ + // 1867 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcf, 0x6f, 0xe3, 0xc6, + 0x15, 0xce, 0xd8, 0xeb, 0x5d, 0xfb, 0xc9, 0xb2, 0xb5, 0x53, 0xd7, 0x56, 0xe4, 0x2c, 0xe2, 0xb0, + 0x45, 0x9a, 0xa8, 0x89, 0xd8, 0x95, 0xb1, 0xdb, 0xae, 0xdc, 0x6c, 0x6a, 0xcb, 0xb2, 0xa3, 0xc6, + 0xbb, 0xeb, 0x50, 0xf6, 0x36, 0xcd, 0x21, 0x02, 0x4d, 0x8e, 0x19, 0x76, 0x29, 0x92, 0xe1, 0x50, + 0x46, 0x94, 0x22, 0x97, 0xa0, 0x87, 0xa2, 0x87, 0x06, 0x68, 0xd1, 0x43, 0x4e, 0x05, 0x12, 0xf4, + 0x5c, 0xa0, 0x3f, 0x0e, 0x4d, 0x73, 0x28, 0x7a, 0x6b, 0x0f, 0xbd, 0xf4, 0xd8, 0x53, 0x81, 0xfc, + 0x11, 0x3d, 0x16, 0x33, 0x1c, 0x4a, 0x24, 0x45, 0x99, 0x94, 0x36, 0x8b, 0xdc, 0x34, 0xc3, 0xf7, + 0xde, 0x7c, 0xef, 0xbd, 0x6f, 0x66, 0xde, 0x1b, 0x41, 0xc3, 0x70, 0x1c, 0xc3, 0x22, 0xb2, 0x66, + 0x39, 0x7d, 0x5d, 0xbe, 0x30, 0xa9, 0xe9, 0xd8, 0xf2, 0xc5, 0x4d, 0x77, 0xfb, 0x8c, 0xf8, 0xea, + 0x4d, 0xd9, 0xf5, 0x1c, 0xbd, 0xaf, 0xf9, 0x5d, 0x4a, 0x54, 0x4f, 0x7b, 0xa7, 0x4b, 0x89, 0x77, + 0x61, 0x6a, 0xa4, 0xe6, 0x7a, 0x8e, 0xef, 0xe0, 0x1b, 0x81, 0x6e, 0x8d, 0xeb, 0xd6, 0x02, 0xdd, + 0xda, 0x50, 0xb7, 0xf2, 0x8c, 0x30, 0xad, 0xba, 0xa6, 0xac, 0xda, 0xb6, 0xe3, 0xab, 0xbe, 0xe9, + 0xd8, 0x34, 0x50, 0xae, 0xbc, 0x74, 0xf9, 0xc2, 0x06, 0x71, 0x7a, 0xc4, 0xf7, 0x06, 0x42, 0xfa, + 0x1b, 0x42, 0xda, 0x72, 0x6c, 0xc3, 0xeb, 0xdb, 0xb6, 0x69, 0x1b, 0xb2, 0xe3, 0x12, 0x2f, 0x66, + 0x72, 0x53, 0x08, 0xf1, 0xd1, 0x59, 0xff, 0x5c, 0x26, 0x3d, 0xd7, 0x0f, 0x2d, 0x6c, 0x25, 0x3f, + 0x9e, 0x9b, 0xc4, 0xd2, 0xbb, 0x3d, 0x95, 0x3e, 0x12, 0x12, 0xcf, 0x26, 0x25, 0x7c, 0xb3, 0x47, + 0xa8, 0xaf, 0xf6, 0x5c, 0x21, 0xb0, 0x21, 0x04, 0x3c, 0x57, 0x93, 0xa9, 0xaf, 0xfa, 0x7d, 0xb1, + 0xb0, 0xf4, 0xf1, 0x1c, 0x5c, 0x3b, 0x0e, 0x22, 0x85, 0x31, 0x5c, 0xb1, 0xd5, 0x1e, 0x29, 0xa3, + 0x2d, 0xf4, 0xc2, 0x92, 0xc2, 0x7f, 0xe3, 0xe7, 0x60, 0x59, 0x37, 0xa9, 0x6b, 0xa9, 0x83, 0x2e, + 0xff, 0x36, 0xc7, 0xbf, 0x15, 0xc4, 0xdc, 0x7d, 0x26, 0xb2, 0x05, 0x05, 0x9d, 0x50, 0xcd, 0x33, + 0x5d, 0xe6, 0x51, 0x79, 0x5e, 
0x48, 0x8c, 0xa6, 0xf0, 0x8b, 0x50, 0x0a, 0xb3, 0xa1, 0xa9, 0x3e, + 0x31, 0x1c, 0x6f, 0x50, 0xbe, 0xc2, 0xc5, 0x56, 0xc5, 0x7c, 0x53, 0x4c, 0xe3, 0x87, 0xb0, 0x12, + 0x8a, 0x5a, 0xea, 0x19, 0xb1, 0x68, 0x79, 0x61, 0x6b, 0xfe, 0x85, 0x42, 0x5d, 0xae, 0x5d, 0x9a, + 0xb1, 0x9a, 0xf0, 0xa1, 0xf6, 0x3a, 0x19, 0x3c, 0x54, 0xad, 0x3e, 0x51, 0x8a, 0xc2, 0xcc, 0x11, + 0xb7, 0x52, 0xa9, 0xc3, 0x62, 0xf8, 0x09, 0x97, 0x60, 0xfe, 0x11, 0x19, 0x08, 0x37, 0xd9, 0x4f, + 0xbc, 0x06, 0x0b, 0x17, 0xec, 0x93, 0x70, 0x2f, 0x18, 0x48, 0x7f, 0x44, 0x00, 0xc2, 0x6e, 0x87, + 0xcc, 0x1c, 0x9e, 0x3b, 0x00, 0xa6, 0xad, 0x93, 0xf7, 0xba, 0x2c, 0x27, 0x3c, 0x3a, 0x85, 0x7a, + 0x25, 0xf4, 0x26, 0x4c, 0x58, 0xed, 0x24, 0x4c, 0x98, 0xb2, 0xc4, 0xa5, 0xd9, 0x18, 0x6f, 0x43, + 0x21, 0x50, 0x25, 0x9e, 0xe7, 0x78, 0x3c, 0x64, 0x85, 0x3a, 0x0e, 0x75, 0x3d, 0x57, 0xab, 0x75, + 0x78, 0x2e, 0x95, 0x60, 0x85, 0x16, 0x93, 0x92, 0x7e, 0x81, 0x60, 0x45, 0x21, 0xe7, 0xc4, 0x23, + 0xb6, 0x46, 0xda, 0x3d, 0xd5, 0x20, 0xa9, 0xc8, 0x4b, 0x30, 0xdf, 0xf7, 0x4c, 0x01, 0x98, 0xfd, + 0xc4, 0x0a, 0xac, 0x9c, 0x39, 0x7d, 0x5b, 0x37, 0x6d, 0xa3, 0xeb, 0x3a, 0xd6, 0x80, 0x96, 0xe7, + 0x79, 0xe8, 0xbf, 0x9d, 0x11, 0xfa, 0x3d, 0xa1, 0x74, 0xec, 0x58, 0x03, 0xa5, 0x78, 0x16, 0x19, + 0x51, 0xe9, 0x23, 0x04, 0x6b, 0x4d, 0x8f, 0xa8, 0x3e, 0x11, 0x81, 0x54, 0xc8, 0xbb, 0x7d, 0x42, + 0x7d, 0xbc, 0x0e, 0x57, 0x5d, 0xd5, 0x23, 0xb6, 0x2f, 0x40, 0x89, 0x11, 0xfe, 0x01, 0x5c, 0x13, + 0x89, 0xe3, 0xd0, 0x0a, 0xf5, 0xe7, 0xf3, 0x25, 0x5e, 0x09, 0xd5, 0xf0, 0x0d, 0x80, 0x90, 0x41, + 0xa6, 0x2e, 0xd8, 0xb8, 0x24, 0x66, 0xda, 0xba, 0x64, 0xc2, 0xd7, 0x8e, 0x4c, 0xea, 0x0b, 0x35, + 0x9a, 0x85, 0x67, 0x13, 0x96, 0x5c, 0xd5, 0x20, 0x5d, 0x6a, 0xbe, 0x1f, 0x64, 0x77, 0x41, 0x59, + 0x64, 0x13, 0x1d, 0xf3, 0x7d, 0xc2, 0x97, 0x62, 0x1f, 0x7d, 0xe7, 0x11, 0xb1, 0x87, 0x4b, 0xa9, + 0x06, 0x39, 0x61, 0x13, 0xd2, 0x87, 0x08, 0xd6, 0xe2, 0x6b, 0x51, 0xd7, 0xb1, 0x29, 0xc1, 0x7b, + 0xb0, 0x28, 0x00, 0xd1, 0x32, 0xe2, 0x31, 0xce, 0xeb, 0xe5, 0x50, 0x0f, 0x3f, 0x0f, 0xab, 0x36, + 0x79, 0xcf, 0xef, 0x46, 0x00, 0x04, 0xb9, 0x2c, 0xb2, 0xe9, 0xe3, 0x21, 0x88, 0x6f, 0xc1, 0xf5, + 0x43, 0xe2, 0x27, 0xa2, 0x9f, 0x42, 0x08, 0xe9, 0x37, 0x08, 0xd6, 0x4e, 0x5d, 0x7d, 0x3c, 0x55, + 0x91, 0x94, 0xa0, 0xd9, 0x52, 0xb2, 0x03, 0x85, 0x3e, 0xb7, 0xcc, 0xcf, 0x2c, 0x91, 0xd8, 0xf1, + 0x3d, 0x70, 0xc0, 0x8e, 0xb5, 0x7b, 0x2a, 0x7d, 0xa4, 0x40, 0x20, 0xce, 0x7e, 0x4b, 0x55, 0x58, + 0xdb, 0x27, 0x16, 0x19, 0x83, 0x95, 0xe6, 0xc3, 0xa7, 0x08, 0x36, 0x62, 0x74, 0xeb, 0x90, 0x4c, + 0xc6, 0xfd, 0x10, 0x0a, 0xa3, 0xab, 0x22, 0x64, 0xdd, 0x8b, 0xf9, 0x5c, 0x64, 0xe6, 0x43, 0xb6, + 0xb1, 0x23, 0xe2, 0x9b, 0xa3, 0xd3, 0x8b, 0x92, 0x08, 0xff, 0x96, 0x47, 0x32, 0x6d, 0x5d, 0xb2, + 0x60, 0x3d, 0x42, 0x8b, 0x0e, 0x79, 0xb2, 0x2c, 0xfc, 0x08, 0xc1, 0xc6, 0xd8, 0x72, 0x82, 0x88, + 0x47, 0xb0, 0x1c, 0xc1, 0x1b, 0x92, 0x71, 0x0a, 0xe7, 0x0b, 0x23, 0xc7, 0xf2, 0x53, 0xb2, 0x0a, + 0x6b, 0x23, 0x4a, 0x46, 0x32, 0x94, 0x96, 0xd1, 0x4f, 0x10, 0x6c, 0xc4, 0x58, 0x19, 0x91, 0x4f, + 0x64, 0x0e, 0x3d, 0x4e, 0xe6, 0x1e, 0x8b, 0xa2, 0x2f, 0xc3, 0x46, 0x8c, 0xa2, 0x19, 0x3e, 0xfd, + 0x15, 0xc1, 0x66, 0xc0, 0xd2, 0xf8, 0x39, 0x9d, 0xc5, 0x82, 0x87, 0xb0, 0xea, 0x85, 0x0a, 0x5d, + 0x93, 0x69, 0x08, 0x9c, 0x2f, 0x67, 0xf8, 0x9c, 0x58, 0x66, 0xc5, 0x8b, 0x5f, 0x0f, 0x2f, 0x01, + 0x4e, 0xd8, 0x1d, 0x31, 0xb7, 0x14, 0x97, 0x6d, 0xeb, 0x92, 0x0b, 0x15, 0x46, 0xa7, 0xb8, 0xcd, + 0x27, 0xca, 0xe0, 0xcf, 0x11, 0x6c, 0xa6, 0x2e, 0x29, 0x58, 0xfc, 0x26, 0x94, 0x12, 0xf8, 0x43, + 0x26, 0x4f, 0x19, 0x98, 0xd5, 0xb8, 0xb3, 0xf4, 0x72, 
0xd4, 0x29, 0x74, 0x9f, 0x4f, 0xa3, 0x7b, + 0x0d, 0xca, 0x87, 0xc4, 0x4f, 0x4f, 0x75, 0x1a, 0x3d, 0x6e, 0xc2, 0x66, 0xc0, 0xa6, 0xfc, 0x2a, + 0xf7, 0xe0, 0xc6, 0xae, 0xae, 0x0b, 0xf6, 0x9d, 0x38, 0xb9, 0x68, 0x88, 0xcb, 0xf1, 0xab, 0x76, + 0x69, 0x78, 0x5e, 0x4b, 0x0a, 0x48, 0x0a, 0xe9, 0x39, 0x17, 0x21, 0x9f, 0x0f, 0x3c, 0xa7, 0xf7, + 0xb8, 0x36, 0xdf, 0x85, 0x67, 0xa3, 0x77, 0x61, 0xdb, 0xce, 0x67, 0xf0, 0x71, 0x78, 0xf3, 0x4b, + 0x04, 0x5b, 0x93, 0xd7, 0xfc, 0x0a, 0xee, 0xe2, 0xbb, 0x50, 0x69, 0xf7, 0x5c, 0xc7, 0x8b, 0x9e, + 0xc5, 0x87, 0x1a, 0xed, 0x38, 0x7d, 0x4f, 0x63, 0x75, 0xf4, 0xb2, 0x46, 0x2f, 0xba, 0xe7, 0xa6, + 0x45, 0xba, 0xac, 0x34, 0x0b, 0xc2, 0x00, 0x1a, 0xbd, 0x38, 0x30, 0x2d, 0x72, 0xea, 0x99, 0xd2, + 0xcf, 0x10, 0x3c, 0x33, 0x66, 0xa0, 0x6d, 0xbb, 0x7d, 0xbf, 0xe9, 0xd8, 0xe7, 0xa6, 0x81, 0xdf, + 0x02, 0x30, 0x34, 0xda, 0xa5, 0xdc, 0xa0, 0x38, 0x10, 0xef, 0x64, 0xb8, 0x33, 0x19, 0xd1, 0x6b, + 0x4f, 0x29, 0x4b, 0x46, 0x38, 0xd8, 0x5b, 0x84, 0xab, 0x81, 0x5d, 0xe9, 0x57, 0x08, 0xca, 0x63, + 0x5a, 0x59, 0x07, 0xc0, 0xdb, 0xb0, 0x6c, 0x32, 0xa4, 0x5d, 0x8d, 0x43, 0x15, 0x27, 0xd7, 0xce, + 0xb4, 0xe0, 0x22, 0xde, 0x2a, 0x05, 0x73, 0x34, 0x90, 0x7e, 0x87, 0xe0, 0xe9, 0x14, 0x50, 0x4f, + 0xfc, 0x88, 0xa8, 0xc1, 0x62, 0xd0, 0x50, 0x11, 0x5a, 0x9e, 0xe3, 0x16, 0xd3, 0x0a, 0xf4, 0xa1, + 0x8c, 0xf4, 0xd9, 0x1c, 0xac, 0xef, 0xa9, 0xbe, 0xf6, 0xce, 0x83, 0xb0, 0x07, 0xbc, 0x47, 0x7c, + 0x55, 0x57, 0x7d, 0x15, 0xbf, 0x01, 0x0b, 0x4c, 0x2c, 0x48, 0xdc, 0x4a, 0x66, 0x6c, 0xd2, 0xad, + 0xf0, 0x15, 0x89, 0x12, 0x58, 0x62, 0xd7, 0x1a, 0xed, 0x9f, 0xf5, 0x4c, 0x3f, 0xe8, 0x3e, 0xe6, + 0x32, 0xbb, 0x0f, 0x08, 0xc4, 0x79, 0xfb, 0x71, 0x0b, 0x16, 0x89, 0xad, 0xe7, 0xed, 0x5b, 0xae, + 0x11, 0x5b, 0x67, 0x23, 0xe9, 0xc7, 0xb0, 0xc0, 0x31, 0xe0, 0xaf, 0xc3, 0xf5, 0xce, 0xc9, 0xee, + 0x49, 0xab, 0x7b, 0x7a, 0xbf, 0x73, 0xdc, 0x6a, 0xb6, 0x0f, 0xda, 0xad, 0xfd, 0xd2, 0x53, 0x78, + 0x05, 0xe0, 0x58, 0x79, 0xd0, 0x6c, 0x75, 0x3a, 0xed, 0xfb, 0x87, 0x25, 0xc4, 0xc6, 0x9d, 0xd3, + 0x26, 0x1b, 0x1f, 0x9c, 0x1e, 0x95, 0xe6, 0x30, 0xc0, 0xd5, 0x83, 0xdd, 0xf6, 0x51, 0x6b, 0xbf, + 0x34, 0x8f, 0x8b, 0xb0, 0xd4, 0xdc, 0xbd, 0xdf, 0x6c, 0x1d, 0xb1, 0xe1, 0x95, 0xfa, 0x6f, 0x37, + 0xa1, 0x38, 0x4c, 0x2f, 0x6b, 0xeb, 0xf1, 0xdf, 0x11, 0x94, 0x92, 0x15, 0x1f, 0xbe, 0x9d, 0x11, + 0xb9, 0x09, 0x25, 0x62, 0x25, 0x7f, 0xed, 0x20, 0xbd, 0xfe, 0xe1, 0xbf, 0xbf, 0xf8, 0xf5, 0x5c, + 0x4b, 0xba, 0x1d, 0x69, 0xfc, 0x7f, 0x1a, 0x50, 0xfd, 0x15, 0xd7, 0x73, 0x7e, 0x42, 0x34, 0x9f, + 0xca, 0x55, 0xd9, 0x72, 0xb4, 0xa0, 0xcf, 0x97, 0xab, 0x1f, 0xc8, 0x91, 0x5a, 0xa9, 0x11, 0xad, + 0x5c, 0xf0, 0xdf, 0x10, 0xac, 0x26, 0x4a, 0x34, 0x7c, 0x2b, 0x03, 0x4b, 0x7a, 0x05, 0x59, 0xb9, + 0x3d, 0xad, 0x5a, 0xb0, 0x41, 0xa4, 0xbb, 0xdc, 0x9f, 0xef, 0xe1, 0x19, 0xfd, 0xc1, 0xbf, 0x47, + 0x50, 0x8c, 0x15, 0x75, 0x78, 0x3b, 0x03, 0x49, 0x5a, 0x09, 0x38, 0x4d, 0x06, 0x52, 0x11, 0xb3, + 0x3b, 0x63, 0x02, 0xde, 0x28, 0x5c, 0xb9, 0xfa, 0x01, 0xfe, 0x17, 0x82, 0x52, 0xb2, 0xb2, 0xcc, + 0x64, 0xce, 0x84, 0x52, 0x74, 0x1a, 0xdc, 0x3f, 0xe2, 0xb8, 0xdf, 0xa8, 0xef, 0xc5, 0x22, 0x3d, + 0x62, 0x43, 0x2d, 0xbf, 0x0f, 0x71, 0x16, 0x7d, 0x82, 0xa0, 0x94, 0x2c, 0x43, 0x33, 0x1d, 0x9a, + 0x50, 0xb7, 0x56, 0xd6, 0xc7, 0x76, 0x7a, 0xab, 0xe7, 0xfa, 0x83, 0x30, 0xea, 0xd5, 0x59, 0xa3, + 0xfe, 0x07, 0x04, 0xc5, 0xd8, 0xf6, 0xcb, 0xe4, 0x49, 0xda, 0xf3, 0x41, 0x25, 0xe7, 0x1d, 0x2d, + 0xed, 0x73, 0xb8, 0x77, 0xa5, 0xed, 0xe9, 0x69, 0x4d, 0x1b, 0xc3, 0xfe, 0xf5, 0x4f, 0x08, 0x96, + 0xa3, 0x85, 0x04, 0xae, 0xe7, 0xdf, 0x65, 0xc3, 0x9d, 0xb9, 0x3d, 0x95, 0x8e, 
0xd8, 0x96, 0x3b, + 0x1c, 0xff, 0x2d, 0x3c, 0x0b, 0x7e, 0xfc, 0x29, 0x02, 0x18, 0xed, 0x32, 0xfc, 0x9d, 0xdc, 0x1b, + 0x72, 0xda, 0x28, 0xa7, 0xa2, 0xcc, 0x41, 0x0a, 0xce, 0x88, 0xcf, 0x10, 0x14, 0x63, 0xdb, 0x2a, + 0x93, 0x11, 0x69, 0xaf, 0x14, 0xb9, 0xb1, 0x1e, 0x71, 0xac, 0x07, 0xf5, 0x9d, 0x94, 0xed, 0x57, + 0xcb, 0x89, 0x79, 0xc4, 0x8c, 0x8f, 0x11, 0x14, 0x63, 0x5b, 0x28, 0x13, 0x7c, 0xda, 0x5b, 0xc6, + 0xc4, 0xdd, 0x26, 0x02, 0x5b, 0x9d, 0x29, 0xb0, 0xff, 0x1d, 0xbe, 0xbd, 0x25, 0x9e, 0x03, 0x1b, + 0xb9, 0x76, 0x5c, 0x6a, 0xf7, 0x51, 0x99, 0xae, 0x5c, 0x92, 0xde, 0xe6, 0x0e, 0xbc, 0x29, 0xb5, + 0x72, 0xf3, 0x37, 0xea, 0x82, 0x9c, 0x28, 0xb6, 0x1a, 0xc9, 0xfe, 0x17, 0xff, 0x05, 0x85, 0xaf, + 0x43, 0x53, 0xfa, 0x78, 0x49, 0x87, 0x35, 0x31, 0x1b, 0xf7, 0xb8, 0x33, 0x87, 0xd5, 0xd6, 0xf4, + 0xd9, 0x48, 0x7a, 0xc2, 0xf2, 0xf3, 0x1f, 0x14, 0x3c, 0x45, 0x26, 0xda, 0x5a, 0x7c, 0x27, 0xc7, + 0x41, 0x91, 0xde, 0x7d, 0x57, 0x1a, 0xb3, 0xa8, 0x8a, 0xa3, 0x46, 0x78, 0x87, 0xbf, 0x9c, 0x54, + 0xe1, 0x7f, 0x22, 0xfe, 0xf0, 0x98, 0x48, 0xcb, 0x77, 0xb3, 0xcf, 0xa0, 0x2f, 0x85, 0x77, 0xa9, + 0xce, 0xcc, 0x9e, 0xaa, 0xcf, 0x11, 0xac, 0xa7, 0x37, 0xd8, 0xf8, 0xfb, 0x19, 0xc0, 0x2e, 0xed, + 0xcb, 0x27, 0x52, 0x4d, 0x9c, 0x52, 0xd2, 0xee, 0x6c, 0xd7, 0x6c, 0x43, 0x1d, 0xae, 0xda, 0x40, + 0x55, 0xfc, 0x0f, 0x04, 0x9b, 0x97, 0x34, 0xf4, 0x78, 0x37, 0x33, 0xb8, 0x59, 0x8f, 0x01, 0x13, + 0x1d, 0x79, 0xc0, 0x1d, 0x69, 0x4b, 0xfb, 0x33, 0x3a, 0xe2, 0x45, 0x97, 0x66, 0xbe, 0x7c, 0x81, + 0xa0, 0x3c, 0xa9, 0xa9, 0xc7, 0x77, 0xa7, 0xb8, 0x63, 0x53, 0x5e, 0x20, 0x2a, 0xaf, 0xce, 0xac, + 0x2f, 0x36, 0xd1, 0x21, 0x77, 0x77, 0x17, 0xbf, 0x3a, 0x9b, 0xbb, 0xa3, 0xbb, 0xfb, 0xcf, 0x08, + 0xae, 0x8f, 0xb5, 0xb3, 0x99, 0xdb, 0x67, 0x52, 0x57, 0x5e, 0xb9, 0x11, 0x2a, 0x46, 0xfe, 0x85, + 0xac, 0x0d, 0x7b, 0x47, 0xe9, 0x35, 0x0e, 0x7b, 0x4f, 0x7a, 0x65, 0xc6, 0x6e, 0xc6, 0xe4, 0xeb, + 0x36, 0x50, 0x75, 0xef, 0xe7, 0x08, 0x9e, 0xd3, 0x9c, 0xde, 0xe5, 0x38, 0xf7, 0x9e, 0x8e, 0x35, + 0x71, 0x9d, 0xe0, 0xaf, 0xd9, 0x63, 0xc6, 0x9c, 0x63, 0xf4, 0x56, 0x53, 0xe8, 0x1a, 0x8e, 0xa5, + 0xda, 0x46, 0xcd, 0xf1, 0x0c, 0xd9, 0x20, 0x36, 0xe7, 0x95, 0x1c, 0x7c, 0x52, 0x5d, 0x93, 0x4e, + 0xf8, 0xf7, 0x75, 0x27, 0x98, 0xf8, 0x1f, 0x42, 0x67, 0x57, 0xb9, 0xca, 0xf6, 0xff, 0x03, 0x00, + 0x00, 0xff, 0xff, 0x24, 0x8c, 0xbc, 0xfe, 0x28, 0x1e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/text_annotation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/text_annotation.pb.go new file mode 100644 index 0000000..db0ad4a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/text_annotation.pb.go @@ -0,0 +1,798 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/text_annotation.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to denote the type of break found. New line, space etc. +type TextAnnotation_DetectedBreak_BreakType int32 + +const ( + // Unknown break label type. + TextAnnotation_DetectedBreak_UNKNOWN TextAnnotation_DetectedBreak_BreakType = 0 + // Regular space. + TextAnnotation_DetectedBreak_SPACE TextAnnotation_DetectedBreak_BreakType = 1 + // Sure space (very wide). + TextAnnotation_DetectedBreak_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 2 + // Line-wrapping break. + TextAnnotation_DetectedBreak_EOL_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 3 + // End-line hyphen that is not present in text; does not co-occur with + // `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + TextAnnotation_DetectedBreak_HYPHEN TextAnnotation_DetectedBreak_BreakType = 4 + // Line break that ends a paragraph. + TextAnnotation_DetectedBreak_LINE_BREAK TextAnnotation_DetectedBreak_BreakType = 5 +) + +var TextAnnotation_DetectedBreak_BreakType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SPACE", + 2: "SURE_SPACE", + 3: "EOL_SURE_SPACE", + 4: "HYPHEN", + 5: "LINE_BREAK", +} +var TextAnnotation_DetectedBreak_BreakType_value = map[string]int32{ + "UNKNOWN": 0, + "SPACE": 1, + "SURE_SPACE": 2, + "EOL_SURE_SPACE": 3, + "HYPHEN": 4, + "LINE_BREAK": 5, +} + +func (x TextAnnotation_DetectedBreak_BreakType) String() string { + return proto.EnumName(TextAnnotation_DetectedBreak_BreakType_name, int32(x)) +} +func (TextAnnotation_DetectedBreak_BreakType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{0, 1, 0} +} + +// Type of a block (text, image etc) as identified by OCR. +type Block_BlockType int32 + +const ( + // Unknown block type. + Block_UNKNOWN Block_BlockType = 0 + // Regular text block. + Block_TEXT Block_BlockType = 1 + // Table block. + Block_TABLE Block_BlockType = 2 + // Image block. + Block_PICTURE Block_BlockType = 3 + // Horizontal/vertical line box. + Block_RULER Block_BlockType = 4 + // Barcode block. + Block_BARCODE Block_BlockType = 5 +) + +var Block_BlockType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "TEXT", + 2: "TABLE", + 3: "PICTURE", + 4: "RULER", + 5: "BARCODE", +} +var Block_BlockType_value = map[string]int32{ + "UNKNOWN": 0, + "TEXT": 1, + "TABLE": 2, + "PICTURE": 3, + "RULER": 4, + "BARCODE": 5, +} + +func (x Block_BlockType) String() string { + return proto.EnumName(Block_BlockType_name, int32(x)) +} +func (Block_BlockType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{2, 0} +} + +// TextAnnotation contains a structured representation of OCR extracted text. +// The hierarchy of an OCR extracted text structure is like this: +// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol +// Each structural component, starting from Page, may further have their own +// properties. Properties describe detected languages, breaks etc.. Please refer +// to the +// [TextAnnotation.TextProperty][google.cloud.vision.v1p3beta1.TextAnnotation.TextProperty] +// message definition below for more detail. +type TextAnnotation struct { + // List of pages detected by OCR. + Pages []*Page `protobuf:"bytes,1,rep,name=pages,proto3" json:"pages,omitempty"` + // UTF-8 text detected on the pages. 
+ Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{0} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +// Detected language for a structural component. +type TextAnnotation_DetectedLanguage struct { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Confidence of detected language. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedLanguage) Reset() { *m = TextAnnotation_DetectedLanguage{} } +func (m *TextAnnotation_DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedLanguage) ProtoMessage() {} +func (*TextAnnotation_DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{0, 0} +} +func (m *TextAnnotation_DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedLanguage.Merge(dst, src) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Size(m) +} +func (m *TextAnnotation_DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedLanguage proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *TextAnnotation_DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected start or end of a structural component. 
+type TextAnnotation_DetectedBreak struct { + // Detected break type. + Type TextAnnotation_DetectedBreak_BreakType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p3beta1.TextAnnotation_DetectedBreak_BreakType" json:"type,omitempty"` + // True if break prepends the element. + IsPrefix bool `protobuf:"varint,2,opt,name=is_prefix,json=isPrefix,proto3" json:"is_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedBreak) Reset() { *m = TextAnnotation_DetectedBreak{} } +func (m *TextAnnotation_DetectedBreak) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedBreak) ProtoMessage() {} +func (*TextAnnotation_DetectedBreak) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{0, 1} +} +func (m *TextAnnotation_DetectedBreak) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedBreak) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedBreak) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedBreak.Merge(dst, src) +} +func (m *TextAnnotation_DetectedBreak) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Size(m) +} +func (m *TextAnnotation_DetectedBreak) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedBreak.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedBreak proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedBreak) GetType() TextAnnotation_DetectedBreak_BreakType { + if m != nil { + return m.Type + } + return TextAnnotation_DetectedBreak_UNKNOWN +} + +func (m *TextAnnotation_DetectedBreak) GetIsPrefix() bool { + if m != nil { + return m.IsPrefix + } + return false +} + +// Additional information detected on the structural component. +type TextAnnotation_TextProperty struct { + // A list of detected languages together with confidence. + DetectedLanguages []*TextAnnotation_DetectedLanguage `protobuf:"bytes,1,rep,name=detected_languages,json=detectedLanguages,proto3" json:"detected_languages,omitempty"` + // Detected start or end of a text segment. 
+ DetectedBreak *TextAnnotation_DetectedBreak `protobuf:"bytes,2,opt,name=detected_break,json=detectedBreak,proto3" json:"detected_break,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_TextProperty) Reset() { *m = TextAnnotation_TextProperty{} } +func (m *TextAnnotation_TextProperty) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_TextProperty) ProtoMessage() {} +func (*TextAnnotation_TextProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{0, 2} +} +func (m *TextAnnotation_TextProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_TextProperty.Unmarshal(m, b) +} +func (m *TextAnnotation_TextProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_TextProperty.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_TextProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_TextProperty.Merge(dst, src) +} +func (m *TextAnnotation_TextProperty) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_TextProperty.Size(m) +} +func (m *TextAnnotation_TextProperty) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_TextProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_TextProperty proto.InternalMessageInfo + +func (m *TextAnnotation_TextProperty) GetDetectedLanguages() []*TextAnnotation_DetectedLanguage { + if m != nil { + return m.DetectedLanguages + } + return nil +} + +func (m *TextAnnotation_TextProperty) GetDetectedBreak() *TextAnnotation_DetectedBreak { + if m != nil { + return m.DetectedBreak + } + return nil +} + +// Detected page from OCR. +type Page struct { + // Additional information detected on the page. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Page width. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Width int32 `protobuf:"varint,2,opt,name=width,proto3" json:"width,omitempty"` + // Page height. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Height int32 `protobuf:"varint,3,opt,name=height,proto3" json:"height,omitempty"` + // List of blocks of text, images etc on this page. + Blocks []*Block `protobuf:"bytes,4,rep,name=blocks,proto3" json:"blocks,omitempty"` + // Confidence of the OCR results on the page. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{1} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Page) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Page) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *Page) GetBlocks() []*Block { + if m != nil { + return m.Blocks + } + return nil +} + +func (m *Page) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Logical element on the page. +type Block struct { + // Additional information detected for the block. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the block. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // + // * when the text is horizontal it might look like: + // + // 0----1 + // | | + // 3----2 + // + // * when it's rotated 180 degrees around the top-left corner it becomes: + // + // 2----3 + // | | + // 1----0 + // + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of paragraphs in this block (if this blocks is of type text). + Paragraphs []*Paragraph `protobuf:"bytes,3,rep,name=paragraphs,proto3" json:"paragraphs,omitempty"` + // Detected block type (text, image etc) for this block. + BlockType Block_BlockType `protobuf:"varint,4,opt,name=block_type,json=blockType,proto3,enum=google.cloud.vision.v1p3beta1.Block_BlockType" json:"block_type,omitempty"` + // Confidence of the OCR results on the block. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Block) Reset() { *m = Block{} } +func (m *Block) String() string { return proto.CompactTextString(m) } +func (*Block) ProtoMessage() {} +func (*Block) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{2} +} +func (m *Block) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Block.Unmarshal(m, b) +} +func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Block.Marshal(b, m, deterministic) +} +func (dst *Block) XXX_Merge(src proto.Message) { + xxx_messageInfo_Block.Merge(dst, src) +} +func (m *Block) XXX_Size() int { + return xxx_messageInfo_Block.Size(m) +} +func (m *Block) XXX_DiscardUnknown() { + xxx_messageInfo_Block.DiscardUnknown(m) +} + +var xxx_messageInfo_Block proto.InternalMessageInfo + +func (m *Block) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Block) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Block) GetParagraphs() []*Paragraph { + if m != nil { + return m.Paragraphs + } + return nil +} + +func (m *Block) GetBlockType() Block_BlockType { + if m != nil { + return m.BlockType + } + return Block_UNKNOWN +} + +func (m *Block) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Structural unit of text representing a number of words in certain order. +type Paragraph struct { + // Additional information detected for the paragraph. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the paragraph. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of words in this paragraph. + Words []*Word `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + // Confidence of the OCR results for the paragraph. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Paragraph) Reset() { *m = Paragraph{} } +func (m *Paragraph) String() string { return proto.CompactTextString(m) } +func (*Paragraph) ProtoMessage() {} +func (*Paragraph) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{3} +} +func (m *Paragraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Paragraph.Unmarshal(m, b) +} +func (m *Paragraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Paragraph.Marshal(b, m, deterministic) +} +func (dst *Paragraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_Paragraph.Merge(dst, src) +} +func (m *Paragraph) XXX_Size() int { + return xxx_messageInfo_Paragraph.Size(m) +} +func (m *Paragraph) XXX_DiscardUnknown() { + xxx_messageInfo_Paragraph.DiscardUnknown(m) +} + +var xxx_messageInfo_Paragraph proto.InternalMessageInfo + +func (m *Paragraph) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Paragraph) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Paragraph) GetWords() []*Word { + if m != nil { + return m.Words + } + return nil +} + +func (m *Paragraph) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A word representation. +type Word struct { + // Additional information detected for the word. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the word. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of symbols in the word. + // The order of the symbols follows the natural reading order. + Symbols []*Symbol `protobuf:"bytes,3,rep,name=symbols,proto3" json:"symbols,omitempty"` + // Confidence of the OCR results for the word. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Word) Reset() { *m = Word{} } +func (m *Word) String() string { return proto.CompactTextString(m) } +func (*Word) ProtoMessage() {} +func (*Word) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{4} +} +func (m *Word) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Word.Unmarshal(m, b) +} +func (m *Word) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Word.Marshal(b, m, deterministic) +} +func (dst *Word) XXX_Merge(src proto.Message) { + xxx_messageInfo_Word.Merge(dst, src) +} +func (m *Word) XXX_Size() int { + return xxx_messageInfo_Word.Size(m) +} +func (m *Word) XXX_DiscardUnknown() { + xxx_messageInfo_Word.DiscardUnknown(m) +} + +var xxx_messageInfo_Word proto.InternalMessageInfo + +func (m *Word) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Word) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Word) GetSymbols() []*Symbol { + if m != nil { + return m.Symbols + } + return nil +} + +func (m *Word) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A single symbol representation. +type Symbol struct { + // Additional information detected for the symbol. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the symbol. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // The actual UTF-8 representation of the symbol. + Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` + // Confidence of the OCR results for the symbol. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Symbol) Reset() { *m = Symbol{} } +func (m *Symbol) String() string { return proto.CompactTextString(m) } +func (*Symbol) ProtoMessage() {} +func (*Symbol) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_2b5b30276ebe092b, []int{5} +} +func (m *Symbol) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Symbol.Unmarshal(m, b) +} +func (m *Symbol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Symbol.Marshal(b, m, deterministic) +} +func (dst *Symbol) XXX_Merge(src proto.Message) { + xxx_messageInfo_Symbol.Merge(dst, src) +} +func (m *Symbol) XXX_Size() int { + return xxx_messageInfo_Symbol.Size(m) +} +func (m *Symbol) XXX_DiscardUnknown() { + xxx_messageInfo_Symbol.DiscardUnknown(m) +} + +var xxx_messageInfo_Symbol proto.InternalMessageInfo + +func (m *Symbol) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Symbol) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Symbol) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Symbol) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func init() { + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.vision.v1p3beta1.TextAnnotation") + proto.RegisterType((*TextAnnotation_DetectedLanguage)(nil), "google.cloud.vision.v1p3beta1.TextAnnotation.DetectedLanguage") + proto.RegisterType((*TextAnnotation_DetectedBreak)(nil), "google.cloud.vision.v1p3beta1.TextAnnotation.DetectedBreak") + proto.RegisterType((*TextAnnotation_TextProperty)(nil), "google.cloud.vision.v1p3beta1.TextAnnotation.TextProperty") + proto.RegisterType((*Page)(nil), "google.cloud.vision.v1p3beta1.Page") + proto.RegisterType((*Block)(nil), "google.cloud.vision.v1p3beta1.Block") + proto.RegisterType((*Paragraph)(nil), "google.cloud.vision.v1p3beta1.Paragraph") + proto.RegisterType((*Word)(nil), "google.cloud.vision.v1p3beta1.Word") + proto.RegisterType((*Symbol)(nil), "google.cloud.vision.v1p3beta1.Symbol") + proto.RegisterEnum("google.cloud.vision.v1p3beta1.TextAnnotation_DetectedBreak_BreakType", TextAnnotation_DetectedBreak_BreakType_name, TextAnnotation_DetectedBreak_BreakType_value) + proto.RegisterEnum("google.cloud.vision.v1p3beta1.Block_BlockType", Block_BlockType_name, Block_BlockType_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/text_annotation.proto", fileDescriptor_text_annotation_2b5b30276ebe092b) +} + +var fileDescriptor_text_annotation_2b5b30276ebe092b = []byte{ + // 775 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0xd3, 0x48, + 0x14, 0x5f, 0x27, 0x76, 0x1a, 0xbf, 0xb4, 0x91, 0x77, 0x76, 0xb5, 0x8a, 0xb2, 0xbb, 0xa8, 0xa4, + 0x20, 0x55, 0x02, 0x39, 0x6a, 0x7a, 0x2a, 0x45, 0xa0, 0x38, 0xb5, 0xd4, 0xaa, 0x21, 0xb5, 0xa6, + 0x09, 0xa5, 0x5c, 0x2c, 0xff, 0x99, 0x3a, 0x56, 0x13, 0x8f, 0x65, 0xbb, 0x6d, 0x72, 0xe5, 0x8a, + 0x04, 0x5f, 0x88, 0x2f, 0x83, 0xc4, 0x09, 0xf1, 0x01, 0x38, 0x22, 0x8f, 0xed, 0x34, 0x09, 0xa2, + 0xe6, 0x8f, 0x38, 0xf4, 0x12, 0xcd, 0x7b, 0x79, 0xbf, 0x37, 0xef, 0xf7, 0x7b, 0xf3, 0x3c, 0x03, + 0xdb, 0x0e, 0xa5, 0xce, 0x88, 0x34, 0xad, 0x11, 0xbd, 0xb0, 0x9b, 0x97, 0x6e, 
0xe8, 0x52, 0xaf, + 0x79, 0xb9, 0xe5, 0x6f, 0x9b, 0x24, 0x32, 0xb6, 0x9a, 0x11, 0x99, 0x44, 0xba, 0xe1, 0x79, 0x34, + 0x32, 0x22, 0x97, 0x7a, 0xb2, 0x1f, 0xd0, 0x88, 0xa2, 0xff, 0x13, 0x90, 0xcc, 0x40, 0x72, 0x02, + 0x92, 0x67, 0xa0, 0xfa, 0x7f, 0x69, 0x4e, 0xc3, 0x77, 0x9b, 0xd7, 0xd8, 0x30, 0x01, 0xd7, 0x1f, + 0xde, 0xbc, 0xa3, 0x43, 0xe8, 0x98, 0x44, 0xc1, 0x34, 0x89, 0x6e, 0xbc, 0x16, 0xa0, 0xda, 0x27, + 0x93, 0xa8, 0x3d, 0xcb, 0x83, 0x76, 0x40, 0xf0, 0x0d, 0x87, 0x84, 0x35, 0x6e, 0xbd, 0xb8, 0x59, + 0x69, 0x6d, 0xc8, 0x37, 0x56, 0x23, 0x6b, 0x86, 0x43, 0x70, 0x82, 0x40, 0x08, 0xf8, 0x98, 0x51, + 0xad, 0xb0, 0xce, 0x6d, 0x8a, 0x98, 0xad, 0xeb, 0x27, 0x20, 0xed, 0x91, 0x88, 0x58, 0x11, 0xb1, + 0xbb, 0x86, 0xe7, 0x5c, 0x18, 0x0e, 0x41, 0x1b, 0xb0, 0x36, 0x4a, 0xd7, 0xba, 0x45, 0x6d, 0x52, + 0xe3, 0x18, 0x60, 0x35, 0x73, 0x76, 0xa8, 0x4d, 0xd0, 0x1d, 0x00, 0x8b, 0x7a, 0x67, 0xae, 0x4d, + 0x3c, 0x8b, 0xb0, 0x94, 0x05, 0x3c, 0xe7, 0xa9, 0x7f, 0xe2, 0x60, 0x2d, 0xcb, 0xac, 0x04, 0xc4, + 0x38, 0x47, 0xa7, 0xc0, 0x47, 0x53, 0x3f, 0xc9, 0x56, 0x6d, 0xa9, 0x39, 0x85, 0x2f, 0xd2, 0x96, + 0x17, 0x52, 0xc9, 0xec, 0xb7, 0x3f, 0xf5, 0x09, 0x66, 0x29, 0xd1, 0xbf, 0x20, 0xba, 0xa1, 0xee, + 0x07, 0xe4, 0xcc, 0x9d, 0xb0, 0x5a, 0xca, 0xb8, 0xec, 0x86, 0x1a, 0xb3, 0x1b, 0x16, 0x88, 0xb3, + 0x78, 0x54, 0x81, 0x95, 0x41, 0xef, 0xb0, 0x77, 0x74, 0xd2, 0x93, 0xfe, 0x40, 0x22, 0x08, 0xc7, + 0x5a, 0xbb, 0xa3, 0x4a, 0x1c, 0xaa, 0x02, 0x1c, 0x0f, 0xb0, 0xaa, 0x27, 0x76, 0x01, 0x21, 0xa8, + 0xaa, 0x47, 0x5d, 0x7d, 0xce, 0x57, 0x44, 0x00, 0xa5, 0xfd, 0x53, 0x6d, 0x5f, 0xed, 0x49, 0x7c, + 0x1c, 0xdf, 0x3d, 0xe8, 0xa9, 0xba, 0x82, 0xd5, 0xf6, 0xa1, 0x24, 0xd4, 0xdf, 0x73, 0xb0, 0x1a, + 0x97, 0xac, 0x05, 0xd4, 0x27, 0x41, 0x34, 0x45, 0x63, 0x40, 0x76, 0x5a, 0xb3, 0x9e, 0x09, 0x97, + 0x35, 0xed, 0xc9, 0xcf, 0x71, 0xcf, 0x1a, 0x84, 0xff, 0xb4, 0x97, 0x3c, 0x21, 0x32, 0xa1, 0x3a, + 0xdb, 0xce, 0x8c, 0xd9, 0x32, 0x19, 0x2a, 0xad, 0xdd, 0x5f, 0x90, 0x19, 0xaf, 0xd9, 0xf3, 0x66, + 0xe3, 0x23, 0x07, 0x7c, 0x7c, 0x9e, 0xd0, 0x73, 0x28, 0xfb, 0x29, 0x4f, 0xd6, 0xcd, 0x4a, 0xeb, + 0xd1, 0x8f, 0x6d, 0x33, 0xaf, 0x14, 0x9e, 0xe5, 0x42, 0x7f, 0x83, 0x70, 0xe5, 0xda, 0xd1, 0x90, + 0xd5, 0x2e, 0xe0, 0xc4, 0x40, 0xff, 0x40, 0x69, 0x48, 0x5c, 0x67, 0x18, 0xd5, 0x8a, 0xcc, 0x9d, + 0x5a, 0xe8, 0x31, 0x94, 0xcc, 0x11, 0xb5, 0xce, 0xc3, 0x1a, 0xcf, 0x54, 0xbd, 0x97, 0x53, 0x83, + 0x12, 0x07, 0xe3, 0x14, 0xb3, 0x74, 0x7e, 0x85, 0xe5, 0xf3, 0xdb, 0x78, 0x57, 0x04, 0x81, 0x21, + 0x7e, 0x1b, 0xdb, 0x1e, 0xac, 0x9a, 0xf4, 0xc2, 0xb3, 0x5d, 0xcf, 0xd1, 0x4d, 0x3a, 0x49, 0x1b, + 0xf6, 0x20, 0x8f, 0x45, 0x0a, 0xd1, 0xe8, 0x68, 0x8a, 0x2b, 0x59, 0x02, 0x85, 0x4e, 0xd0, 0x3e, + 0x80, 0x6f, 0x04, 0x86, 0x13, 0x18, 0xfe, 0x30, 0xac, 0x15, 0x99, 0x26, 0x9b, 0xb9, 0x9f, 0x87, + 0x14, 0x80, 0xe7, 0xb0, 0xe8, 0x19, 0x00, 0x53, 0x49, 0x67, 0xf3, 0xca, 0xb3, 0x79, 0x95, 0xbf, + 0x47, 0xdd, 0xe4, 0x97, 0x0d, 0xa6, 0x68, 0x66, 0xcb, 0x5c, 0xa9, 0x31, 0x88, 0x33, 0xdc, 0xe2, + 0x80, 0x96, 0x81, 0xef, 0xab, 0x2f, 0xfa, 0x12, 0x17, 0x8f, 0x6a, 0xbf, 0xad, 0x74, 0xe3, 0xd1, + 0xac, 0xc0, 0x8a, 0x76, 0xd0, 0xe9, 0x0f, 0x70, 0x3c, 0x93, 0x22, 0x08, 0x78, 0xd0, 0x55, 0xb1, + 0xc4, 0xc7, 0x7e, 0xa5, 0x8d, 0x3b, 0x47, 0x7b, 0xaa, 0x24, 0x34, 0xde, 0x14, 0x40, 0x9c, 0x91, + 0xbb, 0x35, 0x2d, 0xdc, 0x01, 0xe1, 0x8a, 0x06, 0x76, 0xd6, 0xbd, 0xbc, 0x8f, 0xfb, 0x09, 0x0d, + 0x6c, 0x9c, 0x20, 0x96, 0x44, 0xe6, 0xbf, 0x12, 0xf9, 0x6d, 0x01, 0xf8, 0x38, 0xfe, 0xd6, 0x68, + 0xf1, 0x14, 0x56, 0xc2, 0xe9, 0xd8, 0xa4, 0xa3, 0x4c, 0x8d, 0xfb, 0x39, 0xa9, 0x8e, 0x59, 0x34, + 0xce, 
0x50, 0xb9, 0x8a, 0x7c, 0xe0, 0xa0, 0x94, 0x60, 0x6e, 0x8d, 0x26, 0xd9, 0x0d, 0x5e, 0xbc, + 0xbe, 0xc1, 0xf3, 0x68, 0x2a, 0xaf, 0x38, 0xb8, 0x6b, 0xd1, 0xf1, 0xcd, 0x7b, 0x2a, 0x7f, 0x2d, + 0x12, 0xd2, 0xe2, 0xe7, 0x87, 0xc6, 0xbd, 0xec, 0xa4, 0x28, 0x87, 0xc6, 0x77, 0x98, 0x4c, 0x03, + 0xa7, 0xe9, 0x10, 0x8f, 0x3d, 0x4e, 0x9a, 0xc9, 0x5f, 0x86, 0xef, 0x86, 0xdf, 0x78, 0xcd, 0xec, + 0x26, 0x8e, 0xcf, 0x1c, 0x67, 0x96, 0x18, 0x64, 0xfb, 0x4b, 0x00, 0x00, 0x00, 0xff, 0xff, 0x24, + 0x7c, 0x70, 0x3a, 0x71, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/web_detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/web_detection.pb.go new file mode 100644 index 0000000..fe1d777 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1/web_detection.pb.go @@ -0,0 +1,395 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p3beta1/web_detection.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p3beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Relevant information for the image from the Internet. +type WebDetection struct { + // Deduced entities from similar images on the Internet. + WebEntities []*WebDetection_WebEntity `protobuf:"bytes,1,rep,name=web_entities,json=webEntities,proto3" json:"web_entities,omitempty"` + // Fully matching images from the Internet. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,2,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images from the Internet. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,3,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + // Web pages containing the matching images from the Internet. + PagesWithMatchingImages []*WebDetection_WebPage `protobuf:"bytes,4,rep,name=pages_with_matching_images,json=pagesWithMatchingImages,proto3" json:"pages_with_matching_images,omitempty"` + // The visually similar image results. + VisuallySimilarImages []*WebDetection_WebImage `protobuf:"bytes,6,rep,name=visually_similar_images,json=visuallySimilarImages,proto3" json:"visually_similar_images,omitempty"` + // Best guess text labels for the request image. 
+ BestGuessLabels []*WebDetection_WebLabel `protobuf:"bytes,8,rep,name=best_guess_labels,json=bestGuessLabels,proto3" json:"best_guess_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection) Reset() { *m = WebDetection{} } +func (m *WebDetection) String() string { return proto.CompactTextString(m) } +func (*WebDetection) ProtoMessage() {} +func (*WebDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_589fe4db54ca6b50, []int{0} +} +func (m *WebDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection.Unmarshal(m, b) +} +func (m *WebDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection.Marshal(b, m, deterministic) +} +func (dst *WebDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection.Merge(dst, src) +} +func (m *WebDetection) XXX_Size() int { + return xxx_messageInfo_WebDetection.Size(m) +} +func (m *WebDetection) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection proto.InternalMessageInfo + +func (m *WebDetection) GetWebEntities() []*WebDetection_WebEntity { + if m != nil { + return m.WebEntities + } + return nil +} + +func (m *WebDetection) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func (m *WebDetection) GetPagesWithMatchingImages() []*WebDetection_WebPage { + if m != nil { + return m.PagesWithMatchingImages + } + return nil +} + +func (m *WebDetection) GetVisuallySimilarImages() []*WebDetection_WebImage { + if m != nil { + return m.VisuallySimilarImages + } + return nil +} + +func (m *WebDetection) GetBestGuessLabels() []*WebDetection_WebLabel { + if m != nil { + return m.BestGuessLabels + } + return nil +} + +// Entity deduced from similar images on the Internet. +type WebDetection_WebEntity struct { + // Opaque entity ID. + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Overall relevancy score for the entity. + // Not normalized and not comparable across different image queries. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Canonical description of the entity, in English. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebEntity) Reset() { *m = WebDetection_WebEntity{} } +func (m *WebDetection_WebEntity) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebEntity) ProtoMessage() {} +func (*WebDetection_WebEntity) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_589fe4db54ca6b50, []int{0, 0} +} +func (m *WebDetection_WebEntity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebEntity.Unmarshal(m, b) +} +func (m *WebDetection_WebEntity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebEntity.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebEntity) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebEntity.Merge(dst, src) +} +func (m *WebDetection_WebEntity) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebEntity.Size(m) +} +func (m *WebDetection_WebEntity) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebEntity.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebEntity proto.InternalMessageInfo + +func (m *WebDetection_WebEntity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *WebDetection_WebEntity) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebEntity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Metadata for online images. +type WebDetection_WebImage struct { + // The result image URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the image. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebImage) Reset() { *m = WebDetection_WebImage{} } +func (m *WebDetection_WebImage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebImage) ProtoMessage() {} +func (*WebDetection_WebImage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_589fe4db54ca6b50, []int{0, 1} +} +func (m *WebDetection_WebImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebImage.Unmarshal(m, b) +} +func (m *WebDetection_WebImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebImage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebImage.Merge(dst, src) +} +func (m *WebDetection_WebImage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebImage.Size(m) +} +func (m *WebDetection_WebImage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebImage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebImage proto.InternalMessageInfo + +func (m *WebDetection_WebImage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebImage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Metadata for web pages. +type WebDetection_WebPage struct { + // The result web page URL. 
+ Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the web page. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Title for the web page, may contain HTML markups. + PageTitle string `protobuf:"bytes,3,opt,name=page_title,json=pageTitle,proto3" json:"page_title,omitempty"` + // Fully matching images on the page. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,4,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images on the page. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its + // crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,5,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebPage) Reset() { *m = WebDetection_WebPage{} } +func (m *WebDetection_WebPage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebPage) ProtoMessage() {} +func (*WebDetection_WebPage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_589fe4db54ca6b50, []int{0, 2} +} +func (m *WebDetection_WebPage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebPage.Unmarshal(m, b) +} +func (m *WebDetection_WebPage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebPage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebPage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebPage.Merge(dst, src) +} +func (m *WebDetection_WebPage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebPage.Size(m) +} +func (m *WebDetection_WebPage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebPage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebPage proto.InternalMessageInfo + +func (m *WebDetection_WebPage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebPage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebPage) GetPageTitle() string { + if m != nil { + return m.PageTitle + } + return "" +} + +func (m *WebDetection_WebPage) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection_WebPage) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +// Label to provide extra metadata for the web detection. +type WebDetection_WebLabel struct { + // Label for extra metadata. + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. 
+ LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebLabel) Reset() { *m = WebDetection_WebLabel{} } +func (m *WebDetection_WebLabel) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebLabel) ProtoMessage() {} +func (*WebDetection_WebLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_589fe4db54ca6b50, []int{0, 3} +} +func (m *WebDetection_WebLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebLabel.Unmarshal(m, b) +} +func (m *WebDetection_WebLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebLabel.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebLabel.Merge(dst, src) +} +func (m *WebDetection_WebLabel) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebLabel.Size(m) +} +func (m *WebDetection_WebLabel) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebLabel proto.InternalMessageInfo + +func (m *WebDetection_WebLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *WebDetection_WebLabel) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func init() { + proto.RegisterType((*WebDetection)(nil), "google.cloud.vision.v1p3beta1.WebDetection") + proto.RegisterType((*WebDetection_WebEntity)(nil), "google.cloud.vision.v1p3beta1.WebDetection.WebEntity") + proto.RegisterType((*WebDetection_WebImage)(nil), "google.cloud.vision.v1p3beta1.WebDetection.WebImage") + proto.RegisterType((*WebDetection_WebPage)(nil), "google.cloud.vision.v1p3beta1.WebDetection.WebPage") + proto.RegisterType((*WebDetection_WebLabel)(nil), "google.cloud.vision.v1p3beta1.WebDetection.WebLabel") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p3beta1/web_detection.proto", fileDescriptor_web_detection_589fe4db54ca6b50) +} + +var fileDescriptor_web_detection_589fe4db54ca6b50 = []byte{ + // 511 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x95, 0x76, 0x1b, 0x8d, 0x5b, 0x04, 0xb3, 0x86, 0x16, 0x05, 0x26, 0x15, 0xb8, 0xf4, + 0x94, 0xa8, 0x2b, 0x9c, 0xb8, 0x6d, 0x4c, 0x68, 0x12, 0x48, 0x55, 0x40, 0x1a, 0xe2, 0x92, 0x39, + 0x89, 0x97, 0xbe, 0x92, 0x1b, 0x47, 0xb1, 0xd3, 0xaa, 0x37, 0x4e, 0x7c, 0x14, 0x3e, 0x23, 0x47, + 0xf4, 0xda, 0xee, 0x54, 0x51, 0x36, 0x31, 0x86, 0xb8, 0xf9, 0x7d, 0xac, 0xe7, 0xf9, 0xd9, 0xaf, + 0xff, 0x90, 0x71, 0x29, 0x65, 0x29, 0x78, 0x9c, 0x0b, 0xd9, 0x16, 0xf1, 0x02, 0x14, 0xc8, 0x2a, + 0x5e, 0x8c, 0xeb, 0x49, 0xc6, 0x35, 0x1b, 0xc7, 0x4b, 0x9e, 0xa5, 0x05, 0xd7, 0x3c, 0xd7, 0x20, + 0xab, 0xa8, 0x6e, 0xa4, 0x96, 0xf4, 0xc8, 0x5a, 0x22, 0x63, 0x89, 0xac, 0x25, 0xba, 0xb6, 0x84, + 0xcf, 0x5c, 0x22, 0xab, 0x21, 0x66, 0x55, 0x25, 0x35, 0x43, 0xaf, 0xb2, 0xe6, 0x17, 0xdf, 0x7c, + 0x32, 0xb8, 0xe0, 0xd9, 0xdb, 0x75, 0x26, 0xfd, 0x4c, 0x06, 0x08, 0xe1, 0x95, 0x06, 0x0d, 0x5c, + 0x05, 0xde, 0xb0, 0x3b, 0xea, 0x1f, 0xbf, 0x8e, 0x6e, 0x85, 0x44, 0x9b, 0x11, 0x58, 0x9c, 0xa1, + 0x7d, 0x95, 0xf4, 0x97, 0x6e, 0x08, 0x5c, 0xd1, 0x2b, 0x72, 0x70, 0xd5, 0x0a, 0x91, 0xce, 0x99, + 0xce, 0x67, 0x50, 0x95, 0x29, 0xcc, 0x59, 0xc9, 
0x55, 0xd0, 0x31, 0x84, 0x57, 0x77, 0x24, 0x9c, + 0xa3, 0x39, 0xa1, 0x98, 0xf8, 0xc1, 0x05, 0x1a, 0x49, 0x51, 0x41, 0x0e, 0x6b, 0xd6, 0x68, 0x60, + 0xdb, 0xa8, 0xee, 0x3d, 0x50, 0x4f, 0x5c, 0xe8, 0x2f, 0xb4, 0x9a, 0x84, 0x35, 0x0e, 0xd2, 0x25, + 0xe8, 0xd9, 0x16, 0x70, 0xc7, 0x00, 0x27, 0x77, 0x04, 0x4e, 0x91, 0x77, 0x68, 0x62, 0x2f, 0x40, + 0xcf, 0xb6, 0xf7, 0xb7, 0x00, 0xd5, 0x32, 0x21, 0x56, 0xa9, 0x82, 0x39, 0x08, 0xd6, 0xac, 0x71, + 0x7b, 0xf7, 0xd9, 0xdf, 0x3a, 0xf4, 0xa3, 0xcd, 0x74, 0xb4, 0x4b, 0xb2, 0x9f, 0x71, 0xa5, 0xd3, + 0xb2, 0xe5, 0x4a, 0xa5, 0x82, 0x65, 0x5c, 0xa8, 0xa0, 0xf7, 0x57, 0x9c, 0xf7, 0x68, 0x4e, 0x1e, + 0x61, 0xdc, 0x3b, 0x4c, 0x33, 0xb5, 0x0a, 0x2f, 0x89, 0x7f, 0x7d, 0x63, 0xe8, 0x53, 0xe2, 0x9b, + 0xab, 0xb7, 0x4a, 0xa1, 0x08, 0xbc, 0xa1, 0x37, 0xf2, 0x93, 0x9e, 0x15, 0xce, 0x0b, 0x7a, 0x40, + 0x76, 0x55, 0x2e, 0x1b, 0x1e, 0x74, 0x86, 0xde, 0xa8, 0x93, 0xd8, 0x82, 0x0e, 0x49, 0xbf, 0xe0, + 0x2a, 0x6f, 0xa0, 0x46, 0x50, 0xd0, 0x35, 0xa6, 0x4d, 0x29, 0x3c, 0x26, 0xbd, 0xf5, 0x36, 0xe9, + 0x63, 0xd2, 0x6d, 0x1b, 0xe1, 0xa2, 0x71, 0xf8, 0xfb, 0xd4, 0xf0, 0x7b, 0x87, 0x3c, 0x70, 0x47, + 0xf1, 0xa7, 0x1e, 0x7a, 0x44, 0x08, 0x1e, 0x5a, 0xaa, 0x41, 0x0b, 0xee, 0x16, 0xe2, 0xa3, 0xf2, + 0x09, 0x85, 0x1b, 0x1f, 0xc0, 0xce, 0xff, 0x7b, 0x00, 0xbb, 0xff, 0xfc, 0x01, 0x84, 0x67, 0xa6, + 0xb9, 0xe6, 0x2c, 0xb1, 0x2d, 0xe6, 0x86, 0xb8, 0x56, 0xd9, 0x82, 0xbe, 0x24, 0x0f, 0x05, 0xab, + 0xca, 0x16, 0x5b, 0x93, 0xcb, 0xc2, 0x36, 0xcd, 0x4f, 0x06, 0x6b, 0xf1, 0x54, 0x16, 0xfc, 0xe4, + 0xab, 0x47, 0x9e, 0xe7, 0x72, 0x7e, 0xfb, 0xca, 0x4e, 0xf6, 0x37, 0x97, 0x36, 0xc5, 0x1f, 0x6c, + 0xea, 0x7d, 0x39, 0x75, 0x9e, 0x52, 0x62, 0x62, 0x24, 0x9b, 0x32, 0x2e, 0x79, 0x65, 0xfe, 0xb7, + 0xd8, 0x4e, 0xb1, 0x1a, 0xd4, 0x0d, 0x5f, 0xea, 0x1b, 0x2b, 0xfc, 0xf0, 0xbc, 0x6c, 0xcf, 0x58, + 0x26, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xb6, 0x81, 0x34, 0x84, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/geometry.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/geometry.pb.go new file mode 100644 index 0000000..24cb0e8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/geometry.pb.go @@ -0,0 +1,265 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/geometry.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +type Vertex struct { + // X coordinate. + X int32 `protobuf:"varint,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. 
+ Y int32 `protobuf:"varint,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vertex) Reset() { *m = Vertex{} } +func (m *Vertex) String() string { return proto.CompactTextString(m) } +func (*Vertex) ProtoMessage() {} +func (*Vertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_4a9669950dfd9675, []int{0} +} +func (m *Vertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vertex.Unmarshal(m, b) +} +func (m *Vertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vertex.Marshal(b, m, deterministic) +} +func (dst *Vertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vertex.Merge(dst, src) +} +func (m *Vertex) XXX_Size() int { + return xxx_messageInfo_Vertex.Size(m) +} +func (m *Vertex) XXX_DiscardUnknown() { + xxx_messageInfo_Vertex.DiscardUnknown(m) +} + +var xxx_messageInfo_Vertex proto.InternalMessageInfo + +func (m *Vertex) GetX() int32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Vertex) GetY() int32 { + if m != nil { + return m.Y + } + return 0 +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +type NormalizedVertex struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NormalizedVertex) Reset() { *m = NormalizedVertex{} } +func (m *NormalizedVertex) String() string { return proto.CompactTextString(m) } +func (*NormalizedVertex) ProtoMessage() {} +func (*NormalizedVertex) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_4a9669950dfd9675, []int{1} +} +func (m *NormalizedVertex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NormalizedVertex.Unmarshal(m, b) +} +func (m *NormalizedVertex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NormalizedVertex.Marshal(b, m, deterministic) +} +func (dst *NormalizedVertex) XXX_Merge(src proto.Message) { + xxx_messageInfo_NormalizedVertex.Merge(dst, src) +} +func (m *NormalizedVertex) XXX_Size() int { + return xxx_messageInfo_NormalizedVertex.Size(m) +} +func (m *NormalizedVertex) XXX_DiscardUnknown() { + xxx_messageInfo_NormalizedVertex.DiscardUnknown(m) +} + +var xxx_messageInfo_NormalizedVertex proto.InternalMessageInfo + +func (m *NormalizedVertex) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *NormalizedVertex) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +// A bounding polygon for the detected image annotation. +type BoundingPoly struct { + // The bounding polygon vertices. + Vertices []*Vertex `protobuf:"bytes,1,rep,name=vertices,proto3" json:"vertices,omitempty"` + // The bounding polygon normalized vertices. 
+ NormalizedVertices []*NormalizedVertex `protobuf:"bytes,2,rep,name=normalized_vertices,json=normalizedVertices,proto3" json:"normalized_vertices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingPoly) Reset() { *m = BoundingPoly{} } +func (m *BoundingPoly) String() string { return proto.CompactTextString(m) } +func (*BoundingPoly) ProtoMessage() {} +func (*BoundingPoly) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_4a9669950dfd9675, []int{2} +} +func (m *BoundingPoly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingPoly.Unmarshal(m, b) +} +func (m *BoundingPoly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingPoly.Marshal(b, m, deterministic) +} +func (dst *BoundingPoly) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingPoly.Merge(dst, src) +} +func (m *BoundingPoly) XXX_Size() int { + return xxx_messageInfo_BoundingPoly.Size(m) +} +func (m *BoundingPoly) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingPoly.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingPoly proto.InternalMessageInfo + +func (m *BoundingPoly) GetVertices() []*Vertex { + if m != nil { + return m.Vertices + } + return nil +} + +func (m *BoundingPoly) GetNormalizedVertices() []*NormalizedVertex { + if m != nil { + return m.NormalizedVertices + } + return nil +} + +// A 3D position in the image, used primarily for Face detection landmarks. +// A valid Position must have both x and y coordinates. +// The position coordinates are in the same scale as the original image. +type Position struct { + // X coordinate. + X float32 `protobuf:"fixed32,1,opt,name=x,proto3" json:"x,omitempty"` + // Y coordinate. + Y float32 `protobuf:"fixed32,2,opt,name=y,proto3" json:"y,omitempty"` + // Z coordinate (or depth). 
+ Z float32 `protobuf:"fixed32,3,opt,name=z,proto3" json:"z,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_geometry_4a9669950dfd9675, []int{3} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetX() float32 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Position) GetY() float32 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Position) GetZ() float32 { + if m != nil { + return m.Z + } + return 0 +} + +func init() { + proto.RegisterType((*Vertex)(nil), "google.cloud.vision.v1p4beta1.Vertex") + proto.RegisterType((*NormalizedVertex)(nil), "google.cloud.vision.v1p4beta1.NormalizedVertex") + proto.RegisterType((*BoundingPoly)(nil), "google.cloud.vision.v1p4beta1.BoundingPoly") + proto.RegisterType((*Position)(nil), "google.cloud.vision.v1p4beta1.Position") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/geometry.proto", fileDescriptor_geometry_4a9669950dfd9675) +} + +var fileDescriptor_geometry_4a9669950dfd9675 = []byte{ + // 311 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xb1, 0x6a, 0xf3, 0x30, + 0x14, 0x85, 0x91, 0xf3, 0xff, 0x21, 0xa8, 0x29, 0x14, 0x77, 0x31, 0xa5, 0x85, 0xd4, 0xb4, 0x90, + 0xa1, 0x48, 0xa4, 0xcd, 0xd6, 0xa9, 0xce, 0x90, 0x2d, 0x18, 0x0f, 0x19, 0xba, 0xb4, 0x8a, 0x2d, + 0x84, 0xc0, 0xd6, 0x35, 0xb2, 0x62, 0x62, 0xaf, 0x7d, 0x93, 0xbe, 0x42, 0x5f, 0xae, 0x63, 0xb1, + 0x65, 0x0c, 0x09, 0xd4, 0x1d, 0xcf, 0xd5, 0x77, 0x8f, 0x8e, 0xee, 0x15, 0x7e, 0x10, 0x00, 0x22, + 0xe5, 0x34, 0x4e, 0x61, 0x9f, 0xd0, 0x52, 0x16, 0x12, 0x14, 0x2d, 0x17, 0xf9, 0x72, 0xc7, 0x0d, + 0x5b, 0x50, 0xc1, 0x21, 0xe3, 0x46, 0x57, 0x24, 0xd7, 0x60, 0xc0, 0xbd, 0xb1, 0x34, 0x69, 0x69, + 0x62, 0x69, 0xd2, 0xd3, 0x57, 0xd7, 0x9d, 0x19, 0xcb, 0x25, 0x65, 0x4a, 0x81, 0x61, 0x46, 0x82, + 0x2a, 0x6c, 0xb3, 0x7f, 0x87, 0xc7, 0x5b, 0xae, 0x0d, 0x3f, 0xb8, 0x53, 0x8c, 0x0e, 0x1e, 0x9a, + 0xa1, 0xf9, 0xff, 0x08, 0xb5, 0xaa, 0xf2, 0x1c, 0xab, 0x2a, 0x9f, 0xe0, 0x8b, 0x0d, 0xe8, 0x8c, + 0xa5, 0xb2, 0xe6, 0xc9, 0x29, 0xef, 0x1c, 0xf1, 0x4e, 0xc3, 0x7f, 0x21, 0x3c, 0x0d, 0x60, 0xaf, + 0x12, 0xa9, 0x44, 0x08, 0x69, 0xe5, 0xbe, 0xe0, 0x49, 0xc9, 0xb5, 0x91, 0x31, 0x2f, 0x3c, 0x34, + 0x1b, 0xcd, 0xcf, 0x1e, 0xef, 0xc9, 0x60, 0x6c, 0x62, 0x6f, 0x89, 0xfa, 0x36, 0xf7, 0x1d, 0x5f, + 0xaa, 0x3e, 0xc3, 0x5b, 0xef, 0xe6, 0xb4, 0x6e, 0xf4, 0x0f, 0xb7, 0xd3, 0xf4, 0x91, 0xab, 0x8e, + 0x2a, 0x8d, 0x95, 0xbf, 0xc4, 0x93, 0x10, 0x0a, 0xd9, 0x8c, 0x67, 0xe8, 0x75, 0x8d, 0xaa, 0xbd, + 0x91, 0x55, 0x75, 0xf0, 0x81, 0xf0, 0x6d, 0x0c, 0xd9, 0x70, 0x80, 0xe0, 0x7c, 0xdd, 0x2d, 0x2d, + 0x6c, 0xc6, 0x1e, 0xa2, 0xd7, 0x55, 0xc7, 0x0b, 0x48, 0x99, 0x12, 0x04, 0xb4, 0xa0, 
0x82, 0xab, + 0x76, 0x29, 0xd4, 0x1e, 0xb1, 0x5c, 0x16, 0xbf, 0x7c, 0x81, 0x67, 0x5b, 0xf8, 0x46, 0xe8, 0xd3, + 0xf9, 0xb7, 0x5e, 0x6d, 0x37, 0xbb, 0x71, 0xdb, 0xf9, 0xf4, 0x13, 0x00, 0x00, 0xff, 0xff, 0xef, + 0xec, 0x6e, 0xa2, 0x3b, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/image_annotator.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/image_annotator.pb.go new file mode 100644 index 0000000..120e85c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/image_annotator.pb.go @@ -0,0 +1,3215 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/image_annotator.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import color "google.golang.org/genproto/googleapis/type/color" +import latlng "google.golang.org/genproto/googleapis/type/latlng" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +type Likelihood int32 + +const ( + // Unknown likelihood. + Likelihood_UNKNOWN Likelihood = 0 + // It is very unlikely that the image belongs to the specified vertical. + Likelihood_VERY_UNLIKELY Likelihood = 1 + // It is unlikely that the image belongs to the specified vertical. + Likelihood_UNLIKELY Likelihood = 2 + // It is possible that the image belongs to the specified vertical. + Likelihood_POSSIBLE Likelihood = 3 + // It is likely that the image belongs to the specified vertical. + Likelihood_LIKELY Likelihood = 4 + // It is very likely that the image belongs to the specified vertical. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "UNKNOWN", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "UNKNOWN": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{0} +} + +// Type of Google Cloud Vision API feature to be extracted. +type Feature_Type int32 + +const ( + // Unspecified feature type. + Feature_TYPE_UNSPECIFIED Feature_Type = 0 + // Run face detection. + Feature_FACE_DETECTION Feature_Type = 1 + // Run landmark detection. 
+ Feature_LANDMARK_DETECTION Feature_Type = 2 + // Run logo detection. + Feature_LOGO_DETECTION Feature_Type = 3 + // Run label detection. + Feature_LABEL_DETECTION Feature_Type = 4 + // Run text detection / optical character recognition (OCR). Text detection + // is optimized for areas of text within a larger image; if the image is + // a document, use `DOCUMENT_TEXT_DETECTION` instead. + Feature_TEXT_DETECTION Feature_Type = 5 + // Run dense text document OCR. Takes precedence when both + // `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. + Feature_DOCUMENT_TEXT_DETECTION Feature_Type = 11 + // Run Safe Search to detect potentially unsafe + // or undesirable content. + Feature_SAFE_SEARCH_DETECTION Feature_Type = 6 + // Compute a set of image properties, such as the + // image's dominant colors. + Feature_IMAGE_PROPERTIES Feature_Type = 7 + // Run crop hints. + Feature_CROP_HINTS Feature_Type = 9 + // Run web detection. + Feature_WEB_DETECTION Feature_Type = 10 + // Run Product Search. + Feature_PRODUCT_SEARCH Feature_Type = 12 + // Run localizer for object detection. + Feature_OBJECT_LOCALIZATION Feature_Type = 19 +) + +var Feature_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "FACE_DETECTION", + 2: "LANDMARK_DETECTION", + 3: "LOGO_DETECTION", + 4: "LABEL_DETECTION", + 5: "TEXT_DETECTION", + 11: "DOCUMENT_TEXT_DETECTION", + 6: "SAFE_SEARCH_DETECTION", + 7: "IMAGE_PROPERTIES", + 9: "CROP_HINTS", + 10: "WEB_DETECTION", + 12: "PRODUCT_SEARCH", + 19: "OBJECT_LOCALIZATION", +} +var Feature_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "FACE_DETECTION": 1, + "LANDMARK_DETECTION": 2, + "LOGO_DETECTION": 3, + "LABEL_DETECTION": 4, + "TEXT_DETECTION": 5, + "DOCUMENT_TEXT_DETECTION": 11, + "SAFE_SEARCH_DETECTION": 6, + "IMAGE_PROPERTIES": 7, + "CROP_HINTS": 9, + "WEB_DETECTION": 10, + "PRODUCT_SEARCH": 12, + "OBJECT_LOCALIZATION": 19, +} + +func (x Feature_Type) String() string { + return proto.EnumName(Feature_Type_name, int32(x)) +} +func (Feature_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{0, 0} +} + +// Face landmark (feature) type. +// Left and right are defined from the vantage of the viewer of the image +// without considering mirror projections typical of photos. So, `LEFT_EYE`, +// typically, is the person's right eye. +type FaceAnnotation_Landmark_Type int32 + +const ( + // Unknown face landmark detected. Should not be filled. + FaceAnnotation_Landmark_UNKNOWN_LANDMARK FaceAnnotation_Landmark_Type = 0 + // Left eye. + FaceAnnotation_Landmark_LEFT_EYE FaceAnnotation_Landmark_Type = 1 + // Right eye. + FaceAnnotation_Landmark_RIGHT_EYE FaceAnnotation_Landmark_Type = 2 + // Left of left eyebrow. + FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 3 + // Right of left eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW FaceAnnotation_Landmark_Type = 4 + // Left of right eyebrow. + FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 5 + // Right of right eyebrow. + FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW FaceAnnotation_Landmark_Type = 6 + // Midpoint between eyes. + FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES FaceAnnotation_Landmark_Type = 7 + // Nose tip. + FaceAnnotation_Landmark_NOSE_TIP FaceAnnotation_Landmark_Type = 8 + // Upper lip. + FaceAnnotation_Landmark_UPPER_LIP FaceAnnotation_Landmark_Type = 9 + // Lower lip. + FaceAnnotation_Landmark_LOWER_LIP FaceAnnotation_Landmark_Type = 10 + // Mouth left. 
+ FaceAnnotation_Landmark_MOUTH_LEFT FaceAnnotation_Landmark_Type = 11 + // Mouth right. + FaceAnnotation_Landmark_MOUTH_RIGHT FaceAnnotation_Landmark_Type = 12 + // Mouth center. + FaceAnnotation_Landmark_MOUTH_CENTER FaceAnnotation_Landmark_Type = 13 + // Nose, bottom right. + FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT FaceAnnotation_Landmark_Type = 14 + // Nose, bottom left. + FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT FaceAnnotation_Landmark_Type = 15 + // Nose, bottom center. + FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER FaceAnnotation_Landmark_Type = 16 + // Left eye, top boundary. + FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 17 + // Left eye, right corner. + FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 18 + // Left eye, bottom boundary. + FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 19 + // Left eye, left corner. + FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 20 + // Right eye, top boundary. + FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY FaceAnnotation_Landmark_Type = 21 + // Right eye, right corner. + FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER FaceAnnotation_Landmark_Type = 22 + // Right eye, bottom boundary. + FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY FaceAnnotation_Landmark_Type = 23 + // Right eye, left corner. + FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER FaceAnnotation_Landmark_Type = 24 + // Left eyebrow, upper midpoint. + FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 25 + // Right eyebrow, upper midpoint. + FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT FaceAnnotation_Landmark_Type = 26 + // Left ear tragion. + FaceAnnotation_Landmark_LEFT_EAR_TRAGION FaceAnnotation_Landmark_Type = 27 + // Right ear tragion. + FaceAnnotation_Landmark_RIGHT_EAR_TRAGION FaceAnnotation_Landmark_Type = 28 + // Left eye pupil. + FaceAnnotation_Landmark_LEFT_EYE_PUPIL FaceAnnotation_Landmark_Type = 29 + // Right eye pupil. + FaceAnnotation_Landmark_RIGHT_EYE_PUPIL FaceAnnotation_Landmark_Type = 30 + // Forehead glabella. + FaceAnnotation_Landmark_FOREHEAD_GLABELLA FaceAnnotation_Landmark_Type = 31 + // Chin gnathion. + FaceAnnotation_Landmark_CHIN_GNATHION FaceAnnotation_Landmark_Type = 32 + // Chin left gonion. + FaceAnnotation_Landmark_CHIN_LEFT_GONION FaceAnnotation_Landmark_Type = 33 + // Chin right gonion. 
+ FaceAnnotation_Landmark_CHIN_RIGHT_GONION FaceAnnotation_Landmark_Type = 34 +) + +var FaceAnnotation_Landmark_Type_name = map[int32]string{ + 0: "UNKNOWN_LANDMARK", + 1: "LEFT_EYE", + 2: "RIGHT_EYE", + 3: "LEFT_OF_LEFT_EYEBROW", + 4: "RIGHT_OF_LEFT_EYEBROW", + 5: "LEFT_OF_RIGHT_EYEBROW", + 6: "RIGHT_OF_RIGHT_EYEBROW", + 7: "MIDPOINT_BETWEEN_EYES", + 8: "NOSE_TIP", + 9: "UPPER_LIP", + 10: "LOWER_LIP", + 11: "MOUTH_LEFT", + 12: "MOUTH_RIGHT", + 13: "MOUTH_CENTER", + 14: "NOSE_BOTTOM_RIGHT", + 15: "NOSE_BOTTOM_LEFT", + 16: "NOSE_BOTTOM_CENTER", + 17: "LEFT_EYE_TOP_BOUNDARY", + 18: "LEFT_EYE_RIGHT_CORNER", + 19: "LEFT_EYE_BOTTOM_BOUNDARY", + 20: "LEFT_EYE_LEFT_CORNER", + 21: "RIGHT_EYE_TOP_BOUNDARY", + 22: "RIGHT_EYE_RIGHT_CORNER", + 23: "RIGHT_EYE_BOTTOM_BOUNDARY", + 24: "RIGHT_EYE_LEFT_CORNER", + 25: "LEFT_EYEBROW_UPPER_MIDPOINT", + 26: "RIGHT_EYEBROW_UPPER_MIDPOINT", + 27: "LEFT_EAR_TRAGION", + 28: "RIGHT_EAR_TRAGION", + 29: "LEFT_EYE_PUPIL", + 30: "RIGHT_EYE_PUPIL", + 31: "FOREHEAD_GLABELLA", + 32: "CHIN_GNATHION", + 33: "CHIN_LEFT_GONION", + 34: "CHIN_RIGHT_GONION", +} +var FaceAnnotation_Landmark_Type_value = map[string]int32{ + "UNKNOWN_LANDMARK": 0, + "LEFT_EYE": 1, + "RIGHT_EYE": 2, + "LEFT_OF_LEFT_EYEBROW": 3, + "RIGHT_OF_LEFT_EYEBROW": 4, + "LEFT_OF_RIGHT_EYEBROW": 5, + "RIGHT_OF_RIGHT_EYEBROW": 6, + "MIDPOINT_BETWEEN_EYES": 7, + "NOSE_TIP": 8, + "UPPER_LIP": 9, + "LOWER_LIP": 10, + "MOUTH_LEFT": 11, + "MOUTH_RIGHT": 12, + "MOUTH_CENTER": 13, + "NOSE_BOTTOM_RIGHT": 14, + "NOSE_BOTTOM_LEFT": 15, + "NOSE_BOTTOM_CENTER": 16, + "LEFT_EYE_TOP_BOUNDARY": 17, + "LEFT_EYE_RIGHT_CORNER": 18, + "LEFT_EYE_BOTTOM_BOUNDARY": 19, + "LEFT_EYE_LEFT_CORNER": 20, + "RIGHT_EYE_TOP_BOUNDARY": 21, + "RIGHT_EYE_RIGHT_CORNER": 22, + "RIGHT_EYE_BOTTOM_BOUNDARY": 23, + "RIGHT_EYE_LEFT_CORNER": 24, + "LEFT_EYEBROW_UPPER_MIDPOINT": 25, + "RIGHT_EYEBROW_UPPER_MIDPOINT": 26, + "LEFT_EAR_TRAGION": 27, + "RIGHT_EAR_TRAGION": 28, + "LEFT_EYE_PUPIL": 29, + "RIGHT_EYE_PUPIL": 30, + "FOREHEAD_GLABELLA": 31, + "CHIN_GNATHION": 32, + "CHIN_LEFT_GONION": 33, + "CHIN_RIGHT_GONION": 34, +} + +func (x FaceAnnotation_Landmark_Type) String() string { + return proto.EnumName(FaceAnnotation_Landmark_Type_name, int32(x)) +} +func (FaceAnnotation_Landmark_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{3, 0, 0} +} + +// Batch operation states. +type OperationMetadata_State int32 + +const ( + // Invalid. + OperationMetadata_STATE_UNSPECIFIED OperationMetadata_State = 0 + // Request is received. + OperationMetadata_CREATED OperationMetadata_State = 1 + // Request is actively being processed. + OperationMetadata_RUNNING OperationMetadata_State = 2 + // The batch processing is done. + OperationMetadata_DONE OperationMetadata_State = 3 + // The batch processing was cancelled. 
+ OperationMetadata_CANCELLED OperationMetadata_State = 4 +) + +var OperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATED", + 2: "RUNNING", + 3: "DONE", + 4: "CANCELLED", +} +var OperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATED": 1, + "RUNNING": 2, + "DONE": 3, + "CANCELLED": 4, +} + +func (x OperationMetadata_State) String() string { + return proto.EnumName(OperationMetadata_State_name, int32(x)) +} +func (OperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{37, 0} +} + +// The type of Google Cloud Vision API detection to perform, and the maximum +// number of results to return for that type. Multiple `Feature` objects can +// be specified in the `features` list. +type Feature struct { + // The feature type. + Type Feature_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p4beta1.Feature_Type" json:"type,omitempty"` + // Maximum number of results of this type. Does not apply to + // `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`. + MaxResults int32 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + // Model to use for the feature. + // Supported values: "builtin/stable" (the default if unset) and + // "builtin/latest". + Model string `protobuf:"bytes,3,opt,name=model,proto3" json:"model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{0} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetType() Feature_Type { + if m != nil { + return m.Type + } + return Feature_TYPE_UNSPECIFIED +} + +func (m *Feature) GetMaxResults() int32 { + if m != nil { + return m.MaxResults + } + return 0 +} + +func (m *Feature) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +// External image source (Google Cloud Storage or web URL image location). +type ImageSource struct { + // **Use `image_uri` instead.** + // + // The Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more info. + GcsImageUri string `protobuf:"bytes,1,opt,name=gcs_image_uri,json=gcsImageUri,proto3" json:"gcs_image_uri,omitempty"` + // The URI of the source image. Can be either: + // + // 1. A Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. See + // [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more + // info. + // + // 2. 
A publicly-accessible image HTTP/HTTPS URL. When fetching images from + // HTTP/HTTPS URLs, Google cannot guarantee that the request will be + // completed. Your request may fail if the specified host denies the + // request (e.g. due to request throttling or DOS prevention), or if Google + // throttles requests to the site for abuse prevention. You should not + // depend on externally-hosted images for production applications. + // + // When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes + // precedence. + ImageUri string `protobuf:"bytes,2,opt,name=image_uri,json=imageUri,proto3" json:"image_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageSource) Reset() { *m = ImageSource{} } +func (m *ImageSource) String() string { return proto.CompactTextString(m) } +func (*ImageSource) ProtoMessage() {} +func (*ImageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{1} +} +func (m *ImageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageSource.Unmarshal(m, b) +} +func (m *ImageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageSource.Marshal(b, m, deterministic) +} +func (dst *ImageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageSource.Merge(dst, src) +} +func (m *ImageSource) XXX_Size() int { + return xxx_messageInfo_ImageSource.Size(m) +} +func (m *ImageSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageSource proto.InternalMessageInfo + +func (m *ImageSource) GetGcsImageUri() string { + if m != nil { + return m.GcsImageUri + } + return "" +} + +func (m *ImageSource) GetImageUri() string { + if m != nil { + return m.ImageUri + } + return "" +} + +// Client image to perform Google Cloud Vision API tasks over. +type Image struct { + // Image content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // Google Cloud Storage image location, or publicly-accessible image + // URL. If both `content` and `source` are provided for an image, `content` + // takes precedence and is used to perform the image annotation request. 
+ Source *ImageSource `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Image) Reset() { *m = Image{} } +func (m *Image) String() string { return proto.CompactTextString(m) } +func (*Image) ProtoMessage() {} +func (*Image) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{2} +} +func (m *Image) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Image.Unmarshal(m, b) +} +func (m *Image) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Image.Marshal(b, m, deterministic) +} +func (dst *Image) XXX_Merge(src proto.Message) { + xxx_messageInfo_Image.Merge(dst, src) +} +func (m *Image) XXX_Size() int { + return xxx_messageInfo_Image.Size(m) +} +func (m *Image) XXX_DiscardUnknown() { + xxx_messageInfo_Image.DiscardUnknown(m) +} + +var xxx_messageInfo_Image proto.InternalMessageInfo + +func (m *Image) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *Image) GetSource() *ImageSource { + if m != nil { + return m.Source + } + return nil +} + +// A face annotation object contains the results of face detection. +type FaceAnnotation struct { + // The bounding polygon around the face. The coordinates of the bounding box + // are in the original image's scale. + // The bounding box is computed to "frame" the face in accordance with human + // expectations. It is based on the landmarker results. + // Note that one or more x and/or y coordinates may not be generated in the + // `BoundingPoly` (the polygon will be unbounded) if only a partial face + // appears in the image to be annotated. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The `fd_bounding_poly` bounding polygon is tighter than the + // `boundingPoly`, and encloses only the skin part of the face. Typically, it + // is used to eliminate the face from any image analysis that detects the + // "amount of skin" visible in an image. It is not based on the + // landmarker results, only on the initial face detection, hence + // the fd (face detection) prefix. + FdBoundingPoly *BoundingPoly `protobuf:"bytes,2,opt,name=fd_bounding_poly,json=fdBoundingPoly,proto3" json:"fd_bounding_poly,omitempty"` + // Detected face landmarks. + Landmarks []*FaceAnnotation_Landmark `protobuf:"bytes,3,rep,name=landmarks,proto3" json:"landmarks,omitempty"` + // Roll angle, which indicates the amount of clockwise/anti-clockwise rotation + // of the face relative to the image vertical about the axis perpendicular to + // the face. Range [-180,180]. + RollAngle float32 `protobuf:"fixed32,4,opt,name=roll_angle,json=rollAngle,proto3" json:"roll_angle,omitempty"` + // Yaw angle, which indicates the leftward/rightward angle that the face is + // pointing relative to the vertical plane perpendicular to the image. Range + // [-180,180]. + PanAngle float32 `protobuf:"fixed32,5,opt,name=pan_angle,json=panAngle,proto3" json:"pan_angle,omitempty"` + // Pitch angle, which indicates the upwards/downwards angle that the face is + // pointing relative to the image's horizontal plane. Range [-180,180]. + TiltAngle float32 `protobuf:"fixed32,6,opt,name=tilt_angle,json=tiltAngle,proto3" json:"tilt_angle,omitempty"` + // Detection confidence. Range [0, 1]. 
+ DetectionConfidence float32 `protobuf:"fixed32,7,opt,name=detection_confidence,json=detectionConfidence,proto3" json:"detection_confidence,omitempty"` + // Face landmarking confidence. Range [0, 1]. + LandmarkingConfidence float32 `protobuf:"fixed32,8,opt,name=landmarking_confidence,json=landmarkingConfidence,proto3" json:"landmarking_confidence,omitempty"` + // Joy likelihood. + JoyLikelihood Likelihood `protobuf:"varint,9,opt,name=joy_likelihood,json=joyLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"joy_likelihood,omitempty"` + // Sorrow likelihood. + SorrowLikelihood Likelihood `protobuf:"varint,10,opt,name=sorrow_likelihood,json=sorrowLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"sorrow_likelihood,omitempty"` + // Anger likelihood. + AngerLikelihood Likelihood `protobuf:"varint,11,opt,name=anger_likelihood,json=angerLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"anger_likelihood,omitempty"` + // Surprise likelihood. + SurpriseLikelihood Likelihood `protobuf:"varint,12,opt,name=surprise_likelihood,json=surpriseLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"surprise_likelihood,omitempty"` + // Under-exposed likelihood. + UnderExposedLikelihood Likelihood `protobuf:"varint,13,opt,name=under_exposed_likelihood,json=underExposedLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"under_exposed_likelihood,omitempty"` + // Blurred likelihood. + BlurredLikelihood Likelihood `protobuf:"varint,14,opt,name=blurred_likelihood,json=blurredLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"blurred_likelihood,omitempty"` + // Headwear likelihood. + HeadwearLikelihood Likelihood `protobuf:"varint,15,opt,name=headwear_likelihood,json=headwearLikelihood,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"headwear_likelihood,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation) Reset() { *m = FaceAnnotation{} } +func (m *FaceAnnotation) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation) ProtoMessage() {} +func (*FaceAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{3} +} +func (m *FaceAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation.Unmarshal(m, b) +} +func (m *FaceAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation.Merge(dst, src) +} +func (m *FaceAnnotation) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation.Size(m) +} +func (m *FaceAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation proto.InternalMessageInfo + +func (m *FaceAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetFdBoundingPoly() *BoundingPoly { + if m != nil { + return m.FdBoundingPoly + } + return nil +} + +func (m *FaceAnnotation) GetLandmarks() []*FaceAnnotation_Landmark { + if m != nil { + return m.Landmarks + } + return nil +} + +func (m *FaceAnnotation) GetRollAngle() float32 { + if m != nil { + return m.RollAngle + } + return 0 +} + +func (m *FaceAnnotation) GetPanAngle() float32 { + if m != nil { + return m.PanAngle + } + return 
0 +} + +func (m *FaceAnnotation) GetTiltAngle() float32 { + if m != nil { + return m.TiltAngle + } + return 0 +} + +func (m *FaceAnnotation) GetDetectionConfidence() float32 { + if m != nil { + return m.DetectionConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetLandmarkingConfidence() float32 { + if m != nil { + return m.LandmarkingConfidence + } + return 0 +} + +func (m *FaceAnnotation) GetJoyLikelihood() Likelihood { + if m != nil { + return m.JoyLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSorrowLikelihood() Likelihood { + if m != nil { + return m.SorrowLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetAngerLikelihood() Likelihood { + if m != nil { + return m.AngerLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetSurpriseLikelihood() Likelihood { + if m != nil { + return m.SurpriseLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetUnderExposedLikelihood() Likelihood { + if m != nil { + return m.UnderExposedLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetBlurredLikelihood() Likelihood { + if m != nil { + return m.BlurredLikelihood + } + return Likelihood_UNKNOWN +} + +func (m *FaceAnnotation) GetHeadwearLikelihood() Likelihood { + if m != nil { + return m.HeadwearLikelihood + } + return Likelihood_UNKNOWN +} + +// A face-specific landmark (for example, a face feature). +type FaceAnnotation_Landmark struct { + // Face landmark type. + Type FaceAnnotation_Landmark_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.cloud.vision.v1p4beta1.FaceAnnotation_Landmark_Type" json:"type,omitempty"` + // Face landmark position. + Position *Position `protobuf:"bytes,4,opt,name=position,proto3" json:"position,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FaceAnnotation_Landmark) Reset() { *m = FaceAnnotation_Landmark{} } +func (m *FaceAnnotation_Landmark) String() string { return proto.CompactTextString(m) } +func (*FaceAnnotation_Landmark) ProtoMessage() {} +func (*FaceAnnotation_Landmark) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{3, 0} +} +func (m *FaceAnnotation_Landmark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FaceAnnotation_Landmark.Unmarshal(m, b) +} +func (m *FaceAnnotation_Landmark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FaceAnnotation_Landmark.Marshal(b, m, deterministic) +} +func (dst *FaceAnnotation_Landmark) XXX_Merge(src proto.Message) { + xxx_messageInfo_FaceAnnotation_Landmark.Merge(dst, src) +} +func (m *FaceAnnotation_Landmark) XXX_Size() int { + return xxx_messageInfo_FaceAnnotation_Landmark.Size(m) +} +func (m *FaceAnnotation_Landmark) XXX_DiscardUnknown() { + xxx_messageInfo_FaceAnnotation_Landmark.DiscardUnknown(m) +} + +var xxx_messageInfo_FaceAnnotation_Landmark proto.InternalMessageInfo + +func (m *FaceAnnotation_Landmark) GetType() FaceAnnotation_Landmark_Type { + if m != nil { + return m.Type + } + return FaceAnnotation_Landmark_UNKNOWN_LANDMARK +} + +func (m *FaceAnnotation_Landmark) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +// Detected entity location information. +type LocationInfo struct { + // lat/long location coordinates. 
+ LatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng,json=latLng,proto3" json:"lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationInfo) Reset() { *m = LocationInfo{} } +func (m *LocationInfo) String() string { return proto.CompactTextString(m) } +func (*LocationInfo) ProtoMessage() {} +func (*LocationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{4} +} +func (m *LocationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationInfo.Unmarshal(m, b) +} +func (m *LocationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationInfo.Marshal(b, m, deterministic) +} +func (dst *LocationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationInfo.Merge(dst, src) +} +func (m *LocationInfo) XXX_Size() int { + return xxx_messageInfo_LocationInfo.Size(m) +} +func (m *LocationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_LocationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationInfo proto.InternalMessageInfo + +func (m *LocationInfo) GetLatLng() *latlng.LatLng { + if m != nil { + return m.LatLng + } + return nil +} + +// A `Property` consists of a user-supplied name/value pair. +type Property struct { + // Name of the property. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Value of the property. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Value of numeric properties. + Uint64Value uint64 `protobuf:"varint,3,opt,name=uint64_value,json=uint64Value,proto3" json:"uint64_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{5} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Property) GetUint64Value() uint64 { + if m != nil { + return m.Uint64Value + } + return 0 +} + +// Set of detected entity features. +type EntityAnnotation struct { + // Opaque entity ID. Some IDs may be available in + // [Google Knowledge Graph Search + // API](https://developers.google.com/knowledge-graph/). + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The language code for the locale in which the entity textual + // `description` is expressed. + Locale string `protobuf:"bytes,2,opt,name=locale,proto3" json:"locale,omitempty"` + // Entity textual description, expressed in its `locale` language. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Overall score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // **Deprecated. Use `score` instead.** + // The accuracy of the entity detection in an image. + // For example, for an image in which the "Eiffel Tower" entity is detected, + // this field represents the confidence that there is a tower in the query + // image. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` // Deprecated: Do not use. + // The relevancy of the ICA (Image Content Annotation) label to the + // image. For example, the relevancy of "tower" is likely higher to an image + // containing the detected "Eiffel Tower" than to an image containing a + // detected distant towering building, even though the confidence that + // there is a tower in each image may be the same. Range [0, 1]. + Topicality float32 `protobuf:"fixed32,6,opt,name=topicality,proto3" json:"topicality,omitempty"` + // Image region to which this entity belongs. Not produced + // for `LABEL_DETECTION` features. + BoundingPoly *BoundingPoly `protobuf:"bytes,7,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The location information for the detected entity. Multiple + // `LocationInfo` elements can be present because one location may + // indicate the location of the scene in the image, and another location + // may indicate the location of the place where the image was taken. + // Location information is usually present for landmarks. + Locations []*LocationInfo `protobuf:"bytes,8,rep,name=locations,proto3" json:"locations,omitempty"` + // Some entities may have optional user-supplied `Property` (name/value) + // fields, such a score or string that qualifies the entity. 
+ Properties []*Property `protobuf:"bytes,9,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityAnnotation) Reset() { *m = EntityAnnotation{} } +func (m *EntityAnnotation) String() string { return proto.CompactTextString(m) } +func (*EntityAnnotation) ProtoMessage() {} +func (*EntityAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{6} +} +func (m *EntityAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityAnnotation.Unmarshal(m, b) +} +func (m *EntityAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityAnnotation.Marshal(b, m, deterministic) +} +func (dst *EntityAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityAnnotation.Merge(dst, src) +} +func (m *EntityAnnotation) XXX_Size() int { + return xxx_messageInfo_EntityAnnotation.Size(m) +} +func (m *EntityAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_EntityAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityAnnotation proto.InternalMessageInfo + +func (m *EntityAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *EntityAnnotation) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *EntityAnnotation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *EntityAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Deprecated: Do not use. +func (m *EntityAnnotation) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *EntityAnnotation) GetTopicality() float32 { + if m != nil { + return m.Topicality + } + return 0 +} + +func (m *EntityAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *EntityAnnotation) GetLocations() []*LocationInfo { + if m != nil { + return m.Locations + } + return nil +} + +func (m *EntityAnnotation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// Set of detected objects with bounding boxes. +type LocalizedObjectAnnotation struct { + // Object ID that should align with EntityAnnotation mid. + Mid string `protobuf:"bytes,1,opt,name=mid,proto3" json:"mid,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Object name, expressed in its `language_code` language. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Score of the result. Range [0, 1]. + Score float32 `protobuf:"fixed32,4,opt,name=score,proto3" json:"score,omitempty"` + // Image region to which this object belongs. This must be populated. 
+ BoundingPoly *BoundingPoly `protobuf:"bytes,5,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalizedObjectAnnotation) Reset() { *m = LocalizedObjectAnnotation{} } +func (m *LocalizedObjectAnnotation) String() string { return proto.CompactTextString(m) } +func (*LocalizedObjectAnnotation) ProtoMessage() {} +func (*LocalizedObjectAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{7} +} +func (m *LocalizedObjectAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalizedObjectAnnotation.Unmarshal(m, b) +} +func (m *LocalizedObjectAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalizedObjectAnnotation.Marshal(b, m, deterministic) +} +func (dst *LocalizedObjectAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalizedObjectAnnotation.Merge(dst, src) +} +func (m *LocalizedObjectAnnotation) XXX_Size() int { + return xxx_messageInfo_LocalizedObjectAnnotation.Size(m) +} +func (m *LocalizedObjectAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_LocalizedObjectAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalizedObjectAnnotation proto.InternalMessageInfo + +func (m *LocalizedObjectAnnotation) GetMid() string { + if m != nil { + return m.Mid + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LocalizedObjectAnnotation) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *LocalizedObjectAnnotation) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +// Set of features pertaining to the image, computed by computer vision +// methods over safe-search verticals (for example, adult, spoof, medical, +// violence). +type SafeSearchAnnotation struct { + // Represents the adult content likelihood for the image. Adult content may + // contain elements such as nudity, pornographic images or cartoons, or + // sexual activities. + Adult Likelihood `protobuf:"varint,1,opt,name=adult,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"adult,omitempty"` + // Spoof likelihood. The likelihood that an modification + // was made to the image's canonical version to make it appear + // funny or offensive. + Spoof Likelihood `protobuf:"varint,2,opt,name=spoof,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"spoof,omitempty"` + // Likelihood that this is a medical image. + Medical Likelihood `protobuf:"varint,3,opt,name=medical,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"medical,omitempty"` + // Likelihood that this image contains violent content. + Violence Likelihood `protobuf:"varint,4,opt,name=violence,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"violence,omitempty"` + // Likelihood that the request image contains racy content. Racy content may + // include (but is not limited to) skimpy or sheer clothing, strategically + // covered nudity, lewd or provocative poses, or close-ups of sensitive + // body areas. 
+ Racy Likelihood `protobuf:"varint,9,opt,name=racy,proto3,enum=google.cloud.vision.v1p4beta1.Likelihood" json:"racy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SafeSearchAnnotation) Reset() { *m = SafeSearchAnnotation{} } +func (m *SafeSearchAnnotation) String() string { return proto.CompactTextString(m) } +func (*SafeSearchAnnotation) ProtoMessage() {} +func (*SafeSearchAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{8} +} +func (m *SafeSearchAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SafeSearchAnnotation.Unmarshal(m, b) +} +func (m *SafeSearchAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SafeSearchAnnotation.Marshal(b, m, deterministic) +} +func (dst *SafeSearchAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SafeSearchAnnotation.Merge(dst, src) +} +func (m *SafeSearchAnnotation) XXX_Size() int { + return xxx_messageInfo_SafeSearchAnnotation.Size(m) +} +func (m *SafeSearchAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_SafeSearchAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_SafeSearchAnnotation proto.InternalMessageInfo + +func (m *SafeSearchAnnotation) GetAdult() Likelihood { + if m != nil { + return m.Adult + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetSpoof() Likelihood { + if m != nil { + return m.Spoof + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetMedical() Likelihood { + if m != nil { + return m.Medical + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetViolence() Likelihood { + if m != nil { + return m.Violence + } + return Likelihood_UNKNOWN +} + +func (m *SafeSearchAnnotation) GetRacy() Likelihood { + if m != nil { + return m.Racy + } + return Likelihood_UNKNOWN +} + +// Rectangle determined by min and max `LatLng` pairs. +type LatLongRect struct { + // Min lat/long pair. + MinLatLng *latlng.LatLng `protobuf:"bytes,1,opt,name=min_lat_lng,json=minLatLng,proto3" json:"min_lat_lng,omitempty"` + // Max lat/long pair. 
+ MaxLatLng *latlng.LatLng `protobuf:"bytes,2,opt,name=max_lat_lng,json=maxLatLng,proto3" json:"max_lat_lng,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLongRect) Reset() { *m = LatLongRect{} } +func (m *LatLongRect) String() string { return proto.CompactTextString(m) } +func (*LatLongRect) ProtoMessage() {} +func (*LatLongRect) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{9} +} +func (m *LatLongRect) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLongRect.Unmarshal(m, b) +} +func (m *LatLongRect) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLongRect.Marshal(b, m, deterministic) +} +func (dst *LatLongRect) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLongRect.Merge(dst, src) +} +func (m *LatLongRect) XXX_Size() int { + return xxx_messageInfo_LatLongRect.Size(m) +} +func (m *LatLongRect) XXX_DiscardUnknown() { + xxx_messageInfo_LatLongRect.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLongRect proto.InternalMessageInfo + +func (m *LatLongRect) GetMinLatLng() *latlng.LatLng { + if m != nil { + return m.MinLatLng + } + return nil +} + +func (m *LatLongRect) GetMaxLatLng() *latlng.LatLng { + if m != nil { + return m.MaxLatLng + } + return nil +} + +// Color information consists of RGB channels, score, and the fraction of +// the image that the color occupies in the image. +type ColorInfo struct { + // RGB components of the color. + Color *color.Color `protobuf:"bytes,1,opt,name=color,proto3" json:"color,omitempty"` + // Image-specific score for this color. Value in range [0, 1]. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The fraction of pixels the color occupies in the image. + // Value in range [0, 1]. + PixelFraction float32 `protobuf:"fixed32,3,opt,name=pixel_fraction,json=pixelFraction,proto3" json:"pixel_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ColorInfo) Reset() { *m = ColorInfo{} } +func (m *ColorInfo) String() string { return proto.CompactTextString(m) } +func (*ColorInfo) ProtoMessage() {} +func (*ColorInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{10} +} +func (m *ColorInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ColorInfo.Unmarshal(m, b) +} +func (m *ColorInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ColorInfo.Marshal(b, m, deterministic) +} +func (dst *ColorInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ColorInfo.Merge(dst, src) +} +func (m *ColorInfo) XXX_Size() int { + return xxx_messageInfo_ColorInfo.Size(m) +} +func (m *ColorInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ColorInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ColorInfo proto.InternalMessageInfo + +func (m *ColorInfo) GetColor() *color.Color { + if m != nil { + return m.Color + } + return nil +} + +func (m *ColorInfo) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ColorInfo) GetPixelFraction() float32 { + if m != nil { + return m.PixelFraction + } + return 0 +} + +// Set of dominant colors and their corresponding scores. +type DominantColorsAnnotation struct { + // RGB color values with their score and pixel fraction. 
+ Colors []*ColorInfo `protobuf:"bytes,1,rep,name=colors,proto3" json:"colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DominantColorsAnnotation) Reset() { *m = DominantColorsAnnotation{} } +func (m *DominantColorsAnnotation) String() string { return proto.CompactTextString(m) } +func (*DominantColorsAnnotation) ProtoMessage() {} +func (*DominantColorsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{11} +} +func (m *DominantColorsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DominantColorsAnnotation.Unmarshal(m, b) +} +func (m *DominantColorsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DominantColorsAnnotation.Marshal(b, m, deterministic) +} +func (dst *DominantColorsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DominantColorsAnnotation.Merge(dst, src) +} +func (m *DominantColorsAnnotation) XXX_Size() int { + return xxx_messageInfo_DominantColorsAnnotation.Size(m) +} +func (m *DominantColorsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_DominantColorsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_DominantColorsAnnotation proto.InternalMessageInfo + +func (m *DominantColorsAnnotation) GetColors() []*ColorInfo { + if m != nil { + return m.Colors + } + return nil +} + +// Stores image properties, such as dominant colors. +type ImageProperties struct { + // If present, dominant colors completed successfully. + DominantColors *DominantColorsAnnotation `protobuf:"bytes,1,opt,name=dominant_colors,json=dominantColors,proto3" json:"dominant_colors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageProperties) Reset() { *m = ImageProperties{} } +func (m *ImageProperties) String() string { return proto.CompactTextString(m) } +func (*ImageProperties) ProtoMessage() {} +func (*ImageProperties) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{12} +} +func (m *ImageProperties) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageProperties.Unmarshal(m, b) +} +func (m *ImageProperties) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageProperties.Marshal(b, m, deterministic) +} +func (dst *ImageProperties) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageProperties.Merge(dst, src) +} +func (m *ImageProperties) XXX_Size() int { + return xxx_messageInfo_ImageProperties.Size(m) +} +func (m *ImageProperties) XXX_DiscardUnknown() { + xxx_messageInfo_ImageProperties.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageProperties proto.InternalMessageInfo + +func (m *ImageProperties) GetDominantColors() *DominantColorsAnnotation { + if m != nil { + return m.DominantColors + } + return nil +} + +// Single crop hint that is used to generate a new crop when serving an image. +type CropHint struct { + // The bounding polygon for the crop region. The coordinates of the bounding + // box are in the original image's scale. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // Confidence of this being a salient region. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + // Fraction of importance of this salient region with respect to the original + // image. + ImportanceFraction float32 `protobuf:"fixed32,3,opt,name=importance_fraction,json=importanceFraction,proto3" json:"importance_fraction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHint) Reset() { *m = CropHint{} } +func (m *CropHint) String() string { return proto.CompactTextString(m) } +func (*CropHint) ProtoMessage() {} +func (*CropHint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{13} +} +func (m *CropHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHint.Unmarshal(m, b) +} +func (m *CropHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHint.Marshal(b, m, deterministic) +} +func (dst *CropHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHint.Merge(dst, src) +} +func (m *CropHint) XXX_Size() int { + return xxx_messageInfo_CropHint.Size(m) +} +func (m *CropHint) XXX_DiscardUnknown() { + xxx_messageInfo_CropHint.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHint proto.InternalMessageInfo + +func (m *CropHint) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *CropHint) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func (m *CropHint) GetImportanceFraction() float32 { + if m != nil { + return m.ImportanceFraction + } + return 0 +} + +// Set of crop hints that are used to generate new crops when serving images. +type CropHintsAnnotation struct { + // Crop hint results. + CropHints []*CropHint `protobuf:"bytes,1,rep,name=crop_hints,json=cropHints,proto3" json:"crop_hints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsAnnotation) Reset() { *m = CropHintsAnnotation{} } +func (m *CropHintsAnnotation) String() string { return proto.CompactTextString(m) } +func (*CropHintsAnnotation) ProtoMessage() {} +func (*CropHintsAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{14} +} +func (m *CropHintsAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsAnnotation.Unmarshal(m, b) +} +func (m *CropHintsAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsAnnotation.Marshal(b, m, deterministic) +} +func (dst *CropHintsAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsAnnotation.Merge(dst, src) +} +func (m *CropHintsAnnotation) XXX_Size() int { + return xxx_messageInfo_CropHintsAnnotation.Size(m) +} +func (m *CropHintsAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsAnnotation proto.InternalMessageInfo + +func (m *CropHintsAnnotation) GetCropHints() []*CropHint { + if m != nil { + return m.CropHints + } + return nil +} + +// Parameters for crop hints annotation request. +type CropHintsParams struct { + // Aspect ratios in floats, representing the ratio of the width to the height + // of the image. For example, if the desired aspect ratio is 4/3, the + // corresponding float value should be 1.33333. If not specified, the + // best possible crop is returned. 
The number of provided aspect ratios is + // limited to a maximum of 16; any aspect ratios provided after the 16th are + // ignored. + AspectRatios []float32 `protobuf:"fixed32,1,rep,packed,name=aspect_ratios,json=aspectRatios,proto3" json:"aspect_ratios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CropHintsParams) Reset() { *m = CropHintsParams{} } +func (m *CropHintsParams) String() string { return proto.CompactTextString(m) } +func (*CropHintsParams) ProtoMessage() {} +func (*CropHintsParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{15} +} +func (m *CropHintsParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CropHintsParams.Unmarshal(m, b) +} +func (m *CropHintsParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CropHintsParams.Marshal(b, m, deterministic) +} +func (dst *CropHintsParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_CropHintsParams.Merge(dst, src) +} +func (m *CropHintsParams) XXX_Size() int { + return xxx_messageInfo_CropHintsParams.Size(m) +} +func (m *CropHintsParams) XXX_DiscardUnknown() { + xxx_messageInfo_CropHintsParams.DiscardUnknown(m) +} + +var xxx_messageInfo_CropHintsParams proto.InternalMessageInfo + +func (m *CropHintsParams) GetAspectRatios() []float32 { + if m != nil { + return m.AspectRatios + } + return nil +} + +// Parameters for web detection request. +type WebDetectionParams struct { + // Whether to include results derived from the geo information in the image. + IncludeGeoResults bool `protobuf:"varint,2,opt,name=include_geo_results,json=includeGeoResults,proto3" json:"include_geo_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetectionParams) Reset() { *m = WebDetectionParams{} } +func (m *WebDetectionParams) String() string { return proto.CompactTextString(m) } +func (*WebDetectionParams) ProtoMessage() {} +func (*WebDetectionParams) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{16} +} +func (m *WebDetectionParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetectionParams.Unmarshal(m, b) +} +func (m *WebDetectionParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetectionParams.Marshal(b, m, deterministic) +} +func (dst *WebDetectionParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetectionParams.Merge(dst, src) +} +func (m *WebDetectionParams) XXX_Size() int { + return xxx_messageInfo_WebDetectionParams.Size(m) +} +func (m *WebDetectionParams) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetectionParams.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetectionParams proto.InternalMessageInfo + +func (m *WebDetectionParams) GetIncludeGeoResults() bool { + if m != nil { + return m.IncludeGeoResults + } + return false +} + +// Image context and/or feature-specific parameters. +type ImageContext struct { + // Not used. + LatLongRect *LatLongRect `protobuf:"bytes,1,opt,name=lat_long_rect,json=latLongRect,proto3" json:"lat_long_rect,omitempty"` + // List of languages to use for TEXT_DETECTION. In most cases, an empty value + // yields the best results since it enables automatic language detection. For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. 
In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Text detection returns an + // error if one or more of the specified languages is not one of the + // [supported languages](/vision/docs/languages). + LanguageHints []string `protobuf:"bytes,2,rep,name=language_hints,json=languageHints,proto3" json:"language_hints,omitempty"` + // Parameters for crop hints annotation request. + CropHintsParams *CropHintsParams `protobuf:"bytes,4,opt,name=crop_hints_params,json=cropHintsParams,proto3" json:"crop_hints_params,omitempty"` + // Parameters for product search. + ProductSearchParams *ProductSearchParams `protobuf:"bytes,5,opt,name=product_search_params,json=productSearchParams,proto3" json:"product_search_params,omitempty"` + // Parameters for web detection. + WebDetectionParams *WebDetectionParams `protobuf:"bytes,6,opt,name=web_detection_params,json=webDetectionParams,proto3" json:"web_detection_params,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageContext) Reset() { *m = ImageContext{} } +func (m *ImageContext) String() string { return proto.CompactTextString(m) } +func (*ImageContext) ProtoMessage() {} +func (*ImageContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{17} +} +func (m *ImageContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageContext.Unmarshal(m, b) +} +func (m *ImageContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageContext.Marshal(b, m, deterministic) +} +func (dst *ImageContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageContext.Merge(dst, src) +} +func (m *ImageContext) XXX_Size() int { + return xxx_messageInfo_ImageContext.Size(m) +} +func (m *ImageContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageContext proto.InternalMessageInfo + +func (m *ImageContext) GetLatLongRect() *LatLongRect { + if m != nil { + return m.LatLongRect + } + return nil +} + +func (m *ImageContext) GetLanguageHints() []string { + if m != nil { + return m.LanguageHints + } + return nil +} + +func (m *ImageContext) GetCropHintsParams() *CropHintsParams { + if m != nil { + return m.CropHintsParams + } + return nil +} + +func (m *ImageContext) GetProductSearchParams() *ProductSearchParams { + if m != nil { + return m.ProductSearchParams + } + return nil +} + +func (m *ImageContext) GetWebDetectionParams() *WebDetectionParams { + if m != nil { + return m.WebDetectionParams + } + return nil +} + +// Request for performing Google Cloud Vision API tasks over a user-provided +// image, with user-requested features, and with context information. +type AnnotateImageRequest struct { + // The image to be processed. + Image *Image `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + // Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image. 
+ ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageRequest) Reset() { *m = AnnotateImageRequest{} } +func (m *AnnotateImageRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageRequest) ProtoMessage() {} +func (*AnnotateImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{18} +} +func (m *AnnotateImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageRequest.Unmarshal(m, b) +} +func (m *AnnotateImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageRequest.Merge(dst, src) +} +func (m *AnnotateImageRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateImageRequest.Size(m) +} +func (m *AnnotateImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageRequest proto.InternalMessageInfo + +func (m *AnnotateImageRequest) GetImage() *Image { + if m != nil { + return m.Image + } + return nil +} + +func (m *AnnotateImageRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateImageRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +// If an image was produced from a file (e.g. a PDF), this message gives +// information about the source of that image. +type ImageAnnotationContext struct { + // The URI of the file used to produce the image. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // If the file was a PDF or TIFF, this field gives the page number within + // the file used to produce the image. 
+ PageNumber int32 `protobuf:"varint,2,opt,name=page_number,json=pageNumber,proto3" json:"page_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageAnnotationContext) Reset() { *m = ImageAnnotationContext{} } +func (m *ImageAnnotationContext) String() string { return proto.CompactTextString(m) } +func (*ImageAnnotationContext) ProtoMessage() {} +func (*ImageAnnotationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{19} +} +func (m *ImageAnnotationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageAnnotationContext.Unmarshal(m, b) +} +func (m *ImageAnnotationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageAnnotationContext.Marshal(b, m, deterministic) +} +func (dst *ImageAnnotationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageAnnotationContext.Merge(dst, src) +} +func (m *ImageAnnotationContext) XXX_Size() int { + return xxx_messageInfo_ImageAnnotationContext.Size(m) +} +func (m *ImageAnnotationContext) XXX_DiscardUnknown() { + xxx_messageInfo_ImageAnnotationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageAnnotationContext proto.InternalMessageInfo + +func (m *ImageAnnotationContext) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ImageAnnotationContext) GetPageNumber() int32 { + if m != nil { + return m.PageNumber + } + return 0 +} + +// Response to an image annotation request. +type AnnotateImageResponse struct { + // If present, face detection has completed successfully. + FaceAnnotations []*FaceAnnotation `protobuf:"bytes,1,rep,name=face_annotations,json=faceAnnotations,proto3" json:"face_annotations,omitempty"` + // If present, landmark detection has completed successfully. + LandmarkAnnotations []*EntityAnnotation `protobuf:"bytes,2,rep,name=landmark_annotations,json=landmarkAnnotations,proto3" json:"landmark_annotations,omitempty"` + // If present, logo detection has completed successfully. + LogoAnnotations []*EntityAnnotation `protobuf:"bytes,3,rep,name=logo_annotations,json=logoAnnotations,proto3" json:"logo_annotations,omitempty"` + // If present, label detection has completed successfully. + LabelAnnotations []*EntityAnnotation `protobuf:"bytes,4,rep,name=label_annotations,json=labelAnnotations,proto3" json:"label_annotations,omitempty"` + // If present, localized object detection has completed successfully. + // This will be sorted descending by confidence score. + LocalizedObjectAnnotations []*LocalizedObjectAnnotation `protobuf:"bytes,22,rep,name=localized_object_annotations,json=localizedObjectAnnotations,proto3" json:"localized_object_annotations,omitempty"` + // If present, text (OCR) detection has completed successfully. + TextAnnotations []*EntityAnnotation `protobuf:"bytes,5,rep,name=text_annotations,json=textAnnotations,proto3" json:"text_annotations,omitempty"` + // If present, text (OCR) detection or document (OCR) text detection has + // completed successfully. + // This annotation provides the structural hierarchy for the OCR detected + // text. + FullTextAnnotation *TextAnnotation `protobuf:"bytes,12,opt,name=full_text_annotation,json=fullTextAnnotation,proto3" json:"full_text_annotation,omitempty"` + // If present, safe-search annotation has completed successfully. 
+ SafeSearchAnnotation *SafeSearchAnnotation `protobuf:"bytes,6,opt,name=safe_search_annotation,json=safeSearchAnnotation,proto3" json:"safe_search_annotation,omitempty"` + // If present, image properties were extracted successfully. + ImagePropertiesAnnotation *ImageProperties `protobuf:"bytes,8,opt,name=image_properties_annotation,json=imagePropertiesAnnotation,proto3" json:"image_properties_annotation,omitempty"` + // If present, crop hints have completed successfully. + CropHintsAnnotation *CropHintsAnnotation `protobuf:"bytes,11,opt,name=crop_hints_annotation,json=cropHintsAnnotation,proto3" json:"crop_hints_annotation,omitempty"` + // If present, web detection has completed successfully. + WebDetection *WebDetection `protobuf:"bytes,13,opt,name=web_detection,json=webDetection,proto3" json:"web_detection,omitempty"` + // If present, product search has completed successfully. + ProductSearchResults *ProductSearchResults `protobuf:"bytes,14,opt,name=product_search_results,json=productSearchResults,proto3" json:"product_search_results,omitempty"` + // If set, represents the error message for the operation. + // Note that filled-in image annotations are guaranteed to be + // correct, even when `error` is set. + Error *status.Status `protobuf:"bytes,9,opt,name=error,proto3" json:"error,omitempty"` + // If present, contextual information is needed to understand where this image + // comes from. + Context *ImageAnnotationContext `protobuf:"bytes,21,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateImageResponse) Reset() { *m = AnnotateImageResponse{} } +func (m *AnnotateImageResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateImageResponse) ProtoMessage() {} +func (*AnnotateImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{20} +} +func (m *AnnotateImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateImageResponse.Unmarshal(m, b) +} +func (m *AnnotateImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateImageResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateImageResponse.Merge(dst, src) +} +func (m *AnnotateImageResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateImageResponse.Size(m) +} +func (m *AnnotateImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateImageResponse proto.InternalMessageInfo + +func (m *AnnotateImageResponse) GetFaceAnnotations() []*FaceAnnotation { + if m != nil { + return m.FaceAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLandmarkAnnotations() []*EntityAnnotation { + if m != nil { + return m.LandmarkAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLogoAnnotations() []*EntityAnnotation { + if m != nil { + return m.LogoAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLabelAnnotations() []*EntityAnnotation { + if m != nil { + return m.LabelAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetLocalizedObjectAnnotations() []*LocalizedObjectAnnotation { + if m != nil { + return m.LocalizedObjectAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetTextAnnotations() []*EntityAnnotation { + if m != nil { + return 
m.TextAnnotations + } + return nil +} + +func (m *AnnotateImageResponse) GetFullTextAnnotation() *TextAnnotation { + if m != nil { + return m.FullTextAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetSafeSearchAnnotation() *SafeSearchAnnotation { + if m != nil { + return m.SafeSearchAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetImagePropertiesAnnotation() *ImageProperties { + if m != nil { + return m.ImagePropertiesAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetCropHintsAnnotation() *CropHintsAnnotation { + if m != nil { + return m.CropHintsAnnotation + } + return nil +} + +func (m *AnnotateImageResponse) GetWebDetection() *WebDetection { + if m != nil { + return m.WebDetection + } + return nil +} + +func (m *AnnotateImageResponse) GetProductSearchResults() *ProductSearchResults { + if m != nil { + return m.ProductSearchResults + } + return nil +} + +func (m *AnnotateImageResponse) GetError() *status.Status { + if m != nil { + return m.Error + } + return nil +} + +func (m *AnnotateImageResponse) GetContext() *ImageAnnotationContext { + if m != nil { + return m.Context + } + return nil +} + +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +type AnnotateFileResponse struct { + // Information about the file for which this response is generated. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Individual responses to images found within the file. + Responses []*AnnotateImageResponse `protobuf:"bytes,2,rep,name=responses,proto3" json:"responses,omitempty"` + // This field gives the total number of pages in the file. + TotalPages int32 `protobuf:"varint,3,opt,name=total_pages,json=totalPages,proto3" json:"total_pages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileResponse) Reset() { *m = AnnotateFileResponse{} } +func (m *AnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileResponse) ProtoMessage() {} +func (*AnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{21} +} +func (m *AnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileResponse.Unmarshal(m, b) +} +func (m *AnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileResponse.Merge(dst, src) +} +func (m *AnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AnnotateFileResponse.Size(m) +} +func (m *AnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileResponse proto.InternalMessageInfo + +func (m *AnnotateFileResponse) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +func (m *AnnotateFileResponse) GetTotalPages() int32 { + if m != nil { + return m.TotalPages + } + return 0 +} + +// Multiple image annotation requests are batched into a single service call. 
+type BatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. + Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesRequest) Reset() { *m = BatchAnnotateImagesRequest{} } +func (m *BatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesRequest) ProtoMessage() {} +func (*BatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{22} +} +func (m *BatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *BatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesRequest.Size(m) +} +func (m *BatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to a batch image annotation request. +type BatchAnnotateImagesResponse struct { + // Individual responses to image annotation requests within the batch. + Responses []*AnnotateImageResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateImagesResponse) Reset() { *m = BatchAnnotateImagesResponse{} } +func (m *BatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateImagesResponse) ProtoMessage() {} +func (*BatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{23} +} +func (m *BatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *BatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateImagesResponse.Size(m) +} +func (m *BatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateImagesResponse) GetResponses() []*AnnotateImageResponse { + if m != nil { + return m.Responses + } + return nil +} + +// A request to annotate one single file, e.g. a PDF, TIFF or GIF file. +type AnnotateFileRequest struct { + // Required. Information about the input file. 
+ InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. + Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Pages of the file to perform image annotation. + // + // Pages starts from 1, we assume the first page of the file is page 1. + // At most 5 pages are supported per request. Pages can be negative. + // + // Page 1 means the first page. + // Page 2 means the second page. + // Page -1 means the last page. + // Page -2 means the second to the last page. + // + // If the file is GIF instead of PDF or TIFF, page refers to GIF frames. + // + // If this field is empty, by default the service performs image annotation + // for the first 5 pages of the file. + Pages []int32 `protobuf:"varint,4,rep,packed,name=pages,proto3" json:"pages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotateFileRequest) Reset() { *m = AnnotateFileRequest{} } +func (m *AnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AnnotateFileRequest) ProtoMessage() {} +func (*AnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{24} +} +func (m *AnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotateFileRequest.Unmarshal(m, b) +} +func (m *AnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotateFileRequest.Merge(dst, src) +} +func (m *AnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AnnotateFileRequest.Size(m) +} +func (m *AnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotateFileRequest proto.InternalMessageInfo + +func (m *AnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AnnotateFileRequest) GetPages() []int32 { + if m != nil { + return m.Pages + } + return nil +} + +// A list of requests to annotate files using the BatchAnnotateFiles API. +type BatchAnnotateFilesRequest struct { + // The list of file annotation requests. Right now we support only one + // AnnotateFileRequest in BatchAnnotateFilesRequest. 
+ Requests []*AnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateFilesRequest) Reset() { *m = BatchAnnotateFilesRequest{} } +func (m *BatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateFilesRequest) ProtoMessage() {} +func (*BatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{25} +} +func (m *BatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *BatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *BatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateFilesRequest.Size(m) +} +func (m *BatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *BatchAnnotateFilesRequest) GetRequests() []*AnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// A list of file annotation responses. +type BatchAnnotateFilesResponse struct { + // The list of file annotation responses, each response corresponding to each + // AnnotateFileRequest in BatchAnnotateFilesRequest. + Responses []*AnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchAnnotateFilesResponse) Reset() { *m = BatchAnnotateFilesResponse{} } +func (m *BatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchAnnotateFilesResponse) ProtoMessage() {} +func (*BatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{26} +} +func (m *BatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *BatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *BatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_BatchAnnotateFilesResponse.Size(m) +} +func (m *BatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *BatchAnnotateFilesResponse) GetResponses() []*AnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// An offline file annotation request. +type AsyncAnnotateFileRequest struct { + // Required. Information about the input file. + InputConfig *InputConfig `protobuf:"bytes,1,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + // Required. Requested features. 
+ Features []*Feature `protobuf:"bytes,2,rep,name=features,proto3" json:"features,omitempty"` + // Additional context that may accompany the image(s) in the file. + ImageContext *ImageContext `protobuf:"bytes,3,opt,name=image_context,json=imageContext,proto3" json:"image_context,omitempty"` + // Required. The desired output location and metadata (e.g. format). + OutputConfig *OutputConfig `protobuf:"bytes,4,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileRequest) Reset() { *m = AsyncAnnotateFileRequest{} } +func (m *AsyncAnnotateFileRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileRequest) ProtoMessage() {} +func (*AsyncAnnotateFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{27} +} +func (m *AsyncAnnotateFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileRequest.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileRequest.Merge(dst, src) +} +func (m *AsyncAnnotateFileRequest) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileRequest.Size(m) +} +func (m *AsyncAnnotateFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileRequest proto.InternalMessageInfo + +func (m *AsyncAnnotateFileRequest) GetInputConfig() *InputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetFeatures() []*Feature { + if m != nil { + return m.Features + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetImageContext() *ImageContext { + if m != nil { + return m.ImageContext + } + return nil +} + +func (m *AsyncAnnotateFileRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// The response for a single offline file annotation request. +type AsyncAnnotateFileResponse struct { + // The output location and metadata from AsyncAnnotateFileRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncAnnotateFileResponse) Reset() { *m = AsyncAnnotateFileResponse{} } +func (m *AsyncAnnotateFileResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncAnnotateFileResponse) ProtoMessage() {} +func (*AsyncAnnotateFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{28} +} +func (m *AsyncAnnotateFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncAnnotateFileResponse.Unmarshal(m, b) +} +func (m *AsyncAnnotateFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncAnnotateFileResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncAnnotateFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncAnnotateFileResponse.Merge(dst, src) +} +func (m *AsyncAnnotateFileResponse) XXX_Size() int { + return xxx_messageInfo_AsyncAnnotateFileResponse.Size(m) +} +func (m *AsyncAnnotateFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncAnnotateFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncAnnotateFileResponse proto.InternalMessageInfo + +func (m *AsyncAnnotateFileResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Request for async image annotation for a list of images. +type AsyncBatchAnnotateImagesRequest struct { + // Individual image annotation requests for this batch. + Requests []*AnnotateImageRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + // Required. The desired output location and metadata (e.g. format). 
+ OutputConfig *OutputConfig `protobuf:"bytes,2,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateImagesRequest) Reset() { *m = AsyncBatchAnnotateImagesRequest{} } +func (m *AsyncBatchAnnotateImagesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateImagesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{29} +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateImagesRequest.Size(m) +} +func (m *AsyncBatchAnnotateImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateImagesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateImagesRequest) GetRequests() []*AnnotateImageRequest { + if m != nil { + return m.Requests + } + return nil +} + +func (m *AsyncBatchAnnotateImagesRequest) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Response to an async batch image annotation request. +type AsyncBatchAnnotateImagesResponse struct { + // The output location and metadata from AsyncBatchAnnotateImagesRequest. 
+ OutputConfig *OutputConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateImagesResponse) Reset() { *m = AsyncBatchAnnotateImagesResponse{} } +func (m *AsyncBatchAnnotateImagesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateImagesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{30} +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateImagesResponse.Size(m) +} +func (m *AsyncBatchAnnotateImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateImagesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateImagesResponse) GetOutputConfig() *OutputConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Multiple async file annotation requests are batched into a single service +// call. +type AsyncBatchAnnotateFilesRequest struct { + // Individual async file annotation requests for this batch. + Requests []*AsyncAnnotateFileRequest `protobuf:"bytes,1,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesRequest) Reset() { *m = AsyncBatchAnnotateFilesRequest{} } +func (m *AsyncBatchAnnotateFilesRequest) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesRequest) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{31} +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesRequest.Size(m) +} +func (m *AsyncBatchAnnotateFilesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesRequest proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesRequest) GetRequests() []*AsyncAnnotateFileRequest { + if m != nil { + return m.Requests + } + return nil +} + +// Response to an async batch file annotation request. 
+type AsyncBatchAnnotateFilesResponse struct { + // The list of file annotation responses, one for each request in + // AsyncBatchAnnotateFilesRequest. + Responses []*AsyncAnnotateFileResponse `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AsyncBatchAnnotateFilesResponse) Reset() { *m = AsyncBatchAnnotateFilesResponse{} } +func (m *AsyncBatchAnnotateFilesResponse) String() string { return proto.CompactTextString(m) } +func (*AsyncBatchAnnotateFilesResponse) ProtoMessage() {} +func (*AsyncBatchAnnotateFilesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{32} +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Unmarshal(m, b) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Marshal(b, m, deterministic) +} +func (dst *AsyncBatchAnnotateFilesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Merge(dst, src) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_Size() int { + return xxx_messageInfo_AsyncBatchAnnotateFilesResponse.Size(m) +} +func (m *AsyncBatchAnnotateFilesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AsyncBatchAnnotateFilesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AsyncBatchAnnotateFilesResponse proto.InternalMessageInfo + +func (m *AsyncBatchAnnotateFilesResponse) GetResponses() []*AsyncAnnotateFileResponse { + if m != nil { + return m.Responses + } + return nil +} + +// The desired input location and metadata. +type InputConfig struct { + // The Google Cloud Storage location to read the input from. + GcsSource *GcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3" json:"gcs_source,omitempty"` + // File content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + // + // Currently, this field only works for BatchAnnotateFiles requests. It does + // not work for AsyncBatchAnnotateFiles requests. + Content []byte `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + // The type of the file. Currently only "application/pdf" and "image/tiff" + // are supported. Wildcards are not supported. 
+ MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputConfig) Reset() { *m = InputConfig{} } +func (m *InputConfig) String() string { return proto.CompactTextString(m) } +func (*InputConfig) ProtoMessage() {} +func (*InputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{33} +} +func (m *InputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputConfig.Unmarshal(m, b) +} +func (m *InputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputConfig.Marshal(b, m, deterministic) +} +func (dst *InputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputConfig.Merge(dst, src) +} +func (m *InputConfig) XXX_Size() int { + return xxx_messageInfo_InputConfig.Size(m) +} +func (m *InputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InputConfig proto.InternalMessageInfo + +func (m *InputConfig) GetGcsSource() *GcsSource { + if m != nil { + return m.GcsSource + } + return nil +} + +func (m *InputConfig) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *InputConfig) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// The desired output location and metadata. +type OutputConfig struct { + // The Google Cloud Storage location to write the output(s) to. + GcsDestination *GcsDestination `protobuf:"bytes,1,opt,name=gcs_destination,json=gcsDestination,proto3" json:"gcs_destination,omitempty"` + // The max number of response protos to put into each output JSON file on + // Google Cloud Storage. + // The valid range is [1, 100]. If not specified, the default value is 20. + // + // For example, for one pdf file with 100 pages, 100 response protos will + // be generated. If `batch_size` = 20, then 5 json files each + // containing 20 response protos will be written under the prefix + // `gcs_destination`.`uri`. + // + // Currently, batch_size only applies to GcsDestination, with potential future + // support for other output configurations. 
+ BatchSize int32 `protobuf:"varint,2,opt,name=batch_size,json=batchSize,proto3" json:"batch_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputConfig) Reset() { *m = OutputConfig{} } +func (m *OutputConfig) String() string { return proto.CompactTextString(m) } +func (*OutputConfig) ProtoMessage() {} +func (*OutputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{34} +} +func (m *OutputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputConfig.Unmarshal(m, b) +} +func (m *OutputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputConfig.Marshal(b, m, deterministic) +} +func (dst *OutputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputConfig.Merge(dst, src) +} +func (m *OutputConfig) XXX_Size() int { + return xxx_messageInfo_OutputConfig.Size(m) +} +func (m *OutputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputConfig proto.InternalMessageInfo + +func (m *OutputConfig) GetGcsDestination() *GcsDestination { + if m != nil { + return m.GcsDestination + } + return nil +} + +func (m *OutputConfig) GetBatchSize() int32 { + if m != nil { + return m.BatchSize + } + return 0 +} + +// The Google Cloud Storage location where the input will be read from. +type GcsSource struct { + // Google Cloud Storage URI for the input file. This must only be a + // Google Cloud Storage object. Wildcards are not currently supported. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsSource) Reset() { *m = GcsSource{} } +func (m *GcsSource) String() string { return proto.CompactTextString(m) } +func (*GcsSource) ProtoMessage() {} +func (*GcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{35} +} +func (m *GcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsSource.Unmarshal(m, b) +} +func (m *GcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsSource.Marshal(b, m, deterministic) +} +func (dst *GcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsSource.Merge(dst, src) +} +func (m *GcsSource) XXX_Size() int { + return xxx_messageInfo_GcsSource.Size(m) +} +func (m *GcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_GcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsSource proto.InternalMessageInfo + +func (m *GcsSource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// The Google Cloud Storage location where the output will be written to. +type GcsDestination struct { + // Google Cloud Storage URI where the results will be stored. Results will + // be in JSON format and preceded by its corresponding input URI. This field + // can either represent a single file, or a prefix for multiple outputs. + // Prefixes must end in a `/`. + // + // Examples: + // + // * File: gs://bucket-name/filename.json + // * Prefix: gs://bucket-name/prefix/here/ + // * File: gs://bucket-name/prefix/here + // + // If multiple outputs, each response is still AnnotateFileResponse, each of + // which contains some subset of the full list of AnnotateImageResponse. 
+ // Multiple outputs can happen if, for example, the output JSON is too large + // and overflows into multiple sharded files. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsDestination) Reset() { *m = GcsDestination{} } +func (m *GcsDestination) String() string { return proto.CompactTextString(m) } +func (*GcsDestination) ProtoMessage() {} +func (*GcsDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{36} +} +func (m *GcsDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsDestination.Unmarshal(m, b) +} +func (m *GcsDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsDestination.Marshal(b, m, deterministic) +} +func (dst *GcsDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsDestination.Merge(dst, src) +} +func (m *GcsDestination) XXX_Size() int { + return xxx_messageInfo_GcsDestination.Size(m) +} +func (m *GcsDestination) XXX_DiscardUnknown() { + xxx_messageInfo_GcsDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsDestination proto.InternalMessageInfo + +func (m *GcsDestination) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +// Contains metadata for the BatchAnnotateImages operation. +type OperationMetadata struct { + // Current state of the batch operation. + State OperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1p4beta1.OperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was received. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time when the operation result was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_image_annotator_4a9ef5af0fc4f4ff, []int{37} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetState() OperationMetadata_State { + if m != nil { + return m.State + } + return OperationMetadata_STATE_UNSPECIFIED +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func init() { + proto.RegisterType((*Feature)(nil), "google.cloud.vision.v1p4beta1.Feature") + proto.RegisterType((*ImageSource)(nil), "google.cloud.vision.v1p4beta1.ImageSource") + proto.RegisterType((*Image)(nil), "google.cloud.vision.v1p4beta1.Image") + proto.RegisterType((*FaceAnnotation)(nil), "google.cloud.vision.v1p4beta1.FaceAnnotation") + proto.RegisterType((*FaceAnnotation_Landmark)(nil), "google.cloud.vision.v1p4beta1.FaceAnnotation.Landmark") + proto.RegisterType((*LocationInfo)(nil), "google.cloud.vision.v1p4beta1.LocationInfo") + proto.RegisterType((*Property)(nil), "google.cloud.vision.v1p4beta1.Property") + proto.RegisterType((*EntityAnnotation)(nil), "google.cloud.vision.v1p4beta1.EntityAnnotation") + proto.RegisterType((*LocalizedObjectAnnotation)(nil), "google.cloud.vision.v1p4beta1.LocalizedObjectAnnotation") + proto.RegisterType((*SafeSearchAnnotation)(nil), "google.cloud.vision.v1p4beta1.SafeSearchAnnotation") + proto.RegisterType((*LatLongRect)(nil), "google.cloud.vision.v1p4beta1.LatLongRect") + proto.RegisterType((*ColorInfo)(nil), "google.cloud.vision.v1p4beta1.ColorInfo") + proto.RegisterType((*DominantColorsAnnotation)(nil), "google.cloud.vision.v1p4beta1.DominantColorsAnnotation") + proto.RegisterType((*ImageProperties)(nil), "google.cloud.vision.v1p4beta1.ImageProperties") + proto.RegisterType((*CropHint)(nil), "google.cloud.vision.v1p4beta1.CropHint") + proto.RegisterType((*CropHintsAnnotation)(nil), "google.cloud.vision.v1p4beta1.CropHintsAnnotation") + proto.RegisterType((*CropHintsParams)(nil), "google.cloud.vision.v1p4beta1.CropHintsParams") + proto.RegisterType((*WebDetectionParams)(nil), "google.cloud.vision.v1p4beta1.WebDetectionParams") + proto.RegisterType((*ImageContext)(nil), "google.cloud.vision.v1p4beta1.ImageContext") + proto.RegisterType((*AnnotateImageRequest)(nil), 
"google.cloud.vision.v1p4beta1.AnnotateImageRequest") + proto.RegisterType((*ImageAnnotationContext)(nil), "google.cloud.vision.v1p4beta1.ImageAnnotationContext") + proto.RegisterType((*AnnotateImageResponse)(nil), "google.cloud.vision.v1p4beta1.AnnotateImageResponse") + proto.RegisterType((*AnnotateFileResponse)(nil), "google.cloud.vision.v1p4beta1.AnnotateFileResponse") + proto.RegisterType((*BatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest") + proto.RegisterType((*BatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse") + proto.RegisterType((*AnnotateFileRequest)(nil), "google.cloud.vision.v1p4beta1.AnnotateFileRequest") + proto.RegisterType((*BatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest") + proto.RegisterType((*BatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1p4beta1.BatchAnnotateFilesResponse") + proto.RegisterType((*AsyncAnnotateFileRequest)(nil), "google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest") + proto.RegisterType((*AsyncAnnotateFileResponse)(nil), "google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse") + proto.RegisterType((*AsyncBatchAnnotateImagesRequest)(nil), "google.cloud.vision.v1p4beta1.AsyncBatchAnnotateImagesRequest") + proto.RegisterType((*AsyncBatchAnnotateImagesResponse)(nil), "google.cloud.vision.v1p4beta1.AsyncBatchAnnotateImagesResponse") + proto.RegisterType((*AsyncBatchAnnotateFilesRequest)(nil), "google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest") + proto.RegisterType((*AsyncBatchAnnotateFilesResponse)(nil), "google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesResponse") + proto.RegisterType((*InputConfig)(nil), "google.cloud.vision.v1p4beta1.InputConfig") + proto.RegisterType((*OutputConfig)(nil), "google.cloud.vision.v1p4beta1.OutputConfig") + proto.RegisterType((*GcsSource)(nil), "google.cloud.vision.v1p4beta1.GcsSource") + proto.RegisterType((*GcsDestination)(nil), "google.cloud.vision.v1p4beta1.GcsDestination") + proto.RegisterType((*OperationMetadata)(nil), "google.cloud.vision.v1p4beta1.OperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1p4beta1.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.cloud.vision.v1p4beta1.Feature_Type", Feature_Type_name, Feature_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p4beta1.FaceAnnotation_Landmark_Type", FaceAnnotation_Landmark_Type_name, FaceAnnotation_Landmark_Type_value) + proto.RegisterEnum("google.cloud.vision.v1p4beta1.OperationMetadata_State", OperationMetadata_State_name, OperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ImageAnnotatorClient is the client API for ImageAnnotator service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ImageAnnotatorClient interface { + // Run image detection and annotation for a batch of images. + BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) + // Service that performs image detection and annotation for a batch of files. 
+ // Now only "application/pdf", "image/tiff" and "image/gif" are supported. + // + // This service will extract at most the first 10 frames (gif) or pages + // (pdf or tiff) from each file provided and perform detection and annotation + // for each image extracted. + BatchAnnotateFiles(ctx context.Context, in *BatchAnnotateFilesRequest, opts ...grpc.CallOption) (*BatchAnnotateFilesResponse, error) + // Run asynchronous image detection and annotation for a list of images. + // + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results). + // + // This service will write image annotation outputs to json files in customer + // GCS bucket, each json file containing BatchAnnotateImagesResponse proto. + AsyncBatchAnnotateImages(ctx context.Context, in *AsyncBatchAnnotateImagesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). + AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type imageAnnotatorClient struct { + cc *grpc.ClientConn +} + +func NewImageAnnotatorClient(cc *grpc.ClientConn) ImageAnnotatorClient { + return &imageAnnotatorClient{cc} +} + +func (c *imageAnnotatorClient) BatchAnnotateImages(ctx context.Context, in *BatchAnnotateImagesRequest, opts ...grpc.CallOption) (*BatchAnnotateImagesResponse, error) { + out := new(BatchAnnotateImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ImageAnnotator/BatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) BatchAnnotateFiles(ctx context.Context, in *BatchAnnotateFilesRequest, opts ...grpc.CallOption) (*BatchAnnotateFilesResponse, error) { + out := new(BatchAnnotateFilesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ImageAnnotator/BatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateImages(ctx context.Context, in *AsyncBatchAnnotateImagesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ImageAnnotator/AsyncBatchAnnotateImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *imageAnnotatorClient) AsyncBatchAnnotateFiles(ctx context.Context, in *AsyncBatchAnnotateFilesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ImageAnnotator/AsyncBatchAnnotateFiles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ImageAnnotatorServer is the server API for ImageAnnotator service. +type ImageAnnotatorServer interface { + // Run image detection and annotation for a batch of images. 
+ BatchAnnotateImages(context.Context, *BatchAnnotateImagesRequest) (*BatchAnnotateImagesResponse, error) + // Service that performs image detection and annotation for a batch of files. + // Now only "application/pdf", "image/tiff" and "image/gif" are supported. + // + // This service will extract at most the first 10 frames (gif) or pages + // (pdf or tiff) from each file provided and perform detection and annotation + // for each image extracted. + BatchAnnotateFiles(context.Context, *BatchAnnotateFilesRequest) (*BatchAnnotateFilesResponse, error) + // Run asynchronous image detection and annotation for a list of images. + // + // Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results). + // + // This service will write image annotation outputs to json files in customer + // GCS bucket, each json file containing BatchAnnotateImagesResponse proto. + AsyncBatchAnnotateImages(context.Context, *AsyncBatchAnnotateImagesRequest) (*longrunning.Operation, error) + // Run asynchronous image detection and annotation for a list of generic + // files, such as PDF files, which may contain multiple pages and multiple + // images per page. Progress and results can be retrieved through the + // `google.longrunning.Operations` interface. + // `Operation.metadata` contains `OperationMetadata` (metadata). + // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). + AsyncBatchAnnotateFiles(context.Context, *AsyncBatchAnnotateFilesRequest) (*longrunning.Operation, error) +} + +func RegisterImageAnnotatorServer(s *grpc.Server, srv ImageAnnotatorServer) { + s.RegisterService(&_ImageAnnotator_serviceDesc, srv) +} + +func _ImageAnnotator_BatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ImageAnnotator/BatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateImages(ctx, req.(*BatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_BatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).BatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ImageAnnotator/BatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).BatchAnnotateFiles(ctx, req.(*BatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateImagesRequest) + if err := dec(in); err 
!= nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ImageAnnotator/AsyncBatchAnnotateImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateImages(ctx, req.(*AsyncBatchAnnotateImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ImageAnnotator_AsyncBatchAnnotateFiles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AsyncBatchAnnotateFilesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ImageAnnotator/AsyncBatchAnnotateFiles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ImageAnnotatorServer).AsyncBatchAnnotateFiles(ctx, req.(*AsyncBatchAnnotateFilesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ImageAnnotator_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p4beta1.ImageAnnotator", + HandlerType: (*ImageAnnotatorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchAnnotateImages", + Handler: _ImageAnnotator_BatchAnnotateImages_Handler, + }, + { + MethodName: "BatchAnnotateFiles", + Handler: _ImageAnnotator_BatchAnnotateFiles_Handler, + }, + { + MethodName: "AsyncBatchAnnotateImages", + Handler: _ImageAnnotator_AsyncBatchAnnotateImages_Handler, + }, + { + MethodName: "AsyncBatchAnnotateFiles", + Handler: _ImageAnnotator_AsyncBatchAnnotateFiles_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p4beta1/image_annotator.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/image_annotator.proto", fileDescriptor_image_annotator_4a9ef5af0fc4f4ff) +} + +var fileDescriptor_image_annotator_4a9ef5af0fc4f4ff = []byte{ + // 3262 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x5a, 0x41, 0x6f, 0x23, 0xc7, + 0x95, 0x36, 0x49, 0x51, 0x22, 0x1f, 0x29, 0xa9, 0x55, 0x94, 0x34, 0x94, 0x66, 0x34, 0x23, 0xb7, + 0xed, 0x5d, 0xd9, 0xeb, 0x95, 0x30, 0x1a, 0xdb, 0x6b, 0x8f, 0xd7, 0xeb, 0xa5, 0xa8, 0x96, 0xc4, + 0x1d, 0x8a, 0xa4, 0x4b, 0xd4, 0x8c, 0x67, 0xe0, 0x45, 0x6f, 0xab, 0x59, 0xe4, 0xb4, 0xa7, 0xd9, + 0xd5, 0xee, 0x6e, 0xce, 0x48, 0x73, 0x5c, 0x60, 0x0f, 0x7b, 0x58, 0x60, 0x81, 0xdc, 0x73, 0x48, + 0x10, 0x18, 0x08, 0x10, 0xe7, 0x90, 0xfc, 0x83, 0xe4, 0x9c, 0x20, 0xc8, 0x21, 0x40, 0xae, 0xc9, + 0x21, 0x3f, 0x21, 0xc7, 0xa0, 0xaa, 0xab, 0xc9, 0x6a, 0x92, 0x23, 0x8a, 0x33, 0x76, 0x10, 0x20, + 0x27, 0x76, 0xbd, 0x57, 0xdf, 0xf7, 0xaa, 0xde, 0xab, 0x7a, 0xf5, 0xba, 0xd8, 0x70, 0xa7, 0x43, + 0x69, 0xc7, 0x26, 0x3b, 0xa6, 0x4d, 0x7b, 0xad, 0x9d, 0xa7, 0x96, 0x6f, 0x51, 0x67, 0xe7, 0xe9, + 0x6d, 0xf7, 0xbd, 0x33, 0x12, 0x18, 0xb7, 0x77, 0xac, 0xae, 0xd1, 0x21, 0xba, 0xe1, 0x38, 0x34, + 0x30, 0x02, 0xea, 0x6d, 0xbb, 0x1e, 0x0d, 0x28, 0xda, 0x08, 0x41, 0xdb, 0x1c, 0xb4, 0x1d, 0x82, + 0xb6, 0xfb, 0xa0, 0xf5, 0x1b, 0x82, 0xd3, 0x70, 0xad, 0x1d, 0x01, 0xb5, 0xa8, 0xe3, 0x87, 0xe0, + 0xf5, 0x77, 0x2f, 0xb7, 0xd8, 0x21, 0xb4, 0x4b, 0x02, 0xef, 0x42, 0xf4, 0xde, 0xbd, 0xbc, 0xb7, + 0xeb, 0xd1, 0x56, 0xcf, 
0x0c, 0x74, 0x9f, 0x18, 0x9e, 0xf9, 0x58, 0x60, 0x26, 0xcc, 0x29, 0x20, + 0xe7, 0x81, 0x3e, 0x18, 0x97, 0x00, 0xdd, 0xbe, 0x1c, 0xf4, 0x8c, 0x9c, 0xe9, 0x2d, 0x12, 0x10, + 0x53, 0x82, 0xbc, 0x21, 0x20, 0x36, 0x75, 0x3a, 0x5e, 0xcf, 0x71, 0x2c, 0xa7, 0xb3, 0x43, 0x5d, + 0xe2, 0xc5, 0xa6, 0xbb, 0x29, 0x3a, 0xf1, 0xd6, 0x59, 0xaf, 0xbd, 0xd3, 0xb6, 0x88, 0xdd, 0xd2, + 0xbb, 0x86, 0xff, 0x44, 0xf4, 0xb8, 0x35, 0xdc, 0x23, 0xb0, 0xba, 0xc4, 0x0f, 0x8c, 0xae, 0x2b, + 0x3a, 0x5c, 0x13, 0x1d, 0x3c, 0xd7, 0xdc, 0xf1, 0x03, 0x23, 0xe8, 0xf9, 0x43, 0x8a, 0xe0, 0xc2, + 0x25, 0x3b, 0x26, 0xb5, 0xa3, 0x00, 0xad, 0x17, 0x65, 0x85, 0x6d, 0x04, 0xb6, 0xd3, 0x09, 0x35, + 0xea, 0xd7, 0x29, 0x98, 0x3b, 0x20, 0x46, 0xd0, 0xf3, 0x08, 0xfa, 0x14, 0x66, 0x58, 0x87, 0x62, + 0x62, 0x33, 0xb1, 0xb5, 0xb0, 0xfb, 0x4f, 0xdb, 0x97, 0x46, 0x75, 0x5b, 0xa0, 0xb6, 0x9b, 0x17, + 0x2e, 0xc1, 0x1c, 0x88, 0x6e, 0x41, 0xae, 0x6b, 0x9c, 0xeb, 0x1e, 0xf1, 0x7b, 0x76, 0xe0, 0x17, + 0x93, 0x9b, 0x89, 0xad, 0x34, 0x86, 0xae, 0x71, 0x8e, 0x43, 0x09, 0x5a, 0x86, 0x74, 0x97, 0xb6, + 0x88, 0x5d, 0x4c, 0x6d, 0x26, 0xb6, 0xb2, 0x38, 0x6c, 0xa8, 0x3f, 0x4c, 0xc2, 0x0c, 0x63, 0x41, + 0xcb, 0xa0, 0x34, 0x1f, 0x36, 0x34, 0xfd, 0xb4, 0x76, 0xd2, 0xd0, 0xca, 0x95, 0x83, 0x8a, 0xb6, + 0xaf, 0xbc, 0x86, 0x10, 0x2c, 0x1c, 0x94, 0xca, 0x9a, 0xbe, 0xaf, 0x35, 0xb5, 0x72, 0xb3, 0x52, + 0xaf, 0x29, 0x09, 0xb4, 0x0a, 0xa8, 0x5a, 0xaa, 0xed, 0x1f, 0x97, 0xf0, 0x3d, 0x49, 0x9e, 0x64, + 0x7d, 0xab, 0xf5, 0xc3, 0xba, 0x24, 0x4b, 0xa1, 0x02, 0x2c, 0x56, 0x4b, 0x7b, 0x5a, 0x55, 0x12, + 0xce, 0xb0, 0x8e, 0x4d, 0xed, 0xf3, 0xa6, 0x24, 0x4b, 0xa3, 0xeb, 0x70, 0x6d, 0xbf, 0x5e, 0x3e, + 0x3d, 0xd6, 0x6a, 0x4d, 0x7d, 0x48, 0x99, 0x43, 0x6b, 0xb0, 0x72, 0x52, 0x3a, 0xd0, 0xf4, 0x13, + 0xad, 0x84, 0xcb, 0x47, 0x92, 0x6a, 0x96, 0x0d, 0xbb, 0x72, 0x5c, 0x3a, 0xd4, 0xf4, 0x06, 0xae, + 0x37, 0x34, 0xdc, 0xac, 0x68, 0x27, 0xca, 0x1c, 0x5a, 0x00, 0x28, 0xe3, 0x7a, 0x43, 0x3f, 0xaa, + 0xd4, 0x9a, 0x27, 0x4a, 0x16, 0x2d, 0xc1, 0xfc, 0x03, 0x6d, 0x4f, 0x02, 0x02, 0x1b, 0x44, 0x03, + 0xd7, 0xf7, 0x4f, 0xcb, 0x4d, 0x41, 0xab, 0xe4, 0xd1, 0x35, 0x28, 0xd4, 0xf7, 0xfe, 0x43, 0x2b, + 0x37, 0xf5, 0x6a, 0xbd, 0x5c, 0xaa, 0x56, 0x1e, 0x95, 0x78, 0xe7, 0x82, 0x5a, 0x83, 0x5c, 0x85, + 0xed, 0xbe, 0x13, 0xda, 0xf3, 0x4c, 0x82, 0x54, 0x98, 0xef, 0x98, 0xbe, 0x1e, 0x6e, 0xc8, 0x9e, + 0x67, 0xf1, 0xa8, 0x65, 0x71, 0xae, 0x63, 0xfa, 0xbc, 0xdb, 0xa9, 0x67, 0xa1, 0xeb, 0x90, 0x1d, + 0xe8, 0x93, 0x5c, 0x9f, 0xb1, 0x84, 0x52, 0x25, 0x90, 0xe6, 0x1d, 0x51, 0x11, 0xe6, 0x4c, 0xea, + 0x04, 0xc4, 0x09, 0x38, 0x47, 0x1e, 0x47, 0x4d, 0xb4, 0x07, 0xb3, 0x3e, 0xb7, 0xc6, 0xc1, 0xb9, + 0xdd, 0x77, 0x26, 0x2c, 0x09, 0x69, 0x7c, 0x58, 0x20, 0xd5, 0x1f, 0x29, 0xb0, 0x70, 0x60, 0x98, + 0xa4, 0xd4, 0xdf, 0x60, 0xa8, 0x01, 0xf3, 0x67, 0xb4, 0xe7, 0xb4, 0x2c, 0xa7, 0xa3, 0xbb, 0xd4, + 0xbe, 0xe0, 0x66, 0x73, 0x13, 0x17, 0xdc, 0x9e, 0xc0, 0x34, 0xa8, 0x7d, 0x81, 0xf3, 0x67, 0x52, + 0x0b, 0x9d, 0x82, 0xd2, 0x6e, 0xe9, 0x71, 0xd2, 0xe4, 0xf4, 0xa4, 0x0b, 0xed, 0x96, 0xdc, 0x46, + 0x4d, 0xc8, 0xda, 0x86, 0xd3, 0xea, 0x1a, 0xde, 0x13, 0xbf, 0x98, 0xda, 0x4c, 0x6d, 0xe5, 0x76, + 0x3f, 0x98, 0xb4, 0x2b, 0x62, 0x53, 0xdd, 0xae, 0x0a, 0x38, 0x1e, 0x10, 0xa1, 0x0d, 0x00, 0x8f, + 0xda, 0xb6, 0x6e, 0x38, 0x1d, 0x9b, 0x14, 0x67, 0x36, 0x13, 0x5b, 0x49, 0x9c, 0x65, 0x92, 0x12, + 0x13, 0xb0, 0xa0, 0xb9, 0x86, 0x23, 0xb4, 0x69, 0xae, 0xcd, 0xb8, 0x86, 0x13, 0x2a, 0x37, 0x00, + 0x02, 0xcb, 0x0e, 0x84, 0x76, 0x36, 0xc4, 0x32, 0x49, 0xa8, 0xbe, 0x0d, 0xcb, 0xfd, 0xa4, 0xa4, + 0x9b, 0xd4, 0x69, 0x5b, 0x2d, 0xe2, 0x98, 0xa4, 
0x38, 0xc7, 0x3b, 0x16, 0xfa, 0xba, 0x72, 0x5f, + 0x85, 0xde, 0x87, 0xd5, 0x68, 0x68, 0xcc, 0x75, 0x12, 0x28, 0xc3, 0x41, 0x2b, 0x92, 0x56, 0x82, + 0x35, 0x60, 0xe1, 0x4b, 0x7a, 0xa1, 0xdb, 0xd6, 0x13, 0x62, 0x5b, 0x8f, 0x29, 0x6d, 0x15, 0xb3, + 0x3c, 0x6b, 0xbc, 0x3d, 0xc1, 0x3f, 0xd5, 0x3e, 0x00, 0xcf, 0x7f, 0x49, 0x2f, 0x06, 0x4d, 0x74, + 0x1f, 0x96, 0x7c, 0xea, 0x79, 0xf4, 0x99, 0x4c, 0x0a, 0xd3, 0x92, 0x2a, 0x21, 0x87, 0xc4, 0xdb, + 0x04, 0xc5, 0x70, 0x3a, 0xc4, 0x93, 0x69, 0x73, 0xd3, 0xd2, 0x2e, 0x72, 0x0a, 0x89, 0xf5, 0x11, + 0x14, 0xfc, 0x9e, 0xe7, 0x7a, 0x96, 0x4f, 0x64, 0xe2, 0xfc, 0xb4, 0xc4, 0x28, 0x62, 0x91, 0xb8, + 0x4d, 0x28, 0xf6, 0x9c, 0x16, 0xf1, 0x74, 0x72, 0xee, 0x52, 0x9f, 0xb4, 0x64, 0x03, 0xf3, 0xd3, + 0x1a, 0x58, 0xe5, 0x54, 0x5a, 0xc8, 0x24, 0x19, 0xf9, 0x1c, 0xd0, 0x99, 0xdd, 0xf3, 0xbc, 0x38, + 0xfd, 0xc2, 0xb4, 0xf4, 0x4b, 0x82, 0x24, 0xee, 0x9a, 0xc7, 0xc4, 0x68, 0x3d, 0x23, 0x46, 0xcc, + 0xe7, 0x8b, 0x53, 0xbb, 0x26, 0x62, 0x19, 0xc8, 0xd6, 0x7f, 0x37, 0x07, 0x99, 0x68, 0x4f, 0xa1, + 0xba, 0x38, 0xaf, 0x52, 0x9c, 0xf9, 0xe3, 0x97, 0xdb, 0x99, 0xf2, 0xf9, 0x55, 0x86, 0x8c, 0x4b, + 0x7d, 0x8b, 0xe9, 0xf9, 0xbe, 0xcc, 0xed, 0xfe, 0xe3, 0x04, 0xd2, 0x86, 0xe8, 0x8e, 0xfb, 0x40, + 0xf5, 0xe7, 0xb3, 0x83, 0xd3, 0xec, 0xb4, 0x76, 0xaf, 0x56, 0x7f, 0x50, 0xd3, 0xa3, 0xb3, 0x4a, + 0x79, 0x0d, 0xe5, 0x21, 0x53, 0xd5, 0x0e, 0x9a, 0xba, 0xf6, 0x50, 0x53, 0x12, 0x68, 0x1e, 0xb2, + 0xb8, 0x72, 0x78, 0x14, 0x36, 0x93, 0xa8, 0x08, 0xcb, 0x5c, 0x59, 0x3f, 0xd0, 0xa3, 0x4e, 0x7b, + 0xb8, 0xfe, 0x40, 0x49, 0xb1, 0xe3, 0x27, 0xec, 0x38, 0xac, 0x9a, 0x61, 0xaa, 0x08, 0xd4, 0xe7, + 0xe2, 0xaa, 0x34, 0x5a, 0x87, 0xd5, 0x3e, 0x2a, 0xae, 0x9b, 0x65, 0xb0, 0xe3, 0xca, 0x7e, 0xa3, + 0x5e, 0xa9, 0x35, 0xf5, 0x3d, 0xad, 0xf9, 0x40, 0xd3, 0x6a, 0x4c, 0xcb, 0x8e, 0xae, 0x3c, 0x64, + 0x6a, 0xf5, 0x13, 0x4d, 0x6f, 0x56, 0x1a, 0x4a, 0x86, 0x8d, 0xf1, 0xb4, 0xd1, 0xd0, 0xb0, 0x5e, + 0xad, 0x34, 0x94, 0x2c, 0x6b, 0x56, 0xeb, 0x0f, 0x44, 0x13, 0xd8, 0x31, 0x77, 0x5c, 0x3f, 0x6d, + 0x1e, 0xf1, 0x51, 0x29, 0x39, 0xb4, 0x08, 0xb9, 0xb0, 0xcd, 0xed, 0x29, 0x79, 0xa4, 0x40, 0x3e, + 0x14, 0x94, 0xb5, 0x5a, 0x53, 0xc3, 0xca, 0x3c, 0x5a, 0x81, 0x25, 0x4e, 0xbf, 0x57, 0x6f, 0x36, + 0xeb, 0xc7, 0xa2, 0xe3, 0x02, 0xf3, 0x97, 0x2c, 0xe6, 0x7c, 0x8b, 0xec, 0xa4, 0x97, 0xa5, 0x82, + 0x44, 0xe9, 0xcf, 0x5a, 0x7b, 0xa8, 0xe9, 0xcd, 0x7a, 0x43, 0xdf, 0xab, 0x9f, 0xd6, 0xf6, 0x4b, + 0xf8, 0xa1, 0xb2, 0x14, 0x53, 0x85, 0xb3, 0x2e, 0xd7, 0x71, 0x4d, 0xc3, 0x0a, 0x42, 0x37, 0xa0, + 0xd8, 0x57, 0x09, 0xc6, 0x3e, 0xb0, 0xd0, 0x77, 0x3f, 0xd3, 0xf2, 0x07, 0x81, 0x5b, 0x1e, 0x38, + 0x72, 0xc4, 0xdc, 0x4a, 0x5c, 0x17, 0xb3, 0xb7, 0x8a, 0x36, 0x60, 0x6d, 0xa0, 0x1b, 0x36, 0x78, + 0x6d, 0x10, 0xd5, 0x61, 0x8b, 0x45, 0x74, 0x0b, 0xae, 0xcb, 0x71, 0xd6, 0xc3, 0x10, 0x44, 0x11, + 0x53, 0xd6, 0xd0, 0x26, 0xdc, 0x88, 0x85, 0x74, 0xb8, 0xc7, 0x3a, 0x73, 0x68, 0x48, 0x51, 0xc2, + 0x7a, 0x13, 0x97, 0x0e, 0x59, 0x1d, 0x71, 0x9d, 0x79, 0x5f, 0xe0, 0x24, 0xf1, 0x0d, 0x5e, 0x39, + 0x45, 0x73, 0x6f, 0x9c, 0x36, 0x2a, 0x55, 0x65, 0x83, 0x55, 0x4e, 0x83, 0xe1, 0x85, 0xc2, 0x9b, + 0x0c, 0x7f, 0x50, 0xc7, 0xda, 0x91, 0x56, 0xda, 0xd7, 0x0f, 0x79, 0x61, 0x55, 0x2d, 0x29, 0xb7, + 0x58, 0x79, 0x53, 0x3e, 0xaa, 0xd4, 0xf4, 0xc3, 0x5a, 0xa9, 0x79, 0xc4, 0x28, 0x37, 0x99, 0x7d, + 0x2e, 0xe2, 0xbc, 0x87, 0xf5, 0x1a, 0x93, 0xbe, 0xce, 0xf0, 0x5c, 0x1a, 0x32, 0x0b, 0xb1, 0xaa, + 0xfe, 0x2b, 0xe4, 0xab, 0xd4, 0xe4, 0x7b, 0xb3, 0xe2, 0xb4, 0x29, 0x7a, 0x17, 0xe6, 0x6c, 0x23, + 0xd0, 0x6d, 0xa7, 0x23, 0xca, 0x83, 0x42, 0xb4, 0x15, 0xd9, 0x56, 0xdd, 
0xae, 0x1a, 0x41, 0xd5, + 0xe9, 0xe0, 0x59, 0x9b, 0xff, 0xaa, 0x0f, 0x20, 0xd3, 0xf0, 0x58, 0xad, 0x1d, 0x5c, 0x20, 0x04, + 0x33, 0x8e, 0xd1, 0x25, 0xa2, 0x20, 0xe2, 0xcf, 0xac, 0xf0, 0x7c, 0x6a, 0xd8, 0x3d, 0x22, 0xaa, + 0xa0, 0xb0, 0x81, 0x5e, 0x87, 0x7c, 0xcf, 0x72, 0x82, 0x0f, 0xde, 0xd3, 0x43, 0x25, 0x4b, 0x24, + 0x33, 0x38, 0x17, 0xca, 0xee, 0x33, 0x91, 0xfa, 0xfd, 0x14, 0x28, 0x9a, 0x13, 0x58, 0xc1, 0x85, + 0x54, 0xc0, 0x28, 0x90, 0xea, 0x5a, 0x2d, 0x61, 0x80, 0x3d, 0xa2, 0x55, 0x98, 0xb5, 0xa9, 0x69, + 0xd8, 0x91, 0x01, 0xd1, 0x42, 0x9b, 0x90, 0x6b, 0x11, 0xdf, 0xf4, 0x2c, 0x97, 0x27, 0x95, 0xb0, + 0xec, 0x95, 0x45, 0x6c, 0x64, 0xbe, 0x49, 0xbd, 0xa8, 0x10, 0x08, 0x1b, 0x48, 0x05, 0x90, 0x4e, + 0x62, 0x5e, 0x05, 0xec, 0x25, 0x8b, 0x09, 0x2c, 0x49, 0xd1, 0x4d, 0x80, 0x80, 0xba, 0x96, 0x69, + 0xd8, 0x56, 0x70, 0x21, 0x6a, 0x01, 0x49, 0x32, 0x5a, 0x66, 0xcd, 0xbd, 0x6a, 0x99, 0x55, 0x81, + 0xac, 0x2d, 0x62, 0xe4, 0x17, 0x33, 0xbc, 0x1e, 0x9a, 0xc4, 0x26, 0xc7, 0x14, 0x0f, 0xd0, 0xe8, + 0x10, 0xc0, 0x0d, 0x03, 0x66, 0x11, 0xbf, 0x98, 0xe5, 0x5c, 0x13, 0x93, 0xad, 0x88, 0x30, 0x96, + 0xa0, 0xea, 0xaf, 0x12, 0xb0, 0xc6, 0x8c, 0xd8, 0xd6, 0x73, 0xd2, 0xaa, 0x9f, 0x7d, 0x49, 0xcc, + 0xe0, 0xd2, 0x48, 0xbd, 0x01, 0xf3, 0xb6, 0xe1, 0x74, 0x7a, 0xac, 0x2c, 0x36, 0x69, 0x2b, 0x0a, + 0x58, 0x3e, 0x12, 0x96, 0x69, 0x8b, 0xf4, 0x97, 0x50, 0x2a, 0xbe, 0x84, 0xc6, 0x04, 0x6a, 0xc4, + 0xc9, 0xe9, 0x57, 0x74, 0xb2, 0xfa, 0x87, 0x24, 0x2c, 0x9f, 0x18, 0x6d, 0x72, 0xc2, 0x5f, 0x61, + 0xa5, 0xb9, 0x7c, 0x0a, 0x69, 0xa3, 0xd5, 0xb3, 0x03, 0xf1, 0x7e, 0x36, 0xc5, 0x49, 0x1a, 0xe2, + 0x18, 0x81, 0xef, 0x52, 0xda, 0xe6, 0x53, 0x9e, 0x8e, 0x80, 0xe3, 0x50, 0x19, 0xe6, 0xba, 0xa4, + 0xc5, 0xd6, 0x97, 0x38, 0x73, 0xa7, 0xa0, 0x88, 0x90, 0x48, 0x83, 0xcc, 0x53, 0x8b, 0xda, 0x7c, + 0x61, 0xcf, 0x4c, 0xcb, 0xd2, 0x87, 0xa2, 0x4f, 0x60, 0xc6, 0x33, 0xcc, 0x8b, 0xe9, 0xcb, 0x4e, + 0x0e, 0x53, 0x9f, 0x41, 0x8e, 0xa5, 0x10, 0xea, 0x74, 0x30, 0x31, 0x03, 0x74, 0x07, 0x72, 0x5d, + 0xcb, 0xd1, 0xaf, 0x90, 0x71, 0xb2, 0x5d, 0xcb, 0x09, 0x1f, 0x39, 0xc8, 0x38, 0xef, 0x83, 0x92, + 0x97, 0x81, 0x8c, 0xf3, 0xf0, 0x51, 0xf5, 0x20, 0x5b, 0x66, 0x6f, 0xe6, 0x3c, 0xc9, 0x6d, 0x41, + 0x9a, 0xbf, 0xa6, 0x0b, 0x83, 0x28, 0x86, 0xe5, 0xdd, 0x70, 0xd8, 0x61, 0xb0, 0xfa, 0x92, 0xf2, + 0xea, 0x7b, 0x0b, 0x16, 0x5c, 0xeb, 0x9c, 0xd8, 0x7a, 0xdb, 0x33, 0xcc, 0x7e, 0x86, 0x49, 0xe2, + 0x79, 0x2e, 0x3d, 0x10, 0x42, 0xf5, 0x0b, 0x28, 0xee, 0xd3, 0xae, 0xe5, 0x18, 0x4e, 0xc0, 0x49, + 0x7d, 0x69, 0x55, 0xfd, 0x3b, 0xcc, 0x72, 0x0b, 0x7e, 0x31, 0xc1, 0x37, 0xe1, 0xd6, 0x04, 0x4f, + 0xf6, 0x07, 0x8f, 0x05, 0x4e, 0xf5, 0x61, 0x91, 0xbf, 0xf8, 0x35, 0xfa, 0x9b, 0x12, 0xfd, 0x17, + 0x2c, 0xb6, 0x84, 0x41, 0xbd, 0xcf, 0xce, 0x66, 0xf8, 0x2f, 0x13, 0xd8, 0x5f, 0x34, 0x4c, 0xbc, + 0xd0, 0x8a, 0x69, 0xd4, 0x9f, 0x24, 0x20, 0x53, 0xf6, 0xa8, 0x7b, 0x64, 0x39, 0xc1, 0x77, 0xf0, + 0x42, 0x79, 0x33, 0x96, 0x7f, 0x43, 0x9f, 0xcb, 0xb9, 0x77, 0x07, 0x0a, 0x56, 0xd7, 0xa5, 0x5e, + 0x60, 0x38, 0x26, 0x19, 0xf6, 0x3e, 0x1a, 0xa8, 0xfa, 0x21, 0xf8, 0x4f, 0x28, 0x44, 0xc3, 0x95, + 0xbd, 0x7f, 0x00, 0x60, 0x7a, 0xd4, 0xd5, 0x1f, 0x33, 0xb9, 0x88, 0xc0, 0xa4, 0x34, 0x18, 0xf1, + 0xe0, 0xac, 0x19, 0x31, 0xaa, 0x1f, 0xc0, 0x62, 0x9f, 0xbe, 0x61, 0x78, 0x46, 0xd7, 0x67, 0x89, + 0xce, 0xf0, 0x5d, 0x62, 0x06, 0x3a, 0xbf, 0x80, 0x0a, 0xd9, 0x93, 0x38, 0x1f, 0x0a, 0x31, 0x97, + 0xa9, 0xfb, 0x80, 0x1e, 0x90, 0xb3, 0xfd, 0xe8, 0xbd, 0x50, 0x40, 0xb7, 0xa1, 0x60, 0x39, 0xa6, + 0xdd, 0x6b, 0x11, 0xbd, 0x43, 0x68, 0xec, 0x3e, 0x27, 0x83, 0x97, 0x84, 0xea, 0x90, 0x50, 0x71, + 
0xad, 0xa3, 0x7e, 0x93, 0x82, 0x3c, 0x5f, 0x02, 0x65, 0xea, 0x04, 0xe4, 0x3c, 0x40, 0x35, 0x96, + 0x64, 0x03, 0xdd, 0xa6, 0x4e, 0x47, 0xf7, 0x88, 0x19, 0x88, 0x80, 0x4c, 0xba, 0x3f, 0x90, 0x76, + 0x24, 0xce, 0xd9, 0xd2, 0xf6, 0x7c, 0x0b, 0x16, 0xfa, 0x49, 0x3b, 0x74, 0x55, 0x72, 0x33, 0xb5, + 0x95, 0xc5, 0xfd, 0x54, 0xce, 0x27, 0x8e, 0x1e, 0xc1, 0xd2, 0xc0, 0x9b, 0xba, 0xcb, 0x27, 0x23, + 0x0a, 0xf9, 0xed, 0x2b, 0x3a, 0x55, 0x78, 0x0f, 0x2f, 0x9a, 0x43, 0xee, 0x6c, 0xc3, 0x4a, 0xfc, + 0x72, 0x31, 0xe2, 0x0f, 0x13, 0xfe, 0xee, 0xe4, 0xb3, 0x8b, 0x61, 0xc3, 0xa4, 0x2e, 0x6c, 0x14, + 0xdc, 0x51, 0x21, 0x32, 0x61, 0x39, 0x76, 0xb7, 0x18, 0x99, 0x99, 0xe5, 0x66, 0x6e, 0x4f, 0x30, + 0x33, 0x1a, 0x4c, 0x8c, 0x9e, 0x8d, 0xc8, 0xd4, 0x3f, 0x25, 0x60, 0x59, 0xac, 0x42, 0xc2, 0x03, + 0x87, 0xc9, 0x57, 0x3d, 0xe2, 0x07, 0xe8, 0x2e, 0xa4, 0xf9, 0x05, 0x91, 0x08, 0xd8, 0x9b, 0x57, + 0xb9, 0xf0, 0xc1, 0x21, 0x04, 0xed, 0x41, 0xa6, 0x1d, 0xde, 0x09, 0x86, 0xe1, 0xc9, 0xed, 0xfe, + 0xc3, 0xd5, 0xae, 0x10, 0x71, 0x1f, 0xc7, 0x76, 0x72, 0x78, 0x63, 0x65, 0x86, 0x2b, 0x89, 0xef, + 0xa8, 0xc9, 0x3b, 0x59, 0x5e, 0x7c, 0x38, 0x6f, 0x49, 0x2d, 0xf5, 0x1e, 0xac, 0x72, 0xed, 0x60, + 0xd3, 0x45, 0x8b, 0x54, 0x81, 0xd4, 0xe0, 0xde, 0x8c, 0x3d, 0xa2, 0x5b, 0x90, 0x73, 0x99, 0x71, + 0xa7, 0xd7, 0x3d, 0x23, 0x5e, 0x74, 0x7f, 0xc9, 0x44, 0x35, 0x2e, 0x51, 0x7f, 0x01, 0xb0, 0x32, + 0xe4, 0x37, 0xdf, 0xa5, 0x8e, 0x4f, 0xd0, 0xe7, 0xa0, 0xb4, 0x0d, 0x93, 0x48, 0xf7, 0xc8, 0xd1, + 0x76, 0xfe, 0xe7, 0xa9, 0xde, 0x4b, 0xf1, 0x62, 0x3b, 0xd6, 0xf6, 0xd1, 0x19, 0x2c, 0x47, 0x57, + 0x30, 0x31, 0xf6, 0xd0, 0xc5, 0x3b, 0x13, 0xd8, 0x87, 0x6b, 0x57, 0x5c, 0x88, 0xc8, 0x64, 0x1b, + 0x8f, 0x40, 0xb1, 0x69, 0x87, 0xc6, 0xf8, 0x53, 0x2f, 0xc7, 0xbf, 0xc8, 0x88, 0x64, 0xee, 0x2f, + 0x60, 0xc9, 0x36, 0xce, 0x88, 0x1d, 0x23, 0x9f, 0x79, 0x39, 0x72, 0x85, 0x33, 0xc9, 0xec, 0xcf, + 0xe1, 0x86, 0x1d, 0x55, 0x7f, 0x3a, 0xe5, 0xe5, 0x5f, 0xcc, 0xd0, 0x2a, 0x37, 0xf4, 0xe1, 0x15, + 0xaa, 0xd4, 0xb1, 0x05, 0x24, 0x5e, 0xb7, 0x5f, 0xa4, 0xe2, 0x5e, 0x1b, 0xfa, 0xef, 0x80, 0x65, + 0x83, 0x97, 0xf3, 0x1a, 0x23, 0x92, 0xb9, 0x75, 0x58, 0x6e, 0xf7, 0x6c, 0x5b, 0x1f, 0x32, 0xc0, + 0x2f, 0x98, 0x26, 0xaf, 0xa9, 0x66, 0x8c, 0x0d, 0x23, 0x46, 0x15, 0x97, 0x21, 0x0b, 0x56, 0x7d, + 0xa3, 0x4d, 0xa2, 0x64, 0x26, 0x99, 0x08, 0x33, 0xcd, 0x9d, 0x09, 0x26, 0xc6, 0x95, 0xa8, 0x78, + 0xd9, 0x1f, 0x57, 0xb8, 0x3a, 0x70, 0x3d, 0xdc, 0xd4, 0x83, 0xb2, 0x5d, 0xb6, 0x97, 0xb9, 0x52, + 0x82, 0x1e, 0x2a, 0x31, 0xf0, 0x9a, 0x15, 0x17, 0x48, 0xf6, 0xda, 0xb0, 0x22, 0x1d, 0x03, 0x92, + 0xa5, 0xdc, 0x95, 0x52, 0xf5, 0x98, 0x73, 0x1a, 0x17, 0xcc, 0x31, 0x87, 0x77, 0x03, 0xe6, 0x63, + 0xa9, 0x9a, 0x5f, 0xce, 0x4d, 0x4e, 0x56, 0x72, 0x8e, 0xc6, 0x79, 0x39, 0x3b, 0xb3, 0xa0, 0x0c, + 0x1d, 0x32, 0xd1, 0xd9, 0xbb, 0x70, 0xa5, 0xa0, 0xc4, 0x4e, 0x19, 0x71, 0x3a, 0xe3, 0x65, 0x77, + 0x8c, 0x94, 0x95, 0x9e, 0xc4, 0xf3, 0xa8, 0xc7, 0x0b, 0x68, 0xa9, 0xf4, 0xf4, 0x5c, 0x73, 0xfb, + 0x84, 0xff, 0xa9, 0x84, 0xc3, 0x0e, 0xa8, 0x2e, 0xfe, 0x1f, 0x38, 0x0f, 0x8a, 0x2b, 0xbc, 0xef, + 0xfb, 0x57, 0x09, 0xd5, 0x48, 0xbe, 0xc5, 0x11, 0x8b, 0xfa, 0x7b, 0xe9, 0xf4, 0x39, 0xb0, 0xec, + 0x41, 0x12, 0x3d, 0x86, 0xbc, 0xe5, 0xb8, 0xbd, 0x20, 0xbc, 0x85, 0xee, 0x5c, 0xb1, 0x6a, 0xa8, + 0x30, 0x08, 0xbf, 0x9a, 0xee, 0xe0, 0x9c, 0x35, 0x68, 0x20, 0x0c, 0x59, 0x4f, 0x50, 0x47, 0xe9, + 0xf2, 0xbd, 0x09, 0x5c, 0x63, 0x93, 0x3b, 0x1e, 0xd0, 0xb0, 0x23, 0x22, 0xa0, 0x81, 0x61, 0xeb, + 0xec, 0x54, 0xf0, 0xf9, 0xf1, 0x94, 0x66, 0x6f, 0xdd, 0x81, 0x61, 0x37, 0x98, 0x44, 0xed, 0xc2, + 0xfa, 0x9e, 0x11, 0xf4, 
0xd7, 0x7f, 0xc8, 0xe4, 0x47, 0xe7, 0x6b, 0x1d, 0x32, 0x5e, 0xf8, 0x18, + 0x1d, 0x0f, 0x77, 0xa6, 0x1b, 0x11, 0xc7, 0xe2, 0x3e, 0x89, 0xfa, 0x15, 0x5c, 0x1f, 0x6b, 0x4e, + 0x78, 0x34, 0xe6, 0x82, 0xc4, 0xb7, 0xe2, 0x02, 0xf5, 0xff, 0x93, 0x50, 0x88, 0x87, 0x2f, 0x9c, + 0xdb, 0xb7, 0x1c, 0xbd, 0xbf, 0xc9, 0x72, 0x82, 0xbd, 0x87, 0x85, 0x91, 0x67, 0x27, 0x58, 0x1a, + 0x87, 0x0d, 0xf5, 0x09, 0xac, 0xc5, 0xa2, 0xc0, 0xdc, 0xd2, 0x8f, 0x79, 0x6d, 0x24, 0xe6, 0xbb, + 0x57, 0x0c, 0x81, 0xe4, 0x5d, 0x29, 0xe4, 0x74, 0x68, 0x85, 0x09, 0x63, 0x22, 0xe2, 0x9f, 0x8d, + 0x46, 0xfc, 0xce, 0x54, 0xe6, 0x46, 0x03, 0xfe, 0x9b, 0x24, 0x14, 0x4b, 0xfe, 0x85, 0x63, 0xfe, + 0xdd, 0x46, 0xbd, 0x01, 0xf3, 0xb4, 0x17, 0x48, 0xb3, 0x9c, 0xb9, 0x12, 0x63, 0x9d, 0x63, 0xc4, + 0x34, 0xf3, 0x54, 0x6a, 0xa9, 0x5d, 0x58, 0x1b, 0xe3, 0x52, 0x11, 0xc3, 0x11, 0x73, 0x89, 0x57, + 0x35, 0xf7, 0xcb, 0x04, 0xdc, 0xe2, 0xf6, 0xfe, 0x8a, 0xb9, 0x69, 0x74, 0x1a, 0xc9, 0x57, 0x9d, + 0x46, 0x00, 0x9b, 0x2f, 0x9e, 0xc5, 0x77, 0xe6, 0xbc, 0x1e, 0xdc, 0x1c, 0xb5, 0x1a, 0xdb, 0xe2, + 0x27, 0x23, 0xae, 0x9b, 0x74, 0xd1, 0xf1, 0xa2, 0xfd, 0x24, 0xed, 0xf3, 0x8b, 0x71, 0x21, 0x8b, + 0x6f, 0xf6, 0xfb, 0xa3, 0x9b, 0xfd, 0xc3, 0xe9, 0x0d, 0x8f, 0xee, 0xf8, 0xff, 0x4b, 0x40, 0x4e, + 0xda, 0xa2, 0xe8, 0x10, 0xa0, 0x63, 0xfa, 0xba, 0xf8, 0x18, 0x20, 0x74, 0xe8, 0xa4, 0x8b, 0xa2, + 0x43, 0xd3, 0x17, 0x9f, 0x02, 0x64, 0x3b, 0xd1, 0xa3, 0xfc, 0xad, 0x41, 0x2a, 0xfe, 0xad, 0xc1, + 0x75, 0xc8, 0x76, 0xad, 0x2e, 0xd1, 0xf9, 0x3f, 0x7a, 0xe2, 0x5b, 0x05, 0x26, 0x68, 0x5e, 0xb8, + 0x44, 0xfd, 0x9f, 0x04, 0xe4, 0xe5, 0x00, 0xa1, 0xfb, 0xb0, 0xc8, 0x06, 0xd4, 0x22, 0x7e, 0x60, + 0x39, 0x61, 0x71, 0x97, 0xb8, 0x52, 0x65, 0x7c, 0x68, 0xfa, 0xfb, 0x03, 0x10, 0x5e, 0xe8, 0xc4, + 0xda, 0x68, 0x03, 0xe0, 0x8c, 0xb9, 0x5b, 0xf7, 0xad, 0xe7, 0x44, 0xbc, 0x00, 0x66, 0xb9, 0xe4, + 0xc4, 0x7a, 0x4e, 0xd4, 0x0d, 0xc8, 0xf6, 0xa7, 0x35, 0xfa, 0xfe, 0xa8, 0xaa, 0xb0, 0x10, 0xe7, + 0x1f, 0xd3, 0xe7, 0x9b, 0x24, 0x2c, 0xd5, 0xa3, 0x8f, 0x82, 0x8e, 0x49, 0x60, 0xb4, 0x8c, 0xc0, + 0x40, 0x55, 0x48, 0xfb, 0x2c, 0x20, 0xe2, 0x6e, 0x77, 0xd2, 0x57, 0x06, 0x23, 0x04, 0xbc, 0x64, + 0x23, 0x38, 0x24, 0x41, 0x1f, 0x43, 0xce, 0xf4, 0x88, 0x11, 0x10, 0x3d, 0xb0, 0xba, 0x44, 0xdc, + 0x50, 0xac, 0x47, 0x9c, 0xd1, 0x77, 0x45, 0xdb, 0xcd, 0xe8, 0xbb, 0x22, 0x0c, 0x61, 0x77, 0x26, + 0x60, 0xe0, 0x9e, 0xdb, 0xea, 0x83, 0x67, 0x27, 0x83, 0xc3, 0xee, 0x4c, 0xa0, 0x7e, 0x06, 0x69, + 0x3e, 0x12, 0xb4, 0x02, 0x4b, 0x27, 0xcd, 0x52, 0x73, 0xf8, 0x5b, 0x9e, 0x1c, 0xcc, 0x95, 0xb1, + 0x56, 0x6a, 0x6a, 0xfb, 0x4a, 0x82, 0x35, 0xf0, 0x69, 0xad, 0x56, 0xa9, 0x1d, 0x2a, 0x49, 0x94, + 0x81, 0x99, 0xfd, 0x7a, 0x4d, 0x53, 0x52, 0x68, 0x1e, 0xb2, 0xe5, 0x52, 0xad, 0xac, 0x55, 0xab, + 0xda, 0xbe, 0x32, 0xf3, 0x0e, 0x01, 0x90, 0xfe, 0x5c, 0xce, 0xc1, 0x9c, 0xf8, 0x53, 0x55, 0x79, + 0x0d, 0x2d, 0xc1, 0xfc, 0x7d, 0x0d, 0x3f, 0xd4, 0x4f, 0x6b, 0xd5, 0xca, 0x3d, 0xad, 0xfa, 0x50, + 0x49, 0xa0, 0x3c, 0x64, 0xfa, 0xad, 0x24, 0x6b, 0x35, 0xea, 0x27, 0x27, 0x95, 0xbd, 0x2a, 0x23, + 0x06, 0x98, 0x15, 0x9a, 0x19, 0xb4, 0x08, 0x39, 0x0e, 0x15, 0x82, 0xf4, 0xee, 0xaf, 0xd3, 0xb0, + 0x20, 0x17, 0xae, 0xd4, 0x43, 0x3f, 0x4b, 0x40, 0x61, 0x4c, 0xa6, 0x41, 0x1f, 0x4d, 0xba, 0x57, + 0x7c, 0x61, 0x8e, 0x5d, 0xbf, 0xfb, 0x32, 0xd0, 0x70, 0x57, 0xaa, 0x6f, 0xfd, 0xf7, 0x6f, 0xff, + 0xf8, 0xbd, 0xe4, 0x2d, 0x75, 0x7d, 0xf8, 0x7b, 0x3c, 0xff, 0xae, 0x78, 0xc7, 0x21, 0x77, 0x13, + 0xef, 0xa0, 0x9f, 0x26, 0x00, 0x8d, 0xa6, 0x0c, 0xf4, 0xe1, 0x34, 0x96, 0xe5, 0xe4, 0xb6, 0xfe, + 0xd1, 0x4b, 0x20, 0xc5, 0x90, 0xdf, 0xe4, 0x43, 
0xbe, 0xa9, 0xae, 0x49, 0x43, 0x6e, 0xb3, 0x1e, + 0xb1, 0x11, 0xff, 0x38, 0x21, 0xea, 0x8b, 0x71, 0xce, 0xfe, 0xb7, 0xab, 0xe4, 0xb3, 0x4b, 0x3c, + 0xbe, 0x11, 0xe1, 0xa5, 0xaf, 0xf2, 0x06, 0xfb, 0x47, 0xdd, 0xe1, 0x23, 0x7c, 0x5b, 0x7d, 0x73, + 0x8c, 0x53, 0x47, 0x98, 0xd9, 0x60, 0xbf, 0x4e, 0xc0, 0xb5, 0x17, 0xa4, 0x65, 0xf4, 0xc9, 0xd4, + 0x63, 0x8d, 0x39, 0x7a, 0xc2, 0x50, 0xb7, 0xf9, 0x50, 0xb7, 0xd4, 0x37, 0x46, 0x9d, 0x39, 0x6e, + 0xa4, 0x7b, 0xff, 0x9b, 0x80, 0xd7, 0x4d, 0xda, 0xbd, 0x7c, 0x4c, 0x7b, 0x85, 0xf8, 0xa2, 0x6f, + 0xb0, 0xfd, 0xdd, 0x48, 0x3c, 0x2a, 0x0b, 0x54, 0x87, 0xda, 0x86, 0xd3, 0xd9, 0xa6, 0x5e, 0x67, + 0xa7, 0x43, 0x1c, 0xbe, 0xfb, 0x77, 0x42, 0x95, 0xe1, 0x5a, 0xfe, 0x0b, 0xbe, 0x8e, 0xfc, 0x38, + 0x14, 0xfc, 0x39, 0x91, 0xf8, 0x41, 0x72, 0xe6, 0xb0, 0x7c, 0xbf, 0x76, 0x36, 0xcb, 0x91, 0x77, + 0xfe, 0x12, 0x00, 0x00, 0xff, 0xff, 0x60, 0xef, 0x71, 0x29, 0x5f, 0x2a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search.pb.go new file mode 100644 index 0000000..6e72383 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search.pb.go @@ -0,0 +1,319 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/product_search.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Parameters for a product search request. +type ProductSearchParams struct { + // The bounding polygon around the area of interest in the image. + // Optional. If it is not specified, system discretion will be applied. + BoundingPoly *BoundingPoly `protobuf:"bytes,9,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // The resource name of a + // [ProductSet][google.cloud.vision.v1p4beta1.ProductSet] to be searched for + // similar images. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + ProductSet string `protobuf:"bytes,6,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The list of product categories to search in. Currently, we only consider + // the first category, and either "homegoods", "apparel", or "toys" should be + // specified. + ProductCategories []string `protobuf:"bytes,7,rep,name=product_categories,json=productCategories,proto3" json:"product_categories,omitempty"` + // The filtering expression. This can be used to restrict search results based + // on Product labels. We currently support an AND of OR of key-value + // expressions, where each expression within an OR must have the same key. 
+ // + // For example, "(color = red OR color = blue) AND brand = Google" is + // acceptable, but not "(color = red OR brand = Google)" or "color: red". + Filter string `protobuf:"bytes,8,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchParams) Reset() { *m = ProductSearchParams{} } +func (m *ProductSearchParams) String() string { return proto.CompactTextString(m) } +func (*ProductSearchParams) ProtoMessage() {} +func (*ProductSearchParams) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_573b317bf5620815, []int{0} +} +func (m *ProductSearchParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchParams.Unmarshal(m, b) +} +func (m *ProductSearchParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchParams.Marshal(b, m, deterministic) +} +func (dst *ProductSearchParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchParams.Merge(dst, src) +} +func (m *ProductSearchParams) XXX_Size() int { + return xxx_messageInfo_ProductSearchParams.Size(m) +} +func (m *ProductSearchParams) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchParams proto.InternalMessageInfo + +func (m *ProductSearchParams) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *ProductSearchParams) GetProductSet() string { + if m != nil { + return m.ProductSet + } + return "" +} + +func (m *ProductSearchParams) GetProductCategories() []string { + if m != nil { + return m.ProductCategories + } + return nil +} + +func (m *ProductSearchParams) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Results for a product search request. +type ProductSearchResults struct { + // Timestamp of the index which provided these results. Changes made after + // this time are not reflected in the current results. + IndexTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // List of results, one for each product match. + Results []*ProductSearchResults_Result `protobuf:"bytes,5,rep,name=results,proto3" json:"results,omitempty"` + // List of results grouped by products detected in the query image. Each entry + // corresponds to one bounding polygon in the query image, and contains the + // matching products specific to that region. There may be duplicate product + // matches in the union of all the per-product results. 
+ ProductGroupedResults []*ProductSearchResults_GroupedResult `protobuf:"bytes,6,rep,name=product_grouped_results,json=productGroupedResults,proto3" json:"product_grouped_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults) Reset() { *m = ProductSearchResults{} } +func (m *ProductSearchResults) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults) ProtoMessage() {} +func (*ProductSearchResults) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_573b317bf5620815, []int{1} +} +func (m *ProductSearchResults) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults.Unmarshal(m, b) +} +func (m *ProductSearchResults) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults.Merge(dst, src) +} +func (m *ProductSearchResults) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults.Size(m) +} +func (m *ProductSearchResults) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults proto.InternalMessageInfo + +func (m *ProductSearchResults) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSearchResults) GetResults() []*ProductSearchResults_Result { + if m != nil { + return m.Results + } + return nil +} + +func (m *ProductSearchResults) GetProductGroupedResults() []*ProductSearchResults_GroupedResult { + if m != nil { + return m.ProductGroupedResults + } + return nil +} + +// Information about a product. +type ProductSearchResults_Result struct { + // The Product. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // A confidence level on the match, ranging from 0 (no confidence) to + // 1 (full confidence). + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // The resource name of the image from the product that is the closest match + // to the query. 
+ Image string `protobuf:"bytes,3,opt,name=image,proto3" json:"image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_Result) Reset() { *m = ProductSearchResults_Result{} } +func (m *ProductSearchResults_Result) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_Result) ProtoMessage() {} +func (*ProductSearchResults_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_573b317bf5620815, []int{1, 0} +} +func (m *ProductSearchResults_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_Result.Unmarshal(m, b) +} +func (m *ProductSearchResults_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_Result.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_Result.Merge(dst, src) +} +func (m *ProductSearchResults_Result) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_Result.Size(m) +} +func (m *ProductSearchResults_Result) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_Result proto.InternalMessageInfo + +func (m *ProductSearchResults_Result) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *ProductSearchResults_Result) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *ProductSearchResults_Result) GetImage() string { + if m != nil { + return m.Image + } + return "" +} + +// Information about the products similar to a single product in a query +// image. +type ProductSearchResults_GroupedResult struct { + // The bounding polygon around the product detected in the query image. + BoundingPoly *BoundingPoly `protobuf:"bytes,1,opt,name=bounding_poly,json=boundingPoly,proto3" json:"bounding_poly,omitempty"` + // List of results, one for each product match. 
+ Results []*ProductSearchResults_Result `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSearchResults_GroupedResult) Reset() { *m = ProductSearchResults_GroupedResult{} } +func (m *ProductSearchResults_GroupedResult) String() string { return proto.CompactTextString(m) } +func (*ProductSearchResults_GroupedResult) ProtoMessage() {} +func (*ProductSearchResults_GroupedResult) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_573b317bf5620815, []int{1, 1} +} +func (m *ProductSearchResults_GroupedResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Unmarshal(m, b) +} +func (m *ProductSearchResults_GroupedResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Marshal(b, m, deterministic) +} +func (dst *ProductSearchResults_GroupedResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSearchResults_GroupedResult.Merge(dst, src) +} +func (m *ProductSearchResults_GroupedResult) XXX_Size() int { + return xxx_messageInfo_ProductSearchResults_GroupedResult.Size(m) +} +func (m *ProductSearchResults_GroupedResult) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSearchResults_GroupedResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSearchResults_GroupedResult proto.InternalMessageInfo + +func (m *ProductSearchResults_GroupedResult) GetBoundingPoly() *BoundingPoly { + if m != nil { + return m.BoundingPoly + } + return nil +} + +func (m *ProductSearchResults_GroupedResult) GetResults() []*ProductSearchResults_Result { + if m != nil { + return m.Results + } + return nil +} + +func init() { + proto.RegisterType((*ProductSearchParams)(nil), "google.cloud.vision.v1p4beta1.ProductSearchParams") + proto.RegisterType((*ProductSearchResults)(nil), "google.cloud.vision.v1p4beta1.ProductSearchResults") + proto.RegisterType((*ProductSearchResults_Result)(nil), "google.cloud.vision.v1p4beta1.ProductSearchResults.Result") + proto.RegisterType((*ProductSearchResults_GroupedResult)(nil), "google.cloud.vision.v1p4beta1.ProductSearchResults.GroupedResult") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/product_search.proto", fileDescriptor_product_search_573b317bf5620815) +} + +var fileDescriptor_product_search_573b317bf5620815 = []byte{ + // 495 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0xcd, 0x8e, 0xd3, 0x3c, + 0x14, 0x95, 0xdb, 0x99, 0xf4, 0xab, 0xfb, 0xcd, 0x02, 0x33, 0x40, 0x14, 0x81, 0xa6, 0xcc, 0x02, + 0x55, 0x02, 0x12, 0x4d, 0x61, 0xc3, 0xb0, 0x81, 0x76, 0x31, 0x3b, 0x14, 0x85, 0x11, 0x0b, 0x36, + 0x91, 0x93, 0x78, 0x8c, 0xa5, 0x24, 0x37, 0xb2, 0x9d, 0x8a, 0xbe, 0x01, 0xcf, 0xc1, 0x4b, 0xf0, + 0x18, 0x6c, 0x78, 0x18, 0x96, 0xa8, 0xfe, 0x19, 0xa6, 0x02, 0xca, 0xef, 0x2a, 0xb9, 0xf6, 0x3d, + 0xe7, 0xdc, 0x7b, 0xee, 0x35, 0x9e, 0x73, 0x00, 0x5e, 0xb3, 0xa4, 0xac, 0xa1, 0xaf, 0x92, 0x95, + 0x50, 0x02, 0xda, 0x64, 0x75, 0xd2, 0x3d, 0x2e, 0x98, 0xa6, 0x27, 0x49, 0x27, 0xa1, 0xea, 0x4b, + 0x9d, 0x2b, 0x46, 0x65, 0xf9, 0x26, 0xee, 0x24, 0x68, 0x20, 0x77, 0x2c, 0x26, 0x36, 0x98, 0xd8, + 0x62, 0xe2, 0x4b, 0x4c, 0x74, 0xdb, 0x51, 0xd2, 0x4e, 0x24, 0xb4, 0x6d, 0x41, 0x53, 0x2d, 0xa0, + 0x55, 0x16, 0x1c, 0x3d, 0xd8, 0x2d, 0xc8, 0x19, 0x34, 0x4c, 0xcb, 0xb5, 0xcb, 0x3e, 0xfd, 0x9d, + 0xf2, 0x72, 0xc5, 0xe4, 0x4a, 0x94, 0xcc, 
0x61, 0x8f, 0x1c, 0xd6, 0x44, 0x45, 0x7f, 0x91, 0x68, + 0xd1, 0x30, 0xa5, 0x69, 0xd3, 0xd9, 0x84, 0xe3, 0x8f, 0x08, 0x5f, 0x4f, 0x2d, 0xc3, 0x4b, 0x43, + 0x90, 0x52, 0x49, 0x1b, 0x45, 0x52, 0x7c, 0x50, 0x40, 0xdf, 0x56, 0xa2, 0xe5, 0x79, 0x07, 0xf5, + 0x3a, 0x1c, 0x4f, 0xd1, 0x6c, 0x32, 0xbf, 0x1f, 0xef, 0xec, 0x3b, 0x5e, 0x38, 0x4c, 0x0a, 0xf5, + 0x3a, 0xfb, 0xbf, 0xb8, 0x12, 0x91, 0x23, 0x3c, 0xf9, 0x5a, 0xaa, 0x0e, 0x83, 0x29, 0x9a, 0x8d, + 0x33, 0xdc, 0x79, 0x6d, 0x4d, 0x1e, 0x62, 0xe2, 0x13, 0x4a, 0xaa, 0x19, 0x07, 0x29, 0x98, 0x0a, + 0x47, 0xd3, 0xe1, 0x6c, 0x9c, 0x5d, 0x73, 0x37, 0xcb, 0xcb, 0x0b, 0x72, 0x13, 0x07, 0x17, 0xa2, + 0xd6, 0x4c, 0x86, 0xff, 0x19, 0x2a, 0x17, 0x1d, 0x7f, 0xda, 0xc3, 0x87, 0x5b, 0x1d, 0x65, 0x4c, + 0xf5, 0xb5, 0x56, 0xe4, 0x09, 0xc6, 0xa2, 0xad, 0xd8, 0xdb, 0x7c, 0xe3, 0x41, 0x38, 0x30, 0xfd, + 0x44, 0xbe, 0x1f, 0x6f, 0x50, 0x7c, 0xee, 0x0d, 0xca, 0xc6, 0x26, 0x7b, 0x13, 0x93, 0x73, 0x3c, + 0x92, 0x96, 0x25, 0xdc, 0x9f, 0x0e, 0x67, 0x93, 0xf9, 0xe9, 0x4f, 0x7c, 0xf8, 0x5e, 0x01, 0xb1, + 0xfd, 0x66, 0x9e, 0x8a, 0xac, 0xf1, 0x2d, 0xdf, 0x30, 0x97, 0xd0, 0x77, 0xac, 0xca, 0xbd, 0x4a, + 0x60, 0x54, 0x9e, 0xff, 0x89, 0xca, 0x99, 0xa5, 0x72, 0x62, 0x37, 0x9c, 0xc2, 0xd6, 0xa9, 0x8a, + 0x56, 0x38, 0xb0, 0xbf, 0xe4, 0x19, 0x1e, 0xb9, 0x94, 0x10, 0x19, 0x4b, 0xee, 0xfd, 0x9a, 0x68, + 0xe6, 0x61, 0xe4, 0x10, 0xef, 0xab, 0x12, 0xa4, 0xb5, 0x74, 0x90, 0xd9, 0x60, 0x73, 0x2a, 0x1a, + 0xca, 0x59, 0x38, 0x34, 0xd3, 0xb1, 0x41, 0xf4, 0x01, 0xe1, 0x83, 0xad, 0x52, 0xbe, 0x5d, 0x34, + 0xf4, 0xb7, 0x8b, 0x76, 0x65, 0x58, 0x83, 0x7f, 0x36, 0xac, 0xc5, 0x3b, 0x84, 0xef, 0x96, 0xd0, + 0xec, 0xa6, 0x5a, 0x90, 0xed, 0xb7, 0xb4, 0x59, 0xaa, 0x14, 0xbd, 0x5e, 0x3a, 0x10, 0x87, 0x9a, + 0xb6, 0x3c, 0x06, 0xc9, 0x13, 0xce, 0x5a, 0xb3, 0x72, 0x89, 0xbd, 0xa2, 0x9d, 0x50, 0x3f, 0x78, + 0xe0, 0x4f, 0xed, 0xc1, 0x67, 0x84, 0xde, 0x0f, 0xf6, 0xce, 0x96, 0xaf, 0x5e, 0x14, 0x81, 0x41, + 0x3e, 0xfa, 0x12, 0x00, 0x00, 0xff, 0xff, 0x50, 0x51, 0x5b, 0x19, 0xb8, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search_service.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search_service.pb.go new file mode 100644 index 0000000..e43996f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/product_search_service.pb.go @@ -0,0 +1,2978 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/product_search_service.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates the possible states that the batch request can be in. +type BatchOperationMetadata_State int32 + +const ( + // Invalid. + BatchOperationMetadata_STATE_UNSPECIFIED BatchOperationMetadata_State = 0 + // Request is actively being processed. + BatchOperationMetadata_PROCESSING BatchOperationMetadata_State = 1 + // The request is done and at least one item has been successfully + // processed. + BatchOperationMetadata_SUCCESSFUL BatchOperationMetadata_State = 2 + // The request is done and no item has been successfully processed. + BatchOperationMetadata_FAILED BatchOperationMetadata_State = 3 + // The request is done after the longrunning.Operations.CancelOperation has + // been called by the user. Any records that were processed before the + // cancel command are output as specified in the request. + BatchOperationMetadata_CANCELLED BatchOperationMetadata_State = 4 +) + +var BatchOperationMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "PROCESSING", + 2: "SUCCESSFUL", + 3: "FAILED", + 4: "CANCELLED", +} +var BatchOperationMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "PROCESSING": 1, + "SUCCESSFUL": 2, + "FAILED": 3, + "CANCELLED": 4, +} + +func (x BatchOperationMetadata_State) String() string { + return proto.EnumName(BatchOperationMetadata_State_name, int32(x)) +} +func (BatchOperationMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{28, 0} +} + +// A Product contains ReferenceImages. +type Product struct { + // The resource name of the product. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + // + // This field is ignored when creating a product. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this Product. Must not be empty. Must be at most + // 4096 characters long. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User-provided metadata to be stored with this product. Must be at most 4096 + // characters long. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The category for the product identified by the reference image. This should + // be either "homegoods", "apparel", or "toys". + // + // This field is immutable. + ProductCategory string `protobuf:"bytes,4,opt,name=product_category,json=productCategory,proto3" json:"product_category,omitempty"` + // Key-value pairs that can be attached to a product. At query time, + // constraints can be specified based on the product_labels. + // + // Note that integer values can be provided as strings, e.g. "1199". Only + // strings with integer values can match a range-based restriction which is + // to be supported soon. + // + // Multiple values can be assigned to the same key. One product may have up to + // 100 product_labels. 
+ ProductLabels []*Product_KeyValue `protobuf:"bytes,5,rep,name=product_labels,json=productLabels,proto3" json:"product_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product) Reset() { *m = Product{} } +func (m *Product) String() string { return proto.CompactTextString(m) } +func (*Product) ProtoMessage() {} +func (*Product) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{0} +} +func (m *Product) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product.Unmarshal(m, b) +} +func (m *Product) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product.Marshal(b, m, deterministic) +} +func (dst *Product) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product.Merge(dst, src) +} +func (m *Product) XXX_Size() int { + return xxx_messageInfo_Product.Size(m) +} +func (m *Product) XXX_DiscardUnknown() { + xxx_messageInfo_Product.DiscardUnknown(m) +} + +var xxx_messageInfo_Product proto.InternalMessageInfo + +func (m *Product) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Product) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Product) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Product) GetProductCategory() string { + if m != nil { + return m.ProductCategory + } + return "" +} + +func (m *Product) GetProductLabels() []*Product_KeyValue { + if m != nil { + return m.ProductLabels + } + return nil +} + +// A product label represented as a key-value pair. +type Product_KeyValue struct { + // The key of the label attached to the product. Cannot be empty and cannot + // exceed 128 bytes. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value of the label attached to the product. Cannot be empty and + // cannot exceed 128 bytes. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Product_KeyValue) Reset() { *m = Product_KeyValue{} } +func (m *Product_KeyValue) String() string { return proto.CompactTextString(m) } +func (*Product_KeyValue) ProtoMessage() {} +func (*Product_KeyValue) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{0, 0} +} +func (m *Product_KeyValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Product_KeyValue.Unmarshal(m, b) +} +func (m *Product_KeyValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Product_KeyValue.Marshal(b, m, deterministic) +} +func (dst *Product_KeyValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_Product_KeyValue.Merge(dst, src) +} +func (m *Product_KeyValue) XXX_Size() int { + return xxx_messageInfo_Product_KeyValue.Size(m) +} +func (m *Product_KeyValue) XXX_DiscardUnknown() { + xxx_messageInfo_Product_KeyValue.DiscardUnknown(m) +} + +var xxx_messageInfo_Product_KeyValue proto.InternalMessageInfo + +func (m *Product_KeyValue) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Product_KeyValue) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A ProductSet contains Products. A ProductSet can contain a maximum of 1 +// million reference images. 
If the limit is exceeded, periodic indexing will +// fail. +type ProductSet struct { + // The resource name of the ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. + // + // This field is ignored when creating a ProductSet. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The user-provided name for this ProductSet. Must not be empty. Must be at + // most 4096 characters long. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Output only. The time at which this ProductSet was last indexed. Query + // results will reflect all updates before this time. If this ProductSet has + // never been indexed, this timestamp is the default value + // "1970-01-01T00:00:00Z". + // + // This field is ignored when creating a ProductSet. + IndexTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=index_time,json=indexTime,proto3" json:"index_time,omitempty"` + // Output only. If there was an error with indexing the product set, the field + // is populated. + // + // This field is ignored when creating a ProductSet. + IndexError *status.Status `protobuf:"bytes,4,opt,name=index_error,json=indexError,proto3" json:"index_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProductSet) Reset() { *m = ProductSet{} } +func (m *ProductSet) String() string { return proto.CompactTextString(m) } +func (*ProductSet) ProtoMessage() {} +func (*ProductSet) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{1} +} +func (m *ProductSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProductSet.Unmarshal(m, b) +} +func (m *ProductSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProductSet.Marshal(b, m, deterministic) +} +func (dst *ProductSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProductSet.Merge(dst, src) +} +func (m *ProductSet) XXX_Size() int { + return xxx_messageInfo_ProductSet.Size(m) +} +func (m *ProductSet) XXX_DiscardUnknown() { + xxx_messageInfo_ProductSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ProductSet proto.InternalMessageInfo + +func (m *ProductSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ProductSet) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ProductSet) GetIndexTime() *timestamp.Timestamp { + if m != nil { + return m.IndexTime + } + return nil +} + +func (m *ProductSet) GetIndexError() *status.Status { + if m != nil { + return m.IndexError + } + return nil +} + +// A `ReferenceImage` represents a product image and its associated metadata, +// such as bounding boxes. +type ReferenceImage struct { + // The resource name of the reference image. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. + // + // This field is ignored when creating a reference image. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The Google Cloud Storage URI of the reference image. + // + // The URI must start with `gs://`. + // + // Required. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // Bounding polygons around the areas of interest in the reference image. + // Optional. 
If this field is empty, the system will try to detect regions of + // interest. At most 10 bounding polygons will be used. + // + // The provided shape is converted into a non-rotated rectangle. Once + // converted, the small edge of the rectangle must be greater than or equal + // to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 + // is not). + BoundingPolys []*BoundingPoly `protobuf:"bytes,3,rep,name=bounding_polys,json=boundingPolys,proto3" json:"bounding_polys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReferenceImage) Reset() { *m = ReferenceImage{} } +func (m *ReferenceImage) String() string { return proto.CompactTextString(m) } +func (*ReferenceImage) ProtoMessage() {} +func (*ReferenceImage) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{2} +} +func (m *ReferenceImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReferenceImage.Unmarshal(m, b) +} +func (m *ReferenceImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReferenceImage.Marshal(b, m, deterministic) +} +func (dst *ReferenceImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReferenceImage.Merge(dst, src) +} +func (m *ReferenceImage) XXX_Size() int { + return xxx_messageInfo_ReferenceImage.Size(m) +} +func (m *ReferenceImage) XXX_DiscardUnknown() { + xxx_messageInfo_ReferenceImage.DiscardUnknown(m) +} + +var xxx_messageInfo_ReferenceImage proto.InternalMessageInfo + +func (m *ReferenceImage) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReferenceImage) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *ReferenceImage) GetBoundingPolys() []*BoundingPoly { + if m != nil { + return m.BoundingPolys + } + return nil +} + +// Request message for the `CreateProduct` method. +type CreateProductRequest struct { + // The project in which the Product should be created. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The product to create. + Product *Product `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + // A user-supplied resource id for this Product. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. 
+ ProductId string `protobuf:"bytes,3,opt,name=product_id,json=productId,proto3" json:"product_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductRequest) Reset() { *m = CreateProductRequest{} } +func (m *CreateProductRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductRequest) ProtoMessage() {} +func (*CreateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{3} +} +func (m *CreateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductRequest.Unmarshal(m, b) +} +func (m *CreateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductRequest.Merge(dst, src) +} +func (m *CreateProductRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductRequest.Size(m) +} +func (m *CreateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductRequest proto.InternalMessageInfo + +func (m *CreateProductRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *CreateProductRequest) GetProductId() string { + if m != nil { + return m.ProductId + } + return "" +} + +// Request message for the `ListProducts` method. +type ListProductsRequest struct { + // The project OR ProductSet from which Products should be listed. + // + // Format: + // `projects/PROJECT_ID/locations/LOC_ID` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsRequest) Reset() { *m = ListProductsRequest{} } +func (m *ListProductsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsRequest) ProtoMessage() {} +func (*ListProductsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{4} +} +func (m *ListProductsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsRequest.Unmarshal(m, b) +} +func (m *ListProductsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsRequest.Merge(dst, src) +} +func (m *ListProductsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsRequest.Size(m) +} +func (m *ListProductsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsRequest proto.InternalMessageInfo + +func (m *ListProductsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProducts` method. +type ListProductsResponse struct { + // List of products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsResponse) Reset() { *m = ListProductsResponse{} } +func (m *ListProductsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsResponse) ProtoMessage() {} +func (*ListProductsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{5} +} +func (m *ListProductsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsResponse.Unmarshal(m, b) +} +func (m *ListProductsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsResponse.Merge(dst, src) +} +func (m *ListProductsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsResponse.Size(m) +} +func (m *ListProductsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsResponse proto.InternalMessageInfo + +func (m *ListProductsResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProduct` method. 
+type GetProductRequest struct { + // Resource name of the Product to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductRequest) Reset() { *m = GetProductRequest{} } +func (m *GetProductRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductRequest) ProtoMessage() {} +func (*GetProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{6} +} +func (m *GetProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductRequest.Unmarshal(m, b) +} +func (m *GetProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductRequest.Merge(dst, src) +} +func (m *GetProductRequest) XXX_Size() int { + return xxx_messageInfo_GetProductRequest.Size(m) +} +func (m *GetProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductRequest proto.InternalMessageInfo + +func (m *GetProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProduct` method. +type UpdateProductRequest struct { + // The Product resource which replaces the one on the server. + // product.name is immutable. + Product *Product `protobuf:"bytes,1,opt,name=product,proto3" json:"product,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields + // to update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask paths include `product_labels`, `display_name`, and + // `description`. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductRequest) Reset() { *m = UpdateProductRequest{} } +func (m *UpdateProductRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductRequest) ProtoMessage() {} +func (*UpdateProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{7} +} +func (m *UpdateProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductRequest.Unmarshal(m, b) +} +func (m *UpdateProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductRequest.Merge(dst, src) +} +func (m *UpdateProductRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductRequest.Size(m) +} +func (m *UpdateProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductRequest proto.InternalMessageInfo + +func (m *UpdateProductRequest) GetProduct() *Product { + if m != nil { + return m.Product + } + return nil +} + +func (m *UpdateProductRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProduct` method. +type DeleteProductRequest struct { + // Resource name of product to delete. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductRequest) Reset() { *m = DeleteProductRequest{} } +func (m *DeleteProductRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductRequest) ProtoMessage() {} +func (*DeleteProductRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{8} +} +func (m *DeleteProductRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductRequest.Unmarshal(m, b) +} +func (m *DeleteProductRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductRequest.Merge(dst, src) +} +func (m *DeleteProductRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductRequest.Size(m) +} +func (m *DeleteProductRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductRequest proto.InternalMessageInfo + +func (m *DeleteProductRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateProductSet` method. +type CreateProductSetRequest struct { + // The project in which the ProductSet should be created. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ProductSet to create. 
+ ProductSet *ProductSet `protobuf:"bytes,2,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // A user-supplied resource id for this ProductSet. If set, the server will + // attempt to use this value as the resource id. If it is already in use, an + // error is returned with code ALREADY_EXISTS. Must be at most 128 characters + // long. It cannot contain the character `/`. + ProductSetId string `protobuf:"bytes,3,opt,name=product_set_id,json=productSetId,proto3" json:"product_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProductSetRequest) Reset() { *m = CreateProductSetRequest{} } +func (m *CreateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProductSetRequest) ProtoMessage() {} +func (*CreateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{9} +} +func (m *CreateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProductSetRequest.Unmarshal(m, b) +} +func (m *CreateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProductSetRequest.Merge(dst, src) +} +func (m *CreateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateProductSetRequest.Size(m) +} +func (m *CreateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProductSetRequest proto.InternalMessageInfo + +func (m *CreateProductSetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *CreateProductSetRequest) GetProductSetId() string { + if m != nil { + return m.ProductSetId + } + return "" +} + +// Request message for the `ListProductSets` method. +type ListProductSetsRequest struct { + // The project from which ProductSets should be listed. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsRequest) Reset() { *m = ListProductSetsRequest{} } +func (m *ListProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsRequest) ProtoMessage() {} +func (*ListProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{10} +} +func (m *ListProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsRequest.Unmarshal(m, b) +} +func (m *ListProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsRequest.Merge(dst, src) +} +func (m *ListProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListProductSetsRequest.Size(m) +} +func (m *ListProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsRequest proto.InternalMessageInfo + +func (m *ListProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListProductSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductSets` method. +type ListProductSetsResponse struct { + // List of ProductSets. + ProductSets []*ProductSet `protobuf:"bytes,1,rep,name=product_sets,json=productSets,proto3" json:"product_sets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductSetsResponse) Reset() { *m = ListProductSetsResponse{} } +func (m *ListProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductSetsResponse) ProtoMessage() {} +func (*ListProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{11} +} +func (m *ListProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductSetsResponse.Unmarshal(m, b) +} +func (m *ListProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductSetsResponse.Merge(dst, src) +} +func (m *ListProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListProductSetsResponse.Size(m) +} +func (m *ListProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductSetsResponse proto.InternalMessageInfo + +func (m *ListProductSetsResponse) GetProductSets() []*ProductSet { + if m != nil { + return m.ProductSets + } + return nil +} + +func (m *ListProductSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetProductSet` method. +type GetProductSetRequest struct { + // Resource name of the ProductSet to get. + // + // Format is: + // `projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetProductSetRequest) Reset() { *m = GetProductSetRequest{} } +func (m *GetProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetProductSetRequest) ProtoMessage() {} +func (*GetProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{12} +} +func (m *GetProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetProductSetRequest.Unmarshal(m, b) +} +func (m *GetProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetProductSetRequest.Merge(dst, src) +} +func (m *GetProductSetRequest) XXX_Size() int { + return xxx_messageInfo_GetProductSetRequest.Size(m) +} +func (m *GetProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetProductSetRequest proto.InternalMessageInfo + +func (m *GetProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `UpdateProductSet` method. +type UpdateProductSetRequest struct { + // The ProductSet resource which replaces the one on the server. 
+ ProductSet *ProductSet `protobuf:"bytes,1,opt,name=product_set,json=productSet,proto3" json:"product_set,omitempty"` + // The [FieldMask][google.protobuf.FieldMask] that specifies which fields to + // update. + // If update_mask isn't specified, all mutable fields are to be updated. + // Valid mask path is `display_name`. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProductSetRequest) Reset() { *m = UpdateProductSetRequest{} } +func (m *UpdateProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProductSetRequest) ProtoMessage() {} +func (*UpdateProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{13} +} +func (m *UpdateProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProductSetRequest.Unmarshal(m, b) +} +func (m *UpdateProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProductSetRequest.Merge(dst, src) +} +func (m *UpdateProductSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProductSetRequest.Size(m) +} +func (m *UpdateProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProductSetRequest proto.InternalMessageInfo + +func (m *UpdateProductSetRequest) GetProductSet() *ProductSet { + if m != nil { + return m.ProductSet + } + return nil +} + +func (m *UpdateProductSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for the `DeleteProductSet` method. +type DeleteProductSetRequest struct { + // Resource name of the ProductSet to delete. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteProductSetRequest) Reset() { *m = DeleteProductSetRequest{} } +func (m *DeleteProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteProductSetRequest) ProtoMessage() {} +func (*DeleteProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{14} +} +func (m *DeleteProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteProductSetRequest.Unmarshal(m, b) +} +func (m *DeleteProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteProductSetRequest.Merge(dst, src) +} +func (m *DeleteProductSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteProductSetRequest.Size(m) +} +func (m *DeleteProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteProductSetRequest proto.InternalMessageInfo + +func (m *DeleteProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `CreateReferenceImage` method. +type CreateReferenceImageRequest struct { + // Resource name of the product in which to create the reference image. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The reference image to create. + // If an image ID is specified, it is ignored. + ReferenceImage *ReferenceImage `protobuf:"bytes,2,opt,name=reference_image,json=referenceImage,proto3" json:"reference_image,omitempty"` + // A user-supplied resource id for the ReferenceImage to be added. If set, + // the server will attempt to use this value as the resource id. If it is + // already in use, an error is returned with code ALREADY_EXISTS. Must be at + // most 128 characters long. It cannot contain the character `/`. 
+ ReferenceImageId string `protobuf:"bytes,3,opt,name=reference_image_id,json=referenceImageId,proto3" json:"reference_image_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateReferenceImageRequest) Reset() { *m = CreateReferenceImageRequest{} } +func (m *CreateReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*CreateReferenceImageRequest) ProtoMessage() {} +func (*CreateReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{15} +} +func (m *CreateReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateReferenceImageRequest.Unmarshal(m, b) +} +func (m *CreateReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *CreateReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateReferenceImageRequest.Merge(dst, src) +} +func (m *CreateReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_CreateReferenceImageRequest.Size(m) +} +func (m *CreateReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateReferenceImageRequest proto.InternalMessageInfo + +func (m *CreateReferenceImageRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateReferenceImageRequest) GetReferenceImage() *ReferenceImage { + if m != nil { + return m.ReferenceImage + } + return nil +} + +func (m *CreateReferenceImageRequest) GetReferenceImageId() string { + if m != nil { + return m.ReferenceImageId + } + return "" +} + +// Request message for the `ListReferenceImages` method. +type ListReferenceImagesRequest struct { + // Resource name of the product containing the reference images. + // + // Format is + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results to be returned. This is the value + // of `nextPageToken` returned in a previous reference image list request. + // + // Defaults to the first page if not specified. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesRequest) Reset() { *m = ListReferenceImagesRequest{} } +func (m *ListReferenceImagesRequest) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesRequest) ProtoMessage() {} +func (*ListReferenceImagesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{16} +} +func (m *ListReferenceImagesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesRequest.Unmarshal(m, b) +} +func (m *ListReferenceImagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesRequest.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesRequest.Merge(dst, src) +} +func (m *ListReferenceImagesRequest) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesRequest.Size(m) +} +func (m *ListReferenceImagesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesRequest proto.InternalMessageInfo + +func (m *ListReferenceImagesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListReferenceImagesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListReferenceImages` method. +type ListReferenceImagesResponse struct { + // The list of reference images. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReferenceImagesResponse) Reset() { *m = ListReferenceImagesResponse{} } +func (m *ListReferenceImagesResponse) String() string { return proto.CompactTextString(m) } +func (*ListReferenceImagesResponse) ProtoMessage() {} +func (*ListReferenceImagesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{17} +} +func (m *ListReferenceImagesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReferenceImagesResponse.Unmarshal(m, b) +} +func (m *ListReferenceImagesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReferenceImagesResponse.Marshal(b, m, deterministic) +} +func (dst *ListReferenceImagesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReferenceImagesResponse.Merge(dst, src) +} +func (m *ListReferenceImagesResponse) XXX_Size() int { + return xxx_messageInfo_ListReferenceImagesResponse.Size(m) +} +func (m *ListReferenceImagesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListReferenceImagesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReferenceImagesResponse proto.InternalMessageInfo + +func (m *ListReferenceImagesResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ListReferenceImagesResponse) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReferenceImagesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for the `GetReferenceImage` method. +type GetReferenceImageRequest struct { + // The resource name of the ReferenceImage to get. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReferenceImageRequest) Reset() { *m = GetReferenceImageRequest{} } +func (m *GetReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*GetReferenceImageRequest) ProtoMessage() {} +func (*GetReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{18} +} +func (m *GetReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReferenceImageRequest.Unmarshal(m, b) +} +func (m *GetReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *GetReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReferenceImageRequest.Merge(dst, src) +} +func (m *GetReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_GetReferenceImageRequest.Size(m) +} +func (m *GetReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReferenceImageRequest proto.InternalMessageInfo + +func (m *GetReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `DeleteReferenceImage` method. +type DeleteReferenceImageRequest struct { + // The resource name of the reference image to delete. + // + // Format is: + // + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteReferenceImageRequest) Reset() { *m = DeleteReferenceImageRequest{} } +func (m *DeleteReferenceImageRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteReferenceImageRequest) ProtoMessage() {} +func (*DeleteReferenceImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{19} +} +func (m *DeleteReferenceImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteReferenceImageRequest.Unmarshal(m, b) +} +func (m *DeleteReferenceImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteReferenceImageRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteReferenceImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteReferenceImageRequest.Merge(dst, src) +} +func (m *DeleteReferenceImageRequest) XXX_Size() int { + return xxx_messageInfo_DeleteReferenceImageRequest.Size(m) +} +func (m *DeleteReferenceImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteReferenceImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteReferenceImageRequest proto.InternalMessageInfo + +func (m *DeleteReferenceImageRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for the `AddProductToProductSet` method. +type AddProductToProductSetRequest struct { + // The resource name for the ProductSet to modify. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be added to this ProductSet. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddProductToProductSetRequest) Reset() { *m = AddProductToProductSetRequest{} } +func (m *AddProductToProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*AddProductToProductSetRequest) ProtoMessage() {} +func (*AddProductToProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{20} +} +func (m *AddProductToProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddProductToProductSetRequest.Unmarshal(m, b) +} +func (m *AddProductToProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddProductToProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *AddProductToProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddProductToProductSetRequest.Merge(dst, src) +} +func (m *AddProductToProductSetRequest) XXX_Size() int { + return xxx_messageInfo_AddProductToProductSetRequest.Size(m) +} +func (m *AddProductToProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddProductToProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddProductToProductSetRequest proto.InternalMessageInfo + +func (m *AddProductToProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AddProductToProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `RemoveProductFromProductSet` method. +type RemoveProductFromProductSetRequest struct { + // The resource name for the ProductSet to modify. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource name for the Product to be removed from this ProductSet. 
+ // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` + Product string `protobuf:"bytes,2,opt,name=product,proto3" json:"product,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveProductFromProductSetRequest) Reset() { *m = RemoveProductFromProductSetRequest{} } +func (m *RemoveProductFromProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveProductFromProductSetRequest) ProtoMessage() {} +func (*RemoveProductFromProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{21} +} +func (m *RemoveProductFromProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Unmarshal(m, b) +} +func (m *RemoveProductFromProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveProductFromProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveProductFromProductSetRequest.Merge(dst, src) +} +func (m *RemoveProductFromProductSetRequest) XXX_Size() int { + return xxx_messageInfo_RemoveProductFromProductSetRequest.Size(m) +} +func (m *RemoveProductFromProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveProductFromProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveProductFromProductSetRequest proto.InternalMessageInfo + +func (m *RemoveProductFromProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *RemoveProductFromProductSetRequest) GetProduct() string { + if m != nil { + return m.Product + } + return "" +} + +// Request message for the `ListProductsInProductSet` method. +type ListProductsInProductSetRequest struct { + // The ProductSet resource for which to retrieve Products. + // + // Format is: + // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The maximum number of items to return. Default 10, maximum 100. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token returned from a previous List request, if any. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetRequest) Reset() { *m = ListProductsInProductSetRequest{} } +func (m *ListProductsInProductSetRequest) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetRequest) ProtoMessage() {} +func (*ListProductsInProductSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{22} +} +func (m *ListProductsInProductSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetRequest.Unmarshal(m, b) +} +func (m *ListProductsInProductSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetRequest.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetRequest.Merge(dst, src) +} +func (m *ListProductsInProductSetRequest) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetRequest.Size(m) +} +func (m *ListProductsInProductSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetRequest proto.InternalMessageInfo + +func (m *ListProductsInProductSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListProductsInProductSetRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListProductsInProductSetRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for the `ListProductsInProductSet` method. +type ListProductsInProductSetResponse struct { + // The list of Products. + Products []*Product `protobuf:"bytes,1,rep,name=products,proto3" json:"products,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListProductsInProductSetResponse) Reset() { *m = ListProductsInProductSetResponse{} } +func (m *ListProductsInProductSetResponse) String() string { return proto.CompactTextString(m) } +func (*ListProductsInProductSetResponse) ProtoMessage() {} +func (*ListProductsInProductSetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{23} +} +func (m *ListProductsInProductSetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListProductsInProductSetResponse.Unmarshal(m, b) +} +func (m *ListProductsInProductSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListProductsInProductSetResponse.Marshal(b, m, deterministic) +} +func (dst *ListProductsInProductSetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListProductsInProductSetResponse.Merge(dst, src) +} +func (m *ListProductsInProductSetResponse) XXX_Size() int { + return xxx_messageInfo_ListProductsInProductSetResponse.Size(m) +} +func (m *ListProductsInProductSetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListProductsInProductSetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListProductsInProductSetResponse proto.InternalMessageInfo + +func (m *ListProductsInProductSetResponse) GetProducts() []*Product { + if m != nil { + return m.Products + } + return nil +} + +func (m *ListProductsInProductSetResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The Google Cloud Storage location for a csv file which preserves a list of +// ImportProductSetRequests in each line. +type ImportProductSetsGcsSource struct { + // The Google Cloud Storage URI of the input csv file. + // + // The URI must start with `gs://`. + // + // The format of the input csv file should be one image per line. + // In each line, there are 8 columns. + // + // 1. image-uri + // 2. image-id + // 3. product-set-id + // 4. product-id + // 5. product-category + // 6. product-display-name + // 7. labels + // 8. bounding-poly + // + // The `image-uri`, `product-set-id`, `product-id`, and `product-category` + // columns are required. All other columns are optional. + // + // If the `ProductSet` or `Product` specified by the `product-set-id` and + // `product-id` values does not exist, then the system will create a new + // `ProductSet` or `Product` for the image. In this case, the + // `product-display-name` column refers to + // [display_name][google.cloud.vision.v1p4beta1.Product.display_name], the + // `product-category` column refers to + // [product_category][google.cloud.vision.v1p4beta1.Product.product_category], + // and the `labels` column refers to + // [product_labels][google.cloud.vision.v1p4beta1.Product.product_labels]. + // + // The `image-id` column is optional but must be unique if provided. If it is + // empty, the system will automatically assign a unique id to the image. + // + // The `product-display-name` column is optional. If it is empty, the system + // sets the [display_name][google.cloud.vision.v1p4beta1.Product.display_name] + // field for the product to a space (" "). You can update the `display_name` + // later by using the API. 
+ // + // If a `Product` with the specified `product-id` already exists, then the + // system ignores the `product-display-name`, `product-category`, and `labels` + // columns. + // + // The `labels` column (optional) is a line containing a list of + // comma-separated key-value pairs, in the following format: + // + // "key_1=value_1,key_2=value_2,...,key_n=value_n" + // + // The `bounding-poly` column (optional) identifies one region of + // interest from the image in the same manner as `CreateReferenceImage`. If + // you do not specify the `bounding-poly` column, then the system will try to + // detect regions of interest automatically. + // + // At most one `bounding-poly` column is allowed per line. If the image + // contains multiple regions of interest, add a line to the CSV file that + // includes the same product information, and the `bounding-poly` values for + // each region of interest. + // + // The `bounding-poly` column must contain an even number of comma-separated + // numbers, in the format "p1_x,p1_y,p2_x,p2_y,...,pn_x,pn_y". Use + // non-negative integers for absolute bounding polygons, and float values + // in [0, 1] for normalized bounding polygons. + // + // The system will resize the image if the image resolution is too + // large to process (larger than 20MP). + CsvFileUri string `protobuf:"bytes,1,opt,name=csv_file_uri,json=csvFileUri,proto3" json:"csv_file_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsGcsSource) Reset() { *m = ImportProductSetsGcsSource{} } +func (m *ImportProductSetsGcsSource) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsGcsSource) ProtoMessage() {} +func (*ImportProductSetsGcsSource) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{24} +} +func (m *ImportProductSetsGcsSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsGcsSource.Unmarshal(m, b) +} +func (m *ImportProductSetsGcsSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsGcsSource.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsGcsSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsGcsSource.Merge(dst, src) +} +func (m *ImportProductSetsGcsSource) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsGcsSource.Size(m) +} +func (m *ImportProductSetsGcsSource) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsGcsSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsGcsSource proto.InternalMessageInfo + +func (m *ImportProductSetsGcsSource) GetCsvFileUri() string { + if m != nil { + return m.CsvFileUri + } + return "" +} + +// The input content for the `ImportProductSets` method. +type ImportProductSetsInputConfig struct { + // The source of the input. 
+ // + // Types that are valid to be assigned to Source: + // *ImportProductSetsInputConfig_GcsSource + Source isImportProductSetsInputConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsInputConfig) Reset() { *m = ImportProductSetsInputConfig{} } +func (m *ImportProductSetsInputConfig) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsInputConfig) ProtoMessage() {} +func (*ImportProductSetsInputConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{25} +} +func (m *ImportProductSetsInputConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsInputConfig.Unmarshal(m, b) +} +func (m *ImportProductSetsInputConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsInputConfig.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsInputConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsInputConfig.Merge(dst, src) +} +func (m *ImportProductSetsInputConfig) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsInputConfig.Size(m) +} +func (m *ImportProductSetsInputConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsInputConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsInputConfig proto.InternalMessageInfo + +type isImportProductSetsInputConfig_Source interface { + isImportProductSetsInputConfig_Source() +} + +type ImportProductSetsInputConfig_GcsSource struct { + GcsSource *ImportProductSetsGcsSource `protobuf:"bytes,1,opt,name=gcs_source,json=gcsSource,proto3,oneof"` +} + +func (*ImportProductSetsInputConfig_GcsSource) isImportProductSetsInputConfig_Source() {} + +func (m *ImportProductSetsInputConfig) GetSource() isImportProductSetsInputConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *ImportProductSetsInputConfig) GetGcsSource() *ImportProductSetsGcsSource { + if x, ok := m.GetSource().(*ImportProductSetsInputConfig_GcsSource); ok { + return x.GcsSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ImportProductSetsInputConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ImportProductSetsInputConfig_OneofMarshaler, _ImportProductSetsInputConfig_OneofUnmarshaler, _ImportProductSetsInputConfig_OneofSizer, []interface{}{ + (*ImportProductSetsInputConfig_GcsSource)(nil), + } +} + +func _ImportProductSetsInputConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ImportProductSetsInputConfig.Source has unexpected type %T", x) + } + return nil +} + +func _ImportProductSetsInputConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ImportProductSetsInputConfig) + switch tag { + case 1: // source.gcs_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImportProductSetsGcsSource) + err := b.DecodeMessage(msg) + m.Source = &ImportProductSetsInputConfig_GcsSource{msg} + return true, err + default: + return false, nil + } +} + +func _ImportProductSetsInputConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ImportProductSetsInputConfig) + // source + switch x := m.Source.(type) { + case *ImportProductSetsInputConfig_GcsSource: + s := proto.Size(x.GcsSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for the `ImportProductSets` method. +type ImportProductSetsRequest struct { + // The project in which the ProductSets should be imported. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The input content for the list of requests. 
+ InputConfig *ImportProductSetsInputConfig `protobuf:"bytes,2,opt,name=input_config,json=inputConfig,proto3" json:"input_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsRequest) Reset() { *m = ImportProductSetsRequest{} } +func (m *ImportProductSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsRequest) ProtoMessage() {} +func (*ImportProductSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{26} +} +func (m *ImportProductSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsRequest.Unmarshal(m, b) +} +func (m *ImportProductSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsRequest.Merge(dst, src) +} +func (m *ImportProductSetsRequest) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsRequest.Size(m) +} +func (m *ImportProductSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsRequest proto.InternalMessageInfo + +func (m *ImportProductSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ImportProductSetsRequest) GetInputConfig() *ImportProductSetsInputConfig { + if m != nil { + return m.InputConfig + } + return nil +} + +// Response message for the `ImportProductSets` method. +// +// This message is returned by the +// [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] +// method in the returned +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field. +type ImportProductSetsResponse struct { + // The list of reference_images that are imported successfully. + ReferenceImages []*ReferenceImage `protobuf:"bytes,1,rep,name=reference_images,json=referenceImages,proto3" json:"reference_images,omitempty"` + // The rpc status for each ImportProductSet request, including both successes + // and errors. + // + // The number of statuses here matches the number of lines in the csv file, + // and statuses[i] stores the success or failure status of processing the i-th + // line of the csv, starting from line 0. 
+ Statuses []*status.Status `protobuf:"bytes,2,rep,name=statuses,proto3" json:"statuses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportProductSetsResponse) Reset() { *m = ImportProductSetsResponse{} } +func (m *ImportProductSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ImportProductSetsResponse) ProtoMessage() {} +func (*ImportProductSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{27} +} +func (m *ImportProductSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportProductSetsResponse.Unmarshal(m, b) +} +func (m *ImportProductSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportProductSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ImportProductSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportProductSetsResponse.Merge(dst, src) +} +func (m *ImportProductSetsResponse) XXX_Size() int { + return xxx_messageInfo_ImportProductSetsResponse.Size(m) +} +func (m *ImportProductSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportProductSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportProductSetsResponse proto.InternalMessageInfo + +func (m *ImportProductSetsResponse) GetReferenceImages() []*ReferenceImage { + if m != nil { + return m.ReferenceImages + } + return nil +} + +func (m *ImportProductSetsResponse) GetStatuses() []*status.Status { + if m != nil { + return m.Statuses + } + return nil +} + +// Metadata for the batch operations such as the current state. +// +// This is included in the `metadata` field of the `Operation` returned by the +// `GetOperation` call of the `google::longrunning::Operations` service. +type BatchOperationMetadata struct { + // The current state of the batch operation. + State BatchOperationMetadata_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.vision.v1p4beta1.BatchOperationMetadata_State" json:"state,omitempty"` + // The time when the batch request was submitted to the server. + SubmitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=submit_time,json=submitTime,proto3" json:"submit_time,omitempty"` + // The time when the batch request is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchOperationMetadata) Reset() { *m = BatchOperationMetadata{} } +func (m *BatchOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*BatchOperationMetadata) ProtoMessage() {} +func (*BatchOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_product_search_service_6c1bda02aae064cc, []int{28} +} +func (m *BatchOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchOperationMetadata.Unmarshal(m, b) +} +func (m *BatchOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *BatchOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchOperationMetadata.Merge(dst, src) +} +func (m *BatchOperationMetadata) XXX_Size() int { + return xxx_messageInfo_BatchOperationMetadata.Size(m) +} +func (m *BatchOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BatchOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchOperationMetadata proto.InternalMessageInfo + +func (m *BatchOperationMetadata) GetState() BatchOperationMetadata_State { + if m != nil { + return m.State + } + return BatchOperationMetadata_STATE_UNSPECIFIED +} + +func (m *BatchOperationMetadata) GetSubmitTime() *timestamp.Timestamp { + if m != nil { + return m.SubmitTime + } + return nil +} + +func (m *BatchOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func init() { + proto.RegisterType((*Product)(nil), "google.cloud.vision.v1p4beta1.Product") + proto.RegisterType((*Product_KeyValue)(nil), "google.cloud.vision.v1p4beta1.Product.KeyValue") + proto.RegisterType((*ProductSet)(nil), "google.cloud.vision.v1p4beta1.ProductSet") + proto.RegisterType((*ReferenceImage)(nil), "google.cloud.vision.v1p4beta1.ReferenceImage") + proto.RegisterType((*CreateProductRequest)(nil), "google.cloud.vision.v1p4beta1.CreateProductRequest") + proto.RegisterType((*ListProductsRequest)(nil), "google.cloud.vision.v1p4beta1.ListProductsRequest") + proto.RegisterType((*ListProductsResponse)(nil), "google.cloud.vision.v1p4beta1.ListProductsResponse") + proto.RegisterType((*GetProductRequest)(nil), "google.cloud.vision.v1p4beta1.GetProductRequest") + proto.RegisterType((*UpdateProductRequest)(nil), "google.cloud.vision.v1p4beta1.UpdateProductRequest") + proto.RegisterType((*DeleteProductRequest)(nil), "google.cloud.vision.v1p4beta1.DeleteProductRequest") + proto.RegisterType((*CreateProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.CreateProductSetRequest") + proto.RegisterType((*ListProductSetsRequest)(nil), "google.cloud.vision.v1p4beta1.ListProductSetsRequest") + proto.RegisterType((*ListProductSetsResponse)(nil), "google.cloud.vision.v1p4beta1.ListProductSetsResponse") + proto.RegisterType((*GetProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.GetProductSetRequest") + proto.RegisterType((*UpdateProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.UpdateProductSetRequest") + proto.RegisterType((*DeleteProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.DeleteProductSetRequest") + proto.RegisterType((*CreateReferenceImageRequest)(nil), "google.cloud.vision.v1p4beta1.CreateReferenceImageRequest") + 
proto.RegisterType((*ListReferenceImagesRequest)(nil), "google.cloud.vision.v1p4beta1.ListReferenceImagesRequest") + proto.RegisterType((*ListReferenceImagesResponse)(nil), "google.cloud.vision.v1p4beta1.ListReferenceImagesResponse") + proto.RegisterType((*GetReferenceImageRequest)(nil), "google.cloud.vision.v1p4beta1.GetReferenceImageRequest") + proto.RegisterType((*DeleteReferenceImageRequest)(nil), "google.cloud.vision.v1p4beta1.DeleteReferenceImageRequest") + proto.RegisterType((*AddProductToProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.AddProductToProductSetRequest") + proto.RegisterType((*RemoveProductFromProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.RemoveProductFromProductSetRequest") + proto.RegisterType((*ListProductsInProductSetRequest)(nil), "google.cloud.vision.v1p4beta1.ListProductsInProductSetRequest") + proto.RegisterType((*ListProductsInProductSetResponse)(nil), "google.cloud.vision.v1p4beta1.ListProductsInProductSetResponse") + proto.RegisterType((*ImportProductSetsGcsSource)(nil), "google.cloud.vision.v1p4beta1.ImportProductSetsGcsSource") + proto.RegisterType((*ImportProductSetsInputConfig)(nil), "google.cloud.vision.v1p4beta1.ImportProductSetsInputConfig") + proto.RegisterType((*ImportProductSetsRequest)(nil), "google.cloud.vision.v1p4beta1.ImportProductSetsRequest") + proto.RegisterType((*ImportProductSetsResponse)(nil), "google.cloud.vision.v1p4beta1.ImportProductSetsResponse") + proto.RegisterType((*BatchOperationMetadata)(nil), "google.cloud.vision.v1p4beta1.BatchOperationMetadata") + proto.RegisterEnum("google.cloud.vision.v1p4beta1.BatchOperationMetadata_State", BatchOperationMetadata_State_name, BatchOperationMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProductSearchClient is the client API for ProductSearch service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProductSearchClient interface { + // Creates and returns a new ProductSet resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. 
+ UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) + // Permanently deletes a ProductSet. Products and ReferenceImages in the + // ProductSet are not deleted. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) + // Makes changes to a Product resource. + // Only the `display_name`, `description`, and `labels` fields can be updated + // right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + // + // Possible errors: + // + // * Returns NOT_FOUND if the product does not exist. + DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. + // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. 
+ // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. + // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Permanently deletes a reference image. + // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the reference image does not exist. + DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND If the Product is not found under the ProductSet. + RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. (results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1p4beta1.ImportProductSetsGcsSource.csv_file_uri]. 
+ ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type productSearchClient struct { + cc *grpc.ClientConn +} + +func NewProductSearchClient(cc *grpc.ClientConn) ProductSearchClient { + return &productSearchClient{cc} +} + +func (c *productSearchClient) CreateProductSet(ctx context.Context, in *CreateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/CreateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductSets(ctx context.Context, in *ListProductSetsRequest, opts ...grpc.CallOption) (*ListProductSetsResponse, error) { + out := new(ListProductSetsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/ListProductSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProductSet(ctx context.Context, in *GetProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/GetProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProductSet(ctx context.Context, in *UpdateProductSetRequest, opts ...grpc.CallOption) (*ProductSet, error) { + out := new(ProductSet) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/UpdateProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProductSet(ctx context.Context, in *DeleteProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateProduct(ctx context.Context, in *CreateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/CreateProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProducts(ctx context.Context, in *ListProductsRequest, opts ...grpc.CallOption) (*ListProductsResponse, error) { + out := new(ListProductsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/ListProducts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetProduct(ctx context.Context, in *GetProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/GetProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) UpdateProduct(ctx context.Context, in *UpdateProductRequest, opts ...grpc.CallOption) (*Product, error) { + out := new(Product) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/UpdateProduct", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteProduct(ctx context.Context, in *DeleteProductRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteProduct", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) CreateReferenceImage(ctx context.Context, in *CreateReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/CreateReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) DeleteReferenceImage(ctx context.Context, in *DeleteReferenceImageRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListReferenceImages(ctx context.Context, in *ListReferenceImagesRequest, opts ...grpc.CallOption) (*ListReferenceImagesResponse, error) { + out := new(ListReferenceImagesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/ListReferenceImages", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) GetReferenceImage(ctx context.Context, in *GetReferenceImageRequest, opts ...grpc.CallOption) (*ReferenceImage, error) { + out := new(ReferenceImage) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/GetReferenceImage", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) AddProductToProductSet(ctx context.Context, in *AddProductToProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/AddProductToProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) RemoveProductFromProductSet(ctx context.Context, in *RemoveProductFromProductSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/RemoveProductFromProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ListProductsInProductSet(ctx context.Context, in *ListProductsInProductSetRequest, opts ...grpc.CallOption) (*ListProductsInProductSetResponse, error) { + out := new(ListProductsInProductSetResponse) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/ListProductsInProductSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *productSearchClient) ImportProductSets(ctx context.Context, in *ImportProductSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.cloud.vision.v1p4beta1.ProductSearch/ImportProductSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProductSearchServer is the server API for ProductSearch service. +type ProductSearchServer interface { + // Creates and returns a new ProductSet resource. 
+ // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing, or is longer than + // 4096 characters. + CreateProductSet(context.Context, *CreateProductSetRequest) (*ProductSet, error) + // Lists ProductSets in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100, or less + // than 1. + ListProductSets(context.Context, *ListProductSetsRequest) (*ListProductSetsResponse, error) + // Gets information associated with a ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + GetProductSet(context.Context, *GetProductSetRequest) (*ProductSet, error) + // Makes changes to a ProductSet resource. + // Only display_name can be updated currently. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but + // missing from the request or longer than 4096 characters. + UpdateProductSet(context.Context, *UpdateProductSetRequest) (*ProductSet, error) + // Permanently deletes a ProductSet. Products and ReferenceImages in the + // ProductSet are not deleted. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the ProductSet does not exist. + DeleteProductSet(context.Context, *DeleteProductSetRequest) (*empty.Empty, error) + // Creates and returns a new product resource. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if display_name is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if description is longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is missing or invalid. + CreateProduct(context.Context, *CreateProductRequest) (*Product, error) + // Lists products in an unspecified order. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProducts(context.Context, *ListProductsRequest) (*ListProductsResponse, error) + // Gets information associated with a Product. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + GetProduct(context.Context, *GetProductRequest) (*Product, error) + // Makes changes to a Product resource. + // Only the `display_name`, `description`, and `labels` fields can be updated + // right now. + // + // If labels are updated, the change will not be reflected in queries until + // the next index time. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product does not exist. + // * Returns INVALID_ARGUMENT if display_name is present in update_mask but is + // missing from the request or longer than 4096 characters. + // * Returns INVALID_ARGUMENT if description is present in update_mask but is + // longer than 4096 characters. + // * Returns INVALID_ARGUMENT if product_category is present in update_mask. + UpdateProduct(context.Context, *UpdateProductRequest) (*Product, error) + // Permanently deletes a product and its reference images. + // + // Metadata of the product and all its images will be deleted right away, but + // search queries against ProductSets containing the product may still work + // until all related caches are refreshed. + // + // Possible errors: + // + // * Returns NOT_FOUND if the product does not exist. + DeleteProduct(context.Context, *DeleteProductRequest) (*empty.Empty, error) + // Creates and returns a new ReferenceImage resource. 
+ // + // The `bounding_poly` field is optional. If `bounding_poly` is not specified, + // the system will try to detect regions of interest in the image that are + // compatible with the product_category on the parent product. If it is + // specified, detection is ALWAYS skipped. The system converts polygons into + // non-rotated rectangles. + // + // Note that the pipeline will resize the image if the image resolution is too + // large to process (above 50MP). + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if the image_uri is missing or longer than 4096 + // characters. + // * Returns INVALID_ARGUMENT if the product does not exist. + // * Returns INVALID_ARGUMENT if bounding_poly is not provided, and nothing + // compatible with the parent product's product_category is detected. + // * Returns INVALID_ARGUMENT if bounding_poly contains more than 10 polygons. + CreateReferenceImage(context.Context, *CreateReferenceImageRequest) (*ReferenceImage, error) + // Permanently deletes a reference image. + // + // The image metadata will be deleted right away, but search queries + // against ProductSets containing the image may still work until all related + // caches are refreshed. + // + // The actual image files are not deleted from Google Cloud Storage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the reference image does not exist. + DeleteReferenceImage(context.Context, *DeleteReferenceImageRequest) (*empty.Empty, error) + // Lists reference images. + // + // Possible errors: + // + // * Returns NOT_FOUND if the parent product does not exist. + // * Returns INVALID_ARGUMENT if the page_size is greater than 100, or less + // than 1. + ListReferenceImages(context.Context, *ListReferenceImagesRequest) (*ListReferenceImagesResponse, error) + // Gets information associated with a ReferenceImage. + // + // Possible errors: + // + // * Returns NOT_FOUND if the specified image does not exist. + GetReferenceImage(context.Context, *GetReferenceImageRequest) (*ReferenceImage, error) + // Adds a Product to the specified ProductSet. If the Product is already + // present, no change is made. + // + // One Product can be added to at most 100 ProductSets. + // + // Possible errors: + // + // * Returns NOT_FOUND if the Product or the ProductSet doesn't exist. + AddProductToProductSet(context.Context, *AddProductToProductSetRequest) (*empty.Empty, error) + // Removes a Product from the specified ProductSet. + // + // Possible errors: + // + // * Returns NOT_FOUND If the Product is not found under the ProductSet. + RemoveProductFromProductSet(context.Context, *RemoveProductFromProductSetRequest) (*empty.Empty, error) + // Lists the Products in a ProductSet, in an unspecified order. If the + // ProductSet does not exist, the products field of the response will be + // empty. + // + // Possible errors: + // + // * Returns INVALID_ARGUMENT if page_size is greater than 100 or less than 1. + ListProductsInProductSet(context.Context, *ListProductsInProductSetRequest) (*ListProductsInProductSetResponse, error) + // Asynchronous API that imports a list of reference images to specified + // product sets based on a list of image information. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + // `Operation.response` contains `ImportProductSetsResponse`. 
(results) + // + // The input source of this method is a csv file on Google Cloud Storage. + // For the format of the csv file please see + // [ImportProductSetsGcsSource.csv_file_uri][google.cloud.vision.v1p4beta1.ImportProductSetsGcsSource.csv_file_uri]. + ImportProductSets(context.Context, *ImportProductSetsRequest) (*longrunning.Operation, error) +} + +func RegisterProductSearchServer(s *grpc.Server, srv ProductSearchServer) { + s.RegisterService(&_ProductSearch_serviceDesc, srv) +} + +func _ProductSearch_CreateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/CreateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProductSet(ctx, req.(*CreateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/ListProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductSets(ctx, req.(*ListProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/GetProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProductSet(ctx, req.(*GetProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/UpdateProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProductSet(ctx, req.(*UpdateProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductSetRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProductSet(ctx, req.(*DeleteProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/CreateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateProduct(ctx, req.(*CreateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProducts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProducts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/ListProducts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProducts(ctx, req.(*ListProductsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/GetProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetProduct(ctx, req.(*GetProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_UpdateProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).UpdateProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/UpdateProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).UpdateProduct(ctx, req.(*UpdateProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteProduct_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteProductRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ProductSearchServer).DeleteProduct(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteProduct", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteProduct(ctx, req.(*DeleteProductRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_CreateReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/CreateReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).CreateReferenceImage(ctx, req.(*CreateReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_DeleteReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/DeleteReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).DeleteReferenceImage(ctx, req.(*DeleteReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListReferenceImages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListReferenceImagesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListReferenceImages(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/ListReferenceImages", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListReferenceImages(ctx, req.(*ListReferenceImagesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_GetReferenceImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReferenceImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).GetReferenceImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/GetReferenceImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).GetReferenceImage(ctx, req.(*GetReferenceImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_AddProductToProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddProductToProductSetRequest) + 
if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/AddProductToProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).AddProductToProductSet(ctx, req.(*AddProductToProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_RemoveProductFromProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveProductFromProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/RemoveProductFromProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).RemoveProductFromProductSet(ctx, req.(*RemoveProductFromProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ListProductsInProductSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProductsInProductSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/ListProductsInProductSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ListProductsInProductSet(ctx, req.(*ListProductsInProductSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProductSearch_ImportProductSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportProductSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProductSearchServer).ImportProductSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.vision.v1p4beta1.ProductSearch/ImportProductSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProductSearchServer).ImportProductSets(ctx, req.(*ImportProductSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProductSearch_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.vision.v1p4beta1.ProductSearch", + HandlerType: (*ProductSearchServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateProductSet", + Handler: _ProductSearch_CreateProductSet_Handler, + }, + { + MethodName: "ListProductSets", + Handler: _ProductSearch_ListProductSets_Handler, + }, + { + MethodName: "GetProductSet", + Handler: _ProductSearch_GetProductSet_Handler, + }, + { + MethodName: "UpdateProductSet", + Handler: _ProductSearch_UpdateProductSet_Handler, + }, + { + MethodName: "DeleteProductSet", + Handler: _ProductSearch_DeleteProductSet_Handler, + }, + { + MethodName: "CreateProduct", + Handler: _ProductSearch_CreateProduct_Handler, + }, + { + MethodName: 
"ListProducts", + Handler: _ProductSearch_ListProducts_Handler, + }, + { + MethodName: "GetProduct", + Handler: _ProductSearch_GetProduct_Handler, + }, + { + MethodName: "UpdateProduct", + Handler: _ProductSearch_UpdateProduct_Handler, + }, + { + MethodName: "DeleteProduct", + Handler: _ProductSearch_DeleteProduct_Handler, + }, + { + MethodName: "CreateReferenceImage", + Handler: _ProductSearch_CreateReferenceImage_Handler, + }, + { + MethodName: "DeleteReferenceImage", + Handler: _ProductSearch_DeleteReferenceImage_Handler, + }, + { + MethodName: "ListReferenceImages", + Handler: _ProductSearch_ListReferenceImages_Handler, + }, + { + MethodName: "GetReferenceImage", + Handler: _ProductSearch_GetReferenceImage_Handler, + }, + { + MethodName: "AddProductToProductSet", + Handler: _ProductSearch_AddProductToProductSet_Handler, + }, + { + MethodName: "RemoveProductFromProductSet", + Handler: _ProductSearch_RemoveProductFromProductSet_Handler, + }, + { + MethodName: "ListProductsInProductSet", + Handler: _ProductSearch_ListProductsInProductSet_Handler, + }, + { + MethodName: "ImportProductSets", + Handler: _ProductSearch_ImportProductSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/vision/v1p4beta1/product_search_service.proto", +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/product_search_service.proto", fileDescriptor_product_search_service_6c1bda02aae064cc) +} + +var fileDescriptor_product_search_service_6c1bda02aae064cc = []byte{ + // 1874 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcf, 0x6f, 0xe3, 0xc6, + 0x15, 0xce, 0xd8, 0xeb, 0x5d, 0xfb, 0xc9, 0xb2, 0xb5, 0x53, 0xd7, 0x56, 0xe4, 0x2c, 0xe2, 0xb0, + 0x45, 0x9a, 0xa8, 0x89, 0xd8, 0x95, 0xbb, 0xdb, 0xae, 0xdc, 0x6c, 0x6a, 0xcb, 0xb2, 0xa3, 0xc6, + 0xeb, 0x75, 0x28, 0x7b, 0x9b, 0xe6, 0x10, 0x81, 0x26, 0xc7, 0x0c, 0xbb, 0x14, 0xc9, 0x70, 0x28, + 0x23, 0x4e, 0x91, 0x4b, 0xd0, 0x53, 0x0f, 0x5d, 0xa0, 0x45, 0x0f, 0x39, 0x15, 0xd8, 0xa0, 0xe7, + 0x02, 0xfd, 0x71, 0x68, 0x9a, 0x43, 0xd1, 0x5b, 0x7b, 0xe8, 0xa5, 0xc7, 0x9e, 0x0a, 0xe4, 0x8f, + 0xe8, 0xb1, 0x98, 0xe1, 0x50, 0x22, 0x29, 0xca, 0xa4, 0xb4, 0x59, 0xe4, 0xa6, 0x19, 0xbe, 0xf7, + 0xe6, 0x7b, 0xef, 0x7d, 0x33, 0xf3, 0xde, 0x08, 0x1a, 0x86, 0xe3, 0x18, 0x16, 0x91, 0x35, 0xcb, + 0xe9, 0xeb, 0xf2, 0xb9, 0x49, 0x4d, 0xc7, 0x96, 0xcf, 0x6f, 0xba, 0xdf, 0x3d, 0x25, 0xbe, 0x7a, + 0x53, 0x76, 0x3d, 0x47, 0xef, 0x6b, 0x7e, 0x97, 0x12, 0xd5, 0xd3, 0xde, 0xeb, 0x52, 0xe2, 0x9d, + 0x9b, 0x1a, 0xa9, 0xb9, 0x9e, 0xe3, 0x3b, 0xf8, 0x46, 0xa0, 0x5b, 0xe3, 0xba, 0xb5, 0x40, 0xb7, + 0x36, 0xd0, 0xad, 0x3c, 0x27, 0x4c, 0xab, 0xae, 0x29, 0xab, 0xb6, 0xed, 0xf8, 0xaa, 0x6f, 0x3a, + 0x36, 0x0d, 0x94, 0x2b, 0xaf, 0x5c, 0xbe, 0xb0, 0x41, 0x9c, 0x1e, 0xf1, 0xbd, 0x0b, 0x21, 0xfd, + 0x0d, 0x21, 0x6d, 0x39, 0xb6, 0xe1, 0xf5, 0x6d, 0xdb, 0xb4, 0x0d, 0xd9, 0x71, 0x89, 0x17, 0x33, + 0xb9, 0x2e, 0x84, 0xf8, 0xe8, 0xb4, 0x7f, 0x26, 0x93, 0x9e, 0xeb, 0x87, 0x16, 0x36, 0x92, 0x1f, + 0xcf, 0x4c, 0x62, 0xe9, 0xdd, 0x9e, 0x4a, 0x1f, 0x0a, 0x89, 0xe7, 0x93, 0x12, 0xbe, 0xd9, 0x23, + 0xd4, 0x57, 0x7b, 0xae, 0x10, 0x58, 0x13, 0x02, 0x9e, 0xab, 0xc9, 0xd4, 0x57, 0xfd, 0xbe, 0x58, + 0x58, 0xfa, 0x64, 0x06, 0xae, 0x1d, 0x05, 0x91, 0xc2, 0x18, 0xae, 0xd8, 0x6a, 0x8f, 0x94, 0xd1, + 0x06, 0x7a, 0x69, 0x41, 0xe1, 0xbf, 0xf1, 0x0b, 0xb0, 0xa8, 0x9b, 0xd4, 0xb5, 0xd4, 0x8b, 0x2e, + 0xff, 0x36, 0xc3, 0xbf, 0x15, 0xc4, 0xdc, 0x21, 0x13, 0xd9, 0x80, 0x82, 0x4e, 0xa8, 0xe6, 0x99, + 0x2e, 0xf3, 0xa8, 0x3c, 0x2b, 
0x24, 0x86, 0x53, 0xf8, 0x65, 0x28, 0x85, 0xd9, 0xd0, 0x54, 0x9f, + 0x18, 0x8e, 0x77, 0x51, 0xbe, 0xc2, 0xc5, 0x96, 0xc5, 0x7c, 0x53, 0x4c, 0xe3, 0x07, 0xb0, 0x14, + 0x8a, 0x5a, 0xea, 0x29, 0xb1, 0x68, 0x79, 0x6e, 0x63, 0xf6, 0xa5, 0x42, 0x5d, 0xae, 0x5d, 0x9a, + 0xb1, 0x9a, 0xf0, 0xa1, 0xf6, 0x26, 0xb9, 0x78, 0xa0, 0x5a, 0x7d, 0xa2, 0x14, 0x85, 0x99, 0x03, + 0x6e, 0xa5, 0x52, 0x87, 0xf9, 0xf0, 0x13, 0x2e, 0xc1, 0xec, 0x43, 0x72, 0x21, 0xdc, 0x64, 0x3f, + 0xf1, 0x0a, 0xcc, 0x9d, 0xb3, 0x4f, 0xc2, 0xbd, 0x60, 0x20, 0xfd, 0x11, 0x01, 0x08, 0xbb, 0x1d, + 0x32, 0x75, 0x78, 0xee, 0x00, 0x98, 0xb6, 0x4e, 0x3e, 0xe8, 0xb2, 0x9c, 0xf0, 0xe8, 0x14, 0xea, + 0x95, 0xd0, 0x9b, 0x30, 0x61, 0xb5, 0xe3, 0x30, 0x61, 0xca, 0x02, 0x97, 0x66, 0x63, 0xbc, 0x09, + 0x85, 0x40, 0x95, 0x78, 0x9e, 0xe3, 0xf1, 0x90, 0x15, 0xea, 0x38, 0xd4, 0xf5, 0x5c, 0xad, 0xd6, + 0xe1, 0xb9, 0x54, 0x82, 0x15, 0x5a, 0x4c, 0x4a, 0xfa, 0x05, 0x82, 0x25, 0x85, 0x9c, 0x11, 0x8f, + 0xd8, 0x1a, 0x69, 0xf7, 0x54, 0x83, 0xa4, 0x22, 0x2f, 0xc1, 0x6c, 0xdf, 0x33, 0x05, 0x60, 0xf6, + 0x13, 0x2b, 0xb0, 0x74, 0xea, 0xf4, 0x6d, 0xdd, 0xb4, 0x8d, 0xae, 0xeb, 0x58, 0x17, 0xb4, 0x3c, + 0xcb, 0x43, 0xff, 0xed, 0x8c, 0xd0, 0xef, 0x08, 0xa5, 0x23, 0xc7, 0xba, 0x50, 0x8a, 0xa7, 0x91, + 0x11, 0x95, 0x1e, 0x21, 0x58, 0x69, 0x7a, 0x44, 0xf5, 0x89, 0x08, 0xa4, 0x42, 0xde, 0xef, 0x13, + 0xea, 0xe3, 0x55, 0xb8, 0xea, 0xaa, 0x1e, 0xb1, 0x7d, 0x01, 0x4a, 0x8c, 0xf0, 0x0f, 0xe1, 0x9a, + 0x48, 0x1c, 0x87, 0x56, 0xa8, 0xbf, 0x98, 0x2f, 0xf1, 0x4a, 0xa8, 0x86, 0x6f, 0x00, 0x84, 0x0c, + 0x32, 0x75, 0xc1, 0xc6, 0x05, 0x31, 0xd3, 0xd6, 0x25, 0x13, 0xbe, 0x76, 0x60, 0x52, 0x5f, 0xa8, + 0xd1, 0x2c, 0x3c, 0xeb, 0xb0, 0xe0, 0xaa, 0x06, 0xe9, 0x52, 0xf3, 0xc3, 0x20, 0xbb, 0x73, 0xca, + 0x3c, 0x9b, 0xe8, 0x98, 0x1f, 0x12, 0xbe, 0x14, 0xfb, 0xe8, 0x3b, 0x0f, 0x89, 0x3d, 0x58, 0x4a, + 0x35, 0xc8, 0x31, 0x9b, 0x90, 0x3e, 0x46, 0xb0, 0x12, 0x5f, 0x8b, 0xba, 0x8e, 0x4d, 0x09, 0xde, + 0x81, 0x79, 0x01, 0x88, 0x96, 0x11, 0x8f, 0x71, 0x5e, 0x2f, 0x07, 0x7a, 0xf8, 0x45, 0x58, 0xb6, + 0xc9, 0x07, 0x7e, 0x37, 0x02, 0x20, 0xc8, 0x65, 0x91, 0x4d, 0x1f, 0x0d, 0x40, 0x7c, 0x0b, 0xae, + 0xef, 0x13, 0x3f, 0x11, 0xfd, 0x14, 0x42, 0x48, 0xbf, 0x41, 0xb0, 0x72, 0xe2, 0xea, 0xa3, 0xa9, + 0x8a, 0xa4, 0x04, 0x4d, 0x97, 0x92, 0x2d, 0x28, 0xf4, 0xb9, 0x65, 0x7e, 0x66, 0x89, 0xc4, 0x8e, + 0xee, 0x81, 0x3d, 0x76, 0xac, 0xdd, 0x53, 0xe9, 0x43, 0x05, 0x02, 0x71, 0xf6, 0x5b, 0xaa, 0xc2, + 0xca, 0x2e, 0xb1, 0xc8, 0x08, 0xac, 0x34, 0x1f, 0x3e, 0x45, 0xb0, 0x16, 0xa3, 0x5b, 0x87, 0x64, + 0x32, 0xee, 0x47, 0x50, 0x18, 0x5e, 0x15, 0x21, 0xeb, 0x5e, 0xce, 0xe7, 0x22, 0x33, 0x1f, 0xb2, + 0x8d, 0x1d, 0x11, 0xdf, 0x1c, 0x9e, 0x5e, 0x94, 0x44, 0xf8, 0xb7, 0x38, 0x94, 0x69, 0xeb, 0x92, + 0x05, 0xab, 0x11, 0x5a, 0x74, 0xc8, 0xd3, 0x65, 0xe1, 0x23, 0x04, 0x6b, 0x23, 0xcb, 0x09, 0x22, + 0x1e, 0xc0, 0x62, 0x04, 0x6f, 0x48, 0xc6, 0x09, 0x9c, 0x2f, 0x0c, 0x1d, 0xcb, 0x4f, 0xc9, 0x2a, + 0xac, 0x0c, 0x29, 0x19, 0xc9, 0x50, 0x5a, 0x46, 0x1f, 0x23, 0x58, 0x8b, 0xb1, 0x32, 0x22, 0x9f, + 0xc8, 0x1c, 0x7a, 0x92, 0xcc, 0x3d, 0x11, 0x45, 0x5f, 0x85, 0xb5, 0x18, 0x45, 0x33, 0x7c, 0xfa, + 0x2b, 0x82, 0xf5, 0x80, 0xa5, 0xf1, 0x73, 0x3a, 0x8b, 0x05, 0x0f, 0x60, 0xd9, 0x0b, 0x15, 0xba, + 0x26, 0xd3, 0x10, 0x38, 0x5f, 0xcd, 0xf0, 0x39, 0xb1, 0xcc, 0x92, 0x17, 0xbf, 0x1e, 0x5e, 0x01, + 0x9c, 0xb0, 0x3b, 0x64, 0x6e, 0x29, 0x2e, 0xdb, 0xd6, 0x25, 0x17, 0x2a, 0x8c, 0x4e, 0x71, 0x9b, + 0x4f, 0x95, 0xc1, 0x9f, 0x23, 0x58, 0x4f, 0x5d, 0x52, 0xb0, 0xf8, 0x6d, 0x28, 0x25, 0xf0, 0x87, + 0x4c, 0x9e, 0x30, 0x30, 0xcb, 0x71, 0x67, 0xe9, 0xe5, 
0xa8, 0x53, 0xe8, 0x3e, 0x9b, 0x46, 0xf7, + 0x1a, 0x94, 0xf7, 0x89, 0x9f, 0x9e, 0xea, 0x34, 0x7a, 0xdc, 0x84, 0xf5, 0x80, 0x4d, 0xf9, 0x55, + 0xee, 0xc1, 0x8d, 0x6d, 0x5d, 0x17, 0xec, 0x3b, 0x76, 0x72, 0xd1, 0x10, 0x97, 0xe3, 0x57, 0xed, + 0xc2, 0xe0, 0xbc, 0x96, 0x14, 0x90, 0x14, 0xd2, 0x73, 0xce, 0x43, 0x3e, 0xef, 0x79, 0x4e, 0xef, + 0x49, 0x6d, 0xbe, 0x0f, 0xcf, 0x47, 0xef, 0xc2, 0xb6, 0x9d, 0xcf, 0xe0, 0x93, 0xf0, 0xe6, 0x97, + 0x08, 0x36, 0xc6, 0xaf, 0xf9, 0x15, 0xdc, 0xc5, 0x77, 0xa1, 0xd2, 0xee, 0xb9, 0x8e, 0x17, 0x3d, + 0x8b, 0xf7, 0x35, 0xda, 0x71, 0xfa, 0x9e, 0xc6, 0xea, 0xe8, 0x45, 0x8d, 0x9e, 0x77, 0xcf, 0x4c, + 0x8b, 0x74, 0x59, 0x69, 0x16, 0x84, 0x01, 0x34, 0x7a, 0xbe, 0x67, 0x5a, 0xe4, 0xc4, 0x33, 0xa5, + 0x9f, 0x23, 0x78, 0x6e, 0xc4, 0x40, 0xdb, 0x76, 0xfb, 0x7e, 0xd3, 0xb1, 0xcf, 0x4c, 0x03, 0xbf, + 0x03, 0x60, 0x68, 0xb4, 0x4b, 0xb9, 0x41, 0x71, 0x20, 0xde, 0xc9, 0x70, 0x67, 0x3c, 0xa2, 0x37, + 0x9e, 0x51, 0x16, 0x8c, 0x70, 0xb0, 0x33, 0x0f, 0x57, 0x03, 0xbb, 0xd2, 0xaf, 0x10, 0x94, 0x47, + 0xb4, 0xb2, 0x0e, 0x80, 0x77, 0x61, 0xd1, 0x64, 0x48, 0xbb, 0x1a, 0x87, 0x2a, 0x4e, 0xae, 0xad, + 0x49, 0xc1, 0x45, 0xbc, 0x55, 0x0a, 0xe6, 0x70, 0x20, 0xfd, 0x0e, 0xc1, 0xb3, 0x29, 0xa0, 0x9e, + 0xfa, 0x11, 0x51, 0x83, 0xf9, 0xa0, 0xa1, 0x22, 0xb4, 0x3c, 0xc3, 0x2d, 0xa6, 0x15, 0xe8, 0x03, + 0x19, 0xe9, 0xb3, 0x19, 0x58, 0xdd, 0x51, 0x7d, 0xed, 0xbd, 0xfb, 0x61, 0x0f, 0x78, 0x8f, 0xf8, + 0xaa, 0xae, 0xfa, 0x2a, 0x7e, 0x0b, 0xe6, 0x98, 0x58, 0x90, 0xb8, 0xa5, 0xcc, 0xd8, 0xa4, 0x5b, + 0xe1, 0x2b, 0x12, 0x25, 0xb0, 0xc4, 0xae, 0x35, 0xda, 0x3f, 0xed, 0x99, 0x7e, 0xd0, 0x7d, 0xcc, + 0x64, 0x76, 0x1f, 0x10, 0x88, 0xf3, 0xf6, 0xe3, 0x16, 0xcc, 0x13, 0x5b, 0xcf, 0xdb, 0xb7, 0x5c, + 0x23, 0xb6, 0xce, 0x46, 0xd2, 0x4f, 0x60, 0x8e, 0x63, 0xc0, 0x5f, 0x87, 0xeb, 0x9d, 0xe3, 0xed, + 0xe3, 0x56, 0xf7, 0xe4, 0xb0, 0x73, 0xd4, 0x6a, 0xb6, 0xf7, 0xda, 0xad, 0xdd, 0xd2, 0x33, 0x78, + 0x09, 0xe0, 0x48, 0xb9, 0xdf, 0x6c, 0x75, 0x3a, 0xed, 0xc3, 0xfd, 0x12, 0x62, 0xe3, 0xce, 0x49, + 0x93, 0x8d, 0xf7, 0x4e, 0x0e, 0x4a, 0x33, 0x18, 0xe0, 0xea, 0xde, 0x76, 0xfb, 0xa0, 0xb5, 0x5b, + 0x9a, 0xc5, 0x45, 0x58, 0x68, 0x6e, 0x1f, 0x36, 0x5b, 0x07, 0x6c, 0x78, 0xa5, 0xfe, 0xdb, 0x75, + 0x28, 0x0e, 0xd2, 0xcb, 0xda, 0x7a, 0xfc, 0x77, 0x04, 0xa5, 0x64, 0xc5, 0x87, 0x6f, 0x67, 0x44, + 0x6e, 0x4c, 0x89, 0x58, 0xc9, 0x5f, 0x3b, 0x48, 0x6f, 0x7e, 0xfc, 0xef, 0x2f, 0x7e, 0x3d, 0xd3, + 0x92, 0x6e, 0x47, 0x1a, 0xff, 0x9f, 0x05, 0x54, 0x7f, 0xcd, 0xf5, 0x9c, 0x9f, 0x12, 0xcd, 0xa7, + 0x72, 0x55, 0xb6, 0x1c, 0x2d, 0xe8, 0xf3, 0xe5, 0xea, 0x47, 0x72, 0xa4, 0x56, 0x6a, 0x44, 0x2b, + 0x17, 0xfc, 0x37, 0x04, 0xcb, 0x89, 0x12, 0x0d, 0xdf, 0xca, 0xc0, 0x92, 0x5e, 0x41, 0x56, 0x6e, + 0x4f, 0xaa, 0x16, 0x6c, 0x10, 0xe9, 0x2e, 0xf7, 0xe7, 0xfb, 0x78, 0x4a, 0x7f, 0xf0, 0xef, 0x11, + 0x14, 0x63, 0x45, 0x1d, 0xde, 0xcc, 0x40, 0x92, 0x56, 0x02, 0x4e, 0x92, 0x81, 0x54, 0xc4, 0xec, + 0xce, 0x18, 0x83, 0x37, 0x0a, 0x57, 0xae, 0x7e, 0x84, 0xff, 0x85, 0xa0, 0x94, 0xac, 0x2c, 0x33, + 0x99, 0x33, 0xa6, 0x14, 0x9d, 0x04, 0xf7, 0x8f, 0x39, 0xee, 0xb7, 0xea, 0x3b, 0xb1, 0x48, 0x0f, + 0xd9, 0x50, 0xcb, 0xef, 0x43, 0x9c, 0x45, 0x8f, 0x11, 0x94, 0x92, 0x65, 0x68, 0xa6, 0x43, 0x63, + 0xea, 0xd6, 0xca, 0xea, 0xc8, 0x4e, 0x6f, 0xf5, 0x5c, 0xff, 0x22, 0x8c, 0x7a, 0x75, 0xda, 0xa8, + 0xff, 0x01, 0x41, 0x31, 0xb6, 0xfd, 0x32, 0x79, 0x92, 0xf6, 0x7c, 0x50, 0xc9, 0x79, 0x47, 0x4b, + 0xbb, 0x1c, 0xee, 0x5d, 0x69, 0x73, 0x72, 0x5a, 0xd3, 0xc6, 0xa0, 0x7f, 0xfd, 0x13, 0x82, 0xc5, + 0x68, 0x21, 0x81, 0xeb, 0xf9, 0x77, 0xd9, 0x60, 0x67, 0x6e, 0x4e, 0xa4, 0x23, 
0xb6, 0xe5, 0x16, + 0xc7, 0x7f, 0x0b, 0x4f, 0x83, 0x1f, 0x7f, 0x8a, 0x00, 0x86, 0xbb, 0x0c, 0x7f, 0x27, 0xf7, 0x86, + 0x9c, 0x34, 0xca, 0xa9, 0x28, 0x73, 0x90, 0x82, 0x33, 0xe2, 0x33, 0x04, 0xc5, 0xd8, 0xb6, 0xca, + 0x64, 0x44, 0xda, 0x2b, 0x45, 0x6e, 0xac, 0x07, 0x1c, 0xeb, 0x5e, 0x7d, 0x2b, 0x65, 0xfb, 0xd5, + 0x72, 0x62, 0x1e, 0x32, 0xe3, 0x13, 0x04, 0xc5, 0xd8, 0x16, 0xca, 0x04, 0x9f, 0xf6, 0x96, 0x31, + 0x76, 0xb7, 0x89, 0xc0, 0x56, 0xa7, 0x0a, 0xec, 0x7f, 0x07, 0x6f, 0x6f, 0x89, 0xe7, 0xc0, 0x46, + 0xae, 0x1d, 0x97, 0xda, 0x7d, 0x54, 0x26, 0x2b, 0x97, 0xa4, 0x77, 0xb9, 0x03, 0x6f, 0x4b, 0xad, + 0xdc, 0xfc, 0x8d, 0xba, 0x20, 0x27, 0x8a, 0xad, 0x46, 0xb2, 0xff, 0xc5, 0x7f, 0x41, 0xe1, 0xeb, + 0xd0, 0x84, 0x3e, 0x5e, 0xd2, 0x61, 0x8d, 0xcd, 0xc6, 0x3d, 0xee, 0xcc, 0x7e, 0xb5, 0x35, 0x79, + 0x36, 0x92, 0x9e, 0xb0, 0xfc, 0xfc, 0x07, 0x05, 0x4f, 0x91, 0x89, 0xb6, 0x16, 0xdf, 0xc9, 0x71, + 0x50, 0xa4, 0x77, 0xdf, 0x95, 0xc6, 0x34, 0xaa, 0xe2, 0xa8, 0x11, 0xde, 0xe1, 0x2f, 0x27, 0x55, + 0xf8, 0x9f, 0x88, 0x3f, 0x3c, 0x26, 0xd2, 0xf2, 0xbd, 0xec, 0x33, 0xe8, 0x4b, 0xe1, 0x5d, 0xaa, + 0x33, 0xd3, 0xa7, 0xea, 0x73, 0x04, 0xab, 0xe9, 0x0d, 0x36, 0xfe, 0x41, 0x06, 0xb0, 0x4b, 0xfb, + 0xf2, 0xb1, 0x54, 0x13, 0xa7, 0x94, 0xb4, 0x3d, 0xdd, 0x35, 0xdb, 0x50, 0x07, 0xab, 0x36, 0x50, + 0x15, 0xff, 0x03, 0xc1, 0xfa, 0x25, 0x0d, 0x3d, 0xde, 0xce, 0x0c, 0x6e, 0xd6, 0x63, 0xc0, 0x58, + 0x47, 0xee, 0x73, 0x47, 0xda, 0xd2, 0xee, 0x94, 0x8e, 0x78, 0xd1, 0xa5, 0x99, 0x2f, 0x5f, 0x20, + 0x28, 0x8f, 0x6b, 0xea, 0xf1, 0xdd, 0x09, 0xee, 0xd8, 0x94, 0x17, 0x88, 0xca, 0xeb, 0x53, 0xeb, + 0x8b, 0x4d, 0xb4, 0xcf, 0xdd, 0xdd, 0xc6, 0xaf, 0x4f, 0xe7, 0xee, 0xf0, 0xee, 0xfe, 0x33, 0x82, + 0xeb, 0x23, 0xed, 0x6c, 0xe6, 0xf6, 0x19, 0xd7, 0x95, 0x57, 0x6e, 0x84, 0x8a, 0x91, 0x7f, 0x21, + 0x6b, 0x83, 0xde, 0x51, 0x7a, 0x83, 0xc3, 0xde, 0x91, 0x5e, 0x9b, 0xb2, 0x9b, 0x31, 0xf9, 0xba, + 0x0d, 0x54, 0xdd, 0x79, 0x84, 0xe0, 0x05, 0xcd, 0xe9, 0x5d, 0x8e, 0x73, 0xe7, 0xd9, 0x58, 0x13, + 0xd7, 0x09, 0xfe, 0x9a, 0x3d, 0x62, 0xcc, 0x39, 0x42, 0xef, 0x34, 0x85, 0xae, 0xe1, 0x58, 0xaa, + 0x6d, 0xd4, 0x1c, 0xcf, 0x90, 0x0d, 0x62, 0x73, 0x5e, 0xc9, 0xc1, 0x27, 0xd5, 0x35, 0xe9, 0x98, + 0x7f, 0x5f, 0xb7, 0x82, 0x89, 0xff, 0x21, 0xf4, 0x78, 0xe6, 0xca, 0x7e, 0xf3, 0xc1, 0xe1, 0xe9, + 0x55, 0xae, 0xb9, 0xf9, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc0, 0x32, 0x1a, 0x41, 0x2f, 0x1e, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/text_annotation.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/text_annotation.pb.go new file mode 100644 index 0000000..308ccae --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/text_annotation.pb.go @@ -0,0 +1,799 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/text_annotation.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enum to denote the type of break found. New line, space etc. +type TextAnnotation_DetectedBreak_BreakType int32 + +const ( + // Unknown break label type. + TextAnnotation_DetectedBreak_UNKNOWN TextAnnotation_DetectedBreak_BreakType = 0 + // Regular space. + TextAnnotation_DetectedBreak_SPACE TextAnnotation_DetectedBreak_BreakType = 1 + // Sure space (very wide). + TextAnnotation_DetectedBreak_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 2 + // Line-wrapping break. + TextAnnotation_DetectedBreak_EOL_SURE_SPACE TextAnnotation_DetectedBreak_BreakType = 3 + // End-line hyphen that is not present in text; does not co-occur with + // `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + TextAnnotation_DetectedBreak_HYPHEN TextAnnotation_DetectedBreak_BreakType = 4 + // Line break that ends a paragraph. + TextAnnotation_DetectedBreak_LINE_BREAK TextAnnotation_DetectedBreak_BreakType = 5 +) + +var TextAnnotation_DetectedBreak_BreakType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SPACE", + 2: "SURE_SPACE", + 3: "EOL_SURE_SPACE", + 4: "HYPHEN", + 5: "LINE_BREAK", +} +var TextAnnotation_DetectedBreak_BreakType_value = map[string]int32{ + "UNKNOWN": 0, + "SPACE": 1, + "SURE_SPACE": 2, + "EOL_SURE_SPACE": 3, + "HYPHEN": 4, + "LINE_BREAK": 5, +} + +func (x TextAnnotation_DetectedBreak_BreakType) String() string { + return proto.EnumName(TextAnnotation_DetectedBreak_BreakType_name, int32(x)) +} +func (TextAnnotation_DetectedBreak_BreakType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{0, 1, 0} +} + +// Type of a block (text, image etc) as identified by OCR. +type Block_BlockType int32 + +const ( + // Unknown block type. + Block_UNKNOWN Block_BlockType = 0 + // Regular text block. + Block_TEXT Block_BlockType = 1 + // Table block. + Block_TABLE Block_BlockType = 2 + // Image block. + Block_PICTURE Block_BlockType = 3 + // Horizontal/vertical line box. + Block_RULER Block_BlockType = 4 + // Barcode block. + Block_BARCODE Block_BlockType = 5 +) + +var Block_BlockType_name = map[int32]string{ + 0: "UNKNOWN", + 1: "TEXT", + 2: "TABLE", + 3: "PICTURE", + 4: "RULER", + 5: "BARCODE", +} +var Block_BlockType_value = map[string]int32{ + "UNKNOWN": 0, + "TEXT": 1, + "TABLE": 2, + "PICTURE": 3, + "RULER": 4, + "BARCODE": 5, +} + +func (x Block_BlockType) String() string { + return proto.EnumName(Block_BlockType_name, int32(x)) +} +func (Block_BlockType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{2, 0} +} + +// TextAnnotation contains a structured representation of OCR extracted text. +// The hierarchy of an OCR extracted text structure is like this: +// TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol +// Each structural component, starting from Page, may further have their own +// properties. Properties describe detected languages, breaks etc.. Please refer +// to the +// [TextAnnotation.TextProperty][google.cloud.vision.v1p4beta1.TextAnnotation.TextProperty] +// message definition below for more detail. +type TextAnnotation struct { + // List of pages detected by OCR. + Pages []*Page `protobuf:"bytes,1,rep,name=pages,proto3" json:"pages,omitempty"` + // UTF-8 text detected on the pages. 
+ Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation) Reset() { *m = TextAnnotation{} } +func (m *TextAnnotation) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation) ProtoMessage() {} +func (*TextAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{0} +} +func (m *TextAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation.Unmarshal(m, b) +} +func (m *TextAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation.Merge(dst, src) +} +func (m *TextAnnotation) XXX_Size() int { + return xxx_messageInfo_TextAnnotation.Size(m) +} +func (m *TextAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation proto.InternalMessageInfo + +func (m *TextAnnotation) GetPages() []*Page { + if m != nil { + return m.Pages + } + return nil +} + +func (m *TextAnnotation) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +// Detected language for a structural component. +type TextAnnotation_DetectedLanguage struct { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Confidence of detected language. Range [0, 1]. + Confidence float32 `protobuf:"fixed32,2,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedLanguage) Reset() { *m = TextAnnotation_DetectedLanguage{} } +func (m *TextAnnotation_DetectedLanguage) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedLanguage) ProtoMessage() {} +func (*TextAnnotation_DetectedLanguage) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{0, 0} +} +func (m *TextAnnotation_DetectedLanguage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedLanguage) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedLanguage.Merge(dst, src) +} +func (m *TextAnnotation_DetectedLanguage) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedLanguage.Size(m) +} +func (m *TextAnnotation_DetectedLanguage) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedLanguage.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedLanguage proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedLanguage) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *TextAnnotation_DetectedLanguage) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Detected start or end of a structural component. 
+type TextAnnotation_DetectedBreak struct { + // Detected break type. + Type TextAnnotation_DetectedBreak_BreakType `protobuf:"varint,1,opt,name=type,proto3,enum=google.cloud.vision.v1p4beta1.TextAnnotation_DetectedBreak_BreakType" json:"type,omitempty"` + // True if break prepends the element. + IsPrefix bool `protobuf:"varint,2,opt,name=is_prefix,json=isPrefix,proto3" json:"is_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_DetectedBreak) Reset() { *m = TextAnnotation_DetectedBreak{} } +func (m *TextAnnotation_DetectedBreak) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_DetectedBreak) ProtoMessage() {} +func (*TextAnnotation_DetectedBreak) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{0, 1} +} +func (m *TextAnnotation_DetectedBreak) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Unmarshal(m, b) +} +func (m *TextAnnotation_DetectedBreak) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_DetectedBreak) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_DetectedBreak.Merge(dst, src) +} +func (m *TextAnnotation_DetectedBreak) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_DetectedBreak.Size(m) +} +func (m *TextAnnotation_DetectedBreak) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_DetectedBreak.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_DetectedBreak proto.InternalMessageInfo + +func (m *TextAnnotation_DetectedBreak) GetType() TextAnnotation_DetectedBreak_BreakType { + if m != nil { + return m.Type + } + return TextAnnotation_DetectedBreak_UNKNOWN +} + +func (m *TextAnnotation_DetectedBreak) GetIsPrefix() bool { + if m != nil { + return m.IsPrefix + } + return false +} + +// Additional information detected on the structural component. +type TextAnnotation_TextProperty struct { + // A list of detected languages together with confidence. + DetectedLanguages []*TextAnnotation_DetectedLanguage `protobuf:"bytes,1,rep,name=detected_languages,json=detectedLanguages,proto3" json:"detected_languages,omitempty"` + // Detected start or end of a text segment. 
+ DetectedBreak *TextAnnotation_DetectedBreak `protobuf:"bytes,2,opt,name=detected_break,json=detectedBreak,proto3" json:"detected_break,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TextAnnotation_TextProperty) Reset() { *m = TextAnnotation_TextProperty{} } +func (m *TextAnnotation_TextProperty) String() string { return proto.CompactTextString(m) } +func (*TextAnnotation_TextProperty) ProtoMessage() {} +func (*TextAnnotation_TextProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{0, 2} +} +func (m *TextAnnotation_TextProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TextAnnotation_TextProperty.Unmarshal(m, b) +} +func (m *TextAnnotation_TextProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TextAnnotation_TextProperty.Marshal(b, m, deterministic) +} +func (dst *TextAnnotation_TextProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_TextAnnotation_TextProperty.Merge(dst, src) +} +func (m *TextAnnotation_TextProperty) XXX_Size() int { + return xxx_messageInfo_TextAnnotation_TextProperty.Size(m) +} +func (m *TextAnnotation_TextProperty) XXX_DiscardUnknown() { + xxx_messageInfo_TextAnnotation_TextProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_TextAnnotation_TextProperty proto.InternalMessageInfo + +func (m *TextAnnotation_TextProperty) GetDetectedLanguages() []*TextAnnotation_DetectedLanguage { + if m != nil { + return m.DetectedLanguages + } + return nil +} + +func (m *TextAnnotation_TextProperty) GetDetectedBreak() *TextAnnotation_DetectedBreak { + if m != nil { + return m.DetectedBreak + } + return nil +} + +// Detected page from OCR. +type Page struct { + // Additional information detected on the page. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // Page width. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Width int32 `protobuf:"varint,2,opt,name=width,proto3" json:"width,omitempty"` + // Page height. For PDFs the unit is points. For images (including + // TIFFs) the unit is pixels. + Height int32 `protobuf:"varint,3,opt,name=height,proto3" json:"height,omitempty"` + // List of blocks of text, images etc on this page. + Blocks []*Block `protobuf:"bytes,4,rep,name=blocks,proto3" json:"blocks,omitempty"` + // Confidence of the OCR results on the page. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Page) Reset() { *m = Page{} } +func (m *Page) String() string { return proto.CompactTextString(m) } +func (*Page) ProtoMessage() {} +func (*Page) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{1} +} +func (m *Page) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Page.Unmarshal(m, b) +} +func (m *Page) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Page.Marshal(b, m, deterministic) +} +func (dst *Page) XXX_Merge(src proto.Message) { + xxx_messageInfo_Page.Merge(dst, src) +} +func (m *Page) XXX_Size() int { + return xxx_messageInfo_Page.Size(m) +} +func (m *Page) XXX_DiscardUnknown() { + xxx_messageInfo_Page.DiscardUnknown(m) +} + +var xxx_messageInfo_Page proto.InternalMessageInfo + +func (m *Page) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Page) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *Page) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +func (m *Page) GetBlocks() []*Block { + if m != nil { + return m.Blocks + } + return nil +} + +func (m *Page) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Logical element on the page. +type Block struct { + // Additional information detected for the block. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the block. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // + // * when the text is horizontal it might look like: + // + // 0----1 + // | | + // 3----2 + // + // * when it's rotated 180 degrees around the top-left corner it becomes: + // + // 2----3 + // | | + // 1----0 + // + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of paragraphs in this block (if this blocks is of type text). + Paragraphs []*Paragraph `protobuf:"bytes,3,rep,name=paragraphs,proto3" json:"paragraphs,omitempty"` + // Detected block type (text, image etc) for this block. + BlockType Block_BlockType `protobuf:"varint,4,opt,name=block_type,json=blockType,proto3,enum=google.cloud.vision.v1p4beta1.Block_BlockType" json:"block_type,omitempty"` + // Confidence of the OCR results on the block. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,5,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Block) Reset() { *m = Block{} } +func (m *Block) String() string { return proto.CompactTextString(m) } +func (*Block) ProtoMessage() {} +func (*Block) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{2} +} +func (m *Block) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Block.Unmarshal(m, b) +} +func (m *Block) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Block.Marshal(b, m, deterministic) +} +func (dst *Block) XXX_Merge(src proto.Message) { + xxx_messageInfo_Block.Merge(dst, src) +} +func (m *Block) XXX_Size() int { + return xxx_messageInfo_Block.Size(m) +} +func (m *Block) XXX_DiscardUnknown() { + xxx_messageInfo_Block.DiscardUnknown(m) +} + +var xxx_messageInfo_Block proto.InternalMessageInfo + +func (m *Block) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Block) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Block) GetParagraphs() []*Paragraph { + if m != nil { + return m.Paragraphs + } + return nil +} + +func (m *Block) GetBlockType() Block_BlockType { + if m != nil { + return m.BlockType + } + return Block_UNKNOWN +} + +func (m *Block) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// Structural unit of text representing a number of words in certain order. +type Paragraph struct { + // Additional information detected for the paragraph. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the paragraph. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of words in this paragraph. + Words []*Word `protobuf:"bytes,3,rep,name=words,proto3" json:"words,omitempty"` + // Confidence of the OCR results for the paragraph. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Paragraph) Reset() { *m = Paragraph{} } +func (m *Paragraph) String() string { return proto.CompactTextString(m) } +func (*Paragraph) ProtoMessage() {} +func (*Paragraph) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{3} +} +func (m *Paragraph) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Paragraph.Unmarshal(m, b) +} +func (m *Paragraph) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Paragraph.Marshal(b, m, deterministic) +} +func (dst *Paragraph) XXX_Merge(src proto.Message) { + xxx_messageInfo_Paragraph.Merge(dst, src) +} +func (m *Paragraph) XXX_Size() int { + return xxx_messageInfo_Paragraph.Size(m) +} +func (m *Paragraph) XXX_DiscardUnknown() { + xxx_messageInfo_Paragraph.DiscardUnknown(m) +} + +var xxx_messageInfo_Paragraph proto.InternalMessageInfo + +func (m *Paragraph) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Paragraph) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Paragraph) GetWords() []*Word { + if m != nil { + return m.Words + } + return nil +} + +func (m *Paragraph) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A word representation. +type Word struct { + // Additional information detected for the word. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the word. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertex order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // List of symbols in the word. + // The order of the symbols follows the natural reading order. + Symbols []*Symbol `protobuf:"bytes,3,rep,name=symbols,proto3" json:"symbols,omitempty"` + // Confidence of the OCR results for the word. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Word) Reset() { *m = Word{} } +func (m *Word) String() string { return proto.CompactTextString(m) } +func (*Word) ProtoMessage() {} +func (*Word) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{4} +} +func (m *Word) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Word.Unmarshal(m, b) +} +func (m *Word) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Word.Marshal(b, m, deterministic) +} +func (dst *Word) XXX_Merge(src proto.Message) { + xxx_messageInfo_Word.Merge(dst, src) +} +func (m *Word) XXX_Size() int { + return xxx_messageInfo_Word.Size(m) +} +func (m *Word) XXX_DiscardUnknown() { + xxx_messageInfo_Word.DiscardUnknown(m) +} + +var xxx_messageInfo_Word proto.InternalMessageInfo + +func (m *Word) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Word) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Word) GetSymbols() []*Symbol { + if m != nil { + return m.Symbols + } + return nil +} + +func (m *Word) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +// A single symbol representation. +type Symbol struct { + // Additional information detected for the symbol. + Property *TextAnnotation_TextProperty `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The bounding box for the symbol. + // The vertices are in the order of top-left, top-right, bottom-right, + // bottom-left. When a rotation of the bounding box is detected the rotation + // is represented as around the top-left corner as defined when the text is + // read in the 'natural' orientation. + // For example: + // * when the text is horizontal it might look like: + // 0----1 + // | | + // 3----2 + // * when it's rotated 180 degrees around the top-left corner it becomes: + // 2----3 + // | | + // 1----0 + // and the vertice order will still be (0, 1, 2, 3). + BoundingBox *BoundingPoly `protobuf:"bytes,2,opt,name=bounding_box,json=boundingBox,proto3" json:"bounding_box,omitempty"` + // The actual UTF-8 representation of the symbol. + Text string `protobuf:"bytes,3,opt,name=text,proto3" json:"text,omitempty"` + // Confidence of the OCR results for the symbol. Range [0, 1]. 
+ Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Symbol) Reset() { *m = Symbol{} } +func (m *Symbol) String() string { return proto.CompactTextString(m) } +func (*Symbol) ProtoMessage() {} +func (*Symbol) Descriptor() ([]byte, []int) { + return fileDescriptor_text_annotation_4efa847ad22b599e, []int{5} +} +func (m *Symbol) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Symbol.Unmarshal(m, b) +} +func (m *Symbol) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Symbol.Marshal(b, m, deterministic) +} +func (dst *Symbol) XXX_Merge(src proto.Message) { + xxx_messageInfo_Symbol.Merge(dst, src) +} +func (m *Symbol) XXX_Size() int { + return xxx_messageInfo_Symbol.Size(m) +} +func (m *Symbol) XXX_DiscardUnknown() { + xxx_messageInfo_Symbol.DiscardUnknown(m) +} + +var xxx_messageInfo_Symbol proto.InternalMessageInfo + +func (m *Symbol) GetProperty() *TextAnnotation_TextProperty { + if m != nil { + return m.Property + } + return nil +} + +func (m *Symbol) GetBoundingBox() *BoundingPoly { + if m != nil { + return m.BoundingBox + } + return nil +} + +func (m *Symbol) GetText() string { + if m != nil { + return m.Text + } + return "" +} + +func (m *Symbol) GetConfidence() float32 { + if m != nil { + return m.Confidence + } + return 0 +} + +func init() { + proto.RegisterType((*TextAnnotation)(nil), "google.cloud.vision.v1p4beta1.TextAnnotation") + proto.RegisterType((*TextAnnotation_DetectedLanguage)(nil), "google.cloud.vision.v1p4beta1.TextAnnotation.DetectedLanguage") + proto.RegisterType((*TextAnnotation_DetectedBreak)(nil), "google.cloud.vision.v1p4beta1.TextAnnotation.DetectedBreak") + proto.RegisterType((*TextAnnotation_TextProperty)(nil), "google.cloud.vision.v1p4beta1.TextAnnotation.TextProperty") + proto.RegisterType((*Page)(nil), "google.cloud.vision.v1p4beta1.Page") + proto.RegisterType((*Block)(nil), "google.cloud.vision.v1p4beta1.Block") + proto.RegisterType((*Paragraph)(nil), "google.cloud.vision.v1p4beta1.Paragraph") + proto.RegisterType((*Word)(nil), "google.cloud.vision.v1p4beta1.Word") + proto.RegisterType((*Symbol)(nil), "google.cloud.vision.v1p4beta1.Symbol") + proto.RegisterEnum("google.cloud.vision.v1p4beta1.TextAnnotation_DetectedBreak_BreakType", TextAnnotation_DetectedBreak_BreakType_name, TextAnnotation_DetectedBreak_BreakType_value) + proto.RegisterEnum("google.cloud.vision.v1p4beta1.Block_BlockType", Block_BlockType_name, Block_BlockType_value) +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/text_annotation.proto", fileDescriptor_text_annotation_4efa847ad22b599e) +} + +var fileDescriptor_text_annotation_4efa847ad22b599e = []byte{ + // 785 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0xdd, 0x6a, 0xdb, 0x48, + 0x14, 0x5e, 0xd9, 0x92, 0x63, 0x1d, 0x27, 0x46, 0x3b, 0xbb, 0x2c, 0xc6, 0xfb, 0x43, 0xd6, 0xd9, + 0x85, 0xc0, 0x2e, 0x32, 0x71, 0xf6, 0x26, 0x9b, 0xd2, 0x62, 0x39, 0xa2, 0x09, 0x71, 0x1d, 0x31, + 0xb1, 0x93, 0xa6, 0x37, 0x42, 0x3f, 0x13, 0x59, 0xc4, 0xd6, 0x08, 0x49, 0x49, 0xec, 0x47, 0x28, + 0x85, 0xf6, 0x1d, 0xfa, 0x1c, 0x7d, 0x99, 0x42, 0xaf, 0x4a, 0x1f, 0xa0, 0x97, 0x45, 0x23, 0xc9, + 0xb1, 0x5d, 0x1a, 0xf5, 0x87, 0x5e, 0xe4, 0xc6, 0xcc, 0x39, 0x3e, 0xdf, 0x99, 0xf3, 0x7d, 0x67, + 0x8e, 0x66, 0x60, 0xdb, 0xa1, 0xd4, 0x19, 0x91, 0xa6, 0x35, 0xa2, 0x97, 0x76, 
0xf3, 0xca, 0x0d, + 0x5d, 0xea, 0x35, 0xaf, 0xb6, 0xfc, 0xff, 0x4c, 0x12, 0x19, 0x5b, 0xcd, 0x88, 0x4c, 0x22, 0xdd, + 0xf0, 0x3c, 0x1a, 0x19, 0x91, 0x4b, 0x3d, 0xd9, 0x0f, 0x68, 0x44, 0xd1, 0xef, 0x09, 0x48, 0x66, + 0x20, 0x39, 0x01, 0xc9, 0x33, 0x50, 0xfd, 0xb7, 0x34, 0xa7, 0xe1, 0xbb, 0xcd, 0x1b, 0x6c, 0x98, + 0x80, 0xeb, 0xff, 0xde, 0xbe, 0xa3, 0x43, 0xe8, 0x98, 0x44, 0xc1, 0x34, 0x89, 0x6e, 0x3c, 0x13, + 0xa0, 0xda, 0x27, 0x93, 0xa8, 0x3d, 0xcb, 0x83, 0x76, 0x40, 0xf0, 0x0d, 0x87, 0x84, 0x35, 0x6e, + 0xbd, 0xb8, 0x59, 0x69, 0x6d, 0xc8, 0xb7, 0x56, 0x23, 0x6b, 0x86, 0x43, 0x70, 0x82, 0x40, 0x08, + 0xf8, 0x98, 0x51, 0xad, 0xb0, 0xce, 0x6d, 0x8a, 0x98, 0xad, 0xeb, 0xa7, 0x20, 0xed, 0x91, 0x88, + 0x58, 0x11, 0xb1, 0xbb, 0x86, 0xe7, 0x5c, 0x1a, 0x0e, 0x41, 0x1b, 0xb0, 0x36, 0x4a, 0xd7, 0xba, + 0x45, 0x6d, 0x52, 0xe3, 0x18, 0x60, 0x35, 0x73, 0x76, 0xa8, 0x4d, 0xd0, 0x1f, 0x00, 0x16, 0xf5, + 0xce, 0x5d, 0x9b, 0x78, 0x16, 0x61, 0x29, 0x0b, 0x78, 0xce, 0x53, 0x7f, 0xc7, 0xc1, 0x5a, 0x96, + 0x59, 0x09, 0x88, 0x71, 0x81, 0xce, 0x80, 0x8f, 0xa6, 0x7e, 0x92, 0xad, 0xda, 0x52, 0x73, 0x0a, + 0x5f, 0xa4, 0x2d, 0x2f, 0xa4, 0x92, 0xd9, 0x6f, 0x7f, 0xea, 0x13, 0xcc, 0x52, 0xa2, 0x5f, 0x41, + 0x74, 0x43, 0xdd, 0x0f, 0xc8, 0xb9, 0x3b, 0x61, 0xb5, 0x94, 0x71, 0xd9, 0x0d, 0x35, 0x66, 0x37, + 0x2c, 0x10, 0x67, 0xf1, 0xa8, 0x02, 0x2b, 0x83, 0xde, 0x61, 0xef, 0xe8, 0xb4, 0x27, 0xfd, 0x80, + 0x44, 0x10, 0x8e, 0xb5, 0x76, 0x47, 0x95, 0x38, 0x54, 0x05, 0x38, 0x1e, 0x60, 0x55, 0x4f, 0xec, + 0x02, 0x42, 0x50, 0x55, 0x8f, 0xba, 0xfa, 0x9c, 0xaf, 0x88, 0x00, 0x4a, 0xfb, 0x67, 0xda, 0xbe, + 0xda, 0x93, 0xf8, 0x38, 0xbe, 0x7b, 0xd0, 0x53, 0x75, 0x05, 0xab, 0xed, 0x43, 0x49, 0xa8, 0xbf, + 0xe6, 0x60, 0x35, 0x2e, 0x59, 0x0b, 0xa8, 0x4f, 0x82, 0x68, 0x8a, 0xc6, 0x80, 0xec, 0xb4, 0x66, + 0x3d, 0x13, 0x2e, 0x6b, 0xda, 0xfd, 0xaf, 0xe3, 0x9e, 0x35, 0x08, 0xff, 0x68, 0x2f, 0x79, 0x42, + 0x64, 0x42, 0x75, 0xb6, 0x9d, 0x19, 0xb3, 0x65, 0x32, 0x54, 0x5a, 0xbb, 0xdf, 0x20, 0x33, 0x5e, + 0xb3, 0xe7, 0xcd, 0xc6, 0x5b, 0x0e, 0xf8, 0xf8, 0x3c, 0xa1, 0x13, 0x28, 0xfb, 0x29, 0x4f, 0xd6, + 0xcd, 0x4a, 0xeb, 0xff, 0x2f, 0xdb, 0x66, 0x5e, 0x29, 0x3c, 0xcb, 0x85, 0x7e, 0x06, 0xe1, 0xda, + 0xb5, 0xa3, 0x21, 0xab, 0x5d, 0xc0, 0x89, 0x81, 0x7e, 0x81, 0xd2, 0x90, 0xb8, 0xce, 0x30, 0xaa, + 0x15, 0x99, 0x3b, 0xb5, 0xd0, 0x3d, 0x28, 0x99, 0x23, 0x6a, 0x5d, 0x84, 0x35, 0x9e, 0xa9, 0xfa, + 0x57, 0x4e, 0x0d, 0x4a, 0x1c, 0x8c, 0x53, 0xcc, 0xd2, 0xf9, 0x15, 0x96, 0xcf, 0x6f, 0xe3, 0x55, + 0x11, 0x04, 0x86, 0xf8, 0x6e, 0x6c, 0x7b, 0xb0, 0x6a, 0xd2, 0x4b, 0xcf, 0x76, 0x3d, 0x47, 0x37, + 0xe9, 0x24, 0x6d, 0xd8, 0x3f, 0x79, 0x2c, 0x52, 0x88, 0x46, 0x47, 0x53, 0x5c, 0xc9, 0x12, 0x28, + 0x74, 0x82, 0xf6, 0x01, 0x7c, 0x23, 0x30, 0x9c, 0xc0, 0xf0, 0x87, 0x61, 0xad, 0xc8, 0x34, 0xd9, + 0xcc, 0xfd, 0x3c, 0xa4, 0x00, 0x3c, 0x87, 0x45, 0x8f, 0x00, 0x98, 0x4a, 0x3a, 0x9b, 0x57, 0x9e, + 0xcd, 0xab, 0xfc, 0x39, 0xea, 0x26, 0xbf, 0x6c, 0x30, 0x45, 0x33, 0x5b, 0xe6, 0x4a, 0x8d, 0x41, + 0x9c, 0xe1, 0x16, 0x07, 0xb4, 0x0c, 0x7c, 0x5f, 0x7d, 0xdc, 0x97, 0xb8, 0x78, 0x54, 0xfb, 0x6d, + 0xa5, 0x1b, 0x8f, 0x66, 0x05, 0x56, 0xb4, 0x83, 0x4e, 0x7f, 0x80, 0xe3, 0x99, 0x14, 0x41, 0xc0, + 0x83, 0xae, 0x8a, 0x25, 0x3e, 0xf6, 0x2b, 0x6d, 0xdc, 0x39, 0xda, 0x53, 0x25, 0xa1, 0xf1, 0xbc, + 0x00, 0xe2, 0x8c, 0xdc, 0x9d, 0x69, 0xe1, 0x0e, 0x08, 0xd7, 0x34, 0xb0, 0xb3, 0xee, 0xe5, 0x7d, + 0xdc, 0x4f, 0x69, 0x60, 0xe3, 0x04, 0xb1, 0x24, 0x32, 0xff, 0x91, 0xc8, 0x2f, 0x0a, 0xc0, 0xc7, + 0xf1, 0x77, 0x46, 0x8b, 0x07, 0xb0, 0x12, 0x4e, 0xc7, 0x26, 0x1d, 0x65, 0x6a, 0xfc, 0x9d, 0x93, + 0xea, 
0x98, 0x45, 0xe3, 0x0c, 0x95, 0xab, 0xc8, 0x1b, 0x0e, 0x4a, 0x09, 0xe6, 0xce, 0x68, 0x92, + 0xdd, 0xe0, 0xc5, 0x9b, 0x1b, 0x3c, 0x8f, 0xa6, 0xf2, 0x94, 0x83, 0x3f, 0x2d, 0x3a, 0xbe, 0x7d, + 0x4f, 0xe5, 0xa7, 0x45, 0x42, 0x5a, 0xfc, 0xfc, 0xd0, 0xb8, 0x27, 0x9d, 0x14, 0xe5, 0xd0, 0xf8, + 0x0e, 0x93, 0x69, 0xe0, 0x34, 0x1d, 0xe2, 0xb1, 0xc7, 0x49, 0x33, 0xf9, 0xcb, 0xf0, 0xdd, 0xf0, + 0x13, 0xaf, 0x99, 0xdd, 0xc4, 0xf1, 0x9e, 0xe3, 0x5e, 0x16, 0xf8, 0x87, 0x9d, 0x93, 0x9e, 0x59, + 0x62, 0xc8, 0xed, 0x0f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xd3, 0x30, 0xc7, 0x3a, 0x78, 0x09, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/web_detection.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/web_detection.pb.go new file mode 100644 index 0000000..8bf0f72 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1/web_detection.pb.go @@ -0,0 +1,397 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/vision/v1p4beta1/web_detection.proto + +package vision // import "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Relevant information for the image from the Internet. +type WebDetection struct { + // Deduced entities from similar images on the Internet. + WebEntities []*WebDetection_WebEntity `protobuf:"bytes,1,rep,name=web_entities,json=webEntities,proto3" json:"web_entities,omitempty"` + // Fully matching images from the Internet. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,2,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images from the Internet. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its crops. + PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,3,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + // Web pages containing the matching images from the Internet. + PagesWithMatchingImages []*WebDetection_WebPage `protobuf:"bytes,4,rep,name=pages_with_matching_images,json=pagesWithMatchingImages,proto3" json:"pages_with_matching_images,omitempty"` + // The visually similar image results. + VisuallySimilarImages []*WebDetection_WebImage `protobuf:"bytes,6,rep,name=visually_similar_images,json=visuallySimilarImages,proto3" json:"visually_similar_images,omitempty"` + // The service's best guess as to the topic of the request image. + // Inferred from similar images on the open web. 
+ BestGuessLabels []*WebDetection_WebLabel `protobuf:"bytes,8,rep,name=best_guess_labels,json=bestGuessLabels,proto3" json:"best_guess_labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection) Reset() { *m = WebDetection{} } +func (m *WebDetection) String() string { return proto.CompactTextString(m) } +func (*WebDetection) ProtoMessage() {} +func (*WebDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_12add5f6a37c3a06, []int{0} +} +func (m *WebDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection.Unmarshal(m, b) +} +func (m *WebDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection.Marshal(b, m, deterministic) +} +func (dst *WebDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection.Merge(dst, src) +} +func (m *WebDetection) XXX_Size() int { + return xxx_messageInfo_WebDetection.Size(m) +} +func (m *WebDetection) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection proto.InternalMessageInfo + +func (m *WebDetection) GetWebEntities() []*WebDetection_WebEntity { + if m != nil { + return m.WebEntities + } + return nil +} + +func (m *WebDetection) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func (m *WebDetection) GetPagesWithMatchingImages() []*WebDetection_WebPage { + if m != nil { + return m.PagesWithMatchingImages + } + return nil +} + +func (m *WebDetection) GetVisuallySimilarImages() []*WebDetection_WebImage { + if m != nil { + return m.VisuallySimilarImages + } + return nil +} + +func (m *WebDetection) GetBestGuessLabels() []*WebDetection_WebLabel { + if m != nil { + return m.BestGuessLabels + } + return nil +} + +// Entity deduced from similar images on the Internet. +type WebDetection_WebEntity struct { + // Opaque entity ID. + EntityId string `protobuf:"bytes,1,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + // Overall relevancy score for the entity. + // Not normalized and not comparable across different image queries. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Canonical description of the entity, in English. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebEntity) Reset() { *m = WebDetection_WebEntity{} } +func (m *WebDetection_WebEntity) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebEntity) ProtoMessage() {} +func (*WebDetection_WebEntity) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_12add5f6a37c3a06, []int{0, 0} +} +func (m *WebDetection_WebEntity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebEntity.Unmarshal(m, b) +} +func (m *WebDetection_WebEntity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebEntity.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebEntity) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebEntity.Merge(dst, src) +} +func (m *WebDetection_WebEntity) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebEntity.Size(m) +} +func (m *WebDetection_WebEntity) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebEntity.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebEntity proto.InternalMessageInfo + +func (m *WebDetection_WebEntity) GetEntityId() string { + if m != nil { + return m.EntityId + } + return "" +} + +func (m *WebDetection_WebEntity) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebEntity) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Metadata for online images. +type WebDetection_WebImage struct { + // The result image URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the image. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebImage) Reset() { *m = WebDetection_WebImage{} } +func (m *WebDetection_WebImage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebImage) ProtoMessage() {} +func (*WebDetection_WebImage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_12add5f6a37c3a06, []int{0, 1} +} +func (m *WebDetection_WebImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebImage.Unmarshal(m, b) +} +func (m *WebDetection_WebImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebImage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebImage.Merge(dst, src) +} +func (m *WebDetection_WebImage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebImage.Size(m) +} +func (m *WebDetection_WebImage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebImage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebImage proto.InternalMessageInfo + +func (m *WebDetection_WebImage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebImage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +// Label to provide extra metadata for the web detection. +type WebDetection_WebLabel struct { + // Label for extra metadata. 
+ Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebLabel) Reset() { *m = WebDetection_WebLabel{} } +func (m *WebDetection_WebLabel) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebLabel) ProtoMessage() {} +func (*WebDetection_WebLabel) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_12add5f6a37c3a06, []int{0, 2} +} +func (m *WebDetection_WebLabel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebLabel.Unmarshal(m, b) +} +func (m *WebDetection_WebLabel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebLabel.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebLabel) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebLabel.Merge(dst, src) +} +func (m *WebDetection_WebLabel) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebLabel.Size(m) +} +func (m *WebDetection_WebLabel) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebLabel.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebLabel proto.InternalMessageInfo + +func (m *WebDetection_WebLabel) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *WebDetection_WebLabel) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Metadata for web pages. +type WebDetection_WebPage struct { + // The result web page URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // (Deprecated) Overall relevancy score for the web page. + Score float32 `protobuf:"fixed32,2,opt,name=score,proto3" json:"score,omitempty"` + // Title for the web page, may contain HTML markups. + PageTitle string `protobuf:"bytes,3,opt,name=page_title,json=pageTitle,proto3" json:"page_title,omitempty"` + // Fully matching images on the page. + // Can include resized copies of the query image. + FullMatchingImages []*WebDetection_WebImage `protobuf:"bytes,4,rep,name=full_matching_images,json=fullMatchingImages,proto3" json:"full_matching_images,omitempty"` + // Partial matching images on the page. + // Those images are similar enough to share some key-point features. For + // example an original image will likely have partial matching for its + // crops. 
+ PartialMatchingImages []*WebDetection_WebImage `protobuf:"bytes,5,rep,name=partial_matching_images,json=partialMatchingImages,proto3" json:"partial_matching_images,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WebDetection_WebPage) Reset() { *m = WebDetection_WebPage{} } +func (m *WebDetection_WebPage) String() string { return proto.CompactTextString(m) } +func (*WebDetection_WebPage) ProtoMessage() {} +func (*WebDetection_WebPage) Descriptor() ([]byte, []int) { + return fileDescriptor_web_detection_12add5f6a37c3a06, []int{0, 3} +} +func (m *WebDetection_WebPage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WebDetection_WebPage.Unmarshal(m, b) +} +func (m *WebDetection_WebPage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WebDetection_WebPage.Marshal(b, m, deterministic) +} +func (dst *WebDetection_WebPage) XXX_Merge(src proto.Message) { + xxx_messageInfo_WebDetection_WebPage.Merge(dst, src) +} +func (m *WebDetection_WebPage) XXX_Size() int { + return xxx_messageInfo_WebDetection_WebPage.Size(m) +} +func (m *WebDetection_WebPage) XXX_DiscardUnknown() { + xxx_messageInfo_WebDetection_WebPage.DiscardUnknown(m) +} + +var xxx_messageInfo_WebDetection_WebPage proto.InternalMessageInfo + +func (m *WebDetection_WebPage) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *WebDetection_WebPage) GetScore() float32 { + if m != nil { + return m.Score + } + return 0 +} + +func (m *WebDetection_WebPage) GetPageTitle() string { + if m != nil { + return m.PageTitle + } + return "" +} + +func (m *WebDetection_WebPage) GetFullMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.FullMatchingImages + } + return nil +} + +func (m *WebDetection_WebPage) GetPartialMatchingImages() []*WebDetection_WebImage { + if m != nil { + return m.PartialMatchingImages + } + return nil +} + +func init() { + proto.RegisterType((*WebDetection)(nil), "google.cloud.vision.v1p4beta1.WebDetection") + proto.RegisterType((*WebDetection_WebEntity)(nil), "google.cloud.vision.v1p4beta1.WebDetection.WebEntity") + proto.RegisterType((*WebDetection_WebImage)(nil), "google.cloud.vision.v1p4beta1.WebDetection.WebImage") + proto.RegisterType((*WebDetection_WebLabel)(nil), "google.cloud.vision.v1p4beta1.WebDetection.WebLabel") + proto.RegisterType((*WebDetection_WebPage)(nil), "google.cloud.vision.v1p4beta1.WebDetection.WebPage") +} + +func init() { + proto.RegisterFile("google/cloud/vision/v1p4beta1/web_detection.proto", fileDescriptor_web_detection_12add5f6a37c3a06) +} + +var fileDescriptor_web_detection_12add5f6a37c3a06 = []byte{ + // 517 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x94, 0x4d, 0x6f, 0xd3, 0x30, + 0x18, 0xc7, 0x95, 0xb4, 0x1b, 0x8d, 0x5b, 0x04, 0xb3, 0x86, 0x16, 0x05, 0x26, 0x15, 0xb8, 0xf4, + 0x94, 0xa8, 0xdb, 0x38, 0x71, 0x5b, 0x99, 0xa6, 0x49, 0x80, 0xaa, 0x80, 0x18, 0xe2, 0xe2, 0x39, + 0x89, 0xe7, 0x5a, 0x72, 0xe3, 0x28, 0x76, 0x5a, 0xf5, 0x0b, 0xc0, 0xf7, 0xe0, 0xc0, 0x67, 0xe4, + 0x88, 0xfc, 0x92, 0xa9, 0xa2, 0x6c, 0x62, 0x0c, 0xed, 0xe6, 0xe7, 0x69, 0xff, 0xbf, 0x5f, 0x9f, + 0xfa, 0x05, 0x8c, 0xa9, 0x10, 0x94, 0x93, 0x24, 0xe7, 0xa2, 0x29, 0x92, 0x05, 0x93, 0x4c, 0x94, + 0xc9, 0x62, 0x5c, 0x1d, 0x65, 0x44, 0xe1, 0x71, 0xb2, 0x24, 0x19, 0x2a, 0x88, 0x22, 0xb9, 0x62, + 0xa2, 0x8c, 0xab, 0x5a, 0x28, 0x01, 0xf7, 0x6d, 0x24, 0x36, 0x91, 0xd8, 0x46, 0xe2, 0xab, 0x48, + 
0xf4, 0xcc, 0x11, 0x71, 0xc5, 0x12, 0x5c, 0x96, 0x42, 0x61, 0x9d, 0x95, 0x36, 0xfc, 0xe2, 0x6b, + 0x00, 0x06, 0xe7, 0x24, 0x7b, 0xd3, 0x32, 0xe1, 0x67, 0x30, 0xd0, 0x12, 0x52, 0x2a, 0xa6, 0x18, + 0x91, 0xa1, 0x37, 0xec, 0x8c, 0xfa, 0x07, 0xaf, 0xe2, 0x1b, 0x25, 0xf1, 0x3a, 0x42, 0x17, 0x27, + 0x3a, 0xbe, 0x4a, 0xfb, 0x4b, 0xb7, 0x64, 0x44, 0xc2, 0x4b, 0xb0, 0x7b, 0xd9, 0x70, 0x8e, 0xe6, + 0x58, 0xe5, 0x33, 0x56, 0x52, 0xc4, 0xe6, 0x98, 0x12, 0x19, 0xfa, 0xc6, 0x70, 0x74, 0x4b, 0xc3, + 0x99, 0x0e, 0xa7, 0x50, 0x13, 0xdf, 0x39, 0xa0, 0x69, 0x49, 0xc8, 0xc1, 0x5e, 0x85, 0x6b, 0xc5, + 0xf0, 0xa6, 0xaa, 0x73, 0x07, 0xd5, 0x13, 0x07, 0xfd, 0xcd, 0x56, 0x81, 0xa8, 0xd2, 0x0b, 0xb4, + 0x64, 0x6a, 0xb6, 0x21, 0xec, 0x1a, 0xe1, 0xe1, 0x2d, 0x85, 0x53, 0xed, 0xdb, 0x33, 0xd8, 0x73, + 0xa6, 0x66, 0x9b, 0xf3, 0x2d, 0x98, 0x6c, 0x30, 0xe7, 0x2b, 0x24, 0xd9, 0x9c, 0x71, 0x5c, 0xb7, + 0xba, 0xed, 0xbb, 0xcc, 0xd7, 0x42, 0x3f, 0x58, 0xa6, 0xb3, 0x5d, 0x80, 0x9d, 0x8c, 0x48, 0x85, + 0x68, 0x43, 0xa4, 0x44, 0x1c, 0x67, 0x84, 0xcb, 0xb0, 0xf7, 0x4f, 0x9e, 0xb7, 0x3a, 0x9c, 0x3e, + 0xd2, 0xb8, 0x53, 0x4d, 0x33, 0xb5, 0x8c, 0x2e, 0x40, 0x70, 0x75, 0x62, 0xe0, 0x53, 0x10, 0x98, + 0xa3, 0xb7, 0x42, 0xac, 0x08, 0xbd, 0xa1, 0x37, 0x0a, 0xd2, 0x9e, 0x6d, 0x9c, 0x15, 0x70, 0x17, + 0x6c, 0xc9, 0x5c, 0xd4, 0x24, 0xf4, 0x87, 0xde, 0xc8, 0x4f, 0x6d, 0x01, 0x87, 0xa0, 0x5f, 0x10, + 0x99, 0xd7, 0xac, 0xd2, 0xa2, 0xb0, 0x63, 0x42, 0xeb, 0xad, 0xe8, 0x00, 0xf4, 0xda, 0x31, 0xe1, + 0x63, 0xd0, 0x69, 0x6a, 0xee, 0xd0, 0x7a, 0xf9, 0x67, 0x6a, 0x74, 0x62, 0x32, 0xe6, 0x27, 0xea, + 0x6f, 0x98, 0xc1, 0x5d, 0xca, 0x16, 0xf0, 0x25, 0x78, 0xc8, 0x71, 0x49, 0x1b, 0x4c, 0x09, 0xca, + 0x45, 0x61, 0xf3, 0x41, 0x3a, 0x68, 0x9b, 0x13, 0x51, 0x90, 0xe8, 0x87, 0x0f, 0x1e, 0xb8, 0x1d, + 0xfd, 0x5b, 0x35, 0xdc, 0x07, 0x40, 0xef, 0x3d, 0x52, 0x4c, 0x71, 0xe2, 0xe6, 0x09, 0x74, 0xe7, + 0xa3, 0x6e, 0x5c, 0x7b, 0x8f, 0xba, 0xf7, 0x77, 0x8f, 0xb6, 0xfe, 0xfb, 0x3d, 0x3a, 0xfe, 0xe6, + 0x81, 0xe7, 0xb9, 0x98, 0xdf, 0x8c, 0x3c, 0xde, 0x59, 0x67, 0x4e, 0xf5, 0x0b, 0x36, 0xf5, 0xbe, + 0x4c, 0x5c, 0x86, 0x0a, 0xfd, 0xd7, 0xc7, 0xa2, 0xa6, 0x09, 0x25, 0xa5, 0x79, 0xdf, 0x12, 0xfb, + 0x11, 0xae, 0x98, 0xbc, 0xe6, 0x49, 0x7d, 0x6d, 0x1b, 0x3f, 0x3d, 0xef, 0xbb, 0xdf, 0x3d, 0x9d, + 0x7c, 0x7a, 0x9f, 0x6d, 0x9b, 0xe4, 0xe1, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x45, 0xda, 0x7b, + 0x3e, 0x8b, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1/webrisk.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1/webrisk.pb.go new file mode 100644 index 0000000..0f58b7d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1/webrisk.pb.go @@ -0,0 +1,1222 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/webrisk/v1beta1/webrisk.proto + +package webrisk // import "google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of threat. This maps dirrectly to the threat list a threat may +// belong to. +type ThreatType int32 + +const ( + // Unknown. + ThreatType_THREAT_TYPE_UNSPECIFIED ThreatType = 0 + // Malware targeting any platform. + ThreatType_MALWARE ThreatType = 1 + // Social engineering targeting any platform. + ThreatType_SOCIAL_ENGINEERING ThreatType = 2 + // Unwanted software targeting any platform. + ThreatType_UNWANTED_SOFTWARE ThreatType = 3 +) + +var ThreatType_name = map[int32]string{ + 0: "THREAT_TYPE_UNSPECIFIED", + 1: "MALWARE", + 2: "SOCIAL_ENGINEERING", + 3: "UNWANTED_SOFTWARE", +} +var ThreatType_value = map[string]int32{ + "THREAT_TYPE_UNSPECIFIED": 0, + "MALWARE": 1, + "SOCIAL_ENGINEERING": 2, + "UNWANTED_SOFTWARE": 3, +} + +func (x ThreatType) String() string { + return proto.EnumName(ThreatType_name, int32(x)) +} +func (ThreatType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{0} +} + +// The ways in which threat entry sets can be compressed. +type CompressionType int32 + +const ( + // Unknown. + CompressionType_COMPRESSION_TYPE_UNSPECIFIED CompressionType = 0 + // Raw, uncompressed data. + CompressionType_RAW CompressionType = 1 + // Rice-Golomb encoded data. + CompressionType_RICE CompressionType = 2 +) + +var CompressionType_name = map[int32]string{ + 0: "COMPRESSION_TYPE_UNSPECIFIED", + 1: "RAW", + 2: "RICE", +} +var CompressionType_value = map[string]int32{ + "COMPRESSION_TYPE_UNSPECIFIED": 0, + "RAW": 1, + "RICE": 2, +} + +func (x CompressionType) String() string { + return proto.EnumName(CompressionType_name, int32(x)) +} +func (CompressionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{1} +} + +// The type of response sent to the client. +type ComputeThreatListDiffResponse_ResponseType int32 + +const ( + // Unknown. + ComputeThreatListDiffResponse_RESPONSE_TYPE_UNSPECIFIED ComputeThreatListDiffResponse_ResponseType = 0 + // Partial updates are applied to the client's existing local database. + ComputeThreatListDiffResponse_DIFF ComputeThreatListDiffResponse_ResponseType = 1 + // Full updates resets the client's entire local database. This means + // that either the client had no state, was seriously out-of-date, + // or the client is believed to be corrupt. + ComputeThreatListDiffResponse_RESET ComputeThreatListDiffResponse_ResponseType = 2 +) + +var ComputeThreatListDiffResponse_ResponseType_name = map[int32]string{ + 0: "RESPONSE_TYPE_UNSPECIFIED", + 1: "DIFF", + 2: "RESET", +} +var ComputeThreatListDiffResponse_ResponseType_value = map[string]int32{ + "RESPONSE_TYPE_UNSPECIFIED": 0, + "DIFF": 1, + "RESET": 2, +} + +func (x ComputeThreatListDiffResponse_ResponseType) String() string { + return proto.EnumName(ComputeThreatListDiffResponse_ResponseType_name, int32(x)) +} +func (ComputeThreatListDiffResponse_ResponseType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{1, 0} +} + +// Describes an API diff request. +type ComputeThreatListDiffRequest struct { + // Required. The ThreatList to update. 
+ ThreatType ThreatType `protobuf:"varint,1,opt,name=threat_type,json=threatType,proto3,enum=google.cloud.webrisk.v1beta1.ThreatType" json:"threat_type,omitempty"` + // The current version token of the client for the requested list (the + // client version that was received from the last successful diff). + VersionToken []byte `protobuf:"bytes,2,opt,name=version_token,json=versionToken,proto3" json:"version_token,omitempty"` + // The constraints associated with this request. + Constraints *ComputeThreatListDiffRequest_Constraints `protobuf:"bytes,3,opt,name=constraints,proto3" json:"constraints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComputeThreatListDiffRequest) Reset() { *m = ComputeThreatListDiffRequest{} } +func (m *ComputeThreatListDiffRequest) String() string { return proto.CompactTextString(m) } +func (*ComputeThreatListDiffRequest) ProtoMessage() {} +func (*ComputeThreatListDiffRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{0} +} +func (m *ComputeThreatListDiffRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComputeThreatListDiffRequest.Unmarshal(m, b) +} +func (m *ComputeThreatListDiffRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComputeThreatListDiffRequest.Marshal(b, m, deterministic) +} +func (dst *ComputeThreatListDiffRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComputeThreatListDiffRequest.Merge(dst, src) +} +func (m *ComputeThreatListDiffRequest) XXX_Size() int { + return xxx_messageInfo_ComputeThreatListDiffRequest.Size(m) +} +func (m *ComputeThreatListDiffRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ComputeThreatListDiffRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ComputeThreatListDiffRequest proto.InternalMessageInfo + +func (m *ComputeThreatListDiffRequest) GetThreatType() ThreatType { + if m != nil { + return m.ThreatType + } + return ThreatType_THREAT_TYPE_UNSPECIFIED +} + +func (m *ComputeThreatListDiffRequest) GetVersionToken() []byte { + if m != nil { + return m.VersionToken + } + return nil +} + +func (m *ComputeThreatListDiffRequest) GetConstraints() *ComputeThreatListDiffRequest_Constraints { + if m != nil { + return m.Constraints + } + return nil +} + +// The constraints for this diff. +type ComputeThreatListDiffRequest_Constraints struct { + // The maximum size in number of entries. The diff will not contain more + // entries than this value. This should be a power of 2 between 2**10 and + // 2**20. If zero, no diff size limit is set. + MaxDiffEntries int32 `protobuf:"varint,1,opt,name=max_diff_entries,json=maxDiffEntries,proto3" json:"max_diff_entries,omitempty"` + // Sets the maximum number of entries that the client is willing to have + // in the local database. This should be a power of 2 between 2**10 and + // 2**20. If zero, no database size limit is set. + MaxDatabaseEntries int32 `protobuf:"varint,2,opt,name=max_database_entries,json=maxDatabaseEntries,proto3" json:"max_database_entries,omitempty"` + // The compression types supported by the client. 
+ SupportedCompressions []CompressionType `protobuf:"varint,3,rep,packed,name=supported_compressions,json=supportedCompressions,proto3,enum=google.cloud.webrisk.v1beta1.CompressionType" json:"supported_compressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComputeThreatListDiffRequest_Constraints) Reset() { + *m = ComputeThreatListDiffRequest_Constraints{} +} +func (m *ComputeThreatListDiffRequest_Constraints) String() string { return proto.CompactTextString(m) } +func (*ComputeThreatListDiffRequest_Constraints) ProtoMessage() {} +func (*ComputeThreatListDiffRequest_Constraints) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{0, 0} +} +func (m *ComputeThreatListDiffRequest_Constraints) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComputeThreatListDiffRequest_Constraints.Unmarshal(m, b) +} +func (m *ComputeThreatListDiffRequest_Constraints) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComputeThreatListDiffRequest_Constraints.Marshal(b, m, deterministic) +} +func (dst *ComputeThreatListDiffRequest_Constraints) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComputeThreatListDiffRequest_Constraints.Merge(dst, src) +} +func (m *ComputeThreatListDiffRequest_Constraints) XXX_Size() int { + return xxx_messageInfo_ComputeThreatListDiffRequest_Constraints.Size(m) +} +func (m *ComputeThreatListDiffRequest_Constraints) XXX_DiscardUnknown() { + xxx_messageInfo_ComputeThreatListDiffRequest_Constraints.DiscardUnknown(m) +} + +var xxx_messageInfo_ComputeThreatListDiffRequest_Constraints proto.InternalMessageInfo + +func (m *ComputeThreatListDiffRequest_Constraints) GetMaxDiffEntries() int32 { + if m != nil { + return m.MaxDiffEntries + } + return 0 +} + +func (m *ComputeThreatListDiffRequest_Constraints) GetMaxDatabaseEntries() int32 { + if m != nil { + return m.MaxDatabaseEntries + } + return 0 +} + +func (m *ComputeThreatListDiffRequest_Constraints) GetSupportedCompressions() []CompressionType { + if m != nil { + return m.SupportedCompressions + } + return nil +} + +type ComputeThreatListDiffResponse struct { + // The type of response. This may indicate that an action is required by the + // client when the response is received. + ResponseType ComputeThreatListDiffResponse_ResponseType `protobuf:"varint,4,opt,name=response_type,json=responseType,proto3,enum=google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse_ResponseType" json:"response_type,omitempty"` + // A set of entries to add to a local threat type's list. + Additions *ThreatEntryAdditions `protobuf:"bytes,5,opt,name=additions,proto3" json:"additions,omitempty"` + // A set of entries to remove from a local threat type's list. + // This field may be empty. + Removals *ThreatEntryRemovals `protobuf:"bytes,6,opt,name=removals,proto3" json:"removals,omitempty"` + // The new opaque client version token. + NewVersionToken []byte `protobuf:"bytes,7,opt,name=new_version_token,json=newVersionToken,proto3" json:"new_version_token,omitempty"` + // The expected SHA256 hash of the client state; that is, of the sorted list + // of all hashes present in the database after applying the provided diff. + // If the client state doesn't match the expected state, the client must + // disregard this diff and retry later. 
+ Checksum *ComputeThreatListDiffResponse_Checksum `protobuf:"bytes,8,opt,name=checksum,proto3" json:"checksum,omitempty"` + // The soonest the client should wait before issuing any diff + // request. Querying sooner is unlikely to produce a meaningful diff. + // Waiting longer is acceptable considering the use case. + // If this field is not set clients may update as soon as they want. + RecommendedNextDiff *timestamp.Timestamp `protobuf:"bytes,2,opt,name=recommended_next_diff,json=recommendedNextDiff,proto3" json:"recommended_next_diff,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComputeThreatListDiffResponse) Reset() { *m = ComputeThreatListDiffResponse{} } +func (m *ComputeThreatListDiffResponse) String() string { return proto.CompactTextString(m) } +func (*ComputeThreatListDiffResponse) ProtoMessage() {} +func (*ComputeThreatListDiffResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{1} +} +func (m *ComputeThreatListDiffResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComputeThreatListDiffResponse.Unmarshal(m, b) +} +func (m *ComputeThreatListDiffResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComputeThreatListDiffResponse.Marshal(b, m, deterministic) +} +func (dst *ComputeThreatListDiffResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComputeThreatListDiffResponse.Merge(dst, src) +} +func (m *ComputeThreatListDiffResponse) XXX_Size() int { + return xxx_messageInfo_ComputeThreatListDiffResponse.Size(m) +} +func (m *ComputeThreatListDiffResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ComputeThreatListDiffResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ComputeThreatListDiffResponse proto.InternalMessageInfo + +func (m *ComputeThreatListDiffResponse) GetResponseType() ComputeThreatListDiffResponse_ResponseType { + if m != nil { + return m.ResponseType + } + return ComputeThreatListDiffResponse_RESPONSE_TYPE_UNSPECIFIED +} + +func (m *ComputeThreatListDiffResponse) GetAdditions() *ThreatEntryAdditions { + if m != nil { + return m.Additions + } + return nil +} + +func (m *ComputeThreatListDiffResponse) GetRemovals() *ThreatEntryRemovals { + if m != nil { + return m.Removals + } + return nil +} + +func (m *ComputeThreatListDiffResponse) GetNewVersionToken() []byte { + if m != nil { + return m.NewVersionToken + } + return nil +} + +func (m *ComputeThreatListDiffResponse) GetChecksum() *ComputeThreatListDiffResponse_Checksum { + if m != nil { + return m.Checksum + } + return nil +} + +func (m *ComputeThreatListDiffResponse) GetRecommendedNextDiff() *timestamp.Timestamp { + if m != nil { + return m.RecommendedNextDiff + } + return nil +} + +// The expected state of a client's local database. +type ComputeThreatListDiffResponse_Checksum struct { + // The SHA256 hash of the client state; that is, of the sorted list of all + // hashes present in the database. 
+ Sha256 []byte `protobuf:"bytes,1,opt,name=sha256,proto3" json:"sha256,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComputeThreatListDiffResponse_Checksum) Reset() { + *m = ComputeThreatListDiffResponse_Checksum{} +} +func (m *ComputeThreatListDiffResponse_Checksum) String() string { return proto.CompactTextString(m) } +func (*ComputeThreatListDiffResponse_Checksum) ProtoMessage() {} +func (*ComputeThreatListDiffResponse_Checksum) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{1, 0} +} +func (m *ComputeThreatListDiffResponse_Checksum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComputeThreatListDiffResponse_Checksum.Unmarshal(m, b) +} +func (m *ComputeThreatListDiffResponse_Checksum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComputeThreatListDiffResponse_Checksum.Marshal(b, m, deterministic) +} +func (dst *ComputeThreatListDiffResponse_Checksum) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComputeThreatListDiffResponse_Checksum.Merge(dst, src) +} +func (m *ComputeThreatListDiffResponse_Checksum) XXX_Size() int { + return xxx_messageInfo_ComputeThreatListDiffResponse_Checksum.Size(m) +} +func (m *ComputeThreatListDiffResponse_Checksum) XXX_DiscardUnknown() { + xxx_messageInfo_ComputeThreatListDiffResponse_Checksum.DiscardUnknown(m) +} + +var xxx_messageInfo_ComputeThreatListDiffResponse_Checksum proto.InternalMessageInfo + +func (m *ComputeThreatListDiffResponse_Checksum) GetSha256() []byte { + if m != nil { + return m.Sha256 + } + return nil +} + +// Request to check URI entries against threatLists. +type SearchUrisRequest struct { + // The URI to be checked for matches. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // Required. The ThreatLists to search in. + ThreatTypes []ThreatType `protobuf:"varint,2,rep,packed,name=threat_types,json=threatTypes,proto3,enum=google.cloud.webrisk.v1beta1.ThreatType" json:"threat_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchUrisRequest) Reset() { *m = SearchUrisRequest{} } +func (m *SearchUrisRequest) String() string { return proto.CompactTextString(m) } +func (*SearchUrisRequest) ProtoMessage() {} +func (*SearchUrisRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{2} +} +func (m *SearchUrisRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchUrisRequest.Unmarshal(m, b) +} +func (m *SearchUrisRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchUrisRequest.Marshal(b, m, deterministic) +} +func (dst *SearchUrisRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchUrisRequest.Merge(dst, src) +} +func (m *SearchUrisRequest) XXX_Size() int { + return xxx_messageInfo_SearchUrisRequest.Size(m) +} +func (m *SearchUrisRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchUrisRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchUrisRequest proto.InternalMessageInfo + +func (m *SearchUrisRequest) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *SearchUrisRequest) GetThreatTypes() []ThreatType { + if m != nil { + return m.ThreatTypes + } + return nil +} + +type SearchUrisResponse struct { + // The threat list matches. This may be empty if the URI is on no list. 
+ Threat *SearchUrisResponse_ThreatUri `protobuf:"bytes,1,opt,name=threat,proto3" json:"threat,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchUrisResponse) Reset() { *m = SearchUrisResponse{} } +func (m *SearchUrisResponse) String() string { return proto.CompactTextString(m) } +func (*SearchUrisResponse) ProtoMessage() {} +func (*SearchUrisResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{3} +} +func (m *SearchUrisResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchUrisResponse.Unmarshal(m, b) +} +func (m *SearchUrisResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchUrisResponse.Marshal(b, m, deterministic) +} +func (dst *SearchUrisResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchUrisResponse.Merge(dst, src) +} +func (m *SearchUrisResponse) XXX_Size() int { + return xxx_messageInfo_SearchUrisResponse.Size(m) +} +func (m *SearchUrisResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchUrisResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchUrisResponse proto.InternalMessageInfo + +func (m *SearchUrisResponse) GetThreat() *SearchUrisResponse_ThreatUri { + if m != nil { + return m.Threat + } + return nil +} + +// Contains threat information on a matching uri. +type SearchUrisResponse_ThreatUri struct { + // The ThreatList this threat belongs to. + ThreatTypes []ThreatType `protobuf:"varint,1,rep,packed,name=threat_types,json=threatTypes,proto3,enum=google.cloud.webrisk.v1beta1.ThreatType" json:"threat_types,omitempty"` + // The cache lifetime for the returned match. Clients must not cache this + // response past this timestamp to avoid false positives. 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchUrisResponse_ThreatUri) Reset() { *m = SearchUrisResponse_ThreatUri{} } +func (m *SearchUrisResponse_ThreatUri) String() string { return proto.CompactTextString(m) } +func (*SearchUrisResponse_ThreatUri) ProtoMessage() {} +func (*SearchUrisResponse_ThreatUri) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{3, 0} +} +func (m *SearchUrisResponse_ThreatUri) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchUrisResponse_ThreatUri.Unmarshal(m, b) +} +func (m *SearchUrisResponse_ThreatUri) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchUrisResponse_ThreatUri.Marshal(b, m, deterministic) +} +func (dst *SearchUrisResponse_ThreatUri) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchUrisResponse_ThreatUri.Merge(dst, src) +} +func (m *SearchUrisResponse_ThreatUri) XXX_Size() int { + return xxx_messageInfo_SearchUrisResponse_ThreatUri.Size(m) +} +func (m *SearchUrisResponse_ThreatUri) XXX_DiscardUnknown() { + xxx_messageInfo_SearchUrisResponse_ThreatUri.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchUrisResponse_ThreatUri proto.InternalMessageInfo + +func (m *SearchUrisResponse_ThreatUri) GetThreatTypes() []ThreatType { + if m != nil { + return m.ThreatTypes + } + return nil +} + +func (m *SearchUrisResponse_ThreatUri) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +// Request to return full hashes matched by the provided hash prefixes. +type SearchHashesRequest struct { + // A hash prefix, consisting of the most significant 4-32 bytes of a SHA256 + // hash. For JSON requests, this field is base64-encoded. + HashPrefix []byte `protobuf:"bytes,1,opt,name=hash_prefix,json=hashPrefix,proto3" json:"hash_prefix,omitempty"` + // Required. The ThreatLists to search in. 
+ ThreatTypes []ThreatType `protobuf:"varint,2,rep,packed,name=threat_types,json=threatTypes,proto3,enum=google.cloud.webrisk.v1beta1.ThreatType" json:"threat_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchHashesRequest) Reset() { *m = SearchHashesRequest{} } +func (m *SearchHashesRequest) String() string { return proto.CompactTextString(m) } +func (*SearchHashesRequest) ProtoMessage() {} +func (*SearchHashesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{4} +} +func (m *SearchHashesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchHashesRequest.Unmarshal(m, b) +} +func (m *SearchHashesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchHashesRequest.Marshal(b, m, deterministic) +} +func (dst *SearchHashesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchHashesRequest.Merge(dst, src) +} +func (m *SearchHashesRequest) XXX_Size() int { + return xxx_messageInfo_SearchHashesRequest.Size(m) +} +func (m *SearchHashesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchHashesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchHashesRequest proto.InternalMessageInfo + +func (m *SearchHashesRequest) GetHashPrefix() []byte { + if m != nil { + return m.HashPrefix + } + return nil +} + +func (m *SearchHashesRequest) GetThreatTypes() []ThreatType { + if m != nil { + return m.ThreatTypes + } + return nil +} + +type SearchHashesResponse struct { + // The full hashes that matched the requested prefixes. + // The hash will be populated in the key. + Threats []*SearchHashesResponse_ThreatHash `protobuf:"bytes,1,rep,name=threats,proto3" json:"threats,omitempty"` + // For requested entities that did not match the threat list, how long to + // cache the response until. 
+ NegativeExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=negative_expire_time,json=negativeExpireTime,proto3" json:"negative_expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchHashesResponse) Reset() { *m = SearchHashesResponse{} } +func (m *SearchHashesResponse) String() string { return proto.CompactTextString(m) } +func (*SearchHashesResponse) ProtoMessage() {} +func (*SearchHashesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{5} +} +func (m *SearchHashesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchHashesResponse.Unmarshal(m, b) +} +func (m *SearchHashesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchHashesResponse.Marshal(b, m, deterministic) +} +func (dst *SearchHashesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchHashesResponse.Merge(dst, src) +} +func (m *SearchHashesResponse) XXX_Size() int { + return xxx_messageInfo_SearchHashesResponse.Size(m) +} +func (m *SearchHashesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchHashesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchHashesResponse proto.InternalMessageInfo + +func (m *SearchHashesResponse) GetThreats() []*SearchHashesResponse_ThreatHash { + if m != nil { + return m.Threats + } + return nil +} + +func (m *SearchHashesResponse) GetNegativeExpireTime() *timestamp.Timestamp { + if m != nil { + return m.NegativeExpireTime + } + return nil +} + +// Contains threat information on a matching hash. +type SearchHashesResponse_ThreatHash struct { + // The ThreatList this threat belongs to. + // This must contain at least one entry. + ThreatTypes []ThreatType `protobuf:"varint,1,rep,packed,name=threat_types,json=threatTypes,proto3,enum=google.cloud.webrisk.v1beta1.ThreatType" json:"threat_types,omitempty"` + // A 32 byte SHA256 hash. This field is in binary format. For JSON + // requests, hashes are base64-encoded. + Hash []byte `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"` + // The cache lifetime for the returned match. Clients must not cache this + // response past this timestamp to avoid false positives. 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchHashesResponse_ThreatHash) Reset() { *m = SearchHashesResponse_ThreatHash{} } +func (m *SearchHashesResponse_ThreatHash) String() string { return proto.CompactTextString(m) } +func (*SearchHashesResponse_ThreatHash) ProtoMessage() {} +func (*SearchHashesResponse_ThreatHash) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{5, 0} +} +func (m *SearchHashesResponse_ThreatHash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchHashesResponse_ThreatHash.Unmarshal(m, b) +} +func (m *SearchHashesResponse_ThreatHash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchHashesResponse_ThreatHash.Marshal(b, m, deterministic) +} +func (dst *SearchHashesResponse_ThreatHash) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchHashesResponse_ThreatHash.Merge(dst, src) +} +func (m *SearchHashesResponse_ThreatHash) XXX_Size() int { + return xxx_messageInfo_SearchHashesResponse_ThreatHash.Size(m) +} +func (m *SearchHashesResponse_ThreatHash) XXX_DiscardUnknown() { + xxx_messageInfo_SearchHashesResponse_ThreatHash.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchHashesResponse_ThreatHash proto.InternalMessageInfo + +func (m *SearchHashesResponse_ThreatHash) GetThreatTypes() []ThreatType { + if m != nil { + return m.ThreatTypes + } + return nil +} + +func (m *SearchHashesResponse_ThreatHash) GetHash() []byte { + if m != nil { + return m.Hash + } + return nil +} + +func (m *SearchHashesResponse_ThreatHash) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +// Contains the set of entries to add to a local database. +// May contain a combination of compressed and raw data in a single response. +type ThreatEntryAdditions struct { + // The raw SHA256-formatted entries. + // Repeated to allow returning sets of hashes with different prefix sizes. + RawHashes []*RawHashes `protobuf:"bytes,1,rep,name=raw_hashes,json=rawHashes,proto3" json:"raw_hashes,omitempty"` + // The encoded 4-byte prefixes of SHA256-formatted entries, using a + // Golomb-Rice encoding. The hashes are converted to uint32, sorted in + // ascending order, then delta encoded and stored as encoded_data. 
+ RiceHashes *RiceDeltaEncoding `protobuf:"bytes,2,opt,name=rice_hashes,json=riceHashes,proto3" json:"rice_hashes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ThreatEntryAdditions) Reset() { *m = ThreatEntryAdditions{} } +func (m *ThreatEntryAdditions) String() string { return proto.CompactTextString(m) } +func (*ThreatEntryAdditions) ProtoMessage() {} +func (*ThreatEntryAdditions) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{6} +} +func (m *ThreatEntryAdditions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ThreatEntryAdditions.Unmarshal(m, b) +} +func (m *ThreatEntryAdditions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ThreatEntryAdditions.Marshal(b, m, deterministic) +} +func (dst *ThreatEntryAdditions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ThreatEntryAdditions.Merge(dst, src) +} +func (m *ThreatEntryAdditions) XXX_Size() int { + return xxx_messageInfo_ThreatEntryAdditions.Size(m) +} +func (m *ThreatEntryAdditions) XXX_DiscardUnknown() { + xxx_messageInfo_ThreatEntryAdditions.DiscardUnknown(m) +} + +var xxx_messageInfo_ThreatEntryAdditions proto.InternalMessageInfo + +func (m *ThreatEntryAdditions) GetRawHashes() []*RawHashes { + if m != nil { + return m.RawHashes + } + return nil +} + +func (m *ThreatEntryAdditions) GetRiceHashes() *RiceDeltaEncoding { + if m != nil { + return m.RiceHashes + } + return nil +} + +// Contains the set of entries to remove from a local database. +type ThreatEntryRemovals struct { + // The raw removal indices for a local list. + RawIndices *RawIndices `protobuf:"bytes,1,opt,name=raw_indices,json=rawIndices,proto3" json:"raw_indices,omitempty"` + // The encoded local, lexicographically-sorted list indices, using a + // Golomb-Rice encoding. Used for sending compressed removal indices. The + // removal indices (uint32) are sorted in ascending order, then delta encoded + // and stored as encoded_data. 
+ RiceIndices *RiceDeltaEncoding `protobuf:"bytes,2,opt,name=rice_indices,json=riceIndices,proto3" json:"rice_indices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ThreatEntryRemovals) Reset() { *m = ThreatEntryRemovals{} } +func (m *ThreatEntryRemovals) String() string { return proto.CompactTextString(m) } +func (*ThreatEntryRemovals) ProtoMessage() {} +func (*ThreatEntryRemovals) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{7} +} +func (m *ThreatEntryRemovals) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ThreatEntryRemovals.Unmarshal(m, b) +} +func (m *ThreatEntryRemovals) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ThreatEntryRemovals.Marshal(b, m, deterministic) +} +func (dst *ThreatEntryRemovals) XXX_Merge(src proto.Message) { + xxx_messageInfo_ThreatEntryRemovals.Merge(dst, src) +} +func (m *ThreatEntryRemovals) XXX_Size() int { + return xxx_messageInfo_ThreatEntryRemovals.Size(m) +} +func (m *ThreatEntryRemovals) XXX_DiscardUnknown() { + xxx_messageInfo_ThreatEntryRemovals.DiscardUnknown(m) +} + +var xxx_messageInfo_ThreatEntryRemovals proto.InternalMessageInfo + +func (m *ThreatEntryRemovals) GetRawIndices() *RawIndices { + if m != nil { + return m.RawIndices + } + return nil +} + +func (m *ThreatEntryRemovals) GetRiceIndices() *RiceDeltaEncoding { + if m != nil { + return m.RiceIndices + } + return nil +} + +// A set of raw indices to remove from a local list. +type RawIndices struct { + // The indices to remove from a lexicographically-sorted local list. + Indices []int32 `protobuf:"varint,1,rep,packed,name=indices,proto3" json:"indices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RawIndices) Reset() { *m = RawIndices{} } +func (m *RawIndices) String() string { return proto.CompactTextString(m) } +func (*RawIndices) ProtoMessage() {} +func (*RawIndices) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{8} +} +func (m *RawIndices) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RawIndices.Unmarshal(m, b) +} +func (m *RawIndices) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RawIndices.Marshal(b, m, deterministic) +} +func (dst *RawIndices) XXX_Merge(src proto.Message) { + xxx_messageInfo_RawIndices.Merge(dst, src) +} +func (m *RawIndices) XXX_Size() int { + return xxx_messageInfo_RawIndices.Size(m) +} +func (m *RawIndices) XXX_DiscardUnknown() { + xxx_messageInfo_RawIndices.DiscardUnknown(m) +} + +var xxx_messageInfo_RawIndices proto.InternalMessageInfo + +func (m *RawIndices) GetIndices() []int32 { + if m != nil { + return m.Indices + } + return nil +} + +// The uncompressed threat entries in hash format. +// Hashes can be anywhere from 4 to 32 bytes in size. A large majority are 4 +// bytes, but some hashes are lengthened if they collide with the hash of a +// popular URI. +// +// Used for sending ThreatEntryAdditons to clients that do not support +// compression, or when sending non-4-byte hashes to clients that do support +// compression. +type RawHashes struct { + // The number of bytes for each prefix encoded below. This field can be + // anywhere from 4 (shortest prefix) to 32 (full SHA256 hash). 
+ PrefixSize int32 `protobuf:"varint,1,opt,name=prefix_size,json=prefixSize,proto3" json:"prefix_size,omitempty"` + // The hashes, in binary format, concatenated into one long string. Hashes are + // sorted in lexicographic order. For JSON API users, hashes are + // base64-encoded. + RawHashes []byte `protobuf:"bytes,2,opt,name=raw_hashes,json=rawHashes,proto3" json:"raw_hashes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RawHashes) Reset() { *m = RawHashes{} } +func (m *RawHashes) String() string { return proto.CompactTextString(m) } +func (*RawHashes) ProtoMessage() {} +func (*RawHashes) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{9} +} +func (m *RawHashes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RawHashes.Unmarshal(m, b) +} +func (m *RawHashes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RawHashes.Marshal(b, m, deterministic) +} +func (dst *RawHashes) XXX_Merge(src proto.Message) { + xxx_messageInfo_RawHashes.Merge(dst, src) +} +func (m *RawHashes) XXX_Size() int { + return xxx_messageInfo_RawHashes.Size(m) +} +func (m *RawHashes) XXX_DiscardUnknown() { + xxx_messageInfo_RawHashes.DiscardUnknown(m) +} + +var xxx_messageInfo_RawHashes proto.InternalMessageInfo + +func (m *RawHashes) GetPrefixSize() int32 { + if m != nil { + return m.PrefixSize + } + return 0 +} + +func (m *RawHashes) GetRawHashes() []byte { + if m != nil { + return m.RawHashes + } + return nil +} + +// The Rice-Golomb encoded data. Used for sending compressed 4-byte hashes or +// compressed removal indices. +type RiceDeltaEncoding struct { + // The offset of the first entry in the encoded data, or, if only a single + // integer was encoded, that single integer's value. If the field is empty or + // missing, assume zero. + FirstValue int64 `protobuf:"varint,1,opt,name=first_value,json=firstValue,proto3" json:"first_value,omitempty"` + // The Golomb-Rice parameter, which is a number between 2 and 28. This field + // is missing (that is, zero) if `num_entries` is zero. + RiceParameter int32 `protobuf:"varint,2,opt,name=rice_parameter,json=riceParameter,proto3" json:"rice_parameter,omitempty"` + // The number of entries that are delta encoded in the encoded data. If only a + // single integer was encoded, this will be zero and the single value will be + // stored in `first_value`. + EntryCount int32 `protobuf:"varint,3,opt,name=entry_count,json=entryCount,proto3" json:"entry_count,omitempty"` + // The encoded deltas that are encoded using the Golomb-Rice coder. 
+ EncodedData []byte `protobuf:"bytes,4,opt,name=encoded_data,json=encodedData,proto3" json:"encoded_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RiceDeltaEncoding) Reset() { *m = RiceDeltaEncoding{} } +func (m *RiceDeltaEncoding) String() string { return proto.CompactTextString(m) } +func (*RiceDeltaEncoding) ProtoMessage() {} +func (*RiceDeltaEncoding) Descriptor() ([]byte, []int) { + return fileDescriptor_webrisk_6b128999d2c68f99, []int{10} +} +func (m *RiceDeltaEncoding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RiceDeltaEncoding.Unmarshal(m, b) +} +func (m *RiceDeltaEncoding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RiceDeltaEncoding.Marshal(b, m, deterministic) +} +func (dst *RiceDeltaEncoding) XXX_Merge(src proto.Message) { + xxx_messageInfo_RiceDeltaEncoding.Merge(dst, src) +} +func (m *RiceDeltaEncoding) XXX_Size() int { + return xxx_messageInfo_RiceDeltaEncoding.Size(m) +} +func (m *RiceDeltaEncoding) XXX_DiscardUnknown() { + xxx_messageInfo_RiceDeltaEncoding.DiscardUnknown(m) +} + +var xxx_messageInfo_RiceDeltaEncoding proto.InternalMessageInfo + +func (m *RiceDeltaEncoding) GetFirstValue() int64 { + if m != nil { + return m.FirstValue + } + return 0 +} + +func (m *RiceDeltaEncoding) GetRiceParameter() int32 { + if m != nil { + return m.RiceParameter + } + return 0 +} + +func (m *RiceDeltaEncoding) GetEntryCount() int32 { + if m != nil { + return m.EntryCount + } + return 0 +} + +func (m *RiceDeltaEncoding) GetEncodedData() []byte { + if m != nil { + return m.EncodedData + } + return nil +} + +func init() { + proto.RegisterType((*ComputeThreatListDiffRequest)(nil), "google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest") + proto.RegisterType((*ComputeThreatListDiffRequest_Constraints)(nil), "google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest.Constraints") + proto.RegisterType((*ComputeThreatListDiffResponse)(nil), "google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse") + proto.RegisterType((*ComputeThreatListDiffResponse_Checksum)(nil), "google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.Checksum") + proto.RegisterType((*SearchUrisRequest)(nil), "google.cloud.webrisk.v1beta1.SearchUrisRequest") + proto.RegisterType((*SearchUrisResponse)(nil), "google.cloud.webrisk.v1beta1.SearchUrisResponse") + proto.RegisterType((*SearchUrisResponse_ThreatUri)(nil), "google.cloud.webrisk.v1beta1.SearchUrisResponse.ThreatUri") + proto.RegisterType((*SearchHashesRequest)(nil), "google.cloud.webrisk.v1beta1.SearchHashesRequest") + proto.RegisterType((*SearchHashesResponse)(nil), "google.cloud.webrisk.v1beta1.SearchHashesResponse") + proto.RegisterType((*SearchHashesResponse_ThreatHash)(nil), "google.cloud.webrisk.v1beta1.SearchHashesResponse.ThreatHash") + proto.RegisterType((*ThreatEntryAdditions)(nil), "google.cloud.webrisk.v1beta1.ThreatEntryAdditions") + proto.RegisterType((*ThreatEntryRemovals)(nil), "google.cloud.webrisk.v1beta1.ThreatEntryRemovals") + proto.RegisterType((*RawIndices)(nil), "google.cloud.webrisk.v1beta1.RawIndices") + proto.RegisterType((*RawHashes)(nil), "google.cloud.webrisk.v1beta1.RawHashes") + proto.RegisterType((*RiceDeltaEncoding)(nil), "google.cloud.webrisk.v1beta1.RiceDeltaEncoding") + proto.RegisterEnum("google.cloud.webrisk.v1beta1.ThreatType", ThreatType_name, ThreatType_value) + proto.RegisterEnum("google.cloud.webrisk.v1beta1.CompressionType", CompressionType_name, 
CompressionType_value) + proto.RegisterEnum("google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse_ResponseType", ComputeThreatListDiffResponse_ResponseType_name, ComputeThreatListDiffResponse_ResponseType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WebRiskServiceV1Beta1Client is the client API for WebRiskServiceV1Beta1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WebRiskServiceV1Beta1Client interface { + // Gets the most recent threat list diffs. + ComputeThreatListDiff(ctx context.Context, in *ComputeThreatListDiffRequest, opts ...grpc.CallOption) (*ComputeThreatListDiffResponse, error) + // This method is used to check whether a URI is on a given threatList. + SearchUris(ctx context.Context, in *SearchUrisRequest, opts ...grpc.CallOption) (*SearchUrisResponse, error) + // Gets the full hashes that match the requested hash prefix. + // This is used after a hash prefix is looked up in a threatList + // and there is a match. The client side threatList only holds partial hashes + // so the client must query this method to determine if there is a full + // hash match of a threat. + SearchHashes(ctx context.Context, in *SearchHashesRequest, opts ...grpc.CallOption) (*SearchHashesResponse, error) +} + +type webRiskServiceV1Beta1Client struct { + cc *grpc.ClientConn +} + +func NewWebRiskServiceV1Beta1Client(cc *grpc.ClientConn) WebRiskServiceV1Beta1Client { + return &webRiskServiceV1Beta1Client{cc} +} + +func (c *webRiskServiceV1Beta1Client) ComputeThreatListDiff(ctx context.Context, in *ComputeThreatListDiffRequest, opts ...grpc.CallOption) (*ComputeThreatListDiffResponse, error) { + out := new(ComputeThreatListDiffResponse) + err := c.cc.Invoke(ctx, "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/ComputeThreatListDiff", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webRiskServiceV1Beta1Client) SearchUris(ctx context.Context, in *SearchUrisRequest, opts ...grpc.CallOption) (*SearchUrisResponse, error) { + out := new(SearchUrisResponse) + err := c.cc.Invoke(ctx, "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/SearchUris", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webRiskServiceV1Beta1Client) SearchHashes(ctx context.Context, in *SearchHashesRequest, opts ...grpc.CallOption) (*SearchHashesResponse, error) { + out := new(SearchHashesResponse) + err := c.cc.Invoke(ctx, "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/SearchHashes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WebRiskServiceV1Beta1Server is the server API for WebRiskServiceV1Beta1 service. +type WebRiskServiceV1Beta1Server interface { + // Gets the most recent threat list diffs. + ComputeThreatListDiff(context.Context, *ComputeThreatListDiffRequest) (*ComputeThreatListDiffResponse, error) + // This method is used to check whether a URI is on a given threatList. + SearchUris(context.Context, *SearchUrisRequest) (*SearchUrisResponse, error) + // Gets the full hashes that match the requested hash prefix. 
+ // This is used after a hash prefix is looked up in a threatList + // and there is a match. The client side threatList only holds partial hashes + // so the client must query this method to determine if there is a full + // hash match of a threat. + SearchHashes(context.Context, *SearchHashesRequest) (*SearchHashesResponse, error) +} + +func RegisterWebRiskServiceV1Beta1Server(s *grpc.Server, srv WebRiskServiceV1Beta1Server) { + s.RegisterService(&_WebRiskServiceV1Beta1_serviceDesc, srv) +} + +func _WebRiskServiceV1Beta1_ComputeThreatListDiff_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ComputeThreatListDiffRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebRiskServiceV1Beta1Server).ComputeThreatListDiff(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/ComputeThreatListDiff", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebRiskServiceV1Beta1Server).ComputeThreatListDiff(ctx, req.(*ComputeThreatListDiffRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebRiskServiceV1Beta1_SearchUris_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchUrisRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebRiskServiceV1Beta1Server).SearchUris(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/SearchUris", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebRiskServiceV1Beta1Server).SearchUris(ctx, req.(*SearchUrisRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebRiskServiceV1Beta1_SearchHashes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchHashesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebRiskServiceV1Beta1Server).SearchHashes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1/SearchHashes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebRiskServiceV1Beta1Server).SearchHashes(ctx, req.(*SearchHashesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WebRiskServiceV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1", + HandlerType: (*WebRiskServiceV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ComputeThreatListDiff", + Handler: _WebRiskServiceV1Beta1_ComputeThreatListDiff_Handler, + }, + { + MethodName: "SearchUris", + Handler: _WebRiskServiceV1Beta1_SearchUris_Handler, + }, + { + MethodName: "SearchHashes", + Handler: _WebRiskServiceV1Beta1_SearchHashes_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/webrisk/v1beta1/webrisk.proto", +} + +func init() { + proto.RegisterFile("google/cloud/webrisk/v1beta1/webrisk.proto", fileDescriptor_webrisk_6b128999d2c68f99) +} + +var fileDescriptor_webrisk_6b128999d2c68f99 = []byte{ + // 1333 bytes of a gzipped FileDescriptorProto + 0x1f, 
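The generated client above is a thin wrapper around grpc.ClientConn.Invoke, so using it amounts to wrapping an existing connection. A minimal sketch of a SearchUris call follows; the checkURI helper is hypothetical, the connection is assumed to already carry TLS and API credentials, and the SearchUrisRequest/SearchUrisResponse field names are taken from the message definitions earlier in this generated file.

package main

import (
	"context"
	"log"
	"time"

	webrisk "google.golang.org/genproto/googleapis/cloud/webrisk/v1beta1"
	"google.golang.org/grpc"
)

// checkURI asks the Web Risk service whether uri is on the MALWARE list.
// conn is assumed to be an authenticated *grpc.ClientConn to the Web Risk endpoint.
func checkURI(conn *grpc.ClientConn, uri string) error {
	client := webrisk.NewWebRiskServiceV1Beta1Client(conn)

	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	resp, err := client.SearchUris(ctx, &webrisk.SearchUrisRequest{
		Uri:         uri,
		ThreatTypes: []webrisk.ThreatType{webrisk.ThreatType_MALWARE},
	})
	if err != nil {
		return err
	}
	// GetThreat returns nil when the URI is not on any requested list.
	if threat := resp.GetThreat(); threat != nil {
		log.Printf("%s is listed: %v", uri, threat.GetThreatTypes())
	}
	return nil
}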
0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xcd, 0x6f, 0x1b, 0x45, + 0x14, 0x67, 0xed, 0x7c, 0xbe, 0x75, 0x53, 0x67, 0x9a, 0x04, 0x63, 0x52, 0x1a, 0x16, 0x01, 0x56, + 0x24, 0xec, 0xc6, 0x08, 0x0e, 0xa9, 0x38, 0x38, 0xce, 0x3a, 0xb5, 0x9a, 0x3a, 0xd6, 0xd8, 0x49, + 0x04, 0xaa, 0xb4, 0x4c, 0xd6, 0x63, 0x7b, 0x94, 0x78, 0x77, 0x99, 0x59, 0xe7, 0xa3, 0x27, 0x04, + 0x67, 0x4e, 0xa8, 0x67, 0x24, 0x8e, 0x3d, 0x71, 0x40, 0x1c, 0xf8, 0x13, 0x7a, 0xe1, 0xc0, 0xbf, + 0xc0, 0x89, 0x3b, 0x37, 0x0e, 0x68, 0x66, 0x67, 0xd7, 0x6e, 0x1b, 0xd2, 0xa4, 0xe9, 0x6d, 0xf6, + 0xbd, 0x37, 0xbf, 0xf7, 0x39, 0xef, 0xbd, 0x85, 0xd5, 0x9e, 0xef, 0xf7, 0x8e, 0x68, 0xc9, 0x3d, + 0xf2, 0x87, 0x9d, 0xd2, 0x09, 0x3d, 0xe0, 0x4c, 0x1c, 0x96, 0x8e, 0xd7, 0x0e, 0x68, 0x48, 0xd6, + 0xe2, 0xef, 0x62, 0xc0, 0xfd, 0xd0, 0x47, 0xcb, 0x91, 0x6c, 0x51, 0xc9, 0x16, 0x63, 0x9e, 0x96, + 0xcd, 0x6b, 0x6e, 0x89, 0x04, 0xac, 0x44, 0x3c, 0xcf, 0x0f, 0x49, 0xc8, 0x7c, 0x4f, 0x44, 0x77, + 0xf3, 0x77, 0x34, 0x57, 0x7d, 0x1d, 0x0c, 0xbb, 0xa5, 0x90, 0x0d, 0xa8, 0x08, 0xc9, 0x20, 0x88, + 0x04, 0xac, 0x67, 0x69, 0x58, 0xae, 0xfa, 0x83, 0x60, 0x18, 0xd2, 0x76, 0x9f, 0x53, 0x12, 0x6e, + 0x33, 0x11, 0x6e, 0xb2, 0x6e, 0x17, 0xd3, 0x6f, 0x86, 0x54, 0x84, 0xa8, 0x0e, 0x66, 0xa8, 0x18, + 0x4e, 0x78, 0x16, 0xd0, 0x9c, 0xb1, 0x62, 0x14, 0xe6, 0xca, 0x85, 0xe2, 0x45, 0x36, 0x15, 0x23, + 0xa4, 0xf6, 0x59, 0x40, 0x31, 0x84, 0xc9, 0x19, 0x7d, 0x00, 0x37, 0x8e, 0x29, 0x17, 0xcc, 0xf7, + 0x9c, 0xd0, 0x3f, 0xa4, 0x5e, 0x2e, 0xb5, 0x62, 0x14, 0x32, 0x38, 0xa3, 0x89, 0x6d, 0x49, 0x43, + 0x7d, 0x30, 0x5d, 0xdf, 0x13, 0x21, 0x27, 0xcc, 0x0b, 0x45, 0x2e, 0xbd, 0x62, 0x14, 0xcc, 0x72, + 0xed, 0x62, 0x7d, 0x17, 0x39, 0x50, 0xac, 0x8e, 0xd0, 0xf0, 0x38, 0x74, 0xfe, 0x0f, 0x03, 0xcc, + 0x31, 0x26, 0x2a, 0x40, 0x76, 0x40, 0x4e, 0x9d, 0x0e, 0xeb, 0x76, 0x1d, 0xea, 0x85, 0x9c, 0x51, + 0xa1, 0xdc, 0x9d, 0xc4, 0x73, 0x03, 0x72, 0x2a, 0x21, 0xed, 0x88, 0x8a, 0xee, 0xc2, 0x82, 0x92, + 0x24, 0x21, 0x39, 0x20, 0x82, 0x26, 0xd2, 0x29, 0x25, 0x8d, 0xa4, 0xb4, 0x66, 0xc5, 0x37, 0x3a, + 0xb0, 0x24, 0x86, 0x41, 0xe0, 0xf3, 0x90, 0x76, 0x1c, 0xd7, 0x1f, 0x04, 0x9c, 0x0a, 0xe9, 0xb3, + 0x74, 0x30, 0x5d, 0x98, 0x2b, 0x7f, 0xf2, 0x6a, 0x07, 0xf5, 0x0d, 0x15, 0xd5, 0xc5, 0x04, 0x6c, + 0x8c, 0x23, 0xac, 0x7f, 0x26, 0xe0, 0xf6, 0xff, 0xc4, 0x42, 0x04, 0xbe, 0x27, 0x28, 0x1a, 0xc0, + 0x0d, 0xae, 0xcf, 0x51, 0x3e, 0x27, 0x54, 0x3e, 0xef, 0xbf, 0x56, 0x7c, 0x23, 0x9c, 0x62, 0x7c, + 0x50, 0x96, 0x65, 0xf8, 0xd8, 0x17, 0x6a, 0xc2, 0x2c, 0xe9, 0x74, 0x98, 0xaa, 0xc8, 0xdc, 0xa4, + 0x4a, 0x65, 0xf9, 0x32, 0xa5, 0x23, 0xc3, 0x76, 0x56, 0x89, 0x6f, 0xe2, 0x11, 0x08, 0x7a, 0x08, + 0x33, 0x9c, 0x0e, 0xfc, 0x63, 0x72, 0x24, 0x72, 0x53, 0x0a, 0x70, 0xed, 0xd2, 0x80, 0x58, 0x5f, + 0xc4, 0x09, 0x04, 0x5a, 0x85, 0x79, 0x8f, 0x9e, 0x38, 0xcf, 0x97, 0xe5, 0xb4, 0x2a, 0xcb, 0x9b, + 0x1e, 0x3d, 0xd9, 0x1b, 0xaf, 0xcc, 0xaf, 0x61, 0xc6, 0xed, 0x53, 0xf7, 0x50, 0x0c, 0x07, 0xb9, + 0x19, 0xa5, 0x7a, 0xf3, 0x3a, 0x61, 0xab, 0x6a, 0x2c, 0x9c, 0xa0, 0xa2, 0x06, 0x2c, 0x72, 0xea, + 0xfa, 0x83, 0x01, 0xf5, 0x3a, 0xb4, 0xe3, 0x78, 0xf4, 0x34, 0x54, 0xe5, 0xa8, 0x0a, 0xcb, 0x2c, + 0xe7, 0x63, 0x75, 0xf1, 0x6b, 0x2e, 0xb6, 0xe3, 0xd7, 0x8c, 0x6f, 0x8d, 0x5d, 0x6c, 0xd0, 0x53, + 0xa5, 0x2a, 0x6f, 0xc1, 0x4c, 0xac, 0x05, 0x2d, 0xc1, 0x94, 0xe8, 0x93, 0xf2, 0x67, 0x9f, 0xab, + 0x9a, 0xce, 0x60, 0xfd, 0x65, 0x6d, 0x40, 0x66, 0x3c, 0x81, 0xe8, 0x36, 0xbc, 0x83, 0xed, 0x56, + 0x73, 0xa7, 0xd1, 0xb2, 0x9d, 0xf6, 0x97, 0x4d, 0xdb, 0xd9, 0x6d, 0xb4, 0x9a, 0x76, 0xb5, 0x5e, + 0xab, 0xdb, 0x9b, 0xd9, 0xb7, 
0xd0, 0x0c, 0x4c, 0x6c, 0xd6, 0x6b, 0xb5, 0xac, 0x81, 0x66, 0x61, + 0x12, 0xdb, 0x2d, 0xbb, 0x9d, 0x4d, 0x59, 0x1c, 0xe6, 0x5b, 0x94, 0x70, 0xb7, 0xbf, 0xcb, 0x99, + 0x88, 0x1b, 0x47, 0x16, 0xd2, 0x43, 0xce, 0x94, 0xb6, 0x59, 0x2c, 0x8f, 0xe8, 0x01, 0x64, 0xc6, + 0x5a, 0x89, 0x7c, 0x2e, 0xe9, 0x2b, 0xf5, 0x12, 0x73, 0xd4, 0x4b, 0x84, 0xf5, 0x6d, 0x0a, 0xd0, + 0xb8, 0x52, 0x5d, 0xe0, 0x18, 0xa6, 0x22, 0x29, 0xa5, 0xd8, 0x2c, 0xaf, 0x5f, 0x8c, 0xfe, 0x32, + 0x82, 0x56, 0xb8, 0xcb, 0x19, 0xd6, 0x48, 0xf9, 0x27, 0x06, 0xcc, 0x26, 0xd4, 0x97, 0xbc, 0x30, + 0xae, 0xe1, 0x05, 0xba, 0x07, 0x26, 0x3d, 0x0d, 0x18, 0xa7, 0x8e, 0x6c, 0xcc, 0x97, 0xc8, 0x33, + 0x44, 0xe2, 0x92, 0x60, 0x7d, 0x6f, 0xc0, 0xad, 0xc8, 0x81, 0xfb, 0x44, 0xf4, 0x69, 0x12, 0xf9, + 0x3b, 0x60, 0xf6, 0x89, 0xe8, 0x3b, 0x01, 0xa7, 0x5d, 0x76, 0xaa, 0xf3, 0x0d, 0x92, 0xd4, 0x54, + 0x94, 0x37, 0x9b, 0x88, 0xbf, 0x53, 0xb0, 0xf0, 0xbc, 0x15, 0x3a, 0x15, 0xfb, 0x30, 0x1d, 0xc9, + 0x45, 0x31, 0x32, 0xcb, 0x5f, 0x5c, 0x26, 0x17, 0xcf, 0x83, 0x68, 0xad, 0x92, 0x88, 0x63, 0x34, + 0xb4, 0x0d, 0x0b, 0x1e, 0xed, 0x91, 0x90, 0x1d, 0x53, 0xe7, 0x6a, 0xd1, 0x43, 0xf1, 0x3d, 0x3b, + 0x89, 0x62, 0xfe, 0xa9, 0x01, 0x30, 0xd2, 0xf2, 0x66, 0xd3, 0x8b, 0x60, 0x42, 0x86, 0x5d, 0x0f, + 0x3a, 0x75, 0x7e, 0x31, 0xe5, 0xe9, 0x2b, 0xa5, 0xfc, 0x17, 0x03, 0x16, 0xce, 0x6b, 0x91, 0xa8, + 0x06, 0xc0, 0xc9, 0x89, 0xd3, 0x57, 0xd1, 0xd3, 0xf1, 0xfe, 0xf8, 0x62, 0xa3, 0x31, 0x39, 0xd1, + 0xc1, 0x9e, 0xe5, 0xf1, 0x11, 0x35, 0xc1, 0xe4, 0xcc, 0xa5, 0x31, 0x50, 0x14, 0xd2, 0xd2, 0x2b, + 0x80, 0x98, 0x4b, 0x37, 0xe9, 0x51, 0x48, 0x6c, 0xcf, 0xf5, 0x3b, 0xcc, 0xeb, 0x61, 0x90, 0x18, + 0x11, 0xa2, 0xf5, 0xab, 0x01, 0xb7, 0xce, 0x69, 0xc2, 0x72, 0xb1, 0x90, 0x16, 0x33, 0xaf, 0xc3, + 0x5c, 0x3d, 0x69, 0xcd, 0x57, 0xc5, 0x19, 0x93, 0x93, 0x7a, 0x24, 0x8f, 0xa5, 0xbb, 0xfa, 0x8c, + 0x30, 0x64, 0x94, 0xd1, 0x31, 0xd6, 0x6b, 0x5a, 0xad, 0x3c, 0xd7, 0x98, 0xd6, 0x47, 0x00, 0x23, + 0x6d, 0x28, 0x07, 0xd3, 0x23, 0x43, 0xd3, 0x85, 0x49, 0x1c, 0x7f, 0x5a, 0x0f, 0x60, 0x36, 0x09, + 0xa4, 0x7c, 0x79, 0xd1, 0xa3, 0x73, 0x04, 0x7b, 0x4c, 0xf5, 0xf6, 0x00, 0x11, 0xa9, 0xc5, 0x1e, + 0xcb, 0xee, 0x3a, 0x9e, 0xa6, 0xa8, 0x2c, 0x46, 0xd1, 0xb7, 0x7e, 0x32, 0x60, 0xfe, 0x25, 0xbb, + 0x24, 0x6a, 0x97, 0x71, 0x11, 0x3a, 0xc7, 0xe4, 0x68, 0x18, 0xa1, 0xa6, 0x31, 0x28, 0xd2, 0x9e, + 0xa4, 0xa0, 0x0f, 0x61, 0x4e, 0xf9, 0x1f, 0x10, 0x4e, 0x06, 0x34, 0xa4, 0x5c, 0x6f, 0x22, 0x37, + 0x24, 0xb5, 0x19, 0x13, 0x25, 0x8e, 0xdc, 0x54, 0xce, 0x1c, 0xd7, 0x1f, 0x7a, 0xa1, 0xaa, 0xbc, + 0x49, 0x0c, 0x8a, 0x54, 0x95, 0x14, 0xf4, 0x3e, 0x64, 0xa8, 0x54, 0x4a, 0x3b, 0x6a, 0xb7, 0x51, + 0xcb, 0x41, 0x06, 0x9b, 0x9a, 0x26, 0x77, 0x9a, 0x55, 0x1a, 0x3f, 0x16, 0x35, 0x2c, 0xde, 0x85, + 0xb7, 0xdb, 0xf7, 0xb1, 0x5d, 0x69, 0x9f, 0x37, 0x2a, 0x4c, 0x98, 0x7e, 0x58, 0xd9, 0xde, 0xaf, + 0x60, 0x3b, 0x6b, 0xa0, 0x25, 0x40, 0xad, 0x9d, 0x6a, 0xbd, 0xb2, 0xed, 0xd8, 0x8d, 0xad, 0x7a, + 0xc3, 0xb6, 0x71, 0xbd, 0xb1, 0x95, 0x4d, 0xa1, 0x45, 0x98, 0xdf, 0x6d, 0xec, 0x57, 0x1a, 0x6d, + 0x7b, 0xd3, 0x69, 0xed, 0xd4, 0xda, 0x4a, 0x3c, 0xbd, 0x5a, 0x83, 0x9b, 0x2f, 0xec, 0x3c, 0x68, + 0x05, 0x96, 0xab, 0x3b, 0x0f, 0x9b, 0xd8, 0x6e, 0xb5, 0xea, 0x3b, 0x8d, 0xf3, 0x14, 0x4e, 0x43, + 0x1a, 0x57, 0xf6, 0xb3, 0x86, 0x1c, 0x52, 0xb8, 0x5e, 0xb5, 0xb3, 0xa9, 0xf2, 0xbf, 0x69, 0x58, + 0xdc, 0xa7, 0x07, 0x98, 0x89, 0xc3, 0x16, 0xe5, 0xc7, 0xcc, 0xa5, 0x7b, 0x6b, 0x1b, 0x32, 0xfd, + 0xe8, 0x77, 0x03, 0x16, 0xcf, 0x1d, 0xd0, 0x68, 0xfd, 0xf5, 0x97, 0xcd, 0xfc, 0xbd, 0x6b, 0x6c, + 0x04, 0x56, 0xe1, 0xbb, 0x3f, 0xff, 0xfa, 0x31, 0x65, 
0xa1, 0x95, 0xe4, 0x47, 0x20, 0x4c, 0x04, + 0xc5, 0xba, 0x1b, 0xdd, 0x55, 0x26, 0xfe, 0x60, 0x00, 0x8c, 0x46, 0x17, 0x2a, 0x5d, 0x7e, 0xc8, + 0x45, 0x66, 0xde, 0xbd, 0xea, 0x54, 0xb4, 0x96, 0x95, 0x6d, 0x4b, 0x68, 0x21, 0xb1, 0x6d, 0xc8, + 0x99, 0x58, 0x17, 0x4a, 0x12, 0x3d, 0x31, 0x20, 0x33, 0xde, 0xbe, 0xd1, 0xda, 0x55, 0x5a, 0x7d, + 0x64, 0x53, 0xf9, 0xea, 0xd3, 0xc1, 0x7a, 0x4f, 0x59, 0x95, 0x43, 0x4b, 0x89, 0x55, 0xd1, 0xcb, + 0xd2, 0x76, 0x6d, 0xfc, 0x66, 0x40, 0xde, 0xf5, 0x07, 0x31, 0xf2, 0x0b, 0x98, 0x1b, 0x19, 0x5d, + 0x1a, 0x4d, 0xd9, 0x74, 0x9b, 0xc6, 0x57, 0x55, 0x2d, 0xd7, 0xf3, 0x8f, 0x88, 0xd7, 0x2b, 0xfa, + 0xbc, 0x57, 0xea, 0x51, 0x4f, 0xb5, 0xe4, 0x52, 0xc4, 0x22, 0x01, 0x13, 0xe7, 0xff, 0xb4, 0xdd, + 0xd3, 0xdf, 0x3f, 0xa7, 0x26, 0xb6, 0xaa, 0xfb, 0xf8, 0x69, 0x6a, 0x79, 0x2b, 0x02, 0xab, 0x2a, + 0x77, 0xb4, 0xa6, 0xa2, 0x2e, 0xbf, 0x67, 0x31, 0xfb, 0x91, 0x62, 0x3f, 0xd2, 0xec, 0x47, 0x7b, + 0x11, 0xd8, 0xc1, 0x94, 0x52, 0xfa, 0xe9, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xca, 0x5c, 0x3e, + 0xd4, 0x28, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/crawled_url.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/crawled_url.pb.go new file mode 100644 index 0000000..542aea3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/crawled_url.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/crawled_url.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A CrawledUrl resource represents a URL that was crawled during a ScanRun. Web +// Security Scanner Service crawls the web applications, following all links +// within the scope of sites, to find the URLs to test against. +type CrawledUrl struct { + // Output only. + // The http method of the request that was used to visit the URL, in + // uppercase. + HttpMethod string `protobuf:"bytes,1,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty"` + // Output only. + // The URL that was crawled. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // Output only. + // The body of the request that was used to visit the URL. 
+ Body string `protobuf:"bytes,3,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CrawledUrl) Reset() { *m = CrawledUrl{} } +func (m *CrawledUrl) String() string { return proto.CompactTextString(m) } +func (*CrawledUrl) ProtoMessage() {} +func (*CrawledUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_crawled_url_fd7b6fb38bf10fe0, []int{0} +} +func (m *CrawledUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CrawledUrl.Unmarshal(m, b) +} +func (m *CrawledUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CrawledUrl.Marshal(b, m, deterministic) +} +func (dst *CrawledUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_CrawledUrl.Merge(dst, src) +} +func (m *CrawledUrl) XXX_Size() int { + return xxx_messageInfo_CrawledUrl.Size(m) +} +func (m *CrawledUrl) XXX_DiscardUnknown() { + xxx_messageInfo_CrawledUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_CrawledUrl proto.InternalMessageInfo + +func (m *CrawledUrl) GetHttpMethod() string { + if m != nil { + return m.HttpMethod + } + return "" +} + +func (m *CrawledUrl) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *CrawledUrl) GetBody() string { + if m != nil { + return m.Body + } + return "" +} + +func init() { + proto.RegisterType((*CrawledUrl)(nil), "google.cloud.websecurityscanner.v1alpha.CrawledUrl") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/crawled_url.proto", fileDescriptor_crawled_url_fd7b6fb38bf10fe0) +} + +var fileDescriptor_crawled_url_fd7b6fb38bf10fe0 = []byte{ + // 235 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x31, 0x4b, 0x04, 0x31, + 0x10, 0x85, 0x59, 0x4f, 0x04, 0xc7, 0x42, 0x49, 0xb5, 0x88, 0xa0, 0xd8, 0x28, 0x08, 0x09, 0x62, + 0x25, 0x76, 0x67, 0x2d, 0x1c, 0x8a, 0x85, 0x36, 0xc7, 0x6c, 0x36, 0x64, 0x17, 0xe6, 0x32, 0x61, + 0x36, 0xeb, 0x71, 0xbf, 0xc5, 0x3f, 0x2b, 0x9b, 0x2c, 0x58, 0x5c, 0xa1, 0xdd, 0xf0, 0x1e, 0xdf, + 0xcb, 0x47, 0xe0, 0xd1, 0x33, 0x7b, 0x72, 0xc6, 0x12, 0x8f, 0xad, 0xd9, 0xba, 0x66, 0x70, 0x76, + 0x94, 0x3e, 0xed, 0x06, 0x8b, 0x21, 0x38, 0x31, 0x5f, 0xf7, 0x48, 0xb1, 0x43, 0x63, 0x05, 0xb7, + 0xe4, 0xda, 0xf5, 0x28, 0xa4, 0xa3, 0x70, 0x62, 0x75, 0x53, 0x50, 0x9d, 0x51, 0xbd, 0x8f, 0xea, + 0x19, 0x3d, 0xbf, 0x98, 0xdf, 0xc0, 0xd8, 0x1b, 0x0c, 0x81, 0x13, 0xa6, 0x9e, 0xc3, 0x50, 0x66, + 0xae, 0xdf, 0x00, 0x9e, 0xcb, 0xf6, 0xbb, 0x90, 0xba, 0x84, 0x93, 0x2e, 0xa5, 0xb8, 0xde, 0xb8, + 0xd4, 0x71, 0x5b, 0x57, 0x57, 0xd5, 0xed, 0xf1, 0x2b, 0x4c, 0xd1, 0x4b, 0x4e, 0xd4, 0x19, 0x2c, + 0x46, 0xa1, 0xfa, 0x20, 0x17, 0xd3, 0xa9, 0x14, 0x1c, 0x36, 0xdc, 0xee, 0xea, 0x45, 0x8e, 0xf2, + 0xbd, 0xfc, 0xae, 0xe0, 0xce, 0xf2, 0x46, 0xff, 0x53, 0x71, 0x79, 0xfa, 0xab, 0xb0, 0x9a, 0xac, + 0x56, 0xd5, 0xe7, 0xc7, 0xcc, 0x7a, 0x26, 0x0c, 0x5e, 0xb3, 0x78, 0xe3, 0x5d, 0xc8, 0xce, 0xa6, + 0x54, 0x18, 0xfb, 0xe1, 0xcf, 0x8f, 0x7b, 0xda, 0xaf, 0x9a, 0xa3, 0xbc, 0xf2, 0xf0, 0x13, 0x00, + 0x00, 0xff, 0xff, 0x7b, 0x55, 0xd9, 0x73, 0x7d, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding.pb.go new file mode 100644 index 0000000..74594eb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding.pb.go @@ -0,0 
+1,322 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/finding.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Types of Findings. +type Finding_FindingType int32 + +const ( + // The invalid finding type. + Finding_FINDING_TYPE_UNSPECIFIED Finding_FindingType = 0 + // A page that was served over HTTPS also resources over HTTP. A + // man-in-the-middle attacker could tamper with the HTTP resource and gain + // full access to the website that loads the resource or to monitor the + // actions taken by the user. + Finding_MIXED_CONTENT Finding_FindingType = 1 + // The version of an included library is known to contain a security issue. + // The scanner checks the version of library in use against a known list of + // vulnerable libraries. False positives are possible if the version + // detection fails or if the library has been manually patched. + Finding_OUTDATED_LIBRARY Finding_FindingType = 2 + // This type of vulnerability occurs when the value of a request parameter + // is reflected at the beginning of the response, for example, in requests + // using JSONP. Under certain circumstances, an attacker may be able to + // supply an alphanumeric-only Flash file in the vulnerable parameter + // causing the browser to execute the Flash file as if it originated on the + // vulnerable server. + Finding_ROSETTA_FLASH Finding_FindingType = 5 + // A cross-site scripting (XSS) bug is found via JavaScript callback. For + // detailed explanations on XSS, see + // https://www.google.com/about/appsecurity/learning/xss/. + Finding_XSS_CALLBACK Finding_FindingType = 3 + // A potential cross-site scripting (XSS) bug due to JavaScript breakage. + // In some circumstances, the application under test might modify the test + // string before it is parsed by the browser. When the browser attempts to + // runs this modified test string, it will likely break and throw a + // JavaScript execution error, thus an injection issue is occurring. + // However, it may not be exploitable. Manual verification is needed to see + // if the test string modifications can be evaded and confirm that the issue + // is in fact an XSS vulnerability. For detailed explanations on XSS, see + // https://www.google.com/about/appsecurity/learning/xss/. + Finding_XSS_ERROR Finding_FindingType = 4 + // An application appears to be transmitting a password field in clear text. + // An attacker can eavesdrop network traffic and sniff the password field. 
+ Finding_CLEAR_TEXT_PASSWORD Finding_FindingType = 6 +) + +var Finding_FindingType_name = map[int32]string{ + 0: "FINDING_TYPE_UNSPECIFIED", + 1: "MIXED_CONTENT", + 2: "OUTDATED_LIBRARY", + 5: "ROSETTA_FLASH", + 3: "XSS_CALLBACK", + 4: "XSS_ERROR", + 6: "CLEAR_TEXT_PASSWORD", +} +var Finding_FindingType_value = map[string]int32{ + "FINDING_TYPE_UNSPECIFIED": 0, + "MIXED_CONTENT": 1, + "OUTDATED_LIBRARY": 2, + "ROSETTA_FLASH": 5, + "XSS_CALLBACK": 3, + "XSS_ERROR": 4, + "CLEAR_TEXT_PASSWORD": 6, +} + +func (x Finding_FindingType) String() string { + return proto.EnumName(Finding_FindingType_name, int32(x)) +} +func (Finding_FindingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_finding_0d846c4fd25e6d9a, []int{0, 0} +} + +// A Finding resource represents a vulnerability instance identified during a +// ScanRun. +type Finding struct { + // Output only. + // The resource name of the Finding. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. + // The finding IDs are generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. + // The type of the Finding. + FindingType Finding_FindingType `protobuf:"varint,2,opt,name=finding_type,json=findingType,proto3,enum=google.cloud.websecurityscanner.v1alpha.Finding_FindingType" json:"finding_type,omitempty"` + // Output only. + // The http method of the request that triggered the vulnerability, in + // uppercase. + HttpMethod string `protobuf:"bytes,3,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty"` + // Output only. + // The URL produced by the server-side fuzzer and used in the request that + // triggered the vulnerability. + FuzzedUrl string `protobuf:"bytes,4,opt,name=fuzzed_url,json=fuzzedUrl,proto3" json:"fuzzed_url,omitempty"` + // Output only. + // The body of the request that triggered the vulnerability. + Body string `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + // Output only. + // The description of the vulnerability. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Output only. + // The URL containing human-readable payload that user can leverage to + // reproduce the vulnerability. + ReproductionUrl string `protobuf:"bytes,7,opt,name=reproduction_url,json=reproductionUrl,proto3" json:"reproduction_url,omitempty"` + // Output only. + // If the vulnerability was originated from nested IFrame, the immediate + // parent IFrame is reported. + FrameUrl string `protobuf:"bytes,8,opt,name=frame_url,json=frameUrl,proto3" json:"frame_url,omitempty"` + // Output only. + // The URL where the browser lands when the vulnerability is detected. + FinalUrl string `protobuf:"bytes,9,opt,name=final_url,json=finalUrl,proto3" json:"final_url,omitempty"` + // Output only. + // The tracking ID uniquely identifies a vulnerability instance across + // multiple ScanRuns. + TrackingId string `protobuf:"bytes,10,opt,name=tracking_id,json=trackingId,proto3" json:"tracking_id,omitempty"` + // Output only. + // An addon containing information about outdated libraries. + OutdatedLibrary *OutdatedLibrary `protobuf:"bytes,11,opt,name=outdated_library,json=outdatedLibrary,proto3" json:"outdated_library,omitempty"` + // Output only. + // An addon containing detailed information regarding any resource causing the + // vulnerability such as JavaScript sources, image, audio files, etc. 
+ ViolatingResource *ViolatingResource `protobuf:"bytes,12,opt,name=violating_resource,json=violatingResource,proto3" json:"violating_resource,omitempty"` + // Output only. + // An addon containing information about request parameters which were found + // to be vulnerable. + VulnerableParameters *VulnerableParameters `protobuf:"bytes,13,opt,name=vulnerable_parameters,json=vulnerableParameters,proto3" json:"vulnerable_parameters,omitempty"` + // Output only. + // An addon containing information reported for an XSS, if any. + Xss *Xss `protobuf:"bytes,14,opt,name=xss,proto3" json:"xss,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Finding) Reset() { *m = Finding{} } +func (m *Finding) String() string { return proto.CompactTextString(m) } +func (*Finding) ProtoMessage() {} +func (*Finding) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_0d846c4fd25e6d9a, []int{0} +} +func (m *Finding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Finding.Unmarshal(m, b) +} +func (m *Finding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Finding.Marshal(b, m, deterministic) +} +func (dst *Finding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Finding.Merge(dst, src) +} +func (m *Finding) XXX_Size() int { + return xxx_messageInfo_Finding.Size(m) +} +func (m *Finding) XXX_DiscardUnknown() { + xxx_messageInfo_Finding.DiscardUnknown(m) +} + +var xxx_messageInfo_Finding proto.InternalMessageInfo + +func (m *Finding) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Finding) GetFindingType() Finding_FindingType { + if m != nil { + return m.FindingType + } + return Finding_FINDING_TYPE_UNSPECIFIED +} + +func (m *Finding) GetHttpMethod() string { + if m != nil { + return m.HttpMethod + } + return "" +} + +func (m *Finding) GetFuzzedUrl() string { + if m != nil { + return m.FuzzedUrl + } + return "" +} + +func (m *Finding) GetBody() string { + if m != nil { + return m.Body + } + return "" +} + +func (m *Finding) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Finding) GetReproductionUrl() string { + if m != nil { + return m.ReproductionUrl + } + return "" +} + +func (m *Finding) GetFrameUrl() string { + if m != nil { + return m.FrameUrl + } + return "" +} + +func (m *Finding) GetFinalUrl() string { + if m != nil { + return m.FinalUrl + } + return "" +} + +func (m *Finding) GetTrackingId() string { + if m != nil { + return m.TrackingId + } + return "" +} + +func (m *Finding) GetOutdatedLibrary() *OutdatedLibrary { + if m != nil { + return m.OutdatedLibrary + } + return nil +} + +func (m *Finding) GetViolatingResource() *ViolatingResource { + if m != nil { + return m.ViolatingResource + } + return nil +} + +func (m *Finding) GetVulnerableParameters() *VulnerableParameters { + if m != nil { + return m.VulnerableParameters + } + return nil +} + +func (m *Finding) GetXss() *Xss { + if m != nil { + return m.Xss + } + return nil +} + +func init() { + proto.RegisterType((*Finding)(nil), "google.cloud.websecurityscanner.v1alpha.Finding") + proto.RegisterEnum("google.cloud.websecurityscanner.v1alpha.Finding_FindingType", Finding_FindingType_name, Finding_FindingType_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/finding.proto", fileDescriptor_finding_0d846c4fd25e6d9a) +} + +var fileDescriptor_finding_0d846c4fd25e6d9a = []byte{ + // 631 bytes of a 
gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x5f, 0x6f, 0xd3, 0x3e, + 0x14, 0xfd, 0x65, 0xff, 0xeb, 0x74, 0x5b, 0xe6, 0xdf, 0x10, 0xd1, 0x18, 0x5a, 0xb5, 0x17, 0x86, + 0x40, 0x89, 0x18, 0x42, 0x42, 0x0c, 0x90, 0xd2, 0x36, 0x85, 0x88, 0xae, 0xad, 0x9c, 0x0c, 0x3a, + 0x5e, 0x2c, 0x37, 0x71, 0xb3, 0x88, 0x34, 0x8e, 0x9c, 0xa4, 0xd0, 0x7d, 0x12, 0x5e, 0xe1, 0x93, + 0x22, 0x3b, 0xe9, 0x28, 0x0c, 0x89, 0xf2, 0x54, 0xfb, 0x9c, 0x7b, 0xce, 0xb9, 0xf7, 0xaa, 0x31, + 0x78, 0x16, 0x32, 0x16, 0xc6, 0xd4, 0xf4, 0x63, 0x56, 0x04, 0xe6, 0x67, 0x3a, 0xca, 0xa8, 0x5f, + 0xf0, 0x28, 0x9f, 0x65, 0x3e, 0x49, 0x12, 0xca, 0xcd, 0xe9, 0x13, 0x12, 0xa7, 0x57, 0xc4, 0x1c, + 0x47, 0x49, 0x10, 0x25, 0xa1, 0x91, 0x72, 0x96, 0x33, 0xf8, 0xa0, 0x94, 0x19, 0x52, 0x66, 0xdc, + 0x96, 0x19, 0x95, 0xec, 0xe0, 0xb0, 0xf2, 0x27, 0x69, 0x64, 0x92, 0x24, 0x61, 0x39, 0xc9, 0x23, + 0x96, 0x64, 0xa5, 0xcd, 0xc1, 0xd9, 0x3f, 0xa6, 0x63, 0x12, 0x04, 0x2c, 0x29, 0xc5, 0xc7, 0xdf, + 0x37, 0xc1, 0x66, 0xa7, 0xc4, 0x21, 0x04, 0x6b, 0x09, 0x99, 0x50, 0x5d, 0x69, 0x28, 0x27, 0x35, + 0x24, 0xcf, 0x10, 0x83, 0xfa, 0x5c, 0x96, 0xcf, 0x52, 0xaa, 0xaf, 0x34, 0x94, 0x93, 0x9d, 0xd3, + 0x97, 0xc6, 0x92, 0xad, 0x1b, 0x95, 0xf7, 0xfc, 0xd7, 0x9b, 0xa5, 0x14, 0xa9, 0xe3, 0x9f, 0x17, + 0x78, 0x04, 0xd4, 0xab, 0x3c, 0x4f, 0xf1, 0x84, 0xe6, 0x57, 0x2c, 0xd0, 0x57, 0x65, 0x36, 0x10, + 0xd0, 0xb9, 0x44, 0xe0, 0x7d, 0x00, 0xc6, 0xc5, 0xf5, 0x35, 0x0d, 0x70, 0xc1, 0x63, 0x7d, 0x4d, + 0xf2, 0xb5, 0x12, 0xb9, 0xe0, 0xb1, 0x68, 0x7a, 0xc4, 0x82, 0x99, 0xbe, 0x5e, 0x36, 0x2d, 0xce, + 0xb0, 0x01, 0xd4, 0x80, 0x66, 0x3e, 0x8f, 0x52, 0xb1, 0x27, 0x7d, 0x43, 0x52, 0x8b, 0x10, 0x7c, + 0x08, 0x34, 0x4e, 0x53, 0xce, 0x82, 0xc2, 0x17, 0x77, 0x69, 0xbd, 0x29, 0xcb, 0x76, 0x17, 0x71, + 0x11, 0x70, 0x0f, 0xd4, 0xc6, 0x9c, 0x4c, 0xa8, 0xac, 0xd9, 0x92, 0x35, 0x5b, 0x12, 0x98, 0x93, + 0x51, 0x42, 0x62, 0x49, 0xd6, 0x2a, 0x52, 0x00, 0x82, 0x3c, 0x02, 0x6a, 0xce, 0x89, 0xff, 0x49, + 0x2c, 0x2f, 0x0a, 0x74, 0x50, 0x8e, 0x36, 0x87, 0x9c, 0x00, 0xfa, 0x40, 0x63, 0x45, 0x1e, 0x90, + 0x9c, 0x06, 0x38, 0x8e, 0x46, 0x9c, 0xf0, 0x99, 0xae, 0x36, 0x94, 0x13, 0xf5, 0xf4, 0xf9, 0xd2, + 0x0b, 0xee, 0x57, 0x06, 0xdd, 0x52, 0x8f, 0x76, 0xd9, 0xaf, 0x00, 0x8c, 0x00, 0x9c, 0x46, 0x2c, + 0x26, 0xb9, 0x68, 0x83, 0xd3, 0x8c, 0x15, 0xdc, 0xa7, 0x7a, 0x5d, 0xc6, 0xbc, 0x58, 0x3a, 0xe6, + 0xfd, 0xdc, 0x02, 0x55, 0x0e, 0x68, 0x6f, 0xfa, 0x3b, 0x04, 0x39, 0xb8, 0x33, 0x2d, 0xe2, 0x84, + 0x72, 0x32, 0x8a, 0x29, 0x4e, 0x89, 0xd8, 0x52, 0x4e, 0x79, 0xa6, 0x6f, 0xcb, 0xb4, 0x57, 0xcb, + 0xa7, 0xdd, 0xb8, 0x0c, 0x6e, 0x4c, 0xd0, 0xfe, 0xf4, 0x0f, 0x28, 0x7c, 0x0d, 0x56, 0xbf, 0x64, + 0x99, 0xbe, 0x23, 0x13, 0x1e, 0x2f, 0x9d, 0x30, 0xcc, 0x32, 0x24, 0x84, 0xc7, 0xdf, 0x14, 0xa0, + 0x2e, 0xfc, 0x39, 0xe1, 0x21, 0xd0, 0x3b, 0x4e, 0xaf, 0xed, 0xf4, 0xde, 0x60, 0xef, 0x72, 0x60, + 0xe3, 0x8b, 0x9e, 0x3b, 0xb0, 0x5b, 0x4e, 0xc7, 0xb1, 0xdb, 0xda, 0x7f, 0x70, 0x0f, 0x6c, 0x9f, + 0x3b, 0x43, 0xbb, 0x8d, 0x5b, 0xfd, 0x9e, 0x67, 0xf7, 0x3c, 0x4d, 0x81, 0xfb, 0x40, 0xeb, 0x5f, + 0x78, 0x6d, 0xcb, 0xb3, 0xdb, 0xb8, 0xeb, 0x34, 0x91, 0x85, 0x2e, 0xb5, 0x15, 0x51, 0x88, 0xfa, + 0xae, 0xed, 0x79, 0x16, 0xee, 0x74, 0x2d, 0xf7, 0xad, 0xb6, 0x0e, 0x35, 0x50, 0x1f, 0xba, 0x2e, + 0x6e, 0x59, 0xdd, 0x6e, 0xd3, 0x6a, 0xbd, 0xd3, 0x56, 0xe1, 0x36, 0xa8, 0x09, 0xc4, 0x46, 0xa8, + 0x8f, 0xb4, 0x35, 0x78, 0x17, 0xfc, 0xdf, 0xea, 0xda, 0x16, 0xc2, 0x9e, 0x3d, 0xf4, 0xf0, 0xc0, + 0x72, 0xdd, 0x0f, 0x7d, 0xd4, 0xd6, 0x36, 0x9a, 0x5f, 0x15, 0xf0, 0xc8, 0x67, 0x93, 0x65, 
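Because every Finding field gets a nil-safe generated getter, downstream code can inspect scan results without guarding each access. A rough sketch, using the getters above together with the addon getters from finding_addon.pb.go further below; the describeFinding helper is hypothetical and not part of this package.

package main

import (
	"fmt"

	websecurityscanner "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha"
)

// describeFinding renders a one-line summary of a Finding. The generated
// getters return zero values on nil receivers, so the chained calls stay
// safe even when an addon message is absent.
func describeFinding(f *websecurityscanner.Finding) string {
	switch f.GetFindingType() {
	case websecurityscanner.Finding_XSS_CALLBACK, websecurityscanner.Finding_XSS_ERROR:
		return fmt.Sprintf("XSS at %s (%d stack traces)",
			f.GetFinalUrl(), len(f.GetXss().GetStackTraces()))
	case websecurityscanner.Finding_OUTDATED_LIBRARY:
		lib := f.GetOutdatedLibrary()
		return fmt.Sprintf("outdated library %s %s at %s",
			lib.GetLibraryName(), lib.GetVersion(), f.GetFinalUrl())
	default:
		return fmt.Sprintf("%v: %s", f.GetFindingType(), f.GetDescription())
	}
}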
0x87, + 0x6b, 0xd6, 0xab, 0x81, 0x06, 0xe2, 0x13, 0x1f, 0x28, 0x1f, 0x2f, 0x2b, 0x61, 0xc8, 0x62, 0x92, + 0x84, 0x06, 0xe3, 0xa1, 0x19, 0xd2, 0x44, 0x3e, 0x00, 0x66, 0x49, 0x91, 0x34, 0xca, 0xfe, 0xfa, + 0x80, 0x9c, 0xdd, 0xa6, 0x46, 0x1b, 0xd2, 0xe5, 0xe9, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8a, + 0x7d, 0x4d, 0xc7, 0x03, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_addon.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_addon.pb.go new file mode 100644 index 0000000..e6b26df --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_addon.pb.go @@ -0,0 +1,257 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/finding_addon.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Information reported for an outdated library. +type OutdatedLibrary struct { + // The name of the outdated library. + LibraryName string `protobuf:"bytes,1,opt,name=library_name,json=libraryName,proto3" json:"library_name,omitempty"` + // The version number. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // URLs to learn more information about the vulnerabilities in the library. 
+ LearnMoreUrls []string `protobuf:"bytes,3,rep,name=learn_more_urls,json=learnMoreUrls,proto3" json:"learn_more_urls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutdatedLibrary) Reset() { *m = OutdatedLibrary{} } +func (m *OutdatedLibrary) String() string { return proto.CompactTextString(m) } +func (*OutdatedLibrary) ProtoMessage() {} +func (*OutdatedLibrary) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_0a47c25fab663451, []int{0} +} +func (m *OutdatedLibrary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutdatedLibrary.Unmarshal(m, b) +} +func (m *OutdatedLibrary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutdatedLibrary.Marshal(b, m, deterministic) +} +func (dst *OutdatedLibrary) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutdatedLibrary.Merge(dst, src) +} +func (m *OutdatedLibrary) XXX_Size() int { + return xxx_messageInfo_OutdatedLibrary.Size(m) +} +func (m *OutdatedLibrary) XXX_DiscardUnknown() { + xxx_messageInfo_OutdatedLibrary.DiscardUnknown(m) +} + +var xxx_messageInfo_OutdatedLibrary proto.InternalMessageInfo + +func (m *OutdatedLibrary) GetLibraryName() string { + if m != nil { + return m.LibraryName + } + return "" +} + +func (m *OutdatedLibrary) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *OutdatedLibrary) GetLearnMoreUrls() []string { + if m != nil { + return m.LearnMoreUrls + } + return nil +} + +// Information regarding any resource causing the vulnerability such +// as JavaScript sources, image, audio files, etc. +type ViolatingResource struct { + // The MIME type of this resource. + ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // URL of this violating resource. + ResourceUrl string `protobuf:"bytes,2,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ViolatingResource) Reset() { *m = ViolatingResource{} } +func (m *ViolatingResource) String() string { return proto.CompactTextString(m) } +func (*ViolatingResource) ProtoMessage() {} +func (*ViolatingResource) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_0a47c25fab663451, []int{1} +} +func (m *ViolatingResource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ViolatingResource.Unmarshal(m, b) +} +func (m *ViolatingResource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ViolatingResource.Marshal(b, m, deterministic) +} +func (dst *ViolatingResource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ViolatingResource.Merge(dst, src) +} +func (m *ViolatingResource) XXX_Size() int { + return xxx_messageInfo_ViolatingResource.Size(m) +} +func (m *ViolatingResource) XXX_DiscardUnknown() { + xxx_messageInfo_ViolatingResource.DiscardUnknown(m) +} + +var xxx_messageInfo_ViolatingResource proto.InternalMessageInfo + +func (m *ViolatingResource) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *ViolatingResource) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +// Information about vulnerable request parameters. +type VulnerableParameters struct { + // The vulnerable parameter names. 
+ ParameterNames []string `protobuf:"bytes,1,rep,name=parameter_names,json=parameterNames,proto3" json:"parameter_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerableParameters) Reset() { *m = VulnerableParameters{} } +func (m *VulnerableParameters) String() string { return proto.CompactTextString(m) } +func (*VulnerableParameters) ProtoMessage() {} +func (*VulnerableParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_0a47c25fab663451, []int{2} +} +func (m *VulnerableParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerableParameters.Unmarshal(m, b) +} +func (m *VulnerableParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerableParameters.Marshal(b, m, deterministic) +} +func (dst *VulnerableParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerableParameters.Merge(dst, src) +} +func (m *VulnerableParameters) XXX_Size() int { + return xxx_messageInfo_VulnerableParameters.Size(m) +} +func (m *VulnerableParameters) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerableParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerableParameters proto.InternalMessageInfo + +func (m *VulnerableParameters) GetParameterNames() []string { + if m != nil { + return m.ParameterNames + } + return nil +} + +// Information reported for an XSS. +type Xss struct { + // Stack traces leading to the point where the XSS occurred. + StackTraces []string `protobuf:"bytes,1,rep,name=stack_traces,json=stackTraces,proto3" json:"stack_traces,omitempty"` + // An error message generated by a javascript breakage. + ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Xss) Reset() { *m = Xss{} } +func (m *Xss) String() string { return proto.CompactTextString(m) } +func (*Xss) ProtoMessage() {} +func (*Xss) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_0a47c25fab663451, []int{3} +} +func (m *Xss) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Xss.Unmarshal(m, b) +} +func (m *Xss) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Xss.Marshal(b, m, deterministic) +} +func (dst *Xss) XXX_Merge(src proto.Message) { + xxx_messageInfo_Xss.Merge(dst, src) +} +func (m *Xss) XXX_Size() int { + return xxx_messageInfo_Xss.Size(m) +} +func (m *Xss) XXX_DiscardUnknown() { + xxx_messageInfo_Xss.DiscardUnknown(m) +} + +var xxx_messageInfo_Xss proto.InternalMessageInfo + +func (m *Xss) GetStackTraces() []string { + if m != nil { + return m.StackTraces + } + return nil +} + +func (m *Xss) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +func init() { + proto.RegisterType((*OutdatedLibrary)(nil), "google.cloud.websecurityscanner.v1alpha.OutdatedLibrary") + proto.RegisterType((*ViolatingResource)(nil), "google.cloud.websecurityscanner.v1alpha.ViolatingResource") + proto.RegisterType((*VulnerableParameters)(nil), "google.cloud.websecurityscanner.v1alpha.VulnerableParameters") + proto.RegisterType((*Xss)(nil), "google.cloud.websecurityscanner.v1alpha.Xss") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/finding_addon.proto", fileDescriptor_finding_addon_0a47c25fab663451) +} + +var 
fileDescriptor_finding_addon_0a47c25fab663451 = []byte{ + // 388 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6b, 0xd4, 0x50, + 0x10, 0xc6, 0x89, 0x0b, 0x4a, 0xdf, 0xb6, 0x2e, 0x0d, 0x1e, 0x82, 0x78, 0xa8, 0x2b, 0xd8, 0x82, + 0x90, 0x20, 0x1e, 0x7b, 0x10, 0x7b, 0xf0, 0xe4, 0xea, 0xb2, 0xb4, 0xc5, 0x7a, 0x09, 0xb3, 0xc9, + 0xf8, 0x7c, 0xf8, 0x32, 0x13, 0x66, 0x5e, 0x56, 0xf2, 0xc7, 0xf8, 0xbf, 0x4a, 0x5e, 0xb2, 0x7b, + 0xd9, 0x83, 0xbd, 0xe5, 0xfd, 0x66, 0xbe, 0xf9, 0x66, 0xf8, 0x62, 0xae, 0x2d, 0xb3, 0xf5, 0x58, + 0x54, 0x9e, 0xbb, 0xba, 0xf8, 0x83, 0x5b, 0xc5, 0xaa, 0x13, 0x17, 0x7a, 0xad, 0x80, 0x08, 0xa5, + 0xd8, 0xbd, 0x07, 0xdf, 0xfe, 0x82, 0xe2, 0xa7, 0xa3, 0xda, 0x91, 0x2d, 0xa1, 0xae, 0x99, 0xf2, + 0x56, 0x38, 0x70, 0x7a, 0x39, 0x8a, 0xf3, 0x28, 0xce, 0x8f, 0xc5, 0xf9, 0x24, 0x7e, 0xf9, 0x6a, + 0x72, 0x81, 0xd6, 0x15, 0x40, 0xc4, 0x01, 0x82, 0x63, 0xd2, 0x71, 0xcc, 0x72, 0x67, 0x16, 0xdf, + 0xba, 0x50, 0x43, 0xc0, 0xfa, 0x8b, 0xdb, 0x0a, 0x48, 0x9f, 0xbe, 0x36, 0xa7, 0x7e, 0xfc, 0x2c, + 0x09, 0x1a, 0xcc, 0x92, 0x8b, 0xe4, 0xea, 0x64, 0x33, 0x9f, 0xd8, 0x57, 0x68, 0x30, 0xcd, 0xcc, + 0xb3, 0x1d, 0x8a, 0x3a, 0xa6, 0xec, 0x49, 0xac, 0xee, 0x9f, 0xe9, 0x5b, 0xb3, 0xf0, 0x08, 0x42, + 0x65, 0xc3, 0x82, 0x65, 0x27, 0x5e, 0xb3, 0xd9, 0xc5, 0xec, 0xea, 0x64, 0x73, 0x16, 0xf1, 0x8a, + 0x05, 0xef, 0xc4, 0xeb, 0xf2, 0xc1, 0x9c, 0xdf, 0x3b, 0xf6, 0x10, 0x1c, 0xd9, 0x0d, 0x2a, 0x77, + 0x52, 0xe1, 0xe0, 0x5c, 0x31, 0x05, 0xa4, 0x50, 0x86, 0xbe, 0x3d, 0x38, 0x4f, 0xec, 0xb6, 0x6f, + 0x63, 0x8b, 0x4c, 0xed, 0xc3, 0xf4, 0xc9, 0x7e, 0xbe, 0x67, 0x77, 0xe2, 0x97, 0x1f, 0xcd, 0x8b, + 0xfb, 0xce, 0x13, 0x0a, 0x6c, 0x3d, 0xae, 0x41, 0xa0, 0xc1, 0x80, 0xa2, 0xe9, 0xa5, 0x59, 0xb4, + 0xfb, 0x57, 0xbc, 0x4c, 0xb3, 0x24, 0xae, 0xf6, 0xfc, 0x80, 0x87, 0xe3, 0x74, 0xb9, 0x32, 0xb3, + 0xef, 0xaa, 0x83, 0x95, 0x06, 0xa8, 0x7e, 0x97, 0x41, 0xa0, 0x3a, 0x34, 0xcf, 0x23, 0xbb, 0x8d, + 0x28, 0x7d, 0x63, 0xce, 0x50, 0x84, 0xa5, 0x6c, 0x50, 0x15, 0x2c, 0x4e, 0xeb, 0x9c, 0x46, 0xb8, + 0x1a, 0xd9, 0xcd, 0xdf, 0xc4, 0xbc, 0xab, 0xb8, 0xc9, 0x1f, 0x19, 0xd8, 0xcd, 0xf9, 0xe7, 0x31, + 0xee, 0x4f, 0x43, 0xda, 0xeb, 0x21, 0xa5, 0x75, 0xf2, 0xe3, 0x61, 0x52, 0x5b, 0xf6, 0x40, 0x36, + 0x67, 0xb1, 0x85, 0x45, 0x8a, 0x19, 0x16, 0x63, 0x09, 0x5a, 0xa7, 0xff, 0xfd, 0x95, 0xae, 0x8f, + 0x4b, 0xdb, 0xa7, 0x71, 0xca, 0x87, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xb6, 0x61, 0x84, 0x84, + 0x8f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_type_stats.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_type_stats.pb.go new file mode 100644 index 0000000..16a57b8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/finding_type_stats.pb.go @@ -0,0 +1,100 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A FindingTypeStats resource represents stats regarding a specific FindingType +// of Findings under a given ScanRun. +type FindingTypeStats struct { + // Output only. + // The finding type associated with the stats. + FindingType Finding_FindingType `protobuf:"varint,1,opt,name=finding_type,json=findingType,proto3,enum=google.cloud.websecurityscanner.v1alpha.Finding_FindingType" json:"finding_type,omitempty"` + // Output only. + // The count of findings belonging to this finding type. + FindingCount int32 `protobuf:"varint,2,opt,name=finding_count,json=findingCount,proto3" json:"finding_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FindingTypeStats) Reset() { *m = FindingTypeStats{} } +func (m *FindingTypeStats) String() string { return proto.CompactTextString(m) } +func (*FindingTypeStats) ProtoMessage() {} +func (*FindingTypeStats) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_type_stats_93e51e03522ec716, []int{0} +} +func (m *FindingTypeStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FindingTypeStats.Unmarshal(m, b) +} +func (m *FindingTypeStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FindingTypeStats.Marshal(b, m, deterministic) +} +func (dst *FindingTypeStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_FindingTypeStats.Merge(dst, src) +} +func (m *FindingTypeStats) XXX_Size() int { + return xxx_messageInfo_FindingTypeStats.Size(m) +} +func (m *FindingTypeStats) XXX_DiscardUnknown() { + xxx_messageInfo_FindingTypeStats.DiscardUnknown(m) +} + +var xxx_messageInfo_FindingTypeStats proto.InternalMessageInfo + +func (m *FindingTypeStats) GetFindingType() Finding_FindingType { + if m != nil { + return m.FindingType + } + return Finding_FINDING_TYPE_UNSPECIFIED +} + +func (m *FindingTypeStats) GetFindingCount() int32 { + if m != nil { + return m.FindingCount + } + return 0 +} + +func init() { + proto.RegisterType((*FindingTypeStats)(nil), "google.cloud.websecurityscanner.v1alpha.FindingTypeStats") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto", fileDescriptor_finding_type_stats_93e51e03522ec716) +} + +var fileDescriptor_finding_type_stats_93e51e03522ec716 = []byte{ + // 256 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x72, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0xce, 0xc9, 0x2f, 0x4d, 0xd1, 0x2f, 0x4f, 0x4d, 0x2a, 0x4e, 0x4d, 0x2e, + 0x2d, 0xca, 0x2c, 0xa9, 0x2c, 0x4e, 0x4e, 0xcc, 0xcb, 0x4b, 0x2d, 0xd2, 0x2f, 0x33, 0x4c, 0xcc, + 0x29, 0xc8, 0x48, 0xd4, 0x4f, 0xcb, 0xcc, 0x4b, 0xc9, 0xcc, 0x4b, 0x8f, 0x2f, 0xa9, 0x2c, 0x48, + 0x8d, 0x2f, 0x2e, 0x49, 0x2c, 0x29, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x52, 0x87, 0x98, + 0xa0, 0x07, 0x36, 0x41, 0x0f, 0xd3, 0x04, 0x3d, 0xa8, 0x09, 0x52, 0x32, 0x50, 0xab, 0x12, 0x0b, + 0x32, 0xf5, 0x13, 0xf3, 0xf2, 0xf2, 0x4b, 0x12, 0x4b, 0x32, 0xf3, 0xf3, 0xa0, 0xc6, 0x48, 0x99, + 0x92, 0xe8, 0x10, 0x88, 0x36, 0xa5, 0x19, 0x8c, 0x5c, 0x02, 0x6e, 0x10, 0x91, 
0x90, 0xca, 0x82, + 0xd4, 0x60, 0x90, 0xc3, 0x84, 0xe2, 0xb9, 0x78, 0x90, 0x9d, 0x2b, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, + 0x67, 0x64, 0xa3, 0x47, 0xa4, 0x4b, 0xf5, 0xa0, 0x06, 0xea, 0x21, 0x19, 0x1c, 0xc4, 0x9d, 0x86, + 0xe0, 0x08, 0x29, 0x73, 0xf1, 0xc2, 0x2c, 0x48, 0xce, 0x2f, 0xcd, 0x2b, 0x91, 0x60, 0x52, 0x60, + 0xd4, 0x60, 0x0d, 0x82, 0xd9, 0xea, 0x0c, 0x12, 0x73, 0x5a, 0xc8, 0xc8, 0xa5, 0x9d, 0x9c, 0x9f, + 0x4b, 0xac, 0xad, 0x4e, 0xa2, 0xe8, 0xfe, 0x08, 0x00, 0xf9, 0x30, 0x80, 0x31, 0x2a, 0x12, 0x6a, + 0x42, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x5e, 0x7e, 0x51, 0xba, 0x7e, 0x7a, 0x6a, 0x1e, 0xd8, + 0xff, 0xfa, 0x10, 0xa9, 0xc4, 0x82, 0xcc, 0x62, 0x82, 0x21, 0x67, 0x8d, 0x29, 0x95, 0xc4, 0x06, + 0x36, 0xc5, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xbd, 0xb4, 0x52, 0xba, 0x07, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_config.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_config.pb.go new file mode 100644 index 0000000..602f3a8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_config.pb.go @@ -0,0 +1,595 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/scan_config.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type of user agents used for scanning. +type ScanConfig_UserAgent int32 + +const ( + // The user agent is unknown. Service will default to CHROME_LINUX. + ScanConfig_USER_AGENT_UNSPECIFIED ScanConfig_UserAgent = 0 + // Chrome on Linux. This is the service default if unspecified. + ScanConfig_CHROME_LINUX ScanConfig_UserAgent = 1 + // Chrome on Android. + ScanConfig_CHROME_ANDROID ScanConfig_UserAgent = 2 + // Safari on IPhone. + ScanConfig_SAFARI_IPHONE ScanConfig_UserAgent = 3 +) + +var ScanConfig_UserAgent_name = map[int32]string{ + 0: "USER_AGENT_UNSPECIFIED", + 1: "CHROME_LINUX", + 2: "CHROME_ANDROID", + 3: "SAFARI_IPHONE", +} +var ScanConfig_UserAgent_value = map[string]int32{ + "USER_AGENT_UNSPECIFIED": 0, + "CHROME_LINUX": 1, + "CHROME_ANDROID": 2, + "SAFARI_IPHONE": 3, +} + +func (x ScanConfig_UserAgent) String() string { + return proto.EnumName(ScanConfig_UserAgent_name, int32(x)) +} +func (ScanConfig_UserAgent) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 0} +} + +// Cloud platforms supported by Cloud Web Security Scanner. +type ScanConfig_TargetPlatform int32 + +const ( + // The target platform is unknown. Requests with this enum value will be + // rejected with INVALID_ARGUMENT error. + ScanConfig_TARGET_PLATFORM_UNSPECIFIED ScanConfig_TargetPlatform = 0 + // Google App Engine service. 
+ ScanConfig_APP_ENGINE ScanConfig_TargetPlatform = 1 + // Google Compute Engine service. + ScanConfig_COMPUTE ScanConfig_TargetPlatform = 2 +) + +var ScanConfig_TargetPlatform_name = map[int32]string{ + 0: "TARGET_PLATFORM_UNSPECIFIED", + 1: "APP_ENGINE", + 2: "COMPUTE", +} +var ScanConfig_TargetPlatform_value = map[string]int32{ + "TARGET_PLATFORM_UNSPECIFIED": 0, + "APP_ENGINE": 1, + "COMPUTE": 2, +} + +func (x ScanConfig_TargetPlatform) String() string { + return proto.EnumName(ScanConfig_TargetPlatform_name, int32(x)) +} +func (ScanConfig_TargetPlatform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 1} +} + +// A ScanConfig resource contains the configurations to launch a scan. +type ScanConfig struct { + // The resource name of the ScanConfig. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}'. The ScanConfig IDs are + // generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // The user provided display name of the ScanConfig. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The maximum QPS during scanning. A valid value ranges from 5 to 20 + // inclusively. If the field is unspecified or its value is set 0, server will + // default to 15. Other values outside of [5, 20] range will be rejected with + // INVALID_ARGUMENT error. + MaxQps int32 `protobuf:"varint,3,opt,name=max_qps,json=maxQps,proto3" json:"max_qps,omitempty"` + // Required. + // The starting URLs from which the scanner finds site pages. + StartingUrls []string `protobuf:"bytes,4,rep,name=starting_urls,json=startingUrls,proto3" json:"starting_urls,omitempty"` + // The authentication configuration. If specified, service will use the + // authentication configuration during scanning. + Authentication *ScanConfig_Authentication `protobuf:"bytes,5,opt,name=authentication,proto3" json:"authentication,omitempty"` + // The user agent used during scanning. + UserAgent ScanConfig_UserAgent `protobuf:"varint,6,opt,name=user_agent,json=userAgent,proto3,enum=google.cloud.websecurityscanner.v1alpha.ScanConfig_UserAgent" json:"user_agent,omitempty"` + // The blacklist URL patterns as described in + // https://cloud.google.com/security-scanner/docs/excluded-urls + BlacklistPatterns []string `protobuf:"bytes,7,rep,name=blacklist_patterns,json=blacklistPatterns,proto3" json:"blacklist_patterns,omitempty"` + // The schedule of the ScanConfig. + Schedule *ScanConfig_Schedule `protobuf:"bytes,8,opt,name=schedule,proto3" json:"schedule,omitempty"` + // Set of Cloud Platforms targeted by the scan. If empty, APP_ENGINE will be + // used as a default. 
+ TargetPlatforms []ScanConfig_TargetPlatform `protobuf:"varint,9,rep,packed,name=target_platforms,json=targetPlatforms,proto3,enum=google.cloud.websecurityscanner.v1alpha.ScanConfig_TargetPlatform" json:"target_platforms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig) Reset() { *m = ScanConfig{} } +func (m *ScanConfig) String() string { return proto.CompactTextString(m) } +func (*ScanConfig) ProtoMessage() {} +func (*ScanConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0} +} +func (m *ScanConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig.Unmarshal(m, b) +} +func (m *ScanConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig.Marshal(b, m, deterministic) +} +func (dst *ScanConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig.Merge(dst, src) +} +func (m *ScanConfig) XXX_Size() int { + return xxx_messageInfo_ScanConfig.Size(m) +} +func (m *ScanConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig proto.InternalMessageInfo + +func (m *ScanConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ScanConfig) GetMaxQps() int32 { + if m != nil { + return m.MaxQps + } + return 0 +} + +func (m *ScanConfig) GetStartingUrls() []string { + if m != nil { + return m.StartingUrls + } + return nil +} + +func (m *ScanConfig) GetAuthentication() *ScanConfig_Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *ScanConfig) GetUserAgent() ScanConfig_UserAgent { + if m != nil { + return m.UserAgent + } + return ScanConfig_USER_AGENT_UNSPECIFIED +} + +func (m *ScanConfig) GetBlacklistPatterns() []string { + if m != nil { + return m.BlacklistPatterns + } + return nil +} + +func (m *ScanConfig) GetSchedule() *ScanConfig_Schedule { + if m != nil { + return m.Schedule + } + return nil +} + +func (m *ScanConfig) GetTargetPlatforms() []ScanConfig_TargetPlatform { + if m != nil { + return m.TargetPlatforms + } + return nil +} + +// Scan authentication configuration. +type ScanConfig_Authentication struct { + // Required. 
+ // Authentication configuration + // + // Types that are valid to be assigned to Authentication: + // *ScanConfig_Authentication_GoogleAccount_ + // *ScanConfig_Authentication_CustomAccount_ + Authentication isScanConfig_Authentication_Authentication `protobuf_oneof:"authentication"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication) Reset() { *m = ScanConfig_Authentication{} } +func (m *ScanConfig_Authentication) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication) ProtoMessage() {} +func (*ScanConfig_Authentication) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 0} +} +func (m *ScanConfig_Authentication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication.Merge(dst, src) +} +func (m *ScanConfig_Authentication) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication.Size(m) +} +func (m *ScanConfig_Authentication) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication proto.InternalMessageInfo + +type isScanConfig_Authentication_Authentication interface { + isScanConfig_Authentication_Authentication() +} + +type ScanConfig_Authentication_GoogleAccount_ struct { + GoogleAccount *ScanConfig_Authentication_GoogleAccount `protobuf:"bytes,1,opt,name=google_account,json=googleAccount,proto3,oneof"` +} + +type ScanConfig_Authentication_CustomAccount_ struct { + CustomAccount *ScanConfig_Authentication_CustomAccount `protobuf:"bytes,2,opt,name=custom_account,json=customAccount,proto3,oneof"` +} + +func (*ScanConfig_Authentication_GoogleAccount_) isScanConfig_Authentication_Authentication() {} + +func (*ScanConfig_Authentication_CustomAccount_) isScanConfig_Authentication_Authentication() {} + +func (m *ScanConfig_Authentication) GetAuthentication() isScanConfig_Authentication_Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *ScanConfig_Authentication) GetGoogleAccount() *ScanConfig_Authentication_GoogleAccount { + if x, ok := m.GetAuthentication().(*ScanConfig_Authentication_GoogleAccount_); ok { + return x.GoogleAccount + } + return nil +} + +func (m *ScanConfig_Authentication) GetCustomAccount() *ScanConfig_Authentication_CustomAccount { + if x, ok := m.GetAuthentication().(*ScanConfig_Authentication_CustomAccount_); ok { + return x.CustomAccount + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ScanConfig_Authentication) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ScanConfig_Authentication_OneofMarshaler, _ScanConfig_Authentication_OneofUnmarshaler, _ScanConfig_Authentication_OneofSizer, []interface{}{ + (*ScanConfig_Authentication_GoogleAccount_)(nil), + (*ScanConfig_Authentication_CustomAccount_)(nil), + } +} + +func _ScanConfig_Authentication_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ScanConfig_Authentication) + // authentication + switch x := m.Authentication.(type) { + case *ScanConfig_Authentication_GoogleAccount_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleAccount); err != nil { + return err + } + case *ScanConfig_Authentication_CustomAccount_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomAccount); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ScanConfig_Authentication.Authentication has unexpected type %T", x) + } + return nil +} + +func _ScanConfig_Authentication_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ScanConfig_Authentication) + switch tag { + case 1: // authentication.google_account + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ScanConfig_Authentication_GoogleAccount) + err := b.DecodeMessage(msg) + m.Authentication = &ScanConfig_Authentication_GoogleAccount_{msg} + return true, err + case 2: // authentication.custom_account + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ScanConfig_Authentication_CustomAccount) + err := b.DecodeMessage(msg) + m.Authentication = &ScanConfig_Authentication_CustomAccount_{msg} + return true, err + default: + return false, nil + } +} + +func _ScanConfig_Authentication_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ScanConfig_Authentication) + // authentication + switch x := m.Authentication.(type) { + case *ScanConfig_Authentication_GoogleAccount_: + s := proto.Size(x.GoogleAccount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ScanConfig_Authentication_CustomAccount_: + s := proto.Size(x.CustomAccount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes authentication configuration that uses a Google account. +type ScanConfig_Authentication_GoogleAccount struct { + // Required. + // The user name of the Google account. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // Input only. + // Required. + // The password of the Google account. The credential is stored encrypted + // and not returned in any response. 
+ Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication_GoogleAccount) Reset() { + *m = ScanConfig_Authentication_GoogleAccount{} +} +func (m *ScanConfig_Authentication_GoogleAccount) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication_GoogleAccount) ProtoMessage() {} +func (*ScanConfig_Authentication_GoogleAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 0, 0} +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication_GoogleAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Merge(dst, src) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Size(m) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication_GoogleAccount proto.InternalMessageInfo + +func (m *ScanConfig_Authentication_GoogleAccount) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *ScanConfig_Authentication_GoogleAccount) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +// Describes authentication configuration that uses a custom account. +type ScanConfig_Authentication_CustomAccount struct { + // Required. + // The user name of the custom account. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // Input only. + // Required. + // The password of the custom account. The credential is stored encrypted + // and not returned in any response. + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + // Required. + // The login form URL of the website. 
+ LoginUrl string `protobuf:"bytes,3,opt,name=login_url,json=loginUrl,proto3" json:"login_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication_CustomAccount) Reset() { + *m = ScanConfig_Authentication_CustomAccount{} +} +func (m *ScanConfig_Authentication_CustomAccount) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication_CustomAccount) ProtoMessage() {} +func (*ScanConfig_Authentication_CustomAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 0, 1} +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication_CustomAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Merge(dst, src) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Size(m) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication_CustomAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication_CustomAccount proto.InternalMessageInfo + +func (m *ScanConfig_Authentication_CustomAccount) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *ScanConfig_Authentication_CustomAccount) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +func (m *ScanConfig_Authentication_CustomAccount) GetLoginUrl() string { + if m != nil { + return m.LoginUrl + } + return "" +} + +// Scan schedule configuration. +type ScanConfig_Schedule struct { + // A timestamp indicates when the next run will be scheduled. The value is + // refreshed by the server after each run. If unspecified, it will default + // to current server time, which means the scan will be scheduled to start + // immediately. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Required. + // The duration of time between executions in days. 
+ IntervalDurationDays int32 `protobuf:"varint,2,opt,name=interval_duration_days,json=intervalDurationDays,proto3" json:"interval_duration_days,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Schedule) Reset() { *m = ScanConfig_Schedule{} } +func (m *ScanConfig_Schedule) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Schedule) ProtoMessage() {} +func (*ScanConfig_Schedule) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_dda68c43e028bff3, []int{0, 1} +} +func (m *ScanConfig_Schedule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Schedule.Unmarshal(m, b) +} +func (m *ScanConfig_Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Schedule.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Schedule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Schedule.Merge(dst, src) +} +func (m *ScanConfig_Schedule) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Schedule.Size(m) +} +func (m *ScanConfig_Schedule) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Schedule.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Schedule proto.InternalMessageInfo + +func (m *ScanConfig_Schedule) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *ScanConfig_Schedule) GetIntervalDurationDays() int32 { + if m != nil { + return m.IntervalDurationDays + } + return 0 +} + +func init() { + proto.RegisterType((*ScanConfig)(nil), "google.cloud.websecurityscanner.v1alpha.ScanConfig") + proto.RegisterType((*ScanConfig_Authentication)(nil), "google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication") + proto.RegisterType((*ScanConfig_Authentication_GoogleAccount)(nil), "google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.GoogleAccount") + proto.RegisterType((*ScanConfig_Authentication_CustomAccount)(nil), "google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.CustomAccount") + proto.RegisterType((*ScanConfig_Schedule)(nil), "google.cloud.websecurityscanner.v1alpha.ScanConfig.Schedule") + proto.RegisterEnum("google.cloud.websecurityscanner.v1alpha.ScanConfig_UserAgent", ScanConfig_UserAgent_name, ScanConfig_UserAgent_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1alpha.ScanConfig_TargetPlatform", ScanConfig_TargetPlatform_name, ScanConfig_TargetPlatform_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/scan_config.proto", fileDescriptor_scan_config_dda68c43e028bff3) +} + +var fileDescriptor_scan_config_dda68c43e028bff3 = []byte{ + // 748 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x51, 0x6f, 0xe3, 0x44, + 0x10, 0x3e, 0x37, 0xd7, 0x36, 0x99, 0x36, 0x39, 0xdf, 0x0a, 0x1d, 0x91, 0x0f, 0xe9, 0x42, 0x79, + 0x20, 0x12, 0xc2, 0x16, 0x85, 0x17, 0x04, 0x08, 0xb9, 0x89, 0x9b, 0x46, 0xba, 0x3a, 0x66, 0x93, + 0x48, 0x07, 0x42, 0x5a, 0xb6, 0xce, 0xd6, 0x35, 0xd8, 0xbb, 0x66, 0x77, 0x7d, 0x77, 0x79, 0xe4, + 0x77, 0xf0, 0x73, 0x78, 0xe0, 0x6f, 0x21, 0x6f, 0xec, 0x5c, 0xd3, 0x7b, 0xa0, 0x2a, 0xbc, 0x79, + 0xe6, 0xf3, 0x7c, 0xdf, 0xec, 0xf8, 0x9b, 0x35, 0x7c, 0x9d, 0x08, 0x91, 0x64, 0xcc, 0x8b, 0x33, + 0x51, 0xae, 0xbc, 0x37, 0xec, 0x4a, 0xb1, 0xb8, 0x94, 0xa9, 0x5e, 0xab, 0x98, 0x72, 0xce, 0xa4, + 0xf7, 0xfa, 0x0b, 0x9a, 0x15, 0x37, 0xd4, 0xab, 0x62, 0x12, 0x0b, 0x7e, 0x9d, 
0x26, 0x6e, 0x21, + 0x85, 0x16, 0xe8, 0xd3, 0x4d, 0xa9, 0x6b, 0x4a, 0xdd, 0xf7, 0x4b, 0xdd, 0xba, 0xd4, 0xf9, 0xa8, + 0xd6, 0xa0, 0x45, 0xea, 0x51, 0xce, 0x85, 0xa6, 0x3a, 0x15, 0x5c, 0x6d, 0x68, 0x9c, 0x17, 0x35, + 0x6a, 0xa2, 0xab, 0xf2, 0xda, 0xd3, 0x69, 0xce, 0x94, 0xa6, 0x79, 0xb1, 0x79, 0xe1, 0xe4, 0x2f, + 0x00, 0x98, 0xc7, 0x94, 0x8f, 0x8c, 0x38, 0x42, 0xf0, 0x98, 0xd3, 0x9c, 0xf5, 0xad, 0x81, 0x35, + 0xec, 0x60, 0xf3, 0x8c, 0x3e, 0x86, 0xe3, 0x55, 0xaa, 0x8a, 0x8c, 0xae, 0x89, 0xc1, 0xf6, 0x0c, + 0x76, 0x54, 0xe7, 0xc2, 0xea, 0x95, 0x0f, 0xe1, 0x30, 0xa7, 0x6f, 0xc9, 0xef, 0x85, 0xea, 0xb7, + 0x06, 0xd6, 0x70, 0x1f, 0x1f, 0xe4, 0xf4, 0xed, 0x0f, 0x85, 0x42, 0x9f, 0x40, 0x57, 0x69, 0x2a, + 0x75, 0xca, 0x13, 0x52, 0xca, 0x4c, 0xf5, 0x1f, 0x0f, 0x5a, 0xc3, 0x0e, 0x3e, 0x6e, 0x92, 0x4b, + 0x99, 0x29, 0xf4, 0x2b, 0xf4, 0x68, 0xa9, 0x6f, 0x18, 0xd7, 0x69, 0x6c, 0xba, 0xef, 0xef, 0x0f, + 0xac, 0xe1, 0xd1, 0xe9, 0x99, 0x7b, 0xcf, 0x21, 0xb8, 0xef, 0x4e, 0xe0, 0xfa, 0x3b, 0x4c, 0xf8, + 0x0e, 0x33, 0xfa, 0x19, 0xa0, 0x54, 0x4c, 0x12, 0x9a, 0x30, 0xae, 0xfb, 0x07, 0x03, 0x6b, 0xd8, + 0x3b, 0xfd, 0xee, 0x21, 0x3a, 0x4b, 0xc5, 0xa4, 0x5f, 0x91, 0xe0, 0x4e, 0xd9, 0x3c, 0xa2, 0xcf, + 0x01, 0x5d, 0x65, 0x34, 0xfe, 0x2d, 0x4b, 0x95, 0x26, 0x05, 0xd5, 0x9a, 0x49, 0xae, 0xfa, 0x87, + 0xe6, 0xcc, 0x4f, 0xb7, 0x48, 0x54, 0x03, 0xe8, 0x15, 0xb4, 0x55, 0x7c, 0xc3, 0x56, 0x65, 0xc6, + 0xfa, 0x6d, 0x73, 0xe4, 0x6f, 0x1f, 0xd2, 0xca, 0xbc, 0xe6, 0xc0, 0x5b, 0x36, 0x94, 0x83, 0xad, + 0xa9, 0x4c, 0x98, 0x26, 0x45, 0x46, 0xf5, 0xb5, 0x90, 0xb9, 0xea, 0x77, 0x06, 0xad, 0x61, 0xef, + 0x61, 0x43, 0x5d, 0x18, 0xae, 0xa8, 0xa6, 0xc2, 0x4f, 0xf4, 0x4e, 0xac, 0x9c, 0xbf, 0x5b, 0xd0, + 0xdb, 0x1d, 0x3c, 0x5a, 0x43, 0x6f, 0x23, 0x44, 0x68, 0x1c, 0x8b, 0x92, 0x6b, 0xe3, 0xa9, 0xa3, + 0xd3, 0xe8, 0xbf, 0x7f, 0x54, 0x77, 0x62, 0x18, 0xfc, 0x0d, 0xef, 0xc5, 0x23, 0xdc, 0x4d, 0x6e, + 0x27, 0x2a, 0xe9, 0xb8, 0x54, 0x5a, 0xe4, 0x5b, 0xe9, 0xbd, 0xff, 0x4d, 0x7a, 0x64, 0x88, 0x6f, + 0x49, 0xc7, 0xb7, 0x13, 0xce, 0x04, 0xba, 0x3b, 0xcd, 0x21, 0x07, 0xda, 0x95, 0x3d, 0x6e, 0x2d, + 0xd5, 0x36, 0xae, 0xb0, 0x82, 0x2a, 0xf5, 0x46, 0xc8, 0x55, 0xbd, 0x54, 0xdb, 0xd8, 0x59, 0x41, + 0x77, 0x47, 0xea, 0xa1, 0x44, 0xe8, 0x39, 0x74, 0x32, 0x91, 0xa4, 0xbc, 0x5a, 0x3f, 0xb3, 0x9c, + 0x1d, 0xdc, 0x36, 0x89, 0xa5, 0xcc, 0xce, 0xec, 0xbb, 0x9b, 0xe7, 0xfc, 0x61, 0x41, 0xbb, 0xf1, + 0x13, 0xfa, 0x1e, 0xba, 0x8d, 0xa3, 0x48, 0x75, 0x71, 0xd4, 0x9f, 0xd0, 0x69, 0xe6, 0xd8, 0xdc, + 0x2a, 0xee, 0xa2, 0xb9, 0x55, 0xf0, 0x71, 0x53, 0x50, 0xa5, 0xd0, 0x57, 0xf0, 0x2c, 0xe5, 0x9a, + 0xc9, 0xd7, 0x34, 0x23, 0xab, 0x52, 0x1a, 0x09, 0xb2, 0xa2, 0x6b, 0x65, 0xda, 0xdc, 0xc7, 0x1f, + 0x34, 0xe8, 0xb8, 0x06, 0xc7, 0x74, 0xad, 0x4e, 0x7e, 0x81, 0xce, 0x76, 0xbb, 0x90, 0x03, 0xcf, + 0x96, 0xf3, 0x00, 0x13, 0x7f, 0x12, 0x84, 0x0b, 0xb2, 0x0c, 0xe7, 0x51, 0x30, 0x9a, 0x9e, 0x4f, + 0x83, 0xb1, 0xfd, 0x08, 0xd9, 0x70, 0x3c, 0xba, 0xc0, 0xb3, 0xcb, 0x80, 0xbc, 0x9c, 0x86, 0xcb, + 0x57, 0xb6, 0x85, 0x10, 0xf4, 0xea, 0x8c, 0x1f, 0x8e, 0xf1, 0x6c, 0x3a, 0xb6, 0xf7, 0xd0, 0x53, + 0xe8, 0xce, 0xfd, 0x73, 0x1f, 0x4f, 0xc9, 0x34, 0xba, 0x98, 0x85, 0x81, 0xdd, 0x3a, 0x09, 0xa1, + 0xb7, 0x6b, 0x69, 0xf4, 0x02, 0x9e, 0x2f, 0x7c, 0x3c, 0x09, 0x16, 0x24, 0x7a, 0xe9, 0x2f, 0xce, + 0x67, 0xf8, 0xf2, 0x8e, 0x56, 0x0f, 0xc0, 0x8f, 0x22, 0x12, 0x84, 0x93, 0x69, 0x18, 0xd8, 0x16, + 0x3a, 0x82, 0xc3, 0xd1, 0xec, 0x32, 0x5a, 0x2e, 0x02, 0x7b, 0xef, 0xec, 0x4f, 0x0b, 0x3e, 0x8b, + 0x45, 0x7e, 0x5f, 0x7f, 0x9d, 0x3d, 0x79, 0x67, 0xb0, 0xa8, 0x9a, 0x61, 0x64, 0xfd, 0xf4, 0x63, + 0x5d, 
0x9b, 0x88, 0x8c, 0xf2, 0xc4, 0x15, 0x32, 0xf1, 0x12, 0xc6, 0xcd, 0x84, 0xbd, 0x0d, 0x44, + 0x8b, 0x54, 0xfd, 0xeb, 0xaf, 0xe4, 0x9b, 0xf7, 0xa1, 0xab, 0x03, 0xc3, 0xf2, 0xe5, 0x3f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x7d, 0xa3, 0x6b, 0x23, 0x8f, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_run.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_run.pb.go new file mode 100644 index 0000000..53a46c6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/scan_run.pb.go @@ -0,0 +1,272 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1alpha/scan_run.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Types of ScanRun execution state. +type ScanRun_ExecutionState int32 + +const ( + // Represents an invalid state caused by internal server error. This value + // should never be returned. + ScanRun_EXECUTION_STATE_UNSPECIFIED ScanRun_ExecutionState = 0 + // The scan is waiting in the queue. + ScanRun_QUEUED ScanRun_ExecutionState = 1 + // The scan is in progress. + ScanRun_SCANNING ScanRun_ExecutionState = 2 + // The scan is either finished or stopped by user. + ScanRun_FINISHED ScanRun_ExecutionState = 3 +) + +var ScanRun_ExecutionState_name = map[int32]string{ + 0: "EXECUTION_STATE_UNSPECIFIED", + 1: "QUEUED", + 2: "SCANNING", + 3: "FINISHED", +} +var ScanRun_ExecutionState_value = map[string]int32{ + "EXECUTION_STATE_UNSPECIFIED": 0, + "QUEUED": 1, + "SCANNING": 2, + "FINISHED": 3, +} + +func (x ScanRun_ExecutionState) String() string { + return proto.EnumName(ScanRun_ExecutionState_name, int32(x)) +} +func (ScanRun_ExecutionState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_013f75bb73c5af19, []int{0, 0} +} + +// Types of ScanRun result state. +type ScanRun_ResultState int32 + +const ( + // Default value. This value is returned when the ScanRun is not yet + // finished. + ScanRun_RESULT_STATE_UNSPECIFIED ScanRun_ResultState = 0 + // The scan finished without errors. + ScanRun_SUCCESS ScanRun_ResultState = 1 + // The scan finished with errors. + ScanRun_ERROR ScanRun_ResultState = 2 + // The scan was terminated by user. 
+ ScanRun_KILLED ScanRun_ResultState = 3 +) + +var ScanRun_ResultState_name = map[int32]string{ + 0: "RESULT_STATE_UNSPECIFIED", + 1: "SUCCESS", + 2: "ERROR", + 3: "KILLED", +} +var ScanRun_ResultState_value = map[string]int32{ + "RESULT_STATE_UNSPECIFIED": 0, + "SUCCESS": 1, + "ERROR": 2, + "KILLED": 3, +} + +func (x ScanRun_ResultState) String() string { + return proto.EnumName(ScanRun_ResultState_name, int32(x)) +} +func (ScanRun_ResultState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_013f75bb73c5af19, []int{0, 1} +} + +// A ScanRun is a output-only resource representing an actual run of the scan. +type ScanRun struct { + // Output only. + // The resource name of the ScanRun. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + // The ScanRun IDs are generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. + // The execution state of the ScanRun. + ExecutionState ScanRun_ExecutionState `protobuf:"varint,2,opt,name=execution_state,json=executionState,proto3,enum=google.cloud.websecurityscanner.v1alpha.ScanRun_ExecutionState" json:"execution_state,omitempty"` + // Output only. + // The result state of the ScanRun. This field is only available after the + // execution state reaches "FINISHED". + ResultState ScanRun_ResultState `protobuf:"varint,3,opt,name=result_state,json=resultState,proto3,enum=google.cloud.websecurityscanner.v1alpha.ScanRun_ResultState" json:"result_state,omitempty"` + // Output only. + // The time at which the ScanRun started. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. + // The time at which the ScanRun reached termination state - that the ScanRun + // is either finished or stopped by user. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. + // The number of URLs crawled during this ScanRun. If the scan is in progress, + // the value represents the number of URLs crawled up to now. + UrlsCrawledCount int64 `protobuf:"varint,6,opt,name=urls_crawled_count,json=urlsCrawledCount,proto3" json:"urls_crawled_count,omitempty"` + // Output only. + // The number of URLs tested during this ScanRun. If the scan is in progress, + // the value represents the number of URLs tested up to now. The number of + // URLs tested is usually larger than the number URLS crawled because + // typically a crawled URL is tested with multiple test payloads. + UrlsTestedCount int64 `protobuf:"varint,7,opt,name=urls_tested_count,json=urlsTestedCount,proto3" json:"urls_tested_count,omitempty"` + // Output only. + // Whether the scan run has found any vulnerabilities. + HasVulnerabilities bool `protobuf:"varint,8,opt,name=has_vulnerabilities,json=hasVulnerabilities,proto3" json:"has_vulnerabilities,omitempty"` + // Output only. + // The percentage of total completion ranging from 0 to 100. + // If the scan is in queue, the value is 0. + // If the scan is running, the value ranges from 0 to 100. + // If the scan is finished, the value is 100. 
+ ProgressPercent int32 `protobuf:"varint,9,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanRun) Reset() { *m = ScanRun{} } +func (m *ScanRun) String() string { return proto.CompactTextString(m) } +func (*ScanRun) ProtoMessage() {} +func (*ScanRun) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_run_013f75bb73c5af19, []int{0} +} +func (m *ScanRun) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanRun.Unmarshal(m, b) +} +func (m *ScanRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanRun.Marshal(b, m, deterministic) +} +func (dst *ScanRun) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanRun.Merge(dst, src) +} +func (m *ScanRun) XXX_Size() int { + return xxx_messageInfo_ScanRun.Size(m) +} +func (m *ScanRun) XXX_DiscardUnknown() { + xxx_messageInfo_ScanRun.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanRun proto.InternalMessageInfo + +func (m *ScanRun) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanRun) GetExecutionState() ScanRun_ExecutionState { + if m != nil { + return m.ExecutionState + } + return ScanRun_EXECUTION_STATE_UNSPECIFIED +} + +func (m *ScanRun) GetResultState() ScanRun_ResultState { + if m != nil { + return m.ResultState + } + return ScanRun_RESULT_STATE_UNSPECIFIED +} + +func (m *ScanRun) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ScanRun) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ScanRun) GetUrlsCrawledCount() int64 { + if m != nil { + return m.UrlsCrawledCount + } + return 0 +} + +func (m *ScanRun) GetUrlsTestedCount() int64 { + if m != nil { + return m.UrlsTestedCount + } + return 0 +} + +func (m *ScanRun) GetHasVulnerabilities() bool { + if m != nil { + return m.HasVulnerabilities + } + return false +} + +func (m *ScanRun) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func init() { + proto.RegisterType((*ScanRun)(nil), "google.cloud.websecurityscanner.v1alpha.ScanRun") + proto.RegisterEnum("google.cloud.websecurityscanner.v1alpha.ScanRun_ExecutionState", ScanRun_ExecutionState_name, ScanRun_ExecutionState_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1alpha.ScanRun_ResultState", ScanRun_ResultState_name, ScanRun_ResultState_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/scan_run.proto", fileDescriptor_scan_run_013f75bb73c5af19) +} + +var fileDescriptor_scan_run_013f75bb73c5af19 = []byte{ + // 547 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0xdd, 0x6e, 0xd3, 0x3c, + 0x18, 0xfe, 0xb2, 0xad, 0x6b, 0xeb, 0x4e, 0x5b, 0x3e, 0x73, 0x12, 0x8d, 0x49, 0x8b, 0x76, 0x42, + 0xf8, 0x51, 0x22, 0x86, 0x40, 0x42, 0x20, 0xa1, 0x2d, 0xf3, 0x20, 0x62, 0xca, 0x8a, 0x93, 0x20, + 0xc6, 0x49, 0xe4, 0xa6, 0x26, 0x8d, 0x94, 0xda, 0x91, 0xed, 0x6c, 0x70, 0x27, 0x5c, 0x0c, 0x17, + 0x87, 0xec, 0xa4, 0xc0, 0x34, 0xa4, 0x8d, 0xb3, 0x3e, 0xef, 0xf3, 0x57, 0xbd, 0xaf, 0x03, 0x5e, + 0x94, 0x9c, 0x97, 0x35, 0x0d, 0x8a, 0x9a, 0xb7, 0xf3, 0xe0, 0x8a, 0xce, 0x24, 0x2d, 0x5a, 0x51, + 0xa9, 0x6f, 0xb2, 0x20, 0x8c, 0x51, 0x11, 0x5c, 0x3e, 0x25, 0x75, 0xb3, 0x20, 0x81, 0xc6, 0xb9, + 0x68, 0x99, 0xdf, 0x08, 0xae, 0x38, 0x7c, 0xd0, 0xf9, 
0x7c, 0xe3, 0xf3, 0x6f, 0xfa, 0xfc, 0xde, + 0xb7, 0xbb, 0xd7, 0x17, 0x90, 0xa6, 0x0a, 0x08, 0x63, 0x5c, 0x11, 0x55, 0x71, 0x26, 0xbb, 0x98, + 0xdd, 0xfd, 0x9e, 0x35, 0x68, 0xd6, 0x7e, 0x09, 0x54, 0xb5, 0xa4, 0x52, 0x91, 0x65, 0xd3, 0x09, + 0x0e, 0x7e, 0x0c, 0xc0, 0x30, 0x29, 0x08, 0xc3, 0x2d, 0x83, 0x10, 0x6c, 0x30, 0xb2, 0xa4, 0x8e, + 0xe5, 0x5a, 0xde, 0x18, 0x9b, 0xdf, 0x70, 0x01, 0x76, 0xe8, 0x57, 0x5a, 0xb4, 0x3a, 0x34, 0x97, + 0x8a, 0x28, 0xea, 0xac, 0xb9, 0x96, 0xb7, 0x7d, 0xf8, 0xc6, 0xbf, 0xe3, 0x3f, 0xf4, 0xfb, 0x78, + 0x1f, 0xad, 0x72, 0x12, 0x1d, 0x83, 0xb7, 0xe9, 0x35, 0x0c, 0x73, 0xb0, 0x25, 0xa8, 0x6c, 0x6b, + 0xd5, 0xd7, 0xac, 0x9b, 0x9a, 0xd7, 0xff, 0x5c, 0x83, 0x4d, 0x48, 0xd7, 0x31, 0x11, 0xbf, 0x01, + 0x7c, 0x09, 0x80, 0x54, 0x44, 0xa8, 0x5c, 0xef, 0xc0, 0xd9, 0x70, 0x2d, 0x6f, 0x72, 0xb8, 0xbb, + 0x8a, 0x5f, 0x2d, 0xc8, 0x4f, 0x57, 0x0b, 0xc2, 0x63, 0xa3, 0xd6, 0x18, 0x3e, 0x07, 0x23, 0xca, + 0xe6, 0x9d, 0x71, 0x70, 0xab, 0x71, 0x48, 0xd9, 0xdc, 0xd8, 0x9e, 0x00, 0xd8, 0x8a, 0x5a, 0xe6, + 0x85, 0x20, 0x57, 0x35, 0x9d, 0xe7, 0x05, 0x6f, 0x99, 0x72, 0x36, 0x5d, 0xcb, 0x5b, 0xc7, 0xb6, + 0x66, 0xc2, 0x8e, 0x08, 0xf5, 0x1c, 0x3e, 0x02, 0xff, 0x1b, 0xb5, 0xa2, 0x52, 0xfd, 0x12, 0x0f, + 0x8d, 0x78, 0x47, 0x13, 0xa9, 0x99, 0x77, 0xda, 0x00, 0xdc, 0x5b, 0x10, 0x99, 0x5f, 0xb6, 0x35, + 0xa3, 0x82, 0xcc, 0xaa, 0xba, 0x52, 0x15, 0x95, 0xce, 0xc8, 0xb5, 0xbc, 0x11, 0x86, 0x0b, 0x22, + 0x3f, 0x5e, 0x67, 0xe0, 0x43, 0x60, 0x37, 0x82, 0x97, 0x82, 0x4a, 0x99, 0x37, 0x54, 0x14, 0x94, + 0x29, 0x67, 0xec, 0x5a, 0xde, 0x00, 0xef, 0xac, 0xe6, 0xd3, 0x6e, 0x7c, 0x70, 0x01, 0xb6, 0xaf, + 0x9f, 0x0a, 0xee, 0x83, 0xfb, 0xe8, 0x13, 0x0a, 0xb3, 0x34, 0x3a, 0x8f, 0xf3, 0x24, 0x3d, 0x4a, + 0x51, 0x9e, 0xc5, 0xc9, 0x14, 0x85, 0xd1, 0x69, 0x84, 0x4e, 0xec, 0xff, 0x20, 0x00, 0x9b, 0x1f, + 0x32, 0x94, 0xa1, 0x13, 0xdb, 0x82, 0x5b, 0x60, 0x94, 0x84, 0x47, 0x71, 0x1c, 0xc5, 0x6f, 0xed, + 0x35, 0x8d, 0x4e, 0xa3, 0x38, 0x4a, 0xde, 0xa1, 0x13, 0x7b, 0xfd, 0xe0, 0x1c, 0x4c, 0xfe, 0x38, + 0x0f, 0xdc, 0x03, 0x0e, 0x46, 0x49, 0x76, 0x96, 0xfe, 0x35, 0x74, 0x02, 0x86, 0x49, 0x16, 0x86, + 0x28, 0x49, 0x6c, 0x0b, 0x8e, 0xc1, 0x00, 0x61, 0x7c, 0x8e, 0xed, 0x35, 0x5d, 0xf6, 0x3e, 0x3a, + 0x3b, 0xd3, 0x81, 0xc7, 0xdf, 0x2d, 0xf0, 0xb8, 0xe0, 0xcb, 0xbb, 0x3e, 0x92, 0xe3, 0xad, 0xfe, + 0x95, 0x4c, 0xf5, 0xd5, 0xa6, 0xd6, 0xe7, 0x8b, 0xde, 0x58, 0xf2, 0x9a, 0xb0, 0xd2, 0xe7, 0xa2, + 0x0c, 0x4a, 0xca, 0xcc, 0x4d, 0x83, 0x8e, 0x22, 0x4d, 0x25, 0x6f, 0xfd, 0x7a, 0x5f, 0xdd, 0xa4, + 0x66, 0x9b, 0x26, 0xe5, 0xd9, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x5b, 0x43, 0xba, 0xc7, 0x02, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/web_security_scanner.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/web_security_scanner.pb.go new file mode 100644 index 0000000..213d551 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha/web_security_scanner.pb.go @@ -0,0 +1,1537 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for the `CreateScanConfig` method. +type CreateScanConfigRequest struct { + // Required. + // The parent resource name where the scan is created, which should be a + // project resource name in the format 'projects/{projectId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // The ScanConfig to be created. + ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateScanConfigRequest) Reset() { *m = CreateScanConfigRequest{} } +func (m *CreateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateScanConfigRequest) ProtoMessage() {} +func (*CreateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{0} +} +func (m *CreateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateScanConfigRequest.Unmarshal(m, b) +} +func (m *CreateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateScanConfigRequest.Merge(dst, src) +} +func (m *CreateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateScanConfigRequest.Size(m) +} +func (m *CreateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateScanConfigRequest proto.InternalMessageInfo + +func (m *CreateScanConfigRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +// Request for the `DeleteScanConfig` method. +type DeleteScanConfigRequest struct { + // Required. + // The resource name of the ScanConfig to be deleted. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteScanConfigRequest) Reset() { *m = DeleteScanConfigRequest{} } +func (m *DeleteScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteScanConfigRequest) ProtoMessage() {} +func (*DeleteScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{1} +} +func (m *DeleteScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteScanConfigRequest.Unmarshal(m, b) +} +func (m *DeleteScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteScanConfigRequest.Merge(dst, src) +} +func (m *DeleteScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_DeleteScanConfigRequest.Size(m) +} +func (m *DeleteScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteScanConfigRequest proto.InternalMessageInfo + +func (m *DeleteScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `GetScanConfig` method. +type GetScanConfigRequest struct { + // Required. + // The resource name of the ScanConfig to be returned. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetScanConfigRequest) Reset() { *m = GetScanConfigRequest{} } +func (m *GetScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetScanConfigRequest) ProtoMessage() {} +func (*GetScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{2} +} +func (m *GetScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetScanConfigRequest.Unmarshal(m, b) +} +func (m *GetScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetScanConfigRequest.Merge(dst, src) +} +func (m *GetScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetScanConfigRequest.Size(m) +} +func (m *GetScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetScanConfigRequest proto.InternalMessageInfo + +func (m *GetScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListScanConfigs` method. +type ListScanConfigsRequest struct { + // Required. + // The parent resource name, which should be a project resource name in the + // format 'projects/{projectId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. 
+ PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of ScanConfigs to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsRequest) Reset() { *m = ListScanConfigsRequest{} } +func (m *ListScanConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsRequest) ProtoMessage() {} +func (*ListScanConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{3} +} +func (m *ListScanConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsRequest.Unmarshal(m, b) +} +func (m *ListScanConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsRequest.Merge(dst, src) +} +func (m *ListScanConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsRequest.Size(m) +} +func (m *ListScanConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsRequest proto.InternalMessageInfo + +func (m *ListScanConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Request for the `UpdateScanConfigRequest` method. +type UpdateScanConfigRequest struct { + // Required. + // The ScanConfig to be updated. The name field must be set to identify the + // resource to be updated. The values of fields not covered by the mask + // will be ignored. + ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + // Required. + // The update mask applies to the resource. 
For the `FieldMask` definition, + // see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateScanConfigRequest) Reset() { *m = UpdateScanConfigRequest{} } +func (m *UpdateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateScanConfigRequest) ProtoMessage() {} +func (*UpdateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{4} +} +func (m *UpdateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateScanConfigRequest.Unmarshal(m, b) +} +func (m *UpdateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateScanConfigRequest.Merge(dst, src) +} +func (m *UpdateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateScanConfigRequest.Size(m) +} +func (m *UpdateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateScanConfigRequest proto.InternalMessageInfo + +func (m *UpdateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +func (m *UpdateScanConfigRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Response for the `ListScanConfigs` method. +type ListScanConfigsResponse struct { + // The list of ScanConfigs returned. + ScanConfigs []*ScanConfig `protobuf:"bytes,1,rep,name=scan_configs,json=scanConfigs,proto3" json:"scan_configs,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsResponse) Reset() { *m = ListScanConfigsResponse{} } +func (m *ListScanConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsResponse) ProtoMessage() {} +func (*ListScanConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{5} +} +func (m *ListScanConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsResponse.Unmarshal(m, b) +} +func (m *ListScanConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsResponse.Merge(dst, src) +} +func (m *ListScanConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsResponse.Size(m) +} +func (m *ListScanConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsResponse proto.InternalMessageInfo + +func (m *ListScanConfigsResponse) GetScanConfigs() []*ScanConfig { + if m != nil { + return m.ScanConfigs + } + return nil +} + +func (m *ListScanConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `StartScanRun` method. +type StartScanRunRequest struct { + // Required. + // The resource name of the ScanConfig to be used. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartScanRunRequest) Reset() { *m = StartScanRunRequest{} } +func (m *StartScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*StartScanRunRequest) ProtoMessage() {} +func (*StartScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{6} +} +func (m *StartScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartScanRunRequest.Unmarshal(m, b) +} +func (m *StartScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *StartScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartScanRunRequest.Merge(dst, src) +} +func (m *StartScanRunRequest) XXX_Size() int { + return xxx_messageInfo_StartScanRunRequest.Size(m) +} +func (m *StartScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartScanRunRequest proto.InternalMessageInfo + +func (m *StartScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `GetScanRun` method. +type GetScanRunRequest struct { + // Required. + // The resource name of the ScanRun to be returned. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetScanRunRequest) Reset() { *m = GetScanRunRequest{} } +func (m *GetScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*GetScanRunRequest) ProtoMessage() {} +func (*GetScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{7} +} +func (m *GetScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetScanRunRequest.Unmarshal(m, b) +} +func (m *GetScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *GetScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetScanRunRequest.Merge(dst, src) +} +func (m *GetScanRunRequest) XXX_Size() int { + return xxx_messageInfo_GetScanRunRequest.Size(m) +} +func (m *GetScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetScanRunRequest proto.InternalMessageInfo + +func (m *GetScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListScanRuns` method. +type ListScanRunsRequest struct { + // Required. + // The parent resource name, which should be a scan resource name in the + // format 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of ScanRuns to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanRunsRequest) Reset() { *m = ListScanRunsRequest{} } +func (m *ListScanRunsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanRunsRequest) ProtoMessage() {} +func (*ListScanRunsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{8} +} +func (m *ListScanRunsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanRunsRequest.Unmarshal(m, b) +} +func (m *ListScanRunsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanRunsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanRunsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanRunsRequest.Merge(dst, src) +} +func (m *ListScanRunsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanRunsRequest.Size(m) +} +func (m *ListScanRunsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanRunsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanRunsRequest proto.InternalMessageInfo + +func (m *ListScanRunsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanRunsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListScanRunsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListScanRuns` method. +type ListScanRunsResponse struct { + // The list of ScanRuns returned. + ScanRuns []*ScanRun `protobuf:"bytes,1,rep,name=scan_runs,json=scanRuns,proto3" json:"scan_runs,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanRunsResponse) Reset() { *m = ListScanRunsResponse{} } +func (m *ListScanRunsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanRunsResponse) ProtoMessage() {} +func (*ListScanRunsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{9} +} +func (m *ListScanRunsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanRunsResponse.Unmarshal(m, b) +} +func (m *ListScanRunsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanRunsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanRunsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanRunsResponse.Merge(dst, src) +} +func (m *ListScanRunsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanRunsResponse.Size(m) +} +func (m *ListScanRunsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanRunsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanRunsResponse proto.InternalMessageInfo + +func (m *ListScanRunsResponse) GetScanRuns() []*ScanRun { + if m != nil { + return m.ScanRuns + } + return nil +} + +func (m *ListScanRunsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `StopScanRun` method. 
+type StopScanRunRequest struct { + // Required. + // The resource name of the ScanRun to be stopped. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StopScanRunRequest) Reset() { *m = StopScanRunRequest{} } +func (m *StopScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*StopScanRunRequest) ProtoMessage() {} +func (*StopScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{10} +} +func (m *StopScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StopScanRunRequest.Unmarshal(m, b) +} +func (m *StopScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StopScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *StopScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StopScanRunRequest.Merge(dst, src) +} +func (m *StopScanRunRequest) XXX_Size() int { + return xxx_messageInfo_StopScanRunRequest.Size(m) +} +func (m *StopScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StopScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StopScanRunRequest proto.InternalMessageInfo + +func (m *StopScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListCrawledUrls` method. +type ListCrawledUrlsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of CrawledUrls to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCrawledUrlsRequest) Reset() { *m = ListCrawledUrlsRequest{} } +func (m *ListCrawledUrlsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCrawledUrlsRequest) ProtoMessage() {} +func (*ListCrawledUrlsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{11} +} +func (m *ListCrawledUrlsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCrawledUrlsRequest.Unmarshal(m, b) +} +func (m *ListCrawledUrlsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCrawledUrlsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCrawledUrlsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCrawledUrlsRequest.Merge(dst, src) +} +func (m *ListCrawledUrlsRequest) XXX_Size() int { + return xxx_messageInfo_ListCrawledUrlsRequest.Size(m) +} +func (m *ListCrawledUrlsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCrawledUrlsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCrawledUrlsRequest proto.InternalMessageInfo + +func (m *ListCrawledUrlsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListCrawledUrlsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListCrawledUrlsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListCrawledUrls` method. +type ListCrawledUrlsResponse struct { + // The list of CrawledUrls returned. + CrawledUrls []*CrawledUrl `protobuf:"bytes,1,rep,name=crawled_urls,json=crawledUrls,proto3" json:"crawled_urls,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCrawledUrlsResponse) Reset() { *m = ListCrawledUrlsResponse{} } +func (m *ListCrawledUrlsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCrawledUrlsResponse) ProtoMessage() {} +func (*ListCrawledUrlsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{12} +} +func (m *ListCrawledUrlsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCrawledUrlsResponse.Unmarshal(m, b) +} +func (m *ListCrawledUrlsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCrawledUrlsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCrawledUrlsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCrawledUrlsResponse.Merge(dst, src) +} +func (m *ListCrawledUrlsResponse) XXX_Size() int { + return xxx_messageInfo_ListCrawledUrlsResponse.Size(m) +} +func (m *ListCrawledUrlsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCrawledUrlsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCrawledUrlsResponse proto.InternalMessageInfo + +func (m *ListCrawledUrlsResponse) GetCrawledUrls() []*CrawledUrl { + if m != nil { + return m.CrawledUrls + } + return nil +} + +func (m *ListCrawledUrlsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `GetFinding` method. +type GetFindingRequest struct { + // Required. + // The resource name of the Finding to be returned. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFindingRequest) Reset() { *m = GetFindingRequest{} } +func (m *GetFindingRequest) String() string { return proto.CompactTextString(m) } +func (*GetFindingRequest) ProtoMessage() {} +func (*GetFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{13} +} +func (m *GetFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFindingRequest.Unmarshal(m, b) +} +func (m *GetFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFindingRequest.Marshal(b, m, deterministic) +} +func (dst *GetFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFindingRequest.Merge(dst, src) +} +func (m *GetFindingRequest) XXX_Size() int { + return xxx_messageInfo_GetFindingRequest.Size(m) +} +func (m *GetFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFindingRequest proto.InternalMessageInfo + +func (m *GetFindingRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListFindings` method. +type ListFindingsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. The expression must be in the format: + // . + // Supported field: 'finding_type'. + // Supported operator: '='. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of Findings to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsRequest) Reset() { *m = ListFindingsRequest{} } +func (m *ListFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingsRequest) ProtoMessage() {} +func (*ListFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{14} +} +func (m *ListFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsRequest.Unmarshal(m, b) +} +func (m *ListFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsRequest.Merge(dst, src) +} +func (m *ListFindingsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingsRequest.Size(m) +} +func (m *ListFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsRequest proto.InternalMessageInfo + +func (m *ListFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListFindingsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListFindings` method. +type ListFindingsResponse struct { + // The list of Findings returned. + Findings []*Finding `protobuf:"bytes,1,rep,name=findings,proto3" json:"findings,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsResponse) Reset() { *m = ListFindingsResponse{} } +func (m *ListFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingsResponse) ProtoMessage() {} +func (*ListFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{15} +} +func (m *ListFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsResponse.Unmarshal(m, b) +} +func (m *ListFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsResponse.Merge(dst, src) +} +func (m *ListFindingsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingsResponse.Size(m) +} +func (m *ListFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsResponse proto.InternalMessageInfo + +func (m *ListFindingsResponse) GetFindings() []*Finding { + if m != nil { + return m.Findings + } + return nil +} + +func (m *ListFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `ListFindingTypeStats` method. +type ListFindingTypeStatsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingTypeStatsRequest) Reset() { *m = ListFindingTypeStatsRequest{} } +func (m *ListFindingTypeStatsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingTypeStatsRequest) ProtoMessage() {} +func (*ListFindingTypeStatsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{16} +} +func (m *ListFindingTypeStatsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingTypeStatsRequest.Unmarshal(m, b) +} +func (m *ListFindingTypeStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingTypeStatsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingTypeStatsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingTypeStatsRequest.Merge(dst, src) +} +func (m *ListFindingTypeStatsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingTypeStatsRequest.Size(m) +} +func (m *ListFindingTypeStatsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingTypeStatsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingTypeStatsRequest proto.InternalMessageInfo + +func (m *ListFindingTypeStatsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// Response for the `ListFindingTypeStats` method. +type ListFindingTypeStatsResponse struct { + // The list of FindingTypeStats returned. 
+ FindingTypeStats []*FindingTypeStats `protobuf:"bytes,1,rep,name=finding_type_stats,json=findingTypeStats,proto3" json:"finding_type_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingTypeStatsResponse) Reset() { *m = ListFindingTypeStatsResponse{} } +func (m *ListFindingTypeStatsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingTypeStatsResponse) ProtoMessage() {} +func (*ListFindingTypeStatsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_80f7154cd0746ae1, []int{17} +} +func (m *ListFindingTypeStatsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingTypeStatsResponse.Unmarshal(m, b) +} +func (m *ListFindingTypeStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingTypeStatsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingTypeStatsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingTypeStatsResponse.Merge(dst, src) +} +func (m *ListFindingTypeStatsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingTypeStatsResponse.Size(m) +} +func (m *ListFindingTypeStatsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingTypeStatsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingTypeStatsResponse proto.InternalMessageInfo + +func (m *ListFindingTypeStatsResponse) GetFindingTypeStats() []*FindingTypeStats { + if m != nil { + return m.FindingTypeStats + } + return nil +} + +func init() { + proto.RegisterType((*CreateScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1alpha.CreateScanConfigRequest") + proto.RegisterType((*DeleteScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1alpha.DeleteScanConfigRequest") + proto.RegisterType((*GetScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1alpha.GetScanConfigRequest") + proto.RegisterType((*ListScanConfigsRequest)(nil), "google.cloud.websecurityscanner.v1alpha.ListScanConfigsRequest") + proto.RegisterType((*UpdateScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1alpha.UpdateScanConfigRequest") + proto.RegisterType((*ListScanConfigsResponse)(nil), "google.cloud.websecurityscanner.v1alpha.ListScanConfigsResponse") + proto.RegisterType((*StartScanRunRequest)(nil), "google.cloud.websecurityscanner.v1alpha.StartScanRunRequest") + proto.RegisterType((*GetScanRunRequest)(nil), "google.cloud.websecurityscanner.v1alpha.GetScanRunRequest") + proto.RegisterType((*ListScanRunsRequest)(nil), "google.cloud.websecurityscanner.v1alpha.ListScanRunsRequest") + proto.RegisterType((*ListScanRunsResponse)(nil), "google.cloud.websecurityscanner.v1alpha.ListScanRunsResponse") + proto.RegisterType((*StopScanRunRequest)(nil), "google.cloud.websecurityscanner.v1alpha.StopScanRunRequest") + proto.RegisterType((*ListCrawledUrlsRequest)(nil), "google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsRequest") + proto.RegisterType((*ListCrawledUrlsResponse)(nil), "google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsResponse") + proto.RegisterType((*GetFindingRequest)(nil), "google.cloud.websecurityscanner.v1alpha.GetFindingRequest") + proto.RegisterType((*ListFindingsRequest)(nil), "google.cloud.websecurityscanner.v1alpha.ListFindingsRequest") + proto.RegisterType((*ListFindingsResponse)(nil), "google.cloud.websecurityscanner.v1alpha.ListFindingsResponse") + proto.RegisterType((*ListFindingTypeStatsRequest)(nil), 
"google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest") + proto.RegisterType((*ListFindingTypeStatsResponse)(nil), "google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WebSecurityScannerClient is the client API for WebSecurityScanner service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WebSecurityScannerClient interface { + // Creates a new ScanConfig. + CreateScanConfig(ctx context.Context, in *CreateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Deletes an existing ScanConfig and its child resources. + DeleteScanConfig(ctx context.Context, in *DeleteScanConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a ScanConfig. + GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Lists ScanConfigs under a given project. + ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) + // Updates a ScanConfig. This method support partial update of a ScanConfig. + UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Start a ScanRun according to the given ScanConfig. + StartScanRun(ctx context.Context, in *StartScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // Gets a ScanRun. + GetScanRun(ctx context.Context, in *GetScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun + // stop time. + ListScanRuns(ctx context.Context, in *ListScanRunsRequest, opts ...grpc.CallOption) (*ListScanRunsResponse, error) + // Stops a ScanRun. The stopped ScanRun is returned. + StopScanRun(ctx context.Context, in *StopScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // List CrawledUrls under a given ScanRun. + ListCrawledUrls(ctx context.Context, in *ListCrawledUrlsRequest, opts ...grpc.CallOption) (*ListCrawledUrlsResponse, error) + // Gets a Finding. + GetFinding(ctx context.Context, in *GetFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // List Findings under a given ScanRun. + ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) + // List all FindingTypeStats under a given ScanRun. + ListFindingTypeStats(ctx context.Context, in *ListFindingTypeStatsRequest, opts ...grpc.CallOption) (*ListFindingTypeStatsResponse, error) +} + +type webSecurityScannerClient struct { + cc *grpc.ClientConn +} + +func NewWebSecurityScannerClient(cc *grpc.ClientConn) WebSecurityScannerClient { + return &webSecurityScannerClient{cc} +} + +func (c *webSecurityScannerClient) CreateScanConfig(ctx context.Context, in *CreateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/CreateScanConfig", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) DeleteScanConfig(ctx context.Context, in *DeleteScanConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/DeleteScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) { + out := new(ListScanConfigsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListScanConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/UpdateScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) StartScanRun(ctx context.Context, in *StartScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/StartScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetScanRun(ctx context.Context, in *GetScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListScanRuns(ctx context.Context, in *ListScanRunsRequest, opts ...grpc.CallOption) (*ListScanRunsResponse, error) { + out := new(ListScanRunsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListScanRuns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) StopScanRun(ctx context.Context, in *StopScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/StopScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListCrawledUrls(ctx context.Context, in *ListCrawledUrlsRequest, opts ...grpc.CallOption) (*ListCrawledUrlsResponse, error) { + out := new(ListCrawledUrlsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListCrawledUrls", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetFinding(ctx context.Context, in *GetFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) { + out := new(ListFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListFindingTypeStats(ctx context.Context, in *ListFindingTypeStatsRequest, opts ...grpc.CallOption) (*ListFindingTypeStatsResponse, error) { + out := new(ListFindingTypeStatsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListFindingTypeStats", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WebSecurityScannerServer is the server API for WebSecurityScanner service. +type WebSecurityScannerServer interface { + // Creates a new ScanConfig. + CreateScanConfig(context.Context, *CreateScanConfigRequest) (*ScanConfig, error) + // Deletes an existing ScanConfig and its child resources. + DeleteScanConfig(context.Context, *DeleteScanConfigRequest) (*empty.Empty, error) + // Gets a ScanConfig. + GetScanConfig(context.Context, *GetScanConfigRequest) (*ScanConfig, error) + // Lists ScanConfigs under a given project. + ListScanConfigs(context.Context, *ListScanConfigsRequest) (*ListScanConfigsResponse, error) + // Updates a ScanConfig. This method support partial update of a ScanConfig. + UpdateScanConfig(context.Context, *UpdateScanConfigRequest) (*ScanConfig, error) + // Start a ScanRun according to the given ScanConfig. + StartScanRun(context.Context, *StartScanRunRequest) (*ScanRun, error) + // Gets a ScanRun. + GetScanRun(context.Context, *GetScanRunRequest) (*ScanRun, error) + // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun + // stop time. + ListScanRuns(context.Context, *ListScanRunsRequest) (*ListScanRunsResponse, error) + // Stops a ScanRun. The stopped ScanRun is returned. + StopScanRun(context.Context, *StopScanRunRequest) (*ScanRun, error) + // List CrawledUrls under a given ScanRun. + ListCrawledUrls(context.Context, *ListCrawledUrlsRequest) (*ListCrawledUrlsResponse, error) + // Gets a Finding. + GetFinding(context.Context, *GetFindingRequest) (*Finding, error) + // List Findings under a given ScanRun. + ListFindings(context.Context, *ListFindingsRequest) (*ListFindingsResponse, error) + // List all FindingTypeStats under a given ScanRun. 
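+ //
+ // Illustrative note (not generated): a concrete server must implement every
+ // method of this interface and be registered on a *grpc.Server. A minimal
+ // sketch, assuming a hypothetical type `scannerServer` (the remaining twelve
+ // methods are elided for brevity):
+ //
+ //	type scannerServer struct{ /* ... */ }
+ //
+ //	func (s *scannerServer) GetFinding(ctx context.Context, req *GetFindingRequest) (*Finding, error) {
+ //		return &Finding{Name: req.GetName()}, nil
+ //	}
+ //
+ //	srv := grpc.NewServer()
+ //	RegisterWebSecurityScannerServer(srv, &scannerServer{})
+ //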
+ ListFindingTypeStats(context.Context, *ListFindingTypeStatsRequest) (*ListFindingTypeStatsResponse, error) +} + +func RegisterWebSecurityScannerServer(s *grpc.Server, srv WebSecurityScannerServer) { + s.RegisterService(&_WebSecurityScanner_serviceDesc, srv) +} + +func _WebSecurityScanner_CreateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).CreateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/CreateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).CreateScanConfig(ctx, req.(*CreateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_DeleteScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).DeleteScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/DeleteScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).DeleteScanConfig(ctx, req.(*DeleteScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetScanConfig(ctx, req.(*GetScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListScanConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListScanConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListScanConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListScanConfigs(ctx, req.(*ListScanConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_UpdateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(WebSecurityScannerServer).UpdateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/UpdateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).UpdateScanConfig(ctx, req.(*UpdateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_StartScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StartScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).StartScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/StartScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).StartScanRun(ctx, req.(*StartScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetScanRun(ctx, req.(*GetScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListScanRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListScanRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListScanRuns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListScanRuns(ctx, req.(*ListScanRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_StopScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StopScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).StopScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/StopScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).StopScanRun(ctx, req.(*StopScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListCrawledUrls_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCrawledUrlsRequest) + if err := dec(in); err != nil { + 
return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListCrawledUrls(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListCrawledUrls", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListCrawledUrls(ctx, req.(*ListCrawledUrlsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/GetFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetFinding(ctx, req.(*GetFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListFindings(ctx, req.(*ListFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListFindingTypeStats_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingTypeStatsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListFindingTypeStats(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1alpha.WebSecurityScanner/ListFindingTypeStats", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListFindingTypeStats(ctx, req.(*ListFindingTypeStatsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WebSecurityScanner_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.websecurityscanner.v1alpha.WebSecurityScanner", + HandlerType: (*WebSecurityScannerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateScanConfig", + Handler: _WebSecurityScanner_CreateScanConfig_Handler, + }, + { + MethodName: "DeleteScanConfig", + Handler: _WebSecurityScanner_DeleteScanConfig_Handler, + }, + { + MethodName: "GetScanConfig", + Handler: _WebSecurityScanner_GetScanConfig_Handler, + }, + { + MethodName: "ListScanConfigs", + Handler: _WebSecurityScanner_ListScanConfigs_Handler, + }, + { + MethodName: "UpdateScanConfig", + Handler: _WebSecurityScanner_UpdateScanConfig_Handler, + }, + { + MethodName: "StartScanRun", + Handler: _WebSecurityScanner_StartScanRun_Handler, + }, + { + MethodName: "GetScanRun", + Handler: 
_WebSecurityScanner_GetScanRun_Handler, + }, + { + MethodName: "ListScanRuns", + Handler: _WebSecurityScanner_ListScanRuns_Handler, + }, + { + MethodName: "StopScanRun", + Handler: _WebSecurityScanner_StopScanRun_Handler, + }, + { + MethodName: "ListCrawledUrls", + Handler: _WebSecurityScanner_ListCrawledUrls_Handler, + }, + { + MethodName: "GetFinding", + Handler: _WebSecurityScanner_GetFinding_Handler, + }, + { + MethodName: "ListFindings", + Handler: _WebSecurityScanner_ListFindings_Handler, + }, + { + MethodName: "ListFindingTypeStats", + Handler: _WebSecurityScanner_ListFindingTypeStats_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto", +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto", fileDescriptor_web_security_scanner_80f7154cd0746ae1) +} + +var fileDescriptor_web_security_scanner_80f7154cd0746ae1 = []byte{ + // 1115 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xcf, 0x6f, 0x1b, 0x45, + 0x14, 0xd6, 0xb4, 0x25, 0x4a, 0x9e, 0x53, 0x35, 0x4c, 0xa3, 0x38, 0xda, 0x14, 0xc9, 0xda, 0x03, + 0x35, 0x46, 0x78, 0x4b, 0x42, 0x80, 0x38, 0x18, 0x42, 0xdc, 0x06, 0x90, 0x5a, 0x14, 0xd9, 0x29, + 0x08, 0x2e, 0xab, 0x8d, 0x3d, 0x36, 0x4b, 0x36, 0xb3, 0xcb, 0xce, 0x98, 0x92, 0xa2, 0x1e, 0xe0, + 0xc2, 0x05, 0x4e, 0x9c, 0xb8, 0x70, 0xe1, 0xca, 0x15, 0x21, 0x8e, 0x70, 0x40, 0x3d, 0x22, 0x95, + 0x3b, 0x17, 0xfe, 0x03, 0xfe, 0x01, 0xb4, 0xb3, 0xb3, 0xf6, 0x78, 0x77, 0x6b, 0xef, 0x38, 0xca, + 0x6d, 0xe7, 0xd7, 0x9b, 0xef, 0x7d, 0xef, 0xbd, 0x79, 0x9f, 0x16, 0xf6, 0x07, 0xbe, 0x3f, 0xf0, + 0x88, 0xd5, 0xf5, 0xfc, 0x61, 0xcf, 0x7a, 0x40, 0x8e, 0x19, 0xe9, 0x0e, 0x43, 0x97, 0x9f, 0xb1, + 0xae, 0x43, 0x29, 0x09, 0xad, 0xcf, 0x5f, 0x76, 0xbc, 0xe0, 0x13, 0x27, 0x5a, 0xb2, 0x93, 0x35, + 0x5b, 0x2e, 0xd6, 0x83, 0xd0, 0xe7, 0x3e, 0xbe, 0x19, 0xdb, 0xa8, 0x0b, 0x1b, 0xf5, 0xac, 0x8d, + 0xba, 0xb4, 0x61, 0xdc, 0x90, 0x97, 0x39, 0x81, 0x6b, 0x39, 0x94, 0xfa, 0xdc, 0xe1, 0xae, 0x4f, + 0x59, 0x6c, 0xc6, 0xd8, 0x29, 0x0a, 0xa5, 0x1b, 0x3a, 0x0f, 0x3c, 0xd2, 0xb3, 0x87, 0xa1, 0x27, + 0x8f, 0x6e, 0x17, 0x3d, 0xda, 0x77, 0x69, 0xcf, 0xa5, 0x03, 0x79, 0x6c, 0x4f, 0xf3, 0x98, 0xcd, + 0xcf, 0x02, 0x62, 0x33, 0xee, 0x70, 0x6d, 0xcc, 0xd1, 0xd8, 0xee, 0xfa, 0xb4, 0xef, 0x26, 0x97, + 0xbf, 0xaa, 0x75, 0x34, 0x1c, 0x52, 0x79, 0x6e, 0x43, 0x9e, 0x13, 0xa3, 0xe3, 0x61, 0xdf, 0x22, + 0xa7, 0x01, 0x3f, 0x93, 0x8b, 0x95, 0xf4, 0x62, 0xdf, 0x25, 0x5e, 0xcf, 0x3e, 0x75, 0xd8, 0x49, + 0xbc, 0xc3, 0xfc, 0x06, 0x41, 0xb9, 0x15, 0x12, 0x87, 0x93, 0x4e, 0xd7, 0xa1, 0x2d, 0x81, 0xa8, + 0x4d, 0x3e, 0x1b, 0x12, 0xc6, 0xf1, 0x1a, 0x2c, 0x04, 0x4e, 0x48, 0x28, 0x5f, 0x47, 0x15, 0x54, + 0x5d, 0x6a, 0xcb, 0x11, 0x3e, 0x82, 0x92, 0x82, 0x7f, 0xfd, 0x52, 0x05, 0x55, 0x4b, 0x9b, 0x5b, + 0xf5, 0x82, 0x61, 0xaf, 0x2b, 0x17, 0x01, 0x1b, 0x7d, 0x9b, 0x2f, 0x41, 0xf9, 0x36, 0xf1, 0x48, + 0x1e, 0x10, 0x0c, 0x57, 0xa8, 0x73, 0x4a, 0x24, 0x0c, 0xf1, 0x6d, 0xd6, 0x60, 0xf5, 0x1d, 0xc2, + 0x8b, 0xed, 0xf5, 0x60, 0xed, 0xae, 0xcb, 0x94, 0xcd, 0x6c, 0x96, 0x8b, 0xcf, 0x01, 0x04, 0xce, + 0x80, 0xd8, 0xdc, 0x3f, 0x21, 0x54, 0x78, 0xb8, 0xd4, 0x5e, 0x8a, 0x66, 0x8e, 0xa2, 0x09, 0xbc, + 0x01, 0x62, 0x60, 0x33, 0xf7, 0x21, 0x59, 0xbf, 0x5c, 0x41, 0xd5, 0x67, 0xda, 0x8b, 0xd1, 0x44, + 0xc7, 0x7d, 0x48, 0xcc, 0x9f, 0x11, 0x94, 0xef, 0x07, 0xbd, 0x5c, 0x4a, 0x2f, 0x84, 0x3a, 0xbc, + 0x0b, 0xa5, 0xa1, 0xb8, 0x50, 0x44, 0x56, 0x00, 0x2a, 0x6d, 0x1a, 0x89, 0xd5, 0x24, 
0xf8, 0xf5, + 0x83, 0x28, 0xf8, 0xf7, 0x1c, 0x76, 0xd2, 0x86, 0x78, 0x7b, 0xf4, 0x6d, 0xfe, 0x80, 0xa0, 0x9c, + 0x61, 0x87, 0x05, 0x3e, 0x65, 0x04, 0x7f, 0x00, 0xcb, 0x0a, 0x5c, 0xb6, 0x8e, 0x2a, 0x97, 0xe7, + 0xc5, 0x5b, 0x1a, 0xe3, 0x65, 0xf8, 0x79, 0xb8, 0x46, 0xc9, 0x17, 0xdc, 0xce, 0x70, 0x7c, 0x35, + 0x9a, 0x3e, 0x4c, 0x78, 0x36, 0x5f, 0x80, 0xeb, 0x1d, 0xee, 0x84, 0x02, 0x5b, 0x7b, 0x48, 0xa7, + 0xc5, 0xf8, 0x26, 0x3c, 0x2b, 0xf3, 0x61, 0xc6, 0x46, 0x17, 0xae, 0x27, 0xee, 0xb6, 0x87, 0xf4, + 0x42, 0x33, 0xe1, 0x3b, 0x04, 0xab, 0x93, 0x77, 0x49, 0x5e, 0xef, 0xc1, 0x52, 0x52, 0xc6, 0x09, + 0xa9, 0xb7, 0xb4, 0x48, 0x8d, 0x7c, 0x5c, 0x64, 0xd2, 0x6c, 0x61, 0x3a, 0xab, 0x80, 0x3b, 0xdc, + 0x0f, 0x0a, 0x90, 0x24, 0x2b, 0xa6, 0x15, 0x3f, 0xad, 0xf7, 0x43, 0xef, 0x42, 0x79, 0x4a, 0x52, + 0x70, 0xe2, 0xba, 0x71, 0x0a, 0x2a, 0x0f, 0xbc, 0x7e, 0x0a, 0x8e, 0x6d, 0xb6, 0x4b, 0xdd, 0xb1, + 0xfd, 0xc2, 0x9c, 0xc5, 0x79, 0x75, 0x10, 0xbf, 0xf8, 0xd3, 0x28, 0xfb, 0x0a, 0xc5, 0x89, 0x25, + 0xb7, 0xce, 0x24, 0x6c, 0x0d, 0x16, 0xfa, 0xae, 0xc7, 0x49, 0x28, 0xef, 0x95, 0xa3, 0x14, 0x91, + 0x97, 0xa7, 0x12, 0x79, 0x25, 0x45, 0xe4, 0xb7, 0x32, 0xe1, 0xc6, 0x18, 0x24, 0x8b, 0x77, 0x61, + 0x51, 0x36, 0x2d, 0xfd, 0x7c, 0x4b, 0x7c, 0x1f, 0x59, 0x28, 0xcc, 0xdd, 0x36, 0x6c, 0x28, 0x68, + 0x8e, 0xce, 0x02, 0xd2, 0x89, 0x9a, 0xe5, 0x0c, 0x66, 0xa2, 0x9e, 0x74, 0x23, 0xff, 0x9c, 0xf4, + 0x66, 0x00, 0x38, 0xdb, 0x82, 0xa5, 0x5f, 0x3b, 0xba, 0x7e, 0x8d, 0xcd, 0xaf, 0xf4, 0x53, 0x33, + 0x9b, 0x7f, 0xad, 0x02, 0xfe, 0x90, 0x1c, 0x77, 0xa4, 0x85, 0x4e, 0x6c, 0x01, 0x3f, 0x46, 0xb0, + 0x92, 0x6e, 0x9a, 0x78, 0x4f, 0x23, 0x25, 0x73, 0xfb, 0xad, 0x31, 0xcf, 0xbb, 0x6a, 0x36, 0xbf, + 0x7e, 0xf2, 0xef, 0xf7, 0x97, 0x5e, 0x33, 0xab, 0x23, 0x81, 0xf0, 0x65, 0x4c, 0x63, 0x33, 0x08, + 0xfd, 0x4f, 0x49, 0x97, 0x33, 0xab, 0xf6, 0xc8, 0x52, 0x1e, 0xdf, 0x86, 0xda, 0x81, 0xf0, 0x8f, + 0x08, 0x56, 0xd2, 0x6d, 0x57, 0xc3, 0x95, 0xa7, 0x74, 0x6c, 0x63, 0x2d, 0xd3, 0x7c, 0xee, 0x44, + 0xb2, 0xc4, 0xbc, 0x25, 0xd0, 0xd6, 0x6a, 0x0a, 0xda, 0xa8, 0x78, 0x14, 0xac, 0x2a, 0x54, 0xab, + 0xf6, 0x08, 0xff, 0x82, 0xe0, 0xea, 0x44, 0xa3, 0xc7, 0xcd, 0xc2, 0xe8, 0xf2, 0x04, 0xc2, 0x7c, + 0x2c, 0x4b, 0xdc, 0xb8, 0x38, 0xee, 0x3f, 0x11, 0x5c, 0x4b, 0xb5, 0x55, 0xfc, 0x56, 0xe1, 0xab, + 0xf3, 0xe5, 0x8a, 0xb1, 0x37, 0xbf, 0x81, 0xb8, 0x74, 0xf2, 0x1c, 0x99, 0x9e, 0x2e, 0xf8, 0x6f, + 0x04, 0x2b, 0x69, 0x39, 0xa3, 0x91, 0x21, 0x4f, 0x51, 0x42, 0xf3, 0x85, 0xe1, 0x3d, 0x81, 0xbe, + 0xb5, 0xf9, 0xca, 0x18, 0xbd, 0xaa, 0xa4, 0xa7, 0x87, 0x64, 0x32, 0xf1, 0x7f, 0x43, 0xb0, 0xac, + 0x6a, 0x0b, 0xfc, 0x46, 0x71, 0x40, 0x59, 0x49, 0x62, 0x68, 0xb7, 0x6f, 0x73, 0x47, 0xf8, 0xb2, + 0x65, 0xd6, 0x8b, 0xa6, 0x54, 0x83, 0x45, 0xf7, 0x36, 0x50, 0x0d, 0xff, 0x8a, 0x00, 0xc6, 0x62, + 0x07, 0x37, 0x74, 0x0b, 0xe2, 0x5c, 0xb8, 0x77, 0x05, 0xee, 0x6d, 0xbc, 0x55, 0x10, 0xb7, 0x95, + 0xc8, 0x94, 0xa8, 0x2a, 0x1e, 0x23, 0x58, 0x56, 0x15, 0x91, 0x06, 0xeb, 0x39, 0xa2, 0xcd, 0x68, + 0xce, 0x79, 0x5a, 0x16, 0x43, 0x8e, 0x2b, 0x99, 0x62, 0x48, 0x05, 0x61, 0xe4, 0x0d, 0xfe, 0x1d, + 0x41, 0x49, 0x51, 0x53, 0x78, 0x57, 0x23, 0x7f, 0xd2, 0x1a, 0x6c, 0x8e, 0x30, 0xb4, 0x04, 0xf6, + 0xa6, 0xf9, 0xfa, 0x1c, 0x61, 0x68, 0x30, 0xee, 0x07, 0x51, 0x22, 0xfd, 0x23, 0x1f, 0x29, 0x45, + 0x78, 0x69, 0x3e, 0x52, 0x59, 0x85, 0xa8, 0xf9, 0x48, 0xe5, 0x68, 0xbe, 0xa4, 0xcc, 0xf1, 0xdb, + 0x85, 0xe3, 0xa2, 0x7a, 0x67, 0xa9, 0x32, 0xef, 0x8f, 0xb8, 0x54, 0x64, 0xaf, 0xd7, 0x2b, 0x95, + 0x49, 0xd1, 0x67, 0x68, 0x2b, 0x26, 0xf3, 0x40, 0xf8, 0xb1, 0x87, 0xdf, 0xd4, 0x8f, 0x51, 0xf2, + 0x8f, 0x41, 
0x54, 0xcd, 0x13, 0x59, 0x35, 0x89, 0xac, 0xd3, 0xac, 0x9a, 0x94, 0x22, 0xd5, 0xac, + 0x9a, 0xb4, 0x96, 0xcc, 0xf3, 0x4a, 0x27, 0x3a, 0x23, 0x15, 0xf9, 0xdf, 0xa4, 0x58, 0x1d, 0xa9, + 0x2e, 0x7c, 0x7b, 0x1e, 0x7c, 0x69, 0x75, 0x69, 0xdc, 0x39, 0xa7, 0x15, 0xe9, 0xed, 0xfb, 0xc2, + 0xdb, 0x77, 0xf1, 0xc1, 0xb9, 0xbc, 0x1d, 0xd9, 0xdd, 0xff, 0x09, 0xc1, 0x8b, 0x5d, 0xff, 0xb4, + 0x28, 0xb8, 0xfd, 0x72, 0x56, 0x7f, 0x1e, 0x46, 0x9a, 0xea, 0x10, 0x7d, 0xfc, 0x91, 0xb4, 0x31, + 0xf0, 0x3d, 0x87, 0x0e, 0xea, 0x7e, 0x38, 0xb0, 0x06, 0x84, 0x0a, 0xc5, 0x65, 0xc5, 0x4b, 0x4e, + 0xe0, 0xb2, 0x99, 0x7f, 0x94, 0x76, 0xb3, 0x4b, 0xc7, 0x0b, 0xc2, 0xca, 0xd6, 0xff, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x32, 0x00, 0x5e, 0x95, 0x10, 0x14, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/crawled_url.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/crawled_url.pb.go new file mode 100644 index 0000000..6f4e71d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/crawled_url.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/crawled_url.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A CrawledUrl resource represents a URL that was crawled during a ScanRun. Web +// Security Scanner Service crawls the web applications, following all links +// within the scope of sites, to find the URLs to test against. +type CrawledUrl struct { + // Output only. + // The http method of the request that was used to visit the URL, in + // uppercase. + HttpMethod string `protobuf:"bytes,1,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty"` + // Output only. + // The URL that was crawled. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // Output only. + // The body of the request that was used to visit the URL. 
+ Body string `protobuf:"bytes,3,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CrawledUrl) Reset() { *m = CrawledUrl{} } +func (m *CrawledUrl) String() string { return proto.CompactTextString(m) } +func (*CrawledUrl) ProtoMessage() {} +func (*CrawledUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_crawled_url_47969d3dbeaecdf6, []int{0} +} +func (m *CrawledUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CrawledUrl.Unmarshal(m, b) +} +func (m *CrawledUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CrawledUrl.Marshal(b, m, deterministic) +} +func (dst *CrawledUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_CrawledUrl.Merge(dst, src) +} +func (m *CrawledUrl) XXX_Size() int { + return xxx_messageInfo_CrawledUrl.Size(m) +} +func (m *CrawledUrl) XXX_DiscardUnknown() { + xxx_messageInfo_CrawledUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_CrawledUrl proto.InternalMessageInfo + +func (m *CrawledUrl) GetHttpMethod() string { + if m != nil { + return m.HttpMethod + } + return "" +} + +func (m *CrawledUrl) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *CrawledUrl) GetBody() string { + if m != nil { + return m.Body + } + return "" +} + +func init() { + proto.RegisterType((*CrawledUrl)(nil), "google.cloud.websecurityscanner.v1beta.CrawledUrl") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/crawled_url.proto", fileDescriptor_crawled_url_47969d3dbeaecdf6) +} + +var fileDescriptor_crawled_url_47969d3dbeaecdf6 = []byte{ + // 235 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x31, 0x4b, 0x04, 0x31, + 0x10, 0x85, 0x59, 0x4f, 0x04, 0xc7, 0x42, 0x49, 0xb5, 0x88, 0xa0, 0x58, 0x88, 0x58, 0x24, 0x88, + 0x8d, 0x60, 0x77, 0xd6, 0xc2, 0xa1, 0x08, 0x62, 0x73, 0xcc, 0x66, 0x87, 0xdc, 0x42, 0x2e, 0xb3, + 0xcc, 0xce, 0x7a, 0xdc, 0x4f, 0xf1, 0xdf, 0xca, 0x26, 0x07, 0x16, 0x57, 0x68, 0x37, 0xbc, 0xc7, + 0xf7, 0xf2, 0x11, 0x78, 0x0c, 0xcc, 0x21, 0x92, 0xf3, 0x91, 0xc7, 0xd6, 0x6d, 0xa8, 0x19, 0xc8, + 0x8f, 0xd2, 0xe9, 0x76, 0xf0, 0x98, 0x12, 0x89, 0xfb, 0xba, 0x6f, 0x48, 0xd1, 0x79, 0xc1, 0x4d, + 0xa4, 0x76, 0x39, 0x4a, 0xb4, 0xbd, 0xb0, 0xb2, 0xb9, 0x29, 0xa4, 0xcd, 0xa4, 0xdd, 0x27, 0x6d, + 0x21, 0xcf, 0x2f, 0x76, 0x2f, 0x60, 0xdf, 0x39, 0x4c, 0x89, 0x15, 0xb5, 0xe3, 0x34, 0x94, 0x95, + 0xeb, 0x37, 0x80, 0xe7, 0x32, 0xfd, 0x2e, 0xd1, 0x5c, 0xc2, 0xc9, 0x4a, 0xb5, 0x5f, 0xae, 0x49, + 0x57, 0xdc, 0xd6, 0xd5, 0x55, 0x75, 0x7b, 0xfc, 0x0a, 0x53, 0xf4, 0x92, 0x13, 0x73, 0x06, 0xb3, + 0x51, 0x62, 0x7d, 0x90, 0x8b, 0xe9, 0x34, 0x06, 0x0e, 0x1b, 0x6e, 0xb7, 0xf5, 0x2c, 0x47, 0xf9, + 0x9e, 0x7f, 0x57, 0x70, 0xe7, 0x79, 0x6d, 0xff, 0x67, 0x38, 0x3f, 0xfd, 0x35, 0x58, 0x4c, 0x52, + 0x8b, 0xea, 0xf3, 0x63, 0x87, 0x06, 0x8e, 0x98, 0x82, 0x65, 0x09, 0x2e, 0x50, 0xca, 0xca, 0xae, + 0x54, 0xd8, 0x77, 0xc3, 0x5f, 0xbf, 0xf6, 0xb4, 0xdf, 0x34, 0x47, 0x79, 0xe4, 0xe1, 0x27, 0x00, + 0x00, 0xff, 0xff, 0xac, 0xe1, 0xa8, 0x52, 0x79, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding.pb.go new file mode 100644 index 0000000..c1a1707 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding.pb.go @@ -0,0 +1,269 
@@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/finding.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Finding resource represents a vulnerability instance identified during a +// ScanRun. +type Finding struct { + // Output only. + // The resource name of the Finding. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. + // The finding IDs are generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. + // The type of the Finding. + // Detailed and up-to-date information on findings can be found here: + // https://cloud.google.com/security-scanner/docs/scan-result-details + FindingType string `protobuf:"bytes,2,opt,name=finding_type,json=findingType,proto3" json:"finding_type,omitempty"` + // Output only. + // The http method of the request that triggered the vulnerability, in + // uppercase. + HttpMethod string `protobuf:"bytes,3,opt,name=http_method,json=httpMethod,proto3" json:"http_method,omitempty"` + // Output only. + // The URL produced by the server-side fuzzer and used in the request that + // triggered the vulnerability. + FuzzedUrl string `protobuf:"bytes,4,opt,name=fuzzed_url,json=fuzzedUrl,proto3" json:"fuzzed_url,omitempty"` + // Output only. + // The body of the request that triggered the vulnerability. + Body string `protobuf:"bytes,5,opt,name=body,proto3" json:"body,omitempty"` + // Output only. + // The description of the vulnerability. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // Output only. + // The URL containing human-readable payload that user can leverage to + // reproduce the vulnerability. + ReproductionUrl string `protobuf:"bytes,7,opt,name=reproduction_url,json=reproductionUrl,proto3" json:"reproduction_url,omitempty"` + // Output only. + // If the vulnerability was originated from nested IFrame, the immediate + // parent IFrame is reported. + FrameUrl string `protobuf:"bytes,8,opt,name=frame_url,json=frameUrl,proto3" json:"frame_url,omitempty"` + // Output only. + // The URL where the browser lands when the vulnerability is detected. + FinalUrl string `protobuf:"bytes,9,opt,name=final_url,json=finalUrl,proto3" json:"final_url,omitempty"` + // Output only. + // The tracking ID uniquely identifies a vulnerability instance across + // multiple ScanRuns. + TrackingId string `protobuf:"bytes,10,opt,name=tracking_id,json=trackingId,proto3" json:"tracking_id,omitempty"` + // Output only. + // An addon containing information reported for a vulnerability with an HTML + // form, if any. + Form *Form `protobuf:"bytes,16,opt,name=form,proto3" json:"form,omitempty"` + // Output only. 
+ // An addon containing information about outdated libraries. + OutdatedLibrary *OutdatedLibrary `protobuf:"bytes,11,opt,name=outdated_library,json=outdatedLibrary,proto3" json:"outdated_library,omitempty"` + // Output only. + // An addon containing detailed information regarding any resource causing the + // vulnerability such as JavaScript sources, image, audio files, etc. + ViolatingResource *ViolatingResource `protobuf:"bytes,12,opt,name=violating_resource,json=violatingResource,proto3" json:"violating_resource,omitempty"` + // Output only. + // An addon containing information about vulnerable or missing HTTP headers. + VulnerableHeaders *VulnerableHeaders `protobuf:"bytes,15,opt,name=vulnerable_headers,json=vulnerableHeaders,proto3" json:"vulnerable_headers,omitempty"` + // Output only. + // An addon containing information about request parameters which were found + // to be vulnerable. + VulnerableParameters *VulnerableParameters `protobuf:"bytes,13,opt,name=vulnerable_parameters,json=vulnerableParameters,proto3" json:"vulnerable_parameters,omitempty"` + // Output only. + // An addon containing information reported for an XSS, if any. + Xss *Xss `protobuf:"bytes,14,opt,name=xss,proto3" json:"xss,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Finding) Reset() { *m = Finding{} } +func (m *Finding) String() string { return proto.CompactTextString(m) } +func (*Finding) ProtoMessage() {} +func (*Finding) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_18e691d702d328cd, []int{0} +} +func (m *Finding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Finding.Unmarshal(m, b) +} +func (m *Finding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Finding.Marshal(b, m, deterministic) +} +func (dst *Finding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Finding.Merge(dst, src) +} +func (m *Finding) XXX_Size() int { + return xxx_messageInfo_Finding.Size(m) +} +func (m *Finding) XXX_DiscardUnknown() { + xxx_messageInfo_Finding.DiscardUnknown(m) +} + +var xxx_messageInfo_Finding proto.InternalMessageInfo + +func (m *Finding) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Finding) GetFindingType() string { + if m != nil { + return m.FindingType + } + return "" +} + +func (m *Finding) GetHttpMethod() string { + if m != nil { + return m.HttpMethod + } + return "" +} + +func (m *Finding) GetFuzzedUrl() string { + if m != nil { + return m.FuzzedUrl + } + return "" +} + +func (m *Finding) GetBody() string { + if m != nil { + return m.Body + } + return "" +} + +func (m *Finding) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Finding) GetReproductionUrl() string { + if m != nil { + return m.ReproductionUrl + } + return "" +} + +func (m *Finding) GetFrameUrl() string { + if m != nil { + return m.FrameUrl + } + return "" +} + +func (m *Finding) GetFinalUrl() string { + if m != nil { + return m.FinalUrl + } + return "" +} + +func (m *Finding) GetTrackingId() string { + if m != nil { + return m.TrackingId + } + return "" +} + +func (m *Finding) GetForm() *Form { + if m != nil { + return m.Form + } + return nil +} + +func (m *Finding) GetOutdatedLibrary() *OutdatedLibrary { + if m != nil { + return m.OutdatedLibrary + } + return nil +} + +func (m *Finding) GetViolatingResource() *ViolatingResource { + if m != nil { + return m.ViolatingResource + } + return nil +} + +func (m 
*Finding) GetVulnerableHeaders() *VulnerableHeaders { + if m != nil { + return m.VulnerableHeaders + } + return nil +} + +func (m *Finding) GetVulnerableParameters() *VulnerableParameters { + if m != nil { + return m.VulnerableParameters + } + return nil +} + +func (m *Finding) GetXss() *Xss { + if m != nil { + return m.Xss + } + return nil +} + +func init() { + proto.RegisterType((*Finding)(nil), "google.cloud.websecurityscanner.v1beta.Finding") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/finding.proto", fileDescriptor_finding_18e691d702d328cd) +} + +var fileDescriptor_finding_18e691d702d328cd = []byte{ + // 517 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x5f, 0x8b, 0x13, 0x31, + 0x14, 0xc5, 0xa9, 0xbb, 0x76, 0xb7, 0x99, 0x6a, 0xbb, 0x41, 0x21, 0xac, 0xca, 0x56, 0x1f, 0x64, + 0xfd, 0xc3, 0x0c, 0xfe, 0x01, 0xf1, 0x1f, 0xc8, 0x3e, 0x2c, 0x0a, 0x8a, 0x65, 0x50, 0x59, 0x7c, + 0x29, 0x99, 0xc9, 0xed, 0x34, 0x38, 0x93, 0x8c, 0x49, 0xa6, 0xda, 0xfd, 0x20, 0x7e, 0x46, 0x3f, + 0x86, 0xe4, 0x66, 0xba, 0xd4, 0x5d, 0xc1, 0xf6, 0x2d, 0x73, 0xce, 0x3d, 0xe7, 0xc7, 0x25, 0x64, + 0xc8, 0xd3, 0x42, 0xeb, 0xa2, 0x84, 0x24, 0x2f, 0x75, 0x23, 0x92, 0x1f, 0x90, 0x59, 0xc8, 0x1b, + 0x23, 0xdd, 0xc2, 0xe6, 0x5c, 0x29, 0x30, 0xc9, 0xfc, 0x51, 0x06, 0x8e, 0x27, 0x53, 0xa9, 0x84, + 0x54, 0x45, 0x5c, 0x1b, 0xed, 0x34, 0xbd, 0x1b, 0x52, 0x31, 0xa6, 0xe2, 0x8b, 0xa9, 0x38, 0xa4, + 0xf6, 0x6f, 0xb6, 0xed, 0xbc, 0x96, 0x09, 0x57, 0x4a, 0x3b, 0xee, 0xa4, 0x56, 0x36, 0xb4, 0xec, + 0xbf, 0xd8, 0x8c, 0x3d, 0xe1, 0x42, 0x68, 0x15, 0xb2, 0x77, 0x7e, 0x77, 0xc9, 0xce, 0x71, 0xd0, + 0x29, 0x25, 0xdb, 0x8a, 0x57, 0xc0, 0x3a, 0xa3, 0xce, 0x61, 0x2f, 0xc5, 0x33, 0xbd, 0x4d, 0xfa, + 0xcb, 0x98, 0x5b, 0xd4, 0xc0, 0x2e, 0xa1, 0x17, 0xb5, 0xda, 0xa7, 0x45, 0x0d, 0xf4, 0x80, 0x44, + 0x33, 0xe7, 0xea, 0x49, 0x05, 0x6e, 0xa6, 0x05, 0xdb, 0xc2, 0x09, 0xe2, 0xa5, 0x0f, 0xa8, 0xd0, + 0x5b, 0x84, 0x4c, 0x9b, 0xd3, 0x53, 0x10, 0x93, 0xc6, 0x94, 0x6c, 0x1b, 0xfd, 0x5e, 0x50, 0x3e, + 0x9b, 0xd2, 0x63, 0x33, 0x2d, 0x16, 0xec, 0x72, 0xc0, 0xfa, 0x33, 0x1d, 0x91, 0x48, 0x80, 0xcd, + 0x8d, 0xac, 0xfd, 0xa2, 0xac, 0x1b, 0xa8, 0x2b, 0x12, 0xbd, 0x47, 0x86, 0x06, 0x6a, 0xa3, 0x45, + 0x93, 0xfb, 0x6f, 0xac, 0xde, 0xc1, 0xb1, 0xc1, 0xaa, 0xee, 0x01, 0x37, 0x48, 0x6f, 0x6a, 0x78, + 0x05, 0x38, 0xb3, 0x8b, 0x33, 0xbb, 0x28, 0x2c, 0x4d, 0xa9, 0x78, 0x89, 0x66, 0xaf, 0x35, 0xbd, + 0xe0, 0xcd, 0x03, 0x12, 0x39, 0xc3, 0xf3, 0x6f, 0x7e, 0x7d, 0x29, 0x18, 0x09, 0xab, 0x2d, 0xa5, + 0x77, 0x82, 0xbe, 0x21, 0xdb, 0x53, 0x6d, 0x2a, 0x36, 0x1c, 0x75, 0x0e, 0xa3, 0xc7, 0x0f, 0xe3, + 0xf5, 0xee, 0x33, 0x3e, 0xd6, 0xa6, 0x4a, 0x31, 0x49, 0x33, 0x32, 0xd4, 0x8d, 0x13, 0xdc, 0x81, + 0x98, 0x94, 0x32, 0x33, 0xdc, 0x2c, 0x58, 0x84, 0x6d, 0xcf, 0xd6, 0x6d, 0xfb, 0xd8, 0xe6, 0xdf, + 0x87, 0x78, 0x3a, 0xd0, 0x7f, 0x0b, 0x74, 0x46, 0xe8, 0x5c, 0xea, 0x92, 0x3b, 0xbf, 0x87, 0x01, + 0xab, 0x1b, 0x93, 0x03, 0xeb, 0x23, 0xe5, 0xf9, 0xba, 0x94, 0x2f, 0xcb, 0x86, 0xb4, 0x2d, 0x48, + 0xf7, 0xe6, 0xe7, 0x25, 0x24, 0x35, 0xa5, 0x02, 0xc3, 0xb3, 0x12, 0x26, 0x33, 0xe0, 0x02, 0x8c, + 0x65, 0x83, 0x0d, 0x49, 0x67, 0x0d, 0x6f, 0x43, 0x41, 0xba, 0x37, 0x3f, 0x2f, 0xd1, 0xef, 0xe4, + 0xfa, 0x0a, 0xa9, 0xe6, 0xfe, 0x3e, 0x9d, 0x87, 0x5d, 0x41, 0xd8, 0xab, 0xcd, 0x61, 0xe3, 0xb3, + 0x8e, 0xf4, 0xda, 0xfc, 0x1f, 0x2a, 0x7d, 0x4d, 0xb6, 0x7e, 0x5a, 0xcb, 0xae, 0x22, 0xe0, 0xc1, + 0xba, 0x80, 0x13, 0x6b, 0x53, 0x9f, 0x3b, 0xfa, 0xd5, 0x21, 0xf7, 0x73, 0x5d, 0xad, 0x99, 0x3b, + 0xea, 0xb7, 
0xcf, 0x72, 0xec, 0xdf, 0xe9, 0xb8, 0xf3, 0xf5, 0xa4, 0xcd, 0x15, 0xba, 0xe4, 0xaa, + 0x88, 0xb5, 0x29, 0x92, 0x02, 0x14, 0xbe, 0xe2, 0x24, 0x58, 0xbc, 0x96, 0xf6, 0x7f, 0x3f, 0x81, + 0x97, 0x17, 0x9d, 0xac, 0x8b, 0x25, 0x4f, 0xfe, 0x04, 0x00, 0x00, 0xff, 0xff, 0x3a, 0xa4, 0xbf, + 0x31, 0xc4, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_addon.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_addon.pb.go new file mode 100644 index 0000000..be62a07 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_addon.pb.go @@ -0,0 +1,414 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/finding_addon.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// ! Information about a vulnerability with an HTML. +type Form struct { + // ! The URI where to send the form when it's submitted. + ActionUri string `protobuf:"bytes,1,opt,name=action_uri,json=actionUri,proto3" json:"action_uri,omitempty"` + // ! The names of form fields related to the vulnerability. + Fields []string `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Form) Reset() { *m = Form{} } +func (m *Form) String() string { return proto.CompactTextString(m) } +func (*Form) ProtoMessage() {} +func (*Form) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{0} +} +func (m *Form) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Form.Unmarshal(m, b) +} +func (m *Form) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Form.Marshal(b, m, deterministic) +} +func (dst *Form) XXX_Merge(src proto.Message) { + xxx_messageInfo_Form.Merge(dst, src) +} +func (m *Form) XXX_Size() int { + return xxx_messageInfo_Form.Size(m) +} +func (m *Form) XXX_DiscardUnknown() { + xxx_messageInfo_Form.DiscardUnknown(m) +} + +var xxx_messageInfo_Form proto.InternalMessageInfo + +func (m *Form) GetActionUri() string { + if m != nil { + return m.ActionUri + } + return "" +} + +func (m *Form) GetFields() []string { + if m != nil { + return m.Fields + } + return nil +} + +// Information reported for an outdated library. +type OutdatedLibrary struct { + // The name of the outdated library. + LibraryName string `protobuf:"bytes,1,opt,name=library_name,json=libraryName,proto3" json:"library_name,omitempty"` + // The version number. + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // URLs to learn more information about the vulnerabilities in the library. 
+ LearnMoreUrls []string `protobuf:"bytes,3,rep,name=learn_more_urls,json=learnMoreUrls,proto3" json:"learn_more_urls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutdatedLibrary) Reset() { *m = OutdatedLibrary{} } +func (m *OutdatedLibrary) String() string { return proto.CompactTextString(m) } +func (*OutdatedLibrary) ProtoMessage() {} +func (*OutdatedLibrary) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{1} +} +func (m *OutdatedLibrary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutdatedLibrary.Unmarshal(m, b) +} +func (m *OutdatedLibrary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutdatedLibrary.Marshal(b, m, deterministic) +} +func (dst *OutdatedLibrary) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutdatedLibrary.Merge(dst, src) +} +func (m *OutdatedLibrary) XXX_Size() int { + return xxx_messageInfo_OutdatedLibrary.Size(m) +} +func (m *OutdatedLibrary) XXX_DiscardUnknown() { + xxx_messageInfo_OutdatedLibrary.DiscardUnknown(m) +} + +var xxx_messageInfo_OutdatedLibrary proto.InternalMessageInfo + +func (m *OutdatedLibrary) GetLibraryName() string { + if m != nil { + return m.LibraryName + } + return "" +} + +func (m *OutdatedLibrary) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *OutdatedLibrary) GetLearnMoreUrls() []string { + if m != nil { + return m.LearnMoreUrls + } + return nil +} + +// Information regarding any resource causing the vulnerability such +// as JavaScript sources, image, audio files, etc. +type ViolatingResource struct { + // The MIME type of this resource. + ContentType string `protobuf:"bytes,1,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // URL of this violating resource. + ResourceUrl string `protobuf:"bytes,2,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ViolatingResource) Reset() { *m = ViolatingResource{} } +func (m *ViolatingResource) String() string { return proto.CompactTextString(m) } +func (*ViolatingResource) ProtoMessage() {} +func (*ViolatingResource) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{2} +} +func (m *ViolatingResource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ViolatingResource.Unmarshal(m, b) +} +func (m *ViolatingResource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ViolatingResource.Marshal(b, m, deterministic) +} +func (dst *ViolatingResource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ViolatingResource.Merge(dst, src) +} +func (m *ViolatingResource) XXX_Size() int { + return xxx_messageInfo_ViolatingResource.Size(m) +} +func (m *ViolatingResource) XXX_DiscardUnknown() { + xxx_messageInfo_ViolatingResource.DiscardUnknown(m) +} + +var xxx_messageInfo_ViolatingResource proto.InternalMessageInfo + +func (m *ViolatingResource) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *ViolatingResource) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +// Information about vulnerable request parameters. +type VulnerableParameters struct { + // The vulnerable parameter names. 
+ ParameterNames []string `protobuf:"bytes,1,rep,name=parameter_names,json=parameterNames,proto3" json:"parameter_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerableParameters) Reset() { *m = VulnerableParameters{} } +func (m *VulnerableParameters) String() string { return proto.CompactTextString(m) } +func (*VulnerableParameters) ProtoMessage() {} +func (*VulnerableParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{3} +} +func (m *VulnerableParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerableParameters.Unmarshal(m, b) +} +func (m *VulnerableParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerableParameters.Marshal(b, m, deterministic) +} +func (dst *VulnerableParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerableParameters.Merge(dst, src) +} +func (m *VulnerableParameters) XXX_Size() int { + return xxx_messageInfo_VulnerableParameters.Size(m) +} +func (m *VulnerableParameters) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerableParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerableParameters proto.InternalMessageInfo + +func (m *VulnerableParameters) GetParameterNames() []string { + if m != nil { + return m.ParameterNames + } + return nil +} + +// Information about vulnerable or missing HTTP Headers. +type VulnerableHeaders struct { + // List of vulnerable headers. + Headers []*VulnerableHeaders_Header `protobuf:"bytes,1,rep,name=headers,proto3" json:"headers,omitempty"` + // List of missing headers. + MissingHeaders []*VulnerableHeaders_Header `protobuf:"bytes,2,rep,name=missing_headers,json=missingHeaders,proto3" json:"missing_headers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerableHeaders) Reset() { *m = VulnerableHeaders{} } +func (m *VulnerableHeaders) String() string { return proto.CompactTextString(m) } +func (*VulnerableHeaders) ProtoMessage() {} +func (*VulnerableHeaders) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{4} +} +func (m *VulnerableHeaders) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerableHeaders.Unmarshal(m, b) +} +func (m *VulnerableHeaders) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerableHeaders.Marshal(b, m, deterministic) +} +func (dst *VulnerableHeaders) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerableHeaders.Merge(dst, src) +} +func (m *VulnerableHeaders) XXX_Size() int { + return xxx_messageInfo_VulnerableHeaders.Size(m) +} +func (m *VulnerableHeaders) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerableHeaders.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerableHeaders proto.InternalMessageInfo + +func (m *VulnerableHeaders) GetHeaders() []*VulnerableHeaders_Header { + if m != nil { + return m.Headers + } + return nil +} + +func (m *VulnerableHeaders) GetMissingHeaders() []*VulnerableHeaders_Header { + if m != nil { + return m.MissingHeaders + } + return nil +} + +// Describes a HTTP Header. +type VulnerableHeaders_Header struct { + // Header name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Header value. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerableHeaders_Header) Reset() { *m = VulnerableHeaders_Header{} } +func (m *VulnerableHeaders_Header) String() string { return proto.CompactTextString(m) } +func (*VulnerableHeaders_Header) ProtoMessage() {} +func (*VulnerableHeaders_Header) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{4, 0} +} +func (m *VulnerableHeaders_Header) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerableHeaders_Header.Unmarshal(m, b) +} +func (m *VulnerableHeaders_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerableHeaders_Header.Marshal(b, m, deterministic) +} +func (dst *VulnerableHeaders_Header) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerableHeaders_Header.Merge(dst, src) +} +func (m *VulnerableHeaders_Header) XXX_Size() int { + return xxx_messageInfo_VulnerableHeaders_Header.Size(m) +} +func (m *VulnerableHeaders_Header) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerableHeaders_Header.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerableHeaders_Header proto.InternalMessageInfo + +func (m *VulnerableHeaders_Header) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VulnerableHeaders_Header) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Information reported for an XSS. +type Xss struct { + // Stack traces leading to the point where the XSS occurred. + StackTraces []string `protobuf:"bytes,1,rep,name=stack_traces,json=stackTraces,proto3" json:"stack_traces,omitempty"` + // An error message generated by a javascript breakage. 
+ ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Xss) Reset() { *m = Xss{} } +func (m *Xss) String() string { return proto.CompactTextString(m) } +func (*Xss) ProtoMessage() {} +func (*Xss) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_addon_2b00ebbf5c6ee79e, []int{5} +} +func (m *Xss) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Xss.Unmarshal(m, b) +} +func (m *Xss) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Xss.Marshal(b, m, deterministic) +} +func (dst *Xss) XXX_Merge(src proto.Message) { + xxx_messageInfo_Xss.Merge(dst, src) +} +func (m *Xss) XXX_Size() int { + return xxx_messageInfo_Xss.Size(m) +} +func (m *Xss) XXX_DiscardUnknown() { + xxx_messageInfo_Xss.DiscardUnknown(m) +} + +var xxx_messageInfo_Xss proto.InternalMessageInfo + +func (m *Xss) GetStackTraces() []string { + if m != nil { + return m.StackTraces + } + return nil +} + +func (m *Xss) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +func init() { + proto.RegisterType((*Form)(nil), "google.cloud.websecurityscanner.v1beta.Form") + proto.RegisterType((*OutdatedLibrary)(nil), "google.cloud.websecurityscanner.v1beta.OutdatedLibrary") + proto.RegisterType((*ViolatingResource)(nil), "google.cloud.websecurityscanner.v1beta.ViolatingResource") + proto.RegisterType((*VulnerableParameters)(nil), "google.cloud.websecurityscanner.v1beta.VulnerableParameters") + proto.RegisterType((*VulnerableHeaders)(nil), "google.cloud.websecurityscanner.v1beta.VulnerableHeaders") + proto.RegisterType((*VulnerableHeaders_Header)(nil), "google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header") + proto.RegisterType((*Xss)(nil), "google.cloud.websecurityscanner.v1beta.Xss") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/finding_addon.proto", fileDescriptor_finding_addon_2b00ebbf5c6ee79e) +} + +var fileDescriptor_finding_addon_2b00ebbf5c6ee79e = []byte{ + // 502 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0x41, 0x6f, 0xd3, 0x4c, + 0x10, 0x55, 0x92, 0x7e, 0xa9, 0xb2, 0x69, 0x1b, 0x65, 0x55, 0x7d, 0xb2, 0x2a, 0x90, 0x8a, 0x91, + 0x4a, 0xc5, 0xc1, 0x16, 0xe5, 0x06, 0x42, 0x40, 0x0f, 0x15, 0x07, 0x02, 0x51, 0xd4, 0x54, 0xa5, + 0x17, 0x6b, 0x62, 0x4f, 0xcd, 0x8a, 0xf5, 0xae, 0x35, 0xbb, 0x0e, 0xca, 0x9f, 0xe0, 0x0f, 0xf0, + 0x67, 0x91, 0x77, 0xd7, 0x01, 0x29, 0x07, 0x7a, 0xe0, 0xe4, 0x99, 0x37, 0xfb, 0xde, 0xf3, 0xcc, + 0xee, 0xb0, 0x57, 0xa5, 0xd6, 0xa5, 0xc4, 0x34, 0x97, 0xba, 0x29, 0xd2, 0xef, 0xb8, 0x32, 0x98, + 0x37, 0x24, 0xec, 0xc6, 0xe4, 0xa0, 0x14, 0x52, 0xba, 0x7e, 0xb1, 0x42, 0x0b, 0xe9, 0xbd, 0x50, + 0x85, 0x50, 0x65, 0x06, 0x45, 0xa1, 0x55, 0x52, 0x93, 0xb6, 0x9a, 0x9f, 0x79, 0x6e, 0xe2, 0xb8, + 0xc9, 0x2e, 0x37, 0xf1, 0xdc, 0x93, 0x47, 0xc1, 0x03, 0x6a, 0x91, 0x82, 0x52, 0xda, 0x82, 0x15, + 0x5a, 0x19, 0xaf, 0x12, 0xbf, 0x61, 0x7b, 0x57, 0x9a, 0x2a, 0xfe, 0x98, 0x31, 0xc8, 0xdb, 0x42, + 0xd6, 0x90, 0x88, 0x7a, 0xa7, 0xbd, 0xf3, 0xd1, 0x62, 0xe4, 0x91, 0x25, 0x09, 0xfe, 0x3f, 0x1b, + 0xde, 0x0b, 0x94, 0x85, 0x89, 0xfa, 0xa7, 0x83, 0xf3, 0xd1, 0x22, 0x64, 0xf1, 0x9a, 0x4d, 0x3e, + 0x37, 0xb6, 0x00, 0x8b, 0xc5, 0x47, 0xb1, 0x22, 0xa0, 0x0d, 0x7f, 0xc2, 0x0e, 0xa4, 0x0f, 0x33, + 0x05, 0x15, 0x06, 0xad, 0x71, 0xc0, 0x3e, 0x41, 0x85, 
0x3c, 0x62, 0xfb, 0x6b, 0x24, 0x23, 0xb4, + 0x8a, 0xfa, 0xae, 0xda, 0xa5, 0xfc, 0x8c, 0x4d, 0x24, 0x02, 0xa9, 0xac, 0xd2, 0x84, 0x59, 0x43, + 0xd2, 0x44, 0x03, 0x67, 0x78, 0xe8, 0xe0, 0x99, 0x26, 0x5c, 0x92, 0x34, 0xf1, 0x17, 0x36, 0xbd, + 0x11, 0x5a, 0x82, 0x15, 0xaa, 0x5c, 0xa0, 0xd1, 0x0d, 0xe5, 0xd8, 0x3a, 0xe7, 0x5a, 0x59, 0x54, + 0x36, 0xb3, 0x9b, 0x7a, 0xeb, 0x1c, 0xb0, 0xeb, 0x4d, 0xed, 0x8e, 0x50, 0x38, 0xde, 0xaa, 0x07, + 0xfb, 0x71, 0x87, 0x2d, 0x49, 0xc6, 0x6f, 0xd9, 0xf1, 0x4d, 0x23, 0x15, 0x12, 0xac, 0x24, 0xce, + 0x81, 0xa0, 0x42, 0x8b, 0x64, 0xf8, 0x33, 0x36, 0xa9, 0xbb, 0xcc, 0x75, 0x66, 0xa2, 0x9e, 0xfb, + 0xb5, 0xa3, 0x2d, 0xdc, 0x36, 0x67, 0xe2, 0x1f, 0x7d, 0x36, 0xfd, 0xad, 0xf0, 0x01, 0xa1, 0x68, + 0xe9, 0x77, 0x6c, 0xff, 0xab, 0x0f, 0x1d, 0x6d, 0x7c, 0xf1, 0x2e, 0x79, 0xd8, 0x05, 0x26, 0x3b, + 0x5a, 0x89, 0xff, 0x2e, 0x3a, 0x41, 0x2e, 0xd8, 0xa4, 0x12, 0xc6, 0xb4, 0x2f, 0xa4, 0xf3, 0xe8, + 0xff, 0x23, 0x8f, 0xa3, 0x20, 0x1c, 0xe0, 0x93, 0x0b, 0x36, 0xf4, 0x21, 0xe7, 0x6c, 0xef, 0x8f, + 0xfb, 0x75, 0x31, 0x3f, 0x66, 0xff, 0xad, 0x41, 0x36, 0x18, 0xe6, 0xea, 0x93, 0x78, 0xc6, 0x06, + 0xb7, 0xc6, 0xb4, 0xb3, 0x37, 0x16, 0xf2, 0x6f, 0x99, 0x25, 0xc8, 0xb7, 0xd3, 0x1b, 0x3b, 0xec, + 0xda, 0x41, 0xfc, 0x29, 0x3b, 0x44, 0x22, 0x4d, 0x59, 0x85, 0xc6, 0x40, 0xd9, 0xe9, 0x1c, 0x38, + 0x70, 0xe6, 0xb1, 0xcb, 0x9f, 0x3d, 0xf6, 0x3c, 0xd7, 0xd5, 0x03, 0x5b, 0xbb, 0x9c, 0x5e, 0xf9, + 0xe5, 0x79, 0xdf, 0xee, 0xce, 0xbc, 0x7d, 0xf4, 0xf3, 0xde, 0xdd, 0x6d, 0x20, 0x97, 0x5a, 0x82, + 0x2a, 0x13, 0x4d, 0x65, 0x5a, 0xa2, 0x72, 0x2b, 0x91, 0xfa, 0x12, 0xd4, 0xc2, 0xfc, 0x6d, 0x2f, + 0x5f, 0xef, 0x56, 0x56, 0x43, 0x27, 0xf2, 0xf2, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0x39, 0xbe, + 0xb5, 0xb5, 0xdb, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_type_stats.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_type_stats.pb.go new file mode 100644 index 0000000..7f948c0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/finding_type_stats.pb.go @@ -0,0 +1,99 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/finding_type_stats.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A FindingTypeStats resource represents stats regarding a specific FindingType +// of Findings under a given ScanRun. +type FindingTypeStats struct { + // Output only. + // The finding type associated with the stats. + FindingType string `protobuf:"bytes,1,opt,name=finding_type,json=findingType,proto3" json:"finding_type,omitempty"` + // Output only. + // The count of findings belonging to this finding type. 
+ FindingCount int32 `protobuf:"varint,2,opt,name=finding_count,json=findingCount,proto3" json:"finding_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FindingTypeStats) Reset() { *m = FindingTypeStats{} } +func (m *FindingTypeStats) String() string { return proto.CompactTextString(m) } +func (*FindingTypeStats) ProtoMessage() {} +func (*FindingTypeStats) Descriptor() ([]byte, []int) { + return fileDescriptor_finding_type_stats_4b1a34a40ed70c6c, []int{0} +} +func (m *FindingTypeStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FindingTypeStats.Unmarshal(m, b) +} +func (m *FindingTypeStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FindingTypeStats.Marshal(b, m, deterministic) +} +func (dst *FindingTypeStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_FindingTypeStats.Merge(dst, src) +} +func (m *FindingTypeStats) XXX_Size() int { + return xxx_messageInfo_FindingTypeStats.Size(m) +} +func (m *FindingTypeStats) XXX_DiscardUnknown() { + xxx_messageInfo_FindingTypeStats.DiscardUnknown(m) +} + +var xxx_messageInfo_FindingTypeStats proto.InternalMessageInfo + +func (m *FindingTypeStats) GetFindingType() string { + if m != nil { + return m.FindingType + } + return "" +} + +func (m *FindingTypeStats) GetFindingCount() int32 { + if m != nil { + return m.FindingCount + } + return 0 +} + +func init() { + proto.RegisterType((*FindingTypeStats)(nil), "google.cloud.websecurityscanner.v1beta.FindingTypeStats") +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/finding_type_stats.proto", fileDescriptor_finding_type_stats_4b1a34a40ed70c6c) +} + +var fileDescriptor_finding_type_stats_4b1a34a40ed70c6c = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd0, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0x06, 0x60, 0x22, 0x28, 0x18, 0x15, 0x64, 0x41, 0x28, 0xe2, 0xa1, 0x2a, 0x48, 0xf1, 0x90, + 0x20, 0x1e, 0x3d, 0x08, 0x15, 0x3c, 0x97, 0xea, 0x41, 0x7a, 0x29, 0xb3, 0xe9, 0x34, 0x04, 0xd6, + 0x99, 0xb0, 0x99, 0x55, 0xf6, 0x69, 0x7c, 0x55, 0xd9, 0xcd, 0x16, 0xc5, 0x1e, 0xec, 0x35, 0x33, + 0xf9, 0xfe, 0x9f, 0xd1, 0x8f, 0x9e, 0xd9, 0x57, 0x68, 0x5d, 0xc5, 0xcd, 0xca, 0x7e, 0x62, 0x99, + 0xd0, 0x35, 0x75, 0x90, 0x36, 0x39, 0x20, 0xc2, 0xda, 0x7e, 0xdc, 0x95, 0x28, 0x60, 0xd7, 0x81, + 0x56, 0x81, 0xfc, 0x52, 0xda, 0x88, 0xcb, 0x24, 0x20, 0xc9, 0xc4, 0x9a, 0x85, 0x8b, 0x9b, 0x0c, + 0x98, 0x1e, 0x30, 0xdb, 0x80, 0xc9, 0xc0, 0xf9, 0xc5, 0x10, 0x04, 0x31, 0x58, 0x20, 0x62, 0x01, + 0x09, 0x4c, 0x83, 0x72, 0xb5, 0xd0, 0xa7, 0xcf, 0x39, 0xe1, 0xb5, 0x8d, 0xf8, 0xd2, 0xf9, 0xc5, + 0xa5, 0x3e, 0xfe, 0x9d, 0x3a, 0x52, 0x63, 0x35, 0x39, 0x9c, 0x1f, 0xad, 0x7f, 0xf6, 0x8a, 0x6b, + 0x7d, 0xb2, 0x59, 0x71, 0xdc, 0x90, 0x8c, 0xf6, 0xc6, 0x6a, 0xb2, 0x3f, 0xdf, 0xfc, 0x7b, 0xea, + 0xde, 0xa6, 0x5f, 0x4a, 0xdf, 0x3a, 0x7e, 0x37, 0xbb, 0x15, 0x9d, 0x9e, 0xfd, 0x2d, 0x32, 0xeb, + 0x1a, 0xce, 0xd4, 0xe2, 0x6d, 0x00, 0x3c, 0x57, 0x40, 0xde, 0x70, 0xed, 0xad, 0x47, 0xea, 0xfb, + 0xdb, 0x3c, 0x82, 0x18, 0xd2, 0x7f, 0x97, 0x7c, 0xd8, 0x9e, 0x94, 0x07, 0x3d, 0x72, 0xff, 0x1d, + 0x00, 0x00, 0xff, 0xff, 0xd7, 0xe9, 0x8e, 0x1a, 0x8d, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config.pb.go new file mode 100644 index 
0000000..7ce47f0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config.pb.go @@ -0,0 +1,700 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/scan_config.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type of user agents used for scanning. +type ScanConfig_UserAgent int32 + +const ( + // The user agent is unknown. Service will default to CHROME_LINUX. + ScanConfig_USER_AGENT_UNSPECIFIED ScanConfig_UserAgent = 0 + // Chrome on Linux. This is the service default if unspecified. + ScanConfig_CHROME_LINUX ScanConfig_UserAgent = 1 + // Chrome on Android. + ScanConfig_CHROME_ANDROID ScanConfig_UserAgent = 2 + // Safari on IPhone. + ScanConfig_SAFARI_IPHONE ScanConfig_UserAgent = 3 +) + +var ScanConfig_UserAgent_name = map[int32]string{ + 0: "USER_AGENT_UNSPECIFIED", + 1: "CHROME_LINUX", + 2: "CHROME_ANDROID", + 3: "SAFARI_IPHONE", +} +var ScanConfig_UserAgent_value = map[string]int32{ + "USER_AGENT_UNSPECIFIED": 0, + "CHROME_LINUX": 1, + "CHROME_ANDROID": 2, + "SAFARI_IPHONE": 3, +} + +func (x ScanConfig_UserAgent) String() string { + return proto.EnumName(ScanConfig_UserAgent_name, int32(x)) +} +func (ScanConfig_UserAgent) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 0} +} + +// Cloud platforms supported by Cloud Web Security Scanner. +type ScanConfig_TargetPlatform int32 + +const ( + // The target platform is unknown. Requests with this enum value will be + // rejected with INVALID_ARGUMENT error. + ScanConfig_TARGET_PLATFORM_UNSPECIFIED ScanConfig_TargetPlatform = 0 + // Google App Engine service. + ScanConfig_APP_ENGINE ScanConfig_TargetPlatform = 1 + // Google Compute Engine service. + ScanConfig_COMPUTE ScanConfig_TargetPlatform = 2 +) + +var ScanConfig_TargetPlatform_name = map[int32]string{ + 0: "TARGET_PLATFORM_UNSPECIFIED", + 1: "APP_ENGINE", + 2: "COMPUTE", +} +var ScanConfig_TargetPlatform_value = map[string]int32{ + "TARGET_PLATFORM_UNSPECIFIED": 0, + "APP_ENGINE": 1, + "COMPUTE": 2, +} + +func (x ScanConfig_TargetPlatform) String() string { + return proto.EnumName(ScanConfig_TargetPlatform_name, int32(x)) +} +func (ScanConfig_TargetPlatform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 1} +} + +// Scan risk levels supported by Cloud Web Security Scanner. LOW impact +// scanning will minimize requests with the potential to modify data. To +// achieve the maximum scan coverage, NORMAL risk level is recommended. +type ScanConfig_RiskLevel int32 + +const ( + // Use default, which is NORMAL. 
+ ScanConfig_RISK_LEVEL_UNSPECIFIED ScanConfig_RiskLevel = 0 + // Normal scanning (Recommended) + ScanConfig_NORMAL ScanConfig_RiskLevel = 1 + // Lower impact scanning + ScanConfig_LOW ScanConfig_RiskLevel = 2 +) + +var ScanConfig_RiskLevel_name = map[int32]string{ + 0: "RISK_LEVEL_UNSPECIFIED", + 1: "NORMAL", + 2: "LOW", +} +var ScanConfig_RiskLevel_value = map[string]int32{ + "RISK_LEVEL_UNSPECIFIED": 0, + "NORMAL": 1, + "LOW": 2, +} + +func (x ScanConfig_RiskLevel) String() string { + return proto.EnumName(ScanConfig_RiskLevel_name, int32(x)) +} +func (ScanConfig_RiskLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 2} +} + +// Controls export of scan configurations and results to Cloud Security +// Command Center. +type ScanConfig_ExportToSecurityCommandCenter int32 + +const ( + // Use default, which is ENABLED. + ScanConfig_EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED ScanConfig_ExportToSecurityCommandCenter = 0 + // Export results of this scan to Cloud Security Command Center. + ScanConfig_ENABLED ScanConfig_ExportToSecurityCommandCenter = 1 + // Do not export results of this scan to Cloud Security Command Center. + ScanConfig_DISABLED ScanConfig_ExportToSecurityCommandCenter = 2 +) + +var ScanConfig_ExportToSecurityCommandCenter_name = map[int32]string{ + 0: "EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED", + 1: "ENABLED", + 2: "DISABLED", +} +var ScanConfig_ExportToSecurityCommandCenter_value = map[string]int32{ + "EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED": 0, + "ENABLED": 1, + "DISABLED": 2, +} + +func (x ScanConfig_ExportToSecurityCommandCenter) String() string { + return proto.EnumName(ScanConfig_ExportToSecurityCommandCenter_name, int32(x)) +} +func (ScanConfig_ExportToSecurityCommandCenter) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 3} +} + +// A ScanConfig resource contains the configurations to launch a scan. +type ScanConfig struct { + // The resource name of the ScanConfig. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}'. The ScanConfig IDs are + // generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. + // The user provided display name of the ScanConfig. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The maximum QPS during scanning. A valid value ranges from 5 to 20 + // inclusively. If the field is unspecified or its value is set 0, server will + // default to 15. Other values outside of [5, 20] range will be rejected with + // INVALID_ARGUMENT error. + MaxQps int32 `protobuf:"varint,3,opt,name=max_qps,json=maxQps,proto3" json:"max_qps,omitempty"` + // Required. + // The starting URLs from which the scanner finds site pages. + StartingUrls []string `protobuf:"bytes,4,rep,name=starting_urls,json=startingUrls,proto3" json:"starting_urls,omitempty"` + // The authentication configuration. If specified, service will use the + // authentication configuration during scanning. + Authentication *ScanConfig_Authentication `protobuf:"bytes,5,opt,name=authentication,proto3" json:"authentication,omitempty"` + // The user agent used during scanning. 
+ UserAgent ScanConfig_UserAgent `protobuf:"varint,6,opt,name=user_agent,json=userAgent,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanConfig_UserAgent" json:"user_agent,omitempty"` + // The blacklist URL patterns as described in + // https://cloud.google.com/security-scanner/docs/excluded-urls + BlacklistPatterns []string `protobuf:"bytes,7,rep,name=blacklist_patterns,json=blacklistPatterns,proto3" json:"blacklist_patterns,omitempty"` + // The schedule of the ScanConfig. + Schedule *ScanConfig_Schedule `protobuf:"bytes,8,opt,name=schedule,proto3" json:"schedule,omitempty"` + // Set of Cloud Platforms targeted by the scan. If empty, APP_ENGINE will be + // used as a default. + TargetPlatforms []ScanConfig_TargetPlatform `protobuf:"varint,9,rep,packed,name=target_platforms,json=targetPlatforms,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanConfig_TargetPlatform" json:"target_platforms,omitempty"` + // Controls export of scan configurations and results to Cloud Security + // Command Center. + ExportToSecurityCommandCenter ScanConfig_ExportToSecurityCommandCenter `protobuf:"varint,10,opt,name=export_to_security_command_center,json=exportToSecurityCommandCenter,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanConfig_ExportToSecurityCommandCenter" json:"export_to_security_command_center,omitempty"` + // Latest ScanRun if available. + LatestRun *ScanRun `protobuf:"bytes,11,opt,name=latest_run,json=latestRun,proto3" json:"latest_run,omitempty"` + // The risk level selected for the scan + RiskLevel ScanConfig_RiskLevel `protobuf:"varint,12,opt,name=risk_level,json=riskLevel,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanConfig_RiskLevel" json:"risk_level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig) Reset() { *m = ScanConfig{} } +func (m *ScanConfig) String() string { return proto.CompactTextString(m) } +func (*ScanConfig) ProtoMessage() {} +func (*ScanConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0} +} +func (m *ScanConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig.Unmarshal(m, b) +} +func (m *ScanConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig.Marshal(b, m, deterministic) +} +func (dst *ScanConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig.Merge(dst, src) +} +func (m *ScanConfig) XXX_Size() int { + return xxx_messageInfo_ScanConfig.Size(m) +} +func (m *ScanConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig proto.InternalMessageInfo + +func (m *ScanConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ScanConfig) GetMaxQps() int32 { + if m != nil { + return m.MaxQps + } + return 0 +} + +func (m *ScanConfig) GetStartingUrls() []string { + if m != nil { + return m.StartingUrls + } + return nil +} + +func (m *ScanConfig) GetAuthentication() *ScanConfig_Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *ScanConfig) GetUserAgent() ScanConfig_UserAgent { + if m != nil { + return m.UserAgent + } + return ScanConfig_USER_AGENT_UNSPECIFIED +} + +func (m *ScanConfig) GetBlacklistPatterns() []string { + if m != nil { + return m.BlacklistPatterns + } + return nil +} + +func (m 
*ScanConfig) GetSchedule() *ScanConfig_Schedule { + if m != nil { + return m.Schedule + } + return nil +} + +func (m *ScanConfig) GetTargetPlatforms() []ScanConfig_TargetPlatform { + if m != nil { + return m.TargetPlatforms + } + return nil +} + +func (m *ScanConfig) GetExportToSecurityCommandCenter() ScanConfig_ExportToSecurityCommandCenter { + if m != nil { + return m.ExportToSecurityCommandCenter + } + return ScanConfig_EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED +} + +func (m *ScanConfig) GetLatestRun() *ScanRun { + if m != nil { + return m.LatestRun + } + return nil +} + +func (m *ScanConfig) GetRiskLevel() ScanConfig_RiskLevel { + if m != nil { + return m.RiskLevel + } + return ScanConfig_RISK_LEVEL_UNSPECIFIED +} + +// Scan authentication configuration. +type ScanConfig_Authentication struct { + // Required. + // Authentication configuration + // + // Types that are valid to be assigned to Authentication: + // *ScanConfig_Authentication_GoogleAccount_ + // *ScanConfig_Authentication_CustomAccount_ + Authentication isScanConfig_Authentication_Authentication `protobuf_oneof:"authentication"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication) Reset() { *m = ScanConfig_Authentication{} } +func (m *ScanConfig_Authentication) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication) ProtoMessage() {} +func (*ScanConfig_Authentication) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 0} +} +func (m *ScanConfig_Authentication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication.Merge(dst, src) +} +func (m *ScanConfig_Authentication) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication.Size(m) +} +func (m *ScanConfig_Authentication) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication proto.InternalMessageInfo + +type isScanConfig_Authentication_Authentication interface { + isScanConfig_Authentication_Authentication() +} + +type ScanConfig_Authentication_GoogleAccount_ struct { + GoogleAccount *ScanConfig_Authentication_GoogleAccount `protobuf:"bytes,1,opt,name=google_account,json=googleAccount,proto3,oneof"` +} + +type ScanConfig_Authentication_CustomAccount_ struct { + CustomAccount *ScanConfig_Authentication_CustomAccount `protobuf:"bytes,2,opt,name=custom_account,json=customAccount,proto3,oneof"` +} + +func (*ScanConfig_Authentication_GoogleAccount_) isScanConfig_Authentication_Authentication() {} + +func (*ScanConfig_Authentication_CustomAccount_) isScanConfig_Authentication_Authentication() {} + +func (m *ScanConfig_Authentication) GetAuthentication() isScanConfig_Authentication_Authentication { + if m != nil { + return m.Authentication + } + return nil +} + +func (m *ScanConfig_Authentication) GetGoogleAccount() *ScanConfig_Authentication_GoogleAccount { + if x, ok := m.GetAuthentication().(*ScanConfig_Authentication_GoogleAccount_); ok { + return x.GoogleAccount + } + return nil +} + +func (m *ScanConfig_Authentication) GetCustomAccount() 
*ScanConfig_Authentication_CustomAccount { + if x, ok := m.GetAuthentication().(*ScanConfig_Authentication_CustomAccount_); ok { + return x.CustomAccount + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ScanConfig_Authentication) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ScanConfig_Authentication_OneofMarshaler, _ScanConfig_Authentication_OneofUnmarshaler, _ScanConfig_Authentication_OneofSizer, []interface{}{ + (*ScanConfig_Authentication_GoogleAccount_)(nil), + (*ScanConfig_Authentication_CustomAccount_)(nil), + } +} + +func _ScanConfig_Authentication_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ScanConfig_Authentication) + // authentication + switch x := m.Authentication.(type) { + case *ScanConfig_Authentication_GoogleAccount_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleAccount); err != nil { + return err + } + case *ScanConfig_Authentication_CustomAccount_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomAccount); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ScanConfig_Authentication.Authentication has unexpected type %T", x) + } + return nil +} + +func _ScanConfig_Authentication_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ScanConfig_Authentication) + switch tag { + case 1: // authentication.google_account + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ScanConfig_Authentication_GoogleAccount) + err := b.DecodeMessage(msg) + m.Authentication = &ScanConfig_Authentication_GoogleAccount_{msg} + return true, err + case 2: // authentication.custom_account + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ScanConfig_Authentication_CustomAccount) + err := b.DecodeMessage(msg) + m.Authentication = &ScanConfig_Authentication_CustomAccount_{msg} + return true, err + default: + return false, nil + } +} + +func _ScanConfig_Authentication_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ScanConfig_Authentication) + // authentication + switch x := m.Authentication.(type) { + case *ScanConfig_Authentication_GoogleAccount_: + s := proto.Size(x.GoogleAccount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ScanConfig_Authentication_CustomAccount_: + s := proto.Size(x.CustomAccount) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Describes authentication configuration that uses a Google account. +type ScanConfig_Authentication_GoogleAccount struct { + // Required. + // The user name of the Google account. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // Input only. + // Required. + // The password of the Google account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. 
+ Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication_GoogleAccount) Reset() { + *m = ScanConfig_Authentication_GoogleAccount{} +} +func (m *ScanConfig_Authentication_GoogleAccount) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication_GoogleAccount) ProtoMessage() {} +func (*ScanConfig_Authentication_GoogleAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 0, 0} +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication_GoogleAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Merge(dst, src) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.Size(m) +} +func (m *ScanConfig_Authentication_GoogleAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication_GoogleAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication_GoogleAccount proto.InternalMessageInfo + +func (m *ScanConfig_Authentication_GoogleAccount) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *ScanConfig_Authentication_GoogleAccount) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +// Describes authentication configuration that uses a custom account. +type ScanConfig_Authentication_CustomAccount struct { + // Required. + // The user name of the custom account. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // Input only. + // Required. + // The password of the custom account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + // Required. + // The login form URL of the website. 
+ LoginUrl string `protobuf:"bytes,3,opt,name=login_url,json=loginUrl,proto3" json:"login_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Authentication_CustomAccount) Reset() { + *m = ScanConfig_Authentication_CustomAccount{} +} +func (m *ScanConfig_Authentication_CustomAccount) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Authentication_CustomAccount) ProtoMessage() {} +func (*ScanConfig_Authentication_CustomAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 0, 1} +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Unmarshal(m, b) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Authentication_CustomAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Merge(dst, src) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Authentication_CustomAccount.Size(m) +} +func (m *ScanConfig_Authentication_CustomAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Authentication_CustomAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Authentication_CustomAccount proto.InternalMessageInfo + +func (m *ScanConfig_Authentication_CustomAccount) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *ScanConfig_Authentication_CustomAccount) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +func (m *ScanConfig_Authentication_CustomAccount) GetLoginUrl() string { + if m != nil { + return m.LoginUrl + } + return "" +} + +// Scan schedule configuration. +type ScanConfig_Schedule struct { + // A timestamp indicates when the next run will be scheduled. The value is + // refreshed by the server after each run. If unspecified, it will default + // to current server time, which means the scan will be scheduled to start + // immediately. + ScheduleTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=schedule_time,json=scheduleTime,proto3" json:"schedule_time,omitempty"` + // Required. + // The duration of time between executions in days. 
+ IntervalDurationDays int32 `protobuf:"varint,2,opt,name=interval_duration_days,json=intervalDurationDays,proto3" json:"interval_duration_days,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig_Schedule) Reset() { *m = ScanConfig_Schedule{} } +func (m *ScanConfig_Schedule) String() string { return proto.CompactTextString(m) } +func (*ScanConfig_Schedule) ProtoMessage() {} +func (*ScanConfig_Schedule) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_cb3798ae69a3d538, []int{0, 1} +} +func (m *ScanConfig_Schedule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig_Schedule.Unmarshal(m, b) +} +func (m *ScanConfig_Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig_Schedule.Marshal(b, m, deterministic) +} +func (dst *ScanConfig_Schedule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig_Schedule.Merge(dst, src) +} +func (m *ScanConfig_Schedule) XXX_Size() int { + return xxx_messageInfo_ScanConfig_Schedule.Size(m) +} +func (m *ScanConfig_Schedule) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig_Schedule.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig_Schedule proto.InternalMessageInfo + +func (m *ScanConfig_Schedule) GetScheduleTime() *timestamp.Timestamp { + if m != nil { + return m.ScheduleTime + } + return nil +} + +func (m *ScanConfig_Schedule) GetIntervalDurationDays() int32 { + if m != nil { + return m.IntervalDurationDays + } + return 0 +} + +func init() { + proto.RegisterType((*ScanConfig)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfig") + proto.RegisterType((*ScanConfig_Authentication)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication") + proto.RegisterType((*ScanConfig_Authentication_GoogleAccount)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.GoogleAccount") + proto.RegisterType((*ScanConfig_Authentication_CustomAccount)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.CustomAccount") + proto.RegisterType((*ScanConfig_Schedule)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfig.Schedule") + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanConfig_UserAgent", ScanConfig_UserAgent_name, ScanConfig_UserAgent_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanConfig_TargetPlatform", ScanConfig_TargetPlatform_name, ScanConfig_TargetPlatform_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanConfig_RiskLevel", ScanConfig_RiskLevel_name, ScanConfig_RiskLevel_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanConfig_ExportToSecurityCommandCenter", ScanConfig_ExportToSecurityCommandCenter_name, ScanConfig_ExportToSecurityCommandCenter_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/scan_config.proto", fileDescriptor_scan_config_cb3798ae69a3d538) +} + +var fileDescriptor_scan_config_cb3798ae69a3d538 = []byte{ + // 943 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0x6d, 0x6f, 0xdb, 0x36, + 0x10, 0xc7, 0x2b, 0xbb, 0x4d, 0xec, 0xf3, 0x43, 0x55, 0x62, 0xe8, 0x04, 0x77, 0x45, 0xdd, 0x0c, + 0x18, 0x8c, 0x01, 0xb5, 0xd1, 0x6c, 0x03, 0x06, 0xb4, 0xc0, 0xa0, 0xd8, 0x4a, 0x62, 0xcc, 0x96, + 0x34, 0x5a, 0x5e, 0xb3, 0xed, 0x05, 0xc7, 0xc8, 0x8c, 0x2a, 0x44, 0x4f, 0x13, 0xa9, 0x34, 0x79, + 0xb9, 0x8f, 0xb1, 0x7d, 0x8e, 0x7d, 0x85, 
0x7d, 0xaf, 0x41, 0x94, 0xe4, 0xc6, 0x2e, 0xd0, 0xb5, + 0xde, 0xde, 0xf1, 0xee, 0x78, 0xbf, 0xff, 0x91, 0xe2, 0x9d, 0xe0, 0x5b, 0x2f, 0x8e, 0xbd, 0x80, + 0x8d, 0xdc, 0x20, 0xce, 0x56, 0xa3, 0x37, 0xec, 0x9c, 0x33, 0x37, 0x4b, 0x7d, 0x71, 0xc3, 0x5d, + 0x1a, 0x45, 0x2c, 0x1d, 0x5d, 0x3d, 0x3f, 0x67, 0x82, 0x8e, 0x72, 0x93, 0xb8, 0x71, 0x74, 0xe1, + 0x7b, 0xc3, 0x24, 0x8d, 0x45, 0x8c, 0xbe, 0x28, 0x32, 0x87, 0x32, 0x73, 0xf8, 0x6e, 0xe6, 0xb0, + 0xc8, 0xec, 0x7d, 0x56, 0x2a, 0xd0, 0xc4, 0x1f, 0xd1, 0x28, 0x8a, 0x05, 0x15, 0x7e, 0x1c, 0xf1, + 0x82, 0xd2, 0xfb, 0xe6, 0x63, 0xf4, 0xd3, 0x2c, 0x2a, 0xd3, 0x9e, 0x94, 0x69, 0xd2, 0x3a, 0xcf, + 0x2e, 0x46, 0xc2, 0x0f, 0x19, 0x17, 0x34, 0x4c, 0x8a, 0x0d, 0x07, 0x7f, 0x75, 0x01, 0x16, 0x2e, + 0x8d, 0xc6, 0xb2, 0x64, 0x84, 0xe0, 0x6e, 0x44, 0x43, 0xa6, 0x29, 0x7d, 0x65, 0xd0, 0xc4, 0x72, + 0x8d, 0x9e, 0x42, 0x7b, 0xe5, 0xf3, 0x24, 0xa0, 0x37, 0x44, 0xc6, 0x6a, 0x32, 0xd6, 0x2a, 0x7d, + 0x66, 0xbe, 0xe5, 0x53, 0xd8, 0x0f, 0xe9, 0x35, 0xf9, 0x2d, 0xe1, 0x5a, 0xbd, 0xaf, 0x0c, 0xee, + 0xe1, 0xbd, 0x90, 0x5e, 0xff, 0x90, 0x70, 0xf4, 0x39, 0x74, 0xb8, 0xa0, 0xa9, 0xf0, 0x23, 0x8f, + 0x64, 0x69, 0xc0, 0xb5, 0xbb, 0xfd, 0xfa, 0xa0, 0x89, 0xdb, 0x95, 0x73, 0x99, 0x06, 0x1c, 0xf9, + 0xd0, 0xa5, 0x99, 0x78, 0xcd, 0x22, 0xe1, 0xbb, 0xf2, 0xd0, 0xda, 0xbd, 0xbe, 0x32, 0x68, 0x1d, + 0xea, 0xc3, 0x0f, 0xbb, 0xba, 0xe1, 0xdb, 0x03, 0x0c, 0xf5, 0x0d, 0x10, 0xde, 0x02, 0xa3, 0x5f, + 0x00, 0x32, 0xce, 0x52, 0x42, 0x3d, 0x16, 0x09, 0x6d, 0xaf, 0xaf, 0x0c, 0xba, 0x87, 0x2f, 0x77, + 0x90, 0x59, 0x72, 0x96, 0xea, 0x39, 0x03, 0x37, 0xb3, 0x6a, 0x89, 0x9e, 0x01, 0x3a, 0x0f, 0xa8, + 0x7b, 0x19, 0xf8, 0x5c, 0x90, 0x84, 0x0a, 0xc1, 0xd2, 0x88, 0x6b, 0xfb, 0xf2, 0xc4, 0x0f, 0xd6, + 0x11, 0xbb, 0x0c, 0xa0, 0x57, 0xd0, 0xe0, 0xee, 0x6b, 0xb6, 0xca, 0x02, 0xa6, 0x35, 0xe4, 0x81, + 0x5f, 0xec, 0x50, 0xc9, 0xa2, 0x44, 0xe0, 0x35, 0x0c, 0x05, 0xa0, 0x0a, 0x9a, 0x7a, 0x4c, 0x90, + 0x24, 0xa0, 0xe2, 0x22, 0x4e, 0x43, 0xae, 0x35, 0xfb, 0xf5, 0x41, 0x77, 0xa7, 0x1b, 0x75, 0x24, + 0xca, 0x2e, 0x49, 0xf8, 0xbe, 0xd8, 0xb0, 0x39, 0xfa, 0x53, 0x81, 0xa7, 0xec, 0x3a, 0x89, 0x53, + 0x41, 0x44, 0x4c, 0x2a, 0x1e, 0x71, 0xe3, 0x30, 0xa4, 0xd1, 0x8a, 0xb8, 0x2c, 0x12, 0x2c, 0xd5, + 0x40, 0x5e, 0xb5, 0xbd, 0x83, 0xbe, 0x21, 0xd9, 0x4e, 0xbc, 0x28, 0x77, 0x8e, 0x0b, 0xf0, 0x58, + 0x72, 0xf1, 0x63, 0xf6, 0xbe, 0x30, 0x32, 0x01, 0x02, 0x2a, 0x18, 0x17, 0x79, 0x4f, 0x68, 0x2d, + 0x79, 0xcb, 0xa3, 0x8f, 0x29, 0x02, 0x67, 0x11, 0x6e, 0x16, 0x08, 0x9c, 0xc9, 0xf7, 0x93, 0xfa, + 0xfc, 0x92, 0x04, 0xec, 0x8a, 0x05, 0x5a, 0x7b, 0xe7, 0xf7, 0x83, 0x7d, 0x7e, 0x39, 0xcb, 0x19, + 0xb8, 0x99, 0x56, 0xcb, 0xde, 0xdf, 0x75, 0xe8, 0x6e, 0xbe, 0x5f, 0x74, 0x0d, 0xdd, 0x02, 0x4e, + 0xa8, 0xeb, 0xc6, 0x59, 0x24, 0x64, 0x67, 0xb6, 0x0e, 0xad, 0xff, 0xdc, 0x1a, 0xc3, 0x13, 0x09, + 0xd0, 0x0b, 0xec, 0xe9, 0x1d, 0xdc, 0xf1, 0x6e, 0x3b, 0x72, 0x65, 0x37, 0xe3, 0x22, 0x0e, 0xd7, + 0xca, 0xb5, 0xff, 0x4b, 0x79, 0x2c, 0xb9, 0xb7, 0x94, 0xdd, 0xdb, 0x8e, 0xde, 0x09, 0x74, 0x36, + 0x6a, 0x43, 0x3d, 0x68, 0xe4, 0x4d, 0x76, 0x6b, 0x30, 0xad, 0xed, 0x3c, 0x96, 0x50, 0xce, 0xdf, + 0xc4, 0xe9, 0xaa, 0x1c, 0x4c, 0x6b, 0xbb, 0xb7, 0x82, 0xce, 0x86, 0xd4, 0xae, 0x20, 0xf4, 0x08, + 0x9a, 0x41, 0xec, 0xf9, 0x51, 0x3e, 0xc2, 0xe4, 0x80, 0x6b, 0xe2, 0x86, 0x74, 0x2c, 0xd3, 0xe0, + 0x48, 0xdd, 0x9e, 0x5e, 0xbd, 0xdf, 0x15, 0x68, 0x54, 0x6d, 0x89, 0xbe, 0x83, 0x4e, 0xd5, 0x98, + 0x24, 0x1f, 0xbe, 0xe5, 0x07, 0xec, 0x55, 0xd7, 0x58, 0x4d, 0xe6, 0xa1, 0x53, 0x4d, 0x66, 0xdc, + 0xae, 0x12, 0x72, 0x17, 0xfa, 0x1a, 0x1e, 0xfa, 0xf9, 0x5b, 0xbe, 
0xa2, 0x01, 0x59, 0x65, 0xa9, + 0x94, 0x20, 0x2b, 0x7a, 0xc3, 0x65, 0x99, 0xf7, 0xf0, 0x27, 0x55, 0x74, 0x52, 0x06, 0x27, 0xf4, + 0x86, 0x1f, 0xfc, 0x0a, 0xcd, 0xf5, 0x8c, 0x42, 0x3d, 0x78, 0xb8, 0x5c, 0x18, 0x98, 0xe8, 0x27, + 0x86, 0xe9, 0x90, 0xa5, 0xb9, 0xb0, 0x8d, 0xf1, 0xf4, 0x78, 0x6a, 0x4c, 0xd4, 0x3b, 0x48, 0x85, + 0xf6, 0xf8, 0x14, 0x5b, 0x73, 0x83, 0xcc, 0xa6, 0xe6, 0xf2, 0x4c, 0x55, 0x10, 0x82, 0x6e, 0xe9, + 0xd1, 0xcd, 0x09, 0xb6, 0xa6, 0x13, 0xb5, 0x86, 0x1e, 0x40, 0x67, 0xa1, 0x1f, 0xeb, 0x78, 0x4a, + 0xa6, 0xf6, 0xa9, 0x65, 0x1a, 0x6a, 0xfd, 0xc0, 0x84, 0xee, 0xe6, 0x68, 0x40, 0x4f, 0xe0, 0x91, + 0xa3, 0xe3, 0x13, 0xc3, 0x21, 0xf6, 0x4c, 0x77, 0x8e, 0x2d, 0x3c, 0xdf, 0xd2, 0xea, 0x02, 0xe8, + 0xb6, 0x4d, 0x0c, 0xf3, 0x64, 0x6a, 0x1a, 0xaa, 0x82, 0x5a, 0xb0, 0x3f, 0xb6, 0xe6, 0xf6, 0xd2, + 0x31, 0xd4, 0xda, 0xc1, 0x4b, 0x68, 0xae, 0xbb, 0x22, 0xaf, 0x18, 0x4f, 0x17, 0xdf, 0x93, 0x99, + 0xf1, 0xa3, 0x31, 0xdb, 0xa2, 0x00, 0xec, 0x99, 0x16, 0x9e, 0xeb, 0x33, 0x55, 0x41, 0xfb, 0x50, + 0x9f, 0x59, 0xaf, 0xd4, 0xda, 0x41, 0x08, 0x8f, 0xdf, 0x3b, 0x28, 0xd0, 0x73, 0x78, 0x66, 0x9c, + 0xd9, 0x16, 0x76, 0x88, 0x63, 0x91, 0x85, 0x31, 0x5e, 0xe2, 0xa9, 0xf3, 0x13, 0x19, 0x5b, 0xf3, + 0xb9, 0x6e, 0x4e, 0xc8, 0xd8, 0x30, 0x1d, 0x03, 0x6f, 0x09, 0xb5, 0x60, 0xdf, 0x30, 0xf5, 0xa3, + 0x99, 0x31, 0x51, 0x15, 0xd4, 0x86, 0xc6, 0x64, 0xba, 0x28, 0xac, 0xda, 0xd1, 0x1f, 0x0a, 0x7c, + 0xe9, 0xc6, 0xe1, 0x07, 0xf6, 0xc2, 0xd1, 0xfd, 0xb7, 0xcd, 0x60, 0xe7, 0xdf, 0xdb, 0x56, 0x7e, + 0x3e, 0x2b, 0x53, 0xbd, 0x38, 0xa0, 0x91, 0x37, 0x8c, 0x53, 0x6f, 0xe4, 0xb1, 0x48, 0xbe, 0x86, + 0x51, 0x11, 0xa2, 0x89, 0xcf, 0xff, 0xed, 0x7f, 0xff, 0xe2, 0xdd, 0xc8, 0xf9, 0x9e, 0x84, 0x7c, + 0xf5, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x05, 0x47, 0x5c, 0x1f, 0xb3, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config_error.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config_error.pb.go new file mode 100644 index 0000000..ef30617 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_config_error.pb.go @@ -0,0 +1,344 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/scan_config_error.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Output only. +// Defines an error reason code. +// Next id: 43 +type ScanConfigError_Code int32 + +const ( + // There is no error. + ScanConfigError_CODE_UNSPECIFIED ScanConfigError_Code = 0 + // There is no error. + ScanConfigError_OK ScanConfigError_Code = 0 + // Indicates an internal server error. + // Please DO NOT USE THIS ERROR CODE unless the root cause is truly unknown. 
+ ScanConfigError_INTERNAL_ERROR ScanConfigError_Code = 1 + // One of the seed URLs is an App Engine URL but we cannot validate the scan + // settings due to an App Engine API backend error. + ScanConfigError_APPENGINE_API_BACKEND_ERROR ScanConfigError_Code = 2 + // One of the seed URLs is an App Engine URL but we cannot access the + // App Engine API to validate scan settings. + ScanConfigError_APPENGINE_API_NOT_ACCESSIBLE ScanConfigError_Code = 3 + // One of the seed URLs is an App Engine URL but the Default Host of the + // App Engine is not set. + ScanConfigError_APPENGINE_DEFAULT_HOST_MISSING ScanConfigError_Code = 4 + // Google corporate accounts can not be used for scanning. + ScanConfigError_CANNOT_USE_GOOGLE_COM_ACCOUNT ScanConfigError_Code = 6 + // The account of the scan creator can not be used for scanning. + ScanConfigError_CANNOT_USE_OWNER_ACCOUNT ScanConfigError_Code = 7 + // This scan targets Compute Engine, but we cannot validate scan settings + // due to a Compute Engine API backend error. + ScanConfigError_COMPUTE_API_BACKEND_ERROR ScanConfigError_Code = 8 + // This scan targets Compute Engine, but we cannot access the Compute Engine + // API to validate the scan settings. + ScanConfigError_COMPUTE_API_NOT_ACCESSIBLE ScanConfigError_Code = 9 + // The Custom Login URL does not belong to the current project. + ScanConfigError_CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT ScanConfigError_Code = 10 + // The Custom Login URL is malformed (can not be parsed). + ScanConfigError_CUSTOM_LOGIN_URL_MALFORMED ScanConfigError_Code = 11 + // The Custom Login URL is mapped to a non-routable IP address in DNS. + ScanConfigError_CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS ScanConfigError_Code = 12 + // The Custom Login URL is mapped to an IP address which is not reserved for + // the current project. + ScanConfigError_CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS ScanConfigError_Code = 13 + // The Custom Login URL has a non-routable IP address. + ScanConfigError_CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS ScanConfigError_Code = 14 + // The Custom Login URL has an IP address which is not reserved for the + // current project. + ScanConfigError_CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS ScanConfigError_Code = 15 + // Another scan with the same name (case-sensitive) already exists. + ScanConfigError_DUPLICATE_SCAN_NAME ScanConfigError_Code = 16 + // A field is set to an invalid value. + ScanConfigError_INVALID_FIELD_VALUE ScanConfigError_Code = 18 + // There was an error trying to authenticate to the scan target. + ScanConfigError_FAILED_TO_AUTHENTICATE_TO_TARGET ScanConfigError_Code = 19 + // Finding type value is not specified in the list findings request. + ScanConfigError_FINDING_TYPE_UNSPECIFIED ScanConfigError_Code = 20 + // Scan targets Compute Engine, yet current project was not whitelisted for + // Google Compute Engine Scanning Alpha access. + ScanConfigError_FORBIDDEN_TO_SCAN_COMPUTE ScanConfigError_Code = 21 + // The supplied filter is malformed. For example, it can not be parsed, does + // not have a filter type in expression, or the same filter type appears + // more than once. + ScanConfigError_MALFORMED_FILTER ScanConfigError_Code = 22 + // The supplied resource name is malformed (can not be parsed). + ScanConfigError_MALFORMED_RESOURCE_NAME ScanConfigError_Code = 23 + // The current project is not in an active state. + ScanConfigError_PROJECT_INACTIVE ScanConfigError_Code = 24 + // A required field is not set. 
+ ScanConfigError_REQUIRED_FIELD ScanConfigError_Code = 25 + // Project id, scanconfig id, scanrun id, or finding id are not consistent + // with each other in resource name. + ScanConfigError_RESOURCE_NAME_INCONSISTENT ScanConfigError_Code = 26 + // The scan being requested to start is already running. + ScanConfigError_SCAN_ALREADY_RUNNING ScanConfigError_Code = 27 + // The scan that was requested to be stopped is not running. + ScanConfigError_SCAN_NOT_RUNNING ScanConfigError_Code = 28 + // One of the seed URLs does not belong to the current project. + ScanConfigError_SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT ScanConfigError_Code = 29 + // One of the seed URLs is malformed (can not be parsed). + ScanConfigError_SEED_URL_MALFORMED ScanConfigError_Code = 30 + // One of the seed URLs is mapped to a non-routable IP address in DNS. + ScanConfigError_SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS ScanConfigError_Code = 31 + // One of the seed URLs is mapped to an IP address which is not reserved + // for the current project. + ScanConfigError_SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS ScanConfigError_Code = 32 + // One of the seed URLs has on-routable IP address. + ScanConfigError_SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS ScanConfigError_Code = 33 + // One of the seed URLs has an IP address that is not reserved + // for the current project. + ScanConfigError_SEED_URL_HAS_UNRESERVED_IP_ADDRESS ScanConfigError_Code = 35 + // The Cloud Security Scanner service account is not configured under the + // project. + ScanConfigError_SERVICE_ACCOUNT_NOT_CONFIGURED ScanConfigError_Code = 36 + // A project has reached the maximum number of scans. + ScanConfigError_TOO_MANY_SCANS ScanConfigError_Code = 37 + // Resolving the details of the current project fails. + ScanConfigError_UNABLE_TO_RESOLVE_PROJECT_INFO ScanConfigError_Code = 38 + // One or more blacklist patterns were in the wrong format. + ScanConfigError_UNSUPPORTED_BLACKLIST_PATTERN_FORMAT ScanConfigError_Code = 39 + // The supplied filter is not supported. + ScanConfigError_UNSUPPORTED_FILTER ScanConfigError_Code = 40 + // The supplied finding type is not supported. For example, we do not + // provide findings of the given finding type. + ScanConfigError_UNSUPPORTED_FINDING_TYPE ScanConfigError_Code = 41 + // The URL scheme of one or more of the supplied URLs is not supported. 
+ ScanConfigError_UNSUPPORTED_URL_SCHEME ScanConfigError_Code = 42 +) + +var ScanConfigError_Code_name = map[int32]string{ + 0: "CODE_UNSPECIFIED", + // Duplicate value: 0: "OK", + 1: "INTERNAL_ERROR", + 2: "APPENGINE_API_BACKEND_ERROR", + 3: "APPENGINE_API_NOT_ACCESSIBLE", + 4: "APPENGINE_DEFAULT_HOST_MISSING", + 6: "CANNOT_USE_GOOGLE_COM_ACCOUNT", + 7: "CANNOT_USE_OWNER_ACCOUNT", + 8: "COMPUTE_API_BACKEND_ERROR", + 9: "COMPUTE_API_NOT_ACCESSIBLE", + 10: "CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT", + 11: "CUSTOM_LOGIN_URL_MALFORMED", + 12: "CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS", + 13: "CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS", + 14: "CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS", + 15: "CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS", + 16: "DUPLICATE_SCAN_NAME", + 18: "INVALID_FIELD_VALUE", + 19: "FAILED_TO_AUTHENTICATE_TO_TARGET", + 20: "FINDING_TYPE_UNSPECIFIED", + 21: "FORBIDDEN_TO_SCAN_COMPUTE", + 22: "MALFORMED_FILTER", + 23: "MALFORMED_RESOURCE_NAME", + 24: "PROJECT_INACTIVE", + 25: "REQUIRED_FIELD", + 26: "RESOURCE_NAME_INCONSISTENT", + 27: "SCAN_ALREADY_RUNNING", + 28: "SCAN_NOT_RUNNING", + 29: "SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT", + 30: "SEED_URL_MALFORMED", + 31: "SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS", + 32: "SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS", + 33: "SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS", + 35: "SEED_URL_HAS_UNRESERVED_IP_ADDRESS", + 36: "SERVICE_ACCOUNT_NOT_CONFIGURED", + 37: "TOO_MANY_SCANS", + 38: "UNABLE_TO_RESOLVE_PROJECT_INFO", + 39: "UNSUPPORTED_BLACKLIST_PATTERN_FORMAT", + 40: "UNSUPPORTED_FILTER", + 41: "UNSUPPORTED_FINDING_TYPE", + 42: "UNSUPPORTED_URL_SCHEME", +} +var ScanConfigError_Code_value = map[string]int32{ + "CODE_UNSPECIFIED": 0, + "OK": 0, + "INTERNAL_ERROR": 1, + "APPENGINE_API_BACKEND_ERROR": 2, + "APPENGINE_API_NOT_ACCESSIBLE": 3, + "APPENGINE_DEFAULT_HOST_MISSING": 4, + "CANNOT_USE_GOOGLE_COM_ACCOUNT": 6, + "CANNOT_USE_OWNER_ACCOUNT": 7, + "COMPUTE_API_BACKEND_ERROR": 8, + "COMPUTE_API_NOT_ACCESSIBLE": 9, + "CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT": 10, + "CUSTOM_LOGIN_URL_MALFORMED": 11, + "CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS": 12, + "CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS": 13, + "CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS": 14, + "CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS": 15, + "DUPLICATE_SCAN_NAME": 16, + "INVALID_FIELD_VALUE": 18, + "FAILED_TO_AUTHENTICATE_TO_TARGET": 19, + "FINDING_TYPE_UNSPECIFIED": 20, + "FORBIDDEN_TO_SCAN_COMPUTE": 21, + "MALFORMED_FILTER": 22, + "MALFORMED_RESOURCE_NAME": 23, + "PROJECT_INACTIVE": 24, + "REQUIRED_FIELD": 25, + "RESOURCE_NAME_INCONSISTENT": 26, + "SCAN_ALREADY_RUNNING": 27, + "SCAN_NOT_RUNNING": 28, + "SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT": 29, + "SEED_URL_MALFORMED": 30, + "SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS": 31, + "SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS": 32, + "SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS": 33, + "SEED_URL_HAS_UNRESERVED_IP_ADDRESS": 35, + "SERVICE_ACCOUNT_NOT_CONFIGURED": 36, + "TOO_MANY_SCANS": 37, + "UNABLE_TO_RESOLVE_PROJECT_INFO": 38, + "UNSUPPORTED_BLACKLIST_PATTERN_FORMAT": 39, + "UNSUPPORTED_FILTER": 40, + "UNSUPPORTED_FINDING_TYPE": 41, + "UNSUPPORTED_URL_SCHEME": 42, +} + +func (x ScanConfigError_Code) String() string { + return proto.EnumName(ScanConfigError_Code_name, int32(x)) +} +func (ScanConfigError_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_config_error_4b32cc55957c457c, []int{0, 0} +} + +// Defines a custom error message used by CreateScanConfig and UpdateScanConfig 
+// APIs when scan configuration validation fails. It is also reported as part of +// a ScanRunErrorTrace message if scan validation fails due to a scan +// configuration error. +type ScanConfigError struct { + // Output only. + // Indicates the reason code for a configuration failure. + Code ScanConfigError_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanConfigError_Code" json:"code,omitempty"` + // Output only. + // Indicates the full name of the ScanConfig field that triggers this error, + // for example "scan_config.max_qps". This field is provided for + // troubleshooting purposes only and its actual value can change in the + // future. + FieldName string `protobuf:"bytes,2,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfigError) Reset() { *m = ScanConfigError{} } +func (m *ScanConfigError) String() string { return proto.CompactTextString(m) } +func (*ScanConfigError) ProtoMessage() {} +func (*ScanConfigError) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_config_error_4b32cc55957c457c, []int{0} +} +func (m *ScanConfigError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfigError.Unmarshal(m, b) +} +func (m *ScanConfigError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfigError.Marshal(b, m, deterministic) +} +func (dst *ScanConfigError) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfigError.Merge(dst, src) +} +func (m *ScanConfigError) XXX_Size() int { + return xxx_messageInfo_ScanConfigError.Size(m) +} +func (m *ScanConfigError) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfigError.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfigError proto.InternalMessageInfo + +func (m *ScanConfigError) GetCode() ScanConfigError_Code { + if m != nil { + return m.Code + } + return ScanConfigError_CODE_UNSPECIFIED +} + +func (m *ScanConfigError) GetFieldName() string { + if m != nil { + return m.FieldName + } + return "" +} + +func init() { + proto.RegisterType((*ScanConfigError)(nil), "google.cloud.websecurityscanner.v1beta.ScanConfigError") + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanConfigError_Code", ScanConfigError_Code_name, ScanConfigError_Code_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/scan_config_error.proto", fileDescriptor_scan_config_error_4b32cc55957c457c) +} + +var fileDescriptor_scan_config_error_4b32cc55957c457c = []byte{ + // 865 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x5d, 0x53, 0x1b, 0x37, + 0x14, 0x8d, 0x29, 0x43, 0x8b, 0xd2, 0x12, 0x8d, 0x42, 0xc1, 0xe1, 0x2b, 0x0e, 0x25, 0x84, 0x90, + 0xd6, 0x6e, 0xca, 0x43, 0x1f, 0xda, 0xe9, 0x8c, 0x2c, 0xdd, 0x35, 0x2a, 0x5a, 0x69, 0xab, 0x0f, + 0xb7, 0xf4, 0x45, 0x63, 0xcc, 0xc6, 0xc3, 0x0c, 0x78, 0x33, 0x86, 0xb4, 0xd3, 0x3f, 0xd3, 0x3f, + 0xd5, 0x97, 0xfe, 0x9c, 0x8e, 0xd6, 0xae, 0x3f, 0xc0, 0x43, 0x78, 0xdc, 0x7b, 0xcf, 0x39, 0xf7, + 0xea, 0xde, 0xb3, 0x12, 0xfa, 0xa9, 0x57, 0x14, 0xbd, 0xcb, 0xbc, 0xd1, 0xbd, 0x2c, 0x3e, 0x9c, + 0x37, 0xfe, 0xcc, 0xcf, 0xae, 0xf3, 0xee, 0x87, 0xc1, 0xc5, 0xcd, 0x5f, 0xd7, 0xdd, 0x4e, 0xbf, + 0x9f, 0x0f, 0x1a, 0x7f, 0xbc, 0x3d, 0xcb, 0x6f, 0x3a, 0x8d, 0xf8, 0x19, 0xba, 0x45, 0xff, 0xdd, + 0x45, 0x2f, 0xe4, 0x83, 0x41, 0x31, 0xa8, 0xbf, 0x1f, 0x14, 0x37, 0x05, 0xd9, 0x1f, 
0xf2, 0xeb, + 0x25, 0xbf, 0x7e, 0x97, 0x5f, 0x1f, 0xf2, 0x77, 0xff, 0x7d, 0x8c, 0x9e, 0xd8, 0x6e, 0xa7, 0xcf, + 0x4a, 0x09, 0x88, 0x0a, 0x24, 0x43, 0x8b, 0xdd, 0xe2, 0x3c, 0xaf, 0x56, 0x6a, 0x95, 0x83, 0x95, + 0xef, 0x7e, 0xac, 0x3f, 0x4c, 0xaa, 0x7e, 0x4b, 0xa6, 0xce, 0x8a, 0xf3, 0xdc, 0x94, 0x4a, 0x64, + 0x1b, 0xa1, 0x77, 0x17, 0xf9, 0xe5, 0x79, 0xe8, 0x77, 0xae, 0xf2, 0xea, 0x42, 0xad, 0x72, 0xb0, + 0x6c, 0x96, 0xcb, 0x88, 0xea, 0x5c, 0xe5, 0xbb, 0xff, 0x20, 0xb4, 0x18, 0xd1, 0x64, 0x15, 0x61, + 0xa6, 0x39, 0x04, 0xaf, 0x6c, 0x06, 0x4c, 0x24, 0x02, 0x38, 0x7e, 0x44, 0x96, 0xd0, 0x82, 0x3e, + 0xc1, 0x8f, 0x08, 0x41, 0x2b, 0x42, 0x39, 0x30, 0x8a, 0xca, 0x00, 0xc6, 0x68, 0x83, 0x2b, 0xe4, + 0x39, 0xda, 0xa4, 0x59, 0x06, 0xaa, 0x25, 0x14, 0x04, 0x9a, 0x89, 0xd0, 0xa4, 0xec, 0x04, 0x14, + 0x1f, 0x01, 0x16, 0x48, 0x0d, 0x6d, 0xcd, 0x02, 0x94, 0x76, 0x81, 0x32, 0x06, 0xd6, 0x8a, 0xa6, + 0x04, 0xfc, 0x09, 0xd9, 0x45, 0x3b, 0x13, 0x04, 0x87, 0x84, 0x7a, 0xe9, 0xc2, 0xb1, 0xb6, 0x2e, + 0xa4, 0xc2, 0x5a, 0xa1, 0x5a, 0x78, 0x91, 0xbc, 0x40, 0xdb, 0x8c, 0xaa, 0x48, 0xf5, 0x16, 0x42, + 0x4b, 0xeb, 0x96, 0x84, 0xc0, 0x74, 0x1a, 0x95, 0xb4, 0x57, 0x0e, 0x2f, 0x91, 0x2d, 0x54, 0x9d, + 0x82, 0xe8, 0x5f, 0x15, 0x98, 0x71, 0xf6, 0x53, 0xb2, 0x8d, 0x9e, 0x31, 0x9d, 0x66, 0xde, 0xcd, + 0xeb, 0xf2, 0x33, 0xb2, 0x83, 0x36, 0xa6, 0xd3, 0xb7, 0x7a, 0x5c, 0x26, 0xdf, 0xa3, 0x23, 0xe6, + 0xad, 0xd3, 0x69, 0x90, 0xba, 0x25, 0x54, 0xf0, 0x46, 0x06, 0xae, 0xc1, 0x96, 0xc8, 0x26, 0x48, + 0xad, 0x5a, 0xc1, 0xe9, 0xc0, 0xbc, 0x31, 0xa0, 0x5c, 0xc8, 0x8c, 0xfe, 0x19, 0x98, 0xc3, 0xa8, + 0x14, 0xbe, 0x4d, 0x4c, 0xa9, 0x4c, 0xb4, 0x49, 0x81, 0xe3, 0xc7, 0xe4, 0x08, 0x35, 0xe6, 0xe4, + 0xb3, 0x0c, 0x78, 0xd4, 0x53, 0x5a, 0x05, 0xa3, 0xbd, 0xa3, 0x4d, 0x09, 0x81, 0x72, 0x6e, 0xc0, + 0x5a, 0xfc, 0x39, 0x79, 0x8b, 0xbe, 0xb9, 0x87, 0xe4, 0x95, 0x01, 0x0b, 0xa6, 0x0d, 0x7c, 0x4c, + 0xf9, 0x82, 0x7c, 0x8b, 0xbe, 0xbe, 0x43, 0x39, 0xa6, 0x76, 0xb6, 0x82, 0xc8, 0xc6, 0x8c, 0x15, + 0x52, 0x47, 0x87, 0x73, 0x19, 0x53, 0xf2, 0x53, 0xf8, 0x27, 0x64, 0x1d, 0x3d, 0xe5, 0x3e, 0x93, + 0x82, 0x51, 0x07, 0xc1, 0x32, 0xaa, 0x82, 0xa2, 0x29, 0x60, 0x1c, 0x13, 0x42, 0xb5, 0xa9, 0x14, + 0x3c, 0x24, 0x02, 0x24, 0x0f, 0x6d, 0x2a, 0x3d, 0x60, 0x42, 0xf6, 0x50, 0x2d, 0xa1, 0x42, 0x0e, + 0xbb, 0xa6, 0xde, 0x1d, 0x83, 0x72, 0x43, 0xba, 0xd3, 0xc1, 0x51, 0xd3, 0x02, 0x87, 0x9f, 0xc6, + 0xbd, 0x26, 0x42, 0x71, 0x11, 0x27, 0x7c, 0x9a, 0xcd, 0x7a, 0x73, 0x35, 0xee, 0x35, 0xd1, 0xa6, + 0x29, 0x38, 0x07, 0x15, 0x69, 0x65, 0xe1, 0xd1, 0x2a, 0xf1, 0x97, 0xd1, 0xd0, 0xe3, 0x69, 0x87, + 0x44, 0x48, 0x07, 0x06, 0xaf, 0x91, 0x4d, 0xb4, 0x3e, 0x89, 0x1a, 0xb0, 0xda, 0x1b, 0x06, 0xc3, + 0x76, 0xd7, 0x23, 0x65, 0xb4, 0xbe, 0x20, 0x14, 0x65, 0x4e, 0xb4, 0x01, 0x57, 0xa3, 0xf7, 0x0d, + 0xfc, 0xe2, 0x85, 0x81, 0xd1, 0x29, 0xf0, 0xb3, 0xb8, 0xdb, 0x19, 0x72, 0x10, 0x8a, 0x69, 0x65, + 0x85, 0x75, 0xa0, 0x1c, 0xde, 0x20, 0x55, 0xb4, 0x5a, 0xb6, 0x43, 0xa5, 0x01, 0xca, 0x4f, 0x83, + 0xf1, 0x4a, 0x45, 0x3b, 0x6f, 0xc6, 0x1a, 0xc3, 0x09, 0x69, 0x37, 0x8e, 0x6e, 0x91, 0x06, 0x7a, + 0x63, 0x01, 0xf8, 0x43, 0xcd, 0xb5, 0x4d, 0xd6, 0x10, 0x19, 0x13, 0x26, 0xa6, 0xda, 0x21, 0x6f, + 0xd0, 0xab, 0xa9, 0xf8, 0xbd, 0x66, 0x7a, 0x4e, 0x5e, 0xa3, 0x97, 0x73, 0xc0, 0x73, 0x4c, 0x54, + 0x23, 0x07, 0x68, 0x6f, 0x0c, 0xbd, 0xcf, 0x3c, 0x2f, 0xc8, 0x3e, 0xda, 0x9d, 0x41, 0xce, 0x37, + 0xcd, 0x57, 0xf1, 0xdf, 0x8f, 0x61, 0xc1, 0xe0, 0xff, 0x7f, 0xb5, 0x3c, 0x34, 0xd3, 0x2a, 0x11, + 0x2d, 0x6f, 0x80, 0xe3, 0xbd, 0x38, 0x7a, 0xa7, 0x75, 0x48, 0xa9, 0x3a, 0x2d, 0xd7, 0x6b, 0xf1, + 0xcb, 0xc8, 
0xf3, 0xaa, 0x2c, 0xeb, 0x74, 0xb9, 0x41, 0xd9, 0x86, 0x30, 0x59, 0x5b, 0xa2, 0xf1, + 0x7e, 0xec, 0xd6, 0x2b, 0xeb, 0xb3, 0x4c, 0x1b, 0x07, 0x3c, 0x34, 0x25, 0x65, 0x27, 0x52, 0x58, + 0x17, 0x32, 0xea, 0xe2, 0x2d, 0x16, 0xe2, 0xbc, 0xa8, 0xc3, 0xaf, 0xe2, 0x1c, 0xa7, 0x91, 0x23, + 0x9f, 0x1c, 0x44, 0xeb, 0xcd, 0xc6, 0x27, 0x36, 0xc4, 0xaf, 0xc9, 0x06, 0x5a, 0x9b, 0xce, 0xc6, + 0xa3, 0x5a, 0x76, 0x0c, 0x29, 0xe0, 0xc3, 0x8d, 0x05, 0x5c, 0x69, 0xfe, 0x5d, 0x41, 0x87, 0xdd, + 0xe2, 0xea, 0x81, 0xd7, 0x77, 0x73, 0xf5, 0xd6, 0xfd, 0x9d, 0xc5, 0x77, 0x24, 0xab, 0xfc, 0xfe, + 0xdb, 0x88, 0xdf, 0x2b, 0x2e, 0x3b, 0xfd, 0x5e, 0xbd, 0x18, 0xf4, 0x1a, 0xbd, 0xbc, 0x5f, 0xbe, + 0x32, 0x8d, 0x61, 0xaa, 0xf3, 0xfe, 0xe2, 0xfa, 0x63, 0x0f, 0xd5, 0x0f, 0x77, 0x33, 0x67, 0x4b, + 0xa5, 0xc8, 0xd1, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x82, 0x51, 0xa7, 0xe9, 0xec, 0x06, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run.pb.go new file mode 100644 index 0000000..2cd7443 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run.pb.go @@ -0,0 +1,298 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/scan_run.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Types of ScanRun execution state. +type ScanRun_ExecutionState int32 + +const ( + // Represents an invalid state caused by internal server error. This value + // should never be returned. + ScanRun_EXECUTION_STATE_UNSPECIFIED ScanRun_ExecutionState = 0 + // The scan is waiting in the queue. + ScanRun_QUEUED ScanRun_ExecutionState = 1 + // The scan is in progress. + ScanRun_SCANNING ScanRun_ExecutionState = 2 + // The scan is either finished or stopped by user. + ScanRun_FINISHED ScanRun_ExecutionState = 3 +) + +var ScanRun_ExecutionState_name = map[int32]string{ + 0: "EXECUTION_STATE_UNSPECIFIED", + 1: "QUEUED", + 2: "SCANNING", + 3: "FINISHED", +} +var ScanRun_ExecutionState_value = map[string]int32{ + "EXECUTION_STATE_UNSPECIFIED": 0, + "QUEUED": 1, + "SCANNING": 2, + "FINISHED": 3, +} + +func (x ScanRun_ExecutionState) String() string { + return proto.EnumName(ScanRun_ExecutionState_name, int32(x)) +} +func (ScanRun_ExecutionState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_4ba20ab7789cc312, []int{0, 0} +} + +// Types of ScanRun result state. +type ScanRun_ResultState int32 + +const ( + // Default value. This value is returned when the ScanRun is not yet + // finished. + ScanRun_RESULT_STATE_UNSPECIFIED ScanRun_ResultState = 0 + // The scan finished without errors. 
+ ScanRun_SUCCESS ScanRun_ResultState = 1 + // The scan finished with errors. + ScanRun_ERROR ScanRun_ResultState = 2 + // The scan was terminated by user. + ScanRun_KILLED ScanRun_ResultState = 3 +) + +var ScanRun_ResultState_name = map[int32]string{ + 0: "RESULT_STATE_UNSPECIFIED", + 1: "SUCCESS", + 2: "ERROR", + 3: "KILLED", +} +var ScanRun_ResultState_value = map[string]int32{ + "RESULT_STATE_UNSPECIFIED": 0, + "SUCCESS": 1, + "ERROR": 2, + "KILLED": 3, +} + +func (x ScanRun_ResultState) String() string { + return proto.EnumName(ScanRun_ResultState_name, int32(x)) +} +func (ScanRun_ResultState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_4ba20ab7789cc312, []int{0, 1} +} + +// A ScanRun is a output-only resource representing an actual run of the scan. +// Next id: 12 +type ScanRun struct { + // Output only. + // The resource name of the ScanRun. The name follows the format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + // The ScanRun IDs are generated by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. + // The execution state of the ScanRun. + ExecutionState ScanRun_ExecutionState `protobuf:"varint,2,opt,name=execution_state,json=executionState,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanRun_ExecutionState" json:"execution_state,omitempty"` + // Output only. + // The result state of the ScanRun. This field is only available after the + // execution state reaches "FINISHED". + ResultState ScanRun_ResultState `protobuf:"varint,3,opt,name=result_state,json=resultState,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanRun_ResultState" json:"result_state,omitempty"` + // Output only. + // The time at which the ScanRun started. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. + // The time at which the ScanRun reached termination state - that the ScanRun + // is either finished or stopped by user. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Output only. + // The number of URLs crawled during this ScanRun. If the scan is in progress, + // the value represents the number of URLs crawled up to now. + UrlsCrawledCount int64 `protobuf:"varint,6,opt,name=urls_crawled_count,json=urlsCrawledCount,proto3" json:"urls_crawled_count,omitempty"` + // Output only. + // The number of URLs tested during this ScanRun. If the scan is in progress, + // the value represents the number of URLs tested up to now. The number of + // URLs tested is usually larger than the number URLS crawled because + // typically a crawled URL is tested with multiple test payloads. + UrlsTestedCount int64 `protobuf:"varint,7,opt,name=urls_tested_count,json=urlsTestedCount,proto3" json:"urls_tested_count,omitempty"` + // Output only. + // Whether the scan run has found any vulnerabilities. + HasVulnerabilities bool `protobuf:"varint,8,opt,name=has_vulnerabilities,json=hasVulnerabilities,proto3" json:"has_vulnerabilities,omitempty"` + // Output only. + // The percentage of total completion ranging from 0 to 100. + // If the scan is in queue, the value is 0. + // If the scan is running, the value ranges from 0 to 100. + // If the scan is finished, the value is 100. + ProgressPercent int32 `protobuf:"varint,9,opt,name=progress_percent,json=progressPercent,proto3" json:"progress_percent,omitempty"` + // Output only. 
+ // If result_state is an ERROR, this field provides the primary reason for + // scan's termination and more details, if such are available. + ErrorTrace *ScanRunErrorTrace `protobuf:"bytes,10,opt,name=error_trace,json=errorTrace,proto3" json:"error_trace,omitempty"` + // Output only. + // A list of warnings, if such are encountered during this scan run. + WarningTraces []*ScanRunWarningTrace `protobuf:"bytes,11,rep,name=warning_traces,json=warningTraces,proto3" json:"warning_traces,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanRun) Reset() { *m = ScanRun{} } +func (m *ScanRun) String() string { return proto.CompactTextString(m) } +func (*ScanRun) ProtoMessage() {} +func (*ScanRun) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_run_4ba20ab7789cc312, []int{0} +} +func (m *ScanRun) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanRun.Unmarshal(m, b) +} +func (m *ScanRun) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanRun.Marshal(b, m, deterministic) +} +func (dst *ScanRun) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanRun.Merge(dst, src) +} +func (m *ScanRun) XXX_Size() int { + return xxx_messageInfo_ScanRun.Size(m) +} +func (m *ScanRun) XXX_DiscardUnknown() { + xxx_messageInfo_ScanRun.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanRun proto.InternalMessageInfo + +func (m *ScanRun) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanRun) GetExecutionState() ScanRun_ExecutionState { + if m != nil { + return m.ExecutionState + } + return ScanRun_EXECUTION_STATE_UNSPECIFIED +} + +func (m *ScanRun) GetResultState() ScanRun_ResultState { + if m != nil { + return m.ResultState + } + return ScanRun_RESULT_STATE_UNSPECIFIED +} + +func (m *ScanRun) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ScanRun) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ScanRun) GetUrlsCrawledCount() int64 { + if m != nil { + return m.UrlsCrawledCount + } + return 0 +} + +func (m *ScanRun) GetUrlsTestedCount() int64 { + if m != nil { + return m.UrlsTestedCount + } + return 0 +} + +func (m *ScanRun) GetHasVulnerabilities() bool { + if m != nil { + return m.HasVulnerabilities + } + return false +} + +func (m *ScanRun) GetProgressPercent() int32 { + if m != nil { + return m.ProgressPercent + } + return 0 +} + +func (m *ScanRun) GetErrorTrace() *ScanRunErrorTrace { + if m != nil { + return m.ErrorTrace + } + return nil +} + +func (m *ScanRun) GetWarningTraces() []*ScanRunWarningTrace { + if m != nil { + return m.WarningTraces + } + return nil +} + +func init() { + proto.RegisterType((*ScanRun)(nil), "google.cloud.websecurityscanner.v1beta.ScanRun") + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanRun_ExecutionState", ScanRun_ExecutionState_name, ScanRun_ExecutionState_value) + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanRun_ResultState", ScanRun_ResultState_name, ScanRun_ResultState_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/scan_run.proto", fileDescriptor_scan_run_4ba20ab7789cc312) +} + +var fileDescriptor_scan_run_4ba20ab7789cc312 = []byte{ + // 616 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x5d, 0x4b, 0xdc, 0x4c, + 0x14, 0xc7, 0x9f, 0xb8, 0xba, 0x2f, 0x27, 
0x3e, 0x6b, 0x3a, 0xbd, 0x09, 0x56, 0x30, 0x78, 0x51, + 0x52, 0x29, 0x09, 0xb5, 0x78, 0x21, 0x42, 0x41, 0x63, 0x6c, 0x43, 0x65, 0xdd, 0x26, 0xd9, 0xd6, + 0x7a, 0xd1, 0x30, 0x9b, 0x9d, 0xc6, 0x40, 0x76, 0xb2, 0xcc, 0x4c, 0xdc, 0xf6, 0x8b, 0xf4, 0xc3, + 0xf6, 0xaa, 0xcc, 0x24, 0x6b, 0x5d, 0x2c, 0xe8, 0xde, 0xe5, 0xbc, 0xfc, 0x7f, 0x7f, 0xce, 0x99, + 0x43, 0xe0, 0x30, 0x2b, 0xcb, 0xac, 0x20, 0x6e, 0x5a, 0x94, 0xd5, 0xc4, 0x9d, 0x93, 0x31, 0x27, + 0x69, 0xc5, 0x72, 0xf1, 0x93, 0xa7, 0x98, 0x52, 0xc2, 0xdc, 0xdb, 0x37, 0x63, 0x22, 0xb0, 0x2b, + 0xc3, 0x84, 0x55, 0xd4, 0x99, 0xb1, 0x52, 0x94, 0xe8, 0x65, 0x2d, 0x73, 0x94, 0xcc, 0x79, 0x28, + 0x73, 0x6a, 0xd9, 0xf6, 0x4e, 0x83, 0xc7, 0xb3, 0xdc, 0xc5, 0x94, 0x96, 0x02, 0x8b, 0xbc, 0xa4, + 0xbc, 0xa6, 0x6c, 0x9f, 0xac, 0x68, 0x9e, 0x10, 0xc6, 0x4a, 0x96, 0x08, 0x86, 0x53, 0xd2, 0x20, + 0xbc, 0x55, 0x11, 0x73, 0xcc, 0x68, 0x4e, 0xb3, 0x25, 0xc8, 0x6e, 0x03, 0x51, 0xd1, 0xb8, 0xfa, + 0xee, 0x8a, 0x7c, 0x4a, 0xb8, 0xc0, 0xd3, 0x59, 0xdd, 0xb0, 0xf7, 0xbb, 0x0d, 0x9d, 0x28, 0xc5, + 0x34, 0xac, 0x28, 0x42, 0xb0, 0x4e, 0xf1, 0x94, 0x98, 0x9a, 0xa5, 0xd9, 0xbd, 0x50, 0x7d, 0xa3, + 0x0c, 0xb6, 0xc8, 0x0f, 0x92, 0x56, 0x72, 0xb8, 0x84, 0x0b, 0x2c, 0x88, 0xb9, 0x66, 0x69, 0x76, + 0xff, 0xe0, 0x9d, 0xf3, 0xb4, 0x45, 0x39, 0x0d, 0xdd, 0xf1, 0x17, 0x98, 0x48, 0x52, 0xc2, 0x3e, + 0x59, 0x8a, 0xd1, 0x37, 0xd8, 0x64, 0x84, 0x57, 0x85, 0x68, 0x5c, 0x5a, 0xca, 0xe5, 0x78, 0x55, + 0x97, 0x50, 0x31, 0x6a, 0x0b, 0x9d, 0xfd, 0x0d, 0xd0, 0x11, 0x00, 0x17, 0x98, 0x89, 0x44, 0x6e, + 0xc0, 0x5c, 0xb7, 0x34, 0x5b, 0x3f, 0xd8, 0x5e, 0xd0, 0x17, 0xeb, 0x71, 0xe2, 0xc5, 0x7a, 0xc2, + 0x9e, 0xea, 0x96, 0x31, 0x3a, 0x84, 0x2e, 0xa1, 0x93, 0x5a, 0xb8, 0xf1, 0xa8, 0xb0, 0x43, 0xe8, + 0x44, 0xc9, 0x5e, 0x03, 0xaa, 0x58, 0xc1, 0x93, 0x94, 0xe1, 0x79, 0x41, 0x26, 0x49, 0x5a, 0x56, + 0x54, 0x98, 0x6d, 0x4b, 0xb3, 0x5b, 0xa1, 0x21, 0x2b, 0x5e, 0x5d, 0xf0, 0x64, 0x1e, 0xed, 0xc3, + 0x33, 0xd5, 0x2d, 0x08, 0x17, 0x77, 0xcd, 0x1d, 0xd5, 0xbc, 0x25, 0x0b, 0xb1, 0xca, 0xd7, 0xbd, + 0x2e, 0x3c, 0xbf, 0xc1, 0x3c, 0xb9, 0xad, 0x0a, 0x4a, 0x18, 0x1e, 0xe7, 0x45, 0x2e, 0x72, 0xc2, + 0xcd, 0xae, 0xa5, 0xd9, 0xdd, 0x10, 0xdd, 0x60, 0xfe, 0x79, 0xb9, 0x82, 0x5e, 0x81, 0x31, 0x63, + 0x65, 0xc6, 0x08, 0xe7, 0xc9, 0x8c, 0xb0, 0x94, 0x50, 0x61, 0xf6, 0x2c, 0xcd, 0xde, 0x08, 0xb7, + 0x16, 0xf9, 0x61, 0x9d, 0x46, 0xd7, 0xa0, 0xdf, 0xbb, 0x45, 0x13, 0xd4, 0xbc, 0x47, 0x2b, 0x3e, + 0x83, 0x2f, 0x09, 0xb1, 0x04, 0x84, 0x40, 0xee, 0xbe, 0xd1, 0x18, 0xfa, 0x4b, 0x47, 0xca, 0x4d, + 0xdd, 0x6a, 0xd9, 0xfa, 0xca, 0xaf, 0xfc, 0xa5, 0x86, 0xd4, 0x06, 0xff, 0xcf, 0xef, 0x45, 0x7c, + 0xef, 0x2b, 0xf4, 0x97, 0x2f, 0x0d, 0xed, 0xc2, 0x0b, 0xff, 0xca, 0xf7, 0x46, 0x71, 0x70, 0x39, + 0x48, 0xa2, 0xf8, 0x24, 0xf6, 0x93, 0xd1, 0x20, 0x1a, 0xfa, 0x5e, 0x70, 0x1e, 0xf8, 0x67, 0xc6, + 0x7f, 0x08, 0xa0, 0xfd, 0x69, 0xe4, 0x8f, 0xfc, 0x33, 0x43, 0x43, 0x9b, 0xd0, 0x8d, 0xbc, 0x93, + 0xc1, 0x20, 0x18, 0xbc, 0x37, 0xd6, 0x64, 0x74, 0x1e, 0x0c, 0x82, 0xe8, 0x83, 0x7f, 0x66, 0xb4, + 0xf6, 0x2e, 0x41, 0xbf, 0x77, 0x5e, 0x68, 0x07, 0xcc, 0xd0, 0x8f, 0x46, 0x17, 0xf1, 0x3f, 0xa1, + 0x3a, 0x74, 0xa2, 0x91, 0xe7, 0xf9, 0x51, 0x64, 0x68, 0xa8, 0x07, 0x1b, 0x7e, 0x18, 0x5e, 0x86, + 0xc6, 0x9a, 0x34, 0xfb, 0x18, 0x5c, 0x5c, 0x48, 0xe0, 0xe9, 0x2f, 0x0d, 0xf6, 0xd3, 0x72, 0xfa, + 0xc4, 0xe9, 0x4f, 0x37, 0x9b, 0xf1, 0x87, 0xf2, 0xe8, 0x86, 0xda, 0xf5, 0x55, 0xa3, 0xcb, 0xca, + 0x02, 0xd3, 0xcc, 0x29, 0x59, 0xe6, 0x66, 0x84, 0xaa, 0x93, 0x74, 0xeb, 0x12, 0x9e, 0xe5, 0xfc, + 0xb1, 0x1f, 0xc8, 0xf1, 0xc3, 0xca, 0xb8, 0xad, 0x20, 0x6f, 0xff, 
0x04, 0x00, 0x00, 0xff, 0xff, + 0x9e, 0x77, 0x0e, 0xed, 0x44, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_error_trace.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_error_trace.pb.go new file mode 100644 index 0000000..6f8c069 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_error_trace.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/scan_run_error_trace.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Output only. +// Defines an error reason code. +// Next id: 7 +type ScanRunErrorTrace_Code int32 + +const ( + // Default value is never used. + ScanRunErrorTrace_CODE_UNSPECIFIED ScanRunErrorTrace_Code = 0 + // Indicates that the scan run failed due to an internal server error. + ScanRunErrorTrace_INTERNAL_ERROR ScanRunErrorTrace_Code = 1 + // Indicates a scan configuration error, usually due to outdated ScanConfig + // settings, such as starting_urls or the DNS configuration. + ScanRunErrorTrace_SCAN_CONFIG_ISSUE ScanRunErrorTrace_Code = 2 + // Indicates an authentication error, usually due to outdated ScanConfig + // authentication settings. + ScanRunErrorTrace_AUTHENTICATION_CONFIG_ISSUE ScanRunErrorTrace_Code = 3 + // Indicates a scan operation timeout, usually caused by a very large site. + ScanRunErrorTrace_TIMED_OUT_WHILE_SCANNING ScanRunErrorTrace_Code = 4 + // Indicates that a scan encountered excessive redirects, either to + // authentication or some other page outside of the scan scope. + ScanRunErrorTrace_TOO_MANY_REDIRECTS ScanRunErrorTrace_Code = 5 + // Indicates that a scan encountered numerous errors from the web site + // pages. When available, most_common_http_error_code field indicates the + // the most common HTTP error code encountered during the scan. + ScanRunErrorTrace_TOO_MANY_HTTP_ERRORS ScanRunErrorTrace_Code = 6 +) + +var ScanRunErrorTrace_Code_name = map[int32]string{ + 0: "CODE_UNSPECIFIED", + 1: "INTERNAL_ERROR", + 2: "SCAN_CONFIG_ISSUE", + 3: "AUTHENTICATION_CONFIG_ISSUE", + 4: "TIMED_OUT_WHILE_SCANNING", + 5: "TOO_MANY_REDIRECTS", + 6: "TOO_MANY_HTTP_ERRORS", +} +var ScanRunErrorTrace_Code_value = map[string]int32{ + "CODE_UNSPECIFIED": 0, + "INTERNAL_ERROR": 1, + "SCAN_CONFIG_ISSUE": 2, + "AUTHENTICATION_CONFIG_ISSUE": 3, + "TIMED_OUT_WHILE_SCANNING": 4, + "TOO_MANY_REDIRECTS": 5, + "TOO_MANY_HTTP_ERRORS": 6, +} + +func (x ScanRunErrorTrace_Code) String() string { + return proto.EnumName(ScanRunErrorTrace_Code_name, int32(x)) +} +func (ScanRunErrorTrace_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_error_trace_3b9236ac69667833, []int{0, 0} +} + +// Output only. +// Defines an error trace message for a ScanRun. +type ScanRunErrorTrace struct { + // Output only. 
+ // Indicates the error reason code. + Code ScanRunErrorTrace_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace_Code" json:"code,omitempty"` + // Output only. + // If the scan encounters SCAN_CONFIG_ISSUE error, this field has the error + // message encountered during scan configuration validation that is performed + // before each scan run. + ScanConfigError *ScanConfigError `protobuf:"bytes,2,opt,name=scan_config_error,json=scanConfigError,proto3" json:"scan_config_error,omitempty"` + // Output only. + // If the scan encounters TOO_MANY_HTTP_ERRORS, this field indicates the most + // common HTTP error code, if such is available. For example, if this code is + // 404, the scan has encountered too many NOT_FOUND responses. + MostCommonHttpErrorCode int32 `protobuf:"varint,3,opt,name=most_common_http_error_code,json=mostCommonHttpErrorCode,proto3" json:"most_common_http_error_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanRunErrorTrace) Reset() { *m = ScanRunErrorTrace{} } +func (m *ScanRunErrorTrace) String() string { return proto.CompactTextString(m) } +func (*ScanRunErrorTrace) ProtoMessage() {} +func (*ScanRunErrorTrace) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_run_error_trace_3b9236ac69667833, []int{0} +} +func (m *ScanRunErrorTrace) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanRunErrorTrace.Unmarshal(m, b) +} +func (m *ScanRunErrorTrace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanRunErrorTrace.Marshal(b, m, deterministic) +} +func (dst *ScanRunErrorTrace) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanRunErrorTrace.Merge(dst, src) +} +func (m *ScanRunErrorTrace) XXX_Size() int { + return xxx_messageInfo_ScanRunErrorTrace.Size(m) +} +func (m *ScanRunErrorTrace) XXX_DiscardUnknown() { + xxx_messageInfo_ScanRunErrorTrace.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanRunErrorTrace proto.InternalMessageInfo + +func (m *ScanRunErrorTrace) GetCode() ScanRunErrorTrace_Code { + if m != nil { + return m.Code + } + return ScanRunErrorTrace_CODE_UNSPECIFIED +} + +func (m *ScanRunErrorTrace) GetScanConfigError() *ScanConfigError { + if m != nil { + return m.ScanConfigError + } + return nil +} + +func (m *ScanRunErrorTrace) GetMostCommonHttpErrorCode() int32 { + if m != nil { + return m.MostCommonHttpErrorCode + } + return 0 +} + +func init() { + proto.RegisterType((*ScanRunErrorTrace)(nil), "google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace") + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace_Code", ScanRunErrorTrace_Code_name, ScanRunErrorTrace_Code_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/scan_run_error_trace.proto", fileDescriptor_scan_run_error_trace_3b9236ac69667833) +} + +var fileDescriptor_scan_run_error_trace_3b9236ac69667833 = []byte{ + // 432 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0xcb, 0x6e, 0x13, 0x31, + 0x14, 0x86, 0x99, 0x26, 0xed, 0xc2, 0x48, 0x65, 0x62, 0x95, 0x12, 0x51, 0x24, 0xa2, 0x2e, 0x50, + 0xc4, 0xc2, 0x23, 0xca, 0x82, 0x05, 0xa8, 0xd2, 0xd4, 0x71, 0x1b, 0x4b, 0xad, 0x27, 0xf2, 0x38, + 0xe2, 0xb2, 0xb1, 0x26, 0x8e, 0x19, 0x22, 0x25, 0x76, 0xe4, 0x71, 0x40, 0xbc, 0x0d, 0x8f, 0xc1, + 0x0b, 0xf0, 0x5e, 0xc8, 0x9e, 0x0a, 0x09, 0x66, 0x41, 0xbb, 0xb3, 0xcf, 0xe5, 0xfb, 0x7f, 0x1f, 
+ 0x1f, 0x90, 0xd7, 0xd6, 0xd6, 0x6b, 0x9d, 0xa9, 0xb5, 0xdd, 0x2d, 0xb3, 0x6f, 0x7a, 0xd1, 0x68, + 0xb5, 0x73, 0x2b, 0xff, 0xbd, 0x51, 0x95, 0x31, 0xda, 0x65, 0x5f, 0x5f, 0x2d, 0xb4, 0xaf, 0xb2, + 0x70, 0x95, 0x6e, 0x67, 0xa4, 0x76, 0xce, 0x3a, 0xe9, 0x5d, 0xa5, 0x34, 0xda, 0x3a, 0xeb, 0x2d, + 0x7c, 0xd1, 0x22, 0x50, 0x44, 0xa0, 0x2e, 0x02, 0xb5, 0x88, 0xa7, 0xe7, 0xf7, 0x91, 0x52, 0xd6, + 0x7c, 0x5e, 0xd5, 0xad, 0x5a, 0xab, 0x73, 0xfa, 0xab, 0x07, 0x06, 0xa5, 0xaa, 0x0c, 0xdf, 0x19, + 0x12, 0xc2, 0x22, 0x78, 0x80, 0x1c, 0xf4, 0x95, 0x5d, 0xea, 0x61, 0x32, 0x4a, 0xc6, 0x87, 0x67, + 0xe7, 0xe8, 0x6e, 0x66, 0x50, 0x07, 0x84, 0xb0, 0x5d, 0x6a, 0x1e, 0x59, 0x50, 0x81, 0x41, 0xc7, + 0xc4, 0x70, 0x6f, 0x94, 0x8c, 0x1f, 0x9e, 0xbd, 0xb9, 0x8f, 0x00, 0x8e, 0xfd, 0x51, 0x83, 0x3f, + 0x6a, 0xfe, 0x0e, 0xc0, 0x77, 0xe0, 0x64, 0x63, 0x1b, 0x2f, 0x95, 0xdd, 0x6c, 0xac, 0x91, 0x5f, + 0xbc, 0xdf, 0xde, 0x0e, 0x37, 0xbe, 0xa7, 0x37, 0x4a, 0xc6, 0xfb, 0xfc, 0x49, 0x28, 0xc1, 0xb1, + 0x62, 0xea, 0xfd, 0x36, 0x76, 0x06, 0xa3, 0xa7, 0x3f, 0x13, 0xd0, 0x0f, 0x07, 0x78, 0x04, 0x52, + 0x5c, 0x4c, 0x88, 0x9c, 0xb3, 0x72, 0x46, 0x30, 0xbd, 0xa4, 0x64, 0x92, 0x3e, 0x80, 0x10, 0x1c, + 0x52, 0x26, 0x08, 0x67, 0xf9, 0xb5, 0x24, 0x9c, 0x17, 0x3c, 0x4d, 0xe0, 0x63, 0x30, 0x28, 0x71, + 0xce, 0x24, 0x2e, 0xd8, 0x25, 0xbd, 0x92, 0xb4, 0x2c, 0xe7, 0x24, 0xdd, 0x83, 0xcf, 0xc1, 0x49, + 0x3e, 0x17, 0x53, 0xc2, 0x04, 0xc5, 0xb9, 0xa0, 0xc5, 0x3f, 0x05, 0x3d, 0xf8, 0x0c, 0x0c, 0x05, + 0xbd, 0x21, 0x13, 0x59, 0xcc, 0x85, 0x7c, 0x3f, 0xa5, 0xd7, 0x44, 0x06, 0x0e, 0xa3, 0xec, 0x2a, + 0xed, 0xc3, 0x63, 0x00, 0x45, 0x51, 0xc8, 0x9b, 0x9c, 0x7d, 0x94, 0x9c, 0x4c, 0x28, 0x27, 0x58, + 0x94, 0xe9, 0x3e, 0x1c, 0x82, 0xa3, 0x3f, 0xf1, 0xa9, 0x10, 0xb3, 0xd6, 0x46, 0x99, 0x1e, 0x5c, + 0xfc, 0x48, 0xc0, 0x4b, 0x65, 0x37, 0x77, 0x1c, 0xe4, 0xc5, 0x71, 0xe7, 0xab, 0x66, 0x61, 0x1d, + 0x66, 0xc9, 0xa7, 0x0f, 0xb7, 0x84, 0xda, 0xae, 0x2b, 0x53, 0x23, 0xeb, 0xea, 0xac, 0xd6, 0x26, + 0x2e, 0x4b, 0xd6, 0xa6, 0xaa, 0xed, 0xaa, 0xf9, 0xdf, 0xbe, 0xbd, 0xed, 0x66, 0x16, 0x07, 0x11, + 0xf2, 0xfa, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x22, 0xa4, 0x4a, 0x32, 0x1e, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_warning_trace.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_warning_trace.pb.go new file mode 100644 index 0000000..8b649b6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/scan_run_warning_trace.pb.go @@ -0,0 +1,140 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/scan_run_warning_trace.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Output only. +// Defines a warning message code. +// Next id: 5 +type ScanRunWarningTrace_Code int32 + +const ( + // Default value is never used. 
+ ScanRunWarningTrace_CODE_UNSPECIFIED ScanRunWarningTrace_Code = 0 + // Indicates that a scan discovered an unexpectedly low number of URLs. This + // is sometimes caused by complex navigation features or by using a single + // URL for numerous pages. + ScanRunWarningTrace_INSUFFICIENT_CRAWL_RESULTS ScanRunWarningTrace_Code = 1 + // Indicates that a scan discovered too many URLs to test, or excessive + // redundant URLs. + ScanRunWarningTrace_TOO_MANY_CRAWL_RESULTS ScanRunWarningTrace_Code = 2 + // Indicates that too many tests have been generated for the scan. Customer + // should try reducing the number of starting URLs, increasing the QPS rate, + // or narrowing down the scope of the scan using the excluded patterns. + ScanRunWarningTrace_TOO_MANY_FUZZ_TASKS ScanRunWarningTrace_Code = 3 + // Indicates that a scan is blocked by IAP. + ScanRunWarningTrace_BLOCKED_BY_IAP ScanRunWarningTrace_Code = 4 +) + +var ScanRunWarningTrace_Code_name = map[int32]string{ + 0: "CODE_UNSPECIFIED", + 1: "INSUFFICIENT_CRAWL_RESULTS", + 2: "TOO_MANY_CRAWL_RESULTS", + 3: "TOO_MANY_FUZZ_TASKS", + 4: "BLOCKED_BY_IAP", +} +var ScanRunWarningTrace_Code_value = map[string]int32{ + "CODE_UNSPECIFIED": 0, + "INSUFFICIENT_CRAWL_RESULTS": 1, + "TOO_MANY_CRAWL_RESULTS": 2, + "TOO_MANY_FUZZ_TASKS": 3, + "BLOCKED_BY_IAP": 4, +} + +func (x ScanRunWarningTrace_Code) String() string { + return proto.EnumName(ScanRunWarningTrace_Code_name, int32(x)) +} +func (ScanRunWarningTrace_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_scan_run_warning_trace_e453d12033752715, []int{0, 0} +} + +// Output only. +// Defines a warning trace message for ScanRun. Warning traces provide customers +// with useful information that helps make the scanning process more effective. +type ScanRunWarningTrace struct { + // Output only. + // Indicates the warning code. 
+ Code ScanRunWarningTrace_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace_Code" json:"code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanRunWarningTrace) Reset() { *m = ScanRunWarningTrace{} } +func (m *ScanRunWarningTrace) String() string { return proto.CompactTextString(m) } +func (*ScanRunWarningTrace) ProtoMessage() {} +func (*ScanRunWarningTrace) Descriptor() ([]byte, []int) { + return fileDescriptor_scan_run_warning_trace_e453d12033752715, []int{0} +} +func (m *ScanRunWarningTrace) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanRunWarningTrace.Unmarshal(m, b) +} +func (m *ScanRunWarningTrace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanRunWarningTrace.Marshal(b, m, deterministic) +} +func (dst *ScanRunWarningTrace) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanRunWarningTrace.Merge(dst, src) +} +func (m *ScanRunWarningTrace) XXX_Size() int { + return xxx_messageInfo_ScanRunWarningTrace.Size(m) +} +func (m *ScanRunWarningTrace) XXX_DiscardUnknown() { + xxx_messageInfo_ScanRunWarningTrace.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanRunWarningTrace proto.InternalMessageInfo + +func (m *ScanRunWarningTrace) GetCode() ScanRunWarningTrace_Code { + if m != nil { + return m.Code + } + return ScanRunWarningTrace_CODE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*ScanRunWarningTrace)(nil), "google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace") + proto.RegisterEnum("google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace_Code", ScanRunWarningTrace_Code_name, ScanRunWarningTrace_Code_value) +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/scan_run_warning_trace.proto", fileDescriptor_scan_run_warning_trace_e453d12033752715) +} + +var fileDescriptor_scan_run_warning_trace_e453d12033752715 = []byte{ + // 318 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0xd1, 0x41, 0x4b, 0xf3, 0x30, + 0x18, 0xc0, 0xf1, 0x37, 0xef, 0x3b, 0xde, 0x43, 0x0e, 0xa3, 0x64, 0xa2, 0x63, 0x07, 0x91, 0x1d, + 0x44, 0x3c, 0xa4, 0xa8, 0x47, 0x2f, 0xb6, 0x59, 0x0b, 0x65, 0xb3, 0x2d, 0x4d, 0xcb, 0xdc, 0x2e, + 0x21, 0xcb, 0x42, 0x18, 0xcc, 0x64, 0x64, 0xad, 0xc3, 0x0f, 0xe0, 0x17, 0xf1, 0x2b, 0xfa, 0x05, + 0xa4, 0xad, 0x78, 0x70, 0x03, 0x77, 0x7c, 0xf8, 0xc3, 0x0f, 0x9e, 0xe7, 0x81, 0x44, 0x19, 0xa3, + 0xd6, 0xd2, 0x15, 0x6b, 0x53, 0x2d, 0xdd, 0x9d, 0x5c, 0x6c, 0xa5, 0xa8, 0xec, 0xaa, 0x7c, 0xdd, + 0x0a, 0xae, 0xb5, 0xb4, 0xee, 0xcb, 0xcd, 0x42, 0x96, 0xdc, 0xad, 0x47, 0x66, 0x2b, 0xcd, 0x76, + 0xdc, 0xea, 0x95, 0x56, 0xac, 0xb4, 0x5c, 0x48, 0xbc, 0xb1, 0xa6, 0x34, 0xe8, 0xb2, 0x45, 0x70, + 0x83, 0xe0, 0x7d, 0x04, 0xb7, 0xc8, 0xf0, 0x03, 0xc0, 0x1e, 0x15, 0x5c, 0x67, 0x95, 0x9e, 0xb6, + 0x4c, 0x5e, 0x2b, 0x28, 0x87, 0x1d, 0x61, 0x96, 0xb2, 0x0f, 0x2e, 0xc0, 0x55, 0xf7, 0xf6, 0x01, + 0x1f, 0xc7, 0xe1, 0x03, 0x14, 0x26, 0x66, 0x29, 0xb3, 0x46, 0x1b, 0xbe, 0x01, 0xd8, 0xa9, 0x47, + 0x74, 0x02, 0x1d, 0x92, 0x8c, 0x02, 0x56, 0xc4, 0x34, 0x0d, 0x48, 0x14, 0x46, 0xc1, 0xc8, 0xf9, + 0x83, 0xce, 0xe1, 0x20, 0x8a, 0x69, 0x11, 0x86, 0x11, 0x89, 0x82, 0x38, 0x67, 0x24, 0xf3, 0xa6, + 0x13, 0x96, 0x05, 0xb4, 0x98, 0xe4, 0xd4, 0x01, 0x68, 0x00, 0x4f, 0xf3, 0x24, 0x61, 0x8f, 0x5e, + 0x3c, 0xfb, 0xd1, 0xfe, 0xa2, 0x33, 0xd8, 0xfb, 0x6e, 0x61, 0x31, 0x9f, 0xb3, 0xdc, 0xa3, 0x63, + 0xea, 0xfc, 0x43, 0x08, 0x76, 
0xfd, 0x49, 0x42, 0xc6, 0xc1, 0x88, 0xf9, 0x33, 0x16, 0x79, 0xa9, + 0xd3, 0xf1, 0xdf, 0x01, 0xbc, 0x16, 0xe6, 0xf9, 0xc8, 0xad, 0xfc, 0xfe, 0x81, 0xb5, 0xd2, 0xfa, + 0xcc, 0x29, 0x98, 0x3f, 0x7d, 0x19, 0xca, 0xac, 0xb9, 0x56, 0xd8, 0x58, 0xe5, 0x2a, 0xa9, 0x9b, + 0x27, 0xb8, 0x6d, 0xe2, 0x9b, 0xd5, 0xf6, 0xb7, 0x67, 0xde, 0xef, 0x97, 0xc5, 0xff, 0x06, 0xb9, + 0xfb, 0x0c, 0x00, 0x00, 0xff, 0xff, 0x13, 0x9b, 0x58, 0x02, 0x10, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/web_security_scanner.pb.go b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/web_security_scanner.pb.go new file mode 100644 index 0000000..38f7e23 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta/web_security_scanner.pb.go @@ -0,0 +1,1537 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/cloud/websecurityscanner/v1beta/web_security_scanner.proto + +package websecurityscanner // import "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request for the `CreateScanConfig` method. +type CreateScanConfigRequest struct { + // Required. + // The parent resource name where the scan is created, which should be a + // project resource name in the format 'projects/{projectId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. + // The ScanConfig to be created. 
+ ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateScanConfigRequest) Reset() { *m = CreateScanConfigRequest{} } +func (m *CreateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateScanConfigRequest) ProtoMessage() {} +func (*CreateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{0} +} +func (m *CreateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateScanConfigRequest.Unmarshal(m, b) +} +func (m *CreateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateScanConfigRequest.Merge(dst, src) +} +func (m *CreateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateScanConfigRequest.Size(m) +} +func (m *CreateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateScanConfigRequest proto.InternalMessageInfo + +func (m *CreateScanConfigRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +// Request for the `DeleteScanConfig` method. +type DeleteScanConfigRequest struct { + // Required. + // The resource name of the ScanConfig to be deleted. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteScanConfigRequest) Reset() { *m = DeleteScanConfigRequest{} } +func (m *DeleteScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteScanConfigRequest) ProtoMessage() {} +func (*DeleteScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{1} +} +func (m *DeleteScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteScanConfigRequest.Unmarshal(m, b) +} +func (m *DeleteScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteScanConfigRequest.Merge(dst, src) +} +func (m *DeleteScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_DeleteScanConfigRequest.Size(m) +} +func (m *DeleteScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteScanConfigRequest proto.InternalMessageInfo + +func (m *DeleteScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `GetScanConfig` method. +type GetScanConfigRequest struct { + // Required. + // The resource name of the ScanConfig to be returned. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetScanConfigRequest) Reset() { *m = GetScanConfigRequest{} } +func (m *GetScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetScanConfigRequest) ProtoMessage() {} +func (*GetScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{2} +} +func (m *GetScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetScanConfigRequest.Unmarshal(m, b) +} +func (m *GetScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetScanConfigRequest.Merge(dst, src) +} +func (m *GetScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetScanConfigRequest.Size(m) +} +func (m *GetScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetScanConfigRequest proto.InternalMessageInfo + +func (m *GetScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListScanConfigs` method. +type ListScanConfigsRequest struct { + // Required. + // The parent resource name, which should be a project resource name in the + // format 'projects/{projectId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of ScanConfigs to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsRequest) Reset() { *m = ListScanConfigsRequest{} } +func (m *ListScanConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsRequest) ProtoMessage() {} +func (*ListScanConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{3} +} +func (m *ListScanConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsRequest.Unmarshal(m, b) +} +func (m *ListScanConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsRequest.Merge(dst, src) +} +func (m *ListScanConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsRequest.Size(m) +} +func (m *ListScanConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsRequest proto.InternalMessageInfo + +func (m *ListScanConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Request for the `UpdateScanConfigRequest` method. +type UpdateScanConfigRequest struct { + // Required. + // The ScanConfig to be updated. The name field must be set to identify the + // resource to be updated. The values of fields not covered by the mask + // will be ignored. + ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + // Required. + // The update mask applies to the resource. 
For the `FieldMask` definition, + // see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateScanConfigRequest) Reset() { *m = UpdateScanConfigRequest{} } +func (m *UpdateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateScanConfigRequest) ProtoMessage() {} +func (*UpdateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{4} +} +func (m *UpdateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateScanConfigRequest.Unmarshal(m, b) +} +func (m *UpdateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateScanConfigRequest.Merge(dst, src) +} +func (m *UpdateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateScanConfigRequest.Size(m) +} +func (m *UpdateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateScanConfigRequest proto.InternalMessageInfo + +func (m *UpdateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +func (m *UpdateScanConfigRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Response for the `ListScanConfigs` method. +type ListScanConfigsResponse struct { + // The list of ScanConfigs returned. + ScanConfigs []*ScanConfig `protobuf:"bytes,1,rep,name=scan_configs,json=scanConfigs,proto3" json:"scan_configs,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsResponse) Reset() { *m = ListScanConfigsResponse{} } +func (m *ListScanConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsResponse) ProtoMessage() {} +func (*ListScanConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{5} +} +func (m *ListScanConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsResponse.Unmarshal(m, b) +} +func (m *ListScanConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsResponse.Merge(dst, src) +} +func (m *ListScanConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsResponse.Size(m) +} +func (m *ListScanConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsResponse proto.InternalMessageInfo + +func (m *ListScanConfigsResponse) GetScanConfigs() []*ScanConfig { + if m != nil { + return m.ScanConfigs + } + return nil +} + +func (m *ListScanConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `StartScanRun` method. +type StartScanRunRequest struct { + // Required. + // The resource name of the ScanConfig to be used. The name follows the + // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartScanRunRequest) Reset() { *m = StartScanRunRequest{} } +func (m *StartScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*StartScanRunRequest) ProtoMessage() {} +func (*StartScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{6} +} +func (m *StartScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartScanRunRequest.Unmarshal(m, b) +} +func (m *StartScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *StartScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartScanRunRequest.Merge(dst, src) +} +func (m *StartScanRunRequest) XXX_Size() int { + return xxx_messageInfo_StartScanRunRequest.Size(m) +} +func (m *StartScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartScanRunRequest proto.InternalMessageInfo + +func (m *StartScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `GetScanRun` method. +type GetScanRunRequest struct { + // Required. + // The resource name of the ScanRun to be returned. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetScanRunRequest) Reset() { *m = GetScanRunRequest{} } +func (m *GetScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*GetScanRunRequest) ProtoMessage() {} +func (*GetScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{7} +} +func (m *GetScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetScanRunRequest.Unmarshal(m, b) +} +func (m *GetScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *GetScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetScanRunRequest.Merge(dst, src) +} +func (m *GetScanRunRequest) XXX_Size() int { + return xxx_messageInfo_GetScanRunRequest.Size(m) +} +func (m *GetScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetScanRunRequest proto.InternalMessageInfo + +func (m *GetScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListScanRuns` method. +type ListScanRunsRequest struct { + // Required. + // The parent resource name, which should be a scan resource name in the + // format 'projects/{projectId}/scanConfigs/{scanConfigId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of ScanRuns to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanRunsRequest) Reset() { *m = ListScanRunsRequest{} } +func (m *ListScanRunsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanRunsRequest) ProtoMessage() {} +func (*ListScanRunsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{8} +} +func (m *ListScanRunsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanRunsRequest.Unmarshal(m, b) +} +func (m *ListScanRunsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanRunsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanRunsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanRunsRequest.Merge(dst, src) +} +func (m *ListScanRunsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanRunsRequest.Size(m) +} +func (m *ListScanRunsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanRunsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanRunsRequest proto.InternalMessageInfo + +func (m *ListScanRunsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanRunsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListScanRunsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListScanRuns` method. +type ListScanRunsResponse struct { + // The list of ScanRuns returned. + ScanRuns []*ScanRun `protobuf:"bytes,1,rep,name=scan_runs,json=scanRuns,proto3" json:"scan_runs,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanRunsResponse) Reset() { *m = ListScanRunsResponse{} } +func (m *ListScanRunsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanRunsResponse) ProtoMessage() {} +func (*ListScanRunsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{9} +} +func (m *ListScanRunsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanRunsResponse.Unmarshal(m, b) +} +func (m *ListScanRunsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanRunsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanRunsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanRunsResponse.Merge(dst, src) +} +func (m *ListScanRunsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanRunsResponse.Size(m) +} +func (m *ListScanRunsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanRunsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanRunsResponse proto.InternalMessageInfo + +func (m *ListScanRunsResponse) GetScanRuns() []*ScanRun { + if m != nil { + return m.ScanRuns + } + return nil +} + +func (m *ListScanRunsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `StopScanRun` method. 
+type StopScanRunRequest struct { + // Required. + // The resource name of the ScanRun to be stopped. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StopScanRunRequest) Reset() { *m = StopScanRunRequest{} } +func (m *StopScanRunRequest) String() string { return proto.CompactTextString(m) } +func (*StopScanRunRequest) ProtoMessage() {} +func (*StopScanRunRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{10} +} +func (m *StopScanRunRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StopScanRunRequest.Unmarshal(m, b) +} +func (m *StopScanRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StopScanRunRequest.Marshal(b, m, deterministic) +} +func (dst *StopScanRunRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StopScanRunRequest.Merge(dst, src) +} +func (m *StopScanRunRequest) XXX_Size() int { + return xxx_messageInfo_StopScanRunRequest.Size(m) +} +func (m *StopScanRunRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StopScanRunRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StopScanRunRequest proto.InternalMessageInfo + +func (m *StopScanRunRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListCrawledUrls` method. +type ListCrawledUrlsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of CrawledUrls to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCrawledUrlsRequest) Reset() { *m = ListCrawledUrlsRequest{} } +func (m *ListCrawledUrlsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCrawledUrlsRequest) ProtoMessage() {} +func (*ListCrawledUrlsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{11} +} +func (m *ListCrawledUrlsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCrawledUrlsRequest.Unmarshal(m, b) +} +func (m *ListCrawledUrlsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCrawledUrlsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCrawledUrlsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCrawledUrlsRequest.Merge(dst, src) +} +func (m *ListCrawledUrlsRequest) XXX_Size() int { + return xxx_messageInfo_ListCrawledUrlsRequest.Size(m) +} +func (m *ListCrawledUrlsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCrawledUrlsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCrawledUrlsRequest proto.InternalMessageInfo + +func (m *ListCrawledUrlsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListCrawledUrlsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListCrawledUrlsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListCrawledUrls` method. +type ListCrawledUrlsResponse struct { + // The list of CrawledUrls returned. + CrawledUrls []*CrawledUrl `protobuf:"bytes,1,rep,name=crawled_urls,json=crawledUrls,proto3" json:"crawled_urls,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCrawledUrlsResponse) Reset() { *m = ListCrawledUrlsResponse{} } +func (m *ListCrawledUrlsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCrawledUrlsResponse) ProtoMessage() {} +func (*ListCrawledUrlsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{12} +} +func (m *ListCrawledUrlsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCrawledUrlsResponse.Unmarshal(m, b) +} +func (m *ListCrawledUrlsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCrawledUrlsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCrawledUrlsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCrawledUrlsResponse.Merge(dst, src) +} +func (m *ListCrawledUrlsResponse) XXX_Size() int { + return xxx_messageInfo_ListCrawledUrlsResponse.Size(m) +} +func (m *ListCrawledUrlsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCrawledUrlsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCrawledUrlsResponse proto.InternalMessageInfo + +func (m *ListCrawledUrlsResponse) GetCrawledUrls() []*CrawledUrl { + if m != nil { + return m.CrawledUrls + } + return nil +} + +func (m *ListCrawledUrlsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `GetFinding` method. +type GetFindingRequest struct { + // Required. + // The resource name of the Finding to be returned. The name follows the + // format of + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFindingRequest) Reset() { *m = GetFindingRequest{} } +func (m *GetFindingRequest) String() string { return proto.CompactTextString(m) } +func (*GetFindingRequest) ProtoMessage() {} +func (*GetFindingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{13} +} +func (m *GetFindingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFindingRequest.Unmarshal(m, b) +} +func (m *GetFindingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFindingRequest.Marshal(b, m, deterministic) +} +func (dst *GetFindingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFindingRequest.Merge(dst, src) +} +func (m *GetFindingRequest) XXX_Size() int { + return xxx_messageInfo_GetFindingRequest.Size(m) +} +func (m *GetFindingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFindingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFindingRequest proto.InternalMessageInfo + +func (m *GetFindingRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for the `ListFindings` method. +type ListFindingsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. The expression must be in the format: + // . + // Supported field: 'finding_type'. + // Supported operator: '='. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // A token identifying a page of results to be returned. This should be a + // `next_page_token` value returned from a previous List request. + // If unspecified, the first page of results is returned. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of Findings to return, can be limited by server. + // If not specified or not positive, the implementation will select a + // reasonable value. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsRequest) Reset() { *m = ListFindingsRequest{} } +func (m *ListFindingsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingsRequest) ProtoMessage() {} +func (*ListFindingsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{14} +} +func (m *ListFindingsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsRequest.Unmarshal(m, b) +} +func (m *ListFindingsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsRequest.Merge(dst, src) +} +func (m *ListFindingsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingsRequest.Size(m) +} +func (m *ListFindingsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsRequest proto.InternalMessageInfo + +func (m *ListFindingsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFindingsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFindingsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListFindingsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Response for the `ListFindings` method. +type ListFindingsResponse struct { + // The list of Findings returned. + Findings []*Finding `protobuf:"bytes,1,rep,name=findings,proto3" json:"findings,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingsResponse) Reset() { *m = ListFindingsResponse{} } +func (m *ListFindingsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingsResponse) ProtoMessage() {} +func (*ListFindingsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{15} +} +func (m *ListFindingsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingsResponse.Unmarshal(m, b) +} +func (m *ListFindingsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingsResponse.Merge(dst, src) +} +func (m *ListFindingsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingsResponse.Size(m) +} +func (m *ListFindingsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingsResponse proto.InternalMessageInfo + +func (m *ListFindingsResponse) GetFindings() []*Finding { + if m != nil { + return m.Findings + } + return nil +} + +func (m *ListFindingsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `ListFindingTypeStats` method. +type ListFindingTypeStatsRequest struct { + // Required. + // The parent resource name, which should be a scan run resource name in the + // format + // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingTypeStatsRequest) Reset() { *m = ListFindingTypeStatsRequest{} } +func (m *ListFindingTypeStatsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFindingTypeStatsRequest) ProtoMessage() {} +func (*ListFindingTypeStatsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{16} +} +func (m *ListFindingTypeStatsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingTypeStatsRequest.Unmarshal(m, b) +} +func (m *ListFindingTypeStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingTypeStatsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFindingTypeStatsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingTypeStatsRequest.Merge(dst, src) +} +func (m *ListFindingTypeStatsRequest) XXX_Size() int { + return xxx_messageInfo_ListFindingTypeStatsRequest.Size(m) +} +func (m *ListFindingTypeStatsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingTypeStatsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingTypeStatsRequest proto.InternalMessageInfo + +func (m *ListFindingTypeStatsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// Response for the `ListFindingTypeStats` method. +type ListFindingTypeStatsResponse struct { + // The list of FindingTypeStats returned. 
+ FindingTypeStats []*FindingTypeStats `protobuf:"bytes,1,rep,name=finding_type_stats,json=findingTypeStats,proto3" json:"finding_type_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFindingTypeStatsResponse) Reset() { *m = ListFindingTypeStatsResponse{} } +func (m *ListFindingTypeStatsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFindingTypeStatsResponse) ProtoMessage() {} +func (*ListFindingTypeStatsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_web_security_scanner_ccd98c4097e38a3b, []int{17} +} +func (m *ListFindingTypeStatsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFindingTypeStatsResponse.Unmarshal(m, b) +} +func (m *ListFindingTypeStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFindingTypeStatsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFindingTypeStatsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFindingTypeStatsResponse.Merge(dst, src) +} +func (m *ListFindingTypeStatsResponse) XXX_Size() int { + return xxx_messageInfo_ListFindingTypeStatsResponse.Size(m) +} +func (m *ListFindingTypeStatsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFindingTypeStatsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFindingTypeStatsResponse proto.InternalMessageInfo + +func (m *ListFindingTypeStatsResponse) GetFindingTypeStats() []*FindingTypeStats { + if m != nil { + return m.FindingTypeStats + } + return nil +} + +func init() { + proto.RegisterType((*CreateScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1beta.CreateScanConfigRequest") + proto.RegisterType((*DeleteScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1beta.DeleteScanConfigRequest") + proto.RegisterType((*GetScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1beta.GetScanConfigRequest") + proto.RegisterType((*ListScanConfigsRequest)(nil), "google.cloud.websecurityscanner.v1beta.ListScanConfigsRequest") + proto.RegisterType((*UpdateScanConfigRequest)(nil), "google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest") + proto.RegisterType((*ListScanConfigsResponse)(nil), "google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse") + proto.RegisterType((*StartScanRunRequest)(nil), "google.cloud.websecurityscanner.v1beta.StartScanRunRequest") + proto.RegisterType((*GetScanRunRequest)(nil), "google.cloud.websecurityscanner.v1beta.GetScanRunRequest") + proto.RegisterType((*ListScanRunsRequest)(nil), "google.cloud.websecurityscanner.v1beta.ListScanRunsRequest") + proto.RegisterType((*ListScanRunsResponse)(nil), "google.cloud.websecurityscanner.v1beta.ListScanRunsResponse") + proto.RegisterType((*StopScanRunRequest)(nil), "google.cloud.websecurityscanner.v1beta.StopScanRunRequest") + proto.RegisterType((*ListCrawledUrlsRequest)(nil), "google.cloud.websecurityscanner.v1beta.ListCrawledUrlsRequest") + proto.RegisterType((*ListCrawledUrlsResponse)(nil), "google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse") + proto.RegisterType((*GetFindingRequest)(nil), "google.cloud.websecurityscanner.v1beta.GetFindingRequest") + proto.RegisterType((*ListFindingsRequest)(nil), "google.cloud.websecurityscanner.v1beta.ListFindingsRequest") + proto.RegisterType((*ListFindingsResponse)(nil), "google.cloud.websecurityscanner.v1beta.ListFindingsResponse") + proto.RegisterType((*ListFindingTypeStatsRequest)(nil), 
"google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsRequest") + proto.RegisterType((*ListFindingTypeStatsResponse)(nil), "google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WebSecurityScannerClient is the client API for WebSecurityScanner service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WebSecurityScannerClient interface { + // Creates a new ScanConfig. + CreateScanConfig(ctx context.Context, in *CreateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Deletes an existing ScanConfig and its child resources. + DeleteScanConfig(ctx context.Context, in *DeleteScanConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a ScanConfig. + GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Lists ScanConfigs under a given project. + ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) + // Updates a ScanConfig. This method support partial update of a ScanConfig. + UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Start a ScanRun according to the given ScanConfig. + StartScanRun(ctx context.Context, in *StartScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // Gets a ScanRun. + GetScanRun(ctx context.Context, in *GetScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun + // stop time. + ListScanRuns(ctx context.Context, in *ListScanRunsRequest, opts ...grpc.CallOption) (*ListScanRunsResponse, error) + // Stops a ScanRun. The stopped ScanRun is returned. + StopScanRun(ctx context.Context, in *StopScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) + // List CrawledUrls under a given ScanRun. + ListCrawledUrls(ctx context.Context, in *ListCrawledUrlsRequest, opts ...grpc.CallOption) (*ListCrawledUrlsResponse, error) + // Gets a Finding. + GetFinding(ctx context.Context, in *GetFindingRequest, opts ...grpc.CallOption) (*Finding, error) + // List Findings under a given ScanRun. + ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) + // List all FindingTypeStats under a given ScanRun. + ListFindingTypeStats(ctx context.Context, in *ListFindingTypeStatsRequest, opts ...grpc.CallOption) (*ListFindingTypeStatsResponse, error) +} + +type webSecurityScannerClient struct { + cc *grpc.ClientConn +} + +func NewWebSecurityScannerClient(cc *grpc.ClientConn) WebSecurityScannerClient { + return &webSecurityScannerClient{cc} +} + +func (c *webSecurityScannerClient) CreateScanConfig(ctx context.Context, in *CreateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/CreateScanConfig", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) DeleteScanConfig(ctx context.Context, in *DeleteScanConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/DeleteScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) { + out := new(ListScanConfigsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListScanConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/UpdateScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) StartScanRun(ctx context.Context, in *StartScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/StartScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetScanRun(ctx context.Context, in *GetScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListScanRuns(ctx context.Context, in *ListScanRunsRequest, opts ...grpc.CallOption) (*ListScanRunsResponse, error) { + out := new(ListScanRunsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListScanRuns", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) StopScanRun(ctx context.Context, in *StopScanRunRequest, opts ...grpc.CallOption) (*ScanRun, error) { + out := new(ScanRun) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/StopScanRun", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListCrawledUrls(ctx context.Context, in *ListCrawledUrlsRequest, opts ...grpc.CallOption) (*ListCrawledUrlsResponse, error) { + out := new(ListCrawledUrlsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListCrawledUrls", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) GetFinding(ctx context.Context, in *GetFindingRequest, opts ...grpc.CallOption) (*Finding, error) { + out := new(Finding) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetFinding", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListFindings(ctx context.Context, in *ListFindingsRequest, opts ...grpc.CallOption) (*ListFindingsResponse, error) { + out := new(ListFindingsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListFindings", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *webSecurityScannerClient) ListFindingTypeStats(ctx context.Context, in *ListFindingTypeStatsRequest, opts ...grpc.CallOption) (*ListFindingTypeStatsResponse, error) { + out := new(ListFindingTypeStatsResponse) + err := c.cc.Invoke(ctx, "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListFindingTypeStats", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WebSecurityScannerServer is the server API for WebSecurityScanner service. +type WebSecurityScannerServer interface { + // Creates a new ScanConfig. + CreateScanConfig(context.Context, *CreateScanConfigRequest) (*ScanConfig, error) + // Deletes an existing ScanConfig and its child resources. + DeleteScanConfig(context.Context, *DeleteScanConfigRequest) (*empty.Empty, error) + // Gets a ScanConfig. + GetScanConfig(context.Context, *GetScanConfigRequest) (*ScanConfig, error) + // Lists ScanConfigs under a given project. + ListScanConfigs(context.Context, *ListScanConfigsRequest) (*ListScanConfigsResponse, error) + // Updates a ScanConfig. This method support partial update of a ScanConfig. + UpdateScanConfig(context.Context, *UpdateScanConfigRequest) (*ScanConfig, error) + // Start a ScanRun according to the given ScanConfig. + StartScanRun(context.Context, *StartScanRunRequest) (*ScanRun, error) + // Gets a ScanRun. + GetScanRun(context.Context, *GetScanRunRequest) (*ScanRun, error) + // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun + // stop time. + ListScanRuns(context.Context, *ListScanRunsRequest) (*ListScanRunsResponse, error) + // Stops a ScanRun. The stopped ScanRun is returned. + StopScanRun(context.Context, *StopScanRunRequest) (*ScanRun, error) + // List CrawledUrls under a given ScanRun. + ListCrawledUrls(context.Context, *ListCrawledUrlsRequest) (*ListCrawledUrlsResponse, error) + // Gets a Finding. + GetFinding(context.Context, *GetFindingRequest) (*Finding, error) + // List Findings under a given ScanRun. + ListFindings(context.Context, *ListFindingsRequest) (*ListFindingsResponse, error) + // List all FindingTypeStats under a given ScanRun. 
+ ListFindingTypeStats(context.Context, *ListFindingTypeStatsRequest) (*ListFindingTypeStatsResponse, error) +} + +func RegisterWebSecurityScannerServer(s *grpc.Server, srv WebSecurityScannerServer) { + s.RegisterService(&_WebSecurityScanner_serviceDesc, srv) +} + +func _WebSecurityScanner_CreateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).CreateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/CreateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).CreateScanConfig(ctx, req.(*CreateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_DeleteScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).DeleteScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/DeleteScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).DeleteScanConfig(ctx, req.(*DeleteScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetScanConfig(ctx, req.(*GetScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListScanConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListScanConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListScanConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListScanConfigs(ctx, req.(*ListScanConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_UpdateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(WebSecurityScannerServer).UpdateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/UpdateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).UpdateScanConfig(ctx, req.(*UpdateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_StartScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StartScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).StartScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/StartScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).StartScanRun(ctx, req.(*StartScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetScanRun(ctx, req.(*GetScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListScanRuns_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanRunsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListScanRuns(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListScanRuns", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListScanRuns(ctx, req.(*ListScanRunsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_StopScanRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StopScanRunRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).StopScanRun(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/StopScanRun", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).StopScanRun(ctx, req.(*StopScanRunRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListCrawledUrls_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCrawledUrlsRequest) + if err := dec(in); err != nil { + return 
nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListCrawledUrls(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListCrawledUrls", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListCrawledUrls(ctx, req.(*ListCrawledUrlsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_GetFinding_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFindingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).GetFinding(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/GetFinding", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).GetFinding(ctx, req.(*GetFindingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListFindings_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListFindings(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListFindings", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListFindings(ctx, req.(*ListFindingsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WebSecurityScanner_ListFindingTypeStats_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFindingTypeStatsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WebSecurityScannerServer).ListFindingTypeStats(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.cloud.websecurityscanner.v1beta.WebSecurityScanner/ListFindingTypeStats", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WebSecurityScannerServer).ListFindingTypeStats(ctx, req.(*ListFindingTypeStatsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WebSecurityScanner_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.cloud.websecurityscanner.v1beta.WebSecurityScanner", + HandlerType: (*WebSecurityScannerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateScanConfig", + Handler: _WebSecurityScanner_CreateScanConfig_Handler, + }, + { + MethodName: "DeleteScanConfig", + Handler: _WebSecurityScanner_DeleteScanConfig_Handler, + }, + { + MethodName: "GetScanConfig", + Handler: _WebSecurityScanner_GetScanConfig_Handler, + }, + { + MethodName: "ListScanConfigs", + Handler: _WebSecurityScanner_ListScanConfigs_Handler, + }, + { + MethodName: "UpdateScanConfig", + Handler: _WebSecurityScanner_UpdateScanConfig_Handler, + }, + { + MethodName: "StartScanRun", + Handler: _WebSecurityScanner_StartScanRun_Handler, + }, + { + MethodName: "GetScanRun", + Handler: _WebSecurityScanner_GetScanRun_Handler, + }, 
+ { + MethodName: "ListScanRuns", + Handler: _WebSecurityScanner_ListScanRuns_Handler, + }, + { + MethodName: "StopScanRun", + Handler: _WebSecurityScanner_StopScanRun_Handler, + }, + { + MethodName: "ListCrawledUrls", + Handler: _WebSecurityScanner_ListCrawledUrls_Handler, + }, + { + MethodName: "GetFinding", + Handler: _WebSecurityScanner_GetFinding_Handler, + }, + { + MethodName: "ListFindings", + Handler: _WebSecurityScanner_ListFindings_Handler, + }, + { + MethodName: "ListFindingTypeStats", + Handler: _WebSecurityScanner_ListFindingTypeStats_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/cloud/websecurityscanner/v1beta/web_security_scanner.proto", +} + +func init() { + proto.RegisterFile("google/cloud/websecurityscanner/v1beta/web_security_scanner.proto", fileDescriptor_web_security_scanner_ccd98c4097e38a3b) +} + +var fileDescriptor_web_security_scanner_ccd98c4097e38a3b = []byte{ + // 1113 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0x41, 0x6f, 0x1b, 0x45, + 0x14, 0xd6, 0xb4, 0x25, 0x4a, 0x9e, 0x53, 0x35, 0x4c, 0xa3, 0x38, 0xda, 0x14, 0xc9, 0xda, 0x43, + 0x9b, 0x5a, 0xaa, 0x57, 0xb8, 0x2d, 0x04, 0x27, 0x25, 0x90, 0x34, 0x2d, 0x12, 0xad, 0x14, 0xd9, + 0x8d, 0x40, 0x5c, 0x56, 0x6b, 0x7b, 0x6c, 0x2d, 0xd9, 0xcc, 0x2e, 0x3b, 0xb3, 0x94, 0x14, 0xf5, + 0x00, 0x07, 0x2e, 0x70, 0xe3, 0xc2, 0x81, 0x0b, 0x57, 0xb8, 0x21, 0xc4, 0x11, 0x0e, 0xa8, 0x77, + 0x10, 0x57, 0x8e, 0xfc, 0x00, 0x7e, 0x02, 0xda, 0xd9, 0x59, 0x7b, 0xbd, 0xbb, 0xb5, 0x67, 0x1c, + 0xe5, 0xe6, 0x9d, 0x99, 0xf7, 0xe6, 0x7b, 0xdf, 0x7b, 0x6f, 0xde, 0x27, 0xc3, 0xbb, 0x43, 0xdf, + 0x1f, 0x7a, 0xc4, 0xea, 0x79, 0x7e, 0xd4, 0xb7, 0x9e, 0x92, 0x2e, 0x23, 0xbd, 0x28, 0x74, 0xf9, + 0x29, 0xeb, 0x39, 0x94, 0x92, 0xd0, 0xfa, 0xf4, 0xf5, 0x2e, 0xe1, 0x4e, 0xbc, 0x63, 0xa7, 0x5b, + 0xb6, 0xdc, 0x6b, 0x04, 0xa1, 0xcf, 0x7d, 0x7c, 0x3d, 0x71, 0xd1, 0x10, 0x2e, 0x1a, 0x45, 0x17, + 0x8d, 0xc4, 0x85, 0x71, 0x4d, 0x5e, 0xe5, 0x04, 0xae, 0xe5, 0x50, 0xea, 0x73, 0x87, 0xbb, 0x3e, + 0x65, 0x89, 0x17, 0x63, 0x4b, 0x11, 0x48, 0x2f, 0x74, 0x9e, 0x7a, 0xa4, 0x6f, 0x47, 0xa1, 0x27, + 0x2d, 0xef, 0x28, 0x5a, 0x0e, 0x5c, 0xda, 0x77, 0xe9, 0x50, 0x5a, 0xed, 0xea, 0x59, 0xd9, 0xfc, + 0x34, 0x20, 0x36, 0xe3, 0x0e, 0xd7, 0x05, 0x1c, 0x7f, 0xda, 0x3d, 0x9f, 0x0e, 0xdc, 0xf4, 0xea, + 0xbb, 0x3a, 0x96, 0x61, 0x44, 0xa5, 0xd9, 0x86, 0x34, 0x13, 0x5f, 0xdd, 0x68, 0x60, 0x91, 0x93, + 0x80, 0x9f, 0xca, 0xcd, 0x5a, 0x7e, 0x73, 0xe0, 0x12, 0xaf, 0x6f, 0x9f, 0x38, 0xec, 0x38, 0x39, + 0x61, 0x7e, 0x85, 0xa0, 0xba, 0x1f, 0x12, 0x87, 0x93, 0x4e, 0xcf, 0xa1, 0xfb, 0x02, 0x50, 0x9b, + 0x7c, 0x12, 0x11, 0xc6, 0xf1, 0x1a, 0x2c, 0x04, 0x4e, 0x48, 0x28, 0x5f, 0x47, 0x35, 0xb4, 0xb9, + 0xd4, 0x96, 0x5f, 0xb8, 0x03, 0x95, 0x0c, 0xfc, 0xf5, 0x0b, 0x35, 0xb4, 0x59, 0x69, 0x36, 0x1b, + 0x6a, 0x09, 0x6f, 0x64, 0xee, 0x01, 0x36, 0xfa, 0x6d, 0xde, 0x82, 0xea, 0x7d, 0xe2, 0x91, 0x32, + 0x1c, 0x18, 0x2e, 0x51, 0xe7, 0x84, 0x48, 0x14, 0xe2, 0xb7, 0x59, 0x87, 0xd5, 0x87, 0x84, 0xab, + 0x9d, 0xf5, 0x60, 0xed, 0x91, 0xcb, 0x32, 0x87, 0xd9, 0xac, 0x08, 0x5f, 0x03, 0x08, 0x9c, 0x21, + 0xb1, 0xb9, 0x7f, 0x4c, 0xa8, 0x08, 0x70, 0xa9, 0xbd, 0x14, 0xaf, 0x3c, 0x89, 0x17, 0xf0, 0x06, + 0x88, 0x0f, 0x9b, 0xb9, 0xcf, 0xc8, 0xfa, 0xc5, 0x1a, 0xda, 0x7c, 0xa5, 0xbd, 0x18, 0x2f, 0x74, + 0xdc, 0x67, 0xc4, 0xfc, 0x09, 0x41, 0xf5, 0x28, 0xe8, 0x97, 0x32, 0x7a, 0x1e, 0xcc, 0xe1, 0x6d, + 0xa8, 0x44, 0xe2, 0x3e, 0x91, 0x57, 0x81, 0xa7, 0xd2, 0x34, 0x52, 0xa7, 0x69, 0xea, 0x1b, 0x0f, + 0xe2, 0xd4, 0x3f, 0x76, 0xd8, 
0x71, 0x1b, 0x92, 0xe3, 0xf1, 0x6f, 0xf3, 0x3b, 0x04, 0xd5, 0x02, + 0x39, 0x2c, 0xf0, 0x29, 0x23, 0xf8, 0x08, 0x96, 0x33, 0x68, 0xd9, 0x3a, 0xaa, 0x5d, 0x9c, 0x13, + 0x6e, 0x65, 0x0c, 0x97, 0xe1, 0xeb, 0x70, 0x85, 0x92, 0xcf, 0xb8, 0x5d, 0x60, 0xf8, 0x72, 0xbc, + 0x7c, 0x98, 0xb2, 0x6c, 0xde, 0x84, 0xab, 0x1d, 0xee, 0x84, 0x02, 0x5a, 0x3b, 0xa2, 0xd3, 0x32, + 0x7c, 0x03, 0x5e, 0x95, 0xd5, 0x30, 0xe3, 0xa0, 0x0b, 0x57, 0xd3, 0x68, 0xdb, 0x11, 0x3d, 0xd7, + 0x3a, 0xf8, 0x06, 0xc1, 0xea, 0xe4, 0x5d, 0x92, 0xd6, 0x47, 0xb0, 0x94, 0xf6, 0x70, 0xca, 0xa9, + 0xa5, 0xc3, 0x69, 0x1c, 0xe2, 0x22, 0x93, 0x5e, 0x95, 0xd9, 0xdc, 0x04, 0xdc, 0xe1, 0x7e, 0xa0, + 0xc0, 0x91, 0x6c, 0x97, 0xfd, 0xe4, 0x49, 0x3d, 0x0a, 0xbd, 0x73, 0xa5, 0x29, 0x2d, 0xc0, 0x89, + 0xeb, 0xc6, 0x05, 0x98, 0x79, 0xd8, 0xb5, 0x0b, 0x70, 0xec, 0xb2, 0x5d, 0xe9, 0x8d, 0xdd, 0x2b, + 0x53, 0x96, 0x54, 0xd5, 0x83, 0xe4, 0xa9, 0x9f, 0xc6, 0xd8, 0x17, 0x28, 0x29, 0x2b, 0x79, 0x74, + 0x26, 0x5f, 0x6b, 0xb0, 0x30, 0x70, 0x3d, 0x4e, 0x42, 0x79, 0xaf, 0xfc, 0xca, 0xf1, 0x78, 0x71, + 0x2a, 0x8f, 0x97, 0x72, 0x3c, 0x7e, 0x2d, 0xcb, 0x6d, 0x8c, 0x41, 0x92, 0xf8, 0x3e, 0x2c, 0xca, + 0x69, 0xa5, 0x5d, 0x6d, 0x69, 0xe8, 0x23, 0x07, 0xca, 0xd4, 0xdd, 0x85, 0x8d, 0x0c, 0x98, 0x27, + 0xa7, 0x01, 0xe9, 0xc4, 0x43, 0x72, 0x06, 0x31, 0xf1, 0x34, 0xba, 0x56, 0x6e, 0x27, 0x83, 0x19, + 0x00, 0x2e, 0x8e, 0x5e, 0x19, 0xd6, 0x96, 0x66, 0x58, 0x63, 0xef, 0x2b, 0x83, 0xdc, 0x4a, 0xf3, + 0xc7, 0x55, 0xc0, 0x1f, 0x90, 0x6e, 0x47, 0x3a, 0xe8, 0x24, 0x0e, 0xf0, 0x0b, 0x04, 0x2b, 0xf9, + 0x69, 0x89, 0x77, 0xd5, 0xeb, 0xb1, 0x74, 0xce, 0x1a, 0x73, 0xbc, 0xa8, 0xe6, 0xce, 0x97, 0x7f, + 0xff, 0xfb, 0xed, 0x85, 0x37, 0xcc, 0x1b, 0xa9, 0x2c, 0xf8, 0x3c, 0xa1, 0xf0, 0x5e, 0x10, 0xfa, + 0x1f, 0x93, 0x1e, 0x67, 0x56, 0xfd, 0xb9, 0x95, 0x79, 0x75, 0x5b, 0xd9, 0xc1, 0x83, 0xbf, 0x47, + 0xb0, 0x92, 0x9f, 0xb6, 0xea, 0x71, 0xbc, 0x64, 0x4e, 0x1b, 0x6b, 0x85, 0x99, 0x73, 0x10, 0x6b, + 0x11, 0xd3, 0x12, 0x58, 0x6f, 0xd6, 0xc7, 0x58, 0xe3, 0xae, 0xc9, 0x20, 0xcd, 0x02, 0xb5, 0xea, + 0xcf, 0xf1, 0xcf, 0x08, 0x2e, 0x4f, 0x4c, 0x77, 0xbc, 0xa3, 0x8a, 0xad, 0x4c, 0x14, 0xcc, 0x45, + 0xb0, 0x04, 0x8d, 0x95, 0x41, 0xff, 0x81, 0xe0, 0x4a, 0x6e, 0x92, 0xe2, 0xb7, 0x55, 0x2f, 0x2e, + 0xd7, 0x27, 0xc6, 0xee, 0xdc, 0xf6, 0x49, 0xbf, 0x94, 0x44, 0x31, 0xbd, 0x4c, 0xf0, 0x5f, 0x08, + 0x56, 0xf2, 0xea, 0x45, 0xbd, 0x32, 0x5e, 0xa2, 0x7b, 0xe6, 0x4a, 0xc0, 0x7b, 0x02, 0xfa, 0x5e, + 0xf3, 0xf6, 0x08, 0x7a, 0x56, 0x33, 0x4f, 0x4f, 0xc6, 0x64, 0xb5, 0xff, 0x8a, 0x60, 0x39, 0xab, + 0x24, 0xf0, 0xb6, 0x32, 0x9c, 0xa2, 0xfe, 0x30, 0x74, 0x67, 0xb5, 0xb9, 0x25, 0x02, 0x69, 0x9a, + 0xb7, 0x14, 0x2b, 0xa9, 0xc5, 0xe2, 0x5b, 0x5b, 0xa8, 0x8e, 0x7f, 0x41, 0x00, 0x63, 0x5d, 0x83, + 0xdf, 0xd2, 0x6c, 0x82, 0xb3, 0x80, 0x6e, 0x09, 0xd0, 0x77, 0x70, 0x53, 0x0d, 0xb4, 0x95, 0xea, + 0x91, 0xb8, 0x13, 0x5e, 0x20, 0x58, 0xce, 0x2a, 0x1f, 0x75, 0xbe, 0x4b, 0xb4, 0x99, 0xb1, 0x33, + 0x9f, 0xb1, 0x6c, 0x80, 0x62, 0x1c, 0x85, 0x06, 0xc8, 0xd1, 0x3f, 0x0a, 0x05, 0xff, 0x86, 0xa0, + 0x92, 0xd1, 0x4c, 0xb8, 0xa5, 0x5e, 0x36, 0x79, 0xa1, 0xa5, 0x9f, 0x80, 0x3d, 0x01, 0x7c, 0xc7, + 0x7c, 0x53, 0x3f, 0x01, 0x2d, 0xc6, 0xfd, 0x20, 0xae, 0x9f, 0x7f, 0xe4, 0x93, 0x94, 0xd1, 0x56, + 0x7a, 0x4f, 0x52, 0x51, 0x03, 0xea, 0x3d, 0x49, 0x25, 0xa2, 0x2e, 0xed, 0x6b, 0xfc, 0x8e, 0x6a, + 0x46, 0xb2, 0xa1, 0x59, 0x59, 0x1d, 0xf7, 0x7b, 0xd2, 0x1e, 0x72, 0x9c, 0x6b, 0xb5, 0xc7, 0xa4, + 0xa8, 0x33, 0x74, 0x15, 0x91, 0x79, 0x20, 0x82, 0xd8, 0xc5, 0xf7, 0xb4, 0xb3, 0x93, 0xfe, 0x75, + 0x20, 0x3a, 0xe5, 0x4f, 0xd9, 0x29, 0xa9, 0x68, 0xd3, 
0xeb, 0x94, 0x9c, 0xdc, 0xd4, 0xeb, 0x94, + 0xbc, 0x4e, 0x2c, 0x09, 0x49, 0x27, 0x2f, 0x23, 0x85, 0xf8, 0xdf, 0xa4, 0x0e, 0x1d, 0x49, 0x2a, + 0xbc, 0x3f, 0x07, 0xba, 0xbc, 0x70, 0x34, 0xee, 0x9f, 0xcd, 0x89, 0x0c, 0xf5, 0xb1, 0x08, 0xf5, + 0x21, 0x3e, 0x38, 0x4b, 0xa8, 0x23, 0xb7, 0x7b, 0x3f, 0x20, 0xa8, 0xf7, 0xfc, 0x13, 0x45, 0x68, + 0x7b, 0xd5, 0xa2, 0xb0, 0x3c, 0x8c, 0x05, 0xd3, 0x21, 0xfa, 0xe8, 0x43, 0xe9, 0x62, 0xe8, 0x7b, + 0x0e, 0x1d, 0x36, 0xfc, 0x70, 0x68, 0x0d, 0x09, 0x15, 0x72, 0xca, 0x4a, 0xb6, 0x9c, 0xc0, 0x65, + 0xb3, 0xfe, 0x22, 0xda, 0x2e, 0xee, 0x74, 0x17, 0x84, 0x93, 0xdb, 0xff, 0x07, 0x00, 0x00, 0xff, + 0xff, 0x46, 0x3b, 0x5e, 0x3b, 0xda, 0x13, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/container/v1/cluster_service.pb.go b/vendor/google.golang.org/genproto/googleapis/container/v1/cluster_service.pb.go new file mode 100644 index 0000000..e8a7dbe --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/container/v1/cluster_service.pb.go @@ -0,0 +1,7105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/container/v1/cluster_service.proto + +package container // import "google.golang.org/genproto/googleapis/container/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Allowed Network Policy providers. +type NetworkPolicy_Provider int32 + +const ( + // Not set + NetworkPolicy_PROVIDER_UNSPECIFIED NetworkPolicy_Provider = 0 + // Tigera (Calico Felix). + NetworkPolicy_CALICO NetworkPolicy_Provider = 1 +) + +var NetworkPolicy_Provider_name = map[int32]string{ + 0: "PROVIDER_UNSPECIFIED", + 1: "CALICO", +} +var NetworkPolicy_Provider_value = map[string]int32{ + "PROVIDER_UNSPECIFIED": 0, + "CALICO": 1, +} + +func (x NetworkPolicy_Provider) String() string { + return proto.EnumName(NetworkPolicy_Provider_name, int32(x)) +} +func (NetworkPolicy_Provider) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{11, 0} +} + +// The current status of the cluster. +type Cluster_Status int32 + +const ( + // Not set. + Cluster_STATUS_UNSPECIFIED Cluster_Status = 0 + // The PROVISIONING state indicates the cluster is being created. + Cluster_PROVISIONING Cluster_Status = 1 + // The RUNNING state indicates the cluster has been created and is fully + // usable. + Cluster_RUNNING Cluster_Status = 2 + // The RECONCILING state indicates that some work is actively being done on + // the cluster, such as upgrading the master or node software. Details can + // be found in the `statusMessage` field. + Cluster_RECONCILING Cluster_Status = 3 + // The STOPPING state indicates the cluster is being deleted. + Cluster_STOPPING Cluster_Status = 4 + // The ERROR state indicates the cluster may be unusable. Details + // can be found in the `statusMessage` field. 
+ Cluster_ERROR Cluster_Status = 5 + // The DEGRADED state indicates the cluster requires user action to restore + // full functionality. Details can be found in the `statusMessage` field. + Cluster_DEGRADED Cluster_Status = 6 +) + +var Cluster_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RECONCILING", + 4: "STOPPING", + 5: "ERROR", + 6: "DEGRADED", +} +var Cluster_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RECONCILING": 3, + "STOPPING": 4, + "ERROR": 5, + "DEGRADED": 6, +} + +func (x Cluster_Status) String() string { + return proto.EnumName(Cluster_Status_name, int32(x)) +} +func (Cluster_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{13, 0} +} + +// Current status of the operation. +type Operation_Status int32 + +const ( + // Not set. + Operation_STATUS_UNSPECIFIED Operation_Status = 0 + // The operation has been created. + Operation_PENDING Operation_Status = 1 + // The operation is currently running. + Operation_RUNNING Operation_Status = 2 + // The operation is done, either cancelled or completed. + Operation_DONE Operation_Status = 3 + // The operation is aborting. + Operation_ABORTING Operation_Status = 4 +) + +var Operation_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", + 4: "ABORTING", +} +var Operation_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, + "ABORTING": 4, +} + +func (x Operation_Status) String() string { + return proto.EnumName(Operation_Status_name, int32(x)) +} +func (Operation_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{15, 0} +} + +// Operation type. +type Operation_Type int32 + +const ( + // Not set. + Operation_TYPE_UNSPECIFIED Operation_Type = 0 + // Cluster create. + Operation_CREATE_CLUSTER Operation_Type = 1 + // Cluster delete. + Operation_DELETE_CLUSTER Operation_Type = 2 + // A master upgrade. + Operation_UPGRADE_MASTER Operation_Type = 3 + // A node upgrade. + Operation_UPGRADE_NODES Operation_Type = 4 + // Cluster repair. + Operation_REPAIR_CLUSTER Operation_Type = 5 + // Cluster update. + Operation_UPDATE_CLUSTER Operation_Type = 6 + // Node pool create. + Operation_CREATE_NODE_POOL Operation_Type = 7 + // Node pool delete. + Operation_DELETE_NODE_POOL Operation_Type = 8 + // Set node pool management. + Operation_SET_NODE_POOL_MANAGEMENT Operation_Type = 9 + // Automatic node pool repair. + Operation_AUTO_REPAIR_NODES Operation_Type = 10 + // Automatic node upgrade. + Operation_AUTO_UPGRADE_NODES Operation_Type = 11 + // Set labels. + Operation_SET_LABELS Operation_Type = 12 + // Set/generate master auth materials + Operation_SET_MASTER_AUTH Operation_Type = 13 + // Set node pool size. + Operation_SET_NODE_POOL_SIZE Operation_Type = 14 + // Updates network policy for a cluster. + Operation_SET_NETWORK_POLICY Operation_Type = 15 + // Set the maintenance policy. 
+ Operation_SET_MAINTENANCE_POLICY Operation_Type = 16 +) + +var Operation_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "CREATE_CLUSTER", + 2: "DELETE_CLUSTER", + 3: "UPGRADE_MASTER", + 4: "UPGRADE_NODES", + 5: "REPAIR_CLUSTER", + 6: "UPDATE_CLUSTER", + 7: "CREATE_NODE_POOL", + 8: "DELETE_NODE_POOL", + 9: "SET_NODE_POOL_MANAGEMENT", + 10: "AUTO_REPAIR_NODES", + 11: "AUTO_UPGRADE_NODES", + 12: "SET_LABELS", + 13: "SET_MASTER_AUTH", + 14: "SET_NODE_POOL_SIZE", + 15: "SET_NETWORK_POLICY", + 16: "SET_MAINTENANCE_POLICY", +} +var Operation_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "CREATE_CLUSTER": 1, + "DELETE_CLUSTER": 2, + "UPGRADE_MASTER": 3, + "UPGRADE_NODES": 4, + "REPAIR_CLUSTER": 5, + "UPDATE_CLUSTER": 6, + "CREATE_NODE_POOL": 7, + "DELETE_NODE_POOL": 8, + "SET_NODE_POOL_MANAGEMENT": 9, + "AUTO_REPAIR_NODES": 10, + "AUTO_UPGRADE_NODES": 11, + "SET_LABELS": 12, + "SET_MASTER_AUTH": 13, + "SET_NODE_POOL_SIZE": 14, + "SET_NETWORK_POLICY": 15, + "SET_MAINTENANCE_POLICY": 16, +} + +func (x Operation_Type) String() string { + return proto.EnumName(Operation_Type_name, int32(x)) +} +func (Operation_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{15, 1} +} + +// Operation type: what type update to perform. +type SetMasterAuthRequest_Action int32 + +const ( + // Operation is unknown and will error out. + SetMasterAuthRequest_UNKNOWN SetMasterAuthRequest_Action = 0 + // Set the password to a user generated value. + SetMasterAuthRequest_SET_PASSWORD SetMasterAuthRequest_Action = 1 + // Generate a new password and set it to that. + SetMasterAuthRequest_GENERATE_PASSWORD SetMasterAuthRequest_Action = 2 + // Set the username. If an empty username is provided, basic authentication + // is disabled for the cluster. If a non-empty username is provided, basic + // authentication is enabled, with either a provided password or a generated + // one. + SetMasterAuthRequest_SET_USERNAME SetMasterAuthRequest_Action = 3 +) + +var SetMasterAuthRequest_Action_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SET_PASSWORD", + 2: "GENERATE_PASSWORD", + 3: "SET_USERNAME", +} +var SetMasterAuthRequest_Action_value = map[string]int32{ + "UNKNOWN": 0, + "SET_PASSWORD": 1, + "GENERATE_PASSWORD": 2, + "SET_USERNAME": 3, +} + +func (x SetMasterAuthRequest_Action) String() string { + return proto.EnumName(SetMasterAuthRequest_Action_name, int32(x)) +} +func (SetMasterAuthRequest_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{26, 0} +} + +// The current status of the node pool instance. +type NodePool_Status int32 + +const ( + // Not set. + NodePool_STATUS_UNSPECIFIED NodePool_Status = 0 + // The PROVISIONING state indicates the node pool is being created. + NodePool_PROVISIONING NodePool_Status = 1 + // The RUNNING state indicates the node pool has been created + // and is fully usable. + NodePool_RUNNING NodePool_Status = 2 + // The RUNNING_WITH_ERROR state indicates the node pool has been created + // and is partially usable. Some error state has occurred and some + // functionality may be impaired. Customer may need to reissue a request + // or trigger a new update. + NodePool_RUNNING_WITH_ERROR NodePool_Status = 3 + // The RECONCILING state indicates that some work is actively being done on + // the node pool, such as upgrading node software. Details can + // be found in the `statusMessage` field. 
+ NodePool_RECONCILING NodePool_Status = 4 + // The STOPPING state indicates the node pool is being deleted. + NodePool_STOPPING NodePool_Status = 5 + // The ERROR state indicates the node pool may be unusable. Details + // can be found in the `statusMessage` field. + NodePool_ERROR NodePool_Status = 6 +) + +var NodePool_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RUNNING_WITH_ERROR", + 4: "RECONCILING", + 5: "STOPPING", + 6: "ERROR", +} +var NodePool_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RUNNING_WITH_ERROR": 3, + "RECONCILING": 4, + "STOPPING": 5, + "ERROR": 6, +} + +func (x NodePool_Status) String() string { + return proto.EnumName(NodePool_Status_name, int32(x)) +} +func (NodePool_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{40, 0} +} + +// Parameters that describe the nodes in a cluster. +type NodeConfig struct { + // The name of a Google Compute Engine [machine + // type](/compute/docs/machine-types) (e.g. + // `n1-standard-1`). + // + // If unspecified, the default machine type is + // `n1-standard-1`. + MachineType string `protobuf:"bytes,1,opt,name=machine_type,json=machineType,proto3" json:"machine_type,omitempty"` + // Size of the disk attached to each node, specified in GB. + // The smallest allowed disk size is 10GB. + // + // If unspecified, the default disk size is 100GB. + DiskSizeGb int32 `protobuf:"varint,2,opt,name=disk_size_gb,json=diskSizeGb,proto3" json:"disk_size_gb,omitempty"` + // The set of Google API scopes to be made available on all of the + // node VMs under the "default" service account. + // + // The following scopes are recommended, but not required, and by default are + // not included: + // + // * `https://www.googleapis.com/auth/compute` is required for mounting + // persistent storage on your nodes. + // * `https://www.googleapis.com/auth/devstorage.read_only` is required for + // communicating with **gcr.io** + // (the [Google Container Registry](/container-registry/)). + // + // If unspecified, no scopes are added, unless Cloud Logging or Cloud + // Monitoring are enabled, in which case their required scopes will be added. + OauthScopes []string `protobuf:"bytes,3,rep,name=oauth_scopes,json=oauthScopes,proto3" json:"oauth_scopes,omitempty"` + // The Google Cloud Platform Service Account to be used by the node VMs. If + // no Service Account is specified, the "default" service account is used. + ServiceAccount string `protobuf:"bytes,9,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // The metadata key/value pairs assigned to instances in the cluster. + // + // Keys must conform to the regexp [a-zA-Z0-9-_]+ and be less than 128 bytes + // in length. These are reflected as part of a URL in the metadata server. + // Additionally, to avoid ambiguity, keys must not conflict with any other + // metadata keys for the project or be one of the reserved keys: + // "cluster-location" + // "cluster-name" + // "cluster-uid" + // "configure-sh" + // "enable-os-login" + // "gci-update-strategy" + // "gci-ensure-gke-docker" + // "instance-template" + // "kube-env" + // "startup-script" + // "user-data" + // + // Values are free-form strings, and only have meaning as interpreted by + // the image running in the instance. The only restriction placed on them is + // that each value's size must be less than or equal to 32 KB. 
+ // + // The total size of all keys and values must be less than 512 KB. + Metadata map[string]string `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The image type to use for this node. Note that for a given image type, + // the latest version of it will be used. + ImageType string `protobuf:"bytes,5,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"` + // The map of Kubernetes labels (key/value pairs) to be applied to each node. + // These will added in addition to any default label(s) that + // Kubernetes may apply to the node. + // In case of conflict in label keys, the applied set may differ depending on + // the Kubernetes version -- it's best to assume the behavior is undefined + // and conflicts should be avoided. + // For more information, including usage and the valid values, see: + // https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The number of local SSD disks to be attached to the node. + // + // The limit for this value is dependant upon the maximum number of + // disks available on a machine per zone. See: + // https://cloud.google.com/compute/docs/disks/local-ssd#local_ssd_limits + // for more information. + LocalSsdCount int32 `protobuf:"varint,7,opt,name=local_ssd_count,json=localSsdCount,proto3" json:"local_ssd_count,omitempty"` + // The list of instance tags applied to all nodes. Tags are used to identify + // valid sources or targets for network firewalls and are specified by + // the client during cluster or node pool creation. Each tag within the list + // must comply with RFC1035. + Tags []string `protobuf:"bytes,8,rep,name=tags,proto3" json:"tags,omitempty"` + // Whether the nodes are created as preemptible VM instances. See: + // https://cloud.google.com/compute/docs/instances/preemptible for more + // information about preemptible VM instances. + Preemptible bool `protobuf:"varint,10,opt,name=preemptible,proto3" json:"preemptible,omitempty"` + // A list of hardware accelerators to be attached to each node. + // See https://cloud.google.com/compute/docs/gpus for more information about + // support for GPUs. + Accelerators []*AcceleratorConfig `protobuf:"bytes,11,rep,name=accelerators,proto3" json:"accelerators,omitempty"` + // Type of the disk attached to each node (e.g. 'pd-standard' or 'pd-ssd') + // + // If unspecified, the default disk type is 'pd-standard' + DiskType string `protobuf:"bytes,12,opt,name=disk_type,json=diskType,proto3" json:"disk_type,omitempty"` + // Minimum CPU platform to be used by this instance. The instance may be + // scheduled on the specified or newer CPU platform. Applicable values are the + // friendly names of CPU platforms, such as + // minCpuPlatform: "Intel Haswell" or + // minCpuPlatform: "Intel Sandy Bridge". 
For more + // information, read [how to specify min CPU + // platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform) + MinCpuPlatform string `protobuf:"bytes,13,opt,name=min_cpu_platform,json=minCpuPlatform,proto3" json:"min_cpu_platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeConfig) Reset() { *m = NodeConfig{} } +func (m *NodeConfig) String() string { return proto.CompactTextString(m) } +func (*NodeConfig) ProtoMessage() {} +func (*NodeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{0} +} +func (m *NodeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeConfig.Unmarshal(m, b) +} +func (m *NodeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeConfig.Marshal(b, m, deterministic) +} +func (dst *NodeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeConfig.Merge(dst, src) +} +func (m *NodeConfig) XXX_Size() int { + return xxx_messageInfo_NodeConfig.Size(m) +} +func (m *NodeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NodeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeConfig proto.InternalMessageInfo + +func (m *NodeConfig) GetMachineType() string { + if m != nil { + return m.MachineType + } + return "" +} + +func (m *NodeConfig) GetDiskSizeGb() int32 { + if m != nil { + return m.DiskSizeGb + } + return 0 +} + +func (m *NodeConfig) GetOauthScopes() []string { + if m != nil { + return m.OauthScopes + } + return nil +} + +func (m *NodeConfig) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *NodeConfig) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *NodeConfig) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *NodeConfig) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *NodeConfig) GetLocalSsdCount() int32 { + if m != nil { + return m.LocalSsdCount + } + return 0 +} + +func (m *NodeConfig) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *NodeConfig) GetPreemptible() bool { + if m != nil { + return m.Preemptible + } + return false +} + +func (m *NodeConfig) GetAccelerators() []*AcceleratorConfig { + if m != nil { + return m.Accelerators + } + return nil +} + +func (m *NodeConfig) GetDiskType() string { + if m != nil { + return m.DiskType + } + return "" +} + +func (m *NodeConfig) GetMinCpuPlatform() string { + if m != nil { + return m.MinCpuPlatform + } + return "" +} + +// The authentication information for accessing the master endpoint. +// Authentication can be done using HTTP basic auth or using client +// certificates. +type MasterAuth struct { + // The username to use for HTTP basic authentication to the master endpoint. + // For clusters v1.6.0 and later, you can disable basic authentication by + // providing an empty username. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // The password to use for HTTP basic authentication to the master endpoint. + // Because the master endpoint is open to the Internet, you should create a + // strong password. If a password is provided for cluster creation, username + // must be non-empty. 
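+ //
+ // A minimal sketch of how this message might be populated when basic
+ // authentication and a client certificate are wanted; the credential
+ // values are placeholders, not API defaults:
+ //
+ //   auth := &MasterAuth{
+ //       Username: "admin",
+ //       Password: "a-strong-generated-password",
+ //       ClientCertificateConfig: &ClientCertificateConfig{
+ //           IssueClientCertificate: true,
+ //       },
+ //   }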
+ Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + // Configuration for client certificate authentication on the cluster. For + // clusters before v1.12, if no configuration is specified, a client + // certificate is issued. + ClientCertificateConfig *ClientCertificateConfig `protobuf:"bytes,3,opt,name=client_certificate_config,json=clientCertificateConfig,proto3" json:"client_certificate_config,omitempty"` + // [Output only] Base64-encoded public certificate that is the root of + // trust for the cluster. + ClusterCaCertificate string `protobuf:"bytes,100,opt,name=cluster_ca_certificate,json=clusterCaCertificate,proto3" json:"cluster_ca_certificate,omitempty"` + // [Output only] Base64-encoded public certificate used by clients to + // authenticate to the cluster endpoint. + ClientCertificate string `protobuf:"bytes,101,opt,name=client_certificate,json=clientCertificate,proto3" json:"client_certificate,omitempty"` + // [Output only] Base64-encoded private key used by clients to authenticate + // to the cluster endpoint. + ClientKey string `protobuf:"bytes,102,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuth) Reset() { *m = MasterAuth{} } +func (m *MasterAuth) String() string { return proto.CompactTextString(m) } +func (*MasterAuth) ProtoMessage() {} +func (*MasterAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{1} +} +func (m *MasterAuth) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuth.Unmarshal(m, b) +} +func (m *MasterAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuth.Marshal(b, m, deterministic) +} +func (dst *MasterAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuth.Merge(dst, src) +} +func (m *MasterAuth) XXX_Size() int { + return xxx_messageInfo_MasterAuth.Size(m) +} +func (m *MasterAuth) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuth proto.InternalMessageInfo + +func (m *MasterAuth) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *MasterAuth) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +func (m *MasterAuth) GetClientCertificateConfig() *ClientCertificateConfig { + if m != nil { + return m.ClientCertificateConfig + } + return nil +} + +func (m *MasterAuth) GetClusterCaCertificate() string { + if m != nil { + return m.ClusterCaCertificate + } + return "" +} + +func (m *MasterAuth) GetClientCertificate() string { + if m != nil { + return m.ClientCertificate + } + return "" +} + +func (m *MasterAuth) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + +// Configuration for client certificates on the cluster. +type ClientCertificateConfig struct { + // Issue a client certificate. 
+ IssueClientCertificate bool `protobuf:"varint,1,opt,name=issue_client_certificate,json=issueClientCertificate,proto3" json:"issue_client_certificate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientCertificateConfig) Reset() { *m = ClientCertificateConfig{} } +func (m *ClientCertificateConfig) String() string { return proto.CompactTextString(m) } +func (*ClientCertificateConfig) ProtoMessage() {} +func (*ClientCertificateConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{2} +} +func (m *ClientCertificateConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientCertificateConfig.Unmarshal(m, b) +} +func (m *ClientCertificateConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientCertificateConfig.Marshal(b, m, deterministic) +} +func (dst *ClientCertificateConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientCertificateConfig.Merge(dst, src) +} +func (m *ClientCertificateConfig) XXX_Size() int { + return xxx_messageInfo_ClientCertificateConfig.Size(m) +} +func (m *ClientCertificateConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClientCertificateConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientCertificateConfig proto.InternalMessageInfo + +func (m *ClientCertificateConfig) GetIssueClientCertificate() bool { + if m != nil { + return m.IssueClientCertificate + } + return false +} + +// Configuration for the addons that can be automatically spun up in the +// cluster, enabling additional functionality. +type AddonsConfig struct { + // Configuration for the HTTP (L7) load balancing controller addon, which + // makes it easy to set up HTTP load balancers for services in a cluster. + HttpLoadBalancing *HttpLoadBalancing `protobuf:"bytes,1,opt,name=http_load_balancing,json=httpLoadBalancing,proto3" json:"http_load_balancing,omitempty"` + // Configuration for the horizontal pod autoscaling feature, which + // increases or decreases the number of replica pods a replication controller + // has based on the resource usage of the existing pods. + HorizontalPodAutoscaling *HorizontalPodAutoscaling `protobuf:"bytes,2,opt,name=horizontal_pod_autoscaling,json=horizontalPodAutoscaling,proto3" json:"horizontal_pod_autoscaling,omitempty"` + // Configuration for the Kubernetes Dashboard. + KubernetesDashboard *KubernetesDashboard `protobuf:"bytes,3,opt,name=kubernetes_dashboard,json=kubernetesDashboard,proto3" json:"kubernetes_dashboard,omitempty"` + // Configuration for NetworkPolicy. This only tracks whether the addon + // is enabled or not on the Master, it does not track whether network policy + // is enabled for the nodes. 
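+ //
+ // A minimal sketch of an AddonsConfig; note that each addon message records
+ // its *disabled* state, so `Disabled: false` leaves that addon turned on.
+ // The combination below is only illustrative:
+ //
+ //   addons := &AddonsConfig{
+ //       HttpLoadBalancing:   &HttpLoadBalancing{Disabled: false},
+ //       KubernetesDashboard: &KubernetesDashboard{Disabled: true},
+ //       NetworkPolicyConfig: &NetworkPolicyConfig{Disabled: false},
+ //   }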
+ NetworkPolicyConfig *NetworkPolicyConfig `protobuf:"bytes,4,opt,name=network_policy_config,json=networkPolicyConfig,proto3" json:"network_policy_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddonsConfig) Reset() { *m = AddonsConfig{} } +func (m *AddonsConfig) String() string { return proto.CompactTextString(m) } +func (*AddonsConfig) ProtoMessage() {} +func (*AddonsConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{3} +} +func (m *AddonsConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddonsConfig.Unmarshal(m, b) +} +func (m *AddonsConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddonsConfig.Marshal(b, m, deterministic) +} +func (dst *AddonsConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddonsConfig.Merge(dst, src) +} +func (m *AddonsConfig) XXX_Size() int { + return xxx_messageInfo_AddonsConfig.Size(m) +} +func (m *AddonsConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AddonsConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AddonsConfig proto.InternalMessageInfo + +func (m *AddonsConfig) GetHttpLoadBalancing() *HttpLoadBalancing { + if m != nil { + return m.HttpLoadBalancing + } + return nil +} + +func (m *AddonsConfig) GetHorizontalPodAutoscaling() *HorizontalPodAutoscaling { + if m != nil { + return m.HorizontalPodAutoscaling + } + return nil +} + +func (m *AddonsConfig) GetKubernetesDashboard() *KubernetesDashboard { + if m != nil { + return m.KubernetesDashboard + } + return nil +} + +func (m *AddonsConfig) GetNetworkPolicyConfig() *NetworkPolicyConfig { + if m != nil { + return m.NetworkPolicyConfig + } + return nil +} + +// Configuration options for the HTTP (L7) load balancing controller addon, +// which makes it easy to set up HTTP load balancers for services in a cluster. +type HttpLoadBalancing struct { + // Whether the HTTP Load Balancing controller is enabled in the cluster. + // When enabled, it runs a small pod in the cluster that manages the load + // balancers. 
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpLoadBalancing) Reset() { *m = HttpLoadBalancing{} } +func (m *HttpLoadBalancing) String() string { return proto.CompactTextString(m) } +func (*HttpLoadBalancing) ProtoMessage() {} +func (*HttpLoadBalancing) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{4} +} +func (m *HttpLoadBalancing) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpLoadBalancing.Unmarshal(m, b) +} +func (m *HttpLoadBalancing) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpLoadBalancing.Marshal(b, m, deterministic) +} +func (dst *HttpLoadBalancing) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpLoadBalancing.Merge(dst, src) +} +func (m *HttpLoadBalancing) XXX_Size() int { + return xxx_messageInfo_HttpLoadBalancing.Size(m) +} +func (m *HttpLoadBalancing) XXX_DiscardUnknown() { + xxx_messageInfo_HttpLoadBalancing.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpLoadBalancing proto.InternalMessageInfo + +func (m *HttpLoadBalancing) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for the horizontal pod autoscaling feature, which +// increases or decreases the number of replica pods a replication controller +// has based on the resource usage of the existing pods. +type HorizontalPodAutoscaling struct { + // Whether the Horizontal Pod Autoscaling feature is enabled in the cluster. + // When enabled, it ensures that a Heapster pod is running in the cluster, + // which is also used by the Cloud Monitoring service. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HorizontalPodAutoscaling) Reset() { *m = HorizontalPodAutoscaling{} } +func (m *HorizontalPodAutoscaling) String() string { return proto.CompactTextString(m) } +func (*HorizontalPodAutoscaling) ProtoMessage() {} +func (*HorizontalPodAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{5} +} +func (m *HorizontalPodAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HorizontalPodAutoscaling.Unmarshal(m, b) +} +func (m *HorizontalPodAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HorizontalPodAutoscaling.Marshal(b, m, deterministic) +} +func (dst *HorizontalPodAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_HorizontalPodAutoscaling.Merge(dst, src) +} +func (m *HorizontalPodAutoscaling) XXX_Size() int { + return xxx_messageInfo_HorizontalPodAutoscaling.Size(m) +} +func (m *HorizontalPodAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_HorizontalPodAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_HorizontalPodAutoscaling proto.InternalMessageInfo + +func (m *HorizontalPodAutoscaling) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for the Kubernetes Dashboard. +type KubernetesDashboard struct { + // Whether the Kubernetes Dashboard is enabled for this cluster. 
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KubernetesDashboard) Reset() { *m = KubernetesDashboard{} } +func (m *KubernetesDashboard) String() string { return proto.CompactTextString(m) } +func (*KubernetesDashboard) ProtoMessage() {} +func (*KubernetesDashboard) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{6} +} +func (m *KubernetesDashboard) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KubernetesDashboard.Unmarshal(m, b) +} +func (m *KubernetesDashboard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KubernetesDashboard.Marshal(b, m, deterministic) +} +func (dst *KubernetesDashboard) XXX_Merge(src proto.Message) { + xxx_messageInfo_KubernetesDashboard.Merge(dst, src) +} +func (m *KubernetesDashboard) XXX_Size() int { + return xxx_messageInfo_KubernetesDashboard.Size(m) +} +func (m *KubernetesDashboard) XXX_DiscardUnknown() { + xxx_messageInfo_KubernetesDashboard.DiscardUnknown(m) +} + +var xxx_messageInfo_KubernetesDashboard proto.InternalMessageInfo + +func (m *KubernetesDashboard) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for NetworkPolicy. This only tracks whether the addon +// is enabled or not on the Master, it does not track whether network policy +// is enabled for the nodes. +type NetworkPolicyConfig struct { + // Whether NetworkPolicy is enabled for this cluster. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicyConfig) Reset() { *m = NetworkPolicyConfig{} } +func (m *NetworkPolicyConfig) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicyConfig) ProtoMessage() {} +func (*NetworkPolicyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{7} +} +func (m *NetworkPolicyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicyConfig.Unmarshal(m, b) +} +func (m *NetworkPolicyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicyConfig.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicyConfig.Merge(dst, src) +} +func (m *NetworkPolicyConfig) XXX_Size() int { + return xxx_messageInfo_NetworkPolicyConfig.Size(m) +} +func (m *NetworkPolicyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicyConfig proto.InternalMessageInfo + +func (m *NetworkPolicyConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for private clusters. +type PrivateClusterConfig struct { + // Whether nodes have internal IP addresses only. If enabled, all nodes are + // given only RFC 1918 private addresses and communicate with the master via + // private networking. + EnablePrivateNodes bool `protobuf:"varint,1,opt,name=enable_private_nodes,json=enablePrivateNodes,proto3" json:"enable_private_nodes,omitempty"` + // Whether the master's internal IP address is used as the cluster endpoint. 
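+ //
+ // A minimal sketch of a private-cluster configuration; the master CIDR is
+ // only an illustrative value for the range described below:
+ //
+ //   pcc := &PrivateClusterConfig{
+ //       EnablePrivateNodes:    true,
+ //       EnablePrivateEndpoint: true,
+ //       MasterIpv4CidrBlock:   "172.16.0.0/28",
+ //   }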
+ EnablePrivateEndpoint bool `protobuf:"varint,2,opt,name=enable_private_endpoint,json=enablePrivateEndpoint,proto3" json:"enable_private_endpoint,omitempty"` + // The IP range in CIDR notation to use for the hosted master network. This + // range will be used for assigning internal IP addresses to the master or + // set of masters, as well as the ILB VIP. This range must not overlap with + // any other ranges in use within the cluster's network. + MasterIpv4CidrBlock string `protobuf:"bytes,3,opt,name=master_ipv4_cidr_block,json=masterIpv4CidrBlock,proto3" json:"master_ipv4_cidr_block,omitempty"` + // Output only. The internal IP address of this cluster's master endpoint. + PrivateEndpoint string `protobuf:"bytes,4,opt,name=private_endpoint,json=privateEndpoint,proto3" json:"private_endpoint,omitempty"` + // Output only. The external IP address of this cluster's master endpoint. + PublicEndpoint string `protobuf:"bytes,5,opt,name=public_endpoint,json=publicEndpoint,proto3" json:"public_endpoint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivateClusterConfig) Reset() { *m = PrivateClusterConfig{} } +func (m *PrivateClusterConfig) String() string { return proto.CompactTextString(m) } +func (*PrivateClusterConfig) ProtoMessage() {} +func (*PrivateClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{8} +} +func (m *PrivateClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivateClusterConfig.Unmarshal(m, b) +} +func (m *PrivateClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivateClusterConfig.Marshal(b, m, deterministic) +} +func (dst *PrivateClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivateClusterConfig.Merge(dst, src) +} +func (m *PrivateClusterConfig) XXX_Size() int { + return xxx_messageInfo_PrivateClusterConfig.Size(m) +} +func (m *PrivateClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivateClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivateClusterConfig proto.InternalMessageInfo + +func (m *PrivateClusterConfig) GetEnablePrivateNodes() bool { + if m != nil { + return m.EnablePrivateNodes + } + return false +} + +func (m *PrivateClusterConfig) GetEnablePrivateEndpoint() bool { + if m != nil { + return m.EnablePrivateEndpoint + } + return false +} + +func (m *PrivateClusterConfig) GetMasterIpv4CidrBlock() string { + if m != nil { + return m.MasterIpv4CidrBlock + } + return "" +} + +func (m *PrivateClusterConfig) GetPrivateEndpoint() string { + if m != nil { + return m.PrivateEndpoint + } + return "" +} + +func (m *PrivateClusterConfig) GetPublicEndpoint() string { + if m != nil { + return m.PublicEndpoint + } + return "" +} + +// Configuration options for the master authorized networks feature. Enabled +// master authorized networks will disallow all external traffic to access +// Kubernetes master through HTTPS except traffic from the given CIDR blocks, +// Google Compute Engine Public IPs and Google Prod IPs. +type MasterAuthorizedNetworksConfig struct { + // Whether or not master authorized networks is enabled. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + // cidr_blocks define up to 10 external networks that could access + // Kubernetes master through HTTPS. 
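+ //
+ // A minimal sketch with a single authorized block; the display name and
+ // CIDR are placeholders:
+ //
+ //   authorized := &MasterAuthorizedNetworksConfig{
+ //       Enabled: true,
+ //       CidrBlocks: []*MasterAuthorizedNetworksConfig_CidrBlock{
+ //           {DisplayName: "office", CidrBlock: "203.0.113.0/24"},
+ //       },
+ //   }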
+ CidrBlocks []*MasterAuthorizedNetworksConfig_CidrBlock `protobuf:"bytes,2,rep,name=cidr_blocks,json=cidrBlocks,proto3" json:"cidr_blocks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig) Reset() { *m = MasterAuthorizedNetworksConfig{} } +func (m *MasterAuthorizedNetworksConfig) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{9} +} +func (m *MasterAuthorizedNetworksConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Size(m) +} +func (m *MasterAuthorizedNetworksConfig) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *MasterAuthorizedNetworksConfig) GetCidrBlocks() []*MasterAuthorizedNetworksConfig_CidrBlock { + if m != nil { + return m.CidrBlocks + } + return nil +} + +// CidrBlock contains an optional name and one CIDR block. +type MasterAuthorizedNetworksConfig_CidrBlock struct { + // display_name is an optional field for users to identify CIDR blocks. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // cidr_block must be specified in CIDR notation. 
+ CidrBlock string `protobuf:"bytes,2,opt,name=cidr_block,json=cidrBlock,proto3" json:"cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) Reset() { + *m = MasterAuthorizedNetworksConfig_CidrBlock{} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig_CidrBlock) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig_CidrBlock) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{9, 0} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Size(m) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetCidrBlock() string { + if m != nil { + return m.CidrBlock + } + return "" +} + +// Configuration for the legacy Attribute Based Access Control authorization +// mode. +type LegacyAbac struct { + // Whether the ABAC authorizer is enabled for this cluster. When enabled, + // identities in the system, including service accounts, nodes, and + // controllers, will have statically granted permissions beyond those + // provided by the RBAC configuration or IAM. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LegacyAbac) Reset() { *m = LegacyAbac{} } +func (m *LegacyAbac) String() string { return proto.CompactTextString(m) } +func (*LegacyAbac) ProtoMessage() {} +func (*LegacyAbac) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{10} +} +func (m *LegacyAbac) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LegacyAbac.Unmarshal(m, b) +} +func (m *LegacyAbac) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LegacyAbac.Marshal(b, m, deterministic) +} +func (dst *LegacyAbac) XXX_Merge(src proto.Message) { + xxx_messageInfo_LegacyAbac.Merge(dst, src) +} +func (m *LegacyAbac) XXX_Size() int { + return xxx_messageInfo_LegacyAbac.Size(m) +} +func (m *LegacyAbac) XXX_DiscardUnknown() { + xxx_messageInfo_LegacyAbac.DiscardUnknown(m) +} + +var xxx_messageInfo_LegacyAbac proto.InternalMessageInfo + +func (m *LegacyAbac) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration options for the NetworkPolicy feature. 
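+//
+// A minimal sketch that simply turns the feature on; which enforcement
+// provider runs is chosen through the NetworkPolicy_Provider enum, and the
+// Kubernetes documentation linked below describes the feature itself:
+//
+//   np := &NetworkPolicy{
+//       Enabled: true,
+//   }
+//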
+// https://kubernetes.io/docs/concepts/services-networking/networkpolicies/ +type NetworkPolicy struct { + // The selected network policy provider. + Provider NetworkPolicy_Provider `protobuf:"varint,1,opt,name=provider,proto3,enum=google.container.v1.NetworkPolicy_Provider" json:"provider,omitempty"` + // Whether network policy is enabled on the cluster. + Enabled bool `protobuf:"varint,2,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicy) Reset() { *m = NetworkPolicy{} } +func (m *NetworkPolicy) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicy) ProtoMessage() {} +func (*NetworkPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{11} +} +func (m *NetworkPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicy.Unmarshal(m, b) +} +func (m *NetworkPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicy.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicy.Merge(dst, src) +} +func (m *NetworkPolicy) XXX_Size() int { + return xxx_messageInfo_NetworkPolicy.Size(m) +} +func (m *NetworkPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicy proto.InternalMessageInfo + +func (m *NetworkPolicy) GetProvider() NetworkPolicy_Provider { + if m != nil { + return m.Provider + } + return NetworkPolicy_PROVIDER_UNSPECIFIED +} + +func (m *NetworkPolicy) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration for controlling how IPs are allocated in the cluster. +type IPAllocationPolicy struct { + // Whether alias IPs will be used for pod IPs in the cluster. + UseIpAliases bool `protobuf:"varint,1,opt,name=use_ip_aliases,json=useIpAliases,proto3" json:"use_ip_aliases,omitempty"` + // Whether a new subnetwork will be created automatically for the cluster. + // + // This field is only applicable when `use_ip_aliases` is true. + CreateSubnetwork bool `protobuf:"varint,2,opt,name=create_subnetwork,json=createSubnetwork,proto3" json:"create_subnetwork,omitempty"` + // A custom subnetwork name to be used if `create_subnetwork` is true. If + // this field is empty, then an automatic name will be chosen for the new + // subnetwork. + SubnetworkName string `protobuf:"bytes,3,opt,name=subnetwork_name,json=subnetworkName,proto3" json:"subnetwork_name,omitempty"` + // This field is deprecated, use cluster_ipv4_cidr_block. + ClusterIpv4Cidr string `protobuf:"bytes,4,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // This field is deprecated, use node_ipv4_cidr_block. + NodeIpv4Cidr string `protobuf:"bytes,5,opt,name=node_ipv4_cidr,json=nodeIpv4Cidr,proto3" json:"node_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // This field is deprecated, use services_ipv4_cidr_block. + ServicesIpv4Cidr string `protobuf:"bytes,6,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // The name of the secondary range to be used for the cluster CIDR + // block. The secondary range will be used for pod IP + // addresses. This must be an existing secondary range associated + // with the cluster subnetwork. 
+ // + // This field is only applicable with use_ip_aliases is true and + // create_subnetwork is false. + ClusterSecondaryRangeName string `protobuf:"bytes,7,opt,name=cluster_secondary_range_name,json=clusterSecondaryRangeName,proto3" json:"cluster_secondary_range_name,omitempty"` + // The name of the secondary range to be used as for the services + // CIDR block. The secondary range will be used for service + // ClusterIPs. This must be an existing secondary range associated + // with the cluster subnetwork. + // + // This field is only applicable with use_ip_aliases is true and + // create_subnetwork is false. + ServicesSecondaryRangeName string `protobuf:"bytes,8,opt,name=services_secondary_range_name,json=servicesSecondaryRangeName,proto3" json:"services_secondary_range_name,omitempty"` + // The IP address range for the cluster pod IPs. If this field is set, then + // `cluster.cluster_ipv4_cidr` must be left blank. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + ClusterIpv4CidrBlock string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr_block,json=clusterIpv4CidrBlock,proto3" json:"cluster_ipv4_cidr_block,omitempty"` + // The IP address range of the instance IPs in this cluster. + // + // This is applicable only if `create_subnetwork` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + NodeIpv4CidrBlock string `protobuf:"bytes,10,opt,name=node_ipv4_cidr_block,json=nodeIpv4CidrBlock,proto3" json:"node_ipv4_cidr_block,omitempty"` + // The IP address range of the services IPs in this cluster. If blank, a range + // will be automatically chosen with the default size. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. 
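+ //
+ // A minimal sketch of an alias-IP allocation policy; the ranges are
+ // placeholders picked from the RFC-1918 blocks mentioned above:
+ //
+ //   alloc := &IPAllocationPolicy{
+ //       UseIpAliases:          true,
+ //       ClusterIpv4CidrBlock:  "10.4.0.0/14",
+ //       ServicesIpv4CidrBlock: "10.8.0.0/20",
+ //   }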
+ ServicesIpv4CidrBlock string `protobuf:"bytes,11,opt,name=services_ipv4_cidr_block,json=servicesIpv4CidrBlock,proto3" json:"services_ipv4_cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IPAllocationPolicy) Reset() { *m = IPAllocationPolicy{} } +func (m *IPAllocationPolicy) String() string { return proto.CompactTextString(m) } +func (*IPAllocationPolicy) ProtoMessage() {} +func (*IPAllocationPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{12} +} +func (m *IPAllocationPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IPAllocationPolicy.Unmarshal(m, b) +} +func (m *IPAllocationPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IPAllocationPolicy.Marshal(b, m, deterministic) +} +func (dst *IPAllocationPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_IPAllocationPolicy.Merge(dst, src) +} +func (m *IPAllocationPolicy) XXX_Size() int { + return xxx_messageInfo_IPAllocationPolicy.Size(m) +} +func (m *IPAllocationPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_IPAllocationPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_IPAllocationPolicy proto.InternalMessageInfo + +func (m *IPAllocationPolicy) GetUseIpAliases() bool { + if m != nil { + return m.UseIpAliases + } + return false +} + +func (m *IPAllocationPolicy) GetCreateSubnetwork() bool { + if m != nil { + return m.CreateSubnetwork + } + return false +} + +func (m *IPAllocationPolicy) GetSubnetworkName() string { + if m != nil { + return m.SubnetworkName + } + return "" +} + +// Deprecated: Do not use. +func (m *IPAllocationPolicy) GetClusterIpv4Cidr() string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. +func (m *IPAllocationPolicy) GetNodeIpv4Cidr() string { + if m != nil { + return m.NodeIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. +func (m *IPAllocationPolicy) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterSecondaryRangeName() string { + if m != nil { + return m.ClusterSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesSecondaryRangeName() string { + if m != nil { + return m.ServicesSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterIpv4CidrBlock() string { + if m != nil { + return m.ClusterIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetNodeIpv4CidrBlock() string { + if m != nil { + return m.NodeIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesIpv4CidrBlock() string { + if m != nil { + return m.ServicesIpv4CidrBlock + } + return "" +} + +// A Google Kubernetes Engine cluster. +type Cluster struct { + // The name of this cluster. The name must be unique within this project + // and zone, and can be up to 40 characters with the following restrictions: + // + // * Lowercase letters, numbers, and hyphens only. + // * Must start with a letter. + // * Must end with a number or a letter. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional description of this cluster. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The number of nodes to create in this cluster. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. 
You must also have available + // firewall and routes quota. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "node_config") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // Parameters used in creating the cluster's nodes. + // See `nodeConfig` for the description of its properties. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "initial_node_count") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + // For responses, this field will be populated with the node configuration of + // the first node pool. + // + // If unspecified, the defaults are used. + NodeConfig *NodeConfig `protobuf:"bytes,4,opt,name=node_config,json=nodeConfig,proto3" json:"node_config,omitempty"` + // The authentication information for accessing the master endpoint. + MasterAuth *MasterAuth `protobuf:"bytes,5,opt,name=master_auth,json=masterAuth,proto3" json:"master_auth,omitempty"` + // The logging service the cluster should use to write logs. + // Currently available options: + // + // * `logging.googleapis.com` - the Google Cloud Logging service. + // * `none` - no logs will be exported from the cluster. + // * if left as an empty string,`logging.googleapis.com` will be used. + LoggingService string `protobuf:"bytes,6,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * `monitoring.googleapis.com` - the Google Cloud Monitoring service. + // * `none` - no metrics will be exported from the cluster. + // * if left as an empty string, `monitoring.googleapis.com` will be used. + MonitoringService string `protobuf:"bytes,7,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name of the Google Compute Engine + // [network](/compute/docs/networks-and-firewalls#networks) to which the + // cluster is connected. If left unspecified, the `default` network + // will be used. + Network string `protobuf:"bytes,8,opt,name=network,proto3" json:"network,omitempty"` + // The IP address range of the container pods in this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`). Leave blank to have + // one automatically chosen or specify a `/14` block in `10.0.0.0/8`. + ClusterIpv4Cidr string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` + // Configurations for the various addons available to run in the cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,10,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name of the Google Compute Engine + // [subnetwork](/compute/docs/subnetworks) to which the + // cluster is connected. + Subnetwork string `protobuf:"bytes,11,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + // The node pools associated with this cluster. + // This field should not be set if "node_config" or "initial_node_count" are + // specified. 
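+ //
+ // A minimal sketch of a cluster that relies on `initial_node_count` and
+ // `node_config` instead of an explicit node pool list; all values are
+ // placeholders (the machine type and disk size shown are the documented
+ // defaults):
+ //
+ //   c := &Cluster{
+ //       Name:             "example-cluster",
+ //       InitialNodeCount: 3,
+ //       NodeConfig: &NodeConfig{
+ //           MachineType: "n1-standard-1",
+ //           DiskSizeGb:  100,
+ //       },
+ //   }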
+ NodePools []*NodePool `protobuf:"bytes,12,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"` + // The list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. + Locations []string `protobuf:"bytes,13,rep,name=locations,proto3" json:"locations,omitempty"` + // Kubernetes alpha features are enabled on this cluster. This includes alpha + // API groups (e.g. v1alpha1) and features that may not be production ready in + // the kubernetes version of the master and nodes. + // The cluster has no SLA for uptime and master/node upgrades are disabled. + // Alpha enabled clusters are automatically deleted thirty days after + // creation. + EnableKubernetesAlpha bool `protobuf:"varint,14,opt,name=enable_kubernetes_alpha,json=enableKubernetesAlpha,proto3" json:"enable_kubernetes_alpha,omitempty"` + // The resource labels for the cluster to use to annotate any related + // Google Compute Engine resources. + ResourceLabels map[string]string `protobuf:"bytes,15,rep,name=resource_labels,json=resourceLabels,proto3" json:"resource_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The fingerprint of the set of labels for this cluster. + LabelFingerprint string `protobuf:"bytes,16,opt,name=label_fingerprint,json=labelFingerprint,proto3" json:"label_fingerprint,omitempty"` + // Configuration for the legacy ABAC authorization mode. + LegacyAbac *LegacyAbac `protobuf:"bytes,18,opt,name=legacy_abac,json=legacyAbac,proto3" json:"legacy_abac,omitempty"` + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,19,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // Configuration for cluster IP allocation. + IpAllocationPolicy *IPAllocationPolicy `protobuf:"bytes,20,opt,name=ip_allocation_policy,json=ipAllocationPolicy,proto3" json:"ip_allocation_policy,omitempty"` + // The configuration options for master authorized networks feature. + MasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,22,opt,name=master_authorized_networks_config,json=masterAuthorizedNetworksConfig,proto3" json:"master_authorized_networks_config,omitempty"` + // Configure the maintenance policy for this cluster. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,23,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // Configuration for cluster networking. + NetworkConfig *NetworkConfig `protobuf:"bytes,27,opt,name=network_config,json=networkConfig,proto3" json:"network_config,omitempty"` + // Configuration for private cluster. + PrivateClusterConfig *PrivateClusterConfig `protobuf:"bytes,37,opt,name=private_cluster_config,json=privateClusterConfig,proto3" json:"private_cluster_config,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,101,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // [Output only] The IP address of this cluster's master endpoint. + // The endpoint can be accessed from the internet at + // `https://username:password@endpoint/`. 
+ // + // See the `masterAuth` property of this resource for username and + // password information. + Endpoint string `protobuf:"bytes,102,opt,name=endpoint,proto3" json:"endpoint,omitempty"` + // The initial Kubernetes version for this cluster. Valid versions are those + // found in validMasterVersions returned by getServerConfig. The version can + // be upgraded over time; such upgrades are reflected in + // currentMasterVersion and currentNodeVersion. + // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "","-": picks the default Kubernetes version + InitialClusterVersion string `protobuf:"bytes,103,opt,name=initial_cluster_version,json=initialClusterVersion,proto3" json:"initial_cluster_version,omitempty"` + // [Output only] The current software version of the master endpoint. + CurrentMasterVersion string `protobuf:"bytes,104,opt,name=current_master_version,json=currentMasterVersion,proto3" json:"current_master_version,omitempty"` + // [Output only] Deprecated, use + // [NodePool.version](/kubernetes-engine/docs/reference/rest/v1/projects.zones.clusters.nodePool) + // instead. The current version of the node software components. If they are + // currently at multiple versions because they're in the process of being + // upgraded, this reflects the minimum version of all nodes. + CurrentNodeVersion string `protobuf:"bytes,105,opt,name=current_node_version,json=currentNodeVersion,proto3" json:"current_node_version,omitempty"` // Deprecated: Do not use. + // [Output only] The time the cluster was created, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + CreateTime string `protobuf:"bytes,106,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // [Output only] The current status of this cluster. + Status Cluster_Status `protobuf:"varint,107,opt,name=status,proto3,enum=google.container.v1.Cluster_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // cluster, if available. + StatusMessage string `protobuf:"bytes,108,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // [Output only] The size of the address space on each node for hosting + // containers. This is provisioned from within the `container_ipv4_cidr` + // range. + NodeIpv4CidrSize int32 `protobuf:"varint,109,opt,name=node_ipv4_cidr_size,json=nodeIpv4CidrSize,proto3" json:"node_ipv4_cidr_size,omitempty"` + // [Output only] The IP address range of the Kubernetes services in + // this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `1.2.3.4/29`). Service addresses are + // typically put in the last `/16` from the container CIDR. + ServicesIpv4Cidr string `protobuf:"bytes,110,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` + // Deprecated. Use node_pools.instance_group_urls. + InstanceGroupUrls []string `protobuf:"bytes,111,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` // Deprecated: Do not use. + // [Output only] The number of nodes currently in the cluster. 
+ CurrentNodeCount int32 `protobuf:"varint,112,opt,name=current_node_count,json=currentNodeCount,proto3" json:"current_node_count,omitempty"` + // [Output only] The time the cluster will be automatically + // deleted in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + ExpireTime string `protobuf:"bytes,113,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,114,opt,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{13} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Cluster) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *Cluster) GetNodeConfig() *NodeConfig { + if m != nil { + return m.NodeConfig + } + return nil +} + +func (m *Cluster) GetMasterAuth() *MasterAuth { + if m != nil { + return m.MasterAuth + } + return nil +} + +func (m *Cluster) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *Cluster) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *Cluster) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *Cluster) GetClusterIpv4Cidr() string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +func (m *Cluster) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *Cluster) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +func (m *Cluster) GetNodePools() []*NodePool { + if m != nil { + return m.NodePools + } + return nil +} + +func (m *Cluster) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *Cluster) GetEnableKubernetesAlpha() bool { + if m != nil { + return m.EnableKubernetesAlpha + } + return false +} + +func (m *Cluster) GetResourceLabels() map[string]string { + if m != nil { + return m.ResourceLabels + } + return nil +} + +func (m *Cluster) GetLabelFingerprint() string { + if m != nil { + return m.LabelFingerprint + } + return "" +} + +func (m *Cluster) GetLegacyAbac() *LegacyAbac { + if m != nil { + return m.LegacyAbac + } + return nil +} + +func (m 
*Cluster) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *Cluster) GetIpAllocationPolicy() *IPAllocationPolicy { + if m != nil { + return m.IpAllocationPolicy + } + return nil +} + +func (m *Cluster) GetMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.MasterAuthorizedNetworksConfig + } + return nil +} + +func (m *Cluster) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *Cluster) GetNetworkConfig() *NetworkConfig { + if m != nil { + return m.NetworkConfig + } + return nil +} + +func (m *Cluster) GetPrivateClusterConfig() *PrivateClusterConfig { + if m != nil { + return m.PrivateClusterConfig + } + return nil +} + +func (m *Cluster) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Cluster) GetEndpoint() string { + if m != nil { + return m.Endpoint + } + return "" +} + +func (m *Cluster) GetInitialClusterVersion() string { + if m != nil { + return m.InitialClusterVersion + } + return "" +} + +func (m *Cluster) GetCurrentMasterVersion() string { + if m != nil { + return m.CurrentMasterVersion + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetCurrentNodeVersion() string { + if m != nil { + return m.CurrentNodeVersion + } + return "" +} + +func (m *Cluster) GetCreateTime() string { + if m != nil { + return m.CreateTime + } + return "" +} + +func (m *Cluster) GetStatus() Cluster_Status { + if m != nil { + return m.Status + } + return Cluster_STATUS_UNSPECIFIED +} + +func (m *Cluster) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Cluster) GetNodeIpv4CidrSize() int32 { + if m != nil { + return m.NodeIpv4CidrSize + } + return 0 +} + +func (m *Cluster) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +func (m *Cluster) GetCurrentNodeCount() int32 { + if m != nil { + return m.CurrentNodeCount + } + return 0 +} + +func (m *Cluster) GetExpireTime() string { + if m != nil { + return m.ExpireTime + } + return "" +} + +func (m *Cluster) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +// ClusterUpdate describes an update to the cluster. Exactly one update can +// be applied to a cluster with each request, so at most one field can be +// provided. +type ClusterUpdate struct { + // The Kubernetes version to change the nodes to (typically an + // upgrade). + // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the Kubernetes master version + DesiredNodeVersion string `protobuf:"bytes,4,opt,name=desired_node_version,json=desiredNodeVersion,proto3" json:"desired_node_version,omitempty"` + // The monitoring service the cluster should use to write metrics. 
+ // Currently available options: + // + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + DesiredMonitoringService string `protobuf:"bytes,5,opt,name=desired_monitoring_service,json=desiredMonitoringService,proto3" json:"desired_monitoring_service,omitempty"` + // Configurations for the various addons available to run in the cluster. + DesiredAddonsConfig *AddonsConfig `protobuf:"bytes,6,opt,name=desired_addons_config,json=desiredAddonsConfig,proto3" json:"desired_addons_config,omitempty"` + // The node pool to be upgraded. This field is mandatory if + // "desired_node_version", "desired_image_family" or + // "desired_node_pool_autoscaling" is specified and there is more than one + // node pool on the cluster. + DesiredNodePoolId string `protobuf:"bytes,7,opt,name=desired_node_pool_id,json=desiredNodePoolId,proto3" json:"desired_node_pool_id,omitempty"` + // The desired image type for the node pool. + // NOTE: Set the "desired_node_pool" field as well. + DesiredImageType string `protobuf:"bytes,8,opt,name=desired_image_type,json=desiredImageType,proto3" json:"desired_image_type,omitempty"` + // Autoscaler configuration for the node pool specified in + // desired_node_pool_id. If there is only one pool in the + // cluster and desired_node_pool_id is not provided then + // the change applies to that single node pool. + DesiredNodePoolAutoscaling *NodePoolAutoscaling `protobuf:"bytes,9,opt,name=desired_node_pool_autoscaling,json=desiredNodePoolAutoscaling,proto3" json:"desired_node_pool_autoscaling,omitempty"` + // The desired list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + DesiredLocations []string `protobuf:"bytes,10,rep,name=desired_locations,json=desiredLocations,proto3" json:"desired_locations,omitempty"` + // The desired configuration options for master authorized networks feature. + DesiredMasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,12,opt,name=desired_master_authorized_networks_config,json=desiredMasterAuthorizedNetworksConfig,proto3" json:"desired_master_authorized_networks_config,omitempty"` + // The Kubernetes version to change the master to. 
+ // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the default Kubernetes version + DesiredMasterVersion string `protobuf:"bytes,100,opt,name=desired_master_version,json=desiredMasterVersion,proto3" json:"desired_master_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterUpdate) Reset() { *m = ClusterUpdate{} } +func (m *ClusterUpdate) String() string { return proto.CompactTextString(m) } +func (*ClusterUpdate) ProtoMessage() {} +func (*ClusterUpdate) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{14} +} +func (m *ClusterUpdate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterUpdate.Unmarshal(m, b) +} +func (m *ClusterUpdate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterUpdate.Marshal(b, m, deterministic) +} +func (dst *ClusterUpdate) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterUpdate.Merge(dst, src) +} +func (m *ClusterUpdate) XXX_Size() int { + return xxx_messageInfo_ClusterUpdate.Size(m) +} +func (m *ClusterUpdate) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterUpdate.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterUpdate proto.InternalMessageInfo + +func (m *ClusterUpdate) GetDesiredNodeVersion() string { + if m != nil { + return m.DesiredNodeVersion + } + return "" +} + +func (m *ClusterUpdate) GetDesiredMonitoringService() string { + if m != nil { + return m.DesiredMonitoringService + } + return "" +} + +func (m *ClusterUpdate) GetDesiredAddonsConfig() *AddonsConfig { + if m != nil { + return m.DesiredAddonsConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredNodePoolId() string { + if m != nil { + return m.DesiredNodePoolId + } + return "" +} + +func (m *ClusterUpdate) GetDesiredImageType() string { + if m != nil { + return m.DesiredImageType + } + return "" +} + +func (m *ClusterUpdate) GetDesiredNodePoolAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.DesiredNodePoolAutoscaling + } + return nil +} + +func (m *ClusterUpdate) GetDesiredLocations() []string { + if m != nil { + return m.DesiredLocations + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.DesiredMasterAuthorizedNetworksConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterVersion() string { + if m != nil { + return m.DesiredMasterVersion + } + return "" +} + +// This operation resource represents operations that may have happened or are +// happening on the cluster. All fields are output only. +type Operation struct { + // The server-assigned ID for the operation. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation + // is taking place. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The operation type. 
+ OperationType Operation_Type `protobuf:"varint,3,opt,name=operation_type,json=operationType,proto3,enum=google.container.v1.Operation_Type" json:"operation_type,omitempty"` + // The current status of the operation. + Status Operation_Status `protobuf:"varint,4,opt,name=status,proto3,enum=google.container.v1.Operation_Status" json:"status,omitempty"` + // Detailed operation progress, if available. + Detail string `protobuf:"bytes,8,opt,name=detail,proto3" json:"detail,omitempty"` + // If an error has occurred, a textual description of the error. + StatusMessage string `protobuf:"bytes,5,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,6,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // Server-defined URL for the target of the operation. + TargetLink string `protobuf:"bytes,7,opt,name=target_link,json=targetLink,proto3" json:"target_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,9,opt,name=location,proto3" json:"location,omitempty"` + // [Output only] The time the operation started, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + StartTime string `protobuf:"bytes,10,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // [Output only] The time the operation completed, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + EndTime string `protobuf:"bytes,11,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{15} +} +func (m *Operation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operation.Unmarshal(m, b) +} +func (m *Operation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operation.Marshal(b, m, deterministic) +} +func (dst *Operation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operation.Merge(dst, src) +} +func (m *Operation) XXX_Size() int { + return xxx_messageInfo_Operation.Size(m) +} +func (m *Operation) XXX_DiscardUnknown() { + xxx_messageInfo_Operation.DiscardUnknown(m) +} + +var xxx_messageInfo_Operation proto.InternalMessageInfo + +func (m *Operation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Deprecated: Do not use. 
+func (m *Operation) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Operation) GetOperationType() Operation_Type { + if m != nil { + return m.OperationType + } + return Operation_TYPE_UNSPECIFIED +} + +func (m *Operation) GetStatus() Operation_Status { + if m != nil { + return m.Status + } + return Operation_STATUS_UNSPECIFIED +} + +func (m *Operation) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func (m *Operation) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Operation) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *Operation) GetTargetLink() string { + if m != nil { + return m.TargetLink + } + return "" +} + +func (m *Operation) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Operation) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *Operation) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +// CreateClusterRequest creates a cluster. +type CreateClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // A [cluster + // resource](/container-engine/reference/rest/v1/projects.zones.clusters) + Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + // The parent (project and location) where the cluster will be created. + // Specified in the format 'projects/*/locations/*'. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{16} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CreateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. 
+func (m *CreateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *CreateClusterRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetClusterRequest gets the settings of a cluster. +type GetClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to retrieve. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster) of the cluster to retrieve. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{17} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *GetClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateClusterRequest updates the settings of a cluster. +type UpdateClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // A description of the update. + Update *ClusterUpdate `protobuf:"bytes,4,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterRequest) Reset() { *m = UpdateClusterRequest{} } +func (m *UpdateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterRequest) ProtoMessage() {} +func (*UpdateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{18} +} +func (m *UpdateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterRequest.Unmarshal(m, b) +} +func (m *UpdateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterRequest.Merge(dst, src) +} +func (m *UpdateClusterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateClusterRequest.Size(m) +} +func (m *UpdateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateClusterRequest) GetUpdate() *ClusterUpdate { + if m != nil { + return m.Update + } + return nil +} + +func (m *UpdateClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateNodePoolRequests update a node pool's image and/or version. +type UpdateNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. 
+ Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to upgrade. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The Kubernetes version to change the nodes to (typically an + // upgrade). + // + // Users may specify either explicit versions offered by Kubernetes Engine or + // version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the Kubernetes master version + NodeVersion string `protobuf:"bytes,5,opt,name=node_version,json=nodeVersion,proto3" json:"node_version,omitempty"` + // The desired image type for the node pool. + ImageType string `protobuf:"bytes,6,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to + // update. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,8,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNodePoolRequest) Reset() { *m = UpdateNodePoolRequest{} } +func (m *UpdateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNodePoolRequest) ProtoMessage() {} +func (*UpdateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{19} +} +func (m *UpdateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNodePoolRequest.Unmarshal(m, b) +} +func (m *UpdateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNodePoolRequest.Merge(dst, src) +} +func (m *UpdateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNodePoolRequest.Size(m) +} +func (m *UpdateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. 
+func (m *UpdateNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *UpdateNodePoolRequest) GetNodeVersion() string { + if m != nil { + return m.NodeVersion + } + return "" +} + +func (m *UpdateNodePoolRequest) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *UpdateNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolAutoscalingRequest sets the autoscaler settings of a node pool. +type SetNodePoolAutoscalingRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to upgrade. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // Autoscaling configuration for the node pool. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,5,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to set + // autoscaler settings. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNodePoolAutoscalingRequest) Reset() { *m = SetNodePoolAutoscalingRequest{} } +func (m *SetNodePoolAutoscalingRequest) String() string { return proto.CompactTextString(m) } +func (*SetNodePoolAutoscalingRequest) ProtoMessage() {} +func (*SetNodePoolAutoscalingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{20} +} +func (m *SetNodePoolAutoscalingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Unmarshal(m, b) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Marshal(b, m, deterministic) +} +func (dst *SetNodePoolAutoscalingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNodePoolAutoscalingRequest.Merge(dst, src) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Size() int { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Size(m) +} +func (m *SetNodePoolAutoscalingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNodePoolAutoscalingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNodePoolAutoscalingRequest proto.InternalMessageInfo + +// Deprecated: Do not use. 
+func (m *SetNodePoolAutoscalingRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *SetNodePoolAutoscalingRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLoggingServiceRequest sets the logging service of a cluster. +type SetLoggingServiceRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The logging service the cluster should use to write metrics. + // Currently available options: + // + // * "logging.googleapis.com" - the Google Cloud Logging service + // * "none" - no metrics will be exported from the cluster + LoggingService string `protobuf:"bytes,4,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The name (project, location, cluster) of the cluster to set logging. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLoggingServiceRequest) Reset() { *m = SetLoggingServiceRequest{} } +func (m *SetLoggingServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetLoggingServiceRequest) ProtoMessage() {} +func (*SetLoggingServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{21} +} +func (m *SetLoggingServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLoggingServiceRequest.Unmarshal(m, b) +} +func (m *SetLoggingServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLoggingServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetLoggingServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLoggingServiceRequest.Merge(dst, src) +} +func (m *SetLoggingServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetLoggingServiceRequest.Size(m) +} +func (m *SetLoggingServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLoggingServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLoggingServiceRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLoggingServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLoggingServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *SetLoggingServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMonitoringServiceRequest sets the monitoring service of a cluster. +type SetMonitoringServiceRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + MonitoringService string `protobuf:"bytes,4,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name (project, location, cluster) of the cluster to set monitoring. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMonitoringServiceRequest) Reset() { *m = SetMonitoringServiceRequest{} } +func (m *SetMonitoringServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetMonitoringServiceRequest) ProtoMessage() {} +func (*SetMonitoringServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{22} +} +func (m *SetMonitoringServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMonitoringServiceRequest.Unmarshal(m, b) +} +func (m *SetMonitoringServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMonitoringServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetMonitoringServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMonitoringServiceRequest.Merge(dst, src) +} +func (m *SetMonitoringServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetMonitoringServiceRequest.Size(m) +} +func (m *SetMonitoringServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMonitoringServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMonitoringServiceRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetAddonsConfigRequest sets the addons associated with the cluster. +type SetAddonsConfigRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The desired configurations for the various addons available to run in the + // cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,4,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name (project, location, cluster) of the cluster to set addons. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetAddonsConfigRequest) Reset() { *m = SetAddonsConfigRequest{} } +func (m *SetAddonsConfigRequest) String() string { return proto.CompactTextString(m) } +func (*SetAddonsConfigRequest) ProtoMessage() {} +func (*SetAddonsConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{23} +} +func (m *SetAddonsConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetAddonsConfigRequest.Unmarshal(m, b) +} +func (m *SetAddonsConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetAddonsConfigRequest.Marshal(b, m, deterministic) +} +func (dst *SetAddonsConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetAddonsConfigRequest.Merge(dst, src) +} +func (m *SetAddonsConfigRequest) XXX_Size() int { + return xxx_messageInfo_SetAddonsConfigRequest.Size(m) +} +func (m *SetAddonsConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetAddonsConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetAddonsConfigRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetAddonsConfigRequest) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *SetAddonsConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLocationsRequest sets the locations of the cluster. +type SetLocationsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The desired list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + Locations []string `protobuf:"bytes,4,rep,name=locations,proto3" json:"locations,omitempty"` + // The name (project, location, cluster) of the cluster to set locations. 
+ // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLocationsRequest) Reset() { *m = SetLocationsRequest{} } +func (m *SetLocationsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLocationsRequest) ProtoMessage() {} +func (*SetLocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{24} +} +func (m *SetLocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLocationsRequest.Unmarshal(m, b) +} +func (m *SetLocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLocationsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLocationsRequest.Merge(dst, src) +} +func (m *SetLocationsRequest) XXX_Size() int { + return xxx_messageInfo_SetLocationsRequest.Size(m) +} +func (m *SetLocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLocationsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLocationsRequest) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *SetLocationsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateMasterRequest updates the master of the cluster. +type UpdateMasterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The Kubernetes version to change the master to. 
+ // + // Users may specify either explicit versions offered by Kubernetes Engine or + // version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the default Kubernetes version + MasterVersion string `protobuf:"bytes,4,opt,name=master_version,json=masterVersion,proto3" json:"master_version,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateMasterRequest) Reset() { *m = UpdateMasterRequest{} } +func (m *UpdateMasterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateMasterRequest) ProtoMessage() {} +func (*UpdateMasterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{25} +} +func (m *UpdateMasterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateMasterRequest.Unmarshal(m, b) +} +func (m *UpdateMasterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateMasterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateMasterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateMasterRequest.Merge(dst, src) +} +func (m *UpdateMasterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateMasterRequest.Size(m) +} +func (m *UpdateMasterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateMasterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateMasterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateMasterRequest) GetMasterVersion() string { + if m != nil { + return m.MasterVersion + } + return "" +} + +func (m *UpdateMasterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMasterAuthRequest updates the admin password of a cluster. +type SetMasterAuthRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. 
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The exact form of action to be taken on the master auth. + Action SetMasterAuthRequest_Action `protobuf:"varint,4,opt,name=action,proto3,enum=google.container.v1.SetMasterAuthRequest_Action" json:"action,omitempty"` + // A description of the update. + Update *MasterAuth `protobuf:"bytes,5,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to set auth. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMasterAuthRequest) Reset() { *m = SetMasterAuthRequest{} } +func (m *SetMasterAuthRequest) String() string { return proto.CompactTextString(m) } +func (*SetMasterAuthRequest) ProtoMessage() {} +func (*SetMasterAuthRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{26} +} +func (m *SetMasterAuthRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMasterAuthRequest.Unmarshal(m, b) +} +func (m *SetMasterAuthRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMasterAuthRequest.Marshal(b, m, deterministic) +} +func (dst *SetMasterAuthRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMasterAuthRequest.Merge(dst, src) +} +func (m *SetMasterAuthRequest) XXX_Size() int { + return xxx_messageInfo_SetMasterAuthRequest.Size(m) +} +func (m *SetMasterAuthRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMasterAuthRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMasterAuthRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMasterAuthRequest) GetAction() SetMasterAuthRequest_Action { + if m != nil { + return m.Action + } + return SetMasterAuthRequest_UNKNOWN +} + +func (m *SetMasterAuthRequest) GetUpdate() *MasterAuth { + if m != nil { + return m.Update + } + return nil +} + +func (m *SetMasterAuthRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// DeleteClusterRequest deletes a cluster. +type DeleteClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to delete. + // This field has been deprecated and replaced by the name field. 
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster) of the cluster to delete. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{27} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *DeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListClustersRequest lists clusters. +type ListClustersRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides, or "-" for all zones. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The parent (project and location) where the clusters will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. 
+ Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{28} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListClustersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListClustersRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListClustersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// ListClustersResponse is the result of ListClustersRequest. +type ListClustersResponse struct { + // A list of clusters in the project in the specified zone, or + // across all ones. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // If any zones are listed here, the list of clusters returned + // may be missing those zones. 
+ MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{29} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// GetOperationRequest gets a single operation. +type GetOperationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The server-assigned `name` of the operation. + // This field has been deprecated and replaced by the name field. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, operation id) of the operation to get. + // Specified in the format 'projects/*/locations/*/operations/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOperationRequest) Reset() { *m = GetOperationRequest{} } +func (m *GetOperationRequest) String() string { return proto.CompactTextString(m) } +func (*GetOperationRequest) ProtoMessage() {} +func (*GetOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{30} +} +func (m *GetOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOperationRequest.Unmarshal(m, b) +} +func (m *GetOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOperationRequest.Marshal(b, m, deterministic) +} +func (dst *GetOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOperationRequest.Merge(dst, src) +} +func (m *GetOperationRequest) XXX_Size() int { + return xxx_messageInfo_GetOperationRequest.Size(m) +} +func (m *GetOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOperationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *GetOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsRequest lists operations. +type ListOperationsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) to return operations for, or `-` for + // all zones. This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The parent (project and location) where the operations will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. 
+ Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsRequest) Reset() { *m = ListOperationsRequest{} } +func (m *ListOperationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListOperationsRequest) ProtoMessage() {} +func (*ListOperationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{31} +} +func (m *ListOperationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsRequest.Unmarshal(m, b) +} +func (m *ListOperationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListOperationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsRequest.Merge(dst, src) +} +func (m *ListOperationsRequest) XXX_Size() int { + return xxx_messageInfo_ListOperationsRequest.Size(m) +} +func (m *ListOperationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListOperationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListOperationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListOperationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// CancelOperationRequest cancels a single operation. +type CancelOperationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The server-assigned `name` of the operation. + // This field has been deprecated and replaced by the name field. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, operation id) of the operation to cancel. + // Specified in the format 'projects/*/locations/*/operations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelOperationRequest) Reset() { *m = CancelOperationRequest{} } +func (m *CancelOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CancelOperationRequest) ProtoMessage() {} +func (*CancelOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{32} +} +func (m *CancelOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelOperationRequest.Unmarshal(m, b) +} +func (m *CancelOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CancelOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelOperationRequest.Merge(dst, src) +} +func (m *CancelOperationRequest) XXX_Size() int { + return xxx_messageInfo_CancelOperationRequest.Size(m) +} +func (m *CancelOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelOperationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CancelOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsResponse is the result of ListOperationsRequest. +type ListOperationsResponse struct { + // A list of operations in the project in the specified zone. + Operations []*Operation `protobuf:"bytes,1,rep,name=operations,proto3" json:"operations,omitempty"` + // If any zones are listed here, the list of operations returned + // may be missing the operations from those zones. 
+ MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{33} +} +func (m *ListOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsResponse.Unmarshal(m, b) +} +func (m *ListOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsResponse.Merge(dst, src) +} +func (m *ListOperationsResponse) XXX_Size() int { + return xxx_messageInfo_ListOperationsResponse.Size(m) +} +func (m *ListOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsResponse proto.InternalMessageInfo + +func (m *ListOperationsResponse) GetOperations() []*Operation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ListOperationsResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// Gets the current Kubernetes Engine service configuration. +type GetServerConfigRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) to return operations for. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The name (project and location) of the server config to get + // Specified in the format 'projects/*/locations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerConfigRequest) Reset() { *m = GetServerConfigRequest{} } +func (m *GetServerConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetServerConfigRequest) ProtoMessage() {} +func (*GetServerConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{34} +} +func (m *GetServerConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerConfigRequest.Unmarshal(m, b) +} +func (m *GetServerConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetServerConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerConfigRequest.Merge(dst, src) +} +func (m *GetServerConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetServerConfigRequest.Size(m) +} +func (m *GetServerConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerConfigRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetServerConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetServerConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetServerConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Kubernetes Engine service configuration. +type ServerConfig struct { + // Version of Kubernetes the service deploys by default. + DefaultClusterVersion string `protobuf:"bytes,1,opt,name=default_cluster_version,json=defaultClusterVersion,proto3" json:"default_cluster_version,omitempty"` + // List of valid node upgrade target versions. + ValidNodeVersions []string `protobuf:"bytes,3,rep,name=valid_node_versions,json=validNodeVersions,proto3" json:"valid_node_versions,omitempty"` + // Default image type. + DefaultImageType string `protobuf:"bytes,4,opt,name=default_image_type,json=defaultImageType,proto3" json:"default_image_type,omitempty"` + // List of valid image types. + ValidImageTypes []string `protobuf:"bytes,5,rep,name=valid_image_types,json=validImageTypes,proto3" json:"valid_image_types,omitempty"` + // List of valid master versions. 
+ ValidMasterVersions []string `protobuf:"bytes,6,rep,name=valid_master_versions,json=validMasterVersions,proto3" json:"valid_master_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerConfig) Reset() { *m = ServerConfig{} } +func (m *ServerConfig) String() string { return proto.CompactTextString(m) } +func (*ServerConfig) ProtoMessage() {} +func (*ServerConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{35} +} +func (m *ServerConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerConfig.Unmarshal(m, b) +} +func (m *ServerConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerConfig.Marshal(b, m, deterministic) +} +func (dst *ServerConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerConfig.Merge(dst, src) +} +func (m *ServerConfig) XXX_Size() int { + return xxx_messageInfo_ServerConfig.Size(m) +} +func (m *ServerConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ServerConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerConfig proto.InternalMessageInfo + +func (m *ServerConfig) GetDefaultClusterVersion() string { + if m != nil { + return m.DefaultClusterVersion + } + return "" +} + +func (m *ServerConfig) GetValidNodeVersions() []string { + if m != nil { + return m.ValidNodeVersions + } + return nil +} + +func (m *ServerConfig) GetDefaultImageType() string { + if m != nil { + return m.DefaultImageType + } + return "" +} + +func (m *ServerConfig) GetValidImageTypes() []string { + if m != nil { + return m.ValidImageTypes + } + return nil +} + +func (m *ServerConfig) GetValidMasterVersions() []string { + if m != nil { + return m.ValidMasterVersions + } + return nil +} + +// CreateNodePoolRequest creates a node pool for a cluster. +type CreateNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the parent field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The node pool to create. + NodePool *NodePool `protobuf:"bytes,4,opt,name=node_pool,json=nodePool,proto3" json:"node_pool,omitempty"` + // The parent (project, location, cluster id) where the node pool will be + // created. Specified in the format + // 'projects/*/locations/*/clusters/*'. 
+ Parent string `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNodePoolRequest) Reset() { *m = CreateNodePoolRequest{} } +func (m *CreateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNodePoolRequest) ProtoMessage() {} +func (*CreateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{36} +} +func (m *CreateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNodePoolRequest.Unmarshal(m, b) +} +func (m *CreateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNodePoolRequest.Merge(dst, src) +} +func (m *CreateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_CreateNodePoolRequest.Size(m) +} +func (m *CreateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CreateNodePoolRequest) GetNodePool() *NodePool { + if m != nil { + return m.NodePool + } + return nil +} + +func (m *CreateNodePoolRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// DeleteNodePoolRequest deletes a node pool for a cluster. +type DeleteNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to delete. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster, node pool id) of the node pool to + // delete. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNodePoolRequest) Reset() { *m = DeleteNodePoolRequest{} } +func (m *DeleteNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNodePoolRequest) ProtoMessage() {} +func (*DeleteNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{37} +} +func (m *DeleteNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNodePoolRequest.Unmarshal(m, b) +} +func (m *DeleteNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNodePoolRequest.Merge(dst, src) +} +func (m *DeleteNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNodePoolRequest.Size(m) +} +func (m *DeleteNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *DeleteNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListNodePoolsRequest lists the node pool(s) for a cluster. +type ListNodePoolsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the parent field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The parent (project, location, cluster id) where the node pools will be + // listed. Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNodePoolsRequest) Reset() { *m = ListNodePoolsRequest{} } +func (m *ListNodePoolsRequest) String() string { return proto.CompactTextString(m) } +func (*ListNodePoolsRequest) ProtoMessage() {} +func (*ListNodePoolsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{38} +} +func (m *ListNodePoolsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNodePoolsRequest.Unmarshal(m, b) +} +func (m *ListNodePoolsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNodePoolsRequest.Marshal(b, m, deterministic) +} +func (dst *ListNodePoolsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNodePoolsRequest.Merge(dst, src) +} +func (m *ListNodePoolsRequest) XXX_Size() int { + return xxx_messageInfo_ListNodePoolsRequest.Size(m) +} +func (m *ListNodePoolsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNodePoolsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNodePoolsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *ListNodePoolsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetNodePoolRequest retrieves a node pool for a cluster. +type GetNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster, node pool id) of the node pool to + // get. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNodePoolRequest) Reset() { *m = GetNodePoolRequest{} } +func (m *GetNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*GetNodePoolRequest) ProtoMessage() {} +func (*GetNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{39} +} +func (m *GetNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNodePoolRequest.Unmarshal(m, b) +} +func (m *GetNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *GetNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNodePoolRequest.Merge(dst, src) +} +func (m *GetNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_GetNodePoolRequest.Size(m) +} +func (m *GetNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *GetNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// NodePool contains the name and configuration for a cluster's node pool. +// Node pools are a set of nodes (i.e. VM's), with a common configuration and +// specification, under the control of the cluster master. They may have a set +// of Kubernetes labels applied to them, which may be used to reference them +// during pod scheduling. They may also be resized up or down, to accommodate +// the workload. +type NodePool struct { + // The name of the node pool. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The node configuration of the pool. + Config *NodeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // The initial node count for the pool. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. You must also have available + // firewall and routes quota. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // The version of the Kubernetes of this node. + Version string `protobuf:"bytes,101,opt,name=version,proto3" json:"version,omitempty"` + // [Output only] The resource URLs of the [managed instance + // groups](/compute/docs/instance-groups/creating-groups-of-managed-instances) + // associated with this node pool. 
+ InstanceGroupUrls []string `protobuf:"bytes,102,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` + // [Output only] The status of the nodes in this pool instance. + Status NodePool_Status `protobuf:"varint,103,opt,name=status,proto3,enum=google.container.v1.NodePool_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // node pool instance, if available. + StatusMessage string `protobuf:"bytes,104,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Autoscaler configuration for this NodePool. Autoscaler is enabled + // only if a valid configuration is present. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,4,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // NodeManagement configuration for this NodePool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePool) Reset() { *m = NodePool{} } +func (m *NodePool) String() string { return proto.CompactTextString(m) } +func (*NodePool) ProtoMessage() {} +func (*NodePool) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{40} +} +func (m *NodePool) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePool.Unmarshal(m, b) +} +func (m *NodePool) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePool.Marshal(b, m, deterministic) +} +func (dst *NodePool) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePool.Merge(dst, src) +} +func (m *NodePool) XXX_Size() int { + return xxx_messageInfo_NodePool.Size(m) +} +func (m *NodePool) XXX_DiscardUnknown() { + xxx_messageInfo_NodePool.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePool proto.InternalMessageInfo + +func (m *NodePool) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NodePool) GetConfig() *NodeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *NodePool) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *NodePool) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *NodePool) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *NodePool) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +func (m *NodePool) GetStatus() NodePool_Status { + if m != nil { + return m.Status + } + return NodePool_STATUS_UNSPECIFIED +} + +func (m *NodePool) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *NodePool) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *NodePool) GetManagement() *NodeManagement { + if m != nil { + return m.Management + } + return nil +} + +// NodeManagement defines the set of node management services turned on for the +// node pool. +type NodeManagement struct { + // A flag that specifies whether node auto-upgrade is enabled for the node + // pool. If enabled, node auto-upgrade helps keep the nodes in your node pool + // up to date with the latest release version of Kubernetes. 
+ AutoUpgrade bool `protobuf:"varint,1,opt,name=auto_upgrade,json=autoUpgrade,proto3" json:"auto_upgrade,omitempty"` + // A flag that specifies whether the node auto-repair is enabled for the node + // pool. If enabled, the nodes in this node pool will be monitored and, if + // they fail health checks too many times, an automatic repair action will be + // triggered. + AutoRepair bool `protobuf:"varint,2,opt,name=auto_repair,json=autoRepair,proto3" json:"auto_repair,omitempty"` + // Specifies the Auto Upgrade knobs for the node pool. + UpgradeOptions *AutoUpgradeOptions `protobuf:"bytes,10,opt,name=upgrade_options,json=upgradeOptions,proto3" json:"upgrade_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeManagement) Reset() { *m = NodeManagement{} } +func (m *NodeManagement) String() string { return proto.CompactTextString(m) } +func (*NodeManagement) ProtoMessage() {} +func (*NodeManagement) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{41} +} +func (m *NodeManagement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeManagement.Unmarshal(m, b) +} +func (m *NodeManagement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeManagement.Marshal(b, m, deterministic) +} +func (dst *NodeManagement) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeManagement.Merge(dst, src) +} +func (m *NodeManagement) XXX_Size() int { + return xxx_messageInfo_NodeManagement.Size(m) +} +func (m *NodeManagement) XXX_DiscardUnknown() { + xxx_messageInfo_NodeManagement.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeManagement proto.InternalMessageInfo + +func (m *NodeManagement) GetAutoUpgrade() bool { + if m != nil { + return m.AutoUpgrade + } + return false +} + +func (m *NodeManagement) GetAutoRepair() bool { + if m != nil { + return m.AutoRepair + } + return false +} + +func (m *NodeManagement) GetUpgradeOptions() *AutoUpgradeOptions { + if m != nil { + return m.UpgradeOptions + } + return nil +} + +// AutoUpgradeOptions defines the set of options for the user to control how +// the Auto Upgrades will proceed. +type AutoUpgradeOptions struct { + // [Output only] This field is set when upgrades are about to commence + // with the approximate start time for the upgrades, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + AutoUpgradeStartTime string `protobuf:"bytes,1,opt,name=auto_upgrade_start_time,json=autoUpgradeStartTime,proto3" json:"auto_upgrade_start_time,omitempty"` + // [Output only] This field is set when upgrades are about to commence + // with the description of the upgrade. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AutoUpgradeOptions) Reset() { *m = AutoUpgradeOptions{} } +func (m *AutoUpgradeOptions) String() string { return proto.CompactTextString(m) } +func (*AutoUpgradeOptions) ProtoMessage() {} +func (*AutoUpgradeOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{42} +} +func (m *AutoUpgradeOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AutoUpgradeOptions.Unmarshal(m, b) +} +func (m *AutoUpgradeOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AutoUpgradeOptions.Marshal(b, m, deterministic) +} +func (dst *AutoUpgradeOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_AutoUpgradeOptions.Merge(dst, src) +} +func (m *AutoUpgradeOptions) XXX_Size() int { + return xxx_messageInfo_AutoUpgradeOptions.Size(m) +} +func (m *AutoUpgradeOptions) XXX_DiscardUnknown() { + xxx_messageInfo_AutoUpgradeOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_AutoUpgradeOptions proto.InternalMessageInfo + +func (m *AutoUpgradeOptions) GetAutoUpgradeStartTime() string { + if m != nil { + return m.AutoUpgradeStartTime + } + return "" +} + +func (m *AutoUpgradeOptions) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// MaintenancePolicy defines the maintenance policy to be used for the cluster. +type MaintenancePolicy struct { + // Specifies the maintenance window in which maintenance may be performed. + Window *MaintenanceWindow `protobuf:"bytes,1,opt,name=window,proto3" json:"window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenancePolicy) Reset() { *m = MaintenancePolicy{} } +func (m *MaintenancePolicy) String() string { return proto.CompactTextString(m) } +func (*MaintenancePolicy) ProtoMessage() {} +func (*MaintenancePolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{43} +} +func (m *MaintenancePolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenancePolicy.Unmarshal(m, b) +} +func (m *MaintenancePolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenancePolicy.Marshal(b, m, deterministic) +} +func (dst *MaintenancePolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenancePolicy.Merge(dst, src) +} +func (m *MaintenancePolicy) XXX_Size() int { + return xxx_messageInfo_MaintenancePolicy.Size(m) +} +func (m *MaintenancePolicy) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenancePolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenancePolicy proto.InternalMessageInfo + +func (m *MaintenancePolicy) GetWindow() *MaintenanceWindow { + if m != nil { + return m.Window + } + return nil +} + +// MaintenanceWindow defines the maintenance window to be used for the cluster. 
+type MaintenanceWindow struct { + // Types that are valid to be assigned to Policy: + // *MaintenanceWindow_DailyMaintenanceWindow + Policy isMaintenanceWindow_Policy `protobuf_oneof:"policy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenanceWindow) Reset() { *m = MaintenanceWindow{} } +func (m *MaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*MaintenanceWindow) ProtoMessage() {} +func (*MaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{44} +} +func (m *MaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenanceWindow.Unmarshal(m, b) +} +func (m *MaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *MaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenanceWindow.Merge(dst, src) +} +func (m *MaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_MaintenanceWindow.Size(m) +} +func (m *MaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenanceWindow proto.InternalMessageInfo + +type isMaintenanceWindow_Policy interface { + isMaintenanceWindow_Policy() +} + +type MaintenanceWindow_DailyMaintenanceWindow struct { + DailyMaintenanceWindow *DailyMaintenanceWindow `protobuf:"bytes,2,opt,name=daily_maintenance_window,json=dailyMaintenanceWindow,proto3,oneof"` +} + +func (*MaintenanceWindow_DailyMaintenanceWindow) isMaintenanceWindow_Policy() {} + +func (m *MaintenanceWindow) GetPolicy() isMaintenanceWindow_Policy { + if m != nil { + return m.Policy + } + return nil +} + +func (m *MaintenanceWindow) GetDailyMaintenanceWindow() *DailyMaintenanceWindow { + if x, ok := m.GetPolicy().(*MaintenanceWindow_DailyMaintenanceWindow); ok { + return x.DailyMaintenanceWindow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*MaintenanceWindow) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MaintenanceWindow_OneofMarshaler, _MaintenanceWindow_OneofUnmarshaler, _MaintenanceWindow_OneofSizer, []interface{}{ + (*MaintenanceWindow_DailyMaintenanceWindow)(nil), + } +} + +func _MaintenanceWindow_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MaintenanceWindow) + // policy + switch x := m.Policy.(type) { + case *MaintenanceWindow_DailyMaintenanceWindow: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DailyMaintenanceWindow); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MaintenanceWindow.Policy has unexpected type %T", x) + } + return nil +} + +func _MaintenanceWindow_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MaintenanceWindow) + switch tag { + case 2: // policy.daily_maintenance_window + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DailyMaintenanceWindow) + err := b.DecodeMessage(msg) + m.Policy = &MaintenanceWindow_DailyMaintenanceWindow{msg} + return true, err + default: + return false, nil + } +} + +func _MaintenanceWindow_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MaintenanceWindow) + // policy + switch x := m.Policy.(type) { + case *MaintenanceWindow_DailyMaintenanceWindow: + s := proto.Size(x.DailyMaintenanceWindow) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Time window specified for daily maintenance operations. +type DailyMaintenanceWindow struct { + // Time within the maintenance window to start the maintenance operations. + // Time format should be in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) + // format "HH:MM”, where HH : [00-23] and MM : [00-59] GMT. + StartTime string `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // [Output only] Duration of the time window, automatically chosen to be + // smallest possible in the given scenario. + // Duration will be in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) + // format "PTnHnMnS". 
+ Duration string `protobuf:"bytes,3,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DailyMaintenanceWindow) Reset() { *m = DailyMaintenanceWindow{} } +func (m *DailyMaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*DailyMaintenanceWindow) ProtoMessage() {} +func (*DailyMaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{45} +} +func (m *DailyMaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DailyMaintenanceWindow.Unmarshal(m, b) +} +func (m *DailyMaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DailyMaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *DailyMaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_DailyMaintenanceWindow.Merge(dst, src) +} +func (m *DailyMaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_DailyMaintenanceWindow.Size(m) +} +func (m *DailyMaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_DailyMaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_DailyMaintenanceWindow proto.InternalMessageInfo + +func (m *DailyMaintenanceWindow) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *DailyMaintenanceWindow) GetDuration() string { + if m != nil { + return m.Duration + } + return "" +} + +// SetNodePoolManagementRequest sets the node management properties of a node +// pool. +type SetNodePoolManagementRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to update. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to update. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // NodeManagement configuration for the node pool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to set + // management properties. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNodePoolManagementRequest) Reset() { *m = SetNodePoolManagementRequest{} } +func (m *SetNodePoolManagementRequest) String() string { return proto.CompactTextString(m) } +func (*SetNodePoolManagementRequest) ProtoMessage() {} +func (*SetNodePoolManagementRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{46} +} +func (m *SetNodePoolManagementRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNodePoolManagementRequest.Unmarshal(m, b) +} +func (m *SetNodePoolManagementRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNodePoolManagementRequest.Marshal(b, m, deterministic) +} +func (dst *SetNodePoolManagementRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNodePoolManagementRequest.Merge(dst, src) +} +func (m *SetNodePoolManagementRequest) XXX_Size() int { + return xxx_messageInfo_SetNodePoolManagementRequest.Size(m) +} +func (m *SetNodePoolManagementRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNodePoolManagementRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNodePoolManagementRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetNodePoolManagementRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolManagementRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolManagementRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolManagementRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *SetNodePoolManagementRequest) GetManagement() *NodeManagement { + if m != nil { + return m.Management + } + return nil +} + +func (m *SetNodePoolManagementRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolSizeRequest sets the size a node +// pool. +type SetNodePoolSizeRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to update. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to update. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The desired node count for the pool. 
+ NodeCount int32 `protobuf:"varint,5,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to set + // size. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNodePoolSizeRequest) Reset() { *m = SetNodePoolSizeRequest{} } +func (m *SetNodePoolSizeRequest) String() string { return proto.CompactTextString(m) } +func (*SetNodePoolSizeRequest) ProtoMessage() {} +func (*SetNodePoolSizeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{47} +} +func (m *SetNodePoolSizeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNodePoolSizeRequest.Unmarshal(m, b) +} +func (m *SetNodePoolSizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNodePoolSizeRequest.Marshal(b, m, deterministic) +} +func (dst *SetNodePoolSizeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNodePoolSizeRequest.Merge(dst, src) +} +func (m *SetNodePoolSizeRequest) XXX_Size() int { + return xxx_messageInfo_SetNodePoolSizeRequest.Size(m) +} +func (m *SetNodePoolSizeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNodePoolSizeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNodePoolSizeRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetNodePoolSizeRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolSizeRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolSizeRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolSizeRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *SetNodePoolSizeRequest) GetNodeCount() int32 { + if m != nil { + return m.NodeCount + } + return 0 +} + +func (m *SetNodePoolSizeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// RollbackNodePoolUpgradeRequest rollbacks the previously Aborted or Failed +// NodePool upgrade. This will be an no-op if the last upgrade successfully +// completed. +type RollbackNodePoolUpgradeRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to rollback. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to rollback. 
+ // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster, node pool id) of the node poll to + // rollback upgrade. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackNodePoolUpgradeRequest) Reset() { *m = RollbackNodePoolUpgradeRequest{} } +func (m *RollbackNodePoolUpgradeRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackNodePoolUpgradeRequest) ProtoMessage() {} +func (*RollbackNodePoolUpgradeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{48} +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Unmarshal(m, b) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackNodePoolUpgradeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackNodePoolUpgradeRequest.Merge(dst, src) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Size() int { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Size(m) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackNodePoolUpgradeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackNodePoolUpgradeRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *RollbackNodePoolUpgradeRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *RollbackNodePoolUpgradeRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *RollbackNodePoolUpgradeRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *RollbackNodePoolUpgradeRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *RollbackNodePoolUpgradeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListNodePoolsResponse is the result of ListNodePoolsRequest. +type ListNodePoolsResponse struct { + // A list of node pools for a cluster. 
+ NodePools []*NodePool `protobuf:"bytes,1,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNodePoolsResponse) Reset() { *m = ListNodePoolsResponse{} } +func (m *ListNodePoolsResponse) String() string { return proto.CompactTextString(m) } +func (*ListNodePoolsResponse) ProtoMessage() {} +func (*ListNodePoolsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{49} +} +func (m *ListNodePoolsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNodePoolsResponse.Unmarshal(m, b) +} +func (m *ListNodePoolsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNodePoolsResponse.Marshal(b, m, deterministic) +} +func (dst *ListNodePoolsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNodePoolsResponse.Merge(dst, src) +} +func (m *ListNodePoolsResponse) XXX_Size() int { + return xxx_messageInfo_ListNodePoolsResponse.Size(m) +} +func (m *ListNodePoolsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNodePoolsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNodePoolsResponse proto.InternalMessageInfo + +func (m *ListNodePoolsResponse) GetNodePools() []*NodePool { + if m != nil { + return m.NodePools + } + return nil +} + +// NodePoolAutoscaling contains information required by cluster autoscaler to +// adjust the size of the node pool to the current cluster usage. +type NodePoolAutoscaling struct { + // Is autoscaling enabled for this node pool. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + // Minimum number of nodes in the NodePool. Must be >= 1 and <= + // max_node_count. + MinNodeCount int32 `protobuf:"varint,2,opt,name=min_node_count,json=minNodeCount,proto3" json:"min_node_count,omitempty"` + // Maximum number of nodes in the NodePool. Must be >= min_node_count. There + // has to enough quota to scale up the cluster. 
+ MaxNodeCount int32 `protobuf:"varint,3,opt,name=max_node_count,json=maxNodeCount,proto3" json:"max_node_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePoolAutoscaling) Reset() { *m = NodePoolAutoscaling{} } +func (m *NodePoolAutoscaling) String() string { return proto.CompactTextString(m) } +func (*NodePoolAutoscaling) ProtoMessage() {} +func (*NodePoolAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{50} +} +func (m *NodePoolAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePoolAutoscaling.Unmarshal(m, b) +} +func (m *NodePoolAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePoolAutoscaling.Marshal(b, m, deterministic) +} +func (dst *NodePoolAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePoolAutoscaling.Merge(dst, src) +} +func (m *NodePoolAutoscaling) XXX_Size() int { + return xxx_messageInfo_NodePoolAutoscaling.Size(m) +} +func (m *NodePoolAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_NodePoolAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePoolAutoscaling proto.InternalMessageInfo + +func (m *NodePoolAutoscaling) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *NodePoolAutoscaling) GetMinNodeCount() int32 { + if m != nil { + return m.MinNodeCount + } + return 0 +} + +func (m *NodePoolAutoscaling) GetMaxNodeCount() int32 { + if m != nil { + return m.MaxNodeCount + } + return 0 +} + +// SetLabelsRequest sets the Google Cloud Platform labels on a Google Container +// Engine cluster, which will in turn set them for Google Compute Engine +// resources used by that cluster +type SetLabelsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The labels to set for that cluster. + ResourceLabels map[string]string `protobuf:"bytes,4,rep,name=resource_labels,json=resourceLabels,proto3" json:"resource_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The fingerprint of the previous set of labels for this resource, + // used to detect conflicts. The fingerprint is initially generated by + // Kubernetes Engine and changes after every request to modify or update + // labels. You must always provide an up-to-date fingerprint hash when + // updating or changing labels. Make a get() request to the + // resource to get the latest fingerprint. 
+ LabelFingerprint string `protobuf:"bytes,5,opt,name=label_fingerprint,json=labelFingerprint,proto3" json:"label_fingerprint,omitempty"` + // The name (project, location, cluster id) of the cluster to set labels. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLabelsRequest) Reset() { *m = SetLabelsRequest{} } +func (m *SetLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLabelsRequest) ProtoMessage() {} +func (*SetLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{51} +} +func (m *SetLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLabelsRequest.Unmarshal(m, b) +} +func (m *SetLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLabelsRequest.Merge(dst, src) +} +func (m *SetLabelsRequest) XXX_Size() int { + return xxx_messageInfo_SetLabelsRequest.Size(m) +} +func (m *SetLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLabelsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLabelsRequest) GetResourceLabels() map[string]string { + if m != nil { + return m.ResourceLabels + } + return nil +} + +func (m *SetLabelsRequest) GetLabelFingerprint() string { + if m != nil { + return m.LabelFingerprint + } + return "" +} + +func (m *SetLabelsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLegacyAbacRequest enables or disables the ABAC authorization mechanism for +// a cluster. +type SetLegacyAbacRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to update. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Whether ABAC authorization will be enabled in the cluster. + Enabled bool `protobuf:"varint,4,opt,name=enabled,proto3" json:"enabled,omitempty"` + // The name (project, location, cluster id) of the cluster to set legacy abac. 
+ // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLegacyAbacRequest) Reset() { *m = SetLegacyAbacRequest{} } +func (m *SetLegacyAbacRequest) String() string { return proto.CompactTextString(m) } +func (*SetLegacyAbacRequest) ProtoMessage() {} +func (*SetLegacyAbacRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{52} +} +func (m *SetLegacyAbacRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLegacyAbacRequest.Unmarshal(m, b) +} +func (m *SetLegacyAbacRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLegacyAbacRequest.Marshal(b, m, deterministic) +} +func (dst *SetLegacyAbacRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLegacyAbacRequest.Merge(dst, src) +} +func (m *SetLegacyAbacRequest) XXX_Size() int { + return xxx_messageInfo_SetLegacyAbacRequest.Size(m) +} +func (m *SetLegacyAbacRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLegacyAbacRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLegacyAbacRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLegacyAbacRequest) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *SetLegacyAbacRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// StartIPRotationRequest creates a new IP for the cluster and then performs +// a node upgrade on each node pool to point to the new IP. +type StartIPRotationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster id) of the cluster to start IP + // rotation. Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + // Whether to rotate credentials during IP rotation. 
+ RotateCredentials bool `protobuf:"varint,7,opt,name=rotate_credentials,json=rotateCredentials,proto3" json:"rotate_credentials,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartIPRotationRequest) Reset() { *m = StartIPRotationRequest{} } +func (m *StartIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*StartIPRotationRequest) ProtoMessage() {} +func (*StartIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{53} +} +func (m *StartIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartIPRotationRequest.Unmarshal(m, b) +} +func (m *StartIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *StartIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartIPRotationRequest.Merge(dst, src) +} +func (m *StartIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_StartIPRotationRequest.Size(m) +} +func (m *StartIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartIPRotationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *StartIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StartIPRotationRequest) GetRotateCredentials() bool { + if m != nil { + return m.RotateCredentials + } + return false +} + +// CompleteIPRotationRequest moves the cluster master back into single-IP mode. +type CompleteIPRotationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster id) of the cluster to complete IP + // rotation. Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteIPRotationRequest) Reset() { *m = CompleteIPRotationRequest{} } +func (m *CompleteIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*CompleteIPRotationRequest) ProtoMessage() {} +func (*CompleteIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{54} +} +func (m *CompleteIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteIPRotationRequest.Unmarshal(m, b) +} +func (m *CompleteIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *CompleteIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteIPRotationRequest.Merge(dst, src) +} +func (m *CompleteIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_CompleteIPRotationRequest.Size(m) +} +func (m *CompleteIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteIPRotationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CompleteIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// AcceleratorConfig represents a Hardware Accelerator request. +type AcceleratorConfig struct { + // The number of the accelerator cards exposed to an instance. + AcceleratorCount int64 `protobuf:"varint,1,opt,name=accelerator_count,json=acceleratorCount,proto3" json:"accelerator_count,omitempty"` + // The accelerator type resource name. 
List of supported accelerators + // [here](/compute/docs/gpus/#Introduction) + AcceleratorType string `protobuf:"bytes,2,opt,name=accelerator_type,json=acceleratorType,proto3" json:"accelerator_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcceleratorConfig) Reset() { *m = AcceleratorConfig{} } +func (m *AcceleratorConfig) String() string { return proto.CompactTextString(m) } +func (*AcceleratorConfig) ProtoMessage() {} +func (*AcceleratorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{55} +} +func (m *AcceleratorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcceleratorConfig.Unmarshal(m, b) +} +func (m *AcceleratorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcceleratorConfig.Marshal(b, m, deterministic) +} +func (dst *AcceleratorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcceleratorConfig.Merge(dst, src) +} +func (m *AcceleratorConfig) XXX_Size() int { + return xxx_messageInfo_AcceleratorConfig.Size(m) +} +func (m *AcceleratorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AcceleratorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AcceleratorConfig proto.InternalMessageInfo + +func (m *AcceleratorConfig) GetAcceleratorCount() int64 { + if m != nil { + return m.AcceleratorCount + } + return 0 +} + +func (m *AcceleratorConfig) GetAcceleratorType() string { + if m != nil { + return m.AcceleratorType + } + return "" +} + +// SetNetworkPolicyRequest enables/disables network policy for a cluster. +type SetNetworkPolicyRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,4,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set networking + // policy. Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNetworkPolicyRequest) Reset() { *m = SetNetworkPolicyRequest{} } +func (m *SetNetworkPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetNetworkPolicyRequest) ProtoMessage() {} +func (*SetNetworkPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{56} +} +func (m *SetNetworkPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNetworkPolicyRequest.Unmarshal(m, b) +} +func (m *SetNetworkPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNetworkPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetNetworkPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNetworkPolicyRequest.Merge(dst, src) +} +func (m *SetNetworkPolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetNetworkPolicyRequest.Size(m) +} +func (m *SetNetworkPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNetworkPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNetworkPolicyRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetNetworkPolicyRequest) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *SetNetworkPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMaintenancePolicyRequest sets the maintenance policy for a cluster. +type SetMaintenancePolicyRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to update. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The maintenance policy to be set for the cluster. An empty field + // clears the existing maintenance policy. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,4,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set maintenance + // policy. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMaintenancePolicyRequest) Reset() { *m = SetMaintenancePolicyRequest{} } +func (m *SetMaintenancePolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetMaintenancePolicyRequest) ProtoMessage() {} +func (*SetMaintenancePolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{57} +} +func (m *SetMaintenancePolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMaintenancePolicyRequest.Unmarshal(m, b) +} +func (m *SetMaintenancePolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMaintenancePolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetMaintenancePolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMaintenancePolicyRequest.Merge(dst, src) +} +func (m *SetMaintenancePolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetMaintenancePolicyRequest.Size(m) +} +func (m *SetMaintenancePolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMaintenancePolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMaintenancePolicyRequest proto.InternalMessageInfo + +func (m *SetMaintenancePolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *SetMaintenancePolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// NetworkConfig reports the relative names of network & subnetwork. +type NetworkConfig struct { + // Output only. The relative name of the Google Compute Engine + // [network][google.container.v1.NetworkConfig.network](/compute/docs/networks-and-firewalls#networks) to which + // the cluster is connected. + // Example: projects/my-project/global/networks/my-network + Network string `protobuf:"bytes,1,opt,name=network,proto3" json:"network,omitempty"` + // Output only. The relative name of the Google Compute Engine + // [subnetwork](/compute/docs/vpc) to which the cluster is connected. 
+ // Example: projects/my-project/regions/us-central1/subnetworks/my-subnet + Subnetwork string `protobuf:"bytes,2,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkConfig) Reset() { *m = NetworkConfig{} } +func (m *NetworkConfig) String() string { return proto.CompactTextString(m) } +func (*NetworkConfig) ProtoMessage() {} +func (*NetworkConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_0391086e6c7dadfe, []int{58} +} +func (m *NetworkConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkConfig.Unmarshal(m, b) +} +func (m *NetworkConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkConfig.Marshal(b, m, deterministic) +} +func (dst *NetworkConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkConfig.Merge(dst, src) +} +func (m *NetworkConfig) XXX_Size() int { + return xxx_messageInfo_NetworkConfig.Size(m) +} +func (m *NetworkConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkConfig proto.InternalMessageInfo + +func (m *NetworkConfig) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *NetworkConfig) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +func init() { + proto.RegisterType((*NodeConfig)(nil), "google.container.v1.NodeConfig") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1.NodeConfig.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1.NodeConfig.MetadataEntry") + proto.RegisterType((*MasterAuth)(nil), "google.container.v1.MasterAuth") + proto.RegisterType((*ClientCertificateConfig)(nil), "google.container.v1.ClientCertificateConfig") + proto.RegisterType((*AddonsConfig)(nil), "google.container.v1.AddonsConfig") + proto.RegisterType((*HttpLoadBalancing)(nil), "google.container.v1.HttpLoadBalancing") + proto.RegisterType((*HorizontalPodAutoscaling)(nil), "google.container.v1.HorizontalPodAutoscaling") + proto.RegisterType((*KubernetesDashboard)(nil), "google.container.v1.KubernetesDashboard") + proto.RegisterType((*NetworkPolicyConfig)(nil), "google.container.v1.NetworkPolicyConfig") + proto.RegisterType((*PrivateClusterConfig)(nil), "google.container.v1.PrivateClusterConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig)(nil), "google.container.v1.MasterAuthorizedNetworksConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig_CidrBlock)(nil), "google.container.v1.MasterAuthorizedNetworksConfig.CidrBlock") + proto.RegisterType((*LegacyAbac)(nil), "google.container.v1.LegacyAbac") + proto.RegisterType((*NetworkPolicy)(nil), "google.container.v1.NetworkPolicy") + proto.RegisterType((*IPAllocationPolicy)(nil), "google.container.v1.IPAllocationPolicy") + proto.RegisterType((*Cluster)(nil), "google.container.v1.Cluster") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1.Cluster.ResourceLabelsEntry") + proto.RegisterType((*ClusterUpdate)(nil), "google.container.v1.ClusterUpdate") + proto.RegisterType((*Operation)(nil), "google.container.v1.Operation") + proto.RegisterType((*CreateClusterRequest)(nil), "google.container.v1.CreateClusterRequest") + proto.RegisterType((*GetClusterRequest)(nil), "google.container.v1.GetClusterRequest") + proto.RegisterType((*UpdateClusterRequest)(nil), "google.container.v1.UpdateClusterRequest") 
+ proto.RegisterType((*UpdateNodePoolRequest)(nil), "google.container.v1.UpdateNodePoolRequest") + proto.RegisterType((*SetNodePoolAutoscalingRequest)(nil), "google.container.v1.SetNodePoolAutoscalingRequest") + proto.RegisterType((*SetLoggingServiceRequest)(nil), "google.container.v1.SetLoggingServiceRequest") + proto.RegisterType((*SetMonitoringServiceRequest)(nil), "google.container.v1.SetMonitoringServiceRequest") + proto.RegisterType((*SetAddonsConfigRequest)(nil), "google.container.v1.SetAddonsConfigRequest") + proto.RegisterType((*SetLocationsRequest)(nil), "google.container.v1.SetLocationsRequest") + proto.RegisterType((*UpdateMasterRequest)(nil), "google.container.v1.UpdateMasterRequest") + proto.RegisterType((*SetMasterAuthRequest)(nil), "google.container.v1.SetMasterAuthRequest") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.container.v1.DeleteClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.container.v1.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.container.v1.ListClustersResponse") + proto.RegisterType((*GetOperationRequest)(nil), "google.container.v1.GetOperationRequest") + proto.RegisterType((*ListOperationsRequest)(nil), "google.container.v1.ListOperationsRequest") + proto.RegisterType((*CancelOperationRequest)(nil), "google.container.v1.CancelOperationRequest") + proto.RegisterType((*ListOperationsResponse)(nil), "google.container.v1.ListOperationsResponse") + proto.RegisterType((*GetServerConfigRequest)(nil), "google.container.v1.GetServerConfigRequest") + proto.RegisterType((*ServerConfig)(nil), "google.container.v1.ServerConfig") + proto.RegisterType((*CreateNodePoolRequest)(nil), "google.container.v1.CreateNodePoolRequest") + proto.RegisterType((*DeleteNodePoolRequest)(nil), "google.container.v1.DeleteNodePoolRequest") + proto.RegisterType((*ListNodePoolsRequest)(nil), "google.container.v1.ListNodePoolsRequest") + proto.RegisterType((*GetNodePoolRequest)(nil), "google.container.v1.GetNodePoolRequest") + proto.RegisterType((*NodePool)(nil), "google.container.v1.NodePool") + proto.RegisterType((*NodeManagement)(nil), "google.container.v1.NodeManagement") + proto.RegisterType((*AutoUpgradeOptions)(nil), "google.container.v1.AutoUpgradeOptions") + proto.RegisterType((*MaintenancePolicy)(nil), "google.container.v1.MaintenancePolicy") + proto.RegisterType((*MaintenanceWindow)(nil), "google.container.v1.MaintenanceWindow") + proto.RegisterType((*DailyMaintenanceWindow)(nil), "google.container.v1.DailyMaintenanceWindow") + proto.RegisterType((*SetNodePoolManagementRequest)(nil), "google.container.v1.SetNodePoolManagementRequest") + proto.RegisterType((*SetNodePoolSizeRequest)(nil), "google.container.v1.SetNodePoolSizeRequest") + proto.RegisterType((*RollbackNodePoolUpgradeRequest)(nil), "google.container.v1.RollbackNodePoolUpgradeRequest") + proto.RegisterType((*ListNodePoolsResponse)(nil), "google.container.v1.ListNodePoolsResponse") + proto.RegisterType((*NodePoolAutoscaling)(nil), "google.container.v1.NodePoolAutoscaling") + proto.RegisterType((*SetLabelsRequest)(nil), "google.container.v1.SetLabelsRequest") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1.SetLabelsRequest.ResourceLabelsEntry") + proto.RegisterType((*SetLegacyAbacRequest)(nil), "google.container.v1.SetLegacyAbacRequest") + proto.RegisterType((*StartIPRotationRequest)(nil), "google.container.v1.StartIPRotationRequest") + proto.RegisterType((*CompleteIPRotationRequest)(nil), 
"google.container.v1.CompleteIPRotationRequest") + proto.RegisterType((*AcceleratorConfig)(nil), "google.container.v1.AcceleratorConfig") + proto.RegisterType((*SetNetworkPolicyRequest)(nil), "google.container.v1.SetNetworkPolicyRequest") + proto.RegisterType((*SetMaintenancePolicyRequest)(nil), "google.container.v1.SetMaintenancePolicyRequest") + proto.RegisterType((*NetworkConfig)(nil), "google.container.v1.NetworkConfig") + proto.RegisterEnum("google.container.v1.NetworkPolicy_Provider", NetworkPolicy_Provider_name, NetworkPolicy_Provider_value) + proto.RegisterEnum("google.container.v1.Cluster_Status", Cluster_Status_name, Cluster_Status_value) + proto.RegisterEnum("google.container.v1.Operation_Status", Operation_Status_name, Operation_Status_value) + proto.RegisterEnum("google.container.v1.Operation_Type", Operation_Type_name, Operation_Type_value) + proto.RegisterEnum("google.container.v1.SetMasterAuthRequest_Action", SetMasterAuthRequest_Action_name, SetMasterAuthRequest_Action_value) + proto.RegisterEnum("google.container.v1.NodePool_Status", NodePool_Status_name, NodePool_Status_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterManagerClient is the client API for ClusterManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterManagerClient interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Gets the details of a specific cluster. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the settings of a specific cluster. + UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the version and/or image type for a specific node pool. + UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the autoscaling settings for a specific node pool. + SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the logging service for a specific cluster. + SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the monitoring service for a specific cluster. 
+ SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the addons for a specific cluster. + SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the locations for a specific cluster. + SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the master for a specific cluster. + UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password for a specific cluster. + // This can be either via password generation or explicitly set the password. + SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) + // Cancels the specified operation. + CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns configuration info about the Kubernetes Engine service. + GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes a node pool from a cluster. + DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be an no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets labels on a cluster. + SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. + SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) + // Start master IP rotation. 
+ StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the size for a specific node pool. + SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the maintenance policy for a cluster. + SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) +} + +type clusterManagerClient struct { + cc *grpc.ClientConn +} + +func NewClusterManagerClient(cc *grpc.ClientConn) ClusterManagerClient { + return &clusterManagerClient{cc} +} + +func (c *clusterManagerClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/UpdateNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetNodePoolAutoscaling", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetLoggingService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetMonitoringService", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetAddonsConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetLocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/UpdateMaster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetMasterAuth", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) { + out := new(ListOperationsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/ListOperations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/GetOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/CancelOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) { + out := new(ServerConfig) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/GetServerConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) { + out := new(ListNodePoolsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/ListNodePools", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) { + out := new(NodePool) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/GetNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/CreateNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/DeleteNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/RollbackNodePoolUpgrade", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetNodePoolManagement", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetLegacyAbac", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/StartIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/CompleteIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetNodePoolSize", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetNetworkPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1.ClusterManager/SetMaintenancePolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterManagerServer is the server API for ClusterManager service. +type ClusterManagerServer interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Gets the details of a specific cluster. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. + CreateCluster(context.Context, *CreateClusterRequest) (*Operation, error) + // Updates the settings of a specific cluster. + UpdateCluster(context.Context, *UpdateClusterRequest) (*Operation, error) + // Updates the version and/or image type for a specific node pool. + UpdateNodePool(context.Context, *UpdateNodePoolRequest) (*Operation, error) + // Sets the autoscaling settings for a specific node pool. + SetNodePoolAutoscaling(context.Context, *SetNodePoolAutoscalingRequest) (*Operation, error) + // Sets the logging service for a specific cluster. + SetLoggingService(context.Context, *SetLoggingServiceRequest) (*Operation, error) + // Sets the monitoring service for a specific cluster. + SetMonitoringService(context.Context, *SetMonitoringServiceRequest) (*Operation, error) + // Sets the addons for a specific cluster. + SetAddonsConfig(context.Context, *SetAddonsConfigRequest) (*Operation, error) + // Sets the locations for a specific cluster. + SetLocations(context.Context, *SetLocationsRequest) (*Operation, error) + // Updates the master for a specific cluster. + UpdateMaster(context.Context, *UpdateMasterRequest) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password for a specific cluster. + // This can be either via password generation or explicitly set the password. + SetMasterAuth(context.Context, *SetMasterAuthRequest) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. 
+ DeleteCluster(context.Context, *DeleteClusterRequest) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(context.Context, *ListOperationsRequest) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(context.Context, *GetOperationRequest) (*Operation, error) + // Cancels the specified operation. + CancelOperation(context.Context, *CancelOperationRequest) (*empty.Empty, error) + // Returns configuration info about the Kubernetes Engine service. + GetServerConfig(context.Context, *GetServerConfigRequest) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(context.Context, *ListNodePoolsRequest) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(context.Context, *GetNodePoolRequest) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(context.Context, *CreateNodePoolRequest) (*Operation, error) + // Deletes a node pool from a cluster. + DeleteNodePool(context.Context, *DeleteNodePoolRequest) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be an no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(context.Context, *RollbackNodePoolUpgradeRequest) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(context.Context, *SetNodePoolManagementRequest) (*Operation, error) + // Sets labels on a cluster. + SetLabels(context.Context, *SetLabelsRequest) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. + SetLegacyAbac(context.Context, *SetLegacyAbacRequest) (*Operation, error) + // Start master IP rotation. + StartIPRotation(context.Context, *StartIPRotationRequest) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(context.Context, *CompleteIPRotationRequest) (*Operation, error) + // Sets the size for a specific node pool. + SetNodePoolSize(context.Context, *SetNodePoolSizeRequest) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(context.Context, *SetNetworkPolicyRequest) (*Operation, error) + // Sets the maintenance policy for a cluster. 
+ SetMaintenancePolicy(context.Context, *SetMaintenancePolicyRequest) (*Operation, error) +} + +func RegisterClusterManagerServer(s *grpc.Server, srv ClusterManagerServer) { + s.RegisterService(&_ClusterManager_serviceDesc, srv) +} + +func _ClusterManager_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateCluster(ctx, req.(*UpdateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/UpdateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(ClusterManagerServer).UpdateNodePool(ctx, req.(*UpdateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolAutoscaling_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolAutoscalingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetNodePoolAutoscaling", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, req.(*SetNodePoolAutoscalingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLoggingService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLoggingServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLoggingService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetLoggingService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLoggingService(ctx, req.(*SetLoggingServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMonitoringService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMonitoringServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetMonitoringService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, req.(*SetMonitoringServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetAddonsConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetAddonsConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetAddonsConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, req.(*SetAddonsConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetLocations", + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(ClusterManagerServer).SetLocations(ctx, req.(*SetLocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateMaster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateMasterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateMaster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/UpdateMaster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateMaster(ctx, req.(*UpdateMasterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMasterAuth_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMasterAuthRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetMasterAuth", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, req.(*SetMasterAuthRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListOperations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOperationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListOperations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/ListOperations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListOperations(ctx, req.(*ListOperationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/GetOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetOperation(ctx, req.(*GetOperationRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CancelOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CancelOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/CancelOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CancelOperation(ctx, req.(*CancelOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetServerConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServerConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetServerConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/GetServerConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetServerConfig(ctx, req.(*GetServerConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListNodePools_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNodePoolsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListNodePools(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/ListNodePools", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListNodePools(ctx, req.(*ListNodePoolsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/GetNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetNodePool(ctx, req.(*GetNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/CreateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateNodePool(ctx, req.(*CreateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_ClusterManager_DeleteNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/DeleteNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, req.(*DeleteNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_RollbackNodePoolUpgrade_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackNodePoolUpgradeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/RollbackNodePoolUpgrade", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, req.(*RollbackNodePoolUpgradeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolManagement_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolManagementRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetNodePoolManagement", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, req.(*SetNodePoolManagementRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLabels(ctx, req.(*SetLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLegacyAbac_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLegacyAbacRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLegacyAbac(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetLegacyAbac", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLegacyAbac(ctx, req.(*SetLegacyAbacRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_ClusterManager_StartIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StartIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).StartIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/StartIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).StartIPRotation(ctx, req.(*StartIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CompleteIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CompleteIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/CompleteIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, req.(*CompleteIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolSize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolSizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetNodePoolSize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, req.(*SetNodePoolSizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNetworkPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNetworkPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetNetworkPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNetworkPolicy(ctx, req.(*SetNetworkPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMaintenancePolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMaintenancePolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1.ClusterManager/SetMaintenancePolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, req.(*SetMaintenancePolicyRequest)) + } + return interceptor(ctx, in, info, handler) 
+} + +var _ClusterManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.container.v1.ClusterManager", + HandlerType: (*ClusterManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListClusters", + Handler: _ClusterManager_ListClusters_Handler, + }, + { + MethodName: "GetCluster", + Handler: _ClusterManager_GetCluster_Handler, + }, + { + MethodName: "CreateCluster", + Handler: _ClusterManager_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _ClusterManager_UpdateCluster_Handler, + }, + { + MethodName: "UpdateNodePool", + Handler: _ClusterManager_UpdateNodePool_Handler, + }, + { + MethodName: "SetNodePoolAutoscaling", + Handler: _ClusterManager_SetNodePoolAutoscaling_Handler, + }, + { + MethodName: "SetLoggingService", + Handler: _ClusterManager_SetLoggingService_Handler, + }, + { + MethodName: "SetMonitoringService", + Handler: _ClusterManager_SetMonitoringService_Handler, + }, + { + MethodName: "SetAddonsConfig", + Handler: _ClusterManager_SetAddonsConfig_Handler, + }, + { + MethodName: "SetLocations", + Handler: _ClusterManager_SetLocations_Handler, + }, + { + MethodName: "UpdateMaster", + Handler: _ClusterManager_UpdateMaster_Handler, + }, + { + MethodName: "SetMasterAuth", + Handler: _ClusterManager_SetMasterAuth_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _ClusterManager_DeleteCluster_Handler, + }, + { + MethodName: "ListOperations", + Handler: _ClusterManager_ListOperations_Handler, + }, + { + MethodName: "GetOperation", + Handler: _ClusterManager_GetOperation_Handler, + }, + { + MethodName: "CancelOperation", + Handler: _ClusterManager_CancelOperation_Handler, + }, + { + MethodName: "GetServerConfig", + Handler: _ClusterManager_GetServerConfig_Handler, + }, + { + MethodName: "ListNodePools", + Handler: _ClusterManager_ListNodePools_Handler, + }, + { + MethodName: "GetNodePool", + Handler: _ClusterManager_GetNodePool_Handler, + }, + { + MethodName: "CreateNodePool", + Handler: _ClusterManager_CreateNodePool_Handler, + }, + { + MethodName: "DeleteNodePool", + Handler: _ClusterManager_DeleteNodePool_Handler, + }, + { + MethodName: "RollbackNodePoolUpgrade", + Handler: _ClusterManager_RollbackNodePoolUpgrade_Handler, + }, + { + MethodName: "SetNodePoolManagement", + Handler: _ClusterManager_SetNodePoolManagement_Handler, + }, + { + MethodName: "SetLabels", + Handler: _ClusterManager_SetLabels_Handler, + }, + { + MethodName: "SetLegacyAbac", + Handler: _ClusterManager_SetLegacyAbac_Handler, + }, + { + MethodName: "StartIPRotation", + Handler: _ClusterManager_StartIPRotation_Handler, + }, + { + MethodName: "CompleteIPRotation", + Handler: _ClusterManager_CompleteIPRotation_Handler, + }, + { + MethodName: "SetNodePoolSize", + Handler: _ClusterManager_SetNodePoolSize_Handler, + }, + { + MethodName: "SetNetworkPolicy", + Handler: _ClusterManager_SetNetworkPolicy_Handler, + }, + { + MethodName: "SetMaintenancePolicy", + Handler: _ClusterManager_SetMaintenancePolicy_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/container/v1/cluster_service.proto", +} + +func init() { + proto.RegisterFile("google/container/v1/cluster_service.proto", fileDescriptor_cluster_service_0391086e6c7dadfe) +} + +var fileDescriptor_cluster_service_0391086e6c7dadfe = []byte{ + // 5271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x3c, 0x6d, 0x8c, 0x24, 0xd7, + 0x51, 0xe9, 0xd9, 0xd9, 0xd9, 0x9d, 0x9a, 0xd9, 0xd9, 0xd9, 0xb7, 0x1f, 0x37, 0x1e, 0xdf, 0xd9, + 0x77, 0x1d, 
0x9f, 0x7d, 0xb7, 0xb6, 0x77, 0x7c, 0xe7, 0xef, 0xf3, 0xd9, 0xf1, 0xdc, 0xec, 0x78, + 0x6f, 0x7c, 0xfb, 0x31, 0xe9, 0xd9, 0xbd, 0x93, 0x0f, 0x93, 0x56, 0xef, 0x4c, 0xdf, 0x6c, 0x7b, + 0x7b, 0xba, 0x3b, 0xdd, 0x3d, 0x67, 0xef, 0x9d, 0x0e, 0xf1, 0x91, 0x04, 0x83, 0x63, 0x93, 0x90, + 0x04, 0xa4, 0xf0, 0x21, 0x02, 0x24, 0x28, 0x01, 0x41, 0x88, 0x88, 0x04, 0x02, 0x21, 0x21, 0xf1, + 0x83, 0x00, 0x12, 0x08, 0x90, 0x85, 0x90, 0xf8, 0x87, 0x10, 0x41, 0x42, 0x22, 0xc0, 0x1f, 0x04, + 0x88, 0xe8, 0x7d, 0x74, 0xf7, 0xeb, 0x99, 0xee, 0x99, 0x9d, 0xdd, 0xbb, 0xcd, 0xfd, 0xba, 0xed, + 0x7a, 0xaf, 0xde, 0xab, 0xaa, 0x57, 0xaf, 0xaa, 0x5e, 0x55, 0xcd, 0xc1, 0xd9, 0xb6, 0x69, 0xb6, + 0x75, 0xb5, 0xd4, 0x34, 0x0d, 0x57, 0xd1, 0x0c, 0xd5, 0x2e, 0xdd, 0x3c, 0x57, 0x6a, 0xea, 0x5d, + 0xc7, 0x55, 0x6d, 0xd9, 0x51, 0xed, 0x9b, 0x5a, 0x53, 0x5d, 0xb2, 0x6c, 0xd3, 0x35, 0xd1, 0x2c, + 0x9d, 0xba, 0xe4, 0x4f, 0x5d, 0xba, 0x79, 0xae, 0x78, 0x9c, 0xe1, 0x2b, 0x96, 0x56, 0x52, 0x0c, + 0xc3, 0x74, 0x15, 0x57, 0x33, 0x0d, 0x87, 0xa2, 0x14, 0x1f, 0x64, 0xa3, 0xe4, 0x6b, 0xbb, 0x7b, + 0xa3, 0xa4, 0x76, 0x2c, 0x77, 0x8f, 0x0e, 0x8a, 0xdf, 0x1e, 0x07, 0x58, 0x37, 0x5b, 0x6a, 0xc5, + 0x34, 0x6e, 0x68, 0x6d, 0x74, 0x0a, 0xb2, 0x1d, 0xa5, 0xb9, 0xa3, 0x19, 0xaa, 0xec, 0xee, 0x59, + 0x6a, 0x41, 0x38, 0x29, 0x9c, 0x49, 0x4b, 0x19, 0x06, 0xdb, 0xdc, 0xb3, 0x54, 0x74, 0x12, 0xb2, + 0x2d, 0xcd, 0xd9, 0x95, 0x1d, 0xed, 0x96, 0x2a, 0xb7, 0xb7, 0x0b, 0x89, 0x93, 0xc2, 0x99, 0x71, + 0x09, 0x30, 0xac, 0xa1, 0xdd, 0x52, 0x57, 0xb6, 0xf1, 0x22, 0xa6, 0xd2, 0x75, 0x77, 0x64, 0xa7, + 0x69, 0x5a, 0xaa, 0x53, 0x18, 0x3b, 0x39, 0x86, 0x17, 0x21, 0xb0, 0x06, 0x01, 0xa1, 0xc7, 0x60, + 0x9a, 0xf1, 0x25, 0x2b, 0xcd, 0xa6, 0xd9, 0x35, 0xdc, 0x42, 0x9a, 0x6c, 0x95, 0x63, 0xe0, 0x32, + 0x85, 0xa2, 0x1a, 0x4c, 0x76, 0x54, 0x57, 0x69, 0x29, 0xae, 0x52, 0x48, 0x9e, 0x1c, 0x3b, 0x93, + 0x39, 0xff, 0xe4, 0x52, 0x84, 0x08, 0x96, 0x02, 0x1e, 0x96, 0xd6, 0xd8, 0xfc, 0xaa, 0xe1, 0xda, + 0x7b, 0x92, 0x8f, 0x8e, 0x4e, 0x00, 0x68, 0x1d, 0xa5, 0xcd, 0x38, 0x1b, 0x27, 0xdb, 0xa5, 0x09, + 0x84, 0xf0, 0x55, 0x81, 0x94, 0xae, 0x6c, 0xab, 0xba, 0x53, 0x48, 0x91, 0x7d, 0x1e, 0x1f, 0xb6, + 0xcf, 0x2a, 0x99, 0x4d, 0x77, 0x61, 0xa8, 0xe8, 0x51, 0x98, 0xd6, 0xcd, 0xa6, 0xa2, 0xcb, 0x8e, + 0xd3, 0x92, 0x29, 0x5f, 0x13, 0x44, 0x3e, 0x53, 0x04, 0xdc, 0x70, 0x5a, 0x15, 0xc2, 0x16, 0x82, + 0xa4, 0xab, 0xb4, 0x9d, 0xc2, 0x24, 0x11, 0x0d, 0xf9, 0x1b, 0x9d, 0x84, 0x8c, 0x65, 0xab, 0xf8, + 0x70, 0xb4, 0x6d, 0x5d, 0x2d, 0xc0, 0x49, 0xe1, 0xcc, 0xa4, 0xc4, 0x83, 0xd0, 0xeb, 0x90, 0x55, + 0x9a, 0x4d, 0x55, 0x57, 0x6d, 0xc5, 0x35, 0x6d, 0xa7, 0x90, 0x21, 0x84, 0x3e, 0x1a, 0x49, 0x68, + 0x39, 0x98, 0x48, 0xe9, 0x95, 0x42, 0xb8, 0xe8, 0x41, 0x48, 0x93, 0x63, 0x24, 0xc2, 0xc8, 0x12, + 0x61, 0x4c, 0x62, 0x00, 0x91, 0xc5, 0x19, 0xc8, 0x77, 0x34, 0x43, 0x6e, 0x5a, 0x5d, 0xd9, 0xd2, + 0x15, 0xf7, 0x86, 0x69, 0x77, 0x0a, 0x53, 0xf4, 0x7c, 0x3a, 0x9a, 0x51, 0xb1, 0xba, 0x75, 0x06, + 0x2d, 0xbe, 0x04, 0x53, 0x21, 0x79, 0xa3, 0x3c, 0x8c, 0xed, 0xaa, 0x7b, 0x4c, 0x71, 0xf0, 0x9f, + 0x68, 0x0e, 0xc6, 0x6f, 0x2a, 0x7a, 0x57, 0x25, 0x9a, 0x92, 0x96, 0xe8, 0xc7, 0x85, 0xc4, 0x0b, + 0x42, 0xf1, 0x45, 0xc8, 0x70, 0x42, 0x1c, 0x05, 0x55, 0xfc, 0x56, 0x02, 0x60, 0x4d, 0xc1, 0x17, + 0xa4, 0xdc, 0x75, 0x77, 0x50, 0x11, 0x26, 0xbb, 0x8e, 0x6a, 0x1b, 0x4a, 0xc7, 0xd3, 0x59, 0xff, + 0x1b, 0x8f, 0x59, 0x8a, 0xe3, 0xbc, 0x6d, 0xda, 0x2d, 0xb6, 0x8e, 0xff, 0x8d, 0x76, 0xe0, 0x81, + 0xa6, 0xae, 0xa9, 0x86, 0x2b, 0x37, 0x55, 0xdb, 0xd5, 0x6e, 0x68, 0x4d, 0xc5, 0x55, 0xe5, 0x26, + 0x11, 0x58, 0x61, 0xec, 0xa4, 0x70, 
0x26, 0x73, 0xfe, 0x89, 0x48, 0xf1, 0x56, 0x08, 0x56, 0x25, + 0x40, 0x62, 0x42, 0x3e, 0xd6, 0x8c, 0x1e, 0x40, 0xcf, 0xc0, 0x82, 0x77, 0xa3, 0x9b, 0x0a, 0xbf, + 0x5b, 0xa1, 0x45, 0x68, 0x9a, 0x63, 0xa3, 0x15, 0x85, 0xc3, 0x45, 0x4f, 0x02, 0xea, 0xa7, 0xaf, + 0xa0, 0x12, 0x8c, 0x99, 0xbe, 0xad, 0xb0, 0x8a, 0xb3, 0xe9, 0x58, 0x90, 0x37, 0xa8, 0x8a, 0x53, + 0xc8, 0x15, 0x75, 0x4f, 0x6c, 0xc0, 0xb1, 0x18, 0xba, 0xd1, 0x0b, 0x50, 0xd0, 0x1c, 0xa7, 0xab, + 0xca, 0x11, 0xdb, 0x09, 0x44, 0x13, 0x17, 0xc8, 0x78, 0x1f, 0xbe, 0xf8, 0xc1, 0x18, 0x64, 0xcb, + 0xad, 0x96, 0x69, 0x38, 0x6c, 0xa9, 0xab, 0x30, 0xbb, 0xe3, 0xba, 0x96, 0xac, 0x9b, 0x4a, 0x4b, + 0xde, 0x56, 0x74, 0xc5, 0x68, 0x6a, 0x46, 0x9b, 0xac, 0x12, 0xa7, 0xac, 0x97, 0x5d, 0xd7, 0x5a, + 0x35, 0x95, 0xd6, 0x25, 0x6f, 0xb6, 0x34, 0xb3, 0xd3, 0x0b, 0x42, 0xbb, 0x50, 0xdc, 0x31, 0x6d, + 0xed, 0x16, 0x46, 0xd4, 0x65, 0xcb, 0x6c, 0xc9, 0x4a, 0xd7, 0x35, 0x9d, 0xa6, 0xa2, 0xe3, 0xe5, + 0x13, 0x64, 0xf9, 0x68, 0xe3, 0x70, 0xd9, 0x47, 0xab, 0x9b, 0xad, 0x72, 0x80, 0x24, 0x15, 0x76, + 0x62, 0x46, 0xd0, 0x0f, 0xc1, 0xdc, 0x6e, 0x77, 0x5b, 0xb5, 0x0d, 0xd5, 0x55, 0x1d, 0xb9, 0xa5, + 0x38, 0x3b, 0xdb, 0xa6, 0x62, 0xb7, 0x98, 0x4e, 0x9c, 0x89, 0xdc, 0xe6, 0x8a, 0x8f, 0xb0, 0xec, + 0xcd, 0x97, 0x66, 0x77, 0xfb, 0x81, 0xe8, 0x4d, 0x98, 0x37, 0x54, 0xf7, 0x6d, 0xd3, 0xde, 0x95, + 0x2d, 0x53, 0xd7, 0x9a, 0x7b, 0x9e, 0xc6, 0x25, 0x07, 0xac, 0xbe, 0x4e, 0x31, 0xea, 0x04, 0x81, + 0x69, 0xdb, 0xac, 0xd1, 0x0f, 0x14, 0x4b, 0x30, 0xd3, 0x27, 0x4f, 0x7c, 0x09, 0x5a, 0x9a, 0xa3, + 0x6c, 0xeb, 0x6a, 0x8b, 0x9d, 0xa7, 0xff, 0x2d, 0x3e, 0x07, 0x85, 0x38, 0x09, 0x0d, 0xc4, 0x3b, + 0x07, 0xb3, 0x11, 0x2c, 0x0f, 0x43, 0x89, 0xe0, 0x63, 0x20, 0xca, 0x4f, 0x25, 0x60, 0xae, 0x6e, + 0x6b, 0x37, 0xb1, 0xae, 0xb2, 0x2b, 0x42, 0x91, 0x9e, 0x82, 0x39, 0xd5, 0xc0, 0x73, 0x64, 0x8b, + 0x0e, 0xcb, 0x86, 0xd9, 0x52, 0x1d, 0xb6, 0x00, 0xa2, 0x63, 0x0c, 0x13, 0x9b, 0x6d, 0x07, 0x3d, + 0x07, 0xc7, 0x7a, 0x30, 0x54, 0xa3, 0x65, 0x99, 0x9a, 0xe1, 0x12, 0xf5, 0x99, 0x94, 0xe6, 0x43, + 0x48, 0x55, 0x36, 0x88, 0x9e, 0x86, 0x85, 0x0e, 0xb1, 0x35, 0xb2, 0x66, 0xdd, 0x7c, 0x46, 0x6e, + 0x6a, 0x2d, 0x5b, 0xde, 0xd6, 0xcd, 0xe6, 0x2e, 0x51, 0x87, 0xb4, 0x34, 0x4b, 0x47, 0x6b, 0xd6, + 0xcd, 0x67, 0x2a, 0x5a, 0xcb, 0xbe, 0x84, 0x87, 0xd0, 0x59, 0xc8, 0xf7, 0xed, 0x92, 0x24, 0xd3, + 0xa7, 0xad, 0x9e, 0xf5, 0x1f, 0x83, 0x69, 0xab, 0xbb, 0xad, 0x6b, 0xcd, 0x60, 0x26, 0x75, 0x4f, + 0x39, 0x0a, 0xf6, 0x26, 0x8a, 0xdf, 0x15, 0xe0, 0xa1, 0xc0, 0xea, 0xe1, 0x33, 0x53, 0x5b, 0x4c, + 0x9e, 0xde, 0xed, 0x2b, 0xc0, 0x04, 0x65, 0xc2, 0x93, 0xa4, 0xf7, 0x89, 0x3e, 0x01, 0x99, 0x80, + 0x72, 0xa7, 0x90, 0x20, 0xce, 0xe3, 0xe5, 0x48, 0x5d, 0x1b, 0xbc, 0xc7, 0x92, 0xcf, 0xa4, 0x04, + 0x4d, 0xef, 0x4f, 0xa7, 0xb8, 0x06, 0xe9, 0x80, 0xfb, 0x53, 0x24, 0x4a, 0xb0, 0x74, 0x65, 0x4f, + 0xe6, 0x8c, 0x72, 0x86, 0xc1, 0xd6, 0xb1, 0x5d, 0xc6, 0xc6, 0x2a, 0x90, 0x64, 0x82, 0x19, 0x2b, + 0x6f, 0x05, 0xf1, 0x51, 0x80, 0x55, 0xb5, 0xad, 0x34, 0xf7, 0xca, 0xdb, 0x4a, 0x33, 0x9e, 0x2d, + 0xf1, 0xd7, 0x05, 0x98, 0x0a, 0xe9, 0x14, 0x5a, 0x81, 0x49, 0xcb, 0x36, 0x6f, 0x6a, 0x2d, 0xd5, + 0x26, 0x93, 0x73, 0x71, 0xbe, 0x9c, 0xc7, 0x5a, 0xaa, 0x33, 0x14, 0xc9, 0x47, 0xe6, 0x37, 0x4d, + 0x84, 0x37, 0x7d, 0x0a, 0x26, 0xeb, 0xc1, 0xac, 0xb9, 0xba, 0xb4, 0x71, 0xb5, 0xb6, 0x5c, 0x95, + 0xe4, 0xad, 0xf5, 0x46, 0xbd, 0x5a, 0xa9, 0xbd, 0x56, 0xab, 0x2e, 0xe7, 0x3f, 0x82, 0x00, 0x52, + 0x95, 0xf2, 0x6a, 0xad, 0xb2, 0x91, 0x17, 0xc4, 0x3f, 0x49, 0x02, 0xaa, 0xd5, 0xcb, 0x3a, 0x8e, + 0x03, 0x70, 0x74, 0xc6, 0x68, 0x7d, 0x04, 0x72, 0x5d, 0x47, 
0x95, 0x35, 0x4b, 0x56, 0x74, 0x4d, + 0x71, 0x7c, 0xf5, 0xcd, 0x76, 0x1d, 0xb5, 0x66, 0x95, 0x29, 0x0c, 0x3d, 0x0e, 0x33, 0x4d, 0x5b, + 0xc5, 0xaa, 0xe4, 0x74, 0xb7, 0xd9, 0x9d, 0x67, 0x24, 0xe5, 0xe9, 0x40, 0xc3, 0x87, 0x93, 0xd8, + 0xca, 0xff, 0xa2, 0xd2, 0x1f, 0x63, 0xb1, 0x95, 0x0f, 0x26, 0x07, 0xb0, 0x04, 0x33, 0x9e, 0x4b, + 0xf2, 0xf5, 0x9a, 0xaa, 0xe8, 0xa5, 0x44, 0x41, 0x90, 0xa6, 0xd9, 0xa0, 0xa7, 0xd6, 0xe8, 0x0c, + 0xe4, 0xf0, 0x0d, 0xe3, 0x26, 0x8f, 0xfb, 0x93, 0xb3, 0x78, 0xc4, 0x9f, 0xf9, 0x14, 0x20, 0x16, + 0xc7, 0x39, 0xdc, 0xec, 0x94, 0x3f, 0x3b, 0xef, 0x8d, 0xfa, 0x18, 0x1f, 0x83, 0xe3, 0x41, 0xc0, + 0xdb, 0x34, 0x8d, 0x96, 0x62, 0xef, 0xc9, 0xb6, 0x62, 0xb4, 0x55, 0xca, 0xc1, 0x04, 0xe1, 0xe0, + 0x01, 0x36, 0xa7, 0xe1, 0x4d, 0x91, 0xf0, 0x0c, 0xc2, 0x4c, 0x19, 0x4e, 0xf8, 0x5b, 0x46, 0xae, + 0x30, 0x49, 0x56, 0x28, 0x7a, 0x93, 0x22, 0x96, 0x78, 0x16, 0x8e, 0xf5, 0xc9, 0x83, 0x69, 0x67, + 0x3a, 0xe4, 0xa3, 0xc3, 0x17, 0xbd, 0x04, 0x73, 0x61, 0xb1, 0x30, 0x1c, 0xa0, 0x5e, 0x9a, 0x17, + 0x0c, 0x45, 0x78, 0x1e, 0x0a, 0xfd, 0xd2, 0x61, 0x48, 0x19, 0x82, 0x34, 0xdf, 0x2b, 0x1f, 0x7a, + 0x25, 0xbe, 0x39, 0x03, 0x13, 0xcc, 0x06, 0xe2, 0x08, 0x92, 0xbb, 0x58, 0xe4, 0x6f, 0x1c, 0x41, + 0xb6, 0x54, 0xa7, 0x69, 0x6b, 0x16, 0xd6, 0x30, 0x76, 0xa5, 0x78, 0x10, 0x7a, 0x02, 0x90, 0x66, + 0x68, 0xae, 0xa6, 0xe8, 0xc4, 0x58, 0xb2, 0x10, 0x75, 0x8c, 0x84, 0xa8, 0x79, 0x36, 0x42, 0x43, + 0x5c, 0x1c, 0xa5, 0xbe, 0x0a, 0x19, 0x36, 0x8b, 0xf3, 0x4e, 0x0f, 0x0f, 0x89, 0x8b, 0x25, 0x30, + 0x82, 0xf7, 0xc4, 0xab, 0x90, 0x61, 0x96, 0x13, 0x07, 0xff, 0x44, 0x5f, 0xe2, 0x56, 0x08, 0x6c, + 0x8e, 0x04, 0x9d, 0x20, 0xb2, 0x7b, 0x0c, 0x47, 0xd4, 0xed, 0xb6, 0x66, 0xb4, 0xbd, 0x97, 0x10, + 0xd5, 0x23, 0x29, 0xc7, 0xc0, 0x0d, 0x0a, 0xc5, 0xa1, 0x52, 0xc7, 0x34, 0x34, 0xd7, 0xb4, 0xf9, + 0xb9, 0x54, 0x6f, 0x66, 0x82, 0x11, 0x6f, 0x7a, 0x01, 0x26, 0xbc, 0x8b, 0x44, 0x35, 0xc3, 0xfb, + 0x44, 0x8b, 0x51, 0xd7, 0x82, 0x2a, 0x40, 0xdf, 0x95, 0x78, 0x0d, 0xa6, 0x14, 0x12, 0xfb, 0x78, + 0x32, 0x02, 0xc2, 0xe1, 0xa9, 0xe8, 0x90, 0x9c, 0x8b, 0x92, 0xa4, 0xac, 0xc2, 0xc7, 0x4c, 0x0f, + 0x01, 0x70, 0x37, 0x9b, 0x2a, 0x01, 0x07, 0x41, 0x17, 0x81, 0x48, 0x55, 0xb6, 0x4c, 0x53, 0x77, + 0x0a, 0x59, 0x62, 0xba, 0x4f, 0xc4, 0x1e, 0x44, 0xdd, 0x34, 0x75, 0x29, 0x6d, 0xb0, 0xbf, 0x1c, + 0x74, 0x1c, 0xd2, 0x9e, 0xd9, 0x71, 0x0a, 0x53, 0xe4, 0xc9, 0x11, 0x00, 0x38, 0xaf, 0xc8, 0x45, + 0x3c, 0x8a, 0x6e, 0xed, 0x28, 0x85, 0x1c, 0xef, 0x15, 0x03, 0x5f, 0x5f, 0xc6, 0x83, 0xe8, 0x0d, + 0x98, 0xb6, 0x55, 0xc7, 0xec, 0xda, 0x4d, 0x55, 0x66, 0x2f, 0xa7, 0x69, 0x42, 0xd8, 0x53, 0x31, + 0x11, 0x33, 0x11, 0xdd, 0x92, 0xc4, 0x70, 0xf8, 0xe7, 0x53, 0xce, 0x0e, 0x01, 0xb1, 0xbd, 0x23, + 0x2b, 0xca, 0x37, 0x34, 0xa3, 0xad, 0xda, 0x96, 0x8d, 0x5d, 0x62, 0x9e, 0x48, 0x25, 0x4f, 0x06, + 0x5e, 0x0b, 0xe0, 0x58, 0xc7, 0x74, 0xe2, 0x28, 0x64, 0x65, 0x5b, 0x69, 0x16, 0xd0, 0x00, 0x1d, + 0x0b, 0x1c, 0x8a, 0x04, 0x7a, 0xe0, 0x5c, 0x6a, 0x90, 0x0b, 0xc7, 0x63, 0x85, 0x59, 0xb2, 0x88, + 0x38, 0xdc, 0x6d, 0x48, 0x53, 0xa1, 0x10, 0x0c, 0xbd, 0x01, 0x73, 0xc4, 0x96, 0x7b, 0xe2, 0xf5, + 0x16, 0x9c, 0x23, 0x0b, 0x3e, 0x16, 0xb9, 0x60, 0xbf, 0x5b, 0x90, 0x90, 0x66, 0xf5, 0xb9, 0x8a, + 0x1f, 0x81, 0x53, 0xdc, 0x5d, 0xa2, 0x8e, 0x59, 0x66, 0xbb, 0xfb, 0xfa, 0xb7, 0x40, 0xf6, 0x79, + 0xfa, 0x00, 0x5e, 0x5d, 0x7a, 0xa8, 0x33, 0x38, 0xb2, 0xd8, 0x02, 0xd4, 0x51, 0x34, 0xc3, 0x55, + 0x0d, 0xc5, 0x68, 0xaa, 0x1e, 0x63, 0xc7, 0x06, 0x84, 0xf5, 0x6b, 0xc1, 0x74, 0xc6, 0xd7, 0x4c, + 0xa7, 0x17, 0xc4, 0x0b, 0x9f, 0xf1, 0xf0, 0xe0, 0x70, 0xe1, 0x33, 0x92, 0x3d, 0xe1, 
0x33, 0x0a, + 0x65, 0x58, 0xf0, 0x42, 0x2e, 0xff, 0xad, 0x45, 0x97, 0x3c, 0x4d, 0x96, 0x3c, 0x1b, 0xb9, 0x64, + 0x54, 0x70, 0x29, 0xcd, 0x59, 0x51, 0x21, 0xe7, 0x83, 0x90, 0x76, 0x54, 0xfd, 0x86, 0xac, 0x6b, + 0xc6, 0x2e, 0x7b, 0xb7, 0x4d, 0x62, 0xc0, 0xaa, 0x66, 0xec, 0xa2, 0x05, 0x48, 0xde, 0x32, 0x0d, + 0xf6, 0x3a, 0x23, 0x6e, 0x8e, 0x7c, 0xe3, 0xe0, 0xd6, 0x0f, 0xeb, 0xe8, 0x93, 0xcc, 0xff, 0xc6, + 0x77, 0xcf, 0xb3, 0xc7, 0x1e, 0xc5, 0x37, 0x55, 0xdb, 0xc1, 0xd6, 0xbb, 0x4d, 0x3d, 0x01, 0x1b, + 0x66, 0x74, 0x5c, 0xa5, 0x83, 0xe4, 0x35, 0xd9, 0xb5, 0x6d, 0xfc, 0x52, 0x63, 0x3a, 0xe1, 0xa1, + 0xed, 0x30, 0x4f, 0x45, 0x47, 0xe9, 0x91, 0x07, 0x58, 0x1e, 0x9c, 0x5a, 0x7f, 0x0f, 0x47, 0xf3, + 0x29, 0x46, 0x6c, 0x1c, 0x5b, 0x11, 0x0f, 0xeb, 0x61, 0xc8, 0xb0, 0xe0, 0xc3, 0xd5, 0x3a, 0x6a, + 0xe1, 0x2d, 0x6a, 0x9c, 0x28, 0x68, 0x53, 0xeb, 0xa8, 0xe8, 0x25, 0x48, 0x39, 0xae, 0xe2, 0x76, + 0x9d, 0xc2, 0x2e, 0x89, 0xb6, 0x3e, 0x3a, 0xf0, 0xfe, 0x37, 0xc8, 0x54, 0x89, 0xa1, 0xa0, 0xd3, + 0x90, 0xa3, 0x7f, 0xc9, 0x1d, 0xd5, 0x71, 0x94, 0xb6, 0x5a, 0xd0, 0xc9, 0x06, 0x53, 0x14, 0xba, + 0x46, 0x81, 0xe8, 0x49, 0x98, 0xed, 0x71, 0xb2, 0x8e, 0x76, 0x4b, 0x2d, 0x74, 0xa8, 0xe7, 0xe2, + 0x7d, 0x6c, 0x43, 0xbb, 0xa5, 0x62, 0x3f, 0x17, 0x11, 0x80, 0x18, 0xd4, 0x82, 0xf4, 0x05, 0x1f, + 0xe7, 0x61, 0x56, 0x33, 0x1c, 0x97, 0xa8, 0x75, 0xdb, 0x36, 0xbb, 0x96, 0xdc, 0xb5, 0x75, 0xa7, + 0x60, 0x62, 0x4b, 0x49, 0xc4, 0x32, 0xe3, 0x0d, 0xaf, 0xe0, 0xd1, 0x2d, 0x5b, 0x77, 0xf0, 0x0e, + 0x21, 0x59, 0x52, 0x4f, 0x6a, 0x51, 0x7a, 0x38, 0x29, 0x52, 0x4f, 0xfa, 0x30, 0x64, 0xd4, 0x77, + 0x2c, 0xcd, 0x66, 0x32, 0xfc, 0x24, 0x95, 0x21, 0x05, 0x11, 0x19, 0x16, 0x61, 0xd2, 0xbb, 0xee, + 0x05, 0x9b, 0x2a, 0x89, 0xf7, 0x5d, 0x2c, 0xc3, 0x6c, 0x84, 0xd1, 0x1c, 0x29, 0x5d, 0xf2, 0x36, + 0xa4, 0xa8, 0xdc, 0xd1, 0x02, 0xa0, 0xc6, 0x66, 0x79, 0x73, 0xab, 0xd1, 0x13, 0xab, 0xe6, 0x21, + 0x4b, 0xa2, 0xd8, 0x46, 0x6d, 0x63, 0xbd, 0xb6, 0xbe, 0x92, 0x17, 0x50, 0x06, 0x26, 0xa4, 0xad, + 0x75, 0xf2, 0x91, 0x40, 0xd3, 0x90, 0x91, 0xaa, 0x95, 0x8d, 0xf5, 0x4a, 0x6d, 0x15, 0x03, 0xc6, + 0x50, 0x16, 0x26, 0x1b, 0x9b, 0x1b, 0xf5, 0x3a, 0xfe, 0x4a, 0xa2, 0x34, 0x8c, 0x57, 0x25, 0x69, + 0x43, 0xca, 0x8f, 0xe3, 0x81, 0xe5, 0xea, 0x8a, 0x54, 0x5e, 0xae, 0x2e, 0xe7, 0x53, 0xe2, 0x97, + 0xc6, 0x61, 0x8a, 0x9d, 0xfc, 0x96, 0xd5, 0x52, 0x5c, 0x15, 0x3f, 0xdb, 0x5a, 0xaa, 0xa3, 0xd9, + 0xd8, 0x78, 0xf1, 0x4a, 0x48, 0xdf, 0x46, 0x88, 0x8d, 0xf1, 0x0a, 0x78, 0x11, 0x8a, 0x1e, 0x46, + 0x84, 0x87, 0xa7, 0x2f, 0xa5, 0x02, 0x9b, 0xb1, 0xd6, 0xe7, 0xe8, 0xb7, 0x60, 0xde, 0xc3, 0x0e, + 0xbb, 0xea, 0xd4, 0x7e, 0x5d, 0xf5, 0x2c, 0xc3, 0x0f, 0x65, 0x39, 0x4a, 0x3d, 0x6c, 0x60, 0xcf, + 0x2c, 0x6b, 0x2d, 0x2f, 0xe0, 0xe0, 0xd8, 0xc0, 0x3e, 0xb8, 0xd6, 0xc2, 0x0a, 0xe3, 0x21, 0x70, + 0x69, 0x48, 0x1a, 0x7b, 0xe4, 0xd9, 0x48, 0xcd, 0xcf, 0x46, 0xee, 0xc2, 0x89, 0xfe, 0xe5, 0xf9, + 0x7c, 0x47, 0x7a, 0x50, 0xaa, 0x80, 0xed, 0xca, 0xa7, 0x3a, 0x8a, 0x3d, 0x14, 0xf1, 0x8f, 0xfc, + 0xc7, 0xc1, 0xa3, 0x57, 0x0e, 0xe2, 0x04, 0x20, 0x71, 0x82, 0x47, 0xd9, 0xaa, 0x1f, 0x2e, 0xbc, + 0x2f, 0xc0, 0x59, 0xff, 0x38, 0x86, 0xfa, 0xa3, 0xec, 0xc1, 0xfd, 0xd1, 0x69, 0xef, 0x48, 0x07, + 0xbb, 0xa5, 0x67, 0x60, 0xa1, 0x87, 0x1c, 0x4f, 0xa3, 0x58, 0x62, 0x2d, 0xb4, 0x0c, 0xd3, 0x29, + 0xf1, 0xbb, 0x29, 0x48, 0x6f, 0x58, 0xaa, 0x4d, 0x98, 0x8a, 0x0c, 0xa6, 0x3d, 0x73, 0x9e, 0xe8, + 0x31, 0xe7, 0xaf, 0x43, 0xce, 0xf4, 0x10, 0xe9, 0x19, 0x8e, 0x0d, 0xb0, 0x7a, 0xfe, 0x1e, 0x4b, + 0xf8, 0x58, 0xa5, 0x29, 0x1f, 0x95, 0x9c, 0xf2, 0xcb, 0xbe, 0xe5, 0x4c, 0x92, 0x35, 0x4e, 0x0f, + 0x59, 0xa3, 
0xc7, 0x76, 0x2e, 0x40, 0xaa, 0xa5, 0xba, 0x8a, 0xa6, 0x33, 0x35, 0x62, 0x5f, 0x11, + 0x36, 0x75, 0x3c, 0xca, 0xa6, 0x86, 0xbc, 0x59, 0xaa, 0xc7, 0x9b, 0x3d, 0x0c, 0x19, 0x57, 0xb1, + 0xdb, 0xaa, 0x4b, 0x87, 0xa9, 0x5a, 0x03, 0x05, 0x91, 0x09, 0xbc, 0xc5, 0x4a, 0x87, 0x2d, 0x16, + 0x7e, 0xda, 0x3b, 0xae, 0x62, 0xbb, 0xd4, 0xda, 0xd1, 0x87, 0x50, 0x9a, 0x40, 0x88, 0xb1, 0x7b, + 0x80, 0x78, 0x44, 0x3a, 0x48, 0x63, 0xdd, 0x09, 0xd5, 0x68, 0xe1, 0x21, 0x51, 0x1a, 0x6a, 0xa8, + 0x32, 0x30, 0x51, 0xaf, 0xae, 0x2f, 0x47, 0xd8, 0xa8, 0x49, 0x48, 0x2e, 0x6f, 0xac, 0x57, 0xa9, + 0x71, 0x2a, 0x5f, 0xda, 0x90, 0x36, 0x89, 0x71, 0x12, 0xff, 0x2f, 0x01, 0x49, 0x22, 0xee, 0x39, + 0xc8, 0x6f, 0xbe, 0x51, 0xaf, 0xf6, 0x2c, 0x88, 0x20, 0x57, 0x91, 0xaa, 0xe5, 0xcd, 0xaa, 0x5c, + 0x59, 0xdd, 0x6a, 0x6c, 0x56, 0xa5, 0xbc, 0x80, 0x61, 0xcb, 0xd5, 0xd5, 0x2a, 0x07, 0x4b, 0x60, + 0xd8, 0x56, 0x9d, 0x18, 0x36, 0x79, 0xad, 0x4c, 0x60, 0x63, 0x68, 0x06, 0xa6, 0x3c, 0xd8, 0xfa, + 0xc6, 0x72, 0xb5, 0x91, 0x4f, 0xe2, 0x69, 0x52, 0xb5, 0x5e, 0xae, 0x49, 0x3e, 0xea, 0x38, 0x45, + 0x5d, 0xe6, 0xb7, 0x48, 0x61, 0x62, 0xd8, 0xb6, 0x18, 0x53, 0xae, 0x6f, 0x6c, 0xac, 0xe6, 0x27, + 0x30, 0x94, 0x6d, 0x1c, 0x40, 0x27, 0xd1, 0x71, 0x28, 0x34, 0xaa, 0x9b, 0x01, 0x48, 0x5e, 0x2b, + 0xaf, 0x97, 0x57, 0xaa, 0x6b, 0xd5, 0xf5, 0xcd, 0x7c, 0x1a, 0xcd, 0xc3, 0x4c, 0x79, 0x6b, 0x73, + 0x43, 0x66, 0xdb, 0x52, 0x42, 0x00, 0x0b, 0x90, 0x80, 0xc3, 0x04, 0x66, 0x50, 0x0e, 0x00, 0x2f, + 0xb6, 0x5a, 0xbe, 0x54, 0x5d, 0x6d, 0xe4, 0xb3, 0x68, 0x16, 0xa6, 0xf1, 0x37, 0xe5, 0x49, 0x2e, + 0x6f, 0x6d, 0x5e, 0xce, 0x4f, 0x11, 0xe9, 0x87, 0x76, 0x6c, 0xd4, 0xae, 0x57, 0xf3, 0x39, 0x1f, + 0x5e, 0xdd, 0xbc, 0xb6, 0x21, 0x5d, 0x91, 0xeb, 0x1b, 0xab, 0xb5, 0xca, 0x1b, 0xf9, 0x69, 0x54, + 0x84, 0x05, 0xba, 0x48, 0x6d, 0x7d, 0xb3, 0xba, 0x5e, 0x5e, 0xaf, 0x54, 0xbd, 0xb1, 0xbc, 0xf8, + 0x15, 0x01, 0xe6, 0x2a, 0x24, 0x5c, 0x60, 0x9e, 0x40, 0x52, 0x3f, 0xd9, 0x55, 0x1d, 0x17, 0x9d, + 0x02, 0xb0, 0x6c, 0xf3, 0x2d, 0xb5, 0xe9, 0x62, 0xcb, 0x29, 0xf8, 0x17, 0x2d, 0xcd, 0xa0, 0xb5, + 0x56, 0xec, 0x2d, 0x7c, 0x0e, 0x26, 0x58, 0xc0, 0xc4, 0x52, 0xb2, 0xc7, 0x07, 0x05, 0x1d, 0x92, + 0x37, 0x19, 0x5f, 0x19, 0x4b, 0xc1, 0xbe, 0x99, 0x5d, 0x09, 0xf6, 0x25, 0x7e, 0x5a, 0x80, 0x99, + 0x15, 0xd5, 0xbd, 0x7b, 0x04, 0x9e, 0x02, 0xf0, 0x5f, 0x91, 0x34, 0x6d, 0xcc, 0x50, 0xbd, 0x27, + 0x64, 0xcb, 0xb7, 0x3a, 0xe3, 0x81, 0xd5, 0x11, 0xff, 0x54, 0x80, 0x39, 0xea, 0x28, 0x8f, 0x94, + 0x94, 0x0b, 0x90, 0xea, 0x92, 0x5d, 0xd9, 0x23, 0x5f, 0x1c, 0x24, 0x4d, 0x4a, 0x9f, 0xc4, 0x30, + 0x22, 0xd9, 0xf8, 0x9e, 0x00, 0xf3, 0x74, 0x9a, 0xff, 0x1e, 0x3d, 0x12, 0x3e, 0x1e, 0x81, 0x6c, + 0xc8, 0x1b, 0x07, 0xd9, 0x2c, 0x30, 0x02, 0x57, 0x7c, 0x8a, 0xcd, 0xf2, 0x1c, 0x05, 0xa5, 0x9c, + 0xe4, 0x3a, 0xbc, 0x98, 0x23, 0x5c, 0x2c, 0x4c, 0xf5, 0x16, 0x0b, 0x3d, 0x9e, 0x27, 0x39, 0x9e, + 0x7f, 0x22, 0x01, 0x27, 0x1a, 0xaa, 0x1b, 0xe5, 0x7c, 0xef, 0x23, 0xde, 0x5f, 0x87, 0x0c, 0x1f, + 0x46, 0x8c, 0x8f, 0x18, 0x46, 0xf0, 0xc8, 0xbe, 0x14, 0x52, 0x9c, 0x14, 0x7e, 0x57, 0x80, 0x42, + 0x43, 0x75, 0x57, 0x43, 0xc9, 0x99, 0x11, 0x04, 0x80, 0x78, 0x01, 0xec, 0x9f, 0xf9, 0x88, 0x34, + 0x51, 0x32, 0x32, 0x4d, 0x14, 0xa5, 0xad, 0x7f, 0x2c, 0xc0, 0x83, 0x0d, 0xd5, 0xed, 0x8b, 0x1d, + 0x8f, 0xe6, 0xdc, 0xa2, 0x13, 0x57, 0xc9, 0xb8, 0xc4, 0x55, 0x94, 0xd0, 0xff, 0x46, 0x80, 0x85, + 0x86, 0xea, 0x86, 0xa2, 0xd6, 0x23, 0xa1, 0xbd, 0x2f, 0xff, 0x95, 0x3c, 0x58, 0xfe, 0x2b, 0x8a, + 0xa9, 0xaf, 0x0a, 0x30, 0x4b, 0x34, 0x89, 0x45, 0x9e, 0x47, 0xc3, 0x51, 0x28, 0x57, 0x96, 0xec, + 0xcd, 0x95, 0x45, 0xd1, 0xf9, 0xdb, 
0x02, 0xcc, 0x52, 0x5b, 0x47, 0x43, 0xcc, 0xa3, 0xa1, 0xf3, + 0x34, 0xe4, 0x7a, 0xc2, 0x5d, 0xaa, 0x31, 0x53, 0x9d, 0xd0, 0x93, 0xdf, 0x23, 0x78, 0x82, 0x23, + 0xf8, 0x1f, 0x13, 0x30, 0x87, 0xd5, 0x3d, 0x48, 0xb8, 0x1e, 0x09, 0xc5, 0x97, 0x21, 0xa5, 0x34, + 0x5d, 0x8f, 0xd2, 0x5c, 0x4c, 0x9a, 0x30, 0x8a, 0xb0, 0xa5, 0x32, 0xc1, 0x93, 0x18, 0x3e, 0x7a, + 0xde, 0xf7, 0x56, 0xfb, 0x4c, 0x28, 0xf7, 0xba, 0x2a, 0x5e, 0x1a, 0x75, 0x48, 0xd1, 0xe5, 0x71, + 0x30, 0xb9, 0xb5, 0x7e, 0x65, 0x7d, 0xe3, 0xda, 0x3a, 0x7d, 0x0f, 0xe3, 0x80, 0xa6, 0x5e, 0x6e, + 0x34, 0xae, 0x6d, 0x48, 0xcb, 0x79, 0x01, 0x87, 0x59, 0x2b, 0xd5, 0xf5, 0xaa, 0x84, 0x43, 0x36, + 0x1f, 0x9c, 0xf0, 0x26, 0x6e, 0x35, 0xaa, 0xd2, 0x7a, 0x79, 0xad, 0x9a, 0x1f, 0x13, 0xdf, 0x15, + 0x60, 0x6e, 0x59, 0xd5, 0xd5, 0x23, 0xf6, 0xe1, 0x1e, 0x73, 0x49, 0x8e, 0xb9, 0x1d, 0x98, 0x5d, + 0xd5, 0x1c, 0x2f, 0xac, 0xb9, 0x1b, 0x57, 0x28, 0x08, 0xa0, 0x92, 0xa1, 0x00, 0xaa, 0x0b, 0x73, + 0xe1, 0x9d, 0x1c, 0xcb, 0x34, 0x1c, 0x15, 0xbd, 0x00, 0x93, 0x8c, 0x44, 0xa7, 0x20, 0x90, 0xf4, + 0xf0, 0xe0, 0x48, 0xcd, 0x9f, 0x8d, 0x3e, 0x0a, 0x53, 0x1d, 0xcd, 0x71, 0xb0, 0x51, 0xc4, 0x3b, + 0xd3, 0x8a, 0x65, 0x5a, 0xca, 0x32, 0xe0, 0x75, 0x0c, 0x13, 0xdf, 0x13, 0x60, 0x76, 0x45, 0x75, + 0xfd, 0x27, 0xd2, 0x5d, 0xe0, 0xf0, 0x34, 0x64, 0x83, 0x07, 0x5e, 0x48, 0xd8, 0x19, 0x1f, 0x1e, + 0x13, 0xbd, 0xbd, 0x05, 0xf3, 0x58, 0x08, 0x3e, 0x35, 0xf7, 0x52, 0xe0, 0x1f, 0x08, 0xb0, 0x50, + 0x51, 0x8c, 0xa6, 0xaa, 0xff, 0x00, 0x99, 0xe7, 0x75, 0xed, 0x0e, 0x2c, 0xf4, 0x32, 0xcf, 0x74, + 0xe0, 0x15, 0x00, 0x1f, 0xd9, 0xd3, 0x82, 0x87, 0x06, 0x3f, 0x75, 0x25, 0x0e, 0x63, 0x7f, 0x9a, + 0xd0, 0x86, 0x85, 0x15, 0xd5, 0xc5, 0x5e, 0xd2, 0x4f, 0xe2, 0x1e, 0x5e, 0x1c, 0x51, 0x7c, 0x7e, + 0x2a, 0x01, 0x59, 0x7e, 0x1b, 0xf4, 0x1c, 0x1c, 0x6b, 0xa9, 0x37, 0x94, 0xae, 0xee, 0xf6, 0x25, + 0x71, 0x69, 0x42, 0x61, 0x9e, 0x0d, 0xf7, 0x24, 0x71, 0x97, 0x60, 0xf6, 0xa6, 0xa2, 0x6b, 0xe1, + 0x3c, 0x98, 0xd7, 0x2e, 0x37, 0x43, 0x86, 0xb8, 0x34, 0x98, 0x43, 0x33, 0x48, 0x74, 0x1f, 0x2e, + 0x36, 0x4d, 0x7a, 0x19, 0x24, 0x32, 0x12, 0x64, 0x90, 0x16, 0x81, 0x2e, 0xc1, 0xcd, 0x75, 0x0a, + 0xe3, 0x64, 0xed, 0x69, 0x32, 0xe0, 0x4f, 0x75, 0xd0, 0x79, 0x98, 0xa7, 0x73, 0xc3, 0x2e, 0x85, + 0xb6, 0xc2, 0xa5, 0x25, 0x4a, 0x66, 0x28, 0x81, 0xe2, 0x88, 0xdf, 0x11, 0x60, 0x9e, 0xbe, 0xea, + 0x8e, 0x36, 0xc4, 0xbf, 0x00, 0x69, 0x3f, 0xcc, 0x65, 0xe1, 0xc6, 0x90, 0x4a, 0xd8, 0xa4, 0x17, + 0xfe, 0x72, 0x77, 0x29, 0x15, 0xba, 0x4b, 0xbf, 0x23, 0xc0, 0x3c, 0xb5, 0xd8, 0xf7, 0xe3, 0x73, + 0x25, 0x2a, 0xe8, 0xf8, 0xac, 0x40, 0xed, 0xad, 0x47, 0xef, 0x11, 0x45, 0x47, 0x71, 0xcf, 0xe7, + 0xdf, 0x12, 0x00, 0xad, 0x04, 0x6f, 0x9f, 0xfb, 0x5d, 0x7a, 0xff, 0x9c, 0x84, 0x49, 0x8f, 0xd6, + 0xc8, 0xe4, 0xdf, 0xf3, 0x90, 0x62, 0x01, 0x6d, 0x62, 0x7f, 0x45, 0x6f, 0x36, 0x7d, 0xc4, 0x02, + 0xfb, 0xc0, 0x7a, 0x52, 0x01, 0x26, 0x3c, 0x33, 0x42, 0x1b, 0xfe, 0xbc, 0x4f, 0x6c, 0x38, 0xa2, + 0xea, 0x15, 0x37, 0xa8, 0xe1, 0xe8, 0xaf, 0x55, 0x5c, 0xf4, 0xd3, 0x8c, 0x6d, 0x12, 0x79, 0x3d, + 0x32, 0xf0, 0xbe, 0x0c, 0xaf, 0xd0, 0xec, 0x44, 0x65, 0x13, 0x7b, 0x1e, 0x96, 0xc9, 0xc3, 0x3c, + 0x2c, 0x2b, 0x00, 0x1d, 0xc5, 0x50, 0xda, 0x6a, 0xc7, 0x53, 0xb5, 0x4c, 0x4c, 0x7e, 0x15, 0x2f, + 0xb5, 0xe6, 0x4f, 0x95, 0x38, 0x34, 0xf1, 0xc7, 0x84, 0xc3, 0x16, 0x3d, 0x16, 0x00, 0xb1, 0x0f, + 0xf9, 0x5a, 0x6d, 0xf3, 0xb2, 0x4c, 0x4b, 0x1c, 0x63, 0xbd, 0xc5, 0x90, 0x64, 0xa8, 0x18, 0x32, + 0x1e, 0x14, 0x43, 0x52, 0xe2, 0xd7, 0x04, 0xc8, 0x85, 0x49, 0x44, 0xa7, 0x20, 0x8b, 0x59, 0x95, + 0xbb, 0x56, 0xdb, 0x56, 0x5a, 0x5e, 0x77, 0x25, 0x61, 0x7f, 
0x8b, 0x82, 0xd0, 0xc3, 0x54, 0x94, + 0xb2, 0xad, 0x5a, 0x8a, 0x66, 0xb3, 0x46, 0x1f, 0xc0, 0x20, 0x89, 0x40, 0x50, 0x1d, 0xa6, 0x19, + 0xba, 0x6c, 0x5a, 0x5e, 0xba, 0x3e, 0xbe, 0xc0, 0x5c, 0x0e, 0xd6, 0xde, 0xa0, 0xd3, 0xa5, 0x5c, + 0x37, 0xf4, 0x2d, 0x76, 0x00, 0xf5, 0xcf, 0x42, 0xcf, 0xc2, 0x31, 0x9e, 0x56, 0x99, 0x4b, 0xea, + 0xd2, 0xdb, 0x32, 0xc7, 0x91, 0xdd, 0xf0, 0xf3, 0xbb, 0x43, 0xfb, 0x50, 0xc4, 0x06, 0xcc, 0xf4, + 0x15, 0x87, 0xd1, 0x2b, 0x90, 0x7a, 0x5b, 0x33, 0x5a, 0xe6, 0xdb, 0x03, 0x7b, 0x45, 0x39, 0xbc, + 0x6b, 0x64, 0xb6, 0xc4, 0xb0, 0xc4, 0xcf, 0x08, 0xa1, 0x55, 0xe9, 0x28, 0x6a, 0x43, 0xa1, 0xa5, + 0x68, 0xfa, 0x9e, 0xcc, 0x17, 0xaf, 0xd9, 0x3e, 0xf4, 0x72, 0x47, 0x77, 0x87, 0x2d, 0x63, 0xa4, + 0xbe, 0xe5, 0x2e, 0x7f, 0x44, 0x5a, 0x68, 0x45, 0x8e, 0x5c, 0x9a, 0x84, 0x14, 0xad, 0x89, 0x8b, + 0x0d, 0x58, 0x88, 0xc6, 0xee, 0x49, 0x8c, 0x27, 0x7a, 0x13, 0xe3, 0x45, 0x98, 0x6c, 0x75, 0x69, + 0xd4, 0xc3, 0x7a, 0xb6, 0xfc, 0x6f, 0xf1, 0x7f, 0x04, 0x38, 0xce, 0xa5, 0x97, 0x38, 0xa5, 0xbf, + 0x8f, 0x8c, 0xed, 0xdd, 0xb8, 0xb8, 0x91, 0xaf, 0xb4, 0xbf, 0xa2, 0x19, 0x0e, 0x8f, 0xfb, 0x86, + 0x76, 0x4b, 0xbd, 0x9f, 0xf8, 0x3e, 0xc1, 0xfa, 0x73, 0xa8, 0xb9, 0x1f, 0x27, 0xe6, 0x3e, 0x6d, + 0xf8, 0x76, 0x3e, 0x8a, 0xa3, 0xdf, 0x17, 0xe0, 0x21, 0xc9, 0xd4, 0xf5, 0x6d, 0xa5, 0xb9, 0xeb, + 0xb1, 0xc5, 0x6e, 0xd2, 0xfd, 0xee, 0x3e, 0xb7, 0xe8, 0x33, 0x87, 0x8b, 0x3d, 0x58, 0xa0, 0x1f, + 0x6e, 0x53, 0x12, 0x46, 0x6b, 0x53, 0x12, 0x6f, 0xc3, 0x6c, 0x54, 0x75, 0x32, 0xbe, 0xa3, 0xf5, + 0x11, 0xc8, 0x75, 0x34, 0x83, 0x77, 0xb4, 0xf4, 0xc7, 0x28, 0xd9, 0x8e, 0x66, 0x04, 0x4e, 0x16, + 0xcf, 0x52, 0xde, 0xe9, 0x77, 0xc7, 0xd9, 0x8e, 0xf2, 0x8e, 0x3f, 0x4b, 0xfc, 0xfb, 0x04, 0xe4, + 0x1b, 0xaa, 0x4b, 0x0b, 0xec, 0x47, 0x73, 0x00, 0xdb, 0xfd, 0x0d, 0x54, 0xf4, 0x27, 0x2e, 0x2f, + 0xc6, 0x65, 0x46, 0x42, 0xd4, 0x1d, 0xbc, 0x93, 0x6a, 0x3c, 0xa6, 0x93, 0x2a, 0x42, 0x4d, 0xef, + 0x46, 0xf3, 0xc1, 0xaf, 0x0a, 0x24, 0xdf, 0xc4, 0x35, 0x5f, 0x1d, 0x89, 0x78, 0x39, 0xbd, 0x49, + 0x86, 0xf5, 0x26, 0x4a, 0xa7, 0xff, 0x00, 0x1b, 0x18, 0x6c, 0x88, 0x6b, 0x75, 0x89, 0xfd, 0x80, + 0xea, 0x68, 0xd3, 0x36, 0x1c, 0x31, 0xe8, 0x49, 0x40, 0x36, 0x26, 0x42, 0x95, 0x9b, 0xb6, 0xda, + 0x52, 0x0d, 0x1c, 0x35, 0x3a, 0xe4, 0x58, 0x26, 0xa5, 0x19, 0x3a, 0x52, 0x09, 0x06, 0xc4, 0xf7, + 0x05, 0x78, 0xa0, 0x62, 0x76, 0x2c, 0xfc, 0x80, 0xf9, 0x41, 0x91, 0xcf, 0x9b, 0xb6, 0x5d, 0x98, + 0xe9, 0xfb, 0xf9, 0x11, 0xd6, 0x44, 0xee, 0x07, 0x48, 0xec, 0x26, 0x62, 0x6a, 0xc6, 0xa4, 0xbc, + 0xc2, 0xcf, 0xc6, 0x77, 0xf6, 0x2c, 0xf0, 0x30, 0xfa, 0xd0, 0xa5, 0x7a, 0x35, 0xcd, 0xc1, 0xf1, + 0xe3, 0x55, 0xfc, 0x50, 0x80, 0x63, 0xd8, 0x33, 0x84, 0xba, 0xf2, 0x8e, 0x84, 0xf5, 0xfe, 0xb6, + 0xc1, 0xe4, 0x41, 0xdb, 0x06, 0xa3, 0x34, 0xf2, 0x1f, 0x58, 0x55, 0xa2, 0xaf, 0x89, 0x8e, 0x31, + 0x77, 0xa2, 0x9f, 0xb9, 0x61, 0x85, 0x94, 0x13, 0xfd, 0x4c, 0xf1, 0x0c, 0x45, 0x77, 0xf8, 0x25, + 0x0f, 0xdb, 0xe1, 0x17, 0x95, 0x29, 0xab, 0xf9, 0x4d, 0xfb, 0xc1, 0xef, 0x16, 0xbc, 0xf6, 0x57, + 0x21, 0xdc, 0x8f, 0x1b, 0xee, 0x8d, 0x4d, 0xf4, 0xf6, 0xc6, 0x9e, 0xff, 0x8b, 0x17, 0x21, 0xc7, + 0x32, 0x2b, 0x34, 0xa0, 0xb0, 0xd1, 0x5f, 0x0a, 0x90, 0xe5, 0xb3, 0x91, 0x28, 0xfa, 0x1d, 0x12, + 0x91, 0x1a, 0x2d, 0x9e, 0xdd, 0xc7, 0x4c, 0xea, 0xed, 0xc4, 0xf6, 0x8f, 0xff, 0xed, 0x3f, 0x7d, + 0x21, 0xa1, 0xa0, 0x27, 0x4a, 0x37, 0xcf, 0x95, 0x6e, 0xd3, 0x97, 0xf0, 0xcb, 0xec, 0x00, 0x9c, + 0xd2, 0x62, 0xc9, 0x2f, 0x1b, 0x94, 0x16, 0xef, 0x78, 0x3f, 0xe7, 0x74, 0xae, 0x9f, 0x43, 0x25, + 0x3c, 0xdf, 0x9f, 0x77, 0x3b, 0x38, 0xc4, 0x3b, 0x25, 0x92, 0xde, 0x2a, 0xdd, 0xc6, 
0xff, 0x04, + 0x28, 0xe8, 0xcf, 0x04, 0x80, 0xa0, 0x38, 0x8d, 0xa2, 0x8f, 0xa2, 0xaf, 0x7a, 0x5d, 0x1c, 0x98, + 0x68, 0x15, 0x5d, 0x42, 0xbd, 0xc1, 0xa8, 0xc7, 0x87, 0x12, 0x43, 0xbb, 0x4f, 0x47, 0x69, 0xf1, + 0xce, 0xf5, 0x57, 0xd0, 0xc5, 0x11, 0xa9, 0x2f, 0xdd, 0x0e, 0x54, 0xee, 0x0e, 0xfa, 0x8e, 0x00, + 0x53, 0xa1, 0x5e, 0x00, 0x14, 0x2d, 0xf0, 0xa8, 0x7e, 0x81, 0xe2, 0x90, 0x9c, 0xa1, 0xd8, 0x21, + 0x2c, 0xb5, 0xc5, 0x91, 0x0e, 0xe4, 0x82, 0xb0, 0x78, 0xfd, 0x19, 0x71, 0xd4, 0x33, 0xb9, 0x20, + 0x2c, 0xa2, 0x0f, 0x05, 0x98, 0x0a, 0xd5, 0xea, 0x63, 0x78, 0x89, 0xaa, 0xe7, 0x0f, 0xe5, 0xe5, + 0x16, 0xe1, 0xc5, 0x2d, 0x8e, 0x74, 0x3c, 0x98, 0x97, 0x72, 0xf1, 0x50, 0x27, 0x84, 0x19, 0xfb, + 0x4c, 0x02, 0x72, 0xe1, 0xea, 0x3d, 0x5a, 0x1c, 0xc0, 0x59, 0x4f, 0xd6, 0x67, 0x28, 0x6b, 0xbf, + 0x21, 0x10, 0xde, 0x7e, 0x45, 0x28, 0xbe, 0x30, 0x02, 0x73, 0x25, 0x3f, 0x50, 0x64, 0x8c, 0x2a, + 0xe2, 0x9b, 0x87, 0x61, 0x94, 0x5b, 0xef, 0x36, 0x1f, 0x07, 0xdf, 0x29, 0xd1, 0x62, 0x11, 0x16, + 0xc4, 0x37, 0x12, 0xa1, 0x57, 0x07, 0x1f, 0x95, 0x9e, 0x8f, 0x8b, 0xd1, 0xe2, 0xeb, 0xff, 0x43, + 0x05, 0xf3, 0x87, 0x54, 0x30, 0xbf, 0x27, 0x88, 0x2b, 0x07, 0x16, 0x8c, 0xa3, 0xba, 0xdc, 0xce, + 0x58, 0x4e, 0x37, 0x44, 0xe5, 0xde, 0xc8, 0x49, 0x09, 0xed, 0x83, 0xfe, 0x53, 0x80, 0x99, 0xbe, + 0xca, 0x3f, 0x7a, 0x32, 0x36, 0x96, 0x8d, 0xea, 0x10, 0x18, 0x2a, 0xa2, 0x0f, 0xa8, 0x88, 0x7e, + 0x52, 0x10, 0x9f, 0x1f, 0xe9, 0x62, 0x38, 0xfe, 0x86, 0x58, 0x24, 0x35, 0x71, 0xf9, 0x50, 0x22, + 0xd1, 0xfd, 0xa5, 0xd0, 0xff, 0xd3, 0xe0, 0xb6, 0xbf, 0xef, 0x34, 0xbe, 0xbc, 0x19, 0xd3, 0x66, + 0x30, 0x94, 0xf7, 0x2f, 0x52, 0xde, 0xdf, 0x17, 0xc4, 0x0b, 0xa3, 0xf2, 0x1e, 0xec, 0x89, 0xd9, + 0x5f, 0xa5, 0xfa, 0x75, 0x60, 0xf6, 0x3b, 0xfc, 0x6a, 0xe8, 0xdf, 0x04, 0x98, 0xee, 0x69, 0x3e, + 0x40, 0x8f, 0xc7, 0x31, 0x1f, 0xd1, 0xa2, 0x30, 0x94, 0xef, 0xcf, 0x52, 0xbe, 0x3f, 0x2d, 0x88, + 0xcf, 0x8d, 0xca, 0x37, 0xdd, 0x0e, 0xf3, 0x7c, 0x59, 0xac, 0x1c, 0x8a, 0x67, 0xc5, 0x5b, 0x09, + 0xf3, 0x9b, 0xe5, 0xfb, 0x12, 0x62, 0x82, 0x8b, 0x88, 0xd6, 0x85, 0xa1, 0x9c, 0xfe, 0x2c, 0xe5, + 0xf4, 0x3d, 0x41, 0x7c, 0x71, 0x74, 0xed, 0x66, 0x43, 0x98, 0xd9, 0x2b, 0xe2, 0x6b, 0x87, 0xd4, + 0xef, 0x60, 0x31, 0xf4, 0xaf, 0x02, 0x64, 0xf9, 0xfe, 0x86, 0x18, 0x7e, 0x23, 0x5a, 0x20, 0x86, + 0xf2, 0xfb, 0x33, 0x94, 0xdf, 0x77, 0x47, 0xe5, 0xb7, 0xcb, 0xed, 0x75, 0x37, 0x0e, 0xb7, 0xe3, + 0xad, 0x84, 0xfe, 0x4b, 0x80, 0xa9, 0x50, 0x0b, 0x42, 0x8c, 0x4f, 0x8f, 0x6a, 0x53, 0x18, 0xca, + 0xee, 0xcf, 0x53, 0x76, 0x3f, 0x77, 0x90, 0x0b, 0xec, 0x6f, 0x86, 0xf9, 0xdd, 0x10, 0x5f, 0x3f, + 0x94, 0x8f, 0xef, 0x5d, 0x10, 0xfd, 0xb5, 0x00, 0x53, 0xa1, 0x96, 0x85, 0x18, 0xb6, 0xa3, 0xda, + 0x1a, 0x86, 0xb2, 0xcd, 0x22, 0xcd, 0xc5, 0x11, 0x23, 0xcd, 0xc5, 0xc3, 0x45, 0x9a, 0x1f, 0x0a, + 0x90, 0x0b, 0xd7, 0xa3, 0x63, 0x82, 0x98, 0xc8, 0x8a, 0x7d, 0xf1, 0xf1, 0x7d, 0xcd, 0x65, 0x2f, + 0x81, 0x5d, 0xc2, 0xa1, 0x8a, 0x96, 0xf6, 0x13, 0x78, 0x06, 0x85, 0xed, 0xeb, 0x4f, 0xa3, 0x73, + 0xfb, 0xe4, 0x91, 0xab, 0x86, 0xff, 0x9d, 0x00, 0x59, 0xbe, 0xe5, 0x21, 0xe6, 0x3e, 0x46, 0x74, + 0x45, 0xec, 0x37, 0xe8, 0x64, 0x7c, 0x0c, 0x3a, 0xa9, 0x80, 0x1e, 0x7c, 0x56, 0x97, 0xd0, 0xab, + 0x23, 0xf3, 0x51, 0xba, 0xcd, 0x37, 0x1a, 0xdc, 0x41, 0xff, 0x22, 0xc0, 0x74, 0x4f, 0x3f, 0x43, + 0x8c, 0x1b, 0x89, 0xee, 0x7a, 0x28, 0x2e, 0x78, 0x93, 0xbd, 0xff, 0xe8, 0x66, 0xa9, 0xda, 0xb1, + 0xdc, 0x3d, 0xce, 0xc8, 0x3c, 0x3b, 0x1a, 0x5b, 0x17, 0x9a, 0x64, 0xa3, 0xd1, 0x3c, 0x66, 0x2c, + 0x83, 0xc1, 0x6a, 0xf8, 0xb6, 0x4d, 0xf7, 0xf4, 0x2a, 0xc4, 0xb0, 0x1a, 0xdd, 0xd1, 0x50, 0x3c, + 0x15, 0x63, 
0x93, 0x82, 0x99, 0xde, 0x5b, 0x68, 0xf8, 0x51, 0xde, 0x29, 0x39, 0x1c, 0xde, 0xf5, + 0x67, 0xd1, 0xd3, 0xfb, 0xe4, 0x94, 0xa2, 0xb1, 0xba, 0x27, 0xb6, 0x9b, 0xa1, 0x9c, 0x30, 0x8a, + 0x7f, 0x48, 0xf7, 0xd6, 0xac, 0x8b, 0x8b, 0xfb, 0x99, 0xca, 0xae, 0xda, 0x7b, 0xf4, 0x34, 0x3f, + 0x25, 0xa0, 0x17, 0x86, 0x5f, 0x36, 0xde, 0xa0, 0x04, 0x71, 0xec, 0xf5, 0xcb, 0xe8, 0xb5, 0xbb, + 0x13, 0x11, 0xa3, 0xff, 0x10, 0x20, 0xc3, 0xd5, 0xbd, 0xd1, 0x63, 0x71, 0xa7, 0xd8, 0xfb, 0x46, + 0x1a, 0x9c, 0x15, 0x17, 0xbf, 0x4c, 0xb9, 0xfc, 0x82, 0xc7, 0xe5, 0x01, 0x5e, 0x02, 0xd7, 0xaf, + 0xa1, 0xad, 0x7b, 0x12, 0xf7, 0xa3, 0x7f, 0x17, 0x20, 0x17, 0xee, 0xfc, 0x88, 0xb1, 0xac, 0x91, + 0xed, 0x21, 0x43, 0x8d, 0xd0, 0xe7, 0x29, 0xef, 0x3f, 0x2d, 0x88, 0x07, 0x3e, 0xe1, 0xbb, 0x11, + 0x03, 0xf1, 0x8b, 0xa1, 0xff, 0x16, 0x20, 0x17, 0x6e, 0x10, 0x89, 0x61, 0x39, 0xb2, 0x8b, 0x64, + 0x28, 0xcb, 0xfe, 0x71, 0x2f, 0x1e, 0xe2, 0xb8, 0x17, 0xef, 0xd1, 0x71, 0xff, 0x5a, 0x02, 0x8e, + 0xc5, 0x14, 0xaa, 0x50, 0xf4, 0x0f, 0xbb, 0x06, 0x97, 0xb5, 0x86, 0x4a, 0xe3, 0x5b, 0x54, 0x1a, + 0xbf, 0x29, 0x88, 0xe5, 0x03, 0x3f, 0x83, 0x6d, 0x46, 0x02, 0xd6, 0x84, 0xa6, 0xf8, 0x89, 0x7b, + 0x22, 0x19, 0x7e, 0x13, 0xf4, 0xf5, 0x04, 0xcc, 0x47, 0x96, 0x67, 0xd1, 0xb9, 0x61, 0x99, 0x82, + 0xbe, 0x52, 0xee, 0x50, 0x09, 0xfd, 0x11, 0x95, 0xd0, 0xb7, 0x05, 0xaa, 0xe3, 0x07, 0x4d, 0x14, + 0x04, 0x1b, 0x63, 0x31, 0xed, 0x88, 0xcd, 0x7b, 0x93, 0x27, 0xe8, 0xdd, 0x09, 0x7d, 0x4f, 0x80, + 0xb4, 0x5f, 0xcd, 0x42, 0xa7, 0xf7, 0x55, 0xed, 0x1a, 0x2a, 0x93, 0x5f, 0xa4, 0x32, 0xf9, 0x92, + 0x20, 0xbe, 0x32, 0x6a, 0x70, 0x1d, 0x2e, 0x70, 0x61, 0x59, 0xd4, 0xc5, 0x2b, 0x87, 0x92, 0x85, + 0xdd, 0xbb, 0x22, 0xe6, 0x79, 0x2a, 0x54, 0x04, 0x8b, 0x7f, 0x58, 0xf4, 0x15, 0xca, 0xee, 0x65, + 0x66, 0x20, 0xd8, 0xec, 0x6e, 0x64, 0x06, 0x74, 0x7e, 0x35, 0xf4, 0xbf, 0x02, 0x4c, 0xf7, 0xd4, + 0xd4, 0xe2, 0x32, 0x03, 0x91, 0x95, 0xb7, 0xa1, 0x7c, 0xff, 0x02, 0xe5, 0xfb, 0x8b, 0x82, 0x78, + 0x71, 0x34, 0xbe, 0xc9, 0x76, 0x96, 0xb7, 0x1d, 0xe6, 0xfc, 0xe3, 0xe2, 0xea, 0xe1, 0x9e, 0x54, + 0x7d, 0x4b, 0xa2, 0x77, 0x13, 0x80, 0xfa, 0xcb, 0x72, 0x68, 0x29, 0xda, 0x57, 0xc6, 0xd5, 0xef, + 0x86, 0x0a, 0xe1, 0x2b, 0x54, 0x08, 0x5f, 0x16, 0xc4, 0x8f, 0x8d, 0x24, 0x84, 0xa6, 0xb7, 0x63, + 0x48, 0x0e, 0x9b, 0xe2, 0xc6, 0xa1, 0xe4, 0x10, 0xb9, 0x2a, 0xfa, 0x7c, 0x82, 0xe4, 0x88, 0xf8, + 0xf6, 0x8d, 0xf8, 0x1c, 0x51, 0x44, 0x93, 0xc7, 0x50, 0x21, 0x7c, 0x93, 0x0a, 0xe1, 0xeb, 0x82, + 0xf8, 0xea, 0x61, 0x2c, 0x22, 0xde, 0x12, 0x4b, 0x61, 0x5b, 0xfc, 0xe1, 0x7b, 0x66, 0x0b, 0xd9, + 0x1e, 0xe8, 0x47, 0x69, 0xc7, 0x41, 0xf8, 0xff, 0xae, 0x7a, 0x22, 0x56, 0x28, 0x11, 0xf5, 0xcd, + 0xa1, 0x52, 0xf9, 0x25, 0x2a, 0x95, 0x9f, 0x13, 0xc4, 0x97, 0x47, 0xb5, 0x0b, 0xa1, 0xfd, 0xb0, + 0x48, 0x24, 0x71, 0xed, 0xb0, 0x39, 0x87, 0xde, 0x35, 0xd1, 0xe7, 0xbc, 0x5f, 0xa2, 0xf4, 0x96, + 0x0c, 0x07, 0xfc, 0x36, 0x24, 0xba, 0x1a, 0x3a, 0x54, 0x14, 0x5f, 0xa5, 0xa2, 0xf8, 0xe5, 0xd1, + 0x82, 0x0a, 0xe6, 0x26, 0x7b, 0xf6, 0xc4, 0xe2, 0xb8, 0x2a, 0x7e, 0xfc, 0xf0, 0x29, 0x98, 0xfe, + 0x75, 0x2f, 0x7d, 0x4d, 0x80, 0x63, 0x4d, 0xb3, 0x13, 0xc5, 0xcd, 0xa5, 0xd9, 0x8a, 0xf7, 0xdf, + 0x5f, 0x91, 0xbc, 0x71, 0x1d, 0xbf, 0x73, 0xeb, 0xc2, 0xf5, 0x8b, 0x6c, 0x6e, 0xdb, 0xd4, 0x15, + 0xa3, 0xbd, 0x64, 0xda, 0xed, 0x52, 0x5b, 0x35, 0xc8, 0x2b, 0xb8, 0x44, 0x87, 0x14, 0x4b, 0x73, + 0x42, 0xff, 0xbb, 0xec, 0x4b, 0xfe, 0xc7, 0x37, 0x12, 0x0f, 0xac, 0x50, 0xf4, 0x8a, 0x6e, 0x76, + 0x5b, 0x4b, 0x15, 0x7f, 0xc3, 0xab, 0xe7, 0xfe, 0xdc, 0x1b, 0x7b, 0x93, 0x8c, 0xbd, 0xe9, 0x8f, + 0xbd, 0x79, 0xf5, 0xdc, 0x76, 0x8a, 
0x6c, 0xf0, 0xf4, 0xf7, 0x03, 0x00, 0x00, 0xff, 0xff, 0x61, + 0xea, 0xa4, 0xfb, 0xbd, 0x56, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/container/v1alpha1/cluster_service.pb.go b/vendor/google.golang.org/genproto/googleapis/container/v1alpha1/cluster_service.pb.go new file mode 100644 index 0000000..7252d29 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/container/v1alpha1/cluster_service.pb.go @@ -0,0 +1,6835 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/container/v1alpha1/cluster_service.proto + +package container // import "google.golang.org/genproto/googleapis/container/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Effect in taint. +type NodeTaint_Effect int32 + +const ( + // Not set + NodeTaint_EFFECT_UNSPECIFIED NodeTaint_Effect = 0 + // NoSchedule + NodeTaint_NO_SCHEDULE NodeTaint_Effect = 1 + // PreferNoSchedule + NodeTaint_PREFER_NO_SCHEDULE NodeTaint_Effect = 2 + // NoExecute + NodeTaint_NO_EXECUTE NodeTaint_Effect = 3 +) + +var NodeTaint_Effect_name = map[int32]string{ + 0: "EFFECT_UNSPECIFIED", + 1: "NO_SCHEDULE", + 2: "PREFER_NO_SCHEDULE", + 3: "NO_EXECUTE", +} +var NodeTaint_Effect_value = map[string]int32{ + "EFFECT_UNSPECIFIED": 0, + "NO_SCHEDULE": 1, + "PREFER_NO_SCHEDULE": 2, + "NO_EXECUTE": 3, +} + +func (x NodeTaint_Effect) String() string { + return proto.EnumName(NodeTaint_Effect_name, int32(x)) +} +func (NodeTaint_Effect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{1, 0} +} + +// Allowed Network Policy providers. +type NetworkPolicy_Provider int32 + +const ( + // Not set + NetworkPolicy_PROVIDER_UNSPECIFIED NetworkPolicy_Provider = 0 + // Tigera (Calico Felix). + NetworkPolicy_CALICO NetworkPolicy_Provider = 1 +) + +var NetworkPolicy_Provider_name = map[int32]string{ + 0: "PROVIDER_UNSPECIFIED", + 1: "CALICO", +} +var NetworkPolicy_Provider_value = map[string]int32{ + "PROVIDER_UNSPECIFIED": 0, + "CALICO": 1, +} + +func (x NetworkPolicy_Provider) String() string { + return proto.EnumName(NetworkPolicy_Provider_name, int32(x)) +} +func (NetworkPolicy_Provider) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{10, 0} +} + +// The current status of the cluster. +type Cluster_Status int32 + +const ( + // Not set. + Cluster_STATUS_UNSPECIFIED Cluster_Status = 0 + // The PROVISIONING state indicates the cluster is being created. + Cluster_PROVISIONING Cluster_Status = 1 + // The RUNNING state indicates the cluster has been created and is fully + // usable. + Cluster_RUNNING Cluster_Status = 2 + // The RECONCILING state indicates that some work is actively being done on + // the cluster, such as upgrading the master or node software. 
Details can + // be found in the `statusMessage` field. + Cluster_RECONCILING Cluster_Status = 3 + // The STOPPING state indicates the cluster is being deleted. + Cluster_STOPPING Cluster_Status = 4 + // The ERROR state indicates the cluster may be unusable. Details + // can be found in the `statusMessage` field. + Cluster_ERROR Cluster_Status = 5 +) + +var Cluster_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RECONCILING", + 4: "STOPPING", + 5: "ERROR", +} +var Cluster_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RECONCILING": 3, + "STOPPING": 4, + "ERROR": 5, +} + +func (x Cluster_Status) String() string { + return proto.EnumName(Cluster_Status_name, int32(x)) +} +func (Cluster_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{13, 0} +} + +// Current status of the operation. +type Operation_Status int32 + +const ( + // Not set. + Operation_STATUS_UNSPECIFIED Operation_Status = 0 + // The operation has been created. + Operation_PENDING Operation_Status = 1 + // The operation is currently running. + Operation_RUNNING Operation_Status = 2 + // The operation is done, either cancelled or completed. + Operation_DONE Operation_Status = 3 + // The operation is aborting. + Operation_ABORTING Operation_Status = 4 +) + +var Operation_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", + 4: "ABORTING", +} +var Operation_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, + "ABORTING": 4, +} + +func (x Operation_Status) String() string { + return proto.EnumName(Operation_Status_name, int32(x)) +} +func (Operation_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{15, 0} +} + +// Operation type. +type Operation_Type int32 + +const ( + // Not set. + Operation_TYPE_UNSPECIFIED Operation_Type = 0 + // Cluster create. + Operation_CREATE_CLUSTER Operation_Type = 1 + // Cluster delete. + Operation_DELETE_CLUSTER Operation_Type = 2 + // A master upgrade. + Operation_UPGRADE_MASTER Operation_Type = 3 + // A node upgrade. + Operation_UPGRADE_NODES Operation_Type = 4 + // Cluster repair. + Operation_REPAIR_CLUSTER Operation_Type = 5 + // Cluster update. + Operation_UPDATE_CLUSTER Operation_Type = 6 + // Node pool create. + Operation_CREATE_NODE_POOL Operation_Type = 7 + // Node pool delete. + Operation_DELETE_NODE_POOL Operation_Type = 8 + // Set node pool management. + Operation_SET_NODE_POOL_MANAGEMENT Operation_Type = 9 + // Automatic node pool repair. + Operation_AUTO_REPAIR_NODES Operation_Type = 10 + // Automatic node upgrade. + Operation_AUTO_UPGRADE_NODES Operation_Type = 11 + // Set labels. + Operation_SET_LABELS Operation_Type = 12 + // Set/generate master auth materials + Operation_SET_MASTER_AUTH Operation_Type = 13 + // Set node pool size. + Operation_SET_NODE_POOL_SIZE Operation_Type = 14 + // Updates network policy for a cluster. + Operation_SET_NETWORK_POLICY Operation_Type = 15 + // Set the maintenance policy. 
+ Operation_SET_MAINTENANCE_POLICY Operation_Type = 16 +) + +var Operation_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "CREATE_CLUSTER", + 2: "DELETE_CLUSTER", + 3: "UPGRADE_MASTER", + 4: "UPGRADE_NODES", + 5: "REPAIR_CLUSTER", + 6: "UPDATE_CLUSTER", + 7: "CREATE_NODE_POOL", + 8: "DELETE_NODE_POOL", + 9: "SET_NODE_POOL_MANAGEMENT", + 10: "AUTO_REPAIR_NODES", + 11: "AUTO_UPGRADE_NODES", + 12: "SET_LABELS", + 13: "SET_MASTER_AUTH", + 14: "SET_NODE_POOL_SIZE", + 15: "SET_NETWORK_POLICY", + 16: "SET_MAINTENANCE_POLICY", +} +var Operation_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "CREATE_CLUSTER": 1, + "DELETE_CLUSTER": 2, + "UPGRADE_MASTER": 3, + "UPGRADE_NODES": 4, + "REPAIR_CLUSTER": 5, + "UPDATE_CLUSTER": 6, + "CREATE_NODE_POOL": 7, + "DELETE_NODE_POOL": 8, + "SET_NODE_POOL_MANAGEMENT": 9, + "AUTO_REPAIR_NODES": 10, + "AUTO_UPGRADE_NODES": 11, + "SET_LABELS": 12, + "SET_MASTER_AUTH": 13, + "SET_NODE_POOL_SIZE": 14, + "SET_NETWORK_POLICY": 15, + "SET_MAINTENANCE_POLICY": 16, +} + +func (x Operation_Type) String() string { + return proto.EnumName(Operation_Type_name, int32(x)) +} +func (Operation_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{15, 1} +} + +// Operation type: what type update to perform. +type SetMasterAuthRequest_Action int32 + +const ( + // Operation is unknown and will error out. + SetMasterAuthRequest_UNKNOWN SetMasterAuthRequest_Action = 0 + // Set the password to a user generated value. + SetMasterAuthRequest_SET_PASSWORD SetMasterAuthRequest_Action = 1 + // Generate a new password and set it to that. + SetMasterAuthRequest_GENERATE_PASSWORD SetMasterAuthRequest_Action = 2 + // Set the username. If an empty username is provided, basic authentication + // is disabled for the cluster. If a non-empty username is provided, basic + // authentication is enabled, with either a provided password or a generated + // one. + SetMasterAuthRequest_SET_USERNAME SetMasterAuthRequest_Action = 3 +) + +var SetMasterAuthRequest_Action_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SET_PASSWORD", + 2: "GENERATE_PASSWORD", + 3: "SET_USERNAME", +} +var SetMasterAuthRequest_Action_value = map[string]int32{ + "UNKNOWN": 0, + "SET_PASSWORD": 1, + "GENERATE_PASSWORD": 2, + "SET_USERNAME": 3, +} + +func (x SetMasterAuthRequest_Action) String() string { + return proto.EnumName(SetMasterAuthRequest_Action_name, int32(x)) +} +func (SetMasterAuthRequest_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{26, 0} +} + +// The current status of the node pool instance. +type NodePool_Status int32 + +const ( + // Not set. + NodePool_STATUS_UNSPECIFIED NodePool_Status = 0 + // The PROVISIONING state indicates the node pool is being created. + NodePool_PROVISIONING NodePool_Status = 1 + // The RUNNING state indicates the node pool has been created + // and is fully usable. + NodePool_RUNNING NodePool_Status = 2 + // The RUNNING_WITH_ERROR state indicates the node pool has been created + // and is partially usable. Some error state has occurred and some + // functionality may be impaired. Customer may need to reissue a request + // or trigger a new update. + NodePool_RUNNING_WITH_ERROR NodePool_Status = 3 + // The RECONCILING state indicates that some work is actively being done on + // the node pool, such as upgrading node software. Details can + // be found in the `statusMessage` field. 
+ NodePool_RECONCILING NodePool_Status = 4 + // The STOPPING state indicates the node pool is being deleted. + NodePool_STOPPING NodePool_Status = 5 + // The ERROR state indicates the node pool may be unusable. Details + // can be found in the `statusMessage` field. + NodePool_ERROR NodePool_Status = 6 +) + +var NodePool_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RUNNING_WITH_ERROR", + 4: "RECONCILING", + 5: "STOPPING", + 6: "ERROR", +} +var NodePool_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RUNNING_WITH_ERROR": 3, + "RECONCILING": 4, + "STOPPING": 5, + "ERROR": 6, +} + +func (x NodePool_Status) String() string { + return proto.EnumName(NodePool_Status_name, int32(x)) +} +func (NodePool_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{40, 0} +} + +// Parameters that describe the nodes in a cluster. +type NodeConfig struct { + // The name of a Google Compute Engine [machine + // type](/compute/docs/machine-types) (e.g. + // `n1-standard-1`). + // + // If unspecified, the default machine type is + // `n1-standard-1`. + MachineType string `protobuf:"bytes,1,opt,name=machine_type,json=machineType,proto3" json:"machine_type,omitempty"` + // Size of the disk attached to each node, specified in GB. + // The smallest allowed disk size is 10GB. + // + // If unspecified, the default disk size is 100GB. + DiskSizeGb int32 `protobuf:"varint,2,opt,name=disk_size_gb,json=diskSizeGb,proto3" json:"disk_size_gb,omitempty"` + // The set of Google API scopes to be made available on all of the + // node VMs under the "default" service account. + // + // The following scopes are recommended, but not required, and by default are + // not included: + // + // * `https://www.googleapis.com/auth/compute` is required for mounting + // persistent storage on your nodes. + // * `https://www.googleapis.com/auth/devstorage.read_only` is required for + // communicating with **gcr.io** + // (the [Google Container Registry](/container-registry/)). + // + // If unspecified, no scopes are added, unless Cloud Logging or Cloud + // Monitoring are enabled, in which case their required scopes will be added. + OauthScopes []string `protobuf:"bytes,3,rep,name=oauth_scopes,json=oauthScopes,proto3" json:"oauth_scopes,omitempty"` + // The Google Cloud Platform Service Account to be used by the node VMs. If + // no Service Account is specified, the "default" service account is used. + ServiceAccount string `protobuf:"bytes,9,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // The metadata key/value pairs assigned to instances in the cluster. + // + // Keys must conform to the regexp [a-zA-Z0-9-_]+ and be less than 128 bytes + // in length. These are reflected as part of a URL in the metadata server. + // Additionally, to avoid ambiguity, keys must not conflict with any other + // metadata keys for the project or be one of the four reserved keys: + // "instance-template", "kube-env", "startup-script", and "user-data" + // + // Values are free-form strings, and only have meaning as interpreted by + // the image running in the instance. The only restriction placed on them is + // that each value's size must be less than or equal to 32 KB. + // + // The total size of all keys and values must be less than 512 KB. 
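The metadata constraints described above (keys matching [a-zA-Z0-9-_]+, under 128 bytes, not one of the four reserved names, values of at most 32 KB) are enforced by the service; the helper below is a hypothetical client-side pre-check only, not part of the generated API, and needs the standard regexp package.

// validMetadataKey mirrors the documented key rules for NodeConfig metadata;
// the server remains the authority on what it accepts.
var metadataKeyRE = regexp.MustCompile(`^[a-zA-Z0-9-_]+$`)

var reservedMetadataKeys = map[string]bool{
	"instance-template": true,
	"kube-env":          true,
	"startup-script":    true,
	"user-data":         true,
}

func validMetadataKey(k string) bool {
	return len(k) < 128 && metadataKeyRE.MatchString(k) && !reservedMetadataKeys[k]
}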
+ Metadata map[string]string `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The image type to use for this node. Note that for a given image type, + // the latest version of it will be used. + ImageType string `protobuf:"bytes,5,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"` + // The map of Kubernetes labels (key/value pairs) to be applied to each node. + // These will added in addition to any default label(s) that + // Kubernetes may apply to the node. + // In case of conflict in label keys, the applied set may differ depending on + // the Kubernetes version -- it's best to assume the behavior is undefined + // and conflicts should be avoided. + // For more information, including usage and the valid values, see: + // https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/ + Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The number of local SSD disks to be attached to the node. + // + // The limit for this value is dependant upon the maximum number of + // disks available on a machine per zone. See: + // https://cloud.google.com/compute/docs/disks/local-ssd#local_ssd_limits + // for more information. + LocalSsdCount int32 `protobuf:"varint,7,opt,name=local_ssd_count,json=localSsdCount,proto3" json:"local_ssd_count,omitempty"` + // The list of instance tags applied to all nodes. Tags are used to identify + // valid sources or targets for network firewalls and are specified by + // the client during cluster or node pool creation. Each tag within the list + // must comply with RFC1035. + Tags []string `protobuf:"bytes,8,rep,name=tags,proto3" json:"tags,omitempty"` + // Whether the nodes are created as preemptible VM instances. See: + // https://cloud.google.com/compute/docs/instances/preemptible for more + // inforamtion about preemptible VM instances. + Preemptible bool `protobuf:"varint,10,opt,name=preemptible,proto3" json:"preemptible,omitempty"` + // A list of hardware accelerators to be attached to each node. + // See https://cloud.google.com/compute/docs/gpus for more information about + // support for GPUs. + Accelerators []*AcceleratorConfig `protobuf:"bytes,11,rep,name=accelerators,proto3" json:"accelerators,omitempty"` + // Minimum CPU platform to be used by this instance. The instance may be + // scheduled on the specified or newer CPU platform. Applicable values are the + // friendly names of CPU platforms, such as + // minCpuPlatform: "Intel Haswell" or + // minCpuPlatform: "Intel Sandy Bridge". For more + // information, read [how to specify min CPU platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform) + MinCpuPlatform string `protobuf:"bytes,13,opt,name=min_cpu_platform,json=minCpuPlatform,proto3" json:"min_cpu_platform,omitempty"` + // List of kubernetes taints to be applied to each node. 
+ // + // For more information, including usage and the valid values, see: + // https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + Taints []*NodeTaint `protobuf:"bytes,15,rep,name=taints,proto3" json:"taints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeConfig) Reset() { *m = NodeConfig{} } +func (m *NodeConfig) String() string { return proto.CompactTextString(m) } +func (*NodeConfig) ProtoMessage() {} +func (*NodeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{0} +} +func (m *NodeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeConfig.Unmarshal(m, b) +} +func (m *NodeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeConfig.Marshal(b, m, deterministic) +} +func (dst *NodeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeConfig.Merge(dst, src) +} +func (m *NodeConfig) XXX_Size() int { + return xxx_messageInfo_NodeConfig.Size(m) +} +func (m *NodeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NodeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeConfig proto.InternalMessageInfo + +func (m *NodeConfig) GetMachineType() string { + if m != nil { + return m.MachineType + } + return "" +} + +func (m *NodeConfig) GetDiskSizeGb() int32 { + if m != nil { + return m.DiskSizeGb + } + return 0 +} + +func (m *NodeConfig) GetOauthScopes() []string { + if m != nil { + return m.OauthScopes + } + return nil +} + +func (m *NodeConfig) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *NodeConfig) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *NodeConfig) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *NodeConfig) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *NodeConfig) GetLocalSsdCount() int32 { + if m != nil { + return m.LocalSsdCount + } + return 0 +} + +func (m *NodeConfig) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *NodeConfig) GetPreemptible() bool { + if m != nil { + return m.Preemptible + } + return false +} + +func (m *NodeConfig) GetAccelerators() []*AcceleratorConfig { + if m != nil { + return m.Accelerators + } + return nil +} + +func (m *NodeConfig) GetMinCpuPlatform() string { + if m != nil { + return m.MinCpuPlatform + } + return "" +} + +func (m *NodeConfig) GetTaints() []*NodeTaint { + if m != nil { + return m.Taints + } + return nil +} + +// Kubernetes taint is comprised of three fields: key, value, and effect. Effect +// can only be one of three types: NoSchedule, PreferNoSchedule or NoExecute. +// +// For more information, including usage and the valid values, see: +// https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ +type NodeTaint struct { + // Key for taint. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // Value for taint. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Effect for taint. 
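A small construction sketch for the NodeConfig and NodeTaint messages above; the values are illustrative, and NodeTaint_NO_SCHEDULE is assumed to be the generated constant for the NoSchedule effect defined elsewhere in this file.

// Illustrative: one labelled, tainted node configuration.
func exampleNodeConfig() *NodeConfig {
	return &NodeConfig{
		MachineType: "n1-standard-1",
		DiskSizeGb:  100,
		Labels:      map[string]string{"tier": "batch"},
		Taints: []*NodeTaint{{
			Key:    "dedicated",
			Value:  "batch",
			Effect: NodeTaint_NO_SCHEDULE, // assumed constant name; see the NodeTaint_Effect enum
		}},
	}
}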
+ Effect NodeTaint_Effect `protobuf:"varint,3,opt,name=effect,proto3,enum=google.container.v1alpha1.NodeTaint_Effect" json:"effect,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeTaint) Reset() { *m = NodeTaint{} } +func (m *NodeTaint) String() string { return proto.CompactTextString(m) } +func (*NodeTaint) ProtoMessage() {} +func (*NodeTaint) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{1} +} +func (m *NodeTaint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeTaint.Unmarshal(m, b) +} +func (m *NodeTaint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeTaint.Marshal(b, m, deterministic) +} +func (dst *NodeTaint) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeTaint.Merge(dst, src) +} +func (m *NodeTaint) XXX_Size() int { + return xxx_messageInfo_NodeTaint.Size(m) +} +func (m *NodeTaint) XXX_DiscardUnknown() { + xxx_messageInfo_NodeTaint.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeTaint proto.InternalMessageInfo + +func (m *NodeTaint) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *NodeTaint) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *NodeTaint) GetEffect() NodeTaint_Effect { + if m != nil { + return m.Effect + } + return NodeTaint_EFFECT_UNSPECIFIED +} + +// The authentication information for accessing the master endpoint. +// Authentication can be done using HTTP basic auth or using client +// certificates. +type MasterAuth struct { + // The username to use for HTTP basic authentication to the master endpoint. + // For clusters v1.6.0 and later, you can disable basic authentication by + // providing an empty username. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // The password to use for HTTP basic authentication to the master endpoint. + // Because the master endpoint is open to the Internet, you should create a + // strong password. If a password is provided for cluster creation, username + // must be non-empty. + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + // Configuration for client certificate authentication on the cluster. If no + // configuration is specified, a client certificate is issued. + ClientCertificateConfig *ClientCertificateConfig `protobuf:"bytes,3,opt,name=client_certificate_config,json=clientCertificateConfig,proto3" json:"client_certificate_config,omitempty"` + // [Output only] Base64-encoded public certificate that is the root of + // trust for the cluster. + ClusterCaCertificate string `protobuf:"bytes,100,opt,name=cluster_ca_certificate,json=clusterCaCertificate,proto3" json:"cluster_ca_certificate,omitempty"` + // [Output only] Base64-encoded public certificate used by clients to + // authenticate to the cluster endpoint. + ClientCertificate string `protobuf:"bytes,101,opt,name=client_certificate,json=clientCertificate,proto3" json:"client_certificate,omitempty"` + // [Output only] Base64-encoded private key used by clients to authenticate + // to the cluster endpoint. 
+ ClientKey string `protobuf:"bytes,102,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuth) Reset() { *m = MasterAuth{} } +func (m *MasterAuth) String() string { return proto.CompactTextString(m) } +func (*MasterAuth) ProtoMessage() {} +func (*MasterAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{2} +} +func (m *MasterAuth) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuth.Unmarshal(m, b) +} +func (m *MasterAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuth.Marshal(b, m, deterministic) +} +func (dst *MasterAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuth.Merge(dst, src) +} +func (m *MasterAuth) XXX_Size() int { + return xxx_messageInfo_MasterAuth.Size(m) +} +func (m *MasterAuth) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuth proto.InternalMessageInfo + +func (m *MasterAuth) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *MasterAuth) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +func (m *MasterAuth) GetClientCertificateConfig() *ClientCertificateConfig { + if m != nil { + return m.ClientCertificateConfig + } + return nil +} + +func (m *MasterAuth) GetClusterCaCertificate() string { + if m != nil { + return m.ClusterCaCertificate + } + return "" +} + +func (m *MasterAuth) GetClientCertificate() string { + if m != nil { + return m.ClientCertificate + } + return "" +} + +func (m *MasterAuth) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + +// Configuration for client certificates on the cluster. +type ClientCertificateConfig struct { + // Issue a client certificate. 
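One property of the generated accessors worth calling out: every Get* method checks for a nil receiver, so optional sub-messages such as the client certificate configuration can be read without explicit nil guards. A minimal sketch using only getters defined in this file:

// Safe even when auth, or its nested config, is nil: each getter returns the
// zero value instead of dereferencing a nil pointer.
func wantsClientCert(auth *MasterAuth) bool {
	return auth.GetClientCertificateConfig().GetIssueClientCertificate()
}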
+ IssueClientCertificate bool `protobuf:"varint,1,opt,name=issue_client_certificate,json=issueClientCertificate,proto3" json:"issue_client_certificate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientCertificateConfig) Reset() { *m = ClientCertificateConfig{} } +func (m *ClientCertificateConfig) String() string { return proto.CompactTextString(m) } +func (*ClientCertificateConfig) ProtoMessage() {} +func (*ClientCertificateConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{3} +} +func (m *ClientCertificateConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientCertificateConfig.Unmarshal(m, b) +} +func (m *ClientCertificateConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientCertificateConfig.Marshal(b, m, deterministic) +} +func (dst *ClientCertificateConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientCertificateConfig.Merge(dst, src) +} +func (m *ClientCertificateConfig) XXX_Size() int { + return xxx_messageInfo_ClientCertificateConfig.Size(m) +} +func (m *ClientCertificateConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClientCertificateConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientCertificateConfig proto.InternalMessageInfo + +func (m *ClientCertificateConfig) GetIssueClientCertificate() bool { + if m != nil { + return m.IssueClientCertificate + } + return false +} + +// Configuration for the addons that can be automatically spun up in the +// cluster, enabling additional functionality. +type AddonsConfig struct { + // Configuration for the HTTP (L7) load balancing controller addon, which + // makes it easy to set up HTTP load balancers for services in a cluster. + HttpLoadBalancing *HttpLoadBalancing `protobuf:"bytes,1,opt,name=http_load_balancing,json=httpLoadBalancing,proto3" json:"http_load_balancing,omitempty"` + // Configuration for the horizontal pod autoscaling feature, which + // increases or decreases the number of replica pods a replication controller + // has based on the resource usage of the existing pods. + HorizontalPodAutoscaling *HorizontalPodAutoscaling `protobuf:"bytes,2,opt,name=horizontal_pod_autoscaling,json=horizontalPodAutoscaling,proto3" json:"horizontal_pod_autoscaling,omitempty"` + // Configuration for the Kubernetes Dashboard. + KubernetesDashboard *KubernetesDashboard `protobuf:"bytes,3,opt,name=kubernetes_dashboard,json=kubernetesDashboard,proto3" json:"kubernetes_dashboard,omitempty"` + // Configuration for NetworkPolicy. This only tracks whether the addon + // is enabled or not on the Master, it does not track whether network policy + // is enabled for the nodes. 
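Note the polarity of the addon sub-messages defined just below: each carries a Disabled flag, so switching an addon off means setting Disabled to true rather than clearing an Enabled field. A short illustrative sketch:

// Illustrative: turn off the dashboard and HTTP load balancing, leaving the
// other addons untouched.
func exampleAddons() *AddonsConfig {
	return &AddonsConfig{
		HttpLoadBalancing:   &HttpLoadBalancing{Disabled: true},
		KubernetesDashboard: &KubernetesDashboard{Disabled: true},
	}
}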
+ NetworkPolicyConfig *NetworkPolicyConfig `protobuf:"bytes,4,opt,name=network_policy_config,json=networkPolicyConfig,proto3" json:"network_policy_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddonsConfig) Reset() { *m = AddonsConfig{} } +func (m *AddonsConfig) String() string { return proto.CompactTextString(m) } +func (*AddonsConfig) ProtoMessage() {} +func (*AddonsConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{4} +} +func (m *AddonsConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddonsConfig.Unmarshal(m, b) +} +func (m *AddonsConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddonsConfig.Marshal(b, m, deterministic) +} +func (dst *AddonsConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddonsConfig.Merge(dst, src) +} +func (m *AddonsConfig) XXX_Size() int { + return xxx_messageInfo_AddonsConfig.Size(m) +} +func (m *AddonsConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AddonsConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AddonsConfig proto.InternalMessageInfo + +func (m *AddonsConfig) GetHttpLoadBalancing() *HttpLoadBalancing { + if m != nil { + return m.HttpLoadBalancing + } + return nil +} + +func (m *AddonsConfig) GetHorizontalPodAutoscaling() *HorizontalPodAutoscaling { + if m != nil { + return m.HorizontalPodAutoscaling + } + return nil +} + +func (m *AddonsConfig) GetKubernetesDashboard() *KubernetesDashboard { + if m != nil { + return m.KubernetesDashboard + } + return nil +} + +func (m *AddonsConfig) GetNetworkPolicyConfig() *NetworkPolicyConfig { + if m != nil { + return m.NetworkPolicyConfig + } + return nil +} + +// Configuration options for the HTTP (L7) load balancing controller addon, +// which makes it easy to set up HTTP load balancers for services in a cluster. +type HttpLoadBalancing struct { + // Whether the HTTP Load Balancing controller is enabled in the cluster. + // When enabled, it runs a small pod in the cluster that manages the load + // balancers. 
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpLoadBalancing) Reset() { *m = HttpLoadBalancing{} } +func (m *HttpLoadBalancing) String() string { return proto.CompactTextString(m) } +func (*HttpLoadBalancing) ProtoMessage() {} +func (*HttpLoadBalancing) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{5} +} +func (m *HttpLoadBalancing) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpLoadBalancing.Unmarshal(m, b) +} +func (m *HttpLoadBalancing) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpLoadBalancing.Marshal(b, m, deterministic) +} +func (dst *HttpLoadBalancing) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpLoadBalancing.Merge(dst, src) +} +func (m *HttpLoadBalancing) XXX_Size() int { + return xxx_messageInfo_HttpLoadBalancing.Size(m) +} +func (m *HttpLoadBalancing) XXX_DiscardUnknown() { + xxx_messageInfo_HttpLoadBalancing.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpLoadBalancing proto.InternalMessageInfo + +func (m *HttpLoadBalancing) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for the horizontal pod autoscaling feature, which +// increases or decreases the number of replica pods a replication controller +// has based on the resource usage of the existing pods. +type HorizontalPodAutoscaling struct { + // Whether the Horizontal Pod Autoscaling feature is enabled in the cluster. + // When enabled, it ensures that a Heapster pod is running in the cluster, + // which is also used by the Cloud Monitoring service. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HorizontalPodAutoscaling) Reset() { *m = HorizontalPodAutoscaling{} } +func (m *HorizontalPodAutoscaling) String() string { return proto.CompactTextString(m) } +func (*HorizontalPodAutoscaling) ProtoMessage() {} +func (*HorizontalPodAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{6} +} +func (m *HorizontalPodAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HorizontalPodAutoscaling.Unmarshal(m, b) +} +func (m *HorizontalPodAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HorizontalPodAutoscaling.Marshal(b, m, deterministic) +} +func (dst *HorizontalPodAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_HorizontalPodAutoscaling.Merge(dst, src) +} +func (m *HorizontalPodAutoscaling) XXX_Size() int { + return xxx_messageInfo_HorizontalPodAutoscaling.Size(m) +} +func (m *HorizontalPodAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_HorizontalPodAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_HorizontalPodAutoscaling proto.InternalMessageInfo + +func (m *HorizontalPodAutoscaling) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for the Kubernetes Dashboard. +type KubernetesDashboard struct { + // Whether the Kubernetes Dashboard is enabled for this cluster. 
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KubernetesDashboard) Reset() { *m = KubernetesDashboard{} } +func (m *KubernetesDashboard) String() string { return proto.CompactTextString(m) } +func (*KubernetesDashboard) ProtoMessage() {} +func (*KubernetesDashboard) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{7} +} +func (m *KubernetesDashboard) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KubernetesDashboard.Unmarshal(m, b) +} +func (m *KubernetesDashboard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KubernetesDashboard.Marshal(b, m, deterministic) +} +func (dst *KubernetesDashboard) XXX_Merge(src proto.Message) { + xxx_messageInfo_KubernetesDashboard.Merge(dst, src) +} +func (m *KubernetesDashboard) XXX_Size() int { + return xxx_messageInfo_KubernetesDashboard.Size(m) +} +func (m *KubernetesDashboard) XXX_DiscardUnknown() { + xxx_messageInfo_KubernetesDashboard.DiscardUnknown(m) +} + +var xxx_messageInfo_KubernetesDashboard proto.InternalMessageInfo + +func (m *KubernetesDashboard) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for NetworkPolicy. This only tracks whether the addon +// is enabled or not on the Master, it does not track whether network policy +// is enabled for the nodes. +type NetworkPolicyConfig struct { + // Whether NetworkPolicy is enabled for this cluster. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicyConfig) Reset() { *m = NetworkPolicyConfig{} } +func (m *NetworkPolicyConfig) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicyConfig) ProtoMessage() {} +func (*NetworkPolicyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{8} +} +func (m *NetworkPolicyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicyConfig.Unmarshal(m, b) +} +func (m *NetworkPolicyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicyConfig.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicyConfig.Merge(dst, src) +} +func (m *NetworkPolicyConfig) XXX_Size() int { + return xxx_messageInfo_NetworkPolicyConfig.Size(m) +} +func (m *NetworkPolicyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicyConfig proto.InternalMessageInfo + +func (m *NetworkPolicyConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for the master authorized networks feature. Enabled +// master authorized networks will disallow all external traffic to access +// Kubernetes master through HTTPS except traffic from the given CIDR blocks, +// Google Compute Engine Public IPs and Google Prod IPs. +type MasterAuthorizedNetworksConfig struct { + // Whether or not master authorized networks is enabled. 
+ Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + // cidr_blocks define up to 10 external networks that could access + // Kubernetes master through HTTPS. + CidrBlocks []*MasterAuthorizedNetworksConfig_CidrBlock `protobuf:"bytes,2,rep,name=cidr_blocks,json=cidrBlocks,proto3" json:"cidr_blocks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig) Reset() { *m = MasterAuthorizedNetworksConfig{} } +func (m *MasterAuthorizedNetworksConfig) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{9} +} +func (m *MasterAuthorizedNetworksConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Size(m) +} +func (m *MasterAuthorizedNetworksConfig) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *MasterAuthorizedNetworksConfig) GetCidrBlocks() []*MasterAuthorizedNetworksConfig_CidrBlock { + if m != nil { + return m.CidrBlocks + } + return nil +} + +// CidrBlock contains an optional name and one CIDR block. +type MasterAuthorizedNetworksConfig_CidrBlock struct { + // display_name is an optional field for users to identify CIDR blocks. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // cidr_block must be specified in CIDR notation. 
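A short sketch of the authorized-networks message above, with made-up documentation ranges; per the field comment, at most 10 CIDR blocks may be supplied.

// Illustrative: restrict HTTPS access to the master to two known ranges.
func exampleAuthorizedNetworks() *MasterAuthorizedNetworksConfig {
	return &MasterAuthorizedNetworksConfig{
		Enabled: true,
		CidrBlocks: []*MasterAuthorizedNetworksConfig_CidrBlock{
			{DisplayName: "office-hq", CidrBlock: "203.0.113.0/24"},
			{DisplayName: "office-remote", CidrBlock: "198.51.100.0/24"},
		},
	}
}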
+ CidrBlock string `protobuf:"bytes,2,opt,name=cidr_block,json=cidrBlock,proto3" json:"cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) Reset() { + *m = MasterAuthorizedNetworksConfig_CidrBlock{} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig_CidrBlock) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig_CidrBlock) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{9, 0} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Size(m) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetCidrBlock() string { + if m != nil { + return m.CidrBlock + } + return "" +} + +// Configuration options for the NetworkPolicy feature. +// https://kubernetes.io/docs/concepts/services-networking/networkpolicies/ +type NetworkPolicy struct { + // The selected network policy provider. + Provider NetworkPolicy_Provider `protobuf:"varint,1,opt,name=provider,proto3,enum=google.container.v1alpha1.NetworkPolicy_Provider" json:"provider,omitempty"` + // Whether network policy is enabled on the cluster. 
+ Enabled bool `protobuf:"varint,2,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicy) Reset() { *m = NetworkPolicy{} } +func (m *NetworkPolicy) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicy) ProtoMessage() {} +func (*NetworkPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{10} +} +func (m *NetworkPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicy.Unmarshal(m, b) +} +func (m *NetworkPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicy.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicy.Merge(dst, src) +} +func (m *NetworkPolicy) XXX_Size() int { + return xxx_messageInfo_NetworkPolicy.Size(m) +} +func (m *NetworkPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicy proto.InternalMessageInfo + +func (m *NetworkPolicy) GetProvider() NetworkPolicy_Provider { + if m != nil { + return m.Provider + } + return NetworkPolicy_PROVIDER_UNSPECIFIED +} + +func (m *NetworkPolicy) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration for controlling how IPs are allocated in the cluster. +type IPAllocationPolicy struct { + // Whether alias IPs will be used for pod IPs in the cluster. + UseIpAliases bool `protobuf:"varint,1,opt,name=use_ip_aliases,json=useIpAliases,proto3" json:"use_ip_aliases,omitempty"` + // Whether a new subnetwork will be created automatically for the cluster. + // + // This field is only applicable when `use_ip_aliases` is true. + CreateSubnetwork bool `protobuf:"varint,2,opt,name=create_subnetwork,json=createSubnetwork,proto3" json:"create_subnetwork,omitempty"` + // A custom subnetwork name to be used if `create_subnetwork` is true. If + // this field is empty, then an automatic name will be chosen for the new + // subnetwork. + SubnetworkName string `protobuf:"bytes,3,opt,name=subnetwork_name,json=subnetworkName,proto3" json:"subnetwork_name,omitempty"` + // This field is deprecated, use cluster_ipv4_cidr_block. + ClusterIpv4Cidr string `protobuf:"bytes,4,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` + // This field is deprecated, use node_ipv4_cidr_block. + NodeIpv4Cidr string `protobuf:"bytes,5,opt,name=node_ipv4_cidr,json=nodeIpv4Cidr,proto3" json:"node_ipv4_cidr,omitempty"` + // This field is deprecated, use services_ipv4_cidr_block. + ServicesIpv4Cidr string `protobuf:"bytes,6,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` + // The name of the secondary range to be used for the cluster CIDR + // block. The secondary range will be used for pod IP + // addresses. This must be an existing secondary range associated + // with the cluster subnetwork. + // + // This field is only applicable if use_ip_aliases is true and + // create_subnetwork is false. + ClusterSecondaryRangeName string `protobuf:"bytes,7,opt,name=cluster_secondary_range_name,json=clusterSecondaryRangeName,proto3" json:"cluster_secondary_range_name,omitempty"` + // The name of the secondary range to be used as for the services + // CIDR block. The secondary range will be used for service + // ClusterIPs. 
This must be an existing secondary range associated + // with the cluster subnetwork. + // + // This field is only applicable with use_ip_aliases is true and + // create_subnetwork is false. + ServicesSecondaryRangeName string `protobuf:"bytes,8,opt,name=services_secondary_range_name,json=servicesSecondaryRangeName,proto3" json:"services_secondary_range_name,omitempty"` + // The IP address range for the cluster pod IPs. If this field is set, then + // `cluster.cluster_ipv4_cidr` must be left blank. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + ClusterIpv4CidrBlock string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr_block,json=clusterIpv4CidrBlock,proto3" json:"cluster_ipv4_cidr_block,omitempty"` + // The IP address range of the instance IPs in this cluster. + // + // This is applicable only if `create_subnetwork` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + NodeIpv4CidrBlock string `protobuf:"bytes,10,opt,name=node_ipv4_cidr_block,json=nodeIpv4CidrBlock,proto3" json:"node_ipv4_cidr_block,omitempty"` + // The IP address range of the services IPs in this cluster. If blank, a range + // will be automatically chosen with the default size. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. 
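Pulling the alias-IP fields described above together, a hedged sketch of an IPAllocationPolicy that asks the server to create a subnetwork and choose ranges of the given sizes; the netmask values are illustrative, not recommendations.

// Illustrative: alias IPs with a new, automatically provisioned subnetwork.
func exampleIPAllocation() *IPAllocationPolicy {
	return &IPAllocationPolicy{
		UseIpAliases:          true,
		CreateSubnetwork:      true,
		SubnetworkName:        "gke-demo-subnet", // optional; auto-named if empty
		ClusterIpv4CidrBlock:  "/14",             // pod range, netmask-only form
		ServicesIpv4CidrBlock: "/20",             // service range, netmask-only form
	}
}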
+ ServicesIpv4CidrBlock string `protobuf:"bytes,11,opt,name=services_ipv4_cidr_block,json=servicesIpv4CidrBlock,proto3" json:"services_ipv4_cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IPAllocationPolicy) Reset() { *m = IPAllocationPolicy{} } +func (m *IPAllocationPolicy) String() string { return proto.CompactTextString(m) } +func (*IPAllocationPolicy) ProtoMessage() {} +func (*IPAllocationPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{11} +} +func (m *IPAllocationPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IPAllocationPolicy.Unmarshal(m, b) +} +func (m *IPAllocationPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IPAllocationPolicy.Marshal(b, m, deterministic) +} +func (dst *IPAllocationPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_IPAllocationPolicy.Merge(dst, src) +} +func (m *IPAllocationPolicy) XXX_Size() int { + return xxx_messageInfo_IPAllocationPolicy.Size(m) +} +func (m *IPAllocationPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_IPAllocationPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_IPAllocationPolicy proto.InternalMessageInfo + +func (m *IPAllocationPolicy) GetUseIpAliases() bool { + if m != nil { + return m.UseIpAliases + } + return false +} + +func (m *IPAllocationPolicy) GetCreateSubnetwork() bool { + if m != nil { + return m.CreateSubnetwork + } + return false +} + +func (m *IPAllocationPolicy) GetSubnetworkName() string { + if m != nil { + return m.SubnetworkName + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterIpv4Cidr() string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +func (m *IPAllocationPolicy) GetNodeIpv4Cidr() string { + if m != nil { + return m.NodeIpv4Cidr + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterSecondaryRangeName() string { + if m != nil { + return m.ClusterSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesSecondaryRangeName() string { + if m != nil { + return m.ServicesSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterIpv4CidrBlock() string { + if m != nil { + return m.ClusterIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetNodeIpv4CidrBlock() string { + if m != nil { + return m.NodeIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesIpv4CidrBlock() string { + if m != nil { + return m.ServicesIpv4CidrBlock + } + return "" +} + +// Configuration for the PodSecurityPolicy feature. +type PodSecurityPolicyConfig struct { + // Enable the PodSecurityPolicy controller for this cluster. If enabled, pods + // must be valid under a PodSecurityPolicy to be created. 
+ Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PodSecurityPolicyConfig) Reset() { *m = PodSecurityPolicyConfig{} } +func (m *PodSecurityPolicyConfig) String() string { return proto.CompactTextString(m) } +func (*PodSecurityPolicyConfig) ProtoMessage() {} +func (*PodSecurityPolicyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{12} +} +func (m *PodSecurityPolicyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PodSecurityPolicyConfig.Unmarshal(m, b) +} +func (m *PodSecurityPolicyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PodSecurityPolicyConfig.Marshal(b, m, deterministic) +} +func (dst *PodSecurityPolicyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PodSecurityPolicyConfig.Merge(dst, src) +} +func (m *PodSecurityPolicyConfig) XXX_Size() int { + return xxx_messageInfo_PodSecurityPolicyConfig.Size(m) +} +func (m *PodSecurityPolicyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PodSecurityPolicyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PodSecurityPolicyConfig proto.InternalMessageInfo + +func (m *PodSecurityPolicyConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// A Google Container Engine cluster. +type Cluster struct { + // The name of this cluster. The name must be unique within this project + // and zone, and can be up to 40 characters with the following restrictions: + // + // * Lowercase letters, numbers, and hyphens only. + // * Must start with a letter. + // * Must end with a number or a letter. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional description of this cluster. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The number of nodes to create in this cluster. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. You must also have available + // firewall and routes quota. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "node_config") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // Parameters used in creating the cluster's nodes. + // See `nodeConfig` for the description of its properties. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "initial_node_count") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + // For responses, this field will be populated with the node configuration of + // the first node pool. + // + // If unspecified, the defaults are used. + NodeConfig *NodeConfig `protobuf:"bytes,4,opt,name=node_config,json=nodeConfig,proto3" json:"node_config,omitempty"` + // The authentication information for accessing the master endpoint. 
+ MasterAuth *MasterAuth `protobuf:"bytes,5,opt,name=master_auth,json=masterAuth,proto3" json:"master_auth,omitempty"` + // The logging service the cluster should use to write logs. + // Currently available options: + // + // * `logging.googleapis.com` - the Google Cloud Logging service. + // * `none` - no logs will be exported from the cluster. + // * if left as an empty string,`logging.googleapis.com` will be used. + LoggingService string `protobuf:"bytes,6,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * `monitoring.googleapis.com` - the Google Cloud Monitoring service. + // * `none` - no metrics will be exported from the cluster. + // * if left as an empty string, `monitoring.googleapis.com` will be used. + MonitoringService string `protobuf:"bytes,7,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name of the Google Compute Engine + // [network](/compute/docs/networks-and-firewalls#networks) to which the + // cluster is connected. If left unspecified, the `default` network + // will be used. + Network string `protobuf:"bytes,8,opt,name=network,proto3" json:"network,omitempty"` + // The IP address range of the container pods in this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`). Leave blank to have + // one automatically chosen or specify a `/14` block in `10.0.0.0/8`. + ClusterIpv4Cidr string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` + // Configurations for the various addons available to run in the cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,10,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name of the Google Compute Engine + // [subnetwork](/compute/docs/subnetworks) to which the + // cluster is connected. + Subnetwork string `protobuf:"bytes,11,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + // The node pools associated with this cluster. + // This field should not be set if "node_config" or "initial_node_count" are + // specified. + NodePools []*NodePool `protobuf:"bytes,12,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"` + // The list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. + Locations []string `protobuf:"bytes,13,rep,name=locations,proto3" json:"locations,omitempty"` + // Kubernetes alpha features are enabled on this cluster. This includes alpha + // API groups (e.g. v1alpha1) and features that may not be production ready in + // the kubernetes version of the master and nodes. + // The cluster has no SLA for uptime and master/node upgrades are disabled. + // Alpha enabled clusters are automatically deleted thirty days after + // creation. + EnableKubernetesAlpha bool `protobuf:"varint,14,opt,name=enable_kubernetes_alpha,json=enableKubernetesAlpha,proto3" json:"enable_kubernetes_alpha,omitempty"` + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,19,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // Configuration for cluster IP allocation. 
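The Cluster message being defined here ties the earlier messages together. As its field comments note, the inline initial_node_count/node_config pair and explicit node_pools are alternative ways to describe nodes and must not be combined. A minimal sketch with illustrative values (an IPAllocationPolicy like the one sketched earlier could be attached through the field below):

// Illustrative: a small cluster using the inline node description rather than
// explicit node pools (never set both, per the field comments).
func exampleCluster() *Cluster {
	return &Cluster{
		Name:             "demo-cluster",
		Description:      "illustrative example only",
		InitialNodeCount: 3,
		NodeConfig:       &NodeConfig{MachineType: "n1-standard-1"},
		AddonsConfig:     &AddonsConfig{KubernetesDashboard: &KubernetesDashboard{Disabled: true}},
	}
}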
+ IpAllocationPolicy *IPAllocationPolicy `protobuf:"bytes,20,opt,name=ip_allocation_policy,json=ipAllocationPolicy,proto3" json:"ip_allocation_policy,omitempty"` + // The configuration options for master authorized networks feature. + MasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,22,opt,name=master_authorized_networks_config,json=masterAuthorizedNetworksConfig,proto3" json:"master_authorized_networks_config,omitempty"` + // Configure the maintenance policy for this cluster. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,23,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // Configuration for the PodSecurityPolicy feature. + PodSecurityPolicyConfig *PodSecurityPolicyConfig `protobuf:"bytes,25,opt,name=pod_security_policy_config,json=podSecurityPolicyConfig,proto3" json:"pod_security_policy_config,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,101,opt,name=zone,proto3" json:"zone,omitempty"` + // [Output only] The IP address of this cluster's master endpoint. + // The endpoint can be accessed from the internet at + // `https://username:password@endpoint/`. + // + // See the `masterAuth` property of this resource for username and + // password information. + Endpoint string `protobuf:"bytes,102,opt,name=endpoint,proto3" json:"endpoint,omitempty"` + // The initial Kubernetes version for this cluster. Valid versions are those + // found in validMasterVersions returned by getServerConfig. The version can + // be upgraded over time; such upgrades are reflected in + // currentMasterVersion and currentNodeVersion. + InitialClusterVersion string `protobuf:"bytes,103,opt,name=initial_cluster_version,json=initialClusterVersion,proto3" json:"initial_cluster_version,omitempty"` + // [Output only] The current software version of the master endpoint. + CurrentMasterVersion string `protobuf:"bytes,104,opt,name=current_master_version,json=currentMasterVersion,proto3" json:"current_master_version,omitempty"` + // [Output only] The current version of the node software components. + // If they are currently at multiple versions because they're in the process + // of being upgraded, this reflects the minimum version of all nodes. + CurrentNodeVersion string `protobuf:"bytes,105,opt,name=current_node_version,json=currentNodeVersion,proto3" json:"current_node_version,omitempty"` + // [Output only] The time the cluster was created, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + CreateTime string `protobuf:"bytes,106,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // [Output only] The current status of this cluster. + Status Cluster_Status `protobuf:"varint,107,opt,name=status,proto3,enum=google.container.v1alpha1.Cluster_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // cluster, if available. + StatusMessage string `protobuf:"bytes,108,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // [Output only] The size of the address space on each node for hosting + // containers. 
This is provisioned from within the `container_ipv4_cidr` + // range. + NodeIpv4CidrSize int32 `protobuf:"varint,109,opt,name=node_ipv4_cidr_size,json=nodeIpv4CidrSize,proto3" json:"node_ipv4_cidr_size,omitempty"` + // [Output only] The IP address range of the Kubernetes services in + // this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `1.2.3.4/29`). Service addresses are + // typically put in the last `/16` from the container CIDR. + ServicesIpv4Cidr string `protobuf:"bytes,110,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` + // [Output only] The resource URLs of [instance + // groups](/compute/docs/instance-groups/) associated with this + // cluster. + InstanceGroupUrls []string `protobuf:"bytes,111,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` + // [Output only] The number of nodes currently in the cluster. + CurrentNodeCount int32 `protobuf:"varint,112,opt,name=current_node_count,json=currentNodeCount,proto3" json:"current_node_count,omitempty"` + // [Output only] The time the cluster will be automatically + // deleted in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + ExpireTime string `protobuf:"bytes,113,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,114,opt,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{13} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Cluster) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *Cluster) GetNodeConfig() *NodeConfig { + if m != nil { + return m.NodeConfig + } + return nil +} + +func (m *Cluster) GetMasterAuth() *MasterAuth { + if m != nil { + return m.MasterAuth + } + return nil +} + +func (m *Cluster) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *Cluster) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *Cluster) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *Cluster) GetClusterIpv4Cidr() 
string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +func (m *Cluster) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *Cluster) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +func (m *Cluster) GetNodePools() []*NodePool { + if m != nil { + return m.NodePools + } + return nil +} + +func (m *Cluster) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *Cluster) GetEnableKubernetesAlpha() bool { + if m != nil { + return m.EnableKubernetesAlpha + } + return false +} + +func (m *Cluster) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *Cluster) GetIpAllocationPolicy() *IPAllocationPolicy { + if m != nil { + return m.IpAllocationPolicy + } + return nil +} + +func (m *Cluster) GetMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.MasterAuthorizedNetworksConfig + } + return nil +} + +func (m *Cluster) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *Cluster) GetPodSecurityPolicyConfig() *PodSecurityPolicyConfig { + if m != nil { + return m.PodSecurityPolicyConfig + } + return nil +} + +func (m *Cluster) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *Cluster) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Cluster) GetEndpoint() string { + if m != nil { + return m.Endpoint + } + return "" +} + +func (m *Cluster) GetInitialClusterVersion() string { + if m != nil { + return m.InitialClusterVersion + } + return "" +} + +func (m *Cluster) GetCurrentMasterVersion() string { + if m != nil { + return m.CurrentMasterVersion + } + return "" +} + +func (m *Cluster) GetCurrentNodeVersion() string { + if m != nil { + return m.CurrentNodeVersion + } + return "" +} + +func (m *Cluster) GetCreateTime() string { + if m != nil { + return m.CreateTime + } + return "" +} + +func (m *Cluster) GetStatus() Cluster_Status { + if m != nil { + return m.Status + } + return Cluster_STATUS_UNSPECIFIED +} + +func (m *Cluster) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Cluster) GetNodeIpv4CidrSize() int32 { + if m != nil { + return m.NodeIpv4CidrSize + } + return 0 +} + +func (m *Cluster) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +func (m *Cluster) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +func (m *Cluster) GetCurrentNodeCount() int32 { + if m != nil { + return m.CurrentNodeCount + } + return 0 +} + +func (m *Cluster) GetExpireTime() string { + if m != nil { + return m.ExpireTime + } + return "" +} + +func (m *Cluster) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +// ClusterUpdate describes an update to the cluster. Exactly one update can +// be applied to a cluster with each request, so at most one field can be +// provided. +type ClusterUpdate struct { + // The Kubernetes version to change the nodes to (typically an + // upgrade). Use `-` to upgrade to the latest version supported by + // the server. + DesiredNodeVersion string `protobuf:"bytes,4,opt,name=desired_node_version,json=desiredNodeVersion,proto3" json:"desired_node_version,omitempty"` + // The monitoring service the cluster should use to write metrics. 
+ // Currently available options: + // + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + DesiredMonitoringService string `protobuf:"bytes,5,opt,name=desired_monitoring_service,json=desiredMonitoringService,proto3" json:"desired_monitoring_service,omitempty"` + // Configurations for the various addons available to run in the cluster. + DesiredAddonsConfig *AddonsConfig `protobuf:"bytes,6,opt,name=desired_addons_config,json=desiredAddonsConfig,proto3" json:"desired_addons_config,omitempty"` + // The node pool to be upgraded. This field is mandatory if + // "desired_node_version", "desired_image_family" or + // "desired_node_pool_autoscaling" is specified and there is more than one + // node pool on the cluster. + DesiredNodePoolId string `protobuf:"bytes,7,opt,name=desired_node_pool_id,json=desiredNodePoolId,proto3" json:"desired_node_pool_id,omitempty"` + // The desired image type for the node pool. + // NOTE: Set the "desired_node_pool" field as well. + DesiredImageType string `protobuf:"bytes,8,opt,name=desired_image_type,json=desiredImageType,proto3" json:"desired_image_type,omitempty"` + // Autoscaler configuration for the node pool specified in + // desired_node_pool_id. If there is only one pool in the + // cluster and desired_node_pool_id is not provided then + // the change applies to that single node pool. + DesiredNodePoolAutoscaling *NodePoolAutoscaling `protobuf:"bytes,9,opt,name=desired_node_pool_autoscaling,json=desiredNodePoolAutoscaling,proto3" json:"desired_node_pool_autoscaling,omitempty"` + // The desired list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + DesiredLocations []string `protobuf:"bytes,10,rep,name=desired_locations,json=desiredLocations,proto3" json:"desired_locations,omitempty"` + // The desired configuration options for master authorized networks feature. + DesiredMasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,12,opt,name=desired_master_authorized_networks_config,json=desiredMasterAuthorizedNetworksConfig,proto3" json:"desired_master_authorized_networks_config,omitempty"` + // The desired configuration options for the PodSecurityPolicy feature. + DesiredPodSecurityPolicyConfig *PodSecurityPolicyConfig `protobuf:"bytes,14,opt,name=desired_pod_security_policy_config,json=desiredPodSecurityPolicyConfig,proto3" json:"desired_pod_security_policy_config,omitempty"` + // The Kubernetes version to change the master to. The only valid value is the + // latest supported version. Use "-" to have the server automatically select + // the latest version. 
+ DesiredMasterVersion string `protobuf:"bytes,100,opt,name=desired_master_version,json=desiredMasterVersion,proto3" json:"desired_master_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterUpdate) Reset() { *m = ClusterUpdate{} } +func (m *ClusterUpdate) String() string { return proto.CompactTextString(m) } +func (*ClusterUpdate) ProtoMessage() {} +func (*ClusterUpdate) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{14} +} +func (m *ClusterUpdate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterUpdate.Unmarshal(m, b) +} +func (m *ClusterUpdate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterUpdate.Marshal(b, m, deterministic) +} +func (dst *ClusterUpdate) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterUpdate.Merge(dst, src) +} +func (m *ClusterUpdate) XXX_Size() int { + return xxx_messageInfo_ClusterUpdate.Size(m) +} +func (m *ClusterUpdate) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterUpdate.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterUpdate proto.InternalMessageInfo + +func (m *ClusterUpdate) GetDesiredNodeVersion() string { + if m != nil { + return m.DesiredNodeVersion + } + return "" +} + +func (m *ClusterUpdate) GetDesiredMonitoringService() string { + if m != nil { + return m.DesiredMonitoringService + } + return "" +} + +func (m *ClusterUpdate) GetDesiredAddonsConfig() *AddonsConfig { + if m != nil { + return m.DesiredAddonsConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredNodePoolId() string { + if m != nil { + return m.DesiredNodePoolId + } + return "" +} + +func (m *ClusterUpdate) GetDesiredImageType() string { + if m != nil { + return m.DesiredImageType + } + return "" +} + +func (m *ClusterUpdate) GetDesiredNodePoolAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.DesiredNodePoolAutoscaling + } + return nil +} + +func (m *ClusterUpdate) GetDesiredLocations() []string { + if m != nil { + return m.DesiredLocations + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.DesiredMasterAuthorizedNetworksConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredPodSecurityPolicyConfig() *PodSecurityPolicyConfig { + if m != nil { + return m.DesiredPodSecurityPolicyConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterVersion() string { + if m != nil { + return m.DesiredMasterVersion + } + return "" +} + +// This operation resource represents operations that may have happened or are +// happening on the cluster. All fields are output only. +type Operation struct { + // The server-assigned ID for the operation. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation + // is taking place. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The operation type. + OperationType Operation_Type `protobuf:"varint,3,opt,name=operation_type,json=operationType,proto3,enum=google.container.v1alpha1.Operation_Type" json:"operation_type,omitempty"` + // The current status of the operation. 
+ Status Operation_Status `protobuf:"varint,4,opt,name=status,proto3,enum=google.container.v1alpha1.Operation_Status" json:"status,omitempty"` + // Detailed operation progress, if available. + Detail string `protobuf:"bytes,8,opt,name=detail,proto3" json:"detail,omitempty"` + // If an error has occurred, a textual description of the error. + StatusMessage string `protobuf:"bytes,5,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,6,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // Server-defined URL for the target of the operation. + TargetLink string `protobuf:"bytes,7,opt,name=target_link,json=targetLink,proto3" json:"target_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,9,opt,name=location,proto3" json:"location,omitempty"` + // [Output only] The time the operation started, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + StartTime string `protobuf:"bytes,10,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // [Output only] The time the operation completed, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + EndTime string `protobuf:"bytes,11,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{15} +} +func (m *Operation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operation.Unmarshal(m, b) +} +func (m *Operation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operation.Marshal(b, m, deterministic) +} +func (dst *Operation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operation.Merge(dst, src) +} +func (m *Operation) XXX_Size() int { + return xxx_messageInfo_Operation.Size(m) +} +func (m *Operation) XXX_DiscardUnknown() { + xxx_messageInfo_Operation.DiscardUnknown(m) +} + +var xxx_messageInfo_Operation proto.InternalMessageInfo + +func (m *Operation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Operation) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Operation) GetOperationType() Operation_Type { + if m != nil { + return m.OperationType + } + return Operation_TYPE_UNSPECIFIED +} + +func (m *Operation) GetStatus() Operation_Status { + if m != nil { + return m.Status + } + return Operation_STATUS_UNSPECIFIED +} + +func (m *Operation) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func (m *Operation) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Operation) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *Operation) GetTargetLink() string { + if m != nil { + return m.TargetLink + } + return "" +} + +func (m *Operation) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Operation) 
GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *Operation) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +// CreateClusterRequest creates a cluster. +type CreateClusterRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use parent instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use parent instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // A [cluster + // resource](/container-engine/reference/rest/v1alpha1/projects.zones.clusters) + Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + // The parent (project and location) where the cluster will be created. + // Specified in the format 'projects/*/locations/*'. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{16} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +func (m *CreateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *CreateClusterRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetClusterRequest gets the settings of a cluster. +type GetClusterRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to retrieve. + // This field is deprecated, use name instead. 
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name (project, location, cluster) of the cluster to retrieve. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{17} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +func (m *GetClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *GetClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateClusterRequest updates the settings of a cluster. +type UpdateClusterRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // A description of the update. + Update *ClusterUpdate `protobuf:"bytes,4,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterRequest) Reset() { *m = UpdateClusterRequest{} } +func (m *UpdateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterRequest) ProtoMessage() {} +func (*UpdateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{18} +} +func (m *UpdateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterRequest.Unmarshal(m, b) +} +func (m *UpdateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterRequest.Merge(dst, src) +} +func (m *UpdateClusterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateClusterRequest.Size(m) +} +func (m *UpdateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterRequest proto.InternalMessageInfo + +func (m *UpdateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *UpdateClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateClusterRequest) GetUpdate() *ClusterUpdate { + if m != nil { + return m.Update + } + return nil +} + +func (m *UpdateClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolVersionRequest updates the version of a node pool. +type UpdateNodePoolRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name of the node pool to upgrade. + // This field is deprecated, use name instead. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` + // The Kubernetes version to change the nodes to (typically an + // upgrade). Use `-` to upgrade to the latest version supported by + // the server. + NodeVersion string `protobuf:"bytes,5,opt,name=node_version,json=nodeVersion,proto3" json:"node_version,omitempty"` + // The desired image type for the node pool. + ImageType string `protobuf:"bytes,6,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to update. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,8,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNodePoolRequest) Reset() { *m = UpdateNodePoolRequest{} } +func (m *UpdateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNodePoolRequest) ProtoMessage() {} +func (*UpdateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{19} +} +func (m *UpdateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNodePoolRequest.Unmarshal(m, b) +} +func (m *UpdateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNodePoolRequest.Merge(dst, src) +} +func (m *UpdateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNodePoolRequest.Size(m) +} +func (m *UpdateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNodePoolRequest proto.InternalMessageInfo + +func (m *UpdateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *UpdateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *UpdateNodePoolRequest) GetNodeVersion() string { + if m != nil { + return m.NodeVersion + } + return "" +} + +func (m *UpdateNodePoolRequest) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *UpdateNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolAutoscalingRequest sets the autoscaler settings of a node pool. +type SetNodePoolAutoscalingRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name of the node pool to upgrade. + // This field is deprecated, use name instead. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` + // Autoscaling configuration for the node pool. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,5,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to set + // autoscaler settings. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNodePoolAutoscalingRequest) Reset() { *m = SetNodePoolAutoscalingRequest{} } +func (m *SetNodePoolAutoscalingRequest) String() string { return proto.CompactTextString(m) } +func (*SetNodePoolAutoscalingRequest) ProtoMessage() {} +func (*SetNodePoolAutoscalingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{20} +} +func (m *SetNodePoolAutoscalingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Unmarshal(m, b) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Marshal(b, m, deterministic) +} +func (dst *SetNodePoolAutoscalingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNodePoolAutoscalingRequest.Merge(dst, src) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Size() int { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Size(m) +} +func (m *SetNodePoolAutoscalingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNodePoolAutoscalingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNodePoolAutoscalingRequest proto.InternalMessageInfo + +func (m *SetNodePoolAutoscalingRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *SetNodePoolAutoscalingRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLoggingServiceRequest sets the logging service of a cluster. +type SetLoggingServiceRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The logging service the cluster should use to write metrics. + // Currently available options: + // + // * "logging.googleapis.com" - the Google Cloud Logging service + // * "none" - no metrics will be exported from the cluster + LoggingService string `protobuf:"bytes,4,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The name (project, location, cluster) of the cluster to set logging. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLoggingServiceRequest) Reset() { *m = SetLoggingServiceRequest{} } +func (m *SetLoggingServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetLoggingServiceRequest) ProtoMessage() {} +func (*SetLoggingServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{21} +} +func (m *SetLoggingServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLoggingServiceRequest.Unmarshal(m, b) +} +func (m *SetLoggingServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLoggingServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetLoggingServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLoggingServiceRequest.Merge(dst, src) +} +func (m *SetLoggingServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetLoggingServiceRequest.Size(m) +} +func (m *SetLoggingServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLoggingServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLoggingServiceRequest proto.InternalMessageInfo + +func (m *SetLoggingServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetLoggingServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *SetLoggingServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMonitoringServiceRequest sets the monitoring service of a cluster. +type SetMonitoringServiceRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + MonitoringService string `protobuf:"bytes,4,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name (project, location, cluster) of the cluster to set monitoring. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMonitoringServiceRequest) Reset() { *m = SetMonitoringServiceRequest{} } +func (m *SetMonitoringServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetMonitoringServiceRequest) ProtoMessage() {} +func (*SetMonitoringServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{22} +} +func (m *SetMonitoringServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMonitoringServiceRequest.Unmarshal(m, b) +} +func (m *SetMonitoringServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMonitoringServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetMonitoringServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMonitoringServiceRequest.Merge(dst, src) +} +func (m *SetMonitoringServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetMonitoringServiceRequest.Size(m) +} +func (m *SetMonitoringServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMonitoringServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMonitoringServiceRequest proto.InternalMessageInfo + +func (m *SetMonitoringServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetAddonsRequest sets the addons associated with the cluster. +type SetAddonsConfigRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The desired configurations for the various addons available to run in the + // cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,4,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name (project, location, cluster) of the cluster to set addons. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetAddonsConfigRequest) Reset() { *m = SetAddonsConfigRequest{} } +func (m *SetAddonsConfigRequest) String() string { return proto.CompactTextString(m) } +func (*SetAddonsConfigRequest) ProtoMessage() {} +func (*SetAddonsConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{23} +} +func (m *SetAddonsConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetAddonsConfigRequest.Unmarshal(m, b) +} +func (m *SetAddonsConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetAddonsConfigRequest.Marshal(b, m, deterministic) +} +func (dst *SetAddonsConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetAddonsConfigRequest.Merge(dst, src) +} +func (m *SetAddonsConfigRequest) XXX_Size() int { + return xxx_messageInfo_SetAddonsConfigRequest.Size(m) +} +func (m *SetAddonsConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetAddonsConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetAddonsConfigRequest proto.InternalMessageInfo + +func (m *SetAddonsConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetAddonsConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetAddonsConfigRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetAddonsConfigRequest) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *SetAddonsConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLocationsRequest sets the locations of the cluster. +type SetLocationsRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The desired list of Google Compute Engine + // [locations](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + Locations []string `protobuf:"bytes,4,rep,name=locations,proto3" json:"locations,omitempty"` + // The name (project, location, cluster) of the cluster to set locations. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLocationsRequest) Reset() { *m = SetLocationsRequest{} } +func (m *SetLocationsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLocationsRequest) ProtoMessage() {} +func (*SetLocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{24} +} +func (m *SetLocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLocationsRequest.Unmarshal(m, b) +} +func (m *SetLocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLocationsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLocationsRequest.Merge(dst, src) +} +func (m *SetLocationsRequest) XXX_Size() int { + return xxx_messageInfo_SetLocationsRequest.Size(m) +} +func (m *SetLocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLocationsRequest proto.InternalMessageInfo + +func (m *SetLocationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLocationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetLocationsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLocationsRequest) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *SetLocationsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateMasterRequest updates the master of the cluster. +type UpdateMasterRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The Kubernetes version to change the master to. The only valid value is the + // latest supported version. Use "-" to have the server automatically select + // the latest version. + MasterVersion string `protobuf:"bytes,4,opt,name=master_version,json=masterVersion,proto3" json:"master_version,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateMasterRequest) Reset() { *m = UpdateMasterRequest{} } +func (m *UpdateMasterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateMasterRequest) ProtoMessage() {} +func (*UpdateMasterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{25} +} +func (m *UpdateMasterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateMasterRequest.Unmarshal(m, b) +} +func (m *UpdateMasterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateMasterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateMasterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateMasterRequest.Merge(dst, src) +} +func (m *UpdateMasterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateMasterRequest.Size(m) +} +func (m *UpdateMasterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateMasterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateMasterRequest proto.InternalMessageInfo + +func (m *UpdateMasterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateMasterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *UpdateMasterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateMasterRequest) GetMasterVersion() string { + if m != nil { + return m.MasterVersion + } + return "" +} + +func (m *UpdateMasterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMasterAuthRequest updates the admin password of a cluster. +type SetMasterAuthRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to upgrade. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The exact form of action to be taken on the master auth. + Action SetMasterAuthRequest_Action `protobuf:"varint,4,opt,name=action,proto3,enum=google.container.v1alpha1.SetMasterAuthRequest_Action" json:"action,omitempty"` + // A description of the update. + Update *MasterAuth `protobuf:"bytes,5,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to set auth. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMasterAuthRequest) Reset() { *m = SetMasterAuthRequest{} } +func (m *SetMasterAuthRequest) String() string { return proto.CompactTextString(m) } +func (*SetMasterAuthRequest) ProtoMessage() {} +func (*SetMasterAuthRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{26} +} +func (m *SetMasterAuthRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMasterAuthRequest.Unmarshal(m, b) +} +func (m *SetMasterAuthRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMasterAuthRequest.Marshal(b, m, deterministic) +} +func (dst *SetMasterAuthRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMasterAuthRequest.Merge(dst, src) +} +func (m *SetMasterAuthRequest) XXX_Size() int { + return xxx_messageInfo_SetMasterAuthRequest.Size(m) +} +func (m *SetMasterAuthRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMasterAuthRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMasterAuthRequest proto.InternalMessageInfo + +func (m *SetMasterAuthRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetMasterAuthRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetMasterAuthRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMasterAuthRequest) GetAction() SetMasterAuthRequest_Action { + if m != nil { + return m.Action + } + return SetMasterAuthRequest_UNKNOWN +} + +func (m *SetMasterAuthRequest) GetUpdate() *MasterAuth { + if m != nil { + return m.Update + } + return nil +} + +func (m *SetMasterAuthRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// DeleteClusterRequest deletes a cluster. +type DeleteClusterRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to delete. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name (project, location, cluster) of the cluster to delete. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{27} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +func (m *DeleteClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *DeleteClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *DeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListClustersRequest lists clusters. +type ListClustersRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use parent instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides, or "-" for all zones. + // This field is deprecated, use parent instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The parent (project and location) where the clusters will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. 
+ Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{28} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +func (m *ListClustersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListClustersRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListClustersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// ListClustersResponse is the result of ListClustersRequest. +type ListClustersResponse struct { + // A list of clusters in the project in the specified zone, or + // across all ones. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // If any zones are listed here, the list of clusters returned + // may be missing those zones. + MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{29} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// GetOperationRequest gets a single operation. 
+type GetOperationRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The server-assigned `name` of the operation. + // This field is deprecated, use name instead. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // The name (project, location, operation id) of the operation to get. + // Specified in the format 'projects/*/locations/*/operations/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOperationRequest) Reset() { *m = GetOperationRequest{} } +func (m *GetOperationRequest) String() string { return proto.CompactTextString(m) } +func (*GetOperationRequest) ProtoMessage() {} +func (*GetOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{30} +} +func (m *GetOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOperationRequest.Unmarshal(m, b) +} +func (m *GetOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOperationRequest.Marshal(b, m, deterministic) +} +func (dst *GetOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOperationRequest.Merge(dst, src) +} +func (m *GetOperationRequest) XXX_Size() int { + return xxx_messageInfo_GetOperationRequest.Size(m) +} +func (m *GetOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOperationRequest proto.InternalMessageInfo + +func (m *GetOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *GetOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsRequest lists operations. +type ListOperationsRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use parent instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine [zone](/compute/docs/zones#available) + // to return operations for, or `-` for all zones. + // This field is deprecated, use parent instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The parent (project and location) where the operations will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. 
+ Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsRequest) Reset() { *m = ListOperationsRequest{} } +func (m *ListOperationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListOperationsRequest) ProtoMessage() {} +func (*ListOperationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{31} +} +func (m *ListOperationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsRequest.Unmarshal(m, b) +} +func (m *ListOperationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListOperationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsRequest.Merge(dst, src) +} +func (m *ListOperationsRequest) XXX_Size() int { + return xxx_messageInfo_ListOperationsRequest.Size(m) +} +func (m *ListOperationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsRequest proto.InternalMessageInfo + +func (m *ListOperationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListOperationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListOperationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// CancelOperationRequest cancels a single operation. +type CancelOperationRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The server-assigned `name` of the operation. + // This field is deprecated, use name instead. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // The name (project, location, operation id) of the operation to cancel. + // Specified in the format 'projects/*/locations/*/operations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelOperationRequest) Reset() { *m = CancelOperationRequest{} } +func (m *CancelOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CancelOperationRequest) ProtoMessage() {} +func (*CancelOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{32} +} +func (m *CancelOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelOperationRequest.Unmarshal(m, b) +} +func (m *CancelOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CancelOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelOperationRequest.Merge(dst, src) +} +func (m *CancelOperationRequest) XXX_Size() int { + return xxx_messageInfo_CancelOperationRequest.Size(m) +} +func (m *CancelOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelOperationRequest proto.InternalMessageInfo + +func (m *CancelOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CancelOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CancelOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CancelOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsResponse is the result of ListOperationsRequest. +type ListOperationsResponse struct { + // A list of operations in the project in the specified zone. + Operations []*Operation `protobuf:"bytes,1,rep,name=operations,proto3" json:"operations,omitempty"` + // If any zones are listed here, the list of operations returned + // may be missing the operations from those zones. 
+ MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{33} +} +func (m *ListOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsResponse.Unmarshal(m, b) +} +func (m *ListOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsResponse.Merge(dst, src) +} +func (m *ListOperationsResponse) XXX_Size() int { + return xxx_messageInfo_ListOperationsResponse.Size(m) +} +func (m *ListOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsResponse proto.InternalMessageInfo + +func (m *ListOperationsResponse) GetOperations() []*Operation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ListOperationsResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// Gets the current Container Engine service configuration. +type GetServerConfigRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine [zone](/compute/docs/zones#available) + // to return operations for. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name (project and location) of the server config to get + // Specified in the format 'projects/*/locations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerConfigRequest) Reset() { *m = GetServerConfigRequest{} } +func (m *GetServerConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetServerConfigRequest) ProtoMessage() {} +func (*GetServerConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{34} +} +func (m *GetServerConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerConfigRequest.Unmarshal(m, b) +} +func (m *GetServerConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetServerConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerConfigRequest.Merge(dst, src) +} +func (m *GetServerConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetServerConfigRequest.Size(m) +} +func (m *GetServerConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerConfigRequest proto.InternalMessageInfo + +func (m *GetServerConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetServerConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetServerConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Container Engine service configuration. +type ServerConfig struct { + // Version of Kubernetes the service deploys by default. + DefaultClusterVersion string `protobuf:"bytes,1,opt,name=default_cluster_version,json=defaultClusterVersion,proto3" json:"default_cluster_version,omitempty"` + // List of valid node upgrade target versions. + ValidNodeVersions []string `protobuf:"bytes,3,rep,name=valid_node_versions,json=validNodeVersions,proto3" json:"valid_node_versions,omitempty"` + // Default image type. + DefaultImageType string `protobuf:"bytes,4,opt,name=default_image_type,json=defaultImageType,proto3" json:"default_image_type,omitempty"` + // List of valid image types. + ValidImageTypes []string `protobuf:"bytes,5,rep,name=valid_image_types,json=validImageTypes,proto3" json:"valid_image_types,omitempty"` + // List of valid master versions. 
+ ValidMasterVersions []string `protobuf:"bytes,6,rep,name=valid_master_versions,json=validMasterVersions,proto3" json:"valid_master_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerConfig) Reset() { *m = ServerConfig{} } +func (m *ServerConfig) String() string { return proto.CompactTextString(m) } +func (*ServerConfig) ProtoMessage() {} +func (*ServerConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{35} +} +func (m *ServerConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerConfig.Unmarshal(m, b) +} +func (m *ServerConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerConfig.Marshal(b, m, deterministic) +} +func (dst *ServerConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerConfig.Merge(dst, src) +} +func (m *ServerConfig) XXX_Size() int { + return xxx_messageInfo_ServerConfig.Size(m) +} +func (m *ServerConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ServerConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerConfig proto.InternalMessageInfo + +func (m *ServerConfig) GetDefaultClusterVersion() string { + if m != nil { + return m.DefaultClusterVersion + } + return "" +} + +func (m *ServerConfig) GetValidNodeVersions() []string { + if m != nil { + return m.ValidNodeVersions + } + return nil +} + +func (m *ServerConfig) GetDefaultImageType() string { + if m != nil { + return m.DefaultImageType + } + return "" +} + +func (m *ServerConfig) GetValidImageTypes() []string { + if m != nil { + return m.ValidImageTypes + } + return nil +} + +func (m *ServerConfig) GetValidMasterVersions() []string { + if m != nil { + return m.ValidMasterVersions + } + return nil +} + +// CreateNodePoolRequest creates a node pool for a cluster. +type CreateNodePoolRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use parent instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use parent instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use parent instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The node pool to create. + NodePool *NodePool `protobuf:"bytes,4,opt,name=node_pool,json=nodePool,proto3" json:"node_pool,omitempty"` + // The parent (project, location, cluster id) where the node pool will be created. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Parent string `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNodePoolRequest) Reset() { *m = CreateNodePoolRequest{} } +func (m *CreateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNodePoolRequest) ProtoMessage() {} +func (*CreateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{36} +} +func (m *CreateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNodePoolRequest.Unmarshal(m, b) +} +func (m *CreateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNodePoolRequest.Merge(dst, src) +} +func (m *CreateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_CreateNodePoolRequest.Size(m) +} +func (m *CreateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNodePoolRequest proto.InternalMessageInfo + +func (m *CreateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CreateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CreateNodePoolRequest) GetNodePool() *NodePool { + if m != nil { + return m.NodePool + } + return nil +} + +func (m *CreateNodePoolRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// DeleteNodePoolRequest deletes a node pool for a cluster. +type DeleteNodePoolRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name of the node pool to delete. + // This field is deprecated, use name instead. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to delete. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNodePoolRequest) Reset() { *m = DeleteNodePoolRequest{} } +func (m *DeleteNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNodePoolRequest) ProtoMessage() {} +func (*DeleteNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{37} +} +func (m *DeleteNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNodePoolRequest.Unmarshal(m, b) +} +func (m *DeleteNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNodePoolRequest.Merge(dst, src) +} +func (m *DeleteNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNodePoolRequest.Size(m) +} +func (m *DeleteNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNodePoolRequest proto.InternalMessageInfo + +func (m *DeleteNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *DeleteNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *DeleteNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *DeleteNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListNodePoolsRequest lists the node pool(s) for a cluster. +type ListNodePoolsRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use parent instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use parent instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use parent instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The parent (project, location, cluster id) where the node pools will be listed. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNodePoolsRequest) Reset() { *m = ListNodePoolsRequest{} } +func (m *ListNodePoolsRequest) String() string { return proto.CompactTextString(m) } +func (*ListNodePoolsRequest) ProtoMessage() {} +func (*ListNodePoolsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{38} +} +func (m *ListNodePoolsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNodePoolsRequest.Unmarshal(m, b) +} +func (m *ListNodePoolsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNodePoolsRequest.Marshal(b, m, deterministic) +} +func (dst *ListNodePoolsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNodePoolsRequest.Merge(dst, src) +} +func (m *ListNodePoolsRequest) XXX_Size() int { + return xxx_messageInfo_ListNodePoolsRequest.Size(m) +} +func (m *ListNodePoolsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNodePoolsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNodePoolsRequest proto.InternalMessageInfo + +func (m *ListNodePoolsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListNodePoolsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListNodePoolsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *ListNodePoolsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetNodePoolRequest retrieves a node pool for a cluster. +type GetNodePoolRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name of the node pool. + // This field is deprecated, use name instead. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to get. + // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNodePoolRequest) Reset() { *m = GetNodePoolRequest{} } +func (m *GetNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*GetNodePoolRequest) ProtoMessage() {} +func (*GetNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{39} +} +func (m *GetNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNodePoolRequest.Unmarshal(m, b) +} +func (m *GetNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *GetNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNodePoolRequest.Merge(dst, src) +} +func (m *GetNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_GetNodePoolRequest.Size(m) +} +func (m *GetNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNodePoolRequest proto.InternalMessageInfo + +func (m *GetNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *GetNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *GetNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// NodePool contains the name and configuration for a cluster's node pool. +// Node pools are a set of nodes (i.e. VM's), with a common configuration and +// specification, under the control of the cluster master. They may have a set +// of Kubernetes labels applied to them, which may be used to reference them +// during pod scheduling. They may also be resized up or down, to accommodate +// the workload. +type NodePool struct { + // The name of the node pool. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The node configuration of the pool. + Config *NodeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // The initial node count for the pool. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. You must also have available + // firewall and routes quota. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // Autoscaler configuration for this NodePool. Autoscaler is enabled + // only if a valid configuration is present. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,4,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // NodeManagement configuration for this NodePool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // [Output only] The version of the Kubernetes of this node. 
+ Version string `protobuf:"bytes,101,opt,name=version,proto3" json:"version,omitempty"` + // [Output only] The resource URLs of [instance + // groups](/compute/docs/instance-groups/) associated with this + // node pool. + InstanceGroupUrls []string `protobuf:"bytes,102,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` + // [Output only] The status of the nodes in this pool instance. + Status NodePool_Status `protobuf:"varint,103,opt,name=status,proto3,enum=google.container.v1alpha1.NodePool_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // node pool instance, if available. + StatusMessage string `protobuf:"bytes,104,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePool) Reset() { *m = NodePool{} } +func (m *NodePool) String() string { return proto.CompactTextString(m) } +func (*NodePool) ProtoMessage() {} +func (*NodePool) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{40} +} +func (m *NodePool) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePool.Unmarshal(m, b) +} +func (m *NodePool) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePool.Marshal(b, m, deterministic) +} +func (dst *NodePool) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePool.Merge(dst, src) +} +func (m *NodePool) XXX_Size() int { + return xxx_messageInfo_NodePool.Size(m) +} +func (m *NodePool) XXX_DiscardUnknown() { + xxx_messageInfo_NodePool.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePool proto.InternalMessageInfo + +func (m *NodePool) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NodePool) GetConfig() *NodeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *NodePool) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *NodePool) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *NodePool) GetManagement() *NodeManagement { + if m != nil { + return m.Management + } + return nil +} + +func (m *NodePool) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *NodePool) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *NodePool) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +func (m *NodePool) GetStatus() NodePool_Status { + if m != nil { + return m.Status + } + return NodePool_STATUS_UNSPECIFIED +} + +func (m *NodePool) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +// NodeManagement defines the set of node management services turned on for the +// node pool. +type NodeManagement struct { + // Whether the nodes will be automatically upgraded. + AutoUpgrade bool `protobuf:"varint,1,opt,name=auto_upgrade,json=autoUpgrade,proto3" json:"auto_upgrade,omitempty"` + // Whether the nodes will be automatically repaired. + AutoRepair bool `protobuf:"varint,2,opt,name=auto_repair,json=autoRepair,proto3" json:"auto_repair,omitempty"` + // Specifies the Auto Upgrade knobs for the node pool. 
+ UpgradeOptions *AutoUpgradeOptions `protobuf:"bytes,10,opt,name=upgrade_options,json=upgradeOptions,proto3" json:"upgrade_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeManagement) Reset() { *m = NodeManagement{} } +func (m *NodeManagement) String() string { return proto.CompactTextString(m) } +func (*NodeManagement) ProtoMessage() {} +func (*NodeManagement) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{41} +} +func (m *NodeManagement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeManagement.Unmarshal(m, b) +} +func (m *NodeManagement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeManagement.Marshal(b, m, deterministic) +} +func (dst *NodeManagement) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeManagement.Merge(dst, src) +} +func (m *NodeManagement) XXX_Size() int { + return xxx_messageInfo_NodeManagement.Size(m) +} +func (m *NodeManagement) XXX_DiscardUnknown() { + xxx_messageInfo_NodeManagement.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeManagement proto.InternalMessageInfo + +func (m *NodeManagement) GetAutoUpgrade() bool { + if m != nil { + return m.AutoUpgrade + } + return false +} + +func (m *NodeManagement) GetAutoRepair() bool { + if m != nil { + return m.AutoRepair + } + return false +} + +func (m *NodeManagement) GetUpgradeOptions() *AutoUpgradeOptions { + if m != nil { + return m.UpgradeOptions + } + return nil +} + +// AutoUpgradeOptions defines the set of options for the user to control how +// the Auto Upgrades will proceed. +type AutoUpgradeOptions struct { + // [Output only] This field is set when upgrades are about to commence + // with the approximate start time for the upgrades, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + AutoUpgradeStartTime string `protobuf:"bytes,1,opt,name=auto_upgrade_start_time,json=autoUpgradeStartTime,proto3" json:"auto_upgrade_start_time,omitempty"` + // [Output only] This field is set when upgrades are about to commence + // with the description of the upgrade. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AutoUpgradeOptions) Reset() { *m = AutoUpgradeOptions{} } +func (m *AutoUpgradeOptions) String() string { return proto.CompactTextString(m) } +func (*AutoUpgradeOptions) ProtoMessage() {} +func (*AutoUpgradeOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{42} +} +func (m *AutoUpgradeOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AutoUpgradeOptions.Unmarshal(m, b) +} +func (m *AutoUpgradeOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AutoUpgradeOptions.Marshal(b, m, deterministic) +} +func (dst *AutoUpgradeOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_AutoUpgradeOptions.Merge(dst, src) +} +func (m *AutoUpgradeOptions) XXX_Size() int { + return xxx_messageInfo_AutoUpgradeOptions.Size(m) +} +func (m *AutoUpgradeOptions) XXX_DiscardUnknown() { + xxx_messageInfo_AutoUpgradeOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_AutoUpgradeOptions proto.InternalMessageInfo + +func (m *AutoUpgradeOptions) GetAutoUpgradeStartTime() string { + if m != nil { + return m.AutoUpgradeStartTime + } + return "" +} + +func (m *AutoUpgradeOptions) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// MaintenancePolicy defines the maintenance policy to be used for the cluster. +type MaintenancePolicy struct { + // Specifies the maintenance window in which maintenance may be performed. + Window *MaintenanceWindow `protobuf:"bytes,1,opt,name=window,proto3" json:"window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenancePolicy) Reset() { *m = MaintenancePolicy{} } +func (m *MaintenancePolicy) String() string { return proto.CompactTextString(m) } +func (*MaintenancePolicy) ProtoMessage() {} +func (*MaintenancePolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{43} +} +func (m *MaintenancePolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenancePolicy.Unmarshal(m, b) +} +func (m *MaintenancePolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenancePolicy.Marshal(b, m, deterministic) +} +func (dst *MaintenancePolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenancePolicy.Merge(dst, src) +} +func (m *MaintenancePolicy) XXX_Size() int { + return xxx_messageInfo_MaintenancePolicy.Size(m) +} +func (m *MaintenancePolicy) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenancePolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenancePolicy proto.InternalMessageInfo + +func (m *MaintenancePolicy) GetWindow() *MaintenanceWindow { + if m != nil { + return m.Window + } + return nil +} + +// MaintenanceWindow defines the maintenance window to be used for the cluster. +type MaintenanceWindow struct { + // Unimplemented, reserved for future use. 
+ // HourlyMaintenanceWindow hourly_maintenance_window = 1; + // + // Types that are valid to be assigned to Policy: + // *MaintenanceWindow_DailyMaintenanceWindow + Policy isMaintenanceWindow_Policy `protobuf_oneof:"policy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenanceWindow) Reset() { *m = MaintenanceWindow{} } +func (m *MaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*MaintenanceWindow) ProtoMessage() {} +func (*MaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{44} +} +func (m *MaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenanceWindow.Unmarshal(m, b) +} +func (m *MaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *MaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenanceWindow.Merge(dst, src) +} +func (m *MaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_MaintenanceWindow.Size(m) +} +func (m *MaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenanceWindow proto.InternalMessageInfo + +type isMaintenanceWindow_Policy interface { + isMaintenanceWindow_Policy() +} + +type MaintenanceWindow_DailyMaintenanceWindow struct { + DailyMaintenanceWindow *DailyMaintenanceWindow `protobuf:"bytes,2,opt,name=daily_maintenance_window,json=dailyMaintenanceWindow,proto3,oneof"` +} + +func (*MaintenanceWindow_DailyMaintenanceWindow) isMaintenanceWindow_Policy() {} + +func (m *MaintenanceWindow) GetPolicy() isMaintenanceWindow_Policy { + if m != nil { + return m.Policy + } + return nil +} + +func (m *MaintenanceWindow) GetDailyMaintenanceWindow() *DailyMaintenanceWindow { + if x, ok := m.GetPolicy().(*MaintenanceWindow_DailyMaintenanceWindow); ok { + return x.DailyMaintenanceWindow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*MaintenanceWindow) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _MaintenanceWindow_OneofMarshaler, _MaintenanceWindow_OneofUnmarshaler, _MaintenanceWindow_OneofSizer, []interface{}{ + (*MaintenanceWindow_DailyMaintenanceWindow)(nil), + } +} + +func _MaintenanceWindow_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*MaintenanceWindow) + // policy + switch x := m.Policy.(type) { + case *MaintenanceWindow_DailyMaintenanceWindow: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DailyMaintenanceWindow); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("MaintenanceWindow.Policy has unexpected type %T", x) + } + return nil +} + +func _MaintenanceWindow_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*MaintenanceWindow) + switch tag { + case 2: // policy.daily_maintenance_window + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DailyMaintenanceWindow) + err := b.DecodeMessage(msg) + m.Policy = &MaintenanceWindow_DailyMaintenanceWindow{msg} + return true, err + default: + return false, nil + } +} + +func _MaintenanceWindow_OneofSizer(msg proto.Message) (n int) { + m := msg.(*MaintenanceWindow) + // policy + switch x := m.Policy.(type) { + case *MaintenanceWindow_DailyMaintenanceWindow: + s := proto.Size(x.DailyMaintenanceWindow) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Time window specified for daily maintenance operations. +type DailyMaintenanceWindow struct { + // Time within the maintenance window to start the maintenance operations. + // It must be in format "HH:MM”, where HH : [00-23] and MM : [00-59] GMT. + StartTime string `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // [Output only] Duration of the time window, automatically chosen to be + // smallest possible in the given scenario. 
+ Duration string `protobuf:"bytes,3,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DailyMaintenanceWindow) Reset() { *m = DailyMaintenanceWindow{} } +func (m *DailyMaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*DailyMaintenanceWindow) ProtoMessage() {} +func (*DailyMaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{45} +} +func (m *DailyMaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DailyMaintenanceWindow.Unmarshal(m, b) +} +func (m *DailyMaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DailyMaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *DailyMaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_DailyMaintenanceWindow.Merge(dst, src) +} +func (m *DailyMaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_DailyMaintenanceWindow.Size(m) +} +func (m *DailyMaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_DailyMaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_DailyMaintenanceWindow proto.InternalMessageInfo + +func (m *DailyMaintenanceWindow) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *DailyMaintenanceWindow) GetDuration() string { + if m != nil { + return m.Duration + } + return "" +} + +// SetNodePoolManagementRequest sets the node management properties of a node +// pool. +type SetNodePoolManagementRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to update. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name of the node pool to update. + // This field is deprecated, use name instead. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` + // NodeManagement configuration for the node pool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to set + // management properties. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *SetNodePoolManagementRequest) Reset() { *m = SetNodePoolManagementRequest{} }
+func (m *SetNodePoolManagementRequest) String() string { return proto.CompactTextString(m) }
+func (*SetNodePoolManagementRequest) ProtoMessage() {}
+func (*SetNodePoolManagementRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_e36c06b9432393ee, []int{46}
+}
+func (m *SetNodePoolManagementRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Unmarshal(m, b)
+}
+func (m *SetNodePoolManagementRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetNodePoolManagementRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_SetNodePoolManagementRequest.Merge(dst, src)
+}
+func (m *SetNodePoolManagementRequest) XXX_Size() int {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Size(m)
+}
+func (m *SetNodePoolManagementRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_SetNodePoolManagementRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNodePoolManagementRequest proto.InternalMessageInfo
+
+func (m *SetNodePoolManagementRequest) GetProjectId() string {
+ if m != nil {
+ return m.ProjectId
+ }
+ return ""
+}
+
+func (m *SetNodePoolManagementRequest) GetZone() string {
+ if m != nil {
+ return m.Zone
+ }
+ return ""
+}
+
+func (m *SetNodePoolManagementRequest) GetClusterId() string {
+ if m != nil {
+ return m.ClusterId
+ }
+ return ""
+}
+
+func (m *SetNodePoolManagementRequest) GetNodePoolId() string {
+ if m != nil {
+ return m.NodePoolId
+ }
+ return ""
+}
+
+func (m *SetNodePoolManagementRequest) GetManagement() *NodeManagement {
+ if m != nil {
+ return m.Management
+ }
+ return nil
+}
+
+func (m *SetNodePoolManagementRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// SetNodePoolSizeRequest sets the size of a node
+// pool.
+type SetNodePoolSizeRequest struct {
+ // The Google Developers Console [project ID or project
+ // number](https://support.google.com/cloud/answer/6158840).
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
+ // The name of the Google Compute Engine
+ // [zone](/compute/docs/zones#available) in which the cluster
+ // resides.
+ // This field is deprecated, use name instead.
+ Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"`
+ // The name of the cluster to update.
+ // This field is deprecated, use name instead.
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"`
+ // The name of the node pool to update.
+ // This field is deprecated, use name instead.
+ NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"`
+ // The desired node count for the pool.
+ NodeCount int32 `protobuf:"varint,5,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"`
+ // The name (project, location, cluster, node pool id) of the node pool to set
+ // size.
+ // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *SetNodePoolSizeRequest) Reset() { *m = SetNodePoolSizeRequest{} }
+func (m *SetNodePoolSizeRequest) String() string { return proto.CompactTextString(m) }
+func (*SetNodePoolSizeRequest) ProtoMessage() {}
+func (*SetNodePoolSizeRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_e36c06b9432393ee, []int{47}
+}
+func (m *SetNodePoolSizeRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Unmarshal(m, b)
+}
+func (m *SetNodePoolSizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetNodePoolSizeRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_SetNodePoolSizeRequest.Merge(dst, src)
+}
+func (m *SetNodePoolSizeRequest) XXX_Size() int {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Size(m)
+}
+func (m *SetNodePoolSizeRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_SetNodePoolSizeRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNodePoolSizeRequest proto.InternalMessageInfo
+
+func (m *SetNodePoolSizeRequest) GetProjectId() string {
+ if m != nil {
+ return m.ProjectId
+ }
+ return ""
+}
+
+func (m *SetNodePoolSizeRequest) GetZone() string {
+ if m != nil {
+ return m.Zone
+ }
+ return ""
+}
+
+func (m *SetNodePoolSizeRequest) GetClusterId() string {
+ if m != nil {
+ return m.ClusterId
+ }
+ return ""
+}
+
+func (m *SetNodePoolSizeRequest) GetNodePoolId() string {
+ if m != nil {
+ return m.NodePoolId
+ }
+ return ""
+}
+
+func (m *SetNodePoolSizeRequest) GetNodeCount() int32 {
+ if m != nil {
+ return m.NodeCount
+ }
+ return 0
+}
+
+func (m *SetNodePoolSizeRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// RollbackNodePoolUpgradeRequest rolls back the previously Aborted or Failed
+// NodePool upgrade. This will be a no-op if the last upgrade successfully
+// completed.
+type RollbackNodePoolUpgradeRequest struct {
+ // The Google Developers Console [project ID or project
+ // number](https://support.google.com/cloud/answer/6158840).
+ // This field is deprecated, use name instead.
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
+ // The name of the Google Compute Engine
+ // [zone](/compute/docs/zones#available) in which the cluster
+ // resides.
+ // This field is deprecated, use name instead.
+ Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"`
+ // The name of the cluster to rollback.
+ // This field is deprecated, use name instead.
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"`
+ // The name of the node pool to rollback.
+ // This field is deprecated, use name instead.
+ NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"`
+ // The name (project, location, cluster, node pool id) of the node pool to
+ // rollback upgrade.
+ // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackNodePoolUpgradeRequest) Reset() { *m = RollbackNodePoolUpgradeRequest{} } +func (m *RollbackNodePoolUpgradeRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackNodePoolUpgradeRequest) ProtoMessage() {} +func (*RollbackNodePoolUpgradeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{48} +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Unmarshal(m, b) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackNodePoolUpgradeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackNodePoolUpgradeRequest.Merge(dst, src) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_Size() int { + return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Size(m) +} +func (m *RollbackNodePoolUpgradeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackNodePoolUpgradeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackNodePoolUpgradeRequest proto.InternalMessageInfo + +func (m *RollbackNodePoolUpgradeRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RollbackNodePoolUpgradeRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *RollbackNodePoolUpgradeRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *RollbackNodePoolUpgradeRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *RollbackNodePoolUpgradeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListNodePoolsResponse is the result of ListNodePoolsRequest. +type ListNodePoolsResponse struct { + // A list of node pools for a cluster. 
+ NodePools []*NodePool `protobuf:"bytes,1,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ListNodePoolsResponse) Reset() { *m = ListNodePoolsResponse{} }
+func (m *ListNodePoolsResponse) String() string { return proto.CompactTextString(m) }
+func (*ListNodePoolsResponse) ProtoMessage() {}
+func (*ListNodePoolsResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_e36c06b9432393ee, []int{49}
+}
+func (m *ListNodePoolsResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_ListNodePoolsResponse.Unmarshal(m, b)
+}
+func (m *ListNodePoolsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_ListNodePoolsResponse.Marshal(b, m, deterministic)
+}
+func (dst *ListNodePoolsResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_ListNodePoolsResponse.Merge(dst, src)
+}
+func (m *ListNodePoolsResponse) XXX_Size() int {
+ return xxx_messageInfo_ListNodePoolsResponse.Size(m)
+}
+func (m *ListNodePoolsResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_ListNodePoolsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ListNodePoolsResponse proto.InternalMessageInfo
+
+func (m *ListNodePoolsResponse) GetNodePools() []*NodePool {
+ if m != nil {
+ return m.NodePools
+ }
+ return nil
+}
+
+// NodePoolAutoscaling contains information required by cluster autoscaler to
+// adjust the size of the node pool to the current cluster usage.
+type NodePoolAutoscaling struct {
+ // Is autoscaling enabled for this node pool.
+ Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"`
+ // Minimum number of nodes in the NodePool. Must be >= 1 and <=
+ // max_node_count.
+ MinNodeCount int32 `protobuf:"varint,2,opt,name=min_node_count,json=minNodeCount,proto3" json:"min_node_count,omitempty"`
+ // Maximum number of nodes in the NodePool. Must be >= min_node_count. There
+ // has to be enough quota to scale up the cluster. 
+ MaxNodeCount int32 `protobuf:"varint,3,opt,name=max_node_count,json=maxNodeCount,proto3" json:"max_node_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePoolAutoscaling) Reset() { *m = NodePoolAutoscaling{} } +func (m *NodePoolAutoscaling) String() string { return proto.CompactTextString(m) } +func (*NodePoolAutoscaling) ProtoMessage() {} +func (*NodePoolAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{50} +} +func (m *NodePoolAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePoolAutoscaling.Unmarshal(m, b) +} +func (m *NodePoolAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePoolAutoscaling.Marshal(b, m, deterministic) +} +func (dst *NodePoolAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePoolAutoscaling.Merge(dst, src) +} +func (m *NodePoolAutoscaling) XXX_Size() int { + return xxx_messageInfo_NodePoolAutoscaling.Size(m) +} +func (m *NodePoolAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_NodePoolAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePoolAutoscaling proto.InternalMessageInfo + +func (m *NodePoolAutoscaling) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *NodePoolAutoscaling) GetMinNodeCount() int32 { + if m != nil { + return m.MinNodeCount + } + return 0 +} + +func (m *NodePoolAutoscaling) GetMaxNodeCount() int32 { + if m != nil { + return m.MaxNodeCount + } + return 0 +} + +// SetLabelsRequest sets the Google Cloud Platform labels on a Google Container +// Engine cluster, which will in turn set them for Google Compute Engine +// resources used by that cluster +type SetLabelsRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The labels to set for that cluster. + ResourceLabels map[string]string `protobuf:"bytes,4,rep,name=resource_labels,json=resourceLabels,proto3" json:"resource_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The fingerprint of the previous set of labels for this resource, + // used to detect conflicts. The fingerprint is initially generated by + // Container Engine and changes after every request to modify or update + // labels. You must always provide an up-to-date fingerprint hash when + // updating or changing labels. Make a get() request to the + // resource to get the latest fingerprint. + LabelFingerprint string `protobuf:"bytes,5,opt,name=label_fingerprint,json=labelFingerprint,proto3" json:"label_fingerprint,omitempty"` + // The name (project, location, cluster id) of the cluster to set labels. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLabelsRequest) Reset() { *m = SetLabelsRequest{} } +func (m *SetLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLabelsRequest) ProtoMessage() {} +func (*SetLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{51} +} +func (m *SetLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLabelsRequest.Unmarshal(m, b) +} +func (m *SetLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLabelsRequest.Merge(dst, src) +} +func (m *SetLabelsRequest) XXX_Size() int { + return xxx_messageInfo_SetLabelsRequest.Size(m) +} +func (m *SetLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLabelsRequest proto.InternalMessageInfo + +func (m *SetLabelsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLabelsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetLabelsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLabelsRequest) GetResourceLabels() map[string]string { + if m != nil { + return m.ResourceLabels + } + return nil +} + +func (m *SetLabelsRequest) GetLabelFingerprint() string { + if m != nil { + return m.LabelFingerprint + } + return "" +} + +func (m *SetLabelsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLegacyAbacRequest enables or disables the ABAC authorization mechanism for +// a cluster. +type SetLegacyAbacRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to update. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // Whether ABAC authorization will be enabled in the cluster. + Enabled bool `protobuf:"varint,4,opt,name=enabled,proto3" json:"enabled,omitempty"` + // The name (project, location, cluster id) of the cluster to set legacy abac. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLegacyAbacRequest) Reset() { *m = SetLegacyAbacRequest{} } +func (m *SetLegacyAbacRequest) String() string { return proto.CompactTextString(m) } +func (*SetLegacyAbacRequest) ProtoMessage() {} +func (*SetLegacyAbacRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{52} +} +func (m *SetLegacyAbacRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLegacyAbacRequest.Unmarshal(m, b) +} +func (m *SetLegacyAbacRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLegacyAbacRequest.Marshal(b, m, deterministic) +} +func (dst *SetLegacyAbacRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLegacyAbacRequest.Merge(dst, src) +} +func (m *SetLegacyAbacRequest) XXX_Size() int { + return xxx_messageInfo_SetLegacyAbacRequest.Size(m) +} +func (m *SetLegacyAbacRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLegacyAbacRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLegacyAbacRequest proto.InternalMessageInfo + +func (m *SetLegacyAbacRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLegacyAbacRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetLegacyAbacRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLegacyAbacRequest) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *SetLegacyAbacRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// StartIPRotationRequest creates a new IP for the cluster and then performs +// a node upgrade on each node pool to point to the new IP. +type StartIPRotationRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name (project, location, cluster id) of the cluster to start IP rotation. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartIPRotationRequest) Reset() { *m = StartIPRotationRequest{} } +func (m *StartIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*StartIPRotationRequest) ProtoMessage() {} +func (*StartIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{53} +} +func (m *StartIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartIPRotationRequest.Unmarshal(m, b) +} +func (m *StartIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *StartIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartIPRotationRequest.Merge(dst, src) +} +func (m *StartIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_StartIPRotationRequest.Size(m) +} +func (m *StartIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartIPRotationRequest proto.InternalMessageInfo + +func (m *StartIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *StartIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *StartIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *StartIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// CompleteIPRotationRequest moves the cluster master back into single-IP mode. +type CompleteIPRotationRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The name (project, location, cluster id) of the cluster to complete IP rotation. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteIPRotationRequest) Reset() { *m = CompleteIPRotationRequest{} } +func (m *CompleteIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*CompleteIPRotationRequest) ProtoMessage() {} +func (*CompleteIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{54} +} +func (m *CompleteIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteIPRotationRequest.Unmarshal(m, b) +} +func (m *CompleteIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *CompleteIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteIPRotationRequest.Merge(dst, src) +} +func (m *CompleteIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_CompleteIPRotationRequest.Size(m) +} +func (m *CompleteIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteIPRotationRequest proto.InternalMessageInfo + +func (m *CompleteIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CompleteIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CompleteIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CompleteIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// AcceleratorConfig represents a Hardware Accelerator request. +type AcceleratorConfig struct { + // The number of the accelerator cards exposed to an instance. + AcceleratorCount int64 `protobuf:"varint,1,opt,name=accelerator_count,json=acceleratorCount,proto3" json:"accelerator_count,omitempty"` + // The accelerator type resource name. 
List of supported accelerators + // [here](/compute/docs/gpus/#Introduction) + AcceleratorType string `protobuf:"bytes,2,opt,name=accelerator_type,json=acceleratorType,proto3" json:"accelerator_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcceleratorConfig) Reset() { *m = AcceleratorConfig{} } +func (m *AcceleratorConfig) String() string { return proto.CompactTextString(m) } +func (*AcceleratorConfig) ProtoMessage() {} +func (*AcceleratorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{55} +} +func (m *AcceleratorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcceleratorConfig.Unmarshal(m, b) +} +func (m *AcceleratorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcceleratorConfig.Marshal(b, m, deterministic) +} +func (dst *AcceleratorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcceleratorConfig.Merge(dst, src) +} +func (m *AcceleratorConfig) XXX_Size() int { + return xxx_messageInfo_AcceleratorConfig.Size(m) +} +func (m *AcceleratorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AcceleratorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AcceleratorConfig proto.InternalMessageInfo + +func (m *AcceleratorConfig) GetAcceleratorCount() int64 { + if m != nil { + return m.AcceleratorCount + } + return 0 +} + +func (m *AcceleratorConfig) GetAcceleratorType() string { + if m != nil { + return m.AcceleratorType + } + return "" +} + +// SetNetworkPolicyRequest enables/disables network policy for a cluster. +type SetNetworkPolicyRequest struct { + // The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field is deprecated, use name instead. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use name instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster. + // This field is deprecated, use name instead. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,4,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set networking + // policy. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNetworkPolicyRequest) Reset() { *m = SetNetworkPolicyRequest{} } +func (m *SetNetworkPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetNetworkPolicyRequest) ProtoMessage() {} +func (*SetNetworkPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{56} +} +func (m *SetNetworkPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNetworkPolicyRequest.Unmarshal(m, b) +} +func (m *SetNetworkPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNetworkPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetNetworkPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNetworkPolicyRequest.Merge(dst, src) +} +func (m *SetNetworkPolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetNetworkPolicyRequest.Size(m) +} +func (m *SetNetworkPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNetworkPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNetworkPolicyRequest proto.InternalMessageInfo + +func (m *SetNetworkPolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetNetworkPolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetNetworkPolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetNetworkPolicyRequest) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *SetNetworkPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMaintenancePolicyRequest sets the maintenance policy for a cluster. +type SetMaintenancePolicyRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to update. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The maintenance policy to be set for the cluster. An empty field + // clears the existing maintenance policy. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,4,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set maintenance + // policy. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMaintenancePolicyRequest) Reset() { *m = SetMaintenancePolicyRequest{} } +func (m *SetMaintenancePolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetMaintenancePolicyRequest) ProtoMessage() {} +func (*SetMaintenancePolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_e36c06b9432393ee, []int{57} +} +func (m *SetMaintenancePolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMaintenancePolicyRequest.Unmarshal(m, b) +} +func (m *SetMaintenancePolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMaintenancePolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetMaintenancePolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMaintenancePolicyRequest.Merge(dst, src) +} +func (m *SetMaintenancePolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetMaintenancePolicyRequest.Size(m) +} +func (m *SetMaintenancePolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMaintenancePolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMaintenancePolicyRequest proto.InternalMessageInfo + +func (m *SetMaintenancePolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *SetMaintenancePolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*NodeConfig)(nil), "google.container.v1alpha1.NodeConfig") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1alpha1.NodeConfig.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1alpha1.NodeConfig.MetadataEntry") + proto.RegisterType((*NodeTaint)(nil), "google.container.v1alpha1.NodeTaint") + proto.RegisterType((*MasterAuth)(nil), "google.container.v1alpha1.MasterAuth") + proto.RegisterType((*ClientCertificateConfig)(nil), "google.container.v1alpha1.ClientCertificateConfig") + proto.RegisterType((*AddonsConfig)(nil), "google.container.v1alpha1.AddonsConfig") + proto.RegisterType((*HttpLoadBalancing)(nil), "google.container.v1alpha1.HttpLoadBalancing") + proto.RegisterType((*HorizontalPodAutoscaling)(nil), "google.container.v1alpha1.HorizontalPodAutoscaling") + proto.RegisterType((*KubernetesDashboard)(nil), "google.container.v1alpha1.KubernetesDashboard") + proto.RegisterType((*NetworkPolicyConfig)(nil), "google.container.v1alpha1.NetworkPolicyConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig)(nil), "google.container.v1alpha1.MasterAuthorizedNetworksConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig_CidrBlock)(nil), "google.container.v1alpha1.MasterAuthorizedNetworksConfig.CidrBlock") + proto.RegisterType((*NetworkPolicy)(nil), "google.container.v1alpha1.NetworkPolicy") + proto.RegisterType((*IPAllocationPolicy)(nil), "google.container.v1alpha1.IPAllocationPolicy") + proto.RegisterType((*PodSecurityPolicyConfig)(nil), 
"google.container.v1alpha1.PodSecurityPolicyConfig") + proto.RegisterType((*Cluster)(nil), "google.container.v1alpha1.Cluster") + proto.RegisterType((*ClusterUpdate)(nil), "google.container.v1alpha1.ClusterUpdate") + proto.RegisterType((*Operation)(nil), "google.container.v1alpha1.Operation") + proto.RegisterType((*CreateClusterRequest)(nil), "google.container.v1alpha1.CreateClusterRequest") + proto.RegisterType((*GetClusterRequest)(nil), "google.container.v1alpha1.GetClusterRequest") + proto.RegisterType((*UpdateClusterRequest)(nil), "google.container.v1alpha1.UpdateClusterRequest") + proto.RegisterType((*UpdateNodePoolRequest)(nil), "google.container.v1alpha1.UpdateNodePoolRequest") + proto.RegisterType((*SetNodePoolAutoscalingRequest)(nil), "google.container.v1alpha1.SetNodePoolAutoscalingRequest") + proto.RegisterType((*SetLoggingServiceRequest)(nil), "google.container.v1alpha1.SetLoggingServiceRequest") + proto.RegisterType((*SetMonitoringServiceRequest)(nil), "google.container.v1alpha1.SetMonitoringServiceRequest") + proto.RegisterType((*SetAddonsConfigRequest)(nil), "google.container.v1alpha1.SetAddonsConfigRequest") + proto.RegisterType((*SetLocationsRequest)(nil), "google.container.v1alpha1.SetLocationsRequest") + proto.RegisterType((*UpdateMasterRequest)(nil), "google.container.v1alpha1.UpdateMasterRequest") + proto.RegisterType((*SetMasterAuthRequest)(nil), "google.container.v1alpha1.SetMasterAuthRequest") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.container.v1alpha1.DeleteClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.container.v1alpha1.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.container.v1alpha1.ListClustersResponse") + proto.RegisterType((*GetOperationRequest)(nil), "google.container.v1alpha1.GetOperationRequest") + proto.RegisterType((*ListOperationsRequest)(nil), "google.container.v1alpha1.ListOperationsRequest") + proto.RegisterType((*CancelOperationRequest)(nil), "google.container.v1alpha1.CancelOperationRequest") + proto.RegisterType((*ListOperationsResponse)(nil), "google.container.v1alpha1.ListOperationsResponse") + proto.RegisterType((*GetServerConfigRequest)(nil), "google.container.v1alpha1.GetServerConfigRequest") + proto.RegisterType((*ServerConfig)(nil), "google.container.v1alpha1.ServerConfig") + proto.RegisterType((*CreateNodePoolRequest)(nil), "google.container.v1alpha1.CreateNodePoolRequest") + proto.RegisterType((*DeleteNodePoolRequest)(nil), "google.container.v1alpha1.DeleteNodePoolRequest") + proto.RegisterType((*ListNodePoolsRequest)(nil), "google.container.v1alpha1.ListNodePoolsRequest") + proto.RegisterType((*GetNodePoolRequest)(nil), "google.container.v1alpha1.GetNodePoolRequest") + proto.RegisterType((*NodePool)(nil), "google.container.v1alpha1.NodePool") + proto.RegisterType((*NodeManagement)(nil), "google.container.v1alpha1.NodeManagement") + proto.RegisterType((*AutoUpgradeOptions)(nil), "google.container.v1alpha1.AutoUpgradeOptions") + proto.RegisterType((*MaintenancePolicy)(nil), "google.container.v1alpha1.MaintenancePolicy") + proto.RegisterType((*MaintenanceWindow)(nil), "google.container.v1alpha1.MaintenanceWindow") + proto.RegisterType((*DailyMaintenanceWindow)(nil), "google.container.v1alpha1.DailyMaintenanceWindow") + proto.RegisterType((*SetNodePoolManagementRequest)(nil), "google.container.v1alpha1.SetNodePoolManagementRequest") + proto.RegisterType((*SetNodePoolSizeRequest)(nil), "google.container.v1alpha1.SetNodePoolSizeRequest") + 
proto.RegisterType((*RollbackNodePoolUpgradeRequest)(nil), "google.container.v1alpha1.RollbackNodePoolUpgradeRequest") + proto.RegisterType((*ListNodePoolsResponse)(nil), "google.container.v1alpha1.ListNodePoolsResponse") + proto.RegisterType((*NodePoolAutoscaling)(nil), "google.container.v1alpha1.NodePoolAutoscaling") + proto.RegisterType((*SetLabelsRequest)(nil), "google.container.v1alpha1.SetLabelsRequest") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1alpha1.SetLabelsRequest.ResourceLabelsEntry") + proto.RegisterType((*SetLegacyAbacRequest)(nil), "google.container.v1alpha1.SetLegacyAbacRequest") + proto.RegisterType((*StartIPRotationRequest)(nil), "google.container.v1alpha1.StartIPRotationRequest") + proto.RegisterType((*CompleteIPRotationRequest)(nil), "google.container.v1alpha1.CompleteIPRotationRequest") + proto.RegisterType((*AcceleratorConfig)(nil), "google.container.v1alpha1.AcceleratorConfig") + proto.RegisterType((*SetNetworkPolicyRequest)(nil), "google.container.v1alpha1.SetNetworkPolicyRequest") + proto.RegisterType((*SetMaintenancePolicyRequest)(nil), "google.container.v1alpha1.SetMaintenancePolicyRequest") + proto.RegisterEnum("google.container.v1alpha1.NodeTaint_Effect", NodeTaint_Effect_name, NodeTaint_Effect_value) + proto.RegisterEnum("google.container.v1alpha1.NetworkPolicy_Provider", NetworkPolicy_Provider_name, NetworkPolicy_Provider_value) + proto.RegisterEnum("google.container.v1alpha1.Cluster_Status", Cluster_Status_name, Cluster_Status_value) + proto.RegisterEnum("google.container.v1alpha1.Operation_Status", Operation_Status_name, Operation_Status_value) + proto.RegisterEnum("google.container.v1alpha1.Operation_Type", Operation_Type_name, Operation_Type_value) + proto.RegisterEnum("google.container.v1alpha1.SetMasterAuthRequest_Action", SetMasterAuthRequest_Action_name, SetMasterAuthRequest_Action_value) + proto.RegisterEnum("google.container.v1alpha1.NodePool_Status", NodePool_Status_name, NodePool_Status_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterManagerClient is the client API for ClusterManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterManagerClient interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Gets the details of a specific cluster. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. 
+ CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the settings of a specific cluster. + UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the version and/or image type of a specific node pool. + UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the autoscaling settings of a specific node pool. + SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the logging service of a specific cluster. + SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the monitoring service of a specific cluster. + SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the addons of a specific cluster. + SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the locations of a specific cluster. + SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the master of a specific cluster. + UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password of a specific cluster. + // This can be either via password generation or explicitly set. + // Modify basic_auth.csv and reset the K8S API server. + SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) + // Cancels the specified operation. + CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns configuration info about the Container Engine service. + GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes a node pool from a cluster.
+ DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be a no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets labels on a cluster. + SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. + SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) + // Start master IP rotation. + StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the size of a specific node pool. + SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the maintenance policy for a cluster. + SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) +} + +type clusterManagerClient struct { + cc *grpc.ClientConn +} + +func NewClusterManagerClient(cc *grpc.ClientConn) ClusterManagerClient { + return &clusterManagerClient{cc} +} + +func (c *clusterManagerClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/UpdateNodePool", in, out, opts...)
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetNodePoolAutoscaling", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetLoggingService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetMonitoringService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetAddonsConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetLocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/UpdateMaster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetMasterAuth", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) { + out := new(ListOperationsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/ListOperations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/GetOperation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/CancelOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) { + out := new(ServerConfig) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/GetServerConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) { + out := new(ListNodePoolsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/ListNodePools", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) { + out := new(NodePool) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/GetNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/CreateNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/DeleteNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/RollbackNodePoolUpgrade", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetNodePoolManagement", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetLegacyAbac", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/StartIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/CompleteIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetNodePoolSize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetNetworkPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1alpha1.ClusterManager/SetMaintenancePolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterManagerServer is the server API for ClusterManager service. +type ClusterManagerServer interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Gets the details of a specific cluster. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. + CreateCluster(context.Context, *CreateClusterRequest) (*Operation, error) + // Updates the settings of a specific cluster. + UpdateCluster(context.Context, *UpdateClusterRequest) (*Operation, error) + // Updates the version and/or image type of a specific node pool. + UpdateNodePool(context.Context, *UpdateNodePoolRequest) (*Operation, error) + // Sets the autoscaling settings of a specific node pool. + SetNodePoolAutoscaling(context.Context, *SetNodePoolAutoscalingRequest) (*Operation, error) + // Sets the logging service of a specific cluster. + SetLoggingService(context.Context, *SetLoggingServiceRequest) (*Operation, error) + // Sets the monitoring service of a specific cluster.
+ SetMonitoringService(context.Context, *SetMonitoringServiceRequest) (*Operation, error) + // Sets the addons of a specific cluster. + SetAddonsConfig(context.Context, *SetAddonsConfigRequest) (*Operation, error) + // Sets the locations of a specific cluster. + SetLocations(context.Context, *SetLocationsRequest) (*Operation, error) + // Updates the master of a specific cluster. + UpdateMaster(context.Context, *UpdateMasterRequest) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password of a specific cluster. + // This can be either via password generation or explicitly set. + // Modify basic_auth.csv and reset the K8S API server. + SetMasterAuth(context.Context, *SetMasterAuthRequest) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. + DeleteCluster(context.Context, *DeleteClusterRequest) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(context.Context, *ListOperationsRequest) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(context.Context, *GetOperationRequest) (*Operation, error) + // Cancels the specified operation. + CancelOperation(context.Context, *CancelOperationRequest) (*empty.Empty, error) + // Returns configuration info about the Container Engine service. + GetServerConfig(context.Context, *GetServerConfigRequest) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(context.Context, *ListNodePoolsRequest) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(context.Context, *GetNodePoolRequest) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(context.Context, *CreateNodePoolRequest) (*Operation, error) + // Deletes a node pool from a cluster. + DeleteNodePool(context.Context, *DeleteNodePoolRequest) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be a no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(context.Context, *RollbackNodePoolUpgradeRequest) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(context.Context, *SetNodePoolManagementRequest) (*Operation, error) + // Sets labels on a cluster. + SetLabels(context.Context, *SetLabelsRequest) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. + SetLegacyAbac(context.Context, *SetLegacyAbacRequest) (*Operation, error) + // Start master IP rotation. + StartIPRotation(context.Context, *StartIPRotationRequest) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(context.Context, *CompleteIPRotationRequest) (*Operation, error) + // Sets the size of a specific node pool. + SetNodePoolSize(context.Context, *SetNodePoolSizeRequest) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(context.Context, *SetNetworkPolicyRequest) (*Operation, error) + // Sets the maintenance policy for a cluster.
+ SetMaintenancePolicy(context.Context, *SetMaintenancePolicyRequest) (*Operation, error) +} + +func RegisterClusterManagerServer(s *grpc.Server, srv ClusterManagerServer) { + s.RegisterService(&_ClusterManager_serviceDesc, srv) +} + +func _ClusterManager_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateCluster(ctx, req.(*UpdateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/UpdateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) 
{ + return srv.(ClusterManagerServer).UpdateNodePool(ctx, req.(*UpdateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolAutoscaling_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolAutoscalingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetNodePoolAutoscaling", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, req.(*SetNodePoolAutoscalingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLoggingService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLoggingServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLoggingService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetLoggingService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLoggingService(ctx, req.(*SetLoggingServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMonitoringService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMonitoringServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetMonitoringService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, req.(*SetMonitoringServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetAddonsConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetAddonsConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetAddonsConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, req.(*SetAddonsConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetLocations", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLocations(ctx, req.(*SetLocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateMaster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateMasterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateMaster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/UpdateMaster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateMaster(ctx, req.(*UpdateMasterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMasterAuth_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMasterAuthRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetMasterAuth", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, req.(*SetMasterAuthRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListOperations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOperationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListOperations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/ListOperations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListOperations(ctx, req.(*ListOperationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/GetOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(ClusterManagerServer).GetOperation(ctx, req.(*GetOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CancelOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CancelOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/CancelOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CancelOperation(ctx, req.(*CancelOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetServerConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServerConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetServerConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/GetServerConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetServerConfig(ctx, req.(*GetServerConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListNodePools_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNodePoolsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListNodePools(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/ListNodePools", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListNodePools(ctx, req.(*ListNodePoolsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/GetNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetNodePool(ctx, req.(*GetNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/CreateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateNodePool(ctx, 
req.(*CreateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_DeleteNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/DeleteNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, req.(*DeleteNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_RollbackNodePoolUpgrade_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackNodePoolUpgradeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/RollbackNodePoolUpgrade", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, req.(*RollbackNodePoolUpgradeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolManagement_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolManagementRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetNodePoolManagement", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, req.(*SetNodePoolManagementRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLabels(ctx, req.(*SetLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLegacyAbac_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLegacyAbacRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLegacyAbac(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetLegacyAbac", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(ClusterManagerServer).SetLegacyAbac(ctx, req.(*SetLegacyAbacRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_StartIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StartIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).StartIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/StartIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).StartIPRotation(ctx, req.(*StartIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CompleteIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CompleteIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/CompleteIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, req.(*CompleteIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolSize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolSizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetNodePoolSize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, req.(*SetNodePoolSizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNetworkPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNetworkPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetNetworkPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNetworkPolicy(ctx, req.(*SetNetworkPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMaintenancePolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMaintenancePolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1alpha1.ClusterManager/SetMaintenancePolicy", + } + handler := func(ctx context.Context, req interface{}) 
(interface{}, error) { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, req.(*SetMaintenancePolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClusterManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.container.v1alpha1.ClusterManager", + HandlerType: (*ClusterManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListClusters", + Handler: _ClusterManager_ListClusters_Handler, + }, + { + MethodName: "GetCluster", + Handler: _ClusterManager_GetCluster_Handler, + }, + { + MethodName: "CreateCluster", + Handler: _ClusterManager_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _ClusterManager_UpdateCluster_Handler, + }, + { + MethodName: "UpdateNodePool", + Handler: _ClusterManager_UpdateNodePool_Handler, + }, + { + MethodName: "SetNodePoolAutoscaling", + Handler: _ClusterManager_SetNodePoolAutoscaling_Handler, + }, + { + MethodName: "SetLoggingService", + Handler: _ClusterManager_SetLoggingService_Handler, + }, + { + MethodName: "SetMonitoringService", + Handler: _ClusterManager_SetMonitoringService_Handler, + }, + { + MethodName: "SetAddonsConfig", + Handler: _ClusterManager_SetAddonsConfig_Handler, + }, + { + MethodName: "SetLocations", + Handler: _ClusterManager_SetLocations_Handler, + }, + { + MethodName: "UpdateMaster", + Handler: _ClusterManager_UpdateMaster_Handler, + }, + { + MethodName: "SetMasterAuth", + Handler: _ClusterManager_SetMasterAuth_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _ClusterManager_DeleteCluster_Handler, + }, + { + MethodName: "ListOperations", + Handler: _ClusterManager_ListOperations_Handler, + }, + { + MethodName: "GetOperation", + Handler: _ClusterManager_GetOperation_Handler, + }, + { + MethodName: "CancelOperation", + Handler: _ClusterManager_CancelOperation_Handler, + }, + { + MethodName: "GetServerConfig", + Handler: _ClusterManager_GetServerConfig_Handler, + }, + { + MethodName: "ListNodePools", + Handler: _ClusterManager_ListNodePools_Handler, + }, + { + MethodName: "GetNodePool", + Handler: _ClusterManager_GetNodePool_Handler, + }, + { + MethodName: "CreateNodePool", + Handler: _ClusterManager_CreateNodePool_Handler, + }, + { + MethodName: "DeleteNodePool", + Handler: _ClusterManager_DeleteNodePool_Handler, + }, + { + MethodName: "RollbackNodePoolUpgrade", + Handler: _ClusterManager_RollbackNodePoolUpgrade_Handler, + }, + { + MethodName: "SetNodePoolManagement", + Handler: _ClusterManager_SetNodePoolManagement_Handler, + }, + { + MethodName: "SetLabels", + Handler: _ClusterManager_SetLabels_Handler, + }, + { + MethodName: "SetLegacyAbac", + Handler: _ClusterManager_SetLegacyAbac_Handler, + }, + { + MethodName: "StartIPRotation", + Handler: _ClusterManager_StartIPRotation_Handler, + }, + { + MethodName: "CompleteIPRotation", + Handler: _ClusterManager_CompleteIPRotation_Handler, + }, + { + MethodName: "SetNodePoolSize", + Handler: _ClusterManager_SetNodePoolSize_Handler, + }, + { + MethodName: "SetNetworkPolicy", + Handler: _ClusterManager_SetNetworkPolicy_Handler, + }, + { + MethodName: "SetMaintenancePolicy", + Handler: _ClusterManager_SetMaintenancePolicy_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/container/v1alpha1/cluster_service.proto", +} + +func init() { + proto.RegisterFile("google/container/v1alpha1/cluster_service.proto", fileDescriptor_cluster_service_e36c06b9432393ee) +} + +var fileDescriptor_cluster_service_e36c06b9432393ee = []byte{ + // 4786 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x7c, 0x5d, 0x6c, 0x23, 0xd7, + 0x75, 0x7f, 0x46, 0xa2, 0x28, 0xf1, 0x90, 0xa2, 0xa8, 0xab, 0x2f, 0x2e, 0xed, 0xb5, 0xd7, 0x13, + 0xfb, 0xef, 0xf5, 0x6e, 0x2c, 0x79, 0xd7, 0x1b, 0xdb, 0xf1, 0x37, 0x45, 0xcd, 0x6a, 0x99, 0x95, + 0x48, 0x66, 0x28, 0xed, 0xc6, 0x1f, 0xc0, 0xfc, 0x47, 0x9c, 0x2b, 0x6a, 0x22, 0x72, 0x66, 0x3c, + 0x33, 0x5c, 0x5b, 0xeb, 0x3a, 0x6d, 0x52, 0xf7, 0xad, 0x6f, 0x01, 0x0a, 0xb4, 0x28, 0x10, 0xc0, + 0xe8, 0x57, 0x92, 0x02, 0x2d, 0x5a, 0x14, 0x48, 0x8b, 0x36, 0x45, 0xdb, 0x97, 0xa2, 0x28, 0xda, + 0x22, 0x79, 0x2e, 0xd0, 0x02, 0x7d, 0xe8, 0x5b, 0x91, 0xc7, 0x3e, 0xb4, 0x28, 0xee, 0xc7, 0x0c, + 0xef, 0x90, 0xc3, 0x21, 0x25, 0x45, 0x6b, 0xbf, 0x69, 0xce, 0xbd, 0xe7, 0xde, 0xdf, 0x39, 0x73, + 0xee, 0x39, 0xe7, 0x9e, 0x33, 0x14, 0x6c, 0xb4, 0x6d, 0xbb, 0xdd, 0xc1, 0x1b, 0x2d, 0xdb, 0xf2, + 0x75, 0xd3, 0xc2, 0xee, 0xc6, 0x83, 0x1b, 0x7a, 0xc7, 0x39, 0xd2, 0x6f, 0x6c, 0xb4, 0x3a, 0x3d, + 0xcf, 0xc7, 0xae, 0xe6, 0x61, 0xf7, 0x81, 0xd9, 0xc2, 0xeb, 0x8e, 0x6b, 0xfb, 0x36, 0xba, 0xc4, + 0x18, 0xd6, 0x43, 0x86, 0xf5, 0x80, 0xa1, 0xf4, 0x38, 0x5f, 0x4b, 0x77, 0xcc, 0x0d, 0xdd, 0xb2, + 0x6c, 0x5f, 0xf7, 0x4d, 0xdb, 0xf2, 0x18, 0x63, 0xe9, 0x31, 0x3e, 0x4a, 0x9f, 0x0e, 0x7a, 0x87, + 0x1b, 0xb8, 0xeb, 0xf8, 0x27, 0x6c, 0x50, 0xfe, 0xcf, 0x19, 0x80, 0x9a, 0x6d, 0xe0, 0x8a, 0x6d, + 0x1d, 0x9a, 0x6d, 0xf4, 0x14, 0xe4, 0xba, 0x7a, 0xeb, 0xc8, 0xb4, 0xb0, 0xe6, 0x9f, 0x38, 0xb8, + 0x28, 0x5d, 0x91, 0xae, 0x66, 0xd4, 0x2c, 0xa7, 0xed, 0x9d, 0x38, 0x18, 0x5d, 0x81, 0x9c, 0x61, + 0x7a, 0xc7, 0x9a, 0x67, 0x3e, 0xc4, 0x5a, 0xfb, 0xa0, 0x38, 0x75, 0x45, 0xba, 0x3a, 0xa3, 0x02, + 0xa1, 0x35, 0xcd, 0x87, 0x78, 0xfb, 0x80, 0x2c, 0x62, 0xeb, 0x3d, 0xff, 0x48, 0xf3, 0x5a, 0xb6, + 0x83, 0xbd, 0xe2, 0xf4, 0x95, 0x69, 0xb2, 0x08, 0xa5, 0x35, 0x29, 0x09, 0x3d, 0x0b, 0x0b, 0x5c, + 0x3a, 0x4d, 0x6f, 0xb5, 0xec, 0x9e, 0xe5, 0x17, 0x33, 0x74, 0xab, 0x3c, 0x27, 0x97, 0x19, 0x15, + 0xd5, 0x61, 0xae, 0x8b, 0x7d, 0xdd, 0xd0, 0x7d, 0xbd, 0x98, 0xba, 0x32, 0x7d, 0x35, 0x7b, 0xf3, + 0xc5, 0xf5, 0x91, 0x8a, 0x58, 0xef, 0x4b, 0xb2, 0xbe, 0xcb, 0xb9, 0x14, 0xcb, 0x77, 0x4f, 0xd4, + 0x70, 0x11, 0x74, 0x19, 0xc0, 0xec, 0xea, 0x6d, 0x2e, 0xdf, 0x0c, 0xdd, 0x34, 0x43, 0x29, 0x54, + 0xba, 0x2a, 0xa4, 0x3b, 0xfa, 0x01, 0xee, 0x78, 0xc5, 0x34, 0xdd, 0xed, 0xc6, 0x64, 0xbb, 0xed, + 0x50, 0x1e, 0xb6, 0x17, 0x5f, 0x00, 0xfd, 0x3f, 0x58, 0xe8, 0xd8, 0x2d, 0xbd, 0xa3, 0x79, 0x9e, + 0xa1, 0x31, 0x19, 0x67, 0xa9, 0xae, 0xe6, 0x29, 0xb9, 0xe9, 0x19, 0x15, 0x2a, 0x22, 0x82, 0x94, + 0xaf, 0xb7, 0xbd, 0xe2, 0x1c, 0x55, 0x13, 0xfd, 0x1b, 0x5d, 0x81, 0xac, 0xe3, 0x62, 0xf2, 0xa2, + 0xcc, 0x83, 0x0e, 0x2e, 0xc2, 0x15, 0xe9, 0xea, 0x9c, 0x2a, 0x92, 0x50, 0x03, 0x72, 0x7a, 0xab, + 0x85, 0x3b, 0xd8, 0xd5, 0x7d, 0xdb, 0xf5, 0x8a, 0x59, 0x0a, 0xf7, 0x2b, 0x09, 0x70, 0xcb, 0xfd, + 0xe9, 0x0c, 0xb5, 0x1a, 0x59, 0x01, 0x5d, 0x85, 0x42, 0xd7, 0xb4, 0xb4, 0x96, 0xd3, 0xd3, 0x9c, + 0x8e, 0xee, 0x1f, 0xda, 0x6e, 0xb7, 0x38, 0xcf, 0x5e, 0x4a, 0xd7, 0xb4, 0x2a, 0x4e, 0xaf, 0xc1, + 0xa9, 0xe8, 0x75, 0x48, 0x93, 0xc5, 0x7d, 0xaf, 0xb8, 0x40, 0x77, 0x7d, 0x7a, 0x8c, 0x92, 0xf6, + 0xc8, 0x64, 0x95, 0xf3, 0x94, 0x5e, 0x83, 0xf9, 0xc8, 0xcb, 0x41, 0x05, 0x98, 0x3e, 0xc6, 0x27, + 0xdc, 0xd6, 0xc8, 0x9f, 0x68, 0x19, 0x66, 0x1e, 0xe8, 0x9d, 0x1e, 0xa6, 0xc6, 0x95, 0x51, 0xd9, + 0xc3, 0xab, 0x53, 0xaf, 0x48, 0xa5, 0xaf, 0x41, 0x56, 0xd0, 0xf5, 0x69, 0x58, 0xe5, 0x9f, 0x49, + 0x90, 0x09, 0xd1, 0x4c, 0xca, 0x89, 0x2a, 0x90, 0xc6, 0x87, 0x87, 0xb8, 0xe5, 0x17, 0xa7, 0xaf, + 0x48, 0x57, 0xf3, 0x37, 0xaf, 0x4f, 0x22, 0xeb, 
0xba, 0x42, 0x59, 0x54, 0xce, 0x2a, 0xbf, 0x03, + 0x69, 0x46, 0x41, 0xab, 0x80, 0x94, 0xdb, 0xb7, 0x95, 0xca, 0x9e, 0xb6, 0x5f, 0x6b, 0x36, 0x94, + 0x4a, 0xf5, 0x76, 0x55, 0xd9, 0x2a, 0x7c, 0x09, 0x2d, 0x40, 0xb6, 0x56, 0xd7, 0x9a, 0x95, 0x3b, + 0xca, 0xd6, 0xfe, 0x8e, 0x52, 0x90, 0xc8, 0xc4, 0x86, 0xaa, 0xdc, 0x56, 0x54, 0x4d, 0xa4, 0x4f, + 0xa1, 0x3c, 0x40, 0xad, 0xae, 0x29, 0xdf, 0x54, 0x2a, 0xfb, 0x7b, 0x4a, 0x61, 0x5a, 0xfe, 0xf1, + 0x14, 0xc0, 0xae, 0x4e, 0xfc, 0x45, 0xb9, 0xe7, 0x1f, 0xa1, 0x12, 0xcc, 0xf5, 0x3c, 0xec, 0x5a, + 0x7a, 0x37, 0x38, 0xbc, 0xe1, 0x33, 0x19, 0x73, 0x74, 0xcf, 0xfb, 0xd0, 0x76, 0x0d, 0x2e, 0x63, + 0xf8, 0x8c, 0x2c, 0xb8, 0xd4, 0xea, 0x98, 0xd8, 0xf2, 0xb5, 0x16, 0x76, 0x7d, 0xf3, 0xd0, 0x6c, + 0xe9, 0x3e, 0xd6, 0x5a, 0xd4, 0x4e, 0xa8, 0xe4, 0xd9, 0x9b, 0x37, 0x13, 0x24, 0xaf, 0x50, 0xde, + 0x4a, 0x9f, 0x95, 0x5b, 0xd8, 0x5a, 0x2b, 0x7e, 0x00, 0xdd, 0x82, 0xd5, 0xc0, 0xcd, 0xb5, 0x74, + 0x71, 0xcf, 0xa2, 0x41, 0x91, 0x2d, 0xf3, 0xd1, 0x8a, 0x2e, 0xf0, 0xa2, 0xe7, 0x01, 0x0d, 0xa3, + 0x2c, 0x62, 0xca, 0xb1, 0x38, 0xb4, 0x15, 0x39, 0xeb, 0x7c, 0x3a, 0x79, 0xd5, 0x87, 0xec, 0xac, + 0x33, 0xca, 0x5d, 0x7c, 0x22, 0x37, 0x61, 0x6d, 0x04, 0x6e, 0xf4, 0x0a, 0x14, 0x4d, 0xcf, 0xeb, + 0x61, 0x2d, 0x66, 0x3b, 0x89, 0x1e, 0xc6, 0x55, 0x3a, 0x3e, 0xc4, 0x2f, 0xff, 0xde, 0x34, 0xe4, + 0xca, 0x86, 0x61, 0x5b, 0x1e, 0x5f, 0xea, 0x7d, 0x58, 0x3a, 0xf2, 0x7d, 0x47, 0xeb, 0xd8, 0xba, + 0xa1, 0x1d, 0xe8, 0x1d, 0xdd, 0x6a, 0x99, 0x56, 0x9b, 0xae, 0x92, 0x7c, 0x5e, 0xef, 0xf8, 0xbe, + 0xb3, 0x63, 0xeb, 0xc6, 0x66, 0xc0, 0xa3, 0x2e, 0x1e, 0x0d, 0x92, 0xd0, 0x07, 0x50, 0x3a, 0xb2, + 0x5d, 0xf3, 0x21, 0x61, 0xef, 0x68, 0x8e, 0x6d, 0x68, 0x7a, 0xcf, 0xb7, 0xbd, 0x96, 0xde, 0x21, + 0x9b, 0x4c, 0xd1, 0x4d, 0x92, 0x3c, 0xe6, 0x9d, 0x90, 0xb9, 0x61, 0x1b, 0xe5, 0x3e, 0xab, 0x5a, + 0x3c, 0x1a, 0x31, 0x82, 0x74, 0x58, 0x3e, 0xee, 0x1d, 0x60, 0xd7, 0xc2, 0x3e, 0xf6, 0x34, 0x43, + 0xf7, 0x8e, 0x0e, 0x6c, 0xdd, 0x35, 0xb8, 0x95, 0xac, 0x27, 0x6c, 0x76, 0x37, 0x64, 0xdb, 0x0a, + 0xb8, 0xd4, 0xa5, 0xe3, 0x61, 0x22, 0x3a, 0x80, 0x15, 0x0b, 0xfb, 0x1f, 0xda, 0xee, 0xb1, 0xe6, + 0xd8, 0x1d, 0xb3, 0x75, 0x12, 0x58, 0x62, 0x6a, 0xec, 0x1e, 0x35, 0xc6, 0xd7, 0xa0, 0x6c, 0xdc, + 0x0a, 0x97, 0xac, 0x61, 0xa2, 0xbc, 0x01, 0x8b, 0x43, 0x1a, 0x26, 0x47, 0xc4, 0x30, 0x3d, 0xfd, + 0xa0, 0x83, 0x0d, 0xfe, 0x9e, 0xc3, 0x67, 0xf9, 0x25, 0x28, 0x8e, 0xd2, 0x56, 0x22, 0xdf, 0x0d, + 0x58, 0x8a, 0x11, 0x7c, 0x1c, 0x4b, 0x8c, 0x1c, 0x89, 0x2c, 0xff, 0x25, 0xc1, 0x13, 0x7d, 0x3f, + 0x40, 0x70, 0x62, 0x83, 0xaf, 0x11, 0x58, 0x62, 0x11, 0x66, 0xb1, 0x25, 0x72, 0x07, 0x8f, 0xc8, + 0x80, 0x6c, 0xcb, 0x34, 0x5c, 0xed, 0xa0, 0x63, 0xb7, 0x8e, 0xbd, 0xe2, 0x14, 0xf5, 0xea, 0x95, + 0x04, 0x2d, 0x27, 0xef, 0xb4, 0x5e, 0x31, 0x0d, 0x77, 0x93, 0xac, 0xa5, 0x42, 0x2b, 0xf8, 0xd3, + 0x2b, 0xed, 0x42, 0x26, 0x1c, 0x20, 0x49, 0x82, 0x61, 0x7a, 0x4e, 0x47, 0x3f, 0xd1, 0x04, 0x67, + 0x95, 0xe5, 0xb4, 0x1a, 0xf1, 0x57, 0xe4, 0xf8, 0x86, 0xa8, 0xb8, 0xc7, 0xca, 0x84, 0xeb, 0xc9, + 0x3f, 0x94, 0x60, 0x3e, 0xa2, 0x25, 0xb4, 0x0b, 0x73, 0x8e, 0x6b, 0x3f, 0x30, 0x0d, 0xec, 0xd2, + 0xf5, 0xf2, 0xc9, 0xe1, 0x5b, 0xe4, 0x5d, 0x6f, 0x70, 0x46, 0x35, 0x5c, 0x42, 0xd4, 0xd7, 0x54, + 0x44, 0x5f, 0xf2, 0x0b, 0x30, 0xd7, 0xe8, 0xcf, 0x5a, 0x6e, 0xa8, 0xf5, 0x7b, 0xd5, 0x2d, 0x45, + 0x1d, 0xf0, 0xe9, 0x00, 0xe9, 0x4a, 0x79, 0xa7, 0x5a, 0xa9, 0x17, 0x24, 0xf9, 0xcf, 0x52, 0x80, + 0xaa, 0x8d, 0x72, 0x87, 0x84, 0x7e, 0x92, 0x9c, 0x71, 0xc4, 0x4f, 0x43, 0xbe, 0xe7, 0x61, 0xcd, + 0x74, 0x34, 0xbd, 0x63, 0xea, 0x1e, 0xf6, 0xf8, 0x9b, 0xc9, 0xf5, 0x3c, 
0x5c, 0x75, 0xca, 0x8c, + 0x86, 0xae, 0xc3, 0x62, 0xcb, 0xc5, 0xc4, 0x21, 0x7b, 0xbd, 0x03, 0x6e, 0xcb, 0x1c, 0x52, 0x81, + 0x0d, 0x34, 0x43, 0x3a, 0x4d, 0xad, 0xc2, 0x27, 0xa6, 0xdb, 0x69, 0x9e, 0x5a, 0x85, 0x64, 0xaa, + 0xde, 0x6b, 0xb0, 0x18, 0xb8, 0x60, 0xd3, 0x79, 0x70, 0x4b, 0x23, 0x9a, 0xa5, 0x07, 0x2c, 0xa3, + 0x2e, 0xf0, 0x81, 0xaa, 0xf3, 0xe0, 0x16, 0x79, 0x65, 0x04, 0xa7, 0x65, 0x1b, 0x58, 0x98, 0xc8, + 0x32, 0xa7, 0x1c, 0xa1, 0x86, 0xb3, 0xbe, 0x02, 0x88, 0xa7, 0x6f, 0x9e, 0x30, 0x33, 0x4d, 0x67, + 0x16, 0x82, 0x91, 0x70, 0xf6, 0x5b, 0xf0, 0x78, 0x3f, 0xd3, 0x6d, 0xd9, 0x96, 0xa1, 0xbb, 0x27, + 0x9a, 0xab, 0x5b, 0x6d, 0xcc, 0x50, 0xcf, 0x52, 0xbe, 0x4b, 0x7c, 0x4e, 0x33, 0x98, 0xa2, 0x92, + 0x19, 0x54, 0x80, 0x32, 0x5c, 0x0e, 0xb7, 0x8b, 0x5d, 0x61, 0x8e, 0xae, 0x50, 0x0a, 0x26, 0xc5, + 0x2c, 0xf1, 0x55, 0x58, 0x1b, 0xd2, 0x01, 0xb7, 0xb7, 0x4c, 0x24, 0x0e, 0x05, 0xa8, 0x99, 0xf1, + 0x6e, 0xc0, 0x72, 0x54, 0x1d, 0x9c, 0x07, 0x58, 0x24, 0x12, 0x95, 0xc2, 0x18, 0x5e, 0x86, 0xe2, + 0xb0, 0x66, 0x38, 0x53, 0x96, 0x32, 0xad, 0x0c, 0xea, 0x87, 0x19, 0xf9, 0x8b, 0xb0, 0xd6, 0xb0, + 0x8d, 0x26, 0x6e, 0xf5, 0x5c, 0xd3, 0x3f, 0x89, 0x78, 0x83, 0x91, 0xc7, 0x59, 0xfe, 0xb5, 0x05, + 0x98, 0xad, 0x30, 0xdc, 0x24, 0xbb, 0x14, 0xce, 0x17, 0xfd, 0x9b, 0x64, 0x97, 0x06, 0xf6, 0x5a, + 0xae, 0xe9, 0x10, 0x53, 0xe4, 0x27, 0x4b, 0x24, 0x91, 0x37, 0x69, 0x5a, 0xa6, 0x6f, 0xea, 0x1d, + 0x8d, 0x0a, 0xca, 0xd2, 0xd7, 0x69, 0x9a, 0xbe, 0x16, 0xf8, 0x08, 0x4b, 0x7f, 0x49, 0x06, 0x7b, + 0x1b, 0xb2, 0x7c, 0x96, 0xe0, 0xa4, 0x9f, 0x99, 0x28, 0x73, 0x56, 0xc1, 0xea, 0xdf, 0x3e, 0x6e, + 0x43, 0xb6, 0x4b, 0x1d, 0x0b, 0x09, 0x62, 0x47, 0xd4, 0xc4, 0x92, 0xd7, 0xe9, 0xbb, 0x21, 0x15, + 0xba, 0xfd, 0x24, 0xe8, 0x59, 0x92, 0x79, 0xb7, 0xdb, 0xa6, 0xd5, 0x0e, 0xee, 0x50, 0xdc, 0x08, + 0xf3, 0x9c, 0xdc, 0x64, 0x54, 0x92, 0x4f, 0x74, 0x6d, 0xcb, 0xf4, 0x6d, 0x57, 0x9c, 0xcb, 0x0c, + 0x6f, 0xb1, 0x3f, 0x12, 0x4c, 0x2f, 0xc2, 0x6c, 0x70, 0xfa, 0x98, 0x69, 0x05, 0x8f, 0xf1, 0x67, + 0x29, 0x13, 0x7f, 0x96, 0x76, 0x60, 0x5e, 0xa7, 0x09, 0x42, 0xa0, 0x2f, 0xa0, 0x72, 0x3e, 0x9b, + 0x94, 0xba, 0x0b, 0x09, 0x85, 0x9a, 0xd3, 0xc5, 0xf4, 0xe2, 0x09, 0x00, 0xc1, 0x29, 0x30, 0x5b, + 0x12, 0x28, 0x68, 0x13, 0xa8, 0x86, 0x35, 0xc7, 0xb6, 0x3b, 0x5e, 0x31, 0x47, 0x3d, 0xfb, 0x97, + 0xc7, 0xbc, 0x9a, 0x86, 0x6d, 0x77, 0xd4, 0x8c, 0xc5, 0xff, 0xf2, 0xd0, 0xe3, 0x90, 0x09, 0xfc, + 0x96, 0x57, 0x9c, 0xa7, 0xd7, 0x94, 0x3e, 0x01, 0xbd, 0x04, 0x6b, 0xcc, 0xf0, 0x34, 0x21, 0x2d, + 0xa0, 0xab, 0x15, 0xf3, 0xd4, 0x2e, 0x57, 0xd8, 0x70, 0x3f, 0x08, 0x96, 0xc9, 0x20, 0xaa, 0x43, + 0x3e, 0x1a, 0xe4, 0x8b, 0x4b, 0x54, 0x11, 0x57, 0x27, 0xf5, 0xd9, 0xea, 0x7c, 0x24, 0xae, 0x23, + 0x0d, 0x96, 0xa9, 0x23, 0x0d, 0xa0, 0x05, 0xcb, 0x2e, 0xd3, 0x65, 0x9f, 0x4f, 0x58, 0x76, 0xd8, + 0x33, 0xab, 0xc8, 0x74, 0x86, 0xbc, 0xf5, 0xa7, 0x12, 0x3c, 0x25, 0x18, 0x28, 0x0b, 0x7d, 0x1a, + 0x07, 0x11, 0xbe, 0xce, 0x55, 0xba, 0xdd, 0xd7, 0xce, 0x1c, 0x3d, 0xd5, 0x27, 0xba, 0xc9, 0x71, + 0xfc, 0x3d, 0x40, 0x5d, 0x72, 0xcb, 0xc0, 0x96, 0x6e, 0xb5, 0x70, 0x20, 0xe5, 0xda, 0xd8, 0x84, + 0x72, 0xb7, 0xcf, 0xc4, 0x85, 0x5c, 0xec, 0x0e, 0x92, 0x90, 0x0d, 0x25, 0x92, 0x45, 0x7a, 0xdc, + 0xe3, 0x0c, 0xe4, 0x5f, 0x97, 0xc6, 0xde, 0x04, 0x46, 0x78, 0x2b, 0x75, 0xcd, 0x19, 0xe1, 0xc6, + 0x1e, 0x83, 0x8c, 0x87, 0x3b, 0x87, 0x5a, 0xc7, 0xb4, 0x8e, 0x79, 0xf2, 0x3f, 0x47, 0x08, 0x3b, + 0xa6, 0x75, 0x4c, 0xbc, 0xd7, 0x43, 0xdb, 0x0a, 0x52, 0x7c, 0xfa, 0x37, 0xc9, 0x82, 0xb0, 0x65, + 0x38, 0xb6, 0x69, 0xf9, 0x3c, 0xa7, 0x0f, 0x9f, 0x89, 0x2d, 0x06, 0x7e, 0x2b, 0x38, 0x8f, 0x0f, + 
0xb0, 0xeb, 0x11, 0x2f, 0xd7, 0x66, 0x6e, 0x96, 0x0f, 0x73, 0xf7, 0x78, 0x8f, 0x0d, 0xd2, 0xeb, + 0x48, 0xcf, 0x75, 0x49, 0xaa, 0xcf, 0x5f, 0x70, 0xc0, 0x76, 0xc4, 0xc3, 0x00, 0x1b, 0x65, 0x6f, + 0x2e, 0xe0, 0x7a, 0x01, 0x02, 0x3a, 0xf3, 0x92, 0x01, 0x8f, 0x49, 0x79, 0x10, 0x1f, 0x23, 0x27, + 0x2a, 0xe0, 0x78, 0x12, 0xb2, 0x3c, 0x92, 0xfb, 0x66, 0x17, 0x17, 0xbf, 0xc5, 0x8e, 0x2b, 0x23, + 0xed, 0x99, 0x34, 0xa6, 0xa5, 0x3d, 0x5f, 0xf7, 0x7b, 0x5e, 0xf1, 0x98, 0x26, 0x30, 0xcf, 0x25, + 0x5e, 0xba, 0xa8, 0x0c, 0xeb, 0x4d, 0xca, 0xa0, 0x72, 0x46, 0xf4, 0x0c, 0xe4, 0xd9, 0x5f, 0x5a, + 0x17, 0x7b, 0x9e, 0xde, 0xc6, 0xc5, 0x0e, 0xdd, 0x66, 0x9e, 0x51, 0x77, 0x19, 0x11, 0x3d, 0x0f, + 0x4b, 0x03, 0x31, 0xcc, 0x33, 0x1f, 0xe2, 0x62, 0x97, 0xf9, 0x78, 0x31, 0x84, 0x35, 0xcd, 0x87, + 0x78, 0x44, 0x6c, 0xb7, 0x46, 0xc4, 0xf6, 0x75, 0x58, 0x32, 0x2d, 0xcf, 0xa7, 0xf6, 0xd9, 0x76, + 0xed, 0x9e, 0xa3, 0xf5, 0xdc, 0x8e, 0x57, 0xb4, 0xa9, 0xef, 0x58, 0x0c, 0x86, 0xb6, 0xc9, 0xc8, + 0xbe, 0xdb, 0xf1, 0xc8, 0xea, 0x11, 0x4d, 0xb2, 0x78, 0xe3, 0x30, 0x2c, 0x82, 0x1e, 0x59, 0xbc, + 0x79, 0x12, 0xb2, 0xf8, 0x23, 0xc7, 0x74, 0xb9, 0x16, 0x3f, 0x60, 0x5a, 0x64, 0x24, 0xaa, 0xc5, + 0x12, 0xcc, 0x05, 0x47, 0xb7, 0xe8, 0x32, 0x13, 0x09, 0x9e, 0x65, 0x13, 0xd2, 0x4c, 0x61, 0xe4, + 0x8a, 0xdd, 0xdc, 0x2b, 0xef, 0xed, 0x37, 0x07, 0xf2, 0xb6, 0x02, 0xe4, 0x68, 0x46, 0xd7, 0xac, + 0xd6, 0x6b, 0xd5, 0xda, 0x76, 0x41, 0x42, 0x59, 0x98, 0x55, 0xf7, 0x6b, 0xf4, 0x61, 0x8a, 0x5c, + 0xd5, 0x55, 0xa5, 0x52, 0xaf, 0x55, 0xaa, 0x3b, 0x84, 0x30, 0x8d, 0x72, 0x30, 0xd7, 0xdc, 0xab, + 0x37, 0x1a, 0xe4, 0x29, 0x85, 0x32, 0x30, 0xa3, 0xa8, 0x6a, 0x5d, 0x2d, 0xcc, 0xc8, 0xbf, 0x9f, + 0x86, 0x79, 0xfe, 0x92, 0xf6, 0x1d, 0x83, 0xdc, 0x48, 0x5f, 0x80, 0x65, 0x03, 0x7b, 0xa6, 0x4b, + 0xdc, 0x86, 0x68, 0x31, 0x2c, 0xed, 0x42, 0x7c, 0x4c, 0xb4, 0x98, 0xd7, 0xa1, 0x14, 0x70, 0xc4, + 0x84, 0x2a, 0x96, 0x85, 0x15, 0xf9, 0x8c, 0xdd, 0xa1, 0x88, 0xf5, 0x1e, 0xac, 0x04, 0xdc, 0xd1, + 0x98, 0x93, 0x3e, 0x5d, 0xcc, 0x59, 0xe2, 0xab, 0x44, 0x6e, 0xb6, 0x1b, 0x03, 0xc2, 0x90, 0x10, + 0xa3, 0x99, 0x46, 0x10, 0x3f, 0x05, 0x61, 0x48, 0x18, 0xa9, 0x1a, 0xe4, 0x2d, 0x07, 0x0c, 0x42, + 0x0d, 0x8e, 0x85, 0xd2, 0x02, 0x1f, 0xa9, 0x86, 0xa5, 0xb8, 0x0f, 0xe0, 0xf2, 0xf0, 0xf2, 0xe2, + 0xed, 0x36, 0x33, 0xfe, 0x32, 0xc8, 0xf7, 0x16, 0x2f, 0xb6, 0xa5, 0x01, 0x5c, 0xe2, 0x35, 0xee, + 0x3a, 0x04, 0xa8, 0xb5, 0x7e, 0xc0, 0x03, 0x6a, 0xb4, 0x01, 0xbe, 0x9d, 0x30, 0xee, 0x7d, 0x4f, + 0x82, 0xe7, 0xc2, 0x57, 0x33, 0x36, 0x2a, 0xe4, 0xce, 0x1b, 0x15, 0x9e, 0x09, 0x5e, 0x72, 0x72, + 0x70, 0xf8, 0x36, 0xc8, 0x01, 0xa8, 0x04, 0x3f, 0x9e, 0x3f, 0xb3, 0x1f, 0x7f, 0x82, 0xaf, 0x3e, + 0x2a, 0x2b, 0xbd, 0x05, 0xab, 0x03, 0x4a, 0x09, 0x6c, 0x9c, 0x17, 0x76, 0x22, 0x62, 0x70, 0x2b, + 0x97, 0x7f, 0x9e, 0x86, 0x4c, 0xdd, 0xc1, 0x2e, 0x55, 0x6d, 0x6c, 0xce, 0x1a, 0x44, 0x82, 0x29, + 0x21, 0x12, 0x34, 0x20, 0x6f, 0x07, 0x4c, 0xcc, 0x96, 0xa6, 0xc7, 0x3a, 0xcd, 0x70, 0x97, 0x75, + 0x62, 0x64, 0xea, 0x7c, 0xb8, 0x00, 0xb5, 0xb9, 0x4a, 0xe8, 0x7e, 0x53, 0x63, 0xab, 0x7d, 0xfd, + 0x95, 0x06, 0x1c, 0xf0, 0x2a, 0xa4, 0x0d, 0xec, 0xeb, 0x66, 0x87, 0x9b, 0x36, 0x7f, 0x8a, 0x71, + 0xcc, 0x33, 0x71, 0x8e, 0x39, 0x12, 0x10, 0xd3, 0x03, 0x01, 0xf1, 0x49, 0xc8, 0xfa, 0xba, 0xdb, + 0xc6, 0x3e, 0x1b, 0x66, 0x47, 0x0d, 0x18, 0x89, 0x4e, 0x10, 0x5d, 0x5f, 0x26, 0xea, 0xfa, 0xc8, + 0x85, 0xda, 0xf3, 0x75, 0xd7, 0x67, 0x6e, 0x93, 0x5d, 0x56, 0x32, 0x94, 0x42, 0xbd, 0xe6, 0x25, + 0x1a, 0x58, 0xd9, 0x20, 0x4b, 0x24, 0x67, 0xb1, 0x65, 0x90, 0x21, 0x59, 0x1d, 0xeb, 0x34, 0xb3, + 0x30, 0xdb, 0x50, 0x6a, 
0x5b, 0x31, 0xfe, 0x72, 0x0e, 0x52, 0x5b, 0xf5, 0x9a, 0xc2, 0x1c, 0x65, + 0x79, 0xb3, 0xae, 0xee, 0x51, 0x47, 0x29, 0xff, 0xcf, 0x14, 0xa4, 0xa8, 0xd2, 0x97, 0xa1, 0xb0, + 0xf7, 0x4e, 0x43, 0x19, 0x58, 0x10, 0x41, 0xbe, 0xa2, 0x2a, 0xe5, 0x3d, 0x45, 0xab, 0xec, 0xec, + 0x37, 0xf7, 0x14, 0xb5, 0x20, 0x11, 0xda, 0x96, 0xb2, 0xa3, 0x08, 0xb4, 0x29, 0x42, 0xdb, 0x6f, + 0x6c, 0xab, 0xe5, 0x2d, 0x45, 0xdb, 0x2d, 0x53, 0xda, 0x34, 0x5a, 0x84, 0xf9, 0x80, 0x56, 0xab, + 0x6f, 0x29, 0xcd, 0x42, 0x8a, 0x4c, 0x53, 0x95, 0x46, 0xb9, 0xaa, 0x86, 0xac, 0x33, 0x8c, 0x75, + 0x4b, 0xdc, 0x22, 0x4d, 0xc0, 0xf0, 0x6d, 0x09, 0xa7, 0xd6, 0xa8, 0xd7, 0x77, 0x0a, 0xb3, 0x84, + 0xca, 0x37, 0xee, 0x53, 0xe7, 0xd0, 0xe3, 0x50, 0x6c, 0x2a, 0x7b, 0x7d, 0x92, 0xb6, 0x5b, 0xae, + 0x95, 0xb7, 0x95, 0x5d, 0xa5, 0xb6, 0x57, 0xc8, 0xa0, 0x15, 0x58, 0x2c, 0xef, 0xef, 0xd5, 0x35, + 0xbe, 0x2d, 0x03, 0x02, 0x44, 0x81, 0x94, 0x1c, 0x05, 0x98, 0x45, 0x79, 0x00, 0xb2, 0xd8, 0x4e, + 0x79, 0x53, 0xd9, 0x69, 0x16, 0x72, 0x68, 0x09, 0x16, 0xc8, 0x33, 0x93, 0x49, 0x2b, 0xef, 0xef, + 0xdd, 0x29, 0xcc, 0x53, 0xed, 0x47, 0x76, 0x6c, 0x56, 0xdf, 0x55, 0x0a, 0xf9, 0x90, 0xae, 0xec, + 0xdd, 0xaf, 0xab, 0x77, 0xb5, 0x46, 0x7d, 0xa7, 0x5a, 0x79, 0xa7, 0xb0, 0x80, 0x4a, 0xb0, 0xca, + 0x16, 0xa9, 0xd6, 0xf6, 0x94, 0x5a, 0xb9, 0x56, 0x51, 0x82, 0xb1, 0x82, 0xfc, 0x7d, 0x09, 0x96, + 0x2b, 0x34, 0xf3, 0xe0, 0x31, 0x4a, 0xc5, 0x1f, 0xf4, 0xb0, 0xe7, 0x13, 0x33, 0x71, 0x5c, 0xfb, + 0x5b, 0xb8, 0xe5, 0x13, 0x6f, 0xce, 0x0e, 0x61, 0x86, 0x53, 0xaa, 0x46, 0xec, 0x49, 0x7c, 0x1d, + 0x66, 0x79, 0xbe, 0xc5, 0xcb, 0x80, 0xf2, 0xf8, 0xbc, 0x45, 0x0d, 0x58, 0xc8, 0x81, 0x71, 0x74, + 0x12, 0xe2, 0xf9, 0x81, 0xe0, 0x4f, 0xf2, 0x09, 0x2c, 0x6e, 0x63, 0xff, 0xfc, 0xe8, 0x68, 0x1d, + 0x98, 0xdf, 0xce, 0x0c, 0x5e, 0x0d, 0xc9, 0x04, 0xd7, 0x32, 0x23, 0x74, 0x37, 0x33, 0x7d, 0x77, + 0x23, 0xff, 0x44, 0x82, 0x65, 0x16, 0xb3, 0x2f, 0x7c, 0xfb, 0xb7, 0x21, 0xdd, 0xa3, 0x3b, 0xf1, + 0x8b, 0xf3, 0xd5, 0xf1, 0xaa, 0x63, 0xc8, 0x54, 0xce, 0x17, 0x2b, 0xc0, 0xbf, 0x4b, 0xb0, 0xc2, + 0xa6, 0x85, 0x37, 0xba, 0x0b, 0x93, 0xe0, 0x0a, 0xe4, 0x22, 0x09, 0x00, 0xcb, 0x66, 0xc0, 0xea, + 0x47, 0xfe, 0xa7, 0xf8, 0x8c, 0x20, 0x16, 0x30, 0xa4, 0xb4, 0x6a, 0x10, 0x24, 0x3a, 0xd1, 0xc6, + 0x5c, 0x7a, 0xb0, 0x31, 0x17, 0xc8, 0x38, 0x27, 0xc8, 0xf8, 0xdf, 0x12, 0x5c, 0x6e, 0x62, 0x3f, + 0x2e, 0xca, 0x7f, 0x8e, 0xb2, 0x36, 0x20, 0x2b, 0x66, 0x29, 0x33, 0x67, 0xca, 0x52, 0xc4, 0x25, + 0x42, 0xd9, 0xd3, 0x82, 0xec, 0x3f, 0x90, 0xa0, 0xd8, 0xc4, 0xfe, 0x4e, 0xa4, 0xa0, 0x71, 0x71, + 0x62, 0xc7, 0x94, 0x54, 0x52, 0xb1, 0x25, 0x95, 0x38, 0x5b, 0xfc, 0x13, 0x09, 0x1e, 0x6b, 0x62, + 0x7f, 0x28, 0x3d, 0xbd, 0x38, 0xb8, 0xf1, 0x85, 0x9d, 0xd4, 0xa8, 0xc2, 0x4e, 0x9c, 0x82, 0xff, + 0x51, 0x82, 0xd5, 0x26, 0xf6, 0x23, 0x69, 0xf0, 0x85, 0xe1, 0x1d, 0xaa, 0x09, 0xa5, 0xce, 0x53, + 0x13, 0x8a, 0x13, 0xe7, 0x37, 0x25, 0x58, 0xa2, 0xf6, 0xc2, 0xd3, 0xd7, 0x8b, 0x93, 0x25, 0x52, + 0x2d, 0x4a, 0x0d, 0x56, 0x8b, 0xe2, 0xb0, 0x7d, 0x26, 0xc1, 0x12, 0xf3, 0x55, 0x2c, 0x2b, 0xbc, + 0x38, 0x6c, 0xcf, 0x40, 0x7e, 0x20, 0x2b, 0x65, 0x36, 0x31, 0xdf, 0x8d, 0x5c, 0xec, 0x03, 0x90, + 0xb3, 0x02, 0xc8, 0x7f, 0x9d, 0x82, 0x65, 0x62, 0xc4, 0xfd, 0x92, 0xe3, 0x85, 0xa1, 0xac, 0x41, + 0x5a, 0x6f, 0xf9, 0x01, 0xba, 0xfc, 0xcd, 0x97, 0x12, 0xcc, 0x20, 0x0e, 0xd2, 0x7a, 0x99, 0x72, + 0xab, 0x7c, 0x15, 0xf4, 0x46, 0x18, 0x61, 0x4e, 0x55, 0x52, 0x1d, 0x0c, 0x2f, 0xa2, 0x36, 0x1a, + 0x90, 0x66, 0x9b, 0x90, 0x5c, 0x6f, 0xbf, 0x76, 0xb7, 0x56, 0xbf, 0x5f, 0x63, 0x57, 0x67, 0x92, + 0x6f, 0x34, 0xca, 0xcd, 0xe6, 0xfd, 0xba, 0xba, 
0x55, 0x90, 0x48, 0x16, 0xb4, 0xad, 0xd4, 0x14, + 0x95, 0x64, 0x54, 0x21, 0x79, 0x2a, 0x98, 0xb8, 0xdf, 0x54, 0xd4, 0x5a, 0x79, 0x57, 0x29, 0x4c, + 0xcb, 0xbf, 0x04, 0xcb, 0x5b, 0xb8, 0x83, 0x1f, 0x41, 0xc0, 0x0d, 0xe4, 0x49, 0x09, 0xf2, 0xfc, + 0x7f, 0x58, 0xda, 0x31, 0xbd, 0x20, 0xd7, 0x38, 0xcf, 0xe9, 0xe8, 0x27, 0x33, 0xa9, 0x48, 0x32, + 0xf3, 0x31, 0x2c, 0x47, 0x77, 0xf0, 0x1c, 0xdb, 0xf2, 0x30, 0x7a, 0x13, 0xe6, 0x38, 0x34, 0xaf, + 0x28, 0xd1, 0xf2, 0xec, 0x24, 0xb9, 0x53, 0xc8, 0x83, 0xbe, 0x0c, 0xf3, 0x5d, 0xd3, 0xf3, 0x88, + 0x9f, 0x23, 0xfb, 0xb3, 0xee, 0x5d, 0x46, 0xcd, 0x71, 0xe2, 0xbb, 0x84, 0x26, 0xff, 0x32, 0x2c, + 0x6d, 0x63, 0x3f, 0xbc, 0xb1, 0x9c, 0x43, 0xbc, 0xa7, 0x20, 0xd7, 0xbf, 0x73, 0x85, 0xda, 0xcd, + 0x86, 0xb4, 0x11, 0xf9, 0xd4, 0x01, 0xac, 0x10, 0xe9, 0x43, 0x04, 0x17, 0xa1, 0xe1, 0xef, 0x4a, + 0xb0, 0x5a, 0xd1, 0xad, 0x16, 0xee, 0x3c, 0x62, 0x41, 0x45, 0x43, 0xfa, 0x55, 0x09, 0x56, 0x07, + 0x25, 0xe5, 0x6f, 0x7a, 0x0b, 0x20, 0xe4, 0x0e, 0xde, 0xf5, 0xd3, 0x93, 0x5c, 0x30, 0x55, 0x81, + 0x6f, 0xb2, 0xf7, 0xad, 0xc1, 0xea, 0x36, 0xf6, 0x49, 0x78, 0xc3, 0xee, 0xb9, 0x63, 0x57, 0x9c, + 0x98, 0x9f, 0x4e, 0x41, 0x4e, 0x5c, 0x1e, 0xbd, 0x04, 0x6b, 0x06, 0x3e, 0xd4, 0x7b, 0x1d, 0x7f, + 0xa8, 0xf2, 0xca, 0x36, 0x59, 0xe1, 0xc3, 0x03, 0x95, 0xd7, 0x75, 0x58, 0x7a, 0xa0, 0x77, 0xcc, + 0x68, 0x3d, 0x2c, 0xf8, 0x66, 0x6c, 0x91, 0x0e, 0x09, 0xe5, 0x30, 0x8f, 0xd5, 0x90, 0xd8, 0x3e, + 0x42, 0xba, 0x98, 0x0a, 0x6a, 0x48, 0x74, 0xa4, 0x5f, 0x43, 0xba, 0x06, 0x6c, 0x09, 0x61, 0xae, + 0x57, 0x9c, 0xa1, 0x6b, 0x2f, 0xd0, 0x81, 0x70, 0xaa, 0x87, 0x6e, 0xc2, 0x0a, 0x9b, 0x1b, 0x8d, + 0x10, 0xec, 0x4b, 0xb0, 0x8c, 0xca, 0x60, 0x46, 0xca, 0x16, 0x9e, 0xfc, 0x77, 0x12, 0xac, 0xb0, + 0x3b, 0xd4, 0xc5, 0x67, 0xd9, 0x6f, 0x43, 0x26, 0xcc, 0x3c, 0x79, 0x7e, 0x30, 0x51, 0x23, 0x67, + 0x2e, 0xc8, 0x4d, 0x85, 0x83, 0x93, 0x8e, 0x1c, 0x9c, 0xef, 0x4b, 0xb0, 0xc2, 0x7c, 0xef, 0x17, + 0xe1, 0xae, 0x10, 0x97, 0x21, 0xfc, 0x8a, 0xc4, 0xbc, 0x67, 0x80, 0xef, 0x02, 0xd3, 0x97, 0x51, + 0x97, 0xd1, 0xdf, 0x96, 0x00, 0x6d, 0xf7, 0x2f, 0x1b, 0x5f, 0x34, 0x0d, 0xfd, 0x6f, 0x0a, 0xe6, + 0x02, 0x6c, 0xb1, 0x05, 0xb4, 0x37, 0x20, 0xcd, 0x73, 0xcb, 0xa9, 0xd3, 0xf4, 0x67, 0x39, 0xd3, + 0x29, 0x3b, 0xc2, 0x03, 0x77, 0xa0, 0xd4, 0xf9, 0xef, 0x40, 0x55, 0x80, 0xae, 0x6e, 0xe9, 0x6d, + 0xdc, 0x0d, 0x5e, 0x4d, 0x36, 0xb1, 0xce, 0x47, 0x16, 0xdc, 0x0d, 0x19, 0x54, 0x81, 0x39, 0xb9, + 0xe3, 0x54, 0x84, 0xd9, 0xc0, 0x6f, 0xb1, 0xa6, 0x53, 0xf0, 0x38, 0xaa, 0xa7, 0x71, 0x38, 0xaa, + 0xa7, 0xb1, 0x19, 0xd6, 0x12, 0xdb, 0x34, 0x8b, 0xbb, 0x36, 0x81, 0xf8, 0xe3, 0x7b, 0x39, 0x47, + 0x31, 0x25, 0x43, 0xf9, 0x3b, 0xd2, 0x79, 0x9b, 0x1a, 0xab, 0x80, 0xf8, 0x83, 0x76, 0xbf, 0xba, + 0x77, 0x47, 0x63, 0x2d, 0x8c, 0xe9, 0xc1, 0x66, 0x47, 0x2a, 0xd2, 0xec, 0x98, 0xe9, 0x37, 0x3b, + 0xd2, 0xf2, 0x1f, 0x4a, 0x90, 0x8f, 0x2a, 0x9d, 0x84, 0x50, 0xf2, 0x0a, 0xb5, 0x9e, 0xd3, 0x76, + 0x75, 0x23, 0xf8, 0x72, 0x8e, 0xbe, 0xd6, 0x7d, 0x46, 0x42, 0x4f, 0x32, 0x43, 0xd1, 0x5c, 0xec, + 0xe8, 0xa6, 0xcb, 0x3f, 0x6a, 0x01, 0x42, 0x52, 0x29, 0x05, 0xdd, 0x83, 0x05, 0xce, 0xae, 0xd9, + 0x4e, 0x50, 0x90, 0x1f, 0xd7, 0xcf, 0x2d, 0xf7, 0x77, 0xa8, 0x33, 0x26, 0x35, 0xdf, 0x8b, 0x3c, + 0xcb, 0x5d, 0x40, 0xc3, 0xb3, 0xd0, 0x57, 0x61, 0x4d, 0x44, 0xac, 0x09, 0xe5, 0x52, 0x76, 0x96, + 0x96, 0x05, 0xf0, 0xcd, 0xb0, 0x72, 0x3a, 0xf6, 0x83, 0x0a, 0xf9, 0x1d, 0x58, 0x1c, 0x6a, 0xbf, + 0xa2, 0x2d, 0x48, 0x7f, 0x68, 0x5a, 0x86, 0xfd, 0xe1, 0x04, 0x5f, 0x03, 0x0a, 0xdc, 0xf7, 0x29, + 0x8f, 0xca, 0x79, 0xe5, 0x5f, 0x97, 0x22, 0x6b, 0xb3, 0x51, 0xd4, 0x85, 
0xa2, 0xa1, 0x9b, 0x9d, + 0x13, 0x4d, 0x6c, 0x15, 0xf3, 0xdd, 0x98, 0x03, 0x48, 0xfa, 0x36, 0x6a, 0x8b, 0xb0, 0x0e, 0x2d, + 0x7a, 0xe7, 0x4b, 0xea, 0xaa, 0x11, 0x3b, 0xb2, 0x39, 0x07, 0x69, 0xd6, 0x61, 0x90, 0x9b, 0xb0, + 0x1a, 0xcf, 0x3d, 0x50, 0x7e, 0x9e, 0x1a, 0x2c, 0x3f, 0x97, 0x60, 0xce, 0xe8, 0xb1, 0x2c, 0x87, + 0x3b, 0xc5, 0xf0, 0x59, 0xfe, 0xb9, 0x04, 0x8f, 0x0b, 0x95, 0x1e, 0xe1, 0x60, 0x7f, 0x8e, 0x6e, + 0xf8, 0x17, 0xe8, 0x92, 0xe2, 0xae, 0x58, 0x7f, 0xcd, 0x0a, 0x10, 0x81, 0xcc, 0x4d, 0xf3, 0x21, + 0xfe, 0x3c, 0xa5, 0xbd, 0xcc, 0x3f, 0x24, 0x61, 0x8e, 0x7f, 0x86, 0x3a, 0xfe, 0x8c, 0x15, 0x7a, + 0xfc, 0x38, 0x09, 0xfe, 0x40, 0x82, 0x27, 0x54, 0xbb, 0xd3, 0x39, 0xd0, 0x5b, 0xc7, 0x81, 0x18, + 0xfc, 0xec, 0x7c, 0xd1, 0xc2, 0xe7, 0x7b, 0xec, 0x7e, 0x22, 0xe4, 0x17, 0x3c, 0x69, 0x8f, 0x7e, + 0x3f, 0x23, 0x9d, 0xe5, 0xfb, 0x19, 0xf9, 0x63, 0x58, 0x8a, 0xeb, 0x36, 0x8e, 0xfe, 0x1e, 0xf3, + 0x69, 0xc8, 0x77, 0x4d, 0x4b, 0x0c, 0xb4, 0xec, 0x57, 0x16, 0xb9, 0xae, 0x69, 0xf5, 0x83, 0x2c, + 0x99, 0xa5, 0x7f, 0x34, 0x1c, 0x8e, 0x73, 0x5d, 0xfd, 0xa3, 0x70, 0x96, 0xfc, 0xd3, 0x29, 0x28, + 0x34, 0xb1, 0xcf, 0xbe, 0x9a, 0xbf, 0x38, 0xb5, 0x1f, 0xc1, 0x82, 0x8b, 0x3d, 0xbb, 0xe7, 0xb6, + 0xb0, 0xc6, 0x7f, 0x41, 0xc1, 0x7e, 0xaf, 0xf1, 0x56, 0x72, 0xf1, 0x22, 0x82, 0x6b, 0x5d, 0xe5, + 0x4b, 0x88, 0xbf, 0xa7, 0xc8, 0xbb, 0x11, 0x22, 0xba, 0x0e, 0x8b, 0x74, 0x03, 0xed, 0xd0, 0xb4, + 0xda, 0xd8, 0x75, 0x5c, 0x33, 0xcc, 0xd5, 0x0a, 0x74, 0xe0, 0x76, 0x9f, 0x1e, 0x67, 0x96, 0xa5, + 0x32, 0x2c, 0xc5, 0xec, 0x73, 0xaa, 0xdf, 0x12, 0xfc, 0x86, 0x44, 0x8b, 0x41, 0x3b, 0xb8, 0xad, + 0xb7, 0x4e, 0xca, 0x07, 0x7a, 0xeb, 0xe2, 0x14, 0x2b, 0x58, 0x49, 0x2a, 0x6a, 0x25, 0x71, 0x76, + 0xfc, 0x6d, 0x58, 0xa5, 0x61, 0xa9, 0xda, 0x50, 0xf9, 0xcf, 0x80, 0x2e, 0xbe, 0x8e, 0x22, 0xee, + 0xff, 0x1d, 0x09, 0x2e, 0x55, 0xec, 0xae, 0x43, 0x2e, 0x13, 0x8f, 0x12, 0x83, 0xe8, 0x76, 0x8e, + 0x61, 0x71, 0xe8, 0xb7, 0x2e, 0xc4, 0x6a, 0x84, 0x5f, 0xbb, 0xf0, 0xf3, 0x42, 0x10, 0x4c, 0xab, + 0x05, 0x5d, 0x9c, 0x4d, 0x4e, 0xd6, 0x73, 0x20, 0xd2, 0xd8, 0x15, 0x93, 0x81, 0x5a, 0x10, 0xe8, + 0xe4, 0xda, 0x28, 0xff, 0x8b, 0x04, 0x6b, 0xc4, 0x4b, 0x47, 0x3e, 0x4c, 0xbb, 0x30, 0x71, 0x87, + 0xbf, 0x99, 0x4b, 0x9d, 0xef, 0x9b, 0xb9, 0xb8, 0x77, 0xf8, 0x6f, 0xbc, 0x5c, 0x3f, 0xf4, 0xb9, + 0xd8, 0x85, 0x89, 0x15, 0xff, 0x45, 0x5b, 0xea, 0x17, 0xf3, 0x45, 0x5b, 0x4c, 0x39, 0xea, 0xe6, + 0xa7, 0xd7, 0x21, 0xcf, 0x0b, 0x11, 0x2c, 0x22, 0xbb, 0xe8, 0x47, 0x12, 0xe4, 0xc4, 0x02, 0x1d, + 0x4a, 0xba, 0xae, 0xc4, 0xd4, 0x0a, 0x4b, 0x1b, 0x13, 0xcf, 0x67, 0xa1, 0x45, 0x7e, 0xf5, 0xbb, + 0x3f, 0xfb, 0x8f, 0xef, 0x4d, 0xdd, 0x42, 0x37, 0xfb, 0x3f, 0xfd, 0xfb, 0x98, 0x5d, 0x36, 0xdf, + 0xe0, 0xda, 0xf4, 0x36, 0xae, 0x6d, 0x84, 0xa5, 0xf3, 0x8d, 0x6b, 0x9f, 0x6c, 0x84, 0x55, 0xbf, + 0xdf, 0x92, 0x00, 0xfa, 0xbd, 0x51, 0x94, 0xa4, 0xa4, 0xa1, 0x16, 0x6a, 0x69, 0x82, 0x02, 0x63, + 0x2c, 0x38, 0xa2, 0xba, 0x11, 0xd0, 0x42, 0x64, 0x1b, 0xd7, 0x3e, 0x41, 0xbf, 0x2b, 0xc1, 0x7c, + 0xa4, 0xb3, 0x8c, 0x92, 0x74, 0x13, 0xd7, 0x83, 0x2e, 0x4d, 0x54, 0x17, 0x93, 0xdf, 0xa0, 0x20, + 0x5f, 0x96, 0xcf, 0xa0, 0xc1, 0x57, 0xa5, 0x6b, 0x14, 0x67, 0xa4, 0xc9, 0x9b, 0x88, 0x33, 0xae, + 0x1d, 0x7c, 0x3a, 0x9c, 0xa5, 0x33, 0x28, 0x93, 0xe0, 0xfc, 0x53, 0x09, 0xf2, 0xd1, 0x5e, 0x2e, + 0x7a, 0x61, 0x2c, 0xd0, 0x81, 0x42, 0xc5, 0x84, 0x48, 0xab, 0x14, 0x69, 0xa5, 0xf4, 0xe6, 0xa9, + 0x91, 0x6e, 0x84, 0xf9, 0x0e, 0x47, 0xfd, 0xd3, 0x68, 0xfe, 0x2a, 0x66, 0x3e, 0xaf, 0x24, 0xe7, + 0x04, 0xa3, 0x1b, 0xba, 0x13, 0x4a, 0xf1, 0x4d, 0x2a, 0x85, 0x2a, 0xef, 0x9e, 0x53, 0x0a, 0x0f, + 
0xfb, 0x02, 0x06, 0x22, 0xd4, 0x8f, 0x25, 0x58, 0x1c, 0x6a, 0xbb, 0xa2, 0x17, 0xc7, 0xe4, 0x38, + 0x71, 0x4d, 0xda, 0x09, 0x45, 0xb9, 0x43, 0x45, 0xd9, 0x94, 0xdf, 0x38, 0x83, 0xe9, 0x78, 0xe1, + 0xd6, 0x04, 0xfa, 0xdf, 0xb0, 0x9c, 0x65, 0xf8, 0x23, 0xc1, 0x71, 0xed, 0xa5, 0x11, 0x6d, 0xdb, + 0x09, 0x05, 0xb8, 0x4b, 0x05, 0x50, 0xe4, 0xb7, 0xcf, 0x26, 0x40, 0x7f, 0x77, 0x7e, 0x12, 0x16, + 0x06, 0x9a, 0xb2, 0xe8, 0x46, 0x32, 0xfc, 0x98, 0x06, 0xee, 0x84, 0xc8, 0xb7, 0x29, 0xf2, 0xb2, + 0xfc, 0xfa, 0xd9, 0x90, 0xb3, 0x8d, 0x09, 0xea, 0x3f, 0x96, 0x20, 0x27, 0xf6, 0x5e, 0x13, 0x43, + 0x4b, 0x4c, 0x93, 0x76, 0x42, 0xbc, 0x5f, 0xa7, 0x78, 0xb7, 0xe4, 0xb7, 0xce, 0x6a, 0x2a, 0x7c, + 0x28, 0x80, 0x2c, 0xb6, 0x64, 0x13, 0x21, 0xc7, 0xf4, 0x6e, 0x1f, 0x01, 0xe4, 0x9e, 0xb0, 0x2b, + 0xb7, 0x8d, 0xf9, 0x48, 0x37, 0x34, 0xd1, 0x9b, 0xc7, 0xf5, 0x4d, 0x1f, 0x91, 0x45, 0x87, 0xdb, + 0x12, 0xd4, 0x9f, 0x49, 0x30, 0x1f, 0xe9, 0x7b, 0x26, 0xa2, 0x8e, 0xeb, 0x90, 0x4e, 0x88, 0x9a, + 0x07, 0xf4, 0x6b, 0x67, 0x09, 0xe8, 0x24, 0x00, 0x45, 0x9b, 0x5a, 0x89, 0x01, 0x28, 0xb6, 0xd3, + 0x57, 0xba, 0x71, 0x0a, 0x0e, 0x9e, 0x21, 0xbd, 0x4e, 0x31, 0xbf, 0x84, 0x6e, 0x4d, 0x1e, 0xdf, + 0x85, 0x4e, 0xd9, 0x67, 0x12, 0xe4, 0xc4, 0xae, 0x67, 0xa2, 0x0d, 0xc7, 0xb4, 0x47, 0x27, 0x54, + 0x6c, 0x1c, 0xc8, 0x24, 0xc5, 0xf6, 0x11, 0x12, 0xd5, 0xfe, 0x8e, 0x04, 0x0b, 0x03, 0x4d, 0xcb, + 0x44, 0x8f, 0x16, 0xdf, 0xe0, 0x2c, 0xad, 0x06, 0x2c, 0xc1, 0x7f, 0x79, 0x58, 0x57, 0xba, 0x8e, + 0x7f, 0x22, 0xdf, 0xa6, 0xe0, 0xde, 0x96, 0x5f, 0x3b, 0x0b, 0xb8, 0x57, 0x5b, 0x74, 0x33, 0x62, + 0xa6, 0x3f, 0x92, 0x60, 0x61, 0xa0, 0xa3, 0x98, 0x08, 0x33, 0xbe, 0xfb, 0x58, 0x7a, 0x36, 0xf1, + 0x44, 0xf6, 0xe7, 0x9f, 0x56, 0xa9, 0x9f, 0x6c, 0x78, 0x22, 0xb2, 0xbf, 0x90, 0x60, 0x3e, 0x52, + 0xce, 0x41, 0xe3, 0x92, 0xf3, 0xc1, 0xc6, 0x52, 0xe9, 0x85, 0xc9, 0x19, 0xb8, 0xb1, 0x72, 0x55, + 0xa3, 0x37, 0x27, 0x35, 0x56, 0xf1, 0x88, 0xf5, 0xf3, 0x0e, 0xf4, 0x03, 0x09, 0xb2, 0x42, 0xa3, + 0x09, 0x3d, 0x9f, 0xac, 0xe6, 0xc1, 0x3c, 0x6f, 0x92, 0xe2, 0x54, 0x2c, 0xd6, 0x33, 0x24, 0x48, + 0xd4, 0x31, 0x44, 0xfb, 0x9f, 0x89, 0x8e, 0x21, 0xb6, 0x55, 0x7a, 0xba, 0xcc, 0x54, 0x3e, 0xa7, + 0x7a, 0x79, 0x70, 0xcb, 0x47, 0xfb, 0x9d, 0x89, 0xa8, 0x63, 0x5b, 0xa3, 0x13, 0xa2, 0xe6, 0x8a, + 0xbe, 0x76, 0x5e, 0x45, 0xff, 0x93, 0x04, 0x6b, 0x23, 0x4a, 0xa9, 0x28, 0xe9, 0x47, 0x05, 0xc9, + 0xe5, 0xd7, 0x09, 0x85, 0x50, 0xa9, 0x10, 0x3b, 0xf2, 0xf6, 0x39, 0xd3, 0x69, 0x97, 0x83, 0x21, + 0xef, 0xe0, 0x9f, 0x25, 0x58, 0x89, 0xad, 0xe8, 0xa3, 0x97, 0x27, 0xbb, 0x1c, 0x0c, 0xf5, 0x00, + 0x26, 0x14, 0xe6, 0x3e, 0x15, 0xe6, 0x1b, 0xf2, 0xce, 0xf9, 0xef, 0x06, 0x7d, 0x08, 0x44, 0xa2, + 0x3f, 0x92, 0x20, 0x13, 0x16, 0x34, 0xd1, 0xf5, 0x53, 0x94, 0x3d, 0x27, 0x44, 0x5e, 0xa7, 0xc8, + 0xab, 0xf2, 0xd6, 0xd9, 0xf2, 0x8e, 0x68, 0xcd, 0x53, 0xc8, 0x98, 0xfa, 0x55, 0xcc, 0x71, 0x19, + 0xd3, 0x50, 0xbd, 0xf3, 0xd1, 0x64, 0x4c, 0xfd, 0x6d, 0x09, 0xea, 0x3f, 0x27, 0x77, 0x80, 0x68, + 0x8d, 0x33, 0xf9, 0x0e, 0x10, 0x5b, 0x0f, 0x9d, 0x10, 0xf9, 0x2e, 0x45, 0xbe, 0x2d, 0x6f, 0x9e, + 0x05, 0x39, 0xdd, 0xd8, 0x09, 0x36, 0x26, 0xd8, 0xff, 0x56, 0x02, 0x34, 0x5c, 0x1e, 0x45, 0xb7, + 0x92, 0x7c, 0xe6, 0xa8, 0x6a, 0xea, 0x84, 0x12, 0x34, 0xa8, 0x04, 0x5f, 0x97, 0x95, 0x33, 0x48, + 0xd0, 0x0a, 0xf6, 0x8e, 0x08, 0xf1, 0x57, 0xec, 0x12, 0x26, 0x36, 0xa6, 0xc6, 0x5d, 0xc2, 0x62, + 0x9a, 0x58, 0x13, 0xc2, 0xff, 0x06, 0x85, 0x7f, 0x57, 0xbe, 0x7d, 0xfe, 0xe3, 0x4a, 0x36, 0x27, + 0xf8, 0xff, 0x52, 0xa2, 0x1d, 0x91, 0xe8, 0xff, 0x8e, 0xb8, 0x39, 0x46, 0x80, 0x98, 0xfa, 0xee, + 0x84, 0x12, 0xd4, 0xa8, 
0x04, 0x77, 0xe4, 0xca, 0xd9, 0x8c, 0x3f, 0xb2, 0x33, 0x81, 0xff, 0xf7, + 0xfc, 0x1e, 0x3f, 0x54, 0xe8, 0x1c, 0xfb, 0x99, 0x68, 0x7c, 0x3d, 0xf7, 0xc2, 0x83, 0x00, 0xf7, + 0x95, 0x03, 0xbb, 0xbf, 0x2a, 0x5d, 0xdb, 0xfc, 0x89, 0x04, 0x97, 0x5b, 0x76, 0x77, 0xf4, 0xfe, + 0x9b, 0x4b, 0x95, 0xe0, 0xdf, 0x3f, 0xd0, 0xd2, 0x43, 0x83, 0x64, 0xb7, 0x0d, 0xe9, 0xdd, 0x4d, + 0xce, 0xd1, 0xb6, 0x3b, 0xba, 0xd5, 0x5e, 0xb7, 0xdd, 0xf6, 0x46, 0x1b, 0x5b, 0x34, 0xf7, 0xe5, + 0xff, 0x58, 0x4d, 0x77, 0x4c, 0x2f, 0xe6, 0x9f, 0xab, 0xbd, 0x16, 0x92, 0x7e, 0x38, 0xf5, 0xe4, + 0x36, 0x5b, 0xa4, 0xd2, 0xb1, 0x7b, 0xc6, 0x7a, 0x25, 0xdc, 0xfc, 0xde, 0x0d, 0xfa, 0x03, 0xf5, + 0x1b, 0xff, 0x10, 0xcc, 0x78, 0x9f, 0xce, 0x78, 0x3f, 0x9c, 0xf1, 0xfe, 0x3d, 0xbe, 0xda, 0x41, + 0x9a, 0x6e, 0xf9, 0xe2, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xec, 0x17, 0x6d, 0x5d, 0xce, 0x4d, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/container/v1beta1/cluster_service.pb.go b/vendor/google.golang.org/genproto/googleapis/container/v1beta1/cluster_service.pb.go new file mode 100644 index 0000000..a6f0e87 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/container/v1beta1/cluster_service.pb.go @@ -0,0 +1,9135 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/container/v1beta1/cluster_service.proto + +package container // import "google.golang.org/genproto/googleapis/container/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/iam/v1" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible values for Effect in taint. 
+type NodeTaint_Effect int32 + +const ( + // Not set + NodeTaint_EFFECT_UNSPECIFIED NodeTaint_Effect = 0 + // NoSchedule + NodeTaint_NO_SCHEDULE NodeTaint_Effect = 1 + // PreferNoSchedule + NodeTaint_PREFER_NO_SCHEDULE NodeTaint_Effect = 2 + // NoExecute + NodeTaint_NO_EXECUTE NodeTaint_Effect = 3 +) + +var NodeTaint_Effect_name = map[int32]string{ + 0: "EFFECT_UNSPECIFIED", + 1: "NO_SCHEDULE", + 2: "PREFER_NO_SCHEDULE", + 3: "NO_EXECUTE", +} +var NodeTaint_Effect_value = map[string]int32{ + "EFFECT_UNSPECIFIED": 0, + "NO_SCHEDULE": 1, + "PREFER_NO_SCHEDULE": 2, + "NO_EXECUTE": 3, +} + +func (x NodeTaint_Effect) String() string { + return proto.EnumName(NodeTaint_Effect_name, int32(x)) +} +func (NodeTaint_Effect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{1, 0} +} + +// Istio auth mode, https://istio.io/docs/concepts/security/mutual-tls.html +type IstioConfig_IstioAuthMode int32 + +const ( + // auth not enabled + IstioConfig_AUTH_NONE IstioConfig_IstioAuthMode = 0 + // auth mutual TLS enabled + IstioConfig_AUTH_MUTUAL_TLS IstioConfig_IstioAuthMode = 1 +) + +var IstioConfig_IstioAuthMode_name = map[int32]string{ + 0: "AUTH_NONE", + 1: "AUTH_MUTUAL_TLS", +} +var IstioConfig_IstioAuthMode_value = map[string]int32{ + "AUTH_NONE": 0, + "AUTH_MUTUAL_TLS": 1, +} + +func (x IstioConfig_IstioAuthMode) String() string { + return proto.EnumName(IstioConfig_IstioAuthMode_name, int32(x)) +} +func (IstioConfig_IstioAuthMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{10, 0} +} + +// Allowed Network Policy providers. +type NetworkPolicy_Provider int32 + +const ( + // Not set + NetworkPolicy_PROVIDER_UNSPECIFIED NetworkPolicy_Provider = 0 + // Tigera (Calico Felix). + NetworkPolicy_CALICO NetworkPolicy_Provider = 1 +) + +var NetworkPolicy_Provider_name = map[int32]string{ + 0: "PROVIDER_UNSPECIFIED", + 1: "CALICO", +} +var NetworkPolicy_Provider_value = map[string]int32{ + "PROVIDER_UNSPECIFIED": 0, + "CALICO": 1, +} + +func (x NetworkPolicy_Provider) String() string { + return proto.EnumName(NetworkPolicy_Provider_name, int32(x)) +} +func (NetworkPolicy_Provider) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{14, 0} +} + +// The current status of the cluster. +type Cluster_Status int32 + +const ( + // Not set. + Cluster_STATUS_UNSPECIFIED Cluster_Status = 0 + // The PROVISIONING state indicates the cluster is being created. + Cluster_PROVISIONING Cluster_Status = 1 + // The RUNNING state indicates the cluster has been created and is fully + // usable. + Cluster_RUNNING Cluster_Status = 2 + // The RECONCILING state indicates that some work is actively being done on + // the cluster, such as upgrading the master or node software. Details can + // be found in the `statusMessage` field. + Cluster_RECONCILING Cluster_Status = 3 + // The STOPPING state indicates the cluster is being deleted. + Cluster_STOPPING Cluster_Status = 4 + // The ERROR state indicates the cluster may be unusable. Details + // can be found in the `statusMessage` field. + Cluster_ERROR Cluster_Status = 5 + // The DEGRADED state indicates the cluster requires user action to restore + // full functionality. Details can be found in the `statusMessage` field. 
+ Cluster_DEGRADED Cluster_Status = 6 +) + +var Cluster_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RECONCILING", + 4: "STOPPING", + 5: "ERROR", + 6: "DEGRADED", +} +var Cluster_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RECONCILING": 3, + "STOPPING": 4, + "ERROR": 5, + "DEGRADED": 6, +} + +func (x Cluster_Status) String() string { + return proto.EnumName(Cluster_Status_name, int32(x)) +} +func (Cluster_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{18, 0} +} + +// Current status of the operation. +type Operation_Status int32 + +const ( + // Not set. + Operation_STATUS_UNSPECIFIED Operation_Status = 0 + // The operation has been created. + Operation_PENDING Operation_Status = 1 + // The operation is currently running. + Operation_RUNNING Operation_Status = 2 + // The operation is done, either cancelled or completed. + Operation_DONE Operation_Status = 3 + // The operation is aborting. + Operation_ABORTING Operation_Status = 4 +) + +var Operation_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", + 4: "ABORTING", +} +var Operation_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, + "ABORTING": 4, +} + +func (x Operation_Status) String() string { + return proto.EnumName(Operation_Status_name, int32(x)) +} +func (Operation_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{20, 0} +} + +// Operation type. +type Operation_Type int32 + +const ( + // Not set. + Operation_TYPE_UNSPECIFIED Operation_Type = 0 + // Cluster create. + Operation_CREATE_CLUSTER Operation_Type = 1 + // Cluster delete. + Operation_DELETE_CLUSTER Operation_Type = 2 + // A master upgrade. + Operation_UPGRADE_MASTER Operation_Type = 3 + // A node upgrade. + Operation_UPGRADE_NODES Operation_Type = 4 + // Cluster repair. + Operation_REPAIR_CLUSTER Operation_Type = 5 + // Cluster update. + Operation_UPDATE_CLUSTER Operation_Type = 6 + // Node pool create. + Operation_CREATE_NODE_POOL Operation_Type = 7 + // Node pool delete. + Operation_DELETE_NODE_POOL Operation_Type = 8 + // Set node pool management. + Operation_SET_NODE_POOL_MANAGEMENT Operation_Type = 9 + // Automatic node pool repair. + Operation_AUTO_REPAIR_NODES Operation_Type = 10 + // Automatic node upgrade. + Operation_AUTO_UPGRADE_NODES Operation_Type = 11 + // Set labels. + Operation_SET_LABELS Operation_Type = 12 + // Set/generate master auth materials + Operation_SET_MASTER_AUTH Operation_Type = 13 + // Set node pool size. + Operation_SET_NODE_POOL_SIZE Operation_Type = 14 + // Updates network policy for a cluster. + Operation_SET_NETWORK_POLICY Operation_Type = 15 + // Set the maintenance policy. 
+ Operation_SET_MAINTENANCE_POLICY Operation_Type = 16 +) + +var Operation_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "CREATE_CLUSTER", + 2: "DELETE_CLUSTER", + 3: "UPGRADE_MASTER", + 4: "UPGRADE_NODES", + 5: "REPAIR_CLUSTER", + 6: "UPDATE_CLUSTER", + 7: "CREATE_NODE_POOL", + 8: "DELETE_NODE_POOL", + 9: "SET_NODE_POOL_MANAGEMENT", + 10: "AUTO_REPAIR_NODES", + 11: "AUTO_UPGRADE_NODES", + 12: "SET_LABELS", + 13: "SET_MASTER_AUTH", + 14: "SET_NODE_POOL_SIZE", + 15: "SET_NETWORK_POLICY", + 16: "SET_MAINTENANCE_POLICY", +} +var Operation_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "CREATE_CLUSTER": 1, + "DELETE_CLUSTER": 2, + "UPGRADE_MASTER": 3, + "UPGRADE_NODES": 4, + "REPAIR_CLUSTER": 5, + "UPDATE_CLUSTER": 6, + "CREATE_NODE_POOL": 7, + "DELETE_NODE_POOL": 8, + "SET_NODE_POOL_MANAGEMENT": 9, + "AUTO_REPAIR_NODES": 10, + "AUTO_UPGRADE_NODES": 11, + "SET_LABELS": 12, + "SET_MASTER_AUTH": 13, + "SET_NODE_POOL_SIZE": 14, + "SET_NETWORK_POLICY": 15, + "SET_MAINTENANCE_POLICY": 16, +} + +func (x Operation_Type) String() string { + return proto.EnumName(Operation_Type_name, int32(x)) +} +func (Operation_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{20, 1} +} + +// Operation type: what type update to perform. +type SetMasterAuthRequest_Action int32 + +const ( + // Operation is unknown and will error out. + SetMasterAuthRequest_UNKNOWN SetMasterAuthRequest_Action = 0 + // Set the password to a user generated value. + SetMasterAuthRequest_SET_PASSWORD SetMasterAuthRequest_Action = 1 + // Generate a new password and set it to that. + SetMasterAuthRequest_GENERATE_PASSWORD SetMasterAuthRequest_Action = 2 + // Set the username. If an empty username is provided, basic authentication + // is disabled for the cluster. If a non-empty username is provided, basic + // authentication is enabled, with either a provided password or a generated + // one. + SetMasterAuthRequest_SET_USERNAME SetMasterAuthRequest_Action = 3 +) + +var SetMasterAuthRequest_Action_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SET_PASSWORD", + 2: "GENERATE_PASSWORD", + 3: "SET_USERNAME", +} +var SetMasterAuthRequest_Action_value = map[string]int32{ + "UNKNOWN": 0, + "SET_PASSWORD": 1, + "GENERATE_PASSWORD": 2, + "SET_USERNAME": 3, +} + +func (x SetMasterAuthRequest_Action) String() string { + return proto.EnumName(SetMasterAuthRequest_Action_name, int32(x)) +} +func (SetMasterAuthRequest_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{32, 0} +} + +// The current status of the node pool instance. +type NodePool_Status int32 + +const ( + // Not set. + NodePool_STATUS_UNSPECIFIED NodePool_Status = 0 + // The PROVISIONING state indicates the node pool is being created. + NodePool_PROVISIONING NodePool_Status = 1 + // The RUNNING state indicates the node pool has been created + // and is fully usable. + NodePool_RUNNING NodePool_Status = 2 + // The RUNNING_WITH_ERROR state indicates the node pool has been created + // and is partially usable. Some error state has occurred and some + // functionality may be impaired. Customer may need to reissue a request + // or trigger a new update. + NodePool_RUNNING_WITH_ERROR NodePool_Status = 3 + // The RECONCILING state indicates that some work is actively being done on + // the node pool, such as upgrading node software. Details can + // be found in the `statusMessage` field. 
+ NodePool_RECONCILING NodePool_Status = 4 + // The STOPPING state indicates the node pool is being deleted. + NodePool_STOPPING NodePool_Status = 5 + // The ERROR state indicates the node pool may be unusable. Details + // can be found in the `statusMessage` field. + NodePool_ERROR NodePool_Status = 6 +) + +var NodePool_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "PROVISIONING", + 2: "RUNNING", + 3: "RUNNING_WITH_ERROR", + 4: "RECONCILING", + 5: "STOPPING", + 6: "ERROR", +} +var NodePool_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "PROVISIONING": 1, + "RUNNING": 2, + "RUNNING_WITH_ERROR": 3, + "RECONCILING": 4, + "STOPPING": 5, + "ERROR": 6, +} + +func (x NodePool_Status) String() string { + return proto.EnumName(NodePool_Status_name, int32(x)) +} +func (NodePool_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{46, 0} +} + +// NodeMetadata is the configuration for if and how to expose the node +// metadata to the workload running on the node. +type WorkloadMetadataConfig_NodeMetadata int32 + +const ( + // Not set. + WorkloadMetadataConfig_UNSPECIFIED WorkloadMetadataConfig_NodeMetadata = 0 + // Prevent workloads not in hostNetwork from accessing certain VM metadata, + // specifically kube-env, which contains Kubelet credentials, and the + // instance identity token. + // + // Metadata concealment is a temporary security solution available while the + // bootstrapping process for cluster nodes is being redesigned with + // significant security improvements. This feature is scheduled to be + // deprecated in the future and later removed. + WorkloadMetadataConfig_SECURE WorkloadMetadataConfig_NodeMetadata = 1 + // Expose all VM metadata to pods. + WorkloadMetadataConfig_EXPOSE WorkloadMetadataConfig_NodeMetadata = 2 +) + +var WorkloadMetadataConfig_NodeMetadata_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "SECURE", + 2: "EXPOSE", +} +var WorkloadMetadataConfig_NodeMetadata_value = map[string]int32{ + "UNSPECIFIED": 0, + "SECURE": 1, + "EXPOSE": 2, +} + +func (x WorkloadMetadataConfig_NodeMetadata) String() string { + return proto.EnumName(WorkloadMetadataConfig_NodeMetadata_name, int32(x)) +} +func (WorkloadMetadataConfig_NodeMetadata) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{64, 0} +} + +// LocationType is the type of GKE location, regional or zonal. +type Location_LocationType int32 + +const ( + // LOCATION_TYPE_UNSPECIFIED means the location type was not determined. + Location_LOCATION_TYPE_UNSPECIFIED Location_LocationType = 0 + // A GKE Location where Zonal clusters can be created. + Location_ZONE Location_LocationType = 1 + // A GKE Location where Regional clusters can be created. + Location_REGION Location_LocationType = 2 +) + +var Location_LocationType_name = map[int32]string{ + 0: "LOCATION_TYPE_UNSPECIFIED", + 1: "ZONE", + 2: "REGION", +} +var Location_LocationType_value = map[string]int32{ + "LOCATION_TYPE_UNSPECIFIED": 0, + "ZONE": 1, + "REGION": 2, +} + +func (x Location_LocationType) String() string { + return proto.EnumName(Location_LocationType_name, int32(x)) +} +func (Location_LocationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{69, 0} +} + +// Code for each condition +type StatusCondition_Code int32 + +const ( + // UNKNOWN indicates a generic condition. 
+ StatusCondition_UNKNOWN StatusCondition_Code = 0 + // GCE_STOCKOUT indicates a Google Compute Engine stockout. + StatusCondition_GCE_STOCKOUT StatusCondition_Code = 1 + // GKE_SERVICE_ACCOUNT_DELETED indicates that the user deleted their robot + // service account. + StatusCondition_GKE_SERVICE_ACCOUNT_DELETED StatusCondition_Code = 2 + // Google Compute Engine quota was exceeded. + StatusCondition_GCE_QUOTA_EXCEEDED StatusCondition_Code = 3 + // Cluster state was manually changed by an SRE due to a system logic error. + // More codes TBA + StatusCondition_SET_BY_OPERATOR StatusCondition_Code = 4 +) + +var StatusCondition_Code_name = map[int32]string{ + 0: "UNKNOWN", + 1: "GCE_STOCKOUT", + 2: "GKE_SERVICE_ACCOUNT_DELETED", + 3: "GCE_QUOTA_EXCEEDED", + 4: "SET_BY_OPERATOR", +} +var StatusCondition_Code_value = map[string]int32{ + "UNKNOWN": 0, + "GCE_STOCKOUT": 1, + "GKE_SERVICE_ACCOUNT_DELETED": 2, + "GCE_QUOTA_EXCEEDED": 3, + "SET_BY_OPERATOR": 4, +} + +func (x StatusCondition_Code) String() string { + return proto.EnumName(StatusCondition_Code_name, int32(x)) +} +func (StatusCondition_Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{70, 0} +} + +// Status shows the current usage of a secondary IP range. +type UsableSubnetworkSecondaryRange_Status int32 + +const ( + // UNKNOWN is the zero value of the Status enum. It's not a valid status. + UsableSubnetworkSecondaryRange_UNKNOWN UsableSubnetworkSecondaryRange_Status = 0 + // UNUSED denotes that this range is unclaimed by any cluster. + UsableSubnetworkSecondaryRange_UNUSED UsableSubnetworkSecondaryRange_Status = 1 + // IN_USE_SERVICE denotes that this range is claimed by a cluster for + // services. It cannot be used for other clusters. + UsableSubnetworkSecondaryRange_IN_USE_SERVICE UsableSubnetworkSecondaryRange_Status = 2 + // IN_USE_SHAREABLE_POD denotes this range was created by the network admin + // and is currently claimed by a cluster for pods. It can only be used by + // other clusters as a pod range. + UsableSubnetworkSecondaryRange_IN_USE_SHAREABLE_POD UsableSubnetworkSecondaryRange_Status = 3 + // IN_USE_MANAGED_POD denotes this range was created by GKE and is claimed + // for pods. It cannot be used for other clusters. + UsableSubnetworkSecondaryRange_IN_USE_MANAGED_POD UsableSubnetworkSecondaryRange_Status = 4 +) + +var UsableSubnetworkSecondaryRange_Status_name = map[int32]string{ + 0: "UNKNOWN", + 1: "UNUSED", + 2: "IN_USE_SERVICE", + 3: "IN_USE_SHAREABLE_POD", + 4: "IN_USE_MANAGED_POD", +} +var UsableSubnetworkSecondaryRange_Status_value = map[string]int32{ + "UNKNOWN": 0, + "UNUSED": 1, + "IN_USE_SERVICE": 2, + "IN_USE_SHAREABLE_POD": 3, + "IN_USE_MANAGED_POD": 4, +} + +func (x UsableSubnetworkSecondaryRange_Status) String() string { + return proto.EnumName(UsableSubnetworkSecondaryRange_Status_name, int32(x)) +} +func (UsableSubnetworkSecondaryRange_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{74, 0} +} + +// Parameters that describe the nodes in a cluster. +type NodeConfig struct { + // The name of a Google Compute Engine [machine + // type](/compute/docs/machine-types) (e.g. + // `n1-standard-1`). + // + // If unspecified, the default machine type is + // `n1-standard-1`. + MachineType string `protobuf:"bytes,1,opt,name=machine_type,json=machineType,proto3" json:"machine_type,omitempty"` + // Size of the disk attached to each node, specified in GB. + // The smallest allowed disk size is 10GB. 
+ //
+ // If unspecified, the default disk size is 100GB.
+ DiskSizeGb int32 `protobuf:"varint,2,opt,name=disk_size_gb,json=diskSizeGb,proto3" json:"disk_size_gb,omitempty"`
+ // The set of Google API scopes to be made available on all of the
+ // node VMs under the "default" service account.
+ //
+ // The following scopes are recommended, but not required, and by default are
+ // not included:
+ //
+ // * `https://www.googleapis.com/auth/compute` is required for mounting
+ // persistent storage on your nodes.
+ // * `https://www.googleapis.com/auth/devstorage.read_only` is required for
+ // communicating with **gcr.io**
+ // (the [Google Container Registry](/container-registry/)).
+ //
+ // If unspecified, no scopes are added, unless Cloud Logging or Cloud
+ // Monitoring are enabled, in which case their required scopes will be added.
+ OauthScopes []string `protobuf:"bytes,3,rep,name=oauth_scopes,json=oauthScopes,proto3" json:"oauth_scopes,omitempty"`
+ // The Google Cloud Platform Service Account to be used by the node VMs. If
+ // no Service Account is specified, the "default" service account is used.
+ ServiceAccount string `protobuf:"bytes,9,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"`
+ // The metadata key/value pairs assigned to instances in the cluster.
+ //
+ // Keys must conform to the regexp [a-zA-Z0-9-_]+ and be less than 128 bytes
+ // in length. These are reflected as part of a URL in the metadata server.
+ // Additionally, to avoid ambiguity, keys must not conflict with any other
+ // metadata keys for the project or be one of the reserved keys:
+ // "cluster-location"
+ // "cluster-name"
+ // "cluster-uid"
+ // "configure-sh"
+ // "enable-oslogin"
+ // "gci-ensure-gke-docker"
+ // "gci-update-strategy"
+ // "instance-template"
+ // "kube-env"
+ // "startup-script"
+ // "user-data"
+ //
+ // Values are free-form strings, and only have meaning as interpreted by
+ // the image running in the instance. The only restriction placed on them is
+ // that each value's size must be less than or equal to 32 KB.
+ //
+ // The total size of all keys and values must be less than 512 KB.
+ Metadata map[string]string `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ // The image type to use for this node. Note that for a given image type,
+ // the latest version of it will be used.
+ ImageType string `protobuf:"bytes,5,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"`
+ // The map of Kubernetes labels (key/value pairs) to be applied to each node.
+ // These will be added in addition to any default label(s) that
+ // Kubernetes may apply to the node.
+ // In case of conflict in label keys, the applied set may differ depending on
+ // the Kubernetes version -- it's best to assume the behavior is undefined
+ // and conflicts should be avoided.
+ // For more information, including usage and the valid values, see:
+ // https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/
+ Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
+ // The number of local SSD disks to be attached to the node.
+ //
+ // The limit for this value is dependent upon the maximum number of
+ // disks available on a machine per zone. See:
+ // https://cloud.google.com/compute/docs/disks/local-ssd#local_ssd_limits
+ // for more information.
+ LocalSsdCount int32 `protobuf:"varint,7,opt,name=local_ssd_count,json=localSsdCount,proto3" json:"local_ssd_count,omitempty"`
+ // The list of instance tags applied to all nodes. Tags are used to identify
+ // valid sources or targets for network firewalls and are specified by
+ // the client during cluster or node pool creation. Each tag within the list
+ // must comply with RFC1035.
+ Tags []string `protobuf:"bytes,8,rep,name=tags,proto3" json:"tags,omitempty"`
+ // Whether the nodes are created as preemptible VM instances. See:
+ // https://cloud.google.com/compute/docs/instances/preemptible for more
+ // information about preemptible VM instances.
+ Preemptible bool `protobuf:"varint,10,opt,name=preemptible,proto3" json:"preemptible,omitempty"`
+ // A list of hardware accelerators to be attached to each node.
+ // See https://cloud.google.com/compute/docs/gpus for more information about
+ // support for GPUs.
+ Accelerators []*AcceleratorConfig `protobuf:"bytes,11,rep,name=accelerators,proto3" json:"accelerators,omitempty"`
+ // Type of the disk attached to each node (e.g. 'pd-standard' or 'pd-ssd')
+ //
+ // If unspecified, the default disk type is 'pd-standard'
+ DiskType string `protobuf:"bytes,12,opt,name=disk_type,json=diskType,proto3" json:"disk_type,omitempty"`
+ // Minimum CPU platform to be used by this instance. The instance may be
+ // scheduled on the specified or newer CPU platform. Applicable values are the
+ // friendly names of CPU platforms, such as
+ // minCpuPlatform: "Intel Haswell" or
+ // minCpuPlatform: "Intel Sandy Bridge". For more
+ // information, read [how to specify min CPU
+ // platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
+ MinCpuPlatform string `protobuf:"bytes,13,opt,name=min_cpu_platform,json=minCpuPlatform,proto3" json:"min_cpu_platform,omitempty"`
+ // The workload metadata configuration for this node.
+ WorkloadMetadataConfig *WorkloadMetadataConfig `protobuf:"bytes,14,opt,name=workload_metadata_config,json=workloadMetadataConfig,proto3" json:"workload_metadata_config,omitempty"`
+ // List of Kubernetes taints to be applied to each node.
+ // + // For more information, including usage and the valid values, see: + // https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ + Taints []*NodeTaint `protobuf:"bytes,15,rep,name=taints,proto3" json:"taints,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeConfig) Reset() { *m = NodeConfig{} } +func (m *NodeConfig) String() string { return proto.CompactTextString(m) } +func (*NodeConfig) ProtoMessage() {} +func (*NodeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{0} +} +func (m *NodeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeConfig.Unmarshal(m, b) +} +func (m *NodeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeConfig.Marshal(b, m, deterministic) +} +func (dst *NodeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeConfig.Merge(dst, src) +} +func (m *NodeConfig) XXX_Size() int { + return xxx_messageInfo_NodeConfig.Size(m) +} +func (m *NodeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NodeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeConfig proto.InternalMessageInfo + +func (m *NodeConfig) GetMachineType() string { + if m != nil { + return m.MachineType + } + return "" +} + +func (m *NodeConfig) GetDiskSizeGb() int32 { + if m != nil { + return m.DiskSizeGb + } + return 0 +} + +func (m *NodeConfig) GetOauthScopes() []string { + if m != nil { + return m.OauthScopes + } + return nil +} + +func (m *NodeConfig) GetServiceAccount() string { + if m != nil { + return m.ServiceAccount + } + return "" +} + +func (m *NodeConfig) GetMetadata() map[string]string { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *NodeConfig) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *NodeConfig) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *NodeConfig) GetLocalSsdCount() int32 { + if m != nil { + return m.LocalSsdCount + } + return 0 +} + +func (m *NodeConfig) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *NodeConfig) GetPreemptible() bool { + if m != nil { + return m.Preemptible + } + return false +} + +func (m *NodeConfig) GetAccelerators() []*AcceleratorConfig { + if m != nil { + return m.Accelerators + } + return nil +} + +func (m *NodeConfig) GetDiskType() string { + if m != nil { + return m.DiskType + } + return "" +} + +func (m *NodeConfig) GetMinCpuPlatform() string { + if m != nil { + return m.MinCpuPlatform + } + return "" +} + +func (m *NodeConfig) GetWorkloadMetadataConfig() *WorkloadMetadataConfig { + if m != nil { + return m.WorkloadMetadataConfig + } + return nil +} + +func (m *NodeConfig) GetTaints() []*NodeTaint { + if m != nil { + return m.Taints + } + return nil +} + +// Kubernetes taint is comprised of three fields: key, value, and effect. Effect +// can only be one of three types: NoSchedule, PreferNoSchedule or NoExecute. +// +// For more information, including usage and the valid values, see: +// https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/ +type NodeTaint struct { + // Key for taint. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // Value for taint. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Effect for taint. 
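+ // For example, the taint applied by
+ // `kubectl taint nodes NODE dedicated=gpu:NoSchedule` would be expressed
+ // here as Key: "dedicated", Value: "gpu", and the matching NodeTaint_Effect
+ // constant (the Effect enum values are defined elsewhere in this file);
+ // "dedicated" and "gpu" are illustrative values only.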
+ Effect NodeTaint_Effect `protobuf:"varint,3,opt,name=effect,proto3,enum=google.container.v1beta1.NodeTaint_Effect" json:"effect,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeTaint) Reset() { *m = NodeTaint{} } +func (m *NodeTaint) String() string { return proto.CompactTextString(m) } +func (*NodeTaint) ProtoMessage() {} +func (*NodeTaint) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{1} +} +func (m *NodeTaint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeTaint.Unmarshal(m, b) +} +func (m *NodeTaint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeTaint.Marshal(b, m, deterministic) +} +func (dst *NodeTaint) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeTaint.Merge(dst, src) +} +func (m *NodeTaint) XXX_Size() int { + return xxx_messageInfo_NodeTaint.Size(m) +} +func (m *NodeTaint) XXX_DiscardUnknown() { + xxx_messageInfo_NodeTaint.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeTaint proto.InternalMessageInfo + +func (m *NodeTaint) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *NodeTaint) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *NodeTaint) GetEffect() NodeTaint_Effect { + if m != nil { + return m.Effect + } + return NodeTaint_EFFECT_UNSPECIFIED +} + +// The authentication information for accessing the master endpoint. +// Authentication can be done using HTTP basic auth or using client +// certificates. +type MasterAuth struct { + // The username to use for HTTP basic authentication to the master endpoint. + // For clusters v1.6.0 and later, basic authentication can be disabled by + // leaving username unspecified (or setting it to the empty string). + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // The password to use for HTTP basic authentication to the master endpoint. + // Because the master endpoint is open to the Internet, you should create a + // strong password. If a password is provided for cluster creation, username + // must be non-empty. + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + // Configuration for client certificate authentication on the cluster. For + // clusters before v1.12, if no configuration is specified, a client + // certificate is issued. + ClientCertificateConfig *ClientCertificateConfig `protobuf:"bytes,3,opt,name=client_certificate_config,json=clientCertificateConfig,proto3" json:"client_certificate_config,omitempty"` + // [Output only] Base64-encoded public certificate that is the root of + // trust for the cluster. + ClusterCaCertificate string `protobuf:"bytes,100,opt,name=cluster_ca_certificate,json=clusterCaCertificate,proto3" json:"cluster_ca_certificate,omitempty"` + // [Output only] Base64-encoded public certificate used by clients to + // authenticate to the cluster endpoint. + ClientCertificate string `protobuf:"bytes,101,opt,name=client_certificate,json=clientCertificate,proto3" json:"client_certificate,omitempty"` + // [Output only] Base64-encoded private key used by clients to authenticate + // to the cluster endpoint. 
+ ClientKey string `protobuf:"bytes,102,opt,name=client_key,json=clientKey,proto3" json:"client_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuth) Reset() { *m = MasterAuth{} } +func (m *MasterAuth) String() string { return proto.CompactTextString(m) } +func (*MasterAuth) ProtoMessage() {} +func (*MasterAuth) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{2} +} +func (m *MasterAuth) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuth.Unmarshal(m, b) +} +func (m *MasterAuth) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuth.Marshal(b, m, deterministic) +} +func (dst *MasterAuth) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuth.Merge(dst, src) +} +func (m *MasterAuth) XXX_Size() int { + return xxx_messageInfo_MasterAuth.Size(m) +} +func (m *MasterAuth) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuth.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuth proto.InternalMessageInfo + +func (m *MasterAuth) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *MasterAuth) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +func (m *MasterAuth) GetClientCertificateConfig() *ClientCertificateConfig { + if m != nil { + return m.ClientCertificateConfig + } + return nil +} + +func (m *MasterAuth) GetClusterCaCertificate() string { + if m != nil { + return m.ClusterCaCertificate + } + return "" +} + +func (m *MasterAuth) GetClientCertificate() string { + if m != nil { + return m.ClientCertificate + } + return "" +} + +func (m *MasterAuth) GetClientKey() string { + if m != nil { + return m.ClientKey + } + return "" +} + +// Configuration for client certificates on the cluster. +type ClientCertificateConfig struct { + // Issue a client certificate. 
+ IssueClientCertificate bool `protobuf:"varint,1,opt,name=issue_client_certificate,json=issueClientCertificate,proto3" json:"issue_client_certificate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientCertificateConfig) Reset() { *m = ClientCertificateConfig{} } +func (m *ClientCertificateConfig) String() string { return proto.CompactTextString(m) } +func (*ClientCertificateConfig) ProtoMessage() {} +func (*ClientCertificateConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{3} +} +func (m *ClientCertificateConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientCertificateConfig.Unmarshal(m, b) +} +func (m *ClientCertificateConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientCertificateConfig.Marshal(b, m, deterministic) +} +func (dst *ClientCertificateConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientCertificateConfig.Merge(dst, src) +} +func (m *ClientCertificateConfig) XXX_Size() int { + return xxx_messageInfo_ClientCertificateConfig.Size(m) +} +func (m *ClientCertificateConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClientCertificateConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientCertificateConfig proto.InternalMessageInfo + +func (m *ClientCertificateConfig) GetIssueClientCertificate() bool { + if m != nil { + return m.IssueClientCertificate + } + return false +} + +// Configuration for the addons that can be automatically spun up in the +// cluster, enabling additional functionality. +type AddonsConfig struct { + // Configuration for the HTTP (L7) load balancing controller addon, which + // makes it easy to set up HTTP load balancers for services in a cluster. + HttpLoadBalancing *HttpLoadBalancing `protobuf:"bytes,1,opt,name=http_load_balancing,json=httpLoadBalancing,proto3" json:"http_load_balancing,omitempty"` + // Configuration for the horizontal pod autoscaling feature, which + // increases or decreases the number of replica pods a replication controller + // has based on the resource usage of the existing pods. + HorizontalPodAutoscaling *HorizontalPodAutoscaling `protobuf:"bytes,2,opt,name=horizontal_pod_autoscaling,json=horizontalPodAutoscaling,proto3" json:"horizontal_pod_autoscaling,omitempty"` + // Configuration for the Kubernetes Dashboard. + KubernetesDashboard *KubernetesDashboard `protobuf:"bytes,3,opt,name=kubernetes_dashboard,json=kubernetesDashboard,proto3" json:"kubernetes_dashboard,omitempty"` + // Configuration for NetworkPolicy. This only tracks whether the addon + // is enabled or not on the Master, it does not track whether network policy + // is enabled for the nodes. + NetworkPolicyConfig *NetworkPolicyConfig `protobuf:"bytes,4,opt,name=network_policy_config,json=networkPolicyConfig,proto3" json:"network_policy_config,omitempty"` + // Configuration for Istio, an open platform to connect, manage, and secure + // microservices. + IstioConfig *IstioConfig `protobuf:"bytes,5,opt,name=istio_config,json=istioConfig,proto3" json:"istio_config,omitempty"` + // Configuration for the Cloud Run addon. The `IstioConfig` addon must be + // enabled in order to enable Cloud Run addon. This option can only be enabled + // at cluster creation time. 
+ CloudRunConfig *CloudRunConfig `protobuf:"bytes,7,opt,name=cloud_run_config,json=cloudRunConfig,proto3" json:"cloud_run_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddonsConfig) Reset() { *m = AddonsConfig{} } +func (m *AddonsConfig) String() string { return proto.CompactTextString(m) } +func (*AddonsConfig) ProtoMessage() {} +func (*AddonsConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{4} +} +func (m *AddonsConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddonsConfig.Unmarshal(m, b) +} +func (m *AddonsConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddonsConfig.Marshal(b, m, deterministic) +} +func (dst *AddonsConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddonsConfig.Merge(dst, src) +} +func (m *AddonsConfig) XXX_Size() int { + return xxx_messageInfo_AddonsConfig.Size(m) +} +func (m *AddonsConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AddonsConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AddonsConfig proto.InternalMessageInfo + +func (m *AddonsConfig) GetHttpLoadBalancing() *HttpLoadBalancing { + if m != nil { + return m.HttpLoadBalancing + } + return nil +} + +func (m *AddonsConfig) GetHorizontalPodAutoscaling() *HorizontalPodAutoscaling { + if m != nil { + return m.HorizontalPodAutoscaling + } + return nil +} + +func (m *AddonsConfig) GetKubernetesDashboard() *KubernetesDashboard { + if m != nil { + return m.KubernetesDashboard + } + return nil +} + +func (m *AddonsConfig) GetNetworkPolicyConfig() *NetworkPolicyConfig { + if m != nil { + return m.NetworkPolicyConfig + } + return nil +} + +func (m *AddonsConfig) GetIstioConfig() *IstioConfig { + if m != nil { + return m.IstioConfig + } + return nil +} + +func (m *AddonsConfig) GetCloudRunConfig() *CloudRunConfig { + if m != nil { + return m.CloudRunConfig + } + return nil +} + +// Configuration options for the HTTP (L7) load balancing controller addon, +// which makes it easy to set up HTTP load balancers for services in a cluster. +type HttpLoadBalancing struct { + // Whether the HTTP Load Balancing controller is enabled in the cluster. + // When enabled, it runs a small pod in the cluster that manages the load + // balancers. 
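+ // Note that this option is expressed as "disabled": leaving the field at
+ // its proto3 default of false keeps the HTTP load balancing addon enabled,
+ // while setting it to true turns the addon off.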
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpLoadBalancing) Reset() { *m = HttpLoadBalancing{} } +func (m *HttpLoadBalancing) String() string { return proto.CompactTextString(m) } +func (*HttpLoadBalancing) ProtoMessage() {} +func (*HttpLoadBalancing) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{5} +} +func (m *HttpLoadBalancing) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpLoadBalancing.Unmarshal(m, b) +} +func (m *HttpLoadBalancing) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpLoadBalancing.Marshal(b, m, deterministic) +} +func (dst *HttpLoadBalancing) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpLoadBalancing.Merge(dst, src) +} +func (m *HttpLoadBalancing) XXX_Size() int { + return xxx_messageInfo_HttpLoadBalancing.Size(m) +} +func (m *HttpLoadBalancing) XXX_DiscardUnknown() { + xxx_messageInfo_HttpLoadBalancing.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpLoadBalancing proto.InternalMessageInfo + +func (m *HttpLoadBalancing) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for the horizontal pod autoscaling feature, which +// increases or decreases the number of replica pods a replication controller +// has based on the resource usage of the existing pods. +type HorizontalPodAutoscaling struct { + // Whether the Horizontal Pod Autoscaling feature is enabled in the cluster. + // When enabled, it ensures that a Heapster pod is running in the cluster, + // which is also used by the Cloud Monitoring service. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HorizontalPodAutoscaling) Reset() { *m = HorizontalPodAutoscaling{} } +func (m *HorizontalPodAutoscaling) String() string { return proto.CompactTextString(m) } +func (*HorizontalPodAutoscaling) ProtoMessage() {} +func (*HorizontalPodAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{6} +} +func (m *HorizontalPodAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HorizontalPodAutoscaling.Unmarshal(m, b) +} +func (m *HorizontalPodAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HorizontalPodAutoscaling.Marshal(b, m, deterministic) +} +func (dst *HorizontalPodAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_HorizontalPodAutoscaling.Merge(dst, src) +} +func (m *HorizontalPodAutoscaling) XXX_Size() int { + return xxx_messageInfo_HorizontalPodAutoscaling.Size(m) +} +func (m *HorizontalPodAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_HorizontalPodAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_HorizontalPodAutoscaling proto.InternalMessageInfo + +func (m *HorizontalPodAutoscaling) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for the Kubernetes Dashboard. +type KubernetesDashboard struct { + // Whether the Kubernetes Dashboard is enabled for this cluster. 
+ Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KubernetesDashboard) Reset() { *m = KubernetesDashboard{} } +func (m *KubernetesDashboard) String() string { return proto.CompactTextString(m) } +func (*KubernetesDashboard) ProtoMessage() {} +func (*KubernetesDashboard) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{7} +} +func (m *KubernetesDashboard) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KubernetesDashboard.Unmarshal(m, b) +} +func (m *KubernetesDashboard) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KubernetesDashboard.Marshal(b, m, deterministic) +} +func (dst *KubernetesDashboard) XXX_Merge(src proto.Message) { + xxx_messageInfo_KubernetesDashboard.Merge(dst, src) +} +func (m *KubernetesDashboard) XXX_Size() int { + return xxx_messageInfo_KubernetesDashboard.Size(m) +} +func (m *KubernetesDashboard) XXX_DiscardUnknown() { + xxx_messageInfo_KubernetesDashboard.DiscardUnknown(m) +} + +var xxx_messageInfo_KubernetesDashboard proto.InternalMessageInfo + +func (m *KubernetesDashboard) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration for NetworkPolicy. This only tracks whether the addon +// is enabled or not on the Master, it does not track whether network policy +// is enabled for the nodes. +type NetworkPolicyConfig struct { + // Whether NetworkPolicy is enabled for this cluster. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicyConfig) Reset() { *m = NetworkPolicyConfig{} } +func (m *NetworkPolicyConfig) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicyConfig) ProtoMessage() {} +func (*NetworkPolicyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{8} +} +func (m *NetworkPolicyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicyConfig.Unmarshal(m, b) +} +func (m *NetworkPolicyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicyConfig.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicyConfig.Merge(dst, src) +} +func (m *NetworkPolicyConfig) XXX_Size() int { + return xxx_messageInfo_NetworkPolicyConfig.Size(m) +} +func (m *NetworkPolicyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicyConfig proto.InternalMessageInfo + +func (m *NetworkPolicyConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for private clusters. +type PrivateClusterConfig struct { + // Whether nodes have internal IP addresses only. If enabled, all nodes are + // given only RFC 1918 private addresses and communicate with the master via + // private networking. + EnablePrivateNodes bool `protobuf:"varint,1,opt,name=enable_private_nodes,json=enablePrivateNodes,proto3" json:"enable_private_nodes,omitempty"` + // Whether the master's internal IP address is used as the cluster endpoint. 
+ EnablePrivateEndpoint bool `protobuf:"varint,2,opt,name=enable_private_endpoint,json=enablePrivateEndpoint,proto3" json:"enable_private_endpoint,omitempty"` + // The IP range in CIDR notation to use for the hosted master network. This + // range will be used for assigning internal IP addresses to the master or + // set of masters, as well as the ILB VIP. This range must not overlap with + // any other ranges in use within the cluster's network. + MasterIpv4CidrBlock string `protobuf:"bytes,3,opt,name=master_ipv4_cidr_block,json=masterIpv4CidrBlock,proto3" json:"master_ipv4_cidr_block,omitempty"` + // Output only. The internal IP address of this cluster's master endpoint. + PrivateEndpoint string `protobuf:"bytes,4,opt,name=private_endpoint,json=privateEndpoint,proto3" json:"private_endpoint,omitempty"` + // Output only. The external IP address of this cluster's master endpoint. + PublicEndpoint string `protobuf:"bytes,5,opt,name=public_endpoint,json=publicEndpoint,proto3" json:"public_endpoint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivateClusterConfig) Reset() { *m = PrivateClusterConfig{} } +func (m *PrivateClusterConfig) String() string { return proto.CompactTextString(m) } +func (*PrivateClusterConfig) ProtoMessage() {} +func (*PrivateClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{9} +} +func (m *PrivateClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivateClusterConfig.Unmarshal(m, b) +} +func (m *PrivateClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivateClusterConfig.Marshal(b, m, deterministic) +} +func (dst *PrivateClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivateClusterConfig.Merge(dst, src) +} +func (m *PrivateClusterConfig) XXX_Size() int { + return xxx_messageInfo_PrivateClusterConfig.Size(m) +} +func (m *PrivateClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivateClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivateClusterConfig proto.InternalMessageInfo + +func (m *PrivateClusterConfig) GetEnablePrivateNodes() bool { + if m != nil { + return m.EnablePrivateNodes + } + return false +} + +func (m *PrivateClusterConfig) GetEnablePrivateEndpoint() bool { + if m != nil { + return m.EnablePrivateEndpoint + } + return false +} + +func (m *PrivateClusterConfig) GetMasterIpv4CidrBlock() string { + if m != nil { + return m.MasterIpv4CidrBlock + } + return "" +} + +func (m *PrivateClusterConfig) GetPrivateEndpoint() string { + if m != nil { + return m.PrivateEndpoint + } + return "" +} + +func (m *PrivateClusterConfig) GetPublicEndpoint() string { + if m != nil { + return m.PublicEndpoint + } + return "" +} + +// Configuration options for Istio addon. +type IstioConfig struct { + // Whether Istio is enabled for this cluster. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + // The specified Istio auth mode, either none, or mutual TLS. 
+ Auth IstioConfig_IstioAuthMode `protobuf:"varint,2,opt,name=auth,proto3,enum=google.container.v1beta1.IstioConfig_IstioAuthMode" json:"auth,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IstioConfig) Reset() { *m = IstioConfig{} } +func (m *IstioConfig) String() string { return proto.CompactTextString(m) } +func (*IstioConfig) ProtoMessage() {} +func (*IstioConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{10} +} +func (m *IstioConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IstioConfig.Unmarshal(m, b) +} +func (m *IstioConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IstioConfig.Marshal(b, m, deterministic) +} +func (dst *IstioConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_IstioConfig.Merge(dst, src) +} +func (m *IstioConfig) XXX_Size() int { + return xxx_messageInfo_IstioConfig.Size(m) +} +func (m *IstioConfig) XXX_DiscardUnknown() { + xxx_messageInfo_IstioConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_IstioConfig proto.InternalMessageInfo + +func (m *IstioConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +func (m *IstioConfig) GetAuth() IstioConfig_IstioAuthMode { + if m != nil { + return m.Auth + } + return IstioConfig_AUTH_NONE +} + +// Configuration options for the Cloud Run feature. +type CloudRunConfig struct { + // Whether Cloud Run addon is enabled for this cluster. + Disabled bool `protobuf:"varint,1,opt,name=disabled,proto3" json:"disabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudRunConfig) Reset() { *m = CloudRunConfig{} } +func (m *CloudRunConfig) String() string { return proto.CompactTextString(m) } +func (*CloudRunConfig) ProtoMessage() {} +func (*CloudRunConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{11} +} +func (m *CloudRunConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudRunConfig.Unmarshal(m, b) +} +func (m *CloudRunConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudRunConfig.Marshal(b, m, deterministic) +} +func (dst *CloudRunConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudRunConfig.Merge(dst, src) +} +func (m *CloudRunConfig) XXX_Size() int { + return xxx_messageInfo_CloudRunConfig.Size(m) +} +func (m *CloudRunConfig) XXX_DiscardUnknown() { + xxx_messageInfo_CloudRunConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudRunConfig proto.InternalMessageInfo + +func (m *CloudRunConfig) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +// Configuration options for the master authorized networks feature. Enabled +// master authorized networks will disallow all external traffic to access +// Kubernetes master through HTTPS except traffic from the given CIDR blocks, +// Google Compute Engine Public IPs and Google Prod IPs. +type MasterAuthorizedNetworksConfig struct { + // Whether or not master authorized networks is enabled. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + // cidr_blocks define up to 10 external networks that could access + // Kubernetes master through HTTPS. 
+ CidrBlocks []*MasterAuthorizedNetworksConfig_CidrBlock `protobuf:"bytes,2,rep,name=cidr_blocks,json=cidrBlocks,proto3" json:"cidr_blocks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig) Reset() { *m = MasterAuthorizedNetworksConfig{} } +func (m *MasterAuthorizedNetworksConfig) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{12} +} +func (m *MasterAuthorizedNetworksConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig.Size(m) +} +func (m *MasterAuthorizedNetworksConfig) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *MasterAuthorizedNetworksConfig) GetCidrBlocks() []*MasterAuthorizedNetworksConfig_CidrBlock { + if m != nil { + return m.CidrBlocks + } + return nil +} + +// CidrBlock contains an optional name and one CIDR block. +type MasterAuthorizedNetworksConfig_CidrBlock struct { + // display_name is an optional field for users to identify CIDR blocks. + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // cidr_block must be specified in CIDR notation. 
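+ // For example, "203.0.113.0/29" (a subnet of the RFC 5737 TEST-NET-3
+ // documentation range, used here purely for illustration) would admit only
+ // the eight addresses in that block.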
+ CidrBlock string `protobuf:"bytes,2,opt,name=cidr_block,json=cidrBlock,proto3" json:"cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) Reset() { + *m = MasterAuthorizedNetworksConfig_CidrBlock{} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) String() string { return proto.CompactTextString(m) } +func (*MasterAuthorizedNetworksConfig_CidrBlock) ProtoMessage() {} +func (*MasterAuthorizedNetworksConfig_CidrBlock) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{12, 0} +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Unmarshal(m, b) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Marshal(b, m, deterministic) +} +func (dst *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Merge(src proto.Message) { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Merge(dst, src) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_Size() int { + return xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.Size(m) +} +func (m *MasterAuthorizedNetworksConfig_CidrBlock) XXX_DiscardUnknown() { + xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock.DiscardUnknown(m) +} + +var xxx_messageInfo_MasterAuthorizedNetworksConfig_CidrBlock proto.InternalMessageInfo + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *MasterAuthorizedNetworksConfig_CidrBlock) GetCidrBlock() string { + if m != nil { + return m.CidrBlock + } + return "" +} + +// Configuration for the legacy Attribute Based Access Control authorization +// mode. +type LegacyAbac struct { + // Whether the ABAC authorizer is enabled for this cluster. When enabled, + // identities in the system, including service accounts, nodes, and + // controllers, will have statically granted permissions beyond those + // provided by the RBAC configuration or IAM. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LegacyAbac) Reset() { *m = LegacyAbac{} } +func (m *LegacyAbac) String() string { return proto.CompactTextString(m) } +func (*LegacyAbac) ProtoMessage() {} +func (*LegacyAbac) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{13} +} +func (m *LegacyAbac) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LegacyAbac.Unmarshal(m, b) +} +func (m *LegacyAbac) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LegacyAbac.Marshal(b, m, deterministic) +} +func (dst *LegacyAbac) XXX_Merge(src proto.Message) { + xxx_messageInfo_LegacyAbac.Merge(dst, src) +} +func (m *LegacyAbac) XXX_Size() int { + return xxx_messageInfo_LegacyAbac.Size(m) +} +func (m *LegacyAbac) XXX_DiscardUnknown() { + xxx_messageInfo_LegacyAbac.DiscardUnknown(m) +} + +var xxx_messageInfo_LegacyAbac proto.InternalMessageInfo + +func (m *LegacyAbac) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration options for the NetworkPolicy feature. 
+// https://kubernetes.io/docs/concepts/services-networking/networkpolicies/ +type NetworkPolicy struct { + // The selected network policy provider. + Provider NetworkPolicy_Provider `protobuf:"varint,1,opt,name=provider,proto3,enum=google.container.v1beta1.NetworkPolicy_Provider" json:"provider,omitempty"` + // Whether network policy is enabled on the cluster. + Enabled bool `protobuf:"varint,2,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkPolicy) Reset() { *m = NetworkPolicy{} } +func (m *NetworkPolicy) String() string { return proto.CompactTextString(m) } +func (*NetworkPolicy) ProtoMessage() {} +func (*NetworkPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{14} +} +func (m *NetworkPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkPolicy.Unmarshal(m, b) +} +func (m *NetworkPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkPolicy.Marshal(b, m, deterministic) +} +func (dst *NetworkPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkPolicy.Merge(dst, src) +} +func (m *NetworkPolicy) XXX_Size() int { + return xxx_messageInfo_NetworkPolicy.Size(m) +} +func (m *NetworkPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkPolicy proto.InternalMessageInfo + +func (m *NetworkPolicy) GetProvider() NetworkPolicy_Provider { + if m != nil { + return m.Provider + } + return NetworkPolicy_PROVIDER_UNSPECIFIED +} + +func (m *NetworkPolicy) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration for controlling how IPs are allocated in the cluster. +type IPAllocationPolicy struct { + // Whether alias IPs will be used for pod IPs in the cluster. + UseIpAliases bool `protobuf:"varint,1,opt,name=use_ip_aliases,json=useIpAliases,proto3" json:"use_ip_aliases,omitempty"` + // Whether a new subnetwork will be created automatically for the cluster. + // + // This field is only applicable when `use_ip_aliases` is true. + CreateSubnetwork bool `protobuf:"varint,2,opt,name=create_subnetwork,json=createSubnetwork,proto3" json:"create_subnetwork,omitempty"` + // A custom subnetwork name to be used if `create_subnetwork` is true. If + // this field is empty, then an automatic name will be chosen for the new + // subnetwork. + SubnetworkName string `protobuf:"bytes,3,opt,name=subnetwork_name,json=subnetworkName,proto3" json:"subnetwork_name,omitempty"` + // This field is deprecated, use cluster_ipv4_cidr_block. + ClusterIpv4Cidr string `protobuf:"bytes,4,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // This field is deprecated, use node_ipv4_cidr_block. + NodeIpv4Cidr string `protobuf:"bytes,5,opt,name=node_ipv4_cidr,json=nodeIpv4Cidr,proto3" json:"node_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // This field is deprecated, use services_ipv4_cidr_block. + ServicesIpv4Cidr string `protobuf:"bytes,6,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` // Deprecated: Do not use. + // The name of the secondary range to be used for the cluster CIDR + // block. The secondary range will be used for pod IP + // addresses. This must be an existing secondary range associated + // with the cluster subnetwork. 
+ // + // This field is only applicable with use_ip_aliases and + // create_subnetwork is false. + ClusterSecondaryRangeName string `protobuf:"bytes,7,opt,name=cluster_secondary_range_name,json=clusterSecondaryRangeName,proto3" json:"cluster_secondary_range_name,omitempty"` + // The name of the secondary range to be used as for the services + // CIDR block. The secondary range will be used for service + // ClusterIPs. This must be an existing secondary range associated + // with the cluster subnetwork. + // + // This field is only applicable with use_ip_aliases and + // create_subnetwork is false. + ServicesSecondaryRangeName string `protobuf:"bytes,8,opt,name=services_secondary_range_name,json=servicesSecondaryRangeName,proto3" json:"services_secondary_range_name,omitempty"` + // The IP address range for the cluster pod IPs. If this field is set, then + // `cluster.cluster_ipv4_cidr` must be left blank. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + ClusterIpv4CidrBlock string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr_block,json=clusterIpv4CidrBlock,proto3" json:"cluster_ipv4_cidr_block,omitempty"` + // The IP address range of the instance IPs in this cluster. + // + // This is applicable only if `create_subnetwork` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + NodeIpv4CidrBlock string `protobuf:"bytes,10,opt,name=node_ipv4_cidr_block,json=nodeIpv4CidrBlock,proto3" json:"node_ipv4_cidr_block,omitempty"` + // The IP address range of the services IPs in this cluster. If blank, a range + // will be automatically chosen with the default size. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // Set to blank to have a range chosen with the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + ServicesIpv4CidrBlock string `protobuf:"bytes,11,opt,name=services_ipv4_cidr_block,json=servicesIpv4CidrBlock,proto3" json:"services_ipv4_cidr_block,omitempty"` + // If true, allow allocation of cluster CIDR ranges that overlap with certain + // kinds of network routes. By default we do not allow cluster CIDR ranges to + // intersect with any user declared routes. With allow_route_overlap == true, + // we allow overlapping with CIDR ranges that are larger than the cluster CIDR + // range. + // + // If this field is set to true, then cluster and services CIDRs must be + // fully-specified (e.g. 
`10.96.0.0/14`, but not `/14`), which means: + // 1) When `use_ip_aliases` is true, `cluster_ipv4_cidr_block` and + // `services_ipv4_cidr_block` must be fully-specified. + // 2) When `use_ip_aliases` is false, `cluster.cluster_ipv4_cidr` muse be + // fully-specified. + AllowRouteOverlap bool `protobuf:"varint,12,opt,name=allow_route_overlap,json=allowRouteOverlap,proto3" json:"allow_route_overlap,omitempty"` + // The IP address range of the Cloud TPUs in this cluster. If unspecified, a + // range will be automatically chosen with the default size. + // + // This field is only applicable when `use_ip_aliases` is true. + // + // If unspecified, the range will use the default size. + // + // Set to /netmask (e.g. `/14`) to have a range chosen with a specific + // netmask. + // + // Set to a + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`) from the RFC-1918 private networks (e.g. + // `10.0.0.0/8`, `172.16.0.0/12`, `192.168.0.0/16`) to pick a specific range + // to use. + TpuIpv4CidrBlock string `protobuf:"bytes,13,opt,name=tpu_ipv4_cidr_block,json=tpuIpv4CidrBlock,proto3" json:"tpu_ipv4_cidr_block,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IPAllocationPolicy) Reset() { *m = IPAllocationPolicy{} } +func (m *IPAllocationPolicy) String() string { return proto.CompactTextString(m) } +func (*IPAllocationPolicy) ProtoMessage() {} +func (*IPAllocationPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{15} +} +func (m *IPAllocationPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IPAllocationPolicy.Unmarshal(m, b) +} +func (m *IPAllocationPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IPAllocationPolicy.Marshal(b, m, deterministic) +} +func (dst *IPAllocationPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_IPAllocationPolicy.Merge(dst, src) +} +func (m *IPAllocationPolicy) XXX_Size() int { + return xxx_messageInfo_IPAllocationPolicy.Size(m) +} +func (m *IPAllocationPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_IPAllocationPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_IPAllocationPolicy proto.InternalMessageInfo + +func (m *IPAllocationPolicy) GetUseIpAliases() bool { + if m != nil { + return m.UseIpAliases + } + return false +} + +func (m *IPAllocationPolicy) GetCreateSubnetwork() bool { + if m != nil { + return m.CreateSubnetwork + } + return false +} + +func (m *IPAllocationPolicy) GetSubnetworkName() string { + if m != nil { + return m.SubnetworkName + } + return "" +} + +// Deprecated: Do not use. +func (m *IPAllocationPolicy) GetClusterIpv4Cidr() string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. +func (m *IPAllocationPolicy) GetNodeIpv4Cidr() string { + if m != nil { + return m.NodeIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. 
+func (m *IPAllocationPolicy) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterSecondaryRangeName() string { + if m != nil { + return m.ClusterSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesSecondaryRangeName() string { + if m != nil { + return m.ServicesSecondaryRangeName + } + return "" +} + +func (m *IPAllocationPolicy) GetClusterIpv4CidrBlock() string { + if m != nil { + return m.ClusterIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetNodeIpv4CidrBlock() string { + if m != nil { + return m.NodeIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetServicesIpv4CidrBlock() string { + if m != nil { + return m.ServicesIpv4CidrBlock + } + return "" +} + +func (m *IPAllocationPolicy) GetAllowRouteOverlap() bool { + if m != nil { + return m.AllowRouteOverlap + } + return false +} + +func (m *IPAllocationPolicy) GetTpuIpv4CidrBlock() string { + if m != nil { + return m.TpuIpv4CidrBlock + } + return "" +} + +// Configuration for Binary Authorization. +type BinaryAuthorization struct { + // Enable Binary Authorization for this cluster. If enabled, all container + // images will be validated by Google Binauthz. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BinaryAuthorization) Reset() { *m = BinaryAuthorization{} } +func (m *BinaryAuthorization) String() string { return proto.CompactTextString(m) } +func (*BinaryAuthorization) ProtoMessage() {} +func (*BinaryAuthorization) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{16} +} +func (m *BinaryAuthorization) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BinaryAuthorization.Unmarshal(m, b) +} +func (m *BinaryAuthorization) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BinaryAuthorization.Marshal(b, m, deterministic) +} +func (dst *BinaryAuthorization) XXX_Merge(src proto.Message) { + xxx_messageInfo_BinaryAuthorization.Merge(dst, src) +} +func (m *BinaryAuthorization) XXX_Size() int { + return xxx_messageInfo_BinaryAuthorization.Size(m) +} +func (m *BinaryAuthorization) XXX_DiscardUnknown() { + xxx_messageInfo_BinaryAuthorization.DiscardUnknown(m) +} + +var xxx_messageInfo_BinaryAuthorization proto.InternalMessageInfo + +func (m *BinaryAuthorization) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Configuration for the PodSecurityPolicy feature. +type PodSecurityPolicyConfig struct { + // Enable the PodSecurityPolicy controller for this cluster. If enabled, pods + // must be valid under a PodSecurityPolicy to be created. 
+ Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PodSecurityPolicyConfig) Reset() { *m = PodSecurityPolicyConfig{} } +func (m *PodSecurityPolicyConfig) String() string { return proto.CompactTextString(m) } +func (*PodSecurityPolicyConfig) ProtoMessage() {} +func (*PodSecurityPolicyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{17} +} +func (m *PodSecurityPolicyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PodSecurityPolicyConfig.Unmarshal(m, b) +} +func (m *PodSecurityPolicyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PodSecurityPolicyConfig.Marshal(b, m, deterministic) +} +func (dst *PodSecurityPolicyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PodSecurityPolicyConfig.Merge(dst, src) +} +func (m *PodSecurityPolicyConfig) XXX_Size() int { + return xxx_messageInfo_PodSecurityPolicyConfig.Size(m) +} +func (m *PodSecurityPolicyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PodSecurityPolicyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PodSecurityPolicyConfig proto.InternalMessageInfo + +func (m *PodSecurityPolicyConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// A Google Kubernetes Engine cluster. +type Cluster struct { + // The name of this cluster. The name must be unique within this project + // and zone, and can be up to 40 characters with the following restrictions: + // + // * Lowercase letters, numbers, and hyphens only. + // * Must start with a letter. + // * Must end with a number or a letter. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional description of this cluster. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The number of nodes to create in this cluster. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. You must also have available + // firewall and routes quota. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "node_config") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // Parameters used in creating the cluster's nodes. + // See `nodeConfig` for the description of its properties. + // For requests, this field should only be used in lieu of a + // "node_pool" object, since this configuration (along with the + // "initial_node_count") will be used to create a "NodePool" object with an + // auto-generated name. Do not use this and a node_pool at the same time. + // For responses, this field will be populated with the node configuration of + // the first node pool. + // + // If unspecified, the defaults are used. + NodeConfig *NodeConfig `protobuf:"bytes,4,opt,name=node_config,json=nodeConfig,proto3" json:"node_config,omitempty"` + // The authentication information for accessing the master endpoint. 
+ // If unspecified, the defaults are used: + // For clusters before v1.12, if master_auth is unspecified, `username` will + // be set to "admin", a random password will be generated, and a client + // certificate will be issued. + MasterAuth *MasterAuth `protobuf:"bytes,5,opt,name=master_auth,json=masterAuth,proto3" json:"master_auth,omitempty"` + // The logging service the cluster should use to write logs. + // Currently available options: + // + // * `logging.googleapis.com` - the Google Cloud Logging service. + // * `none` - no logs will be exported from the cluster. + // * if left as an empty string,`logging.googleapis.com` will be used. + LoggingService string `protobuf:"bytes,6,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * `monitoring.googleapis.com` - the Google Cloud Monitoring service. + // * `none` - no metrics will be exported from the cluster. + // * if left as an empty string, `monitoring.googleapis.com` will be used. + MonitoringService string `protobuf:"bytes,7,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name of the Google Compute Engine + // [network](/compute/docs/networks-and-firewalls#networks) to which the + // cluster is connected. If left unspecified, the `default` network + // will be used. On output this shows the network ID instead of + // the name. + Network string `protobuf:"bytes,8,opt,name=network,proto3" json:"network,omitempty"` + // The IP address range of the container pods in this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `10.96.0.0/14`). Leave blank to have + // one automatically chosen or specify a `/14` block in `10.0.0.0/8`. + ClusterIpv4Cidr string `protobuf:"bytes,9,opt,name=cluster_ipv4_cidr,json=clusterIpv4Cidr,proto3" json:"cluster_ipv4_cidr,omitempty"` + // Configurations for the various addons available to run in the cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,10,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name of the Google Compute Engine + // [subnetwork](/compute/docs/subnetworks) to which the + // cluster is connected. On output this shows the subnetwork ID instead of + // the name. + Subnetwork string `protobuf:"bytes,11,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + // The node pools associated with this cluster. + // This field should not be set if "node_config" or "initial_node_count" are + // specified. + NodePools []*NodePool `protobuf:"bytes,12,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"` + // The list of Google Compute Engine + // [zones](/compute/docs/zones#available) in which the cluster's nodes + // should be located. + Locations []string `protobuf:"bytes,13,rep,name=locations,proto3" json:"locations,omitempty"` + // Kubernetes alpha features are enabled on this cluster. This includes alpha + // API groups (e.g. v1beta1) and features that may not be production ready in + // the kubernetes version of the master and nodes. + // The cluster has no SLA for uptime and master/node upgrades are disabled. + // Alpha enabled clusters are automatically deleted thirty days after + // creation. 
+ EnableKubernetesAlpha bool `protobuf:"varint,14,opt,name=enable_kubernetes_alpha,json=enableKubernetesAlpha,proto3" json:"enable_kubernetes_alpha,omitempty"` + // The resource labels for the cluster to use to annotate any related + // Google Compute Engine resources. + ResourceLabels map[string]string `protobuf:"bytes,15,rep,name=resource_labels,json=resourceLabels,proto3" json:"resource_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The fingerprint of the set of labels for this cluster. + LabelFingerprint string `protobuf:"bytes,16,opt,name=label_fingerprint,json=labelFingerprint,proto3" json:"label_fingerprint,omitempty"` + // Configuration for the legacy ABAC authorization mode. + LegacyAbac *LegacyAbac `protobuf:"bytes,18,opt,name=legacy_abac,json=legacyAbac,proto3" json:"legacy_abac,omitempty"` + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,19,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // Configuration for cluster IP allocation. + IpAllocationPolicy *IPAllocationPolicy `protobuf:"bytes,20,opt,name=ip_allocation_policy,json=ipAllocationPolicy,proto3" json:"ip_allocation_policy,omitempty"` + // The configuration options for master authorized networks feature. + MasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,22,opt,name=master_authorized_networks_config,json=masterAuthorizedNetworksConfig,proto3" json:"master_authorized_networks_config,omitempty"` + // Configure the maintenance policy for this cluster. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,23,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // Configuration for Binary Authorization. + BinaryAuthorization *BinaryAuthorization `protobuf:"bytes,24,opt,name=binary_authorization,json=binaryAuthorization,proto3" json:"binary_authorization,omitempty"` + // Configuration for the PodSecurityPolicy feature. + PodSecurityPolicyConfig *PodSecurityPolicyConfig `protobuf:"bytes,25,opt,name=pod_security_policy_config,json=podSecurityPolicyConfig,proto3" json:"pod_security_policy_config,omitempty"` + // Cluster-level autoscaling configuration. + Autoscaling *ClusterAutoscaling `protobuf:"bytes,26,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // Configuration for cluster networking. + NetworkConfig *NetworkConfig `protobuf:"bytes,27,opt,name=network_config,json=networkConfig,proto3" json:"network_config,omitempty"` + // If this is a private cluster setup. Private clusters are clusters that, by + // default have no external IP addresses on the nodes and where nodes and the + // master communicate over private IP addresses. + // This field is deprecated, use private_cluster_config.enable_private_nodes + // instead. + PrivateCluster bool `protobuf:"varint,28,opt,name=private_cluster,json=privateCluster,proto3" json:"private_cluster,omitempty"` // Deprecated: Do not use. + // The IP prefix in CIDR notation to use for the hosted master network. + // This prefix will be used for assigning private IP addresses to the + // master or set of masters, as well as the ILB VIP. + // This field is deprecated, use + // private_cluster_config.master_ipv4_cidr_block instead. + MasterIpv4CidrBlock string `protobuf:"bytes,29,opt,name=master_ipv4_cidr_block,json=masterIpv4CidrBlock,proto3" json:"master_ipv4_cidr_block,omitempty"` // Deprecated: Do not use. 
+ // The default constraint on the maximum number of pods that can be run + // simultaneously on a node in the node pool of this cluster. Only honored + // if cluster created with IP Alias support. + DefaultMaxPodsConstraint *MaxPodsConstraint `protobuf:"bytes,30,opt,name=default_max_pods_constraint,json=defaultMaxPodsConstraint,proto3" json:"default_max_pods_constraint,omitempty"` + // Configuration for exporting resource usages. Resource usage export is + // disabled when this config unspecified. + ResourceUsageExportConfig *ResourceUsageExportConfig `protobuf:"bytes,33,opt,name=resource_usage_export_config,json=resourceUsageExportConfig,proto3" json:"resource_usage_export_config,omitempty"` + // Configuration for private cluster. + PrivateClusterConfig *PrivateClusterConfig `protobuf:"bytes,37,opt,name=private_cluster_config,json=privateClusterConfig,proto3" json:"private_cluster_config,omitempty"` + // Cluster-level Vertical Pod Autoscaling configuration. + VerticalPodAutoscaling *VerticalPodAutoscaling `protobuf:"bytes,39,opt,name=vertical_pod_autoscaling,json=verticalPodAutoscaling,proto3" json:"vertical_pod_autoscaling,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,101,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // [Output only] The IP address of this cluster's master endpoint. + // The endpoint can be accessed from the internet at + // `https://username:password@endpoint/`. + // + // See the `masterAuth` property of this resource for username and + // password information. + Endpoint string `protobuf:"bytes,102,opt,name=endpoint,proto3" json:"endpoint,omitempty"` + // The initial Kubernetes version for this cluster. Valid versions are those + // found in validMasterVersions returned by getServerConfig. The version can + // be upgraded over time; such upgrades are reflected in + // currentMasterVersion and currentNodeVersion. + // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "","-": picks the default Kubernetes version + InitialClusterVersion string `protobuf:"bytes,103,opt,name=initial_cluster_version,json=initialClusterVersion,proto3" json:"initial_cluster_version,omitempty"` + // [Output only] The current software version of the master endpoint. + CurrentMasterVersion string `protobuf:"bytes,104,opt,name=current_master_version,json=currentMasterVersion,proto3" json:"current_master_version,omitempty"` + // [Output only] Deprecated, use + // [NodePool.version](/kubernetes-engine/docs/reference/rest/v1beta1/projects.zones.clusters.nodePool) + // instead. The current version of the node software components. + // If they are currently at multiple versions because they're in the process + // of being upgraded, this reflects the minimum version of all nodes. 
+ CurrentNodeVersion string `protobuf:"bytes,105,opt,name=current_node_version,json=currentNodeVersion,proto3" json:"current_node_version,omitempty"` // Deprecated: Do not use. + // [Output only] The time the cluster was created, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + CreateTime string `protobuf:"bytes,106,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // [Output only] The current status of this cluster. + Status Cluster_Status `protobuf:"varint,107,opt,name=status,proto3,enum=google.container.v1beta1.Cluster_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // cluster, if available. + StatusMessage string `protobuf:"bytes,108,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // [Output only] The size of the address space on each node for hosting + // containers. This is provisioned from within the `container_ipv4_cidr` + // range. + NodeIpv4CidrSize int32 `protobuf:"varint,109,opt,name=node_ipv4_cidr_size,json=nodeIpv4CidrSize,proto3" json:"node_ipv4_cidr_size,omitempty"` + // [Output only] The IP address range of the Kubernetes services in + // this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `1.2.3.4/29`). Service addresses are + // typically put in the last `/16` from the container CIDR. + ServicesIpv4Cidr string `protobuf:"bytes,110,opt,name=services_ipv4_cidr,json=servicesIpv4Cidr,proto3" json:"services_ipv4_cidr,omitempty"` + // Deprecated. Use node_pools.instance_group_urls. + InstanceGroupUrls []string `protobuf:"bytes,111,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` // Deprecated: Do not use. + // [Output only] The number of nodes currently in the cluster. Deprecated. + // Call Kubernetes API directly to retrieve node information. + CurrentNodeCount int32 `protobuf:"varint,112,opt,name=current_node_count,json=currentNodeCount,proto3" json:"current_node_count,omitempty"` // Deprecated: Do not use. + // [Output only] The time the cluster will be automatically + // deleted in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + ExpireTime string `protobuf:"bytes,113,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,114,opt,name=location,proto3" json:"location,omitempty"` + // Enable the ability to use Cloud TPUs in this cluster. + EnableTpu bool `protobuf:"varint,115,opt,name=enable_tpu,json=enableTpu,proto3" json:"enable_tpu,omitempty"` + // [Output only] The IP address range of the Cloud TPUs in this cluster, in + // [CIDR](http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) + // notation (e.g. `1.2.3.4/29`). + TpuIpv4CidrBlock string `protobuf:"bytes,116,opt,name=tpu_ipv4_cidr_block,json=tpuIpv4CidrBlock,proto3" json:"tpu_ipv4_cidr_block,omitempty"` + // Which conditions caused the current cluster state. 
+ Conditions []*StatusCondition `protobuf:"bytes,118,rep,name=conditions,proto3" json:"conditions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{18} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Cluster) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *Cluster) GetNodeConfig() *NodeConfig { + if m != nil { + return m.NodeConfig + } + return nil +} + +func (m *Cluster) GetMasterAuth() *MasterAuth { + if m != nil { + return m.MasterAuth + } + return nil +} + +func (m *Cluster) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *Cluster) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *Cluster) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *Cluster) GetClusterIpv4Cidr() string { + if m != nil { + return m.ClusterIpv4Cidr + } + return "" +} + +func (m *Cluster) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *Cluster) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +func (m *Cluster) GetNodePools() []*NodePool { + if m != nil { + return m.NodePools + } + return nil +} + +func (m *Cluster) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *Cluster) GetEnableKubernetesAlpha() bool { + if m != nil { + return m.EnableKubernetesAlpha + } + return false +} + +func (m *Cluster) GetResourceLabels() map[string]string { + if m != nil { + return m.ResourceLabels + } + return nil +} + +func (m *Cluster) GetLabelFingerprint() string { + if m != nil { + return m.LabelFingerprint + } + return "" +} + +func (m *Cluster) GetLegacyAbac() *LegacyAbac { + if m != nil { + return m.LegacyAbac + } + return nil +} + +func (m *Cluster) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *Cluster) GetIpAllocationPolicy() *IPAllocationPolicy { + if m != nil { + return m.IpAllocationPolicy + } + return nil +} + +func (m *Cluster) GetMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.MasterAuthorizedNetworksConfig + } + return nil +} + +func (m *Cluster) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *Cluster) GetBinaryAuthorization() *BinaryAuthorization 
{ + if m != nil { + return m.BinaryAuthorization + } + return nil +} + +func (m *Cluster) GetPodSecurityPolicyConfig() *PodSecurityPolicyConfig { + if m != nil { + return m.PodSecurityPolicyConfig + } + return nil +} + +func (m *Cluster) GetAutoscaling() *ClusterAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *Cluster) GetNetworkConfig() *NetworkConfig { + if m != nil { + return m.NetworkConfig + } + return nil +} + +// Deprecated: Do not use. +func (m *Cluster) GetPrivateCluster() bool { + if m != nil { + return m.PrivateCluster + } + return false +} + +// Deprecated: Do not use. +func (m *Cluster) GetMasterIpv4CidrBlock() string { + if m != nil { + return m.MasterIpv4CidrBlock + } + return "" +} + +func (m *Cluster) GetDefaultMaxPodsConstraint() *MaxPodsConstraint { + if m != nil { + return m.DefaultMaxPodsConstraint + } + return nil +} + +func (m *Cluster) GetResourceUsageExportConfig() *ResourceUsageExportConfig { + if m != nil { + return m.ResourceUsageExportConfig + } + return nil +} + +func (m *Cluster) GetPrivateClusterConfig() *PrivateClusterConfig { + if m != nil { + return m.PrivateClusterConfig + } + return nil +} + +func (m *Cluster) GetVerticalPodAutoscaling() *VerticalPodAutoscaling { + if m != nil { + return m.VerticalPodAutoscaling + } + return nil +} + +func (m *Cluster) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Cluster) GetEndpoint() string { + if m != nil { + return m.Endpoint + } + return "" +} + +func (m *Cluster) GetInitialClusterVersion() string { + if m != nil { + return m.InitialClusterVersion + } + return "" +} + +func (m *Cluster) GetCurrentMasterVersion() string { + if m != nil { + return m.CurrentMasterVersion + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetCurrentNodeVersion() string { + if m != nil { + return m.CurrentNodeVersion + } + return "" +} + +func (m *Cluster) GetCreateTime() string { + if m != nil { + return m.CreateTime + } + return "" +} + +func (m *Cluster) GetStatus() Cluster_Status { + if m != nil { + return m.Status + } + return Cluster_STATUS_UNSPECIFIED +} + +func (m *Cluster) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Cluster) GetNodeIpv4CidrSize() int32 { + if m != nil { + return m.NodeIpv4CidrSize + } + return 0 +} + +func (m *Cluster) GetServicesIpv4Cidr() string { + if m != nil { + return m.ServicesIpv4Cidr + } + return "" +} + +// Deprecated: Do not use. +func (m *Cluster) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +// Deprecated: Do not use. +func (m *Cluster) GetCurrentNodeCount() int32 { + if m != nil { + return m.CurrentNodeCount + } + return 0 +} + +func (m *Cluster) GetExpireTime() string { + if m != nil { + return m.ExpireTime + } + return "" +} + +func (m *Cluster) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Cluster) GetEnableTpu() bool { + if m != nil { + return m.EnableTpu + } + return false +} + +func (m *Cluster) GetTpuIpv4CidrBlock() string { + if m != nil { + return m.TpuIpv4CidrBlock + } + return "" +} + +func (m *Cluster) GetConditions() []*StatusCondition { + if m != nil { + return m.Conditions + } + return nil +} + +// ClusterUpdate describes an update to the cluster. 
Exactly one update can +// be applied to a cluster with each request, so at most one field can be +// provided. +type ClusterUpdate struct { + // The Kubernetes version to change the nodes to (typically an + // upgrade). + // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the Kubernetes master version + DesiredNodeVersion string `protobuf:"bytes,4,opt,name=desired_node_version,json=desiredNodeVersion,proto3" json:"desired_node_version,omitempty"` + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * "monitoring.googleapis.com/kubernetes" - the Google Cloud Monitoring + // service with Kubernetes-native resource model in Stackdriver + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + DesiredMonitoringService string `protobuf:"bytes,5,opt,name=desired_monitoring_service,json=desiredMonitoringService,proto3" json:"desired_monitoring_service,omitempty"` + // Configurations for the various addons available to run in the cluster. + DesiredAddonsConfig *AddonsConfig `protobuf:"bytes,6,opt,name=desired_addons_config,json=desiredAddonsConfig,proto3" json:"desired_addons_config,omitempty"` + // The node pool to be upgraded. This field is mandatory if + // "desired_node_version", "desired_image_family" or + // "desired_node_pool_autoscaling" is specified and there is more than one + // node pool on the cluster. + DesiredNodePoolId string `protobuf:"bytes,7,opt,name=desired_node_pool_id,json=desiredNodePoolId,proto3" json:"desired_node_pool_id,omitempty"` + // The desired image type for the node pool. + // NOTE: Set the "desired_node_pool" field as well. + DesiredImageType string `protobuf:"bytes,8,opt,name=desired_image_type,json=desiredImageType,proto3" json:"desired_image_type,omitempty"` + // Autoscaler configuration for the node pool specified in + // desired_node_pool_id. If there is only one pool in the + // cluster and desired_node_pool_id is not provided then + // the change applies to that single node pool. + DesiredNodePoolAutoscaling *NodePoolAutoscaling `protobuf:"bytes,9,opt,name=desired_node_pool_autoscaling,json=desiredNodePoolAutoscaling,proto3" json:"desired_node_pool_autoscaling,omitempty"` + // The desired list of Google Compute Engine + // [zones](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + DesiredLocations []string `protobuf:"bytes,10,rep,name=desired_locations,json=desiredLocations,proto3" json:"desired_locations,omitempty"` + // The desired configuration options for master authorized networks feature. 
+ DesiredMasterAuthorizedNetworksConfig *MasterAuthorizedNetworksConfig `protobuf:"bytes,12,opt,name=desired_master_authorized_networks_config,json=desiredMasterAuthorizedNetworksConfig,proto3" json:"desired_master_authorized_networks_config,omitempty"` + // The desired configuration options for the PodSecurityPolicy feature. + DesiredPodSecurityPolicyConfig *PodSecurityPolicyConfig `protobuf:"bytes,14,opt,name=desired_pod_security_policy_config,json=desiredPodSecurityPolicyConfig,proto3" json:"desired_pod_security_policy_config,omitempty"` + // Cluster-level autoscaling configuration. + DesiredClusterAutoscaling *ClusterAutoscaling `protobuf:"bytes,15,opt,name=desired_cluster_autoscaling,json=desiredClusterAutoscaling,proto3" json:"desired_cluster_autoscaling,omitempty"` + // The desired configuration options for the Binary Authorization feature. + DesiredBinaryAuthorization *BinaryAuthorization `protobuf:"bytes,16,opt,name=desired_binary_authorization,json=desiredBinaryAuthorization,proto3" json:"desired_binary_authorization,omitempty"` + // The logging service the cluster should use to write metrics. + // Currently available options: + // + // * "logging.googleapis.com/kubernetes" - the Google Cloud Logging + // service with Kubernetes-native resource model in Stackdriver + // * "logging.googleapis.com" - the Google Cloud Logging service + // * "none" - no logs will be exported from the cluster + DesiredLoggingService string `protobuf:"bytes,19,opt,name=desired_logging_service,json=desiredLoggingService,proto3" json:"desired_logging_service,omitempty"` + // The desired configuration for exporting resource usage. + DesiredResourceUsageExportConfig *ResourceUsageExportConfig `protobuf:"bytes,21,opt,name=desired_resource_usage_export_config,json=desiredResourceUsageExportConfig,proto3" json:"desired_resource_usage_export_config,omitempty"` + // Cluster-level Vertical Pod Autoscaling configuration. + DesiredVerticalPodAutoscaling *VerticalPodAutoscaling `protobuf:"bytes,22,opt,name=desired_vertical_pod_autoscaling,json=desiredVerticalPodAutoscaling,proto3" json:"desired_vertical_pod_autoscaling,omitempty"` + // The Kubernetes version to change the master to. The only valid value is the + // latest supported version. 
+ // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the default Kubernetes version + DesiredMasterVersion string `protobuf:"bytes,100,opt,name=desired_master_version,json=desiredMasterVersion,proto3" json:"desired_master_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterUpdate) Reset() { *m = ClusterUpdate{} } +func (m *ClusterUpdate) String() string { return proto.CompactTextString(m) } +func (*ClusterUpdate) ProtoMessage() {} +func (*ClusterUpdate) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{19} +} +func (m *ClusterUpdate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterUpdate.Unmarshal(m, b) +} +func (m *ClusterUpdate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterUpdate.Marshal(b, m, deterministic) +} +func (dst *ClusterUpdate) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterUpdate.Merge(dst, src) +} +func (m *ClusterUpdate) XXX_Size() int { + return xxx_messageInfo_ClusterUpdate.Size(m) +} +func (m *ClusterUpdate) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterUpdate.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterUpdate proto.InternalMessageInfo + +func (m *ClusterUpdate) GetDesiredNodeVersion() string { + if m != nil { + return m.DesiredNodeVersion + } + return "" +} + +func (m *ClusterUpdate) GetDesiredMonitoringService() string { + if m != nil { + return m.DesiredMonitoringService + } + return "" +} + +func (m *ClusterUpdate) GetDesiredAddonsConfig() *AddonsConfig { + if m != nil { + return m.DesiredAddonsConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredNodePoolId() string { + if m != nil { + return m.DesiredNodePoolId + } + return "" +} + +func (m *ClusterUpdate) GetDesiredImageType() string { + if m != nil { + return m.DesiredImageType + } + return "" +} + +func (m *ClusterUpdate) GetDesiredNodePoolAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.DesiredNodePoolAutoscaling + } + return nil +} + +func (m *ClusterUpdate) GetDesiredLocations() []string { + if m != nil { + return m.DesiredLocations + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterAuthorizedNetworksConfig() *MasterAuthorizedNetworksConfig { + if m != nil { + return m.DesiredMasterAuthorizedNetworksConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredPodSecurityPolicyConfig() *PodSecurityPolicyConfig { + if m != nil { + return m.DesiredPodSecurityPolicyConfig + } + return nil +} + +func (m *ClusterUpdate) GetDesiredClusterAutoscaling() *ClusterAutoscaling { + if m != nil { + return m.DesiredClusterAutoscaling + } + return nil +} + +func (m *ClusterUpdate) GetDesiredBinaryAuthorization() *BinaryAuthorization { + if m != nil { + return m.DesiredBinaryAuthorization + } + return nil +} + +func (m *ClusterUpdate) GetDesiredLoggingService() string { + if m != nil { + return m.DesiredLoggingService + } + return "" +} + +func (m *ClusterUpdate) GetDesiredResourceUsageExportConfig() *ResourceUsageExportConfig { + if m != nil { + return m.DesiredResourceUsageExportConfig + } 
+ return nil +} + +func (m *ClusterUpdate) GetDesiredVerticalPodAutoscaling() *VerticalPodAutoscaling { + if m != nil { + return m.DesiredVerticalPodAutoscaling + } + return nil +} + +func (m *ClusterUpdate) GetDesiredMasterVersion() string { + if m != nil { + return m.DesiredMasterVersion + } + return "" +} + +// This operation resource represents operations that may have happened or are +// happening on the cluster. All fields are output only. +type Operation struct { + // The server-assigned ID for the operation. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation + // is taking place. + // This field is deprecated, use location instead. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The operation type. + OperationType Operation_Type `protobuf:"varint,3,opt,name=operation_type,json=operationType,proto3,enum=google.container.v1beta1.Operation_Type" json:"operation_type,omitempty"` + // The current status of the operation. + Status Operation_Status `protobuf:"varint,4,opt,name=status,proto3,enum=google.container.v1beta1.Operation_Status" json:"status,omitempty"` + // Detailed operation progress, if available. + Detail string `protobuf:"bytes,8,opt,name=detail,proto3" json:"detail,omitempty"` + // If an error has occurred, a textual description of the error. + StatusMessage string `protobuf:"bytes,5,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,6,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // Server-defined URL for the target of the operation. + TargetLink string `protobuf:"bytes,7,opt,name=target_link,json=targetLink,proto3" json:"target_link,omitempty"` + // [Output only] The name of the Google Compute Engine + // [zone](/compute/docs/regions-zones/regions-zones#available) or + // [region](/compute/docs/regions-zones/regions-zones#available) in which + // the cluster resides. + Location string `protobuf:"bytes,9,opt,name=location,proto3" json:"location,omitempty"` + // [Output only] The time the operation started, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + StartTime string `protobuf:"bytes,10,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // [Output only] The time the operation completed, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + EndTime string `protobuf:"bytes,11,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // [Output only] Progress information for an operation. + Progress *OperationProgress `protobuf:"bytes,12,opt,name=progress,proto3" json:"progress,omitempty"` + // Which conditions caused the current cluster state. + ClusterConditions []*StatusCondition `protobuf:"bytes,13,rep,name=cluster_conditions,json=clusterConditions,proto3" json:"cluster_conditions,omitempty"` + // Which conditions caused the current node pool state. 
+ NodepoolConditions []*StatusCondition `protobuf:"bytes,14,rep,name=nodepool_conditions,json=nodepoolConditions,proto3" json:"nodepool_conditions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{20} +} +func (m *Operation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operation.Unmarshal(m, b) +} +func (m *Operation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operation.Marshal(b, m, deterministic) +} +func (dst *Operation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operation.Merge(dst, src) +} +func (m *Operation) XXX_Size() int { + return xxx_messageInfo_Operation.Size(m) +} +func (m *Operation) XXX_DiscardUnknown() { + xxx_messageInfo_Operation.DiscardUnknown(m) +} + +var xxx_messageInfo_Operation proto.InternalMessageInfo + +func (m *Operation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Deprecated: Do not use. +func (m *Operation) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Operation) GetOperationType() Operation_Type { + if m != nil { + return m.OperationType + } + return Operation_TYPE_UNSPECIFIED +} + +func (m *Operation) GetStatus() Operation_Status { + if m != nil { + return m.Status + } + return Operation_STATUS_UNSPECIFIED +} + +func (m *Operation) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +func (m *Operation) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Operation) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *Operation) GetTargetLink() string { + if m != nil { + return m.TargetLink + } + return "" +} + +func (m *Operation) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Operation) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *Operation) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +func (m *Operation) GetProgress() *OperationProgress { + if m != nil { + return m.Progress + } + return nil +} + +func (m *Operation) GetClusterConditions() []*StatusCondition { + if m != nil { + return m.ClusterConditions + } + return nil +} + +func (m *Operation) GetNodepoolConditions() []*StatusCondition { + if m != nil { + return m.NodepoolConditions + } + return nil +} + +// Information about operation (or operation stage) progress. +type OperationProgress struct { + // A non-parameterized string describing an operation stage. + // Unset for single-stage operations. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Status of an operation stage. + // Unset for single-stage operations. 
+ Status Operation_Status `protobuf:"varint,2,opt,name=status,proto3,enum=google.container.v1beta1.Operation_Status" json:"status,omitempty"` + // Progress metric bundle, for example: + // metrics: [{name: "nodes done", int_value: 15}, + // {name: "nodes total", int_value: 32}] + // or + // metrics: [{name: "progress", double_value: 0.56}, + // {name: "progress scale", double_value: 1.0}] + Metrics []*OperationProgress_Metric `protobuf:"bytes,3,rep,name=metrics,proto3" json:"metrics,omitempty"` + // Substages of an operation or a stage. + Stages []*OperationProgress `protobuf:"bytes,4,rep,name=stages,proto3" json:"stages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationProgress) Reset() { *m = OperationProgress{} } +func (m *OperationProgress) String() string { return proto.CompactTextString(m) } +func (*OperationProgress) ProtoMessage() {} +func (*OperationProgress) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{21} +} +func (m *OperationProgress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationProgress.Unmarshal(m, b) +} +func (m *OperationProgress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationProgress.Marshal(b, m, deterministic) +} +func (dst *OperationProgress) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationProgress.Merge(dst, src) +} +func (m *OperationProgress) XXX_Size() int { + return xxx_messageInfo_OperationProgress.Size(m) +} +func (m *OperationProgress) XXX_DiscardUnknown() { + xxx_messageInfo_OperationProgress.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationProgress proto.InternalMessageInfo + +func (m *OperationProgress) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *OperationProgress) GetStatus() Operation_Status { + if m != nil { + return m.Status + } + return Operation_STATUS_UNSPECIFIED +} + +func (m *OperationProgress) GetMetrics() []*OperationProgress_Metric { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *OperationProgress) GetStages() []*OperationProgress { + if m != nil { + return m.Stages + } + return nil +} + +// Progress metric is (string, int|float|string) pair. +type OperationProgress_Metric struct { + // Metric name, required. + // e.g., "nodes total", "percent done" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Strictly one of the values is required. 
+ // + // Types that are valid to be assigned to Value: + // *OperationProgress_Metric_IntValue + // *OperationProgress_Metric_DoubleValue + // *OperationProgress_Metric_StringValue + Value isOperationProgress_Metric_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationProgress_Metric) Reset() { *m = OperationProgress_Metric{} } +func (m *OperationProgress_Metric) String() string { return proto.CompactTextString(m) } +func (*OperationProgress_Metric) ProtoMessage() {} +func (*OperationProgress_Metric) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{21, 0} +} +func (m *OperationProgress_Metric) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationProgress_Metric.Unmarshal(m, b) +} +func (m *OperationProgress_Metric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationProgress_Metric.Marshal(b, m, deterministic) +} +func (dst *OperationProgress_Metric) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationProgress_Metric.Merge(dst, src) +} +func (m *OperationProgress_Metric) XXX_Size() int { + return xxx_messageInfo_OperationProgress_Metric.Size(m) +} +func (m *OperationProgress_Metric) XXX_DiscardUnknown() { + xxx_messageInfo_OperationProgress_Metric.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationProgress_Metric proto.InternalMessageInfo + +func (m *OperationProgress_Metric) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isOperationProgress_Metric_Value interface { + isOperationProgress_Metric_Value() +} + +type OperationProgress_Metric_IntValue struct { + IntValue int64 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type OperationProgress_Metric_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type OperationProgress_Metric_StringValue struct { + StringValue string `protobuf:"bytes,4,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +func (*OperationProgress_Metric_IntValue) isOperationProgress_Metric_Value() {} + +func (*OperationProgress_Metric_DoubleValue) isOperationProgress_Metric_Value() {} + +func (*OperationProgress_Metric_StringValue) isOperationProgress_Metric_Value() {} + +func (m *OperationProgress_Metric) GetValue() isOperationProgress_Metric_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *OperationProgress_Metric) GetIntValue() int64 { + if x, ok := m.GetValue().(*OperationProgress_Metric_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (m *OperationProgress_Metric) GetDoubleValue() float64 { + if x, ok := m.GetValue().(*OperationProgress_Metric_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *OperationProgress_Metric) GetStringValue() string { + if x, ok := m.GetValue().(*OperationProgress_Metric_StringValue); ok { + return x.StringValue + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
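// Editor's note (illustrative sketch, not generated code): the Value oneof on
// OperationProgress_Metric is populated by assigning one of the wrapper
// structs above and is read back via the typed getters or a type switch on
// GetValue(), for example:
//
//	m := &OperationProgress_Metric{
//		Name:  "nodes done", // hypothetical metric
//		Value: &OperationProgress_Metric_IntValue{IntValue: 15},
//	}
//	switch v := m.GetValue().(type) {
//	case *OperationProgress_Metric_IntValue:
//		_ = v.IntValue
//	case *OperationProgress_Metric_DoubleValue:
//		_ = v.DoubleValue
//	case *OperationProgress_Metric_StringValue:
//		_ = v.StringValue
//	}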
+func (*OperationProgress_Metric) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OperationProgress_Metric_OneofMarshaler, _OperationProgress_Metric_OneofUnmarshaler, _OperationProgress_Metric_OneofSizer, []interface{}{ + (*OperationProgress_Metric_IntValue)(nil), + (*OperationProgress_Metric_DoubleValue)(nil), + (*OperationProgress_Metric_StringValue)(nil), + } +} + +func _OperationProgress_Metric_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OperationProgress_Metric) + // value + switch x := m.Value.(type) { + case *OperationProgress_Metric_IntValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntValue)) + case *OperationProgress_Metric_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *OperationProgress_Metric_StringValue: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case nil: + default: + return fmt.Errorf("OperationProgress_Metric.Value has unexpected type %T", x) + } + return nil +} + +func _OperationProgress_Metric_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OperationProgress_Metric) + switch tag { + case 2: // value.int_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &OperationProgress_Metric_IntValue{int64(x)} + return true, err + case 3: // value.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Value = &OperationProgress_Metric_DoubleValue{math.Float64frombits(x)} + return true, err + case 4: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &OperationProgress_Metric_StringValue{x} + return true, err + default: + return false, nil + } +} + +func _OperationProgress_Metric_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OperationProgress_Metric) + // value + switch x := m.Value.(type) { + case *OperationProgress_Metric_IntValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntValue)) + case *OperationProgress_Metric_DoubleValue: + n += 1 // tag and wire + n += 8 + case *OperationProgress_Metric_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// CreateClusterRequest creates a cluster. +type CreateClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. 
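// Editor's note (illustrative sketch, not part of the vendored upstream file):
// with the deprecated project_id/zone fields left empty, a request is
// typically built from the parent and cluster fields declared just below,
// for example:
//
//	req := &CreateClusterRequest{
//		Parent:  "projects/my-proj/locations/us-central1", // hypothetical IDs
//		Cluster: &Cluster{Name: "demo-cluster", InitialNodeCount: 3},
//	}
//
// The identifiers in the strings are assumptions for illustration only.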
+ // A [cluster + // resource](/container-engine/reference/rest/v1beta1/projects.zones.clusters) + Cluster *Cluster `protobuf:"bytes,3,opt,name=cluster,proto3" json:"cluster,omitempty"` + // The parent (project and location) where the cluster will be created. + // Specified in the format 'projects/*/locations/*'. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateClusterRequest) Reset() { *m = CreateClusterRequest{} } +func (m *CreateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*CreateClusterRequest) ProtoMessage() {} +func (*CreateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{22} +} +func (m *CreateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateClusterRequest.Unmarshal(m, b) +} +func (m *CreateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *CreateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateClusterRequest.Merge(dst, src) +} +func (m *CreateClusterRequest) XXX_Size() int { + return xxx_messageInfo_CreateClusterRequest.Size(m) +} +func (m *CreateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CreateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CreateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *CreateClusterRequest) GetCluster() *Cluster { + if m != nil { + return m.Cluster + } + return nil +} + +func (m *CreateClusterRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetClusterRequest gets the settings of a cluster. +type GetClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to retrieve. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster) of the cluster to retrieve. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetClusterRequest) Reset() { *m = GetClusterRequest{} } +func (m *GetClusterRequest) String() string { return proto.CompactTextString(m) } +func (*GetClusterRequest) ProtoMessage() {} +func (*GetClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{23} +} +func (m *GetClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetClusterRequest.Unmarshal(m, b) +} +func (m *GetClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetClusterRequest.Marshal(b, m, deterministic) +} +func (dst *GetClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetClusterRequest.Merge(dst, src) +} +func (m *GetClusterRequest) XXX_Size() int { + return xxx_messageInfo_GetClusterRequest.Size(m) +} +func (m *GetClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *GetClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateClusterRequest updates the settings of a cluster. +type UpdateClusterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // A description of the update. + Update *ClusterUpdate `protobuf:"bytes,4,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateClusterRequest) Reset() { *m = UpdateClusterRequest{} } +func (m *UpdateClusterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateClusterRequest) ProtoMessage() {} +func (*UpdateClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{24} +} +func (m *UpdateClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateClusterRequest.Unmarshal(m, b) +} +func (m *UpdateClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateClusterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateClusterRequest.Merge(dst, src) +} +func (m *UpdateClusterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateClusterRequest.Size(m) +} +func (m *UpdateClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateClusterRequest) GetUpdate() *ClusterUpdate { + if m != nil { + return m.Update + } + return nil +} + +func (m *UpdateClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolVersionRequest updates the version of a node pool. +type UpdateNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to upgrade. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The Kubernetes version to change the nodes to (typically an + // upgrade). 
+ // + // Users may specify either explicit versions offered by Kubernetes Engine or + // version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the Kubernetes master version + NodeVersion string `protobuf:"bytes,5,opt,name=node_version,json=nodeVersion,proto3" json:"node_version,omitempty"` + // The desired image type for the node pool. + ImageType string `protobuf:"bytes,6,opt,name=image_type,json=imageType,proto3" json:"image_type,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to + // update. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,8,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNodePoolRequest) Reset() { *m = UpdateNodePoolRequest{} } +func (m *UpdateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNodePoolRequest) ProtoMessage() {} +func (*UpdateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{25} +} +func (m *UpdateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNodePoolRequest.Unmarshal(m, b) +} +func (m *UpdateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNodePoolRequest.Merge(dst, src) +} +func (m *UpdateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNodePoolRequest.Size(m) +} +func (m *UpdateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *UpdateNodePoolRequest) GetNodeVersion() string { + if m != nil { + return m.NodeVersion + } + return "" +} + +func (m *UpdateNodePoolRequest) GetImageType() string { + if m != nil { + return m.ImageType + } + return "" +} + +func (m *UpdateNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetNodePoolAutoscalingRequest sets the autoscaler settings of a node pool. +type SetNodePoolAutoscalingRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. 
The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to upgrade. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // Autoscaling configuration for the node pool. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,5,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // The name (project, location, cluster, node pool) of the node pool to set + // autoscaler settings. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNodePoolAutoscalingRequest) Reset() { *m = SetNodePoolAutoscalingRequest{} } +func (m *SetNodePoolAutoscalingRequest) String() string { return proto.CompactTextString(m) } +func (*SetNodePoolAutoscalingRequest) ProtoMessage() {} +func (*SetNodePoolAutoscalingRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{26} +} +func (m *SetNodePoolAutoscalingRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Unmarshal(m, b) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Marshal(b, m, deterministic) +} +func (dst *SetNodePoolAutoscalingRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNodePoolAutoscalingRequest.Merge(dst, src) +} +func (m *SetNodePoolAutoscalingRequest) XXX_Size() int { + return xxx_messageInfo_SetNodePoolAutoscalingRequest.Size(m) +} +func (m *SetNodePoolAutoscalingRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNodePoolAutoscalingRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNodePoolAutoscalingRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNodePoolAutoscalingRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *SetNodePoolAutoscalingRequest) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *SetNodePoolAutoscalingRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLoggingServiceRequest sets the logging service of a cluster. +type SetLoggingServiceRequest struct { + // Deprecated. 
The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The logging service the cluster should use to write metrics. + // Currently available options: + // + // * "logging.googleapis.com" - the Google Cloud Logging service + // * "none" - no metrics will be exported from the cluster + LoggingService string `protobuf:"bytes,4,opt,name=logging_service,json=loggingService,proto3" json:"logging_service,omitempty"` + // The name (project, location, cluster) of the cluster to set logging. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLoggingServiceRequest) Reset() { *m = SetLoggingServiceRequest{} } +func (m *SetLoggingServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetLoggingServiceRequest) ProtoMessage() {} +func (*SetLoggingServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{27} +} +func (m *SetLoggingServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLoggingServiceRequest.Unmarshal(m, b) +} +func (m *SetLoggingServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLoggingServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetLoggingServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLoggingServiceRequest.Merge(dst, src) +} +func (m *SetLoggingServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetLoggingServiceRequest.Size(m) +} +func (m *SetLoggingServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLoggingServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLoggingServiceRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLoggingServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLoggingServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLoggingServiceRequest) GetLoggingService() string { + if m != nil { + return m.LoggingService + } + return "" +} + +func (m *SetLoggingServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMonitoringServiceRequest sets the monitoring service of a cluster. +type SetMonitoringServiceRequest struct { + // Deprecated. 
The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The monitoring service the cluster should use to write metrics. + // Currently available options: + // + // * "monitoring.googleapis.com" - the Google Cloud Monitoring service + // * "none" - no metrics will be exported from the cluster + MonitoringService string `protobuf:"bytes,4,opt,name=monitoring_service,json=monitoringService,proto3" json:"monitoring_service,omitempty"` + // The name (project, location, cluster) of the cluster to set monitoring. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMonitoringServiceRequest) Reset() { *m = SetMonitoringServiceRequest{} } +func (m *SetMonitoringServiceRequest) String() string { return proto.CompactTextString(m) } +func (*SetMonitoringServiceRequest) ProtoMessage() {} +func (*SetMonitoringServiceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{28} +} +func (m *SetMonitoringServiceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMonitoringServiceRequest.Unmarshal(m, b) +} +func (m *SetMonitoringServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMonitoringServiceRequest.Marshal(b, m, deterministic) +} +func (dst *SetMonitoringServiceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMonitoringServiceRequest.Merge(dst, src) +} +func (m *SetMonitoringServiceRequest) XXX_Size() int { + return xxx_messageInfo_SetMonitoringServiceRequest.Size(m) +} +func (m *SetMonitoringServiceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMonitoringServiceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMonitoringServiceRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMonitoringServiceRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetMonitoringService() string { + if m != nil { + return m.MonitoringService + } + return "" +} + +func (m *SetMonitoringServiceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetAddonsConfigRequest sets the addons associated with the cluster. 
+type SetAddonsConfigRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The desired configurations for the various addons available to run in the + // cluster. + AddonsConfig *AddonsConfig `protobuf:"bytes,4,opt,name=addons_config,json=addonsConfig,proto3" json:"addons_config,omitempty"` + // The name (project, location, cluster) of the cluster to set addons. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetAddonsConfigRequest) Reset() { *m = SetAddonsConfigRequest{} } +func (m *SetAddonsConfigRequest) String() string { return proto.CompactTextString(m) } +func (*SetAddonsConfigRequest) ProtoMessage() {} +func (*SetAddonsConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{29} +} +func (m *SetAddonsConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetAddonsConfigRequest.Unmarshal(m, b) +} +func (m *SetAddonsConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetAddonsConfigRequest.Marshal(b, m, deterministic) +} +func (dst *SetAddonsConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetAddonsConfigRequest.Merge(dst, src) +} +func (m *SetAddonsConfigRequest) XXX_Size() int { + return xxx_messageInfo_SetAddonsConfigRequest.Size(m) +} +func (m *SetAddonsConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetAddonsConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetAddonsConfigRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetAddonsConfigRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetAddonsConfigRequest) GetAddonsConfig() *AddonsConfig { + if m != nil { + return m.AddonsConfig + } + return nil +} + +func (m *SetAddonsConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLocationsRequest sets the locations of the cluster. +type SetLocationsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The desired list of Google Compute Engine + // [zones](/compute/docs/zones#available) in which the cluster's nodes + // should be located. Changing the locations a cluster is in will result + // in nodes being either created or removed from the cluster, depending on + // whether locations are being added or removed. + // + // This list must always include the cluster's primary zone. + Locations []string `protobuf:"bytes,4,rep,name=locations,proto3" json:"locations,omitempty"` + // The name (project, location, cluster) of the cluster to set locations. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLocationsRequest) Reset() { *m = SetLocationsRequest{} } +func (m *SetLocationsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLocationsRequest) ProtoMessage() {} +func (*SetLocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{30} +} +func (m *SetLocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLocationsRequest.Unmarshal(m, b) +} +func (m *SetLocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLocationsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLocationsRequest.Merge(dst, src) +} +func (m *SetLocationsRequest) XXX_Size() int { + return xxx_messageInfo_SetLocationsRequest.Size(m) +} +func (m *SetLocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLocationsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLocationsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLocationsRequest) GetLocations() []string { + if m != nil { + return m.Locations + } + return nil +} + +func (m *SetLocationsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// UpdateMasterRequest updates the master of the cluster. +type UpdateMasterRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The Kubernetes version to change the master to. + // + // Users may specify either explicit versions offered by + // Kubernetes Engine or version aliases, which have the following behavior: + // + // - "latest": picks the highest valid Kubernetes version + // - "1.X": picks the highest valid patch+gke.N patch in the 1.X version + // - "1.X.Y": picks the highest valid gke.N patch in the 1.X.Y version + // - "1.X.Y-gke.N": picks an explicit Kubernetes version + // - "-": picks the default Kubernetes version + MasterVersion string `protobuf:"bytes,4,opt,name=master_version,json=masterVersion,proto3" json:"master_version,omitempty"` + // The name (project, location, cluster) of the cluster to update. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateMasterRequest) Reset() { *m = UpdateMasterRequest{} } +func (m *UpdateMasterRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateMasterRequest) ProtoMessage() {} +func (*UpdateMasterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{31} +} +func (m *UpdateMasterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateMasterRequest.Unmarshal(m, b) +} +func (m *UpdateMasterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateMasterRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateMasterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateMasterRequest.Merge(dst, src) +} +func (m *UpdateMasterRequest) XXX_Size() int { + return xxx_messageInfo_UpdateMasterRequest.Size(m) +} +func (m *UpdateMasterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateMasterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateMasterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *UpdateMasterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *UpdateMasterRequest) GetMasterVersion() string { + if m != nil { + return m.MasterVersion + } + return "" +} + +func (m *UpdateMasterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMasterAuthRequest updates the admin password of a cluster. +type SetMasterAuthRequest struct { + // Deprecated. 
The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to upgrade. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The exact form of action to be taken on the master auth. + Action SetMasterAuthRequest_Action `protobuf:"varint,4,opt,name=action,proto3,enum=google.container.v1beta1.SetMasterAuthRequest_Action" json:"action,omitempty"` + // A description of the update. + Update *MasterAuth `protobuf:"bytes,5,opt,name=update,proto3" json:"update,omitempty"` + // The name (project, location, cluster) of the cluster to set auth. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMasterAuthRequest) Reset() { *m = SetMasterAuthRequest{} } +func (m *SetMasterAuthRequest) String() string { return proto.CompactTextString(m) } +func (*SetMasterAuthRequest) ProtoMessage() {} +func (*SetMasterAuthRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{32} +} +func (m *SetMasterAuthRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMasterAuthRequest.Unmarshal(m, b) +} +func (m *SetMasterAuthRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMasterAuthRequest.Marshal(b, m, deterministic) +} +func (dst *SetMasterAuthRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMasterAuthRequest.Merge(dst, src) +} +func (m *SetMasterAuthRequest) XXX_Size() int { + return xxx_messageInfo_SetMasterAuthRequest.Size(m) +} +func (m *SetMasterAuthRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMasterAuthRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMasterAuthRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetMasterAuthRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMasterAuthRequest) GetAction() SetMasterAuthRequest_Action { + if m != nil { + return m.Action + } + return SetMasterAuthRequest_UNKNOWN +} + +func (m *SetMasterAuthRequest) GetUpdate() *MasterAuth { + if m != nil { + return m.Update + } + return nil +} + +func (m *SetMasterAuthRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// DeleteClusterRequest deletes a cluster. +type DeleteClusterRequest struct { + // Deprecated. 
The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to delete. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster) of the cluster to delete. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteClusterRequest) Reset() { *m = DeleteClusterRequest{} } +func (m *DeleteClusterRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteClusterRequest) ProtoMessage() {} +func (*DeleteClusterRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{33} +} +func (m *DeleteClusterRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteClusterRequest.Unmarshal(m, b) +} +func (m *DeleteClusterRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteClusterRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteClusterRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteClusterRequest.Merge(dst, src) +} +func (m *DeleteClusterRequest) XXX_Size() int { + return xxx_messageInfo_DeleteClusterRequest.Size(m) +} +func (m *DeleteClusterRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteClusterRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteClusterRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteClusterRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *DeleteClusterRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListClustersRequest lists clusters. +type ListClustersRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides, or "-" for all zones. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. 
+ // The parent (project and location) where the clusters will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. + Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersRequest) Reset() { *m = ListClustersRequest{} } +func (m *ListClustersRequest) String() string { return proto.CompactTextString(m) } +func (*ListClustersRequest) ProtoMessage() {} +func (*ListClustersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{34} +} +func (m *ListClustersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersRequest.Unmarshal(m, b) +} +func (m *ListClustersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersRequest.Marshal(b, m, deterministic) +} +func (dst *ListClustersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersRequest.Merge(dst, src) +} +func (m *ListClustersRequest) XXX_Size() int { + return xxx_messageInfo_ListClustersRequest.Size(m) +} +func (m *ListClustersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListClustersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListClustersRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListClustersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// ListClustersResponse is the result of ListClustersRequest. +type ListClustersResponse struct { + // A list of clusters in the project in the specified zone, or + // across all ones. + Clusters []*Cluster `protobuf:"bytes,1,rep,name=clusters,proto3" json:"clusters,omitempty"` + // If any zones are listed here, the list of clusters returned + // may be missing those zones. 
+ MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListClustersResponse) Reset() { *m = ListClustersResponse{} } +func (m *ListClustersResponse) String() string { return proto.CompactTextString(m) } +func (*ListClustersResponse) ProtoMessage() {} +func (*ListClustersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{35} +} +func (m *ListClustersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListClustersResponse.Unmarshal(m, b) +} +func (m *ListClustersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListClustersResponse.Marshal(b, m, deterministic) +} +func (dst *ListClustersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListClustersResponse.Merge(dst, src) +} +func (m *ListClustersResponse) XXX_Size() int { + return xxx_messageInfo_ListClustersResponse.Size(m) +} +func (m *ListClustersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListClustersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListClustersResponse proto.InternalMessageInfo + +func (m *ListClustersResponse) GetClusters() []*Cluster { + if m != nil { + return m.Clusters + } + return nil +} + +func (m *ListClustersResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// GetOperationRequest gets a single operation. +type GetOperationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The server-assigned `name` of the operation. + // This field has been deprecated and replaced by the name field. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, operation id) of the operation to get. + // Specified in the format 'projects/*/locations/*/operations/*'. 
+ Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOperationRequest) Reset() { *m = GetOperationRequest{} } +func (m *GetOperationRequest) String() string { return proto.CompactTextString(m) } +func (*GetOperationRequest) ProtoMessage() {} +func (*GetOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{36} +} +func (m *GetOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOperationRequest.Unmarshal(m, b) +} +func (m *GetOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOperationRequest.Marshal(b, m, deterministic) +} +func (dst *GetOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOperationRequest.Merge(dst, src) +} +func (m *GetOperationRequest) XXX_Size() int { + return xxx_messageInfo_GetOperationRequest.Size(m) +} +func (m *GetOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOperationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *GetOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsRequest lists operations. +type ListOperationsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) to return operations for, or `-` for + // all zones. This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The parent (project and location) where the operations will be listed. + // Specified in the format 'projects/*/locations/*'. + // Location "-" matches all zones and all regions. 
+ Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsRequest) Reset() { *m = ListOperationsRequest{} } +func (m *ListOperationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListOperationsRequest) ProtoMessage() {} +func (*ListOperationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{37} +} +func (m *ListOperationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsRequest.Unmarshal(m, b) +} +func (m *ListOperationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListOperationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsRequest.Merge(dst, src) +} +func (m *ListOperationsRequest) XXX_Size() int { + return xxx_messageInfo_ListOperationsRequest.Size(m) +} +func (m *ListOperationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListOperationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListOperationsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ListOperationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// CancelOperationRequest cancels a single operation. +type CancelOperationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the operation resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The server-assigned `name` of the operation. + // This field has been deprecated and replaced by the name field. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, operation id) of the operation to cancel. + // Specified in the format 'projects/*/locations/*/operations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelOperationRequest) Reset() { *m = CancelOperationRequest{} } +func (m *CancelOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CancelOperationRequest) ProtoMessage() {} +func (*CancelOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{38} +} +func (m *CancelOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelOperationRequest.Unmarshal(m, b) +} +func (m *CancelOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CancelOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelOperationRequest.Merge(dst, src) +} +func (m *CancelOperationRequest) XXX_Size() int { + return xxx_messageInfo_CancelOperationRequest.Size(m) +} +func (m *CancelOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelOperationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CancelOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CancelOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListOperationsResponse is the result of ListOperationsRequest. +type ListOperationsResponse struct { + // A list of operations in the project in the specified zone. + Operations []*Operation `protobuf:"bytes,1,rep,name=operations,proto3" json:"operations,omitempty"` + // If any zones are listed here, the list of operations returned + // may be missing the operations from those zones. 
+ MissingZones []string `protobuf:"bytes,2,rep,name=missing_zones,json=missingZones,proto3" json:"missing_zones,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{39} +} +func (m *ListOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsResponse.Unmarshal(m, b) +} +func (m *ListOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsResponse.Merge(dst, src) +} +func (m *ListOperationsResponse) XXX_Size() int { + return xxx_messageInfo_ListOperationsResponse.Size(m) +} +func (m *ListOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsResponse proto.InternalMessageInfo + +func (m *ListOperationsResponse) GetOperations() []*Operation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ListOperationsResponse) GetMissingZones() []string { + if m != nil { + return m.MissingZones + } + return nil +} + +// Gets the current Kubernetes Engine service configuration. +type GetServerConfigRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) to return operations for. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // The name (project and location) of the server config to get + // Specified in the format 'projects/*/locations/*'. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerConfigRequest) Reset() { *m = GetServerConfigRequest{} } +func (m *GetServerConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetServerConfigRequest) ProtoMessage() {} +func (*GetServerConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{40} +} +func (m *GetServerConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerConfigRequest.Unmarshal(m, b) +} +func (m *GetServerConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetServerConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerConfigRequest.Merge(dst, src) +} +func (m *GetServerConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetServerConfigRequest.Size(m) +} +func (m *GetServerConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerConfigRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetServerConfigRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetServerConfigRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *GetServerConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Kubernetes Engine service configuration. +type ServerConfig struct { + // Version of Kubernetes the service deploys by default. + DefaultClusterVersion string `protobuf:"bytes,1,opt,name=default_cluster_version,json=defaultClusterVersion,proto3" json:"default_cluster_version,omitempty"` + // List of valid node upgrade target versions. + ValidNodeVersions []string `protobuf:"bytes,3,rep,name=valid_node_versions,json=validNodeVersions,proto3" json:"valid_node_versions,omitempty"` + // Default image type. + DefaultImageType string `protobuf:"bytes,4,opt,name=default_image_type,json=defaultImageType,proto3" json:"default_image_type,omitempty"` + // List of valid image types. + ValidImageTypes []string `protobuf:"bytes,5,rep,name=valid_image_types,json=validImageTypes,proto3" json:"valid_image_types,omitempty"` + // List of valid master versions. 
+ ValidMasterVersions []string `protobuf:"bytes,6,rep,name=valid_master_versions,json=validMasterVersions,proto3" json:"valid_master_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerConfig) Reset() { *m = ServerConfig{} } +func (m *ServerConfig) String() string { return proto.CompactTextString(m) } +func (*ServerConfig) ProtoMessage() {} +func (*ServerConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{41} +} +func (m *ServerConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerConfig.Unmarshal(m, b) +} +func (m *ServerConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerConfig.Marshal(b, m, deterministic) +} +func (dst *ServerConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerConfig.Merge(dst, src) +} +func (m *ServerConfig) XXX_Size() int { + return xxx_messageInfo_ServerConfig.Size(m) +} +func (m *ServerConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ServerConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerConfig proto.InternalMessageInfo + +func (m *ServerConfig) GetDefaultClusterVersion() string { + if m != nil { + return m.DefaultClusterVersion + } + return "" +} + +func (m *ServerConfig) GetValidNodeVersions() []string { + if m != nil { + return m.ValidNodeVersions + } + return nil +} + +func (m *ServerConfig) GetDefaultImageType() string { + if m != nil { + return m.DefaultImageType + } + return "" +} + +func (m *ServerConfig) GetValidImageTypes() []string { + if m != nil { + return m.ValidImageTypes + } + return nil +} + +func (m *ServerConfig) GetValidMasterVersions() []string { + if m != nil { + return m.ValidMasterVersions + } + return nil +} + +// CreateNodePoolRequest creates a node pool for a cluster. +type CreateNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the parent field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The node pool to create. + NodePool *NodePool `protobuf:"bytes,4,opt,name=node_pool,json=nodePool,proto3" json:"node_pool,omitempty"` + // The parent (project, location, cluster id) where the node pool will be + // created. Specified in the format + // 'projects/*/locations/*/clusters/*'. 
+ Parent string `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNodePoolRequest) Reset() { *m = CreateNodePoolRequest{} } +func (m *CreateNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNodePoolRequest) ProtoMessage() {} +func (*CreateNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{42} +} +func (m *CreateNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNodePoolRequest.Unmarshal(m, b) +} +func (m *CreateNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNodePoolRequest.Merge(dst, src) +} +func (m *CreateNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_CreateNodePoolRequest.Size(m) +} +func (m *CreateNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CreateNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CreateNodePoolRequest) GetNodePool() *NodePool { + if m != nil { + return m.NodePool + } + return nil +} + +func (m *CreateNodePoolRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// DeleteNodePoolRequest deletes a node pool for a cluster. +type DeleteNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to delete. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster, node pool id) of the node pool to + // delete. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNodePoolRequest) Reset() { *m = DeleteNodePoolRequest{} } +func (m *DeleteNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNodePoolRequest) ProtoMessage() {} +func (*DeleteNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{43} +} +func (m *DeleteNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNodePoolRequest.Unmarshal(m, b) +} +func (m *DeleteNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNodePoolRequest.Merge(dst, src) +} +func (m *DeleteNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNodePoolRequest.Size(m) +} +func (m *DeleteNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *DeleteNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *DeleteNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListNodePoolsRequest lists the node pool(s) for a cluster. +type ListNodePoolsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the parent field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the parent field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the parent field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The parent (project, location, cluster id) where the node pools will be + // listed. Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNodePoolsRequest) Reset() { *m = ListNodePoolsRequest{} } +func (m *ListNodePoolsRequest) String() string { return proto.CompactTextString(m) } +func (*ListNodePoolsRequest) ProtoMessage() {} +func (*ListNodePoolsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{44} +} +func (m *ListNodePoolsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNodePoolsRequest.Unmarshal(m, b) +} +func (m *ListNodePoolsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNodePoolsRequest.Marshal(b, m, deterministic) +} +func (dst *ListNodePoolsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNodePoolsRequest.Merge(dst, src) +} +func (m *ListNodePoolsRequest) XXX_Size() int { + return xxx_messageInfo_ListNodePoolsRequest.Size(m) +} +func (m *ListNodePoolsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNodePoolsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNodePoolsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *ListNodePoolsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *ListNodePoolsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// GetNodePoolRequest retrieves a node pool for a cluster. +type GetNodePoolRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster, node pool id) of the node pool to + // get. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNodePoolRequest) Reset() { *m = GetNodePoolRequest{} } +func (m *GetNodePoolRequest) String() string { return proto.CompactTextString(m) } +func (*GetNodePoolRequest) ProtoMessage() {} +func (*GetNodePoolRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{45} +} +func (m *GetNodePoolRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNodePoolRequest.Unmarshal(m, b) +} +func (m *GetNodePoolRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNodePoolRequest.Marshal(b, m, deterministic) +} +func (dst *GetNodePoolRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNodePoolRequest.Merge(dst, src) +} +func (m *GetNodePoolRequest) XXX_Size() int { + return xxx_messageInfo_GetNodePoolRequest.Size(m) +} +func (m *GetNodePoolRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNodePoolRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNodePoolRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +// Deprecated: Do not use. +func (m *GetNodePoolRequest) GetNodePoolId() string { + if m != nil { + return m.NodePoolId + } + return "" +} + +func (m *GetNodePoolRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// NodePool contains the name and configuration for a cluster's node pool. +// Node pools are a set of nodes (i.e. VM's), with a common configuration and +// specification, under the control of the cluster master. They may have a set +// of Kubernetes labels applied to them, which may be used to reference them +// during pod scheduling. They may also be resized up or down, to accommodate +// the workload. +type NodePool struct { + // The name of the node pool. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The node configuration of the pool. + Config *NodeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // The initial node count for the pool. You must ensure that your + // Compute Engine resource quota + // is sufficient for this number of instances. You must also have available + // firewall and routes quota. + InitialNodeCount int32 `protobuf:"varint,3,opt,name=initial_node_count,json=initialNodeCount,proto3" json:"initial_node_count,omitempty"` + // [Output only] Server-defined URL for the resource. + SelfLink string `protobuf:"bytes,100,opt,name=self_link,json=selfLink,proto3" json:"self_link,omitempty"` + // The version of the Kubernetes of this node. + Version string `protobuf:"bytes,101,opt,name=version,proto3" json:"version,omitempty"` + // [Output only] The resource URLs of the [managed instance + // groups](/compute/docs/instance-groups/creating-groups-of-managed-instances) + // associated with this node pool. 
+ InstanceGroupUrls []string `protobuf:"bytes,102,rep,name=instance_group_urls,json=instanceGroupUrls,proto3" json:"instance_group_urls,omitempty"` + // [Output only] The status of the nodes in this pool instance. + Status NodePool_Status `protobuf:"varint,103,opt,name=status,proto3,enum=google.container.v1beta1.NodePool_Status" json:"status,omitempty"` + // [Output only] Additional information about the current status of this + // node pool instance, if available. + StatusMessage string `protobuf:"bytes,104,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // Autoscaler configuration for this NodePool. Autoscaler is enabled + // only if a valid configuration is present. + Autoscaling *NodePoolAutoscaling `protobuf:"bytes,4,opt,name=autoscaling,proto3" json:"autoscaling,omitempty"` + // NodeManagement configuration for this NodePool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + // The constraint on the maximum number of pods that can be run + // simultaneously on a node in the node pool. + MaxPodsConstraint *MaxPodsConstraint `protobuf:"bytes,6,opt,name=max_pods_constraint,json=maxPodsConstraint,proto3" json:"max_pods_constraint,omitempty"` + // Which conditions caused the current node pool state. + Conditions []*StatusCondition `protobuf:"bytes,105,rep,name=conditions,proto3" json:"conditions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePool) Reset() { *m = NodePool{} } +func (m *NodePool) String() string { return proto.CompactTextString(m) } +func (*NodePool) ProtoMessage() {} +func (*NodePool) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{46} +} +func (m *NodePool) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePool.Unmarshal(m, b) +} +func (m *NodePool) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePool.Marshal(b, m, deterministic) +} +func (dst *NodePool) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePool.Merge(dst, src) +} +func (m *NodePool) XXX_Size() int { + return xxx_messageInfo_NodePool.Size(m) +} +func (m *NodePool) XXX_DiscardUnknown() { + xxx_messageInfo_NodePool.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePool proto.InternalMessageInfo + +func (m *NodePool) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NodePool) GetConfig() *NodeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *NodePool) GetInitialNodeCount() int32 { + if m != nil { + return m.InitialNodeCount + } + return 0 +} + +func (m *NodePool) GetSelfLink() string { + if m != nil { + return m.SelfLink + } + return "" +} + +func (m *NodePool) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *NodePool) GetInstanceGroupUrls() []string { + if m != nil { + return m.InstanceGroupUrls + } + return nil +} + +func (m *NodePool) GetStatus() NodePool_Status { + if m != nil { + return m.Status + } + return NodePool_STATUS_UNSPECIFIED +} + +func (m *NodePool) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *NodePool) GetAutoscaling() *NodePoolAutoscaling { + if m != nil { + return m.Autoscaling + } + return nil +} + +func (m *NodePool) GetManagement() *NodeManagement { + if m != nil { + return m.Management + } + return nil +} + +func (m *NodePool) GetMaxPodsConstraint() 
*MaxPodsConstraint { + if m != nil { + return m.MaxPodsConstraint + } + return nil +} + +func (m *NodePool) GetConditions() []*StatusCondition { + if m != nil { + return m.Conditions + } + return nil +} + +// NodeManagement defines the set of node management services turned on for the +// node pool. +type NodeManagement struct { + // Whether the nodes will be automatically upgraded. + AutoUpgrade bool `protobuf:"varint,1,opt,name=auto_upgrade,json=autoUpgrade,proto3" json:"auto_upgrade,omitempty"` + // Whether the nodes will be automatically repaired. + AutoRepair bool `protobuf:"varint,2,opt,name=auto_repair,json=autoRepair,proto3" json:"auto_repair,omitempty"` + // Specifies the Auto Upgrade knobs for the node pool. + UpgradeOptions *AutoUpgradeOptions `protobuf:"bytes,10,opt,name=upgrade_options,json=upgradeOptions,proto3" json:"upgrade_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodeManagement) Reset() { *m = NodeManagement{} } +func (m *NodeManagement) String() string { return proto.CompactTextString(m) } +func (*NodeManagement) ProtoMessage() {} +func (*NodeManagement) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{47} +} +func (m *NodeManagement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodeManagement.Unmarshal(m, b) +} +func (m *NodeManagement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodeManagement.Marshal(b, m, deterministic) +} +func (dst *NodeManagement) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodeManagement.Merge(dst, src) +} +func (m *NodeManagement) XXX_Size() int { + return xxx_messageInfo_NodeManagement.Size(m) +} +func (m *NodeManagement) XXX_DiscardUnknown() { + xxx_messageInfo_NodeManagement.DiscardUnknown(m) +} + +var xxx_messageInfo_NodeManagement proto.InternalMessageInfo + +func (m *NodeManagement) GetAutoUpgrade() bool { + if m != nil { + return m.AutoUpgrade + } + return false +} + +func (m *NodeManagement) GetAutoRepair() bool { + if m != nil { + return m.AutoRepair + } + return false +} + +func (m *NodeManagement) GetUpgradeOptions() *AutoUpgradeOptions { + if m != nil { + return m.UpgradeOptions + } + return nil +} + +// AutoUpgradeOptions defines the set of options for the user to control how +// the Auto Upgrades will proceed. +type AutoUpgradeOptions struct { + // [Output only] This field is set when upgrades are about to commence + // with the approximate start time for the upgrades, in + // [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. + AutoUpgradeStartTime string `protobuf:"bytes,1,opt,name=auto_upgrade_start_time,json=autoUpgradeStartTime,proto3" json:"auto_upgrade_start_time,omitempty"` + // [Output only] This field is set when upgrades are about to commence + // with the description of the upgrade. 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AutoUpgradeOptions) Reset() { *m = AutoUpgradeOptions{} } +func (m *AutoUpgradeOptions) String() string { return proto.CompactTextString(m) } +func (*AutoUpgradeOptions) ProtoMessage() {} +func (*AutoUpgradeOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{48} +} +func (m *AutoUpgradeOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AutoUpgradeOptions.Unmarshal(m, b) +} +func (m *AutoUpgradeOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AutoUpgradeOptions.Marshal(b, m, deterministic) +} +func (dst *AutoUpgradeOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_AutoUpgradeOptions.Merge(dst, src) +} +func (m *AutoUpgradeOptions) XXX_Size() int { + return xxx_messageInfo_AutoUpgradeOptions.Size(m) +} +func (m *AutoUpgradeOptions) XXX_DiscardUnknown() { + xxx_messageInfo_AutoUpgradeOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_AutoUpgradeOptions proto.InternalMessageInfo + +func (m *AutoUpgradeOptions) GetAutoUpgradeStartTime() string { + if m != nil { + return m.AutoUpgradeStartTime + } + return "" +} + +func (m *AutoUpgradeOptions) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// MaintenancePolicy defines the maintenance policy to be used for the cluster. +type MaintenancePolicy struct { + // Specifies the maintenance window in which maintenance may be performed. + Window *MaintenanceWindow `protobuf:"bytes,1,opt,name=window,proto3" json:"window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenancePolicy) Reset() { *m = MaintenancePolicy{} } +func (m *MaintenancePolicy) String() string { return proto.CompactTextString(m) } +func (*MaintenancePolicy) ProtoMessage() {} +func (*MaintenancePolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{49} +} +func (m *MaintenancePolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenancePolicy.Unmarshal(m, b) +} +func (m *MaintenancePolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenancePolicy.Marshal(b, m, deterministic) +} +func (dst *MaintenancePolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenancePolicy.Merge(dst, src) +} +func (m *MaintenancePolicy) XXX_Size() int { + return xxx_messageInfo_MaintenancePolicy.Size(m) +} +func (m *MaintenancePolicy) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenancePolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenancePolicy proto.InternalMessageInfo + +func (m *MaintenancePolicy) GetWindow() *MaintenanceWindow { + if m != nil { + return m.Window + } + return nil +} + +// MaintenanceWindow defines the maintenance window to be used for the cluster. +type MaintenanceWindow struct { + // Unimplemented, reserved for future use. 
+ // HourlyMaintenanceWindow hourly_maintenance_window = 1; + // + // Types that are valid to be assigned to Policy: + // *MaintenanceWindow_DailyMaintenanceWindow + Policy isMaintenanceWindow_Policy `protobuf_oneof:"policy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaintenanceWindow) Reset() { *m = MaintenanceWindow{} } +func (m *MaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*MaintenanceWindow) ProtoMessage() {} +func (*MaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{50} +} +func (m *MaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaintenanceWindow.Unmarshal(m, b) +} +func (m *MaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *MaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaintenanceWindow.Merge(dst, src) +} +func (m *MaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_MaintenanceWindow.Size(m) +} +func (m *MaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_MaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_MaintenanceWindow proto.InternalMessageInfo + +type isMaintenanceWindow_Policy interface { + isMaintenanceWindow_Policy() +} + +type MaintenanceWindow_DailyMaintenanceWindow struct { + DailyMaintenanceWindow *DailyMaintenanceWindow `protobuf:"bytes,2,opt,name=daily_maintenance_window,json=dailyMaintenanceWindow,proto3,oneof"` +} + +func (*MaintenanceWindow_DailyMaintenanceWindow) isMaintenanceWindow_Policy() {} + +func (m *MaintenanceWindow) GetPolicy() isMaintenanceWindow_Policy { + if m != nil { + return m.Policy + } + return nil +} + +func (m *MaintenanceWindow) GetDailyMaintenanceWindow() *DailyMaintenanceWindow { + if x, ok := m.GetPolicy().(*MaintenanceWindow_DailyMaintenanceWindow); ok { + return x.DailyMaintenanceWindow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*MaintenanceWindow) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+ return _MaintenanceWindow_OneofMarshaler, _MaintenanceWindow_OneofUnmarshaler, _MaintenanceWindow_OneofSizer, []interface{}{
+ (*MaintenanceWindow_DailyMaintenanceWindow)(nil),
+ }
+}
+
+func _MaintenanceWindow_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+ m := msg.(*MaintenanceWindow)
+ // policy
+ switch x := m.Policy.(type) {
+ case *MaintenanceWindow_DailyMaintenanceWindow:
+ b.EncodeVarint(2<<3 | proto.WireBytes)
+ if err := b.EncodeMessage(x.DailyMaintenanceWindow); err != nil {
+ return err
+ }
+ case nil:
+ default:
+ return fmt.Errorf("MaintenanceWindow.Policy has unexpected type %T", x)
+ }
+ return nil
+}
+
+func _MaintenanceWindow_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+ m := msg.(*MaintenanceWindow)
+ switch tag {
+ case 2: // policy.daily_maintenance_window
+ if wire != proto.WireBytes {
+ return true, proto.ErrInternalBadWireType
+ }
+ msg := new(DailyMaintenanceWindow)
+ err := b.DecodeMessage(msg)
+ m.Policy = &MaintenanceWindow_DailyMaintenanceWindow{msg}
+ return true, err
+ default:
+ return false, nil
+ }
+}
+
+func _MaintenanceWindow_OneofSizer(msg proto.Message) (n int) {
+ m := msg.(*MaintenanceWindow)
+ // policy
+ switch x := m.Policy.(type) {
+ case *MaintenanceWindow_DailyMaintenanceWindow:
+ s := proto.Size(x.DailyMaintenanceWindow)
+ n += 1 // tag and wire
+ n += proto.SizeVarint(uint64(s))
+ n += s
+ case nil:
+ default:
+ panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+ }
+ return n
+}
+
+// Time window specified for daily maintenance operations.
+type DailyMaintenanceWindow struct {
+ // Time within the maintenance window to start the maintenance operations.
+ // It must be in format "HH:MM", where HH : [00-23] and MM : [00-59] GMT.
+ StartTime string `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
+ // [Output only] Duration of the time window, automatically chosen to be
+ // smallest possible in the given scenario.
+ Duration string `protobuf:"bytes,3,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DailyMaintenanceWindow) Reset() { *m = DailyMaintenanceWindow{} } +func (m *DailyMaintenanceWindow) String() string { return proto.CompactTextString(m) } +func (*DailyMaintenanceWindow) ProtoMessage() {} +func (*DailyMaintenanceWindow) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{51} +} +func (m *DailyMaintenanceWindow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DailyMaintenanceWindow.Unmarshal(m, b) +} +func (m *DailyMaintenanceWindow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DailyMaintenanceWindow.Marshal(b, m, deterministic) +} +func (dst *DailyMaintenanceWindow) XXX_Merge(src proto.Message) { + xxx_messageInfo_DailyMaintenanceWindow.Merge(dst, src) +} +func (m *DailyMaintenanceWindow) XXX_Size() int { + return xxx_messageInfo_DailyMaintenanceWindow.Size(m) +} +func (m *DailyMaintenanceWindow) XXX_DiscardUnknown() { + xxx_messageInfo_DailyMaintenanceWindow.DiscardUnknown(m) +} + +var xxx_messageInfo_DailyMaintenanceWindow proto.InternalMessageInfo + +func (m *DailyMaintenanceWindow) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *DailyMaintenanceWindow) GetDuration() string { + if m != nil { + return m.Duration + } + return "" +} + +// SetNodePoolManagementRequest sets the node management properties of a node +// pool. +type SetNodePoolManagementRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to update. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the node pool to update. + // This field has been deprecated and replaced by the name field. + NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use. + // NodeManagement configuration for the node pool. + Management *NodeManagement `protobuf:"bytes,5,opt,name=management,proto3" json:"management,omitempty"` + // The name (project, location, cluster, node pool id) of the node pool to set + // management properties. Specified in the format + // 'projects/*/locations/*/clusters/*/nodePools/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *SetNodePoolManagementRequest) Reset() { *m = SetNodePoolManagementRequest{} }
+func (m *SetNodePoolManagementRequest) String() string { return proto.CompactTextString(m) }
+func (*SetNodePoolManagementRequest) ProtoMessage() {}
+func (*SetNodePoolManagementRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_b093026a38af0865, []int{52}
+}
+func (m *SetNodePoolManagementRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Unmarshal(m, b)
+}
+func (m *SetNodePoolManagementRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetNodePoolManagementRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_SetNodePoolManagementRequest.Merge(dst, src)
+}
+func (m *SetNodePoolManagementRequest) XXX_Size() int {
+ return xxx_messageInfo_SetNodePoolManagementRequest.Size(m)
+}
+func (m *SetNodePoolManagementRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_SetNodePoolManagementRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNodePoolManagementRequest proto.InternalMessageInfo
+
+// Deprecated: Do not use.
+func (m *SetNodePoolManagementRequest) GetProjectId() string {
+ if m != nil {
+ return m.ProjectId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolManagementRequest) GetZone() string {
+ if m != nil {
+ return m.Zone
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolManagementRequest) GetClusterId() string {
+ if m != nil {
+ return m.ClusterId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolManagementRequest) GetNodePoolId() string {
+ if m != nil {
+ return m.NodePoolId
+ }
+ return ""
+}
+
+func (m *SetNodePoolManagementRequest) GetManagement() *NodeManagement {
+ if m != nil {
+ return m.Management
+ }
+ return nil
+}
+
+func (m *SetNodePoolManagementRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// SetNodePoolSizeRequest sets the size of a node
+// pool.
+type SetNodePoolSizeRequest struct {
+ // Deprecated. The Google Developers Console [project ID or project
+ // number](https://support.google.com/cloud/answer/6158840).
+ // This field has been deprecated and replaced by the name field.
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the Google Compute Engine
+ // [zone](/compute/docs/zones#available) in which the cluster
+ // resides.
+ // This field has been deprecated and replaced by the name field.
+ Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the cluster to update.
+ // This field has been deprecated and replaced by the name field.
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the node pool to update.
+ // This field has been deprecated and replaced by the name field.
+ NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use.
+ // The desired node count for the pool.
+ NodeCount int32 `protobuf:"varint,5,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"`
+ // The name (project, location, cluster, node pool id) of the node pool to set
+ // size.
+ // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'.
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *SetNodePoolSizeRequest) Reset() { *m = SetNodePoolSizeRequest{} }
+func (m *SetNodePoolSizeRequest) String() string { return proto.CompactTextString(m) }
+func (*SetNodePoolSizeRequest) ProtoMessage() {}
+func (*SetNodePoolSizeRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_b093026a38af0865, []int{53}
+}
+func (m *SetNodePoolSizeRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Unmarshal(m, b)
+}
+func (m *SetNodePoolSizeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetNodePoolSizeRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_SetNodePoolSizeRequest.Merge(dst, src)
+}
+func (m *SetNodePoolSizeRequest) XXX_Size() int {
+ return xxx_messageInfo_SetNodePoolSizeRequest.Size(m)
+}
+func (m *SetNodePoolSizeRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_SetNodePoolSizeRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNodePoolSizeRequest proto.InternalMessageInfo
+
+// Deprecated: Do not use.
+func (m *SetNodePoolSizeRequest) GetProjectId() string {
+ if m != nil {
+ return m.ProjectId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolSizeRequest) GetZone() string {
+ if m != nil {
+ return m.Zone
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolSizeRequest) GetClusterId() string {
+ if m != nil {
+ return m.ClusterId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *SetNodePoolSizeRequest) GetNodePoolId() string {
+ if m != nil {
+ return m.NodePoolId
+ }
+ return ""
+}
+
+func (m *SetNodePoolSizeRequest) GetNodeCount() int32 {
+ if m != nil {
+ return m.NodeCount
+ }
+ return 0
+}
+
+func (m *SetNodePoolSizeRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// RollbackNodePoolUpgradeRequest rolls back the previously Aborted or Failed
+// NodePool upgrade. This will be a no-op if the last upgrade successfully
+// completed.
+type RollbackNodePoolUpgradeRequest struct {
+ // Deprecated. The Google Developers Console [project ID or project
+ // number](https://support.google.com/cloud/answer/6158840).
+ // This field has been deprecated and replaced by the name field.
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the Google Compute Engine
+ // [zone](/compute/docs/zones#available) in which the cluster
+ // resides.
+ // This field has been deprecated and replaced by the name field.
+ Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the cluster to roll back.
+ // This field has been deprecated and replaced by the name field.
+ ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use.
+ // Deprecated. The name of the node pool to roll back.
+ // This field has been deprecated and replaced by the name field.
+ NodePoolId string `protobuf:"bytes,4,opt,name=node_pool_id,json=nodePoolId,proto3" json:"node_pool_id,omitempty"` // Deprecated: Do not use.
+ // The name (project, location, cluster, node pool id) of the node pool to
+ // roll back the upgrade.
+ // Specified in the format 'projects/*/locations/*/clusters/*/nodePools/*'.
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *RollbackNodePoolUpgradeRequest) Reset() { *m = RollbackNodePoolUpgradeRequest{} }
+func (m *RollbackNodePoolUpgradeRequest) String() string { return proto.CompactTextString(m) }
+func (*RollbackNodePoolUpgradeRequest) ProtoMessage() {}
+func (*RollbackNodePoolUpgradeRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_b093026a38af0865, []int{54}
+}
+func (m *RollbackNodePoolUpgradeRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Unmarshal(m, b)
+}
+func (m *RollbackNodePoolUpgradeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Marshal(b, m, deterministic)
+}
+func (dst *RollbackNodePoolUpgradeRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_RollbackNodePoolUpgradeRequest.Merge(dst, src)
+}
+func (m *RollbackNodePoolUpgradeRequest) XXX_Size() int {
+ return xxx_messageInfo_RollbackNodePoolUpgradeRequest.Size(m)
+}
+func (m *RollbackNodePoolUpgradeRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_RollbackNodePoolUpgradeRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_RollbackNodePoolUpgradeRequest proto.InternalMessageInfo
+
+// Deprecated: Do not use.
+func (m *RollbackNodePoolUpgradeRequest) GetProjectId() string {
+ if m != nil {
+ return m.ProjectId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *RollbackNodePoolUpgradeRequest) GetZone() string {
+ if m != nil {
+ return m.Zone
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *RollbackNodePoolUpgradeRequest) GetClusterId() string {
+ if m != nil {
+ return m.ClusterId
+ }
+ return ""
+}
+
+// Deprecated: Do not use.
+func (m *RollbackNodePoolUpgradeRequest) GetNodePoolId() string {
+ if m != nil {
+ return m.NodePoolId
+ }
+ return ""
+}
+
+func (m *RollbackNodePoolUpgradeRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// ListNodePoolsResponse is the result of ListNodePoolsRequest.
+type ListNodePoolsResponse struct {
+ // A list of node pools for a cluster.
+ NodePools []*NodePool `protobuf:"bytes,1,rep,name=node_pools,json=nodePools,proto3" json:"node_pools,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNodePoolsResponse) Reset() { *m = ListNodePoolsResponse{} } +func (m *ListNodePoolsResponse) String() string { return proto.CompactTextString(m) } +func (*ListNodePoolsResponse) ProtoMessage() {} +func (*ListNodePoolsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{55} +} +func (m *ListNodePoolsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNodePoolsResponse.Unmarshal(m, b) +} +func (m *ListNodePoolsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNodePoolsResponse.Marshal(b, m, deterministic) +} +func (dst *ListNodePoolsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNodePoolsResponse.Merge(dst, src) +} +func (m *ListNodePoolsResponse) XXX_Size() int { + return xxx_messageInfo_ListNodePoolsResponse.Size(m) +} +func (m *ListNodePoolsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNodePoolsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNodePoolsResponse proto.InternalMessageInfo + +func (m *ListNodePoolsResponse) GetNodePools() []*NodePool { + if m != nil { + return m.NodePools + } + return nil +} + +// ClusterAutoscaling contains global, per-cluster information +// required by Cluster Autoscaler to automatically adjust +// the size of the cluster and create/delete +// node pools based on the current needs. +type ClusterAutoscaling struct { + // Enables automatic node pool creation and deletion. + EnableNodeAutoprovisioning bool `protobuf:"varint,1,opt,name=enable_node_autoprovisioning,json=enableNodeAutoprovisioning,proto3" json:"enable_node_autoprovisioning,omitempty"` + // Contains global constraints regarding minimum and maximum + // amount of resources in the cluster. 
+ ResourceLimits []*ResourceLimit `protobuf:"bytes,2,rep,name=resource_limits,json=resourceLimits,proto3" json:"resource_limits,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterAutoscaling) Reset() { *m = ClusterAutoscaling{} } +func (m *ClusterAutoscaling) String() string { return proto.CompactTextString(m) } +func (*ClusterAutoscaling) ProtoMessage() {} +func (*ClusterAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{56} +} +func (m *ClusterAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterAutoscaling.Unmarshal(m, b) +} +func (m *ClusterAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterAutoscaling.Marshal(b, m, deterministic) +} +func (dst *ClusterAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterAutoscaling.Merge(dst, src) +} +func (m *ClusterAutoscaling) XXX_Size() int { + return xxx_messageInfo_ClusterAutoscaling.Size(m) +} +func (m *ClusterAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterAutoscaling proto.InternalMessageInfo + +func (m *ClusterAutoscaling) GetEnableNodeAutoprovisioning() bool { + if m != nil { + return m.EnableNodeAutoprovisioning + } + return false +} + +func (m *ClusterAutoscaling) GetResourceLimits() []*ResourceLimit { + if m != nil { + return m.ResourceLimits + } + return nil +} + +// Contains information about amount of some resource in the cluster. +// For memory, value should be in GB. +type ResourceLimit struct { + // Resource name "cpu", "memory" or gpu-specific string. + ResourceType string `protobuf:"bytes,1,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + // Minimum amount of the resource in the cluster. + Minimum int64 `protobuf:"varint,2,opt,name=minimum,proto3" json:"minimum,omitempty"` + // Maximum amount of the resource in the cluster. 
+ Maximum int64 `protobuf:"varint,3,opt,name=maximum,proto3" json:"maximum,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ResourceLimit) Reset() { *m = ResourceLimit{} }
+func (m *ResourceLimit) String() string { return proto.CompactTextString(m) }
+func (*ResourceLimit) ProtoMessage() {}
+func (*ResourceLimit) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cluster_service_b093026a38af0865, []int{57}
+}
+func (m *ResourceLimit) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_ResourceLimit.Unmarshal(m, b)
+}
+func (m *ResourceLimit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_ResourceLimit.Marshal(b, m, deterministic)
+}
+func (dst *ResourceLimit) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_ResourceLimit.Merge(dst, src)
+}
+func (m *ResourceLimit) XXX_Size() int {
+ return xxx_messageInfo_ResourceLimit.Size(m)
+}
+func (m *ResourceLimit) XXX_DiscardUnknown() {
+ xxx_messageInfo_ResourceLimit.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ResourceLimit proto.InternalMessageInfo
+
+func (m *ResourceLimit) GetResourceType() string {
+ if m != nil {
+ return m.ResourceType
+ }
+ return ""
+}
+
+func (m *ResourceLimit) GetMinimum() int64 {
+ if m != nil {
+ return m.Minimum
+ }
+ return 0
+}
+
+func (m *ResourceLimit) GetMaximum() int64 {
+ if m != nil {
+ return m.Maximum
+ }
+ return 0
+}
+
+// NodePoolAutoscaling contains information required by cluster autoscaler to
+// adjust the size of the node pool to the current cluster usage.
+type NodePoolAutoscaling struct {
+ // Is autoscaling enabled for this node pool.
+ Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"`
+ // Minimum number of nodes in the NodePool. Must be >= 1 and <=
+ // max_node_count.
+ MinNodeCount int32 `protobuf:"varint,2,opt,name=min_node_count,json=minNodeCount,proto3" json:"min_node_count,omitempty"`
+ // Maximum number of nodes in the NodePool. Must be >= min_node_count. There
+ // has to be enough quota to scale up the cluster.
+ MaxNodeCount int32 `protobuf:"varint,3,opt,name=max_node_count,json=maxNodeCount,proto3" json:"max_node_count,omitempty"`
+ // Can this node pool be deleted automatically.
+ Autoprovisioned bool `protobuf:"varint,4,opt,name=autoprovisioned,proto3" json:"autoprovisioned,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NodePoolAutoscaling) Reset() { *m = NodePoolAutoscaling{} } +func (m *NodePoolAutoscaling) String() string { return proto.CompactTextString(m) } +func (*NodePoolAutoscaling) ProtoMessage() {} +func (*NodePoolAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{58} +} +func (m *NodePoolAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NodePoolAutoscaling.Unmarshal(m, b) +} +func (m *NodePoolAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NodePoolAutoscaling.Marshal(b, m, deterministic) +} +func (dst *NodePoolAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_NodePoolAutoscaling.Merge(dst, src) +} +func (m *NodePoolAutoscaling) XXX_Size() int { + return xxx_messageInfo_NodePoolAutoscaling.Size(m) +} +func (m *NodePoolAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_NodePoolAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_NodePoolAutoscaling proto.InternalMessageInfo + +func (m *NodePoolAutoscaling) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *NodePoolAutoscaling) GetMinNodeCount() int32 { + if m != nil { + return m.MinNodeCount + } + return 0 +} + +func (m *NodePoolAutoscaling) GetMaxNodeCount() int32 { + if m != nil { + return m.MaxNodeCount + } + return 0 +} + +func (m *NodePoolAutoscaling) GetAutoprovisioned() bool { + if m != nil { + return m.Autoprovisioned + } + return false +} + +// SetLabelsRequest sets the Google Cloud Platform labels on a Google Container +// Engine cluster, which will in turn set them for Google Compute Engine +// resources used by that cluster +type SetLabelsRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The labels to set for that cluster. + ResourceLabels map[string]string `protobuf:"bytes,4,rep,name=resource_labels,json=resourceLabels,proto3" json:"resource_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The fingerprint of the previous set of labels for this resource, + // used to detect conflicts. The fingerprint is initially generated by + // Kubernetes Engine and changes after every request to modify or update + // labels. You must always provide an up-to-date fingerprint hash when + // updating or changing labels. Make a get() request to the + // resource to get the latest fingerprint. 
+ LabelFingerprint string `protobuf:"bytes,5,opt,name=label_fingerprint,json=labelFingerprint,proto3" json:"label_fingerprint,omitempty"` + // The name (project, location, cluster id) of the cluster to set labels. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLabelsRequest) Reset() { *m = SetLabelsRequest{} } +func (m *SetLabelsRequest) String() string { return proto.CompactTextString(m) } +func (*SetLabelsRequest) ProtoMessage() {} +func (*SetLabelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{59} +} +func (m *SetLabelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLabelsRequest.Unmarshal(m, b) +} +func (m *SetLabelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLabelsRequest.Marshal(b, m, deterministic) +} +func (dst *SetLabelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLabelsRequest.Merge(dst, src) +} +func (m *SetLabelsRequest) XXX_Size() int { + return xxx_messageInfo_SetLabelsRequest.Size(m) +} +func (m *SetLabelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLabelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLabelsRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLabelsRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLabelsRequest) GetResourceLabels() map[string]string { + if m != nil { + return m.ResourceLabels + } + return nil +} + +func (m *SetLabelsRequest) GetLabelFingerprint() string { + if m != nil { + return m.LabelFingerprint + } + return "" +} + +func (m *SetLabelsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetLegacyAbacRequest enables or disables the ABAC authorization mechanism for +// a cluster. +type SetLegacyAbacRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster to update. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Whether ABAC authorization will be enabled in the cluster. + Enabled bool `protobuf:"varint,4,opt,name=enabled,proto3" json:"enabled,omitempty"` + // The name (project, location, cluster id) of the cluster to set legacy abac. 
+ // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetLegacyAbacRequest) Reset() { *m = SetLegacyAbacRequest{} } +func (m *SetLegacyAbacRequest) String() string { return proto.CompactTextString(m) } +func (*SetLegacyAbacRequest) ProtoMessage() {} +func (*SetLegacyAbacRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{60} +} +func (m *SetLegacyAbacRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetLegacyAbacRequest.Unmarshal(m, b) +} +func (m *SetLegacyAbacRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetLegacyAbacRequest.Marshal(b, m, deterministic) +} +func (dst *SetLegacyAbacRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetLegacyAbacRequest.Merge(dst, src) +} +func (m *SetLegacyAbacRequest) XXX_Size() int { + return xxx_messageInfo_SetLegacyAbacRequest.Size(m) +} +func (m *SetLegacyAbacRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetLegacyAbacRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetLegacyAbacRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetLegacyAbacRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetLegacyAbacRequest) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *SetLegacyAbacRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// StartIPRotationRequest creates a new IP for the cluster and then performs +// a node upgrade on each node pool to point to the new IP. +type StartIPRotationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster id) of the cluster to start IP + // rotation. Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + // Whether to rotate credentials during IP rotation. 
+ RotateCredentials bool `protobuf:"varint,7,opt,name=rotate_credentials,json=rotateCredentials,proto3" json:"rotate_credentials,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartIPRotationRequest) Reset() { *m = StartIPRotationRequest{} } +func (m *StartIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*StartIPRotationRequest) ProtoMessage() {} +func (*StartIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{61} +} +func (m *StartIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartIPRotationRequest.Unmarshal(m, b) +} +func (m *StartIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *StartIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartIPRotationRequest.Merge(dst, src) +} +func (m *StartIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_StartIPRotationRequest.Size(m) +} +func (m *StartIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartIPRotationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *StartIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *StartIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StartIPRotationRequest) GetRotateCredentials() bool { + if m != nil { + return m.RotateCredentials + } + return false +} + +// CompleteIPRotationRequest moves the cluster master back into single-IP mode. +type CompleteIPRotationRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // The name (project, location, cluster id) of the cluster to complete IP + // rotation. Specified in the format 'projects/*/locations/*/clusters/*'. 
+ Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompleteIPRotationRequest) Reset() { *m = CompleteIPRotationRequest{} } +func (m *CompleteIPRotationRequest) String() string { return proto.CompactTextString(m) } +func (*CompleteIPRotationRequest) ProtoMessage() {} +func (*CompleteIPRotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{62} +} +func (m *CompleteIPRotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompleteIPRotationRequest.Unmarshal(m, b) +} +func (m *CompleteIPRotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompleteIPRotationRequest.Marshal(b, m, deterministic) +} +func (dst *CompleteIPRotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompleteIPRotationRequest.Merge(dst, src) +} +func (m *CompleteIPRotationRequest) XXX_Size() int { + return xxx_messageInfo_CompleteIPRotationRequest.Size(m) +} +func (m *CompleteIPRotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CompleteIPRotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CompleteIPRotationRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *CompleteIPRotationRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *CompleteIPRotationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// AcceleratorConfig represents a Hardware Accelerator request. +type AcceleratorConfig struct { + // The number of the accelerator cards exposed to an instance. + AcceleratorCount int64 `protobuf:"varint,1,opt,name=accelerator_count,json=acceleratorCount,proto3" json:"accelerator_count,omitempty"` + // The accelerator type resource name. 
List of supported accelerators + // [here](/compute/docs/gpus/#Introduction) + AcceleratorType string `protobuf:"bytes,2,opt,name=accelerator_type,json=acceleratorType,proto3" json:"accelerator_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcceleratorConfig) Reset() { *m = AcceleratorConfig{} } +func (m *AcceleratorConfig) String() string { return proto.CompactTextString(m) } +func (*AcceleratorConfig) ProtoMessage() {} +func (*AcceleratorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{63} +} +func (m *AcceleratorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcceleratorConfig.Unmarshal(m, b) +} +func (m *AcceleratorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcceleratorConfig.Marshal(b, m, deterministic) +} +func (dst *AcceleratorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcceleratorConfig.Merge(dst, src) +} +func (m *AcceleratorConfig) XXX_Size() int { + return xxx_messageInfo_AcceleratorConfig.Size(m) +} +func (m *AcceleratorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_AcceleratorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_AcceleratorConfig proto.InternalMessageInfo + +func (m *AcceleratorConfig) GetAcceleratorCount() int64 { + if m != nil { + return m.AcceleratorCount + } + return 0 +} + +func (m *AcceleratorConfig) GetAcceleratorType() string { + if m != nil { + return m.AcceleratorType + } + return "" +} + +// WorkloadMetadataConfig defines the metadata configuration to expose to +// workloads on the node pool. +type WorkloadMetadataConfig struct { + // NodeMetadata is the configuration for how to expose the node metadata to + // the workload running on the node. + NodeMetadata WorkloadMetadataConfig_NodeMetadata `protobuf:"varint,1,opt,name=node_metadata,json=nodeMetadata,proto3,enum=google.container.v1beta1.WorkloadMetadataConfig_NodeMetadata" json:"node_metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkloadMetadataConfig) Reset() { *m = WorkloadMetadataConfig{} } +func (m *WorkloadMetadataConfig) String() string { return proto.CompactTextString(m) } +func (*WorkloadMetadataConfig) ProtoMessage() {} +func (*WorkloadMetadataConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{64} +} +func (m *WorkloadMetadataConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkloadMetadataConfig.Unmarshal(m, b) +} +func (m *WorkloadMetadataConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkloadMetadataConfig.Marshal(b, m, deterministic) +} +func (dst *WorkloadMetadataConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkloadMetadataConfig.Merge(dst, src) +} +func (m *WorkloadMetadataConfig) XXX_Size() int { + return xxx_messageInfo_WorkloadMetadataConfig.Size(m) +} +func (m *WorkloadMetadataConfig) XXX_DiscardUnknown() { + xxx_messageInfo_WorkloadMetadataConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkloadMetadataConfig proto.InternalMessageInfo + +func (m *WorkloadMetadataConfig) GetNodeMetadata() WorkloadMetadataConfig_NodeMetadata { + if m != nil { + return m.NodeMetadata + } + return WorkloadMetadataConfig_UNSPECIFIED +} + +// SetNetworkPolicyRequest enables/disables network policy for a cluster. 
+type SetNetworkPolicyRequest struct { + // Deprecated. The Google Developers Console [project ID or project + // number](https://developers.google.com/console/help/new/#projectnumber). + // This field has been deprecated and replaced by the name field. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + // This field has been deprecated and replaced by the name field. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` // Deprecated: Do not use. + // Deprecated. The name of the cluster. + // This field has been deprecated and replaced by the name field. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` // Deprecated: Do not use. + // Configuration options for the NetworkPolicy feature. + NetworkPolicy *NetworkPolicy `protobuf:"bytes,4,opt,name=network_policy,json=networkPolicy,proto3" json:"network_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set networking + // policy. Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetNetworkPolicyRequest) Reset() { *m = SetNetworkPolicyRequest{} } +func (m *SetNetworkPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetNetworkPolicyRequest) ProtoMessage() {} +func (*SetNetworkPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{65} +} +func (m *SetNetworkPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetNetworkPolicyRequest.Unmarshal(m, b) +} +func (m *SetNetworkPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetNetworkPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetNetworkPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetNetworkPolicyRequest.Merge(dst, src) +} +func (m *SetNetworkPolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetNetworkPolicyRequest.Size(m) +} +func (m *SetNetworkPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetNetworkPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetNetworkPolicyRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +// Deprecated: Do not use. +func (m *SetNetworkPolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetNetworkPolicyRequest) GetNetworkPolicy() *NetworkPolicy { + if m != nil { + return m.NetworkPolicy + } + return nil +} + +func (m *SetNetworkPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SetMaintenancePolicyRequest sets the maintenance policy for a cluster. +type SetMaintenancePolicyRequest struct { + // The Google Developers Console [project ID or project + // number](https://support.google.com/cloud/answer/6158840). 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the Google Compute Engine + // [zone](/compute/docs/zones#available) in which the cluster + // resides. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The name of the cluster to update. + ClusterId string `protobuf:"bytes,3,opt,name=cluster_id,json=clusterId,proto3" json:"cluster_id,omitempty"` + // The maintenance policy to be set for the cluster. An empty field + // clears the existing maintenance policy. + MaintenancePolicy *MaintenancePolicy `protobuf:"bytes,4,opt,name=maintenance_policy,json=maintenancePolicy,proto3" json:"maintenance_policy,omitempty"` + // The name (project, location, cluster id) of the cluster to set maintenance + // policy. + // Specified in the format 'projects/*/locations/*/clusters/*'. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetMaintenancePolicyRequest) Reset() { *m = SetMaintenancePolicyRequest{} } +func (m *SetMaintenancePolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetMaintenancePolicyRequest) ProtoMessage() {} +func (*SetMaintenancePolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{66} +} +func (m *SetMaintenancePolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetMaintenancePolicyRequest.Unmarshal(m, b) +} +func (m *SetMaintenancePolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetMaintenancePolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetMaintenancePolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetMaintenancePolicyRequest.Merge(dst, src) +} +func (m *SetMaintenancePolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetMaintenancePolicyRequest.Size(m) +} +func (m *SetMaintenancePolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetMaintenancePolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetMaintenancePolicyRequest proto.InternalMessageInfo + +func (m *SetMaintenancePolicyRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetClusterId() string { + if m != nil { + return m.ClusterId + } + return "" +} + +func (m *SetMaintenancePolicyRequest) GetMaintenancePolicy() *MaintenancePolicy { + if m != nil { + return m.MaintenancePolicy + } + return nil +} + +func (m *SetMaintenancePolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ListLocationsRequest is used to request the locations that offer GKE. +type ListLocationsRequest struct { + // Contains the name of the resource requested. + // Specified in the format 'projects/*'. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLocationsRequest) Reset() { *m = ListLocationsRequest{} } +func (m *ListLocationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListLocationsRequest) ProtoMessage() {} +func (*ListLocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{67} +} +func (m *ListLocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLocationsRequest.Unmarshal(m, b) +} +func (m *ListLocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLocationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListLocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLocationsRequest.Merge(dst, src) +} +func (m *ListLocationsRequest) XXX_Size() int { + return xxx_messageInfo_ListLocationsRequest.Size(m) +} +func (m *ListLocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListLocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLocationsRequest proto.InternalMessageInfo + +func (m *ListLocationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +// ListLocationsResponse returns the list of all GKE locations and their +// recommendation state. +type ListLocationsResponse struct { + // A full list of GKE locations. + Locations []*Location `protobuf:"bytes,1,rep,name=locations,proto3" json:"locations,omitempty"` + // Only return ListLocationsResponse that occur after the page_token. This + // value should be populated from the ListLocationsResponse.next_page_token if + // that response token was set (which happens when listing more Locations than + // fit in a single ListLocationsResponse). 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLocationsResponse) Reset() { *m = ListLocationsResponse{} } +func (m *ListLocationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListLocationsResponse) ProtoMessage() {} +func (*ListLocationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{68} +} +func (m *ListLocationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLocationsResponse.Unmarshal(m, b) +} +func (m *ListLocationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLocationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListLocationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLocationsResponse.Merge(dst, src) +} +func (m *ListLocationsResponse) XXX_Size() int { + return xxx_messageInfo_ListLocationsResponse.Size(m) +} +func (m *ListLocationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListLocationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLocationsResponse proto.InternalMessageInfo + +func (m *ListLocationsResponse) GetLocations() []*Location { + if m != nil { + return m.Locations + } + return nil +} + +func (m *ListLocationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Location returns the location name, and if the location is recommended +// for GKE cluster scheduling. +type Location struct { + // Contains the type of location this Location is for. + // Regional or Zonal. + Type Location_LocationType `protobuf:"varint,1,opt,name=type,proto3,enum=google.container.v1beta1.Location_LocationType" json:"type,omitempty"` + // Contains the name of the resource requested. + // Specified in the format 'projects/*/locations/*'. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Whether the location is recomended for GKE cluster scheduling. 
+ Recommended bool `protobuf:"varint,3,opt,name=recommended,proto3" json:"recommended,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{69} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Location.Unmarshal(m, b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Location.Marshal(b, m, deterministic) +} +func (dst *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(dst, src) +} +func (m *Location) XXX_Size() int { + return xxx_messageInfo_Location.Size(m) +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetType() Location_LocationType { + if m != nil { + return m.Type + } + return Location_LOCATION_TYPE_UNSPECIFIED +} + +func (m *Location) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Location) GetRecommended() bool { + if m != nil { + return m.Recommended + } + return false +} + +// StatusCondition describes why a cluster or a node pool has a certain status +// (e.g., ERROR or DEGRADED). +type StatusCondition struct { + // Machine-friendly representation of the condition + Code StatusCondition_Code `protobuf:"varint,1,opt,name=code,proto3,enum=google.container.v1beta1.StatusCondition_Code" json:"code,omitempty"` + // Human-friendly representation of the condition + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StatusCondition) Reset() { *m = StatusCondition{} } +func (m *StatusCondition) String() string { return proto.CompactTextString(m) } +func (*StatusCondition) ProtoMessage() {} +func (*StatusCondition) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{70} +} +func (m *StatusCondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StatusCondition.Unmarshal(m, b) +} +func (m *StatusCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StatusCondition.Marshal(b, m, deterministic) +} +func (dst *StatusCondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_StatusCondition.Merge(dst, src) +} +func (m *StatusCondition) XXX_Size() int { + return xxx_messageInfo_StatusCondition.Size(m) +} +func (m *StatusCondition) XXX_DiscardUnknown() { + xxx_messageInfo_StatusCondition.DiscardUnknown(m) +} + +var xxx_messageInfo_StatusCondition proto.InternalMessageInfo + +func (m *StatusCondition) GetCode() StatusCondition_Code { + if m != nil { + return m.Code + } + return StatusCondition_UNKNOWN +} + +func (m *StatusCondition) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// NetworkConfig reports the relative names of network & subnetwork. +type NetworkConfig struct { + // Output only. The relative name of the Google Compute Engine + // [network][google.container.v1beta1.NetworkConfig.network](/compute/docs/networks-and-firewalls#networks) to which + // the cluster is connected. 
+ // Example: projects/my-project/global/networks/my-network + Network string `protobuf:"bytes,1,opt,name=network,proto3" json:"network,omitempty"` + // Output only. The relative name of the Google Compute Engine + // [subnetwork](/compute/docs/vpc) to which the cluster is connected. + // Example: projects/my-project/regions/us-central1/subnetworks/my-subnet + Subnetwork string `protobuf:"bytes,2,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NetworkConfig) Reset() { *m = NetworkConfig{} } +func (m *NetworkConfig) String() string { return proto.CompactTextString(m) } +func (*NetworkConfig) ProtoMessage() {} +func (*NetworkConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{71} +} +func (m *NetworkConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NetworkConfig.Unmarshal(m, b) +} +func (m *NetworkConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NetworkConfig.Marshal(b, m, deterministic) +} +func (dst *NetworkConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_NetworkConfig.Merge(dst, src) +} +func (m *NetworkConfig) XXX_Size() int { + return xxx_messageInfo_NetworkConfig.Size(m) +} +func (m *NetworkConfig) XXX_DiscardUnknown() { + xxx_messageInfo_NetworkConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_NetworkConfig proto.InternalMessageInfo + +func (m *NetworkConfig) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *NetworkConfig) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +// ListUsableSubnetworksRequest requests the list of usable subnetworks. +// available to a user for creating clusters. +type ListUsableSubnetworksRequest struct { + // The parent project where subnetworks are usable. + // Specified in the format 'projects/*'. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Filtering currently only supports equality on the networkProjectId and must + // be in the form: "networkProjectId=[PROJECTID]", where `networkProjectId` + // is the project which owns the listed subnetworks. This defaults to the + // parent project ID. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The max number of results per page that should be returned. If the number + // of available results is larger than `page_size`, a `next_page_token` is + // returned which can be used to get the next page of results in subsequent + // requests. Acceptable values are 0 to 500, inclusive. (Default: 500) + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Specifies a page token to use. Set this to the nextPageToken returned by + // previous list requests to get the next page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUsableSubnetworksRequest) Reset() { *m = ListUsableSubnetworksRequest{} } +func (m *ListUsableSubnetworksRequest) String() string { return proto.CompactTextString(m) } +func (*ListUsableSubnetworksRequest) ProtoMessage() {} +func (*ListUsableSubnetworksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{72} +} +func (m *ListUsableSubnetworksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUsableSubnetworksRequest.Unmarshal(m, b) +} +func (m *ListUsableSubnetworksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUsableSubnetworksRequest.Marshal(b, m, deterministic) +} +func (dst *ListUsableSubnetworksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUsableSubnetworksRequest.Merge(dst, src) +} +func (m *ListUsableSubnetworksRequest) XXX_Size() int { + return xxx_messageInfo_ListUsableSubnetworksRequest.Size(m) +} +func (m *ListUsableSubnetworksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListUsableSubnetworksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUsableSubnetworksRequest proto.InternalMessageInfo + +func (m *ListUsableSubnetworksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListUsableSubnetworksRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListUsableSubnetworksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListUsableSubnetworksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// ListUsableSubnetworksResponse is the response of +// ListUsableSubnetworksRequest. +type ListUsableSubnetworksResponse struct { + // A list of usable subnetworks in the specified network project. + Subnetworks []*UsableSubnetwork `protobuf:"bytes,1,rep,name=subnetworks,proto3" json:"subnetworks,omitempty"` + // This token allows you to get the next page of results for list requests. + // If the number of results is larger than `page_size`, use the + // `next_page_token` as a value for the query parameter `page_token` in the + // next request. The value will become empty when there are no more pages. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUsableSubnetworksResponse) Reset() { *m = ListUsableSubnetworksResponse{} } +func (m *ListUsableSubnetworksResponse) String() string { return proto.CompactTextString(m) } +func (*ListUsableSubnetworksResponse) ProtoMessage() {} +func (*ListUsableSubnetworksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{73} +} +func (m *ListUsableSubnetworksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUsableSubnetworksResponse.Unmarshal(m, b) +} +func (m *ListUsableSubnetworksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUsableSubnetworksResponse.Marshal(b, m, deterministic) +} +func (dst *ListUsableSubnetworksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUsableSubnetworksResponse.Merge(dst, src) +} +func (m *ListUsableSubnetworksResponse) XXX_Size() int { + return xxx_messageInfo_ListUsableSubnetworksResponse.Size(m) +} +func (m *ListUsableSubnetworksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListUsableSubnetworksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUsableSubnetworksResponse proto.InternalMessageInfo + +func (m *ListUsableSubnetworksResponse) GetSubnetworks() []*UsableSubnetwork { + if m != nil { + return m.Subnetworks + } + return nil +} + +func (m *ListUsableSubnetworksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Secondary IP range of a usable subnetwork. +type UsableSubnetworkSecondaryRange struct { + // The name associated with this subnetwork secondary range, used when adding + // an alias IP range to a VM instance. + RangeName string `protobuf:"bytes,1,opt,name=range_name,json=rangeName,proto3" json:"range_name,omitempty"` + // The range of IP addresses belonging to this subnetwork secondary range. + IpCidrRange string `protobuf:"bytes,2,opt,name=ip_cidr_range,json=ipCidrRange,proto3" json:"ip_cidr_range,omitempty"` + // This field is to determine the status of the secondary range programmably. 
+ Status UsableSubnetworkSecondaryRange_Status `protobuf:"varint,3,opt,name=status,proto3,enum=google.container.v1beta1.UsableSubnetworkSecondaryRange_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UsableSubnetworkSecondaryRange) Reset() { *m = UsableSubnetworkSecondaryRange{} } +func (m *UsableSubnetworkSecondaryRange) String() string { return proto.CompactTextString(m) } +func (*UsableSubnetworkSecondaryRange) ProtoMessage() {} +func (*UsableSubnetworkSecondaryRange) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{74} +} +func (m *UsableSubnetworkSecondaryRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UsableSubnetworkSecondaryRange.Unmarshal(m, b) +} +func (m *UsableSubnetworkSecondaryRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UsableSubnetworkSecondaryRange.Marshal(b, m, deterministic) +} +func (dst *UsableSubnetworkSecondaryRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_UsableSubnetworkSecondaryRange.Merge(dst, src) +} +func (m *UsableSubnetworkSecondaryRange) XXX_Size() int { + return xxx_messageInfo_UsableSubnetworkSecondaryRange.Size(m) +} +func (m *UsableSubnetworkSecondaryRange) XXX_DiscardUnknown() { + xxx_messageInfo_UsableSubnetworkSecondaryRange.DiscardUnknown(m) +} + +var xxx_messageInfo_UsableSubnetworkSecondaryRange proto.InternalMessageInfo + +func (m *UsableSubnetworkSecondaryRange) GetRangeName() string { + if m != nil { + return m.RangeName + } + return "" +} + +func (m *UsableSubnetworkSecondaryRange) GetIpCidrRange() string { + if m != nil { + return m.IpCidrRange + } + return "" +} + +func (m *UsableSubnetworkSecondaryRange) GetStatus() UsableSubnetworkSecondaryRange_Status { + if m != nil { + return m.Status + } + return UsableSubnetworkSecondaryRange_UNKNOWN +} + +// UsableSubnetwork resource returns the subnetwork name, its associated network +// and the primary CIDR range. +type UsableSubnetwork struct { + // Subnetwork Name. + // Example: projects/my-project/regions/us-central1/subnetworks/my-subnet + Subnetwork string `protobuf:"bytes,1,opt,name=subnetwork,proto3" json:"subnetwork,omitempty"` + // Network Name. + // Example: projects/my-project/global/networks/my-network + Network string `protobuf:"bytes,2,opt,name=network,proto3" json:"network,omitempty"` + // The range of internal addresses that are owned by this subnetwork. + IpCidrRange string `protobuf:"bytes,3,opt,name=ip_cidr_range,json=ipCidrRange,proto3" json:"ip_cidr_range,omitempty"` + // Secondary IP ranges. + SecondaryIpRanges []*UsableSubnetworkSecondaryRange `protobuf:"bytes,4,rep,name=secondary_ip_ranges,json=secondaryIpRanges,proto3" json:"secondary_ip_ranges,omitempty"` + // A human readable status message representing the reasons for cases where + // the caller cannot use the secondary ranges under the subnet. For example if + // the secondary_ip_ranges is empty due to a permission issue, an insufficient + // permission message will be given by status_message. 
+ StatusMessage string `protobuf:"bytes,5,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UsableSubnetwork) Reset() { *m = UsableSubnetwork{} } +func (m *UsableSubnetwork) String() string { return proto.CompactTextString(m) } +func (*UsableSubnetwork) ProtoMessage() {} +func (*UsableSubnetwork) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{75} +} +func (m *UsableSubnetwork) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UsableSubnetwork.Unmarshal(m, b) +} +func (m *UsableSubnetwork) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UsableSubnetwork.Marshal(b, m, deterministic) +} +func (dst *UsableSubnetwork) XXX_Merge(src proto.Message) { + xxx_messageInfo_UsableSubnetwork.Merge(dst, src) +} +func (m *UsableSubnetwork) XXX_Size() int { + return xxx_messageInfo_UsableSubnetwork.Size(m) +} +func (m *UsableSubnetwork) XXX_DiscardUnknown() { + xxx_messageInfo_UsableSubnetwork.DiscardUnknown(m) +} + +var xxx_messageInfo_UsableSubnetwork proto.InternalMessageInfo + +func (m *UsableSubnetwork) GetSubnetwork() string { + if m != nil { + return m.Subnetwork + } + return "" +} + +func (m *UsableSubnetwork) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *UsableSubnetwork) GetIpCidrRange() string { + if m != nil { + return m.IpCidrRange + } + return "" +} + +func (m *UsableSubnetwork) GetSecondaryIpRanges() []*UsableSubnetworkSecondaryRange { + if m != nil { + return m.SecondaryIpRanges + } + return nil +} + +func (m *UsableSubnetwork) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +// VerticalPodAutoscaling contains global, per-cluster information +// required by Vertical Pod Autoscaler to automatically adjust +// the resources of pods controlled by it. +type VerticalPodAutoscaling struct { + // Enables vertical pod autoscaling. + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VerticalPodAutoscaling) Reset() { *m = VerticalPodAutoscaling{} } +func (m *VerticalPodAutoscaling) String() string { return proto.CompactTextString(m) } +func (*VerticalPodAutoscaling) ProtoMessage() {} +func (*VerticalPodAutoscaling) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{76} +} +func (m *VerticalPodAutoscaling) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VerticalPodAutoscaling.Unmarshal(m, b) +} +func (m *VerticalPodAutoscaling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VerticalPodAutoscaling.Marshal(b, m, deterministic) +} +func (dst *VerticalPodAutoscaling) XXX_Merge(src proto.Message) { + xxx_messageInfo_VerticalPodAutoscaling.Merge(dst, src) +} +func (m *VerticalPodAutoscaling) XXX_Size() int { + return xxx_messageInfo_VerticalPodAutoscaling.Size(m) +} +func (m *VerticalPodAutoscaling) XXX_DiscardUnknown() { + xxx_messageInfo_VerticalPodAutoscaling.DiscardUnknown(m) +} + +var xxx_messageInfo_VerticalPodAutoscaling proto.InternalMessageInfo + +func (m *VerticalPodAutoscaling) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Constraints applied to pods. 
+type MaxPodsConstraint struct { + // Constraint enforced on the max num of pods per node. + MaxPodsPerNode int64 `protobuf:"varint,1,opt,name=max_pods_per_node,json=maxPodsPerNode,proto3" json:"max_pods_per_node,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaxPodsConstraint) Reset() { *m = MaxPodsConstraint{} } +func (m *MaxPodsConstraint) String() string { return proto.CompactTextString(m) } +func (*MaxPodsConstraint) ProtoMessage() {} +func (*MaxPodsConstraint) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{77} +} +func (m *MaxPodsConstraint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MaxPodsConstraint.Unmarshal(m, b) +} +func (m *MaxPodsConstraint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MaxPodsConstraint.Marshal(b, m, deterministic) +} +func (dst *MaxPodsConstraint) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaxPodsConstraint.Merge(dst, src) +} +func (m *MaxPodsConstraint) XXX_Size() int { + return xxx_messageInfo_MaxPodsConstraint.Size(m) +} +func (m *MaxPodsConstraint) XXX_DiscardUnknown() { + xxx_messageInfo_MaxPodsConstraint.DiscardUnknown(m) +} + +var xxx_messageInfo_MaxPodsConstraint proto.InternalMessageInfo + +func (m *MaxPodsConstraint) GetMaxPodsPerNode() int64 { + if m != nil { + return m.MaxPodsPerNode + } + return 0 +} + +// Configuration for exporting cluster resource usages. +type ResourceUsageExportConfig struct { + // Configuration to use BigQuery as usage export destination. + BigqueryDestination *ResourceUsageExportConfig_BigQueryDestination `protobuf:"bytes,1,opt,name=bigquery_destination,json=bigqueryDestination,proto3" json:"bigquery_destination,omitempty"` + // Whether to enable network egress metering for this cluster. If enabled, a + // daemonset will be created in the cluster to meter network egress traffic. 
+ EnableNetworkEgressMetering bool `protobuf:"varint,2,opt,name=enable_network_egress_metering,json=enableNetworkEgressMetering,proto3" json:"enable_network_egress_metering,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceUsageExportConfig) Reset() { *m = ResourceUsageExportConfig{} } +func (m *ResourceUsageExportConfig) String() string { return proto.CompactTextString(m) } +func (*ResourceUsageExportConfig) ProtoMessage() {} +func (*ResourceUsageExportConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{78} +} +func (m *ResourceUsageExportConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceUsageExportConfig.Unmarshal(m, b) +} +func (m *ResourceUsageExportConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceUsageExportConfig.Marshal(b, m, deterministic) +} +func (dst *ResourceUsageExportConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceUsageExportConfig.Merge(dst, src) +} +func (m *ResourceUsageExportConfig) XXX_Size() int { + return xxx_messageInfo_ResourceUsageExportConfig.Size(m) +} +func (m *ResourceUsageExportConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceUsageExportConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceUsageExportConfig proto.InternalMessageInfo + +func (m *ResourceUsageExportConfig) GetBigqueryDestination() *ResourceUsageExportConfig_BigQueryDestination { + if m != nil { + return m.BigqueryDestination + } + return nil +} + +func (m *ResourceUsageExportConfig) GetEnableNetworkEgressMetering() bool { + if m != nil { + return m.EnableNetworkEgressMetering + } + return false +} + +// Parameters for using BigQuery as the destination of resource usage export. +type ResourceUsageExportConfig_BigQueryDestination struct { + // The ID of a BigQuery Dataset. 
+ DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceUsageExportConfig_BigQueryDestination) Reset() { + *m = ResourceUsageExportConfig_BigQueryDestination{} +} +func (m *ResourceUsageExportConfig_BigQueryDestination) String() string { + return proto.CompactTextString(m) +} +func (*ResourceUsageExportConfig_BigQueryDestination) ProtoMessage() {} +func (*ResourceUsageExportConfig_BigQueryDestination) Descriptor() ([]byte, []int) { + return fileDescriptor_cluster_service_b093026a38af0865, []int{78, 0} +} +func (m *ResourceUsageExportConfig_BigQueryDestination) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination.Unmarshal(m, b) +} +func (m *ResourceUsageExportConfig_BigQueryDestination) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination.Marshal(b, m, deterministic) +} +func (dst *ResourceUsageExportConfig_BigQueryDestination) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination.Merge(dst, src) +} +func (m *ResourceUsageExportConfig_BigQueryDestination) XXX_Size() int { + return xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination.Size(m) +} +func (m *ResourceUsageExportConfig_BigQueryDestination) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceUsageExportConfig_BigQueryDestination proto.InternalMessageInfo + +func (m *ResourceUsageExportConfig_BigQueryDestination) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func init() { + proto.RegisterType((*NodeConfig)(nil), "google.container.v1beta1.NodeConfig") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1beta1.NodeConfig.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1beta1.NodeConfig.MetadataEntry") + proto.RegisterType((*NodeTaint)(nil), "google.container.v1beta1.NodeTaint") + proto.RegisterType((*MasterAuth)(nil), "google.container.v1beta1.MasterAuth") + proto.RegisterType((*ClientCertificateConfig)(nil), "google.container.v1beta1.ClientCertificateConfig") + proto.RegisterType((*AddonsConfig)(nil), "google.container.v1beta1.AddonsConfig") + proto.RegisterType((*HttpLoadBalancing)(nil), "google.container.v1beta1.HttpLoadBalancing") + proto.RegisterType((*HorizontalPodAutoscaling)(nil), "google.container.v1beta1.HorizontalPodAutoscaling") + proto.RegisterType((*KubernetesDashboard)(nil), "google.container.v1beta1.KubernetesDashboard") + proto.RegisterType((*NetworkPolicyConfig)(nil), "google.container.v1beta1.NetworkPolicyConfig") + proto.RegisterType((*PrivateClusterConfig)(nil), "google.container.v1beta1.PrivateClusterConfig") + proto.RegisterType((*IstioConfig)(nil), "google.container.v1beta1.IstioConfig") + proto.RegisterType((*CloudRunConfig)(nil), "google.container.v1beta1.CloudRunConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig)(nil), "google.container.v1beta1.MasterAuthorizedNetworksConfig") + proto.RegisterType((*MasterAuthorizedNetworksConfig_CidrBlock)(nil), "google.container.v1beta1.MasterAuthorizedNetworksConfig.CidrBlock") + proto.RegisterType((*LegacyAbac)(nil), "google.container.v1beta1.LegacyAbac") + proto.RegisterType((*NetworkPolicy)(nil), 
"google.container.v1beta1.NetworkPolicy") + proto.RegisterType((*IPAllocationPolicy)(nil), "google.container.v1beta1.IPAllocationPolicy") + proto.RegisterType((*BinaryAuthorization)(nil), "google.container.v1beta1.BinaryAuthorization") + proto.RegisterType((*PodSecurityPolicyConfig)(nil), "google.container.v1beta1.PodSecurityPolicyConfig") + proto.RegisterType((*Cluster)(nil), "google.container.v1beta1.Cluster") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1beta1.Cluster.ResourceLabelsEntry") + proto.RegisterType((*ClusterUpdate)(nil), "google.container.v1beta1.ClusterUpdate") + proto.RegisterType((*Operation)(nil), "google.container.v1beta1.Operation") + proto.RegisterType((*OperationProgress)(nil), "google.container.v1beta1.OperationProgress") + proto.RegisterType((*OperationProgress_Metric)(nil), "google.container.v1beta1.OperationProgress.Metric") + proto.RegisterType((*CreateClusterRequest)(nil), "google.container.v1beta1.CreateClusterRequest") + proto.RegisterType((*GetClusterRequest)(nil), "google.container.v1beta1.GetClusterRequest") + proto.RegisterType((*UpdateClusterRequest)(nil), "google.container.v1beta1.UpdateClusterRequest") + proto.RegisterType((*UpdateNodePoolRequest)(nil), "google.container.v1beta1.UpdateNodePoolRequest") + proto.RegisterType((*SetNodePoolAutoscalingRequest)(nil), "google.container.v1beta1.SetNodePoolAutoscalingRequest") + proto.RegisterType((*SetLoggingServiceRequest)(nil), "google.container.v1beta1.SetLoggingServiceRequest") + proto.RegisterType((*SetMonitoringServiceRequest)(nil), "google.container.v1beta1.SetMonitoringServiceRequest") + proto.RegisterType((*SetAddonsConfigRequest)(nil), "google.container.v1beta1.SetAddonsConfigRequest") + proto.RegisterType((*SetLocationsRequest)(nil), "google.container.v1beta1.SetLocationsRequest") + proto.RegisterType((*UpdateMasterRequest)(nil), "google.container.v1beta1.UpdateMasterRequest") + proto.RegisterType((*SetMasterAuthRequest)(nil), "google.container.v1beta1.SetMasterAuthRequest") + proto.RegisterType((*DeleteClusterRequest)(nil), "google.container.v1beta1.DeleteClusterRequest") + proto.RegisterType((*ListClustersRequest)(nil), "google.container.v1beta1.ListClustersRequest") + proto.RegisterType((*ListClustersResponse)(nil), "google.container.v1beta1.ListClustersResponse") + proto.RegisterType((*GetOperationRequest)(nil), "google.container.v1beta1.GetOperationRequest") + proto.RegisterType((*ListOperationsRequest)(nil), "google.container.v1beta1.ListOperationsRequest") + proto.RegisterType((*CancelOperationRequest)(nil), "google.container.v1beta1.CancelOperationRequest") + proto.RegisterType((*ListOperationsResponse)(nil), "google.container.v1beta1.ListOperationsResponse") + proto.RegisterType((*GetServerConfigRequest)(nil), "google.container.v1beta1.GetServerConfigRequest") + proto.RegisterType((*ServerConfig)(nil), "google.container.v1beta1.ServerConfig") + proto.RegisterType((*CreateNodePoolRequest)(nil), "google.container.v1beta1.CreateNodePoolRequest") + proto.RegisterType((*DeleteNodePoolRequest)(nil), "google.container.v1beta1.DeleteNodePoolRequest") + proto.RegisterType((*ListNodePoolsRequest)(nil), "google.container.v1beta1.ListNodePoolsRequest") + proto.RegisterType((*GetNodePoolRequest)(nil), "google.container.v1beta1.GetNodePoolRequest") + proto.RegisterType((*NodePool)(nil), "google.container.v1beta1.NodePool") + proto.RegisterType((*NodeManagement)(nil), "google.container.v1beta1.NodeManagement") + proto.RegisterType((*AutoUpgradeOptions)(nil), 
"google.container.v1beta1.AutoUpgradeOptions") + proto.RegisterType((*MaintenancePolicy)(nil), "google.container.v1beta1.MaintenancePolicy") + proto.RegisterType((*MaintenanceWindow)(nil), "google.container.v1beta1.MaintenanceWindow") + proto.RegisterType((*DailyMaintenanceWindow)(nil), "google.container.v1beta1.DailyMaintenanceWindow") + proto.RegisterType((*SetNodePoolManagementRequest)(nil), "google.container.v1beta1.SetNodePoolManagementRequest") + proto.RegisterType((*SetNodePoolSizeRequest)(nil), "google.container.v1beta1.SetNodePoolSizeRequest") + proto.RegisterType((*RollbackNodePoolUpgradeRequest)(nil), "google.container.v1beta1.RollbackNodePoolUpgradeRequest") + proto.RegisterType((*ListNodePoolsResponse)(nil), "google.container.v1beta1.ListNodePoolsResponse") + proto.RegisterType((*ClusterAutoscaling)(nil), "google.container.v1beta1.ClusterAutoscaling") + proto.RegisterType((*ResourceLimit)(nil), "google.container.v1beta1.ResourceLimit") + proto.RegisterType((*NodePoolAutoscaling)(nil), "google.container.v1beta1.NodePoolAutoscaling") + proto.RegisterType((*SetLabelsRequest)(nil), "google.container.v1beta1.SetLabelsRequest") + proto.RegisterMapType((map[string]string)(nil), "google.container.v1beta1.SetLabelsRequest.ResourceLabelsEntry") + proto.RegisterType((*SetLegacyAbacRequest)(nil), "google.container.v1beta1.SetLegacyAbacRequest") + proto.RegisterType((*StartIPRotationRequest)(nil), "google.container.v1beta1.StartIPRotationRequest") + proto.RegisterType((*CompleteIPRotationRequest)(nil), "google.container.v1beta1.CompleteIPRotationRequest") + proto.RegisterType((*AcceleratorConfig)(nil), "google.container.v1beta1.AcceleratorConfig") + proto.RegisterType((*WorkloadMetadataConfig)(nil), "google.container.v1beta1.WorkloadMetadataConfig") + proto.RegisterType((*SetNetworkPolicyRequest)(nil), "google.container.v1beta1.SetNetworkPolicyRequest") + proto.RegisterType((*SetMaintenancePolicyRequest)(nil), "google.container.v1beta1.SetMaintenancePolicyRequest") + proto.RegisterType((*ListLocationsRequest)(nil), "google.container.v1beta1.ListLocationsRequest") + proto.RegisterType((*ListLocationsResponse)(nil), "google.container.v1beta1.ListLocationsResponse") + proto.RegisterType((*Location)(nil), "google.container.v1beta1.Location") + proto.RegisterType((*StatusCondition)(nil), "google.container.v1beta1.StatusCondition") + proto.RegisterType((*NetworkConfig)(nil), "google.container.v1beta1.NetworkConfig") + proto.RegisterType((*ListUsableSubnetworksRequest)(nil), "google.container.v1beta1.ListUsableSubnetworksRequest") + proto.RegisterType((*ListUsableSubnetworksResponse)(nil), "google.container.v1beta1.ListUsableSubnetworksResponse") + proto.RegisterType((*UsableSubnetworkSecondaryRange)(nil), "google.container.v1beta1.UsableSubnetworkSecondaryRange") + proto.RegisterType((*UsableSubnetwork)(nil), "google.container.v1beta1.UsableSubnetwork") + proto.RegisterType((*VerticalPodAutoscaling)(nil), "google.container.v1beta1.VerticalPodAutoscaling") + proto.RegisterType((*MaxPodsConstraint)(nil), "google.container.v1beta1.MaxPodsConstraint") + proto.RegisterType((*ResourceUsageExportConfig)(nil), "google.container.v1beta1.ResourceUsageExportConfig") + proto.RegisterType((*ResourceUsageExportConfig_BigQueryDestination)(nil), "google.container.v1beta1.ResourceUsageExportConfig.BigQueryDestination") + proto.RegisterEnum("google.container.v1beta1.NodeTaint_Effect", NodeTaint_Effect_name, NodeTaint_Effect_value) + proto.RegisterEnum("google.container.v1beta1.IstioConfig_IstioAuthMode", 
IstioConfig_IstioAuthMode_name, IstioConfig_IstioAuthMode_value) + proto.RegisterEnum("google.container.v1beta1.NetworkPolicy_Provider", NetworkPolicy_Provider_name, NetworkPolicy_Provider_value) + proto.RegisterEnum("google.container.v1beta1.Cluster_Status", Cluster_Status_name, Cluster_Status_value) + proto.RegisterEnum("google.container.v1beta1.Operation_Status", Operation_Status_name, Operation_Status_value) + proto.RegisterEnum("google.container.v1beta1.Operation_Type", Operation_Type_name, Operation_Type_value) + proto.RegisterEnum("google.container.v1beta1.SetMasterAuthRequest_Action", SetMasterAuthRequest_Action_name, SetMasterAuthRequest_Action_value) + proto.RegisterEnum("google.container.v1beta1.NodePool_Status", NodePool_Status_name, NodePool_Status_value) + proto.RegisterEnum("google.container.v1beta1.WorkloadMetadataConfig_NodeMetadata", WorkloadMetadataConfig_NodeMetadata_name, WorkloadMetadataConfig_NodeMetadata_value) + proto.RegisterEnum("google.container.v1beta1.Location_LocationType", Location_LocationType_name, Location_LocationType_value) + proto.RegisterEnum("google.container.v1beta1.StatusCondition_Code", StatusCondition_Code_name, StatusCondition_Code_value) + proto.RegisterEnum("google.container.v1beta1.UsableSubnetworkSecondaryRange_Status", UsableSubnetworkSecondaryRange_Status_name, UsableSubnetworkSecondaryRange_Status_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterManagerClient is the client API for ClusterManager service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterManagerClient interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) + // Gets the details for a specific cluster. + GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. + CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the settings for a specific cluster. + UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the version and/or image type of a specific node pool. + UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the autoscaling settings of a specific node pool. 
+ SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the logging service for a specific cluster. + SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the monitoring service for a specific cluster. + SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the addons for a specific cluster. + SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the locations for a specific cluster. + SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) + // Updates the master for a specific cluster. + UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password for a specific cluster. + // This can be either via password generation or explicitly set. + // Modify basic_auth.csv and reset the K8S API server. + SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. + DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) + // Cancels the specified operation. + CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns configuration info about the Kubernetes Engine service. + GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes a node pool from a cluster. + DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be an no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets labels on a cluster. 
+ SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. + SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) + // Start master IP rotation. + StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the size for a specific node pool. + SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) + // Sets the maintenance policy for a cluster. + SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) + // Lists subnetworks that are usable for creating clusters in a project. + ListUsableSubnetworks(ctx context.Context, in *ListUsableSubnetworksRequest, opts ...grpc.CallOption) (*ListUsableSubnetworksResponse, error) + // Used to fetch locations that offer GKE. + ListLocations(ctx context.Context, in *ListLocationsRequest, opts ...grpc.CallOption) (*ListLocationsResponse, error) +} + +type clusterManagerClient struct { + cc *grpc.ClientConn +} + +func NewClusterManagerClient(cc *grpc.ClientConn) ClusterManagerClient { + return &clusterManagerClient{cc} +} + +func (c *clusterManagerClient) ListClusters(ctx context.Context, in *ListClustersRequest, opts ...grpc.CallOption) (*ListClustersResponse, error) { + out := new(ListClustersResponse) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/ListClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetCluster(ctx context.Context, in *GetClusterRequest, opts ...grpc.CallOption) (*Cluster, error) { + out := new(Cluster) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/GetCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateCluster(ctx context.Context, in *CreateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/CreateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateCluster(ctx context.Context, in *UpdateClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/UpdateCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateNodePool(ctx context.Context, in *UpdateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/UpdateNodePool", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolAutoscaling(ctx context.Context, in *SetNodePoolAutoscalingRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetNodePoolAutoscaling", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLoggingService(ctx context.Context, in *SetLoggingServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetLoggingService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMonitoringService(ctx context.Context, in *SetMonitoringServiceRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetMonitoringService", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetAddonsConfig(ctx context.Context, in *SetAddonsConfigRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetAddonsConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLocations(ctx context.Context, in *SetLocationsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetLocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) UpdateMaster(ctx context.Context, in *UpdateMasterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/UpdateMaster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMasterAuth(ctx context.Context, in *SetMasterAuthRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetMasterAuth", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteCluster(ctx context.Context, in *DeleteClusterRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/DeleteCluster", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) { + out := new(ListOperationsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/ListOperations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/GetOperation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/CancelOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetServerConfig(ctx context.Context, in *GetServerConfigRequest, opts ...grpc.CallOption) (*ServerConfig, error) { + out := new(ServerConfig) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/GetServerConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListNodePools(ctx context.Context, in *ListNodePoolsRequest, opts ...grpc.CallOption) (*ListNodePoolsResponse, error) { + out := new(ListNodePoolsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/ListNodePools", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) GetNodePool(ctx context.Context, in *GetNodePoolRequest, opts ...grpc.CallOption) (*NodePool, error) { + out := new(NodePool) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/GetNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CreateNodePool(ctx context.Context, in *CreateNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/CreateNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) DeleteNodePool(ctx context.Context, in *DeleteNodePoolRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/DeleteNodePool", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) RollbackNodePoolUpgrade(ctx context.Context, in *RollbackNodePoolUpgradeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/RollbackNodePoolUpgrade", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolManagement(ctx context.Context, in *SetNodePoolManagementRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetNodePoolManagement", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLabels(ctx context.Context, in *SetLabelsRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetLabels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetLegacyAbac(ctx context.Context, in *SetLegacyAbacRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetLegacyAbac", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) StartIPRotation(ctx context.Context, in *StartIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/StartIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) CompleteIPRotation(ctx context.Context, in *CompleteIPRotationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/CompleteIPRotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNodePoolSize(ctx context.Context, in *SetNodePoolSizeRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetNodePoolSize", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetNetworkPolicy(ctx context.Context, in *SetNetworkPolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetNetworkPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) SetMaintenancePolicy(ctx context.Context, in *SetMaintenancePolicyRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/SetMaintenancePolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListUsableSubnetworks(ctx context.Context, in *ListUsableSubnetworksRequest, opts ...grpc.CallOption) (*ListUsableSubnetworksResponse, error) { + out := new(ListUsableSubnetworksResponse) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/ListUsableSubnetworks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *clusterManagerClient) ListLocations(ctx context.Context, in *ListLocationsRequest, opts ...grpc.CallOption) (*ListLocationsResponse, error) { + out := new(ListLocationsResponse) + err := c.cc.Invoke(ctx, "/google.container.v1beta1.ClusterManager/ListLocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterManagerServer is the server API for ClusterManager service. +type ClusterManagerServer interface { + // Lists all clusters owned by a project in either the specified zone or all + // zones. + ListClusters(context.Context, *ListClustersRequest) (*ListClustersResponse, error) + // Gets the details for a specific cluster. + GetCluster(context.Context, *GetClusterRequest) (*Cluster, error) + // Creates a cluster, consisting of the specified number and type of Google + // Compute Engine instances. + // + // By default, the cluster is created in the project's + // [default network](/compute/docs/networks-and-firewalls#networks). + // + // One firewall is added for the cluster. After cluster creation, + // the cluster creates routes for each node to allow the containers + // on that node to communicate with all other instances in the + // cluster. + // + // Finally, an entry is added to the project's global metadata indicating + // which CIDR range is being used by the cluster. 
+ CreateCluster(context.Context, *CreateClusterRequest) (*Operation, error) + // Updates the settings for a specific cluster. + UpdateCluster(context.Context, *UpdateClusterRequest) (*Operation, error) + // Updates the version and/or image type of a specific node pool. + UpdateNodePool(context.Context, *UpdateNodePoolRequest) (*Operation, error) + // Sets the autoscaling settings of a specific node pool. + SetNodePoolAutoscaling(context.Context, *SetNodePoolAutoscalingRequest) (*Operation, error) + // Sets the logging service for a specific cluster. + SetLoggingService(context.Context, *SetLoggingServiceRequest) (*Operation, error) + // Sets the monitoring service for a specific cluster. + SetMonitoringService(context.Context, *SetMonitoringServiceRequest) (*Operation, error) + // Sets the addons for a specific cluster. + SetAddonsConfig(context.Context, *SetAddonsConfigRequest) (*Operation, error) + // Sets the locations for a specific cluster. + SetLocations(context.Context, *SetLocationsRequest) (*Operation, error) + // Updates the master for a specific cluster. + UpdateMaster(context.Context, *UpdateMasterRequest) (*Operation, error) + // Used to set master auth materials. Currently supports :- + // Changing the admin password for a specific cluster. + // This can be either via password generation or explicitly set. + // Modify basic_auth.csv and reset the K8S API server. + SetMasterAuth(context.Context, *SetMasterAuthRequest) (*Operation, error) + // Deletes the cluster, including the Kubernetes endpoint and all worker + // nodes. + // + // Firewalls and routes that were configured during cluster creation + // are also deleted. + // + // Other Google Compute Engine resources that might be in use by the cluster + // (e.g. load balancer resources) will not be deleted if they weren't present + // at the initial create time. + DeleteCluster(context.Context, *DeleteClusterRequest) (*Operation, error) + // Lists all operations in a project in a specific zone or all zones. + ListOperations(context.Context, *ListOperationsRequest) (*ListOperationsResponse, error) + // Gets the specified operation. + GetOperation(context.Context, *GetOperationRequest) (*Operation, error) + // Cancels the specified operation. + CancelOperation(context.Context, *CancelOperationRequest) (*empty.Empty, error) + // Returns configuration info about the Kubernetes Engine service. + GetServerConfig(context.Context, *GetServerConfigRequest) (*ServerConfig, error) + // Lists the node pools for a cluster. + ListNodePools(context.Context, *ListNodePoolsRequest) (*ListNodePoolsResponse, error) + // Retrieves the node pool requested. + GetNodePool(context.Context, *GetNodePoolRequest) (*NodePool, error) + // Creates a node pool for a cluster. + CreateNodePool(context.Context, *CreateNodePoolRequest) (*Operation, error) + // Deletes a node pool from a cluster. + DeleteNodePool(context.Context, *DeleteNodePoolRequest) (*Operation, error) + // Roll back the previously Aborted or Failed NodePool upgrade. + // This will be an no-op if the last upgrade successfully completed. + RollbackNodePoolUpgrade(context.Context, *RollbackNodePoolUpgradeRequest) (*Operation, error) + // Sets the NodeManagement options for a node pool. + SetNodePoolManagement(context.Context, *SetNodePoolManagementRequest) (*Operation, error) + // Sets labels on a cluster. + SetLabels(context.Context, *SetLabelsRequest) (*Operation, error) + // Enables or disables the ABAC authorization mechanism on a cluster. 
+ SetLegacyAbac(context.Context, *SetLegacyAbacRequest) (*Operation, error) + // Start master IP rotation. + StartIPRotation(context.Context, *StartIPRotationRequest) (*Operation, error) + // Completes master IP rotation. + CompleteIPRotation(context.Context, *CompleteIPRotationRequest) (*Operation, error) + // Sets the size for a specific node pool. + SetNodePoolSize(context.Context, *SetNodePoolSizeRequest) (*Operation, error) + // Enables/Disables Network Policy for a cluster. + SetNetworkPolicy(context.Context, *SetNetworkPolicyRequest) (*Operation, error) + // Sets the maintenance policy for a cluster. + SetMaintenancePolicy(context.Context, *SetMaintenancePolicyRequest) (*Operation, error) + // Lists subnetworks that are usable for creating clusters in a project. + ListUsableSubnetworks(context.Context, *ListUsableSubnetworksRequest) (*ListUsableSubnetworksResponse, error) + // Used to fetch locations that offer GKE. + ListLocations(context.Context, *ListLocationsRequest) (*ListLocationsResponse, error) +} + +func RegisterClusterManagerServer(s *grpc.Server, srv ClusterManagerServer) { + s.RegisterService(&_ClusterManager_serviceDesc, srv) +} + +func _ClusterManager_ListClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListClustersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/ListClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListClusters(ctx, req.(*ListClustersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/GetCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetCluster(ctx, req.(*GetClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/CreateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateCluster(ctx, req.(*CreateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ClusterManagerServer).UpdateCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/UpdateCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateCluster(ctx, req.(*UpdateClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/UpdateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateNodePool(ctx, req.(*UpdateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolAutoscaling_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolAutoscalingRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetNodePoolAutoscaling", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolAutoscaling(ctx, req.(*SetNodePoolAutoscalingRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLoggingService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLoggingServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLoggingService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetLoggingService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLoggingService(ctx, req.(*SetLoggingServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMonitoringService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMonitoringServiceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetMonitoringService", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMonitoringService(ctx, req.(*SetMonitoringServiceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetAddonsConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetAddonsConfigRequest) + if err := dec(in); err != nil { + 
return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetAddonsConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetAddonsConfig(ctx, req.(*SetAddonsConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetLocations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLocations(ctx, req.(*SetLocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_UpdateMaster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateMasterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).UpdateMaster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/UpdateMaster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).UpdateMaster(ctx, req.(*UpdateMasterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMasterAuth_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMasterAuthRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetMasterAuth", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMasterAuth(ctx, req.(*SetMasterAuthRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_DeleteCluster_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteClusterRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteCluster(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/DeleteCluster", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteCluster(ctx, req.(*DeleteClusterRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListOperations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOperationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ClusterManagerServer).ListOperations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/ListOperations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListOperations(ctx, req.(*ListOperationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/GetOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetOperation(ctx, req.(*GetOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CancelOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CancelOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/CancelOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CancelOperation(ctx, req.(*CancelOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetServerConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServerConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetServerConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/GetServerConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetServerConfig(ctx, req.(*GetServerConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListNodePools_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNodePoolsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListNodePools(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/ListNodePools", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListNodePools(ctx, req.(*ListNodePoolsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_GetNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).GetNodePool(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/GetNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).GetNodePool(ctx, req.(*GetNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CreateNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CreateNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/CreateNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CreateNodePool(ctx, req.(*CreateNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_DeleteNodePool_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNodePoolRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/DeleteNodePool", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).DeleteNodePool(ctx, req.(*DeleteNodePoolRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_RollbackNodePoolUpgrade_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackNodePoolUpgradeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/RollbackNodePoolUpgrade", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).RollbackNodePoolUpgrade(ctx, req.(*RollbackNodePoolUpgradeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolManagement_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolManagementRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetNodePoolManagement", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolManagement(ctx, req.(*SetNodePoolManagementRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLabels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLabelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ClusterManagerServer).SetLabels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetLabels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLabels(ctx, req.(*SetLabelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetLegacyAbac_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetLegacyAbacRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetLegacyAbac(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetLegacyAbac", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetLegacyAbac(ctx, req.(*SetLegacyAbacRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_StartIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(StartIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).StartIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/StartIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).StartIPRotation(ctx, req.(*StartIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_CompleteIPRotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CompleteIPRotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/CompleteIPRotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).CompleteIPRotation(ctx, req.(*CompleteIPRotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNodePoolSize_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNodePoolSizeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetNodePoolSize", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNodePoolSize(ctx, req.(*SetNodePoolSizeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetNetworkPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetNetworkPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ClusterManagerServer).SetNetworkPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetNetworkPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetNetworkPolicy(ctx, req.(*SetNetworkPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_SetMaintenancePolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetMaintenancePolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/SetMaintenancePolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).SetMaintenancePolicy(ctx, req.(*SetMaintenancePolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListUsableSubnetworks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListUsableSubnetworksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListUsableSubnetworks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/ListUsableSubnetworks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListUsableSubnetworks(ctx, req.(*ListUsableSubnetworksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ClusterManager_ListLocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListLocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterManagerServer).ListLocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.container.v1beta1.ClusterManager/ListLocations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterManagerServer).ListLocations(ctx, req.(*ListLocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClusterManager_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.container.v1beta1.ClusterManager", + HandlerType: (*ClusterManagerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListClusters", + Handler: _ClusterManager_ListClusters_Handler, + }, + { + MethodName: "GetCluster", + Handler: _ClusterManager_GetCluster_Handler, + }, + { + MethodName: "CreateCluster", + Handler: _ClusterManager_CreateCluster_Handler, + }, + { + MethodName: "UpdateCluster", + Handler: _ClusterManager_UpdateCluster_Handler, + }, + { + MethodName: "UpdateNodePool", + Handler: _ClusterManager_UpdateNodePool_Handler, + }, + { + MethodName: "SetNodePoolAutoscaling", + Handler: _ClusterManager_SetNodePoolAutoscaling_Handler, + }, + { + MethodName: "SetLoggingService", + Handler: _ClusterManager_SetLoggingService_Handler, + }, + { + MethodName: "SetMonitoringService", + Handler: _ClusterManager_SetMonitoringService_Handler, + }, + { + MethodName: 
"SetAddonsConfig", + Handler: _ClusterManager_SetAddonsConfig_Handler, + }, + { + MethodName: "SetLocations", + Handler: _ClusterManager_SetLocations_Handler, + }, + { + MethodName: "UpdateMaster", + Handler: _ClusterManager_UpdateMaster_Handler, + }, + { + MethodName: "SetMasterAuth", + Handler: _ClusterManager_SetMasterAuth_Handler, + }, + { + MethodName: "DeleteCluster", + Handler: _ClusterManager_DeleteCluster_Handler, + }, + { + MethodName: "ListOperations", + Handler: _ClusterManager_ListOperations_Handler, + }, + { + MethodName: "GetOperation", + Handler: _ClusterManager_GetOperation_Handler, + }, + { + MethodName: "CancelOperation", + Handler: _ClusterManager_CancelOperation_Handler, + }, + { + MethodName: "GetServerConfig", + Handler: _ClusterManager_GetServerConfig_Handler, + }, + { + MethodName: "ListNodePools", + Handler: _ClusterManager_ListNodePools_Handler, + }, + { + MethodName: "GetNodePool", + Handler: _ClusterManager_GetNodePool_Handler, + }, + { + MethodName: "CreateNodePool", + Handler: _ClusterManager_CreateNodePool_Handler, + }, + { + MethodName: "DeleteNodePool", + Handler: _ClusterManager_DeleteNodePool_Handler, + }, + { + MethodName: "RollbackNodePoolUpgrade", + Handler: _ClusterManager_RollbackNodePoolUpgrade_Handler, + }, + { + MethodName: "SetNodePoolManagement", + Handler: _ClusterManager_SetNodePoolManagement_Handler, + }, + { + MethodName: "SetLabels", + Handler: _ClusterManager_SetLabels_Handler, + }, + { + MethodName: "SetLegacyAbac", + Handler: _ClusterManager_SetLegacyAbac_Handler, + }, + { + MethodName: "StartIPRotation", + Handler: _ClusterManager_StartIPRotation_Handler, + }, + { + MethodName: "CompleteIPRotation", + Handler: _ClusterManager_CompleteIPRotation_Handler, + }, + { + MethodName: "SetNodePoolSize", + Handler: _ClusterManager_SetNodePoolSize_Handler, + }, + { + MethodName: "SetNetworkPolicy", + Handler: _ClusterManager_SetNetworkPolicy_Handler, + }, + { + MethodName: "SetMaintenancePolicy", + Handler: _ClusterManager_SetMaintenancePolicy_Handler, + }, + { + MethodName: "ListUsableSubnetworks", + Handler: _ClusterManager_ListUsableSubnetworks_Handler, + }, + { + MethodName: "ListLocations", + Handler: _ClusterManager_ListLocations_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/container/v1beta1/cluster_service.proto", +} + +func init() { + proto.RegisterFile("google/container/v1beta1/cluster_service.proto", fileDescriptor_cluster_service_b093026a38af0865) +} + +var fileDescriptor_cluster_service_b093026a38af0865 = []byte{ + // 7032 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5d, 0x6b, 0x6c, 0x23, 0xd7, + 0x75, 0xf6, 0x50, 0x94, 0x44, 0x1d, 0x49, 0x14, 0x75, 0xa5, 0xd5, 0x72, 0xb9, 0x0f, 0xef, 0x8e, + 0x5f, 0xeb, 0xb5, 0x2d, 0x79, 0xd7, 0x8e, 0xed, 0xf8, 0x11, 0x87, 0xa2, 0xc6, 0x5a, 0x61, 0x25, + 0x92, 0x1e, 0x52, 0xbb, 0xb6, 0x62, 0x64, 0x3a, 0x22, 0xef, 0x52, 0xe3, 0x25, 0x67, 0xc6, 0x33, + 0xc3, 0x7d, 0x19, 0x1b, 0xd4, 0x79, 0xb4, 0xe9, 0x23, 0x49, 0xf3, 0xea, 0x23, 0x4d, 0xd3, 0x34, + 0x88, 0x93, 0x34, 0x29, 0x1a, 0x04, 0x45, 0x90, 0x36, 0x6d, 0xd3, 0xfe, 0x69, 0x81, 0xa6, 0x40, + 0x0b, 0xb4, 0x3f, 0xfa, 0xa3, 0x0d, 0xda, 0x1f, 0x29, 0x8a, 0xb6, 0x68, 0x81, 0xf6, 0x4f, 0x7f, + 0xb5, 0x28, 0xee, 0x6b, 0x1e, 0xe4, 0xf0, 0x21, 0x6a, 0xad, 0xf8, 0xd7, 0x6a, 0xce, 0xbd, 0xe7, + 0x3e, 0xce, 0x3d, 0xf7, 0xdc, 0xef, 0x9c, 0x7b, 0x2e, 0x17, 0x96, 0x1b, 0x96, 0xd5, 0x68, 0xe2, + 0x95, 0x9a, 0x65, 0x7a, 0xba, 0x61, 0x62, 0x67, 0xe5, 0xfa, 0xf9, 0x5d, 0xec, 
0xe9, 0xe7, 0x57, + 0x6a, 0xcd, 0xb6, 0xeb, 0x61, 0x47, 0x73, 0xb1, 0x73, 0xdd, 0xa8, 0xe1, 0x65, 0xdb, 0xb1, 0x3c, + 0x0b, 0x65, 0x59, 0xfd, 0x65, 0xbf, 0xfe, 0x32, 0xaf, 0x9f, 0x3b, 0xc1, 0x5b, 0xd2, 0x6d, 0x63, + 0x45, 0x37, 0x4d, 0xcb, 0xd3, 0x3d, 0xc3, 0x32, 0x5d, 0xc6, 0x97, 0x3b, 0xc5, 0x4b, 0x0d, 0xbd, + 0xb5, 0x72, 0xfd, 0x3c, 0xf9, 0x47, 0xb3, 0xad, 0xa6, 0x51, 0xbb, 0xc5, 0xcb, 0x73, 0xd1, 0xf2, + 0x48, 0xd9, 0x71, 0x5e, 0x46, 0xbf, 0x76, 0xdb, 0x57, 0x57, 0x70, 0xcb, 0xf6, 0x78, 0xa1, 0xfc, + 0x1f, 0x13, 0x00, 0x45, 0xab, 0x8e, 0x0b, 0x96, 0x79, 0xd5, 0x68, 0xa0, 0x33, 0x30, 0xd3, 0xd2, + 0x6b, 0x7b, 0x86, 0x89, 0x35, 0xef, 0x96, 0x8d, 0xb3, 0xd2, 0x69, 0xe9, 0xec, 0x94, 0x3a, 0xcd, + 0x69, 0xd5, 0x5b, 0x36, 0x46, 0xa7, 0x61, 0xa6, 0x6e, 0xb8, 0xd7, 0x34, 0xd7, 0xb8, 0x8d, 0xb5, + 0xc6, 0x6e, 0x36, 0x71, 0x5a, 0x3a, 0x3b, 0xae, 0x02, 0xa1, 0x55, 0x8c, 0xdb, 0x78, 0x7d, 0x97, + 0x34, 0x62, 0xe9, 0x6d, 0x6f, 0x4f, 0x73, 0x6b, 0x96, 0x8d, 0xdd, 0xec, 0xd8, 0xe9, 0x31, 0xd2, + 0x08, 0xa5, 0x55, 0x28, 0x09, 0x3d, 0x04, 0x73, 0x5c, 0x30, 0x9a, 0x5e, 0xab, 0x59, 0x6d, 0xd3, + 0xcb, 0x4e, 0xd1, 0xae, 0xd2, 0x9c, 0x9c, 0x67, 0x54, 0x54, 0x84, 0x54, 0x0b, 0x7b, 0x7a, 0x5d, + 0xf7, 0xf4, 0x6c, 0xf2, 0xf4, 0xd8, 0xd9, 0xe9, 0x0b, 0x17, 0x96, 0x7b, 0xc9, 0x70, 0x39, 0x98, + 0xc8, 0xf2, 0x16, 0x67, 0x52, 0x4c, 0xcf, 0xb9, 0xa5, 0xfa, 0x6d, 0xa0, 0x93, 0x00, 0x46, 0x4b, + 0x6f, 0xf0, 0xe9, 0x8d, 0xd3, 0x3e, 0xa7, 0x28, 0x85, 0x4e, 0xee, 0x22, 0x4c, 0x34, 0xf5, 0x5d, + 0xdc, 0x74, 0xb3, 0x13, 0xb4, 0xb3, 0xc7, 0x87, 0xea, 0x6c, 0x93, 0xb2, 0xb0, 0xae, 0x38, 0x3f, + 0x7a, 0x10, 0xe6, 0x9a, 0x56, 0x4d, 0x6f, 0x6a, 0xae, 0x5b, 0xd7, 0xd8, 0x0c, 0x27, 0xa9, 0xa4, + 0x66, 0x29, 0xb9, 0xe2, 0xd6, 0x0b, 0x74, 0x82, 0x08, 0x92, 0x9e, 0xde, 0x70, 0xb3, 0x29, 0x2a, + 0x24, 0xfa, 0x37, 0x3a, 0x0d, 0xd3, 0xb6, 0x83, 0xc9, 0x32, 0x19, 0xbb, 0x4d, 0x9c, 0x85, 0xd3, + 0xd2, 0xd9, 0x94, 0x1a, 0x26, 0xa1, 0x12, 0xcc, 0xe8, 0xb5, 0x1a, 0x6e, 0x62, 0x47, 0xf7, 0x2c, + 0xc7, 0xcd, 0x4e, 0xd3, 0xd1, 0x3e, 0xd2, 0x7b, 0xb4, 0xf9, 0xa0, 0x36, 0x1b, 0xb4, 0x1a, 0x69, + 0x00, 0x1d, 0x87, 0x29, 0xba, 0xaa, 0x54, 0x2c, 0x33, 0x54, 0x2c, 0x29, 0x42, 0xa0, 0x52, 0x39, + 0x0b, 0x99, 0x96, 0x61, 0x6a, 0x35, 0xbb, 0xad, 0xd9, 0x4d, 0xdd, 0xbb, 0x6a, 0x39, 0xad, 0xec, + 0x2c, 0x5b, 0xae, 0x96, 0x61, 0x16, 0xec, 0x76, 0x99, 0x53, 0xd1, 0xeb, 0x90, 0xbd, 0x61, 0x39, + 0xd7, 0x9a, 0x96, 0x5e, 0xd7, 0x84, 0xcc, 0xb5, 0x1a, 0xed, 0x30, 0x9b, 0x3e, 0x2d, 0xf5, 0x97, + 0xe8, 0x15, 0xce, 0x29, 0xd6, 0x8e, 0x0f, 0x74, 0xe9, 0x46, 0x2c, 0x1d, 0x3d, 0x07, 0x13, 0xa4, + 0x01, 0xcf, 0xcd, 0xce, 0xd1, 0xd9, 0xdf, 0xd7, 0x7f, 0xad, 0xaa, 0xa4, 0xae, 0xca, 0x59, 0x72, + 0xcf, 0xc1, 0x6c, 0x44, 0x45, 0x50, 0x06, 0xc6, 0xae, 0xe1, 0x5b, 0x5c, 0xe1, 0xc9, 0x9f, 0x68, + 0x11, 0xc6, 0xaf, 0xeb, 0xcd, 0x36, 0xa6, 0x1a, 0x3e, 0xa5, 0xb2, 0x8f, 0x67, 0x13, 0xcf, 0x48, + 0xb9, 0xf7, 0xc2, 0x74, 0x68, 0xc9, 0xf7, 0xc3, 0x2a, 0xff, 0x8d, 0x04, 0x53, 0xfe, 0x68, 0x86, + 0xe5, 0x44, 0xab, 0x30, 0x81, 0xaf, 0x5e, 0xc5, 0x35, 0x2f, 0x3b, 0x76, 0x5a, 0x3a, 0x9b, 0xbe, + 0x70, 0x6e, 0x88, 0xa9, 0x2e, 0x2b, 0x94, 0x43, 0xe5, 0x9c, 0xf2, 0xab, 0x30, 0xc1, 0x28, 0x68, + 0x09, 0x90, 0xf2, 0xd2, 0x4b, 0x4a, 0xa1, 0xaa, 0x6d, 0x17, 0x2b, 0x65, 0xa5, 0xb0, 0xf1, 0xd2, + 0x86, 0xb2, 0x96, 0xb9, 0x07, 0xcd, 0xc1, 0x74, 0xb1, 0xa4, 0x55, 0x0a, 0x17, 0x95, 0xb5, 0xed, + 0x4d, 0x25, 0x23, 0x91, 0x8a, 0x65, 0x55, 0x79, 0x49, 0x51, 0xb5, 0x30, 0x3d, 0x81, 0xd2, 0x00, + 0xc5, 0x92, 0xa6, 0xbc, 0xa2, 0x14, 0xb6, 0xab, 0x4a, 0x66, 0x4c, 0xfe, 0x5e, 0x02, 0x60, 0x4b, + 0x27, 
0xe6, 0x2e, 0xdf, 0xf6, 0xf6, 0x50, 0x0e, 0x52, 0x6d, 0x17, 0x3b, 0xa6, 0xde, 0x12, 0x06, + 0xc4, 0xff, 0x26, 0x65, 0xb6, 0xee, 0xba, 0x37, 0x2c, 0xa7, 0xce, 0xa7, 0xe8, 0x7f, 0xa3, 0x16, + 0x1c, 0xab, 0x35, 0x0d, 0x6c, 0x7a, 0x5a, 0x0d, 0x3b, 0x9e, 0x71, 0xd5, 0xa8, 0xe9, 0x1e, 0x16, + 0xda, 0x33, 0x46, 0xb5, 0xe7, 0x7c, 0xef, 0x89, 0x17, 0x28, 0x6b, 0x21, 0xe0, 0xe4, 0xea, 0x73, + 0xb4, 0x16, 0x5f, 0x80, 0x9e, 0x84, 0x25, 0x61, 0xa4, 0x6b, 0x7a, 0xb8, 0xcb, 0x6c, 0x9d, 0x0e, + 0x6c, 0x91, 0x97, 0x16, 0xf4, 0x10, 0x2f, 0x7a, 0x0c, 0x50, 0xf7, 0x20, 0xb3, 0x98, 0x72, 0xcc, + 0x77, 0x75, 0x45, 0xec, 0x0d, 0xaf, 0x4e, 0x16, 0xfa, 0x2a, 0xb3, 0x37, 0x8c, 0x72, 0x09, 0xdf, + 0x92, 0x2b, 0x70, 0xb4, 0xc7, 0xb8, 0xd1, 0x33, 0x90, 0x35, 0x5c, 0xb7, 0x8d, 0xb5, 0x98, 0xee, + 0x24, 0x6a, 0x11, 0x96, 0x68, 0x79, 0x17, 0xbf, 0xfc, 0xa7, 0x49, 0x98, 0xc9, 0xd7, 0xeb, 0x96, + 0xe9, 0xf2, 0xa6, 0x3e, 0x00, 0x0b, 0x7b, 0x9e, 0x67, 0x6b, 0x74, 0x5b, 0xee, 0xea, 0x4d, 0xdd, + 0xac, 0x19, 0x66, 0x83, 0xb6, 0xd2, 0xd7, 0x68, 0x5c, 0xf4, 0x3c, 0x7b, 0xd3, 0xd2, 0xeb, 0xab, + 0x82, 0x45, 0x9d, 0xdf, 0xeb, 0x24, 0x21, 0x1b, 0x72, 0x7b, 0x96, 0x63, 0xdc, 0x26, 0xdc, 0x4d, + 0xcd, 0xb6, 0xea, 0x9a, 0xde, 0xf6, 0x2c, 0xb7, 0xa6, 0x37, 0x49, 0x1f, 0x09, 0xda, 0x47, 0x1f, + 0x9b, 0x7d, 0xd1, 0xe7, 0x2d, 0x5b, 0xf5, 0x7c, 0xc0, 0xa9, 0x66, 0xf7, 0x7a, 0x94, 0xa0, 0x9f, + 0x82, 0xc5, 0x6b, 0xed, 0x5d, 0xec, 0x98, 0xd8, 0xc3, 0xae, 0x56, 0xd7, 0xdd, 0xbd, 0x5d, 0x4b, + 0x77, 0xea, 0x5c, 0x45, 0x1e, 0xeb, 0xdd, 0xd7, 0x25, 0x9f, 0x6b, 0x4d, 0x30, 0xa9, 0x0b, 0xd7, + 0xba, 0x89, 0x48, 0x87, 0x23, 0x26, 0xf6, 0x88, 0xdd, 0xe1, 0xc7, 0xac, 0xd0, 0xc2, 0xe4, 0xa0, + 0x2e, 0x8a, 0x8c, 0xad, 0x4c, 0xb9, 0xb8, 0x06, 0x2e, 0x98, 0xdd, 0x44, 0x74, 0x11, 0x66, 0x0c, + 0xd7, 0x33, 0x2c, 0xd1, 0xf2, 0x38, 0x6d, 0xf9, 0x81, 0xde, 0x2d, 0x6f, 0x90, 0xda, 0xbc, 0xc5, + 0x69, 0x23, 0xf8, 0x40, 0x2a, 0x64, 0x6a, 0x4d, 0xab, 0x5d, 0xd7, 0x9c, 0xb6, 0x29, 0x5a, 0x9b, + 0xa4, 0xad, 0x9d, 0xed, 0xb7, 0x5b, 0xac, 0x76, 0x5d, 0x6d, 0x9b, 0xbc, 0xc1, 0x74, 0x2d, 0xf2, + 0x2d, 0xaf, 0xc0, 0x7c, 0xd7, 0xe2, 0x93, 0xbd, 0x5b, 0x37, 0x5c, 0x7d, 0xb7, 0x89, 0xeb, 0x5c, + 0x03, 0xfd, 0x6f, 0xf9, 0x29, 0xc8, 0xf6, 0x5a, 0xc9, 0xbe, 0x7c, 0xe7, 0x61, 0x21, 0x66, 0x55, + 0x06, 0xb1, 0xc4, 0x48, 0xb9, 0x2f, 0xcb, 0xcf, 0x25, 0x60, 0xb1, 0xec, 0x18, 0xd7, 0xc9, 0xee, + 0xe2, 0x9b, 0x9a, 0x31, 0x3d, 0x0e, 0x8b, 0xd8, 0x24, 0x75, 0x34, 0x9b, 0x15, 0x6b, 0xa6, 0x55, + 0xc7, 0x2e, 0x6f, 0x00, 0xb1, 0x32, 0xce, 0x49, 0x2c, 0xab, 0x8b, 0x9e, 0x82, 0xa3, 0x1d, 0x1c, + 0xd8, 0xac, 0xdb, 0x96, 0x61, 0x7a, 0x54, 0xd7, 0x53, 0xea, 0x91, 0x08, 0x93, 0xc2, 0x0b, 0xd1, + 0x13, 0xb0, 0xd4, 0xa2, 0x26, 0x52, 0x33, 0xec, 0xeb, 0x4f, 0x6a, 0x35, 0xa3, 0xee, 0x68, 0xbb, + 0x4d, 0xab, 0x76, 0x8d, 0xaa, 0xed, 0x94, 0xba, 0xc0, 0x4a, 0x37, 0xec, 0xeb, 0x4f, 0x16, 0x8c, + 0xba, 0xb3, 0x4a, 0x8a, 0xd0, 0xc3, 0x90, 0xe9, 0xea, 0x25, 0x49, 0xab, 0xcf, 0xd9, 0x1d, 0xed, + 0x3f, 0x04, 0x73, 0x76, 0x7b, 0xb7, 0x69, 0xd4, 0x82, 0x9a, 0x0c, 0xdd, 0xa4, 0x19, 0x59, 0x54, + 0x94, 0xbf, 0x2e, 0xc1, 0x74, 0x48, 0x97, 0xfa, 0xc9, 0x0d, 0xad, 0x43, 0x92, 0x80, 0x36, 0x3a, + 0xb3, 0xf4, 0x85, 0x27, 0x86, 0x52, 0x4e, 0xf6, 0x37, 0x39, 0x08, 0xb6, 0xac, 0x3a, 0x56, 0x69, + 0x03, 0xf2, 0x13, 0x30, 0x1b, 0x21, 0xa3, 0x59, 0x98, 0xca, 0x6f, 0x57, 0x2f, 0x6a, 0xc5, 0x52, + 0x51, 0xc9, 0xdc, 0x83, 0x16, 0x60, 0x8e, 0x7e, 0x6e, 0x6d, 0x57, 0xb7, 0xf3, 0x9b, 0x5a, 0x75, + 0xb3, 0x92, 0x91, 0xe4, 0x47, 0x21, 0x1d, 0x55, 0xd3, 0xbe, 0x6b, 0xfc, 0x9f, 0x12, 0x9c, 0x0a, + 0x0e, 0x21, 0xa2, 0x8b, 0xb8, 
0xce, 0xf5, 0x44, 0xd8, 0xc1, 0x2c, 0x4c, 0xb2, 0xc5, 0x11, 0xdc, + 0xe2, 0x13, 0xd5, 0x60, 0x3a, 0x58, 0x11, 0x37, 0x9b, 0xa0, 0x80, 0x62, 0xb5, 0xf7, 0x7c, 0xfb, + 0x77, 0xb4, 0xec, 0xaf, 0xa0, 0x0a, 0x35, 0xf1, 0xa7, 0x9b, 0xdb, 0x82, 0xa9, 0x60, 0x69, 0xcf, + 0x50, 0x18, 0x6d, 0x37, 0xf5, 0x5b, 0x5a, 0xe8, 0xa0, 0x9c, 0xe6, 0xb4, 0x22, 0x39, 0x2b, 0xc9, + 0xd9, 0x11, 0xa8, 0x49, 0x82, 0x9f, 0x1d, 0xa2, 0x05, 0xf9, 0x41, 0x80, 0x4d, 0xdc, 0xd0, 0x6b, + 0xb7, 0xf2, 0xbb, 0x7a, 0xad, 0xf7, 0xdc, 0xe4, 0xdf, 0x96, 0x60, 0x36, 0xb2, 0x61, 0xd0, 0x26, + 0xa4, 0x6c, 0xc7, 0xba, 0x6e, 0xd4, 0xb1, 0x43, 0x2b, 0xa7, 0xfb, 0xe2, 0xdc, 0x30, 0xeb, 0x72, + 0x99, 0xf3, 0xa9, 0x7e, 0x0b, 0xe1, 0x9e, 0x13, 0xd1, 0x9e, 0x1f, 0x87, 0x54, 0x39, 0xa8, 0xb5, + 0x58, 0x56, 0x4b, 0x97, 0x37, 0xd6, 0x14, 0xb5, 0x03, 0x76, 0x00, 0x4c, 0x14, 0xf2, 0x9b, 0x1b, + 0x85, 0x52, 0x46, 0x92, 0xbf, 0x3c, 0x0e, 0x68, 0xa3, 0x9c, 0x6f, 0x12, 0x8c, 0x4c, 0xfc, 0x1f, + 0x3e, 0xe0, 0xfb, 0x21, 0xdd, 0x76, 0xb1, 0x66, 0xd8, 0x9a, 0xde, 0x34, 0x74, 0xd7, 0xdf, 0xa0, + 0x33, 0x6d, 0x17, 0x6f, 0xd8, 0x79, 0x46, 0x43, 0x8f, 0xc0, 0x7c, 0xcd, 0xc1, 0x64, 0xb3, 0xb8, + 0xed, 0x5d, 0x6e, 0x73, 0xf9, 0x90, 0x32, 0xac, 0xa0, 0xe2, 0xd3, 0xa9, 0x07, 0xe2, 0x7f, 0xb1, + 0x25, 0x18, 0xe3, 0x1e, 0x88, 0x4f, 0xa6, 0xab, 0xb0, 0x0c, 0xf3, 0x02, 0x26, 0xf8, 0x3b, 0x97, + 0x6d, 0xc2, 0xd5, 0x44, 0x56, 0x52, 0xe7, 0x78, 0xa1, 0xd8, 0xb8, 0xe8, 0x2c, 0xa4, 0x89, 0x0d, + 0x09, 0x55, 0x1e, 0xf7, 0x2b, 0xcf, 0x90, 0x12, 0xbf, 0xe6, 0xe3, 0x80, 0xb8, 0xb7, 0xe3, 0x86, + 0x6a, 0x4f, 0xf8, 0xb5, 0x33, 0xa2, 0xd4, 0xe7, 0x78, 0x11, 0x4e, 0x04, 0x7e, 0x65, 0xcd, 0x32, + 0xeb, 0xba, 0x73, 0x4b, 0x73, 0x74, 0xb3, 0x81, 0xd9, 0x0c, 0x26, 0xe9, 0x0c, 0x8e, 0xf1, 0x3a, + 0x15, 0x51, 0x45, 0x25, 0x35, 0xe8, 0x64, 0xf2, 0x70, 0xd2, 0xef, 0x32, 0xb6, 0x85, 0x14, 0x6d, + 0x21, 0x27, 0x2a, 0xc5, 0x34, 0xf1, 0x1e, 0x38, 0xda, 0x25, 0x0f, 0xae, 0xa2, 0x53, 0x11, 0xdc, + 0x14, 0x35, 0x65, 0x2b, 0xb0, 0x18, 0x15, 0x0b, 0xe7, 0x01, 0x86, 0x9c, 0xc2, 0x82, 0x61, 0x0c, + 0x4f, 0x43, 0xb6, 0x5b, 0x3a, 0x9c, 0x69, 0x9a, 0x32, 0x1d, 0xe9, 0x94, 0x0f, 0x63, 0x5c, 0x86, + 0x05, 0xbd, 0xd9, 0xb4, 0x6e, 0x68, 0x8e, 0xd5, 0xf6, 0xb0, 0x66, 0x5d, 0xc7, 0x4e, 0x53, 0xb7, + 0xa9, 0x53, 0x93, 0x52, 0xe7, 0x69, 0x91, 0x4a, 0x4a, 0x4a, 0xac, 0x00, 0x3d, 0x06, 0x0b, 0x9e, + 0xdd, 0xee, 0xea, 0x83, 0x39, 0x38, 0x19, 0xcf, 0x6e, 0x47, 0x9a, 0x97, 0x57, 0x60, 0x61, 0xd5, + 0x30, 0x75, 0xe7, 0x96, 0xd8, 0xfd, 0x54, 0x51, 0xfb, 0xec, 0xbf, 0x27, 0xe0, 0x68, 0xd9, 0xaa, + 0x57, 0x70, 0xad, 0xed, 0x18, 0xde, 0xad, 0xc8, 0x99, 0xd5, 0x9b, 0xe9, 0x9f, 0xb3, 0x30, 0xc9, + 0x8f, 0x2a, 0xe2, 0x22, 0x86, 0x4c, 0x04, 0xfd, 0x9b, 0xb8, 0x88, 0x75, 0xec, 0xd6, 0x1c, 0xc3, + 0x26, 0xbd, 0x73, 0xe3, 0x10, 0x26, 0xa1, 0x47, 0x01, 0x19, 0xa6, 0xe1, 0x19, 0x7a, 0x93, 0x9e, + 0x69, 0xdc, 0x07, 0x1d, 0xa3, 0x3e, 0x68, 0x86, 0x97, 0x30, 0x1f, 0x96, 0xb8, 0xa1, 0x0a, 0x4c, + 0xf3, 0x5a, 0x21, 0x9c, 0x73, 0xff, 0x30, 0xde, 0xaf, 0x0a, 0x66, 0x10, 0x3f, 0x50, 0x60, 0x9a, + 0x9f, 0x72, 0xf4, 0xdc, 0x18, 0x1f, 0xd4, 0x4c, 0x60, 0x47, 0x55, 0x68, 0x05, 0x1e, 0xc4, 0x43, + 0xc4, 0x79, 0x6e, 0x34, 0x0c, 0xb3, 0x21, 0xe2, 0x27, 0x6c, 0x5b, 0xa8, 0x69, 0x4e, 0xae, 0x30, + 0x2a, 0x41, 0xe3, 0x2d, 0xcb, 0x34, 0x3c, 0xcb, 0x09, 0xd7, 0x65, 0xdb, 0x60, 0x3e, 0x28, 0x11, + 0xd5, 0xb3, 0x30, 0x29, 0xec, 0x02, 0x53, 0x74, 0xf1, 0x89, 0xce, 0xc5, 0xed, 0x72, 0xa6, 0xcf, + 0x5d, 0x3b, 0xfc, 0x12, 0xcc, 0xea, 0x14, 0x5e, 0x0b, 0x69, 0x01, 0x9d, 0xe6, 0x83, 0x7d, 0xbc, + 0xef, 0x10, 0x1a, 0x57, 0x67, 0xf4, 0x30, 0x36, 0x3f, 
0x05, 0x10, 0xb2, 0x56, 0x4c, 0xb1, 0x43, + 0x14, 0x94, 0x07, 0x2a, 0x5f, 0xcd, 0xb6, 0xac, 0xa6, 0x9b, 0x9d, 0xa1, 0x07, 0x93, 0xdc, 0x7f, + 0x5d, 0xca, 0x96, 0xd5, 0x54, 0xa7, 0x4c, 0xfe, 0x97, 0x8b, 0x4e, 0xc0, 0x94, 0xb0, 0xa7, 0x6e, + 0x76, 0x96, 0xc6, 0x19, 0x02, 0x42, 0x08, 0xd0, 0x84, 0x40, 0xb5, 0xde, 0xb4, 0xf7, 0x74, 0xea, + 0xb1, 0xfb, 0x80, 0x26, 0x80, 0x69, 0x79, 0x52, 0x88, 0x3e, 0x08, 0x73, 0x0e, 0x76, 0xad, 0xb6, + 0x53, 0xc3, 0x1a, 0x8f, 0x99, 0x30, 0x3f, 0xfc, 0x3d, 0xfd, 0x50, 0x27, 0x95, 0xe4, 0xb2, 0xca, + 0x19, 0xc3, 0x81, 0x93, 0xb4, 0x13, 0x21, 0x12, 0x6b, 0x4e, 0x9b, 0xd5, 0xae, 0x1a, 0x66, 0x03, + 0x3b, 0xb6, 0x43, 0x20, 0x4d, 0x86, 0x6d, 0x4a, 0x5a, 0xf0, 0x52, 0x40, 0x27, 0x7a, 0xd7, 0xa4, + 0x67, 0xa1, 0xa6, 0xef, 0xea, 0xb5, 0x2c, 0x1a, 0xa4, 0x77, 0xc1, 0xc1, 0xa9, 0x42, 0x33, 0x38, + 0x44, 0x8b, 0x90, 0x8e, 0xe2, 0xfe, 0xec, 0x02, 0x6d, 0xe9, 0xa1, 0x21, 0x8f, 0x47, 0x75, 0x36, + 0x02, 0xf5, 0xd1, 0x07, 0x61, 0x91, 0x9e, 0x59, 0x42, 0xda, 0xa2, 0xd5, 0x45, 0xda, 0xea, 0xa3, + 0x7d, 0xf0, 0x54, 0xd7, 0x19, 0xa8, 0x22, 0xc3, 0xee, 0x3a, 0x17, 0x3f, 0x22, 0xc1, 0x99, 0xd0, + 0x7e, 0x63, 0x58, 0x44, 0xe3, 0x63, 0xf0, 0xd5, 0x73, 0x89, 0xf6, 0xf6, 0xcc, 0xa8, 0x68, 0x46, + 0x3d, 0xd5, 0xea, 0x0f, 0xab, 0x76, 0x00, 0xb5, 0x74, 0xc3, 0xf4, 0xb0, 0xa9, 0x9b, 0x35, 0x2c, + 0xe6, 0x78, 0x74, 0x90, 0x77, 0xb9, 0x15, 0xf0, 0xf0, 0x29, 0xce, 0xb7, 0x3a, 0x49, 0xc4, 0xd7, + 0xdb, 0xa5, 0xd6, 0xd6, 0x9f, 0x20, 0x9d, 0x7f, 0x36, 0x3b, 0xc8, 0x11, 0x8b, 0xb1, 0xd1, 0xea, + 0xc2, 0x6e, 0x8c, 0xe1, 0x36, 0x21, 0x47, 0x9c, 0x56, 0x97, 0xdb, 0xe7, 0x0e, 0x87, 0xef, 0xd8, + 0xa0, 0xb0, 0x43, 0x0f, 0xd3, 0xae, 0x1e, 0xb5, 0x7b, 0xd8, 0xfc, 0x22, 0x4c, 0x87, 0x1d, 0xe4, + 0xdc, 0x20, 0x55, 0xe0, 0x7b, 0x26, 0xec, 0x1a, 0x87, 0x1b, 0x08, 0xeb, 0x2c, 0x1f, 0xf3, 0xf1, + 0x21, 0x75, 0x96, 0x8f, 0x54, 0xe8, 0x2c, 0x1f, 0xdf, 0x23, 0x20, 0x7c, 0x0b, 0x8d, 0x1b, 0xbe, + 0xec, 0x09, 0x62, 0x07, 0x28, 0x24, 0x49, 0xdb, 0x11, 0x2f, 0x0a, 0x3d, 0xdd, 0xd3, 0xab, 0x39, + 0xe9, 0xc3, 0x98, 0x58, 0xcf, 0xe6, 0x75, 0x38, 0x5e, 0xc7, 0x57, 0xf5, 0x76, 0xd3, 0xd3, 0x5a, + 0xfa, 0x4d, 0xcd, 0xb6, 0xea, 0x54, 0x5d, 0x5d, 0xcf, 0x21, 0x2a, 0x90, 0x3d, 0x35, 0x58, 0x79, + 0x6e, 0x96, 0xad, 0x3a, 0xd1, 0x40, 0xce, 0xa2, 0x66, 0x79, 0x7b, 0x5d, 0x25, 0xc8, 0x83, 0x13, + 0xbe, 0xa5, 0x6a, 0xbb, 0x7a, 0x03, 0x6b, 0xf8, 0xa6, 0x6d, 0x39, 0x9e, 0x90, 0xd7, 0x19, 0xda, + 0x59, 0x1f, 0xef, 0x46, 0x98, 0xab, 0x6d, 0xc2, 0xac, 0x50, 0x5e, 0x2e, 0xbb, 0x63, 0x4e, 0xaf, + 0x22, 0x54, 0x87, 0xa5, 0x0e, 0x39, 0x8a, 0xfe, 0x1e, 0xa0, 0xfd, 0x2d, 0xf7, 0xd1, 0xa9, 0x18, + 0x57, 0x55, 0x5d, 0xb4, 0xe3, 0x1c, 0xd8, 0xd7, 0x21, 0x7b, 0x1d, 0x3b, 0x9e, 0x51, 0x8b, 0x89, + 0xbd, 0x3c, 0x34, 0x28, 0xe0, 0x7a, 0x99, 0x73, 0x76, 0x44, 0x5e, 0x96, 0xae, 0xc7, 0xd2, 0xd1, + 0x71, 0x98, 0x72, 0x71, 0xf3, 0xaa, 0xd6, 0x34, 0xcc, 0x6b, 0x3c, 0x46, 0x96, 0x22, 0x84, 0x4d, + 0xc3, 0xbc, 0x86, 0x96, 0x20, 0x79, 0xdb, 0x32, 0x79, 0x24, 0x8c, 0xae, 0x3b, 0xfd, 0x26, 0x2e, + 0x9b, 0xef, 0x90, 0xb2, 0xf0, 0x97, 0xff, 0x4d, 0x8e, 0x1e, 0x01, 0x51, 0x84, 0x88, 0xae, 0x63, + 0xc7, 0x25, 0xfb, 0xbb, 0xc1, 0x10, 0x1e, 0x2f, 0xe6, 0x73, 0xbe, 0xcc, 0x0a, 0x69, 0xe4, 0xae, + 0xed, 0x38, 0xd8, 0x24, 0xca, 0x13, 0x61, 0xdb, 0xe3, 0x08, 0x94, 0x95, 0x32, 0xbb, 0x16, 0x70, + 0x09, 0x3a, 0x03, 0x44, 0x82, 0xc7, 0xf0, 0x47, 0x8c, 0x78, 0x39, 0x39, 0x44, 0x05, 0xd7, 0xbd, + 0x30, 0xcd, 0x9d, 0x0a, 0xcf, 0x68, 0xe1, 0xec, 0xeb, 0xec, 0x80, 0x66, 0xa4, 0xaa, 0xd1, 0xc2, + 0xe8, 0xfd, 0x30, 0xe1, 0x7a, 0xba, 0xd7, 0x76, 0xb3, 0xd7, 0xa8, 0x2b, 0x75, 
0x76, 0xf0, 0xf1, + 0x57, 0xa1, 0xf5, 0x55, 0xce, 0x87, 0x1e, 0x80, 0x34, 0xfb, 0x4b, 0x6b, 0x61, 0x97, 0xe8, 0x51, + 0xb6, 0x49, 0x7b, 0x99, 0x65, 0xd4, 0x2d, 0x46, 0x24, 0x38, 0xb5, 0x03, 0x41, 0xbb, 0xc6, 0x6d, + 0x9c, 0x6d, 0x31, 0x44, 0x17, 0x06, 0xd0, 0x15, 0xe3, 0x36, 0x26, 0xf8, 0x2f, 0xc6, 0xbb, 0x30, + 0xd9, 0x01, 0xda, 0xe5, 0x59, 0x5c, 0x80, 0x05, 0xc3, 0x74, 0x3d, 0x6a, 0xc0, 0x1b, 0x8e, 0xd5, + 0xb6, 0xb5, 0xb6, 0xd3, 0x74, 0xb3, 0x16, 0x41, 0x0b, 0x54, 0x36, 0xf3, 0xa2, 0x78, 0x9d, 0x94, + 0x6e, 0x3b, 0x4d, 0x97, 0xf8, 0x2f, 0x11, 0x81, 0x32, 0x84, 0x69, 0x93, 0xf1, 0x30, 0xff, 0x25, + 0x24, 0x4e, 0x86, 0x32, 0xef, 0x85, 0x69, 0x7c, 0xd3, 0x36, 0x1c, 0x2e, 0xcc, 0x37, 0x98, 0x30, + 0x19, 0x89, 0x0a, 0x33, 0x07, 0x29, 0x71, 0xc4, 0x65, 0x1d, 0xa6, 0x2d, 0xe2, 0x9b, 0xb8, 0xc3, + 0x1c, 0xa8, 0x78, 0x76, 0x3b, 0xeb, 0x52, 0x6c, 0x32, 0xc5, 0x28, 0x55, 0xbb, 0xdd, 0x0b, 0xc6, + 0x7b, 0xf1, 0x30, 0x1e, 0x6d, 0x00, 0x10, 0xd7, 0xc6, 0x60, 0xa8, 0xe8, 0x3a, 0x45, 0x2e, 0x0f, + 0xf7, 0x5e, 0x3a, 0xb6, 0x64, 0x05, 0xc1, 0xa1, 0x86, 0x98, 0x73, 0x79, 0x58, 0x88, 0x01, 0x34, + 0xfb, 0xba, 0x16, 0xb8, 0x01, 0x13, 0xac, 0x07, 0xb4, 0x04, 0xa8, 0x52, 0xcd, 0x57, 0xb7, 0x2b, + 0x1d, 0x5e, 0x72, 0x06, 0x66, 0xa8, 0xff, 0x5c, 0xd9, 0x28, 0x15, 0x37, 0x8a, 0xeb, 0x19, 0x09, + 0x4d, 0xc3, 0xa4, 0xba, 0x5d, 0xa4, 0x1f, 0x09, 0x34, 0x07, 0xd3, 0xaa, 0x52, 0x28, 0x15, 0x0b, + 0x1b, 0x9b, 0x84, 0x30, 0x86, 0x66, 0x20, 0x55, 0xa9, 0x96, 0xca, 0x65, 0xf2, 0x95, 0x44, 0x53, + 0x30, 0xae, 0xa8, 0x6a, 0x49, 0xcd, 0x8c, 0x93, 0x82, 0x35, 0x65, 0x5d, 0xcd, 0xaf, 0x29, 0x6b, + 0x99, 0x09, 0xf9, 0x0f, 0x01, 0x66, 0xb9, 0x5a, 0x6e, 0xdb, 0x75, 0xdd, 0xc3, 0xe8, 0x71, 0x58, + 0xac, 0x63, 0xd7, 0x70, 0x08, 0x90, 0x08, 0x6f, 0x13, 0x16, 0x77, 0x42, 0xbc, 0x2c, 0xbc, 0x45, + 0x9e, 0x87, 0x9c, 0xe0, 0x88, 0x01, 0xe3, 0x2c, 0x0a, 0x95, 0xe5, 0x35, 0xb6, 0xba, 0x30, 0xf9, + 0x0e, 0x1c, 0x11, 0xdc, 0x51, 0x54, 0x3d, 0xb1, 0x2f, 0x54, 0xbd, 0xc0, 0x1b, 0x89, 0x04, 0xbe, + 0x57, 0x3a, 0xe6, 0x42, 0x40, 0xb4, 0x66, 0xd4, 0x85, 0x83, 0x10, 0x9a, 0x0b, 0x41, 0xca, 0x1b, + 0x75, 0xb2, 0x69, 0x04, 0x43, 0xe8, 0x9a, 0x90, 0xf9, 0x0a, 0x19, 0x5e, 0xb2, 0xe1, 0xdf, 0x16, + 0xda, 0x70, 0xb2, 0xbb, 0xf9, 0xb0, 0x05, 0x9e, 0x1a, 0x18, 0x2e, 0xe6, 0x5d, 0x87, 0xcd, 0x6f, + 0xae, 0x63, 0x58, 0x61, 0x13, 0xfc, 0x08, 0x88, 0x41, 0x6b, 0x01, 0xa4, 0x07, 0x0a, 0xe9, 0xc5, + 0xf0, 0x36, 0x7d, 0x64, 0xff, 0x19, 0x09, 0x1e, 0xf6, 0x17, 0x66, 0x20, 0x4a, 0x9c, 0x39, 0x20, + 0x4a, 0x7c, 0x40, 0xac, 0x70, 0x7f, 0xb0, 0x78, 0x07, 0x64, 0x31, 0xa6, 0x3e, 0xb0, 0x2b, 0x3d, + 0x2a, 0xec, 0x3a, 0xc5, 0x1b, 0xef, 0xe5, 0x71, 0x37, 0x09, 0xee, 0x60, 0xdd, 0x8b, 0x23, 0x27, + 0xbc, 0x60, 0x73, 0x23, 0xa0, 0xb1, 0x63, 0xbc, 0xc1, 0xee, 0x22, 0x64, 0xc1, 0x09, 0xd1, 0x5b, + 0x2c, 0x8a, 0xcd, 0x8c, 0x82, 0x62, 0x85, 0x7e, 0xc4, 0x45, 0x21, 0x9e, 0x82, 0xa3, 0x81, 0x7e, + 0x44, 0x1d, 0xe8, 0x05, 0x76, 0xa2, 0xfa, 0x5a, 0x12, 0xf1, 0xa3, 0x3f, 0x22, 0xc1, 0xfd, 0x82, + 0xb1, 0x2f, 0x56, 0x3a, 0x32, 0x3a, 0x56, 0x3a, 0xcd, 0x3b, 0xe8, 0x59, 0x03, 0xdd, 0x02, 0x51, + 0x47, 0xeb, 0x09, 0x6a, 0x96, 0x46, 0x04, 0x35, 0x62, 0xa7, 0xc6, 0x17, 0x13, 0x48, 0xd1, 0xb1, + 0x55, 0x84, 0xdd, 0xe3, 0x97, 0x81, 0x11, 0xed, 0xe6, 0x96, 0x4f, 0xfe, 0xef, 0x14, 0x4c, 0x95, + 0x6c, 0xec, 0x30, 0xe1, 0xc7, 0xc5, 0x69, 0x04, 0x2c, 0x4a, 0x74, 0xc0, 0xa2, 0x12, 0xa4, 0x2d, + 0xc1, 0xc8, 0x8c, 0xcc, 0xd8, 0x20, 0xf4, 0xe0, 0x77, 0xb4, 0x4c, 0x8c, 0x8f, 0x3a, 0xeb, 0xf3, + 0x53, 0x5b, 0xb4, 0xea, 0xc3, 0x90, 0xe4, 0xa0, 0x2b, 0xe2, 0xa0, 0xa1, 0x0e, 0x20, 0xb2, 0x04, + 0x13, 
0x75, 0xec, 0xe9, 0x46, 0x93, 0x5b, 0x3c, 0xfe, 0x15, 0x03, 0x50, 0xc6, 0xe3, 0x00, 0x4a, + 0x04, 0x1f, 0x4e, 0x74, 0xe0, 0xc3, 0x7b, 0x61, 0xda, 0xd3, 0x9d, 0x06, 0xf6, 0x58, 0x31, 0xb3, + 0xc0, 0xc0, 0x48, 0xb4, 0x42, 0xf8, 0xe8, 0x9f, 0xea, 0x3e, 0xfa, 0x5d, 0x4f, 0x77, 0x3c, 0x06, + 0x1b, 0x58, 0xc8, 0x70, 0x8a, 0x52, 0x28, 0x6a, 0x38, 0x46, 0x31, 0x26, 0x2b, 0x64, 0x11, 0x94, + 0x49, 0x6c, 0xd6, 0x69, 0xd1, 0x3a, 0x0d, 0x75, 0x37, 0x1c, 0xec, 0xba, 0xdc, 0xc2, 0x3d, 0x32, + 0x84, 0x60, 0xca, 0x9c, 0x45, 0xf5, 0x99, 0xd1, 0x2b, 0x80, 0x42, 0x30, 0x5e, 0xe0, 0x86, 0xd9, + 0xfd, 0xe2, 0x06, 0x11, 0x65, 0xf2, 0x29, 0x2e, 0xda, 0x61, 0xb8, 0x8e, 0x1e, 0x1e, 0xa1, 0xa6, + 0xd3, 0xfb, 0x6d, 0x1a, 0x89, 0x56, 0x82, 0xb6, 0x65, 0x75, 0x20, 0xae, 0x98, 0x86, 0xc9, 0xb2, + 0x52, 0x5c, 0x8b, 0x81, 0x14, 0x29, 0x48, 0xae, 0x95, 0x8a, 0x0a, 0xc3, 0x12, 0xf9, 0xd5, 0x92, + 0x5a, 0xa5, 0x58, 0x42, 0xfe, 0xdf, 0x04, 0x24, 0xa9, 0xca, 0x2d, 0x42, 0xa6, 0xfa, 0x6a, 0x59, + 0xe9, 0x68, 0x10, 0x41, 0xba, 0xa0, 0x2a, 0xf9, 0xaa, 0xa2, 0x15, 0x36, 0xb7, 0x2b, 0x55, 0x45, + 0xcd, 0x48, 0x84, 0xb6, 0xa6, 0x6c, 0x2a, 0x21, 0x5a, 0x82, 0xd0, 0xb6, 0xcb, 0x14, 0x87, 0x68, + 0x5b, 0x79, 0x4a, 0x1b, 0x43, 0xf3, 0x30, 0x2b, 0x68, 0xc5, 0xd2, 0x9a, 0x52, 0xc9, 0x24, 0x49, + 0x35, 0x55, 0x29, 0xe7, 0x37, 0x54, 0x9f, 0x75, 0x9c, 0xb1, 0xae, 0x85, 0xbb, 0x98, 0x20, 0x83, + 0xe1, 0xdd, 0x12, 0x4e, 0xad, 0x5c, 0x2a, 0x6d, 0x66, 0x26, 0x09, 0x95, 0x77, 0x1c, 0x50, 0x53, + 0xe8, 0x04, 0x64, 0x2b, 0x4a, 0x35, 0x20, 0x69, 0x5b, 0xf9, 0x62, 0x7e, 0x5d, 0xd9, 0x52, 0x8a, + 0xd5, 0xcc, 0x14, 0x3a, 0x02, 0xf3, 0xf9, 0xed, 0x6a, 0x49, 0xe3, 0xdd, 0xb2, 0x81, 0x00, 0x11, + 0x20, 0x25, 0x47, 0x07, 0x38, 0x8d, 0xd2, 0x00, 0xa4, 0xb1, 0xcd, 0xfc, 0xaa, 0xb2, 0x59, 0xc9, + 0xcc, 0xa0, 0x05, 0x98, 0x23, 0xdf, 0x6c, 0x4e, 0x5a, 0x7e, 0xbb, 0x7a, 0x31, 0x33, 0x4b, 0xa5, + 0x1f, 0xe9, 0xb1, 0xb2, 0xb1, 0xa3, 0x64, 0xd2, 0x3e, 0x5d, 0xa9, 0x5e, 0x29, 0xa9, 0x97, 0xb4, + 0x72, 0x69, 0x73, 0xa3, 0xf0, 0x6a, 0x66, 0x0e, 0xe5, 0x60, 0x89, 0x35, 0xb2, 0x51, 0xac, 0x2a, + 0xc5, 0x7c, 0xb1, 0xa0, 0x88, 0xb2, 0x8c, 0xfc, 0xa9, 0x31, 0x98, 0xef, 0xd2, 0xd4, 0x58, 0xe3, + 0x13, 0xd8, 0x84, 0xc4, 0xc8, 0x36, 0x61, 0x13, 0x26, 0x5b, 0xd8, 0x73, 0x8c, 0x1a, 0xcb, 0xe3, + 0xea, 0x7b, 0x97, 0xdf, 0x35, 0xaa, 0xe5, 0x2d, 0xca, 0xaa, 0x8a, 0x26, 0x50, 0x81, 0x8e, 0xa8, + 0x81, 0x5d, 0x9e, 0xcc, 0xb5, 0xaf, 0xcd, 0xc8, 0x59, 0x73, 0x9f, 0x94, 0x60, 0x82, 0x35, 0x1c, + 0x3b, 0xeb, 0x93, 0x30, 0x65, 0x98, 0x9e, 0x16, 0x20, 0xed, 0xb1, 0x8b, 0xf7, 0xa8, 0x29, 0xc3, + 0xf4, 0x2e, 0xd3, 0x5c, 0x9a, 0xfb, 0x60, 0xa6, 0x6e, 0xb5, 0x89, 0x1b, 0xc1, 0x6a, 0x10, 0xbb, + 0x2b, 0x5d, 0xbc, 0x47, 0x9d, 0x66, 0x54, 0xbf, 0x92, 0xeb, 0x51, 0x18, 0xcb, 0x2a, 0x51, 0xf0, + 0x4b, 0x2a, 0x31, 0x2a, 0xad, 0xb4, 0x3a, 0xc9, 0xe1, 0xbc, 0xfc, 0x35, 0x09, 0x16, 0x0b, 0xd4, + 0x23, 0xe4, 0x18, 0x40, 0xc5, 0x6f, 0xb4, 0xb1, 0xeb, 0xa1, 0x33, 0x00, 0xb6, 0x63, 0xbd, 0x8e, + 0x6b, 0x1e, 0x41, 0x9d, 0x92, 0x7f, 0x06, 0x4c, 0x71, 0xea, 0x46, 0xbd, 0xe7, 0x01, 0xf1, 0x1c, + 0x4c, 0x8a, 0xf0, 0x0b, 0xcb, 0x6b, 0x38, 0x33, 0x10, 0x94, 0xa8, 0x82, 0x83, 0x18, 0x72, 0x5b, + 0x27, 0xae, 0x17, 0x37, 0xd4, 0xfc, 0x4b, 0xfe, 0x98, 0x04, 0xf3, 0xeb, 0xd8, 0xbb, 0x7b, 0xa3, + 0x3c, 0x03, 0xe0, 0x87, 0xcd, 0x59, 0x02, 0x06, 0x67, 0x15, 0x31, 0xf3, 0xba, 0xbf, 0x44, 0xe3, + 0xc1, 0x12, 0xc9, 0x7f, 0x21, 0xc1, 0x22, 0x73, 0x37, 0x0e, 0x75, 0x28, 0x2f, 0xc2, 0x44, 0x9b, + 0xf6, 0xca, 0xef, 0x37, 0x1e, 0x1a, 0x28, 0x52, 0x36, 0x48, 0x95, 0xb3, 0xc5, 0xce, 0xe5, 0xbf, + 0x24, 0x38, 0xc2, 0xaa, 0xf9, 
0xb1, 0xf7, 0x43, 0x99, 0xcc, 0xfd, 0x30, 0x13, 0xf1, 0x69, 0x82, + 0x2b, 0x49, 0x30, 0x03, 0x87, 0xe6, 0x0c, 0xaf, 0x25, 0xd0, 0x0c, 0x1b, 0x39, 0xbd, 0xeb, 0x11, + 0xee, 0x5b, 0x34, 0x25, 0x72, 0xa2, 0x33, 0x25, 0x52, 0xcc, 0x39, 0x15, 0x9a, 0xf3, 0xc7, 0x13, + 0x70, 0xb2, 0x82, 0xbd, 0x38, 0xef, 0xe5, 0x5d, 0x34, 0xf7, 0x52, 0x34, 0xd2, 0x3a, 0x3e, 0x8a, + 0x33, 0x16, 0x09, 0xb5, 0x0a, 0x51, 0x4c, 0x84, 0x44, 0xf1, 0x1d, 0x09, 0xb2, 0x15, 0xec, 0x45, + 0xf1, 0xf4, 0x3e, 0xa4, 0x80, 0xc2, 0x52, 0x18, 0x5e, 0x02, 0x31, 0x37, 0x64, 0xc9, 0xd8, 0x1b, + 0xb2, 0x38, 0x95, 0xfd, 0x81, 0x04, 0xc7, 0x2b, 0xd8, 0xeb, 0xf2, 0xc5, 0x0f, 0x67, 0xf1, 0xe2, + 0xef, 0xec, 0x92, 0xbd, 0xee, 0xec, 0xe2, 0x84, 0xfe, 0x77, 0x12, 0x2c, 0x55, 0xb0, 0x17, 0x09, + 0x00, 0x1c, 0xca, 0xd8, 0xbb, 0xae, 0xfe, 0x92, 0x07, 0xb8, 0xfa, 0x8b, 0x9b, 0xd9, 0xdb, 0x12, + 0x2c, 0x50, 0x75, 0xe2, 0x4e, 0xfc, 0xe1, 0x4c, 0x2b, 0x72, 0x43, 0x98, 0xec, 0xbc, 0x21, 0x8c, + 0x1b, 0xe7, 0xef, 0x4a, 0xb0, 0xc0, 0xac, 0x1e, 0xf3, 0x88, 0x0e, 0x67, 0x9c, 0x0f, 0x40, 0xba, + 0xc3, 0x3b, 0x63, 0x6a, 0x33, 0xdb, 0x8a, 0x44, 0x7a, 0xc5, 0x80, 0x27, 0x43, 0x03, 0xfe, 0x97, + 0x04, 0x2c, 0x12, 0x9d, 0x0f, 0x2e, 0x9c, 0x0f, 0x65, 0xc4, 0x5b, 0x30, 0xa1, 0xd7, 0x3c, 0x31, + 0xd2, 0x74, 0xbf, 0xcb, 0xd1, 0xb8, 0xd1, 0x2d, 0xe7, 0x29, 0xb3, 0xca, 0x1b, 0x41, 0xcf, 0xfb, + 0x27, 0xd8, 0x7e, 0xae, 0xd6, 0x3b, 0x8f, 0xaf, 0xb0, 0x5c, 0xca, 0x30, 0xc1, 0xfa, 0x20, 0xe0, + 0x7f, 0xbb, 0x78, 0xa9, 0x58, 0xba, 0x52, 0x64, 0xe1, 0x46, 0x02, 0x40, 0xcb, 0xf9, 0x4a, 0xe5, + 0x4a, 0x49, 0x5d, 0xcb, 0x48, 0x04, 0x16, 0xaf, 0x2b, 0x45, 0x45, 0x25, 0x10, 0xdb, 0x27, 0x27, + 0x44, 0xc5, 0xed, 0x8a, 0xa2, 0x16, 0xf3, 0x5b, 0x4a, 0x66, 0x4c, 0xfe, 0xb8, 0x04, 0x8b, 0x6b, + 0xb8, 0x89, 0x0f, 0xf9, 0x70, 0x17, 0x93, 0x4b, 0x86, 0x26, 0xb7, 0x07, 0x0b, 0x9b, 0x86, 0x2b, + 0xf0, 0xce, 0xdd, 0xd8, 0x4c, 0x01, 0xb2, 0x4a, 0x46, 0x90, 0xd5, 0x6d, 0x58, 0x8c, 0xf6, 0xe4, + 0xda, 0x96, 0xe9, 0x62, 0xf4, 0x02, 0xa4, 0xf8, 0x10, 0xdd, 0xac, 0x44, 0x21, 0xef, 0x10, 0x38, + 0xce, 0x67, 0x41, 0xf7, 0xc1, 0x6c, 0xcb, 0x70, 0x5d, 0x62, 0x28, 0x49, 0xf7, 0x2c, 0x33, 0x6d, + 0x4a, 0x9d, 0xe1, 0xc4, 0x1d, 0x42, 0x93, 0x7f, 0x41, 0x82, 0x85, 0x75, 0xec, 0xf9, 0x80, 0xf9, + 0x2e, 0x4c, 0xf3, 0x01, 0x98, 0x09, 0xc2, 0x13, 0x11, 0x89, 0x4f, 0xfb, 0xf4, 0x1e, 0xd8, 0xee, + 0x75, 0x38, 0x42, 0x24, 0xe1, 0x8f, 0xe6, 0x9d, 0x94, 0xfa, 0x27, 0x25, 0x58, 0x2a, 0xe8, 0x66, + 0x0d, 0x37, 0x7f, 0x82, 0x93, 0x0f, 0x2b, 0xdc, 0x87, 0x25, 0x58, 0xea, 0x9c, 0x3d, 0xd7, 0x84, + 0x02, 0x80, 0xcf, 0x2d, 0x74, 0xe1, 0xbe, 0x21, 0xdc, 0x1f, 0x35, 0xc4, 0x36, 0x9c, 0x3e, 0x34, + 0x60, 0x69, 0x1d, 0x7b, 0xe4, 0xfc, 0xf4, 0x2f, 0x0f, 0x0f, 0x2e, 0x94, 0xb8, 0xd9, 0x7e, 0x34, + 0x01, 0x33, 0xe1, 0x6e, 0x58, 0xf8, 0x91, 0xdd, 0xea, 0x76, 0x5e, 0xe8, 0x49, 0x22, 0xfc, 0x48, + 0x8b, 0x3b, 0x2e, 0xf4, 0x96, 0x61, 0xe1, 0xba, 0xde, 0x34, 0xa2, 0x37, 0x0e, 0xe2, 0xe1, 0xd0, + 0x3c, 0x2d, 0x0a, 0x5d, 0x38, 0xb8, 0x2c, 0x4c, 0xcf, 0xfa, 0x09, 0x41, 0xd7, 0xa4, 0x08, 0xd3, + 0xd3, 0x92, 0x20, 0x4c, 0x7f, 0x0e, 0x58, 0x13, 0xa1, 0xba, 0x6e, 0x76, 0x9c, 0xb6, 0x3d, 0x47, + 0x0b, 0xfc, 0xaa, 0x2e, 0xba, 0x00, 0x47, 0x58, 0xdd, 0xe8, 0x39, 0xc3, 0xde, 0x03, 0x4d, 0xa9, + 0x6c, 0x98, 0x91, 0x20, 0xa0, 0x2b, 0xff, 0x95, 0x04, 0x47, 0x98, 0xfb, 0x77, 0xb8, 0x1e, 0xc0, + 0x8b, 0x30, 0xe5, 0xa3, 0x60, 0x0e, 0x44, 0x86, 0xc9, 0x0c, 0x4a, 0x09, 0x88, 0x1c, 0xda, 0x56, + 0x13, 0x91, 0x6d, 0xf5, 0x6d, 0x09, 0x8e, 0x30, 0x0b, 0xfe, 0x6e, 0x74, 0x69, 0xe2, 0xe0, 0xc8, + 0x2f, 0x4a, 0xcc, 0xfe, 0x8a, 0xf1, 0x1e, 0x12, 0x6e, 
0xea, 0xe5, 0x67, 0xff, 0x8e, 0x04, 0x68, + 0x3d, 0xf0, 0x8f, 0xde, 0xed, 0xd2, 0xfb, 0xca, 0x04, 0xa4, 0xc4, 0x58, 0x63, 0x43, 0x2a, 0xcf, + 0xc3, 0x04, 0xc7, 0xbb, 0x89, 0x7d, 0x24, 0x06, 0x72, 0x9e, 0x7d, 0x66, 0x22, 0xf6, 0xcd, 0x32, + 0xc8, 0xc2, 0xa4, 0x30, 0x28, 0xec, 0xc9, 0x8d, 0xf8, 0x24, 0x26, 0x24, 0xee, 0x02, 0xfb, 0x2a, + 0x33, 0x21, 0xdd, 0x97, 0xd7, 0x79, 0x3f, 0x36, 0xd6, 0xa0, 0xc0, 0xec, 0xe1, 0xc1, 0x3b, 0x67, + 0xf0, 0xbd, 0xfd, 0x5e, 0x5c, 0x58, 0xbc, 0xc3, 0x0d, 0x4d, 0x1e, 0xd8, 0x0d, 0xbd, 0x08, 0xd0, + 0xd2, 0x4d, 0xbd, 0x81, 0x5b, 0x42, 0xf3, 0xfa, 0x3e, 0xf5, 0x20, 0xed, 0x6d, 0xf9, 0xf5, 0xd5, + 0x10, 0x2f, 0xfa, 0x00, 0x2c, 0xc4, 0x65, 0xdf, 0x4c, 0xec, 0x3f, 0xfb, 0x66, 0xbe, 0xd5, 0x95, + 0x76, 0x13, 0xbd, 0x61, 0x37, 0x0e, 0x70, 0xc3, 0x2e, 0xbf, 0x25, 0x1d, 0xf4, 0x7e, 0x7c, 0x09, + 0x10, 0xff, 0xd0, 0xae, 0x6c, 0x54, 0x2f, 0x6a, 0xec, 0x36, 0x7c, 0xac, 0xf3, 0xde, 0x3c, 0x19, + 0xb9, 0x37, 0x1f, 0x0f, 0xee, 0xcd, 0x27, 0xe4, 0x6f, 0x49, 0x90, 0x8e, 0x8a, 0x12, 0x9d, 0x81, + 0x19, 0xb2, 0x2e, 0x5a, 0xdb, 0x6e, 0x38, 0x7a, 0x5d, 0x3c, 0xcb, 0xa2, 0x6b, 0xb5, 0xcd, 0x48, + 0xe8, 0x5e, 0xb6, 0xf8, 0x9a, 0x83, 0x6d, 0xdd, 0x70, 0x78, 0x36, 0x3a, 0x10, 0x92, 0x4a, 0x29, + 0x68, 0x1b, 0xe6, 0x38, 0xbb, 0x66, 0xd9, 0xe2, 0x3e, 0x77, 0xc0, 0x25, 0x64, 0x3e, 0xe8, 0xa0, + 0xc4, 0x78, 0xd4, 0x74, 0x3b, 0xf2, 0x2d, 0xb7, 0x00, 0x75, 0xd7, 0x42, 0xef, 0x81, 0xa3, 0xe1, + 0x01, 0x6b, 0xa1, 0x4b, 0x15, 0xb6, 0xdd, 0x17, 0x43, 0x63, 0xaf, 0xf8, 0xf7, 0x2b, 0x03, 0x93, + 0x8d, 0xe5, 0x57, 0x60, 0xbe, 0x2b, 0x9d, 0x0f, 0x15, 0x60, 0xe2, 0x86, 0x61, 0xd6, 0xad, 0x1b, + 0x83, 0x5f, 0x9a, 0x85, 0x98, 0xaf, 0x50, 0x16, 0x95, 0xb3, 0x12, 0x70, 0x3b, 0xdf, 0x55, 0x8a, + 0x9a, 0x90, 0xad, 0xeb, 0x46, 0xf3, 0x96, 0x16, 0x4e, 0x3c, 0xe4, 0x9d, 0x25, 0x06, 0xdd, 0x10, + 0xae, 0x11, 0xce, 0xae, 0x36, 0x2f, 0xde, 0xa3, 0x2e, 0xd5, 0x63, 0x4b, 0x56, 0x53, 0x30, 0xc1, + 0xae, 0xa7, 0xe5, 0x0a, 0x2c, 0xc5, 0x73, 0x77, 0x5c, 0x51, 0x25, 0x3a, 0xaf, 0xa8, 0x72, 0x90, + 0xaa, 0xb7, 0x19, 0x8a, 0xe3, 0xef, 0x0c, 0xfc, 0x6f, 0xf9, 0xa7, 0x13, 0x70, 0x22, 0x14, 0x4d, + 0x0b, 0xed, 0xd5, 0x77, 0xd1, 0xb9, 0x71, 0xf7, 0x8c, 0x4e, 0x9c, 0x17, 0xfa, 0xd7, 0x2c, 0xa0, + 0x23, 0x44, 0x50, 0x31, 0x6e, 0xe3, 0x77, 0xd3, 0xe4, 0x4f, 0xf2, 0x24, 0x6c, 0x76, 0x72, 0x8d, + 0xd3, 0x93, 0x6b, 0xca, 0xf4, 0x8f, 0xac, 0xb8, 0x19, 0xfd, 0xbe, 0x04, 0xa7, 0x54, 0xab, 0xd9, + 0xdc, 0xd5, 0x6b, 0xd7, 0xc4, 0xb4, 0xf8, 0xc6, 0x7a, 0xb7, 0xc3, 0x81, 0x1d, 0xe6, 0xc1, 0x85, + 0xb0, 0x14, 0x77, 0x61, 0xa2, 0xb9, 0xe8, 0xd2, 0x08, 0xb9, 0xe8, 0xf2, 0x37, 0x24, 0x40, 0x31, + 0x89, 0x12, 0xef, 0x87, 0x13, 0x3c, 0xb7, 0x8b, 0x76, 0x40, 0xac, 0x10, 0x7d, 0x5f, 0x44, 0x8e, + 0x76, 0xf1, 0x54, 0x35, 0xa5, 0xe6, 0x58, 0x1d, 0xd2, 0x6e, 0xbe, 0xa3, 0x06, 0x2a, 0x87, 0xd3, + 0xd1, 0x8d, 0x96, 0xe1, 0x89, 0x57, 0x5c, 0x0f, 0x0d, 0xce, 0x55, 0xd8, 0x24, 0xf5, 0x43, 0x09, + 0xe8, 0x94, 0x5d, 0xde, 0x83, 0xd9, 0x48, 0x05, 0xe2, 0x7c, 0xf9, 0x5d, 0x84, 0x7e, 0x1d, 0x61, + 0x46, 0x10, 0xa9, 0xb3, 0x91, 0x85, 0xc9, 0x96, 0x61, 0x1a, 0xad, 0x76, 0x8b, 0xdd, 0x3d, 0xa9, + 0xe2, 0x93, 0x96, 0xe8, 0x37, 0x69, 0xc9, 0x18, 0x2f, 0x61, 0x9f, 0x34, 0xe8, 0x17, 0x97, 0xed, + 0xd3, 0xfb, 0xbd, 0xda, 0xfd, 0x90, 0x6e, 0x19, 0x66, 0x18, 0x4e, 0xb1, 0x9f, 0x61, 0x98, 0x69, + 0x19, 0x66, 0x00, 0xa5, 0x48, 0x2d, 0xfd, 0x66, 0x37, 0xe8, 0x9a, 0x69, 0xe9, 0x37, 0x83, 0x5a, + 0x67, 0x61, 0x2e, 0x22, 0x6f, 0xcc, 0x74, 0x25, 0xa5, 0x76, 0x92, 0xe5, 0x1f, 0x25, 0x20, 0x53, + 0xc1, 0x1e, 0x4b, 0x72, 0x3b, 0x1c, 0x2d, 0x6e, 0x74, 0xbf, 0x32, 0x60, 0x37, 
0x87, 0xef, 0xeb, + 0x1b, 0x48, 0x8b, 0x0c, 0x71, 0xf4, 0xe7, 0x06, 0xe3, 0x3d, 0x9e, 0x1b, 0xc4, 0x6c, 0xf8, 0xbb, + 0x91, 0x05, 0xf8, 0x15, 0x89, 0xc6, 0x28, 0x43, 0x8f, 0x13, 0x0e, 0x45, 0xc6, 0x21, 0x35, 0x4b, + 0x46, 0xd5, 0x2c, 0xce, 0x3a, 0xfc, 0x01, 0x31, 0xd5, 0xe4, 0x5c, 0xdb, 0x28, 0xab, 0xfc, 0x57, + 0x4a, 0x0e, 0x37, 0xc0, 0x17, 0x1a, 0x0c, 0x7a, 0x0c, 0x90, 0x43, 0x06, 0x81, 0xb5, 0x9a, 0x83, + 0xeb, 0xd8, 0x24, 0xae, 0x84, 0x4b, 0x97, 0x25, 0xa5, 0xce, 0xb3, 0x92, 0x42, 0x50, 0x20, 0x7f, + 0x42, 0x82, 0x63, 0x05, 0xab, 0x65, 0x13, 0xd7, 0xf6, 0x27, 0x35, 0xfc, 0xf0, 0x21, 0x71, 0x0d, + 0xe6, 0xbb, 0x7e, 0x98, 0x83, 0x68, 0x62, 0xe8, 0xa7, 0x39, 0xf8, 0xc6, 0x95, 0xa8, 0xc5, 0xc8, + 0xe8, 0xe1, 0xda, 0x64, 0xf3, 0x3e, 0x0c, 0x61, 0x1a, 0x33, 0x4b, 0x4c, 0xaf, 0xe6, 0x42, 0x74, + 0x62, 0x99, 0xe4, 0xef, 0x4a, 0xb0, 0x14, 0xff, 0x13, 0x1b, 0x68, 0x17, 0x66, 0xa9, 0x91, 0xf0, + 0x7f, 0x6a, 0x85, 0xbd, 0x0a, 0x7d, 0x61, 0xbf, 0xbf, 0xd5, 0xc1, 0x8e, 0x7d, 0x4e, 0x62, 0xaf, + 0x1d, 0xc5, 0x97, 0xfc, 0x34, 0xcc, 0x84, 0x4b, 0x09, 0xf2, 0xee, 0x7a, 0x07, 0x5a, 0x51, 0x0a, + 0xdb, 0xaa, 0x92, 0x91, 0xc8, 0xdf, 0xca, 0x2b, 0xe5, 0x52, 0x45, 0xc9, 0x24, 0xe4, 0xbf, 0x97, + 0xe0, 0x28, 0xc1, 0x06, 0x91, 0x87, 0x36, 0x87, 0xb2, 0x64, 0xdd, 0xcf, 0x81, 0x92, 0x07, 0x7a, + 0x0e, 0x14, 0xb7, 0x9d, 0xfe, 0x91, 0xdf, 0xc5, 0x75, 0x3d, 0x86, 0xe1, 0x33, 0x3c, 0xd9, 0x3d, + 0xc3, 0x41, 0xd7, 0x87, 0x27, 0xbb, 0x67, 0x16, 0x9e, 0x55, 0xfc, 0x73, 0x9d, 0xe4, 0x5d, 0x79, + 0xae, 0x13, 0x17, 0x10, 0x5e, 0x66, 0xa1, 0x99, 0xae, 0x2b, 0xad, 0x20, 0x78, 0x22, 0x45, 0x82, + 0x27, 0x6f, 0x49, 0x0c, 0x7f, 0x84, 0x18, 0x38, 0xfe, 0x78, 0x7f, 0xf8, 0x9a, 0x6a, 0x20, 0xfc, + 0x10, 0xfc, 0xe1, 0xab, 0xac, 0x07, 0x61, 0xce, 0xc4, 0x37, 0x3d, 0xcd, 0xa6, 0x81, 0x40, 0xeb, + 0x1a, 0x16, 0xde, 0xcc, 0x2c, 0x21, 0x97, 0xf5, 0x06, 0xae, 0x12, 0xa2, 0xfc, 0x43, 0x09, 0x52, + 0x82, 0x1f, 0x15, 0x20, 0xe9, 0x1f, 0xf7, 0xe9, 0x0b, 0x2b, 0x83, 0x7b, 0xf4, 0xff, 0xa0, 0x89, + 0x7a, 0x94, 0xd9, 0x97, 0x4c, 0x22, 0xfa, 0x88, 0xd3, 0xc1, 0x35, 0xab, 0xd5, 0xc2, 0x66, 0x1d, + 0xb3, 0x95, 0x4a, 0xa9, 0x61, 0x92, 0x5c, 0x80, 0x99, 0x70, 0x5b, 0xe8, 0x24, 0x1c, 0xdb, 0x2c, + 0x15, 0xf2, 0xd5, 0x8d, 0x52, 0x51, 0x8b, 0xc9, 0xbd, 0x4a, 0x41, 0x72, 0xa7, 0x54, 0xe4, 0x1b, + 0x48, 0x55, 0xd6, 0x37, 0x4a, 0xc5, 0x4c, 0x42, 0xfe, 0xb1, 0x04, 0x73, 0x1d, 0xde, 0x35, 0x5a, + 0x85, 0x64, 0xcd, 0xaa, 0x8b, 0x39, 0x2d, 0x0f, 0xed, 0x96, 0x2f, 0x17, 0xe8, 0xa3, 0x7e, 0xc2, + 0x4b, 0x01, 0x0d, 0x0f, 0x7c, 0xb0, 0x59, 0x89, 0x4f, 0xd9, 0x85, 0x24, 0xa9, 0xd7, 0x75, 0xa5, + 0xb4, 0x5e, 0x50, 0xb4, 0x4a, 0xb5, 0x54, 0xb8, 0x54, 0xda, 0xae, 0x66, 0x24, 0x74, 0x2f, 0x1c, + 0x5f, 0xbf, 0xa4, 0x68, 0x15, 0x45, 0xbd, 0xbc, 0x51, 0x50, 0xb4, 0x7c, 0xa1, 0x50, 0xda, 0x2e, + 0x56, 0x35, 0x96, 0xb1, 0xb5, 0xc6, 0xbc, 0x76, 0xc2, 0xf2, 0xf2, 0x76, 0xa9, 0x9a, 0xd7, 0x94, + 0x57, 0x0a, 0x8a, 0xb2, 0xa6, 0xac, 0x65, 0xc6, 0x44, 0x8e, 0xd5, 0xea, 0xab, 0x5a, 0xa9, 0xac, + 0xa8, 0xf9, 0x6a, 0x49, 0xcd, 0x24, 0xe5, 0x0d, 0xff, 0x99, 0x7b, 0xf0, 0xba, 0x56, 0xbc, 0xab, + 0x94, 0xa2, 0xaf, 0x3d, 0xa3, 0x8f, 0x2e, 0x13, 0x9d, 0x8f, 0x2e, 0xe5, 0x9f, 0x97, 0xe0, 0x04, + 0x51, 0xc1, 0x6d, 0xfa, 0xdb, 0x02, 0xc1, 0xab, 0xf1, 0x41, 0xba, 0x4b, 0xe8, 0x57, 0x8d, 0xa6, + 0x87, 0x1d, 0xde, 0x28, 0xff, 0x42, 0xc7, 0x61, 0x8a, 0xaa, 0x1c, 0x7d, 0xb1, 0xc1, 0x40, 0x58, + 0x8a, 0x10, 0xe8, 0x4b, 0x0d, 0xb2, 0xc5, 0x03, 0x7d, 0x4c, 0xf2, 0x2d, 0xee, 0xeb, 0xe2, 0xe7, + 0x25, 0x38, 0xd9, 0x63, 0x30, 0x7c, 0x5f, 0x6c, 0xc2, 0x74, 0x30, 0x78, 0xb1, 0x33, 0xfa, 0x24, + 0x7b, 
0x75, 0xb6, 0xa4, 0x86, 0xd9, 0x87, 0xde, 0x23, 0x6f, 0x27, 0xe0, 0x54, 0x67, 0x4b, 0xd1, + 0xf7, 0xe2, 0x64, 0x66, 0xa1, 0xb7, 0xe5, 0xdc, 0x78, 0x39, 0xfe, 0x53, 0x72, 0x19, 0x66, 0x0d, + 0x9b, 0x3d, 0xd6, 0xa0, 0x44, 0x11, 0x59, 0x30, 0xec, 0x82, 0x51, 0x77, 0x58, 0x13, 0x57, 0xfc, + 0x38, 0x1d, 0x4b, 0x90, 0x7d, 0x71, 0xf8, 0x69, 0x45, 0x07, 0xd3, 0x11, 0xbd, 0x93, 0x1b, 0x7e, + 0x48, 0x29, 0xa2, 0xa5, 0x00, 0x13, 0xdb, 0xc5, 0xed, 0x8a, 0xb2, 0xc6, 0xd2, 0x16, 0x37, 0x8a, + 0xda, 0x76, 0xc5, 0x57, 0xd1, 0x4c, 0x02, 0x65, 0x61, 0x51, 0xd0, 0x2e, 0xe6, 0x55, 0x25, 0xbf, + 0xba, 0xa9, 0x68, 0xe5, 0x12, 0x51, 0xca, 0x25, 0x40, 0xbc, 0x84, 0xa5, 0x13, 0xae, 0x51, 0x7a, + 0x52, 0x7e, 0x2b, 0x01, 0x99, 0xce, 0xa1, 0x75, 0x68, 0xa0, 0xd4, 0xf5, 0xec, 0x37, 0xa4, 0xbb, + 0x89, 0xa8, 0xee, 0x76, 0x09, 0x6d, 0xac, 0x5b, 0x68, 0x7b, 0xb0, 0x10, 0xbc, 0xee, 0x37, 0x6c, + 0x56, 0x51, 0x20, 0xe7, 0x67, 0x46, 0x95, 0xa0, 0x3a, 0xef, 0x37, 0xba, 0x61, 0x53, 0x8a, 0x3b, + 0x64, 0x6a, 0xb0, 0x7c, 0x01, 0x96, 0x7a, 0x24, 0x5e, 0xf7, 0x7e, 0x02, 0xff, 0x3e, 0x98, 0xef, + 0x7e, 0xcb, 0xf7, 0x30, 0xcc, 0xfb, 0x11, 0x4b, 0x1b, 0x3b, 0xd4, 0xb7, 0xe1, 0xe0, 0x28, 0xcd, + 0x43, 0x90, 0x65, 0xec, 0x10, 0xa0, 0x21, 0x7f, 0x26, 0x01, 0xc7, 0x7a, 0xe7, 0x9a, 0xdf, 0x86, + 0xc5, 0x5d, 0xa3, 0xf1, 0x46, 0x1b, 0x3b, 0xb7, 0xb4, 0x3a, 0x76, 0x3d, 0xc3, 0x64, 0xc1, 0x19, + 0x16, 0xaa, 0x5a, 0x1f, 0x21, 0xc1, 0x7d, 0x79, 0xd5, 0x68, 0xbc, 0x4c, 0xda, 0x5b, 0x0b, 0x9a, + 0x53, 0x17, 0x44, 0x27, 0x21, 0x22, 0x2a, 0xc0, 0x29, 0xe1, 0xed, 0x72, 0x78, 0x81, 0x69, 0x86, + 0x23, 0xc1, 0x5f, 0xd8, 0x11, 0x3f, 0x9b, 0x94, 0x52, 0x8f, 0x73, 0x7f, 0x97, 0x55, 0x52, 0x68, + 0x9d, 0x2d, 0x5e, 0x25, 0xf7, 0x24, 0x2c, 0xc4, 0x74, 0x48, 0xb6, 0x1c, 0x81, 0x57, 0x2e, 0x0e, + 0xe3, 0x05, 0x4e, 0xd9, 0xa8, 0x5f, 0xf8, 0x74, 0x1e, 0xd2, 0xdc, 0xff, 0x66, 0xe1, 0x19, 0x07, + 0xfd, 0x93, 0x04, 0x33, 0xe1, 0xbb, 0x6b, 0xd4, 0x27, 0x36, 0x1d, 0x73, 0x9b, 0x9e, 0x5b, 0x1e, + 0xb6, 0x3a, 0xb3, 0x56, 0xf2, 0x1b, 0x1f, 0xfe, 0xdb, 0x1f, 0x7f, 0x36, 0x71, 0x0d, 0x9d, 0xf7, + 0x7f, 0x2b, 0xf1, 0x4d, 0x66, 0x3d, 0x5f, 0xe0, 0xb0, 0xc6, 0x5d, 0x39, 0xb7, 0xe2, 0x1f, 0xd9, + 0x2b, 0xe7, 0xee, 0x88, 0xdf, 0x52, 0x74, 0x77, 0x9e, 0x42, 0x4f, 0xfa, 0x4c, 0x7e, 0xe5, 0x37, + 0x03, 0x7c, 0x74, 0x67, 0x85, 0xde, 0x8a, 0xae, 0xbc, 0x49, 0xfe, 0x09, 0xf8, 0xd0, 0x3f, 0x48, + 0x00, 0x41, 0xde, 0x23, 0xea, 0x83, 0x72, 0xba, 0xb2, 0x23, 0x73, 0x83, 0xef, 0xeb, 0xe5, 0x0f, + 0xd1, 0x19, 0xdd, 0x0c, 0xcf, 0x88, 0x18, 0xbc, 0x1e, 0xf3, 0xf1, 0x87, 0xb5, 0x72, 0xee, 0xce, + 0x4e, 0x01, 0xe5, 0x47, 0x99, 0xd1, 0xca, 0x9b, 0x01, 0xc2, 0xbb, 0x83, 0x7e, 0x24, 0xc1, 0x6c, + 0x24, 0xff, 0x14, 0xf5, 0x59, 0x93, 0xb8, 0x44, 0xd5, 0xdc, 0x30, 0x17, 0xd1, 0xf2, 0x0d, 0x3a, + 0xcd, 0x37, 0xe4, 0xfd, 0x2f, 0xdc, 0xb3, 0xd2, 0xb9, 0x9d, 0xf7, 0xca, 0x23, 0xad, 0xdd, 0xb3, + 0xd2, 0x39, 0xf4, 0xef, 0x12, 0xcc, 0x46, 0xd2, 0x45, 0xfb, 0xcd, 0x2f, 0x2e, 0xaf, 0x74, 0xb8, + 0xf9, 0x7d, 0x54, 0xa2, 0x13, 0xfc, 0x50, 0x6e, 0xff, 0xeb, 0x48, 0x26, 0xf8, 0x52, 0xee, 0xe0, + 0x4b, 0x49, 0x66, 0xfb, 0x85, 0x04, 0xa4, 0xa3, 0x09, 0xa5, 0x68, 0x65, 0xd0, 0x74, 0x3b, 0x6e, + 0x1a, 0x87, 0x9b, 0xef, 0x77, 0xd8, 0x7c, 0xbf, 0x29, 0xe5, 0x5e, 0xd8, 0xef, 0x84, 0x57, 0xfc, + 0x88, 0x1e, 0x9f, 0xfc, 0x55, 0x59, 0x3f, 0xf0, 0xe4, 0x43, 0x8d, 0xbe, 0x19, 0x0e, 0x5d, 0xde, + 0x59, 0x61, 0xf9, 0x4b, 0x44, 0x38, 0x3f, 0x48, 0x44, 0x02, 0xc5, 0xe1, 0x93, 0xe4, 0xe9, 0xbe, + 0x11, 0xa1, 0xde, 0xb9, 0xaa, 0xc3, 0x09, 0xeb, 0xcf, 0x98, 0xb0, 0x7e, 0x20, 0xc9, 0x9b, 0x07, + 0x13, 0x96, 0x8b, 0xbd, 0xd0, 
0x18, 0x88, 0xec, 0xae, 0xc9, 0x57, 0xdf, 0x41, 0xd9, 0xe9, 0x91, + 0xce, 0xd0, 0xcf, 0x26, 0x60, 0xbe, 0x2b, 0x5f, 0x15, 0x5d, 0xe8, 0x1f, 0x4d, 0x8b, 0x4b, 0x6e, + 0x1d, 0x4e, 0x6c, 0xbf, 0xca, 0xc4, 0xf6, 0x69, 0x49, 0x7e, 0x7e, 0xff, 0x9b, 0xca, 0xf5, 0xbb, + 0x26, 0x62, 0x2a, 0xca, 0x1b, 0x07, 0x17, 0x53, 0xd3, 0x6f, 0x0f, 0x7d, 0x96, 0x67, 0x04, 0x76, + 0x65, 0x9c, 0x0e, 0xc8, 0xd1, 0xeb, 0x91, 0x35, 0x3b, 0x9c, 0x3c, 0xbe, 0xc4, 0xe4, 0xf1, 0x2b, + 0x92, 0xfc, 0xe2, 0x48, 0xf2, 0x08, 0x7a, 0x27, 0x22, 0x79, 0x39, 0xa4, 0x8c, 0x23, 0x8b, 0xa4, + 0x15, 0x6e, 0x12, 0xbd, 0x95, 0x80, 0xb9, 0x8e, 0xd4, 0x5a, 0xf4, 0x78, 0x5f, 0x81, 0xc4, 0x64, + 0xe1, 0x0e, 0x27, 0x8b, 0x5f, 0x66, 0xb2, 0xf8, 0x94, 0x24, 0x3f, 0x37, 0x92, 0x2c, 0x58, 0xc7, + 0x44, 0x0e, 0x5b, 0xf2, 0xc5, 0x83, 0xcb, 0x41, 0x17, 0xcd, 0x11, 0x19, 0xcc, 0x84, 0x93, 0x70, + 0xfb, 0x21, 0xa2, 0x98, 0x64, 0xdd, 0xe1, 0x66, 0xff, 0x45, 0x36, 0xfb, 0xcf, 0x4b, 0xf2, 0xfb, + 0x46, 0xdc, 0x19, 0xbc, 0x88, 0x08, 0xa0, 0x2c, 0x5f, 0xba, 0x1b, 0x7b, 0x23, 0x68, 0x11, 0xfd, + 0x9f, 0x04, 0x33, 0xe1, 0x04, 0xdf, 0x7e, 0x32, 0x88, 0x49, 0x04, 0x1e, 0x4e, 0x06, 0xbf, 0xc6, + 0x64, 0xf0, 0x99, 0x91, 0x64, 0xd0, 0x0e, 0xf5, 0x7a, 0xd7, 0x94, 0xa0, 0x25, 0x9a, 0x43, 0x1f, + 0x4f, 0xc0, 0x6c, 0x24, 0x25, 0xb7, 0x1f, 0xe8, 0x88, 0xcb, 0xdd, 0x1d, 0x4e, 0x04, 0xbf, 0xc5, + 0x44, 0xf0, 0x85, 0x91, 0x0d, 0x82, 0xdf, 0x2d, 0x91, 0x41, 0x55, 0x2e, 0x1d, 0x1c, 0x83, 0x74, + 0xb6, 0x8a, 0x7e, 0x2c, 0xc1, 0x6c, 0x24, 0xa3, 0xb7, 0x9f, 0x28, 0xe2, 0x52, 0x7f, 0x87, 0x13, + 0x05, 0x87, 0xd1, 0xe7, 0x46, 0x81, 0xd1, 0xe7, 0xee, 0x02, 0x8c, 0xfe, 0x37, 0x09, 0xd2, 0xd1, + 0xe4, 0xcd, 0x7e, 0xc0, 0x2b, 0x36, 0xc9, 0x35, 0xf7, 0xf8, 0xf0, 0x0c, 0xdc, 0x1d, 0x6a, 0xd3, + 0x59, 0x5b, 0xe8, 0x89, 0xa1, 0x51, 0x75, 0x90, 0x0f, 0xba, 0xf3, 0x0c, 0x7a, 0x6a, 0x3f, 0xf3, + 0x0e, 0x65, 0x92, 0xfe, 0xab, 0x04, 0x33, 0xe1, 0xa4, 0xe1, 0x7e, 0xfb, 0x3b, 0x26, 0xb9, 0x78, + 0x7f, 0x88, 0x3a, 0x3c, 0xb9, 0x7e, 0x4b, 0x1a, 0x8c, 0x8f, 0x2c, 0xea, 0x3a, 0x52, 0x46, 0x9b, + 0xdc, 0xca, 0x9b, 0xe1, 0xfc, 0xdd, 0x3b, 0xe8, 0x7f, 0x24, 0x98, 0xeb, 0x48, 0x13, 0xee, 0x77, + 0xa6, 0xc5, 0x67, 0x14, 0xe7, 0x96, 0x04, 0x87, 0xf8, 0x21, 0xfd, 0x65, 0xa5, 0x65, 0x7b, 0xb7, + 0x42, 0x46, 0xec, 0xd9, 0x11, 0x66, 0xf9, 0x6c, 0x8d, 0xf6, 0x36, 0xc2, 0x69, 0xde, 0x73, 0xbe, + 0x41, 0x93, 0x64, 0xe7, 0xce, 0x75, 0xe4, 0x02, 0xf7, 0x9b, 0x79, 0x7c, 0xda, 0x70, 0xee, 0xc1, + 0x7e, 0x86, 0x2f, 0xa8, 0x2e, 0x1c, 0xc4, 0x21, 0x57, 0xfb, 0xce, 0x8a, 0x1b, 0x62, 0xde, 0x79, + 0x16, 0x3d, 0xb3, 0x9f, 0xd9, 0x33, 0x5e, 0x9e, 0x61, 0x48, 0x6c, 0x75, 0x24, 0x65, 0x01, 0x0d, + 0x08, 0x4a, 0x74, 0xe6, 0x89, 0xe6, 0x56, 0x86, 0xae, 0xcf, 0xb7, 0xed, 0xe7, 0xd9, 0xaa, 0x7f, + 0x52, 0x42, 0x2f, 0x0c, 0xb9, 0x71, 0xc3, 0x06, 0x2b, 0x80, 0xeb, 0x3b, 0x5b, 0xe8, 0xd2, 0x5d, + 0x44, 0xff, 0xe8, 0x67, 0x12, 0x30, 0x1d, 0x4a, 0x3d, 0x45, 0x8f, 0xf6, 0x5d, 0xed, 0x4e, 0xbf, + 0x71, 0x88, 0x6c, 0x0e, 0xf9, 0xab, 0x6c, 0xe6, 0xbf, 0x11, 0x99, 0xf9, 0x08, 0x9e, 0xd0, 0xce, + 0x6b, 0x68, 0xe7, 0x9d, 0xf3, 0x7b, 0xd0, 0x47, 0x13, 0x90, 0x8e, 0x66, 0x65, 0xf7, 0xb3, 0xe6, + 0xb1, 0xf9, 0xdb, 0xc3, 0x19, 0xb9, 0x5f, 0x67, 0xf2, 0xf8, 0x9c, 0x24, 0x1f, 0x4c, 0x13, 0xee, + 0x1a, 0x8e, 0x0b, 0xb7, 0x88, 0x3e, 0x91, 0x80, 0x74, 0x34, 0x97, 0xbb, 0x9f, 0x18, 0x62, 0xb3, + 0xbe, 0x87, 0x13, 0x83, 0xaf, 0x16, 0xe7, 0x0e, 0xaa, 0x16, 0xe7, 0xde, 0x49, 0xb5, 0xf8, 0x5e, + 0x02, 0x8e, 0xf6, 0xc8, 0xcb, 0x42, 0x7d, 0x22, 0xe3, 0xfd, 0x53, 0xb9, 0x86, 0x93, 0xd0, 0x1f, + 0x31, 0x09, 0x7d, 0x57, 0x92, 0x5f, 0x3a, 0x58, 0x08, 
0xc1, 0xe1, 0x83, 0x21, 0x1a, 0xb3, 0x27, + 0xd7, 0xde, 0x39, 0x69, 0x85, 0x7b, 0x42, 0x7f, 0x9c, 0x80, 0x23, 0xb1, 0x69, 0x8a, 0xe8, 0xa9, + 0xa1, 0x22, 0x2f, 0x5d, 0x79, 0x8d, 0xc3, 0x49, 0xed, 0xcf, 0x99, 0xd4, 0xfe, 0x44, 0x0a, 0x6d, + 0x8d, 0x51, 0x03, 0x2f, 0xc1, 0x10, 0x88, 0xe8, 0x5a, 0xf2, 0xde, 0x3b, 0x18, 0x77, 0xe9, 0xec, + 0x0e, 0x7d, 0x2c, 0x01, 0x53, 0x7e, 0x6a, 0x12, 0x3a, 0x37, 0x7c, 0xfe, 0xd2, 0x70, 0x72, 0xfa, + 0x1a, 0x93, 0xd3, 0x6f, 0x4a, 0x72, 0x61, 0x24, 0x47, 0x22, 0x9a, 0xbc, 0x44, 0xe4, 0xb3, 0x2d, + 0x97, 0x0f, 0x2e, 0x1f, 0xa7, 0xb3, 0x59, 0x22, 0x87, 0xd9, 0x48, 0x96, 0xd3, 0x00, 0xc7, 0xaa, + 0x2b, 0x1d, 0xea, 0x70, 0x22, 0x2d, 0x41, 0xb7, 0x77, 0x2d, 0xd2, 0xd2, 0x0c, 0x37, 0x89, 0x7e, + 0x29, 0x41, 0xaf, 0xe5, 0xc3, 0x89, 0x54, 0x7d, 0x23, 0x2d, 0xb1, 0x39, 0x57, 0xc3, 0xc9, 0xe2, + 0x6d, 0x26, 0x8b, 0x2f, 0x49, 0x72, 0x7e, 0x04, 0x59, 0xd0, 0x8e, 0x6d, 0xd1, 0x31, 0x91, 0xc6, + 0x65, 0xf9, 0xe5, 0xbb, 0xe0, 0x66, 0x76, 0xb5, 0x8b, 0xbe, 0x98, 0x00, 0xd4, 0x9d, 0x9f, 0x85, + 0xfa, 0xfc, 0xdc, 0x54, 0xcf, 0x6c, 0xae, 0xe1, 0x04, 0xf3, 0x2d, 0x26, 0x98, 0xaf, 0x4a, 0xf2, + 0xda, 0xfe, 0x05, 0x53, 0x13, 0x7d, 0x47, 0x64, 0xf3, 0xaa, 0x5c, 0x3d, 0xb8, 0x6c, 0x62, 0x9b, + 0x46, 0x6f, 0xb3, 0xd8, 0x5c, 0x38, 0x4b, 0x7a, 0x40, 0x6c, 0x2e, 0x26, 0xa1, 0x7a, 0x38, 0xc1, + 0x7c, 0x9f, 0x09, 0xe6, 0xf7, 0x24, 0x59, 0x39, 0xb0, 0xd5, 0x25, 0x9d, 0x13, 0xc9, 0x34, 0xe4, + 0xdd, 0x77, 0xd6, 0xde, 0xf2, 0x8e, 0xd0, 0xe7, 0x58, 0x9e, 0x6a, 0xf4, 0x3f, 0x3d, 0x38, 0xdf, + 0x5f, 0x50, 0x31, 0xd9, 0x65, 0xc3, 0x49, 0xea, 0xeb, 0x4c, 0x52, 0x5f, 0x96, 0xe4, 0xd5, 0x91, + 0xec, 0x4c, 0xa4, 0x67, 0x22, 0xa6, 0x2b, 0xb2, 0x7a, 0x57, 0x62, 0x38, 0x9d, 0x0d, 0xa3, 0xaf, + 0x8a, 0x27, 0xf0, 0x9d, 0x59, 0x5b, 0x83, 0x1e, 0xa5, 0xc7, 0xa7, 0xa6, 0x0d, 0x27, 0x9e, 0x6f, + 0x33, 0xf1, 0x7c, 0x63, 0x04, 0xd0, 0xc3, 0x8f, 0xec, 0x8e, 0xde, 0x89, 0x88, 0x3e, 0x20, 0x5f, + 0xbe, 0x4b, 0x61, 0xae, 0xee, 0xc6, 0xd1, 0x5f, 0xf2, 0xfc, 0xb3, 0xae, 0x7c, 0x9b, 0x7e, 0x38, + 0xa7, 0x5f, 0xb6, 0x50, 0xee, 0xe9, 0x7d, 0xf3, 0x71, 0x27, 0x33, 0x4f, 0x65, 0xf7, 0x1c, 0x7a, + 0x6f, 0x1f, 0xc7, 0xe2, 0xce, 0x8a, 0xde, 0x68, 0x38, 0xb8, 0xa1, 0x7b, 0xb8, 0xbe, 0xd2, 0xee, + 0x1a, 0xf3, 0x97, 0x25, 0xe6, 0x1a, 0x07, 0xc1, 0xec, 0x01, 0xae, 0x71, 0x57, 0x34, 0x7b, 0x65, + 0xe8, 0xfa, 0x7c, 0xd4, 0xcb, 0x74, 0xd4, 0x67, 0xd1, 0x83, 0x7d, 0x47, 0xed, 0xaf, 0xf9, 0xea, + 0xf7, 0x25, 0x38, 0x51, 0xb3, 0x5a, 0x3d, 0xbb, 0x59, 0x5d, 0x28, 0x88, 0xff, 0xd0, 0x82, 0x5e, + 0xb8, 0x94, 0x1d, 0xcb, 0xb3, 0xca, 0xd2, 0x4e, 0x9e, 0x33, 0x34, 0xac, 0xa6, 0x6e, 0x36, 0x96, + 0x2d, 0xa7, 0xb1, 0xd2, 0xc0, 0x26, 0x8d, 0xce, 0xac, 0xb0, 0x22, 0xdd, 0x36, 0xdc, 0xee, 0xff, + 0x9b, 0xf1, 0x39, 0x9f, 0xf2, 0xcd, 0xc4, 0xa9, 0x75, 0xd6, 0x06, 0xfd, 0x8f, 0x66, 0x96, 0x0b, + 0x7e, 0xd7, 0x97, 0xcf, 0xaf, 0x92, 0xaa, 0x3f, 0x14, 0x15, 0x5e, 0xa3, 0x15, 0x5e, 0xf3, 0x2b, + 0xbc, 0x76, 0x99, 0xb5, 0xb5, 0x3b, 0x41, 0xfb, 0x7b, 0xe2, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x29, 0xa8, 0xb0, 0xd5, 0x0a, 0x72, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/datastore_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/datastore_admin.pb.go new file mode 100644 index 0000000..33d67d2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/datastore_admin.pb.go @@ -0,0 +1,1225 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/datastore/admin/v1/datastore_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/datastore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Operation types. +type OperationType int32 + +const ( + // Unspecified. + OperationType_OPERATION_TYPE_UNSPECIFIED OperationType = 0 + // ExportEntities. + OperationType_EXPORT_ENTITIES OperationType = 1 + // ImportEntities. + OperationType_IMPORT_ENTITIES OperationType = 2 + // CreateIndex. + OperationType_CREATE_INDEX OperationType = 3 + // DeleteIndex. + OperationType_DELETE_INDEX OperationType = 4 +) + +var OperationType_name = map[int32]string{ + 0: "OPERATION_TYPE_UNSPECIFIED", + 1: "EXPORT_ENTITIES", + 2: "IMPORT_ENTITIES", + 3: "CREATE_INDEX", + 4: "DELETE_INDEX", +} +var OperationType_value = map[string]int32{ + "OPERATION_TYPE_UNSPECIFIED": 0, + "EXPORT_ENTITIES": 1, + "IMPORT_ENTITIES": 2, + "CREATE_INDEX": 3, + "DELETE_INDEX": 4, +} + +func (x OperationType) String() string { + return proto.EnumName(OperationType_name, int32(x)) +} +func (OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{0} +} + +// The various possible states for an ongoing Operation. +type CommonMetadata_State int32 + +const ( + // Unspecified. + CommonMetadata_STATE_UNSPECIFIED CommonMetadata_State = 0 + // Request is being prepared for processing. + CommonMetadata_INITIALIZING CommonMetadata_State = 1 + // Request is actively being processed. + CommonMetadata_PROCESSING CommonMetadata_State = 2 + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + CommonMetadata_CANCELLING CommonMetadata_State = 3 + // Request has been processed and is in its finalization stage. + CommonMetadata_FINALIZING CommonMetadata_State = 4 + // Request has completed successfully. + CommonMetadata_SUCCESSFUL CommonMetadata_State = 5 + // Request has finished being processed, but encountered an error. + CommonMetadata_FAILED CommonMetadata_State = 6 + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. 
+ CommonMetadata_CANCELLED CommonMetadata_State = 7 +) + +var CommonMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "INITIALIZING", + 2: "PROCESSING", + 3: "CANCELLING", + 4: "FINALIZING", + 5: "SUCCESSFUL", + 6: "FAILED", + 7: "CANCELLED", +} +var CommonMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "INITIALIZING": 1, + "PROCESSING": 2, + "CANCELLING": 3, + "FINALIZING": 4, + "SUCCESSFUL": 5, + "FAILED": 6, + "CANCELLED": 7, +} + +func (x CommonMetadata_State) String() string { + return proto.EnumName(CommonMetadata_State_name, int32(x)) +} +func (CommonMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{0, 0} +} + +// Metadata common to all Datastore Admin operations. +type CommonMetadata struct { + // The time that work began on the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the operation ended, either successfully or otherwise. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The type of the operation. Can be used as a filter in + // ListOperationsRequest. + OperationType OperationType `protobuf:"varint,3,opt,name=operation_type,json=operationType,proto3,enum=google.datastore.admin.v1.OperationType" json:"operation_type,omitempty"` + // The client-assigned labels which were provided when the operation was + // created. May also include additional labels. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The current state of the Operation. 
+ State CommonMetadata_State `protobuf:"varint,5,opt,name=state,proto3,enum=google.datastore.admin.v1.CommonMetadata_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommonMetadata) Reset() { *m = CommonMetadata{} } +func (m *CommonMetadata) String() string { return proto.CompactTextString(m) } +func (*CommonMetadata) ProtoMessage() {} +func (*CommonMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{0} +} +func (m *CommonMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommonMetadata.Unmarshal(m, b) +} +func (m *CommonMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommonMetadata.Marshal(b, m, deterministic) +} +func (dst *CommonMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommonMetadata.Merge(dst, src) +} +func (m *CommonMetadata) XXX_Size() int { + return xxx_messageInfo_CommonMetadata.Size(m) +} +func (m *CommonMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CommonMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CommonMetadata proto.InternalMessageInfo + +func (m *CommonMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *CommonMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *CommonMetadata) GetOperationType() OperationType { + if m != nil { + return m.OperationType + } + return OperationType_OPERATION_TYPE_UNSPECIFIED +} + +func (m *CommonMetadata) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *CommonMetadata) GetState() CommonMetadata_State { + if m != nil { + return m.State + } + return CommonMetadata_STATE_UNSPECIFIED +} + +// Measures the progress of a particular metric. +type Progress struct { + // The amount of work that has been completed. Note that this may be greater + // than work_estimated. + WorkCompleted int64 `protobuf:"varint,1,opt,name=work_completed,json=workCompleted,proto3" json:"work_completed,omitempty"` + // An estimate of how much work needs to be performed. May be zero if the + // work estimate is unavailable. 
+ WorkEstimated int64 `protobuf:"varint,2,opt,name=work_estimated,json=workEstimated,proto3" json:"work_estimated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Progress) Reset() { *m = Progress{} } +func (m *Progress) String() string { return proto.CompactTextString(m) } +func (*Progress) ProtoMessage() {} +func (*Progress) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{1} +} +func (m *Progress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Progress.Unmarshal(m, b) +} +func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Progress.Marshal(b, m, deterministic) +} +func (dst *Progress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Progress.Merge(dst, src) +} +func (m *Progress) XXX_Size() int { + return xxx_messageInfo_Progress.Size(m) +} +func (m *Progress) XXX_DiscardUnknown() { + xxx_messageInfo_Progress.DiscardUnknown(m) +} + +var xxx_messageInfo_Progress proto.InternalMessageInfo + +func (m *Progress) GetWorkCompleted() int64 { + if m != nil { + return m.WorkCompleted + } + return 0 +} + +func (m *Progress) GetWorkEstimated() int64 { + if m != nil { + return m.WorkEstimated + } + return 0 +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. +type ExportEntitiesRequest struct { + // Project ID against which to make the request. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Client-assigned labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Description of what data from the project is included in the export. + EntityFilter *EntityFilter `protobuf:"bytes,3,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // Location for the export metadata and data files. + // + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So output_url_prefix should be of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the + // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud + // Storage namespace path (this is not a Cloud Datastore namespace). For more + // information about Cloud Storage namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // The resulting files will be nested deeper than the specified URL prefix. + // The final output URL will be provided in the + // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] + // field. That value should be used for subsequent ImportEntities operations. + // + // By nesting the data files deeper, the same Cloud Storage bucket can be used + // in multiple ExportEntities operations without conflict. 
+ OutputUrlPrefix string `protobuf:"bytes,4,opt,name=output_url_prefix,json=outputUrlPrefix,proto3" json:"output_url_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesRequest) Reset() { *m = ExportEntitiesRequest{} } +func (m *ExportEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesRequest) ProtoMessage() {} +func (*ExportEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{2} +} +func (m *ExportEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesRequest.Unmarshal(m, b) +} +func (m *ExportEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesRequest.Merge(dst, src) +} +func (m *ExportEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesRequest.Size(m) +} +func (m *ExportEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesRequest proto.InternalMessageInfo + +func (m *ExportEntitiesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ExportEntitiesRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ExportEntitiesRequest) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ExportEntitiesRequest) GetOutputUrlPrefix() string { + if m != nil { + return m.OutputUrlPrefix + } + return "" +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1.DatastoreAdmin.ImportEntities]. +type ImportEntitiesRequest struct { + // Project ID against which to make the request. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Client-assigned labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So input_url should be of the form: + // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where + // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is + // an optional Cloud Storage namespace path (this is not a Cloud Datastore + // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written + // by the ExportEntities operation. For more information about Cloud Storage + // namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // For more information, see + // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. + InputUrl string `protobuf:"bytes,3,opt,name=input_url,json=inputUrl,proto3" json:"input_url,omitempty"` + // Optionally specify which kinds/namespaces are to be imported. If provided, + // the list must be a subset of the EntityFilter used in creating the export, + // otherwise a FAILED_PRECONDITION error will be returned. 
If no filter is + // specified then all entities from the export are imported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportEntitiesRequest) Reset() { *m = ImportEntitiesRequest{} } +func (m *ImportEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*ImportEntitiesRequest) ProtoMessage() {} +func (*ImportEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{3} +} +func (m *ImportEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportEntitiesRequest.Unmarshal(m, b) +} +func (m *ImportEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *ImportEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportEntitiesRequest.Merge(dst, src) +} +func (m *ImportEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_ImportEntitiesRequest.Size(m) +} +func (m *ImportEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportEntitiesRequest proto.InternalMessageInfo + +func (m *ImportEntitiesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ImportEntitiesRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ImportEntitiesRequest) GetInputUrl() string { + if m != nil { + return m.InputUrl + } + return "" +} + +func (m *ImportEntitiesRequest) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +// The response for +// [google.datastore.admin.v1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1.DatastoreAdmin.ExportEntities]. +type ExportEntitiesResponse struct { + // Location of the output metadata file. This can be used to begin an import + // into Cloud Datastore (this project or another project). See + // [google.datastore.admin.v1.ImportEntitiesRequest.input_url][google.datastore.admin.v1.ImportEntitiesRequest.input_url]. + // Only present if the operation completed successfully. 
+ OutputUrl string `protobuf:"bytes,1,opt,name=output_url,json=outputUrl,proto3" json:"output_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesResponse) Reset() { *m = ExportEntitiesResponse{} } +func (m *ExportEntitiesResponse) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesResponse) ProtoMessage() {} +func (*ExportEntitiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{4} +} +func (m *ExportEntitiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesResponse.Unmarshal(m, b) +} +func (m *ExportEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesResponse.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesResponse.Merge(dst, src) +} +func (m *ExportEntitiesResponse) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesResponse.Size(m) +} +func (m *ExportEntitiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesResponse proto.InternalMessageInfo + +func (m *ExportEntitiesResponse) GetOutputUrl() string { + if m != nil { + return m.OutputUrl + } + return "" +} + +// Metadata for ExportEntities operations. +type ExportEntitiesMetadata struct { + // Metadata common to all Datastore Admin operations. + Common *CommonMetadata `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` + // An estimate of the number of entities processed. + ProgressEntities *Progress `protobuf:"bytes,2,opt,name=progress_entities,json=progressEntities,proto3" json:"progress_entities,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,3,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Description of which entities are being exported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // Location for the export metadata and data files. This will be the same + // value as the + // [google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1.ExportEntitiesRequest.output_url_prefix] + // field. The final output location is provided in + // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url]. 
+ OutputUrlPrefix string `protobuf:"bytes,5,opt,name=output_url_prefix,json=outputUrlPrefix,proto3" json:"output_url_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesMetadata) Reset() { *m = ExportEntitiesMetadata{} } +func (m *ExportEntitiesMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesMetadata) ProtoMessage() {} +func (*ExportEntitiesMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{5} +} +func (m *ExportEntitiesMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesMetadata.Unmarshal(m, b) +} +func (m *ExportEntitiesMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesMetadata.Merge(dst, src) +} +func (m *ExportEntitiesMetadata) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesMetadata.Size(m) +} +func (m *ExportEntitiesMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesMetadata proto.InternalMessageInfo + +func (m *ExportEntitiesMetadata) GetCommon() *CommonMetadata { + if m != nil { + return m.Common + } + return nil +} + +func (m *ExportEntitiesMetadata) GetProgressEntities() *Progress { + if m != nil { + return m.ProgressEntities + } + return nil +} + +func (m *ExportEntitiesMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ExportEntitiesMetadata) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ExportEntitiesMetadata) GetOutputUrlPrefix() string { + if m != nil { + return m.OutputUrlPrefix + } + return "" +} + +// Metadata for ImportEntities operations. +type ImportEntitiesMetadata struct { + // Metadata common to all Datastore Admin operations. + Common *CommonMetadata `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` + // An estimate of the number of entities processed. + ProgressEntities *Progress `protobuf:"bytes,2,opt,name=progress_entities,json=progressEntities,proto3" json:"progress_entities,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,3,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Description of which entities are being imported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // The location of the import metadata file. This will be the same value as + // the + // [google.datastore.admin.v1.ExportEntitiesResponse.output_url][google.datastore.admin.v1.ExportEntitiesResponse.output_url] + // field. 
+ InputUrl string `protobuf:"bytes,5,opt,name=input_url,json=inputUrl,proto3" json:"input_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportEntitiesMetadata) Reset() { *m = ImportEntitiesMetadata{} } +func (m *ImportEntitiesMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportEntitiesMetadata) ProtoMessage() {} +func (*ImportEntitiesMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{6} +} +func (m *ImportEntitiesMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportEntitiesMetadata.Unmarshal(m, b) +} +func (m *ImportEntitiesMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportEntitiesMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportEntitiesMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportEntitiesMetadata.Merge(dst, src) +} +func (m *ImportEntitiesMetadata) XXX_Size() int { + return xxx_messageInfo_ImportEntitiesMetadata.Size(m) +} +func (m *ImportEntitiesMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportEntitiesMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportEntitiesMetadata proto.InternalMessageInfo + +func (m *ImportEntitiesMetadata) GetCommon() *CommonMetadata { + if m != nil { + return m.Common + } + return nil +} + +func (m *ImportEntitiesMetadata) GetProgressEntities() *Progress { + if m != nil { + return m.ProgressEntities + } + return nil +} + +func (m *ImportEntitiesMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ImportEntitiesMetadata) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ImportEntitiesMetadata) GetInputUrl() string { + if m != nil { + return m.InputUrl + } + return "" +} + +// Identifies a subset of entities in a project. This is specified as +// combinations of kinds and namespaces (either or both of which may be all, as +// described in the following examples). +// Example usage: +// +// Entire project: +// kinds=[], namespace_ids=[] +// +// Kinds Foo and Bar in all namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=[] +// +// Kinds Foo and Bar only in the default namespace: +// kinds=['Foo', 'Bar'], namespace_ids=[''] +// +// Kinds Foo and Bar in both the default and Baz namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] +// +// The entire Baz namespace: +// kinds=[], namespace_ids=['Baz'] +type EntityFilter struct { + // If empty, then this represents all kinds. + Kinds []string `protobuf:"bytes,1,rep,name=kinds,proto3" json:"kinds,omitempty"` + // An empty list represents all namespaces. This is the preferred + // usage for projects that don't use namespaces. + // + // An empty string element represents the default namespace. This should be + // used if the project has data in non-default namespaces, but doesn't want to + // include them. + // Each namespace in this list must be unique. 
+ NamespaceIds []string `protobuf:"bytes,2,rep,name=namespace_ids,json=namespaceIds,proto3" json:"namespace_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityFilter) Reset() { *m = EntityFilter{} } +func (m *EntityFilter) String() string { return proto.CompactTextString(m) } +func (*EntityFilter) ProtoMessage() {} +func (*EntityFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{7} +} +func (m *EntityFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityFilter.Unmarshal(m, b) +} +func (m *EntityFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityFilter.Marshal(b, m, deterministic) +} +func (dst *EntityFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityFilter.Merge(dst, src) +} +func (m *EntityFilter) XXX_Size() int { + return xxx_messageInfo_EntityFilter.Size(m) +} +func (m *EntityFilter) XXX_DiscardUnknown() { + xxx_messageInfo_EntityFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityFilter proto.InternalMessageInfo + +func (m *EntityFilter) GetKinds() []string { + if m != nil { + return m.Kinds + } + return nil +} + +func (m *EntityFilter) GetNamespaceIds() []string { + if m != nil { + return m.NamespaceIds + } + return nil +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.GetIndex][google.datastore.admin.v1.DatastoreAdmin.GetIndex]. +type GetIndexRequest struct { + // Project ID against which to make the request. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The resource ID of the index to get. + IndexId string `protobuf:"bytes,3,opt,name=index_id,json=indexId,proto3" json:"index_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIndexRequest) Reset() { *m = GetIndexRequest{} } +func (m *GetIndexRequest) String() string { return proto.CompactTextString(m) } +func (*GetIndexRequest) ProtoMessage() {} +func (*GetIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{8} +} +func (m *GetIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIndexRequest.Unmarshal(m, b) +} +func (m *GetIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIndexRequest.Marshal(b, m, deterministic) +} +func (dst *GetIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIndexRequest.Merge(dst, src) +} +func (m *GetIndexRequest) XXX_Size() int { + return xxx_messageInfo_GetIndexRequest.Size(m) +} +func (m *GetIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIndexRequest proto.InternalMessageInfo + +func (m *GetIndexRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetIndexRequest) GetIndexId() string { + if m != nil { + return m.IndexId + } + return "" +} + +// The request for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +type ListIndexesRequest struct { + // Project ID against which to make the request. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // The maximum number of items to return. If zero, then all results will be + // returned. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The next_page_token value returned from a previous List request, if any. + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesRequest) Reset() { *m = ListIndexesRequest{} } +func (m *ListIndexesRequest) String() string { return proto.CompactTextString(m) } +func (*ListIndexesRequest) ProtoMessage() {} +func (*ListIndexesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{9} +} +func (m *ListIndexesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesRequest.Unmarshal(m, b) +} +func (m *ListIndexesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesRequest.Marshal(b, m, deterministic) +} +func (dst *ListIndexesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesRequest.Merge(dst, src) +} +func (m *ListIndexesRequest) XXX_Size() int { + return xxx_messageInfo_ListIndexesRequest.Size(m) +} +func (m *ListIndexesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesRequest proto.InternalMessageInfo + +func (m *ListIndexesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListIndexesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListIndexesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIndexesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for +// [google.datastore.admin.v1.DatastoreAdmin.ListIndexes][google.datastore.admin.v1.DatastoreAdmin.ListIndexes]. +type ListIndexesResponse struct { + // The indexes. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // The standard List next-page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesResponse) Reset() { *m = ListIndexesResponse{} } +func (m *ListIndexesResponse) String() string { return proto.CompactTextString(m) } +func (*ListIndexesResponse) ProtoMessage() {} +func (*ListIndexesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{10} +} +func (m *ListIndexesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesResponse.Unmarshal(m, b) +} +func (m *ListIndexesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesResponse.Marshal(b, m, deterministic) +} +func (dst *ListIndexesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesResponse.Merge(dst, src) +} +func (m *ListIndexesResponse) XXX_Size() int { + return xxx_messageInfo_ListIndexesResponse.Size(m) +} +func (m *ListIndexesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesResponse proto.InternalMessageInfo + +func (m *ListIndexesResponse) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *ListIndexesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Metadata for Index operations. +type IndexOperationMetadata struct { + // Metadata common to all Datastore Admin operations. + Common *CommonMetadata `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` + // An estimate of the number of entities processed. + ProgressEntities *Progress `protobuf:"bytes,2,opt,name=progress_entities,json=progressEntities,proto3" json:"progress_entities,omitempty"` + // The index resource ID that this operation is acting on. 
+ IndexId string `protobuf:"bytes,3,opt,name=index_id,json=indexId,proto3" json:"index_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IndexOperationMetadata) Reset() { *m = IndexOperationMetadata{} } +func (m *IndexOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*IndexOperationMetadata) ProtoMessage() {} +func (*IndexOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_35408641d3652505, []int{11} +} +func (m *IndexOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IndexOperationMetadata.Unmarshal(m, b) +} +func (m *IndexOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IndexOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *IndexOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_IndexOperationMetadata.Merge(dst, src) +} +func (m *IndexOperationMetadata) XXX_Size() int { + return xxx_messageInfo_IndexOperationMetadata.Size(m) +} +func (m *IndexOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_IndexOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_IndexOperationMetadata proto.InternalMessageInfo + +func (m *IndexOperationMetadata) GetCommon() *CommonMetadata { + if m != nil { + return m.Common + } + return nil +} + +func (m *IndexOperationMetadata) GetProgressEntities() *Progress { + if m != nil { + return m.ProgressEntities + } + return nil +} + +func (m *IndexOperationMetadata) GetIndexId() string { + if m != nil { + return m.IndexId + } + return "" +} + +func init() { + proto.RegisterType((*CommonMetadata)(nil), "google.datastore.admin.v1.CommonMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1.CommonMetadata.LabelsEntry") + proto.RegisterType((*Progress)(nil), "google.datastore.admin.v1.Progress") + proto.RegisterType((*ExportEntitiesRequest)(nil), "google.datastore.admin.v1.ExportEntitiesRequest") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1.ExportEntitiesRequest.LabelsEntry") + proto.RegisterType((*ImportEntitiesRequest)(nil), "google.datastore.admin.v1.ImportEntitiesRequest") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1.ImportEntitiesRequest.LabelsEntry") + proto.RegisterType((*ExportEntitiesResponse)(nil), "google.datastore.admin.v1.ExportEntitiesResponse") + proto.RegisterType((*ExportEntitiesMetadata)(nil), "google.datastore.admin.v1.ExportEntitiesMetadata") + proto.RegisterType((*ImportEntitiesMetadata)(nil), "google.datastore.admin.v1.ImportEntitiesMetadata") + proto.RegisterType((*EntityFilter)(nil), "google.datastore.admin.v1.EntityFilter") + proto.RegisterType((*GetIndexRequest)(nil), "google.datastore.admin.v1.GetIndexRequest") + proto.RegisterType((*ListIndexesRequest)(nil), "google.datastore.admin.v1.ListIndexesRequest") + proto.RegisterType((*ListIndexesResponse)(nil), "google.datastore.admin.v1.ListIndexesResponse") + proto.RegisterType((*IndexOperationMetadata)(nil), "google.datastore.admin.v1.IndexOperationMetadata") + proto.RegisterEnum("google.datastore.admin.v1.OperationType", OperationType_name, OperationType_value) + proto.RegisterEnum("google.datastore.admin.v1.CommonMetadata_State", CommonMetadata_State_name, CommonMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatastoreAdminClient is the client API for DatastoreAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatastoreAdminClient interface { + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportEntities(ctx context.Context, in *ExportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + ImportEntities(ctx context.Context, in *ImportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets an index. + GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) + // Lists the indexes that match the specified filters. Datastore uses an + // eventually consistent query to fetch the list of indexes and may + // occasionally return stale results. + ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) +} + +type datastoreAdminClient struct { + cc *grpc.ClientConn +} + +func NewDatastoreAdminClient(cc *grpc.ClientConn) DatastoreAdminClient { + return &datastoreAdminClient{cc} +} + +func (c *datastoreAdminClient) ExportEntities(ctx context.Context, in *ExportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreAdminClient) ImportEntities(ctx context.Context, in *ImportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreAdminClient) GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) { + out := new(Index) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreAdminClient) ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) { + out := new(ListIndexesResponse) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatastoreAdminServer is the server API for DatastoreAdmin service. +type DatastoreAdminServer interface { + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportEntities(context.Context, *ExportEntitiesRequest) (*longrunning.Operation, error) + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + ImportEntities(context.Context, *ImportEntitiesRequest) (*longrunning.Operation, error) + // Gets an index. + GetIndex(context.Context, *GetIndexRequest) (*Index, error) + // Lists the indexes that match the specified filters. Datastore uses an + // eventually consistent query to fetch the list of indexes and may + // occasionally return stale results. 
+ ListIndexes(context.Context, *ListIndexesRequest) (*ListIndexesResponse, error) +} + +func RegisterDatastoreAdminServer(s *grpc.Server, srv DatastoreAdminServer) { + s.RegisterService(&_DatastoreAdmin_serviceDesc, srv) +} + +func _DatastoreAdmin_ExportEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).ExportEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1.DatastoreAdmin/ExportEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).ExportEntities(ctx, req.(*ExportEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatastoreAdmin_ImportEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).ImportEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1.DatastoreAdmin/ImportEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).ImportEntities(ctx, req.(*ImportEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatastoreAdmin_GetIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).GetIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1.DatastoreAdmin/GetIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).GetIndex(ctx, req.(*GetIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatastoreAdmin_ListIndexes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIndexesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).ListIndexes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1.DatastoreAdmin/ListIndexes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).ListIndexes(ctx, req.(*ListIndexesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DatastoreAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.datastore.admin.v1.DatastoreAdmin", + HandlerType: (*DatastoreAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ExportEntities", + Handler: _DatastoreAdmin_ExportEntities_Handler, + }, + { + MethodName: "ImportEntities", + Handler: _DatastoreAdmin_ImportEntities_Handler, + }, + { + MethodName: "GetIndex", + Handler: _DatastoreAdmin_GetIndex_Handler, + }, + { + MethodName: "ListIndexes", + Handler: _DatastoreAdmin_ListIndexes_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, 
+ Metadata: "google/datastore/admin/v1/datastore_admin.proto", +} + +func init() { + proto.RegisterFile("google/datastore/admin/v1/datastore_admin.proto", fileDescriptor_datastore_admin_35408641d3652505) +} + +var fileDescriptor_datastore_admin_35408641d3652505 = []byte{ + // 1216 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x57, 0xcd, 0x92, 0xdb, 0xc4, + 0x13, 0xff, 0x4b, 0xfe, 0x88, 0xdd, 0x5e, 0x7b, 0x9d, 0xc9, 0x3f, 0x5b, 0x8e, 0x21, 0xc4, 0x68, + 0x2b, 0xb0, 0x59, 0x40, 0x22, 0x86, 0x14, 0x64, 0xa1, 0xa8, 0x72, 0x6c, 0x6d, 0x4a, 0xe0, 0xd8, + 0x2e, 0x59, 0x4b, 0x85, 0x5c, 0x54, 0xda, 0xd5, 0xc4, 0x25, 0x56, 0xd6, 0x08, 0x69, 0x1c, 0xd6, + 0x49, 0xe5, 0xc2, 0x85, 0x03, 0x55, 0x5c, 0x38, 0xc0, 0x33, 0x70, 0xcc, 0x1b, 0x70, 0xe3, 0xce, + 0x99, 0x5b, 0xde, 0x80, 0x17, 0xa0, 0x66, 0xf4, 0xb1, 0xf6, 0xc6, 0xeb, 0x35, 0x6c, 0xa8, 0xe2, + 0xc0, 0xcd, 0xdd, 0xd3, 0xbf, 0xee, 0x9e, 0xdf, 0x74, 0xb7, 0x5b, 0xa0, 0x8c, 0x08, 0x19, 0xb9, + 0x58, 0xb1, 0x2d, 0x6a, 0x85, 0x94, 0x04, 0x58, 0xb1, 0xec, 0xb1, 0xe3, 0x29, 0x8f, 0x6e, 0x1e, + 0xab, 0x4c, 0xae, 0x92, 0xfd, 0x80, 0x50, 0x82, 0xae, 0x44, 0x00, 0x39, 0x3d, 0x95, 0xa3, 0xd3, + 0x47, 0x37, 0xeb, 0xaf, 0xc6, 0xbe, 0x2c, 0xdf, 0x51, 0x2c, 0xcf, 0x23, 0xd4, 0xa2, 0x0e, 0xf1, + 0xc2, 0x08, 0x58, 0xbf, 0x7e, 0x7a, 0x24, 0xc7, 0xb3, 0xf1, 0x51, 0x6c, 0xb6, 0x19, 0x9b, 0xb9, + 0xc4, 0x1b, 0x05, 0x13, 0xcf, 0x73, 0xbc, 0x91, 0x42, 0x7c, 0x1c, 0xcc, 0xf9, 0xba, 0x16, 0x1b, + 0x71, 0x69, 0x7f, 0xf2, 0x50, 0xa1, 0xce, 0x18, 0x87, 0xd4, 0x1a, 0xfb, 0x91, 0x81, 0xf4, 0x2c, + 0x0b, 0x95, 0x36, 0x19, 0x8f, 0x89, 0x77, 0x0f, 0x53, 0x8b, 0x85, 0x44, 0xb7, 0x01, 0x42, 0x6a, + 0x05, 0xd4, 0x64, 0xb6, 0x35, 0xa1, 0x21, 0x6c, 0x95, 0x9a, 0x75, 0x39, 0xbe, 0x4d, 0xe2, 0x48, + 0x36, 0x12, 0x47, 0x7a, 0x91, 0x5b, 0x33, 0x19, 0xdd, 0x82, 0x02, 0xf6, 0xec, 0x08, 0x28, 0x9e, + 0x09, 0xbc, 0x80, 0x3d, 0x9b, 0xc3, 0xfa, 0x50, 0x49, 0x33, 0x37, 0xe9, 0xd4, 0xc7, 0xb5, 0x4c, + 0x43, 0xd8, 0xaa, 0x34, 0xb7, 0xe4, 0x53, 0x39, 0x94, 0xfb, 0x09, 0xc0, 0x98, 0xfa, 0x58, 0x2f, + 0x93, 0x59, 0x11, 0xdd, 0x83, 0xbc, 0x6b, 0xed, 0x63, 0x37, 0xac, 0x65, 0x1b, 0x99, 0xad, 0x52, + 0xf3, 0xd6, 0x12, 0x47, 0xf3, 0xb7, 0x97, 0xbb, 0x1c, 0xa7, 0x7a, 0x34, 0x98, 0xea, 0xb1, 0x13, + 0xa4, 0x42, 0x2e, 0xa4, 0x16, 0xc5, 0xb5, 0x1c, 0x4f, 0x4b, 0x59, 0xdd, 0xdb, 0x90, 0xc1, 0xf4, + 0x08, 0x5d, 0xbf, 0x0d, 0xa5, 0x19, 0xef, 0xa8, 0x0a, 0x99, 0x43, 0x3c, 0xe5, 0x04, 0x17, 0x75, + 0xf6, 0x13, 0xfd, 0x1f, 0x72, 0x8f, 0x2c, 0x77, 0x12, 0x71, 0x57, 0xd4, 0x23, 0x61, 0x47, 0xfc, + 0x50, 0x90, 0xbe, 0x13, 0x20, 0xc7, 0x7d, 0xa1, 0xcb, 0x70, 0x71, 0x68, 0xb4, 0x0c, 0xd5, 0xdc, + 0xeb, 0x0d, 0x07, 0x6a, 0x5b, 0xdb, 0xd5, 0xd4, 0x4e, 0xf5, 0x7f, 0xa8, 0x0a, 0x6b, 0x5a, 0x4f, + 0x33, 0xb4, 0x56, 0x57, 0x7b, 0xa0, 0xf5, 0xee, 0x56, 0x05, 0x54, 0x01, 0x18, 0xe8, 0xfd, 0xb6, + 0x3a, 0x1c, 0x32, 0x59, 0x64, 0x72, 0xbb, 0xd5, 0x6b, 0xab, 0xdd, 0x2e, 0x93, 0x33, 0x4c, 0xde, + 0xd5, 0x7a, 0x89, 0x7d, 0x96, 0xc9, 0xc3, 0xbd, 0x36, 0xb3, 0xdf, 0xdd, 0xeb, 0x56, 0x73, 0x08, + 0x20, 0xbf, 0xdb, 0xd2, 0xba, 0x6a, 0xa7, 0x9a, 0x47, 0x65, 0x28, 0xc6, 0x58, 0xb5, 0x53, 0xbd, + 0x20, 0xdd, 0x87, 0xc2, 0x20, 0x20, 0xa3, 0x00, 0x87, 0x21, 0xba, 0x0e, 0x95, 0xaf, 0x49, 0x70, + 0x68, 0x1e, 0x90, 0xb1, 0xef, 0x62, 0x8a, 0x6d, 0x7e, 0xa1, 0x8c, 0x5e, 0x66, 0xda, 0x76, 0xa2, + 0x4c, 0xcd, 0x70, 0x48, 0x9d, 0xb1, 0xc5, 0xcc, 0xc4, 0x63, 0x33, 0x35, 0x51, 0x4a, 0xbf, 0x88, + 0x70, 0x59, 0x3d, 0xf2, 0x49, 0x40, 0x55, 0x8f, 0x3a, 0xd4, 0xc1, 0xa1, 0x8e, 0xbf, 0x9a, 0xe0, 
+ 0x90, 0xa2, 0xab, 0x00, 0x7e, 0x40, 0xbe, 0xc4, 0x07, 0xd4, 0x74, 0xec, 0x98, 0xb4, 0x62, 0xac, + 0xd1, 0x6c, 0x64, 0xa4, 0x2f, 0x2e, 0xf2, 0x17, 0xff, 0x78, 0xc9, 0x1b, 0x2d, 0x0c, 0xb0, 0xf0, + 0xe1, 0xbb, 0x50, 0xc6, 0xcc, 0x6c, 0x6a, 0x3e, 0x74, 0x5c, 0x8a, 0x03, 0x5e, 0x97, 0xa5, 0xe6, + 0x9b, 0xcb, 0x9c, 0x73, 0xfb, 0x5d, 0x6e, 0xae, 0xaf, 0xe1, 0x19, 0x09, 0x6d, 0xc3, 0x45, 0x32, + 0xa1, 0xfe, 0x84, 0x9a, 0x93, 0xc0, 0x35, 0xfd, 0x00, 0x3f, 0x74, 0x8e, 0x6a, 0x59, 0x7e, 0x93, + 0xf5, 0xe8, 0x60, 0x2f, 0x70, 0x07, 0x5c, 0x7d, 0x9e, 0x5a, 0x79, 0x26, 0xc2, 0x65, 0x6d, 0xfc, + 0x0f, 0x73, 0xb8, 0x30, 0xc0, 0x42, 0x0e, 0x5f, 0x81, 0xa2, 0xe3, 0xc5, 0x97, 0xe6, 0xfc, 0x15, + 0xf5, 0x02, 0x57, 0xec, 0x05, 0xee, 0x8b, 0x04, 0x67, 0xcf, 0x41, 0xf0, 0x79, 0x48, 0xfb, 0x00, + 0x36, 0x4e, 0x96, 0x45, 0xe8, 0x13, 0x2f, 0xc4, 0x8c, 0xb4, 0xe3, 0x57, 0x4b, 0x48, 0x4b, 0x9f, + 0x4b, 0xfa, 0x43, 0x3c, 0x89, 0x4c, 0x07, 0x69, 0x0b, 0xf2, 0x07, 0x7c, 0x1c, 0xc4, 0x43, 0xf4, + 0xc6, 0xca, 0x73, 0x43, 0x8f, 0x81, 0x68, 0x00, 0x17, 0xfd, 0xb8, 0xd3, 0x4c, 0x1c, 0xfb, 0x8f, + 0x27, 0xeb, 0xe6, 0x12, 0x6f, 0x49, 0x77, 0xea, 0xd5, 0x04, 0x9d, 0x24, 0x87, 0x3e, 0x85, 0x4a, + 0xea, 0x71, 0x7f, 0x4a, 0x71, 0x18, 0xd7, 0xf4, 0x4a, 0xee, 0xca, 0x09, 0xf4, 0x0e, 0x43, 0xbe, + 0xdc, 0xd7, 0x5b, 0xdc, 0x1e, 0xb9, 0x85, 0xed, 0x21, 0x3d, 0x17, 0x61, 0x63, 0xbe, 0x04, 0xff, + 0x63, 0xfd, 0xef, 0xb1, 0x3e, 0xd7, 0x9e, 0xb9, 0xf9, 0xf6, 0x94, 0x34, 0x58, 0x9b, 0x85, 0xb2, + 0xfe, 0x39, 0x74, 0x3c, 0x3b, 0xac, 0x09, 0x8d, 0x0c, 0xeb, 0x1f, 0x2e, 0xa0, 0x4d, 0x28, 0x7b, + 0xd6, 0x18, 0x87, 0xbe, 0x75, 0x80, 0x4d, 0xc7, 0x8e, 0xc6, 0x47, 0x51, 0x5f, 0x4b, 0x95, 0x9a, + 0x1d, 0x4a, 0x9f, 0xc1, 0xfa, 0x5d, 0x4c, 0x35, 0xb6, 0xc0, 0xac, 0x38, 0x8e, 0xae, 0x40, 0x81, + 0xef, 0x3b, 0xec, 0x30, 0x9a, 0x1b, 0x17, 0xb8, 0xac, 0xd9, 0xd2, 0xb7, 0x02, 0xa0, 0xae, 0x13, + 0x46, 0xee, 0x56, 0x9e, 0x6f, 0x1b, 0x90, 0x9f, 0x19, 0xe3, 0x45, 0x3d, 0x96, 0x18, 0x05, 0xbe, + 0x35, 0xc2, 0x66, 0xe8, 0x3c, 0xc6, 0x9c, 0xcc, 0x9c, 0x5e, 0x60, 0x8a, 0xa1, 0xf3, 0x98, 0xb7, + 0x3f, 0x3f, 0xa4, 0xe4, 0x10, 0x7b, 0x31, 0x41, 0xdc, 0xdc, 0x60, 0x0a, 0x69, 0x0a, 0x97, 0xe6, + 0x12, 0x89, 0x87, 0xc6, 0x0e, 0x44, 0xb9, 0xe2, 0x88, 0xaa, 0x52, 0xb3, 0xb1, 0x6c, 0x96, 0x72, + 0x52, 0x12, 0x00, 0x7a, 0x03, 0xd6, 0x3d, 0x7c, 0x44, 0xcd, 0x99, 0xb0, 0xd1, 0xb8, 0x2a, 0x33, + 0xf5, 0x20, 0x0d, 0xfd, 0xab, 0x00, 0x1b, 0x1c, 0x9a, 0xae, 0x42, 0xff, 0xee, 0x1e, 0x38, 0xfd, + 0x3d, 0xb7, 0x9f, 0x42, 0x79, 0x6e, 0x9f, 0x43, 0xaf, 0x41, 0xbd, 0x3f, 0x50, 0xf5, 0x96, 0xa1, + 0xf5, 0x7b, 0xa6, 0xf1, 0xc5, 0xe0, 0xe4, 0xba, 0x73, 0x09, 0xd6, 0xd5, 0xfb, 0x83, 0xbe, 0x6e, + 0x98, 0x6a, 0xcf, 0xd0, 0x0c, 0x4d, 0x1d, 0x56, 0x05, 0xa6, 0xd4, 0xee, 0xcd, 0x2b, 0x45, 0xb6, + 0x18, 0xb5, 0x75, 0x95, 0x2d, 0x4c, 0x5a, 0xaf, 0xa3, 0xde, 0xaf, 0x66, 0x98, 0xa6, 0xa3, 0x76, + 0xd5, 0x54, 0x93, 0x6d, 0xfe, 0x9e, 0x85, 0x4a, 0x27, 0xb9, 0x4b, 0x8b, 0x5d, 0x05, 0x7d, 0x2f, + 0x40, 0x65, 0x7e, 0xac, 0xa3, 0x77, 0xff, 0xea, 0x4a, 0x51, 0xbf, 0x9a, 0x20, 0x66, 0x76, 0xf4, + 0xe3, 0xc5, 0x55, 0x7a, 0xeb, 0x9b, 0xdf, 0x9e, 0xff, 0x20, 0x5e, 0x97, 0x1a, 0x6c, 0xb5, 0x8f, + 0xcb, 0x34, 0x54, 0x9e, 0x1c, 0x97, 0xf0, 0xd3, 0x1d, 0xcc, 0xfd, 0xee, 0x08, 0xdb, 0x3c, 0xa1, + 0xf9, 0x89, 0xb7, 0x34, 0xa1, 0x85, 0xff, 0xcf, 0x2f, 0x21, 0x21, 0x67, 0x3c, 0x93, 0x50, 0x21, + 0xe9, 0x68, 0xb4, 0xbd, 0x24, 0x95, 0x13, 0x6d, 0x5f, 0x3f, 0xb3, 0x15, 0xa4, 0xf7, 0x79, 0x1e, + 0x32, 0x7a, 0xfb, 0xd4, 0x3c, 0x94, 0xb8, 0x59, 0x94, 0x27, 0x49, 0x49, 0x3d, 0x45, 0x3f, 0x0a, + 0x50, 0x9a, 0xe9, 0x45, 
0xf4, 0xce, 0x92, 0x38, 0x2f, 0x0e, 0x8f, 0xba, 0xbc, 0xaa, 0x79, 0xd4, + 0xe2, 0xd2, 0x0d, 0x9e, 0xe4, 0x26, 0x7a, 0xfd, 0xcc, 0x24, 0xef, 0xfc, 0x24, 0xc0, 0xd5, 0x03, + 0x32, 0x3e, 0x3d, 0xc0, 0x9d, 0x4b, 0xf3, 0xe5, 0x37, 0x60, 0x1f, 0x4b, 0x03, 0xe1, 0xc1, 0x27, + 0x31, 0x62, 0x44, 0x5c, 0xcb, 0x1b, 0xc9, 0x24, 0x18, 0x29, 0x23, 0xec, 0xf1, 0x4f, 0xa9, 0xf8, + 0x7b, 0xd4, 0xf2, 0x9d, 0x70, 0xc1, 0x97, 0xe2, 0x47, 0xfc, 0xc7, 0xcf, 0xe2, 0xb5, 0xbb, 0x91, + 0x83, 0xb6, 0x4b, 0x26, 0xb6, 0x9c, 0x06, 0x91, 0x79, 0x14, 0xf9, 0xf3, 0x9b, 0xfb, 0x79, 0xee, + 0xec, 0xbd, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x9b, 0xe0, 0xf0, 0x1a, 0xdf, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/index.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/index.pb.go new file mode 100644 index 0000000..25cbfa2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1/index.pb.go @@ -0,0 +1,324 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/admin/v1/index.proto + +package admin // import "google.golang.org/genproto/googleapis/datastore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// For an ordered index, specifies whether each of the entity's ancestors +// will be included. +type Index_AncestorMode int32 + +const ( + // The ancestor mode is unspecified. + Index_ANCESTOR_MODE_UNSPECIFIED Index_AncestorMode = 0 + // Do not include the entity's ancestors in the index. + Index_NONE Index_AncestorMode = 1 + // Include all the entity's ancestors in the index. + Index_ALL_ANCESTORS Index_AncestorMode = 2 +) + +var Index_AncestorMode_name = map[int32]string{ + 0: "ANCESTOR_MODE_UNSPECIFIED", + 1: "NONE", + 2: "ALL_ANCESTORS", +} +var Index_AncestorMode_value = map[string]int32{ + "ANCESTOR_MODE_UNSPECIFIED": 0, + "NONE": 1, + "ALL_ANCESTORS": 2, +} + +func (x Index_AncestorMode) String() string { + return proto.EnumName(Index_AncestorMode_name, int32(x)) +} +func (Index_AncestorMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_06304ce39414dba4, []int{0, 0} +} + +// The direction determines how a property is indexed. +type Index_Direction int32 + +const ( + // The direction is unspecified. + Index_DIRECTION_UNSPECIFIED Index_Direction = 0 + // The property's values are indexed so as to support sequencing in + // ascending order and also query by <, >, <=, >=, and =. + Index_ASCENDING Index_Direction = 1 + // The property's values are indexed so as to support sequencing in + // descending order and also query by <, >, <=, >=, and =. 
+ Index_DESCENDING Index_Direction = 2 +) + +var Index_Direction_name = map[int32]string{ + 0: "DIRECTION_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var Index_Direction_value = map[string]int32{ + "DIRECTION_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x Index_Direction) String() string { + return proto.EnumName(Index_Direction_name, int32(x)) +} +func (Index_Direction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_06304ce39414dba4, []int{0, 1} +} + +// The possible set of states of an index. +type Index_State int32 + +const ( + // The state is unspecified. + Index_STATE_UNSPECIFIED Index_State = 0 + // The index is being created, and cannot be used by queries. + // There is an active long-running operation for the index. + // The index is updated when writing an entity. + // Some index data may exist. + Index_CREATING Index_State = 1 + // The index is ready to be used. + // The index is updated when writing an entity. + // The index is fully populated from all stored entities it applies to. + Index_READY Index_State = 2 + // The index is being deleted, and cannot be used by queries. + // There is an active long-running operation for the index. + // The index is not updated when writing an entity. + // Some index data may exist. + Index_DELETING Index_State = 3 + // The index was being created or deleted, but something went wrong. + // The index cannot by used by queries. + // There is no active long-running operation for the index, + // and the most recently finished long-running operation failed. + // The index is not updated when writing an entity. + // Some index data may exist. + Index_ERROR Index_State = 4 +) + +var Index_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", + 3: "DELETING", + 4: "ERROR", +} +var Index_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, + "DELETING": 3, + "ERROR": 4, +} + +func (x Index_State) String() string { + return proto.EnumName(Index_State_name, int32(x)) +} +func (Index_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_06304ce39414dba4, []int{0, 2} +} + +// A minimal index definition. +// Next tag: 8 +type Index struct { + // Project ID. + // Output only. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The resource ID of the index. + // Output only. + IndexId string `protobuf:"bytes,3,opt,name=index_id,json=indexId,proto3" json:"index_id,omitempty"` + // The entity kind to which this index applies. + // Required. + Kind string `protobuf:"bytes,4,opt,name=kind,proto3" json:"kind,omitempty"` + // The index's ancestor mode. Must not be ANCESTOR_MODE_UNSPECIFIED. + // Required. + Ancestor Index_AncestorMode `protobuf:"varint,5,opt,name=ancestor,proto3,enum=google.datastore.admin.v1.Index_AncestorMode" json:"ancestor,omitempty"` + // An ordered sequence of property names and their index attributes. + // Required. + Properties []*Index_IndexedProperty `protobuf:"bytes,6,rep,name=properties,proto3" json:"properties,omitempty"` + // The state of the index. + // Output only. 
+ State Index_State `protobuf:"varint,7,opt,name=state,proto3,enum=google.datastore.admin.v1.Index_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index) Reset() { *m = Index{} } +func (m *Index) String() string { return proto.CompactTextString(m) } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_index_06304ce39414dba4, []int{0} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index.Unmarshal(m, b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) +} +func (dst *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(dst, src) +} +func (m *Index) XXX_Size() int { + return xxx_messageInfo_Index.Size(m) +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Index) GetIndexId() string { + if m != nil { + return m.IndexId + } + return "" +} + +func (m *Index) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +func (m *Index) GetAncestor() Index_AncestorMode { + if m != nil { + return m.Ancestor + } + return Index_ANCESTOR_MODE_UNSPECIFIED +} + +func (m *Index) GetProperties() []*Index_IndexedProperty { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Index) GetState() Index_State { + if m != nil { + return m.State + } + return Index_STATE_UNSPECIFIED +} + +// Next tag: 3 +type Index_IndexedProperty struct { + // The property name to index. + // Required. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The indexed property's direction. Must not be DIRECTION_UNSPECIFIED. + // Required. 
+ Direction Index_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=google.datastore.admin.v1.Index_Direction" json:"direction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index_IndexedProperty) Reset() { *m = Index_IndexedProperty{} } +func (m *Index_IndexedProperty) String() string { return proto.CompactTextString(m) } +func (*Index_IndexedProperty) ProtoMessage() {} +func (*Index_IndexedProperty) Descriptor() ([]byte, []int) { + return fileDescriptor_index_06304ce39414dba4, []int{0, 0} +} +func (m *Index_IndexedProperty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index_IndexedProperty.Unmarshal(m, b) +} +func (m *Index_IndexedProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index_IndexedProperty.Marshal(b, m, deterministic) +} +func (dst *Index_IndexedProperty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index_IndexedProperty.Merge(dst, src) +} +func (m *Index_IndexedProperty) XXX_Size() int { + return xxx_messageInfo_Index_IndexedProperty.Size(m) +} +func (m *Index_IndexedProperty) XXX_DiscardUnknown() { + xxx_messageInfo_Index_IndexedProperty.DiscardUnknown(m) +} + +var xxx_messageInfo_Index_IndexedProperty proto.InternalMessageInfo + +func (m *Index_IndexedProperty) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Index_IndexedProperty) GetDirection() Index_Direction { + if m != nil { + return m.Direction + } + return Index_DIRECTION_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Index)(nil), "google.datastore.admin.v1.Index") + proto.RegisterType((*Index_IndexedProperty)(nil), "google.datastore.admin.v1.Index.IndexedProperty") + proto.RegisterEnum("google.datastore.admin.v1.Index_AncestorMode", Index_AncestorMode_name, Index_AncestorMode_value) + proto.RegisterEnum("google.datastore.admin.v1.Index_Direction", Index_Direction_name, Index_Direction_value) + proto.RegisterEnum("google.datastore.admin.v1.Index_State", Index_State_name, Index_State_value) +} + +func init() { + proto.RegisterFile("google/datastore/admin/v1/index.proto", fileDescriptor_index_06304ce39414dba4) +} + +var fileDescriptor_index_06304ce39414dba4 = []byte{ + // 492 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0x6f, 0x6b, 0xd3, 0x4e, + 0x1c, 0xff, 0xa5, 0x6d, 0xb6, 0xe6, 0xfb, 0xdb, 0x66, 0x76, 0x30, 0x48, 0x87, 0xc5, 0x52, 0x50, + 0x8a, 0x60, 0x62, 0xe7, 0x43, 0x45, 0xc8, 0x92, 0x73, 0x46, 0xba, 0x34, 0x5c, 0xa2, 0xa0, 0x4f, + 0xca, 0xd9, 0x3b, 0x42, 0xb4, 0xbd, 0x0b, 0x49, 0x1c, 0xfa, 0x06, 0x7c, 0xea, 0xfb, 0xf0, 0x55, + 0x4a, 0x2e, 0x59, 0x1c, 0xc5, 0xd1, 0x27, 0xe1, 0x7b, 0xf7, 0xfd, 0xfc, 0xe3, 0xc3, 0x05, 0x1e, + 0xa7, 0x52, 0xa6, 0x1b, 0xee, 0x30, 0x5a, 0xd1, 0xb2, 0x92, 0x05, 0x77, 0x28, 0xdb, 0x66, 0xc2, + 0xb9, 0x99, 0x3b, 0x99, 0x60, 0xfc, 0xbb, 0x9d, 0x17, 0xb2, 0x92, 0x68, 0xd4, 0xc0, 0xec, 0x0e, + 0x66, 0x2b, 0x98, 0x7d, 0x33, 0x3f, 0x7f, 0xd8, 0x2a, 0xd0, 0x3c, 0x73, 0xa8, 0x10, 0xb2, 0xa2, + 0x55, 0x26, 0x45, 0xd9, 0x10, 0xa7, 0x3f, 0x75, 0xd0, 0x83, 0x5a, 0x08, 0x8d, 0x01, 0xf2, 0x42, + 0x7e, 0xe1, 0xeb, 0x6a, 0x95, 0x31, 0x4b, 0x9b, 0x68, 0x33, 0x83, 0x18, 0xed, 0x4d, 0xc0, 0xd0, + 0x08, 0x86, 0xca, 0xb0, 0x5e, 0xf6, 0xd5, 0xf2, 0x50, 0x9d, 0x03, 0x86, 0x10, 0x0c, 0xbe, 0x66, + 0x82, 0x59, 0x03, 0x75, 0xad, 0x66, 0x14, 0xc0, 0x90, 0x8a, 0x35, 0xaf, 0xb3, 0x58, 0xfa, 0x44, + 0x9b, 0x9d, 0x5c, 0x3c, 0xb3, 0xef, 0xcd, 0x68, 
0xab, 0x04, 0xb6, 0xdb, 0x12, 0xae, 0x25, 0xe3, + 0xa4, 0xa3, 0xa3, 0x48, 0x05, 0xcb, 0x79, 0x51, 0x65, 0xbc, 0xb4, 0x0e, 0x26, 0xfd, 0xd9, 0xff, + 0x17, 0xcf, 0xf7, 0x8a, 0xa9, 0x2f, 0x67, 0x51, 0xc3, 0xfc, 0x41, 0xee, 0x68, 0xa0, 0x57, 0xa0, + 0x97, 0x15, 0xad, 0xb8, 0x75, 0xa8, 0x92, 0x3d, 0xd9, 0x2b, 0x16, 0xd7, 0x68, 0xd2, 0x90, 0xce, + 0x25, 0x3c, 0xd8, 0x11, 0xaf, 0x1b, 0x10, 0x74, 0xcb, 0xdb, 0xd6, 0xd4, 0x8c, 0xde, 0x82, 0xc1, + 0xb2, 0x82, 0xaf, 0xeb, 0xb6, 0xad, 0x9e, 0x32, 0x7a, 0xba, 0xd7, 0xc8, 0xbf, 0x65, 0x90, 0xbf, + 0xe4, 0xe9, 0x3b, 0x38, 0xba, 0x5b, 0x0d, 0x1a, 0xc3, 0xc8, 0x0d, 0x3d, 0x1c, 0x27, 0x4b, 0xb2, + 0xba, 0x5e, 0xfa, 0x78, 0xf5, 0x3e, 0x8c, 0x23, 0xec, 0x05, 0x6f, 0x02, 0xec, 0x9b, 0xff, 0xa1, + 0x21, 0x0c, 0xc2, 0x65, 0x88, 0x4d, 0x0d, 0x9d, 0xc2, 0xb1, 0xbb, 0x58, 0xac, 0x6e, 0xc1, 0xb1, + 0xd9, 0x9b, 0x62, 0x30, 0x3a, 0x0f, 0x34, 0x82, 0x33, 0x3f, 0x20, 0xd8, 0x4b, 0x82, 0x65, 0xb8, + 0x23, 0x72, 0x0c, 0x86, 0x1b, 0x7b, 0x38, 0xf4, 0x83, 0xf0, 0xca, 0xd4, 0xd0, 0x09, 0x80, 0x8f, + 0xbb, 0x73, 0x6f, 0x1a, 0x81, 0xae, 0x3a, 0x41, 0x67, 0x70, 0x1a, 0x27, 0x6e, 0xb2, 0x9b, 0xe1, + 0x08, 0x86, 0x1e, 0xc1, 0x6e, 0xd2, 0xb0, 0x0d, 0xd0, 0x09, 0x76, 0xfd, 0x8f, 0x66, 0xaf, 0x5e, + 0xf8, 0x78, 0x81, 0xd5, 0xa2, 0x5f, 0x2f, 0x30, 0x21, 0x4b, 0x62, 0x0e, 0x2e, 0x7f, 0x69, 0x30, + 0x5e, 0xcb, 0xed, 0xfd, 0x0d, 0x5d, 0x82, 0xaa, 0x28, 0xaa, 0x9f, 0x6d, 0xa4, 0x7d, 0x7a, 0xdd, + 0x02, 0x53, 0xb9, 0xa1, 0x22, 0xb5, 0x65, 0x91, 0x3a, 0x29, 0x17, 0xea, 0x51, 0x3b, 0xcd, 0x8a, + 0xe6, 0x59, 0xf9, 0x8f, 0xff, 0xe6, 0xa5, 0x1a, 0x7e, 0xf7, 0x1e, 0x5d, 0x35, 0x02, 0xde, 0x46, + 0x7e, 0x63, 0xb6, 0xdf, 0xf9, 0xb9, 0xca, 0xef, 0xc3, 0xfc, 0xf3, 0x81, 0x12, 0x7b, 0xf1, 0x27, + 0x00, 0x00, 0xff, 0xff, 0x3b, 0x8d, 0xf4, 0xff, 0x83, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1beta1/datastore_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1beta1/datastore_admin.pb.go new file mode 100644 index 0000000..850de01 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/admin/v1beta1/datastore_admin.pb.go @@ -0,0 +1,900 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/admin/v1beta1/datastore_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/datastore/admin/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Operation types. +type OperationType int32 + +const ( + // Unspecified. + OperationType_OPERATION_TYPE_UNSPECIFIED OperationType = 0 + // ExportEntities. + OperationType_EXPORT_ENTITIES OperationType = 1 + // ImportEntities. 
+ OperationType_IMPORT_ENTITIES OperationType = 2 +) + +var OperationType_name = map[int32]string{ + 0: "OPERATION_TYPE_UNSPECIFIED", + 1: "EXPORT_ENTITIES", + 2: "IMPORT_ENTITIES", +} +var OperationType_value = map[string]int32{ + "OPERATION_TYPE_UNSPECIFIED": 0, + "EXPORT_ENTITIES": 1, + "IMPORT_ENTITIES": 2, +} + +func (x OperationType) String() string { + return proto.EnumName(OperationType_name, int32(x)) +} +func (OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{0} +} + +// The various possible states for an ongoing Operation. +type CommonMetadata_State int32 + +const ( + // Unspecified. + CommonMetadata_STATE_UNSPECIFIED CommonMetadata_State = 0 + // Request is being prepared for processing. + CommonMetadata_INITIALIZING CommonMetadata_State = 1 + // Request is actively being processed. + CommonMetadata_PROCESSING CommonMetadata_State = 2 + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + CommonMetadata_CANCELLING CommonMetadata_State = 3 + // Request has been processed and is in its finalization stage. + CommonMetadata_FINALIZING CommonMetadata_State = 4 + // Request has completed successfully. + CommonMetadata_SUCCESSFUL CommonMetadata_State = 5 + // Request has finished being processed, but encountered an error. + CommonMetadata_FAILED CommonMetadata_State = 6 + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. + CommonMetadata_CANCELLED CommonMetadata_State = 7 +) + +var CommonMetadata_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "INITIALIZING", + 2: "PROCESSING", + 3: "CANCELLING", + 4: "FINALIZING", + 5: "SUCCESSFUL", + 6: "FAILED", + 7: "CANCELLED", +} +var CommonMetadata_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "INITIALIZING": 1, + "PROCESSING": 2, + "CANCELLING": 3, + "FINALIZING": 4, + "SUCCESSFUL": 5, + "FAILED": 6, + "CANCELLED": 7, +} + +func (x CommonMetadata_State) String() string { + return proto.EnumName(CommonMetadata_State_name, int32(x)) +} +func (CommonMetadata_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{0, 0} +} + +// Metadata common to all Datastore Admin operations. +type CommonMetadata struct { + // The time that work began on the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the operation ended, either successfully or otherwise. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The type of the operation. Can be used as a filter in + // ListOperationsRequest. + OperationType OperationType `protobuf:"varint,3,opt,name=operation_type,json=operationType,proto3,enum=google.datastore.admin.v1beta1.OperationType" json:"operation_type,omitempty"` + // The client-assigned labels which were provided when the operation was + // created. May also include additional labels. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The current state of the Operation. 
+ State CommonMetadata_State `protobuf:"varint,5,opt,name=state,proto3,enum=google.datastore.admin.v1beta1.CommonMetadata_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommonMetadata) Reset() { *m = CommonMetadata{} } +func (m *CommonMetadata) String() string { return proto.CompactTextString(m) } +func (*CommonMetadata) ProtoMessage() {} +func (*CommonMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{0} +} +func (m *CommonMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommonMetadata.Unmarshal(m, b) +} +func (m *CommonMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommonMetadata.Marshal(b, m, deterministic) +} +func (dst *CommonMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommonMetadata.Merge(dst, src) +} +func (m *CommonMetadata) XXX_Size() int { + return xxx_messageInfo_CommonMetadata.Size(m) +} +func (m *CommonMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CommonMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CommonMetadata proto.InternalMessageInfo + +func (m *CommonMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *CommonMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *CommonMetadata) GetOperationType() OperationType { + if m != nil { + return m.OperationType + } + return OperationType_OPERATION_TYPE_UNSPECIFIED +} + +func (m *CommonMetadata) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *CommonMetadata) GetState() CommonMetadata_State { + if m != nil { + return m.State + } + return CommonMetadata_STATE_UNSPECIFIED +} + +// Measures the progress of a particular metric. +type Progress struct { + // The amount of work that has been completed. Note that this may be greater + // than work_estimated. + WorkCompleted int64 `protobuf:"varint,1,opt,name=work_completed,json=workCompleted,proto3" json:"work_completed,omitempty"` + // An estimate of how much work needs to be performed. May be zero if the + // work estimate is unavailable. 
+ WorkEstimated int64 `protobuf:"varint,2,opt,name=work_estimated,json=workEstimated,proto3" json:"work_estimated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Progress) Reset() { *m = Progress{} } +func (m *Progress) String() string { return proto.CompactTextString(m) } +func (*Progress) ProtoMessage() {} +func (*Progress) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{1} +} +func (m *Progress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Progress.Unmarshal(m, b) +} +func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Progress.Marshal(b, m, deterministic) +} +func (dst *Progress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Progress.Merge(dst, src) +} +func (m *Progress) XXX_Size() int { + return xxx_messageInfo_Progress.Size(m) +} +func (m *Progress) XXX_DiscardUnknown() { + xxx_messageInfo_Progress.DiscardUnknown(m) +} + +var xxx_messageInfo_Progress proto.InternalMessageInfo + +func (m *Progress) GetWorkCompleted() int64 { + if m != nil { + return m.WorkCompleted + } + return 0 +} + +func (m *Progress) GetWorkEstimated() int64 { + if m != nil { + return m.WorkEstimated + } + return 0 +} + +// The request for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. +type ExportEntitiesRequest struct { + // Project ID against which to make the request. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Client-assigned labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Description of what data from the project is included in the export. + EntityFilter *EntityFilter `protobuf:"bytes,3,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // Location for the export metadata and data files. + // + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So output_url_prefix should be of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the + // name of the Cloud Storage bucket and `NAMESPACE_PATH` is an optional Cloud + // Storage namespace path (this is not a Cloud Datastore namespace). For more + // information about Cloud Storage namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // The resulting files will be nested deeper than the specified URL prefix. + // The final output URL will be provided in the + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] + // field. That value should be used for subsequent ImportEntities operations. + // + // By nesting the data files deeper, the same Cloud Storage bucket can be used + // in multiple ExportEntities operations without conflict. 
+ OutputUrlPrefix string `protobuf:"bytes,4,opt,name=output_url_prefix,json=outputUrlPrefix,proto3" json:"output_url_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesRequest) Reset() { *m = ExportEntitiesRequest{} } +func (m *ExportEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesRequest) ProtoMessage() {} +func (*ExportEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{2} +} +func (m *ExportEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesRequest.Unmarshal(m, b) +} +func (m *ExportEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesRequest.Merge(dst, src) +} +func (m *ExportEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesRequest.Size(m) +} +func (m *ExportEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesRequest proto.InternalMessageInfo + +func (m *ExportEntitiesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ExportEntitiesRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ExportEntitiesRequest) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ExportEntitiesRequest) GetOutputUrlPrefix() string { + if m != nil { + return m.OutputUrlPrefix + } + return "" +} + +// The request for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ImportEntities]. +type ImportEntitiesRequest struct { + // Project ID against which to make the request. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Client-assigned labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The full resource URL of the external storage location. Currently, only + // Google Cloud Storage is supported. So input_url should be of the form: + // `gs://BUCKET_NAME[/NAMESPACE_PATH]/OVERALL_EXPORT_METADATA_FILE`, where + // `BUCKET_NAME` is the name of the Cloud Storage bucket, `NAMESPACE_PATH` is + // an optional Cloud Storage namespace path (this is not a Cloud Datastore + // namespace), and `OVERALL_EXPORT_METADATA_FILE` is the metadata file written + // by the ExportEntities operation. For more information about Cloud Storage + // namespace paths, see + // [Object name + // considerations](https://cloud.google.com/storage/docs/naming#object-considerations). + // + // For more information, see + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. + InputUrl string `protobuf:"bytes,3,opt,name=input_url,json=inputUrl,proto3" json:"input_url,omitempty"` + // Optionally specify which kinds/namespaces are to be imported. 
If provided, + // the list must be a subset of the EntityFilter used in creating the export, + // otherwise a FAILED_PRECONDITION error will be returned. If no filter is + // specified then all entities from the export are imported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportEntitiesRequest) Reset() { *m = ImportEntitiesRequest{} } +func (m *ImportEntitiesRequest) String() string { return proto.CompactTextString(m) } +func (*ImportEntitiesRequest) ProtoMessage() {} +func (*ImportEntitiesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{3} +} +func (m *ImportEntitiesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportEntitiesRequest.Unmarshal(m, b) +} +func (m *ImportEntitiesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportEntitiesRequest.Marshal(b, m, deterministic) +} +func (dst *ImportEntitiesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportEntitiesRequest.Merge(dst, src) +} +func (m *ImportEntitiesRequest) XXX_Size() int { + return xxx_messageInfo_ImportEntitiesRequest.Size(m) +} +func (m *ImportEntitiesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportEntitiesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportEntitiesRequest proto.InternalMessageInfo + +func (m *ImportEntitiesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ImportEntitiesRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *ImportEntitiesRequest) GetInputUrl() string { + if m != nil { + return m.InputUrl + } + return "" +} + +func (m *ImportEntitiesRequest) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +// The response for +// [google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities][google.datastore.admin.v1beta1.DatastoreAdmin.ExportEntities]. +type ExportEntitiesResponse struct { + // Location of the output metadata file. This can be used to begin an import + // into Cloud Datastore (this project or another project). See + // [google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url][google.datastore.admin.v1beta1.ImportEntitiesRequest.input_url]. + // Only present if the operation completed successfully. 
+ OutputUrl string `protobuf:"bytes,1,opt,name=output_url,json=outputUrl,proto3" json:"output_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesResponse) Reset() { *m = ExportEntitiesResponse{} } +func (m *ExportEntitiesResponse) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesResponse) ProtoMessage() {} +func (*ExportEntitiesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{4} +} +func (m *ExportEntitiesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesResponse.Unmarshal(m, b) +} +func (m *ExportEntitiesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesResponse.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesResponse.Merge(dst, src) +} +func (m *ExportEntitiesResponse) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesResponse.Size(m) +} +func (m *ExportEntitiesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesResponse proto.InternalMessageInfo + +func (m *ExportEntitiesResponse) GetOutputUrl() string { + if m != nil { + return m.OutputUrl + } + return "" +} + +// Metadata for ExportEntities operations. +type ExportEntitiesMetadata struct { + // Metadata common to all Datastore Admin operations. + Common *CommonMetadata `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` + // An estimate of the number of entities processed. + ProgressEntities *Progress `protobuf:"bytes,2,opt,name=progress_entities,json=progressEntities,proto3" json:"progress_entities,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,3,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Description of which entities are being exported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // Location for the export metadata and data files. This will be the same + // value as the + // [google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix][google.datastore.admin.v1beta1.ExportEntitiesRequest.output_url_prefix] + // field. The final output location is provided in + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url]. 
+ OutputUrlPrefix string `protobuf:"bytes,5,opt,name=output_url_prefix,json=outputUrlPrefix,proto3" json:"output_url_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportEntitiesMetadata) Reset() { *m = ExportEntitiesMetadata{} } +func (m *ExportEntitiesMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportEntitiesMetadata) ProtoMessage() {} +func (*ExportEntitiesMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{5} +} +func (m *ExportEntitiesMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportEntitiesMetadata.Unmarshal(m, b) +} +func (m *ExportEntitiesMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportEntitiesMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportEntitiesMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportEntitiesMetadata.Merge(dst, src) +} +func (m *ExportEntitiesMetadata) XXX_Size() int { + return xxx_messageInfo_ExportEntitiesMetadata.Size(m) +} +func (m *ExportEntitiesMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportEntitiesMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportEntitiesMetadata proto.InternalMessageInfo + +func (m *ExportEntitiesMetadata) GetCommon() *CommonMetadata { + if m != nil { + return m.Common + } + return nil +} + +func (m *ExportEntitiesMetadata) GetProgressEntities() *Progress { + if m != nil { + return m.ProgressEntities + } + return nil +} + +func (m *ExportEntitiesMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ExportEntitiesMetadata) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ExportEntitiesMetadata) GetOutputUrlPrefix() string { + if m != nil { + return m.OutputUrlPrefix + } + return "" +} + +// Metadata for ImportEntities operations. +type ImportEntitiesMetadata struct { + // Metadata common to all Datastore Admin operations. + Common *CommonMetadata `protobuf:"bytes,1,opt,name=common,proto3" json:"common,omitempty"` + // An estimate of the number of entities processed. + ProgressEntities *Progress `protobuf:"bytes,2,opt,name=progress_entities,json=progressEntities,proto3" json:"progress_entities,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,3,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Description of which entities are being imported. + EntityFilter *EntityFilter `protobuf:"bytes,4,opt,name=entity_filter,json=entityFilter,proto3" json:"entity_filter,omitempty"` + // The location of the import metadata file. This will be the same value as + // the + // [google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url][google.datastore.admin.v1beta1.ExportEntitiesResponse.output_url] + // field. 
+ InputUrl string `protobuf:"bytes,5,opt,name=input_url,json=inputUrl,proto3" json:"input_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportEntitiesMetadata) Reset() { *m = ImportEntitiesMetadata{} } +func (m *ImportEntitiesMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportEntitiesMetadata) ProtoMessage() {} +func (*ImportEntitiesMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{6} +} +func (m *ImportEntitiesMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportEntitiesMetadata.Unmarshal(m, b) +} +func (m *ImportEntitiesMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportEntitiesMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportEntitiesMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportEntitiesMetadata.Merge(dst, src) +} +func (m *ImportEntitiesMetadata) XXX_Size() int { + return xxx_messageInfo_ImportEntitiesMetadata.Size(m) +} +func (m *ImportEntitiesMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportEntitiesMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportEntitiesMetadata proto.InternalMessageInfo + +func (m *ImportEntitiesMetadata) GetCommon() *CommonMetadata { + if m != nil { + return m.Common + } + return nil +} + +func (m *ImportEntitiesMetadata) GetProgressEntities() *Progress { + if m != nil { + return m.ProgressEntities + } + return nil +} + +func (m *ImportEntitiesMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ImportEntitiesMetadata) GetEntityFilter() *EntityFilter { + if m != nil { + return m.EntityFilter + } + return nil +} + +func (m *ImportEntitiesMetadata) GetInputUrl() string { + if m != nil { + return m.InputUrl + } + return "" +} + +// Identifies a subset of entities in a project. This is specified as +// combinations of kinds and namespaces (either or both of which may be all, as +// described in the following examples). +// Example usage: +// +// Entire project: +// kinds=[], namespace_ids=[] +// +// Kinds Foo and Bar in all namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=[] +// +// Kinds Foo and Bar only in the default namespace: +// kinds=['Foo', 'Bar'], namespace_ids=[''] +// +// Kinds Foo and Bar in both the default and Baz namespaces: +// kinds=['Foo', 'Bar'], namespace_ids=['', 'Baz'] +// +// The entire Baz namespace: +// kinds=[], namespace_ids=['Baz'] +type EntityFilter struct { + // If empty, then this represents all kinds. + Kinds []string `protobuf:"bytes,1,rep,name=kinds,proto3" json:"kinds,omitempty"` + // An empty list represents all namespaces. This is the preferred + // usage for projects that don't use namespaces. + // + // An empty string element represents the default namespace. This should be + // used if the project has data in non-default namespaces, but doesn't want to + // include them. + // Each namespace in this list must be unique. 
+ NamespaceIds []string `protobuf:"bytes,2,rep,name=namespace_ids,json=namespaceIds,proto3" json:"namespace_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityFilter) Reset() { *m = EntityFilter{} } +func (m *EntityFilter) String() string { return proto.CompactTextString(m) } +func (*EntityFilter) ProtoMessage() {} +func (*EntityFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_admin_f5b05ee5645ebe6f, []int{7} +} +func (m *EntityFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityFilter.Unmarshal(m, b) +} +func (m *EntityFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityFilter.Marshal(b, m, deterministic) +} +func (dst *EntityFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityFilter.Merge(dst, src) +} +func (m *EntityFilter) XXX_Size() int { + return xxx_messageInfo_EntityFilter.Size(m) +} +func (m *EntityFilter) XXX_DiscardUnknown() { + xxx_messageInfo_EntityFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityFilter proto.InternalMessageInfo + +func (m *EntityFilter) GetKinds() []string { + if m != nil { + return m.Kinds + } + return nil +} + +func (m *EntityFilter) GetNamespaceIds() []string { + if m != nil { + return m.NamespaceIds + } + return nil +} + +func init() { + proto.RegisterType((*CommonMetadata)(nil), "google.datastore.admin.v1beta1.CommonMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1beta1.CommonMetadata.LabelsEntry") + proto.RegisterType((*Progress)(nil), "google.datastore.admin.v1beta1.Progress") + proto.RegisterType((*ExportEntitiesRequest)(nil), "google.datastore.admin.v1beta1.ExportEntitiesRequest") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1beta1.ExportEntitiesRequest.LabelsEntry") + proto.RegisterType((*ImportEntitiesRequest)(nil), "google.datastore.admin.v1beta1.ImportEntitiesRequest") + proto.RegisterMapType((map[string]string)(nil), "google.datastore.admin.v1beta1.ImportEntitiesRequest.LabelsEntry") + proto.RegisterType((*ExportEntitiesResponse)(nil), "google.datastore.admin.v1beta1.ExportEntitiesResponse") + proto.RegisterType((*ExportEntitiesMetadata)(nil), "google.datastore.admin.v1beta1.ExportEntitiesMetadata") + proto.RegisterType((*ImportEntitiesMetadata)(nil), "google.datastore.admin.v1beta1.ImportEntitiesMetadata") + proto.RegisterType((*EntityFilter)(nil), "google.datastore.admin.v1beta1.EntityFilter") + proto.RegisterEnum("google.datastore.admin.v1beta1.OperationType", OperationType_name, OperationType_value) + proto.RegisterEnum("google.datastore.admin.v1beta1.CommonMetadata_State", CommonMetadata_State_name, CommonMetadata_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatastoreAdminClient is the client API for DatastoreAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatastoreAdminClient interface { + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. 
Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportEntities(ctx context.Context, in *ExportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. + ImportEntities(ctx context.Context, in *ImportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type datastoreAdminClient struct { + cc *grpc.ClientConn +} + +func NewDatastoreAdminClient(cc *grpc.ClientConn) DatastoreAdminClient { + return &datastoreAdminClient{cc} +} + +func (c *datastoreAdminClient) ExportEntities(ctx context.Context, in *ExportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1beta1.DatastoreAdmin/ExportEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreAdminClient) ImportEntities(ctx context.Context, in *ImportEntitiesRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.datastore.admin.v1beta1.DatastoreAdmin/ImportEntities", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatastoreAdminServer is the server API for DatastoreAdmin service. +type DatastoreAdminServer interface { + // Exports a copy of all or a subset of entities from Google Cloud Datastore + // to another storage system, such as Google Cloud Storage. Recent updates to + // entities may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportEntities(context.Context, *ExportEntitiesRequest) (*longrunning.Operation, error) + // Imports entities into Google Cloud Datastore. Existing entities with the + // same key are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportEntities operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Datastore. 
+ ImportEntities(context.Context, *ImportEntitiesRequest) (*longrunning.Operation, error) +} + +func RegisterDatastoreAdminServer(s *grpc.Server, srv DatastoreAdminServer) { + s.RegisterService(&_DatastoreAdmin_serviceDesc, srv) +} + +func _DatastoreAdmin_ExportEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).ExportEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1beta1.DatastoreAdmin/ExportEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).ExportEntities(ctx, req.(*ExportEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatastoreAdmin_ImportEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportEntitiesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreAdminServer).ImportEntities(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.admin.v1beta1.DatastoreAdmin/ImportEntities", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreAdminServer).ImportEntities(ctx, req.(*ImportEntitiesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DatastoreAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.datastore.admin.v1beta1.DatastoreAdmin", + HandlerType: (*DatastoreAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ExportEntities", + Handler: _DatastoreAdmin_ExportEntities_Handler, + }, + { + MethodName: "ImportEntities", + Handler: _DatastoreAdmin_ImportEntities_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/datastore/admin/v1beta1/datastore_admin.proto", +} + +func init() { + proto.RegisterFile("google/datastore/admin/v1beta1/datastore_admin.proto", fileDescriptor_datastore_admin_f5b05ee5645ebe6f) +} + +var fileDescriptor_datastore_admin_f5b05ee5645ebe6f = []byte{ + // 996 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x56, 0x41, 0x8f, 0xdb, 0x44, + 0x14, 0xc6, 0xce, 0x26, 0x6d, 0xde, 0x6e, 0xd2, 0xec, 0x94, 0xad, 0xa2, 0x40, 0xcb, 0xca, 0xa5, + 0xd2, 0x6a, 0x05, 0x0e, 0x1b, 0x5a, 0x41, 0x97, 0x53, 0x36, 0xeb, 0x54, 0x46, 0x69, 0x12, 0x1c, + 0x07, 0x75, 0x7b, 0xb1, 0x9c, 0x78, 0x36, 0x32, 0x6b, 0x7b, 0x8c, 0x3d, 0x29, 0x8d, 0x10, 0x17, + 0x2e, 0x1c, 0x38, 0x72, 0xe1, 0x1f, 0x20, 0xf1, 0x1b, 0xb8, 0x70, 0xe1, 0xc2, 0x91, 0xbf, 0xc0, + 0x8f, 0xe0, 0x88, 0x66, 0x3c, 0x76, 0xe2, 0x25, 0x10, 0xca, 0x16, 0x4e, 0xdc, 0xfc, 0xde, 0xbc, + 0xef, 0x9b, 0x37, 0xdf, 0x9b, 0xf7, 0x3c, 0x70, 0x7f, 0x46, 0xc8, 0xcc, 0xc3, 0x4d, 0xc7, 0xa6, + 0x76, 0x4c, 0x49, 0x84, 0x9b, 0xb6, 0xe3, 0xbb, 0x41, 0xf3, 0xd9, 0xd1, 0x04, 0x53, 0xfb, 0x68, + 0xe9, 0xb7, 0xb8, 0x5f, 0x0d, 0x23, 0x42, 0x09, 0xba, 0x93, 0xa0, 0xd4, 0x6c, 0x55, 0x4d, 0x56, + 0x05, 0xaa, 0xf1, 0xba, 0x60, 0xb5, 0x43, 0xb7, 0x69, 0x07, 0x01, 0xa1, 0x36, 0x75, 0x49, 0x10, + 0x27, 0xe8, 0xc6, 0x5d, 0xb1, 0xea, 0x91, 0x60, 0x16, 0xcd, 0x83, 0xc0, 0x0d, 0x66, 0x4d, 0x12, + 0xe2, 0x28, 0x17, 0xf4, 0x86, 0x08, 0xe2, 0xd6, 0x64, 0x7e, 0xde, 0xa4, 0xae, 0x8f, 
0x63, 0x6a, + 0xfb, 0x61, 0x12, 0xa0, 0xfc, 0xb8, 0x05, 0xd5, 0x0e, 0xf1, 0x7d, 0x12, 0x3c, 0xc6, 0xd4, 0x66, + 0x99, 0xa0, 0x87, 0x00, 0x31, 0xb5, 0x23, 0x6a, 0xb1, 0xd8, 0xba, 0xb4, 0x2f, 0x1d, 0x6c, 0xb7, + 0x1a, 0xaa, 0xc8, 0x35, 0x25, 0x52, 0xcd, 0x94, 0xc8, 0x28, 0xf3, 0x68, 0x66, 0xa3, 0x07, 0x70, + 0x1d, 0x07, 0x4e, 0x02, 0x94, 0x37, 0x02, 0xaf, 0xe1, 0xc0, 0xe1, 0x30, 0x13, 0xaa, 0x59, 0xe6, + 0x16, 0x5d, 0x84, 0xb8, 0x5e, 0xd8, 0x97, 0x0e, 0xaa, 0xad, 0xb7, 0xd5, 0xbf, 0x56, 0x48, 0x1d, + 0xa4, 0x28, 0x73, 0x11, 0x62, 0xa3, 0x42, 0x56, 0x4d, 0x64, 0x40, 0xc9, 0xb3, 0x27, 0xd8, 0x8b, + 0xeb, 0x5b, 0xfb, 0x85, 0x83, 0xed, 0xd6, 0xf1, 0x26, 0xb6, 0xbc, 0x0e, 0x6a, 0x8f, 0x83, 0xb5, + 0x80, 0x46, 0x0b, 0x43, 0x30, 0xa1, 0x0f, 0xa1, 0x18, 0x53, 0x9b, 0xe2, 0x7a, 0x91, 0x27, 0x78, + 0xff, 0x05, 0x29, 0x47, 0x0c, 0x6b, 0x24, 0x14, 0x8d, 0x87, 0xb0, 0xbd, 0xb2, 0x05, 0xaa, 0x41, + 0xe1, 0x02, 0x2f, 0xb8, 0xde, 0x65, 0x83, 0x7d, 0xa2, 0x57, 0xa1, 0xf8, 0xcc, 0xf6, 0xe6, 0x89, + 0x94, 0x65, 0x23, 0x31, 0x8e, 0xe5, 0xf7, 0x25, 0xe5, 0x6b, 0x09, 0x8a, 0x9c, 0x0b, 0xed, 0xc1, + 0xee, 0xc8, 0x6c, 0x9b, 0x9a, 0x35, 0xee, 0x8f, 0x86, 0x5a, 0x47, 0xef, 0xea, 0xda, 0x69, 0xed, + 0x15, 0x54, 0x83, 0x1d, 0xbd, 0xaf, 0x9b, 0x7a, 0xbb, 0xa7, 0x3f, 0xd5, 0xfb, 0x8f, 0x6a, 0x12, + 0xaa, 0x02, 0x0c, 0x8d, 0x41, 0x47, 0x1b, 0x8d, 0x98, 0x2d, 0x33, 0xbb, 0xd3, 0xee, 0x77, 0xb4, + 0x5e, 0x8f, 0xd9, 0x05, 0x66, 0x77, 0xf5, 0x7e, 0x1a, 0xbf, 0xc5, 0xec, 0xd1, 0xb8, 0xc3, 0xe2, + 0xbb, 0xe3, 0x5e, 0xad, 0x88, 0x00, 0x4a, 0xdd, 0xb6, 0xde, 0xd3, 0x4e, 0x6b, 0x25, 0x54, 0x81, + 0xb2, 0xc0, 0x6a, 0xa7, 0xb5, 0x6b, 0xca, 0x13, 0xb8, 0x3e, 0x8c, 0xc8, 0x2c, 0xc2, 0x71, 0x8c, + 0xee, 0x41, 0xf5, 0x33, 0x12, 0x5d, 0x58, 0x53, 0xe2, 0x87, 0x1e, 0xa6, 0xd8, 0xe1, 0x07, 0x2a, + 0x18, 0x15, 0xe6, 0xed, 0xa4, 0xce, 0x2c, 0x0c, 0xc7, 0xd4, 0xf5, 0x6d, 0x16, 0x26, 0x2f, 0xc3, + 0xb4, 0xd4, 0xa9, 0xfc, 0x2c, 0xc3, 0x9e, 0xf6, 0x3c, 0x24, 0x11, 0xd5, 0x02, 0xea, 0x52, 0x17, + 0xc7, 0x06, 0xfe, 0x74, 0x8e, 0x63, 0x8a, 0x6e, 0x03, 0x84, 0x11, 0xf9, 0x04, 0x4f, 0xa9, 0xe5, + 0x3a, 0x42, 0xb4, 0xb2, 0xf0, 0xe8, 0x0e, 0x3a, 0xcb, 0x6a, 0x2f, 0xf3, 0xda, 0xb7, 0x37, 0x15, + 0x6a, 0xed, 0x2e, 0x6b, 0xaf, 0xc0, 0x47, 0x50, 0xc1, 0x2c, 0x6c, 0x61, 0x9d, 0xbb, 0x1e, 0xc5, + 0x11, 0xbf, 0xab, 0xdb, 0xad, 0xb7, 0x36, 0xee, 0xc0, 0x41, 0x5d, 0x8e, 0x31, 0x76, 0xf0, 0x8a, + 0x85, 0x0e, 0x61, 0x97, 0xcc, 0x69, 0x38, 0xa7, 0xd6, 0x3c, 0xf2, 0xac, 0x30, 0xc2, 0xe7, 0xee, + 0xf3, 0xfa, 0x16, 0x3f, 0xd3, 0x8d, 0x64, 0x61, 0x1c, 0x79, 0x43, 0xee, 0xbe, 0xca, 0xad, 0xf9, + 0x41, 0x86, 0x3d, 0xdd, 0xff, 0x2f, 0xd4, 0x5c, 0xbb, 0xcb, 0x5a, 0x35, 0x5f, 0x83, 0xb2, 0x1b, + 0x88, 0x93, 0x73, 0x25, 0xcb, 0xc6, 0x75, 0xee, 0x18, 0x47, 0xde, 0x1f, 0xa5, 0xde, 0xba, 0xaa, + 0xd4, 0x57, 0x91, 0xef, 0x3d, 0xb8, 0x75, 0xf9, 0x96, 0xc4, 0x21, 0x09, 0x62, 0xcc, 0xe4, 0x5b, + 0xd6, 0x2f, 0x95, 0x2f, 0x2b, 0x9c, 0xf2, 0x55, 0xe1, 0x32, 0x32, 0x9b, 0xb5, 0x5d, 0x28, 0x4d, + 0xf9, 0x88, 0x10, 0x73, 0x56, 0x7d, 0xb1, 0x81, 0x62, 0x08, 0x34, 0x1a, 0xc3, 0x6e, 0x28, 0x5a, + 0xd0, 0xc2, 0x62, 0x13, 0x31, 0x81, 0x0f, 0x36, 0x51, 0xa6, 0xbd, 0x6b, 0xd4, 0x52, 0x8a, 0x34, + 0x4d, 0x34, 0x80, 0x6a, 0x46, 0x3b, 0x59, 0x50, 0x1c, 0x8b, 0xcb, 0xfe, 0xf7, 0x39, 0x2b, 0x29, + 0xfe, 0x84, 0xc1, 0xff, 0x85, 0x8a, 0xae, 0x6f, 0x9e, 0xe2, 0xda, 0xe6, 0x51, 0x7e, 0x93, 0xe1, + 0x56, 0xfe, 0x6e, 0xfe, 0x5f, 0x89, 0x97, 0x57, 0x89, 0x5c, 0x2f, 0x17, 0xf3, 0xbd, 0xac, 0xe8, + 0xb0, 0xb3, 0x0a, 0x65, 0x7d, 0x76, 0xe1, 0x06, 0x4e, 0x5c, 0x97, 0xf6, 0x0b, 0xac, 0xcf, 0xb8, + 0x81, 0xee, 
0x42, 0x25, 0xb0, 0x7d, 0x1c, 0x87, 0xf6, 0x14, 0x5b, 0xae, 0x93, 0x0c, 0x9c, 0xb2, + 0xb1, 0x93, 0x39, 0x75, 0x27, 0x3e, 0x3c, 0x83, 0x4a, 0xee, 0xc7, 0x8f, 0xee, 0x40, 0x63, 0x30, + 0xd4, 0x8c, 0xb6, 0xa9, 0x0f, 0xfa, 0x96, 0x79, 0x36, 0xbc, 0xfc, 0x37, 0xbc, 0x09, 0x37, 0xb4, + 0x27, 0xc3, 0x81, 0x61, 0x5a, 0x5a, 0xdf, 0xd4, 0x4d, 0x5d, 0x1b, 0xd5, 0x24, 0xe6, 0xd4, 0x1f, + 0xe7, 0x9d, 0x72, 0xeb, 0x27, 0x19, 0xaa, 0xa7, 0xe9, 0xc9, 0xdb, 0xec, 0xe0, 0xe8, 0x5b, 0x09, + 0xaa, 0xf9, 0xee, 0x45, 0x0f, 0xfe, 0xd1, 0xdf, 0xa4, 0x71, 0x3b, 0x85, 0xad, 0x3c, 0xd9, 0x96, + 0x4f, 0x18, 0xe5, 0x9d, 0x2f, 0x7f, 0xf9, 0xf5, 0x1b, 0xf9, 0x50, 0xb9, 0x97, 0x3d, 0x1b, 0xc5, + 0x04, 0x8e, 0x9b, 0x9f, 0x2f, 0xa7, 0xf3, 0x17, 0xc7, 0x98, 0x93, 0x1f, 0x4b, 0x87, 0x3c, 0xb5, + 0xfc, 0x75, 0xde, 0x9c, 0xda, 0xda, 0xd1, 0xfc, 0xb2, 0x52, 0x73, 0x7d, 0x91, 0xda, 0xc9, 0x77, + 0x12, 0x28, 0x53, 0xe2, 0x6f, 0xc8, 0xe6, 0xe4, 0x66, 0x5e, 0xec, 0x21, 0x7b, 0x24, 0x0e, 0xa5, + 0xa7, 0x1d, 0x01, 0x9b, 0x11, 0xcf, 0x0e, 0x66, 0x2a, 0x89, 0x66, 0xcd, 0x19, 0x0e, 0xf8, 0x13, + 0xb2, 0x99, 0x2c, 0xd9, 0xa1, 0x1b, 0xff, 0xd9, 0x73, 0xfb, 0x03, 0x6e, 0x7d, 0x2f, 0xbf, 0xf9, + 0x28, 0x61, 0xe9, 0x78, 0x64, 0xee, 0xa8, 0xd9, 0x4e, 0x2a, 0xdf, 0x4a, 0xfd, 0xf8, 0xe8, 0x84, + 0x05, 0x4f, 0x4a, 0x9c, 0xf6, 0xdd, 0xdf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x77, 0x71, 0x2d, 0x88, + 0xc4, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1/datastore.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/v1/datastore.pb.go new file mode 100644 index 0000000..b7de20b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/v1/datastore.pb.go @@ -0,0 +1,2116 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1/datastore.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The modes available for commits. +type CommitRequest_Mode int32 + +const ( + // Unspecified. This value must not be used. + CommitRequest_MODE_UNSPECIFIED CommitRequest_Mode = 0 + // Transactional: The mutations are either all applied, or none are applied. + // Learn about transactions + // [here](https://cloud.google.com/datastore/docs/concepts/transactions). + CommitRequest_TRANSACTIONAL CommitRequest_Mode = 1 + // Non-transactional: The mutations may not apply as all or none. 
+ CommitRequest_NON_TRANSACTIONAL CommitRequest_Mode = 2 +) + +var CommitRequest_Mode_name = map[int32]string{ + 0: "MODE_UNSPECIFIED", + 1: "TRANSACTIONAL", + 2: "NON_TRANSACTIONAL", +} +var CommitRequest_Mode_value = map[string]int32{ + "MODE_UNSPECIFIED": 0, + "TRANSACTIONAL": 1, + "NON_TRANSACTIONAL": 2, +} + +func (x CommitRequest_Mode) String() string { + return proto.EnumName(CommitRequest_Mode_name, int32(x)) +} +func (CommitRequest_Mode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{8, 0} +} + +// The possible values for read consistencies. +type ReadOptions_ReadConsistency int32 + +const ( + // Unspecified. This value must not be used. + ReadOptions_READ_CONSISTENCY_UNSPECIFIED ReadOptions_ReadConsistency = 0 + // Strong consistency. + ReadOptions_STRONG ReadOptions_ReadConsistency = 1 + // Eventual consistency. + ReadOptions_EVENTUAL ReadOptions_ReadConsistency = 2 +) + +var ReadOptions_ReadConsistency_name = map[int32]string{ + 0: "READ_CONSISTENCY_UNSPECIFIED", + 1: "STRONG", + 2: "EVENTUAL", +} +var ReadOptions_ReadConsistency_value = map[string]int32{ + "READ_CONSISTENCY_UNSPECIFIED": 0, + "STRONG": 1, + "EVENTUAL": 2, +} + +func (x ReadOptions_ReadConsistency) String() string { + return proto.EnumName(ReadOptions_ReadConsistency_name, int32(x)) +} +func (ReadOptions_ReadConsistency) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{16, 0} +} + +// The request for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. +type LookupRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The options for this lookup request. + ReadOptions *ReadOptions `protobuf:"bytes,1,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"` + // Keys of entities to look up. + Keys []*Key `protobuf:"bytes,3,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LookupRequest) Reset() { *m = LookupRequest{} } +func (m *LookupRequest) String() string { return proto.CompactTextString(m) } +func (*LookupRequest) ProtoMessage() {} +func (*LookupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{0} +} +func (m *LookupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LookupRequest.Unmarshal(m, b) +} +func (m *LookupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LookupRequest.Marshal(b, m, deterministic) +} +func (dst *LookupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LookupRequest.Merge(dst, src) +} +func (m *LookupRequest) XXX_Size() int { + return xxx_messageInfo_LookupRequest.Size(m) +} +func (m *LookupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LookupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LookupRequest proto.InternalMessageInfo + +func (m *LookupRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *LookupRequest) GetReadOptions() *ReadOptions { + if m != nil { + return m.ReadOptions + } + return nil +} + +func (m *LookupRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for [Datastore.Lookup][google.datastore.v1.Datastore.Lookup]. 
+type LookupResponse struct { + // Entities found as `ResultType.FULL` entities. The order of results in this + // field is undefined and has no relation to the order of the keys in the + // input. + Found []*EntityResult `protobuf:"bytes,1,rep,name=found,proto3" json:"found,omitempty"` + // Entities not found as `ResultType.KEY_ONLY` entities. The order of results + // in this field is undefined and has no relation to the order of the keys + // in the input. + Missing []*EntityResult `protobuf:"bytes,2,rep,name=missing,proto3" json:"missing,omitempty"` + // A list of keys that were not looked up due to resource constraints. The + // order of results in this field is undefined and has no relation to the + // order of the keys in the input. + Deferred []*Key `protobuf:"bytes,3,rep,name=deferred,proto3" json:"deferred,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LookupResponse) Reset() { *m = LookupResponse{} } +func (m *LookupResponse) String() string { return proto.CompactTextString(m) } +func (*LookupResponse) ProtoMessage() {} +func (*LookupResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{1} +} +func (m *LookupResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LookupResponse.Unmarshal(m, b) +} +func (m *LookupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LookupResponse.Marshal(b, m, deterministic) +} +func (dst *LookupResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LookupResponse.Merge(dst, src) +} +func (m *LookupResponse) XXX_Size() int { + return xxx_messageInfo_LookupResponse.Size(m) +} +func (m *LookupResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LookupResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LookupResponse proto.InternalMessageInfo + +func (m *LookupResponse) GetFound() []*EntityResult { + if m != nil { + return m.Found + } + return nil +} + +func (m *LookupResponse) GetMissing() []*EntityResult { + if m != nil { + return m.Missing + } + return nil +} + +func (m *LookupResponse) GetDeferred() []*Key { + if m != nil { + return m.Deferred + } + return nil +} + +// The request for [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +type RunQueryRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Entities are partitioned into subsets, identified by a partition ID. + // Queries are scoped to a single partition. + // This partition ID is normalized with the standard default context + // partition ID. + PartitionId *PartitionId `protobuf:"bytes,2,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The options for this query. + ReadOptions *ReadOptions `protobuf:"bytes,1,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"` + // The type of query. 
+ // + // Types that are valid to be assigned to QueryType: + // *RunQueryRequest_Query + // *RunQueryRequest_GqlQuery + QueryType isRunQueryRequest_QueryType `protobuf_oneof:"query_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryRequest) Reset() { *m = RunQueryRequest{} } +func (m *RunQueryRequest) String() string { return proto.CompactTextString(m) } +func (*RunQueryRequest) ProtoMessage() {} +func (*RunQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{2} +} +func (m *RunQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryRequest.Unmarshal(m, b) +} +func (m *RunQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryRequest.Marshal(b, m, deterministic) +} +func (dst *RunQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryRequest.Merge(dst, src) +} +func (m *RunQueryRequest) XXX_Size() int { + return xxx_messageInfo_RunQueryRequest.Size(m) +} +func (m *RunQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryRequest proto.InternalMessageInfo + +func (m *RunQueryRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RunQueryRequest) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *RunQueryRequest) GetReadOptions() *ReadOptions { + if m != nil { + return m.ReadOptions + } + return nil +} + +type isRunQueryRequest_QueryType interface { + isRunQueryRequest_QueryType() +} + +type RunQueryRequest_Query struct { + Query *Query `protobuf:"bytes,3,opt,name=query,proto3,oneof"` +} + +type RunQueryRequest_GqlQuery struct { + GqlQuery *GqlQuery `protobuf:"bytes,7,opt,name=gql_query,json=gqlQuery,proto3,oneof"` +} + +func (*RunQueryRequest_Query) isRunQueryRequest_QueryType() {} + +func (*RunQueryRequest_GqlQuery) isRunQueryRequest_QueryType() {} + +func (m *RunQueryRequest) GetQueryType() isRunQueryRequest_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *RunQueryRequest) GetQuery() *Query { + if x, ok := m.GetQueryType().(*RunQueryRequest_Query); ok { + return x.Query + } + return nil +} + +func (m *RunQueryRequest) GetGqlQuery() *GqlQuery { + if x, ok := m.GetQueryType().(*RunQueryRequest_GqlQuery); ok { + return x.GqlQuery + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RunQueryRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RunQueryRequest_OneofMarshaler, _RunQueryRequest_OneofUnmarshaler, _RunQueryRequest_OneofSizer, []interface{}{ + (*RunQueryRequest_Query)(nil), + (*RunQueryRequest_GqlQuery)(nil), + } +} + +func _RunQueryRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_Query: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Query); err != nil { + return err + } + case *RunQueryRequest_GqlQuery: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GqlQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.QueryType has unexpected type %T", x) + } + return nil +} + +func _RunQueryRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RunQueryRequest) + switch tag { + case 3: // query_type.query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Query) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_Query{msg} + return true, err + case 7: // query_type.gql_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GqlQuery) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_GqlQuery{msg} + return true, err + default: + return false, nil + } +} + +func _RunQueryRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_Query: + s := proto.Size(x.Query) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RunQueryRequest_GqlQuery: + s := proto.Size(x.GqlQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for +// [Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery]. +type RunQueryResponse struct { + // A batch of query results (always present). + Batch *QueryResultBatch `protobuf:"bytes,1,opt,name=batch,proto3" json:"batch,omitempty"` + // The parsed form of the `GqlQuery` from the request, if it was set. 
+ Query *Query `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryResponse) Reset() { *m = RunQueryResponse{} } +func (m *RunQueryResponse) String() string { return proto.CompactTextString(m) } +func (*RunQueryResponse) ProtoMessage() {} +func (*RunQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{3} +} +func (m *RunQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryResponse.Unmarshal(m, b) +} +func (m *RunQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryResponse.Marshal(b, m, deterministic) +} +func (dst *RunQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryResponse.Merge(dst, src) +} +func (m *RunQueryResponse) XXX_Size() int { + return xxx_messageInfo_RunQueryResponse.Size(m) +} +func (m *RunQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryResponse proto.InternalMessageInfo + +func (m *RunQueryResponse) GetBatch() *QueryResultBatch { + if m != nil { + return m.Batch + } + return nil +} + +func (m *RunQueryResponse) GetQuery() *Query { + if m != nil { + return m.Query + } + return nil +} + +// The request for +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. +type BeginTransactionRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Options for a new transaction. + TransactionOptions *TransactionOptions `protobuf:"bytes,10,opt,name=transaction_options,json=transactionOptions,proto3" json:"transaction_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} } +func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionRequest) ProtoMessage() {} +func (*BeginTransactionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{4} +} +func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b) +} +func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionRequest.Merge(dst, src) +} +func (m *BeginTransactionRequest) XXX_Size() int { + return xxx_messageInfo_BeginTransactionRequest.Size(m) +} +func (m *BeginTransactionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo + +func (m *BeginTransactionRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BeginTransactionRequest) GetTransactionOptions() *TransactionOptions { + if m != nil { + return m.TransactionOptions + } + return nil +} + +// The response for +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. 
+type BeginTransactionResponse struct { + // The transaction identifier (always present). + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionResponse) Reset() { *m = BeginTransactionResponse{} } +func (m *BeginTransactionResponse) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionResponse) ProtoMessage() {} +func (*BeginTransactionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{5} +} +func (m *BeginTransactionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionResponse.Unmarshal(m, b) +} +func (m *BeginTransactionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionResponse.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionResponse.Merge(dst, src) +} +func (m *BeginTransactionResponse) XXX_Size() int { + return xxx_messageInfo_BeginTransactionResponse.Size(m) +} +func (m *BeginTransactionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionResponse proto.InternalMessageInfo + +func (m *BeginTransactionResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. +type RollbackRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The transaction identifier, returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackRequest) Reset() { *m = RollbackRequest{} } +func (m *RollbackRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackRequest) ProtoMessage() {} +func (*RollbackRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{6} +} +func (m *RollbackRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackRequest.Unmarshal(m, b) +} +func (m *RollbackRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackRequest.Merge(dst, src) +} +func (m *RollbackRequest) XXX_Size() int { + return xxx_messageInfo_RollbackRequest.Size(m) +} +func (m *RollbackRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackRequest proto.InternalMessageInfo + +func (m *RollbackRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RollbackRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The response for +// [Datastore.Rollback][google.datastore.v1.Datastore.Rollback]. (an empty +// message). 
+type RollbackResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackResponse) Reset() { *m = RollbackResponse{} } +func (m *RollbackResponse) String() string { return proto.CompactTextString(m) } +func (*RollbackResponse) ProtoMessage() {} +func (*RollbackResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{7} +} +func (m *RollbackResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackResponse.Unmarshal(m, b) +} +func (m *RollbackResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackResponse.Marshal(b, m, deterministic) +} +func (dst *RollbackResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackResponse.Merge(dst, src) +} +func (m *RollbackResponse) XXX_Size() int { + return xxx_messageInfo_RollbackResponse.Size(m) +} +func (m *RollbackResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackResponse proto.InternalMessageInfo + +// The request for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +type CommitRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The type of commit to perform. Defaults to `TRANSACTIONAL`. + Mode CommitRequest_Mode `protobuf:"varint,5,opt,name=mode,proto3,enum=google.datastore.v1.CommitRequest_Mode" json:"mode,omitempty"` + // Must be set when mode is `TRANSACTIONAL`. + // + // Types that are valid to be assigned to TransactionSelector: + // *CommitRequest_Transaction + TransactionSelector isCommitRequest_TransactionSelector `protobuf_oneof:"transaction_selector"` + // The mutations to perform. + // + // When mode is `TRANSACTIONAL`, mutations affecting a single entity are + // applied in order. The following sequences of mutations affecting a single + // entity are not permitted in a single `Commit` request: + // + // - `insert` followed by `insert` + // - `update` followed by `insert` + // - `upsert` followed by `insert` + // - `delete` followed by `update` + // + // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single + // entity. 
+ Mutations []*Mutation `protobuf:"bytes,6,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitRequest) Reset() { *m = CommitRequest{} } +func (m *CommitRequest) String() string { return proto.CompactTextString(m) } +func (*CommitRequest) ProtoMessage() {} +func (*CommitRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{8} +} +func (m *CommitRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitRequest.Unmarshal(m, b) +} +func (m *CommitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitRequest.Marshal(b, m, deterministic) +} +func (dst *CommitRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitRequest.Merge(dst, src) +} +func (m *CommitRequest) XXX_Size() int { + return xxx_messageInfo_CommitRequest.Size(m) +} +func (m *CommitRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CommitRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitRequest proto.InternalMessageInfo + +func (m *CommitRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CommitRequest) GetMode() CommitRequest_Mode { + if m != nil { + return m.Mode + } + return CommitRequest_MODE_UNSPECIFIED +} + +type isCommitRequest_TransactionSelector interface { + isCommitRequest_TransactionSelector() +} + +type CommitRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3,oneof"` +} + +func (*CommitRequest_Transaction) isCommitRequest_TransactionSelector() {} + +func (m *CommitRequest) GetTransactionSelector() isCommitRequest_TransactionSelector { + if m != nil { + return m.TransactionSelector + } + return nil +} + +func (m *CommitRequest) GetTransaction() []byte { + if x, ok := m.GetTransactionSelector().(*CommitRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *CommitRequest) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CommitRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommitRequest_OneofMarshaler, _CommitRequest_OneofUnmarshaler, _CommitRequest_OneofSizer, []interface{}{ + (*CommitRequest_Transaction)(nil), + } +} + +func _CommitRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommitRequest) + // transaction_selector + switch x := m.TransactionSelector.(type) { + case *CommitRequest_Transaction: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case nil: + default: + return fmt.Errorf("CommitRequest.TransactionSelector has unexpected type %T", x) + } + return nil +} + +func _CommitRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommitRequest) + switch tag { + case 1: // transaction_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.TransactionSelector = &CommitRequest_Transaction{x} + return true, err + default: + return false, nil + } +} + +func _CommitRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommitRequest) + // transaction_selector + switch x := m.TransactionSelector.(type) { + case *CommitRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Datastore.Commit][google.datastore.v1.Datastore.Commit]. +type CommitResponse struct { + // The result of performing the mutations. + // The i-th mutation result corresponds to the i-th mutation in the request. + MutationResults []*MutationResult `protobuf:"bytes,3,rep,name=mutation_results,json=mutationResults,proto3" json:"mutation_results,omitempty"` + // The number of index entries updated during the commit, or zero if none were + // updated. 
+ IndexUpdates int32 `protobuf:"varint,4,opt,name=index_updates,json=indexUpdates,proto3" json:"index_updates,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitResponse) Reset() { *m = CommitResponse{} } +func (m *CommitResponse) String() string { return proto.CompactTextString(m) } +func (*CommitResponse) ProtoMessage() {} +func (*CommitResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{9} +} +func (m *CommitResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitResponse.Unmarshal(m, b) +} +func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic) +} +func (dst *CommitResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitResponse.Merge(dst, src) +} +func (m *CommitResponse) XXX_Size() int { + return xxx_messageInfo_CommitResponse.Size(m) +} +func (m *CommitResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CommitResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitResponse proto.InternalMessageInfo + +func (m *CommitResponse) GetMutationResults() []*MutationResult { + if m != nil { + return m.MutationResults + } + return nil +} + +func (m *CommitResponse) GetIndexUpdates() int32 { + if m != nil { + return m.IndexUpdates + } + return 0 +} + +// The request for +// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +type AllocateIdsRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // A list of keys with incomplete key paths for which to allocate IDs. + // No key may be reserved/read-only. + Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateIdsRequest) Reset() { *m = AllocateIdsRequest{} } +func (m *AllocateIdsRequest) String() string { return proto.CompactTextString(m) } +func (*AllocateIdsRequest) ProtoMessage() {} +func (*AllocateIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{10} +} +func (m *AllocateIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateIdsRequest.Unmarshal(m, b) +} +func (m *AllocateIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateIdsRequest.Marshal(b, m, deterministic) +} +func (dst *AllocateIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateIdsRequest.Merge(dst, src) +} +func (m *AllocateIdsRequest) XXX_Size() int { + return xxx_messageInfo_AllocateIdsRequest.Size(m) +} +func (m *AllocateIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateIdsRequest proto.InternalMessageInfo + +func (m *AllocateIdsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *AllocateIdsRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for +// [Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds]. +type AllocateIdsResponse struct { + // The keys specified in the request (in the same order), each with + // its key path completed with a newly allocated ID. 
+ Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateIdsResponse) Reset() { *m = AllocateIdsResponse{} } +func (m *AllocateIdsResponse) String() string { return proto.CompactTextString(m) } +func (*AllocateIdsResponse) ProtoMessage() {} +func (*AllocateIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{11} +} +func (m *AllocateIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateIdsResponse.Unmarshal(m, b) +} +func (m *AllocateIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateIdsResponse.Marshal(b, m, deterministic) +} +func (dst *AllocateIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateIdsResponse.Merge(dst, src) +} +func (m *AllocateIdsResponse) XXX_Size() int { + return xxx_messageInfo_AllocateIdsResponse.Size(m) +} +func (m *AllocateIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateIdsResponse proto.InternalMessageInfo + +func (m *AllocateIdsResponse) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The request for +// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. +type ReserveIdsRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If not empty, the ID of the database against which to make the request. + DatabaseId string `protobuf:"bytes,9,opt,name=database_id,json=databaseId,proto3" json:"database_id,omitempty"` + // A list of keys with complete key paths whose numeric IDs should not be + // auto-allocated. + Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReserveIdsRequest) Reset() { *m = ReserveIdsRequest{} } +func (m *ReserveIdsRequest) String() string { return proto.CompactTextString(m) } +func (*ReserveIdsRequest) ProtoMessage() {} +func (*ReserveIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{12} +} +func (m *ReserveIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReserveIdsRequest.Unmarshal(m, b) +} +func (m *ReserveIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReserveIdsRequest.Marshal(b, m, deterministic) +} +func (dst *ReserveIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReserveIdsRequest.Merge(dst, src) +} +func (m *ReserveIdsRequest) XXX_Size() int { + return xxx_messageInfo_ReserveIdsRequest.Size(m) +} +func (m *ReserveIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReserveIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReserveIdsRequest proto.InternalMessageInfo + +func (m *ReserveIdsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ReserveIdsRequest) GetDatabaseId() string { + if m != nil { + return m.DatabaseId + } + return "" +} + +func (m *ReserveIdsRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for +// [Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds]. 
+type ReserveIdsResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReserveIdsResponse) Reset() { *m = ReserveIdsResponse{} } +func (m *ReserveIdsResponse) String() string { return proto.CompactTextString(m) } +func (*ReserveIdsResponse) ProtoMessage() {} +func (*ReserveIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{13} +} +func (m *ReserveIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReserveIdsResponse.Unmarshal(m, b) +} +func (m *ReserveIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReserveIdsResponse.Marshal(b, m, deterministic) +} +func (dst *ReserveIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReserveIdsResponse.Merge(dst, src) +} +func (m *ReserveIdsResponse) XXX_Size() int { + return xxx_messageInfo_ReserveIdsResponse.Size(m) +} +func (m *ReserveIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReserveIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReserveIdsResponse proto.InternalMessageInfo + +// A mutation to apply to an entity. +type Mutation struct { + // The mutation operation. + // + // For `insert`, `update`, and `upsert`: + // - The entity's key must not be reserved/read-only. + // - No property in the entity may have a reserved name, + // not even a property in an entity in a value. + // - No value in the entity may have meaning 18, + // not even a value in an entity in another value. + // + // Types that are valid to be assigned to Operation: + // *Mutation_Insert + // *Mutation_Update + // *Mutation_Upsert + // *Mutation_Delete + Operation isMutation_Operation `protobuf_oneof:"operation"` + // When set, the server will detect whether or not this mutation conflicts + // with the current version of the entity on the server. Conflicting mutations + // are not applied, and are marked as such in MutationResult. 
+ // + // Types that are valid to be assigned to ConflictDetectionStrategy: + // *Mutation_BaseVersion + ConflictDetectionStrategy isMutation_ConflictDetectionStrategy `protobuf_oneof:"conflict_detection_strategy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation) Reset() { *m = Mutation{} } +func (m *Mutation) String() string { return proto.CompactTextString(m) } +func (*Mutation) ProtoMessage() {} +func (*Mutation) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{14} +} +func (m *Mutation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation.Unmarshal(m, b) +} +func (m *Mutation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation.Marshal(b, m, deterministic) +} +func (dst *Mutation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation.Merge(dst, src) +} +func (m *Mutation) XXX_Size() int { + return xxx_messageInfo_Mutation.Size(m) +} +func (m *Mutation) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation proto.InternalMessageInfo + +type isMutation_Operation interface { + isMutation_Operation() +} + +type Mutation_Insert struct { + Insert *Entity `protobuf:"bytes,4,opt,name=insert,proto3,oneof"` +} + +type Mutation_Update struct { + Update *Entity `protobuf:"bytes,5,opt,name=update,proto3,oneof"` +} + +type Mutation_Upsert struct { + Upsert *Entity `protobuf:"bytes,6,opt,name=upsert,proto3,oneof"` +} + +type Mutation_Delete struct { + Delete *Key `protobuf:"bytes,7,opt,name=delete,proto3,oneof"` +} + +func (*Mutation_Insert) isMutation_Operation() {} + +func (*Mutation_Update) isMutation_Operation() {} + +func (*Mutation_Upsert) isMutation_Operation() {} + +func (*Mutation_Delete) isMutation_Operation() {} + +func (m *Mutation) GetOperation() isMutation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Mutation) GetInsert() *Entity { + if x, ok := m.GetOperation().(*Mutation_Insert); ok { + return x.Insert + } + return nil +} + +func (m *Mutation) GetUpdate() *Entity { + if x, ok := m.GetOperation().(*Mutation_Update); ok { + return x.Update + } + return nil +} + +func (m *Mutation) GetUpsert() *Entity { + if x, ok := m.GetOperation().(*Mutation_Upsert); ok { + return x.Upsert + } + return nil +} + +func (m *Mutation) GetDelete() *Key { + if x, ok := m.GetOperation().(*Mutation_Delete); ok { + return x.Delete + } + return nil +} + +type isMutation_ConflictDetectionStrategy interface { + isMutation_ConflictDetectionStrategy() +} + +type Mutation_BaseVersion struct { + BaseVersion int64 `protobuf:"varint,8,opt,name=base_version,json=baseVersion,proto3,oneof"` +} + +func (*Mutation_BaseVersion) isMutation_ConflictDetectionStrategy() {} + +func (m *Mutation) GetConflictDetectionStrategy() isMutation_ConflictDetectionStrategy { + if m != nil { + return m.ConflictDetectionStrategy + } + return nil +} + +func (m *Mutation) GetBaseVersion() int64 { + if x, ok := m.GetConflictDetectionStrategy().(*Mutation_BaseVersion); ok { + return x.BaseVersion + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Mutation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Mutation_OneofMarshaler, _Mutation_OneofUnmarshaler, _Mutation_OneofSizer, []interface{}{ + (*Mutation_Insert)(nil), + (*Mutation_Update)(nil), + (*Mutation_Upsert)(nil), + (*Mutation_Delete)(nil), + (*Mutation_BaseVersion)(nil), + } +} + +func _Mutation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Insert); err != nil { + return err + } + case *Mutation_Update: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *Mutation_Upsert: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Upsert); err != nil { + return err + } + case *Mutation_Delete: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Delete); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Mutation.Operation has unexpected type %T", x) + } + // conflict_detection_strategy + switch x := m.ConflictDetectionStrategy.(type) { + case *Mutation_BaseVersion: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.BaseVersion)) + case nil: + default: + return fmt.Errorf("Mutation.ConflictDetectionStrategy has unexpected type %T", x) + } + return nil +} + +func _Mutation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Mutation) + switch tag { + case 4: // operation.insert + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Insert{msg} + return true, err + case 5: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Update{msg} + return true, err + case 6: // operation.upsert + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Upsert{msg} + return true, err + case 7: // operation.delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Key) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Delete{msg} + return true, err + case 8: // conflict_detection_strategy.base_version + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConflictDetectionStrategy = &Mutation_BaseVersion{int64(x)} + return true, err + default: + return false, nil + } +} + +func _Mutation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + s := proto.Size(x.Insert) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Upsert: + s := proto.Size(x.Upsert) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Delete: + s := proto.Size(x.Delete) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + 
} + // conflict_detection_strategy + switch x := m.ConflictDetectionStrategy.(type) { + case *Mutation_BaseVersion: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.BaseVersion)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The result of applying a mutation. +type MutationResult struct { + // The automatically allocated key. + // Set only when the mutation allocated a key. + Key *Key `protobuf:"bytes,3,opt,name=key,proto3" json:"key,omitempty"` + // The version of the entity on the server after processing the mutation. If + // the mutation doesn't change anything on the server, then the version will + // be the version of the current entity or, if no entity is present, a version + // that is strictly greater than the version of any previous entity and less + // than the version of any possible future entity. + Version int64 `protobuf:"varint,4,opt,name=version,proto3" json:"version,omitempty"` + // Whether a conflict was detected for this mutation. Always false when a + // conflict detection strategy field is not set in the mutation. + ConflictDetected bool `protobuf:"varint,5,opt,name=conflict_detected,json=conflictDetected,proto3" json:"conflict_detected,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutationResult) Reset() { *m = MutationResult{} } +func (m *MutationResult) String() string { return proto.CompactTextString(m) } +func (*MutationResult) ProtoMessage() {} +func (*MutationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{15} +} +func (m *MutationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutationResult.Unmarshal(m, b) +} +func (m *MutationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutationResult.Marshal(b, m, deterministic) +} +func (dst *MutationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutationResult.Merge(dst, src) +} +func (m *MutationResult) XXX_Size() int { + return xxx_messageInfo_MutationResult.Size(m) +} +func (m *MutationResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutationResult proto.InternalMessageInfo + +func (m *MutationResult) GetKey() *Key { + if m != nil { + return m.Key + } + return nil +} + +func (m *MutationResult) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *MutationResult) GetConflictDetected() bool { + if m != nil { + return m.ConflictDetected + } + return false +} + +// The options shared by read requests. +type ReadOptions struct { + // If not specified, lookups and ancestor queries default to + // `read_consistency`=`STRONG`, global queries default to + // `read_consistency`=`EVENTUAL`. 
+ // + // Types that are valid to be assigned to ConsistencyType: + // *ReadOptions_ReadConsistency_ + // *ReadOptions_Transaction + ConsistencyType isReadOptions_ConsistencyType `protobuf_oneof:"consistency_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadOptions) Reset() { *m = ReadOptions{} } +func (m *ReadOptions) String() string { return proto.CompactTextString(m) } +func (*ReadOptions) ProtoMessage() {} +func (*ReadOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{16} +} +func (m *ReadOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadOptions.Unmarshal(m, b) +} +func (m *ReadOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadOptions.Marshal(b, m, deterministic) +} +func (dst *ReadOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadOptions.Merge(dst, src) +} +func (m *ReadOptions) XXX_Size() int { + return xxx_messageInfo_ReadOptions.Size(m) +} +func (m *ReadOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ReadOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadOptions proto.InternalMessageInfo + +type isReadOptions_ConsistencyType interface { + isReadOptions_ConsistencyType() +} + +type ReadOptions_ReadConsistency_ struct { + ReadConsistency ReadOptions_ReadConsistency `protobuf:"varint,1,opt,name=read_consistency,json=readConsistency,proto3,enum=google.datastore.v1.ReadOptions_ReadConsistency,oneof"` +} + +type ReadOptions_Transaction struct { + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3,oneof"` +} + +func (*ReadOptions_ReadConsistency_) isReadOptions_ConsistencyType() {} + +func (*ReadOptions_Transaction) isReadOptions_ConsistencyType() {} + +func (m *ReadOptions) GetConsistencyType() isReadOptions_ConsistencyType { + if m != nil { + return m.ConsistencyType + } + return nil +} + +func (m *ReadOptions) GetReadConsistency() ReadOptions_ReadConsistency { + if x, ok := m.GetConsistencyType().(*ReadOptions_ReadConsistency_); ok { + return x.ReadConsistency + } + return ReadOptions_READ_CONSISTENCY_UNSPECIFIED +} + +func (m *ReadOptions) GetTransaction() []byte { + if x, ok := m.GetConsistencyType().(*ReadOptions_Transaction); ok { + return x.Transaction + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadOptions_OneofMarshaler, _ReadOptions_OneofUnmarshaler, _ReadOptions_OneofSizer, []interface{}{ + (*ReadOptions_ReadConsistency_)(nil), + (*ReadOptions_Transaction)(nil), + } +} + +func _ReadOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadOptions) + // consistency_type + switch x := m.ConsistencyType.(type) { + case *ReadOptions_ReadConsistency_: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ReadConsistency)) + case *ReadOptions_Transaction: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case nil: + default: + return fmt.Errorf("ReadOptions.ConsistencyType has unexpected type %T", x) + } + return nil +} + +func _ReadOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadOptions) + switch tag { + case 1: // consistency_type.read_consistency + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConsistencyType = &ReadOptions_ReadConsistency_{ReadOptions_ReadConsistency(x)} + return true, err + case 2: // consistency_type.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencyType = &ReadOptions_Transaction{x} + return true, err + default: + return false, nil + } +} + +func _ReadOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadOptions) + // consistency_type + switch x := m.ConsistencyType.(type) { + case *ReadOptions_ReadConsistency_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ReadConsistency)) + case *ReadOptions_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for beginning a new transaction. +// +// Transactions can be created explicitly with calls to +// [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction] +// or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1.ReadOptions.new_transaction] +// in read requests. +type TransactionOptions struct { + // The `mode` of the transaction, indicating whether write operations are + // supported. 
+ // + // Types that are valid to be assigned to Mode: + // *TransactionOptions_ReadWrite_ + // *TransactionOptions_ReadOnly_ + Mode isTransactionOptions_Mode `protobuf_oneof:"mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions) Reset() { *m = TransactionOptions{} } +func (m *TransactionOptions) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions) ProtoMessage() {} +func (*TransactionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{17} +} +func (m *TransactionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions.Unmarshal(m, b) +} +func (m *TransactionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions.Merge(dst, src) +} +func (m *TransactionOptions) XXX_Size() int { + return xxx_messageInfo_TransactionOptions.Size(m) +} +func (m *TransactionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions proto.InternalMessageInfo + +type isTransactionOptions_Mode interface { + isTransactionOptions_Mode() +} + +type TransactionOptions_ReadWrite_ struct { + ReadWrite *TransactionOptions_ReadWrite `protobuf:"bytes,1,opt,name=read_write,json=readWrite,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_ struct { + ReadOnly *TransactionOptions_ReadOnly `protobuf:"bytes,2,opt,name=read_only,json=readOnly,proto3,oneof"` +} + +func (*TransactionOptions_ReadWrite_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_ReadOnly_) isTransactionOptions_Mode() {} + +func (m *TransactionOptions) GetMode() isTransactionOptions_Mode { + if m != nil { + return m.Mode + } + return nil +} + +func (m *TransactionOptions) GetReadWrite() *TransactionOptions_ReadWrite { + if x, ok := m.GetMode().(*TransactionOptions_ReadWrite_); ok { + return x.ReadWrite + } + return nil +} + +func (m *TransactionOptions) GetReadOnly() *TransactionOptions_ReadOnly { + if x, ok := m.GetMode().(*TransactionOptions_ReadOnly_); ok { + return x.ReadOnly + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransactionOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_OneofMarshaler, _TransactionOptions_OneofUnmarshaler, _TransactionOptions_OneofSizer, []interface{}{ + (*TransactionOptions_ReadWrite_)(nil), + (*TransactionOptions_ReadOnly_)(nil), + } +} + +func _TransactionOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadWrite); err != nil { + return err + } + case *TransactionOptions_ReadOnly_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadOnly); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions.Mode has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions) + switch tag { + case 1: // mode.read_write + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadWrite) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadWrite_{msg} + return true, err + case 2: // mode.read_only + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadOnly) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadOnly_{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + s := proto.Size(x.ReadWrite) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_: + s := proto.Size(x.ReadOnly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options specific to read / write transactions. +type TransactionOptions_ReadWrite struct { + // The transaction identifier of the transaction being retried. 
+ PreviousTransaction []byte `protobuf:"bytes,1,opt,name=previous_transaction,json=previousTransaction,proto3" json:"previous_transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadWrite) Reset() { *m = TransactionOptions_ReadWrite{} } +func (m *TransactionOptions_ReadWrite) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadWrite) ProtoMessage() {} +func (*TransactionOptions_ReadWrite) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{17, 0} +} +func (m *TransactionOptions_ReadWrite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadWrite.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadWrite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadWrite.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadWrite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadWrite.Merge(dst, src) +} +func (m *TransactionOptions_ReadWrite) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadWrite.Size(m) +} +func (m *TransactionOptions_ReadWrite) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadWrite.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadWrite proto.InternalMessageInfo + +func (m *TransactionOptions_ReadWrite) GetPreviousTransaction() []byte { + if m != nil { + return m.PreviousTransaction + } + return nil +} + +// Options specific to read-only transactions. +type TransactionOptions_ReadOnly struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadOnly) Reset() { *m = TransactionOptions_ReadOnly{} } +func (m *TransactionOptions_ReadOnly) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadOnly) ProtoMessage() {} +func (*TransactionOptions_ReadOnly) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_834dd4313146b835, []int{17, 1} +} +func (m *TransactionOptions_ReadOnly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadOnly.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadOnly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadOnly.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadOnly) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadOnly.Merge(dst, src) +} +func (m *TransactionOptions_ReadOnly) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadOnly.Size(m) +} +func (m *TransactionOptions_ReadOnly) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadOnly.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadOnly proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LookupRequest)(nil), "google.datastore.v1.LookupRequest") + proto.RegisterType((*LookupResponse)(nil), "google.datastore.v1.LookupResponse") + proto.RegisterType((*RunQueryRequest)(nil), "google.datastore.v1.RunQueryRequest") + proto.RegisterType((*RunQueryResponse)(nil), "google.datastore.v1.RunQueryResponse") + proto.RegisterType((*BeginTransactionRequest)(nil), "google.datastore.v1.BeginTransactionRequest") + proto.RegisterType((*BeginTransactionResponse)(nil), "google.datastore.v1.BeginTransactionResponse") + 
proto.RegisterType((*RollbackRequest)(nil), "google.datastore.v1.RollbackRequest") + proto.RegisterType((*RollbackResponse)(nil), "google.datastore.v1.RollbackResponse") + proto.RegisterType((*CommitRequest)(nil), "google.datastore.v1.CommitRequest") + proto.RegisterType((*CommitResponse)(nil), "google.datastore.v1.CommitResponse") + proto.RegisterType((*AllocateIdsRequest)(nil), "google.datastore.v1.AllocateIdsRequest") + proto.RegisterType((*AllocateIdsResponse)(nil), "google.datastore.v1.AllocateIdsResponse") + proto.RegisterType((*ReserveIdsRequest)(nil), "google.datastore.v1.ReserveIdsRequest") + proto.RegisterType((*ReserveIdsResponse)(nil), "google.datastore.v1.ReserveIdsResponse") + proto.RegisterType((*Mutation)(nil), "google.datastore.v1.Mutation") + proto.RegisterType((*MutationResult)(nil), "google.datastore.v1.MutationResult") + proto.RegisterType((*ReadOptions)(nil), "google.datastore.v1.ReadOptions") + proto.RegisterType((*TransactionOptions)(nil), "google.datastore.v1.TransactionOptions") + proto.RegisterType((*TransactionOptions_ReadWrite)(nil), "google.datastore.v1.TransactionOptions.ReadWrite") + proto.RegisterType((*TransactionOptions_ReadOnly)(nil), "google.datastore.v1.TransactionOptions.ReadOnly") + proto.RegisterEnum("google.datastore.v1.CommitRequest_Mode", CommitRequest_Mode_name, CommitRequest_Mode_value) + proto.RegisterEnum("google.datastore.v1.ReadOptions_ReadConsistency", ReadOptions_ReadConsistency_name, ReadOptions_ReadConsistency_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatastoreClient is the client API for Datastore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatastoreClient interface { + // Looks up entities by key. + Lookup(ctx context.Context, in *LookupRequest, opts ...grpc.CallOption) (*LookupResponse, error) + // Queries for entities. + RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (*RunQueryResponse, error) + // Begins a new transaction. + BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*RollbackResponse, error) + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + AllocateIds(ctx context.Context, in *AllocateIdsRequest, opts ...grpc.CallOption) (*AllocateIdsResponse, error) + // Prevents the supplied keys' IDs from being auto-allocated by Cloud + // Datastore. 
+ ReserveIds(ctx context.Context, in *ReserveIdsRequest, opts ...grpc.CallOption) (*ReserveIdsResponse, error) +} + +type datastoreClient struct { + cc *grpc.ClientConn +} + +func NewDatastoreClient(cc *grpc.ClientConn) DatastoreClient { + return &datastoreClient{cc} +} + +func (c *datastoreClient) Lookup(ctx context.Context, in *LookupRequest, opts ...grpc.CallOption) (*LookupResponse, error) { + out := new(LookupResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/Lookup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (*RunQueryResponse, error) { + out := new(RunQueryResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/RunQuery", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) { + out := new(BeginTransactionResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/BeginTransaction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) { + out := new(CommitResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/Commit", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*RollbackResponse, error) { + out := new(RollbackResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/Rollback", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) AllocateIds(ctx context.Context, in *AllocateIdsRequest, opts ...grpc.CallOption) (*AllocateIdsResponse, error) { + out := new(AllocateIdsResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/AllocateIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) ReserveIds(ctx context.Context, in *ReserveIdsRequest, opts ...grpc.CallOption) (*ReserveIdsResponse, error) { + out := new(ReserveIdsResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1.Datastore/ReserveIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatastoreServer is the server API for Datastore service. +type DatastoreServer interface { + // Looks up entities by key. + Lookup(context.Context, *LookupRequest) (*LookupResponse, error) + // Queries for entities. + RunQuery(context.Context, *RunQueryRequest) (*RunQueryResponse, error) + // Begins a new transaction. + BeginTransaction(context.Context, *BeginTransactionRequest) (*BeginTransactionResponse, error) + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + Commit(context.Context, *CommitRequest) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(context.Context, *RollbackRequest) (*RollbackResponse, error) + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + AllocateIds(context.Context, *AllocateIdsRequest) (*AllocateIdsResponse, error) + // Prevents the supplied keys' IDs from being auto-allocated by Cloud + // Datastore. 
+ ReserveIds(context.Context, *ReserveIdsRequest) (*ReserveIdsResponse, error) +} + +func RegisterDatastoreServer(s *grpc.Server, srv DatastoreServer) { + s.RegisterService(&_Datastore_serviceDesc, srv) +} + +func _Datastore_Lookup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LookupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Lookup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/Lookup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Lookup(ctx, req.(*LookupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_RunQuery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunQueryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).RunQuery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/RunQuery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).RunQuery(ctx, req.(*RunQueryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_BeginTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BeginTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).BeginTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/BeginTransaction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).BeginTransaction(ctx, req.(*BeginTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_Commit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Commit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/Commit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Commit(ctx, req.(*CommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_Rollback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Rollback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/Rollback", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Rollback(ctx, req.(*RollbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_AllocateIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AllocateIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).AllocateIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/AllocateIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).AllocateIds(ctx, req.(*AllocateIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_ReserveIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReserveIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).ReserveIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1.Datastore/ReserveIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).ReserveIds(ctx, req.(*ReserveIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Datastore_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.datastore.v1.Datastore", + HandlerType: (*DatastoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Lookup", + Handler: _Datastore_Lookup_Handler, + }, + { + MethodName: "RunQuery", + Handler: _Datastore_RunQuery_Handler, + }, + { + MethodName: "BeginTransaction", + Handler: _Datastore_BeginTransaction_Handler, + }, + { + MethodName: "Commit", + Handler: _Datastore_Commit_Handler, + }, + { + MethodName: "Rollback", + Handler: _Datastore_Rollback_Handler, + }, + { + MethodName: "AllocateIds", + Handler: _Datastore_AllocateIds_Handler, + }, + { + MethodName: "ReserveIds", + Handler: _Datastore_ReserveIds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/datastore/v1/datastore.proto", +} + +func init() { + proto.RegisterFile("google/datastore/v1/datastore.proto", fileDescriptor_datastore_834dd4313146b835) +} + +var fileDescriptor_datastore_834dd4313146b835 = []byte{ + // 1390 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdf, 0x6f, 0x1b, 0xc5, + 0x13, 0xcf, 0x3a, 0x89, 0x63, 0x8f, 0xf3, 0xc3, 0xd9, 0xe4, 0xfb, 0xad, 0x71, 0x5b, 0xd5, 0x5c, + 0x1a, 0x1a, 0xd2, 0xd6, 0x4e, 0x0c, 0x15, 0x52, 0x53, 0x21, 0xc5, 0x8e, 0xdb, 0x58, 0x34, 0x76, + 0xd8, 0xa4, 0xe1, 0x87, 0x8a, 0xac, 0x8b, 0x6f, 0x6b, 0x8e, 0x9c, 0x6f, 0x2f, 0x77, 0xeb, 0x80, + 0x85, 0xa8, 0x54, 0x10, 0xbc, 0xc1, 0x43, 0xf9, 0x0b, 0xfa, 0xc2, 0x03, 0xe2, 0x91, 0x27, 0xc4, + 0x5f, 0xc0, 0x2b, 0xff, 0x02, 0x8f, 0xbc, 0xf1, 0x0f, 0xa0, 0xdb, 0xdb, 0xb3, 0x7d, 0xce, 0x5d, + 0xec, 0x48, 0xbc, 0x79, 0x67, 0xe7, 0x33, 0xf3, 0x99, 0x99, 0xbd, 0x99, 0x31, 0xac, 0xb4, 0x18, + 0x6b, 0x19, 0xb4, 0xa0, 0xa9, 0x5c, 0x75, 0x38, 0xb3, 0x69, 0xe1, 0x6c, 0xb3, 0x7f, 0xc8, 0x5b, + 0x36, 0xe3, 0x0c, 0x2f, 0x79, 0x4a, 0xf9, 0xbe, 0xfc, 0x6c, 0x33, 0x7b, 0x4d, 0x22, 0x55, 0x4b, + 0x2f, 0xa8, 0xa6, 0xc9, 0xb8, 0xca, 0x75, 0x66, 0x3a, 0x1e, 0x24, 0x9b, 0x0b, 0xb3, 0x4b, 0x4d, + 0xae, 0xf3, 0xae, 0xd4, 0xb8, 0x11, 0xa6, 0x71, 0xda, 0xa1, 0xb6, 0x54, 0x50, 0x5e, 0x21, 0x98, + 0x7b, 0xcc, 0xd8, 0x49, 0xc7, 0x22, 0xf4, 0xb4, 0x43, 0x1d, 0x8e, 0xaf, 0x03, 0x58, 0x36, 0xfb, + 0x8c, 0x36, 0x79, 0x43, 0xd7, 0x32, 0x89, 0x1c, 0x5a, 0x4b, 0x92, 0xa4, 0x94, 0x54, 0x35, 0x5c, + 0x86, 0x59, 0x9b, 0xaa, 0x5a, 0x83, 0x59, 0x82, 0x49, 
0x06, 0xe5, 0xd0, 0x5a, 0xaa, 0x98, 0xcb, + 0x87, 0xb0, 0xcf, 0x13, 0xaa, 0x6a, 0x75, 0x4f, 0x8f, 0xa4, 0xec, 0xfe, 0x01, 0xdf, 0x81, 0xa9, + 0x13, 0xda, 0x75, 0x32, 0x93, 0xb9, 0xc9, 0xb5, 0x54, 0x31, 0x13, 0x0a, 0x7e, 0x8f, 0x76, 0x89, + 0xd0, 0x52, 0x7e, 0x47, 0x30, 0xef, 0x73, 0x74, 0x2c, 0x66, 0x3a, 0x14, 0xbf, 0x03, 0xd3, 0xcf, + 0x58, 0xc7, 0xd4, 0x32, 0x48, 0x58, 0x78, 0x3d, 0xd4, 0x42, 0x45, 0x64, 0x82, 0x50, 0xa7, 0x63, + 0x70, 0xe2, 0xe9, 0xe3, 0x2d, 0x98, 0x69, 0xeb, 0x8e, 0xa3, 0x9b, 0xad, 0x4c, 0x6c, 0x5c, 0xa8, + 0x8f, 0xc0, 0x6f, 0x43, 0x42, 0xa3, 0xcf, 0xa8, 0x6d, 0x53, 0x6d, 0x24, 0xf5, 0x9e, 0xa6, 0xf2, + 0x5b, 0x0c, 0x16, 0x48, 0xc7, 0x7c, 0xdf, 0xcd, 0xfa, 0xf8, 0x49, 0xb6, 0x54, 0x9b, 0xeb, 0x6e, + 0xb6, 0x5c, 0x85, 0xd8, 0x05, 0x49, 0xde, 0xf7, 0x15, 0xab, 0x1a, 0x49, 0x59, 0xfd, 0xc3, 0x7f, + 0x53, 0xa9, 0x22, 0x4c, 0x8b, 0xe7, 0x92, 0x99, 0x14, 0xe8, 0x6c, 0x28, 0x5a, 0x84, 0xb6, 0x3b, + 0x41, 0x3c, 0x55, 0xfc, 0x00, 0x92, 0xad, 0x53, 0xa3, 0xe1, 0xe1, 0x66, 0x04, 0xee, 0x7a, 0x28, + 0xee, 0xd1, 0xa9, 0xe1, 0x43, 0x13, 0x2d, 0xf9, 0xbb, 0x34, 0x0b, 0x20, 0x90, 0x0d, 0xde, 0xb5, + 0xa8, 0xf2, 0x02, 0x41, 0xba, 0x9f, 0x3c, 0x59, 0xfd, 0x2d, 0x98, 0x3e, 0x56, 0x79, 0xf3, 0x53, + 0x19, 0xd2, 0x6a, 0x34, 0x29, 0xaf, 0x82, 0x25, 0x57, 0x99, 0x78, 0x18, 0xbc, 0xe1, 0x47, 0x14, + 0x1b, 0x15, 0x91, 0x8c, 0x47, 0x79, 0x89, 0xe0, 0x4a, 0x89, 0xb6, 0x74, 0xf3, 0xd0, 0x56, 0x4d, + 0x47, 0x6d, 0xba, 0x99, 0x19, 0xb3, 0x90, 0x1f, 0xc2, 0x12, 0xef, 0x83, 0x7a, 0xa5, 0x00, 0xe1, + 0xfa, 0x56, 0xa8, 0xeb, 0x01, 0x27, 0x7e, 0x45, 0x30, 0x3f, 0x27, 0x53, 0x1e, 0x40, 0xe6, 0x3c, + 0x27, 0x99, 0x9f, 0x1c, 0xa4, 0x06, 0x10, 0x22, 0x4b, 0xb3, 0x64, 0x50, 0xa4, 0x10, 0x58, 0x20, + 0xcc, 0x30, 0x8e, 0xd5, 0xe6, 0xc9, 0x98, 0x91, 0x8c, 0xb6, 0x89, 0x21, 0xdd, 0xb7, 0xe9, 0x31, + 0x51, 0x7e, 0x89, 0xc1, 0x5c, 0x99, 0xb5, 0xdb, 0x3a, 0x1f, 0xd3, 0xcd, 0x16, 0x4c, 0xb5, 0x99, + 0x46, 0x33, 0xd3, 0x39, 0xb4, 0x36, 0x1f, 0x91, 0xa1, 0x80, 0xc1, 0xfc, 0x1e, 0xd3, 0x28, 0x11, + 0x20, 0xac, 0x84, 0x70, 0xdc, 0x9d, 0x08, 0xb0, 0xc4, 0x5b, 0x90, 0x6c, 0x77, 0x64, 0x1b, 0xcd, + 0xc4, 0xc5, 0x47, 0x1c, 0xfe, 0x38, 0xf7, 0xa4, 0x16, 0xe9, 0xeb, 0x2b, 0x0f, 0x61, 0xca, 0x75, + 0x87, 0x97, 0x21, 0xbd, 0x57, 0xdf, 0xa9, 0x34, 0x9e, 0xd4, 0x0e, 0xf6, 0x2b, 0xe5, 0xea, 0xc3, + 0x6a, 0x65, 0x27, 0x3d, 0x81, 0x17, 0x61, 0xee, 0x90, 0x6c, 0xd7, 0x0e, 0xb6, 0xcb, 0x87, 0xd5, + 0x7a, 0x6d, 0xfb, 0x71, 0x1a, 0xe1, 0xff, 0xc1, 0x62, 0xad, 0x5e, 0x6b, 0x04, 0xc5, 0xb1, 0xd2, + 0xff, 0x61, 0x79, 0xf0, 0x59, 0x38, 0xd4, 0xa0, 0x4d, 0xce, 0x6c, 0xe5, 0x5b, 0x04, 0xf3, 0x7e, + 0x74, 0xb2, 0x96, 0x35, 0x48, 0xfb, 0xfe, 0x1b, 0xb6, 0x78, 0xcd, 0x7e, 0xdb, 0x5c, 0xb9, 0x98, + 0xb6, 0xd7, 0xbb, 0x16, 0xda, 0x81, 0xb3, 0x83, 0x57, 0x60, 0x4e, 0x37, 0x35, 0xfa, 0x45, 0xa3, + 0x63, 0x69, 0x2a, 0xa7, 0x4e, 0x66, 0x2a, 0x87, 0xd6, 0xa6, 0xc9, 0xac, 0x10, 0x3e, 0xf1, 0x64, + 0x8a, 0x0a, 0x78, 0xdb, 0x30, 0x58, 0x53, 0xe5, 0xb4, 0xaa, 0x39, 0x63, 0x96, 0xce, 0x6f, 0xea, + 0x68, 0xac, 0xa6, 0x5e, 0x86, 0xa5, 0x80, 0x0b, 0x19, 0xee, 0xe5, 0x8c, 0xbc, 0x40, 0xb0, 0x48, + 0xa8, 0x43, 0xed, 0xb3, 0x4b, 0xf0, 0xbc, 0x01, 0x29, 0xd7, 0xdc, 0xb1, 0xea, 0x50, 0xf7, 0x3e, + 0x29, 0xee, 0xc1, 0x17, 0x5d, 0x3a, 0x90, 0x65, 0xc0, 0x83, 0x14, 0xe4, 0xc3, 0xff, 0x35, 0x06, + 0x09, 0xbf, 0x14, 0xf8, 0x1e, 0xc4, 0x75, 0xd3, 0xa1, 0x36, 0x17, 0xc9, 0x4e, 0x15, 0xaf, 0x5e, + 0x30, 0x73, 0x76, 0x27, 0x88, 0x54, 0x76, 0x61, 0x5e, 0x91, 0xc4, 0xd7, 0x30, 0x1a, 0xe6, 0x29, + 0x7b, 0x30, 0xe1, 0x2d, 0x3e, 0x26, 0x4c, 0x78, 0x2b, 0x42, 0x5c, 0xa3, 0x06, 
0xe5, 0x54, 0xb6, + 0xec, 0xc8, 0xb8, 0x5d, 0x8c, 0xa7, 0x89, 0x57, 0x60, 0x56, 0xa4, 0xf1, 0x8c, 0xda, 0x8e, 0xfb, + 0xc5, 0xb9, 0xb9, 0x9e, 0xdc, 0x45, 0x24, 0xe5, 0x4a, 0x8f, 0x3c, 0x61, 0x29, 0x05, 0x49, 0x66, + 0x51, 0x5b, 0xa4, 0xa2, 0x74, 0x1d, 0xae, 0x36, 0x99, 0xf9, 0xcc, 0xd0, 0x9b, 0xbc, 0xa1, 0x51, + 0x4e, 0xe5, 0x07, 0xc0, 0x6d, 0x95, 0xd3, 0x56, 0x57, 0xf9, 0x06, 0xc1, 0x7c, 0xf0, 0x05, 0xe3, + 0x75, 0x98, 0x3c, 0xa1, 0xfe, 0xfc, 0x89, 0x2e, 0x86, 0xab, 0x84, 0x33, 0x30, 0xe3, 0x53, 0x71, + 0x33, 0x3d, 0x49, 0xfc, 0x23, 0xbe, 0x0d, 0x8b, 0x43, 0x7e, 0xa9, 0x26, 0xd2, 0x9a, 0x20, 0x69, + 0xff, 0x62, 0x47, 0xca, 0x95, 0x7f, 0x10, 0xa4, 0x06, 0x26, 0x22, 0xfe, 0x04, 0xd2, 0x62, 0x92, + 0x36, 0x99, 0xe9, 0xe8, 0x0e, 0xa7, 0x66, 0xb3, 0x2b, 0x9a, 0xcb, 0x7c, 0x71, 0x63, 0xd4, 0x34, + 0x15, 0xbf, 0xcb, 0x7d, 0xdc, 0xee, 0x04, 0x59, 0xb0, 0x83, 0xa2, 0xe1, 0xb6, 0x15, 0x0b, 0x69, + 0x5b, 0xca, 0x1e, 0x2c, 0x0c, 0x59, 0xc2, 0x39, 0xb8, 0x46, 0x2a, 0xdb, 0x3b, 0x8d, 0x72, 0xbd, + 0x76, 0x50, 0x3d, 0x38, 0xac, 0xd4, 0xca, 0x1f, 0x0d, 0x35, 0x24, 0x80, 0xf8, 0xc1, 0x21, 0xa9, + 0xd7, 0x1e, 0xa5, 0x11, 0x9e, 0x85, 0x44, 0xe5, 0xa8, 0x52, 0x3b, 0x7c, 0x22, 0x1a, 0x10, 0x86, + 0xf4, 0x40, 0x30, 0xde, 0xa8, 0xfd, 0x3e, 0x06, 0xf8, 0xfc, 0xf0, 0xc1, 0x04, 0x40, 0x04, 0xff, + 0xb9, 0xad, 0x73, 0x2a, 0x27, 0xee, 0xe6, 0x98, 0x93, 0x4b, 0x44, 0xff, 0x81, 0x0b, 0xdc, 0x9d, + 0x20, 0x49, 0xdb, 0x3f, 0xe0, 0x3a, 0x24, 0xbd, 0xd5, 0xc4, 0x34, 0xfc, 0x39, 0xbc, 0x71, 0x19, + 0x93, 0x75, 0xd3, 0x10, 0x4b, 0x83, 0x2d, 0x7f, 0x67, 0xdf, 0x85, 0x64, 0xcf, 0x15, 0xde, 0x84, + 0x65, 0xcb, 0xa6, 0x67, 0x3a, 0xeb, 0x38, 0x8d, 0xf3, 0x33, 0x6b, 0xc9, 0xbf, 0x1b, 0xb0, 0x9d, + 0x05, 0x48, 0xf8, 0x76, 0x4b, 0x71, 0x6f, 0x04, 0x15, 0xff, 0x9e, 0x81, 0xe4, 0x8e, 0x4f, 0x06, + 0x3f, 0x87, 0xb8, 0xb7, 0x83, 0x62, 0x25, 0x94, 0x69, 0x60, 0x89, 0xce, 0xae, 0x5c, 0xa8, 0x23, + 0x7b, 0xc4, 0xed, 0xaf, 0xff, 0xfc, 0xeb, 0xc7, 0xd8, 0xaa, 0x92, 0x73, 0x97, 0x72, 0xd9, 0x9f, + 0x9c, 0xc2, 0x97, 0xfd, 0xde, 0xf5, 0xd5, 0x7d, 0x43, 0x20, 0xee, 0xa3, 0x75, 0xfc, 0x1d, 0x82, + 0x84, 0xbf, 0x08, 0xe1, 0x9b, 0xe1, 0xcf, 0x2e, 0xb8, 0x64, 0x66, 0x57, 0x47, 0x68, 0x49, 0x1a, + 0x77, 0x05, 0x8d, 0x5b, 0x8a, 0x12, 0x4d, 0xc3, 0x96, 0x18, 0x97, 0xc8, 0x4f, 0x08, 0xd2, 0xc3, + 0x9b, 0x07, 0xbe, 0x13, 0xea, 0x2a, 0x62, 0x69, 0xca, 0xde, 0x1d, 0x53, 0x5b, 0x12, 0xbc, 0x27, + 0x08, 0x16, 0x94, 0xf5, 0x68, 0x82, 0xc7, 0x43, 0x58, 0x97, 0xe8, 0x73, 0x88, 0x7b, 0xb3, 0x34, + 0xa2, 0x62, 0x81, 0x35, 0x22, 0xa2, 0x62, 0xc1, 0x61, 0x3c, 0x4e, 0xc5, 0x9a, 0x02, 0xd1, 0xab, + 0x98, 0x5c, 0x88, 0xa2, 0x2a, 0x16, 0xdc, 0xc1, 0xa2, 0x2a, 0x36, 0xbc, 0x55, 0x8d, 0x53, 0x31, + 0x89, 0x71, 0x89, 0xbc, 0x44, 0x90, 0x1a, 0x98, 0xb5, 0x38, 0x7c, 0xab, 0x3a, 0x3f, 0xf0, 0xb3, + 0x6b, 0xa3, 0x15, 0x25, 0xa3, 0x0d, 0xc1, 0x68, 0x5d, 0x59, 0x8d, 0x66, 0xa4, 0xf6, 0x61, 0x2e, + 0xa9, 0x1f, 0x10, 0x40, 0x7f, 0x6e, 0xe2, 0x37, 0x22, 0x1a, 0xe9, 0xd0, 0x6c, 0xcf, 0xde, 0x1a, + 0xa9, 0x27, 0x19, 0x15, 0x04, 0xa3, 0x37, 0x95, 0x9b, 0x17, 0xe4, 0xa8, 0x87, 0xba, 0x8f, 0xd6, + 0x4b, 0xaf, 0x10, 0x5c, 0x69, 0xb2, 0x76, 0x98, 0xfd, 0xd2, 0x7c, 0xaf, 0x0f, 0xec, 0xbb, 0xff, + 0x9a, 0xf7, 0xd1, 0xc7, 0x0f, 0xa4, 0x5a, 0x8b, 0x19, 0xaa, 0xd9, 0xca, 0x33, 0xbb, 0x55, 0x68, + 0x51, 0x53, 0xfc, 0xa7, 0x2e, 0x78, 0x57, 0xaa, 0xa5, 0x3b, 0x81, 0xff, 0xdd, 0x5b, 0xbd, 0xc3, + 0xcf, 0xb1, 0xd7, 0x1e, 0x79, 0xf0, 0xb2, 0xc1, 0x3a, 0x5a, 0xbe, 0x67, 0x3d, 0x7f, 0xb4, 0xf9, + 0x87, 0x7f, 0xf7, 0x54, 0xdc, 0x3d, 0xed, 0xdd, 0x3d, 0x3d, 0xda, 0x3c, 0x8e, 0x0b, 0x07, 0x6f, + 0xfd, 
0x1b, 0x00, 0x00, 0xff, 0xff, 0x25, 0x27, 0xe9, 0x95, 0x51, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1/entity.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/v1/entity.pb.go new file mode 100644 index 0000000..ec4b4dc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/v1/entity.pb.go @@ -0,0 +1,927 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1/entity.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. +// +// A partition ID contains several dimensions: +// project ID and namespace ID. +// +// Partition dimensions: +// +// - May be `""`. +// - Must be valid UTF-8 bytes. +// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` +// If the value of any dimension matches regex `__.*__`, the partition is +// reserved/read-only. +// A reserved/read-only partition ID is forbidden in certain documented +// contexts. +// +// Foreign partition IDs (in which the project ID does +// not match the context project ID ) are discouraged. +// Reads and writes of foreign partition IDs may fail if the project is not in +// an active state. +type PartitionId struct { + // The ID of the project to which the entities belong. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If not empty, the ID of the namespace to which the entities belong. 
+ NamespaceId string `protobuf:"bytes,4,opt,name=namespace_id,json=namespaceId,proto3" json:"namespace_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionId) Reset() { *m = PartitionId{} } +func (m *PartitionId) String() string { return proto.CompactTextString(m) } +func (*PartitionId) ProtoMessage() {} +func (*PartitionId) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{0} +} +func (m *PartitionId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionId.Unmarshal(m, b) +} +func (m *PartitionId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionId.Marshal(b, m, deterministic) +} +func (dst *PartitionId) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionId.Merge(dst, src) +} +func (m *PartitionId) XXX_Size() int { + return xxx_messageInfo_PartitionId.Size(m) +} +func (m *PartitionId) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionId.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionId proto.InternalMessageInfo + +func (m *PartitionId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *PartitionId) GetNamespaceId() string { + if m != nil { + return m.NamespaceId + } + return "" +} + +// A unique identifier for an entity. +// If a key's partition ID or any of its path kinds or names are +// reserved/read-only, the key is reserved/read-only. +// A reserved/read-only key is forbidden in certain documented contexts. +type Key struct { + // Entities are partitioned into subsets, currently identified by a project + // ID and namespace ID. + // Queries are scoped to a single partition. + PartitionId *PartitionId `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element identifies a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // + // An entity path is always fully complete: *all* of the entity's ancestors + // are required to be in the path along with the entity identifier itself. + // The only exception is that in some documented cases, the identifier in the + // last path element (for the entity) itself may be omitted. For example, + // the last path element of the key of `Mutation.insert` may have no + // identifier. + // + // A path can never be empty, and a path can have at most 100 elements. 
+ Path []*Key_PathElement `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key) Reset() { *m = Key{} } +func (m *Key) String() string { return proto.CompactTextString(m) } +func (*Key) ProtoMessage() {} +func (*Key) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{1} +} +func (m *Key) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key.Unmarshal(m, b) +} +func (m *Key) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key.Marshal(b, m, deterministic) +} +func (dst *Key) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key.Merge(dst, src) +} +func (m *Key) XXX_Size() int { + return xxx_messageInfo_Key.Size(m) +} +func (m *Key) XXX_DiscardUnknown() { + xxx_messageInfo_Key.DiscardUnknown(m) +} + +var xxx_messageInfo_Key proto.InternalMessageInfo + +func (m *Key) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *Key) GetPath() []*Key_PathElement { + if m != nil { + return m.Path + } + return nil +} + +// A (kind, ID/name) pair used to construct a key path. +// +// If either name or ID is set, the element is complete. +// If neither is set, the element is incomplete. +type Key_PathElement struct { + // The kind of the entity. + // A kind matching regex `__.*__` is reserved/read-only. + // A kind must not contain more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` + // The type of ID. + // + // Types that are valid to be assigned to IdType: + // *Key_PathElement_Id + // *Key_PathElement_Name + IdType isKey_PathElement_IdType `protobuf_oneof:"id_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key_PathElement) Reset() { *m = Key_PathElement{} } +func (m *Key_PathElement) String() string { return proto.CompactTextString(m) } +func (*Key_PathElement) ProtoMessage() {} +func (*Key_PathElement) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{1, 0} +} +func (m *Key_PathElement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key_PathElement.Unmarshal(m, b) +} +func (m *Key_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key_PathElement.Marshal(b, m, deterministic) +} +func (dst *Key_PathElement) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key_PathElement.Merge(dst, src) +} +func (m *Key_PathElement) XXX_Size() int { + return xxx_messageInfo_Key_PathElement.Size(m) +} +func (m *Key_PathElement) XXX_DiscardUnknown() { + xxx_messageInfo_Key_PathElement.DiscardUnknown(m) +} + +var xxx_messageInfo_Key_PathElement proto.InternalMessageInfo + +func (m *Key_PathElement) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +type isKey_PathElement_IdType interface { + isKey_PathElement_IdType() +} + +type Key_PathElement_Id struct { + Id int64 `protobuf:"varint,2,opt,name=id,proto3,oneof"` +} + +type Key_PathElement_Name struct { + Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"` +} + +func (*Key_PathElement_Id) isKey_PathElement_IdType() {} + +func (*Key_PathElement_Name) isKey_PathElement_IdType() {} + +func (m *Key_PathElement) GetIdType() isKey_PathElement_IdType { + if m != nil { + return m.IdType + } + return nil +} + +func (m 
*Key_PathElement) GetId() int64 { + if x, ok := m.GetIdType().(*Key_PathElement_Id); ok { + return x.Id + } + return 0 +} + +func (m *Key_PathElement) GetName() string { + if x, ok := m.GetIdType().(*Key_PathElement_Name); ok { + return x.Name + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Key_PathElement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Key_PathElement_OneofMarshaler, _Key_PathElement_OneofUnmarshaler, _Key_PathElement_OneofSizer, []interface{}{ + (*Key_PathElement_Id)(nil), + (*Key_PathElement_Name)(nil), + } +} + +func _Key_PathElement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case nil: + default: + return fmt.Errorf("Key_PathElement.IdType has unexpected type %T", x) + } + return nil +} + +func _Key_PathElement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Key_PathElement) + switch tag { + case 2: // id_type.id + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.IdType = &Key_PathElement_Id{int64(x)} + return true, err + case 3: // id_type.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IdType = &Key_PathElement_Name{x} + return true, err + default: + return false, nil + } +} + +func _Key_PathElement_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An array value. +type ArrayValue struct { + // Values in the array. + // The order of this array may not be preserved if it contains a mix of + // indexed and unindexed values. 
+ Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArrayValue) Reset() { *m = ArrayValue{} } +func (m *ArrayValue) String() string { return proto.CompactTextString(m) } +func (*ArrayValue) ProtoMessage() {} +func (*ArrayValue) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{2} +} +func (m *ArrayValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArrayValue.Unmarshal(m, b) +} +func (m *ArrayValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArrayValue.Marshal(b, m, deterministic) +} +func (dst *ArrayValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArrayValue.Merge(dst, src) +} +func (m *ArrayValue) XXX_Size() int { + return xxx_messageInfo_ArrayValue.Size(m) +} +func (m *ArrayValue) XXX_DiscardUnknown() { + xxx_messageInfo_ArrayValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ArrayValue proto.InternalMessageInfo + +func (m *ArrayValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A message that can hold any of the supported value types and associated +// metadata. +type Value struct { + // Must have a value set. + // + // Types that are valid to be assigned to ValueType: + // *Value_NullValue + // *Value_BooleanValue + // *Value_IntegerValue + // *Value_DoubleValue + // *Value_TimestampValue + // *Value_KeyValue + // *Value_StringValue + // *Value_BlobValue + // *Value_GeoPointValue + // *Value_EntityValue + // *Value_ArrayValue + ValueType isValue_ValueType `protobuf_oneof:"value_type"` + // The `meaning` field should only be populated for backwards compatibility. + Meaning int32 `protobuf:"varint,14,opt,name=meaning,proto3" json:"meaning,omitempty"` + // If the value should be excluded from all indexes including those defined + // explicitly. 
+ ExcludeFromIndexes bool `protobuf:"varint,19,opt,name=exclude_from_indexes,json=excludeFromIndexes,proto3" json:"exclude_from_indexes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{3} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_ValueType interface { + isValue_ValueType() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,11,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,1,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,2,opt,name=integer_value,json=integerValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,10,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +type Value_KeyValue struct { + KeyValue *Key `protobuf:"bytes,5,opt,name=key_value,json=keyValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,17,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BlobValue struct { + BlobValue []byte `protobuf:"bytes,18,opt,name=blob_value,json=blobValue,proto3,oneof"` +} + +type Value_GeoPointValue struct { + GeoPointValue *latlng.LatLng `protobuf:"bytes,8,opt,name=geo_point_value,json=geoPointValue,proto3,oneof"` +} + +type Value_EntityValue struct { + EntityValue *Entity `protobuf:"bytes,6,opt,name=entity_value,json=entityValue,proto3,oneof"` +} + +type Value_ArrayValue struct { + ArrayValue *ArrayValue `protobuf:"bytes,9,opt,name=array_value,json=arrayValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_ValueType() {} + +func (*Value_BooleanValue) isValue_ValueType() {} + +func (*Value_IntegerValue) isValue_ValueType() {} + +func (*Value_DoubleValue) isValue_ValueType() {} + +func (*Value_TimestampValue) isValue_ValueType() {} + +func (*Value_KeyValue) isValue_ValueType() {} + +func (*Value_StringValue) isValue_ValueType() {} + +func (*Value_BlobValue) isValue_ValueType() {} + +func (*Value_GeoPointValue) isValue_ValueType() {} + +func (*Value_EntityValue) isValue_ValueType() {} + +func (*Value_ArrayValue) isValue_ValueType() {} + +func (m *Value) GetValueType() isValue_ValueType { + if m != nil { + return m.ValueType + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetValueType().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBooleanValue() bool 
{ + if x, ok := m.GetValueType().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetIntegerValue() int64 { + if x, ok := m.GetValueType().(*Value_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetValueType().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetValueType().(*Value_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +func (m *Value) GetKeyValue() *Key { + if x, ok := m.GetValueType().(*Value_KeyValue); ok { + return x.KeyValue + } + return nil +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetValueType().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBlobValue() []byte { + if x, ok := m.GetValueType().(*Value_BlobValue); ok { + return x.BlobValue + } + return nil +} + +func (m *Value) GetGeoPointValue() *latlng.LatLng { + if x, ok := m.GetValueType().(*Value_GeoPointValue); ok { + return x.GeoPointValue + } + return nil +} + +func (m *Value) GetEntityValue() *Entity { + if x, ok := m.GetValueType().(*Value_EntityValue); ok { + return x.EntityValue + } + return nil +} + +func (m *Value) GetArrayValue() *ArrayValue { + if x, ok := m.GetValueType().(*Value_ArrayValue); ok { + return x.ArrayValue + } + return nil +} + +func (m *Value) GetMeaning() int32 { + if m != nil { + return m.Meaning + } + return 0 +} + +func (m *Value) GetExcludeFromIndexes() bool { + if m != nil { + return m.ExcludeFromIndexes + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BooleanValue)(nil), + (*Value_IntegerValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_TimestampValue)(nil), + (*Value_KeyValue)(nil), + (*Value_StringValue)(nil), + (*Value_BlobValue)(nil), + (*Value_GeoPointValue)(nil), + (*Value_EntityValue)(nil), + (*Value_ArrayValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_IntegerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_TimestampValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return err + } + case *Value_KeyValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KeyValue); err != nil { + return err + } + case *Value_StringValue: + b.EncodeVarint(17<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BlobValue: + b.EncodeVarint(18<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BlobValue) + case *Value_GeoPointValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.GeoPointValue); err != nil { + return err + } + case *Value_EntityValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EntityValue); err != nil { + return err + } + case *Value_ArrayValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Value.ValueType has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 11: // value_type.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 1: // value_type.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_BooleanValue{x != 0} + return true, err + case 2: // value_type.integer_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_IntegerValue{int64(x)} + return true, err + case 3: // value_type.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ValueType = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 10: // value_type.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ValueType = &Value_TimestampValue{msg} + return true, err + case 5: // value_type.key_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Key) + err := b.DecodeMessage(msg) + m.ValueType = &Value_KeyValue{msg} + return true, err + case 17: // value_type.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_StringValue{x} + return true, err + case 18: // value_type.blob_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ValueType = &Value_BlobValue{x} + return true, err + case 8: // value_type.geo_point_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(latlng.LatLng) + err := b.DecodeMessage(msg) + m.ValueType = &Value_GeoPointValue{msg} + return true, err + case 6: // value_type.entity_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.ValueType = &Value_EntityValue{msg} + return true, err + case 9: // value_type.array_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_ArrayValue{msg} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_IntegerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_TimestampValue: + s := 
proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_KeyValue: + s := proto.Size(x.KeyValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_StringValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BlobValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.BlobValue))) + n += len(x.BlobValue) + case *Value_GeoPointValue: + s := proto.Size(x.GeoPointValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_EntityValue: + s := proto.Size(x.EntityValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ArrayValue: + s := proto.Size(x.ArrayValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Datastore data object. +// +// An entity is limited to 1 megabyte when stored. That _roughly_ +// corresponds to a limit of 1 megabyte for the serialized form of this +// message. +type Entity struct { + // The entity's key. + // + // An entity must have a key, unless otherwise documented (for example, + // an entity in `Value.entity_value` may have no key). + // An entity's kind is its key path's last element's kind, + // or null if it has no key. + Key *Key `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The entity's properties. + // The map's keys are property names. + // A property name matching regex `__.*__` is reserved. + // A reserved property name is forbidden in certain documented contexts. + // The name must not contain more than 500 characters. + // The name cannot be `""`. 
+ Properties map[string]*Value `protobuf:"bytes,3,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_3e78f5f5131c3012, []int{4} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetKey() *Key { + if m != nil { + return m.Key + } + return nil +} + +func (m *Entity) GetProperties() map[string]*Value { + if m != nil { + return m.Properties + } + return nil +} + +func init() { + proto.RegisterType((*PartitionId)(nil), "google.datastore.v1.PartitionId") + proto.RegisterType((*Key)(nil), "google.datastore.v1.Key") + proto.RegisterType((*Key_PathElement)(nil), "google.datastore.v1.Key.PathElement") + proto.RegisterType((*ArrayValue)(nil), "google.datastore.v1.ArrayValue") + proto.RegisterType((*Value)(nil), "google.datastore.v1.Value") + proto.RegisterType((*Entity)(nil), "google.datastore.v1.Entity") + proto.RegisterMapType((map[string]*Value)(nil), "google.datastore.v1.Entity.PropertiesEntry") +} + +func init() { + proto.RegisterFile("google/datastore/v1/entity.proto", fileDescriptor_entity_3e78f5f5131c3012) +} + +var fileDescriptor_entity_3e78f5f5131c3012 = []byte{ + // 780 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xff, 0x6e, 0xdc, 0x44, + 0x10, 0xc7, 0xed, 0xbb, 0x5c, 0x1a, 0x8f, 0xdd, 0xa4, 0x6c, 0x2a, 0x61, 0x02, 0x28, 0x26, 0x80, + 0x74, 0x02, 0xc9, 0x6e, 0xc2, 0x1f, 0x54, 0x14, 0xa4, 0x72, 0x25, 0xe0, 0x28, 0x15, 0x9c, 0x56, + 0x55, 0x24, 0x50, 0xa4, 0xd3, 0xde, 0x79, 0xeb, 0x2e, 0x67, 0xef, 0x5a, 0xf6, 0x3a, 0xaa, 0xdf, + 0x05, 0xf1, 0x00, 0x3c, 0x0a, 0x8f, 0x80, 0x78, 0x18, 0xb4, 0x3f, 0xec, 0x0b, 0xed, 0x35, 0xff, + 0x79, 0x67, 0x3e, 0xdf, 0xd9, 0xef, 0xec, 0xce, 0x1a, 0xa2, 0x5c, 0x88, 0xbc, 0xa0, 0x49, 0x46, + 0x24, 0x69, 0xa4, 0xa8, 0x69, 0x72, 0x73, 0x9a, 0x50, 0x2e, 0x99, 0xec, 0xe2, 0xaa, 0x16, 0x52, + 0xa0, 0x43, 0x43, 0xc4, 0x03, 0x11, 0xdf, 0x9c, 0x1e, 0x7d, 0x64, 0x65, 0xa4, 0x62, 0x09, 0xe1, + 0x5c, 0x48, 0x22, 0x99, 0xe0, 0x8d, 0x91, 0x0c, 0x59, 0xbd, 0x5a, 0xb6, 0x2f, 0x93, 0x46, 0xd6, + 0xed, 0x4a, 0xda, 0xec, 0xf1, 0x9b, 0x59, 0xc9, 0x4a, 0xda, 0x48, 0x52, 0x56, 0x16, 0x08, 0x2d, + 0x20, 0xbb, 0x8a, 0x26, 0x05, 0x91, 0x05, 0xcf, 0x4d, 0xe6, 0xe4, 0x17, 0xf0, 0xe7, 0xa4, 0x96, + 0x4c, 0x6d, 0x76, 0x91, 0xa1, 0x8f, 0x01, 0xaa, 0x5a, 0xfc, 0x4e, 0x57, 0x72, 0xc1, 0xb2, 0x70, + 0x14, 0xb9, 0x53, 0x0f, 0x7b, 0x36, 0x72, 0x91, 0xa1, 0x4f, 0x20, 0xe0, 0xa4, 0xa4, 0x4d, 0x45, + 0x56, 0x54, 0x01, 0x3b, 0x1a, 0xf0, 0x87, 0xd8, 0x45, 0x76, 0xf2, 0x8f, 0x0b, 0xe3, 0x4b, 0xda, + 0xa1, 0x67, 0x10, 0x54, 0x7d, 0x61, 0x85, 0xba, 0x91, 0x3b, 0xf5, 0xcf, 0xa2, 
0x78, 0x4b, 0xef, + 0xf1, 0x2d, 0x07, 0xd8, 0xaf, 0x6e, 0xd9, 0x79, 0x0c, 0x3b, 0x15, 0x91, 0xaf, 0xc2, 0x51, 0x34, + 0x9e, 0xfa, 0x67, 0x9f, 0x6d, 0x15, 0x5f, 0xd2, 0x2e, 0x9e, 0x13, 0xf9, 0xea, 0xbc, 0xa0, 0x25, + 0xe5, 0x12, 0x6b, 0xc5, 0xd1, 0x0b, 0xd5, 0xd7, 0x10, 0x44, 0x08, 0x76, 0xd6, 0x8c, 0x1b, 0x17, + 0x1e, 0xd6, 0xdf, 0xe8, 0x01, 0x8c, 0x6c, 0x8f, 0xe3, 0xd4, 0xc1, 0x23, 0x96, 0xa1, 0x87, 0xb0, + 0xa3, 0x5a, 0x09, 0xc7, 0x8a, 0x4a, 0x1d, 0xac, 0x57, 0x33, 0x0f, 0xee, 0xb1, 0x6c, 0xa1, 0x8e, + 0xee, 0xe4, 0x29, 0xc0, 0xf7, 0x75, 0x4d, 0xba, 0x2b, 0x52, 0xb4, 0x14, 0x9d, 0xc1, 0xee, 0x8d, + 0xfa, 0x68, 0x42, 0x57, 0xfb, 0x3b, 0xda, 0xea, 0x4f, 0xb3, 0xd8, 0x92, 0x27, 0x7f, 0x4c, 0x60, + 0x62, 0xd4, 0x4f, 0x00, 0x78, 0x5b, 0x14, 0x0b, 0x9d, 0x08, 0xfd, 0xc8, 0x9d, 0xee, 0x6f, 0x2a, + 0xf4, 0x37, 0x19, 0xff, 0xdc, 0x16, 0x85, 0xe6, 0x53, 0x07, 0x7b, 0xbc, 0x5f, 0xa0, 0xcf, 0xe1, + 0xfe, 0x52, 0x88, 0x82, 0x12, 0x6e, 0xf5, 0xaa, 0xb1, 0xbd, 0xd4, 0xc1, 0x81, 0x0d, 0x0f, 0x18, + 0xe3, 0x92, 0xe6, 0xb4, 0xb6, 0x58, 0xdf, 0x6d, 0x60, 0xc3, 0x06, 0xfb, 0x14, 0x82, 0x4c, 0xb4, + 0xcb, 0x82, 0x5a, 0x4a, 0xf5, 0xef, 0xa6, 0x0e, 0xf6, 0x4d, 0xd4, 0x40, 0xe7, 0x70, 0x30, 0x8c, + 0x95, 0xe5, 0x40, 0xdf, 0xe9, 0xdb, 0xa6, 0x5f, 0xf4, 0x5c, 0xea, 0xe0, 0xfd, 0x41, 0x64, 0xca, + 0x7c, 0x0d, 0xde, 0x9a, 0x76, 0xb6, 0xc0, 0x44, 0x17, 0x08, 0xdf, 0x75, 0xaf, 0xa9, 0x83, 0xf7, + 0xd6, 0xb4, 0x1b, 0x4c, 0x36, 0xb2, 0x66, 0x3c, 0xb7, 0xda, 0xf7, 0xec, 0x25, 0xf9, 0x26, 0x6a, + 0xa0, 0x63, 0x80, 0x65, 0x21, 0x96, 0x16, 0x41, 0x91, 0x3b, 0x0d, 0xd4, 0xc1, 0xa9, 0x98, 0x01, + 0xbe, 0x83, 0x83, 0x9c, 0x8a, 0x45, 0x25, 0x18, 0x97, 0x96, 0xda, 0xd3, 0x26, 0x0e, 0x7b, 0x13, + 0xea, 0xa2, 0xe3, 0xe7, 0x44, 0x3e, 0xe7, 0x79, 0xea, 0xe0, 0xfb, 0x39, 0x15, 0x73, 0x05, 0x1b, + 0xf9, 0x53, 0x08, 0xcc, 0x53, 0xb6, 0xda, 0x5d, 0xad, 0xfd, 0x70, 0x6b, 0x03, 0xe7, 0x1a, 0x54, + 0x0e, 0x8d, 0xc4, 0x54, 0x98, 0x81, 0x4f, 0xd4, 0x08, 0xd9, 0x02, 0x9e, 0x2e, 0x70, 0xbc, 0xb5, + 0xc0, 0x66, 0xd4, 0x52, 0x07, 0x03, 0xd9, 0x0c, 0x5e, 0x08, 0xf7, 0x4a, 0x4a, 0x38, 0xe3, 0x79, + 0xb8, 0x1f, 0xb9, 0xd3, 0x09, 0xee, 0x97, 0xe8, 0x11, 0x3c, 0xa4, 0xaf, 0x57, 0x45, 0x9b, 0xd1, + 0xc5, 0xcb, 0x5a, 0x94, 0x0b, 0xc6, 0x33, 0xfa, 0x9a, 0x36, 0xe1, 0xa1, 0x1a, 0x0f, 0x8c, 0x6c, + 0xee, 0xc7, 0x5a, 0x94, 0x17, 0x26, 0x33, 0x0b, 0x00, 0xb4, 0x13, 0x33, 0xe0, 0xff, 0xba, 0xb0, + 0x6b, 0x7c, 0xa3, 0x2f, 0x60, 0xbc, 0xa6, 0x9d, 0x7d, 0xb7, 0xef, 0xbc, 0x22, 0xac, 0x20, 0x74, + 0xa9, 0x7f, 0x1b, 0x15, 0xad, 0x25, 0xa3, 0x4d, 0x38, 0xd6, 0xaf, 0xe1, 0xcb, 0x3b, 0x0e, 0x25, + 0x9e, 0x0f, 0xf4, 0x39, 0x97, 0x75, 0x87, 0x6f, 0xc9, 0x8f, 0x7e, 0x85, 0x83, 0x37, 0xd2, 0xe8, + 0xc1, 0xc6, 0x8b, 0x67, 0x76, 0x7c, 0x04, 0x93, 0xcd, 0x44, 0xdf, 0xfd, 0xf4, 0x0c, 0xf8, 0xcd, + 0xe8, 0xb1, 0x3b, 0xfb, 0xd3, 0x85, 0xf7, 0x57, 0xa2, 0xdc, 0x06, 0xcf, 0x7c, 0x63, 0x6d, 0xae, + 0x86, 0x78, 0xee, 0xfe, 0xf6, 0xad, 0x65, 0x72, 0x51, 0x10, 0x9e, 0xc7, 0xa2, 0xce, 0x93, 0x9c, + 0x72, 0x3d, 0xe2, 0x89, 0x49, 0x91, 0x8a, 0x35, 0xff, 0xfb, 0xcb, 0x3f, 0x19, 0x16, 0x7f, 0x8d, + 0x3e, 0xf8, 0xc9, 0xc8, 0x9f, 0x15, 0xa2, 0xcd, 0xe2, 0x1f, 0x86, 0x8d, 0xae, 0x4e, 0xff, 0xee, + 0x73, 0xd7, 0x3a, 0x77, 0x3d, 0xe4, 0xae, 0xaf, 0x4e, 0x97, 0xbb, 0x7a, 0x83, 0xaf, 0xfe, 0x0b, + 0x00, 0x00, 0xff, 0xff, 0xf3, 0xdd, 0x11, 0x96, 0x45, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1/query.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/v1/query.pb.go new file mode 100644 index 0000000..f703091 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/datastore/v1/query.pb.go @@ -0,0 +1,1258 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1/query.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies what data the 'entity' field contains. +// A `ResultType` is either implied (for example, in `LookupResponse.missing` +// from `datastore.proto`, it is always `KEY_ONLY`) or specified by context +// (for example, in message `QueryResultBatch`, field `entity_result_type` +// specifies a `ResultType` for all the values in field `entity_results`). +type EntityResult_ResultType int32 + +const ( + // Unspecified. This value is never used. + EntityResult_RESULT_TYPE_UNSPECIFIED EntityResult_ResultType = 0 + // The key and properties. + EntityResult_FULL EntityResult_ResultType = 1 + // A projected subset of properties. The entity may have no key. + EntityResult_PROJECTION EntityResult_ResultType = 2 + // Only the key. + EntityResult_KEY_ONLY EntityResult_ResultType = 3 +) + +var EntityResult_ResultType_name = map[int32]string{ + 0: "RESULT_TYPE_UNSPECIFIED", + 1: "FULL", + 2: "PROJECTION", + 3: "KEY_ONLY", +} +var EntityResult_ResultType_value = map[string]int32{ + "RESULT_TYPE_UNSPECIFIED": 0, + "FULL": 1, + "PROJECTION": 2, + "KEY_ONLY": 3, +} + +func (x EntityResult_ResultType) String() string { + return proto.EnumName(EntityResult_ResultType_name, int32(x)) +} +func (EntityResult_ResultType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{0, 0} +} + +// The sort direction. +type PropertyOrder_Direction int32 + +const ( + // Unspecified. This value must not be used. + PropertyOrder_DIRECTION_UNSPECIFIED PropertyOrder_Direction = 0 + // Ascending. + PropertyOrder_ASCENDING PropertyOrder_Direction = 1 + // Descending. + PropertyOrder_DESCENDING PropertyOrder_Direction = 2 +) + +var PropertyOrder_Direction_name = map[int32]string{ + 0: "DIRECTION_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var PropertyOrder_Direction_value = map[string]int32{ + "DIRECTION_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x PropertyOrder_Direction) String() string { + return proto.EnumName(PropertyOrder_Direction_name, int32(x)) +} +func (PropertyOrder_Direction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{5, 0} +} + +// A composite filter operator. +type CompositeFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + CompositeFilter_OPERATOR_UNSPECIFIED CompositeFilter_Operator = 0 + // The results are required to satisfy each of the combined filters. 
+ CompositeFilter_AND CompositeFilter_Operator = 1 +) + +var CompositeFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "AND", +} +var CompositeFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "AND": 1, +} + +func (x CompositeFilter_Operator) String() string { + return proto.EnumName(CompositeFilter_Operator_name, int32(x)) +} +func (CompositeFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{7, 0} +} + +// A property filter operator. +type PropertyFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + PropertyFilter_OPERATOR_UNSPECIFIED PropertyFilter_Operator = 0 + // Less than. + PropertyFilter_LESS_THAN PropertyFilter_Operator = 1 + // Less than or equal. + PropertyFilter_LESS_THAN_OR_EQUAL PropertyFilter_Operator = 2 + // Greater than. + PropertyFilter_GREATER_THAN PropertyFilter_Operator = 3 + // Greater than or equal. + PropertyFilter_GREATER_THAN_OR_EQUAL PropertyFilter_Operator = 4 + // Equal. + PropertyFilter_EQUAL PropertyFilter_Operator = 5 + // Has ancestor. + PropertyFilter_HAS_ANCESTOR PropertyFilter_Operator = 11 +) + +var PropertyFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "LESS_THAN", + 2: "LESS_THAN_OR_EQUAL", + 3: "GREATER_THAN", + 4: "GREATER_THAN_OR_EQUAL", + 5: "EQUAL", + 11: "HAS_ANCESTOR", +} +var PropertyFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "LESS_THAN": 1, + "LESS_THAN_OR_EQUAL": 2, + "GREATER_THAN": 3, + "GREATER_THAN_OR_EQUAL": 4, + "EQUAL": 5, + "HAS_ANCESTOR": 11, +} + +func (x PropertyFilter_Operator) String() string { + return proto.EnumName(PropertyFilter_Operator_name, int32(x)) +} +func (PropertyFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{8, 0} +} + +// The possible values for the `more_results` field. +type QueryResultBatch_MoreResultsType int32 + +const ( + // Unspecified. This value is never used. + QueryResultBatch_MORE_RESULTS_TYPE_UNSPECIFIED QueryResultBatch_MoreResultsType = 0 + // There may be additional batches to fetch from this query. + QueryResultBatch_NOT_FINISHED QueryResultBatch_MoreResultsType = 1 + // The query is finished, but there may be more results after the limit. + QueryResultBatch_MORE_RESULTS_AFTER_LIMIT QueryResultBatch_MoreResultsType = 2 + // The query is finished, but there may be more results after the end + // cursor. + QueryResultBatch_MORE_RESULTS_AFTER_CURSOR QueryResultBatch_MoreResultsType = 4 + // The query is finished, and there are no more results. + QueryResultBatch_NO_MORE_RESULTS QueryResultBatch_MoreResultsType = 3 +) + +var QueryResultBatch_MoreResultsType_name = map[int32]string{ + 0: "MORE_RESULTS_TYPE_UNSPECIFIED", + 1: "NOT_FINISHED", + 2: "MORE_RESULTS_AFTER_LIMIT", + 4: "MORE_RESULTS_AFTER_CURSOR", + 3: "NO_MORE_RESULTS", +} +var QueryResultBatch_MoreResultsType_value = map[string]int32{ + "MORE_RESULTS_TYPE_UNSPECIFIED": 0, + "NOT_FINISHED": 1, + "MORE_RESULTS_AFTER_LIMIT": 2, + "MORE_RESULTS_AFTER_CURSOR": 4, + "NO_MORE_RESULTS": 3, +} + +func (x QueryResultBatch_MoreResultsType) String() string { + return proto.EnumName(QueryResultBatch_MoreResultsType_name, int32(x)) +} +func (QueryResultBatch_MoreResultsType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{11, 0} +} + +// The result of fetching an entity from Datastore. +type EntityResult struct { + // The resulting entity. 
+ Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // The version of the entity, a strictly positive number that monotonically + // increases with changes to the entity. + // + // This field is set for + // [`FULL`][google.datastore.v1.EntityResult.ResultType.FULL] entity results. + // + // For [missing][google.datastore.v1.LookupResponse.missing] entities in + // `LookupResponse`, this is the version of the snapshot that was used to look + // up the entity, and it is always set except for eventually consistent reads. + Version int64 `protobuf:"varint,4,opt,name=version,proto3" json:"version,omitempty"` + // A cursor that points to the position after the result entity. + // Set only when the `EntityResult` is part of a `QueryResultBatch` message. + Cursor []byte `protobuf:"bytes,3,opt,name=cursor,proto3" json:"cursor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityResult) Reset() { *m = EntityResult{} } +func (m *EntityResult) String() string { return proto.CompactTextString(m) } +func (*EntityResult) ProtoMessage() {} +func (*EntityResult) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{0} +} +func (m *EntityResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityResult.Unmarshal(m, b) +} +func (m *EntityResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityResult.Marshal(b, m, deterministic) +} +func (dst *EntityResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityResult.Merge(dst, src) +} +func (m *EntityResult) XXX_Size() int { + return xxx_messageInfo_EntityResult.Size(m) +} +func (m *EntityResult) XXX_DiscardUnknown() { + xxx_messageInfo_EntityResult.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityResult proto.InternalMessageInfo + +func (m *EntityResult) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *EntityResult) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *EntityResult) GetCursor() []byte { + if m != nil { + return m.Cursor + } + return nil +} + +// A query for entities. +type Query struct { + // The projection to return. Defaults to returning all properties. + Projection []*Projection `protobuf:"bytes,2,rep,name=projection,proto3" json:"projection,omitempty"` + // The kinds to query (if empty, returns entities of all kinds). + // Currently at most 1 kind may be specified. + Kind []*KindExpression `protobuf:"bytes,3,rep,name=kind,proto3" json:"kind,omitempty"` + // The filter to apply. + Filter *Filter `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + // The order to apply to the query results (if empty, order is unspecified). + Order []*PropertyOrder `protobuf:"bytes,5,rep,name=order,proto3" json:"order,omitempty"` + // The properties to make distinct. The query results will contain the first + // result for each distinct combination of values for the given properties + // (if empty, all results are returned). + DistinctOn []*PropertyReference `protobuf:"bytes,6,rep,name=distinct_on,json=distinctOn,proto3" json:"distinct_on,omitempty"` + // A starting point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to continue the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). 
+ StartCursor []byte `protobuf:"bytes,7,opt,name=start_cursor,json=startCursor,proto3" json:"start_cursor,omitempty"` + // An ending point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to limit the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + EndCursor []byte `protobuf:"bytes,8,opt,name=end_cursor,json=endCursor,proto3" json:"end_cursor,omitempty"` + // The number of results to skip. Applies before limit, but after all other + // constraints. Optional. Must be >= 0 if specified. + Offset int32 `protobuf:"varint,10,opt,name=offset,proto3" json:"offset,omitempty"` + // The maximum number of results to return. Applies after all other + // constraints. Optional. + // Unspecified is interpreted as no limit. + // Must be >= 0 if specified. + Limit *wrappers.Int32Value `protobuf:"bytes,12,opt,name=limit,proto3" json:"limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Query) Reset() { *m = Query{} } +func (m *Query) String() string { return proto.CompactTextString(m) } +func (*Query) ProtoMessage() {} +func (*Query) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{1} +} +func (m *Query) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Query.Unmarshal(m, b) +} +func (m *Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Query.Marshal(b, m, deterministic) +} +func (dst *Query) XXX_Merge(src proto.Message) { + xxx_messageInfo_Query.Merge(dst, src) +} +func (m *Query) XXX_Size() int { + return xxx_messageInfo_Query.Size(m) +} +func (m *Query) XXX_DiscardUnknown() { + xxx_messageInfo_Query.DiscardUnknown(m) +} + +var xxx_messageInfo_Query proto.InternalMessageInfo + +func (m *Query) GetProjection() []*Projection { + if m != nil { + return m.Projection + } + return nil +} + +func (m *Query) GetKind() []*KindExpression { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Query) GetFilter() *Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *Query) GetOrder() []*PropertyOrder { + if m != nil { + return m.Order + } + return nil +} + +func (m *Query) GetDistinctOn() []*PropertyReference { + if m != nil { + return m.DistinctOn + } + return nil +} + +func (m *Query) GetStartCursor() []byte { + if m != nil { + return m.StartCursor + } + return nil +} + +func (m *Query) GetEndCursor() []byte { + if m != nil { + return m.EndCursor + } + return nil +} + +func (m *Query) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *Query) GetLimit() *wrappers.Int32Value { + if m != nil { + return m.Limit + } + return nil +} + +// A representation of a kind. +type KindExpression struct { + // The name of the kind. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KindExpression) Reset() { *m = KindExpression{} } +func (m *KindExpression) String() string { return proto.CompactTextString(m) } +func (*KindExpression) ProtoMessage() {} +func (*KindExpression) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{2} +} +func (m *KindExpression) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KindExpression.Unmarshal(m, b) +} +func (m *KindExpression) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KindExpression.Marshal(b, m, deterministic) +} +func (dst *KindExpression) XXX_Merge(src proto.Message) { + xxx_messageInfo_KindExpression.Merge(dst, src) +} +func (m *KindExpression) XXX_Size() int { + return xxx_messageInfo_KindExpression.Size(m) +} +func (m *KindExpression) XXX_DiscardUnknown() { + xxx_messageInfo_KindExpression.DiscardUnknown(m) +} + +var xxx_messageInfo_KindExpression proto.InternalMessageInfo + +func (m *KindExpression) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A reference to a property relative to the kind expressions. +type PropertyReference struct { + // The name of the property. + // If name includes "."s, it may be interpreted as a property name path. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyReference) Reset() { *m = PropertyReference{} } +func (m *PropertyReference) String() string { return proto.CompactTextString(m) } +func (*PropertyReference) ProtoMessage() {} +func (*PropertyReference) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{3} +} +func (m *PropertyReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyReference.Unmarshal(m, b) +} +func (m *PropertyReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyReference.Marshal(b, m, deterministic) +} +func (dst *PropertyReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyReference.Merge(dst, src) +} +func (m *PropertyReference) XXX_Size() int { + return xxx_messageInfo_PropertyReference.Size(m) +} +func (m *PropertyReference) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyReference.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyReference proto.InternalMessageInfo + +func (m *PropertyReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A representation of a property in a projection. +type Projection struct { + // The property to project. 
+ Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Projection) Reset() { *m = Projection{} } +func (m *Projection) String() string { return proto.CompactTextString(m) } +func (*Projection) ProtoMessage() {} +func (*Projection) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{4} +} +func (m *Projection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Projection.Unmarshal(m, b) +} +func (m *Projection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Projection.Marshal(b, m, deterministic) +} +func (dst *Projection) XXX_Merge(src proto.Message) { + xxx_messageInfo_Projection.Merge(dst, src) +} +func (m *Projection) XXX_Size() int { + return xxx_messageInfo_Projection.Size(m) +} +func (m *Projection) XXX_DiscardUnknown() { + xxx_messageInfo_Projection.DiscardUnknown(m) +} + +var xxx_messageInfo_Projection proto.InternalMessageInfo + +func (m *Projection) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +// The desired order for a specific property. +type PropertyOrder struct { + // The property to order by. + Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The direction to order by. Defaults to `ASCENDING`. + Direction PropertyOrder_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=google.datastore.v1.PropertyOrder_Direction" json:"direction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyOrder) Reset() { *m = PropertyOrder{} } +func (m *PropertyOrder) String() string { return proto.CompactTextString(m) } +func (*PropertyOrder) ProtoMessage() {} +func (*PropertyOrder) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{5} +} +func (m *PropertyOrder) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyOrder.Unmarshal(m, b) +} +func (m *PropertyOrder) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyOrder.Marshal(b, m, deterministic) +} +func (dst *PropertyOrder) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyOrder.Merge(dst, src) +} +func (m *PropertyOrder) XXX_Size() int { + return xxx_messageInfo_PropertyOrder.Size(m) +} +func (m *PropertyOrder) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyOrder.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyOrder proto.InternalMessageInfo + +func (m *PropertyOrder) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +func (m *PropertyOrder) GetDirection() PropertyOrder_Direction { + if m != nil { + return m.Direction + } + return PropertyOrder_DIRECTION_UNSPECIFIED +} + +// A holder for any type of filter. +type Filter struct { + // The type of filter. 
+ // + // Types that are valid to be assigned to FilterType: + // *Filter_CompositeFilter + // *Filter_PropertyFilter + FilterType isFilter_FilterType `protobuf_oneof:"filter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Filter) Reset() { *m = Filter{} } +func (m *Filter) String() string { return proto.CompactTextString(m) } +func (*Filter) ProtoMessage() {} +func (*Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{6} +} +func (m *Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Filter.Unmarshal(m, b) +} +func (m *Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Filter.Marshal(b, m, deterministic) +} +func (dst *Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_Filter.Merge(dst, src) +} +func (m *Filter) XXX_Size() int { + return xxx_messageInfo_Filter.Size(m) +} +func (m *Filter) XXX_DiscardUnknown() { + xxx_messageInfo_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_Filter proto.InternalMessageInfo + +type isFilter_FilterType interface { + isFilter_FilterType() +} + +type Filter_CompositeFilter struct { + CompositeFilter *CompositeFilter `protobuf:"bytes,1,opt,name=composite_filter,json=compositeFilter,proto3,oneof"` +} + +type Filter_PropertyFilter struct { + PropertyFilter *PropertyFilter `protobuf:"bytes,2,opt,name=property_filter,json=propertyFilter,proto3,oneof"` +} + +func (*Filter_CompositeFilter) isFilter_FilterType() {} + +func (*Filter_PropertyFilter) isFilter_FilterType() {} + +func (m *Filter) GetFilterType() isFilter_FilterType { + if m != nil { + return m.FilterType + } + return nil +} + +func (m *Filter) GetCompositeFilter() *CompositeFilter { + if x, ok := m.GetFilterType().(*Filter_CompositeFilter); ok { + return x.CompositeFilter + } + return nil +} + +func (m *Filter) GetPropertyFilter() *PropertyFilter { + if x, ok := m.GetFilterType().(*Filter_PropertyFilter); ok { + return x.PropertyFilter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Filter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Filter_OneofMarshaler, _Filter_OneofUnmarshaler, _Filter_OneofSizer, []interface{}{ + (*Filter_CompositeFilter)(nil), + (*Filter_PropertyFilter)(nil), + } +} + +func _Filter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Filter) + // filter_type + switch x := m.FilterType.(type) { + case *Filter_CompositeFilter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CompositeFilter); err != nil { + return err + } + case *Filter_PropertyFilter: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PropertyFilter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Filter.FilterType has unexpected type %T", x) + } + return nil +} + +func _Filter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Filter) + switch tag { + case 1: // filter_type.composite_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CompositeFilter) + err := b.DecodeMessage(msg) + m.FilterType = &Filter_CompositeFilter{msg} + return true, err + case 2: // filter_type.property_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PropertyFilter) + err := b.DecodeMessage(msg) + m.FilterType = &Filter_PropertyFilter{msg} + return true, err + default: + return false, nil + } +} + +func _Filter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Filter) + // filter_type + switch x := m.FilterType.(type) { + case *Filter_CompositeFilter: + s := proto.Size(x.CompositeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Filter_PropertyFilter: + s := proto.Size(x.PropertyFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A filter that merges multiple other filters using the given operator. +type CompositeFilter struct { + // The operator for combining multiple filters. + Op CompositeFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.datastore.v1.CompositeFilter_Operator" json:"op,omitempty"` + // The list of filters to combine. + // Must contain at least one filter. 
+ Filters []*Filter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompositeFilter) Reset() { *m = CompositeFilter{} } +func (m *CompositeFilter) String() string { return proto.CompactTextString(m) } +func (*CompositeFilter) ProtoMessage() {} +func (*CompositeFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{7} +} +func (m *CompositeFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompositeFilter.Unmarshal(m, b) +} +func (m *CompositeFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompositeFilter.Marshal(b, m, deterministic) +} +func (dst *CompositeFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompositeFilter.Merge(dst, src) +} +func (m *CompositeFilter) XXX_Size() int { + return xxx_messageInfo_CompositeFilter.Size(m) +} +func (m *CompositeFilter) XXX_DiscardUnknown() { + xxx_messageInfo_CompositeFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_CompositeFilter proto.InternalMessageInfo + +func (m *CompositeFilter) GetOp() CompositeFilter_Operator { + if m != nil { + return m.Op + } + return CompositeFilter_OPERATOR_UNSPECIFIED +} + +func (m *CompositeFilter) GetFilters() []*Filter { + if m != nil { + return m.Filters + } + return nil +} + +// A filter on a specific property. +type PropertyFilter struct { + // The property to filter by. + Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The operator to filter by. + Op PropertyFilter_Operator `protobuf:"varint,2,opt,name=op,proto3,enum=google.datastore.v1.PropertyFilter_Operator" json:"op,omitempty"` + // The value to compare the property to. + Value *Value `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyFilter) Reset() { *m = PropertyFilter{} } +func (m *PropertyFilter) String() string { return proto.CompactTextString(m) } +func (*PropertyFilter) ProtoMessage() {} +func (*PropertyFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{8} +} +func (m *PropertyFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyFilter.Unmarshal(m, b) +} +func (m *PropertyFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyFilter.Marshal(b, m, deterministic) +} +func (dst *PropertyFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyFilter.Merge(dst, src) +} +func (m *PropertyFilter) XXX_Size() int { + return xxx_messageInfo_PropertyFilter.Size(m) +} +func (m *PropertyFilter) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyFilter proto.InternalMessageInfo + +func (m *PropertyFilter) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +func (m *PropertyFilter) GetOp() PropertyFilter_Operator { + if m != nil { + return m.Op + } + return PropertyFilter_OPERATOR_UNSPECIFIED +} + +func (m *PropertyFilter) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +// A [GQL +// query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). 
+type GqlQuery struct { + // A string of the format described + // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). + QueryString string `protobuf:"bytes,1,opt,name=query_string,json=queryString,proto3" json:"query_string,omitempty"` + // When false, the query string must not contain any literals and instead must + // bind all values. For example, + // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while + // `SELECT * FROM Kind WHERE a = @value` is. + AllowLiterals bool `protobuf:"varint,2,opt,name=allow_literals,json=allowLiterals,proto3" json:"allow_literals,omitempty"` + // For each non-reserved named binding site in the query string, there must be + // a named parameter with that name, but not necessarily the inverse. + // + // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex + // `__.*__`, and must not be `""`. + NamedBindings map[string]*GqlQueryParameter `protobuf:"bytes,5,rep,name=named_bindings,json=namedBindings,proto3" json:"named_bindings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Numbered binding site @1 references the first numbered parameter, + // effectively using 1-based indexing, rather than the usual 0. + // + // For each binding site numbered i in `query_string`, there must be an i-th + // numbered parameter. The inverse must also be true. + PositionalBindings []*GqlQueryParameter `protobuf:"bytes,4,rep,name=positional_bindings,json=positionalBindings,proto3" json:"positional_bindings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GqlQuery) Reset() { *m = GqlQuery{} } +func (m *GqlQuery) String() string { return proto.CompactTextString(m) } +func (*GqlQuery) ProtoMessage() {} +func (*GqlQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{9} +} +func (m *GqlQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GqlQuery.Unmarshal(m, b) +} +func (m *GqlQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GqlQuery.Marshal(b, m, deterministic) +} +func (dst *GqlQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_GqlQuery.Merge(dst, src) +} +func (m *GqlQuery) XXX_Size() int { + return xxx_messageInfo_GqlQuery.Size(m) +} +func (m *GqlQuery) XXX_DiscardUnknown() { + xxx_messageInfo_GqlQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_GqlQuery proto.InternalMessageInfo + +func (m *GqlQuery) GetQueryString() string { + if m != nil { + return m.QueryString + } + return "" +} + +func (m *GqlQuery) GetAllowLiterals() bool { + if m != nil { + return m.AllowLiterals + } + return false +} + +func (m *GqlQuery) GetNamedBindings() map[string]*GqlQueryParameter { + if m != nil { + return m.NamedBindings + } + return nil +} + +func (m *GqlQuery) GetPositionalBindings() []*GqlQueryParameter { + if m != nil { + return m.PositionalBindings + } + return nil +} + +// A binding parameter for a GQL query. +type GqlQueryParameter struct { + // The type of parameter. 
+ // + // Types that are valid to be assigned to ParameterType: + // *GqlQueryParameter_Value + // *GqlQueryParameter_Cursor + ParameterType isGqlQueryParameter_ParameterType `protobuf_oneof:"parameter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GqlQueryParameter) Reset() { *m = GqlQueryParameter{} } +func (m *GqlQueryParameter) String() string { return proto.CompactTextString(m) } +func (*GqlQueryParameter) ProtoMessage() {} +func (*GqlQueryParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{10} +} +func (m *GqlQueryParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GqlQueryParameter.Unmarshal(m, b) +} +func (m *GqlQueryParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GqlQueryParameter.Marshal(b, m, deterministic) +} +func (dst *GqlQueryParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_GqlQueryParameter.Merge(dst, src) +} +func (m *GqlQueryParameter) XXX_Size() int { + return xxx_messageInfo_GqlQueryParameter.Size(m) +} +func (m *GqlQueryParameter) XXX_DiscardUnknown() { + xxx_messageInfo_GqlQueryParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_GqlQueryParameter proto.InternalMessageInfo + +type isGqlQueryParameter_ParameterType interface { + isGqlQueryParameter_ParameterType() +} + +type GqlQueryParameter_Value struct { + Value *Value `protobuf:"bytes,2,opt,name=value,proto3,oneof"` +} + +type GqlQueryParameter_Cursor struct { + Cursor []byte `protobuf:"bytes,3,opt,name=cursor,proto3,oneof"` +} + +func (*GqlQueryParameter_Value) isGqlQueryParameter_ParameterType() {} + +func (*GqlQueryParameter_Cursor) isGqlQueryParameter_ParameterType() {} + +func (m *GqlQueryParameter) GetParameterType() isGqlQueryParameter_ParameterType { + if m != nil { + return m.ParameterType + } + return nil +} + +func (m *GqlQueryParameter) GetValue() *Value { + if x, ok := m.GetParameterType().(*GqlQueryParameter_Value); ok { + return x.Value + } + return nil +} + +func (m *GqlQueryParameter) GetCursor() []byte { + if x, ok := m.GetParameterType().(*GqlQueryParameter_Cursor); ok { + return x.Cursor + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GqlQueryParameter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GqlQueryParameter_OneofMarshaler, _GqlQueryParameter_OneofUnmarshaler, _GqlQueryParameter_OneofSizer, []interface{}{ + (*GqlQueryParameter_Value)(nil), + (*GqlQueryParameter_Cursor)(nil), + } +} + +func _GqlQueryParameter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GqlQueryParameter) + // parameter_type + switch x := m.ParameterType.(type) { + case *GqlQueryParameter_Value: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Value); err != nil { + return err + } + case *GqlQueryParameter_Cursor: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Cursor) + case nil: + default: + return fmt.Errorf("GqlQueryParameter.ParameterType has unexpected type %T", x) + } + return nil +} + +func _GqlQueryParameter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GqlQueryParameter) + switch tag { + case 2: // parameter_type.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.ParameterType = &GqlQueryParameter_Value{msg} + return true, err + case 3: // parameter_type.cursor + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ParameterType = &GqlQueryParameter_Cursor{x} + return true, err + default: + return false, nil + } +} + +func _GqlQueryParameter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GqlQueryParameter) + // parameter_type + switch x := m.ParameterType.(type) { + case *GqlQueryParameter_Value: + s := proto.Size(x.Value) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GqlQueryParameter_Cursor: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Cursor))) + n += len(x.Cursor) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A batch of results produced by a query. +type QueryResultBatch struct { + // The number of results skipped, typically because of an offset. + SkippedResults int32 `protobuf:"varint,6,opt,name=skipped_results,json=skippedResults,proto3" json:"skipped_results,omitempty"` + // A cursor that points to the position after the last skipped result. + // Will be set when `skipped_results` != 0. + SkippedCursor []byte `protobuf:"bytes,3,opt,name=skipped_cursor,json=skippedCursor,proto3" json:"skipped_cursor,omitempty"` + // The result type for every entity in `entity_results`. + EntityResultType EntityResult_ResultType `protobuf:"varint,1,opt,name=entity_result_type,json=entityResultType,proto3,enum=google.datastore.v1.EntityResult_ResultType" json:"entity_result_type,omitempty"` + // The results for this batch. + EntityResults []*EntityResult `protobuf:"bytes,2,rep,name=entity_results,json=entityResults,proto3" json:"entity_results,omitempty"` + // A cursor that points to the position after the last result in the batch. + EndCursor []byte `protobuf:"bytes,4,opt,name=end_cursor,json=endCursor,proto3" json:"end_cursor,omitempty"` + // The state of the query after the current batch. 
+ MoreResults QueryResultBatch_MoreResultsType `protobuf:"varint,5,opt,name=more_results,json=moreResults,proto3,enum=google.datastore.v1.QueryResultBatch_MoreResultsType" json:"more_results,omitempty"` + // The version number of the snapshot this batch was returned from. + // This applies to the range of results from the query's `start_cursor` (or + // the beginning of the query if no cursor was given) to this batch's + // `end_cursor` (not the query's `end_cursor`). + // + // In a single transaction, subsequent query result batches for the same query + // can have a greater snapshot version number. Each batch's snapshot version + // is valid for all preceding batches. + // The value will be zero for eventually consistent queries. + SnapshotVersion int64 `protobuf:"varint,7,opt,name=snapshot_version,json=snapshotVersion,proto3" json:"snapshot_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResultBatch) Reset() { *m = QueryResultBatch{} } +func (m *QueryResultBatch) String() string { return proto.CompactTextString(m) } +func (*QueryResultBatch) ProtoMessage() {} +func (*QueryResultBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_query_e7d74dc9c1327cf5, []int{11} +} +func (m *QueryResultBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResultBatch.Unmarshal(m, b) +} +func (m *QueryResultBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResultBatch.Marshal(b, m, deterministic) +} +func (dst *QueryResultBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResultBatch.Merge(dst, src) +} +func (m *QueryResultBatch) XXX_Size() int { + return xxx_messageInfo_QueryResultBatch.Size(m) +} +func (m *QueryResultBatch) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResultBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResultBatch proto.InternalMessageInfo + +func (m *QueryResultBatch) GetSkippedResults() int32 { + if m != nil { + return m.SkippedResults + } + return 0 +} + +func (m *QueryResultBatch) GetSkippedCursor() []byte { + if m != nil { + return m.SkippedCursor + } + return nil +} + +func (m *QueryResultBatch) GetEntityResultType() EntityResult_ResultType { + if m != nil { + return m.EntityResultType + } + return EntityResult_RESULT_TYPE_UNSPECIFIED +} + +func (m *QueryResultBatch) GetEntityResults() []*EntityResult { + if m != nil { + return m.EntityResults + } + return nil +} + +func (m *QueryResultBatch) GetEndCursor() []byte { + if m != nil { + return m.EndCursor + } + return nil +} + +func (m *QueryResultBatch) GetMoreResults() QueryResultBatch_MoreResultsType { + if m != nil { + return m.MoreResults + } + return QueryResultBatch_MORE_RESULTS_TYPE_UNSPECIFIED +} + +func (m *QueryResultBatch) GetSnapshotVersion() int64 { + if m != nil { + return m.SnapshotVersion + } + return 0 +} + +func init() { + proto.RegisterType((*EntityResult)(nil), "google.datastore.v1.EntityResult") + proto.RegisterType((*Query)(nil), "google.datastore.v1.Query") + proto.RegisterType((*KindExpression)(nil), "google.datastore.v1.KindExpression") + proto.RegisterType((*PropertyReference)(nil), "google.datastore.v1.PropertyReference") + proto.RegisterType((*Projection)(nil), "google.datastore.v1.Projection") + proto.RegisterType((*PropertyOrder)(nil), "google.datastore.v1.PropertyOrder") + proto.RegisterType((*Filter)(nil), "google.datastore.v1.Filter") + proto.RegisterType((*CompositeFilter)(nil), 
"google.datastore.v1.CompositeFilter") + proto.RegisterType((*PropertyFilter)(nil), "google.datastore.v1.PropertyFilter") + proto.RegisterType((*GqlQuery)(nil), "google.datastore.v1.GqlQuery") + proto.RegisterMapType((map[string]*GqlQueryParameter)(nil), "google.datastore.v1.GqlQuery.NamedBindingsEntry") + proto.RegisterType((*GqlQueryParameter)(nil), "google.datastore.v1.GqlQueryParameter") + proto.RegisterType((*QueryResultBatch)(nil), "google.datastore.v1.QueryResultBatch") + proto.RegisterEnum("google.datastore.v1.EntityResult_ResultType", EntityResult_ResultType_name, EntityResult_ResultType_value) + proto.RegisterEnum("google.datastore.v1.PropertyOrder_Direction", PropertyOrder_Direction_name, PropertyOrder_Direction_value) + proto.RegisterEnum("google.datastore.v1.CompositeFilter_Operator", CompositeFilter_Operator_name, CompositeFilter_Operator_value) + proto.RegisterEnum("google.datastore.v1.PropertyFilter_Operator", PropertyFilter_Operator_name, PropertyFilter_Operator_value) + proto.RegisterEnum("google.datastore.v1.QueryResultBatch_MoreResultsType", QueryResultBatch_MoreResultsType_name, QueryResultBatch_MoreResultsType_value) +} + +func init() { + proto.RegisterFile("google/datastore/v1/query.proto", fileDescriptor_query_e7d74dc9c1327cf5) +} + +var fileDescriptor_query_e7d74dc9c1327cf5 = []byte{ + // 1313 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xdd, 0x72, 0xd3, 0x46, + 0x14, 0x8e, 0x64, 0x3b, 0x89, 0x8f, 0xff, 0xc4, 0xd2, 0x82, 0x08, 0x50, 0x12, 0x41, 0x4b, 0x3a, + 0x03, 0x36, 0x31, 0xc3, 0x94, 0x69, 0xe9, 0x74, 0xfc, 0xa3, 0xc4, 0x06, 0x63, 0x39, 0x6b, 0x27, + 0x14, 0x86, 0x19, 0x8d, 0xb0, 0x37, 0x46, 0x45, 0x96, 0xc4, 0x4a, 0x09, 0xcd, 0x83, 0x74, 0xa6, + 0x37, 0x7d, 0x81, 0x3e, 0x40, 0x2f, 0xfa, 0x00, 0x6d, 0xa7, 0xcf, 0xd0, 0xeb, 0x5e, 0xf7, 0x11, + 0x3a, 0xda, 0x5d, 0xf9, 0x2f, 0xc6, 0xe4, 0x82, 0x3b, 0xed, 0xd9, 0xef, 0xfb, 0xce, 0x9e, 0xb3, + 0x67, 0x8f, 0x0e, 0xdc, 0x18, 0x7a, 0xde, 0xd0, 0x21, 0xa5, 0x81, 0x15, 0x5a, 0x41, 0xe8, 0x51, + 0x52, 0x3a, 0xd9, 0x29, 0xbd, 0x3d, 0x26, 0xf4, 0xb4, 0xe8, 0x53, 0x2f, 0xf4, 0xd0, 0x45, 0x0e, + 0x28, 0x8e, 0x01, 0xc5, 0x93, 0x9d, 0x8d, 0x6b, 0x82, 0x65, 0xf9, 0x76, 0xc9, 0x72, 0x5d, 0x2f, + 0xb4, 0x42, 0xdb, 0x73, 0x03, 0x4e, 0xd9, 0xd8, 0x5c, 0xa4, 0x49, 0xdc, 0xd0, 0x0e, 0x85, 0xe8, + 0xc6, 0x67, 0x02, 0xc1, 0x56, 0xaf, 0x8e, 0x8f, 0x4a, 0xef, 0xa8, 0xe5, 0xfb, 0x84, 0xc6, 0x0a, + 0xaa, 0xd8, 0x0f, 0x4f, 0x7d, 0x52, 0x72, 0xac, 0xd0, 0x71, 0x87, 0x7c, 0x47, 0xfb, 0x4b, 0x82, + 0xac, 0xce, 0xa4, 0x30, 0x09, 0x8e, 0x9d, 0x10, 0xdd, 0x87, 0x55, 0x2e, 0xad, 0x4a, 0x9b, 0xd2, + 0x76, 0xa6, 0x7c, 0xb5, 0xb8, 0xe0, 0xc0, 0x45, 0x41, 0x11, 0x50, 0xa4, 0xc2, 0xda, 0x09, 0xa1, + 0x81, 0xed, 0xb9, 0x6a, 0x72, 0x53, 0xda, 0x4e, 0xe0, 0x78, 0x89, 0x2e, 0xc1, 0x6a, 0xff, 0x98, + 0x06, 0x1e, 0x55, 0x13, 0x9b, 0xd2, 0x76, 0x16, 0x8b, 0x95, 0xb6, 0x0f, 0xc0, 0x1d, 0xf6, 0x4e, + 0x7d, 0x82, 0xae, 0xc2, 0x65, 0xac, 0x77, 0x0f, 0x5a, 0x3d, 0xb3, 0xf7, 0xbc, 0xa3, 0x9b, 0x07, + 0xed, 0x6e, 0x47, 0xaf, 0x35, 0x77, 0x9b, 0x7a, 0x5d, 0x59, 0x41, 0xeb, 0x90, 0xdc, 0x3d, 0x68, + 0xb5, 0x14, 0x09, 0xe5, 0x01, 0x3a, 0xd8, 0x78, 0xac, 0xd7, 0x7a, 0x4d, 0xa3, 0xad, 0xc8, 0x28, + 0x0b, 0xeb, 0x4f, 0xf4, 0xe7, 0xa6, 0xd1, 0x6e, 0x3d, 0x57, 0x12, 0xda, 0x1f, 0x09, 0x48, 0xed, + 0x47, 0x99, 0x46, 0xdf, 0x01, 0xf8, 0xd4, 0xfb, 0x81, 0xf4, 0xa3, 0x2c, 0xaa, 0xf2, 0x66, 0x62, + 0x3b, 0x53, 0xbe, 0xb1, 0x30, 0x8e, 0xce, 0x18, 0x86, 0xa7, 0x28, 0xe8, 0x2b, 0x48, 0xbe, 0xb1, + 0xdd, 0x81, 0x9a, 
0x60, 0xd4, 0x9b, 0x0b, 0xa9, 0x4f, 0x6c, 0x77, 0xa0, 0xff, 0xe8, 0x53, 0x12, + 0x44, 0x81, 0x62, 0x46, 0x88, 0xb2, 0x77, 0x64, 0x3b, 0x21, 0xa1, 0x2c, 0x0f, 0xef, 0xcb, 0xde, + 0x2e, 0x83, 0x60, 0x01, 0x45, 0x0f, 0x21, 0xe5, 0xd1, 0x01, 0xa1, 0x6a, 0x8a, 0xb9, 0xd3, 0xde, + 0x77, 0x52, 0x9f, 0xd0, 0xf0, 0xd4, 0x88, 0x90, 0x98, 0x13, 0xd0, 0x1e, 0x64, 0x06, 0x76, 0x10, + 0xda, 0x6e, 0x3f, 0x34, 0x3d, 0x57, 0x5d, 0x65, 0xfc, 0x2f, 0x96, 0xf2, 0x31, 0x39, 0x22, 0x94, + 0xb8, 0x7d, 0x82, 0x21, 0xa6, 0x1a, 0x2e, 0xda, 0x82, 0x6c, 0x10, 0x5a, 0x34, 0x34, 0xc5, 0x65, + 0xad, 0xb1, 0xcb, 0xca, 0x30, 0x5b, 0x8d, 0x99, 0xd0, 0x75, 0x00, 0xe2, 0x0e, 0x62, 0xc0, 0x3a, + 0x03, 0xa4, 0x89, 0x3b, 0x10, 0xdb, 0x97, 0x60, 0xd5, 0x3b, 0x3a, 0x0a, 0x48, 0xa8, 0xc2, 0xa6, + 0xb4, 0x9d, 0xc2, 0x62, 0x85, 0x76, 0x20, 0xe5, 0xd8, 0x23, 0x3b, 0x54, 0xb3, 0xb3, 0x09, 0x89, + 0x4b, 0xb5, 0xd8, 0x74, 0xc3, 0xfb, 0xe5, 0x43, 0xcb, 0x39, 0x26, 0x98, 0x23, 0xb5, 0x5b, 0x90, + 0x9f, 0x4d, 0x2e, 0x42, 0x90, 0x74, 0xad, 0x11, 0x61, 0x25, 0x99, 0xc6, 0xec, 0x5b, 0xbb, 0x0d, + 0x17, 0xce, 0xc4, 0x34, 0x06, 0xca, 0x53, 0xc0, 0x0e, 0xc0, 0xe4, 0x9a, 0x51, 0x15, 0xd6, 0x7d, + 0x41, 0x13, 0x15, 0x7e, 0xde, 0x7c, 0x8d, 0x79, 0xda, 0xbf, 0x12, 0xe4, 0x66, 0xee, 0xe3, 0x63, + 0xa8, 0xa2, 0xc7, 0x90, 0x1e, 0xd8, 0x74, 0x5c, 0xb4, 0xd2, 0x76, 0xbe, 0x7c, 0xe7, 0xc3, 0xa5, + 0x50, 0xac, 0xc7, 0x1c, 0x3c, 0xa1, 0x6b, 0x3a, 0xa4, 0xc7, 0x76, 0x74, 0x05, 0x3e, 0xad, 0x37, + 0x31, 0x7f, 0x35, 0x73, 0x6f, 0x2b, 0x07, 0xe9, 0x4a, 0xb7, 0xa6, 0xb7, 0xeb, 0xcd, 0xf6, 0x1e, + 0x7f, 0x60, 0x75, 0x7d, 0xbc, 0x96, 0xb5, 0xdf, 0x25, 0x58, 0xe5, 0xc5, 0x8a, 0xf6, 0x41, 0xe9, + 0x7b, 0x23, 0xdf, 0x0b, 0xec, 0x90, 0x98, 0xa2, 0xc6, 0x79, 0xa4, 0xb7, 0x16, 0x1e, 0xb2, 0x16, + 0x83, 0x39, 0xbf, 0xb1, 0x82, 0x0b, 0xfd, 0x59, 0x13, 0x6a, 0x43, 0x21, 0x0e, 0x3e, 0x56, 0x94, + 0x99, 0xe2, 0xcd, 0xa5, 0x61, 0x8f, 0x05, 0xf3, 0xfe, 0x8c, 0xa5, 0x9a, 0x83, 0x0c, 0x97, 0x31, + 0xa3, 0x3e, 0xa7, 0xfd, 0x26, 0x41, 0x61, 0xee, 0x14, 0xe8, 0x5b, 0x90, 0x3d, 0x9f, 0x9d, 0x3b, + 0x5f, 0xbe, 0x7b, 0x9e, 0x73, 0x17, 0x0d, 0x9f, 0x50, 0x2b, 0xf4, 0x28, 0x96, 0x3d, 0x1f, 0x3d, + 0x80, 0x35, 0xee, 0x21, 0x10, 0x5d, 0x65, 0xe9, 0xfb, 0x8e, 0xb1, 0xda, 0x5d, 0x58, 0x8f, 0x65, + 0x90, 0x0a, 0x9f, 0x18, 0x1d, 0x1d, 0x57, 0x7a, 0x06, 0x9e, 0xbb, 0x8b, 0x35, 0x48, 0x54, 0xda, + 0x75, 0x45, 0xd2, 0xfe, 0x91, 0x21, 0x3f, 0x1b, 0xec, 0x47, 0xa9, 0xaf, 0x47, 0x2c, 0xf6, 0xf3, + 0x14, 0xd6, 0xa2, 0xd0, 0xef, 0x41, 0xea, 0x24, 0x7a, 0xa4, 0xac, 0x8f, 0x67, 0xca, 0x1b, 0x0b, + 0x05, 0xc4, 0x33, 0x66, 0x40, 0xed, 0x27, 0xe9, 0x5c, 0x61, 0xe7, 0x20, 0xdd, 0xd2, 0xbb, 0x5d, + 0xb3, 0xd7, 0xa8, 0xb4, 0x15, 0x09, 0x5d, 0x02, 0x34, 0x5e, 0x9a, 0x06, 0x36, 0xf5, 0xfd, 0x83, + 0x4a, 0x4b, 0x91, 0x91, 0x02, 0xd9, 0x3d, 0xac, 0x57, 0x7a, 0x3a, 0xe6, 0xc8, 0x44, 0x54, 0xd6, + 0xd3, 0x96, 0x09, 0x38, 0x89, 0xd2, 0x90, 0xe2, 0x9f, 0xa9, 0x88, 0xd7, 0xa8, 0x74, 0xcd, 0x4a, + 0xbb, 0xa6, 0x77, 0x7b, 0x06, 0x56, 0x32, 0xda, 0x7f, 0x32, 0xac, 0xef, 0xbd, 0x75, 0xf8, 0xaf, + 0x62, 0x0b, 0xb2, 0xec, 0xef, 0x6c, 0x06, 0x21, 0xb5, 0xdd, 0xa1, 0xe8, 0x30, 0x19, 0x66, 0xeb, + 0x32, 0x13, 0xfa, 0x1c, 0xf2, 0x96, 0xe3, 0x78, 0xef, 0x4c, 0xc7, 0x0e, 0x09, 0xb5, 0x9c, 0x80, + 0xe5, 0x70, 0x1d, 0xe7, 0x98, 0xb5, 0x25, 0x8c, 0xe8, 0x19, 0xe4, 0xa3, 0x76, 0x33, 0x30, 0x5f, + 0xd9, 0xee, 0xc0, 0x76, 0x87, 0x81, 0x68, 0xe7, 0xf7, 0x16, 0x66, 0x2a, 0x3e, 0x40, 0xb1, 0x1d, + 0x71, 0xaa, 0x82, 0xa2, 0xbb, 0x21, 0x3d, 0xc5, 0x39, 0x77, 0xda, 0x86, 0x9e, 0xc1, 0x45, 0x56, + 0x91, 0xb6, 0xe7, 0x5a, 0xce, 0x44, 0x3d, 
0xb9, 0xa4, 0xd9, 0xc7, 0xea, 0x1d, 0x8b, 0x5a, 0x23, + 0x12, 0xd5, 0x22, 0x9a, 0x48, 0xc4, 0xc2, 0x1b, 0xaf, 0x01, 0x9d, 0xf5, 0x8e, 0x14, 0x48, 0xbc, + 0x21, 0xa7, 0x22, 0x11, 0xd1, 0x27, 0x7a, 0x14, 0x5f, 0xbd, 0xbc, 0xa4, 0xf2, 0xce, 0xba, 0xe4, + 0xa4, 0xaf, 0xe5, 0x87, 0x92, 0x16, 0xc0, 0x85, 0x33, 0xfb, 0xa8, 0x3c, 0x2b, 0xbb, 0xa4, 0xa2, + 0x1a, 0x2b, 0x42, 0x0c, 0xa9, 0xb3, 0xe3, 0x44, 0x63, 0x25, 0x1e, 0x28, 0xaa, 0x0a, 0xe4, 0xfd, + 0x58, 0x9a, 0xbf, 0xff, 0x3f, 0x93, 0xa0, 0x30, 0x97, 0x7c, 0xd0, 0xa8, 0x5a, 0x61, 0xff, 0x35, + 0xba, 0x0d, 0x85, 0xe0, 0x8d, 0xed, 0xfb, 0x64, 0x60, 0x52, 0x66, 0x0e, 0xd4, 0x55, 0xf6, 0xbf, + 0xca, 0x0b, 0x33, 0x07, 0x07, 0xd1, 0xad, 0xc7, 0xc0, 0x99, 0x01, 0x26, 0x27, 0xac, 0xe2, 0xb7, + 0xf7, 0x02, 0x10, 0x9f, 0x81, 0x84, 0x1c, 0x73, 0x2d, 0x1a, 0xcc, 0x9d, 0x65, 0xa3, 0x13, 0x43, + 0x17, 0x27, 0x33, 0x10, 0x56, 0xc8, 0xd4, 0x06, 0x9b, 0x8a, 0x1a, 0x90, 0x9f, 0xd1, 0x8e, 0x9b, + 0xce, 0xd6, 0x07, 0x75, 0x71, 0x6e, 0x5a, 0x2c, 0x98, 0xfb, 0x77, 0x27, 0xe7, 0xff, 0xdd, 0xdf, + 0x43, 0x76, 0xe4, 0x51, 0x32, 0x76, 0x93, 0x62, 0xc7, 0x7f, 0xb0, 0xd0, 0xcd, 0x7c, 0x46, 0x8b, + 0x4f, 0x3d, 0x4a, 0x84, 0x1f, 0x16, 0x47, 0x66, 0x34, 0x31, 0xa0, 0x2f, 0x41, 0x09, 0x5c, 0xcb, + 0x0f, 0x5e, 0x7b, 0xa1, 0x19, 0x4f, 0x88, 0x6b, 0x6c, 0x42, 0x2c, 0xc4, 0xf6, 0x43, 0x6e, 0xd6, + 0x7e, 0x96, 0xa0, 0x30, 0xa7, 0x85, 0xb6, 0xe0, 0xfa, 0x53, 0x03, 0xeb, 0x26, 0x1f, 0x0e, 0xbb, + 0x8b, 0xa6, 0x43, 0x05, 0xb2, 0x6d, 0xa3, 0x67, 0xee, 0x36, 0xdb, 0xcd, 0x6e, 0x43, 0xaf, 0x2b, + 0x12, 0xba, 0x06, 0xea, 0x0c, 0xa9, 0xb2, 0x1b, 0xb5, 0x88, 0x56, 0xf3, 0x69, 0xb3, 0xa7, 0xc8, + 0xe8, 0x3a, 0x5c, 0x59, 0xb0, 0x5b, 0x3b, 0xc0, 0x5d, 0x03, 0x2b, 0x49, 0x74, 0x11, 0x0a, 0x6d, + 0xc3, 0x9c, 0x46, 0x28, 0x89, 0xea, 0x2f, 0x12, 0x5c, 0xee, 0x7b, 0xa3, 0x45, 0xf9, 0xa8, 0x02, + 0xaf, 0xea, 0x68, 0x9a, 0xe9, 0x48, 0x2f, 0x1e, 0x09, 0xc8, 0xd0, 0x73, 0x2c, 0x77, 0x58, 0xf4, + 0xe8, 0xb0, 0x34, 0x24, 0x2e, 0x9b, 0x75, 0x4a, 0x7c, 0xcb, 0xf2, 0xed, 0x60, 0x66, 0x92, 0xff, + 0x66, 0xbc, 0xf8, 0x55, 0xbe, 0xb2, 0xc7, 0xe9, 0x35, 0xc7, 0x3b, 0x1e, 0x14, 0xeb, 0x63, 0x3f, + 0x87, 0x3b, 0x7f, 0xc7, 0x7b, 0x2f, 0xd9, 0xde, 0xcb, 0xf1, 0xde, 0xcb, 0xc3, 0x9d, 0x57, 0xab, + 0xcc, 0xc1, 0xfd, 0xff, 0x03, 0x00, 0x00, 0xff, 0xff, 0xcd, 0x38, 0x05, 0xaa, 0x7d, 0x0c, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/datastore.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/datastore.pb.go new file mode 100644 index 0000000..e4a2a3c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/datastore.pb.go @@ -0,0 +1,2123 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1beta3/datastore.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The modes available for commits. +type CommitRequest_Mode int32 + +const ( + // Unspecified. This value must not be used. + CommitRequest_MODE_UNSPECIFIED CommitRequest_Mode = 0 + // Transactional: The mutations are either all applied, or none are applied. + // Learn about transactions + // [here](https://cloud.google.com/datastore/docs/concepts/transactions). + CommitRequest_TRANSACTIONAL CommitRequest_Mode = 1 + // Non-transactional: The mutations may not apply as all or none. + CommitRequest_NON_TRANSACTIONAL CommitRequest_Mode = 2 +) + +var CommitRequest_Mode_name = map[int32]string{ + 0: "MODE_UNSPECIFIED", + 1: "TRANSACTIONAL", + 2: "NON_TRANSACTIONAL", +} +var CommitRequest_Mode_value = map[string]int32{ + "MODE_UNSPECIFIED": 0, + "TRANSACTIONAL": 1, + "NON_TRANSACTIONAL": 2, +} + +func (x CommitRequest_Mode) String() string { + return proto.EnumName(CommitRequest_Mode_name, int32(x)) +} +func (CommitRequest_Mode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{8, 0} +} + +// The possible values for read consistencies. +type ReadOptions_ReadConsistency int32 + +const ( + // Unspecified. This value must not be used. + ReadOptions_READ_CONSISTENCY_UNSPECIFIED ReadOptions_ReadConsistency = 0 + // Strong consistency. + ReadOptions_STRONG ReadOptions_ReadConsistency = 1 + // Eventual consistency. + ReadOptions_EVENTUAL ReadOptions_ReadConsistency = 2 +) + +var ReadOptions_ReadConsistency_name = map[int32]string{ + 0: "READ_CONSISTENCY_UNSPECIFIED", + 1: "STRONG", + 2: "EVENTUAL", +} +var ReadOptions_ReadConsistency_value = map[string]int32{ + "READ_CONSISTENCY_UNSPECIFIED": 0, + "STRONG": 1, + "EVENTUAL": 2, +} + +func (x ReadOptions_ReadConsistency) String() string { + return proto.EnumName(ReadOptions_ReadConsistency_name, int32(x)) +} +func (ReadOptions_ReadConsistency) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{16, 0} +} + +// The request for +// [Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup]. +type LookupRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The options for this lookup request. + ReadOptions *ReadOptions `protobuf:"bytes,1,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"` + // Keys of entities to look up. 
+ Keys []*Key `protobuf:"bytes,3,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LookupRequest) Reset() { *m = LookupRequest{} } +func (m *LookupRequest) String() string { return proto.CompactTextString(m) } +func (*LookupRequest) ProtoMessage() {} +func (*LookupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{0} +} +func (m *LookupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LookupRequest.Unmarshal(m, b) +} +func (m *LookupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LookupRequest.Marshal(b, m, deterministic) +} +func (dst *LookupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LookupRequest.Merge(dst, src) +} +func (m *LookupRequest) XXX_Size() int { + return xxx_messageInfo_LookupRequest.Size(m) +} +func (m *LookupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LookupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LookupRequest proto.InternalMessageInfo + +func (m *LookupRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *LookupRequest) GetReadOptions() *ReadOptions { + if m != nil { + return m.ReadOptions + } + return nil +} + +func (m *LookupRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for +// [Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup]. +type LookupResponse struct { + // Entities found as `ResultType.FULL` entities. The order of results in this + // field is undefined and has no relation to the order of the keys in the + // input. + Found []*EntityResult `protobuf:"bytes,1,rep,name=found,proto3" json:"found,omitempty"` + // Entities not found as `ResultType.KEY_ONLY` entities. The order of results + // in this field is undefined and has no relation to the order of the keys + // in the input. + Missing []*EntityResult `protobuf:"bytes,2,rep,name=missing,proto3" json:"missing,omitempty"` + // A list of keys that were not looked up due to resource constraints. The + // order of results in this field is undefined and has no relation to the + // order of the keys in the input. 
+ Deferred []*Key `protobuf:"bytes,3,rep,name=deferred,proto3" json:"deferred,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LookupResponse) Reset() { *m = LookupResponse{} } +func (m *LookupResponse) String() string { return proto.CompactTextString(m) } +func (*LookupResponse) ProtoMessage() {} +func (*LookupResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{1} +} +func (m *LookupResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LookupResponse.Unmarshal(m, b) +} +func (m *LookupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LookupResponse.Marshal(b, m, deterministic) +} +func (dst *LookupResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LookupResponse.Merge(dst, src) +} +func (m *LookupResponse) XXX_Size() int { + return xxx_messageInfo_LookupResponse.Size(m) +} +func (m *LookupResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LookupResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LookupResponse proto.InternalMessageInfo + +func (m *LookupResponse) GetFound() []*EntityResult { + if m != nil { + return m.Found + } + return nil +} + +func (m *LookupResponse) GetMissing() []*EntityResult { + if m != nil { + return m.Missing + } + return nil +} + +func (m *LookupResponse) GetDeferred() []*Key { + if m != nil { + return m.Deferred + } + return nil +} + +// The request for +// [Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery]. +type RunQueryRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Entities are partitioned into subsets, identified by a partition ID. + // Queries are scoped to a single partition. + // This partition ID is normalized with the standard default context + // partition ID. + PartitionId *PartitionId `protobuf:"bytes,2,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The options for this query. + ReadOptions *ReadOptions `protobuf:"bytes,1,opt,name=read_options,json=readOptions,proto3" json:"read_options,omitempty"` + // The type of query. 
+ // + // Types that are valid to be assigned to QueryType: + // *RunQueryRequest_Query + // *RunQueryRequest_GqlQuery + QueryType isRunQueryRequest_QueryType `protobuf_oneof:"query_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryRequest) Reset() { *m = RunQueryRequest{} } +func (m *RunQueryRequest) String() string { return proto.CompactTextString(m) } +func (*RunQueryRequest) ProtoMessage() {} +func (*RunQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{2} +} +func (m *RunQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryRequest.Unmarshal(m, b) +} +func (m *RunQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryRequest.Marshal(b, m, deterministic) +} +func (dst *RunQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryRequest.Merge(dst, src) +} +func (m *RunQueryRequest) XXX_Size() int { + return xxx_messageInfo_RunQueryRequest.Size(m) +} +func (m *RunQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryRequest proto.InternalMessageInfo + +func (m *RunQueryRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RunQueryRequest) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *RunQueryRequest) GetReadOptions() *ReadOptions { + if m != nil { + return m.ReadOptions + } + return nil +} + +type isRunQueryRequest_QueryType interface { + isRunQueryRequest_QueryType() +} + +type RunQueryRequest_Query struct { + Query *Query `protobuf:"bytes,3,opt,name=query,proto3,oneof"` +} + +type RunQueryRequest_GqlQuery struct { + GqlQuery *GqlQuery `protobuf:"bytes,7,opt,name=gql_query,json=gqlQuery,proto3,oneof"` +} + +func (*RunQueryRequest_Query) isRunQueryRequest_QueryType() {} + +func (*RunQueryRequest_GqlQuery) isRunQueryRequest_QueryType() {} + +func (m *RunQueryRequest) GetQueryType() isRunQueryRequest_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *RunQueryRequest) GetQuery() *Query { + if x, ok := m.GetQueryType().(*RunQueryRequest_Query); ok { + return x.Query + } + return nil +} + +func (m *RunQueryRequest) GetGqlQuery() *GqlQuery { + if x, ok := m.GetQueryType().(*RunQueryRequest_GqlQuery); ok { + return x.GqlQuery + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RunQueryRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RunQueryRequest_OneofMarshaler, _RunQueryRequest_OneofUnmarshaler, _RunQueryRequest_OneofSizer, []interface{}{ + (*RunQueryRequest_Query)(nil), + (*RunQueryRequest_GqlQuery)(nil), + } +} + +func _RunQueryRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_Query: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Query); err != nil { + return err + } + case *RunQueryRequest_GqlQuery: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GqlQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.QueryType has unexpected type %T", x) + } + return nil +} + +func _RunQueryRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RunQueryRequest) + switch tag { + case 3: // query_type.query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Query) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_Query{msg} + return true, err + case 7: // query_type.gql_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GqlQuery) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_GqlQuery{msg} + return true, err + default: + return false, nil + } +} + +func _RunQueryRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_Query: + s := proto.Size(x.Query) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RunQueryRequest_GqlQuery: + s := proto.Size(x.GqlQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for +// [Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery]. +type RunQueryResponse struct { + // A batch of query results (always present). + Batch *QueryResultBatch `protobuf:"bytes,1,opt,name=batch,proto3" json:"batch,omitempty"` + // The parsed form of the `GqlQuery` from the request, if it was set. 
+ Query *Query `protobuf:"bytes,2,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryResponse) Reset() { *m = RunQueryResponse{} } +func (m *RunQueryResponse) String() string { return proto.CompactTextString(m) } +func (*RunQueryResponse) ProtoMessage() {} +func (*RunQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{3} +} +func (m *RunQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryResponse.Unmarshal(m, b) +} +func (m *RunQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryResponse.Marshal(b, m, deterministic) +} +func (dst *RunQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryResponse.Merge(dst, src) +} +func (m *RunQueryResponse) XXX_Size() int { + return xxx_messageInfo_RunQueryResponse.Size(m) +} +func (m *RunQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryResponse proto.InternalMessageInfo + +func (m *RunQueryResponse) GetBatch() *QueryResultBatch { + if m != nil { + return m.Batch + } + return nil +} + +func (m *RunQueryResponse) GetQuery() *Query { + if m != nil { + return m.Query + } + return nil +} + +// The request for +// [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. +type BeginTransactionRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Options for a new transaction. + TransactionOptions *TransactionOptions `protobuf:"bytes,10,opt,name=transaction_options,json=transactionOptions,proto3" json:"transaction_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} } +func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionRequest) ProtoMessage() {} +func (*BeginTransactionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{4} +} +func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b) +} +func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionRequest.Merge(dst, src) +} +func (m *BeginTransactionRequest) XXX_Size() int { + return xxx_messageInfo_BeginTransactionRequest.Size(m) +} +func (m *BeginTransactionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo + +func (m *BeginTransactionRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BeginTransactionRequest) GetTransactionOptions() *TransactionOptions { + if m != nil { + return m.TransactionOptions + } + return nil +} + +// The response for +// [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. 
+type BeginTransactionResponse struct { + // The transaction identifier (always present). + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionResponse) Reset() { *m = BeginTransactionResponse{} } +func (m *BeginTransactionResponse) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionResponse) ProtoMessage() {} +func (*BeginTransactionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{5} +} +func (m *BeginTransactionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionResponse.Unmarshal(m, b) +} +func (m *BeginTransactionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionResponse.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionResponse.Merge(dst, src) +} +func (m *BeginTransactionResponse) XXX_Size() int { + return xxx_messageInfo_BeginTransactionResponse.Size(m) +} +func (m *BeginTransactionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionResponse proto.InternalMessageInfo + +func (m *BeginTransactionResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for +// [Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback]. +type RollbackRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The transaction identifier, returned by a call to + // [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction]. + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackRequest) Reset() { *m = RollbackRequest{} } +func (m *RollbackRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackRequest) ProtoMessage() {} +func (*RollbackRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{6} +} +func (m *RollbackRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackRequest.Unmarshal(m, b) +} +func (m *RollbackRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackRequest.Merge(dst, src) +} +func (m *RollbackRequest) XXX_Size() int { + return xxx_messageInfo_RollbackRequest.Size(m) +} +func (m *RollbackRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackRequest proto.InternalMessageInfo + +func (m *RollbackRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RollbackRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The response for +// [Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback]. (an empty +// message). 
+type RollbackResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackResponse) Reset() { *m = RollbackResponse{} } +func (m *RollbackResponse) String() string { return proto.CompactTextString(m) } +func (*RollbackResponse) ProtoMessage() {} +func (*RollbackResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{7} +} +func (m *RollbackResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackResponse.Unmarshal(m, b) +} +func (m *RollbackResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackResponse.Marshal(b, m, deterministic) +} +func (dst *RollbackResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackResponse.Merge(dst, src) +} +func (m *RollbackResponse) XXX_Size() int { + return xxx_messageInfo_RollbackResponse.Size(m) +} +func (m *RollbackResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackResponse proto.InternalMessageInfo + +// The request for +// [Datastore.Commit][google.datastore.v1beta3.Datastore.Commit]. +type CommitRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The type of commit to perform. Defaults to `TRANSACTIONAL`. + Mode CommitRequest_Mode `protobuf:"varint,5,opt,name=mode,proto3,enum=google.datastore.v1beta3.CommitRequest_Mode" json:"mode,omitempty"` + // Must be set when mode is `TRANSACTIONAL`. + // + // Types that are valid to be assigned to TransactionSelector: + // *CommitRequest_Transaction + TransactionSelector isCommitRequest_TransactionSelector `protobuf_oneof:"transaction_selector"` + // The mutations to perform. + // + // When mode is `TRANSACTIONAL`, mutations affecting a single entity are + // applied in order. The following sequences of mutations affecting a single + // entity are not permitted in a single `Commit` request: + // + // - `insert` followed by `insert` + // - `update` followed by `insert` + // - `upsert` followed by `insert` + // - `delete` followed by `update` + // + // When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single + // entity. 
+ Mutations []*Mutation `protobuf:"bytes,6,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitRequest) Reset() { *m = CommitRequest{} } +func (m *CommitRequest) String() string { return proto.CompactTextString(m) } +func (*CommitRequest) ProtoMessage() {} +func (*CommitRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{8} +} +func (m *CommitRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitRequest.Unmarshal(m, b) +} +func (m *CommitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitRequest.Marshal(b, m, deterministic) +} +func (dst *CommitRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitRequest.Merge(dst, src) +} +func (m *CommitRequest) XXX_Size() int { + return xxx_messageInfo_CommitRequest.Size(m) +} +func (m *CommitRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CommitRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitRequest proto.InternalMessageInfo + +func (m *CommitRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CommitRequest) GetMode() CommitRequest_Mode { + if m != nil { + return m.Mode + } + return CommitRequest_MODE_UNSPECIFIED +} + +type isCommitRequest_TransactionSelector interface { + isCommitRequest_TransactionSelector() +} + +type CommitRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3,oneof"` +} + +func (*CommitRequest_Transaction) isCommitRequest_TransactionSelector() {} + +func (m *CommitRequest) GetTransactionSelector() isCommitRequest_TransactionSelector { + if m != nil { + return m.TransactionSelector + } + return nil +} + +func (m *CommitRequest) GetTransaction() []byte { + if x, ok := m.GetTransactionSelector().(*CommitRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *CommitRequest) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CommitRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommitRequest_OneofMarshaler, _CommitRequest_OneofUnmarshaler, _CommitRequest_OneofSizer, []interface{}{ + (*CommitRequest_Transaction)(nil), + } +} + +func _CommitRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommitRequest) + // transaction_selector + switch x := m.TransactionSelector.(type) { + case *CommitRequest_Transaction: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case nil: + default: + return fmt.Errorf("CommitRequest.TransactionSelector has unexpected type %T", x) + } + return nil +} + +func _CommitRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommitRequest) + switch tag { + case 1: // transaction_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.TransactionSelector = &CommitRequest_Transaction{x} + return true, err + default: + return false, nil + } +} + +func _CommitRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommitRequest) + // transaction_selector + switch x := m.TransactionSelector.(type) { + case *CommitRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for +// [Datastore.Commit][google.datastore.v1beta3.Datastore.Commit]. +type CommitResponse struct { + // The result of performing the mutations. + // The i-th mutation result corresponds to the i-th mutation in the request. + MutationResults []*MutationResult `protobuf:"bytes,3,rep,name=mutation_results,json=mutationResults,proto3" json:"mutation_results,omitempty"` + // The number of index entries updated during the commit, or zero if none were + // updated. 
+ IndexUpdates int32 `protobuf:"varint,4,opt,name=index_updates,json=indexUpdates,proto3" json:"index_updates,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitResponse) Reset() { *m = CommitResponse{} } +func (m *CommitResponse) String() string { return proto.CompactTextString(m) } +func (*CommitResponse) ProtoMessage() {} +func (*CommitResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{9} +} +func (m *CommitResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitResponse.Unmarshal(m, b) +} +func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic) +} +func (dst *CommitResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitResponse.Merge(dst, src) +} +func (m *CommitResponse) XXX_Size() int { + return xxx_messageInfo_CommitResponse.Size(m) +} +func (m *CommitResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CommitResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitResponse proto.InternalMessageInfo + +func (m *CommitResponse) GetMutationResults() []*MutationResult { + if m != nil { + return m.MutationResults + } + return nil +} + +func (m *CommitResponse) GetIndexUpdates() int32 { + if m != nil { + return m.IndexUpdates + } + return 0 +} + +// The request for +// [Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds]. +type AllocateIdsRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // A list of keys with incomplete key paths for which to allocate IDs. + // No key may be reserved/read-only. + Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateIdsRequest) Reset() { *m = AllocateIdsRequest{} } +func (m *AllocateIdsRequest) String() string { return proto.CompactTextString(m) } +func (*AllocateIdsRequest) ProtoMessage() {} +func (*AllocateIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{10} +} +func (m *AllocateIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateIdsRequest.Unmarshal(m, b) +} +func (m *AllocateIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateIdsRequest.Marshal(b, m, deterministic) +} +func (dst *AllocateIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateIdsRequest.Merge(dst, src) +} +func (m *AllocateIdsRequest) XXX_Size() int { + return xxx_messageInfo_AllocateIdsRequest.Size(m) +} +func (m *AllocateIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateIdsRequest proto.InternalMessageInfo + +func (m *AllocateIdsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *AllocateIdsRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for +// [Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds]. +type AllocateIdsResponse struct { + // The keys specified in the request (in the same order), each with + // its key path completed with a newly allocated ID. 
+ Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AllocateIdsResponse) Reset() { *m = AllocateIdsResponse{} } +func (m *AllocateIdsResponse) String() string { return proto.CompactTextString(m) } +func (*AllocateIdsResponse) ProtoMessage() {} +func (*AllocateIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{11} +} +func (m *AllocateIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AllocateIdsResponse.Unmarshal(m, b) +} +func (m *AllocateIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AllocateIdsResponse.Marshal(b, m, deterministic) +} +func (dst *AllocateIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AllocateIdsResponse.Merge(dst, src) +} +func (m *AllocateIdsResponse) XXX_Size() int { + return xxx_messageInfo_AllocateIdsResponse.Size(m) +} +func (m *AllocateIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AllocateIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AllocateIdsResponse proto.InternalMessageInfo + +func (m *AllocateIdsResponse) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The request for +// [Datastore.ReserveIds][google.datastore.v1beta3.Datastore.ReserveIds]. +type ReserveIdsRequest struct { + // The ID of the project against which to make the request. + ProjectId string `protobuf:"bytes,8,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If not empty, the ID of the database against which to make the request. + DatabaseId string `protobuf:"bytes,9,opt,name=database_id,json=databaseId,proto3" json:"database_id,omitempty"` + // A list of keys with complete key paths whose numeric IDs should not be + // auto-allocated. + Keys []*Key `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReserveIdsRequest) Reset() { *m = ReserveIdsRequest{} } +func (m *ReserveIdsRequest) String() string { return proto.CompactTextString(m) } +func (*ReserveIdsRequest) ProtoMessage() {} +func (*ReserveIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{12} +} +func (m *ReserveIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReserveIdsRequest.Unmarshal(m, b) +} +func (m *ReserveIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReserveIdsRequest.Marshal(b, m, deterministic) +} +func (dst *ReserveIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReserveIdsRequest.Merge(dst, src) +} +func (m *ReserveIdsRequest) XXX_Size() int { + return xxx_messageInfo_ReserveIdsRequest.Size(m) +} +func (m *ReserveIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReserveIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReserveIdsRequest proto.InternalMessageInfo + +func (m *ReserveIdsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ReserveIdsRequest) GetDatabaseId() string { + if m != nil { + return m.DatabaseId + } + return "" +} + +func (m *ReserveIdsRequest) GetKeys() []*Key { + if m != nil { + return m.Keys + } + return nil +} + +// The response for +// [Datastore.ReserveIds][google.datastore.v1beta3.Datastore.ReserveIds]. 
+type ReserveIdsResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReserveIdsResponse) Reset() { *m = ReserveIdsResponse{} } +func (m *ReserveIdsResponse) String() string { return proto.CompactTextString(m) } +func (*ReserveIdsResponse) ProtoMessage() {} +func (*ReserveIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{13} +} +func (m *ReserveIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReserveIdsResponse.Unmarshal(m, b) +} +func (m *ReserveIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReserveIdsResponse.Marshal(b, m, deterministic) +} +func (dst *ReserveIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReserveIdsResponse.Merge(dst, src) +} +func (m *ReserveIdsResponse) XXX_Size() int { + return xxx_messageInfo_ReserveIdsResponse.Size(m) +} +func (m *ReserveIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReserveIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReserveIdsResponse proto.InternalMessageInfo + +// A mutation to apply to an entity. +type Mutation struct { + // The mutation operation. + // + // For `insert`, `update`, and `upsert`: + // - The entity's key must not be reserved/read-only. + // - No property in the entity may have a reserved name, + // not even a property in an entity in a value. + // - No value in the entity may have meaning 18, + // not even a value in an entity in another value. + // + // Types that are valid to be assigned to Operation: + // *Mutation_Insert + // *Mutation_Update + // *Mutation_Upsert + // *Mutation_Delete + Operation isMutation_Operation `protobuf_oneof:"operation"` + // When set, the server will detect whether or not this mutation conflicts + // with the current version of the entity on the server. Conflicting mutations + // are not applied, and are marked as such in MutationResult. 
+ // + // Types that are valid to be assigned to ConflictDetectionStrategy: + // *Mutation_BaseVersion + ConflictDetectionStrategy isMutation_ConflictDetectionStrategy `protobuf_oneof:"conflict_detection_strategy"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation) Reset() { *m = Mutation{} } +func (m *Mutation) String() string { return proto.CompactTextString(m) } +func (*Mutation) ProtoMessage() {} +func (*Mutation) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{14} +} +func (m *Mutation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation.Unmarshal(m, b) +} +func (m *Mutation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation.Marshal(b, m, deterministic) +} +func (dst *Mutation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation.Merge(dst, src) +} +func (m *Mutation) XXX_Size() int { + return xxx_messageInfo_Mutation.Size(m) +} +func (m *Mutation) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation proto.InternalMessageInfo + +type isMutation_Operation interface { + isMutation_Operation() +} + +type Mutation_Insert struct { + Insert *Entity `protobuf:"bytes,4,opt,name=insert,proto3,oneof"` +} + +type Mutation_Update struct { + Update *Entity `protobuf:"bytes,5,opt,name=update,proto3,oneof"` +} + +type Mutation_Upsert struct { + Upsert *Entity `protobuf:"bytes,6,opt,name=upsert,proto3,oneof"` +} + +type Mutation_Delete struct { + Delete *Key `protobuf:"bytes,7,opt,name=delete,proto3,oneof"` +} + +func (*Mutation_Insert) isMutation_Operation() {} + +func (*Mutation_Update) isMutation_Operation() {} + +func (*Mutation_Upsert) isMutation_Operation() {} + +func (*Mutation_Delete) isMutation_Operation() {} + +func (m *Mutation) GetOperation() isMutation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Mutation) GetInsert() *Entity { + if x, ok := m.GetOperation().(*Mutation_Insert); ok { + return x.Insert + } + return nil +} + +func (m *Mutation) GetUpdate() *Entity { + if x, ok := m.GetOperation().(*Mutation_Update); ok { + return x.Update + } + return nil +} + +func (m *Mutation) GetUpsert() *Entity { + if x, ok := m.GetOperation().(*Mutation_Upsert); ok { + return x.Upsert + } + return nil +} + +func (m *Mutation) GetDelete() *Key { + if x, ok := m.GetOperation().(*Mutation_Delete); ok { + return x.Delete + } + return nil +} + +type isMutation_ConflictDetectionStrategy interface { + isMutation_ConflictDetectionStrategy() +} + +type Mutation_BaseVersion struct { + BaseVersion int64 `protobuf:"varint,8,opt,name=base_version,json=baseVersion,proto3,oneof"` +} + +func (*Mutation_BaseVersion) isMutation_ConflictDetectionStrategy() {} + +func (m *Mutation) GetConflictDetectionStrategy() isMutation_ConflictDetectionStrategy { + if m != nil { + return m.ConflictDetectionStrategy + } + return nil +} + +func (m *Mutation) GetBaseVersion() int64 { + if x, ok := m.GetConflictDetectionStrategy().(*Mutation_BaseVersion); ok { + return x.BaseVersion + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Mutation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Mutation_OneofMarshaler, _Mutation_OneofUnmarshaler, _Mutation_OneofSizer, []interface{}{ + (*Mutation_Insert)(nil), + (*Mutation_Update)(nil), + (*Mutation_Upsert)(nil), + (*Mutation_Delete)(nil), + (*Mutation_BaseVersion)(nil), + } +} + +func _Mutation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Insert); err != nil { + return err + } + case *Mutation_Update: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *Mutation_Upsert: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Upsert); err != nil { + return err + } + case *Mutation_Delete: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Delete); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Mutation.Operation has unexpected type %T", x) + } + // conflict_detection_strategy + switch x := m.ConflictDetectionStrategy.(type) { + case *Mutation_BaseVersion: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.BaseVersion)) + case nil: + default: + return fmt.Errorf("Mutation.ConflictDetectionStrategy has unexpected type %T", x) + } + return nil +} + +func _Mutation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Mutation) + switch tag { + case 4: // operation.insert + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Insert{msg} + return true, err + case 5: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Update{msg} + return true, err + case 6: // operation.upsert + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Upsert{msg} + return true, err + case 7: // operation.delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Key) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Delete{msg} + return true, err + case 8: // conflict_detection_strategy.base_version + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConflictDetectionStrategy = &Mutation_BaseVersion{int64(x)} + return true, err + default: + return false, nil + } +} + +func _Mutation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + s := proto.Size(x.Insert) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Upsert: + s := proto.Size(x.Upsert) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Delete: + s := proto.Size(x.Delete) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + 
} + // conflict_detection_strategy + switch x := m.ConflictDetectionStrategy.(type) { + case *Mutation_BaseVersion: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.BaseVersion)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The result of applying a mutation. +type MutationResult struct { + // The automatically allocated key. + // Set only when the mutation allocated a key. + Key *Key `protobuf:"bytes,3,opt,name=key,proto3" json:"key,omitempty"` + // The version of the entity on the server after processing the mutation. If + // the mutation doesn't change anything on the server, then the version will + // be the version of the current entity or, if no entity is present, a version + // that is strictly greater than the version of any previous entity and less + // than the version of any possible future entity. + Version int64 `protobuf:"varint,4,opt,name=version,proto3" json:"version,omitempty"` + // Whether a conflict was detected for this mutation. Always false when a + // conflict detection strategy field is not set in the mutation. + ConflictDetected bool `protobuf:"varint,5,opt,name=conflict_detected,json=conflictDetected,proto3" json:"conflict_detected,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutationResult) Reset() { *m = MutationResult{} } +func (m *MutationResult) String() string { return proto.CompactTextString(m) } +func (*MutationResult) ProtoMessage() {} +func (*MutationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{15} +} +func (m *MutationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutationResult.Unmarshal(m, b) +} +func (m *MutationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutationResult.Marshal(b, m, deterministic) +} +func (dst *MutationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutationResult.Merge(dst, src) +} +func (m *MutationResult) XXX_Size() int { + return xxx_messageInfo_MutationResult.Size(m) +} +func (m *MutationResult) XXX_DiscardUnknown() { + xxx_messageInfo_MutationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_MutationResult proto.InternalMessageInfo + +func (m *MutationResult) GetKey() *Key { + if m != nil { + return m.Key + } + return nil +} + +func (m *MutationResult) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *MutationResult) GetConflictDetected() bool { + if m != nil { + return m.ConflictDetected + } + return false +} + +// The options shared by read requests. +type ReadOptions struct { + // If not specified, lookups and ancestor queries default to + // `read_consistency`=`STRONG`, global queries default to + // `read_consistency`=`EVENTUAL`. 
+ // + // Types that are valid to be assigned to ConsistencyType: + // *ReadOptions_ReadConsistency_ + // *ReadOptions_Transaction + ConsistencyType isReadOptions_ConsistencyType `protobuf_oneof:"consistency_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadOptions) Reset() { *m = ReadOptions{} } +func (m *ReadOptions) String() string { return proto.CompactTextString(m) } +func (*ReadOptions) ProtoMessage() {} +func (*ReadOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{16} +} +func (m *ReadOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadOptions.Unmarshal(m, b) +} +func (m *ReadOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadOptions.Marshal(b, m, deterministic) +} +func (dst *ReadOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadOptions.Merge(dst, src) +} +func (m *ReadOptions) XXX_Size() int { + return xxx_messageInfo_ReadOptions.Size(m) +} +func (m *ReadOptions) XXX_DiscardUnknown() { + xxx_messageInfo_ReadOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadOptions proto.InternalMessageInfo + +type isReadOptions_ConsistencyType interface { + isReadOptions_ConsistencyType() +} + +type ReadOptions_ReadConsistency_ struct { + ReadConsistency ReadOptions_ReadConsistency `protobuf:"varint,1,opt,name=read_consistency,json=readConsistency,proto3,enum=google.datastore.v1beta3.ReadOptions_ReadConsistency,oneof"` +} + +type ReadOptions_Transaction struct { + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3,oneof"` +} + +func (*ReadOptions_ReadConsistency_) isReadOptions_ConsistencyType() {} + +func (*ReadOptions_Transaction) isReadOptions_ConsistencyType() {} + +func (m *ReadOptions) GetConsistencyType() isReadOptions_ConsistencyType { + if m != nil { + return m.ConsistencyType + } + return nil +} + +func (m *ReadOptions) GetReadConsistency() ReadOptions_ReadConsistency { + if x, ok := m.GetConsistencyType().(*ReadOptions_ReadConsistency_); ok { + return x.ReadConsistency + } + return ReadOptions_READ_CONSISTENCY_UNSPECIFIED +} + +func (m *ReadOptions) GetTransaction() []byte { + if x, ok := m.GetConsistencyType().(*ReadOptions_Transaction); ok { + return x.Transaction + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ReadOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ReadOptions_OneofMarshaler, _ReadOptions_OneofUnmarshaler, _ReadOptions_OneofSizer, []interface{}{ + (*ReadOptions_ReadConsistency_)(nil), + (*ReadOptions_Transaction)(nil), + } +} + +func _ReadOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ReadOptions) + // consistency_type + switch x := m.ConsistencyType.(type) { + case *ReadOptions_ReadConsistency_: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ReadConsistency)) + case *ReadOptions_Transaction: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case nil: + default: + return fmt.Errorf("ReadOptions.ConsistencyType has unexpected type %T", x) + } + return nil +} + +func _ReadOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ReadOptions) + switch tag { + case 1: // consistency_type.read_consistency + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConsistencyType = &ReadOptions_ReadConsistency_{ReadOptions_ReadConsistency(x)} + return true, err + case 2: // consistency_type.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencyType = &ReadOptions_Transaction{x} + return true, err + default: + return false, nil + } +} + +func _ReadOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ReadOptions) + // consistency_type + switch x := m.ConsistencyType.(type) { + case *ReadOptions_ReadConsistency_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ReadConsistency)) + case *ReadOptions_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for beginning a new transaction. +// +// Transactions can be created explicitly with calls to +// [Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction] +// or implicitly by setting +// [ReadOptions.new_transaction][google.datastore.v1beta3.ReadOptions.new_transaction] +// in read requests. +type TransactionOptions struct { + // The `mode` of the transaction, indicating whether write operations are + // supported. 
+ // + // Types that are valid to be assigned to Mode: + // *TransactionOptions_ReadWrite_ + // *TransactionOptions_ReadOnly_ + Mode isTransactionOptions_Mode `protobuf_oneof:"mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions) Reset() { *m = TransactionOptions{} } +func (m *TransactionOptions) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions) ProtoMessage() {} +func (*TransactionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{17} +} +func (m *TransactionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions.Unmarshal(m, b) +} +func (m *TransactionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions.Merge(dst, src) +} +func (m *TransactionOptions) XXX_Size() int { + return xxx_messageInfo_TransactionOptions.Size(m) +} +func (m *TransactionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions proto.InternalMessageInfo + +type isTransactionOptions_Mode interface { + isTransactionOptions_Mode() +} + +type TransactionOptions_ReadWrite_ struct { + ReadWrite *TransactionOptions_ReadWrite `protobuf:"bytes,1,opt,name=read_write,json=readWrite,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_ struct { + ReadOnly *TransactionOptions_ReadOnly `protobuf:"bytes,2,opt,name=read_only,json=readOnly,proto3,oneof"` +} + +func (*TransactionOptions_ReadWrite_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_ReadOnly_) isTransactionOptions_Mode() {} + +func (m *TransactionOptions) GetMode() isTransactionOptions_Mode { + if m != nil { + return m.Mode + } + return nil +} + +func (m *TransactionOptions) GetReadWrite() *TransactionOptions_ReadWrite { + if x, ok := m.GetMode().(*TransactionOptions_ReadWrite_); ok { + return x.ReadWrite + } + return nil +} + +func (m *TransactionOptions) GetReadOnly() *TransactionOptions_ReadOnly { + if x, ok := m.GetMode().(*TransactionOptions_ReadOnly_); ok { + return x.ReadOnly + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransactionOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_OneofMarshaler, _TransactionOptions_OneofUnmarshaler, _TransactionOptions_OneofSizer, []interface{}{ + (*TransactionOptions_ReadWrite_)(nil), + (*TransactionOptions_ReadOnly_)(nil), + } +} + +func _TransactionOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadWrite); err != nil { + return err + } + case *TransactionOptions_ReadOnly_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadOnly); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions.Mode has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions) + switch tag { + case 1: // mode.read_write + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadWrite) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadWrite_{msg} + return true, err + case 2: // mode.read_only + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadOnly) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadOnly_{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + s := proto.Size(x.ReadWrite) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_: + s := proto.Size(x.ReadOnly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options specific to read / write transactions. +type TransactionOptions_ReadWrite struct { + // The transaction identifier of the transaction being retried. 
+ PreviousTransaction []byte `protobuf:"bytes,1,opt,name=previous_transaction,json=previousTransaction,proto3" json:"previous_transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadWrite) Reset() { *m = TransactionOptions_ReadWrite{} } +func (m *TransactionOptions_ReadWrite) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadWrite) ProtoMessage() {} +func (*TransactionOptions_ReadWrite) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{17, 0} +} +func (m *TransactionOptions_ReadWrite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadWrite.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadWrite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadWrite.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadWrite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadWrite.Merge(dst, src) +} +func (m *TransactionOptions_ReadWrite) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadWrite.Size(m) +} +func (m *TransactionOptions_ReadWrite) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadWrite.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadWrite proto.InternalMessageInfo + +func (m *TransactionOptions_ReadWrite) GetPreviousTransaction() []byte { + if m != nil { + return m.PreviousTransaction + } + return nil +} + +// Options specific to read-only transactions. +type TransactionOptions_ReadOnly struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadOnly) Reset() { *m = TransactionOptions_ReadOnly{} } +func (m *TransactionOptions_ReadOnly) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadOnly) ProtoMessage() {} +func (*TransactionOptions_ReadOnly) Descriptor() ([]byte, []int) { + return fileDescriptor_datastore_062a2d11f20b0b0e, []int{17, 1} +} +func (m *TransactionOptions_ReadOnly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadOnly.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadOnly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadOnly.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadOnly) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadOnly.Merge(dst, src) +} +func (m *TransactionOptions_ReadOnly) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadOnly.Size(m) +} +func (m *TransactionOptions_ReadOnly) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadOnly.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadOnly proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LookupRequest)(nil), "google.datastore.v1beta3.LookupRequest") + proto.RegisterType((*LookupResponse)(nil), "google.datastore.v1beta3.LookupResponse") + proto.RegisterType((*RunQueryRequest)(nil), "google.datastore.v1beta3.RunQueryRequest") + proto.RegisterType((*RunQueryResponse)(nil), "google.datastore.v1beta3.RunQueryResponse") + proto.RegisterType((*BeginTransactionRequest)(nil), "google.datastore.v1beta3.BeginTransactionRequest") + proto.RegisterType((*BeginTransactionResponse)(nil), "google.datastore.v1beta3.BeginTransactionResponse") + 
proto.RegisterType((*RollbackRequest)(nil), "google.datastore.v1beta3.RollbackRequest") + proto.RegisterType((*RollbackResponse)(nil), "google.datastore.v1beta3.RollbackResponse") + proto.RegisterType((*CommitRequest)(nil), "google.datastore.v1beta3.CommitRequest") + proto.RegisterType((*CommitResponse)(nil), "google.datastore.v1beta3.CommitResponse") + proto.RegisterType((*AllocateIdsRequest)(nil), "google.datastore.v1beta3.AllocateIdsRequest") + proto.RegisterType((*AllocateIdsResponse)(nil), "google.datastore.v1beta3.AllocateIdsResponse") + proto.RegisterType((*ReserveIdsRequest)(nil), "google.datastore.v1beta3.ReserveIdsRequest") + proto.RegisterType((*ReserveIdsResponse)(nil), "google.datastore.v1beta3.ReserveIdsResponse") + proto.RegisterType((*Mutation)(nil), "google.datastore.v1beta3.Mutation") + proto.RegisterType((*MutationResult)(nil), "google.datastore.v1beta3.MutationResult") + proto.RegisterType((*ReadOptions)(nil), "google.datastore.v1beta3.ReadOptions") + proto.RegisterType((*TransactionOptions)(nil), "google.datastore.v1beta3.TransactionOptions") + proto.RegisterType((*TransactionOptions_ReadWrite)(nil), "google.datastore.v1beta3.TransactionOptions.ReadWrite") + proto.RegisterType((*TransactionOptions_ReadOnly)(nil), "google.datastore.v1beta3.TransactionOptions.ReadOnly") + proto.RegisterEnum("google.datastore.v1beta3.CommitRequest_Mode", CommitRequest_Mode_name, CommitRequest_Mode_value) + proto.RegisterEnum("google.datastore.v1beta3.ReadOptions_ReadConsistency", ReadOptions_ReadConsistency_name, ReadOptions_ReadConsistency_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatastoreClient is the client API for Datastore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatastoreClient interface { + // Looks up entities by key. + Lookup(ctx context.Context, in *LookupRequest, opts ...grpc.CallOption) (*LookupResponse, error) + // Queries for entities. + RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (*RunQueryResponse, error) + // Begins a new transaction. + BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*RollbackResponse, error) + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + AllocateIds(ctx context.Context, in *AllocateIdsRequest, opts ...grpc.CallOption) (*AllocateIdsResponse, error) + // Prevents the supplied keys' IDs from being auto-allocated by Cloud + // Datastore. 
+ ReserveIds(ctx context.Context, in *ReserveIdsRequest, opts ...grpc.CallOption) (*ReserveIdsResponse, error) +} + +type datastoreClient struct { + cc *grpc.ClientConn +} + +func NewDatastoreClient(cc *grpc.ClientConn) DatastoreClient { + return &datastoreClient{cc} +} + +func (c *datastoreClient) Lookup(ctx context.Context, in *LookupRequest, opts ...grpc.CallOption) (*LookupResponse, error) { + out := new(LookupResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/Lookup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (*RunQueryResponse, error) { + out := new(RunQueryResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/RunQuery", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) { + out := new(BeginTransactionResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/BeginTransaction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) { + out := new(CommitResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/Commit", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*RollbackResponse, error) { + out := new(RollbackResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/Rollback", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) AllocateIds(ctx context.Context, in *AllocateIdsRequest, opts ...grpc.CallOption) (*AllocateIdsResponse, error) { + out := new(AllocateIdsResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/AllocateIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datastoreClient) ReserveIds(ctx context.Context, in *ReserveIdsRequest, opts ...grpc.CallOption) (*ReserveIdsResponse, error) { + out := new(ReserveIdsResponse) + err := c.cc.Invoke(ctx, "/google.datastore.v1beta3.Datastore/ReserveIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatastoreServer is the server API for Datastore service. +type DatastoreServer interface { + // Looks up entities by key. + Lookup(context.Context, *LookupRequest) (*LookupResponse, error) + // Queries for entities. + RunQuery(context.Context, *RunQueryRequest) (*RunQueryResponse, error) + // Begins a new transaction. + BeginTransaction(context.Context, *BeginTransactionRequest) (*BeginTransactionResponse, error) + // Commits a transaction, optionally creating, deleting or modifying some + // entities. + Commit(context.Context, *CommitRequest) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(context.Context, *RollbackRequest) (*RollbackResponse, error) + // Allocates IDs for the given keys, which is useful for referencing an entity + // before it is inserted. + AllocateIds(context.Context, *AllocateIdsRequest) (*AllocateIdsResponse, error) + // Prevents the supplied keys' IDs from being auto-allocated by Cloud + // Datastore. 
+ ReserveIds(context.Context, *ReserveIdsRequest) (*ReserveIdsResponse, error) +} + +func RegisterDatastoreServer(s *grpc.Server, srv DatastoreServer) { + s.RegisterService(&_Datastore_serviceDesc, srv) +} + +func _Datastore_Lookup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(LookupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Lookup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/Lookup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Lookup(ctx, req.(*LookupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_RunQuery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunQueryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).RunQuery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/RunQuery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).RunQuery(ctx, req.(*RunQueryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_BeginTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BeginTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).BeginTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/BeginTransaction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).BeginTransaction(ctx, req.(*BeginTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_Commit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Commit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/Commit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Commit(ctx, req.(*CommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_Rollback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).Rollback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/Rollback", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).Rollback(ctx, req.(*RollbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_AllocateIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) 
error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AllocateIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).AllocateIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/AllocateIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).AllocateIds(ctx, req.(*AllocateIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Datastore_ReserveIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReserveIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatastoreServer).ReserveIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.datastore.v1beta3.Datastore/ReserveIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatastoreServer).ReserveIds(ctx, req.(*ReserveIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Datastore_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.datastore.v1beta3.Datastore", + HandlerType: (*DatastoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Lookup", + Handler: _Datastore_Lookup_Handler, + }, + { + MethodName: "RunQuery", + Handler: _Datastore_RunQuery_Handler, + }, + { + MethodName: "BeginTransaction", + Handler: _Datastore_BeginTransaction_Handler, + }, + { + MethodName: "Commit", + Handler: _Datastore_Commit_Handler, + }, + { + MethodName: "Rollback", + Handler: _Datastore_Rollback_Handler, + }, + { + MethodName: "AllocateIds", + Handler: _Datastore_AllocateIds_Handler, + }, + { + MethodName: "ReserveIds", + Handler: _Datastore_ReserveIds_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/datastore/v1beta3/datastore.proto", +} + +func init() { + proto.RegisterFile("google/datastore/v1beta3/datastore.proto", fileDescriptor_datastore_062a2d11f20b0b0e) +} + +var fileDescriptor_datastore_062a2d11f20b0b0e = []byte{ + // 1403 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0xcf, 0x6f, 0x1b, 0xc5, + 0x17, 0xcf, 0x38, 0x89, 0x6b, 0x3f, 0xe7, 0x87, 0x33, 0xcd, 0xf7, 0x8b, 0x65, 0x5a, 0x6a, 0x6d, + 0x29, 0x75, 0xd3, 0x62, 0x13, 0xb7, 0xa1, 0x22, 0x54, 0x28, 0xb6, 0xe3, 0xd6, 0x16, 0x8d, 0x1d, + 0x26, 0x6e, 0x2a, 0x50, 0x91, 0xb5, 0xf6, 0x4e, 0xcc, 0x92, 0xf5, 0xce, 0x66, 0x77, 0x1c, 0x88, + 0x10, 0x17, 0x0e, 0x08, 0x81, 0x38, 0x21, 0xd4, 0x13, 0x07, 0xae, 0x70, 0x2e, 0x7f, 0x03, 0x02, + 0x89, 0x0b, 0x07, 0xfe, 0x01, 0xfe, 0x08, 0x8e, 0x68, 0x67, 0x67, 0xfd, 0x2b, 0xb5, 0xbd, 0xae, + 0xb8, 0x79, 0xdf, 0x7e, 0x3e, 0xef, 0x7d, 0xe6, 0xbd, 0xd9, 0xf7, 0x5e, 0x02, 0xe9, 0x36, 0x63, + 0x6d, 0x83, 0x66, 0x35, 0x95, 0xab, 0x0e, 0x67, 0x36, 0xcd, 0x9e, 0x6e, 0x36, 0x29, 0x57, 0x6f, + 0xf7, 0x2d, 0x19, 0xcb, 0x66, 0x9c, 0xe1, 0x84, 0x87, 0xcc, 0xf4, 0xed, 0x12, 0x99, 0xbc, 0x24, + 0x7d, 0xa8, 0x96, 0x9e, 0x55, 0x4d, 0x93, 0x71, 0x95, 0xeb, 0xcc, 0x74, 0x3c, 0x5e, 0xf2, 0xda, + 0xd8, 0x08, 0xd4, 0xe4, 0x3a, 0x3f, 0x93, 0xb0, 0x57, 0xc7, 0xc2, 0x4e, 0xba, 0xd4, 0x96, 0x28, + 0xe5, 0x67, 0x04, 0xcb, 0x0f, 0x19, 0x3b, 0xee, 0x5a, 0x84, 0x9e, 0x74, 0xa9, 0xc3, 0xf1, 0x65, + 0x00, 0xcb, 0x66, 0x1f, 0xd3, 0x16, 0x6f, 0xe8, 0x5a, 0x22, 0x92, 0x42, 0xe9, 0x28, 0x89, 0x4a, + 0x4b, 
0x45, 0xc3, 0x65, 0x58, 0xb2, 0xa9, 0xaa, 0x35, 0x98, 0x25, 0x34, 0x25, 0x50, 0x0a, 0xa5, + 0x63, 0xb9, 0x6b, 0x99, 0x71, 0x87, 0xc9, 0x10, 0xaa, 0x6a, 0x35, 0x0f, 0x4c, 0x62, 0x76, 0xff, + 0x01, 0x6f, 0xc2, 0xc2, 0x31, 0x3d, 0x73, 0x12, 0xf3, 0xa9, 0xf9, 0x74, 0x2c, 0x77, 0x79, 0xbc, + 0x87, 0x77, 0xe9, 0x19, 0x11, 0x50, 0xe5, 0x77, 0x04, 0x2b, 0xbe, 0x5a, 0xc7, 0x62, 0xa6, 0x43, + 0xf1, 0x3d, 0x58, 0x3c, 0x62, 0x5d, 0x53, 0x4b, 0x20, 0xe1, 0xe6, 0xb5, 0xf1, 0x6e, 0x4a, 0x22, + 0x3b, 0x84, 0x3a, 0x5d, 0x83, 0x13, 0x8f, 0x84, 0x77, 0xe0, 0x42, 0x47, 0x77, 0x1c, 0xdd, 0x6c, + 0x27, 0x42, 0x33, 0xf1, 0x7d, 0x1a, 0x7e, 0x0b, 0x22, 0x1a, 0x3d, 0xa2, 0xb6, 0x4d, 0xb5, 0x60, + 0x27, 0xe9, 0xc1, 0x95, 0x3f, 0x42, 0xb0, 0x4a, 0xba, 0xe6, 0x7b, 0x6e, 0x39, 0x82, 0x67, 0xdf, + 0x52, 0x6d, 0xae, 0xbb, 0x19, 0x74, 0x01, 0xa1, 0x69, 0xd9, 0xdf, 0xf7, 0xd1, 0x15, 0x8d, 0xc4, + 0xac, 0xfe, 0xc3, 0x7f, 0x58, 0xc7, 0xbb, 0xb0, 0x28, 0x6e, 0x54, 0x62, 0x5e, 0xb8, 0xb8, 0x32, + 0xde, 0x85, 0x38, 0x69, 0x79, 0x8e, 0x78, 0x78, 0x9c, 0x87, 0x68, 0xfb, 0xc4, 0x68, 0x78, 0xe4, + 0x0b, 0x82, 0xac, 0x8c, 0x27, 0x3f, 0x38, 0x31, 0x7c, 0x7e, 0xa4, 0x2d, 0x7f, 0x17, 0x96, 0x00, + 0x04, 0xbd, 0xc1, 0xcf, 0x2c, 0xaa, 0x7c, 0x83, 0x20, 0xde, 0x4f, 0xa8, 0xbc, 0x20, 0x3b, 0xb0, + 0xd8, 0x54, 0x79, 0xeb, 0x23, 0x79, 0xc2, 0x8d, 0x29, 0xf2, 0xbc, 0xfa, 0x16, 0x5c, 0x06, 0xf1, + 0x88, 0x78, 0xcb, 0x3f, 0x60, 0x28, 0xd0, 0x01, 0xe5, 0xf1, 0x94, 0xa7, 0x08, 0x5e, 0x2a, 0xd0, + 0xb6, 0x6e, 0xd6, 0x6d, 0xd5, 0x74, 0xd4, 0x96, 0x9b, 0xad, 0x80, 0x65, 0xfe, 0x10, 0x2e, 0xf2, + 0x3e, 0xa9, 0x57, 0x23, 0x10, 0xf1, 0x6f, 0x8d, 0x8f, 0x3f, 0x10, 0xc9, 0x2f, 0x15, 0xe6, 0xe7, + 0x6c, 0xca, 0x3d, 0x48, 0x9c, 0x17, 0x26, 0xd3, 0x95, 0x82, 0xd8, 0x00, 0x43, 0x24, 0x6d, 0x89, + 0x0c, 0x9a, 0x14, 0x02, 0xab, 0x84, 0x19, 0x46, 0x53, 0x6d, 0x1d, 0x07, 0x3c, 0xce, 0x74, 0x9f, + 0x18, 0xe2, 0x7d, 0x9f, 0x9e, 0x12, 0xe5, 0x97, 0x10, 0x2c, 0x17, 0x59, 0xa7, 0xa3, 0xf3, 0x80, + 0x61, 0x76, 0x60, 0xa1, 0xc3, 0x34, 0x9a, 0x58, 0x4c, 0xa1, 0xf4, 0xca, 0xa4, 0x34, 0x0d, 0x79, + 0xcd, 0xec, 0x31, 0x8d, 0x12, 0xc1, 0xc4, 0xca, 0x73, 0x84, 0x96, 0xe7, 0x86, 0xa4, 0xe2, 0x1d, + 0x88, 0x76, 0xba, 0xb2, 0x23, 0x27, 0xc2, 0xe2, 0x8b, 0x9f, 0x70, 0x6b, 0xf7, 0x24, 0x94, 0xf4, + 0x49, 0xca, 0x7d, 0x58, 0x70, 0x63, 0xe2, 0x75, 0x88, 0xef, 0xd5, 0x76, 0x4b, 0x8d, 0x47, 0xd5, + 0x83, 0xfd, 0x52, 0xb1, 0x72, 0xbf, 0x52, 0xda, 0x8d, 0xcf, 0xe1, 0x35, 0x58, 0xae, 0x93, 0x7c, + 0xf5, 0x20, 0x5f, 0xac, 0x57, 0x6a, 0xd5, 0xfc, 0xc3, 0x38, 0xc2, 0xff, 0x83, 0xb5, 0x6a, 0xad, + 0xda, 0x18, 0x36, 0x87, 0x0a, 0xff, 0x87, 0xf5, 0xc1, 0x5b, 0xe2, 0x50, 0x83, 0xb6, 0x38, 0xb3, + 0x95, 0xaf, 0x11, 0xac, 0xf8, 0x47, 0x94, 0x55, 0x3d, 0x80, 0xb8, 0x1f, 0xbf, 0x61, 0x8b, 0x1b, + 0xee, 0xf7, 0xdd, 0x74, 0x00, 0xed, 0x5e, 0xcb, 0x5b, 0xed, 0x0c, 0x3d, 0x3b, 0xf8, 0x2a, 0x2c, + 0xeb, 0xa6, 0x46, 0x3f, 0x6d, 0x74, 0x2d, 0x4d, 0xe5, 0xd4, 0x49, 0x2c, 0xa4, 0x50, 0x7a, 0x91, + 0x2c, 0x09, 0xe3, 0x23, 0xcf, 0xa6, 0x1c, 0x01, 0xce, 0x1b, 0x06, 0x6b, 0xa9, 0x9c, 0x56, 0x34, + 0x27, 0x60, 0x25, 0xfd, 0xd1, 0x80, 0x82, 0x8f, 0x86, 0x32, 0x5c, 0x1c, 0x8a, 0x23, 0x0f, 0xfe, + 0x02, 0x9e, 0xbe, 0x44, 0xb0, 0x46, 0xa8, 0x43, 0xed, 0xd3, 0x19, 0x14, 0x5f, 0x81, 0x98, 0xeb, + 0xb3, 0xa9, 0x3a, 0xd4, 0x7d, 0x1f, 0x15, 0xef, 0xc1, 0x37, 0xbd, 0xd8, 0x91, 0xd6, 0x01, 0x0f, + 0xea, 0x90, 0x9f, 0xc5, 0xaf, 0x21, 0x88, 0xf8, 0x95, 0xc1, 0xdb, 0x10, 0xd6, 0x4d, 0x87, 0xda, + 0x5c, 0xe4, 0x3e, 0x96, 0x4b, 0x4d, 0x1b, 0x5f, 0xe5, 0x39, 0x22, 0x19, 0x2e, 0xd7, 0x2b, 0x9c, + 0xf8, 0x60, 0x02, 0x72, 0x3d, 
0x86, 0xc7, 0x15, 0x71, 0xc3, 0xb3, 0x70, 0x45, 0xdc, 0xbb, 0x10, + 0xd6, 0xa8, 0x41, 0x39, 0x95, 0x3d, 0x7f, 0x72, 0x2e, 0x5c, 0xa2, 0x07, 0xc7, 0x57, 0x61, 0x49, + 0xe4, 0xf7, 0x94, 0xda, 0x8e, 0xfb, 0x79, 0xba, 0x45, 0x98, 0x2f, 0x23, 0x12, 0x73, 0xad, 0x87, + 0x9e, 0xb1, 0x10, 0x83, 0x28, 0xb3, 0xa8, 0x2d, 0xd2, 0x53, 0xb8, 0x0c, 0x2f, 0xb7, 0x98, 0x79, + 0x64, 0xe8, 0x2d, 0xde, 0xd0, 0x28, 0xa7, 0xf2, 0x43, 0xe1, 0xb6, 0xca, 0x69, 0xfb, 0x4c, 0xf9, + 0x0a, 0xc1, 0xca, 0xf0, 0x25, 0xc7, 0x59, 0x98, 0x3f, 0xa6, 0xfe, 0x28, 0x9b, 0x52, 0x25, 0x17, + 0x89, 0x13, 0x70, 0xc1, 0xd7, 0xe3, 0x96, 0x60, 0x9e, 0xf8, 0x8f, 0xf8, 0x26, 0xac, 0x8d, 0x04, + 0xa7, 0x9a, 0x48, 0x75, 0x84, 0xc4, 0xfd, 0x17, 0xbb, 0xd2, 0xae, 0xfc, 0x83, 0x20, 0x36, 0x30, + 0x61, 0x71, 0x13, 0xe2, 0x62, 0x3c, 0xb7, 0x98, 0xe9, 0xe8, 0x0e, 0xa7, 0x66, 0xeb, 0x4c, 0xb4, + 0xa3, 0x95, 0xdc, 0x56, 0xa0, 0x11, 0x2d, 0x7e, 0x17, 0xfb, 0xe4, 0xf2, 0x1c, 0x59, 0xb5, 0x87, + 0x4d, 0xa3, 0xdd, 0x2e, 0xf4, 0x9c, 0x6e, 0xa7, 0xec, 0xc1, 0xea, 0x88, 0x27, 0x9c, 0x82, 0x4b, + 0xa4, 0x94, 0xdf, 0x6d, 0x14, 0x6b, 0xd5, 0x83, 0xca, 0x41, 0xbd, 0x54, 0x2d, 0xbe, 0x3f, 0xd2, + 0xc2, 0x00, 0xc2, 0x07, 0x75, 0x52, 0xab, 0x3e, 0x88, 0x23, 0xbc, 0x04, 0x91, 0xd2, 0x61, 0xa9, + 0x5a, 0x7f, 0x24, 0x5a, 0x16, 0x86, 0xf8, 0xc0, 0x89, 0xbc, 0xa9, 0xfd, 0x34, 0x04, 0xf8, 0xfc, + 0xe0, 0xc2, 0x8f, 0x01, 0x44, 0x06, 0x3e, 0xb1, 0x75, 0x4e, 0xe5, 0xf0, 0x7e, 0x73, 0x96, 0xd1, + 0x27, 0x52, 0xf0, 0xd8, 0x65, 0x97, 0xe7, 0x48, 0xd4, 0xf6, 0x1f, 0x70, 0x1d, 0xa2, 0xde, 0xe6, + 0x63, 0x1a, 0xfe, 0x48, 0xdf, 0x9a, 0xd9, 0x6f, 0xcd, 0x34, 0xc4, 0x26, 0x62, 0xcb, 0xdf, 0xc9, + 0x77, 0x20, 0xda, 0x8b, 0x87, 0x37, 0x61, 0xdd, 0xb2, 0xe9, 0xa9, 0xce, 0xba, 0x4e, 0xe3, 0xfc, + 0xe4, 0xbb, 0xe8, 0xbf, 0x1b, 0xf0, 0x9d, 0x04, 0x88, 0xf8, 0x7e, 0x0b, 0x61, 0x6f, 0x90, 0xe5, + 0xfe, 0x8a, 0x40, 0x74, 0xd7, 0x57, 0x84, 0xbf, 0x45, 0x10, 0xf6, 0x96, 0x5f, 0x7c, 0x7d, 0xbc, + 0xde, 0xa1, 0x65, 0x3e, 0x99, 0x9e, 0x0e, 0x94, 0x6d, 0xe5, 0x8d, 0x2f, 0xfe, 0xfc, 0xfb, 0xbb, + 0xd0, 0x86, 0x72, 0xad, 0xf7, 0x67, 0x82, 0x6c, 0x6e, 0x4e, 0xf6, 0xb3, 0x7e, 0xe3, 0xfb, 0x7c, + 0xdb, 0x10, 0xb4, 0x6d, 0xb4, 0x81, 0xbf, 0x47, 0x10, 0xf1, 0xb7, 0x2d, 0x7c, 0x63, 0xc2, 0xad, + 0x1c, 0x5e, 0x71, 0x93, 0x1b, 0x41, 0xa0, 0x52, 0x55, 0x4e, 0xa8, 0xba, 0xa5, 0x5c, 0x9f, 0xa2, + 0xca, 0x96, 0x44, 0x57, 0xd7, 0x33, 0x04, 0xf1, 0xd1, 0xf5, 0x06, 0x6f, 0x8e, 0x0f, 0x3a, 0x66, + 0x47, 0x4b, 0xe6, 0x66, 0xa1, 0x48, 0xbd, 0xdb, 0x42, 0xef, 0x1d, 0x25, 0x3b, 0x45, 0x6f, 0x73, + 0xc4, 0x81, 0xab, 0xdb, 0xad, 0xaf, 0x37, 0xb6, 0x27, 0xd5, 0x77, 0x68, 0x77, 0x99, 0x54, 0xdf, + 0xe1, 0x0d, 0x20, 0x70, 0x7d, 0x5b, 0x82, 0xd6, 0xab, 0xaf, 0x5c, 0xca, 0x26, 0xd6, 0x77, 0x78, + 0x19, 0x9c, 0x58, 0xdf, 0xd1, 0x1d, 0x2f, 0x70, 0x7d, 0x25, 0xd1, 0xd5, 0xf5, 0x23, 0x82, 0xd8, + 0xc0, 0xa8, 0xc7, 0x13, 0x16, 0xbd, 0xf3, 0x9b, 0x47, 0xf2, 0xf5, 0x80, 0x68, 0x29, 0x70, 0x4b, + 0x08, 0xcc, 0x2a, 0x1b, 0x53, 0x04, 0xaa, 0x7d, 0xae, 0xab, 0xf1, 0x07, 0x04, 0xd0, 0x9f, 0xdd, + 0xf8, 0xe6, 0xa4, 0x9e, 0x3d, 0xb2, 0x69, 0x24, 0x6f, 0x05, 0x03, 0x4b, 0x81, 0x77, 0x84, 0xc0, + 0x8c, 0x72, 0x63, 0x5a, 0x06, 0x7b, 0xd4, 0x6d, 0xb4, 0x51, 0x78, 0x86, 0xe0, 0x52, 0x8b, 0x75, + 0xc6, 0x46, 0x2a, 0xac, 0xf4, 0xfa, 0xce, 0xbe, 0xcd, 0x38, 0xdb, 0x47, 0x1f, 0xe4, 0x25, 0xb6, + 0xcd, 0x0c, 0xd5, 0x6c, 0x67, 0x98, 0xdd, 0xce, 0xb6, 0xa9, 0x29, 0xfe, 0x8b, 0x90, 0xf5, 0x5e, + 0xa9, 0x96, 0xee, 0x9c, 0xff, 0x77, 0xc3, 0xdb, 0x3d, 0xcb, 0x4f, 0xa1, 0x57, 0x1e, 0x78, 0x3e, + 0x8a, 0x06, 0xeb, 0x6a, 0x99, 0x5e, 0x88, 0xcc, 0xe1, 
0x66, 0xc1, 0x85, 0xfe, 0xe6, 0x03, 0x9e, + 0x08, 0xc0, 0x93, 0x1e, 0xe0, 0xc9, 0xa1, 0xe7, 0xab, 0x19, 0x16, 0xf1, 0x6e, 0xff, 0x1b, 0x00, + 0x00, 0xff, 0xff, 0x6a, 0xaa, 0xbe, 0x57, 0x66, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/entity.pb.go b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/entity.pb.go new file mode 100644 index 0000000..462495d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/entity.pb.go @@ -0,0 +1,928 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1beta3/entity.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. +// +// A partition ID contains several dimensions: +// project ID and namespace ID. +// +// Partition dimensions: +// +// - May be `""`. +// - Must be valid UTF-8 bytes. +// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}` +// If the value of any dimension matches regex `__.*__`, the partition is +// reserved/read-only. +// A reserved/read-only partition ID is forbidden in certain documented +// contexts. +// +// Foreign partition IDs (in which the project ID does +// not match the context project ID ) are discouraged. +// Reads and writes of foreign partition IDs may fail if the project is not in +// an active state. +type PartitionId struct { + // The ID of the project to which the entities belong. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If not empty, the ID of the namespace to which the entities belong. 
+ NamespaceId string `protobuf:"bytes,4,opt,name=namespace_id,json=namespaceId,proto3" json:"namespace_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionId) Reset() { *m = PartitionId{} } +func (m *PartitionId) String() string { return proto.CompactTextString(m) } +func (*PartitionId) ProtoMessage() {} +func (*PartitionId) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{0} +} +func (m *PartitionId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionId.Unmarshal(m, b) +} +func (m *PartitionId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionId.Marshal(b, m, deterministic) +} +func (dst *PartitionId) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionId.Merge(dst, src) +} +func (m *PartitionId) XXX_Size() int { + return xxx_messageInfo_PartitionId.Size(m) +} +func (m *PartitionId) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionId.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionId proto.InternalMessageInfo + +func (m *PartitionId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *PartitionId) GetNamespaceId() string { + if m != nil { + return m.NamespaceId + } + return "" +} + +// A unique identifier for an entity. +// If a key's partition ID or any of its path kinds or names are +// reserved/read-only, the key is reserved/read-only. +// A reserved/read-only key is forbidden in certain documented contexts. +type Key struct { + // Entities are partitioned into subsets, currently identified by a project + // ID and namespace ID. + // Queries are scoped to a single partition. + PartitionId *PartitionId `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element identifies a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // + // An entity path is always fully complete: *all* of the entity's ancestors + // are required to be in the path along with the entity identifier itself. + // The only exception is that in some documented cases, the identifier in the + // last path element (for the entity) itself may be omitted. For example, + // the last path element of the key of `Mutation.insert` may have no + // identifier. + // + // A path can never be empty, and a path can have at most 100 elements. 
+ Path []*Key_PathElement `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key) Reset() { *m = Key{} } +func (m *Key) String() string { return proto.CompactTextString(m) } +func (*Key) ProtoMessage() {} +func (*Key) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{1} +} +func (m *Key) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key.Unmarshal(m, b) +} +func (m *Key) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key.Marshal(b, m, deterministic) +} +func (dst *Key) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key.Merge(dst, src) +} +func (m *Key) XXX_Size() int { + return xxx_messageInfo_Key.Size(m) +} +func (m *Key) XXX_DiscardUnknown() { + xxx_messageInfo_Key.DiscardUnknown(m) +} + +var xxx_messageInfo_Key proto.InternalMessageInfo + +func (m *Key) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *Key) GetPath() []*Key_PathElement { + if m != nil { + return m.Path + } + return nil +} + +// A (kind, ID/name) pair used to construct a key path. +// +// If either name or ID is set, the element is complete. +// If neither is set, the element is incomplete. +type Key_PathElement struct { + // The kind of the entity. + // A kind matching regex `__.*__` is reserved/read-only. + // A kind must not contain more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` + // The type of ID. + // + // Types that are valid to be assigned to IdType: + // *Key_PathElement_Id + // *Key_PathElement_Name + IdType isKey_PathElement_IdType `protobuf_oneof:"id_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key_PathElement) Reset() { *m = Key_PathElement{} } +func (m *Key_PathElement) String() string { return proto.CompactTextString(m) } +func (*Key_PathElement) ProtoMessage() {} +func (*Key_PathElement) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{1, 0} +} +func (m *Key_PathElement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key_PathElement.Unmarshal(m, b) +} +func (m *Key_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key_PathElement.Marshal(b, m, deterministic) +} +func (dst *Key_PathElement) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key_PathElement.Merge(dst, src) +} +func (m *Key_PathElement) XXX_Size() int { + return xxx_messageInfo_Key_PathElement.Size(m) +} +func (m *Key_PathElement) XXX_DiscardUnknown() { + xxx_messageInfo_Key_PathElement.DiscardUnknown(m) +} + +var xxx_messageInfo_Key_PathElement proto.InternalMessageInfo + +func (m *Key_PathElement) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +type isKey_PathElement_IdType interface { + isKey_PathElement_IdType() +} + +type Key_PathElement_Id struct { + Id int64 `protobuf:"varint,2,opt,name=id,proto3,oneof"` +} + +type Key_PathElement_Name struct { + Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"` +} + +func (*Key_PathElement_Id) isKey_PathElement_IdType() {} + +func (*Key_PathElement_Name) isKey_PathElement_IdType() {} + +func (m *Key_PathElement) GetIdType() isKey_PathElement_IdType { + if m != nil { + return m.IdType + } + return nil +} + +func (m 
*Key_PathElement) GetId() int64 { + if x, ok := m.GetIdType().(*Key_PathElement_Id); ok { + return x.Id + } + return 0 +} + +func (m *Key_PathElement) GetName() string { + if x, ok := m.GetIdType().(*Key_PathElement_Name); ok { + return x.Name + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Key_PathElement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Key_PathElement_OneofMarshaler, _Key_PathElement_OneofUnmarshaler, _Key_PathElement_OneofSizer, []interface{}{ + (*Key_PathElement_Id)(nil), + (*Key_PathElement_Name)(nil), + } +} + +func _Key_PathElement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case nil: + default: + return fmt.Errorf("Key_PathElement.IdType has unexpected type %T", x) + } + return nil +} + +func _Key_PathElement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Key_PathElement) + switch tag { + case 2: // id_type.id + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.IdType = &Key_PathElement_Id{int64(x)} + return true, err + case 3: // id_type.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IdType = &Key_PathElement_Name{x} + return true, err + default: + return false, nil + } +} + +func _Key_PathElement_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An array value. +type ArrayValue struct { + // Values in the array. + // The order of this array may not be preserved if it contains a mix of + // indexed and unindexed values. 
+ Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArrayValue) Reset() { *m = ArrayValue{} } +func (m *ArrayValue) String() string { return proto.CompactTextString(m) } +func (*ArrayValue) ProtoMessage() {} +func (*ArrayValue) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{2} +} +func (m *ArrayValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArrayValue.Unmarshal(m, b) +} +func (m *ArrayValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArrayValue.Marshal(b, m, deterministic) +} +func (dst *ArrayValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArrayValue.Merge(dst, src) +} +func (m *ArrayValue) XXX_Size() int { + return xxx_messageInfo_ArrayValue.Size(m) +} +func (m *ArrayValue) XXX_DiscardUnknown() { + xxx_messageInfo_ArrayValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ArrayValue proto.InternalMessageInfo + +func (m *ArrayValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A message that can hold any of the supported value types and associated +// metadata. +type Value struct { + // Must have a value set. + // + // Types that are valid to be assigned to ValueType: + // *Value_NullValue + // *Value_BooleanValue + // *Value_IntegerValue + // *Value_DoubleValue + // *Value_TimestampValue + // *Value_KeyValue + // *Value_StringValue + // *Value_BlobValue + // *Value_GeoPointValue + // *Value_EntityValue + // *Value_ArrayValue + ValueType isValue_ValueType `protobuf_oneof:"value_type"` + // The `meaning` field should only be populated for backwards compatibility. + Meaning int32 `protobuf:"varint,14,opt,name=meaning,proto3" json:"meaning,omitempty"` + // If the value should be excluded from all indexes including those defined + // explicitly. 
+ ExcludeFromIndexes bool `protobuf:"varint,19,opt,name=exclude_from_indexes,json=excludeFromIndexes,proto3" json:"exclude_from_indexes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{3} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_ValueType interface { + isValue_ValueType() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,11,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,1,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,2,opt,name=integer_value,json=integerValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,10,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +type Value_KeyValue struct { + KeyValue *Key `protobuf:"bytes,5,opt,name=key_value,json=keyValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,17,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BlobValue struct { + BlobValue []byte `protobuf:"bytes,18,opt,name=blob_value,json=blobValue,proto3,oneof"` +} + +type Value_GeoPointValue struct { + GeoPointValue *latlng.LatLng `protobuf:"bytes,8,opt,name=geo_point_value,json=geoPointValue,proto3,oneof"` +} + +type Value_EntityValue struct { + EntityValue *Entity `protobuf:"bytes,6,opt,name=entity_value,json=entityValue,proto3,oneof"` +} + +type Value_ArrayValue struct { + ArrayValue *ArrayValue `protobuf:"bytes,9,opt,name=array_value,json=arrayValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_ValueType() {} + +func (*Value_BooleanValue) isValue_ValueType() {} + +func (*Value_IntegerValue) isValue_ValueType() {} + +func (*Value_DoubleValue) isValue_ValueType() {} + +func (*Value_TimestampValue) isValue_ValueType() {} + +func (*Value_KeyValue) isValue_ValueType() {} + +func (*Value_StringValue) isValue_ValueType() {} + +func (*Value_BlobValue) isValue_ValueType() {} + +func (*Value_GeoPointValue) isValue_ValueType() {} + +func (*Value_EntityValue) isValue_ValueType() {} + +func (*Value_ArrayValue) isValue_ValueType() {} + +func (m *Value) GetValueType() isValue_ValueType { + if m != nil { + return m.ValueType + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetValueType().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBooleanValue() bool 
{ + if x, ok := m.GetValueType().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetIntegerValue() int64 { + if x, ok := m.GetValueType().(*Value_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetValueType().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetValueType().(*Value_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +func (m *Value) GetKeyValue() *Key { + if x, ok := m.GetValueType().(*Value_KeyValue); ok { + return x.KeyValue + } + return nil +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetValueType().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBlobValue() []byte { + if x, ok := m.GetValueType().(*Value_BlobValue); ok { + return x.BlobValue + } + return nil +} + +func (m *Value) GetGeoPointValue() *latlng.LatLng { + if x, ok := m.GetValueType().(*Value_GeoPointValue); ok { + return x.GeoPointValue + } + return nil +} + +func (m *Value) GetEntityValue() *Entity { + if x, ok := m.GetValueType().(*Value_EntityValue); ok { + return x.EntityValue + } + return nil +} + +func (m *Value) GetArrayValue() *ArrayValue { + if x, ok := m.GetValueType().(*Value_ArrayValue); ok { + return x.ArrayValue + } + return nil +} + +func (m *Value) GetMeaning() int32 { + if m != nil { + return m.Meaning + } + return 0 +} + +func (m *Value) GetExcludeFromIndexes() bool { + if m != nil { + return m.ExcludeFromIndexes + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BooleanValue)(nil), + (*Value_IntegerValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_TimestampValue)(nil), + (*Value_KeyValue)(nil), + (*Value_StringValue)(nil), + (*Value_BlobValue)(nil), + (*Value_GeoPointValue)(nil), + (*Value_EntityValue)(nil), + (*Value_ArrayValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_IntegerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_TimestampValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return err + } + case *Value_KeyValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KeyValue); err != nil { + return err + } + case *Value_StringValue: + b.EncodeVarint(17<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BlobValue: + b.EncodeVarint(18<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BlobValue) + case *Value_GeoPointValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.GeoPointValue); err != nil { + return err + } + case *Value_EntityValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EntityValue); err != nil { + return err + } + case *Value_ArrayValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Value.ValueType has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 11: // value_type.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 1: // value_type.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_BooleanValue{x != 0} + return true, err + case 2: // value_type.integer_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_IntegerValue{int64(x)} + return true, err + case 3: // value_type.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ValueType = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 10: // value_type.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ValueType = &Value_TimestampValue{msg} + return true, err + case 5: // value_type.key_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Key) + err := b.DecodeMessage(msg) + m.ValueType = &Value_KeyValue{msg} + return true, err + case 17: // value_type.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_StringValue{x} + return true, err + case 18: // value_type.blob_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ValueType = &Value_BlobValue{x} + return true, err + case 8: // value_type.geo_point_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(latlng.LatLng) + err := b.DecodeMessage(msg) + m.ValueType = &Value_GeoPointValue{msg} + return true, err + case 6: // value_type.entity_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Entity) + err := b.DecodeMessage(msg) + m.ValueType = &Value_EntityValue{msg} + return true, err + case 9: // value_type.array_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_ArrayValue{msg} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_IntegerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_TimestampValue: + s := 
proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_KeyValue: + s := proto.Size(x.KeyValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_StringValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BlobValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.BlobValue))) + n += len(x.BlobValue) + case *Value_GeoPointValue: + s := proto.Size(x.GeoPointValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_EntityValue: + s := proto.Size(x.EntityValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ArrayValue: + s := proto.Size(x.ArrayValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A Datastore data object. +// +// An entity is limited to 1 megabyte when stored. That _roughly_ +// corresponds to a limit of 1 megabyte for the serialized form of this +// message. +type Entity struct { + // The entity's key. + // + // An entity must have a key, unless otherwise documented (for example, + // an entity in `Value.entity_value` may have no key). + // An entity's kind is its key path's last element's kind, + // or null if it has no key. + Key *Key `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The entity's properties. + // The map's keys are property names. + // A property name matching regex `__.*__` is reserved. + // A reserved property name is forbidden in certain documented contexts. + // The name must not contain more than 500 characters. + // The name cannot be `""`. 
+ Properties map[string]*Value `protobuf:"bytes,3,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Entity) Reset() { *m = Entity{} } +func (m *Entity) String() string { return proto.CompactTextString(m) } +func (*Entity) ProtoMessage() {} +func (*Entity) Descriptor() ([]byte, []int) { + return fileDescriptor_entity_50acbc7f8774bafa, []int{4} +} +func (m *Entity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Entity.Unmarshal(m, b) +} +func (m *Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Entity.Marshal(b, m, deterministic) +} +func (dst *Entity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Entity.Merge(dst, src) +} +func (m *Entity) XXX_Size() int { + return xxx_messageInfo_Entity.Size(m) +} +func (m *Entity) XXX_DiscardUnknown() { + xxx_messageInfo_Entity.DiscardUnknown(m) +} + +var xxx_messageInfo_Entity proto.InternalMessageInfo + +func (m *Entity) GetKey() *Key { + if m != nil { + return m.Key + } + return nil +} + +func (m *Entity) GetProperties() map[string]*Value { + if m != nil { + return m.Properties + } + return nil +} + +func init() { + proto.RegisterType((*PartitionId)(nil), "google.datastore.v1beta3.PartitionId") + proto.RegisterType((*Key)(nil), "google.datastore.v1beta3.Key") + proto.RegisterType((*Key_PathElement)(nil), "google.datastore.v1beta3.Key.PathElement") + proto.RegisterType((*ArrayValue)(nil), "google.datastore.v1beta3.ArrayValue") + proto.RegisterType((*Value)(nil), "google.datastore.v1beta3.Value") + proto.RegisterType((*Entity)(nil), "google.datastore.v1beta3.Entity") + proto.RegisterMapType((map[string]*Value)(nil), "google.datastore.v1beta3.Entity.PropertiesEntry") +} + +func init() { + proto.RegisterFile("google/datastore/v1beta3/entity.proto", fileDescriptor_entity_50acbc7f8774bafa) +} + +var fileDescriptor_entity_50acbc7f8774bafa = []byte{ + // 789 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0xdf, 0x8e, 0xdb, 0x44, + 0x14, 0xc6, 0xed, 0x64, 0xb3, 0x5d, 0x1f, 0xbb, 0xbb, 0x65, 0xda, 0x0b, 0x2b, 0x6a, 0xd9, 0x10, + 0x58, 0x29, 0xdc, 0xd8, 0xed, 0x56, 0x08, 0x44, 0xe9, 0x45, 0x03, 0xa1, 0x8e, 0x5a, 0x41, 0x34, + 0xaa, 0xf6, 0x02, 0xad, 0x88, 0x26, 0xf1, 0xd4, 0x1d, 0x62, 0xcf, 0x58, 0xf6, 0xb8, 0xaa, 0x5f, + 0x09, 0xee, 0x78, 0x0c, 0x9e, 0x83, 0x3b, 0x5e, 0x02, 0xcd, 0x1f, 0x3b, 0xab, 0x56, 0x29, 0xdc, + 0x79, 0xce, 0xf9, 0x9d, 0x6f, 0xbe, 0x99, 0x73, 0x26, 0x81, 0x8b, 0x4c, 0x88, 0x2c, 0xa7, 0x71, + 0x4a, 0x24, 0xa9, 0xa5, 0xa8, 0x68, 0xfc, 0xf6, 0xd1, 0x86, 0x4a, 0xf2, 0x38, 0xa6, 0x5c, 0x32, + 0xd9, 0x46, 0x65, 0x25, 0xa4, 0x40, 0xa1, 0xc1, 0xa2, 0x1e, 0x8b, 0x2c, 0x36, 0xbe, 0x6f, 0x05, + 0x48, 0xc9, 0x62, 0xc2, 0xb9, 0x90, 0x44, 0x32, 0xc1, 0x6b, 0x53, 0xd7, 0x67, 0xf5, 0x6a, 0xd3, + 0xbc, 0x8e, 0x6b, 0x59, 0x35, 0x5b, 0x69, 0xb3, 0xe7, 0xef, 0x67, 0x25, 0x2b, 0x68, 0x2d, 0x49, + 0x51, 0x5a, 0xc0, 0x6e, 0x1b, 0xcb, 0xb6, 0xa4, 0x71, 0x4e, 0x64, 0xce, 0x33, 0x93, 0x99, 0xfe, + 0x0c, 0xfe, 0x8a, 0x54, 0x92, 0xa9, 0xcd, 0x96, 0x29, 0x7a, 0x00, 0x50, 0x56, 0xe2, 0x37, 0xba, + 0x95, 0x6b, 0x96, 0x86, 0x83, 0x89, 0x3b, 0xf3, 0xb0, 0x67, 0x23, 0xcb, 0x14, 0x7d, 0x06, 0x01, + 0x27, 0x05, 0xad, 0x4b, 0xb2, 0xa5, 0x0a, 0x38, 0xd2, 0x80, 0xdf, 0xc7, 0x96, 0xe9, 0xf4, 0x6f, + 0x17, 0x86, 0x2f, 0x68, 0x8b, 0x12, 0x08, 
0xca, 0x4e, 0x58, 0xa1, 0xee, 0xc4, 0x9d, 0xf9, 0x97, + 0x17, 0xd1, 0xa1, 0x0b, 0x88, 0x6e, 0xd8, 0xc0, 0x7e, 0x79, 0xc3, 0xd3, 0x53, 0x38, 0x2a, 0x89, + 0x7c, 0x13, 0x0e, 0x26, 0xc3, 0x99, 0x7f, 0xf9, 0xe5, 0x61, 0x85, 0x17, 0xb4, 0x8d, 0x56, 0x44, + 0xbe, 0x59, 0xe4, 0xb4, 0xa0, 0x5c, 0x62, 0x5d, 0x36, 0x7e, 0xa5, 0x4e, 0xd8, 0x07, 0x11, 0x82, + 0xa3, 0x1d, 0xe3, 0xc6, 0x8f, 0x87, 0xf5, 0x37, 0xba, 0x03, 0x03, 0x7b, 0xda, 0x61, 0xe2, 0xe0, + 0x01, 0x4b, 0xd1, 0x3d, 0x38, 0x52, 0x87, 0x0a, 0x87, 0x8a, 0x4a, 0x1c, 0xac, 0x57, 0x73, 0x0f, + 0x6e, 0xb1, 0x74, 0xad, 0x2e, 0x71, 0xba, 0x00, 0x78, 0x56, 0x55, 0xa4, 0xbd, 0x22, 0x79, 0x43, + 0xd1, 0xd7, 0x70, 0xfc, 0x56, 0x7d, 0xd4, 0xa1, 0xab, 0x4d, 0x9e, 0x1f, 0x36, 0xa9, 0x0b, 0xb0, + 0xc5, 0xa7, 0x7f, 0x8c, 0x60, 0x64, 0x24, 0x9e, 0x00, 0xf0, 0x26, 0xcf, 0xd7, 0x3a, 0x11, 0xfa, + 0x13, 0x77, 0x76, 0x7a, 0x39, 0xee, 0x64, 0xba, 0xc6, 0x46, 0x3f, 0x35, 0x79, 0xae, 0xf9, 0xc4, + 0xc1, 0x1e, 0xef, 0x16, 0xe8, 0x02, 0x6e, 0x6f, 0x84, 0xc8, 0x29, 0xe1, 0xb6, 0x5e, 0x9d, 0xee, + 0x24, 0x71, 0x70, 0x60, 0xc3, 0x3d, 0xc6, 0xb8, 0xa4, 0x19, 0xad, 0x2c, 0xd6, 0x1d, 0x39, 0xb0, + 0x61, 0x83, 0x7d, 0x0e, 0x41, 0x2a, 0x9a, 0x4d, 0x4e, 0x2d, 0xa5, 0x2e, 0xc1, 0x4d, 0x1c, 0xec, + 0x9b, 0xa8, 0x81, 0x16, 0x70, 0xd6, 0x4f, 0x99, 0xe5, 0x40, 0xb7, 0xf8, 0x43, 0xd3, 0xaf, 0x3a, + 0x2e, 0x71, 0xf0, 0x69, 0x5f, 0x64, 0x64, 0xbe, 0x03, 0x6f, 0x47, 0x5b, 0x2b, 0x30, 0xd2, 0x02, + 0x0f, 0x3e, 0xda, 0xe1, 0xc4, 0xc1, 0x27, 0x3b, 0xda, 0xf6, 0x4e, 0x6b, 0x59, 0x31, 0x9e, 0x59, + 0x81, 0x4f, 0x6c, 0xbb, 0x7c, 0x13, 0x35, 0xd0, 0x39, 0xc0, 0x26, 0x17, 0x1b, 0x8b, 0xa0, 0x89, + 0x3b, 0x0b, 0xd4, 0xed, 0xa9, 0x98, 0x01, 0x9e, 0xc2, 0x59, 0x46, 0xc5, 0xba, 0x14, 0x8c, 0x4b, + 0x4b, 0x9d, 0x68, 0x27, 0x77, 0x3b, 0x27, 0xaa, 0xe5, 0xd1, 0x4b, 0x22, 0x5f, 0xf2, 0x2c, 0x71, + 0xf0, 0xed, 0x8c, 0x8a, 0x95, 0x82, 0xbb, 0x9b, 0x08, 0xcc, 0x1b, 0xb7, 0xb5, 0xc7, 0xba, 0x76, + 0x72, 0xf8, 0x14, 0x0b, 0x4d, 0x2b, 0x9b, 0xa6, 0xce, 0xc8, 0x3c, 0x07, 0x9f, 0xa8, 0x89, 0xb2, + 0x2a, 0x9e, 0x56, 0xf9, 0xe2, 0xb0, 0xca, 0x7e, 0xfc, 0x12, 0x07, 0x03, 0xd9, 0x0f, 0x63, 0x08, + 0xb7, 0x0a, 0x4a, 0x38, 0xe3, 0x59, 0x78, 0x3a, 0x71, 0x67, 0x23, 0xdc, 0x2d, 0xd1, 0x43, 0xb8, + 0x47, 0xdf, 0x6d, 0xf3, 0x26, 0xa5, 0xeb, 0xd7, 0x95, 0x28, 0xd6, 0x8c, 0xa7, 0xf4, 0x1d, 0xad, + 0xc3, 0xbb, 0x6a, 0x5a, 0x30, 0xb2, 0xb9, 0x1f, 0x2b, 0x51, 0x2c, 0x4d, 0x66, 0x1e, 0x00, 0x68, + 0x3b, 0x66, 0xe8, 0xff, 0x71, 0xe1, 0xd8, 0x98, 0x47, 0x31, 0x0c, 0x77, 0xb4, 0xb5, 0xaf, 0xfa, + 0xe3, 0x1d, 0xc3, 0x8a, 0x44, 0x2b, 0xfd, 0xcb, 0x52, 0xd2, 0x4a, 0x32, 0x5a, 0x87, 0x43, 0xfd, + 0x4c, 0x1e, 0xfe, 0xd7, 0x1d, 0x45, 0xab, 0xbe, 0x64, 0xc1, 0x65, 0xd5, 0xe2, 0x1b, 0x1a, 0xe3, + 0x5f, 0xe1, 0xec, 0xbd, 0x34, 0xba, 0xb3, 0x77, 0xe5, 0x99, 0x6d, 0xbf, 0x82, 0xd1, 0x7e, 0xd4, + 0xff, 0xc7, 0xc3, 0x34, 0xf4, 0xb7, 0x83, 0x6f, 0xdc, 0xf9, 0x9f, 0x2e, 0xdc, 0xdf, 0x8a, 0xe2, + 0x60, 0xc5, 0xdc, 0x37, 0x26, 0x57, 0x6a, 0xce, 0x57, 0xee, 0x2f, 0xcf, 0x2c, 0x98, 0x89, 0x9c, + 0xf0, 0x2c, 0x12, 0x55, 0x16, 0x67, 0x94, 0xeb, 0x57, 0x10, 0x9b, 0x14, 0x29, 0x59, 0xfd, 0xe1, + 0x3f, 0xc4, 0x93, 0x3e, 0xf2, 0xfb, 0xe0, 0xd3, 0xe7, 0x46, 0xe3, 0xfb, 0x5c, 0x34, 0x69, 0xf4, + 0x43, 0xbf, 0xe5, 0xd5, 0xa3, 0xb9, 0x42, 0xff, 0xea, 0x80, 0x6b, 0x0d, 0x5c, 0xf7, 0xc0, 0xf5, + 0x95, 0xd1, 0xda, 0x1c, 0xeb, 0xfd, 0x1e, 0xff, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x2c, 0x6d, 0x30, + 0xe2, 0x90, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/query.pb.go 
b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/query.pb.go new file mode 100644 index 0000000..ed8eac9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/datastore/v1beta3/query.pb.go @@ -0,0 +1,1259 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/datastore/v1beta3/query.proto + +package datastore // import "google.golang.org/genproto/googleapis/datastore/v1beta3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies what data the 'entity' field contains. +// A `ResultType` is either implied (for example, in `LookupResponse.missing` +// from `datastore.proto`, it is always `KEY_ONLY`) or specified by context +// (for example, in message `QueryResultBatch`, field `entity_result_type` +// specifies a `ResultType` for all the values in field `entity_results`). +type EntityResult_ResultType int32 + +const ( + // Unspecified. This value is never used. + EntityResult_RESULT_TYPE_UNSPECIFIED EntityResult_ResultType = 0 + // The key and properties. + EntityResult_FULL EntityResult_ResultType = 1 + // A projected subset of properties. The entity may have no key. + EntityResult_PROJECTION EntityResult_ResultType = 2 + // Only the key. + EntityResult_KEY_ONLY EntityResult_ResultType = 3 +) + +var EntityResult_ResultType_name = map[int32]string{ + 0: "RESULT_TYPE_UNSPECIFIED", + 1: "FULL", + 2: "PROJECTION", + 3: "KEY_ONLY", +} +var EntityResult_ResultType_value = map[string]int32{ + "RESULT_TYPE_UNSPECIFIED": 0, + "FULL": 1, + "PROJECTION": 2, + "KEY_ONLY": 3, +} + +func (x EntityResult_ResultType) String() string { + return proto.EnumName(EntityResult_ResultType_name, int32(x)) +} +func (EntityResult_ResultType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{0, 0} +} + +// The sort direction. +type PropertyOrder_Direction int32 + +const ( + // Unspecified. This value must not be used. + PropertyOrder_DIRECTION_UNSPECIFIED PropertyOrder_Direction = 0 + // Ascending. + PropertyOrder_ASCENDING PropertyOrder_Direction = 1 + // Descending. + PropertyOrder_DESCENDING PropertyOrder_Direction = 2 +) + +var PropertyOrder_Direction_name = map[int32]string{ + 0: "DIRECTION_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var PropertyOrder_Direction_value = map[string]int32{ + "DIRECTION_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x PropertyOrder_Direction) String() string { + return proto.EnumName(PropertyOrder_Direction_name, int32(x)) +} +func (PropertyOrder_Direction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{5, 0} +} + +// A composite filter operator. +type CompositeFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. 
+ CompositeFilter_OPERATOR_UNSPECIFIED CompositeFilter_Operator = 0 + // The results are required to satisfy each of the combined filters. + CompositeFilter_AND CompositeFilter_Operator = 1 +) + +var CompositeFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "AND", +} +var CompositeFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "AND": 1, +} + +func (x CompositeFilter_Operator) String() string { + return proto.EnumName(CompositeFilter_Operator_name, int32(x)) +} +func (CompositeFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{7, 0} +} + +// A property filter operator. +type PropertyFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + PropertyFilter_OPERATOR_UNSPECIFIED PropertyFilter_Operator = 0 + // Less than. + PropertyFilter_LESS_THAN PropertyFilter_Operator = 1 + // Less than or equal. + PropertyFilter_LESS_THAN_OR_EQUAL PropertyFilter_Operator = 2 + // Greater than. + PropertyFilter_GREATER_THAN PropertyFilter_Operator = 3 + // Greater than or equal. + PropertyFilter_GREATER_THAN_OR_EQUAL PropertyFilter_Operator = 4 + // Equal. + PropertyFilter_EQUAL PropertyFilter_Operator = 5 + // Has ancestor. + PropertyFilter_HAS_ANCESTOR PropertyFilter_Operator = 11 +) + +var PropertyFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "LESS_THAN", + 2: "LESS_THAN_OR_EQUAL", + 3: "GREATER_THAN", + 4: "GREATER_THAN_OR_EQUAL", + 5: "EQUAL", + 11: "HAS_ANCESTOR", +} +var PropertyFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "LESS_THAN": 1, + "LESS_THAN_OR_EQUAL": 2, + "GREATER_THAN": 3, + "GREATER_THAN_OR_EQUAL": 4, + "EQUAL": 5, + "HAS_ANCESTOR": 11, +} + +func (x PropertyFilter_Operator) String() string { + return proto.EnumName(PropertyFilter_Operator_name, int32(x)) +} +func (PropertyFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{8, 0} +} + +// The possible values for the `more_results` field. +type QueryResultBatch_MoreResultsType int32 + +const ( + // Unspecified. This value is never used. + QueryResultBatch_MORE_RESULTS_TYPE_UNSPECIFIED QueryResultBatch_MoreResultsType = 0 + // There may be additional batches to fetch from this query. + QueryResultBatch_NOT_FINISHED QueryResultBatch_MoreResultsType = 1 + // The query is finished, but there may be more results after the limit. + QueryResultBatch_MORE_RESULTS_AFTER_LIMIT QueryResultBatch_MoreResultsType = 2 + // The query is finished, but there may be more results after the end + // cursor. + QueryResultBatch_MORE_RESULTS_AFTER_CURSOR QueryResultBatch_MoreResultsType = 4 + // The query is finished, and there are no more results. 
+ QueryResultBatch_NO_MORE_RESULTS QueryResultBatch_MoreResultsType = 3 +) + +var QueryResultBatch_MoreResultsType_name = map[int32]string{ + 0: "MORE_RESULTS_TYPE_UNSPECIFIED", + 1: "NOT_FINISHED", + 2: "MORE_RESULTS_AFTER_LIMIT", + 4: "MORE_RESULTS_AFTER_CURSOR", + 3: "NO_MORE_RESULTS", +} +var QueryResultBatch_MoreResultsType_value = map[string]int32{ + "MORE_RESULTS_TYPE_UNSPECIFIED": 0, + "NOT_FINISHED": 1, + "MORE_RESULTS_AFTER_LIMIT": 2, + "MORE_RESULTS_AFTER_CURSOR": 4, + "NO_MORE_RESULTS": 3, +} + +func (x QueryResultBatch_MoreResultsType) String() string { + return proto.EnumName(QueryResultBatch_MoreResultsType_name, int32(x)) +} +func (QueryResultBatch_MoreResultsType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{11, 0} +} + +// The result of fetching an entity from Datastore. +type EntityResult struct { + // The resulting entity. + Entity *Entity `protobuf:"bytes,1,opt,name=entity,proto3" json:"entity,omitempty"` + // The version of the entity, a strictly positive number that monotonically + // increases with changes to the entity. + // + // This field is set for + // [`FULL`][google.datastore.v1beta3.EntityResult.ResultType.FULL] entity + // results. + // + // For [missing][google.datastore.v1beta3.LookupResponse.missing] entities in + // `LookupResponse`, this is the version of the snapshot that was used to look + // up the entity, and it is always set except for eventually consistent reads. + Version int64 `protobuf:"varint,4,opt,name=version,proto3" json:"version,omitempty"` + // A cursor that points to the position after the result entity. + // Set only when the `EntityResult` is part of a `QueryResultBatch` message. + Cursor []byte `protobuf:"bytes,3,opt,name=cursor,proto3" json:"cursor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityResult) Reset() { *m = EntityResult{} } +func (m *EntityResult) String() string { return proto.CompactTextString(m) } +func (*EntityResult) ProtoMessage() {} +func (*EntityResult) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{0} +} +func (m *EntityResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityResult.Unmarshal(m, b) +} +func (m *EntityResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityResult.Marshal(b, m, deterministic) +} +func (dst *EntityResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityResult.Merge(dst, src) +} +func (m *EntityResult) XXX_Size() int { + return xxx_messageInfo_EntityResult.Size(m) +} +func (m *EntityResult) XXX_DiscardUnknown() { + xxx_messageInfo_EntityResult.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityResult proto.InternalMessageInfo + +func (m *EntityResult) GetEntity() *Entity { + if m != nil { + return m.Entity + } + return nil +} + +func (m *EntityResult) GetVersion() int64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *EntityResult) GetCursor() []byte { + if m != nil { + return m.Cursor + } + return nil +} + +// A query for entities. +type Query struct { + // The projection to return. Defaults to returning all properties. + Projection []*Projection `protobuf:"bytes,2,rep,name=projection,proto3" json:"projection,omitempty"` + // The kinds to query (if empty, returns entities of all kinds). + // Currently at most 1 kind may be specified. 
+ Kind []*KindExpression `protobuf:"bytes,3,rep,name=kind,proto3" json:"kind,omitempty"` + // The filter to apply. + Filter *Filter `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + // The order to apply to the query results (if empty, order is unspecified). + Order []*PropertyOrder `protobuf:"bytes,5,rep,name=order,proto3" json:"order,omitempty"` + // The properties to make distinct. The query results will contain the first + // result for each distinct combination of values for the given properties + // (if empty, all results are returned). + DistinctOn []*PropertyReference `protobuf:"bytes,6,rep,name=distinct_on,json=distinctOn,proto3" json:"distinct_on,omitempty"` + // A starting point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to continue the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + StartCursor []byte `protobuf:"bytes,7,opt,name=start_cursor,json=startCursor,proto3" json:"start_cursor,omitempty"` + // An ending point for the query results. Query cursors are + // returned in query result batches and + // [can only be used to limit the same + // query](https://cloud.google.com/datastore/docs/concepts/queries#cursors_limits_and_offsets). + EndCursor []byte `protobuf:"bytes,8,opt,name=end_cursor,json=endCursor,proto3" json:"end_cursor,omitempty"` + // The number of results to skip. Applies before limit, but after all other + // constraints. Optional. Must be >= 0 if specified. + Offset int32 `protobuf:"varint,10,opt,name=offset,proto3" json:"offset,omitempty"` + // The maximum number of results to return. Applies after all other + // constraints. Optional. + // Unspecified is interpreted as no limit. + // Must be >= 0 if specified. 
+ Limit *wrappers.Int32Value `protobuf:"bytes,12,opt,name=limit,proto3" json:"limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Query) Reset() { *m = Query{} } +func (m *Query) String() string { return proto.CompactTextString(m) } +func (*Query) ProtoMessage() {} +func (*Query) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{1} +} +func (m *Query) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Query.Unmarshal(m, b) +} +func (m *Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Query.Marshal(b, m, deterministic) +} +func (dst *Query) XXX_Merge(src proto.Message) { + xxx_messageInfo_Query.Merge(dst, src) +} +func (m *Query) XXX_Size() int { + return xxx_messageInfo_Query.Size(m) +} +func (m *Query) XXX_DiscardUnknown() { + xxx_messageInfo_Query.DiscardUnknown(m) +} + +var xxx_messageInfo_Query proto.InternalMessageInfo + +func (m *Query) GetProjection() []*Projection { + if m != nil { + return m.Projection + } + return nil +} + +func (m *Query) GetKind() []*KindExpression { + if m != nil { + return m.Kind + } + return nil +} + +func (m *Query) GetFilter() *Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *Query) GetOrder() []*PropertyOrder { + if m != nil { + return m.Order + } + return nil +} + +func (m *Query) GetDistinctOn() []*PropertyReference { + if m != nil { + return m.DistinctOn + } + return nil +} + +func (m *Query) GetStartCursor() []byte { + if m != nil { + return m.StartCursor + } + return nil +} + +func (m *Query) GetEndCursor() []byte { + if m != nil { + return m.EndCursor + } + return nil +} + +func (m *Query) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *Query) GetLimit() *wrappers.Int32Value { + if m != nil { + return m.Limit + } + return nil +} + +// A representation of a kind. +type KindExpression struct { + // The name of the kind. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KindExpression) Reset() { *m = KindExpression{} } +func (m *KindExpression) String() string { return proto.CompactTextString(m) } +func (*KindExpression) ProtoMessage() {} +func (*KindExpression) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{2} +} +func (m *KindExpression) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KindExpression.Unmarshal(m, b) +} +func (m *KindExpression) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KindExpression.Marshal(b, m, deterministic) +} +func (dst *KindExpression) XXX_Merge(src proto.Message) { + xxx_messageInfo_KindExpression.Merge(dst, src) +} +func (m *KindExpression) XXX_Size() int { + return xxx_messageInfo_KindExpression.Size(m) +} +func (m *KindExpression) XXX_DiscardUnknown() { + xxx_messageInfo_KindExpression.DiscardUnknown(m) +} + +var xxx_messageInfo_KindExpression proto.InternalMessageInfo + +func (m *KindExpression) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A reference to a property relative to the kind expressions. +type PropertyReference struct { + // The name of the property. + // If name includes "."s, it may be interpreted as a property name path. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyReference) Reset() { *m = PropertyReference{} } +func (m *PropertyReference) String() string { return proto.CompactTextString(m) } +func (*PropertyReference) ProtoMessage() {} +func (*PropertyReference) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{3} +} +func (m *PropertyReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyReference.Unmarshal(m, b) +} +func (m *PropertyReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyReference.Marshal(b, m, deterministic) +} +func (dst *PropertyReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyReference.Merge(dst, src) +} +func (m *PropertyReference) XXX_Size() int { + return xxx_messageInfo_PropertyReference.Size(m) +} +func (m *PropertyReference) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyReference.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyReference proto.InternalMessageInfo + +func (m *PropertyReference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A representation of a property in a projection. +type Projection struct { + // The property to project. + Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Projection) Reset() { *m = Projection{} } +func (m *Projection) String() string { return proto.CompactTextString(m) } +func (*Projection) ProtoMessage() {} +func (*Projection) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{4} +} +func (m *Projection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Projection.Unmarshal(m, b) +} +func (m *Projection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Projection.Marshal(b, m, deterministic) +} +func (dst *Projection) XXX_Merge(src proto.Message) { + xxx_messageInfo_Projection.Merge(dst, src) +} +func (m *Projection) XXX_Size() int { + return xxx_messageInfo_Projection.Size(m) +} +func (m *Projection) XXX_DiscardUnknown() { + xxx_messageInfo_Projection.DiscardUnknown(m) +} + +var xxx_messageInfo_Projection proto.InternalMessageInfo + +func (m *Projection) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +// The desired order for a specific property. +type PropertyOrder struct { + // The property to order by. + Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The direction to order by. Defaults to `ASCENDING`. 
+ Direction PropertyOrder_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=google.datastore.v1beta3.PropertyOrder_Direction" json:"direction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyOrder) Reset() { *m = PropertyOrder{} } +func (m *PropertyOrder) String() string { return proto.CompactTextString(m) } +func (*PropertyOrder) ProtoMessage() {} +func (*PropertyOrder) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{5} +} +func (m *PropertyOrder) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyOrder.Unmarshal(m, b) +} +func (m *PropertyOrder) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyOrder.Marshal(b, m, deterministic) +} +func (dst *PropertyOrder) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyOrder.Merge(dst, src) +} +func (m *PropertyOrder) XXX_Size() int { + return xxx_messageInfo_PropertyOrder.Size(m) +} +func (m *PropertyOrder) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyOrder.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyOrder proto.InternalMessageInfo + +func (m *PropertyOrder) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +func (m *PropertyOrder) GetDirection() PropertyOrder_Direction { + if m != nil { + return m.Direction + } + return PropertyOrder_DIRECTION_UNSPECIFIED +} + +// A holder for any type of filter. +type Filter struct { + // The type of filter. + // + // Types that are valid to be assigned to FilterType: + // *Filter_CompositeFilter + // *Filter_PropertyFilter + FilterType isFilter_FilterType `protobuf_oneof:"filter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Filter) Reset() { *m = Filter{} } +func (m *Filter) String() string { return proto.CompactTextString(m) } +func (*Filter) ProtoMessage() {} +func (*Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{6} +} +func (m *Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Filter.Unmarshal(m, b) +} +func (m *Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Filter.Marshal(b, m, deterministic) +} +func (dst *Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_Filter.Merge(dst, src) +} +func (m *Filter) XXX_Size() int { + return xxx_messageInfo_Filter.Size(m) +} +func (m *Filter) XXX_DiscardUnknown() { + xxx_messageInfo_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_Filter proto.InternalMessageInfo + +type isFilter_FilterType interface { + isFilter_FilterType() +} + +type Filter_CompositeFilter struct { + CompositeFilter *CompositeFilter `protobuf:"bytes,1,opt,name=composite_filter,json=compositeFilter,proto3,oneof"` +} + +type Filter_PropertyFilter struct { + PropertyFilter *PropertyFilter `protobuf:"bytes,2,opt,name=property_filter,json=propertyFilter,proto3,oneof"` +} + +func (*Filter_CompositeFilter) isFilter_FilterType() {} + +func (*Filter_PropertyFilter) isFilter_FilterType() {} + +func (m *Filter) GetFilterType() isFilter_FilterType { + if m != nil { + return m.FilterType + } + return nil +} + +func (m *Filter) GetCompositeFilter() *CompositeFilter { + if x, ok := m.GetFilterType().(*Filter_CompositeFilter); ok { + return x.CompositeFilter + } + return nil +} + +func (m *Filter) GetPropertyFilter() *PropertyFilter { + if x, ok := 
m.GetFilterType().(*Filter_PropertyFilter); ok { + return x.PropertyFilter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Filter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Filter_OneofMarshaler, _Filter_OneofUnmarshaler, _Filter_OneofSizer, []interface{}{ + (*Filter_CompositeFilter)(nil), + (*Filter_PropertyFilter)(nil), + } +} + +func _Filter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Filter) + // filter_type + switch x := m.FilterType.(type) { + case *Filter_CompositeFilter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CompositeFilter); err != nil { + return err + } + case *Filter_PropertyFilter: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PropertyFilter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Filter.FilterType has unexpected type %T", x) + } + return nil +} + +func _Filter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Filter) + switch tag { + case 1: // filter_type.composite_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CompositeFilter) + err := b.DecodeMessage(msg) + m.FilterType = &Filter_CompositeFilter{msg} + return true, err + case 2: // filter_type.property_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PropertyFilter) + err := b.DecodeMessage(msg) + m.FilterType = &Filter_PropertyFilter{msg} + return true, err + default: + return false, nil + } +} + +func _Filter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Filter) + // filter_type + switch x := m.FilterType.(type) { + case *Filter_CompositeFilter: + s := proto.Size(x.CompositeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Filter_PropertyFilter: + s := proto.Size(x.PropertyFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A filter that merges multiple other filters using the given operator. +type CompositeFilter struct { + // The operator for combining multiple filters. + Op CompositeFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.datastore.v1beta3.CompositeFilter_Operator" json:"op,omitempty"` + // The list of filters to combine. + // Must contain at least one filter. 
+ Filters []*Filter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CompositeFilter) Reset() { *m = CompositeFilter{} } +func (m *CompositeFilter) String() string { return proto.CompactTextString(m) } +func (*CompositeFilter) ProtoMessage() {} +func (*CompositeFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{7} +} +func (m *CompositeFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CompositeFilter.Unmarshal(m, b) +} +func (m *CompositeFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CompositeFilter.Marshal(b, m, deterministic) +} +func (dst *CompositeFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_CompositeFilter.Merge(dst, src) +} +func (m *CompositeFilter) XXX_Size() int { + return xxx_messageInfo_CompositeFilter.Size(m) +} +func (m *CompositeFilter) XXX_DiscardUnknown() { + xxx_messageInfo_CompositeFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_CompositeFilter proto.InternalMessageInfo + +func (m *CompositeFilter) GetOp() CompositeFilter_Operator { + if m != nil { + return m.Op + } + return CompositeFilter_OPERATOR_UNSPECIFIED +} + +func (m *CompositeFilter) GetFilters() []*Filter { + if m != nil { + return m.Filters + } + return nil +} + +// A filter on a specific property. +type PropertyFilter struct { + // The property to filter by. + Property *PropertyReference `protobuf:"bytes,1,opt,name=property,proto3" json:"property,omitempty"` + // The operator to filter by. + Op PropertyFilter_Operator `protobuf:"varint,2,opt,name=op,proto3,enum=google.datastore.v1beta3.PropertyFilter_Operator" json:"op,omitempty"` + // The value to compare the property to. + Value *Value `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PropertyFilter) Reset() { *m = PropertyFilter{} } +func (m *PropertyFilter) String() string { return proto.CompactTextString(m) } +func (*PropertyFilter) ProtoMessage() {} +func (*PropertyFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{8} +} +func (m *PropertyFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PropertyFilter.Unmarshal(m, b) +} +func (m *PropertyFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PropertyFilter.Marshal(b, m, deterministic) +} +func (dst *PropertyFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_PropertyFilter.Merge(dst, src) +} +func (m *PropertyFilter) XXX_Size() int { + return xxx_messageInfo_PropertyFilter.Size(m) +} +func (m *PropertyFilter) XXX_DiscardUnknown() { + xxx_messageInfo_PropertyFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_PropertyFilter proto.InternalMessageInfo + +func (m *PropertyFilter) GetProperty() *PropertyReference { + if m != nil { + return m.Property + } + return nil +} + +func (m *PropertyFilter) GetOp() PropertyFilter_Operator { + if m != nil { + return m.Op + } + return PropertyFilter_OPERATOR_UNSPECIFIED +} + +func (m *PropertyFilter) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +// A [GQL +// query](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). 
+type GqlQuery struct { + // A string of the format described + // [here](https://cloud.google.com/datastore/docs/apis/gql/gql_reference). + QueryString string `protobuf:"bytes,1,opt,name=query_string,json=queryString,proto3" json:"query_string,omitempty"` + // When false, the query string must not contain any literals and instead must + // bind all values. For example, + // `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while + // `SELECT * FROM Kind WHERE a = @value` is. + AllowLiterals bool `protobuf:"varint,2,opt,name=allow_literals,json=allowLiterals,proto3" json:"allow_literals,omitempty"` + // For each non-reserved named binding site in the query string, there must be + // a named parameter with that name, but not necessarily the inverse. + // + // Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex + // `__.*__`, and must not be `""`. + NamedBindings map[string]*GqlQueryParameter `protobuf:"bytes,5,rep,name=named_bindings,json=namedBindings,proto3" json:"named_bindings,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Numbered binding site @1 references the first numbered parameter, + // effectively using 1-based indexing, rather than the usual 0. + // + // For each binding site numbered i in `query_string`, there must be an i-th + // numbered parameter. The inverse must also be true. + PositionalBindings []*GqlQueryParameter `protobuf:"bytes,4,rep,name=positional_bindings,json=positionalBindings,proto3" json:"positional_bindings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GqlQuery) Reset() { *m = GqlQuery{} } +func (m *GqlQuery) String() string { return proto.CompactTextString(m) } +func (*GqlQuery) ProtoMessage() {} +func (*GqlQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{9} +} +func (m *GqlQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GqlQuery.Unmarshal(m, b) +} +func (m *GqlQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GqlQuery.Marshal(b, m, deterministic) +} +func (dst *GqlQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_GqlQuery.Merge(dst, src) +} +func (m *GqlQuery) XXX_Size() int { + return xxx_messageInfo_GqlQuery.Size(m) +} +func (m *GqlQuery) XXX_DiscardUnknown() { + xxx_messageInfo_GqlQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_GqlQuery proto.InternalMessageInfo + +func (m *GqlQuery) GetQueryString() string { + if m != nil { + return m.QueryString + } + return "" +} + +func (m *GqlQuery) GetAllowLiterals() bool { + if m != nil { + return m.AllowLiterals + } + return false +} + +func (m *GqlQuery) GetNamedBindings() map[string]*GqlQueryParameter { + if m != nil { + return m.NamedBindings + } + return nil +} + +func (m *GqlQuery) GetPositionalBindings() []*GqlQueryParameter { + if m != nil { + return m.PositionalBindings + } + return nil +} + +// A binding parameter for a GQL query. +type GqlQueryParameter struct { + // The type of parameter. 
+ // + // Types that are valid to be assigned to ParameterType: + // *GqlQueryParameter_Value + // *GqlQueryParameter_Cursor + ParameterType isGqlQueryParameter_ParameterType `protobuf_oneof:"parameter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GqlQueryParameter) Reset() { *m = GqlQueryParameter{} } +func (m *GqlQueryParameter) String() string { return proto.CompactTextString(m) } +func (*GqlQueryParameter) ProtoMessage() {} +func (*GqlQueryParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{10} +} +func (m *GqlQueryParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GqlQueryParameter.Unmarshal(m, b) +} +func (m *GqlQueryParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GqlQueryParameter.Marshal(b, m, deterministic) +} +func (dst *GqlQueryParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_GqlQueryParameter.Merge(dst, src) +} +func (m *GqlQueryParameter) XXX_Size() int { + return xxx_messageInfo_GqlQueryParameter.Size(m) +} +func (m *GqlQueryParameter) XXX_DiscardUnknown() { + xxx_messageInfo_GqlQueryParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_GqlQueryParameter proto.InternalMessageInfo + +type isGqlQueryParameter_ParameterType interface { + isGqlQueryParameter_ParameterType() +} + +type GqlQueryParameter_Value struct { + Value *Value `protobuf:"bytes,2,opt,name=value,proto3,oneof"` +} + +type GqlQueryParameter_Cursor struct { + Cursor []byte `protobuf:"bytes,3,opt,name=cursor,proto3,oneof"` +} + +func (*GqlQueryParameter_Value) isGqlQueryParameter_ParameterType() {} + +func (*GqlQueryParameter_Cursor) isGqlQueryParameter_ParameterType() {} + +func (m *GqlQueryParameter) GetParameterType() isGqlQueryParameter_ParameterType { + if m != nil { + return m.ParameterType + } + return nil +} + +func (m *GqlQueryParameter) GetValue() *Value { + if x, ok := m.GetParameterType().(*GqlQueryParameter_Value); ok { + return x.Value + } + return nil +} + +func (m *GqlQueryParameter) GetCursor() []byte { + if x, ok := m.GetParameterType().(*GqlQueryParameter_Cursor); ok { + return x.Cursor + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GqlQueryParameter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GqlQueryParameter_OneofMarshaler, _GqlQueryParameter_OneofUnmarshaler, _GqlQueryParameter_OneofSizer, []interface{}{ + (*GqlQueryParameter_Value)(nil), + (*GqlQueryParameter_Cursor)(nil), + } +} + +func _GqlQueryParameter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GqlQueryParameter) + // parameter_type + switch x := m.ParameterType.(type) { + case *GqlQueryParameter_Value: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Value); err != nil { + return err + } + case *GqlQueryParameter_Cursor: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Cursor) + case nil: + default: + return fmt.Errorf("GqlQueryParameter.ParameterType has unexpected type %T", x) + } + return nil +} + +func _GqlQueryParameter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GqlQueryParameter) + switch tag { + case 2: // parameter_type.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.ParameterType = &GqlQueryParameter_Value{msg} + return true, err + case 3: // parameter_type.cursor + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ParameterType = &GqlQueryParameter_Cursor{x} + return true, err + default: + return false, nil + } +} + +func _GqlQueryParameter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GqlQueryParameter) + // parameter_type + switch x := m.ParameterType.(type) { + case *GqlQueryParameter_Value: + s := proto.Size(x.Value) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GqlQueryParameter_Cursor: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Cursor))) + n += len(x.Cursor) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A batch of results produced by a query. +type QueryResultBatch struct { + // The number of results skipped, typically because of an offset. + SkippedResults int32 `protobuf:"varint,6,opt,name=skipped_results,json=skippedResults,proto3" json:"skipped_results,omitempty"` + // A cursor that points to the position after the last skipped result. + // Will be set when `skipped_results` != 0. + SkippedCursor []byte `protobuf:"bytes,3,opt,name=skipped_cursor,json=skippedCursor,proto3" json:"skipped_cursor,omitempty"` + // The result type for every entity in `entity_results`. + EntityResultType EntityResult_ResultType `protobuf:"varint,1,opt,name=entity_result_type,json=entityResultType,proto3,enum=google.datastore.v1beta3.EntityResult_ResultType" json:"entity_result_type,omitempty"` + // The results for this batch. + EntityResults []*EntityResult `protobuf:"bytes,2,rep,name=entity_results,json=entityResults,proto3" json:"entity_results,omitempty"` + // A cursor that points to the position after the last result in the batch. + EndCursor []byte `protobuf:"bytes,4,opt,name=end_cursor,json=endCursor,proto3" json:"end_cursor,omitempty"` + // The state of the query after the current batch. 
+ MoreResults QueryResultBatch_MoreResultsType `protobuf:"varint,5,opt,name=more_results,json=moreResults,proto3,enum=google.datastore.v1beta3.QueryResultBatch_MoreResultsType" json:"more_results,omitempty"` + // The version number of the snapshot this batch was returned from. + // This applies to the range of results from the query's `start_cursor` (or + // the beginning of the query if no cursor was given) to this batch's + // `end_cursor` (not the query's `end_cursor`). + // + // In a single transaction, subsequent query result batches for the same query + // can have a greater snapshot version number. Each batch's snapshot version + // is valid for all preceding batches. + // The value will be zero for eventually consistent queries. + SnapshotVersion int64 `protobuf:"varint,7,opt,name=snapshot_version,json=snapshotVersion,proto3" json:"snapshot_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResultBatch) Reset() { *m = QueryResultBatch{} } +func (m *QueryResultBatch) String() string { return proto.CompactTextString(m) } +func (*QueryResultBatch) ProtoMessage() {} +func (*QueryResultBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_query_74b0994032fcddf6, []int{11} +} +func (m *QueryResultBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResultBatch.Unmarshal(m, b) +} +func (m *QueryResultBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResultBatch.Marshal(b, m, deterministic) +} +func (dst *QueryResultBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResultBatch.Merge(dst, src) +} +func (m *QueryResultBatch) XXX_Size() int { + return xxx_messageInfo_QueryResultBatch.Size(m) +} +func (m *QueryResultBatch) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResultBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResultBatch proto.InternalMessageInfo + +func (m *QueryResultBatch) GetSkippedResults() int32 { + if m != nil { + return m.SkippedResults + } + return 0 +} + +func (m *QueryResultBatch) GetSkippedCursor() []byte { + if m != nil { + return m.SkippedCursor + } + return nil +} + +func (m *QueryResultBatch) GetEntityResultType() EntityResult_ResultType { + if m != nil { + return m.EntityResultType + } + return EntityResult_RESULT_TYPE_UNSPECIFIED +} + +func (m *QueryResultBatch) GetEntityResults() []*EntityResult { + if m != nil { + return m.EntityResults + } + return nil +} + +func (m *QueryResultBatch) GetEndCursor() []byte { + if m != nil { + return m.EndCursor + } + return nil +} + +func (m *QueryResultBatch) GetMoreResults() QueryResultBatch_MoreResultsType { + if m != nil { + return m.MoreResults + } + return QueryResultBatch_MORE_RESULTS_TYPE_UNSPECIFIED +} + +func (m *QueryResultBatch) GetSnapshotVersion() int64 { + if m != nil { + return m.SnapshotVersion + } + return 0 +} + +func init() { + proto.RegisterType((*EntityResult)(nil), "google.datastore.v1beta3.EntityResult") + proto.RegisterType((*Query)(nil), "google.datastore.v1beta3.Query") + proto.RegisterType((*KindExpression)(nil), "google.datastore.v1beta3.KindExpression") + proto.RegisterType((*PropertyReference)(nil), "google.datastore.v1beta3.PropertyReference") + proto.RegisterType((*Projection)(nil), "google.datastore.v1beta3.Projection") + proto.RegisterType((*PropertyOrder)(nil), "google.datastore.v1beta3.PropertyOrder") + proto.RegisterType((*Filter)(nil), "google.datastore.v1beta3.Filter") + 
proto.RegisterType((*CompositeFilter)(nil), "google.datastore.v1beta3.CompositeFilter") + proto.RegisterType((*PropertyFilter)(nil), "google.datastore.v1beta3.PropertyFilter") + proto.RegisterType((*GqlQuery)(nil), "google.datastore.v1beta3.GqlQuery") + proto.RegisterMapType((map[string]*GqlQueryParameter)(nil), "google.datastore.v1beta3.GqlQuery.NamedBindingsEntry") + proto.RegisterType((*GqlQueryParameter)(nil), "google.datastore.v1beta3.GqlQueryParameter") + proto.RegisterType((*QueryResultBatch)(nil), "google.datastore.v1beta3.QueryResultBatch") + proto.RegisterEnum("google.datastore.v1beta3.EntityResult_ResultType", EntityResult_ResultType_name, EntityResult_ResultType_value) + proto.RegisterEnum("google.datastore.v1beta3.PropertyOrder_Direction", PropertyOrder_Direction_name, PropertyOrder_Direction_value) + proto.RegisterEnum("google.datastore.v1beta3.CompositeFilter_Operator", CompositeFilter_Operator_name, CompositeFilter_Operator_value) + proto.RegisterEnum("google.datastore.v1beta3.PropertyFilter_Operator", PropertyFilter_Operator_name, PropertyFilter_Operator_value) + proto.RegisterEnum("google.datastore.v1beta3.QueryResultBatch_MoreResultsType", QueryResultBatch_MoreResultsType_name, QueryResultBatch_MoreResultsType_value) +} + +func init() { + proto.RegisterFile("google/datastore/v1beta3/query.proto", fileDescriptor_query_74b0994032fcddf6) +} + +var fileDescriptor_query_74b0994032fcddf6 = []byte{ + // 1323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xcb, 0x6e, 0xdb, 0x46, + 0x14, 0x35, 0xa9, 0x87, 0xa5, 0xab, 0x17, 0x33, 0x69, 0x53, 0xc6, 0x79, 0xd4, 0x21, 0x92, 0x46, + 0x41, 0x51, 0x09, 0x56, 0x10, 0x34, 0x48, 0xdb, 0x85, 0x1e, 0xb4, 0xad, 0x46, 0x16, 0x95, 0x91, + 0x6c, 0x20, 0x85, 0x0b, 0x82, 0x96, 0xc6, 0x0a, 0x1b, 0x8a, 0x64, 0xc8, 0x71, 0x12, 0x7f, 0x48, + 0x81, 0x7e, 0x43, 0x77, 0xfd, 0x83, 0x2e, 0xba, 0x2a, 0xd0, 0x6d, 0xb7, 0xfd, 0x80, 0x6e, 0xfa, + 0x07, 0x2d, 0x38, 0x33, 0xd4, 0xcb, 0x51, 0xe4, 0x02, 0xd9, 0x69, 0xee, 0x9c, 0x73, 0xee, 0xcc, + 0xe1, 0x9d, 0x99, 0x2b, 0xb8, 0x3b, 0xf6, 0xbc, 0xb1, 0x43, 0xaa, 0x23, 0x8b, 0x5a, 0x21, 0xf5, + 0x02, 0x52, 0x7d, 0xbd, 0x73, 0x42, 0xa8, 0xf5, 0xb0, 0xfa, 0xea, 0x8c, 0x04, 0xe7, 0x15, 0x3f, + 0xf0, 0xa8, 0x87, 0x54, 0x8e, 0xaa, 0x4c, 0x51, 0x15, 0x81, 0xda, 0xba, 0x29, 0xf8, 0x96, 0x6f, + 0x57, 0x2d, 0xd7, 0xf5, 0xa8, 0x45, 0x6d, 0xcf, 0x0d, 0x39, 0x6f, 0xeb, 0xde, 0x4a, 0x75, 0xe2, + 0x52, 0x9b, 0x0a, 0xf9, 0xad, 0xdb, 0x02, 0xc6, 0x46, 0x27, 0x67, 0xa7, 0xd5, 0x37, 0x81, 0xe5, + 0xfb, 0x24, 0x88, 0x65, 0x44, 0xfa, 0x2a, 0x3d, 0xf7, 0x49, 0xd5, 0xb1, 0xa8, 0xe3, 0x8e, 0xf9, + 0x8c, 0xf6, 0x87, 0x04, 0x79, 0x9d, 0x49, 0x61, 0x12, 0x9e, 0x39, 0x14, 0x3d, 0x86, 0x34, 0x97, + 0x56, 0xa5, 0x6d, 0xa9, 0x9c, 0xab, 0x6d, 0x57, 0x56, 0x2d, 0xbd, 0x22, 0x78, 0x02, 0x8f, 0x54, + 0xd8, 0x7c, 0x4d, 0x82, 0xd0, 0xf6, 0x5c, 0x35, 0xb9, 0x2d, 0x95, 0x13, 0x38, 0x1e, 0xa2, 0x6b, + 0x90, 0x1e, 0x9e, 0x05, 0xa1, 0x17, 0xa8, 0x89, 0x6d, 0xa9, 0x9c, 0xc7, 0x62, 0xa4, 0x3d, 0x03, + 0xe0, 0x59, 0x07, 0xe7, 0x3e, 0x41, 0x37, 0xe0, 0x13, 0xac, 0xf7, 0x0f, 0x3b, 0x03, 0x73, 0xf0, + 0xbc, 0xa7, 0x9b, 0x87, 0xdd, 0x7e, 0x4f, 0x6f, 0xb6, 0x77, 0xdb, 0x7a, 0x4b, 0xd9, 0x40, 0x19, + 0x48, 0xee, 0x1e, 0x76, 0x3a, 0x8a, 0x84, 0x8a, 0x00, 0x3d, 0x6c, 0x7c, 0xab, 0x37, 0x07, 0x6d, + 0xa3, 0xab, 0xc8, 0x28, 0x0f, 0x99, 0xa7, 0xfa, 0x73, 0xd3, 0xe8, 0x76, 0x9e, 0x2b, 0x09, 0xed, + 0xaf, 0x04, 0xa4, 0x9e, 0x45, 0xc6, 0xa3, 0x16, 0x80, 0x1f, 0x78, 0x3f, 0x90, 0x61, 0xe4, 0xa7, + 0x2a, 0x6f, 
0x27, 0xca, 0xb9, 0xda, 0xdd, 0xd5, 0x9b, 0xe9, 0x4d, 0xb1, 0x78, 0x8e, 0x87, 0xbe, + 0x86, 0xe4, 0x4b, 0xdb, 0x1d, 0xa9, 0x09, 0xc6, 0x2f, 0xaf, 0xe6, 0x3f, 0xb5, 0xdd, 0x91, 0xfe, + 0xd6, 0x0f, 0x48, 0x18, 0x6d, 0x19, 0x33, 0x56, 0x64, 0xe6, 0xa9, 0xed, 0x50, 0x12, 0x30, 0x47, + 0xde, 0x6b, 0xe6, 0x2e, 0xc3, 0x61, 0x81, 0x47, 0xdf, 0x40, 0xca, 0x0b, 0x46, 0x24, 0x50, 0x53, + 0x2c, 0xf1, 0xfd, 0xf7, 0x2e, 0xdc, 0x27, 0x01, 0x3d, 0x37, 0x22, 0x38, 0xe6, 0x2c, 0xd4, 0x81, + 0xdc, 0xc8, 0x0e, 0xa9, 0xed, 0x0e, 0xa9, 0xe9, 0xb9, 0x6a, 0x9a, 0x89, 0x7c, 0xbe, 0x5e, 0x04, + 0x93, 0x53, 0x12, 0x10, 0x77, 0x48, 0x30, 0xc4, 0x7c, 0xc3, 0x45, 0x77, 0x20, 0x1f, 0x52, 0x2b, + 0xa0, 0xa6, 0xf8, 0x8a, 0x9b, 0xec, 0x2b, 0xe6, 0x58, 0xac, 0xc9, 0x42, 0xe8, 0x16, 0x00, 0x71, + 0x47, 0x31, 0x20, 0xc3, 0x00, 0x59, 0xe2, 0x8e, 0xc4, 0xf4, 0x35, 0x48, 0x7b, 0xa7, 0xa7, 0x21, + 0xa1, 0x2a, 0x6c, 0x4b, 0xe5, 0x14, 0x16, 0x23, 0xb4, 0x03, 0x29, 0xc7, 0x9e, 0xd8, 0x54, 0xcd, + 0x33, 0x7f, 0x6e, 0xc4, 0x2b, 0x8c, 0x0b, 0xb9, 0xd2, 0x76, 0xe9, 0xc3, 0xda, 0x91, 0xe5, 0x9c, + 0x11, 0xcc, 0x91, 0xda, 0x5d, 0x28, 0x2e, 0x7a, 0x8d, 0x10, 0x24, 0x5d, 0x6b, 0x42, 0x58, 0xc1, + 0x66, 0x31, 0xfb, 0xad, 0xdd, 0x87, 0x2b, 0x17, 0xf6, 0x34, 0x05, 0xca, 0x73, 0xc0, 0x43, 0x80, + 0xd9, 0xa7, 0x47, 0x7b, 0x90, 0xf1, 0x05, 0x4d, 0xd4, 0xff, 0xff, 0x32, 0x6d, 0x4a, 0xd6, 0xfe, + 0x91, 0xa0, 0xb0, 0xf0, 0x65, 0x3e, 0x98, 0x34, 0x32, 0x20, 0x3b, 0xb2, 0x83, 0x69, 0x5d, 0x4b, + 0xe5, 0x62, 0x6d, 0xe7, 0x92, 0xe5, 0x51, 0x69, 0xc5, 0x44, 0x3c, 0xd3, 0xd0, 0x74, 0xc8, 0x4e, + 0xe3, 0xe8, 0x3a, 0x7c, 0xdc, 0x6a, 0x63, 0x7e, 0xba, 0x96, 0xce, 0x60, 0x01, 0xb2, 0xf5, 0x7e, + 0x53, 0xef, 0xb6, 0xda, 0xdd, 0x3d, 0x7e, 0x10, 0x5b, 0xfa, 0x74, 0x2c, 0x6b, 0xbf, 0x49, 0x90, + 0xe6, 0x55, 0x8c, 0x8e, 0x40, 0x19, 0x7a, 0x13, 0xdf, 0x0b, 0x6d, 0x4a, 0x4c, 0x71, 0x02, 0xf8, + 0x9e, 0x1f, 0xac, 0x5e, 0x69, 0x33, 0x66, 0x70, 0x91, 0xfd, 0x0d, 0x5c, 0x1a, 0x2e, 0x86, 0x50, + 0x1f, 0x4a, 0xb1, 0x0d, 0xb1, 0xac, 0xcc, 0x64, 0xcb, 0xeb, 0x0d, 0x98, 0xaa, 0x16, 0xfd, 0x85, + 0x48, 0xa3, 0x00, 0x39, 0xae, 0x65, 0x46, 0xd7, 0xa3, 0xf6, 0xab, 0x04, 0xa5, 0xa5, 0xa5, 0xa0, + 0x06, 0xc8, 0x9e, 0xcf, 0x76, 0x50, 0xac, 0xd5, 0x2e, 0xbd, 0x83, 0x8a, 0xe1, 0x93, 0xc0, 0xa2, + 0x5e, 0x80, 0x65, 0xcf, 0x47, 0x4f, 0x60, 0x93, 0xa7, 0x09, 0xc5, 0x65, 0xb4, 0xfe, 0x32, 0x88, + 0x09, 0xda, 0x17, 0x90, 0x89, 0xb5, 0x90, 0x0a, 0x1f, 0x19, 0x3d, 0x1d, 0xd7, 0x07, 0x06, 0x5e, + 0xfa, 0x3e, 0x9b, 0x90, 0xa8, 0x77, 0x5b, 0x8a, 0xa4, 0xfd, 0x2d, 0x43, 0x71, 0x71, 0xdb, 0x1f, + 0xae, 0xfa, 0xea, 0xcc, 0x8a, 0x4b, 0x97, 0xdd, 0xbb, 0x9c, 0x78, 0x04, 0xa9, 0xd7, 0xd1, 0x89, + 0x66, 0xaf, 0x41, 0xae, 0xf6, 0xe9, 0x6a, 0x15, 0x71, 0xf0, 0x19, 0x5a, 0xfb, 0x51, 0xba, 0x94, + 0x0b, 0x05, 0xc8, 0x76, 0xf4, 0x7e, 0xdf, 0x1c, 0xec, 0xd7, 0xbb, 0x8a, 0x84, 0xae, 0x01, 0x9a, + 0x0e, 0x4d, 0x03, 0x9b, 0xfa, 0xb3, 0xc3, 0x7a, 0x47, 0x91, 0x91, 0x02, 0xf9, 0x3d, 0xac, 0xd7, + 0x07, 0x3a, 0xe6, 0xc8, 0x44, 0x54, 0xf9, 0xf3, 0x91, 0x19, 0x38, 0x89, 0xb2, 0x90, 0xe2, 0x3f, + 0x53, 0x11, 0x6f, 0xbf, 0xde, 0x37, 0xeb, 0xdd, 0xa6, 0xde, 0x1f, 0x18, 0x58, 0xc9, 0x69, 0xff, + 0xca, 0x90, 0xd9, 0x7b, 0xe5, 0xf0, 0x57, 0xe7, 0x0e, 0xe4, 0xd9, 0xbb, 0x6f, 0x86, 0x34, 0xb0, + 0xdd, 0xb1, 0xb8, 0x93, 0x72, 0x2c, 0xd6, 0x67, 0x21, 0x74, 0x0f, 0x8a, 0x96, 0xe3, 0x78, 0x6f, + 0x4c, 0xc7, 0xa6, 0x24, 0xb0, 0x9c, 0x90, 0xb9, 0x99, 0xc1, 0x05, 0x16, 0xed, 0x88, 0x20, 0x3a, + 0x86, 0x62, 0x74, 0x41, 0x8d, 0xcc, 0x13, 0xdb, 0x1d, 0xd9, 0xee, 0x38, 0x14, 0x4f, 0xc1, 0xa3, + 0xd5, 0x76, 0xc5, 0xab, 0xa8, 0x74, 
0x23, 0x62, 0x43, 0xf0, 0x74, 0x97, 0x06, 0xe7, 0xb8, 0xe0, + 0xce, 0xc7, 0xd0, 0x31, 0x5c, 0x65, 0xa5, 0x6a, 0x7b, 0xae, 0xe5, 0xcc, 0x52, 0x24, 0xd7, 0x3d, + 0x14, 0x71, 0x8a, 0x9e, 0x15, 0x58, 0x13, 0x12, 0x15, 0x29, 0x9a, 0xe9, 0xc4, 0xea, 0x5b, 0x13, + 0x40, 0x17, 0x97, 0x80, 0x14, 0x48, 0xbc, 0x24, 0xe7, 0xc2, 0x92, 0xe8, 0x27, 0xaa, 0xc7, 0x95, + 0x20, 0xaf, 0x2b, 0xc9, 0x8b, 0x79, 0x39, 0xf3, 0x89, 0xfc, 0x58, 0xd2, 0xde, 0xc2, 0x95, 0x0b, + 0xf3, 0xe8, 0xcb, 0x45, 0xed, 0x75, 0x55, 0xb6, 0xbf, 0x21, 0x14, 0x91, 0xba, 0xd8, 0xad, 0xec, + 0x6f, 0xc4, 0xfd, 0x4a, 0x43, 0x81, 0xa2, 0x1f, 0xeb, 0xf3, 0xcb, 0xe2, 0xcf, 0x24, 0x28, 0x2c, + 0x2f, 0xef, 0x63, 0x1a, 0x16, 0x1d, 0xbe, 0x40, 0xf7, 0xa1, 0x14, 0xbe, 0xb4, 0x7d, 0x9f, 0x8c, + 0xcc, 0x80, 0x85, 0x43, 0x35, 0xcd, 0x5e, 0xbd, 0xa2, 0x08, 0x73, 0x70, 0x18, 0x55, 0x42, 0x0c, + 0x5c, 0xe8, 0x8f, 0x0a, 0x22, 0x2a, 0x1e, 0x4f, 0x13, 0x10, 0x6f, 0xb1, 0x84, 0x1c, 0x4b, 0x2d, + 0x6e, 0xa3, 0x9d, 0xb5, 0xed, 0x19, 0xa3, 0x54, 0x66, 0x7d, 0x16, 0x56, 0xc8, 0xdc, 0x04, 0xeb, + 0xbc, 0x0e, 0xa0, 0xb8, 0x90, 0x20, 0xbe, 0xa1, 0x3e, 0xbb, 0x9c, 0x38, 0x2e, 0xcc, 0x2b, 0x86, + 0x4b, 0xbd, 0x40, 0x72, 0xb9, 0x17, 0xf8, 0x1e, 0xf2, 0x13, 0x2f, 0x20, 0xd3, 0x5c, 0x29, 0xb6, + 0x91, 0x27, 0xab, 0x73, 0x2d, 0x1b, 0x5c, 0x39, 0xf0, 0x02, 0x22, 0x92, 0xb1, 0x1d, 0xe5, 0x26, + 0xb3, 0x00, 0x7a, 0x00, 0x4a, 0xe8, 0x5a, 0x7e, 0xf8, 0xc2, 0xa3, 0x66, 0xdc, 0x8f, 0x6e, 0xb2, + 0x7e, 0xb4, 0x14, 0xc7, 0x8f, 0x78, 0x58, 0xfb, 0x49, 0x82, 0xd2, 0x92, 0x16, 0xba, 0x03, 0xb7, + 0x0e, 0x0c, 0xac, 0x9b, 0xbc, 0x15, 0xed, 0xbf, 0xab, 0x17, 0x55, 0x20, 0xdf, 0x35, 0x06, 0xe6, + 0x6e, 0xbb, 0xdb, 0xee, 0xef, 0xeb, 0x2d, 0x45, 0x42, 0x37, 0x41, 0x5d, 0x20, 0xd5, 0x77, 0xa3, + 0x5b, 0xa4, 0xd3, 0x3e, 0x68, 0x0f, 0x14, 0x19, 0xdd, 0x82, 0xeb, 0xef, 0x98, 0x6d, 0x1e, 0xe2, + 0xbe, 0x81, 0x95, 0x24, 0xba, 0x0a, 0xa5, 0xae, 0x61, 0xce, 0x23, 0x94, 0x44, 0xe3, 0x17, 0x09, + 0x6e, 0x0e, 0xbd, 0xc9, 0x4a, 0x53, 0x1a, 0xc0, 0xcb, 0x3d, 0xea, 0x93, 0x7a, 0xd2, 0x77, 0x75, + 0x81, 0x1b, 0x7b, 0x8e, 0xe5, 0x8e, 0x2b, 0x5e, 0x30, 0xae, 0x8e, 0x89, 0xcb, 0xba, 0xa8, 0x2a, + 0x9f, 0xb2, 0x7c, 0x3b, 0xbc, 0xf8, 0x37, 0xe2, 0xab, 0x69, 0xe4, 0x67, 0xf9, 0xf6, 0x1e, 0xd7, + 0x68, 0x3a, 0xde, 0xd9, 0xa8, 0xd2, 0x9a, 0x66, 0x3c, 0xda, 0x69, 0x44, 0xd0, 0xdf, 0x63, 0xc0, + 0x31, 0x03, 0x1c, 0x4f, 0x01, 0xc7, 0x47, 0x5c, 0xeb, 0x24, 0xcd, 0xf2, 0x3d, 0xfc, 0x2f, 0x00, + 0x00, 0xff, 0xff, 0x61, 0xdf, 0x90, 0xd9, 0x13, 0x0d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_events.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_events.pb.go new file mode 100644 index 0000000..01a4cbe --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_events.pb.go @@ -0,0 +1,1013 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/build/v1/build_events.proto + +package build // import "google.golang.org/genproto/googleapis/devtools/build/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of console output stream. +type ConsoleOutputStream int32 + +const ( + // Unspecified or unknown. + ConsoleOutputStream_UNKNOWN ConsoleOutputStream = 0 + // Normal output stream. + ConsoleOutputStream_STDOUT ConsoleOutputStream = 1 + // Error output stream. + ConsoleOutputStream_STDERR ConsoleOutputStream = 2 +) + +var ConsoleOutputStream_name = map[int32]string{ + 0: "UNKNOWN", + 1: "STDOUT", + 2: "STDERR", +} +var ConsoleOutputStream_value = map[string]int32{ + "UNKNOWN": 0, + "STDOUT": 1, + "STDERR": 2, +} + +func (x ConsoleOutputStream) String() string { + return proto.EnumName(ConsoleOutputStream_name, int32(x)) +} +func (ConsoleOutputStream) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0} +} + +// How did the event stream finish. +type BuildEvent_BuildComponentStreamFinished_FinishType int32 + +const ( + // Unknown or unspecified; callers should never set this value. + BuildEvent_BuildComponentStreamFinished_FINISH_TYPE_UNSPECIFIED BuildEvent_BuildComponentStreamFinished_FinishType = 0 + // Set by the event publisher to indicate a build event stream is + // finished. + BuildEvent_BuildComponentStreamFinished_FINISHED BuildEvent_BuildComponentStreamFinished_FinishType = 1 + // Set by the WatchBuild RPC server when the publisher of a build event + // stream stops publishing events without publishing a + // BuildComponentStreamFinished event whose type equals FINISHED. + BuildEvent_BuildComponentStreamFinished_EXPIRED BuildEvent_BuildComponentStreamFinished_FinishType = 2 +) + +var BuildEvent_BuildComponentStreamFinished_FinishType_name = map[int32]string{ + 0: "FINISH_TYPE_UNSPECIFIED", + 1: "FINISHED", + 2: "EXPIRED", +} +var BuildEvent_BuildComponentStreamFinished_FinishType_value = map[string]int32{ + "FINISH_TYPE_UNSPECIFIED": 0, + "FINISHED": 1, + "EXPIRED": 2, +} + +func (x BuildEvent_BuildComponentStreamFinished_FinishType) String() string { + return proto.EnumName(BuildEvent_BuildComponentStreamFinished_FinishType_name, int32(x)) +} +func (BuildEvent_BuildComponentStreamFinished_FinishType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 5, 0} +} + +// Which build component generates this event stream. Each build component +// may generate one event stream. +type StreamId_BuildComponent int32 + +const ( + // Unknown or unspecified; callers should never set this value. + StreamId_UNKNOWN_COMPONENT StreamId_BuildComponent = 0 + // A component that coordinates builds. + StreamId_CONTROLLER StreamId_BuildComponent = 1 + // A component that runs executables needed to complete a build. + StreamId_WORKER StreamId_BuildComponent = 2 + // A component that builds something. 
+ StreamId_TOOL StreamId_BuildComponent = 3 +) + +var StreamId_BuildComponent_name = map[int32]string{ + 0: "UNKNOWN_COMPONENT", + 1: "CONTROLLER", + 2: "WORKER", + 3: "TOOL", +} +var StreamId_BuildComponent_value = map[string]int32{ + "UNKNOWN_COMPONENT": 0, + "CONTROLLER": 1, + "WORKER": 2, + "TOOL": 3, +} + +func (x StreamId_BuildComponent) String() string { + return proto.EnumName(StreamId_BuildComponent_name, int32(x)) +} +func (StreamId_BuildComponent) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{1, 0} +} + +// An event representing some state change that occurred in the build. This +// message does not include field for uniquely identifying an event. +type BuildEvent struct { + // The timestamp of this event. + EventTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` + // ////////////////////////////////////////////////////////////////////////// + // Events that indicate a state change of a build request in the build + // queue. + // + // Types that are valid to be assigned to Event: + // *BuildEvent_InvocationAttemptStarted_ + // *BuildEvent_InvocationAttemptFinished_ + // *BuildEvent_BuildEnqueued_ + // *BuildEvent_BuildFinished_ + // *BuildEvent_ConsoleOutput_ + // *BuildEvent_ComponentStreamFinished + // *BuildEvent_BazelEvent + // *BuildEvent_BuildExecutionEvent + // *BuildEvent_SourceFetchEvent + Event isBuildEvent_Event `protobuf_oneof:"event"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent) Reset() { *m = BuildEvent{} } +func (m *BuildEvent) String() string { return proto.CompactTextString(m) } +func (*BuildEvent) ProtoMessage() {} +func (*BuildEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0} +} +func (m *BuildEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent.Unmarshal(m, b) +} +func (m *BuildEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent.Marshal(b, m, deterministic) +} +func (dst *BuildEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent.Merge(dst, src) +} +func (m *BuildEvent) XXX_Size() int { + return xxx_messageInfo_BuildEvent.Size(m) +} +func (m *BuildEvent) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent proto.InternalMessageInfo + +func (m *BuildEvent) GetEventTime() *timestamp.Timestamp { + if m != nil { + return m.EventTime + } + return nil +} + +type isBuildEvent_Event interface { + isBuildEvent_Event() +} + +type BuildEvent_InvocationAttemptStarted_ struct { + InvocationAttemptStarted *BuildEvent_InvocationAttemptStarted `protobuf:"bytes,51,opt,name=invocation_attempt_started,json=invocationAttemptStarted,proto3,oneof"` +} + +type BuildEvent_InvocationAttemptFinished_ struct { + InvocationAttemptFinished *BuildEvent_InvocationAttemptFinished `protobuf:"bytes,52,opt,name=invocation_attempt_finished,json=invocationAttemptFinished,proto3,oneof"` +} + +type BuildEvent_BuildEnqueued_ struct { + BuildEnqueued *BuildEvent_BuildEnqueued `protobuf:"bytes,53,opt,name=build_enqueued,json=buildEnqueued,proto3,oneof"` +} + +type BuildEvent_BuildFinished_ struct { + BuildFinished *BuildEvent_BuildFinished `protobuf:"bytes,55,opt,name=build_finished,json=buildFinished,proto3,oneof"` +} + +type BuildEvent_ConsoleOutput_ struct { + ConsoleOutput *BuildEvent_ConsoleOutput 
`protobuf:"bytes,56,opt,name=console_output,json=consoleOutput,proto3,oneof"` +} + +type BuildEvent_ComponentStreamFinished struct { + ComponentStreamFinished *BuildEvent_BuildComponentStreamFinished `protobuf:"bytes,59,opt,name=component_stream_finished,json=componentStreamFinished,proto3,oneof"` +} + +type BuildEvent_BazelEvent struct { + BazelEvent *any.Any `protobuf:"bytes,60,opt,name=bazel_event,json=bazelEvent,proto3,oneof"` +} + +type BuildEvent_BuildExecutionEvent struct { + BuildExecutionEvent *any.Any `protobuf:"bytes,61,opt,name=build_execution_event,json=buildExecutionEvent,proto3,oneof"` +} + +type BuildEvent_SourceFetchEvent struct { + SourceFetchEvent *any.Any `protobuf:"bytes,62,opt,name=source_fetch_event,json=sourceFetchEvent,proto3,oneof"` +} + +func (*BuildEvent_InvocationAttemptStarted_) isBuildEvent_Event() {} + +func (*BuildEvent_InvocationAttemptFinished_) isBuildEvent_Event() {} + +func (*BuildEvent_BuildEnqueued_) isBuildEvent_Event() {} + +func (*BuildEvent_BuildFinished_) isBuildEvent_Event() {} + +func (*BuildEvent_ConsoleOutput_) isBuildEvent_Event() {} + +func (*BuildEvent_ComponentStreamFinished) isBuildEvent_Event() {} + +func (*BuildEvent_BazelEvent) isBuildEvent_Event() {} + +func (*BuildEvent_BuildExecutionEvent) isBuildEvent_Event() {} + +func (*BuildEvent_SourceFetchEvent) isBuildEvent_Event() {} + +func (m *BuildEvent) GetEvent() isBuildEvent_Event { + if m != nil { + return m.Event + } + return nil +} + +func (m *BuildEvent) GetInvocationAttemptStarted() *BuildEvent_InvocationAttemptStarted { + if x, ok := m.GetEvent().(*BuildEvent_InvocationAttemptStarted_); ok { + return x.InvocationAttemptStarted + } + return nil +} + +func (m *BuildEvent) GetInvocationAttemptFinished() *BuildEvent_InvocationAttemptFinished { + if x, ok := m.GetEvent().(*BuildEvent_InvocationAttemptFinished_); ok { + return x.InvocationAttemptFinished + } + return nil +} + +func (m *BuildEvent) GetBuildEnqueued() *BuildEvent_BuildEnqueued { + if x, ok := m.GetEvent().(*BuildEvent_BuildEnqueued_); ok { + return x.BuildEnqueued + } + return nil +} + +func (m *BuildEvent) GetBuildFinished() *BuildEvent_BuildFinished { + if x, ok := m.GetEvent().(*BuildEvent_BuildFinished_); ok { + return x.BuildFinished + } + return nil +} + +func (m *BuildEvent) GetConsoleOutput() *BuildEvent_ConsoleOutput { + if x, ok := m.GetEvent().(*BuildEvent_ConsoleOutput_); ok { + return x.ConsoleOutput + } + return nil +} + +func (m *BuildEvent) GetComponentStreamFinished() *BuildEvent_BuildComponentStreamFinished { + if x, ok := m.GetEvent().(*BuildEvent_ComponentStreamFinished); ok { + return x.ComponentStreamFinished + } + return nil +} + +func (m *BuildEvent) GetBazelEvent() *any.Any { + if x, ok := m.GetEvent().(*BuildEvent_BazelEvent); ok { + return x.BazelEvent + } + return nil +} + +func (m *BuildEvent) GetBuildExecutionEvent() *any.Any { + if x, ok := m.GetEvent().(*BuildEvent_BuildExecutionEvent); ok { + return x.BuildExecutionEvent + } + return nil +} + +func (m *BuildEvent) GetSourceFetchEvent() *any.Any { + if x, ok := m.GetEvent().(*BuildEvent_SourceFetchEvent); ok { + return x.SourceFetchEvent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BuildEvent) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BuildEvent_OneofMarshaler, _BuildEvent_OneofUnmarshaler, _BuildEvent_OneofSizer, []interface{}{ + (*BuildEvent_InvocationAttemptStarted_)(nil), + (*BuildEvent_InvocationAttemptFinished_)(nil), + (*BuildEvent_BuildEnqueued_)(nil), + (*BuildEvent_BuildFinished_)(nil), + (*BuildEvent_ConsoleOutput_)(nil), + (*BuildEvent_ComponentStreamFinished)(nil), + (*BuildEvent_BazelEvent)(nil), + (*BuildEvent_BuildExecutionEvent)(nil), + (*BuildEvent_SourceFetchEvent)(nil), + } +} + +func _BuildEvent_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BuildEvent) + // event + switch x := m.Event.(type) { + case *BuildEvent_InvocationAttemptStarted_: + b.EncodeVarint(51<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InvocationAttemptStarted); err != nil { + return err + } + case *BuildEvent_InvocationAttemptFinished_: + b.EncodeVarint(52<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InvocationAttemptFinished); err != nil { + return err + } + case *BuildEvent_BuildEnqueued_: + b.EncodeVarint(53<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildEnqueued); err != nil { + return err + } + case *BuildEvent_BuildFinished_: + b.EncodeVarint(55<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildFinished); err != nil { + return err + } + case *BuildEvent_ConsoleOutput_: + b.EncodeVarint(56<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConsoleOutput); err != nil { + return err + } + case *BuildEvent_ComponentStreamFinished: + b.EncodeVarint(59<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ComponentStreamFinished); err != nil { + return err + } + case *BuildEvent_BazelEvent: + b.EncodeVarint(60<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BazelEvent); err != nil { + return err + } + case *BuildEvent_BuildExecutionEvent: + b.EncodeVarint(61<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildExecutionEvent); err != nil { + return err + } + case *BuildEvent_SourceFetchEvent: + b.EncodeVarint(62<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SourceFetchEvent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BuildEvent.Event has unexpected type %T", x) + } + return nil +} + +func _BuildEvent_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BuildEvent) + switch tag { + case 51: // event.invocation_attempt_started + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_InvocationAttemptStarted) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_InvocationAttemptStarted_{msg} + return true, err + case 52: // event.invocation_attempt_finished + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_InvocationAttemptFinished) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_InvocationAttemptFinished_{msg} + return true, err + case 53: // event.build_enqueued + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_BuildEnqueued) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_BuildEnqueued_{msg} + return true, err + case 55: // event.build_finished + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_BuildFinished) + err := b.DecodeMessage(msg) + m.Event = 
&BuildEvent_BuildFinished_{msg} + return true, err + case 56: // event.console_output + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_ConsoleOutput) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_ConsoleOutput_{msg} + return true, err + case 59: // event.component_stream_finished + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildEvent_BuildComponentStreamFinished) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_ComponentStreamFinished{msg} + return true, err + case 60: // event.bazel_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_BazelEvent{msg} + return true, err + case 61: // event.build_execution_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_BuildExecutionEvent{msg} + return true, err + case 62: // event.source_fetch_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Event = &BuildEvent_SourceFetchEvent{msg} + return true, err + default: + return false, nil + } +} + +func _BuildEvent_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BuildEvent) + // event + switch x := m.Event.(type) { + case *BuildEvent_InvocationAttemptStarted_: + s := proto.Size(x.InvocationAttemptStarted) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_InvocationAttemptFinished_: + s := proto.Size(x.InvocationAttemptFinished) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_BuildEnqueued_: + s := proto.Size(x.BuildEnqueued) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_BuildFinished_: + s := proto.Size(x.BuildFinished) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_ConsoleOutput_: + s := proto.Size(x.ConsoleOutput) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_ComponentStreamFinished: + s := proto.Size(x.ComponentStreamFinished) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_BazelEvent: + s := proto.Size(x.BazelEvent) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_BuildExecutionEvent: + s := proto.Size(x.BuildExecutionEvent) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildEvent_SourceFetchEvent: + s := proto.Size(x.SourceFetchEvent) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Notification that the build system has attempted to run the build tool. +type BuildEvent_InvocationAttemptStarted struct { + // The number of the invocation attempt, starting at 1 and increasing by 1 + // for each new attempt. Can be used to determine if there is a later + // invocation attempt replacing the current one a client is processing. + AttemptNumber int64 `protobuf:"varint,1,opt,name=attempt_number,json=attemptNumber,proto3" json:"attempt_number,omitempty"` + // Additional details about the invocation. 
+ Details *any.Any `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_InvocationAttemptStarted) Reset() { *m = BuildEvent_InvocationAttemptStarted{} } +func (m *BuildEvent_InvocationAttemptStarted) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_InvocationAttemptStarted) ProtoMessage() {} +func (*BuildEvent_InvocationAttemptStarted) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 0} +} +func (m *BuildEvent_InvocationAttemptStarted) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_InvocationAttemptStarted.Unmarshal(m, b) +} +func (m *BuildEvent_InvocationAttemptStarted) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_InvocationAttemptStarted.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_InvocationAttemptStarted) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_InvocationAttemptStarted.Merge(dst, src) +} +func (m *BuildEvent_InvocationAttemptStarted) XXX_Size() int { + return xxx_messageInfo_BuildEvent_InvocationAttemptStarted.Size(m) +} +func (m *BuildEvent_InvocationAttemptStarted) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_InvocationAttemptStarted.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_InvocationAttemptStarted proto.InternalMessageInfo + +func (m *BuildEvent_InvocationAttemptStarted) GetAttemptNumber() int64 { + if m != nil { + return m.AttemptNumber + } + return 0 +} + +func (m *BuildEvent_InvocationAttemptStarted) GetDetails() *any.Any { + if m != nil { + return m.Details + } + return nil +} + +// Notification that an invocation attempt has finished. +type BuildEvent_InvocationAttemptFinished struct { + // Final status of the invocation. 
+ InvocationStatus *BuildStatus `protobuf:"bytes,3,opt,name=invocation_status,json=invocationStatus,proto3" json:"invocation_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_InvocationAttemptFinished) Reset() { *m = BuildEvent_InvocationAttemptFinished{} } +func (m *BuildEvent_InvocationAttemptFinished) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_InvocationAttemptFinished) ProtoMessage() {} +func (*BuildEvent_InvocationAttemptFinished) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 1} +} +func (m *BuildEvent_InvocationAttemptFinished) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_InvocationAttemptFinished.Unmarshal(m, b) +} +func (m *BuildEvent_InvocationAttemptFinished) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_InvocationAttemptFinished.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_InvocationAttemptFinished) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_InvocationAttemptFinished.Merge(dst, src) +} +func (m *BuildEvent_InvocationAttemptFinished) XXX_Size() int { + return xxx_messageInfo_BuildEvent_InvocationAttemptFinished.Size(m) +} +func (m *BuildEvent_InvocationAttemptFinished) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_InvocationAttemptFinished.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_InvocationAttemptFinished proto.InternalMessageInfo + +func (m *BuildEvent_InvocationAttemptFinished) GetInvocationStatus() *BuildStatus { + if m != nil { + return m.InvocationStatus + } + return nil +} + +// Notification that the build request is enqueued. +type BuildEvent_BuildEnqueued struct { + // Additional details about the Build. + Details *any.Any `protobuf:"bytes,1,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_BuildEnqueued) Reset() { *m = BuildEvent_BuildEnqueued{} } +func (m *BuildEvent_BuildEnqueued) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_BuildEnqueued) ProtoMessage() {} +func (*BuildEvent_BuildEnqueued) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 2} +} +func (m *BuildEvent_BuildEnqueued) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_BuildEnqueued.Unmarshal(m, b) +} +func (m *BuildEvent_BuildEnqueued) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_BuildEnqueued.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_BuildEnqueued) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_BuildEnqueued.Merge(dst, src) +} +func (m *BuildEvent_BuildEnqueued) XXX_Size() int { + return xxx_messageInfo_BuildEvent_BuildEnqueued.Size(m) +} +func (m *BuildEvent_BuildEnqueued) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_BuildEnqueued.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_BuildEnqueued proto.InternalMessageInfo + +func (m *BuildEvent_BuildEnqueued) GetDetails() *any.Any { + if m != nil { + return m.Details + } + return nil +} + +// Notification that the build request has finished, and no further +// invocations will occur. Note that this applies to the entire Build. +// Individual invocations trigger InvocationFinished when they finish. 
+type BuildEvent_BuildFinished struct { + // Final status of the build. + Status *BuildStatus `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_BuildFinished) Reset() { *m = BuildEvent_BuildFinished{} } +func (m *BuildEvent_BuildFinished) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_BuildFinished) ProtoMessage() {} +func (*BuildEvent_BuildFinished) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 3} +} +func (m *BuildEvent_BuildFinished) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_BuildFinished.Unmarshal(m, b) +} +func (m *BuildEvent_BuildFinished) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_BuildFinished.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_BuildFinished) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_BuildFinished.Merge(dst, src) +} +func (m *BuildEvent_BuildFinished) XXX_Size() int { + return xxx_messageInfo_BuildEvent_BuildFinished.Size(m) +} +func (m *BuildEvent_BuildFinished) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_BuildFinished.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_BuildFinished proto.InternalMessageInfo + +func (m *BuildEvent_BuildFinished) GetStatus() *BuildStatus { + if m != nil { + return m.Status + } + return nil +} + +// Textual output written to standard output or standard error. +type BuildEvent_ConsoleOutput struct { + // The output stream type. + Type ConsoleOutputStream `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.build.v1.ConsoleOutputStream" json:"type,omitempty"` + // The output stream content. 
+ // + // Types that are valid to be assigned to Output: + // *BuildEvent_ConsoleOutput_TextOutput + // *BuildEvent_ConsoleOutput_BinaryOutput + Output isBuildEvent_ConsoleOutput_Output `protobuf_oneof:"output"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_ConsoleOutput) Reset() { *m = BuildEvent_ConsoleOutput{} } +func (m *BuildEvent_ConsoleOutput) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_ConsoleOutput) ProtoMessage() {} +func (*BuildEvent_ConsoleOutput) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 4} +} +func (m *BuildEvent_ConsoleOutput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_ConsoleOutput.Unmarshal(m, b) +} +func (m *BuildEvent_ConsoleOutput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_ConsoleOutput.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_ConsoleOutput) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_ConsoleOutput.Merge(dst, src) +} +func (m *BuildEvent_ConsoleOutput) XXX_Size() int { + return xxx_messageInfo_BuildEvent_ConsoleOutput.Size(m) +} +func (m *BuildEvent_ConsoleOutput) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_ConsoleOutput.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_ConsoleOutput proto.InternalMessageInfo + +func (m *BuildEvent_ConsoleOutput) GetType() ConsoleOutputStream { + if m != nil { + return m.Type + } + return ConsoleOutputStream_UNKNOWN +} + +type isBuildEvent_ConsoleOutput_Output interface { + isBuildEvent_ConsoleOutput_Output() +} + +type BuildEvent_ConsoleOutput_TextOutput struct { + TextOutput string `protobuf:"bytes,2,opt,name=text_output,json=textOutput,proto3,oneof"` +} + +type BuildEvent_ConsoleOutput_BinaryOutput struct { + BinaryOutput []byte `protobuf:"bytes,3,opt,name=binary_output,json=binaryOutput,proto3,oneof"` +} + +func (*BuildEvent_ConsoleOutput_TextOutput) isBuildEvent_ConsoleOutput_Output() {} + +func (*BuildEvent_ConsoleOutput_BinaryOutput) isBuildEvent_ConsoleOutput_Output() {} + +func (m *BuildEvent_ConsoleOutput) GetOutput() isBuildEvent_ConsoleOutput_Output { + if m != nil { + return m.Output + } + return nil +} + +func (m *BuildEvent_ConsoleOutput) GetTextOutput() string { + if x, ok := m.GetOutput().(*BuildEvent_ConsoleOutput_TextOutput); ok { + return x.TextOutput + } + return "" +} + +func (m *BuildEvent_ConsoleOutput) GetBinaryOutput() []byte { + if x, ok := m.GetOutput().(*BuildEvent_ConsoleOutput_BinaryOutput); ok { + return x.BinaryOutput + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BuildEvent_ConsoleOutput) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BuildEvent_ConsoleOutput_OneofMarshaler, _BuildEvent_ConsoleOutput_OneofUnmarshaler, _BuildEvent_ConsoleOutput_OneofSizer, []interface{}{ + (*BuildEvent_ConsoleOutput_TextOutput)(nil), + (*BuildEvent_ConsoleOutput_BinaryOutput)(nil), + } +} + +func _BuildEvent_ConsoleOutput_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BuildEvent_ConsoleOutput) + // output + switch x := m.Output.(type) { + case *BuildEvent_ConsoleOutput_TextOutput: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TextOutput) + case *BuildEvent_ConsoleOutput_BinaryOutput: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BinaryOutput) + case nil: + default: + return fmt.Errorf("BuildEvent_ConsoleOutput.Output has unexpected type %T", x) + } + return nil +} + +func _BuildEvent_ConsoleOutput_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BuildEvent_ConsoleOutput) + switch tag { + case 2: // output.text_output + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Output = &BuildEvent_ConsoleOutput_TextOutput{x} + return true, err + case 3: // output.binary_output + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Output = &BuildEvent_ConsoleOutput_BinaryOutput{x} + return true, err + default: + return false, nil + } +} + +func _BuildEvent_ConsoleOutput_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BuildEvent_ConsoleOutput) + // output + switch x := m.Output.(type) { + case *BuildEvent_ConsoleOutput_TextOutput: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TextOutput))) + n += len(x.TextOutput) + case *BuildEvent_ConsoleOutput_BinaryOutput: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BinaryOutput))) + n += len(x.BinaryOutput) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Notification of the end of a build event stream published by a build +// component other than CONTROLLER (See StreamId.BuildComponents). +type BuildEvent_BuildComponentStreamFinished struct { + // How the event stream finished. 
+ Type BuildEvent_BuildComponentStreamFinished_FinishType `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.build.v1.BuildEvent_BuildComponentStreamFinished_FinishType" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildEvent_BuildComponentStreamFinished) Reset() { + *m = BuildEvent_BuildComponentStreamFinished{} +} +func (m *BuildEvent_BuildComponentStreamFinished) String() string { return proto.CompactTextString(m) } +func (*BuildEvent_BuildComponentStreamFinished) ProtoMessage() {} +func (*BuildEvent_BuildComponentStreamFinished) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{0, 5} +} +func (m *BuildEvent_BuildComponentStreamFinished) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildEvent_BuildComponentStreamFinished.Unmarshal(m, b) +} +func (m *BuildEvent_BuildComponentStreamFinished) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildEvent_BuildComponentStreamFinished.Marshal(b, m, deterministic) +} +func (dst *BuildEvent_BuildComponentStreamFinished) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildEvent_BuildComponentStreamFinished.Merge(dst, src) +} +func (m *BuildEvent_BuildComponentStreamFinished) XXX_Size() int { + return xxx_messageInfo_BuildEvent_BuildComponentStreamFinished.Size(m) +} +func (m *BuildEvent_BuildComponentStreamFinished) XXX_DiscardUnknown() { + xxx_messageInfo_BuildEvent_BuildComponentStreamFinished.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildEvent_BuildComponentStreamFinished proto.InternalMessageInfo + +func (m *BuildEvent_BuildComponentStreamFinished) GetType() BuildEvent_BuildComponentStreamFinished_FinishType { + if m != nil { + return m.Type + } + return BuildEvent_BuildComponentStreamFinished_FINISH_TYPE_UNSPECIFIED +} + +// Unique identifier for a build event stream. +type StreamId struct { + // The id of a Build message. + BuildId string `protobuf:"bytes,1,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"` + // The unique invocation ID within this build. + // It should be the same as {invocation} (below) during the migration. + InvocationId string `protobuf:"bytes,6,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The component that emitted this event. 
+ Component StreamId_BuildComponent `protobuf:"varint,3,opt,name=component,proto3,enum=google.devtools.build.v1.StreamId_BuildComponent" json:"component,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamId) Reset() { *m = StreamId{} } +func (m *StreamId) String() string { return proto.CompactTextString(m) } +func (*StreamId) ProtoMessage() {} +func (*StreamId) Descriptor() ([]byte, []int) { + return fileDescriptor_build_events_fb145f4342778ee6, []int{1} +} +func (m *StreamId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamId.Unmarshal(m, b) +} +func (m *StreamId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamId.Marshal(b, m, deterministic) +} +func (dst *StreamId) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamId.Merge(dst, src) +} +func (m *StreamId) XXX_Size() int { + return xxx_messageInfo_StreamId.Size(m) +} +func (m *StreamId) XXX_DiscardUnknown() { + xxx_messageInfo_StreamId.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamId proto.InternalMessageInfo + +func (m *StreamId) GetBuildId() string { + if m != nil { + return m.BuildId + } + return "" +} + +func (m *StreamId) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *StreamId) GetComponent() StreamId_BuildComponent { + if m != nil { + return m.Component + } + return StreamId_UNKNOWN_COMPONENT +} + +func init() { + proto.RegisterType((*BuildEvent)(nil), "google.devtools.build.v1.BuildEvent") + proto.RegisterType((*BuildEvent_InvocationAttemptStarted)(nil), "google.devtools.build.v1.BuildEvent.InvocationAttemptStarted") + proto.RegisterType((*BuildEvent_InvocationAttemptFinished)(nil), "google.devtools.build.v1.BuildEvent.InvocationAttemptFinished") + proto.RegisterType((*BuildEvent_BuildEnqueued)(nil), "google.devtools.build.v1.BuildEvent.BuildEnqueued") + proto.RegisterType((*BuildEvent_BuildFinished)(nil), "google.devtools.build.v1.BuildEvent.BuildFinished") + proto.RegisterType((*BuildEvent_ConsoleOutput)(nil), "google.devtools.build.v1.BuildEvent.ConsoleOutput") + proto.RegisterType((*BuildEvent_BuildComponentStreamFinished)(nil), "google.devtools.build.v1.BuildEvent.BuildComponentStreamFinished") + proto.RegisterType((*StreamId)(nil), "google.devtools.build.v1.StreamId") + proto.RegisterEnum("google.devtools.build.v1.ConsoleOutputStream", ConsoleOutputStream_name, ConsoleOutputStream_value) + proto.RegisterEnum("google.devtools.build.v1.BuildEvent_BuildComponentStreamFinished_FinishType", BuildEvent_BuildComponentStreamFinished_FinishType_name, BuildEvent_BuildComponentStreamFinished_FinishType_value) + proto.RegisterEnum("google.devtools.build.v1.StreamId_BuildComponent", StreamId_BuildComponent_name, StreamId_BuildComponent_value) +} + +func init() { + proto.RegisterFile("google/devtools/build/v1/build_events.proto", fileDescriptor_build_events_fb145f4342778ee6) +} + +var fileDescriptor_build_events_fb145f4342778ee6 = []byte{ + // 896 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x96, 0x6d, 0x6f, 0xe3, 0x44, + 0x10, 0xc7, 0xe3, 0xf6, 0x48, 0xd3, 0x69, 0x13, 0x7c, 0x7b, 0x9c, 0xce, 0xf1, 0x55, 0x02, 0x8a, + 0x2a, 0x21, 0x10, 0x8e, 0xda, 0x03, 0x1d, 0x70, 0xf4, 0x50, 0x93, 0xb8, 0x8a, 0xb9, 0x9e, 0x1d, + 0x6d, 0x5c, 0x1d, 0x0f, 0x2f, 0x82, 0x63, 0x6f, 0x73, 0x96, 0x12, 0xaf, 0x2f, 0x5e, 0x47, 0x17, + 0x24, 0x04, 0x1f, 0x84, 0xd7, 0x7c, 0x14, 0x3e, 0x0c, 
0x9f, 0x80, 0x97, 0xc8, 0xbb, 0xeb, 0xc4, + 0xb9, 0xab, 0xfb, 0x00, 0xef, 0x36, 0x33, 0xff, 0xf9, 0xcd, 0xcc, 0xee, 0x8c, 0x15, 0xf8, 0x74, + 0x4c, 0xe9, 0x78, 0x42, 0x5a, 0x01, 0x99, 0x33, 0x4a, 0x27, 0x49, 0x6b, 0x94, 0x86, 0x93, 0xa0, + 0x35, 0x3f, 0x14, 0x87, 0x21, 0x99, 0x93, 0x88, 0x25, 0x46, 0x3c, 0xa3, 0x8c, 0x22, 0x4d, 0x88, + 0x8d, 0x5c, 0x6c, 0x70, 0x8d, 0x31, 0x3f, 0xd4, 0xf7, 0x24, 0xc6, 0x8b, 0xc3, 0x96, 0x17, 0x45, + 0x94, 0x79, 0x2c, 0xa4, 0x91, 0x8c, 0xd3, 0xaf, 0x4b, 0x92, 0x30, 0x8f, 0xa5, 0xb9, 0xb8, 0x29, + 0xc5, 0xfc, 0xd7, 0x28, 0xbd, 0x68, 0x79, 0xd1, 0x42, 0xba, 0xde, 0x7f, 0xd3, 0xc5, 0xc2, 0x29, + 0x49, 0x98, 0x37, 0x8d, 0x85, 0x60, 0xff, 0x8f, 0x3a, 0x40, 0x3b, 0x43, 0x9a, 0x59, 0xd9, 0xe8, + 0x2b, 0x00, 0x5e, 0xff, 0x30, 0xd3, 0x69, 0xca, 0x07, 0xca, 0xc7, 0x3b, 0x47, 0xba, 0x21, 0x9b, + 0xc8, 0x21, 0x86, 0x9b, 0x43, 0xf0, 0x36, 0x57, 0x67, 0xbf, 0xd1, 0xaf, 0xa0, 0x87, 0xd1, 0x9c, + 0xfa, 0xbc, 0x8f, 0xa1, 0xc7, 0x18, 0x99, 0xc6, 0x2c, 0x2b, 0x74, 0xc6, 0x48, 0xa0, 0x3d, 0xe2, + 0xa8, 0x63, 0xa3, 0xec, 0x3e, 0x8c, 0x55, 0x11, 0x86, 0xb5, 0xc4, 0x9c, 0x08, 0xca, 0x40, 0x40, + 0x7a, 0x15, 0xac, 0x85, 0x25, 0x3e, 0xf4, 0xbb, 0x02, 0x0f, 0x2f, 0xc9, 0x7f, 0x11, 0x46, 0x61, + 0xf2, 0x92, 0x04, 0xda, 0xe7, 0xbc, 0x80, 0xa7, 0xff, 0xad, 0x80, 0x53, 0x49, 0xe9, 0x55, 0x70, + 0x33, 0x2c, 0x73, 0xa2, 0x9f, 0xa0, 0x21, 0x47, 0x20, 0x7a, 0x95, 0x92, 0x94, 0x04, 0xda, 0x17, + 0x3c, 0xe9, 0xd1, 0x8d, 0x92, 0x8a, 0xa3, 0x8c, 0xec, 0x55, 0x70, 0x7d, 0x54, 0x34, 0xac, 0xe0, + 0xcb, 0x8e, 0x1e, 0xdf, 0x16, 0x5e, 0xe8, 0x42, 0xc0, 0x8b, 0x95, 0xfb, 0x34, 0x4a, 0xe8, 0x84, + 0x0c, 0x69, 0xca, 0xe2, 0x94, 0x69, 0x5f, 0xde, 0x02, 0xde, 0x11, 0xa1, 0x0e, 0x8f, 0xcc, 0xe0, + 0x7e, 0xd1, 0x80, 0x7e, 0x83, 0xa6, 0x4f, 0xa7, 0x31, 0x8d, 0xb2, 0xb9, 0x4a, 0xd8, 0x8c, 0x78, + 0xd3, 0x55, 0x13, 0x4f, 0x78, 0x9e, 0x93, 0x9b, 0x37, 0xd1, 0xc9, 0x51, 0x03, 0x4e, 0x2a, 0xf4, + 0xf4, 0xc0, 0xbf, 0xdc, 0x85, 0x1e, 0xc3, 0xce, 0xc8, 0xfb, 0x85, 0x4c, 0xc4, 0x6a, 0x6a, 0xdf, + 0xf0, 0x94, 0xef, 0xbd, 0x35, 0xd5, 0x27, 0xd1, 0xa2, 0x57, 0xc1, 0xc0, 0xa5, 0x62, 0x1b, 0xbe, + 0x83, 0xfb, 0xf2, 0x41, 0x5f, 0x13, 0x3f, 0xe5, 0x73, 0x25, 0x10, 0xc7, 0x57, 0x22, 0xee, 0x89, + 0x97, 0xcb, 0x63, 0x04, 0xab, 0x0b, 0x28, 0xa1, 0xe9, 0xcc, 0x27, 0xc3, 0x0b, 0xc2, 0xfc, 0x97, + 0x12, 0xf4, 0xf4, 0x4a, 0x90, 0x2a, 0x22, 0x4e, 0xb3, 0x00, 0x4e, 0xd1, 0x5f, 0x81, 0x56, 0xb6, + 0x1d, 0xe8, 0x00, 0x1a, 0xf9, 0xd4, 0x47, 0xe9, 0x74, 0x44, 0x66, 0x7c, 0x7f, 0x37, 0x71, 0x5d, + 0x5a, 0x6d, 0x6e, 0x44, 0x06, 0x6c, 0x05, 0x84, 0x79, 0xe1, 0x24, 0xd1, 0x36, 0xca, 0xb3, 0xe3, + 0x5c, 0xa4, 0x53, 0x68, 0x96, 0xee, 0x03, 0xc2, 0x70, 0xb7, 0xb0, 0x74, 0xe2, 0xab, 0xa4, 0x6d, + 0x72, 0xec, 0xc1, 0x35, 0x6f, 0x3a, 0xe0, 0x62, 0xac, 0xae, 0xe2, 0x85, 0x45, 0xff, 0x16, 0xea, + 0x6b, 0xbb, 0x50, 0xac, 0x58, 0xb9, 0x49, 0xc5, 0xb6, 0x04, 0x2c, 0xab, 0x3c, 0x86, 0xaa, 0x2c, + 0x4d, 0xb9, 0x4d, 0x69, 0x32, 0x48, 0xff, 0x53, 0x81, 0xfa, 0xda, 0x8c, 0xa3, 0x13, 0xb8, 0xc3, + 0x16, 0xb1, 0xf8, 0x40, 0x36, 0x8e, 0x3e, 0x2b, 0xc7, 0xad, 0x85, 0x89, 0xb1, 0xc4, 0x3c, 0x14, + 0x7d, 0x08, 0x3b, 0x8c, 0xbc, 0x66, 0xf9, 0xbe, 0x65, 0x4f, 0xb1, 0x9d, 0x8d, 0x5f, 0x66, 0x94, + 0x59, 0x0e, 0xa0, 0x3e, 0x0a, 0x23, 0x6f, 0xb6, 0xc8, 0x45, 0xd9, 0xc5, 0xee, 0xf6, 0x2a, 0x78, + 0x57, 0x98, 0x85, 0xac, 0x5d, 0x83, 0xaa, 0xf0, 0xeb, 0x7f, 0x29, 0xb0, 0x77, 0xd5, 0x92, 0xa0, + 0x9f, 0xd7, 0xea, 0x3e, 0xfb, 0xdf, 0x5b, 0x67, 0x88, 0x83, 0xbb, 0x88, 0x89, 0x68, 0x6b, 0xbf, + 0x0b, 0xb0, 0xb2, 0xa1, 0x87, 0xf0, 0xe0, 0xd4, 0xb2, 0xad, 0x41, 0x6f, 0xe8, 
0xfe, 0xd0, 0x37, + 0x87, 0xe7, 0xf6, 0xa0, 0x6f, 0x76, 0xac, 0x53, 0xcb, 0xec, 0xaa, 0x15, 0xb4, 0x0b, 0x35, 0xe1, + 0x34, 0xbb, 0xaa, 0x82, 0x76, 0x60, 0xcb, 0xfc, 0xbe, 0x6f, 0x61, 0xb3, 0xab, 0x6e, 0xb4, 0xb7, + 0xe0, 0x1d, 0xbe, 0x1f, 0xfb, 0x7f, 0x2b, 0x50, 0x13, 0x29, 0xad, 0x00, 0x35, 0xa1, 0x26, 0xd6, + 0x31, 0x0c, 0x78, 0x07, 0xdb, 0x78, 0x8b, 0xff, 0xb6, 0x02, 0xf4, 0x11, 0xd4, 0x0b, 0x73, 0x18, + 0x06, 0x5a, 0x95, 0xfb, 0x77, 0x57, 0x46, 0x2b, 0x40, 0x0e, 0x6c, 0x2f, 0x3f, 0x11, 0xfc, 0x2e, + 0x1b, 0x47, 0x87, 0xe5, 0x57, 0x90, 0xa7, 0x7d, 0xe3, 0x02, 0xf0, 0x8a, 0xb1, 0xff, 0x1c, 0x1a, + 0xeb, 0x4e, 0x74, 0x1f, 0xee, 0x9e, 0xdb, 0xcf, 0x6c, 0xe7, 0x85, 0x3d, 0xec, 0x38, 0xcf, 0xfb, + 0x8e, 0x6d, 0xda, 0xae, 0x5a, 0x41, 0x0d, 0x80, 0x8e, 0x63, 0xbb, 0xd8, 0x39, 0x3b, 0x33, 0xb1, + 0xaa, 0x20, 0x80, 0xea, 0x0b, 0x07, 0x3f, 0x33, 0xb1, 0xba, 0x81, 0x6a, 0x70, 0xc7, 0x75, 0x9c, + 0x33, 0x75, 0xf3, 0x93, 0xaf, 0xe1, 0xde, 0x25, 0xf3, 0x92, 0xdd, 0x8c, 0x64, 0xaa, 0x95, 0x2c, + 0x72, 0xe0, 0x76, 0x9d, 0x73, 0x57, 0x50, 0x06, 0x6e, 0xd7, 0xc4, 0x58, 0xdd, 0x68, 0x27, 0xb0, + 0xe7, 0xd3, 0x69, 0x69, 0x37, 0xed, 0x77, 0x57, 0x2f, 0xda, 0xcf, 0x96, 0xa6, 0xaf, 0xfc, 0x78, + 0x2c, 0xc5, 0x63, 0x3a, 0xf1, 0xa2, 0xb1, 0x41, 0x67, 0xe3, 0xd6, 0x98, 0x44, 0x7c, 0xa5, 0x5a, + 0xc2, 0xe5, 0xc5, 0x61, 0xf2, 0xf6, 0x5f, 0x90, 0x27, 0xfc, 0xf0, 0x8f, 0xa2, 0x8c, 0xaa, 0x5c, + 0xfc, 0xe8, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6f, 0xcf, 0xda, 0xac, 0x13, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_status.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_status.pb.go new file mode 100644 index 0000000..9d69f8c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/build_status.pb.go @@ -0,0 +1,162 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/build/v1/build_status.proto + +package build // import "google.golang.org/genproto/googleapis/devtools/build/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The end result of the Build. +type BuildStatus_Result int32 + +const ( + // Unspecified or unknown. + BuildStatus_UNKNOWN_STATUS BuildStatus_Result = 0 + // Build was successful and tests (if requested) all pass. + BuildStatus_COMMAND_SUCCEEDED BuildStatus_Result = 1 + // Build error and/or test failure. + BuildStatus_COMMAND_FAILED BuildStatus_Result = 2 + // Unable to obtain a result due to input provided by the user. + BuildStatus_USER_ERROR BuildStatus_Result = 3 + // Unable to obtain a result due to a failure within the build system. + BuildStatus_SYSTEM_ERROR BuildStatus_Result = 4 + // Build required too many resources, such as build tool RAM. + BuildStatus_RESOURCE_EXHAUSTED BuildStatus_Result = 5 + // An invocation attempt time exceeded its deadline. 
+ BuildStatus_INVOCATION_DEADLINE_EXCEEDED BuildStatus_Result = 6 + // Build request time exceeded the request_deadline + BuildStatus_REQUEST_DEADLINE_EXCEEDED BuildStatus_Result = 8 + // The build was cancelled by a call to CancelBuild. + BuildStatus_CANCELLED BuildStatus_Result = 7 +) + +var BuildStatus_Result_name = map[int32]string{ + 0: "UNKNOWN_STATUS", + 1: "COMMAND_SUCCEEDED", + 2: "COMMAND_FAILED", + 3: "USER_ERROR", + 4: "SYSTEM_ERROR", + 5: "RESOURCE_EXHAUSTED", + 6: "INVOCATION_DEADLINE_EXCEEDED", + 8: "REQUEST_DEADLINE_EXCEEDED", + 7: "CANCELLED", +} +var BuildStatus_Result_value = map[string]int32{ + "UNKNOWN_STATUS": 0, + "COMMAND_SUCCEEDED": 1, + "COMMAND_FAILED": 2, + "USER_ERROR": 3, + "SYSTEM_ERROR": 4, + "RESOURCE_EXHAUSTED": 5, + "INVOCATION_DEADLINE_EXCEEDED": 6, + "REQUEST_DEADLINE_EXCEEDED": 8, + "CANCELLED": 7, +} + +func (x BuildStatus_Result) String() string { + return proto.EnumName(BuildStatus_Result_name, int32(x)) +} +func (BuildStatus_Result) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_build_status_9c10e159cbe6c1da, []int{0, 0} +} + +// Status used for both invocation attempt and overall build completion. +type BuildStatus struct { + // The end result. + Result BuildStatus_Result `protobuf:"varint,1,opt,name=result,proto3,enum=google.devtools.build.v1.BuildStatus_Result" json:"result,omitempty"` + // Fine-grained diagnostic information to complement the status. + Details *any.Any `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildStatus) Reset() { *m = BuildStatus{} } +func (m *BuildStatus) String() string { return proto.CompactTextString(m) } +func (*BuildStatus) ProtoMessage() {} +func (*BuildStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_build_status_9c10e159cbe6c1da, []int{0} +} +func (m *BuildStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildStatus.Unmarshal(m, b) +} +func (m *BuildStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildStatus.Marshal(b, m, deterministic) +} +func (dst *BuildStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildStatus.Merge(dst, src) +} +func (m *BuildStatus) XXX_Size() int { + return xxx_messageInfo_BuildStatus.Size(m) +} +func (m *BuildStatus) XXX_DiscardUnknown() { + xxx_messageInfo_BuildStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildStatus proto.InternalMessageInfo + +func (m *BuildStatus) GetResult() BuildStatus_Result { + if m != nil { + return m.Result + } + return BuildStatus_UNKNOWN_STATUS +} + +func (m *BuildStatus) GetDetails() *any.Any { + if m != nil { + return m.Details + } + return nil +} + +func init() { + proto.RegisterType((*BuildStatus)(nil), "google.devtools.build.v1.BuildStatus") + proto.RegisterEnum("google.devtools.build.v1.BuildStatus_Result", BuildStatus_Result_name, BuildStatus_Result_value) +} + +func init() { + proto.RegisterFile("google/devtools/build/v1/build_status.proto", fileDescriptor_build_status_9c10e159cbe6c1da) +} + +var fileDescriptor_build_status_9c10e159cbe6c1da = []byte{ + // 390 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x4f, 0x0b, 0xd3, 0x30, + 0x18, 0xc6, 0xcd, 0xd4, 0x4e, 0x33, 0x1d, 0x35, 0xa8, 0x6c, 0x63, 0xc2, 0xd8, 0x69, 0xa0, 0xa4, + 0x6c, 0x1e, 0xc5, 0x43, 0xd6, 0x44, 0x2c, 0x6e, 0xe9, 0x4c, 0x5a, 0xff, 0x5d, 0x4a, 0xe6, 0x6a, + 0x29, 0xd4, 
0x66, 0xac, 0xe9, 0x60, 0x1f, 0xd3, 0x93, 0x5f, 0xc5, 0xa3, 0xf4, 0x1f, 0x0c, 0x74, + 0xb7, 0xf4, 0x7d, 0x7e, 0xcf, 0xf3, 0xbe, 0x3c, 0x14, 0xbe, 0x4c, 0xb4, 0x4e, 0xb2, 0xd8, 0x39, + 0xc4, 0x67, 0xa3, 0x75, 0x56, 0x38, 0xfb, 0x32, 0xcd, 0x0e, 0xce, 0x79, 0xd9, 0x3c, 0xa2, 0xc2, + 0x28, 0x53, 0x16, 0xf8, 0x78, 0xd2, 0x46, 0xa3, 0x51, 0x03, 0xe3, 0x0e, 0xc6, 0x35, 0x83, 0xcf, + 0xcb, 0xc9, 0xb4, 0x8d, 0x51, 0xc7, 0xd4, 0x51, 0x79, 0xae, 0x8d, 0x32, 0xa9, 0xce, 0x5b, 0xdf, + 0x64, 0xdc, 0xaa, 0xf5, 0xd7, 0xbe, 0xfc, 0xe1, 0xa8, 0xfc, 0xd2, 0x48, 0xf3, 0xdf, 0x3d, 0x38, + 0x58, 0x57, 0x29, 0xb2, 0x5e, 0x84, 0x28, 0xb4, 0x4e, 0x71, 0x51, 0x66, 0x66, 0x04, 0x66, 0x60, + 0x31, 0x5c, 0xbd, 0xc2, 0xb7, 0x76, 0xe2, 0x2b, 0x1b, 0x16, 0xb5, 0x47, 0xb4, 0x5e, 0x84, 0x61, + 0xff, 0x10, 0x1b, 0x95, 0x66, 0xc5, 0xa8, 0x37, 0x03, 0x8b, 0xc1, 0xea, 0x69, 0x17, 0xd3, 0x9d, + 0x80, 0x49, 0x7e, 0x11, 0x1d, 0x34, 0xff, 0x05, 0xa0, 0xd5, 0x44, 0x20, 0x04, 0x87, 0x21, 0xff, + 0xc0, 0xfd, 0xcf, 0x3c, 0x92, 0x01, 0x09, 0x42, 0x69, 0xdf, 0x41, 0xcf, 0xe0, 0x13, 0xd7, 0xdf, + 0x6e, 0x09, 0xa7, 0x91, 0x0c, 0x5d, 0x97, 0x31, 0xca, 0xa8, 0x0d, 0x2a, 0xb4, 0x1b, 0xbf, 0x23, + 0xde, 0x86, 0x51, 0xbb, 0x87, 0x86, 0x10, 0x86, 0x92, 0x89, 0x88, 0x09, 0xe1, 0x0b, 0xfb, 0x2e, + 0xb2, 0xe1, 0x23, 0xf9, 0x55, 0x06, 0x6c, 0xdb, 0x4e, 0xee, 0xa1, 0xe7, 0x10, 0x09, 0x26, 0xfd, + 0x50, 0xb8, 0x2c, 0x62, 0x5f, 0xde, 0x93, 0x50, 0x06, 0x8c, 0xda, 0xf7, 0xd1, 0x0c, 0x4e, 0x3d, + 0xfe, 0xc9, 0x77, 0x49, 0xe0, 0xf9, 0x3c, 0xa2, 0x8c, 0xd0, 0x8d, 0xc7, 0x2b, 0xa4, 0xdd, 0x67, + 0xa1, 0x17, 0x70, 0x2c, 0xd8, 0xc7, 0x90, 0xc9, 0xe0, 0x3f, 0xf2, 0x03, 0xf4, 0x18, 0x3e, 0x74, + 0x09, 0x77, 0xd9, 0xa6, 0xba, 0xa4, 0xbf, 0x36, 0x70, 0xfa, 0x5d, 0xff, 0xbc, 0x59, 0xdf, 0xda, + 0xbe, 0xea, 0x6f, 0x57, 0xb5, 0xb2, 0x03, 0xdf, 0xde, 0xb6, 0x74, 0xa2, 0x33, 0x95, 0x27, 0x58, + 0x9f, 0x12, 0x27, 0x89, 0xf3, 0xba, 0x33, 0xa7, 0x91, 0xd4, 0x31, 0x2d, 0xfe, 0xfd, 0x59, 0xde, + 0xd4, 0x8f, 0x3f, 0x00, 0xec, 0xad, 0x1a, 0x7e, 0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, 0xd6, 0x3d, + 0xf5, 0x87, 0x58, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/publish_build_event.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/publish_build_event.pb.go new file mode 100644 index 0000000..1bdea94 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/build/v1/publish_build_event.pb.go @@ -0,0 +1,542 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/build/v1/publish_build_event.proto + +package build // import "google.golang.org/genproto/googleapis/devtools/build/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The service level of the build request. 
Backends only uses this value when +// the BuildEnqueued event is published to determine what level of service +// this build should receive. +type PublishLifecycleEventRequest_ServiceLevel int32 + +const ( + // Non-interactive builds can tolerate longer event latencies. This is the + // default ServiceLevel if callers do not specify one. + PublishLifecycleEventRequest_NONINTERACTIVE PublishLifecycleEventRequest_ServiceLevel = 0 + // The events of an interactive build should be delivered with low latency. + PublishLifecycleEventRequest_INTERACTIVE PublishLifecycleEventRequest_ServiceLevel = 1 +) + +var PublishLifecycleEventRequest_ServiceLevel_name = map[int32]string{ + 0: "NONINTERACTIVE", + 1: "INTERACTIVE", +} +var PublishLifecycleEventRequest_ServiceLevel_value = map[string]int32{ + "NONINTERACTIVE": 0, + "INTERACTIVE": 1, +} + +func (x PublishLifecycleEventRequest_ServiceLevel) String() string { + return proto.EnumName(PublishLifecycleEventRequest_ServiceLevel_name, int32(x)) +} +func (PublishLifecycleEventRequest_ServiceLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_publish_build_event_d745bb02eff36c4f, []int{0, 0} +} + +// Publishes 'lifecycle events' that update the high-level state of a build: +// - BuildEnqueued: When a build is scheduled. +// - InvocationAttemptStarted: When work for a build starts; there can be +// multiple invocations for a build (e.g. retries). +// - InvocationAttemptCompleted: When work for a build finishes. +// - BuildFinished: When a build is finished. +type PublishLifecycleEventRequest struct { + // The interactivity of this build. + ServiceLevel PublishLifecycleEventRequest_ServiceLevel `protobuf:"varint,1,opt,name=service_level,json=serviceLevel,proto3,enum=google.devtools.build.v1.PublishLifecycleEventRequest_ServiceLevel" json:"service_level,omitempty"` + // The lifecycle build event. If this is a build tool event, the RPC will fail + // with INVALID_REQUEST. + BuildEvent *OrderedBuildEvent `protobuf:"bytes,2,opt,name=build_event,json=buildEvent,proto3" json:"build_event,omitempty"` + // If the next event for this build or invocation (depending on the event + // type) hasn't been published after this duration from when {build_event} + // is written to BES, consider this stream expired. If this field is not set, + // BES backend will use its own default value. + StreamTimeout *duration.Duration `protobuf:"bytes,3,opt,name=stream_timeout,json=streamTimeout,proto3" json:"stream_timeout,omitempty"` + // Additional information about a build request. These are define by the event + // publishers, and the Build Event Service does not validate or interpret + // them. They are used while notifying internal systems of new builds and + // invocations if the OrderedBuildEvent.event type is + // BuildEnqueued/InvocationAttemptStarted. + NotificationKeywords []string `protobuf:"bytes,4,rep,name=notification_keywords,json=notificationKeywords,proto3" json:"notification_keywords,omitempty"` + // The project this build is associated with. + // This should match the project used for the initial call to + // PublishLifecycleEvent (containing a BuildEnqueued message). 
+ ProjectId string `protobuf:"bytes,6,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishLifecycleEventRequest) Reset() { *m = PublishLifecycleEventRequest{} } +func (m *PublishLifecycleEventRequest) String() string { return proto.CompactTextString(m) } +func (*PublishLifecycleEventRequest) ProtoMessage() {} +func (*PublishLifecycleEventRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_publish_build_event_d745bb02eff36c4f, []int{0} +} +func (m *PublishLifecycleEventRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishLifecycleEventRequest.Unmarshal(m, b) +} +func (m *PublishLifecycleEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishLifecycleEventRequest.Marshal(b, m, deterministic) +} +func (dst *PublishLifecycleEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishLifecycleEventRequest.Merge(dst, src) +} +func (m *PublishLifecycleEventRequest) XXX_Size() int { + return xxx_messageInfo_PublishLifecycleEventRequest.Size(m) +} +func (m *PublishLifecycleEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PublishLifecycleEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishLifecycleEventRequest proto.InternalMessageInfo + +func (m *PublishLifecycleEventRequest) GetServiceLevel() PublishLifecycleEventRequest_ServiceLevel { + if m != nil { + return m.ServiceLevel + } + return PublishLifecycleEventRequest_NONINTERACTIVE +} + +func (m *PublishLifecycleEventRequest) GetBuildEvent() *OrderedBuildEvent { + if m != nil { + return m.BuildEvent + } + return nil +} + +func (m *PublishLifecycleEventRequest) GetStreamTimeout() *duration.Duration { + if m != nil { + return m.StreamTimeout + } + return nil +} + +func (m *PublishLifecycleEventRequest) GetNotificationKeywords() []string { + if m != nil { + return m.NotificationKeywords + } + return nil +} + +func (m *PublishLifecycleEventRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// States which event has been committed. Any failure to commit will cause +// RPC errors, hence not recorded by this proto. +type PublishBuildToolEventStreamResponse struct { + // The stream that contains this event. + StreamId *StreamId `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // The sequence number of this event that has been committed. 
+ SequenceNumber int64 `protobuf:"varint,2,opt,name=sequence_number,json=sequenceNumber,proto3" json:"sequence_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishBuildToolEventStreamResponse) Reset() { *m = PublishBuildToolEventStreamResponse{} } +func (m *PublishBuildToolEventStreamResponse) String() string { return proto.CompactTextString(m) } +func (*PublishBuildToolEventStreamResponse) ProtoMessage() {} +func (*PublishBuildToolEventStreamResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_publish_build_event_d745bb02eff36c4f, []int{1} +} +func (m *PublishBuildToolEventStreamResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishBuildToolEventStreamResponse.Unmarshal(m, b) +} +func (m *PublishBuildToolEventStreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishBuildToolEventStreamResponse.Marshal(b, m, deterministic) +} +func (dst *PublishBuildToolEventStreamResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishBuildToolEventStreamResponse.Merge(dst, src) +} +func (m *PublishBuildToolEventStreamResponse) XXX_Size() int { + return xxx_messageInfo_PublishBuildToolEventStreamResponse.Size(m) +} +func (m *PublishBuildToolEventStreamResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PublishBuildToolEventStreamResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishBuildToolEventStreamResponse proto.InternalMessageInfo + +func (m *PublishBuildToolEventStreamResponse) GetStreamId() *StreamId { + if m != nil { + return m.StreamId + } + return nil +} + +func (m *PublishBuildToolEventStreamResponse) GetSequenceNumber() int64 { + if m != nil { + return m.SequenceNumber + } + return 0 +} + +// Build event with contextual information about the stream it belongs to and +// its position in that stream. +type OrderedBuildEvent struct { + // Which build event stream this event belongs to. + StreamId *StreamId `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // The position of this event in the stream. The sequence numbers for a build + // event stream should be a sequence of consecutive natural numbers starting + // from one. (1, 2, 3, ...) + SequenceNumber int64 `protobuf:"varint,2,opt,name=sequence_number,json=sequenceNumber,proto3" json:"sequence_number,omitempty"` + // The actual event. 
+ Event *BuildEvent `protobuf:"bytes,3,opt,name=event,proto3" json:"event,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OrderedBuildEvent) Reset() { *m = OrderedBuildEvent{} } +func (m *OrderedBuildEvent) String() string { return proto.CompactTextString(m) } +func (*OrderedBuildEvent) ProtoMessage() {} +func (*OrderedBuildEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_publish_build_event_d745bb02eff36c4f, []int{2} +} +func (m *OrderedBuildEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OrderedBuildEvent.Unmarshal(m, b) +} +func (m *OrderedBuildEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OrderedBuildEvent.Marshal(b, m, deterministic) +} +func (dst *OrderedBuildEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_OrderedBuildEvent.Merge(dst, src) +} +func (m *OrderedBuildEvent) XXX_Size() int { + return xxx_messageInfo_OrderedBuildEvent.Size(m) +} +func (m *OrderedBuildEvent) XXX_DiscardUnknown() { + xxx_messageInfo_OrderedBuildEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_OrderedBuildEvent proto.InternalMessageInfo + +func (m *OrderedBuildEvent) GetStreamId() *StreamId { + if m != nil { + return m.StreamId + } + return nil +} + +func (m *OrderedBuildEvent) GetSequenceNumber() int64 { + if m != nil { + return m.SequenceNumber + } + return 0 +} + +func (m *OrderedBuildEvent) GetEvent() *BuildEvent { + if m != nil { + return m.Event + } + return nil +} + +// Streaming request message for PublishBuildToolEventStream. +type PublishBuildToolEventStreamRequest struct { + // The build event with position info. + // New publishing clients should use this field rather than the 3 above. + OrderedBuildEvent *OrderedBuildEvent `protobuf:"bytes,4,opt,name=ordered_build_event,json=orderedBuildEvent,proto3" json:"ordered_build_event,omitempty"` + // The keywords to be attached to the notification which notifies the start + // of a new build event stream. BES only reads this field when sequence_number + // or ordered_build_event.sequence_number is 1 in this message. If this field + // is empty, BES will not publish notification messages for this stream. + NotificationKeywords []string `protobuf:"bytes,5,rep,name=notification_keywords,json=notificationKeywords,proto3" json:"notification_keywords,omitempty"` + // The project this build is associated with. + // This should match the project used for the initial call to + // PublishLifecycleEvent (containing a BuildEnqueued message). 
+ ProjectId string `protobuf:"bytes,6,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishBuildToolEventStreamRequest) Reset() { *m = PublishBuildToolEventStreamRequest{} } +func (m *PublishBuildToolEventStreamRequest) String() string { return proto.CompactTextString(m) } +func (*PublishBuildToolEventStreamRequest) ProtoMessage() {} +func (*PublishBuildToolEventStreamRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_publish_build_event_d745bb02eff36c4f, []int{3} +} +func (m *PublishBuildToolEventStreamRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishBuildToolEventStreamRequest.Unmarshal(m, b) +} +func (m *PublishBuildToolEventStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishBuildToolEventStreamRequest.Marshal(b, m, deterministic) +} +func (dst *PublishBuildToolEventStreamRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishBuildToolEventStreamRequest.Merge(dst, src) +} +func (m *PublishBuildToolEventStreamRequest) XXX_Size() int { + return xxx_messageInfo_PublishBuildToolEventStreamRequest.Size(m) +} +func (m *PublishBuildToolEventStreamRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PublishBuildToolEventStreamRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishBuildToolEventStreamRequest proto.InternalMessageInfo + +func (m *PublishBuildToolEventStreamRequest) GetOrderedBuildEvent() *OrderedBuildEvent { + if m != nil { + return m.OrderedBuildEvent + } + return nil +} + +func (m *PublishBuildToolEventStreamRequest) GetNotificationKeywords() []string { + if m != nil { + return m.NotificationKeywords + } + return nil +} + +func (m *PublishBuildToolEventStreamRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func init() { + proto.RegisterType((*PublishLifecycleEventRequest)(nil), "google.devtools.build.v1.PublishLifecycleEventRequest") + proto.RegisterType((*PublishBuildToolEventStreamResponse)(nil), "google.devtools.build.v1.PublishBuildToolEventStreamResponse") + proto.RegisterType((*OrderedBuildEvent)(nil), "google.devtools.build.v1.OrderedBuildEvent") + proto.RegisterType((*PublishBuildToolEventStreamRequest)(nil), "google.devtools.build.v1.PublishBuildToolEventStreamRequest") + proto.RegisterEnum("google.devtools.build.v1.PublishLifecycleEventRequest_ServiceLevel", PublishLifecycleEventRequest_ServiceLevel_name, PublishLifecycleEventRequest_ServiceLevel_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PublishBuildEventClient is the client API for PublishBuildEvent service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PublishBuildEventClient interface { + // Publish a build event stating the new state of a build (typically from the + // build queue). The BuildEnqueued event must be publishd before all other + // events for the same build ID. + // + // The backend will persist the event and deliver it to registered frontend + // jobs immediately without batching. 
+ // + // The commit status of the request is reported by the RPC's util_status() + // function. The error code is the canoncial error code defined in + // //util/task/codes.proto. + PublishLifecycleEvent(ctx context.Context, in *PublishLifecycleEventRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Publish build tool events belonging to the same stream to a backend job + // using bidirectional streaming. + PublishBuildToolEventStream(ctx context.Context, opts ...grpc.CallOption) (PublishBuildEvent_PublishBuildToolEventStreamClient, error) +} + +type publishBuildEventClient struct { + cc *grpc.ClientConn +} + +func NewPublishBuildEventClient(cc *grpc.ClientConn) PublishBuildEventClient { + return &publishBuildEventClient{cc} +} + +func (c *publishBuildEventClient) PublishLifecycleEvent(ctx context.Context, in *PublishLifecycleEventRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.build.v1.PublishBuildEvent/PublishLifecycleEvent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publishBuildEventClient) PublishBuildToolEventStream(ctx context.Context, opts ...grpc.CallOption) (PublishBuildEvent_PublishBuildToolEventStreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_PublishBuildEvent_serviceDesc.Streams[0], "/google.devtools.build.v1.PublishBuildEvent/PublishBuildToolEventStream", opts...) + if err != nil { + return nil, err + } + x := &publishBuildEventPublishBuildToolEventStreamClient{stream} + return x, nil +} + +type PublishBuildEvent_PublishBuildToolEventStreamClient interface { + Send(*PublishBuildToolEventStreamRequest) error + Recv() (*PublishBuildToolEventStreamResponse, error) + grpc.ClientStream +} + +type publishBuildEventPublishBuildToolEventStreamClient struct { + grpc.ClientStream +} + +func (x *publishBuildEventPublishBuildToolEventStreamClient) Send(m *PublishBuildToolEventStreamRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *publishBuildEventPublishBuildToolEventStreamClient) Recv() (*PublishBuildToolEventStreamResponse, error) { + m := new(PublishBuildToolEventStreamResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// PublishBuildEventServer is the server API for PublishBuildEvent service. +type PublishBuildEventServer interface { + // Publish a build event stating the new state of a build (typically from the + // build queue). The BuildEnqueued event must be publishd before all other + // events for the same build ID. + // + // The backend will persist the event and deliver it to registered frontend + // jobs immediately without batching. + // + // The commit status of the request is reported by the RPC's util_status() + // function. The error code is the canoncial error code defined in + // //util/task/codes.proto. + PublishLifecycleEvent(context.Context, *PublishLifecycleEventRequest) (*empty.Empty, error) + // Publish build tool events belonging to the same stream to a backend job + // using bidirectional streaming. 
+ PublishBuildToolEventStream(PublishBuildEvent_PublishBuildToolEventStreamServer) error +} + +func RegisterPublishBuildEventServer(s *grpc.Server, srv PublishBuildEventServer) { + s.RegisterService(&_PublishBuildEvent_serviceDesc, srv) +} + +func _PublishBuildEvent_PublishLifecycleEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PublishLifecycleEventRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublishBuildEventServer).PublishLifecycleEvent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.build.v1.PublishBuildEvent/PublishLifecycleEvent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublishBuildEventServer).PublishLifecycleEvent(ctx, req.(*PublishLifecycleEventRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PublishBuildEvent_PublishBuildToolEventStream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(PublishBuildEventServer).PublishBuildToolEventStream(&publishBuildEventPublishBuildToolEventStreamServer{stream}) +} + +type PublishBuildEvent_PublishBuildToolEventStreamServer interface { + Send(*PublishBuildToolEventStreamResponse) error + Recv() (*PublishBuildToolEventStreamRequest, error) + grpc.ServerStream +} + +type publishBuildEventPublishBuildToolEventStreamServer struct { + grpc.ServerStream +} + +func (x *publishBuildEventPublishBuildToolEventStreamServer) Send(m *PublishBuildToolEventStreamResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *publishBuildEventPublishBuildToolEventStreamServer) Recv() (*PublishBuildToolEventStreamRequest, error) { + m := new(PublishBuildToolEventStreamRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _PublishBuildEvent_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.build.v1.PublishBuildEvent", + HandlerType: (*PublishBuildEventServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "PublishLifecycleEvent", + Handler: _PublishBuildEvent_PublishLifecycleEvent_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "PublishBuildToolEventStream", + Handler: _PublishBuildEvent_PublishBuildToolEventStream_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/devtools/build/v1/publish_build_event.proto", +} + +func init() { + proto.RegisterFile("google/devtools/build/v1/publish_build_event.proto", fileDescriptor_publish_build_event_d745bb02eff36c4f) +} + +var fileDescriptor_publish_build_event_d745bb02eff36c4f = []byte{ + // 670 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0x66, 0x9b, 0xb6, 0x22, 0x9b, 0x36, 0xa5, 0x0b, 0x05, 0x93, 0x16, 0x14, 0x2d, 0x48, 0x44, + 0xad, 0x64, 0xd3, 0x44, 0xe2, 0x50, 0xa8, 0x80, 0x94, 0x48, 0x44, 0x54, 0x69, 0xe5, 0x46, 0x1c, + 0xca, 0xc1, 0x38, 0xf6, 0x34, 0x35, 0x75, 0xbc, 0xc6, 0xbb, 0x0e, 0xaa, 0x10, 0x17, 0x5e, 0x80, + 0x03, 0x4f, 0xc0, 0x63, 0xf0, 0x08, 0xdc, 0x90, 0x10, 0x6f, 0xc0, 0x99, 0x33, 0x47, 0xe4, 0x5d, + 0x07, 0x0c, 0xc1, 0x41, 0xe4, 0xc0, 0xcd, 0xbb, 0x33, 0xdf, 0x37, 0xf3, 0xcd, 0xcf, 0x1a, 0xd7, + 0xfb, 0x8c, 0xf5, 0x7d, 0x30, 0x5c, 0x18, 0x0a, 0xc6, 0x7c, 0x6e, 0xf4, 0x62, 0xcf, 0x77, 0x8d, + 0xe1, 0xa6, 0x11, 0xc6, 0x3d, 0xdf, 0xe3, 0xc7, 0x96, 0xbc, 0xb0, 0x60, 
0x08, 0x81, 0xd0, 0xc3, + 0x88, 0x09, 0x46, 0x34, 0x85, 0xd1, 0x47, 0x18, 0x5d, 0xba, 0xe8, 0xc3, 0xcd, 0xca, 0x5a, 0xca, + 0x66, 0x87, 0x9e, 0x61, 0x07, 0x01, 0x13, 0xb6, 0xf0, 0x58, 0xc0, 0x15, 0xae, 0xb2, 0x91, 0x1b, + 0x2b, 0x13, 0x63, 0xe4, 0x7c, 0x35, 0x75, 0x96, 0xa7, 0x5e, 0x7c, 0x64, 0xb8, 0x71, 0x24, 0xd9, + 0x52, 0xfb, 0xea, 0xef, 0x76, 0x18, 0x84, 0xe2, 0x54, 0x19, 0xe9, 0xbb, 0x02, 0x5e, 0xdb, 0x57, + 0xf9, 0xef, 0x7a, 0x47, 0xe0, 0x9c, 0x3a, 0x3e, 0xb4, 0x12, 0x76, 0x13, 0x9e, 0xc7, 0xc0, 0x05, + 0x39, 0xc6, 0x8b, 0x1c, 0xa2, 0xa1, 0xe7, 0x80, 0xe5, 0xc3, 0x10, 0x7c, 0x0d, 0x55, 0x51, 0xad, + 0x5c, 0xdf, 0xd1, 0xf3, 0xa4, 0xe9, 0x93, 0xe8, 0xf4, 0x03, 0xc5, 0xb5, 0x9b, 0x50, 0x99, 0x0b, + 0x3c, 0x73, 0x22, 0xbb, 0xb8, 0x94, 0x51, 0xa7, 0xcd, 0x54, 0x51, 0xad, 0x54, 0xdf, 0xc8, 0x8f, + 0xb3, 0x17, 0xb9, 0x10, 0x81, 0xdb, 0x4c, 0xce, 0x2a, 0x06, 0xee, 0xfd, 0xf8, 0x26, 0xf7, 0x70, + 0x99, 0x8b, 0x08, 0xec, 0x81, 0x25, 0xbc, 0x01, 0xb0, 0x58, 0x68, 0x05, 0x49, 0x78, 0x79, 0x44, + 0x38, 0x2a, 0x87, 0xfe, 0x20, 0x2d, 0x97, 0xb9, 0xa8, 0x00, 0x5d, 0xe5, 0x4f, 0x1a, 0x78, 0x25, + 0x60, 0xc2, 0x3b, 0xf2, 0x1c, 0x69, 0xb6, 0x4e, 0xe0, 0xf4, 0x05, 0x8b, 0x5c, 0xae, 0xcd, 0x56, + 0x0b, 0xb5, 0xa2, 0x79, 0x21, 0x6b, 0x7c, 0x94, 0xda, 0xc8, 0x15, 0x8c, 0xc3, 0x88, 0x3d, 0x03, + 0x47, 0x58, 0x9e, 0xab, 0xcd, 0x57, 0x51, 0xad, 0x68, 0x16, 0xd3, 0x9b, 0xb6, 0x4b, 0x1b, 0x78, + 0x21, 0x5b, 0x01, 0x42, 0x70, 0xb9, 0xb3, 0xd7, 0x69, 0x77, 0xba, 0x2d, 0xf3, 0xfe, 0x4e, 0xb7, + 0xfd, 0xb8, 0x75, 0xee, 0x0c, 0x59, 0xc2, 0xa5, 0xec, 0x05, 0xa2, 0x6f, 0x10, 0xbe, 0x96, 0x16, + 0x55, 0x8a, 0xed, 0x32, 0xe6, 0x4b, 0x91, 0x07, 0x32, 0x5f, 0x13, 0x78, 0xc8, 0x02, 0x0e, 0xe4, + 0x2e, 0x2e, 0xa6, 0x92, 0x3d, 0x57, 0xb6, 0xa9, 0x54, 0xa7, 0xf9, 0xe5, 0x53, 0xe0, 0xb6, 0x6b, + 0x9e, 0xe5, 0xe9, 0x17, 0xb9, 0x81, 0x97, 0x78, 0xd2, 0xa7, 0xc0, 0x01, 0x2b, 0x88, 0x07, 0x3d, + 0x88, 0x64, 0x17, 0x0a, 0x66, 0x79, 0x74, 0xdd, 0x91, 0xb7, 0xf4, 0x3d, 0xc2, 0xcb, 0x63, 0xe5, + 0xff, 0x7f, 0xf1, 0xc9, 0x16, 0x9e, 0x53, 0x43, 0xa2, 0x7a, 0x7a, 0x3d, 0x3f, 0x4a, 0x66, 0x3a, + 0x14, 0x84, 0x7e, 0x46, 0x98, 0x4e, 0xac, 0xa6, 0x9a, 0xfb, 0x27, 0xf8, 0x3c, 0x53, 0x0a, 0xb3, + 0x7b, 0xad, 0xcd, 0xfe, 0xfb, 0x54, 0x2e, 0xb3, 0xb1, 0x4a, 0xe5, 0x8e, 0xd6, 0xdc, 0xd4, 0xa3, + 0x55, 0xff, 0x58, 0xc0, 0xcb, 0x59, 0x5d, 0x2a, 0xd2, 0x07, 0x84, 0x57, 0xfe, 0xb8, 0x90, 0xe4, + 0xd6, 0x74, 0x1b, 0x5c, 0xb9, 0x38, 0xb6, 0x40, 0xad, 0xe4, 0x3d, 0xa1, 0x4f, 0x5f, 0x7f, 0xfa, + 0xf2, 0x76, 0xe6, 0x90, 0x36, 0xe4, 0x93, 0xa8, 0xd2, 0xe2, 0xc6, 0xcb, 0x9f, 0x29, 0x6f, 0xaf, + 0xbf, 0x32, 0xfc, 0x5f, 0x28, 0xf9, 0x56, 0xfa, 0x74, 0x6e, 0xa1, 0xf5, 0xc3, 0x2a, 0x5d, 0x4d, + 0x90, 0xf9, 0x1e, 0xe4, 0x2b, 0xc2, 0xab, 0x13, 0x3a, 0x47, 0xee, 0xfc, 0x55, 0xd1, 0x84, 0x86, + 0x57, 0xb6, 0xa7, 0x44, 0xab, 0xe5, 0xa3, 0x1d, 0x29, 0xff, 0x21, 0x5d, 0x9f, 0x24, 0x1f, 0xc6, + 0x54, 0x5f, 0xa2, 0x24, 0x01, 0x8c, 0x19, 0x6a, 0xe8, 0x26, 0x6a, 0x86, 0x78, 0xcd, 0x61, 0x83, + 0xdc, 0xbc, 0x9a, 0x0b, 0x4d, 0xdb, 0x39, 0x81, 0xc0, 0xdd, 0x4f, 0x3a, 0xb1, 0x8f, 0x0e, 0xb7, + 0x53, 0xcf, 0x3e, 0xf3, 0xed, 0xa0, 0xaf, 0xb3, 0xa8, 0x6f, 0xf4, 0x21, 0x90, 0x7d, 0x32, 0x94, + 0xc9, 0x0e, 0x3d, 0x3e, 0xfe, 0x57, 0xb9, 0x2d, 0x3f, 0xbe, 0x21, 0xd4, 0x9b, 0x97, 0xce, 0x8d, + 0xef, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdf, 0x68, 0x27, 0x13, 0xed, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/cloudbuild/v1/cloudbuild.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/cloudbuild/v1/cloudbuild.pb.go new file mode 100644 
index 0000000..de367cb --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/cloudbuild/v1/cloudbuild.pb.go @@ -0,0 +1,3633 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/cloudbuild/v1/cloudbuild.proto + +package cloudbuild // import "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/httpbody" +import _ "google.golang.org/genproto/googleapis/cloud/audit" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import _ "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Possible status of a build or build step. +type Build_Status int32 + +const ( + // Status of the build is unknown. + Build_STATUS_UNKNOWN Build_Status = 0 + // Build or step is queued; work has not yet begun. + Build_QUEUED Build_Status = 1 + // Build or step is being executed. + Build_WORKING Build_Status = 2 + // Build or step finished successfully. + Build_SUCCESS Build_Status = 3 + // Build or step failed to complete successfully. + Build_FAILURE Build_Status = 4 + // Build or step failed due to an internal cause. + Build_INTERNAL_ERROR Build_Status = 5 + // Build or step took longer than was allowed. + Build_TIMEOUT Build_Status = 6 + // Build or step was canceled by a user. + Build_CANCELLED Build_Status = 7 +) + +var Build_Status_name = map[int32]string{ + 0: "STATUS_UNKNOWN", + 1: "QUEUED", + 2: "WORKING", + 3: "SUCCESS", + 4: "FAILURE", + 5: "INTERNAL_ERROR", + 6: "TIMEOUT", + 7: "CANCELLED", +} +var Build_Status_value = map[string]int32{ + "STATUS_UNKNOWN": 0, + "QUEUED": 1, + "WORKING": 2, + "SUCCESS": 3, + "FAILURE": 4, + "INTERNAL_ERROR": 5, + "TIMEOUT": 6, + "CANCELLED": 7, +} + +func (x Build_Status) String() string { + return proto.EnumName(Build_Status_name, int32(x)) +} +func (Build_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{10, 0} +} + +// Specifies the hash algorithm, if any. +type Hash_HashType int32 + +const ( + // No hash requested. + Hash_NONE Hash_HashType = 0 + // Use a sha256 hash. + Hash_SHA256 Hash_HashType = 1 + // Use a md5 hash. + Hash_MD5 Hash_HashType = 2 +) + +var Hash_HashType_name = map[int32]string{ + 0: "NONE", + 1: "SHA256", + 2: "MD5", +} +var Hash_HashType_value = map[string]int32{ + "NONE": 0, + "SHA256": 1, + "MD5": 2, +} + +func (x Hash_HashType) String() string { + return proto.EnumName(Hash_HashType_name, int32(x)) +} +func (Hash_HashType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{16, 0} +} + +// Specifies the manner in which the build should be verified, if at all. 
+type BuildOptions_VerifyOption int32 + +const ( + // Not a verifiable build. (default) + BuildOptions_NOT_VERIFIED BuildOptions_VerifyOption = 0 + // Verified build. + BuildOptions_VERIFIED BuildOptions_VerifyOption = 1 +) + +var BuildOptions_VerifyOption_name = map[int32]string{ + 0: "NOT_VERIFIED", + 1: "VERIFIED", +} +var BuildOptions_VerifyOption_value = map[string]int32{ + "NOT_VERIFIED": 0, + "VERIFIED": 1, +} + +func (x BuildOptions_VerifyOption) String() string { + return proto.EnumName(BuildOptions_VerifyOption_name, int32(x)) +} +func (BuildOptions_VerifyOption) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30, 0} +} + +// Supported VM sizes. +type BuildOptions_MachineType int32 + +const ( + // Standard machine type. + BuildOptions_UNSPECIFIED BuildOptions_MachineType = 0 + // Highcpu machine with 8 CPUs. + BuildOptions_N1_HIGHCPU_8 BuildOptions_MachineType = 1 + // Highcpu machine with 32 CPUs. + BuildOptions_N1_HIGHCPU_32 BuildOptions_MachineType = 2 +) + +var BuildOptions_MachineType_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "N1_HIGHCPU_8", + 2: "N1_HIGHCPU_32", +} +var BuildOptions_MachineType_value = map[string]int32{ + "UNSPECIFIED": 0, + "N1_HIGHCPU_8": 1, + "N1_HIGHCPU_32": 2, +} + +func (x BuildOptions_MachineType) String() string { + return proto.EnumName(BuildOptions_MachineType_name, int32(x)) +} +func (BuildOptions_MachineType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30, 1} +} + +// Specifies the behavior when there is an error in the substitution checks. +type BuildOptions_SubstitutionOption int32 + +const ( + // Fails the build if error in substitutions checks, like missing + // a substitution in the template or in the map. + BuildOptions_MUST_MATCH BuildOptions_SubstitutionOption = 0 + // Do not fail the build if error in substitutions checks. + BuildOptions_ALLOW_LOOSE BuildOptions_SubstitutionOption = 1 +) + +var BuildOptions_SubstitutionOption_name = map[int32]string{ + 0: "MUST_MATCH", + 1: "ALLOW_LOOSE", +} +var BuildOptions_SubstitutionOption_value = map[string]int32{ + "MUST_MATCH": 0, + "ALLOW_LOOSE": 1, +} + +func (x BuildOptions_SubstitutionOption) String() string { + return proto.EnumName(BuildOptions_SubstitutionOption_name, int32(x)) +} +func (BuildOptions_SubstitutionOption) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30, 2} +} + +// Specifies the behavior when writing build logs to Google Cloud Storage. +type BuildOptions_LogStreamingOption int32 + +const ( + // Service may automatically determine build log streaming behavior. + BuildOptions_STREAM_DEFAULT BuildOptions_LogStreamingOption = 0 + // Build logs should be streamed to Google Cloud Storage. + BuildOptions_STREAM_ON BuildOptions_LogStreamingOption = 1 + // Build logs should not be streamed to Google Cloud Storage; they will be + // written when the build is completed. 
+ BuildOptions_STREAM_OFF BuildOptions_LogStreamingOption = 2 +) + +var BuildOptions_LogStreamingOption_name = map[int32]string{ + 0: "STREAM_DEFAULT", + 1: "STREAM_ON", + 2: "STREAM_OFF", +} +var BuildOptions_LogStreamingOption_value = map[string]int32{ + "STREAM_DEFAULT": 0, + "STREAM_ON": 1, + "STREAM_OFF": 2, +} + +func (x BuildOptions_LogStreamingOption) String() string { + return proto.EnumName(BuildOptions_LogStreamingOption_name, int32(x)) +} +func (BuildOptions_LogStreamingOption) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30, 3} +} + +// Specifies the logging mode. +type BuildOptions_LoggingMode int32 + +const ( + // The service determines the logging mode. The default is `LEGACY`. Do not + // rely on the default logging behavior as it may change in the future. + BuildOptions_LOGGING_UNSPECIFIED BuildOptions_LoggingMode = 0 + // Stackdriver logging and Cloud Storage logging are enabled. + BuildOptions_LEGACY BuildOptions_LoggingMode = 1 + // Only Cloud Storage logging is enabled. + BuildOptions_GCS_ONLY BuildOptions_LoggingMode = 2 +) + +var BuildOptions_LoggingMode_name = map[int32]string{ + 0: "LOGGING_UNSPECIFIED", + 1: "LEGACY", + 2: "GCS_ONLY", +} +var BuildOptions_LoggingMode_value = map[string]int32{ + "LOGGING_UNSPECIFIED": 0, + "LEGACY": 1, + "GCS_ONLY": 2, +} + +func (x BuildOptions_LoggingMode) String() string { + return proto.EnumName(BuildOptions_LoggingMode_name, int32(x)) +} +func (BuildOptions_LoggingMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30, 4} +} + +// Specifies a build to retry. +type RetryBuildRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Build ID of the original build. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryBuildRequest) Reset() { *m = RetryBuildRequest{} } +func (m *RetryBuildRequest) String() string { return proto.CompactTextString(m) } +func (*RetryBuildRequest) ProtoMessage() {} +func (*RetryBuildRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{0} +} +func (m *RetryBuildRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryBuildRequest.Unmarshal(m, b) +} +func (m *RetryBuildRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryBuildRequest.Marshal(b, m, deterministic) +} +func (dst *RetryBuildRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryBuildRequest.Merge(dst, src) +} +func (m *RetryBuildRequest) XXX_Size() int { + return xxx_messageInfo_RetryBuildRequest.Size(m) +} +func (m *RetryBuildRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RetryBuildRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryBuildRequest proto.InternalMessageInfo + +func (m *RetryBuildRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RetryBuildRequest) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Specifies a build trigger to run and the source to use. +type RunBuildTriggerRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the trigger. 
+ TriggerId string `protobuf:"bytes,2,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + // Source to build against this trigger. + Source *RepoSource `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunBuildTriggerRequest) Reset() { *m = RunBuildTriggerRequest{} } +func (m *RunBuildTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*RunBuildTriggerRequest) ProtoMessage() {} +func (*RunBuildTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{1} +} +func (m *RunBuildTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunBuildTriggerRequest.Unmarshal(m, b) +} +func (m *RunBuildTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunBuildTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *RunBuildTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunBuildTriggerRequest.Merge(dst, src) +} +func (m *RunBuildTriggerRequest) XXX_Size() int { + return xxx_messageInfo_RunBuildTriggerRequest.Size(m) +} +func (m *RunBuildTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunBuildTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunBuildTriggerRequest proto.InternalMessageInfo + +func (m *RunBuildTriggerRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RunBuildTriggerRequest) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +func (m *RunBuildTriggerRequest) GetSource() *RepoSource { + if m != nil { + return m.Source + } + return nil +} + +// Location of the source in an archive file in Google Cloud Storage. +type StorageSource struct { + // Google Cloud Storage bucket containing the source (see + // [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + Bucket string `protobuf:"bytes,1,opt,name=bucket,proto3" json:"bucket,omitempty"` + // Google Cloud Storage object containing the source. + // + // This object must be a gzipped archive file (`.tar.gz`) containing source to + // build. + Object string `protobuf:"bytes,2,opt,name=object,proto3" json:"object,omitempty"` + // Google Cloud Storage generation for the object. If the generation is + // omitted, the latest generation will be used. 
+ Generation int64 `protobuf:"varint,3,opt,name=generation,proto3" json:"generation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StorageSource) Reset() { *m = StorageSource{} } +func (m *StorageSource) String() string { return proto.CompactTextString(m) } +func (*StorageSource) ProtoMessage() {} +func (*StorageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{2} +} +func (m *StorageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StorageSource.Unmarshal(m, b) +} +func (m *StorageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StorageSource.Marshal(b, m, deterministic) +} +func (dst *StorageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageSource.Merge(dst, src) +} +func (m *StorageSource) XXX_Size() int { + return xxx_messageInfo_StorageSource.Size(m) +} +func (m *StorageSource) XXX_DiscardUnknown() { + xxx_messageInfo_StorageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_StorageSource proto.InternalMessageInfo + +func (m *StorageSource) GetBucket() string { + if m != nil { + return m.Bucket + } + return "" +} + +func (m *StorageSource) GetObject() string { + if m != nil { + return m.Object + } + return "" +} + +func (m *StorageSource) GetGeneration() int64 { + if m != nil { + return m.Generation + } + return 0 +} + +// Location of the source in a Google Cloud Source Repository. +type RepoSource struct { + // ID of the project that owns the Cloud Source Repository. If omitted, the + // project ID requesting the build is assumed. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Name of the Cloud Source Repository. If omitted, the name "default" is + // assumed. + RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName,proto3" json:"repo_name,omitempty"` + // A revision within the Cloud Source Repository must be specified in + // one of these ways. + // + // Types that are valid to be assigned to Revision: + // *RepoSource_BranchName + // *RepoSource_TagName + // *RepoSource_CommitSha + Revision isRepoSource_Revision `protobuf_oneof:"revision"` + // Directory, relative to the source root, in which to run the build. + // + // This must be a relative path. If a step's `dir` is specified and is an + // absolute path, this value is ignored for that step's execution. 
+ Dir string `protobuf:"bytes,7,opt,name=dir,proto3" json:"dir,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepoSource) Reset() { *m = RepoSource{} } +func (m *RepoSource) String() string { return proto.CompactTextString(m) } +func (*RepoSource) ProtoMessage() {} +func (*RepoSource) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{3} +} +func (m *RepoSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoSource.Unmarshal(m, b) +} +func (m *RepoSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoSource.Marshal(b, m, deterministic) +} +func (dst *RepoSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoSource.Merge(dst, src) +} +func (m *RepoSource) XXX_Size() int { + return xxx_messageInfo_RepoSource.Size(m) +} +func (m *RepoSource) XXX_DiscardUnknown() { + xxx_messageInfo_RepoSource.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoSource proto.InternalMessageInfo + +func (m *RepoSource) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RepoSource) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +type isRepoSource_Revision interface { + isRepoSource_Revision() +} + +type RepoSource_BranchName struct { + BranchName string `protobuf:"bytes,3,opt,name=branch_name,json=branchName,proto3,oneof"` +} + +type RepoSource_TagName struct { + TagName string `protobuf:"bytes,4,opt,name=tag_name,json=tagName,proto3,oneof"` +} + +type RepoSource_CommitSha struct { + CommitSha string `protobuf:"bytes,5,opt,name=commit_sha,json=commitSha,proto3,oneof"` +} + +func (*RepoSource_BranchName) isRepoSource_Revision() {} + +func (*RepoSource_TagName) isRepoSource_Revision() {} + +func (*RepoSource_CommitSha) isRepoSource_Revision() {} + +func (m *RepoSource) GetRevision() isRepoSource_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *RepoSource) GetBranchName() string { + if x, ok := m.GetRevision().(*RepoSource_BranchName); ok { + return x.BranchName + } + return "" +} + +func (m *RepoSource) GetTagName() string { + if x, ok := m.GetRevision().(*RepoSource_TagName); ok { + return x.TagName + } + return "" +} + +func (m *RepoSource) GetCommitSha() string { + if x, ok := m.GetRevision().(*RepoSource_CommitSha); ok { + return x.CommitSha + } + return "" +} + +func (m *RepoSource) GetDir() string { + if m != nil { + return m.Dir + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RepoSource) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoSource_OneofMarshaler, _RepoSource_OneofUnmarshaler, _RepoSource_OneofSizer, []interface{}{ + (*RepoSource_BranchName)(nil), + (*RepoSource_TagName)(nil), + (*RepoSource_CommitSha)(nil), + } +} + +func _RepoSource_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.BranchName) + case *RepoSource_TagName: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TagName) + case *RepoSource_CommitSha: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CommitSha) + case nil: + default: + return fmt.Errorf("RepoSource.Revision has unexpected type %T", x) + } + return nil +} + +func _RepoSource_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoSource) + switch tag { + case 3: // revision.branch_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_BranchName{x} + return true, err + case 4: // revision.tag_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_TagName{x} + return true, err + case 5: // revision.commit_sha + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_CommitSha{x} + return true, err + default: + return false, nil + } +} + +func _RepoSource_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BranchName))) + n += len(x.BranchName) + case *RepoSource_TagName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TagName))) + n += len(x.TagName) + case *RepoSource_CommitSha: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CommitSha))) + n += len(x.CommitSha) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Location of the source in a supported storage service. +type Source struct { + // Location of source. 
+ // + // Types that are valid to be assigned to Source: + // *Source_StorageSource + // *Source_RepoSource + Source isSource_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{4} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +type isSource_Source interface { + isSource_Source() +} + +type Source_StorageSource struct { + StorageSource *StorageSource `protobuf:"bytes,2,opt,name=storage_source,json=storageSource,proto3,oneof"` +} + +type Source_RepoSource struct { + RepoSource *RepoSource `protobuf:"bytes,3,opt,name=repo_source,json=repoSource,proto3,oneof"` +} + +func (*Source_StorageSource) isSource_Source() {} + +func (*Source_RepoSource) isSource_Source() {} + +func (m *Source) GetSource() isSource_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Source) GetStorageSource() *StorageSource { + if x, ok := m.GetSource().(*Source_StorageSource); ok { + return x.StorageSource + } + return nil +} + +func (m *Source) GetRepoSource() *RepoSource { + if x, ok := m.GetSource().(*Source_RepoSource); ok { + return x.RepoSource + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Source) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Source_OneofMarshaler, _Source_OneofUnmarshaler, _Source_OneofSizer, []interface{}{ + (*Source_StorageSource)(nil), + (*Source_RepoSource)(nil), + } +} + +func _Source_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StorageSource); err != nil { + return err + } + case *Source_RepoSource: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RepoSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Source.Source has unexpected type %T", x) + } + return nil +} + +func _Source_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Source) + switch tag { + case 2: // source.storage_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StorageSource) + err := b.DecodeMessage(msg) + m.Source = &Source_StorageSource{msg} + return true, err + case 3: // source.repo_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RepoSource) + err := b.DecodeMessage(msg) + m.Source = &Source_RepoSource{msg} + return true, err + default: + return false, nil + } +} + +func _Source_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + s := proto.Size(x.StorageSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Source_RepoSource: + s := proto.Size(x.RepoSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An image built by the pipeline. +type BuiltImage struct { + // Name used to push the container image to Google Container Registry, as + // presented to `docker push`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Docker Registry 2.0 digest. + Digest string `protobuf:"bytes,3,opt,name=digest,proto3" json:"digest,omitempty"` + // Output only. Stores timing information for pushing the specified image. 
+ PushTiming *TimeSpan `protobuf:"bytes,4,opt,name=push_timing,json=pushTiming,proto3" json:"push_timing,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuiltImage) Reset() { *m = BuiltImage{} } +func (m *BuiltImage) String() string { return proto.CompactTextString(m) } +func (*BuiltImage) ProtoMessage() {} +func (*BuiltImage) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{5} +} +func (m *BuiltImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuiltImage.Unmarshal(m, b) +} +func (m *BuiltImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuiltImage.Marshal(b, m, deterministic) +} +func (dst *BuiltImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuiltImage.Merge(dst, src) +} +func (m *BuiltImage) XXX_Size() int { + return xxx_messageInfo_BuiltImage.Size(m) +} +func (m *BuiltImage) XXX_DiscardUnknown() { + xxx_messageInfo_BuiltImage.DiscardUnknown(m) +} + +var xxx_messageInfo_BuiltImage proto.InternalMessageInfo + +func (m *BuiltImage) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BuiltImage) GetDigest() string { + if m != nil { + return m.Digest + } + return "" +} + +func (m *BuiltImage) GetPushTiming() *TimeSpan { + if m != nil { + return m.PushTiming + } + return nil +} + +// A step in the build pipeline. +type BuildStep struct { + // Required. The name of the container image that will run this particular + // build step. + // + // If the image is available in the host's Docker daemon's cache, it + // will be run directly. If not, the host will attempt to pull the image + // first, using the builder service account's credentials if necessary. + // + // The Docker daemon's cache will already have the latest versions of all of + // the officially supported build steps + // ([https://github.com/GoogleCloudPlatform/cloud-builders](https://github.com/GoogleCloudPlatform/cloud-builders)). + // The Docker daemon will also have cached many of the layers for some popular + // images, like "ubuntu", "debian", but they will be refreshed at the time you + // attempt to use them. + // + // If you built an image in a previous build step, it will be stored in the + // host's Docker daemon's cache and is available to use as the name for a + // later build step. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A list of environment variable definitions to be used when running a step. + // + // The elements are of the form "KEY=VALUE" for the environment variable "KEY" + // being given the value "VALUE". + Env []string `protobuf:"bytes,2,rep,name=env,proto3" json:"env,omitempty"` + // A list of arguments that will be presented to the step when it is started. + // + // If the image used to run the step's container has an entrypoint, the `args` + // are used as arguments to that entrypoint. If the image does not define + // an entrypoint, the first element in args is used as the entrypoint, + // and the remainder will be used as arguments. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Working directory to use when running this step's container. + // + // If this value is a relative path, it is relative to the build's working + // directory. 
If this value is absolute, it may be outside the build's working + // directory, in which case the contents of the path may not be persisted + // across build step executions, unless a `volume` for that path is specified. + // + // If the build specifies a `RepoSource` with `dir` and a step with a `dir`, + // which specifies an absolute path, the `RepoSource` `dir` is ignored for + // the step's execution. + Dir string `protobuf:"bytes,4,opt,name=dir,proto3" json:"dir,omitempty"` + // Unique identifier for this build step, used in `wait_for` to + // reference this build step as a dependency. + Id string `protobuf:"bytes,5,opt,name=id,proto3" json:"id,omitempty"` + // The ID(s) of the step(s) that this build step depends on. + // This build step will not start until all the build steps in `wait_for` + // have completed successfully. If `wait_for` is empty, this build step will + // start when all previous build steps in the `Build.Steps` list have + // completed successfully. + WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor,proto3" json:"wait_for,omitempty"` + // Entrypoint to be used instead of the build step image's default entrypoint. + // If unset, the image's default entrypoint is used. + Entrypoint string `protobuf:"bytes,7,opt,name=entrypoint,proto3" json:"entrypoint,omitempty"` + // A list of environment variables which are encrypted using a Cloud Key + // Management Service crypto key. These values must be specified in the + // build's `Secret`. + SecretEnv []string `protobuf:"bytes,8,rep,name=secret_env,json=secretEnv,proto3" json:"secret_env,omitempty"` + // List of volumes to mount into the build step. + // + // Each volume is created as an empty volume prior to execution of the + // build step. Upon completion of the build, volumes and their contents are + // discarded. + // + // Using a named volume in only one step is not valid as it is indicative + // of a build request with an incorrect configuration. + Volumes []*Volume `protobuf:"bytes,9,rep,name=volumes,proto3" json:"volumes,omitempty"` + // Output only. Stores timing information for executing this build step. + Timing *TimeSpan `protobuf:"bytes,10,opt,name=timing,proto3" json:"timing,omitempty"` + // Output only. Stores timing information for pulling this build step's + // builder image only. + PullTiming *TimeSpan `protobuf:"bytes,13,opt,name=pull_timing,json=pullTiming,proto3" json:"pull_timing,omitempty"` + // Time limit for executing this build step. If not defined, the step has no + // time limit and will be allowed to continue to run until either it completes + // or the build itself times out. + Timeout *duration.Duration `protobuf:"bytes,11,opt,name=timeout,proto3" json:"timeout,omitempty"` + // Output only. Status of the build step. At this time, build step status is + // only updated on build completion; step status is not updated in real-time + // as the build progresses. 
+ Status Build_Status `protobuf:"varint,12,opt,name=status,proto3,enum=google.devtools.cloudbuild.v1.Build_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildStep) Reset() { *m = BuildStep{} } +func (m *BuildStep) String() string { return proto.CompactTextString(m) } +func (*BuildStep) ProtoMessage() {} +func (*BuildStep) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{6} +} +func (m *BuildStep) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildStep.Unmarshal(m, b) +} +func (m *BuildStep) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildStep.Marshal(b, m, deterministic) +} +func (dst *BuildStep) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildStep.Merge(dst, src) +} +func (m *BuildStep) XXX_Size() int { + return xxx_messageInfo_BuildStep.Size(m) +} +func (m *BuildStep) XXX_DiscardUnknown() { + xxx_messageInfo_BuildStep.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildStep proto.InternalMessageInfo + +func (m *BuildStep) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BuildStep) GetEnv() []string { + if m != nil { + return m.Env + } + return nil +} + +func (m *BuildStep) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *BuildStep) GetDir() string { + if m != nil { + return m.Dir + } + return "" +} + +func (m *BuildStep) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *BuildStep) GetWaitFor() []string { + if m != nil { + return m.WaitFor + } + return nil +} + +func (m *BuildStep) GetEntrypoint() string { + if m != nil { + return m.Entrypoint + } + return "" +} + +func (m *BuildStep) GetSecretEnv() []string { + if m != nil { + return m.SecretEnv + } + return nil +} + +func (m *BuildStep) GetVolumes() []*Volume { + if m != nil { + return m.Volumes + } + return nil +} + +func (m *BuildStep) GetTiming() *TimeSpan { + if m != nil { + return m.Timing + } + return nil +} + +func (m *BuildStep) GetPullTiming() *TimeSpan { + if m != nil { + return m.PullTiming + } + return nil +} + +func (m *BuildStep) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *BuildStep) GetStatus() Build_Status { + if m != nil { + return m.Status + } + return Build_STATUS_UNKNOWN +} + +// Volume describes a Docker container volume which is mounted into build steps +// in order to persist files across build step execution. +type Volume struct { + // Name of the volume to mount. + // + // Volume names must be unique per build step and must be valid names for + // Docker volumes. Each named volume must be used by at least two build steps. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Path at which to mount the volume. + // + // Paths must be absolute and cannot conflict with other volume paths on the + // same build step or with certain reserved volume paths. 
+ Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Volume) Reset() { *m = Volume{} } +func (m *Volume) String() string { return proto.CompactTextString(m) } +func (*Volume) ProtoMessage() {} +func (*Volume) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{7} +} +func (m *Volume) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Volume.Unmarshal(m, b) +} +func (m *Volume) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Volume.Marshal(b, m, deterministic) +} +func (dst *Volume) XXX_Merge(src proto.Message) { + xxx_messageInfo_Volume.Merge(dst, src) +} +func (m *Volume) XXX_Size() int { + return xxx_messageInfo_Volume.Size(m) +} +func (m *Volume) XXX_DiscardUnknown() { + xxx_messageInfo_Volume.DiscardUnknown(m) +} + +var xxx_messageInfo_Volume proto.InternalMessageInfo + +func (m *Volume) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Volume) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +// Artifacts created by the build pipeline. +type Results struct { + // Container images that were built as a part of the build. + Images []*BuiltImage `protobuf:"bytes,2,rep,name=images,proto3" json:"images,omitempty"` + // List of build step digests, in the order corresponding to build step + // indices. + BuildStepImages []string `protobuf:"bytes,3,rep,name=build_step_images,json=buildStepImages,proto3" json:"build_step_images,omitempty"` + // Path to the artifact manifest. Only populated when artifacts are uploaded. + ArtifactManifest string `protobuf:"bytes,4,opt,name=artifact_manifest,json=artifactManifest,proto3" json:"artifact_manifest,omitempty"` + // Number of artifacts uploaded. Only populated when artifacts are uploaded. + NumArtifacts int64 `protobuf:"varint,5,opt,name=num_artifacts,json=numArtifacts,proto3" json:"num_artifacts,omitempty"` + // List of build step outputs, produced by builder images, in the order + // corresponding to build step indices. + // + // [Cloud Builders](https://cloud.google.com/cloud-build/docs/cloud-builders) + // can produce this output by writing to `$BUILDER_OUTPUT/output`. + // Only the first 4KB of data is stored. 
+ BuildStepOutputs [][]byte `protobuf:"bytes,6,rep,name=build_step_outputs,json=buildStepOutputs,proto3" json:"build_step_outputs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Results) Reset() { *m = Results{} } +func (m *Results) String() string { return proto.CompactTextString(m) } +func (*Results) ProtoMessage() {} +func (*Results) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{8} +} +func (m *Results) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Results.Unmarshal(m, b) +} +func (m *Results) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Results.Marshal(b, m, deterministic) +} +func (dst *Results) XXX_Merge(src proto.Message) { + xxx_messageInfo_Results.Merge(dst, src) +} +func (m *Results) XXX_Size() int { + return xxx_messageInfo_Results.Size(m) +} +func (m *Results) XXX_DiscardUnknown() { + xxx_messageInfo_Results.DiscardUnknown(m) +} + +var xxx_messageInfo_Results proto.InternalMessageInfo + +func (m *Results) GetImages() []*BuiltImage { + if m != nil { + return m.Images + } + return nil +} + +func (m *Results) GetBuildStepImages() []string { + if m != nil { + return m.BuildStepImages + } + return nil +} + +func (m *Results) GetArtifactManifest() string { + if m != nil { + return m.ArtifactManifest + } + return "" +} + +func (m *Results) GetNumArtifacts() int64 { + if m != nil { + return m.NumArtifacts + } + return 0 +} + +func (m *Results) GetBuildStepOutputs() [][]byte { + if m != nil { + return m.BuildStepOutputs + } + return nil +} + +// An artifact that was uploaded during a build. This +// is a single record in the artifact manifest JSON file. +type ArtifactResult struct { + // The path of an artifact in a Google Cloud Storage bucket, with the + // generation number. For example, + // `gs://mybucket/path/to/output.jar#generation`. + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // The file hash of the artifact. + FileHash []*FileHashes `protobuf:"bytes,2,rep,name=file_hash,json=fileHash,proto3" json:"file_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArtifactResult) Reset() { *m = ArtifactResult{} } +func (m *ArtifactResult) String() string { return proto.CompactTextString(m) } +func (*ArtifactResult) ProtoMessage() {} +func (*ArtifactResult) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{9} +} +func (m *ArtifactResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArtifactResult.Unmarshal(m, b) +} +func (m *ArtifactResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArtifactResult.Marshal(b, m, deterministic) +} +func (dst *ArtifactResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArtifactResult.Merge(dst, src) +} +func (m *ArtifactResult) XXX_Size() int { + return xxx_messageInfo_ArtifactResult.Size(m) +} +func (m *ArtifactResult) XXX_DiscardUnknown() { + xxx_messageInfo_ArtifactResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ArtifactResult proto.InternalMessageInfo + +func (m *ArtifactResult) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *ArtifactResult) GetFileHash() []*FileHashes { + if m != nil { + return m.FileHash + } + return nil +} + +// A build resource in the Cloud Build API. 
+// +// At a high level, a `Build` describes where to find source code, how to build +// it (for example, the builder image to run on the source), and where to store +// the built artifacts. +// +// Fields can include the following variables, which will be expanded when the +// build is created: +// +// - $PROJECT_ID: the project ID of the build. +// - $BUILD_ID: the autogenerated ID of the build. +// - $REPO_NAME: the source repository name specified by RepoSource. +// - $BRANCH_NAME: the branch name specified by RepoSource. +// - $TAG_NAME: the tag name specified by RepoSource. +// - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by RepoSource or +// resolved from the specified branch or tag. +// - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. +type Build struct { + // Output only. Unique identifier of the build. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Output only. ID of the project. + ProjectId string `protobuf:"bytes,16,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Output only. Status of the build. + Status Build_Status `protobuf:"varint,2,opt,name=status,proto3,enum=google.devtools.cloudbuild.v1.Build_Status" json:"status,omitempty"` + // Output only. Customer-readable message about the current status. + StatusDetail string `protobuf:"bytes,24,opt,name=status_detail,json=statusDetail,proto3" json:"status_detail,omitempty"` + // The location of the source files to build. + Source *Source `protobuf:"bytes,3,opt,name=source,proto3" json:"source,omitempty"` + // Required. The operations to be performed on the workspace. + Steps []*BuildStep `protobuf:"bytes,11,rep,name=steps,proto3" json:"steps,omitempty"` + // Output only. Results of the build. + Results *Results `protobuf:"bytes,10,opt,name=results,proto3" json:"results,omitempty"` + // Output only. Time at which the request to create the build was received. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. Time at which execution of the build was started. + StartTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Output only. Time at which execution of the build was finished. + // + // The difference between finish_time and start_time is the duration of the + // build's execution. + FinishTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + // Amount of time that this build should be allowed to run, to second + // granularity. If this amount of time elapses, work on the build will cease + // and the build status will be `TIMEOUT`. + // + // Default time is ten minutes. + Timeout *duration.Duration `protobuf:"bytes,12,opt,name=timeout,proto3" json:"timeout,omitempty"` + // A list of images to be pushed upon the successful completion of all build + // steps. + // + // The images are pushed using the builder service account's credentials. + // + // The digests of the pushed images will be stored in the `Build` resource's + // results field. + // + // If any of the images fail to be pushed, the build status is marked + // `FAILURE`. + Images []string `protobuf:"bytes,13,rep,name=images,proto3" json:"images,omitempty"` + // Artifacts produced by the build that should be uploaded upon + // successful completion of all build steps. 
+ Artifacts *Artifacts `protobuf:"bytes,37,opt,name=artifacts,proto3" json:"artifacts,omitempty"` + // Google Cloud Storage bucket where logs should be written (see + // [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + // Logs file names will be of the format `${logs_bucket}/log-${build_id}.txt`. + LogsBucket string `protobuf:"bytes,19,opt,name=logs_bucket,json=logsBucket,proto3" json:"logs_bucket,omitempty"` + // Output only. A permanent fixed identifier for source. + SourceProvenance *SourceProvenance `protobuf:"bytes,21,opt,name=source_provenance,json=sourceProvenance,proto3" json:"source_provenance,omitempty"` + // Output only. The ID of the `BuildTrigger` that triggered this build, if it + // was triggered automatically. + BuildTriggerId string `protobuf:"bytes,22,opt,name=build_trigger_id,json=buildTriggerId,proto3" json:"build_trigger_id,omitempty"` + // Special options for this build. + Options *BuildOptions `protobuf:"bytes,23,opt,name=options,proto3" json:"options,omitempty"` + // Output only. URL to logs for this build in Google Cloud Console. + LogUrl string `protobuf:"bytes,25,opt,name=log_url,json=logUrl,proto3" json:"log_url,omitempty"` + // Substitutions data for `Build` resource. + Substitutions map[string]string `protobuf:"bytes,29,rep,name=substitutions,proto3" json:"substitutions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Tags for annotation of a `Build`. These are not docker tags. + Tags []string `protobuf:"bytes,31,rep,name=tags,proto3" json:"tags,omitempty"` + // Secrets to decrypt using Cloud Key Management Service. + Secrets []*Secret `protobuf:"bytes,32,rep,name=secrets,proto3" json:"secrets,omitempty"` + // Output only. Stores timing information for phases of the build. Valid keys + // are: + // + // * BUILD: time to execute all build steps + // * PUSH: time to push all specified images. + // * FETCHSOURCE: time to fetch source. + // + // If the build does not specify source or images, + // these keys will not be included. 
+ Timing map[string]*TimeSpan `protobuf:"bytes,33,rep,name=timing,proto3" json:"timing,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Build) Reset() { *m = Build{} } +func (m *Build) String() string { return proto.CompactTextString(m) } +func (*Build) ProtoMessage() {} +func (*Build) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{10} +} +func (m *Build) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Build.Unmarshal(m, b) +} +func (m *Build) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Build.Marshal(b, m, deterministic) +} +func (dst *Build) XXX_Merge(src proto.Message) { + xxx_messageInfo_Build.Merge(dst, src) +} +func (m *Build) XXX_Size() int { + return xxx_messageInfo_Build.Size(m) +} +func (m *Build) XXX_DiscardUnknown() { + xxx_messageInfo_Build.DiscardUnknown(m) +} + +var xxx_messageInfo_Build proto.InternalMessageInfo + +func (m *Build) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Build) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Build) GetStatus() Build_Status { + if m != nil { + return m.Status + } + return Build_STATUS_UNKNOWN +} + +func (m *Build) GetStatusDetail() string { + if m != nil { + return m.StatusDetail + } + return "" +} + +func (m *Build) GetSource() *Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Build) GetSteps() []*BuildStep { + if m != nil { + return m.Steps + } + return nil +} + +func (m *Build) GetResults() *Results { + if m != nil { + return m.Results + } + return nil +} + +func (m *Build) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Build) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Build) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +func (m *Build) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *Build) GetImages() []string { + if m != nil { + return m.Images + } + return nil +} + +func (m *Build) GetArtifacts() *Artifacts { + if m != nil { + return m.Artifacts + } + return nil +} + +func (m *Build) GetLogsBucket() string { + if m != nil { + return m.LogsBucket + } + return "" +} + +func (m *Build) GetSourceProvenance() *SourceProvenance { + if m != nil { + return m.SourceProvenance + } + return nil +} + +func (m *Build) GetBuildTriggerId() string { + if m != nil { + return m.BuildTriggerId + } + return "" +} + +func (m *Build) GetOptions() *BuildOptions { + if m != nil { + return m.Options + } + return nil +} + +func (m *Build) GetLogUrl() string { + if m != nil { + return m.LogUrl + } + return "" +} + +func (m *Build) GetSubstitutions() map[string]string { + if m != nil { + return m.Substitutions + } + return nil +} + +func (m *Build) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +func (m *Build) GetSecrets() []*Secret { + if m != nil { + return m.Secrets + } + return nil +} + +func (m *Build) GetTiming() map[string]*TimeSpan { + if m != nil { + return m.Timing + } + return nil +} + +// Artifacts produced by a build that should be uploaded upon +// successful completion of all build steps. 
+type Artifacts struct { + // A list of images to be pushed upon the successful completion of all build + // steps. + // + // The images will be pushed using the builder service account's credentials. + // + // The digests of the pushed images will be stored in the Build resource's + // results field. + // + // If any of the images fail to be pushed, the build is marked FAILURE. + Images []string `protobuf:"bytes,1,rep,name=images,proto3" json:"images,omitempty"` + // A list of objects to be uploaded to Cloud Storage upon successful + // completion of all build steps. + // + // Files in the workspace matching specified paths globs will be uploaded to + // the specified Cloud Storage location using the builder service account's + // credentials. + // + // The location and generation of the uploaded objects will be stored in the + // Build resource's results field. + // + // If any objects fail to be pushed, the build is marked FAILURE. + Objects *Artifacts_ArtifactObjects `protobuf:"bytes,2,opt,name=objects,proto3" json:"objects,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Artifacts) Reset() { *m = Artifacts{} } +func (m *Artifacts) String() string { return proto.CompactTextString(m) } +func (*Artifacts) ProtoMessage() {} +func (*Artifacts) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{11} +} +func (m *Artifacts) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifacts.Unmarshal(m, b) +} +func (m *Artifacts) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifacts.Marshal(b, m, deterministic) +} +func (dst *Artifacts) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifacts.Merge(dst, src) +} +func (m *Artifacts) XXX_Size() int { + return xxx_messageInfo_Artifacts.Size(m) +} +func (m *Artifacts) XXX_DiscardUnknown() { + xxx_messageInfo_Artifacts.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifacts proto.InternalMessageInfo + +func (m *Artifacts) GetImages() []string { + if m != nil { + return m.Images + } + return nil +} + +func (m *Artifacts) GetObjects() *Artifacts_ArtifactObjects { + if m != nil { + return m.Objects + } + return nil +} + +// Files in the workspace to upload to Cloud Storage upon successful +// completion of all build steps. +type Artifacts_ArtifactObjects struct { + // Cloud Storage bucket and optional object path, in the form + // "gs://bucket/path/to/somewhere/". (see [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + // + // Files in the workspace matching any path pattern will be uploaded to + // Cloud Storage with this location as a prefix. + Location string `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // Path globs used to match files in the build's workspace. + Paths []string `protobuf:"bytes,2,rep,name=paths,proto3" json:"paths,omitempty"` + // Output only. Stores timing information for pushing all artifact objects. 
+ Timing *TimeSpan `protobuf:"bytes,3,opt,name=timing,proto3" json:"timing,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Artifacts_ArtifactObjects) Reset() { *m = Artifacts_ArtifactObjects{} } +func (m *Artifacts_ArtifactObjects) String() string { return proto.CompactTextString(m) } +func (*Artifacts_ArtifactObjects) ProtoMessage() {} +func (*Artifacts_ArtifactObjects) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{11, 0} +} +func (m *Artifacts_ArtifactObjects) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifacts_ArtifactObjects.Unmarshal(m, b) +} +func (m *Artifacts_ArtifactObjects) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifacts_ArtifactObjects.Marshal(b, m, deterministic) +} +func (dst *Artifacts_ArtifactObjects) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifacts_ArtifactObjects.Merge(dst, src) +} +func (m *Artifacts_ArtifactObjects) XXX_Size() int { + return xxx_messageInfo_Artifacts_ArtifactObjects.Size(m) +} +func (m *Artifacts_ArtifactObjects) XXX_DiscardUnknown() { + xxx_messageInfo_Artifacts_ArtifactObjects.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifacts_ArtifactObjects proto.InternalMessageInfo + +func (m *Artifacts_ArtifactObjects) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *Artifacts_ArtifactObjects) GetPaths() []string { + if m != nil { + return m.Paths + } + return nil +} + +func (m *Artifacts_ArtifactObjects) GetTiming() *TimeSpan { + if m != nil { + return m.Timing + } + return nil +} + +// Start and end times for a build execution phase. +type TimeSpan struct { + // Start of time span. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End of time span. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeSpan) Reset() { *m = TimeSpan{} } +func (m *TimeSpan) String() string { return proto.CompactTextString(m) } +func (*TimeSpan) ProtoMessage() {} +func (*TimeSpan) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{12} +} +func (m *TimeSpan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeSpan.Unmarshal(m, b) +} +func (m *TimeSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeSpan.Marshal(b, m, deterministic) +} +func (dst *TimeSpan) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeSpan.Merge(dst, src) +} +func (m *TimeSpan) XXX_Size() int { + return xxx_messageInfo_TimeSpan.Size(m) +} +func (m *TimeSpan) XXX_DiscardUnknown() { + xxx_messageInfo_TimeSpan.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeSpan proto.InternalMessageInfo + +func (m *TimeSpan) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimeSpan) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Metadata for build operations. +type BuildOperationMetadata struct { + // The build that the operation is tracking. 
+ Build *Build `protobuf:"bytes,1,opt,name=build,proto3" json:"build,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildOperationMetadata) Reset() { *m = BuildOperationMetadata{} } +func (m *BuildOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*BuildOperationMetadata) ProtoMessage() {} +func (*BuildOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{13} +} +func (m *BuildOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildOperationMetadata.Unmarshal(m, b) +} +func (m *BuildOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *BuildOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildOperationMetadata.Merge(dst, src) +} +func (m *BuildOperationMetadata) XXX_Size() int { + return xxx_messageInfo_BuildOperationMetadata.Size(m) +} +func (m *BuildOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_BuildOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildOperationMetadata proto.InternalMessageInfo + +func (m *BuildOperationMetadata) GetBuild() *Build { + if m != nil { + return m.Build + } + return nil +} + +// Provenance of the source. Ways to find the original source, or verify that +// some source was used for this build. +type SourceProvenance struct { + // A copy of the build's `source.storage_source`, if exists, with any + // generations resolved. + ResolvedStorageSource *StorageSource `protobuf:"bytes,3,opt,name=resolved_storage_source,json=resolvedStorageSource,proto3" json:"resolved_storage_source,omitempty"` + // A copy of the build's `source.repo_source`, if exists, with any + // revisions resolved. + ResolvedRepoSource *RepoSource `protobuf:"bytes,6,opt,name=resolved_repo_source,json=resolvedRepoSource,proto3" json:"resolved_repo_source,omitempty"` + // Output only. Hash(es) of the build source, which can be used to verify that + // the original source integrity was maintained in the build. Note that + // `FileHashes` will only be populated if `BuildOptions` has requested a + // `SourceProvenanceHash`. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. + // + // If the build source came in a single package such as a gzipped tarfile + // (`.tar.gz`), the `FileHash` will be for the single path to that file. 
+ FileHashes map[string]*FileHashes `protobuf:"bytes,4,rep,name=file_hashes,json=fileHashes,proto3" json:"file_hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceProvenance) Reset() { *m = SourceProvenance{} } +func (m *SourceProvenance) String() string { return proto.CompactTextString(m) } +func (*SourceProvenance) ProtoMessage() {} +func (*SourceProvenance) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{14} +} +func (m *SourceProvenance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceProvenance.Unmarshal(m, b) +} +func (m *SourceProvenance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceProvenance.Marshal(b, m, deterministic) +} +func (dst *SourceProvenance) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceProvenance.Merge(dst, src) +} +func (m *SourceProvenance) XXX_Size() int { + return xxx_messageInfo_SourceProvenance.Size(m) +} +func (m *SourceProvenance) XXX_DiscardUnknown() { + xxx_messageInfo_SourceProvenance.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceProvenance proto.InternalMessageInfo + +func (m *SourceProvenance) GetResolvedStorageSource() *StorageSource { + if m != nil { + return m.ResolvedStorageSource + } + return nil +} + +func (m *SourceProvenance) GetResolvedRepoSource() *RepoSource { + if m != nil { + return m.ResolvedRepoSource + } + return nil +} + +func (m *SourceProvenance) GetFileHashes() map[string]*FileHashes { + if m != nil { + return m.FileHashes + } + return nil +} + +// Container message for hashes of byte content of files, used in +// SourceProvenance messages to verify integrity of source input to the build. +type FileHashes struct { + // Collection of file hashes. + FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash,proto3" json:"file_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileHashes) Reset() { *m = FileHashes{} } +func (m *FileHashes) String() string { return proto.CompactTextString(m) } +func (*FileHashes) ProtoMessage() {} +func (*FileHashes) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{15} +} +func (m *FileHashes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileHashes.Unmarshal(m, b) +} +func (m *FileHashes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileHashes.Marshal(b, m, deterministic) +} +func (dst *FileHashes) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileHashes.Merge(dst, src) +} +func (m *FileHashes) XXX_Size() int { + return xxx_messageInfo_FileHashes.Size(m) +} +func (m *FileHashes) XXX_DiscardUnknown() { + xxx_messageInfo_FileHashes.DiscardUnknown(m) +} + +var xxx_messageInfo_FileHashes proto.InternalMessageInfo + +func (m *FileHashes) GetFileHash() []*Hash { + if m != nil { + return m.FileHash + } + return nil +} + +// Container message for hash values. +type Hash struct { + // The type of hash that was performed. + Type Hash_HashType `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.cloudbuild.v1.Hash_HashType" json:"type,omitempty"` + // The hash value. 
+ Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{16} +} +func (m *Hash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Hash.Unmarshal(m, b) +} +func (m *Hash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Hash.Marshal(b, m, deterministic) +} +func (dst *Hash) XXX_Merge(src proto.Message) { + xxx_messageInfo_Hash.Merge(dst, src) +} +func (m *Hash) XXX_Size() int { + return xxx_messageInfo_Hash.Size(m) +} +func (m *Hash) XXX_DiscardUnknown() { + xxx_messageInfo_Hash.DiscardUnknown(m) +} + +var xxx_messageInfo_Hash proto.InternalMessageInfo + +func (m *Hash) GetType() Hash_HashType { + if m != nil { + return m.Type + } + return Hash_NONE +} + +func (m *Hash) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// Pairs a set of secret environment variables containing encrypted +// values with the Cloud KMS key to use to decrypt the value. +type Secret struct { + // Cloud KMS key name to use to decrypt these envs. + KmsKeyName string `protobuf:"bytes,1,opt,name=kms_key_name,json=kmsKeyName,proto3" json:"kms_key_name,omitempty"` + // Map of environment variable name to its encrypted value. + // + // Secret environment variables must be unique across all of a build's + // secrets, and must be used by at least one build step. Values can be at most + // 64 KB in size. There can be at most 100 secret values across all of a + // build's secrets. + SecretEnv map[string][]byte `protobuf:"bytes,3,rep,name=secret_env,json=secretEnv,proto3" json:"secret_env,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Secret) Reset() { *m = Secret{} } +func (m *Secret) String() string { return proto.CompactTextString(m) } +func (*Secret) ProtoMessage() {} +func (*Secret) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{17} +} +func (m *Secret) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Secret.Unmarshal(m, b) +} +func (m *Secret) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Secret.Marshal(b, m, deterministic) +} +func (dst *Secret) XXX_Merge(src proto.Message) { + xxx_messageInfo_Secret.Merge(dst, src) +} +func (m *Secret) XXX_Size() int { + return xxx_messageInfo_Secret.Size(m) +} +func (m *Secret) XXX_DiscardUnknown() { + xxx_messageInfo_Secret.DiscardUnknown(m) +} + +var xxx_messageInfo_Secret proto.InternalMessageInfo + +func (m *Secret) GetKmsKeyName() string { + if m != nil { + return m.KmsKeyName + } + return "" +} + +func (m *Secret) GetSecretEnv() map[string][]byte { + if m != nil { + return m.SecretEnv + } + return nil +} + +// Request to create a new build. +type CreateBuildRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Build resource to create. 
+ Build *Build `protobuf:"bytes,2,opt,name=build,proto3" json:"build,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateBuildRequest) Reset() { *m = CreateBuildRequest{} } +func (m *CreateBuildRequest) String() string { return proto.CompactTextString(m) } +func (*CreateBuildRequest) ProtoMessage() {} +func (*CreateBuildRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{18} +} +func (m *CreateBuildRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateBuildRequest.Unmarshal(m, b) +} +func (m *CreateBuildRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateBuildRequest.Marshal(b, m, deterministic) +} +func (dst *CreateBuildRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateBuildRequest.Merge(dst, src) +} +func (m *CreateBuildRequest) XXX_Size() int { + return xxx_messageInfo_CreateBuildRequest.Size(m) +} +func (m *CreateBuildRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateBuildRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateBuildRequest proto.InternalMessageInfo + +func (m *CreateBuildRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateBuildRequest) GetBuild() *Build { + if m != nil { + return m.Build + } + return nil +} + +// Request to get a build. +type GetBuildRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the build. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBuildRequest) Reset() { *m = GetBuildRequest{} } +func (m *GetBuildRequest) String() string { return proto.CompactTextString(m) } +func (*GetBuildRequest) ProtoMessage() {} +func (*GetBuildRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{19} +} +func (m *GetBuildRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBuildRequest.Unmarshal(m, b) +} +func (m *GetBuildRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBuildRequest.Marshal(b, m, deterministic) +} +func (dst *GetBuildRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBuildRequest.Merge(dst, src) +} +func (m *GetBuildRequest) XXX_Size() int { + return xxx_messageInfo_GetBuildRequest.Size(m) +} +func (m *GetBuildRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBuildRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBuildRequest proto.InternalMessageInfo + +func (m *GetBuildRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetBuildRequest) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Request to list builds. +type ListBuildsRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Number of results to return in the list. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The raw filter text to constrain the results. + Filter string `protobuf:"bytes,8,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBuildsRequest) Reset() { *m = ListBuildsRequest{} } +func (m *ListBuildsRequest) String() string { return proto.CompactTextString(m) } +func (*ListBuildsRequest) ProtoMessage() {} +func (*ListBuildsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{20} +} +func (m *ListBuildsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBuildsRequest.Unmarshal(m, b) +} +func (m *ListBuildsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBuildsRequest.Marshal(b, m, deterministic) +} +func (dst *ListBuildsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBuildsRequest.Merge(dst, src) +} +func (m *ListBuildsRequest) XXX_Size() int { + return xxx_messageInfo_ListBuildsRequest.Size(m) +} +func (m *ListBuildsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBuildsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBuildsRequest proto.InternalMessageInfo + +func (m *ListBuildsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListBuildsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListBuildsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListBuildsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response including listed builds. +type ListBuildsResponse struct { + // Builds will be sorted by `create_time`, descending. + Builds []*Build `protobuf:"bytes,1,rep,name=builds,proto3" json:"builds,omitempty"` + // Token to receive the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBuildsResponse) Reset() { *m = ListBuildsResponse{} } +func (m *ListBuildsResponse) String() string { return proto.CompactTextString(m) } +func (*ListBuildsResponse) ProtoMessage() {} +func (*ListBuildsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{21} +} +func (m *ListBuildsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBuildsResponse.Unmarshal(m, b) +} +func (m *ListBuildsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBuildsResponse.Marshal(b, m, deterministic) +} +func (dst *ListBuildsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBuildsResponse.Merge(dst, src) +} +func (m *ListBuildsResponse) XXX_Size() int { + return xxx_messageInfo_ListBuildsResponse.Size(m) +} +func (m *ListBuildsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBuildsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBuildsResponse proto.InternalMessageInfo + +func (m *ListBuildsResponse) GetBuilds() []*Build { + if m != nil { + return m.Builds + } + return nil +} + +func (m *ListBuildsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to cancel an ongoing build. +type CancelBuildRequest struct { + // ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the build. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelBuildRequest) Reset() { *m = CancelBuildRequest{} } +func (m *CancelBuildRequest) String() string { return proto.CompactTextString(m) } +func (*CancelBuildRequest) ProtoMessage() {} +func (*CancelBuildRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{22} +} +func (m *CancelBuildRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelBuildRequest.Unmarshal(m, b) +} +func (m *CancelBuildRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelBuildRequest.Marshal(b, m, deterministic) +} +func (dst *CancelBuildRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelBuildRequest.Merge(dst, src) +} +func (m *CancelBuildRequest) XXX_Size() int { + return xxx_messageInfo_CancelBuildRequest.Size(m) +} +func (m *CancelBuildRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelBuildRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelBuildRequest proto.InternalMessageInfo + +func (m *CancelBuildRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CancelBuildRequest) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Configuration for an automated build in response to source repository +// changes. +type BuildTrigger struct { + // Output only. Unique identifier of the trigger. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Human-readable description of this trigger. 
+ Description string `protobuf:"bytes,10,opt,name=description,proto3" json:"description,omitempty"` + // Template describing the types of source changes to trigger a build. + // + // Branch and tag names in trigger templates are interpreted as regular + // expressions. Any branch or tag change that matches that regular expression + // will trigger a build. + TriggerTemplate *RepoSource `protobuf:"bytes,7,opt,name=trigger_template,json=triggerTemplate,proto3" json:"trigger_template,omitempty"` + // Template describing the Build request to make when the trigger is matched. + // + // Types that are valid to be assigned to BuildTemplate: + // *BuildTrigger_Build + // *BuildTrigger_Filename + BuildTemplate isBuildTrigger_BuildTemplate `protobuf_oneof:"build_template"` + // Output only. Time when the trigger was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // If true, the trigger will never result in a build. + Disabled bool `protobuf:"varint,9,opt,name=disabled,proto3" json:"disabled,omitempty"` + // Substitutions data for Build resource. + Substitutions map[string]string `protobuf:"bytes,11,rep,name=substitutions,proto3" json:"substitutions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // ignored_files and included_files are file glob matches using + // http://godoc/pkg/path/filepath#Match extended with support for "**". + // + // If ignored_files and changed files are both empty, then they are + // not used to determine whether or not to trigger a build. + // + // If ignored_files is not empty, then we ignore any files that match + // any of the ignored_file globs. If the change has no files that are + // outside of the ignored_files globs, then we do not trigger a build. + IgnoredFiles []string `protobuf:"bytes,15,rep,name=ignored_files,json=ignoredFiles,proto3" json:"ignored_files,omitempty"` + // If any of the files altered in the commit pass the ignored_files + // filter and included_files is empty, then as far as this filter is + // concerned, we should trigger the build. + // + // If any of the files altered in the commit pass the ignored_files + // filter and included_files is not empty, then we make sure that at + // least one of those files matches a included_files glob. If not, + // then we do not trigger a build. 
+ IncludedFiles []string `protobuf:"bytes,16,rep,name=included_files,json=includedFiles,proto3" json:"included_files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildTrigger) Reset() { *m = BuildTrigger{} } +func (m *BuildTrigger) String() string { return proto.CompactTextString(m) } +func (*BuildTrigger) ProtoMessage() {} +func (*BuildTrigger) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{23} +} +func (m *BuildTrigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildTrigger.Unmarshal(m, b) +} +func (m *BuildTrigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildTrigger.Marshal(b, m, deterministic) +} +func (dst *BuildTrigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildTrigger.Merge(dst, src) +} +func (m *BuildTrigger) XXX_Size() int { + return xxx_messageInfo_BuildTrigger.Size(m) +} +func (m *BuildTrigger) XXX_DiscardUnknown() { + xxx_messageInfo_BuildTrigger.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildTrigger proto.InternalMessageInfo + +func (m *BuildTrigger) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *BuildTrigger) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *BuildTrigger) GetTriggerTemplate() *RepoSource { + if m != nil { + return m.TriggerTemplate + } + return nil +} + +type isBuildTrigger_BuildTemplate interface { + isBuildTrigger_BuildTemplate() +} + +type BuildTrigger_Build struct { + Build *Build `protobuf:"bytes,4,opt,name=build,proto3,oneof"` +} + +type BuildTrigger_Filename struct { + Filename string `protobuf:"bytes,8,opt,name=filename,proto3,oneof"` +} + +func (*BuildTrigger_Build) isBuildTrigger_BuildTemplate() {} + +func (*BuildTrigger_Filename) isBuildTrigger_BuildTemplate() {} + +func (m *BuildTrigger) GetBuildTemplate() isBuildTrigger_BuildTemplate { + if m != nil { + return m.BuildTemplate + } + return nil +} + +func (m *BuildTrigger) GetBuild() *Build { + if x, ok := m.GetBuildTemplate().(*BuildTrigger_Build); ok { + return x.Build + } + return nil +} + +func (m *BuildTrigger) GetFilename() string { + if x, ok := m.GetBuildTemplate().(*BuildTrigger_Filename); ok { + return x.Filename + } + return "" +} + +func (m *BuildTrigger) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *BuildTrigger) GetDisabled() bool { + if m != nil { + return m.Disabled + } + return false +} + +func (m *BuildTrigger) GetSubstitutions() map[string]string { + if m != nil { + return m.Substitutions + } + return nil +} + +func (m *BuildTrigger) GetIgnoredFiles() []string { + if m != nil { + return m.IgnoredFiles + } + return nil +} + +func (m *BuildTrigger) GetIncludedFiles() []string { + if m != nil { + return m.IncludedFiles + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BuildTrigger) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BuildTrigger_OneofMarshaler, _BuildTrigger_OneofUnmarshaler, _BuildTrigger_OneofSizer, []interface{}{ + (*BuildTrigger_Build)(nil), + (*BuildTrigger_Filename)(nil), + } +} + +func _BuildTrigger_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BuildTrigger) + // build_template + switch x := m.BuildTemplate.(type) { + case *BuildTrigger_Build: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Build); err != nil { + return err + } + case *BuildTrigger_Filename: + b.EncodeVarint(8<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Filename) + case nil: + default: + return fmt.Errorf("BuildTrigger.BuildTemplate has unexpected type %T", x) + } + return nil +} + +func _BuildTrigger_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BuildTrigger) + switch tag { + case 4: // build_template.build + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Build) + err := b.DecodeMessage(msg) + m.BuildTemplate = &BuildTrigger_Build{msg} + return true, err + case 8: // build_template.filename + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.BuildTemplate = &BuildTrigger_Filename{x} + return true, err + default: + return false, nil + } +} + +func _BuildTrigger_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BuildTrigger) + // build_template + switch x := m.BuildTemplate.(type) { + case *BuildTrigger_Build: + s := proto.Size(x.Build) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BuildTrigger_Filename: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Filename))) + n += len(x.Filename) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request to create a new `BuildTrigger`. +type CreateBuildTriggerRequest struct { + // ID of the project for which to configure automatic builds. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // `BuildTrigger` to create. 
+ Trigger *BuildTrigger `protobuf:"bytes,2,opt,name=trigger,proto3" json:"trigger,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateBuildTriggerRequest) Reset() { *m = CreateBuildTriggerRequest{} } +func (m *CreateBuildTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*CreateBuildTriggerRequest) ProtoMessage() {} +func (*CreateBuildTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{24} +} +func (m *CreateBuildTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateBuildTriggerRequest.Unmarshal(m, b) +} +func (m *CreateBuildTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateBuildTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *CreateBuildTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateBuildTriggerRequest.Merge(dst, src) +} +func (m *CreateBuildTriggerRequest) XXX_Size() int { + return xxx_messageInfo_CreateBuildTriggerRequest.Size(m) +} +func (m *CreateBuildTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateBuildTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateBuildTriggerRequest proto.InternalMessageInfo + +func (m *CreateBuildTriggerRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *CreateBuildTriggerRequest) GetTrigger() *BuildTrigger { + if m != nil { + return m.Trigger + } + return nil +} + +// Returns the `BuildTrigger` with the specified ID. +type GetBuildTriggerRequest struct { + // ID of the project that owns the trigger. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the `BuildTrigger` to get. + TriggerId string `protobuf:"bytes,2,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBuildTriggerRequest) Reset() { *m = GetBuildTriggerRequest{} } +func (m *GetBuildTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*GetBuildTriggerRequest) ProtoMessage() {} +func (*GetBuildTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{25} +} +func (m *GetBuildTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBuildTriggerRequest.Unmarshal(m, b) +} +func (m *GetBuildTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBuildTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *GetBuildTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBuildTriggerRequest.Merge(dst, src) +} +func (m *GetBuildTriggerRequest) XXX_Size() int { + return xxx_messageInfo_GetBuildTriggerRequest.Size(m) +} +func (m *GetBuildTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBuildTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBuildTriggerRequest proto.InternalMessageInfo + +func (m *GetBuildTriggerRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetBuildTriggerRequest) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +// Request to list existing `BuildTriggers`. +type ListBuildTriggersRequest struct { + // ID of the project for which to list BuildTriggers. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBuildTriggersRequest) Reset() { *m = ListBuildTriggersRequest{} } +func (m *ListBuildTriggersRequest) String() string { return proto.CompactTextString(m) } +func (*ListBuildTriggersRequest) ProtoMessage() {} +func (*ListBuildTriggersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{26} +} +func (m *ListBuildTriggersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBuildTriggersRequest.Unmarshal(m, b) +} +func (m *ListBuildTriggersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBuildTriggersRequest.Marshal(b, m, deterministic) +} +func (dst *ListBuildTriggersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBuildTriggersRequest.Merge(dst, src) +} +func (m *ListBuildTriggersRequest) XXX_Size() int { + return xxx_messageInfo_ListBuildTriggersRequest.Size(m) +} +func (m *ListBuildTriggersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBuildTriggersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBuildTriggersRequest proto.InternalMessageInfo + +func (m *ListBuildTriggersRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Response containing existing `BuildTriggers`. +type ListBuildTriggersResponse struct { + // `BuildTriggers` for the project, sorted by `create_time` descending. + Triggers []*BuildTrigger `protobuf:"bytes,1,rep,name=triggers,proto3" json:"triggers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBuildTriggersResponse) Reset() { *m = ListBuildTriggersResponse{} } +func (m *ListBuildTriggersResponse) String() string { return proto.CompactTextString(m) } +func (*ListBuildTriggersResponse) ProtoMessage() {} +func (*ListBuildTriggersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{27} +} +func (m *ListBuildTriggersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBuildTriggersResponse.Unmarshal(m, b) +} +func (m *ListBuildTriggersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBuildTriggersResponse.Marshal(b, m, deterministic) +} +func (dst *ListBuildTriggersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBuildTriggersResponse.Merge(dst, src) +} +func (m *ListBuildTriggersResponse) XXX_Size() int { + return xxx_messageInfo_ListBuildTriggersResponse.Size(m) +} +func (m *ListBuildTriggersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBuildTriggersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBuildTriggersResponse proto.InternalMessageInfo + +func (m *ListBuildTriggersResponse) GetTriggers() []*BuildTrigger { + if m != nil { + return m.Triggers + } + return nil +} + +// Request to delete a `BuildTrigger`. +type DeleteBuildTriggerRequest struct { + // ID of the project that owns the trigger. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the `BuildTrigger` to delete. 
+ TriggerId string `protobuf:"bytes,2,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteBuildTriggerRequest) Reset() { *m = DeleteBuildTriggerRequest{} } +func (m *DeleteBuildTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteBuildTriggerRequest) ProtoMessage() {} +func (*DeleteBuildTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{28} +} +func (m *DeleteBuildTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteBuildTriggerRequest.Unmarshal(m, b) +} +func (m *DeleteBuildTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteBuildTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteBuildTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteBuildTriggerRequest.Merge(dst, src) +} +func (m *DeleteBuildTriggerRequest) XXX_Size() int { + return xxx_messageInfo_DeleteBuildTriggerRequest.Size(m) +} +func (m *DeleteBuildTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteBuildTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteBuildTriggerRequest proto.InternalMessageInfo + +func (m *DeleteBuildTriggerRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *DeleteBuildTriggerRequest) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +// Request to update an existing `BuildTrigger`. +type UpdateBuildTriggerRequest struct { + // ID of the project that owns the trigger. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the `BuildTrigger` to update. + TriggerId string `protobuf:"bytes,2,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + // `BuildTrigger` to update. 
+ Trigger *BuildTrigger `protobuf:"bytes,3,opt,name=trigger,proto3" json:"trigger,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateBuildTriggerRequest) Reset() { *m = UpdateBuildTriggerRequest{} } +func (m *UpdateBuildTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateBuildTriggerRequest) ProtoMessage() {} +func (*UpdateBuildTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{29} +} +func (m *UpdateBuildTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateBuildTriggerRequest.Unmarshal(m, b) +} +func (m *UpdateBuildTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateBuildTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateBuildTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateBuildTriggerRequest.Merge(dst, src) +} +func (m *UpdateBuildTriggerRequest) XXX_Size() int { + return xxx_messageInfo_UpdateBuildTriggerRequest.Size(m) +} +func (m *UpdateBuildTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateBuildTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateBuildTriggerRequest proto.InternalMessageInfo + +func (m *UpdateBuildTriggerRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateBuildTriggerRequest) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +func (m *UpdateBuildTriggerRequest) GetTrigger() *BuildTrigger { + if m != nil { + return m.Trigger + } + return nil +} + +// Optional arguments to enable specific features of builds. +type BuildOptions struct { + // Requested hash for SourceProvenance. + SourceProvenanceHash []Hash_HashType `protobuf:"varint,1,rep,packed,name=source_provenance_hash,json=sourceProvenanceHash,proto3,enum=google.devtools.cloudbuild.v1.Hash_HashType" json:"source_provenance_hash,omitempty"` + // Requested verifiability options. + RequestedVerifyOption BuildOptions_VerifyOption `protobuf:"varint,2,opt,name=requested_verify_option,json=requestedVerifyOption,proto3,enum=google.devtools.cloudbuild.v1.BuildOptions_VerifyOption" json:"requested_verify_option,omitempty"` + // Compute Engine machine type on which to run the build. + MachineType BuildOptions_MachineType `protobuf:"varint,3,opt,name=machine_type,json=machineType,proto3,enum=google.devtools.cloudbuild.v1.BuildOptions_MachineType" json:"machine_type,omitempty"` + // Requested disk size for the VM that runs the build. Note that this is *NOT* + // "disk free"; some of the space will be used by the operating system and + // build utilities. Also note that this is the minimum disk size that will be + // allocated for the build -- the build may run with a larger disk than + // requested. At present, the maximum disk size is 1000GB; builds that request + // more than the maximum are rejected with an error. + DiskSizeGb int64 `protobuf:"varint,6,opt,name=disk_size_gb,json=diskSizeGb,proto3" json:"disk_size_gb,omitempty"` + // Option to specify behavior when there is an error in the substitution + // checks. 
+ SubstitutionOption BuildOptions_SubstitutionOption `protobuf:"varint,4,opt,name=substitution_option,json=substitutionOption,proto3,enum=google.devtools.cloudbuild.v1.BuildOptions_SubstitutionOption" json:"substitution_option,omitempty"` + // Option to define build log streaming behavior to Google Cloud + // Storage. + LogStreamingOption BuildOptions_LogStreamingOption `protobuf:"varint,5,opt,name=log_streaming_option,json=logStreamingOption,proto3,enum=google.devtools.cloudbuild.v1.BuildOptions_LogStreamingOption" json:"log_streaming_option,omitempty"` + // Option to specify a `WorkerPool` for the build. User specifies the pool + // with the format "[WORKERPOOL_PROJECT_ID]/[WORKERPOOL_NAME]". + // This is an experimental field. + WorkerPool string `protobuf:"bytes,7,opt,name=worker_pool,json=workerPool,proto3" json:"worker_pool,omitempty"` + // Option to specify the logging mode, which determines where the logs are + // stored. + Logging BuildOptions_LoggingMode `protobuf:"varint,11,opt,name=logging,proto3,enum=google.devtools.cloudbuild.v1.BuildOptions_LoggingMode" json:"logging,omitempty"` + // A list of global environment variable definitions that will exist for all + // build steps in this build. If a variable is defined in both globally and in + // a build step, the variable will use the build step value. + // + // The elements are of the form "KEY=VALUE" for the environment variable "KEY" + // being given the value "VALUE". + Env []string `protobuf:"bytes,12,rep,name=env,proto3" json:"env,omitempty"` + // A list of global environment variables, which are encrypted using a Cloud + // Key Management Service crypto key. These values must be specified in the + // build's `Secret`. These variables will be available to all build steps + // in this build. + SecretEnv []string `protobuf:"bytes,13,rep,name=secret_env,json=secretEnv,proto3" json:"secret_env,omitempty"` + // Global list of volumes to mount for ALL build steps + // + // Each volume is created as an empty volume prior to starting the build + // process. Upon completion of the build, volumes and their contents are + // discarded. Global volume names and paths cannot conflict with the volumes + // defined a build step. + // + // Using a global volume in a build with only one step is not valid as + // it is indicative of a build request with an incorrect configuration. 
+ Volumes []*Volume `protobuf:"bytes,14,rep,name=volumes,proto3" json:"volumes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildOptions) Reset() { *m = BuildOptions{} } +func (m *BuildOptions) String() string { return proto.CompactTextString(m) } +func (*BuildOptions) ProtoMessage() {} +func (*BuildOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_cloudbuild_bcd614f113dbc5d4, []int{30} +} +func (m *BuildOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildOptions.Unmarshal(m, b) +} +func (m *BuildOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildOptions.Marshal(b, m, deterministic) +} +func (dst *BuildOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildOptions.Merge(dst, src) +} +func (m *BuildOptions) XXX_Size() int { + return xxx_messageInfo_BuildOptions.Size(m) +} +func (m *BuildOptions) XXX_DiscardUnknown() { + xxx_messageInfo_BuildOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildOptions proto.InternalMessageInfo + +func (m *BuildOptions) GetSourceProvenanceHash() []Hash_HashType { + if m != nil { + return m.SourceProvenanceHash + } + return nil +} + +func (m *BuildOptions) GetRequestedVerifyOption() BuildOptions_VerifyOption { + if m != nil { + return m.RequestedVerifyOption + } + return BuildOptions_NOT_VERIFIED +} + +func (m *BuildOptions) GetMachineType() BuildOptions_MachineType { + if m != nil { + return m.MachineType + } + return BuildOptions_UNSPECIFIED +} + +func (m *BuildOptions) GetDiskSizeGb() int64 { + if m != nil { + return m.DiskSizeGb + } + return 0 +} + +func (m *BuildOptions) GetSubstitutionOption() BuildOptions_SubstitutionOption { + if m != nil { + return m.SubstitutionOption + } + return BuildOptions_MUST_MATCH +} + +func (m *BuildOptions) GetLogStreamingOption() BuildOptions_LogStreamingOption { + if m != nil { + return m.LogStreamingOption + } + return BuildOptions_STREAM_DEFAULT +} + +func (m *BuildOptions) GetWorkerPool() string { + if m != nil { + return m.WorkerPool + } + return "" +} + +func (m *BuildOptions) GetLogging() BuildOptions_LoggingMode { + if m != nil { + return m.Logging + } + return BuildOptions_LOGGING_UNSPECIFIED +} + +func (m *BuildOptions) GetEnv() []string { + if m != nil { + return m.Env + } + return nil +} + +func (m *BuildOptions) GetSecretEnv() []string { + if m != nil { + return m.SecretEnv + } + return nil +} + +func (m *BuildOptions) GetVolumes() []*Volume { + if m != nil { + return m.Volumes + } + return nil +} + +func init() { + proto.RegisterType((*RetryBuildRequest)(nil), "google.devtools.cloudbuild.v1.RetryBuildRequest") + proto.RegisterType((*RunBuildTriggerRequest)(nil), "google.devtools.cloudbuild.v1.RunBuildTriggerRequest") + proto.RegisterType((*StorageSource)(nil), "google.devtools.cloudbuild.v1.StorageSource") + proto.RegisterType((*RepoSource)(nil), "google.devtools.cloudbuild.v1.RepoSource") + proto.RegisterType((*Source)(nil), "google.devtools.cloudbuild.v1.Source") + proto.RegisterType((*BuiltImage)(nil), "google.devtools.cloudbuild.v1.BuiltImage") + proto.RegisterType((*BuildStep)(nil), "google.devtools.cloudbuild.v1.BuildStep") + proto.RegisterType((*Volume)(nil), "google.devtools.cloudbuild.v1.Volume") + proto.RegisterType((*Results)(nil), "google.devtools.cloudbuild.v1.Results") + proto.RegisterType((*ArtifactResult)(nil), "google.devtools.cloudbuild.v1.ArtifactResult") + proto.RegisterType((*Build)(nil), 
"google.devtools.cloudbuild.v1.Build") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.cloudbuild.v1.Build.SubstitutionsEntry") + proto.RegisterMapType((map[string]*TimeSpan)(nil), "google.devtools.cloudbuild.v1.Build.TimingEntry") + proto.RegisterType((*Artifacts)(nil), "google.devtools.cloudbuild.v1.Artifacts") + proto.RegisterType((*Artifacts_ArtifactObjects)(nil), "google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects") + proto.RegisterType((*TimeSpan)(nil), "google.devtools.cloudbuild.v1.TimeSpan") + proto.RegisterType((*BuildOperationMetadata)(nil), "google.devtools.cloudbuild.v1.BuildOperationMetadata") + proto.RegisterType((*SourceProvenance)(nil), "google.devtools.cloudbuild.v1.SourceProvenance") + proto.RegisterMapType((map[string]*FileHashes)(nil), "google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry") + proto.RegisterType((*FileHashes)(nil), "google.devtools.cloudbuild.v1.FileHashes") + proto.RegisterType((*Hash)(nil), "google.devtools.cloudbuild.v1.Hash") + proto.RegisterType((*Secret)(nil), "google.devtools.cloudbuild.v1.Secret") + proto.RegisterMapType((map[string][]byte)(nil), "google.devtools.cloudbuild.v1.Secret.SecretEnvEntry") + proto.RegisterType((*CreateBuildRequest)(nil), "google.devtools.cloudbuild.v1.CreateBuildRequest") + proto.RegisterType((*GetBuildRequest)(nil), "google.devtools.cloudbuild.v1.GetBuildRequest") + proto.RegisterType((*ListBuildsRequest)(nil), "google.devtools.cloudbuild.v1.ListBuildsRequest") + proto.RegisterType((*ListBuildsResponse)(nil), "google.devtools.cloudbuild.v1.ListBuildsResponse") + proto.RegisterType((*CancelBuildRequest)(nil), "google.devtools.cloudbuild.v1.CancelBuildRequest") + proto.RegisterType((*BuildTrigger)(nil), "google.devtools.cloudbuild.v1.BuildTrigger") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry") + proto.RegisterType((*CreateBuildTriggerRequest)(nil), "google.devtools.cloudbuild.v1.CreateBuildTriggerRequest") + proto.RegisterType((*GetBuildTriggerRequest)(nil), "google.devtools.cloudbuild.v1.GetBuildTriggerRequest") + proto.RegisterType((*ListBuildTriggersRequest)(nil), "google.devtools.cloudbuild.v1.ListBuildTriggersRequest") + proto.RegisterType((*ListBuildTriggersResponse)(nil), "google.devtools.cloudbuild.v1.ListBuildTriggersResponse") + proto.RegisterType((*DeleteBuildTriggerRequest)(nil), "google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest") + proto.RegisterType((*UpdateBuildTriggerRequest)(nil), "google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest") + proto.RegisterType((*BuildOptions)(nil), "google.devtools.cloudbuild.v1.BuildOptions") + proto.RegisterEnum("google.devtools.cloudbuild.v1.Build_Status", Build_Status_name, Build_Status_value) + proto.RegisterEnum("google.devtools.cloudbuild.v1.Hash_HashType", Hash_HashType_name, Hash_HashType_value) + proto.RegisterEnum("google.devtools.cloudbuild.v1.BuildOptions_VerifyOption", BuildOptions_VerifyOption_name, BuildOptions_VerifyOption_value) + proto.RegisterEnum("google.devtools.cloudbuild.v1.BuildOptions_MachineType", BuildOptions_MachineType_name, BuildOptions_MachineType_value) + proto.RegisterEnum("google.devtools.cloudbuild.v1.BuildOptions_SubstitutionOption", BuildOptions_SubstitutionOption_name, BuildOptions_SubstitutionOption_value) + proto.RegisterEnum("google.devtools.cloudbuild.v1.BuildOptions_LogStreamingOption", BuildOptions_LogStreamingOption_name, BuildOptions_LogStreamingOption_value) + 
proto.RegisterEnum("google.devtools.cloudbuild.v1.BuildOptions_LoggingMode", BuildOptions_LoggingMode_name, BuildOptions_LoggingMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// CloudBuildClient is the client API for CloudBuild service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type CloudBuildClient interface { + // Starts a build with the specified configuration. + // + // This method returns a long-running `Operation`, which includes the build + // ID. Pass the build ID to `GetBuild` to determine the build status (such as + // `SUCCESS` or `FAILURE`). + CreateBuild(ctx context.Context, in *CreateBuildRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Returns information about a previously requested build. + // + // The `Build` that is returned includes its status (such as `SUCCESS`, + // `FAILURE`, or `WORKING`), and timing information. + GetBuild(ctx context.Context, in *GetBuildRequest, opts ...grpc.CallOption) (*Build, error) + // Lists previously requested builds. + // + // Previously requested builds may still be in-progress, or may have finished + // successfully or unsuccessfully. + ListBuilds(ctx context.Context, in *ListBuildsRequest, opts ...grpc.CallOption) (*ListBuildsResponse, error) + // Cancels a build in progress. + CancelBuild(ctx context.Context, in *CancelBuildRequest, opts ...grpc.CallOption) (*Build, error) + // Creates a new build based on the specified build. + // + // This method creates a new build using the original build request, which may + // or may not result in an identical build. + // + // For triggered builds: + // + // * Triggered builds resolve to a precise revision; therefore a retry of a + // triggered build will result in a build that uses the same revision. + // + // For non-triggered builds that specify `RepoSource`: + // + // * If the original build built from the tip of a branch, the retried build + // will build from the tip of that branch, which may not be the same revision + // as the original build. + // * If the original build specified a commit sha or revision ID, the retried + // build will use the identical source. + // + // For builds that specify `StorageSource`: + // + // * If the original build pulled source from Google Cloud Storage without + // specifying the generation of the object, the new build will use the current + // object, which may be different from the original build source. + // * If the original build pulled source from Cloud Storage and specified the + // generation of the object, the new build will attempt to use the same + // object, which may or may not be available depending on the bucket's + // lifecycle management settings. + RetryBuild(ctx context.Context, in *RetryBuildRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates a new `BuildTrigger`. + // + // This API is experimental. + CreateBuildTrigger(ctx context.Context, in *CreateBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) + // Returns information about a `BuildTrigger`. + // + // This API is experimental. 
+ GetBuildTrigger(ctx context.Context, in *GetBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) + // Lists existing `BuildTrigger`s. + // + // This API is experimental. + ListBuildTriggers(ctx context.Context, in *ListBuildTriggersRequest, opts ...grpc.CallOption) (*ListBuildTriggersResponse, error) + // Deletes a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + DeleteBuildTrigger(ctx context.Context, in *DeleteBuildTriggerRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + UpdateBuildTrigger(ctx context.Context, in *UpdateBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) + // Runs a `BuildTrigger` at a particular source revision. + RunBuildTrigger(ctx context.Context, in *RunBuildTriggerRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type cloudBuildClient struct { + cc *grpc.ClientConn +} + +func NewCloudBuildClient(cc *grpc.ClientConn) CloudBuildClient { + return &cloudBuildClient{cc} +} + +func (c *cloudBuildClient) CreateBuild(ctx context.Context, in *CreateBuildRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) GetBuild(ctx context.Context, in *GetBuildRequest, opts ...grpc.CallOption) (*Build, error) { + out := new(Build) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) ListBuilds(ctx context.Context, in *ListBuildsRequest, opts ...grpc.CallOption) (*ListBuildsResponse, error) { + out := new(ListBuildsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) CancelBuild(ctx context.Context, in *CancelBuildRequest, opts ...grpc.CallOption) (*Build, error) { + out := new(Build) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) RetryBuild(ctx context.Context, in *RetryBuildRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) CreateBuildTrigger(ctx context.Context, in *CreateBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) { + out := new(BuildTrigger) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) GetBuildTrigger(ctx context.Context, in *GetBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) { + out := new(BuildTrigger) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) ListBuildTriggers(ctx context.Context, in *ListBuildTriggersRequest, opts ...grpc.CallOption) (*ListBuildTriggersResponse, error) { + out := new(ListBuildTriggersResponse) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) DeleteBuildTrigger(ctx context.Context, in *DeleteBuildTriggerRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) UpdateBuildTrigger(ctx context.Context, in *UpdateBuildTriggerRequest, opts ...grpc.CallOption) (*BuildTrigger, error) { + out := new(BuildTrigger) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *cloudBuildClient) RunBuildTrigger(ctx context.Context, in *RunBuildTriggerRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// CloudBuildServer is the server API for CloudBuild service. +type CloudBuildServer interface { + // Starts a build with the specified configuration. + // + // This method returns a long-running `Operation`, which includes the build + // ID. Pass the build ID to `GetBuild` to determine the build status (such as + // `SUCCESS` or `FAILURE`). + CreateBuild(context.Context, *CreateBuildRequest) (*longrunning.Operation, error) + // Returns information about a previously requested build. + // + // The `Build` that is returned includes its status (such as `SUCCESS`, + // `FAILURE`, or `WORKING`), and timing information. + GetBuild(context.Context, *GetBuildRequest) (*Build, error) + // Lists previously requested builds. + // + // Previously requested builds may still be in-progress, or may have finished + // successfully or unsuccessfully. + ListBuilds(context.Context, *ListBuildsRequest) (*ListBuildsResponse, error) + // Cancels a build in progress. + CancelBuild(context.Context, *CancelBuildRequest) (*Build, error) + // Creates a new build based on the specified build. + // + // This method creates a new build using the original build request, which may + // or may not result in an identical build. + // + // For triggered builds: + // + // * Triggered builds resolve to a precise revision; therefore a retry of a + // triggered build will result in a build that uses the same revision. + // + // For non-triggered builds that specify `RepoSource`: + // + // * If the original build built from the tip of a branch, the retried build + // will build from the tip of that branch, which may not be the same revision + // as the original build. + // * If the original build specified a commit sha or revision ID, the retried + // build will use the identical source. + // + // For builds that specify `StorageSource`: + // + // * If the original build pulled source from Google Cloud Storage without + // specifying the generation of the object, the new build will use the current + // object, which may be different from the original build source. 
+ // * If the original build pulled source from Cloud Storage and specified the + // generation of the object, the new build will attempt to use the same + // object, which may or may not be available depending on the bucket's + // lifecycle management settings. + RetryBuild(context.Context, *RetryBuildRequest) (*longrunning.Operation, error) + // Creates a new `BuildTrigger`. + // + // This API is experimental. + CreateBuildTrigger(context.Context, *CreateBuildTriggerRequest) (*BuildTrigger, error) + // Returns information about a `BuildTrigger`. + // + // This API is experimental. + GetBuildTrigger(context.Context, *GetBuildTriggerRequest) (*BuildTrigger, error) + // Lists existing `BuildTrigger`s. + // + // This API is experimental. + ListBuildTriggers(context.Context, *ListBuildTriggersRequest) (*ListBuildTriggersResponse, error) + // Deletes a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + DeleteBuildTrigger(context.Context, *DeleteBuildTriggerRequest) (*empty.Empty, error) + // Updates a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + UpdateBuildTrigger(context.Context, *UpdateBuildTriggerRequest) (*BuildTrigger, error) + // Runs a `BuildTrigger` at a particular source revision. + RunBuildTrigger(context.Context, *RunBuildTriggerRequest) (*longrunning.Operation, error) +} + +func RegisterCloudBuildServer(s *grpc.Server, srv CloudBuildServer) { + s.RegisterService(&_CloudBuild_serviceDesc, srv) +} + +func _CloudBuild_CreateBuild_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateBuildRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).CreateBuild(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).CreateBuild(ctx, req.(*CreateBuildRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_GetBuild_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBuildRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).GetBuild(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).GetBuild(ctx, req.(*GetBuildRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_ListBuilds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBuildsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).ListBuilds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).ListBuilds(ctx, req.(*ListBuildsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_CancelBuild_Handler(srv interface{}, 
ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelBuildRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).CancelBuild(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).CancelBuild(ctx, req.(*CancelBuildRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_RetryBuild_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RetryBuildRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).RetryBuild(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).RetryBuild(ctx, req.(*RetryBuildRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_CreateBuildTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateBuildTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).CreateBuildTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).CreateBuildTrigger(ctx, req.(*CreateBuildTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_GetBuildTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBuildTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).GetBuildTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).GetBuildTrigger(ctx, req.(*GetBuildTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_ListBuildTriggers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBuildTriggersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).ListBuildTriggers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).ListBuildTriggers(ctx, req.(*ListBuildTriggersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_DeleteBuildTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) 
(interface{}, error) { + in := new(DeleteBuildTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).DeleteBuildTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).DeleteBuildTrigger(ctx, req.(*DeleteBuildTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_UpdateBuildTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateBuildTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).UpdateBuildTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).UpdateBuildTrigger(ctx, req.(*UpdateBuildTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _CloudBuild_RunBuildTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunBuildTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(CloudBuildServer).RunBuildTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(CloudBuildServer).RunBuildTrigger(ctx, req.(*RunBuildTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _CloudBuild_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.cloudbuild.v1.CloudBuild", + HandlerType: (*CloudBuildServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateBuild", + Handler: _CloudBuild_CreateBuild_Handler, + }, + { + MethodName: "GetBuild", + Handler: _CloudBuild_GetBuild_Handler, + }, + { + MethodName: "ListBuilds", + Handler: _CloudBuild_ListBuilds_Handler, + }, + { + MethodName: "CancelBuild", + Handler: _CloudBuild_CancelBuild_Handler, + }, + { + MethodName: "RetryBuild", + Handler: _CloudBuild_RetryBuild_Handler, + }, + { + MethodName: "CreateBuildTrigger", + Handler: _CloudBuild_CreateBuildTrigger_Handler, + }, + { + MethodName: "GetBuildTrigger", + Handler: _CloudBuild_GetBuildTrigger_Handler, + }, + { + MethodName: "ListBuildTriggers", + Handler: _CloudBuild_ListBuildTriggers_Handler, + }, + { + MethodName: "DeleteBuildTrigger", + Handler: _CloudBuild_DeleteBuildTrigger_Handler, + }, + { + MethodName: "UpdateBuildTrigger", + Handler: _CloudBuild_UpdateBuildTrigger_Handler, + }, + { + MethodName: "RunBuildTrigger", + Handler: _CloudBuild_RunBuildTrigger_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/cloudbuild/v1/cloudbuild.proto", +} + +func init() { + proto.RegisterFile("google/devtools/cloudbuild/v1/cloudbuild.proto", fileDescriptor_cloudbuild_bcd614f113dbc5d4) +} + +var fileDescriptor_cloudbuild_bcd614f113dbc5d4 = []byte{ + // 2941 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x5a, 0xcb, 0x73, 0xdb, 
0xd6, + 0xd5, 0x17, 0x48, 0x89, 0x8f, 0x43, 0x4a, 0x82, 0x6f, 0x1c, 0x99, 0xa2, 0xe3, 0x58, 0x81, 0xe3, + 0x44, 0xb1, 0x13, 0x31, 0x92, 0x3f, 0xc7, 0x8e, 0xf2, 0xb0, 0x24, 0x8a, 0x7a, 0x4c, 0x28, 0xd2, + 0x01, 0x49, 0x67, 0x92, 0xef, 0xfb, 0x06, 0x05, 0x89, 0x2b, 0x0a, 0x15, 0x08, 0xa0, 0xc0, 0x25, + 0x53, 0x25, 0xcd, 0x74, 0x9a, 0xe9, 0xb4, 0xd3, 0x55, 0x3b, 0xd3, 0xe9, 0xa2, 0xd3, 0x45, 0x1f, + 0xeb, 0x4e, 0xa7, 0xd3, 0x2e, 0xba, 0xca, 0xba, 0xeb, 0x4e, 0x57, 0xdd, 0x76, 0xfa, 0x47, 0x74, + 0xd5, 0xe9, 0xdc, 0x07, 0x48, 0x90, 0xb4, 0x03, 0xc2, 0x6e, 0x37, 0x36, 0xee, 0xb9, 0xf7, 0x9c, + 0x7b, 0xee, 0x79, 0xdd, 0xdf, 0xb9, 0x14, 0x6c, 0x74, 0x1d, 0xa7, 0x6b, 0xe1, 0x92, 0x81, 0x07, + 0xc4, 0x71, 0x2c, 0xbf, 0xd4, 0xb1, 0x9c, 0xbe, 0xd1, 0xee, 0x9b, 0x96, 0x51, 0x1a, 0x6c, 0x86, + 0x46, 0x1b, 0xae, 0xe7, 0x10, 0x07, 0x5d, 0xe3, 0xeb, 0x37, 0x82, 0xf5, 0x1b, 0xa1, 0x15, 0x83, + 0xcd, 0xe2, 0x0b, 0x42, 0x9c, 0xee, 0x9a, 0x25, 0xdd, 0xb6, 0x1d, 0xa2, 0x13, 0xd3, 0xb1, 0x7d, + 0xce, 0x5c, 0x5c, 0x0d, 0xcd, 0x9e, 0x11, 0xe2, 0xb6, 0x1d, 0xe3, 0x42, 0x4c, 0x29, 0x62, 0x8a, + 0x89, 0x2b, 0xe9, 0x7d, 0xc3, 0x24, 0xfc, 0x5f, 0xcd, 0x72, 0xba, 0x62, 0xcd, 0x0d, 0xb1, 0xc6, + 0x72, 0xec, 0xae, 0xd7, 0xb7, 0x6d, 0xd3, 0xee, 0x96, 0x1c, 0x17, 0x7b, 0x63, 0x7b, 0xbc, 0x28, + 0x16, 0xb1, 0x51, 0xbb, 0x7f, 0x5a, 0x32, 0xfa, 0x7c, 0x81, 0x98, 0xbf, 0x3a, 0x39, 0x8f, 0x7b, + 0x2e, 0x09, 0xb4, 0x58, 0x9b, 0x9c, 0x3c, 0x35, 0xb1, 0x65, 0x68, 0x3d, 0xdd, 0x3f, 0x17, 0x2b, + 0xae, 0x4f, 0xae, 0x20, 0x66, 0x0f, 0xfb, 0x44, 0xef, 0xb9, 0x7c, 0x81, 0xb2, 0x07, 0x97, 0x54, + 0x4c, 0xbc, 0x8b, 0x3d, 0x6a, 0x12, 0x15, 0x7f, 0xab, 0x8f, 0x7d, 0x82, 0xae, 0x01, 0xb8, 0x9e, + 0xf3, 0x4d, 0xdc, 0x21, 0x9a, 0x69, 0x14, 0xa4, 0x35, 0x69, 0x3d, 0xab, 0x66, 0x05, 0xe5, 0xd8, + 0x40, 0x4b, 0x90, 0x30, 0x8d, 0x42, 0x82, 0x91, 0x13, 0xa6, 0xa1, 0xfc, 0x5c, 0x82, 0x15, 0xb5, + 0x6f, 0x33, 0x11, 0x4d, 0xcf, 0xec, 0x76, 0xb1, 0x37, 0xa3, 0xa4, 0x6b, 0x00, 0x84, 0x33, 0x68, + 0x43, 0x89, 0x59, 0x41, 0x39, 0x36, 0xd0, 0x2e, 0xa4, 0x7c, 0xa7, 0xef, 0x75, 0x70, 0x21, 0xb9, + 0x26, 0xad, 0xe7, 0xb6, 0x5e, 0xdb, 0xf8, 0x5a, 0x77, 0x6e, 0xa8, 0xd8, 0x75, 0x1a, 0x8c, 0x41, + 0x15, 0x8c, 0x8a, 0x06, 0x8b, 0x0d, 0xe2, 0x78, 0x7a, 0x17, 0xf3, 0x09, 0xb4, 0x02, 0xa9, 0x76, + 0xbf, 0x73, 0x8e, 0x89, 0xd0, 0x46, 0x8c, 0x28, 0xdd, 0x69, 0x53, 0xb5, 0x84, 0x1a, 0x62, 0x84, + 0x5e, 0x04, 0xe8, 0x62, 0x5b, 0x78, 0x8d, 0xe9, 0x91, 0x54, 0x43, 0x14, 0xe5, 0xcf, 0x12, 0xc0, + 0x68, 0xdf, 0xa8, 0x03, 0x5f, 0x85, 0xac, 0x87, 0x5d, 0x47, 0xb3, 0xf5, 0x1e, 0x16, 0x1b, 0x65, + 0x28, 0xa1, 0xa6, 0xf7, 0x30, 0x7a, 0x09, 0x72, 0x6d, 0x4f, 0xb7, 0x3b, 0x67, 0x7c, 0x9a, 0xee, + 0x95, 0x3d, 0x9a, 0x53, 0x81, 0x13, 0xd9, 0x92, 0xab, 0x90, 0x21, 0x7a, 0x97, 0xcf, 0xcf, 0x8b, + 0xf9, 0x34, 0xd1, 0xbb, 0x6c, 0xf2, 0x3a, 0x40, 0xc7, 0xe9, 0xf5, 0x4c, 0xa2, 0xf9, 0x67, 0x7a, + 0x61, 0x41, 0x4c, 0x67, 0x39, 0xad, 0x71, 0xa6, 0x23, 0x19, 0x92, 0x86, 0xe9, 0x15, 0xd2, 0x6c, + 0x5f, 0xfa, 0xb9, 0x07, 0x90, 0xf1, 0xf0, 0xc0, 0xf4, 0xe9, 0x49, 0xfe, 0x24, 0x41, 0x4a, 0x9c, + 0xa2, 0x05, 0x4b, 0x3e, 0xb7, 0x9a, 0x26, 0x1c, 0x90, 0x60, 0x0e, 0x78, 0x3d, 0xc2, 0x01, 0x63, + 0xa6, 0x3e, 0x9a, 0x53, 0x17, 0xfd, 0x31, 0xdb, 0x57, 0x21, 0xc7, 0x4e, 0xff, 0x94, 0x4e, 0xa5, + 0xb6, 0xf0, 0x86, 0xa3, 0xbd, 0x4c, 0x10, 0x1d, 0xca, 0x97, 0x12, 0x00, 0x8d, 0x3e, 0x72, 0xdc, + 0xd3, 0xbb, 0x18, 0x21, 0x98, 0x67, 0x06, 0xe2, 0xd6, 0x67, 0xdf, 0xd4, 0xbd, 0x86, 0xd9, 0xc5, + 0x3e, 0xe1, 0x66, 0x55, 0xc5, 0x08, 0x1d, 0x41, 0xce, 0xed, 0xfb, 0x67, 0x1a, 0x31, 0x7b, 0xa6, + 0xdd, 0x65, 0x36, 
0xcd, 0x6d, 0xbd, 0x1a, 0xa1, 0x52, 0xd3, 0xec, 0xe1, 0x86, 0xab, 0xdb, 0x2a, + 0x50, 0xde, 0x26, 0x63, 0x55, 0x7e, 0x38, 0x0f, 0x59, 0x96, 0x02, 0x0d, 0x82, 0xdd, 0xc7, 0xea, + 0x20, 0x43, 0x12, 0xdb, 0x83, 0x42, 0x62, 0x2d, 0x49, 0xcd, 0x8f, 0xed, 0x01, 0x5d, 0xa5, 0x7b, + 0x5d, 0xbf, 0x90, 0x64, 0x24, 0xf6, 0x1d, 0x38, 0x69, 0x7e, 0xe8, 0x24, 0x91, 0x6f, 0x0b, 0x41, + 0xbe, 0xa1, 0x55, 0xc8, 0x7c, 0xaa, 0x9b, 0x44, 0x3b, 0x75, 0xbc, 0x42, 0x8a, 0x71, 0xa6, 0xe9, + 0xf8, 0xc0, 0xf1, 0x68, 0xb4, 0x62, 0x9b, 0x78, 0x17, 0xae, 0x63, 0xda, 0x44, 0x38, 0x3a, 0x44, + 0xa1, 0xe1, 0xe9, 0xe3, 0x8e, 0x87, 0x89, 0x46, 0x35, 0xc9, 0x30, 0xe6, 0x2c, 0xa7, 0x54, 0xec, + 0x01, 0x7a, 0x00, 0xe9, 0x81, 0x63, 0xf5, 0x7b, 0xd8, 0x2f, 0x64, 0xd7, 0x92, 0xeb, 0xb9, 0xad, + 0x9b, 0x11, 0x96, 0x78, 0xc4, 0x56, 0xab, 0x01, 0x17, 0x7a, 0x00, 0x29, 0x61, 0x49, 0x88, 0x67, + 0x49, 0xc1, 0xc6, 0xfd, 0x61, 0x59, 0x81, 0x3f, 0x16, 0x63, 0xfb, 0xc3, 0xb2, 0xb8, 0x3f, 0xd0, + 0x1d, 0x48, 0xd3, 0x62, 0xe7, 0xf4, 0x49, 0x21, 0xc7, 0xa4, 0xac, 0x06, 0x52, 0x82, 0x62, 0xb8, + 0xb1, 0x2f, 0x6a, 0xad, 0x1a, 0xac, 0x44, 0x65, 0x48, 0xf9, 0x44, 0x27, 0x7d, 0xbf, 0x90, 0x5f, + 0x93, 0xd6, 0x97, 0xb6, 0x6e, 0x47, 0xec, 0xcc, 0x1c, 0xbe, 0xd1, 0x60, 0x2c, 0xaa, 0x60, 0x55, + 0xde, 0x84, 0x14, 0xb7, 0xcb, 0x63, 0xa3, 0x00, 0xc1, 0xbc, 0xab, 0x93, 0x33, 0x91, 0xfd, 0xec, + 0x5b, 0xf9, 0xa7, 0x04, 0x69, 0x15, 0xfb, 0x7d, 0x8b, 0xf8, 0xb4, 0xe8, 0x99, 0x34, 0x8c, 0x7d, + 0x16, 0x28, 0xd1, 0xf9, 0x31, 0x0a, 0x7c, 0x55, 0x30, 0xa2, 0x5b, 0x70, 0x89, 0x4d, 0x6b, 0x3e, + 0xc1, 0xae, 0x26, 0xa4, 0xf1, 0x18, 0x5b, 0x6e, 0x07, 0x21, 0x7a, 0xcc, 0xd7, 0xde, 0x86, 0x4b, + 0xba, 0x47, 0xcc, 0x53, 0xbd, 0x43, 0xb4, 0x9e, 0x6e, 0x9b, 0xa7, 0x34, 0x47, 0x78, 0xf0, 0xc9, + 0xc1, 0xc4, 0x89, 0xa0, 0xa3, 0x1b, 0xb0, 0x68, 0xf7, 0x7b, 0x5a, 0x40, 0xf7, 0x59, 0x50, 0x26, + 0xd5, 0xbc, 0xdd, 0xef, 0xed, 0x06, 0x34, 0xf4, 0x3a, 0xa0, 0xd0, 0xee, 0x4e, 0x9f, 0xb8, 0x7d, + 0xe2, 0xb3, 0x40, 0xcd, 0xab, 0xf2, 0x70, 0xfb, 0x3a, 0xa7, 0x2b, 0x04, 0x96, 0x02, 0x56, 0x6e, + 0x01, 0x54, 0x84, 0x8c, 0xe5, 0x74, 0x78, 0xbd, 0xe5, 0x86, 0x1b, 0x8e, 0xd1, 0x01, 0x64, 0x4f, + 0x4d, 0x0b, 0x6b, 0x67, 0xba, 0x7f, 0x36, 0xa3, 0x7d, 0x0e, 0x4c, 0x0b, 0x1f, 0xe9, 0xfe, 0x19, + 0xf6, 0xd5, 0xcc, 0xa9, 0xf8, 0x56, 0xfe, 0x96, 0x83, 0x05, 0xe6, 0x3b, 0x91, 0x5c, 0xd2, 0x30, + 0xb9, 0xc6, 0x0b, 0xb8, 0x3c, 0x59, 0xc0, 0x47, 0x01, 0x92, 0x78, 0xea, 0x00, 0xa1, 0x66, 0xe4, + 0x5f, 0x9a, 0x81, 0x89, 0x6e, 0x5a, 0x85, 0x02, 0xdb, 0x26, 0xcf, 0x89, 0xfb, 0x8c, 0x86, 0xde, + 0x9b, 0xb8, 0xfc, 0xa2, 0x52, 0x71, 0xfc, 0xe2, 0x43, 0xef, 0xc3, 0x02, 0xb5, 0xbf, 0x5f, 0xc8, + 0x31, 0x2b, 0xad, 0xcf, 0xa2, 0x27, 0xf5, 0x8b, 0xca, 0xd9, 0xd0, 0x0e, 0xa4, 0x3d, 0x1e, 0x91, + 0x22, 0x95, 0x5f, 0x89, 0xac, 0xd3, 0x6c, 0xb5, 0x1a, 0xb0, 0xa1, 0x77, 0x20, 0xd7, 0xf1, 0xb0, + 0x4e, 0x30, 0x4d, 0x66, 0x5c, 0x48, 0x31, 0x29, 0xc5, 0xa9, 0x24, 0x6c, 0x06, 0x88, 0x44, 0x05, + 0xbe, 0x9c, 0x12, 0xd0, 0xdb, 0x00, 0x3e, 0xd1, 0x3d, 0xc2, 0x79, 0xd3, 0x91, 0xbc, 0x59, 0xb6, + 0x9a, 0xb1, 0xbe, 0x03, 0xb9, 0x53, 0xd3, 0x36, 0x79, 0x51, 0xc7, 0x85, 0x4c, 0xf4, 0xbe, 0x7c, + 0x39, 0x63, 0x0e, 0x55, 0x8d, 0xfc, 0xcc, 0x55, 0x63, 0x65, 0x98, 0xb2, 0x8b, 0x2c, 0xc9, 0x82, + 0x3c, 0x3c, 0x80, 0xec, 0x28, 0x55, 0x6e, 0x32, 0x71, 0x51, 0x7e, 0x18, 0xa6, 0x91, 0x3a, 0x62, + 0x45, 0xd7, 0x21, 0x67, 0x39, 0x5d, 0x5f, 0x13, 0xc0, 0xe5, 0x39, 0x5e, 0xd6, 0x29, 0x69, 0x8f, + 0x83, 0x97, 0xff, 0x83, 0x4b, 0xdc, 0xed, 0x9a, 0xeb, 0x39, 0x03, 0x6c, 0xeb, 0x76, 0x07, 0x17, + 0x9e, 0x67, 0x1b, 0x96, 0x66, 0x0a, 0x9b, 
0x87, 0x43, 0x36, 0x55, 0xf6, 0x27, 0x28, 0x68, 0x1d, + 0x78, 0xda, 0x6a, 0x21, 0xac, 0xb6, 0xc2, 0x74, 0x58, 0x6a, 0x87, 0x30, 0xdf, 0xb1, 0x81, 0x2a, + 0x90, 0x76, 0x5c, 0x06, 0x6f, 0x0b, 0x57, 0xd8, 0xee, 0x33, 0xa5, 0x47, 0x9d, 0xb3, 0xa8, 0x01, + 0x2f, 0xba, 0x02, 0x69, 0xcb, 0xe9, 0x6a, 0x7d, 0xcf, 0x2a, 0xac, 0xf2, 0xdb, 0xda, 0x72, 0xba, + 0x2d, 0xcf, 0x42, 0xff, 0x0f, 0x8b, 0x7e, 0xbf, 0xed, 0x13, 0x93, 0xf4, 0xf9, 0x2e, 0xd7, 0x58, + 0x70, 0xdf, 0x9b, 0x2d, 0x09, 0xc3, 0x9c, 0x15, 0x7a, 0x27, 0xaa, 0xe3, 0xd2, 0x68, 0x69, 0x26, + 0x7a, 0xd7, 0x2f, 0x5c, 0xe7, 0xd7, 0x31, 0xfd, 0xa6, 0x57, 0x22, 0xbf, 0x1f, 0xfd, 0xc2, 0xda, + 0x4c, 0x57, 0x62, 0x83, 0xad, 0x56, 0x03, 0x2e, 0x74, 0x34, 0xbc, 0x12, 0x5f, 0x62, 0xfc, 0x6f, + 0xce, 0xa4, 0x2c, 0xbf, 0xc4, 0xb8, 0x96, 0x82, 0xbf, 0xb8, 0x03, 0x68, 0xfa, 0x0c, 0x14, 0x2f, + 0x9c, 0xe3, 0x0b, 0x51, 0xc1, 0xe8, 0x27, 0xba, 0x0c, 0x0b, 0x03, 0xdd, 0xea, 0x07, 0x00, 0x93, + 0x0f, 0xb6, 0x13, 0xf7, 0xa5, 0x62, 0x1b, 0x72, 0x21, 0xc1, 0x8f, 0x61, 0x7d, 0x2f, 0xcc, 0x1a, + 0xe3, 0xe2, 0x1d, 0xed, 0xa1, 0x7c, 0x17, 0x52, 0xbc, 0xdc, 0x21, 0x04, 0x4b, 0x8d, 0xe6, 0x6e, + 0xb3, 0xd5, 0xd0, 0x5a, 0xb5, 0x0f, 0x6a, 0xf5, 0x8f, 0x6a, 0xf2, 0x1c, 0x02, 0x48, 0x7d, 0xd8, + 0xaa, 0xb4, 0x2a, 0xfb, 0xb2, 0x84, 0x72, 0x90, 0xfe, 0xa8, 0xae, 0x7e, 0x70, 0x5c, 0x3b, 0x94, + 0x13, 0x74, 0xd0, 0x68, 0x95, 0xcb, 0x95, 0x46, 0x43, 0x4e, 0xd2, 0xc1, 0xc1, 0xee, 0x71, 0xb5, + 0xa5, 0x56, 0xe4, 0x79, 0x2a, 0xe6, 0xb8, 0xd6, 0xac, 0xa8, 0xb5, 0xdd, 0xaa, 0x56, 0x51, 0xd5, + 0xba, 0x2a, 0x2f, 0xd0, 0x05, 0xcd, 0xe3, 0x93, 0x4a, 0xbd, 0xd5, 0x94, 0x53, 0x68, 0x11, 0xb2, + 0xe5, 0xdd, 0x5a, 0xb9, 0x52, 0xad, 0x56, 0xf6, 0xe5, 0xb4, 0xf2, 0x2f, 0x09, 0xb2, 0xa3, 0xdb, + 0x68, 0x94, 0x9b, 0xd2, 0x58, 0x6e, 0xaa, 0x90, 0xe6, 0x08, 0xdf, 0x17, 0x67, 0xbd, 0x3f, 0x6b, + 0x66, 0x0e, 0xbf, 0xea, 0x9c, 0x5f, 0x0d, 0x04, 0x15, 0xbf, 0x2f, 0xc1, 0xf2, 0xc4, 0xe4, 0xd7, + 0xde, 0x66, 0x97, 0x61, 0x81, 0x5e, 0xff, 0xbe, 0x80, 0x84, 0x7c, 0x10, 0xc2, 0x50, 0xc9, 0xa7, + 0xc2, 0x50, 0xca, 0x77, 0x20, 0x13, 0xd0, 0x26, 0xea, 0xa8, 0x14, 0xa7, 0x8e, 0xde, 0x85, 0x0c, + 0xb6, 0x0d, 0xce, 0x98, 0x88, 0x64, 0x4c, 0x63, 0xdb, 0xa0, 0x23, 0xa5, 0x09, 0x2b, 0x22, 0xab, + 0x45, 0x8b, 0x74, 0x82, 0x89, 0x6e, 0xe8, 0x44, 0x47, 0xdb, 0xb0, 0xc0, 0x94, 0x16, 0x6a, 0xbc, + 0x3c, 0x4b, 0x22, 0xa8, 0x9c, 0x45, 0xf9, 0x75, 0x12, 0xe4, 0xc9, 0x52, 0x85, 0x0c, 0xb8, 0xe2, + 0x61, 0xdf, 0xb1, 0x06, 0x98, 0x82, 0x8d, 0xb1, 0x7e, 0x25, 0x19, 0xbf, 0x5f, 0x51, 0x9f, 0x0f, + 0x84, 0x8d, 0x77, 0x8c, 0xff, 0x0b, 0x97, 0x87, 0xbb, 0x84, 0xdb, 0x97, 0x54, 0xdc, 0x9e, 0x14, + 0x05, 0x62, 0x42, 0xfd, 0xe2, 0x37, 0xe8, 0x65, 0x25, 0x00, 0x0d, 0xf6, 0x0b, 0xf3, 0xac, 0x44, + 0x3c, 0x88, 0x59, 0xb3, 0x43, 0x18, 0x87, 0x57, 0x0c, 0x38, 0x1d, 0x12, 0x8a, 0x67, 0xb0, 0x3c, + 0x31, 0xfd, 0x98, 0xbc, 0x7f, 0x30, 0x9e, 0xf7, 0x31, 0x30, 0x55, 0x28, 0xf3, 0x6b, 0x00, 0xa3, + 0x09, 0xb4, 0x13, 0x86, 0x6a, 0x12, 0x3b, 0xd7, 0x8d, 0x08, 0xb1, 0x94, 0x33, 0x04, 0xd2, 0x7e, + 0x24, 0xc1, 0x3c, 0xfd, 0x40, 0x3b, 0x30, 0x4f, 0x2e, 0x5c, 0x1e, 0xbe, 0x4b, 0x91, 0x4e, 0xa5, + 0x2c, 0xec, 0x9f, 0xe6, 0x85, 0x8b, 0x55, 0xc6, 0x39, 0x5e, 0x12, 0xf3, 0x42, 0x69, 0xe5, 0x35, + 0xc8, 0x04, 0xeb, 0x50, 0x06, 0xe6, 0x6b, 0xf5, 0x5a, 0x85, 0x97, 0xa8, 0xc6, 0xd1, 0xee, 0xd6, + 0xdd, 0xb7, 0x64, 0x09, 0xa5, 0x21, 0x79, 0xb2, 0x7f, 0x57, 0x4e, 0x28, 0x5f, 0xd1, 0xe6, 0x98, + 0x55, 0x74, 0xb4, 0x06, 0xf9, 0xf3, 0x9e, 0xaf, 0x9d, 0xe3, 0x0b, 0x2d, 0x04, 0xee, 0xe1, 0xbc, + 0xe7, 0x7f, 0x80, 0x2f, 0x58, 0x23, 0xde, 0x18, 0xeb, 0xb2, 0x92, 
0xec, 0xec, 0xff, 0x33, 0xd3, + 0xb5, 0x21, 0xfe, 0xab, 0xd8, 0x03, 0xee, 0xc8, 0x51, 0x6f, 0x56, 0x7c, 0x17, 0x96, 0xc6, 0x27, + 0xa3, 0x2a, 0x7f, 0x3e, 0xec, 0x1b, 0x07, 0x50, 0x99, 0xa1, 0xab, 0x38, 0x0f, 0x3d, 0xc3, 0x84, + 0x4d, 0xc4, 0x4f, 0xd8, 0x1d, 0x58, 0x3e, 0xc4, 0xe4, 0x59, 0x9e, 0x95, 0x7e, 0x20, 0xc1, 0xa5, + 0xaa, 0xe9, 0x73, 0x19, 0xfe, 0x8c, 0x42, 0xae, 0x42, 0xd6, 0x65, 0x65, 0xc0, 0xfc, 0x8c, 0x5b, + 0x61, 0x41, 0xcd, 0x50, 0x42, 0xc3, 0xfc, 0x8c, 0x3f, 0xce, 0xd0, 0x49, 0xe2, 0x9c, 0x63, 0x5b, + 0x3c, 0x04, 0xb0, 0xe5, 0x4d, 0x4a, 0xa0, 0x57, 0xc5, 0xa9, 0x69, 0x11, 0xec, 0x31, 0xcc, 0x98, + 0x55, 0xc5, 0x48, 0xf9, 0x0c, 0x50, 0x58, 0x0f, 0xdf, 0x75, 0x6c, 0x1f, 0xa3, 0x77, 0x21, 0xc5, + 0x4e, 0xea, 0x8b, 0xe0, 0x9e, 0xcd, 0x3a, 0x82, 0x07, 0xbd, 0x02, 0xcb, 0x36, 0xfe, 0x36, 0xd1, + 0x42, 0xfa, 0xf0, 0x93, 0x2f, 0x52, 0xf2, 0xc3, 0x40, 0x27, 0xa5, 0x0c, 0xa8, 0x4c, 0x53, 0xdc, + 0x7a, 0x16, 0x4b, 0xfe, 0x65, 0x1e, 0xf2, 0xe1, 0xd7, 0xb9, 0xa9, 0xa6, 0x67, 0x0d, 0x72, 0x06, + 0xf6, 0x3b, 0x9e, 0xc9, 0x00, 0x18, 0x03, 0xfc, 0x59, 0x35, 0x4c, 0x42, 0x4d, 0x90, 0x03, 0xf4, + 0x47, 0x70, 0xcf, 0xb5, 0x74, 0x12, 0xa0, 0xf2, 0x18, 0x05, 0x70, 0x59, 0x88, 0x68, 0x0a, 0x09, + 0xe8, 0xdd, 0x20, 0xc0, 0xe6, 0x67, 0x0f, 0xb0, 0xa3, 0x39, 0x11, 0x62, 0xe8, 0x05, 0x60, 0xb5, + 0x82, 0x25, 0x61, 0x46, 0xbc, 0x76, 0x0d, 0x29, 0x93, 0xed, 0xc7, 0x42, 0xac, 0xf6, 0xa3, 0x08, + 0x19, 0xc3, 0xf4, 0xf5, 0xb6, 0x85, 0x8d, 0x42, 0x76, 0x4d, 0x5a, 0xcf, 0xa8, 0xc3, 0x31, 0x32, + 0x26, 0x41, 0x28, 0xef, 0xb0, 0xde, 0x9f, 0x45, 0x79, 0xe1, 0x80, 0x19, 0xb0, 0xe8, 0x0d, 0x58, + 0x34, 0xbb, 0xb6, 0xe3, 0x61, 0x43, 0xa3, 0x47, 0xf2, 0x0b, 0xcb, 0x0c, 0x23, 0xe4, 0x05, 0x91, + 0x16, 0x5a, 0x1f, 0xdd, 0x84, 0x25, 0xd3, 0xee, 0x58, 0x7d, 0x63, 0xb8, 0x4a, 0x66, 0xab, 0x16, + 0x03, 0x2a, 0x5b, 0xf6, 0xec, 0xc0, 0x71, 0x4f, 0x86, 0x25, 0xd1, 0x02, 0x08, 0xd7, 0x29, 0xdf, + 0x93, 0x60, 0x35, 0x54, 0x51, 0xe2, 0xbd, 0xfb, 0x56, 0x20, 0x2d, 0x42, 0x41, 0x94, 0x96, 0xdb, + 0x31, 0x8c, 0xa7, 0x06, 0xbc, 0xca, 0x23, 0x58, 0x09, 0x6a, 0xcc, 0x7f, 0xf2, 0xdd, 0x59, 0x79, + 0x1b, 0x0a, 0xc3, 0x84, 0x17, 0x82, 0x67, 0xac, 0x3f, 0x8a, 0x01, 0xab, 0x8f, 0x61, 0x15, 0x25, + 0xe3, 0x10, 0x32, 0x62, 0x93, 0xa0, 0x68, 0xc4, 0x3a, 0xf7, 0x90, 0x59, 0xf9, 0x18, 0x56, 0xf7, + 0xb1, 0x85, 0x9f, 0xca, 0xf6, 0x11, 0x67, 0xff, 0x95, 0x04, 0xab, 0x2d, 0xd7, 0xd0, 0xff, 0x0b, + 0xb2, 0xc3, 0x6e, 0x4f, 0x3e, 0x83, 0xdb, 0xff, 0x9e, 0x11, 0xe5, 0x4c, 0x34, 0x8e, 0xa8, 0x0d, + 0x2b, 0x53, 0xed, 0xef, 0x08, 0x77, 0xc4, 0x45, 0x0c, 0x97, 0x27, 0x1b, 0x60, 0x86, 0x41, 0x5c, + 0x8a, 0x35, 0x99, 0x11, 0xb0, 0xa1, 0x0d, 0xb0, 0x67, 0x9e, 0x5e, 0x68, 0xbc, 0x5f, 0x15, 0x2f, + 0x41, 0xf7, 0x63, 0xb4, 0xba, 0x1b, 0x8f, 0x98, 0x00, 0x3e, 0xa2, 0xb8, 0x53, 0x08, 0x0e, 0x93, + 0xd1, 0x27, 0x90, 0xef, 0xe9, 0x9d, 0x33, 0xd3, 0xc6, 0x1a, 0x43, 0x3f, 0x49, 0xb6, 0xcd, 0xbd, + 0x38, 0xdb, 0x9c, 0x70, 0x7e, 0x76, 0xac, 0x5c, 0x6f, 0x34, 0xa0, 0x18, 0xc6, 0x30, 0xfd, 0x73, + 0x76, 0x4d, 0x6a, 0xdd, 0x36, 0xc3, 0xb2, 0x49, 0x15, 0x28, 0x8d, 0xde, 0x94, 0x87, 0x6d, 0xe4, + 0xc0, 0x73, 0xe1, 0x82, 0x14, 0x9c, 0x75, 0x9e, 0x29, 0xf1, 0x7e, 0x1c, 0x25, 0xc2, 0xa5, 0x47, + 0x9c, 0x18, 0xf9, 0x53, 0x34, 0xe4, 0xc2, 0x65, 0xda, 0xf4, 0xfb, 0xc4, 0xc3, 0x3a, 0x6d, 0x63, + 0x82, 0x1d, 0x17, 0xe2, 0xef, 0x58, 0x75, 0xba, 0x8d, 0x40, 0x4c, 0xb0, 0xa3, 0x35, 0x45, 0x43, + 0xd7, 0x21, 0xf7, 0xa9, 0xe3, 0x9d, 0x63, 0x4f, 0x73, 0x1d, 0xc7, 0x0a, 0x5e, 0xcb, 0x39, 0xe9, + 0xa1, 0xe3, 0x58, 0xe8, 0x43, 0xf6, 0x0e, 0xd1, 0xa5, 0xad, 0x58, 0x2e, 0xbe, 0xf1, 0xab, 
0x9c, + 0xf5, 0xc4, 0x31, 0xb0, 0x1a, 0xc8, 0x09, 0x7e, 0x03, 0xc8, 0x8f, 0x7e, 0x03, 0x18, 0x7f, 0x92, + 0x5f, 0xfc, 0x9a, 0x27, 0xf9, 0xa5, 0xa7, 0x79, 0x92, 0x57, 0x36, 0x20, 0x3f, 0x16, 0x56, 0x32, + 0xe4, 0x6b, 0xf5, 0xa6, 0xf6, 0xa8, 0xa2, 0x1e, 0x1f, 0x1c, 0x57, 0xf6, 0xe5, 0x39, 0x94, 0x87, + 0xcc, 0x70, 0x24, 0x29, 0x65, 0xc8, 0x85, 0xc2, 0x06, 0x2d, 0x43, 0xae, 0x55, 0x6b, 0x3c, 0xac, + 0x94, 0x83, 0xd5, 0x94, 0x7f, 0x53, 0x3b, 0x3a, 0x3e, 0x3c, 0x2a, 0x3f, 0x6c, 0x69, 0xf7, 0x65, + 0x09, 0x5d, 0x82, 0xc5, 0x10, 0xe5, 0xce, 0x96, 0x9c, 0x50, 0xee, 0x8e, 0xdf, 0x38, 0x62, 0xeb, + 0x25, 0x80, 0x93, 0x56, 0xa3, 0xa9, 0x9d, 0xec, 0x36, 0xcb, 0x47, 0xf2, 0x1c, 0x95, 0xbd, 0x5b, + 0xad, 0xd6, 0x3f, 0xd2, 0xaa, 0xf5, 0x7a, 0xa3, 0x22, 0x4b, 0xca, 0x21, 0xa0, 0x69, 0xdf, 0xf1, + 0x77, 0x04, 0xb5, 0xb2, 0x7b, 0xa2, 0xed, 0x57, 0x0e, 0x76, 0x5b, 0xd5, 0xa6, 0x3c, 0x47, 0x7b, + 0x7e, 0x41, 0xab, 0xd7, 0x64, 0x89, 0x4a, 0x0e, 0x86, 0x07, 0x07, 0x72, 0x42, 0xd9, 0x81, 0x5c, + 0xc8, 0xfc, 0xe8, 0x0a, 0x3c, 0x57, 0xad, 0x1f, 0x1e, 0x1e, 0xd7, 0x0e, 0xb5, 0xf1, 0xc3, 0x00, + 0xa4, 0xaa, 0x95, 0xc3, 0xdd, 0xf2, 0xc7, 0xb2, 0x44, 0xcd, 0x70, 0x58, 0x6e, 0x68, 0xf5, 0x5a, + 0xf5, 0x63, 0x39, 0xb1, 0xf5, 0xc7, 0x25, 0x80, 0x32, 0x35, 0x2c, 0x7f, 0x26, 0xfe, 0x89, 0x04, + 0xb9, 0xd0, 0x75, 0x87, 0x36, 0x23, 0xbc, 0x30, 0x0d, 0xb6, 0x8b, 0xd7, 0x02, 0x96, 0xd0, 0x0f, + 0xc2, 0x1b, 0xc3, 0x2e, 0x59, 0x29, 0x7d, 0xf9, 0xd7, 0x7f, 0xfc, 0x34, 0xf1, 0x9a, 0xb2, 0x56, + 0x1a, 0x6c, 0x96, 0x44, 0x49, 0xf5, 0x4b, 0x9f, 0x8f, 0xca, 0xed, 0x17, 0x25, 0x8e, 0x1d, 0xb7, + 0x05, 0xfc, 0xf9, 0xb1, 0x04, 0x99, 0xe0, 0xfa, 0x43, 0x1b, 0x11, 0xfa, 0x4c, 0x60, 0xf1, 0xe2, + 0x4c, 0x50, 0x4b, 0x79, 0x83, 0xe9, 0xf4, 0x2a, 0xba, 0x19, 0xa5, 0x53, 0xe9, 0x73, 0xd3, 0xf8, + 0x02, 0xfd, 0x42, 0x02, 0x18, 0x21, 0x65, 0x14, 0xf5, 0xd2, 0x35, 0x05, 0xee, 0x8b, 0x9b, 0x31, + 0x38, 0xf8, 0x9d, 0xaa, 0xac, 0x33, 0x15, 0x15, 0x14, 0x69, 0x36, 0xf4, 0x4b, 0xea, 0xc2, 0x11, + 0x96, 0x8e, 0x76, 0xe1, 0x14, 0xee, 0x9e, 0xd1, 0x6a, 0xf7, 0x98, 0x4a, 0x9b, 0xca, 0xeb, 0x33, + 0x59, 0x6d, 0xbb, 0xc3, 0xf6, 0xd9, 0x96, 0x6e, 0xa1, 0x9f, 0xb1, 0xdf, 0x92, 0x83, 0x5f, 0xe3, + 0x23, 0xed, 0x37, 0xf5, 0xc3, 0x7d, 0x54, 0x88, 0xbd, 0xc5, 0x14, 0x7b, 0x53, 0xb9, 0x3d, 0x9b, + 0x62, 0x1e, 0x95, 0x4f, 0xf5, 0xfa, 0xbd, 0x34, 0xd6, 0x3d, 0x06, 0x5d, 0xc4, 0xfd, 0xd9, 0x73, + 0x60, 0x1c, 0x46, 0x14, 0xe3, 0xdc, 0xfb, 0xca, 0x1d, 0xa6, 0xf5, 0x1b, 0x8a, 0xf2, 0x64, 0xad, + 0x03, 0x60, 0xb4, 0x1d, 0x60, 0x04, 0xf4, 0x3b, 0x69, 0xd4, 0x7f, 0x06, 0xfa, 0xde, 0x9d, 0x31, + 0x47, 0x9e, 0x45, 0x59, 0xe1, 0x7b, 0x54, 0x8a, 0x56, 0xb6, 0xf4, 0xf9, 0x08, 0x2b, 0x7d, 0x81, + 0xfe, 0x10, 0xee, 0x76, 0x03, 0xe4, 0x88, 0xee, 0xcd, 0x9a, 0x10, 0x13, 0x30, 0xb5, 0x78, 0x3f, + 0x3e, 0xa3, 0x48, 0xa8, 0x5b, 0xec, 0x04, 0x2f, 0xa3, 0x19, 0xcc, 0x4d, 0x53, 0x0a, 0x4d, 0x03, + 0xd1, 0xc8, 0xc0, 0x78, 0x22, 0x76, 0x2d, 0xae, 0x4c, 0xf5, 0x67, 0x95, 0x9e, 0x4b, 0x2e, 0x02, + 0xb3, 0xde, 0x8a, 0x6d, 0xd6, 0xaf, 0x24, 0x40, 0xd3, 0x70, 0x36, 0x52, 0xc3, 0x27, 0x22, 0xe0, + 0x78, 0xd1, 0xb0, 0xc3, 0xd4, 0xde, 0xde, 0x8a, 0xab, 0xf6, 0x28, 0x8e, 0x7f, 0x2b, 0xc1, 0xf2, + 0xc4, 0xdf, 0xd6, 0x44, 0xc6, 0xf1, 0xe3, 0xff, 0x16, 0x27, 0xaa, 0x38, 0x94, 0x99, 0xae, 0xef, + 0x29, 0x77, 0xe2, 0xea, 0xea, 0xf5, 0xed, 0x6d, 0xf1, 0xab, 0xe3, 0xde, 0x39, 0x14, 0x3a, 0x4e, + 0x2f, 0xd8, 0x68, 0x4c, 0xad, 0x87, 0xd2, 0x27, 0x87, 0x82, 0xde, 0x75, 0x2c, 0xdd, 0xee, 0x6e, + 0x38, 0x5e, 0xb7, 0xd4, 0xc5, 0x36, 0x73, 0x75, 0x89, 0x4f, 0xe9, 0xae, 0xe9, 0x3f, 0xe1, 0x8f, + 0xbb, 0xde, 0x19, 
0x8d, 0x7e, 0x93, 0x48, 0x1e, 0x96, 0xf7, 0xda, 0x29, 0xc6, 0x79, 0xe7, 0xdf, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x87, 0x2f, 0xf0, 0xd9, 0x15, 0x26, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/controller.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/controller.pb.go new file mode 100644 index 0000000..3f8e0c3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/controller.pb.go @@ -0,0 +1,578 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouddebugger/v2/controller.proto + +package clouddebugger // import "google.golang.org/genproto/googleapis/devtools/clouddebugger/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request to register a debuggee. +type RegisterDebuggeeRequest struct { + // Debuggee information to register. + // The fields `project`, `uniquifier`, `description` and `agent_version` + // of the debuggee must be set. + Debuggee *Debuggee `protobuf:"bytes,1,opt,name=debuggee,proto3" json:"debuggee,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegisterDebuggeeRequest) Reset() { *m = RegisterDebuggeeRequest{} } +func (m *RegisterDebuggeeRequest) String() string { return proto.CompactTextString(m) } +func (*RegisterDebuggeeRequest) ProtoMessage() {} +func (*RegisterDebuggeeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{0} +} +func (m *RegisterDebuggeeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegisterDebuggeeRequest.Unmarshal(m, b) +} +func (m *RegisterDebuggeeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegisterDebuggeeRequest.Marshal(b, m, deterministic) +} +func (dst *RegisterDebuggeeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegisterDebuggeeRequest.Merge(dst, src) +} +func (m *RegisterDebuggeeRequest) XXX_Size() int { + return xxx_messageInfo_RegisterDebuggeeRequest.Size(m) +} +func (m *RegisterDebuggeeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RegisterDebuggeeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RegisterDebuggeeRequest proto.InternalMessageInfo + +func (m *RegisterDebuggeeRequest) GetDebuggee() *Debuggee { + if m != nil { + return m.Debuggee + } + return nil +} + +// Response for registering a debuggee. +type RegisterDebuggeeResponse struct { + // Debuggee resource. + // The field `id` is guaranteed to be set (in addition to the echoed fields). + // If the field `is_disabled` is set to `true`, the agent should disable + // itself by removing all breakpoints and detaching from the application. + // It should however continue to poll `RegisterDebuggee` until reenabled. 
+ Debuggee *Debuggee `protobuf:"bytes,1,opt,name=debuggee,proto3" json:"debuggee,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RegisterDebuggeeResponse) Reset() { *m = RegisterDebuggeeResponse{} } +func (m *RegisterDebuggeeResponse) String() string { return proto.CompactTextString(m) } +func (*RegisterDebuggeeResponse) ProtoMessage() {} +func (*RegisterDebuggeeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{1} +} +func (m *RegisterDebuggeeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RegisterDebuggeeResponse.Unmarshal(m, b) +} +func (m *RegisterDebuggeeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RegisterDebuggeeResponse.Marshal(b, m, deterministic) +} +func (dst *RegisterDebuggeeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RegisterDebuggeeResponse.Merge(dst, src) +} +func (m *RegisterDebuggeeResponse) XXX_Size() int { + return xxx_messageInfo_RegisterDebuggeeResponse.Size(m) +} +func (m *RegisterDebuggeeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RegisterDebuggeeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RegisterDebuggeeResponse proto.InternalMessageInfo + +func (m *RegisterDebuggeeResponse) GetDebuggee() *Debuggee { + if m != nil { + return m.Debuggee + } + return nil +} + +// Request to list active breakpoints. +type ListActiveBreakpointsRequest struct { + // Identifies the debuggee. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // A token that, if specified, blocks the method call until the list + // of active breakpoints has changed, or a server-selected timeout has + // expired. The value should be set from the `next_wait_token` field in + // the last response. The initial value should be set to `"init"`. + WaitToken string `protobuf:"bytes,2,opt,name=wait_token,json=waitToken,proto3" json:"wait_token,omitempty"` + // If set to `true` (recommended), returns `google.rpc.Code.OK` status and + // sets the `wait_expired` response field to `true` when the server-selected + // timeout has expired. + // + // If set to `false` (deprecated), returns `google.rpc.Code.ABORTED` status + // when the server-selected timeout has expired. 
+ SuccessOnTimeout bool `protobuf:"varint,3,opt,name=success_on_timeout,json=successOnTimeout,proto3" json:"success_on_timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListActiveBreakpointsRequest) Reset() { *m = ListActiveBreakpointsRequest{} } +func (m *ListActiveBreakpointsRequest) String() string { return proto.CompactTextString(m) } +func (*ListActiveBreakpointsRequest) ProtoMessage() {} +func (*ListActiveBreakpointsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{2} +} +func (m *ListActiveBreakpointsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListActiveBreakpointsRequest.Unmarshal(m, b) +} +func (m *ListActiveBreakpointsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListActiveBreakpointsRequest.Marshal(b, m, deterministic) +} +func (dst *ListActiveBreakpointsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListActiveBreakpointsRequest.Merge(dst, src) +} +func (m *ListActiveBreakpointsRequest) XXX_Size() int { + return xxx_messageInfo_ListActiveBreakpointsRequest.Size(m) +} +func (m *ListActiveBreakpointsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListActiveBreakpointsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListActiveBreakpointsRequest proto.InternalMessageInfo + +func (m *ListActiveBreakpointsRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *ListActiveBreakpointsRequest) GetWaitToken() string { + if m != nil { + return m.WaitToken + } + return "" +} + +func (m *ListActiveBreakpointsRequest) GetSuccessOnTimeout() bool { + if m != nil { + return m.SuccessOnTimeout + } + return false +} + +// Response for listing active breakpoints. +type ListActiveBreakpointsResponse struct { + // List of all active breakpoints. + // The fields `id` and `location` are guaranteed to be set on each breakpoint. + Breakpoints []*Breakpoint `protobuf:"bytes,1,rep,name=breakpoints,proto3" json:"breakpoints,omitempty"` + // A token that can be used in the next method call to block until + // the list of breakpoints changes. + NextWaitToken string `protobuf:"bytes,2,opt,name=next_wait_token,json=nextWaitToken,proto3" json:"next_wait_token,omitempty"` + // If set to `true`, indicates that there is no change to the + // list of active breakpoints and the server-selected timeout has expired. + // The `breakpoints` field would be empty and should be ignored. 
+ WaitExpired bool `protobuf:"varint,3,opt,name=wait_expired,json=waitExpired,proto3" json:"wait_expired,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListActiveBreakpointsResponse) Reset() { *m = ListActiveBreakpointsResponse{} } +func (m *ListActiveBreakpointsResponse) String() string { return proto.CompactTextString(m) } +func (*ListActiveBreakpointsResponse) ProtoMessage() {} +func (*ListActiveBreakpointsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{3} +} +func (m *ListActiveBreakpointsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListActiveBreakpointsResponse.Unmarshal(m, b) +} +func (m *ListActiveBreakpointsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListActiveBreakpointsResponse.Marshal(b, m, deterministic) +} +func (dst *ListActiveBreakpointsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListActiveBreakpointsResponse.Merge(dst, src) +} +func (m *ListActiveBreakpointsResponse) XXX_Size() int { + return xxx_messageInfo_ListActiveBreakpointsResponse.Size(m) +} +func (m *ListActiveBreakpointsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListActiveBreakpointsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListActiveBreakpointsResponse proto.InternalMessageInfo + +func (m *ListActiveBreakpointsResponse) GetBreakpoints() []*Breakpoint { + if m != nil { + return m.Breakpoints + } + return nil +} + +func (m *ListActiveBreakpointsResponse) GetNextWaitToken() string { + if m != nil { + return m.NextWaitToken + } + return "" +} + +func (m *ListActiveBreakpointsResponse) GetWaitExpired() bool { + if m != nil { + return m.WaitExpired + } + return false +} + +// Request to update an active breakpoint. +type UpdateActiveBreakpointRequest struct { + // Identifies the debuggee being debugged. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // Updated breakpoint information. + // The field `id` must be set. + // The agent must echo all Breakpoint specification fields in the update. 
+ Breakpoint *Breakpoint `protobuf:"bytes,2,opt,name=breakpoint,proto3" json:"breakpoint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateActiveBreakpointRequest) Reset() { *m = UpdateActiveBreakpointRequest{} } +func (m *UpdateActiveBreakpointRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateActiveBreakpointRequest) ProtoMessage() {} +func (*UpdateActiveBreakpointRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{4} +} +func (m *UpdateActiveBreakpointRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateActiveBreakpointRequest.Unmarshal(m, b) +} +func (m *UpdateActiveBreakpointRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateActiveBreakpointRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateActiveBreakpointRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateActiveBreakpointRequest.Merge(dst, src) +} +func (m *UpdateActiveBreakpointRequest) XXX_Size() int { + return xxx_messageInfo_UpdateActiveBreakpointRequest.Size(m) +} +func (m *UpdateActiveBreakpointRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateActiveBreakpointRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateActiveBreakpointRequest proto.InternalMessageInfo + +func (m *UpdateActiveBreakpointRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *UpdateActiveBreakpointRequest) GetBreakpoint() *Breakpoint { + if m != nil { + return m.Breakpoint + } + return nil +} + +// Response for updating an active breakpoint. +// The message is defined to allow future extensions. +type UpdateActiveBreakpointResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateActiveBreakpointResponse) Reset() { *m = UpdateActiveBreakpointResponse{} } +func (m *UpdateActiveBreakpointResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateActiveBreakpointResponse) ProtoMessage() {} +func (*UpdateActiveBreakpointResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_controller_a5860eef34f5ab07, []int{5} +} +func (m *UpdateActiveBreakpointResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateActiveBreakpointResponse.Unmarshal(m, b) +} +func (m *UpdateActiveBreakpointResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateActiveBreakpointResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateActiveBreakpointResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateActiveBreakpointResponse.Merge(dst, src) +} +func (m *UpdateActiveBreakpointResponse) XXX_Size() int { + return xxx_messageInfo_UpdateActiveBreakpointResponse.Size(m) +} +func (m *UpdateActiveBreakpointResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateActiveBreakpointResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateActiveBreakpointResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*RegisterDebuggeeRequest)(nil), "google.devtools.clouddebugger.v2.RegisterDebuggeeRequest") + proto.RegisterType((*RegisterDebuggeeResponse)(nil), "google.devtools.clouddebugger.v2.RegisterDebuggeeResponse") + proto.RegisterType((*ListActiveBreakpointsRequest)(nil), "google.devtools.clouddebugger.v2.ListActiveBreakpointsRequest") + 
proto.RegisterType((*ListActiveBreakpointsResponse)(nil), "google.devtools.clouddebugger.v2.ListActiveBreakpointsResponse") + proto.RegisterType((*UpdateActiveBreakpointRequest)(nil), "google.devtools.clouddebugger.v2.UpdateActiveBreakpointRequest") + proto.RegisterType((*UpdateActiveBreakpointResponse)(nil), "google.devtools.clouddebugger.v2.UpdateActiveBreakpointResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Controller2Client is the client API for Controller2 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type Controller2Client interface { + // Registers the debuggee with the controller service. + // + // All agents attached to the same application must call this method with + // exactly the same request content to get back the same stable `debuggee_id`. + // Agents should call this method again whenever `google.rpc.Code.NOT_FOUND` + // is returned from any controller method. + // + // This protocol allows the controller service to disable debuggees, recover + // from data loss, or change the `debuggee_id` format. Agents must handle + // `debuggee_id` value changing upon re-registration. + RegisterDebuggee(ctx context.Context, in *RegisterDebuggeeRequest, opts ...grpc.CallOption) (*RegisterDebuggeeResponse, error) + // Returns the list of all active breakpoints for the debuggee. + // + // The breakpoint specification (`location`, `condition`, and `expressions` + // fields) is semantically immutable, although the field values may + // change. For example, an agent may update the location line number + // to reflect the actual line where the breakpoint was set, but this + // doesn't change the breakpoint semantics. + // + // This means that an agent does not need to check if a breakpoint has changed + // when it encounters the same breakpoint on a successive call. + // Moreover, an agent should remember the breakpoints that are completed + // until the controller removes them from the active list to avoid + // setting those breakpoints again. + ListActiveBreakpoints(ctx context.Context, in *ListActiveBreakpointsRequest, opts ...grpc.CallOption) (*ListActiveBreakpointsResponse, error) + // Updates the breakpoint state or mutable fields. + // The entire Breakpoint message must be sent back to the controller service. + // + // Updates to active breakpoint fields are only allowed if the new value + // does not change the breakpoint specification. Updates to the `location`, + // `condition` and `expressions` fields should not alter the breakpoint + // semantics. These may only make changes such as canonicalizing a value + // or snapping the location to the correct line of code. 
+ UpdateActiveBreakpoint(ctx context.Context, in *UpdateActiveBreakpointRequest, opts ...grpc.CallOption) (*UpdateActiveBreakpointResponse, error) +} + +type controller2Client struct { + cc *grpc.ClientConn +} + +func NewController2Client(cc *grpc.ClientConn) Controller2Client { + return &controller2Client{cc} +} + +func (c *controller2Client) RegisterDebuggee(ctx context.Context, in *RegisterDebuggeeRequest, opts ...grpc.CallOption) (*RegisterDebuggeeResponse, error) { + out := new(RegisterDebuggeeResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Controller2/RegisterDebuggee", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *controller2Client) ListActiveBreakpoints(ctx context.Context, in *ListActiveBreakpointsRequest, opts ...grpc.CallOption) (*ListActiveBreakpointsResponse, error) { + out := new(ListActiveBreakpointsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Controller2/ListActiveBreakpoints", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *controller2Client) UpdateActiveBreakpoint(ctx context.Context, in *UpdateActiveBreakpointRequest, opts ...grpc.CallOption) (*UpdateActiveBreakpointResponse, error) { + out := new(UpdateActiveBreakpointResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Controller2/UpdateActiveBreakpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Controller2Server is the server API for Controller2 service. +type Controller2Server interface { + // Registers the debuggee with the controller service. + // + // All agents attached to the same application must call this method with + // exactly the same request content to get back the same stable `debuggee_id`. + // Agents should call this method again whenever `google.rpc.Code.NOT_FOUND` + // is returned from any controller method. + // + // This protocol allows the controller service to disable debuggees, recover + // from data loss, or change the `debuggee_id` format. Agents must handle + // `debuggee_id` value changing upon re-registration. + RegisterDebuggee(context.Context, *RegisterDebuggeeRequest) (*RegisterDebuggeeResponse, error) + // Returns the list of all active breakpoints for the debuggee. + // + // The breakpoint specification (`location`, `condition`, and `expressions` + // fields) is semantically immutable, although the field values may + // change. For example, an agent may update the location line number + // to reflect the actual line where the breakpoint was set, but this + // doesn't change the breakpoint semantics. + // + // This means that an agent does not need to check if a breakpoint has changed + // when it encounters the same breakpoint on a successive call. + // Moreover, an agent should remember the breakpoints that are completed + // until the controller removes them from the active list to avoid + // setting those breakpoints again. + ListActiveBreakpoints(context.Context, *ListActiveBreakpointsRequest) (*ListActiveBreakpointsResponse, error) + // Updates the breakpoint state or mutable fields. + // The entire Breakpoint message must be sent back to the controller service. + // + // Updates to active breakpoint fields are only allowed if the new value + // does not change the breakpoint specification. Updates to the `location`, + // `condition` and `expressions` fields should not alter the breakpoint + // semantics. 
These may only make changes such as canonicalizing a value + // or snapping the location to the correct line of code. + UpdateActiveBreakpoint(context.Context, *UpdateActiveBreakpointRequest) (*UpdateActiveBreakpointResponse, error) +} + +func RegisterController2Server(s *grpc.Server, srv Controller2Server) { + s.RegisterService(&_Controller2_serviceDesc, srv) +} + +func _Controller2_RegisterDebuggee_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RegisterDebuggeeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Controller2Server).RegisterDebuggee(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Controller2/RegisterDebuggee", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Controller2Server).RegisterDebuggee(ctx, req.(*RegisterDebuggeeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Controller2_ListActiveBreakpoints_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListActiveBreakpointsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Controller2Server).ListActiveBreakpoints(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Controller2/ListActiveBreakpoints", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Controller2Server).ListActiveBreakpoints(ctx, req.(*ListActiveBreakpointsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Controller2_UpdateActiveBreakpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateActiveBreakpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Controller2Server).UpdateActiveBreakpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Controller2/UpdateActiveBreakpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Controller2Server).UpdateActiveBreakpoint(ctx, req.(*UpdateActiveBreakpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Controller2_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.clouddebugger.v2.Controller2", + HandlerType: (*Controller2Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "RegisterDebuggee", + Handler: _Controller2_RegisterDebuggee_Handler, + }, + { + MethodName: "ListActiveBreakpoints", + Handler: _Controller2_ListActiveBreakpoints_Handler, + }, + { + MethodName: "UpdateActiveBreakpoint", + Handler: _Controller2_UpdateActiveBreakpoint_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/clouddebugger/v2/controller.proto", +} + +func init() { + proto.RegisterFile("google/devtools/clouddebugger/v2/controller.proto", fileDescriptor_controller_a5860eef34f5ab07) +} + +var fileDescriptor_controller_a5860eef34f5ab07 = []byte{ + // 602 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xdd, 0x6a, 0xd4, 0x40, + 0x14, 0x66, 0x5a, 0x94, 0x76, 0xa2, 0xb4, 0x0c, 0xa8, 
0x21, 0xb6, 0xba, 0x0d, 0x52, 0x96, 0x75, + 0xc9, 0x60, 0xf4, 0xc6, 0x15, 0xfc, 0xd9, 0xaa, 0x45, 0x68, 0xb5, 0x2c, 0xb5, 0x82, 0x2c, 0x2c, + 0xd9, 0xe4, 0x18, 0x86, 0x66, 0x67, 0x62, 0x66, 0xb2, 0x56, 0x4a, 0x6f, 0xbc, 0x55, 0xbc, 0xf1, + 0x2d, 0x7c, 0x01, 0xc1, 0x0b, 0x1f, 0xc0, 0x5b, 0x7d, 0x04, 0xaf, 0x7c, 0x0a, 0xc9, 0xdf, 0xfe, + 0xb4, 0xdd, 0xa6, 0x5d, 0xbc, 0xcc, 0x77, 0xe6, 0xfb, 0xce, 0xf7, 0x4d, 0xce, 0x1c, 0x7c, 0xcb, + 0x17, 0xc2, 0x0f, 0x80, 0x7a, 0xd0, 0x57, 0x42, 0x04, 0x92, 0xba, 0x81, 0x88, 0x3d, 0x0f, 0xba, + 0xb1, 0xef, 0x43, 0x44, 0xfb, 0x36, 0x75, 0x05, 0x57, 0x91, 0x08, 0x02, 0x88, 0xac, 0x30, 0x12, + 0x4a, 0x90, 0x4a, 0x46, 0xb1, 0x0a, 0x8a, 0x35, 0x46, 0xb1, 0xfa, 0xb6, 0xb1, 0x94, 0x8b, 0x3a, + 0x21, 0xa3, 0x0e, 0xe7, 0x42, 0x39, 0x8a, 0x09, 0x2e, 0x33, 0xbe, 0x71, 0xb3, 0xb4, 0xa5, 0xe7, + 0x28, 0x27, 0x3f, 0x7c, 0x35, 0x3f, 0x9c, 0x7e, 0x75, 0xe3, 0x37, 0x14, 0x7a, 0xa1, 0x7a, 0x9f, + 0x15, 0x4d, 0x07, 0x5f, 0x69, 0x81, 0xcf, 0xa4, 0x82, 0xe8, 0x71, 0x46, 0x87, 0x16, 0xbc, 0x8d, + 0x41, 0x2a, 0xf2, 0x14, 0xcf, 0xe5, 0x8a, 0xa0, 0xa3, 0x0a, 0xaa, 0x6a, 0x76, 0xcd, 0x2a, 0xf3, + 0x6d, 0x0d, 0x44, 0x06, 0x5c, 0xb3, 0x8b, 0xf5, 0xa3, 0x2d, 0x64, 0x28, 0xb8, 0x84, 0xff, 0xd6, + 0xe3, 0x13, 0xc2, 0x4b, 0x1b, 0x4c, 0xaa, 0x47, 0xae, 0x62, 0x7d, 0x68, 0x46, 0xe0, 0xec, 0x86, + 0x82, 0x71, 0x25, 0x8b, 0x30, 0xd7, 0xb1, 0x56, 0x1c, 0xee, 0x30, 0x2f, 0xed, 0x35, 0xdf, 0xc2, + 0x05, 0xf4, 0xcc, 0x23, 0xcb, 0x18, 0xbf, 0x73, 0x98, 0xea, 0x28, 0xb1, 0x0b, 0x5c, 0x9f, 0x49, + 0xeb, 0xf3, 0x09, 0xb2, 0x9d, 0x00, 0xa4, 0x8e, 0x89, 0x8c, 0x5d, 0x17, 0xa4, 0xec, 0x08, 0xde, + 0x51, 0xac, 0x07, 0x22, 0x56, 0xfa, 0x6c, 0x05, 0x55, 0xe7, 0x5a, 0x8b, 0x79, 0xe5, 0x05, 0xdf, + 0xce, 0x70, 0xf3, 0x3b, 0xc2, 0xcb, 0x13, 0xec, 0xe4, 0xc1, 0x9f, 0x63, 0xad, 0x3b, 0x84, 0x75, + 0x54, 0x99, 0xad, 0x6a, 0x76, 0xbd, 0x3c, 0xfb, 0x50, 0xab, 0x35, 0x2a, 0x40, 0x56, 0xf1, 0x02, + 0x87, 0x3d, 0xd5, 0x39, 0x92, 0xe1, 0x62, 0x02, 0xbf, 0x1a, 0xe4, 0x58, 0xc1, 0x17, 0xd2, 0x23, + 0xb0, 0x17, 0xb2, 0x08, 0xbc, 0x3c, 0x81, 0x96, 0x60, 0x4f, 0x32, 0xc8, 0xfc, 0x8c, 0xf0, 0xf2, + 0xcb, 0xd0, 0x73, 0x14, 0x1c, 0xb6, 0x7f, 0xea, 0xcb, 0xdc, 0xc0, 0x78, 0x68, 0x2e, 0x35, 0x72, + 0xd6, 0x70, 0x23, 0x7c, 0xb3, 0x82, 0xaf, 0x4d, 0xf2, 0x93, 0xdd, 0xa6, 0xfd, 0xf1, 0x1c, 0xd6, + 0xd6, 0x06, 0x8f, 0xcc, 0x26, 0xdf, 0x10, 0x5e, 0x3c, 0x3c, 0x73, 0xe4, 0x6e, 0xb9, 0x81, 0x09, + 0x4f, 0xc1, 0x68, 0x4c, 0x43, 0xcd, 0xbc, 0x99, 0xf5, 0x0f, 0xbf, 0xfe, 0x7c, 0x99, 0x59, 0x35, + 0x57, 0xc6, 0x37, 0x01, 0x2d, 0xae, 0x4b, 0xd2, 0x28, 0xa7, 0x36, 0x50, 0x8d, 0xfc, 0x46, 0xf8, + 0xd2, 0xb1, 0x93, 0x43, 0xee, 0x97, 0x7b, 0x38, 0xe9, 0x05, 0x18, 0x0f, 0xa6, 0xe6, 0xe7, 0x41, + 0x1a, 0x69, 0x90, 0x3b, 0xc4, 0x9e, 0x18, 0x64, 0x7f, 0x64, 0x2a, 0x0e, 0xe8, 0xe8, 0x78, 0xfe, + 0x45, 0xf8, 0xf2, 0xf1, 0xff, 0x90, 0x9c, 0xc2, 0xd7, 0x89, 0xd3, 0x68, 0x3c, 0x9c, 0x5e, 0x20, + 0x4f, 0xb6, 0x99, 0x26, 0x5b, 0x37, 0x9a, 0x67, 0x4f, 0x46, 0xf7, 0x87, 0x1f, 0x16, 0xf3, 0x0e, + 0x1a, 0xa8, 0xd6, 0xfc, 0x81, 0xf0, 0x0d, 0x57, 0xf4, 0x4a, 0x6d, 0x35, 0x17, 0x86, 0x33, 0xbb, + 0x95, 0x6c, 0xe3, 0x2d, 0xf4, 0x7a, 0x33, 0x27, 0xf9, 0x22, 0x70, 0xb8, 0x6f, 0x89, 0xc8, 0xa7, + 0x3e, 0xf0, 0x74, 0x57, 0xd3, 0xac, 0xe4, 0x84, 0x4c, 0x4e, 0x5e, 0xfc, 0xf7, 0xc6, 0x80, 0xaf, + 0x33, 0xfa, 0x7a, 0xa6, 0xb7, 0x96, 0xc0, 0xc5, 0xe6, 0x8c, 0xac, 0x1d, 0xfb, 0x67, 0x51, 0x6a, + 0xa7, 0xa5, 0x76, 0x51, 0x6a, 0xef, 0xd8, 0xdd, 0xf3, 0x69, 0xbf, 0xdb, 0xff, 0x02, 0x00, 0x00, + 0xff, 0xff, 0x54, 0xe1, 0x5c, 0x2a, 0xda, 0x06, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/data.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/data.pb.go new file mode 100644 index 0000000..fb7f4d0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/data.pb.go @@ -0,0 +1,1073 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouddebugger/v2/data.proto + +package clouddebugger // import "google.golang.org/genproto/googleapis/devtools/clouddebugger/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/devtools/source/v1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates references to which the message applies. +type StatusMessage_Reference int32 + +const ( + // Status doesn't refer to any particular input. + StatusMessage_UNSPECIFIED StatusMessage_Reference = 0 + // Status applies to the breakpoint and is related to its location. + StatusMessage_BREAKPOINT_SOURCE_LOCATION StatusMessage_Reference = 3 + // Status applies to the breakpoint and is related to its condition. + StatusMessage_BREAKPOINT_CONDITION StatusMessage_Reference = 4 + // Status applies to the breakpoint and is related to its expressions. + StatusMessage_BREAKPOINT_EXPRESSION StatusMessage_Reference = 7 + // Status applies to the breakpoint and is related to its age. + StatusMessage_BREAKPOINT_AGE StatusMessage_Reference = 8 + // Status applies to the entire variable. + StatusMessage_VARIABLE_NAME StatusMessage_Reference = 5 + // Status applies to variable value (variable name is valid). + StatusMessage_VARIABLE_VALUE StatusMessage_Reference = 6 +) + +var StatusMessage_Reference_name = map[int32]string{ + 0: "UNSPECIFIED", + 3: "BREAKPOINT_SOURCE_LOCATION", + 4: "BREAKPOINT_CONDITION", + 7: "BREAKPOINT_EXPRESSION", + 8: "BREAKPOINT_AGE", + 5: "VARIABLE_NAME", + 6: "VARIABLE_VALUE", +} +var StatusMessage_Reference_value = map[string]int32{ + "UNSPECIFIED": 0, + "BREAKPOINT_SOURCE_LOCATION": 3, + "BREAKPOINT_CONDITION": 4, + "BREAKPOINT_EXPRESSION": 7, + "BREAKPOINT_AGE": 8, + "VARIABLE_NAME": 5, + "VARIABLE_VALUE": 6, +} + +func (x StatusMessage_Reference) String() string { + return proto.EnumName(StatusMessage_Reference_name, int32(x)) +} +func (StatusMessage_Reference) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{1, 0} +} + +// Actions that can be taken when a breakpoint hits. +// Agents should reject breakpoints with unsupported or unknown action values. +type Breakpoint_Action int32 + +const ( + // Capture stack frame and variables and update the breakpoint. + // The data is only captured once. After that the breakpoint is set + // in a final state. + Breakpoint_CAPTURE Breakpoint_Action = 0 + // Log each breakpoint hit. 
The breakpoint remains active until + // deleted or expired. + Breakpoint_LOG Breakpoint_Action = 1 +) + +var Breakpoint_Action_name = map[int32]string{ + 0: "CAPTURE", + 1: "LOG", +} +var Breakpoint_Action_value = map[string]int32{ + "CAPTURE": 0, + "LOG": 1, +} + +func (x Breakpoint_Action) String() string { + return proto.EnumName(Breakpoint_Action_name, int32(x)) +} +func (Breakpoint_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{5, 0} +} + +// Log severity levels. +type Breakpoint_LogLevel int32 + +const ( + // Information log message. + Breakpoint_INFO Breakpoint_LogLevel = 0 + // Warning log message. + Breakpoint_WARNING Breakpoint_LogLevel = 1 + // Error log message. + Breakpoint_ERROR Breakpoint_LogLevel = 2 +) + +var Breakpoint_LogLevel_name = map[int32]string{ + 0: "INFO", + 1: "WARNING", + 2: "ERROR", +} +var Breakpoint_LogLevel_value = map[string]int32{ + "INFO": 0, + "WARNING": 1, + "ERROR": 2, +} + +func (x Breakpoint_LogLevel) String() string { + return proto.EnumName(Breakpoint_LogLevel_name, int32(x)) +} +func (Breakpoint_LogLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{5, 1} +} + +// Represents a message with parameters. +type FormatMessage struct { + // Format template for the message. The `format` uses placeholders `$0`, + // `$1`, etc. to reference parameters. `$$` can be used to denote the `$` + // character. + // + // Examples: + // + // * `Failed to load '$0' which helps debug $1 the first time it + // is loaded. Again, $0 is very important.` + // * `Please pay $$10 to use $0 instead of $1.` + Format string `protobuf:"bytes,1,opt,name=format,proto3" json:"format,omitempty"` + // Optional parameters to be embedded into the message. + Parameters []string `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FormatMessage) Reset() { *m = FormatMessage{} } +func (m *FormatMessage) String() string { return proto.CompactTextString(m) } +func (*FormatMessage) ProtoMessage() {} +func (*FormatMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{0} +} +func (m *FormatMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FormatMessage.Unmarshal(m, b) +} +func (m *FormatMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FormatMessage.Marshal(b, m, deterministic) +} +func (dst *FormatMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_FormatMessage.Merge(dst, src) +} +func (m *FormatMessage) XXX_Size() int { + return xxx_messageInfo_FormatMessage.Size(m) +} +func (m *FormatMessage) XXX_DiscardUnknown() { + xxx_messageInfo_FormatMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_FormatMessage proto.InternalMessageInfo + +func (m *FormatMessage) GetFormat() string { + if m != nil { + return m.Format + } + return "" +} + +func (m *FormatMessage) GetParameters() []string { + if m != nil { + return m.Parameters + } + return nil +} + +// Represents a contextual status message. +// The message can indicate an error or informational status, and refer to +// specific parts of the containing object. +// For example, the `Breakpoint.status` field can indicate an error referring +// to the `BREAKPOINT_SOURCE_LOCATION` with the message `Location not found`. +type StatusMessage struct { + // Distinguishes errors from informational messages. 
+ IsError bool `protobuf:"varint,1,opt,name=is_error,json=isError,proto3" json:"is_error,omitempty"` + // Reference to which the message applies. + RefersTo StatusMessage_Reference `protobuf:"varint,2,opt,name=refers_to,json=refersTo,proto3,enum=google.devtools.clouddebugger.v2.StatusMessage_Reference" json:"refers_to,omitempty"` + // Status message text. + Description *FormatMessage `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StatusMessage) Reset() { *m = StatusMessage{} } +func (m *StatusMessage) String() string { return proto.CompactTextString(m) } +func (*StatusMessage) ProtoMessage() {} +func (*StatusMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{1} +} +func (m *StatusMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StatusMessage.Unmarshal(m, b) +} +func (m *StatusMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StatusMessage.Marshal(b, m, deterministic) +} +func (dst *StatusMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_StatusMessage.Merge(dst, src) +} +func (m *StatusMessage) XXX_Size() int { + return xxx_messageInfo_StatusMessage.Size(m) +} +func (m *StatusMessage) XXX_DiscardUnknown() { + xxx_messageInfo_StatusMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_StatusMessage proto.InternalMessageInfo + +func (m *StatusMessage) GetIsError() bool { + if m != nil { + return m.IsError + } + return false +} + +func (m *StatusMessage) GetRefersTo() StatusMessage_Reference { + if m != nil { + return m.RefersTo + } + return StatusMessage_UNSPECIFIED +} + +func (m *StatusMessage) GetDescription() *FormatMessage { + if m != nil { + return m.Description + } + return nil +} + +// Represents a location in the source code. +type SourceLocation struct { + // Path to the source file within the source context of the target binary. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // Line inside the file. The first line in the file has the value `1`. + Line int32 `protobuf:"varint,2,opt,name=line,proto3" json:"line,omitempty"` + // Column within a line. The first column in a line as the value `1`. + // Agents that do not support setting breakpoints on specific columns ignore + // this field. 
+ Column int32 `protobuf:"varint,3,opt,name=column,proto3" json:"column,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceLocation) Reset() { *m = SourceLocation{} } +func (m *SourceLocation) String() string { return proto.CompactTextString(m) } +func (*SourceLocation) ProtoMessage() {} +func (*SourceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{2} +} +func (m *SourceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceLocation.Unmarshal(m, b) +} +func (m *SourceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceLocation.Marshal(b, m, deterministic) +} +func (dst *SourceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceLocation.Merge(dst, src) +} +func (m *SourceLocation) XXX_Size() int { + return xxx_messageInfo_SourceLocation.Size(m) +} +func (m *SourceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_SourceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceLocation proto.InternalMessageInfo + +func (m *SourceLocation) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *SourceLocation) GetLine() int32 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *SourceLocation) GetColumn() int32 { + if m != nil { + return m.Column + } + return 0 +} + +// Represents a variable or an argument possibly of a compound object type. +// Note how the following variables are represented: +// +// 1) A simple variable: +// +// int x = 5 +// +// { name: "x", value: "5", type: "int" } // Captured variable +// +// 2) A compound object: +// +// struct T { +// int m1; +// int m2; +// }; +// T x = { 3, 7 }; +// +// { // Captured variable +// name: "x", +// type: "T", +// members { name: "m1", value: "3", type: "int" }, +// members { name: "m2", value: "7", type: "int" } +// } +// +// 3) A pointer where the pointee was captured: +// +// T x = { 3, 7 }; +// T* p = &x; +// +// { // Captured variable +// name: "p", +// type: "T*", +// value: "0x00500500", +// members { name: "m1", value: "3", type: "int" }, +// members { name: "m2", value: "7", type: "int" } +// } +// +// 4) A pointer where the pointee was not captured: +// +// T* p = new T; +// +// { // Captured variable +// name: "p", +// type: "T*", +// value: "0x00400400" +// status { is_error: true, description { format: "unavailable" } } +// } +// +// The status should describe the reason for the missing value, +// such as ``, ``, ``. +// +// Note that a null pointer should not have members. +// +// 5) An unnamed value: +// +// int* p = new int(7); +// +// { // Captured variable +// name: "p", +// value: "0x00500500", +// type: "int*", +// members { value: "7", type: "int" } } +// +// 6) An unnamed pointer where the pointee was not captured: +// +// int* p = new int(7); +// int** pp = &p; +// +// { // Captured variable +// name: "pp", +// value: "0x00500500", +// type: "int**", +// members { +// value: "0x00400400", +// type: "int*" +// status { +// is_error: true, +// description: { format: "unavailable" } } +// } +// } +// } +// +// To optimize computation, memory and network traffic, variables that +// repeat in the output multiple times can be stored once in a shared +// variable table and be referenced using the `var_table_index` field. The +// variables stored in the shared table are nameless and are essentially +// a partition of the complete variable. 
To reconstruct the complete +// variable, merge the referencing variable with the referenced variable. +// +// When using the shared variable table, the following variables: +// +// T x = { 3, 7 }; +// T* p = &x; +// T& r = x; +// +// { name: "x", var_table_index: 3, type: "T" } // Captured variables +// { name: "p", value "0x00500500", type="T*", var_table_index: 3 } +// { name: "r", type="T&", var_table_index: 3 } +// +// { // Shared variable table entry #3: +// members { name: "m1", value: "3", type: "int" }, +// members { name: "m2", value: "7", type: "int" } +// } +// +// Note that the pointer address is stored with the referencing variable +// and not with the referenced variable. This allows the referenced variable +// to be shared between pointers and references. +// +// The type field is optional. The debugger agent may or may not support it. +type Variable struct { + // Name of the variable, if any. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Simple value of the variable. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Variable type (e.g. `MyClass`). If the variable is split with + // `var_table_index`, `type` goes next to `value`. The interpretation of + // a type is agent specific. It is recommended to include the dynamic type + // rather than a static type of an object. + Type string `protobuf:"bytes,6,opt,name=type,proto3" json:"type,omitempty"` + // Members contained or pointed to by the variable. + Members []*Variable `protobuf:"bytes,3,rep,name=members,proto3" json:"members,omitempty"` + // Reference to a variable in the shared variable table. More than + // one variable can reference the same variable in the table. The + // `var_table_index` field is an index into `variable_table` in Breakpoint. + VarTableIndex *wrappers.Int32Value `protobuf:"bytes,4,opt,name=var_table_index,json=varTableIndex,proto3" json:"var_table_index,omitempty"` + // Status associated with the variable. This field will usually stay + // unset. A status of a single variable only applies to that variable or + // expression. The rest of breakpoint data still remains valid. Variables + // might be reported in error state even when breakpoint is not in final + // state. + // + // The message may refer to variable name with `refers_to` set to + // `VARIABLE_NAME`. Alternatively `refers_to` will be set to `VARIABLE_VALUE`. + // In either case variable value and members will be unset. + // + // Example of error message applied to name: `Invalid expression syntax`. + // + // Example of information message applied to value: `Not captured`. 
+ // + // Examples of error message applied to value: + // + // * `Malformed string`, + // * `Field f not found in class C` + // * `Null pointer dereference` + Status *StatusMessage `protobuf:"bytes,5,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Variable) Reset() { *m = Variable{} } +func (m *Variable) String() string { return proto.CompactTextString(m) } +func (*Variable) ProtoMessage() {} +func (*Variable) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{3} +} +func (m *Variable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Variable.Unmarshal(m, b) +} +func (m *Variable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Variable.Marshal(b, m, deterministic) +} +func (dst *Variable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Variable.Merge(dst, src) +} +func (m *Variable) XXX_Size() int { + return xxx_messageInfo_Variable.Size(m) +} +func (m *Variable) XXX_DiscardUnknown() { + xxx_messageInfo_Variable.DiscardUnknown(m) +} + +var xxx_messageInfo_Variable proto.InternalMessageInfo + +func (m *Variable) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Variable) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *Variable) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Variable) GetMembers() []*Variable { + if m != nil { + return m.Members + } + return nil +} + +func (m *Variable) GetVarTableIndex() *wrappers.Int32Value { + if m != nil { + return m.VarTableIndex + } + return nil +} + +func (m *Variable) GetStatus() *StatusMessage { + if m != nil { + return m.Status + } + return nil +} + +// Represents a stack frame context. +type StackFrame struct { + // Demangled function name at the call site. + Function string `protobuf:"bytes,1,opt,name=function,proto3" json:"function,omitempty"` + // Source location of the call site. + Location *SourceLocation `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // Set of arguments passed to this function. + // Note that this might not be populated for all stack frames. + Arguments []*Variable `protobuf:"bytes,3,rep,name=arguments,proto3" json:"arguments,omitempty"` + // Set of local variables at the stack frame location. + // Note that this might not be populated for all stack frames. 
+ Locals []*Variable `protobuf:"bytes,4,rep,name=locals,proto3" json:"locals,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StackFrame) Reset() { *m = StackFrame{} } +func (m *StackFrame) String() string { return proto.CompactTextString(m) } +func (*StackFrame) ProtoMessage() {} +func (*StackFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{4} +} +func (m *StackFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StackFrame.Unmarshal(m, b) +} +func (m *StackFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StackFrame.Marshal(b, m, deterministic) +} +func (dst *StackFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_StackFrame.Merge(dst, src) +} +func (m *StackFrame) XXX_Size() int { + return xxx_messageInfo_StackFrame.Size(m) +} +func (m *StackFrame) XXX_DiscardUnknown() { + xxx_messageInfo_StackFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_StackFrame proto.InternalMessageInfo + +func (m *StackFrame) GetFunction() string { + if m != nil { + return m.Function + } + return "" +} + +func (m *StackFrame) GetLocation() *SourceLocation { + if m != nil { + return m.Location + } + return nil +} + +func (m *StackFrame) GetArguments() []*Variable { + if m != nil { + return m.Arguments + } + return nil +} + +func (m *StackFrame) GetLocals() []*Variable { + if m != nil { + return m.Locals + } + return nil +} + +// Represents the breakpoint specification, status and results. +type Breakpoint struct { + // Breakpoint identifier, unique in the scope of the debuggee. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Action that the agent should perform when the code at the + // breakpoint location is hit. + Action Breakpoint_Action `protobuf:"varint,13,opt,name=action,proto3,enum=google.devtools.clouddebugger.v2.Breakpoint_Action" json:"action,omitempty"` + // Breakpoint source location. + Location *SourceLocation `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // Condition that triggers the breakpoint. + // The condition is a compound boolean expression composed using expressions + // in a programming language at the source location. + Condition string `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` + // List of read-only expressions to evaluate at the breakpoint location. + // The expressions are composed using expressions in the programming language + // at the source location. If the breakpoint action is `LOG`, the evaluated + // expressions are included in log statements. + Expressions []string `protobuf:"bytes,4,rep,name=expressions,proto3" json:"expressions,omitempty"` + // Only relevant when action is `LOG`. Defines the message to log when + // the breakpoint hits. The message may include parameter placeholders `$0`, + // `$1`, etc. These placeholders are replaced with the evaluated value + // of the appropriate expression. Expressions not referenced in + // `log_message_format` are not logged. + // + // Example: `Message received, id = $0, count = $1` with + // `expressions` = `[ message.id, message.count ]`. + LogMessageFormat string `protobuf:"bytes,14,opt,name=log_message_format,json=logMessageFormat,proto3" json:"log_message_format,omitempty"` + // Indicates the severity of the log. Only relevant when action is `LOG`. 
+ LogLevel Breakpoint_LogLevel `protobuf:"varint,15,opt,name=log_level,json=logLevel,proto3,enum=google.devtools.clouddebugger.v2.Breakpoint_LogLevel" json:"log_level,omitempty"` + // When true, indicates that this is a final result and the + // breakpoint state will not change from here on. + IsFinalState bool `protobuf:"varint,5,opt,name=is_final_state,json=isFinalState,proto3" json:"is_final_state,omitempty"` + // Time this breakpoint was created by the server in seconds resolution. + CreateTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time this breakpoint was finalized as seen by the server in seconds + // resolution. + FinalTime *timestamp.Timestamp `protobuf:"bytes,12,opt,name=final_time,json=finalTime,proto3" json:"final_time,omitempty"` + // E-mail address of the user that created this breakpoint + UserEmail string `protobuf:"bytes,16,opt,name=user_email,json=userEmail,proto3" json:"user_email,omitempty"` + // Breakpoint status. + // + // The status includes an error flag and a human readable message. + // This field is usually unset. The message can be either + // informational or an error message. Regardless, clients should always + // display the text message back to the user. + // + // Error status indicates complete failure of the breakpoint. + // + // Example (non-final state): `Still loading symbols...` + // + // Examples (final state): + // + // * `Invalid line number` referring to location + // * `Field f not found in class C` referring to condition + Status *StatusMessage `protobuf:"bytes,10,opt,name=status,proto3" json:"status,omitempty"` + // The stack at breakpoint time, where stack_frames[0] represents the most + // recently entered function. + StackFrames []*StackFrame `protobuf:"bytes,7,rep,name=stack_frames,json=stackFrames,proto3" json:"stack_frames,omitempty"` + // Values of evaluated expressions at breakpoint time. + // The evaluated expressions appear in exactly the same order they + // are listed in the `expressions` field. + // The `name` field holds the original expression text, the `value` or + // `members` field holds the result of the evaluated expression. + // If the expression cannot be evaluated, the `status` inside the `Variable` + // will indicate an error and contain the error text. + EvaluatedExpressions []*Variable `protobuf:"bytes,8,rep,name=evaluated_expressions,json=evaluatedExpressions,proto3" json:"evaluated_expressions,omitempty"` + // The `variable_table` exists to aid with computation, memory and network + // traffic optimization. It enables storing a variable once and reference + // it from multiple variables, including variables stored in the + // `variable_table` itself. + // For example, the same `this` object, which may appear at many levels of + // the stack, can have all of its data stored once in this table. The + // stack frame variables then would hold only a reference to it. + // + // The variable `var_table_index` field is an index into this repeated field. + // The stored objects are nameless and get their name from the referencing + // variable. The effective variable is a merge of the referencing variable + // and the referenced variable. + VariableTable []*Variable `protobuf:"bytes,9,rep,name=variable_table,json=variableTable,proto3" json:"variable_table,omitempty"` + // A set of custom breakpoint properties, populated by the agent, to be + // displayed to the user. 
+ Labels map[string]string `protobuf:"bytes,17,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Breakpoint) Reset() { *m = Breakpoint{} } +func (m *Breakpoint) String() string { return proto.CompactTextString(m) } +func (*Breakpoint) ProtoMessage() {} +func (*Breakpoint) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{5} +} +func (m *Breakpoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Breakpoint.Unmarshal(m, b) +} +func (m *Breakpoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Breakpoint.Marshal(b, m, deterministic) +} +func (dst *Breakpoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Breakpoint.Merge(dst, src) +} +func (m *Breakpoint) XXX_Size() int { + return xxx_messageInfo_Breakpoint.Size(m) +} +func (m *Breakpoint) XXX_DiscardUnknown() { + xxx_messageInfo_Breakpoint.DiscardUnknown(m) +} + +var xxx_messageInfo_Breakpoint proto.InternalMessageInfo + +func (m *Breakpoint) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Breakpoint) GetAction() Breakpoint_Action { + if m != nil { + return m.Action + } + return Breakpoint_CAPTURE +} + +func (m *Breakpoint) GetLocation() *SourceLocation { + if m != nil { + return m.Location + } + return nil +} + +func (m *Breakpoint) GetCondition() string { + if m != nil { + return m.Condition + } + return "" +} + +func (m *Breakpoint) GetExpressions() []string { + if m != nil { + return m.Expressions + } + return nil +} + +func (m *Breakpoint) GetLogMessageFormat() string { + if m != nil { + return m.LogMessageFormat + } + return "" +} + +func (m *Breakpoint) GetLogLevel() Breakpoint_LogLevel { + if m != nil { + return m.LogLevel + } + return Breakpoint_INFO +} + +func (m *Breakpoint) GetIsFinalState() bool { + if m != nil { + return m.IsFinalState + } + return false +} + +func (m *Breakpoint) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Breakpoint) GetFinalTime() *timestamp.Timestamp { + if m != nil { + return m.FinalTime + } + return nil +} + +func (m *Breakpoint) GetUserEmail() string { + if m != nil { + return m.UserEmail + } + return "" +} + +func (m *Breakpoint) GetStatus() *StatusMessage { + if m != nil { + return m.Status + } + return nil +} + +func (m *Breakpoint) GetStackFrames() []*StackFrame { + if m != nil { + return m.StackFrames + } + return nil +} + +func (m *Breakpoint) GetEvaluatedExpressions() []*Variable { + if m != nil { + return m.EvaluatedExpressions + } + return nil +} + +func (m *Breakpoint) GetVariableTable() []*Variable { + if m != nil { + return m.VariableTable + } + return nil +} + +func (m *Breakpoint) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// Represents the debugged application. The application may include one or more +// replicated processes executing the same code. Each of these processes is +// attached with a debugger agent, carrying out the debugging commands. +// Agents attached to the same debuggee identify themselves as such by using +// exactly the same Debuggee message value when registering. +type Debuggee struct { + // Unique identifier for the debuggee generated by the controller service. 
+ Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Project the debuggee is associated with. + // Use project number or id when registering a Google Cloud Platform project. + Project string `protobuf:"bytes,2,opt,name=project,proto3" json:"project,omitempty"` + // Uniquifier to further distinguish the application. + // It is possible that different applications might have identical values in + // the debuggee message, thus, incorrectly identified as a single application + // by the Controller service. This field adds salt to further distinguish the + // application. Agents should consider seeding this field with value that + // identifies the code, binary, configuration and environment. + Uniquifier string `protobuf:"bytes,3,opt,name=uniquifier,proto3" json:"uniquifier,omitempty"` + // Human readable description of the debuggee. + // Including a human-readable project name, environment name and version + // information is recommended. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // If set to `true`, indicates that Controller service does not detect any + // activity from the debuggee agents and the application is possibly stopped. + IsInactive bool `protobuf:"varint,5,opt,name=is_inactive,json=isInactive,proto3" json:"is_inactive,omitempty"` + // Version ID of the agent. + // Schema: `domain/language-platform/vmajor.minor` (for example + // `google.com/java-gcp/v1.1`). + AgentVersion string `protobuf:"bytes,6,opt,name=agent_version,json=agentVersion,proto3" json:"agent_version,omitempty"` + // If set to `true`, indicates that the agent should disable itself and + // detach from the debuggee. + IsDisabled bool `protobuf:"varint,7,opt,name=is_disabled,json=isDisabled,proto3" json:"is_disabled,omitempty"` + // Human readable message to be displayed to the user about this debuggee. + // Absence of this field indicates no status. The message can be either + // informational or an error status. + Status *StatusMessage `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"` + // References to the locations and revisions of the source code used in the + // deployed application. + SourceContexts []*v1.SourceContext `protobuf:"bytes,9,rep,name=source_contexts,json=sourceContexts,proto3" json:"source_contexts,omitempty"` + // References to the locations and revisions of the source code used in the + // deployed application. + ExtSourceContexts []*v1.ExtendedSourceContext `protobuf:"bytes,13,rep,name=ext_source_contexts,json=extSourceContexts,proto3" json:"ext_source_contexts,omitempty"` // Deprecated: Do not use. + // A set of custom debuggee properties, populated by the agent, to be + // displayed to the user. 
+ Labels map[string]string `protobuf:"bytes,11,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Debuggee) Reset() { *m = Debuggee{} } +func (m *Debuggee) String() string { return proto.CompactTextString(m) } +func (*Debuggee) ProtoMessage() {} +func (*Debuggee) Descriptor() ([]byte, []int) { + return fileDescriptor_data_3a367f88e807ff75, []int{6} +} +func (m *Debuggee) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Debuggee.Unmarshal(m, b) +} +func (m *Debuggee) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Debuggee.Marshal(b, m, deterministic) +} +func (dst *Debuggee) XXX_Merge(src proto.Message) { + xxx_messageInfo_Debuggee.Merge(dst, src) +} +func (m *Debuggee) XXX_Size() int { + return xxx_messageInfo_Debuggee.Size(m) +} +func (m *Debuggee) XXX_DiscardUnknown() { + xxx_messageInfo_Debuggee.DiscardUnknown(m) +} + +var xxx_messageInfo_Debuggee proto.InternalMessageInfo + +func (m *Debuggee) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Debuggee) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *Debuggee) GetUniquifier() string { + if m != nil { + return m.Uniquifier + } + return "" +} + +func (m *Debuggee) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Debuggee) GetIsInactive() bool { + if m != nil { + return m.IsInactive + } + return false +} + +func (m *Debuggee) GetAgentVersion() string { + if m != nil { + return m.AgentVersion + } + return "" +} + +func (m *Debuggee) GetIsDisabled() bool { + if m != nil { + return m.IsDisabled + } + return false +} + +func (m *Debuggee) GetStatus() *StatusMessage { + if m != nil { + return m.Status + } + return nil +} + +func (m *Debuggee) GetSourceContexts() []*v1.SourceContext { + if m != nil { + return m.SourceContexts + } + return nil +} + +// Deprecated: Do not use. 
+func (m *Debuggee) GetExtSourceContexts() []*v1.ExtendedSourceContext { + if m != nil { + return m.ExtSourceContexts + } + return nil +} + +func (m *Debuggee) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func init() { + proto.RegisterType((*FormatMessage)(nil), "google.devtools.clouddebugger.v2.FormatMessage") + proto.RegisterType((*StatusMessage)(nil), "google.devtools.clouddebugger.v2.StatusMessage") + proto.RegisterType((*SourceLocation)(nil), "google.devtools.clouddebugger.v2.SourceLocation") + proto.RegisterType((*Variable)(nil), "google.devtools.clouddebugger.v2.Variable") + proto.RegisterType((*StackFrame)(nil), "google.devtools.clouddebugger.v2.StackFrame") + proto.RegisterType((*Breakpoint)(nil), "google.devtools.clouddebugger.v2.Breakpoint") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.clouddebugger.v2.Breakpoint.LabelsEntry") + proto.RegisterType((*Debuggee)(nil), "google.devtools.clouddebugger.v2.Debuggee") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.clouddebugger.v2.Debuggee.LabelsEntry") + proto.RegisterEnum("google.devtools.clouddebugger.v2.StatusMessage_Reference", StatusMessage_Reference_name, StatusMessage_Reference_value) + proto.RegisterEnum("google.devtools.clouddebugger.v2.Breakpoint_Action", Breakpoint_Action_name, Breakpoint_Action_value) + proto.RegisterEnum("google.devtools.clouddebugger.v2.Breakpoint_LogLevel", Breakpoint_LogLevel_name, Breakpoint_LogLevel_value) +} + +func init() { + proto.RegisterFile("google/devtools/clouddebugger/v2/data.proto", fileDescriptor_data_3a367f88e807ff75) +} + +var fileDescriptor_data_3a367f88e807ff75 = []byte{ + // 1293 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0xdd, 0x72, 0xda, 0x46, + 0x14, 0x0e, 0x3f, 0x06, 0xe9, 0x60, 0x30, 0xd9, 0x26, 0x1d, 0xc5, 0x4d, 0x1d, 0x86, 0xe6, 0xc2, + 0xd3, 0x66, 0x20, 0x21, 0xd3, 0x4e, 0xd2, 0x5c, 0x01, 0x96, 0x5d, 0x26, 0x04, 0xc8, 0x62, 0xd3, + 0x4e, 0x27, 0x33, 0x9a, 0x35, 0x5a, 0x54, 0x35, 0x42, 0xa2, 0xbb, 0x0b, 0x75, 0xee, 0xf3, 0x18, + 0x7d, 0x82, 0x4e, 0xdf, 0xa0, 0x6f, 0xd0, 0x97, 0xe8, 0x7d, 0x9f, 0xa0, 0x97, 0x9d, 0x5d, 0xad, + 0x88, 0x48, 0x9a, 0x12, 0x37, 0xb9, 0xdb, 0xfd, 0xce, 0x39, 0xdf, 0x8a, 0x6f, 0xbf, 0x73, 0x24, + 0xe0, 0x0b, 0x2f, 0x8a, 0xbc, 0x80, 0x36, 0x5d, 0xba, 0x12, 0x51, 0x14, 0xf0, 0xe6, 0x34, 0x88, + 0x96, 0xae, 0x4b, 0xcf, 0x97, 0x9e, 0x47, 0x59, 0x73, 0xd5, 0x6a, 0xba, 0x44, 0x90, 0xc6, 0x82, + 0x45, 0x22, 0x42, 0xb5, 0x38, 0xb9, 0x91, 0x24, 0x37, 0x36, 0x92, 0x1b, 0xab, 0xd6, 0xfe, 0x4d, + 0x4d, 0x47, 0x16, 0x7e, 0x93, 0x84, 0x61, 0x24, 0x88, 0xf0, 0xa3, 0x90, 0xc7, 0xf5, 0xfb, 0x8d, + 0xd7, 0x0f, 0xe3, 0xd1, 0x92, 0x4d, 0x69, 0x73, 0x75, 0x4f, 0xaf, 0x9c, 0x69, 0x14, 0x0a, 0x7a, + 0x21, 0x74, 0xfe, 0x81, 0xce, 0x57, 0xbb, 0xf3, 0xe5, 0xac, 0xe9, 0x2e, 0x99, 0x22, 0xd4, 0xf1, + 0x5b, 0xaf, 0xc7, 0x85, 0x3f, 0xa7, 0x5c, 0x90, 0xf9, 0xe2, 0x6d, 0x04, 0x3f, 0x33, 0xb2, 0x58, + 0x50, 0xa6, 0x1f, 0xa8, 0x7e, 0x02, 0xe5, 0xe3, 0x88, 0xcd, 0x89, 0x78, 0x42, 0x39, 0x27, 0x1e, + 0x45, 0x1f, 0x43, 0x61, 0xa6, 0x00, 0x2b, 0x53, 0xcb, 0x1c, 0x9a, 0x58, 0xef, 0xd0, 0x01, 0xc0, + 0x82, 0x30, 0x32, 0xa7, 0x82, 0x32, 0x6e, 0x65, 0x6b, 0xb9, 0x43, 0x13, 0xa7, 0x90, 0xfa, 0xcb, + 0x1c, 0x94, 0xc7, 0x82, 0x88, 0x25, 0x4f, 0x98, 0x6e, 0x80, 0xe1, 0x73, 0x87, 0x32, 0x16, 0x31, + 0xc5, 0x65, 0xe0, 0xa2, 0xcf, 0x6d, 0xb9, 0x45, 0x13, 0x30, 0x19, 0x9d, 0x51, 0xc6, 0x1d, 0x11, + 0x59, 0xd9, 0x5a, 0xe6, 0xb0, 0xd2, 0x7a, 0xd8, 0xd8, 
0x26, 0x6d, 0x63, 0x83, 0xbe, 0x81, 0x25, + 0x01, 0x0d, 0xa7, 0x14, 0x1b, 0x31, 0xd7, 0x69, 0x84, 0x9e, 0x42, 0xc9, 0xa5, 0x7c, 0xca, 0xfc, + 0x85, 0xd4, 0xc8, 0xca, 0xd5, 0x32, 0x87, 0xa5, 0x56, 0x73, 0x3b, 0xf3, 0x86, 0x04, 0x38, 0xcd, + 0x51, 0xff, 0x2d, 0x03, 0xe6, 0xfa, 0x28, 0xb4, 0x07, 0xa5, 0xb3, 0xc1, 0x78, 0x64, 0x77, 0x7b, + 0xc7, 0x3d, 0xfb, 0xa8, 0x7a, 0x05, 0x1d, 0xc0, 0x7e, 0x07, 0xdb, 0xed, 0xc7, 0xa3, 0x61, 0x6f, + 0x70, 0xea, 0x8c, 0x87, 0x67, 0xb8, 0x6b, 0x3b, 0xfd, 0x61, 0xb7, 0x7d, 0xda, 0x1b, 0x0e, 0xaa, + 0x39, 0x64, 0xc1, 0xb5, 0x54, 0xbc, 0x3b, 0x1c, 0x1c, 0xf5, 0x54, 0x24, 0x8f, 0x6e, 0xc0, 0xf5, + 0x54, 0xc4, 0xfe, 0x6e, 0x84, 0xed, 0xf1, 0x58, 0x86, 0x8a, 0x08, 0x41, 0x25, 0x15, 0x6a, 0x9f, + 0xd8, 0x55, 0x03, 0x5d, 0x85, 0xf2, 0xa4, 0x8d, 0x7b, 0xed, 0x4e, 0xdf, 0x76, 0x06, 0xed, 0x27, + 0x76, 0x75, 0x47, 0xa6, 0xad, 0xa1, 0x49, 0xbb, 0x7f, 0x66, 0x57, 0x0b, 0xf5, 0x11, 0x54, 0xc6, + 0xca, 0x48, 0xfd, 0x68, 0xaa, 0x8c, 0x82, 0x10, 0xe4, 0x17, 0x44, 0xfc, 0xa0, 0xaf, 0x53, 0xad, + 0x25, 0x16, 0xf8, 0x21, 0x55, 0xd2, 0xef, 0x60, 0xb5, 0x96, 0x17, 0x3f, 0x8d, 0x82, 0xe5, 0x3c, + 0x96, 0x6d, 0x07, 0xeb, 0x5d, 0xfd, 0x97, 0x2c, 0x18, 0x13, 0xc2, 0x7c, 0x72, 0x1e, 0x50, 0x59, + 0x18, 0x92, 0x39, 0x4d, 0xc8, 0xe4, 0x1a, 0x5d, 0x83, 0x9d, 0x15, 0x09, 0x96, 0x31, 0x9b, 0x89, + 0xe3, 0x8d, 0xcc, 0x14, 0x2f, 0x16, 0xd4, 0x2a, 0xc4, 0x99, 0x72, 0x8d, 0x8e, 0xa0, 0x38, 0xa7, + 0xf3, 0x73, 0x69, 0xa0, 0x5c, 0x2d, 0x77, 0x58, 0x6a, 0x7d, 0xbe, 0xfd, 0x6a, 0x92, 0xa3, 0x71, + 0x52, 0x8a, 0xba, 0xb0, 0xb7, 0x22, 0xcc, 0x11, 0x12, 0x75, 0xfc, 0xd0, 0xa5, 0x17, 0x56, 0x5e, + 0x5d, 0xf4, 0x27, 0x09, 0x5b, 0x62, 0xf6, 0x46, 0x2f, 0x14, 0xf7, 0x5b, 0x13, 0xf9, 0x3c, 0xb8, + 0xbc, 0x22, 0xec, 0x54, 0x96, 0xf4, 0x64, 0x05, 0x3a, 0x81, 0x02, 0x57, 0x76, 0xb2, 0x76, 0xde, + 0xd5, 0x24, 0x1b, 0xf6, 0xc3, 0xba, 0xbc, 0xfe, 0x32, 0x0b, 0x30, 0x16, 0x64, 0xfa, 0xfc, 0x58, + 0xb6, 0x02, 0xda, 0x07, 0x63, 0xb6, 0x0c, 0xa7, 0xca, 0x7e, 0xb1, 0x48, 0xeb, 0x3d, 0xea, 0x83, + 0x11, 0xe8, 0x5b, 0x51, 0x5a, 0x95, 0x5a, 0x77, 0xdf, 0xe1, 0xd4, 0x8d, 0xdb, 0xc4, 0x6b, 0x06, + 0xf4, 0x0d, 0x98, 0x84, 0x79, 0xcb, 0x39, 0x0d, 0xc5, 0xff, 0x91, 0xf3, 0x55, 0x31, 0xea, 0x40, + 0x41, 0xb2, 0x06, 0xdc, 0xca, 0x5f, 0x9a, 0x46, 0x57, 0xd6, 0xff, 0x34, 0x00, 0x3a, 0x8c, 0x92, + 0xe7, 0x8b, 0xc8, 0x0f, 0x05, 0xaa, 0x40, 0xd6, 0x77, 0xb5, 0x00, 0x59, 0xdf, 0x45, 0x8f, 0xa1, + 0x40, 0x62, 0x51, 0xca, 0xaa, 0xdb, 0xef, 0x6f, 0x3f, 0xe2, 0x15, 0x5b, 0xa3, 0xad, 0x4a, 0xb1, + 0xa6, 0xf8, 0xc0, 0x3a, 0xde, 0x04, 0x73, 0x1a, 0x85, 0xae, 0xbf, 0x9e, 0x18, 0x26, 0x7e, 0x05, + 0xa0, 0x1a, 0x94, 0xe8, 0xc5, 0x82, 0x51, 0xce, 0xe5, 0x14, 0x57, 0x02, 0x99, 0x38, 0x0d, 0xa1, + 0x3b, 0x80, 0x82, 0xc8, 0x73, 0xe6, 0xb1, 0x2f, 0x1c, 0x3d, 0x3c, 0x2b, 0x8a, 0xa8, 0x1a, 0x44, + 0x9e, 0x36, 0x4c, 0x3c, 0x62, 0x10, 0x06, 0x53, 0x66, 0x07, 0x74, 0x45, 0x03, 0x6b, 0x4f, 0x69, + 0xf1, 0xe5, 0xa5, 0xb4, 0xe8, 0x47, 0x5e, 0x5f, 0x16, 0xcb, 0x5f, 0x10, 0xaf, 0xd0, 0x6d, 0xa8, + 0xf8, 0xdc, 0x99, 0xf9, 0x21, 0x09, 0x1c, 0xe9, 0x4a, 0xaa, 0x3c, 0x6d, 0xe0, 0x5d, 0x9f, 0x1f, + 0x4b, 0x50, 0x1a, 0x97, 0xa2, 0x47, 0x50, 0x9a, 0x32, 0x4a, 0x04, 0x75, 0xe4, 0x3b, 0xc2, 0x2a, + 0x29, 0xe1, 0xf6, 0xdf, 0x68, 0x99, 0xd3, 0xe4, 0x05, 0x82, 0x21, 0x4e, 0x97, 0x00, 0x7a, 0x08, + 0x10, 0xf3, 0xab, 0xda, 0xdd, 0xad, 0xb5, 0xa6, 0xca, 0x56, 0xa5, 0x9f, 0x02, 0x2c, 0x39, 0x65, + 0x0e, 0x9d, 0x13, 0x3f, 0xb0, 0xaa, 0xb1, 0xc0, 0x12, 0xb1, 0x25, 0x90, 0x6a, 0x44, 0x78, 0xaf, + 0x46, 0x44, 0x43, 0xd8, 0xe5, 0xb2, 0x0f, 0x9d, 0x99, 0x6c, 0x44, 0x6e, 0x15, 
0x95, 0x97, 0xef, + 0xbc, 0x13, 0x9d, 0xee, 0x5e, 0x5c, 0xe2, 0xeb, 0x35, 0x47, 0x0e, 0x5c, 0xa7, 0x72, 0x96, 0x11, + 0x41, 0x5d, 0x27, 0x6d, 0x02, 0xe3, 0xd2, 0x5d, 0x72, 0x6d, 0x4d, 0x64, 0xa7, 0x9c, 0xf3, 0x14, + 0x2a, 0x2b, 0x9d, 0x11, 0x4f, 0x33, 0xcb, 0xbc, 0x34, 0x73, 0x39, 0x61, 0x50, 0xb3, 0x0d, 0x8d, + 0xa0, 0x10, 0x90, 0x73, 0x1a, 0x70, 0xeb, 0xaa, 0xa2, 0x7a, 0x70, 0x39, 0x6f, 0xa9, 0x52, 0x3b, + 0x14, 0xec, 0x05, 0xd6, 0x3c, 0xfb, 0x0f, 0xa1, 0x94, 0x82, 0x51, 0x15, 0x72, 0xcf, 0xe9, 0x0b, + 0xdd, 0xd9, 0x72, 0xf9, 0xef, 0xe3, 0xff, 0xeb, 0xec, 0x83, 0x4c, 0xfd, 0x00, 0x0a, 0x71, 0xe7, + 0xa2, 0x12, 0x14, 0xbb, 0xed, 0xd1, 0xe9, 0x19, 0xb6, 0xab, 0x57, 0x50, 0x11, 0x72, 0xfd, 0xe1, + 0x49, 0x35, 0x53, 0xbf, 0x03, 0x46, 0xe2, 0x66, 0x64, 0x40, 0xbe, 0x37, 0x38, 0x1e, 0x56, 0xaf, + 0xc8, 0xdc, 0x6f, 0xdb, 0x78, 0xd0, 0x1b, 0x9c, 0x54, 0x33, 0xc8, 0x84, 0x1d, 0x1b, 0xe3, 0x21, + 0xae, 0x66, 0xeb, 0x7f, 0xe5, 0xc1, 0x38, 0x8a, 0x9f, 0x9b, 0xbe, 0x31, 0x5f, 0x2c, 0x28, 0x2e, + 0x58, 0xf4, 0x23, 0x9d, 0x0a, 0xfd, 0x18, 0xc9, 0x56, 0x7e, 0xb7, 0x2c, 0x43, 0xff, 0xa7, 0xa5, + 0x3f, 0xf3, 0x29, 0xd3, 0xfd, 0x9d, 0x42, 0x64, 0x83, 0xa7, 0x3f, 0x19, 0xf2, 0x2a, 0x21, 0x0d, + 0xa1, 0x5b, 0x50, 0xf2, 0xb9, 0xe3, 0x87, 0x72, 0xfa, 0xac, 0x92, 0xde, 0x02, 0x9f, 0xf7, 0x34, + 0x82, 0x3e, 0x83, 0x32, 0xf1, 0x68, 0x28, 0x9c, 0x15, 0x65, 0xf2, 0x66, 0xf5, 0x3b, 0x6f, 0x57, + 0x81, 0x93, 0x18, 0xd3, 0x2c, 0xae, 0xcf, 0xe5, 0x3d, 0xb9, 0x56, 0x31, 0x61, 0x39, 0xd2, 0x48, + 0xaa, 0x11, 0x8c, 0xf7, 0x6b, 0x84, 0xa7, 0xb0, 0xb7, 0xf9, 0x2d, 0xc9, 0xb5, 0xaf, 0x0e, 0xdf, + 0x60, 0x8c, 0xf3, 0x1a, 0xab, 0x7b, 0x7a, 0x3c, 0x76, 0xe3, 0x02, 0x5c, 0xe1, 0xe9, 0x2d, 0x47, + 0x2e, 0x7c, 0x44, 0x2f, 0x84, 0xf3, 0x3a, 0x6d, 0x59, 0xd1, 0xde, 0xfd, 0x0f, 0x5a, 0xfb, 0x42, + 0xd0, 0xd0, 0xa5, 0xee, 0x06, 0x7d, 0x27, 0x6b, 0x65, 0xf0, 0x55, 0x7a, 0x21, 0xc6, 0x9b, 0xa7, + 0x0c, 0xd6, 0xe6, 0x2d, 0x29, 0xe2, 0xaf, 0xb6, 0x2b, 0x90, 0x18, 0xe2, 0x03, 0x5b, 0xb7, 0xf3, + 0x7b, 0x06, 0x6e, 0x4f, 0xa3, 0xf9, 0xd6, 0x07, 0xe8, 0x98, 0x47, 0x44, 0x90, 0x91, 0x1c, 0x80, + 0xa3, 0xcc, 0xf7, 0x4f, 0x74, 0xba, 0x17, 0x05, 0x24, 0xf4, 0x1a, 0x11, 0xf3, 0x9a, 0x1e, 0x0d, + 0xd5, 0x78, 0x6c, 0xc6, 0x21, 0xb2, 0xf0, 0xf9, 0xdb, 0xff, 0x69, 0x3c, 0xda, 0x00, 0xfe, 0xce, + 0x64, 0x7e, 0xcd, 0x5a, 0x27, 0x31, 0x65, 0x57, 0x46, 0x92, 0x9f, 0xcb, 0x1a, 0x93, 0xd6, 0x1f, + 0x49, 0xe8, 0x99, 0x0a, 0x3d, 0x4b, 0x42, 0xcf, 0x26, 0xad, 0xf3, 0x82, 0x3a, 0xf2, 0xfe, 0x3f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0x0a, 0xe0, 0xe9, 0x2c, 0xdb, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/debugger.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/debugger.pb.go new file mode 100644 index 0000000..7da1a00 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouddebugger/v2/debugger.pb.go @@ -0,0 +1,876 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouddebugger/v2/debugger.proto + +package clouddebugger // import "google.golang.org/genproto/googleapis/devtools/clouddebugger/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request to set a breakpoint +type SetBreakpointRequest struct { + // ID of the debuggee where the breakpoint is to be set. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // Breakpoint specification to set. + // The field `location` of the breakpoint must be set. + Breakpoint *Breakpoint `protobuf:"bytes,2,opt,name=breakpoint,proto3" json:"breakpoint,omitempty"` + // The client version making the call. + // Schema: `domain/type/version` (e.g., `google.com/intellij/v1`). + ClientVersion string `protobuf:"bytes,4,opt,name=client_version,json=clientVersion,proto3" json:"client_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetBreakpointRequest) Reset() { *m = SetBreakpointRequest{} } +func (m *SetBreakpointRequest) String() string { return proto.CompactTextString(m) } +func (*SetBreakpointRequest) ProtoMessage() {} +func (*SetBreakpointRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{0} +} +func (m *SetBreakpointRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetBreakpointRequest.Unmarshal(m, b) +} +func (m *SetBreakpointRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetBreakpointRequest.Marshal(b, m, deterministic) +} +func (dst *SetBreakpointRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetBreakpointRequest.Merge(dst, src) +} +func (m *SetBreakpointRequest) XXX_Size() int { + return xxx_messageInfo_SetBreakpointRequest.Size(m) +} +func (m *SetBreakpointRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetBreakpointRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetBreakpointRequest proto.InternalMessageInfo + +func (m *SetBreakpointRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *SetBreakpointRequest) GetBreakpoint() *Breakpoint { + if m != nil { + return m.Breakpoint + } + return nil +} + +func (m *SetBreakpointRequest) GetClientVersion() string { + if m != nil { + return m.ClientVersion + } + return "" +} + +// Response for setting a breakpoint. +type SetBreakpointResponse struct { + // Breakpoint resource. + // The field `id` is guaranteed to be set (in addition to the echoed fields).
+ Breakpoint *Breakpoint `protobuf:"bytes,1,opt,name=breakpoint,proto3" json:"breakpoint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetBreakpointResponse) Reset() { *m = SetBreakpointResponse{} } +func (m *SetBreakpointResponse) String() string { return proto.CompactTextString(m) } +func (*SetBreakpointResponse) ProtoMessage() {} +func (*SetBreakpointResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{1} +} +func (m *SetBreakpointResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetBreakpointResponse.Unmarshal(m, b) +} +func (m *SetBreakpointResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetBreakpointResponse.Marshal(b, m, deterministic) +} +func (dst *SetBreakpointResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetBreakpointResponse.Merge(dst, src) +} +func (m *SetBreakpointResponse) XXX_Size() int { + return xxx_messageInfo_SetBreakpointResponse.Size(m) +} +func (m *SetBreakpointResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SetBreakpointResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SetBreakpointResponse proto.InternalMessageInfo + +func (m *SetBreakpointResponse) GetBreakpoint() *Breakpoint { + if m != nil { + return m.Breakpoint + } + return nil +} + +// Request to get breakpoint information. +type GetBreakpointRequest struct { + // ID of the debuggee whose breakpoint to get. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // ID of the breakpoint to get. + BreakpointId string `protobuf:"bytes,2,opt,name=breakpoint_id,json=breakpointId,proto3" json:"breakpoint_id,omitempty"` + // The client version making the call. + // Schema: `domain/type/version` (e.g., `google.com/intellij/v1`). 
+ ClientVersion string `protobuf:"bytes,4,opt,name=client_version,json=clientVersion,proto3" json:"client_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBreakpointRequest) Reset() { *m = GetBreakpointRequest{} } +func (m *GetBreakpointRequest) String() string { return proto.CompactTextString(m) } +func (*GetBreakpointRequest) ProtoMessage() {} +func (*GetBreakpointRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{2} +} +func (m *GetBreakpointRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBreakpointRequest.Unmarshal(m, b) +} +func (m *GetBreakpointRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBreakpointRequest.Marshal(b, m, deterministic) +} +func (dst *GetBreakpointRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBreakpointRequest.Merge(dst, src) +} +func (m *GetBreakpointRequest) XXX_Size() int { + return xxx_messageInfo_GetBreakpointRequest.Size(m) +} +func (m *GetBreakpointRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBreakpointRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBreakpointRequest proto.InternalMessageInfo + +func (m *GetBreakpointRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *GetBreakpointRequest) GetBreakpointId() string { + if m != nil { + return m.BreakpointId + } + return "" +} + +func (m *GetBreakpointRequest) GetClientVersion() string { + if m != nil { + return m.ClientVersion + } + return "" +} + +// Response for getting breakpoint information. +type GetBreakpointResponse struct { + // Complete breakpoint state. + // The fields `id` and `location` are guaranteed to be set. + Breakpoint *Breakpoint `protobuf:"bytes,1,opt,name=breakpoint,proto3" json:"breakpoint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBreakpointResponse) Reset() { *m = GetBreakpointResponse{} } +func (m *GetBreakpointResponse) String() string { return proto.CompactTextString(m) } +func (*GetBreakpointResponse) ProtoMessage() {} +func (*GetBreakpointResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{3} +} +func (m *GetBreakpointResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBreakpointResponse.Unmarshal(m, b) +} +func (m *GetBreakpointResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBreakpointResponse.Marshal(b, m, deterministic) +} +func (dst *GetBreakpointResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBreakpointResponse.Merge(dst, src) +} +func (m *GetBreakpointResponse) XXX_Size() int { + return xxx_messageInfo_GetBreakpointResponse.Size(m) +} +func (m *GetBreakpointResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetBreakpointResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBreakpointResponse proto.InternalMessageInfo + +func (m *GetBreakpointResponse) GetBreakpoint() *Breakpoint { + if m != nil { + return m.Breakpoint + } + return nil +} + +// Request to delete a breakpoint. +type DeleteBreakpointRequest struct { + // ID of the debuggee whose breakpoint to delete. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // ID of the breakpoint to delete. 
+ BreakpointId string `protobuf:"bytes,2,opt,name=breakpoint_id,json=breakpointId,proto3" json:"breakpoint_id,omitempty"` + // The client version making the call. + // Schema: `domain/type/version` (e.g., `google.com/intellij/v1`). + ClientVersion string `protobuf:"bytes,3,opt,name=client_version,json=clientVersion,proto3" json:"client_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteBreakpointRequest) Reset() { *m = DeleteBreakpointRequest{} } +func (m *DeleteBreakpointRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteBreakpointRequest) ProtoMessage() {} +func (*DeleteBreakpointRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{4} +} +func (m *DeleteBreakpointRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteBreakpointRequest.Unmarshal(m, b) +} +func (m *DeleteBreakpointRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteBreakpointRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteBreakpointRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteBreakpointRequest.Merge(dst, src) +} +func (m *DeleteBreakpointRequest) XXX_Size() int { + return xxx_messageInfo_DeleteBreakpointRequest.Size(m) +} +func (m *DeleteBreakpointRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteBreakpointRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteBreakpointRequest proto.InternalMessageInfo + +func (m *DeleteBreakpointRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *DeleteBreakpointRequest) GetBreakpointId() string { + if m != nil { + return m.BreakpointId + } + return "" +} + +func (m *DeleteBreakpointRequest) GetClientVersion() string { + if m != nil { + return m.ClientVersion + } + return "" +} + +// Request to list breakpoints. +type ListBreakpointsRequest struct { + // ID of the debuggee whose breakpoints to list. + DebuggeeId string `protobuf:"bytes,1,opt,name=debuggee_id,json=debuggeeId,proto3" json:"debuggee_id,omitempty"` + // When set to `true`, the response includes the list of breakpoints set by + // any user. Otherwise, it includes only breakpoints set by the caller. + IncludeAllUsers bool `protobuf:"varint,2,opt,name=include_all_users,json=includeAllUsers,proto3" json:"include_all_users,omitempty"` + // When set to `true`, the response includes active and inactive + // breakpoints. Otherwise, it includes only active breakpoints. + IncludeInactive bool `protobuf:"varint,3,opt,name=include_inactive,json=includeInactive,proto3" json:"include_inactive,omitempty"` + // When set, the response includes only breakpoints with the specified action. + Action *ListBreakpointsRequest_BreakpointActionValue `protobuf:"bytes,4,opt,name=action,proto3" json:"action,omitempty"` + // This field is deprecated. The following fields are always stripped out of + // the result: `stack_frames`, `evaluated_expressions` and `variable_table`. + StripResults bool `protobuf:"varint,5,opt,name=strip_results,json=stripResults,proto3" json:"strip_results,omitempty"` // Deprecated: Do not use. + // A wait token that, if specified, blocks the call until the breakpoints + // list has changed, or a server selected timeout has expired. The value + // should be set from the last response. 
The error code + // `google.rpc.Code.ABORTED` (RPC) is returned on wait timeout, which + // should be called again with the same `wait_token`. + WaitToken string `protobuf:"bytes,6,opt,name=wait_token,json=waitToken,proto3" json:"wait_token,omitempty"` + // The client version making the call. + // Schema: `domain/type/version` (e.g., `google.com/intellij/v1`). + ClientVersion string `protobuf:"bytes,8,opt,name=client_version,json=clientVersion,proto3" json:"client_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBreakpointsRequest) Reset() { *m = ListBreakpointsRequest{} } +func (m *ListBreakpointsRequest) String() string { return proto.CompactTextString(m) } +func (*ListBreakpointsRequest) ProtoMessage() {} +func (*ListBreakpointsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{5} +} +func (m *ListBreakpointsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBreakpointsRequest.Unmarshal(m, b) +} +func (m *ListBreakpointsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBreakpointsRequest.Marshal(b, m, deterministic) +} +func (dst *ListBreakpointsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBreakpointsRequest.Merge(dst, src) +} +func (m *ListBreakpointsRequest) XXX_Size() int { + return xxx_messageInfo_ListBreakpointsRequest.Size(m) +} +func (m *ListBreakpointsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBreakpointsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBreakpointsRequest proto.InternalMessageInfo + +func (m *ListBreakpointsRequest) GetDebuggeeId() string { + if m != nil { + return m.DebuggeeId + } + return "" +} + +func (m *ListBreakpointsRequest) GetIncludeAllUsers() bool { + if m != nil { + return m.IncludeAllUsers + } + return false +} + +func (m *ListBreakpointsRequest) GetIncludeInactive() bool { + if m != nil { + return m.IncludeInactive + } + return false +} + +func (m *ListBreakpointsRequest) GetAction() *ListBreakpointsRequest_BreakpointActionValue { + if m != nil { + return m.Action + } + return nil +} + +// Deprecated: Do not use. +func (m *ListBreakpointsRequest) GetStripResults() bool { + if m != nil { + return m.StripResults + } + return false +} + +func (m *ListBreakpointsRequest) GetWaitToken() string { + if m != nil { + return m.WaitToken + } + return "" +} + +func (m *ListBreakpointsRequest) GetClientVersion() string { + if m != nil { + return m.ClientVersion + } + return "" +} + +// Wrapper message for `Breakpoint.Action`. Defines a filter on the action +// field of breakpoints. +type ListBreakpointsRequest_BreakpointActionValue struct { + // Only breakpoints with the specified action will pass the filter. 
+ Value Breakpoint_Action `protobuf:"varint,1,opt,name=value,proto3,enum=google.devtools.clouddebugger.v2.Breakpoint_Action" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBreakpointsRequest_BreakpointActionValue) Reset() { + *m = ListBreakpointsRequest_BreakpointActionValue{} +} +func (m *ListBreakpointsRequest_BreakpointActionValue) String() string { + return proto.CompactTextString(m) +} +func (*ListBreakpointsRequest_BreakpointActionValue) ProtoMessage() {} +func (*ListBreakpointsRequest_BreakpointActionValue) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{5, 0} +} +func (m *ListBreakpointsRequest_BreakpointActionValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue.Unmarshal(m, b) +} +func (m *ListBreakpointsRequest_BreakpointActionValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue.Marshal(b, m, deterministic) +} +func (dst *ListBreakpointsRequest_BreakpointActionValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue.Merge(dst, src) +} +func (m *ListBreakpointsRequest_BreakpointActionValue) XXX_Size() int { + return xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue.Size(m) +} +func (m *ListBreakpointsRequest_BreakpointActionValue) XXX_DiscardUnknown() { + xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBreakpointsRequest_BreakpointActionValue proto.InternalMessageInfo + +func (m *ListBreakpointsRequest_BreakpointActionValue) GetValue() Breakpoint_Action { + if m != nil { + return m.Value + } + return Breakpoint_CAPTURE +} + +// Response for listing breakpoints. +type ListBreakpointsResponse struct { + // List of breakpoints matching the request. + // The fields `id` and `location` are guaranteed to be set on each breakpoint. + // The fields: `stack_frames`, `evaluated_expressions` and `variable_table` + // are cleared on each breakpoint regardless of its status. + Breakpoints []*Breakpoint `protobuf:"bytes,1,rep,name=breakpoints,proto3" json:"breakpoints,omitempty"` + // A wait token that can be used in the next call to `list` (REST) or + // `ListBreakpoints` (RPC) to block until the list of breakpoints has changed.
+ NextWaitToken string `protobuf:"bytes,2,opt,name=next_wait_token,json=nextWaitToken,proto3" json:"next_wait_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBreakpointsResponse) Reset() { *m = ListBreakpointsResponse{} } +func (m *ListBreakpointsResponse) String() string { return proto.CompactTextString(m) } +func (*ListBreakpointsResponse) ProtoMessage() {} +func (*ListBreakpointsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{6} +} +func (m *ListBreakpointsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBreakpointsResponse.Unmarshal(m, b) +} +func (m *ListBreakpointsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBreakpointsResponse.Marshal(b, m, deterministic) +} +func (dst *ListBreakpointsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBreakpointsResponse.Merge(dst, src) +} +func (m *ListBreakpointsResponse) XXX_Size() int { + return xxx_messageInfo_ListBreakpointsResponse.Size(m) +} +func (m *ListBreakpointsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBreakpointsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBreakpointsResponse proto.InternalMessageInfo + +func (m *ListBreakpointsResponse) GetBreakpoints() []*Breakpoint { + if m != nil { + return m.Breakpoints + } + return nil +} + +func (m *ListBreakpointsResponse) GetNextWaitToken() string { + if m != nil { + return m.NextWaitToken + } + return "" +} + +// Request to list debuggees. +type ListDebuggeesRequest struct { + // Project number of a Google Cloud project whose debuggees to list. + Project string `protobuf:"bytes,2,opt,name=project,proto3" json:"project,omitempty"` + // When set to `true`, the result includes all debuggees. Otherwise, the + // result includes only debuggees that are active. + IncludeInactive bool `protobuf:"varint,3,opt,name=include_inactive,json=includeInactive,proto3" json:"include_inactive,omitempty"` + // The client version making the call. + // Schema: `domain/type/version` (e.g., `google.com/intellij/v1`). 
+ ClientVersion string `protobuf:"bytes,4,opt,name=client_version,json=clientVersion,proto3" json:"client_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDebuggeesRequest) Reset() { *m = ListDebuggeesRequest{} } +func (m *ListDebuggeesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDebuggeesRequest) ProtoMessage() {} +func (*ListDebuggeesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{7} +} +func (m *ListDebuggeesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDebuggeesRequest.Unmarshal(m, b) +} +func (m *ListDebuggeesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDebuggeesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDebuggeesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDebuggeesRequest.Merge(dst, src) +} +func (m *ListDebuggeesRequest) XXX_Size() int { + return xxx_messageInfo_ListDebuggeesRequest.Size(m) +} +func (m *ListDebuggeesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDebuggeesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDebuggeesRequest proto.InternalMessageInfo + +func (m *ListDebuggeesRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListDebuggeesRequest) GetIncludeInactive() bool { + if m != nil { + return m.IncludeInactive + } + return false +} + +func (m *ListDebuggeesRequest) GetClientVersion() string { + if m != nil { + return m.ClientVersion + } + return "" +} + +// Response for listing debuggees. +type ListDebuggeesResponse struct { + // List of debuggees accessible to the calling user. + // The fields `debuggee.id` and `description` are guaranteed to be set. + // The `description` field is a human readable field provided by agents and + // can be displayed to users. 
+ Debuggees []*Debuggee `protobuf:"bytes,1,rep,name=debuggees,proto3" json:"debuggees,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDebuggeesResponse) Reset() { *m = ListDebuggeesResponse{} } +func (m *ListDebuggeesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDebuggeesResponse) ProtoMessage() {} +func (*ListDebuggeesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_debugger_c54a26c25209c588, []int{8} +} +func (m *ListDebuggeesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDebuggeesResponse.Unmarshal(m, b) +} +func (m *ListDebuggeesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDebuggeesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDebuggeesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDebuggeesResponse.Merge(dst, src) +} +func (m *ListDebuggeesResponse) XXX_Size() int { + return xxx_messageInfo_ListDebuggeesResponse.Size(m) +} +func (m *ListDebuggeesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDebuggeesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDebuggeesResponse proto.InternalMessageInfo + +func (m *ListDebuggeesResponse) GetDebuggees() []*Debuggee { + if m != nil { + return m.Debuggees + } + return nil +} + +func init() { + proto.RegisterType((*SetBreakpointRequest)(nil), "google.devtools.clouddebugger.v2.SetBreakpointRequest") + proto.RegisterType((*SetBreakpointResponse)(nil), "google.devtools.clouddebugger.v2.SetBreakpointResponse") + proto.RegisterType((*GetBreakpointRequest)(nil), "google.devtools.clouddebugger.v2.GetBreakpointRequest") + proto.RegisterType((*GetBreakpointResponse)(nil), "google.devtools.clouddebugger.v2.GetBreakpointResponse") + proto.RegisterType((*DeleteBreakpointRequest)(nil), "google.devtools.clouddebugger.v2.DeleteBreakpointRequest") + proto.RegisterType((*ListBreakpointsRequest)(nil), "google.devtools.clouddebugger.v2.ListBreakpointsRequest") + proto.RegisterType((*ListBreakpointsRequest_BreakpointActionValue)(nil), "google.devtools.clouddebugger.v2.ListBreakpointsRequest.BreakpointActionValue") + proto.RegisterType((*ListBreakpointsResponse)(nil), "google.devtools.clouddebugger.v2.ListBreakpointsResponse") + proto.RegisterType((*ListDebuggeesRequest)(nil), "google.devtools.clouddebugger.v2.ListDebuggeesRequest") + proto.RegisterType((*ListDebuggeesResponse)(nil), "google.devtools.clouddebugger.v2.ListDebuggeesResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// Debugger2Client is the client API for Debugger2 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type Debugger2Client interface { + // Sets the breakpoint to the debuggee. + SetBreakpoint(ctx context.Context, in *SetBreakpointRequest, opts ...grpc.CallOption) (*SetBreakpointResponse, error) + // Gets breakpoint information. + GetBreakpoint(ctx context.Context, in *GetBreakpointRequest, opts ...grpc.CallOption) (*GetBreakpointResponse, error) + // Deletes the breakpoint from the debuggee. 
+ DeleteBreakpoint(ctx context.Context, in *DeleteBreakpointRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists all breakpoints for the debuggee. + ListBreakpoints(ctx context.Context, in *ListBreakpointsRequest, opts ...grpc.CallOption) (*ListBreakpointsResponse, error) + // Lists all the debuggees that the user has access to. + ListDebuggees(ctx context.Context, in *ListDebuggeesRequest, opts ...grpc.CallOption) (*ListDebuggeesResponse, error) +} + +type debugger2Client struct { + cc *grpc.ClientConn +} + +func NewDebugger2Client(cc *grpc.ClientConn) Debugger2Client { + return &debugger2Client{cc} +} + +func (c *debugger2Client) SetBreakpoint(ctx context.Context, in *SetBreakpointRequest, opts ...grpc.CallOption) (*SetBreakpointResponse, error) { + out := new(SetBreakpointResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Debugger2/SetBreakpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *debugger2Client) GetBreakpoint(ctx context.Context, in *GetBreakpointRequest, opts ...grpc.CallOption) (*GetBreakpointResponse, error) { + out := new(GetBreakpointResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Debugger2/GetBreakpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *debugger2Client) DeleteBreakpoint(ctx context.Context, in *DeleteBreakpointRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Debugger2/DeleteBreakpoint", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *debugger2Client) ListBreakpoints(ctx context.Context, in *ListBreakpointsRequest, opts ...grpc.CallOption) (*ListBreakpointsResponse, error) { + out := new(ListBreakpointsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Debugger2/ListBreakpoints", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *debugger2Client) ListDebuggees(ctx context.Context, in *ListDebuggeesRequest, opts ...grpc.CallOption) (*ListDebuggeesResponse, error) { + out := new(ListDebuggeesResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouddebugger.v2.Debugger2/ListDebuggees", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// Debugger2Server is the server API for Debugger2 service. +type Debugger2Server interface { + // Sets the breakpoint to the debuggee. + SetBreakpoint(context.Context, *SetBreakpointRequest) (*SetBreakpointResponse, error) + // Gets breakpoint information. + GetBreakpoint(context.Context, *GetBreakpointRequest) (*GetBreakpointResponse, error) + // Deletes the breakpoint from the debuggee. + DeleteBreakpoint(context.Context, *DeleteBreakpointRequest) (*empty.Empty, error) + // Lists all breakpoints for the debuggee. + ListBreakpoints(context.Context, *ListBreakpointsRequest) (*ListBreakpointsResponse, error) + // Lists all the debuggees that the user has access to. 
+ ListDebuggees(context.Context, *ListDebuggeesRequest) (*ListDebuggeesResponse, error) +} + +func RegisterDebugger2Server(s *grpc.Server, srv Debugger2Server) { + s.RegisterService(&_Debugger2_serviceDesc, srv) +} + +func _Debugger2_SetBreakpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetBreakpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Debugger2Server).SetBreakpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Debugger2/SetBreakpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Debugger2Server).SetBreakpoint(ctx, req.(*SetBreakpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Debugger2_GetBreakpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBreakpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Debugger2Server).GetBreakpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Debugger2/GetBreakpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Debugger2Server).GetBreakpoint(ctx, req.(*GetBreakpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Debugger2_DeleteBreakpoint_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteBreakpointRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Debugger2Server).DeleteBreakpoint(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Debugger2/DeleteBreakpoint", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Debugger2Server).DeleteBreakpoint(ctx, req.(*DeleteBreakpointRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Debugger2_ListBreakpoints_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBreakpointsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Debugger2Server).ListBreakpoints(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Debugger2/ListBreakpoints", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(Debugger2Server).ListBreakpoints(ctx, req.(*ListBreakpointsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Debugger2_ListDebuggees_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDebuggeesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(Debugger2Server).ListDebuggees(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouddebugger.v2.Debugger2/ListDebuggees", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(Debugger2Server).ListDebuggees(ctx, req.(*ListDebuggeesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Debugger2_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.clouddebugger.v2.Debugger2", + HandlerType: (*Debugger2Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SetBreakpoint", + Handler: _Debugger2_SetBreakpoint_Handler, + }, + { + MethodName: "GetBreakpoint", + Handler: _Debugger2_GetBreakpoint_Handler, + }, + { + MethodName: "DeleteBreakpoint", + Handler: _Debugger2_DeleteBreakpoint_Handler, + }, + { + MethodName: "ListBreakpoints", + Handler: _Debugger2_ListBreakpoints_Handler, + }, + { + MethodName: "ListDebuggees", + Handler: _Debugger2_ListDebuggees_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/clouddebugger/v2/debugger.proto", +} + +func init() { + proto.RegisterFile("google/devtools/clouddebugger/v2/debugger.proto", fileDescriptor_debugger_c54a26c25209c588) +} + +var fileDescriptor_debugger_c54a26c25209c588 = []byte{ + // 802 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xdd, 0x6e, 0xdb, 0x36, + 0x14, 0x06, 0x9d, 0xe5, 0xc7, 0xc7, 0x71, 0x92, 0x11, 0xf9, 0x11, 0xbc, 0x3f, 0x43, 0xfb, 0xcb, + 0xb2, 0x41, 0x1a, 0x94, 0x61, 0x4b, 0xb6, 0x9b, 0xc5, 0xcb, 0xe0, 0x18, 0xc8, 0x82, 0x40, 0xdb, + 0x3c, 0x60, 0x08, 0x60, 0xc8, 0x36, 0x23, 0xa8, 0x51, 0x44, 0x55, 0xa4, 0xdc, 0x16, 0x41, 0x6e, + 0x52, 0xa0, 0xf7, 0x45, 0x5f, 0xa0, 0xd7, 0x45, 0x81, 0xbe, 0x40, 0x0b, 0xf4, 0x3a, 0xbd, 0xec, + 0x2b, 0xf4, 0x41, 0x0a, 0x49, 0x64, 0x2c, 0xbb, 0x6a, 0x6d, 0x39, 0x40, 0xee, 0xa8, 0x8f, 0x3c, + 0x87, 0xdf, 0xf7, 0xf1, 0xf0, 0x50, 0xa0, 0xdb, 0x94, 0xda, 0x2e, 0xd1, 0xbb, 0xa4, 0xc7, 0x29, + 0x75, 0x99, 0xde, 0x71, 0x69, 0xd8, 0xed, 0x92, 0x76, 0x68, 0xdb, 0x24, 0xd0, 0x7b, 0x86, 0x2e, + 0xc7, 0x9a, 0x1f, 0x50, 0x4e, 0x71, 0x35, 0x09, 0xd0, 0x64, 0x80, 0x36, 0x10, 0xa0, 0xf5, 0x8c, + 0xca, 0xa7, 0x22, 0xa5, 0xe5, 0x3b, 0xba, 0xe5, 0x79, 0x94, 0x5b, 0xdc, 0xa1, 0x1e, 0x4b, 0xe2, + 0x2b, 0xdf, 0x8f, 0xde, 0xd0, 0xe2, 0x96, 0x58, 0xfc, 0x89, 0x58, 0x1c, 0x7f, 0xb5, 0xc3, 0x63, + 0x9d, 0x9c, 0xfa, 0xfc, 0x5e, 0x32, 0xa9, 0x3e, 0x45, 0xb0, 0xfc, 0x37, 0xe1, 0xb5, 0x80, 0x58, + 0x27, 0x3e, 0x75, 0x3c, 0x6e, 0x92, 0xdb, 0x21, 0x61, 0x1c, 0x7f, 0x01, 0x25, 0x91, 0x8f, 0xb4, + 0x9c, 0xae, 0x82, 0xaa, 0x68, 0xbd, 0x68, 0x82, 0x84, 0x1a, 0x5d, 0xbc, 0x0f, 0xd0, 0xbe, 0x8a, + 0x52, 0x0a, 0x55, 0xb4, 0x5e, 0x32, 0x7e, 0xd0, 0x46, 0x09, 0xd3, 0x52, 0x3b, 0xa5, 0xe2, 0xf1, + 0xd7, 0xb0, 0xd0, 0x71, 0x1d, 0xe2, 0xf1, 0x56, 0x8f, 0x04, 0xcc, 0xa1, 0x9e, 0xf2, 0x51, 0xbc, + 0x63, 0x39, 0x41, 0x9b, 0x09, 0xa8, 0x12, 0x58, 0x19, 0x62, 0xcb, 0x7c, 0xea, 0x31, 0x32, 0xc4, + 0x06, 0x5d, 0x8f, 0x8d, 0x7a, 0x1f, 0xc1, 0x72, 0x7d, 0x22, 0x57, 0xbe, 0x84, 0x72, 0x3f, 0x4f, + 0xb4, 0xa4, 0x10, 0x2f, 0x99, 0xef, 0x83, 0x8d, 0x6e, 0x0e, 0xb1, 0xf5, 0x1b, 0x10, 0xfb, 0x00, + 0xc1, 0xda, 0x2e, 0x71, 0x09, 0x27, 0x37, 0xa7, 0x77, 0x2a, 0x4b, 0xef, 0xab, 0x29, 0x58, 0xdd, + 0x77, 0x58, 0x4a, 0x31, 0x1b, 0x9b, 0xc7, 0x06, 0x7c, 0xec, 0x78, 0x1d, 0x37, 0xec, 0x92, 0x96, + 0xe5, 0xba, 0xad, 0x90, 0x91, 0x80, 0xc5, 0x5c, 0xe6, 0xcc, 0x45, 0x31, 0xb1, 0xe3, 0xba, 0xff, + 0x46, 0x30, 0xfe, 0x0e, 0x96, 0xe4, 0x5a, 0xc7, 0xb3, 0x3a, 0xdc, 0xe9, 0x91, 0x98, 0x50, 0x7f, + 0x69, 0x43, 0xc0, 0xf8, 0x18, 0x66, 0xa2, 0x91, 0x38, 0xa1, 0x92, 0x71, 0x30, 0xda, 0xe5, 0x6c, + 0x05, 0x29, 0xf3, 0x77, 0xe2, 0x84, 0x4d, 0xcb, 0x0d, 0x89, 0x29, 0xb2, 0xe3, 0x6f, 0xa1, 0xcc, + 0x78, 0xe0, 0xf8, 
0xad, 0x80, 0xb0, 0xd0, 0xe5, 0x4c, 0x99, 0x8e, 0xf8, 0xd4, 0x0a, 0x0a, 0x32, + 0xe7, 0xe3, 0x09, 0x33, 0xc1, 0xf1, 0x67, 0x00, 0x77, 0x2c, 0x87, 0xb7, 0x38, 0x3d, 0x21, 0x9e, + 0x32, 0x13, 0xfb, 0x50, 0x8c, 0x90, 0x7f, 0x22, 0x20, 0xc3, 0xe9, 0xb9, 0x0c, 0xa7, 0x2b, 0x6d, + 0x58, 0xc9, 0xe4, 0x83, 0x1b, 0x30, 0xdd, 0x8b, 0x06, 0xb1, 0xc3, 0x0b, 0xc6, 0x66, 0x9e, 0xa2, + 0xd2, 0x92, 0x44, 0x66, 0x92, 0x41, 0x7d, 0x88, 0x60, 0xed, 0x1d, 0x2f, 0x44, 0x01, 0x1f, 0x40, + 0xa9, 0x5f, 0x20, 0x4c, 0x41, 0xd5, 0xa9, 0xdc, 0x15, 0x9c, 0x4e, 0x80, 0xbf, 0x81, 0x45, 0x8f, + 0xdc, 0xe5, 0xad, 0x94, 0x35, 0x49, 0x1d, 0x96, 0x23, 0xf8, 0x3f, 0x69, 0x8f, 0x7a, 0x81, 0x60, + 0x39, 0xe2, 0xb4, 0x2b, 0x0a, 0xe7, 0xaa, 0xbe, 0x14, 0x98, 0xf5, 0x03, 0x7a, 0x8b, 0x74, 0xb8, + 0x08, 0x94, 0x9f, 0x79, 0x8a, 0x65, 0xcc, 0x6b, 0x6d, 0xc1, 0xca, 0x10, 0x07, 0xe1, 0xca, 0x1e, + 0x14, 0x65, 0x45, 0x4b, 0x4f, 0x36, 0x46, 0x7b, 0x22, 0xf3, 0x98, 0xfd, 0x60, 0xe3, 0xc5, 0x2c, + 0x14, 0x05, 0x1e, 0x18, 0xf8, 0x12, 0x41, 0x79, 0xa0, 0x6b, 0xe2, 0x9f, 0x47, 0xa7, 0xcd, 0x7a, + 0x14, 0x2a, 0xbf, 0xe4, 0x8e, 0x4b, 0xa4, 0xa9, 0x7b, 0x17, 0xaf, 0xdf, 0x3c, 0x2a, 0xd4, 0xd4, + 0x9f, 0xd2, 0x8f, 0xa1, 0x7e, 0x45, 0x58, 0x3f, 0x4b, 0xdd, 0xee, 0x73, 0x3d, 0x75, 0xb4, 0x3a, + 0x23, 0xfc, 0xd7, 0xf4, 0x43, 0x11, 0x89, 0xa9, 0xe7, 0x15, 0x53, 0x9f, 0x50, 0x4c, 0xfd, 0x43, + 0x62, 0xf0, 0xef, 0xb9, 0xc5, 0x9c, 0x0d, 0xf4, 0xca, 0x73, 0xfc, 0x0c, 0xc1, 0xd2, 0x70, 0xeb, + 0xc5, 0xdb, 0xe3, 0x9c, 0x79, 0x66, 0xbb, 0xae, 0xac, 0xca, 0x50, 0xf9, 0xd6, 0x6b, 0x7f, 0x46, + 0x6f, 0xbd, 0x64, 0xbc, 0x71, 0x7d, 0xc6, 0x2f, 0x11, 0x2c, 0x0e, 0xdd, 0x6a, 0xbc, 0x35, 0x69, + 0x53, 0xac, 0x6c, 0x4f, 0x10, 0x29, 0x0e, 0x61, 0x2b, 0x96, 0x64, 0xe0, 0x1f, 0xf3, 0x4a, 0xc2, + 0x8f, 0x11, 0x94, 0x07, 0x2e, 0xe0, 0x38, 0x15, 0x94, 0xd5, 0x35, 0xc6, 0xa9, 0xa0, 0xcc, 0x9b, + 0xae, 0x7e, 0x1e, 0x93, 0x57, 0xf0, 0x6a, 0x36, 0xf9, 0xda, 0x73, 0x04, 0x5f, 0x75, 0xe8, 0xe9, + 0xc8, 0xf4, 0xb5, 0xb2, 0xbc, 0xe5, 0x87, 0xd1, 0x81, 0x1f, 0xa2, 0xff, 0xff, 0x12, 0x21, 0x36, + 0x75, 0x2d, 0xcf, 0xd6, 0x68, 0x60, 0xeb, 0x36, 0xf1, 0xe2, 0x72, 0x10, 0x7f, 0xa9, 0x96, 0xef, + 0xb0, 0xf7, 0xff, 0x38, 0xfe, 0x36, 0x00, 0x3c, 0x29, 0x28, 0xf5, 0x24, 0xdf, 0x1f, 0x11, 0x2c, + 0x7b, 0x4d, 0xa0, 0x35, 0x8d, 0x4b, 0x39, 0x75, 0x14, 0x4f, 0x1d, 0xc9, 0xa9, 0xa3, 0xa6, 0xd1, + 0x9e, 0x89, 0xf7, 0xdb, 0x7c, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x13, 0xd7, 0xe1, 0x18, 0x0b, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/common.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/common.pb.go new file mode 100644 index 0000000..a5b017b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/common.pb.go @@ -0,0 +1,555 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouderrorreporting/v1beta1/common.proto + +package clouderrorreporting // import "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/monitoredres" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Description of a group of similar error events. +type ErrorGroup struct { + // The group resource name. + // Example: projects/my-project-123/groups/my-groupid + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Group IDs are unique for a given project. If the same kind of error + // occurs in different service contexts, it will receive the same group ID. + GroupId string `protobuf:"bytes,2,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + // Associated tracking issues. + TrackingIssues []*TrackingIssue `protobuf:"bytes,3,rep,name=tracking_issues,json=trackingIssues,proto3" json:"tracking_issues,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorGroup) Reset() { *m = ErrorGroup{} } +func (m *ErrorGroup) String() string { return proto.CompactTextString(m) } +func (*ErrorGroup) ProtoMessage() {} +func (*ErrorGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{0} +} +func (m *ErrorGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorGroup.Unmarshal(m, b) +} +func (m *ErrorGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorGroup.Marshal(b, m, deterministic) +} +func (dst *ErrorGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorGroup.Merge(dst, src) +} +func (m *ErrorGroup) XXX_Size() int { + return xxx_messageInfo_ErrorGroup.Size(m) +} +func (m *ErrorGroup) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorGroup proto.InternalMessageInfo + +func (m *ErrorGroup) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ErrorGroup) GetGroupId() string { + if m != nil { + return m.GroupId + } + return "" +} + +func (m *ErrorGroup) GetTrackingIssues() []*TrackingIssue { + if m != nil { + return m.TrackingIssues + } + return nil +} + +// Information related to tracking the progress on resolving the error. +type TrackingIssue struct { + // A URL pointing to a related entry in an issue tracking system. 
+ // Example: https://github.com/user/project/issues/4 + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TrackingIssue) Reset() { *m = TrackingIssue{} } +func (m *TrackingIssue) String() string { return proto.CompactTextString(m) } +func (*TrackingIssue) ProtoMessage() {} +func (*TrackingIssue) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{1} +} +func (m *TrackingIssue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TrackingIssue.Unmarshal(m, b) +} +func (m *TrackingIssue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TrackingIssue.Marshal(b, m, deterministic) +} +func (dst *TrackingIssue) XXX_Merge(src proto.Message) { + xxx_messageInfo_TrackingIssue.Merge(dst, src) +} +func (m *TrackingIssue) XXX_Size() int { + return xxx_messageInfo_TrackingIssue.Size(m) +} +func (m *TrackingIssue) XXX_DiscardUnknown() { + xxx_messageInfo_TrackingIssue.DiscardUnknown(m) +} + +var xxx_messageInfo_TrackingIssue proto.InternalMessageInfo + +func (m *TrackingIssue) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// An error event which is returned by the Error Reporting system. +type ErrorEvent struct { + // Time when the event occurred as provided in the error report. + // If the report did not contain a timestamp, the time the error was received + // by the Error Reporting system is used. + EventTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` + // The `ServiceContext` for which this error was reported. + ServiceContext *ServiceContext `protobuf:"bytes,2,opt,name=service_context,json=serviceContext,proto3" json:"service_context,omitempty"` + // The stack trace that was reported or logged by the service. + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + // Data about the context in which the error occurred. 
+ Context *ErrorContext `protobuf:"bytes,5,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorEvent) Reset() { *m = ErrorEvent{} } +func (m *ErrorEvent) String() string { return proto.CompactTextString(m) } +func (*ErrorEvent) ProtoMessage() {} +func (*ErrorEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{2} +} +func (m *ErrorEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorEvent.Unmarshal(m, b) +} +func (m *ErrorEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorEvent.Marshal(b, m, deterministic) +} +func (dst *ErrorEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorEvent.Merge(dst, src) +} +func (m *ErrorEvent) XXX_Size() int { + return xxx_messageInfo_ErrorEvent.Size(m) +} +func (m *ErrorEvent) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorEvent proto.InternalMessageInfo + +func (m *ErrorEvent) GetEventTime() *timestamp.Timestamp { + if m != nil { + return m.EventTime + } + return nil +} + +func (m *ErrorEvent) GetServiceContext() *ServiceContext { + if m != nil { + return m.ServiceContext + } + return nil +} + +func (m *ErrorEvent) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func (m *ErrorEvent) GetContext() *ErrorContext { + if m != nil { + return m.Context + } + return nil +} + +// Describes a running service that sends errors. +// Its version changes over time and multiple versions can run in parallel. +type ServiceContext struct { + // An identifier of the service, such as the name of the + // executable, job, or Google App Engine service name. This field is expected + // to have a low number of values that are relatively stable over time, as + // opposed to `version`, which can be changed whenever new code is deployed. + // + // Contains the service name for error reports extracted from Google + // App Engine logs or `default` if the App Engine default service is used. + Service string `protobuf:"bytes,2,opt,name=service,proto3" json:"service,omitempty"` + // Represents the source code version that the developer provided, + // which could represent a version label or a Git SHA-1 hash, for example. + Version string `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` + // Type of the MonitoredResource. List of possible values: + // https://cloud.google.com/monitoring/api/resources + // + // Value is set automatically for incoming errors and must not be set when + // reporting errors. 
+ ResourceType string `protobuf:"bytes,4,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceContext) Reset() { *m = ServiceContext{} } +func (m *ServiceContext) String() string { return proto.CompactTextString(m) } +func (*ServiceContext) ProtoMessage() {} +func (*ServiceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{3} +} +func (m *ServiceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceContext.Unmarshal(m, b) +} +func (m *ServiceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceContext.Marshal(b, m, deterministic) +} +func (dst *ServiceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceContext.Merge(dst, src) +} +func (m *ServiceContext) XXX_Size() int { + return xxx_messageInfo_ServiceContext.Size(m) +} +func (m *ServiceContext) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceContext proto.InternalMessageInfo + +func (m *ServiceContext) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *ServiceContext) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *ServiceContext) GetResourceType() string { + if m != nil { + return m.ResourceType + } + return "" +} + +// A description of the context in which an error occurred. +// This data should be provided by the application when reporting an error, +// unless the +// error report has been generated automatically from Google App Engine logs. +type ErrorContext struct { + // The HTTP request which was processed when the error was + // triggered. + HttpRequest *HttpRequestContext `protobuf:"bytes,1,opt,name=http_request,json=httpRequest,proto3" json:"http_request,omitempty"` + // The user who caused or was affected by the crash. + // This can be a user ID, an email address, or an arbitrary token that + // uniquely identifies the user. + // When sending an error report, leave this field empty if the user was not + // logged in. In this case the + // Error Reporting system will use other data, such as remote IP address, to + // distinguish affected users. See `affected_users_count` in + // `ErrorGroupStats`. + User string `protobuf:"bytes,2,opt,name=user,proto3" json:"user,omitempty"` + // The location in the source code where the decision was made to + // report the error, usually the place where it was logged. + // For a logged exception this would be the source line where the + // exception is logged, usually close to the place where it was + // caught. This value is in contrast to `Exception.cause_location`, + // which describes the source line where the exception was thrown. 
+ ReportLocation *SourceLocation `protobuf:"bytes,3,opt,name=report_location,json=reportLocation,proto3" json:"report_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorContext) Reset() { *m = ErrorContext{} } +func (m *ErrorContext) String() string { return proto.CompactTextString(m) } +func (*ErrorContext) ProtoMessage() {} +func (*ErrorContext) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{4} +} +func (m *ErrorContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorContext.Unmarshal(m, b) +} +func (m *ErrorContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorContext.Marshal(b, m, deterministic) +} +func (dst *ErrorContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorContext.Merge(dst, src) +} +func (m *ErrorContext) XXX_Size() int { + return xxx_messageInfo_ErrorContext.Size(m) +} +func (m *ErrorContext) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorContext proto.InternalMessageInfo + +func (m *ErrorContext) GetHttpRequest() *HttpRequestContext { + if m != nil { + return m.HttpRequest + } + return nil +} + +func (m *ErrorContext) GetUser() string { + if m != nil { + return m.User + } + return "" +} + +func (m *ErrorContext) GetReportLocation() *SourceLocation { + if m != nil { + return m.ReportLocation + } + return nil +} + +// HTTP request data that is related to a reported error. +// This data should be provided by the application when reporting an error, +// unless the +// error report has been generated automatically from Google App Engine logs. +type HttpRequestContext struct { + // The type of HTTP request, such as `GET`, `POST`, etc. + Method string `protobuf:"bytes,1,opt,name=method,proto3" json:"method,omitempty"` + // The URL of the request. + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + // The user agent information that is provided with the request. + UserAgent string `protobuf:"bytes,3,opt,name=user_agent,json=userAgent,proto3" json:"user_agent,omitempty"` + // The referrer information that is provided with the request. + Referrer string `protobuf:"bytes,4,opt,name=referrer,proto3" json:"referrer,omitempty"` + // The HTTP response status code for the request. + ResponseStatusCode int32 `protobuf:"varint,5,opt,name=response_status_code,json=responseStatusCode,proto3" json:"response_status_code,omitempty"` + // The IP address from which the request originated. + // This can be IPv4, IPv6, or a token which is derived from the + // IP address, depending on the data that has been provided + // in the error report. 
+ RemoteIp string `protobuf:"bytes,6,opt,name=remote_ip,json=remoteIp,proto3" json:"remote_ip,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpRequestContext) Reset() { *m = HttpRequestContext{} } +func (m *HttpRequestContext) String() string { return proto.CompactTextString(m) } +func (*HttpRequestContext) ProtoMessage() {} +func (*HttpRequestContext) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{5} +} +func (m *HttpRequestContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpRequestContext.Unmarshal(m, b) +} +func (m *HttpRequestContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpRequestContext.Marshal(b, m, deterministic) +} +func (dst *HttpRequestContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpRequestContext.Merge(dst, src) +} +func (m *HttpRequestContext) XXX_Size() int { + return xxx_messageInfo_HttpRequestContext.Size(m) +} +func (m *HttpRequestContext) XXX_DiscardUnknown() { + xxx_messageInfo_HttpRequestContext.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpRequestContext proto.InternalMessageInfo + +func (m *HttpRequestContext) GetMethod() string { + if m != nil { + return m.Method + } + return "" +} + +func (m *HttpRequestContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *HttpRequestContext) GetUserAgent() string { + if m != nil { + return m.UserAgent + } + return "" +} + +func (m *HttpRequestContext) GetReferrer() string { + if m != nil { + return m.Referrer + } + return "" +} + +func (m *HttpRequestContext) GetResponseStatusCode() int32 { + if m != nil { + return m.ResponseStatusCode + } + return 0 +} + +func (m *HttpRequestContext) GetRemoteIp() string { + if m != nil { + return m.RemoteIp + } + return "" +} + +// Indicates a location in the source code of the service for which +// errors are reported. +// This data should be provided by the application when reporting an error, +// unless the error report has been generated automatically from Google App +// Engine logs. All fields are optional. +type SourceLocation struct { + // The source code filename, which can include a truncated relative + // path, or a full path from a production machine. + FilePath string `protobuf:"bytes,1,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` + // 1-based. 0 indicates that the line number is unknown. + LineNumber int32 `protobuf:"varint,2,opt,name=line_number,json=lineNumber,proto3" json:"line_number,omitempty"` + // Human-readable name of a function or method. + // The value can include optional context like the class or package name. + // For example, `my.package.MyClass.method` in case of Java. 
+ FunctionName string `protobuf:"bytes,4,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceLocation) Reset() { *m = SourceLocation{} } +func (m *SourceLocation) String() string { return proto.CompactTextString(m) } +func (*SourceLocation) ProtoMessage() {} +func (*SourceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_common_293c65082d2aefa2, []int{6} +} +func (m *SourceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceLocation.Unmarshal(m, b) +} +func (m *SourceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceLocation.Marshal(b, m, deterministic) +} +func (dst *SourceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceLocation.Merge(dst, src) +} +func (m *SourceLocation) XXX_Size() int { + return xxx_messageInfo_SourceLocation.Size(m) +} +func (m *SourceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_SourceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceLocation proto.InternalMessageInfo + +func (m *SourceLocation) GetFilePath() string { + if m != nil { + return m.FilePath + } + return "" +} + +func (m *SourceLocation) GetLineNumber() int32 { + if m != nil { + return m.LineNumber + } + return 0 +} + +func (m *SourceLocation) GetFunctionName() string { + if m != nil { + return m.FunctionName + } + return "" +} + +func init() { + proto.RegisterType((*ErrorGroup)(nil), "google.devtools.clouderrorreporting.v1beta1.ErrorGroup") + proto.RegisterType((*TrackingIssue)(nil), "google.devtools.clouderrorreporting.v1beta1.TrackingIssue") + proto.RegisterType((*ErrorEvent)(nil), "google.devtools.clouderrorreporting.v1beta1.ErrorEvent") + proto.RegisterType((*ServiceContext)(nil), "google.devtools.clouderrorreporting.v1beta1.ServiceContext") + proto.RegisterType((*ErrorContext)(nil), "google.devtools.clouderrorreporting.v1beta1.ErrorContext") + proto.RegisterType((*HttpRequestContext)(nil), "google.devtools.clouderrorreporting.v1beta1.HttpRequestContext") + proto.RegisterType((*SourceLocation)(nil), "google.devtools.clouderrorreporting.v1beta1.SourceLocation") +} + +func init() { + proto.RegisterFile("google/devtools/clouderrorreporting/v1beta1/common.proto", fileDescriptor_common_293c65082d2aefa2) +} + +var fileDescriptor_common_293c65082d2aefa2 = []byte{ + // 705 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xcd, 0x6e, 0x13, 0x31, + 0x10, 0x56, 0x92, 0xfe, 0xc5, 0x69, 0x53, 0x64, 0x21, 0x14, 0x02, 0xa8, 0x25, 0xbd, 0x54, 0x42, + 0xda, 0xa5, 0xe5, 0x42, 0xe9, 0x01, 0xd1, 0xa8, 0x2a, 0x95, 0x50, 0x55, 0x6d, 0xaa, 0x1e, 0x50, + 0x85, 0xe5, 0xec, 0x4e, 0x36, 0x16, 0xbb, 0xb6, 0xb1, 0xbd, 0x11, 0x7d, 0x17, 0x0e, 0x9c, 0x79, + 0x12, 0xc4, 0xb3, 0xf4, 0x21, 0x90, 0xbd, 0x76, 0x69, 0xd4, 0x1e, 0xc8, 0xcd, 0x33, 0xf3, 0xcd, + 0x37, 0xf3, 0x8d, 0xc7, 0x46, 0x6f, 0x73, 0x21, 0xf2, 0x02, 0xe2, 0x0c, 0x66, 0x46, 0x88, 0x42, + 0xc7, 0x69, 0x21, 0xaa, 0x0c, 0x94, 0x12, 0x4a, 0x81, 0x14, 0xca, 0x30, 0x9e, 0xc7, 0xb3, 0xbd, + 0x31, 0x18, 0xba, 0x17, 0xa7, 0xa2, 0x2c, 0x05, 0x8f, 0xa4, 0x12, 0x46, 0xe0, 0x57, 0x75, 0x66, + 0x14, 0x32, 0xa3, 0x07, 0x32, 0x23, 0x9f, 0xd9, 0x7f, 0xee, 0xcb, 0x50, 0xc9, 0x62, 0xca, 0xb9, + 0x30, 0xd4, 0x30, 0xc1, 0x75, 0x4d, 0xd5, 0xdf, 0xb9, 0x13, 0x2d, 0x05, 0x67, 0x46, 0x28, 0xc8, + 0x88, 0x02, 0x2d, 0x2a, 0x95, 0x82, 0x07, 0x6d, 0x79, 0x90, 
0xb3, 0xc6, 0xd5, 0x24, 0x36, 0xac, + 0x04, 0x6d, 0x68, 0x29, 0x6b, 0xc0, 0xe0, 0x67, 0x03, 0xa1, 0x63, 0x5b, 0xfe, 0x44, 0x89, 0x4a, + 0x62, 0x8c, 0x96, 0x38, 0x2d, 0xa1, 0xd7, 0xd8, 0x6e, 0xec, 0xb6, 0x13, 0x77, 0xc6, 0x4f, 0xd1, + 0x5a, 0x6e, 0x83, 0x84, 0x65, 0xbd, 0xa6, 0xf3, 0xaf, 0x3a, 0xfb, 0x34, 0xc3, 0x29, 0xda, 0x34, + 0x8a, 0xa6, 0x5f, 0x19, 0xcf, 0x09, 0xd3, 0xba, 0x02, 0xdd, 0x6b, 0x6d, 0xb7, 0x76, 0x3b, 0xfb, + 0xef, 0xa2, 0x05, 0x84, 0x46, 0x17, 0x9e, 0xe3, 0xd4, 0x52, 0x24, 0x5d, 0x73, 0xd7, 0xd4, 0x83, + 0x97, 0x68, 0x63, 0x0e, 0x80, 0x1f, 0xa1, 0x56, 0xa5, 0x0a, 0xdf, 0xa3, 0x3d, 0x0e, 0x7e, 0x34, + 0xbd, 0x8a, 0xe3, 0x19, 0x70, 0x83, 0x0f, 0x10, 0x02, 0x7b, 0x20, 0x56, 0xad, 0xc3, 0x75, 0xf6, + 0xfb, 0xa1, 0xa3, 0x30, 0x8a, 0xe8, 0x22, 0x8c, 0x22, 0x69, 0x3b, 0xb4, 0xb5, 0x71, 0x86, 0x36, + 0x35, 0xa8, 0x19, 0x4b, 0x81, 0xa4, 0x82, 0x1b, 0xf8, 0x6e, 0x9c, 0xe6, 0xce, 0xfe, 0xe1, 0x42, + 0x8a, 0x46, 0x35, 0xc7, 0xb0, 0xa6, 0x48, 0xba, 0x7a, 0xce, 0xc6, 0x3d, 0xb4, 0x5a, 0x82, 0xd6, + 0x34, 0x87, 0x5e, 0xab, 0x9e, 0xa8, 0x37, 0xf1, 0x08, 0xad, 0x86, 0xba, 0xcb, 0xae, 0xee, 0xc1, + 0x42, 0x75, 0xdd, 0x10, 0x42, 0xd5, 0xc0, 0x34, 0x60, 0xa8, 0x3b, 0xba, 0xd7, 0x80, 0x6f, 0x29, + 0x5c, 0xa9, 0x37, 0x6d, 0x64, 0x06, 0x4a, 0x33, 0xc1, 0x43, 0x6b, 0xde, 0xc4, 0x3b, 0x68, 0x23, + 0x6c, 0x17, 0x31, 0xd7, 0x12, 0x7a, 0x4b, 0x2e, 0xbe, 0x1e, 0x9c, 0x17, 0xd7, 0x12, 0x06, 0x37, + 0x0d, 0xb4, 0x7e, 0xb7, 0x09, 0x3c, 0x46, 0xeb, 0x53, 0x63, 0x24, 0x51, 0xf0, 0xad, 0x02, 0x6d, + 0xfc, 0x6d, 0xbc, 0x5f, 0x48, 0xd5, 0x47, 0x63, 0x64, 0x52, 0xe7, 0x07, 0x6d, 0x9d, 0xe9, 0x3f, + 0x9f, 0xdd, 0xda, 0x4a, 0x83, 0xf2, 0x52, 0xdc, 0xd9, 0x5e, 0x64, 0x4d, 0x44, 0x0a, 0x91, 0xba, + 0x87, 0xe3, 0xf4, 0x2c, 0x7c, 0x91, 0x4e, 0xda, 0x27, 0x4f, 0x91, 0x74, 0x6b, 0x44, 0xb0, 0x07, + 0xbf, 0x1b, 0x08, 0xdf, 0xef, 0x0e, 0x3f, 0x41, 0x2b, 0x25, 0x98, 0xa9, 0xc8, 0xfc, 0x92, 0x7a, + 0x2b, 0x6c, 0x6e, 0xf3, 0x76, 0x73, 0xf1, 0x0b, 0x84, 0x6c, 0xbb, 0x84, 0xe6, 0xc0, 0x8d, 0x9f, + 0x78, 0xdb, 0x7a, 0x3e, 0x58, 0x07, 0xee, 0xa3, 0x35, 0x05, 0x13, 0x50, 0x0a, 0x94, 0x1f, 0xf7, + 0xad, 0x8d, 0x5f, 0xa3, 0xc7, 0x0a, 0xb4, 0x14, 0x5c, 0x03, 0xd1, 0x86, 0x9a, 0x4a, 0x93, 0x54, + 0x64, 0xe0, 0xf6, 0x66, 0x39, 0xc1, 0x21, 0x36, 0x72, 0xa1, 0xa1, 0xc8, 0x00, 0x3f, 0x43, 0x6d, + 0x05, 0xa5, 0x30, 0x40, 0x98, 0xec, 0xad, 0x04, 0x3a, 0xeb, 0x38, 0x95, 0x03, 0x8d, 0xba, 0xf3, + 0x62, 0x2d, 0x7c, 0xc2, 0x0a, 0x20, 0x92, 0x9a, 0xa9, 0x17, 0xb2, 0x66, 0x1d, 0xe7, 0xd4, 0x4c, + 0xf1, 0x16, 0xea, 0x14, 0x8c, 0x03, 0xe1, 0x55, 0x39, 0xf6, 0xa3, 0x5f, 0x4e, 0x90, 0x75, 0x9d, + 0x39, 0x8f, 0x5d, 0x97, 0x49, 0xc5, 0x53, 0xcb, 0x44, 0xdc, 0x9f, 0xe2, 0xd7, 0x25, 0x38, 0xcf, + 0x68, 0x09, 0x47, 0x37, 0x0d, 0x64, 0x3f, 0xc8, 0x45, 0xae, 0xe4, 0xa8, 0x33, 0x74, 0x3f, 0xea, + 0xb9, 0x7d, 0xc7, 0xe7, 0x8d, 0xcf, 0x5f, 0x7c, 0x6e, 0x2e, 0x0a, 0xca, 0xf3, 0x48, 0xa8, 0x3c, + 0xce, 0x81, 0xbb, 0x57, 0x1e, 0xd7, 0x21, 0x2a, 0x99, 0xfe, 0xaf, 0xbf, 0xfa, 0xf0, 0x81, 0xd8, + 0xaf, 0xe6, 0xce, 0x49, 0x5d, 0x60, 0x68, 0x83, 0xf5, 0x0b, 0x4b, 0x6e, 0x9b, 0xba, 0xdc, 0x3b, + 0xb2, 0x99, 0x7f, 0x02, 0xea, 0xca, 0xa1, 0xae, 0xe6, 0x51, 0x57, 0x97, 0x35, 0xff, 0x78, 0xc5, + 0xb5, 0xf5, 0xe6, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x24, 0x65, 0x84, 0x33, 0x41, 0x06, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_group_service.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_group_service.pb.go new file mode 100644 index 
0000000..c509b72 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_group_service.pb.go @@ -0,0 +1,261 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouderrorreporting/v1beta1/error_group_service.proto + +package clouderrorreporting // import "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A request to return an individual group. +type GetGroupRequest struct { + // [Required] The group resource name. Written as + // projects/projectID/groups/group_name. + // Call + // + // groupStats.list to return a list of groups belonging to + // this project. + // + // Example: projects/my-project-123/groups/my-group + GroupName string `protobuf:"bytes,1,opt,name=group_name,json=groupName,proto3" json:"group_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGroupRequest) Reset() { *m = GetGroupRequest{} } +func (m *GetGroupRequest) String() string { return proto.CompactTextString(m) } +func (*GetGroupRequest) ProtoMessage() {} +func (*GetGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_group_service_99a7665773c1719a, []int{0} +} +func (m *GetGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGroupRequest.Unmarshal(m, b) +} +func (m *GetGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGroupRequest.Marshal(b, m, deterministic) +} +func (dst *GetGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGroupRequest.Merge(dst, src) +} +func (m *GetGroupRequest) XXX_Size() int { + return xxx_messageInfo_GetGroupRequest.Size(m) +} +func (m *GetGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGroupRequest proto.InternalMessageInfo + +func (m *GetGroupRequest) GetGroupName() string { + if m != nil { + return m.GroupName + } + return "" +} + +// A request to replace the existing data for the given group. +type UpdateGroupRequest struct { + // [Required] The group which replaces the resource on the server. 
+ Group *ErrorGroup `protobuf:"bytes,1,opt,name=group,proto3" json:"group,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateGroupRequest) Reset() { *m = UpdateGroupRequest{} } +func (m *UpdateGroupRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateGroupRequest) ProtoMessage() {} +func (*UpdateGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_group_service_99a7665773c1719a, []int{1} +} +func (m *UpdateGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateGroupRequest.Unmarshal(m, b) +} +func (m *UpdateGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateGroupRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateGroupRequest.Merge(dst, src) +} +func (m *UpdateGroupRequest) XXX_Size() int { + return xxx_messageInfo_UpdateGroupRequest.Size(m) +} +func (m *UpdateGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateGroupRequest proto.InternalMessageInfo + +func (m *UpdateGroupRequest) GetGroup() *ErrorGroup { + if m != nil { + return m.Group + } + return nil +} + +func init() { + proto.RegisterType((*GetGroupRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.GetGroupRequest") + proto.RegisterType((*UpdateGroupRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ErrorGroupServiceClient is the client API for ErrorGroupService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ErrorGroupServiceClient interface { + // Get the specified group. + GetGroup(ctx context.Context, in *GetGroupRequest, opts ...grpc.CallOption) (*ErrorGroup, error) + // Replace the data for the specified group. + // Fails if the group does not exist. + UpdateGroup(ctx context.Context, in *UpdateGroupRequest, opts ...grpc.CallOption) (*ErrorGroup, error) +} + +type errorGroupServiceClient struct { + cc *grpc.ClientConn +} + +func NewErrorGroupServiceClient(cc *grpc.ClientConn) ErrorGroupServiceClient { + return &errorGroupServiceClient{cc} +} + +func (c *errorGroupServiceClient) GetGroup(ctx context.Context, in *GetGroupRequest, opts ...grpc.CallOption) (*ErrorGroup, error) { + out := new(ErrorGroup) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *errorGroupServiceClient) UpdateGroup(ctx context.Context, in *UpdateGroupRequest, opts ...grpc.CallOption) (*ErrorGroup, error) { + out := new(ErrorGroup) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ErrorGroupServiceServer is the server API for ErrorGroupService service. +type ErrorGroupServiceServer interface { + // Get the specified group. 
+ GetGroup(context.Context, *GetGroupRequest) (*ErrorGroup, error) + // Replace the data for the specified group. + // Fails if the group does not exist. + UpdateGroup(context.Context, *UpdateGroupRequest) (*ErrorGroup, error) +} + +func RegisterErrorGroupServiceServer(s *grpc.Server, srv ErrorGroupServiceServer) { + s.RegisterService(&_ErrorGroupService_serviceDesc, srv) +} + +func _ErrorGroupService_GetGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorGroupServiceServer).GetGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorGroupServiceServer).GetGroup(ctx, req.(*GetGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ErrorGroupService_UpdateGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorGroupServiceServer).UpdateGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorGroupServiceServer).UpdateGroup(ctx, req.(*UpdateGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ErrorGroupService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + HandlerType: (*ErrorGroupServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGroup", + Handler: _ErrorGroupService_GetGroup_Handler, + }, + { + MethodName: "UpdateGroup", + Handler: _ErrorGroupService_UpdateGroup_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/clouderrorreporting/v1beta1/error_group_service.proto", +} + +func init() { + proto.RegisterFile("google/devtools/clouderrorreporting/v1beta1/error_group_service.proto", fileDescriptor_error_group_service_99a7665773c1719a) +} + +var fileDescriptor_error_group_service_99a7665773c1719a = []byte{ + // 398 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0xcb, 0x4a, 0x23, 0x41, + 0x14, 0x86, 0xe9, 0x0c, 0x33, 0x4c, 0x2a, 0x8b, 0x61, 0x6a, 0x31, 0x0c, 0xcd, 0x0c, 0x48, 0xdc, + 0x68, 0x02, 0x55, 0x76, 0x5c, 0x18, 0xbc, 0x20, 0x44, 0x42, 0x56, 0x4a, 0x88, 0x98, 0x85, 0x04, + 0x43, 0xa5, 0x53, 0x14, 0x2d, 0xdd, 0x75, 0xda, 0xea, 0x4a, 0x36, 0xe2, 0xc6, 0x07, 0x70, 0xe3, + 0x5b, 0xb8, 0xf6, 0x05, 0xdc, 0xba, 0xf5, 0x15, 0x7c, 0x07, 0xb7, 0xd2, 0x55, 0xb9, 0x98, 0x8b, + 0x60, 0x67, 0x7b, 0x2e, 0xff, 0xff, 0xd5, 0x5f, 0x07, 0xd5, 0x05, 0x80, 0x08, 0x39, 0xed, 0xf3, + 0xa1, 0x06, 0x08, 0x13, 0xea, 0x87, 0x30, 0xe8, 0x73, 0xa5, 0x40, 0x29, 0x1e, 0x83, 0xd2, 0x81, + 0x14, 0x74, 0xe8, 0xf5, 0xb8, 0x66, 0x1e, 0x35, 0xe5, 0xae, 0x50, 0x30, 0x88, 0xbb, 0x09, 0x57, + 0xc3, 0xc0, 0xe7, 0x24, 0x56, 0xa0, 0x01, 0x97, 0xad, 0x0c, 0x19, 0xcb, 0x90, 0x25, 0x32, 0x64, + 0x24, 0xe3, 0xfe, 0x1b, 0x79, 0xb2, 0x38, 0xa0, 0x4c, 0x4a, 0xd0, 0x4c, 0x07, 0x20, 
0x13, 0x2b, + 0xe5, 0x56, 0xb3, 0x10, 0xf9, 0x10, 0x45, 0x20, 0xed, 0x66, 0x71, 0x0b, 0xfd, 0x6a, 0x70, 0xdd, + 0x48, 0xf1, 0x5a, 0xfc, 0x6a, 0xc0, 0x13, 0x8d, 0xff, 0x23, 0x64, 0x71, 0x25, 0x8b, 0xf8, 0x5f, + 0x67, 0xcd, 0xd9, 0xc8, 0xb7, 0xf2, 0xa6, 0x72, 0xc2, 0x22, 0x5e, 0xf4, 0x11, 0x3e, 0x8b, 0xfb, + 0x4c, 0xf3, 0x99, 0xa5, 0x63, 0xf4, 0xdd, 0x8c, 0x98, 0xf9, 0x42, 0x65, 0x87, 0x64, 0x78, 0x1c, + 0xa9, 0xa7, 0x65, 0x2b, 0x67, 0x55, 0x2a, 0x77, 0xdf, 0xd0, 0xef, 0x69, 0xf5, 0xd4, 0xe6, 0x86, + 0x1f, 0x1d, 0xf4, 0x73, 0x4c, 0x8b, 0xf7, 0x33, 0x59, 0xcc, 0x3d, 0xd2, 0x5d, 0x15, 0xb0, 0xe8, + 0xdd, 0xbe, 0xbc, 0xde, 0xe7, 0xca, 0x78, 0x73, 0x92, 0xe7, 0xf5, 0x34, 0xad, 0x83, 0x58, 0xc1, + 0x25, 0xf7, 0x75, 0x42, 0x4b, 0xd4, 0x54, 0x13, 0x5a, 0xba, 0xc1, 0x4f, 0x0e, 0x2a, 0x7c, 0x88, + 0x0c, 0x1f, 0x66, 0xf2, 0x5e, 0x0c, 0x7b, 0x75, 0xf8, 0xaa, 0x81, 0xaf, 0xb8, 0xf3, 0xf0, 0xe4, + 0x53, 0xf8, 0x5d, 0xfb, 0x21, 0xb5, 0x37, 0x07, 0xa5, 0x87, 0x93, 0xc5, 0xb8, 0xf6, 0x67, 0xe1, + 0x07, 0x9b, 0xe9, 0xcd, 0x35, 0x9d, 0xf3, 0x8b, 0x91, 0x8c, 0x80, 0x90, 0x49, 0x41, 0x40, 0x09, + 0x2a, 0xb8, 0x34, 0x17, 0x49, 0x6d, 0x8b, 0xc5, 0x41, 0xf2, 0xa5, 0x73, 0xde, 0x5b, 0xd2, 0x7b, + 0xc8, 0xad, 0x37, 0xac, 0xc1, 0x51, 0xda, 0xb4, 0x09, 0xb4, 0x26, 0x7c, 0x6d, 0xaf, 0x96, 0x6e, + 0x3e, 0x8f, 0xa7, 0x3a, 0x66, 0xaa, 0x33, 0x3b, 0xd5, 0x69, 0x5b, 0xfd, 0xde, 0x0f, 0x83, 0xb5, + 0xfd, 0x1e, 0x00, 0x00, 0xff, 0xff, 0xc2, 0x0a, 0xfa, 0x93, 0xf6, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_stats_service.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_stats_service.pb.go new file mode 100644 index 0000000..65e44f7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/error_stats_service.pb.go @@ -0,0 +1,1142 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto + +package clouderrorreporting // import "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies how the time periods of error group counts are aligned. +type TimedCountAlignment int32 + +const ( + // No alignment specified. + TimedCountAlignment_ERROR_COUNT_ALIGNMENT_UNSPECIFIED TimedCountAlignment = 0 + // The time periods shall be consecutive, have width equal to the + // requested duration, and be aligned at the `alignment_time` provided in + // the request. + // The `alignment_time` does not have to be inside the query period but + // even if it is outside, only time periods are returned which overlap + // with the query period. 
+ // A rounded alignment will typically result in a + // different size of the first or the last time period. + TimedCountAlignment_ALIGNMENT_EQUAL_ROUNDED TimedCountAlignment = 1 + // The time periods shall be consecutive, have width equal to the + // requested duration, and be aligned at the end of the requested time + // period. This can result in a different size of the + // first time period. + TimedCountAlignment_ALIGNMENT_EQUAL_AT_END TimedCountAlignment = 2 +) + +var TimedCountAlignment_name = map[int32]string{ + 0: "ERROR_COUNT_ALIGNMENT_UNSPECIFIED", + 1: "ALIGNMENT_EQUAL_ROUNDED", + 2: "ALIGNMENT_EQUAL_AT_END", +} +var TimedCountAlignment_value = map[string]int32{ + "ERROR_COUNT_ALIGNMENT_UNSPECIFIED": 0, + "ALIGNMENT_EQUAL_ROUNDED": 1, + "ALIGNMENT_EQUAL_AT_END": 2, +} + +func (x TimedCountAlignment) String() string { + return proto.EnumName(TimedCountAlignment_name, int32(x)) +} +func (TimedCountAlignment) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{0} +} + +// A sorting order of error groups. +type ErrorGroupOrder int32 + +const ( + // No group order specified. + ErrorGroupOrder_GROUP_ORDER_UNSPECIFIED ErrorGroupOrder = 0 + // Total count of errors in the given time window in descending order. + ErrorGroupOrder_COUNT_DESC ErrorGroupOrder = 1 + // Timestamp when the group was last seen in the given time window + // in descending order. + ErrorGroupOrder_LAST_SEEN_DESC ErrorGroupOrder = 2 + // Timestamp when the group was created in descending order. + ErrorGroupOrder_CREATED_DESC ErrorGroupOrder = 3 + // Number of affected users in the given time window in descending order. + ErrorGroupOrder_AFFECTED_USERS_DESC ErrorGroupOrder = 4 +) + +var ErrorGroupOrder_name = map[int32]string{ + 0: "GROUP_ORDER_UNSPECIFIED", + 1: "COUNT_DESC", + 2: "LAST_SEEN_DESC", + 3: "CREATED_DESC", + 4: "AFFECTED_USERS_DESC", +} +var ErrorGroupOrder_value = map[string]int32{ + "GROUP_ORDER_UNSPECIFIED": 0, + "COUNT_DESC": 1, + "LAST_SEEN_DESC": 2, + "CREATED_DESC": 3, + "AFFECTED_USERS_DESC": 4, +} + +func (x ErrorGroupOrder) String() string { + return proto.EnumName(ErrorGroupOrder_name, int32(x)) +} +func (ErrorGroupOrder) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{1} +} + +// The supported time ranges. +type QueryTimeRange_Period int32 + +const ( + // Do not use. + QueryTimeRange_PERIOD_UNSPECIFIED QueryTimeRange_Period = 0 + // Retrieve data for the last hour. + // Recommended minimum timed count duration: 1 min. + QueryTimeRange_PERIOD_1_HOUR QueryTimeRange_Period = 1 + // Retrieve data for the last 6 hours. + // Recommended minimum timed count duration: 10 min. + QueryTimeRange_PERIOD_6_HOURS QueryTimeRange_Period = 2 + // Retrieve data for the last day. + // Recommended minimum timed count duration: 1 hour. + QueryTimeRange_PERIOD_1_DAY QueryTimeRange_Period = 3 + // Retrieve data for the last week. + // Recommended minimum timed count duration: 6 hours. + QueryTimeRange_PERIOD_1_WEEK QueryTimeRange_Period = 4 + // Retrieve data for the last 30 days. + // Recommended minimum timed count duration: 1 day. 
+ QueryTimeRange_PERIOD_30_DAYS QueryTimeRange_Period = 5 +) + +var QueryTimeRange_Period_name = map[int32]string{ + 0: "PERIOD_UNSPECIFIED", + 1: "PERIOD_1_HOUR", + 2: "PERIOD_6_HOURS", + 3: "PERIOD_1_DAY", + 4: "PERIOD_1_WEEK", + 5: "PERIOD_30_DAYS", +} +var QueryTimeRange_Period_value = map[string]int32{ + "PERIOD_UNSPECIFIED": 0, + "PERIOD_1_HOUR": 1, + "PERIOD_6_HOURS": 2, + "PERIOD_1_DAY": 3, + "PERIOD_1_WEEK": 4, + "PERIOD_30_DAYS": 5, +} + +func (x QueryTimeRange_Period) String() string { + return proto.EnumName(QueryTimeRange_Period_name, int32(x)) +} +func (QueryTimeRange_Period) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{6, 0} +} + +// Specifies a set of `ErrorGroupStats` to return. +type ListGroupStatsRequest struct { + // [Required] The resource name of the Google Cloud Platform project. Written + // as projects/ plus the + // Google Cloud + // Platform project ID. + // + // Example: projects/my-project-123. + ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` + // [Optional] List all ErrorGroupStats with these IDs. + GroupId []string `protobuf:"bytes,2,rep,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + // [Optional] List only ErrorGroupStats which belong to a service + // context that matches the filter. + // Data for all service contexts is returned if this field is not specified. + ServiceFilter *ServiceContextFilter `protobuf:"bytes,3,opt,name=service_filter,json=serviceFilter,proto3" json:"service_filter,omitempty"` + // [Optional] List data for the given time range. + // If not set a default time range is used. The field time_range_begin + // in the response will specify the beginning of this time range. + // Only ErrorGroupStats with a non-zero count in the given time + // range are returned, unless the request contains an explicit group_id list. + // If a group_id list is given, also ErrorGroupStats with zero + // occurrences are returned. + TimeRange *QueryTimeRange `protobuf:"bytes,5,opt,name=time_range,json=timeRange,proto3" json:"time_range,omitempty"` + // [Optional] The preferred duration for a single returned `TimedCount`. + // If not set, no timed counts are returned. + TimedCountDuration *duration.Duration `protobuf:"bytes,6,opt,name=timed_count_duration,json=timedCountDuration,proto3" json:"timed_count_duration,omitempty"` + // [Optional] The alignment of the timed counts to be returned. + // Default is `ALIGNMENT_EQUAL_AT_END`. + Alignment TimedCountAlignment `protobuf:"varint,7,opt,name=alignment,proto3,enum=google.devtools.clouderrorreporting.v1beta1.TimedCountAlignment" json:"alignment,omitempty"` + // [Optional] Time where the timed counts shall be aligned if rounded + // alignment is chosen. Default is 00:00 UTC. + AlignmentTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=alignment_time,json=alignmentTime,proto3" json:"alignment_time,omitempty"` + // [Optional] The sort order in which the results are returned. + // Default is `COUNT_DESC`. + Order ErrorGroupOrder `protobuf:"varint,9,opt,name=order,proto3,enum=google.devtools.clouderrorreporting.v1beta1.ErrorGroupOrder" json:"order,omitempty"` + // [Optional] The maximum number of results to return per response. + // Default is 20. + PageSize int32 `protobuf:"varint,11,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // [Optional] A `next_page_token` provided by a previous response. 
To view + // additional results, pass this token along with the identical query + // parameters as the first request. + PageToken string `protobuf:"bytes,12,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupStatsRequest) Reset() { *m = ListGroupStatsRequest{} } +func (m *ListGroupStatsRequest) String() string { return proto.CompactTextString(m) } +func (*ListGroupStatsRequest) ProtoMessage() {} +func (*ListGroupStatsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{0} +} +func (m *ListGroupStatsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupStatsRequest.Unmarshal(m, b) +} +func (m *ListGroupStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupStatsRequest.Marshal(b, m, deterministic) +} +func (dst *ListGroupStatsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupStatsRequest.Merge(dst, src) +} +func (m *ListGroupStatsRequest) XXX_Size() int { + return xxx_messageInfo_ListGroupStatsRequest.Size(m) +} +func (m *ListGroupStatsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupStatsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupStatsRequest proto.InternalMessageInfo + +func (m *ListGroupStatsRequest) GetProjectName() string { + if m != nil { + return m.ProjectName + } + return "" +} + +func (m *ListGroupStatsRequest) GetGroupId() []string { + if m != nil { + return m.GroupId + } + return nil +} + +func (m *ListGroupStatsRequest) GetServiceFilter() *ServiceContextFilter { + if m != nil { + return m.ServiceFilter + } + return nil +} + +func (m *ListGroupStatsRequest) GetTimeRange() *QueryTimeRange { + if m != nil { + return m.TimeRange + } + return nil +} + +func (m *ListGroupStatsRequest) GetTimedCountDuration() *duration.Duration { + if m != nil { + return m.TimedCountDuration + } + return nil +} + +func (m *ListGroupStatsRequest) GetAlignment() TimedCountAlignment { + if m != nil { + return m.Alignment + } + return TimedCountAlignment_ERROR_COUNT_ALIGNMENT_UNSPECIFIED +} + +func (m *ListGroupStatsRequest) GetAlignmentTime() *timestamp.Timestamp { + if m != nil { + return m.AlignmentTime + } + return nil +} + +func (m *ListGroupStatsRequest) GetOrder() ErrorGroupOrder { + if m != nil { + return m.Order + } + return ErrorGroupOrder_GROUP_ORDER_UNSPECIFIED +} + +func (m *ListGroupStatsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListGroupStatsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Contains a set of requested error group stats. +type ListGroupStatsResponse struct { + // The error group stats which match the given request. + ErrorGroupStats []*ErrorGroupStats `protobuf:"bytes,1,rep,name=error_group_stats,json=errorGroupStats,proto3" json:"error_group_stats,omitempty"` + // If non-empty, more results are available. + // Pass this token, along with the same query parameters as the first + // request, to view the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The timestamp specifies the start time to which the request was restricted. + // The start time is set based on the requested time range. 
It may be adjusted + // to a later time if a project has exceeded the storage quota and older data + // has been deleted. + TimeRangeBegin *timestamp.Timestamp `protobuf:"bytes,4,opt,name=time_range_begin,json=timeRangeBegin,proto3" json:"time_range_begin,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupStatsResponse) Reset() { *m = ListGroupStatsResponse{} } +func (m *ListGroupStatsResponse) String() string { return proto.CompactTextString(m) } +func (*ListGroupStatsResponse) ProtoMessage() {} +func (*ListGroupStatsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{1} +} +func (m *ListGroupStatsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupStatsResponse.Unmarshal(m, b) +} +func (m *ListGroupStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupStatsResponse.Marshal(b, m, deterministic) +} +func (dst *ListGroupStatsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupStatsResponse.Merge(dst, src) +} +func (m *ListGroupStatsResponse) XXX_Size() int { + return xxx_messageInfo_ListGroupStatsResponse.Size(m) +} +func (m *ListGroupStatsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupStatsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupStatsResponse proto.InternalMessageInfo + +func (m *ListGroupStatsResponse) GetErrorGroupStats() []*ErrorGroupStats { + if m != nil { + return m.ErrorGroupStats + } + return nil +} + +func (m *ListGroupStatsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListGroupStatsResponse) GetTimeRangeBegin() *timestamp.Timestamp { + if m != nil { + return m.TimeRangeBegin + } + return nil +} + +// Data extracted for a specific group based on certain filter criteria, +// such as a given time period and/or service filter. +type ErrorGroupStats struct { + // Group data that is independent of the filter criteria. + Group *ErrorGroup `protobuf:"bytes,1,opt,name=group,proto3" json:"group,omitempty"` + // Approximate total number of events in the given group that match + // the filter criteria. + Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + // Approximate number of affected users in the given group that + // match the filter criteria. + // Users are distinguished by data in the `ErrorContext` of the + // individual error events, such as their login name or their remote + // IP address in case of HTTP requests. + // The number of affected users can be zero even if the number of + // errors is non-zero if no data was provided from which the + // affected user could be deduced. + // Users are counted based on data in the request + // context that was provided in the error report. If more users are + // implicitly affected, such as due to a crash of the whole service, + // this is not reflected here. + AffectedUsersCount int64 `protobuf:"varint,3,opt,name=affected_users_count,json=affectedUsersCount,proto3" json:"affected_users_count,omitempty"` + // Approximate number of occurrences over time. + // Timed counts returned by ListGroups are guaranteed to be: + // + // - Inside the requested time interval + // - Non-overlapping, and + // - Ordered by ascending time. 
+ TimedCounts []*TimedCount `protobuf:"bytes,4,rep,name=timed_counts,json=timedCounts,proto3" json:"timed_counts,omitempty"` + // Approximate first occurrence that was ever seen for this group + // and which matches the given filter criteria, ignoring the + // time_range that was specified in the request. + FirstSeenTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=first_seen_time,json=firstSeenTime,proto3" json:"first_seen_time,omitempty"` + // Approximate last occurrence that was ever seen for this group and + // which matches the given filter criteria, ignoring the time_range + // that was specified in the request. + LastSeenTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=last_seen_time,json=lastSeenTime,proto3" json:"last_seen_time,omitempty"` + // Service contexts with a non-zero error count for the given filter + // criteria. This list can be truncated if multiple services are affected. + // Refer to `num_affected_services` for the total count. + AffectedServices []*ServiceContext `protobuf:"bytes,7,rep,name=affected_services,json=affectedServices,proto3" json:"affected_services,omitempty"` + // The total number of services with a non-zero error count for the given + // filter criteria. + NumAffectedServices int32 `protobuf:"varint,8,opt,name=num_affected_services,json=numAffectedServices,proto3" json:"num_affected_services,omitempty"` + // An arbitrary event that is chosen as representative for the whole group. + // The representative event is intended to be used as a quick preview for + // the whole group. Events in the group are usually sufficiently similar + // to each other such that showing an arbitrary representative provides + // insight into the characteristics of the group as a whole. + Representative *ErrorEvent `protobuf:"bytes,9,opt,name=representative,proto3" json:"representative,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorGroupStats) Reset() { *m = ErrorGroupStats{} } +func (m *ErrorGroupStats) String() string { return proto.CompactTextString(m) } +func (*ErrorGroupStats) ProtoMessage() {} +func (*ErrorGroupStats) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{2} +} +func (m *ErrorGroupStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorGroupStats.Unmarshal(m, b) +} +func (m *ErrorGroupStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorGroupStats.Marshal(b, m, deterministic) +} +func (dst *ErrorGroupStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorGroupStats.Merge(dst, src) +} +func (m *ErrorGroupStats) XXX_Size() int { + return xxx_messageInfo_ErrorGroupStats.Size(m) +} +func (m *ErrorGroupStats) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorGroupStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorGroupStats proto.InternalMessageInfo + +func (m *ErrorGroupStats) GetGroup() *ErrorGroup { + if m != nil { + return m.Group + } + return nil +} + +func (m *ErrorGroupStats) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *ErrorGroupStats) GetAffectedUsersCount() int64 { + if m != nil { + return m.AffectedUsersCount + } + return 0 +} + +func (m *ErrorGroupStats) GetTimedCounts() []*TimedCount { + if m != nil { + return m.TimedCounts + } + return nil +} + +func (m *ErrorGroupStats) GetFirstSeenTime() *timestamp.Timestamp { + if m != nil { + return m.FirstSeenTime + } + return nil +} + +func (m 
*ErrorGroupStats) GetLastSeenTime() *timestamp.Timestamp { + if m != nil { + return m.LastSeenTime + } + return nil +} + +func (m *ErrorGroupStats) GetAffectedServices() []*ServiceContext { + if m != nil { + return m.AffectedServices + } + return nil +} + +func (m *ErrorGroupStats) GetNumAffectedServices() int32 { + if m != nil { + return m.NumAffectedServices + } + return 0 +} + +func (m *ErrorGroupStats) GetRepresentative() *ErrorEvent { + if m != nil { + return m.Representative + } + return nil +} + +// The number of errors in a given time period. +// All numbers are approximate since the error events are sampled +// before counting them. +type TimedCount struct { + // Approximate number of occurrences in the given time period. + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + // Start of the time period to which `count` refers (included). + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End of the time period to which `count` refers (excluded). + EndTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimedCount) Reset() { *m = TimedCount{} } +func (m *TimedCount) String() string { return proto.CompactTextString(m) } +func (*TimedCount) ProtoMessage() {} +func (*TimedCount) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{3} +} +func (m *TimedCount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimedCount.Unmarshal(m, b) +} +func (m *TimedCount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimedCount.Marshal(b, m, deterministic) +} +func (dst *TimedCount) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimedCount.Merge(dst, src) +} +func (m *TimedCount) XXX_Size() int { + return xxx_messageInfo_TimedCount.Size(m) +} +func (m *TimedCount) XXX_DiscardUnknown() { + xxx_messageInfo_TimedCount.DiscardUnknown(m) +} + +var xxx_messageInfo_TimedCount proto.InternalMessageInfo + +func (m *TimedCount) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *TimedCount) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TimedCount) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Specifies a set of error events to return. +type ListEventsRequest struct { + // [Required] The resource name of the Google Cloud Platform project. Written + // as `projects/` plus the + // [Google Cloud Platform project + // ID](https://support.google.com/cloud/answer/6158840). + // Example: `projects/my-project-123`. + ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` + // [Required] The group for which events shall be returned. + GroupId string `protobuf:"bytes,2,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + // [Optional] List only ErrorGroups which belong to a service context that + // matches the filter. + // Data for all service contexts is returned if this field is not specified. + ServiceFilter *ServiceContextFilter `protobuf:"bytes,3,opt,name=service_filter,json=serviceFilter,proto3" json:"service_filter,omitempty"` + // [Optional] List only data for the given time range. 
+ // If not set a default time range is used. The field time_range_begin + // in the response will specify the beginning of this time range. + TimeRange *QueryTimeRange `protobuf:"bytes,4,opt,name=time_range,json=timeRange,proto3" json:"time_range,omitempty"` + // [Optional] The maximum number of results to return per response. + PageSize int32 `protobuf:"varint,6,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // [Optional] A `next_page_token` provided by a previous response. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEventsRequest) Reset() { *m = ListEventsRequest{} } +func (m *ListEventsRequest) String() string { return proto.CompactTextString(m) } +func (*ListEventsRequest) ProtoMessage() {} +func (*ListEventsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{4} +} +func (m *ListEventsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEventsRequest.Unmarshal(m, b) +} +func (m *ListEventsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEventsRequest.Marshal(b, m, deterministic) +} +func (dst *ListEventsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEventsRequest.Merge(dst, src) +} +func (m *ListEventsRequest) XXX_Size() int { + return xxx_messageInfo_ListEventsRequest.Size(m) +} +func (m *ListEventsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListEventsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEventsRequest proto.InternalMessageInfo + +func (m *ListEventsRequest) GetProjectName() string { + if m != nil { + return m.ProjectName + } + return "" +} + +func (m *ListEventsRequest) GetGroupId() string { + if m != nil { + return m.GroupId + } + return "" +} + +func (m *ListEventsRequest) GetServiceFilter() *ServiceContextFilter { + if m != nil { + return m.ServiceFilter + } + return nil +} + +func (m *ListEventsRequest) GetTimeRange() *QueryTimeRange { + if m != nil { + return m.TimeRange + } + return nil +} + +func (m *ListEventsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListEventsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Contains a set of requested error events. +type ListEventsResponse struct { + // The error events which match the given request. + ErrorEvents []*ErrorEvent `protobuf:"bytes,1,rep,name=error_events,json=errorEvents,proto3" json:"error_events,omitempty"` + // If non-empty, more results are available. + // Pass this token, along with the same query parameters as the first + // request, to view the next page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The timestamp specifies the start time to which the request was restricted. 
+ TimeRangeBegin *timestamp.Timestamp `protobuf:"bytes,4,opt,name=time_range_begin,json=timeRangeBegin,proto3" json:"time_range_begin,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListEventsResponse) Reset() { *m = ListEventsResponse{} } +func (m *ListEventsResponse) String() string { return proto.CompactTextString(m) } +func (*ListEventsResponse) ProtoMessage() {} +func (*ListEventsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{5} +} +func (m *ListEventsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListEventsResponse.Unmarshal(m, b) +} +func (m *ListEventsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListEventsResponse.Marshal(b, m, deterministic) +} +func (dst *ListEventsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListEventsResponse.Merge(dst, src) +} +func (m *ListEventsResponse) XXX_Size() int { + return xxx_messageInfo_ListEventsResponse.Size(m) +} +func (m *ListEventsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListEventsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListEventsResponse proto.InternalMessageInfo + +func (m *ListEventsResponse) GetErrorEvents() []*ErrorEvent { + if m != nil { + return m.ErrorEvents + } + return nil +} + +func (m *ListEventsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListEventsResponse) GetTimeRangeBegin() *timestamp.Timestamp { + if m != nil { + return m.TimeRangeBegin + } + return nil +} + +// Requests might be rejected or the resulting timed count durations might be +// adjusted for lower durations. +type QueryTimeRange struct { + // Restricts the query to the specified time range. + Period QueryTimeRange_Period `protobuf:"varint,1,opt,name=period,proto3,enum=google.devtools.clouderrorreporting.v1beta1.QueryTimeRange_Period" json:"period,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryTimeRange) Reset() { *m = QueryTimeRange{} } +func (m *QueryTimeRange) String() string { return proto.CompactTextString(m) } +func (*QueryTimeRange) ProtoMessage() {} +func (*QueryTimeRange) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{6} +} +func (m *QueryTimeRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryTimeRange.Unmarshal(m, b) +} +func (m *QueryTimeRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryTimeRange.Marshal(b, m, deterministic) +} +func (dst *QueryTimeRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryTimeRange.Merge(dst, src) +} +func (m *QueryTimeRange) XXX_Size() int { + return xxx_messageInfo_QueryTimeRange.Size(m) +} +func (m *QueryTimeRange) XXX_DiscardUnknown() { + xxx_messageInfo_QueryTimeRange.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryTimeRange proto.InternalMessageInfo + +func (m *QueryTimeRange) GetPeriod() QueryTimeRange_Period { + if m != nil { + return m.Period + } + return QueryTimeRange_PERIOD_UNSPECIFIED +} + +// Specifies criteria for filtering a subset of service contexts. +// The fields in the filter correspond to the fields in `ServiceContext`. +// Only exact, case-sensitive matches are supported. +// If a field is unset or empty, it matches arbitrary values. 
+type ServiceContextFilter struct { + // [Optional] The exact value to match against + // [`ServiceContext.service`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.service). + Service string `protobuf:"bytes,2,opt,name=service,proto3" json:"service,omitempty"` + // [Optional] The exact value to match against + // [`ServiceContext.version`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.version). + Version string `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` + // [Optional] The exact value to match against + // [`ServiceContext.resource_type`](/error-reporting/reference/rest/v1beta1/ServiceContext#FIELDS.resource_type). + ResourceType string `protobuf:"bytes,4,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceContextFilter) Reset() { *m = ServiceContextFilter{} } +func (m *ServiceContextFilter) String() string { return proto.CompactTextString(m) } +func (*ServiceContextFilter) ProtoMessage() {} +func (*ServiceContextFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{7} +} +func (m *ServiceContextFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceContextFilter.Unmarshal(m, b) +} +func (m *ServiceContextFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceContextFilter.Marshal(b, m, deterministic) +} +func (dst *ServiceContextFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceContextFilter.Merge(dst, src) +} +func (m *ServiceContextFilter) XXX_Size() int { + return xxx_messageInfo_ServiceContextFilter.Size(m) +} +func (m *ServiceContextFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceContextFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceContextFilter proto.InternalMessageInfo + +func (m *ServiceContextFilter) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *ServiceContextFilter) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *ServiceContextFilter) GetResourceType() string { + if m != nil { + return m.ResourceType + } + return "" +} + +// Deletes all events in the project. +type DeleteEventsRequest struct { + // [Required] The resource name of the Google Cloud Platform project. Written + // as `projects/` plus the + // [Google Cloud Platform project + // ID](https://support.google.com/cloud/answer/6158840). + // Example: `projects/my-project-123`. 
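For illustration, a minimal sketch of paging through group stats with the ErrorStatsService client generated further down in this file; the connection setup, project name, service name, and time period are assumed placeholders rather than values taken from this patch.

package example

import (
	"context"
	"fmt"

	pb "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1"
	"google.golang.org/grpc"
)

// listAllGroupStats walks every page of ListGroupStats results for one project.
// conn is assumed to be an already-dialed, authenticated *grpc.ClientConn.
func listAllGroupStats(ctx context.Context, conn *grpc.ClientConn, projectName string) error {
	client := pb.NewErrorStatsServiceClient(conn)
	req := &pb.ListGroupStatsRequest{
		ProjectName: projectName, // e.g. "projects/my-project-123"
		ServiceFilter: &pb.ServiceContextFilter{
			Service: "checkout", // hypothetical service; unset filter fields match anything
		},
		TimeRange: &pb.QueryTimeRange{Period: pb.QueryTimeRange_PERIOD_1_DAY}, // one of the QueryTimeRange_Period values
		PageSize:  100,
	}
	for {
		resp, err := client.ListGroupStats(ctx, req)
		if err != nil {
			return err
		}
		for _, s := range resp.GetErrorGroupStats() {
			fmt.Printf("%s: %d events\n", s.GetGroup().GetGroupId(), s.GetCount())
		}
		// Per the comments above: pass next_page_token back with the same
		// query parameters to fetch the next page; an empty token means done.
		if resp.GetNextPageToken() == "" {
			return nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}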
+ ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteEventsRequest) Reset() { *m = DeleteEventsRequest{} } +func (m *DeleteEventsRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteEventsRequest) ProtoMessage() {} +func (*DeleteEventsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{8} +} +func (m *DeleteEventsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteEventsRequest.Unmarshal(m, b) +} +func (m *DeleteEventsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteEventsRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteEventsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteEventsRequest.Merge(dst, src) +} +func (m *DeleteEventsRequest) XXX_Size() int { + return xxx_messageInfo_DeleteEventsRequest.Size(m) +} +func (m *DeleteEventsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteEventsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteEventsRequest proto.InternalMessageInfo + +func (m *DeleteEventsRequest) GetProjectName() string { + if m != nil { + return m.ProjectName + } + return "" +} + +// Response message for deleting error events. +type DeleteEventsResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteEventsResponse) Reset() { *m = DeleteEventsResponse{} } +func (m *DeleteEventsResponse) String() string { return proto.CompactTextString(m) } +func (*DeleteEventsResponse) ProtoMessage() {} +func (*DeleteEventsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_error_stats_service_906700da599bba31, []int{9} +} +func (m *DeleteEventsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteEventsResponse.Unmarshal(m, b) +} +func (m *DeleteEventsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteEventsResponse.Marshal(b, m, deterministic) +} +func (dst *DeleteEventsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteEventsResponse.Merge(dst, src) +} +func (m *DeleteEventsResponse) XXX_Size() int { + return xxx_messageInfo_DeleteEventsResponse.Size(m) +} +func (m *DeleteEventsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteEventsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteEventsResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*ListGroupStatsRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest") + proto.RegisterType((*ListGroupStatsResponse)(nil), "google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse") + proto.RegisterType((*ErrorGroupStats)(nil), "google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats") + proto.RegisterType((*TimedCount)(nil), "google.devtools.clouderrorreporting.v1beta1.TimedCount") + proto.RegisterType((*ListEventsRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.ListEventsRequest") + proto.RegisterType((*ListEventsResponse)(nil), "google.devtools.clouderrorreporting.v1beta1.ListEventsResponse") + proto.RegisterType((*QueryTimeRange)(nil), "google.devtools.clouderrorreporting.v1beta1.QueryTimeRange") + proto.RegisterType((*ServiceContextFilter)(nil), 
"google.devtools.clouderrorreporting.v1beta1.ServiceContextFilter") + proto.RegisterType((*DeleteEventsRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.DeleteEventsRequest") + proto.RegisterType((*DeleteEventsResponse)(nil), "google.devtools.clouderrorreporting.v1beta1.DeleteEventsResponse") + proto.RegisterEnum("google.devtools.clouderrorreporting.v1beta1.TimedCountAlignment", TimedCountAlignment_name, TimedCountAlignment_value) + proto.RegisterEnum("google.devtools.clouderrorreporting.v1beta1.ErrorGroupOrder", ErrorGroupOrder_name, ErrorGroupOrder_value) + proto.RegisterEnum("google.devtools.clouderrorreporting.v1beta1.QueryTimeRange_Period", QueryTimeRange_Period_name, QueryTimeRange_Period_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ErrorStatsServiceClient is the client API for ErrorStatsService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ErrorStatsServiceClient interface { + // Lists the specified groups. + ListGroupStats(ctx context.Context, in *ListGroupStatsRequest, opts ...grpc.CallOption) (*ListGroupStatsResponse, error) + // Lists the specified events. + ListEvents(ctx context.Context, in *ListEventsRequest, opts ...grpc.CallOption) (*ListEventsResponse, error) + // Deletes all error events of a given project. + DeleteEvents(ctx context.Context, in *DeleteEventsRequest, opts ...grpc.CallOption) (*DeleteEventsResponse, error) +} + +type errorStatsServiceClient struct { + cc *grpc.ClientConn +} + +func NewErrorStatsServiceClient(cc *grpc.ClientConn) ErrorStatsServiceClient { + return &errorStatsServiceClient{cc} +} + +func (c *errorStatsServiceClient) ListGroupStats(ctx context.Context, in *ListGroupStatsRequest, opts ...grpc.CallOption) (*ListGroupStatsResponse, error) { + out := new(ListGroupStatsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *errorStatsServiceClient) ListEvents(ctx context.Context, in *ListEventsRequest, opts ...grpc.CallOption) (*ListEventsResponse, error) { + out := new(ListEventsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *errorStatsServiceClient) DeleteEvents(ctx context.Context, in *DeleteEventsRequest, opts ...grpc.CallOption) (*DeleteEventsResponse, error) { + out := new(DeleteEventsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ErrorStatsServiceServer is the server API for ErrorStatsService service. +type ErrorStatsServiceServer interface { + // Lists the specified groups. + ListGroupStats(context.Context, *ListGroupStatsRequest) (*ListGroupStatsResponse, error) + // Lists the specified events. + ListEvents(context.Context, *ListEventsRequest) (*ListEventsResponse, error) + // Deletes all error events of a given project. 
+ DeleteEvents(context.Context, *DeleteEventsRequest) (*DeleteEventsResponse, error) +} + +func RegisterErrorStatsServiceServer(s *grpc.Server, srv ErrorStatsServiceServer) { + s.RegisterService(&_ErrorStatsService_serviceDesc, srv) +} + +func _ErrorStatsService_ListGroupStats_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListGroupStatsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorStatsServiceServer).ListGroupStats(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorStatsServiceServer).ListGroupStats(ctx, req.(*ListGroupStatsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ErrorStatsService_ListEvents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListEventsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorStatsServiceServer).ListEvents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorStatsServiceServer).ListEvents(ctx, req.(*ListEventsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ErrorStatsService_DeleteEvents_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteEventsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ErrorStatsServiceServer).DeleteEvents(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ErrorStatsServiceServer).DeleteEvents(ctx, req.(*DeleteEventsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ErrorStatsService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + HandlerType: (*ErrorStatsServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListGroupStats", + Handler: _ErrorStatsService_ListGroupStats_Handler, + }, + { + MethodName: "ListEvents", + Handler: _ErrorStatsService_ListEvents_Handler, + }, + { + MethodName: "DeleteEvents", + Handler: _ErrorStatsService_DeleteEvents_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto", +} + +func init() { + proto.RegisterFile("google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto", fileDescriptor_error_stats_service_906700da599bba31) +} + +var fileDescriptor_error_stats_service_906700da599bba31 = []byte{ + // 1328 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0xcd, 0x6f, 0x1b, 0x45, + 0x14, 0x67, 0xed, 0x38, 0x89, 0x9f, 0x1d, 0xc7, 0x99, 0xa4, 0xe9, 0xd6, 0xe5, 0x23, 0x75, 0x05, + 0x0a, 0xa9, 0xb0, 0x9b, 0x54, 0xa5, 0x45, 0xe5, 0xa3, 0x8e, 
0xbd, 0x09, 0x51, 0x53, 0xdb, 0x1d, + 0xdb, 0x45, 0x44, 0x55, 0x57, 0x1b, 0xfb, 0xc5, 0x5d, 0xb0, 0x77, 0x97, 0xdd, 0x71, 0xd4, 0x16, + 0x55, 0x42, 0xdc, 0x38, 0xc3, 0x8d, 0xff, 0x80, 0xbf, 0x82, 0x13, 0x07, 0x4e, 0x48, 0xbd, 0x73, + 0xe2, 0x0e, 0xe2, 0xc2, 0x15, 0xcd, 0xc7, 0xfa, 0xab, 0x11, 0xa9, 0x1d, 0x84, 0xb8, 0xed, 0xbc, + 0x37, 0xef, 0xf7, 0x3e, 0xe6, 0xf7, 0xde, 0xcc, 0x82, 0xd1, 0x76, 0xdd, 0x76, 0x07, 0xf3, 0x2d, + 0x3c, 0x66, 0xae, 0xdb, 0x09, 0xf2, 0xcd, 0x8e, 0xdb, 0x6b, 0xa1, 0xef, 0xbb, 0xbe, 0x8f, 0x9e, + 0xeb, 0x33, 0xdb, 0x69, 0xe7, 0x8f, 0x37, 0x0f, 0x91, 0x59, 0x9b, 0x79, 0x21, 0x36, 0x03, 0x66, + 0xb1, 0xc0, 0x0c, 0xd0, 0x3f, 0xb6, 0x9b, 0x98, 0xf3, 0x7c, 0x97, 0xb9, 0xe4, 0x8a, 0x84, 0xc9, + 0x85, 0x30, 0xb9, 0x13, 0x60, 0x72, 0x0a, 0x26, 0xf3, 0xaa, 0xf2, 0x69, 0x79, 0x76, 0xde, 0x72, + 0x1c, 0x97, 0x59, 0xcc, 0x76, 0x9d, 0x40, 0x42, 0x65, 0x6e, 0x4e, 0x12, 0x51, 0xd3, 0xed, 0x76, + 0x5d, 0x47, 0x59, 0xbe, 0xae, 0x2c, 0xc5, 0xea, 0xb0, 0x77, 0x94, 0x6f, 0xf5, 0x7c, 0x01, 0xad, + 0xf4, 0x6f, 0x8c, 0xeb, 0x99, 0xdd, 0xc5, 0x80, 0x59, 0x5d, 0x4f, 0x6e, 0xc8, 0x7e, 0x1f, 0x83, + 0x73, 0xfb, 0x76, 0xc0, 0x76, 0x7d, 0xb7, 0xe7, 0xd5, 0x78, 0x9a, 0x14, 0xbf, 0xe8, 0x61, 0xc0, + 0xc8, 0x25, 0x48, 0x7a, 0xbe, 0xfb, 0x19, 0x36, 0x99, 0xe9, 0x58, 0x5d, 0xd4, 0xb5, 0x35, 0x6d, + 0x3d, 0x4e, 0x13, 0x4a, 0x56, 0xb6, 0xba, 0x48, 0x2e, 0xc0, 0x7c, 0x9b, 0xdb, 0x99, 0x76, 0x4b, + 0x8f, 0xac, 0x45, 0xd7, 0xe3, 0x74, 0x4e, 0xac, 0xf7, 0x5a, 0xe4, 0x11, 0xa4, 0x54, 0xb9, 0xcc, + 0x23, 0xbb, 0xc3, 0xd0, 0xd7, 0xa3, 0x6b, 0xda, 0x7a, 0x62, 0xab, 0x90, 0x9b, 0xa0, 0x6c, 0xb9, + 0x9a, 0x84, 0x28, 0xba, 0x0e, 0xc3, 0xc7, 0x6c, 0x47, 0x00, 0xd1, 0x05, 0x05, 0x2c, 0x97, 0xe4, + 0x00, 0x80, 0x27, 0x65, 0xfa, 0x96, 0xd3, 0x46, 0x3d, 0x26, 0xbc, 0xdc, 0x9a, 0xc8, 0xcb, 0xbd, + 0x1e, 0xfa, 0x4f, 0xea, 0x76, 0x17, 0x29, 0x87, 0xa0, 0x71, 0x16, 0x7e, 0x92, 0x3b, 0xb0, 0xc2, + 0x17, 0x2d, 0xb3, 0xe9, 0xf6, 0x1c, 0x66, 0x86, 0xc5, 0xd5, 0x67, 0x85, 0x97, 0x0b, 0xa1, 0x97, + 0xb0, 0xba, 0xb9, 0x92, 0xda, 0x40, 0x89, 0x30, 0x2b, 0x72, 0xab, 0x50, 0x46, 0x1e, 0x42, 0xdc, + 0xea, 0xd8, 0x6d, 0xa7, 0x8b, 0x0e, 0xd3, 0xe7, 0xd6, 0xb4, 0xf5, 0xd4, 0xd6, 0xed, 0x89, 0xe2, + 0xac, 0xf7, 0x31, 0x0b, 0x21, 0x0e, 0x1d, 0x40, 0x92, 0x02, 0xa4, 0xfa, 0x0b, 0x93, 0xfb, 0xd7, + 0xe7, 0x45, 0x98, 0x99, 0x17, 0xc2, 0xac, 0x87, 0x24, 0xa0, 0x0b, 0x7d, 0x0b, 0x2e, 0x23, 0x14, + 0x62, 0xae, 0xdf, 0x42, 0x5f, 0x8f, 0x8b, 0xf0, 0xde, 0x9f, 0x28, 0x3c, 0x83, 0x8b, 0x05, 0x8f, + 0x2a, 0x1c, 0x83, 0x4a, 0x28, 0x72, 0x11, 0xe2, 0x9e, 0xd5, 0x46, 0x33, 0xb0, 0x9f, 0xa2, 0x9e, + 0x58, 0xd3, 0xd6, 0x63, 0x74, 0x9e, 0x0b, 0x6a, 0xf6, 0x53, 0x24, 0xaf, 0x01, 0x08, 0x25, 0x73, + 0x3f, 0x47, 0x47, 0x4f, 0x0a, 0x8a, 0x89, 0xed, 0x75, 0x2e, 0xc8, 0xfe, 0xa1, 0xc1, 0xea, 0x38, + 0x3b, 0x03, 0xcf, 0x75, 0x02, 0x24, 0x8f, 0x60, 0x49, 0xf6, 0xa6, 0x64, 0xa0, 0xe8, 0x50, 0x5d, + 0x5b, 0x8b, 0xae, 0x27, 0xa6, 0x0e, 0x5b, 0x3a, 0x58, 0xc4, 0x51, 0x01, 0x79, 0x0b, 0x16, 0x1d, + 0x7c, 0xcc, 0xcc, 0xa1, 0x40, 0x23, 0x22, 0xd0, 0x05, 0x2e, 0xae, 0x86, 0xc1, 0x92, 0x12, 0xa4, + 0x07, 0x44, 0x34, 0x0f, 0xb1, 0x6d, 0x3b, 0xfa, 0xcc, 0xa9, 0x27, 0x90, 0xea, 0xb3, 0x6d, 0x9b, + 0x5b, 0x64, 0xbf, 0x89, 0xc1, 0xe2, 0x58, 0x48, 0xe4, 0x2e, 0xc4, 0x44, 0x96, 0xa2, 0x07, 0x13, + 0x5b, 0x37, 0xa6, 0xcc, 0x8f, 0x4a, 0x14, 0xb2, 0x02, 0x31, 0xc1, 0x67, 0x91, 0x46, 0x94, 0xca, + 0x05, 0xb9, 0x0a, 0x2b, 0xd6, 0xd1, 0x11, 0x36, 0x19, 0xb6, 0xcc, 0x5e, 0x80, 0x7e, 0x20, 0x49, + 0x2f, 0xfa, 0x36, 0x4a, 0x49, 0xa8, 0x6b, 0x70, 0x95, 0x20, 0x21, 0x39, 0x80, 0xe4, 
0x50, 0x77, + 0x04, 0xfa, 0x8c, 0xa8, 0xfe, 0x8d, 0x29, 0x39, 0x4d, 0x13, 0x83, 0x9e, 0x09, 0xc8, 0x36, 0x2c, + 0x1e, 0xd9, 0x7e, 0xc0, 0xcc, 0x00, 0xd1, 0x91, 0x6c, 0x8e, 0x9d, 0xce, 0x66, 0x61, 0x52, 0x43, + 0x74, 0x04, 0x9b, 0x6f, 0x43, 0xaa, 0x63, 0x8d, 0x40, 0xcc, 0x9e, 0x0a, 0x91, 0xe4, 0x16, 0x7d, + 0x84, 0x47, 0xb0, 0xd4, 0xaf, 0x89, 0x9a, 0x3a, 0x81, 0x3e, 0x27, 0xd2, 0xbc, 0x75, 0x86, 0x41, + 0x46, 0xd3, 0x21, 0xaa, 0x92, 0x07, 0x64, 0x0b, 0xce, 0x39, 0xbd, 0xae, 0xf9, 0xa2, 0xb7, 0x79, + 0xd1, 0x31, 0xcb, 0x4e, 0xaf, 0x5b, 0x18, 0xb7, 0x31, 0x21, 0xe5, 0xa3, 0xe7, 0x63, 0x80, 0x0e, + 0xbf, 0x4f, 0x8e, 0x51, 0xb4, 0xed, 0x54, 0xfc, 0x30, 0x8e, 0xf9, 0x30, 0x19, 0x83, 0xcb, 0x7e, + 0xa7, 0x01, 0x0c, 0x0e, 0x68, 0xc0, 0x1b, 0x6d, 0x98, 0x37, 0xef, 0x01, 0x04, 0xcc, 0xf2, 0xd5, + 0xc8, 0x89, 0x9c, 0x5a, 0xe1, 0xb8, 0xd8, 0x2d, 0xca, 0x7b, 0x1d, 0xe6, 0xd1, 0x69, 0x49, 0xc3, + 0xe8, 0xa9, 0x86, 0x73, 0xe8, 0xb4, 0xf8, 0x2a, 0xfb, 0x3c, 0x02, 0x4b, 0x7c, 0x2a, 0x88, 0xa0, + 0xa7, 0xbf, 0xaf, 0xb4, 0xff, 0xc3, 0x7d, 0x35, 0xf3, 0xaf, 0xde, 0x57, 0x23, 0xb3, 0x76, 0xf6, + 0x1f, 0x67, 0xed, 0xdc, 0xf8, 0xac, 0xfd, 0x55, 0x03, 0x32, 0x5c, 0x55, 0x35, 0x67, 0x0f, 0x20, + 0x29, 0xe7, 0x2c, 0x0a, 0xb9, 0x1a, 0xb1, 0x53, 0x53, 0x2c, 0x81, 0xfd, 0xef, 0xff, 0x7a, 0xb2, + 0xfe, 0xae, 0x41, 0x6a, 0xb4, 0x74, 0xe4, 0x00, 0x66, 0x3d, 0xf4, 0x6d, 0xb7, 0x25, 0xd8, 0x92, + 0xda, 0xda, 0x3e, 0xc3, 0x39, 0xe4, 0xaa, 0x02, 0x89, 0x2a, 0xc4, 0xec, 0x57, 0x1a, 0xcc, 0x4a, + 0x11, 0x59, 0x05, 0x52, 0x35, 0xe8, 0x5e, 0xa5, 0x64, 0x36, 0xca, 0xb5, 0xaa, 0x51, 0xdc, 0xdb, + 0xd9, 0x33, 0x4a, 0xe9, 0x57, 0xc8, 0x12, 0x2c, 0x28, 0xf9, 0xa6, 0xf9, 0x71, 0xa5, 0x41, 0xd3, + 0x1a, 0x21, 0x90, 0x52, 0xa2, 0x77, 0x85, 0xa8, 0x96, 0x8e, 0x90, 0x34, 0x24, 0xfb, 0xdb, 0x4a, + 0x85, 0x4f, 0xd3, 0xd1, 0x11, 0xc3, 0x4f, 0x0c, 0xe3, 0x4e, 0x7a, 0x66, 0xc8, 0xf0, 0xda, 0x55, + 0xbe, 0xab, 0x96, 0x8e, 0x65, 0x5d, 0x58, 0x39, 0x89, 0x91, 0x44, 0x87, 0x39, 0xc5, 0xc9, 0xb0, + 0x0d, 0xd4, 0x92, 0x6b, 0x8e, 0xd1, 0x0f, 0xf8, 0x1b, 0x27, 0x2a, 0x35, 0x6a, 0x49, 0x2e, 0xc3, + 0x82, 0x8f, 0x81, 0xdb, 0xf3, 0x9b, 0x68, 0xb2, 0x27, 0x9e, 0x64, 0x6e, 0x9c, 0x26, 0x43, 0x61, + 0xfd, 0x89, 0x87, 0xd9, 0x9b, 0xb0, 0x5c, 0xc2, 0x0e, 0x32, 0x9c, 0xb4, 0x35, 0xb3, 0xab, 0xb0, + 0x32, 0x6a, 0x29, 0xe9, 0xb7, 0xd1, 0x83, 0xe5, 0x13, 0x9e, 0x3d, 0xe4, 0x4d, 0xb8, 0x64, 0x50, + 0x5a, 0xa1, 0x66, 0xb1, 0xd2, 0x28, 0xd7, 0xcd, 0xc2, 0xfe, 0xde, 0x6e, 0xf9, 0xae, 0x51, 0xae, + 0x8f, 0x15, 0xf8, 0x22, 0x9c, 0x1f, 0xa8, 0x8c, 0x7b, 0x8d, 0xc2, 0xbe, 0x49, 0x2b, 0x8d, 0x72, + 0xc9, 0x28, 0xa5, 0x35, 0x92, 0x81, 0xd5, 0x71, 0x65, 0xa1, 0x6e, 0x1a, 0xe5, 0x52, 0x3a, 0xb2, + 0xf1, 0x6c, 0xf8, 0x12, 0xae, 0xa8, 0x77, 0xcc, 0xf9, 0x5d, 0x5a, 0x69, 0x54, 0xcd, 0x0a, 0x2d, + 0x19, 0x74, 0xcc, 0x51, 0x0a, 0x40, 0x46, 0x52, 0x32, 0x6a, 0x45, 0x79, 0x8c, 0xfb, 0x85, 0x5a, + 0xdd, 0xac, 0x19, 0x46, 0x59, 0xca, 0xc4, 0x31, 0x16, 0xa9, 0x51, 0xa8, 0x1b, 0x25, 0x29, 0x89, + 0x92, 0xf3, 0xb0, 0x5c, 0xd8, 0xd9, 0x31, 0x8a, 0x5c, 0xd4, 0xa8, 0x19, 0xb4, 0x26, 0x15, 0x33, + 0x5b, 0x7f, 0xce, 0xc0, 0x92, 0xf0, 0x2f, 0xee, 0x7f, 0x75, 0x86, 0xe4, 0x17, 0x0d, 0x52, 0xa3, + 0xaf, 0x21, 0x32, 0x19, 0x61, 0x4f, 0x7c, 0xe8, 0x67, 0x8a, 0x67, 0xc2, 0x90, 0xe7, 0x94, 0xbd, + 0xfe, 0xf5, 0xf3, 0xdf, 0xbe, 0x8d, 0xe4, 0xc9, 0x3b, 0xfd, 0xff, 0x94, 0x2f, 0x87, 0x8f, 0xfc, + 0x03, 0xb5, 0x08, 0xf2, 0x1b, 0xcf, 0xf2, 0xed, 0x41, 0xfc, 0x3f, 0x6a, 0x00, 0x83, 0xa1, 0x43, + 0x3e, 0x9c, 0x38, 0x94, 0x11, 0xa2, 0x65, 0x3e, 0x9a, 0xda, 0x5e, 0xa5, 0xb1, 0x29, 0xd2, 0xb8, + 0x42, 0xde, 
0x7e, 0x89, 0x34, 0xe4, 0x40, 0x24, 0x3f, 0x69, 0x90, 0x1c, 0xa6, 0x2e, 0x99, 0xec, + 0x51, 0x7f, 0x42, 0xbf, 0x64, 0x0a, 0x67, 0x40, 0x18, 0x4d, 0x64, 0xe3, 0xe5, 0x13, 0xd9, 0xfe, + 0x4b, 0x03, 0xfe, 0x73, 0x39, 0x89, 0xef, 0xed, 0xd5, 0x17, 0x58, 0x5a, 0xe5, 0x83, 0xb8, 0xaa, + 0x1d, 0x3c, 0x54, 0x30, 0x6d, 0xb7, 0x63, 0x39, 0xed, 0x9c, 0xeb, 0xb7, 0xf3, 0x6d, 0x74, 0xc4, + 0x98, 0xce, 0x4b, 0x95, 0xe5, 0xd9, 0xc1, 0x4b, 0xfd, 0xf2, 0xde, 0x3a, 0x41, 0xf7, 0x43, 0xe4, + 0xf2, 0xae, 0x74, 0x50, 0xe4, 0x4a, 0x79, 0xc5, 0xd0, 0x7e, 0x7c, 0xf7, 0x37, 0xb7, 0xb9, 0xe5, + 0xcf, 0xe1, 0xae, 0x07, 0x62, 0xd7, 0x83, 0xd1, 0x5d, 0x0f, 0xee, 0x4b, 0xfc, 0xc3, 0x59, 0x11, + 0xd6, 0xb5, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xb8, 0x90, 0xe3, 0x06, 0x1a, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/report_errors_service.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/report_errors_service.pb.go new file mode 100644 index 0000000..aa3ed8f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1/report_errors_service.pb.go @@ -0,0 +1,318 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto + +package clouderrorreporting // import "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A request for reporting an individual error event. +type ReportErrorEventRequest struct { + // [Required] The resource name of the Google Cloud Platform project. Written + // as `projects/` plus the + // [Google Cloud Platform project + // ID](https://support.google.com/cloud/answer/6158840). Example: + // `projects/my-project-123`. + ProjectName string `protobuf:"bytes,1,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` + // [Required] The error event to be reported. 
+ Event *ReportedErrorEvent `protobuf:"bytes,2,opt,name=event,proto3" json:"event,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportErrorEventRequest) Reset() { *m = ReportErrorEventRequest{} } +func (m *ReportErrorEventRequest) String() string { return proto.CompactTextString(m) } +func (*ReportErrorEventRequest) ProtoMessage() {} +func (*ReportErrorEventRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_report_errors_service_e8d1154fe6c98b5b, []int{0} +} +func (m *ReportErrorEventRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportErrorEventRequest.Unmarshal(m, b) +} +func (m *ReportErrorEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportErrorEventRequest.Marshal(b, m, deterministic) +} +func (dst *ReportErrorEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportErrorEventRequest.Merge(dst, src) +} +func (m *ReportErrorEventRequest) XXX_Size() int { + return xxx_messageInfo_ReportErrorEventRequest.Size(m) +} +func (m *ReportErrorEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReportErrorEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportErrorEventRequest proto.InternalMessageInfo + +func (m *ReportErrorEventRequest) GetProjectName() string { + if m != nil { + return m.ProjectName + } + return "" +} + +func (m *ReportErrorEventRequest) GetEvent() *ReportedErrorEvent { + if m != nil { + return m.Event + } + return nil +} + +// Response for reporting an individual error event. +// Data may be added to this message in the future. +type ReportErrorEventResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportErrorEventResponse) Reset() { *m = ReportErrorEventResponse{} } +func (m *ReportErrorEventResponse) String() string { return proto.CompactTextString(m) } +func (*ReportErrorEventResponse) ProtoMessage() {} +func (*ReportErrorEventResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_report_errors_service_e8d1154fe6c98b5b, []int{1} +} +func (m *ReportErrorEventResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportErrorEventResponse.Unmarshal(m, b) +} +func (m *ReportErrorEventResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportErrorEventResponse.Marshal(b, m, deterministic) +} +func (dst *ReportErrorEventResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportErrorEventResponse.Merge(dst, src) +} +func (m *ReportErrorEventResponse) XXX_Size() int { + return xxx_messageInfo_ReportErrorEventResponse.Size(m) +} +func (m *ReportErrorEventResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReportErrorEventResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportErrorEventResponse proto.InternalMessageInfo + +// An error event which is reported to the Error Reporting system. +type ReportedErrorEvent struct { + // [Optional] Time when the event occurred. + // If not provided, the time when the event was received by the + // Error Reporting system will be used. + EventTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=event_time,json=eventTime,proto3" json:"event_time,omitempty"` + // [Required] The service context in which this error has occurred. 
+ ServiceContext *ServiceContext `protobuf:"bytes,2,opt,name=service_context,json=serviceContext,proto3" json:"service_context,omitempty"` + // [Required] A message describing the error. The message can contain an + // exception stack in one of the supported programming languages and formats. + // In that case, the message is parsed and detailed exception information + // is returned when retrieving the error event again. + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + // [Optional] A description of the context in which the error occurred. + Context *ErrorContext `protobuf:"bytes,4,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportedErrorEvent) Reset() { *m = ReportedErrorEvent{} } +func (m *ReportedErrorEvent) String() string { return proto.CompactTextString(m) } +func (*ReportedErrorEvent) ProtoMessage() {} +func (*ReportedErrorEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_report_errors_service_e8d1154fe6c98b5b, []int{2} +} +func (m *ReportedErrorEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportedErrorEvent.Unmarshal(m, b) +} +func (m *ReportedErrorEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportedErrorEvent.Marshal(b, m, deterministic) +} +func (dst *ReportedErrorEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportedErrorEvent.Merge(dst, src) +} +func (m *ReportedErrorEvent) XXX_Size() int { + return xxx_messageInfo_ReportedErrorEvent.Size(m) +} +func (m *ReportedErrorEvent) XXX_DiscardUnknown() { + xxx_messageInfo_ReportedErrorEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportedErrorEvent proto.InternalMessageInfo + +func (m *ReportedErrorEvent) GetEventTime() *timestamp.Timestamp { + if m != nil { + return m.EventTime + } + return nil +} + +func (m *ReportedErrorEvent) GetServiceContext() *ServiceContext { + if m != nil { + return m.ServiceContext + } + return nil +} + +func (m *ReportedErrorEvent) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func (m *ReportedErrorEvent) GetContext() *ErrorContext { + if m != nil { + return m.Context + } + return nil +} + +func init() { + proto.RegisterType((*ReportErrorEventRequest)(nil), "google.devtools.clouderrorreporting.v1beta1.ReportErrorEventRequest") + proto.RegisterType((*ReportErrorEventResponse)(nil), "google.devtools.clouderrorreporting.v1beta1.ReportErrorEventResponse") + proto.RegisterType((*ReportedErrorEvent)(nil), "google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ReportErrorsServiceClient is the client API for ReportErrorsService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ReportErrorsServiceClient interface { + // Report an individual error event. + // + // This endpoint accepts either an OAuth token, + // or an + // API key + // for authentication. To use an API key, append it to the URL as the value of + // a `key` parameter. For example: + //
+	// POST
+	// https://clouderrorreporting.googleapis.com/v1beta1/projects/example-project/events:report?key=123ABC456
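For illustration, a minimal sketch of a report sent through the generated gRPC client, assuming an already-authenticated *grpc.ClientConn and hypothetical service and version values; the event time is left unset so the server assigns it, as the field comment below describes.

package example

import (
	"context"

	pb "google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1"
	"google.golang.org/grpc"
)

// reportError sends one error event for the given project.
// conn is assumed to be an already-dialed, authenticated *grpc.ClientConn.
func reportError(ctx context.Context, conn *grpc.ClientConn, projectName, message string) error {
	client := pb.NewReportErrorsServiceClient(conn)
	_, err := client.ReportErrorEvent(ctx, &pb.ReportErrorEventRequest{
		ProjectName: projectName, // e.g. "projects/my-project-123"
		Event: &pb.ReportedErrorEvent{
			ServiceContext: &pb.ServiceContext{
				Service: "worker", // hypothetical service name
				Version: "1.2.3",  // hypothetical version
			},
			// Message may contain a stack trace in a supported format; the
			// server parses it, as described in the field comment below.
			Message: message,
		},
	})
	return err
}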
+ ReportErrorEvent(ctx context.Context, in *ReportErrorEventRequest, opts ...grpc.CallOption) (*ReportErrorEventResponse, error) +} + +type reportErrorsServiceClient struct { + cc *grpc.ClientConn +} + +func NewReportErrorsServiceClient(cc *grpc.ClientConn) ReportErrorsServiceClient { + return &reportErrorsServiceClient{cc} +} + +func (c *reportErrorsServiceClient) ReportErrorEvent(ctx context.Context, in *ReportErrorEventRequest, opts ...grpc.CallOption) (*ReportErrorEventResponse, error) { + out := new(ReportErrorEventResponse) + err := c.cc.Invoke(ctx, "/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ReportErrorsServiceServer is the server API for ReportErrorsService service. +type ReportErrorsServiceServer interface { + // Report an individual error event. + // + // This endpoint accepts either an OAuth token, + // or an + // API key + // for authentication. To use an API key, append it to the URL as the value of + // a `key` parameter. For example: + //
+	// POST
+	// https://clouderrorreporting.googleapis.com/v1beta1/projects/example-project/events:report?key=123ABC456
+ ReportErrorEvent(context.Context, *ReportErrorEventRequest) (*ReportErrorEventResponse, error) +} + +func RegisterReportErrorsServiceServer(s *grpc.Server, srv ReportErrorsServiceServer) { + s.RegisterService(&_ReportErrorsService_serviceDesc, srv) +} + +func _ReportErrorsService_ReportErrorEvent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportErrorEventRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReportErrorsServiceServer).ReportErrorEvent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReportErrorsServiceServer).ReportErrorEvent(ctx, req.(*ReportErrorEventRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ReportErrorsService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + HandlerType: (*ReportErrorsServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ReportErrorEvent", + Handler: _ReportErrorsService_ReportErrorEvent_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto", +} + +func init() { + proto.RegisterFile("google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto", fileDescriptor_report_errors_service_e8d1154fe6c98b5b) +} + +var fileDescriptor_report_errors_service_e8d1154fe6c98b5b = []byte{ + // 490 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0xcd, 0x8a, 0x13, 0x41, + 0x10, 0xc7, 0x99, 0xf8, 0xb1, 0x6c, 0x47, 0x54, 0xda, 0x83, 0xc3, 0x20, 0xb8, 0xc6, 0xcb, 0xa2, + 0x30, 0x6d, 0xe2, 0xc5, 0xec, 0x22, 0x0b, 0x59, 0xc3, 0xde, 0x64, 0x99, 0xd5, 0x3d, 0x48, 0x70, + 0xe8, 0x4c, 0xca, 0x61, 0x24, 0xd3, 0x35, 0x76, 0x77, 0x82, 0x20, 0x5e, 0x7c, 0x85, 0x7d, 0x05, + 0x4f, 0x3e, 0x8a, 0x57, 0x5f, 0xc0, 0x83, 0x0f, 0xa1, 0x37, 0xe9, 0xaf, 0x25, 0x6b, 0x72, 0x70, + 0xf4, 0x58, 0xd3, 0x55, 0xbf, 0xff, 0xbf, 0x3e, 0x86, 0x1c, 0x95, 0x88, 0xe5, 0x1c, 0xd8, 0x0c, + 0x96, 0x1a, 0x71, 0xae, 0x58, 0x31, 0xc7, 0xc5, 0x0c, 0xa4, 0x44, 0x29, 0xa1, 0x41, 0xa9, 0x2b, + 0x51, 0xb2, 0x65, 0x7f, 0x0a, 0x9a, 0xf7, 0x99, 0xfb, 0x92, 0xdb, 0x57, 0x95, 0x2b, 0x90, 0xcb, + 0xaa, 0x80, 0xb4, 0x91, 0xa8, 0x91, 0x3e, 0x74, 0xa0, 0x34, 0x80, 0xd2, 0x0d, 0xa0, 0xd4, 0x83, + 0x92, 0x3b, 0x5e, 0x95, 0x37, 0x15, 0xe3, 0x42, 0xa0, 0xe6, 0xba, 0x42, 0xa1, 0x1c, 0x2a, 0x79, + 0xd2, 0xc6, 0x53, 0x81, 0x75, 0x8d, 0xc2, 0x57, 0xde, 0xf5, 0x95, 0x36, 0x9a, 0x2e, 0xde, 0x30, + 0x5d, 0xd5, 0xa0, 0x34, 0xaf, 0x1b, 0x97, 0xd0, 0x3b, 0x8b, 0xc8, 0xed, 0xcc, 0x32, 0xc6, 0x06, + 0x37, 0x5e, 0x82, 0xd0, 0x19, 0xbc, 0x5b, 0x80, 0xd2, 0xf4, 0x1e, 0xb9, 0xd6, 0x48, 0x7c, 0x0b, + 0x85, 0xce, 0x05, 0xaf, 0x21, 0x8e, 0x76, 0xa2, 0xdd, 0xed, 0xac, 0xeb, 0xbf, 0x3d, 0xe7, 0x35, + 0xd0, 0x97, 0xe4, 0x0a, 0x98, 0x92, 0xb8, 0xb3, 0x13, 0xed, 0x76, 0x07, 0x07, 0x69, 0x8b, 0xa6, + 0x53, 0xa7, 0x0b, 0xb3, 0x15, 0x65, 0x47, 0xeb, 0x25, 0x24, 0x5e, 0x37, 0xa5, 0x1a, 0x14, 0x0a, + 0x7a, 0x9f, 0x3b, 0x84, 0xae, 0x57, 0xd2, 0x21, 0x21, 0xb6, 0x36, 0x37, 0x1d, 0x5a, 0xab, 0xdd, + 0x41, 0x12, 0xec, 0x84, 0xf6, 0xd3, 0x17, 0xa1, 0xfd, 0x6c, 0xdb, 0x66, 0x9b, 0x98, 0xce, 0xc8, + 0x0d, 0xbf, 0xba, 0xbc, 0x40, 0xa1, 0xe1, 
0x7d, 0x68, 0x67, 0xbf, 0x55, 0x3b, 0x27, 0x8e, 0x71, + 0xe8, 0x10, 0xd9, 0x75, 0x75, 0x21, 0xa6, 0x31, 0xd9, 0xaa, 0x41, 0x29, 0x5e, 0x42, 0x7c, 0xc9, + 0x0e, 0x32, 0x84, 0xf4, 0x84, 0x6c, 0x05, 0xdd, 0xcb, 0x56, 0x77, 0xd8, 0x4a, 0xd7, 0x0e, 0x21, + 0xa8, 0x06, 0xd2, 0xe0, 0x67, 0x44, 0x6e, 0xad, 0xcc, 0x50, 0x79, 0x77, 0xf4, 0x7b, 0x44, 0x6e, + 0xfe, 0x39, 0x5b, 0xfa, 0xec, 0x1f, 0xf6, 0xb6, 0x76, 0x2f, 0xc9, 0xf8, 0x3f, 0x29, 0x7e, 0xc1, + 0x07, 0x9f, 0xbe, 0xfd, 0x38, 0xeb, 0x0c, 0x7b, 0x8f, 0xce, 0x4f, 0xfa, 0xc3, 0xea, 0x19, 0x3e, + 0xf5, 0x81, 0x62, 0x0f, 0x3e, 0x32, 0xbb, 0x44, 0xb5, 0xe7, 0xe8, 0x7b, 0xee, 0x7a, 0x46, 0xbf, + 0x22, 0x62, 0xfe, 0x82, 0x36, 0x6e, 0x46, 0xf1, 0x86, 0x59, 0x1d, 0x9b, 0xab, 0x39, 0x8e, 0x5e, + 0xbd, 0xf6, 0xa0, 0x12, 0xe7, 0x5c, 0x94, 0x29, 0xca, 0x92, 0x95, 0x20, 0xec, 0x4d, 0x31, 0xf7, + 0xc4, 0x9b, 0x4a, 0xfd, 0xd5, 0xdf, 0xb9, 0xbf, 0xe1, 0xed, 0x4b, 0xe7, 0xfe, 0x91, 0x13, 0x38, + 0x34, 0x8f, 0x6e, 0x9f, 0xd9, 0xb9, 0xc3, 0xd3, 0xfe, 0xc8, 0x54, 0x7e, 0x0d, 0x59, 0x13, 0x9b, + 0x35, 0xb9, 0x98, 0x35, 0x39, 0x75, 0xfc, 0xe9, 0x55, 0x6b, 0xeb, 0xf1, 0xef, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x2c, 0xd1, 0x8e, 0x76, 0xc7, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2/profiler.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2/profiler.pb.go new file mode 100644 index 0000000..73a1653 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2/profiler.pb.go @@ -0,0 +1,667 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/cloudprofiler/v2/profiler.proto + +package cloudprofiler // import "google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// ProfileType is type of profiling data. +// NOTE: the enumeration member names are used (in lowercase) as unique string +// identifiers of profile types, so they must not be renamed. +type ProfileType int32 + +const ( + // Unspecified profile type. + ProfileType_PROFILE_TYPE_UNSPECIFIED ProfileType = 0 + // Thread CPU time sampling. + ProfileType_CPU ProfileType = 1 + // Wallclock time sampling. More expensive as stops all threads. + ProfileType_WALL ProfileType = 2 + // In-use heap profile. Represents a snapshot of the allocations that are + // live at the time of the profiling. + ProfileType_HEAP ProfileType = 3 + // Single-shot collection of all thread stacks. + ProfileType_THREADS ProfileType = 4 + // Synchronization contention profile. + ProfileType_CONTENTION ProfileType = 5 + // Peak heap profile. + ProfileType_PEAK_HEAP ProfileType = 6 + // Heap allocation profile. 
It represents the aggregation of all allocations + // made over the duration of the profile. All allocations are included, + // including those that might have been freed by the end of the profiling + // interval. The profile is in particular useful for garbage collecting + // languages to understand which parts of the code create most of the garbage + // collection pressure to see if those can be optimized. + ProfileType_HEAP_ALLOC ProfileType = 7 +) + +var ProfileType_name = map[int32]string{ + 0: "PROFILE_TYPE_UNSPECIFIED", + 1: "CPU", + 2: "WALL", + 3: "HEAP", + 4: "THREADS", + 5: "CONTENTION", + 6: "PEAK_HEAP", + 7: "HEAP_ALLOC", +} +var ProfileType_value = map[string]int32{ + "PROFILE_TYPE_UNSPECIFIED": 0, + "CPU": 1, + "WALL": 2, + "HEAP": 3, + "THREADS": 4, + "CONTENTION": 5, + "PEAK_HEAP": 6, + "HEAP_ALLOC": 7, +} + +func (x ProfileType) String() string { + return proto.EnumName(ProfileType_name, int32(x)) +} +func (ProfileType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{0} +} + +// CreateProfileRequest describes a profile resource online creation request. +// The deployment field must be populated. The profile_type specifies the list +// of profile types supported by the agent. The creation call will hang until a +// profile of one of these types needs to be collected. +type CreateProfileRequest struct { + // Parent project to create the profile in. + Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + // Deployment details. + Deployment *Deployment `protobuf:"bytes,1,opt,name=deployment,proto3" json:"deployment,omitempty"` + // One or more profile types that the agent is capable of providing. + ProfileType []ProfileType `protobuf:"varint,2,rep,packed,name=profile_type,json=profileType,proto3,enum=google.devtools.cloudprofiler.v2.ProfileType" json:"profile_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateProfileRequest) Reset() { *m = CreateProfileRequest{} } +func (m *CreateProfileRequest) String() string { return proto.CompactTextString(m) } +func (*CreateProfileRequest) ProtoMessage() {} +func (*CreateProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{0} +} +func (m *CreateProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateProfileRequest.Unmarshal(m, b) +} +func (m *CreateProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateProfileRequest.Marshal(b, m, deterministic) +} +func (dst *CreateProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateProfileRequest.Merge(dst, src) +} +func (m *CreateProfileRequest) XXX_Size() int { + return xxx_messageInfo_CreateProfileRequest.Size(m) +} +func (m *CreateProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateProfileRequest proto.InternalMessageInfo + +func (m *CreateProfileRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateProfileRequest) GetDeployment() *Deployment { + if m != nil { + return m.Deployment + } + return nil +} + +func (m *CreateProfileRequest) GetProfileType() []ProfileType { + if m != nil { + return m.ProfileType + } + return nil +} + +// CreateOfflineProfileRequest describes a profile resource offline creation +// request. Profile field must be set. 
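The blocking behaviour of CreateProfile described above is easier to see in a sketch: the agent offers the profile types it can collect, and the call hangs until the backend wants one of them. This assumes an authenticated *grpc.ClientConn and the ProfilerService client generated further down in this file; the target name and offered types are placeholders.

package example

import (
	"context"

	pb "google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2"
	"google.golang.org/grpc"
)

// waitForProfile long-polls until the server asks this agent to collect a
// profile of one of the offered types.
func waitForProfile(ctx context.Context, conn *grpc.ClientConn, projectID string) (*pb.Profile, error) {
	client := pb.NewProfilerServiceClient(conn) // client is defined later in this generated file
	return client.CreateProfile(ctx, &pb.CreateProfileRequest{
		Parent: "projects/" + projectID,
		Deployment: &pb.Deployment{
			ProjectId: projectID,
			Target:    "my-service", // hypothetical target name
		},
		// The server picks one of the types this agent says it can provide.
		ProfileType: []pb.ProfileType{pb.ProfileType_CPU, pb.ProfileType_HEAP},
	})
}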
+type CreateOfflineProfileRequest struct { + // Parent project to create the profile in. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Contents of the profile to create. + Profile *Profile `protobuf:"bytes,2,opt,name=profile,proto3" json:"profile,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateOfflineProfileRequest) Reset() { *m = CreateOfflineProfileRequest{} } +func (m *CreateOfflineProfileRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOfflineProfileRequest) ProtoMessage() {} +func (*CreateOfflineProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{1} +} +func (m *CreateOfflineProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOfflineProfileRequest.Unmarshal(m, b) +} +func (m *CreateOfflineProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOfflineProfileRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOfflineProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOfflineProfileRequest.Merge(dst, src) +} +func (m *CreateOfflineProfileRequest) XXX_Size() int { + return xxx_messageInfo_CreateOfflineProfileRequest.Size(m) +} +func (m *CreateOfflineProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOfflineProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOfflineProfileRequest proto.InternalMessageInfo + +func (m *CreateOfflineProfileRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOfflineProfileRequest) GetProfile() *Profile { + if m != nil { + return m.Profile + } + return nil +} + +// UpdateProfileRequest contains the profile to update. +type UpdateProfileRequest struct { + // Profile to update + Profile *Profile `protobuf:"bytes,1,opt,name=profile,proto3" json:"profile,omitempty"` + // Field mask used to specify the fields to be overwritten. Currently only + // profile_bytes and labels fields are supported by UpdateProfile, so only + // those fields can be specified in the mask. When no mask is provided, all + // fields are overwritten. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateProfileRequest) Reset() { *m = UpdateProfileRequest{} } +func (m *UpdateProfileRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateProfileRequest) ProtoMessage() {} +func (*UpdateProfileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{2} +} +func (m *UpdateProfileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateProfileRequest.Unmarshal(m, b) +} +func (m *UpdateProfileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateProfileRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateProfileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateProfileRequest.Merge(dst, src) +} +func (m *UpdateProfileRequest) XXX_Size() int { + return xxx_messageInfo_UpdateProfileRequest.Size(m) +} +func (m *UpdateProfileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateProfileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateProfileRequest proto.InternalMessageInfo + +func (m *UpdateProfileRequest) GetProfile() *Profile { + if m != nil { + return m.Profile + } + return nil +} + +func (m *UpdateProfileRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Profile resource. +type Profile struct { + // Output only. Opaque, server-assigned, unique ID for this profile. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Type of profile. + // For offline mode, this must be specified when creating the profile. For + // online mode it is assigned and returned by the server. + ProfileType ProfileType `protobuf:"varint,2,opt,name=profile_type,json=profileType,proto3,enum=google.devtools.cloudprofiler.v2.ProfileType" json:"profile_type,omitempty"` + // Deployment this profile corresponds to. + Deployment *Deployment `protobuf:"bytes,3,opt,name=deployment,proto3" json:"deployment,omitempty"` + // Duration of the profiling session. + // Input (for the offline mode) or output (for the online mode). + // The field represents requested profiling duration. It may slightly differ + // from the effective profiling duration, which is recorded in the profile + // data, in case the profiling can't be stopped immediately (e.g. in case + // stopping the profiling is handled asynchronously). + Duration *duration.Duration `protobuf:"bytes,4,opt,name=duration,proto3" json:"duration,omitempty"` + // Input only. Profile bytes, as a gzip compressed serialized proto, the + // format is https://github.com/google/pprof/blob/master/proto/profile.proto. + ProfileBytes []byte `protobuf:"bytes,5,opt,name=profile_bytes,json=profileBytes,proto3" json:"profile_bytes,omitempty"` + // Input only. Labels associated to this specific profile. These labels will + // get merged with the deployment labels for the final data set. See + // documentation on deployment labels for validation rules and limits. 
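As a rough illustration of how the update_mask just described is meant to be used, here is a minimal sketch that builds an UpdateProfileRequest restricted to the two updatable paths, profile_bytes and labels. The profile name, the byte slice, and the label value are placeholders, not values from this patch; the import paths simply mirror the ones this generated file already uses.

package main

import (
	"fmt"

	cloudprofiler "google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

func main() {
	// Hypothetical gzip-compressed pprof payload collected by an agent.
	profileBytes := []byte{0x1f, 0x8b}

	req := &cloudprofiler.UpdateProfileRequest{
		Profile: &cloudprofiler.Profile{
			// Server-assigned name returned by an earlier CreateProfile call.
			Name:         "projects/my-project/profiles/abc123",
			ProfileBytes: profileBytes,
			Labels:       map[string]string{"instance": "gce-1234"},
		},
		// Only profile_bytes and labels can be updated, so restrict the mask
		// to those two paths; with no mask, all fields would be overwritten.
		UpdateMask: &field_mask.FieldMask{
			Paths: []string{"profile_bytes", "labels"},
		},
	}
	fmt.Println(req.GetUpdateMask().GetPaths())
}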
+ Labels map[string]string `protobuf:"bytes,6,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Profile) Reset() { *m = Profile{} } +func (m *Profile) String() string { return proto.CompactTextString(m) } +func (*Profile) ProtoMessage() {} +func (*Profile) Descriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{3} +} +func (m *Profile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Profile.Unmarshal(m, b) +} +func (m *Profile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Profile.Marshal(b, m, deterministic) +} +func (dst *Profile) XXX_Merge(src proto.Message) { + xxx_messageInfo_Profile.Merge(dst, src) +} +func (m *Profile) XXX_Size() int { + return xxx_messageInfo_Profile.Size(m) +} +func (m *Profile) XXX_DiscardUnknown() { + xxx_messageInfo_Profile.DiscardUnknown(m) +} + +var xxx_messageInfo_Profile proto.InternalMessageInfo + +func (m *Profile) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Profile) GetProfileType() ProfileType { + if m != nil { + return m.ProfileType + } + return ProfileType_PROFILE_TYPE_UNSPECIFIED +} + +func (m *Profile) GetDeployment() *Deployment { + if m != nil { + return m.Deployment + } + return nil +} + +func (m *Profile) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +func (m *Profile) GetProfileBytes() []byte { + if m != nil { + return m.ProfileBytes + } + return nil +} + +func (m *Profile) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// Deployment contains the deployment identification information. +type Deployment struct { + // Project ID is the ID of a cloud project. + // Validation regex: `^[a-z][-a-z0-9:.]{4,61}[a-z0-9]$`. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Target is the service name used to group related deployments: + // * Service name for GAE Flex / Standard. + // * Cluster and container name for GKE. + // * User-specified string for direct GCE profiling (e.g. Java). + // * Job name for Dataflow. + // Validation regex: `^[a-z]([-a-z0-9_.]{0,253}[a-z0-9])?$`. + Target string `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"` + // Labels identify the deployment within the user universe and same target. + // Validation regex for label names: `^[a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?$`. + // Value for an individual label must be <= 512 bytes, the total + // size of all label names and values must be <= 1024 bytes. + // + // Label named "language" can be used to record the programming language of + // the profiled deployment. The standard choices for the value include "java", + // "go", "python", "ruby", "nodejs", "php", "dotnet". + // + // For deployments running on Google Cloud Platform, "zone" or "region" label + // should be present describing the deployment location. An example of a zone + // is "us-central1-a", an example of a region is "us-central1" or + // "us-central". 
+ Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployment) Reset() { *m = Deployment{} } +func (m *Deployment) String() string { return proto.CompactTextString(m) } +func (*Deployment) ProtoMessage() {} +func (*Deployment) Descriptor() ([]byte, []int) { + return fileDescriptor_profiler_852d4daf34af2ab9, []int{4} +} +func (m *Deployment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployment.Unmarshal(m, b) +} +func (m *Deployment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployment.Marshal(b, m, deterministic) +} +func (dst *Deployment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployment.Merge(dst, src) +} +func (m *Deployment) XXX_Size() int { + return xxx_messageInfo_Deployment.Size(m) +} +func (m *Deployment) XXX_DiscardUnknown() { + xxx_messageInfo_Deployment.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployment proto.InternalMessageInfo + +func (m *Deployment) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Deployment) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *Deployment) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func init() { + proto.RegisterType((*CreateProfileRequest)(nil), "google.devtools.cloudprofiler.v2.CreateProfileRequest") + proto.RegisterType((*CreateOfflineProfileRequest)(nil), "google.devtools.cloudprofiler.v2.CreateOfflineProfileRequest") + proto.RegisterType((*UpdateProfileRequest)(nil), "google.devtools.cloudprofiler.v2.UpdateProfileRequest") + proto.RegisterType((*Profile)(nil), "google.devtools.cloudprofiler.v2.Profile") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.cloudprofiler.v2.Profile.LabelsEntry") + proto.RegisterType((*Deployment)(nil), "google.devtools.cloudprofiler.v2.Deployment") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.cloudprofiler.v2.Deployment.LabelsEntry") + proto.RegisterEnum("google.devtools.cloudprofiler.v2.ProfileType", ProfileType_name, ProfileType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ProfilerServiceClient is the client API for ProfilerService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ProfilerServiceClient interface { + // CreateProfile creates a new profile resource in the online mode. + // + // The server ensures that the new profiles are created at a constant rate per + // deployment, so the creation request may hang for some time until the next + // profile session is available. + // + // The request may fail with ABORTED error if the creation is not available + // within ~1m, the response will indicate the duration of the backoff the + // client should take before attempting creating a profile again. The backoff + // duration is returned in google.rpc.RetryInfo extension on the response + // status. 
To a gRPC client, the extension will be return as a + // binary-serialized proto in the trailing metadata item named + // "google.rpc.retryinfo-bin". + CreateProfile(ctx context.Context, in *CreateProfileRequest, opts ...grpc.CallOption) (*Profile, error) + // CreateOfflineProfile creates a new profile resource in the offline mode. + // The client provides the profile to create along with the profile bytes, the + // server records it. + CreateOfflineProfile(ctx context.Context, in *CreateOfflineProfileRequest, opts ...grpc.CallOption) (*Profile, error) + // UpdateProfile updates the profile bytes and labels on the profile resource + // created in the online mode. Updating the bytes for profiles created in the + // offline mode is currently not supported: the profile content must be + // provided at the time of the profile creation. + UpdateProfile(ctx context.Context, in *UpdateProfileRequest, opts ...grpc.CallOption) (*Profile, error) +} + +type profilerServiceClient struct { + cc *grpc.ClientConn +} + +func NewProfilerServiceClient(cc *grpc.ClientConn) ProfilerServiceClient { + return &profilerServiceClient{cc} +} + +func (c *profilerServiceClient) CreateProfile(ctx context.Context, in *CreateProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.devtools.cloudprofiler.v2.ProfilerService/CreateProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profilerServiceClient) CreateOfflineProfile(ctx context.Context, in *CreateOfflineProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.devtools.cloudprofiler.v2.ProfilerService/CreateOfflineProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *profilerServiceClient) UpdateProfile(ctx context.Context, in *UpdateProfileRequest, opts ...grpc.CallOption) (*Profile, error) { + out := new(Profile) + err := c.cc.Invoke(ctx, "/google.devtools.cloudprofiler.v2.ProfilerService/UpdateProfile", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ProfilerServiceServer is the server API for ProfilerService service. +type ProfilerServiceServer interface { + // CreateProfile creates a new profile resource in the online mode. + // + // The server ensures that the new profiles are created at a constant rate per + // deployment, so the creation request may hang for some time until the next + // profile session is available. + // + // The request may fail with ABORTED error if the creation is not available + // within ~1m, the response will indicate the duration of the backoff the + // client should take before attempting creating a profile again. The backoff + // duration is returned in google.rpc.RetryInfo extension on the response + // status. To a gRPC client, the extension will be return as a + // binary-serialized proto in the trailing metadata item named + // "google.rpc.retryinfo-bin". + CreateProfile(context.Context, *CreateProfileRequest) (*Profile, error) + // CreateOfflineProfile creates a new profile resource in the offline mode. + // The client provides the profile to create along with the profile bytes, the + // server records it. + CreateOfflineProfile(context.Context, *CreateOfflineProfileRequest) (*Profile, error) + // UpdateProfile updates the profile bytes and labels on the profile resource + // created in the online mode. 
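To make the online flow described above concrete, the sketch below shows a hedged version of an agent loop: it declares the profile types it can collect, blocks in CreateProfile until the server schedules a session, and reports the collected bytes with UpdateProfile. The endpoint, project ID, service name, and the collect stub are placeholders; real agents also need TLS and OAuth credentials on the connection and should honor the ABORTED/RetryInfo backoff instead of the fixed sleep used here.

package main

import (
	"log"
	"time"

	"golang.org/x/net/context"
	cloudprofiler "google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
	"google.golang.org/grpc"
)

// collect is a stand-in for real profile collection (e.g. via runtime/pprof).
func collect(t cloudprofiler.ProfileType, d time.Duration) []byte { return nil }

func main() {
	// Placeholder connection setup; production use requires TLS and credentials.
	conn, err := grpc.Dial("cloudprofiler.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := cloudprofiler.NewProfilerServiceClient(conn)

	req := &cloudprofiler.CreateProfileRequest{
		Parent: "projects/my-project",
		Deployment: &cloudprofiler.Deployment{
			ProjectId: "my-project",
			Target:    "my-service",
			Labels:    map[string]string{"language": "go", "zone": "us-central1-a"},
		},
		// Profile types this agent is able to collect.
		ProfileType: []cloudprofiler.ProfileType{
			cloudprofiler.ProfileType_CPU,
			cloudprofiler.ProfileType_HEAP,
		},
	}

	for {
		// Blocks until the server wants a profile; on error a real agent
		// would inspect google.rpc.RetryInfo before retrying.
		p, err := client.CreateProfile(context.Background(), req)
		if err != nil {
			time.Sleep(time.Minute)
			continue
		}
		p.ProfileBytes = collect(p.GetProfileType(), 10*time.Second)
		_, err = client.UpdateProfile(context.Background(), &cloudprofiler.UpdateProfileRequest{
			Profile:    p,
			UpdateMask: &field_mask.FieldMask{Paths: []string{"profile_bytes"}},
		})
		if err != nil {
			log.Print(err)
		}
	}
}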
Updating the bytes for profiles created in the + // offline mode is currently not supported: the profile content must be + // provided at the time of the profile creation. + UpdateProfile(context.Context, *UpdateProfileRequest) (*Profile, error) +} + +func RegisterProfilerServiceServer(s *grpc.Server, srv ProfilerServiceServer) { + s.RegisterService(&_ProfilerService_serviceDesc, srv) +} + +func _ProfilerService_CreateProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfilerServiceServer).CreateProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudprofiler.v2.ProfilerService/CreateProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfilerServiceServer).CreateProfile(ctx, req.(*CreateProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfilerService_CreateOfflineProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOfflineProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfilerServiceServer).CreateOfflineProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudprofiler.v2.ProfilerService/CreateOfflineProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfilerServiceServer).CreateOfflineProfile(ctx, req.(*CreateOfflineProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ProfilerService_UpdateProfile_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateProfileRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProfilerServiceServer).UpdateProfile(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudprofiler.v2.ProfilerService/UpdateProfile", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProfilerServiceServer).UpdateProfile(ctx, req.(*UpdateProfileRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ProfilerService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.cloudprofiler.v2.ProfilerService", + HandlerType: (*ProfilerServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateProfile", + Handler: _ProfilerService_CreateProfile_Handler, + }, + { + MethodName: "CreateOfflineProfile", + Handler: _ProfilerService_CreateOfflineProfile_Handler, + }, + { + MethodName: "UpdateProfile", + Handler: _ProfilerService_UpdateProfile_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/cloudprofiler/v2/profiler.proto", +} + +func init() { + proto.RegisterFile("google/devtools/cloudprofiler/v2/profiler.proto", fileDescriptor_profiler_852d4daf34af2ab9) +} + +var fileDescriptor_profiler_852d4daf34af2ab9 = []byte{ + // 786 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5f, 0x6f, 0xda, 0x56, + 0x14, 0xdf, 0xc5, 0x04, 0xc2, 0x71, 0xc8, 0xac, 0xab, 0x68, 0x62, 0x2c, 
0xdb, 0x2c, 0x4f, 0x93, + 0x18, 0xdb, 0x6c, 0xc9, 0x51, 0xa6, 0xfc, 0x51, 0x34, 0x11, 0x70, 0x14, 0x34, 0x02, 0x96, 0x43, + 0x34, 0x6d, 0x2f, 0xc8, 0xe0, 0x0b, 0x72, 0x63, 0x6c, 0xd7, 0x36, 0x48, 0xb4, 0xca, 0x4b, 0xd5, + 0x6f, 0xd0, 0x97, 0xbe, 0xf5, 0xa1, 0x0f, 0x7d, 0xea, 0x87, 0xe8, 0x17, 0xa8, 0x2a, 0xf5, 0x2b, + 0xf4, 0x83, 0x54, 0xb6, 0xaf, 0x09, 0x24, 0x44, 0x90, 0xa6, 0x6f, 0xe7, 0xde, 0x7b, 0xce, 0xcf, + 0xbf, 0xdf, 0xb9, 0xc7, 0x3f, 0x1b, 0xa4, 0x81, 0xe3, 0x0c, 0x2c, 0x22, 0x19, 0x64, 0x1c, 0x38, + 0x8e, 0xe5, 0x4b, 0x3d, 0xcb, 0x19, 0x19, 0xae, 0xe7, 0xf4, 0x4d, 0x8b, 0x78, 0xd2, 0x58, 0x96, + 0x92, 0x58, 0x74, 0x3d, 0x27, 0x70, 0x30, 0x1f, 0x17, 0x88, 0x49, 0x81, 0x38, 0x57, 0x20, 0x8e, + 0xe5, 0xe2, 0x36, 0x85, 0xd4, 0x5d, 0x53, 0xd2, 0x6d, 0xdb, 0x09, 0xf4, 0xc0, 0x74, 0x6c, 0x3f, + 0xae, 0x2f, 0xfe, 0x44, 0x4f, 0xa3, 0x55, 0x77, 0xd4, 0x97, 0x8c, 0x91, 0x17, 0x25, 0xd0, 0x73, + 0xfe, 0xe6, 0x79, 0xdf, 0x24, 0x96, 0xd1, 0x19, 0xea, 0xfe, 0x25, 0xcd, 0xf8, 0xf9, 0x66, 0x46, + 0x60, 0x0e, 0x89, 0x1f, 0xe8, 0x43, 0x37, 0x4e, 0x10, 0xde, 0x23, 0xd8, 0xaa, 0x7a, 0x44, 0x0f, + 0x88, 0x1a, 0xd3, 0xd2, 0xc8, 0xe3, 0x11, 0xf1, 0x03, 0xfc, 0x1d, 0x64, 0x5c, 0xdd, 0x23, 0x76, + 0x50, 0x48, 0xf3, 0xa8, 0x94, 0xd3, 0xe8, 0x0a, 0x37, 0x00, 0x0c, 0xe2, 0x5a, 0xce, 0x64, 0x18, + 0x9e, 0x21, 0x1e, 0x95, 0x58, 0xf9, 0x0f, 0x71, 0x99, 0x50, 0xb1, 0x36, 0xad, 0xd1, 0x66, 0xea, + 0xb1, 0x0a, 0x1b, 0x34, 0xab, 0x13, 0x4c, 0x5c, 0x52, 0x48, 0xf1, 0x4c, 0x69, 0x53, 0xfe, 0x73, + 0x39, 0x1e, 0x65, 0xdb, 0x9e, 0xb8, 0x44, 0x63, 0xdd, 0xeb, 0x85, 0xf0, 0x04, 0x7e, 0x88, 0xf5, + 0xb4, 0xfa, 0x7d, 0xcb, 0xb4, 0xef, 0x96, 0x85, 0xe6, 0x64, 0x55, 0x21, 0x4b, 0x51, 0x0a, 0xa9, + 0x48, 0xd3, 0x6f, 0x2b, 0x73, 0xd0, 0x92, 0x4a, 0xe1, 0x25, 0x82, 0xad, 0x0b, 0xd7, 0xb8, 0xdd, + 0xcc, 0x19, 0x74, 0xf4, 0xa5, 0xe8, 0xf8, 0x10, 0xd8, 0x51, 0x04, 0x1e, 0x5d, 0x30, 0xa5, 0x59, + 0x4c, 0x80, 0x92, 0x1b, 0x16, 0x4f, 0xc2, 0x19, 0x38, 0xd3, 0xfd, 0x4b, 0x0d, 0xe2, 0xf4, 0x30, + 0x16, 0x5e, 0x33, 0x90, 0xa5, 0x88, 0x18, 0x43, 0xda, 0xd6, 0x87, 0x84, 0x76, 0x20, 0x8a, 0x17, + 0x5c, 0x04, 0x7a, 0xd8, 0x45, 0xdc, 0x18, 0x14, 0xe6, 0x81, 0x83, 0xb2, 0x0b, 0xeb, 0xc9, 0xf0, + 0x47, 0x03, 0xc9, 0xca, 0xdf, 0xdf, 0x52, 0x5e, 0xa3, 0x09, 0xda, 0x34, 0x15, 0xff, 0x02, 0xf9, + 0x44, 0x56, 0x77, 0x12, 0x10, 0xbf, 0xb0, 0xc6, 0xa3, 0xd2, 0x86, 0x96, 0x68, 0x3d, 0x0e, 0xf7, + 0xf0, 0x19, 0x64, 0x2c, 0xbd, 0x4b, 0x2c, 0xbf, 0x90, 0xe1, 0x99, 0x12, 0x2b, 0xef, 0xae, 0xac, + 0x5a, 0x6c, 0x44, 0x75, 0x8a, 0x1d, 0x78, 0x13, 0x8d, 0x82, 0x14, 0xf7, 0x81, 0x9d, 0xd9, 0xc6, + 0x1c, 0x30, 0x97, 0x64, 0x42, 0x9b, 0x1d, 0x86, 0x78, 0x0b, 0xd6, 0xc6, 0xba, 0x35, 0x8a, 0x9b, + 0x9c, 0xd3, 0xe2, 0xc5, 0x41, 0x6a, 0x0f, 0x09, 0x1f, 0x10, 0xc0, 0x75, 0x03, 0xf0, 0x8f, 0x00, + 0xae, 0xe7, 0x3c, 0x22, 0xbd, 0xa0, 0x63, 0x1a, 0x14, 0x21, 0x47, 0x77, 0xea, 0x46, 0x38, 0xcb, + 0x81, 0xee, 0x0d, 0x48, 0x40, 0x81, 0xe8, 0x0a, 0xab, 0x53, 0x3d, 0x4c, 0xa4, 0x67, 0xef, 0x3e, + 0x5d, 0xff, 0xca, 0x92, 0xca, 0xcf, 0x11, 0xb0, 0x33, 0x33, 0x82, 0xb7, 0xa1, 0xa0, 0x6a, 0xad, + 0x93, 0x7a, 0x43, 0xe9, 0xb4, 0xff, 0x53, 0x95, 0xce, 0x45, 0xf3, 0x5c, 0x55, 0xaa, 0xf5, 0x93, + 0xba, 0x52, 0xe3, 0xbe, 0xc1, 0x59, 0x60, 0xaa, 0xea, 0x05, 0x87, 0xf0, 0x3a, 0xa4, 0xff, 0xad, + 0x34, 0x1a, 0x5c, 0x2a, 0x8c, 0x4e, 0x95, 0x8a, 0xca, 0x31, 0x98, 0x85, 0x6c, 0xfb, 0x54, 0x53, + 0x2a, 0xb5, 0x73, 0x2e, 0x8d, 0x37, 0x01, 0xaa, 0xad, 0x66, 0x5b, 0x69, 0xb6, 0xeb, 0xad, 0x26, + 0xb7, 0x86, 0xf3, 0x90, 0x53, 0x95, 0xca, 0x3f, 0x9d, 0x28, 0x37, 0x13, 0x1e, 0x87, 0x51, 0xa7, + 
0xd2, 0x68, 0xb4, 0xaa, 0x5c, 0x56, 0x7e, 0x93, 0x86, 0x6f, 0x29, 0x0d, 0xef, 0x9c, 0x78, 0x63, + 0xb3, 0x47, 0xf0, 0x2b, 0x04, 0xf9, 0x39, 0xef, 0xc3, 0x7f, 0x2d, 0xef, 0xd4, 0x22, 0xb3, 0x2c, + 0xae, 0xfe, 0x3a, 0x0b, 0xbf, 0x3f, 0xfb, 0xf8, 0xe9, 0x45, 0xea, 0x57, 0x81, 0x0f, 0xbf, 0x17, + 0x4f, 0x63, 0xf7, 0x39, 0xa2, 0x77, 0xea, 0x4b, 0xe5, 0xab, 0xe4, 0x1b, 0xe2, 0x1f, 0xa0, 0x32, + 0x7e, 0x37, 0x75, 0xe7, 0x79, 0x37, 0xc3, 0x47, 0xab, 0x12, 0x5d, 0xe8, 0x82, 0xf7, 0xe1, 0xfb, + 0x77, 0xc4, 0x77, 0x5f, 0x10, 0x97, 0xf2, 0xed, 0xcd, 0x3e, 0xf0, 0x60, 0x6a, 0x5b, 0x6f, 0x11, + 0xe4, 0xe7, 0x4c, 0x71, 0x95, 0x2e, 0x2f, 0x72, 0xd1, 0xfb, 0xb0, 0xde, 0x8f, 0x58, 0xef, 0xc8, + 0xa5, 0x98, 0x35, 0x7d, 0x5b, 0x43, 0xa7, 0x9b, 0xe1, 0x3e, 0xa5, 0x2e, 0x95, 0xaf, 0xa6, 0x7c, + 0x8f, 0x5b, 0xff, 0x9f, 0xd1, 0xc7, 0x0c, 0x1c, 0x4b, 0xb7, 0x07, 0xa2, 0xe3, 0x0d, 0xa4, 0x01, + 0xb1, 0x23, 0x9b, 0xa1, 0xbf, 0x00, 0xba, 0x6b, 0xfa, 0x77, 0xff, 0x06, 0x1c, 0xce, 0x6d, 0x74, + 0x33, 0x51, 0xe5, 0xce, 0xe7, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0c, 0x08, 0x2b, 0x60, 0x3f, 0x08, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v1/trace.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v1/trace.pb.go new file mode 100644 index 0000000..0f5afcf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v1/trace.pb.go @@ -0,0 +1,870 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/cloudtrace/v1/trace.proto + +package cloudtrace // import "google.golang.org/genproto/googleapis/devtools/cloudtrace/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type of span. Can be used to specify additional relationships between spans +// in addition to a parent/child relationship. +type TraceSpan_SpanKind int32 + +const ( + // Unspecified. + TraceSpan_SPAN_KIND_UNSPECIFIED TraceSpan_SpanKind = 0 + // Indicates that the span covers server-side handling of an RPC or other + // remote network request. + TraceSpan_RPC_SERVER TraceSpan_SpanKind = 1 + // Indicates that the span covers the client-side wrapper around an RPC or + // other remote request. + TraceSpan_RPC_CLIENT TraceSpan_SpanKind = 2 +) + +var TraceSpan_SpanKind_name = map[int32]string{ + 0: "SPAN_KIND_UNSPECIFIED", + 1: "RPC_SERVER", + 2: "RPC_CLIENT", +} +var TraceSpan_SpanKind_value = map[string]int32{ + "SPAN_KIND_UNSPECIFIED": 0, + "RPC_SERVER": 1, + "RPC_CLIENT": 2, +} + +func (x TraceSpan_SpanKind) String() string { + return proto.EnumName(TraceSpan_SpanKind_name, int32(x)) +} +func (TraceSpan_SpanKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{2, 0} +} + +// Type of data returned for traces in the list. 
+type ListTracesRequest_ViewType int32 + +const ( + // Default is `MINIMAL` if unspecified. + ListTracesRequest_VIEW_TYPE_UNSPECIFIED ListTracesRequest_ViewType = 0 + // Minimal view of the trace record that contains only the project + // and trace IDs. + ListTracesRequest_MINIMAL ListTracesRequest_ViewType = 1 + // Root span view of the trace record that returns the root spans along + // with the minimal trace data. + ListTracesRequest_ROOTSPAN ListTracesRequest_ViewType = 2 + // Complete view of the trace record that contains the actual trace data. + // This is equivalent to calling the REST `get` or RPC `GetTrace` method + // using the ID of each listed trace. + ListTracesRequest_COMPLETE ListTracesRequest_ViewType = 3 +) + +var ListTracesRequest_ViewType_name = map[int32]string{ + 0: "VIEW_TYPE_UNSPECIFIED", + 1: "MINIMAL", + 2: "ROOTSPAN", + 3: "COMPLETE", +} +var ListTracesRequest_ViewType_value = map[string]int32{ + "VIEW_TYPE_UNSPECIFIED": 0, + "MINIMAL": 1, + "ROOTSPAN": 2, + "COMPLETE": 3, +} + +func (x ListTracesRequest_ViewType) String() string { + return proto.EnumName(ListTracesRequest_ViewType_name, int32(x)) +} +func (ListTracesRequest_ViewType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{3, 0} +} + +// A trace describes how long it takes for an application to perform an +// operation. It consists of a set of spans, each of which represent a single +// timed event within the operation. +type Trace struct { + // Project ID of the Cloud project where the trace data is stored. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Globally unique identifier for the trace. This identifier is a 128-bit + // numeric value formatted as a 32-byte hex string. + TraceId string `protobuf:"bytes,2,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"` + // Collection of spans in the trace. + Spans []*TraceSpan `protobuf:"bytes,3,rep,name=spans,proto3" json:"spans,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Trace) Reset() { *m = Trace{} } +func (m *Trace) String() string { return proto.CompactTextString(m) } +func (*Trace) ProtoMessage() {} +func (*Trace) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{0} +} +func (m *Trace) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Trace.Unmarshal(m, b) +} +func (m *Trace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Trace.Marshal(b, m, deterministic) +} +func (dst *Trace) XXX_Merge(src proto.Message) { + xxx_messageInfo_Trace.Merge(dst, src) +} +func (m *Trace) XXX_Size() int { + return xxx_messageInfo_Trace.Size(m) +} +func (m *Trace) XXX_DiscardUnknown() { + xxx_messageInfo_Trace.DiscardUnknown(m) +} + +var xxx_messageInfo_Trace proto.InternalMessageInfo + +func (m *Trace) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Trace) GetTraceId() string { + if m != nil { + return m.TraceId + } + return "" +} + +func (m *Trace) GetSpans() []*TraceSpan { + if m != nil { + return m.Spans + } + return nil +} + +// List of new or updated traces. +type Traces struct { + // List of traces. 
+ Traces []*Trace `protobuf:"bytes,1,rep,name=traces,proto3" json:"traces,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Traces) Reset() { *m = Traces{} } +func (m *Traces) String() string { return proto.CompactTextString(m) } +func (*Traces) ProtoMessage() {} +func (*Traces) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{1} +} +func (m *Traces) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Traces.Unmarshal(m, b) +} +func (m *Traces) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Traces.Marshal(b, m, deterministic) +} +func (dst *Traces) XXX_Merge(src proto.Message) { + xxx_messageInfo_Traces.Merge(dst, src) +} +func (m *Traces) XXX_Size() int { + return xxx_messageInfo_Traces.Size(m) +} +func (m *Traces) XXX_DiscardUnknown() { + xxx_messageInfo_Traces.DiscardUnknown(m) +} + +var xxx_messageInfo_Traces proto.InternalMessageInfo + +func (m *Traces) GetTraces() []*Trace { + if m != nil { + return m.Traces + } + return nil +} + +// A span represents a single timed event within a trace. Spans can be nested +// and form a trace tree. Often, a trace contains a root span that describes the +// end-to-end latency of an operation and, optionally, one or more subspans for +// its suboperations. Spans do not need to be contiguous. There may be gaps +// between spans in a trace. +type TraceSpan struct { + // Identifier for the span. Must be a 64-bit integer other than 0 and + // unique within a trace. + SpanId uint64 `protobuf:"fixed64,1,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"` + // Distinguishes between spans generated in a particular context. For example, + // two spans with the same name may be distinguished using `RPC_CLIENT` + // and `RPC_SERVER` to identify queueing latency associated with the span. + Kind TraceSpan_SpanKind `protobuf:"varint,2,opt,name=kind,proto3,enum=google.devtools.cloudtrace.v1.TraceSpan_SpanKind" json:"kind,omitempty"` + // Name of the span. Must be less than 128 bytes. The span name is sanitized + // and displayed in the Stackdriver Trace tool in the + // {% dynamic print site_values.console_name %}. + // The name may be a method name or some other per-call site name. + // For the same executable and the same call point, a best practice is + // to use a consistent name, which makes it easier to correlate + // cross-trace spans. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // Start time of the span in nanoseconds from the UNIX epoch. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of the span in nanoseconds from the UNIX epoch. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // ID of the parent span, if any. Optional. + ParentSpanId uint64 `protobuf:"fixed64,6,opt,name=parent_span_id,json=parentSpanId,proto3" json:"parent_span_id,omitempty"` + // Collection of labels associated with the span. Label keys must be less than + // 128 bytes. Label values must be less than 16 kilobytes (10MB for + // `/stacktrace` values). + // + // Some predefined label keys exist, or you may create your own. When creating + // your own, we recommend the following formats: + // + // * `/category/product/key` for agents of well-known products (e.g. + // `/db/mongodb/read_size`). 
+ // * `short_host/path/key` for domain-specific keys (e.g. + // `foo.com/myproduct/bar`) + // + // Predefined labels include: + // + // * `/agent` + // * `/component` + // * `/error/message` + // * `/error/name` + // * `/http/client_city` + // * `/http/client_country` + // * `/http/client_protocol` + // * `/http/client_region` + // * `/http/host` + // * `/http/method` + // * `/http/path` + // * `/http/redirected_url` + // * `/http/request/size` + // * `/http/response/size` + // * `/http/route` + // * `/http/status_code` + // * `/http/url` + // * `/http/user_agent` + // * `/pid` + // * `/stacktrace` + // * `/tid` + Labels map[string]string `protobuf:"bytes,7,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TraceSpan) Reset() { *m = TraceSpan{} } +func (m *TraceSpan) String() string { return proto.CompactTextString(m) } +func (*TraceSpan) ProtoMessage() {} +func (*TraceSpan) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{2} +} +func (m *TraceSpan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TraceSpan.Unmarshal(m, b) +} +func (m *TraceSpan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TraceSpan.Marshal(b, m, deterministic) +} +func (dst *TraceSpan) XXX_Merge(src proto.Message) { + xxx_messageInfo_TraceSpan.Merge(dst, src) +} +func (m *TraceSpan) XXX_Size() int { + return xxx_messageInfo_TraceSpan.Size(m) +} +func (m *TraceSpan) XXX_DiscardUnknown() { + xxx_messageInfo_TraceSpan.DiscardUnknown(m) +} + +var xxx_messageInfo_TraceSpan proto.InternalMessageInfo + +func (m *TraceSpan) GetSpanId() uint64 { + if m != nil { + return m.SpanId + } + return 0 +} + +func (m *TraceSpan) GetKind() TraceSpan_SpanKind { + if m != nil { + return m.Kind + } + return TraceSpan_SPAN_KIND_UNSPECIFIED +} + +func (m *TraceSpan) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TraceSpan) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TraceSpan) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *TraceSpan) GetParentSpanId() uint64 { + if m != nil { + return m.ParentSpanId + } + return 0 +} + +func (m *TraceSpan) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// The request message for the `ListTraces` method. All fields are required +// unless specified. +type ListTracesRequest struct { + // ID of the Cloud project where the trace data is stored. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Type of data returned for traces in the list. Optional. Default is + // `MINIMAL`. + View ListTracesRequest_ViewType `protobuf:"varint,2,opt,name=view,proto3,enum=google.devtools.cloudtrace.v1.ListTracesRequest_ViewType" json:"view,omitempty"` + // Maximum number of traces to return. If not specified or <= 0, the + // implementation selects a reasonable value. The implementation may + // return fewer traces than the requested page size. Optional. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token identifying the page of results to return. 
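For orientation, here is a small sketch (with made-up IDs, names, and times) of how the Trace and TraceSpan messages defined above might be populated before being written back through the service: the 32-character hex trace ID and the non-zero span ID follow the formats the comments describe, and ptypes is used only because this vendored tree already carries github.com/golang/protobuf.

package main

import (
	"fmt"
	"log"
	"time"

	"github.com/golang/protobuf/ptypes"
	cloudtrace "google.golang.org/genproto/googleapis/devtools/cloudtrace/v1"
)

func main() {
	start := time.Now()
	end := start.Add(42 * time.Millisecond)

	startTS, err := ptypes.TimestampProto(start)
	if err != nil {
		log.Fatal(err)
	}
	endTS, err := ptypes.TimestampProto(end)
	if err != nil {
		log.Fatal(err)
	}

	trace := &cloudtrace.Trace{
		ProjectId: "my-project",
		// 128-bit identifier formatted as a 32-byte hex string (placeholder).
		TraceId: "0123456789abcdef0123456789abcdef",
		Spans: []*cloudtrace.TraceSpan{{
			SpanId:    1, // non-zero 64-bit integer, unique within the trace
			Kind:      cloudtrace.TraceSpan_RPC_SERVER,
			Name:      "/checkout",
			StartTime: startTS,
			EndTime:   endTS,
			Labels:    map[string]string{"/http/method": "POST"},
		}},
	}
	fmt.Println(len(trace.GetSpans()))
}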
If provided, use the + // value of the `next_page_token` field from a previous request. Optional. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Start of the time interval (inclusive) during which the trace data was + // collected from the application. + StartTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End of the time interval (inclusive) during which the trace data was + // collected from the application. + EndTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // An optional filter against labels for the request. + // + // By default, searches use prefix matching. To specify exact match, prepend + // a plus symbol (`+`) to the search term. + // Multiple terms are ANDed. Syntax: + // + // * `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root + // span starts with `NAME_PREFIX`. + // * `+root:NAME` or `+NAME`: Return traces where any root span's name is + // exactly `NAME`. + // * `span:NAME_PREFIX`: Return traces where any span starts with + // `NAME_PREFIX`. + // * `+span:NAME`: Return traces where any span's name is exactly + // `NAME`. + // * `latency:DURATION`: Return traces whose overall latency is + // greater or equal to than `DURATION`. Accepted units are nanoseconds + // (`ns`), milliseconds (`ms`), and seconds (`s`). Default is `ms`. For + // example, `latency:24ms` returns traces whose overall latency + // is greater than or equal to 24 milliseconds. + // * `label:LABEL_KEY`: Return all traces containing the specified + // label key (exact match, case-sensitive) regardless of the key:value + // pair's value (including empty values). + // * `LABEL_KEY:VALUE_PREFIX`: Return all traces containing the specified + // label key (exact match, case-sensitive) whose value starts with + // `VALUE_PREFIX`. Both a key and a value must be specified. + // * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair + // exactly matching the specified text. Both a key and a value must be + // specified. + // * `method:VALUE`: Equivalent to `/http/method:VALUE`. + // * `url:VALUE`: Equivalent to `/http/url:VALUE`. + Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"` + // Field used to sort the returned traces. Optional. + // Can be one of the following: + // + // * `trace_id` + // * `name` (`name` field of root span in the trace) + // * `duration` (difference between `end_time` and `start_time` fields of + // the root span) + // * `start` (`start_time` field of the root span) + // + // Descending order can be specified by appending `desc` to the sort field + // (for example, `name desc`). + // + // Only one sort field is permitted. 
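The filter grammar documented above is easiest to read next to a populated request, so the sketch below asks for root-span views of traces whose root span is named exactly /checkout and whose latency is at least 200 ms, sorted newest first. The project ID, page size, and thresholds are arbitrary illustration values.

package main

import (
	"fmt"

	cloudtrace "google.golang.org/genproto/googleapis/devtools/cloudtrace/v1"
)

func main() {
	req := &cloudtrace.ListTracesRequest{
		ProjectId: "my-project",
		View:      cloudtrace.ListTracesRequest_ROOTSPAN,
		PageSize:  100,
		// "+root:" requires an exact root-span name match; terms are ANDed.
		Filter:  "+root:/checkout latency:200ms",
		OrderBy: "start desc", // descending by root-span start time
	}
	fmt.Println(req.GetFilter())
}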
+ OrderBy string `protobuf:"bytes,8,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTracesRequest) Reset() { *m = ListTracesRequest{} } +func (m *ListTracesRequest) String() string { return proto.CompactTextString(m) } +func (*ListTracesRequest) ProtoMessage() {} +func (*ListTracesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{3} +} +func (m *ListTracesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTracesRequest.Unmarshal(m, b) +} +func (m *ListTracesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTracesRequest.Marshal(b, m, deterministic) +} +func (dst *ListTracesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTracesRequest.Merge(dst, src) +} +func (m *ListTracesRequest) XXX_Size() int { + return xxx_messageInfo_ListTracesRequest.Size(m) +} +func (m *ListTracesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTracesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTracesRequest proto.InternalMessageInfo + +func (m *ListTracesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListTracesRequest) GetView() ListTracesRequest_ViewType { + if m != nil { + return m.View + } + return ListTracesRequest_VIEW_TYPE_UNSPECIFIED +} + +func (m *ListTracesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTracesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListTracesRequest) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ListTracesRequest) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ListTracesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListTracesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +// The response message for the `ListTraces` method. +type ListTracesResponse struct { + // List of trace records returned. + Traces []*Trace `protobuf:"bytes,1,rep,name=traces,proto3" json:"traces,omitempty"` + // If defined, indicates that there are more traces that match the request + // and that this value should be passed to the next request to continue + // retrieving additional traces. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTracesResponse) Reset() { *m = ListTracesResponse{} } +func (m *ListTracesResponse) String() string { return proto.CompactTextString(m) } +func (*ListTracesResponse) ProtoMessage() {} +func (*ListTracesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{4} +} +func (m *ListTracesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTracesResponse.Unmarshal(m, b) +} +func (m *ListTracesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTracesResponse.Marshal(b, m, deterministic) +} +func (dst *ListTracesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTracesResponse.Merge(dst, src) +} +func (m *ListTracesResponse) XXX_Size() int { + return xxx_messageInfo_ListTracesResponse.Size(m) +} +func (m *ListTracesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTracesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTracesResponse proto.InternalMessageInfo + +func (m *ListTracesResponse) GetTraces() []*Trace { + if m != nil { + return m.Traces + } + return nil +} + +func (m *ListTracesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for the `GetTrace` method. +type GetTraceRequest struct { + // ID of the Cloud project where the trace data is stored. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // ID of the trace to return. + TraceId string `protobuf:"bytes,2,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTraceRequest) Reset() { *m = GetTraceRequest{} } +func (m *GetTraceRequest) String() string { return proto.CompactTextString(m) } +func (*GetTraceRequest) ProtoMessage() {} +func (*GetTraceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{5} +} +func (m *GetTraceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTraceRequest.Unmarshal(m, b) +} +func (m *GetTraceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTraceRequest.Marshal(b, m, deterministic) +} +func (dst *GetTraceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTraceRequest.Merge(dst, src) +} +func (m *GetTraceRequest) XXX_Size() int { + return xxx_messageInfo_GetTraceRequest.Size(m) +} +func (m *GetTraceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTraceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTraceRequest proto.InternalMessageInfo + +func (m *GetTraceRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *GetTraceRequest) GetTraceId() string { + if m != nil { + return m.TraceId + } + return "" +} + +// The request message for the `PatchTraces` method. +type PatchTracesRequest struct { + // ID of the Cloud project where the trace data is stored. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The body of the message. 
+ Traces *Traces `protobuf:"bytes,2,opt,name=traces,proto3" json:"traces,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PatchTracesRequest) Reset() { *m = PatchTracesRequest{} } +func (m *PatchTracesRequest) String() string { return proto.CompactTextString(m) } +func (*PatchTracesRequest) ProtoMessage() {} +func (*PatchTracesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_6c7185ebccf38983, []int{6} +} +func (m *PatchTracesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PatchTracesRequest.Unmarshal(m, b) +} +func (m *PatchTracesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PatchTracesRequest.Marshal(b, m, deterministic) +} +func (dst *PatchTracesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PatchTracesRequest.Merge(dst, src) +} +func (m *PatchTracesRequest) XXX_Size() int { + return xxx_messageInfo_PatchTracesRequest.Size(m) +} +func (m *PatchTracesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PatchTracesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PatchTracesRequest proto.InternalMessageInfo + +func (m *PatchTracesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *PatchTracesRequest) GetTraces() *Traces { + if m != nil { + return m.Traces + } + return nil +} + +func init() { + proto.RegisterType((*Trace)(nil), "google.devtools.cloudtrace.v1.Trace") + proto.RegisterType((*Traces)(nil), "google.devtools.cloudtrace.v1.Traces") + proto.RegisterType((*TraceSpan)(nil), "google.devtools.cloudtrace.v1.TraceSpan") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry") + proto.RegisterType((*ListTracesRequest)(nil), "google.devtools.cloudtrace.v1.ListTracesRequest") + proto.RegisterType((*ListTracesResponse)(nil), "google.devtools.cloudtrace.v1.ListTracesResponse") + proto.RegisterType((*GetTraceRequest)(nil), "google.devtools.cloudtrace.v1.GetTraceRequest") + proto.RegisterType((*PatchTracesRequest)(nil), "google.devtools.cloudtrace.v1.PatchTracesRequest") + proto.RegisterEnum("google.devtools.cloudtrace.v1.TraceSpan_SpanKind", TraceSpan_SpanKind_name, TraceSpan_SpanKind_value) + proto.RegisterEnum("google.devtools.cloudtrace.v1.ListTracesRequest_ViewType", ListTracesRequest_ViewType_name, ListTracesRequest_ViewType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TraceServiceClient is the client API for TraceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TraceServiceClient interface { + // Returns of a list of traces that match the specified filter conditions. + ListTraces(ctx context.Context, in *ListTracesRequest, opts ...grpc.CallOption) (*ListTracesResponse, error) + // Gets a single trace by its ID. + GetTrace(ctx context.Context, in *GetTraceRequest, opts ...grpc.CallOption) (*Trace, error) + // Sends new traces to Stackdriver Trace or updates existing traces. 
If the ID + // of a trace that you send matches that of an existing trace, any fields + // in the existing trace and its spans are overwritten by the provided values, + // and any new fields provided are merged with the existing trace data. If the + // ID does not match, a new trace is created. + PatchTraces(ctx context.Context, in *PatchTracesRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type traceServiceClient struct { + cc *grpc.ClientConn +} + +func NewTraceServiceClient(cc *grpc.ClientConn) TraceServiceClient { + return &traceServiceClient{cc} +} + +func (c *traceServiceClient) ListTraces(ctx context.Context, in *ListTracesRequest, opts ...grpc.CallOption) (*ListTracesResponse, error) { + out := new(ListTracesResponse) + err := c.cc.Invoke(ctx, "/google.devtools.cloudtrace.v1.TraceService/ListTraces", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *traceServiceClient) GetTrace(ctx context.Context, in *GetTraceRequest, opts ...grpc.CallOption) (*Trace, error) { + out := new(Trace) + err := c.cc.Invoke(ctx, "/google.devtools.cloudtrace.v1.TraceService/GetTrace", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *traceServiceClient) PatchTraces(ctx context.Context, in *PatchTracesRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.cloudtrace.v1.TraceService/PatchTraces", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TraceServiceServer is the server API for TraceService service. +type TraceServiceServer interface { + // Returns of a list of traces that match the specified filter conditions. + ListTraces(context.Context, *ListTracesRequest) (*ListTracesResponse, error) + // Gets a single trace by its ID. + GetTrace(context.Context, *GetTraceRequest) (*Trace, error) + // Sends new traces to Stackdriver Trace or updates existing traces. If the ID + // of a trace that you send matches that of an existing trace, any fields + // in the existing trace and its spans are overwritten by the provided values, + // and any new fields provided are merged with the existing trace data. If the + // ID does not match, a new trace is created. 
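Pulling these pieces together, here is a hedged sketch of driving the generated TraceServiceClient: it pages through ListTraces by following next_page_token and then upserts a (deliberately empty) batch with PatchTraces. The endpoint and project ID are placeholders, and, as with the profiler example, dialing the real service would require TLS and credentials that are omitted here.

package main

import (
	"log"

	"golang.org/x/net/context"
	cloudtrace "google.golang.org/genproto/googleapis/devtools/cloudtrace/v1"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("cloudtrace.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := cloudtrace.NewTraceServiceClient(conn)
	ctx := context.Background()

	// Page through matching traces, following next_page_token.
	req := &cloudtrace.ListTracesRequest{ProjectId: "my-project", PageSize: 100}
	for {
		resp, err := client.ListTraces(ctx, req)
		if err != nil {
			log.Fatal(err)
		}
		for _, t := range resp.GetTraces() {
			log.Printf("trace %s has %d spans", t.GetTraceId(), len(t.GetSpans()))
		}
		if resp.GetNextPageToken() == "" {
			break
		}
		req.PageToken = resp.GetNextPageToken()
	}

	// PatchTraces creates new traces or merges into existing ones by trace ID.
	_, err = client.PatchTraces(ctx, &cloudtrace.PatchTracesRequest{
		ProjectId: "my-project",
		Traces:    &cloudtrace.Traces{Traces: []*cloudtrace.Trace{}},
	})
	if err != nil {
		log.Fatal(err)
	}
}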
+ PatchTraces(context.Context, *PatchTracesRequest) (*empty.Empty, error) +} + +func RegisterTraceServiceServer(s *grpc.Server, srv TraceServiceServer) { + s.RegisterService(&_TraceService_serviceDesc, srv) +} + +func _TraceService_ListTraces_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTracesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TraceServiceServer).ListTraces(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudtrace.v1.TraceService/ListTraces", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TraceServiceServer).ListTraces(ctx, req.(*ListTracesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TraceService_GetTrace_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTraceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TraceServiceServer).GetTrace(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudtrace.v1.TraceService/GetTrace", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TraceServiceServer).GetTrace(ctx, req.(*GetTraceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TraceService_PatchTraces_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PatchTracesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TraceServiceServer).PatchTraces(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudtrace.v1.TraceService/PatchTraces", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TraceServiceServer).PatchTraces(ctx, req.(*PatchTracesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TraceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.cloudtrace.v1.TraceService", + HandlerType: (*TraceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListTraces", + Handler: _TraceService_ListTraces_Handler, + }, + { + MethodName: "GetTrace", + Handler: _TraceService_GetTrace_Handler, + }, + { + MethodName: "PatchTraces", + Handler: _TraceService_PatchTraces_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/cloudtrace/v1/trace.proto", +} + +func init() { + proto.RegisterFile("google/devtools/cloudtrace/v1/trace.proto", fileDescriptor_trace_6c7185ebccf38983) +} + +var fileDescriptor_trace_6c7185ebccf38983 = []byte{ + // 898 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0x66, 0xed, 0x78, 0x6d, 0x1f, 0x87, 0xd4, 0x8c, 0x68, 0x71, 0x5d, 0x2a, 0xc2, 0xaa, 0x20, + 0x03, 0x62, 0xb7, 0x76, 0x41, 0x22, 0xe5, 0x47, 0x6a, 0xdc, 0x6d, 0xb4, 0x8a, 0xe3, 0xac, 0xd6, + 0xc6, 0x08, 0x14, 0x69, 0x35, 0xf1, 0x4e, 0xcd, 0x12, 0x7b, 0x66, 0xd9, 0x99, 0xb8, 0x38, 0x55, + 0x2f, 0xe0, 0x92, 0x5b, 0xc4, 0x15, 0x6f, 0xd0, 0x4b, 0x1e, 0x83, 0x3b, 0xc4, 0x2b, 0x20, 0xf1, + 0x1a, 0x68, 0x66, 0x76, 0x9b, 0x28, 0x51, 0x63, 0x07, 0x6e, 0xa2, 0x39, 0x67, 
0xce, 0xef, 0xf7, + 0x7d, 0x93, 0x35, 0xbc, 0x37, 0x61, 0x6c, 0x32, 0x25, 0x4e, 0x44, 0xe6, 0x82, 0xb1, 0x29, 0x77, + 0xc6, 0x53, 0x76, 0x1c, 0x89, 0x14, 0x8f, 0x89, 0x33, 0x6f, 0x3b, 0xea, 0x60, 0x27, 0x29, 0x13, + 0x0c, 0xdd, 0xd6, 0xa1, 0x76, 0x1e, 0x6a, 0x9f, 0x86, 0xda, 0xf3, 0x76, 0xf3, 0xcd, 0xac, 0x12, + 0x4e, 0x62, 0x07, 0x53, 0xca, 0x04, 0x16, 0x31, 0xa3, 0x5c, 0x27, 0x37, 0x6f, 0x65, 0xb7, 0xca, + 0x3a, 0x3c, 0x7e, 0xec, 0x90, 0x59, 0x22, 0x16, 0xd9, 0xe5, 0x5b, 0xe7, 0x2f, 0x45, 0x3c, 0x23, + 0x5c, 0xe0, 0x59, 0xa2, 0x03, 0xac, 0x1f, 0x0d, 0x28, 0x0d, 0x65, 0x23, 0x74, 0x1b, 0x20, 0x49, + 0xd9, 0x77, 0x64, 0x2c, 0xc2, 0x38, 0x6a, 0x18, 0x9b, 0x46, 0xab, 0x1a, 0x54, 0x33, 0x8f, 0x17, + 0xa1, 0x9b, 0x50, 0x51, 0x03, 0xc9, 0xcb, 0x82, 0xba, 0x2c, 0x2b, 0xdb, 0x8b, 0xd0, 0x17, 0x50, + 0xe2, 0x09, 0xa6, 0xbc, 0x51, 0xdc, 0x2c, 0xb6, 0x6a, 0x9d, 0x96, 0x7d, 0xe9, 0x3a, 0xb6, 0x6a, + 0x37, 0x48, 0x30, 0x0d, 0x74, 0x9a, 0xf5, 0x08, 0x4c, 0xe5, 0xe3, 0xe8, 0x33, 0x30, 0x55, 0x18, + 0x6f, 0x18, 0xaa, 0xd4, 0x9d, 0x55, 0x4a, 0x05, 0x59, 0x8e, 0xf5, 0x4f, 0x11, 0xaa, 0x2f, 0x8a, + 0xa3, 0x37, 0xa0, 0x2c, 0xcb, 0xe7, 0xcb, 0x98, 0x81, 0x29, 0x4d, 0x2f, 0x42, 0x2e, 0xac, 0x1d, + 0xc5, 0x54, 0x6f, 0xb1, 0xd1, 0x69, 0xaf, 0x3a, 0xad, 0x2d, 0xff, 0xec, 0xc6, 0x34, 0x0a, 0x54, + 0x3a, 0x42, 0xb0, 0x46, 0xf1, 0x8c, 0x34, 0x8a, 0x0a, 0x0c, 0x75, 0x46, 0x5b, 0x00, 0x5c, 0xe0, + 0x54, 0x84, 0x12, 0xe6, 0xc6, 0xda, 0xa6, 0xd1, 0xaa, 0x75, 0x9a, 0x79, 0x83, 0x9c, 0x03, 0x7b, + 0x98, 0x73, 0x10, 0x54, 0x55, 0xb4, 0xb4, 0xd1, 0xc7, 0x50, 0x21, 0x34, 0xd2, 0x89, 0xa5, 0xa5, + 0x89, 0x65, 0x42, 0x23, 0x95, 0x76, 0x07, 0x36, 0x12, 0x9c, 0x12, 0x2a, 0xc2, 0x7c, 0x59, 0x53, + 0x2d, 0xbb, 0xae, 0xbd, 0x03, 0xbd, 0x72, 0x0f, 0xcc, 0x29, 0x3e, 0x24, 0x53, 0xde, 0x28, 0x2b, + 0x5c, 0x3f, 0x5a, 0x79, 0xe9, 0x9e, 0x4a, 0x73, 0xa9, 0x48, 0x17, 0x41, 0x56, 0xa3, 0xb9, 0x05, + 0xb5, 0x33, 0x6e, 0x54, 0x87, 0xe2, 0x11, 0x59, 0x64, 0x8a, 0x91, 0x47, 0xf4, 0x3a, 0x94, 0xe6, + 0x78, 0x7a, 0x4c, 0x32, 0xa1, 0x68, 0xe3, 0x7e, 0xe1, 0x13, 0xc3, 0x72, 0xa1, 0x92, 0xc3, 0x88, + 0x6e, 0xc2, 0xf5, 0x81, 0xff, 0xa0, 0x1f, 0xee, 0x7a, 0xfd, 0x87, 0xe1, 0x97, 0xfd, 0x81, 0xef, + 0x76, 0xbd, 0x47, 0x9e, 0xfb, 0xb0, 0xfe, 0x0a, 0xda, 0x00, 0x08, 0xfc, 0x6e, 0x38, 0x70, 0x83, + 0x91, 0x1b, 0xd4, 0x8d, 0xdc, 0xee, 0xf6, 0x3c, 0xb7, 0x3f, 0xac, 0x17, 0xac, 0xdf, 0x8b, 0xf0, + 0x5a, 0x2f, 0xe6, 0x42, 0xcb, 0x26, 0x20, 0xdf, 0x1f, 0x13, 0x2e, 0x96, 0x29, 0x78, 0x0f, 0xd6, + 0xe6, 0x31, 0x79, 0x92, 0xf1, 0xbe, 0xb5, 0x04, 0x82, 0x0b, 0xe5, 0xed, 0x51, 0x4c, 0x9e, 0x0c, + 0x17, 0x09, 0x09, 0x54, 0x19, 0x74, 0x0b, 0xaa, 0x09, 0x9e, 0x90, 0x90, 0xc7, 0x27, 0x5a, 0x04, + 0xa5, 0xa0, 0x22, 0x1d, 0x83, 0xf8, 0x44, 0x3f, 0x26, 0x79, 0x29, 0xd8, 0x11, 0xa1, 0x4a, 0x08, + 0x72, 0x14, 0x3c, 0x21, 0x43, 0xe9, 0x38, 0xa7, 0x93, 0xd2, 0x7f, 0xd5, 0x89, 0xb9, 0xba, 0x4e, + 0x6e, 0x80, 0xf9, 0x38, 0x9e, 0x0a, 0x92, 0x36, 0xca, 0x6a, 0x98, 0xcc, 0x92, 0xcf, 0x9a, 0xa5, + 0x11, 0x49, 0xc3, 0xc3, 0x45, 0xa3, 0xa2, 0x9f, 0xb5, 0xb2, 0xb7, 0x17, 0x56, 0x1f, 0x2a, 0xf9, + 0xca, 0x92, 0xab, 0x91, 0xe7, 0x7e, 0x15, 0x0e, 0xbf, 0xf6, 0xdd, 0x73, 0x5c, 0xd5, 0xa0, 0xbc, + 0xe7, 0xf5, 0xbd, 0xbd, 0x07, 0xbd, 0xba, 0x81, 0xd6, 0xa1, 0x12, 0xec, 0xef, 0x0f, 0x25, 0xaf, + 0xf5, 0x82, 0xb4, 0xba, 0xfb, 0x7b, 0x7e, 0xcf, 0x1d, 0xba, 0xf5, 0xa2, 0x75, 0x02, 0xe8, 0x2c, + 0xa8, 0x3c, 0x61, 0x94, 0x93, 0xff, 0xf7, 0xe4, 0xd1, 0xbb, 0x70, 0x8d, 0x92, 0x1f, 0x44, 0x78, + 0x06, 0x6c, 0xad, 0xb9, 0x57, 0xa5, 0xdb, 0xcf, 0x01, 0xb7, 0x76, 0xe1, 0xda, 0x0e, 0xd1, 0xad, + 0x57, 
0x54, 0xcb, 0xcb, 0xff, 0xdf, 0x59, 0x29, 0x20, 0x1f, 0x8b, 0xf1, 0xb7, 0x57, 0x52, 0xdf, + 0xe7, 0x2f, 0xf6, 0x2c, 0x28, 0xd6, 0xde, 0x59, 0x65, 0x4f, 0x9e, 0x2f, 0xda, 0xf9, 0xb3, 0x08, + 0xeb, 0xfa, 0x55, 0x92, 0x74, 0x1e, 0x8f, 0x09, 0xfa, 0xcd, 0x00, 0x38, 0x85, 0x13, 0xdd, 0xbd, + 0xaa, 0x9c, 0x9b, 0xed, 0x2b, 0x64, 0x68, 0xae, 0xac, 0xd6, 0x4f, 0x7f, 0xfd, 0xfd, 0x4b, 0xc1, + 0x42, 0x9b, 0xf2, 0x03, 0x96, 0xad, 0xc6, 0x9d, 0xa7, 0xa7, 0x6b, 0x3f, 0x73, 0x32, 0x5e, 0x7e, + 0x35, 0xa0, 0x92, 0x03, 0x8e, 0xec, 0x25, 0x9d, 0xce, 0x31, 0xd3, 0x5c, 0x49, 0x02, 0xd6, 0x3d, + 0x35, 0xcc, 0x87, 0xe8, 0x83, 0x65, 0xc3, 0x38, 0x4f, 0x73, 0x22, 0x9f, 0xa1, 0x9f, 0x0d, 0xa8, + 0x9d, 0xe1, 0x0e, 0x2d, 0x03, 0xe1, 0x22, 0xcf, 0xcd, 0x1b, 0x17, 0x9e, 0x9b, 0x2b, 0x3f, 0xb8, + 0xd6, 0x5d, 0x35, 0xcf, 0xfb, 0x9d, 0xa5, 0xe0, 0xdc, 0xcf, 0x38, 0xdd, 0x7e, 0x6e, 0xc0, 0xdb, + 0x63, 0x36, 0xbb, 0x7c, 0x84, 0x6d, 0x50, 0xed, 0x7d, 0xd9, 0xcc, 0x37, 0xbe, 0xd9, 0xc9, 0x82, + 0x27, 0x6c, 0x8a, 0xe9, 0xc4, 0x66, 0xe9, 0xc4, 0x99, 0x10, 0xaa, 0x46, 0x71, 0xf4, 0x15, 0x4e, + 0x62, 0xfe, 0x92, 0x1f, 0x1d, 0x9f, 0x9e, 0x5a, 0xcf, 0x0b, 0xd7, 0x77, 0x74, 0xa5, 0xae, 0xf4, + 0x69, 0x4c, 0xed, 0x51, 0xfb, 0x8f, 0xdc, 0x7f, 0xa0, 0xfc, 0x07, 0xca, 0x7f, 0x30, 0x6a, 0x1f, + 0x9a, 0xaa, 0xc7, 0xbd, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x08, 0xd9, 0xf5, 0xea, 0xd7, 0x08, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/trace.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/trace.pb.go new file mode 100644 index 0000000..0297ee9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/trace.pb.go @@ -0,0 +1,1391 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/cloudtrace/v2/trace.proto + +package cloudtrace // import "google.golang.org/genproto/googleapis/devtools/cloudtrace/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates whether the message was sent or received. +type Span_TimeEvent_MessageEvent_Type int32 + +const ( + // Unknown event type. + Span_TimeEvent_MessageEvent_TYPE_UNSPECIFIED Span_TimeEvent_MessageEvent_Type = 0 + // Indicates a sent message. + Span_TimeEvent_MessageEvent_SENT Span_TimeEvent_MessageEvent_Type = 1 + // Indicates a received message. 
+ Span_TimeEvent_MessageEvent_RECEIVED Span_TimeEvent_MessageEvent_Type = 2 +) + +var Span_TimeEvent_MessageEvent_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "SENT", + 2: "RECEIVED", +} +var Span_TimeEvent_MessageEvent_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "SENT": 1, + "RECEIVED": 2, +} + +func (x Span_TimeEvent_MessageEvent_Type) String() string { + return proto.EnumName(Span_TimeEvent_MessageEvent_Type_name, int32(x)) +} +func (Span_TimeEvent_MessageEvent_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 1, 1, 0} +} + +// The relationship of the current span relative to the linked span: child, +// parent, or unspecified. +type Span_Link_Type int32 + +const ( + // The relationship of the two spans is unknown. + Span_Link_TYPE_UNSPECIFIED Span_Link_Type = 0 + // The linked span is a child of the current span. + Span_Link_CHILD_LINKED_SPAN Span_Link_Type = 1 + // The linked span is a parent of the current span. + Span_Link_PARENT_LINKED_SPAN Span_Link_Type = 2 +) + +var Span_Link_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "CHILD_LINKED_SPAN", + 2: "PARENT_LINKED_SPAN", +} +var Span_Link_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "CHILD_LINKED_SPAN": 1, + "PARENT_LINKED_SPAN": 2, +} + +func (x Span_Link_Type) String() string { + return proto.EnumName(Span_Link_Type_name, int32(x)) +} +func (Span_Link_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 3, 0} +} + +// A span represents a single operation within a trace. Spans can be +// nested to form a trace tree. Often, a trace contains a root span +// that describes the end-to-end latency, and one or more subspans for +// its sub-operations. A trace can also contain multiple root spans, +// or none at all. Spans do not need to be contiguous—there may be +// gaps or overlaps between spans in a trace. +type Span struct { + // The resource name of the span in the following format: + // + // projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] + // + // [TRACE_ID] is a unique identifier for a trace within a project; + // it is a 32-character hexadecimal encoding of a 16-byte array. + // + // [SPAN_ID] is a unique identifier for a span within a trace; it + // is a 16-character hexadecimal encoding of an 8-byte array. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The [SPAN_ID] portion of the span's resource name. + SpanId string `protobuf:"bytes,2,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"` + // The [SPAN_ID] of this span's parent span. If this is a root span, + // then this field must be empty. + ParentSpanId string `protobuf:"bytes,3,opt,name=parent_span_id,json=parentSpanId,proto3" json:"parent_span_id,omitempty"` + // A description of the span's operation (up to 128 bytes). + // Stackdriver Trace displays the description in the + // {% dynamic print site_values.console_name %}. + // For example, the display name can be a qualified method name or a file name + // and a line number where the operation is called. A best practice is to use + // the same display name within an application and at the same call point. + // This makes it easier to correlate spans in different traces. + DisplayName *TruncatableString `protobuf:"bytes,4,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The start time of the span. 
On the client side, this is the time kept by + // the local machine where the span execution starts. On the server side, this + // is the time when the server's application handler starts running. + StartTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The end time of the span. On the client side, this is the time kept by + // the local machine where the span execution ends. On the server side, this + // is the time when the server application handler stops running. + EndTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // A set of attributes on the span. You can have up to 32 attributes per + // span. + Attributes *Span_Attributes `protobuf:"bytes,7,opt,name=attributes,proto3" json:"attributes,omitempty"` + // Stack trace captured at the start of the span. + StackTrace *StackTrace `protobuf:"bytes,8,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + // A set of time events. You can have up to 32 annotations and 128 message + // events per span. + TimeEvents *Span_TimeEvents `protobuf:"bytes,9,opt,name=time_events,json=timeEvents,proto3" json:"time_events,omitempty"` + // Links associated with the span. You can have up to 128 links per Span. + Links *Span_Links `protobuf:"bytes,10,opt,name=links,proto3" json:"links,omitempty"` + // An optional final status for this span. + Status *status.Status `protobuf:"bytes,11,opt,name=status,proto3" json:"status,omitempty"` + // (Optional) Set this parameter to indicate whether this span is in + // the same process as its parent. If you do not set this parameter, + // Stackdriver Trace is unable to take advantage of this helpful + // information. + SameProcessAsParentSpan *wrappers.BoolValue `protobuf:"bytes,12,opt,name=same_process_as_parent_span,json=sameProcessAsParentSpan,proto3" json:"same_process_as_parent_span,omitempty"` + // An optional number of child spans that were generated while this span + // was active. If set, allows implementation to detect missing child spans. 
+ ChildSpanCount *wrappers.Int32Value `protobuf:"bytes,13,opt,name=child_span_count,json=childSpanCount,proto3" json:"child_span_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span) Reset() { *m = Span{} } +func (m *Span) String() string { return proto.CompactTextString(m) } +func (*Span) ProtoMessage() {} +func (*Span) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0} +} +func (m *Span) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span.Unmarshal(m, b) +} +func (m *Span) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span.Marshal(b, m, deterministic) +} +func (dst *Span) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span.Merge(dst, src) +} +func (m *Span) XXX_Size() int { + return xxx_messageInfo_Span.Size(m) +} +func (m *Span) XXX_DiscardUnknown() { + xxx_messageInfo_Span.DiscardUnknown(m) +} + +var xxx_messageInfo_Span proto.InternalMessageInfo + +func (m *Span) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Span) GetSpanId() string { + if m != nil { + return m.SpanId + } + return "" +} + +func (m *Span) GetParentSpanId() string { + if m != nil { + return m.ParentSpanId + } + return "" +} + +func (m *Span) GetDisplayName() *TruncatableString { + if m != nil { + return m.DisplayName + } + return nil +} + +func (m *Span) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Span) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *Span) GetAttributes() *Span_Attributes { + if m != nil { + return m.Attributes + } + return nil +} + +func (m *Span) GetStackTrace() *StackTrace { + if m != nil { + return m.StackTrace + } + return nil +} + +func (m *Span) GetTimeEvents() *Span_TimeEvents { + if m != nil { + return m.TimeEvents + } + return nil +} + +func (m *Span) GetLinks() *Span_Links { + if m != nil { + return m.Links + } + return nil +} + +func (m *Span) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *Span) GetSameProcessAsParentSpan() *wrappers.BoolValue { + if m != nil { + return m.SameProcessAsParentSpan + } + return nil +} + +func (m *Span) GetChildSpanCount() *wrappers.Int32Value { + if m != nil { + return m.ChildSpanCount + } + return nil +} + +// A set of attributes, each in the format `[KEY]:[VALUE]`. +type Span_Attributes struct { + // The set of attributes. Each attribute's key can be up to 128 bytes + // long. The value can be a string up to 256 bytes, an integer, or the + // Boolean values `true` and `false`. For example: + // + // "/instance_id": "my-instance" + // "/http/user_agent": "" + // "/http/request_bytes": 300 + // "abc.com/myattribute": true + AttributeMap map[string]*AttributeValue `protobuf:"bytes,1,rep,name=attribute_map,json=attributeMap,proto3" json:"attribute_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The number of attributes that were discarded. Attributes can be discarded + // because their keys are too long or because there are too many attributes. + // If this value is 0 then all attributes are valid. 
+ DroppedAttributesCount int32 `protobuf:"varint,2,opt,name=dropped_attributes_count,json=droppedAttributesCount,proto3" json:"dropped_attributes_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_Attributes) Reset() { *m = Span_Attributes{} } +func (m *Span_Attributes) String() string { return proto.CompactTextString(m) } +func (*Span_Attributes) ProtoMessage() {} +func (*Span_Attributes) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 0} +} +func (m *Span_Attributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_Attributes.Unmarshal(m, b) +} +func (m *Span_Attributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_Attributes.Marshal(b, m, deterministic) +} +func (dst *Span_Attributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_Attributes.Merge(dst, src) +} +func (m *Span_Attributes) XXX_Size() int { + return xxx_messageInfo_Span_Attributes.Size(m) +} +func (m *Span_Attributes) XXX_DiscardUnknown() { + xxx_messageInfo_Span_Attributes.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_Attributes proto.InternalMessageInfo + +func (m *Span_Attributes) GetAttributeMap() map[string]*AttributeValue { + if m != nil { + return m.AttributeMap + } + return nil +} + +func (m *Span_Attributes) GetDroppedAttributesCount() int32 { + if m != nil { + return m.DroppedAttributesCount + } + return 0 +} + +// A time-stamped annotation or message event in the Span. +type Span_TimeEvent struct { + // The timestamp indicating the time the event occurred. + Time *timestamp.Timestamp `protobuf:"bytes,1,opt,name=time,proto3" json:"time,omitempty"` + // A `TimeEvent` can contain either an `Annotation` object or a + // `MessageEvent` object, but not both. 
+ // + // Types that are valid to be assigned to Value: + // *Span_TimeEvent_Annotation_ + // *Span_TimeEvent_MessageEvent_ + Value isSpan_TimeEvent_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_TimeEvent) Reset() { *m = Span_TimeEvent{} } +func (m *Span_TimeEvent) String() string { return proto.CompactTextString(m) } +func (*Span_TimeEvent) ProtoMessage() {} +func (*Span_TimeEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 1} +} +func (m *Span_TimeEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_TimeEvent.Unmarshal(m, b) +} +func (m *Span_TimeEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_TimeEvent.Marshal(b, m, deterministic) +} +func (dst *Span_TimeEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_TimeEvent.Merge(dst, src) +} +func (m *Span_TimeEvent) XXX_Size() int { + return xxx_messageInfo_Span_TimeEvent.Size(m) +} +func (m *Span_TimeEvent) XXX_DiscardUnknown() { + xxx_messageInfo_Span_TimeEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_TimeEvent proto.InternalMessageInfo + +func (m *Span_TimeEvent) GetTime() *timestamp.Timestamp { + if m != nil { + return m.Time + } + return nil +} + +type isSpan_TimeEvent_Value interface { + isSpan_TimeEvent_Value() +} + +type Span_TimeEvent_Annotation_ struct { + Annotation *Span_TimeEvent_Annotation `protobuf:"bytes,2,opt,name=annotation,proto3,oneof"` +} + +type Span_TimeEvent_MessageEvent_ struct { + MessageEvent *Span_TimeEvent_MessageEvent `protobuf:"bytes,3,opt,name=message_event,json=messageEvent,proto3,oneof"` +} + +func (*Span_TimeEvent_Annotation_) isSpan_TimeEvent_Value() {} + +func (*Span_TimeEvent_MessageEvent_) isSpan_TimeEvent_Value() {} + +func (m *Span_TimeEvent) GetValue() isSpan_TimeEvent_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *Span_TimeEvent) GetAnnotation() *Span_TimeEvent_Annotation { + if x, ok := m.GetValue().(*Span_TimeEvent_Annotation_); ok { + return x.Annotation + } + return nil +} + +func (m *Span_TimeEvent) GetMessageEvent() *Span_TimeEvent_MessageEvent { + if x, ok := m.GetValue().(*Span_TimeEvent_MessageEvent_); ok { + return x.MessageEvent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Span_TimeEvent) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Span_TimeEvent_OneofMarshaler, _Span_TimeEvent_OneofUnmarshaler, _Span_TimeEvent_OneofSizer, []interface{}{ + (*Span_TimeEvent_Annotation_)(nil), + (*Span_TimeEvent_MessageEvent_)(nil), + } +} + +func _Span_TimeEvent_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Span_TimeEvent) + // value + switch x := m.Value.(type) { + case *Span_TimeEvent_Annotation_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Annotation); err != nil { + return err + } + case *Span_TimeEvent_MessageEvent_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MessageEvent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Span_TimeEvent.Value has unexpected type %T", x) + } + return nil +} + +func _Span_TimeEvent_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Span_TimeEvent) + switch tag { + case 2: // value.annotation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Span_TimeEvent_Annotation) + err := b.DecodeMessage(msg) + m.Value = &Span_TimeEvent_Annotation_{msg} + return true, err + case 3: // value.message_event + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Span_TimeEvent_MessageEvent) + err := b.DecodeMessage(msg) + m.Value = &Span_TimeEvent_MessageEvent_{msg} + return true, err + default: + return false, nil + } +} + +func _Span_TimeEvent_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Span_TimeEvent) + // value + switch x := m.Value.(type) { + case *Span_TimeEvent_Annotation_: + s := proto.Size(x.Annotation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Span_TimeEvent_MessageEvent_: + s := proto.Size(x.MessageEvent) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Text annotation with a set of attributes. +type Span_TimeEvent_Annotation struct { + // A user-supplied message describing the event. The maximum length for + // the description is 256 bytes. + Description *TruncatableString `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // A set of attributes on the annotation. You can have up to 4 attributes + // per Annotation. 
+ Attributes *Span_Attributes `protobuf:"bytes,2,opt,name=attributes,proto3" json:"attributes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_TimeEvent_Annotation) Reset() { *m = Span_TimeEvent_Annotation{} } +func (m *Span_TimeEvent_Annotation) String() string { return proto.CompactTextString(m) } +func (*Span_TimeEvent_Annotation) ProtoMessage() {} +func (*Span_TimeEvent_Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 1, 0} +} +func (m *Span_TimeEvent_Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_TimeEvent_Annotation.Unmarshal(m, b) +} +func (m *Span_TimeEvent_Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_TimeEvent_Annotation.Marshal(b, m, deterministic) +} +func (dst *Span_TimeEvent_Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_TimeEvent_Annotation.Merge(dst, src) +} +func (m *Span_TimeEvent_Annotation) XXX_Size() int { + return xxx_messageInfo_Span_TimeEvent_Annotation.Size(m) +} +func (m *Span_TimeEvent_Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_Span_TimeEvent_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_TimeEvent_Annotation proto.InternalMessageInfo + +func (m *Span_TimeEvent_Annotation) GetDescription() *TruncatableString { + if m != nil { + return m.Description + } + return nil +} + +func (m *Span_TimeEvent_Annotation) GetAttributes() *Span_Attributes { + if m != nil { + return m.Attributes + } + return nil +} + +// An event describing a message sent/received between Spans. +type Span_TimeEvent_MessageEvent struct { + // Type of MessageEvent. Indicates whether the message was sent or + // received. + Type Span_TimeEvent_MessageEvent_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.cloudtrace.v2.Span_TimeEvent_MessageEvent_Type" json:"type,omitempty"` + // An identifier for the MessageEvent's message that can be used to match + // SENT and RECEIVED MessageEvents. It is recommended to be unique within + // a Span. + Id int64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + // The number of uncompressed bytes sent or received. + UncompressedSizeBytes int64 `protobuf:"varint,3,opt,name=uncompressed_size_bytes,json=uncompressedSizeBytes,proto3" json:"uncompressed_size_bytes,omitempty"` + // The number of compressed bytes sent or received. If missing assumed to + // be the same size as uncompressed. 
+ CompressedSizeBytes int64 `protobuf:"varint,4,opt,name=compressed_size_bytes,json=compressedSizeBytes,proto3" json:"compressed_size_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_TimeEvent_MessageEvent) Reset() { *m = Span_TimeEvent_MessageEvent{} } +func (m *Span_TimeEvent_MessageEvent) String() string { return proto.CompactTextString(m) } +func (*Span_TimeEvent_MessageEvent) ProtoMessage() {} +func (*Span_TimeEvent_MessageEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 1, 1} +} +func (m *Span_TimeEvent_MessageEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_TimeEvent_MessageEvent.Unmarshal(m, b) +} +func (m *Span_TimeEvent_MessageEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_TimeEvent_MessageEvent.Marshal(b, m, deterministic) +} +func (dst *Span_TimeEvent_MessageEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_TimeEvent_MessageEvent.Merge(dst, src) +} +func (m *Span_TimeEvent_MessageEvent) XXX_Size() int { + return xxx_messageInfo_Span_TimeEvent_MessageEvent.Size(m) +} +func (m *Span_TimeEvent_MessageEvent) XXX_DiscardUnknown() { + xxx_messageInfo_Span_TimeEvent_MessageEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_TimeEvent_MessageEvent proto.InternalMessageInfo + +func (m *Span_TimeEvent_MessageEvent) GetType() Span_TimeEvent_MessageEvent_Type { + if m != nil { + return m.Type + } + return Span_TimeEvent_MessageEvent_TYPE_UNSPECIFIED +} + +func (m *Span_TimeEvent_MessageEvent) GetId() int64 { + if m != nil { + return m.Id + } + return 0 +} + +func (m *Span_TimeEvent_MessageEvent) GetUncompressedSizeBytes() int64 { + if m != nil { + return m.UncompressedSizeBytes + } + return 0 +} + +func (m *Span_TimeEvent_MessageEvent) GetCompressedSizeBytes() int64 { + if m != nil { + return m.CompressedSizeBytes + } + return 0 +} + +// A collection of `TimeEvent`s. A `TimeEvent` is a time-stamped annotation +// on the span, consisting of either user-supplied key:value pairs, or +// details of a message sent/received between Spans. +type Span_TimeEvents struct { + // A collection of `TimeEvent`s. + TimeEvent []*Span_TimeEvent `protobuf:"bytes,1,rep,name=time_event,json=timeEvent,proto3" json:"time_event,omitempty"` + // The number of dropped annotations in all the included time events. + // If the value is 0, then no annotations were dropped. + DroppedAnnotationsCount int32 `protobuf:"varint,2,opt,name=dropped_annotations_count,json=droppedAnnotationsCount,proto3" json:"dropped_annotations_count,omitempty"` + // The number of dropped message events in all the included time events. + // If the value is 0, then no message events were dropped. 
+ DroppedMessageEventsCount int32 `protobuf:"varint,3,opt,name=dropped_message_events_count,json=droppedMessageEventsCount,proto3" json:"dropped_message_events_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_TimeEvents) Reset() { *m = Span_TimeEvents{} } +func (m *Span_TimeEvents) String() string { return proto.CompactTextString(m) } +func (*Span_TimeEvents) ProtoMessage() {} +func (*Span_TimeEvents) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 2} +} +func (m *Span_TimeEvents) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_TimeEvents.Unmarshal(m, b) +} +func (m *Span_TimeEvents) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_TimeEvents.Marshal(b, m, deterministic) +} +func (dst *Span_TimeEvents) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_TimeEvents.Merge(dst, src) +} +func (m *Span_TimeEvents) XXX_Size() int { + return xxx_messageInfo_Span_TimeEvents.Size(m) +} +func (m *Span_TimeEvents) XXX_DiscardUnknown() { + xxx_messageInfo_Span_TimeEvents.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_TimeEvents proto.InternalMessageInfo + +func (m *Span_TimeEvents) GetTimeEvent() []*Span_TimeEvent { + if m != nil { + return m.TimeEvent + } + return nil +} + +func (m *Span_TimeEvents) GetDroppedAnnotationsCount() int32 { + if m != nil { + return m.DroppedAnnotationsCount + } + return 0 +} + +func (m *Span_TimeEvents) GetDroppedMessageEventsCount() int32 { + if m != nil { + return m.DroppedMessageEventsCount + } + return 0 +} + +// A pointer from the current span to another span in the same trace or in a +// different trace. For example, this can be used in batching operations, +// where a single batch handler processes multiple requests from different +// traces or when the handler receives a request from a different project. +type Span_Link struct { + // The [TRACE_ID] for a trace within a project. + TraceId string `protobuf:"bytes,1,opt,name=trace_id,json=traceId,proto3" json:"trace_id,omitempty"` + // The [SPAN_ID] for a span within a trace. + SpanId string `protobuf:"bytes,2,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"` + // The relationship of the current span relative to the linked span. + Type Span_Link_Type `protobuf:"varint,3,opt,name=type,proto3,enum=google.devtools.cloudtrace.v2.Span_Link_Type" json:"type,omitempty"` + // A set of attributes on the link. You have have up to 32 attributes per + // link. 
+ Attributes *Span_Attributes `protobuf:"bytes,4,opt,name=attributes,proto3" json:"attributes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_Link) Reset() { *m = Span_Link{} } +func (m *Span_Link) String() string { return proto.CompactTextString(m) } +func (*Span_Link) ProtoMessage() {} +func (*Span_Link) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 3} +} +func (m *Span_Link) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_Link.Unmarshal(m, b) +} +func (m *Span_Link) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_Link.Marshal(b, m, deterministic) +} +func (dst *Span_Link) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_Link.Merge(dst, src) +} +func (m *Span_Link) XXX_Size() int { + return xxx_messageInfo_Span_Link.Size(m) +} +func (m *Span_Link) XXX_DiscardUnknown() { + xxx_messageInfo_Span_Link.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_Link proto.InternalMessageInfo + +func (m *Span_Link) GetTraceId() string { + if m != nil { + return m.TraceId + } + return "" +} + +func (m *Span_Link) GetSpanId() string { + if m != nil { + return m.SpanId + } + return "" +} + +func (m *Span_Link) GetType() Span_Link_Type { + if m != nil { + return m.Type + } + return Span_Link_TYPE_UNSPECIFIED +} + +func (m *Span_Link) GetAttributes() *Span_Attributes { + if m != nil { + return m.Attributes + } + return nil +} + +// A collection of links, which are references from this span to a span +// in the same or different trace. +type Span_Links struct { + // A collection of links. + Link []*Span_Link `protobuf:"bytes,1,rep,name=link,proto3" json:"link,omitempty"` + // The number of dropped links after the maximum size was enforced. If + // this value is 0, then no links were dropped. + DroppedLinksCount int32 `protobuf:"varint,2,opt,name=dropped_links_count,json=droppedLinksCount,proto3" json:"dropped_links_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Span_Links) Reset() { *m = Span_Links{} } +func (m *Span_Links) String() string { return proto.CompactTextString(m) } +func (*Span_Links) ProtoMessage() {} +func (*Span_Links) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{0, 4} +} +func (m *Span_Links) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Span_Links.Unmarshal(m, b) +} +func (m *Span_Links) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Span_Links.Marshal(b, m, deterministic) +} +func (dst *Span_Links) XXX_Merge(src proto.Message) { + xxx_messageInfo_Span_Links.Merge(dst, src) +} +func (m *Span_Links) XXX_Size() int { + return xxx_messageInfo_Span_Links.Size(m) +} +func (m *Span_Links) XXX_DiscardUnknown() { + xxx_messageInfo_Span_Links.DiscardUnknown(m) +} + +var xxx_messageInfo_Span_Links proto.InternalMessageInfo + +func (m *Span_Links) GetLink() []*Span_Link { + if m != nil { + return m.Link + } + return nil +} + +func (m *Span_Links) GetDroppedLinksCount() int32 { + if m != nil { + return m.DroppedLinksCount + } + return 0 +} + +// The allowed types for [VALUE] in a `[KEY]:[VALUE]` attribute. +type AttributeValue struct { + // The type of the value. 
+ // + // Types that are valid to be assigned to Value: + // *AttributeValue_StringValue + // *AttributeValue_IntValue + // *AttributeValue_BoolValue + Value isAttributeValue_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttributeValue) Reset() { *m = AttributeValue{} } +func (m *AttributeValue) String() string { return proto.CompactTextString(m) } +func (*AttributeValue) ProtoMessage() {} +func (*AttributeValue) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{1} +} +func (m *AttributeValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttributeValue.Unmarshal(m, b) +} +func (m *AttributeValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttributeValue.Marshal(b, m, deterministic) +} +func (dst *AttributeValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttributeValue.Merge(dst, src) +} +func (m *AttributeValue) XXX_Size() int { + return xxx_messageInfo_AttributeValue.Size(m) +} +func (m *AttributeValue) XXX_DiscardUnknown() { + xxx_messageInfo_AttributeValue.DiscardUnknown(m) +} + +var xxx_messageInfo_AttributeValue proto.InternalMessageInfo + +type isAttributeValue_Value interface { + isAttributeValue_Value() +} + +type AttributeValue_StringValue struct { + StringValue *TruncatableString `protobuf:"bytes,1,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type AttributeValue_IntValue struct { + IntValue int64 `protobuf:"varint,2,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type AttributeValue_BoolValue struct { + BoolValue bool `protobuf:"varint,3,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +func (*AttributeValue_StringValue) isAttributeValue_Value() {} + +func (*AttributeValue_IntValue) isAttributeValue_Value() {} + +func (*AttributeValue_BoolValue) isAttributeValue_Value() {} + +func (m *AttributeValue) GetValue() isAttributeValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *AttributeValue) GetStringValue() *TruncatableString { + if x, ok := m.GetValue().(*AttributeValue_StringValue); ok { + return x.StringValue + } + return nil +} + +func (m *AttributeValue) GetIntValue() int64 { + if x, ok := m.GetValue().(*AttributeValue_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (m *AttributeValue) GetBoolValue() bool { + if x, ok := m.GetValue().(*AttributeValue_BoolValue); ok { + return x.BoolValue + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AttributeValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttributeValue_OneofMarshaler, _AttributeValue_OneofUnmarshaler, _AttributeValue_OneofSizer, []interface{}{ + (*AttributeValue_StringValue)(nil), + (*AttributeValue_IntValue)(nil), + (*AttributeValue_BoolValue)(nil), + } +} + +func _AttributeValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttributeValue) + // value + switch x := m.Value.(type) { + case *AttributeValue_StringValue: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StringValue); err != nil { + return err + } + case *AttributeValue_IntValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntValue)) + case *AttributeValue_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("AttributeValue.Value has unexpected type %T", x) + } + return nil +} + +func _AttributeValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttributeValue) + switch tag { + case 1: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TruncatableString) + err := b.DecodeMessage(msg) + m.Value = &AttributeValue_StringValue{msg} + return true, err + case 2: // value.int_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &AttributeValue_IntValue{int64(x)} + return true, err + case 3: // value.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &AttributeValue_BoolValue{x != 0} + return true, err + default: + return false, nil + } +} + +func _AttributeValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttributeValue) + // value + switch x := m.Value.(type) { + case *AttributeValue_StringValue: + s := proto.Size(x.StringValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AttributeValue_IntValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntValue)) + case *AttributeValue_BoolValue: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A call stack appearing in a trace. +type StackTrace struct { + // Stack frames in this stack trace. A maximum of 128 frames are allowed. + StackFrames *StackTrace_StackFrames `protobuf:"bytes,1,opt,name=stack_frames,json=stackFrames,proto3" json:"stack_frames,omitempty"` + // The hash ID is used to conserve network bandwidth for duplicate + // stack traces within a single trace. + // + // Often multiple spans will have identical stack traces. + // The first occurrence of a stack trace should contain both the + // `stackFrame` content and a value in `stackTraceHashId`. + // + // Subsequent spans within the same request can refer + // to that stack trace by only setting `stackTraceHashId`. 
+ StackTraceHashId int64 `protobuf:"varint,2,opt,name=stack_trace_hash_id,json=stackTraceHashId,proto3" json:"stack_trace_hash_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StackTrace) Reset() { *m = StackTrace{} } +func (m *StackTrace) String() string { return proto.CompactTextString(m) } +func (*StackTrace) ProtoMessage() {} +func (*StackTrace) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{2} +} +func (m *StackTrace) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StackTrace.Unmarshal(m, b) +} +func (m *StackTrace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StackTrace.Marshal(b, m, deterministic) +} +func (dst *StackTrace) XXX_Merge(src proto.Message) { + xxx_messageInfo_StackTrace.Merge(dst, src) +} +func (m *StackTrace) XXX_Size() int { + return xxx_messageInfo_StackTrace.Size(m) +} +func (m *StackTrace) XXX_DiscardUnknown() { + xxx_messageInfo_StackTrace.DiscardUnknown(m) +} + +var xxx_messageInfo_StackTrace proto.InternalMessageInfo + +func (m *StackTrace) GetStackFrames() *StackTrace_StackFrames { + if m != nil { + return m.StackFrames + } + return nil +} + +func (m *StackTrace) GetStackTraceHashId() int64 { + if m != nil { + return m.StackTraceHashId + } + return 0 +} + +// Represents a single stack frame in a stack trace. +type StackTrace_StackFrame struct { + // The fully-qualified name that uniquely identifies the function or + // method that is active in this frame (up to 1024 bytes). + FunctionName *TruncatableString `protobuf:"bytes,1,opt,name=function_name,json=functionName,proto3" json:"function_name,omitempty"` + // An un-mangled function name, if `function_name` is + // [mangled](http://www.avabodh.com/cxxin/namemangling.html). The name can + // be fully-qualified (up to 1024 bytes). + OriginalFunctionName *TruncatableString `protobuf:"bytes,2,opt,name=original_function_name,json=originalFunctionName,proto3" json:"original_function_name,omitempty"` + // The name of the source file where the function call appears (up to 256 + // bytes). + FileName *TruncatableString `protobuf:"bytes,3,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"` + // The line number in `file_name` where the function call appears. + LineNumber int64 `protobuf:"varint,4,opt,name=line_number,json=lineNumber,proto3" json:"line_number,omitempty"` + // The column number where the function call appears, if available. + // This is important in JavaScript because of its anonymous functions. + ColumnNumber int64 `protobuf:"varint,5,opt,name=column_number,json=columnNumber,proto3" json:"column_number,omitempty"` + // The binary module from where the code was loaded. + LoadModule *Module `protobuf:"bytes,6,opt,name=load_module,json=loadModule,proto3" json:"load_module,omitempty"` + // The version of the deployed source code (up to 128 bytes). 
+ SourceVersion *TruncatableString `protobuf:"bytes,7,opt,name=source_version,json=sourceVersion,proto3" json:"source_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StackTrace_StackFrame) Reset() { *m = StackTrace_StackFrame{} } +func (m *StackTrace_StackFrame) String() string { return proto.CompactTextString(m) } +func (*StackTrace_StackFrame) ProtoMessage() {} +func (*StackTrace_StackFrame) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{2, 0} +} +func (m *StackTrace_StackFrame) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StackTrace_StackFrame.Unmarshal(m, b) +} +func (m *StackTrace_StackFrame) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StackTrace_StackFrame.Marshal(b, m, deterministic) +} +func (dst *StackTrace_StackFrame) XXX_Merge(src proto.Message) { + xxx_messageInfo_StackTrace_StackFrame.Merge(dst, src) +} +func (m *StackTrace_StackFrame) XXX_Size() int { + return xxx_messageInfo_StackTrace_StackFrame.Size(m) +} +func (m *StackTrace_StackFrame) XXX_DiscardUnknown() { + xxx_messageInfo_StackTrace_StackFrame.DiscardUnknown(m) +} + +var xxx_messageInfo_StackTrace_StackFrame proto.InternalMessageInfo + +func (m *StackTrace_StackFrame) GetFunctionName() *TruncatableString { + if m != nil { + return m.FunctionName + } + return nil +} + +func (m *StackTrace_StackFrame) GetOriginalFunctionName() *TruncatableString { + if m != nil { + return m.OriginalFunctionName + } + return nil +} + +func (m *StackTrace_StackFrame) GetFileName() *TruncatableString { + if m != nil { + return m.FileName + } + return nil +} + +func (m *StackTrace_StackFrame) GetLineNumber() int64 { + if m != nil { + return m.LineNumber + } + return 0 +} + +func (m *StackTrace_StackFrame) GetColumnNumber() int64 { + if m != nil { + return m.ColumnNumber + } + return 0 +} + +func (m *StackTrace_StackFrame) GetLoadModule() *Module { + if m != nil { + return m.LoadModule + } + return nil +} + +func (m *StackTrace_StackFrame) GetSourceVersion() *TruncatableString { + if m != nil { + return m.SourceVersion + } + return nil +} + +// A collection of stack frames, which can be truncated. +type StackTrace_StackFrames struct { + // Stack frames in this call stack. + Frame []*StackTrace_StackFrame `protobuf:"bytes,1,rep,name=frame,proto3" json:"frame,omitempty"` + // The number of stack frames that were dropped because there + // were too many stack frames. + // If this value is 0, then no stack frames were dropped. 
+ DroppedFramesCount int32 `protobuf:"varint,2,opt,name=dropped_frames_count,json=droppedFramesCount,proto3" json:"dropped_frames_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StackTrace_StackFrames) Reset() { *m = StackTrace_StackFrames{} } +func (m *StackTrace_StackFrames) String() string { return proto.CompactTextString(m) } +func (*StackTrace_StackFrames) ProtoMessage() {} +func (*StackTrace_StackFrames) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{2, 1} +} +func (m *StackTrace_StackFrames) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StackTrace_StackFrames.Unmarshal(m, b) +} +func (m *StackTrace_StackFrames) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StackTrace_StackFrames.Marshal(b, m, deterministic) +} +func (dst *StackTrace_StackFrames) XXX_Merge(src proto.Message) { + xxx_messageInfo_StackTrace_StackFrames.Merge(dst, src) +} +func (m *StackTrace_StackFrames) XXX_Size() int { + return xxx_messageInfo_StackTrace_StackFrames.Size(m) +} +func (m *StackTrace_StackFrames) XXX_DiscardUnknown() { + xxx_messageInfo_StackTrace_StackFrames.DiscardUnknown(m) +} + +var xxx_messageInfo_StackTrace_StackFrames proto.InternalMessageInfo + +func (m *StackTrace_StackFrames) GetFrame() []*StackTrace_StackFrame { + if m != nil { + return m.Frame + } + return nil +} + +func (m *StackTrace_StackFrames) GetDroppedFramesCount() int32 { + if m != nil { + return m.DroppedFramesCount + } + return 0 +} + +// Binary module. +type Module struct { + // For example: main binary, kernel modules, and dynamic libraries + // such as libc.so, sharedlib.so (up to 256 bytes). + Module *TruncatableString `protobuf:"bytes,1,opt,name=module,proto3" json:"module,omitempty"` + // A unique identifier for the module, usually a hash of its + // contents (up to 128 bytes). + BuildId *TruncatableString `protobuf:"bytes,2,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Module) Reset() { *m = Module{} } +func (m *Module) String() string { return proto.CompactTextString(m) } +func (*Module) ProtoMessage() {} +func (*Module) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{3} +} +func (m *Module) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Module.Unmarshal(m, b) +} +func (m *Module) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Module.Marshal(b, m, deterministic) +} +func (dst *Module) XXX_Merge(src proto.Message) { + xxx_messageInfo_Module.Merge(dst, src) +} +func (m *Module) XXX_Size() int { + return xxx_messageInfo_Module.Size(m) +} +func (m *Module) XXX_DiscardUnknown() { + xxx_messageInfo_Module.DiscardUnknown(m) +} + +var xxx_messageInfo_Module proto.InternalMessageInfo + +func (m *Module) GetModule() *TruncatableString { + if m != nil { + return m.Module + } + return nil +} + +func (m *Module) GetBuildId() *TruncatableString { + if m != nil { + return m.BuildId + } + return nil +} + +// Represents a string that might be shortened to a specified length. +type TruncatableString struct { + // The shortened string. For example, if the original string is 500 + // bytes long and the limit of the string is 128 bytes, then + // `value` contains the first 128 bytes of the 500-byte string. 
+ // + // Truncation always happens on a UTF8 character boundary. If there + // are multi-byte characters in the string, then the length of the + // shortened string might be less than the size limit. + Value string `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // The number of bytes removed from the original string. If this + // value is 0, then the string was not shortened. + TruncatedByteCount int32 `protobuf:"varint,2,opt,name=truncated_byte_count,json=truncatedByteCount,proto3" json:"truncated_byte_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TruncatableString) Reset() { *m = TruncatableString{} } +func (m *TruncatableString) String() string { return proto.CompactTextString(m) } +func (*TruncatableString) ProtoMessage() {} +func (*TruncatableString) Descriptor() ([]byte, []int) { + return fileDescriptor_trace_717f3961302e3ac6, []int{4} +} +func (m *TruncatableString) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TruncatableString.Unmarshal(m, b) +} +func (m *TruncatableString) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TruncatableString.Marshal(b, m, deterministic) +} +func (dst *TruncatableString) XXX_Merge(src proto.Message) { + xxx_messageInfo_TruncatableString.Merge(dst, src) +} +func (m *TruncatableString) XXX_Size() int { + return xxx_messageInfo_TruncatableString.Size(m) +} +func (m *TruncatableString) XXX_DiscardUnknown() { + xxx_messageInfo_TruncatableString.DiscardUnknown(m) +} + +var xxx_messageInfo_TruncatableString proto.InternalMessageInfo + +func (m *TruncatableString) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *TruncatableString) GetTruncatedByteCount() int32 { + if m != nil { + return m.TruncatedByteCount + } + return 0 +} + +func init() { + proto.RegisterType((*Span)(nil), "google.devtools.cloudtrace.v2.Span") + proto.RegisterType((*Span_Attributes)(nil), "google.devtools.cloudtrace.v2.Span.Attributes") + proto.RegisterMapType((map[string]*AttributeValue)(nil), "google.devtools.cloudtrace.v2.Span.Attributes.AttributeMapEntry") + proto.RegisterType((*Span_TimeEvent)(nil), "google.devtools.cloudtrace.v2.Span.TimeEvent") + proto.RegisterType((*Span_TimeEvent_Annotation)(nil), "google.devtools.cloudtrace.v2.Span.TimeEvent.Annotation") + proto.RegisterType((*Span_TimeEvent_MessageEvent)(nil), "google.devtools.cloudtrace.v2.Span.TimeEvent.MessageEvent") + proto.RegisterType((*Span_TimeEvents)(nil), "google.devtools.cloudtrace.v2.Span.TimeEvents") + proto.RegisterType((*Span_Link)(nil), "google.devtools.cloudtrace.v2.Span.Link") + proto.RegisterType((*Span_Links)(nil), "google.devtools.cloudtrace.v2.Span.Links") + proto.RegisterType((*AttributeValue)(nil), "google.devtools.cloudtrace.v2.AttributeValue") + proto.RegisterType((*StackTrace)(nil), "google.devtools.cloudtrace.v2.StackTrace") + proto.RegisterType((*StackTrace_StackFrame)(nil), "google.devtools.cloudtrace.v2.StackTrace.StackFrame") + proto.RegisterType((*StackTrace_StackFrames)(nil), "google.devtools.cloudtrace.v2.StackTrace.StackFrames") + proto.RegisterType((*Module)(nil), "google.devtools.cloudtrace.v2.Module") + proto.RegisterType((*TruncatableString)(nil), "google.devtools.cloudtrace.v2.TruncatableString") + proto.RegisterEnum("google.devtools.cloudtrace.v2.Span_TimeEvent_MessageEvent_Type", Span_TimeEvent_MessageEvent_Type_name, Span_TimeEvent_MessageEvent_Type_value) + 
proto.RegisterEnum("google.devtools.cloudtrace.v2.Span_Link_Type", Span_Link_Type_name, Span_Link_Type_value) +} + +func init() { + proto.RegisterFile("google/devtools/cloudtrace/v2/trace.proto", fileDescriptor_trace_717f3961302e3ac6) +} + +var fileDescriptor_trace_717f3961302e3ac6 = []byte{ + // 1425 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x4b, 0x6f, 0xdb, 0xc6, + 0x16, 0x36, 0xf5, 0xd6, 0x91, 0x6c, 0xc8, 0x13, 0x3b, 0x56, 0x94, 0xe4, 0x26, 0xd7, 0xf7, 0x16, + 0x70, 0x0a, 0x98, 0x0a, 0x94, 0xa4, 0x48, 0xd3, 0x02, 0xa9, 0x1f, 0x72, 0xa4, 0xc4, 0x56, 0x05, + 0x4a, 0x71, 0xd3, 0x34, 0x00, 0x31, 0x22, 0xc7, 0x32, 0x11, 0x8a, 0x24, 0x38, 0x43, 0x17, 0xce, + 0xae, 0xeb, 0xae, 0xbb, 0x29, 0x50, 0x74, 0x59, 0x20, 0xab, 0xfc, 0x8e, 0x2e, 0xba, 0xed, 0x7f, + 0xe9, 0xaa, 0x98, 0x07, 0x49, 0x29, 0x2f, 0xdb, 0xca, 0x6e, 0x66, 0xce, 0xf9, 0x3e, 0x9e, 0x33, + 0x73, 0x5e, 0x84, 0x5b, 0x63, 0xdf, 0x1f, 0xbb, 0xa4, 0x69, 0x93, 0x13, 0xe6, 0xfb, 0x2e, 0x6d, + 0x5a, 0xae, 0x1f, 0xd9, 0x2c, 0xc4, 0x16, 0x69, 0x9e, 0xb4, 0x9a, 0x62, 0xa1, 0x07, 0xa1, 0xcf, + 0x7c, 0x74, 0x5d, 0xaa, 0xea, 0xb1, 0xaa, 0x9e, 0xaa, 0xea, 0x27, 0xad, 0xc6, 0x35, 0xc5, 0x84, + 0x03, 0xa7, 0x89, 0x3d, 0xcf, 0x67, 0x98, 0x39, 0xbe, 0x47, 0x25, 0xb8, 0x71, 0x43, 0x49, 0xc5, + 0x6e, 0x14, 0x1d, 0x35, 0x99, 0x33, 0x21, 0x94, 0xe1, 0x49, 0xa0, 0x14, 0xfe, 0xf3, 0xb6, 0xc2, + 0x8f, 0x21, 0x0e, 0x02, 0x12, 0xc6, 0x04, 0x6b, 0x4a, 0x1e, 0x06, 0x56, 0x93, 0x32, 0xcc, 0x22, + 0x25, 0x58, 0xff, 0x07, 0x41, 0x6e, 0x10, 0x60, 0x0f, 0x21, 0xc8, 0x79, 0x78, 0x42, 0xea, 0xda, + 0x4d, 0x6d, 0xa3, 0x6c, 0x88, 0x35, 0x5a, 0x83, 0x22, 0x0d, 0xb0, 0x67, 0x3a, 0x76, 0x3d, 0x23, + 0x8e, 0x0b, 0x7c, 0xdb, 0xb5, 0xd1, 0xff, 0x61, 0x29, 0xc0, 0x21, 0xf1, 0x98, 0x19, 0xcb, 0xb3, + 0x42, 0x5e, 0x95, 0xa7, 0x03, 0xa9, 0x35, 0x80, 0xaa, 0xed, 0xd0, 0xc0, 0xc5, 0xa7, 0xa6, 0xa0, + 0xce, 0xdd, 0xd4, 0x36, 0x2a, 0xad, 0xdb, 0xfa, 0x47, 0x6f, 0x42, 0x1f, 0x86, 0x91, 0x67, 0x61, + 0x86, 0x47, 0x2e, 0x19, 0xb0, 0xd0, 0xf1, 0xc6, 0x46, 0x45, 0xb1, 0xf4, 0xb8, 0x4d, 0x5f, 0x02, + 0x50, 0x86, 0x43, 0x66, 0xf2, 0x2b, 0xa8, 0xe7, 0x05, 0x65, 0x23, 0xa6, 0x8c, 0xdd, 0xd7, 0x87, + 0xf1, 0xfd, 0x18, 0x65, 0xa1, 0xcd, 0xf7, 0xe8, 0x1e, 0x94, 0x88, 0x67, 0x4b, 0x60, 0xe1, 0x4c, + 0x60, 0x91, 0x78, 0xb6, 0x80, 0xf5, 0x00, 0x30, 0x63, 0xa1, 0x33, 0x8a, 0x18, 0xa1, 0xf5, 0xa2, + 0x00, 0xea, 0x67, 0x38, 0xc1, 0x6f, 0x40, 0xdf, 0x4a, 0x50, 0xc6, 0x14, 0x03, 0x7a, 0x0c, 0x15, + 0xca, 0xb0, 0xf5, 0xd2, 0x14, 0xda, 0xf5, 0x92, 0x20, 0xbc, 0x75, 0x16, 0x21, 0x47, 0x0c, 0xf9, + 0xce, 0x00, 0x9a, 0xac, 0xd1, 0xb7, 0x50, 0xe1, 0xee, 0x98, 0xe4, 0x84, 0x78, 0x8c, 0xd6, 0xcb, + 0xe7, 0x37, 0x8e, 0xbb, 0xd6, 0x16, 0x28, 0x03, 0x58, 0xb2, 0x46, 0x0f, 0x21, 0xef, 0x3a, 0xde, + 0x4b, 0x5a, 0x87, 0xf3, 0x99, 0xc5, 0xa9, 0xf6, 0x39, 0xc0, 0x90, 0x38, 0xf4, 0x39, 0x14, 0x64, + 0x80, 0xd5, 0x2b, 0x82, 0x01, 0xc5, 0x0c, 0x61, 0x60, 0x71, 0x2f, 0x58, 0x44, 0x0d, 0xa5, 0x81, + 0x9e, 0xc1, 0x55, 0x8a, 0x27, 0xc4, 0x0c, 0x42, 0xdf, 0x22, 0x94, 0x9a, 0x98, 0x9a, 0x53, 0x61, + 0x55, 0xaf, 0x7e, 0xe0, 0x8d, 0xb6, 0x7d, 0xdf, 0x3d, 0xc4, 0x6e, 0x44, 0x8c, 0x35, 0x0e, 0xef, + 0x4b, 0xf4, 0x16, 0xed, 0x27, 0xc1, 0x87, 0xda, 0x50, 0xb3, 0x8e, 0x1d, 0xd7, 0x96, 0xf1, 0x69, + 0xf9, 0x91, 0xc7, 0xea, 0x8b, 0x82, 0xee, 0xea, 0x3b, 0x74, 0x5d, 0x8f, 0xdd, 0x69, 0x49, 0xbe, + 0x25, 0x01, 0xe2, 0x0c, 0x3b, 0x1c, 0xd2, 0xf8, 0x2d, 0x03, 0x90, 0xbe, 0x22, 0x22, 0xb0, 0x98, + 0xbc, 0xa3, 0x39, 0xc1, 0x41, 0x5d, 0xbb, 0x99, 0xdd, 0xa8, 0xb4, 0xbe, 
0xb9, 0x58, 0x30, 0xa4, + 0xcb, 0x03, 0x1c, 0xb4, 0x3d, 0x16, 0x9e, 0x1a, 0x55, 0x3c, 0x75, 0x84, 0xee, 0x43, 0xdd, 0x0e, + 0xfd, 0x20, 0x20, 0xb6, 0x99, 0x86, 0x8d, 0x72, 0x82, 0xe7, 0x61, 0xde, 0xb8, 0xac, 0xe4, 0x29, + 0xa9, 0xb4, 0xd7, 0x83, 0xe5, 0x77, 0xc8, 0x51, 0x0d, 0xb2, 0x2f, 0xc9, 0xa9, 0x4a, 0x6c, 0xbe, + 0x44, 0x3b, 0x90, 0x3f, 0xe1, 0xfe, 0x0a, 0xb6, 0x4a, 0x6b, 0xf3, 0x0c, 0xfb, 0x13, 0x4a, 0x79, + 0x49, 0x12, 0xfb, 0x20, 0x73, 0x5f, 0x6b, 0xfc, 0x95, 0x87, 0x72, 0x12, 0x48, 0x48, 0x87, 0x9c, + 0xc8, 0x2d, 0xed, 0xcc, 0xdc, 0x12, 0x7a, 0xe8, 0x39, 0x40, 0x5a, 0xea, 0x94, 0x2d, 0xf7, 0x2f, + 0x14, 0xbb, 0xfa, 0x56, 0x82, 0xef, 0x2c, 0x18, 0x53, 0x6c, 0x08, 0xc3, 0xe2, 0x84, 0x50, 0x8a, + 0xc7, 0x2a, 0x37, 0x44, 0x81, 0xaa, 0xb4, 0x1e, 0x5c, 0x8c, 0xfe, 0x40, 0x52, 0x88, 0x4d, 0x67, + 0xc1, 0xa8, 0x4e, 0xa6, 0xf6, 0x8d, 0x37, 0x1a, 0x40, 0xfa, 0x7d, 0x64, 0x40, 0xc5, 0x26, 0xd4, + 0x0a, 0x9d, 0x40, 0xb8, 0xa3, 0xcd, 0x5d, 0xec, 0x52, 0x92, 0xb7, 0x4a, 0x4f, 0xe6, 0x53, 0x4b, + 0x4f, 0xe3, 0x97, 0x0c, 0x54, 0xa7, 0x7d, 0x42, 0x03, 0xc8, 0xb1, 0xd3, 0x40, 0x3e, 0xd9, 0x52, + 0xeb, 0xe1, 0xfc, 0xb7, 0xa3, 0x0f, 0x4f, 0x03, 0x62, 0x08, 0x32, 0xb4, 0x04, 0x19, 0xd5, 0x31, + 0xb2, 0x46, 0xc6, 0xb1, 0xd1, 0x17, 0xb0, 0x16, 0x79, 0x96, 0x3f, 0x09, 0x42, 0x42, 0x29, 0xb1, + 0x4d, 0xea, 0xbc, 0x22, 0xe6, 0xe8, 0x94, 0xbb, 0x94, 0x15, 0x4a, 0xab, 0xd3, 0xe2, 0x81, 0xf3, + 0x8a, 0x6c, 0x73, 0x21, 0x6a, 0xc1, 0xea, 0xfb, 0x51, 0x39, 0x81, 0xba, 0xf4, 0x1e, 0xcc, 0xfa, + 0x5d, 0xc8, 0x71, 0x4b, 0xd0, 0x0a, 0xd4, 0x86, 0xdf, 0xf7, 0xdb, 0xe6, 0xd3, 0xde, 0xa0, 0xdf, + 0xde, 0xe9, 0xee, 0x75, 0xdb, 0xbb, 0xb5, 0x05, 0x54, 0x82, 0xdc, 0xa0, 0xdd, 0x1b, 0xd6, 0x34, + 0x54, 0x85, 0x92, 0xd1, 0xde, 0x69, 0x77, 0x0f, 0xdb, 0xbb, 0xb5, 0xcc, 0x76, 0x51, 0x25, 0x44, + 0xe3, 0x6f, 0x0d, 0x20, 0xad, 0x8c, 0x68, 0x1f, 0x20, 0x2d, 0xaf, 0x2a, 0xdb, 0x37, 0x2f, 0x74, + 0x49, 0x46, 0x39, 0x29, 0xae, 0xe8, 0x01, 0x5c, 0x49, 0xf2, 0x3a, 0x6d, 0xf1, 0x33, 0x89, 0xbd, + 0x16, 0x27, 0x76, 0x2a, 0x17, 0x99, 0x8d, 0x1e, 0xc2, 0xb5, 0x18, 0x3b, 0x13, 0xd7, 0x31, 0x3c, + 0x2b, 0xe0, 0x31, 0xff, 0xf4, 0xcb, 0xa8, 0xd2, 0xf0, 0x6b, 0x06, 0x72, 0xbc, 0x50, 0xa3, 0x2b, + 0x50, 0x12, 0xb6, 0xf2, 0xae, 0x2d, 0x6b, 0x42, 0x51, 0xec, 0xbb, 0xf6, 0x87, 0xfb, 0xfd, 0x96, + 0x0a, 0x93, 0xac, 0x08, 0x93, 0xcd, 0xf3, 0x36, 0x85, 0xe9, 0xa0, 0x98, 0x0d, 0xe5, 0xdc, 0xa7, + 0x86, 0xf2, 0xfa, 0x93, 0x8f, 0x3e, 0xf4, 0x2a, 0x2c, 0xef, 0x74, 0xba, 0xfb, 0xbb, 0xe6, 0x7e, + 0xb7, 0xf7, 0xa4, 0xbd, 0x6b, 0x0e, 0xfa, 0x5b, 0xbd, 0x9a, 0x86, 0x2e, 0x03, 0xea, 0x6f, 0x19, + 0xed, 0xde, 0x70, 0xe6, 0x3c, 0xd3, 0x88, 0x20, 0x2f, 0x9a, 0x18, 0xfa, 0x1a, 0x72, 0xbc, 0x8d, + 0xa9, 0xa7, 0xde, 0x38, 0xaf, 0xa3, 0x86, 0x40, 0x21, 0x1d, 0x2e, 0xc5, 0x8f, 0x24, 0x9a, 0xe1, + 0xcc, 0xd3, 0x2e, 0x2b, 0x91, 0xf8, 0x90, 0x78, 0x93, 0xf5, 0x37, 0x1a, 0x2c, 0xcd, 0x16, 0x57, + 0xf4, 0x14, 0xaa, 0x54, 0x14, 0x02, 0x53, 0x56, 0xe8, 0x39, 0xcb, 0x48, 0x67, 0xc1, 0xa8, 0x48, + 0x1e, 0x49, 0x7b, 0x1d, 0xca, 0x8e, 0xc7, 0xcc, 0xb4, 0xea, 0x67, 0x3b, 0x0b, 0x46, 0xc9, 0xf1, + 0x98, 0x14, 0xdf, 0x00, 0x18, 0xf9, 0xbe, 0xab, 0xe4, 0xfc, 0x95, 0x4b, 0x9d, 0x05, 0xa3, 0x3c, + 0x8a, 0x1b, 0x6d, 0x92, 0x20, 0xeb, 0x7f, 0x14, 0x00, 0xd2, 0x59, 0x04, 0x3d, 0xe3, 0xe6, 0xf2, + 0x59, 0xe6, 0x28, 0xc4, 0x13, 0x42, 0x95, 0xb9, 0xf7, 0xce, 0x3d, 0xcc, 0xc8, 0xe5, 0x9e, 0x00, + 0x1b, 0x72, 0x2c, 0x92, 0x1b, 0xb4, 0x09, 0x97, 0xa6, 0xa6, 0x24, 0xf3, 0x18, 0xd3, 0x63, 0x33, + 0xa9, 0x2a, 0xb5, 0x74, 0x04, 0xea, 0x60, 0x7a, 0xdc, 0xb5, 0x1b, 0x3f, 0xe5, 0x94, 0x5d, 0x02, + 
0x8e, 0x9e, 0xc2, 0xe2, 0x51, 0xe4, 0x59, 0x3c, 0x81, 0xcc, 0x64, 0xac, 0x9d, 0xa7, 0x1c, 0x57, + 0x63, 0x1a, 0x31, 0x7c, 0x1e, 0xc1, 0x65, 0x3f, 0x74, 0xc6, 0x8e, 0x87, 0x5d, 0x73, 0x96, 0x3f, + 0x33, 0x27, 0xff, 0x4a, 0xcc, 0xb7, 0x37, 0xfd, 0x9d, 0x03, 0x28, 0x1f, 0x39, 0x2e, 0x91, 0xd4, + 0xd9, 0x39, 0xa9, 0x4b, 0x9c, 0x42, 0xd0, 0xdd, 0x80, 0x8a, 0xeb, 0x78, 0xc4, 0xf4, 0xa2, 0xc9, + 0x88, 0x84, 0xaa, 0x7c, 0x02, 0x3f, 0xea, 0x89, 0x13, 0xf4, 0x3f, 0x58, 0xb4, 0x7c, 0x37, 0x9a, + 0x78, 0xb1, 0x4a, 0x5e, 0xa8, 0x54, 0xe5, 0xa1, 0x52, 0xda, 0x83, 0x8a, 0xeb, 0x63, 0xdb, 0x9c, + 0xf8, 0x76, 0xe4, 0xc6, 0x13, 0xf4, 0x67, 0x67, 0x98, 0x75, 0x20, 0x94, 0x0d, 0xe0, 0x48, 0xb9, + 0x46, 0xdf, 0xc1, 0x12, 0xf5, 0xa3, 0xd0, 0x22, 0xe6, 0x09, 0x09, 0x29, 0xef, 0x95, 0xc5, 0x39, + 0x3d, 0x5c, 0x94, 0x3c, 0x87, 0x92, 0xa6, 0xf1, 0xb3, 0x06, 0x95, 0xa9, 0x78, 0x42, 0x8f, 0x21, + 0x2f, 0xc2, 0x52, 0x65, 0xf3, 0xdd, 0x79, 0xa2, 0xd2, 0x90, 0x14, 0xe8, 0x36, 0xac, 0xc4, 0xa9, + 0x2d, 0x43, 0x7d, 0x26, 0xb7, 0x91, 0x92, 0xc9, 0x0f, 0xcb, 0xe4, 0xfe, 0x5d, 0x83, 0x82, 0xf2, + 0xb8, 0x03, 0x05, 0x75, 0x69, 0xf3, 0x86, 0xa1, 0xc2, 0xa3, 0x27, 0x50, 0x1a, 0x45, 0x7c, 0xae, + 0x55, 0xa9, 0x30, 0x0f, 0x57, 0x51, 0x30, 0x74, 0xed, 0xf5, 0x1f, 0x60, 0xf9, 0x1d, 0x29, 0x5a, + 0x89, 0x67, 0x43, 0xd9, 0x1b, 0xe4, 0x86, 0xbb, 0xcf, 0xa4, 0x2a, 0xb1, 0x45, 0x13, 0x9e, 0x75, + 0x3f, 0x91, 0xf1, 0x26, 0x2c, 0xdc, 0xdf, 0x7e, 0xad, 0xc1, 0x7f, 0x2d, 0x7f, 0xf2, 0x71, 0xeb, + 0xb6, 0x41, 0xdc, 0x77, 0x9f, 0x4f, 0x88, 0x7d, 0xed, 0xf9, 0x23, 0xa5, 0x3c, 0xf6, 0x5d, 0xec, + 0x8d, 0x75, 0x3f, 0x1c, 0x37, 0xc7, 0xc4, 0x13, 0xf3, 0x63, 0x53, 0x8a, 0x70, 0xe0, 0xd0, 0x0f, + 0xfc, 0x6d, 0x7f, 0x95, 0xee, 0x5e, 0x67, 0x56, 0x1f, 0x49, 0xa6, 0x1d, 0x7e, 0xa6, 0xcb, 0x47, + 0x3d, 0x6c, 0xfd, 0x19, 0x9f, 0xbf, 0x10, 0xe7, 0x2f, 0xc4, 0xf9, 0x8b, 0xc3, 0xd6, 0xa8, 0x20, + 0xbe, 0x71, 0xe7, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x7b, 0x3c, 0x8a, 0x77, 0xd0, 0x0f, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/tracing.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/tracing.pb.go new file mode 100644 index 0000000..d88229b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/cloudtrace/v2/tracing.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/cloudtrace/v2/tracing.proto + +package cloudtrace // import "google.golang.org/genproto/googleapis/devtools/cloudtrace/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message for the `BatchWriteSpans` method. +type BatchWriteSpansRequest struct { + // Required. The name of the project where the spans belong. The format is + // `projects/[PROJECT_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A list of new spans. The span names must not match existing + // spans, or the results are undefined. + Spans []*Span `protobuf:"bytes,2,rep,name=spans,proto3" json:"spans,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchWriteSpansRequest) Reset() { *m = BatchWriteSpansRequest{} } +func (m *BatchWriteSpansRequest) String() string { return proto.CompactTextString(m) } +func (*BatchWriteSpansRequest) ProtoMessage() {} +func (*BatchWriteSpansRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tracing_25bea29412890bc4, []int{0} +} +func (m *BatchWriteSpansRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchWriteSpansRequest.Unmarshal(m, b) +} +func (m *BatchWriteSpansRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchWriteSpansRequest.Marshal(b, m, deterministic) +} +func (dst *BatchWriteSpansRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchWriteSpansRequest.Merge(dst, src) +} +func (m *BatchWriteSpansRequest) XXX_Size() int { + return xxx_messageInfo_BatchWriteSpansRequest.Size(m) +} +func (m *BatchWriteSpansRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchWriteSpansRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchWriteSpansRequest proto.InternalMessageInfo + +func (m *BatchWriteSpansRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BatchWriteSpansRequest) GetSpans() []*Span { + if m != nil { + return m.Spans + } + return nil +} + +func init() { + proto.RegisterType((*BatchWriteSpansRequest)(nil), "google.devtools.cloudtrace.v2.BatchWriteSpansRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TraceServiceClient is the client API for TraceService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TraceServiceClient interface { + // Sends new spans to new or existing traces. You cannot update + // existing spans. + BatchWriteSpans(ctx context.Context, in *BatchWriteSpansRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new span. + CreateSpan(ctx context.Context, in *Span, opts ...grpc.CallOption) (*Span, error) +} + +type traceServiceClient struct { + cc *grpc.ClientConn +} + +func NewTraceServiceClient(cc *grpc.ClientConn) TraceServiceClient { + return &traceServiceClient{cc} +} + +func (c *traceServiceClient) BatchWriteSpans(ctx context.Context, in *BatchWriteSpansRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.cloudtrace.v2.TraceService/BatchWriteSpans", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *traceServiceClient) CreateSpan(ctx context.Context, in *Span, opts ...grpc.CallOption) (*Span, error) { + out := new(Span) + err := c.cc.Invoke(ctx, "/google.devtools.cloudtrace.v2.TraceService/CreateSpan", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// TraceServiceServer is the server API for TraceService service. +type TraceServiceServer interface { + // Sends new spans to new or existing traces. You cannot update + // existing spans. + BatchWriteSpans(context.Context, *BatchWriteSpansRequest) (*empty.Empty, error) + // Creates a new span. + CreateSpan(context.Context, *Span) (*Span, error) +} + +func RegisterTraceServiceServer(s *grpc.Server, srv TraceServiceServer) { + s.RegisterService(&_TraceService_serviceDesc, srv) +} + +func _TraceService_BatchWriteSpans_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchWriteSpansRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TraceServiceServer).BatchWriteSpans(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudtrace.v2.TraceService/BatchWriteSpans", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TraceServiceServer).BatchWriteSpans(ctx, req.(*BatchWriteSpansRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TraceService_CreateSpan_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Span) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TraceServiceServer).CreateSpan(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.cloudtrace.v2.TraceService/CreateSpan", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TraceServiceServer).CreateSpan(ctx, req.(*Span)) + } + return interceptor(ctx, in, info, handler) +} + +var _TraceService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.cloudtrace.v2.TraceService", + HandlerType: (*TraceServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "BatchWriteSpans", + Handler: _TraceService_BatchWriteSpans_Handler, + }, + { + MethodName: "CreateSpan", + Handler: _TraceService_CreateSpan_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/cloudtrace/v2/tracing.proto", +} + +func init() { + proto.RegisterFile("google/devtools/cloudtrace/v2/tracing.proto", fileDescriptor_tracing_25bea29412890bc4) +} + +var fileDescriptor_tracing_25bea29412890bc4 = []byte{ + // 404 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0xdd, 0x6a, 0xdb, 0x30, + 0x14, 0x46, 0xde, 0x0f, 0x4c, 0x1b, 0x0c, 0x04, 0x0b, 0xc1, 0xdb, 0x58, 0xe6, 0x0d, 0x96, 0x64, + 0x43, 0x02, 0x8f, 0x5d, 0x2c, 0x63, 0x37, 0x09, 0x23, 0xb7, 0x21, 0x19, 0x19, 0x8c, 0xdc, 0x28, + 0x8e, 0xa6, 0x69, 0xd8, 0x92, 0x67, 0x29, 0x86, 0x52, 0x7a, 0xd3, 0x9b, 0x3e, 0x40, 0xfb, 0x14, + 0xa5, 0xd0, 0xf7, 0xe8, 0x6d, 0x5f, 0xa1, 0x0f, 0x52, 0x24, 0xd9, 0x0d, 0x84, 0x34, 0xc9, 0x9d, + 0xce, 0x39, 0xdf, 0xf9, 0xce, 0xf7, 0x7d, 0x36, 0xfc, 0xc8, 0x95, 0xe2, 0x29, 0x23, 0x0b, 0x56, + 0x1a, 0xa5, 0x52, 0x4d, 0x92, 0x54, 0x2d, 0x17, 0xa6, 0xa0, 0x09, 0x23, 0x65, 0x4c, 0xec, 0x43, + 0x48, 0x8e, 0xf3, 0x42, 0x19, 0x85, 0x5e, 0x7b, 0x30, 0xae, 0xc1, 0x78, 0x05, 0xc6, 0x65, 0x1c, + 0xbe, 0xaa, 0xb8, 0x68, 0x2e, 0x08, 0x95, 0x52, 0x19, 0x6a, 0x84, 0x92, 0xda, 0x2f, 0x87, 0x9d, + 0xdd, 0x97, 0x58, 0x05, 0x7d, 0x59, 0x41, 0x5d, 0x35, 0x5f, 0xfe, 0x21, 
0x2c, 0xcb, 0xcd, 0x41, + 0x35, 0x7c, 0xb3, 0x3e, 0x34, 0x22, 0x63, 0xda, 0xd0, 0x2c, 0xf7, 0x80, 0x88, 0xc3, 0x46, 0x9f, + 0x9a, 0xe4, 0xef, 0xaf, 0x42, 0x18, 0x36, 0xc9, 0xa9, 0xd4, 0x63, 0xf6, 0x7f, 0xc9, 0xb4, 0x41, + 0x08, 0x3e, 0x94, 0x34, 0x63, 0x4d, 0xd0, 0x02, 0xed, 0x27, 0x63, 0xf7, 0x46, 0x5f, 0xe1, 0x23, + 0x6d, 0x31, 0xcd, 0xa0, 0xf5, 0xa0, 0xfd, 0x34, 0x7e, 0x87, 0xb7, 0x7a, 0xc4, 0x96, 0x6f, 0xec, + 0x37, 0xe2, 0xcb, 0x00, 0x3e, 0xfb, 0x69, 0x07, 0x13, 0x56, 0x94, 0x22, 0x61, 0xe8, 0x0c, 0xc0, + 0xe7, 0x6b, 0xa7, 0xd1, 0x97, 0x1d, 0x84, 0x9b, 0xa5, 0x86, 0x8d, 0x7a, 0xad, 0xb6, 0x89, 0x7f, + 0xd8, 0x0c, 0xa2, 0xf8, 0xf8, 0xfa, 0xe6, 0x34, 0xf8, 0x14, 0x7d, 0xb0, 0x99, 0x1d, 0x5a, 0x07, + 0xdf, 0xf3, 0x42, 0xfd, 0x63, 0x89, 0xd1, 0xa4, 0x7b, 0xe4, 0x53, 0xd4, 0xbd, 0xf9, 0x1d, 0x69, + 0x0f, 0x74, 0xd1, 0x09, 0x80, 0x70, 0x50, 0x30, 0xea, 0x4f, 0xa0, 0x7d, 0x2c, 0x86, 0xfb, 0x80, + 0x22, 0xe2, 0xc4, 0x74, 0xa2, 0xf7, 0x9b, 0xc4, 0x54, 0x5a, 0xac, 0x2a, 0x17, 0x57, 0x0f, 0x74, + 0xfb, 0x17, 0x00, 0xbe, 0x4d, 0x54, 0xb6, 0x9d, 0xbb, 0xef, 0x42, 0x15, 0x92, 0x8f, 0xac, 0xf5, + 0x11, 0xf8, 0x3d, 0xac, 0xe0, 0x5c, 0xa5, 0x54, 0x72, 0xac, 0x0a, 0x4e, 0x38, 0x93, 0x2e, 0x18, + 0xe2, 0x47, 0x34, 0x17, 0xfa, 0x9e, 0x1f, 0xeb, 0xdb, 0xaa, 0x3a, 0x0f, 0x5e, 0x0c, 0x3d, 0xd3, + 0xc0, 0xf6, 0xb0, 0xfb, 0x76, 0x78, 0x1a, 0x5f, 0xd5, 0xfd, 0x99, 0xeb, 0xcf, 0x5c, 0x7f, 0x36, + 0x8d, 0xe7, 0x8f, 0xdd, 0x8d, 0xcf, 0xb7, 0x01, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x94, 0x51, 0x1d, + 0x25, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1/containeranalysis.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1/containeranalysis.pb.go new file mode 100644 index 0000000..e654893 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1/containeranalysis.pb.go @@ -0,0 +1,244 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1/containeranalysis.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ContainerAnalysisClient is the client API for ContainerAnalysis service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
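The TraceService bindings vendored just above expose NewTraceServiceClient together with the BatchWriteSpans and CreateSpan calls. A minimal caller sketch, assuming a placeholder endpoint address and project name and omitting real transport credentials; this is illustrative only and not part of the vendored tracing.pb.go:

package main

import (
	"context"
	"log"

	cloudtrace "google.golang.org/genproto/googleapis/devtools/cloudtrace/v2"
	"google.golang.org/grpc"
)

func main() {
	// Placeholder endpoint; a real Cloud Trace call needs TLS and OAuth
	// credentials rather than grpc.WithInsecure().
	conn, err := grpc.Dial("cloudtrace.example:443", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := cloudtrace.NewTraceServiceClient(conn)

	// BatchWriteSpans takes the project name and the spans to write; an empty
	// span list is used here only to show the call shape.
	if _, err := client.BatchWriteSpans(context.Background(), &cloudtrace.BatchWriteSpansRequest{
		Name: "projects/my-project",
	}); err != nil {
		log.Fatalf("BatchWriteSpans: %v", err)
	}
}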
+type ContainerAnalysisClient interface { + // Sets the access control policy on the specified note or occurrence. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or an occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for a note or an occurrence resource. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified note or + // occurrence. Requires list permission on the project (for example, + // `containeranalysis.notes.list`). + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type containerAnalysisClient struct { + cc *grpc.ClientConn +} + +func NewContainerAnalysisClient(cc *grpc.ClientConn) ContainerAnalysisClient { + return &containerAnalysisClient{cc} +} + +func (c *containerAnalysisClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1.ContainerAnalysis/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1.ContainerAnalysis/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1.ContainerAnalysis/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContainerAnalysisServer is the server API for ContainerAnalysis service. +type ContainerAnalysisServer interface { + // Sets the access control policy on the specified note or occurrence. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or an occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. 
+ SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a note or an occurrence resource. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified note or + // occurrence. Requires list permission on the project (for example, + // `containeranalysis.notes.list`). + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterContainerAnalysisServer(s *grpc.Server, srv ContainerAnalysisServer) { + s.RegisterService(&_ContainerAnalysis_serviceDesc, srv) +} + +func _ContainerAnalysis_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1.ContainerAnalysis/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1.ContainerAnalysis/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1.ContainerAnalysis/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ContainerAnalysis_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.containeranalysis.v1.ContainerAnalysis", + HandlerType: 
(*ContainerAnalysisServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SetIamPolicy", + Handler: _ContainerAnalysis_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _ContainerAnalysis_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _ContainerAnalysis_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/containeranalysis/v1/containeranalysis.proto", +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1/containeranalysis.proto", fileDescriptor_containeranalysis_21b725fe07df89a8) +} + +var fileDescriptor_containeranalysis_21b725fe07df89a8 = []byte{ + // 393 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x93, 0x4f, 0x6a, 0xdb, 0x40, + 0x14, 0xc6, 0x91, 0x4b, 0xbb, 0x10, 0xdd, 0x54, 0xd0, 0x8d, 0x28, 0x35, 0x88, 0x2e, 0x5a, 0x2f, + 0x66, 0x70, 0xdb, 0x4d, 0xdd, 0x84, 0xe0, 0x78, 0x61, 0xb2, 0x33, 0xf9, 0xb3, 0xf1, 0x26, 0x8c, + 0x27, 0x2f, 0x62, 0x82, 0x66, 0x9e, 0x32, 0x33, 0x12, 0x38, 0x21, 0x60, 0xb2, 0xc8, 0x05, 0x72, + 0x83, 0x5c, 0x24, 0x87, 0xc8, 0x15, 0xb2, 0xcc, 0x21, 0x82, 0xa4, 0x51, 0xb0, 0x2d, 0xc7, 0x98, + 0x40, 0x96, 0xf3, 0xbe, 0xf7, 0x7d, 0xf3, 0xfd, 0x60, 0xc6, 0xdf, 0x8a, 0x11, 0xe3, 0x04, 0xe8, + 0x09, 0xe4, 0x16, 0x31, 0x31, 0x94, 0xa3, 0xb2, 0x4c, 0x28, 0xd0, 0x4c, 0xb1, 0x64, 0x6a, 0x84, + 0xa1, 0x79, 0xb7, 0x39, 0x24, 0xa9, 0x46, 0x8b, 0xc1, 0x8f, 0xca, 0x4d, 0x6a, 0x37, 0x69, 0x2e, + 0xe6, 0xdd, 0xf0, 0x9b, 0xbb, 0x83, 0xa5, 0x82, 0x32, 0xa5, 0xd0, 0x32, 0x2b, 0x50, 0xb9, 0x8c, + 0xf0, 0xbb, 0x53, 0x05, 0x93, 0xc5, 0x55, 0x82, 0xc9, 0xe3, 0x14, 0x13, 0xc1, 0xa7, 0x4e, 0x0f, + 0x17, 0xf5, 0x05, 0xad, 0xed, 0xb4, 0xf2, 0x34, 0xc9, 0x4e, 0xa9, 0x15, 0x12, 0x8c, 0x65, 0x32, + 0xad, 0x16, 0x7e, 0xdf, 0x7c, 0xf4, 0xbf, 0x0c, 0xea, 0x4e, 0x7d, 0xd7, 0x29, 0xb8, 0xf7, 0xfc, + 0xcf, 0x07, 0x60, 0xf7, 0x98, 0x1c, 0x95, 0x69, 0x41, 0x44, 0x1c, 0x88, 0x60, 0x92, 0xe4, 0x5d, + 0x32, 0x2f, 0xee, 0xc3, 0x79, 0x06, 0xc6, 0x86, 0x5f, 0x97, 0x76, 0x2a, 0x35, 0xb2, 0xd7, 0x0f, + 0x8f, 0xb7, 0x2d, 0x15, 0x91, 0xa2, 0xdc, 0xa5, 0x06, 0x83, 0x99, 0xe6, 0xb0, 0x9d, 0x6a, 0x3c, + 0x03, 0x6e, 0x0d, 0xed, 0x50, 0x85, 0x16, 0x0c, 0xed, 0x5c, 0xf5, 0xcc, 0x5c, 0x6a, 0xcf, 0xeb, + 0x8c, 0xff, 0x45, 0x7f, 0x5f, 0x35, 0x21, 0xe7, 0x99, 0xd6, 0xa0, 0xf8, 0x4a, 0x6b, 0x49, 0x30, + 0x5c, 0x47, 0x30, 0x7c, 0x17, 0x82, 0xf8, 0xed, 0x04, 0x4b, 0xd6, 0xe0, 0xc9, 0xf3, 0x83, 0x43, + 0x30, 0xe5, 0x10, 0xb4, 0x14, 0xc6, 0x14, 0x6f, 0x22, 0xf8, 0xb9, 0xd4, 0xb1, 0xb9, 0x52, 0xd3, + 0xfc, 0xda, 0x60, 0xd3, 0xa4, 0xa8, 0x0c, 0x44, 0x33, 0xaf, 0x44, 0xbc, 0x58, 0xd3, 0xf6, 0x05, + 0xd1, 0x36, 0x62, 0x0a, 0xd0, 0x9d, 0xa8, 0xb7, 0x29, 0xe8, 0xca, 0x80, 0xdd, 0x99, 0xe7, 0xb7, + 0x39, 0xca, 0xba, 0xf3, 0xaa, 0x7f, 0x32, 0xf2, 0xc6, 0x47, 0x4e, 0x8e, 0x31, 0x61, 0x2a, 0x26, + 0xa8, 0x63, 0x1a, 0x83, 0x2a, 0x9f, 0x32, 0xad, 0x24, 0x96, 0x0a, 0xb3, 0xfe, 0xb3, 0xfe, 0x6f, + 0x0c, 0xef, 0x5a, 0x1f, 0x86, 0x83, 0xfe, 0xe4, 0x53, 0x99, 0xf3, 0xe7, 0x39, 0x00, 0x00, 0xff, + 0xff, 0xba, 0xe6, 0x43, 0xb1, 0xf3, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/bill_of_materials.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/bill_of_materials.pb.go new file mode 100644 index 0000000..45c8a65 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/bill_of_materials.pb.go @@ -0,0 +1,383 
@@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Instruction set architectures supported by various package managers. +type PackageManager_Architecture int32 + +const ( + // Unknown architecture + PackageManager_ARCHITECTURE_UNSPECIFIED PackageManager_Architecture = 0 + // X86 architecture + PackageManager_X86 PackageManager_Architecture = 1 + // X64 architecture + PackageManager_X64 PackageManager_Architecture = 2 +) + +var PackageManager_Architecture_name = map[int32]string{ + 0: "ARCHITECTURE_UNSPECIFIED", + 1: "X86", + 2: "X64", +} +var PackageManager_Architecture_value = map[string]int32{ + "ARCHITECTURE_UNSPECIFIED": 0, + "X86": 1, + "X64": 2, +} + +func (x PackageManager_Architecture) String() string { + return proto.EnumName(PackageManager_Architecture_name, int32(x)) +} +func (PackageManager_Architecture) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0, 0} +} + +// PackageManager provides metadata about available / installed packages. +type PackageManager struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageManager) Reset() { *m = PackageManager{} } +func (m *PackageManager) String() string { return proto.CompactTextString(m) } +func (*PackageManager) ProtoMessage() {} +func (*PackageManager) Descriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0} +} +func (m *PackageManager) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager.Unmarshal(m, b) +} +func (m *PackageManager) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager.Marshal(b, m, deterministic) +} +func (dst *PackageManager) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager.Merge(dst, src) +} +func (m *PackageManager) XXX_Size() int { + return xxx_messageInfo_PackageManager.Size(m) +} +func (m *PackageManager) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager proto.InternalMessageInfo + +// This represents a particular channel of distribution for a given package. +// e.g. Debian's jessie-backports dpkg mirror +type PackageManager_Distribution struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. 
+ CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The CPU architecture for which packages in this distribution + // channel were built + Architecture PackageManager_Architecture `protobuf:"varint,2,opt,name=architecture,proto3,enum=google.devtools.containeranalysis.v1alpha1.PackageManager_Architecture" json:"architecture,omitempty"` + // The latest available version of this package in + // this distribution channel. + LatestVersion *VulnerabilityType_Version `protobuf:"bytes,3,opt,name=latest_version,json=latestVersion,proto3" json:"latest_version,omitempty"` + // A freeform string denoting the maintainer of this package. + Maintainer string `protobuf:"bytes,4,opt,name=maintainer,proto3" json:"maintainer,omitempty"` + // The distribution channel-specific homepage for this package. + Url string `protobuf:"bytes,6,opt,name=url,proto3" json:"url,omitempty"` + // The distribution channel-specific description of this package. + Description string `protobuf:"bytes,7,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageManager_Distribution) Reset() { *m = PackageManager_Distribution{} } +func (m *PackageManager_Distribution) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Distribution) ProtoMessage() {} +func (*PackageManager_Distribution) Descriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0, 0} +} +func (m *PackageManager_Distribution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Distribution.Unmarshal(m, b) +} +func (m *PackageManager_Distribution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Distribution.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Distribution) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Distribution.Merge(dst, src) +} +func (m *PackageManager_Distribution) XXX_Size() int { + return xxx_messageInfo_PackageManager_Distribution.Size(m) +} +func (m *PackageManager_Distribution) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Distribution.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Distribution proto.InternalMessageInfo + +func (m *PackageManager_Distribution) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *PackageManager_Distribution) GetArchitecture() PackageManager_Architecture { + if m != nil { + return m.Architecture + } + return PackageManager_ARCHITECTURE_UNSPECIFIED +} + +func (m *PackageManager_Distribution) GetLatestVersion() *VulnerabilityType_Version { + if m != nil { + return m.LatestVersion + } + return nil +} + +func (m *PackageManager_Distribution) GetMaintainer() string { + if m != nil { + return m.Maintainer + } + return "" +} + +func (m *PackageManager_Distribution) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *PackageManager_Distribution) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// An occurrence of a particular package installation found within a +// system's filesystem. +// e.g. glibc was found in /var/lib/dpkg/status +type PackageManager_Location struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. 
+ CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The version installed at this location. + Version *VulnerabilityType_Version `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // The path from which we gathered that this package/version is installed. + Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageManager_Location) Reset() { *m = PackageManager_Location{} } +func (m *PackageManager_Location) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Location) ProtoMessage() {} +func (*PackageManager_Location) Descriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0, 1} +} +func (m *PackageManager_Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Location.Unmarshal(m, b) +} +func (m *PackageManager_Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Location.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Location.Merge(dst, src) +} +func (m *PackageManager_Location) XXX_Size() int { + return xxx_messageInfo_PackageManager_Location.Size(m) +} +func (m *PackageManager_Location) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Location proto.InternalMessageInfo + +func (m *PackageManager_Location) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *PackageManager_Location) GetVersion() *VulnerabilityType_Version { + if m != nil { + return m.Version + } + return nil +} + +func (m *PackageManager_Location) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +// This represents a particular package that is distributed over +// various channels. +// e.g. glibc (aka libc6) is distributed by many, at various versions. +type PackageManager_Package struct { + // The name of the package. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The various channels by which a package is distributed. 
+ Distribution []*PackageManager_Distribution `protobuf:"bytes,10,rep,name=distribution,proto3" json:"distribution,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageManager_Package) Reset() { *m = PackageManager_Package{} } +func (m *PackageManager_Package) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Package) ProtoMessage() {} +func (*PackageManager_Package) Descriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0, 2} +} +func (m *PackageManager_Package) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Package.Unmarshal(m, b) +} +func (m *PackageManager_Package) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Package.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Package) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Package.Merge(dst, src) +} +func (m *PackageManager_Package) XXX_Size() int { + return xxx_messageInfo_PackageManager_Package.Size(m) +} +func (m *PackageManager_Package) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Package.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Package proto.InternalMessageInfo + +func (m *PackageManager_Package) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PackageManager_Package) GetDistribution() []*PackageManager_Distribution { + if m != nil { + return m.Distribution + } + return nil +} + +// This represents how a particular software package may be installed on +// a system. +type PackageManager_Installation struct { + // Output only. The name of the installed package. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // All of the places within the filesystem versions of this package + // have been found. 
+ Location []*PackageManager_Location `protobuf:"bytes,2,rep,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageManager_Installation) Reset() { *m = PackageManager_Installation{} } +func (m *PackageManager_Installation) String() string { return proto.CompactTextString(m) } +func (*PackageManager_Installation) ProtoMessage() {} +func (*PackageManager_Installation) Descriptor() ([]byte, []int) { + return fileDescriptor_bill_of_materials_5994c0bb5dadb2f3, []int{0, 3} +} +func (m *PackageManager_Installation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageManager_Installation.Unmarshal(m, b) +} +func (m *PackageManager_Installation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageManager_Installation.Marshal(b, m, deterministic) +} +func (dst *PackageManager_Installation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageManager_Installation.Merge(dst, src) +} +func (m *PackageManager_Installation) XXX_Size() int { + return xxx_messageInfo_PackageManager_Installation.Size(m) +} +func (m *PackageManager_Installation) XXX_DiscardUnknown() { + xxx_messageInfo_PackageManager_Installation.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageManager_Installation proto.InternalMessageInfo + +func (m *PackageManager_Installation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PackageManager_Installation) GetLocation() []*PackageManager_Location { + if m != nil { + return m.Location + } + return nil +} + +func init() { + proto.RegisterType((*PackageManager)(nil), "google.devtools.containeranalysis.v1alpha1.PackageManager") + proto.RegisterType((*PackageManager_Distribution)(nil), "google.devtools.containeranalysis.v1alpha1.PackageManager.Distribution") + proto.RegisterType((*PackageManager_Location)(nil), "google.devtools.containeranalysis.v1alpha1.PackageManager.Location") + proto.RegisterType((*PackageManager_Package)(nil), "google.devtools.containeranalysis.v1alpha1.PackageManager.Package") + proto.RegisterType((*PackageManager_Installation)(nil), "google.devtools.containeranalysis.v1alpha1.PackageManager.Installation") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.PackageManager_Architecture", PackageManager_Architecture_name, PackageManager_Architecture_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto", fileDescriptor_bill_of_materials_5994c0bb5dadb2f3) +} + +var fileDescriptor_bill_of_materials_5994c0bb5dadb2f3 = []byte{ + // 522 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x93, 0xd1, 0x8a, 0xd3, 0x4e, + 0x14, 0xc6, 0xff, 0x49, 0x97, 0x76, 0xf7, 0xb4, 0xff, 0x52, 0xe6, 0xc6, 0x10, 0x16, 0x29, 0x0b, + 0x42, 0xf1, 0x22, 0x61, 0x57, 0x59, 0x04, 0x41, 0xe8, 0x76, 0xbb, 0x6b, 0x41, 0xa5, 0xc4, 0x76, + 0x11, 0xbd, 0x08, 0xa7, 0xe9, 0x98, 0x0e, 0x3b, 0x9d, 0x09, 0x93, 0x49, 0xa1, 0xd7, 0xde, 0x89, + 0x0f, 0xe0, 0xb5, 0x0f, 0xa5, 0xaf, 0x23, 0x99, 0x24, 0x92, 0xb2, 0x2a, 0xbb, 0xac, 0x77, 0x27, + 0xf3, 0x85, 0xdf, 0xf9, 0xce, 0x77, 0x66, 0xe0, 0x2c, 0x96, 0x32, 0xe6, 0xd4, 0x5f, 0xd2, 0x8d, + 0x96, 0x92, 0xa7, 0x7e, 0x24, 0x85, 0x46, 0x26, 0xa8, 0x42, 0x81, 0x7c, 0x9b, 0xb2, 0xd4, 0xdf, + 0x1c, 0x23, 0x4f, 0x56, 0x78, 0xec, 0x2f, 0x18, 0xe7, 0xa1, 0xfc, 0x18, 0xae, 0x51, 0x53, 0xc5, + 0x90, 0xa7, 0x5e, 0xa2, 0xa4, 0x96, 0xe4, 
0x71, 0xc1, 0xf0, 0x2a, 0x86, 0x77, 0x83, 0xe1, 0x55, + 0x0c, 0xf7, 0xb0, 0xec, 0x87, 0x09, 0xf3, 0x51, 0x08, 0xa9, 0x51, 0x33, 0x29, 0x4a, 0x92, 0x7b, + 0x71, 0x07, 0x37, 0x09, 0x46, 0xd7, 0x18, 0xd3, 0x70, 0x93, 0xf1, 0x5c, 0x5f, 0x30, 0xce, 0xf4, + 0xb6, 0xe0, 0x1c, 0xfd, 0x68, 0x42, 0x77, 0x5a, 0xe8, 0xaf, 0x51, 0x60, 0x4c, 0x95, 0xfb, 0xdd, + 0x86, 0xce, 0x39, 0x4b, 0xb5, 0x62, 0x8b, 0x2c, 0x6f, 0x49, 0x1e, 0x40, 0x2b, 0x4a, 0x68, 0x98, + 0x29, 0xe6, 0x58, 0x7d, 0x6b, 0x70, 0x10, 0x34, 0xa3, 0x84, 0xce, 0x15, 0x23, 0xd7, 0xd0, 0x41, + 0x15, 0xad, 0x98, 0xa6, 0x91, 0xce, 0x14, 0x75, 0xec, 0xbe, 0x35, 0xe8, 0x9e, 0x5c, 0x7a, 0xb7, + 0x9f, 0xd2, 0xdb, 0xed, 0xed, 0x0d, 0x6b, 0xb8, 0x60, 0x07, 0x4e, 0x38, 0x74, 0x39, 0x6a, 0x9a, + 0xea, 0x70, 0x43, 0x55, 0xca, 0xa4, 0x70, 0x1a, 0x7d, 0x6b, 0xd0, 0x3e, 0x19, 0xdf, 0xa5, 0xdd, + 0x55, 0x3d, 0x82, 0xd9, 0x36, 0xa1, 0xde, 0x55, 0x01, 0x0b, 0xfe, 0x2f, 0xe0, 0xe5, 0x27, 0x79, + 0x08, 0xb0, 0x46, 0x56, 0x72, 0x9c, 0x3d, 0x33, 0x76, 0xed, 0x84, 0xf4, 0xa0, 0x91, 0x29, 0xee, + 0x34, 0x8d, 0x90, 0x97, 0xa4, 0x0f, 0xed, 0x25, 0x4d, 0x23, 0xc5, 0x92, 0x3c, 0x34, 0xa7, 0x65, + 0x94, 0xfa, 0x91, 0xfb, 0xd5, 0x82, 0xfd, 0x57, 0x32, 0xc2, 0xbf, 0x87, 0x1a, 0x42, 0xab, 0x1a, + 0xd0, 0xfe, 0x97, 0x03, 0x56, 0x54, 0x42, 0x60, 0x2f, 0x41, 0xbd, 0x32, 0xf1, 0x1d, 0x04, 0xa6, + 0x76, 0x3f, 0x5b, 0xd0, 0x2a, 0x57, 0x91, 0xeb, 0x02, 0xd7, 0xb4, 0xb4, 0x65, 0xea, 0x7c, 0xd3, + 0xcb, 0xda, 0x95, 0x70, 0xa0, 0xdf, 0x18, 0xb4, 0xef, 0xb5, 0xe9, 0xfa, 0x0d, 0x0b, 0x76, 0xe0, + 0xee, 0x27, 0x0b, 0x3a, 0x13, 0x91, 0x6a, 0xe4, 0xbc, 0xc8, 0xea, 0x77, 0x8e, 0x42, 0xd8, 0xe7, + 0x65, 0x96, 0x8e, 0x6d, 0xdc, 0x8c, 0xee, 0xe1, 0xa6, 0x5a, 0x4b, 0xf0, 0x0b, 0x7a, 0xf4, 0x02, + 0x3a, 0xf5, 0xdb, 0x48, 0x0e, 0xc1, 0x19, 0x06, 0xa3, 0x97, 0x93, 0xd9, 0x78, 0x34, 0x9b, 0x07, + 0xe3, 0x70, 0xfe, 0xe6, 0xed, 0x74, 0x3c, 0x9a, 0x5c, 0x4c, 0xc6, 0xe7, 0xbd, 0xff, 0x48, 0x0b, + 0x1a, 0xef, 0x9e, 0x9d, 0xf6, 0x2c, 0x53, 0x9c, 0x3e, 0xed, 0xd9, 0x67, 0x5f, 0x2c, 0x78, 0x14, + 0xc9, 0x75, 0x65, 0xea, 0xcf, 0x5e, 0xa6, 0xd6, 0xfb, 0x0f, 0xe5, 0x4f, 0xb1, 0xe4, 0x28, 0x62, + 0x4f, 0xaa, 0xd8, 0x8f, 0xa9, 0x30, 0x2f, 0xd4, 0x2f, 0x24, 0x4c, 0x58, 0x7a, 0x9b, 0xc7, 0xfe, + 0xfc, 0x86, 0xf4, 0xcd, 0x6e, 0x5c, 0x8e, 0x86, 0x8b, 0xa6, 0xa1, 0x3d, 0xf9, 0x19, 0x00, 0x00, + 0xff, 0xff, 0xfa, 0x4f, 0xa4, 0x56, 0xc7, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/containeranalysis.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/containeranalysis.pb.go new file mode 100644 index 0000000..7a30d6d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/containeranalysis.pb.go @@ -0,0 +1,4434 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/containeranalysis/v1alpha1/containeranalysis.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/any" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// This must be 1:1 with members of our oneofs, it can be used for filtering +// Note and Occurrence on their kind. +type Note_Kind int32 + +const ( + // Unknown + Note_KIND_UNSPECIFIED Note_Kind = 0 + // The note and occurrence represent a package vulnerability. + Note_PACKAGE_VULNERABILITY Note_Kind = 2 + // The note and occurrence assert build provenance. + Note_BUILD_DETAILS Note_Kind = 3 + // This represents an image basis relationship. + Note_IMAGE_BASIS Note_Kind = 4 + // This represents a package installed via a package manager. + Note_PACKAGE_MANAGER Note_Kind = 5 + // The note and occurrence track deployment events. + Note_DEPLOYABLE Note_Kind = 6 + // The note and occurrence track the initial discovery status of a resource. + Note_DISCOVERY Note_Kind = 7 + // This represents a logical "role" that can attest to artifacts. + Note_ATTESTATION_AUTHORITY Note_Kind = 8 +) + +var Note_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 2: "PACKAGE_VULNERABILITY", + 3: "BUILD_DETAILS", + 4: "IMAGE_BASIS", + 5: "PACKAGE_MANAGER", + 6: "DEPLOYABLE", + 7: "DISCOVERY", + 8: "ATTESTATION_AUTHORITY", +} +var Note_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "PACKAGE_VULNERABILITY": 2, + "BUILD_DETAILS": 3, + "IMAGE_BASIS": 4, + "PACKAGE_MANAGER": 5, + "DEPLOYABLE": 6, + "DISCOVERY": 7, + "ATTESTATION_AUTHORITY": 8, +} + +func (x Note_Kind) String() string { + return proto.EnumName(Note_Kind_name, int32(x)) +} +func (Note_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{2, 0} +} + +// Types of platforms. 
+type Deployable_Deployment_Platform int32 + +const ( + // Unknown + Deployable_Deployment_PLATFORM_UNSPECIFIED Deployable_Deployment_Platform = 0 + // Google Container Engine + Deployable_Deployment_GKE Deployable_Deployment_Platform = 1 + // Google App Engine: Flexible Environment + Deployable_Deployment_FLEX Deployable_Deployment_Platform = 2 + // Custom user-defined platform + Deployable_Deployment_CUSTOM Deployable_Deployment_Platform = 3 +) + +var Deployable_Deployment_Platform_name = map[int32]string{ + 0: "PLATFORM_UNSPECIFIED", + 1: "GKE", + 2: "FLEX", + 3: "CUSTOM", +} +var Deployable_Deployment_Platform_value = map[string]int32{ + "PLATFORM_UNSPECIFIED": 0, + "GKE": 1, + "FLEX": 2, + "CUSTOM": 3, +} + +func (x Deployable_Deployment_Platform) String() string { + return proto.EnumName(Deployable_Deployment_Platform_name, int32(x)) +} +func (Deployable_Deployment_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{3, 0, 0} +} + +// Analysis status for a resource. +type Discovery_Discovered_AnalysisStatus int32 + +const ( + // Unknown + Discovery_Discovered_ANALYSIS_STATUS_UNSPECIFIED Discovery_Discovered_AnalysisStatus = 0 + // Resource is known but no action has been taken yet. + Discovery_Discovered_PENDING Discovery_Discovered_AnalysisStatus = 1 + // Resource is being analyzed. + Discovery_Discovered_SCANNING Discovery_Discovered_AnalysisStatus = 2 + // Analysis has finished successfully. + Discovery_Discovered_FINISHED_SUCCESS Discovery_Discovered_AnalysisStatus = 3 + // Analysis has finished unsuccessfully, the analysis itself is in a bad + // state. + Discovery_Discovered_FINISHED_FAILED Discovery_Discovered_AnalysisStatus = 4 + // Analysis will not happen, the resource is not supported. + Discovery_Discovered_UNSUPPORTED_RESOURCE Discovery_Discovered_AnalysisStatus = 5 +) + +var Discovery_Discovered_AnalysisStatus_name = map[int32]string{ + 0: "ANALYSIS_STATUS_UNSPECIFIED", + 1: "PENDING", + 2: "SCANNING", + 3: "FINISHED_SUCCESS", + 4: "FINISHED_FAILED", + 5: "UNSUPPORTED_RESOURCE", +} +var Discovery_Discovered_AnalysisStatus_value = map[string]int32{ + "ANALYSIS_STATUS_UNSPECIFIED": 0, + "PENDING": 1, + "SCANNING": 2, + "FINISHED_SUCCESS": 3, + "FINISHED_FAILED": 4, + "UNSUPPORTED_RESOURCE": 5, +} + +func (x Discovery_Discovered_AnalysisStatus) String() string { + return proto.EnumName(Discovery_Discovered_AnalysisStatus_name, int32(x)) +} +func (Discovery_Discovered_AnalysisStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{4, 0, 0} +} + +// Public key formats +type BuildSignature_KeyType int32 + +const ( + // `KeyType` is not set. + BuildSignature_KEY_TYPE_UNSPECIFIED BuildSignature_KeyType = 0 + // `PGP ASCII Armored` public key. + BuildSignature_PGP_ASCII_ARMORED BuildSignature_KeyType = 1 + // `PKIX PEM` public key. + BuildSignature_PKIX_PEM BuildSignature_KeyType = 2 +) + +var BuildSignature_KeyType_name = map[int32]string{ + 0: "KEY_TYPE_UNSPECIFIED", + 1: "PGP_ASCII_ARMORED", + 2: "PKIX_PEM", +} +var BuildSignature_KeyType_value = map[string]int32{ + "KEY_TYPE_UNSPECIFIED": 0, + "PGP_ASCII_ARMORED": 1, + "PKIX_PEM": 2, +} + +func (x BuildSignature_KeyType) String() string { + return proto.EnumName(BuildSignature_KeyType_name, int32(x)) +} +func (BuildSignature_KeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{6, 0} +} + +// Type (for example schema) of the attestation payload that was signed. 
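Each enum emitted by protoc-gen-go in this file carries parallel _name/_value maps and a String() method. A small illustrative sketch (not part of the vendored containeranalysis.pb.go) of round-tripping the Deployable_Deployment_Platform values shown above:

package main

import (
	"fmt"

	containeranalysis "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1"
)

func main() {
	// String() resolves through the generated _name map.
	p := containeranalysis.Deployable_Deployment_GKE
	fmt.Println(p) // prints "GKE"

	// The generated _value map goes the other way, from label to int32.
	v := containeranalysis.Deployable_Deployment_Platform_value["CUSTOM"]
	fmt.Println(containeranalysis.Deployable_Deployment_Platform(v)) // prints "CUSTOM"
}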
+type PgpSignedAttestation_ContentType int32 + +const ( + // `ContentType` is not set. + PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED PgpSignedAttestation_ContentType = 0 + // Atomic format attestation signature. See + // https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md + // The payload extracted from `signature` is a JSON blob conforming to the + // linked schema. + PgpSignedAttestation_SIMPLE_SIGNING_JSON PgpSignedAttestation_ContentType = 1 +) + +var PgpSignedAttestation_ContentType_name = map[int32]string{ + 0: "CONTENT_TYPE_UNSPECIFIED", + 1: "SIMPLE_SIGNING_JSON", +} +var PgpSignedAttestation_ContentType_value = map[string]int32{ + "CONTENT_TYPE_UNSPECIFIED": 0, + "SIMPLE_SIGNING_JSON": 1, +} + +func (x PgpSignedAttestation_ContentType) String() string { + return proto.EnumName(PgpSignedAttestation_ContentType_name, int32(x)) +} +func (PgpSignedAttestation_ContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{7, 0} +} + +// `Occurrence` includes information about analysis occurrences for an image. +type Occurrence struct { + // Output only. The name of the `Occurrence` in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The unique URL of the image or the container for which the `Occurrence` + // applies. For example, https://gcr.io/project/image@sha256:foo This field + // can be used as a filter in list requests. + ResourceUrl string `protobuf:"bytes,2,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + // The resource for which the `Occurrence` applies. + Resource *Resource `protobuf:"bytes,17,opt,name=resource,proto3" json:"resource,omitempty"` + // An analysis note associated with this image, in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + // This field can be used as a filter in list requests. + NoteName string `protobuf:"bytes,3,opt,name=note_name,json=noteName,proto3" json:"note_name,omitempty"` + // Output only. This explicitly denotes which of the `Occurrence` details are + // specified. This field can be used as a filter in list requests. + Kind Note_Kind `protobuf:"varint,6,opt,name=kind,proto3,enum=google.devtools.containeranalysis.v1alpha1.Note_Kind" json:"kind,omitempty"` + // Describes the details of the vulnerability `Note` found in this resource. + // + // Types that are valid to be assigned to Details: + // *Occurrence_VulnerabilityDetails + // *Occurrence_BuildDetails + // *Occurrence_DerivedImage + // *Occurrence_Installation + // *Occurrence_Deployment + // *Occurrence_Discovered + // *Occurrence_Attestation + Details isOccurrence_Details `protobuf_oneof:"details"` + // A description of actions that can be taken to remedy the `Note` + Remediation string `protobuf:"bytes,5,opt,name=remediation,proto3" json:"remediation,omitempty"` + // Output only. The time this `Occurrence` was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time this `Occurrence` was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Occurrence) Reset() { *m = Occurrence{} } +func (m *Occurrence) String() string { return proto.CompactTextString(m) } +func (*Occurrence) ProtoMessage() {} +func (*Occurrence) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{0} +} +func (m *Occurrence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Occurrence.Unmarshal(m, b) +} +func (m *Occurrence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Occurrence.Marshal(b, m, deterministic) +} +func (dst *Occurrence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Occurrence.Merge(dst, src) +} +func (m *Occurrence) XXX_Size() int { + return xxx_messageInfo_Occurrence.Size(m) +} +func (m *Occurrence) XXX_DiscardUnknown() { + xxx_messageInfo_Occurrence.DiscardUnknown(m) +} + +var xxx_messageInfo_Occurrence proto.InternalMessageInfo + +func (m *Occurrence) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Occurrence) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *Occurrence) GetResource() *Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *Occurrence) GetNoteName() string { + if m != nil { + return m.NoteName + } + return "" +} + +func (m *Occurrence) GetKind() Note_Kind { + if m != nil { + return m.Kind + } + return Note_KIND_UNSPECIFIED +} + +type isOccurrence_Details interface { + isOccurrence_Details() +} + +type Occurrence_VulnerabilityDetails struct { + VulnerabilityDetails *VulnerabilityType_VulnerabilityDetails `protobuf:"bytes,8,opt,name=vulnerability_details,json=vulnerabilityDetails,proto3,oneof"` +} + +type Occurrence_BuildDetails struct { + BuildDetails *BuildDetails `protobuf:"bytes,7,opt,name=build_details,json=buildDetails,proto3,oneof"` +} + +type Occurrence_DerivedImage struct { + DerivedImage *DockerImage_Derived `protobuf:"bytes,11,opt,name=derived_image,json=derivedImage,proto3,oneof"` +} + +type Occurrence_Installation struct { + Installation *PackageManager_Installation `protobuf:"bytes,12,opt,name=installation,proto3,oneof"` +} + +type Occurrence_Deployment struct { + Deployment *Deployable_Deployment `protobuf:"bytes,14,opt,name=deployment,proto3,oneof"` +} + +type Occurrence_Discovered struct { + Discovered *Discovery_Discovered `protobuf:"bytes,15,opt,name=discovered,proto3,oneof"` +} + +type Occurrence_Attestation struct { + Attestation *AttestationAuthority_Attestation `protobuf:"bytes,16,opt,name=attestation,proto3,oneof"` +} + +func (*Occurrence_VulnerabilityDetails) isOccurrence_Details() {} + +func (*Occurrence_BuildDetails) isOccurrence_Details() {} + +func (*Occurrence_DerivedImage) isOccurrence_Details() {} + +func (*Occurrence_Installation) isOccurrence_Details() {} + +func (*Occurrence_Deployment) isOccurrence_Details() {} + +func (*Occurrence_Discovered) isOccurrence_Details() {} + +func (*Occurrence_Attestation) isOccurrence_Details() {} + +func (m *Occurrence) GetDetails() isOccurrence_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *Occurrence) GetVulnerabilityDetails() *VulnerabilityType_VulnerabilityDetails { + if x, ok := m.GetDetails().(*Occurrence_VulnerabilityDetails); ok { + return x.VulnerabilityDetails + } + return 
nil +} + +func (m *Occurrence) GetBuildDetails() *BuildDetails { + if x, ok := m.GetDetails().(*Occurrence_BuildDetails); ok { + return x.BuildDetails + } + return nil +} + +func (m *Occurrence) GetDerivedImage() *DockerImage_Derived { + if x, ok := m.GetDetails().(*Occurrence_DerivedImage); ok { + return x.DerivedImage + } + return nil +} + +func (m *Occurrence) GetInstallation() *PackageManager_Installation { + if x, ok := m.GetDetails().(*Occurrence_Installation); ok { + return x.Installation + } + return nil +} + +func (m *Occurrence) GetDeployment() *Deployable_Deployment { + if x, ok := m.GetDetails().(*Occurrence_Deployment); ok { + return x.Deployment + } + return nil +} + +func (m *Occurrence) GetDiscovered() *Discovery_Discovered { + if x, ok := m.GetDetails().(*Occurrence_Discovered); ok { + return x.Discovered + } + return nil +} + +func (m *Occurrence) GetAttestation() *AttestationAuthority_Attestation { + if x, ok := m.GetDetails().(*Occurrence_Attestation); ok { + return x.Attestation + } + return nil +} + +func (m *Occurrence) GetRemediation() string { + if m != nil { + return m.Remediation + } + return "" +} + +func (m *Occurrence) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Occurrence) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Occurrence) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Occurrence_OneofMarshaler, _Occurrence_OneofUnmarshaler, _Occurrence_OneofSizer, []interface{}{ + (*Occurrence_VulnerabilityDetails)(nil), + (*Occurrence_BuildDetails)(nil), + (*Occurrence_DerivedImage)(nil), + (*Occurrence_Installation)(nil), + (*Occurrence_Deployment)(nil), + (*Occurrence_Discovered)(nil), + (*Occurrence_Attestation)(nil), + } +} + +func _Occurrence_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_VulnerabilityDetails: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VulnerabilityDetails); err != nil { + return err + } + case *Occurrence_BuildDetails: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildDetails); err != nil { + return err + } + case *Occurrence_DerivedImage: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DerivedImage); err != nil { + return err + } + case *Occurrence_Installation: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Installation); err != nil { + return err + } + case *Occurrence_Deployment: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployment); err != nil { + return err + } + case *Occurrence_Discovered: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovered); err != nil { + return err + } + case *Occurrence_Attestation: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Attestation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Occurrence.Details has unexpected type %T", x) + } + return nil +} + +func _Occurrence_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Occurrence) + switch tag { + case 8: // details.vulnerability_details + if wire != proto.WireBytes { + 
return true, proto.ErrInternalBadWireType + } + msg := new(VulnerabilityType_VulnerabilityDetails) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_VulnerabilityDetails{msg} + return true, err + case 7: // details.build_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildDetails) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_BuildDetails{msg} + return true, err + case 11: // details.derived_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DockerImage_Derived) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_DerivedImage{msg} + return true, err + case 12: // details.installation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PackageManager_Installation) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Installation{msg} + return true, err + case 14: // details.deployment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Deployable_Deployment) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Deployment{msg} + return true, err + case 15: // details.discovered + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Discovery_Discovered) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Discovered{msg} + return true, err + case 16: // details.attestation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AttestationAuthority_Attestation) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Attestation{msg} + return true, err + default: + return false, nil + } +} + +func _Occurrence_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_VulnerabilityDetails: + s := proto.Size(x.VulnerabilityDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_BuildDetails: + s := proto.Size(x.BuildDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_DerivedImage: + s := proto.Size(x.DerivedImage) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Installation: + s := proto.Size(x.Installation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Deployment: + s := proto.Size(x.Deployment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Discovered: + s := proto.Size(x.Discovered) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Attestation: + s := proto.Size(x.Attestation) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Resource is an entity that can have metadata. E.g., a Docker image. +type Resource struct { + // The name of the resource. E.g., the name of a Docker image - "Debian". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The unique URI of the resource. E.g., + // "https://gcr.io/project/image@sha256:foo" for a Docker image. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // The hash of the resource content. E.g., the Docker digest. 
+ ContentHash *Hash `protobuf:"bytes,3,opt,name=content_hash,json=contentHash,proto3" json:"content_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{1} +} +func (m *Resource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resource.Unmarshal(m, b) +} +func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resource.Marshal(b, m, deterministic) +} +func (dst *Resource) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resource.Merge(dst, src) +} +func (m *Resource) XXX_Size() int { + return xxx_messageInfo_Resource.Size(m) +} +func (m *Resource) XXX_DiscardUnknown() { + xxx_messageInfo_Resource.DiscardUnknown(m) +} + +var xxx_messageInfo_Resource proto.InternalMessageInfo + +func (m *Resource) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Resource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *Resource) GetContentHash() *Hash { + if m != nil { + return m.ContentHash + } + return nil +} + +// Provides a detailed description of a `Note`. +type Note struct { + // The name of the note in the form + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A one sentence description of this `Note`. + ShortDescription string `protobuf:"bytes,3,opt,name=short_description,json=shortDescription,proto3" json:"short_description,omitempty"` + // A detailed description of this `Note`. + LongDescription string `protobuf:"bytes,4,opt,name=long_description,json=longDescription,proto3" json:"long_description,omitempty"` + // Output only. This explicitly denotes which kind of note is specified. This + // field can be used as a filter in list requests. + Kind Note_Kind `protobuf:"varint,9,opt,name=kind,proto3,enum=google.devtools.containeranalysis.v1alpha1.Note_Kind" json:"kind,omitempty"` + // The type of note. + // + // Types that are valid to be assigned to NoteType: + // *Note_VulnerabilityType + // *Note_BuildType + // *Note_BaseImage + // *Note_Package + // *Note_Deployable + // *Note_Discovery + // *Note_AttestationAuthority + NoteType isNote_NoteType `protobuf_oneof:"note_type"` + // URLs associated with this note + RelatedUrl []*Note_RelatedUrl `protobuf:"bytes,7,rep,name=related_url,json=relatedUrl,proto3" json:"related_url,omitempty"` + // Time of expiration for this note, null if note does not expire. + ExpirationTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=expiration_time,json=expirationTime,proto3" json:"expiration_time,omitempty"` + // Output only. The time this note was created. This field can be used as a + // filter in list requests. + CreateTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time this note was last updated. This field can be used as + // a filter in list requests. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,12,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Note) Reset() { *m = Note{} } +func (m *Note) String() string { return proto.CompactTextString(m) } +func (*Note) ProtoMessage() {} +func (*Note) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{2} +} +func (m *Note) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Note.Unmarshal(m, b) +} +func (m *Note) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Note.Marshal(b, m, deterministic) +} +func (dst *Note) XXX_Merge(src proto.Message) { + xxx_messageInfo_Note.Merge(dst, src) +} +func (m *Note) XXX_Size() int { + return xxx_messageInfo_Note.Size(m) +} +func (m *Note) XXX_DiscardUnknown() { + xxx_messageInfo_Note.DiscardUnknown(m) +} + +var xxx_messageInfo_Note proto.InternalMessageInfo + +func (m *Note) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Note) GetShortDescription() string { + if m != nil { + return m.ShortDescription + } + return "" +} + +func (m *Note) GetLongDescription() string { + if m != nil { + return m.LongDescription + } + return "" +} + +func (m *Note) GetKind() Note_Kind { + if m != nil { + return m.Kind + } + return Note_KIND_UNSPECIFIED +} + +type isNote_NoteType interface { + isNote_NoteType() +} + +type Note_VulnerabilityType struct { + VulnerabilityType *VulnerabilityType `protobuf:"bytes,6,opt,name=vulnerability_type,json=vulnerabilityType,proto3,oneof"` +} + +type Note_BuildType struct { + BuildType *BuildType `protobuf:"bytes,8,opt,name=build_type,json=buildType,proto3,oneof"` +} + +type Note_BaseImage struct { + BaseImage *DockerImage_Basis `protobuf:"bytes,13,opt,name=base_image,json=baseImage,proto3,oneof"` +} + +type Note_Package struct { + Package *PackageManager_Package `protobuf:"bytes,14,opt,name=package,proto3,oneof"` +} + +type Note_Deployable struct { + Deployable *Deployable `protobuf:"bytes,17,opt,name=deployable,proto3,oneof"` +} + +type Note_Discovery struct { + Discovery *Discovery `protobuf:"bytes,18,opt,name=discovery,proto3,oneof"` +} + +type Note_AttestationAuthority struct { + AttestationAuthority *AttestationAuthority `protobuf:"bytes,19,opt,name=attestation_authority,json=attestationAuthority,proto3,oneof"` +} + +func (*Note_VulnerabilityType) isNote_NoteType() {} + +func (*Note_BuildType) isNote_NoteType() {} + +func (*Note_BaseImage) isNote_NoteType() {} + +func (*Note_Package) isNote_NoteType() {} + +func (*Note_Deployable) isNote_NoteType() {} + +func (*Note_Discovery) isNote_NoteType() {} + +func (*Note_AttestationAuthority) isNote_NoteType() {} + +func (m *Note) GetNoteType() isNote_NoteType { + if m != nil { + return m.NoteType + } + return nil +} + +func (m *Note) GetVulnerabilityType() *VulnerabilityType { + if x, ok := m.GetNoteType().(*Note_VulnerabilityType); ok { + return x.VulnerabilityType + } + return nil +} + +func (m *Note) GetBuildType() *BuildType { + if x, ok := m.GetNoteType().(*Note_BuildType); ok { + return x.BuildType + } + return nil +} + +func (m *Note) GetBaseImage() *DockerImage_Basis { + if x, ok := m.GetNoteType().(*Note_BaseImage); ok { + return x.BaseImage + } + return nil +} + +func (m *Note) GetPackage() *PackageManager_Package { + if x, ok := m.GetNoteType().(*Note_Package); ok { + return x.Package + } + return nil +} + +func 
(m *Note) GetDeployable() *Deployable { + if x, ok := m.GetNoteType().(*Note_Deployable); ok { + return x.Deployable + } + return nil +} + +func (m *Note) GetDiscovery() *Discovery { + if x, ok := m.GetNoteType().(*Note_Discovery); ok { + return x.Discovery + } + return nil +} + +func (m *Note) GetAttestationAuthority() *AttestationAuthority { + if x, ok := m.GetNoteType().(*Note_AttestationAuthority); ok { + return x.AttestationAuthority + } + return nil +} + +func (m *Note) GetRelatedUrl() []*Note_RelatedUrl { + if m != nil { + return m.RelatedUrl + } + return nil +} + +func (m *Note) GetExpirationTime() *timestamp.Timestamp { + if m != nil { + return m.ExpirationTime + } + return nil +} + +func (m *Note) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Note) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Note) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Note_OneofMarshaler, _Note_OneofUnmarshaler, _Note_OneofSizer, []interface{}{ + (*Note_VulnerabilityType)(nil), + (*Note_BuildType)(nil), + (*Note_BaseImage)(nil), + (*Note_Package)(nil), + (*Note_Deployable)(nil), + (*Note_Discovery)(nil), + (*Note_AttestationAuthority)(nil), + } +} + +func _Note_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Note) + // note_type + switch x := m.NoteType.(type) { + case *Note_VulnerabilityType: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.VulnerabilityType); err != nil { + return err + } + case *Note_BuildType: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildType); err != nil { + return err + } + case *Note_BaseImage: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BaseImage); err != nil { + return err + } + case *Note_Package: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Package); err != nil { + return err + } + case *Note_Deployable: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployable); err != nil { + return err + } + case *Note_Discovery: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovery); err != nil { + return err + } + case *Note_AttestationAuthority: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AttestationAuthority); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Note.NoteType has unexpected type %T", x) + } + return nil +} + +func _Note_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Note) + switch tag { + case 6: // note_type.vulnerability_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VulnerabilityType) + err := b.DecodeMessage(msg) + m.NoteType = &Note_VulnerabilityType{msg} + return true, err + case 8: // note_type.build_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildType) + err := b.DecodeMessage(msg) + m.NoteType = &Note_BuildType{msg} + return true, err + case 13: // note_type.base_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DockerImage_Basis) + err := b.DecodeMessage(msg) + m.NoteType = &Note_BaseImage{msg} + 
return true, err + case 14: // note_type.package + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PackageManager_Package) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Package{msg} + return true, err + case 17: // note_type.deployable + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Deployable) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Deployable{msg} + return true, err + case 18: // note_type.discovery + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Discovery) + err := b.DecodeMessage(msg) + m.NoteType = &Note_Discovery{msg} + return true, err + case 19: // note_type.attestation_authority + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AttestationAuthority) + err := b.DecodeMessage(msg) + m.NoteType = &Note_AttestationAuthority{msg} + return true, err + default: + return false, nil + } +} + +func _Note_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Note) + // note_type + switch x := m.NoteType.(type) { + case *Note_VulnerabilityType: + s := proto.Size(x.VulnerabilityType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_BuildType: + s := proto.Size(x.BuildType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_BaseImage: + s := proto.Size(x.BaseImage) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Package: + s := proto.Size(x.Package) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Deployable: + s := proto.Size(x.Deployable) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Discovery: + s := proto.Size(x.Discovery) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_AttestationAuthority: + s := proto.Size(x.AttestationAuthority) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Metadata for any related URL information +type Note_RelatedUrl struct { + // Specific URL to associate with the note + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Label to describe usage of the URL + Label string `protobuf:"bytes,2,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Note_RelatedUrl) Reset() { *m = Note_RelatedUrl{} } +func (m *Note_RelatedUrl) String() string { return proto.CompactTextString(m) } +func (*Note_RelatedUrl) ProtoMessage() {} +func (*Note_RelatedUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{2, 0} +} +func (m *Note_RelatedUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Note_RelatedUrl.Unmarshal(m, b) +} +func (m *Note_RelatedUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Note_RelatedUrl.Marshal(b, m, deterministic) +} +func (dst *Note_RelatedUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_Note_RelatedUrl.Merge(dst, src) +} +func (m *Note_RelatedUrl) XXX_Size() int { + return xxx_messageInfo_Note_RelatedUrl.Size(m) +} +func (m *Note_RelatedUrl) XXX_DiscardUnknown() { + xxx_messageInfo_Note_RelatedUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_Note_RelatedUrl proto.InternalMessageInfo + +func (m 
*Note_RelatedUrl) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Note_RelatedUrl) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +// An artifact that can be deployed in some runtime. +type Deployable struct { + // Resource URI for the artifact being deployed. + ResourceUri []string `protobuf:"bytes,1,rep,name=resource_uri,json=resourceUri,proto3" json:"resource_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployable) Reset() { *m = Deployable{} } +func (m *Deployable) String() string { return proto.CompactTextString(m) } +func (*Deployable) ProtoMessage() {} +func (*Deployable) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{3} +} +func (m *Deployable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployable.Unmarshal(m, b) +} +func (m *Deployable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployable.Marshal(b, m, deterministic) +} +func (dst *Deployable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployable.Merge(dst, src) +} +func (m *Deployable) XXX_Size() int { + return xxx_messageInfo_Deployable.Size(m) +} +func (m *Deployable) XXX_DiscardUnknown() { + xxx_messageInfo_Deployable.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployable proto.InternalMessageInfo + +func (m *Deployable) GetResourceUri() []string { + if m != nil { + return m.ResourceUri + } + return nil +} + +// The period during which some deployable was active in a runtime. +type Deployable_Deployment struct { + // Identity of the user that triggered this deployment. + UserEmail string `protobuf:"bytes,1,opt,name=user_email,json=userEmail,proto3" json:"user_email,omitempty"` + // Beginning of the lifetime of this deployment. + DeployTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=deploy_time,json=deployTime,proto3" json:"deploy_time,omitempty"` + // End of the lifetime of this deployment. + UndeployTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=undeploy_time,json=undeployTime,proto3" json:"undeploy_time,omitempty"` + // Configuration used to create this deployment. + Config string `protobuf:"bytes,8,opt,name=config,proto3" json:"config,omitempty"` + // Address of the runtime element hosting this deployment. + Address string `protobuf:"bytes,5,opt,name=address,proto3" json:"address,omitempty"` + // Output only. Resource URI for the artifact being deployed taken from the + // deployable field with the same name. + ResourceUri []string `protobuf:"bytes,6,rep,name=resource_uri,json=resourceUri,proto3" json:"resource_uri,omitempty"` + // Platform hosting this deployment. 
+ Platform Deployable_Deployment_Platform `protobuf:"varint,7,opt,name=platform,proto3,enum=google.devtools.containeranalysis.v1alpha1.Deployable_Deployment_Platform" json:"platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployable_Deployment) Reset() { *m = Deployable_Deployment{} } +func (m *Deployable_Deployment) String() string { return proto.CompactTextString(m) } +func (*Deployable_Deployment) ProtoMessage() {} +func (*Deployable_Deployment) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{3, 0} +} +func (m *Deployable_Deployment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployable_Deployment.Unmarshal(m, b) +} +func (m *Deployable_Deployment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployable_Deployment.Marshal(b, m, deterministic) +} +func (dst *Deployable_Deployment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployable_Deployment.Merge(dst, src) +} +func (m *Deployable_Deployment) XXX_Size() int { + return xxx_messageInfo_Deployable_Deployment.Size(m) +} +func (m *Deployable_Deployment) XXX_DiscardUnknown() { + xxx_messageInfo_Deployable_Deployment.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployable_Deployment proto.InternalMessageInfo + +func (m *Deployable_Deployment) GetUserEmail() string { + if m != nil { + return m.UserEmail + } + return "" +} + +func (m *Deployable_Deployment) GetDeployTime() *timestamp.Timestamp { + if m != nil { + return m.DeployTime + } + return nil +} + +func (m *Deployable_Deployment) GetUndeployTime() *timestamp.Timestamp { + if m != nil { + return m.UndeployTime + } + return nil +} + +func (m *Deployable_Deployment) GetConfig() string { + if m != nil { + return m.Config + } + return "" +} + +func (m *Deployable_Deployment) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *Deployable_Deployment) GetResourceUri() []string { + if m != nil { + return m.ResourceUri + } + return nil +} + +func (m *Deployable_Deployment) GetPlatform() Deployable_Deployment_Platform { + if m != nil { + return m.Platform + } + return Deployable_Deployment_PLATFORM_UNSPECIFIED +} + +// A note that indicates a type of analysis a provider would perform. This note +// exists in a provider's project. A `Discovery` occurrence is created in a +// consumer's project at the start of analysis. The occurrence's operation will +// indicate the status of the analysis. Absence of an occurrence linked to this +// note for a resource indicates that analysis hasn't started. +type Discovery struct { + // The kind of analysis that is handled by this discovery. 
+ AnalysisKind Note_Kind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,proto3,enum=google.devtools.containeranalysis.v1alpha1.Note_Kind" json:"analysis_kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Discovery) Reset() { *m = Discovery{} } +func (m *Discovery) String() string { return proto.CompactTextString(m) } +func (*Discovery) ProtoMessage() {} +func (*Discovery) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{4} +} +func (m *Discovery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovery.Unmarshal(m, b) +} +func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovery.Marshal(b, m, deterministic) +} +func (dst *Discovery) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovery.Merge(dst, src) +} +func (m *Discovery) XXX_Size() int { + return xxx_messageInfo_Discovery.Size(m) +} +func (m *Discovery) XXX_DiscardUnknown() { + xxx_messageInfo_Discovery.DiscardUnknown(m) +} + +var xxx_messageInfo_Discovery proto.InternalMessageInfo + +func (m *Discovery) GetAnalysisKind() Note_Kind { + if m != nil { + return m.AnalysisKind + } + return Note_KIND_UNSPECIFIED +} + +// Provides information about the scan status of a discovered resource. +type Discovery_Discovered struct { + // Output only. An operation that indicates the status of the current scan. + Operation *longrunning.Operation `protobuf:"bytes,1,opt,name=operation,proto3" json:"operation,omitempty"` + // The status of discovery for the resource. + AnalysisStatus Discovery_Discovered_AnalysisStatus `protobuf:"varint,5,opt,name=analysis_status,json=analysisStatus,proto3,enum=google.devtools.containeranalysis.v1alpha1.Discovery_Discovered_AnalysisStatus" json:"analysis_status,omitempty"` + // When an error is encountered this will contain a LocalizedMessage under + // details to show to the user. The LocalizedMessage is output only and + // populated by the API. 
+ AnalysisStatusError *status.Status `protobuf:"bytes,6,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Discovery_Discovered) Reset() { *m = Discovery_Discovered{} } +func (m *Discovery_Discovered) String() string { return proto.CompactTextString(m) } +func (*Discovery_Discovered) ProtoMessage() {} +func (*Discovery_Discovered) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{4, 0} +} +func (m *Discovery_Discovered) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovery_Discovered.Unmarshal(m, b) +} +func (m *Discovery_Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovery_Discovered.Marshal(b, m, deterministic) +} +func (dst *Discovery_Discovered) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovery_Discovered.Merge(dst, src) +} +func (m *Discovery_Discovered) XXX_Size() int { + return xxx_messageInfo_Discovery_Discovered.Size(m) +} +func (m *Discovery_Discovered) XXX_DiscardUnknown() { + xxx_messageInfo_Discovery_Discovered.DiscardUnknown(m) +} + +var xxx_messageInfo_Discovery_Discovered proto.InternalMessageInfo + +func (m *Discovery_Discovered) GetOperation() *longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Discovery_Discovered) GetAnalysisStatus() Discovery_Discovered_AnalysisStatus { + if m != nil { + return m.AnalysisStatus + } + return Discovery_Discovered_ANALYSIS_STATUS_UNSPECIFIED +} + +func (m *Discovery_Discovered) GetAnalysisStatusError() *status.Status { + if m != nil { + return m.AnalysisStatusError + } + return nil +} + +// Note holding the version of the provider's builder and the signature of +// the provenance message in linked BuildDetails. +type BuildType struct { + // Version of the builder which produced this Note. + BuilderVersion string `protobuf:"bytes,1,opt,name=builder_version,json=builderVersion,proto3" json:"builder_version,omitempty"` + // Signature of the build in Occurrences pointing to the Note containing this + // `BuilderDetails`. 
+ Signature *BuildSignature `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildType) Reset() { *m = BuildType{} } +func (m *BuildType) String() string { return proto.CompactTextString(m) } +func (*BuildType) ProtoMessage() {} +func (*BuildType) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{5} +} +func (m *BuildType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildType.Unmarshal(m, b) +} +func (m *BuildType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildType.Marshal(b, m, deterministic) +} +func (dst *BuildType) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildType.Merge(dst, src) +} +func (m *BuildType) XXX_Size() int { + return xxx_messageInfo_BuildType.Size(m) +} +func (m *BuildType) XXX_DiscardUnknown() { + xxx_messageInfo_BuildType.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildType proto.InternalMessageInfo + +func (m *BuildType) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +func (m *BuildType) GetSignature() *BuildSignature { + if m != nil { + return m.Signature + } + return nil +} + +// Message encapsulating the signature of the verified build. +type BuildSignature struct { + // Public key of the builder which can be used to verify that the related + // findings are valid and unchanged. If `key_type` is empty, this defaults + // to PEM encoded public keys. + // + // This field may be empty if `key_id` references an external key. + // + // For Cloud Build based signatures, this is a PEM encoded public + // key. To verify the Cloud Build signature, place the contents of + // this field into a file (public.pem). The signature field is base64-decoded + // into its binary representation in signature.bin, and the provenance bytes + // from `BuildDetails` are base64-decoded into a binary representation in + // signed.bin. OpenSSL can then verify the signature: + // `openssl sha256 -verify public.pem -signature signature.bin signed.bin` + PublicKey string `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` + // Signature of the related `BuildProvenance`, encoded in a base64 string. + Signature string `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` + // An Id for the key used to sign. This could be either an Id for the key + // stored in `public_key` (such as the Id or fingerprint for a PGP key, or the + // CN for a cert), or a reference to an external key (such as a reference to a + // key in Cloud Key Management Service). 
+ KeyId string `protobuf:"bytes,3,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The type of the key, either stored in `public_key` or referenced in + // `key_id` + KeyType BuildSignature_KeyType `protobuf:"varint,4,opt,name=key_type,json=keyType,proto3,enum=google.devtools.containeranalysis.v1alpha1.BuildSignature_KeyType" json:"key_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildSignature) Reset() { *m = BuildSignature{} } +func (m *BuildSignature) String() string { return proto.CompactTextString(m) } +func (*BuildSignature) ProtoMessage() {} +func (*BuildSignature) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{6} +} +func (m *BuildSignature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildSignature.Unmarshal(m, b) +} +func (m *BuildSignature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildSignature.Marshal(b, m, deterministic) +} +func (dst *BuildSignature) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildSignature.Merge(dst, src) +} +func (m *BuildSignature) XXX_Size() int { + return xxx_messageInfo_BuildSignature.Size(m) +} +func (m *BuildSignature) XXX_DiscardUnknown() { + xxx_messageInfo_BuildSignature.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildSignature proto.InternalMessageInfo + +func (m *BuildSignature) GetPublicKey() string { + if m != nil { + return m.PublicKey + } + return "" +} + +func (m *BuildSignature) GetSignature() string { + if m != nil { + return m.Signature + } + return "" +} + +func (m *BuildSignature) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *BuildSignature) GetKeyType() BuildSignature_KeyType { + if m != nil { + return m.KeyType + } + return BuildSignature_KEY_TYPE_UNSPECIFIED +} + +// An attestation wrapper with a PGP-compatible signature. +// This message only supports `ATTACHED` signatures, where the payload that is +// signed is included alongside the signature itself in the same file. +type PgpSignedAttestation struct { + // The raw content of the signature, as output by GNU Privacy Guard (GPG) or + // equivalent. Since this message only supports attached signatures, the + // payload that was signed must be attached. While the signature format + // supported is dependent on the verification implementation, currently only + // ASCII-armored (`--armor` to gpg), non-clearsigned (`--sign` rather than + // `--clearsign` to gpg) are supported. Concretely, `gpg --sign --armor + // --output=signature.gpg payload.json` will create the signature content + // expected in this field in `signature.gpg` for the `payload.json` + // attestation payload. + Signature string `protobuf:"bytes,1,opt,name=signature,proto3" json:"signature,omitempty"` + // Type (for example schema) of the attestation payload that was signed. + // The verifier must ensure that the provided type is one that the verifier + // supports, and that the attestation payload is a valid instantiation of that + // type (for example by validating a JSON schema). + ContentType PgpSignedAttestation_ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,proto3,enum=google.devtools.containeranalysis.v1alpha1.PgpSignedAttestation_ContentType" json:"content_type,omitempty"` + // This field is used by verifiers to select the public key used to validate + // the signature. 
Note that the policy of the verifier ultimately determines + // which public keys verify a signature based on the context of the + // verification. There is no guarantee validation will succeed if the + // verifier has no key matching this ID, even if it has a key under a + // different ID that would verify the signature. Note that this ID should also + // be present in the signature content above, but that is not expected to be + // used by the verifier. + // + // Types that are valid to be assigned to KeyId: + // *PgpSignedAttestation_PgpKeyId + KeyId isPgpSignedAttestation_KeyId `protobuf_oneof:"key_id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PgpSignedAttestation) Reset() { *m = PgpSignedAttestation{} } +func (m *PgpSignedAttestation) String() string { return proto.CompactTextString(m) } +func (*PgpSignedAttestation) ProtoMessage() {} +func (*PgpSignedAttestation) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{7} +} +func (m *PgpSignedAttestation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PgpSignedAttestation.Unmarshal(m, b) +} +func (m *PgpSignedAttestation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PgpSignedAttestation.Marshal(b, m, deterministic) +} +func (dst *PgpSignedAttestation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PgpSignedAttestation.Merge(dst, src) +} +func (m *PgpSignedAttestation) XXX_Size() int { + return xxx_messageInfo_PgpSignedAttestation.Size(m) +} +func (m *PgpSignedAttestation) XXX_DiscardUnknown() { + xxx_messageInfo_PgpSignedAttestation.DiscardUnknown(m) +} + +var xxx_messageInfo_PgpSignedAttestation proto.InternalMessageInfo + +func (m *PgpSignedAttestation) GetSignature() string { + if m != nil { + return m.Signature + } + return "" +} + +func (m *PgpSignedAttestation) GetContentType() PgpSignedAttestation_ContentType { + if m != nil { + return m.ContentType + } + return PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED +} + +type isPgpSignedAttestation_KeyId interface { + isPgpSignedAttestation_KeyId() +} + +type PgpSignedAttestation_PgpKeyId struct { + PgpKeyId string `protobuf:"bytes,2,opt,name=pgp_key_id,json=pgpKeyId,proto3,oneof"` +} + +func (*PgpSignedAttestation_PgpKeyId) isPgpSignedAttestation_KeyId() {} + +func (m *PgpSignedAttestation) GetKeyId() isPgpSignedAttestation_KeyId { + if m != nil { + return m.KeyId + } + return nil +} + +func (m *PgpSignedAttestation) GetPgpKeyId() string { + if x, ok := m.GetKeyId().(*PgpSignedAttestation_PgpKeyId); ok { + return x.PgpKeyId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PgpSignedAttestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PgpSignedAttestation_OneofMarshaler, _PgpSignedAttestation_OneofUnmarshaler, _PgpSignedAttestation_OneofSizer, []interface{}{ + (*PgpSignedAttestation_PgpKeyId)(nil), + } +} + +func _PgpSignedAttestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PgpKeyId) + case nil: + default: + return fmt.Errorf("PgpSignedAttestation.KeyId has unexpected type %T", x) + } + return nil +} + +func _PgpSignedAttestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PgpSignedAttestation) + switch tag { + case 2: // key_id.pgp_key_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.KeyId = &PgpSignedAttestation_PgpKeyId{x} + return true, err + default: + return false, nil + } +} + +func _PgpSignedAttestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PgpKeyId))) + n += len(x.PgpKeyId) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Note kind that represents a logical attestation "role" or "authority". For +// example, an organization might have one `AttestationAuthority` for "QA" and +// one for "build". This Note is intended to act strictly as a grouping +// mechanism for the attached Occurrences (Attestations). This grouping +// mechanism also provides a security boundary, since IAM ACLs gate the ability +// for a principle to attach an Occurrence to a given Note. It also provides a +// single point of lookup to find all attached Attestation Occurrences, even if +// they don't all live in the same project. 
+type AttestationAuthority struct { + Hint *AttestationAuthority_AttestationAuthorityHint `protobuf:"bytes,1,opt,name=hint,proto3" json:"hint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttestationAuthority) Reset() { *m = AttestationAuthority{} } +func (m *AttestationAuthority) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority) ProtoMessage() {} +func (*AttestationAuthority) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{8} +} +func (m *AttestationAuthority) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority.Unmarshal(m, b) +} +func (m *AttestationAuthority) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority.Merge(dst, src) +} +func (m *AttestationAuthority) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority.Size(m) +} +func (m *AttestationAuthority) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestationAuthority proto.InternalMessageInfo + +func (m *AttestationAuthority) GetHint() *AttestationAuthority_AttestationAuthorityHint { + if m != nil { + return m.Hint + } + return nil +} + +// This submessage provides human-readable hints about the purpose of the +// AttestationAuthority. Because the name of a Note acts as its resource +// reference, it is important to disambiguate the canonical name of the Note +// (which might be a UUID for security purposes) from "readable" names more +// suitable for debug output. Note that these hints should NOT be used to +// look up AttestationAuthorities in security sensitive contexts, such as when +// looking up Attestations to verify. +type AttestationAuthority_AttestationAuthorityHint struct { + // The human readable name of this Attestation Authority, for example "qa". 
+ HumanReadableName string `protobuf:"bytes,1,opt,name=human_readable_name,json=humanReadableName,proto3" json:"human_readable_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttestationAuthority_AttestationAuthorityHint) Reset() { + *m = AttestationAuthority_AttestationAuthorityHint{} +} +func (m *AttestationAuthority_AttestationAuthorityHint) String() string { + return proto.CompactTextString(m) +} +func (*AttestationAuthority_AttestationAuthorityHint) ProtoMessage() {} +func (*AttestationAuthority_AttestationAuthorityHint) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{8, 0} +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Unmarshal(m, b) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority_AttestationAuthorityHint) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Merge(dst, src) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.Size(m) +} +func (m *AttestationAuthority_AttestationAuthorityHint) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestationAuthority_AttestationAuthorityHint proto.InternalMessageInfo + +func (m *AttestationAuthority_AttestationAuthorityHint) GetHumanReadableName() string { + if m != nil { + return m.HumanReadableName + } + return "" +} + +// Occurrence that represents a single "attestation". The authenticity of an +// Attestation can be verified using the attached signature. If the verifier +// trusts the public key of the signer, then verifying the signature is +// sufficient to establish trust. In this circumstance, the +// AttestationAuthority to which this Attestation is attached is primarily +// useful for look-up (how to find this Attestation if you already know the +// Authority and artifact to be verified) and intent (which authority was this +// attestation intended to sign for). +type AttestationAuthority_Attestation struct { + // The signature, generally over the `resource_url`, that verifies this + // attestation. The semantics of the signature veracity are ultimately + // determined by the verification engine. 
+ // + // Types that are valid to be assigned to Signature: + // *AttestationAuthority_Attestation_PgpSignedAttestation + Signature isAttestationAuthority_Attestation_Signature `protobuf_oneof:"signature"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AttestationAuthority_Attestation) Reset() { *m = AttestationAuthority_Attestation{} } +func (m *AttestationAuthority_Attestation) String() string { return proto.CompactTextString(m) } +func (*AttestationAuthority_Attestation) ProtoMessage() {} +func (*AttestationAuthority_Attestation) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{8, 1} +} +func (m *AttestationAuthority_Attestation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AttestationAuthority_Attestation.Unmarshal(m, b) +} +func (m *AttestationAuthority_Attestation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AttestationAuthority_Attestation.Marshal(b, m, deterministic) +} +func (dst *AttestationAuthority_Attestation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AttestationAuthority_Attestation.Merge(dst, src) +} +func (m *AttestationAuthority_Attestation) XXX_Size() int { + return xxx_messageInfo_AttestationAuthority_Attestation.Size(m) +} +func (m *AttestationAuthority_Attestation) XXX_DiscardUnknown() { + xxx_messageInfo_AttestationAuthority_Attestation.DiscardUnknown(m) +} + +var xxx_messageInfo_AttestationAuthority_Attestation proto.InternalMessageInfo + +type isAttestationAuthority_Attestation_Signature interface { + isAttestationAuthority_Attestation_Signature() +} + +type AttestationAuthority_Attestation_PgpSignedAttestation struct { + PgpSignedAttestation *PgpSignedAttestation `protobuf:"bytes,1,opt,name=pgp_signed_attestation,json=pgpSignedAttestation,proto3,oneof"` +} + +func (*AttestationAuthority_Attestation_PgpSignedAttestation) isAttestationAuthority_Attestation_Signature() { +} + +func (m *AttestationAuthority_Attestation) GetSignature() isAttestationAuthority_Attestation_Signature { + if m != nil { + return m.Signature + } + return nil +} + +func (m *AttestationAuthority_Attestation) GetPgpSignedAttestation() *PgpSignedAttestation { + if x, ok := m.GetSignature().(*AttestationAuthority_Attestation_PgpSignedAttestation); ok { + return x.PgpSignedAttestation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AttestationAuthority_Attestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AttestationAuthority_Attestation_OneofMarshaler, _AttestationAuthority_Attestation_OneofUnmarshaler, _AttestationAuthority_Attestation_OneofSizer, []interface{}{ + (*AttestationAuthority_Attestation_PgpSignedAttestation)(nil), + } +} + +func _AttestationAuthority_Attestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AttestationAuthority_Attestation) + // signature + switch x := m.Signature.(type) { + case *AttestationAuthority_Attestation_PgpSignedAttestation: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PgpSignedAttestation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AttestationAuthority_Attestation.Signature has unexpected type %T", x) + } + return nil +} + +func _AttestationAuthority_Attestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AttestationAuthority_Attestation) + switch tag { + case 1: // signature.pgp_signed_attestation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PgpSignedAttestation) + err := b.DecodeMessage(msg) + m.Signature = &AttestationAuthority_Attestation_PgpSignedAttestation{msg} + return true, err + default: + return false, nil + } +} + +func _AttestationAuthority_Attestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AttestationAuthority_Attestation) + // signature + switch x := m.Signature.(type) { + case *AttestationAuthority_Attestation_PgpSignedAttestation: + s := proto.Size(x.PgpSignedAttestation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message encapsulating build provenance details. +type BuildDetails struct { + // The actual provenance + Provenance *BuildProvenance `protobuf:"bytes,1,opt,name=provenance,proto3" json:"provenance,omitempty"` + // Serialized JSON representation of the provenance, used in generating the + // `BuildSignature` in the corresponding Result. After verifying the + // signature, `provenance_bytes` can be unmarshalled and compared to the + // provenance to confirm that it is unchanged. A base64-encoded string + // representation of the provenance bytes is used for the signature in order + // to interoperate with openssl which expects this format for signature + // verification. + // + // The serialized form is captured both to avoid ambiguity in how the + // provenance is marshalled to json as well to prevent incompatibilities with + // future changes. 
+ ProvenanceBytes string `protobuf:"bytes,2,opt,name=provenance_bytes,json=provenanceBytes,proto3" json:"provenance_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildDetails) Reset() { *m = BuildDetails{} } +func (m *BuildDetails) String() string { return proto.CompactTextString(m) } +func (*BuildDetails) ProtoMessage() {} +func (*BuildDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{9} +} +func (m *BuildDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildDetails.Unmarshal(m, b) +} +func (m *BuildDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildDetails.Marshal(b, m, deterministic) +} +func (dst *BuildDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildDetails.Merge(dst, src) +} +func (m *BuildDetails) XXX_Size() int { + return xxx_messageInfo_BuildDetails.Size(m) +} +func (m *BuildDetails) XXX_DiscardUnknown() { + xxx_messageInfo_BuildDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildDetails proto.InternalMessageInfo + +func (m *BuildDetails) GetProvenance() *BuildProvenance { + if m != nil { + return m.Provenance + } + return nil +} + +func (m *BuildDetails) GetProvenanceBytes() string { + if m != nil { + return m.ProvenanceBytes + } + return "" +} + +// Indicates various scans and whether they are turned on or off. +type ScanConfig struct { + // Output only. The name of the ScanConfig in the form + // "projects/{project_id}/ScanConfigs/{ScanConfig_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. A human-readable description of what the `ScanConfig` does. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Indicates whether the Scan is enabled. + Enabled bool `protobuf:"varint,3,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig) Reset() { *m = ScanConfig{} } +func (m *ScanConfig) String() string { return proto.CompactTextString(m) } +func (*ScanConfig) ProtoMessage() {} +func (*ScanConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{10} +} +func (m *ScanConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig.Unmarshal(m, b) +} +func (m *ScanConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig.Marshal(b, m, deterministic) +} +func (dst *ScanConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig.Merge(dst, src) +} +func (m *ScanConfig) XXX_Size() int { + return xxx_messageInfo_ScanConfig.Size(m) +} +func (m *ScanConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig proto.InternalMessageInfo + +func (m *ScanConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanConfig) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ScanConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +// Request to get an Occurrence. 
+type GetOccurrenceRequest struct { + // The name of the occurrence of the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOccurrenceRequest) Reset() { *m = GetOccurrenceRequest{} } +func (m *GetOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceRequest) ProtoMessage() {} +func (*GetOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{11} +} +func (m *GetOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceRequest.Merge(dst, src) +} +func (m *GetOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceRequest.Size(m) +} +func (m *GetOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceRequest proto.InternalMessageInfo + +func (m *GetOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list occurrences. +type ListOccurrencesRequest struct { + // The name field contains the project Id. For example: + // "projects/{project_id} + // @Deprecated + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This contains the project Id for example: projects/{project_id}. + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of occurrences to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The kind of occurrences to filter on. 
+ Kind Note_Kind `protobuf:"varint,6,opt,name=kind,proto3,enum=google.devtools.containeranalysis.v1alpha1.Note_Kind" json:"kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOccurrencesRequest) Reset() { *m = ListOccurrencesRequest{} } +func (m *ListOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesRequest) ProtoMessage() {} +func (*ListOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{12} +} +func (m *ListOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesRequest.Merge(dst, src) +} +func (m *ListOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesRequest.Size(m) +} +func (m *ListOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesRequest proto.InternalMessageInfo + +func (m *ListOccurrencesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListOccurrencesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListOccurrencesRequest) GetKind() Note_Kind { + if m != nil { + return m.Kind + } + return Note_KIND_UNSPECIFIED +} + +// Response including listed active occurrences. +type ListOccurrencesResponse struct { + // The occurrences requested. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOccurrencesResponse) Reset() { *m = ListOccurrencesResponse{} } +func (m *ListOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesResponse) ProtoMessage() {} +func (*ListOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{13} +} +func (m *ListOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesResponse.Merge(dst, src) +} +func (m *ListOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesResponse.Size(m) +} +func (m *ListOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesResponse proto.InternalMessageInfo + +func (m *ListOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to delete an occurrence +type DeleteOccurrenceRequest struct { + // The name of the occurrence in the form of + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteOccurrenceRequest) Reset() { *m = DeleteOccurrenceRequest{} } +func (m *DeleteOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteOccurrenceRequest) ProtoMessage() {} +func (*DeleteOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{14} +} +func (m *DeleteOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteOccurrenceRequest.Unmarshal(m, b) +} +func (m *DeleteOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteOccurrenceRequest.Merge(dst, src) +} +func (m *DeleteOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteOccurrenceRequest.Size(m) +} +func (m *DeleteOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteOccurrenceRequest proto.InternalMessageInfo + +func (m *DeleteOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to insert a new occurrence. +type CreateOccurrenceRequest struct { + // The name of the project. Should be of the form "projects/{project_id}". 
+ // @Deprecated + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This field contains the project Id for example: "projects/{project_id}" + Parent string `protobuf:"bytes,3,opt,name=parent,proto3" json:"parent,omitempty"` + // The occurrence to be inserted + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence,proto3" json:"occurrence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateOccurrenceRequest) Reset() { *m = CreateOccurrenceRequest{} } +func (m *CreateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOccurrenceRequest) ProtoMessage() {} +func (*CreateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{15} +} +func (m *CreateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOccurrenceRequest.Unmarshal(m, b) +} +func (m *CreateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOccurrenceRequest.Merge(dst, src) +} +func (m *CreateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_CreateOccurrenceRequest.Size(m) +} +func (m *CreateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOccurrenceRequest proto.InternalMessageInfo + +func (m *CreateOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateOccurrenceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +// Request to update an existing occurrence +type UpdateOccurrenceRequest struct { + // The name of the occurrence. + // Should be of the form "projects/{project_id}/occurrences/{OCCURRENCE_ID}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated occurrence. + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence,proto3" json:"occurrence,omitempty"` + // The fields to update. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateOccurrenceRequest) Reset() { *m = UpdateOccurrenceRequest{} } +func (m *UpdateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOccurrenceRequest) ProtoMessage() {} +func (*UpdateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{16} +} +func (m *UpdateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOccurrenceRequest.Unmarshal(m, b) +} +func (m *UpdateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOccurrenceRequest.Merge(dst, src) +} +func (m *UpdateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOccurrenceRequest.Size(m) +} +func (m *UpdateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOccurrenceRequest proto.InternalMessageInfo + +func (m *UpdateOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +func (m *UpdateOccurrenceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to get a Note. +type GetNoteRequest struct { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNoteRequest) Reset() { *m = GetNoteRequest{} } +func (m *GetNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetNoteRequest) ProtoMessage() {} +func (*GetNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{17} +} +func (m *GetNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNoteRequest.Unmarshal(m, b) +} +func (m *GetNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNoteRequest.Merge(dst, src) +} +func (m *GetNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetNoteRequest.Size(m) +} +func (m *GetNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNoteRequest proto.InternalMessageInfo + +func (m *GetNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to get the note to which this occurrence is attached. 
+type GetOccurrenceNoteRequest struct { + // The name of the occurrence in the form + // "projects/{project_id}/occurrences/{OCCURRENCE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOccurrenceNoteRequest) Reset() { *m = GetOccurrenceNoteRequest{} } +func (m *GetOccurrenceNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceNoteRequest) ProtoMessage() {} +func (*GetOccurrenceNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{18} +} +func (m *GetOccurrenceNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceNoteRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceNoteRequest.Merge(dst, src) +} +func (m *GetOccurrenceNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceNoteRequest.Size(m) +} +func (m *GetOccurrenceNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceNoteRequest proto.InternalMessageInfo + +func (m *GetOccurrenceNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list notes. +type ListNotesRequest struct { + // The name field will contain the project Id for example: + // "providers/{provider_id} + // @Deprecated + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This field contains the project Id for example: "projects/{PROJECT_ID}". + Parent string `protobuf:"bytes,5,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of notes to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotesRequest) Reset() { *m = ListNotesRequest{} } +func (m *ListNotesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotesRequest) ProtoMessage() {} +func (*ListNotesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{19} +} +func (m *ListNotesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesRequest.Unmarshal(m, b) +} +func (m *ListNotesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNotesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesRequest.Merge(dst, src) +} +func (m *ListNotesRequest) XXX_Size() int { + return xxx_messageInfo_ListNotesRequest.Size(m) +} +func (m *ListNotesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesRequest proto.InternalMessageInfo + +func (m *ListNotesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNotesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListNotesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNotesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNotesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response including listed notes. +type ListNotesResponse struct { + // The occurrences requested + Notes []*Note `protobuf:"bytes,1,rep,name=notes,proto3" json:"notes,omitempty"` + // The next pagination token in the list response. It should be used as + // page_token for the following request. An empty value means no more result. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotesResponse) Reset() { *m = ListNotesResponse{} } +func (m *ListNotesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNotesResponse) ProtoMessage() {} +func (*ListNotesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{20} +} +func (m *ListNotesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesResponse.Unmarshal(m, b) +} +func (m *ListNotesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNotesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesResponse.Merge(dst, src) +} +func (m *ListNotesResponse) XXX_Size() int { + return xxx_messageInfo_ListNotesResponse.Size(m) +} +func (m *ListNotesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesResponse proto.InternalMessageInfo + +func (m *ListNotesResponse) GetNotes() []*Note { + if m != nil { + return m.Notes + } + return nil +} + +func (m *ListNotesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to delete a note +type DeleteNoteRequest struct { + // The name of the note in the form of + // "providers/{provider_id}/notes/{NOTE_ID}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNoteRequest) Reset() { *m = DeleteNoteRequest{} } +func (m *DeleteNoteRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNoteRequest) ProtoMessage() {} +func (*DeleteNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{21} +} +func (m *DeleteNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNoteRequest.Unmarshal(m, b) +} +func (m *DeleteNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNoteRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNoteRequest.Merge(dst, src) +} +func (m *DeleteNoteRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNoteRequest.Size(m) +} +func (m *DeleteNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNoteRequest proto.InternalMessageInfo + +func (m *DeleteNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to insert a new note +type CreateNoteRequest struct { + // The name of the project. + // Should be of the form "providers/{provider_id}". + // @Deprecated + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This field contains the project Id for example: + // "projects/{project_id} + Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to use for this note. 
+ NoteId string `protobuf:"bytes,2,opt,name=note_id,json=noteId,proto3" json:"note_id,omitempty"` + // The Note to be inserted + Note *Note `protobuf:"bytes,3,opt,name=note,proto3" json:"note,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNoteRequest) Reset() { *m = CreateNoteRequest{} } +func (m *CreateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNoteRequest) ProtoMessage() {} +func (*CreateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{22} +} +func (m *CreateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNoteRequest.Unmarshal(m, b) +} +func (m *CreateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNoteRequest.Merge(dst, src) +} +func (m *CreateNoteRequest) XXX_Size() int { + return xxx_messageInfo_CreateNoteRequest.Size(m) +} +func (m *CreateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNoteRequest proto.InternalMessageInfo + +func (m *CreateNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateNoteRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateNoteRequest) GetNoteId() string { + if m != nil { + return m.NoteId + } + return "" +} + +func (m *CreateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +// Request to update an existing note +type UpdateNoteRequest struct { + // The name of the note. + // Should be of the form "projects/{provider_id}/notes/{note_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated note. + Note *Note `protobuf:"bytes,2,opt,name=note,proto3" json:"note,omitempty"` + // The fields to update. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNoteRequest) Reset() { *m = UpdateNoteRequest{} } +func (m *UpdateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNoteRequest) ProtoMessage() {} +func (*UpdateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{23} +} +func (m *UpdateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNoteRequest.Unmarshal(m, b) +} +func (m *UpdateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNoteRequest.Merge(dst, src) +} +func (m *UpdateNoteRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNoteRequest.Size(m) +} +func (m *UpdateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNoteRequest proto.InternalMessageInfo + +func (m *UpdateNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +func (m *UpdateNoteRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to list occurrences. +type ListNoteOccurrencesRequest struct { + // The name field will contain the note name for example: + // "provider/{provider_id}/notes/{note_id}" + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of notes to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNoteOccurrencesRequest) Reset() { *m = ListNoteOccurrencesRequest{} } +func (m *ListNoteOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesRequest) ProtoMessage() {} +func (*ListNoteOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{24} +} +func (m *ListNoteOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesRequest.Merge(dst, src) +} +func (m *ListNoteOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesRequest.Size(m) +} +func (m *ListNoteOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesRequest proto.InternalMessageInfo + +func (m *ListNoteOccurrencesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNoteOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response including listed occurrences for a note. +type ListNoteOccurrencesResponse struct { + // The occurrences attached to the specified note. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + // Token to receive the next page of notes. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNoteOccurrencesResponse) Reset() { *m = ListNoteOccurrencesResponse{} } +func (m *ListNoteOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesResponse) ProtoMessage() {} +func (*ListNoteOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{25} +} +func (m *ListNoteOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesResponse.Merge(dst, src) +} +func (m *ListNoteOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesResponse.Size(m) +} +func (m *ListNoteOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesResponse proto.InternalMessageInfo + +func (m *ListNoteOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListNoteOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for creating an operation +type CreateOperationRequest struct { + // The project Id that this operation should be created under. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to use for this operation. + OperationId string `protobuf:"bytes,2,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + // The operation to create. 
+ Operation *longrunning.Operation `protobuf:"bytes,3,opt,name=operation,proto3" json:"operation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateOperationRequest) Reset() { *m = CreateOperationRequest{} } +func (m *CreateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOperationRequest) ProtoMessage() {} +func (*CreateOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{26} +} +func (m *CreateOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOperationRequest.Unmarshal(m, b) +} +func (m *CreateOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOperationRequest.Merge(dst, src) +} +func (m *CreateOperationRequest) XXX_Size() int { + return xxx_messageInfo_CreateOperationRequest.Size(m) +} +func (m *CreateOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOperationRequest proto.InternalMessageInfo + +func (m *CreateOperationRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOperationRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *CreateOperationRequest) GetOperation() *longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +// Request for updating an existing operation +type UpdateOperationRequest struct { + // The name of the Operation. + // Should be of the form "projects/{provider_id}/operations/{operation_id}". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The operation to create. 
+ Operation *longrunning.Operation `protobuf:"bytes,3,opt,name=operation,proto3" json:"operation,omitempty"` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateOperationRequest) Reset() { *m = UpdateOperationRequest{} } +func (m *UpdateOperationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOperationRequest) ProtoMessage() {} +func (*UpdateOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{27} +} +func (m *UpdateOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOperationRequest.Unmarshal(m, b) +} +func (m *UpdateOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOperationRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOperationRequest.Merge(dst, src) +} +func (m *UpdateOperationRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOperationRequest.Size(m) +} +func (m *UpdateOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOperationRequest proto.InternalMessageInfo + +func (m *UpdateOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateOperationRequest) GetOperation() *longrunning.Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *UpdateOperationRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Metadata for all operations used and required for all operations +// that created by Container Analysis Providers +type OperationMetadata struct { + // Output only. The time this operation was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time that this operation was marked completed or failed. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{28} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Request to get the vulnz summary for some set of vulnerability Occurrences. +type GetVulnzOccurrencesSummaryRequest struct { + // This contains the project Id for example: projects/{project_id} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. 
+	Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetVulnzOccurrencesSummaryRequest) Reset() { *m = GetVulnzOccurrencesSummaryRequest{} }
+func (m *GetVulnzOccurrencesSummaryRequest) String() string { return proto.CompactTextString(m) }
+func (*GetVulnzOccurrencesSummaryRequest) ProtoMessage() {}
+func (*GetVulnzOccurrencesSummaryRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{29}
+}
+func (m *GetVulnzOccurrencesSummaryRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetVulnzOccurrencesSummaryRequest.Unmarshal(m, b)
+}
+func (m *GetVulnzOccurrencesSummaryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetVulnzOccurrencesSummaryRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetVulnzOccurrencesSummaryRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetVulnzOccurrencesSummaryRequest.Merge(dst, src)
+}
+func (m *GetVulnzOccurrencesSummaryRequest) XXX_Size() int {
+	return xxx_messageInfo_GetVulnzOccurrencesSummaryRequest.Size(m)
+}
+func (m *GetVulnzOccurrencesSummaryRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetVulnzOccurrencesSummaryRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetVulnzOccurrencesSummaryRequest proto.InternalMessageInfo
+
+func (m *GetVulnzOccurrencesSummaryRequest) GetParent() string {
+	if m != nil {
+		return m.Parent
+	}
+	return ""
+}
+
+func (m *GetVulnzOccurrencesSummaryRequest) GetFilter() string {
+	if m != nil {
+		return m.Filter
+	}
+	return ""
+}
+
+// A summary of how many vulnz occurrences there are per severity type.
+type GetVulnzOccurrencesSummaryResponse struct {
+	// A map of how many occurrences were found for each severity.
+ Counts []*GetVulnzOccurrencesSummaryResponse_SeverityCount `protobuf:"bytes,1,rep,name=counts,proto3" json:"counts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVulnzOccurrencesSummaryResponse) Reset() { *m = GetVulnzOccurrencesSummaryResponse{} } +func (m *GetVulnzOccurrencesSummaryResponse) String() string { return proto.CompactTextString(m) } +func (*GetVulnzOccurrencesSummaryResponse) ProtoMessage() {} +func (*GetVulnzOccurrencesSummaryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{30} +} +func (m *GetVulnzOccurrencesSummaryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse.Unmarshal(m, b) +} +func (m *GetVulnzOccurrencesSummaryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse.Marshal(b, m, deterministic) +} +func (dst *GetVulnzOccurrencesSummaryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVulnzOccurrencesSummaryResponse.Merge(dst, src) +} +func (m *GetVulnzOccurrencesSummaryResponse) XXX_Size() int { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse.Size(m) +} +func (m *GetVulnzOccurrencesSummaryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetVulnzOccurrencesSummaryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVulnzOccurrencesSummaryResponse proto.InternalMessageInfo + +func (m *GetVulnzOccurrencesSummaryResponse) GetCounts() []*GetVulnzOccurrencesSummaryResponse_SeverityCount { + if m != nil { + return m.Counts + } + return nil +} + +// The number of occurrences created for a specific severity. +type GetVulnzOccurrencesSummaryResponse_SeverityCount struct { + // The severity of the occurrences. + Severity VulnerabilityType_Severity `protobuf:"varint,1,opt,name=severity,proto3,enum=google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Severity" json:"severity,omitempty"` + // The number of occurrences with the severity. 
+ Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) Reset() { + *m = GetVulnzOccurrencesSummaryResponse_SeverityCount{} +} +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) String() string { + return proto.CompactTextString(m) +} +func (*GetVulnzOccurrencesSummaryResponse_SeverityCount) ProtoMessage() {} +func (*GetVulnzOccurrencesSummaryResponse_SeverityCount) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{30, 0} +} +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount.Unmarshal(m, b) +} +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount.Marshal(b, m, deterministic) +} +func (dst *GetVulnzOccurrencesSummaryResponse_SeverityCount) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount.Merge(dst, src) +} +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) XXX_Size() int { + return xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount.Size(m) +} +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) XXX_DiscardUnknown() { + xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVulnzOccurrencesSummaryResponse_SeverityCount proto.InternalMessageInfo + +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) GetSeverity() VulnerabilityType_Severity { + if m != nil { + return m.Severity + } + return VulnerabilityType_SEVERITY_UNSPECIFIED +} + +func (m *GetVulnzOccurrencesSummaryResponse_SeverityCount) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// Request to get a ScanConfig. +type GetScanConfigRequest struct { + // The name of the ScanConfig in the form + // projects/{project_id}/scan_configs/{ScanConfig_id} + // instead. 
+	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized []byte `json:"-"`
+	XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetScanConfigRequest) Reset() { *m = GetScanConfigRequest{} }
+func (m *GetScanConfigRequest) String() string { return proto.CompactTextString(m) }
+func (*GetScanConfigRequest) ProtoMessage() {}
+func (*GetScanConfigRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{31}
+}
+func (m *GetScanConfigRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetScanConfigRequest.Unmarshal(m, b)
+}
+func (m *GetScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetScanConfigRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetScanConfigRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetScanConfigRequest.Merge(dst, src)
+}
+func (m *GetScanConfigRequest) XXX_Size() int {
+	return xxx_messageInfo_GetScanConfigRequest.Size(m)
+}
+func (m *GetScanConfigRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetScanConfigRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetScanConfigRequest proto.InternalMessageInfo
+
+func (m *GetScanConfigRequest) GetName() string {
+	if m != nil {
+		return m.Name
+	}
+	return ""
+}
+
+// Request to list the available scan configurations.
+type ListScanConfigsRequest struct {
+	// This contains the project Id i.e.: projects/{project_id}
+	// instead.
+	Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
+	// The filter expression.
+	Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"`
+	// The number of items to return.
+	PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
+	// The page token to use for the next request.
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsRequest) Reset() { *m = ListScanConfigsRequest{} } +func (m *ListScanConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsRequest) ProtoMessage() {} +func (*ListScanConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{32} +} +func (m *ListScanConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsRequest.Unmarshal(m, b) +} +func (m *ListScanConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsRequest.Merge(dst, src) +} +func (m *ListScanConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsRequest.Size(m) +} +func (m *ListScanConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsRequest proto.InternalMessageInfo + +func (m *ListScanConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanConfigsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListScanConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// A list of ScanConfigs for the project. +type ListScanConfigsResponse struct { + // The set of scan configs + ScanConfigs []*ScanConfig `protobuf:"bytes,1,rep,name=scan_configs,json=scanConfigs,proto3" json:"scan_configs,omitempty"` + // A page token to pass in order to get more scans. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsResponse) Reset() { *m = ListScanConfigsResponse{} } +func (m *ListScanConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsResponse) ProtoMessage() {} +func (*ListScanConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{33} +} +func (m *ListScanConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsResponse.Unmarshal(m, b) +} +func (m *ListScanConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsResponse.Merge(dst, src) +} +func (m *ListScanConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsResponse.Size(m) +} +func (m *ListScanConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsResponse proto.InternalMessageInfo + +func (m *ListScanConfigsResponse) GetScanConfigs() []*ScanConfig { + if m != nil { + return m.ScanConfigs + } + return nil +} + +func (m *ListScanConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to update a ScanConfig. +type UpdateScanConfigRequest struct { + // The scan config to update of the form + // projects/{project_id}/scan_configs/{ScanConfig_id} + // instead. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The new scan configuration + ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateScanConfigRequest) Reset() { *m = UpdateScanConfigRequest{} } +func (m *UpdateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateScanConfigRequest) ProtoMessage() {} +func (*UpdateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_a487a8754ddd6778, []int{34} +} +func (m *UpdateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateScanConfigRequest.Unmarshal(m, b) +} +func (m *UpdateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateScanConfigRequest.Merge(dst, src) +} +func (m *UpdateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateScanConfigRequest.Size(m) +} +func (m *UpdateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateScanConfigRequest proto.InternalMessageInfo + +func (m *UpdateScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +func (m *UpdateScanConfigRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func init() { + proto.RegisterType((*Occurrence)(nil), "google.devtools.containeranalysis.v1alpha1.Occurrence") + proto.RegisterType((*Resource)(nil), "google.devtools.containeranalysis.v1alpha1.Resource") + proto.RegisterType((*Note)(nil), "google.devtools.containeranalysis.v1alpha1.Note") + proto.RegisterType((*Note_RelatedUrl)(nil), "google.devtools.containeranalysis.v1alpha1.Note.RelatedUrl") + proto.RegisterType((*Deployable)(nil), "google.devtools.containeranalysis.v1alpha1.Deployable") + proto.RegisterType((*Deployable_Deployment)(nil), "google.devtools.containeranalysis.v1alpha1.Deployable.Deployment") + proto.RegisterType((*Discovery)(nil), "google.devtools.containeranalysis.v1alpha1.Discovery") + proto.RegisterType((*Discovery_Discovered)(nil), "google.devtools.containeranalysis.v1alpha1.Discovery.Discovered") + proto.RegisterType((*BuildType)(nil), "google.devtools.containeranalysis.v1alpha1.BuildType") + proto.RegisterType((*BuildSignature)(nil), "google.devtools.containeranalysis.v1alpha1.BuildSignature") + proto.RegisterType((*PgpSignedAttestation)(nil), "google.devtools.containeranalysis.v1alpha1.PgpSignedAttestation") + proto.RegisterType((*AttestationAuthority)(nil), "google.devtools.containeranalysis.v1alpha1.AttestationAuthority") + proto.RegisterType((*AttestationAuthority_AttestationAuthorityHint)(nil), "google.devtools.containeranalysis.v1alpha1.AttestationAuthority.AttestationAuthorityHint") + proto.RegisterType((*AttestationAuthority_Attestation)(nil), "google.devtools.containeranalysis.v1alpha1.AttestationAuthority.Attestation") + 
proto.RegisterType((*BuildDetails)(nil), "google.devtools.containeranalysis.v1alpha1.BuildDetails") + proto.RegisterType((*ScanConfig)(nil), "google.devtools.containeranalysis.v1alpha1.ScanConfig") + proto.RegisterType((*GetOccurrenceRequest)(nil), "google.devtools.containeranalysis.v1alpha1.GetOccurrenceRequest") + proto.RegisterType((*ListOccurrencesRequest)(nil), "google.devtools.containeranalysis.v1alpha1.ListOccurrencesRequest") + proto.RegisterType((*ListOccurrencesResponse)(nil), "google.devtools.containeranalysis.v1alpha1.ListOccurrencesResponse") + proto.RegisterType((*DeleteOccurrenceRequest)(nil), "google.devtools.containeranalysis.v1alpha1.DeleteOccurrenceRequest") + proto.RegisterType((*CreateOccurrenceRequest)(nil), "google.devtools.containeranalysis.v1alpha1.CreateOccurrenceRequest") + proto.RegisterType((*UpdateOccurrenceRequest)(nil), "google.devtools.containeranalysis.v1alpha1.UpdateOccurrenceRequest") + proto.RegisterType((*GetNoteRequest)(nil), "google.devtools.containeranalysis.v1alpha1.GetNoteRequest") + proto.RegisterType((*GetOccurrenceNoteRequest)(nil), "google.devtools.containeranalysis.v1alpha1.GetOccurrenceNoteRequest") + proto.RegisterType((*ListNotesRequest)(nil), "google.devtools.containeranalysis.v1alpha1.ListNotesRequest") + proto.RegisterType((*ListNotesResponse)(nil), "google.devtools.containeranalysis.v1alpha1.ListNotesResponse") + proto.RegisterType((*DeleteNoteRequest)(nil), "google.devtools.containeranalysis.v1alpha1.DeleteNoteRequest") + proto.RegisterType((*CreateNoteRequest)(nil), "google.devtools.containeranalysis.v1alpha1.CreateNoteRequest") + proto.RegisterType((*UpdateNoteRequest)(nil), "google.devtools.containeranalysis.v1alpha1.UpdateNoteRequest") + proto.RegisterType((*ListNoteOccurrencesRequest)(nil), "google.devtools.containeranalysis.v1alpha1.ListNoteOccurrencesRequest") + proto.RegisterType((*ListNoteOccurrencesResponse)(nil), "google.devtools.containeranalysis.v1alpha1.ListNoteOccurrencesResponse") + proto.RegisterType((*CreateOperationRequest)(nil), "google.devtools.containeranalysis.v1alpha1.CreateOperationRequest") + proto.RegisterType((*UpdateOperationRequest)(nil), "google.devtools.containeranalysis.v1alpha1.UpdateOperationRequest") + proto.RegisterType((*OperationMetadata)(nil), "google.devtools.containeranalysis.v1alpha1.OperationMetadata") + proto.RegisterType((*GetVulnzOccurrencesSummaryRequest)(nil), "google.devtools.containeranalysis.v1alpha1.GetVulnzOccurrencesSummaryRequest") + proto.RegisterType((*GetVulnzOccurrencesSummaryResponse)(nil), "google.devtools.containeranalysis.v1alpha1.GetVulnzOccurrencesSummaryResponse") + proto.RegisterType((*GetVulnzOccurrencesSummaryResponse_SeverityCount)(nil), "google.devtools.containeranalysis.v1alpha1.GetVulnzOccurrencesSummaryResponse.SeverityCount") + proto.RegisterType((*GetScanConfigRequest)(nil), "google.devtools.containeranalysis.v1alpha1.GetScanConfigRequest") + proto.RegisterType((*ListScanConfigsRequest)(nil), "google.devtools.containeranalysis.v1alpha1.ListScanConfigsRequest") + proto.RegisterType((*ListScanConfigsResponse)(nil), "google.devtools.containeranalysis.v1alpha1.ListScanConfigsResponse") + proto.RegisterType((*UpdateScanConfigRequest)(nil), "google.devtools.containeranalysis.v1alpha1.UpdateScanConfigRequest") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.Note_Kind", Note_Kind_name, Note_Kind_value) + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.Deployable_Deployment_Platform", Deployable_Deployment_Platform_name, 
Deployable_Deployment_Platform_value) + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.Discovery_Discovered_AnalysisStatus", Discovery_Discovered_AnalysisStatus_name, Discovery_Discovered_AnalysisStatus_value) + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.BuildSignature_KeyType", BuildSignature_KeyType_name, BuildSignature_KeyType_value) + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.PgpSignedAttestation_ContentType", PgpSignedAttestation_ContentType_name, PgpSignedAttestation_ContentType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ContainerAnalysisClient is the client API for ContainerAnalysis service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ContainerAnalysisClient interface { + // Returns the requested `Occurrence`. + GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Lists active `Occurrences` for a given project matching the filters. + ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) + // Deletes the given `Occurrence` from the system. Use this when + // an `Occurrence` is no longer applicable for the given resource. + DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new `Occurrence`. Use this method to create `Occurrences` + // for a resource. + CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Updates an existing occurrence. + UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Gets the `Note` attached to the given `Occurrence`. + GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Returns the requested `Note`. + GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists all `Notes` for a given project. + ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) + // Deletes the given `Note` from the system. + DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new `Note`. + CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Updates an existing `Note`. + UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists `Occurrences` referencing the specified `Note`. Use this method to + // get all occurrences referencing your `Note` across all your customer + // projects. + ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) + // Gets a summary of the number and severity of occurrences. + GetVulnzOccurrencesSummary(ctx context.Context, in *GetVulnzOccurrencesSummaryRequest, opts ...grpc.CallOption) (*GetVulnzOccurrencesSummaryResponse, error) + // Sets the access control policy on the specified `Note` or `Occurrence`. 
+	// Requires `containeranalysis.notes.setIamPolicy` or
+	// `containeranalysis.occurrences.setIamPolicy` permission if the resource is
+	// a `Note` or an `Occurrence`, respectively.
+	// Attempting to call this method without these permissions will result in a
+	// `PERMISSION_DENIED` error.
+	// Attempting to call this method on a non-existent resource will result in a
+	// `NOT_FOUND` error if the user has `containeranalysis.notes.list` permission
+	// on a `Note` or `containeranalysis.occurrences.list` on an `Occurrence`, or
+	// a `PERMISSION_DENIED` error otherwise. The resource takes the following
+	// formats: `projects/{projectid}/occurrences/{occurrenceid}` for occurrences
+	// and projects/{projectid}/notes/{noteid} for notes
+	SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error)
+	// Gets the access control policy for a note or an `Occurrence` resource.
+	// Requires `containeranalysis.notes.setIamPolicy` or
+	// `containeranalysis.occurrences.setIamPolicy` permission if the resource is
+	// a note or occurrence, respectively.
+	// Attempting to call this method on a resource without the required
+	// permission will result in a `PERMISSION_DENIED` error. Attempting to call
+	// this method on a non-existent resource will result in a `NOT_FOUND` error
+	// if the user has list permission on the project, or a `PERMISSION_DENIED`
+	// error otherwise. The resource takes the following formats:
+	// `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences and
+	// projects/{PROJECT_ID}/notes/{NOTE_ID} for notes
+	GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error)
+	// Returns the permissions that a caller has on the specified note or
+	// occurrence resource. Requires list permission on the project (for example,
+	// "storage.objects.list" on the containing bucket for testing permission of
+	// an object). Attempting to call this method on a non-existent resource will
+	// result in a `NOT_FOUND` error if the user has list permission on the
+	// project, or a `PERMISSION_DENIED` error otherwise. The resource takes the
+	// following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for
+	// `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`
+	TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error)
+	// Creates a new `Operation`.
+	CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error)
+	// Updates an existing operation and returns an error if the operation
+	// does not exist. The only valid updates are to mark the done bit and to
+	// change the result.
+	UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error)
+	// Gets a specific scan configuration for a project.
+	GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error)
+	// Lists scan configurations for a project.
+	ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error)
+	// Updates the scan configuration to a new value.
+ UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) +} + +type containerAnalysisClient struct { + cc *grpc.ClientConn +} + +func NewContainerAnalysisClient(cc *grpc.ClientConn) ContainerAnalysisClient { + return &containerAnalysisClient{cc} +} + +func (c *containerAnalysisClient) GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) { + out := new(ListOccurrencesResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListOccurrences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/DeleteOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetOccurrenceNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) { + out := new(ListNotesResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListNotes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/DeleteNote", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) { + out := new(ListNoteOccurrencesResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListNoteOccurrences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetVulnzOccurrencesSummary(ctx context.Context, in *GetVulnzOccurrencesSummaryRequest, opts ...grpc.CallOption) (*GetVulnzOccurrencesSummaryResponse, error) { + out := new(GetVulnzOccurrencesSummaryResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetVulnzOccurrencesSummary", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) CreateOperation(ctx context.Context, in *CreateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) UpdateOperation(ctx context.Context, in *UpdateOperationRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateOperation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) { + out := new(ListScanConfigsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListScanConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisClient) UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContainerAnalysisServer is the server API for ContainerAnalysis service. +type ContainerAnalysisServer interface { + // Returns the requested `Occurrence`. + GetOccurrence(context.Context, *GetOccurrenceRequest) (*Occurrence, error) + // Lists active `Occurrences` for a given project matching the filters. + ListOccurrences(context.Context, *ListOccurrencesRequest) (*ListOccurrencesResponse, error) + // Deletes the given `Occurrence` from the system. Use this when + // an `Occurrence` is no longer applicable for the given resource. + DeleteOccurrence(context.Context, *DeleteOccurrenceRequest) (*empty.Empty, error) + // Creates a new `Occurrence`. Use this method to create `Occurrences` + // for a resource. + CreateOccurrence(context.Context, *CreateOccurrenceRequest) (*Occurrence, error) + // Updates an existing occurrence. + UpdateOccurrence(context.Context, *UpdateOccurrenceRequest) (*Occurrence, error) + // Gets the `Note` attached to the given `Occurrence`. + GetOccurrenceNote(context.Context, *GetOccurrenceNoteRequest) (*Note, error) + // Returns the requested `Note`. + GetNote(context.Context, *GetNoteRequest) (*Note, error) + // Lists all `Notes` for a given project. + ListNotes(context.Context, *ListNotesRequest) (*ListNotesResponse, error) + // Deletes the given `Note` from the system. + DeleteNote(context.Context, *DeleteNoteRequest) (*empty.Empty, error) + // Creates a new `Note`. + CreateNote(context.Context, *CreateNoteRequest) (*Note, error) + // Updates an existing `Note`. + UpdateNote(context.Context, *UpdateNoteRequest) (*Note, error) + // Lists `Occurrences` referencing the specified `Note`. Use this method to + // get all occurrences referencing your `Note` across all your customer + // projects. + ListNoteOccurrences(context.Context, *ListNoteOccurrencesRequest) (*ListNoteOccurrencesResponse, error) + // Gets a summary of the number and severity of occurrences. + GetVulnzOccurrencesSummary(context.Context, *GetVulnzOccurrencesSummaryRequest) (*GetVulnzOccurrencesSummaryResponse, error) + // Sets the access control policy on the specified `Note` or `Occurrence`. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a `Note` or an `Occurrence`, respectively. 
+ // Attempting to call this method without these permissions will result in a + // `PERMISSION_DENIED` error. + // Attempting to call this method on a non-existent resource will result in a + // `NOT_FOUND` error if the user has `containeranalysis.notes.list` permission + // on a `Note` or `containeranalysis.occurrences.list` on an `Occurrence`, or + // a `PERMISSION_DENIED` error otherwise. The resource takes the following + // formats: `projects/{projectid}/occurrences/{occurrenceid}` for occurrences + // and `projects/{projectid}/notes/{noteid}` for notes. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a `Note` or an `Occurrence` resource. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or occurrence, respectively. + // Attempting to call this method on a resource without the required + // permission will result in a `PERMISSION_DENIED` error. Attempting to call + // this method on a non-existent resource will result in a `NOT_FOUND` error + // if the user has list permission on the project, or a `PERMISSION_DENIED` + // error otherwise. The resource takes the following formats: + // `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for occurrences and + // `projects/{PROJECT_ID}/notes/{NOTE_ID}` for notes. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified note or + // occurrence resource. Requires list permission on the project (for example, + // "storage.objects.list" on the containing bucket for testing permission of + // an object). Attempting to call this method on a non-existent resource will + // result in a `NOT_FOUND` error if the user has list permission on the + // project, or a `PERMISSION_DENIED` error otherwise. The resource takes the + // following formats: `projects/{PROJECT_ID}/occurrences/{OCCURRENCE_ID}` for + // `Occurrences` and `projects/{PROJECT_ID}/notes/{NOTE_ID}` for `Notes`. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Creates a new `Operation`. + CreateOperation(context.Context, *CreateOperationRequest) (*longrunning.Operation, error) + // Updates an existing operation; returns an error if the operation + // does not exist. The only valid updates are to mark the done bit and to + // change the result. + UpdateOperation(context.Context, *UpdateOperationRequest) (*longrunning.Operation, error) + // Gets a specific scan configuration for a project. + GetScanConfig(context.Context, *GetScanConfigRequest) (*ScanConfig, error) + // Lists scan configurations for a project. + ListScanConfigs(context.Context, *ListScanConfigsRequest) (*ListScanConfigsResponse, error) + // Updates the scan configuration to a new value.
+ UpdateScanConfig(context.Context, *UpdateScanConfigRequest) (*ScanConfig, error) +} + +func RegisterContainerAnalysisServer(s *grpc.Server, srv ContainerAnalysisServer) { + s.RegisterService(&_ContainerAnalysis_serviceDesc, srv) +} + +func _ContainerAnalysis_GetOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetOccurrence(ctx, req.(*GetOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_ListOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).ListOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).ListOccurrences(ctx, req.(*ListOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_DeleteOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).DeleteOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/DeleteOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).DeleteOccurrence(ctx, req.(*DeleteOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_CreateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).CreateOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).CreateOccurrence(ctx, req.(*CreateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_UpdateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).UpdateOccurrence(ctx, in) + } + 
info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).UpdateOccurrence(ctx, req.(*UpdateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetOccurrenceNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetOccurrenceNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetOccurrenceNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetOccurrenceNote(ctx, req.(*GetOccurrenceNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetNote(ctx, req.(*GetNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_ListNotes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNotesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).ListNotes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListNotes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).ListNotes(ctx, req.(*ListNotesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_DeleteNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).DeleteNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/DeleteNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).DeleteNote(ctx, req.(*DeleteNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_CreateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(ContainerAnalysisServer).CreateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).CreateNote(ctx, req.(*CreateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_UpdateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).UpdateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).UpdateNote(ctx, req.(*UpdateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_ListNoteOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNoteOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).ListNoteOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListNoteOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).ListNoteOccurrences(ctx, req.(*ListNoteOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetVulnzOccurrencesSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVulnzOccurrencesSummaryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetVulnzOccurrencesSummary(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetVulnzOccurrencesSummary", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetVulnzOccurrencesSummary(ctx, req.(*GetVulnzOccurrencesSummaryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_CreateOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).CreateOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/CreateOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).CreateOperation(ctx, req.(*CreateOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_UpdateOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).UpdateOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).UpdateOperation(ctx, req.(*UpdateOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_GetScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).GetScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/GetScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).GetScanConfig(ctx, req.(*GetScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) 
+} + +func _ContainerAnalysis_ListScanConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).ListScanConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/ListScanConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).ListScanConfigs(ctx, req.(*ListScanConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysis_UpdateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisServer).UpdateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1alpha1.ContainerAnalysis/UpdateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisServer).UpdateScanConfig(ctx, req.(*UpdateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ContainerAnalysis_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.containeranalysis.v1alpha1.ContainerAnalysis", + HandlerType: (*ContainerAnalysisServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetOccurrence", + Handler: _ContainerAnalysis_GetOccurrence_Handler, + }, + { + MethodName: "ListOccurrences", + Handler: _ContainerAnalysis_ListOccurrences_Handler, + }, + { + MethodName: "DeleteOccurrence", + Handler: _ContainerAnalysis_DeleteOccurrence_Handler, + }, + { + MethodName: "CreateOccurrence", + Handler: _ContainerAnalysis_CreateOccurrence_Handler, + }, + { + MethodName: "UpdateOccurrence", + Handler: _ContainerAnalysis_UpdateOccurrence_Handler, + }, + { + MethodName: "GetOccurrenceNote", + Handler: _ContainerAnalysis_GetOccurrenceNote_Handler, + }, + { + MethodName: "GetNote", + Handler: _ContainerAnalysis_GetNote_Handler, + }, + { + MethodName: "ListNotes", + Handler: _ContainerAnalysis_ListNotes_Handler, + }, + { + MethodName: "DeleteNote", + Handler: _ContainerAnalysis_DeleteNote_Handler, + }, + { + MethodName: "CreateNote", + Handler: _ContainerAnalysis_CreateNote_Handler, + }, + { + MethodName: "UpdateNote", + Handler: _ContainerAnalysis_UpdateNote_Handler, + }, + { + MethodName: "ListNoteOccurrences", + Handler: _ContainerAnalysis_ListNoteOccurrences_Handler, + }, + { + MethodName: "GetVulnzOccurrencesSummary", + Handler: _ContainerAnalysis_GetVulnzOccurrencesSummary_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _ContainerAnalysis_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _ContainerAnalysis_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _ContainerAnalysis_TestIamPermissions_Handler, + }, + { + MethodName: "CreateOperation", + Handler: _ContainerAnalysis_CreateOperation_Handler, + }, + { + MethodName: "UpdateOperation", + Handler: _ContainerAnalysis_UpdateOperation_Handler, + }, + { + MethodName: "GetScanConfig", + Handler: _ContainerAnalysis_GetScanConfig_Handler, + }, + { + MethodName: "ListScanConfigs", + 
Handler: _ContainerAnalysis_ListScanConfigs_Handler, + }, + { + MethodName: "UpdateScanConfig", + Handler: _ContainerAnalysis_UpdateScanConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/containeranalysis/v1alpha1/containeranalysis.proto", +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/containeranalysis.proto", fileDescriptor_containeranalysis_a487a8754ddd6778) +} + +var fileDescriptor_containeranalysis_a487a8754ddd6778 = []byte{ + // 3256 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5b, 0xdb, 0x6f, 0x23, 0x57, + 0x19, 0xdf, 0xc9, 0xd5, 0xfe, 0x9c, 0x8b, 0x73, 0x36, 0xbb, 0xeb, 0x7a, 0x7b, 0x49, 0xa7, 0x94, + 0x6e, 0x53, 0x6a, 0x77, 0xd3, 0x96, 0xc2, 0x6e, 0x97, 0x5d, 0xc7, 0x76, 0x12, 0x37, 0x89, 0x63, + 0xcd, 0x38, 0x61, 0xd3, 0x96, 0x8e, 0xc6, 0xf6, 0x89, 0x33, 0x64, 0x3c, 0x33, 0xcc, 0x8c, 0xc3, + 0x66, 0x51, 0x25, 0x44, 0x0b, 0xa8, 0x12, 0x45, 0xe5, 0xfa, 0xc2, 0xa5, 0x52, 0x41, 0x95, 0x80, + 0x17, 0x5e, 0x78, 0x83, 0x17, 0x40, 0x3c, 0x01, 0x42, 0xe2, 0x19, 0xf1, 0x00, 0x0f, 0xfc, 0x0b, + 0x48, 0x3c, 0xa0, 0x73, 0xe6, 0x9c, 0x99, 0xf1, 0x25, 0x89, 0xc7, 0xde, 0x4a, 0x7d, 0xda, 0x39, + 0xdf, 0xf9, 0xce, 0x77, 0x3b, 0xdf, 0xf9, 0x9d, 0xef, 0x3b, 0xf1, 0xc2, 0x6a, 0xd3, 0x34, 0x9b, + 0x3a, 0xce, 0x36, 0xf0, 0xb1, 0x6b, 0x9a, 0xba, 0x93, 0xad, 0x9b, 0x86, 0xab, 0x6a, 0x06, 0xb6, + 0x55, 0x43, 0xd5, 0x4f, 0x1c, 0xcd, 0xc9, 0x1e, 0x5f, 0x57, 0x75, 0xeb, 0x50, 0xbd, 0xde, 0x3b, + 0x95, 0xb1, 0x6c, 0xd3, 0x35, 0xd1, 0xb2, 0x27, 0x23, 0xc3, 0x65, 0x64, 0x7a, 0x19, 0xb9, 0x8c, + 0xf4, 0xc3, 0x4c, 0x9f, 0x6a, 0x69, 0x59, 0xd5, 0x30, 0x4c, 0x57, 0x75, 0x35, 0xd3, 0x60, 0x92, + 0xd2, 0x51, 0xac, 0xa9, 0x69, 0xba, 0xae, 0x98, 0x07, 0x4a, 0x4b, 0x75, 0xb1, 0xad, 0xa9, 0x3a, + 0x97, 0xf1, 0x72, 0x04, 0x19, 0x5a, 0x4b, 0x6d, 0x62, 0xa5, 0xa6, 0xfa, 0xbe, 0xa4, 0xd7, 0x22, + 0xac, 0xb6, 0xd4, 0xfa, 0x11, 0x59, 0x7f, 0xdc, 0xd6, 0xc9, 0x7c, 0x4d, 0xd3, 0x35, 0xf7, 0x84, + 0xc9, 0xb9, 0x19, 0x45, 0x8e, 0x6d, 0x1e, 0x63, 0x43, 0x35, 0xea, 0x98, 0x2d, 0x7e, 0x94, 0x2d, + 0xd6, 0xd4, 0x56, 0xf6, 0xf8, 0x3a, 0xf9, 0x47, 0xb1, 0x4c, 0x5d, 0xab, 0x73, 0xe1, 0xe9, 0xce, + 0xf9, 0x8e, 0xb9, 0x27, 0xd8, 0x9c, 0x6e, 0x1a, 0x4d, 0xbb, 0x6d, 0x18, 0x9a, 0xd1, 0xcc, 0x9a, + 0x16, 0xb6, 0x3b, 0xe2, 0xfc, 0x10, 0x63, 0xa2, 0xa3, 0x5a, 0xfb, 0x20, 0xab, 0x1a, 0x7c, 0xfd, + 0xd5, 0xee, 0x29, 0xdc, 0xb2, 0x7c, 0xaf, 0x96, 0xba, 0x27, 0x0f, 0x34, 0xac, 0x37, 0x94, 0x96, + 0xea, 0x1c, 0x31, 0x8e, 0xc7, 0xba, 0x39, 0x5c, 0xad, 0x85, 0x1d, 0x57, 0x6d, 0x59, 0x8c, 0xe1, + 0x0a, 0x63, 0xb0, 0xad, 0x7a, 0xd6, 0x71, 0x55, 0xb7, 0xcd, 0x6c, 0x12, 0x7f, 0x16, 0x07, 0xd8, + 0xa9, 0xd7, 0xdb, 0xb6, 0x8d, 0x8d, 0x3a, 0x46, 0x08, 0x26, 0x0c, 0xb5, 0x85, 0x53, 0xc2, 0x92, + 0x70, 0x2d, 0x2e, 0xd1, 0x6f, 0xf4, 0x38, 0xcc, 0xd8, 0xd8, 0x31, 0xdb, 0x76, 0x1d, 0x2b, 0x6d, + 0x5b, 0x4f, 0x8d, 0xd1, 0xb9, 0x04, 0xa7, 0xed, 0xda, 0x3a, 0xaa, 0x40, 0x8c, 0x0f, 0x53, 0x0b, + 0x4b, 0xc2, 0xb5, 0xc4, 0xca, 0x0b, 0x99, 0xc1, 0xd3, 0x33, 0x23, 0xb1, 0xb5, 0x92, 0x2f, 0x05, + 0x5d, 0x85, 0xb8, 0x61, 0xba, 0x58, 0xa1, 0xd6, 0x8c, 0x53, 0x8d, 0x31, 0x42, 0x28, 0x13, 0x8b, + 0x4a, 0x30, 0x71, 0xa4, 0x19, 0x8d, 0xd4, 0xd4, 0x92, 0x70, 0x6d, 0x6e, 0xe5, 0xc5, 0x28, 0xaa, + 0xca, 0xa6, 0x8b, 0x33, 0x9b, 0x9a, 0xd1, 0x90, 0xa8, 0x08, 0xf4, 0x8e, 0x00, 0x97, 0x3a, 0x32, + 0x49, 0x69, 0x60, 0x57, 0xd5, 0x74, 0x27, 0x15, 0xa3, 0x7e, 0x48, 0x51, 0x84, 0xef, 0x85, 0x05, + 0x55, 0x4f, 0x2c, 0xdc, 0x49, 0x29, 0x78, 0x92, 0x37, 
0x2e, 0x48, 0x8b, 0xc7, 0x7d, 0xe8, 0x48, + 0x81, 0xd9, 0x5a, 0x5b, 0xd3, 0x1b, 0xbe, 0x09, 0xd3, 0xd4, 0x84, 0xcf, 0x44, 0x31, 0x61, 0x95, + 0x08, 0x08, 0x14, 0xcd, 0xd4, 0x42, 0x63, 0x74, 0x00, 0xb3, 0x0d, 0x6c, 0x6b, 0xc7, 0xb8, 0xa1, + 0xd0, 0x33, 0x98, 0x4a, 0x50, 0x05, 0xb7, 0xa3, 0x28, 0x28, 0x98, 0xf5, 0x23, 0x6c, 0x97, 0xc8, + 0xf2, 0x4c, 0xc1, 0x13, 0x46, 0xf4, 0x30, 0xb9, 0x94, 0x8e, 0x5a, 0x30, 0xa3, 0x19, 0x8e, 0xab, + 0xea, 0x3a, 0xcd, 0xff, 0xd4, 0x0c, 0x55, 0xb3, 0x1e, 0x45, 0x4d, 0xc5, 0x3b, 0xe5, 0xdb, 0xaa, + 0xa1, 0x36, 0xb1, 0x9d, 0x29, 0x85, 0xc4, 0x11, 0x75, 0x61, 0xf1, 0xa8, 0x0e, 0xd0, 0xc0, 0x96, + 0x6e, 0x9e, 0xb4, 0xb0, 0xe1, 0xa6, 0xe6, 0xa8, 0xb2, 0x5c, 0x24, 0x9f, 0xe8, 0x6a, 0xb5, 0xa6, + 0x63, 0xf6, 0x49, 0x04, 0x6d, 0x5c, 0x90, 0x42, 0x62, 0x51, 0x0d, 0xa0, 0xa1, 0x39, 0x75, 0xf3, + 0x18, 0xdb, 0xb8, 0x91, 0x9a, 0xa7, 0x4a, 0xee, 0x44, 0x52, 0xc2, 0x56, 0x9f, 0xf8, 0x5f, 0x34, + 0x72, 0x21, 0xa9, 0xc8, 0x82, 0x84, 0xea, 0xba, 0xe4, 0xe0, 0xd2, 0xb0, 0x25, 0xa9, 0x92, 0xad, + 0x28, 0x4a, 0x72, 0xc1, 0xf2, 0x5c, 0xdb, 0x3d, 0x34, 0x6d, 0x82, 0x8d, 0x21, 0xe2, 0xc6, 0x05, + 0x29, 0xac, 0x02, 0x2d, 0x41, 0xc2, 0xc6, 0x2d, 0xdc, 0xd0, 0x3c, 0x8d, 0x93, 0xfc, 0x68, 0xfb, + 0x24, 0x74, 0x13, 0x12, 0x75, 0x1b, 0xab, 0x2e, 0x56, 0x08, 0xa6, 0xa4, 0xe2, 0xd4, 0xa6, 0x34, + 0xb7, 0x89, 0x03, 0x4e, 0xa6, 0xca, 0x01, 0x47, 0x02, 0x8f, 0x9d, 0x10, 0xc8, 0xe2, 0xb6, 0xd5, + 0xf0, 0x17, 0xc3, 0xf9, 0x8b, 0x3d, 0x76, 0x42, 0x58, 0x8d, 0xc3, 0x34, 0x3b, 0x08, 0xe2, 0xd7, + 0x05, 0x88, 0x71, 0x90, 0xe8, 0x8b, 0x51, 0x49, 0x18, 0x6f, 0xdb, 0x1a, 0x83, 0x26, 0xf2, 0x89, + 0x64, 0x98, 0x21, 0x71, 0xc2, 0x86, 0xab, 0x1c, 0xaa, 0xce, 0x21, 0xc5, 0x90, 0xc4, 0xca, 0x73, + 0x51, 0x82, 0xb9, 0xa1, 0x3a, 0x87, 0x52, 0x82, 0x49, 0x21, 0x03, 0xf1, 0x7f, 0x00, 0x13, 0x04, + 0x41, 0xfa, 0xda, 0xf0, 0x0c, 0x2c, 0x38, 0x87, 0xa6, 0xed, 0x2a, 0x0d, 0xec, 0xd4, 0x6d, 0xcd, + 0xa2, 0x11, 0xf5, 0xa0, 0x2b, 0x49, 0x27, 0x0a, 0x01, 0x1d, 0x3d, 0x0d, 0x49, 0x72, 0x57, 0x74, + 0xf0, 0x4e, 0x50, 0xde, 0x79, 0x42, 0x0f, 0xb3, 0x72, 0xb4, 0x8b, 0x8f, 0x8e, 0x76, 0x06, 0xa0, + 0x4e, 0xb0, 0x73, 0x4f, 0x2c, 0x4c, 0x61, 0x34, 0xb1, 0x72, 0x6b, 0x24, 0xa4, 0xdb, 0xb8, 0x20, + 0x2d, 0x1c, 0x77, 0x13, 0xd1, 0x1e, 0x80, 0x87, 0x68, 0x54, 0x8f, 0x87, 0xa8, 0x2f, 0x46, 0x86, + 0x33, 0x26, 0x3f, 0x5e, 0xe3, 0x03, 0xf4, 0x06, 0x40, 0x4d, 0x75, 0x30, 0x43, 0xb1, 0xd9, 0xe8, + 0xf6, 0x87, 0x51, 0x6c, 0x95, 0x14, 0x22, 0x54, 0xbe, 0xea, 0x60, 0x0f, 0xc0, 0xde, 0x80, 0x69, + 0x56, 0x66, 0x30, 0x38, 0x59, 0x1d, 0x01, 0xbb, 0xd8, 0x70, 0xe3, 0x82, 0xc4, 0x85, 0xa2, 0xbb, + 0x1c, 0xb1, 0x08, 0xe6, 0xb0, 0x1b, 0xf3, 0xd3, 0xc3, 0x21, 0x56, 0x00, 0x53, 0x64, 0x84, 0x76, + 0x21, 0xce, 0x01, 0xe5, 0x24, 0x85, 0xa2, 0x07, 0xdc, 0x47, 0x29, 0x12, 0x10, 0x5f, 0x12, 0xfa, + 0x32, 0x5c, 0x0a, 0xc1, 0x86, 0xa2, 0x72, 0x6c, 0x49, 0x5d, 0x8c, 0x0e, 0x84, 0xfd, 0x30, 0x8a, + 0xdc, 0x89, 0x6a, 0x1f, 0x3a, 0x7a, 0x9d, 0x00, 0x94, 0xae, 0xba, 0xb8, 0x41, 0x6b, 0x8f, 0xe9, + 0xa5, 0xf1, 0x6b, 0x89, 0x95, 0x9b, 0x91, 0xcf, 0x80, 0xe4, 0xc9, 0xd8, 0xb5, 0x75, 0x09, 0x6c, + 0xff, 0x1b, 0xe5, 0x61, 0x1e, 0xdf, 0xb3, 0x34, 0xaf, 0x4c, 0x1b, 0x14, 0xa3, 0xe6, 0x82, 0x25, + 0x1c, 0xe4, 0xc2, 0x08, 0x99, 0x18, 0x05, 0x21, 0x67, 0xa2, 0x20, 0x64, 0xfa, 0x05, 0x80, 0xc0, + 0x31, 0x0f, 0x03, 0x75, 0x06, 0x49, 0xe4, 0x13, 0x2d, 0xc2, 0xa4, 0xae, 0xd6, 0x30, 0x2f, 0xd9, + 0xbc, 0x81, 0xf8, 0x4b, 0x01, 0x26, 0x08, 0x26, 0xa0, 0x45, 0x48, 0x6e, 0x96, 0xca, 0x05, 0x65, + 0xb7, 0x2c, 0x57, 0x8a, 0xf9, 0xd2, 0x5a, 0xa9, 0x58, 0x48, 0x5e, 0x40, 0x0f, 
0xc1, 0xa5, 0x4a, + 0x2e, 0xbf, 0x99, 0x5b, 0x2f, 0x2a, 0x7b, 0xbb, 0x5b, 0xe5, 0xa2, 0x94, 0x5b, 0x2d, 0x6d, 0x95, + 0xaa, 0xfb, 0xc9, 0x31, 0xb4, 0x00, 0xb3, 0xab, 0xbb, 0xa5, 0xad, 0x82, 0x52, 0x28, 0x56, 0x73, + 0xa5, 0x2d, 0x39, 0x39, 0x8e, 0xe6, 0x21, 0x51, 0xda, 0x26, 0xbc, 0xab, 0x39, 0xb9, 0x24, 0x27, + 0x27, 0xd0, 0x45, 0x98, 0xe7, 0xcb, 0xb7, 0x73, 0xe5, 0xdc, 0x7a, 0x51, 0x4a, 0x4e, 0xa2, 0x39, + 0x80, 0x42, 0xb1, 0xb2, 0xb5, 0xb3, 0x9f, 0x5b, 0xdd, 0x2a, 0x26, 0xa7, 0xd0, 0x2c, 0xc4, 0x0b, + 0x25, 0x39, 0xbf, 0xb3, 0x57, 0x94, 0xf6, 0x93, 0xd3, 0x44, 0x65, 0xae, 0x5a, 0x2d, 0xca, 0xd5, + 0x5c, 0xb5, 0xb4, 0x53, 0x56, 0x72, 0xbb, 0xd5, 0x8d, 0x1d, 0x89, 0xa8, 0x8c, 0xad, 0x26, 0x58, + 0x1d, 0x48, 0x00, 0x44, 0xfc, 0xc7, 0x38, 0x40, 0x90, 0xf9, 0x5d, 0x85, 0xa9, 0x96, 0x12, 0x96, + 0xc6, 0x3b, 0x0b, 0x53, 0x2d, 0xfd, 0x2b, 0x7f, 0x05, 0xbd, 0xc4, 0x1f, 0x01, 0x68, 0x3b, 0xd8, + 0x56, 0x70, 0x4b, 0xd5, 0x78, 0xa4, 0xe2, 0x84, 0x52, 0x24, 0x04, 0xb2, 0x19, 0xde, 0x51, 0xf2, + 0x36, 0x63, 0xec, 0xfc, 0xcd, 0xf0, 0xd8, 0xe9, 0x4e, 0xde, 0x86, 0xd9, 0xb6, 0x11, 0x5e, 0x3e, + 0x7e, 0xee, 0xf2, 0x19, 0xbe, 0x80, 0x0a, 0xb8, 0x0c, 0x53, 0x75, 0xd3, 0x38, 0xd0, 0x9a, 0x14, + 0x28, 0xe3, 0x12, 0x1b, 0xa1, 0x14, 0x4c, 0xab, 0x8d, 0x86, 0x8d, 0x1d, 0x87, 0xdd, 0xcf, 0x7c, + 0xd8, 0x13, 0x80, 0xa9, 0x9e, 0x00, 0xa0, 0x03, 0x88, 0x59, 0xba, 0xea, 0x1e, 0x98, 0x76, 0x8b, + 0x96, 0x93, 0x73, 0x2b, 0xaf, 0x8c, 0x5c, 0x19, 0x65, 0x2a, 0x4c, 0xa2, 0xe4, 0xcb, 0x16, 0xf3, + 0x10, 0xe3, 0x54, 0x94, 0x82, 0xc5, 0xca, 0x56, 0xae, 0xba, 0xb6, 0x23, 0x6d, 0x77, 0xe5, 0xd6, + 0x34, 0x8c, 0xaf, 0x6f, 0x16, 0x93, 0x02, 0x8a, 0xc1, 0xc4, 0xda, 0x56, 0xf1, 0x6e, 0x72, 0x0c, + 0x01, 0x4c, 0xe5, 0x77, 0xe5, 0xea, 0xce, 0x76, 0x72, 0x5c, 0x7c, 0x67, 0x02, 0xe2, 0x3e, 0x00, + 0xa1, 0x57, 0x61, 0x96, 0x1b, 0xa4, 0xd0, 0x0b, 0x50, 0x18, 0xe5, 0x02, 0x9c, 0xe1, 0x93, 0x64, + 0x94, 0x7e, 0x9f, 0xe4, 0x45, 0x50, 0x78, 0xdd, 0x84, 0xb8, 0xdf, 0xad, 0x51, 0x35, 0x89, 0x95, + 0x47, 0xb8, 0x9a, 0x50, 0x4b, 0x97, 0xd9, 0xe1, 0x4c, 0x52, 0xc0, 0x8f, 0xee, 0xc1, 0xbc, 0x6f, + 0xa7, 0xd7, 0x5b, 0xd1, 0x7d, 0x9a, 0x5b, 0xd9, 0x19, 0xb5, 0x3c, 0xcc, 0xe4, 0x18, 0x9b, 0x4c, + 0xc5, 0x4a, 0x73, 0x6a, 0xc7, 0x18, 0xad, 0xc1, 0xa5, 0x2e, 0xcd, 0x0a, 0xb6, 0x6d, 0xd3, 0x66, + 0x37, 0x3a, 0xe2, 0xfa, 0x6d, 0xab, 0x9e, 0x61, 0x22, 0x2e, 0x76, 0x8a, 0x28, 0x12, 0x76, 0xf1, + 0x3b, 0x02, 0xcc, 0x75, 0xaa, 0x42, 0x8f, 0xc1, 0xd5, 0x5c, 0x39, 0xb7, 0xb5, 0x2f, 0x97, 0x64, + 0x85, 0x9c, 0xcb, 0x5d, 0xb9, 0x6b, 0x2b, 0x13, 0x30, 0x5d, 0x29, 0x96, 0x0b, 0xa5, 0xf2, 0x7a, + 0x52, 0x40, 0x33, 0x10, 0x93, 0xf3, 0xb9, 0x72, 0x99, 0x8c, 0xc6, 0x08, 0xae, 0xac, 0x95, 0xca, + 0x25, 0x79, 0xa3, 0x58, 0x50, 0xe4, 0xdd, 0x7c, 0xbe, 0x28, 0x13, 0xa4, 0xb8, 0x08, 0xf3, 0x3e, + 0x75, 0x2d, 0x57, 0xda, 0x2a, 0x16, 0x92, 0x13, 0x24, 0x55, 0x76, 0xcb, 0xf2, 0x6e, 0xa5, 0xb2, + 0x23, 0x55, 0x8b, 0x05, 0x45, 0x2a, 0xca, 0x3b, 0xbb, 0x52, 0xbe, 0x98, 0x9c, 0x14, 0xbf, 0x2d, + 0x40, 0xdc, 0xbf, 0xfd, 0xd1, 0x53, 0x30, 0x4f, 0x6f, 0x7f, 0x6c, 0x2b, 0xc7, 0xd8, 0x76, 0xf8, + 0x36, 0xc5, 0xa5, 0x39, 0x46, 0xde, 0xf3, 0xa8, 0xe8, 0x2e, 0xc4, 0x1d, 0xad, 0x69, 0xa8, 0x6e, + 0xdb, 0xe6, 0x07, 0xf8, 0x46, 0xe4, 0x82, 0x43, 0xe6, 0x12, 0xa4, 0x40, 0x98, 0xf8, 0xee, 0x18, + 0xcc, 0x75, 0xce, 0x12, 0x38, 0xb1, 0xda, 0x35, 0x5d, 0xab, 0x2b, 0x47, 0xf8, 0x84, 0xc3, 0x89, + 0x47, 0xd9, 0xc4, 0x27, 0xe8, 0xe1, 0x6e, 0x5b, 0xe2, 0x21, 0x79, 0xe8, 0x12, 0x4c, 0x1d, 0xe1, + 0x13, 0x45, 0x6b, 0xb0, 0x1a, 0x71, 0xf2, 0x08, 0x9f, 0x94, 0x1a, 0xe8, 0x0b, 0x10, 0x23, 0x64, + 0x5a, 
0x30, 0x4d, 0xd0, 0x34, 0x5a, 0x1d, 0xde, 0xfe, 0xcc, 0x26, 0xa6, 0x85, 0x98, 0x34, 0x7d, + 0xe4, 0x7d, 0x88, 0x1b, 0x30, 0xcd, 0x68, 0x24, 0xf6, 0x9b, 0xc5, 0x7d, 0xa5, 0xba, 0x5f, 0x29, + 0x76, 0xed, 0xed, 0x25, 0x58, 0xa8, 0xac, 0x57, 0x94, 0x9c, 0x9c, 0x2f, 0x95, 0x94, 0x9c, 0xb4, + 0xbd, 0x23, 0x15, 0x0b, 0xde, 0x2e, 0x57, 0x36, 0x4b, 0x77, 0x95, 0x4a, 0x71, 0x3b, 0x39, 0x26, + 0xfe, 0x60, 0x0c, 0x16, 0x2b, 0x4d, 0x8b, 0xe8, 0xc2, 0x8d, 0xd0, 0x9d, 0xde, 0xe9, 0xb6, 0xd0, + 0xed, 0xb6, 0x19, 0xd4, 0xe5, 0xd4, 0xc7, 0x71, 0xea, 0x63, 0xa4, 0x26, 0xa7, 0x9f, 0xd6, 0x4c, + 0xde, 0x13, 0x4a, 0xbd, 0xe5, 0x35, 0x3b, 0x75, 0xf3, 0x51, 0x00, 0xab, 0x69, 0x29, 0x2c, 0xd6, + 0x74, 0x1b, 0x36, 0x2e, 0x48, 0x31, 0xab, 0x69, 0x6d, 0x92, 0x80, 0x8b, 0x05, 0x48, 0x84, 0xd6, + 0xa2, 0x87, 0x21, 0x95, 0xdf, 0x29, 0x57, 0x8b, 0xe5, 0x6a, 0xbf, 0xc8, 0x5c, 0x81, 0x8b, 0x72, + 0x69, 0xbb, 0xb2, 0x55, 0x54, 0xe4, 0xd2, 0x3a, 0x49, 0x77, 0xe5, 0x15, 0x79, 0xa7, 0x9c, 0x14, + 0x56, 0x63, 0x7c, 0x37, 0xc5, 0xff, 0x8c, 0xc1, 0x62, 0xbf, 0x12, 0x07, 0xb5, 0x60, 0xe2, 0x50, + 0x33, 0x5c, 0x86, 0x2f, 0xfb, 0x0f, 0xb2, 0xad, 0x0b, 0xea, 0x28, 0xcd, 0x70, 0x25, 0xaa, 0x26, + 0xfd, 0x0a, 0xa4, 0x4e, 0xe3, 0x40, 0x19, 0xb8, 0x78, 0xd8, 0x6e, 0xa9, 0x86, 0x62, 0x63, 0xb5, + 0x41, 0xd0, 0x5d, 0x09, 0x75, 0x33, 0x0b, 0x74, 0x4a, 0x62, 0x33, 0x65, 0xb5, 0x85, 0xd3, 0xdf, + 0x17, 0x20, 0x11, 0xde, 0xe2, 0x7b, 0x70, 0x99, 0xc4, 0xd4, 0xa1, 0xbb, 0xa0, 0x84, 0x7b, 0x56, + 0x21, 0x7a, 0x3d, 0xd8, 0x6f, 0x3b, 0x49, 0x3d, 0x68, 0xf5, 0xa1, 0x93, 0x7a, 0x20, 0x38, 0x92, + 0x3f, 0x14, 0x60, 0x26, 0xfc, 0xe0, 0x81, 0x5e, 0x03, 0x08, 0x9e, 0xf5, 0x98, 0x2d, 0x37, 0x23, + 0x1f, 0x9f, 0x8a, 0x2f, 0x42, 0x0a, 0x89, 0x23, 0x2d, 0x5b, 0x30, 0x52, 0x6a, 0x27, 0x2e, 0x76, + 0xd8, 0xa9, 0x9e, 0x0f, 0xe8, 0xab, 0x84, 0x2c, 0xbe, 0x0e, 0x20, 0xd7, 0x55, 0x23, 0xef, 0x5d, + 0xe0, 0xfd, 0x9a, 0xc5, 0x25, 0x52, 0x6a, 0x04, 0xad, 0x1f, 0x7b, 0x53, 0x0b, 0x91, 0xc8, 0xb5, + 0x8f, 0x0d, 0xb2, 0x03, 0x1e, 0x40, 0xc4, 0x24, 0x3e, 0x14, 0x97, 0x61, 0x71, 0x1d, 0xbb, 0xc1, + 0xab, 0x9d, 0x84, 0xbf, 0xd4, 0xc6, 0x8e, 0xdb, 0x4f, 0x8f, 0xf8, 0x2f, 0x01, 0x2e, 0x6f, 0x69, + 0x4e, 0x88, 0xdb, 0x39, 0x83, 0x9d, 0xd4, 0x20, 0x96, 0x6a, 0x63, 0xc3, 0x65, 0xa5, 0x06, 0x1b, + 0x11, 0xfa, 0x81, 0xa6, 0xbb, 0xd8, 0x66, 0x96, 0xb2, 0x11, 0xba, 0x0a, 0x71, 0x4b, 0x6d, 0x62, + 0xc5, 0xd1, 0xee, 0x7b, 0x47, 0x79, 0x52, 0x8a, 0x11, 0x82, 0xac, 0xdd, 0xf7, 0xe0, 0x91, 0x4c, + 0xba, 0xe6, 0x11, 0xe6, 0xdd, 0x2d, 0x65, 0xaf, 0x12, 0xc2, 0x03, 0x7c, 0xc5, 0x13, 0x7f, 0x24, + 0xc0, 0x95, 0x1e, 0x2f, 0x1d, 0xcb, 0x34, 0x1c, 0xd2, 0x6b, 0x25, 0xcc, 0x80, 0x4c, 0x8b, 0xc4, + 0x88, 0xcd, 0x56, 0x28, 0xd2, 0x61, 0x51, 0xe8, 0x93, 0x30, 0x6f, 0xe0, 0x7b, 0xae, 0x12, 0x72, + 0xd2, 0x8b, 0xce, 0x2c, 0x21, 0x57, 0xb8, 0xa3, 0xe2, 0xb3, 0x70, 0xa5, 0x80, 0x75, 0xec, 0xe2, + 0xc1, 0xb6, 0xec, 0x27, 0x02, 0x5c, 0xc9, 0xd3, 0x0e, 0x61, 0x20, 0xfe, 0xd0, 0x9e, 0x8d, 0x77, + 0xec, 0xd9, 0x1e, 0x40, 0x60, 0x2d, 0xbb, 0x0b, 0x87, 0xf5, 0x3b, 0x24, 0x49, 0xfc, 0xbd, 0x00, + 0x57, 0x76, 0x69, 0x13, 0x32, 0x98, 0x7d, 0x1f, 0x91, 0x1d, 0xa1, 0xd6, 0xa9, 0xa5, 0x3a, 0x47, + 0xa7, 0x96, 0xdb, 0x6b, 0x1a, 0xd6, 0x1b, 0xdb, 0xaa, 0x73, 0xc4, 0x5b, 0x27, 0xf2, 0x2d, 0x7e, + 0x02, 0xe6, 0xd6, 0xb1, 0x4b, 0xf2, 0xe8, 0xac, 0xad, 0xc8, 0x40, 0xaa, 0xe3, 0xa4, 0x9d, 0xc7, + 0xff, 0x5d, 0x01, 0x92, 0x24, 0x0f, 0x09, 0xdf, 0xc7, 0xe5, 0x9c, 0x89, 0x6f, 0x09, 0xb0, 0x10, + 0x32, 0x8a, 0x1d, 0x8b, 0x35, 0x98, 0x24, 0x8d, 0x15, 0x3f, 0x10, 0xcf, 0x45, 0x3d, 0x7e, 0x92, + 0xb7, 0x7c, 0xe0, 0x43, 0xf0, 
0x14, 0x2c, 0x78, 0x87, 0xe0, 0xbc, 0x18, 0xfe, 0x54, 0x80, 0x05, + 0x2f, 0xfd, 0xcf, 0xe1, 0x0c, 0x05, 0x71, 0xa2, 0x23, 0x88, 0x57, 0x60, 0x9a, 0xf6, 0x8c, 0xfc, + 0xba, 0x97, 0xa6, 0xc8, 0xb0, 0xd4, 0x40, 0x05, 0x98, 0x20, 0x5f, 0xc3, 0xbc, 0x05, 0x52, 0x5b, + 0xe8, 0x6a, 0xd2, 0x3f, 0x2f, 0x78, 0xf9, 0x7f, 0x9e, 0x81, 0x5c, 0xdf, 0xd8, 0x28, 0xfa, 0x46, + 0xcb, 0xf3, 0xb7, 0x05, 0x48, 0xf3, 0xcd, 0x1f, 0xfc, 0x0e, 0x78, 0xe0, 0x39, 0xf8, 0xbe, 0x00, + 0x57, 0xfb, 0x9a, 0xf1, 0xb1, 0x01, 0xe9, 0xf7, 0x04, 0xb8, 0xcc, 0x50, 0xd7, 0x6f, 0xf2, 0x58, + 0x90, 0x82, 0x3c, 0x13, 0x3a, 0xf2, 0xec, 0x71, 0x98, 0xf1, 0xbb, 0xc0, 0x20, 0xd9, 0x12, 0x3e, + 0xad, 0xd4, 0xd5, 0x58, 0x8e, 0x47, 0x6b, 0x2c, 0xc5, 0x0f, 0x05, 0xb8, 0xcc, 0x80, 0xb6, 0xdb, + 0xa4, 0x7e, 0xfb, 0x36, 0x8a, 0xae, 0xee, 0x24, 0x9b, 0x88, 0x94, 0x64, 0xdf, 0x14, 0x60, 0xc1, + 0x97, 0xba, 0x8d, 0x5d, 0xb5, 0xa1, 0xba, 0x6a, 0xf7, 0xbb, 0x98, 0x10, 0xe9, 0x5d, 0xec, 0x45, + 0x88, 0x61, 0xa3, 0x31, 0xe8, 0x3b, 0xcc, 0x34, 0x36, 0x1a, 0x64, 0x24, 0xca, 0xf0, 0xf8, 0x3a, + 0x76, 0xf7, 0xda, 0xba, 0x71, 0x3f, 0x94, 0x66, 0x72, 0xbb, 0xd5, 0x52, 0xed, 0x93, 0xf3, 0xf6, + 0xf3, 0x94, 0xc4, 0x17, 0x3f, 0x1c, 0x03, 0xf1, 0x2c, 0xa9, 0x2c, 0x87, 0x5d, 0x98, 0xaa, 0x9b, + 0x6d, 0xc3, 0xe5, 0xe9, 0xfb, 0x7a, 0x94, 0xf4, 0x3d, 0x5f, 0x7e, 0x46, 0xc6, 0xc7, 0x98, 0x54, + 0xf0, 0x79, 0xa2, 0x44, 0x62, 0xba, 0xd2, 0xef, 0x08, 0x30, 0xdb, 0x31, 0x83, 0x6a, 0x10, 0x73, + 0x18, 0x81, 0x3d, 0x99, 0xac, 0x8d, 0xf6, 0x47, 0x4c, 0x2e, 0x5e, 0xf2, 0xe5, 0xa2, 0x45, 0x98, + 0xa4, 0xfa, 0x69, 0xa4, 0xc6, 0x25, 0x6f, 0xc0, 0x0a, 0xd3, 0xa0, 0xf2, 0x3d, 0x0b, 0xe6, 0xdf, + 0x66, 0x85, 0x69, 0xc0, 0xed, 0x0c, 0xb9, 0x3f, 0x23, 0x01, 0xd3, 0x8f, 0x59, 0xe5, 0xd8, 0x61, + 0x06, 0xdb, 0xd0, 0x7d, 0x98, 0x71, 0xea, 0xaa, 0xa1, 0x78, 0xef, 0x70, 0x43, 0xa1, 0x52, 0x28, + 0x16, 0x09, 0x27, 0x50, 0x31, 0x30, 0x2a, 0xfd, 0xc1, 0xaf, 0xb5, 0x06, 0x8a, 0x2a, 0xfa, 0x3c, + 0x24, 0x42, 0x26, 0x0f, 0x53, 0x6c, 0x85, 0xf4, 0x40, 0x60, 0xf1, 0x48, 0x97, 0xd0, 0xca, 0x9f, + 0x96, 0x60, 0x21, 0xcf, 0x55, 0xf2, 0x87, 0x26, 0xf4, 0x5b, 0x01, 0x66, 0x3b, 0xaa, 0x2b, 0x74, + 0x27, 0xe2, 0x89, 0xe9, 0xa9, 0x3f, 0xd3, 0x43, 0x5e, 0x19, 0xe2, 0xf5, 0xaf, 0xfd, 0xfd, 0xdf, + 0xdf, 0x1b, 0x7b, 0x06, 0x3d, 0x1d, 0xfc, 0x3e, 0xe4, 0x2b, 0x24, 0xa0, 0xb7, 0x2c, 0xdb, 0xfc, + 0x22, 0xae, 0xbb, 0x4e, 0x76, 0x39, 0x1b, 0xba, 0x55, 0xb2, 0xcb, 0x6f, 0xa2, 0xbf, 0x08, 0x30, + 0xdf, 0xd5, 0x73, 0xa0, 0x48, 0x4f, 0x35, 0xfd, 0xdb, 0xb2, 0x74, 0x7e, 0x24, 0x19, 0x5e, 0xea, + 0xf6, 0xf5, 0xc7, 0x3b, 0x45, 0x21, 0x8f, 0xde, 0x0c, 0xbb, 0x84, 0x3e, 0x10, 0x20, 0xd9, 0xdd, + 0xa6, 0xa0, 0x7c, 0xb4, 0xc7, 0xe2, 0xbe, 0x4d, 0x4e, 0xfa, 0x72, 0x4f, 0x9a, 0x14, 0x5b, 0x96, + 0x7b, 0xc2, 0x8d, 0x5c, 0x8e, 0x10, 0xf4, 0xbf, 0x0a, 0x90, 0xec, 0xee, 0x8d, 0xa2, 0x19, 0x79, + 0x4a, 0x67, 0x35, 0x74, 0xe6, 0xdc, 0xa2, 0x4e, 0xbc, 0x24, 0x0e, 0x1e, 0xe9, 0x1b, 0xe1, 0x26, + 0x86, 0x38, 0xd4, 0xdd, 0x4c, 0x45, 0x73, 0xe8, 0x94, 0x56, 0x6c, 0x54, 0x87, 0x56, 0x06, 0xdf, + 0x95, 0x0e, 0x87, 0xfe, 0x28, 0xc0, 0x42, 0x4f, 0xcf, 0x84, 0x0a, 0x43, 0x9f, 0xec, 0x50, 0x8d, + 0x9d, 0x8e, 0x5c, 0x41, 0x8b, 0x2f, 0x51, 0x67, 0xae, 0xa3, 0xec, 0xc0, 0xce, 0x64, 0xbd, 0xb6, + 0xe6, 0xe7, 0x02, 0x4c, 0xb3, 0x06, 0x11, 0xdd, 0x88, 0x68, 0xfc, 0x68, 0x26, 0x3f, 0x43, 0x4d, + 0x7e, 0x12, 0x3d, 0x71, 0x86, 0xc9, 0xd4, 0x46, 0x72, 0x1e, 0x7e, 0x23, 0x40, 0xdc, 0xef, 0xed, + 0xd0, 0xcb, 0x51, 0xa1, 0x23, 0xdc, 0xa7, 0xa6, 0x6f, 0x0d, 0xb9, 0x9a, 0x41, 0x4e, 0x3f, 0xbb, + 0xfb, 0x1c, 0x04, 0x2f, 0xbc, 0xef, 0x09, 0x00, 0x41, 
0x3b, 0x88, 0x6e, 0x45, 0x87, 0x99, 0x70, + 0x90, 0x4f, 0x03, 0x18, 0x66, 0xd2, 0xf2, 0x40, 0xa1, 0xfc, 0xb5, 0x00, 0x10, 0xf4, 0x9d, 0xd1, + 0x4c, 0xea, 0xe9, 0x57, 0x87, 0xd8, 0x77, 0x86, 0x86, 0xe2, 0x20, 0xf1, 0xbb, 0xe1, 0x75, 0x86, + 0xc4, 0xe4, 0xa0, 0x13, 0x8d, 0x66, 0x72, 0x4f, 0x07, 0x3b, 0xbc, 0xc9, 0x2b, 0x83, 0xc4, 0x97, + 0x99, 0xfc, 0x4f, 0x01, 0x2e, 0xf6, 0x69, 0x04, 0xd1, 0xda, 0x30, 0xc9, 0xd7, 0xe7, 0xf6, 0x5c, + 0x1f, 0x59, 0x0e, 0x4b, 0xe7, 0x41, 0x90, 0x83, 0xfb, 0xd6, 0x71, 0x8f, 0xbe, 0x35, 0x06, 0xe9, + 0xd3, 0xab, 0x79, 0xb4, 0xfd, 0xa0, 0xba, 0x02, 0xcf, 0xdf, 0xf2, 0x83, 0x6d, 0x32, 0xc4, 0x35, + 0xea, 0xf6, 0x1d, 0xf4, 0xb9, 0xc1, 0xaf, 0xb3, 0x8e, 0x9f, 0xfd, 0x70, 0x37, 0xff, 0x2c, 0xc0, + 0x8c, 0x8c, 0xdd, 0x92, 0xda, 0xaa, 0xd0, 0xdf, 0xc9, 0x22, 0x91, 0x1b, 0xaa, 0xa9, 0xad, 0xcc, + 0xf1, 0xf5, 0x4c, 0x78, 0x92, 0x3b, 0x73, 0xa9, 0x8b, 0xc7, 0x9b, 0x15, 0xbf, 0x2a, 0x50, 0xa3, + 0xee, 0x8b, 0x2f, 0x84, 0x8c, 0xe2, 0x7f, 0xe4, 0xee, 0x9b, 0x6b, 0x4e, 0x48, 0xf8, 0x0d, 0x61, + 0xf9, 0xd5, 0xdb, 0xe2, 0x8d, 0x73, 0x96, 0x76, 0xdd, 0x68, 0x5d, 0x02, 0xa8, 0x3b, 0xeb, 0x67, + 0xb9, 0xb3, 0xfe, 0x51, 0xba, 0xd3, 0x1c, 0xd5, 0x9d, 0x2e, 0x01, 0xe8, 0xbf, 0x02, 0xa0, 0x2a, + 0x76, 0x28, 0x11, 0xdb, 0x2d, 0xcd, 0x71, 0x34, 0xd3, 0x70, 0xd0, 0xb5, 0x2e, 0x83, 0x7b, 0x59, + 0xb8, 0x6b, 0x4f, 0x0f, 0xc0, 0xc9, 0x32, 0xea, 0x5d, 0xcf, 0xdd, 0x6f, 0x08, 0xe7, 0x1a, 0xed, + 0xfb, 0xeb, 0xf6, 0x48, 0x23, 0x5e, 0x17, 0xc5, 0x3b, 0xd1, 0xbc, 0xee, 0x2b, 0x06, 0xfd, 0x42, + 0x80, 0xf9, 0xae, 0x87, 0x9e, 0x68, 0x75, 0x7b, 0xff, 0x57, 0xa2, 0xf4, 0xd9, 0x6f, 0x2d, 0xe2, + 0xf3, 0x34, 0x0a, 0xcf, 0x8a, 0xd7, 0xce, 0x39, 0x58, 0xfe, 0x8f, 0xc6, 0xb9, 0xad, 0x5d, 0x2f, + 0x40, 0xd1, 0x6c, 0xed, 0xff, 0x7c, 0x34, 0xa0, 0xad, 0x2b, 0xd7, 0xce, 0xaa, 0x9a, 0x7c, 0x43, + 0x49, 0xa8, 0x85, 0x65, 0xf4, 0x3b, 0xaf, 0x9d, 0x0b, 0xfd, 0xdd, 0x2b, 0x6a, 0x3b, 0xd7, 0xd3, + 0xe2, 0xa6, 0x87, 0xec, 0x5c, 0xc5, 0x15, 0xea, 0xc0, 0xa7, 0xd0, 0xf2, 0x19, 0x0e, 0x84, 0x5b, + 0x7b, 0xd6, 0x5a, 0xcc, 0x77, 0xbd, 0x04, 0x44, 0xef, 0xe7, 0x7a, 0x5f, 0x33, 0xa2, 0xf7, 0x73, + 0x7d, 0x9e, 0x22, 0xfa, 0x3a, 0xd4, 0x27, 0x7b, 0xc2, 0x3e, 0xa1, 0xbf, 0xf9, 0xad, 0x45, 0x68, + 0x4f, 0x86, 0x68, 0x2d, 0x1e, 0xdc, 0xb6, 0xdc, 0xa6, 0x5e, 0x7c, 0x76, 0x25, 0xc2, 0xb6, 0xdc, + 0x08, 0xbf, 0x67, 0xac, 0x7e, 0x4b, 0x80, 0x27, 0xeb, 0x66, 0x8b, 0xab, 0x3f, 0x5d, 0x6b, 0x45, + 0x78, 0xf5, 0x35, 0xc6, 0xd4, 0x34, 0x75, 0xd5, 0x68, 0x66, 0x4c, 0xbb, 0x99, 0x6d, 0x62, 0x83, + 0x56, 0x88, 0x59, 0x6f, 0x4a, 0xb5, 0x34, 0x67, 0x90, 0xff, 0x2a, 0x72, 0xb3, 0x67, 0xea, 0x83, + 0xb1, 0xf1, 0xf5, 0x7c, 0xae, 0x36, 0x45, 0xa5, 0x3d, 0xff, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, + 0xb3, 0xe0, 0xe0, 0x39, 0xcf, 0x33, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/image_basis.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/image_basis.pb.go new file mode 100644 index 0000000..74247c0 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/image_basis.pb.go @@ -0,0 +1,439 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/containeranalysis/v1alpha1/image_basis.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Instructions from dockerfile +type DockerImage_Layer_Directive int32 + +const ( + // Default value for unsupported/missing directive + DockerImage_Layer_DIRECTIVE_UNSPECIFIED DockerImage_Layer_Directive = 0 + // https://docs.docker.com/reference/builder/#maintainer + DockerImage_Layer_MAINTAINER DockerImage_Layer_Directive = 1 + // https://docs.docker.com/reference/builder/#run + DockerImage_Layer_RUN DockerImage_Layer_Directive = 2 + // https://docs.docker.com/reference/builder/#cmd + DockerImage_Layer_CMD DockerImage_Layer_Directive = 3 + // https://docs.docker.com/reference/builder/#label + DockerImage_Layer_LABEL DockerImage_Layer_Directive = 4 + // https://docs.docker.com/reference/builder/#expose + DockerImage_Layer_EXPOSE DockerImage_Layer_Directive = 5 + // https://docs.docker.com/reference/builder/#env + DockerImage_Layer_ENV DockerImage_Layer_Directive = 6 + // https://docs.docker.com/reference/builder/#add + DockerImage_Layer_ADD DockerImage_Layer_Directive = 7 + // https://docs.docker.com/reference/builder/#copy + DockerImage_Layer_COPY DockerImage_Layer_Directive = 8 + // https://docs.docker.com/reference/builder/#entrypoint + DockerImage_Layer_ENTRYPOINT DockerImage_Layer_Directive = 9 + // https://docs.docker.com/reference/builder/#volume + DockerImage_Layer_VOLUME DockerImage_Layer_Directive = 10 + // https://docs.docker.com/reference/builder/#user + DockerImage_Layer_USER DockerImage_Layer_Directive = 11 + // https://docs.docker.com/reference/builder/#workdir + DockerImage_Layer_WORKDIR DockerImage_Layer_Directive = 12 + // https://docs.docker.com/reference/builder/#arg + DockerImage_Layer_ARG DockerImage_Layer_Directive = 13 + // https://docs.docker.com/reference/builder/#onbuild + DockerImage_Layer_ONBUILD DockerImage_Layer_Directive = 14 + // https://docs.docker.com/reference/builder/#stopsignal + DockerImage_Layer_STOPSIGNAL DockerImage_Layer_Directive = 15 + // https://docs.docker.com/reference/builder/#healthcheck + DockerImage_Layer_HEALTHCHECK DockerImage_Layer_Directive = 16 + // https://docs.docker.com/reference/builder/#shell + DockerImage_Layer_SHELL DockerImage_Layer_Directive = 17 +) + +var DockerImage_Layer_Directive_name = map[int32]string{ + 0: "DIRECTIVE_UNSPECIFIED", + 1: "MAINTAINER", + 2: "RUN", + 3: "CMD", + 4: "LABEL", + 5: "EXPOSE", + 6: "ENV", + 7: "ADD", + 8: "COPY", + 9: "ENTRYPOINT", + 10: "VOLUME", + 11: "USER", + 12: "WORKDIR", + 13: "ARG", + 14: "ONBUILD", + 15: "STOPSIGNAL", + 16: "HEALTHCHECK", + 17: "SHELL", +} +var DockerImage_Layer_Directive_value = map[string]int32{ + "DIRECTIVE_UNSPECIFIED": 0, + "MAINTAINER": 1, + "RUN": 2, + "CMD": 3, + "LABEL": 4, + "EXPOSE": 5, + "ENV": 6, + "ADD": 7, + "COPY": 8, + "ENTRYPOINT": 9, + "VOLUME": 10, + 
"USER": 11, + "WORKDIR": 12, + "ARG": 13, + "ONBUILD": 14, + "STOPSIGNAL": 15, + "HEALTHCHECK": 16, + "SHELL": 17, +} + +func (x DockerImage_Layer_Directive) String() string { + return proto.EnumName(DockerImage_Layer_Directive_name, int32(x)) +} +func (DockerImage_Layer_Directive) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0, 0, 0} +} + +// DockerImage holds types defining base image notes +// and derived image occurrences. +type DockerImage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerImage) Reset() { *m = DockerImage{} } +func (m *DockerImage) String() string { return proto.CompactTextString(m) } +func (*DockerImage) ProtoMessage() {} +func (*DockerImage) Descriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0} +} +func (m *DockerImage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage.Unmarshal(m, b) +} +func (m *DockerImage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage.Marshal(b, m, deterministic) +} +func (dst *DockerImage) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage.Merge(dst, src) +} +func (m *DockerImage) XXX_Size() int { + return xxx_messageInfo_DockerImage.Size(m) +} +func (m *DockerImage) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage proto.InternalMessageInfo + +// Layer holds metadata specific to a layer of a Docker image. +type DockerImage_Layer struct { + // The recovered Dockerfile directive used to construct this layer. + Directive DockerImage_Layer_Directive `protobuf:"varint,1,opt,name=directive,proto3,enum=google.devtools.containeranalysis.v1alpha1.DockerImage_Layer_Directive" json:"directive,omitempty"` + // The recovered arguments to the Dockerfile directive. + Arguments string `protobuf:"bytes,2,opt,name=arguments,proto3" json:"arguments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerImage_Layer) Reset() { *m = DockerImage_Layer{} } +func (m *DockerImage_Layer) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Layer) ProtoMessage() {} +func (*DockerImage_Layer) Descriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0, 0} +} +func (m *DockerImage_Layer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Layer.Unmarshal(m, b) +} +func (m *DockerImage_Layer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Layer.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Layer) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Layer.Merge(dst, src) +} +func (m *DockerImage_Layer) XXX_Size() int { + return xxx_messageInfo_DockerImage_Layer.Size(m) +} +func (m *DockerImage_Layer) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Layer.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Layer proto.InternalMessageInfo + +func (m *DockerImage_Layer) GetDirective() DockerImage_Layer_Directive { + if m != nil { + return m.Directive + } + return DockerImage_Layer_DIRECTIVE_UNSPECIFIED +} + +func (m *DockerImage_Layer) GetArguments() string { + if m != nil { + return m.Arguments + } + return "" +} + +// A set of properties that uniquely identify a given Docker image. 
+type DockerImage_Fingerprint struct { + // The layer-id of the final layer in the Docker image's v1 + // representation. + // This field can be used as a filter in list requests. + V1Name string `protobuf:"bytes,1,opt,name=v1_name,json=v1Name,proto3" json:"v1_name,omitempty"` + // The ordered list of v2 blobs that represent a given image. + V2Blob []string `protobuf:"bytes,2,rep,name=v2_blob,json=v2Blob,proto3" json:"v2_blob,omitempty"` + // Output only. The name of the image's v2 blobs computed via: + // [bottom] := v2_blob[bottom] + // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) + // Only the name of the final blob is kept. + // This field can be used as a filter in list requests. + V2Name string `protobuf:"bytes,3,opt,name=v2_name,json=v2Name,proto3" json:"v2_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerImage_Fingerprint) Reset() { *m = DockerImage_Fingerprint{} } +func (m *DockerImage_Fingerprint) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Fingerprint) ProtoMessage() {} +func (*DockerImage_Fingerprint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0, 1} +} +func (m *DockerImage_Fingerprint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Fingerprint.Unmarshal(m, b) +} +func (m *DockerImage_Fingerprint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Fingerprint.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Fingerprint) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Fingerprint.Merge(dst, src) +} +func (m *DockerImage_Fingerprint) XXX_Size() int { + return xxx_messageInfo_DockerImage_Fingerprint.Size(m) +} +func (m *DockerImage_Fingerprint) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Fingerprint.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Fingerprint proto.InternalMessageInfo + +func (m *DockerImage_Fingerprint) GetV1Name() string { + if m != nil { + return m.V1Name + } + return "" +} + +func (m *DockerImage_Fingerprint) GetV2Blob() []string { + if m != nil { + return m.V2Blob + } + return nil +} + +func (m *DockerImage_Fingerprint) GetV2Name() string { + if m != nil { + return m.V2Name + } + return "" +} + +// Basis describes the base image portion (Note) of the DockerImage +// relationship. Linked occurrences are derived from this or an +// equivalent image via: +// FROM +// Or an equivalent reference, e.g. a tag of the resource_url. +type DockerImage_Basis struct { + // The resource_url for the resource representing the basis of + // associated occurrence images. + ResourceUrl string `protobuf:"bytes,1,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + // The fingerprint of the base image. 
+ Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerImage_Basis) Reset() { *m = DockerImage_Basis{} } +func (m *DockerImage_Basis) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Basis) ProtoMessage() {} +func (*DockerImage_Basis) Descriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0, 2} +} +func (m *DockerImage_Basis) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Basis.Unmarshal(m, b) +} +func (m *DockerImage_Basis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Basis.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Basis) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Basis.Merge(dst, src) +} +func (m *DockerImage_Basis) XXX_Size() int { + return xxx_messageInfo_DockerImage_Basis.Size(m) +} +func (m *DockerImage_Basis) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Basis.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Basis proto.InternalMessageInfo + +func (m *DockerImage_Basis) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *DockerImage_Basis) GetFingerprint() *DockerImage_Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +// Derived describes the derived image portion (Occurrence) of the +// DockerImage relationship. This image would be produced from a Dockerfile +// with FROM . +type DockerImage_Derived struct { + // The fingerprint of the derived image. + Fingerprint *DockerImage_Fingerprint `protobuf:"bytes,1,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + // Output only. The number of layers by which this image differs from the + // associated image basis. + Distance uint32 `protobuf:"varint,2,opt,name=distance,proto3" json:"distance,omitempty"` + // This contains layer-specific metadata, if populated it has length + // "distance" and is ordered with [distance] being the layer immediately + // following the base image and [1] being the final layer. + LayerInfo []*DockerImage_Layer `protobuf:"bytes,3,rep,name=layer_info,json=layerInfo,proto3" json:"layer_info,omitempty"` + // Output only. This contains the base image URL for the derived image + // occurrence. 
+ BaseResourceUrl string `protobuf:"bytes,4,opt,name=base_resource_url,json=baseResourceUrl,proto3" json:"base_resource_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerImage_Derived) Reset() { *m = DockerImage_Derived{} } +func (m *DockerImage_Derived) String() string { return proto.CompactTextString(m) } +func (*DockerImage_Derived) ProtoMessage() {} +func (*DockerImage_Derived) Descriptor() ([]byte, []int) { + return fileDescriptor_image_basis_d44a570647a88cb8, []int{0, 3} +} +func (m *DockerImage_Derived) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerImage_Derived.Unmarshal(m, b) +} +func (m *DockerImage_Derived) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerImage_Derived.Marshal(b, m, deterministic) +} +func (dst *DockerImage_Derived) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerImage_Derived.Merge(dst, src) +} +func (m *DockerImage_Derived) XXX_Size() int { + return xxx_messageInfo_DockerImage_Derived.Size(m) +} +func (m *DockerImage_Derived) XXX_DiscardUnknown() { + xxx_messageInfo_DockerImage_Derived.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerImage_Derived proto.InternalMessageInfo + +func (m *DockerImage_Derived) GetFingerprint() *DockerImage_Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +func (m *DockerImage_Derived) GetDistance() uint32 { + if m != nil { + return m.Distance + } + return 0 +} + +func (m *DockerImage_Derived) GetLayerInfo() []*DockerImage_Layer { + if m != nil { + return m.LayerInfo + } + return nil +} + +func (m *DockerImage_Derived) GetBaseResourceUrl() string { + if m != nil { + return m.BaseResourceUrl + } + return "" +} + +func init() { + proto.RegisterType((*DockerImage)(nil), "google.devtools.containeranalysis.v1alpha1.DockerImage") + proto.RegisterType((*DockerImage_Layer)(nil), "google.devtools.containeranalysis.v1alpha1.DockerImage.Layer") + proto.RegisterType((*DockerImage_Fingerprint)(nil), "google.devtools.containeranalysis.v1alpha1.DockerImage.Fingerprint") + proto.RegisterType((*DockerImage_Basis)(nil), "google.devtools.containeranalysis.v1alpha1.DockerImage.Basis") + proto.RegisterType((*DockerImage_Derived)(nil), "google.devtools.containeranalysis.v1alpha1.DockerImage.Derived") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.DockerImage_Layer_Directive", DockerImage_Layer_Directive_name, DockerImage_Layer_Directive_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/image_basis.proto", fileDescriptor_image_basis_d44a570647a88cb8) +} + +var fileDescriptor_image_basis_d44a570647a88cb8 = []byte{ + // 627 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0xdf, 0x6e, 0xda, 0x30, + 0x14, 0xc6, 0x17, 0x28, 0xd0, 0x9c, 0xf4, 0x8f, 0x6b, 0x69, 0x1a, 0x43, 0xbd, 0x60, 0x95, 0x26, + 0x55, 0xbd, 0x08, 0x82, 0x5d, 0x6e, 0xbb, 0x80, 0xc4, 0x85, 0xa8, 0x69, 0x40, 0x06, 0xba, 0x76, + 0x9b, 0x84, 0x0c, 0xb8, 0x59, 0xb4, 0x60, 0x23, 0x27, 0x45, 0xea, 0x3b, 0xec, 0x66, 0x37, 0x7d, + 0x80, 0x3d, 0xe1, 0xde, 0x60, 0x93, 0x53, 0x28, 0xdd, 0xaa, 0x49, 0xd5, 0xa6, 0xdd, 0x99, 0xf3, + 0xf9, 0xfb, 0x7d, 0xf6, 0xf1, 0x21, 0xf0, 0x26, 0x94, 0x32, 0x8c, 0x79, 0x6d, 0xca, 0x17, 0xa9, + 0x94, 0x71, 0x52, 0x9b, 0x48, 0x91, 0xb2, 0x48, 0x70, 0xc5, 0x04, 0x8b, 0xaf, 0x93, 0x28, 0xa9, + 0x2d, 0xea, 0x2c, 0x9e, 0x7f, 0x62, 0xf5, 0x5a, 0x34, 0x63, 0x21, 0x1f, 
0x8d, 0x59, 0x12, 0x25, + 0xf6, 0x5c, 0xc9, 0x54, 0xe2, 0xa3, 0x5b, 0xb7, 0xbd, 0x72, 0xdb, 0x0f, 0xdc, 0xf6, 0xca, 0x5d, + 0xd9, 0x5f, 0x26, 0xb1, 0x79, 0x54, 0x63, 0x42, 0xc8, 0x94, 0xa5, 0x91, 0x14, 0x4b, 0xd2, 0xc1, + 0x4d, 0x09, 0x2c, 0x57, 0x4e, 0x3e, 0x73, 0xe5, 0xe9, 0x94, 0xca, 0x8f, 0x1c, 0x14, 0x7c, 0x76, + 0xcd, 0x15, 0xe6, 0x60, 0x4e, 0x23, 0xc5, 0x27, 0x69, 0xb4, 0xe0, 0x65, 0xa3, 0x6a, 0x1c, 0xee, + 0x34, 0xda, 0xf6, 0xe3, 0x73, 0xed, 0x7b, 0x54, 0x3b, 0x23, 0xda, 0xee, 0x0a, 0x47, 0xd7, 0x64, + 0xbc, 0x0f, 0x26, 0x53, 0xe1, 0xd5, 0x8c, 0x8b, 0x34, 0x29, 0xe7, 0xaa, 0xc6, 0xa1, 0x49, 0xd7, + 0x85, 0x83, 0xef, 0x06, 0x98, 0x77, 0x36, 0xfc, 0x1c, 0x9e, 0xba, 0x1e, 0x25, 0xce, 0xc0, 0x3b, + 0x23, 0xa3, 0x61, 0xd0, 0xef, 0x11, 0xc7, 0x3b, 0xf6, 0x88, 0x8b, 0x9e, 0xe0, 0x1d, 0x80, 0xd3, + 0xa6, 0x17, 0x0c, 0x9a, 0x5e, 0x40, 0x28, 0x32, 0x70, 0x09, 0xf2, 0x74, 0x18, 0xa0, 0x9c, 0x5e, + 0x38, 0xa7, 0x2e, 0xca, 0x63, 0x13, 0x0a, 0x7e, 0xb3, 0x45, 0x7c, 0xb4, 0x81, 0x01, 0x8a, 0xe4, + 0xbc, 0xd7, 0xed, 0x13, 0x54, 0xd0, 0x3a, 0x09, 0xce, 0x50, 0x51, 0x2f, 0x9a, 0xae, 0x8b, 0x4a, + 0x78, 0x13, 0x36, 0x9c, 0x6e, 0xef, 0x02, 0x6d, 0x6a, 0x28, 0x09, 0x06, 0xf4, 0xa2, 0xd7, 0xf5, + 0x82, 0x01, 0x32, 0xb5, 0xef, 0xac, 0xeb, 0x0f, 0x4f, 0x09, 0x02, 0xbd, 0x6b, 0xd8, 0x27, 0x14, + 0x59, 0xd8, 0x82, 0xd2, 0xbb, 0x2e, 0x3d, 0x71, 0x3d, 0x8a, 0xb6, 0x32, 0x0a, 0x6d, 0xa3, 0x6d, + 0x5d, 0xed, 0x06, 0xad, 0xa1, 0xe7, 0xbb, 0x68, 0x47, 0x83, 0xfa, 0x83, 0x6e, 0xaf, 0xef, 0xb5, + 0x83, 0xa6, 0x8f, 0x76, 0xf1, 0x2e, 0x58, 0x1d, 0xd2, 0xf4, 0x07, 0x1d, 0xa7, 0x43, 0x9c, 0x13, + 0x84, 0xf4, 0xe1, 0xfa, 0x1d, 0xe2, 0xfb, 0x68, 0xaf, 0x72, 0x0e, 0xd6, 0x71, 0x24, 0x42, 0xae, + 0xe6, 0x2a, 0x12, 0x29, 0x7e, 0x06, 0xa5, 0x45, 0x7d, 0x24, 0xd8, 0xec, 0xf6, 0x11, 0x4c, 0x5a, + 0x5c, 0xd4, 0x03, 0x36, 0xe3, 0x99, 0xd0, 0x18, 0x8d, 0x63, 0x39, 0x2e, 0xe7, 0xaa, 0xf9, 0x4c, + 0x68, 0xb4, 0x62, 0x39, 0x5e, 0x0a, 0x99, 0x23, 0xbf, 0x74, 0x34, 0xb4, 0xa3, 0xf2, 0xd5, 0x80, + 0x42, 0x4b, 0x4f, 0x11, 0x7e, 0x01, 0x5b, 0x8a, 0x27, 0xf2, 0x4a, 0x4d, 0xf8, 0xe8, 0x4a, 0xc5, + 0x4b, 0xb2, 0xb5, 0xaa, 0x0d, 0x55, 0x8c, 0x39, 0x58, 0x97, 0xeb, 0x63, 0x64, 0x2f, 0x63, 0x35, + 0x9c, 0xbf, 0x1d, 0x80, 0x7b, 0x37, 0xa2, 0xf7, 0xb9, 0x95, 0x9b, 0x1c, 0x94, 0x5c, 0xae, 0xa2, + 0x05, 0x9f, 0xfe, 0x1e, 0x69, 0xfc, 0x9f, 0x48, 0x5c, 0x81, 0xcd, 0x69, 0x94, 0xa4, 0x4c, 0x4c, + 0x78, 0x76, 0xad, 0x6d, 0x7a, 0xf7, 0x1b, 0x7f, 0x04, 0x88, 0xf5, 0xac, 0x8e, 0x22, 0x71, 0x29, + 0xcb, 0xf9, 0x6a, 0xfe, 0xd0, 0x6a, 0xbc, 0xfd, 0xa7, 0xa9, 0xa7, 0x66, 0x06, 0xf4, 0xc4, 0xa5, + 0xc4, 0x47, 0xb0, 0x37, 0x66, 0x09, 0x1f, 0xfd, 0xd2, 0xfb, 0x8d, 0xac, 0xf7, 0xbb, 0x5a, 0xa0, + 0xeb, 0xfe, 0xb7, 0xbe, 0x18, 0xf0, 0x72, 0x22, 0x67, 0xab, 0xec, 0x3f, 0x47, 0xf6, 0x8c, 0xf7, + 0x1f, 0x96, 0x9b, 0x42, 0x19, 0x33, 0x11, 0xda, 0x52, 0x85, 0xb5, 0x90, 0x8b, 0xec, 0x0f, 0x5e, + 0xbb, 0x95, 0xd8, 0x3c, 0x4a, 0x1e, 0xf3, 0xad, 0x79, 0xfd, 0x40, 0xfa, 0x96, 0xcb, 0xb7, 0x9d, + 0xe6, 0xb8, 0x98, 0xd1, 0x5e, 0xfd, 0x0c, 0x00, 0x00, 0xff, 0xff, 0xc3, 0xd1, 0xf4, 0x9a, 0xb8, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/package_vulnerability.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/package_vulnerability.pb.go new file mode 100644 index 0000000..f419c5e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/package_vulnerability.pb.go @@ -0,0 +1,612 @@ +// Code generated by protoc-gen-go. 
DO NOT EDIT. +// source: google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Note provider-assigned severity/impact ranking +type VulnerabilityType_Severity int32 + +const ( + // Unknown Impact + VulnerabilityType_SEVERITY_UNSPECIFIED VulnerabilityType_Severity = 0 + // Minimal Impact + VulnerabilityType_MINIMAL VulnerabilityType_Severity = 1 + // Low Impact + VulnerabilityType_LOW VulnerabilityType_Severity = 2 + // Medium Impact + VulnerabilityType_MEDIUM VulnerabilityType_Severity = 3 + // High Impact + VulnerabilityType_HIGH VulnerabilityType_Severity = 4 + // Critical Impact + VulnerabilityType_CRITICAL VulnerabilityType_Severity = 5 +) + +var VulnerabilityType_Severity_name = map[int32]string{ + 0: "SEVERITY_UNSPECIFIED", + 1: "MINIMAL", + 2: "LOW", + 3: "MEDIUM", + 4: "HIGH", + 5: "CRITICAL", +} +var VulnerabilityType_Severity_value = map[string]int32{ + "SEVERITY_UNSPECIFIED": 0, + "MINIMAL": 1, + "LOW": 2, + "MEDIUM": 3, + "HIGH": 4, + "CRITICAL": 5, +} + +func (x VulnerabilityType_Severity) String() string { + return proto.EnumName(VulnerabilityType_Severity_name, int32(x)) +} +func (VulnerabilityType_Severity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 0} +} + +// Whether this is an ordinary package version or a +// sentinel MIN/MAX version. +type VulnerabilityType_Version_VersionKind int32 + +const ( + // A standard package version, defined by the other fields. + VulnerabilityType_Version_NORMAL VulnerabilityType_Version_VersionKind = 0 + // A special version representing negative infinity, + // other fields are ignored. + VulnerabilityType_Version_MINIMUM VulnerabilityType_Version_VersionKind = 1 + // A special version representing positive infinity, + // other fields are ignored. + VulnerabilityType_Version_MAXIMUM VulnerabilityType_Version_VersionKind = 2 +) + +var VulnerabilityType_Version_VersionKind_name = map[int32]string{ + 0: "NORMAL", + 1: "MINIMUM", + 2: "MAXIMUM", +} +var VulnerabilityType_Version_VersionKind_value = map[string]int32{ + "NORMAL": 0, + "MINIMUM": 1, + "MAXIMUM": 2, +} + +func (x VulnerabilityType_Version_VersionKind) String() string { + return proto.EnumName(VulnerabilityType_Version_VersionKind_name, int32(x)) +} +func (VulnerabilityType_Version_VersionKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 0, 0} +} + +// VulnerabilityType provides metadata about a security vulnerability. +type VulnerabilityType struct { + // The CVSS score for this Vulnerability. 
+ CvssScore float32 `protobuf:"fixed32,2,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"` + // Note provider assigned impact of the vulnerability + Severity VulnerabilityType_Severity `protobuf:"varint,3,opt,name=severity,proto3,enum=google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Severity" json:"severity,omitempty"` + // All information about the package to specifically identify this + // vulnerability. One entry per (version range and cpe_uri) the + // package vulnerability has manifested in. + Details []*VulnerabilityType_Detail `protobuf:"bytes,4,rep,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType) Reset() { *m = VulnerabilityType{} } +func (m *VulnerabilityType) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType) ProtoMessage() {} +func (*VulnerabilityType) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0} +} +func (m *VulnerabilityType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType.Unmarshal(m, b) +} +func (m *VulnerabilityType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType.Merge(dst, src) +} +func (m *VulnerabilityType) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType.Size(m) +} +func (m *VulnerabilityType) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType proto.InternalMessageInfo + +func (m *VulnerabilityType) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *VulnerabilityType) GetSeverity() VulnerabilityType_Severity { + if m != nil { + return m.Severity + } + return VulnerabilityType_SEVERITY_UNSPECIFIED +} + +func (m *VulnerabilityType) GetDetails() []*VulnerabilityType_Detail { + if m != nil { + return m.Details + } + return nil +} + +// Version contains structured information about the version of the package. +// For a discussion of this in Debian/Ubuntu: +// http://serverfault.com/questions/604541/debian-packages-version-convention +// For a discussion of this in Redhat/Fedora/Centos: +// http://blog.jasonantman.com/2014/07/how-yum-and-rpm-compare-versions/ +type VulnerabilityType_Version struct { + // Used to correct mistakes in the version numbering scheme. + Epoch int32 `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty"` + // The main part of the version name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The iteration of the package build from the above version. + Revision string `protobuf:"bytes,3,opt,name=revision,proto3" json:"revision,omitempty"` + // Distinguish between sentinel MIN/MAX versions and normal versions. + // If kind is not NORMAL, then the other fields are ignored. 
+ Kind VulnerabilityType_Version_VersionKind `protobuf:"varint,5,opt,name=kind,proto3,enum=google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Version_VersionKind" json:"kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType_Version) Reset() { *m = VulnerabilityType_Version{} } +func (m *VulnerabilityType_Version) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Version) ProtoMessage() {} +func (*VulnerabilityType_Version) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 0} +} +func (m *VulnerabilityType_Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_Version.Unmarshal(m, b) +} +func (m *VulnerabilityType_Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_Version.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_Version.Merge(dst, src) +} +func (m *VulnerabilityType_Version) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_Version.Size(m) +} +func (m *VulnerabilityType_Version) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_Version proto.InternalMessageInfo + +func (m *VulnerabilityType_Version) GetEpoch() int32 { + if m != nil { + return m.Epoch + } + return 0 +} + +func (m *VulnerabilityType_Version) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VulnerabilityType_Version) GetRevision() string { + if m != nil { + return m.Revision + } + return "" +} + +func (m *VulnerabilityType_Version) GetKind() VulnerabilityType_Version_VersionKind { + if m != nil { + return m.Kind + } + return VulnerabilityType_Version_NORMAL +} + +// Identifies all occurrences of this vulnerability in the package for a +// specific distro/location +// For example: glibc in cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2 +type VulnerabilityType_Detail struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in + // which the vulnerability manifests. Examples include distro or storage + // location for vulnerable jar. + // This field can be used as a filter in list requests. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The name of the package where the vulnerability was found. + // This field can be used as a filter in list requests. + Package string `protobuf:"bytes,8,opt,name=package,proto3" json:"package,omitempty"` + // The min version of the package in which the vulnerability exists. + MinAffectedVersion *VulnerabilityType_Version `protobuf:"bytes,6,opt,name=min_affected_version,json=minAffectedVersion,proto3" json:"min_affected_version,omitempty"` + // The max version of the package in which the vulnerability exists. + // This field can be used as a filter in list requests. + MaxAffectedVersion *VulnerabilityType_Version `protobuf:"bytes,7,opt,name=max_affected_version,json=maxAffectedVersion,proto3" json:"max_affected_version,omitempty"` + // The severity (eg: distro assigned severity) for this vulnerability. + SeverityName string `protobuf:"bytes,4,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"` + // A vendor-specific description of this note. 
+ Description string `protobuf:"bytes,9,opt,name=description,proto3" json:"description,omitempty"` + // The fix for this specific package version. + FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,5,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"` + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + PackageType string `protobuf:"bytes,10,opt,name=package_type,json=packageType,proto3" json:"package_type,omitempty"` + // Whether this Detail is obsolete. Occurrences are expected not to point to + // obsolete details. + IsObsolete bool `protobuf:"varint,11,opt,name=is_obsolete,json=isObsolete,proto3" json:"is_obsolete,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType_Detail) Reset() { *m = VulnerabilityType_Detail{} } +func (m *VulnerabilityType_Detail) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_Detail) ProtoMessage() {} +func (*VulnerabilityType_Detail) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 1} +} +func (m *VulnerabilityType_Detail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_Detail.Unmarshal(m, b) +} +func (m *VulnerabilityType_Detail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_Detail.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_Detail) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_Detail.Merge(dst, src) +} +func (m *VulnerabilityType_Detail) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_Detail.Size(m) +} +func (m *VulnerabilityType_Detail) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_Detail.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_Detail proto.InternalMessageInfo + +func (m *VulnerabilityType_Detail) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *VulnerabilityType_Detail) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *VulnerabilityType_Detail) GetMinAffectedVersion() *VulnerabilityType_Version { + if m != nil { + return m.MinAffectedVersion + } + return nil +} + +func (m *VulnerabilityType_Detail) GetMaxAffectedVersion() *VulnerabilityType_Version { + if m != nil { + return m.MaxAffectedVersion + } + return nil +} + +func (m *VulnerabilityType_Detail) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +func (m *VulnerabilityType_Detail) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *VulnerabilityType_Detail) GetFixedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *VulnerabilityType_Detail) GetPackageType() string { + if m != nil { + return m.PackageType + } + return "" +} + +func (m *VulnerabilityType_Detail) GetIsObsolete() bool { + if m != nil { + return m.IsObsolete + } + return false +} + +// Used by Occurrence to point to where the vulnerability exists and how +// to fix it. +type VulnerabilityType_VulnerabilityDetails struct { + // The type of package; whether native or non native(ruby gems, + // node.js packages etc) + Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"` + // Output only. 
The note provider assigned Severity of the vulnerability. + Severity VulnerabilityType_Severity `protobuf:"varint,4,opt,name=severity,proto3,enum=google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Severity" json:"severity,omitempty"` + // Output only. The CVSS score of this vulnerability. CVSS score is on a + // scale of 0-10 where 0 indicates low severity and 10 indicates high + // severity. + CvssScore float32 `protobuf:"fixed32,5,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"` + // The set of affected locations and their fixes (if available) within + // the associated resource. + PackageIssue []*VulnerabilityType_PackageIssue `protobuf:"bytes,6,rep,name=package_issue,json=packageIssue,proto3" json:"package_issue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType_VulnerabilityDetails) Reset() { + *m = VulnerabilityType_VulnerabilityDetails{} +} +func (m *VulnerabilityType_VulnerabilityDetails) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_VulnerabilityDetails) ProtoMessage() {} +func (*VulnerabilityType_VulnerabilityDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 2} +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Unmarshal(m, b) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_VulnerabilityDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Merge(dst, src) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.Size(m) +} +func (m *VulnerabilityType_VulnerabilityDetails) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_VulnerabilityDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_VulnerabilityDetails proto.InternalMessageInfo + +func (m *VulnerabilityType_VulnerabilityDetails) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetSeverity() VulnerabilityType_Severity { + if m != nil { + return m.Severity + } + return VulnerabilityType_SEVERITY_UNSPECIFIED +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *VulnerabilityType_VulnerabilityDetails) GetPackageIssue() []*VulnerabilityType_PackageIssue { + if m != nil { + return m.PackageIssue + } + return nil +} + +// This message wraps a location affected by a vulnerability and its +// associated fix (if one is available). +type VulnerabilityType_PackageIssue struct { + // The location of the vulnerability. + AffectedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,1,opt,name=affected_location,json=affectedLocation,proto3" json:"affected_location,omitempty"` + // The location of the available fix for vulnerability. + FixedLocation *VulnerabilityType_VulnerabilityLocation `protobuf:"bytes,2,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"` + // The severity (eg: distro assigned severity) for this vulnerability. 
+ SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType_PackageIssue) Reset() { *m = VulnerabilityType_PackageIssue{} } +func (m *VulnerabilityType_PackageIssue) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_PackageIssue) ProtoMessage() {} +func (*VulnerabilityType_PackageIssue) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 3} +} +func (m *VulnerabilityType_PackageIssue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Unmarshal(m, b) +} +func (m *VulnerabilityType_PackageIssue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_PackageIssue) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_PackageIssue.Merge(dst, src) +} +func (m *VulnerabilityType_PackageIssue) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_PackageIssue.Size(m) +} +func (m *VulnerabilityType_PackageIssue) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_PackageIssue.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_PackageIssue proto.InternalMessageInfo + +func (m *VulnerabilityType_PackageIssue) GetAffectedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.AffectedLocation + } + return nil +} + +func (m *VulnerabilityType_PackageIssue) GetFixedLocation() *VulnerabilityType_VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *VulnerabilityType_PackageIssue) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +// The location of the vulnerability +type VulnerabilityType_VulnerabilityLocation struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) + // format. Examples include distro or storage location for vulnerable jar. + // This field can be used as a filter in list requests. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The package being described. + Package string `protobuf:"bytes,2,opt,name=package,proto3" json:"package,omitempty"` + // The version of the package being described. + // This field can be used as a filter in list requests. 
+ Version *VulnerabilityType_Version `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityType_VulnerabilityLocation) Reset() { + *m = VulnerabilityType_VulnerabilityLocation{} +} +func (m *VulnerabilityType_VulnerabilityLocation) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityType_VulnerabilityLocation) ProtoMessage() {} +func (*VulnerabilityType_VulnerabilityLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_package_vulnerability_c46b00c226fdd05b, []int{0, 4} +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Unmarshal(m, b) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityType_VulnerabilityLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Merge(dst, src) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_Size() int { + return xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.Size(m) +} +func (m *VulnerabilityType_VulnerabilityLocation) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityType_VulnerabilityLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityType_VulnerabilityLocation proto.InternalMessageInfo + +func (m *VulnerabilityType_VulnerabilityLocation) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityLocation) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *VulnerabilityType_VulnerabilityLocation) GetVersion() *VulnerabilityType_Version { + if m != nil { + return m.Version + } + return nil +} + +func init() { + proto.RegisterType((*VulnerabilityType)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType") + proto.RegisterType((*VulnerabilityType_Version)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType.Version") + proto.RegisterType((*VulnerabilityType_Detail)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType.Detail") + proto.RegisterType((*VulnerabilityType_VulnerabilityDetails)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType.VulnerabilityDetails") + proto.RegisterType((*VulnerabilityType_PackageIssue)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType.PackageIssue") + proto.RegisterType((*VulnerabilityType_VulnerabilityLocation)(nil), "google.devtools.containeranalysis.v1alpha1.VulnerabilityType.VulnerabilityLocation") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Severity", VulnerabilityType_Severity_name, VulnerabilityType_Severity_value) + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.VulnerabilityType_Version_VersionKind", VulnerabilityType_Version_VersionKind_name, VulnerabilityType_Version_VersionKind_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto", fileDescriptor_package_vulnerability_c46b00c226fdd05b) +} + +var fileDescriptor_package_vulnerability_c46b00c226fdd05b = []byte{ + // 769 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0xcf, 
0x6e, 0xfa, 0x46, + 0x10, 0xae, 0x8d, 0xc1, 0x30, 0x26, 0x91, 0xb3, 0xa2, 0xaa, 0x85, 0x5a, 0x95, 0xa6, 0xaa, 0x84, + 0x7a, 0x30, 0x0a, 0x39, 0xf6, 0x44, 0x81, 0x24, 0x6e, 0x81, 0xa4, 0x26, 0xa4, 0xff, 0xa4, 0x58, + 0x8b, 0xd9, 0x90, 0x55, 0x8c, 0xd7, 0xf2, 0x3a, 0x34, 0xf4, 0xd4, 0x07, 0xe8, 0x33, 0xf4, 0xd0, + 0x6b, 0x6f, 0x55, 0xdf, 0xaa, 0x0f, 0xd1, 0xca, 0x6b, 0x2f, 0x82, 0xd0, 0x4a, 0x91, 0x48, 0x7e, + 0x27, 0x3c, 0x33, 0xe6, 0xfb, 0x66, 0x76, 0xbf, 0xf9, 0x00, 0xce, 0xe6, 0x8c, 0xcd, 0x03, 0xd2, + 0x9a, 0x91, 0x65, 0xc2, 0x58, 0xc0, 0x5b, 0x3e, 0x0b, 0x13, 0x4c, 0x43, 0x12, 0xe3, 0x10, 0x07, + 0x2b, 0x4e, 0x79, 0x6b, 0x79, 0x82, 0x83, 0xe8, 0x1e, 0x9f, 0xb4, 0x22, 0xec, 0x3f, 0xe0, 0x39, + 0xf1, 0x96, 0x8f, 0x41, 0x5a, 0x9f, 0xd2, 0x80, 0x26, 0x2b, 0x3b, 0x8a, 0x59, 0xc2, 0xd0, 0xe7, + 0x19, 0x8e, 0x2d, 0x71, 0xec, 0x1d, 0x1c, 0x5b, 0xe2, 0xd4, 0x3f, 0xcc, 0x39, 0x71, 0x44, 0x5b, + 0x38, 0x0c, 0x59, 0x82, 0x13, 0xca, 0x42, 0x9e, 0x21, 0x1d, 0xff, 0x73, 0x08, 0x47, 0x37, 0x9b, + 0x0c, 0xd7, 0xab, 0x88, 0xa0, 0x8f, 0x00, 0xfc, 0x25, 0xe7, 0x1e, 0xf7, 0x59, 0x4c, 0x2c, 0xb5, + 0xa1, 0x34, 0x55, 0xb7, 0x92, 0x66, 0xc6, 0x69, 0x02, 0x4d, 0xa1, 0xcc, 0xc9, 0x92, 0xc4, 0x34, + 0x59, 0x59, 0x85, 0x86, 0xd2, 0x3c, 0x6c, 0x9f, 0xd9, 0x2f, 0xef, 0xc8, 0xde, 0xe1, 0xb3, 0xc7, + 0x39, 0x9a, 0xbb, 0xc6, 0x45, 0xb7, 0xa0, 0xcf, 0x48, 0x82, 0x69, 0xc0, 0x2d, 0xad, 0x51, 0x68, + 0x1a, 0xed, 0xde, 0x7e, 0x14, 0x3d, 0x01, 0xe6, 0x4a, 0xd0, 0xfa, 0xdf, 0x0a, 0xe8, 0x37, 0x24, + 0xe6, 0x94, 0x85, 0xa8, 0x06, 0x45, 0x12, 0x31, 0xff, 0xde, 0x52, 0x1a, 0x4a, 0xb3, 0xe8, 0x66, + 0x01, 0x42, 0xa0, 0x85, 0x78, 0x91, 0x8d, 0x5f, 0x71, 0xc5, 0x33, 0xaa, 0x43, 0x39, 0x26, 0x4b, + 0x9a, 0x7e, 0x4b, 0x4c, 0x5e, 0x71, 0xd7, 0x31, 0x22, 0xa0, 0x3d, 0xd0, 0x70, 0x66, 0x15, 0xc5, + 0x89, 0x7c, 0xb3, 0x5f, 0xbb, 0x79, 0x6b, 0xf2, 0xf3, 0x6b, 0x1a, 0xce, 0x5c, 0x01, 0x7f, 0x7c, + 0x0a, 0xc6, 0x46, 0x12, 0x01, 0x94, 0x46, 0x97, 0xee, 0xb0, 0x33, 0x30, 0xdf, 0x43, 0x06, 0xe8, + 0x43, 0x67, 0xe4, 0x0c, 0x27, 0x43, 0x53, 0x11, 0x41, 0xe7, 0x3b, 0x11, 0xa8, 0xf5, 0xbf, 0x34, + 0x28, 0x65, 0x27, 0x80, 0x3e, 0x00, 0xdd, 0x8f, 0x88, 0xf7, 0x18, 0x53, 0x31, 0x6e, 0xc5, 0x2d, + 0xf9, 0x11, 0x99, 0xc4, 0x14, 0x59, 0xa0, 0xe7, 0x9a, 0xb3, 0xca, 0xa2, 0x20, 0x43, 0xf4, 0x13, + 0xd4, 0x16, 0x34, 0xf4, 0xf0, 0xdd, 0x1d, 0xf1, 0x13, 0x32, 0xf3, 0x96, 0x19, 0xbf, 0x55, 0x6a, + 0x28, 0x4d, 0xa3, 0xdd, 0x7f, 0x95, 0x49, 0x5d, 0xb4, 0xa0, 0x61, 0x27, 0x67, 0x90, 0x17, 0x93, + 0x12, 0xe3, 0xa7, 0x5d, 0x62, 0xfd, 0x75, 0x89, 0xf1, 0xd3, 0x73, 0xe2, 0x4f, 0xe1, 0x40, 0x2a, + 0xd1, 0x13, 0x22, 0xd0, 0xc4, 0x89, 0x54, 0x65, 0x72, 0x94, 0x8a, 0xa1, 0x01, 0xc6, 0x8c, 0x70, + 0x3f, 0xa6, 0x51, 0xba, 0x51, 0x56, 0x45, 0xbc, 0xb2, 0x99, 0x42, 0x3f, 0xc3, 0xe1, 0x1d, 0x7d, + 0x22, 0x33, 0x2f, 0x60, 0xbe, 0x58, 0x3b, 0x21, 0x0e, 0xa3, 0x3d, 0xde, 0xb3, 0xf3, 0xcd, 0xcc, + 0x20, 0x87, 0x76, 0x0f, 0x04, 0x95, 0x0c, 0xd1, 0x27, 0x50, 0x95, 0x16, 0x92, 0xac, 0x22, 0x62, + 0x41, 0xd6, 0x5e, 0x9e, 0x13, 0x6b, 0xfe, 0x31, 0x18, 0x94, 0x7b, 0x6c, 0xca, 0x59, 0x40, 0x12, + 0x62, 0x19, 0x0d, 0xa5, 0x59, 0x76, 0x81, 0xf2, 0xcb, 0x3c, 0x53, 0xff, 0x4d, 0x85, 0xda, 0x16, + 0x59, 0xa6, 0x21, 0x9e, 0xee, 0x86, 0x00, 0xcd, 0x76, 0x40, 0x3c, 0x6f, 0xb9, 0x82, 0xf6, 0x46, + 0xae, 0xb0, 0x6d, 0x4c, 0xc5, 0xe7, 0xc6, 0xc4, 0xe0, 0x40, 0xce, 0x4c, 0x39, 0x7f, 0x24, 0x56, + 0x49, 0x58, 0xc7, 0x57, 0xfb, 0xf5, 0x71, 0x95, 0x41, 0x3a, 0x29, 0xa2, 0x2b, 0x0f, 0x55, 0x44, + 0xf5, 0x3f, 0x55, 0xa8, 0x6e, 0x96, 0xd1, 0x2f, 0x0a, 0x1c, 0xad, 0xe5, 0xba, 0xbe, 0x75, 0xe5, + 0xed, 
0x6e, 0xdd, 0x94, 0x6c, 0xeb, 0x8b, 0xdf, 0x15, 0x9d, 0xfa, 0xce, 0x44, 0xb7, 0xb3, 0x37, + 0x85, 0xdd, 0xbd, 0xa9, 0xff, 0xa1, 0xc0, 0xfb, 0xff, 0x89, 0xf6, 0x22, 0x6f, 0x52, 0xb7, 0xbd, + 0xc9, 0x03, 0x5d, 0xba, 0x82, 0xf6, 0x9a, 0xae, 0x20, 0x51, 0x8f, 0x6f, 0xa1, 0x2c, 0x85, 0x88, + 0x2c, 0xa8, 0x8d, 0xfb, 0x37, 0x7d, 0xd7, 0xb9, 0xfe, 0xde, 0x9b, 0x8c, 0xc6, 0x57, 0xfd, 0xae, + 0x73, 0xe6, 0xf4, 0x7b, 0x1b, 0xd6, 0xdb, 0x19, 0x98, 0x0a, 0xd2, 0xa1, 0x30, 0xb8, 0xfc, 0xd6, + 0x54, 0x53, 0x73, 0x1e, 0xf6, 0x7b, 0xce, 0x64, 0x68, 0x16, 0x50, 0x19, 0xb4, 0x0b, 0xe7, 0xfc, + 0xc2, 0xd4, 0x50, 0x15, 0xca, 0x5d, 0xd7, 0xb9, 0x76, 0xba, 0x9d, 0x81, 0x59, 0xfc, 0xf2, 0x57, + 0x05, 0x3e, 0xf3, 0xd9, 0x42, 0x76, 0xfd, 0xff, 0xcd, 0x5e, 0x29, 0x3f, 0xfc, 0x98, 0xbf, 0x34, + 0x67, 0x01, 0x0e, 0xe7, 0x36, 0x8b, 0xe7, 0xad, 0x39, 0x09, 0xc5, 0x2f, 0x79, 0x2b, 0x2b, 0xe1, + 0x88, 0xf2, 0x97, 0xfc, 0xbd, 0xf8, 0x62, 0xa7, 0xf4, 0xbb, 0x5a, 0x38, 0xef, 0x76, 0xa6, 0x25, + 0x81, 0x76, 0xfa, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x66, 0xf8, 0xb7, 0x2f, 0xab, 0x08, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/provenance.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/provenance.pb.go new file mode 100644 index 0000000..a0153bd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/provenance.pb.go @@ -0,0 +1,989 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1alpha1/provenance.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies the hash algorithm, if any. +type Hash_HashType int32 + +const ( + // No hash requested. + Hash_NONE Hash_HashType = 0 + // A sha256 hash. + Hash_SHA256 Hash_HashType = 1 +) + +var Hash_HashType_name = map[int32]string{ + 0: "NONE", + 1: "SHA256", +} +var Hash_HashType_value = map[string]int32{ + "NONE": 0, + "SHA256": 1, +} + +func (x Hash_HashType) String() string { + return proto.EnumName(Hash_HashType_name, int32(x)) +} +func (Hash_HashType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{3, 0} +} + +// Provenance of a build. Contains all information needed to verify the full +// details about the build from source to completion. +type BuildProvenance struct { + // Unique identifier of the build. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // ID of the project. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Commands requested by the build. + Commands []*Command `protobuf:"bytes,5,rep,name=commands,proto3" json:"commands,omitempty"` + // Output of the build. 
+ BuiltArtifacts []*Artifact `protobuf:"bytes,6,rep,name=built_artifacts,json=builtArtifacts,proto3" json:"built_artifacts,omitempty"` + // Time at which the build was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time at which execution of the build was started. + StartTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time at which execution of the build was finished. + FinishTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=finish_time,json=finishTime,proto3" json:"finish_time,omitempty"` + // E-mail address of the user who initiated this build. Note that this was the + // user's e-mail address at the time the build was initiated; this address may + // not represent the same end-user for all time. + Creator string `protobuf:"bytes,11,opt,name=creator,proto3" json:"creator,omitempty"` + // Google Cloud Storage bucket where logs were written. + LogsBucket string `protobuf:"bytes,13,opt,name=logs_bucket,json=logsBucket,proto3" json:"logs_bucket,omitempty"` + // Details of the Source input to the build. + SourceProvenance *Source `protobuf:"bytes,14,opt,name=source_provenance,json=sourceProvenance,proto3" json:"source_provenance,omitempty"` + // Trigger identifier if the build was triggered automatically; empty if not. + TriggerId string `protobuf:"bytes,15,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + // Special options applied to this build. This is a catch-all field where + // build providers can enter any desired additional details. + BuildOptions map[string]string `protobuf:"bytes,16,rep,name=build_options,json=buildOptions,proto3" json:"build_options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Version string of the builder at the time this build was executed. 
+ BuilderVersion string `protobuf:"bytes,17,opt,name=builder_version,json=builderVersion,proto3" json:"builder_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildProvenance) Reset() { *m = BuildProvenance{} } +func (m *BuildProvenance) String() string { return proto.CompactTextString(m) } +func (*BuildProvenance) ProtoMessage() {} +func (*BuildProvenance) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{0} +} +func (m *BuildProvenance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildProvenance.Unmarshal(m, b) +} +func (m *BuildProvenance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildProvenance.Marshal(b, m, deterministic) +} +func (dst *BuildProvenance) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildProvenance.Merge(dst, src) +} +func (m *BuildProvenance) XXX_Size() int { + return xxx_messageInfo_BuildProvenance.Size(m) +} +func (m *BuildProvenance) XXX_DiscardUnknown() { + xxx_messageInfo_BuildProvenance.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildProvenance proto.InternalMessageInfo + +func (m *BuildProvenance) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *BuildProvenance) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BuildProvenance) GetCommands() []*Command { + if m != nil { + return m.Commands + } + return nil +} + +func (m *BuildProvenance) GetBuiltArtifacts() []*Artifact { + if m != nil { + return m.BuiltArtifacts + } + return nil +} + +func (m *BuildProvenance) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *BuildProvenance) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *BuildProvenance) GetFinishTime() *timestamp.Timestamp { + if m != nil { + return m.FinishTime + } + return nil +} + +func (m *BuildProvenance) GetCreator() string { + if m != nil { + return m.Creator + } + return "" +} + +func (m *BuildProvenance) GetLogsBucket() string { + if m != nil { + return m.LogsBucket + } + return "" +} + +func (m *BuildProvenance) GetSourceProvenance() *Source { + if m != nil { + return m.SourceProvenance + } + return nil +} + +func (m *BuildProvenance) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +func (m *BuildProvenance) GetBuildOptions() map[string]string { + if m != nil { + return m.BuildOptions + } + return nil +} + +func (m *BuildProvenance) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +// Source describes the location of the source used for the build. +type Source struct { + // Source location information. + // + // Types that are valid to be assigned to Source: + // *Source_StorageSource + // *Source_RepoSource + Source isSource_Source `protobuf_oneof:"source"` + // If provided, the input binary artifacts for the build came from this + // location. + ArtifactStorageSource *StorageSource `protobuf:"bytes,4,opt,name=artifact_storage_source,json=artifactStorageSource,proto3" json:"artifact_storage_source,omitempty"` + // Hash(es) of the build source, which can be used to verify that the original + // source integrity was maintained in the build. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. 
+ // + // If the build source came in a single package such as a gzipped tarfile + // (.tar.gz), the FileHash will be for the single path to that file. + FileHashes map[string]*FileHashes `protobuf:"bytes,3,rep,name=file_hashes,json=fileHashes,proto3" json:"file_hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // If provided, the source code used for the build came from this location. + Context *SourceContext `protobuf:"bytes,7,opt,name=context,proto3" json:"context,omitempty"` + // If provided, some of the source code used for the build may be found in + // these locations, in the case where the source repository had multiple + // remotes or submodules. This list will not include the context specified in + // the context field. + AdditionalContexts []*SourceContext `protobuf:"bytes,8,rep,name=additional_contexts,json=additionalContexts,proto3" json:"additional_contexts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{1} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +type isSource_Source interface { + isSource_Source() +} + +type Source_StorageSource struct { + StorageSource *StorageSource `protobuf:"bytes,1,opt,name=storage_source,json=storageSource,proto3,oneof"` +} + +type Source_RepoSource struct { + RepoSource *RepoSource `protobuf:"bytes,2,opt,name=repo_source,json=repoSource,proto3,oneof"` +} + +func (*Source_StorageSource) isSource_Source() {} + +func (*Source_RepoSource) isSource_Source() {} + +func (m *Source) GetSource() isSource_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *Source) GetStorageSource() *StorageSource { + if x, ok := m.GetSource().(*Source_StorageSource); ok { + return x.StorageSource + } + return nil +} + +func (m *Source) GetRepoSource() *RepoSource { + if x, ok := m.GetSource().(*Source_RepoSource); ok { + return x.RepoSource + } + return nil +} + +func (m *Source) GetArtifactStorageSource() *StorageSource { + if m != nil { + return m.ArtifactStorageSource + } + return nil +} + +func (m *Source) GetFileHashes() map[string]*FileHashes { + if m != nil { + return m.FileHashes + } + return nil +} + +func (m *Source) GetContext() *SourceContext { + if m != nil { + return m.Context + } + return nil +} + +func (m *Source) GetAdditionalContexts() []*SourceContext { + if m != nil { + return m.AdditionalContexts + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Source) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Source_OneofMarshaler, _Source_OneofUnmarshaler, _Source_OneofSizer, []interface{}{ + (*Source_StorageSource)(nil), + (*Source_RepoSource)(nil), + } +} + +func _Source_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StorageSource); err != nil { + return err + } + case *Source_RepoSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RepoSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Source.Source has unexpected type %T", x) + } + return nil +} + +func _Source_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Source) + switch tag { + case 1: // source.storage_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StorageSource) + err := b.DecodeMessage(msg) + m.Source = &Source_StorageSource{msg} + return true, err + case 2: // source.repo_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RepoSource) + err := b.DecodeMessage(msg) + m.Source = &Source_RepoSource{msg} + return true, err + default: + return false, nil + } +} + +func _Source_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Source) + // source + switch x := m.Source.(type) { + case *Source_StorageSource: + s := proto.Size(x.StorageSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Source_RepoSource: + s := proto.Size(x.RepoSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Container message for hashes of byte content of files, used in Source +// messages to verify integrity of source input to the build. +type FileHashes struct { + // Collection of file hashes. + FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash,proto3" json:"file_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileHashes) Reset() { *m = FileHashes{} } +func (m *FileHashes) String() string { return proto.CompactTextString(m) } +func (*FileHashes) ProtoMessage() {} +func (*FileHashes) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{2} +} +func (m *FileHashes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileHashes.Unmarshal(m, b) +} +func (m *FileHashes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileHashes.Marshal(b, m, deterministic) +} +func (dst *FileHashes) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileHashes.Merge(dst, src) +} +func (m *FileHashes) XXX_Size() int { + return xxx_messageInfo_FileHashes.Size(m) +} +func (m *FileHashes) XXX_DiscardUnknown() { + xxx_messageInfo_FileHashes.DiscardUnknown(m) +} + +var xxx_messageInfo_FileHashes proto.InternalMessageInfo + +func (m *FileHashes) GetFileHash() []*Hash { + if m != nil { + return m.FileHash + } + return nil +} + +// Container message for hash values. +type Hash struct { + // The type of hash that was performed. 
+ Type Hash_HashType `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.containeranalysis.v1alpha1.Hash_HashType" json:"type,omitempty"` + // The hash value. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{3} +} +func (m *Hash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Hash.Unmarshal(m, b) +} +func (m *Hash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Hash.Marshal(b, m, deterministic) +} +func (dst *Hash) XXX_Merge(src proto.Message) { + xxx_messageInfo_Hash.Merge(dst, src) +} +func (m *Hash) XXX_Size() int { + return xxx_messageInfo_Hash.Size(m) +} +func (m *Hash) XXX_DiscardUnknown() { + xxx_messageInfo_Hash.DiscardUnknown(m) +} + +var xxx_messageInfo_Hash proto.InternalMessageInfo + +func (m *Hash) GetType() Hash_HashType { + if m != nil { + return m.Type + } + return Hash_NONE +} + +func (m *Hash) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// StorageSource describes the location of the source in an archive file in +// Google Cloud Storage. +type StorageSource struct { + // Google Cloud Storage bucket containing source (see [Bucket Name + // Requirements] + // (https://cloud.google.com/storage/docs/bucket-naming#requirements)). + Bucket string `protobuf:"bytes,1,opt,name=bucket,proto3" json:"bucket,omitempty"` + // Google Cloud Storage object containing source. + Object string `protobuf:"bytes,2,opt,name=object,proto3" json:"object,omitempty"` + // Google Cloud Storage generation for the object. + Generation int64 `protobuf:"varint,3,opt,name=generation,proto3" json:"generation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StorageSource) Reset() { *m = StorageSource{} } +func (m *StorageSource) String() string { return proto.CompactTextString(m) } +func (*StorageSource) ProtoMessage() {} +func (*StorageSource) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{4} +} +func (m *StorageSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StorageSource.Unmarshal(m, b) +} +func (m *StorageSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StorageSource.Marshal(b, m, deterministic) +} +func (dst *StorageSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageSource.Merge(dst, src) +} +func (m *StorageSource) XXX_Size() int { + return xxx_messageInfo_StorageSource.Size(m) +} +func (m *StorageSource) XXX_DiscardUnknown() { + xxx_messageInfo_StorageSource.DiscardUnknown(m) +} + +var xxx_messageInfo_StorageSource proto.InternalMessageInfo + +func (m *StorageSource) GetBucket() string { + if m != nil { + return m.Bucket + } + return "" +} + +func (m *StorageSource) GetObject() string { + if m != nil { + return m.Object + } + return "" +} + +func (m *StorageSource) GetGeneration() int64 { + if m != nil { + return m.Generation + } + return 0 +} + +// RepoSource describes the location of the source in a Google Cloud Source +// Repository. +type RepoSource struct { + // ID of the project that owns the repo. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Name of the repo. + RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName,proto3" json:"repo_name,omitempty"` + // A revision within the source repository must be specified in + // one of these ways. + // + // Types that are valid to be assigned to Revision: + // *RepoSource_BranchName + // *RepoSource_TagName + // *RepoSource_CommitSha + Revision isRepoSource_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepoSource) Reset() { *m = RepoSource{} } +func (m *RepoSource) String() string { return proto.CompactTextString(m) } +func (*RepoSource) ProtoMessage() {} +func (*RepoSource) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{5} +} +func (m *RepoSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoSource.Unmarshal(m, b) +} +func (m *RepoSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoSource.Marshal(b, m, deterministic) +} +func (dst *RepoSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoSource.Merge(dst, src) +} +func (m *RepoSource) XXX_Size() int { + return xxx_messageInfo_RepoSource.Size(m) +} +func (m *RepoSource) XXX_DiscardUnknown() { + xxx_messageInfo_RepoSource.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoSource proto.InternalMessageInfo + +func (m *RepoSource) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RepoSource) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +type isRepoSource_Revision interface { + isRepoSource_Revision() +} + +type RepoSource_BranchName struct { + BranchName string `protobuf:"bytes,3,opt,name=branch_name,json=branchName,proto3,oneof"` +} + +type RepoSource_TagName struct { + TagName string `protobuf:"bytes,4,opt,name=tag_name,json=tagName,proto3,oneof"` +} + +type RepoSource_CommitSha struct { + CommitSha string `protobuf:"bytes,5,opt,name=commit_sha,json=commitSha,proto3,oneof"` +} + +func (*RepoSource_BranchName) isRepoSource_Revision() {} + +func (*RepoSource_TagName) isRepoSource_Revision() {} + +func (*RepoSource_CommitSha) isRepoSource_Revision() {} + +func (m *RepoSource) GetRevision() isRepoSource_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *RepoSource) GetBranchName() string { + if x, ok := m.GetRevision().(*RepoSource_BranchName); ok { + return x.BranchName + } + return "" +} + +func (m *RepoSource) GetTagName() string { + if x, ok := m.GetRevision().(*RepoSource_TagName); ok { + return x.TagName + } + return "" +} + +func (m *RepoSource) GetCommitSha() string { + if x, ok := m.GetRevision().(*RepoSource_CommitSha); ok { + return x.CommitSha + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RepoSource) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoSource_OneofMarshaler, _RepoSource_OneofUnmarshaler, _RepoSource_OneofSizer, []interface{}{ + (*RepoSource_BranchName)(nil), + (*RepoSource_TagName)(nil), + (*RepoSource_CommitSha)(nil), + } +} + +func _RepoSource_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.BranchName) + case *RepoSource_TagName: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TagName) + case *RepoSource_CommitSha: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CommitSha) + case nil: + default: + return fmt.Errorf("RepoSource.Revision has unexpected type %T", x) + } + return nil +} + +func _RepoSource_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoSource) + switch tag { + case 3: // revision.branch_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_BranchName{x} + return true, err + case 4: // revision.tag_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_TagName{x} + return true, err + case 5: // revision.commit_sha + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &RepoSource_CommitSha{x} + return true, err + default: + return false, nil + } +} + +func _RepoSource_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoSource) + // revision + switch x := m.Revision.(type) { + case *RepoSource_BranchName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BranchName))) + n += len(x.BranchName) + case *RepoSource_TagName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TagName))) + n += len(x.TagName) + case *RepoSource_CommitSha: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CommitSha))) + n += len(x.CommitSha) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Command describes a step performed as part of the build pipeline. +type Command struct { + // Name of the command, as presented on the command line, or if the command is + // packaged as a Docker container, as presented to `docker pull`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Environment variables set before running this Command. + Env []string `protobuf:"bytes,2,rep,name=env,proto3" json:"env,omitempty"` + // Command-line arguments used when executing this Command. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Working directory (relative to project source root) used when running + // this Command. + Dir string `protobuf:"bytes,4,opt,name=dir,proto3" json:"dir,omitempty"` + // Optional unique identifier for this Command, used in wait_for to reference + // this Command as a dependency. + Id string `protobuf:"bytes,5,opt,name=id,proto3" json:"id,omitempty"` + // The ID(s) of the Command(s) that this Command depends on. 
+ WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor,proto3" json:"wait_for,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Command) Reset() { *m = Command{} } +func (m *Command) String() string { return proto.CompactTextString(m) } +func (*Command) ProtoMessage() {} +func (*Command) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{6} +} +func (m *Command) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Command.Unmarshal(m, b) +} +func (m *Command) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Command.Marshal(b, m, deterministic) +} +func (dst *Command) XXX_Merge(src proto.Message) { + xxx_messageInfo_Command.Merge(dst, src) +} +func (m *Command) XXX_Size() int { + return xxx_messageInfo_Command.Size(m) +} +func (m *Command) XXX_DiscardUnknown() { + xxx_messageInfo_Command.DiscardUnknown(m) +} + +var xxx_messageInfo_Command proto.InternalMessageInfo + +func (m *Command) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Command) GetEnv() []string { + if m != nil { + return m.Env + } + return nil +} + +func (m *Command) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *Command) GetDir() string { + if m != nil { + return m.Dir + } + return "" +} + +func (m *Command) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Command) GetWaitFor() []string { + if m != nil { + return m.WaitFor + } + return nil +} + +// Artifact describes a build product. +type Artifact struct { + // Name of the artifact. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. + // + // This field is deprecated in favor of the plural `names` field; it continues + // to exist here to allow existing BuildProvenance serialized to json in + // google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to + // deserialize back into proto. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a + // container. + Checksum string `protobuf:"bytes,2,opt,name=checksum,proto3" json:"checksum,omitempty"` + // Artifact ID, if any; for container images, this will be a URL by digest + // like gcr.io/projectID/imagename@sha256:123456 + Id string `protobuf:"bytes,3,opt,name=id,proto3" json:"id,omitempty"` + // Related artifact names. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. Note that a + // single Artifact ID can have multiple names, for example if two tags are + // applied to one image. 
+ Names []string `protobuf:"bytes,4,rep,name=names,proto3" json:"names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Artifact) Reset() { *m = Artifact{} } +func (m *Artifact) String() string { return proto.CompactTextString(m) } +func (*Artifact) ProtoMessage() {} +func (*Artifact) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_62a3806b713433e8, []int{7} +} +func (m *Artifact) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifact.Unmarshal(m, b) +} +func (m *Artifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifact.Marshal(b, m, deterministic) +} +func (dst *Artifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifact.Merge(dst, src) +} +func (m *Artifact) XXX_Size() int { + return xxx_messageInfo_Artifact.Size(m) +} +func (m *Artifact) XXX_DiscardUnknown() { + xxx_messageInfo_Artifact.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifact proto.InternalMessageInfo + +func (m *Artifact) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Artifact) GetChecksum() string { + if m != nil { + return m.Checksum + } + return "" +} + +func (m *Artifact) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Artifact) GetNames() []string { + if m != nil { + return m.Names + } + return nil +} + +func init() { + proto.RegisterType((*BuildProvenance)(nil), "google.devtools.containeranalysis.v1alpha1.BuildProvenance") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.containeranalysis.v1alpha1.BuildProvenance.BuildOptionsEntry") + proto.RegisterType((*Source)(nil), "google.devtools.containeranalysis.v1alpha1.Source") + proto.RegisterMapType((map[string]*FileHashes)(nil), "google.devtools.containeranalysis.v1alpha1.Source.FileHashesEntry") + proto.RegisterType((*FileHashes)(nil), "google.devtools.containeranalysis.v1alpha1.FileHashes") + proto.RegisterType((*Hash)(nil), "google.devtools.containeranalysis.v1alpha1.Hash") + proto.RegisterType((*StorageSource)(nil), "google.devtools.containeranalysis.v1alpha1.StorageSource") + proto.RegisterType((*RepoSource)(nil), "google.devtools.containeranalysis.v1alpha1.RepoSource") + proto.RegisterType((*Command)(nil), "google.devtools.containeranalysis.v1alpha1.Command") + proto.RegisterType((*Artifact)(nil), "google.devtools.containeranalysis.v1alpha1.Artifact") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.Hash_HashType", Hash_HashType_name, Hash_HashType_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/provenance.proto", fileDescriptor_provenance_62a3806b713433e8) +} + +var fileDescriptor_provenance_62a3806b713433e8 = []byte{ + // 1026 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdd, 0x6e, 0x1b, 0x45, + 0x14, 0xee, 0xfa, 0x77, 0xf7, 0xb8, 0x71, 0x92, 0xa1, 0xc0, 0xe2, 0x52, 0x62, 0x2c, 0x21, 0x22, + 0x2e, 0x6c, 0x9a, 0x42, 0x45, 0xc8, 0x45, 0x15, 0x47, 0x2d, 0xa9, 0x44, 0x92, 0x6a, 0x53, 0x21, + 0x41, 0x85, 0x96, 0xf1, 0xee, 0x78, 0x3d, 0xcd, 0x7a, 0x67, 0x99, 0x19, 0x1b, 0x7c, 0xc7, 0x03, + 0xc0, 0x4b, 0xf0, 0x0e, 0xdc, 0xf2, 0x2e, 0xbc, 0x09, 0x9a, 0x9f, 0xb5, 0x1d, 0xa7, 0x55, 0xbb, + 0xea, 0xcd, 0x6a, 0xce, 0x37, 0xe7, 0x7c, 0x67, 0xe6, 0xcc, 0x77, 0x66, 0x16, 0x8e, 0x12, 0xc6, + 0x92, 0x94, 0x0c, 0x62, 0x32, 0x97, 0x8c, 0xa5, 0x62, 0x10, 0xb1, 0x4c, 0x62, 0x9a, 0x11, 0x8e, + 0x33, 0x9c, 
0x2e, 0x04, 0x15, 0x83, 0xf9, 0x7d, 0x9c, 0xe6, 0x13, 0x7c, 0x7f, 0x90, 0x73, 0x36, + 0x27, 0x19, 0xce, 0x22, 0xd2, 0xcf, 0x39, 0x93, 0x0c, 0x7d, 0x61, 0x82, 0xfb, 0x45, 0x70, 0xff, + 0x46, 0x70, 0xbf, 0x08, 0xee, 0x7c, 0x6c, 0x13, 0xe1, 0x9c, 0x0e, 0x70, 0x96, 0x31, 0x89, 0x25, + 0x65, 0x99, 0x30, 0x4c, 0x9d, 0x47, 0x25, 0x96, 0x21, 0xd8, 0x8c, 0x47, 0x24, 0x54, 0x1e, 0xe4, + 0x77, 0x69, 0x09, 0xf6, 0x2c, 0x81, 0xb6, 0x46, 0xb3, 0xf1, 0x40, 0xd2, 0x29, 0x11, 0x12, 0x4f, + 0x73, 0xe3, 0xd0, 0xfb, 0xb7, 0x01, 0xdb, 0xc3, 0x19, 0x4d, 0xe3, 0x67, 0xcb, 0x5d, 0xa0, 0x36, + 0x54, 0x68, 0xec, 0x3b, 0x5d, 0x67, 0xdf, 0x0b, 0x2a, 0x34, 0x46, 0xf7, 0x00, 0x72, 0xce, 0x5e, + 0x92, 0x48, 0x86, 0x34, 0xf6, 0x2b, 0x1a, 0xf7, 0x2c, 0xf2, 0x34, 0x46, 0x17, 0xe0, 0x46, 0x6c, + 0x3a, 0xc5, 0x59, 0x2c, 0xfc, 0x7a, 0xb7, 0xba, 0xdf, 0x3a, 0x78, 0xd0, 0x7f, 0xfb, 0x0a, 0xf4, + 0x4f, 0x4c, 0x6c, 0xb0, 0x24, 0x41, 0x3f, 0xc3, 0xf6, 0x68, 0x46, 0x53, 0x19, 0x62, 0x2e, 0xe9, + 0x18, 0x47, 0x52, 0xf8, 0x0d, 0xcd, 0xfb, 0x55, 0x19, 0xde, 0x63, 0x1b, 0x1c, 0xb4, 0x35, 0x59, + 0x61, 0x0a, 0x74, 0x04, 0xad, 0x88, 0x13, 0x2c, 0x49, 0xa8, 0x8a, 0xe1, 0x37, 0xbb, 0xce, 0x7e, + 0xeb, 0xa0, 0x53, 0x50, 0x17, 0x95, 0xea, 0x3f, 0x2f, 0x2a, 0x15, 0x80, 0x71, 0x57, 0x00, 0x3a, + 0x04, 0x10, 0x12, 0x73, 0x69, 0x62, 0xdd, 0x37, 0xc6, 0x7a, 0xda, 0x5b, 0x87, 0x1e, 0x41, 0x6b, + 0x4c, 0x33, 0x2a, 0x26, 0x26, 0xd6, 0x7b, 0x73, 0x5e, 0xe3, 0xae, 0x83, 0x7d, 0x68, 0xea, 0x55, + 0x30, 0xee, 0xb7, 0xf4, 0x01, 0x14, 0x26, 0xda, 0x83, 0x56, 0xca, 0x12, 0x11, 0x8e, 0x66, 0xd1, + 0x15, 0x91, 0xfe, 0x96, 0x9e, 0x05, 0x05, 0x0d, 0x35, 0x82, 0x42, 0xd8, 0xb5, 0xda, 0x58, 0x29, + 0xd5, 0x6f, 0xeb, 0xec, 0x07, 0x65, 0x0a, 0x7a, 0xa9, 0x49, 0x82, 0x1d, 0x43, 0xb6, 0xa6, 0x97, + 0x7b, 0x00, 0x92, 0xd3, 0x24, 0x21, 0x5c, 0xe9, 0x63, 0xdb, 0xe8, 0xc3, 0x22, 0x4f, 0x63, 0xc4, + 0x61, 0x4b, 0x9d, 0x40, 0x1c, 0xb2, 0x5c, 0x6b, 0xdb, 0xdf, 0xd1, 0x87, 0x79, 0x56, 0x26, 0xf7, + 0x86, 0x44, 0x8d, 0x7d, 0x61, 0xf8, 0x1e, 0x67, 0x92, 0x2f, 0x82, 0xdb, 0xa3, 0x35, 0x08, 0x7d, + 0x6e, 0x24, 0x14, 0x13, 0x1e, 0xce, 0x09, 0x17, 0x94, 0x65, 0xfe, 0xae, 0x5e, 0x57, 0xdb, 0xc2, + 0x3f, 0x18, 0xb4, 0xf3, 0x08, 0x76, 0x6f, 0x70, 0xa1, 0x1d, 0xa8, 0x5e, 0x91, 0x85, 0xed, 0x00, + 0x35, 0x44, 0x77, 0xa0, 0x3e, 0xc7, 0xe9, 0x8c, 0x58, 0xf5, 0x1b, 0xe3, 0xdb, 0xca, 0x37, 0x4e, + 0xef, 0xbf, 0x3a, 0x34, 0x4c, 0x65, 0xd0, 0x08, 0xda, 0x42, 0x32, 0x8e, 0x13, 0x12, 0x9a, 0x1a, + 0x69, 0x86, 0xd6, 0xc1, 0x61, 0xa9, 0x2a, 0x1b, 0x06, 0x43, 0x79, 0x7a, 0x2b, 0xd8, 0x12, 0xeb, + 0x00, 0xfa, 0x11, 0x5a, 0x9c, 0xe4, 0xac, 0x48, 0x50, 0xd1, 0x09, 0x1e, 0x96, 0x49, 0x10, 0x90, + 0x9c, 0x2d, 0xd9, 0x81, 0x2f, 0x2d, 0xf4, 0x2b, 0x7c, 0x58, 0x34, 0x5c, 0xb8, 0xb1, 0x8f, 0xda, + 0x3b, 0xee, 0x23, 0x78, 0xbf, 0x60, 0xbe, 0x06, 0xa3, 0x48, 0xb5, 0x44, 0x4a, 0xc2, 0x09, 0x16, + 0x13, 0x22, 0xfc, 0xaa, 0x16, 0xc6, 0xb0, 0xbc, 0x28, 0xfb, 0x4f, 0x68, 0x4a, 0x4e, 0x35, 0x89, + 0x51, 0x03, 0x8c, 0x97, 0x00, 0xba, 0x84, 0xa6, 0xbd, 0x14, 0x6d, 0xaf, 0x1f, 0x96, 0x4f, 0x70, + 0x62, 0x08, 0x82, 0x82, 0x09, 0xbd, 0x84, 0xf7, 0x70, 0x1c, 0x53, 0x25, 0x1a, 0x9c, 0x16, 0x97, + 0xae, 0xf0, 0x5d, 0xbd, 0x83, 0x77, 0x48, 0x80, 0x56, 0xac, 0x16, 0x12, 0x9d, 0x19, 0x6c, 0x6f, + 0xec, 0xef, 0x15, 0x0a, 0xfd, 0x7e, 0x5d, 0xa1, 0x25, 0x25, 0xb1, 0x62, 0x5f, 0x53, 0xf6, 0xd0, + 0x85, 0x86, 0x39, 0xfe, 0xde, 0x0b, 0x80, 0x95, 0x0b, 0x3a, 0x03, 0x6f, 0x79, 0x68, 0xbe, 0xa3, + 0x37, 0xfc, 0x65, 0x99, 0x6c, 0x8a, 0x26, 0x70, 0x8b, 0x03, 0xea, 0xfd, 0xe5, 0x40, 0x4d, 0x0d, + 0xd0, 0x19, 0xd4, 0xe4, 0x22, 0x37, 
0x4d, 0xd3, 0x2e, 0x57, 0x43, 0x15, 0xaf, 0x3f, 0xcf, 0x17, + 0x39, 0x09, 0x34, 0xcd, 0xf5, 0x96, 0xbd, 0x6d, 0x37, 0xd6, 0xeb, 0x82, 0x5b, 0xf8, 0x21, 0x17, + 0x6a, 0xe7, 0x17, 0xe7, 0x8f, 0x77, 0x6e, 0x21, 0x80, 0xc6, 0xe5, 0xe9, 0xf1, 0xc1, 0xd7, 0x0f, + 0x77, 0x9c, 0x5e, 0x08, 0x5b, 0xd7, 0x45, 0xfa, 0x01, 0x34, 0xec, 0xdd, 0x6a, 0xca, 0x6d, 0x2d, + 0x85, 0xb3, 0x91, 0x7a, 0x03, 0xed, 0xa5, 0x60, 0x2d, 0xf4, 0x09, 0x40, 0x42, 0xd4, 0x32, 0xd5, + 0x31, 0xfa, 0xd5, 0xae, 0xb3, 0x5f, 0x0d, 0xd6, 0x90, 0xde, 0x3f, 0x0e, 0xc0, 0xaa, 0x09, 0x37, + 0x5e, 0x57, 0x67, 0xf3, 0x75, 0xbd, 0x0b, 0x9e, 0x6e, 0xf8, 0x0c, 0x4f, 0x8b, 0xdb, 0xc7, 0x55, + 0xc0, 0x39, 0x9e, 0x12, 0xf4, 0x29, 0xb4, 0x46, 0x1c, 0x67, 0xd1, 0xc4, 0x4c, 0xab, 0x5c, 0x9e, + 0xea, 0x6a, 0x03, 0x6a, 0x97, 0xbb, 0xe0, 0x4a, 0x9c, 0x98, 0xf9, 0x9a, 0x9d, 0x6f, 0x4a, 0x9c, + 0xe8, 0xc9, 0x3d, 0x00, 0xf5, 0xea, 0x52, 0x19, 0x8a, 0x09, 0xf6, 0xeb, 0x76, 0xda, 0x33, 0xd8, + 0xe5, 0x04, 0x0f, 0x01, 0x5c, 0x4e, 0xe6, 0x54, 0x5d, 0x95, 0xbd, 0x3f, 0x1c, 0x68, 0xda, 0xc7, + 0x1a, 0x21, 0xa8, 0x69, 0x46, 0xb3, 0x5c, 0x3d, 0x56, 0x9a, 0x24, 0xd9, 0xdc, 0xaf, 0x74, 0xab, + 0x4a, 0x93, 0x24, 0x9b, 0x2b, 0x2f, 0xcc, 0x13, 0xd3, 0xd7, 0x5e, 0xa0, 0xc7, 0xca, 0x2b, 0xa6, + 0xdc, 0x2c, 0x25, 0x50, 0x43, 0xfb, 0xbb, 0x51, 0x5f, 0xfe, 0x6e, 0x7c, 0x04, 0xee, 0x6f, 0x98, + 0xca, 0x70, 0xcc, 0xb8, 0x7e, 0xf7, 0xbd, 0xa0, 0xa9, 0xec, 0x27, 0x8c, 0xf7, 0x7e, 0x01, 0xb7, + 0x78, 0xc7, 0x5f, 0xb9, 0x84, 0x0e, 0xb8, 0xd1, 0x84, 0x44, 0x57, 0x62, 0x36, 0x2d, 0x6a, 0x55, + 0xd8, 0x36, 0x4d, 0x75, 0x99, 0xe6, 0x0e, 0xd4, 0x55, 0x8c, 0xf0, 0x6b, 0x3a, 0x87, 0x31, 0x86, + 0x7f, 0x3a, 0xf0, 0x59, 0xc4, 0xa6, 0x85, 0xf8, 0x5e, 0xaf, 0xb9, 0x67, 0xce, 0x4f, 0x2f, 0xac, + 0x53, 0xc2, 0x52, 0x9c, 0x25, 0x7d, 0xc6, 0x93, 0x41, 0x42, 0x32, 0xfd, 0x90, 0x0f, 0xcc, 0x14, + 0xce, 0xa9, 0x78, 0x9b, 0x9f, 0xb7, 0xa3, 0x1b, 0x53, 0x7f, 0x57, 0xaa, 0xdf, 0x9d, 0x1c, 0x8f, + 0x1a, 0x9a, 0xed, 0xc1, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x91, 0xec, 0x1b, 0x85, 0x90, 0x0a, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/source_context.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/source_context.pb.go new file mode 100644 index 0000000..96e051b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1/source_context.pb.go @@ -0,0 +1,924 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1alpha1/source_context.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of an alias. +type AliasContext_Kind int32 + +const ( + // Unknown. + AliasContext_KIND_UNSPECIFIED AliasContext_Kind = 0 + // Git tag. + AliasContext_FIXED AliasContext_Kind = 1 + // Git branch. 
+ AliasContext_MOVABLE AliasContext_Kind = 2 + // Used to specify non-standard aliases. For example, if a Git repo has a + // ref named "refs/foo/bar". + AliasContext_OTHER AliasContext_Kind = 4 +) + +var AliasContext_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "FIXED", + 2: "MOVABLE", + 4: "OTHER", +} +var AliasContext_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "FIXED": 1, + "MOVABLE": 2, + "OTHER": 4, +} + +func (x AliasContext_Kind) String() string { + return proto.EnumName(AliasContext_Kind_name, int32(x)) +} +func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{1, 0} +} + +// A SourceContext is a reference to a tree of files. A SourceContext together +// with a path point to a unique revision of a single file or directory. +type SourceContext struct { + // A SourceContext can refer any one of the following types of repositories. + // + // Types that are valid to be assigned to Context: + // *SourceContext_CloudRepo + // *SourceContext_Gerrit + // *SourceContext_Git + Context isSourceContext_Context `protobuf_oneof:"context"` + // Labels with user defined metadata. + Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{0} +} +func (m *SourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceContext.Unmarshal(m, b) +} +func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceContext.Marshal(b, m, deterministic) +} +func (dst *SourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceContext.Merge(dst, src) +} +func (m *SourceContext) XXX_Size() int { + return xxx_messageInfo_SourceContext.Size(m) +} +func (m *SourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_SourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceContext proto.InternalMessageInfo + +type isSourceContext_Context interface { + isSourceContext_Context() +} + +type SourceContext_CloudRepo struct { + CloudRepo *CloudRepoSourceContext `protobuf:"bytes,1,opt,name=cloud_repo,json=cloudRepo,proto3,oneof"` +} + +type SourceContext_Gerrit struct { + Gerrit *GerritSourceContext `protobuf:"bytes,2,opt,name=gerrit,proto3,oneof"` +} + +type SourceContext_Git struct { + Git *GitSourceContext `protobuf:"bytes,3,opt,name=git,proto3,oneof"` +} + +func (*SourceContext_CloudRepo) isSourceContext_Context() {} + +func (*SourceContext_Gerrit) isSourceContext_Context() {} + +func (*SourceContext_Git) isSourceContext_Context() {} + +func (m *SourceContext) GetContext() isSourceContext_Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *SourceContext) GetCloudRepo() *CloudRepoSourceContext { + if x, ok := m.GetContext().(*SourceContext_CloudRepo); ok { + return x.CloudRepo + } + return nil +} + +func (m *SourceContext) GetGerrit() *GerritSourceContext { + if x, ok := m.GetContext().(*SourceContext_Gerrit); ok { + return x.Gerrit + } + return nil +} + +func (m *SourceContext) GetGit() 
*GitSourceContext { + if x, ok := m.GetContext().(*SourceContext_Git); ok { + return x.Git + } + return nil +} + +func (m *SourceContext) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SourceContext_OneofMarshaler, _SourceContext_OneofUnmarshaler, _SourceContext_OneofSizer, []interface{}{ + (*SourceContext_CloudRepo)(nil), + (*SourceContext_Gerrit)(nil), + (*SourceContext_Git)(nil), + } +} + +func _SourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudRepo); err != nil { + return err + } + case *SourceContext_Gerrit: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gerrit); err != nil { + return err + } + case *SourceContext_Git: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Git); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SourceContext.Context has unexpected type %T", x) + } + return nil +} + +func _SourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SourceContext) + switch tag { + case 1: // context.cloud_repo + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudRepoSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_CloudRepo{msg} + return true, err + case 2: // context.gerrit + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GerritSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Gerrit{msg} + return true, err + case 3: // context.git + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GitSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Git{msg} + return true, err + default: + return false, nil + } +} + +func _SourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + s := proto.Size(x.CloudRepo) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Gerrit: + s := proto.Size(x.Gerrit) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Git: + s := proto.Size(x.Git) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An alias to a repo revision. +type AliasContext struct { + // The alias kind. + Kind AliasContext_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=google.devtools.containeranalysis.v1alpha1.AliasContext_Kind" json:"kind,omitempty"` + // The alias name. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AliasContext) Reset() { *m = AliasContext{} } +func (m *AliasContext) String() string { return proto.CompactTextString(m) } +func (*AliasContext) ProtoMessage() {} +func (*AliasContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{1} +} +func (m *AliasContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AliasContext.Unmarshal(m, b) +} +func (m *AliasContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AliasContext.Marshal(b, m, deterministic) +} +func (dst *AliasContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_AliasContext.Merge(dst, src) +} +func (m *AliasContext) XXX_Size() int { + return xxx_messageInfo_AliasContext.Size(m) +} +func (m *AliasContext) XXX_DiscardUnknown() { + xxx_messageInfo_AliasContext.DiscardUnknown(m) +} + +var xxx_messageInfo_AliasContext proto.InternalMessageInfo + +func (m *AliasContext) GetKind() AliasContext_Kind { + if m != nil { + return m.Kind + } + return AliasContext_KIND_UNSPECIFIED +} + +func (m *AliasContext) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A CloudRepoSourceContext denotes a particular revision in a Google Cloud +// Source Repo. +type CloudRepoSourceContext struct { + // The ID of the repo. + RepoId *RepoId `protobuf:"bytes,1,opt,name=repo_id,json=repoId,proto3" json:"repo_id,omitempty"` + // A revision in a Cloud Repo can be identified by either its revision ID or + // its alias. + // + // Types that are valid to be assigned to Revision: + // *CloudRepoSourceContext_RevisionId + // *CloudRepoSourceContext_AliasContext + Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } +func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudRepoSourceContext) ProtoMessage() {} +func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{2} +} +func (m *CloudRepoSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudRepoSourceContext.Unmarshal(m, b) +} +func (m *CloudRepoSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudRepoSourceContext.Marshal(b, m, deterministic) +} +func (dst *CloudRepoSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudRepoSourceContext.Merge(dst, src) +} +func (m *CloudRepoSourceContext) XXX_Size() int { + return xxx_messageInfo_CloudRepoSourceContext.Size(m) +} +func (m *CloudRepoSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_CloudRepoSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudRepoSourceContext proto.InternalMessageInfo + +func (m *CloudRepoSourceContext) GetRepoId() *RepoId { + if m != nil { + return m.RepoId + } + return nil +} + +type isCloudRepoSourceContext_Revision interface { + isCloudRepoSourceContext_Revision() +} + +type CloudRepoSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type CloudRepoSourceContext_AliasContext struct { + AliasContext *AliasContext 
`protobuf:"bytes,3,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*CloudRepoSourceContext_RevisionId) isCloudRepoSourceContext_Revision() {} + +func (*CloudRepoSourceContext_AliasContext) isCloudRepoSourceContext_Revision() {} + +func (m *CloudRepoSourceContext) GetRevision() isCloudRepoSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *CloudRepoSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *CloudRepoSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CloudRepoSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CloudRepoSourceContext_OneofMarshaler, _CloudRepoSourceContext_OneofUnmarshaler, _CloudRepoSourceContext_OneofSizer, []interface{}{ + (*CloudRepoSourceContext_RevisionId)(nil), + (*CloudRepoSourceContext_AliasContext)(nil), + } +} + +func _CloudRepoSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudRepoSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _CloudRepoSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CloudRepoSourceContext) + switch tag { + case 2: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &CloudRepoSourceContext_RevisionId{x} + return true, err + case 3: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &CloudRepoSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _CloudRepoSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A SourceContext referring to a Gerrit project. +type GerritSourceContext struct { + // The URI of a running Gerrit instance. + HostUri string `protobuf:"bytes,1,opt,name=host_uri,json=hostUri,proto3" json:"host_uri,omitempty"` + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. The "repo name" is + // the hostURI/project. 
+ GerritProject string `protobuf:"bytes,2,opt,name=gerrit_project,json=gerritProject,proto3" json:"gerrit_project,omitempty"` + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. + // + // Types that are valid to be assigned to Revision: + // *GerritSourceContext_RevisionId + // *GerritSourceContext_AliasContext + Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } +func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } +func (*GerritSourceContext) ProtoMessage() {} +func (*GerritSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{3} +} +func (m *GerritSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GerritSourceContext.Unmarshal(m, b) +} +func (m *GerritSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GerritSourceContext.Marshal(b, m, deterministic) +} +func (dst *GerritSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GerritSourceContext.Merge(dst, src) +} +func (m *GerritSourceContext) XXX_Size() int { + return xxx_messageInfo_GerritSourceContext.Size(m) +} +func (m *GerritSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GerritSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GerritSourceContext proto.InternalMessageInfo + +func (m *GerritSourceContext) GetHostUri() string { + if m != nil { + return m.HostUri + } + return "" +} + +func (m *GerritSourceContext) GetGerritProject() string { + if m != nil { + return m.GerritProject + } + return "" +} + +type isGerritSourceContext_Revision interface { + isGerritSourceContext_Revision() +} + +type GerritSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,3,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type GerritSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,4,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*GerritSourceContext_RevisionId) isGerritSourceContext_Revision() {} + +func (*GerritSourceContext_AliasContext) isGerritSourceContext_Revision() {} + +func (m *GerritSourceContext) GetRevision() isGerritSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *GerritSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*GerritSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *GerritSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*GerritSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GerritSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GerritSourceContext_OneofMarshaler, _GerritSourceContext_OneofUnmarshaler, _GerritSourceContext_OneofSizer, []interface{}{ + (*GerritSourceContext_RevisionId)(nil), + (*GerritSourceContext_AliasContext)(nil), + } +} + +func _GerritSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *GerritSourceContext_AliasContext: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GerritSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _GerritSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GerritSourceContext) + switch tag { + case 3: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &GerritSourceContext_RevisionId{x} + return true, err + case 4: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &GerritSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _GerritSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *GerritSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g., GitHub). +type GitSourceContext struct { + // Git repository URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Required. + // Git commit hash. 
+ RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3" json:"revision_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GitSourceContext) Reset() { *m = GitSourceContext{} } +func (m *GitSourceContext) String() string { return proto.CompactTextString(m) } +func (*GitSourceContext) ProtoMessage() {} +func (*GitSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{4} +} +func (m *GitSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GitSourceContext.Unmarshal(m, b) +} +func (m *GitSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GitSourceContext.Marshal(b, m, deterministic) +} +func (dst *GitSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GitSourceContext.Merge(dst, src) +} +func (m *GitSourceContext) XXX_Size() int { + return xxx_messageInfo_GitSourceContext.Size(m) +} +func (m *GitSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GitSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GitSourceContext proto.InternalMessageInfo + +func (m *GitSourceContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *GitSourceContext) GetRevisionId() string { + if m != nil { + return m.RevisionId + } + return "" +} + +// A unique identifier for a Cloud Repo. +type RepoId struct { + // A cloud repo can be identified by either its project ID and repository name + // combination, or its globally unique identifier. + // + // Types that are valid to be assigned to Id: + // *RepoId_ProjectRepoId + // *RepoId_Uid + Id isRepoId_Id `protobuf_oneof:"id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepoId) Reset() { *m = RepoId{} } +func (m *RepoId) String() string { return proto.CompactTextString(m) } +func (*RepoId) ProtoMessage() {} +func (*RepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{5} +} +func (m *RepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoId.Unmarshal(m, b) +} +func (m *RepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoId.Marshal(b, m, deterministic) +} +func (dst *RepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoId.Merge(dst, src) +} +func (m *RepoId) XXX_Size() int { + return xxx_messageInfo_RepoId.Size(m) +} +func (m *RepoId) XXX_DiscardUnknown() { + xxx_messageInfo_RepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoId proto.InternalMessageInfo + +type isRepoId_Id interface { + isRepoId_Id() +} + +type RepoId_ProjectRepoId struct { + ProjectRepoId *ProjectRepoId `protobuf:"bytes,1,opt,name=project_repo_id,json=projectRepoId,proto3,oneof"` +} + +type RepoId_Uid struct { + Uid string `protobuf:"bytes,2,opt,name=uid,proto3,oneof"` +} + +func (*RepoId_ProjectRepoId) isRepoId_Id() {} + +func (*RepoId_Uid) isRepoId_Id() {} + +func (m *RepoId) GetId() isRepoId_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *RepoId) GetProjectRepoId() *ProjectRepoId { + if x, ok := m.GetId().(*RepoId_ProjectRepoId); ok { + return x.ProjectRepoId + } + return nil +} + +func (m *RepoId) GetUid() string { + if x, ok := m.GetId().(*RepoId_Uid); ok { + return x.Uid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RepoId) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoId_OneofMarshaler, _RepoId_OneofUnmarshaler, _RepoId_OneofSizer, []interface{}{ + (*RepoId_ProjectRepoId)(nil), + (*RepoId_Uid)(nil), + } +} + +func _RepoId_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProjectRepoId); err != nil { + return err + } + case *RepoId_Uid: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uid) + case nil: + default: + return fmt.Errorf("RepoId.Id has unexpected type %T", x) + } + return nil +} + +func _RepoId_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoId) + switch tag { + case 1: // id.project_repo_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProjectRepoId) + err := b.DecodeMessage(msg) + m.Id = &RepoId_ProjectRepoId{msg} + return true, err + case 2: // id.uid + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Id = &RepoId_Uid{x} + return true, err + default: + return false, nil + } +} + +func _RepoId_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + s := proto.Size(x.ProjectRepoId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RepoId_Uid: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uid))) + n += len(x.Uid) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Selects a repo using a Google Cloud Platform project ID (e.g., +// winged-cargo-31) and a repo name within that project. +type ProjectRepoId struct { + // The ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the repo. Leave empty for the default repo. 
+ RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName,proto3" json:"repo_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} } +func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) } +func (*ProjectRepoId) ProtoMessage() {} +func (*ProjectRepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_d60bdb3ee9072374, []int{6} +} +func (m *ProjectRepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProjectRepoId.Unmarshal(m, b) +} +func (m *ProjectRepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProjectRepoId.Marshal(b, m, deterministic) +} +func (dst *ProjectRepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProjectRepoId.Merge(dst, src) +} +func (m *ProjectRepoId) XXX_Size() int { + return xxx_messageInfo_ProjectRepoId.Size(m) +} +func (m *ProjectRepoId) XXX_DiscardUnknown() { + xxx_messageInfo_ProjectRepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_ProjectRepoId proto.InternalMessageInfo + +func (m *ProjectRepoId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ProjectRepoId) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +func init() { + proto.RegisterType((*SourceContext)(nil), "google.devtools.containeranalysis.v1alpha1.SourceContext") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.containeranalysis.v1alpha1.SourceContext.LabelsEntry") + proto.RegisterType((*AliasContext)(nil), "google.devtools.containeranalysis.v1alpha1.AliasContext") + proto.RegisterType((*CloudRepoSourceContext)(nil), "google.devtools.containeranalysis.v1alpha1.CloudRepoSourceContext") + proto.RegisterType((*GerritSourceContext)(nil), "google.devtools.containeranalysis.v1alpha1.GerritSourceContext") + proto.RegisterType((*GitSourceContext)(nil), "google.devtools.containeranalysis.v1alpha1.GitSourceContext") + proto.RegisterType((*RepoId)(nil), "google.devtools.containeranalysis.v1alpha1.RepoId") + proto.RegisterType((*ProjectRepoId)(nil), "google.devtools.containeranalysis.v1alpha1.ProjectRepoId") + proto.RegisterEnum("google.devtools.containeranalysis.v1alpha1.AliasContext_Kind", AliasContext_Kind_name, AliasContext_Kind_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1alpha1/source_context.proto", fileDescriptor_source_context_d60bdb3ee9072374) +} + +var fileDescriptor_source_context_d60bdb3ee9072374 = []byte{ + // 675 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x95, 0x5d, 0x4e, 0xdb, 0x4a, + 0x14, 0xc7, 0xe3, 0x38, 0x24, 0xf8, 0x84, 0x70, 0xa3, 0xb9, 0xe8, 0x2a, 0x97, 0x7b, 0xab, 0x52, + 0x4b, 0x48, 0xa8, 0x0f, 0xb6, 0x48, 0x5f, 0xa0, 0x1f, 0x42, 0x24, 0x18, 0x62, 0x85, 0x42, 0x6a, + 0x4a, 0xd5, 0x0f, 0x55, 0xd6, 0x60, 0x8f, 0xcc, 0x14, 0xe3, 0xb1, 0xc6, 0x76, 0x54, 0x56, 0xd0, + 0x97, 0xae, 0xa2, 0x8b, 0xe8, 0x12, 0xba, 0x95, 0xae, 0xa0, 0xef, 0xd5, 0x8c, 0x6d, 0xc9, 0x01, + 0x2a, 0x91, 0x4a, 0x7d, 0xca, 0xcc, 0x99, 0x99, 0xdf, 0xf9, 0x9f, 0x33, 0xff, 0x89, 0x61, 0x27, + 0x60, 0x2c, 0x08, 0x89, 0xe9, 0x93, 0x69, 0xca, 0x58, 0x98, 0x98, 0x1e, 0x8b, 0x52, 0x4c, 0x23, + 0xc2, 0x71, 0x84, 0xc3, 0xab, 0x84, 0x26, 0xe6, 0x74, 0x13, 0x87, 0xf1, 0x39, 0xde, 0x34, 0x13, + 0x96, 0x71, 0x8f, 0xb8, 0x62, 0x07, 0xf9, 0x98, 0x1a, 0x31, 0x67, 0x29, 0x43, 0x0f, 0x73, 0x80, + 0x51, 
0x02, 0x8c, 0x1b, 0x00, 0xa3, 0x04, 0xac, 0xfe, 0x5f, 0x24, 0xc3, 0x31, 0x35, 0x71, 0x14, + 0xb1, 0x14, 0xa7, 0x94, 0x45, 0x49, 0x4e, 0xd2, 0xbf, 0xa9, 0xd0, 0x39, 0x91, 0x29, 0x86, 0x79, + 0x06, 0xe4, 0x01, 0x78, 0x21, 0xcb, 0x7c, 0x97, 0x93, 0x98, 0xf5, 0x94, 0x35, 0x65, 0xa3, 0xdd, + 0x1f, 0x18, 0x77, 0x4f, 0x68, 0x0c, 0xc5, 0x69, 0x87, 0xc4, 0x6c, 0x86, 0x3b, 0xaa, 0x39, 0x9a, + 0x57, 0xae, 0xa0, 0x37, 0xd0, 0x0c, 0x08, 0xe7, 0x34, 0xed, 0xd5, 0x65, 0x82, 0x9d, 0x79, 0x12, + 0x1c, 0xc8, 0x93, 0xd7, 0xe9, 0x05, 0x10, 0x4d, 0x40, 0x0d, 0x68, 0xda, 0x53, 0x25, 0xf7, 0xe9, + 0x5c, 0xdc, 0x9b, 0x50, 0x81, 0x42, 0xef, 0xa1, 0x19, 0xe2, 0x33, 0x12, 0x26, 0xbd, 0xc6, 0x9a, + 0xba, 0xd1, 0xee, 0x5b, 0xf3, 0x40, 0x67, 0x88, 0xc6, 0xa1, 0xe4, 0x58, 0x51, 0xca, 0xaf, 0x9c, + 0x02, 0xba, 0xba, 0x0d, 0xed, 0x4a, 0x18, 0x75, 0x41, 0xbd, 0x20, 0x57, 0xb2, 0xf1, 0x9a, 0x23, + 0x86, 0x68, 0x05, 0x16, 0xa6, 0x38, 0xcc, 0x88, 0xec, 0x95, 0xe6, 0xe4, 0x93, 0xc7, 0xf5, 0x2d, + 0x65, 0xa0, 0x41, 0xab, 0x30, 0x86, 0xfe, 0x55, 0x81, 0xa5, 0xdd, 0x90, 0xe2, 0xa4, 0xbc, 0xc7, + 0x17, 0xd0, 0xb8, 0xa0, 0x91, 0x2f, 0x41, 0xcb, 0xfd, 0x67, 0xf3, 0x68, 0xae, 0x72, 0x8c, 0x31, + 0x8d, 0x7c, 0x47, 0xa2, 0x10, 0x82, 0x46, 0x84, 0x2f, 0x4b, 0x1d, 0x72, 0xac, 0xef, 0x40, 0x43, + 0xec, 0x40, 0x2b, 0xd0, 0x1d, 0xdb, 0x47, 0x7b, 0xee, 0xe9, 0xd1, 0xc9, 0xc4, 0x1a, 0xda, 0xfb, + 0xb6, 0xb5, 0xd7, 0xad, 0x21, 0x0d, 0x16, 0xf6, 0xed, 0xd7, 0xd6, 0x5e, 0x57, 0x41, 0x6d, 0x68, + 0x3d, 0x3f, 0x7e, 0xb5, 0x3b, 0x38, 0xb4, 0xba, 0x75, 0x11, 0x3f, 0x7e, 0x39, 0xb2, 0x9c, 0x6e, + 0x43, 0xff, 0xa1, 0xc0, 0x3f, 0xb7, 0x5b, 0x06, 0x8d, 0xa1, 0x25, 0x4c, 0xe8, 0x52, 0xbf, 0xf0, + 0x61, 0x7f, 0x9e, 0x2a, 0x04, 0xcf, 0xf6, 0x9d, 0x26, 0x97, 0xbf, 0xe8, 0x01, 0xb4, 0x39, 0x99, + 0xd2, 0x84, 0xb2, 0x48, 0x00, 0x65, 0x0d, 0xa3, 0x9a, 0x03, 0x65, 0xd0, 0xf6, 0x91, 0x0b, 0x1d, + 0x2c, 0x4a, 0x2f, 0x5f, 0x5b, 0x61, 0xa2, 0xad, 0xdf, 0xed, 0xdd, 0xa8, 0xe6, 0x2c, 0xe1, 0xca, + 0x7c, 0x00, 0xb0, 0x58, 0xa6, 0xd3, 0xbf, 0x2b, 0xf0, 0xf7, 0x2d, 0x4e, 0x46, 0xff, 0xc2, 0xe2, + 0x39, 0x4b, 0x52, 0x37, 0xe3, 0xb4, 0x30, 0x41, 0x4b, 0xcc, 0x4f, 0x39, 0x45, 0xeb, 0xb0, 0x9c, + 0x9b, 0xdc, 0x8d, 0x39, 0xfb, 0x40, 0xbc, 0xb4, 0xb8, 0x89, 0x4e, 0x1e, 0x9d, 0xe4, 0xc1, 0xeb, + 0x95, 0xaa, 0x77, 0xa9, 0xb4, 0xf1, 0x07, 0x2b, 0xb5, 0xa0, 0x7b, 0xfd, 0x69, 0x09, 0x97, 0x67, + 0x3c, 0x2c, 0x5d, 0x9e, 0xf1, 0x10, 0xdd, 0xbf, 0xe5, 0x7e, 0xaa, 0x9a, 0xf5, 0x4f, 0x0a, 0x34, + 0xf3, 0x3b, 0x45, 0x1e, 0xfc, 0x55, 0x74, 0xc0, 0x9d, 0x35, 0xc8, 0xf6, 0x3c, 0x05, 0x14, 0xfd, + 0xca, 0x99, 0xa3, 0x9a, 0xd3, 0x89, 0xab, 0x01, 0x84, 0x40, 0xcd, 0x2a, 0x46, 0x11, 0x93, 0x41, + 0x03, 0xea, 0xd4, 0xd7, 0xc7, 0xd0, 0x99, 0x39, 0x8b, 0xee, 0x01, 0x94, 0x7a, 0x0a, 0x29, 0x9a, + 0xa3, 0x15, 0x11, 0xdb, 0x47, 0xff, 0x81, 0x26, 0x65, 0x56, 0x1e, 0xcf, 0xa2, 0x08, 0x1c, 0xe1, + 0x4b, 0x32, 0xf8, 0xac, 0xc0, 0xba, 0xc7, 0x2e, 0x4b, 0xe1, 0xbf, 0xd6, 0x3b, 0x51, 0xde, 0xbe, + 0x2b, 0x36, 0x05, 0x2c, 0xc4, 0x51, 0x60, 0x30, 0x1e, 0x98, 0x01, 0x89, 0xe4, 0x3f, 0xb9, 0x99, + 0x2f, 0xe1, 0x98, 0x26, 0x77, 0xf9, 0xae, 0x3c, 0xb9, 0xb1, 0xf4, 0xa5, 0xae, 0x1e, 0x0c, 0x77, + 0xcf, 0x9a, 0x92, 0xf6, 0xe8, 0x67, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xcf, 0x59, 0x43, 0xa4, + 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation/attestation.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation/attestation.pb.go new file mode 100644 index 0000000..206c4db --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation/attestation.pb.go @@ -0,0 +1,500 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/attestation/attestation.proto + +package attestation // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Type (for example schema) of the attestation payload that was signed. +type PgpSignedAttestation_ContentType int32 + +const ( + // `ContentType` is not set. + PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED PgpSignedAttestation_ContentType = 0 + // Atomic format attestation signature. See + // https://github.com/containers/image/blob/8a5d2f82a6e3263290c8e0276c3e0f64e77723e7/docs/atomic-signature.md + // The payload extracted from `signature` is a JSON blob conforming to the + // linked schema. + PgpSignedAttestation_SIMPLE_SIGNING_JSON PgpSignedAttestation_ContentType = 1 +) + +var PgpSignedAttestation_ContentType_name = map[int32]string{ + 0: "CONTENT_TYPE_UNSPECIFIED", + 1: "SIMPLE_SIGNING_JSON", +} +var PgpSignedAttestation_ContentType_value = map[string]int32{ + "CONTENT_TYPE_UNSPECIFIED": 0, + "SIMPLE_SIGNING_JSON": 1, +} + +func (x PgpSignedAttestation_ContentType) String() string { + return proto.EnumName(PgpSignedAttestation_ContentType_name, int32(x)) +} +func (PgpSignedAttestation_ContentType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{0, 0} +} + +// An attestation wrapper with a PGP-compatible signature. This message only +// supports `ATTACHED` signatures, where the payload that is signed is included +// alongside the signature itself in the same file. +type PgpSignedAttestation struct { + // The raw content of the signature, as output by GNU Privacy Guard (GPG) or + // equivalent. Since this message only supports attached signatures, the + // payload that was signed must be attached. While the signature format + // supported is dependent on the verification implementation, currently only + // ASCII-armored (`--armor` to gpg), non-clearsigned (`--sign` rather than + // `--clearsign` to gpg) are supported. Concretely, `gpg --sign --armor + // --output=signature.gpg payload.json` will create the signature content + // expected in this field in `signature.gpg` for the `payload.json` + // attestation payload. + Signature string `protobuf:"bytes,1,opt,name=signature,proto3" json:"signature,omitempty"` + // Type (for example schema) of the attestation payload that was signed. + // The verifier must ensure that the provided type is one that the verifier + // supports, and that the attestation payload is a valid instantiation of that + // type (for example by validating a JSON schema). 
+ ContentType PgpSignedAttestation_ContentType `protobuf:"varint,3,opt,name=content_type,json=contentType,proto3,enum=grafeas.v1beta1.attestation.PgpSignedAttestation_ContentType" json:"content_type,omitempty"` + // This field is used by verifiers to select the public key used to validate + // the signature. Note that the policy of the verifier ultimately determines + // which public keys verify a signature based on the context of the + // verification. There is no guarantee validation will succeed if the + // verifier has no key matching this ID, even if it has a key under a + // different ID that would verify the signature. Note that this ID should also + // be present in the signature content above, but that is not expected to be + // used by the verifier. + // + // Types that are valid to be assigned to KeyId: + // *PgpSignedAttestation_PgpKeyId + KeyId isPgpSignedAttestation_KeyId `protobuf_oneof:"key_id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PgpSignedAttestation) Reset() { *m = PgpSignedAttestation{} } +func (m *PgpSignedAttestation) String() string { return proto.CompactTextString(m) } +func (*PgpSignedAttestation) ProtoMessage() {} +func (*PgpSignedAttestation) Descriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{0} +} +func (m *PgpSignedAttestation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PgpSignedAttestation.Unmarshal(m, b) +} +func (m *PgpSignedAttestation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PgpSignedAttestation.Marshal(b, m, deterministic) +} +func (dst *PgpSignedAttestation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PgpSignedAttestation.Merge(dst, src) +} +func (m *PgpSignedAttestation) XXX_Size() int { + return xxx_messageInfo_PgpSignedAttestation.Size(m) +} +func (m *PgpSignedAttestation) XXX_DiscardUnknown() { + xxx_messageInfo_PgpSignedAttestation.DiscardUnknown(m) +} + +var xxx_messageInfo_PgpSignedAttestation proto.InternalMessageInfo + +func (m *PgpSignedAttestation) GetSignature() string { + if m != nil { + return m.Signature + } + return "" +} + +func (m *PgpSignedAttestation) GetContentType() PgpSignedAttestation_ContentType { + if m != nil { + return m.ContentType + } + return PgpSignedAttestation_CONTENT_TYPE_UNSPECIFIED +} + +type isPgpSignedAttestation_KeyId interface { + isPgpSignedAttestation_KeyId() +} + +type PgpSignedAttestation_PgpKeyId struct { + PgpKeyId string `protobuf:"bytes,2,opt,name=pgp_key_id,json=pgpKeyId,proto3,oneof"` +} + +func (*PgpSignedAttestation_PgpKeyId) isPgpSignedAttestation_KeyId() {} + +func (m *PgpSignedAttestation) GetKeyId() isPgpSignedAttestation_KeyId { + if m != nil { + return m.KeyId + } + return nil +} + +func (m *PgpSignedAttestation) GetPgpKeyId() string { + if x, ok := m.GetKeyId().(*PgpSignedAttestation_PgpKeyId); ok { + return x.PgpKeyId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PgpSignedAttestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PgpSignedAttestation_OneofMarshaler, _PgpSignedAttestation_OneofUnmarshaler, _PgpSignedAttestation_OneofSizer, []interface{}{ + (*PgpSignedAttestation_PgpKeyId)(nil), + } +} + +func _PgpSignedAttestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PgpKeyId) + case nil: + default: + return fmt.Errorf("PgpSignedAttestation.KeyId has unexpected type %T", x) + } + return nil +} + +func _PgpSignedAttestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PgpSignedAttestation) + switch tag { + case 2: // key_id.pgp_key_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.KeyId = &PgpSignedAttestation_PgpKeyId{x} + return true, err + default: + return false, nil + } +} + +func _PgpSignedAttestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PgpSignedAttestation) + // key_id + switch x := m.KeyId.(type) { + case *PgpSignedAttestation_PgpKeyId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PgpKeyId))) + n += len(x.PgpKeyId) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Note kind that represents a logical attestation "role" or "authority". For +// example, an organization might have one `Authority` for "QA" and one for +// "build". This Note is intended to act strictly as a grouping mechanism for +// the attached Occurrences (Attestations). This grouping mechanism also +// provides a security boundary, since IAM ACLs gate the ability for a principle +// to attach an Occurrence to a given Note. It also provides a single point of +// lookup to find all attached Attestation Occurrences, even if they don't all +// live in the same project. +type Authority struct { + // Hint hints at the purpose of the attestation authority. + Hint *Authority_Hint `protobuf:"bytes,1,opt,name=hint,proto3" json:"hint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Authority) Reset() { *m = Authority{} } +func (m *Authority) String() string { return proto.CompactTextString(m) } +func (*Authority) ProtoMessage() {} +func (*Authority) Descriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{1} +} +func (m *Authority) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Authority.Unmarshal(m, b) +} +func (m *Authority) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Authority.Marshal(b, m, deterministic) +} +func (dst *Authority) XXX_Merge(src proto.Message) { + xxx_messageInfo_Authority.Merge(dst, src) +} +func (m *Authority) XXX_Size() int { + return xxx_messageInfo_Authority.Size(m) +} +func (m *Authority) XXX_DiscardUnknown() { + xxx_messageInfo_Authority.DiscardUnknown(m) +} + +var xxx_messageInfo_Authority proto.InternalMessageInfo + +func (m *Authority) GetHint() *Authority_Hint { + if m != nil { + return m.Hint + } + return nil +} + +// This submessage provides human-readable hints about the purpose of the +// Authority. 
Because the name of a Note acts as its resource reference, it is +// important to disambiguate the canonical name of the Note (which might be a +// UUID for security purposes) from "readable" names more suitable for debug +// output. Note that these hints should NOT be used to look up authorities in +// security sensitive contexts, such as when looking up Attestations to +// verify. +type Authority_Hint struct { + // The human readable name of this Attestation Authority, for example "qa". + HumanReadableName string `protobuf:"bytes,1,opt,name=human_readable_name,json=humanReadableName,proto3" json:"human_readable_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Authority_Hint) Reset() { *m = Authority_Hint{} } +func (m *Authority_Hint) String() string { return proto.CompactTextString(m) } +func (*Authority_Hint) ProtoMessage() {} +func (*Authority_Hint) Descriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{1, 0} +} +func (m *Authority_Hint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Authority_Hint.Unmarshal(m, b) +} +func (m *Authority_Hint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Authority_Hint.Marshal(b, m, deterministic) +} +func (dst *Authority_Hint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Authority_Hint.Merge(dst, src) +} +func (m *Authority_Hint) XXX_Size() int { + return xxx_messageInfo_Authority_Hint.Size(m) +} +func (m *Authority_Hint) XXX_DiscardUnknown() { + xxx_messageInfo_Authority_Hint.DiscardUnknown(m) +} + +var xxx_messageInfo_Authority_Hint proto.InternalMessageInfo + +func (m *Authority_Hint) GetHumanReadableName() string { + if m != nil { + return m.HumanReadableName + } + return "" +} + +// Details of an attestation occurrence. +type Details struct { + // Attestation for the resource. + Attestation *Attestation `protobuf:"bytes,1,opt,name=attestation,proto3" json:"attestation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{2} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetAttestation() *Attestation { + if m != nil { + return m.Attestation + } + return nil +} + +// Occurrence that represents a single "attestation". The authenticity of an +// Attestation can be verified using the attached signature. If the verifier +// trusts the public key of the signer, then verifying the signature is +// sufficient to establish trust. 
In this circumstance, the Authority to which +// this Attestation is attached is primarily useful for look-up (how to find +// this Attestation if you already know the Authority and artifact to be +// verified) and intent (which authority was this attestation intended to sign +// for). +type Attestation struct { + // The signature, generally over the `resource_url`, that verifies this + // attestation. The semantics of the signature veracity are ultimately + // determined by the verification engine. + // + // Types that are valid to be assigned to Signature: + // *Attestation_PgpSignedAttestation + Signature isAttestation_Signature `protobuf_oneof:"signature"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Attestation) Reset() { *m = Attestation{} } +func (m *Attestation) String() string { return proto.CompactTextString(m) } +func (*Attestation) ProtoMessage() {} +func (*Attestation) Descriptor() ([]byte, []int) { + return fileDescriptor_attestation_5f15fe591a16ad2d, []int{3} +} +func (m *Attestation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Attestation.Unmarshal(m, b) +} +func (m *Attestation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Attestation.Marshal(b, m, deterministic) +} +func (dst *Attestation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Attestation.Merge(dst, src) +} +func (m *Attestation) XXX_Size() int { + return xxx_messageInfo_Attestation.Size(m) +} +func (m *Attestation) XXX_DiscardUnknown() { + xxx_messageInfo_Attestation.DiscardUnknown(m) +} + +var xxx_messageInfo_Attestation proto.InternalMessageInfo + +type isAttestation_Signature interface { + isAttestation_Signature() +} + +type Attestation_PgpSignedAttestation struct { + PgpSignedAttestation *PgpSignedAttestation `protobuf:"bytes,1,opt,name=pgp_signed_attestation,json=pgpSignedAttestation,proto3,oneof"` +} + +func (*Attestation_PgpSignedAttestation) isAttestation_Signature() {} + +func (m *Attestation) GetSignature() isAttestation_Signature { + if m != nil { + return m.Signature + } + return nil +} + +func (m *Attestation) GetPgpSignedAttestation() *PgpSignedAttestation { + if x, ok := m.GetSignature().(*Attestation_PgpSignedAttestation); ok { + return x.PgpSignedAttestation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Attestation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Attestation_OneofMarshaler, _Attestation_OneofUnmarshaler, _Attestation_OneofSizer, []interface{}{ + (*Attestation_PgpSignedAttestation)(nil), + } +} + +func _Attestation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Attestation) + // signature + switch x := m.Signature.(type) { + case *Attestation_PgpSignedAttestation: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PgpSignedAttestation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Attestation.Signature has unexpected type %T", x) + } + return nil +} + +func _Attestation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Attestation) + switch tag { + case 1: // signature.pgp_signed_attestation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PgpSignedAttestation) + err := b.DecodeMessage(msg) + m.Signature = &Attestation_PgpSignedAttestation{msg} + return true, err + default: + return false, nil + } +} + +func _Attestation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Attestation) + // signature + switch x := m.Signature.(type) { + case *Attestation_PgpSignedAttestation: + s := proto.Size(x.PgpSignedAttestation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*PgpSignedAttestation)(nil), "grafeas.v1beta1.attestation.PgpSignedAttestation") + proto.RegisterType((*Authority)(nil), "grafeas.v1beta1.attestation.Authority") + proto.RegisterType((*Authority_Hint)(nil), "grafeas.v1beta1.attestation.Authority.Hint") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.attestation.Details") + proto.RegisterType((*Attestation)(nil), "grafeas.v1beta1.attestation.Attestation") + proto.RegisterEnum("grafeas.v1beta1.attestation.PgpSignedAttestation_ContentType", PgpSignedAttestation_ContentType_name, PgpSignedAttestation_ContentType_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/attestation/attestation.proto", fileDescriptor_attestation_5f15fe591a16ad2d) +} + +var fileDescriptor_attestation_5f15fe591a16ad2d = []byte{ + // 452 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x5d, 0x6f, 0xd3, 0x30, + 0x14, 0x6d, 0xba, 0x69, 0xac, 0x37, 0x08, 0x8d, 0x6c, 0x82, 0x0a, 0xa6, 0x69, 0xca, 0x53, 0x25, + 0x24, 0x47, 0x1d, 0x12, 0x2f, 0x08, 0xa1, 0x7e, 0x84, 0x36, 0x03, 0xb2, 0x28, 0xe9, 0x1e, 0xe0, + 0xc5, 0x73, 0x97, 0x8b, 0x6b, 0x91, 0xda, 0x56, 0xe2, 0x4e, 0xca, 0x3b, 0xf0, 0xc0, 0xcf, 0xe0, + 0x97, 0xa2, 0xa6, 0x85, 0x46, 0xda, 0x54, 0xa9, 0x6f, 0xd7, 0x3e, 0x3e, 0xf7, 0x9c, 0xfb, 0x61, + 0x18, 0x71, 0xa5, 0x78, 0x86, 0x5e, 0x8a, 0x77, 0x46, 0xa9, 0xac, 0xf0, 0x6e, 0x95, 0x34, 0x4c, + 0x48, 0xcc, 0x99, 0x64, 0x59, 0x59, 0x88, 0xc2, 0xbb, 0xeb, 0x4e, 0xd1, 0xb0, 0xae, 0xc7, 0x8c, + 0xc1, 0xc2, 0x30, 0x23, 0x94, 0xac, 0xc7, 0x44, 0xe7, 0xca, 0x28, 0xe7, 0x25, 0xcf, 0xd9, 0x37, + 0x64, 0x05, 0x59, 0x3f, 0x27, 0xb5, 0x27, 0xee, 0xaf, 0x26, 0x9c, 0x44, 0x5c, 0x27, 0x82, 0x4b, + 0x4c, 0x7b, 0x1b, 0xc0, 0x39, 0x85, 0x56, 0x21, 0xb8, 0x64, 0x66, 0x91, 0x63, 0xdb, 0x3a, 0xb7, + 0x3a, 0xad, 0x78, 0x73, 0xe1, 0xdc, 
0xc0, 0xe3, 0xa5, 0x1d, 0x94, 0x86, 0x9a, 0x52, 0x63, 0x7b, + 0xef, 0xdc, 0xea, 0x3c, 0xb9, 0x78, 0x47, 0xb6, 0x48, 0x91, 0x87, 0x64, 0xc8, 0x60, 0x95, 0x65, + 0x52, 0x6a, 0x8c, 0xed, 0xdb, 0xcd, 0xc1, 0x39, 0x03, 0xd0, 0x5c, 0xd3, 0xef, 0x58, 0x52, 0x91, + 0xb6, 0x9b, 0x4b, 0x03, 0xe3, 0x46, 0x7c, 0xa8, 0xb9, 0xfe, 0x88, 0x65, 0x90, 0xba, 0x43, 0xb0, + 0x6b, 0x5c, 0xe7, 0x14, 0xda, 0x83, 0xab, 0x70, 0xe2, 0x87, 0x13, 0x3a, 0xf9, 0x12, 0xf9, 0xf4, + 0x3a, 0x4c, 0x22, 0x7f, 0x10, 0x7c, 0x08, 0xfc, 0xe1, 0x51, 0xc3, 0x79, 0x0e, 0xc7, 0x49, 0xf0, + 0x39, 0xfa, 0xe4, 0xd3, 0x24, 0x18, 0x85, 0x41, 0x38, 0xa2, 0x97, 0xc9, 0x55, 0x78, 0x64, 0xf5, + 0x0f, 0xe1, 0x60, 0xa5, 0xe0, 0xfe, 0xb0, 0xa0, 0xd5, 0x5b, 0x98, 0x99, 0xca, 0x85, 0x29, 0x9d, + 0xf7, 0xb0, 0x3f, 0x13, 0xd2, 0x54, 0x85, 0xdb, 0x17, 0xaf, 0xb6, 0xd6, 0xf5, 0x9f, 0x45, 0xc6, + 0x42, 0x9a, 0xb8, 0x22, 0xbe, 0x78, 0x03, 0xfb, 0xcb, 0x93, 0x43, 0xe0, 0x78, 0xb6, 0x98, 0x33, + 0x49, 0x73, 0x64, 0x29, 0x9b, 0x66, 0x48, 0x25, 0x9b, 0xff, 0x6b, 0xe8, 0xd3, 0x0a, 0x8a, 0xd7, + 0x48, 0xc8, 0xe6, 0xe8, 0x5e, 0xc3, 0xa3, 0x21, 0x1a, 0x26, 0xb2, 0xc2, 0xb9, 0x04, 0xbb, 0x26, + 0xb3, 0xb6, 0xd2, 0xd9, 0x6e, 0x65, 0x13, 0xc7, 0x75, 0xb2, 0xfb, 0xd3, 0x02, 0xbb, 0x3e, 0x5d, + 0x01, 0xcf, 0x96, 0xdd, 0x2d, 0xaa, 0x79, 0xd0, 0xfb, 0x32, 0xdd, 0x9d, 0x27, 0x39, 0x6e, 0xc4, + 0x27, 0xfa, 0x81, 0xfb, 0xbe, 0x5d, 0x5b, 0xa4, 0xfe, 0x6f, 0x0b, 0xce, 0x84, 0xda, 0x96, 0x3c, + 0xb2, 0xbe, 0xde, 0xac, 0x36, 0x9f, 0x70, 0x95, 0x31, 0xc9, 0x89, 0xca, 0xb9, 0xc7, 0x51, 0x56, + 0xcb, 0xec, 0xad, 0x20, 0xa6, 0x45, 0xb1, 0xe3, 0xc7, 0x78, 0x5b, 0x8b, 0xff, 0x34, 0xf7, 0x46, + 0x71, 0x6f, 0x7a, 0x50, 0xa5, 0x7c, 0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, 0x69, 0x0c, 0x36, 0xf8, + 0x6a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build/build.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build/build.pb.go new file mode 100644 index 0000000..d99250b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build/build.pb.go @@ -0,0 +1,287 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/build/build.proto + +package build // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import provenance "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Public key formats +type BuildSignature_KeyType int32 + +const ( + // `KeyType` is not set. + BuildSignature_KEY_TYPE_UNSPECIFIED BuildSignature_KeyType = 0 + // `PGP ASCII Armored` public key. + BuildSignature_PGP_ASCII_ARMORED BuildSignature_KeyType = 1 + // `PKIX PEM` public key. 
+ BuildSignature_PKIX_PEM BuildSignature_KeyType = 2 +) + +var BuildSignature_KeyType_name = map[int32]string{ + 0: "KEY_TYPE_UNSPECIFIED", + 1: "PGP_ASCII_ARMORED", + 2: "PKIX_PEM", +} +var BuildSignature_KeyType_value = map[string]int32{ + "KEY_TYPE_UNSPECIFIED": 0, + "PGP_ASCII_ARMORED": 1, + "PKIX_PEM": 2, +} + +func (x BuildSignature_KeyType) String() string { + return proto.EnumName(BuildSignature_KeyType_name, int32(x)) +} +func (BuildSignature_KeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_build_fc5a1b8d414ac9bd, []int{1, 0} +} + +// Note holding the version of the provider's builder and the signature of the +// provenance message in linked BuildDetails. +type Build struct { + // Version of the builder which produced this Note. + BuilderVersion string `protobuf:"bytes,1,opt,name=builder_version,json=builderVersion,proto3" json:"builder_version,omitempty"` + // Signature of the build in Occurrences pointing to the Note containing this + // `BuilderDetails`. + Signature *BuildSignature `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Build) Reset() { *m = Build{} } +func (m *Build) String() string { return proto.CompactTextString(m) } +func (*Build) ProtoMessage() {} +func (*Build) Descriptor() ([]byte, []int) { + return fileDescriptor_build_fc5a1b8d414ac9bd, []int{0} +} +func (m *Build) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Build.Unmarshal(m, b) +} +func (m *Build) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Build.Marshal(b, m, deterministic) +} +func (dst *Build) XXX_Merge(src proto.Message) { + xxx_messageInfo_Build.Merge(dst, src) +} +func (m *Build) XXX_Size() int { + return xxx_messageInfo_Build.Size(m) +} +func (m *Build) XXX_DiscardUnknown() { + xxx_messageInfo_Build.DiscardUnknown(m) +} + +var xxx_messageInfo_Build proto.InternalMessageInfo + +func (m *Build) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +func (m *Build) GetSignature() *BuildSignature { + if m != nil { + return m.Signature + } + return nil +} + +// Message encapsulating the signature of the verified build. +type BuildSignature struct { + // Public key of the builder which can be used to verify that the related + // findings are valid and unchanged. If `key_type` is empty, this defaults + // to PEM encoded public keys. + // + // This field may be empty if `key_id` references an external key. + // + // For Cloud Container Builder based signatures, this is a PEM encoded public + // key. To verify the Cloud Container Builder signature, place the contents of + // this field into a file (public.pem). The signature field is base64-decoded + // into its binary representation in signature.bin, and the provenance bytes + // from `BuildDetails` are base64-decoded into a binary representation in + // signed.bin. OpenSSL can then verify the signature: + // `openssl sha256 -verify public.pem -signature signature.bin signed.bin` + PublicKey string `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` + // Signature of the related `BuildProvenance`. In JSON, this is base-64 + // encoded. + Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` + // An ID for the key used to sign. 
This could be either an Id for the key + // stored in `public_key` (such as the Id or fingerprint for a PGP key, or the + // CN for a cert), or a reference to an external key (such as a reference to a + // key in Cloud Key Management Service). + KeyId string `protobuf:"bytes,3,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The type of the key, either stored in `public_key` or referenced in + // `key_id` + KeyType BuildSignature_KeyType `protobuf:"varint,4,opt,name=key_type,json=keyType,proto3,enum=grafeas.v1beta1.build.BuildSignature_KeyType" json:"key_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildSignature) Reset() { *m = BuildSignature{} } +func (m *BuildSignature) String() string { return proto.CompactTextString(m) } +func (*BuildSignature) ProtoMessage() {} +func (*BuildSignature) Descriptor() ([]byte, []int) { + return fileDescriptor_build_fc5a1b8d414ac9bd, []int{1} +} +func (m *BuildSignature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildSignature.Unmarshal(m, b) +} +func (m *BuildSignature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildSignature.Marshal(b, m, deterministic) +} +func (dst *BuildSignature) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildSignature.Merge(dst, src) +} +func (m *BuildSignature) XXX_Size() int { + return xxx_messageInfo_BuildSignature.Size(m) +} +func (m *BuildSignature) XXX_DiscardUnknown() { + xxx_messageInfo_BuildSignature.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildSignature proto.InternalMessageInfo + +func (m *BuildSignature) GetPublicKey() string { + if m != nil { + return m.PublicKey + } + return "" +} + +func (m *BuildSignature) GetSignature() []byte { + if m != nil { + return m.Signature + } + return nil +} + +func (m *BuildSignature) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *BuildSignature) GetKeyType() BuildSignature_KeyType { + if m != nil { + return m.KeyType + } + return BuildSignature_KEY_TYPE_UNSPECIFIED +} + +// Details of a build occurrence. +type Details struct { + // The actual provenance for the build. + Provenance *provenance.BuildProvenance `protobuf:"bytes,1,opt,name=provenance,proto3" json:"provenance,omitempty"` + // Serialized JSON representation of the provenance, used in generating the + // `BuildSignature` in the corresponding Result. After verifying the + // signature, `provenance_bytes` can be unmarshalled and compared to the + // provenance to confirm that it is unchanged. A base64-encoded string + // representation of the provenance bytes is used for the signature in order + // to interoperate with openssl which expects this format for signature + // verification. + // + // The serialized form is captured both to avoid ambiguity in how the + // provenance is marshalled to json as well to prevent incompatibilities with + // future changes. 
+ ProvenanceBytes string `protobuf:"bytes,2,opt,name=provenance_bytes,json=provenanceBytes,proto3" json:"provenance_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_build_fc5a1b8d414ac9bd, []int{2} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetProvenance() *provenance.BuildProvenance { + if m != nil { + return m.Provenance + } + return nil +} + +func (m *Details) GetProvenanceBytes() string { + if m != nil { + return m.ProvenanceBytes + } + return "" +} + +func init() { + proto.RegisterType((*Build)(nil), "grafeas.v1beta1.build.Build") + proto.RegisterType((*BuildSignature)(nil), "grafeas.v1beta1.build.BuildSignature") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.build.Details") + proto.RegisterEnum("grafeas.v1beta1.build.BuildSignature_KeyType", BuildSignature_KeyType_name, BuildSignature_KeyType_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/build/build.proto", fileDescriptor_build_fc5a1b8d414ac9bd) +} + +var fileDescriptor_build_fc5a1b8d414ac9bd = []byte{ + // 434 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0xc5, 0x29, 0x6d, 0x9a, 0x69, 0x95, 0x86, 0x15, 0x91, 0x2c, 0x04, 0x52, 0x14, 0x09, 0x11, + 0x84, 0xb0, 0xd5, 0x70, 0xec, 0x29, 0x1f, 0xa6, 0xb5, 0xac, 0x82, 0xe5, 0x04, 0x44, 0xb9, 0xac, + 0xd6, 0xc9, 0x60, 0xad, 0x62, 0xed, 0x5a, 0xde, 0x4d, 0xc4, 0x1e, 0xf9, 0x2b, 0xfc, 0x4a, 0x8e, + 0x28, 0xeb, 0x40, 0x0c, 0xed, 0x21, 0x97, 0xd5, 0xcc, 0x9b, 0x99, 0x37, 0x6f, 0x67, 0x06, 0xae, + 0x32, 0x29, 0xb3, 0x1c, 0xfd, 0x25, 0x6e, 0xb4, 0x94, 0xb9, 0xf2, 0x17, 0x52, 0x68, 0xc6, 0x05, + 0x96, 0x4c, 0xb0, 0xdc, 0x28, 0xae, 0xfc, 0xcd, 0x65, 0x8a, 0x9a, 0x5d, 0xfa, 0xe9, 0x9a, 0xe7, + 0xcb, 0xea, 0xf5, 0x8a, 0x52, 0x6a, 0x49, 0xba, 0x59, 0xc9, 0xbe, 0x21, 0x53, 0xde, 0x2e, 0xc5, + 0xb3, 0xc1, 0x67, 0xc1, 0xe1, 0x9c, 0x45, 0x29, 0x37, 0x28, 0x98, 0x58, 0x60, 0xcd, 0xac, 0xd8, + 0xfb, 0x6b, 0x38, 0x1e, 0x6f, 0xf9, 0xc8, 0x2b, 0xb8, 0xb0, 0xc4, 0x58, 0xd2, 0x0d, 0x96, 0x8a, + 0x4b, 0xe1, 0x3a, 0x3d, 0x67, 0xd0, 0x4a, 0xda, 0x3b, 0xf8, 0x73, 0x85, 0x92, 0x09, 0xb4, 0x14, + 0xcf, 0x04, 0xd3, 0xeb, 0x12, 0xdd, 0x46, 0xcf, 0x19, 0x9c, 0x0d, 0x5f, 0x7a, 0x0f, 0x6a, 0xf4, + 0x2c, 0xf3, 0xec, 0x4f, 0x72, 0xb2, 0xaf, 0xeb, 0xff, 0x72, 0xa0, 0xfd, 0x6f, 0x94, 0xbc, 0x00, + 0x28, 0xd6, 0x69, 0xce, 0x17, 0x74, 0x85, 0x66, 0xd7, 0xbb, 0x55, 0x21, 0x11, 0x1a, 0xf2, 0xfc, + 0xff, 0xb6, 0xe7, 0x35, 0x3e, 0xd2, 0x85, 0x93, 0x15, 0x1a, 0xca, 0x97, 0xee, 0x91, 0x2d, 0x3c, + 0x5e, 0xa1, 0x09, 0x97, 0xe4, 0x06, 0x4e, 0xb7, 0xb0, 0x36, 0x05, 0xba, 0x8f, 0x7b, 0xce, 0xa0, + 0x3d, 0x7c, 0x7b, 0x90, 0x54, 0x2f, 0x42, 
0x33, 0x37, 0x05, 0x26, 0xcd, 0x55, 0x65, 0xf4, 0x6f, + 0xa0, 0xb9, 0xc3, 0x88, 0x0b, 0x4f, 0xa3, 0xe0, 0x8e, 0xce, 0xef, 0xe2, 0x80, 0x7e, 0xfa, 0x30, + 0x8b, 0x83, 0x49, 0xf8, 0x3e, 0x0c, 0xa6, 0x9d, 0x47, 0xa4, 0x0b, 0x4f, 0xe2, 0xeb, 0x98, 0x8e, + 0x66, 0x93, 0x30, 0xa4, 0xa3, 0xe4, 0xf6, 0x63, 0x12, 0x4c, 0x3b, 0x0e, 0x39, 0x87, 0xd3, 0x38, + 0x0a, 0xbf, 0xd0, 0x38, 0xb8, 0xed, 0x34, 0xfa, 0x3f, 0x1c, 0x68, 0x4e, 0x51, 0x33, 0x9e, 0x2b, + 0x12, 0x01, 0xec, 0x37, 0x62, 0xff, 0x7c, 0x36, 0x7c, 0x73, 0x4f, 0x61, 0x6d, 0x69, 0x56, 0x66, + 0xfc, 0xd7, 0x4f, 0x6a, 0xe5, 0xe4, 0x35, 0x74, 0xf6, 0x1e, 0x4d, 0x8d, 0x46, 0x65, 0x07, 0xd5, + 0x4a, 0x2e, 0xf6, 0xf8, 0x78, 0x0b, 0x8f, 0xbf, 0x83, 0xcb, 0xe5, 0xc3, 0x93, 0x88, 0x9d, 0xaf, + 0xf3, 0xea, 0xb4, 0xbc, 0x4c, 0xe6, 0x4c, 0x64, 0x9e, 0x2c, 0x33, 0x3f, 0x43, 0x61, 0xef, 0xc5, + 0xaf, 0x42, 0xac, 0xe0, 0xea, 0xe0, 0x6b, 0xbe, 0xb2, 0xef, 0xcf, 0xc6, 0xd1, 0x75, 0x32, 0x4a, + 0x4f, 0x2c, 0xcd, 0xbb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x06, 0x5a, 0x03, 0x5d, 0x13, 0x03, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common/common.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common/common.pb.go new file mode 100644 index 0000000..ea676c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common/common.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/common/common.proto + +package common // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Kind represents the kinds of notes supported. +type NoteKind int32 + +const ( + // Unknown. + NoteKind_NOTE_KIND_UNSPECIFIED NoteKind = 0 + // The note and occurrence represent a package vulnerability. + NoteKind_VULNERABILITY NoteKind = 1 + // The note and occurrence assert build provenance. + NoteKind_BUILD NoteKind = 2 + // This represents an image basis relationship. + NoteKind_IMAGE NoteKind = 3 + // This represents a package installed via a package manager. + NoteKind_PACKAGE NoteKind = 4 + // The note and occurrence track deployment events. + NoteKind_DEPLOYMENT NoteKind = 5 + // The note and occurrence track the initial discovery status of a resource. + NoteKind_DISCOVERY NoteKind = 6 + // This represents a logical "role" that can attest to artifacts. 
+ NoteKind_ATTESTATION NoteKind = 7 +) + +var NoteKind_name = map[int32]string{ + 0: "NOTE_KIND_UNSPECIFIED", + 1: "VULNERABILITY", + 2: "BUILD", + 3: "IMAGE", + 4: "PACKAGE", + 5: "DEPLOYMENT", + 6: "DISCOVERY", + 7: "ATTESTATION", +} +var NoteKind_value = map[string]int32{ + "NOTE_KIND_UNSPECIFIED": 0, + "VULNERABILITY": 1, + "BUILD": 2, + "IMAGE": 3, + "PACKAGE": 4, + "DEPLOYMENT": 5, + "DISCOVERY": 6, + "ATTESTATION": 7, +} + +func (x NoteKind) String() string { + return proto.EnumName(NoteKind_name, int32(x)) +} +func (NoteKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_945c0ba544e58d51, []int{0} +} + +// Metadata for any related URL information. +type RelatedUrl struct { + // Specific URL associated with the resource. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Label to describe usage of the URL. + Label string `protobuf:"bytes,2,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RelatedUrl) Reset() { *m = RelatedUrl{} } +func (m *RelatedUrl) String() string { return proto.CompactTextString(m) } +func (*RelatedUrl) ProtoMessage() {} +func (*RelatedUrl) Descriptor() ([]byte, []int) { + return fileDescriptor_common_945c0ba544e58d51, []int{0} +} +func (m *RelatedUrl) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RelatedUrl.Unmarshal(m, b) +} +func (m *RelatedUrl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RelatedUrl.Marshal(b, m, deterministic) +} +func (dst *RelatedUrl) XXX_Merge(src proto.Message) { + xxx_messageInfo_RelatedUrl.Merge(dst, src) +} +func (m *RelatedUrl) XXX_Size() int { + return xxx_messageInfo_RelatedUrl.Size(m) +} +func (m *RelatedUrl) XXX_DiscardUnknown() { + xxx_messageInfo_RelatedUrl.DiscardUnknown(m) +} + +var xxx_messageInfo_RelatedUrl proto.InternalMessageInfo + +func (m *RelatedUrl) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *RelatedUrl) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func init() { + proto.RegisterType((*RelatedUrl)(nil), "grafeas.v1beta1.RelatedUrl") + proto.RegisterEnum("grafeas.v1beta1.NoteKind", NoteKind_name, NoteKind_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/common/common.proto", fileDescriptor_common_945c0ba544e58d51) +} + +var fileDescriptor_common_945c0ba544e58d51 = []byte{ + // 322 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x41, 0x6b, 0xea, 0x40, + 0x14, 0x85, 0x5f, 0xf4, 0xa9, 0xcf, 0x2b, 0x3e, 0xa7, 0x43, 0x0b, 0xba, 0x2b, 0x5d, 0x95, 0x2e, + 0x12, 0xa4, 0xdd, 0x95, 0x2e, 0xa2, 0x99, 0xca, 0x60, 0x9c, 0x84, 0x38, 0x11, 0xec, 0x46, 0x46, + 0x9d, 0x0e, 0x81, 0x71, 0x46, 0x92, 0x54, 0x28, 0xfd, 0x09, 0xfd, 0x17, 0xfd, 0xa5, 0x45, 0xe3, + 0xaa, 0xab, 0xae, 0xce, 0x39, 0xf7, 0x5e, 0x2e, 0x9c, 0x0f, 0x9e, 0x94, 0xb5, 0x4a, 0x4b, 0x6f, + 0x2b, 0x0f, 0xa5, 0xb5, 0xba, 0xf0, 0x36, 0xd6, 0x94, 0x22, 0x33, 0x32, 0x17, 0x46, 0xe8, 0xf7, + 0x22, 0x2b, 0xbc, 0xc3, 0x70, 0x2d, 0x4b, 0x31, 0xf4, 0x36, 0x76, 0xb7, 0xb3, 0xe6, 0x2c, 0xee, + 0x3e, 0xb7, 0xa5, 0xc5, 0x3d, 0x95, 0x8b, 0x57, 0x29, 0x0a, 0xf7, 0x7c, 0x74, 0xf3, 0x00, 0x90, + 0x48, 0x2d, 0x4a, 0xb9, 0x4d, 0x73, 0x8d, 0x11, 0xd4, 0xdf, 0x72, 0xdd, 0x77, 0xae, 0x9d, 0xdb, + 0x76, 0x72, 0xb4, 0xf8, 0x12, 0x1a, 0x5a, 0xac, 0xa5, 0xee, 0xd7, 0x4e, 0xb3, 0x2a, 0xdc, 0x7d, + 0x3a, 
0xf0, 0x8f, 0xd9, 0x52, 0x4e, 0x33, 0xb3, 0xc5, 0x03, 0xb8, 0x62, 0x11, 0x27, 0xab, 0x29, + 0x65, 0xc1, 0x2a, 0x65, 0xf3, 0x98, 0x8c, 0xe9, 0x33, 0x25, 0x01, 0xfa, 0x83, 0x2f, 0xa0, 0xbb, + 0x48, 0x43, 0x46, 0x12, 0x7f, 0x44, 0x43, 0xca, 0x97, 0xc8, 0xc1, 0x6d, 0x68, 0x8c, 0x52, 0x1a, + 0x06, 0xa8, 0x76, 0xb4, 0x74, 0xe6, 0x4f, 0x08, 0xaa, 0xe3, 0x0e, 0xb4, 0x62, 0x7f, 0x3c, 0x3d, + 0x86, 0xbf, 0xf8, 0x3f, 0x40, 0x40, 0xe2, 0x30, 0x5a, 0xce, 0x08, 0xe3, 0xa8, 0x81, 0xbb, 0xd0, + 0x0e, 0xe8, 0x7c, 0x1c, 0x2d, 0x48, 0xb2, 0x44, 0x4d, 0xdc, 0x83, 0x8e, 0xcf, 0x39, 0x99, 0x73, + 0x9f, 0xd3, 0x88, 0xa1, 0xd6, 0xe8, 0x03, 0x06, 0x99, 0x75, 0x7f, 0x34, 0x73, 0xab, 0xde, 0xb1, + 0xf3, 0xb2, 0xa8, 0x90, 0xb9, 0xca, 0x6a, 0x61, 0x94, 0x6b, 0x73, 0xe5, 0x29, 0x69, 0x4e, 0x3c, + 0xbc, 0x6a, 0x25, 0xf6, 0x59, 0xf1, 0x7b, 0xa2, 0x8f, 0x95, 0x7c, 0xd5, 0xea, 0x93, 0xc4, 0x5f, + 0x37, 0x4f, 0x8f, 0xee, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe2, 0x0a, 0x57, 0x94, 0x99, 0x01, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/containeranalysis.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/containeranalysis.pb.go new file mode 100644 index 0000000..0429cb2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/containeranalysis.pb.go @@ -0,0 +1,670 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/containeranalysis.proto + +package containeranalysis // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A scan configuration specifies whether Cloud components in a project have a +// particular type of analysis being run. For example, it can configure whether +// vulnerability scanning is being done on Docker images or not. +type ScanConfig struct { + // Output only. The name of the scan configuration in the form of + // `projects/[PROJECT_ID]/scanConfigs/[SCAN_CONFIG_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. A human-readable description of what the scan configuration + // does. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Whether the scan is enabled. + Enabled bool `protobuf:"varint,3,opt,name=enabled,proto3" json:"enabled,omitempty"` + // Output only. The time this scan config was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time this scan config was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ScanConfig) Reset() { *m = ScanConfig{} } +func (m *ScanConfig) String() string { return proto.CompactTextString(m) } +func (*ScanConfig) ProtoMessage() {} +func (*ScanConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_0620917dfaf828c6, []int{0} +} +func (m *ScanConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ScanConfig.Unmarshal(m, b) +} +func (m *ScanConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ScanConfig.Marshal(b, m, deterministic) +} +func (dst *ScanConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ScanConfig.Merge(dst, src) +} +func (m *ScanConfig) XXX_Size() int { + return xxx_messageInfo_ScanConfig.Size(m) +} +func (m *ScanConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ScanConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ScanConfig proto.InternalMessageInfo + +func (m *ScanConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ScanConfig) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ScanConfig) GetEnabled() bool { + if m != nil { + return m.Enabled + } + return false +} + +func (m *ScanConfig) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *ScanConfig) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// Request to get a scan configuration. +type GetScanConfigRequest struct { + // The name of the scan configuration in the form of + // `projects/[PROJECT_ID]/scanConfigs/[SCAN_CONFIG_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetScanConfigRequest) Reset() { *m = GetScanConfigRequest{} } +func (m *GetScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetScanConfigRequest) ProtoMessage() {} +func (*GetScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_0620917dfaf828c6, []int{1} +} +func (m *GetScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetScanConfigRequest.Unmarshal(m, b) +} +func (m *GetScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetScanConfigRequest.Merge(dst, src) +} +func (m *GetScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetScanConfigRequest.Size(m) +} +func (m *GetScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetScanConfigRequest proto.InternalMessageInfo + +func (m *GetScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list scan configurations. +type ListScanConfigsRequest struct { + // The name of the project to list scan configurations for in the form of + // `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. 
+ Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The number of scan configs to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsRequest) Reset() { *m = ListScanConfigsRequest{} } +func (m *ListScanConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsRequest) ProtoMessage() {} +func (*ListScanConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_0620917dfaf828c6, []int{2} +} +func (m *ListScanConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsRequest.Unmarshal(m, b) +} +func (m *ListScanConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsRequest.Merge(dst, src) +} +func (m *ListScanConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsRequest.Size(m) +} +func (m *ListScanConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsRequest proto.InternalMessageInfo + +func (m *ListScanConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListScanConfigsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListScanConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListScanConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for listing scan configurations. +type ListScanConfigsResponse struct { + // The scan configurations requested. + ScanConfigs []*ScanConfig `protobuf:"bytes,1,rep,name=scan_configs,json=scanConfigs,proto3" json:"scan_configs,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListScanConfigsResponse) Reset() { *m = ListScanConfigsResponse{} } +func (m *ListScanConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListScanConfigsResponse) ProtoMessage() {} +func (*ListScanConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_0620917dfaf828c6, []int{3} +} +func (m *ListScanConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListScanConfigsResponse.Unmarshal(m, b) +} +func (m *ListScanConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListScanConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListScanConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListScanConfigsResponse.Merge(dst, src) +} +func (m *ListScanConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListScanConfigsResponse.Size(m) +} +func (m *ListScanConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListScanConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListScanConfigsResponse proto.InternalMessageInfo + +func (m *ListScanConfigsResponse) GetScanConfigs() []*ScanConfig { + if m != nil { + return m.ScanConfigs + } + return nil +} + +func (m *ListScanConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// A request to update a scan configuration. +type UpdateScanConfigRequest struct { + // The name of the scan configuration in the form of + // `projects/[PROJECT_ID]/scanConfigs/[SCAN_CONFIG_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated scan configuration. 
+ ScanConfig *ScanConfig `protobuf:"bytes,2,opt,name=scan_config,json=scanConfig,proto3" json:"scan_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateScanConfigRequest) Reset() { *m = UpdateScanConfigRequest{} } +func (m *UpdateScanConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateScanConfigRequest) ProtoMessage() {} +func (*UpdateScanConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_containeranalysis_0620917dfaf828c6, []int{4} +} +func (m *UpdateScanConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateScanConfigRequest.Unmarshal(m, b) +} +func (m *UpdateScanConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateScanConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateScanConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateScanConfigRequest.Merge(dst, src) +} +func (m *UpdateScanConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateScanConfigRequest.Size(m) +} +func (m *UpdateScanConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateScanConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateScanConfigRequest proto.InternalMessageInfo + +func (m *UpdateScanConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateScanConfigRequest) GetScanConfig() *ScanConfig { + if m != nil { + return m.ScanConfig + } + return nil +} + +func init() { + proto.RegisterType((*ScanConfig)(nil), "google.devtools.containeranalysis.v1beta1.ScanConfig") + proto.RegisterType((*GetScanConfigRequest)(nil), "google.devtools.containeranalysis.v1beta1.GetScanConfigRequest") + proto.RegisterType((*ListScanConfigsRequest)(nil), "google.devtools.containeranalysis.v1beta1.ListScanConfigsRequest") + proto.RegisterType((*ListScanConfigsResponse)(nil), "google.devtools.containeranalysis.v1beta1.ListScanConfigsResponse") + proto.RegisterType((*UpdateScanConfigRequest)(nil), "google.devtools.containeranalysis.v1beta1.UpdateScanConfigRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ContainerAnalysisV1Beta1Client is the client API for ContainerAnalysisV1Beta1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ContainerAnalysisV1Beta1Client interface { + // Sets the access control policy on the specified note or occurrence. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or an occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for a note or an occurrence resource. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or occurrence, respectively. 
+ // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified note or + // occurrence. Requires list permission on the project (for example, + // `containeranalysis.notes.list`). + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Gets the specified scan configuration. + GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) + // Lists scan configurations for the specified project. + ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) + // Updates the specified scan configuration. + UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) +} + +type containerAnalysisV1Beta1Client struct { + cc *grpc.ClientConn +} + +func NewContainerAnalysisV1Beta1Client(cc *grpc.ClientConn) ContainerAnalysisV1Beta1Client { + return &containerAnalysisV1Beta1Client{cc} +} + +func (c *containerAnalysisV1Beta1Client) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisV1Beta1Client) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisV1Beta1Client) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisV1Beta1Client) GetScanConfig(ctx context.Context, in *GetScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/GetScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisV1Beta1Client) ListScanConfigs(ctx context.Context, in *ListScanConfigsRequest, opts ...grpc.CallOption) (*ListScanConfigsResponse, error) { + out := new(ListScanConfigsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/ListScanConfigs", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *containerAnalysisV1Beta1Client) UpdateScanConfig(ctx context.Context, in *UpdateScanConfigRequest, opts ...grpc.CallOption) (*ScanConfig, error) { + out := new(ScanConfig) + err := c.cc.Invoke(ctx, "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/UpdateScanConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContainerAnalysisV1Beta1Server is the server API for ContainerAnalysisV1Beta1 service. +type ContainerAnalysisV1Beta1Server interface { + // Sets the access control policy on the specified note or occurrence. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or an occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a note or an occurrence resource. + // Requires `containeranalysis.notes.setIamPolicy` or + // `containeranalysis.occurrences.setIamPolicy` permission if the resource is + // a note or occurrence, respectively. + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns the permissions that a caller has on the specified note or + // occurrence. Requires list permission on the project (for example, + // `containeranalysis.notes.list`). + // + // The resource takes the format `projects/[PROJECT_ID]/notes/[NOTE_ID]` for + // notes and `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]` for + // occurrences. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Gets the specified scan configuration. + GetScanConfig(context.Context, *GetScanConfigRequest) (*ScanConfig, error) + // Lists scan configurations for the specified project. + ListScanConfigs(context.Context, *ListScanConfigsRequest) (*ListScanConfigsResponse, error) + // Updates the specified scan configuration. 
+ UpdateScanConfig(context.Context, *UpdateScanConfigRequest) (*ScanConfig, error) +} + +func RegisterContainerAnalysisV1Beta1Server(s *grpc.Server, srv ContainerAnalysisV1Beta1Server) { + s.RegisterService(&_ContainerAnalysisV1Beta1_serviceDesc, srv) +} + +func _ContainerAnalysisV1Beta1_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysisV1Beta1_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysisV1Beta1_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysisV1Beta1_GetScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).GetScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/GetScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).GetScanConfig(ctx, req.(*GetScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysisV1Beta1_ListScanConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListScanConfigsRequest) + if err := dec(in); err != 
nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).ListScanConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/ListScanConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).ListScanConfigs(ctx, req.(*ListScanConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContainerAnalysisV1Beta1_UpdateScanConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateScanConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContainerAnalysisV1Beta1Server).UpdateScanConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1/UpdateScanConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContainerAnalysisV1Beta1Server).UpdateScanConfig(ctx, req.(*UpdateScanConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ContainerAnalysisV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.containeranalysis.v1beta1.ContainerAnalysisV1Beta1", + HandlerType: (*ContainerAnalysisV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SetIamPolicy", + Handler: _ContainerAnalysisV1Beta1_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _ContainerAnalysisV1Beta1_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _ContainerAnalysisV1Beta1_TestIamPermissions_Handler, + }, + { + MethodName: "GetScanConfig", + Handler: _ContainerAnalysisV1Beta1_GetScanConfig_Handler, + }, + { + MethodName: "ListScanConfigs", + Handler: _ContainerAnalysisV1Beta1_ListScanConfigs_Handler, + }, + { + MethodName: "UpdateScanConfig", + Handler: _ContainerAnalysisV1Beta1_UpdateScanConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/containeranalysis/v1beta1/containeranalysis.proto", +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/containeranalysis.proto", fileDescriptor_containeranalysis_0620917dfaf828c6) +} + +var fileDescriptor_containeranalysis_0620917dfaf828c6 = []byte{ + // 766 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcd, 0x4e, 0x1b, 0x49, + 0x10, 0x56, 0xf3, 0xb7, 0xd0, 0x06, 0xb1, 0x6a, 0xed, 0x82, 0x35, 0xfb, 0x67, 0x8d, 0x56, 0x2b, + 0xe3, 0xc3, 0xcc, 0x1a, 0xb4, 0x5a, 0x05, 0x44, 0x90, 0x21, 0x92, 0x15, 0x29, 0x07, 0x64, 0x08, + 0x8a, 0xb8, 0x58, 0xed, 0x71, 0x31, 0xea, 0xc4, 0xd3, 0x3d, 0x99, 0x6e, 0x5b, 0x40, 0x44, 0x0e, + 0x11, 0xb9, 0x24, 0xc7, 0x1c, 0x73, 0xcb, 0x5b, 0xe4, 0x11, 0x12, 0xe5, 0x16, 0x29, 0x4f, 0x90, + 0x77, 0xc8, 0x35, 0x9a, 0x9e, 0x1e, 0x7b, 0xb0, 0x0d, 0xd8, 0xa0, 0x9c, 0xa0, 0xeb, 0xfb, 0xaa, + 0xea, 0xfb, 0xaa, 0xdd, 0x65, 0xe3, 0x8a, 0x2f, 0x84, 0xdf, 0x02, 0xb7, 0x09, 0x1d, 0x25, 0x44, + 0x4b, 0xba, 0x9e, 0xe0, 0x8a, 0x32, 0x0e, 0x11, 0xe5, 0xb4, 0x75, 0x22, 0x99, 0x74, 0x3b, 0xe5, + 0x06, 0x28, 0x5a, 0x1e, 0x44, 0x9c, 0x30, 0x12, 0x4a, 0x90, 0x95, 0xa4, 0x84, 0x93, 0x96, 0x70, + 0x06, 0x89, 0xa6, 0x84, 0xf5, 0xbb, 0xe9, 0x46, 0x43, 0xe6, 0x52, 0xce, 0x85, 0xa2, 0x8a, 0x09, + 0x6e, 0x0a, 0x59, 0x7f, 0x1a, 0x94, 0xd1, 
0xc0, 0xed, 0x94, 0xe3, 0x3f, 0xf5, 0x50, 0xb4, 0x98, + 0x77, 0x62, 0x70, 0xeb, 0x22, 0x7e, 0x01, 0xfb, 0xcb, 0x60, 0xfa, 0xd4, 0x68, 0x1f, 0xb9, 0x8a, + 0x05, 0x20, 0x15, 0x0d, 0xc2, 0x84, 0x60, 0x7f, 0x41, 0x18, 0xef, 0x79, 0x94, 0xef, 0x08, 0x7e, + 0xc4, 0x7c, 0x42, 0xf0, 0x14, 0xa7, 0x01, 0xe4, 0x51, 0x01, 0x15, 0xe7, 0x6a, 0xfa, 0x7f, 0x52, + 0xc0, 0xb9, 0x26, 0x48, 0x2f, 0x62, 0x61, 0xac, 0x2a, 0x3f, 0xa1, 0xa1, 0x6c, 0x88, 0xe4, 0xf1, + 0x4f, 0xc0, 0x69, 0xa3, 0x05, 0xcd, 0xfc, 0x64, 0x01, 0x15, 0x67, 0x6b, 0xe9, 0x91, 0x6c, 0xe0, + 0x9c, 0x17, 0x01, 0x55, 0x50, 0x8f, 0x1b, 0xe7, 0xa7, 0x0a, 0xa8, 0x98, 0x5b, 0xb5, 0x1c, 0x33, + 0x9a, 0x54, 0x95, 0xb3, 0x9f, 0xaa, 0xaa, 0xe1, 0x84, 0x1e, 0x07, 0xe2, 0xe4, 0x76, 0xd8, 0xec, + 0x26, 0x4f, 0x5f, 0x9f, 0x9c, 0xd0, 0xe3, 0x80, 0x5d, 0xc2, 0xbf, 0x54, 0x41, 0xf5, 0xac, 0xd5, + 0xe0, 0x69, 0x1b, 0xa4, 0x1a, 0xe6, 0xd0, 0x3e, 0x47, 0x78, 0xe9, 0x01, 0x93, 0x19, 0xb6, 0x4c, + 0xe9, 0x4b, 0x78, 0x26, 0xa4, 0x11, 0x70, 0x65, 0x12, 0xcc, 0x29, 0x8e, 0x1f, 0xb1, 0x96, 0x82, + 0xc8, 0xcc, 0xc3, 0x9c, 0xc8, 0x6f, 0x78, 0x2e, 0xa4, 0x3e, 0xd4, 0x25, 0x3b, 0x05, 0x3d, 0x8c, + 0xe9, 0xda, 0x6c, 0x1c, 0xd8, 0x63, 0xa7, 0x40, 0xfe, 0xc0, 0x58, 0x83, 0x4a, 0x3c, 0x01, 0xae, + 0x87, 0x31, 0x57, 0xd3, 0xf4, 0xfd, 0x38, 0x60, 0xbf, 0x45, 0x78, 0x79, 0x40, 0x86, 0x0c, 0x05, + 0x97, 0x40, 0x1e, 0xe1, 0x79, 0xe9, 0x51, 0x5e, 0xf7, 0x92, 0x78, 0x1e, 0x15, 0x26, 0x8b, 0xb9, + 0xd5, 0xff, 0x9c, 0x91, 0x3f, 0x64, 0x4e, 0x66, 0x14, 0x39, 0xd9, 0xeb, 0x40, 0xfe, 0xc1, 0x8b, + 0x1c, 0x8e, 0x55, 0x3d, 0xa3, 0x2c, 0xb1, 0xb4, 0x10, 0x87, 0x77, 0xbb, 0xea, 0x5e, 0x22, 0xbc, + 0xfc, 0x50, 0xcf, 0x77, 0xa4, 0xa1, 0x92, 0x03, 0x9c, 0xcb, 0x28, 0xd6, 0x35, 0x6f, 0x2c, 0x18, + 0xf7, 0x04, 0xaf, 0x9e, 0x63, 0x9c, 0xdf, 0x49, 0x93, 0x2a, 0x26, 0xe9, 0xa0, 0xbc, 0x1d, 0xe7, + 0x90, 0x0f, 0x08, 0xcf, 0xef, 0x81, 0xba, 0x4f, 0x83, 0x5d, 0xfd, 0x0c, 0x88, 0x9d, 0x36, 0x64, + 0x34, 0x70, 0x3a, 0x65, 0x27, 0x0b, 0x1a, 0xf5, 0xd6, 0xaf, 0x7d, 0x9c, 0x04, 0xb5, 0x9f, 0xbf, + 0xf8, 0xfc, 0xf5, 0xcd, 0xc4, 0xb1, 0xbd, 0xd6, 0x7d, 0xea, 0xcf, 0x22, 0x90, 0xa2, 0x1d, 0x79, + 0xb0, 0x19, 0x46, 0xe2, 0x31, 0x78, 0x4a, 0xba, 0x25, 0x97, 0x0b, 0x05, 0xd2, 0x2d, 0x9d, 0xad, + 0xcb, 0x4c, 0xe9, 0x75, 0x54, 0x3a, 0xbc, 0x6b, 0xdf, 0xb9, 0x3a, 0x53, 0x78, 0x5e, 0x3b, 0x8a, + 0x80, 0x7b, 0x43, 0xf3, 0xb5, 0x97, 0xea, 0x55, 0x5e, 0xaa, 0x3f, 0xce, 0x8b, 0x7f, 0x4b, 0x2f, + 0x7d, 0xf9, 0xe4, 0x1b, 0xc2, 0x64, 0x1f, 0xa4, 0x0e, 0x42, 0x14, 0x30, 0x29, 0xe3, 0x05, 0x47, + 0x8a, 0x7d, 0x6a, 0x07, 0x29, 0xa9, 0xaf, 0x95, 0x11, 0x98, 0xc9, 0x53, 0xb1, 0x5f, 0x23, 0x6d, + 0xf6, 0x1c, 0x5d, 0xa7, 0xb9, 0xeb, 0x56, 0x0d, 0x14, 0x8b, 0x3d, 0xdf, 0xb3, 0xb7, 0xc6, 0xf2, + 0x3c, 0xb4, 0x0a, 0x79, 0x8f, 0xf0, 0xc2, 0x85, 0x45, 0x44, 0xb6, 0xc6, 0x78, 0x03, 0xc3, 0x56, + 0x98, 0x75, 0xb3, 0x47, 0x64, 0xff, 0xab, 0xc7, 0x52, 0x22, 0xc5, 0x9e, 0xab, 0xf8, 0xa1, 0x66, + 0x1d, 0x65, 0xf6, 0x82, 0x5b, 0x3a, 0x23, 0x1f, 0x11, 0x5e, 0xec, 0x5b, 0x48, 0xa4, 0x32, 0x46, + 0xf3, 0xe1, 0x3b, 0xd5, 0xda, 0xbe, 0x4d, 0x09, 0x73, 0xc9, 0x43, 0xcc, 0x24, 0x9b, 0x39, 0x63, + 0xe7, 0x2c, 0xeb, 0x87, 0x7c, 0x42, 0xf8, 0xe7, 0xfe, 0xfd, 0x45, 0xc6, 0x91, 0x72, 0xc9, 0xf2, + 0xbb, 0xe9, 0x75, 0x6c, 0x6a, 0x07, 0xff, 0x5b, 0x23, 0x5f, 0xc7, 0x7a, 0x76, 0x9f, 0x6e, 0xbf, + 0x42, 0xf8, 0x6f, 0x4f, 0x04, 0x69, 0xef, 0x4b, 0x5b, 0xee, 0xa2, 0xc3, 0x43, 0xc3, 0xf1, 0x45, + 0x8b, 0x72, 0xdf, 0x11, 0x91, 0xef, 0xfa, 0xc0, 0xf5, 0xf7, 0xa7, 0x9b, 0x40, 0x34, 0x64, 0x72, + 0x84, 0xdf, 0x3a, 0x1b, 0x03, 0xc8, 0xbb, 0x89, 0xc9, 0xea, 0x4e, 
0xa5, 0x31, 0xa3, 0x8b, 0xad, + 0x7d, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x6c, 0xf6, 0x5c, 0x69, 0x37, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment/deployment.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment/deployment.pb.go new file mode 100644 index 0000000..a138369 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment/deployment.pb.go @@ -0,0 +1,272 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/deployment/deployment.proto + +package deployment // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Types of platforms. +type Deployment_Platform int32 + +const ( + // Unknown. + Deployment_PLATFORM_UNSPECIFIED Deployment_Platform = 0 + // Google Container Engine. + Deployment_GKE Deployment_Platform = 1 + // Google App Engine: Flexible Environment. + Deployment_FLEX Deployment_Platform = 2 + // Custom user-defined platform. + Deployment_CUSTOM Deployment_Platform = 3 +) + +var Deployment_Platform_name = map[int32]string{ + 0: "PLATFORM_UNSPECIFIED", + 1: "GKE", + 2: "FLEX", + 3: "CUSTOM", +} +var Deployment_Platform_value = map[string]int32{ + "PLATFORM_UNSPECIFIED": 0, + "GKE": 1, + "FLEX": 2, + "CUSTOM": 3, +} + +func (x Deployment_Platform) String() string { + return proto.EnumName(Deployment_Platform_name, int32(x)) +} +func (Deployment_Platform) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_deployment_31a346de33a1fffc, []int{2, 0} +} + +// An artifact that can be deployed in some runtime. +type Deployable struct { + // Resource URI for the artifact being deployed. 
+ ResourceUri []string `protobuf:"bytes,1,rep,name=resource_uri,json=resourceUri,proto3" json:"resource_uri,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployable) Reset() { *m = Deployable{} } +func (m *Deployable) String() string { return proto.CompactTextString(m) } +func (*Deployable) ProtoMessage() {} +func (*Deployable) Descriptor() ([]byte, []int) { + return fileDescriptor_deployment_31a346de33a1fffc, []int{0} +} +func (m *Deployable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployable.Unmarshal(m, b) +} +func (m *Deployable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployable.Marshal(b, m, deterministic) +} +func (dst *Deployable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployable.Merge(dst, src) +} +func (m *Deployable) XXX_Size() int { + return xxx_messageInfo_Deployable.Size(m) +} +func (m *Deployable) XXX_DiscardUnknown() { + xxx_messageInfo_Deployable.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployable proto.InternalMessageInfo + +func (m *Deployable) GetResourceUri() []string { + if m != nil { + return m.ResourceUri + } + return nil +} + +// Details of a deployment occurrence. +type Details struct { + // Deployment history for the resource. + Deployment *Deployment `protobuf:"bytes,1,opt,name=deployment,proto3" json:"deployment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_deployment_31a346de33a1fffc, []int{1} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetDeployment() *Deployment { + if m != nil { + return m.Deployment + } + return nil +} + +// The period during which some deployable was active in a runtime. +type Deployment struct { + // Identity of the user that triggered this deployment. + UserEmail string `protobuf:"bytes,1,opt,name=user_email,json=userEmail,proto3" json:"user_email,omitempty"` + // Beginning of the lifetime of this deployment. + DeployTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=deploy_time,json=deployTime,proto3" json:"deploy_time,omitempty"` + // End of the lifetime of this deployment. + UndeployTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=undeploy_time,json=undeployTime,proto3" json:"undeploy_time,omitempty"` + // Configuration used to create this deployment. + Config string `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"` + // Address of the runtime element hosting this deployment. + Address string `protobuf:"bytes,5,opt,name=address,proto3" json:"address,omitempty"` + // Output only. Resource URI for the artifact being deployed taken from + // the deployable field with the same name. 
+ ResourceUri []string `protobuf:"bytes,6,rep,name=resource_uri,json=resourceUri,proto3" json:"resource_uri,omitempty"` + // Platform hosting this deployment. + Platform Deployment_Platform `protobuf:"varint,7,opt,name=platform,proto3,enum=grafeas.v1beta1.deployment.Deployment_Platform" json:"platform,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Deployment) Reset() { *m = Deployment{} } +func (m *Deployment) String() string { return proto.CompactTextString(m) } +func (*Deployment) ProtoMessage() {} +func (*Deployment) Descriptor() ([]byte, []int) { + return fileDescriptor_deployment_31a346de33a1fffc, []int{2} +} +func (m *Deployment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Deployment.Unmarshal(m, b) +} +func (m *Deployment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Deployment.Marshal(b, m, deterministic) +} +func (dst *Deployment) XXX_Merge(src proto.Message) { + xxx_messageInfo_Deployment.Merge(dst, src) +} +func (m *Deployment) XXX_Size() int { + return xxx_messageInfo_Deployment.Size(m) +} +func (m *Deployment) XXX_DiscardUnknown() { + xxx_messageInfo_Deployment.DiscardUnknown(m) +} + +var xxx_messageInfo_Deployment proto.InternalMessageInfo + +func (m *Deployment) GetUserEmail() string { + if m != nil { + return m.UserEmail + } + return "" +} + +func (m *Deployment) GetDeployTime() *timestamp.Timestamp { + if m != nil { + return m.DeployTime + } + return nil +} + +func (m *Deployment) GetUndeployTime() *timestamp.Timestamp { + if m != nil { + return m.UndeployTime + } + return nil +} + +func (m *Deployment) GetConfig() string { + if m != nil { + return m.Config + } + return "" +} + +func (m *Deployment) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *Deployment) GetResourceUri() []string { + if m != nil { + return m.ResourceUri + } + return nil +} + +func (m *Deployment) GetPlatform() Deployment_Platform { + if m != nil { + return m.Platform + } + return Deployment_PLATFORM_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Deployable)(nil), "grafeas.v1beta1.deployment.Deployable") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.deployment.Details") + proto.RegisterType((*Deployment)(nil), "grafeas.v1beta1.deployment.Deployment") + proto.RegisterEnum("grafeas.v1beta1.deployment.Deployment_Platform", Deployment_Platform_name, Deployment_Platform_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/deployment/deployment.proto", fileDescriptor_deployment_31a346de33a1fffc) +} + +var fileDescriptor_deployment_31a346de33a1fffc = []byte{ + // 434 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4d, 0x6b, 0xdb, 0x40, + 0x10, 0xad, 0xac, 0xd4, 0x1f, 0xe3, 0xb4, 0x98, 0xa5, 0x14, 0x61, 0x08, 0x75, 0x7d, 0x28, 0x3e, + 0xed, 0x92, 0xf4, 0x98, 0x43, 0x49, 0x6d, 0x39, 0x84, 0x24, 0x8d, 0xab, 0xd8, 0x50, 0x7a, 0xa8, + 0x59, 0xdb, 0x63, 0xb1, 0xb0, 0xd2, 0x8a, 0xdd, 0x55, 0x20, 0xbf, 0xa0, 0xd7, 0xfe, 0x86, 0xfe, + 0xd2, 0xa2, 0x95, 0x14, 0x1b, 0x4c, 0xbf, 0x6e, 0x33, 0x6f, 0xe7, 0xcd, 0xbc, 0xf7, 0x58, 0x08, + 0x63, 0xa5, 0x62, 0x89, 0x6c, 0x83, 0x0f, 0x56, 0x29, 0x69, 0xd8, 0x5a, 0xa5, 0x96, 0x8b, 0x14, + 0x35, 0x4f, 0xb9, 0x7c, 0x34, 0xc2, 0xb0, 0x87, 0xd3, 0x15, 0x5a, 0x7e, 0xca, 0x36, 0x98, 0x49, + 0xf5, 0x98, 0x60, 0x6a, 0xf7, 0x4a, 0x9a, 0x69, 0x65, 0x15, 0xe9, 0xc7, 0x9a, 0x6f, 0x91, 0x1b, 
+ 0x5a, 0x0d, 0xd3, 0xdd, 0x44, 0xff, 0x4d, 0x75, 0xc2, 0x4d, 0xae, 0xf2, 0x2d, 0xb3, 0x22, 0x41, + 0x63, 0x79, 0x92, 0x95, 0xe4, 0x21, 0x03, 0x98, 0xb8, 0x71, 0xbe, 0x92, 0x48, 0xde, 0xc2, 0xb1, + 0x46, 0xa3, 0x72, 0xbd, 0xc6, 0x65, 0xae, 0x45, 0xe0, 0x0d, 0xfc, 0x51, 0x27, 0xea, 0xd6, 0xd8, + 0x42, 0x8b, 0xe1, 0x67, 0x68, 0x4d, 0xd0, 0x72, 0x21, 0x0d, 0x99, 0x02, 0xec, 0x4e, 0x05, 0xde, + 0xc0, 0x1b, 0x75, 0xcf, 0xde, 0xd1, 0xdf, 0xab, 0xa1, 0x93, 0xa7, 0x32, 0xda, 0x63, 0x0e, 0x7f, + 0xf8, 0xb5, 0x88, 0xa2, 0x25, 0x27, 0x00, 0xb9, 0x41, 0xbd, 0xc4, 0x84, 0x0b, 0xe9, 0xd6, 0x76, + 0xa2, 0x4e, 0x81, 0x84, 0x05, 0x40, 0xce, 0xa1, 0x5b, 0x72, 0x97, 0x85, 0x97, 0xa0, 0xe1, 0xce, + 0xf6, 0x69, 0x69, 0x94, 0xd6, 0x46, 0xe9, 0xbc, 0x36, 0x5a, 0x9f, 0x2a, 0x00, 0xf2, 0x01, 0x5e, + 0xe4, 0xe9, 0x3e, 0xdd, 0xff, 0x2b, 0xfd, 0xb8, 0x26, 0xb8, 0x05, 0xaf, 0xa1, 0xb9, 0x56, 0xe9, + 0x56, 0xc4, 0xc1, 0x91, 0x13, 0x56, 0x75, 0x24, 0x80, 0x16, 0xdf, 0x6c, 0x34, 0x1a, 0x13, 0x3c, + 0x77, 0x0f, 0x75, 0x7b, 0x90, 0x69, 0xf3, 0x20, 0x53, 0x72, 0x0d, 0xed, 0x4c, 0x72, 0xbb, 0x55, + 0x3a, 0x09, 0x5a, 0x03, 0x6f, 0xf4, 0xf2, 0x8c, 0xfd, 0x5b, 0x8c, 0x74, 0x56, 0xd1, 0xa2, 0xa7, + 0x05, 0xc3, 0x31, 0xb4, 0x6b, 0x94, 0x04, 0xf0, 0x6a, 0x76, 0x73, 0x31, 0x9f, 0xde, 0x45, 0xb7, + 0xcb, 0xc5, 0xa7, 0xfb, 0x59, 0x38, 0xbe, 0x9a, 0x5e, 0x85, 0x93, 0xde, 0x33, 0xd2, 0x02, 0xff, + 0xf2, 0x3a, 0xec, 0x79, 0xa4, 0x0d, 0x47, 0xd3, 0x9b, 0xf0, 0x4b, 0xaf, 0x41, 0x00, 0x9a, 0xe3, + 0xc5, 0xfd, 0xfc, 0xee, 0xb6, 0xe7, 0x7f, 0xfc, 0xee, 0xc1, 0x89, 0x50, 0x7f, 0x10, 0x31, 0xf3, + 0xbe, 0x7e, 0xab, 0x32, 0x8b, 0x95, 0xe4, 0x69, 0x4c, 0x95, 0x8e, 0x59, 0x8c, 0xa9, 0x4b, 0x90, + 0x95, 0x4f, 0x3c, 0x13, 0xe6, 0xff, 0x7e, 0xf7, 0xf9, 0xae, 0xfc, 0xd9, 0xf0, 0x2f, 0xa3, 0x8b, + 0x55, 0xd3, 0x2d, 0x7c, 0xff, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x4f, 0x59, 0xd5, 0x23, 0x2d, 0x03, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery/discovery.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery/discovery.pb.go new file mode 100644 index 0000000..1bef6fa --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery/discovery.pb.go @@ -0,0 +1,297 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/discovery/discovery.proto + +package discovery // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import common "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Whether the resource is continuously analyzed. +type Discovered_ContinuousAnalysis int32 + +const ( + // Unknown. 
+ Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED Discovered_ContinuousAnalysis = 0 + // The resource is continuously analyzed. + Discovered_ACTIVE Discovered_ContinuousAnalysis = 1 + // The resource is ignored for continuous analysis. + Discovered_INACTIVE Discovered_ContinuousAnalysis = 2 +) + +var Discovered_ContinuousAnalysis_name = map[int32]string{ + 0: "CONTINUOUS_ANALYSIS_UNSPECIFIED", + 1: "ACTIVE", + 2: "INACTIVE", +} +var Discovered_ContinuousAnalysis_value = map[string]int32{ + "CONTINUOUS_ANALYSIS_UNSPECIFIED": 0, + "ACTIVE": 1, + "INACTIVE": 2, +} + +func (x Discovered_ContinuousAnalysis) String() string { + return proto.EnumName(Discovered_ContinuousAnalysis_name, int32(x)) +} +func (Discovered_ContinuousAnalysis) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_discovery_ce3fafdae1720c89, []int{2, 0} +} + +// Analysis status for a resource. Currently for initial analysis only (not +// updated in continuous analysis). +type Discovered_AnalysisStatus int32 + +const ( + // Unknown. + Discovered_ANALYSIS_STATUS_UNSPECIFIED Discovered_AnalysisStatus = 0 + // Resource is known but no action has been taken yet. + Discovered_PENDING Discovered_AnalysisStatus = 1 + // Resource is being analyzed. + Discovered_SCANNING Discovered_AnalysisStatus = 2 + // Analysis has finished successfully. + Discovered_FINISHED_SUCCESS Discovered_AnalysisStatus = 3 + // Analysis has finished unsuccessfully, the analysis itself is in a bad + // state. + Discovered_FINISHED_FAILED Discovered_AnalysisStatus = 4 + // The resource is known not to be supported + Discovered_FINISHED_UNSUPPORTED Discovered_AnalysisStatus = 5 +) + +var Discovered_AnalysisStatus_name = map[int32]string{ + 0: "ANALYSIS_STATUS_UNSPECIFIED", + 1: "PENDING", + 2: "SCANNING", + 3: "FINISHED_SUCCESS", + 4: "FINISHED_FAILED", + 5: "FINISHED_UNSUPPORTED", +} +var Discovered_AnalysisStatus_value = map[string]int32{ + "ANALYSIS_STATUS_UNSPECIFIED": 0, + "PENDING": 1, + "SCANNING": 2, + "FINISHED_SUCCESS": 3, + "FINISHED_FAILED": 4, + "FINISHED_UNSUPPORTED": 5, +} + +func (x Discovered_AnalysisStatus) String() string { + return proto.EnumName(Discovered_AnalysisStatus_name, int32(x)) +} +func (Discovered_AnalysisStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_discovery_ce3fafdae1720c89, []int{2, 1} +} + +// A note that indicates a type of analysis a provider would perform. This note +// exists in a provider's project. A `Discovery` occurrence is created in a +// consumer's project at the start of analysis. +type Discovery struct { + // The kind of analysis that is handled by this discovery. 
+ AnalysisKind common.NoteKind `protobuf:"varint,1,opt,name=analysis_kind,json=analysisKind,proto3,enum=grafeas.v1beta1.NoteKind" json:"analysis_kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Discovery) Reset() { *m = Discovery{} } +func (m *Discovery) String() string { return proto.CompactTextString(m) } +func (*Discovery) ProtoMessage() {} +func (*Discovery) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_ce3fafdae1720c89, []int{0} +} +func (m *Discovery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovery.Unmarshal(m, b) +} +func (m *Discovery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovery.Marshal(b, m, deterministic) +} +func (dst *Discovery) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovery.Merge(dst, src) +} +func (m *Discovery) XXX_Size() int { + return xxx_messageInfo_Discovery.Size(m) +} +func (m *Discovery) XXX_DiscardUnknown() { + xxx_messageInfo_Discovery.DiscardUnknown(m) +} + +var xxx_messageInfo_Discovery proto.InternalMessageInfo + +func (m *Discovery) GetAnalysisKind() common.NoteKind { + if m != nil { + return m.AnalysisKind + } + return common.NoteKind_NOTE_KIND_UNSPECIFIED +} + +// Details of a discovery occurrence. +type Details struct { + // Analysis status for the discovered resource. + Discovered *Discovered `protobuf:"bytes,1,opt,name=discovered,proto3" json:"discovered,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_ce3fafdae1720c89, []int{1} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetDiscovered() *Discovered { + if m != nil { + return m.Discovered + } + return nil +} + +// Provides information about the analysis status of a discovered resource. +type Discovered struct { + // Whether the resource is continuously analyzed. + ContinuousAnalysis Discovered_ContinuousAnalysis `protobuf:"varint,1,opt,name=continuous_analysis,json=continuousAnalysis,proto3,enum=grafeas.v1beta1.discovery.Discovered_ContinuousAnalysis" json:"continuous_analysis,omitempty"` + // The last time continuous analysis was done for this resource. + LastAnalysisTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=last_analysis_time,json=lastAnalysisTime,proto3" json:"last_analysis_time,omitempty"` + // The status of discovery for the resource. + AnalysisStatus Discovered_AnalysisStatus `protobuf:"varint,3,opt,name=analysis_status,json=analysisStatus,proto3,enum=grafeas.v1beta1.discovery.Discovered_AnalysisStatus" json:"analysis_status,omitempty"` + // When an error is encountered this will contain a LocalizedMessage under + // details to show to the user. 
The LocalizedMessage is output only and + // populated by the API. + AnalysisStatusError *status.Status `protobuf:"bytes,4,opt,name=analysis_status_error,json=analysisStatusError,proto3" json:"analysis_status_error,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Discovered) Reset() { *m = Discovered{} } +func (m *Discovered) String() string { return proto.CompactTextString(m) } +func (*Discovered) ProtoMessage() {} +func (*Discovered) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_ce3fafdae1720c89, []int{2} +} +func (m *Discovered) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Discovered.Unmarshal(m, b) +} +func (m *Discovered) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Discovered.Marshal(b, m, deterministic) +} +func (dst *Discovered) XXX_Merge(src proto.Message) { + xxx_messageInfo_Discovered.Merge(dst, src) +} +func (m *Discovered) XXX_Size() int { + return xxx_messageInfo_Discovered.Size(m) +} +func (m *Discovered) XXX_DiscardUnknown() { + xxx_messageInfo_Discovered.DiscardUnknown(m) +} + +var xxx_messageInfo_Discovered proto.InternalMessageInfo + +func (m *Discovered) GetContinuousAnalysis() Discovered_ContinuousAnalysis { + if m != nil { + return m.ContinuousAnalysis + } + return Discovered_CONTINUOUS_ANALYSIS_UNSPECIFIED +} + +func (m *Discovered) GetLastAnalysisTime() *timestamp.Timestamp { + if m != nil { + return m.LastAnalysisTime + } + return nil +} + +func (m *Discovered) GetAnalysisStatus() Discovered_AnalysisStatus { + if m != nil { + return m.AnalysisStatus + } + return Discovered_ANALYSIS_STATUS_UNSPECIFIED +} + +func (m *Discovered) GetAnalysisStatusError() *status.Status { + if m != nil { + return m.AnalysisStatusError + } + return nil +} + +func init() { + proto.RegisterType((*Discovery)(nil), "grafeas.v1beta1.discovery.Discovery") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.discovery.Details") + proto.RegisterType((*Discovered)(nil), "grafeas.v1beta1.discovery.Discovered") + proto.RegisterEnum("grafeas.v1beta1.discovery.Discovered_ContinuousAnalysis", Discovered_ContinuousAnalysis_name, Discovered_ContinuousAnalysis_value) + proto.RegisterEnum("grafeas.v1beta1.discovery.Discovered_AnalysisStatus", Discovered_AnalysisStatus_name, Discovered_AnalysisStatus_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/discovery/discovery.proto", fileDescriptor_discovery_ce3fafdae1720c89) +} + +var fileDescriptor_discovery_ce3fafdae1720c89 = []byte{ + // 541 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdf, 0x6a, 0xdb, 0x4c, + 0x10, 0xc5, 0x3f, 0x39, 0xf9, 0x92, 0x76, 0x92, 0x3a, 0x62, 0x9d, 0x52, 0xc7, 0x2d, 0xb8, 0xb8, + 0x14, 0x7a, 0xb5, 0x22, 0x69, 0x2f, 0x0a, 0xa5, 0x05, 0x55, 0x92, 0x13, 0x91, 0xb0, 0x16, 0x5a, + 0xa9, 0xd0, 0xd2, 0x22, 0xd6, 0xf2, 0x46, 0x88, 0xca, 0x5a, 0xa3, 0x95, 0x0d, 0xb9, 0xef, 0x0b, + 0xf4, 0x15, 0xfa, 0x46, 0x7d, 0xa3, 0xa2, 0xbf, 0xa9, 0x1d, 0x02, 0xee, 0x95, 0x77, 0x66, 0xcf, + 0xfc, 0xe6, 0xec, 0x31, 0x08, 0x8c, 0x48, 0x88, 0x28, 0xe1, 0xda, 0x8c, 0xaf, 0x72, 0x21, 0x12, + 0xa9, 0x85, 0x22, 0xcd, 0x59, 0x9c, 0xf2, 0x8c, 0xa5, 0x2c, 0xb9, 0x91, 0xb1, 0xd4, 0x56, 0xa7, + 0x53, 0x9e, 0xb3, 0x53, 0x6d, 0x16, 0xcb, 0x50, 0xac, 0x78, 0x76, 0x73, 0x7b, 0xc2, 0x8b, 0x4c, + 0xe4, 0x02, 0x9d, 0x44, 0x19, 0xbb, 0xe6, 0x4c, 0xe2, 0x5a, 0x8a, 0x5b, 0xc1, 0xe0, 0xfd, 0xf6, + 0xfc, 
0x50, 0xcc, 0xe7, 0x22, 0xad, 0x7f, 0x2a, 0xf2, 0x60, 0x58, 0x8f, 0x97, 0xd5, 0x74, 0x79, + 0xad, 0xe5, 0xf1, 0x9c, 0xcb, 0x9c, 0xcd, 0x17, 0xb5, 0xe0, 0x49, 0x2d, 0xc8, 0x16, 0xa1, 0x26, + 0x73, 0x96, 0x2f, 0x65, 0x75, 0x31, 0xba, 0x84, 0x87, 0x66, 0xe3, 0x02, 0x7d, 0x80, 0x47, 0xcd, + 0xba, 0xe0, 0x7b, 0x9c, 0xce, 0xfa, 0xca, 0x73, 0xe5, 0x55, 0xf7, 0xec, 0x04, 0x6f, 0x1a, 0x27, + 0x22, 0xe7, 0x97, 0x71, 0x3a, 0x73, 0x0f, 0x1b, 0x7d, 0x51, 0x8d, 0x1c, 0xd8, 0x37, 0x79, 0xce, + 0xe2, 0x44, 0x22, 0x0b, 0xa0, 0x79, 0x1d, 0xaf, 0x38, 0x07, 0x67, 0x2f, 0xf1, 0xbd, 0x01, 0x60, + 0xb3, 0x15, 0xbb, 0x7f, 0x0d, 0x8e, 0x7e, 0xef, 0x02, 0xdc, 0x5e, 0xa1, 0x18, 0x7a, 0x45, 0x30, + 0x71, 0xba, 0x14, 0x4b, 0x19, 0x34, 0xbb, 0x6b, 0x9b, 0x6f, 0xb7, 0xc2, 0x63, 0xa3, 0x05, 0xe8, + 0xf5, 0xbc, 0x8b, 0xc2, 0x3b, 0x3d, 0x74, 0x01, 0x28, 0x61, 0x32, 0x6f, 0x97, 0x04, 0x45, 0xa4, + 0xfd, 0x4e, 0xf9, 0x90, 0x01, 0xae, 0xe2, 0xc4, 0x4d, 0xde, 0xd8, 0x6b, 0xf2, 0x76, 0xd5, 0x62, + 0xaa, 0xa1, 0x14, 0x6d, 0xf4, 0x0d, 0x8e, 0x5a, 0x48, 0x95, 0x7d, 0x7f, 0xa7, 0x34, 0xfc, 0x66, + 0x3b, 0xc3, 0x0d, 0x8c, 0x96, 0xb3, 0x6e, 0x97, 0xad, 0xd5, 0x68, 0x0c, 0x8f, 0x37, 0xf0, 0x01, + 0xcf, 0x32, 0x91, 0xf5, 0x77, 0x4b, 0xaf, 0xa8, 0xf1, 0x9a, 0x2d, 0x42, 0x5c, 0x23, 0x7a, 0xeb, + 0x08, 0xab, 0x90, 0x8f, 0x28, 0xa0, 0xbb, 0xd1, 0xa0, 0x17, 0x30, 0x34, 0x26, 0xc4, 0xb3, 0x89, + 0x3f, 0xf1, 0x69, 0xa0, 0x13, 0xfd, 0xea, 0x33, 0xb5, 0x69, 0xe0, 0x13, 0xea, 0x58, 0x86, 0x3d, + 0xb6, 0x2d, 0x53, 0xfd, 0x0f, 0x01, 0xec, 0xe9, 0x86, 0x67, 0x7f, 0xb2, 0x54, 0x05, 0x1d, 0xc2, + 0x03, 0x9b, 0xd4, 0x55, 0x67, 0xf4, 0x53, 0x81, 0xee, 0xba, 0x7f, 0x34, 0x84, 0xa7, 0x2d, 0x86, + 0x7a, 0xba, 0xe7, 0x6f, 0xd2, 0x0e, 0x60, 0xdf, 0xb1, 0x88, 0x69, 0x93, 0xf3, 0x0a, 0x47, 0x0d, + 0x9d, 0x90, 0xa2, 0xea, 0xa0, 0x63, 0x50, 0xc7, 0x36, 0xb1, 0xe9, 0x85, 0x65, 0x06, 0xd4, 0x37, + 0x0c, 0x8b, 0x52, 0x75, 0x07, 0xf5, 0xe0, 0xa8, 0xed, 0x8e, 0x75, 0xfb, 0xca, 0x32, 0xd5, 0x5d, + 0xd4, 0x87, 0xe3, 0xb6, 0xe9, 0x13, 0xea, 0x3b, 0xce, 0xc4, 0xf5, 0x2c, 0x53, 0xfd, 0xff, 0xe3, + 0x0f, 0x05, 0x9e, 0xc5, 0xe2, 0xfe, 0xec, 0x1d, 0xe5, 0xcb, 0xd7, 0x3a, 0xb3, 0x48, 0x24, 0x2c, + 0x8d, 0xb0, 0xc8, 0x22, 0x2d, 0xe2, 0x69, 0xf9, 0x6f, 0x6b, 0xd5, 0x15, 0x5b, 0xc4, 0xf2, 0x9f, + 0xbe, 0x06, 0xef, 0xda, 0xd3, 0xaf, 0xce, 0xce, 0xb9, 0xab, 0x4f, 0xf7, 0x4a, 0xdc, 0xeb, 0x3f, + 0x01, 0x00, 0x00, 0xff, 0xff, 0xaf, 0x60, 0x55, 0x7f, 0x5b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas/grafeas.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas/grafeas.pb.go new file mode 100644 index 0000000..1e7b53e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas/grafeas.pb.go @@ -0,0 +1,2739 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto + +package grafeas // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/grafeas" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import attestation "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/attestation" +import build "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/build" +import common "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common" +import deployment "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/deployment" +import discovery "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/discovery" +import image "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image" +import _package "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package" +import provenance "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance" +import vulnerability "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An instance of an analysis type that has been found on a resource. +type Occurrence struct { + // Output only. The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. Immutable. The resource for which the occurrence applies. + Resource *Resource `protobuf:"bytes,2,opt,name=resource,proto3" json:"resource,omitempty"` + // Required. Immutable. The analysis note associated with this occurrence, in + // the form of `projects[PROVIDER_ID]/notes/[NOTE_ID]`. This field can be used + // as a filter in list requests. + NoteName string `protobuf:"bytes,3,opt,name=note_name,json=noteName,proto3" json:"note_name,omitempty"` + // Output only. This explicitly denotes which of the occurrence details are + // specified. This field can be used as a filter in list requests. + Kind common.NoteKind `protobuf:"varint,4,opt,name=kind,proto3,enum=grafeas.v1beta1.NoteKind" json:"kind,omitempty"` + // A description of actions that can be taken to remedy the note. + Remediation string `protobuf:"bytes,5,opt,name=remediation,proto3" json:"remediation,omitempty"` + // Output only. The time this occurrence was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time this occurrence was last updated. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Required. Immutable. Describes the details of the note kind found on this + // resource. + // + // Types that are valid to be assigned to Details: + // *Occurrence_Vulnerability + // *Occurrence_Build + // *Occurrence_DerivedImage + // *Occurrence_Installation + // *Occurrence_Deployment + // *Occurrence_Discovered + // *Occurrence_Attestation + Details isOccurrence_Details `protobuf_oneof:"details"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Occurrence) Reset() { *m = Occurrence{} } +func (m *Occurrence) String() string { return proto.CompactTextString(m) } +func (*Occurrence) ProtoMessage() {} +func (*Occurrence) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{0} +} +func (m *Occurrence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Occurrence.Unmarshal(m, b) +} +func (m *Occurrence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Occurrence.Marshal(b, m, deterministic) +} +func (dst *Occurrence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Occurrence.Merge(dst, src) +} +func (m *Occurrence) XXX_Size() int { + return xxx_messageInfo_Occurrence.Size(m) +} +func (m *Occurrence) XXX_DiscardUnknown() { + xxx_messageInfo_Occurrence.DiscardUnknown(m) +} + +var xxx_messageInfo_Occurrence proto.InternalMessageInfo + +func (m *Occurrence) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Occurrence) GetResource() *Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *Occurrence) GetNoteName() string { + if m != nil { + return m.NoteName + } + return "" +} + +func (m *Occurrence) GetKind() common.NoteKind { + if m != nil { + return m.Kind + } + return common.NoteKind_NOTE_KIND_UNSPECIFIED +} + +func (m *Occurrence) GetRemediation() string { + if m != nil { + return m.Remediation + } + return "" +} + +func (m *Occurrence) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Occurrence) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +type isOccurrence_Details interface { + isOccurrence_Details() +} + +type Occurrence_Vulnerability struct { + Vulnerability *vulnerability.Details `protobuf:"bytes,8,opt,name=vulnerability,proto3,oneof"` +} + +type Occurrence_Build struct { + Build *build.Details `protobuf:"bytes,9,opt,name=build,proto3,oneof"` +} + +type Occurrence_DerivedImage struct { + DerivedImage *image.Details `protobuf:"bytes,10,opt,name=derived_image,json=derivedImage,proto3,oneof"` +} + +type Occurrence_Installation struct { + Installation *_package.Details `protobuf:"bytes,11,opt,name=installation,proto3,oneof"` +} + +type Occurrence_Deployment struct { + Deployment *deployment.Details `protobuf:"bytes,12,opt,name=deployment,proto3,oneof"` +} + +type Occurrence_Discovered struct { + Discovered *discovery.Details `protobuf:"bytes,13,opt,name=discovered,proto3,oneof"` +} + +type Occurrence_Attestation struct { + Attestation *attestation.Details `protobuf:"bytes,14,opt,name=attestation,proto3,oneof"` +} + +func (*Occurrence_Vulnerability) isOccurrence_Details() {} + +func (*Occurrence_Build) isOccurrence_Details() {} + +func (*Occurrence_DerivedImage) isOccurrence_Details() {} + +func (*Occurrence_Installation) 
isOccurrence_Details() {} + +func (*Occurrence_Deployment) isOccurrence_Details() {} + +func (*Occurrence_Discovered) isOccurrence_Details() {} + +func (*Occurrence_Attestation) isOccurrence_Details() {} + +func (m *Occurrence) GetDetails() isOccurrence_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *Occurrence) GetVulnerability() *vulnerability.Details { + if x, ok := m.GetDetails().(*Occurrence_Vulnerability); ok { + return x.Vulnerability + } + return nil +} + +func (m *Occurrence) GetBuild() *build.Details { + if x, ok := m.GetDetails().(*Occurrence_Build); ok { + return x.Build + } + return nil +} + +func (m *Occurrence) GetDerivedImage() *image.Details { + if x, ok := m.GetDetails().(*Occurrence_DerivedImage); ok { + return x.DerivedImage + } + return nil +} + +func (m *Occurrence) GetInstallation() *_package.Details { + if x, ok := m.GetDetails().(*Occurrence_Installation); ok { + return x.Installation + } + return nil +} + +func (m *Occurrence) GetDeployment() *deployment.Details { + if x, ok := m.GetDetails().(*Occurrence_Deployment); ok { + return x.Deployment + } + return nil +} + +func (m *Occurrence) GetDiscovered() *discovery.Details { + if x, ok := m.GetDetails().(*Occurrence_Discovered); ok { + return x.Discovered + } + return nil +} + +func (m *Occurrence) GetAttestation() *attestation.Details { + if x, ok := m.GetDetails().(*Occurrence_Attestation); ok { + return x.Attestation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Occurrence) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Occurrence_OneofMarshaler, _Occurrence_OneofUnmarshaler, _Occurrence_OneofSizer, []interface{}{ + (*Occurrence_Vulnerability)(nil), + (*Occurrence_Build)(nil), + (*Occurrence_DerivedImage)(nil), + (*Occurrence_Installation)(nil), + (*Occurrence_Deployment)(nil), + (*Occurrence_Discovered)(nil), + (*Occurrence_Attestation)(nil), + } +} + +func _Occurrence_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_Vulnerability: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Vulnerability); err != nil { + return err + } + case *Occurrence_Build: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Build); err != nil { + return err + } + case *Occurrence_DerivedImage: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DerivedImage); err != nil { + return err + } + case *Occurrence_Installation: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Installation); err != nil { + return err + } + case *Occurrence_Deployment: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployment); err != nil { + return err + } + case *Occurrence_Discovered: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovered); err != nil { + return err + } + case *Occurrence_Attestation: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Attestation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Occurrence.Details has unexpected type %T", x) + } + return nil +} + +func _Occurrence_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Occurrence) + switch tag { + case 8: // details.vulnerability 
+ if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(vulnerability.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Vulnerability{msg} + return true, err + case 9: // details.build + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(build.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Build{msg} + return true, err + case 10: // details.derived_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(image.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_DerivedImage{msg} + return true, err + case 11: // details.installation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_package.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Installation{msg} + return true, err + case 12: // details.deployment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(deployment.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Deployment{msg} + return true, err + case 13: // details.discovered + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(discovery.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Discovered{msg} + return true, err + case 14: // details.attestation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(attestation.Details) + err := b.DecodeMessage(msg) + m.Details = &Occurrence_Attestation{msg} + return true, err + default: + return false, nil + } +} + +func _Occurrence_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Occurrence) + // details + switch x := m.Details.(type) { + case *Occurrence_Vulnerability: + s := proto.Size(x.Vulnerability) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Build: + s := proto.Size(x.Build) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_DerivedImage: + s := proto.Size(x.DerivedImage) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Installation: + s := proto.Size(x.Installation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Deployment: + s := proto.Size(x.Deployment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Discovered: + s := proto.Size(x.Discovered) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Occurrence_Attestation: + s := proto.Size(x.Attestation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An entity that can have metadata. For example, a Docker image. +type Resource struct { + // The name of the resource. For example, the name of a Docker image - + // "Debian". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The unique URI of the resource. For example, + // `https://gcr.io/project/image@sha256:foo` for a Docker image. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // The hash of the resource content. For example, the Docker digest. 
+ ContentHash *provenance.Hash `protobuf:"bytes,3,opt,name=content_hash,json=contentHash,proto3" json:"content_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{1} +} +func (m *Resource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resource.Unmarshal(m, b) +} +func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resource.Marshal(b, m, deterministic) +} +func (dst *Resource) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resource.Merge(dst, src) +} +func (m *Resource) XXX_Size() int { + return xxx_messageInfo_Resource.Size(m) +} +func (m *Resource) XXX_DiscardUnknown() { + xxx_messageInfo_Resource.DiscardUnknown(m) +} + +var xxx_messageInfo_Resource proto.InternalMessageInfo + +func (m *Resource) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Resource) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *Resource) GetContentHash() *provenance.Hash { + if m != nil { + return m.ContentHash + } + return nil +} + +// A type of analysis that can be done for a resource. +type Note struct { + // Output only. The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A one sentence description of this note. + ShortDescription string `protobuf:"bytes,2,opt,name=short_description,json=shortDescription,proto3" json:"short_description,omitempty"` + // A detailed description of this note. + LongDescription string `protobuf:"bytes,3,opt,name=long_description,json=longDescription,proto3" json:"long_description,omitempty"` + // Output only. The type of analysis. This field can be used as a filter in + // list requests. + Kind common.NoteKind `protobuf:"varint,4,opt,name=kind,proto3,enum=grafeas.v1beta1.NoteKind" json:"kind,omitempty"` + // URLs associated with this note. + RelatedUrl []*common.RelatedUrl `protobuf:"bytes,5,rep,name=related_url,json=relatedUrl,proto3" json:"related_url,omitempty"` + // Time of expiration for this note. Empty if note does not expire. + ExpirationTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=expiration_time,json=expirationTime,proto3" json:"expiration_time,omitempty"` + // Output only. The time this note was created. This field can be used as a + // filter in list requests. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time this note was last updated. This field can be used as + // a filter in list requests. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // Other notes related to this note. + RelatedNoteNames []string `protobuf:"bytes,9,rep,name=related_note_names,json=relatedNoteNames,proto3" json:"related_note_names,omitempty"` + // Required. Immutable. The type of analysis this note represents. 
+ // + // Types that are valid to be assigned to Type: + // *Note_Vulnerability + // *Note_Build + // *Note_BaseImage + // *Note_Package + // *Note_Deployable + // *Note_Discovery + // *Note_AttestationAuthority + Type isNote_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Note) Reset() { *m = Note{} } +func (m *Note) String() string { return proto.CompactTextString(m) } +func (*Note) ProtoMessage() {} +func (*Note) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{2} +} +func (m *Note) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Note.Unmarshal(m, b) +} +func (m *Note) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Note.Marshal(b, m, deterministic) +} +func (dst *Note) XXX_Merge(src proto.Message) { + xxx_messageInfo_Note.Merge(dst, src) +} +func (m *Note) XXX_Size() int { + return xxx_messageInfo_Note.Size(m) +} +func (m *Note) XXX_DiscardUnknown() { + xxx_messageInfo_Note.DiscardUnknown(m) +} + +var xxx_messageInfo_Note proto.InternalMessageInfo + +func (m *Note) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Note) GetShortDescription() string { + if m != nil { + return m.ShortDescription + } + return "" +} + +func (m *Note) GetLongDescription() string { + if m != nil { + return m.LongDescription + } + return "" +} + +func (m *Note) GetKind() common.NoteKind { + if m != nil { + return m.Kind + } + return common.NoteKind_NOTE_KIND_UNSPECIFIED +} + +func (m *Note) GetRelatedUrl() []*common.RelatedUrl { + if m != nil { + return m.RelatedUrl + } + return nil +} + +func (m *Note) GetExpirationTime() *timestamp.Timestamp { + if m != nil { + return m.ExpirationTime + } + return nil +} + +func (m *Note) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Note) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *Note) GetRelatedNoteNames() []string { + if m != nil { + return m.RelatedNoteNames + } + return nil +} + +type isNote_Type interface { + isNote_Type() +} + +type Note_Vulnerability struct { + Vulnerability *vulnerability.Vulnerability `protobuf:"bytes,10,opt,name=vulnerability,proto3,oneof"` +} + +type Note_Build struct { + Build *build.Build `protobuf:"bytes,11,opt,name=build,proto3,oneof"` +} + +type Note_BaseImage struct { + BaseImage *image.Basis `protobuf:"bytes,12,opt,name=base_image,json=baseImage,proto3,oneof"` +} + +type Note_Package struct { + Package *_package.Package `protobuf:"bytes,13,opt,name=package,proto3,oneof"` +} + +type Note_Deployable struct { + Deployable *deployment.Deployable `protobuf:"bytes,14,opt,name=deployable,proto3,oneof"` +} + +type Note_Discovery struct { + Discovery *discovery.Discovery `protobuf:"bytes,15,opt,name=discovery,proto3,oneof"` +} + +type Note_AttestationAuthority struct { + AttestationAuthority *attestation.Authority `protobuf:"bytes,16,opt,name=attestation_authority,json=attestationAuthority,proto3,oneof"` +} + +func (*Note_Vulnerability) isNote_Type() {} + +func (*Note_Build) isNote_Type() {} + +func (*Note_BaseImage) isNote_Type() {} + +func (*Note_Package) isNote_Type() {} + +func (*Note_Deployable) isNote_Type() {} + +func (*Note_Discovery) isNote_Type() {} + +func (*Note_AttestationAuthority) isNote_Type() {} + +func (m *Note) GetType() isNote_Type { + if m != nil { + return m.Type + } + return 
nil +} + +func (m *Note) GetVulnerability() *vulnerability.Vulnerability { + if x, ok := m.GetType().(*Note_Vulnerability); ok { + return x.Vulnerability + } + return nil +} + +func (m *Note) GetBuild() *build.Build { + if x, ok := m.GetType().(*Note_Build); ok { + return x.Build + } + return nil +} + +func (m *Note) GetBaseImage() *image.Basis { + if x, ok := m.GetType().(*Note_BaseImage); ok { + return x.BaseImage + } + return nil +} + +func (m *Note) GetPackage() *_package.Package { + if x, ok := m.GetType().(*Note_Package); ok { + return x.Package + } + return nil +} + +func (m *Note) GetDeployable() *deployment.Deployable { + if x, ok := m.GetType().(*Note_Deployable); ok { + return x.Deployable + } + return nil +} + +func (m *Note) GetDiscovery() *discovery.Discovery { + if x, ok := m.GetType().(*Note_Discovery); ok { + return x.Discovery + } + return nil +} + +func (m *Note) GetAttestationAuthority() *attestation.Authority { + if x, ok := m.GetType().(*Note_AttestationAuthority); ok { + return x.AttestationAuthority + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Note) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Note_OneofMarshaler, _Note_OneofUnmarshaler, _Note_OneofSizer, []interface{}{ + (*Note_Vulnerability)(nil), + (*Note_Build)(nil), + (*Note_BaseImage)(nil), + (*Note_Package)(nil), + (*Note_Deployable)(nil), + (*Note_Discovery)(nil), + (*Note_AttestationAuthority)(nil), + } +} + +func _Note_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Note) + // type + switch x := m.Type.(type) { + case *Note_Vulnerability: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Vulnerability); err != nil { + return err + } + case *Note_Build: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Build); err != nil { + return err + } + case *Note_BaseImage: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BaseImage); err != nil { + return err + } + case *Note_Package: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Package); err != nil { + return err + } + case *Note_Deployable: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Deployable); err != nil { + return err + } + case *Note_Discovery: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Discovery); err != nil { + return err + } + case *Note_AttestationAuthority: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AttestationAuthority); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Note.Type has unexpected type %T", x) + } + return nil +} + +func _Note_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Note) + switch tag { + case 10: // type.vulnerability + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(vulnerability.Vulnerability) + err := b.DecodeMessage(msg) + m.Type = &Note_Vulnerability{msg} + return true, err + case 11: // type.build + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(build.Build) + err := b.DecodeMessage(msg) + m.Type = &Note_Build{msg} + return true, err + case 12: // type.base_image + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(image.Basis) + err 
:= b.DecodeMessage(msg) + m.Type = &Note_BaseImage{msg} + return true, err + case 13: // type.package + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_package.Package) + err := b.DecodeMessage(msg) + m.Type = &Note_Package{msg} + return true, err + case 14: // type.deployable + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(deployment.Deployable) + err := b.DecodeMessage(msg) + m.Type = &Note_Deployable{msg} + return true, err + case 15: // type.discovery + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(discovery.Discovery) + err := b.DecodeMessage(msg) + m.Type = &Note_Discovery{msg} + return true, err + case 16: // type.attestation_authority + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(attestation.Authority) + err := b.DecodeMessage(msg) + m.Type = &Note_AttestationAuthority{msg} + return true, err + default: + return false, nil + } +} + +func _Note_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Note) + // type + switch x := m.Type.(type) { + case *Note_Vulnerability: + s := proto.Size(x.Vulnerability) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Build: + s := proto.Size(x.Build) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_BaseImage: + s := proto.Size(x.BaseImage) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Package: + s := proto.Size(x.Package) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Deployable: + s := proto.Size(x.Deployable) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_Discovery: + s := proto.Size(x.Discovery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Note_AttestationAuthority: + s := proto.Size(x.AttestationAuthority) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request to get an occurrence. +type GetOccurrenceRequest struct { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOccurrenceRequest) Reset() { *m = GetOccurrenceRequest{} } +func (m *GetOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceRequest) ProtoMessage() {} +func (*GetOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{3} +} +func (m *GetOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceRequest.Merge(dst, src) +} +func (m *GetOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceRequest.Size(m) +} +func (m *GetOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceRequest proto.InternalMessageInfo + +func (m *GetOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list occurrences. +type ListOccurrencesRequest struct { + // The name of the project to list occurrences for in the form of + // `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of occurrences to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOccurrencesRequest) Reset() { *m = ListOccurrencesRequest{} } +func (m *ListOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesRequest) ProtoMessage() {} +func (*ListOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{4} +} +func (m *ListOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesRequest.Merge(dst, src) +} +func (m *ListOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesRequest.Size(m) +} +func (m *ListOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesRequest proto.InternalMessageInfo + +func (m *ListOccurrencesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for listing occurrences. +type ListOccurrencesResponse struct { + // The occurrences requested. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOccurrencesResponse) Reset() { *m = ListOccurrencesResponse{} } +func (m *ListOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListOccurrencesResponse) ProtoMessage() {} +func (*ListOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{5} +} +func (m *ListOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOccurrencesResponse.Merge(dst, src) +} +func (m *ListOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListOccurrencesResponse.Size(m) +} +func (m *ListOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOccurrencesResponse proto.InternalMessageInfo + +func (m *ListOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to delete a occurrence. +type DeleteOccurrenceRequest struct { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteOccurrenceRequest) Reset() { *m = DeleteOccurrenceRequest{} } +func (m *DeleteOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteOccurrenceRequest) ProtoMessage() {} +func (*DeleteOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{6} +} +func (m *DeleteOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteOccurrenceRequest.Unmarshal(m, b) +} +func (m *DeleteOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteOccurrenceRequest.Merge(dst, src) +} +func (m *DeleteOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteOccurrenceRequest.Size(m) +} +func (m *DeleteOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteOccurrenceRequest proto.InternalMessageInfo + +func (m *DeleteOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to create a new occurrence. +type CreateOccurrenceRequest struct { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the occurrence is to be created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The occurrence to create. 
+ Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence,proto3" json:"occurrence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateOccurrenceRequest) Reset() { *m = CreateOccurrenceRequest{} } +func (m *CreateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateOccurrenceRequest) ProtoMessage() {} +func (*CreateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{7} +} +func (m *CreateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateOccurrenceRequest.Unmarshal(m, b) +} +func (m *CreateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateOccurrenceRequest.Merge(dst, src) +} +func (m *CreateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_CreateOccurrenceRequest.Size(m) +} +func (m *CreateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateOccurrenceRequest proto.InternalMessageInfo + +func (m *CreateOccurrenceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +// Request to update an occurrence. +type UpdateOccurrenceRequest struct { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated occurrence. + Occurrence *Occurrence `protobuf:"bytes,2,opt,name=occurrence,proto3" json:"occurrence,omitempty"` + // The fields to update. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateOccurrenceRequest) Reset() { *m = UpdateOccurrenceRequest{} } +func (m *UpdateOccurrenceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateOccurrenceRequest) ProtoMessage() {} +func (*UpdateOccurrenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{8} +} +func (m *UpdateOccurrenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateOccurrenceRequest.Unmarshal(m, b) +} +func (m *UpdateOccurrenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateOccurrenceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateOccurrenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateOccurrenceRequest.Merge(dst, src) +} +func (m *UpdateOccurrenceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateOccurrenceRequest.Size(m) +} +func (m *UpdateOccurrenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateOccurrenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateOccurrenceRequest proto.InternalMessageInfo + +func (m *UpdateOccurrenceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateOccurrenceRequest) GetOccurrence() *Occurrence { + if m != nil { + return m.Occurrence + } + return nil +} + +func (m *UpdateOccurrenceRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to get a note. +type GetNoteRequest struct { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNoteRequest) Reset() { *m = GetNoteRequest{} } +func (m *GetNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetNoteRequest) ProtoMessage() {} +func (*GetNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{9} +} +func (m *GetNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNoteRequest.Unmarshal(m, b) +} +func (m *GetNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNoteRequest.Merge(dst, src) +} +func (m *GetNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetNoteRequest.Size(m) +} +func (m *GetNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNoteRequest proto.InternalMessageInfo + +func (m *GetNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to get the note to which the specified occurrence is attached. +type GetOccurrenceNoteRequest struct { + // The name of the occurrence in the form of + // `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOccurrenceNoteRequest) Reset() { *m = GetOccurrenceNoteRequest{} } +func (m *GetOccurrenceNoteRequest) String() string { return proto.CompactTextString(m) } +func (*GetOccurrenceNoteRequest) ProtoMessage() {} +func (*GetOccurrenceNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{10} +} +func (m *GetOccurrenceNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOccurrenceNoteRequest.Unmarshal(m, b) +} +func (m *GetOccurrenceNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOccurrenceNoteRequest.Marshal(b, m, deterministic) +} +func (dst *GetOccurrenceNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOccurrenceNoteRequest.Merge(dst, src) +} +func (m *GetOccurrenceNoteRequest) XXX_Size() int { + return xxx_messageInfo_GetOccurrenceNoteRequest.Size(m) +} +func (m *GetOccurrenceNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOccurrenceNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOccurrenceNoteRequest proto.InternalMessageInfo + +func (m *GetOccurrenceNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to list notes. +type ListNotesRequest struct { + // The name of the project to list notes for in the form of + // `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of notes to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotesRequest) Reset() { *m = ListNotesRequest{} } +func (m *ListNotesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotesRequest) ProtoMessage() {} +func (*ListNotesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{11} +} +func (m *ListNotesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesRequest.Unmarshal(m, b) +} +func (m *ListNotesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNotesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesRequest.Merge(dst, src) +} +func (m *ListNotesRequest) XXX_Size() int { + return xxx_messageInfo_ListNotesRequest.Size(m) +} +func (m *ListNotesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesRequest proto.InternalMessageInfo + +func (m *ListNotesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListNotesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNotesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNotesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for listing notes. +type ListNotesResponse struct { + // The notes requested. + Notes []*Note `protobuf:"bytes,1,rep,name=notes,proto3" json:"notes,omitempty"` + // The next pagination token in the list response. It should be used as + // `page_token` for the following request. An empty value means no more + // results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotesResponse) Reset() { *m = ListNotesResponse{} } +func (m *ListNotesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNotesResponse) ProtoMessage() {} +func (*ListNotesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{12} +} +func (m *ListNotesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotesResponse.Unmarshal(m, b) +} +func (m *ListNotesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNotesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotesResponse.Merge(dst, src) +} +func (m *ListNotesResponse) XXX_Size() int { + return xxx_messageInfo_ListNotesResponse.Size(m) +} +func (m *ListNotesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotesResponse proto.InternalMessageInfo + +func (m *ListNotesResponse) GetNotes() []*Note { + if m != nil { + return m.Notes + } + return nil +} + +func (m *ListNotesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to delete a note. 
+type DeleteNoteRequest struct { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNoteRequest) Reset() { *m = DeleteNoteRequest{} } +func (m *DeleteNoteRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNoteRequest) ProtoMessage() {} +func (*DeleteNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{13} +} +func (m *DeleteNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNoteRequest.Unmarshal(m, b) +} +func (m *DeleteNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNoteRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNoteRequest.Merge(dst, src) +} +func (m *DeleteNoteRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNoteRequest.Size(m) +} +func (m *DeleteNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNoteRequest proto.InternalMessageInfo + +func (m *DeleteNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request to create a new note. +type CreateNoteRequest struct { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the note is to be created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The ID to use for this note. + NoteId string `protobuf:"bytes,2,opt,name=note_id,json=noteId,proto3" json:"note_id,omitempty"` + // The note to create. + Note *Note `protobuf:"bytes,3,opt,name=note,proto3" json:"note,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNoteRequest) Reset() { *m = CreateNoteRequest{} } +func (m *CreateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNoteRequest) ProtoMessage() {} +func (*CreateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{14} +} +func (m *CreateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNoteRequest.Unmarshal(m, b) +} +func (m *CreateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNoteRequest.Merge(dst, src) +} +func (m *CreateNoteRequest) XXX_Size() int { + return xxx_messageInfo_CreateNoteRequest.Size(m) +} +func (m *CreateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNoteRequest proto.InternalMessageInfo + +func (m *CreateNoteRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateNoteRequest) GetNoteId() string { + if m != nil { + return m.NoteId + } + return "" +} + +func (m *CreateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +// Request to update a note. +type UpdateNoteRequest struct { + // The name of the note in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated note. + Note *Note `protobuf:"bytes,2,opt,name=note,proto3" json:"note,omitempty"` + // The fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNoteRequest) Reset() { *m = UpdateNoteRequest{} } +func (m *UpdateNoteRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNoteRequest) ProtoMessage() {} +func (*UpdateNoteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{15} +} +func (m *UpdateNoteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNoteRequest.Unmarshal(m, b) +} +func (m *UpdateNoteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNoteRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNoteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNoteRequest.Merge(dst, src) +} +func (m *UpdateNoteRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNoteRequest.Size(m) +} +func (m *UpdateNoteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNoteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNoteRequest proto.InternalMessageInfo + +func (m *UpdateNoteRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateNoteRequest) GetNote() *Note { + if m != nil { + return m.Note + } + return nil +} + +func (m *UpdateNoteRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to list occurrences for a note. +type ListNoteOccurrencesRequest struct { + // The name of the note to list occurrences for in the form of + // `projects/[PROVIDER_ID]/notes/[NOTE_ID]`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Number of occurrences to return in the list. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNoteOccurrencesRequest) Reset() { *m = ListNoteOccurrencesRequest{} } +func (m *ListNoteOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesRequest) ProtoMessage() {} +func (*ListNoteOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{16} +} +func (m *ListNoteOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesRequest.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesRequest.Merge(dst, src) +} +func (m *ListNoteOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesRequest.Size(m) +} +func (m *ListNoteOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesRequest proto.InternalMessageInfo + +func (m *ListNoteOccurrencesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNoteOccurrencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNoteOccurrencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for listing occurrences for a note. +type ListNoteOccurrencesResponse struct { + // The occurrences attached to the specified note. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + // Token to provide to skip to a particular spot in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNoteOccurrencesResponse) Reset() { *m = ListNoteOccurrencesResponse{} } +func (m *ListNoteOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*ListNoteOccurrencesResponse) ProtoMessage() {} +func (*ListNoteOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{17} +} +func (m *ListNoteOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNoteOccurrencesResponse.Unmarshal(m, b) +} +func (m *ListNoteOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNoteOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *ListNoteOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNoteOccurrencesResponse.Merge(dst, src) +} +func (m *ListNoteOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_ListNoteOccurrencesResponse.Size(m) +} +func (m *ListNoteOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNoteOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNoteOccurrencesResponse proto.InternalMessageInfo + +func (m *ListNoteOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +func (m *ListNoteOccurrencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to create notes in batch. +type BatchCreateNotesRequest struct { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the notes are to be created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The notes to create. 
+ Notes map[string]*Note `protobuf:"bytes,2,rep,name=notes,proto3" json:"notes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateNotesRequest) Reset() { *m = BatchCreateNotesRequest{} } +func (m *BatchCreateNotesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateNotesRequest) ProtoMessage() {} +func (*BatchCreateNotesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{18} +} +func (m *BatchCreateNotesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateNotesRequest.Unmarshal(m, b) +} +func (m *BatchCreateNotesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateNotesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateNotesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateNotesRequest.Merge(dst, src) +} +func (m *BatchCreateNotesRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateNotesRequest.Size(m) +} +func (m *BatchCreateNotesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateNotesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateNotesRequest proto.InternalMessageInfo + +func (m *BatchCreateNotesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchCreateNotesRequest) GetNotes() map[string]*Note { + if m != nil { + return m.Notes + } + return nil +} + +// Response for creating notes in batch. +type BatchCreateNotesResponse struct { + // The notes that were created. + Notes []*Note `protobuf:"bytes,1,rep,name=notes,proto3" json:"notes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateNotesResponse) Reset() { *m = BatchCreateNotesResponse{} } +func (m *BatchCreateNotesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchCreateNotesResponse) ProtoMessage() {} +func (*BatchCreateNotesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{19} +} +func (m *BatchCreateNotesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateNotesResponse.Unmarshal(m, b) +} +func (m *BatchCreateNotesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateNotesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchCreateNotesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateNotesResponse.Merge(dst, src) +} +func (m *BatchCreateNotesResponse) XXX_Size() int { + return xxx_messageInfo_BatchCreateNotesResponse.Size(m) +} +func (m *BatchCreateNotesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateNotesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateNotesResponse proto.InternalMessageInfo + +func (m *BatchCreateNotesResponse) GetNotes() []*Note { + if m != nil { + return m.Notes + } + return nil +} + +// Request to create occurrences in batch. +type BatchCreateOccurrencesRequest struct { + // The name of the project in the form of `projects/[PROJECT_ID]`, under which + // the occurrences are to be created. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The occurrences to create. 
+ Occurrences []*Occurrence `protobuf:"bytes,2,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateOccurrencesRequest) Reset() { *m = BatchCreateOccurrencesRequest{} } +func (m *BatchCreateOccurrencesRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateOccurrencesRequest) ProtoMessage() {} +func (*BatchCreateOccurrencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{20} +} +func (m *BatchCreateOccurrencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateOccurrencesRequest.Unmarshal(m, b) +} +func (m *BatchCreateOccurrencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateOccurrencesRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateOccurrencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateOccurrencesRequest.Merge(dst, src) +} +func (m *BatchCreateOccurrencesRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateOccurrencesRequest.Size(m) +} +func (m *BatchCreateOccurrencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateOccurrencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateOccurrencesRequest proto.InternalMessageInfo + +func (m *BatchCreateOccurrencesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *BatchCreateOccurrencesRequest) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +// Response for creating occurrences in batch. +type BatchCreateOccurrencesResponse struct { + // The occurrences that were created. + Occurrences []*Occurrence `protobuf:"bytes,1,rep,name=occurrences,proto3" json:"occurrences,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateOccurrencesResponse) Reset() { *m = BatchCreateOccurrencesResponse{} } +func (m *BatchCreateOccurrencesResponse) String() string { return proto.CompactTextString(m) } +func (*BatchCreateOccurrencesResponse) ProtoMessage() {} +func (*BatchCreateOccurrencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{21} +} +func (m *BatchCreateOccurrencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateOccurrencesResponse.Unmarshal(m, b) +} +func (m *BatchCreateOccurrencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateOccurrencesResponse.Marshal(b, m, deterministic) +} +func (dst *BatchCreateOccurrencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateOccurrencesResponse.Merge(dst, src) +} +func (m *BatchCreateOccurrencesResponse) XXX_Size() int { + return xxx_messageInfo_BatchCreateOccurrencesResponse.Size(m) +} +func (m *BatchCreateOccurrencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateOccurrencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateOccurrencesResponse proto.InternalMessageInfo + +func (m *BatchCreateOccurrencesResponse) GetOccurrences() []*Occurrence { + if m != nil { + return m.Occurrences + } + return nil +} + +// Request to get a vulnerability summary for some set of occurrences. 
+type GetVulnerabilityOccurrencesSummaryRequest struct { + // The name of the project to get a vulnerability summary for in the form of + // `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter expression. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVulnerabilityOccurrencesSummaryRequest) Reset() { + *m = GetVulnerabilityOccurrencesSummaryRequest{} +} +func (m *GetVulnerabilityOccurrencesSummaryRequest) String() string { return proto.CompactTextString(m) } +func (*GetVulnerabilityOccurrencesSummaryRequest) ProtoMessage() {} +func (*GetVulnerabilityOccurrencesSummaryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{22} +} +func (m *GetVulnerabilityOccurrencesSummaryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest.Unmarshal(m, b) +} +func (m *GetVulnerabilityOccurrencesSummaryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest.Marshal(b, m, deterministic) +} +func (dst *GetVulnerabilityOccurrencesSummaryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest.Merge(dst, src) +} +func (m *GetVulnerabilityOccurrencesSummaryRequest) XXX_Size() int { + return xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest.Size(m) +} +func (m *GetVulnerabilityOccurrencesSummaryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVulnerabilityOccurrencesSummaryRequest proto.InternalMessageInfo + +func (m *GetVulnerabilityOccurrencesSummaryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *GetVulnerabilityOccurrencesSummaryRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// A summary of how many vulnerability occurrences there are per resource and +// severity type. +type VulnerabilityOccurrencesSummary struct { + // A listing by resource of the number of fixable and total vulnerabilities. 
+ Counts []*VulnerabilityOccurrencesSummary_FixableTotalByDigest `protobuf:"bytes,1,rep,name=counts,proto3" json:"counts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityOccurrencesSummary) Reset() { *m = VulnerabilityOccurrencesSummary{} } +func (m *VulnerabilityOccurrencesSummary) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityOccurrencesSummary) ProtoMessage() {} +func (*VulnerabilityOccurrencesSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{23} +} +func (m *VulnerabilityOccurrencesSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityOccurrencesSummary.Unmarshal(m, b) +} +func (m *VulnerabilityOccurrencesSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityOccurrencesSummary.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityOccurrencesSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityOccurrencesSummary.Merge(dst, src) +} +func (m *VulnerabilityOccurrencesSummary) XXX_Size() int { + return xxx_messageInfo_VulnerabilityOccurrencesSummary.Size(m) +} +func (m *VulnerabilityOccurrencesSummary) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityOccurrencesSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityOccurrencesSummary proto.InternalMessageInfo + +func (m *VulnerabilityOccurrencesSummary) GetCounts() []*VulnerabilityOccurrencesSummary_FixableTotalByDigest { + if m != nil { + return m.Counts + } + return nil +} + +// Per resource and severity counts of fixable and total vulnerabilities. +type VulnerabilityOccurrencesSummary_FixableTotalByDigest struct { + // The affected resource. + Resource *Resource `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + // The severity for this count. SEVERITY_UNSPECIFIED indicates total across + // all severities. + Severity vulnerability.Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"severity,omitempty"` + // The number of fixable vulnerabilities associated with this resource. + FixableCount int64 `protobuf:"varint,3,opt,name=fixable_count,json=fixableCount,proto3" json:"fixable_count,omitempty"` + // The total number of vulnerabilities associated with this resource. 
+ TotalCount int64 `protobuf:"varint,4,opt,name=total_count,json=totalCount,proto3" json:"total_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) Reset() { + *m = VulnerabilityOccurrencesSummary_FixableTotalByDigest{} +} +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) String() string { + return proto.CompactTextString(m) +} +func (*VulnerabilityOccurrencesSummary_FixableTotalByDigest) ProtoMessage() {} +func (*VulnerabilityOccurrencesSummary_FixableTotalByDigest) Descriptor() ([]byte, []int) { + return fileDescriptor_grafeas_0102e4ff7bfb77b0, []int{23, 0} +} +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest.Unmarshal(m, b) +} +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityOccurrencesSummary_FixableTotalByDigest) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest.Merge(dst, src) +} +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) XXX_Size() int { + return xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest.Size(m) +} +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityOccurrencesSummary_FixableTotalByDigest proto.InternalMessageInfo + +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) GetResource() *Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) GetSeverity() vulnerability.Severity { + if m != nil { + return m.Severity + } + return vulnerability.Severity_SEVERITY_UNSPECIFIED +} + +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) GetFixableCount() int64 { + if m != nil { + return m.FixableCount + } + return 0 +} + +func (m *VulnerabilityOccurrencesSummary_FixableTotalByDigest) GetTotalCount() int64 { + if m != nil { + return m.TotalCount + } + return 0 +} + +func init() { + proto.RegisterType((*Occurrence)(nil), "grafeas.v1beta1.Occurrence") + proto.RegisterType((*Resource)(nil), "grafeas.v1beta1.Resource") + proto.RegisterType((*Note)(nil), "grafeas.v1beta1.Note") + proto.RegisterType((*GetOccurrenceRequest)(nil), "grafeas.v1beta1.GetOccurrenceRequest") + proto.RegisterType((*ListOccurrencesRequest)(nil), "grafeas.v1beta1.ListOccurrencesRequest") + proto.RegisterType((*ListOccurrencesResponse)(nil), "grafeas.v1beta1.ListOccurrencesResponse") + proto.RegisterType((*DeleteOccurrenceRequest)(nil), "grafeas.v1beta1.DeleteOccurrenceRequest") + proto.RegisterType((*CreateOccurrenceRequest)(nil), "grafeas.v1beta1.CreateOccurrenceRequest") + proto.RegisterType((*UpdateOccurrenceRequest)(nil), "grafeas.v1beta1.UpdateOccurrenceRequest") + proto.RegisterType((*GetNoteRequest)(nil), "grafeas.v1beta1.GetNoteRequest") + proto.RegisterType((*GetOccurrenceNoteRequest)(nil), "grafeas.v1beta1.GetOccurrenceNoteRequest") + proto.RegisterType((*ListNotesRequest)(nil), "grafeas.v1beta1.ListNotesRequest") + proto.RegisterType((*ListNotesResponse)(nil), 
"grafeas.v1beta1.ListNotesResponse") + proto.RegisterType((*DeleteNoteRequest)(nil), "grafeas.v1beta1.DeleteNoteRequest") + proto.RegisterType((*CreateNoteRequest)(nil), "grafeas.v1beta1.CreateNoteRequest") + proto.RegisterType((*UpdateNoteRequest)(nil), "grafeas.v1beta1.UpdateNoteRequest") + proto.RegisterType((*ListNoteOccurrencesRequest)(nil), "grafeas.v1beta1.ListNoteOccurrencesRequest") + proto.RegisterType((*ListNoteOccurrencesResponse)(nil), "grafeas.v1beta1.ListNoteOccurrencesResponse") + proto.RegisterType((*BatchCreateNotesRequest)(nil), "grafeas.v1beta1.BatchCreateNotesRequest") + proto.RegisterMapType((map[string]*Note)(nil), "grafeas.v1beta1.BatchCreateNotesRequest.NotesEntry") + proto.RegisterType((*BatchCreateNotesResponse)(nil), "grafeas.v1beta1.BatchCreateNotesResponse") + proto.RegisterType((*BatchCreateOccurrencesRequest)(nil), "grafeas.v1beta1.BatchCreateOccurrencesRequest") + proto.RegisterType((*BatchCreateOccurrencesResponse)(nil), "grafeas.v1beta1.BatchCreateOccurrencesResponse") + proto.RegisterType((*GetVulnerabilityOccurrencesSummaryRequest)(nil), "grafeas.v1beta1.GetVulnerabilityOccurrencesSummaryRequest") + proto.RegisterType((*VulnerabilityOccurrencesSummary)(nil), "grafeas.v1beta1.VulnerabilityOccurrencesSummary") + proto.RegisterType((*VulnerabilityOccurrencesSummary_FixableTotalByDigest)(nil), "grafeas.v1beta1.VulnerabilityOccurrencesSummary.FixableTotalByDigest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GrafeasV1Beta1Client is the client API for GrafeasV1Beta1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GrafeasV1Beta1Client interface { + // Gets the specified occurrence. + GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Lists occurrences for the specified project. + ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) + // Deletes the specified occurrence. For example, use this method to delete an + // occurrence when the occurrence is no longer applicable for the given + // resource. + DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new occurrence. + CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Creates new occurrences in batch. + BatchCreateOccurrences(ctx context.Context, in *BatchCreateOccurrencesRequest, opts ...grpc.CallOption) (*BatchCreateOccurrencesResponse, error) + // Updates the specified occurrence. + UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) + // Gets the note attached to the specified occurrence. Consumer projects can + // use this method to get a note that belongs to a provider project. + GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Gets the specified note. + GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists notes for the specified project. 
+ ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) + // Deletes the specified note. + DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a new note. + CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Creates new notes in batch. + BatchCreateNotes(ctx context.Context, in *BatchCreateNotesRequest, opts ...grpc.CallOption) (*BatchCreateNotesResponse, error) + // Updates the specified note. + UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) + // Lists occurrences referencing the specified note. Provider projects can use + // this method to get all occurrences across consumer projects referencing the + // specified note. + ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) + // Gets a summary of the number and severity of occurrences. + GetVulnerabilityOccurrencesSummary(ctx context.Context, in *GetVulnerabilityOccurrencesSummaryRequest, opts ...grpc.CallOption) (*VulnerabilityOccurrencesSummary, error) +} + +type grafeasV1Beta1Client struct { + cc *grpc.ClientConn +} + +func NewGrafeasV1Beta1Client(cc *grpc.ClientConn) GrafeasV1Beta1Client { + return &grafeasV1Beta1Client{cc} +} + +func (c *grafeasV1Beta1Client) GetOccurrence(ctx context.Context, in *GetOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/GetOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) ListOccurrences(ctx context.Context, in *ListOccurrencesRequest, opts ...grpc.CallOption) (*ListOccurrencesResponse, error) { + out := new(ListOccurrencesResponse) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/ListOccurrences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) DeleteOccurrence(ctx context.Context, in *DeleteOccurrenceRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/DeleteOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) CreateOccurrence(ctx context.Context, in *CreateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/CreateOccurrence", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) BatchCreateOccurrences(ctx context.Context, in *BatchCreateOccurrencesRequest, opts ...grpc.CallOption) (*BatchCreateOccurrencesResponse, error) { + out := new(BatchCreateOccurrencesResponse) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/BatchCreateOccurrences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) UpdateOccurrence(ctx context.Context, in *UpdateOccurrenceRequest, opts ...grpc.CallOption) (*Occurrence, error) { + out := new(Occurrence) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/UpdateOccurrence", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) GetOccurrenceNote(ctx context.Context, in *GetOccurrenceNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/GetOccurrenceNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) GetNote(ctx context.Context, in *GetNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/GetNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) ListNotes(ctx context.Context, in *ListNotesRequest, opts ...grpc.CallOption) (*ListNotesResponse, error) { + out := new(ListNotesResponse) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/ListNotes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) DeleteNote(ctx context.Context, in *DeleteNoteRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/DeleteNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) CreateNote(ctx context.Context, in *CreateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/CreateNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) BatchCreateNotes(ctx context.Context, in *BatchCreateNotesRequest, opts ...grpc.CallOption) (*BatchCreateNotesResponse, error) { + out := new(BatchCreateNotesResponse) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/BatchCreateNotes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) UpdateNote(ctx context.Context, in *UpdateNoteRequest, opts ...grpc.CallOption) (*Note, error) { + out := new(Note) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/UpdateNote", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) ListNoteOccurrences(ctx context.Context, in *ListNoteOccurrencesRequest, opts ...grpc.CallOption) (*ListNoteOccurrencesResponse, error) { + out := new(ListNoteOccurrencesResponse) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/ListNoteOccurrences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *grafeasV1Beta1Client) GetVulnerabilityOccurrencesSummary(ctx context.Context, in *GetVulnerabilityOccurrencesSummaryRequest, opts ...grpc.CallOption) (*VulnerabilityOccurrencesSummary, error) { + out := new(VulnerabilityOccurrencesSummary) + err := c.cc.Invoke(ctx, "/grafeas.v1beta1.GrafeasV1Beta1/GetVulnerabilityOccurrencesSummary", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GrafeasV1Beta1Server is the server API for GrafeasV1Beta1 service. +type GrafeasV1Beta1Server interface { + // Gets the specified occurrence. + GetOccurrence(context.Context, *GetOccurrenceRequest) (*Occurrence, error) + // Lists occurrences for the specified project. + ListOccurrences(context.Context, *ListOccurrencesRequest) (*ListOccurrencesResponse, error) + // Deletes the specified occurrence. 
For example, use this method to delete an + // occurrence when the occurrence is no longer applicable for the given + // resource. + DeleteOccurrence(context.Context, *DeleteOccurrenceRequest) (*empty.Empty, error) + // Creates a new occurrence. + CreateOccurrence(context.Context, *CreateOccurrenceRequest) (*Occurrence, error) + // Creates new occurrences in batch. + BatchCreateOccurrences(context.Context, *BatchCreateOccurrencesRequest) (*BatchCreateOccurrencesResponse, error) + // Updates the specified occurrence. + UpdateOccurrence(context.Context, *UpdateOccurrenceRequest) (*Occurrence, error) + // Gets the note attached to the specified occurrence. Consumer projects can + // use this method to get a note that belongs to a provider project. + GetOccurrenceNote(context.Context, *GetOccurrenceNoteRequest) (*Note, error) + // Gets the specified note. + GetNote(context.Context, *GetNoteRequest) (*Note, error) + // Lists notes for the specified project. + ListNotes(context.Context, *ListNotesRequest) (*ListNotesResponse, error) + // Deletes the specified note. + DeleteNote(context.Context, *DeleteNoteRequest) (*empty.Empty, error) + // Creates a new note. + CreateNote(context.Context, *CreateNoteRequest) (*Note, error) + // Creates new notes in batch. + BatchCreateNotes(context.Context, *BatchCreateNotesRequest) (*BatchCreateNotesResponse, error) + // Updates the specified note. + UpdateNote(context.Context, *UpdateNoteRequest) (*Note, error) + // Lists occurrences referencing the specified note. Provider projects can use + // this method to get all occurrences across consumer projects referencing the + // specified note. + ListNoteOccurrences(context.Context, *ListNoteOccurrencesRequest) (*ListNoteOccurrencesResponse, error) + // Gets a summary of the number and severity of occurrences. 
+ GetVulnerabilityOccurrencesSummary(context.Context, *GetVulnerabilityOccurrencesSummaryRequest) (*VulnerabilityOccurrencesSummary, error) +} + +func RegisterGrafeasV1Beta1Server(s *grpc.Server, srv GrafeasV1Beta1Server) { + s.RegisterService(&_GrafeasV1Beta1_serviceDesc, srv) +} + +func _GrafeasV1Beta1_GetOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).GetOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/GetOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).GetOccurrence(ctx, req.(*GetOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_ListOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).ListOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/ListOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).ListOccurrences(ctx, req.(*ListOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_DeleteOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).DeleteOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/DeleteOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).DeleteOccurrence(ctx, req.(*DeleteOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_CreateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).CreateOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/CreateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).CreateOccurrence(ctx, req.(*CreateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_BatchCreateOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).BatchCreateOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/grafeas.v1beta1.GrafeasV1Beta1/BatchCreateOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).BatchCreateOccurrences(ctx, req.(*BatchCreateOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_UpdateOccurrence_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateOccurrenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).UpdateOccurrence(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/UpdateOccurrence", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).UpdateOccurrence(ctx, req.(*UpdateOccurrenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_GetOccurrenceNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOccurrenceNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).GetOccurrenceNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/GetOccurrenceNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).GetOccurrenceNote(ctx, req.(*GetOccurrenceNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_GetNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).GetNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/GetNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).GetNote(ctx, req.(*GetNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_ListNotes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNotesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).ListNotes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/ListNotes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).ListNotes(ctx, req.(*ListNotesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_DeleteNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).DeleteNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/DeleteNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { + return srv.(GrafeasV1Beta1Server).DeleteNote(ctx, req.(*DeleteNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_CreateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).CreateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/CreateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).CreateNote(ctx, req.(*CreateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_BatchCreateNotes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateNotesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).BatchCreateNotes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/BatchCreateNotes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).BatchCreateNotes(ctx, req.(*BatchCreateNotesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_UpdateNote_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNoteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).UpdateNote(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/UpdateNote", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).UpdateNote(ctx, req.(*UpdateNoteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_ListNoteOccurrences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNoteOccurrencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).ListNoteOccurrences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/ListNoteOccurrences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GrafeasV1Beta1Server).ListNoteOccurrences(ctx, req.(*ListNoteOccurrencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GrafeasV1Beta1_GetVulnerabilityOccurrencesSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVulnerabilityOccurrencesSummaryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GrafeasV1Beta1Server).GetVulnerabilityOccurrencesSummary(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grafeas.v1beta1.GrafeasV1Beta1/GetVulnerabilityOccurrencesSummary", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(GrafeasV1Beta1Server).GetVulnerabilityOccurrencesSummary(ctx, req.(*GetVulnerabilityOccurrencesSummaryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GrafeasV1Beta1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grafeas.v1beta1.GrafeasV1Beta1", + HandlerType: (*GrafeasV1Beta1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetOccurrence", + Handler: _GrafeasV1Beta1_GetOccurrence_Handler, + }, + { + MethodName: "ListOccurrences", + Handler: _GrafeasV1Beta1_ListOccurrences_Handler, + }, + { + MethodName: "DeleteOccurrence", + Handler: _GrafeasV1Beta1_DeleteOccurrence_Handler, + }, + { + MethodName: "CreateOccurrence", + Handler: _GrafeasV1Beta1_CreateOccurrence_Handler, + }, + { + MethodName: "BatchCreateOccurrences", + Handler: _GrafeasV1Beta1_BatchCreateOccurrences_Handler, + }, + { + MethodName: "UpdateOccurrence", + Handler: _GrafeasV1Beta1_UpdateOccurrence_Handler, + }, + { + MethodName: "GetOccurrenceNote", + Handler: _GrafeasV1Beta1_GetOccurrenceNote_Handler, + }, + { + MethodName: "GetNote", + Handler: _GrafeasV1Beta1_GetNote_Handler, + }, + { + MethodName: "ListNotes", + Handler: _GrafeasV1Beta1_ListNotes_Handler, + }, + { + MethodName: "DeleteNote", + Handler: _GrafeasV1Beta1_DeleteNote_Handler, + }, + { + MethodName: "CreateNote", + Handler: _GrafeasV1Beta1_CreateNote_Handler, + }, + { + MethodName: "BatchCreateNotes", + Handler: _GrafeasV1Beta1_BatchCreateNotes_Handler, + }, + { + MethodName: "UpdateNote", + Handler: _GrafeasV1Beta1_UpdateNote_Handler, + }, + { + MethodName: "ListNoteOccurrences", + Handler: _GrafeasV1Beta1_ListNoteOccurrences_Handler, + }, + { + MethodName: "GetVulnerabilityOccurrencesSummary", + Handler: _GrafeasV1Beta1_GetVulnerabilityOccurrencesSummary_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto", +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto", fileDescriptor_grafeas_0102e4ff7bfb77b0) +} + +var fileDescriptor_grafeas_0102e4ff7bfb77b0 = []byte{ + // 1920 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x59, 0xcd, 0x6f, 0x1b, 0xc7, + 0x15, 0xf7, 0x52, 0x9f, 0x7c, 0xd4, 0x07, 0x35, 0x75, 0xac, 0x2d, 0x9d, 0xc4, 0xec, 0xc6, 0x75, + 0x24, 0xda, 0x21, 0x6d, 0x39, 0x35, 0x1a, 0xd9, 0x82, 0x11, 0x4a, 0xb2, 0x64, 0xa4, 0x75, 0x8c, + 0xb5, 0x12, 0x14, 0x2d, 0x02, 0x62, 0xc8, 0x1d, 0x51, 0x5b, 0x2d, 0x77, 0xb7, 0xbb, 0x43, 0xc2, + 0x4c, 0x91, 0xa0, 0x28, 0xdc, 0xde, 0x8a, 0x1e, 0x0a, 0xb4, 0xf7, 0x5c, 0xda, 0x3f, 0xa1, 0xe8, + 0xb1, 0xe7, 0xf6, 0xd4, 0x4b, 0x8b, 0x5e, 0xfb, 0x87, 0x14, 0xf3, 0xb1, 0xdc, 0x21, 0x77, 0x97, + 0x5c, 0xc6, 0xfd, 0xb8, 0x88, 0xb3, 0xb3, 0xef, 0xfd, 0xe6, 0xcd, 0x9b, 0xf7, 0xfb, 0xcd, 0x23, + 0x05, 0x8f, 0xbb, 0x9e, 0xd7, 0x75, 0x48, 0xc3, 0x22, 0x03, 0xea, 0x79, 0x4e, 0xd8, 0xe8, 0x78, + 0x2e, 0xc5, 0xb6, 0x4b, 0x02, 0xec, 0x62, 0x67, 0x18, 0xda, 0x61, 0x63, 0x70, 0xaf, 0x4d, 0x28, + 0xbe, 0xd7, 0xe8, 0x06, 0xf8, 0x9c, 0xe0, 0x30, 0xfa, 0xac, 0xfb, 0x81, 0x47, 0x3d, 0xb4, 0x19, + 0x3d, 0x4a, 0xb3, 0xca, 0x9b, 0x12, 0x11, 0xfb, 0x76, 0x03, 0xbb, 0xae, 0x47, 0x31, 0xb5, 0x3d, + 0x57, 0x9a, 0x57, 0x4e, 0xf2, 0xaf, 0x87, 0x29, 0x25, 0xa1, 0xf0, 0x56, 0xc7, 0x12, 0xe8, 0x61, + 0x7e, 0xa0, 0x76, 0xdf, 0x76, 0x2c, 0xf1, 0x57, 0x3a, 0x1f, 0xe4, 0x77, 0xee, 0x78, 0xbd, 0x9e, + 0xe7, 0xca, 0x0f, 0xe9, 0x7e, 0x9c, 0xdf, 0xdd, 0x22, 0xbe, 0xe3, 0x0d, 0x7b, 0xc4, 0xa5, 0xca, + 0x50, 0xc2, 0x1c, 0xce, 0x01, 0x63, 
0x87, 0x1d, 0x6f, 0x40, 0x82, 0x61, 0x3c, 0x9a, 0x3f, 0x0f, + 0x76, 0x0f, 0x77, 0x89, 0xf8, 0x2b, 0x9d, 0xe7, 0x38, 0x7d, 0x1f, 0x77, 0x2e, 0x99, 0xbb, 0xfc, + 0x9c, 0x3f, 0x13, 0x7e, 0xe0, 0x0d, 0x88, 0x8b, 0xdd, 0x0e, 0x51, 0x86, 0x12, 0xe6, 0xa3, 0xfc, + 0x30, 0x83, 0xbe, 0xc3, 0xa6, 0xdb, 0xb6, 0x63, 0xd3, 0xe1, 0xf8, 0x93, 0x04, 0xbb, 0x2e, 0xc1, + 0xf8, 0x53, 0xbb, 0x7f, 0xde, 0x20, 0x3d, 0x7f, 0xf4, 0xb2, 0x3a, 0xf9, 0xf2, 0xdc, 0x26, 0x8e, + 0xd5, 0xea, 0xe1, 0xf0, 0x52, 0x5a, 0xdc, 0x98, 0xb4, 0xa0, 0x76, 0x8f, 0xd5, 0x5e, 0xcf, 0x17, + 0x06, 0xc6, 0x1f, 0x97, 0x01, 0x3e, 0xee, 0x74, 0xfa, 0x41, 0x40, 0xdc, 0x0e, 0x41, 0x08, 0x16, + 0x5d, 0xdc, 0x23, 0xba, 0x56, 0xd5, 0x76, 0x8a, 0x26, 0x1f, 0xa3, 0xef, 0xc0, 0x6a, 0x40, 0x42, + 0xaf, 0x1f, 0x74, 0x88, 0x5e, 0xa8, 0x6a, 0x3b, 0xa5, 0xbd, 0x6f, 0xd6, 0x27, 0x78, 0x52, 0x37, + 0xa5, 0x81, 0x39, 0x32, 0x45, 0xd7, 0xa1, 0xe8, 0x7a, 0x94, 0xb4, 0x38, 0xde, 0x02, 0xc7, 0x5b, + 0x65, 0x13, 0xcf, 0x18, 0xe6, 0x7b, 0xb0, 0x78, 0x69, 0xbb, 0x96, 0xbe, 0x58, 0xd5, 0x76, 0x36, + 0x52, 0xf0, 0x9e, 0x79, 0x94, 0x7c, 0x64, 0xbb, 0x96, 0xc9, 0xcd, 0x50, 0x15, 0x4a, 0x01, 0xe9, + 0x11, 0xcb, 0xe6, 0xa4, 0xd1, 0x97, 0x38, 0x9a, 0x3a, 0x85, 0x1e, 0x42, 0xa9, 0x13, 0x10, 0x4c, + 0x49, 0x8b, 0xed, 0x50, 0x5f, 0xe6, 0x71, 0x56, 0xea, 0x62, 0xfb, 0xf5, 0x68, 0xfb, 0xf5, 0xb3, + 0x68, 0xfb, 0x26, 0x08, 0x73, 0x36, 0xc1, 0x9c, 0xfb, 0xbe, 0x35, 0x72, 0x5e, 0x99, 0xed, 0x2c, + 0xcc, 0xb9, 0xf3, 0x33, 0x58, 0x1f, 0x3b, 0x38, 0x7d, 0x95, 0xbb, 0xdf, 0x4a, 0xec, 0x69, 0xfc, + 0x78, 0x8f, 0x08, 0xc5, 0xb6, 0x13, 0x9e, 0x5e, 0x31, 0xc7, 0xdd, 0xd1, 0x03, 0x58, 0xe2, 0xec, + 0xd6, 0x8b, 0x1c, 0xe7, 0xed, 0x04, 0x8e, 0xe0, 0x7e, 0xec, 0x2f, 0xcc, 0xd1, 0x31, 0xac, 0x5b, + 0x24, 0xb0, 0x07, 0xc4, 0x6a, 0x71, 0x56, 0xe8, 0x90, 0xe1, 0x2f, 0x38, 0x13, 0xfb, 0xaf, 0x49, + 0xb7, 0xa7, 0x6c, 0x1e, 0x3d, 0x81, 0x35, 0xdb, 0x0d, 0x29, 0x76, 0x1c, 0x91, 0xeb, 0x12, 0x47, + 0xa9, 0x26, 0x50, 0x22, 0xea, 0x28, 0x38, 0xaa, 0x1f, 0x3a, 0x06, 0x88, 0x35, 0x42, 0x5f, 0xe3, + 0x28, 0xef, 0x24, 0x50, 0x14, 0x19, 0x89, 0x81, 0x14, 0x47, 0x74, 0x04, 0x10, 0x89, 0x04, 0xb1, + 0xf4, 0x75, 0x0e, 0x63, 0x24, 0x61, 0x46, 0x3a, 0xa2, 0xa2, 0x8c, 0xfc, 0xd0, 0x29, 0x94, 0x14, + 0xd1, 0xd5, 0x37, 0x38, 0xcc, 0xcd, 0x04, 0x8c, 0x2a, 0xcc, 0x31, 0x90, 0xea, 0xda, 0x2c, 0xc2, + 0x8a, 0x25, 0xde, 0x18, 0x7d, 0x58, 0x8d, 0xca, 0x3e, 0x95, 0x37, 0x65, 0x58, 0xe8, 0x07, 0x36, + 0xa7, 0x4c, 0xd1, 0x64, 0x43, 0x74, 0x08, 0x6b, 0x4c, 0x0b, 0x88, 0x4b, 0x5b, 0x17, 0x38, 0xbc, + 0xe0, 0xac, 0x48, 0xcd, 0x6d, 0x2c, 0x29, 0xa7, 0x38, 0xbc, 0x30, 0x4b, 0xd2, 0x8b, 0x3d, 0x18, + 0x7f, 0x59, 0x81, 0x45, 0x46, 0x8f, 0xd4, 0x35, 0x6f, 0xc3, 0x56, 0x78, 0xe1, 0x05, 0xb4, 0x65, + 0x91, 0xb0, 0x13, 0xd8, 0x3e, 0xdf, 0xae, 0x88, 0xa0, 0xcc, 0x5f, 0x1c, 0xc5, 0xf3, 0x68, 0x17, + 0xca, 0x8e, 0xe7, 0x76, 0xc7, 0x6c, 0x05, 0x51, 0x37, 0xd9, 0xbc, 0x6a, 0x3a, 0x27, 0x5f, 0x1f, + 0x31, 0xbe, 0x3a, 0x98, 0x12, 0xab, 0xd5, 0x0f, 0x1c, 0x7d, 0xa9, 0xba, 0xb0, 0x53, 0xda, 0xbb, + 0x9e, 0xa2, 0x1a, 0xdc, 0xe6, 0x93, 0xc0, 0x31, 0x21, 0x18, 0x8d, 0xd1, 0x21, 0x6c, 0x92, 0x97, + 0xbe, 0x1d, 0xf0, 0x8c, 0xe7, 0xe5, 0xf3, 0x46, 0xec, 0x12, 0x71, 0x5a, 0x15, 0x84, 0x95, 0xd7, + 0x11, 0x84, 0xd5, 0xb9, 0x04, 0xe1, 0x0e, 0xa0, 0x68, 0xf3, 0x23, 0x01, 0x0c, 0xf5, 0x62, 0x75, + 0x81, 0x1d, 0x82, 0x7c, 0xf3, 0x4c, 0x0a, 0x61, 0x88, 0xce, 0x26, 0xe5, 0x43, 0xd0, 0xf6, 0xce, + 0x0c, 0xf9, 0xf8, 0x54, 0x7d, 0x4a, 0x8a, 0xc8, 0xfb, 0x91, 0x88, 0x08, 0xfa, 0xbe, 0x99, 0x21, + 0x22, 0x4d, 0xf6, 0x37, 0x96, 0x90, 0x03, 0x80, 0x36, 0x0e, 
0x89, 0xd4, 0x8f, 0xb5, 0x0c, 0x57, + 0xa1, 0x1f, 0x4d, 0x1c, 0xda, 0x8c, 0x1d, 0x45, 0xe6, 0x21, 0xa4, 0xe3, 0x11, 0xac, 0x48, 0x55, + 0x90, 0x44, 0xcd, 0x56, 0x8d, 0xe7, 0xe2, 0xf3, 0xf4, 0x8a, 0x19, 0xb9, 0xa0, 0xd3, 0x48, 0x30, + 0x70, 0xdb, 0x21, 0x92, 0xa2, 0xb7, 0xa6, 0x0b, 0x46, 0x64, 0x1d, 0x6b, 0x06, 0x7b, 0x42, 0x47, + 0x50, 0x1c, 0x09, 0x82, 0xbe, 0x99, 0xc1, 0x75, 0x45, 0x32, 0xa2, 0x11, 0xdb, 0xcd, 0x68, 0x1a, + 0x7d, 0x06, 0x6f, 0x28, 0xc4, 0x6f, 0xe1, 0x3e, 0xbd, 0xf0, 0x02, 0x76, 0x40, 0xe5, 0x8c, 0xd0, + 0x54, 0xf5, 0xf8, 0x30, 0xb2, 0x3e, 0xbd, 0x62, 0x5e, 0x55, 0x5e, 0x8c, 0xe6, 0x9b, 0xcb, 0xb0, + 0x48, 0x87, 0x3e, 0x31, 0x6a, 0x70, 0xf5, 0x84, 0xd0, 0xf8, 0x0a, 0x36, 0xc9, 0x4f, 0xfa, 0x24, + 0xa4, 0x69, 0xec, 0x36, 0x5e, 0x69, 0x70, 0xed, 0x7b, 0x76, 0xa8, 0x58, 0x87, 0x91, 0xf9, 0x35, + 0x58, 0xf6, 0x71, 0xc0, 0xa4, 0x56, 0x38, 0xc8, 0x27, 0x36, 0x7f, 0x6e, 0x3b, 0x94, 0x04, 0x52, + 0x05, 0xe4, 0x13, 0xbb, 0x9d, 0x7d, 0xdc, 0x25, 0xad, 0xd0, 0xfe, 0x5c, 0xdc, 0xce, 0x4b, 0xe6, + 0x2a, 0x9b, 0x78, 0x61, 0x7f, 0x4e, 0xd0, 0x5b, 0x00, 0xfc, 0x25, 0xf5, 0x2e, 0x89, 0xcb, 0x39, + 0x5f, 0x34, 0xb9, 0xf9, 0x19, 0x9b, 0x30, 0x7e, 0xa6, 0xc1, 0x76, 0x22, 0x8c, 0xd0, 0xf7, 0xdc, + 0x90, 0xa0, 0x03, 0x28, 0x79, 0xf1, 0xb4, 0xae, 0x65, 0x30, 0x5f, 0xd9, 0xaf, 0x6a, 0x8f, 0x6e, + 0xc1, 0xa6, 0x4b, 0x5e, 0xd2, 0x96, 0xb2, 0xbc, 0x88, 0x7b, 0x9d, 0x4d, 0x3f, 0x1f, 0x85, 0xf0, + 0x1e, 0x6c, 0x1f, 0x11, 0x87, 0x50, 0x92, 0x2f, 0x71, 0x2e, 0x6c, 0x1f, 0x72, 0x76, 0x27, 0xcd, + 0xb3, 0x12, 0xf7, 0x10, 0x20, 0x0e, 0x4c, 0xf6, 0x3d, 0x53, 0xf7, 0xa1, 0x98, 0x1b, 0xbf, 0xd7, + 0x60, 0xfb, 0x13, 0xae, 0x08, 0xb9, 0xe2, 0x7b, 0xad, 0xc5, 0x14, 0xb1, 0x62, 0x8d, 0x9f, 0xbc, + 0x54, 0x92, 0x62, 0xf5, 0x84, 0xf5, 0x86, 0xdf, 0xc7, 0xe1, 0x65, 0x24, 0x56, 0x6c, 0x6c, 0xdc, + 0x84, 0x8d, 0x13, 0x42, 0x99, 0x1c, 0x4d, 0xcb, 0x5f, 0x1d, 0xf4, 0xb1, 0x22, 0x9d, 0x65, 0xff, + 0x25, 0x94, 0x59, 0x81, 0x30, 0xb3, 0xff, 0x4b, 0x85, 0x5e, 0xc0, 0x96, 0xb2, 0xbe, 0x2c, 0xcd, + 0xdb, 0xb0, 0xc4, 0xf4, 0x38, 0x2a, 0xca, 0x37, 0x52, 0x2f, 0x31, 0x53, 0xd8, 0xe4, 0x2e, 0xc4, + 0x77, 0x61, 0x4b, 0x14, 0xe2, 0xac, 0x94, 0x78, 0xb0, 0x25, 0x4a, 0x50, 0x35, 0xcc, 0xca, 0xc9, + 0x36, 0xac, 0xf0, 0xab, 0xc3, 0xb6, 0xa2, 0xa4, 0xb0, 0xc7, 0xa7, 0x16, 0xda, 0x85, 0x45, 0x36, + 0x92, 0x87, 0x9c, 0xb1, 0x05, 0x6e, 0x62, 0xfc, 0x5a, 0x83, 0x2d, 0x51, 0x83, 0x33, 0x42, 0x1b, + 0x81, 0x16, 0x66, 0x82, 0xbe, 0x5e, 0xad, 0xbd, 0xd2, 0xa0, 0x12, 0x1d, 0x4b, 0x8a, 0x84, 0xa5, + 0x85, 0xf6, 0xdf, 0x28, 0x8e, 0x57, 0x1a, 0x5c, 0x4f, 0x0d, 0xe3, 0x7f, 0x2b, 0x61, 0x7f, 0xd5, + 0x60, 0xbb, 0x89, 0x69, 0xe7, 0x22, 0x2e, 0x8b, 0x99, 0x5c, 0x79, 0x1a, 0x95, 0x70, 0x81, 0x07, + 0x75, 0x3f, 0x11, 0x54, 0x06, 0x20, 0x3f, 0xc2, 0xf0, 0xd8, 0xa5, 0xc1, 0x50, 0x16, 0x78, 0xe5, + 0x63, 0x80, 0x78, 0x92, 0xf5, 0xaa, 0x97, 0x64, 0x28, 0x57, 0x63, 0x43, 0xc6, 0x96, 0x01, 0x76, + 0xfa, 0x33, 0xaa, 0x42, 0xd8, 0xec, 0x17, 0xbe, 0xab, 0x19, 0x27, 0xa0, 0x27, 0x57, 0xff, 0x1a, + 0xd4, 0x33, 0x06, 0xf0, 0x96, 0x02, 0x34, 0xc7, 0x5d, 0x37, 0x71, 0x70, 0x85, 0xf9, 0x0e, 0xce, + 0x68, 0xc1, 0xdb, 0x59, 0xeb, 0xfe, 0x47, 0x2a, 0xc3, 0xf8, 0x11, 0xec, 0x9e, 0x10, 0x3a, 0xd6, + 0xb9, 0x29, 0xab, 0xbc, 0xe8, 0xf7, 0x7a, 0x38, 0x18, 0x7e, 0x4d, 0xb9, 0x34, 0xfe, 0x59, 0x80, + 0x1b, 0x33, 0xa0, 0xd1, 0x67, 0xb0, 0xdc, 0xf1, 0xfa, 0x2e, 0x8d, 0x42, 0x3f, 0x4e, 0x84, 0x3e, + 0x03, 0xa1, 0xfe, 0xc4, 0x7e, 0xc9, 0x5a, 0xac, 0x33, 0x8f, 0x62, 0xa7, 0x39, 0x3c, 0xb2, 0xbb, + 0x24, 0xa4, 0xa6, 0x04, 0xad, 0xfc, 0x5d, 0x83, 0xab, 0x69, 0x06, 0x63, 0xbf, 0x20, 
0x68, 0xf9, + 0x7f, 0x41, 0x38, 0x84, 0xd5, 0x90, 0x0c, 0x08, 0x6f, 0xba, 0x0a, 0xfc, 0x8b, 0xc7, 0xbb, 0x33, + 0xba, 0xe2, 0x17, 0xd2, 0xdc, 0x1c, 0x39, 0xa2, 0x77, 0x60, 0xfd, 0x5c, 0xc4, 0xd4, 0xe2, 0x61, + 0x72, 0xb5, 0x58, 0x30, 0xd7, 0xe4, 0xe4, 0x21, 0x9b, 0x43, 0x37, 0xa0, 0x44, 0x59, 0xc4, 0xd2, + 0x64, 0x91, 0x9b, 0x00, 0x9f, 0xe2, 0x06, 0x7b, 0x7f, 0xde, 0x82, 0x8d, 0x13, 0xb1, 0xf4, 0xa7, + 0xf7, 0x9a, 0x6c, 0x65, 0xf4, 0x0b, 0x0d, 0xd6, 0xc7, 0x2e, 0x45, 0xf4, 0xed, 0x44, 0x74, 0x69, + 0x9d, 0x5d, 0x65, 0x5a, 0xc1, 0x18, 0x77, 0x7f, 0xfe, 0xb7, 0x7f, 0xfd, 0xa6, 0x50, 0x43, 0x3b, + 0xa3, 0xdf, 0x88, 0x7e, 0xca, 0x84, 0xf0, 0xc0, 0x0f, 0xbc, 0x1f, 0x93, 0x0e, 0x0d, 0x1b, 0xb5, + 0x86, 0x52, 0x52, 0x8d, 0xda, 0x17, 0xe8, 0xb7, 0x1a, 0x6c, 0x4e, 0x74, 0x63, 0x28, 0x99, 0xa7, + 0xf4, 0xb6, 0xb1, 0xb2, 0x33, 0xdb, 0x50, 0xd4, 0x7e, 0x5a, 0x60, 0xa2, 0x22, 0x95, 0xd0, 0xbe, + 0x50, 0x63, 0x43, 0xbf, 0xd4, 0xa0, 0x3c, 0xd9, 0xa4, 0xa1, 0xe4, 0x82, 0x19, 0x7d, 0x5c, 0xe5, + 0x5a, 0xe2, 0x56, 0x39, 0xee, 0xf9, 0x74, 0x18, 0x05, 0x52, 0xcb, 0x9f, 0xa1, 0xdf, 0x69, 0x50, + 0x9e, 0x24, 0x75, 0x4a, 0x20, 0x19, 0x1d, 0xe2, 0xf4, 0xf3, 0x7a, 0xc4, 0xa3, 0x79, 0x60, 0xe4, + 0x4e, 0xcb, 0xbe, 0xda, 0xba, 0xfd, 0x49, 0x83, 0x6b, 0xe9, 0x9a, 0x83, 0xea, 0xd3, 0xb4, 0x3d, + 0xe5, 0x24, 0x1b, 0xb9, 0xed, 0xe5, 0x81, 0x3e, 0xe6, 0x91, 0x7f, 0x60, 0xbc, 0x9f, 0x3b, 0xf2, + 0x76, 0x0c, 0xb8, 0xaf, 0xd5, 0x78, 0x5a, 0x27, 0x9b, 0xdc, 0x94, 0xb4, 0x66, 0xf4, 0xc1, 0xb9, + 0xd2, 0xba, 0x97, 0xfb, 0x90, 0xc7, 0xd2, 0xfa, 0x2b, 0x0d, 0xb6, 0x12, 0xfd, 0x2a, 0xda, 0x9d, + 0x4e, 0x4f, 0xa5, 0x4b, 0xaa, 0xa4, 0x5f, 0x50, 0xc6, 0x03, 0x1e, 0xd5, 0x5d, 0x54, 0xcf, 0x1b, + 0x55, 0x43, 0x34, 0x93, 0x3d, 0x58, 0x91, 0x4d, 0x36, 0xba, 0x91, 0x16, 0x44, 0x8e, 0xa5, 0x6b, + 0x7c, 0xe9, 0x9b, 0xc8, 0xc8, 0x5e, 0x9a, 0xaf, 0xc5, 0xea, 0xfd, 0x4b, 0x28, 0x8e, 0xba, 0x5f, + 0xf4, 0xad, 0x54, 0x86, 0xab, 0xcd, 0x41, 0xc5, 0x98, 0x66, 0x22, 0xab, 0x25, 0x65, 0xfd, 0x94, + 0x6a, 0x11, 0xdb, 0xa5, 0x00, 0x71, 0x4f, 0x8c, 0x8c, 0x0c, 0xc6, 0xab, 0x9b, 0xce, 0xe2, 0xba, + 0x5c, 0xb5, 0x96, 0x67, 0xd7, 0x43, 0x80, 0xb8, 0xf5, 0x48, 0x59, 0x35, 0xd1, 0x7d, 0x67, 0xa5, + 0x5a, 0x0a, 0x8c, 0x91, 0x63, 0xab, 0xfb, 0xa2, 0x2b, 0xfe, 0x4a, 0x83, 0xf2, 0x64, 0xef, 0x93, + 0xc2, 0x84, 0x8c, 0xe6, 0xac, 0xb2, 0x9b, 0xc3, 0x52, 0x1e, 0xc3, 0x07, 0x3c, 0xb6, 0xfb, 0x46, + 0x3d, 0x47, 0x6c, 0x13, 0x74, 0x1d, 0x02, 0xc4, 0x5f, 0x07, 0x52, 0xf2, 0x93, 0xf8, 0xae, 0x30, + 0x23, 0x3f, 0x7b, 0x39, 0x0e, 0x45, 0xe6, 0xe7, 0x0f, 0x1a, 0x7c, 0x23, 0xa5, 0xe3, 0x46, 0xb7, + 0x33, 0x0b, 0x2f, 0x45, 0xe0, 0xee, 0xe4, 0x33, 0x96, 0x89, 0xca, 0x41, 0xd5, 0x28, 0xc8, 0xb1, + 0x4b, 0xeb, 0x1f, 0x1a, 0x18, 0xb3, 0x9b, 0x34, 0xb4, 0x9f, 0x46, 0xe3, 0x7c, 0x9d, 0x5d, 0xe5, + 0xee, 0xbc, 0x5d, 0x97, 0x71, 0xcc, 0x37, 0xf3, 0x18, 0x1d, 0xe4, 0x96, 0xea, 0xb1, 0xb6, 0x48, + 0xc2, 0x34, 0x07, 0x80, 0x6c, 0x6f, 0x72, 0xf1, 0xe7, 0xda, 0x0f, 0x7f, 0x20, 0xc9, 0xd7, 0xf5, + 0x1c, 0xec, 0x76, 0xeb, 0x5e, 0xd0, 0x6d, 0x74, 0x89, 0xcb, 0xa9, 0xd8, 0x10, 0xaf, 0xb0, 0x6f, + 0x87, 0x73, 0xfc, 0x57, 0xf5, 0xa1, 0xfc, 0xfc, 0xaa, 0xb0, 0x70, 0x62, 0x7e, 0xd8, 0x5e, 0xe6, + 0x50, 0xf7, 0xff, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x58, 0x08, 0x46, 0x82, 0x9f, 0x1d, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image/image.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image/image.pb.go new file mode 100644 index 0000000..f4d66ac --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image/image.pb.go @@ -0,0 +1,442 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/image/image.proto + +package image // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/image" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Instructions from Dockerfile. +type Layer_Directive int32 + +const ( + // Default value for unsupported/missing directive. + Layer_DIRECTIVE_UNSPECIFIED Layer_Directive = 0 + // https://docs.docker.com/reference/builder/#maintainer + Layer_MAINTAINER Layer_Directive = 1 + // https://docs.docker.com/reference/builder/#run + Layer_RUN Layer_Directive = 2 + // https://docs.docker.com/reference/builder/#cmd + Layer_CMD Layer_Directive = 3 + // https://docs.docker.com/reference/builder/#label + Layer_LABEL Layer_Directive = 4 + // https://docs.docker.com/reference/builder/#expose + Layer_EXPOSE Layer_Directive = 5 + // https://docs.docker.com/reference/builder/#env + Layer_ENV Layer_Directive = 6 + // https://docs.docker.com/reference/builder/#add + Layer_ADD Layer_Directive = 7 + // https://docs.docker.com/reference/builder/#copy + Layer_COPY Layer_Directive = 8 + // https://docs.docker.com/reference/builder/#entrypoint + Layer_ENTRYPOINT Layer_Directive = 9 + // https://docs.docker.com/reference/builder/#volume + Layer_VOLUME Layer_Directive = 10 + // https://docs.docker.com/reference/builder/#user + Layer_USER Layer_Directive = 11 + // https://docs.docker.com/reference/builder/#workdir + Layer_WORKDIR Layer_Directive = 12 + // https://docs.docker.com/reference/builder/#arg + Layer_ARG Layer_Directive = 13 + // https://docs.docker.com/reference/builder/#onbuild + Layer_ONBUILD Layer_Directive = 14 + // https://docs.docker.com/reference/builder/#stopsignal + Layer_STOPSIGNAL Layer_Directive = 15 + // https://docs.docker.com/reference/builder/#healthcheck + Layer_HEALTHCHECK Layer_Directive = 16 + // https://docs.docker.com/reference/builder/#shell + Layer_SHELL Layer_Directive = 17 +) + +var Layer_Directive_name = map[int32]string{ + 0: "DIRECTIVE_UNSPECIFIED", + 1: "MAINTAINER", + 2: "RUN", + 3: "CMD", + 4: "LABEL", + 5: "EXPOSE", + 6: "ENV", + 7: "ADD", + 8: "COPY", + 9: "ENTRYPOINT", + 10: "VOLUME", + 11: "USER", + 12: "WORKDIR", + 13: "ARG", + 14: "ONBUILD", + 15: "STOPSIGNAL", + 16: "HEALTHCHECK", + 17: "SHELL", +} +var Layer_Directive_value = map[string]int32{ + "DIRECTIVE_UNSPECIFIED": 0, + "MAINTAINER": 1, + "RUN": 2, + "CMD": 3, + "LABEL": 4, + "EXPOSE": 5, + "ENV": 6, + "ADD": 7, + "COPY": 8, + "ENTRYPOINT": 9, + "VOLUME": 10, + "USER": 11, + "WORKDIR": 12, + "ARG": 13, + "ONBUILD": 14, + "STOPSIGNAL": 15, + "HEALTHCHECK": 16, + "SHELL": 17, +} + +func (x Layer_Directive) String() string { + return proto.EnumName(Layer_Directive_name, int32(x)) +} +func (Layer_Directive) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{0, 0} +} + +// Layer holds metadata 
specific to a layer of a Docker image. +type Layer struct { + // The recovered Dockerfile directive used to construct this layer. + Directive Layer_Directive `protobuf:"varint,1,opt,name=directive,proto3,enum=grafeas.v1beta1.image.Layer_Directive" json:"directive,omitempty"` + // The recovered arguments to the Dockerfile directive. + Arguments string `protobuf:"bytes,2,opt,name=arguments,proto3" json:"arguments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Layer) Reset() { *m = Layer{} } +func (m *Layer) String() string { return proto.CompactTextString(m) } +func (*Layer) ProtoMessage() {} +func (*Layer) Descriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{0} +} +func (m *Layer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Layer.Unmarshal(m, b) +} +func (m *Layer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Layer.Marshal(b, m, deterministic) +} +func (dst *Layer) XXX_Merge(src proto.Message) { + xxx_messageInfo_Layer.Merge(dst, src) +} +func (m *Layer) XXX_Size() int { + return xxx_messageInfo_Layer.Size(m) +} +func (m *Layer) XXX_DiscardUnknown() { + xxx_messageInfo_Layer.DiscardUnknown(m) +} + +var xxx_messageInfo_Layer proto.InternalMessageInfo + +func (m *Layer) GetDirective() Layer_Directive { + if m != nil { + return m.Directive + } + return Layer_DIRECTIVE_UNSPECIFIED +} + +func (m *Layer) GetArguments() string { + if m != nil { + return m.Arguments + } + return "" +} + +// A set of properties that uniquely identify a given Docker image. +type Fingerprint struct { + // The layer-id of the final layer in the Docker image's v1 representation. + V1Name string `protobuf:"bytes,1,opt,name=v1_name,json=v1Name,proto3" json:"v1_name,omitempty"` + // The ordered list of v2 blobs that represent a given image. + V2Blob []string `protobuf:"bytes,2,rep,name=v2_blob,json=v2Blob,proto3" json:"v2_blob,omitempty"` + // Output only. The name of the image's v2 blobs computed via: + // [bottom] := v2_blob[bottom] + // [N] := sha256(v2_blob[N] + " " + v2_name[N+1]) + // Only the name of the final blob is kept. 
+ V2Name string `protobuf:"bytes,3,opt,name=v2_name,json=v2Name,proto3" json:"v2_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Fingerprint) Reset() { *m = Fingerprint{} } +func (m *Fingerprint) String() string { return proto.CompactTextString(m) } +func (*Fingerprint) ProtoMessage() {} +func (*Fingerprint) Descriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{1} +} +func (m *Fingerprint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Fingerprint.Unmarshal(m, b) +} +func (m *Fingerprint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Fingerprint.Marshal(b, m, deterministic) +} +func (dst *Fingerprint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Fingerprint.Merge(dst, src) +} +func (m *Fingerprint) XXX_Size() int { + return xxx_messageInfo_Fingerprint.Size(m) +} +func (m *Fingerprint) XXX_DiscardUnknown() { + xxx_messageInfo_Fingerprint.DiscardUnknown(m) +} + +var xxx_messageInfo_Fingerprint proto.InternalMessageInfo + +func (m *Fingerprint) GetV1Name() string { + if m != nil { + return m.V1Name + } + return "" +} + +func (m *Fingerprint) GetV2Blob() []string { + if m != nil { + return m.V2Blob + } + return nil +} + +func (m *Fingerprint) GetV2Name() string { + if m != nil { + return m.V2Name + } + return "" +} + +// Basis describes the base image portion (Note) of the DockerImage +// relationship. Linked occurrences are derived from this or an +// equivalent image via: +// FROM +// Or an equivalent reference, e.g. a tag of the resource_url. +type Basis struct { + // The resource_url for the resource representing the basis of + // associated occurrence images. + ResourceUrl string `protobuf:"bytes,1,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` + // The fingerprint of the base image. + Fingerprint *Fingerprint `protobuf:"bytes,2,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Basis) Reset() { *m = Basis{} } +func (m *Basis) String() string { return proto.CompactTextString(m) } +func (*Basis) ProtoMessage() {} +func (*Basis) Descriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{2} +} +func (m *Basis) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Basis.Unmarshal(m, b) +} +func (m *Basis) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Basis.Marshal(b, m, deterministic) +} +func (dst *Basis) XXX_Merge(src proto.Message) { + xxx_messageInfo_Basis.Merge(dst, src) +} +func (m *Basis) XXX_Size() int { + return xxx_messageInfo_Basis.Size(m) +} +func (m *Basis) XXX_DiscardUnknown() { + xxx_messageInfo_Basis.DiscardUnknown(m) +} + +var xxx_messageInfo_Basis proto.InternalMessageInfo + +func (m *Basis) GetResourceUrl() string { + if m != nil { + return m.ResourceUrl + } + return "" +} + +func (m *Basis) GetFingerprint() *Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +// Details of an image occurrence. +type Details struct { + // The child image derived from the base image. 
+ DerivedImage *Derived `protobuf:"bytes,1,opt,name=derived_image,json=derivedImage,proto3" json:"derived_image,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{3} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetDerivedImage() *Derived { + if m != nil { + return m.DerivedImage + } + return nil +} + +// Derived describes the derived image portion (Occurrence) of the DockerImage +// relationship. This image would be produced from a Dockerfile with FROM +// . +type Derived struct { + // The fingerprint of the derived image. + Fingerprint *Fingerprint `protobuf:"bytes,1,opt,name=fingerprint,proto3" json:"fingerprint,omitempty"` + // Output only. The number of layers by which this image differs from the + // associated image basis. + Distance int32 `protobuf:"varint,2,opt,name=distance,proto3" json:"distance,omitempty"` + // This contains layer-specific metadata, if populated it has length + // "distance" and is ordered with [distance] being the layer immediately + // following the base image and [1] being the final layer. + LayerInfo []*Layer `protobuf:"bytes,3,rep,name=layer_info,json=layerInfo,proto3" json:"layer_info,omitempty"` + // Output only. This contains the base image URL for the derived image + // occurrence. 
+ BaseResourceUrl string `protobuf:"bytes,4,opt,name=base_resource_url,json=baseResourceUrl,proto3" json:"base_resource_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Derived) Reset() { *m = Derived{} } +func (m *Derived) String() string { return proto.CompactTextString(m) } +func (*Derived) ProtoMessage() {} +func (*Derived) Descriptor() ([]byte, []int) { + return fileDescriptor_image_c96c11ee6530869e, []int{4} +} +func (m *Derived) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Derived.Unmarshal(m, b) +} +func (m *Derived) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Derived.Marshal(b, m, deterministic) +} +func (dst *Derived) XXX_Merge(src proto.Message) { + xxx_messageInfo_Derived.Merge(dst, src) +} +func (m *Derived) XXX_Size() int { + return xxx_messageInfo_Derived.Size(m) +} +func (m *Derived) XXX_DiscardUnknown() { + xxx_messageInfo_Derived.DiscardUnknown(m) +} + +var xxx_messageInfo_Derived proto.InternalMessageInfo + +func (m *Derived) GetFingerprint() *Fingerprint { + if m != nil { + return m.Fingerprint + } + return nil +} + +func (m *Derived) GetDistance() int32 { + if m != nil { + return m.Distance + } + return 0 +} + +func (m *Derived) GetLayerInfo() []*Layer { + if m != nil { + return m.LayerInfo + } + return nil +} + +func (m *Derived) GetBaseResourceUrl() string { + if m != nil { + return m.BaseResourceUrl + } + return "" +} + +func init() { + proto.RegisterType((*Layer)(nil), "grafeas.v1beta1.image.Layer") + proto.RegisterType((*Fingerprint)(nil), "grafeas.v1beta1.image.Fingerprint") + proto.RegisterType((*Basis)(nil), "grafeas.v1beta1.image.Basis") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.image.Details") + proto.RegisterType((*Derived)(nil), "grafeas.v1beta1.image.Derived") + proto.RegisterEnum("grafeas.v1beta1.image.Layer_Directive", Layer_Directive_name, Layer_Directive_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/image/image.proto", fileDescriptor_image_c96c11ee6530869e) +} + +var fileDescriptor_image_c96c11ee6530869e = []byte{ + // 613 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0xcf, 0x6e, 0xda, 0x40, + 0x10, 0xc6, 0x6b, 0x08, 0x10, 0x8f, 0xf3, 0x67, 0xb3, 0x52, 0x54, 0x5a, 0x45, 0x15, 0xe5, 0x50, + 0x45, 0x3d, 0x18, 0x41, 0x8f, 0x39, 0x01, 0xde, 0x04, 0x2b, 0x8e, 0x8d, 0x16, 0x93, 0x26, 0xbd, + 0x58, 0x0b, 0x2c, 0xd6, 0x4a, 0xc6, 0x8b, 0xd6, 0x0e, 0x6a, 0x5e, 0xa7, 0xcf, 0xd4, 0x73, 0x9f, + 0xa1, 0x8f, 0x50, 0x79, 0x21, 0x21, 0xad, 0x92, 0xaa, 0xea, 0x65, 0xb5, 0xcc, 0x37, 0xbf, 0x8f, + 0xf1, 0xec, 0x0c, 0x9c, 0xc5, 0x52, 0xc6, 0x09, 0x6f, 0xcd, 0xf8, 0x2a, 0x97, 0x32, 0xc9, 0x5a, + 0x53, 0x99, 0xe6, 0x4c, 0xa4, 0x5c, 0xb1, 0x94, 0x25, 0xf7, 0x99, 0xc8, 0x5a, 0xab, 0xf6, 0x84, + 0xe7, 0xac, 0xdd, 0x12, 0x0b, 0x16, 0xf3, 0xf5, 0x69, 0x2f, 0x95, 0xcc, 0x25, 0x3e, 0x8e, 0x15, + 0x9b, 0x73, 0x96, 0xd9, 0x9b, 0x14, 0x5b, 0x8b, 0xcd, 0x1f, 0x25, 0xa8, 0x78, 0xec, 0x9e, 0x2b, + 0xec, 0x80, 0x39, 0x13, 0x8a, 0x4f, 0x73, 0xb1, 0xe2, 0x75, 0xa3, 0x61, 0x9c, 0x1e, 0x74, 0x3e, + 0xd8, 0xcf, 0x42, 0xb6, 0x06, 0x6c, 0xe7, 0x21, 0x9b, 0x6e, 0x41, 0x7c, 0x02, 0x26, 0x53, 0xf1, + 0xdd, 0x82, 0xa7, 0x79, 0x56, 0x2f, 0x35, 0x8c, 0x53, 0x93, 0x6e, 0x03, 0xcd, 0x9f, 0x06, 0x98, + 0x8f, 0x18, 0x7e, 0x03, 0xc7, 0x8e, 0x4b, 0x49, 0x3f, 0x74, 0xaf, 0x49, 0x34, 0xf6, 0x47, 0x43, + 0xd2, 0x77, 0xcf, 0x5d, 0xe2, 0xa0, 
0x57, 0xf8, 0x00, 0xe0, 0xaa, 0xeb, 0xfa, 0x61, 0xd7, 0xf5, + 0x09, 0x45, 0x06, 0xae, 0x41, 0x99, 0x8e, 0x7d, 0x54, 0x2a, 0x2e, 0xfd, 0x2b, 0x07, 0x95, 0xb1, + 0x09, 0x15, 0xaf, 0xdb, 0x23, 0x1e, 0xda, 0xc1, 0x00, 0x55, 0x72, 0x33, 0x0c, 0x46, 0x04, 0x55, + 0x0a, 0x9d, 0xf8, 0xd7, 0xa8, 0x5a, 0x5c, 0xba, 0x8e, 0x83, 0x6a, 0x78, 0x17, 0x76, 0xfa, 0xc1, + 0xf0, 0x16, 0xed, 0x16, 0xa6, 0xc4, 0x0f, 0xe9, 0xed, 0x30, 0x70, 0xfd, 0x10, 0x99, 0x05, 0x77, + 0x1d, 0x78, 0xe3, 0x2b, 0x82, 0xa0, 0xc8, 0x1a, 0x8f, 0x08, 0x45, 0x16, 0xb6, 0xa0, 0xf6, 0x39, + 0xa0, 0x97, 0x8e, 0x4b, 0xd1, 0x9e, 0x76, 0xa1, 0x17, 0x68, 0xbf, 0x88, 0x06, 0x7e, 0x6f, 0xec, + 0x7a, 0x0e, 0x3a, 0x28, 0x8c, 0x46, 0x61, 0x30, 0x1c, 0xb9, 0x17, 0x7e, 0xd7, 0x43, 0x87, 0xf8, + 0x10, 0xac, 0x01, 0xe9, 0x7a, 0xe1, 0xa0, 0x3f, 0x20, 0xfd, 0x4b, 0x84, 0x8a, 0xe2, 0x46, 0x03, + 0xe2, 0x79, 0xe8, 0xa8, 0x79, 0x03, 0xd6, 0xb9, 0x48, 0x63, 0xae, 0x96, 0x4a, 0xa4, 0x39, 0x7e, + 0x0d, 0xb5, 0x55, 0x3b, 0x4a, 0xd9, 0x62, 0xdd, 0x63, 0x93, 0x56, 0x57, 0x6d, 0x9f, 0x2d, 0xb8, + 0x16, 0x3a, 0xd1, 0x24, 0x91, 0x93, 0x7a, 0xa9, 0x51, 0xd6, 0x42, 0xa7, 0x97, 0xc8, 0xc9, 0x46, + 0xd0, 0x44, 0x79, 0x43, 0x74, 0x0a, 0xa2, 0xb9, 0x84, 0x4a, 0x8f, 0x65, 0x22, 0xc3, 0xef, 0x61, + 0x4f, 0xf1, 0x4c, 0xde, 0xa9, 0x29, 0x8f, 0xee, 0x54, 0xb2, 0x31, 0xb6, 0x1e, 0x62, 0x63, 0x95, + 0x60, 0x07, 0xac, 0xf9, 0xb6, 0x0a, 0xfd, 0x30, 0x56, 0xa7, 0xf9, 0xc2, 0xf3, 0x3e, 0xa9, 0x97, + 0x3e, 0xc5, 0x9a, 0x3e, 0xd4, 0x1c, 0x9e, 0x33, 0x91, 0x64, 0xb8, 0x0f, 0xfb, 0x33, 0xae, 0xc4, + 0x8a, 0xcf, 0x22, 0x0d, 0xe9, 0x3f, 0xb5, 0x3a, 0xef, 0x5e, 0xb0, 0x74, 0xd6, 0xb9, 0x74, 0x6f, + 0x03, 0xb9, 0x7a, 0xf8, 0xbe, 0x1b, 0x85, 0xa1, 0x0e, 0xfc, 0x59, 0xa1, 0xf1, 0x5f, 0x15, 0xe2, + 0xb7, 0xb0, 0x3b, 0x13, 0x59, 0xce, 0xd2, 0x29, 0xd7, 0x1f, 0x59, 0xa1, 0x8f, 0xbf, 0xf1, 0x19, + 0x40, 0x52, 0x0c, 0x6e, 0x24, 0xd2, 0xb9, 0xac, 0x97, 0x1b, 0xe5, 0x53, 0xab, 0x73, 0xf2, 0xb7, + 0x09, 0xa7, 0xa6, 0xce, 0x77, 0xd3, 0xb9, 0xc4, 0x1f, 0xe1, 0x68, 0xc2, 0x32, 0x1e, 0xfd, 0xd6, + 0xe8, 0x1d, 0xdd, 0xe8, 0xc3, 0x42, 0xa0, 0xdb, 0x66, 0xf7, 0xbe, 0x42, 0x5d, 0xc8, 0xe7, 0x8d, + 0x87, 0xc6, 0x97, 0x70, 0xbd, 0xc5, 0x76, 0x2c, 0x13, 0x96, 0xc6, 0xb6, 0x54, 0x71, 0x2b, 0xe6, + 0xa9, 0x5e, 0xd2, 0xd6, 0x5a, 0x62, 0x4b, 0x91, 0xfd, 0xf3, 0x92, 0x9f, 0xe9, 0xf3, 0x5b, 0xa9, + 0x7c, 0x41, 0xbb, 0x93, 0xaa, 0xb6, 0xf9, 0xf4, 0x2b, 0x00, 0x00, 0xff, 0xff, 0xe8, 0x06, 0xe8, + 0x32, 0x2a, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package/package.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package/package.pb.go new file mode 100644 index 0000000..a93f006 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package/package.pb.go @@ -0,0 +1,499 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/package/package.proto + +package _package // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Instruction set architectures supported by various package managers. +type Architecture int32 + +const ( + // Unknown architecture. + Architecture_ARCHITECTURE_UNSPECIFIED Architecture = 0 + // X86 architecture. + Architecture_X86 Architecture = 1 + // X64 architecture. + Architecture_X64 Architecture = 2 +) + +var Architecture_name = map[int32]string{ + 0: "ARCHITECTURE_UNSPECIFIED", + 1: "X86", + 2: "X64", +} +var Architecture_value = map[string]int32{ + "ARCHITECTURE_UNSPECIFIED": 0, + "X86": 1, + "X64": 2, +} + +func (x Architecture) String() string { + return proto.EnumName(Architecture_name, int32(x)) +} +func (Architecture) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{0} +} + +// Whether this is an ordinary package version or a sentinel MIN/MAX version. +type Version_VersionKind int32 + +const ( + // Unknown. + Version_VERSION_KIND_UNSPECIFIED Version_VersionKind = 0 + // A standard package version, defined by the other fields. + Version_NORMAL Version_VersionKind = 1 + // A special version representing negative infinity, other fields are + // ignored. + Version_MINIMUM Version_VersionKind = 2 + // A special version representing positive infinity, other fields are + // ignored. + Version_MAXIMUM Version_VersionKind = 3 +) + +var Version_VersionKind_name = map[int32]string{ + 0: "VERSION_KIND_UNSPECIFIED", + 1: "NORMAL", + 2: "MINIMUM", + 3: "MAXIMUM", +} +var Version_VersionKind_value = map[string]int32{ + "VERSION_KIND_UNSPECIFIED": 0, + "NORMAL": 1, + "MINIMUM": 2, + "MAXIMUM": 3, +} + +func (x Version_VersionKind) String() string { + return proto.EnumName(Version_VersionKind_name, int32(x)) +} +func (Version_VersionKind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{5, 0} +} + +// This represents a particular channel of distribution for a given package. +// E.g., Debian's jessie-backports dpkg mirror. +type Distribution struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The CPU architecture for which packages in this distribution channel were + // built. + Architecture Architecture `protobuf:"varint,2,opt,name=architecture,proto3,enum=grafeas.v1beta1.package.Architecture" json:"architecture,omitempty"` + // The latest available version of this package in this distribution + // channel. + LatestVersion *Version `protobuf:"bytes,3,opt,name=latest_version,json=latestVersion,proto3" json:"latest_version,omitempty"` + // A freeform string denoting the maintainer of this package. + Maintainer string `protobuf:"bytes,4,opt,name=maintainer,proto3" json:"maintainer,omitempty"` + // The distribution channel-specific homepage for this package. + Url string `protobuf:"bytes,5,opt,name=url,proto3" json:"url,omitempty"` + // The distribution channel-specific description of this package. 
+ Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Distribution) Reset() { *m = Distribution{} } +func (m *Distribution) String() string { return proto.CompactTextString(m) } +func (*Distribution) ProtoMessage() {} +func (*Distribution) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{0} +} +func (m *Distribution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Distribution.Unmarshal(m, b) +} +func (m *Distribution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Distribution.Marshal(b, m, deterministic) +} +func (dst *Distribution) XXX_Merge(src proto.Message) { + xxx_messageInfo_Distribution.Merge(dst, src) +} +func (m *Distribution) XXX_Size() int { + return xxx_messageInfo_Distribution.Size(m) +} +func (m *Distribution) XXX_DiscardUnknown() { + xxx_messageInfo_Distribution.DiscardUnknown(m) +} + +var xxx_messageInfo_Distribution proto.InternalMessageInfo + +func (m *Distribution) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *Distribution) GetArchitecture() Architecture { + if m != nil { + return m.Architecture + } + return Architecture_ARCHITECTURE_UNSPECIFIED +} + +func (m *Distribution) GetLatestVersion() *Version { + if m != nil { + return m.LatestVersion + } + return nil +} + +func (m *Distribution) GetMaintainer() string { + if m != nil { + return m.Maintainer + } + return "" +} + +func (m *Distribution) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Distribution) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// An occurrence of a particular package installation found within a system's +// filesystem. E.g., glibc was found in /var/lib/dpkg/status. +type Location struct { + // The cpe_uri in [cpe format](https://cpe.mitre.org/specification/) + // denoting the package manager version distributing a package. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The version installed at this location. + Version *Version `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + // The path from which we gathered that this package/version is installed. 
+ Path string `protobuf:"bytes,3,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{1} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Location.Unmarshal(m, b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Location.Marshal(b, m, deterministic) +} +func (dst *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(dst, src) +} +func (m *Location) XXX_Size() int { + return xxx_messageInfo_Location.Size(m) +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *Location) GetVersion() *Version { + if m != nil { + return m.Version + } + return nil +} + +func (m *Location) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +// This represents a particular package that is distributed over various +// channels. E.g., glibc (aka libc6) is distributed by many, at various +// versions. +type Package struct { + // The name of the package. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The various channels by which a package is distributed. + Distribution []*Distribution `protobuf:"bytes,10,rep,name=distribution,proto3" json:"distribution,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Package) Reset() { *m = Package{} } +func (m *Package) String() string { return proto.CompactTextString(m) } +func (*Package) ProtoMessage() {} +func (*Package) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{2} +} +func (m *Package) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Package.Unmarshal(m, b) +} +func (m *Package) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Package.Marshal(b, m, deterministic) +} +func (dst *Package) XXX_Merge(src proto.Message) { + xxx_messageInfo_Package.Merge(dst, src) +} +func (m *Package) XXX_Size() int { + return xxx_messageInfo_Package.Size(m) +} +func (m *Package) XXX_DiscardUnknown() { + xxx_messageInfo_Package.DiscardUnknown(m) +} + +var xxx_messageInfo_Package proto.InternalMessageInfo + +func (m *Package) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Package) GetDistribution() []*Distribution { + if m != nil { + return m.Distribution + } + return nil +} + +// Details of a package occurrence. +type Details struct { + // Where the package was installed. 
+ Installation *Installation `protobuf:"bytes,1,opt,name=installation,proto3" json:"installation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{3} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetInstallation() *Installation { + if m != nil { + return m.Installation + } + return nil +} + +// This represents how a particular software package may be installed on a +// system. +type Installation struct { + // Output only. The name of the installed package. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // All of the places within the filesystem versions of this package + // have been found. + Location []*Location `protobuf:"bytes,2,rep,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Installation) Reset() { *m = Installation{} } +func (m *Installation) String() string { return proto.CompactTextString(m) } +func (*Installation) ProtoMessage() {} +func (*Installation) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{4} +} +func (m *Installation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Installation.Unmarshal(m, b) +} +func (m *Installation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Installation.Marshal(b, m, deterministic) +} +func (dst *Installation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Installation.Merge(dst, src) +} +func (m *Installation) XXX_Size() int { + return xxx_messageInfo_Installation.Size(m) +} +func (m *Installation) XXX_DiscardUnknown() { + xxx_messageInfo_Installation.DiscardUnknown(m) +} + +var xxx_messageInfo_Installation proto.InternalMessageInfo + +func (m *Installation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Installation) GetLocation() []*Location { + if m != nil { + return m.Location + } + return nil +} + +// Version contains structured information about the version of a package. +type Version struct { + // Used to correct mistakes in the version numbering scheme. + Epoch int32 `protobuf:"varint,1,opt,name=epoch,proto3" json:"epoch,omitempty"` + // The main part of the version name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The iteration of the package build from the above version. + Revision string `protobuf:"bytes,3,opt,name=revision,proto3" json:"revision,omitempty"` + // Distinguish between sentinel MIN/MAX versions and normal versions. If + // kind is not NORMAL, then the other fields are ignored. 
+ Kind Version_VersionKind `protobuf:"varint,4,opt,name=kind,proto3,enum=grafeas.v1beta1.package.Version_VersionKind" json:"kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Version) Reset() { *m = Version{} } +func (m *Version) String() string { return proto.CompactTextString(m) } +func (*Version) ProtoMessage() {} +func (*Version) Descriptor() ([]byte, []int) { + return fileDescriptor_package_bb5c882585bb63a7, []int{5} +} +func (m *Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Version.Unmarshal(m, b) +} +func (m *Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Version.Marshal(b, m, deterministic) +} +func (dst *Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_Version.Merge(dst, src) +} +func (m *Version) XXX_Size() int { + return xxx_messageInfo_Version.Size(m) +} +func (m *Version) XXX_DiscardUnknown() { + xxx_messageInfo_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_Version proto.InternalMessageInfo + +func (m *Version) GetEpoch() int32 { + if m != nil { + return m.Epoch + } + return 0 +} + +func (m *Version) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Version) GetRevision() string { + if m != nil { + return m.Revision + } + return "" +} + +func (m *Version) GetKind() Version_VersionKind { + if m != nil { + return m.Kind + } + return Version_VERSION_KIND_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Distribution)(nil), "grafeas.v1beta1.package.Distribution") + proto.RegisterType((*Location)(nil), "grafeas.v1beta1.package.Location") + proto.RegisterType((*Package)(nil), "grafeas.v1beta1.package.Package") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.package.Details") + proto.RegisterType((*Installation)(nil), "grafeas.v1beta1.package.Installation") + proto.RegisterType((*Version)(nil), "grafeas.v1beta1.package.Version") + proto.RegisterEnum("grafeas.v1beta1.package.Architecture", Architecture_name, Architecture_value) + proto.RegisterEnum("grafeas.v1beta1.package.Version_VersionKind", Version_VersionKind_name, Version_VersionKind_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/package/package.proto", fileDescriptor_package_bb5c882585bb63a7) +} + +var fileDescriptor_package_bb5c882585bb63a7 = []byte{ + // 575 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xff, 0x6a, 0xd4, 0x40, + 0x10, 0x36, 0x49, 0x7b, 0x69, 0xe7, 0xce, 0x12, 0x16, 0xb1, 0x41, 0x44, 0x62, 0x40, 0x38, 0x44, + 0x12, 0x5a, 0xa5, 0x88, 0xe2, 0x8f, 0xb3, 0x77, 0xd6, 0xd0, 0xde, 0xb5, 0x6e, 0x7b, 0xa5, 0xf8, + 0xcf, 0xb1, 0xcd, 0xad, 0xb9, 0xa5, 0x69, 0x36, 0xec, 0xee, 0x1d, 0xe8, 0x4b, 0xf8, 0x0e, 0x3e, + 0x9b, 0x0f, 0x22, 0xd9, 0x24, 0x47, 0xaa, 0xb6, 0xea, 0x5f, 0x3b, 0xb3, 0x33, 0xdf, 0x37, 0xfb, + 0xcd, 0x24, 0x03, 0x6f, 0x12, 0xce, 0x93, 0x94, 0x86, 0x53, 0xba, 0x50, 0x9c, 0xa7, 0x32, 0x8c, + 0x79, 0xa6, 0x08, 0xcb, 0xa8, 0x20, 0x19, 0x49, 0xbf, 0x48, 0x26, 0xc3, 0xc5, 0xd6, 0x39, 0x55, + 0x64, 0x2b, 0xcc, 0x49, 0x7c, 0x41, 0x12, 0x5a, 0x9f, 0x41, 0x2e, 0xb8, 0xe2, 0x68, 0x33, 0x11, + 0xe4, 0x33, 0x25, 0x32, 0xa8, 0xd2, 0x82, 0x2a, 0xec, 0x7f, 0x33, 0xa1, 0xd3, 0x67, 0x52, 0x09, + 0x76, 0x3e, 0x57, 0x8c, 0x67, 0x68, 0x13, 0xec, 0x38, 0xa7, 0x93, 0xb9, 0x60, 0xae, 0xe1, 0x19, + 0xdd, 0x75, 0xdc, 0x8a, 0x73, 0x3a, 0x16, 0x0c, 0x45, 0xd0, 0x21, 0x22, 0x9e, 0x31, 0x45, 0x63, + 0x35, 0x17, 
0xd4, 0x35, 0x3d, 0xa3, 0xbb, 0xb1, 0xfd, 0x28, 0xb8, 0x86, 0x39, 0xe8, 0x35, 0x92, + 0xf1, 0x15, 0x28, 0xda, 0x83, 0x8d, 0x94, 0x28, 0x2a, 0xd5, 0x64, 0x41, 0x85, 0x64, 0x3c, 0x73, + 0x2d, 0xcf, 0xe8, 0xb6, 0xb7, 0xbd, 0x6b, 0xc9, 0x4e, 0xcb, 0x3c, 0x7c, 0xbb, 0xc4, 0x55, 0x2e, + 0x7a, 0x00, 0x70, 0x49, 0x58, 0xd5, 0x0a, 0x77, 0x45, 0xbf, 0xb7, 0x71, 0x83, 0x1c, 0xb0, 0xe6, + 0x22, 0x75, 0x57, 0x75, 0xa0, 0x30, 0x91, 0x07, 0xed, 0x29, 0x95, 0xb1, 0x60, 0x79, 0xa1, 0xd6, + 0x6d, 0xe9, 0x48, 0xf3, 0xca, 0x97, 0xb0, 0x76, 0xc0, 0x63, 0x72, 0x73, 0x33, 0x5e, 0x80, 0x5d, + 0x3f, 0xdd, 0xfc, 0xc7, 0xa7, 0xd7, 0x00, 0x84, 0x60, 0x25, 0x27, 0x6a, 0xa6, 0x35, 0xaf, 0x63, + 0x6d, 0xfb, 0x33, 0xb0, 0x8f, 0xca, 0xfc, 0x22, 0x9c, 0x91, 0x4b, 0x5a, 0x15, 0xd4, 0x76, 0xd1, + 0xfb, 0x69, 0x63, 0x48, 0x2e, 0x78, 0x56, 0xb7, 0x7d, 0x43, 0xef, 0x9b, 0x13, 0xc5, 0x57, 0xa0, + 0xfe, 0x09, 0xd8, 0x7d, 0xaa, 0x08, 0x4b, 0x65, 0xc1, 0xca, 0x32, 0xa9, 0x48, 0x9a, 0x6a, 0xb5, + 0xba, 0xe2, 0x4d, 0xac, 0x51, 0x23, 0x19, 0x5f, 0x81, 0xfa, 0x04, 0x3a, 0xcd, 0xe8, 0x1f, 0x45, + 0xbc, 0x82, 0xb5, 0xb4, 0x6a, 0xac, 0x6b, 0x6a, 0x01, 0x0f, 0xaf, 0x2d, 0x55, 0x4f, 0x00, 0x2f, + 0x21, 0xfe, 0x0f, 0x03, 0xec, 0x7a, 0xee, 0x77, 0x60, 0x95, 0xe6, 0x3c, 0x9e, 0x69, 0xfe, 0x55, + 0x5c, 0x3a, 0xcb, 0xa2, 0x66, 0xa3, 0xe8, 0x3d, 0x58, 0x13, 0x74, 0xc1, 0x96, 0x1f, 0xd9, 0x3a, + 0x5e, 0xfa, 0xe8, 0x2d, 0xac, 0x5c, 0xb0, 0x6c, 0xaa, 0xbf, 0x9b, 0x8d, 0xed, 0x27, 0x7f, 0x9b, + 0x60, 0x7d, 0xee, 0xb3, 0x6c, 0x8a, 0x35, 0xd2, 0xff, 0x08, 0xed, 0xc6, 0x25, 0xba, 0x0f, 0xee, + 0xe9, 0x00, 0x1f, 0x47, 0x87, 0xa3, 0xc9, 0x7e, 0x34, 0xea, 0x4f, 0xc6, 0xa3, 0xe3, 0xa3, 0xc1, + 0x6e, 0xf4, 0x3e, 0x1a, 0xf4, 0x9d, 0x5b, 0x08, 0xa0, 0x35, 0x3a, 0xc4, 0xc3, 0xde, 0x81, 0x63, + 0xa0, 0x36, 0xd8, 0xc3, 0x68, 0x14, 0x0d, 0xc7, 0x43, 0xc7, 0xd4, 0x4e, 0xef, 0x4c, 0x3b, 0xd6, + 0xe3, 0xd7, 0xd0, 0x69, 0xfe, 0x39, 0x05, 0x67, 0x0f, 0xef, 0x7e, 0x88, 0x4e, 0x06, 0xbb, 0x27, + 0x63, 0x3c, 0xf8, 0x85, 0xd3, 0x06, 0xeb, 0xec, 0xf9, 0x8e, 0x63, 0x68, 0x63, 0xe7, 0x99, 0x63, + 0xbe, 0xfb, 0x0a, 0x77, 0x19, 0xff, 0x5d, 0xca, 0x45, 0x72, 0x64, 0x7c, 0x3a, 0x2b, 0xd7, 0x48, + 0x90, 0xf0, 0x94, 0x64, 0x49, 0xc0, 0x45, 0x12, 0x26, 0x34, 0xd3, 0x1b, 0x22, 0x2c, 0x43, 0x24, + 0x67, 0xf2, 0x3f, 0xb6, 0xcc, 0xcb, 0xea, 0xfc, 0x6e, 0x5a, 0x7b, 0xb8, 0x77, 0xde, 0xd2, 0x54, + 0x4f, 0x7f, 0x06, 0x00, 0x00, 0xff, 0xff, 0x78, 0xac, 0x5c, 0xdf, 0xaf, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance/provenance.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance/provenance.pb.go new file mode 100644 index 0000000..84edf6a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance/provenance.pb.go @@ -0,0 +1,592 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/provenance/provenance.proto + +package provenance // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/provenance" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import source "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies the hash algorithm, if any. +type Hash_HashType int32 + +const ( + // Unknown. + Hash_HASH_TYPE_UNSPECIFIED Hash_HashType = 0 + // A SHA-256 hash. + Hash_SHA256 Hash_HashType = 1 +) + +var Hash_HashType_name = map[int32]string{ + 0: "HASH_TYPE_UNSPECIFIED", + 1: "SHA256", +} +var Hash_HashType_value = map[string]int32{ + "HASH_TYPE_UNSPECIFIED": 0, + "SHA256": 1, +} + +func (x Hash_HashType) String() string { + return proto.EnumName(Hash_HashType_name, int32(x)) +} +func (Hash_HashType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{3, 0} +} + +// Provenance of a build. Contains all information needed to verify the full +// details about the build from source to completion. +type BuildProvenance struct { + // Unique identifier of the build. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // ID of the project. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Commands requested by the build. + Commands []*Command `protobuf:"bytes,3,rep,name=commands,proto3" json:"commands,omitempty"` + // Output of the build. + BuiltArtifacts []*Artifact `protobuf:"bytes,4,rep,name=built_artifacts,json=builtArtifacts,proto3" json:"built_artifacts,omitempty"` + // Time at which the build was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time at which execution of the build was started. + StartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time at which execution of the build was finished. + EndTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // E-mail address of the user who initiated this build. Note that this was the + // user's e-mail address at the time the build was initiated; this address may + // not represent the same end-user for all time. + Creator string `protobuf:"bytes,8,opt,name=creator,proto3" json:"creator,omitempty"` + // URI where any logs for this provenance were written. + LogsUri string `protobuf:"bytes,9,opt,name=logs_uri,json=logsUri,proto3" json:"logs_uri,omitempty"` + // Details of the Source input to the build. + SourceProvenance *Source `protobuf:"bytes,10,opt,name=source_provenance,json=sourceProvenance,proto3" json:"source_provenance,omitempty"` + // Trigger identifier if the build was triggered automatically; empty if not. + TriggerId string `protobuf:"bytes,11,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + // Special options applied to this build. This is a catch-all field where + // build providers can enter any desired additional details. + BuildOptions map[string]string `protobuf:"bytes,12,rep,name=build_options,json=buildOptions,proto3" json:"build_options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Version string of the builder at the time this build was executed. 
+ BuilderVersion string `protobuf:"bytes,13,opt,name=builder_version,json=builderVersion,proto3" json:"builder_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildProvenance) Reset() { *m = BuildProvenance{} } +func (m *BuildProvenance) String() string { return proto.CompactTextString(m) } +func (*BuildProvenance) ProtoMessage() {} +func (*BuildProvenance) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{0} +} +func (m *BuildProvenance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildProvenance.Unmarshal(m, b) +} +func (m *BuildProvenance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildProvenance.Marshal(b, m, deterministic) +} +func (dst *BuildProvenance) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildProvenance.Merge(dst, src) +} +func (m *BuildProvenance) XXX_Size() int { + return xxx_messageInfo_BuildProvenance.Size(m) +} +func (m *BuildProvenance) XXX_DiscardUnknown() { + xxx_messageInfo_BuildProvenance.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildProvenance proto.InternalMessageInfo + +func (m *BuildProvenance) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *BuildProvenance) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BuildProvenance) GetCommands() []*Command { + if m != nil { + return m.Commands + } + return nil +} + +func (m *BuildProvenance) GetBuiltArtifacts() []*Artifact { + if m != nil { + return m.BuiltArtifacts + } + return nil +} + +func (m *BuildProvenance) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *BuildProvenance) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *BuildProvenance) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *BuildProvenance) GetCreator() string { + if m != nil { + return m.Creator + } + return "" +} + +func (m *BuildProvenance) GetLogsUri() string { + if m != nil { + return m.LogsUri + } + return "" +} + +func (m *BuildProvenance) GetSourceProvenance() *Source { + if m != nil { + return m.SourceProvenance + } + return nil +} + +func (m *BuildProvenance) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +func (m *BuildProvenance) GetBuildOptions() map[string]string { + if m != nil { + return m.BuildOptions + } + return nil +} + +func (m *BuildProvenance) GetBuilderVersion() string { + if m != nil { + return m.BuilderVersion + } + return "" +} + +// Source describes the location of the source used for the build. +type Source struct { + // If provided, the input binary artifacts for the build came from this + // location. + ArtifactStorageSourceUri string `protobuf:"bytes,1,opt,name=artifact_storage_source_uri,json=artifactStorageSourceUri,proto3" json:"artifact_storage_source_uri,omitempty"` + // Hash(es) of the build source, which can be used to verify that the original + // source integrity was maintained in the build. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. + // + // If the build source came in a single package such as a gzipped tarfile + // (.tar.gz), the FileHash will be for the single path to that file. 
+ FileHashes map[string]*FileHashes `protobuf:"bytes,2,rep,name=file_hashes,json=fileHashes,proto3" json:"file_hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // If provided, the source code used for the build came from this location. + Context *source.SourceContext `protobuf:"bytes,3,opt,name=context,proto3" json:"context,omitempty"` + // If provided, some of the source code used for the build may be found in + // these locations, in the case where the source repository had multiple + // remotes or submodules. This list will not include the context specified in + // the context field. + AdditionalContexts []*source.SourceContext `protobuf:"bytes,4,rep,name=additional_contexts,json=additionalContexts,proto3" json:"additional_contexts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Source) Reset() { *m = Source{} } +func (m *Source) String() string { return proto.CompactTextString(m) } +func (*Source) ProtoMessage() {} +func (*Source) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{1} +} +func (m *Source) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Source.Unmarshal(m, b) +} +func (m *Source) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Source.Marshal(b, m, deterministic) +} +func (dst *Source) XXX_Merge(src proto.Message) { + xxx_messageInfo_Source.Merge(dst, src) +} +func (m *Source) XXX_Size() int { + return xxx_messageInfo_Source.Size(m) +} +func (m *Source) XXX_DiscardUnknown() { + xxx_messageInfo_Source.DiscardUnknown(m) +} + +var xxx_messageInfo_Source proto.InternalMessageInfo + +func (m *Source) GetArtifactStorageSourceUri() string { + if m != nil { + return m.ArtifactStorageSourceUri + } + return "" +} + +func (m *Source) GetFileHashes() map[string]*FileHashes { + if m != nil { + return m.FileHashes + } + return nil +} + +func (m *Source) GetContext() *source.SourceContext { + if m != nil { + return m.Context + } + return nil +} + +func (m *Source) GetAdditionalContexts() []*source.SourceContext { + if m != nil { + return m.AdditionalContexts + } + return nil +} + +// Container message for hashes of byte content of files, used in Source +// messages to verify integrity of source input to the build. +type FileHashes struct { + // Collection of file hashes. 
+ FileHash []*Hash `protobuf:"bytes,1,rep,name=file_hash,json=fileHash,proto3" json:"file_hash,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileHashes) Reset() { *m = FileHashes{} } +func (m *FileHashes) String() string { return proto.CompactTextString(m) } +func (*FileHashes) ProtoMessage() {} +func (*FileHashes) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{2} +} +func (m *FileHashes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileHashes.Unmarshal(m, b) +} +func (m *FileHashes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileHashes.Marshal(b, m, deterministic) +} +func (dst *FileHashes) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileHashes.Merge(dst, src) +} +func (m *FileHashes) XXX_Size() int { + return xxx_messageInfo_FileHashes.Size(m) +} +func (m *FileHashes) XXX_DiscardUnknown() { + xxx_messageInfo_FileHashes.DiscardUnknown(m) +} + +var xxx_messageInfo_FileHashes proto.InternalMessageInfo + +func (m *FileHashes) GetFileHash() []*Hash { + if m != nil { + return m.FileHash + } + return nil +} + +// Container message for hash values. +type Hash struct { + // The type of hash that was performed. + Type Hash_HashType `protobuf:"varint,1,opt,name=type,proto3,enum=grafeas.v1beta1.provenance.Hash_HashType" json:"type,omitempty"` + // The hash value. + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{3} +} +func (m *Hash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Hash.Unmarshal(m, b) +} +func (m *Hash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Hash.Marshal(b, m, deterministic) +} +func (dst *Hash) XXX_Merge(src proto.Message) { + xxx_messageInfo_Hash.Merge(dst, src) +} +func (m *Hash) XXX_Size() int { + return xxx_messageInfo_Hash.Size(m) +} +func (m *Hash) XXX_DiscardUnknown() { + xxx_messageInfo_Hash.DiscardUnknown(m) +} + +var xxx_messageInfo_Hash proto.InternalMessageInfo + +func (m *Hash) GetType() Hash_HashType { + if m != nil { + return m.Type + } + return Hash_HASH_TYPE_UNSPECIFIED +} + +func (m *Hash) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// Command describes a step performed as part of the build pipeline. +type Command struct { + // Name of the command, as presented on the command line, or if the command is + // packaged as a Docker container, as presented to `docker pull`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Environment variables set before running this command. + Env []string `protobuf:"bytes,2,rep,name=env,proto3" json:"env,omitempty"` + // Command-line arguments used when executing this command. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // Working directory (relative to project source root) used when running this + // command. + Dir string `protobuf:"bytes,4,opt,name=dir,proto3" json:"dir,omitempty"` + // Optional unique identifier for this command, used in wait_for to reference + // this command as a dependency. 
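Hash pairs an algorithm tag with the raw digest bytes, so producing the SHA-256 variant is a thin wrapper over the standard library. A small sketch under the assumption that it lives in the same package as the generated types; sha256Hash is an illustrative helper name and crypto/sha256 the only extra import:

// sha256Hash wraps the SHA-256 digest of data in the generated Hash message.
func sha256Hash(data []byte) *Hash {
	sum := sha256.Sum256(data) // [32]byte digest from crypto/sha256
	return &Hash{
		Type:  Hash_SHA256,
		Value: sum[:],
	}
}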
+ Id string `protobuf:"bytes,5,opt,name=id,proto3" json:"id,omitempty"` + // The ID(s) of the command(s) that this command depends on. + WaitFor []string `protobuf:"bytes,6,rep,name=wait_for,json=waitFor,proto3" json:"wait_for,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Command) Reset() { *m = Command{} } +func (m *Command) String() string { return proto.CompactTextString(m) } +func (*Command) ProtoMessage() {} +func (*Command) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{4} +} +func (m *Command) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Command.Unmarshal(m, b) +} +func (m *Command) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Command.Marshal(b, m, deterministic) +} +func (dst *Command) XXX_Merge(src proto.Message) { + xxx_messageInfo_Command.Merge(dst, src) +} +func (m *Command) XXX_Size() int { + return xxx_messageInfo_Command.Size(m) +} +func (m *Command) XXX_DiscardUnknown() { + xxx_messageInfo_Command.DiscardUnknown(m) +} + +var xxx_messageInfo_Command proto.InternalMessageInfo + +func (m *Command) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Command) GetEnv() []string { + if m != nil { + return m.Env + } + return nil +} + +func (m *Command) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *Command) GetDir() string { + if m != nil { + return m.Dir + } + return "" +} + +func (m *Command) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Command) GetWaitFor() []string { + if m != nil { + return m.WaitFor + } + return nil +} + +// Artifact describes a build product. +type Artifact struct { + // Hash or checksum value of a binary, or Docker Registry 2.0 digest of a + // container. + Checksum string `protobuf:"bytes,1,opt,name=checksum,proto3" json:"checksum,omitempty"` + // Artifact ID, if any; for container images, this will be a URL by digest + // like `gcr.io/projectID/imagename@sha256:123456`. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Related artifact names. This may be the path to a binary or jar file, or in + // the case of a container build, the name used to push the container image to + // Google Container Registry, as presented to `docker push`. Note that a + // single Artifact ID can have multiple names, for example if two tags are + // applied to one image. 
+ Names []string `protobuf:"bytes,3,rep,name=names,proto3" json:"names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Artifact) Reset() { *m = Artifact{} } +func (m *Artifact) String() string { return proto.CompactTextString(m) } +func (*Artifact) ProtoMessage() {} +func (*Artifact) Descriptor() ([]byte, []int) { + return fileDescriptor_provenance_4fd1fd2866d3bc11, []int{5} +} +func (m *Artifact) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Artifact.Unmarshal(m, b) +} +func (m *Artifact) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Artifact.Marshal(b, m, deterministic) +} +func (dst *Artifact) XXX_Merge(src proto.Message) { + xxx_messageInfo_Artifact.Merge(dst, src) +} +func (m *Artifact) XXX_Size() int { + return xxx_messageInfo_Artifact.Size(m) +} +func (m *Artifact) XXX_DiscardUnknown() { + xxx_messageInfo_Artifact.DiscardUnknown(m) +} + +var xxx_messageInfo_Artifact proto.InternalMessageInfo + +func (m *Artifact) GetChecksum() string { + if m != nil { + return m.Checksum + } + return "" +} + +func (m *Artifact) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Artifact) GetNames() []string { + if m != nil { + return m.Names + } + return nil +} + +func init() { + proto.RegisterType((*BuildProvenance)(nil), "grafeas.v1beta1.provenance.BuildProvenance") + proto.RegisterMapType((map[string]string)(nil), "grafeas.v1beta1.provenance.BuildProvenance.BuildOptionsEntry") + proto.RegisterType((*Source)(nil), "grafeas.v1beta1.provenance.Source") + proto.RegisterMapType((map[string]*FileHashes)(nil), "grafeas.v1beta1.provenance.Source.FileHashesEntry") + proto.RegisterType((*FileHashes)(nil), "grafeas.v1beta1.provenance.FileHashes") + proto.RegisterType((*Hash)(nil), "grafeas.v1beta1.provenance.Hash") + proto.RegisterType((*Command)(nil), "grafeas.v1beta1.provenance.Command") + proto.RegisterType((*Artifact)(nil), "grafeas.v1beta1.provenance.Artifact") + proto.RegisterEnum("grafeas.v1beta1.provenance.Hash_HashType", Hash_HashType_name, Hash_HashType_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/provenance/provenance.proto", fileDescriptor_provenance_4fd1fd2866d3bc11) +} + +var fileDescriptor_provenance_4fd1fd2866d3bc11 = []byte{ + // 848 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x55, 0x4b, 0x6f, 0x23, 0x45, + 0x10, 0x66, 0xfc, 0x1c, 0x97, 0xb3, 0x49, 0xb6, 0x59, 0xa4, 0x8e, 0x51, 0x44, 0x64, 0x5e, 0xe1, + 0x32, 0x56, 0x8c, 0x16, 0x01, 0x8b, 0x15, 0x65, 0x83, 0x43, 0x22, 0x1e, 0x1b, 0x8d, 0xb3, 0x2b, + 0xc1, 0x81, 0x51, 0x7b, 0xba, 0x3d, 0x69, 0x76, 0x3c, 0x6d, 0x75, 0xb7, 0x0d, 0xbe, 0x71, 0xe3, + 0xc6, 0x2f, 0xe0, 0xc4, 0x4f, 0xe4, 0x17, 0xa0, 0x7e, 0x8c, 0x6d, 0x76, 0xc1, 0x09, 0x97, 0xa4, + 0xab, 0xe6, 0xfb, 0xbe, 0xae, 0xf9, 0xaa, 0xa6, 0x0c, 0xc3, 0x4c, 0x88, 0x2c, 0x67, 0x3d, 0xca, + 0x16, 0x5a, 0x88, 0x5c, 0xf5, 0x52, 0x51, 0x68, 0xc2, 0x0b, 0x26, 0x49, 0x41, 0xf2, 0xa5, 0xe2, + 0xaa, 0xb7, 0x38, 0x19, 0x33, 0x4d, 0x4e, 0x7a, 0x33, 0x29, 0x16, 0xac, 0x20, 0x45, 0xca, 0x36, + 0x8e, 0xd1, 0x4c, 0x0a, 0x2d, 0x50, 0x27, 0x93, 0x64, 0xc2, 0x88, 0x8a, 0x3c, 0x38, 0x5a, 0x23, + 0x3a, 0x83, 0xfb, 0x5f, 0xa1, 0xc4, 0x5c, 0xa6, 0xcc, 0xff, 0x73, 0xd2, 0x9d, 0x77, 0x3c, 0xdd, + 0x46, 0xe3, 0xf9, 0xa4, 0xa7, 0xf9, 0x94, 0x29, 0x4d, 0xa6, 0x33, 0x07, 0xe8, 0xfe, 0x55, 0x87, + 0xbd, 0xa7, 0x73, 0x9e, 0xd3, 0xeb, 0xd5, 0x9d, 
0x68, 0x17, 0x2a, 0x9c, 0xe2, 0xe0, 0x28, 0x38, + 0x6e, 0xc5, 0x15, 0x4e, 0xd1, 0x21, 0xc0, 0x4c, 0x8a, 0x9f, 0x58, 0xaa, 0x13, 0x4e, 0x71, 0xc5, + 0xe6, 0x5b, 0x3e, 0x73, 0x45, 0xd1, 0x29, 0x84, 0xa9, 0x98, 0x4e, 0x49, 0x41, 0x15, 0xae, 0x1e, + 0x55, 0x8f, 0xdb, 0xfd, 0x77, 0xa3, 0xff, 0x7e, 0xa3, 0xe8, 0xdc, 0x61, 0xe3, 0x15, 0x09, 0x7d, + 0x0b, 0x7b, 0xe3, 0x39, 0xcf, 0x75, 0x42, 0xa4, 0xe6, 0x13, 0x92, 0x6a, 0x85, 0x6b, 0x56, 0xe7, + 0xbd, 0x6d, 0x3a, 0x67, 0x1e, 0x1c, 0xef, 0x5a, 0x72, 0x19, 0x2a, 0xf4, 0x04, 0xda, 0xa9, 0x64, + 0x44, 0xb3, 0xc4, 0xbc, 0x2c, 0xae, 0x1f, 0x05, 0xc7, 0xed, 0x7e, 0x27, 0x72, 0x4e, 0x44, 0xa5, + 0x13, 0xd1, 0x4d, 0xe9, 0x44, 0x0c, 0x0e, 0x6e, 0x12, 0xe8, 0x33, 0x00, 0xa5, 0x89, 0xd4, 0x8e, + 0xdb, 0xb8, 0x93, 0xdb, 0xb2, 0x68, 0x4b, 0x7d, 0x0c, 0x21, 0x2b, 0xa8, 0x23, 0x36, 0xef, 0x24, + 0x36, 0x59, 0x41, 0x2d, 0x0d, 0x43, 0xd3, 0xde, 0x2f, 0x24, 0x0e, 0xad, 0xb5, 0x65, 0x88, 0x0e, + 0x20, 0xcc, 0x45, 0xa6, 0x92, 0xb9, 0xe4, 0xb8, 0xe5, 0x1e, 0x99, 0xf8, 0xb9, 0xe4, 0xe8, 0x19, + 0x3c, 0x74, 0x7d, 0x4e, 0xd6, 0x8e, 0x60, 0xb0, 0x97, 0x76, 0xb7, 0x99, 0x36, 0xb2, 0xa4, 0x78, + 0xdf, 0x91, 0x37, 0x7a, 0x7e, 0x08, 0xa0, 0x25, 0xcf, 0x32, 0x26, 0x4d, 0x8f, 0xdb, 0xae, 0xc7, + 0x3e, 0x73, 0x45, 0xd1, 0x18, 0x1e, 0x18, 0x97, 0x69, 0x22, 0x66, 0x9a, 0x8b, 0x42, 0xe1, 0x1d, + 0xdb, 0xa0, 0xc1, 0xb6, 0xbb, 0x5e, 0x19, 0x2b, 0x17, 0x3f, 0x73, 0xfc, 0x61, 0xa1, 0xe5, 0x32, + 0xde, 0x19, 0x6f, 0xa4, 0xd0, 0x87, 0x6e, 0x0c, 0x28, 0x93, 0xc9, 0x82, 0x49, 0xc5, 0x45, 0x81, + 0x1f, 0xd8, 0x3a, 0x76, 0x7d, 0xfa, 0x85, 0xcb, 0x76, 0x4e, 0xe1, 0xe1, 0x6b, 0x5a, 0x68, 0x1f, + 0xaa, 0x2f, 0xd9, 0xd2, 0x4f, 0xad, 0x39, 0xa2, 0x47, 0x50, 0x5f, 0x90, 0x7c, 0xce, 0xfc, 0xc4, + 0xba, 0xe0, 0xf3, 0xca, 0xa7, 0x41, 0xf7, 0x8f, 0x2a, 0x34, 0x9c, 0x13, 0x68, 0x00, 0x6f, 0x97, + 0x53, 0x97, 0x28, 0x2d, 0x24, 0xc9, 0x58, 0xe2, 0x9d, 0x35, 0xb6, 0x3b, 0x39, 0x5c, 0x42, 0x46, + 0x0e, 0xe1, 0xb8, 0xa6, 0x0f, 0x23, 0x68, 0x4f, 0x78, 0xce, 0x92, 0x5b, 0xa2, 0x6e, 0x99, 0xc2, + 0x15, 0xeb, 0x4a, 0xff, 0xee, 0x0e, 0x44, 0x17, 0x3c, 0x67, 0x97, 0x96, 0xe4, 0xac, 0x80, 0xc9, + 0x2a, 0x81, 0x4e, 0xa1, 0x69, 0xbe, 0x72, 0xf6, 0x8b, 0xc6, 0x55, 0xdb, 0xd2, 0xf7, 0x5f, 0x13, + 0xf4, 0x1f, 0xb9, 0x13, 0x3b, 0x77, 0xe0, 0xb8, 0x64, 0xa1, 0x17, 0xf0, 0x26, 0xa1, 0x94, 0x1b, + 0x77, 0x48, 0x9e, 0xf8, 0x6c, 0xf9, 0x51, 0xdd, 0x53, 0x0c, 0xad, 0x15, 0x7c, 0x4a, 0x75, 0x18, + 0xec, 0xbd, 0x52, 0xf7, 0xbf, 0xd8, 0xfe, 0xc5, 0xa6, 0xed, 0xed, 0xfe, 0x07, 0xdb, 0xcc, 0x58, + 0xab, 0x6d, 0xb6, 0xe7, 0x6b, 0x80, 0xf5, 0x03, 0x34, 0x80, 0xd6, 0xca, 0x62, 0x1c, 0xd8, 0x57, + 0x38, 0xda, 0xa6, 0x69, 0x68, 0x71, 0x58, 0xda, 0xd9, 0xfd, 0x3d, 0x80, 0x9a, 0x39, 0xa0, 0x01, + 0xd4, 0xf4, 0x72, 0xc6, 0x6c, 0xa9, 0xbb, 0xfd, 0x8f, 0xee, 0x92, 0xb0, 0x7f, 0x6e, 0x96, 0x33, + 0x16, 0x5b, 0xda, 0x3f, 0xa7, 0x69, 0xc7, 0x97, 0xdb, 0x3d, 0x81, 0xb0, 0xc4, 0xa1, 0x03, 0x78, + 0xeb, 0xf2, 0x6c, 0x74, 0x99, 0xdc, 0x7c, 0x7f, 0x3d, 0x4c, 0x9e, 0x7f, 0x37, 0xba, 0x1e, 0x9e, + 0x5f, 0x5d, 0x5c, 0x0d, 0xbf, 0xdc, 0x7f, 0x03, 0x01, 0x34, 0x46, 0x97, 0x67, 0xfd, 0xc7, 0x9f, + 0xec, 0x07, 0xdd, 0x5f, 0x03, 0x68, 0xfa, 0x1d, 0x88, 0x10, 0xd4, 0x0a, 0x32, 0x65, 0xde, 0x3e, + 0x7b, 0x36, 0x8e, 0xb2, 0x62, 0x61, 0x47, 0xa9, 0x15, 0x9b, 0xa3, 0x41, 0x11, 0x99, 0xb9, 0xe5, + 0xda, 0x8a, 0xed, 0xd9, 0xa0, 0x28, 0x97, 0xb8, 0xe6, 0x7c, 0xa7, 0x5c, 0xfa, 0xad, 0x5d, 0x5f, + 0x6d, 0xed, 0x03, 0x08, 0x7f, 0x26, 0x5c, 0x27, 0x13, 0x21, 0x71, 0xc3, 0x32, 0x9b, 0x26, 0xbe, + 0x10, 0xb2, 0xfb, 0x0d, 0x84, 0xe5, 0xba, 0x44, 0x1d, 0x08, 0xd3, 0x5b, 
0x96, 0xbe, 0x54, 0xf3, + 0xa9, 0x2f, 0x63, 0x15, 0x7b, 0xc9, 0xca, 0x4a, 0xf2, 0x11, 0xd4, 0x4d, 0x89, 0x65, 0x25, 0x2e, + 0x78, 0xfa, 0x5b, 0x00, 0x87, 0x5c, 0x6c, 0xf1, 0xf3, 0x3a, 0xf8, 0xe1, 0x47, 0xbf, 0x08, 0x33, + 0x91, 0x93, 0x22, 0x8b, 0x84, 0xcc, 0x7a, 0x19, 0x2b, 0xec, 0x5a, 0xec, 0xb9, 0x47, 0x64, 0xc6, + 0xd5, 0xff, 0xfb, 0x21, 0x7d, 0xb2, 0x3e, 0xfe, 0x59, 0xa9, 0x7e, 0x15, 0x9f, 0x8d, 0x1b, 0x56, + 0xf0, 0xe3, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x79, 0x79, 0x09, 0xaa, 0x98, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source/source.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source/source.pb.go new file mode 100644 index 0000000..b9acf23 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source/source.pb.go @@ -0,0 +1,919 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/containeranalysis/v1beta1/source/source.proto + +package source // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/source" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of an alias. +type AliasContext_Kind int32 + +const ( + // Unknown. + AliasContext_KIND_UNSPECIFIED AliasContext_Kind = 0 + // Git tag. + AliasContext_FIXED AliasContext_Kind = 1 + // Git branch. + AliasContext_MOVABLE AliasContext_Kind = 2 + // Used to specify non-standard aliases. For example, if a Git repo has a + // ref named "refs/foo/bar". + AliasContext_OTHER AliasContext_Kind = 4 +) + +var AliasContext_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "FIXED", + 2: "MOVABLE", + 4: "OTHER", +} +var AliasContext_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "FIXED": 1, + "MOVABLE": 2, + "OTHER": 4, +} + +func (x AliasContext_Kind) String() string { + return proto.EnumName(AliasContext_Kind_name, int32(x)) +} +func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{1, 0} +} + +// A SourceContext is a reference to a tree of files. A SourceContext together +// with a path point to a unique revision of a single file or directory. +type SourceContext struct { + // A SourceContext can refer any one of the following types of repositories. + // + // Types that are valid to be assigned to Context: + // *SourceContext_CloudRepo + // *SourceContext_Gerrit + // *SourceContext_Git + Context isSourceContext_Context `protobuf_oneof:"context"` + // Labels with user defined metadata. 
+ Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{0} +} +func (m *SourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceContext.Unmarshal(m, b) +} +func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceContext.Marshal(b, m, deterministic) +} +func (dst *SourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceContext.Merge(dst, src) +} +func (m *SourceContext) XXX_Size() int { + return xxx_messageInfo_SourceContext.Size(m) +} +func (m *SourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_SourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceContext proto.InternalMessageInfo + +type isSourceContext_Context interface { + isSourceContext_Context() +} + +type SourceContext_CloudRepo struct { + CloudRepo *CloudRepoSourceContext `protobuf:"bytes,1,opt,name=cloud_repo,json=cloudRepo,proto3,oneof"` +} + +type SourceContext_Gerrit struct { + Gerrit *GerritSourceContext `protobuf:"bytes,2,opt,name=gerrit,proto3,oneof"` +} + +type SourceContext_Git struct { + Git *GitSourceContext `protobuf:"bytes,3,opt,name=git,proto3,oneof"` +} + +func (*SourceContext_CloudRepo) isSourceContext_Context() {} + +func (*SourceContext_Gerrit) isSourceContext_Context() {} + +func (*SourceContext_Git) isSourceContext_Context() {} + +func (m *SourceContext) GetContext() isSourceContext_Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *SourceContext) GetCloudRepo() *CloudRepoSourceContext { + if x, ok := m.GetContext().(*SourceContext_CloudRepo); ok { + return x.CloudRepo + } + return nil +} + +func (m *SourceContext) GetGerrit() *GerritSourceContext { + if x, ok := m.GetContext().(*SourceContext_Gerrit); ok { + return x.Gerrit + } + return nil +} + +func (m *SourceContext) GetGit() *GitSourceContext { + if x, ok := m.GetContext().(*SourceContext_Git); ok { + return x.Git + } + return nil +} + +func (m *SourceContext) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
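The context oneof is surfaced as wrapper structs plus typed getters: assigning a variant means storing one of the wrapper types in Context, and reading it back goes through the matching Get method, which returns nil when a different variant is held. A brief sketch of both directions; gitContext and its URL and revision arguments are placeholders, not part of the vendored file:

// gitContext builds a SourceContext whose oneof holds a GitSourceContext.
func gitContext(url, revision string) *SourceContext {
	return &SourceContext{
		Context: &SourceContext_Git{
			Git: &GitSourceContext{Url: url, RevisionId: revision},
		},
	}
}

// Reading the variant back: GetGit returns the stored GitSourceContext,
// or nil if the oneof currently holds a cloud_repo or gerrit context.
// ctx := gitContext("https://example.com/repo.git", "deadbeef").GetGit()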
+func (*SourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SourceContext_OneofMarshaler, _SourceContext_OneofUnmarshaler, _SourceContext_OneofSizer, []interface{}{ + (*SourceContext_CloudRepo)(nil), + (*SourceContext_Gerrit)(nil), + (*SourceContext_Git)(nil), + } +} + +func _SourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudRepo); err != nil { + return err + } + case *SourceContext_Gerrit: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gerrit); err != nil { + return err + } + case *SourceContext_Git: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Git); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SourceContext.Context has unexpected type %T", x) + } + return nil +} + +func _SourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SourceContext) + switch tag { + case 1: // context.cloud_repo + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudRepoSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_CloudRepo{msg} + return true, err + case 2: // context.gerrit + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GerritSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Gerrit{msg} + return true, err + case 3: // context.git + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GitSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Git{msg} + return true, err + default: + return false, nil + } +} + +func _SourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + s := proto.Size(x.CloudRepo) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Gerrit: + s := proto.Size(x.Gerrit) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Git: + s := proto.Size(x.Git) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An alias to a repo revision. +type AliasContext struct { + // The alias kind. + Kind AliasContext_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=grafeas.v1beta1.source.AliasContext_Kind" json:"kind,omitempty"` + // The alias name. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AliasContext) Reset() { *m = AliasContext{} } +func (m *AliasContext) String() string { return proto.CompactTextString(m) } +func (*AliasContext) ProtoMessage() {} +func (*AliasContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{1} +} +func (m *AliasContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AliasContext.Unmarshal(m, b) +} +func (m *AliasContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AliasContext.Marshal(b, m, deterministic) +} +func (dst *AliasContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_AliasContext.Merge(dst, src) +} +func (m *AliasContext) XXX_Size() int { + return xxx_messageInfo_AliasContext.Size(m) +} +func (m *AliasContext) XXX_DiscardUnknown() { + xxx_messageInfo_AliasContext.DiscardUnknown(m) +} + +var xxx_messageInfo_AliasContext proto.InternalMessageInfo + +func (m *AliasContext) GetKind() AliasContext_Kind { + if m != nil { + return m.Kind + } + return AliasContext_KIND_UNSPECIFIED +} + +func (m *AliasContext) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A CloudRepoSourceContext denotes a particular revision in a Google Cloud +// Source Repo. +type CloudRepoSourceContext struct { + // The ID of the repo. + RepoId *RepoId `protobuf:"bytes,1,opt,name=repo_id,json=repoId,proto3" json:"repo_id,omitempty"` + // A revision in a Cloud Repo can be identified by either its revision ID or + // its alias. + // + // Types that are valid to be assigned to Revision: + // *CloudRepoSourceContext_RevisionId + // *CloudRepoSourceContext_AliasContext + Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } +func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudRepoSourceContext) ProtoMessage() {} +func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{2} +} +func (m *CloudRepoSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudRepoSourceContext.Unmarshal(m, b) +} +func (m *CloudRepoSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudRepoSourceContext.Marshal(b, m, deterministic) +} +func (dst *CloudRepoSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudRepoSourceContext.Merge(dst, src) +} +func (m *CloudRepoSourceContext) XXX_Size() int { + return xxx_messageInfo_CloudRepoSourceContext.Size(m) +} +func (m *CloudRepoSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_CloudRepoSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudRepoSourceContext proto.InternalMessageInfo + +func (m *CloudRepoSourceContext) GetRepoId() *RepoId { + if m != nil { + return m.RepoId + } + return nil +} + +type isCloudRepoSourceContext_Revision interface { + isCloudRepoSourceContext_Revision() +} + +type CloudRepoSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type CloudRepoSourceContext_AliasContext struct { + AliasContext *AliasContext 
`protobuf:"bytes,3,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*CloudRepoSourceContext_RevisionId) isCloudRepoSourceContext_Revision() {} + +func (*CloudRepoSourceContext_AliasContext) isCloudRepoSourceContext_Revision() {} + +func (m *CloudRepoSourceContext) GetRevision() isCloudRepoSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *CloudRepoSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *CloudRepoSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CloudRepoSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CloudRepoSourceContext_OneofMarshaler, _CloudRepoSourceContext_OneofUnmarshaler, _CloudRepoSourceContext_OneofSizer, []interface{}{ + (*CloudRepoSourceContext_RevisionId)(nil), + (*CloudRepoSourceContext_AliasContext)(nil), + } +} + +func _CloudRepoSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudRepoSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _CloudRepoSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CloudRepoSourceContext) + switch tag { + case 2: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &CloudRepoSourceContext_RevisionId{x} + return true, err + case 3: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &CloudRepoSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _CloudRepoSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *CloudRepoSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A SourceContext referring to a Gerrit project. +type GerritSourceContext struct { + // The URI of a running Gerrit instance. + HostUri string `protobuf:"bytes,1,opt,name=host_uri,json=hostUri,proto3" json:"host_uri,omitempty"` + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. The "repo name" is the + // hostURI/project. 
+ GerritProject string `protobuf:"bytes,2,opt,name=gerrit_project,json=gerritProject,proto3" json:"gerrit_project,omitempty"` + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. + // + // Types that are valid to be assigned to Revision: + // *GerritSourceContext_RevisionId + // *GerritSourceContext_AliasContext + Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } +func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } +func (*GerritSourceContext) ProtoMessage() {} +func (*GerritSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{3} +} +func (m *GerritSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GerritSourceContext.Unmarshal(m, b) +} +func (m *GerritSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GerritSourceContext.Marshal(b, m, deterministic) +} +func (dst *GerritSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GerritSourceContext.Merge(dst, src) +} +func (m *GerritSourceContext) XXX_Size() int { + return xxx_messageInfo_GerritSourceContext.Size(m) +} +func (m *GerritSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GerritSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GerritSourceContext proto.InternalMessageInfo + +func (m *GerritSourceContext) GetHostUri() string { + if m != nil { + return m.HostUri + } + return "" +} + +func (m *GerritSourceContext) GetGerritProject() string { + if m != nil { + return m.GerritProject + } + return "" +} + +type isGerritSourceContext_Revision interface { + isGerritSourceContext_Revision() +} + +type GerritSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,3,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type GerritSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,4,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*GerritSourceContext_RevisionId) isGerritSourceContext_Revision() {} + +func (*GerritSourceContext_AliasContext) isGerritSourceContext_Revision() {} + +func (m *GerritSourceContext) GetRevision() isGerritSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *GerritSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*GerritSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *GerritSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*GerritSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GerritSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GerritSourceContext_OneofMarshaler, _GerritSourceContext_OneofUnmarshaler, _GerritSourceContext_OneofSizer, []interface{}{ + (*GerritSourceContext_RevisionId)(nil), + (*GerritSourceContext_AliasContext)(nil), + } +} + +func _GerritSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *GerritSourceContext_AliasContext: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GerritSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _GerritSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GerritSourceContext) + switch tag { + case 3: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &GerritSourceContext_RevisionId{x} + return true, err + case 4: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &GerritSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _GerritSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *GerritSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g., GitHub). +type GitSourceContext struct { + // Git repository URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Git commit hash. 
+ RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3" json:"revision_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GitSourceContext) Reset() { *m = GitSourceContext{} } +func (m *GitSourceContext) String() string { return proto.CompactTextString(m) } +func (*GitSourceContext) ProtoMessage() {} +func (*GitSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{4} +} +func (m *GitSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GitSourceContext.Unmarshal(m, b) +} +func (m *GitSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GitSourceContext.Marshal(b, m, deterministic) +} +func (dst *GitSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GitSourceContext.Merge(dst, src) +} +func (m *GitSourceContext) XXX_Size() int { + return xxx_messageInfo_GitSourceContext.Size(m) +} +func (m *GitSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GitSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GitSourceContext proto.InternalMessageInfo + +func (m *GitSourceContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *GitSourceContext) GetRevisionId() string { + if m != nil { + return m.RevisionId + } + return "" +} + +// A unique identifier for a Cloud Repo. +type RepoId struct { + // A cloud repo can be identified by either its project ID and repository name + // combination, or its globally unique identifier. + // + // Types that are valid to be assigned to Id: + // *RepoId_ProjectRepoId + // *RepoId_Uid + Id isRepoId_Id `protobuf_oneof:"id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepoId) Reset() { *m = RepoId{} } +func (m *RepoId) String() string { return proto.CompactTextString(m) } +func (*RepoId) ProtoMessage() {} +func (*RepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{5} +} +func (m *RepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoId.Unmarshal(m, b) +} +func (m *RepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoId.Marshal(b, m, deterministic) +} +func (dst *RepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoId.Merge(dst, src) +} +func (m *RepoId) XXX_Size() int { + return xxx_messageInfo_RepoId.Size(m) +} +func (m *RepoId) XXX_DiscardUnknown() { + xxx_messageInfo_RepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoId proto.InternalMessageInfo + +type isRepoId_Id interface { + isRepoId_Id() +} + +type RepoId_ProjectRepoId struct { + ProjectRepoId *ProjectRepoId `protobuf:"bytes,1,opt,name=project_repo_id,json=projectRepoId,proto3,oneof"` +} + +type RepoId_Uid struct { + Uid string `protobuf:"bytes,2,opt,name=uid,proto3,oneof"` +} + +func (*RepoId_ProjectRepoId) isRepoId_Id() {} + +func (*RepoId_Uid) isRepoId_Id() {} + +func (m *RepoId) GetId() isRepoId_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *RepoId) GetProjectRepoId() *ProjectRepoId { + if x, ok := m.GetId().(*RepoId_ProjectRepoId); ok { + return x.ProjectRepoId + } + return nil +} + +func (m *RepoId) GetUid() string { + if x, ok := m.GetId().(*RepoId_Uid); ok { + return x.Uid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
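RepoId uses the same oneof shape: a repository is addressed either by project ID plus repository name or by an opaque UID, and the generated wrappers keep the two cases distinct at the type level. A short sketch; repoByName and its arguments are illustrative only:

// repoByName identifies a Cloud Repo by project ID and repository name.
// The alternative variant is &RepoId{Id: &RepoId_Uid{Uid: uid}}.
func repoByName(projectID, repoName string) *RepoId {
	return &RepoId{
		Id: &RepoId_ProjectRepoId{
			ProjectRepoId: &ProjectRepoId{
				ProjectId: projectID,
				RepoName:  repoName,
			},
		},
	}
}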
+func (*RepoId) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoId_OneofMarshaler, _RepoId_OneofUnmarshaler, _RepoId_OneofSizer, []interface{}{ + (*RepoId_ProjectRepoId)(nil), + (*RepoId_Uid)(nil), + } +} + +func _RepoId_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProjectRepoId); err != nil { + return err + } + case *RepoId_Uid: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uid) + case nil: + default: + return fmt.Errorf("RepoId.Id has unexpected type %T", x) + } + return nil +} + +func _RepoId_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoId) + switch tag { + case 1: // id.project_repo_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProjectRepoId) + err := b.DecodeMessage(msg) + m.Id = &RepoId_ProjectRepoId{msg} + return true, err + case 2: // id.uid + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Id = &RepoId_Uid{x} + return true, err + default: + return false, nil + } +} + +func _RepoId_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + s := proto.Size(x.ProjectRepoId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RepoId_Uid: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uid))) + n += len(x.Uid) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Selects a repo using a Google Cloud Platform project ID (e.g., +// winged-cargo-31) and a repo name within that project. +type ProjectRepoId struct { + // The ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the repo. Leave empty for the default repo. 
+ RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName,proto3" json:"repo_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} } +func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) } +func (*ProjectRepoId) ProtoMessage() {} +func (*ProjectRepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_41019e073d7a9204, []int{6} +} +func (m *ProjectRepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProjectRepoId.Unmarshal(m, b) +} +func (m *ProjectRepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProjectRepoId.Marshal(b, m, deterministic) +} +func (dst *ProjectRepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProjectRepoId.Merge(dst, src) +} +func (m *ProjectRepoId) XXX_Size() int { + return xxx_messageInfo_ProjectRepoId.Size(m) +} +func (m *ProjectRepoId) XXX_DiscardUnknown() { + xxx_messageInfo_ProjectRepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_ProjectRepoId proto.InternalMessageInfo + +func (m *ProjectRepoId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ProjectRepoId) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +func init() { + proto.RegisterType((*SourceContext)(nil), "grafeas.v1beta1.source.SourceContext") + proto.RegisterMapType((map[string]string)(nil), "grafeas.v1beta1.source.SourceContext.LabelsEntry") + proto.RegisterType((*AliasContext)(nil), "grafeas.v1beta1.source.AliasContext") + proto.RegisterType((*CloudRepoSourceContext)(nil), "grafeas.v1beta1.source.CloudRepoSourceContext") + proto.RegisterType((*GerritSourceContext)(nil), "grafeas.v1beta1.source.GerritSourceContext") + proto.RegisterType((*GitSourceContext)(nil), "grafeas.v1beta1.source.GitSourceContext") + proto.RegisterType((*RepoId)(nil), "grafeas.v1beta1.source.RepoId") + proto.RegisterType((*ProjectRepoId)(nil), "grafeas.v1beta1.source.ProjectRepoId") + proto.RegisterEnum("grafeas.v1beta1.source.AliasContext_Kind", AliasContext_Kind_name, AliasContext_Kind_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/source/source.proto", fileDescriptor_source_41019e073d7a9204) +} + +var fileDescriptor_source_41019e073d7a9204 = []byte{ + // 640 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xdd, 0x4e, 0xdb, 0x30, + 0x18, 0x6d, 0xda, 0xd0, 0x92, 0xaf, 0x94, 0x45, 0x1e, 0x42, 0x65, 0xd3, 0x36, 0x16, 0x0d, 0x89, + 0x69, 0x52, 0x2a, 0xd8, 0xc5, 0x7e, 0xd1, 0x44, 0x21, 0xd0, 0xa8, 0x8c, 0xa2, 0x30, 0xd0, 0xb4, + 0x9b, 0xc8, 0x34, 0x5e, 0xe6, 0x11, 0xe2, 0xca, 0x49, 0xaa, 0xa1, 0xbd, 0x0d, 0xef, 0xb3, 0xab, + 0xbd, 0xcb, 0xae, 0x27, 0x3b, 0x8e, 0x94, 0xb2, 0x46, 0xe2, 0x62, 0x57, 0xb6, 0x8f, 0xbf, 0x73, + 0xfc, 0xfd, 0x1c, 0x19, 0x76, 0x42, 0xc6, 0xc2, 0x88, 0xf4, 0x02, 0x32, 0x4d, 0x19, 0x8b, 0x92, + 0xde, 0x98, 0xc5, 0x29, 0xa6, 0x31, 0xe1, 0x38, 0xc6, 0xd1, 0x75, 0x42, 0x93, 0xde, 0x74, 0xeb, + 0x82, 0xa4, 0x78, 0xab, 0x97, 0xb0, 0x8c, 0x8f, 0x89, 0x5a, 0xec, 0x09, 0x67, 0x29, 0x43, 0xab, + 0x21, 0xc7, 0x5f, 0x09, 0x4e, 0x6c, 0x15, 0x64, 0xe7, 0xb7, 0xd6, 0x9f, 0x3a, 0x74, 0x4e, 0xe5, + 0x76, 0x8f, 0xc5, 0x29, 0xf9, 0x91, 0xa2, 0x11, 0xc0, 0x38, 0x62, 0x59, 0xe0, 0x73, 0x32, 0x61, + 0x5d, 0x6d, 0x5d, 0xdb, 0x6c, 0x6f, 0xdb, 0xf6, 0x7c, 0xba, 0xbd, 0x27, 0x22, 0x3d, 0x32, 0x61, + 0x33, 0x1a, 
0x83, 0x9a, 0x67, 0x8c, 0x8b, 0x1b, 0xe4, 0x40, 0x33, 0x24, 0x9c, 0xd3, 0xb4, 0x5b, + 0x97, 0x62, 0x2f, 0xaa, 0xc4, 0x0e, 0x65, 0xd4, 0x6d, 0x25, 0x45, 0x46, 0xef, 0xa1, 0x11, 0xd2, + 0xb4, 0xdb, 0x90, 0x1a, 0x9b, 0x95, 0x1a, 0xff, 0x0a, 0x08, 0x1a, 0x72, 0xa1, 0x19, 0xe1, 0x0b, + 0x12, 0x25, 0x5d, 0x7d, 0xbd, 0xb1, 0xd9, 0xde, 0xde, 0xaa, 0x12, 0x98, 0x61, 0xdb, 0x47, 0x92, + 0xe3, 0xc4, 0x29, 0xbf, 0xf6, 0x94, 0xc0, 0x83, 0x37, 0xd0, 0x2e, 0xc1, 0xc8, 0x84, 0xc6, 0x25, + 0xb9, 0x96, 0x8d, 0x32, 0x3c, 0xb1, 0x45, 0x2b, 0xb0, 0x30, 0xc5, 0x51, 0x46, 0x64, 0xbd, 0x86, + 0x97, 0x1f, 0xde, 0xd6, 0x5f, 0x6b, 0x7d, 0x03, 0x5a, 0xe3, 0x5c, 0xd9, 0xba, 0xd1, 0x60, 0x69, + 0x37, 0xa2, 0x38, 0x29, 0xfa, 0xbe, 0x03, 0xfa, 0x25, 0x8d, 0x03, 0x29, 0xb4, 0xbc, 0xfd, 0xbc, + 0x2a, 0xbf, 0x32, 0xc7, 0x1e, 0xd2, 0x38, 0xf0, 0x24, 0x0d, 0x21, 0xd0, 0x63, 0x7c, 0x55, 0xbc, + 0x29, 0xf7, 0xd6, 0x07, 0xd0, 0x45, 0x04, 0x5a, 0x01, 0x73, 0xe8, 0x1e, 0xef, 0xfb, 0x67, 0xc7, + 0xa7, 0x27, 0xce, 0x9e, 0x7b, 0xe0, 0x3a, 0xfb, 0x66, 0x0d, 0x19, 0xb0, 0x70, 0xe0, 0x7e, 0x76, + 0xf6, 0x4d, 0x0d, 0xb5, 0xa1, 0xf5, 0x71, 0x74, 0xbe, 0xdb, 0x3f, 0x72, 0xcc, 0xba, 0xc0, 0x47, + 0x9f, 0x06, 0x8e, 0x67, 0xea, 0xd6, 0x2f, 0x0d, 0x56, 0xe7, 0x8f, 0x18, 0xbd, 0x82, 0x96, 0x30, + 0x88, 0x4f, 0x03, 0xe5, 0x91, 0xc7, 0x55, 0x19, 0x0b, 0xae, 0x1b, 0x78, 0x4d, 0x2e, 0x57, 0xf4, + 0x14, 0xda, 0x9c, 0x4c, 0x69, 0x42, 0x59, 0x2c, 0xc8, 0x32, 0xdf, 0x41, 0xcd, 0x83, 0x02, 0x74, + 0x03, 0x34, 0x84, 0x0e, 0x16, 0x65, 0xfa, 0xaa, 0x59, 0x6a, 0xe8, 0xcf, 0xee, 0xd2, 0x93, 0x41, + 0xcd, 0x5b, 0xc2, 0xa5, 0x73, 0x1f, 0x60, 0xb1, 0x90, 0xb6, 0x7e, 0x6b, 0x70, 0x7f, 0x8e, 0xcb, + 0xd0, 0x1a, 0x2c, 0x7e, 0x63, 0x49, 0xea, 0x67, 0x9c, 0xaa, 0x41, 0xb6, 0xc4, 0xf9, 0x8c, 0x53, + 0xb4, 0x01, 0xcb, 0xb9, 0x01, 0xfd, 0x09, 0x67, 0xdf, 0xc9, 0x38, 0x55, 0x1d, 0xee, 0xe4, 0xe8, + 0x49, 0x0e, 0xde, 0xae, 0xaa, 0x71, 0x97, 0xaa, 0xf4, 0xff, 0x54, 0x95, 0x03, 0xe6, 0x6d, 0xdb, + 0x0b, 0x57, 0x66, 0x3c, 0x2a, 0x5c, 0x99, 0xf1, 0x08, 0x3d, 0x99, 0xd3, 0xf7, 0x72, 0x7e, 0x56, + 0x02, 0xcd, 0x7c, 0x54, 0x68, 0x04, 0xf7, 0x54, 0xb1, 0xfe, 0xec, 0x8c, 0x37, 0xaa, 0x72, 0x55, + 0x6d, 0xc8, 0xf9, 0x83, 0x9a, 0xd7, 0x99, 0x94, 0x01, 0x84, 0xa0, 0x91, 0x95, 0x66, 0x2d, 0x0e, + 0x7d, 0x1d, 0xea, 0x34, 0xb0, 0x86, 0xd0, 0x99, 0xe1, 0xa2, 0x47, 0x00, 0xc5, 0xdb, 0xea, 0x59, + 0xc3, 0x33, 0x14, 0xe2, 0x06, 0xe8, 0x21, 0x18, 0x32, 0xa5, 0x92, 0xd7, 0x17, 0x05, 0x70, 0x8c, + 0xaf, 0x48, 0xff, 0x27, 0xac, 0x51, 0x56, 0x91, 0xe2, 0x89, 0xf6, 0xe5, 0x3c, 0xff, 0x42, 0xed, + 0x90, 0x45, 0x38, 0x0e, 0x6d, 0xc6, 0xc3, 0x5e, 0x48, 0x62, 0xf9, 0x3f, 0xf6, 0xf2, 0x2b, 0x3c, + 0xa1, 0xc9, 0xdd, 0x7f, 0xd8, 0x77, 0xf9, 0x72, 0x53, 0x6f, 0x1c, 0x7a, 0xbb, 0x17, 0x4d, 0x29, + 0xf4, 0xf2, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x39, 0x33, 0xb6, 0xbe, 0xa9, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.pb.go new file mode 100644 index 0000000..dc31bf4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.pb.go @@ -0,0 +1,520 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.proto + +package vulnerability // import "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/vulnerability" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import common "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/common" +import _package "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1beta1/package" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Note provider-assigned severity/impact ranking. +type Severity int32 + +const ( + // Unknown. + Severity_SEVERITY_UNSPECIFIED Severity = 0 + // Minimal severity. + Severity_MINIMAL Severity = 1 + // Low severity. + Severity_LOW Severity = 2 + // Medium severity. + Severity_MEDIUM Severity = 3 + // High severity. + Severity_HIGH Severity = 4 + // Critical severity. + Severity_CRITICAL Severity = 5 +) + +var Severity_name = map[int32]string{ + 0: "SEVERITY_UNSPECIFIED", + 1: "MINIMAL", + 2: "LOW", + 3: "MEDIUM", + 4: "HIGH", + 5: "CRITICAL", +} +var Severity_value = map[string]int32{ + "SEVERITY_UNSPECIFIED": 0, + "MINIMAL": 1, + "LOW": 2, + "MEDIUM": 3, + "HIGH": 4, + "CRITICAL": 5, +} + +func (x Severity) String() string { + return proto.EnumName(Severity_name, int32(x)) +} +func (Severity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{0} +} + +// Vulnerability provides metadata about a security vulnerability. +type Vulnerability struct { + // The CVSS score for this vulnerability. + CvssScore float32 `protobuf:"fixed32,1,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"` + // Note provider assigned impact of the vulnerability. + Severity Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"severity,omitempty"` + // All information about the package to specifically identify this + // vulnerability. One entry per (version range and cpe_uri) the package + // vulnerability has manifested in. 
+ Details []*Vulnerability_Detail `protobuf:"bytes,3,rep,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vulnerability) Reset() { *m = Vulnerability{} } +func (m *Vulnerability) String() string { return proto.CompactTextString(m) } +func (*Vulnerability) ProtoMessage() {} +func (*Vulnerability) Descriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{0} +} +func (m *Vulnerability) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vulnerability.Unmarshal(m, b) +} +func (m *Vulnerability) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vulnerability.Marshal(b, m, deterministic) +} +func (dst *Vulnerability) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vulnerability.Merge(dst, src) +} +func (m *Vulnerability) XXX_Size() int { + return xxx_messageInfo_Vulnerability.Size(m) +} +func (m *Vulnerability) XXX_DiscardUnknown() { + xxx_messageInfo_Vulnerability.DiscardUnknown(m) +} + +var xxx_messageInfo_Vulnerability proto.InternalMessageInfo + +func (m *Vulnerability) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *Vulnerability) GetSeverity() Severity { + if m != nil { + return m.Severity + } + return Severity_SEVERITY_UNSPECIFIED +} + +func (m *Vulnerability) GetDetails() []*Vulnerability_Detail { + if m != nil { + return m.Details + } + return nil +} + +// Identifies all occurrences of this vulnerability in the package for a +// specific distro/location. For example: glibc in +// cpe:/o:debian:debian_linux:8 for versions 2.1 - 2.2 +type Vulnerability_Detail struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) in + // which the vulnerability manifests. Examples include distro or storage + // location for vulnerable jar. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The name of the package where the vulnerability was found. + Package string `protobuf:"bytes,2,opt,name=package,proto3" json:"package,omitempty"` + // The min version of the package in which the vulnerability exists. + MinAffectedVersion *_package.Version `protobuf:"bytes,3,opt,name=min_affected_version,json=minAffectedVersion,proto3" json:"min_affected_version,omitempty"` + // The max version of the package in which the vulnerability exists. + MaxAffectedVersion *_package.Version `protobuf:"bytes,4,opt,name=max_affected_version,json=maxAffectedVersion,proto3" json:"max_affected_version,omitempty"` + // The severity (eg: distro assigned severity) for this vulnerability. + SeverityName string `protobuf:"bytes,5,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"` + // A vendor-specific description of this note. + Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"` + // The fix for this specific package version. + FixedLocation *VulnerabilityLocation `protobuf:"bytes,7,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"` + // The type of package; whether native or non native(ruby gems, node.js + // packages etc). + PackageType string `protobuf:"bytes,8,opt,name=package_type,json=packageType,proto3" json:"package_type,omitempty"` + // Whether this detail is obsolete. Occurrences are expected not to point to + // obsolete details. 
+ IsObsolete bool `protobuf:"varint,9,opt,name=is_obsolete,json=isObsolete,proto3" json:"is_obsolete,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Vulnerability_Detail) Reset() { *m = Vulnerability_Detail{} } +func (m *Vulnerability_Detail) String() string { return proto.CompactTextString(m) } +func (*Vulnerability_Detail) ProtoMessage() {} +func (*Vulnerability_Detail) Descriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{0, 0} +} +func (m *Vulnerability_Detail) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Vulnerability_Detail.Unmarshal(m, b) +} +func (m *Vulnerability_Detail) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Vulnerability_Detail.Marshal(b, m, deterministic) +} +func (dst *Vulnerability_Detail) XXX_Merge(src proto.Message) { + xxx_messageInfo_Vulnerability_Detail.Merge(dst, src) +} +func (m *Vulnerability_Detail) XXX_Size() int { + return xxx_messageInfo_Vulnerability_Detail.Size(m) +} +func (m *Vulnerability_Detail) XXX_DiscardUnknown() { + xxx_messageInfo_Vulnerability_Detail.DiscardUnknown(m) +} + +var xxx_messageInfo_Vulnerability_Detail proto.InternalMessageInfo + +func (m *Vulnerability_Detail) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *Vulnerability_Detail) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *Vulnerability_Detail) GetMinAffectedVersion() *_package.Version { + if m != nil { + return m.MinAffectedVersion + } + return nil +} + +func (m *Vulnerability_Detail) GetMaxAffectedVersion() *_package.Version { + if m != nil { + return m.MaxAffectedVersion + } + return nil +} + +func (m *Vulnerability_Detail) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +func (m *Vulnerability_Detail) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Vulnerability_Detail) GetFixedLocation() *VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *Vulnerability_Detail) GetPackageType() string { + if m != nil { + return m.PackageType + } + return "" +} + +func (m *Vulnerability_Detail) GetIsObsolete() bool { + if m != nil { + return m.IsObsolete + } + return false +} + +// Details of a vulnerability occurrence. +type Details struct { + // The type of package; whether native or non native(ruby gems, node.js + // packages etc) + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // Output only. The note provider assigned Severity of the vulnerability. + Severity Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grafeas.v1beta1.vulnerability.Severity" json:"severity,omitempty"` + // Output only. The CVSS score of this vulnerability. CVSS score is on a + // scale of 0-10 where 0 indicates low severity and 10 indicates high + // severity. + CvssScore float32 `protobuf:"fixed32,3,opt,name=cvss_score,json=cvssScore,proto3" json:"cvss_score,omitempty"` + // The set of affected locations and their fixes (if available) within the + // associated resource. + PackageIssue []*PackageIssue `protobuf:"bytes,4,rep,name=package_issue,json=packageIssue,proto3" json:"package_issue,omitempty"` + // Output only. A one sentence description of this vulnerability. 
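A consumer of these messages typically walks GetDetails and filters on the note-level Severity enum, since the per-detail SeverityName is a free-form, distro-assigned string. A minimal sketch of that filtering, assuming it sits next to the generated vulnerability types; severePackages and the HIGH threshold are illustrative choices:

// severePackages returns the affected package names when the note-level
// severity is HIGH or CRITICAL; lower severities yield nil.
func severePackages(v *Vulnerability) []string {
	if v.GetSeverity() < Severity_HIGH {
		return nil
	}
	var pkgs []string
	for _, d := range v.GetDetails() {
		pkgs = append(pkgs, d.GetPackage())
	}
	return pkgs
}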
+ ShortDescription string `protobuf:"bytes,5,opt,name=short_description,json=shortDescription,proto3" json:"short_description,omitempty"` + // Output only. A detailed description of this vulnerability. + LongDescription string `protobuf:"bytes,6,opt,name=long_description,json=longDescription,proto3" json:"long_description,omitempty"` + // Output only. URLs related to this vulnerability. + RelatedUrls []*common.RelatedUrl `protobuf:"bytes,7,rep,name=related_urls,json=relatedUrls,proto3" json:"related_urls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Details) Reset() { *m = Details{} } +func (m *Details) String() string { return proto.CompactTextString(m) } +func (*Details) ProtoMessage() {} +func (*Details) Descriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{1} +} +func (m *Details) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Details.Unmarshal(m, b) +} +func (m *Details) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Details.Marshal(b, m, deterministic) +} +func (dst *Details) XXX_Merge(src proto.Message) { + xxx_messageInfo_Details.Merge(dst, src) +} +func (m *Details) XXX_Size() int { + return xxx_messageInfo_Details.Size(m) +} +func (m *Details) XXX_DiscardUnknown() { + xxx_messageInfo_Details.DiscardUnknown(m) +} + +var xxx_messageInfo_Details proto.InternalMessageInfo + +func (m *Details) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Details) GetSeverity() Severity { + if m != nil { + return m.Severity + } + return Severity_SEVERITY_UNSPECIFIED +} + +func (m *Details) GetCvssScore() float32 { + if m != nil { + return m.CvssScore + } + return 0 +} + +func (m *Details) GetPackageIssue() []*PackageIssue { + if m != nil { + return m.PackageIssue + } + return nil +} + +func (m *Details) GetShortDescription() string { + if m != nil { + return m.ShortDescription + } + return "" +} + +func (m *Details) GetLongDescription() string { + if m != nil { + return m.LongDescription + } + return "" +} + +func (m *Details) GetRelatedUrls() []*common.RelatedUrl { + if m != nil { + return m.RelatedUrls + } + return nil +} + +// This message wraps a location affected by a vulnerability and its +// associated fix (if one is available). +type PackageIssue struct { + // The location of the vulnerability. + AffectedLocation *VulnerabilityLocation `protobuf:"bytes,1,opt,name=affected_location,json=affectedLocation,proto3" json:"affected_location,omitempty"` + // The location of the available fix for vulnerability. + FixedLocation *VulnerabilityLocation `protobuf:"bytes,2,opt,name=fixed_location,json=fixedLocation,proto3" json:"fixed_location,omitempty"` + // The severity (e.g., distro assigned severity) for this vulnerability. 
+ SeverityName string `protobuf:"bytes,3,opt,name=severity_name,json=severityName,proto3" json:"severity_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PackageIssue) Reset() { *m = PackageIssue{} } +func (m *PackageIssue) String() string { return proto.CompactTextString(m) } +func (*PackageIssue) ProtoMessage() {} +func (*PackageIssue) Descriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{2} +} +func (m *PackageIssue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PackageIssue.Unmarshal(m, b) +} +func (m *PackageIssue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PackageIssue.Marshal(b, m, deterministic) +} +func (dst *PackageIssue) XXX_Merge(src proto.Message) { + xxx_messageInfo_PackageIssue.Merge(dst, src) +} +func (m *PackageIssue) XXX_Size() int { + return xxx_messageInfo_PackageIssue.Size(m) +} +func (m *PackageIssue) XXX_DiscardUnknown() { + xxx_messageInfo_PackageIssue.DiscardUnknown(m) +} + +var xxx_messageInfo_PackageIssue proto.InternalMessageInfo + +func (m *PackageIssue) GetAffectedLocation() *VulnerabilityLocation { + if m != nil { + return m.AffectedLocation + } + return nil +} + +func (m *PackageIssue) GetFixedLocation() *VulnerabilityLocation { + if m != nil { + return m.FixedLocation + } + return nil +} + +func (m *PackageIssue) GetSeverityName() string { + if m != nil { + return m.SeverityName + } + return "" +} + +// The location of the vulnerability. +type VulnerabilityLocation struct { + // The cpe_uri in [cpe format] (https://cpe.mitre.org/specification/) + // format. Examples include distro or storage location for vulnerable jar. + CpeUri string `protobuf:"bytes,1,opt,name=cpe_uri,json=cpeUri,proto3" json:"cpe_uri,omitempty"` + // The package being described. + Package string `protobuf:"bytes,2,opt,name=package,proto3" json:"package,omitempty"` + // The version of the package being described. 
+ Version *_package.Version `protobuf:"bytes,3,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VulnerabilityLocation) Reset() { *m = VulnerabilityLocation{} } +func (m *VulnerabilityLocation) String() string { return proto.CompactTextString(m) } +func (*VulnerabilityLocation) ProtoMessage() {} +func (*VulnerabilityLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_vulnerability_de415cb4c75f5ae1, []int{3} +} +func (m *VulnerabilityLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VulnerabilityLocation.Unmarshal(m, b) +} +func (m *VulnerabilityLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VulnerabilityLocation.Marshal(b, m, deterministic) +} +func (dst *VulnerabilityLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VulnerabilityLocation.Merge(dst, src) +} +func (m *VulnerabilityLocation) XXX_Size() int { + return xxx_messageInfo_VulnerabilityLocation.Size(m) +} +func (m *VulnerabilityLocation) XXX_DiscardUnknown() { + xxx_messageInfo_VulnerabilityLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_VulnerabilityLocation proto.InternalMessageInfo + +func (m *VulnerabilityLocation) GetCpeUri() string { + if m != nil { + return m.CpeUri + } + return "" +} + +func (m *VulnerabilityLocation) GetPackage() string { + if m != nil { + return m.Package + } + return "" +} + +func (m *VulnerabilityLocation) GetVersion() *_package.Version { + if m != nil { + return m.Version + } + return nil +} + +func init() { + proto.RegisterType((*Vulnerability)(nil), "grafeas.v1beta1.vulnerability.Vulnerability") + proto.RegisterType((*Vulnerability_Detail)(nil), "grafeas.v1beta1.vulnerability.Vulnerability.Detail") + proto.RegisterType((*Details)(nil), "grafeas.v1beta1.vulnerability.Details") + proto.RegisterType((*PackageIssue)(nil), "grafeas.v1beta1.vulnerability.PackageIssue") + proto.RegisterType((*VulnerabilityLocation)(nil), "grafeas.v1beta1.vulnerability.VulnerabilityLocation") + proto.RegisterEnum("grafeas.v1beta1.vulnerability.Severity", Severity_name, Severity_value) +} + +func init() { + proto.RegisterFile("google/devtools/containeranalysis/v1beta1/vulnerability/vulnerability.proto", fileDescriptor_vulnerability_de415cb4c75f5ae1) +} + +var fileDescriptor_vulnerability_de415cb4c75f5ae1 = []byte{ + // 727 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x6e, 0xda, 0x48, + 0x14, 0x5e, 0x63, 0x82, 0xe1, 0x00, 0x59, 0x67, 0x94, 0xd5, 0x5a, 0x59, 0x45, 0xeb, 0x65, 0x2f, + 0x4a, 0x1b, 0xc9, 0x28, 0x49, 0xaf, 0x5a, 0xb5, 0x15, 0x05, 0x9a, 0x58, 0x85, 0x04, 0x99, 0x90, + 0xaa, 0xad, 0x54, 0x6b, 0x30, 0x83, 0x3b, 0xaa, 0xf1, 0x58, 0x1e, 0x83, 0xc2, 0x0b, 0xf4, 0x3a, + 0xcf, 0xd0, 0xd7, 0xe9, 0xa3, 0xf4, 0xb2, 0x2f, 0x50, 0x31, 0xb6, 0x11, 0x84, 0x2a, 0x3f, 0x6d, + 0xae, 0xec, 0xf3, 0xf9, 0x9c, 0x6f, 0xbe, 0x33, 0xe7, 0xf3, 0x0c, 0xbc, 0x76, 0x19, 0x73, 0x3d, + 0x52, 0x1b, 0x92, 0x69, 0xc4, 0x98, 0xc7, 0x6b, 0x0e, 0xf3, 0x23, 0x4c, 0x7d, 0x12, 0x62, 0x1f, + 0x7b, 0x33, 0x4e, 0x79, 0x6d, 0xba, 0x3f, 0x20, 0x11, 0xde, 0xaf, 0x4d, 0x27, 0xde, 0x1c, 0x1e, + 0x50, 0x8f, 0x46, 0xb3, 0xd5, 0xc8, 0x08, 0x42, 0x16, 0x31, 0xb4, 0xeb, 0x86, 0x78, 0x44, 0x30, + 0x37, 0x92, 0x12, 0x63, 0x25, 0x69, 0xe7, 0xd9, 0xed, 0xd7, 0x72, 0xd8, 0x78, 0xcc, 0xfc, 0xe4, + 0x11, 0xb3, 0xef, 0xbc, 0xb8, 0x7d, 0x79, 0x80, 0x9d, 0x4f, 0xd8, 0x25, 0xe9, 0x33, 
0x26, 0xa8, + 0x5c, 0x6e, 0x40, 0xf9, 0x7c, 0x59, 0x11, 0xda, 0x05, 0x70, 0xa6, 0x9c, 0xdb, 0xdc, 0x61, 0x21, + 0xd1, 0x24, 0x5d, 0xaa, 0x66, 0xac, 0xc2, 0x1c, 0xe9, 0xcd, 0x01, 0xd4, 0x80, 0x3c, 0x27, 0x53, + 0x12, 0xd2, 0x68, 0xa6, 0x65, 0x74, 0xa9, 0xba, 0x79, 0xf0, 0xc0, 0xb8, 0xb6, 0x45, 0xa3, 0x97, + 0xa4, 0x5b, 0x8b, 0x42, 0xd4, 0x01, 0x65, 0x48, 0x22, 0x4c, 0x3d, 0xae, 0xc9, 0xba, 0x5c, 0x2d, + 0x1e, 0x1c, 0xde, 0xc0, 0xb1, 0x22, 0xd1, 0x68, 0x8a, 0x5a, 0x2b, 0xe5, 0xd8, 0xf9, 0x2a, 0x43, + 0x2e, 0xc6, 0xd0, 0xdf, 0xa0, 0x38, 0x01, 0xb1, 0x27, 0x21, 0x15, 0xd2, 0x0b, 0x56, 0xce, 0x09, + 0x48, 0x3f, 0xa4, 0x48, 0x03, 0x25, 0xe9, 0x5c, 0xc8, 0x2e, 0x58, 0x69, 0x88, 0x2c, 0xd8, 0x1e, + 0x53, 0xdf, 0xc6, 0xa3, 0x11, 0x71, 0x22, 0x32, 0xb4, 0xa7, 0x24, 0xe4, 0x94, 0xf9, 0x9a, 0xac, + 0x4b, 0xd5, 0xe2, 0x81, 0xbe, 0xa6, 0x2c, 0xdd, 0xc0, 0xf3, 0x38, 0xcf, 0x42, 0x63, 0xea, 0xd7, + 0x93, 0xe2, 0x04, 0x13, 0x9c, 0xf8, 0x62, 0x9d, 0x33, 0x7b, 0x6b, 0x4e, 0x7c, 0x71, 0x95, 0xf3, + 0x7f, 0x28, 0xa7, 0x1b, 0x68, 0xfb, 0x78, 0x4c, 0xb4, 0x0d, 0xd1, 0x47, 0x29, 0x05, 0x4f, 0xf0, + 0x98, 0x20, 0x1d, 0x8a, 0x43, 0xc2, 0x9d, 0x90, 0x06, 0xd1, 0x7c, 0xbd, 0x9c, 0x48, 0x59, 0x86, + 0xd0, 0x7b, 0xd8, 0x1c, 0xd1, 0x0b, 0x32, 0xb4, 0x3d, 0xe6, 0x60, 0x91, 0xa4, 0x08, 0x51, 0x8f, + 0xef, 0x32, 0x82, 0x76, 0x52, 0x6b, 0x95, 0x05, 0x57, 0x1a, 0xa2, 0xff, 0xa0, 0x94, 0xb4, 0x62, + 0x47, 0xb3, 0x80, 0x68, 0xf9, 0x78, 0xfd, 0x04, 0x3b, 0x9b, 0x05, 0x04, 0xfd, 0x0b, 0x45, 0xca, + 0x6d, 0x36, 0xe0, 0xcc, 0x23, 0x11, 0xd1, 0x0a, 0xba, 0x54, 0xcd, 0x5b, 0x40, 0xf9, 0x69, 0x82, + 0x54, 0xbe, 0x65, 0x40, 0x89, 0xa7, 0xc9, 0x11, 0x82, 0xac, 0xe0, 0x89, 0x67, 0x29, 0xde, 0xef, + 0xc7, 0x81, 0xab, 0x2e, 0x97, 0xaf, 0xba, 0xbc, 0x0b, 0xe5, 0xb4, 0x0f, 0xca, 0xf9, 0x84, 0x68, + 0x59, 0x61, 0xd3, 0xbd, 0x1b, 0x16, 0xea, 0xc6, 0x35, 0xe6, 0xbc, 0xc4, 0x4a, 0x77, 0x42, 0x44, + 0x68, 0x0f, 0xb6, 0xf8, 0x47, 0x16, 0x46, 0xf6, 0xf2, 0x78, 0xe2, 0x09, 0xaa, 0xe2, 0x43, 0x73, + 0x69, 0x46, 0x0f, 0x41, 0xf5, 0x98, 0xef, 0xda, 0xeb, 0xa3, 0xfc, 0x73, 0x8e, 0x2f, 0xa7, 0x3e, + 0x87, 0x52, 0x48, 0x3c, 0x3c, 0x37, 0xd9, 0x24, 0xf4, 0xb8, 0xa6, 0x08, 0xa1, 0xff, 0xac, 0x09, + 0xb5, 0xe2, 0xa4, 0x7e, 0xe8, 0x59, 0xc5, 0x70, 0xf1, 0xce, 0x2b, 0xdf, 0x25, 0x28, 0x2d, 0xcb, + 0x46, 0x18, 0xb6, 0x16, 0xb6, 0x5d, 0x58, 0x44, 0xfa, 0x0d, 0x8b, 0xa8, 0x29, 0xdd, 0xc2, 0x25, + 0xeb, 0x16, 0xcc, 0xdc, 0x9f, 0x05, 0xd7, 0x7e, 0x13, 0x79, 0xfd, 0x37, 0xa9, 0x7c, 0x96, 0xe0, + 0xaf, 0x9f, 0xb2, 0xfd, 0xca, 0x01, 0xf2, 0x04, 0x94, 0xbb, 0x9e, 0x19, 0x69, 0xc1, 0xa3, 0x0f, + 0x90, 0x4f, 0xdd, 0x89, 0x34, 0xd8, 0xee, 0xb5, 0xce, 0x5b, 0x96, 0x79, 0xf6, 0xd6, 0xee, 0x9f, + 0xf4, 0xba, 0xad, 0x86, 0xf9, 0xca, 0x6c, 0x35, 0xd5, 0x3f, 0x50, 0x11, 0x94, 0x8e, 0x79, 0x62, + 0x76, 0xea, 0x6d, 0x55, 0x42, 0x0a, 0xc8, 0xed, 0xd3, 0x37, 0x6a, 0x06, 0x01, 0xe4, 0x3a, 0xad, + 0xa6, 0xd9, 0xef, 0xa8, 0x32, 0xca, 0x43, 0xf6, 0xd8, 0x3c, 0x3a, 0x56, 0xb3, 0xa8, 0x04, 0xf9, + 0x86, 0x65, 0x9e, 0x99, 0x8d, 0x7a, 0x5b, 0xdd, 0x78, 0x79, 0x29, 0x81, 0x4e, 0xd9, 0xf5, 0xfb, + 0xda, 0x95, 0xde, 0x0d, 0xe3, 0x7b, 0xc4, 0x70, 0x99, 0x87, 0x7d, 0xd7, 0x60, 0xa1, 0x5b, 0x73, + 0x89, 0x2f, 0xae, 0x88, 0x5a, 0xfc, 0x09, 0x07, 0x94, 0xdf, 0xf9, 0x46, 0x7c, 0xba, 0x12, 0x7d, + 0xc9, 0xc8, 0x47, 0x56, 0x7d, 0x90, 0x13, 0xb4, 0x87, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xfd, + 0xfb, 0x4c, 0x27, 0x67, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test/remote_execution.pb.go 
b/vendor/google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test/remote_execution.pb.go new file mode 100644 index 0000000..000ca7a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test/remote_execution.pb.go @@ -0,0 +1,2807 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/remoteexecution/v1test/remote_execution.proto + +package remoteexecution // import "google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The current stage of execution. +type ExecuteOperationMetadata_Stage int32 + +const ( + ExecuteOperationMetadata_UNKNOWN ExecuteOperationMetadata_Stage = 0 + // Checking the result against the cache. + ExecuteOperationMetadata_CACHE_CHECK ExecuteOperationMetadata_Stage = 1 + // Currently idle, awaiting a free machine to execute. + ExecuteOperationMetadata_QUEUED ExecuteOperationMetadata_Stage = 2 + // Currently being executed by a worker. + ExecuteOperationMetadata_EXECUTING ExecuteOperationMetadata_Stage = 3 + // Finished execution. + ExecuteOperationMetadata_COMPLETED ExecuteOperationMetadata_Stage = 4 +) + +var ExecuteOperationMetadata_Stage_name = map[int32]string{ + 0: "UNKNOWN", + 1: "CACHE_CHECK", + 2: "QUEUED", + 3: "EXECUTING", + 4: "COMPLETED", +} +var ExecuteOperationMetadata_Stage_value = map[string]int32{ + "UNKNOWN": 0, + "CACHE_CHECK": 1, + "QUEUED": 2, + "EXECUTING": 3, + "COMPLETED": 4, +} + +func (x ExecuteOperationMetadata_Stage) String() string { + return proto.EnumName(ExecuteOperationMetadata_Stage_name, int32(x)) +} +func (ExecuteOperationMetadata_Stage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{14, 0} +} + +// An `Action` captures all the information about an execution which is required +// to reproduce it. +// +// `Action`s are the core component of the [Execution] service. A single +// `Action` represents a repeatable action that can be performed by the +// execution service. `Action`s can be succinctly identified by the digest of +// their wire format encoding and, once an `Action` has been executed, will be +// cached in the action cache. Future requests can then use the cached result +// rather than needing to run afresh. +// +// When a server completes execution of an +// [Action][google.devtools.remoteexecution.v1test.Action], it MAY choose to +// cache the [result][google.devtools.remoteexecution.v1test.ActionResult] in +// the [ActionCache][google.devtools.remoteexecution.v1test.ActionCache] unless +// `do_not_cache` is `true`. Clients SHOULD expect the server to do so. 
By +// default, future calls to [Execute][] the same `Action` will also serve their +// results from the cache. Clients must take care to understand the caching +// behaviour. Ideally, all `Action`s will be reproducible so that serving a +// result from cache is always desirable and correct. +type Action struct { + // The digest of the [Command][google.devtools.remoteexecution.v1test.Command] + // to run, which MUST be present in the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + CommandDigest *Digest `protobuf:"bytes,1,opt,name=command_digest,json=commandDigest,proto3" json:"command_digest,omitempty"` + // The digest of the root + // [Directory][google.devtools.remoteexecution.v1test.Directory] for the input + // files. The files in the directory tree are available in the correct + // location on the build machine before the command is executed. The root + // directory, as well as every subdirectory and content blob referred to, MUST + // be in the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + InputRootDigest *Digest `protobuf:"bytes,2,opt,name=input_root_digest,json=inputRootDigest,proto3" json:"input_root_digest,omitempty"` + // A list of the output files that the client expects to retrieve from the + // action. Only the listed files, as well as directories listed in + // `output_directories`, will be returned to the client as output. + // Other files that may be created during command execution are discarded. + // + // The paths are relative to the working directory of the action execution. + // The paths are specified using a single forward slash (`/`) as a path + // separator, even if the execution platform natively uses a different + // separator. The path MUST NOT include a trailing slash, nor a leading slash, + // being a relative path. + // + // In order to ensure consistent hashing of the same Action, the output paths + // MUST be sorted lexicographically by code point (or, equivalently, by UTF-8 + // bytes). + OutputFiles []string `protobuf:"bytes,3,rep,name=output_files,json=outputFiles,proto3" json:"output_files,omitempty"` + // A list of the output directories that the client expects to retrieve from + // the action. Only the contents of the indicated directories (recursively + // including the contents of their subdirectories) will be + // returned, as well as files listed in `output_files`. Other files that may + // be created during command execution are discarded. + // + // The paths are relative to the working directory of the action execution. + // The paths are specified using a single forward slash (`/`) as a path + // separator, even if the execution platform natively uses a different + // separator. The path MUST NOT include a trailing slash, nor a leading slash, + // being a relative path. + // The special value of empty string is allowed, although not recommended, and + // can be used to capture the entire working directory tree, including inputs. + // + // In order to ensure consistent hashing of the same Action, the output paths + // MUST be sorted lexicographically by code point (or, equivalently, by UTF-8 + // bytes). + OutputDirectories []string `protobuf:"bytes,4,rep,name=output_directories,json=outputDirectories,proto3" json:"output_directories,omitempty"` + // The platform requirements for the execution environment. 
The server MAY + // choose to execute the action on any worker satisfying the requirements, so + // the client SHOULD ensure that running the action on any such worker will + // have the same result. + Platform *Platform `protobuf:"bytes,5,opt,name=platform,proto3" json:"platform,omitempty"` + // A timeout after which the execution should be killed. If the timeout is + // absent, then the client is specifying that the execution should continue + // as long as the server will let it. The server SHOULD impose a timeout if + // the client does not specify one, however, if the client does specify a + // timeout that is longer than the server's maximum timeout, the server MUST + // reject the request. + // + // The timeout is a part of the + // [Action][google.devtools.remoteexecution.v1test.Action] message, and + // therefore two `Actions` with different timeouts are different, even if they + // are otherwise identical. This is because, if they were not, running an + // `Action` with a lower timeout than is required might result in a cache hit + // from an execution run with a longer timeout, hiding the fact that the + // timeout is too short. By encoding it directly in the `Action`, a lower + // timeout will result in a cache miss and the execution timeout will fail + // immediately, rather than whenever the cache entry gets evicted. + Timeout *duration.Duration `protobuf:"bytes,6,opt,name=timeout,proto3" json:"timeout,omitempty"` + // If true, then the `Action`'s result cannot be cached. + DoNotCache bool `protobuf:"varint,7,opt,name=do_not_cache,json=doNotCache,proto3" json:"do_not_cache,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action) Reset() { *m = Action{} } +func (m *Action) String() string { return proto.CompactTextString(m) } +func (*Action) ProtoMessage() {} +func (*Action) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{0} +} +func (m *Action) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action.Unmarshal(m, b) +} +func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action.Marshal(b, m, deterministic) +} +func (dst *Action) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action.Merge(dst, src) +} +func (m *Action) XXX_Size() int { + return xxx_messageInfo_Action.Size(m) +} +func (m *Action) XXX_DiscardUnknown() { + xxx_messageInfo_Action.DiscardUnknown(m) +} + +var xxx_messageInfo_Action proto.InternalMessageInfo + +func (m *Action) GetCommandDigest() *Digest { + if m != nil { + return m.CommandDigest + } + return nil +} + +func (m *Action) GetInputRootDigest() *Digest { + if m != nil { + return m.InputRootDigest + } + return nil +} + +func (m *Action) GetOutputFiles() []string { + if m != nil { + return m.OutputFiles + } + return nil +} + +func (m *Action) GetOutputDirectories() []string { + if m != nil { + return m.OutputDirectories + } + return nil +} + +func (m *Action) GetPlatform() *Platform { + if m != nil { + return m.Platform + } + return nil +} + +func (m *Action) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *Action) GetDoNotCache() bool { + if m != nil { + return m.DoNotCache + } + return false +} + +// A `Command` is the actual command executed by a worker running an +// [Action][google.devtools.remoteexecution.v1test.Action]. 
+// +// Except as otherwise required, the environment (such as which system +// libraries or binaries are available, and what filesystems are mounted where) +// is defined by and specific to the implementation of the remote execution API. +type Command struct { + // The arguments to the command. The first argument must be the path to the + // executable, which must be either a relative path, in which case it is + // evaluated with respect to the input root, or an absolute path. + // + // The working directory will always be the input root. + Arguments []string `protobuf:"bytes,1,rep,name=arguments,proto3" json:"arguments,omitempty"` + // The environment variables to set when running the program. The worker may + // provide its own default environment variables; these defaults can be + // overridden using this field. Additional variables can also be specified. + // + // In order to ensure that equivalent `Command`s always hash to the same + // value, the environment variables MUST be lexicographically sorted by name. + // Sorting of strings is done by code point, equivalently, by the UTF-8 bytes. + EnvironmentVariables []*Command_EnvironmentVariable `protobuf:"bytes,2,rep,name=environment_variables,json=environmentVariables,proto3" json:"environment_variables,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Command) Reset() { *m = Command{} } +func (m *Command) String() string { return proto.CompactTextString(m) } +func (*Command) ProtoMessage() {} +func (*Command) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{1} +} +func (m *Command) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Command.Unmarshal(m, b) +} +func (m *Command) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Command.Marshal(b, m, deterministic) +} +func (dst *Command) XXX_Merge(src proto.Message) { + xxx_messageInfo_Command.Merge(dst, src) +} +func (m *Command) XXX_Size() int { + return xxx_messageInfo_Command.Size(m) +} +func (m *Command) XXX_DiscardUnknown() { + xxx_messageInfo_Command.DiscardUnknown(m) +} + +var xxx_messageInfo_Command proto.InternalMessageInfo + +func (m *Command) GetArguments() []string { + if m != nil { + return m.Arguments + } + return nil +} + +func (m *Command) GetEnvironmentVariables() []*Command_EnvironmentVariable { + if m != nil { + return m.EnvironmentVariables + } + return nil +} + +// An `EnvironmentVariable` is one variable to set in the running program's +// environment. +type Command_EnvironmentVariable struct { + // The variable name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The variable value. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Command_EnvironmentVariable) Reset() { *m = Command_EnvironmentVariable{} } +func (m *Command_EnvironmentVariable) String() string { return proto.CompactTextString(m) } +func (*Command_EnvironmentVariable) ProtoMessage() {} +func (*Command_EnvironmentVariable) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{1, 0} +} +func (m *Command_EnvironmentVariable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Command_EnvironmentVariable.Unmarshal(m, b) +} +func (m *Command_EnvironmentVariable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Command_EnvironmentVariable.Marshal(b, m, deterministic) +} +func (dst *Command_EnvironmentVariable) XXX_Merge(src proto.Message) { + xxx_messageInfo_Command_EnvironmentVariable.Merge(dst, src) +} +func (m *Command_EnvironmentVariable) XXX_Size() int { + return xxx_messageInfo_Command_EnvironmentVariable.Size(m) +} +func (m *Command_EnvironmentVariable) XXX_DiscardUnknown() { + xxx_messageInfo_Command_EnvironmentVariable.DiscardUnknown(m) +} + +var xxx_messageInfo_Command_EnvironmentVariable proto.InternalMessageInfo + +func (m *Command_EnvironmentVariable) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Command_EnvironmentVariable) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A `Platform` is a set of requirements, such as hardware, operating system, or +// compiler toolchain, for an +// [Action][google.devtools.remoteexecution.v1test.Action]'s execution +// environment. A `Platform` is represented as a series of key-value pairs +// representing the properties that are required of the platform. +// +// This message is currently being redeveloped since it is an overly simplistic +// model of platforms. +type Platform struct { + // The properties that make up this platform. In order to ensure that + // equivalent `Platform`s always hash to the same value, the properties MUST + // be lexicographically sorted by name, and then by value. Sorting of strings + // is done by code point, equivalently, by the UTF-8 bytes. 
+ Properties []*Platform_Property `protobuf:"bytes,1,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Platform) Reset() { *m = Platform{} } +func (m *Platform) String() string { return proto.CompactTextString(m) } +func (*Platform) ProtoMessage() {} +func (*Platform) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{2} +} +func (m *Platform) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Platform.Unmarshal(m, b) +} +func (m *Platform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Platform.Marshal(b, m, deterministic) +} +func (dst *Platform) XXX_Merge(src proto.Message) { + xxx_messageInfo_Platform.Merge(dst, src) +} +func (m *Platform) XXX_Size() int { + return xxx_messageInfo_Platform.Size(m) +} +func (m *Platform) XXX_DiscardUnknown() { + xxx_messageInfo_Platform.DiscardUnknown(m) +} + +var xxx_messageInfo_Platform proto.InternalMessageInfo + +func (m *Platform) GetProperties() []*Platform_Property { + if m != nil { + return m.Properties + } + return nil +} + +// A single property for the environment. The server is responsible for +// specifying the property `name`s that it accepts. If an unknown `name` is +// provided in the requirements for an +// [Action][google.devtools.remoteexecution.v1test.Action], the server SHOULD +// reject the execution request. If permitted by the server, the same `name` +// may occur multiple times. +// +// The server is also responsible for specifying the interpretation of +// property `value`s. For instance, a property describing how much RAM must be +// available may be interpreted as allowing a worker with 16GB to fulfill a +// request for 8GB, while a property describing the OS environment on which +// the action must be performed may require an exact match with the worker's +// OS. +// +// The server MAY use the `value` of one or more properties to determine how +// it sets up the execution environment, such as by making specific system +// files available to the worker. +type Platform_Property struct { + // The property name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The property value. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Platform_Property) Reset() { *m = Platform_Property{} } +func (m *Platform_Property) String() string { return proto.CompactTextString(m) } +func (*Platform_Property) ProtoMessage() {} +func (*Platform_Property) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{2, 0} +} +func (m *Platform_Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Platform_Property.Unmarshal(m, b) +} +func (m *Platform_Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Platform_Property.Marshal(b, m, deterministic) +} +func (dst *Platform_Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Platform_Property.Merge(dst, src) +} +func (m *Platform_Property) XXX_Size() int { + return xxx_messageInfo_Platform_Property.Size(m) +} +func (m *Platform_Property) XXX_DiscardUnknown() { + xxx_messageInfo_Platform_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Platform_Property proto.InternalMessageInfo + +func (m *Platform_Property) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Platform_Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A `Directory` represents a directory node in a file tree, containing zero or +// more children [FileNodes][google.devtools.remoteexecution.v1test.FileNode] +// and [DirectoryNodes][google.devtools.remoteexecution.v1test.DirectoryNode]. +// Each `Node` contains its name in the directory, the digest of its content +// (either a file blob or a `Directory` proto), as well as possibly some +// metadata about the file or directory. +// +// In order to ensure that two equivalent directory trees hash to the same +// value, the following restrictions MUST be obeyed when constructing a +// a `Directory`: +// - Every child in the directory must have a path of exactly one segment. +// Multiple levels of directory hierarchy may not be collapsed. +// - Each child in the directory must have a unique path segment (file name). +// - The files and directories in the directory must each be sorted in +// lexicographical order by path. The path strings must be sorted by code +// point, equivalently, by UTF-8 bytes. +// +// A `Directory` that obeys the restrictions is said to be in canonical form. +// +// As an example, the following could be used for a file named `bar` and a +// directory named `foo` with an executable file named `baz` (hashes shortened +// for readability): +// +// ```json +// // (Directory proto) +// { +// files: [ +// { +// name: "bar", +// digest: { +// hash: "4a73bc9d03...", +// size: 65534 +// } +// } +// ], +// directories: [ +// { +// name: "foo", +// digest: { +// hash: "4cf2eda940...", +// size: 43 +// } +// } +// ] +// } +// +// // (Directory proto with hash "4cf2eda940..." and size 43) +// { +// files: [ +// { +// name: "baz", +// digest: { +// hash: "b2c941073e...", +// size: 1294, +// }, +// is_executable: true +// } +// ] +// } +// ``` +type Directory struct { + // The files in the directory. + Files []*FileNode `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` + // The subdirectories in the directory. 
+ Directories []*DirectoryNode `protobuf:"bytes,2,rep,name=directories,proto3" json:"directories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Directory) Reset() { *m = Directory{} } +func (m *Directory) String() string { return proto.CompactTextString(m) } +func (*Directory) ProtoMessage() {} +func (*Directory) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{3} +} +func (m *Directory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Directory.Unmarshal(m, b) +} +func (m *Directory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Directory.Marshal(b, m, deterministic) +} +func (dst *Directory) XXX_Merge(src proto.Message) { + xxx_messageInfo_Directory.Merge(dst, src) +} +func (m *Directory) XXX_Size() int { + return xxx_messageInfo_Directory.Size(m) +} +func (m *Directory) XXX_DiscardUnknown() { + xxx_messageInfo_Directory.DiscardUnknown(m) +} + +var xxx_messageInfo_Directory proto.InternalMessageInfo + +func (m *Directory) GetFiles() []*FileNode { + if m != nil { + return m.Files + } + return nil +} + +func (m *Directory) GetDirectories() []*DirectoryNode { + if m != nil { + return m.Directories + } + return nil +} + +// A `FileNode` represents a single file and associated metadata. +type FileNode struct { + // The name of the file. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The digest of the file's content. + Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // True if file is executable, false otherwise. + IsExecutable bool `protobuf:"varint,4,opt,name=is_executable,json=isExecutable,proto3" json:"is_executable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileNode) Reset() { *m = FileNode{} } +func (m *FileNode) String() string { return proto.CompactTextString(m) } +func (*FileNode) ProtoMessage() {} +func (*FileNode) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{4} +} +func (m *FileNode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileNode.Unmarshal(m, b) +} +func (m *FileNode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileNode.Marshal(b, m, deterministic) +} +func (dst *FileNode) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileNode.Merge(dst, src) +} +func (m *FileNode) XXX_Size() int { + return xxx_messageInfo_FileNode.Size(m) +} +func (m *FileNode) XXX_DiscardUnknown() { + xxx_messageInfo_FileNode.DiscardUnknown(m) +} + +var xxx_messageInfo_FileNode proto.InternalMessageInfo + +func (m *FileNode) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FileNode) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +func (m *FileNode) GetIsExecutable() bool { + if m != nil { + return m.IsExecutable + } + return false +} + +// A `DirectoryNode` represents a child of a +// [Directory][google.devtools.remoteexecution.v1test.Directory] which is itself +// a `Directory` and its associated metadata. +type DirectoryNode struct { + // The name of the directory. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The digest of the + // [Directory][google.devtools.remoteexecution.v1test.Directory] object + // represented. 
See [Digest][google.devtools.remoteexecution.v1test.Digest] + // for information about how to take the digest of a proto message. + Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DirectoryNode) Reset() { *m = DirectoryNode{} } +func (m *DirectoryNode) String() string { return proto.CompactTextString(m) } +func (*DirectoryNode) ProtoMessage() {} +func (*DirectoryNode) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{5} +} +func (m *DirectoryNode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DirectoryNode.Unmarshal(m, b) +} +func (m *DirectoryNode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DirectoryNode.Marshal(b, m, deterministic) +} +func (dst *DirectoryNode) XXX_Merge(src proto.Message) { + xxx_messageInfo_DirectoryNode.Merge(dst, src) +} +func (m *DirectoryNode) XXX_Size() int { + return xxx_messageInfo_DirectoryNode.Size(m) +} +func (m *DirectoryNode) XXX_DiscardUnknown() { + xxx_messageInfo_DirectoryNode.DiscardUnknown(m) +} + +var xxx_messageInfo_DirectoryNode proto.InternalMessageInfo + +func (m *DirectoryNode) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DirectoryNode) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +// A content digest. A digest for a given blob consists of the size of the blob +// and its hash. The hash algorithm to use is defined by the server, but servers +// SHOULD use SHA-256. +// +// The size is considered to be an integral part of the digest and cannot be +// separated. That is, even if the `hash` field is correctly specified but +// `size_bytes` is not, the server MUST reject the request. +// +// The reason for including the size in the digest is as follows: in a great +// many cases, the server needs to know the size of the blob it is about to work +// with prior to starting an operation with it, such as flattening Merkle tree +// structures or streaming it to a worker. Technically, the server could +// implement a separate metadata store, but this results in a significantly more +// complicated implementation as opposed to having the client specify the size +// up-front (or storing the size along with the digest in every message where +// digests are embedded). This does mean that the API leaks some implementation +// details of (what we consider to be) a reasonable server implementation, but +// we consider this to be a worthwhile tradeoff. +// +// When a `Digest` is used to refer to a proto message, it always refers to the +// message in binary encoded form. To ensure consistent hashing, clients and +// servers MUST ensure that they serialize messages according to the following +// rules, even if there are alternate valid encodings for the same message. +// - Fields are serialized in tag order. +// - There are no unknown fields. +// - There are no duplicate fields. +// - Fields are serialized according to the default semantics for their type. +// +// Most protocol buffer implementations will always follow these rules when +// serializing, but care should be taken to avoid shortcuts. For instance, +// concatenating two messages to merge them may produce duplicate fields. +type Digest struct { + // The hash. In the case of SHA-256, it will always be a lowercase hex string + // exactly 64 characters long. 
+ Hash string `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` + // The size of the blob, in bytes. + SizeBytes int64 `protobuf:"varint,2,opt,name=size_bytes,json=sizeBytes,proto3" json:"size_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Digest) Reset() { *m = Digest{} } +func (m *Digest) String() string { return proto.CompactTextString(m) } +func (*Digest) ProtoMessage() {} +func (*Digest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{6} +} +func (m *Digest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Digest.Unmarshal(m, b) +} +func (m *Digest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Digest.Marshal(b, m, deterministic) +} +func (dst *Digest) XXX_Merge(src proto.Message) { + xxx_messageInfo_Digest.Merge(dst, src) +} +func (m *Digest) XXX_Size() int { + return xxx_messageInfo_Digest.Size(m) +} +func (m *Digest) XXX_DiscardUnknown() { + xxx_messageInfo_Digest.DiscardUnknown(m) +} + +var xxx_messageInfo_Digest proto.InternalMessageInfo + +func (m *Digest) GetHash() string { + if m != nil { + return m.Hash + } + return "" +} + +func (m *Digest) GetSizeBytes() int64 { + if m != nil { + return m.SizeBytes + } + return 0 +} + +// An ActionResult represents the result of an +// [Action][google.devtools.remoteexecution.v1test.Action] being run. +type ActionResult struct { + // The output files of the action. For each output file requested in the + // `output_files` field of the Action, if the corresponding file existed after + // the action completed, a single entry will be present in the output list. + // + // If the action does not produce the requested output, or produces a + // directory where a regular file is expected or vice versa, then that output + // will be omitted from the list. The server is free to arrange the output + // list as desired; clients MUST NOT assume that the output list is sorted. + OutputFiles []*OutputFile `protobuf:"bytes,2,rep,name=output_files,json=outputFiles,proto3" json:"output_files,omitempty"` + // The output directories of the action. For each output directory requested + // in the `output_directories` field of the Action, if the corresponding + // directory existed after the action completed, a single entry will be + // present in the output list, which will contain the digest of + // a [Tree][google.devtools.remoteexecution.v1test.Tree] message containing + // the directory tree, and the path equal exactly to the corresponding Action + // output_directories member. + // As an example, suppose the Action had an output directory `a/b/dir` and the + // execution produced the following contents in `a/b/dir`: a file named `bar` + // and a directory named `foo` with an executable file named `baz`. Then, + // output_directory will contain (hashes shortened for readability): + // + // ```json + // // OutputDirectory proto: + // { + // path: "a/b/dir" + // tree_digest: { + // hash: "4a73bc9d03...", + // size: 55 + // } + // } + // // Tree proto with hash "4a73bc9d03..." and size 55: + // { + // root: { + // files: [ + // { + // name: "bar", + // digest: { + // hash: "4a73bc9d03...", + // size: 65534 + // } + // } + // ], + // directories: [ + // { + // name: "foo", + // digest: { + // hash: "4cf2eda940...", + // size: 43 + // } + // } + // ] + // } + // children : { + // // (Directory proto with hash "4cf2eda940..." 
and size 43) + // files: [ + // { + // name: "baz", + // digest: { + // hash: "b2c941073e...", + // size: 1294, + // }, + // is_executable: true + // } + // ] + // } + // } + // ``` + OutputDirectories []*OutputDirectory `protobuf:"bytes,3,rep,name=output_directories,json=outputDirectories,proto3" json:"output_directories,omitempty"` + // The exit code of the command. + ExitCode int32 `protobuf:"varint,4,opt,name=exit_code,json=exitCode,proto3" json:"exit_code,omitempty"` + // The standard output buffer of the action. The server will determine, based + // on the size of the buffer, whether to return it in raw form or to return + // a digest in `stdout_digest` that points to the buffer. If neither is set, + // then the buffer is empty. The client SHOULD NOT assume it will get one of + // the raw buffer or a digest on any given request and should be prepared to + // handle either. + StdoutRaw []byte `protobuf:"bytes,5,opt,name=stdout_raw,json=stdoutRaw,proto3" json:"stdout_raw,omitempty"` + // The digest for a blob containing the standard output of the action, which + // can be retrieved from the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + // See `stdout_raw` for when this will be set. + StdoutDigest *Digest `protobuf:"bytes,6,opt,name=stdout_digest,json=stdoutDigest,proto3" json:"stdout_digest,omitempty"` + // The standard error buffer of the action. The server will determine, based + // on the size of the buffer, whether to return it in raw form or to return + // a digest in `stderr_digest` that points to the buffer. If neither is set, + // then the buffer is empty. The client SHOULD NOT assume it will get one of + // the raw buffer or a digest on any given request and should be prepared to + // handle either. + StderrRaw []byte `protobuf:"bytes,7,opt,name=stderr_raw,json=stderrRaw,proto3" json:"stderr_raw,omitempty"` + // The digest for a blob containing the standard error of the action, which + // can be retrieved from the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + // See `stderr_raw` for when this will be set. 
+ StderrDigest *Digest `protobuf:"bytes,8,opt,name=stderr_digest,json=stderrDigest,proto3" json:"stderr_digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ActionResult) Reset() { *m = ActionResult{} } +func (m *ActionResult) String() string { return proto.CompactTextString(m) } +func (*ActionResult) ProtoMessage() {} +func (*ActionResult) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{7} +} +func (m *ActionResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ActionResult.Unmarshal(m, b) +} +func (m *ActionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ActionResult.Marshal(b, m, deterministic) +} +func (dst *ActionResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_ActionResult.Merge(dst, src) +} +func (m *ActionResult) XXX_Size() int { + return xxx_messageInfo_ActionResult.Size(m) +} +func (m *ActionResult) XXX_DiscardUnknown() { + xxx_messageInfo_ActionResult.DiscardUnknown(m) +} + +var xxx_messageInfo_ActionResult proto.InternalMessageInfo + +func (m *ActionResult) GetOutputFiles() []*OutputFile { + if m != nil { + return m.OutputFiles + } + return nil +} + +func (m *ActionResult) GetOutputDirectories() []*OutputDirectory { + if m != nil { + return m.OutputDirectories + } + return nil +} + +func (m *ActionResult) GetExitCode() int32 { + if m != nil { + return m.ExitCode + } + return 0 +} + +func (m *ActionResult) GetStdoutRaw() []byte { + if m != nil { + return m.StdoutRaw + } + return nil +} + +func (m *ActionResult) GetStdoutDigest() *Digest { + if m != nil { + return m.StdoutDigest + } + return nil +} + +func (m *ActionResult) GetStderrRaw() []byte { + if m != nil { + return m.StderrRaw + } + return nil +} + +func (m *ActionResult) GetStderrDigest() *Digest { + if m != nil { + return m.StderrDigest + } + return nil +} + +// An `OutputFile` is similar to a +// [FileNode][google.devtools.remoteexecution.v1test.FileNode], but it is +// tailored for output as part of an `ActionResult`. It allows a full file path +// rather than only a name, and allows the server to include content inline. +// +// `OutputFile` is binary-compatible with `FileNode`. +type OutputFile struct { + // The full path of the file relative to the input root, including the + // filename. The path separator is a forward slash `/`. Since this is a + // relative path, it MUST NOT begin with a leading forward slash. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // The digest of the file's content. + Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // The raw content of the file. + // + // This field may be used by the server to provide the content of a file + // inline in an + // [ActionResult][google.devtools.remoteexecution.v1test.ActionResult] and + // avoid requiring that the client make a separate call to + // [ContentAddressableStorage.GetBlob] to retrieve it. + // + // The client SHOULD NOT assume that it will get raw content with any request, + // and always be prepared to retrieve it via `digest`. + Content []byte `protobuf:"bytes,3,opt,name=content,proto3" json:"content,omitempty"` + // True if file is executable, false otherwise. 
+ IsExecutable bool `protobuf:"varint,4,opt,name=is_executable,json=isExecutable,proto3" json:"is_executable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputFile) Reset() { *m = OutputFile{} } +func (m *OutputFile) String() string { return proto.CompactTextString(m) } +func (*OutputFile) ProtoMessage() {} +func (*OutputFile) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{8} +} +func (m *OutputFile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputFile.Unmarshal(m, b) +} +func (m *OutputFile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputFile.Marshal(b, m, deterministic) +} +func (dst *OutputFile) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputFile.Merge(dst, src) +} +func (m *OutputFile) XXX_Size() int { + return xxx_messageInfo_OutputFile.Size(m) +} +func (m *OutputFile) XXX_DiscardUnknown() { + xxx_messageInfo_OutputFile.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputFile proto.InternalMessageInfo + +func (m *OutputFile) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *OutputFile) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +func (m *OutputFile) GetContent() []byte { + if m != nil { + return m.Content + } + return nil +} + +func (m *OutputFile) GetIsExecutable() bool { + if m != nil { + return m.IsExecutable + } + return false +} + +// A `Tree` contains all the +// [Directory][google.devtools.remoteexecution.v1test.Directory] protos in a +// single directory Merkle tree, compressed into one message. +type Tree struct { + // The root directory in the tree. + Root *Directory `protobuf:"bytes,1,opt,name=root,proto3" json:"root,omitempty"` + // All the child directories: the directories referred to by the root and, + // recursively, all its children. In order to reconstruct the directory tree, + // the client must take the digests of each of the child directories and then + // build up a tree starting from the `root`. + Children []*Directory `protobuf:"bytes,2,rep,name=children,proto3" json:"children,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Tree) Reset() { *m = Tree{} } +func (m *Tree) String() string { return proto.CompactTextString(m) } +func (*Tree) ProtoMessage() {} +func (*Tree) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{9} +} +func (m *Tree) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Tree.Unmarshal(m, b) +} +func (m *Tree) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Tree.Marshal(b, m, deterministic) +} +func (dst *Tree) XXX_Merge(src proto.Message) { + xxx_messageInfo_Tree.Merge(dst, src) +} +func (m *Tree) XXX_Size() int { + return xxx_messageInfo_Tree.Size(m) +} +func (m *Tree) XXX_DiscardUnknown() { + xxx_messageInfo_Tree.DiscardUnknown(m) +} + +var xxx_messageInfo_Tree proto.InternalMessageInfo + +func (m *Tree) GetRoot() *Directory { + if m != nil { + return m.Root + } + return nil +} + +func (m *Tree) GetChildren() []*Directory { + if m != nil { + return m.Children + } + return nil +} + +// An `OutputDirectory` is the output in an `ActionResult` corresponding to a +// directory's full contents rather than a single file. 
+type OutputDirectory struct { + // The full path of the directory relative to the working directory. The path + // separator is a forward slash `/`. Since this is a relative path, it MUST + // NOT begin with a leading forward slash. The empty string value is allowed, + // and it denotes the entire working directory. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // DEPRECATED: This field is deprecated and should no longer be used. + Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // The digest of the encoded + // [Tree][google.devtools.remoteexecution.v1test.Tree] proto containing the + // directory's contents. + TreeDigest *Digest `protobuf:"bytes,3,opt,name=tree_digest,json=treeDigest,proto3" json:"tree_digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputDirectory) Reset() { *m = OutputDirectory{} } +func (m *OutputDirectory) String() string { return proto.CompactTextString(m) } +func (*OutputDirectory) ProtoMessage() {} +func (*OutputDirectory) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{10} +} +func (m *OutputDirectory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputDirectory.Unmarshal(m, b) +} +func (m *OutputDirectory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputDirectory.Marshal(b, m, deterministic) +} +func (dst *OutputDirectory) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputDirectory.Merge(dst, src) +} +func (m *OutputDirectory) XXX_Size() int { + return xxx_messageInfo_OutputDirectory.Size(m) +} +func (m *OutputDirectory) XXX_DiscardUnknown() { + xxx_messageInfo_OutputDirectory.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputDirectory proto.InternalMessageInfo + +func (m *OutputDirectory) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *OutputDirectory) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +func (m *OutputDirectory) GetTreeDigest() *Digest { + if m != nil { + return m.TreeDigest + } + return nil +} + +// A request message for +// [Execution.Execute][google.devtools.remoteexecution.v1test.Execution.Execute]. +type ExecuteRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The action to be performed. + Action *Action `protobuf:"bytes,2,opt,name=action,proto3" json:"action,omitempty"` + // If true, the action will be executed anew even if its result was already + // present in the cache. If false, the result may be served from the + // [ActionCache][google.devtools.remoteexecution.v1test.ActionCache]. + SkipCacheLookup bool `protobuf:"varint,3,opt,name=skip_cache_lookup,json=skipCacheLookup,proto3" json:"skip_cache_lookup,omitempty"` + // DEPRECATED: This field should be ignored by clients and servers and will be + // removed. 
+ TotalInputFileCount int32 `protobuf:"varint,4,opt,name=total_input_file_count,json=totalInputFileCount,proto3" json:"total_input_file_count,omitempty"` + // DEPRECATED: This field should be ignored by clients and servers and will be + // removed. + TotalInputFileBytes int64 `protobuf:"varint,5,opt,name=total_input_file_bytes,json=totalInputFileBytes,proto3" json:"total_input_file_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteRequest) Reset() { *m = ExecuteRequest{} } +func (m *ExecuteRequest) String() string { return proto.CompactTextString(m) } +func (*ExecuteRequest) ProtoMessage() {} +func (*ExecuteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{11} +} +func (m *ExecuteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteRequest.Unmarshal(m, b) +} +func (m *ExecuteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteRequest.Marshal(b, m, deterministic) +} +func (dst *ExecuteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteRequest.Merge(dst, src) +} +func (m *ExecuteRequest) XXX_Size() int { + return xxx_messageInfo_ExecuteRequest.Size(m) +} +func (m *ExecuteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteRequest proto.InternalMessageInfo + +func (m *ExecuteRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *ExecuteRequest) GetAction() *Action { + if m != nil { + return m.Action + } + return nil +} + +func (m *ExecuteRequest) GetSkipCacheLookup() bool { + if m != nil { + return m.SkipCacheLookup + } + return false +} + +func (m *ExecuteRequest) GetTotalInputFileCount() int32 { + if m != nil { + return m.TotalInputFileCount + } + return 0 +} + +func (m *ExecuteRequest) GetTotalInputFileBytes() int64 { + if m != nil { + return m.TotalInputFileBytes + } + return 0 +} + +// A `LogFile` is a log stored in the CAS. +type LogFile struct { + // The digest of the log contents. + Digest *Digest `protobuf:"bytes,1,opt,name=digest,proto3" json:"digest,omitempty"` + // This is a hint as to the purpose of the log, and is set to true if the log + // is human-readable text that can be usefully displayed to a user, and false + // otherwise. For instance, if a command-line client wishes to print the + // server logs to the terminal for a failed action, this allows it to avoid + // displaying a binary file. 
+ HumanReadable bool `protobuf:"varint,2,opt,name=human_readable,json=humanReadable,proto3" json:"human_readable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogFile) Reset() { *m = LogFile{} } +func (m *LogFile) String() string { return proto.CompactTextString(m) } +func (*LogFile) ProtoMessage() {} +func (*LogFile) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{12} +} +func (m *LogFile) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogFile.Unmarshal(m, b) +} +func (m *LogFile) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogFile.Marshal(b, m, deterministic) +} +func (dst *LogFile) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogFile.Merge(dst, src) +} +func (m *LogFile) XXX_Size() int { + return xxx_messageInfo_LogFile.Size(m) +} +func (m *LogFile) XXX_DiscardUnknown() { + xxx_messageInfo_LogFile.DiscardUnknown(m) +} + +var xxx_messageInfo_LogFile proto.InternalMessageInfo + +func (m *LogFile) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +func (m *LogFile) GetHumanReadable() bool { + if m != nil { + return m.HumanReadable + } + return false +} + +// The response message for +// [Execution.Execute][google.devtools.remoteexecution.v1test.Execution.Execute], +// which will be contained in the [response +// field][google.longrunning.Operation.response] of the +// [Operation][google.longrunning.Operation]. +type ExecuteResponse struct { + // The result of the action. + Result *ActionResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + // True if the result was served from cache, false if it was executed. + CachedResult bool `protobuf:"varint,2,opt,name=cached_result,json=cachedResult,proto3" json:"cached_result,omitempty"` + // If the status has a code other than `OK`, it indicates that the action did + // not finish execution. For example, if the operation times out during + // execution, the status will have a `DEADLINE_EXCEEDED` code. Servers MUST + // use this field for errors in execution, rather than the error field on the + // `Operation` object. + // + // If the status code is other than `OK`, then the result MUST NOT be cached. + // For an error status, the `result` field is optional; the server may + // populate the output-, stdout-, and stderr-related fields if it has any + // information available, such as the stdout and stderr of a timed-out action. + Status *status.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` + // An optional list of additional log outputs the server wishes to provide. A + // server can use this to return execution-specific logs however it wishes. + // This is intended primarily to make it easier for users to debug issues that + // may be outside of the actual job execution, such as by identifying the + // worker executing the action or by providing logs from the worker's setup + // phase. The keys SHOULD be human readable so that a client can display them + // to a user. 
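+ // A minimal client-side sketch of surfacing these logs, assuming an
+ // *ExecuteResponse named resp and the standard fmt package (hypothetical
+ // names): only entries whose LogFile is flagged human-readable are offered
+ // to the user.
+ //
+ //   for name, lf := range resp.GetServerLogs() {
+ //       if lf.GetHumanReadable() {
+ //           fmt.Printf("server log %q is readable, stored at digest %v\n", name, lf.GetDigest())
+ //       }
+ //   }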
+ ServerLogs map[string]*LogFile `protobuf:"bytes,4,rep,name=server_logs,json=serverLogs,proto3" json:"server_logs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteResponse) Reset() { *m = ExecuteResponse{} } +func (m *ExecuteResponse) String() string { return proto.CompactTextString(m) } +func (*ExecuteResponse) ProtoMessage() {} +func (*ExecuteResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{13} +} +func (m *ExecuteResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteResponse.Unmarshal(m, b) +} +func (m *ExecuteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteResponse.Marshal(b, m, deterministic) +} +func (dst *ExecuteResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteResponse.Merge(dst, src) +} +func (m *ExecuteResponse) XXX_Size() int { + return xxx_messageInfo_ExecuteResponse.Size(m) +} +func (m *ExecuteResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteResponse proto.InternalMessageInfo + +func (m *ExecuteResponse) GetResult() *ActionResult { + if m != nil { + return m.Result + } + return nil +} + +func (m *ExecuteResponse) GetCachedResult() bool { + if m != nil { + return m.CachedResult + } + return false +} + +func (m *ExecuteResponse) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *ExecuteResponse) GetServerLogs() map[string]*LogFile { + if m != nil { + return m.ServerLogs + } + return nil +} + +// Metadata about an ongoing +// [execution][google.devtools.remoteexecution.v1test.Execution.Execute], which +// will be contained in the [metadata +// field][google.longrunning.Operation.response] of the +// [Operation][google.longrunning.Operation]. +type ExecuteOperationMetadata struct { + Stage ExecuteOperationMetadata_Stage `protobuf:"varint,1,opt,name=stage,proto3,enum=google.devtools.remoteexecution.v1test.ExecuteOperationMetadata_Stage" json:"stage,omitempty"` + // The digest of the [Action][google.devtools.remoteexecution.v1test.Action] + // being executed. + ActionDigest *Digest `protobuf:"bytes,2,opt,name=action_digest,json=actionDigest,proto3" json:"action_digest,omitempty"` + // If set, the client can use this name with + // [ByteStream.Read][google.bytestream.ByteStream.Read] to stream the + // standard output. + StdoutStreamName string `protobuf:"bytes,3,opt,name=stdout_stream_name,json=stdoutStreamName,proto3" json:"stdout_stream_name,omitempty"` + // If set, the client can use this name with + // [ByteStream.Read][google.bytestream.ByteStream.Read] to stream the + // standard error. 
+ StderrStreamName string `protobuf:"bytes,4,opt,name=stderr_stream_name,json=stderrStreamName,proto3" json:"stderr_stream_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteOperationMetadata) Reset() { *m = ExecuteOperationMetadata{} } +func (m *ExecuteOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*ExecuteOperationMetadata) ProtoMessage() {} +func (*ExecuteOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{14} +} +func (m *ExecuteOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteOperationMetadata.Unmarshal(m, b) +} +func (m *ExecuteOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *ExecuteOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteOperationMetadata.Merge(dst, src) +} +func (m *ExecuteOperationMetadata) XXX_Size() int { + return xxx_messageInfo_ExecuteOperationMetadata.Size(m) +} +func (m *ExecuteOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteOperationMetadata proto.InternalMessageInfo + +func (m *ExecuteOperationMetadata) GetStage() ExecuteOperationMetadata_Stage { + if m != nil { + return m.Stage + } + return ExecuteOperationMetadata_UNKNOWN +} + +func (m *ExecuteOperationMetadata) GetActionDigest() *Digest { + if m != nil { + return m.ActionDigest + } + return nil +} + +func (m *ExecuteOperationMetadata) GetStdoutStreamName() string { + if m != nil { + return m.StdoutStreamName + } + return "" +} + +func (m *ExecuteOperationMetadata) GetStderrStreamName() string { + if m != nil { + return m.StderrStreamName + } + return "" +} + +// A request message for +// [ActionCache.GetActionResult][google.devtools.remoteexecution.v1test.ActionCache.GetActionResult]. +type GetActionResultRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The digest of the [Action][google.devtools.remoteexecution.v1test.Action] + // whose result is requested. 
+ ActionDigest *Digest `protobuf:"bytes,2,opt,name=action_digest,json=actionDigest,proto3" json:"action_digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetActionResultRequest) Reset() { *m = GetActionResultRequest{} } +func (m *GetActionResultRequest) String() string { return proto.CompactTextString(m) } +func (*GetActionResultRequest) ProtoMessage() {} +func (*GetActionResultRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{15} +} +func (m *GetActionResultRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetActionResultRequest.Unmarshal(m, b) +} +func (m *GetActionResultRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetActionResultRequest.Marshal(b, m, deterministic) +} +func (dst *GetActionResultRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetActionResultRequest.Merge(dst, src) +} +func (m *GetActionResultRequest) XXX_Size() int { + return xxx_messageInfo_GetActionResultRequest.Size(m) +} +func (m *GetActionResultRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetActionResultRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetActionResultRequest proto.InternalMessageInfo + +func (m *GetActionResultRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *GetActionResultRequest) GetActionDigest() *Digest { + if m != nil { + return m.ActionDigest + } + return nil +} + +// A request message for +// [ActionCache.UpdateActionResult][google.devtools.remoteexecution.v1test.ActionCache.UpdateActionResult]. +type UpdateActionResultRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The digest of the [Action][google.devtools.remoteexecution.v1test.Action] + // whose result is being uploaded. + ActionDigest *Digest `protobuf:"bytes,2,opt,name=action_digest,json=actionDigest,proto3" json:"action_digest,omitempty"` + // The [ActionResult][google.devtools.remoteexecution.v1test.ActionResult] + // to store in the cache. 
+ ActionResult *ActionResult `protobuf:"bytes,3,opt,name=action_result,json=actionResult,proto3" json:"action_result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateActionResultRequest) Reset() { *m = UpdateActionResultRequest{} } +func (m *UpdateActionResultRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateActionResultRequest) ProtoMessage() {} +func (*UpdateActionResultRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{16} +} +func (m *UpdateActionResultRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateActionResultRequest.Unmarshal(m, b) +} +func (m *UpdateActionResultRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateActionResultRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateActionResultRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateActionResultRequest.Merge(dst, src) +} +func (m *UpdateActionResultRequest) XXX_Size() int { + return xxx_messageInfo_UpdateActionResultRequest.Size(m) +} +func (m *UpdateActionResultRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateActionResultRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateActionResultRequest proto.InternalMessageInfo + +func (m *UpdateActionResultRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *UpdateActionResultRequest) GetActionDigest() *Digest { + if m != nil { + return m.ActionDigest + } + return nil +} + +func (m *UpdateActionResultRequest) GetActionResult() *ActionResult { + if m != nil { + return m.ActionResult + } + return nil +} + +// A request message for +// [ContentAddressableStorage.FindMissingBlobs][google.devtools.remoteexecution.v1test.ContentAddressableStorage.FindMissingBlobs]. +type FindMissingBlobsRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // A list of the blobs to check. 
+ BlobDigests []*Digest `protobuf:"bytes,2,rep,name=blob_digests,json=blobDigests,proto3" json:"blob_digests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FindMissingBlobsRequest) Reset() { *m = FindMissingBlobsRequest{} } +func (m *FindMissingBlobsRequest) String() string { return proto.CompactTextString(m) } +func (*FindMissingBlobsRequest) ProtoMessage() {} +func (*FindMissingBlobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{17} +} +func (m *FindMissingBlobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FindMissingBlobsRequest.Unmarshal(m, b) +} +func (m *FindMissingBlobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FindMissingBlobsRequest.Marshal(b, m, deterministic) +} +func (dst *FindMissingBlobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FindMissingBlobsRequest.Merge(dst, src) +} +func (m *FindMissingBlobsRequest) XXX_Size() int { + return xxx_messageInfo_FindMissingBlobsRequest.Size(m) +} +func (m *FindMissingBlobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FindMissingBlobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FindMissingBlobsRequest proto.InternalMessageInfo + +func (m *FindMissingBlobsRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *FindMissingBlobsRequest) GetBlobDigests() []*Digest { + if m != nil { + return m.BlobDigests + } + return nil +} + +// A response message for +// [ContentAddressableStorage.FindMissingBlobs][google.devtools.remoteexecution.v1test.ContentAddressableStorage.FindMissingBlobs]. +type FindMissingBlobsResponse struct { + // A list of the blobs requested *not* present in the storage. + MissingBlobDigests []*Digest `protobuf:"bytes,2,rep,name=missing_blob_digests,json=missingBlobDigests,proto3" json:"missing_blob_digests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FindMissingBlobsResponse) Reset() { *m = FindMissingBlobsResponse{} } +func (m *FindMissingBlobsResponse) String() string { return proto.CompactTextString(m) } +func (*FindMissingBlobsResponse) ProtoMessage() {} +func (*FindMissingBlobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{18} +} +func (m *FindMissingBlobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FindMissingBlobsResponse.Unmarshal(m, b) +} +func (m *FindMissingBlobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FindMissingBlobsResponse.Marshal(b, m, deterministic) +} +func (dst *FindMissingBlobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_FindMissingBlobsResponse.Merge(dst, src) +} +func (m *FindMissingBlobsResponse) XXX_Size() int { + return xxx_messageInfo_FindMissingBlobsResponse.Size(m) +} +func (m *FindMissingBlobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_FindMissingBlobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_FindMissingBlobsResponse proto.InternalMessageInfo + +func (m *FindMissingBlobsResponse) GetMissingBlobDigests() []*Digest { + if m != nil { + return m.MissingBlobDigests + } + return nil +} + +// A single request message for +// [ContentAddressableStorage.BatchUpdateBlobs][google.devtools.remoteexecution.v1test.ContentAddressableStorage.BatchUpdateBlobs]. 
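+// A minimal client-side sketch of the intended flow (find the missing blobs
+// first, then upload only those), assuming a ContentAddressableStorageClient
+// named cas plus hypothetical instance, digests and blobsByHash (keyed by
+// Digest.Hash) values; the combined upload must respect the documented 10 MiB
+// limit on BatchUpdateBlobs:
+//
+//   missing, err := cas.FindMissingBlobs(ctx, &FindMissingBlobsRequest{
+//       InstanceName: instance,
+//       BlobDigests:  digests,
+//   })
+//   if err != nil {
+//       return err
+//   }
+//   req := &BatchUpdateBlobsRequest{InstanceName: instance}
+//   for _, d := range missing.GetMissingBlobDigests() {
+//       req.Requests = append(req.Requests, &UpdateBlobRequest{
+//           ContentDigest: d,
+//           Data:          blobsByHash[d.GetHash()],
+//       })
+//   }
+//   _, err = cas.BatchUpdateBlobs(ctx, req)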
+type UpdateBlobRequest struct { + // The digest of the blob. This MUST be the digest of `data`. + ContentDigest *Digest `protobuf:"bytes,1,opt,name=content_digest,json=contentDigest,proto3" json:"content_digest,omitempty"` + // The raw binary data. + Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateBlobRequest) Reset() { *m = UpdateBlobRequest{} } +func (m *UpdateBlobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateBlobRequest) ProtoMessage() {} +func (*UpdateBlobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{19} +} +func (m *UpdateBlobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateBlobRequest.Unmarshal(m, b) +} +func (m *UpdateBlobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateBlobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateBlobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateBlobRequest.Merge(dst, src) +} +func (m *UpdateBlobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateBlobRequest.Size(m) +} +func (m *UpdateBlobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateBlobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateBlobRequest proto.InternalMessageInfo + +func (m *UpdateBlobRequest) GetContentDigest() *Digest { + if m != nil { + return m.ContentDigest + } + return nil +} + +func (m *UpdateBlobRequest) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// A request message for +// [ContentAddressableStorage.BatchUpdateBlobs][google.devtools.remoteexecution.v1test.ContentAddressableStorage.BatchUpdateBlobs]. +type BatchUpdateBlobsRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The individual upload requests. 
+ Requests []*UpdateBlobRequest `protobuf:"bytes,2,rep,name=requests,proto3" json:"requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateBlobsRequest) Reset() { *m = BatchUpdateBlobsRequest{} } +func (m *BatchUpdateBlobsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateBlobsRequest) ProtoMessage() {} +func (*BatchUpdateBlobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{20} +} +func (m *BatchUpdateBlobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateBlobsRequest.Unmarshal(m, b) +} +func (m *BatchUpdateBlobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateBlobsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateBlobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateBlobsRequest.Merge(dst, src) +} +func (m *BatchUpdateBlobsRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdateBlobsRequest.Size(m) +} +func (m *BatchUpdateBlobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateBlobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateBlobsRequest proto.InternalMessageInfo + +func (m *BatchUpdateBlobsRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *BatchUpdateBlobsRequest) GetRequests() []*UpdateBlobRequest { + if m != nil { + return m.Requests + } + return nil +} + +// A response message for +// [ContentAddressableStorage.BatchUpdateBlobs][google.devtools.remoteexecution.v1test.ContentAddressableStorage.BatchUpdateBlobs]. +type BatchUpdateBlobsResponse struct { + // The responses to the requests. + Responses []*BatchUpdateBlobsResponse_Response `protobuf:"bytes,1,rep,name=responses,proto3" json:"responses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateBlobsResponse) Reset() { *m = BatchUpdateBlobsResponse{} } +func (m *BatchUpdateBlobsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateBlobsResponse) ProtoMessage() {} +func (*BatchUpdateBlobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{21} +} +func (m *BatchUpdateBlobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateBlobsResponse.Unmarshal(m, b) +} +func (m *BatchUpdateBlobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateBlobsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateBlobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateBlobsResponse.Merge(dst, src) +} +func (m *BatchUpdateBlobsResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdateBlobsResponse.Size(m) +} +func (m *BatchUpdateBlobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateBlobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateBlobsResponse proto.InternalMessageInfo + +func (m *BatchUpdateBlobsResponse) GetResponses() []*BatchUpdateBlobsResponse_Response { + if m != nil { + return m.Responses + } + return nil +} + +// A response corresponding to a single blob that the client tried to upload. +type BatchUpdateBlobsResponse_Response struct { + // The digest to which this response corresponds. 
+ BlobDigest *Digest `protobuf:"bytes,1,opt,name=blob_digest,json=blobDigest,proto3" json:"blob_digest,omitempty"` + // The result of attempting to upload that blob. + Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdateBlobsResponse_Response) Reset() { *m = BatchUpdateBlobsResponse_Response{} } +func (m *BatchUpdateBlobsResponse_Response) String() string { return proto.CompactTextString(m) } +func (*BatchUpdateBlobsResponse_Response) ProtoMessage() {} +func (*BatchUpdateBlobsResponse_Response) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{21, 0} +} +func (m *BatchUpdateBlobsResponse_Response) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdateBlobsResponse_Response.Unmarshal(m, b) +} +func (m *BatchUpdateBlobsResponse_Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdateBlobsResponse_Response.Marshal(b, m, deterministic) +} +func (dst *BatchUpdateBlobsResponse_Response) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdateBlobsResponse_Response.Merge(dst, src) +} +func (m *BatchUpdateBlobsResponse_Response) XXX_Size() int { + return xxx_messageInfo_BatchUpdateBlobsResponse_Response.Size(m) +} +func (m *BatchUpdateBlobsResponse_Response) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdateBlobsResponse_Response.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdateBlobsResponse_Response proto.InternalMessageInfo + +func (m *BatchUpdateBlobsResponse_Response) GetBlobDigest() *Digest { + if m != nil { + return m.BlobDigest + } + return nil +} + +func (m *BatchUpdateBlobsResponse_Response) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +// A request message for +// [ContentAddressableStorage.GetTree][google.devtools.remoteexecution.v1test.ContentAddressableStorage.GetTree]. +type GetTreeRequest struct { + // The instance of the execution system to operate against. A server may + // support multiple instances of the execution system (with their own workers, + // storage, caches, etc.). The server MAY require use of this field to select + // between them in an implementation-defined fashion, otherwise it can be + // omitted. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The digest of the root, which must be an encoded + // [Directory][google.devtools.remoteexecution.v1test.Directory] message + // stored in the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + RootDigest *Digest `protobuf:"bytes,2,opt,name=root_digest,json=rootDigest,proto3" json:"root_digest,omitempty"` + // A maximum page size to request. If present, the server will request no more + // than this many items. Regardless of whether a page size is specified, the + // server may place its own limit on the number of items to be returned and + // require the client to retrieve more items using a subsequent request. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token, which must be a value received in a previous + // [GetTreeResponse][google.devtools.remoteexecution.v1test.GetTreeResponse]. + // If present, the server will use it to return the following page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTreeRequest) Reset() { *m = GetTreeRequest{} } +func (m *GetTreeRequest) String() string { return proto.CompactTextString(m) } +func (*GetTreeRequest) ProtoMessage() {} +func (*GetTreeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{22} +} +func (m *GetTreeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTreeRequest.Unmarshal(m, b) +} +func (m *GetTreeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTreeRequest.Marshal(b, m, deterministic) +} +func (dst *GetTreeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTreeRequest.Merge(dst, src) +} +func (m *GetTreeRequest) XXX_Size() int { + return xxx_messageInfo_GetTreeRequest.Size(m) +} +func (m *GetTreeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTreeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTreeRequest proto.InternalMessageInfo + +func (m *GetTreeRequest) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *GetTreeRequest) GetRootDigest() *Digest { + if m != nil { + return m.RootDigest + } + return nil +} + +func (m *GetTreeRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *GetTreeRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// A response message for +// [ContentAddressableStorage.GetTree][google.devtools.remoteexecution.v1test.ContentAddressableStorage.GetTree]. +type GetTreeResponse struct { + // The directories descended from the requested root. + Directories []*Directory `protobuf:"bytes,1,rep,name=directories,proto3" json:"directories,omitempty"` + // If present, signifies that there are more results which the client can + // retrieve by passing this as the page_token in a subsequent + // [request][google.devtools.remoteexecution.v1test.GetTreeRequest]. + // If empty, signifies that this is the last page of results. 
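+ // A minimal pagination sketch built on this contract, assuming a
+ // ContentAddressableStorageClient named cas plus hypothetical instance and
+ // root values:
+ //
+ //   var dirs []*Directory
+ //   token := ""
+ //   for {
+ //       resp, err := cas.GetTree(ctx, &GetTreeRequest{
+ //           InstanceName: instance,
+ //           RootDigest:   root,
+ //           PageToken:    token,
+ //       })
+ //       if err != nil {
+ //           return nil, err
+ //       }
+ //       dirs = append(dirs, resp.GetDirectories()...)
+ //       if token = resp.GetNextPageToken(); token == "" {
+ //           break
+ //       }
+ //   }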
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTreeResponse) Reset() { *m = GetTreeResponse{} } +func (m *GetTreeResponse) String() string { return proto.CompactTextString(m) } +func (*GetTreeResponse) ProtoMessage() {} +func (*GetTreeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{23} +} +func (m *GetTreeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTreeResponse.Unmarshal(m, b) +} +func (m *GetTreeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTreeResponse.Marshal(b, m, deterministic) +} +func (dst *GetTreeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTreeResponse.Merge(dst, src) +} +func (m *GetTreeResponse) XXX_Size() int { + return xxx_messageInfo_GetTreeResponse.Size(m) +} +func (m *GetTreeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTreeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTreeResponse proto.InternalMessageInfo + +func (m *GetTreeResponse) GetDirectories() []*Directory { + if m != nil { + return m.Directories + } + return nil +} + +func (m *GetTreeResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Details for the tool used to call the API. +type ToolDetails struct { + // Name of the tool, e.g. bazel. + ToolName string `protobuf:"bytes,1,opt,name=tool_name,json=toolName,proto3" json:"tool_name,omitempty"` + // Version of the tool used for the request, e.g. 5.0.3. + ToolVersion string `protobuf:"bytes,2,opt,name=tool_version,json=toolVersion,proto3" json:"tool_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ToolDetails) Reset() { *m = ToolDetails{} } +func (m *ToolDetails) String() string { return proto.CompactTextString(m) } +func (*ToolDetails) ProtoMessage() {} +func (*ToolDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{24} +} +func (m *ToolDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ToolDetails.Unmarshal(m, b) +} +func (m *ToolDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ToolDetails.Marshal(b, m, deterministic) +} +func (dst *ToolDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_ToolDetails.Merge(dst, src) +} +func (m *ToolDetails) XXX_Size() int { + return xxx_messageInfo_ToolDetails.Size(m) +} +func (m *ToolDetails) XXX_DiscardUnknown() { + xxx_messageInfo_ToolDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_ToolDetails proto.InternalMessageInfo + +func (m *ToolDetails) GetToolName() string { + if m != nil { + return m.ToolName + } + return "" +} + +func (m *ToolDetails) GetToolVersion() string { + if m != nil { + return m.ToolVersion + } + return "" +} + +// An optional Metadata to attach to any RPC request to tell the server about an +// external context of the request. The server may use this for logging or other +// purposes. To use it, the client attaches the header to the call using the +// canonical proto serialization: +// name: google.devtools.remoteexecution.v1test.requestmetadata-bin +// contents: the base64 encoded binary RequestMetadata message. 
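+// A minimal sketch of attaching that header on the client side, assuming the
+// google.golang.org/grpc/metadata package in addition to the proto package
+// already imported by this file; grpc-go base64-encodes "-bin" metadata values
+// itself, so the serialized message is appended as raw bytes:
+//
+//   raw, err := proto.Marshal(&RequestMetadata{
+//       ToolDetails: &ToolDetails{ToolName: "bazel", ToolVersion: "5.0.3"},
+//       ActionId:    actionID, // hypothetical identifier
+//   })
+//   if err != nil {
+//       return err
+//   }
+//   ctx = metadata.AppendToOutgoingContext(ctx,
+//       "google.devtools.remoteexecution.v1test.requestmetadata-bin", string(raw))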
+type RequestMetadata struct { + // The details for the tool invoking the requests. + ToolDetails *ToolDetails `protobuf:"bytes,1,opt,name=tool_details,json=toolDetails,proto3" json:"tool_details,omitempty"` + // An identifier that ties multiple requests to the same action. + // For example, multiple requests to the CAS, Action Cache, and Execution + // API are used in order to compile foo.cc. + ActionId string `protobuf:"bytes,2,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` + // An identifier that ties multiple actions together to a final result. + // For example, multiple actions are required to build and run foo_test. + ToolInvocationId string `protobuf:"bytes,3,opt,name=tool_invocation_id,json=toolInvocationId,proto3" json:"tool_invocation_id,omitempty"` + // An identifier to tie multiple tool invocations together. For example, + // runs of foo_test, bar_test and baz_test on a post-submit of a given patch. + CorrelatedInvocationsId string `protobuf:"bytes,4,opt,name=correlated_invocations_id,json=correlatedInvocationsId,proto3" json:"correlated_invocations_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestMetadata) Reset() { *m = RequestMetadata{} } +func (m *RequestMetadata) String() string { return proto.CompactTextString(m) } +func (*RequestMetadata) ProtoMessage() {} +func (*RequestMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_remote_execution_3cbae0ca1c4e63f2, []int{25} +} +func (m *RequestMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestMetadata.Unmarshal(m, b) +} +func (m *RequestMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestMetadata.Marshal(b, m, deterministic) +} +func (dst *RequestMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestMetadata.Merge(dst, src) +} +func (m *RequestMetadata) XXX_Size() int { + return xxx_messageInfo_RequestMetadata.Size(m) +} +func (m *RequestMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_RequestMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestMetadata proto.InternalMessageInfo + +func (m *RequestMetadata) GetToolDetails() *ToolDetails { + if m != nil { + return m.ToolDetails + } + return nil +} + +func (m *RequestMetadata) GetActionId() string { + if m != nil { + return m.ActionId + } + return "" +} + +func (m *RequestMetadata) GetToolInvocationId() string { + if m != nil { + return m.ToolInvocationId + } + return "" +} + +func (m *RequestMetadata) GetCorrelatedInvocationsId() string { + if m != nil { + return m.CorrelatedInvocationsId + } + return "" +} + +func init() { + proto.RegisterType((*Action)(nil), "google.devtools.remoteexecution.v1test.Action") + proto.RegisterType((*Command)(nil), "google.devtools.remoteexecution.v1test.Command") + proto.RegisterType((*Command_EnvironmentVariable)(nil), "google.devtools.remoteexecution.v1test.Command.EnvironmentVariable") + proto.RegisterType((*Platform)(nil), "google.devtools.remoteexecution.v1test.Platform") + proto.RegisterType((*Platform_Property)(nil), "google.devtools.remoteexecution.v1test.Platform.Property") + proto.RegisterType((*Directory)(nil), "google.devtools.remoteexecution.v1test.Directory") + proto.RegisterType((*FileNode)(nil), "google.devtools.remoteexecution.v1test.FileNode") + proto.RegisterType((*DirectoryNode)(nil), "google.devtools.remoteexecution.v1test.DirectoryNode") + proto.RegisterType((*Digest)(nil), 
"google.devtools.remoteexecution.v1test.Digest") + proto.RegisterType((*ActionResult)(nil), "google.devtools.remoteexecution.v1test.ActionResult") + proto.RegisterType((*OutputFile)(nil), "google.devtools.remoteexecution.v1test.OutputFile") + proto.RegisterType((*Tree)(nil), "google.devtools.remoteexecution.v1test.Tree") + proto.RegisterType((*OutputDirectory)(nil), "google.devtools.remoteexecution.v1test.OutputDirectory") + proto.RegisterType((*ExecuteRequest)(nil), "google.devtools.remoteexecution.v1test.ExecuteRequest") + proto.RegisterType((*LogFile)(nil), "google.devtools.remoteexecution.v1test.LogFile") + proto.RegisterType((*ExecuteResponse)(nil), "google.devtools.remoteexecution.v1test.ExecuteResponse") + proto.RegisterMapType((map[string]*LogFile)(nil), "google.devtools.remoteexecution.v1test.ExecuteResponse.ServerLogsEntry") + proto.RegisterType((*ExecuteOperationMetadata)(nil), "google.devtools.remoteexecution.v1test.ExecuteOperationMetadata") + proto.RegisterType((*GetActionResultRequest)(nil), "google.devtools.remoteexecution.v1test.GetActionResultRequest") + proto.RegisterType((*UpdateActionResultRequest)(nil), "google.devtools.remoteexecution.v1test.UpdateActionResultRequest") + proto.RegisterType((*FindMissingBlobsRequest)(nil), "google.devtools.remoteexecution.v1test.FindMissingBlobsRequest") + proto.RegisterType((*FindMissingBlobsResponse)(nil), "google.devtools.remoteexecution.v1test.FindMissingBlobsResponse") + proto.RegisterType((*UpdateBlobRequest)(nil), "google.devtools.remoteexecution.v1test.UpdateBlobRequest") + proto.RegisterType((*BatchUpdateBlobsRequest)(nil), "google.devtools.remoteexecution.v1test.BatchUpdateBlobsRequest") + proto.RegisterType((*BatchUpdateBlobsResponse)(nil), "google.devtools.remoteexecution.v1test.BatchUpdateBlobsResponse") + proto.RegisterType((*BatchUpdateBlobsResponse_Response)(nil), "google.devtools.remoteexecution.v1test.BatchUpdateBlobsResponse.Response") + proto.RegisterType((*GetTreeRequest)(nil), "google.devtools.remoteexecution.v1test.GetTreeRequest") + proto.RegisterType((*GetTreeResponse)(nil), "google.devtools.remoteexecution.v1test.GetTreeResponse") + proto.RegisterType((*ToolDetails)(nil), "google.devtools.remoteexecution.v1test.ToolDetails") + proto.RegisterType((*RequestMetadata)(nil), "google.devtools.remoteexecution.v1test.RequestMetadata") + proto.RegisterEnum("google.devtools.remoteexecution.v1test.ExecuteOperationMetadata_Stage", ExecuteOperationMetadata_Stage_name, ExecuteOperationMetadata_Stage_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ExecutionClient is the client API for Execution service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ExecutionClient interface { + // Execute an action remotely. + // + // In order to execute an action, the client must first upload all of the + // inputs, as well as the + // [Command][google.devtools.remoteexecution.v1test.Command] to run, into the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + // It then calls `Execute` with an + // [Action][google.devtools.remoteexecution.v1test.Action] referring to them. 
+ // The server will run the action and eventually return the result. + // + // The input `Action`'s fields MUST meet the various canonicalization + // requirements specified in the documentation for their types so that it has + // the same digest as other logically equivalent `Action`s. The server MAY + // enforce the requirements and return errors if a non-canonical input is + // received. It MAY also proceed without verifying some or all of the + // requirements, such as for performance reasons. If the server does not + // verify the requirement, then it will treat the `Action` as distinct from + // another logically equivalent action if they hash differently. + // + // Returns a [google.longrunning.Operation][google.longrunning.Operation] + // describing the resulting execution, with eventual `response` + // [ExecuteResponse][google.devtools.remoteexecution.v1test.ExecuteResponse]. + // The `metadata` on the operation is of type + // [ExecuteOperationMetadata][google.devtools.remoteexecution.v1test.ExecuteOperationMetadata]. + // + // To query the operation, you can use the + // [Operations API][google.longrunning.Operations.GetOperation]. If you wish + // to allow the server to stream operations updates, rather than requiring + // client polling, you can use the + // [Watcher API][google.watcher.v1.Watcher.Watch] with the Operation's `name` + // as the `target`. + // + // When using the Watcher API, the initial `data` will be the `Operation` at + // the time of the request. Updates will be provided periodically by the + // server until the `Operation` completes, at which point the response message + // will (assuming no error) be at `data.response`. + // + // The server NEED NOT implement other methods or functionality of the + // Operation and Watcher APIs. + // + // Errors discovered during creation of the `Operation` will be reported + // as gRPC Status errors, while errors that occurred while running the + // action will be reported in the `status` field of the `ExecuteResponse`. The + // server MUST NOT set the `error` field of the `Operation` proto. + // The possible errors include: + // * `INVALID_ARGUMENT`: One or more arguments are invalid. + // * `FAILED_PRECONDITION`: One or more errors occurred in setting up the + // action requested, such as a missing input or command or no worker being + // available. The client may be able to fix the errors and retry. + // * `RESOURCE_EXHAUSTED`: There is insufficient quota of some resource to run + // the action. + // * `UNAVAILABLE`: Due to a transient condition, such as all workers being + // occupied (and the server does not support a queue), the action could not + // be started. The client should retry. + // * `INTERNAL`: An internal error occurred in the execution engine or the + // worker. + // * `DEADLINE_EXCEEDED`: The execution timed out. + // + // In the case of a missing input or command, the server SHOULD additionally + // send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail + // where, for each requested blob not present in the CAS, there is a + // `Violation` with a `type` of `MISSING` and a `subject` of + // `"blobs/{hash}/{size}"` indicating the digest of the missing blob. 
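+ // A minimal client-side sketch of this call, assuming a dialed
+ // *grpc.ClientConn named conn and a prepared *ExecuteRequest named req
+ // (hypothetical names); creation-time failures surface here as gRPC status
+ // errors, while execution failures arrive later in ExecuteResponse.Status:
+ //
+ //   op, err := NewExecutionClient(conn).Execute(ctx, req)
+ //   if err != nil {
+ //       return err
+ //   }
+ //   // Poll or watch op (a longrunning.Operation) until it is done, then
+ //   // decode its response field into an ExecuteResponse.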
+ Execute(ctx context.Context, in *ExecuteRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type executionClient struct { + cc *grpc.ClientConn +} + +func NewExecutionClient(cc *grpc.ClientConn) ExecutionClient { + return &executionClient{cc} +} + +func (c *executionClient) Execute(ctx context.Context, in *ExecuteRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.Execution/Execute", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ExecutionServer is the server API for Execution service. +type ExecutionServer interface { + // Execute an action remotely. + // + // In order to execute an action, the client must first upload all of the + // inputs, as well as the + // [Command][google.devtools.remoteexecution.v1test.Command] to run, into the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage]. + // It then calls `Execute` with an + // [Action][google.devtools.remoteexecution.v1test.Action] referring to them. + // The server will run the action and eventually return the result. + // + // The input `Action`'s fields MUST meet the various canonicalization + // requirements specified in the documentation for their types so that it has + // the same digest as other logically equivalent `Action`s. The server MAY + // enforce the requirements and return errors if a non-canonical input is + // received. It MAY also proceed without verifying some or all of the + // requirements, such as for performance reasons. If the server does not + // verify the requirement, then it will treat the `Action` as distinct from + // another logically equivalent action if they hash differently. + // + // Returns a [google.longrunning.Operation][google.longrunning.Operation] + // describing the resulting execution, with eventual `response` + // [ExecuteResponse][google.devtools.remoteexecution.v1test.ExecuteResponse]. + // The `metadata` on the operation is of type + // [ExecuteOperationMetadata][google.devtools.remoteexecution.v1test.ExecuteOperationMetadata]. + // + // To query the operation, you can use the + // [Operations API][google.longrunning.Operations.GetOperation]. If you wish + // to allow the server to stream operations updates, rather than requiring + // client polling, you can use the + // [Watcher API][google.watcher.v1.Watcher.Watch] with the Operation's `name` + // as the `target`. + // + // When using the Watcher API, the initial `data` will be the `Operation` at + // the time of the request. Updates will be provided periodically by the + // server until the `Operation` completes, at which point the response message + // will (assuming no error) be at `data.response`. + // + // The server NEED NOT implement other methods or functionality of the + // Operation and Watcher APIs. + // + // Errors discovered during creation of the `Operation` will be reported + // as gRPC Status errors, while errors that occurred while running the + // action will be reported in the `status` field of the `ExecuteResponse`. The + // server MUST NOT set the `error` field of the `Operation` proto. + // The possible errors include: + // * `INVALID_ARGUMENT`: One or more arguments are invalid. + // * `FAILED_PRECONDITION`: One or more errors occurred in setting up the + // action requested, such as a missing input or command or no worker being + // available. 
The client may be able to fix the errors and retry. + // * `RESOURCE_EXHAUSTED`: There is insufficient quota of some resource to run + // the action. + // * `UNAVAILABLE`: Due to a transient condition, such as all workers being + // occupied (and the server does not support a queue), the action could not + // be started. The client should retry. + // * `INTERNAL`: An internal error occurred in the execution engine or the + // worker. + // * `DEADLINE_EXCEEDED`: The execution timed out. + // + // In the case of a missing input or command, the server SHOULD additionally + // send a [PreconditionFailure][google.rpc.PreconditionFailure] error detail + // where, for each requested blob not present in the CAS, there is a + // `Violation` with a `type` of `MISSING` and a `subject` of + // `"blobs/{hash}/{size}"` indicating the digest of the missing blob. + Execute(context.Context, *ExecuteRequest) (*longrunning.Operation, error) +} + +func RegisterExecutionServer(s *grpc.Server, srv ExecutionServer) { + s.RegisterService(&_Execution_serviceDesc, srv) +} + +func _Execution_Execute_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ExecutionServer).Execute(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.Execution/Execute", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ExecutionServer).Execute(ctx, req.(*ExecuteRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Execution_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.remoteexecution.v1test.Execution", + HandlerType: (*ExecutionServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Execute", + Handler: _Execution_Execute_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/remoteexecution/v1test/remote_execution.proto", +} + +// ActionCacheClient is the client API for ActionCache service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ActionCacheClient interface { + // Retrieve a cached execution result. + // + // Errors: + // * `NOT_FOUND`: The requested `ActionResult` is not in the cache. + GetActionResult(ctx context.Context, in *GetActionResultRequest, opts ...grpc.CallOption) (*ActionResult, error) + // Upload a new execution result. + // + // This method is intended for servers which implement the distributed cache + // independently of the + // [Execution][google.devtools.remoteexecution.v1test.Execution] API. As a + // result, it is OPTIONAL for servers to implement. + // + // Errors: + // * `NOT_IMPLEMENTED`: This method is not supported by the server. + // * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the + // entry to the cache. 
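+ // A minimal client-side sketch of consulting the cache before falling back
+ // to Execution.Execute, assuming an ActionCacheClient named ac plus the
+ // google.golang.org/grpc/codes and google.golang.org/grpc/status packages
+ // (the latter imported as grpcstatus to avoid clashing with this file's
+ // status import):
+ //
+ //   result, err := ac.GetActionResult(ctx, &GetActionResultRequest{
+ //       InstanceName: instance,
+ //       ActionDigest: actionDigest,
+ //   })
+ //   if grpcstatus.Code(err) == codes.NotFound {
+ //       // Cache miss: execute the action instead.
+ //   } else if err != nil {
+ //       return err
+ //   }
+ //   _ = result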
+ UpdateActionResult(ctx context.Context, in *UpdateActionResultRequest, opts ...grpc.CallOption) (*ActionResult, error) +} + +type actionCacheClient struct { + cc *grpc.ClientConn +} + +func NewActionCacheClient(cc *grpc.ClientConn) ActionCacheClient { + return &actionCacheClient{cc} +} + +func (c *actionCacheClient) GetActionResult(ctx context.Context, in *GetActionResultRequest, opts ...grpc.CallOption) (*ActionResult, error) { + out := new(ActionResult) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.ActionCache/GetActionResult", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *actionCacheClient) UpdateActionResult(ctx context.Context, in *UpdateActionResultRequest, opts ...grpc.CallOption) (*ActionResult, error) { + out := new(ActionResult) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.ActionCache/UpdateActionResult", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ActionCacheServer is the server API for ActionCache service. +type ActionCacheServer interface { + // Retrieve a cached execution result. + // + // Errors: + // * `NOT_FOUND`: The requested `ActionResult` is not in the cache. + GetActionResult(context.Context, *GetActionResultRequest) (*ActionResult, error) + // Upload a new execution result. + // + // This method is intended for servers which implement the distributed cache + // independently of the + // [Execution][google.devtools.remoteexecution.v1test.Execution] API. As a + // result, it is OPTIONAL for servers to implement. + // + // Errors: + // * `NOT_IMPLEMENTED`: This method is not supported by the server. + // * `RESOURCE_EXHAUSTED`: There is insufficient storage space to add the + // entry to the cache. + UpdateActionResult(context.Context, *UpdateActionResultRequest) (*ActionResult, error) +} + +func RegisterActionCacheServer(s *grpc.Server, srv ActionCacheServer) { + s.RegisterService(&_ActionCache_serviceDesc, srv) +} + +func _ActionCache_GetActionResult_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetActionResultRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ActionCacheServer).GetActionResult(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.ActionCache/GetActionResult", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ActionCacheServer).GetActionResult(ctx, req.(*GetActionResultRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ActionCache_UpdateActionResult_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateActionResultRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ActionCacheServer).UpdateActionResult(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.ActionCache/UpdateActionResult", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ActionCacheServer).UpdateActionResult(ctx, req.(*UpdateActionResultRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ActionCache_serviceDesc = grpc.ServiceDesc{ + ServiceName: 
"google.devtools.remoteexecution.v1test.ActionCache", + HandlerType: (*ActionCacheServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetActionResult", + Handler: _ActionCache_GetActionResult_Handler, + }, + { + MethodName: "UpdateActionResult", + Handler: _ActionCache_UpdateActionResult_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/remoteexecution/v1test/remote_execution.proto", +} + +// ContentAddressableStorageClient is the client API for ContentAddressableStorage service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ContentAddressableStorageClient interface { + // Determine if blobs are present in the CAS. + // + // Clients can use this API before uploading blobs to determine which ones are + // already present in the CAS and do not need to be uploaded again. + // + // There are no method-specific errors. + FindMissingBlobs(ctx context.Context, in *FindMissingBlobsRequest, opts ...grpc.CallOption) (*FindMissingBlobsResponse, error) + // Upload many blobs at once. + // + // The client MUST NOT upload blobs with a combined total size of more than 10 + // MiB using this API. Such requests should either be split into smaller + // chunks or uploaded using the + // [ByteStream API][google.bytestream.ByteStream], as appropriate. + // + // This request is equivalent to calling [UpdateBlob][] on each individual + // blob, in parallel. The requests may succeed or fail independently. + // + // Errors: + // * `INVALID_ARGUMENT`: The client attempted to upload more than 10 MiB of + // data. + // + // Individual requests may return the following errors, additionally: + // * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob. + // * `INVALID_ARGUMENT`: The + // [Digest][google.devtools.remoteexecution.v1test.Digest] does not match the + // provided data. + BatchUpdateBlobs(ctx context.Context, in *BatchUpdateBlobsRequest, opts ...grpc.CallOption) (*BatchUpdateBlobsResponse, error) + // Fetch the entire directory tree rooted at a node. + // + // This request must be targeted at a + // [Directory][google.devtools.remoteexecution.v1test.Directory] stored in the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage] + // (CAS). The server will enumerate the `Directory` tree recursively and + // return every node descended from the root. + // The exact traversal order is unspecified and, unless retrieving subsequent + // pages from an earlier request, is not guaranteed to be stable across + // multiple invocations of `GetTree`. + // + // If part of the tree is missing from the CAS, the server will return the + // portion present and omit the rest. + // + // * `NOT_FOUND`: The requested tree root is not present in the CAS. + GetTree(ctx context.Context, in *GetTreeRequest, opts ...grpc.CallOption) (*GetTreeResponse, error) +} + +type contentAddressableStorageClient struct { + cc *grpc.ClientConn +} + +func NewContentAddressableStorageClient(cc *grpc.ClientConn) ContentAddressableStorageClient { + return &contentAddressableStorageClient{cc} +} + +func (c *contentAddressableStorageClient) FindMissingBlobs(ctx context.Context, in *FindMissingBlobsRequest, opts ...grpc.CallOption) (*FindMissingBlobsResponse, error) { + out := new(FindMissingBlobsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/FindMissingBlobs", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *contentAddressableStorageClient) BatchUpdateBlobs(ctx context.Context, in *BatchUpdateBlobsRequest, opts ...grpc.CallOption) (*BatchUpdateBlobsResponse, error) { + out := new(BatchUpdateBlobsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/BatchUpdateBlobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *contentAddressableStorageClient) GetTree(ctx context.Context, in *GetTreeRequest, opts ...grpc.CallOption) (*GetTreeResponse, error) { + out := new(GetTreeResponse) + err := c.cc.Invoke(ctx, "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/GetTree", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ContentAddressableStorageServer is the server API for ContentAddressableStorage service. +type ContentAddressableStorageServer interface { + // Determine if blobs are present in the CAS. + // + // Clients can use this API before uploading blobs to determine which ones are + // already present in the CAS and do not need to be uploaded again. + // + // There are no method-specific errors. + FindMissingBlobs(context.Context, *FindMissingBlobsRequest) (*FindMissingBlobsResponse, error) + // Upload many blobs at once. + // + // The client MUST NOT upload blobs with a combined total size of more than 10 + // MiB using this API. Such requests should either be split into smaller + // chunks or uploaded using the + // [ByteStream API][google.bytestream.ByteStream], as appropriate. + // + // This request is equivalent to calling [UpdateBlob][] on each individual + // blob, in parallel. The requests may succeed or fail independently. + // + // Errors: + // * `INVALID_ARGUMENT`: The client attempted to upload more than 10 MiB of + // data. + // + // Individual requests may return the following errors, additionally: + // * `RESOURCE_EXHAUSTED`: There is insufficient disk quota to store the blob. + // * `INVALID_ARGUMENT`: The + // [Digest][google.devtools.remoteexecution.v1test.Digest] does not match the + // provided data. + BatchUpdateBlobs(context.Context, *BatchUpdateBlobsRequest) (*BatchUpdateBlobsResponse, error) + // Fetch the entire directory tree rooted at a node. + // + // This request must be targeted at a + // [Directory][google.devtools.remoteexecution.v1test.Directory] stored in the + // [ContentAddressableStorage][google.devtools.remoteexecution.v1test.ContentAddressableStorage] + // (CAS). The server will enumerate the `Directory` tree recursively and + // return every node descended from the root. + // The exact traversal order is unspecified and, unless retrieving subsequent + // pages from an earlier request, is not guaranteed to be stable across + // multiple invocations of `GetTree`. + // + // If part of the tree is missing from the CAS, the server will return the + // portion present and omit the rest. + // + // * `NOT_FOUND`: The requested tree root is not present in the CAS. 
+ GetTree(context.Context, *GetTreeRequest) (*GetTreeResponse, error) +} + +func RegisterContentAddressableStorageServer(s *grpc.Server, srv ContentAddressableStorageServer) { + s.RegisterService(&_ContentAddressableStorage_serviceDesc, srv) +} + +func _ContentAddressableStorage_FindMissingBlobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FindMissingBlobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContentAddressableStorageServer).FindMissingBlobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/FindMissingBlobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContentAddressableStorageServer).FindMissingBlobs(ctx, req.(*FindMissingBlobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContentAddressableStorage_BatchUpdateBlobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdateBlobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContentAddressableStorageServer).BatchUpdateBlobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/BatchUpdateBlobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContentAddressableStorageServer).BatchUpdateBlobs(ctx, req.(*BatchUpdateBlobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ContentAddressableStorage_GetTree_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTreeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ContentAddressableStorageServer).GetTree(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteexecution.v1test.ContentAddressableStorage/GetTree", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ContentAddressableStorageServer).GetTree(ctx, req.(*GetTreeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ContentAddressableStorage_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.remoteexecution.v1test.ContentAddressableStorage", + HandlerType: (*ContentAddressableStorageServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "FindMissingBlobs", + Handler: _ContentAddressableStorage_FindMissingBlobs_Handler, + }, + { + MethodName: "BatchUpdateBlobs", + Handler: _ContentAddressableStorage_BatchUpdateBlobs_Handler, + }, + { + MethodName: "GetTree", + Handler: _ContentAddressableStorage_GetTree_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/remoteexecution/v1test/remote_execution.proto", +} + +func init() { + proto.RegisterFile("google/devtools/remoteexecution/v1test/remote_execution.proto", fileDescriptor_remote_execution_3cbae0ca1c4e63f2) +} + +var fileDescriptor_remote_execution_3cbae0ca1c4e63f2 = []byte{ + // 2025 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x59, 0xdd, 0x6f, 0x23, 0x57, + 0x15, 0x67, 0xec, 0x24, 
0xb6, 0x8f, 0x9d, 0x75, 0xf6, 0x76, 0xe9, 0x7a, 0xdd, 0x2e, 0x4a, 0xa7, + 0xa2, 0x8a, 0xa2, 0x62, 0xb3, 0xde, 0x96, 0x85, 0x54, 0xa5, 0x6c, 0x1c, 0x27, 0x8d, 0x9a, 0xaf, + 0x4e, 0xe2, 0x74, 0xbb, 0xaa, 0x34, 0x9d, 0x78, 0x6e, 0xc6, 0xa3, 0xd8, 0x73, 0xcd, 0xbd, 0xd7, + 0x69, 0xd2, 0x65, 0x79, 0xe0, 0x05, 0x09, 0x04, 0x12, 0x54, 0x08, 0x24, 0x78, 0x42, 0x42, 0x48, + 0x88, 0x27, 0xfe, 0x00, 0x24, 0xf8, 0x03, 0x78, 0x80, 0x17, 0x9e, 0x11, 0x2f, 0xbc, 0xf1, 0xdc, + 0x07, 0x84, 0xee, 0xc7, 0x7c, 0xd8, 0xc9, 0xb2, 0x63, 0x67, 0x57, 0xe2, 0xcd, 0x73, 0xce, 0x3d, + 0xbf, 0xf3, 0x79, 0xcf, 0x39, 0x33, 0x86, 0xb7, 0x3d, 0x42, 0xbc, 0x1e, 0xae, 0xbb, 0xf8, 0x94, + 0x13, 0xd2, 0x63, 0x75, 0x8a, 0xfb, 0x84, 0x63, 0x7c, 0x86, 0x3b, 0x43, 0xee, 0x93, 0xa0, 0x7e, + 0x7a, 0x87, 0x63, 0xc6, 0x35, 0xd9, 0x8e, 0xe8, 0xb5, 0x01, 0x25, 0x9c, 0xa0, 0xd7, 0x94, 0x78, + 0x2d, 0x14, 0xaf, 0x8d, 0x89, 0xd7, 0x94, 0x78, 0xf5, 0x65, 0xad, 0xc6, 0x19, 0xf8, 0x75, 0x27, + 0x08, 0x08, 0x77, 0x04, 0x97, 0x29, 0x94, 0xea, 0xab, 0x9a, 0xdb, 0x23, 0x81, 0x47, 0x87, 0x41, + 0xe0, 0x07, 0x5e, 0x9d, 0x0c, 0x30, 0x1d, 0x39, 0xf4, 0x25, 0x7d, 0x48, 0x3e, 0x1d, 0x0d, 0x8f, + 0xeb, 0xee, 0x50, 0x1d, 0xd0, 0xfc, 0x9b, 0x9a, 0x4f, 0x07, 0x9d, 0x3a, 0xe3, 0x0e, 0x1f, 0x6a, + 0x41, 0xf3, 0x0f, 0x59, 0x98, 0xbb, 0xdf, 0x11, 0x27, 0x51, 0x1b, 0xae, 0x75, 0x48, 0xbf, 0xef, + 0x04, 0xae, 0xed, 0xfa, 0x1e, 0x66, 0xbc, 0x62, 0x2c, 0x1a, 0x4b, 0xc5, 0x46, 0xad, 0x96, 0xce, + 0x8f, 0xda, 0x9a, 0x94, 0xb2, 0xe6, 0x35, 0x8a, 0x7a, 0x44, 0x0f, 0xe1, 0xba, 0x1f, 0x0c, 0x86, + 0xdc, 0xa6, 0x84, 0xf0, 0x10, 0x39, 0x33, 0x15, 0x72, 0x59, 0x02, 0x59, 0x84, 0x70, 0x8d, 0xfd, + 0x0a, 0x94, 0xc8, 0x90, 0x0b, 0xf0, 0x63, 0xbf, 0x87, 0x59, 0x25, 0xbb, 0x98, 0x5d, 0x2a, 0x58, + 0x45, 0x45, 0x5b, 0x17, 0x24, 0xf4, 0x15, 0x40, 0xfa, 0x88, 0xeb, 0x53, 0xdc, 0xe1, 0x84, 0xfa, + 0x98, 0x55, 0x66, 0xe4, 0xc1, 0xeb, 0x8a, 0xb3, 0x16, 0x33, 0xd0, 0x16, 0xe4, 0x07, 0x3d, 0x87, + 0x1f, 0x13, 0xda, 0xaf, 0xcc, 0x4a, 0x23, 0xbf, 0x9a, 0xd6, 0xc8, 0x3d, 0x2d, 0x67, 0x45, 0x08, + 0xe8, 0x2e, 0xe4, 0xb8, 0xdf, 0xc7, 0x64, 0xc8, 0x2b, 0x73, 0x12, 0xec, 0x56, 0x08, 0x16, 0x26, + 0xaa, 0xb6, 0xa6, 0x13, 0x65, 0x85, 0x27, 0xd1, 0x22, 0x94, 0x5c, 0x62, 0x07, 0x84, 0xdb, 0x1d, + 0xa7, 0xd3, 0xc5, 0x95, 0xdc, 0xa2, 0xb1, 0x94, 0xb7, 0xc0, 0x25, 0x3b, 0x84, 0x37, 0x05, 0xc5, + 0xfc, 0x87, 0x01, 0xb9, 0xa6, 0x0a, 0x32, 0x7a, 0x19, 0x0a, 0x0e, 0xf5, 0x86, 0x7d, 0x1c, 0x70, + 0x56, 0x31, 0xa4, 0x5b, 0x31, 0x01, 0x9d, 0xc1, 0x17, 0x71, 0x70, 0xea, 0x53, 0x12, 0x88, 0x67, + 0xfb, 0xd4, 0xa1, 0xbe, 0x73, 0x24, 0x22, 0x95, 0x59, 0xcc, 0x2e, 0x15, 0x1b, 0xcd, 0xb4, 0xbe, + 0x69, 0x6d, 0xb5, 0x56, 0x0c, 0x76, 0xa8, 0xb1, 0xac, 0x1b, 0xf8, 0x22, 0x91, 0x55, 0xdf, 0x81, + 0x17, 0x2e, 0x39, 0x8c, 0x10, 0xcc, 0x04, 0x4e, 0x1f, 0xcb, 0xd2, 0x2a, 0x58, 0xf2, 0x37, 0xba, + 0x01, 0xb3, 0xa7, 0x4e, 0x6f, 0x88, 0x65, 0x55, 0x14, 0x2c, 0xf5, 0x60, 0xfe, 0xd2, 0x80, 0x7c, + 0x18, 0x52, 0xf4, 0x21, 0xc0, 0x80, 0x8a, 0xaa, 0xe7, 0x22, 0x7b, 0x86, 0x34, 0xfe, 0x1b, 0x93, + 0x26, 0xa6, 0xb6, 0xa7, 0x20, 0xce, 0xad, 0x04, 0x58, 0xf5, 0x0d, 0xc8, 0x87, 0xf4, 0x09, 0xac, + 0xfb, 0xbd, 0x01, 0x85, 0xb0, 0x6e, 0xce, 0xd1, 0x3a, 0xcc, 0xaa, 0x02, 0x54, 0x96, 0xa5, 0x2e, + 0x19, 0x51, 0xa2, 0x3b, 0xc4, 0xc5, 0x96, 0x12, 0x47, 0x1f, 0x40, 0x31, 0x59, 0xa5, 0x2a, 0x49, + 0x6f, 0xa6, 0xbf, 0x25, 0xda, 0x1e, 0x09, 0x99, 0x44, 0x32, 0x7f, 0x68, 0x40, 0x3e, 0x54, 0x76, + 0xa9, 0x97, 0xeb, 0x30, 0x77, 0xa5, 0xab, 0xa9, 0xa5, 0xd1, 0xab, 0x30, 0xef, 0x33, 0xdd, 0x09, + 0x45, 0xc2, 0x2b, 0x33, 0xb2, 0x7a, 0x4b, 0x3e, 
0x6b, 0x45, 0x34, 0xf3, 0x04, 0xe6, 0x47, 0x6c, + 0x7d, 0x9e, 0x16, 0x99, 0x6f, 0xc1, 0x9c, 0xee, 0x16, 0x08, 0x66, 0xba, 0x0e, 0xeb, 0x86, 0x5a, + 0xc4, 0x6f, 0x74, 0x1b, 0x80, 0xf9, 0x9f, 0x62, 0xfb, 0xe8, 0x9c, 0xcb, 0x80, 0x1b, 0x4b, 0x59, + 0xab, 0x20, 0x28, 0xab, 0x82, 0x60, 0xfe, 0x35, 0x0b, 0x25, 0xd5, 0x1e, 0x2d, 0xcc, 0x86, 0x3d, + 0x8e, 0xda, 0x63, 0x1d, 0x47, 0xa5, 0xa8, 0x91, 0xd6, 0xb6, 0xdd, 0xa8, 0x33, 0x8d, 0x76, 0xa9, + 0xe3, 0x4b, 0xbb, 0x54, 0x56, 0x82, 0xdf, 0x9b, 0x0c, 0x3c, 0x8a, 0xec, 0x65, 0xed, 0xed, 0x25, + 0x28, 0xe0, 0x33, 0x9f, 0xdb, 0x1d, 0xe2, 0xaa, 0xd4, 0xcc, 0x5a, 0x79, 0x41, 0x68, 0x8a, 0x2c, + 0x88, 0x58, 0x70, 0x97, 0x88, 0x56, 0xed, 0x7c, 0x22, 0xbb, 0x5f, 0xc9, 0x2a, 0x28, 0x8a, 0xe5, + 0x7c, 0x82, 0xf6, 0x61, 0x5e, 0xb3, 0x75, 0x5e, 0xe6, 0xa6, 0xca, 0x4b, 0x49, 0x81, 0xe8, 0x9c, + 0x28, 0x9d, 0x98, 0x52, 0xa9, 0x33, 0x17, 0xe9, 0xc4, 0x94, 0xc6, 0x3a, 0x05, 0x5b, 0xeb, 0xcc, + 0x4f, 0xad, 0x13, 0x53, 0xaa, 0x9e, 0xcc, 0xdf, 0x1a, 0x00, 0x71, 0x22, 0x44, 0x59, 0x0c, 0x1c, + 0x1e, 0x95, 0x85, 0xf8, 0xfd, 0xcc, 0xae, 0x43, 0x05, 0x72, 0x1d, 0x12, 0x70, 0x1c, 0xf0, 0x4a, + 0x56, 0xfa, 0x16, 0x3e, 0xa6, 0xbb, 0x28, 0xbf, 0x32, 0x60, 0xe6, 0x80, 0x62, 0x8c, 0x5a, 0x30, + 0x23, 0xc6, 0xa7, 0x9e, 0xc8, 0x77, 0x26, 0xee, 0x08, 0x96, 0x14, 0x47, 0xdb, 0x90, 0xef, 0x74, + 0xfd, 0x9e, 0x4b, 0x71, 0xa0, 0x2b, 0x77, 0x0a, 0xa8, 0x08, 0xc2, 0xfc, 0xa3, 0x01, 0xe5, 0xb1, + 0xa2, 0x7b, 0xae, 0xd1, 0xdc, 0x85, 0x22, 0xa7, 0x18, 0x87, 0xb5, 0x90, 0x9d, 0x0a, 0x0c, 0x04, + 0x84, 0xae, 0x84, 0xcf, 0x32, 0x70, 0x4d, 0x85, 0x1b, 0x5b, 0xf8, 0xdb, 0xc3, 0xb0, 0x81, 0x05, + 0x8c, 0x3b, 0x41, 0x07, 0xdb, 0x89, 0x9e, 0x54, 0x0a, 0x89, 0x3b, 0xba, 0x37, 0x39, 0xb2, 0x2b, + 0x4c, 0xea, 0x90, 0xee, 0x25, 0x5a, 0x1a, 0x2d, 0xc3, 0x75, 0x76, 0xe2, 0x0f, 0xd4, 0xa0, 0xb7, + 0x7b, 0x84, 0x9c, 0x0c, 0x07, 0xd2, 0xad, 0xbc, 0x55, 0x16, 0x0c, 0x39, 0xee, 0xb7, 0x24, 0x19, + 0xdd, 0x85, 0x17, 0x39, 0xe1, 0x4e, 0xcf, 0x56, 0xdb, 0x94, 0x68, 0x3f, 0x76, 0x87, 0x0c, 0x03, + 0xae, 0xef, 0xf1, 0x0b, 0x92, 0xbb, 0x19, 0xe8, 0xb2, 0x6e, 0x0a, 0xd6, 0xa5, 0x42, 0xaa, 0xd5, + 0xcd, 0xca, 0x56, 0x37, 0x26, 0xa4, 0x9a, 0xde, 0x19, 0xe4, 0xb6, 0x88, 0x27, 0xef, 0x46, 0x9c, + 0x39, 0xe3, 0x4a, 0x99, 0xfb, 0x32, 0x5c, 0xeb, 0x0e, 0xfb, 0x4e, 0x60, 0x53, 0xec, 0xb8, 0xb2, + 0xdc, 0x33, 0xd2, 0xcb, 0x79, 0x49, 0xb5, 0x34, 0xd1, 0xfc, 0x41, 0x16, 0xca, 0x51, 0x3e, 0xd8, + 0x80, 0x04, 0x0c, 0xa3, 0x2d, 0x98, 0xa3, 0xb2, 0xf7, 0x6a, 0x13, 0xde, 0x98, 0x30, 0xd6, 0x52, + 0xd6, 0xd2, 0x18, 0x22, 0xbd, 0x32, 0xd8, 0xae, 0xad, 0x41, 0x95, 0x1d, 0x25, 0x45, 0xd4, 0x4d, + 0x7e, 0x19, 0xe6, 0xd4, 0x92, 0xac, 0x4b, 0x0c, 0x85, 0x2a, 0xe9, 0xa0, 0x53, 0xdb, 0x97, 0x1c, + 0x4b, 0x9f, 0x40, 0x5d, 0x28, 0x32, 0x4c, 0x4f, 0x31, 0xb5, 0x7b, 0xc4, 0x53, 0x8b, 0x65, 0xb1, + 0xb1, 0x91, 0xd6, 0xc6, 0x31, 0x67, 0x6b, 0xfb, 0x12, 0x6a, 0x8b, 0x78, 0xac, 0x15, 0x70, 0x7a, + 0x6e, 0x01, 0x8b, 0x08, 0xd5, 0x00, 0xca, 0x63, 0x6c, 0xb4, 0x00, 0xd9, 0x13, 0x7c, 0xae, 0x4b, + 0x54, 0xfc, 0x44, 0xad, 0xe4, 0xb6, 0x52, 0x6c, 0xd4, 0xd3, 0x1a, 0xa2, 0x13, 0xae, 0xd7, 0x9b, + 0x95, 0xcc, 0xd7, 0x0d, 0xf3, 0xf3, 0x0c, 0x54, 0xb4, 0x7d, 0xbb, 0xe1, 0xfb, 0xc6, 0x36, 0xe6, + 0x8e, 0xeb, 0x70, 0x07, 0x7d, 0x04, 0xb3, 0x8c, 0x3b, 0x9e, 0xba, 0x1e, 0xd7, 0x1a, 0xeb, 0x13, + 0x3a, 0x7c, 0x01, 0x50, 0x84, 0xd5, 0xc3, 0x96, 0x02, 0x15, 0x6d, 0x5f, 0xdd, 0x90, 0xab, 0xbd, + 0x2f, 0x94, 0x14, 0x88, 0x1e, 0x35, 0xaf, 0x03, 0xd2, 0xf3, 0x8b, 0x71, 0x8a, 0x9d, 0xbe, 0xba, + 0xde, 0x59, 0x19, 0xbb, 0x05, 0xc5, 0xd9, 0x97, 0x0c, 0x79, 0xc5, 0xd5, 
0x69, 0x31, 0x79, 0x92, + 0xa7, 0x67, 0xa2, 0xd3, 0x98, 0xd2, 0xf8, 0xb4, 0xb9, 0x0b, 0xb3, 0xd2, 0x01, 0x54, 0x84, 0x5c, + 0x7b, 0xe7, 0xbd, 0x9d, 0xdd, 0x0f, 0x76, 0x16, 0xbe, 0x80, 0xca, 0x50, 0x6c, 0xde, 0x6f, 0xbe, + 0xdb, 0xb2, 0x9b, 0xef, 0xb6, 0x9a, 0xef, 0x2d, 0x18, 0x08, 0x60, 0xee, 0xfd, 0x76, 0xab, 0xdd, + 0x5a, 0x5b, 0xc8, 0xa0, 0x79, 0x28, 0xb4, 0x1e, 0xb4, 0x9a, 0xed, 0x83, 0xcd, 0x9d, 0x8d, 0x85, + 0xac, 0x78, 0x6c, 0xee, 0x6e, 0xef, 0x6d, 0xb5, 0x0e, 0x5a, 0x6b, 0x0b, 0x33, 0xe6, 0x4f, 0x0d, + 0x78, 0x71, 0x03, 0xf3, 0x91, 0x1a, 0x9e, 0xa4, 0x43, 0x3d, 0x8f, 0x08, 0x9a, 0xff, 0x36, 0xe0, + 0x56, 0x7b, 0xe0, 0x3a, 0x1c, 0xff, 0x5f, 0xd9, 0x85, 0x3e, 0x8c, 0x40, 0xf5, 0xa5, 0xce, 0x5e, + 0xa1, 0x53, 0x68, 0x68, 0xf5, 0x64, 0xfe, 0xc4, 0x80, 0x9b, 0xeb, 0x7e, 0xe0, 0x6e, 0xfb, 0x8c, + 0xf9, 0x81, 0xb7, 0xda, 0x23, 0x47, 0x6c, 0x22, 0x87, 0xdf, 0x87, 0xd2, 0x51, 0x8f, 0x1c, 0x69, + 0x77, 0xc3, 0x85, 0x71, 0x52, 0x7f, 0x8b, 0x02, 0x43, 0xfd, 0x66, 0xe6, 0x77, 0xa0, 0x72, 0xd1, + 0x24, 0xdd, 0x2d, 0x3f, 0x86, 0x1b, 0x7d, 0x45, 0xb7, 0x9f, 0x81, 0x5a, 0xd4, 0x8f, 0x75, 0x84, + 0xda, 0xbf, 0x0b, 0xd7, 0x55, 0x0d, 0x08, 0x62, 0x18, 0x0a, 0xf9, 0xed, 0x40, 0x2e, 0x36, 0x57, + 0xfe, 0x76, 0x20, 0x51, 0xe2, 0x8d, 0x5d, 0x34, 0x07, 0x59, 0x24, 0x25, 0x4b, 0xfe, 0x36, 0x7f, + 0x66, 0xc0, 0xcd, 0x55, 0x87, 0x77, 0xba, 0xb1, 0x15, 0x93, 0x65, 0xa4, 0x0d, 0x79, 0xaa, 0xce, + 0x87, 0x61, 0x49, 0xfd, 0x26, 0x79, 0xc1, 0x71, 0x2b, 0x82, 0x32, 0x7f, 0x94, 0x81, 0xca, 0x45, + 0xbb, 0x74, 0x5a, 0x3c, 0x28, 0x50, 0xfd, 0x3b, 0x7c, 0x49, 0xdc, 0x4c, 0xab, 0xf4, 0x49, 0xa0, + 0xb5, 0xf0, 0x87, 0x15, 0x63, 0x57, 0xbf, 0x6f, 0x40, 0x3e, 0xd2, 0xba, 0x0b, 0xc5, 0x44, 0x11, + 0x4c, 0x99, 0x12, 0x88, 0x4b, 0x2f, 0x31, 0x18, 0x33, 0x4f, 0x1b, 0x8c, 0xe6, 0x9f, 0x0d, 0xb8, + 0xb6, 0x81, 0xb9, 0x58, 0x5f, 0x27, 0x4a, 0xcf, 0x2e, 0x14, 0xaf, 0xfe, 0xa5, 0x08, 0x68, 0xfc, + 0x91, 0xe8, 0x25, 0x28, 0x0c, 0x1c, 0x0f, 0xdb, 0xe2, 0xad, 0x4e, 0x76, 0x86, 0x59, 0x2b, 0x2f, + 0x08, 0xfb, 0xfe, 0xa7, 0xf2, 0x9d, 0x47, 0x32, 0x39, 0x39, 0xc1, 0x81, 0x6e, 0xef, 0xf2, 0xf8, + 0x81, 0x20, 0x98, 0x3f, 0x36, 0xa0, 0x1c, 0x39, 0xa1, 0xa3, 0xba, 0x3f, 0xfa, 0x92, 0x6e, 0x4c, + 0xbb, 0x47, 0x27, 0x51, 0xd0, 0x6b, 0x50, 0x0e, 0xf0, 0x19, 0xb7, 0x13, 0xc6, 0xa8, 0xef, 0x0d, + 0xf3, 0x82, 0xbc, 0x17, 0x19, 0xb4, 0x0d, 0xc5, 0x03, 0x42, 0x7a, 0x6b, 0x98, 0x3b, 0x7e, 0x4f, + 0xbe, 0xcf, 0x09, 0x6d, 0xc9, 0x68, 0xe6, 0x05, 0x41, 0x46, 0xf2, 0x15, 0x28, 0x49, 0xe6, 0x29, + 0xa6, 0x2c, 0xdc, 0x55, 0x0b, 0x56, 0x51, 0xd0, 0x0e, 0x15, 0x49, 0x74, 0xf4, 0xb2, 0xce, 0x4e, + 0x34, 0xda, 0x0f, 0xb5, 0x98, 0xab, 0x74, 0xe8, 0xb2, 0xb9, 0x9b, 0xd6, 0xc1, 0x84, 0x79, 0x4a, + 0x57, 0xc2, 0x56, 0xdd, 0xa5, 0x7d, 0x57, 0xdb, 0x92, 0x57, 0x84, 0x4d, 0x57, 0x8c, 0x5b, 0xa9, + 0xd4, 0x0f, 0x4e, 0x49, 0xc7, 0x09, 0x4f, 0xe9, 0xe1, 0x2c, 0x38, 0x9b, 0x11, 0x63, 0xd3, 0x45, + 0x2b, 0x70, 0xab, 0x43, 0x28, 0xc5, 0x3d, 0x87, 0x63, 0x37, 0x21, 0xc3, 0x84, 0x90, 0x4a, 0xe2, + 0xcd, 0xf8, 0x40, 0x2c, 0xca, 0x36, 0xdd, 0xc6, 0x6f, 0x0c, 0x28, 0xb4, 0x42, 0xa3, 0xd1, 0xcf, + 0x0d, 0xc8, 0xe9, 0x9d, 0x04, 0x7d, 0x6d, 0xe2, 0xad, 0x4d, 0x06, 0xae, 0x7a, 0x3b, 0x94, 0x4b, + 0x7c, 0xa2, 0xad, 0x45, 0x1b, 0x8e, 0xf9, 0xe6, 0xf7, 0xfe, 0xf6, 0xcf, 0xcf, 0x32, 0x75, 0x73, + 0x39, 0xfc, 0x5c, 0xfc, 0x68, 0xe4, 0x12, 0xbc, 0xbd, 0xbc, 0xfc, 0xb8, 0xae, 0xe2, 0xc0, 0x56, + 0x94, 0x2a, 0xbc, 0x62, 0x2c, 0x37, 0x3e, 0xcf, 0x42, 0x51, 0x0d, 0x26, 0xf9, 0x16, 0x80, 0xfe, + 0xa5, 0x4a, 0x71, 0xe4, 0x6b, 0xc4, 0x37, 0xd3, 0x5a, 0x7c, 0xf9, 0x2a, 0x51, 0x9d, 0x6a, 0x42, + 
0x9a, 0x1f, 0x4b, 0x87, 0x1e, 0xa2, 0x07, 0x4f, 0x75, 0x48, 0x09, 0xb0, 0xfa, 0xa3, 0x91, 0x99, + 0x5f, 0xeb, 0x3a, 0xac, 0xfb, 0x78, 0x9c, 0x18, 0x7f, 0x86, 0x79, 0x8c, 0xfe, 0x63, 0x00, 0xba, + 0xb8, 0x68, 0xa0, 0xfb, 0x93, 0xf5, 0xe9, 0x67, 0xe7, 0x31, 0x91, 0x1e, 0xfb, 0xd5, 0xe7, 0xe6, + 0xf1, 0xca, 0xe8, 0x02, 0xd3, 0xf8, 0xc5, 0x2c, 0xdc, 0x6a, 0xaa, 0x51, 0x78, 0xdf, 0x75, 0x29, + 0x66, 0x4c, 0xbc, 0x1f, 0xed, 0x73, 0x42, 0xc5, 0x92, 0xf9, 0x17, 0x03, 0x16, 0xc6, 0x37, 0x00, + 0xf4, 0x4e, 0xfa, 0x8f, 0x8e, 0x97, 0xae, 0x33, 0xd5, 0x6f, 0x4d, 0x0f, 0xa0, 0x3a, 0xa3, 0x79, + 0x4f, 0x86, 0xe9, 0x8e, 0xf9, 0xfa, 0xff, 0x08, 0x93, 0x98, 0x26, 0x6c, 0xe5, 0x38, 0x86, 0x58, + 0x31, 0x96, 0xa5, 0x43, 0xe3, 0x63, 0x2e, 0xbd, 0x43, 0x4f, 0xd8, 0x06, 0xd2, 0x3b, 0xf4, 0xa4, + 0x09, 0x3b, 0x81, 0x43, 0x47, 0x31, 0x84, 0x70, 0xe8, 0xef, 0x06, 0xe4, 0xf4, 0xdc, 0x48, 0xdf, + 0x56, 0x46, 0xa7, 0x65, 0xf5, 0xde, 0xc4, 0x72, 0xda, 0xea, 0x8f, 0xa4, 0xd5, 0x87, 0xe8, 0xe0, + 0x69, 0x56, 0xd7, 0x1f, 0x25, 0x26, 0x6d, 0x58, 0xa3, 0x49, 0x52, 0xb2, 0x42, 0x3d, 0xa5, 0x65, + 0xf5, 0x4f, 0x06, 0x2c, 0x77, 0x48, 0x3f, 0xa5, 0x71, 0xab, 0x37, 0x2c, 0x49, 0x8f, 0x3a, 0xee, + 0x1e, 0x25, 0x9c, 0xec, 0x19, 0x0f, 0xdb, 0x5a, 0xde, 0x23, 0x3d, 0x27, 0xf0, 0x6a, 0x84, 0x7a, + 0x75, 0x0f, 0x07, 0xf2, 0x6f, 0x91, 0xba, 0x62, 0x39, 0x03, 0x9f, 0x3d, 0xed, 0xaf, 0xb7, 0xb7, + 0xc6, 0xc8, 0xbf, 0xce, 0x64, 0xad, 0xd6, 0x83, 0xdf, 0x65, 0x6e, 0x6f, 0x28, 0xf4, 0x31, 0xe5, + 0xb5, 0xc3, 0x3b, 0x07, 0x98, 0xf1, 0xa3, 0x39, 0xa9, 0xe7, 0xee, 0x7f, 0x03, 0x00, 0x00, 0xff, + 0xff, 0xf0, 0x8a, 0x2f, 0x43, 0xe1, 0x1b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/bots.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/bots.pb.go new file mode 100644 index 0000000..c9524e7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/bots.pb.go @@ -0,0 +1,942 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/remoteworkers/v1test2/bots.proto + +package remoteworkers // import "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A coarse description of the status of the bot that the server uses to +// determine whether to assign the bot new leases. +type BotStatus int32 + +const ( + // Default value; do not use. + BotStatus_BOT_STATUS_UNSPECIFIED BotStatus = 0 + // The bot is healthy, and will accept leases as normal. 
+ BotStatus_OK BotStatus = 1 + // The bot is unhealthy and will not accept new leases. For example, the bot + // may have detected that available disk space is too low. This situation may + // resolve itself, but will typically require human intervention. + BotStatus_UNHEALTHY BotStatus = 2 + // The bot has been asked to reboot the host. The bot will not accept new + // leases; once all leases are complete, this session will no longer be + // updated but the bot will be expected to establish a new session after the + // reboot completes. + BotStatus_HOST_REBOOTING BotStatus = 3 + // The bot has been asked to shut down. As with HOST_REBOOTING, once all + // leases are completed, the session will no longer be updated and the bot + // will not be expected to establish a new session. + // + // Bots are typically only asked to shut down if its host computer will be + // modified in some way, such as deleting a VM. + BotStatus_BOT_TERMINATING BotStatus = 4 +) + +var BotStatus_name = map[int32]string{ + 0: "BOT_STATUS_UNSPECIFIED", + 1: "OK", + 2: "UNHEALTHY", + 3: "HOST_REBOOTING", + 4: "BOT_TERMINATING", +} +var BotStatus_value = map[string]int32{ + "BOT_STATUS_UNSPECIFIED": 0, + "OK": 1, + "UNHEALTHY": 2, + "HOST_REBOOTING": 3, + "BOT_TERMINATING": 4, +} + +func (x BotStatus) String() string { + return proto.EnumName(BotStatus_name, int32(x)) +} +func (BotStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{0} +} + +// The state of the lease. All leases start in the PENDING state. A bot can +// change PENDING to ACTIVE or (in the case of an error) COMPLETED, or from +// ACTIVE to COMPLETED. The server can change PENDING or ACTIVE to CANCELLED if +// it wants the bot to release its resources - for example, if the bot needs to +// be quarantined (it's producing bad output) or a cell needs to be drained. +type LeaseState int32 + +const ( + // Default value; do not use. + LeaseState_LEASE_STATE_UNSPECIFIED LeaseState = 0 + // Pending: the server expects the bot to accept this lease. This may only be + // set by the server. + LeaseState_PENDING LeaseState = 1 + // Active: the bot has accepted this lease. This may only be set by the bot. + LeaseState_ACTIVE LeaseState = 2 + // Completed: the bot is no longer leased. This may only be set by the bot, + // and the status field must be populated iff the state is COMPLETED. + LeaseState_COMPLETED LeaseState = 4 + // Cancelled: The bot should immediately release all resources associated with + // the lease. This may only be set by the server. + LeaseState_CANCELLED LeaseState = 5 +) + +var LeaseState_name = map[int32]string{ + 0: "LEASE_STATE_UNSPECIFIED", + 1: "PENDING", + 2: "ACTIVE", + 4: "COMPLETED", + 5: "CANCELLED", +} +var LeaseState_value = map[string]int32{ + "LEASE_STATE_UNSPECIFIED": 0, + "PENDING": 1, + "ACTIVE": 2, + "COMPLETED": 4, + "CANCELLED": 5, +} + +func (x LeaseState) String() string { + return proto.EnumName(LeaseState_name, int32(x)) +} +func (LeaseState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{1} +} + +// Possible administration actions. +type AdminTemp_Command int32 + +const ( + // Illegal value. + AdminTemp_UNSPECIFIED AdminTemp_Command = 0 + // Download and run a new version of the bot. `arg` will be a resource + // accessible via `ByteStream.Read` to obtain the new bot code. + AdminTemp_BOT_UPDATE AdminTemp_Command = 1 + // Restart the bot without downloading a new version. `arg` will be a + // message to log. 
+ AdminTemp_BOT_RESTART AdminTemp_Command = 2 + // Shut down the bot. `arg` will be a task resource name (similar to those + // in tasks.proto) that the bot can use to tell the server that it is + // terminating. + AdminTemp_BOT_TERMINATE AdminTemp_Command = 3 + // Restart the host computer. `arg` will be a message to log. + AdminTemp_HOST_RESTART AdminTemp_Command = 4 +) + +var AdminTemp_Command_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "BOT_UPDATE", + 2: "BOT_RESTART", + 3: "BOT_TERMINATE", + 4: "HOST_RESTART", +} +var AdminTemp_Command_value = map[string]int32{ + "UNSPECIFIED": 0, + "BOT_UPDATE": 1, + "BOT_RESTART": 2, + "BOT_TERMINATE": 3, + "HOST_RESTART": 4, +} + +func (x AdminTemp_Command) String() string { + return proto.EnumName(AdminTemp_Command_name, int32(x)) +} +func (AdminTemp_Command) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{2, 0} +} + +// Types of bot events. +type PostBotEventTempRequest_Type int32 + +const ( + // Illegal value. + PostBotEventTempRequest_UNSPECIFIED PostBotEventTempRequest_Type = 0 + // Interesting but harmless event. + PostBotEventTempRequest_INFO PostBotEventTempRequest_Type = 1 + // Error condition. + PostBotEventTempRequest_ERROR PostBotEventTempRequest_Type = 2 +) + +var PostBotEventTempRequest_Type_name = map[int32]string{ + 0: "UNSPECIFIED", + 1: "INFO", + 2: "ERROR", +} +var PostBotEventTempRequest_Type_value = map[string]int32{ + "UNSPECIFIED": 0, + "INFO": 1, + "ERROR": 2, +} + +func (x PostBotEventTempRequest_Type) String() string { + return proto.EnumName(PostBotEventTempRequest_Type_name, int32(x)) +} +func (PostBotEventTempRequest_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{5, 0} +} + +// A bot session represents the state of a bot while in continuous contact with +// the server for a period of time. The session includes information about the +// worker - that is, the *worker* (the physical or virtual hardware) is +// considered to be a property of the bot (the software agent running on that +// hardware), which is the reverse of real life, but more natural from the point +// of the view of this API, which communicates solely with the bot and not +// directly with the underlying worker. +type BotSession struct { + // The bot session name, as selected by the server. Output only during a call + // to CreateBotSession. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A unique bot ID within the farm used to persistently identify this bot over + // time (i.e., over multiple sessions). This ID must be unique within a + // farm. Typically, the bot ID will be the same as the name of the primary + // device in the worker (e.g., what you'd get from typing `uname -n` on *nix), + // but this is not required since a single device may allow multiple bots to + // run on it, each with access to different resources. What is important is + // that this ID is meaningful to humans, who might need to hunt a physical + // machine down to fix it. + // + // When CreateBotSession is successfully called with a bot_id, all prior + // sessions with the same ID are invalidated. If a bot attempts to update an + // invalid session, the server must reject that request, and may also + // quarantine the other bot with the same bot IDs (ie, stop sending it new + // leases and alert an admin). + BotId string `protobuf:"bytes,2,opt,name=bot_id,json=botId,proto3" json:"bot_id,omitempty"` + // The status of the bot. 
This must be populated in every call to + // UpdateBotSession. + Status BotStatus `protobuf:"varint,3,opt,name=status,proto3,enum=google.devtools.remoteworkers.v1test2.BotStatus" json:"status,omitempty"` + // A description of the worker hosting this bot. The Worker message is used + // here in the Status context (see Worker for more information). If multiple + // bots are running on the worker, this field should only describe the + // resources accessible from this bot. + // + // During the call to CreateBotSession, the server may make arbitrary changes + // to the worker's `server_properties` field (see that field for more + // information). Otherwise, this field is input-only. + Worker *Worker `protobuf:"bytes,4,opt,name=worker,proto3" json:"worker,omitempty"` + // A list of all leases that are a part of this session. See the Lease message + // for details. + Leases []*Lease `protobuf:"bytes,5,rep,name=leases,proto3" json:"leases,omitempty"` + // The time at which this bot session will expire, unless the bot calls + // UpdateBotSession again. Output only. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // The version of the bot code currently running. The server may use this + // information to issue an admin action to tell the bot to update itself. + Version string `protobuf:"bytes,7,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BotSession) Reset() { *m = BotSession{} } +func (m *BotSession) String() string { return proto.CompactTextString(m) } +func (*BotSession) ProtoMessage() {} +func (*BotSession) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{0} +} +func (m *BotSession) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BotSession.Unmarshal(m, b) +} +func (m *BotSession) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BotSession.Marshal(b, m, deterministic) +} +func (dst *BotSession) XXX_Merge(src proto.Message) { + xxx_messageInfo_BotSession.Merge(dst, src) +} +func (m *BotSession) XXX_Size() int { + return xxx_messageInfo_BotSession.Size(m) +} +func (m *BotSession) XXX_DiscardUnknown() { + xxx_messageInfo_BotSession.DiscardUnknown(m) +} + +var xxx_messageInfo_BotSession proto.InternalMessageInfo + +func (m *BotSession) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *BotSession) GetBotId() string { + if m != nil { + return m.BotId + } + return "" +} + +func (m *BotSession) GetStatus() BotStatus { + if m != nil { + return m.Status + } + return BotStatus_BOT_STATUS_UNSPECIFIED +} + +func (m *BotSession) GetWorker() *Worker { + if m != nil { + return m.Worker + } + return nil +} + +func (m *BotSession) GetLeases() []*Lease { + if m != nil { + return m.Leases + } + return nil +} + +func (m *BotSession) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func (m *BotSession) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +// A Lease is a lease that the scheduler has assigned to this bot. If the bot +// notices (by UpdateBotSession) that it has any leases in the PENDING state, it +// should call UpdateBotSession to put the leases into the ACTIVE state and +// start executing their assignments. 
+// +// All fields in this message are output-only, *except* the `state` and `status` +// fields. Note that repeated fields can only be updated as a unit, so on every +// update the bot must provide an update for *all* the leases the server expects +// it to report on. +// +// The scheduler *should* ensure that all leases scheduled to a bot can actually +// be accepted, but race conditions may occur. In such cases, the bot should +// attempt to accept the leases in the order they are listed by the server, to +// allow the server to control priorities. +// +// The server will remove COMPLETED leases from time to time, after which the +// bot shouldn't report on them any more (the server will ignore superfluous +// COMPLETED records). +type Lease struct { + // A short string uniquely identifing the lease within this bot session. + Id string `protobuf:"bytes,7,opt,name=id,proto3" json:"id,omitempty"` + // The actual work to be performed, if any. May be omitted by the server if + // the lease is not in the `PENDING` state. The message must be meaningful to + // the bot. Output only (must only be set by the server). + Payload *any.Any `protobuf:"bytes,8,opt,name=payload,proto3" json:"payload,omitempty"` + // Any result the bot wishes to provide about the lease. Must not be changed + // after the first call with the lease in the `COMPLETED` or `CANCELLED` + // state. Input only (must only be set by the bot, will not be echoed by the + // server). + Result *any.Any `protobuf:"bytes,9,opt,name=result,proto3" json:"result,omitempty"` + // The state of the lease. See LeaseState for more information. + State LeaseState `protobuf:"varint,2,opt,name=state,proto3,enum=google.devtools.remoteworkers.v1test2.LeaseState" json:"state,omitempty"` + // The final status of the lease (should be populated by the bot if the state + // is completed). This is the status of the lease, not of any task represented + // by the lease. For example, if the bot could not accept the lease because it + // asked for some resource the bot didn't have, this status will be + // FAILED_PRECONDITION. But if the assignment in the lease didn't execute + // correctly, this field will be `OK` while the failure of the assignment must + // communicated via the `result` field. + Status *status.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` + // The requirements that are being claimed by this lease. This field may be + // omitted by the server if the lease is not pending. + Requirements *Worker `protobuf:"bytes,4,opt,name=requirements,proto3" json:"requirements,omitempty"` + // The time at which this lease expires. The server *may* extend this over + // time, but due to race conditions, the bot is not *required* to respect any + // expiry date except the first one. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // DEPRECATED. The assignment should be provided to the bot via the `payload` + // field. Clients that wish to use a simple name (such as a queue of work + // provided elsewhere) should define a custom message type and encode it into + // `payload`. + Assignment string `protobuf:"bytes,1,opt,name=assignment,proto3" json:"assignment,omitempty"` // Deprecated: Do not use. + // DEPRECATED. Use `payload` instead. + InlineAssignment *any.Any `protobuf:"bytes,6,opt,name=inline_assignment,json=inlineAssignment,proto3" json:"inline_assignment,omitempty"` // Deprecated: Do not use. 
+ XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Lease) Reset() { *m = Lease{} } +func (m *Lease) String() string { return proto.CompactTextString(m) } +func (*Lease) ProtoMessage() {} +func (*Lease) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{1} +} +func (m *Lease) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Lease.Unmarshal(m, b) +} +func (m *Lease) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Lease.Marshal(b, m, deterministic) +} +func (dst *Lease) XXX_Merge(src proto.Message) { + xxx_messageInfo_Lease.Merge(dst, src) +} +func (m *Lease) XXX_Size() int { + return xxx_messageInfo_Lease.Size(m) +} +func (m *Lease) XXX_DiscardUnknown() { + xxx_messageInfo_Lease.DiscardUnknown(m) +} + +var xxx_messageInfo_Lease proto.InternalMessageInfo + +func (m *Lease) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Lease) GetPayload() *any.Any { + if m != nil { + return m.Payload + } + return nil +} + +func (m *Lease) GetResult() *any.Any { + if m != nil { + return m.Result + } + return nil +} + +func (m *Lease) GetState() LeaseState { + if m != nil { + return m.State + } + return LeaseState_LEASE_STATE_UNSPECIFIED +} + +func (m *Lease) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *Lease) GetRequirements() *Worker { + if m != nil { + return m.Requirements + } + return nil +} + +func (m *Lease) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +// Deprecated: Do not use. +func (m *Lease) GetAssignment() string { + if m != nil { + return m.Assignment + } + return "" +} + +// Deprecated: Do not use. +func (m *Lease) GetInlineAssignment() *any.Any { + if m != nil { + return m.InlineAssignment + } + return nil +} + +// AdminTemp is a prelimiary set of administration tasks. It's called "Temp" +// because we do not yet know the best way to represent admin tasks; it's +// possible that this will be entirely replaced in later versions of this API. +// If this message proves to be sufficient, it will be renamed in the alpha or +// beta release of this API. +// +// This message (suitably marshalled into a protobuf.Any) can be used as the +// inline_assignment field in a lease; the lease assignment field should simply +// be `"admin"` in these cases. +// +// This message is heavily based on Swarming administration tasks from the LUCI +// project (http://github.com/luci/luci-py/appengine/swarming). +type AdminTemp struct { + // The admin action; see `Command` for legal values. + Command AdminTemp_Command `protobuf:"varint,1,opt,name=command,proto3,enum=google.devtools.remoteworkers.v1test2.AdminTemp_Command" json:"command,omitempty"` + // The argument to the admin action; see `Command` for semantics. 
+ Arg string `protobuf:"bytes,2,opt,name=arg,proto3" json:"arg,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdminTemp) Reset() { *m = AdminTemp{} } +func (m *AdminTemp) String() string { return proto.CompactTextString(m) } +func (*AdminTemp) ProtoMessage() {} +func (*AdminTemp) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{2} +} +func (m *AdminTemp) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdminTemp.Unmarshal(m, b) +} +func (m *AdminTemp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdminTemp.Marshal(b, m, deterministic) +} +func (dst *AdminTemp) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdminTemp.Merge(dst, src) +} +func (m *AdminTemp) XXX_Size() int { + return xxx_messageInfo_AdminTemp.Size(m) +} +func (m *AdminTemp) XXX_DiscardUnknown() { + xxx_messageInfo_AdminTemp.DiscardUnknown(m) +} + +var xxx_messageInfo_AdminTemp proto.InternalMessageInfo + +func (m *AdminTemp) GetCommand() AdminTemp_Command { + if m != nil { + return m.Command + } + return AdminTemp_UNSPECIFIED +} + +func (m *AdminTemp) GetArg() string { + if m != nil { + return m.Arg + } + return "" +} + +// Request message for CreateBotSession. +type CreateBotSessionRequest struct { + // The farm resource. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The bot session to create. Server-assigned fields like name must be unset. + BotSession *BotSession `protobuf:"bytes,2,opt,name=bot_session,json=botSession,proto3" json:"bot_session,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateBotSessionRequest) Reset() { *m = CreateBotSessionRequest{} } +func (m *CreateBotSessionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateBotSessionRequest) ProtoMessage() {} +func (*CreateBotSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{3} +} +func (m *CreateBotSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateBotSessionRequest.Unmarshal(m, b) +} +func (m *CreateBotSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateBotSessionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateBotSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateBotSessionRequest.Merge(dst, src) +} +func (m *CreateBotSessionRequest) XXX_Size() int { + return xxx_messageInfo_CreateBotSessionRequest.Size(m) +} +func (m *CreateBotSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateBotSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateBotSessionRequest proto.InternalMessageInfo + +func (m *CreateBotSessionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateBotSessionRequest) GetBotSession() *BotSession { + if m != nil { + return m.BotSession + } + return nil +} + +// Request message for UpdateBotSession. +type UpdateBotSessionRequest struct { + // The bot session name. Must match bot_session.name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The bot session resource to update. + BotSession *BotSession `protobuf:"bytes,2,opt,name=bot_session,json=botSession,proto3" json:"bot_session,omitempty"` + // The fields on the bot that should be updated. 
See the BotSession resource + // for which fields are updatable by which caller. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateBotSessionRequest) Reset() { *m = UpdateBotSessionRequest{} } +func (m *UpdateBotSessionRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateBotSessionRequest) ProtoMessage() {} +func (*UpdateBotSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{4} +} +func (m *UpdateBotSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateBotSessionRequest.Unmarshal(m, b) +} +func (m *UpdateBotSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateBotSessionRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateBotSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateBotSessionRequest.Merge(dst, src) +} +func (m *UpdateBotSessionRequest) XXX_Size() int { + return xxx_messageInfo_UpdateBotSessionRequest.Size(m) +} +func (m *UpdateBotSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateBotSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateBotSessionRequest proto.InternalMessageInfo + +func (m *UpdateBotSessionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateBotSessionRequest) GetBotSession() *BotSession { + if m != nil { + return m.BotSession + } + return nil +} + +func (m *UpdateBotSessionRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for PostBotEventTemp +type PostBotEventTempRequest struct { + // The bot session name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The type of bot event. + Type PostBotEventTempRequest_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.devtools.remoteworkers.v1test2.PostBotEventTempRequest_Type" json:"type,omitempty"` + // A human-readable message. 
+ Msg string `protobuf:"bytes,3,opt,name=msg,proto3" json:"msg,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PostBotEventTempRequest) Reset() { *m = PostBotEventTempRequest{} } +func (m *PostBotEventTempRequest) String() string { return proto.CompactTextString(m) } +func (*PostBotEventTempRequest) ProtoMessage() {} +func (*PostBotEventTempRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_bots_0fa217a861a9a803, []int{5} +} +func (m *PostBotEventTempRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PostBotEventTempRequest.Unmarshal(m, b) +} +func (m *PostBotEventTempRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PostBotEventTempRequest.Marshal(b, m, deterministic) +} +func (dst *PostBotEventTempRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PostBotEventTempRequest.Merge(dst, src) +} +func (m *PostBotEventTempRequest) XXX_Size() int { + return xxx_messageInfo_PostBotEventTempRequest.Size(m) +} +func (m *PostBotEventTempRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PostBotEventTempRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PostBotEventTempRequest proto.InternalMessageInfo + +func (m *PostBotEventTempRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PostBotEventTempRequest) GetType() PostBotEventTempRequest_Type { + if m != nil { + return m.Type + } + return PostBotEventTempRequest_UNSPECIFIED +} + +func (m *PostBotEventTempRequest) GetMsg() string { + if m != nil { + return m.Msg + } + return "" +} + +func init() { + proto.RegisterType((*BotSession)(nil), "google.devtools.remoteworkers.v1test2.BotSession") + proto.RegisterType((*Lease)(nil), "google.devtools.remoteworkers.v1test2.Lease") + proto.RegisterType((*AdminTemp)(nil), "google.devtools.remoteworkers.v1test2.AdminTemp") + proto.RegisterType((*CreateBotSessionRequest)(nil), "google.devtools.remoteworkers.v1test2.CreateBotSessionRequest") + proto.RegisterType((*UpdateBotSessionRequest)(nil), "google.devtools.remoteworkers.v1test2.UpdateBotSessionRequest") + proto.RegisterType((*PostBotEventTempRequest)(nil), "google.devtools.remoteworkers.v1test2.PostBotEventTempRequest") + proto.RegisterEnum("google.devtools.remoteworkers.v1test2.BotStatus", BotStatus_name, BotStatus_value) + proto.RegisterEnum("google.devtools.remoteworkers.v1test2.LeaseState", LeaseState_name, LeaseState_value) + proto.RegisterEnum("google.devtools.remoteworkers.v1test2.AdminTemp_Command", AdminTemp_Command_name, AdminTemp_Command_value) + proto.RegisterEnum("google.devtools.remoteworkers.v1test2.PostBotEventTempRequest_Type", PostBotEventTempRequest_Type_name, PostBotEventTempRequest_Type_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BotsClient is the client API for Bots service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type BotsClient interface { + // CreateBotSession is called when the bot first joins the farm, and + // establishes a session ID to ensure that multiple machines do not register + // using the same name accidentally. 
+ CreateBotSession(ctx context.Context, in *CreateBotSessionRequest, opts ...grpc.CallOption) (*BotSession, error) + // UpdateBotSession must be called periodically by the bot (on a schedule + // determined by the server) to let the server know about its status, and to + // pick up new lease requests from the server. + UpdateBotSession(ctx context.Context, in *UpdateBotSessionRequest, opts ...grpc.CallOption) (*BotSession, error) + // PostBotEventTemp may be called by the bot to indicate that some exceptional + // event has occurred. This method is subject to change or removal in future + // revisions of this API; we may simply want to replace it with StackDriver or + // some other common interface. + PostBotEventTemp(ctx context.Context, in *PostBotEventTempRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type botsClient struct { + cc *grpc.ClientConn +} + +func NewBotsClient(cc *grpc.ClientConn) BotsClient { + return &botsClient{cc} +} + +func (c *botsClient) CreateBotSession(ctx context.Context, in *CreateBotSessionRequest, opts ...grpc.CallOption) (*BotSession, error) { + out := new(BotSession) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Bots/CreateBotSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *botsClient) UpdateBotSession(ctx context.Context, in *UpdateBotSessionRequest, opts ...grpc.CallOption) (*BotSession, error) { + out := new(BotSession) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Bots/UpdateBotSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *botsClient) PostBotEventTemp(ctx context.Context, in *PostBotEventTempRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Bots/PostBotEventTemp", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// BotsServer is the server API for Bots service. +type BotsServer interface { + // CreateBotSession is called when the bot first joins the farm, and + // establishes a session ID to ensure that multiple machines do not register + // using the same name accidentally. + CreateBotSession(context.Context, *CreateBotSessionRequest) (*BotSession, error) + // UpdateBotSession must be called periodically by the bot (on a schedule + // determined by the server) to let the server know about its status, and to + // pick up new lease requests from the server. + UpdateBotSession(context.Context, *UpdateBotSessionRequest) (*BotSession, error) + // PostBotEventTemp may be called by the bot to indicate that some exceptional + // event has occurred. This method is subject to change or removal in future + // revisions of this API; we may simply want to replace it with StackDriver or + // some other common interface. 
+ PostBotEventTemp(context.Context, *PostBotEventTempRequest) (*empty.Empty, error) +} + +func RegisterBotsServer(s *grpc.Server, srv BotsServer) { + s.RegisterService(&_Bots_serviceDesc, srv) +} + +func _Bots_CreateBotSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateBotSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BotsServer).CreateBotSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Bots/CreateBotSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BotsServer).CreateBotSession(ctx, req.(*CreateBotSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Bots_UpdateBotSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateBotSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BotsServer).UpdateBotSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Bots/UpdateBotSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BotsServer).UpdateBotSession(ctx, req.(*UpdateBotSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Bots_PostBotEventTemp_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PostBotEventTempRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(BotsServer).PostBotEventTemp(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Bots/PostBotEventTemp", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(BotsServer).PostBotEventTemp(ctx, req.(*PostBotEventTempRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Bots_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.remoteworkers.v1test2.Bots", + HandlerType: (*BotsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateBotSession", + Handler: _Bots_CreateBotSession_Handler, + }, + { + MethodName: "UpdateBotSession", + Handler: _Bots_UpdateBotSession_Handler, + }, + { + MethodName: "PostBotEventTemp", + Handler: _Bots_PostBotEventTemp_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/remoteworkers/v1test2/bots.proto", +} + +func init() { + proto.RegisterFile("google/devtools/remoteworkers/v1test2/bots.proto", fileDescriptor_bots_0fa217a861a9a803) +} + +var fileDescriptor_bots_0fa217a861a9a803 = []byte{ + // 1075 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0xc6, 0xce, 0x5f, 0x73, 0xd2, 0xa6, 0xee, 0x00, 0x5d, 0x93, 0x22, 0x11, 0x59, 0xaa, 0xb4, + 0x44, 0x8b, 0xdd, 0x0d, 0x42, 0x42, 0xad, 0x8a, 0x94, 0x1f, 0xef, 0x6e, 0x44, 0x36, 0x09, 0x13, + 0x6f, 0x57, 0x70, 0x93, 0x3a, 0x9b, 0x69, 0x64, 0x6d, 0xec, 0x71, 0x3d, 0x93, 0x85, 0x15, 0xea, + 0x0d, 0x12, 0x4f, 0xc0, 0x13, 0xc0, 0x15, 0xe2, 0x19, 0x10, 0x12, 0x37, 0xbc, 0x40, 0x5f, 0x81, + 0xbe, 0x07, 0x9a, 0xb1, 0x93, 0x4d, 0xb2, 
0xdd, 0xd6, 0x0b, 0xe2, 0x6e, 0x7e, 0xbe, 0xef, 0x9c, + 0x2f, 0xe7, 0x7c, 0x67, 0x62, 0x78, 0x30, 0xa5, 0x74, 0x3a, 0x23, 0xd6, 0x84, 0x9c, 0x71, 0x4a, + 0x67, 0xcc, 0x8a, 0x88, 0x4f, 0x39, 0xf9, 0x96, 0x46, 0xa7, 0x24, 0x62, 0xd6, 0xd9, 0x2e, 0x27, + 0x8c, 0xd7, 0xad, 0x31, 0xe5, 0xcc, 0x0c, 0x23, 0xca, 0x29, 0xba, 0x1f, 0x33, 0xcc, 0x05, 0xc3, + 0x5c, 0x63, 0x98, 0x09, 0xa3, 0xf2, 0x61, 0x12, 0xd8, 0x0d, 0x3d, 0xcb, 0x0d, 0x02, 0xca, 0x5d, + 0xee, 0xd1, 0x20, 0x09, 0x52, 0xa9, 0xa7, 0x4b, 0x1b, 0xef, 0x13, 0xce, 0x07, 0x09, 0x47, 0xee, + 0xc6, 0xf3, 0x67, 0x96, 0x1b, 0x9c, 0x27, 0x57, 0xf7, 0x36, 0xaf, 0x88, 0x1f, 0xf2, 0xc5, 0x65, + 0x75, 0xf3, 0xf2, 0x99, 0x47, 0x66, 0x93, 0x91, 0xef, 0xb2, 0xd3, 0x04, 0xf1, 0xd1, 0x26, 0x82, + 0x7b, 0x3e, 0x61, 0xdc, 0xf5, 0xc3, 0x04, 0xb0, 0x95, 0x00, 0xa2, 0xf0, 0xc4, 0x62, 0xdc, 0xe5, + 0xf3, 0xe4, 0x77, 0x18, 0xaf, 0x54, 0x80, 0x26, 0xe5, 0x43, 0xc2, 0x98, 0x47, 0x03, 0x84, 0x20, + 0x1b, 0xb8, 0x3e, 0xd1, 0x95, 0xaa, 0xb2, 0x5d, 0xc4, 0x72, 0x8d, 0xde, 0x87, 0xfc, 0x98, 0xf2, + 0x91, 0x37, 0xd1, 0x55, 0x79, 0x9a, 0x1b, 0x53, 0xde, 0x99, 0xa0, 0x03, 0xc8, 0xc7, 0x91, 0xf4, + 0x4c, 0x55, 0xd9, 0x2e, 0xd7, 0x1f, 0x98, 0xa9, 0xea, 0x6a, 0x8a, 0x6c, 0x92, 0x87, 0x13, 0x3e, + 0xb2, 0x21, 0x1f, 0x83, 0xf4, 0x6c, 0x55, 0xd9, 0x2e, 0xd5, 0x3f, 0x49, 0x19, 0xe9, 0x58, 0xee, + 0x71, 0x42, 0x46, 0x6d, 0xc8, 0xcf, 0x88, 0xcb, 0x08, 0xd3, 0x73, 0xd5, 0xcc, 0x76, 0xa9, 0xbe, + 0x93, 0x32, 0x4c, 0x57, 0x90, 0x70, 0xc2, 0x45, 0x8f, 0xa0, 0x44, 0xbe, 0x0b, 0xbd, 0x88, 0x8c, + 0x44, 0x0d, 0xf5, 0xbc, 0x54, 0x54, 0x59, 0x84, 0x5a, 0x14, 0xd8, 0x74, 0x16, 0x05, 0xc6, 0x10, + 0xc3, 0xc5, 0x01, 0xd2, 0xa1, 0x70, 0x46, 0x22, 0x51, 0x49, 0xbd, 0x20, 0x6b, 0xb5, 0xd8, 0x1a, + 0xaf, 0x32, 0x90, 0x93, 0x89, 0x50, 0x19, 0x54, 0x6f, 0x92, 0x5c, 0xab, 0xde, 0x04, 0x99, 0x50, + 0x08, 0xdd, 0xf3, 0x19, 0x75, 0x27, 0xfa, 0x0d, 0x99, 0xec, 0xbd, 0x4b, 0xc9, 0x1a, 0xc1, 0x39, + 0x5e, 0x80, 0xd0, 0x0e, 0xe4, 0x23, 0xc2, 0xe6, 0x33, 0xae, 0x17, 0xdf, 0x00, 0x4f, 0x30, 0x68, + 0x1f, 0x72, 0xa2, 0xca, 0x44, 0xf6, 0xae, 0x5c, 0xdf, 0xbd, 0x4e, 0x4d, 0x44, 0x9b, 0x08, 0x8e, + 0xf9, 0xa8, 0xb6, 0xd6, 0xee, 0x52, 0x1d, 0x2d, 0x22, 0x45, 0xe1, 0x89, 0xb9, 0xd1, 0xd0, 0xaf, + 0xe0, 0x66, 0x44, 0x9e, 0xcf, 0xbd, 0x88, 0xf8, 0x24, 0xe0, 0xec, 0xdf, 0xb5, 0x75, 0x2d, 0xc4, + 0x66, 0x5b, 0x72, 0xd7, 0x6a, 0x8b, 0x01, 0xe0, 0x32, 0xe6, 0x4d, 0x03, 0x11, 0x2b, 0xf6, 0x76, + 0x53, 0xd5, 0x15, 0xbc, 0x72, 0x8a, 0x6c, 0xb8, 0xe3, 0x05, 0x33, 0x2f, 0x20, 0xa3, 0x15, 0x68, + 0xfe, 0xea, 0x0a, 0xcb, 0x00, 0x5a, 0x4c, 0x69, 0x2c, 0x19, 0xc6, 0x4b, 0x05, 0x8a, 0x8d, 0x89, + 0xef, 0x05, 0x0e, 0xf1, 0x43, 0x84, 0xa1, 0x70, 0x42, 0x7d, 0xdf, 0x0d, 0x26, 0x32, 0x6b, 0xb9, + 0xfe, 0x79, 0xca, 0x1a, 0x2c, 0x43, 0x98, 0xad, 0x98, 0x8f, 0x17, 0x81, 0x90, 0x06, 0x19, 0x37, + 0x9a, 0x26, 0xb3, 0x28, 0x96, 0xc6, 0x53, 0x28, 0x24, 0x28, 0x74, 0x1b, 0x4a, 0x47, 0xbd, 0xe1, + 0xc0, 0x6e, 0x75, 0xf6, 0x3a, 0x76, 0x5b, 0x7b, 0x07, 0x95, 0x01, 0x9a, 0x7d, 0x67, 0x74, 0x34, + 0x68, 0x37, 0x1c, 0x5b, 0x53, 0x04, 0x40, 0xec, 0xb1, 0x3d, 0x74, 0x1a, 0xd8, 0xd1, 0x54, 0x74, + 0x07, 0x6e, 0x89, 0x03, 0xc7, 0xc6, 0x87, 0x9d, 0x9e, 0xc0, 0x64, 0x90, 0x06, 0x37, 0x0f, 0xfa, + 0xc3, 0x0b, 0x50, 0xd6, 0xf8, 0x51, 0x81, 0xad, 0x56, 0x44, 0x5c, 0x4e, 0x2e, 0xde, 0x0a, 0x4c, + 0x9e, 0xcf, 0x09, 0xe3, 0xe8, 0x2e, 0xe4, 0x43, 0x37, 0x5a, 0x16, 0x16, 0x27, 0x3b, 0x84, 0xa1, + 0x24, 0x9e, 0x0d, 0x16, 0xa3, 0xa5, 0xde, 0x52, 0x6a, 0xff, 0xad, 0xa4, 0x81, 0xf1, 0x72, 0x6d, + 0xfc, 0xa1, 0xc0, 0xd6, 0x51, 0x38, 0x79, 0xad, 0x8e, 0xd7, 0x3d, 
0x5d, 0xff, 0x83, 0x06, 0xe1, + 0xc4, 0xb9, 0x94, 0x20, 0x1f, 0xe0, 0x64, 0x1a, 0x2e, 0x3b, 0x71, 0x4f, 0xbc, 0xd1, 0x87, 0x2e, + 0x3b, 0xc5, 0x10, 0xc3, 0xc5, 0xda, 0xf8, 0x4b, 0x81, 0xad, 0x01, 0x65, 0xbc, 0x49, 0xb9, 0x7d, + 0x46, 0x02, 0x2e, 0x5a, 0xfc, 0xa6, 0x1f, 0x70, 0x0c, 0x59, 0x7e, 0x1e, 0x2e, 0xa6, 0xb7, 0x95, + 0x52, 0xf9, 0x15, 0x19, 0x4c, 0xe7, 0x3c, 0x24, 0x58, 0x06, 0x14, 0x2e, 0xf2, 0xd9, 0x54, 0xaa, + 0x2f, 0x62, 0xb1, 0x34, 0x76, 0x20, 0x2b, 0xee, 0x2f, 0x5b, 0xe8, 0x06, 0x64, 0x3b, 0xbd, 0xbd, + 0xbe, 0xa6, 0xa0, 0x22, 0xe4, 0x6c, 0x8c, 0xfb, 0x58, 0x53, 0x6b, 0x53, 0x28, 0x2e, 0x1f, 0x72, + 0x54, 0x81, 0xbb, 0xc2, 0x43, 0x43, 0xa7, 0xe1, 0x1c, 0x0d, 0x47, 0xeb, 0xec, 0x3c, 0xa8, 0xfd, + 0x2f, 0x35, 0x05, 0xdd, 0x82, 0xe2, 0x51, 0xef, 0xc0, 0x6e, 0x74, 0x9d, 0x83, 0xaf, 0x35, 0x15, + 0x21, 0x28, 0x27, 0x1e, 0x6b, 0xf6, 0xfb, 0x4e, 0xa7, 0xb7, 0xaf, 0x65, 0xd0, 0xbb, 0x70, 0x7b, + 0xd5, 0x8a, 0xe2, 0x30, 0x5b, 0x7b, 0x0a, 0x70, 0xf1, 0x18, 0xa1, 0x7b, 0xb0, 0xd5, 0xb5, 0x1b, + 0x43, 0x5b, 0xe6, 0xb2, 0x37, 0x52, 0x95, 0xa0, 0x30, 0xb0, 0x7b, 0x6d, 0xc1, 0x53, 0x10, 0x40, + 0xbe, 0xd1, 0x72, 0x3a, 0x4f, 0x6c, 0x4d, 0x15, 0xb9, 0x5b, 0xfd, 0xc3, 0x41, 0xd7, 0x76, 0xec, + 0xb6, 0x96, 0x95, 0xdb, 0x46, 0xaf, 0x65, 0x77, 0xbb, 0x76, 0x5b, 0xcb, 0xd5, 0x7f, 0xcd, 0x42, + 0xb6, 0x49, 0x39, 0x43, 0xbf, 0x2b, 0xa0, 0x6d, 0xba, 0x1c, 0x7d, 0x91, 0xb2, 0xe6, 0x57, 0x8c, + 0x47, 0xe5, 0xfa, 0x6e, 0x33, 0x3e, 0xfb, 0xe1, 0xe5, 0xdf, 0x3f, 0xa9, 0x96, 0x51, 0x5d, 0x7e, + 0x46, 0x7c, 0x1f, 0xcf, 0xd4, 0xe3, 0x5a, 0xed, 0x85, 0x75, 0x61, 0x45, 0xf6, 0x70, 0xd5, 0xdd, + 0x52, 0xfe, 0xe6, 0x70, 0xa4, 0x96, 0x7f, 0xc5, 0x54, 0xfd, 0x07, 0xf9, 0xf5, 0x15, 0xf9, 0xc2, + 0xcb, 0x8f, 0x6b, 0xb5, 0x55, 0xed, 0x56, 0xed, 0xc5, 0xba, 0xfc, 0x9f, 0x15, 0xd0, 0x36, 0x8d, + 0x9b, 0x5a, 0xfe, 0x15, 0x8e, 0xaf, 0xdc, 0xbd, 0x34, 0x97, 0xb6, 0xf8, 0xb0, 0x5a, 0x96, 0xb8, + 0xf6, 0x56, 0x8d, 0x21, 0x65, 0x71, 0xd8, 0x87, 0x4a, 0xad, 0xf9, 0xa7, 0x02, 0x1f, 0x9f, 0x50, + 0x3f, 0x9d, 0xa8, 0xe6, 0x1d, 0x2c, 0x8f, 0xe3, 0xff, 0x33, 0x26, 0x2c, 0x36, 0x50, 0xbe, 0xc1, + 0x09, 0x77, 0x4a, 0x67, 0x6e, 0x30, 0x35, 0x69, 0x34, 0xb5, 0xa6, 0x24, 0x90, 0xf2, 0xac, 0xf8, + 0xca, 0x0d, 0x3d, 0xf6, 0x96, 0xef, 0xca, 0x47, 0x6b, 0xa7, 0xbf, 0xa8, 0x2a, 0x3e, 0xfe, 0x4d, + 0xbd, 0xbf, 0x1f, 0x47, 0x6e, 0x93, 0x33, 0x47, 0xaa, 0x5a, 0x4b, 0x6f, 0x3e, 0xd9, 0x75, 0x04, + 0x75, 0x9c, 0x97, 0xb9, 0x3e, 0xfd, 0x27, 0x00, 0x00, 0xff, 0xff, 0x75, 0x00, 0xcd, 0xd5, 0x39, + 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/command.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/command.pb.go new file mode 100644 index 0000000..6a301db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/command.pb.go @@ -0,0 +1,878 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/remoteworkers/v1test2/command.proto + +package remoteworkers // import "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import status "google.golang.org/genproto/googleapis/rpc/status" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes a shell-style task to execute, suitable for providing as the Bots +// interface's `Lease.payload` field. +type CommandTask struct { + // The inputs to the task. + Inputs *CommandTask_Inputs `protobuf:"bytes,1,opt,name=inputs,proto3" json:"inputs,omitempty"` + // The expected outputs from the task. + ExpectedOutputs *CommandTask_Outputs `protobuf:"bytes,4,opt,name=expected_outputs,json=expectedOutputs,proto3" json:"expected_outputs,omitempty"` + // The timeouts of this task. + Timeouts *CommandTask_Timeouts `protobuf:"bytes,5,opt,name=timeouts,proto3" json:"timeouts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandTask) Reset() { *m = CommandTask{} } +func (m *CommandTask) String() string { return proto.CompactTextString(m) } +func (*CommandTask) ProtoMessage() {} +func (*CommandTask) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{0} +} +func (m *CommandTask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandTask.Unmarshal(m, b) +} +func (m *CommandTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandTask.Marshal(b, m, deterministic) +} +func (dst *CommandTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandTask.Merge(dst, src) +} +func (m *CommandTask) XXX_Size() int { + return xxx_messageInfo_CommandTask.Size(m) +} +func (m *CommandTask) XXX_DiscardUnknown() { + xxx_messageInfo_CommandTask.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandTask proto.InternalMessageInfo + +func (m *CommandTask) GetInputs() *CommandTask_Inputs { + if m != nil { + return m.Inputs + } + return nil +} + +func (m *CommandTask) GetExpectedOutputs() *CommandTask_Outputs { + if m != nil { + return m.ExpectedOutputs + } + return nil +} + +func (m *CommandTask) GetTimeouts() *CommandTask_Timeouts { + if m != nil { + return m.Timeouts + } + return nil +} + +// Describes the inputs to a shell-style task. +type CommandTask_Inputs struct { + // The command itself to run (e.g., argv). + // + // This field should be passed directly to the underlying operating system, + // and so it must be sensible to that operating system. For example, on + // Windows, the first argument might be "C:\Windows\System32\ping.exe" - + // that is, using drive letters and backslashes. A command for a *nix + // system, on the other hand, would use forward slashes. + // + // All other fields in the RWAPI must consistently use forward slashes, + // since those fields may be interpretted by both the service and the bot. + Arguments []string `protobuf:"bytes,1,rep,name=arguments,proto3" json:"arguments,omitempty"` + // The input filesystem to be set up prior to the task beginning. The + // contents should be a repeated set of FileMetadata messages though other + // formats are allowed if better for the implementation (eg, a LUCI-style + // .isolated file). 
+ // + // This field is repeated since implementations might want to cache the + // metadata, in which case it may be useful to break up portions of the + // filesystem that change frequently (eg, specific input files) from those + // that don't (eg, standard header files). + Files []*Digest `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"` + // All environment variables required by the task. + EnvironmentVariables []*CommandTask_Inputs_EnvironmentVariable `protobuf:"bytes,3,rep,name=environment_variables,json=environmentVariables,proto3" json:"environment_variables,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandTask_Inputs) Reset() { *m = CommandTask_Inputs{} } +func (m *CommandTask_Inputs) String() string { return proto.CompactTextString(m) } +func (*CommandTask_Inputs) ProtoMessage() {} +func (*CommandTask_Inputs) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{0, 0} +} +func (m *CommandTask_Inputs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandTask_Inputs.Unmarshal(m, b) +} +func (m *CommandTask_Inputs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandTask_Inputs.Marshal(b, m, deterministic) +} +func (dst *CommandTask_Inputs) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandTask_Inputs.Merge(dst, src) +} +func (m *CommandTask_Inputs) XXX_Size() int { + return xxx_messageInfo_CommandTask_Inputs.Size(m) +} +func (m *CommandTask_Inputs) XXX_DiscardUnknown() { + xxx_messageInfo_CommandTask_Inputs.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandTask_Inputs proto.InternalMessageInfo + +func (m *CommandTask_Inputs) GetArguments() []string { + if m != nil { + return m.Arguments + } + return nil +} + +func (m *CommandTask_Inputs) GetFiles() []*Digest { + if m != nil { + return m.Files + } + return nil +} + +func (m *CommandTask_Inputs) GetEnvironmentVariables() []*CommandTask_Inputs_EnvironmentVariable { + if m != nil { + return m.EnvironmentVariables + } + return nil +} + +// An environment variable required by this task. +type CommandTask_Inputs_EnvironmentVariable struct { + // The envvar name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The envvar value. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandTask_Inputs_EnvironmentVariable) Reset() { + *m = CommandTask_Inputs_EnvironmentVariable{} +} +func (m *CommandTask_Inputs_EnvironmentVariable) String() string { return proto.CompactTextString(m) } +func (*CommandTask_Inputs_EnvironmentVariable) ProtoMessage() {} +func (*CommandTask_Inputs_EnvironmentVariable) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{0, 0, 0} +} +func (m *CommandTask_Inputs_EnvironmentVariable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable.Unmarshal(m, b) +} +func (m *CommandTask_Inputs_EnvironmentVariable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable.Marshal(b, m, deterministic) +} +func (dst *CommandTask_Inputs_EnvironmentVariable) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable.Merge(dst, src) +} +func (m *CommandTask_Inputs_EnvironmentVariable) XXX_Size() int { + return xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable.Size(m) +} +func (m *CommandTask_Inputs_EnvironmentVariable) XXX_DiscardUnknown() { + xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandTask_Inputs_EnvironmentVariable proto.InternalMessageInfo + +func (m *CommandTask_Inputs_EnvironmentVariable) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CommandTask_Inputs_EnvironmentVariable) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Describes the expected outputs of the command. +type CommandTask_Outputs struct { + // A list of expected files, relative to the execution root. All paths + // MUST be delimited by forward slashes. + Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` + // A list of expected directories, relative to the execution root. All paths + // MUST be delimited by forward slashes. + Directories []string `protobuf:"bytes,2,rep,name=directories,proto3" json:"directories,omitempty"` + // The destination to which any stdout should be sent. The method by which + // the bot should send the stream contents to that destination is not + // defined in this API. As examples, the destination could be a file + // referenced in the `files` field in this message, or it could be a URI + // that must be written via the ByteStream API. + StdoutDestination string `protobuf:"bytes,3,opt,name=stdout_destination,json=stdoutDestination,proto3" json:"stdout_destination,omitempty"` + // The destination to which any stderr should be sent. The method by which + // the bot should send the stream contents to that destination is not + // defined in this API. As examples, the destination could be a file + // referenced in the `files` field in this message, or it could be a URI + // that must be written via the ByteStream API. 
+ StderrDestination string `protobuf:"bytes,4,opt,name=stderr_destination,json=stderrDestination,proto3" json:"stderr_destination,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandTask_Outputs) Reset() { *m = CommandTask_Outputs{} } +func (m *CommandTask_Outputs) String() string { return proto.CompactTextString(m) } +func (*CommandTask_Outputs) ProtoMessage() {} +func (*CommandTask_Outputs) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{0, 1} +} +func (m *CommandTask_Outputs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandTask_Outputs.Unmarshal(m, b) +} +func (m *CommandTask_Outputs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandTask_Outputs.Marshal(b, m, deterministic) +} +func (dst *CommandTask_Outputs) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandTask_Outputs.Merge(dst, src) +} +func (m *CommandTask_Outputs) XXX_Size() int { + return xxx_messageInfo_CommandTask_Outputs.Size(m) +} +func (m *CommandTask_Outputs) XXX_DiscardUnknown() { + xxx_messageInfo_CommandTask_Outputs.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandTask_Outputs proto.InternalMessageInfo + +func (m *CommandTask_Outputs) GetFiles() []string { + if m != nil { + return m.Files + } + return nil +} + +func (m *CommandTask_Outputs) GetDirectories() []string { + if m != nil { + return m.Directories + } + return nil +} + +func (m *CommandTask_Outputs) GetStdoutDestination() string { + if m != nil { + return m.StdoutDestination + } + return "" +} + +func (m *CommandTask_Outputs) GetStderrDestination() string { + if m != nil { + return m.StderrDestination + } + return "" +} + +// Describes the timeouts associated with this task. +type CommandTask_Timeouts struct { + // This specifies the maximum time that the task can run, excluding the + // time required to download inputs or upload outputs. That is, the worker + // will terminate the task if it runs longer than this. + Execution *duration.Duration `protobuf:"bytes,1,opt,name=execution,proto3" json:"execution,omitempty"` + // This specifies the maximum amount of time the task can be idle - that is, + // go without generating some output in either stdout or stderr. If the + // process is silent for more than the specified time, the worker will + // terminate the task. + Idle *duration.Duration `protobuf:"bytes,2,opt,name=idle,proto3" json:"idle,omitempty"` + // If the execution or IO timeouts are exceeded, the worker will try to + // gracefully terminate the task and return any existing logs. However, + // tasks may be hard-frozen in which case this process will fail. This + // timeout specifies how long to wait for a terminated task to shut down + // gracefully (e.g. via SIGTERM) before we bring down the hammer (e.g. + // SIGKILL on *nix, CTRL_BREAK_EVENT on Windows). 
+ Shutdown *duration.Duration `protobuf:"bytes,3,opt,name=shutdown,proto3" json:"shutdown,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandTask_Timeouts) Reset() { *m = CommandTask_Timeouts{} } +func (m *CommandTask_Timeouts) String() string { return proto.CompactTextString(m) } +func (*CommandTask_Timeouts) ProtoMessage() {} +func (*CommandTask_Timeouts) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{0, 2} +} +func (m *CommandTask_Timeouts) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandTask_Timeouts.Unmarshal(m, b) +} +func (m *CommandTask_Timeouts) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandTask_Timeouts.Marshal(b, m, deterministic) +} +func (dst *CommandTask_Timeouts) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandTask_Timeouts.Merge(dst, src) +} +func (m *CommandTask_Timeouts) XXX_Size() int { + return xxx_messageInfo_CommandTask_Timeouts.Size(m) +} +func (m *CommandTask_Timeouts) XXX_DiscardUnknown() { + xxx_messageInfo_CommandTask_Timeouts.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandTask_Timeouts proto.InternalMessageInfo + +func (m *CommandTask_Timeouts) GetExecution() *duration.Duration { + if m != nil { + return m.Execution + } + return nil +} + +func (m *CommandTask_Timeouts) GetIdle() *duration.Duration { + if m != nil { + return m.Idle + } + return nil +} + +func (m *CommandTask_Timeouts) GetShutdown() *duration.Duration { + if m != nil { + return m.Shutdown + } + return nil +} + +// DEPRECATED - use CommandResult instead. +// Describes the actual outputs from the task. +type CommandOutputs struct { + // exit_code is only fully reliable if the status' code is OK. If the task + // exceeded its deadline or was cancelled, the process may still produce an + // exit code as it is cancelled, and this will be populated, but a successful + // (zero) is unlikely to be correct unless the status code is OK. + ExitCode int32 `protobuf:"varint,1,opt,name=exit_code,json=exitCode,proto3" json:"exit_code,omitempty"` + // The output files. 
The blob referenced by the digest should contain + // one of the following (implementation-dependent): + // * A marshalled DirectoryMetadata of the returned filesystem + // * A LUCI-style .isolated file + Outputs *Digest `protobuf:"bytes,2,opt,name=outputs,proto3" json:"outputs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandOutputs) Reset() { *m = CommandOutputs{} } +func (m *CommandOutputs) String() string { return proto.CompactTextString(m) } +func (*CommandOutputs) ProtoMessage() {} +func (*CommandOutputs) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{1} +} +func (m *CommandOutputs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandOutputs.Unmarshal(m, b) +} +func (m *CommandOutputs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandOutputs.Marshal(b, m, deterministic) +} +func (dst *CommandOutputs) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandOutputs.Merge(dst, src) +} +func (m *CommandOutputs) XXX_Size() int { + return xxx_messageInfo_CommandOutputs.Size(m) +} +func (m *CommandOutputs) XXX_DiscardUnknown() { + xxx_messageInfo_CommandOutputs.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandOutputs proto.InternalMessageInfo + +func (m *CommandOutputs) GetExitCode() int32 { + if m != nil { + return m.ExitCode + } + return 0 +} + +func (m *CommandOutputs) GetOutputs() *Digest { + if m != nil { + return m.Outputs + } + return nil +} + +// DEPRECATED - use CommandResult instead. +// Can be used as part of CompleteRequest.metadata, or are part of a more +// sophisticated message. +type CommandOverhead struct { + // The elapsed time between calling Accept and Complete. The server will also + // have its own idea of what this should be, but this excludes the overhead of + // the RPCs and the bot response time. + Duration *duration.Duration `protobuf:"bytes,1,opt,name=duration,proto3" json:"duration,omitempty"` + // The amount of time *not* spent executing the command (ie + // uploading/downloading files). 
+ Overhead *duration.Duration `protobuf:"bytes,2,opt,name=overhead,proto3" json:"overhead,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandOverhead) Reset() { *m = CommandOverhead{} } +func (m *CommandOverhead) String() string { return proto.CompactTextString(m) } +func (*CommandOverhead) ProtoMessage() {} +func (*CommandOverhead) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{2} +} +func (m *CommandOverhead) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandOverhead.Unmarshal(m, b) +} +func (m *CommandOverhead) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandOverhead.Marshal(b, m, deterministic) +} +func (dst *CommandOverhead) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandOverhead.Merge(dst, src) +} +func (m *CommandOverhead) XXX_Size() int { + return xxx_messageInfo_CommandOverhead.Size(m) +} +func (m *CommandOverhead) XXX_DiscardUnknown() { + xxx_messageInfo_CommandOverhead.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandOverhead proto.InternalMessageInfo + +func (m *CommandOverhead) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +func (m *CommandOverhead) GetOverhead() *duration.Duration { + if m != nil { + return m.Overhead + } + return nil +} + +// All information about the execution of a command, suitable for providing as +// the Bots interface's `Lease.result` field. +type CommandResult struct { + // An overall status for the command. For example, if the command timed out, + // this might have a code of DEADLINE_EXCEEDED; if it was killed by the OS for + // memory exhaustion, it might have a code of RESOURCE_EXHAUSTED. + Status *status.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` + // The exit code of the process. An exit code of "0" should only be trusted if + // `status` has a code of OK (otherwise it may simply be unset). + ExitCode int32 `protobuf:"varint,2,opt,name=exit_code,json=exitCode,proto3" json:"exit_code,omitempty"` + // The output files. The blob referenced by the digest should contain + // one of the following (implementation-dependent): + // * A marshalled DirectoryMetadata of the returned filesystem + // * A LUCI-style .isolated file + Outputs *Digest `protobuf:"bytes,3,opt,name=outputs,proto3" json:"outputs,omitempty"` + // The elapsed time between calling Accept and Complete. The server will also + // have its own idea of what this should be, but this excludes the overhead of + // the RPCs and the bot response time. + Duration *duration.Duration `protobuf:"bytes,4,opt,name=duration,proto3" json:"duration,omitempty"` // Deprecated: Do not use. + // The amount of time *not* spent executing the command (ie + // uploading/downloading files). + Overhead *duration.Duration `protobuf:"bytes,5,opt,name=overhead,proto3" json:"overhead,omitempty"` // Deprecated: Do not use. + // Implementation-dependent statistics about the task. Both servers and bots + // may define messages which can be encoded here; bots are free to provide + // statistics in multiple formats, and servers are free to choose one or more + // of the values to process and ignore others. In particular, it is *not* + // considered an error for the bot to provide the server with a field that it + // doesn't know about. 
+ Statistics []*any.Any `protobuf:"bytes,6,rep,name=statistics,proto3" json:"statistics,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandResult) Reset() { *m = CommandResult{} } +func (m *CommandResult) String() string { return proto.CompactTextString(m) } +func (*CommandResult) ProtoMessage() {} +func (*CommandResult) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{3} +} +func (m *CommandResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandResult.Unmarshal(m, b) +} +func (m *CommandResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandResult.Marshal(b, m, deterministic) +} +func (dst *CommandResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandResult.Merge(dst, src) +} +func (m *CommandResult) XXX_Size() int { + return xxx_messageInfo_CommandResult.Size(m) +} +func (m *CommandResult) XXX_DiscardUnknown() { + xxx_messageInfo_CommandResult.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandResult proto.InternalMessageInfo + +func (m *CommandResult) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *CommandResult) GetExitCode() int32 { + if m != nil { + return m.ExitCode + } + return 0 +} + +func (m *CommandResult) GetOutputs() *Digest { + if m != nil { + return m.Outputs + } + return nil +} + +// Deprecated: Do not use. +func (m *CommandResult) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +// Deprecated: Do not use. +func (m *CommandResult) GetOverhead() *duration.Duration { + if m != nil { + return m.Overhead + } + return nil +} + +func (m *CommandResult) GetStatistics() []*any.Any { + if m != nil { + return m.Statistics + } + return nil +} + +// The metadata for a file. Similar to the equivalent message in the Remote +// Execution API. +type FileMetadata struct { + // The path of this file. If this message is part of the + // CommandOutputs.outputs fields, the path is relative to the execution root + // and must correspond to an entry in CommandTask.outputs.files. If this + // message is part of a Directory message, then the path is relative to the + // root of that directory. All paths MUST be delimited by forward slashes. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // A pointer to the contents of the file. The method by which a client + // retrieves the contents from a CAS system is not defined here. + Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + // If the file is small enough, its contents may also or alternatively be + // listed here. 
+ Contents []byte `protobuf:"bytes,3,opt,name=contents,proto3" json:"contents,omitempty"` + // Properties of the file + IsExecutable bool `protobuf:"varint,4,opt,name=is_executable,json=isExecutable,proto3" json:"is_executable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileMetadata) Reset() { *m = FileMetadata{} } +func (m *FileMetadata) String() string { return proto.CompactTextString(m) } +func (*FileMetadata) ProtoMessage() {} +func (*FileMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{4} +} +func (m *FileMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileMetadata.Unmarshal(m, b) +} +func (m *FileMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileMetadata.Marshal(b, m, deterministic) +} +func (dst *FileMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileMetadata.Merge(dst, src) +} +func (m *FileMetadata) XXX_Size() int { + return xxx_messageInfo_FileMetadata.Size(m) +} +func (m *FileMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_FileMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_FileMetadata proto.InternalMessageInfo + +func (m *FileMetadata) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *FileMetadata) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +func (m *FileMetadata) GetContents() []byte { + if m != nil { + return m.Contents + } + return nil +} + +func (m *FileMetadata) GetIsExecutable() bool { + if m != nil { + return m.IsExecutable + } + return false +} + +// The metadata for a directory. Similar to the equivalent message in the Remote +// Execution API. +type DirectoryMetadata struct { + // The path of the directory, as in + // [FileMetadata.path][google.devtools.remoteworkers.v1test2.FileMetadata.path]. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // A pointer to the contents of the directory, in the form of a marshalled + // Directory message. 
+ Digest *Digest `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DirectoryMetadata) Reset() { *m = DirectoryMetadata{} } +func (m *DirectoryMetadata) String() string { return proto.CompactTextString(m) } +func (*DirectoryMetadata) ProtoMessage() {} +func (*DirectoryMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{5} +} +func (m *DirectoryMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DirectoryMetadata.Unmarshal(m, b) +} +func (m *DirectoryMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DirectoryMetadata.Marshal(b, m, deterministic) +} +func (dst *DirectoryMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_DirectoryMetadata.Merge(dst, src) +} +func (m *DirectoryMetadata) XXX_Size() int { + return xxx_messageInfo_DirectoryMetadata.Size(m) +} +func (m *DirectoryMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_DirectoryMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_DirectoryMetadata proto.InternalMessageInfo + +func (m *DirectoryMetadata) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *DirectoryMetadata) GetDigest() *Digest { + if m != nil { + return m.Digest + } + return nil +} + +// The CommandTask and CommandResult messages assume the existence of a service +// that can serve blobs of content, identified by a hash and size known as a +// "digest." The method by which these blobs may be retrieved is not specified +// here, but a model implementation is in the Remote Execution API's +// "ContentAddressibleStorage" interface. +// +// In the context of the RWAPI, a Digest will virtually always refer to the +// contents of a file or a directory. The latter is represented by the +// byte-encoded Directory message. +type Digest struct { + // A string-encoded hash (eg "1a2b3c", not the byte array [0x1a, 0x2b, 0x3c]) + // using an implementation-defined hash algorithm (eg SHA-256). + Hash string `protobuf:"bytes,1,opt,name=hash,proto3" json:"hash,omitempty"` + // The size of the contents. While this is not strictly required as part of an + // identifier (after all, any given hash will have exactly one canonical + // size), it's useful in almost all cases when one might want to send or + // retrieve blobs of content and is included here for this reason. 
+ SizeBytes int64 `protobuf:"varint,2,opt,name=size_bytes,json=sizeBytes,proto3" json:"size_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Digest) Reset() { *m = Digest{} } +func (m *Digest) String() string { return proto.CompactTextString(m) } +func (*Digest) ProtoMessage() {} +func (*Digest) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{6} +} +func (m *Digest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Digest.Unmarshal(m, b) +} +func (m *Digest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Digest.Marshal(b, m, deterministic) +} +func (dst *Digest) XXX_Merge(src proto.Message) { + xxx_messageInfo_Digest.Merge(dst, src) +} +func (m *Digest) XXX_Size() int { + return xxx_messageInfo_Digest.Size(m) +} +func (m *Digest) XXX_DiscardUnknown() { + xxx_messageInfo_Digest.DiscardUnknown(m) +} + +var xxx_messageInfo_Digest proto.InternalMessageInfo + +func (m *Digest) GetHash() string { + if m != nil { + return m.Hash + } + return "" +} + +func (m *Digest) GetSizeBytes() int64 { + if m != nil { + return m.SizeBytes + } + return 0 +} + +// The contents of a directory. Similar to the equivalent message in the Remote +// Execution API. +type Directory struct { + // The files in this directory + Files []*FileMetadata `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` + // Any subdirectories + Directories []*DirectoryMetadata `protobuf:"bytes,2,rep,name=directories,proto3" json:"directories,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Directory) Reset() { *m = Directory{} } +func (m *Directory) String() string { return proto.CompactTextString(m) } +func (*Directory) ProtoMessage() {} +func (*Directory) Descriptor() ([]byte, []int) { + return fileDescriptor_command_64a15fa32645f108, []int{7} +} +func (m *Directory) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Directory.Unmarshal(m, b) +} +func (m *Directory) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Directory.Marshal(b, m, deterministic) +} +func (dst *Directory) XXX_Merge(src proto.Message) { + xxx_messageInfo_Directory.Merge(dst, src) +} +func (m *Directory) XXX_Size() int { + return xxx_messageInfo_Directory.Size(m) +} +func (m *Directory) XXX_DiscardUnknown() { + xxx_messageInfo_Directory.DiscardUnknown(m) +} + +var xxx_messageInfo_Directory proto.InternalMessageInfo + +func (m *Directory) GetFiles() []*FileMetadata { + if m != nil { + return m.Files + } + return nil +} + +func (m *Directory) GetDirectories() []*DirectoryMetadata { + if m != nil { + return m.Directories + } + return nil +} + +func init() { + proto.RegisterType((*CommandTask)(nil), "google.devtools.remoteworkers.v1test2.CommandTask") + proto.RegisterType((*CommandTask_Inputs)(nil), "google.devtools.remoteworkers.v1test2.CommandTask.Inputs") + proto.RegisterType((*CommandTask_Inputs_EnvironmentVariable)(nil), "google.devtools.remoteworkers.v1test2.CommandTask.Inputs.EnvironmentVariable") + proto.RegisterType((*CommandTask_Outputs)(nil), "google.devtools.remoteworkers.v1test2.CommandTask.Outputs") + proto.RegisterType((*CommandTask_Timeouts)(nil), "google.devtools.remoteworkers.v1test2.CommandTask.Timeouts") + proto.RegisterType((*CommandOutputs)(nil), "google.devtools.remoteworkers.v1test2.CommandOutputs") + 
proto.RegisterType((*CommandOverhead)(nil), "google.devtools.remoteworkers.v1test2.CommandOverhead") + proto.RegisterType((*CommandResult)(nil), "google.devtools.remoteworkers.v1test2.CommandResult") + proto.RegisterType((*FileMetadata)(nil), "google.devtools.remoteworkers.v1test2.FileMetadata") + proto.RegisterType((*DirectoryMetadata)(nil), "google.devtools.remoteworkers.v1test2.DirectoryMetadata") + proto.RegisterType((*Digest)(nil), "google.devtools.remoteworkers.v1test2.Digest") + proto.RegisterType((*Directory)(nil), "google.devtools.remoteworkers.v1test2.Directory") +} + +func init() { + proto.RegisterFile("google/devtools/remoteworkers/v1test2/command.proto", fileDescriptor_command_64a15fa32645f108) +} + +var fileDescriptor_command_64a15fa32645f108 = []byte{ + // 852 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x5d, 0x6f, 0x1b, 0x45, + 0x14, 0xd5, 0xae, 0x3f, 0x62, 0x5f, 0xa7, 0x94, 0x0e, 0xa9, 0x70, 0xcd, 0x87, 0x22, 0xa3, 0x4a, + 0x01, 0x29, 0x6b, 0xd5, 0x01, 0x41, 0xc8, 0x03, 0x22, 0x71, 0xa8, 0xfa, 0x50, 0x01, 0x43, 0xd4, + 0x48, 0x7d, 0xb1, 0x26, 0xbb, 0xb7, 0xce, 0xa8, 0xf6, 0x8e, 0x35, 0x33, 0xeb, 0x36, 0xbc, 0x20, + 0xf1, 0x47, 0x90, 0xfa, 0x46, 0xdf, 0xe0, 0x3f, 0xf0, 0xc8, 0x7f, 0x42, 0xf3, 0xb5, 0x59, 0x37, + 0x90, 0x18, 0x23, 0xf1, 0xb6, 0xbe, 0xf7, 0x9c, 0x33, 0xf7, 0x9e, 0x99, 0x7b, 0x65, 0xd8, 0x9b, + 0x08, 0x31, 0x99, 0xe2, 0x20, 0xc3, 0x85, 0x16, 0x62, 0xaa, 0x06, 0x12, 0x67, 0x42, 0xe3, 0x0b, + 0x21, 0x9f, 0xa3, 0x54, 0x83, 0xc5, 0x03, 0x8d, 0x4a, 0x0f, 0x07, 0xa9, 0x98, 0xcd, 0x58, 0x9e, + 0x25, 0x73, 0x29, 0xb4, 0x20, 0xf7, 0x1d, 0x29, 0x09, 0xa4, 0x64, 0x89, 0x94, 0x78, 0x52, 0xef, + 0x9e, 0xd7, 0xb6, 0xa4, 0xb3, 0xe2, 0xd9, 0x80, 0xe5, 0x17, 0x4e, 0xa1, 0xf7, 0xe1, 0x9b, 0xa9, + 0xac, 0x90, 0x4c, 0x73, 0x91, 0xfb, 0xfc, 0xbb, 0x3e, 0x2f, 0xe7, 0xe9, 0x40, 0x69, 0xa6, 0x0b, + 0xe5, 0x12, 0xfd, 0x57, 0x1b, 0xd0, 0x39, 0x72, 0xc5, 0x9c, 0x30, 0xf5, 0x9c, 0x7c, 0x0f, 0x4d, + 0x9e, 0xcf, 0x0b, 0xad, 0xba, 0xd1, 0x76, 0xb4, 0xd3, 0x19, 0xee, 0x27, 0x2b, 0xd5, 0x96, 0x54, + 0x34, 0x92, 0x47, 0x56, 0x80, 0x7a, 0x21, 0x82, 0xf0, 0x36, 0xbe, 0x9c, 0x63, 0xaa, 0x31, 0x1b, + 0x8b, 0x42, 0x5b, 0xf1, 0xba, 0x15, 0xff, 0x72, 0x0d, 0xf1, 0x6f, 0x9d, 0x02, 0xbd, 0x1d, 0x34, + 0x7d, 0x80, 0x9c, 0x42, 0x4b, 0xf3, 0x19, 0x0a, 0x23, 0xdf, 0xb0, 0xf2, 0x07, 0x6b, 0xc8, 0x9f, + 0x78, 0x09, 0x5a, 0x8a, 0xf5, 0x7e, 0x8b, 0xa1, 0xe9, 0x5a, 0x22, 0xef, 0x43, 0x9b, 0xc9, 0x49, + 0x31, 0xc3, 0xdc, 0x1a, 0x54, 0xdb, 0x69, 0xd3, 0xcb, 0x00, 0x39, 0x82, 0xc6, 0x33, 0x3e, 0x45, + 0xd5, 0x8d, 0xb7, 0x6b, 0x3b, 0x9d, 0xe1, 0xee, 0x8a, 0xc7, 0x8f, 0xf8, 0x04, 0x95, 0xa6, 0x8e, + 0x4b, 0x7e, 0x8e, 0xe0, 0x2e, 0xe6, 0x0b, 0x2e, 0x45, 0x6e, 0x54, 0xc7, 0x0b, 0x26, 0x39, 0x3b, + 0x33, 0xaa, 0x35, 0xab, 0xfa, 0x78, 0xed, 0x0b, 0x49, 0x8e, 0x2f, 0x65, 0x9f, 0x78, 0x55, 0xba, + 0x85, 0x57, 0x83, 0xaa, 0xf7, 0x15, 0xbc, 0xf3, 0x37, 0x60, 0x42, 0xa0, 0x9e, 0xb3, 0x19, 0xda, + 0xa7, 0xd1, 0xa6, 0xf6, 0x9b, 0x6c, 0x41, 0x63, 0xc1, 0xa6, 0x05, 0x76, 0x63, 0x1b, 0x74, 0x3f, + 0x7a, 0xbf, 0x44, 0xb0, 0x11, 0x2e, 0x66, 0x2b, 0xd8, 0xe2, 0x0c, 0xf3, 0x7d, 0x6e, 0x43, 0x27, + 0xe3, 0x12, 0x53, 0x2d, 0x24, 0xf7, 0x96, 0xb5, 0x69, 0x35, 0x44, 0x76, 0x81, 0x28, 0x9d, 0x89, + 0x42, 0x8f, 0x33, 0x54, 0x9a, 0xe7, 0xf6, 0x3d, 0x77, 0x6b, 0xf6, 0x98, 0x3b, 0x2e, 0x33, 0xba, + 0x4c, 0x78, 0x38, 0x4a, 0xb9, 0x04, 0xaf, 0x97, 0x70, 0x94, 0xb2, 0x02, 0xef, 0xfd, 0x1a, 0x41, + 0x2b, 0x5c, 0x36, 0xf9, 0x1c, 0xda, 0xf8, 0x12, 0xd3, 0xc2, 0x52, 0xdc, 0xc3, 
0xbf, 0x17, 0x7c, + 0x0e, 0x23, 0x95, 0x8c, 0xfc, 0x48, 0xd1, 0x4b, 0x2c, 0xd9, 0x85, 0x3a, 0xcf, 0xa6, 0xae, 0xf9, + 0x6b, 0x39, 0x16, 0x46, 0x3e, 0x83, 0x96, 0x3a, 0x2f, 0x74, 0x26, 0x5e, 0xb8, 0x46, 0xae, 0xa5, + 0x94, 0xd0, 0xfe, 0x02, 0xde, 0xf2, 0xd7, 0x19, 0x3c, 0x7d, 0xcf, 0x14, 0xcc, 0xf5, 0x38, 0x15, + 0x99, 0xbb, 0x8e, 0x06, 0x6d, 0x99, 0xc0, 0x91, 0xc8, 0x90, 0x3c, 0x84, 0x8d, 0x30, 0x67, 0xae, + 0xae, 0x7f, 0xf9, 0x12, 0x03, 0xbb, 0xff, 0x13, 0xdc, 0x0e, 0xe7, 0x2e, 0x50, 0x9e, 0x23, 0xcb, + 0x4c, 0x07, 0x61, 0xb5, 0xdc, 0x6c, 0x54, 0x09, 0x35, 0x34, 0xe1, 0x25, 0x6e, 0xf6, 0xaa, 0x84, + 0xf6, 0xff, 0x8c, 0xe1, 0x96, 0xaf, 0x80, 0xa2, 0x2a, 0xa6, 0x9a, 0x7c, 0x02, 0x4d, 0xb7, 0xbf, + 0xfc, 0xe9, 0x24, 0xc8, 0xc8, 0x79, 0x9a, 0xfc, 0x60, 0x33, 0xd4, 0x23, 0x96, 0x4d, 0x8a, 0xff, + 0xd9, 0xa4, 0xda, 0x7f, 0x31, 0x89, 0xec, 0x57, 0x1c, 0xa9, 0xdf, 0xd0, 0xda, 0x61, 0xdc, 0x8d, + 0x2a, 0xae, 0xec, 0x57, 0x5c, 0x69, 0xac, 0x44, 0x0d, 0x70, 0xf2, 0x29, 0x80, 0xe9, 0x92, 0x2b, + 0xcd, 0x53, 0xd5, 0x6d, 0xda, 0xd5, 0xb0, 0x75, 0x85, 0xfc, 0x75, 0x7e, 0x41, 0x2b, 0xb8, 0xfe, + 0xeb, 0x08, 0x36, 0xbf, 0xe1, 0x53, 0x7c, 0x8c, 0x9a, 0x65, 0x4c, 0x33, 0x33, 0xd1, 0x73, 0xa6, + 0xcf, 0xc3, 0x44, 0x9b, 0x6f, 0x72, 0x0c, 0xcd, 0xcc, 0xf6, 0xb8, 0xde, 0xeb, 0xf1, 0x64, 0xd2, + 0x83, 0x56, 0x2a, 0x72, 0x6d, 0x57, 0xa5, 0x71, 0x78, 0x93, 0x96, 0xbf, 0xc9, 0x47, 0x70, 0x8b, + 0xab, 0xb1, 0x1b, 0x23, 0xb3, 0x59, 0xac, 0x71, 0x2d, 0xba, 0xc9, 0xd5, 0x71, 0x19, 0xeb, 0xe7, + 0x70, 0x67, 0xe4, 0xd7, 0xc1, 0xc5, 0xff, 0x50, 0x70, 0xff, 0x00, 0x9a, 0x2e, 0x62, 0x0e, 0x39, + 0x67, 0xaa, 0x3c, 0xc4, 0x7c, 0x93, 0x0f, 0x00, 0x14, 0xff, 0x11, 0xc7, 0x67, 0x17, 0x1a, 0xdd, + 0x5c, 0xd5, 0x68, 0xdb, 0x44, 0x0e, 0x4d, 0xa0, 0xff, 0x7b, 0x04, 0xed, 0xb2, 0x5a, 0xf2, 0xa8, + 0xba, 0xf2, 0x3a, 0xc3, 0xbd, 0x15, 0x0b, 0xaa, 0x5e, 0x4d, 0xd8, 0x93, 0x4f, 0xaf, 0xee, 0xc9, + 0xce, 0xf0, 0x8b, 0x95, 0x3b, 0x7c, 0xc3, 0xbf, 0xa5, 0x0d, 0x7b, 0xf8, 0x47, 0x04, 0x1f, 0xa7, + 0x62, 0xb6, 0x9a, 0xd8, 0xe1, 0x5d, 0x6a, 0xc3, 0xa7, 0x2e, 0xec, 0xc7, 0x52, 0x7d, 0x17, 0x3d, + 0xa5, 0x9e, 0x3f, 0x11, 0x53, 0x96, 0x4f, 0x12, 0x21, 0x27, 0x83, 0x09, 0xe6, 0xf6, 0x11, 0x0e, + 0x5c, 0x8a, 0xcd, 0xb9, 0xba, 0xe1, 0x2f, 0xd1, 0xc1, 0x52, 0xf4, 0x55, 0x1c, 0xd3, 0xd3, 0xd7, + 0xf1, 0xfd, 0x87, 0x4e, 0x79, 0x84, 0x8b, 0x13, 0x5b, 0xd9, 0x52, 0x09, 0xc9, 0x93, 0x07, 0x27, + 0x86, 0x7a, 0xd6, 0xb4, 0x67, 0xed, 0xfd, 0x15, 0x00, 0x00, 0xff, 0xff, 0xf4, 0x52, 0xfa, 0x6a, + 0x7d, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/tasks.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/tasks.pb.go new file mode 100644 index 0000000..48a3c6f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/tasks.pb.go @@ -0,0 +1,608 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/remoteworkers/v1test2/tasks.proto + +package remoteworkers // import "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// DEPRECATED - use Lease.payload instead. +// A Task represents a unit of work. Its result and logs are defined as +// subresources. +// +// If all the `Any` fields are populated, this can be a very large message, and +// clients may not want the entire message returned on every call to every +// method. Such clients should request partial responses +// (https://cloud.google.com/apis/design/design_patterns#partial_response) and +// servers should implement partial responses in order to reduce unnecessry +// overhead. +type Task struct { + // The name of this task. Output only. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The actual task to perform. For example, this could be CommandTask to run a + // command line. + Description *any.Any `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Handles to logs. The key is a human-readable name like `stdout`, and the + // handle is a resource name that can be passed to ByteStream or other + // accessors. + // + // An implementation may define some logs by default (like `stdout`), and may + // allow clients to add new logs via AddTaskLog. 
+ Logs map[string]string `protobuf:"bytes,3,rep,name=logs,proto3" json:"logs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} +func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Task) GetDescription() *any.Any { + if m != nil { + return m.Description + } + return nil +} + +func (m *Task) GetLogs() map[string]string { + if m != nil { + return m.Logs + } + return nil +} + +// DEPRECATED - use Lease.assignment_result instead. +// The result and metadata of the task. +type TaskResult struct { + // The name of the task result; must be a name of a `Task` followed by + // `/result`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The result may be updated several times; the client must only set + // `complete` to true to indicate that no further updates are allowed. + // If this is not true, the `status` field must not be examined since its zero + // value is equivalent to `OK`. + // + // Once a task is completed, it must not be updated with further results, + // though the implementation may choose to continue to receive logs. + Complete bool `protobuf:"varint,2,opt,name=complete,proto3" json:"complete,omitempty"` + // The final status of the task itself. For example, if task.description + // included a timeout which was violated, status.code may be + // DEADLINE_EXCEEDED. This field can only be read if `complete` is true. + Status *status.Status `protobuf:"bytes,3,opt,name=status,proto3" json:"status,omitempty"` + // Any non-log output, such as output files and exit codes. See + // CommandResult as an example. + Output *any.Any `protobuf:"bytes,4,opt,name=output,proto3" json:"output,omitempty"` + // Any information about how the command was executed, eg runtime. See + // CommandOverhead as an example. 
+ Meta *any.Any `protobuf:"bytes,5,opt,name=meta,proto3" json:"meta,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskResult) Reset() { *m = TaskResult{} } +func (m *TaskResult) String() string { return proto.CompactTextString(m) } +func (*TaskResult) ProtoMessage() {} +func (*TaskResult) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{1} +} +func (m *TaskResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskResult.Unmarshal(m, b) +} +func (m *TaskResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskResult.Marshal(b, m, deterministic) +} +func (dst *TaskResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskResult.Merge(dst, src) +} +func (m *TaskResult) XXX_Size() int { + return xxx_messageInfo_TaskResult.Size(m) +} +func (m *TaskResult) XXX_DiscardUnknown() { + xxx_messageInfo_TaskResult.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskResult proto.InternalMessageInfo + +func (m *TaskResult) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TaskResult) GetComplete() bool { + if m != nil { + return m.Complete + } + return false +} + +func (m *TaskResult) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *TaskResult) GetOutput() *any.Any { + if m != nil { + return m.Output + } + return nil +} + +func (m *TaskResult) GetMeta() *any.Any { + if m != nil { + return m.Meta + } + return nil +} + +// Request message for `GetTask`. +type GetTaskRequest struct { + // The task name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{2} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for `UpdateTaskResult`. +type UpdateTaskResultRequest struct { + // The task result name; must match `result.name`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The result being updated. + Result *TaskResult `protobuf:"bytes,2,opt,name=result,proto3" json:"result,omitempty"` + // The fields within `result` that are specified. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // If this is being updated by a bot from BotManager, the source should be + // bot.session_id. 
That way, if two bots accidentally get the same name, we'll + // know to reject updates from the older one. + Source string `protobuf:"bytes,4,opt,name=source,proto3" json:"source,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskResultRequest) Reset() { *m = UpdateTaskResultRequest{} } +func (m *UpdateTaskResultRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskResultRequest) ProtoMessage() {} +func (*UpdateTaskResultRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{3} +} +func (m *UpdateTaskResultRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskResultRequest.Unmarshal(m, b) +} +func (m *UpdateTaskResultRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskResultRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskResultRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskResultRequest.Merge(dst, src) +} +func (m *UpdateTaskResultRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTaskResultRequest.Size(m) +} +func (m *UpdateTaskResultRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskResultRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskResultRequest proto.InternalMessageInfo + +func (m *UpdateTaskResultRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateTaskResultRequest) GetResult() *TaskResult { + if m != nil { + return m.Result + } + return nil +} + +func (m *UpdateTaskResultRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateTaskResultRequest) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +// Request message for `AddTaskLog`. +type AddTaskLogRequest struct { + // The name of the task that will own the new log. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The human-readable name of the log, like `stdout` or a relative file path. 
+ LogId string `protobuf:"bytes,2,opt,name=log_id,json=logId,proto3" json:"log_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddTaskLogRequest) Reset() { *m = AddTaskLogRequest{} } +func (m *AddTaskLogRequest) String() string { return proto.CompactTextString(m) } +func (*AddTaskLogRequest) ProtoMessage() {} +func (*AddTaskLogRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{4} +} +func (m *AddTaskLogRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddTaskLogRequest.Unmarshal(m, b) +} +func (m *AddTaskLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddTaskLogRequest.Marshal(b, m, deterministic) +} +func (dst *AddTaskLogRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddTaskLogRequest.Merge(dst, src) +} +func (m *AddTaskLogRequest) XXX_Size() int { + return xxx_messageInfo_AddTaskLogRequest.Size(m) +} +func (m *AddTaskLogRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddTaskLogRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddTaskLogRequest proto.InternalMessageInfo + +func (m *AddTaskLogRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AddTaskLogRequest) GetLogId() string { + if m != nil { + return m.LogId + } + return "" +} + +// Response message for `AddTaskLog`. +type AddTaskLogResponse struct { + // The handle for the new log, as would be returned in Task.logs. + Handle string `protobuf:"bytes,1,opt,name=handle,proto3" json:"handle,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddTaskLogResponse) Reset() { *m = AddTaskLogResponse{} } +func (m *AddTaskLogResponse) String() string { return proto.CompactTextString(m) } +func (*AddTaskLogResponse) ProtoMessage() {} +func (*AddTaskLogResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_tasks_67f801ee7e4c185b, []int{5} +} +func (m *AddTaskLogResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddTaskLogResponse.Unmarshal(m, b) +} +func (m *AddTaskLogResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddTaskLogResponse.Marshal(b, m, deterministic) +} +func (dst *AddTaskLogResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddTaskLogResponse.Merge(dst, src) +} +func (m *AddTaskLogResponse) XXX_Size() int { + return xxx_messageInfo_AddTaskLogResponse.Size(m) +} +func (m *AddTaskLogResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddTaskLogResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddTaskLogResponse proto.InternalMessageInfo + +func (m *AddTaskLogResponse) GetHandle() string { + if m != nil { + return m.Handle + } + return "" +} + +func init() { + proto.RegisterType((*Task)(nil), "google.devtools.remoteworkers.v1test2.Task") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.remoteworkers.v1test2.Task.LogsEntry") + proto.RegisterType((*TaskResult)(nil), "google.devtools.remoteworkers.v1test2.TaskResult") + proto.RegisterType((*GetTaskRequest)(nil), "google.devtools.remoteworkers.v1test2.GetTaskRequest") + proto.RegisterType((*UpdateTaskResultRequest)(nil), "google.devtools.remoteworkers.v1test2.UpdateTaskResultRequest") + proto.RegisterType((*AddTaskLogRequest)(nil), "google.devtools.remoteworkers.v1test2.AddTaskLogRequest") + proto.RegisterType((*AddTaskLogResponse)(nil), 
"google.devtools.remoteworkers.v1test2.AddTaskLogResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TasksClient is the client API for Tasks service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TasksClient interface { + // DEPRECATED - use Lease.payload instead. + // GetTask reads the current state of the task. Tasks must be created through + // some other interface, and should be immutable once created and exposed to + // the bots. + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) + // DEPRECATED - use Lease.result instead. + // UpdateTaskResult updates the result. + UpdateTaskResult(ctx context.Context, in *UpdateTaskResultRequest, opts ...grpc.CallOption) (*TaskResult, error) + // DEPRECATED - precreate logs prior to sending to bot. + // AddTaskLog creates a new streaming log. The log is streamed and marked as + // completed through other interfaces (i.e., ByteStream). This can be called + // by the bot if it wants to create a new log; the server can also predefine + // logs that do not need to be created (e.g. `stdout`). + AddTaskLog(ctx context.Context, in *AddTaskLogRequest, opts ...grpc.CallOption) (*AddTaskLogResponse, error) +} + +type tasksClient struct { + cc *grpc.ClientConn +} + +func NewTasksClient(cc *grpc.ClientConn) TasksClient { + return &tasksClient{cc} +} + +func (c *tasksClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*Task, error) { + out := new(Task) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Tasks/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tasksClient) UpdateTaskResult(ctx context.Context, in *UpdateTaskResultRequest, opts ...grpc.CallOption) (*TaskResult, error) { + out := new(TaskResult) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Tasks/UpdateTaskResult", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *tasksClient) AddTaskLog(ctx context.Context, in *AddTaskLogRequest, opts ...grpc.CallOption) (*AddTaskLogResponse, error) { + out := new(AddTaskLogResponse) + err := c.cc.Invoke(ctx, "/google.devtools.remoteworkers.v1test2.Tasks/AddTaskLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TasksServer is the server API for Tasks service. +type TasksServer interface { + // DEPRECATED - use Lease.payload instead. + // GetTask reads the current state of the task. Tasks must be created through + // some other interface, and should be immutable once created and exposed to + // the bots. + GetTask(context.Context, *GetTaskRequest) (*Task, error) + // DEPRECATED - use Lease.result instead. + // UpdateTaskResult updates the result. + UpdateTaskResult(context.Context, *UpdateTaskResultRequest) (*TaskResult, error) + // DEPRECATED - precreate logs prior to sending to bot. + // AddTaskLog creates a new streaming log. The log is streamed and marked as + // completed through other interfaces (i.e., ByteStream). 
This can be called + // by the bot if it wants to create a new log; the server can also predefine + // logs that do not need to be created (e.g. `stdout`). + AddTaskLog(context.Context, *AddTaskLogRequest) (*AddTaskLogResponse, error) +} + +func RegisterTasksServer(s *grpc.Server, srv TasksServer) { + s.RegisterService(&_Tasks_serviceDesc, srv) +} + +func _Tasks_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TasksServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Tasks/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TasksServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Tasks_UpdateTaskResult_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskResultRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TasksServer).UpdateTaskResult(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Tasks/UpdateTaskResult", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TasksServer).UpdateTaskResult(ctx, req.(*UpdateTaskResultRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Tasks_AddTaskLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddTaskLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TasksServer).AddTaskLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.remoteworkers.v1test2.Tasks/AddTaskLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TasksServer).AddTaskLog(ctx, req.(*AddTaskLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Tasks_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.remoteworkers.v1test2.Tasks", + HandlerType: (*TasksServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetTask", + Handler: _Tasks_GetTask_Handler, + }, + { + MethodName: "UpdateTaskResult", + Handler: _Tasks_UpdateTaskResult_Handler, + }, + { + MethodName: "AddTaskLog", + Handler: _Tasks_AddTaskLog_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/remoteworkers/v1test2/tasks.proto", +} + +func init() { + proto.RegisterFile("google/devtools/remoteworkers/v1test2/tasks.proto", fileDescriptor_tasks_67f801ee7e4c185b) +} + +var fileDescriptor_tasks_67f801ee7e4c185b = []byte{ + // 682 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0xcf, 0x6b, 0x13, 0x41, + 0x14, 0x66, 0x92, 0x34, 0x36, 0x2f, 0x20, 0x75, 0xa8, 0x36, 0x2e, 0x3d, 0xc4, 0xc5, 0x4a, 0x4c, + 0xcb, 0x2e, 0x89, 0xa8, 0x35, 0xc5, 0x42, 0x8b, 0x5a, 0x0a, 0x15, 0x64, 0xad, 0x16, 0xbc, 0x94, + 0x69, 0x76, 0x3a, 0x86, 0x6c, 0x76, 0xd6, 0x9d, 0xd9, 0x48, 0x90, 0x5e, 0x3c, 0x79, 0x15, 0xff, + 0x03, 0x8f, 0x5e, 0xbc, 0x7a, 0x14, 0xef, 0x9e, 0x04, 0xff, 0x02, 0xff, 0x10, 0x99, 
0x1f, 0x69, + 0x9b, 0xfe, 0x88, 0xa9, 0xb7, 0x99, 0x79, 0xdf, 0xf7, 0xde, 0xfb, 0xde, 0xf7, 0xb2, 0x81, 0x06, + 0xe3, 0x9c, 0x45, 0xd4, 0x0f, 0x69, 0x5f, 0x72, 0x1e, 0x09, 0x3f, 0xa5, 0x3d, 0x2e, 0xe9, 0x5b, + 0x9e, 0x76, 0x69, 0x2a, 0xfc, 0x7e, 0x43, 0x52, 0x21, 0x9b, 0xbe, 0x24, 0xa2, 0x2b, 0xbc, 0x24, + 0xe5, 0x92, 0xe3, 0x05, 0x43, 0xf1, 0x86, 0x14, 0x6f, 0x84, 0xe2, 0x59, 0x8a, 0x33, 0x6f, 0x33, + 0x93, 0xa4, 0xe3, 0x93, 0x38, 0xe6, 0x92, 0xc8, 0x0e, 0x8f, 0x6d, 0x12, 0xe7, 0xba, 0x8d, 0xea, + 0xdb, 0x5e, 0xb6, 0xef, 0x93, 0x78, 0x60, 0x43, 0xd5, 0x93, 0xa1, 0xfd, 0x0e, 0x8d, 0xc2, 0xdd, + 0x1e, 0x11, 0x5d, 0x8b, 0x98, 0xb3, 0x88, 0x34, 0x69, 0xfb, 0x42, 0x12, 0x99, 0xd9, 0xac, 0xee, + 0x6f, 0x04, 0x85, 0x6d, 0x22, 0xba, 0x18, 0x43, 0x21, 0x26, 0x3d, 0x5a, 0x41, 0x55, 0x54, 0x2b, + 0x05, 0xfa, 0x8c, 0xef, 0x41, 0x39, 0xa4, 0xa2, 0x9d, 0x76, 0x12, 0xd5, 0x48, 0x25, 0x57, 0x45, + 0xb5, 0x72, 0x73, 0xd6, 0xb3, 0x6a, 0x86, 0xd5, 0xbc, 0xb5, 0x78, 0x10, 0x1c, 0x07, 0xe2, 0x4d, + 0x28, 0x44, 0x9c, 0x89, 0x4a, 0xbe, 0x9a, 0xaf, 0x95, 0x9b, 0x77, 0xbd, 0x89, 0xe4, 0x7b, 0xaa, + 0x0d, 0x6f, 0x8b, 0x33, 0xf1, 0x38, 0x96, 0xe9, 0x20, 0xd0, 0x29, 0x9c, 0xfb, 0x50, 0x3a, 0x7c, + 0xc2, 0x33, 0x90, 0xef, 0xd2, 0x81, 0x6d, 0x51, 0x1d, 0xf1, 0x2c, 0x4c, 0xf5, 0x49, 0x94, 0x51, + 0xdd, 0x5b, 0x29, 0x30, 0x97, 0x56, 0x6e, 0x19, 0xb9, 0xdf, 0x11, 0x80, 0xca, 0x18, 0x50, 0x91, + 0x45, 0xf2, 0x4c, 0x79, 0x0e, 0x4c, 0xb7, 0x79, 0x2f, 0x89, 0xa8, 0x34, 0xfc, 0xe9, 0xe0, 0xf0, + 0x8e, 0xeb, 0x50, 0x34, 0x73, 0xaa, 0xe4, 0xb5, 0x6a, 0x3c, 0x14, 0x91, 0x26, 0x6d, 0xef, 0xb9, + 0x8e, 0x04, 0x16, 0x81, 0x97, 0xa0, 0xc8, 0x33, 0x99, 0x64, 0xb2, 0x52, 0x18, 0x33, 0x21, 0x8b, + 0xc1, 0x35, 0x28, 0xf4, 0xa8, 0x24, 0x95, 0xa9, 0x31, 0x58, 0x8d, 0x70, 0x6f, 0xc2, 0xe5, 0x0d, + 0x2a, 0x8d, 0x88, 0x37, 0x19, 0x15, 0x67, 0xaa, 0x70, 0x7f, 0x22, 0x98, 0x7b, 0x91, 0x84, 0x44, + 0xd2, 0x23, 0xb9, 0x63, 0xf0, 0x78, 0x13, 0x8a, 0xa9, 0x06, 0x59, 0x3f, 0x1b, 0x17, 0xb0, 0xc7, + 0x66, 0xb7, 0x09, 0xf0, 0x0a, 0x94, 0x33, 0x5d, 0x59, 0xaf, 0x9a, 0x9d, 0x94, 0x73, 0x4a, 0xd1, + 0x13, 0xb5, 0x8d, 0x4f, 0x15, 0x1d, 0x0c, 0x5c, 0x9d, 0xf1, 0x35, 0x28, 0x0a, 0x9e, 0xa5, 0x6d, + 0xaa, 0xa7, 0x56, 0x0a, 0xec, 0xcd, 0x5d, 0x85, 0x2b, 0x6b, 0x61, 0xa8, 0xaa, 0x6d, 0x71, 0x36, + 0x4e, 0xc8, 0x55, 0x28, 0x46, 0x9c, 0xed, 0x76, 0xc2, 0xa1, 0xf9, 0x11, 0x67, 0x9b, 0xa1, 0xbb, + 0x04, 0xf8, 0x38, 0x5f, 0x24, 0x3c, 0x16, 0x54, 0x55, 0x7b, 0x4d, 0xe2, 0x30, 0x1a, 0xa6, 0xb0, + 0xb7, 0xe6, 0x87, 0x02, 0x4c, 0x29, 0xac, 0xc0, 0x1f, 0x11, 0x5c, 0xb2, 0xe3, 0xc6, 0x93, 0xae, + 0xec, 0xa8, 0x3d, 0xce, 0xe2, 0x05, 0x46, 0xe9, 0xba, 0xef, 0x7f, 0xfd, 0xf9, 0x94, 0x9b, 0xc7, + 0xce, 0xe1, 0x27, 0xe3, 0x9d, 0x92, 0xf5, 0xb0, 0x5e, 0x37, 0xdf, 0x0e, 0xbf, 0x7e, 0x80, 0xbf, + 0x21, 0x98, 0x39, 0xe9, 0x2d, 0x5e, 0x9d, 0xb0, 0xca, 0x39, 0x4b, 0xe1, 0x5c, 0xdc, 0x70, 0xb7, + 0xa1, 0x7b, 0x5d, 0x6c, 0xde, 0x38, 0xb7, 0x57, 0xdf, 0xac, 0xc4, 0x41, 0x6b, 0xb8, 0x1b, 0x5f, + 0x11, 0xc0, 0x91, 0x0f, 0x78, 0x79, 0xc2, 0xa2, 0xa7, 0xac, 0x77, 0x1e, 0xfc, 0x07, 0xd3, 0x98, + 0xee, 0x2e, 0xe9, 0xb6, 0x6f, 0xb9, 0xe7, 0xb7, 0x7d, 0xd0, 0x22, 0x61, 0xb8, 0xc5, 0x59, 0x0b, + 0xd5, 0xd7, 0x7f, 0x20, 0xb8, 0xdd, 0xe6, 0xbd, 0xc9, 0xca, 0xad, 0xe3, 0x40, 0x3f, 0xef, 0x98, + 0x67, 0xbd, 0x42, 0xcf, 0xd0, 0xab, 0xc0, 0x92, 0x19, 0x8f, 0x48, 0xcc, 0x3c, 0x9e, 0x32, 0x9f, + 0xd1, 0x58, 0xff, 0x12, 0x7c, 0x13, 0x22, 0x49, 0x47, 0xfc, 0xe3, 0xbf, 0x63, 0x65, 0xe4, 0xf5, + 0x73, 0x2e, 0x17, 0xec, 0x7c, 0xc9, 0x2d, 0x6c, 0x98, 0xcc, 0x8f, 0x68, 0x7f, 0x5b, 0xb7, 0x35, + 0x52, 0xdf, 
0x7b, 0xd9, 0xd8, 0x56, 0xd4, 0xbd, 0xa2, 0xae, 0x75, 0xe7, 0x6f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0xbb, 0x4a, 0x2c, 0x76, 0xa6, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/worker.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/worker.pb.go new file mode 100644 index 0000000..26dd90e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2/worker.pb.go @@ -0,0 +1,412 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/remoteworkers/v1test2/worker.proto + +package remoteworkers // import "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes a worker, which is a list of one or more devices and the +// connections between them. A device could be a computer, a phone, or even an +// accelerator like a GPU; it's up to the farm administrator to decide how to +// model their farm. For example, if a farm only has one type of GPU, the GPU +// could be modelled as a "has_gpu" property on its host computer; if it has +// many subproperties itself, it might be better to model it as a separate +// device. +// +// The first device in the worker is the "primary device" - that is, the device +// running a bot and which is responsible for actually executing commands. All +// other devices are considered to be attached devices, and must be controllable +// by the primary device. +// +// This message (and all its submessages) can be used in two contexts: +// +// * Status: sent by the bot to report the current capabilities of the device to +// allow reservation matching. +// * Request: sent by a client to request a device with certain capabilities in +// a reservation. +// +// Several of the fields in this message have different semantics depending on +// which of which of these contexts it is used. These semantics are described +// below. +// +// Several messages in Worker and its submessages have the concept of keys and +// values, such as `Worker.Property` and `Device.Property`. All keys are simple +// strings, but certain keys are "standard" keys and should be broadly supported +// across farms and implementations; these are listed below each relevant +// message. Bot implementations or farm admins may add *additional* keys, but +// these SHOULD all begin with an underscore so they do not conflict with +// standard keys that may be added in the future. +// +// Keys are not context sensitive. +// +// See http://goo.gl/NurY8g for more information on the Worker message. +type Worker struct { + // A list of devices; the first device is the primary device. See the `Device` + // message for more information. + Devices []*Device `protobuf:"bytes,1,rep,name=devices,proto3" json:"devices,omitempty"` + // A worker may contain "global" properties. 
For example, certain machines + // might be reserved for certain types of jobs, like short-running compilation + // versus long-running integration tests. This property is known as a "pool" + // and is not related to any one device within the worker; rather, it applies + // to the worker as a whole. + // + // The behaviour of repeated keys is identical to that of Device.Property. + Properties []*Worker_Property `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty"` + // Bots can be configured in certain ways when accepting leases. For example, + // many leases are executed inside a Docker container. To support this, the + // bot needs to be able to report that it has Docker installed (and knows how + // to execute something inside a container), and the task submitter needs to + // specify which image should be used to start the container. Similarly, a + // lease may be able to run as one of several users on the worker; in such + // cases, the bot needs to report what users are available, and the submitter + // needs to choose one. + // + // Therefore, when this message is reported by the bot to the service, each + // key represents a *type* of configuration that the bot knows how to set, + // while each *value* represents a legal value for that configuration (the + // empty string is interpretted as a wildcard, such as for Docker images). + // When this message is sent by the server to the bot in the context of a + // lease, it represents a command to the bot to apply the setting. Keys may + // be repeated during reporting but not in a lease. + Configs []*Worker_Config `protobuf:"bytes,3,rep,name=configs,proto3" json:"configs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Worker) Reset() { *m = Worker{} } +func (m *Worker) String() string { return proto.CompactTextString(m) } +func (*Worker) ProtoMessage() {} +func (*Worker) Descriptor() ([]byte, []int) { + return fileDescriptor_worker_5f67d0a1f90b0802, []int{0} +} +func (m *Worker) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Worker.Unmarshal(m, b) +} +func (m *Worker) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Worker.Marshal(b, m, deterministic) +} +func (dst *Worker) XXX_Merge(src proto.Message) { + xxx_messageInfo_Worker.Merge(dst, src) +} +func (m *Worker) XXX_Size() int { + return xxx_messageInfo_Worker.Size(m) +} +func (m *Worker) XXX_DiscardUnknown() { + xxx_messageInfo_Worker.DiscardUnknown(m) +} + +var xxx_messageInfo_Worker proto.InternalMessageInfo + +func (m *Worker) GetDevices() []*Device { + if m != nil { + return m.Devices + } + return nil +} + +func (m *Worker) GetProperties() []*Worker_Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Worker) GetConfigs() []*Worker_Config { + if m != nil { + return m.Configs + } + return nil +} + +// A global property; see the `properties` field for more information. +type Worker_Property struct { + // For general information on keys, see the documentation to `Worker`. + // + // The current set of standard keys are: + // + // * pool: different workers can be reserved for different purposes. For + // example, an admin might want to segregate long-running integration tests + // from short-running unit tests, so unit tests will always get some + // throughput. 
To support this, the server can assign different values for + // `pool` (such as "itest" and "utest") to different workers, and then have + // jobs request workers from those pools. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The property's value. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Worker_Property) Reset() { *m = Worker_Property{} } +func (m *Worker_Property) String() string { return proto.CompactTextString(m) } +func (*Worker_Property) ProtoMessage() {} +func (*Worker_Property) Descriptor() ([]byte, []int) { + return fileDescriptor_worker_5f67d0a1f90b0802, []int{0, 0} +} +func (m *Worker_Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Worker_Property.Unmarshal(m, b) +} +func (m *Worker_Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Worker_Property.Marshal(b, m, deterministic) +} +func (dst *Worker_Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Worker_Property.Merge(dst, src) +} +func (m *Worker_Property) XXX_Size() int { + return xxx_messageInfo_Worker_Property.Size(m) +} +func (m *Worker_Property) XXX_DiscardUnknown() { + xxx_messageInfo_Worker_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Worker_Property proto.InternalMessageInfo + +func (m *Worker_Property) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Worker_Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// A configuration request or report; see the `configs` field for more +// information. +type Worker_Config struct { + // For general information on keys, see the documentation to `Worker`. + // + // The current set of standard keys are: + // + // * DockerImage: the image of the container. When being reported by the + // bot, the empty value should always be included if the bot is able to pull + // its own images; the bot may optionally *also* report images that are + // present in its cache. When being requested in a lease, the value is the + // URI of the image (eg `gcr.io/user/image@sha256:hash`). + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The configuration's value. 
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Worker_Config) Reset() { *m = Worker_Config{} } +func (m *Worker_Config) String() string { return proto.CompactTextString(m) } +func (*Worker_Config) ProtoMessage() {} +func (*Worker_Config) Descriptor() ([]byte, []int) { + return fileDescriptor_worker_5f67d0a1f90b0802, []int{0, 1} +} +func (m *Worker_Config) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Worker_Config.Unmarshal(m, b) +} +func (m *Worker_Config) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Worker_Config.Marshal(b, m, deterministic) +} +func (dst *Worker_Config) XXX_Merge(src proto.Message) { + xxx_messageInfo_Worker_Config.Merge(dst, src) +} +func (m *Worker_Config) XXX_Size() int { + return xxx_messageInfo_Worker_Config.Size(m) +} +func (m *Worker_Config) XXX_DiscardUnknown() { + xxx_messageInfo_Worker_Config.DiscardUnknown(m) +} + +var xxx_messageInfo_Worker_Config proto.InternalMessageInfo + +func (m *Worker_Config) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Worker_Config) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// Any device, including computers, phones, accelerators (e.g. GPUs), etc. All +// names must be unique. +type Device struct { + // The handle can be thought of as the "name" of the device, and must be + // unique within a Worker. + // + // In the Status context, the handle should be some human-understandable name, + // perhaps corresponding to a label physically written on the device to make + // it easy to locate. In the Request context, the name should be the + // *logical* name expected by the task. The bot is responsible for mapping the + // logical name expected by the task to a machine-readable name that the task + // can actually use, such as a USB address. The method by which this mapping + // is communicated to the task is not covered in this API. + Handle string `protobuf:"bytes,1,opt,name=handle,proto3" json:"handle,omitempty"` + // Properties of this device that don't change based on the tasks that are + // running on it, e.g. OS, CPU architecture, etc. + // + // Keys may be repeated, and have the following interpretation: + // + // * Status context: the device can support *any* the listed values. For + // example, an "ISA" property might include "x86", "x86-64" and "sse4". + // + // * Request context: the device *must* support *all* of the listed values. 
+ Properties []*Device_Property `protobuf:"bytes,2,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Device) Reset() { *m = Device{} } +func (m *Device) String() string { return proto.CompactTextString(m) } +func (*Device) ProtoMessage() {} +func (*Device) Descriptor() ([]byte, []int) { + return fileDescriptor_worker_5f67d0a1f90b0802, []int{1} +} +func (m *Device) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Device.Unmarshal(m, b) +} +func (m *Device) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Device.Marshal(b, m, deterministic) +} +func (dst *Device) XXX_Merge(src proto.Message) { + xxx_messageInfo_Device.Merge(dst, src) +} +func (m *Device) XXX_Size() int { + return xxx_messageInfo_Device.Size(m) +} +func (m *Device) XXX_DiscardUnknown() { + xxx_messageInfo_Device.DiscardUnknown(m) +} + +var xxx_messageInfo_Device proto.InternalMessageInfo + +func (m *Device) GetHandle() string { + if m != nil { + return m.Handle + } + return "" +} + +func (m *Device) GetProperties() []*Device_Property { + if m != nil { + return m.Properties + } + return nil +} + +// A device property; see `properties` for more information. +type Device_Property struct { + // For general information on keys, see the documentation to `Worker`. + // + // The current set of standard keys are: + // + // * os: a human-readable description of the OS. Examples include `linux`, + // `ubuntu` and `ubuntu 14.04` (note that a bot may advertise itself as more + // than one). This will be replaced in the future by more well-structured + // keys and values to represent OS variants. + // + // * has-docker: "true" if the bot has Docker installed. This will be + // replaced in the future by a more structured message for Docker support. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The property's value. 
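For readers skimming this vendored file, a minimal sketch of the Status-context usage described in the comments above: a bot reporting one primary device, the standard "pool" and "os" keys, an underscore-prefixed custom property, and a wildcard DockerImage config. The snippet is illustrative only; the import path is the one declared at the top of this file, and all values are invented.

package main

import (
	"fmt"

	remoteworkers "google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2"
)

func main() {
	w := &remoteworkers.Worker{
		// The first (and here only) device is the primary device running the bot.
		Devices: []*remoteworkers.Device{{
			Handle: "primary",
			Properties: []*remoteworkers.Device_Property{
				{Key: "os", Value: "linux"}, // a standard device key
			},
		}},
		// Worker-wide properties: "pool" is a standard key, "_rack" a custom one
		// (custom keys should begin with an underscore).
		Properties: []*remoteworkers.Worker_Property{
			{Key: "pool", Value: "itest"},
			{Key: "_rack", Value: "r12"},
		},
		// An empty DockerImage value reports that the bot can pull any image.
		Configs: []*remoteworkers.Worker_Config{
			{Key: "DockerImage", Value: ""},
		},
	}
	fmt.Println(w)
}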
+ Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Device_Property) Reset() { *m = Device_Property{} } +func (m *Device_Property) String() string { return proto.CompactTextString(m) } +func (*Device_Property) ProtoMessage() {} +func (*Device_Property) Descriptor() ([]byte, []int) { + return fileDescriptor_worker_5f67d0a1f90b0802, []int{1, 0} +} +func (m *Device_Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Device_Property.Unmarshal(m, b) +} +func (m *Device_Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Device_Property.Marshal(b, m, deterministic) +} +func (dst *Device_Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Device_Property.Merge(dst, src) +} +func (m *Device_Property) XXX_Size() int { + return xxx_messageInfo_Device_Property.Size(m) +} +func (m *Device_Property) XXX_DiscardUnknown() { + xxx_messageInfo_Device_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Device_Property proto.InternalMessageInfo + +func (m *Device_Property) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Device_Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func init() { + proto.RegisterType((*Worker)(nil), "google.devtools.remoteworkers.v1test2.Worker") + proto.RegisterType((*Worker_Property)(nil), "google.devtools.remoteworkers.v1test2.Worker.Property") + proto.RegisterType((*Worker_Config)(nil), "google.devtools.remoteworkers.v1test2.Worker.Config") + proto.RegisterType((*Device)(nil), "google.devtools.remoteworkers.v1test2.Device") + proto.RegisterType((*Device_Property)(nil), "google.devtools.remoteworkers.v1test2.Device.Property") +} + +func init() { + proto.RegisterFile("google/devtools/remoteworkers/v1test2/worker.proto", fileDescriptor_worker_5f67d0a1f90b0802) +} + +var fileDescriptor_worker_5f67d0a1f90b0802 = []byte{ + // 332 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x92, 0xc1, 0x4a, 0xf3, 0x40, + 0x10, 0xc7, 0x49, 0xca, 0x97, 0x7e, 0x8e, 0x17, 0x59, 0x45, 0x42, 0x4f, 0xa5, 0x50, 0xa8, 0x07, + 0x37, 0x36, 0x8a, 0x17, 0x6f, 0xb5, 0xd0, 0x9b, 0x94, 0xa5, 0xb4, 0xe0, 0x2d, 0xb6, 0xe3, 0x1a, + 0x9a, 0x66, 0xc2, 0x6e, 0x8c, 0xf4, 0x75, 0x3c, 0x8a, 0x6f, 0xe1, 0x83, 0xf8, 0x2a, 0xd2, 0xdd, + 0x04, 0x0c, 0x08, 0x46, 0x3d, 0x25, 0x33, 0xe1, 0xf7, 0x9b, 0xc9, 0x9f, 0x81, 0x50, 0x12, 0xc9, + 0x04, 0x83, 0x15, 0x16, 0x39, 0x51, 0xa2, 0x03, 0x85, 0x1b, 0xca, 0xf1, 0x89, 0xd4, 0x1a, 0x95, + 0x0e, 0x8a, 0x61, 0x8e, 0x3a, 0x0f, 0x03, 0x5b, 0xf3, 0x4c, 0x51, 0x4e, 0xac, 0x6f, 0x19, 0x5e, + 0x31, 0xbc, 0xc6, 0xf0, 0x92, 0xe9, 0xbd, 0xbb, 0xe0, 0x2d, 0x4c, 0x8f, 0x4d, 0xa0, 0xbd, 0xc2, + 0x22, 0x5e, 0xa2, 0xf6, 0x9d, 0x6e, 0x6b, 0xb0, 0x1f, 0x9e, 0xf2, 0x46, 0x0e, 0x3e, 0x36, 0x94, + 0xa8, 0x68, 0x36, 0x07, 0xc8, 0x14, 0x65, 0xa8, 0xf2, 0x18, 0xb5, 0xef, 0x1a, 0xd7, 0x65, 0x43, + 0x97, 0xdd, 0x85, 0x4f, 0x2d, 0xbf, 0x15, 0x9f, 0x4c, 0xec, 0x06, 0xda, 0x4b, 0x4a, 0xef, 0x63, + 0xa9, 0xfd, 0x96, 0x91, 0x5e, 0xfc, 0x4c, 0x7a, 0x6d, 0x60, 0x51, 0x49, 0x3a, 0x21, 0xfc, 0xaf, + 0xe6, 0xb0, 0x03, 0x68, 0xad, 0x71, 0xeb, 0x3b, 0x5d, 0x67, 0xb0, 0x27, 0x76, 0xaf, 0xec, 0x08, + 0xfe, 0x15, 0x51, 0xf2, 0x88, 0xbe, 0x6b, 0x7a, 0xb6, 0xe8, 0x9c, 0x81, 0x67, 0x35, 0x4d, 0x89, + 0xde, 0xab, 0x03, 0x9e, 0x4d, 0x88, 0x1d, 0x83, 0xf7, 0x10, 0xa5, 0xab, 0x04, 0x4b, 
0xaa, 0xac, + 0xfe, 0x14, 0x98, 0x55, 0x7f, 0x19, 0xd8, 0x6f, 0x7e, 0x70, 0xf4, 0xe6, 0xc0, 0xc9, 0x92, 0x36, + 0xcd, 0xa6, 0x8f, 0x0e, 0x85, 0x69, 0xdb, 0x80, 0xb5, 0x7d, 0x4c, 0x9d, 0x5b, 0x51, 0xd2, 0x92, + 0x92, 0x28, 0x95, 0x9c, 0x94, 0x0c, 0x24, 0xa6, 0xe6, 0x30, 0x03, 0xfb, 0x29, 0xca, 0x62, 0xfd, + 0xcd, 0x3d, 0x5f, 0xd5, 0xba, 0xcf, 0xae, 0x2b, 0x16, 0x2f, 0x6e, 0x7f, 0x62, 0xcd, 0x63, 0x2c, + 0x66, 0x66, 0xaf, 0xda, 0x02, 0x7c, 0x3e, 0x9c, 0xed, 0xd0, 0x3b, 0xcf, 0xcc, 0x3a, 0xff, 0x08, + 0x00, 0x00, 0xff, 0xff, 0x4e, 0x67, 0x96, 0xee, 0x3a, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/action.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/action.pb.go new file mode 100644 index 0000000..898a381 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/action.pb.go @@ -0,0 +1,1438 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/action.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates how/where this Action was executed. +type ExecutionStrategy int32 + +const ( + // The action did not indicate how it was executed. + ExecutionStrategy_EXECUTION_STRATEGY_UNSPECIFIED ExecutionStrategy = 0 + // The action was executed in some other form. + ExecutionStrategy_OTHER_ENVIRONMENT ExecutionStrategy = 1 + // The action used a remote build service. + ExecutionStrategy_REMOTE_SERVICE ExecutionStrategy = 2 + // The action was executed locally, in parallel with other actions. + ExecutionStrategy_LOCAL_PARALLEL ExecutionStrategy = 3 + // The action was executed locally, without parallelism. + ExecutionStrategy_LOCAL_SEQUENTIAL ExecutionStrategy = 4 +) + +var ExecutionStrategy_name = map[int32]string{ + 0: "EXECUTION_STRATEGY_UNSPECIFIED", + 1: "OTHER_ENVIRONMENT", + 2: "REMOTE_SERVICE", + 3: "LOCAL_PARALLEL", + 4: "LOCAL_SEQUENTIAL", +} +var ExecutionStrategy_value = map[string]int32{ + "EXECUTION_STRATEGY_UNSPECIFIED": 0, + "OTHER_ENVIRONMENT": 1, + "REMOTE_SERVICE": 2, + "LOCAL_PARALLEL": 3, + "LOCAL_SEQUENTIAL": 4, +} + +func (x ExecutionStrategy) String() string { + return proto.EnumName(ExecutionStrategy_name, int32(x)) +} +func (ExecutionStrategy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{0} +} + +// Most build systems cache build results to speed up incremental builds. +// Some also cache test results too. This indicates whether the test results +// were found in a cache, and where that cache was located. +type TestCaching int32 + +const ( + // The implicit default enum value. Should never be set. + TestCaching_TEST_CACHING_UNSPECIFIED TestCaching = 0 + // The test result was found in a local cache, so it wasn't run again. 
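The generated *_name and *_value maps above back each enum's String() method, so a value logs as its proto identifier rather than a bare integer. A small sketch of that behaviour (the chosen value is arbitrary):

package main

import (
	"fmt"

	resultstore "google.golang.org/genproto/googleapis/devtools/resultstore/v2"
)

func main() {
	s := resultstore.ExecutionStrategy_REMOTE_SERVICE
	fmt.Println(s)        // prints REMOTE_SERVICE via the generated String()
	fmt.Println(int32(s)) // prints 2, the wire value from the map above
}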
+ TestCaching_LOCAL_CACHE_HIT TestCaching = 1 + // The test result was found in a remote cache, so it wasn't run again. + TestCaching_REMOTE_CACHE_HIT TestCaching = 2 + // The test result was not found in any cache, so it had to be run again. + TestCaching_CACHE_MISS TestCaching = 3 +) + +var TestCaching_name = map[int32]string{ + 0: "TEST_CACHING_UNSPECIFIED", + 1: "LOCAL_CACHE_HIT", + 2: "REMOTE_CACHE_HIT", + 3: "CACHE_MISS", +} +var TestCaching_value = map[string]int32{ + "TEST_CACHING_UNSPECIFIED": 0, + "LOCAL_CACHE_HIT": 1, + "REMOTE_CACHE_HIT": 2, + "CACHE_MISS": 3, +} + +func (x TestCaching) String() string { + return proto.EnumName(TestCaching_name, int32(x)) +} +func (TestCaching) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{1} +} + +// Errors in file post-processing are categorized using this enum. +type FileProcessingErrorType int32 + +const ( + // Type unspecified or not listed here. + FileProcessingErrorType_FILE_PROCESSING_ERROR_TYPE_UNSPECIFIED FileProcessingErrorType = 0 + // A read error occurred trying to read the file. + FileProcessingErrorType_GENERIC_READ_ERROR FileProcessingErrorType = 1 + // There was an error trying to parse the file. + FileProcessingErrorType_GENERIC_PARSE_ERROR FileProcessingErrorType = 2 + // File is exceeds size limit. + FileProcessingErrorType_FILE_TOO_LARGE FileProcessingErrorType = 3 + // The result of parsing the file exceeded size limit. + FileProcessingErrorType_OUTPUT_TOO_LARGE FileProcessingErrorType = 4 + // Read access to the file was denied by file system. + FileProcessingErrorType_ACCESS_DENIED FileProcessingErrorType = 5 + // Deadline exceeded trying to read the file. + FileProcessingErrorType_DEADLINE_EXCEEDED FileProcessingErrorType = 6 + // File not found. + FileProcessingErrorType_NOT_FOUND FileProcessingErrorType = 7 +) + +var FileProcessingErrorType_name = map[int32]string{ + 0: "FILE_PROCESSING_ERROR_TYPE_UNSPECIFIED", + 1: "GENERIC_READ_ERROR", + 2: "GENERIC_PARSE_ERROR", + 3: "FILE_TOO_LARGE", + 4: "OUTPUT_TOO_LARGE", + 5: "ACCESS_DENIED", + 6: "DEADLINE_EXCEEDED", + 7: "NOT_FOUND", +} +var FileProcessingErrorType_value = map[string]int32{ + "FILE_PROCESSING_ERROR_TYPE_UNSPECIFIED": 0, + "GENERIC_READ_ERROR": 1, + "GENERIC_PARSE_ERROR": 2, + "FILE_TOO_LARGE": 3, + "OUTPUT_TOO_LARGE": 4, + "ACCESS_DENIED": 5, + "DEADLINE_EXCEEDED": 6, + "NOT_FOUND": 7, +} + +func (x FileProcessingErrorType) String() string { + return proto.EnumName(FileProcessingErrorType_name, int32(x)) +} +func (FileProcessingErrorType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{2} +} + +// An action that happened as part of a configured target. This action could be +// a build, a test, or another type of action. +// Each parent ConfiguredTarget resource should have at least one Action as its +// child resource before the invocation is finalized. ResultStore is a tool to +// store build & test results. ConfiguredTarget proto by itself does not contain +// enough fields to fully represent such results. For a simple build, at least +// one build action is required to represent the build result. +type Action struct { + // The resource name. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID}/actions/${ACTION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Action. They must match the + // resource name after proper encoding. 
+ Id *Action_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The status of the action. + StatusAttributes *StatusAttributes `protobuf:"bytes,3,opt,name=status_attributes,json=statusAttributes,proto3" json:"status_attributes,omitempty"` + // The timing of the whole action. For TestActions, the start time may be + // before the test actually started, and the duration may last until after the + // test actually finished. + Timing *Timing `protobuf:"bytes,4,opt,name=timing,proto3" json:"timing,omitempty"` + // The type of the action. The type of an action may not change over the + // lifetime of the invocation. If one of these fields is to be set, it must be + // set in the CreateAction method. It may be set to an empty message that is + // populated in later methods or post-processing. A generic "untyped" action + // can be created by not setting any of these fields. An untyped action will + // be untyped for the lifetime of the invocation. + // + // Types that are valid to be assigned to ActionType: + // *Action_BuildAction + // *Action_TestAction + ActionType isAction_ActionType `protobuf_oneof:"action_type"` + // General attributes of the action. + ActionAttributes *ActionAttributes `protobuf:"bytes,5,opt,name=action_attributes,json=actionAttributes,proto3" json:"action_attributes,omitempty"` + // A list of resources that this action depended upon. May be used to provide + // the cause of a build failure in the case of a failed build action. + ActionDependencies []*Dependency `protobuf:"bytes,14,rep,name=action_dependencies,json=actionDependencies,proto3" json:"action_dependencies,omitempty"` + // Arbitrary name-value pairs. + // This is implemented as a multi-map. Multiple properties are allowed with + // the same key. Properties will be returned in lexicographical order by key. + Properties []*Property `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty"` + // A list of file references for action level files. + // The file IDs must be unique within this list. Duplicate file IDs will + // result in an error. Files will be returned in lexicographical order by ID. + // Files with the following reserved file IDs cause specific post-processing + // or have special handling: + // + // For build actions: + // stdout: The stdout of the action + // stderr: The stderr of the action + // baseline.lcov: Baseline coverage file to be parsed by the server. This + // uses a stripped down implementation of the LCOV standard. + // http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php + // + // For test actions: + // test.xml: The test suite / test case data in XML format. + // test.log: The combined stdout and stderr of the test process. + // test.lcov: Coverage file to be parsed by the server. This uses a stripped + // down implementation of the LCOV standard. + // http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php + Files []*File `protobuf:"bytes,8,rep,name=files,proto3" json:"files,omitempty"` + // Coverage data was collected while running the build or test action. This + // usually includes line coverage, and may also include branch coverage. + // For test actions, this is usually only for the source files which were + // actually executed by that particular action. + // For build actions, this is the baseline coverage, which captures the + // instrumented files and lines, without any lines being executed. This + // ensures files that are never covered at all are included. 
+ Coverage *ActionCoverage `protobuf:"bytes,11,opt,name=coverage,proto3" json:"coverage,omitempty"` + // ResultStore will read and parse Files with reserved IDs listed above. Read + // and parse errors for all these Files are reported here. + // This is implemented as a map, with one FileProcessingErrors for each file. + // Typically produced when parsing Files, but may also be provided directly + // by clients. + FileProcessingErrors []*FileProcessingErrors `protobuf:"bytes,13,rep,name=file_processing_errors,json=fileProcessingErrors,proto3" json:"file_processing_errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action) Reset() { *m = Action{} } +func (m *Action) String() string { return proto.CompactTextString(m) } +func (*Action) ProtoMessage() {} +func (*Action) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{0} +} +func (m *Action) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action.Unmarshal(m, b) +} +func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action.Marshal(b, m, deterministic) +} +func (dst *Action) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action.Merge(dst, src) +} +func (m *Action) XXX_Size() int { + return xxx_messageInfo_Action.Size(m) +} +func (m *Action) XXX_DiscardUnknown() { + xxx_messageInfo_Action.DiscardUnknown(m) +} + +var xxx_messageInfo_Action proto.InternalMessageInfo + +func (m *Action) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Action) GetId() *Action_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *Action) GetStatusAttributes() *StatusAttributes { + if m != nil { + return m.StatusAttributes + } + return nil +} + +func (m *Action) GetTiming() *Timing { + if m != nil { + return m.Timing + } + return nil +} + +type isAction_ActionType interface { + isAction_ActionType() +} + +type Action_BuildAction struct { + BuildAction *BuildAction `protobuf:"bytes,9,opt,name=build_action,json=buildAction,proto3,oneof"` +} + +type Action_TestAction struct { + TestAction *TestAction `protobuf:"bytes,10,opt,name=test_action,json=testAction,proto3,oneof"` +} + +func (*Action_BuildAction) isAction_ActionType() {} + +func (*Action_TestAction) isAction_ActionType() {} + +func (m *Action) GetActionType() isAction_ActionType { + if m != nil { + return m.ActionType + } + return nil +} + +func (m *Action) GetBuildAction() *BuildAction { + if x, ok := m.GetActionType().(*Action_BuildAction); ok { + return x.BuildAction + } + return nil +} + +func (m *Action) GetTestAction() *TestAction { + if x, ok := m.GetActionType().(*Action_TestAction); ok { + return x.TestAction + } + return nil +} + +func (m *Action) GetActionAttributes() *ActionAttributes { + if m != nil { + return m.ActionAttributes + } + return nil +} + +func (m *Action) GetActionDependencies() []*Dependency { + if m != nil { + return m.ActionDependencies + } + return nil +} + +func (m *Action) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Action) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +func (m *Action) GetCoverage() *ActionCoverage { + if m != nil { + return m.Coverage + } + return nil +} + +func (m *Action) GetFileProcessingErrors() []*FileProcessingErrors { + if m != nil { + return m.FileProcessingErrors + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the 
proto package. +func (*Action) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Action_OneofMarshaler, _Action_OneofUnmarshaler, _Action_OneofSizer, []interface{}{ + (*Action_BuildAction)(nil), + (*Action_TestAction)(nil), + } +} + +func _Action_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Action) + // action_type + switch x := m.ActionType.(type) { + case *Action_BuildAction: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BuildAction); err != nil { + return err + } + case *Action_TestAction: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TestAction); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Action.ActionType has unexpected type %T", x) + } + return nil +} + +func _Action_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Action) + switch tag { + case 9: // action_type.build_action + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BuildAction) + err := b.DecodeMessage(msg) + m.ActionType = &Action_BuildAction{msg} + return true, err + case 10: // action_type.test_action + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TestAction) + err := b.DecodeMessage(msg) + m.ActionType = &Action_TestAction{msg} + return true, err + default: + return false, nil + } +} + +func _Action_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Action) + // action_type + switch x := m.ActionType.(type) { + case *Action_BuildAction: + s := proto.Size(x.BuildAction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Action_TestAction: + s := proto.Size(x.TestAction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The resource ID components that identify the Action. +type Action_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The Target ID. + TargetId string `protobuf:"bytes,2,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // The Configuration ID. + ConfigurationId string `protobuf:"bytes,3,opt,name=configuration_id,json=configurationId,proto3" json:"configuration_id,omitempty"` + // The Action ID. 
+ ActionId string `protobuf:"bytes,4,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action_Id) Reset() { *m = Action_Id{} } +func (m *Action_Id) String() string { return proto.CompactTextString(m) } +func (*Action_Id) ProtoMessage() {} +func (*Action_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{0, 0} +} +func (m *Action_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action_Id.Unmarshal(m, b) +} +func (m *Action_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action_Id.Marshal(b, m, deterministic) +} +func (dst *Action_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action_Id.Merge(dst, src) +} +func (m *Action_Id) XXX_Size() int { + return xxx_messageInfo_Action_Id.Size(m) +} +func (m *Action_Id) XXX_DiscardUnknown() { + xxx_messageInfo_Action_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_Action_Id proto.InternalMessageInfo + +func (m *Action_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *Action_Id) GetTargetId() string { + if m != nil { + return m.TargetId + } + return "" +} + +func (m *Action_Id) GetConfigurationId() string { + if m != nil { + return m.ConfigurationId + } + return "" +} + +func (m *Action_Id) GetActionId() string { + if m != nil { + return m.ActionId + } + return "" +} + +// A build action, such as building a java library. +type BuildAction struct { + // The type of the action. This is intended to be a clue as to how the output + // of the action should be parsed. For example "javac" for a Java compile + // action. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The "primary" input artifact processed by this action. E.g., the .cc file + // of a C++ compile action. Empty string ("") if the action has no input + // artifacts or no "primary" input artifact. + PrimaryInputPath string `protobuf:"bytes,2,opt,name=primary_input_path,json=primaryInputPath,proto3" json:"primary_input_path,omitempty"` + // The "primary" output artifact processed by this action. E.g., the .o file + // of a C++ compile action. Empty string ("") if the action has no output + // artifacts or no "primary" output artifact. 
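A short sketch of how a client might assemble an Action for a build step using the Action_Id components and the Action_BuildAction oneof wrapper defined above. The resource name and IDs are placeholders that follow the format documented on the Name field.

package main

import (
	"fmt"

	resultstore "google.golang.org/genproto/googleapis/devtools/resultstore/v2"
)

func main() {
	a := &resultstore.Action{
		Name: "invocations/inv1/targets/t1/configuredTargets/c1/actions/build",
		Id: &resultstore.Action_Id{
			InvocationId:    "inv1",
			TargetId:        "t1",
			ConfigurationId: "c1",
			ActionId:        "build",
		},
		// Setting this oneof arm marks the action as a build action; the
		// alternative wrapper is Action_TestAction for test actions.
		ActionType: &resultstore.Action_BuildAction{
			BuildAction: &resultstore.BuildAction{
				Type:             "javac", // a hint for parsing the action's output
				PrimaryInputPath: "src/Foo.java",
			},
		},
	}
	// The generated accessor returns nil unless the build_action arm is set.
	fmt.Println(a.GetBuildAction().GetType()) // javac
}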
+ PrimaryOutputPath string `protobuf:"bytes,3,opt,name=primary_output_path,json=primaryOutputPath,proto3" json:"primary_output_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BuildAction) Reset() { *m = BuildAction{} } +func (m *BuildAction) String() string { return proto.CompactTextString(m) } +func (*BuildAction) ProtoMessage() {} +func (*BuildAction) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{1} +} +func (m *BuildAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BuildAction.Unmarshal(m, b) +} +func (m *BuildAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BuildAction.Marshal(b, m, deterministic) +} +func (dst *BuildAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_BuildAction.Merge(dst, src) +} +func (m *BuildAction) XXX_Size() int { + return xxx_messageInfo_BuildAction.Size(m) +} +func (m *BuildAction) XXX_DiscardUnknown() { + xxx_messageInfo_BuildAction.DiscardUnknown(m) +} + +var xxx_messageInfo_BuildAction proto.InternalMessageInfo + +func (m *BuildAction) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *BuildAction) GetPrimaryInputPath() string { + if m != nil { + return m.PrimaryInputPath + } + return "" +} + +func (m *BuildAction) GetPrimaryOutputPath() string { + if m != nil { + return m.PrimaryOutputPath + } + return "" +} + +// A test action, such as running a JUnit4 test binary. +type TestAction struct { + // Timing data for execution of the test action. + TestTiming *TestTiming `protobuf:"bytes,1,opt,name=test_timing,json=testTiming,proto3" json:"test_timing,omitempty"` + // If the test is divided up into shards to improve performance, set this to + // indicate which shard this test action is for. Value must be in interval + // [0, total_shard_count). Defaults to 0, which is appropriate if all test + // cases are run in the same process. + ShardNumber int32 `protobuf:"varint,2,opt,name=shard_number,json=shardNumber,proto3" json:"shard_number,omitempty"` + // If the user requested that every test be run multiple times, as is often + // done to measure flakiness, set this to indicate which run this test action + // is for. Value must be in interval [0, total_run_count). Defaults to 0, + // which is appropriate if multiple runs were not requested. + RunNumber int32 `protobuf:"varint,3,opt,name=run_number,json=runNumber,proto3" json:"run_number,omitempty"` + // If flaky tests are automatically retried, set this to indicate which + // attempt this test action is for. (e.g. 0 for the first attempt, 1 for + // second, and so on). Defaults to 0, which is appropriate if this is only + // attempt. + AttemptNumber int32 `protobuf:"varint,4,opt,name=attempt_number,json=attemptNumber,proto3" json:"attempt_number,omitempty"` + // A tree of test suites and test cases that were run by this test action. + // Each test case has its own status information, including stack traces. + // Typically produced by parsing an XML Log, but may also be provided directly + // by clients. + TestSuite *TestSuite `protobuf:"bytes,5,opt,name=test_suite,json=testSuite,proto3" json:"test_suite,omitempty"` + // Warnings for this test action. + Warnings []*TestWarning `protobuf:"bytes,8,rep,name=warnings,proto3" json:"warnings,omitempty"` + // Estimated memory consumption of the test action, in bytes. 
A default value + // of 0 means there is no memory consumption estimate specified. + EstimatedMemoryBytes int64 `protobuf:"varint,10,opt,name=estimated_memory_bytes,json=estimatedMemoryBytes,proto3" json:"estimated_memory_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestAction) Reset() { *m = TestAction{} } +func (m *TestAction) String() string { return proto.CompactTextString(m) } +func (*TestAction) ProtoMessage() {} +func (*TestAction) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{2} +} +func (m *TestAction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestAction.Unmarshal(m, b) +} +func (m *TestAction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestAction.Marshal(b, m, deterministic) +} +func (dst *TestAction) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestAction.Merge(dst, src) +} +func (m *TestAction) XXX_Size() int { + return xxx_messageInfo_TestAction.Size(m) +} +func (m *TestAction) XXX_DiscardUnknown() { + xxx_messageInfo_TestAction.DiscardUnknown(m) +} + +var xxx_messageInfo_TestAction proto.InternalMessageInfo + +func (m *TestAction) GetTestTiming() *TestTiming { + if m != nil { + return m.TestTiming + } + return nil +} + +func (m *TestAction) GetShardNumber() int32 { + if m != nil { + return m.ShardNumber + } + return 0 +} + +func (m *TestAction) GetRunNumber() int32 { + if m != nil { + return m.RunNumber + } + return 0 +} + +func (m *TestAction) GetAttemptNumber() int32 { + if m != nil { + return m.AttemptNumber + } + return 0 +} + +func (m *TestAction) GetTestSuite() *TestSuite { + if m != nil { + return m.TestSuite + } + return nil +} + +func (m *TestAction) GetWarnings() []*TestWarning { + if m != nil { + return m.Warnings + } + return nil +} + +func (m *TestAction) GetEstimatedMemoryBytes() int64 { + if m != nil { + return m.EstimatedMemoryBytes + } + return 0 +} + +// General attributes of an action +type ActionAttributes struct { + // Strategy used for executing the action. + ExecutionStrategy ExecutionStrategy `protobuf:"varint,1,opt,name=execution_strategy,json=executionStrategy,proto3,enum=google.devtools.resultstore.v2.ExecutionStrategy" json:"execution_strategy,omitempty"` + // Exit code of the process that ran the action. A non-zero value means + // failure. + ExitCode int32 `protobuf:"varint,2,opt,name=exit_code,json=exitCode,proto3" json:"exit_code,omitempty"` + // Where the action was run. + Hostname string `protobuf:"bytes,3,opt,name=hostname,proto3" json:"hostname,omitempty"` + // Information about the input files used in all actions under this configured + // target. 
+ InputFileInfo *InputFileInfo `protobuf:"bytes,4,opt,name=input_file_info,json=inputFileInfo,proto3" json:"input_file_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ActionAttributes) Reset() { *m = ActionAttributes{} } +func (m *ActionAttributes) String() string { return proto.CompactTextString(m) } +func (*ActionAttributes) ProtoMessage() {} +func (*ActionAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{3} +} +func (m *ActionAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ActionAttributes.Unmarshal(m, b) +} +func (m *ActionAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ActionAttributes.Marshal(b, m, deterministic) +} +func (dst *ActionAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_ActionAttributes.Merge(dst, src) +} +func (m *ActionAttributes) XXX_Size() int { + return xxx_messageInfo_ActionAttributes.Size(m) +} +func (m *ActionAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_ActionAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_ActionAttributes proto.InternalMessageInfo + +func (m *ActionAttributes) GetExecutionStrategy() ExecutionStrategy { + if m != nil { + return m.ExecutionStrategy + } + return ExecutionStrategy_EXECUTION_STRATEGY_UNSPECIFIED +} + +func (m *ActionAttributes) GetExitCode() int32 { + if m != nil { + return m.ExitCode + } + return 0 +} + +func (m *ActionAttributes) GetHostname() string { + if m != nil { + return m.Hostname + } + return "" +} + +func (m *ActionAttributes) GetInputFileInfo() *InputFileInfo { + if m != nil { + return m.InputFileInfo + } + return nil +} + +// File count and size information for the input files to a configured target. +type InputFileInfo struct { + // The number of input files (counting every file, even if a duplicate). + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + // The number of distinct input files. + DistinctCount int64 `protobuf:"varint,2,opt,name=distinct_count,json=distinctCount,proto3" json:"distinct_count,omitempty"` + // The max number of input files allowed by the build system (counting every + // file, even if a duplicate). + CountLimit int64 `protobuf:"varint,3,opt,name=count_limit,json=countLimit,proto3" json:"count_limit,omitempty"` + // The total size of the distinct input files. + DistinctBytes int64 `protobuf:"varint,4,opt,name=distinct_bytes,json=distinctBytes,proto3" json:"distinct_bytes,omitempty"` + // The max allowed total size of the distinct input files. 
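A sketch combining the test-sharding fields of TestAction with ActionAttributes and its InputFileInfo, as documented above. All numbers and the hostname are invented for illustration; the zero values carry the documented single-run, first-attempt meaning.

package main

import (
	"fmt"

	resultstore "google.golang.org/genproto/googleapis/devtools/resultstore/v2"
)

func main() {
	test := &resultstore.TestAction{
		ShardNumber:   3, // shard index, in [0, total_shard_count)
		RunNumber:     0, // only one run was requested
		AttemptNumber: 1, // 1 = second attempt, i.e. the first automatic retry
	}
	attrs := &resultstore.ActionAttributes{
		ExecutionStrategy: resultstore.ExecutionStrategy_REMOTE_SERVICE,
		ExitCode:          0, // non-zero would mean failure
		Hostname:          "build-worker-17",
		InputFileInfo: &resultstore.InputFileInfo{
			Count:         1200, // every input file, duplicates included
			DistinctCount: 950,
			CountLimit:    30000,
		},
	}
	fmt.Println(test, attrs)
}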
+ DistinctByteLimit int64 `protobuf:"varint,5,opt,name=distinct_byte_limit,json=distinctByteLimit,proto3" json:"distinct_byte_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InputFileInfo) Reset() { *m = InputFileInfo{} } +func (m *InputFileInfo) String() string { return proto.CompactTextString(m) } +func (*InputFileInfo) ProtoMessage() {} +func (*InputFileInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{4} +} +func (m *InputFileInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InputFileInfo.Unmarshal(m, b) +} +func (m *InputFileInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InputFileInfo.Marshal(b, m, deterministic) +} +func (dst *InputFileInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_InputFileInfo.Merge(dst, src) +} +func (m *InputFileInfo) XXX_Size() int { + return xxx_messageInfo_InputFileInfo.Size(m) +} +func (m *InputFileInfo) XXX_DiscardUnknown() { + xxx_messageInfo_InputFileInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_InputFileInfo proto.InternalMessageInfo + +func (m *InputFileInfo) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *InputFileInfo) GetDistinctCount() int64 { + if m != nil { + return m.DistinctCount + } + return 0 +} + +func (m *InputFileInfo) GetCountLimit() int64 { + if m != nil { + return m.CountLimit + } + return 0 +} + +func (m *InputFileInfo) GetDistinctBytes() int64 { + if m != nil { + return m.DistinctBytes + } + return 0 +} + +func (m *InputFileInfo) GetDistinctByteLimit() int64 { + if m != nil { + return m.DistinctByteLimit + } + return 0 +} + +// Timing data for tests executed locally on the machine running the build. +type LocalTestTiming struct { + // Time taken by the test process, typically surrounded by a small wrapper + // script. + TestProcessDuration *duration.Duration `protobuf:"bytes,1,opt,name=test_process_duration,json=testProcessDuration,proto3" json:"test_process_duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalTestTiming) Reset() { *m = LocalTestTiming{} } +func (m *LocalTestTiming) String() string { return proto.CompactTextString(m) } +func (*LocalTestTiming) ProtoMessage() {} +func (*LocalTestTiming) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{5} +} +func (m *LocalTestTiming) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalTestTiming.Unmarshal(m, b) +} +func (m *LocalTestTiming) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalTestTiming.Marshal(b, m, deterministic) +} +func (dst *LocalTestTiming) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalTestTiming.Merge(dst, src) +} +func (m *LocalTestTiming) XXX_Size() int { + return xxx_messageInfo_LocalTestTiming.Size(m) +} +func (m *LocalTestTiming) XXX_DiscardUnknown() { + xxx_messageInfo_LocalTestTiming.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalTestTiming proto.InternalMessageInfo + +func (m *LocalTestTiming) GetTestProcessDuration() *duration.Duration { + if m != nil { + return m.TestProcessDuration + } + return nil +} + +// Timing data for one attempt to execute a test action remotely. +type RemoteTestAttemptTiming struct { + // Idle period before the test process is invoked on the remote machine. 
+ QueueDuration *duration.Duration `protobuf:"bytes,1,opt,name=queue_duration,json=queueDuration,proto3" json:"queue_duration,omitempty"` + // Time to upload data dependencies from the local machine to the remote + // machine running the test, or to the distributed cache. + UploadDuration *duration.Duration `protobuf:"bytes,2,opt,name=upload_duration,json=uploadDuration,proto3" json:"upload_duration,omitempty"` + // Time to set up the remote machine. + // Not to be confused with setup time in + // xUnit test frameworks, which falls within the test_process_time. + MachineSetupDuration *duration.Duration `protobuf:"bytes,3,opt,name=machine_setup_duration,json=machineSetupDuration,proto3" json:"machine_setup_duration,omitempty"` + // Time taken by the test process, typically surrounded by a small wrapper + // script. + // For Java tests, this includes JVM setup, flag parsing, class path setup, + // parsing files to setup the suite, and finally running your test methods. + // In many cases, only a small fraction of the test process time is spent + // running the test methods. + TestProcessDuration *duration.Duration `protobuf:"bytes,4,opt,name=test_process_duration,json=testProcessDuration,proto3" json:"test_process_duration,omitempty"` + // Time spent retrieving test logs and any other test outputs, back to the + // local machine. + DownloadDuration *duration.Duration `protobuf:"bytes,5,opt,name=download_duration,json=downloadDuration,proto3" json:"download_duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoteTestAttemptTiming) Reset() { *m = RemoteTestAttemptTiming{} } +func (m *RemoteTestAttemptTiming) String() string { return proto.CompactTextString(m) } +func (*RemoteTestAttemptTiming) ProtoMessage() {} +func (*RemoteTestAttemptTiming) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{6} +} +func (m *RemoteTestAttemptTiming) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoteTestAttemptTiming.Unmarshal(m, b) +} +func (m *RemoteTestAttemptTiming) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoteTestAttemptTiming.Marshal(b, m, deterministic) +} +func (dst *RemoteTestAttemptTiming) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoteTestAttemptTiming.Merge(dst, src) +} +func (m *RemoteTestAttemptTiming) XXX_Size() int { + return xxx_messageInfo_RemoteTestAttemptTiming.Size(m) +} +func (m *RemoteTestAttemptTiming) XXX_DiscardUnknown() { + xxx_messageInfo_RemoteTestAttemptTiming.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoteTestAttemptTiming proto.InternalMessageInfo + +func (m *RemoteTestAttemptTiming) GetQueueDuration() *duration.Duration { + if m != nil { + return m.QueueDuration + } + return nil +} + +func (m *RemoteTestAttemptTiming) GetUploadDuration() *duration.Duration { + if m != nil { + return m.UploadDuration + } + return nil +} + +func (m *RemoteTestAttemptTiming) GetMachineSetupDuration() *duration.Duration { + if m != nil { + return m.MachineSetupDuration + } + return nil +} + +func (m *RemoteTestAttemptTiming) GetTestProcessDuration() *duration.Duration { + if m != nil { + return m.TestProcessDuration + } + return nil +} + +func (m *RemoteTestAttemptTiming) GetDownloadDuration() *duration.Duration { + if m != nil { + return m.DownloadDuration + } + return nil +} + +// Timing data for the part of the test execution that is done remotely. 
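A sketch of populating the per-attempt durations using the duration.Duration message this file imports (a Seconds/Nanos pair); the timings below are made up.

package main

import (
	"fmt"

	duration "github.com/golang/protobuf/ptypes/duration"
	resultstore "google.golang.org/genproto/googleapis/devtools/resultstore/v2"
)

func main() {
	attempt := &resultstore.RemoteTestAttemptTiming{
		QueueDuration:        &duration.Duration{Seconds: 4},
		UploadDuration:       &duration.Duration{Seconds: 1, Nanos: 500000000},
		MachineSetupDuration: &duration.Duration{Seconds: 8},
		TestProcessDuration:  &duration.Duration{Seconds: 92},
		DownloadDuration:     &duration.Duration{Seconds: 2},
	}
	// The generated getters are nil-safe on the message pointers they wrap.
	fmt.Println(attempt.GetTestProcessDuration().GetSeconds()) // 92
}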
+type RemoteTestTiming struct { + // Time taken locally to determine what to do. + LocalAnalysisDuration *duration.Duration `protobuf:"bytes,1,opt,name=local_analysis_duration,json=localAnalysisDuration,proto3" json:"local_analysis_duration,omitempty"` + // Normally there is only one attempt, but the system may retry on internal + // errors, leading to multiple attempts. + Attempts []*RemoteTestAttemptTiming `protobuf:"bytes,2,rep,name=attempts,proto3" json:"attempts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoteTestTiming) Reset() { *m = RemoteTestTiming{} } +func (m *RemoteTestTiming) String() string { return proto.CompactTextString(m) } +func (*RemoteTestTiming) ProtoMessage() {} +func (*RemoteTestTiming) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{7} +} +func (m *RemoteTestTiming) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoteTestTiming.Unmarshal(m, b) +} +func (m *RemoteTestTiming) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoteTestTiming.Marshal(b, m, deterministic) +} +func (dst *RemoteTestTiming) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoteTestTiming.Merge(dst, src) +} +func (m *RemoteTestTiming) XXX_Size() int { + return xxx_messageInfo_RemoteTestTiming.Size(m) +} +func (m *RemoteTestTiming) XXX_DiscardUnknown() { + xxx_messageInfo_RemoteTestTiming.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoteTestTiming proto.InternalMessageInfo + +func (m *RemoteTestTiming) GetLocalAnalysisDuration() *duration.Duration { + if m != nil { + return m.LocalAnalysisDuration + } + return nil +} + +func (m *RemoteTestTiming) GetAttempts() []*RemoteTestAttemptTiming { + if m != nil { + return m.Attempts + } + return nil +} + +// Timing data for execution of a test action. The action may be performed +// locally, on the machine running the build, or remotely. +type TestTiming struct { + // Test timing for either a local or remote execution. + // + // Types that are valid to be assigned to Location: + // *TestTiming_Local + // *TestTiming_Remote + Location isTestTiming_Location `protobuf_oneof:"location"` + // The amount of CPU time spent by the test process executing system calls + // within the kernel, as opposed to library code. Time the test process spent + // blocked does not count towards this figure. + SystemTimeDuration *duration.Duration `protobuf:"bytes,3,opt,name=system_time_duration,json=systemTimeDuration,proto3" json:"system_time_duration,omitempty"` + // The amount of CPU time spent by the test process executing user-mode code + // outside the kernel, as opposed to library code. Time the test process + // spent blocked does not count towards this figure. You can add user_time to + // system_time to get total CPU time taken by the test process. + UserTimeDuration *duration.Duration `protobuf:"bytes,4,opt,name=user_time_duration,json=userTimeDuration,proto3" json:"user_time_duration,omitempty"` + // Most build systems cache build results to speed up incremental builds. + // Some also cache test results too. This indicates whether the test results + // were found in a cache, and where that cache was located. 
+ TestCaching TestCaching `protobuf:"varint,5,opt,name=test_caching,json=testCaching,proto3,enum=google.devtools.resultstore.v2.TestCaching" json:"test_caching,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestTiming) Reset() { *m = TestTiming{} } +func (m *TestTiming) String() string { return proto.CompactTextString(m) } +func (*TestTiming) ProtoMessage() {} +func (*TestTiming) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{8} +} +func (m *TestTiming) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestTiming.Unmarshal(m, b) +} +func (m *TestTiming) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestTiming.Marshal(b, m, deterministic) +} +func (dst *TestTiming) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestTiming.Merge(dst, src) +} +func (m *TestTiming) XXX_Size() int { + return xxx_messageInfo_TestTiming.Size(m) +} +func (m *TestTiming) XXX_DiscardUnknown() { + xxx_messageInfo_TestTiming.DiscardUnknown(m) +} + +var xxx_messageInfo_TestTiming proto.InternalMessageInfo + +type isTestTiming_Location interface { + isTestTiming_Location() +} + +type TestTiming_Local struct { + Local *LocalTestTiming `protobuf:"bytes,1,opt,name=local,proto3,oneof"` +} + +type TestTiming_Remote struct { + Remote *RemoteTestTiming `protobuf:"bytes,2,opt,name=remote,proto3,oneof"` +} + +func (*TestTiming_Local) isTestTiming_Location() {} + +func (*TestTiming_Remote) isTestTiming_Location() {} + +func (m *TestTiming) GetLocation() isTestTiming_Location { + if m != nil { + return m.Location + } + return nil +} + +func (m *TestTiming) GetLocal() *LocalTestTiming { + if x, ok := m.GetLocation().(*TestTiming_Local); ok { + return x.Local + } + return nil +} + +func (m *TestTiming) GetRemote() *RemoteTestTiming { + if x, ok := m.GetLocation().(*TestTiming_Remote); ok { + return x.Remote + } + return nil +} + +func (m *TestTiming) GetSystemTimeDuration() *duration.Duration { + if m != nil { + return m.SystemTimeDuration + } + return nil +} + +func (m *TestTiming) GetUserTimeDuration() *duration.Duration { + if m != nil { + return m.UserTimeDuration + } + return nil +} + +func (m *TestTiming) GetTestCaching() TestCaching { + if m != nil { + return m.TestCaching + } + return TestCaching_TEST_CACHING_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TestTiming) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TestTiming_OneofMarshaler, _TestTiming_OneofUnmarshaler, _TestTiming_OneofSizer, []interface{}{ + (*TestTiming_Local)(nil), + (*TestTiming_Remote)(nil), + } +} + +func _TestTiming_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TestTiming) + // location + switch x := m.Location.(type) { + case *TestTiming_Local: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Local); err != nil { + return err + } + case *TestTiming_Remote: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Remote); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TestTiming.Location has unexpected type %T", x) + } + return nil +} + +func _TestTiming_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TestTiming) + switch tag { + case 1: // location.local + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LocalTestTiming) + err := b.DecodeMessage(msg) + m.Location = &TestTiming_Local{msg} + return true, err + case 2: // location.remote + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RemoteTestTiming) + err := b.DecodeMessage(msg) + m.Location = &TestTiming_Remote{msg} + return true, err + default: + return false, nil + } +} + +func _TestTiming_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TestTiming) + // location + switch x := m.Location.(type) { + case *TestTiming_Local: + s := proto.Size(x.Local) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TestTiming_Remote: + s := proto.Size(x.Remote) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A warning from a test execution. +type TestWarning struct { + // Contains the message detailing the warning. + WarningMessage string `protobuf:"bytes,1,opt,name=warning_message,json=warningMessage,proto3" json:"warning_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestWarning) Reset() { *m = TestWarning{} } +func (m *TestWarning) String() string { return proto.CompactTextString(m) } +func (*TestWarning) ProtoMessage() {} +func (*TestWarning) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{9} +} +func (m *TestWarning) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestWarning.Unmarshal(m, b) +} +func (m *TestWarning) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestWarning.Marshal(b, m, deterministic) +} +func (dst *TestWarning) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestWarning.Merge(dst, src) +} +func (m *TestWarning) XXX_Size() int { + return xxx_messageInfo_TestWarning.Size(m) +} +func (m *TestWarning) XXX_DiscardUnknown() { + xxx_messageInfo_TestWarning.DiscardUnknown(m) +} + +var xxx_messageInfo_TestWarning proto.InternalMessageInfo + +func (m *TestWarning) GetWarningMessage() string { + if m != nil { + return m.WarningMessage + } + return "" +} + +// Stores errors reading or parsing a file during post-processing. 
+type FileProcessingErrors struct { + // The uid of the File being read or parsed. + FileUid string `protobuf:"bytes,1,opt,name=file_uid,json=fileUid,proto3" json:"file_uid,omitempty"` + // What went wrong. + FileProcessingErrors []*FileProcessingError `protobuf:"bytes,3,rep,name=file_processing_errors,json=fileProcessingErrors,proto3" json:"file_processing_errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileProcessingErrors) Reset() { *m = FileProcessingErrors{} } +func (m *FileProcessingErrors) String() string { return proto.CompactTextString(m) } +func (*FileProcessingErrors) ProtoMessage() {} +func (*FileProcessingErrors) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{10} +} +func (m *FileProcessingErrors) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileProcessingErrors.Unmarshal(m, b) +} +func (m *FileProcessingErrors) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileProcessingErrors.Marshal(b, m, deterministic) +} +func (dst *FileProcessingErrors) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileProcessingErrors.Merge(dst, src) +} +func (m *FileProcessingErrors) XXX_Size() int { + return xxx_messageInfo_FileProcessingErrors.Size(m) +} +func (m *FileProcessingErrors) XXX_DiscardUnknown() { + xxx_messageInfo_FileProcessingErrors.DiscardUnknown(m) +} + +var xxx_messageInfo_FileProcessingErrors proto.InternalMessageInfo + +func (m *FileProcessingErrors) GetFileUid() string { + if m != nil { + return m.FileUid + } + return "" +} + +func (m *FileProcessingErrors) GetFileProcessingErrors() []*FileProcessingError { + if m != nil { + return m.FileProcessingErrors + } + return nil +} + +// Stores an error reading or parsing a file during post-processing. +type FileProcessingError struct { + // The type of error that occurred. + Type FileProcessingErrorType `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.resultstore.v2.FileProcessingErrorType" json:"type,omitempty"` + // Error message describing the problem. 
+ Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileProcessingError) Reset() { *m = FileProcessingError{} } +func (m *FileProcessingError) String() string { return proto.CompactTextString(m) } +func (*FileProcessingError) ProtoMessage() {} +func (*FileProcessingError) Descriptor() ([]byte, []int) { + return fileDescriptor_action_2068b1c39ff365e5, []int{11} +} +func (m *FileProcessingError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileProcessingError.Unmarshal(m, b) +} +func (m *FileProcessingError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileProcessingError.Marshal(b, m, deterministic) +} +func (dst *FileProcessingError) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileProcessingError.Merge(dst, src) +} +func (m *FileProcessingError) XXX_Size() int { + return xxx_messageInfo_FileProcessingError.Size(m) +} +func (m *FileProcessingError) XXX_DiscardUnknown() { + xxx_messageInfo_FileProcessingError.DiscardUnknown(m) +} + +var xxx_messageInfo_FileProcessingError proto.InternalMessageInfo + +func (m *FileProcessingError) GetType() FileProcessingErrorType { + if m != nil { + return m.Type + } + return FileProcessingErrorType_FILE_PROCESSING_ERROR_TYPE_UNSPECIFIED +} + +func (m *FileProcessingError) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func init() { + proto.RegisterType((*Action)(nil), "google.devtools.resultstore.v2.Action") + proto.RegisterType((*Action_Id)(nil), "google.devtools.resultstore.v2.Action.Id") + proto.RegisterType((*BuildAction)(nil), "google.devtools.resultstore.v2.BuildAction") + proto.RegisterType((*TestAction)(nil), "google.devtools.resultstore.v2.TestAction") + proto.RegisterType((*ActionAttributes)(nil), "google.devtools.resultstore.v2.ActionAttributes") + proto.RegisterType((*InputFileInfo)(nil), "google.devtools.resultstore.v2.InputFileInfo") + proto.RegisterType((*LocalTestTiming)(nil), "google.devtools.resultstore.v2.LocalTestTiming") + proto.RegisterType((*RemoteTestAttemptTiming)(nil), "google.devtools.resultstore.v2.RemoteTestAttemptTiming") + proto.RegisterType((*RemoteTestTiming)(nil), "google.devtools.resultstore.v2.RemoteTestTiming") + proto.RegisterType((*TestTiming)(nil), "google.devtools.resultstore.v2.TestTiming") + proto.RegisterType((*TestWarning)(nil), "google.devtools.resultstore.v2.TestWarning") + proto.RegisterType((*FileProcessingErrors)(nil), "google.devtools.resultstore.v2.FileProcessingErrors") + proto.RegisterType((*FileProcessingError)(nil), "google.devtools.resultstore.v2.FileProcessingError") + proto.RegisterEnum("google.devtools.resultstore.v2.ExecutionStrategy", ExecutionStrategy_name, ExecutionStrategy_value) + proto.RegisterEnum("google.devtools.resultstore.v2.TestCaching", TestCaching_name, TestCaching_value) + proto.RegisterEnum("google.devtools.resultstore.v2.FileProcessingErrorType", FileProcessingErrorType_name, FileProcessingErrorType_value) +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/action.proto", fileDescriptor_action_2068b1c39ff365e5) +} + +var fileDescriptor_action_2068b1c39ff365e5 = []byte{ + // 1620 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x58, 0x5f, 0x73, 0x23, 0x47, + 0x11, 0x3f, 0x49, 0xb6, 0x4f, 0x6e, 0x9d, 0xe4, 0xf5, 0xd8, 0x77, 0xa7, 0x1c, 0x10, 0x0e, 0x01, + 
0xe1, 0xce, 0x47, 0xa4, 0xe0, 0xa4, 0xa0, 0x80, 0x2a, 0x0a, 0x59, 0x1a, 0xdb, 0x9b, 0x93, 0x25, + 0x65, 0xb4, 0x0a, 0x09, 0x14, 0x35, 0x59, 0x6b, 0xc7, 0xf2, 0x50, 0xda, 0x5d, 0x65, 0x77, 0xd6, + 0x89, 0xaa, 0xa8, 0xe2, 0x85, 0x2a, 0x1e, 0xa0, 0x78, 0xe1, 0x95, 0x2f, 0xc1, 0x2b, 0xcf, 0x7c, + 0x09, 0xbe, 0x0d, 0x35, 0x7f, 0x76, 0x25, 0x0b, 0x3b, 0xab, 0xbb, 0x37, 0x4d, 0xf7, 0xef, 0xf7, + 0x9b, 0xde, 0x9e, 0xee, 0x99, 0x2e, 0xc1, 0xab, 0x69, 0x18, 0x4e, 0x67, 0xac, 0xe5, 0xb1, 0x1b, + 0x11, 0x86, 0xb3, 0xb8, 0x15, 0xb1, 0x38, 0x99, 0x89, 0x58, 0x84, 0x11, 0x6b, 0xdd, 0x1c, 0xb7, + 0xdc, 0x89, 0xe0, 0x61, 0xd0, 0x9c, 0x47, 0xa1, 0x08, 0xd1, 0xbb, 0x1a, 0xdc, 0x4c, 0xc1, 0xcd, + 0x15, 0x70, 0xf3, 0xe6, 0xf8, 0x59, 0x9e, 0xd8, 0x24, 0xf4, 0xfd, 0x54, 0xec, 0xd9, 0xfb, 0xb9, + 0xe0, 0x1b, 0x16, 0xb9, 0x53, 0x66, 0xe0, 0x2f, 0x73, 0xe0, 0x57, 0x7c, 0x96, 0x42, 0x5b, 0x39, + 0x50, 0xc1, 0x62, 0x41, 0xe3, 0x84, 0x8b, 0x94, 0x60, 0xbe, 0xab, 0xa5, 0x56, 0x97, 0xc9, 0x55, + 0xcb, 0x4b, 0x22, 0x77, 0xf9, 0xdd, 0x8d, 0x7f, 0x95, 0x61, 0xa7, 0xad, 0x12, 0x81, 0x10, 0x6c, + 0x05, 0xae, 0xcf, 0xea, 0x85, 0xe7, 0x85, 0x17, 0xbb, 0x44, 0xfd, 0x46, 0x3f, 0x87, 0x22, 0xf7, + 0xea, 0xc5, 0xe7, 0x85, 0x17, 0x95, 0xe3, 0x97, 0xcd, 0x6f, 0xce, 0x51, 0x53, 0xeb, 0x34, 0x6d, + 0x8f, 0x14, 0xb9, 0x87, 0x7e, 0x0f, 0xfb, 0xb1, 0x70, 0x45, 0x12, 0x53, 0x57, 0x88, 0x88, 0x5f, + 0x26, 0x82, 0xc5, 0xf5, 0x92, 0x52, 0xfa, 0x20, 0x4f, 0x69, 0xa4, 0x88, 0xed, 0x8c, 0x47, 0xac, + 0x78, 0xcd, 0x82, 0x7e, 0x05, 0x3b, 0x82, 0xfb, 0x3c, 0x98, 0xd6, 0xb7, 0x94, 0xe6, 0x7b, 0x79, + 0x9a, 0x8e, 0x42, 0x13, 0xc3, 0x42, 0x43, 0x78, 0x74, 0x99, 0xf0, 0x99, 0x47, 0x75, 0x19, 0xd4, + 0x77, 0x95, 0xca, 0xab, 0x3c, 0x95, 0x13, 0xc9, 0xd1, 0x1f, 0x7a, 0xfe, 0x80, 0x54, 0x2e, 0x97, + 0x4b, 0x74, 0x01, 0x15, 0x95, 0x7e, 0x23, 0x08, 0x4a, 0xf0, 0x28, 0x37, 0x2c, 0x16, 0x8b, 0x4c, + 0x0f, 0x44, 0xb6, 0x92, 0xf9, 0xd3, 0x4a, 0xab, 0xf9, 0xdb, 0xde, 0x2c, 0x7f, 0x5a, 0x62, 0x35, + 0x7f, 0xee, 0x9a, 0x05, 0xfd, 0x0e, 0x0e, 0x8c, 0xbc, 0xc7, 0xe6, 0x2c, 0xf0, 0x58, 0x30, 0xe1, + 0x2c, 0xae, 0xd7, 0x9e, 0x97, 0x36, 0x89, 0xba, 0x9b, 0x72, 0x16, 0x04, 0x69, 0x99, 0xee, 0x8a, + 0x0a, 0x3a, 0x07, 0x98, 0x47, 0xe1, 0x9c, 0x45, 0x42, 0x6a, 0x3e, 0x54, 0x9a, 0x2f, 0xf2, 0x34, + 0x87, 0x9a, 0xb1, 0x20, 0x2b, 0x5c, 0xf4, 0x0b, 0xd8, 0x96, 0xe5, 0x1f, 0xd7, 0xcb, 0x4a, 0xe4, + 0x07, 0x79, 0x22, 0xa7, 0x7c, 0xc6, 0x88, 0xa6, 0xa0, 0x8f, 0xa1, 0x9c, 0x76, 0x5a, 0xbd, 0xa2, + 0x12, 0xd7, 0xdc, 0x2c, 0x71, 0x1d, 0xc3, 0x22, 0x19, 0x1f, 0xfd, 0x01, 0x9e, 0x48, 0x51, 0x3a, + 0x8f, 0xc2, 0x09, 0x8b, 0x63, 0x1e, 0x4c, 0x29, 0x8b, 0xa2, 0x30, 0x8a, 0xeb, 0x55, 0x15, 0xd8, + 0x47, 0x9b, 0x04, 0x36, 0xcc, 0xc8, 0x58, 0x71, 0xc9, 0xe1, 0xd5, 0x1d, 0xd6, 0x67, 0x7f, 0x2f, + 0x40, 0xd1, 0xf6, 0xd0, 0xf7, 0xa1, 0xca, 0x83, 0x9b, 0x70, 0xa2, 0xda, 0x95, 0x72, 0xcf, 0x34, + 0xe6, 0xa3, 0xa5, 0xd1, 0xf6, 0xd0, 0xb7, 0x60, 0x57, 0xb8, 0xd1, 0x94, 0x09, 0x6a, 0xfa, 0x74, + 0x97, 0x94, 0xb5, 0xc1, 0xf6, 0xd0, 0x4b, 0xb0, 0x26, 0x61, 0x70, 0xc5, 0xa7, 0xa6, 0xe7, 0x25, + 0xa6, 0xa4, 0x30, 0x7b, 0xb7, 0xec, 0x5a, 0xc7, 0x94, 0x03, 0xf7, 0x54, 0x47, 0xed, 0x92, 0xb2, + 0x36, 0xd8, 0xde, 0x49, 0x15, 0x2a, 0xc6, 0x29, 0x16, 0x73, 0xd6, 0xf8, 0x13, 0x54, 0x56, 0xda, + 0x40, 0xde, 0x1b, 0xd2, 0x9c, 0xde, 0x1b, 0xf2, 0x37, 0xfa, 0x31, 0xa0, 0x79, 0xc4, 0x7d, 0x37, + 0x5a, 0x50, 0x1e, 0xcc, 0x13, 0x41, 0xe7, 0xae, 0xb8, 0x36, 0xf1, 0x59, 0xc6, 0x63, 0x4b, 0xc7, + 0xd0, 0x15, 0xd7, 0xa8, 0x09, 0x07, 0x29, 0x3a, 0x4c, 0x44, 0x06, 0xd7, 0xa1, 0xee, 0x1b, 0xd7, + 0x40, 0x79, 0x24, 0xbe, 
0xf1, 0xd7, 0x12, 0xc0, 0xb2, 0x6f, 0xd0, 0x6b, 0xd3, 0x78, 0xe6, 0x3e, + 0x28, 0x6c, 0xde, 0x78, 0xe6, 0x4e, 0x50, 0x6d, 0xa7, 0x7f, 0xa3, 0xef, 0xc1, 0xa3, 0xf8, 0xda, + 0x8d, 0x3c, 0x1a, 0x24, 0xfe, 0x25, 0x8b, 0x54, 0xcc, 0xdb, 0xa4, 0xa2, 0x6c, 0x7d, 0x65, 0x42, + 0xdf, 0x01, 0x88, 0x92, 0x20, 0x05, 0x94, 0x14, 0x60, 0x37, 0x4a, 0x02, 0xe3, 0xfe, 0x21, 0xd4, + 0x5c, 0x21, 0x98, 0x3f, 0x17, 0x29, 0x64, 0x4b, 0x41, 0xaa, 0xc6, 0x6a, 0x60, 0xe7, 0x00, 0xcb, + 0xdb, 0xda, 0x34, 0xf6, 0xcb, 0x4d, 0x82, 0x1e, 0x49, 0x02, 0xd9, 0x15, 0xe9, 0x4f, 0x74, 0x06, + 0xe5, 0xaf, 0xdc, 0x28, 0xe0, 0xc1, 0x34, 0x6d, 0x93, 0x57, 0x9b, 0xe8, 0xfc, 0x46, 0x73, 0x48, + 0x46, 0x46, 0x1f, 0xc1, 0x13, 0x16, 0x0b, 0xee, 0xbb, 0x82, 0x79, 0xd4, 0x67, 0x7e, 0x18, 0x2d, + 0xe8, 0xe5, 0x42, 0xde, 0x3b, 0xf2, 0x32, 0x2b, 0x91, 0xc3, 0xcc, 0x7b, 0xa1, 0x9c, 0x27, 0xd2, + 0xd7, 0xf8, 0x73, 0x11, 0xac, 0xf5, 0x0b, 0x07, 0x7d, 0x01, 0x88, 0x7d, 0xcd, 0x26, 0x89, 0xaa, + 0x9a, 0x58, 0x44, 0xae, 0x60, 0xd3, 0x85, 0x3a, 0x9a, 0xda, 0xf1, 0x4f, 0xf2, 0xa2, 0xc3, 0x29, + 0x73, 0x64, 0x88, 0x64, 0x9f, 0xad, 0x9b, 0x64, 0xc5, 0xb2, 0xaf, 0xb9, 0xa0, 0x93, 0xd0, 0x63, + 0xe6, 0x94, 0xca, 0xd2, 0xd0, 0x09, 0x3d, 0x86, 0x9e, 0x41, 0xf9, 0x3a, 0x8c, 0x85, 0x7a, 0xcf, + 0x74, 0x19, 0x65, 0x6b, 0x34, 0x86, 0x3d, 0x5d, 0x93, 0xaa, 0xa1, 0x79, 0x70, 0x15, 0x9a, 0x27, + 0xe4, 0xfd, 0xbc, 0xb8, 0x54, 0xc5, 0xca, 0x46, 0xb6, 0x83, 0xab, 0x90, 0x54, 0xf9, 0xea, 0xb2, + 0xf1, 0x9f, 0x02, 0x54, 0x6f, 0x01, 0xd0, 0x21, 0x6c, 0x4f, 0xc2, 0x24, 0x10, 0xea, 0xb3, 0x4b, + 0x44, 0x2f, 0x64, 0x79, 0x78, 0x3c, 0x16, 0x3c, 0x98, 0xc8, 0xd8, 0xa5, 0xbb, 0xa8, 0xdc, 0xd5, + 0xd4, 0xda, 0x51, 0xb0, 0xef, 0x42, 0x45, 0x79, 0xe9, 0x8c, 0xfb, 0x5c, 0xa8, 0x8f, 0x28, 0x11, + 0x50, 0xa6, 0x9e, 0xb4, 0xdc, 0xd2, 0xd1, 0x87, 0xb4, 0x75, 0x5b, 0x47, 0x9d, 0x8e, 0xec, 0xad, + 0x5b, 0x30, 0xa3, 0xb7, 0xad, 0xb0, 0xfb, 0xab, 0x58, 0x25, 0xdb, 0xf8, 0x02, 0xf6, 0x7a, 0xe1, + 0xc4, 0x9d, 0x2d, 0xdb, 0x03, 0x5d, 0xc0, 0x63, 0x55, 0xa9, 0xe6, 0xee, 0xa3, 0xe9, 0x08, 0x61, + 0x3a, 0xed, 0x9d, 0x34, 0x6d, 0xe9, 0x8c, 0xd1, 0xec, 0x1a, 0x00, 0x39, 0x90, 0x3c, 0x73, 0xbf, + 0xa5, 0xc6, 0xc6, 0x5f, 0x4a, 0xf0, 0x94, 0x30, 0x3f, 0x14, 0x4c, 0xf5, 0xb0, 0x6e, 0x0a, 0xb3, + 0xd5, 0xaf, 0xa1, 0xf6, 0x65, 0xc2, 0x12, 0xf6, 0x06, 0x7b, 0x54, 0x15, 0x21, 0x5d, 0xa2, 0x13, + 0xd8, 0x4b, 0xe6, 0xb3, 0xd0, 0xf5, 0x96, 0x12, 0xc5, 0x3c, 0x89, 0x9a, 0x66, 0x64, 0x1a, 0x03, + 0x78, 0xe2, 0xbb, 0x93, 0x6b, 0x1e, 0x30, 0x1a, 0x33, 0x91, 0xcc, 0x97, 0x52, 0xa5, 0x3c, 0xa9, + 0x43, 0x43, 0x1c, 0x49, 0x5e, 0x26, 0x78, 0x6f, 0x06, 0xb7, 0xde, 0x26, 0x83, 0xe8, 0x14, 0xf6, + 0xbd, 0xf0, 0xab, 0xe0, 0xf6, 0x57, 0x6e, 0xe7, 0x49, 0x59, 0x29, 0x27, 0x3b, 0x89, 0x7f, 0x17, + 0xc0, 0x5a, 0x9e, 0x84, 0x39, 0x82, 0x4f, 0xe0, 0xe9, 0x4c, 0x16, 0x00, 0x75, 0x03, 0x77, 0xb6, + 0x88, 0xf9, 0x9b, 0x9c, 0xf7, 0x63, 0xc5, 0x6c, 0x1b, 0x62, 0x16, 0xef, 0x08, 0xca, 0xe6, 0xee, + 0x8b, 0xeb, 0x45, 0x75, 0x41, 0xfd, 0x2c, 0xaf, 0xd5, 0xee, 0x29, 0x10, 0x92, 0x09, 0x35, 0xfe, + 0x61, 0x1e, 0x01, 0x13, 0xf6, 0x19, 0x6c, 0xab, 0xcd, 0x4d, 0x90, 0xad, 0xbc, 0x0d, 0xd6, 0x8a, + 0xfc, 0xfc, 0x01, 0xd1, 0x7c, 0xf4, 0x31, 0xec, 0x44, 0x6a, 0x73, 0x53, 0x37, 0x1f, 0x6c, 0x1e, + 0x6a, 0x26, 0x65, 0x14, 0xd0, 0x6b, 0x38, 0x8c, 0x17, 0xb1, 0x60, 0xbe, 0x7c, 0x9b, 0xd8, 0x1b, + 0x94, 0x11, 0xd2, 0x34, 0x87, 0xfb, 0xcb, 0xca, 0x3e, 0x03, 0x94, 0xc4, 0x2c, 0x5a, 0x93, 0xca, + 0xad, 0x20, 0x4b, 0x92, 0x6e, 0x09, 0xf5, 0xe1, 0x91, 0xaa, 0xc6, 0x89, 0x2a, 0xd5, 0xa9, 0xaa, + 0x9c, 0xda, 0x66, 0x6f, 0x46, 0x47, 0x53, 0x88, 
0x7a, 0x70, 0xcd, 0xe2, 0x04, 0xa0, 0x3c, 0x33, + 0x13, 0x49, 0xe3, 0xa7, 0x50, 0x59, 0x79, 0x5b, 0xd0, 0x8f, 0x60, 0xcf, 0xbc, 0x2e, 0xd4, 0x67, + 0x71, 0x2c, 0x27, 0x31, 0x3d, 0x26, 0xd4, 0x8c, 0xf9, 0x42, 0x5b, 0x1b, 0xff, 0x2c, 0xc0, 0xe1, + 0x5d, 0x23, 0x12, 0x7a, 0x07, 0xca, 0xea, 0x9e, 0x4e, 0xb2, 0x01, 0xe8, 0xa1, 0x5c, 0x8f, 0xb9, + 0x87, 0xf8, 0xbd, 0x33, 0x59, 0x49, 0x15, 0xd9, 0x87, 0x6f, 0x31, 0x93, 0xdd, 0x3d, 0x92, 0x35, + 0xfe, 0x08, 0x07, 0x77, 0x80, 0xd1, 0xeb, 0x95, 0xd1, 0xa7, 0x96, 0x5f, 0xd4, 0x77, 0x48, 0x38, + 0x8b, 0x39, 0x33, 0x33, 0x53, 0x1d, 0x1e, 0xa6, 0x39, 0xd2, 0x83, 0x52, 0xba, 0x3c, 0xfa, 0x5b, + 0x01, 0xf6, 0xff, 0xef, 0x4d, 0x44, 0x0d, 0x78, 0x17, 0x7f, 0x86, 0x3b, 0x63, 0xc7, 0x1e, 0xf4, + 0xe9, 0xc8, 0x21, 0x6d, 0x07, 0x9f, 0x7d, 0x4e, 0xc7, 0xfd, 0xd1, 0x10, 0x77, 0xec, 0x53, 0x1b, + 0x77, 0xad, 0x07, 0xe8, 0x31, 0xec, 0x0f, 0x9c, 0x73, 0x4c, 0x28, 0xee, 0x7f, 0x6a, 0x93, 0x41, + 0xff, 0x02, 0xf7, 0x1d, 0xab, 0x80, 0x10, 0xd4, 0x08, 0xbe, 0x18, 0x38, 0x98, 0x8e, 0x30, 0xf9, + 0xd4, 0xee, 0x60, 0xab, 0x28, 0x6d, 0xbd, 0x41, 0xa7, 0xdd, 0xa3, 0xc3, 0x36, 0x69, 0xf7, 0x7a, + 0xb8, 0x67, 0x95, 0xd0, 0x21, 0x58, 0xda, 0x36, 0xc2, 0x9f, 0x8c, 0x71, 0xdf, 0xb1, 0xdb, 0x3d, + 0x6b, 0xeb, 0xe8, 0x4a, 0x9f, 0xb1, 0x39, 0x7e, 0xf4, 0x6d, 0xa8, 0x3b, 0x78, 0xe4, 0xd0, 0x4e, + 0xbb, 0x73, 0x6e, 0xf7, 0xcf, 0xd6, 0x22, 0x38, 0x80, 0x3d, 0x2d, 0x21, 0xdd, 0x98, 0x9e, 0xdb, + 0x72, 0xff, 0x43, 0xb0, 0xcc, 0xfe, 0x4b, 0x6b, 0x11, 0xd5, 0x00, 0xf4, 0xf2, 0xc2, 0x1e, 0x8d, + 0xac, 0xd2, 0xd1, 0x7f, 0x0b, 0xf0, 0xf4, 0x9e, 0x94, 0xa1, 0x23, 0x78, 0xef, 0xd4, 0xee, 0x61, + 0x3a, 0x24, 0x83, 0x0e, 0x1e, 0x8d, 0xe4, 0xbe, 0x98, 0x90, 0x01, 0xa1, 0xce, 0xe7, 0x43, 0xbc, + 0x16, 0xc2, 0x13, 0x40, 0x67, 0xb8, 0x8f, 0x89, 0xdd, 0xa1, 0x04, 0xb7, 0xbb, 0x1a, 0x68, 0x15, + 0xd0, 0x53, 0x38, 0x48, 0xed, 0xc3, 0x36, 0x19, 0x61, 0xe3, 0x50, 0xa9, 0x50, 0xe2, 0xce, 0x60, + 0x40, 0x7b, 0x6d, 0x72, 0x86, 0x75, 0x2a, 0x06, 0x63, 0x67, 0x38, 0x76, 0x56, 0xac, 0x5b, 0x68, + 0x1f, 0xaa, 0xed, 0x8e, 0x0c, 0x80, 0x76, 0x71, 0x5f, 0xee, 0xb6, 0x2d, 0x53, 0xde, 0xc5, 0xed, + 0x6e, 0xcf, 0xee, 0x63, 0x8a, 0x3f, 0xeb, 0x60, 0xdc, 0xc5, 0x5d, 0x6b, 0x07, 0x55, 0x61, 0xb7, + 0x3f, 0x70, 0xe8, 0xe9, 0x60, 0xdc, 0xef, 0x5a, 0x0f, 0x4f, 0xbe, 0x84, 0xc6, 0x24, 0xf4, 0x73, + 0x0a, 0x66, 0x58, 0xf8, 0xad, 0x6d, 0x10, 0xd3, 0x70, 0xe6, 0x06, 0xd3, 0x66, 0x18, 0x4d, 0x5b, + 0x53, 0x16, 0xa8, 0x2e, 0x37, 0xff, 0x05, 0xb8, 0x73, 0x1e, 0xdf, 0xf7, 0x7f, 0xc0, 0x2f, 0x57, + 0x96, 0x97, 0x3b, 0x8a, 0xf5, 0xe1, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x09, 0xc6, 0x5f, 0x39, + 0x18, 0x11, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/common.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/common.pb.go new file mode 100644 index 0000000..5704164 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/common.pb.go @@ -0,0 +1,600 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/common.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// These correspond to the prefix of the rule name. Eg cc_test has language CC. +type Language int32 + +const ( + // Language unspecified or not listed here. + Language_LANGUAGE_UNSPECIFIED Language = 0 + // Not related to any particular language + Language_NONE Language = 1 + // Android + Language_ANDROID Language = 2 + // ActionScript (Flash) + Language_AS Language = 3 + // C++ or C + Language_CC Language = 4 + // Cascading-Style-Sheets + Language_CSS Language = 5 + // Dart + Language_DART Language = 6 + // Go + Language_GO Language = 7 + // Google-Web-Toolkit + Language_GWT Language = 8 + // Haskell + Language_HASKELL Language = 9 + // Java + Language_JAVA Language = 10 + // Javascript + Language_JS Language = 11 + // Lisp + Language_LISP Language = 12 + // Objective-C + Language_OBJC Language = 13 + // Python + Language_PY Language = 14 + // Shell (Typically Bash) + Language_SH Language = 15 + // Swift + Language_SWIFT Language = 16 + // Typescript + Language_TS Language = 18 + // Webtesting + Language_WEB Language = 19 + // Scala + Language_SCALA Language = 20 + // Protocol Buffer + Language_PROTO Language = 21 +) + +var Language_name = map[int32]string{ + 0: "LANGUAGE_UNSPECIFIED", + 1: "NONE", + 2: "ANDROID", + 3: "AS", + 4: "CC", + 5: "CSS", + 6: "DART", + 7: "GO", + 8: "GWT", + 9: "HASKELL", + 10: "JAVA", + 11: "JS", + 12: "LISP", + 13: "OBJC", + 14: "PY", + 15: "SH", + 16: "SWIFT", + 18: "TS", + 19: "WEB", + 20: "SCALA", + 21: "PROTO", +} +var Language_value = map[string]int32{ + "LANGUAGE_UNSPECIFIED": 0, + "NONE": 1, + "ANDROID": 2, + "AS": 3, + "CC": 4, + "CSS": 5, + "DART": 6, + "GO": 7, + "GWT": 8, + "HASKELL": 9, + "JAVA": 10, + "JS": 11, + "LISP": 12, + "OBJC": 13, + "PY": 14, + "SH": 15, + "SWIFT": 16, + "TS": 18, + "WEB": 19, + "SCALA": 20, + "PROTO": 21, +} + +func (x Language) String() string { + return proto.EnumName(Language_name, int32(x)) +} +func (Language) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{0} +} + +// Status of a resource. +type Status int32 + +const ( + // The implicit default enum value. Should never be set. + Status_STATUS_UNSPECIFIED Status = 0 + // Displays as "Building". Means the target is compiling, linking, etc. + Status_BUILDING Status = 1 + // Displays as "Built". Means the target was built successfully. + // If testing was requested, it should never reach this status: it should go + // straight from BUILDING to TESTING. + Status_BUILT Status = 2 + // Displays as "Broken". Means build failure such as compile error. + Status_FAILED_TO_BUILD Status = 3 + // Displays as "Testing". Means the test is running. + Status_TESTING Status = 4 + // Displays as "Passed". Means the test was run and passed. + Status_PASSED Status = 5 + // Displays as "Failed". Means the test was run and failed. + Status_FAILED Status = 6 + // Displays as "Timed out". Means the test didn't finish in time. + Status_TIMED_OUT Status = 7 + // Displays as "Cancelled". Means the build or test was cancelled. + // E.g. User hit control-C. + Status_CANCELLED Status = 8 + // Displays as "Tool Failed". 
Means the build or test had internal tool + // failure. + Status_TOOL_FAILED Status = 9 + // Displays as "Incomplete". Means the build or test did not complete. This + // might happen when a build breakage or test failure causes the tool to stop + // trying to build anything more or run any more tests, with the default + // bazel --nokeep_going option or the --notest_keep_going option. + Status_INCOMPLETE Status = 10 + // Displays as "Flaky". Means the aggregate status contains some runs that + // were successful, and some that were not. + Status_FLAKY Status = 11 + // Displays as "Unknown". Means the tool uploading to the server died + // mid-upload or does not know the state. + Status_UNKNOWN Status = 12 + // Displays as "Skipped". Means building and testing were skipped. + // (E.g. Restricted to a different configuration.) + Status_SKIPPED Status = 13 +) + +var Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "BUILDING", + 2: "BUILT", + 3: "FAILED_TO_BUILD", + 4: "TESTING", + 5: "PASSED", + 6: "FAILED", + 7: "TIMED_OUT", + 8: "CANCELLED", + 9: "TOOL_FAILED", + 10: "INCOMPLETE", + 11: "FLAKY", + 12: "UNKNOWN", + 13: "SKIPPED", +} +var Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "BUILDING": 1, + "BUILT": 2, + "FAILED_TO_BUILD": 3, + "TESTING": 4, + "PASSED": 5, + "FAILED": 6, + "TIMED_OUT": 7, + "CANCELLED": 8, + "TOOL_FAILED": 9, + "INCOMPLETE": 10, + "FLAKY": 11, + "UNKNOWN": 12, + "SKIPPED": 13, +} + +func (x Status) String() string { + return proto.EnumName(Status_name, int32(x)) +} +func (Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{1} +} + +// Describes the status of a resource in both enum and string form. +// Only use description when conveying additional info not captured in the enum +// name. +type StatusAttributes struct { + // Enum representation of the status. + Status Status `protobuf:"varint,1,opt,name=status,proto3,enum=google.devtools.resultstore.v2.Status" json:"status,omitempty"` + // A longer description about the status. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StatusAttributes) Reset() { *m = StatusAttributes{} } +func (m *StatusAttributes) String() string { return proto.CompactTextString(m) } +func (*StatusAttributes) ProtoMessage() {} +func (*StatusAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{0} +} +func (m *StatusAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StatusAttributes.Unmarshal(m, b) +} +func (m *StatusAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StatusAttributes.Marshal(b, m, deterministic) +} +func (dst *StatusAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_StatusAttributes.Merge(dst, src) +} +func (m *StatusAttributes) XXX_Size() int { + return xxx_messageInfo_StatusAttributes.Size(m) +} +func (m *StatusAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_StatusAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_StatusAttributes proto.InternalMessageInfo + +func (m *StatusAttributes) GetStatus() Status { + if m != nil { + return m.Status + } + return Status_STATUS_UNSPECIFIED +} + +func (m *StatusAttributes) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// A generic key-value property definition. 
+type Property struct { + // The key. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Property) Reset() { *m = Property{} } +func (m *Property) String() string { return proto.CompactTextString(m) } +func (*Property) ProtoMessage() {} +func (*Property) Descriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{1} +} +func (m *Property) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Property.Unmarshal(m, b) +} +func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Property.Marshal(b, m, deterministic) +} +func (dst *Property) XXX_Merge(src proto.Message) { + xxx_messageInfo_Property.Merge(dst, src) +} +func (m *Property) XXX_Size() int { + return xxx_messageInfo_Property.Size(m) +} +func (m *Property) XXX_DiscardUnknown() { + xxx_messageInfo_Property.DiscardUnknown(m) +} + +var xxx_messageInfo_Property proto.InternalMessageInfo + +func (m *Property) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *Property) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +// The timing of a particular Invocation, Action, etc. The start_time is +// specified, stop time can be calculated by adding duration to start_time. +type Timing struct { + // The time the resource started running. This is in UTC Epoch time. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The duration for which the resource ran. + Duration *duration.Duration `protobuf:"bytes,2,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Timing) Reset() { *m = Timing{} } +func (m *Timing) String() string { return proto.CompactTextString(m) } +func (*Timing) ProtoMessage() {} +func (*Timing) Descriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{2} +} +func (m *Timing) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Timing.Unmarshal(m, b) +} +func (m *Timing) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Timing.Marshal(b, m, deterministic) +} +func (dst *Timing) XXX_Merge(src proto.Message) { + xxx_messageInfo_Timing.Merge(dst, src) +} +func (m *Timing) XXX_Size() int { + return xxx_messageInfo_Timing.Size(m) +} +func (m *Timing) XXX_DiscardUnknown() { + xxx_messageInfo_Timing.DiscardUnknown(m) +} + +var xxx_messageInfo_Timing proto.InternalMessageInfo + +func (m *Timing) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *Timing) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +// Represents a dependency of a resource on another resource. This can be used +// to define a graph or a workflow paradigm through resources. +type Dependency struct { + // The resource depended upon. It may be a Target, ConfiguredTarget, or + // Action. + // + // Types that are valid to be assigned to Resource: + // *Dependency_Target + // *Dependency_ConfiguredTarget + // *Dependency_Action + Resource isDependency_Resource `protobuf_oneof:"resource"` + // A label describing this dependency. 
+ // The label "Root Cause" is handled specially. It is used to point to the + // exact resource that caused a resource to fail. + Label string `protobuf:"bytes,4,opt,name=label,proto3" json:"label,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Dependency) Reset() { *m = Dependency{} } +func (m *Dependency) String() string { return proto.CompactTextString(m) } +func (*Dependency) ProtoMessage() {} +func (*Dependency) Descriptor() ([]byte, []int) { + return fileDescriptor_common_c40fe7cfb3b2f2fd, []int{3} +} +func (m *Dependency) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Dependency.Unmarshal(m, b) +} +func (m *Dependency) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Dependency.Marshal(b, m, deterministic) +} +func (dst *Dependency) XXX_Merge(src proto.Message) { + xxx_messageInfo_Dependency.Merge(dst, src) +} +func (m *Dependency) XXX_Size() int { + return xxx_messageInfo_Dependency.Size(m) +} +func (m *Dependency) XXX_DiscardUnknown() { + xxx_messageInfo_Dependency.DiscardUnknown(m) +} + +var xxx_messageInfo_Dependency proto.InternalMessageInfo + +type isDependency_Resource interface { + isDependency_Resource() +} + +type Dependency_Target struct { + Target string `protobuf:"bytes,1,opt,name=target,proto3,oneof"` +} + +type Dependency_ConfiguredTarget struct { + ConfiguredTarget string `protobuf:"bytes,2,opt,name=configured_target,json=configuredTarget,proto3,oneof"` +} + +type Dependency_Action struct { + Action string `protobuf:"bytes,3,opt,name=action,proto3,oneof"` +} + +func (*Dependency_Target) isDependency_Resource() {} + +func (*Dependency_ConfiguredTarget) isDependency_Resource() {} + +func (*Dependency_Action) isDependency_Resource() {} + +func (m *Dependency) GetResource() isDependency_Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *Dependency) GetTarget() string { + if x, ok := m.GetResource().(*Dependency_Target); ok { + return x.Target + } + return "" +} + +func (m *Dependency) GetConfiguredTarget() string { + if x, ok := m.GetResource().(*Dependency_ConfiguredTarget); ok { + return x.ConfiguredTarget + } + return "" +} + +func (m *Dependency) GetAction() string { + if x, ok := m.GetResource().(*Dependency_Action); ok { + return x.Action + } + return "" +} + +func (m *Dependency) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Dependency) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Dependency_OneofMarshaler, _Dependency_OneofUnmarshaler, _Dependency_OneofSizer, []interface{}{ + (*Dependency_Target)(nil), + (*Dependency_ConfiguredTarget)(nil), + (*Dependency_Action)(nil), + } +} + +func _Dependency_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Dependency) + // resource + switch x := m.Resource.(type) { + case *Dependency_Target: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Target) + case *Dependency_ConfiguredTarget: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ConfiguredTarget) + case *Dependency_Action: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Action) + case nil: + default: + return fmt.Errorf("Dependency.Resource has unexpected type %T", x) + } + return nil +} + +func _Dependency_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Dependency) + switch tag { + case 1: // resource.target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Resource = &Dependency_Target{x} + return true, err + case 2: // resource.configured_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Resource = &Dependency_ConfiguredTarget{x} + return true, err + case 3: // resource.action + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Resource = &Dependency_Action{x} + return true, err + default: + return false, nil + } +} + +func _Dependency_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Dependency) + // resource + switch x := m.Resource.(type) { + case *Dependency_Target: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Target))) + n += len(x.Target) + case *Dependency_ConfiguredTarget: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ConfiguredTarget))) + n += len(x.ConfiguredTarget) + case *Dependency_Action: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Action))) + n += len(x.Action) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*StatusAttributes)(nil), "google.devtools.resultstore.v2.StatusAttributes") + proto.RegisterType((*Property)(nil), "google.devtools.resultstore.v2.Property") + proto.RegisterType((*Timing)(nil), "google.devtools.resultstore.v2.Timing") + proto.RegisterType((*Dependency)(nil), "google.devtools.resultstore.v2.Dependency") + proto.RegisterEnum("google.devtools.resultstore.v2.Language", Language_name, Language_value) + proto.RegisterEnum("google.devtools.resultstore.v2.Status", Status_name, Status_value) +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/common.proto", fileDescriptor_common_c40fe7cfb3b2f2fd) +} + +var fileDescriptor_common_c40fe7cfb3b2f2fd = []byte{ + // 704 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x94, 0xdf, 0x6e, 0xe3, 0x44, + 0x14, 0xc6, 0xd7, 0x49, 0xeb, 0x3a, 0x27, 0xfd, 0x73, 0x98, 0x2d, 0x28, 0xf4, 0x02, 0xaa, 0x5c, + 0xa0, 0xd5, 0x22, 0x6c, 0x29, 0x88, 0x0b, 0x84, 0x84, 0x34, 0xb1, 0xdd, 0xd4, 0xad, 0xd7, 0xb6, + 0x3c, 0x13, 0xa2, 0xe5, 0x26, 0x72, 0x92, 0x59, 0xcb, 
0x22, 0xb1, 0x83, 0x3d, 0x8e, 0x54, 0xde, + 0x82, 0xa7, 0xe3, 0x05, 0x78, 0x10, 0x34, 0xb6, 0x03, 0x55, 0x11, 0xec, 0x95, 0xcf, 0x77, 0xbe, + 0xdf, 0x39, 0x73, 0xce, 0x68, 0x12, 0xf8, 0x3a, 0x2d, 0x8a, 0x74, 0x2b, 0xac, 0x8d, 0x38, 0xc8, + 0xa2, 0xd8, 0x56, 0x56, 0x29, 0xaa, 0x7a, 0x2b, 0x2b, 0x59, 0x94, 0xc2, 0x3a, 0x4c, 0xac, 0x75, + 0xb1, 0xdb, 0x15, 0xb9, 0xb9, 0x2f, 0x0b, 0x59, 0x90, 0x2f, 0x5a, 0xd8, 0x3c, 0xc2, 0xe6, 0x33, + 0xd8, 0x3c, 0x4c, 0x6e, 0x3a, 0xdf, 0x6a, 0xe8, 0x55, 0xfd, 0xc1, 0xda, 0xd4, 0x65, 0x22, 0xb3, + 0x63, 0xfd, 0xcd, 0x97, 0x2f, 0x7d, 0x99, 0xed, 0x44, 0x25, 0x93, 0xdd, 0xbe, 0x05, 0xc6, 0x12, + 0x90, 0xc9, 0x44, 0xd6, 0x15, 0x95, 0xb2, 0xcc, 0x56, 0xb5, 0x14, 0x15, 0xf9, 0x11, 0xf4, 0xaa, + 0xc9, 0x8d, 0xb4, 0x5b, 0xed, 0xcd, 0xe5, 0xe4, 0x2b, 0xf3, 0xff, 0xa7, 0x30, 0xdb, 0x0e, 0x71, + 0x57, 0x45, 0x6e, 0x61, 0xb8, 0x11, 0xd5, 0xba, 0xcc, 0xf6, 0x6a, 0x92, 0x51, 0xef, 0x56, 0x7b, + 0x33, 0x88, 0x9f, 0xa7, 0xc6, 0x13, 0x30, 0xa2, 0xb2, 0xd8, 0x8b, 0x52, 0x3e, 0x11, 0x84, 0xfe, + 0x2f, 0xe2, 0xa9, 0x39, 0x6a, 0x10, 0xab, 0x90, 0x5c, 0xc3, 0xe9, 0x21, 0xd9, 0xd6, 0xa2, 0xab, + 0x6c, 0xc5, 0xf8, 0x37, 0xd0, 0x79, 0xb6, 0xcb, 0xf2, 0x94, 0x7c, 0x0f, 0x50, 0xc9, 0xa4, 0x94, + 0x4b, 0xb5, 0x4c, 0x53, 0x38, 0x9c, 0xdc, 0x1c, 0x67, 0x3c, 0x6e, 0x6a, 0xf2, 0xe3, 0xa6, 0xf1, + 0xa0, 0xa1, 0x95, 0x26, 0xdf, 0x81, 0x71, 0xbc, 0xa1, 0xa6, 0xfb, 0x70, 0xf2, 0xf9, 0xbf, 0x0a, + 0x9d, 0x0e, 0x88, 0xff, 0x46, 0xc7, 0xbf, 0x6b, 0x00, 0x8e, 0xd8, 0x8b, 0x7c, 0x23, 0xf2, 0xf5, + 0x13, 0x19, 0x81, 0x2e, 0x93, 0x32, 0x15, 0xb2, 0x9d, 0xfa, 0xfe, 0x55, 0xdc, 0x69, 0xf2, 0x0d, + 0x7c, 0xb2, 0x2e, 0xf2, 0x0f, 0x59, 0x5a, 0x97, 0x62, 0xb3, 0xec, 0xa0, 0x5e, 0x07, 0xe1, 0x3f, + 0x16, 0x6f, 0xf1, 0x11, 0xe8, 0xc9, 0xba, 0x19, 0xa6, 0x7f, 0x6c, 0xd4, 0x6a, 0x75, 0x07, 0xdb, + 0x64, 0x25, 0xb6, 0xa3, 0x93, 0xf6, 0x0e, 0x1a, 0x31, 0x05, 0x30, 0x4a, 0x51, 0x15, 0x75, 0xb9, + 0x16, 0x6f, 0xff, 0xd4, 0xc0, 0xf0, 0x93, 0x3c, 0xad, 0x93, 0x54, 0x90, 0x11, 0x5c, 0xfb, 0x34, + 0x98, 0xcd, 0xe9, 0xcc, 0x5d, 0xce, 0x03, 0x16, 0xb9, 0xb6, 0x77, 0xe7, 0xb9, 0x0e, 0xbe, 0x22, + 0x06, 0x9c, 0x04, 0x61, 0xe0, 0xa2, 0x46, 0x86, 0x70, 0x46, 0x03, 0x27, 0x0e, 0x3d, 0x07, 0x7b, + 0x44, 0x87, 0x1e, 0x65, 0xd8, 0x57, 0x5f, 0xdb, 0xc6, 0x13, 0x72, 0x06, 0x7d, 0x9b, 0x31, 0x3c, + 0x55, 0xbc, 0x43, 0x63, 0x8e, 0xba, 0xb2, 0x66, 0x21, 0x9e, 0x29, 0x6b, 0xb6, 0xe0, 0x68, 0xa8, + 0x06, 0xf7, 0x94, 0x3d, 0xba, 0xbe, 0x8f, 0x03, 0xc5, 0x3d, 0xd0, 0x9f, 0x28, 0x82, 0xe2, 0x1e, + 0x18, 0x0e, 0x55, 0xc6, 0xf7, 0x58, 0x84, 0xe7, 0x2a, 0x0a, 0xa7, 0x0f, 0x36, 0x5e, 0x28, 0x2f, + 0x7a, 0x8f, 0x97, 0xea, 0xcb, 0xee, 0xf1, 0x8a, 0x0c, 0xe0, 0x94, 0x2d, 0xbc, 0x3b, 0x8e, 0xa8, + 0x52, 0x9c, 0x21, 0x51, 0xed, 0x17, 0xee, 0x14, 0x5f, 0x37, 0x9e, 0x4d, 0x7d, 0x8a, 0xd7, 0x2a, + 0x8c, 0xe2, 0x90, 0x87, 0xf8, 0xe9, 0xdb, 0x3f, 0x34, 0xd0, 0xdb, 0xf7, 0x45, 0x3e, 0x03, 0xc2, + 0x38, 0xe5, 0x73, 0xf6, 0x62, 0xc5, 0x73, 0x30, 0xa6, 0x73, 0xcf, 0x77, 0xbc, 0x60, 0x86, 0x9a, + 0xaa, 0x55, 0x8a, 0x63, 0x8f, 0xbc, 0x86, 0xab, 0x3b, 0xea, 0xf9, 0xae, 0xb3, 0xe4, 0xe1, 0xb2, + 0x41, 0xb0, 0xaf, 0xb6, 0xe0, 0x2e, 0xe3, 0x0a, 0x3e, 0x21, 0x00, 0x7a, 0x44, 0x19, 0x73, 0x1d, + 0x3c, 0x55, 0x71, 0x4b, 0xa3, 0x4e, 0x2e, 0x60, 0xc0, 0xbd, 0x77, 0xae, 0xb3, 0x0c, 0xe7, 0x1c, + 0xcf, 0x94, 0xb4, 0x69, 0x60, 0xbb, 0xbe, 0x72, 0x0d, 0x72, 0x05, 0x43, 0x1e, 0x86, 0xfe, 0xb2, + 0xc3, 0x07, 0xe4, 0x12, 0xc0, 0x0b, 0xec, 0xf0, 0x5d, 0xe4, 0xbb, 0xdc, 0x45, 0x50, 0x33, 0xdc, + 0xf9, 0xf4, 0xf1, 0x3d, 0x0e, 0xd5, 0x71, 0xf3, 0xe0, 0x31, 0x08, 0x17, 0x01, 
0x9e, 0x2b, 0xc1, + 0x1e, 0xbd, 0x28, 0x72, 0x1d, 0xbc, 0x98, 0xfe, 0x0a, 0xe3, 0x75, 0xb1, 0xfb, 0xc8, 0x6f, 0x2b, + 0xd2, 0x7e, 0xf6, 0x3a, 0x22, 0x2d, 0xb6, 0x49, 0x9e, 0x9a, 0x45, 0x99, 0x5a, 0xa9, 0xc8, 0x9b, + 0xe7, 0x6a, 0xb5, 0x56, 0xb2, 0xcf, 0xaa, 0xff, 0xfa, 0x3f, 0xf9, 0xe1, 0x99, 0x5c, 0xe9, 0x4d, + 0xd5, 0xb7, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0x47, 0xe0, 0xd7, 0x5c, 0x84, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configuration.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configuration.pb.go new file mode 100644 index 0000000..e11f312 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configuration.pb.go @@ -0,0 +1,227 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/configuration.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a configuration within an Invocation associated with one or more +// ConfiguredTargets. It captures the environment and other settings that +// were used. +type Configuration struct { + // The format of this Configuration resource name must be: + // invocations/${INVOCATION_ID}/configs/${CONFIG_ID} + // The configuration ID of "default" should be preferred for the default + // configuration in a single-config invocation. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Configuration. They must match + // the resource name after proper encoding. + Id *Configuration_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The aggregate status for this configuration. + StatusAttributes *StatusAttributes `protobuf:"bytes,3,opt,name=status_attributes,json=statusAttributes,proto3" json:"status_attributes,omitempty"` + // Attributes that apply only to this configuration. + ConfigurationAttributes *ConfigurationAttributes `protobuf:"bytes,5,opt,name=configuration_attributes,json=configurationAttributes,proto3" json:"configuration_attributes,omitempty"` + // Arbitrary name-value pairs. + // This is implemented as a multi-map. Multiple properties are allowed with + // the same key. Properties will be returned in lexicographical order by key. 
+ Properties []*Property `protobuf:"bytes,6,rep,name=properties,proto3" json:"properties,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Configuration) Reset() { *m = Configuration{} } +func (m *Configuration) String() string { return proto.CompactTextString(m) } +func (*Configuration) ProtoMessage() {} +func (*Configuration) Descriptor() ([]byte, []int) { + return fileDescriptor_configuration_9976ad0123b12f96, []int{0} +} +func (m *Configuration) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Configuration.Unmarshal(m, b) +} +func (m *Configuration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Configuration.Marshal(b, m, deterministic) +} +func (dst *Configuration) XXX_Merge(src proto.Message) { + xxx_messageInfo_Configuration.Merge(dst, src) +} +func (m *Configuration) XXX_Size() int { + return xxx_messageInfo_Configuration.Size(m) +} +func (m *Configuration) XXX_DiscardUnknown() { + xxx_messageInfo_Configuration.DiscardUnknown(m) +} + +var xxx_messageInfo_Configuration proto.InternalMessageInfo + +func (m *Configuration) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Configuration) GetId() *Configuration_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *Configuration) GetStatusAttributes() *StatusAttributes { + if m != nil { + return m.StatusAttributes + } + return nil +} + +func (m *Configuration) GetConfigurationAttributes() *ConfigurationAttributes { + if m != nil { + return m.ConfigurationAttributes + } + return nil +} + +func (m *Configuration) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +// The resource ID components that identify the Configuration. +type Configuration_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The Configuration ID. 
+ ConfigurationId string `protobuf:"bytes,2,opt,name=configuration_id,json=configurationId,proto3" json:"configuration_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Configuration_Id) Reset() { *m = Configuration_Id{} } +func (m *Configuration_Id) String() string { return proto.CompactTextString(m) } +func (*Configuration_Id) ProtoMessage() {} +func (*Configuration_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_configuration_9976ad0123b12f96, []int{0, 0} +} +func (m *Configuration_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Configuration_Id.Unmarshal(m, b) +} +func (m *Configuration_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Configuration_Id.Marshal(b, m, deterministic) +} +func (dst *Configuration_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_Configuration_Id.Merge(dst, src) +} +func (m *Configuration_Id) XXX_Size() int { + return xxx_messageInfo_Configuration_Id.Size(m) +} +func (m *Configuration_Id) XXX_DiscardUnknown() { + xxx_messageInfo_Configuration_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_Configuration_Id proto.InternalMessageInfo + +func (m *Configuration_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *Configuration_Id) GetConfigurationId() string { + if m != nil { + return m.ConfigurationId + } + return "" +} + +// Attributes that apply only to the configuration. +type ConfigurationAttributes struct { + // The type of cpu. (e.g. "x86", "powerpc") + Cpu string `protobuf:"bytes,1,opt,name=cpu,proto3" json:"cpu,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigurationAttributes) Reset() { *m = ConfigurationAttributes{} } +func (m *ConfigurationAttributes) String() string { return proto.CompactTextString(m) } +func (*ConfigurationAttributes) ProtoMessage() {} +func (*ConfigurationAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_configuration_9976ad0123b12f96, []int{1} +} +func (m *ConfigurationAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigurationAttributes.Unmarshal(m, b) +} +func (m *ConfigurationAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigurationAttributes.Marshal(b, m, deterministic) +} +func (dst *ConfigurationAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigurationAttributes.Merge(dst, src) +} +func (m *ConfigurationAttributes) XXX_Size() int { + return xxx_messageInfo_ConfigurationAttributes.Size(m) +} +func (m *ConfigurationAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigurationAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigurationAttributes proto.InternalMessageInfo + +func (m *ConfigurationAttributes) GetCpu() string { + if m != nil { + return m.Cpu + } + return "" +} + +func init() { + proto.RegisterType((*Configuration)(nil), "google.devtools.resultstore.v2.Configuration") + proto.RegisterType((*Configuration_Id)(nil), "google.devtools.resultstore.v2.Configuration.Id") + proto.RegisterType((*ConfigurationAttributes)(nil), "google.devtools.resultstore.v2.ConfigurationAttributes") +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/configuration.proto", fileDescriptor_configuration_9976ad0123b12f96) +} + +var fileDescriptor_configuration_9976ad0123b12f96 = []byte{ + // 335 bytes 
of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0x4f, 0xb3, 0x40, + 0x10, 0xc6, 0x03, 0xbc, 0x6f, 0x93, 0x4e, 0x6d, 0xac, 0x7b, 0x29, 0xe9, 0xc1, 0x34, 0xf5, 0x82, + 0x69, 0xb2, 0x18, 0x3c, 0x78, 0xf0, 0xe2, 0x9f, 0x8b, 0xdc, 0x1a, 0xf4, 0x64, 0x62, 0x0c, 0x65, + 0xd7, 0xcd, 0x26, 0xc0, 0xe0, 0xee, 0x42, 0xe2, 0x37, 0xf5, 0xe3, 0x18, 0x81, 0x2a, 0x18, 0x6b, + 0xf5, 0x36, 0xcc, 0xf0, 0x7b, 0x9e, 0x99, 0x9d, 0x81, 0x40, 0x20, 0x8a, 0x94, 0xfb, 0x8c, 0x57, + 0x06, 0x31, 0xd5, 0xbe, 0xe2, 0xba, 0x4c, 0x8d, 0x36, 0xa8, 0xb8, 0x5f, 0x05, 0x7e, 0x82, 0xf9, + 0x93, 0x14, 0xa5, 0x8a, 0x8d, 0xc4, 0x9c, 0x16, 0x0a, 0x0d, 0x92, 0xc3, 0x86, 0xa1, 0x1b, 0x86, + 0x76, 0x18, 0x5a, 0x05, 0xb3, 0xe5, 0x4e, 0xcd, 0x2c, 0xdb, 0x88, 0x2d, 0x5e, 0x1d, 0x18, 0x5f, + 0x77, 0x4d, 0x08, 0x81, 0x7f, 0x79, 0x9c, 0x71, 0xd7, 0x9a, 0x5b, 0xde, 0x30, 0xaa, 0x63, 0x72, + 0x01, 0xb6, 0x64, 0xae, 0x3d, 0xb7, 0xbc, 0x51, 0x70, 0x42, 0x7f, 0xf6, 0xa7, 0x3d, 0x39, 0x1a, + 0xb2, 0xc8, 0x96, 0x8c, 0x3c, 0xc0, 0x81, 0x36, 0xb1, 0x29, 0xf5, 0x63, 0x6c, 0x8c, 0x92, 0xeb, + 0xd2, 0x70, 0xed, 0x3a, 0xbf, 0x13, 0xbc, 0xad, 0xc1, 0xcb, 0x0f, 0x2e, 0x9a, 0xe8, 0x2f, 0x19, + 0xa2, 0xc0, 0xed, 0x3d, 0x55, 0xd7, 0xe5, 0x7f, 0xed, 0x72, 0xf6, 0xa7, 0xb6, 0x3b, 0x66, 0xd3, + 0xe4, 0xfb, 0x02, 0xb9, 0x01, 0x28, 0x14, 0x16, 0x5c, 0x19, 0xc9, 0xb5, 0x3b, 0x98, 0x3b, 0xde, + 0x28, 0xf0, 0x76, 0xb9, 0xac, 0x1a, 0xe2, 0x25, 0xea, 0xb0, 0xb3, 0x3b, 0xb0, 0x43, 0x46, 0x8e, + 0x60, 0x2c, 0xf3, 0x0a, 0x93, 0x66, 0x00, 0xc9, 0xda, 0x0d, 0xec, 0x7d, 0x26, 0x43, 0x46, 0x8e, + 0x61, 0xd2, 0x1f, 0xb4, 0xdd, 0xcb, 0x30, 0xda, 0xef, 0xe5, 0x43, 0xb6, 0x58, 0xc2, 0x74, 0xcb, + 0x4c, 0x64, 0x02, 0x4e, 0x52, 0x94, 0xad, 0xc1, 0x7b, 0x78, 0xf5, 0x0c, 0x8b, 0x04, 0xb3, 0x1d, + 0xdd, 0xaf, 0xac, 0xfb, 0xb0, 0xfd, 0x43, 0x60, 0x1a, 0xe7, 0x82, 0xa2, 0x12, 0xbe, 0xe0, 0x79, + 0x7d, 0x4b, 0x7e, 0x53, 0x8a, 0x0b, 0xa9, 0xb7, 0xdd, 0xde, 0x79, 0xe7, 0x73, 0x3d, 0xa8, 0xa9, + 0xd3, 0xb7, 0x00, 0x00, 0x00, 0xff, 0xff, 0xad, 0xea, 0x27, 0xe2, 0x04, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configured_target.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configured_target.pb.go new file mode 100644 index 0000000..a8549c2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/configured_target.pb.go @@ -0,0 +1,290 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/configured_target.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Each ConfiguredTarget represents data for a given configuration of a given +// target in a given Invocation. +// Every ConfiguredTarget should have at least one Action as a child resource +// before the invocation is finalized. 
Refer to the Action's documentation for +// more info on this. +type ConfiguredTarget struct { + // The resource name. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID} + // where ${CONFIG_ID} must match the ID of an existing Configuration under + // this Invocation. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the ConfiguredTarget. They must + // match the resource name after proper encoding. + Id *ConfiguredTarget_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The aggregate status for this configuration of this target. If testing + // was not requested, set this to the build status (e.g. BUILT or + // FAILED_TO_BUILD). + StatusAttributes *StatusAttributes `protobuf:"bytes,3,opt,name=status_attributes,json=statusAttributes,proto3" json:"status_attributes,omitempty"` + // Captures the start time and duration of this configured target. + Timing *Timing `protobuf:"bytes,4,opt,name=timing,proto3" json:"timing,omitempty"` + // Test specific attributes for this ConfiguredTarget. + TestAttributes *ConfiguredTestAttributes `protobuf:"bytes,6,opt,name=test_attributes,json=testAttributes,proto3" json:"test_attributes,omitempty"` + // Arbitrary name-value pairs. + // This is implemented as a multi-map. Multiple properties are allowed with + // the same key. Properties will be returned in lexicographical order by key. + Properties []*Property `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty"` + // A list of file references for configured target level files. + // The file IDs must be unique within this list. Duplicate file IDs will + // result in an error. Files will be returned in lexicographical order by ID. 
+ Files []*File `protobuf:"bytes,8,rep,name=files,proto3" json:"files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfiguredTarget) Reset() { *m = ConfiguredTarget{} } +func (m *ConfiguredTarget) String() string { return proto.CompactTextString(m) } +func (*ConfiguredTarget) ProtoMessage() {} +func (*ConfiguredTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_configured_target_b74dc45116856d73, []int{0} +} +func (m *ConfiguredTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfiguredTarget.Unmarshal(m, b) +} +func (m *ConfiguredTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfiguredTarget.Marshal(b, m, deterministic) +} +func (dst *ConfiguredTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfiguredTarget.Merge(dst, src) +} +func (m *ConfiguredTarget) XXX_Size() int { + return xxx_messageInfo_ConfiguredTarget.Size(m) +} +func (m *ConfiguredTarget) XXX_DiscardUnknown() { + xxx_messageInfo_ConfiguredTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfiguredTarget proto.InternalMessageInfo + +func (m *ConfiguredTarget) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ConfiguredTarget) GetId() *ConfiguredTarget_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *ConfiguredTarget) GetStatusAttributes() *StatusAttributes { + if m != nil { + return m.StatusAttributes + } + return nil +} + +func (m *ConfiguredTarget) GetTiming() *Timing { + if m != nil { + return m.Timing + } + return nil +} + +func (m *ConfiguredTarget) GetTestAttributes() *ConfiguredTestAttributes { + if m != nil { + return m.TestAttributes + } + return nil +} + +func (m *ConfiguredTarget) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *ConfiguredTarget) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +// The resource ID components that identify the ConfiguredTarget. +type ConfiguredTarget_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The Target ID. + TargetId string `protobuf:"bytes,2,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // The Configuration ID. 
+ ConfigurationId string `protobuf:"bytes,3,opt,name=configuration_id,json=configurationId,proto3" json:"configuration_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfiguredTarget_Id) Reset() { *m = ConfiguredTarget_Id{} } +func (m *ConfiguredTarget_Id) String() string { return proto.CompactTextString(m) } +func (*ConfiguredTarget_Id) ProtoMessage() {} +func (*ConfiguredTarget_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_configured_target_b74dc45116856d73, []int{0, 0} +} +func (m *ConfiguredTarget_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfiguredTarget_Id.Unmarshal(m, b) +} +func (m *ConfiguredTarget_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfiguredTarget_Id.Marshal(b, m, deterministic) +} +func (dst *ConfiguredTarget_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfiguredTarget_Id.Merge(dst, src) +} +func (m *ConfiguredTarget_Id) XXX_Size() int { + return xxx_messageInfo_ConfiguredTarget_Id.Size(m) +} +func (m *ConfiguredTarget_Id) XXX_DiscardUnknown() { + xxx_messageInfo_ConfiguredTarget_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfiguredTarget_Id proto.InternalMessageInfo + +func (m *ConfiguredTarget_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *ConfiguredTarget_Id) GetTargetId() string { + if m != nil { + return m.TargetId + } + return "" +} + +func (m *ConfiguredTarget_Id) GetConfigurationId() string { + if m != nil { + return m.ConfigurationId + } + return "" +} + +// Attributes that apply only to test actions under this configured target. +type ConfiguredTestAttributes struct { + // Total number of test runs. For example, in bazel this is specified with + // --runs_per_test. Zero if runs_per_test is not used. + TotalRunCount int32 `protobuf:"varint,2,opt,name=total_run_count,json=totalRunCount,proto3" json:"total_run_count,omitempty"` + // Total number of test shards. Zero if shard count was not specified. + TotalShardCount int32 `protobuf:"varint,3,opt,name=total_shard_count,json=totalShardCount,proto3" json:"total_shard_count,omitempty"` + // How long test is allowed to run. 
+ TimeoutDuration *duration.Duration `protobuf:"bytes,5,opt,name=timeout_duration,json=timeoutDuration,proto3" json:"timeout_duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfiguredTestAttributes) Reset() { *m = ConfiguredTestAttributes{} } +func (m *ConfiguredTestAttributes) String() string { return proto.CompactTextString(m) } +func (*ConfiguredTestAttributes) ProtoMessage() {} +func (*ConfiguredTestAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_configured_target_b74dc45116856d73, []int{1} +} +func (m *ConfiguredTestAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfiguredTestAttributes.Unmarshal(m, b) +} +func (m *ConfiguredTestAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfiguredTestAttributes.Marshal(b, m, deterministic) +} +func (dst *ConfiguredTestAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfiguredTestAttributes.Merge(dst, src) +} +func (m *ConfiguredTestAttributes) XXX_Size() int { + return xxx_messageInfo_ConfiguredTestAttributes.Size(m) +} +func (m *ConfiguredTestAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_ConfiguredTestAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfiguredTestAttributes proto.InternalMessageInfo + +func (m *ConfiguredTestAttributes) GetTotalRunCount() int32 { + if m != nil { + return m.TotalRunCount + } + return 0 +} + +func (m *ConfiguredTestAttributes) GetTotalShardCount() int32 { + if m != nil { + return m.TotalShardCount + } + return 0 +} + +func (m *ConfiguredTestAttributes) GetTimeoutDuration() *duration.Duration { + if m != nil { + return m.TimeoutDuration + } + return nil +} + +func init() { + proto.RegisterType((*ConfiguredTarget)(nil), "google.devtools.resultstore.v2.ConfiguredTarget") + proto.RegisterType((*ConfiguredTarget_Id)(nil), "google.devtools.resultstore.v2.ConfiguredTarget.Id") + proto.RegisterType((*ConfiguredTestAttributes)(nil), "google.devtools.resultstore.v2.ConfiguredTestAttributes") +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/configured_target.proto", fileDescriptor_configured_target_b74dc45116856d73) +} + +var fileDescriptor_configured_target_b74dc45116856d73 = []byte{ + // 488 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xdf, 0x8a, 0x13, 0x31, + 0x14, 0xc6, 0xe9, 0x5f, 0xb7, 0x67, 0x5d, 0xdb, 0xcd, 0xd5, 0x58, 0x41, 0x4a, 0x95, 0xa5, 0xab, + 0x30, 0x23, 0x5d, 0x10, 0x51, 0x10, 0xb4, 0x8b, 0x38, 0x77, 0x4b, 0x76, 0xaf, 0x04, 0x19, 0xd2, + 0x26, 0x1d, 0x03, 0x33, 0x49, 0x37, 0x39, 0x29, 0xf8, 0x5e, 0x3e, 0x93, 0xcf, 0x21, 0x4d, 0x66, + 0x6a, 0x77, 0x61, 0x1d, 0xbc, 0x9b, 0x7c, 0xf3, 0x7d, 0xbf, 0x9c, 0xe4, 0x9c, 0xc0, 0xdb, 0x5c, + 0xeb, 0xbc, 0x10, 0x09, 0x17, 0x5b, 0xd4, 0xba, 0xb0, 0x89, 0x11, 0xd6, 0x15, 0x68, 0x51, 0x1b, + 0x91, 0x6c, 0xe7, 0xc9, 0x4a, 0xab, 0xb5, 0xcc, 0x9d, 0x11, 0x3c, 0x43, 0x66, 0x72, 0x81, 0xf1, + 0xc6, 0x68, 0xd4, 0xe4, 0x79, 0xc8, 0xc5, 0x75, 0x2e, 0x3e, 0xc8, 0xc5, 0xdb, 0xf9, 0xf8, 0x75, + 0x23, 0xb7, 0x2c, 0xb5, 0x0a, 0xb0, 0xf1, 0x79, 0x83, 0x79, 0x2d, 0x0b, 0x51, 0x59, 0xab, 0x7d, + 0x13, 0xbf, 0x5a, 0xba, 0x75, 0xc2, 0x9d, 0x61, 0x28, 0x6b, 0xd4, 0xf4, 0x77, 0x17, 0x46, 0x8b, + 0x7d, 0xcd, 0x37, 0xbe, 0x64, 0x42, 0xa0, 0xab, 0x58, 0x29, 0xa2, 0xd6, 0xa4, 0x35, 0x1b, 0x50, + 0xff, 0x4d, 0x16, 0xd0, 0x96, 0x3c, 0x6a, 0x4f, 0x5a, 0xb3, 0xe3, 0xf9, 0x45, 0xfc, 0xef, 0xd3, + 
0xc4, 0xf7, 0x89, 0x71, 0xca, 0x69, 0x5b, 0x72, 0xf2, 0x1d, 0x4e, 0x2d, 0x32, 0x74, 0x36, 0x63, + 0x88, 0x46, 0x2e, 0x1d, 0x0a, 0x1b, 0x75, 0x3c, 0xf3, 0x4d, 0x13, 0xf3, 0xda, 0x07, 0x3f, 0xed, + 0x73, 0x74, 0x64, 0xef, 0x29, 0xe4, 0x23, 0xf4, 0x51, 0x96, 0x52, 0xe5, 0x51, 0xd7, 0x33, 0xcf, + 0x9a, 0x98, 0x37, 0xde, 0x4d, 0xab, 0x14, 0x61, 0x30, 0x44, 0x61, 0xf1, 0xb0, 0xb8, 0xbe, 0x07, + 0xbd, 0xfb, 0x8f, 0x03, 0x0b, 0x8b, 0x07, 0x45, 0x3e, 0xc1, 0x3b, 0x6b, 0xf2, 0x15, 0x60, 0x63, + 0xf4, 0x46, 0x18, 0x94, 0xc2, 0x46, 0x8f, 0x26, 0x9d, 0xd9, 0xf1, 0x7c, 0xd6, 0x44, 0xbf, 0x0a, + 0x89, 0x9f, 0xf4, 0x20, 0x4b, 0xde, 0x43, 0x6f, 0xd7, 0x67, 0x1b, 0x1d, 0x79, 0xc8, 0xcb, 0x26, + 0xc8, 0x17, 0x59, 0x08, 0x1a, 0x22, 0xe3, 0x5b, 0x68, 0xa7, 0x9c, 0xbc, 0x80, 0x13, 0xa9, 0xb6, + 0x7a, 0xe5, 0xe7, 0x21, 0x93, 0xbc, 0xea, 0xf7, 0xe3, 0xbf, 0x62, 0xca, 0xc9, 0x33, 0x18, 0x84, + 0x41, 0xce, 0xaa, 0xf6, 0x0f, 0xe8, 0x51, 0x10, 0x52, 0x4e, 0xce, 0x61, 0x54, 0x0f, 0xfc, 0x1e, + 0xd2, 0xf1, 0x9e, 0xe1, 0x1d, 0x3d, 0xe5, 0xd3, 0x5f, 0x2d, 0x88, 0x1e, 0xba, 0x25, 0x72, 0x06, + 0x43, 0xd4, 0xc8, 0x8a, 0xcc, 0x38, 0x95, 0xad, 0xb4, 0x53, 0xe8, 0xb7, 0xea, 0xd1, 0x13, 0x2f, + 0x53, 0xa7, 0x16, 0x3b, 0x91, 0xbc, 0x82, 0xd3, 0xe0, 0xb3, 0x3f, 0x98, 0xe1, 0x95, 0xb3, 0xe3, + 0x9d, 0x01, 0x70, 0xbd, 0xd3, 0x83, 0xf7, 0x12, 0x46, 0x28, 0x4b, 0xa1, 0x1d, 0x66, 0xf5, 0xcc, + 0x47, 0x3d, 0xdf, 0xcd, 0xa7, 0xf5, 0x55, 0xd5, 0x8f, 0x22, 0xbe, 0xac, 0x0c, 0x74, 0x58, 0x45, + 0x6a, 0xe1, 0xf3, 0x2d, 0x4c, 0x57, 0xba, 0x6c, 0xb8, 0xdb, 0xab, 0xd6, 0xb7, 0xb4, 0x72, 0xe4, + 0xba, 0x60, 0x2a, 0x8f, 0xb5, 0xc9, 0x93, 0x5c, 0x28, 0xbf, 0x41, 0x12, 0x7e, 0xb1, 0x8d, 0xb4, + 0x0f, 0xbd, 0xd8, 0x0f, 0x07, 0xcb, 0x65, 0xdf, 0xa7, 0x2e, 0xfe, 0x04, 0x00, 0x00, 0xff, 0xff, + 0xe2, 0xc4, 0xd5, 0x45, 0x6b, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage.pb.go new file mode 100644 index 0000000..6517567 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage.pb.go @@ -0,0 +1,310 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/coverage.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes line coverage for a file +type LineCoverage struct { + // Which source lines in the file represent the start of a statement that was + // instrumented to detect whether it was executed by the test. + // + // This is a bitfield where i-th bit corresponds to the i-th line. Divide line + // number by 8 to get index into byte array. Mod line number by 8 to get bit + // number (0 = LSB, 7 = MSB). + // + // A 1 denotes the line was instrumented. + // A 0 denotes the line was not instrumented. 
+ InstrumentedLines []byte `protobuf:"bytes,1,opt,name=instrumented_lines,json=instrumentedLines,proto3" json:"instrumented_lines,omitempty"` + // Which of the instrumented source lines were executed by the test. Should + // include lines that were not instrumented. + // + // This is a bitfield where i-th bit corresponds to the i-th line. Divide line + // number by 8 to get index into byte array. Mod line number by 8 to get bit + // number (0 = LSB, 7 = MSB). + // + // A 1 denotes the line was executed. + // A 0 denotes the line was not executed. + ExecutedLines []byte `protobuf:"bytes,2,opt,name=executed_lines,json=executedLines,proto3" json:"executed_lines,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LineCoverage) Reset() { *m = LineCoverage{} } +func (m *LineCoverage) String() string { return proto.CompactTextString(m) } +func (*LineCoverage) ProtoMessage() {} +func (*LineCoverage) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_cf37bf4336d4edc8, []int{0} +} +func (m *LineCoverage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LineCoverage.Unmarshal(m, b) +} +func (m *LineCoverage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LineCoverage.Marshal(b, m, deterministic) +} +func (dst *LineCoverage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LineCoverage.Merge(dst, src) +} +func (m *LineCoverage) XXX_Size() int { + return xxx_messageInfo_LineCoverage.Size(m) +} +func (m *LineCoverage) XXX_DiscardUnknown() { + xxx_messageInfo_LineCoverage.DiscardUnknown(m) +} + +var xxx_messageInfo_LineCoverage proto.InternalMessageInfo + +func (m *LineCoverage) GetInstrumentedLines() []byte { + if m != nil { + return m.InstrumentedLines + } + return nil +} + +func (m *LineCoverage) GetExecutedLines() []byte { + if m != nil { + return m.ExecutedLines + } + return nil +} + +// Describes branch coverage for a file +type BranchCoverage struct { + // The field branch_present denotes the lines containing at least one branch. + // + // This is a bitfield where i-th bit corresponds to the i-th line. Divide line + // number by 8 to get index into byte array. Mod line number by 8 to get bit + // number (0 = LSB, 7 = MSB). + // + // A 1 denotes the line contains at least one branch. + // A 0 denotes the line contains no branches. + BranchPresent []byte `protobuf:"bytes,1,opt,name=branch_present,json=branchPresent,proto3" json:"branch_present,omitempty"` + // Contains the number of branches present, only for the lines which have the + // corresponding bit set in branch_present, in a relative order ignoring + // lines which do not have any branches. + BranchesInLine []int32 `protobuf:"varint,2,rep,packed,name=branches_in_line,json=branchesInLine,proto3" json:"branches_in_line,omitempty"` + // As each branch can have any one of the following three states: not + // executed, executed but not taken, executed and taken. + // + // This is a bitfield where i-th bit corresponds to the i-th line. Divide line + // number by 8 to get index into byte array. Mod line number by 8 to get bit + // number (0 = LSB, 7 = MSB). + // + // i-th bit of the following two byte arrays are used to denote the above + // mentioned states. 
+ // + // not executed: i-th bit of executed == 0 && i-th bit of taken == 0 + // executed but not taken: i-th bit of executed == 1 && i-th bit of taken == 0 + // executed and taken: i-th bit of executed == 1 && i-th bit of taken == 1 + Executed []byte `protobuf:"bytes,3,opt,name=executed,proto3" json:"executed,omitempty"` + // Described above. + Taken []byte `protobuf:"bytes,4,opt,name=taken,proto3" json:"taken,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BranchCoverage) Reset() { *m = BranchCoverage{} } +func (m *BranchCoverage) String() string { return proto.CompactTextString(m) } +func (*BranchCoverage) ProtoMessage() {} +func (*BranchCoverage) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_cf37bf4336d4edc8, []int{1} +} +func (m *BranchCoverage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BranchCoverage.Unmarshal(m, b) +} +func (m *BranchCoverage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BranchCoverage.Marshal(b, m, deterministic) +} +func (dst *BranchCoverage) XXX_Merge(src proto.Message) { + xxx_messageInfo_BranchCoverage.Merge(dst, src) +} +func (m *BranchCoverage) XXX_Size() int { + return xxx_messageInfo_BranchCoverage.Size(m) +} +func (m *BranchCoverage) XXX_DiscardUnknown() { + xxx_messageInfo_BranchCoverage.DiscardUnknown(m) +} + +var xxx_messageInfo_BranchCoverage proto.InternalMessageInfo + +func (m *BranchCoverage) GetBranchPresent() []byte { + if m != nil { + return m.BranchPresent + } + return nil +} + +func (m *BranchCoverage) GetBranchesInLine() []int32 { + if m != nil { + return m.BranchesInLine + } + return nil +} + +func (m *BranchCoverage) GetExecuted() []byte { + if m != nil { + return m.Executed + } + return nil +} + +func (m *BranchCoverage) GetTaken() []byte { + if m != nil { + return m.Taken + } + return nil +} + +// Describes code coverage for a particular file under test. +type FileCoverage struct { + // Path of source file within the SourceContext of this Invocation. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // Details of lines in a file required to calculate line coverage. + LineCoverage *LineCoverage `protobuf:"bytes,2,opt,name=line_coverage,json=lineCoverage,proto3" json:"line_coverage,omitempty"` + // Details of branches in a file required to calculate branch coverage. 
+ BranchCoverage *BranchCoverage `protobuf:"bytes,3,opt,name=branch_coverage,json=branchCoverage,proto3" json:"branch_coverage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileCoverage) Reset() { *m = FileCoverage{} } +func (m *FileCoverage) String() string { return proto.CompactTextString(m) } +func (*FileCoverage) ProtoMessage() {} +func (*FileCoverage) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_cf37bf4336d4edc8, []int{2} +} +func (m *FileCoverage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileCoverage.Unmarshal(m, b) +} +func (m *FileCoverage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileCoverage.Marshal(b, m, deterministic) +} +func (dst *FileCoverage) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileCoverage.Merge(dst, src) +} +func (m *FileCoverage) XXX_Size() int { + return xxx_messageInfo_FileCoverage.Size(m) +} +func (m *FileCoverage) XXX_DiscardUnknown() { + xxx_messageInfo_FileCoverage.DiscardUnknown(m) +} + +var xxx_messageInfo_FileCoverage proto.InternalMessageInfo + +func (m *FileCoverage) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *FileCoverage) GetLineCoverage() *LineCoverage { + if m != nil { + return m.LineCoverage + } + return nil +} + +func (m *FileCoverage) GetBranchCoverage() *BranchCoverage { + if m != nil { + return m.BranchCoverage + } + return nil +} + +// Describes code coverage for a build or test Action. This is used to store +// baseline coverage for build Actions and test coverage for test Actions. +type ActionCoverage struct { + // List of coverage info for all source files that the TestResult covers. + FileCoverages []*FileCoverage `protobuf:"bytes,2,rep,name=file_coverages,json=fileCoverages,proto3" json:"file_coverages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ActionCoverage) Reset() { *m = ActionCoverage{} } +func (m *ActionCoverage) String() string { return proto.CompactTextString(m) } +func (*ActionCoverage) ProtoMessage() {} +func (*ActionCoverage) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_cf37bf4336d4edc8, []int{3} +} +func (m *ActionCoverage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ActionCoverage.Unmarshal(m, b) +} +func (m *ActionCoverage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ActionCoverage.Marshal(b, m, deterministic) +} +func (dst *ActionCoverage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ActionCoverage.Merge(dst, src) +} +func (m *ActionCoverage) XXX_Size() int { + return xxx_messageInfo_ActionCoverage.Size(m) +} +func (m *ActionCoverage) XXX_DiscardUnknown() { + xxx_messageInfo_ActionCoverage.DiscardUnknown(m) +} + +var xxx_messageInfo_ActionCoverage proto.InternalMessageInfo + +func (m *ActionCoverage) GetFileCoverages() []*FileCoverage { + if m != nil { + return m.FileCoverages + } + return nil +} + +func init() { + proto.RegisterType((*LineCoverage)(nil), "google.devtools.resultstore.v2.LineCoverage") + proto.RegisterType((*BranchCoverage)(nil), "google.devtools.resultstore.v2.BranchCoverage") + proto.RegisterType((*FileCoverage)(nil), "google.devtools.resultstore.v2.FileCoverage") + proto.RegisterType((*ActionCoverage)(nil), "google.devtools.resultstore.v2.ActionCoverage") +} + +func init() { + 
proto.RegisterFile("google/devtools/resultstore/v2/coverage.proto", fileDescriptor_coverage_cf37bf4336d4edc8) +} + +var fileDescriptor_coverage_cf37bf4336d4edc8 = []byte{ + // 372 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x4d, 0x6b, 0xe3, 0x30, + 0x10, 0xc5, 0xf9, 0x58, 0x76, 0x67, 0x6d, 0xef, 0xae, 0xd8, 0x83, 0xd9, 0xc3, 0x12, 0x0c, 0x81, + 0x1c, 0x1a, 0x19, 0xd2, 0x63, 0x4f, 0x4d, 0xa1, 0x10, 0xe8, 0x21, 0x75, 0x0f, 0x85, 0x5e, 0x8c, + 0xe3, 0x4c, 0x1c, 0x51, 0x45, 0x72, 0x25, 0xc5, 0xf4, 0x7f, 0xf4, 0x7f, 0xf5, 0x37, 0x95, 0xc8, + 0x1f, 0x71, 0x0f, 0x6d, 0x7a, 0xd3, 0xbc, 0x79, 0xf3, 0xde, 0xcc, 0x43, 0x30, 0xcd, 0xa5, 0xcc, + 0x39, 0x46, 0x6b, 0x2c, 0x8d, 0x94, 0x5c, 0x47, 0x0a, 0xf5, 0x9e, 0x1b, 0x6d, 0xa4, 0xc2, 0xa8, + 0x9c, 0x45, 0x99, 0x2c, 0x51, 0xa5, 0x39, 0xd2, 0x42, 0x49, 0x23, 0xc9, 0xff, 0x8a, 0x4e, 0x1b, + 0x3a, 0xed, 0xd0, 0x69, 0x39, 0x0b, 0xd7, 0xe0, 0xde, 0x30, 0x81, 0x57, 0xf5, 0x14, 0x99, 0x02, + 0x61, 0x42, 0x1b, 0xb5, 0xdf, 0xa1, 0x30, 0xb8, 0x4e, 0x38, 0x13, 0xa8, 0x03, 0x67, 0xe4, 0x4c, + 0xdc, 0xf8, 0x4f, 0xb7, 0x73, 0x98, 0xd2, 0x64, 0x0c, 0x3e, 0x3e, 0x63, 0xb6, 0x3f, 0x52, 0x7b, + 0x96, 0xea, 0x35, 0xa8, 0xa5, 0x85, 0x2f, 0x0e, 0xf8, 0x73, 0x95, 0x8a, 0x6c, 0xdb, 0x1a, 0x8d, + 0xc1, 0x5f, 0x59, 0x24, 0x29, 0x14, 0x6a, 0x14, 0xa6, 0x36, 0xf1, 0x2a, 0x74, 0x59, 0x81, 0x64, + 0x02, 0xbf, 0x2b, 0x00, 0x75, 0xc2, 0x84, 0xf5, 0x08, 0x7a, 0xa3, 0xfe, 0x64, 0x18, 0xfb, 0x0d, + 0xbe, 0x10, 0x07, 0x13, 0xf2, 0x0f, 0xbe, 0x37, 0xa6, 0x41, 0xdf, 0x4a, 0xb5, 0x35, 0xf9, 0x0b, + 0x43, 0x93, 0x3e, 0xa2, 0x08, 0x06, 0xb6, 0x51, 0x15, 0xe1, 0xab, 0x03, 0xee, 0x35, 0xe3, 0xc7, + 0xe3, 0x09, 0x0c, 0x8a, 0xd4, 0x6c, 0xed, 0x26, 0x3f, 0x62, 0xfb, 0x26, 0xb7, 0xe0, 0x1d, 0x4c, + 0x93, 0x26, 0x57, 0x7b, 0xe0, 0xcf, 0xd9, 0x19, 0xfd, 0x3c, 0x58, 0xda, 0x4d, 0x35, 0x76, 0x79, + 0x37, 0xe3, 0x7b, 0xf8, 0x55, 0x9f, 0xde, 0x8a, 0xf6, 0xad, 0x28, 0x3d, 0x25, 0xfa, 0x3e, 0xc3, + 0x26, 0x82, 0xa6, 0x0e, 0x11, 0xfc, 0xcb, 0xcc, 0x30, 0x29, 0x5a, 0xab, 0x3b, 0xf0, 0x37, 0x8c, + 0x1f, 0xb7, 0xd7, 0x36, 0xbc, 0x2f, 0xac, 0xdf, 0xcd, 0x25, 0xf6, 0x36, 0x9d, 0x4a, 0xcf, 0x9f, + 0x20, 0xcc, 0xe4, 0xee, 0x84, 0xc2, 0xd2, 0x79, 0x58, 0xd4, 0x8c, 0x5c, 0xf2, 0x54, 0xe4, 0x54, + 0xaa, 0x3c, 0xca, 0x51, 0xd8, 0x7f, 0x19, 0x55, 0xad, 0xb4, 0x60, 0xfa, 0xa3, 0x9f, 0x7c, 0xd1, + 0x29, 0x57, 0xdf, 0xec, 0xd4, 0xf9, 0x5b, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3e, 0x92, 0x57, 0x34, + 0xfe, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage_summary.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage_summary.pb.go new file mode 100644 index 0000000..6c1b721 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/coverage_summary.pb.go @@ -0,0 +1,229 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/coverage_summary.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Summary of line coverage +type LineCoverageSummary struct { + // Number of lines instrumented for coverage. + InstrumentedLineCount int32 `protobuf:"varint,1,opt,name=instrumented_line_count,json=instrumentedLineCount,proto3" json:"instrumented_line_count,omitempty"` + // Number of instrumented lines that were executed by the test. + ExecutedLineCount int32 `protobuf:"varint,2,opt,name=executed_line_count,json=executedLineCount,proto3" json:"executed_line_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LineCoverageSummary) Reset() { *m = LineCoverageSummary{} } +func (m *LineCoverageSummary) String() string { return proto.CompactTextString(m) } +func (*LineCoverageSummary) ProtoMessage() {} +func (*LineCoverageSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_summary_6ce81cdd85c870eb, []int{0} +} +func (m *LineCoverageSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LineCoverageSummary.Unmarshal(m, b) +} +func (m *LineCoverageSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LineCoverageSummary.Marshal(b, m, deterministic) +} +func (dst *LineCoverageSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_LineCoverageSummary.Merge(dst, src) +} +func (m *LineCoverageSummary) XXX_Size() int { + return xxx_messageInfo_LineCoverageSummary.Size(m) +} +func (m *LineCoverageSummary) XXX_DiscardUnknown() { + xxx_messageInfo_LineCoverageSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_LineCoverageSummary proto.InternalMessageInfo + +func (m *LineCoverageSummary) GetInstrumentedLineCount() int32 { + if m != nil { + return m.InstrumentedLineCount + } + return 0 +} + +func (m *LineCoverageSummary) GetExecutedLineCount() int32 { + if m != nil { + return m.ExecutedLineCount + } + return 0 +} + +// Summary of branch coverage +// A branch may be: +// * not executed. Counted only in total. +// * executed but not taken. Appears in total and executed. +// * executed and taken. Appears in all three fields. +type BranchCoverageSummary struct { + // The number of branches present in the file. + TotalBranchCount int32 `protobuf:"varint,1,opt,name=total_branch_count,json=totalBranchCount,proto3" json:"total_branch_count,omitempty"` + // The number of branches executed out of the total branches present. + // A branch is executed when its condition is evaluated. + // This is <= total_branch_count as not all branches are executed. + ExecutedBranchCount int32 `protobuf:"varint,2,opt,name=executed_branch_count,json=executedBranchCount,proto3" json:"executed_branch_count,omitempty"` + // The number of branches taken out of the total branches executed. + // A branch is taken when its condition is satisfied. + // This is <= executed_branch_count as not all executed branches are taken. 
+ TakenBranchCount int32 `protobuf:"varint,3,opt,name=taken_branch_count,json=takenBranchCount,proto3" json:"taken_branch_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BranchCoverageSummary) Reset() { *m = BranchCoverageSummary{} } +func (m *BranchCoverageSummary) String() string { return proto.CompactTextString(m) } +func (*BranchCoverageSummary) ProtoMessage() {} +func (*BranchCoverageSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_summary_6ce81cdd85c870eb, []int{1} +} +func (m *BranchCoverageSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BranchCoverageSummary.Unmarshal(m, b) +} +func (m *BranchCoverageSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BranchCoverageSummary.Marshal(b, m, deterministic) +} +func (dst *BranchCoverageSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_BranchCoverageSummary.Merge(dst, src) +} +func (m *BranchCoverageSummary) XXX_Size() int { + return xxx_messageInfo_BranchCoverageSummary.Size(m) +} +func (m *BranchCoverageSummary) XXX_DiscardUnknown() { + xxx_messageInfo_BranchCoverageSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_BranchCoverageSummary proto.InternalMessageInfo + +func (m *BranchCoverageSummary) GetTotalBranchCount() int32 { + if m != nil { + return m.TotalBranchCount + } + return 0 +} + +func (m *BranchCoverageSummary) GetExecutedBranchCount() int32 { + if m != nil { + return m.ExecutedBranchCount + } + return 0 +} + +func (m *BranchCoverageSummary) GetTakenBranchCount() int32 { + if m != nil { + return m.TakenBranchCount + } + return 0 +} + +// Summary of coverage in each language +type LanguageCoverageSummary struct { + // This summary is for all files written in this programming language. + Language Language `protobuf:"varint,1,opt,name=language,proto3,enum=google.devtools.resultstore.v2.Language" json:"language,omitempty"` + // Summary of lines covered vs instrumented. + LineSummary *LineCoverageSummary `protobuf:"bytes,2,opt,name=line_summary,json=lineSummary,proto3" json:"line_summary,omitempty"` + // Summary of branch coverage. 
+ BranchSummary *BranchCoverageSummary `protobuf:"bytes,3,opt,name=branch_summary,json=branchSummary,proto3" json:"branch_summary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LanguageCoverageSummary) Reset() { *m = LanguageCoverageSummary{} } +func (m *LanguageCoverageSummary) String() string { return proto.CompactTextString(m) } +func (*LanguageCoverageSummary) ProtoMessage() {} +func (*LanguageCoverageSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_coverage_summary_6ce81cdd85c870eb, []int{2} +} +func (m *LanguageCoverageSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LanguageCoverageSummary.Unmarshal(m, b) +} +func (m *LanguageCoverageSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LanguageCoverageSummary.Marshal(b, m, deterministic) +} +func (dst *LanguageCoverageSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_LanguageCoverageSummary.Merge(dst, src) +} +func (m *LanguageCoverageSummary) XXX_Size() int { + return xxx_messageInfo_LanguageCoverageSummary.Size(m) +} +func (m *LanguageCoverageSummary) XXX_DiscardUnknown() { + xxx_messageInfo_LanguageCoverageSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_LanguageCoverageSummary proto.InternalMessageInfo + +func (m *LanguageCoverageSummary) GetLanguage() Language { + if m != nil { + return m.Language + } + return Language_LANGUAGE_UNSPECIFIED +} + +func (m *LanguageCoverageSummary) GetLineSummary() *LineCoverageSummary { + if m != nil { + return m.LineSummary + } + return nil +} + +func (m *LanguageCoverageSummary) GetBranchSummary() *BranchCoverageSummary { + if m != nil { + return m.BranchSummary + } + return nil +} + +func init() { + proto.RegisterType((*LineCoverageSummary)(nil), "google.devtools.resultstore.v2.LineCoverageSummary") + proto.RegisterType((*BranchCoverageSummary)(nil), "google.devtools.resultstore.v2.BranchCoverageSummary") + proto.RegisterType((*LanguageCoverageSummary)(nil), "google.devtools.resultstore.v2.LanguageCoverageSummary") +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/coverage_summary.proto", fileDescriptor_coverage_summary_6ce81cdd85c870eb) +} + +var fileDescriptor_coverage_summary_6ce81cdd85c870eb = []byte{ + // 358 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0x41, 0x4b, 0xeb, 0x40, + 0x14, 0x85, 0x49, 0xcb, 0x7b, 0x3c, 0xa6, 0xef, 0x95, 0x67, 0x4a, 0xa9, 0xb8, 0x10, 0xc9, 0xaa, + 0xa0, 0x4c, 0x20, 0xa5, 0x6e, 0xdc, 0x55, 0x37, 0x42, 0x17, 0x12, 0xc1, 0x85, 0x08, 0x61, 0x9a, + 0x5e, 0xc6, 0x60, 0x32, 0xb7, 0x4e, 0x26, 0x41, 0x17, 0xfe, 0x0c, 0x7f, 0x83, 0x7f, 0x53, 0x32, + 0x33, 0x09, 0x69, 0xab, 0x66, 0x39, 0x9c, 0x7b, 0xbe, 0x73, 0x66, 0xee, 0x90, 0x39, 0x47, 0xe4, + 0x29, 0xf8, 0x6b, 0x28, 0x15, 0x62, 0x9a, 0xfb, 0x12, 0xf2, 0x22, 0x55, 0xb9, 0x42, 0x09, 0x7e, + 0x19, 0xf8, 0x31, 0x96, 0x20, 0x19, 0x87, 0x28, 0x2f, 0xb2, 0x8c, 0xc9, 0x57, 0xba, 0x91, 0xa8, + 0xd0, 0x3d, 0x36, 0x36, 0x5a, 0xdb, 0x68, 0xcb, 0x46, 0xcb, 0xe0, 0xe8, 0xb4, 0x13, 0x9b, 0x65, + 0x28, 0x0c, 0xcc, 0x7b, 0x23, 0xa3, 0x65, 0x22, 0xe0, 0xd2, 0x46, 0xdd, 0x9a, 0x24, 0xf7, 0x9c, + 0x4c, 0x12, 0x91, 0x2b, 0x59, 0x64, 0x20, 0x14, 0xac, 0xa3, 0x34, 0x11, 0x10, 0xc5, 0x58, 0x08, + 0x75, 0xe8, 0x9c, 0x38, 0xd3, 0x5f, 0xe1, 0xb8, 0x2d, 0x1b, 0x42, 0x21, 0x94, 0x4b, 0xc9, 0x08, + 0x5e, 0x20, 0x2e, 0x76, 0x3c, 0x3d, 0xed, 0x39, 0xa8, 0xa5, 0x66, 0xde, 0xfb, 0x70, 
0xc8, 0x78, + 0x21, 0x99, 0x88, 0x1f, 0x77, 0x1b, 0x9c, 0x11, 0x57, 0xa1, 0x62, 0x69, 0xb4, 0xd2, 0xf2, 0x56, + 0xf8, 0x7f, 0xad, 0xd4, 0xbe, 0x2a, 0x37, 0x20, 0xe3, 0x26, 0x77, 0xcb, 0x60, 0x92, 0x9b, 0x52, + 0x6d, 0x4f, 0x95, 0xc0, 0x9e, 0x40, 0x6c, 0x1b, 0xfa, 0x36, 0xa1, 0x52, 0x5a, 0xd3, 0xde, 0x7b, + 0x8f, 0x4c, 0x96, 0x4c, 0xf0, 0x82, 0xf1, 0xbd, 0xd7, 0xba, 0x22, 0x7f, 0x52, 0x2b, 0xe9, 0x86, + 0xc3, 0x60, 0x4a, 0x7f, 0x5e, 0x12, 0xad, 0x51, 0x61, 0xe3, 0x74, 0xef, 0xc8, 0x5f, 0xfd, 0x64, + 0x76, 0xdb, 0xba, 0xfa, 0x20, 0x98, 0x75, 0x92, 0xf6, 0xd7, 0x17, 0x0e, 0x2a, 0x50, 0xdd, 0xee, + 0x81, 0x0c, 0xed, 0x0d, 0x6b, 0x72, 0x5f, 0x93, 0xe7, 0x5d, 0xe4, 0x2f, 0x17, 0x13, 0xfe, 0x33, + 0x30, 0x7b, 0x5c, 0x3c, 0x13, 0x2f, 0xc6, 0xac, 0x03, 0x75, 0xe3, 0xdc, 0x5f, 0xdb, 0x09, 0x8e, + 0xd5, 0x85, 0x29, 0x4a, 0xee, 0x73, 0x10, 0xfa, 0x13, 0xfa, 0x46, 0x62, 0x9b, 0x24, 0xff, 0xee, + 0xd3, 0x5e, 0xb4, 0x8e, 0xab, 0xdf, 0xda, 0x35, 0xfb, 0x0c, 0x00, 0x00, 0xff, 0xff, 0x0f, 0x65, + 0x24, 0xb5, 0x40, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file.pb.go new file mode 100644 index 0000000..2ee4c54 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file.pb.go @@ -0,0 +1,298 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/file.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// If known, the hash function used to compute this digest. +type File_HashType int32 + +const ( + // Unknown + File_HASH_TYPE_UNSPECIFIED File_HashType = 0 + // MD5 + File_MD5 File_HashType = 1 + // SHA-1 + File_SHA1 File_HashType = 2 + // SHA-256 + File_SHA256 File_HashType = 3 +) + +var File_HashType_name = map[int32]string{ + 0: "HASH_TYPE_UNSPECIFIED", + 1: "MD5", + 2: "SHA1", + 3: "SHA256", +} +var File_HashType_value = map[string]int32{ + "HASH_TYPE_UNSPECIFIED": 0, + "MD5": 1, + "SHA1": 2, + "SHA256": 3, +} + +func (x File_HashType) String() string { + return proto.EnumName(File_HashType_name, int32(x)) +} +func (File_HashType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_file_ce5c35947c0b9efa, []int{0, 0} +} + +// The metadata for a file or an archive file entry. +type File struct { + // The identifier of the file or archive entry. + // User-provided, must be unique for the repeated field it is in. When an + // Append RPC is called with a Files field populated, if a File already exists + // with this ID, that File will be overwritten with the new File proto. + Uid string `protobuf:"bytes,1,opt,name=uid,proto3" json:"uid,omitempty"` + // The URI of a file. + // This could also be the URI of an entire archive. + // Most log data doesn't need to be stored forever, so a ttl is suggested. 
+ // Note that if you ever move or delete the file at this URI, the link from + // the server will be broken. + Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` + // (Optional) The length of the file in bytes. Allows the filesize to be + // shown in the UI. Omit if file is still being written or length is + // not known. This could also be the length of an entire archive. + Length *wrappers.Int64Value `protobuf:"bytes,3,opt,name=length,proto3" json:"length,omitempty"` + // (Optional) The content-type (aka MIME-type) of the file. This is sent to + // the web browser so it knows how to handle the file. (e.g. text/plain, + // image/jpeg, text/html, etc). For zip archives, use "application/zip". + ContentType string `protobuf:"bytes,4,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + // (Optional) If the above path, length, and content_type are referring to an + // archive, and you wish to refer to a particular entry within that archive, + // put the particular archive entry data here. + ArchiveEntry *ArchiveEntry `protobuf:"bytes,5,opt,name=archive_entry,json=archiveEntry,proto3" json:"archive_entry,omitempty"` + // (Optional) A url to a content display app/site for this file or archive + // entry. + ContentViewer string `protobuf:"bytes,6,opt,name=content_viewer,json=contentViewer,proto3" json:"content_viewer,omitempty"` + // (Optional) Whether to hide this file or archive entry in the UI. Defaults + // to false. A checkbox lets users see hidden files, but they're hidden by + // default. + Hidden bool `protobuf:"varint,7,opt,name=hidden,proto3" json:"hidden,omitempty"` + // (Optional) A short description of what this file or archive entry + // contains. This description should help someone viewing the list of these + // files to understand the purpose of this file and what they would want to + // view it for. + Description string `protobuf:"bytes,8,opt,name=description,proto3" json:"description,omitempty"` + // (Optional) digest of this file in hexadecimal-like string if known. + Digest string `protobuf:"bytes,9,opt,name=digest,proto3" json:"digest,omitempty"` + // (Optional) The algorithm corresponding to the digest if known. 
+ HashType File_HashType `protobuf:"varint,10,opt,name=hash_type,json=hashType,proto3,enum=google.devtools.resultstore.v2.File_HashType" json:"hash_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *File) Reset() { *m = File{} } +func (m *File) String() string { return proto.CompactTextString(m) } +func (*File) ProtoMessage() {} +func (*File) Descriptor() ([]byte, []int) { + return fileDescriptor_file_ce5c35947c0b9efa, []int{0} +} +func (m *File) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_File.Unmarshal(m, b) +} +func (m *File) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_File.Marshal(b, m, deterministic) +} +func (dst *File) XXX_Merge(src proto.Message) { + xxx_messageInfo_File.Merge(dst, src) +} +func (m *File) XXX_Size() int { + return xxx_messageInfo_File.Size(m) +} +func (m *File) XXX_DiscardUnknown() { + xxx_messageInfo_File.DiscardUnknown(m) +} + +var xxx_messageInfo_File proto.InternalMessageInfo + +func (m *File) GetUid() string { + if m != nil { + return m.Uid + } + return "" +} + +func (m *File) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *File) GetLength() *wrappers.Int64Value { + if m != nil { + return m.Length + } + return nil +} + +func (m *File) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func (m *File) GetArchiveEntry() *ArchiveEntry { + if m != nil { + return m.ArchiveEntry + } + return nil +} + +func (m *File) GetContentViewer() string { + if m != nil { + return m.ContentViewer + } + return "" +} + +func (m *File) GetHidden() bool { + if m != nil { + return m.Hidden + } + return false +} + +func (m *File) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *File) GetDigest() string { + if m != nil { + return m.Digest + } + return "" +} + +func (m *File) GetHashType() File_HashType { + if m != nil { + return m.HashType + } + return File_HASH_TYPE_UNSPECIFIED +} + +// Information specific to an entry in an archive. +type ArchiveEntry struct { + // The relative path of the entry within the archive. + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // (Optional) The uncompressed length of the archive entry in bytes. Allows + // the entry size to be shown in the UI. Omit if the length is not known. + Length *wrappers.Int64Value `protobuf:"bytes,2,opt,name=length,proto3" json:"length,omitempty"` + // (Optional) The content-type (aka MIME-type) of the archive entry. (e.g. + // text/plain, image/jpeg, text/html, etc). This is sent to the web browser + // so it knows how to handle the entry. 
+ ContentType string `protobuf:"bytes,3,opt,name=content_type,json=contentType,proto3" json:"content_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArchiveEntry) Reset() { *m = ArchiveEntry{} } +func (m *ArchiveEntry) String() string { return proto.CompactTextString(m) } +func (*ArchiveEntry) ProtoMessage() {} +func (*ArchiveEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_file_ce5c35947c0b9efa, []int{1} +} +func (m *ArchiveEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArchiveEntry.Unmarshal(m, b) +} +func (m *ArchiveEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArchiveEntry.Marshal(b, m, deterministic) +} +func (dst *ArchiveEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArchiveEntry.Merge(dst, src) +} +func (m *ArchiveEntry) XXX_Size() int { + return xxx_messageInfo_ArchiveEntry.Size(m) +} +func (m *ArchiveEntry) XXX_DiscardUnknown() { + xxx_messageInfo_ArchiveEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_ArchiveEntry proto.InternalMessageInfo + +func (m *ArchiveEntry) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *ArchiveEntry) GetLength() *wrappers.Int64Value { + if m != nil { + return m.Length + } + return nil +} + +func (m *ArchiveEntry) GetContentType() string { + if m != nil { + return m.ContentType + } + return "" +} + +func init() { + proto.RegisterType((*File)(nil), "google.devtools.resultstore.v2.File") + proto.RegisterType((*ArchiveEntry)(nil), "google.devtools.resultstore.v2.ArchiveEntry") + proto.RegisterEnum("google.devtools.resultstore.v2.File_HashType", File_HashType_name, File_HashType_value) +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/file.proto", fileDescriptor_file_ce5c35947c0b9efa) +} + +var fileDescriptor_file_ce5c35947c0b9efa = []byte{ + // 453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x53, 0x61, 0x8b, 0xd3, 0x40, + 0x10, 0x35, 0x4d, 0xed, 0xa5, 0xd3, 0xde, 0x11, 0x16, 0x94, 0xa8, 0x20, 0xb1, 0x20, 0x54, 0xd0, + 0x0d, 0xe6, 0xbc, 0xfb, 0xe2, 0xa7, 0x6a, 0x7b, 0xa4, 0x82, 0x52, 0xd3, 0xf3, 0x40, 0xbf, 0x94, + 0x5c, 0x33, 0x97, 0x2c, 0xc4, 0x6c, 0xdc, 0xdd, 0xe6, 0xa8, 0xbf, 0xd6, 0x9f, 0x22, 0xd9, 0x6c, + 0x21, 0x20, 0x7a, 0xe0, 0xb7, 0x99, 0x37, 0xef, 0xcd, 0xe4, 0xed, 0x23, 0xf0, 0x22, 0xe3, 0x3c, + 0x2b, 0x30, 0x48, 0xb1, 0x56, 0x9c, 0x17, 0x32, 0x10, 0x28, 0x77, 0x85, 0x92, 0x8a, 0x0b, 0x0c, + 0xea, 0x30, 0xb8, 0x61, 0x05, 0xd2, 0x4a, 0x70, 0xc5, 0xc9, 0xd3, 0x96, 0x4a, 0x0f, 0x54, 0xda, + 0xa1, 0xd2, 0x3a, 0x7c, 0x6c, 0xe6, 0x81, 0x66, 0x5f, 0xef, 0x6e, 0x82, 0x5b, 0x91, 0x54, 0x15, + 0x0a, 0xd9, 0xea, 0x27, 0xbf, 0x6c, 0xe8, 0x5f, 0xb0, 0x02, 0x89, 0x0b, 0xf6, 0x8e, 0xa5, 0x9e, + 0xe5, 0x5b, 0xd3, 0x61, 0xdc, 0x94, 0x1a, 0x11, 0xcc, 0xeb, 0x19, 0x44, 0x30, 0x72, 0x0a, 0x83, + 0x02, 0xcb, 0x4c, 0xe5, 0x9e, 0xed, 0x5b, 0xd3, 0x51, 0xf8, 0x84, 0x9a, 0xeb, 0x87, 0xed, 0x74, + 0x59, 0xaa, 0xf3, 0x37, 0x57, 0x49, 0xb1, 0xc3, 0xd8, 0x50, 0xc9, 0x33, 0x18, 0x6f, 0x79, 0xa9, + 0xb0, 0x54, 0x1b, 0xb5, 0xaf, 0xd0, 0xeb, 0xeb, 0x7d, 0x23, 0x83, 0x5d, 0xee, 0x2b, 0x24, 0x9f, + 0xe1, 0x38, 0x11, 0xdb, 0x9c, 0xd5, 0xb8, 0xc1, 0x52, 0x89, 0xbd, 0x77, 0x5f, 0xaf, 0x7f, 0x49, + 0xff, 0x6d, 0x8e, 0xce, 0x5a, 0xd1, 0xa2, 0xd1, 0xc4, 0xe3, 0xa4, 0xd3, 0x91, 0xe7, 0x70, 0x72, + 0xb8, 0x5a, 0x33, 0xbc, 0x45, 0xe1, 0x0d, 0xf4, 0xdd, 0x63, 0x83, 0x5e, 0x69, 0x90, 0x3c, 
0x84, + 0x41, 0xce, 0xd2, 0x14, 0x4b, 0xef, 0xc8, 0xb7, 0xa6, 0x4e, 0x6c, 0x3a, 0xe2, 0xc3, 0x28, 0x45, + 0xb9, 0x15, 0xac, 0x52, 0x8c, 0x97, 0x9e, 0xd3, 0x7e, 0x73, 0x07, 0x6a, 0x94, 0x29, 0xcb, 0x50, + 0x2a, 0x6f, 0xa8, 0x87, 0xa6, 0x23, 0x1f, 0x60, 0x98, 0x27, 0x32, 0x6f, 0xbd, 0x82, 0x6f, 0x4d, + 0x4f, 0xc2, 0x57, 0x77, 0xf9, 0x68, 0x02, 0xa0, 0x51, 0x22, 0xf3, 0xe6, 0x35, 0x62, 0x27, 0x37, + 0xd5, 0x64, 0x0e, 0xce, 0x01, 0x25, 0x8f, 0xe0, 0x41, 0x34, 0x5b, 0x47, 0x9b, 0xcb, 0xaf, 0xab, + 0xc5, 0xe6, 0xcb, 0xa7, 0xf5, 0x6a, 0xf1, 0x7e, 0x79, 0xb1, 0x5c, 0xcc, 0xdd, 0x7b, 0xe4, 0x08, + 0xec, 0x8f, 0xf3, 0x33, 0xd7, 0x22, 0x0e, 0xf4, 0xd7, 0xd1, 0xec, 0xb5, 0xdb, 0x23, 0x00, 0x83, + 0x75, 0x34, 0x0b, 0xcf, 0xce, 0x5d, 0x7b, 0xf2, 0x13, 0xc6, 0xdd, 0x87, 0x22, 0x04, 0xfa, 0x55, + 0xa2, 0x72, 0x13, 0xb5, 0xae, 0x3b, 0xc9, 0xf6, 0xfe, 0x3f, 0x59, 0xfb, 0x8f, 0x64, 0xdf, 0xfd, + 0x80, 0xc9, 0x96, 0x7f, 0xbf, 0xc3, 0xff, 0xca, 0xfa, 0xb6, 0x34, 0x8c, 0x8c, 0x17, 0x49, 0x99, + 0x51, 0x2e, 0xb2, 0x20, 0xc3, 0x52, 0x1f, 0x0f, 0xda, 0x51, 0x52, 0x31, 0xf9, 0xb7, 0x1f, 0xe2, + 0x6d, 0xa7, 0xbd, 0x1e, 0x68, 0xd5, 0xe9, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x08, 0xc7, 0x53, + 0x67, 0x45, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file_set.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file_set.pb.go new file mode 100644 index 0000000..abe72ea --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/file_set.pb.go @@ -0,0 +1,169 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/file_set.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a set of files within an Invocation. Can contain other file sets. +type FileSet struct { + // The format of this FileSet resource name must be: + // invocations/${INVOCATION_ID}/fileSets/${FILE_SET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the file set. They must match the + // resource name after proper encoding. + Id *FileSet_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // List of names of other file sets that are referenced from this one. + // Each name must point to a file set under the same invocation. The name + // format must be: invocations/${INVOCATION_ID}/fileSets/${FILE_SET_ID} + FileSets []string `protobuf:"bytes,3,rep,name=file_sets,json=fileSets,proto3" json:"file_sets,omitempty"` + // Files that are contained within this file set. 
+ Files []*File `protobuf:"bytes,4,rep,name=files,proto3" json:"files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileSet) Reset() { *m = FileSet{} } +func (m *FileSet) String() string { return proto.CompactTextString(m) } +func (*FileSet) ProtoMessage() {} +func (*FileSet) Descriptor() ([]byte, []int) { + return fileDescriptor_file_set_ffa23055473f4928, []int{0} +} +func (m *FileSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileSet.Unmarshal(m, b) +} +func (m *FileSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileSet.Marshal(b, m, deterministic) +} +func (dst *FileSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileSet.Merge(dst, src) +} +func (m *FileSet) XXX_Size() int { + return xxx_messageInfo_FileSet.Size(m) +} +func (m *FileSet) XXX_DiscardUnknown() { + xxx_messageInfo_FileSet.DiscardUnknown(m) +} + +var xxx_messageInfo_FileSet proto.InternalMessageInfo + +func (m *FileSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FileSet) GetId() *FileSet_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *FileSet) GetFileSets() []string { + if m != nil { + return m.FileSets + } + return nil +} + +func (m *FileSet) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +// The resource ID components that identify the FileSet. +type FileSet_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The FileSet ID. + FileSetId string `protobuf:"bytes,2,opt,name=file_set_id,json=fileSetId,proto3" json:"file_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileSet_Id) Reset() { *m = FileSet_Id{} } +func (m *FileSet_Id) String() string { return proto.CompactTextString(m) } +func (*FileSet_Id) ProtoMessage() {} +func (*FileSet_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_file_set_ffa23055473f4928, []int{0, 0} +} +func (m *FileSet_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileSet_Id.Unmarshal(m, b) +} +func (m *FileSet_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileSet_Id.Marshal(b, m, deterministic) +} +func (dst *FileSet_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileSet_Id.Merge(dst, src) +} +func (m *FileSet_Id) XXX_Size() int { + return xxx_messageInfo_FileSet_Id.Size(m) +} +func (m *FileSet_Id) XXX_DiscardUnknown() { + xxx_messageInfo_FileSet_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_FileSet_Id proto.InternalMessageInfo + +func (m *FileSet_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *FileSet_Id) GetFileSetId() string { + if m != nil { + return m.FileSetId + } + return "" +} + +func init() { + proto.RegisterType((*FileSet)(nil), "google.devtools.resultstore.v2.FileSet") + proto.RegisterType((*FileSet_Id)(nil), "google.devtools.resultstore.v2.FileSet.Id") +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/file_set.proto", fileDescriptor_file_set_ffa23055473f4928) +} + +var fileDescriptor_file_set_ffa23055473f4928 = []byte{ + // 271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xbb, 0x4f, 0xc3, 0x30, + 0x10, 0x87, 0x95, 0xa4, 0x3c, 
0x72, 0x81, 0xc5, 0x53, 0x54, 0xa4, 0x2a, 0x2a, 0x0c, 0x01, 0x09, + 0x5b, 0x0a, 0x5b, 0xd9, 0x18, 0x90, 0xbc, 0xa1, 0xb0, 0xb1, 0x44, 0xa1, 0xbe, 0x5a, 0x96, 0xdc, + 0x5c, 0x89, 0x4d, 0xfe, 0x7b, 0x24, 0x44, 0x1e, 0xa2, 0x0b, 0xb4, 0xdb, 0xd9, 0xfe, 0xee, 0x77, + 0x9f, 0x0f, 0xee, 0x35, 0x91, 0xb6, 0x28, 0x14, 0x76, 0x9e, 0xc8, 0x3a, 0xd1, 0xa2, 0xfb, 0xb4, + 0xde, 0x79, 0x6a, 0x51, 0x74, 0x85, 0xd8, 0x18, 0x8b, 0x95, 0x43, 0xcf, 0x77, 0x2d, 0x79, 0x62, + 0x8b, 0x01, 0xe7, 0x13, 0xce, 0xf7, 0x70, 0xde, 0x15, 0xf3, 0xdb, 0x23, 0xe2, 0x86, 0xa8, 0xe5, + 0x57, 0x00, 0x67, 0xcf, 0xc6, 0xe2, 0x2b, 0x7a, 0xc6, 0x60, 0xd6, 0xd4, 0x5b, 0x4c, 0x83, 0x2c, + 0xc8, 0xe3, 0xb2, 0xaf, 0xd9, 0x0a, 0x42, 0xa3, 0xd2, 0x30, 0x0b, 0xf2, 0xa4, 0xb8, 0xe3, 0xff, + 0xcf, 0xe5, 0x63, 0x10, 0x97, 0xaa, 0x0c, 0x8d, 0x62, 0x57, 0x10, 0x4f, 0xe2, 0x2e, 0x8d, 0xb2, + 0x28, 0x8f, 0xcb, 0xf3, 0xcd, 0x80, 0x38, 0xb6, 0x82, 0x93, 0x9f, 0xda, 0xa5, 0xb3, 0x2c, 0xca, + 0x93, 0xe2, 0xe6, 0x98, 0xec, 0x72, 0x68, 0x99, 0x4b, 0x08, 0xa5, 0x62, 0xd7, 0x70, 0x69, 0x9a, + 0x8e, 0xd6, 0xb5, 0x37, 0xd4, 0x54, 0x46, 0x8d, 0xde, 0x17, 0xbf, 0x97, 0x52, 0xb1, 0x05, 0x24, + 0x93, 0x43, 0x35, 0x7e, 0x24, 0x2e, 0xe3, 0xd1, 0x42, 0xaa, 0xa7, 0x0f, 0x58, 0xae, 0x69, 0x7b, + 0x60, 0xf8, 0x4b, 0xf0, 0x26, 0x47, 0x42, 0x93, 0xad, 0x1b, 0xcd, 0xa9, 0xd5, 0x42, 0x63, 0xd3, + 0xef, 0x50, 0x0c, 0x4f, 0xf5, 0xce, 0xb8, 0xbf, 0x36, 0xfe, 0xb8, 0x77, 0x7c, 0x3f, 0xed, 0xbb, + 0x1e, 0xbe, 0x03, 0x00, 0x00, 0xff, 0xff, 0xc9, 0x67, 0xbb, 0x64, 0xf5, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/invocation.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/invocation.pb.go new file mode 100644 index 0000000..e68002b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/invocation.pb.go @@ -0,0 +1,561 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/invocation.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An Invocation typically represents the result of running a tool. Each has a +// unique ID, typically generated by the server. Target resources under each +// Invocation contain the bulk of the data. +type Invocation struct { + // The resource name. Its format must be: + // invocations/${INVOCATION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Invocation. They must match + // the resource name after proper encoding. + Id *Invocation_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The aggregate status of the invocation. + StatusAttributes *StatusAttributes `protobuf:"bytes,3,opt,name=status_attributes,json=statusAttributes,proto3" json:"status_attributes,omitempty"` + // When this invocation started and its duration. 
+ Timing *Timing `protobuf:"bytes,4,opt,name=timing,proto3" json:"timing,omitempty"` + // Attributes of this invocation. + InvocationAttributes *InvocationAttributes `protobuf:"bytes,5,opt,name=invocation_attributes,json=invocationAttributes,proto3" json:"invocation_attributes,omitempty"` + // The workspace the tool was run in. + WorkspaceInfo *WorkspaceInfo `protobuf:"bytes,6,opt,name=workspace_info,json=workspaceInfo,proto3" json:"workspace_info,omitempty"` + // Arbitrary name-value pairs. + // This is implemented as a multi-map. Multiple properties are allowed with + // the same key. Properties will be returned in lexicographical order by key. + Properties []*Property `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty"` + // A list of file references for invocation level files. + // The file IDs must be unique within this list. Duplicate file IDs will + // result in an error. Files will be returned in lexicographical order by ID. + // Use this field to specify build logs, and other invocation level logs. + Files []*File `protobuf:"bytes,8,rep,name=files,proto3" json:"files,omitempty"` + // Summary of aggregate coverage across all Actions in this Invocation. + // the server populates this for you in the post-processing phase. + CoverageSummaries []*LanguageCoverageSummary `protobuf:"bytes,9,rep,name=coverage_summaries,json=coverageSummaries,proto3" json:"coverage_summaries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Invocation) Reset() { *m = Invocation{} } +func (m *Invocation) String() string { return proto.CompactTextString(m) } +func (*Invocation) ProtoMessage() {} +func (*Invocation) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{0} +} +func (m *Invocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Invocation.Unmarshal(m, b) +} +func (m *Invocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Invocation.Marshal(b, m, deterministic) +} +func (dst *Invocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Invocation.Merge(dst, src) +} +func (m *Invocation) XXX_Size() int { + return xxx_messageInfo_Invocation.Size(m) +} +func (m *Invocation) XXX_DiscardUnknown() { + xxx_messageInfo_Invocation.DiscardUnknown(m) +} + +var xxx_messageInfo_Invocation proto.InternalMessageInfo + +func (m *Invocation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Invocation) GetId() *Invocation_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *Invocation) GetStatusAttributes() *StatusAttributes { + if m != nil { + return m.StatusAttributes + } + return nil +} + +func (m *Invocation) GetTiming() *Timing { + if m != nil { + return m.Timing + } + return nil +} + +func (m *Invocation) GetInvocationAttributes() *InvocationAttributes { + if m != nil { + return m.InvocationAttributes + } + return nil +} + +func (m *Invocation) GetWorkspaceInfo() *WorkspaceInfo { + if m != nil { + return m.WorkspaceInfo + } + return nil +} + +func (m *Invocation) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Invocation) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +func (m *Invocation) GetCoverageSummaries() []*LanguageCoverageSummary { + if m != nil { + return m.CoverageSummaries + } + return nil +} + +// The resource ID components that identify the Invocation. 
+type Invocation_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Invocation_Id) Reset() { *m = Invocation_Id{} } +func (m *Invocation_Id) String() string { return proto.CompactTextString(m) } +func (*Invocation_Id) ProtoMessage() {} +func (*Invocation_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{0, 0} +} +func (m *Invocation_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Invocation_Id.Unmarshal(m, b) +} +func (m *Invocation_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Invocation_Id.Marshal(b, m, deterministic) +} +func (dst *Invocation_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_Invocation_Id.Merge(dst, src) +} +func (m *Invocation_Id) XXX_Size() int { + return xxx_messageInfo_Invocation_Id.Size(m) +} +func (m *Invocation_Id) XXX_DiscardUnknown() { + xxx_messageInfo_Invocation_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_Invocation_Id proto.InternalMessageInfo + +func (m *Invocation_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +// If known, represents the state of the user/build-system workspace. +type WorkspaceContext struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkspaceContext) Reset() { *m = WorkspaceContext{} } +func (m *WorkspaceContext) String() string { return proto.CompactTextString(m) } +func (*WorkspaceContext) ProtoMessage() {} +func (*WorkspaceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{1} +} +func (m *WorkspaceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkspaceContext.Unmarshal(m, b) +} +func (m *WorkspaceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkspaceContext.Marshal(b, m, deterministic) +} +func (dst *WorkspaceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkspaceContext.Merge(dst, src) +} +func (m *WorkspaceContext) XXX_Size() int { + return xxx_messageInfo_WorkspaceContext.Size(m) +} +func (m *WorkspaceContext) XXX_DiscardUnknown() { + xxx_messageInfo_WorkspaceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkspaceContext proto.InternalMessageInfo + +// Describes the workspace under which the tool was invoked, this includes +// information that was fed into the command, the source code referenced, and +// the tool itself. +type WorkspaceInfo struct { + // Data about the workspace that might be useful for debugging. + WorkspaceContext *WorkspaceContext `protobuf:"bytes,1,opt,name=workspace_context,json=workspaceContext,proto3" json:"workspace_context,omitempty"` + // Where the tool was invoked + Hostname string `protobuf:"bytes,3,opt,name=hostname,proto3" json:"hostname,omitempty"` + // The client's working directory where the build/test was run from. + WorkingDirectory string `protobuf:"bytes,4,opt,name=working_directory,json=workingDirectory,proto3" json:"working_directory,omitempty"` + // Tools should set tool_tag to the name of the tool or use case. + ToolTag string `protobuf:"bytes,5,opt,name=tool_tag,json=toolTag,proto3" json:"tool_tag,omitempty"` + // The command lines invoked. 
The first command line is the one typed by the + // user, then each one after that should be an expansion of the previous + // command line. + CommandLines []*CommandLine `protobuf:"bytes,7,rep,name=command_lines,json=commandLines,proto3" json:"command_lines,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkspaceInfo) Reset() { *m = WorkspaceInfo{} } +func (m *WorkspaceInfo) String() string { return proto.CompactTextString(m) } +func (*WorkspaceInfo) ProtoMessage() {} +func (*WorkspaceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{2} +} +func (m *WorkspaceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkspaceInfo.Unmarshal(m, b) +} +func (m *WorkspaceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkspaceInfo.Marshal(b, m, deterministic) +} +func (dst *WorkspaceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkspaceInfo.Merge(dst, src) +} +func (m *WorkspaceInfo) XXX_Size() int { + return xxx_messageInfo_WorkspaceInfo.Size(m) +} +func (m *WorkspaceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_WorkspaceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkspaceInfo proto.InternalMessageInfo + +func (m *WorkspaceInfo) GetWorkspaceContext() *WorkspaceContext { + if m != nil { + return m.WorkspaceContext + } + return nil +} + +func (m *WorkspaceInfo) GetHostname() string { + if m != nil { + return m.Hostname + } + return "" +} + +func (m *WorkspaceInfo) GetWorkingDirectory() string { + if m != nil { + return m.WorkingDirectory + } + return "" +} + +func (m *WorkspaceInfo) GetToolTag() string { + if m != nil { + return m.ToolTag + } + return "" +} + +func (m *WorkspaceInfo) GetCommandLines() []*CommandLine { + if m != nil { + return m.CommandLines + } + return nil +} + +// The command and arguments that produced this Invocation. +type CommandLine struct { + // A label describing this command line. + Label string `protobuf:"bytes,1,opt,name=label,proto3" json:"label,omitempty"` + // The command-line tool that is run: argv[0]. + Tool string `protobuf:"bytes,2,opt,name=tool,proto3" json:"tool,omitempty"` + // The arguments to the above tool: argv[1]...argv[N]. + Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"` + // The actual command that was run with the tool. (e.g. "build", or "test") + // Omit if the tool doesn't accept a command. + // This is a duplicate of one of the fields in args. 
+ Command string `protobuf:"bytes,4,opt,name=command,proto3" json:"command,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommandLine) Reset() { *m = CommandLine{} } +func (m *CommandLine) String() string { return proto.CompactTextString(m) } +func (*CommandLine) ProtoMessage() {} +func (*CommandLine) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{3} +} +func (m *CommandLine) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommandLine.Unmarshal(m, b) +} +func (m *CommandLine) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommandLine.Marshal(b, m, deterministic) +} +func (dst *CommandLine) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommandLine.Merge(dst, src) +} +func (m *CommandLine) XXX_Size() int { + return xxx_messageInfo_CommandLine.Size(m) +} +func (m *CommandLine) XXX_DiscardUnknown() { + xxx_messageInfo_CommandLine.DiscardUnknown(m) +} + +var xxx_messageInfo_CommandLine proto.InternalMessageInfo + +func (m *CommandLine) GetLabel() string { + if m != nil { + return m.Label + } + return "" +} + +func (m *CommandLine) GetTool() string { + if m != nil { + return m.Tool + } + return "" +} + +func (m *CommandLine) GetArgs() []string { + if m != nil { + return m.Args + } + return nil +} + +func (m *CommandLine) GetCommand() string { + if m != nil { + return m.Command + } + return "" +} + +// Attributes that apply to all invocations. +type InvocationAttributes struct { + // The project ID this invocation is associated with. This must be + // set in the CreateInvocation call, and can't be changed. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The list of users in the command chain. The first user in this sequence + // is the one who instigated the first command in the chain. + Users []string `protobuf:"bytes,2,rep,name=users,proto3" json:"users,omitempty"` + // Labels to categorize this invocation. + // This is implemented as a set. All labels will be unique. Any duplicate + // labels added will be ignored. Labels will be returned in lexicographical + // order. Labels should be short, easy to read, and you + // shouldn't have more than a handful. + // Labels should match regex \w([- \w]*\w)? + // Labels should not be used for unique properties such as unique IDs. + // Use properties in cases that don't meet these conditions. + Labels []string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty"` + // This field describes the overall context or purpose of this invocation. + // It will be used in the UI to give users more information about + // how or why this invocation was run. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // If this Invocation was run in the context of a larger Continuous + // Integration build or other automated system, this field may contain more + // information about the greater context. 
+ InvocationContexts []*InvocationContext `protobuf:"bytes,6,rep,name=invocation_contexts,json=invocationContexts,proto3" json:"invocation_contexts,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InvocationAttributes) Reset() { *m = InvocationAttributes{} } +func (m *InvocationAttributes) String() string { return proto.CompactTextString(m) } +func (*InvocationAttributes) ProtoMessage() {} +func (*InvocationAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{4} +} +func (m *InvocationAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InvocationAttributes.Unmarshal(m, b) +} +func (m *InvocationAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InvocationAttributes.Marshal(b, m, deterministic) +} +func (dst *InvocationAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_InvocationAttributes.Merge(dst, src) +} +func (m *InvocationAttributes) XXX_Size() int { + return xxx_messageInfo_InvocationAttributes.Size(m) +} +func (m *InvocationAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_InvocationAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_InvocationAttributes proto.InternalMessageInfo + +func (m *InvocationAttributes) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *InvocationAttributes) GetUsers() []string { + if m != nil { + return m.Users + } + return nil +} + +func (m *InvocationAttributes) GetLabels() []string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *InvocationAttributes) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *InvocationAttributes) GetInvocationContexts() []*InvocationContext { + if m != nil { + return m.InvocationContexts + } + return nil +} + +// Describes the invocation context which includes a display name and URL. +type InvocationContext struct { + // A human readable name for the context under which this Invocation was run. 
+ DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // A URL pointing to a UI containing more information + Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InvocationContext) Reset() { *m = InvocationContext{} } +func (m *InvocationContext) String() string { return proto.CompactTextString(m) } +func (*InvocationContext) ProtoMessage() {} +func (*InvocationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_invocation_3c3642aab1b671da, []int{5} +} +func (m *InvocationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InvocationContext.Unmarshal(m, b) +} +func (m *InvocationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InvocationContext.Marshal(b, m, deterministic) +} +func (dst *InvocationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_InvocationContext.Merge(dst, src) +} +func (m *InvocationContext) XXX_Size() int { + return xxx_messageInfo_InvocationContext.Size(m) +} +func (m *InvocationContext) XXX_DiscardUnknown() { + xxx_messageInfo_InvocationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_InvocationContext proto.InternalMessageInfo + +func (m *InvocationContext) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *InvocationContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func init() { + proto.RegisterType((*Invocation)(nil), "google.devtools.resultstore.v2.Invocation") + proto.RegisterType((*Invocation_Id)(nil), "google.devtools.resultstore.v2.Invocation.Id") + proto.RegisterType((*WorkspaceContext)(nil), "google.devtools.resultstore.v2.WorkspaceContext") + proto.RegisterType((*WorkspaceInfo)(nil), "google.devtools.resultstore.v2.WorkspaceInfo") + proto.RegisterType((*CommandLine)(nil), "google.devtools.resultstore.v2.CommandLine") + proto.RegisterType((*InvocationAttributes)(nil), "google.devtools.resultstore.v2.InvocationAttributes") + proto.RegisterType((*InvocationContext)(nil), "google.devtools.resultstore.v2.InvocationContext") +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/invocation.proto", fileDescriptor_invocation_3c3642aab1b671da) +} + +var fileDescriptor_invocation_3c3642aab1b671da = []byte{ + // 697 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x71, 0x6b, 0x13, 0x31, + 0x14, 0xa7, 0xed, 0xda, 0xad, 0xaf, 0xab, 0x74, 0x71, 0xca, 0x59, 0x50, 0x6a, 0x15, 0xe9, 0x18, + 0x5e, 0xb5, 0x2a, 0x82, 0xa2, 0xa0, 0x13, 0x59, 0x61, 0xc8, 0xb8, 0x0d, 0x04, 0x41, 0x4a, 0x7a, + 0x97, 0xc6, 0xe8, 0x35, 0xa9, 0x49, 0xae, 0xb3, 0xdf, 0xc7, 0x0f, 0xe5, 0x57, 0xf0, 0x5b, 0x48, + 0x72, 0xb9, 0xf6, 0x56, 0xa7, 0xd7, 0xff, 0xf2, 0x7e, 0xf7, 0x7e, 0xbf, 0xbc, 0xbc, 0xfc, 0xf2, + 0x0e, 0xfa, 0x54, 0x08, 0x1a, 0x93, 0x7e, 0x44, 0xe6, 0x5a, 0x88, 0x58, 0xf5, 0x25, 0x51, 0x49, + 0xac, 0x95, 0x16, 0x92, 0xf4, 0xe7, 0x83, 0x3e, 0xe3, 0x73, 0x11, 0x62, 0xcd, 0x04, 0xf7, 0x67, + 0x52, 0x68, 0x81, 0xee, 0xa4, 0x04, 0x3f, 0x23, 0xf8, 0x39, 0x82, 0x3f, 0x1f, 0xb4, 0x0f, 0x0b, + 0x04, 0x43, 0x31, 0x9d, 0x66, 0x62, 0xed, 0x67, 0x85, 0xc9, 0x73, 0x22, 0x31, 0x25, 0x23, 0x95, + 0x4c, 0xa7, 0x58, 0x2e, 0x1c, 0xed, 0xa0, 0x80, 0x36, 0x61, 0x31, 0x49, 0x53, 0xbb, 0xbf, 0xaa, + 0x00, 0xc3, 0xe5, 0x19, 0x10, 0x82, 0x2d, 0x8e, 0xa7, 0xc4, 0x2b, 
0x75, 0x4a, 0xbd, 0x7a, 0x60, + 0xd7, 0xe8, 0x15, 0x94, 0x59, 0xe4, 0x95, 0x3b, 0xa5, 0x5e, 0x63, 0xf0, 0xd0, 0xff, 0xff, 0xf1, + 0xfc, 0x95, 0x96, 0x3f, 0x8c, 0x82, 0x32, 0x8b, 0xd0, 0x67, 0xd8, 0x53, 0x1a, 0xeb, 0x44, 0x8d, + 0xb0, 0xd6, 0x92, 0x8d, 0x13, 0x4d, 0x94, 0x57, 0xb1, 0x6a, 0x8f, 0x8a, 0xd4, 0xce, 0x2c, 0xf1, + 0xcd, 0x92, 0x17, 0xb4, 0xd4, 0x1a, 0x82, 0x5e, 0x43, 0x4d, 0xb3, 0x29, 0xe3, 0xd4, 0xdb, 0xb2, + 0x9a, 0x0f, 0x8a, 0x34, 0xcf, 0x6d, 0x76, 0xe0, 0x58, 0x88, 0xc1, 0x8d, 0xd5, 0x1d, 0xe6, 0x4b, + 0xac, 0x5a, 0xb9, 0xa7, 0x9b, 0x1f, 0x38, 0x57, 0xe6, 0x3e, 0xbb, 0x02, 0x45, 0xe7, 0x70, 0xed, + 0x42, 0xc8, 0x6f, 0x6a, 0x86, 0x43, 0x32, 0x62, 0x7c, 0x22, 0xbc, 0xda, 0x66, 0x4d, 0xfd, 0x98, + 0xb1, 0x86, 0x7c, 0x22, 0x82, 0xe6, 0x45, 0x3e, 0x44, 0xc7, 0x00, 0x33, 0x29, 0x66, 0x44, 0x6a, + 0x46, 0x94, 0xb7, 0xdd, 0xa9, 0xf4, 0x1a, 0x83, 0x5e, 0x91, 0xe2, 0x69, 0xca, 0x58, 0x04, 0x39, + 0x2e, 0x7a, 0x01, 0x55, 0xe3, 0x0c, 0xe5, 0xed, 0x58, 0x91, 0xfb, 0x45, 0x22, 0xef, 0x59, 0x4c, + 0x82, 0x94, 0x82, 0x26, 0x80, 0xd6, 0xcc, 0x68, 0xaa, 0xa9, 0x5b, 0xa1, 0xe7, 0x45, 0x42, 0x27, + 0x98, 0xd3, 0x04, 0x53, 0x72, 0xe4, 0x14, 0xce, 0x52, 0x37, 0x07, 0x7b, 0xe1, 0x25, 0x80, 0x11, + 0xd5, 0x3e, 0x80, 0xf2, 0x30, 0x42, 0xf7, 0xa0, 0x99, 0xbb, 0x34, 0x16, 0x39, 0xbf, 0xee, 0xae, + 0xc0, 0x61, 0xd4, 0x45, 0xd0, 0x5a, 0x36, 0xee, 0x48, 0x70, 0x4d, 0x7e, 0xe8, 0xee, 0xcf, 0x32, + 0x34, 0x2f, 0x75, 0xd3, 0xd8, 0x73, 0x75, 0x29, 0x61, 0x9a, 0x66, 0xe5, 0x36, 0xb0, 0xe7, 0xba, + 0x7c, 0xd0, 0xba, 0x58, 0x43, 0x50, 0x1b, 0x76, 0xbe, 0x08, 0xa5, 0xed, 0xa3, 0xaa, 0xd8, 0x22, + 0x97, 0x31, 0x3a, 0x4c, 0xb7, 0x66, 0x9c, 0x8e, 0x22, 0x26, 0x49, 0xa8, 0x85, 0x5c, 0x58, 0x17, + 0xd7, 0x53, 0x21, 0xc6, 0xe9, 0xbb, 0x0c, 0x47, 0xb7, 0x60, 0xc7, 0xd4, 0x30, 0xd2, 0x98, 0x5a, + 0x6b, 0xd6, 0x83, 0x6d, 0x13, 0x9f, 0x63, 0x8a, 0x4e, 0xa1, 0x69, 0xa6, 0x06, 0xe6, 0xd1, 0x28, + 0x66, 0x7c, 0x69, 0x82, 0xc3, 0xa2, 0xf2, 0x8f, 0x52, 0xd2, 0x09, 0xe3, 0x24, 0xd8, 0x0d, 0x57, + 0x81, 0xea, 0x12, 0x68, 0xe4, 0x3e, 0xa2, 0x7d, 0xa8, 0xc6, 0x78, 0x4c, 0x62, 0xd7, 0xe6, 0x34, + 0x30, 0xb3, 0xc2, 0xc8, 0xda, 0xc9, 0x50, 0x0f, 0xec, 0xda, 0x60, 0x58, 0x52, 0xf3, 0xbe, 0x2b, + 0x06, 0x33, 0x6b, 0xe4, 0xc1, 0xb6, 0x13, 0x77, 0x87, 0xcb, 0xc2, 0xee, 0xef, 0x12, 0xec, 0x5f, + 0xf5, 0x7e, 0xd0, 0x6d, 0xeb, 0xe9, 0xaf, 0x24, 0xd4, 0xab, 0xcb, 0xad, 0x3b, 0x64, 0x18, 0x99, + 0x7a, 0x12, 0x45, 0xa4, 0xf2, 0xca, 0x76, 0x9b, 0x34, 0x40, 0x37, 0xa1, 0x66, 0x0b, 0xcb, 0x76, + 0x77, 0x11, 0xea, 0x40, 0x23, 0x22, 0x2a, 0x94, 0x6c, 0x66, 0x76, 0x71, 0x35, 0xe4, 0x21, 0x34, + 0x86, 0xeb, 0x39, 0x3b, 0x39, 0x13, 0x28, 0xaf, 0x66, 0xdb, 0xf8, 0x78, 0xf3, 0x09, 0x90, 0xd9, + 0x00, 0xb1, 0x75, 0x48, 0x75, 0x8f, 0x61, 0xef, 0xaf, 0x44, 0x74, 0x17, 0x76, 0x23, 0xa6, 0x66, + 0x31, 0x5e, 0x8c, 0x72, 0x63, 0xb7, 0xe1, 0xb0, 0x0f, 0xc6, 0x24, 0x2d, 0xa8, 0x24, 0x32, 0x6b, + 0xb2, 0x59, 0xbe, 0xfd, 0x0e, 0xdd, 0x50, 0x4c, 0x0b, 0xaa, 0x3a, 0x2d, 0x7d, 0x1a, 0xba, 0x0c, + 0x2a, 0x62, 0xcc, 0xa9, 0x2f, 0x24, 0xed, 0x53, 0xc2, 0xed, 0xd8, 0x77, 0x7f, 0x35, 0x3c, 0x63, + 0xea, 0x5f, 0x3f, 0x89, 0x97, 0xb9, 0x70, 0x5c, 0xb3, 0xac, 0x27, 0x7f, 0x02, 0x00, 0x00, 0xff, + 0xff, 0xf0, 0x9c, 0x60, 0x5f, 0x0e, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_download.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_download.pb.go new file mode 100644 index 0000000..c431d7c --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_download.pb.go @@ -0,0 +1,2240 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/resultstore_download.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request passed into GetInvocation +type GetInvocationRequest struct { + // The name of the invocation to retrieve. It must match this format: + // invocations/${INVOCATION_ID} + // where INVOCATION_ID must be an RFC 4122-compliant random UUID. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInvocationRequest) Reset() { *m = GetInvocationRequest{} } +func (m *GetInvocationRequest) String() string { return proto.CompactTextString(m) } +func (*GetInvocationRequest) ProtoMessage() {} +func (*GetInvocationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{0} +} +func (m *GetInvocationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInvocationRequest.Unmarshal(m, b) +} +func (m *GetInvocationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInvocationRequest.Marshal(b, m, deterministic) +} +func (dst *GetInvocationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInvocationRequest.Merge(dst, src) +} +func (m *GetInvocationRequest) XXX_Size() int { + return xxx_messageInfo_GetInvocationRequest.Size(m) +} +func (m *GetInvocationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInvocationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInvocationRequest proto.InternalMessageInfo + +func (m *GetInvocationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into SearchInvocations +type SearchInvocationsRequest struct { + // The maximum number of items to return. Zero means all, but may be capped by + // the server. + PageSize int32 `protobuf:"varint,1,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. + // + // Types that are valid to be assigned to PageStart: + // *SearchInvocationsRequest_PageToken + // *SearchInvocationsRequest_Offset + PageStart isSearchInvocationsRequest_PageStart `protobuf_oneof:"page_start"` + // A filtering query string. + Query string `protobuf:"bytes,4,opt,name=query,proto3" json:"query,omitempty"` + // The project id to search under. 
+ ProjectId string `protobuf:"bytes,5,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchInvocationsRequest) Reset() { *m = SearchInvocationsRequest{} } +func (m *SearchInvocationsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchInvocationsRequest) ProtoMessage() {} +func (*SearchInvocationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{1} +} +func (m *SearchInvocationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchInvocationsRequest.Unmarshal(m, b) +} +func (m *SearchInvocationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchInvocationsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchInvocationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchInvocationsRequest.Merge(dst, src) +} +func (m *SearchInvocationsRequest) XXX_Size() int { + return xxx_messageInfo_SearchInvocationsRequest.Size(m) +} +func (m *SearchInvocationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchInvocationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchInvocationsRequest proto.InternalMessageInfo + +func (m *SearchInvocationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isSearchInvocationsRequest_PageStart interface { + isSearchInvocationsRequest_PageStart() +} + +type SearchInvocationsRequest_PageToken struct { + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type SearchInvocationsRequest_Offset struct { + Offset int64 `protobuf:"varint,3,opt,name=offset,proto3,oneof"` +} + +func (*SearchInvocationsRequest_PageToken) isSearchInvocationsRequest_PageStart() {} + +func (*SearchInvocationsRequest_Offset) isSearchInvocationsRequest_PageStart() {} + +func (m *SearchInvocationsRequest) GetPageStart() isSearchInvocationsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *SearchInvocationsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*SearchInvocationsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *SearchInvocationsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*SearchInvocationsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +func (m *SearchInvocationsRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func (m *SearchInvocationsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SearchInvocationsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SearchInvocationsRequest_OneofMarshaler, _SearchInvocationsRequest_OneofUnmarshaler, _SearchInvocationsRequest_OneofSizer, []interface{}{ + (*SearchInvocationsRequest_PageToken)(nil), + (*SearchInvocationsRequest_Offset)(nil), + } +} + +func _SearchInvocationsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SearchInvocationsRequest) + // page_start + switch x := m.PageStart.(type) { + case *SearchInvocationsRequest_PageToken: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *SearchInvocationsRequest_Offset: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("SearchInvocationsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _SearchInvocationsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SearchInvocationsRequest) + switch tag { + case 2: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &SearchInvocationsRequest_PageToken{x} + return true, err + case 3: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &SearchInvocationsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _SearchInvocationsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SearchInvocationsRequest) + // page_start + switch x := m.PageStart.(type) { + case *SearchInvocationsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *SearchInvocationsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling SearchInvocations +type SearchInvocationsResponse struct { + // Invocations matching the search, possibly capped at request.page_size or a + // server limit. + Invocations []*Invocation `protobuf:"bytes,1,rep,name=invocations,proto3" json:"invocations,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchInvocationsResponse) Reset() { *m = SearchInvocationsResponse{} } +func (m *SearchInvocationsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchInvocationsResponse) ProtoMessage() {} +func (*SearchInvocationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{2} +} +func (m *SearchInvocationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchInvocationsResponse.Unmarshal(m, b) +} +func (m *SearchInvocationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchInvocationsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchInvocationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchInvocationsResponse.Merge(dst, src) +} +func (m *SearchInvocationsResponse) XXX_Size() int { + return xxx_messageInfo_SearchInvocationsResponse.Size(m) +} +func (m *SearchInvocationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchInvocationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchInvocationsResponse proto.InternalMessageInfo + +func (m *SearchInvocationsResponse) GetInvocations() []*Invocation { + if m != nil { + return m.Invocations + } + return nil +} + +func (m *SearchInvocationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed into GetConfiguration +type GetConfigurationRequest struct { + // The name of the configuration to retrieve. It must match this format: + // invocations/${INVOCATION_ID}/configs/${CONFIGURATION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConfigurationRequest) Reset() { *m = GetConfigurationRequest{} } +func (m *GetConfigurationRequest) String() string { return proto.CompactTextString(m) } +func (*GetConfigurationRequest) ProtoMessage() {} +func (*GetConfigurationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{3} +} +func (m *GetConfigurationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConfigurationRequest.Unmarshal(m, b) +} +func (m *GetConfigurationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConfigurationRequest.Marshal(b, m, deterministic) +} +func (dst *GetConfigurationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConfigurationRequest.Merge(dst, src) +} +func (m *GetConfigurationRequest) XXX_Size() int { + return xxx_messageInfo_GetConfigurationRequest.Size(m) +} +func (m *GetConfigurationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetConfigurationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConfigurationRequest proto.InternalMessageInfo + +func (m *GetConfigurationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into ListConfigurations +type ListConfigurationsRequest struct { + // The invocation name of the configurations to retrieve. 
+ // It must match this format: invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // Zero means all, but may be capped by the server. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. + // + // Types that are valid to be assigned to PageStart: + // *ListConfigurationsRequest_PageToken + // *ListConfigurationsRequest_Offset + PageStart isListConfigurationsRequest_PageStart `protobuf_oneof:"page_start"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfigurationsRequest) Reset() { *m = ListConfigurationsRequest{} } +func (m *ListConfigurationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListConfigurationsRequest) ProtoMessage() {} +func (*ListConfigurationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{4} +} +func (m *ListConfigurationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfigurationsRequest.Unmarshal(m, b) +} +func (m *ListConfigurationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfigurationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListConfigurationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfigurationsRequest.Merge(dst, src) +} +func (m *ListConfigurationsRequest) XXX_Size() int { + return xxx_messageInfo_ListConfigurationsRequest.Size(m) +} +func (m *ListConfigurationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfigurationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfigurationsRequest proto.InternalMessageInfo + +func (m *ListConfigurationsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListConfigurationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isListConfigurationsRequest_PageStart interface { + isListConfigurationsRequest_PageStart() +} + +type ListConfigurationsRequest_PageToken struct { + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type ListConfigurationsRequest_Offset struct { + Offset int64 `protobuf:"varint,4,opt,name=offset,proto3,oneof"` +} + +func (*ListConfigurationsRequest_PageToken) isListConfigurationsRequest_PageStart() {} + +func (*ListConfigurationsRequest_Offset) isListConfigurationsRequest_PageStart() {} + +func (m *ListConfigurationsRequest) GetPageStart() isListConfigurationsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *ListConfigurationsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*ListConfigurationsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *ListConfigurationsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*ListConfigurationsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListConfigurationsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListConfigurationsRequest_OneofMarshaler, _ListConfigurationsRequest_OneofUnmarshaler, _ListConfigurationsRequest_OneofSizer, []interface{}{ + (*ListConfigurationsRequest_PageToken)(nil), + (*ListConfigurationsRequest_Offset)(nil), + } +} + +func _ListConfigurationsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListConfigurationsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListConfigurationsRequest_PageToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *ListConfigurationsRequest_Offset: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("ListConfigurationsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _ListConfigurationsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListConfigurationsRequest) + switch tag { + case 3: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &ListConfigurationsRequest_PageToken{x} + return true, err + case 4: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &ListConfigurationsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ListConfigurationsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListConfigurationsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListConfigurationsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *ListConfigurationsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling ListConfigurations +type ListConfigurationsResponse struct { + // Configurations matching the request invocation, + // possibly capped at request.page_size or a server limit. + Configurations []*Configuration `protobuf:"bytes,1,rep,name=configurations,proto3" json:"configurations,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfigurationsResponse) Reset() { *m = ListConfigurationsResponse{} } +func (m *ListConfigurationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListConfigurationsResponse) ProtoMessage() {} +func (*ListConfigurationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{5} +} +func (m *ListConfigurationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfigurationsResponse.Unmarshal(m, b) +} +func (m *ListConfigurationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfigurationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListConfigurationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfigurationsResponse.Merge(dst, src) +} +func (m *ListConfigurationsResponse) XXX_Size() int { + return xxx_messageInfo_ListConfigurationsResponse.Size(m) +} +func (m *ListConfigurationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfigurationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfigurationsResponse proto.InternalMessageInfo + +func (m *ListConfigurationsResponse) GetConfigurations() []*Configuration { + if m != nil { + return m.Configurations + } + return nil +} + +func (m *ListConfigurationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed into GetTarget +type GetTargetRequest struct { + // The name of the target to retrieve. It must match this format: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTargetRequest) Reset() { *m = GetTargetRequest{} } +func (m *GetTargetRequest) String() string { return proto.CompactTextString(m) } +func (*GetTargetRequest) ProtoMessage() {} +func (*GetTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{6} +} +func (m *GetTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTargetRequest.Unmarshal(m, b) +} +func (m *GetTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTargetRequest.Marshal(b, m, deterministic) +} +func (dst *GetTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTargetRequest.Merge(dst, src) +} +func (m *GetTargetRequest) XXX_Size() int { + return xxx_messageInfo_GetTargetRequest.Size(m) +} +func (m *GetTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTargetRequest proto.InternalMessageInfo + +func (m *GetTargetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into ListTargets +type ListTargetsRequest struct { + // The invocation name of the targets to retrieve. It must match this format: + // invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // Zero means all, but may be capped by the server. 
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. + // + // Types that are valid to be assigned to PageStart: + // *ListTargetsRequest_PageToken + // *ListTargetsRequest_Offset + PageStart isListTargetsRequest_PageStart `protobuf_oneof:"page_start"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTargetsRequest) Reset() { *m = ListTargetsRequest{} } +func (m *ListTargetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTargetsRequest) ProtoMessage() {} +func (*ListTargetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{7} +} +func (m *ListTargetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTargetsRequest.Unmarshal(m, b) +} +func (m *ListTargetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTargetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTargetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTargetsRequest.Merge(dst, src) +} +func (m *ListTargetsRequest) XXX_Size() int { + return xxx_messageInfo_ListTargetsRequest.Size(m) +} +func (m *ListTargetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTargetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTargetsRequest proto.InternalMessageInfo + +func (m *ListTargetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListTargetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isListTargetsRequest_PageStart interface { + isListTargetsRequest_PageStart() +} + +type ListTargetsRequest_PageToken struct { + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type ListTargetsRequest_Offset struct { + Offset int64 `protobuf:"varint,4,opt,name=offset,proto3,oneof"` +} + +func (*ListTargetsRequest_PageToken) isListTargetsRequest_PageStart() {} + +func (*ListTargetsRequest_Offset) isListTargetsRequest_PageStart() {} + +func (m *ListTargetsRequest) GetPageStart() isListTargetsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *ListTargetsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*ListTargetsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *ListTargetsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*ListTargetsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListTargetsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListTargetsRequest_OneofMarshaler, _ListTargetsRequest_OneofUnmarshaler, _ListTargetsRequest_OneofSizer, []interface{}{ + (*ListTargetsRequest_PageToken)(nil), + (*ListTargetsRequest_Offset)(nil), + } +} + +func _ListTargetsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListTargetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListTargetsRequest_PageToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *ListTargetsRequest_Offset: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("ListTargetsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _ListTargetsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListTargetsRequest) + switch tag { + case 3: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &ListTargetsRequest_PageToken{x} + return true, err + case 4: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &ListTargetsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ListTargetsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListTargetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListTargetsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *ListTargetsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling ListTargetsResponse +type ListTargetsResponse struct { + // Targets matching the request invocation, + // possibly capped at request.page_size or a server limit. + Targets []*Target `protobuf:"bytes,1,rep,name=targets,proto3" json:"targets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTargetsResponse) Reset() { *m = ListTargetsResponse{} } +func (m *ListTargetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTargetsResponse) ProtoMessage() {} +func (*ListTargetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{8} +} +func (m *ListTargetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTargetsResponse.Unmarshal(m, b) +} +func (m *ListTargetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTargetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTargetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTargetsResponse.Merge(dst, src) +} +func (m *ListTargetsResponse) XXX_Size() int { + return xxx_messageInfo_ListTargetsResponse.Size(m) +} +func (m *ListTargetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTargetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTargetsResponse proto.InternalMessageInfo + +func (m *ListTargetsResponse) GetTargets() []*Target { + if m != nil { + return m.Targets + } + return nil +} + +func (m *ListTargetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed into GetConfiguredTarget +type GetConfiguredTargetRequest struct { + // The name of the configured target to retrieve. It must match this format: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIGURATION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetConfiguredTargetRequest) Reset() { *m = GetConfiguredTargetRequest{} } +func (m *GetConfiguredTargetRequest) String() string { return proto.CompactTextString(m) } +func (*GetConfiguredTargetRequest) ProtoMessage() {} +func (*GetConfiguredTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{9} +} +func (m *GetConfiguredTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetConfiguredTargetRequest.Unmarshal(m, b) +} +func (m *GetConfiguredTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetConfiguredTargetRequest.Marshal(b, m, deterministic) +} +func (dst *GetConfiguredTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetConfiguredTargetRequest.Merge(dst, src) +} +func (m *GetConfiguredTargetRequest) XXX_Size() int { + return xxx_messageInfo_GetConfiguredTargetRequest.Size(m) +} +func (m *GetConfiguredTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetConfiguredTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetConfiguredTargetRequest proto.InternalMessageInfo + +func (m *GetConfiguredTargetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into ListConfiguredTargets +type ListConfiguredTargetsRequest struct { + // The invocation and target name of the configured targets to retrieve. 
+ // It must match this format: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // Zero means all, but may be capped by the server. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. + // + // Types that are valid to be assigned to PageStart: + // *ListConfiguredTargetsRequest_PageToken + // *ListConfiguredTargetsRequest_Offset + PageStart isListConfiguredTargetsRequest_PageStart `protobuf_oneof:"page_start"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfiguredTargetsRequest) Reset() { *m = ListConfiguredTargetsRequest{} } +func (m *ListConfiguredTargetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListConfiguredTargetsRequest) ProtoMessage() {} +func (*ListConfiguredTargetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{10} +} +func (m *ListConfiguredTargetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfiguredTargetsRequest.Unmarshal(m, b) +} +func (m *ListConfiguredTargetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfiguredTargetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListConfiguredTargetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfiguredTargetsRequest.Merge(dst, src) +} +func (m *ListConfiguredTargetsRequest) XXX_Size() int { + return xxx_messageInfo_ListConfiguredTargetsRequest.Size(m) +} +func (m *ListConfiguredTargetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfiguredTargetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfiguredTargetsRequest proto.InternalMessageInfo + +func (m *ListConfiguredTargetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListConfiguredTargetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isListConfiguredTargetsRequest_PageStart interface { + isListConfiguredTargetsRequest_PageStart() +} + +type ListConfiguredTargetsRequest_PageToken struct { + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type ListConfiguredTargetsRequest_Offset struct { + Offset int64 `protobuf:"varint,4,opt,name=offset,proto3,oneof"` +} + +func (*ListConfiguredTargetsRequest_PageToken) isListConfiguredTargetsRequest_PageStart() {} + +func (*ListConfiguredTargetsRequest_Offset) isListConfiguredTargetsRequest_PageStart() {} + +func (m *ListConfiguredTargetsRequest) GetPageStart() isListConfiguredTargetsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *ListConfiguredTargetsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*ListConfiguredTargetsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *ListConfiguredTargetsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*ListConfiguredTargetsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListConfiguredTargetsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListConfiguredTargetsRequest_OneofMarshaler, _ListConfiguredTargetsRequest_OneofUnmarshaler, _ListConfiguredTargetsRequest_OneofSizer, []interface{}{ + (*ListConfiguredTargetsRequest_PageToken)(nil), + (*ListConfiguredTargetsRequest_Offset)(nil), + } +} + +func _ListConfiguredTargetsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListConfiguredTargetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListConfiguredTargetsRequest_PageToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *ListConfiguredTargetsRequest_Offset: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("ListConfiguredTargetsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _ListConfiguredTargetsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListConfiguredTargetsRequest) + switch tag { + case 3: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &ListConfiguredTargetsRequest_PageToken{x} + return true, err + case 4: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &ListConfiguredTargetsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ListConfiguredTargetsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListConfiguredTargetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListConfiguredTargetsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *ListConfiguredTargetsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling ListConfiguredTargets +type ListConfiguredTargetsResponse struct { + // ConfiguredTargets matching the request, + // possibly capped at request.page_size or a server limit. + ConfiguredTargets []*ConfiguredTarget `protobuf:"bytes,1,rep,name=configured_targets,json=configuredTargets,proto3" json:"configured_targets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListConfiguredTargetsResponse) Reset() { *m = ListConfiguredTargetsResponse{} } +func (m *ListConfiguredTargetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListConfiguredTargetsResponse) ProtoMessage() {} +func (*ListConfiguredTargetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{11} +} +func (m *ListConfiguredTargetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListConfiguredTargetsResponse.Unmarshal(m, b) +} +func (m *ListConfiguredTargetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListConfiguredTargetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListConfiguredTargetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListConfiguredTargetsResponse.Merge(dst, src) +} +func (m *ListConfiguredTargetsResponse) XXX_Size() int { + return xxx_messageInfo_ListConfiguredTargetsResponse.Size(m) +} +func (m *ListConfiguredTargetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListConfiguredTargetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListConfiguredTargetsResponse proto.InternalMessageInfo + +func (m *ListConfiguredTargetsResponse) GetConfiguredTargets() []*ConfiguredTarget { + if m != nil { + return m.ConfiguredTargets + } + return nil +} + +func (m *ListConfiguredTargetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed into GetAction +type GetActionRequest struct { + // The name of the action to retrieve. It must match this format: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIGURATION_ID}/actions/${ACTION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetActionRequest) Reset() { *m = GetActionRequest{} } +func (m *GetActionRequest) String() string { return proto.CompactTextString(m) } +func (*GetActionRequest) ProtoMessage() {} +func (*GetActionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{12} +} +func (m *GetActionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetActionRequest.Unmarshal(m, b) +} +func (m *GetActionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetActionRequest.Marshal(b, m, deterministic) +} +func (dst *GetActionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetActionRequest.Merge(dst, src) +} +func (m *GetActionRequest) XXX_Size() int { + return xxx_messageInfo_GetActionRequest.Size(m) +} +func (m *GetActionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetActionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetActionRequest proto.InternalMessageInfo + +func (m *GetActionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into ListActions +type ListActionsRequest struct { + // The invocation, target, and configuration name of the action to retrieve. 
+ // It must match this format: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIGURATION_ID} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // Zero means all, but may be capped by the server. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. + // + // Types that are valid to be assigned to PageStart: + // *ListActionsRequest_PageToken + // *ListActionsRequest_Offset + PageStart isListActionsRequest_PageStart `protobuf_oneof:"page_start"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListActionsRequest) Reset() { *m = ListActionsRequest{} } +func (m *ListActionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListActionsRequest) ProtoMessage() {} +func (*ListActionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{13} +} +func (m *ListActionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListActionsRequest.Unmarshal(m, b) +} +func (m *ListActionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListActionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListActionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListActionsRequest.Merge(dst, src) +} +func (m *ListActionsRequest) XXX_Size() int { + return xxx_messageInfo_ListActionsRequest.Size(m) +} +func (m *ListActionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListActionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListActionsRequest proto.InternalMessageInfo + +func (m *ListActionsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListActionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isListActionsRequest_PageStart interface { + isListActionsRequest_PageStart() +} + +type ListActionsRequest_PageToken struct { + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type ListActionsRequest_Offset struct { + Offset int64 `protobuf:"varint,4,opt,name=offset,proto3,oneof"` +} + +func (*ListActionsRequest_PageToken) isListActionsRequest_PageStart() {} + +func (*ListActionsRequest_Offset) isListActionsRequest_PageStart() {} + +func (m *ListActionsRequest) GetPageStart() isListActionsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *ListActionsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*ListActionsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *ListActionsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*ListActionsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListActionsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListActionsRequest_OneofMarshaler, _ListActionsRequest_OneofUnmarshaler, _ListActionsRequest_OneofSizer, []interface{}{ + (*ListActionsRequest_PageToken)(nil), + (*ListActionsRequest_Offset)(nil), + } +} + +func _ListActionsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListActionsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListActionsRequest_PageToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *ListActionsRequest_Offset: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("ListActionsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _ListActionsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListActionsRequest) + switch tag { + case 3: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &ListActionsRequest_PageToken{x} + return true, err + case 4: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &ListActionsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ListActionsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListActionsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListActionsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *ListActionsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling ListActions +type ListActionsResponse struct { + // Actions matching the request, + // possibly capped at request.page_size or a server limit. + Actions []*Action `protobuf:"bytes,1,rep,name=actions,proto3" json:"actions,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListActionsResponse) Reset() { *m = ListActionsResponse{} } +func (m *ListActionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListActionsResponse) ProtoMessage() {} +func (*ListActionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{14} +} +func (m *ListActionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListActionsResponse.Unmarshal(m, b) +} +func (m *ListActionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListActionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListActionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListActionsResponse.Merge(dst, src) +} +func (m *ListActionsResponse) XXX_Size() int { + return xxx_messageInfo_ListActionsResponse.Size(m) +} +func (m *ListActionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListActionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListActionsResponse proto.InternalMessageInfo + +func (m *ListActionsResponse) GetActions() []*Action { + if m != nil { + return m.Actions + } + return nil +} + +func (m *ListActionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed into GetFileSet +type GetFileSetRequest struct { + // The name of the file set to retrieve. It must match this format: + // invocations/${INVOCATION_ID}/fileSets/${FILE_SET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFileSetRequest) Reset() { *m = GetFileSetRequest{} } +func (m *GetFileSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetFileSetRequest) ProtoMessage() {} +func (*GetFileSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{15} +} +func (m *GetFileSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFileSetRequest.Unmarshal(m, b) +} +func (m *GetFileSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFileSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetFileSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFileSetRequest.Merge(dst, src) +} +func (m *GetFileSetRequest) XXX_Size() int { + return xxx_messageInfo_GetFileSetRequest.Size(m) +} +func (m *GetFileSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFileSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFileSetRequest proto.InternalMessageInfo + +func (m *GetFileSetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed into ListFileSets +type ListFileSetsRequest struct { + // The invocation name of the file sets to retrieve. + // It must match this format: invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of items to return. + // Zero means all, but may be capped by the server. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Options for pagination. 
+ // + // Types that are valid to be assigned to PageStart: + // *ListFileSetsRequest_PageToken + // *ListFileSetsRequest_Offset + PageStart isListFileSetsRequest_PageStart `protobuf_oneof:"page_start"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFileSetsRequest) Reset() { *m = ListFileSetsRequest{} } +func (m *ListFileSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFileSetsRequest) ProtoMessage() {} +func (*ListFileSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{16} +} +func (m *ListFileSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFileSetsRequest.Unmarshal(m, b) +} +func (m *ListFileSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFileSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFileSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFileSetsRequest.Merge(dst, src) +} +func (m *ListFileSetsRequest) XXX_Size() int { + return xxx_messageInfo_ListFileSetsRequest.Size(m) +} +func (m *ListFileSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFileSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFileSetsRequest proto.InternalMessageInfo + +func (m *ListFileSetsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFileSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type isListFileSetsRequest_PageStart interface { + isListFileSetsRequest_PageStart() +} + +type ListFileSetsRequest_PageToken struct { + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3,oneof"` +} + +type ListFileSetsRequest_Offset struct { + Offset int64 `protobuf:"varint,4,opt,name=offset,proto3,oneof"` +} + +func (*ListFileSetsRequest_PageToken) isListFileSetsRequest_PageStart() {} + +func (*ListFileSetsRequest_Offset) isListFileSetsRequest_PageStart() {} + +func (m *ListFileSetsRequest) GetPageStart() isListFileSetsRequest_PageStart { + if m != nil { + return m.PageStart + } + return nil +} + +func (m *ListFileSetsRequest) GetPageToken() string { + if x, ok := m.GetPageStart().(*ListFileSetsRequest_PageToken); ok { + return x.PageToken + } + return "" +} + +func (m *ListFileSetsRequest) GetOffset() int64 { + if x, ok := m.GetPageStart().(*ListFileSetsRequest_Offset); ok { + return x.Offset + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListFileSetsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListFileSetsRequest_OneofMarshaler, _ListFileSetsRequest_OneofUnmarshaler, _ListFileSetsRequest_OneofSizer, []interface{}{ + (*ListFileSetsRequest_PageToken)(nil), + (*ListFileSetsRequest_Offset)(nil), + } +} + +func _ListFileSetsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListFileSetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListFileSetsRequest_PageToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PageToken) + case *ListFileSetsRequest_Offset: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Offset)) + case nil: + default: + return fmt.Errorf("ListFileSetsRequest.PageStart has unexpected type %T", x) + } + return nil +} + +func _ListFileSetsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListFileSetsRequest) + switch tag { + case 3: // page_start.page_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PageStart = &ListFileSetsRequest_PageToken{x} + return true, err + case 4: // page_start.offset + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PageStart = &ListFileSetsRequest_Offset{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ListFileSetsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListFileSetsRequest) + // page_start + switch x := m.PageStart.(type) { + case *ListFileSetsRequest_PageToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PageToken))) + n += len(x.PageToken) + case *ListFileSetsRequest_Offset: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Offset)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response from calling ListFileSets +type ListFileSetsResponse struct { + // File sets matching the request, + // possibly capped at request.page_size or a server limit. + FileSets []*FileSet `protobuf:"bytes,1,rep,name=file_sets,json=fileSets,proto3" json:"file_sets,omitempty"` + // Token to retrieve the next page of results, or empty if there are no + // more results in the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFileSetsResponse) Reset() { *m = ListFileSetsResponse{} } +func (m *ListFileSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFileSetsResponse) ProtoMessage() {} +func (*ListFileSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_download_3481b489ba35ace6, []int{17} +} +func (m *ListFileSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFileSetsResponse.Unmarshal(m, b) +} +func (m *ListFileSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFileSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFileSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFileSetsResponse.Merge(dst, src) +} +func (m *ListFileSetsResponse) XXX_Size() int { + return xxx_messageInfo_ListFileSetsResponse.Size(m) +} +func (m *ListFileSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFileSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFileSetsResponse proto.InternalMessageInfo + +func (m *ListFileSetsResponse) GetFileSets() []*FileSet { + if m != nil { + return m.FileSets + } + return nil +} + +func (m *ListFileSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*GetInvocationRequest)(nil), "google.devtools.resultstore.v2.GetInvocationRequest") + proto.RegisterType((*SearchInvocationsRequest)(nil), "google.devtools.resultstore.v2.SearchInvocationsRequest") + proto.RegisterType((*SearchInvocationsResponse)(nil), "google.devtools.resultstore.v2.SearchInvocationsResponse") + proto.RegisterType((*GetConfigurationRequest)(nil), "google.devtools.resultstore.v2.GetConfigurationRequest") + proto.RegisterType((*ListConfigurationsRequest)(nil), "google.devtools.resultstore.v2.ListConfigurationsRequest") + proto.RegisterType((*ListConfigurationsResponse)(nil), "google.devtools.resultstore.v2.ListConfigurationsResponse") + proto.RegisterType((*GetTargetRequest)(nil), "google.devtools.resultstore.v2.GetTargetRequest") + proto.RegisterType((*ListTargetsRequest)(nil), "google.devtools.resultstore.v2.ListTargetsRequest") + proto.RegisterType((*ListTargetsResponse)(nil), "google.devtools.resultstore.v2.ListTargetsResponse") + proto.RegisterType((*GetConfiguredTargetRequest)(nil), "google.devtools.resultstore.v2.GetConfiguredTargetRequest") + proto.RegisterType((*ListConfiguredTargetsRequest)(nil), "google.devtools.resultstore.v2.ListConfiguredTargetsRequest") + proto.RegisterType((*ListConfiguredTargetsResponse)(nil), "google.devtools.resultstore.v2.ListConfiguredTargetsResponse") + proto.RegisterType((*GetActionRequest)(nil), "google.devtools.resultstore.v2.GetActionRequest") + proto.RegisterType((*ListActionsRequest)(nil), "google.devtools.resultstore.v2.ListActionsRequest") + proto.RegisterType((*ListActionsResponse)(nil), "google.devtools.resultstore.v2.ListActionsResponse") + proto.RegisterType((*GetFileSetRequest)(nil), "google.devtools.resultstore.v2.GetFileSetRequest") + proto.RegisterType((*ListFileSetsRequest)(nil), "google.devtools.resultstore.v2.ListFileSetsRequest") + proto.RegisterType((*ListFileSetsResponse)(nil), "google.devtools.resultstore.v2.ListFileSetsResponse") +} + +// Reference imports to 
suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ResultStoreDownloadClient is the client API for ResultStoreDownload service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ResultStoreDownloadClient interface { + // Retrieves the invocation with the given name. + // + // An error will be reported in the following cases: + // - If the invocation is not found. + // - If the given invocation name is badly formatted. + // - If no field mask was given. + GetInvocation(ctx context.Context, in *GetInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) + // Searches for invocations matching the given query parameters. + // + // + // An error will be reported in the following cases: + // - If a query string is not provided + // - If no field mask was given. + SearchInvocations(ctx context.Context, in *SearchInvocationsRequest, opts ...grpc.CallOption) (*SearchInvocationsResponse, error) + // Retrieves the configuration with the given name. + // + // An error will be reported in the following cases: + // - If the configuration or its parent invocation is not found. + // - If the given configuration name is badly formatted. + // - If no field mask was given. + GetConfiguration(ctx context.Context, in *GetConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) + // Retrieves all configurations for a parent invocation. + // This might be limited by user or server, + // in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent invocation is not found. + // - If the given parent invocation name is badly formatted. + // - If no field mask was given. + ListConfigurations(ctx context.Context, in *ListConfigurationsRequest, opts ...grpc.CallOption) (*ListConfigurationsResponse, error) + // Retrieves the target with the given name. + // + // An error will be reported in the following cases: + // - If the target or its parent invocation is not found. + // - If the given target name is badly formatted. + // - If no field mask was given. + GetTarget(ctx context.Context, in *GetTargetRequest, opts ...grpc.CallOption) (*Target, error) + // Retrieves all targets for a parent invocation. This might be limited by + // user or server, in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListTargets(ctx context.Context, in *ListTargetsRequest, opts ...grpc.CallOption) (*ListTargetsResponse, error) + // Retrieves the configured target with the given name. + // + // An error will be reported in the following cases: + // - If the configured target is not found. + // - If the given name is badly formatted. + // - If no field mask was given. + GetConfiguredTarget(ctx context.Context, in *GetConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) + // Retrieves all configured targets for a parent invocation/target. + // This might be limited by user or server, in which case a continuation + // token is provided. Supports '-' for targetId meaning all targets. 
+ // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListConfiguredTargets(ctx context.Context, in *ListConfiguredTargetsRequest, opts ...grpc.CallOption) (*ListConfiguredTargetsResponse, error) + // Retrieves the action with the given name. + // + // An error will be reported in the following cases: + // - If the action is not found. + // - If the given name is badly formatted. + // - If no field mask was given. + GetAction(ctx context.Context, in *GetActionRequest, opts ...grpc.CallOption) (*Action, error) + // Retrieves all actions for a parent invocation/target/configuration. + // This might be limited by user or server, in which case a continuation + // token is provided. Supports '-' for configurationId to mean all + // actions for all configurations for a target, or '-' for targetId and + // configurationId to mean all actions for all configurations and all targets. + // Does not support targetId '-' with a specified configuration. + // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListActions(ctx context.Context, in *ListActionsRequest, opts ...grpc.CallOption) (*ListActionsResponse, error) + // Retrieves the file set with the given name. + // + // An error will be reported in the following cases: + // - If the file set or its parent invocation is not found. + // - If the given file set name is badly formatted. + // - If no field mask was given. + GetFileSet(ctx context.Context, in *GetFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) + // Retrieves all file sets for a parent invocation. + // This might be limited by user or server, + // in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent invocation is not found. + // - If the given parent invocation name is badly formatted. + // - If no field mask was given. + ListFileSets(ctx context.Context, in *ListFileSetsRequest, opts ...grpc.CallOption) (*ListFileSetsResponse, error) +} + +type resultStoreDownloadClient struct { + cc *grpc.ClientConn +} + +func NewResultStoreDownloadClient(cc *grpc.ClientConn) ResultStoreDownloadClient { + return &resultStoreDownloadClient{cc} +} + +func (c *resultStoreDownloadClient) GetInvocation(ctx context.Context, in *GetInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) { + out := new(Invocation) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetInvocation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) SearchInvocations(ctx context.Context, in *SearchInvocationsRequest, opts ...grpc.CallOption) (*SearchInvocationsResponse, error) { + out := new(SearchInvocationsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/SearchInvocations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) GetConfiguration(ctx context.Context, in *GetConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) { + out := new(Configuration) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetConfiguration", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) ListConfigurations(ctx context.Context, in *ListConfigurationsRequest, opts ...grpc.CallOption) (*ListConfigurationsResponse, error) { + out := new(ListConfigurationsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/ListConfigurations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) GetTarget(ctx context.Context, in *GetTargetRequest, opts ...grpc.CallOption) (*Target, error) { + out := new(Target) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) ListTargets(ctx context.Context, in *ListTargetsRequest, opts ...grpc.CallOption) (*ListTargetsResponse, error) { + out := new(ListTargetsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/ListTargets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) GetConfiguredTarget(ctx context.Context, in *GetConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) { + out := new(ConfiguredTarget) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetConfiguredTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) ListConfiguredTargets(ctx context.Context, in *ListConfiguredTargetsRequest, opts ...grpc.CallOption) (*ListConfiguredTargetsResponse, error) { + out := new(ListConfiguredTargetsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/ListConfiguredTargets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) GetAction(ctx context.Context, in *GetActionRequest, opts ...grpc.CallOption) (*Action, error) { + out := new(Action) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetAction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) ListActions(ctx context.Context, in *ListActionsRequest, opts ...grpc.CallOption) (*ListActionsResponse, error) { + out := new(ListActionsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/ListActions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) GetFileSet(ctx context.Context, in *GetFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) { + out := new(FileSet) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/GetFileSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreDownloadClient) ListFileSets(ctx context.Context, in *ListFileSetsRequest, opts ...grpc.CallOption) (*ListFileSetsResponse, error) { + out := new(ListFileSetsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreDownload/ListFileSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ResultStoreDownloadServer is the server API for ResultStoreDownload service. +type ResultStoreDownloadServer interface { + // Retrieves the invocation with the given name. 
+ // + // An error will be reported in the following cases: + // - If the invocation is not found. + // - If the given invocation name is badly formatted. + // - If no field mask was given. + GetInvocation(context.Context, *GetInvocationRequest) (*Invocation, error) + // Searches for invocations matching the given query parameters. + // + // + // An error will be reported in the following cases: + // - If a query string is not provided + // - If no field mask was given. + SearchInvocations(context.Context, *SearchInvocationsRequest) (*SearchInvocationsResponse, error) + // Retrieves the configuration with the given name. + // + // An error will be reported in the following cases: + // - If the configuration or its parent invocation is not found. + // - If the given configuration name is badly formatted. + // - If no field mask was given. + GetConfiguration(context.Context, *GetConfigurationRequest) (*Configuration, error) + // Retrieves all configurations for a parent invocation. + // This might be limited by user or server, + // in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent invocation is not found. + // - If the given parent invocation name is badly formatted. + // - If no field mask was given. + ListConfigurations(context.Context, *ListConfigurationsRequest) (*ListConfigurationsResponse, error) + // Retrieves the target with the given name. + // + // An error will be reported in the following cases: + // - If the target or its parent invocation is not found. + // - If the given target name is badly formatted. + // - If no field mask was given. + GetTarget(context.Context, *GetTargetRequest) (*Target, error) + // Retrieves all targets for a parent invocation. This might be limited by + // user or server, in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListTargets(context.Context, *ListTargetsRequest) (*ListTargetsResponse, error) + // Retrieves the configured target with the given name. + // + // An error will be reported in the following cases: + // - If the configured target is not found. + // - If the given name is badly formatted. + // - If no field mask was given. + GetConfiguredTarget(context.Context, *GetConfiguredTargetRequest) (*ConfiguredTarget, error) + // Retrieves all configured targets for a parent invocation/target. + // This might be limited by user or server, in which case a continuation + // token is provided. Supports '-' for targetId meaning all targets. + // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListConfiguredTargets(context.Context, *ListConfiguredTargetsRequest) (*ListConfiguredTargetsResponse, error) + // Retrieves the action with the given name. + // + // An error will be reported in the following cases: + // - If the action is not found. + // - If the given name is badly formatted. + // - If no field mask was given. + GetAction(context.Context, *GetActionRequest) (*Action, error) + // Retrieves all actions for a parent invocation/target/configuration. + // This might be limited by user or server, in which case a continuation + // token is provided. 
Supports '-' for configurationId to mean all + // actions for all configurations for a target, or '-' for targetId and + // configurationId to mean all actions for all configurations and all targets. + // Does not support targetId '-' with a specified configuration. + // + // An error will be reported in the following cases: + // - If the parent is not found. + // - If the given parent name is badly formatted. + // - If no field mask was given. + ListActions(context.Context, *ListActionsRequest) (*ListActionsResponse, error) + // Retrieves the file set with the given name. + // + // An error will be reported in the following cases: + // - If the file set or its parent invocation is not found. + // - If the given file set name is badly formatted. + // - If no field mask was given. + GetFileSet(context.Context, *GetFileSetRequest) (*FileSet, error) + // Retrieves all file sets for a parent invocation. + // This might be limited by user or server, + // in which case a continuation token is provided. + // + // An error will be reported in the following cases: + // - If the parent invocation is not found. + // - If the given parent invocation name is badly formatted. + // - If no field mask was given. + ListFileSets(context.Context, *ListFileSetsRequest) (*ListFileSetsResponse, error) +} + +func RegisterResultStoreDownloadServer(s *grpc.Server, srv ResultStoreDownloadServer) { + s.RegisterService(&_ResultStoreDownload_serviceDesc, srv) +} + +func _ResultStoreDownload_GetInvocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInvocationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetInvocation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetInvocation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).GetInvocation(ctx, req.(*GetInvocationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_SearchInvocations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchInvocationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).SearchInvocations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/SearchInvocations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).SearchInvocations(ctx, req.(*SearchInvocationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_GetConfiguration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetConfigurationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetConfiguration(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetConfiguration", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(ResultStoreDownloadServer).GetConfiguration(ctx, req.(*GetConfigurationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_ListConfigurations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListConfigurationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).ListConfigurations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/ListConfigurations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).ListConfigurations(ctx, req.(*ListConfigurationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_GetTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).GetTarget(ctx, req.(*GetTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_ListTargets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTargetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).ListTargets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/ListTargets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).ListTargets(ctx, req.(*ListTargetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_GetConfiguredTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetConfiguredTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetConfiguredTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetConfiguredTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).GetConfiguredTarget(ctx, req.(*GetConfiguredTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_ListConfiguredTargets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListConfiguredTargetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).ListConfiguredTargets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.devtools.resultstore.v2.ResultStoreDownload/ListConfiguredTargets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).ListConfiguredTargets(ctx, req.(*ListConfiguredTargetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_GetAction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetActionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetAction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetAction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).GetAction(ctx, req.(*GetActionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_ListActions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListActionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).ListActions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/ListActions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).ListActions(ctx, req.(*ListActionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_GetFileSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFileSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).GetFileSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/GetFileSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).GetFileSet(ctx, req.(*GetFileSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreDownload_ListFileSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFileSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreDownloadServer).ListFileSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreDownload/ListFileSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreDownloadServer).ListFileSets(ctx, req.(*ListFileSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ResultStoreDownload_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.resultstore.v2.ResultStoreDownload", + HandlerType: (*ResultStoreDownloadServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetInvocation", + Handler: _ResultStoreDownload_GetInvocation_Handler, + }, + { + MethodName: "SearchInvocations", + Handler: _ResultStoreDownload_SearchInvocations_Handler, + }, + { + 
MethodName: "GetConfiguration", + Handler: _ResultStoreDownload_GetConfiguration_Handler, + }, + { + MethodName: "ListConfigurations", + Handler: _ResultStoreDownload_ListConfigurations_Handler, + }, + { + MethodName: "GetTarget", + Handler: _ResultStoreDownload_GetTarget_Handler, + }, + { + MethodName: "ListTargets", + Handler: _ResultStoreDownload_ListTargets_Handler, + }, + { + MethodName: "GetConfiguredTarget", + Handler: _ResultStoreDownload_GetConfiguredTarget_Handler, + }, + { + MethodName: "ListConfiguredTargets", + Handler: _ResultStoreDownload_ListConfiguredTargets_Handler, + }, + { + MethodName: "GetAction", + Handler: _ResultStoreDownload_GetAction_Handler, + }, + { + MethodName: "ListActions", + Handler: _ResultStoreDownload_ListActions_Handler, + }, + { + MethodName: "GetFileSet", + Handler: _ResultStoreDownload_GetFileSet_Handler, + }, + { + MethodName: "ListFileSets", + Handler: _ResultStoreDownload_ListFileSets_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/resultstore/v2/resultstore_download.proto", +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/resultstore_download.proto", fileDescriptor_resultstore_download_3481b489ba35ace6) +} + +var fileDescriptor_resultstore_download_3481b489ba35ace6 = []byte{ + // 1043 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x98, 0xcf, 0x6f, 0xdc, 0x44, + 0x14, 0xc7, 0x3b, 0xf9, 0x45, 0xf7, 0xa5, 0x85, 0xe6, 0x25, 0x14, 0xd7, 0xb4, 0x25, 0x32, 0x90, + 0x46, 0xa9, 0xb2, 0x1b, 0xb6, 0x55, 0x69, 0x03, 0x45, 0x25, 0x54, 0x24, 0x91, 0x7a, 0xa8, 0x36, + 0xe5, 0xc2, 0x65, 0x65, 0x76, 0x67, 0x8d, 0x61, 0xe3, 0xd9, 0x78, 0x26, 0x01, 0x8a, 0x10, 0x12, + 0xe2, 0x1f, 0x28, 0x11, 0x07, 0x04, 0x27, 0x0e, 0x55, 0x6e, 0x9c, 0x10, 0x57, 0x24, 0xc4, 0x5f, + 0x00, 0x7f, 0x02, 0x7f, 0x08, 0xf2, 0x78, 0x66, 0xe3, 0x71, 0xbc, 0xeb, 0x71, 0x4e, 0xb9, 0xc5, + 0xde, 0xf7, 0x9d, 0x79, 0xef, 0x33, 0x5f, 0xfb, 0xbd, 0x18, 0xee, 0x05, 0x8c, 0x05, 0x7d, 0xda, + 0xe8, 0xd2, 0x03, 0xc1, 0x58, 0x9f, 0x37, 0x62, 0xca, 0xf7, 0xfb, 0x82, 0x0b, 0x16, 0xd3, 0xc6, + 0x41, 0x33, 0x7b, 0xd9, 0xee, 0xb2, 0x2f, 0xa2, 0x3e, 0xf3, 0xbb, 0xf5, 0x41, 0xcc, 0x04, 0xc3, + 0xeb, 0xa9, 0xb4, 0xae, 0xa5, 0xf5, 0x4c, 0x6c, 0xfd, 0xa0, 0xe9, 0x5e, 0x55, 0x4b, 0xfb, 0x83, + 0xb0, 0xe1, 0x47, 0x11, 0x13, 0xbe, 0x08, 0x59, 0xc4, 0x53, 0xb5, 0x7b, 0xb3, 0x64, 0x63, 0xbf, + 0x93, 0x44, 0x5b, 0x06, 0x77, 0xd8, 0xee, 0xee, 0x30, 0xb8, 0x59, 0x1a, 0x1c, 0xf5, 0xc2, 0x60, + 0x3f, 0xf6, 0x33, 0x1b, 0xdc, 0xb1, 0xd4, 0xd0, 0x6e, 0x5b, 0xf8, 0x71, 0x40, 0x85, 0xd2, 0xad, + 0x96, 0xe8, 0x7a, 0x61, 0x9f, 0xb6, 0xf9, 0x30, 0xbc, 0x51, 0x12, 0x1e, 0x46, 0x07, 0xac, 0xe3, + 0x57, 0x28, 0x3c, 0x9b, 0x8c, 0xb7, 0x02, 0x0b, 0x9b, 0x54, 0x6c, 0x0f, 0xd7, 0x68, 0xd1, 0xbd, + 0x7d, 0xca, 0x05, 0x22, 0x4c, 0x45, 0xfe, 0x2e, 0x75, 0xc8, 0x22, 0x59, 0xae, 0xb5, 0xe4, 0xdf, + 0xde, 0xef, 0x04, 0x9c, 0x1d, 0xea, 0xc7, 0x9d, 0x4f, 0x8f, 0xe3, 0xb9, 0x16, 0xbc, 0x0a, 0xb5, + 0x81, 0x1f, 0xd0, 0x36, 0x0f, 0x9f, 0xa6, 0xaa, 0xe9, 0xd6, 0xf9, 0xe4, 0xc6, 0x4e, 0xf8, 0x94, + 0xe2, 0x6b, 0x00, 0xf2, 0x47, 0xc1, 0x3e, 0xa7, 0x91, 0x33, 0x91, 0xac, 0xb9, 0x75, 0xae, 0x25, + 0x05, 0x4f, 0x92, 0x5b, 0xe8, 0xc0, 0x0c, 0xeb, 0xf5, 0x38, 0x15, 0xce, 0xe4, 0x22, 0x59, 0x9e, + 0xdc, 0x3a, 0xd7, 0x52, 0xd7, 0xb8, 0x00, 0xd3, 0x7b, 0xfb, 0x34, 0xfe, 0xca, 0x99, 0x92, 0x99, + 0xa4, 0x17, 0x78, 0x0d, 0x60, 0x10, 0xb3, 0xcf, 0x68, 0x47, 0xb4, 0xc3, 0xae, 0x33, 0x2d, 0x7f, + 0xaa, 0xa9, 0x3b, 0xdb, 0xdd, 0x8d, 0x0b, 0x6a, 
0x3f, 0x2e, 0xfc, 0x58, 0x78, 0xcf, 0x08, 0x5c, + 0x29, 0xc8, 0x9b, 0x0f, 0x58, 0xc4, 0x29, 0x3e, 0x82, 0xd9, 0x63, 0x84, 0xdc, 0x21, 0x8b, 0x93, + 0xcb, 0xb3, 0xcd, 0x95, 0xfa, 0x78, 0xa3, 0xd6, 0x33, 0xc4, 0xb2, 0x72, 0x5c, 0x82, 0x97, 0x22, + 0xfa, 0xa5, 0x68, 0xe7, 0xcb, 0x6d, 0x5d, 0x4c, 0x6e, 0x3f, 0xd6, 0x05, 0x7b, 0xab, 0xf0, 0xca, + 0x26, 0x15, 0x1f, 0x64, 0x6d, 0x35, 0x0e, 0xfd, 0x4f, 0x04, 0xae, 0x3c, 0x0a, 0xb9, 0x29, 0x18, + 0xb2, 0xbf, 0x0c, 0x33, 0x03, 0x3f, 0xa6, 0x91, 0x50, 0x1a, 0x75, 0x65, 0x9e, 0xc9, 0xc4, 0xd8, + 0x33, 0x99, 0x1c, 0x77, 0x26, 0x53, 0xe6, 0x99, 0xe4, 0xf0, 0xfe, 0x4c, 0xc0, 0x2d, 0xca, 0x4d, + 0xf1, 0xfd, 0x08, 0x5e, 0x34, 0x9e, 0x1e, 0x8d, 0x78, 0xb5, 0x0c, 0xb1, 0x09, 0x27, 0xb7, 0x88, + 0x35, 0xe8, 0x25, 0xb8, 0xb4, 0x49, 0xc5, 0x13, 0xe9, 0xf9, 0x71, 0x84, 0x7f, 0x20, 0x80, 0x49, + 0x15, 0x69, 0xe4, 0x19, 0x41, 0xfb, 0x2d, 0xcc, 0x1b, 0x39, 0x29, 0xa4, 0x0f, 0xe0, 0x85, 0xf4, + 0x21, 0xd6, 0x2c, 0x97, 0xca, 0x58, 0xaa, 0xfa, 0xb5, 0xcc, 0x9a, 0xde, 0x1a, 0xb8, 0x19, 0x9b, + 0xd2, 0x6e, 0x39, 0xc7, 0x5f, 0x08, 0x5c, 0xcd, 0xba, 0x41, 0x6b, 0xce, 0x08, 0xd1, 0x23, 0x02, + 0xd7, 0x46, 0xa4, 0xa7, 0xe0, 0xb6, 0x01, 0x4f, 0xbc, 0xb9, 0x35, 0xe7, 0x35, 0x5b, 0xcf, 0x0e, + 0x49, 0xcd, 0x75, 0xf2, 0x1b, 0x55, 0x74, 0xee, 0xfb, 0x9d, 0xb2, 0x77, 0x83, 0x76, 0x6e, 0x1a, + 0x79, 0xb6, 0x9c, 0x3b, 0xcc, 0xe9, 0xd8, 0xb9, 0x69, 0x93, 0xb6, 0x76, 0xae, 0xaa, 0x5f, 0xcb, + 0xac, 0xe9, 0xdd, 0x80, 0xb9, 0x4d, 0x2a, 0x3e, 0x0c, 0xfb, 0x74, 0x67, 0xbc, 0x61, 0x0f, 0x49, + 0x9a, 0xaa, 0x0a, 0x3d, 0x23, 0xfc, 0xbe, 0x27, 0xb0, 0x60, 0x66, 0xa5, 0x08, 0x3e, 0x84, 0x9a, + 0x1e, 0x10, 0x34, 0xc3, 0x1b, 0x65, 0x0c, 0x35, 0x85, 0xf3, 0x3d, 0xb5, 0x9a, 0x2d, 0xc5, 0xe6, + 0xdf, 0x97, 0x60, 0xbe, 0x25, 0x17, 0xdb, 0x49, 0x16, 0x7b, 0xa8, 0xa6, 0x39, 0x3c, 0x24, 0x70, + 0xd1, 0x98, 0x1b, 0xf0, 0x76, 0x59, 0x12, 0x45, 0x63, 0x86, 0x5b, 0xa1, 0xcf, 0x7a, 0x8b, 0xdf, + 0xfd, 0xf3, 0xdf, 0xe1, 0x84, 0x8b, 0x4e, 0x32, 0xc4, 0x7c, 0x9d, 0x9c, 0xdd, 0xfd, 0x4c, 0xe7, + 0x6d, 0xac, 0x7c, 0x83, 0x47, 0x04, 0xe6, 0x4e, 0x34, 0x7a, 0xbc, 0x5b, 0xb6, 0xc7, 0xa8, 0x99, + 0xc6, 0xbd, 0x77, 0x0a, 0x65, 0x7a, 0x4c, 0xde, 0x75, 0x99, 0xac, 0x83, 0x97, 0xcd, 0x11, 0x8d, + 0xaf, 0x73, 0x29, 0xc1, 0xe7, 0x44, 0x3e, 0xdd, 0x46, 0x8f, 0xc3, 0xb7, 0x2d, 0x18, 0x16, 0x8d, + 0x0c, 0x6e, 0xb5, 0x5e, 0xea, 0xad, 0xc8, 0xe4, 0xde, 0x40, 0x6f, 0x04, 0x49, 0x35, 0xba, 0x4a, + 0xa6, 0x7f, 0xa8, 0xb7, 0x8b, 0xd9, 0xdd, 0xb1, 0x14, 0xcd, 0xc8, 0x69, 0xc5, 0x5d, 0x3f, 0x8d, + 0x54, 0x61, 0x35, 0x33, 0x4f, 0x9f, 0xc8, 0x9c, 0x0b, 0x74, 0xf2, 0xf8, 0x8c, 0x40, 0x6d, 0xd8, + 0xfa, 0x71, 0xcd, 0x82, 0xad, 0xd1, 0xdd, 0x5c, 0xcb, 0xa6, 0x5a, 0x4e, 0x53, 0xf5, 0x90, 0x84, + 0xe6, 0xaf, 0x04, 0x66, 0x33, 0x1d, 0x1d, 0x9b, 0x36, 0x2c, 0xcc, 0x06, 0xea, 0xde, 0xaa, 0xa4, + 0xa9, 0x00, 0x4e, 0x0f, 0x07, 0x7f, 0x11, 0x98, 0x2f, 0xe8, 0xfa, 0xb8, 0x5e, 0xc1, 0x9e, 0xb9, + 0x51, 0xc1, 0xad, 0xdc, 0x39, 0xbd, 0xf7, 0x64, 0xc6, 0x77, 0xf1, 0x4e, 0x29, 0xd6, 0xc6, 0x89, + 0x1e, 0x9b, 0xa0, 0xfe, 0x97, 0xc0, 0xcb, 0x85, 0x9d, 0x1e, 0xdf, 0xad, 0x62, 0xc0, 0xfc, 0xfc, + 0xe2, 0xde, 0x3f, 0xa5, 0x5a, 0x1d, 0x84, 0x59, 0x56, 0xd1, 0x41, 0x64, 0xfc, 0x72, 0xb2, 0x32, + 0xfc, 0x2d, 0x75, 0x75, 0xda, 0x16, 0xad, 0x5c, 0x6d, 0x4c, 0x10, 0xae, 0x65, 0xc3, 0xf5, 0xb6, + 0x64, 0x9e, 0x1b, 0xf8, 0xe0, 0x54, 0xf8, 0xd5, 0xbf, 0xe2, 0xf2, 0x20, 0xfe, 0x54, 0x9e, 0x57, + 0xb3, 0x80, 0x9d, 0xe7, 0xcd, 0x61, 0xc6, 0xce, 0xf3, 0xb9, 0x61, 0x23, 0x57, 0xc2, 0x78, 0xd4, + 0x85, 0x1e, 0xd2, 0x55, 0xe0, 0x8f, 0x04, 0xe0, 0x78, 0x9a, 0xc0, 0xb7, 
0x2c, 0xa8, 0x9b, 0x93, + 0x87, 0x6b, 0xdb, 0xa3, 0xbd, 0x9b, 0x32, 0xe9, 0x37, 0xf1, 0xf5, 0x51, 0xdc, 0x75, 0x0f, 0x4f, + 0xd0, 0x3e, 0x27, 0x70, 0x21, 0x3b, 0x25, 0xa0, 0x15, 0xa7, 0xdc, 0xa4, 0xe3, 0xde, 0xae, 0x26, + 0x52, 0x74, 0xcd, 0x44, 0x8b, 0xdf, 0x28, 0x3a, 0xd7, 0x8d, 0x3d, 0xf0, 0x3a, 0x6c, 0xb7, 0x64, + 0x9f, 0xc7, 0xe4, 0xe3, 0x6d, 0x15, 0x11, 0xb0, 0xbe, 0x1f, 0x05, 0x75, 0x16, 0x07, 0x8d, 0x80, + 0x46, 0xf2, 0x53, 0x85, 0xfa, 0x0e, 0xe2, 0x0f, 0x42, 0x3e, 0xea, 0xd3, 0xc6, 0x3b, 0x99, 0xcb, + 0x4f, 0x66, 0xa4, 0xea, 0xd6, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0x69, 0xe0, 0x2a, 0x9a, 0xae, + 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_file_download.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_file_download.pb.go new file mode 100644 index 0000000..6a70c7e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_file_download.pb.go @@ -0,0 +1,433 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/resultstore_file_download.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request object for GetFile +type GetFileRequest struct { + // This corresponds to the uri field in the File message. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // The offset for the first byte to return in the read, relative to the start + // of the resource. + // + // A `read_offset` that is negative or greater than the size of the resource + // will cause an `OUT_OF_RANGE` error. + ReadOffset int64 `protobuf:"varint,2,opt,name=read_offset,json=readOffset,proto3" json:"read_offset,omitempty"` + // The maximum number of `data` bytes the server is allowed to return in the + // sum of all `ReadResponse` messages. A `read_limit` of zero indicates that + // there is no limit, and a negative `read_limit` will cause an error. + // + // If the stream returns fewer bytes than allowed by the `read_limit` and no + // error occurred, the stream includes all data from the `read_offset` to the + // end of the resource. 
+ ReadLimit int64 `protobuf:"varint,3,opt,name=read_limit,json=readLimit,proto3" json:"read_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFileRequest) Reset() { *m = GetFileRequest{} } +func (m *GetFileRequest) String() string { return proto.CompactTextString(m) } +func (*GetFileRequest) ProtoMessage() {} +func (*GetFileRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b, []int{0} +} +func (m *GetFileRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFileRequest.Unmarshal(m, b) +} +func (m *GetFileRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFileRequest.Marshal(b, m, deterministic) +} +func (dst *GetFileRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFileRequest.Merge(dst, src) +} +func (m *GetFileRequest) XXX_Size() int { + return xxx_messageInfo_GetFileRequest.Size(m) +} +func (m *GetFileRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFileRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFileRequest proto.InternalMessageInfo + +func (m *GetFileRequest) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *GetFileRequest) GetReadOffset() int64 { + if m != nil { + return m.ReadOffset + } + return 0 +} + +func (m *GetFileRequest) GetReadLimit() int64 { + if m != nil { + return m.ReadLimit + } + return 0 +} + +// Response object for GetFile +type GetFileResponse struct { + // The file data. + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFileResponse) Reset() { *m = GetFileResponse{} } +func (m *GetFileResponse) String() string { return proto.CompactTextString(m) } +func (*GetFileResponse) ProtoMessage() {} +func (*GetFileResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b, []int{1} +} +func (m *GetFileResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFileResponse.Unmarshal(m, b) +} +func (m *GetFileResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFileResponse.Marshal(b, m, deterministic) +} +func (dst *GetFileResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFileResponse.Merge(dst, src) +} +func (m *GetFileResponse) XXX_Size() int { + return xxx_messageInfo_GetFileResponse.Size(m) +} +func (m *GetFileResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetFileResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFileResponse proto.InternalMessageInfo + +func (m *GetFileResponse) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// Request object for GetFileTail +type GetFileTailRequest struct { + // This corresponds to the uri field in the File message. + Uri string `protobuf:"bytes,1,opt,name=uri,proto3" json:"uri,omitempty"` + // The offset for the first byte to return in the read, relative to the end + // of the resource. + // + // A `read_offset` that is negative or greater than the size of the resource + // will cause an `OUT_OF_RANGE` error. + ReadOffset int64 `protobuf:"varint,2,opt,name=read_offset,json=readOffset,proto3" json:"read_offset,omitempty"` + // The maximum number of `data` bytes the server is allowed to return. The + // server will return bytes starting from the tail of the file. 
+ // + // A `read_limit` of zero indicates that there is no limit, and a negative + // `read_limit` will cause an error. + ReadLimit int64 `protobuf:"varint,3,opt,name=read_limit,json=readLimit,proto3" json:"read_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFileTailRequest) Reset() { *m = GetFileTailRequest{} } +func (m *GetFileTailRequest) String() string { return proto.CompactTextString(m) } +func (*GetFileTailRequest) ProtoMessage() {} +func (*GetFileTailRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b, []int{2} +} +func (m *GetFileTailRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFileTailRequest.Unmarshal(m, b) +} +func (m *GetFileTailRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFileTailRequest.Marshal(b, m, deterministic) +} +func (dst *GetFileTailRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFileTailRequest.Merge(dst, src) +} +func (m *GetFileTailRequest) XXX_Size() int { + return xxx_messageInfo_GetFileTailRequest.Size(m) +} +func (m *GetFileTailRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFileTailRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFileTailRequest proto.InternalMessageInfo + +func (m *GetFileTailRequest) GetUri() string { + if m != nil { + return m.Uri + } + return "" +} + +func (m *GetFileTailRequest) GetReadOffset() int64 { + if m != nil { + return m.ReadOffset + } + return 0 +} + +func (m *GetFileTailRequest) GetReadLimit() int64 { + if m != nil { + return m.ReadLimit + } + return 0 +} + +// Response object for GetFileTail +type GetFileTailResponse struct { + // The file data, encoded with UTF-8. 
+ Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFileTailResponse) Reset() { *m = GetFileTailResponse{} } +func (m *GetFileTailResponse) String() string { return proto.CompactTextString(m) } +func (*GetFileTailResponse) ProtoMessage() {} +func (*GetFileTailResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b, []int{3} +} +func (m *GetFileTailResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFileTailResponse.Unmarshal(m, b) +} +func (m *GetFileTailResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFileTailResponse.Marshal(b, m, deterministic) +} +func (dst *GetFileTailResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFileTailResponse.Merge(dst, src) +} +func (m *GetFileTailResponse) XXX_Size() int { + return xxx_messageInfo_GetFileTailResponse.Size(m) +} +func (m *GetFileTailResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetFileTailResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFileTailResponse proto.InternalMessageInfo + +func (m *GetFileTailResponse) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func init() { + proto.RegisterType((*GetFileRequest)(nil), "google.devtools.resultstore.v2.GetFileRequest") + proto.RegisterType((*GetFileResponse)(nil), "google.devtools.resultstore.v2.GetFileResponse") + proto.RegisterType((*GetFileTailRequest)(nil), "google.devtools.resultstore.v2.GetFileTailRequest") + proto.RegisterType((*GetFileTailResponse)(nil), "google.devtools.resultstore.v2.GetFileTailResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ResultStoreFileDownloadClient is the client API for ResultStoreFileDownload service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ResultStoreFileDownloadClient interface { + // Retrieves the File with the given uri. + // returns a stream of bytes to be stitched together in order. + // + // An error will be reported in the following cases: + // - If the File is not found. + // - If the given File uri is badly formatted. + GetFile(ctx context.Context, in *GetFileRequest, opts ...grpc.CallOption) (ResultStoreFileDownload_GetFileClient, error) + // Retrieves the tail of a File with the given uri. + // + // An error will be reported in the following cases: + // - If the File is not found. + // - If the given File uri is badly formatted. 
+ GetFileTail(ctx context.Context, in *GetFileTailRequest, opts ...grpc.CallOption) (*GetFileTailResponse, error) +} + +type resultStoreFileDownloadClient struct { + cc *grpc.ClientConn +} + +func NewResultStoreFileDownloadClient(cc *grpc.ClientConn) ResultStoreFileDownloadClient { + return &resultStoreFileDownloadClient{cc} +} + +func (c *resultStoreFileDownloadClient) GetFile(ctx context.Context, in *GetFileRequest, opts ...grpc.CallOption) (ResultStoreFileDownload_GetFileClient, error) { + stream, err := c.cc.NewStream(ctx, &_ResultStoreFileDownload_serviceDesc.Streams[0], "/google.devtools.resultstore.v2.ResultStoreFileDownload/GetFile", opts...) + if err != nil { + return nil, err + } + x := &resultStoreFileDownloadGetFileClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type ResultStoreFileDownload_GetFileClient interface { + Recv() (*GetFileResponse, error) + grpc.ClientStream +} + +type resultStoreFileDownloadGetFileClient struct { + grpc.ClientStream +} + +func (x *resultStoreFileDownloadGetFileClient) Recv() (*GetFileResponse, error) { + m := new(GetFileResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *resultStoreFileDownloadClient) GetFileTail(ctx context.Context, in *GetFileTailRequest, opts ...grpc.CallOption) (*GetFileTailResponse, error) { + out := new(GetFileTailResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreFileDownload/GetFileTail", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ResultStoreFileDownloadServer is the server API for ResultStoreFileDownload service. +type ResultStoreFileDownloadServer interface { + // Retrieves the File with the given uri. + // returns a stream of bytes to be stitched together in order. + // + // An error will be reported in the following cases: + // - If the File is not found. + // - If the given File uri is badly formatted. + GetFile(*GetFileRequest, ResultStoreFileDownload_GetFileServer) error + // Retrieves the tail of a File with the given uri. + // + // An error will be reported in the following cases: + // - If the File is not found. + // - If the given File uri is badly formatted. 
+ GetFileTail(context.Context, *GetFileTailRequest) (*GetFileTailResponse, error) +} + +func RegisterResultStoreFileDownloadServer(s *grpc.Server, srv ResultStoreFileDownloadServer) { + s.RegisterService(&_ResultStoreFileDownload_serviceDesc, srv) +} + +func _ResultStoreFileDownload_GetFile_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetFileRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(ResultStoreFileDownloadServer).GetFile(m, &resultStoreFileDownloadGetFileServer{stream}) +} + +type ResultStoreFileDownload_GetFileServer interface { + Send(*GetFileResponse) error + grpc.ServerStream +} + +type resultStoreFileDownloadGetFileServer struct { + grpc.ServerStream +} + +func (x *resultStoreFileDownloadGetFileServer) Send(m *GetFileResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _ResultStoreFileDownload_GetFileTail_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFileTailRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreFileDownloadServer).GetFileTail(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreFileDownload/GetFileTail", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreFileDownloadServer).GetFileTail(ctx, req.(*GetFileTailRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ResultStoreFileDownload_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.resultstore.v2.ResultStoreFileDownload", + HandlerType: (*ResultStoreFileDownloadServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFileTail", + Handler: _ResultStoreFileDownload_GetFileTail_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetFile", + Handler: _ResultStoreFileDownload_GetFile_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/devtools/resultstore/v2/resultstore_file_download.proto", +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/resultstore_file_download.proto", fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b) +} + +var fileDescriptor_resultstore_file_download_ebcc3d1484fcd68b = []byte{ + // 384 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x53, 0xc1, 0x4a, 0xe3, 0x40, + 0x18, 0x26, 0xed, 0xb2, 0x4b, 0xa7, 0xcb, 0x6e, 0x99, 0x65, 0x69, 0x28, 0xdb, 0xb5, 0x04, 0x84, + 0xda, 0xc3, 0x8c, 0xa4, 0x47, 0xd1, 0x83, 0x88, 0x22, 0x08, 0x4a, 0xf4, 0xe4, 0x25, 0x4c, 0xcd, + 0x24, 0x0c, 0x4c, 0xf3, 0xa7, 0x99, 0x49, 0x3d, 0x48, 0x2f, 0x1e, 0x7c, 0x01, 0xf1, 0x65, 0x7c, + 0x0d, 0x5f, 0xc1, 0x07, 0x91, 0x99, 0x44, 0x49, 0x0f, 0xd5, 0x7a, 0xf0, 0x36, 0xff, 0x37, 0xff, + 0xf7, 0xfd, 0x5f, 0xbe, 0xfc, 0x83, 0xf6, 0x12, 0x80, 0x44, 0x72, 0x1a, 0xf1, 0xb9, 0x06, 0x90, + 0x8a, 0xe6, 0x5c, 0x15, 0x52, 0x2b, 0x0d, 0x39, 0xa7, 0x73, 0xbf, 0x5e, 0x86, 0xb1, 0x90, 0x3c, + 0x8c, 0xe0, 0x3a, 0x95, 0xc0, 0x22, 0x92, 0xe5, 0xa0, 0x01, 0xff, 0x2f, 0xf9, 0xe4, 0x95, 0x4f, + 0x6a, 0x04, 0x32, 0xf7, 0x7b, 0xff, 0x2a, 0x7d, 0x96, 0x09, 0xca, 0xd2, 0x14, 0x34, 0xd3, 0x02, + 0x52, 0x55, 0xb2, 0xbd, 0x09, 0xfa, 0x75, 0xc4, 0xf5, 0xa1, 0x90, 0x3c, 0xe0, 0xb3, 0x82, 0x2b, + 0x8d, 0x3b, 0xa8, 0x59, 0xe4, 0xc2, 0x75, 0x06, 0xce, 0xb0, 0x15, 0x98, 0x23, 0xde, 0x40, 0xed, + 0x9c, 0xb3, 0x28, 0x84, 0x38, 0x56, 0x5c, 0xbb, 0x8d, 
0x81, 0x33, 0x6c, 0x06, 0xc8, 0x40, 0xa7, + 0x16, 0xc1, 0x7d, 0x64, 0xab, 0x50, 0x8a, 0xa9, 0xd0, 0x6e, 0xd3, 0xde, 0xb7, 0x0c, 0x72, 0x62, + 0x00, 0x6f, 0x13, 0xfd, 0x7e, 0x9b, 0xa1, 0x32, 0x48, 0x15, 0xc7, 0x18, 0x7d, 0x8b, 0x98, 0x66, + 0x76, 0xca, 0xcf, 0xc0, 0x9e, 0xbd, 0x18, 0xe1, 0xaa, 0xed, 0x82, 0x09, 0xf9, 0x75, 0x76, 0xb6, + 0xd0, 0x9f, 0xa5, 0x39, 0xab, 0x2d, 0xf9, 0x8f, 0x0d, 0xd4, 0x0d, 0x6c, 0x9c, 0xe7, 0x26, 0x4e, + 0xc3, 0x39, 0xa8, 0xd2, 0xc7, 0x77, 0x0e, 0xfa, 0x51, 0xe9, 0x60, 0x42, 0xde, 0xff, 0x09, 0x64, + 0x39, 0xe3, 0x1e, 0x5d, 0xbb, 0xbf, 0x34, 0xe7, 0xb9, 0xb7, 0x4f, 0xcf, 0xf7, 0x0d, 0x8c, 0x3b, + 0x66, 0x23, 0x6e, 0x8a, 0x5c, 0xec, 0x9a, 0x55, 0xa0, 0xa3, 0xc5, 0xb6, 0x83, 0x1f, 0x1c, 0xd4, + 0xae, 0x7d, 0x10, 0xf6, 0xd7, 0x14, 0xaf, 0xa5, 0xdc, 0x1b, 0x7f, 0x8a, 0x53, 0x99, 0xea, 0x5b, + 0x53, 0x5d, 0xfc, 0x77, 0xd9, 0x94, 0x66, 0x42, 0xd2, 0xd1, 0x62, 0x7f, 0x86, 0xbc, 0x2b, 0x98, + 0x7e, 0x20, 0x7c, 0xe6, 0x5c, 0x1e, 0x57, 0x1d, 0x09, 0x48, 0x96, 0x26, 0x04, 0xf2, 0x84, 0x26, + 0x3c, 0xb5, 0xeb, 0x49, 0xcb, 0x2b, 0x96, 0x09, 0xb5, 0xea, 0x7d, 0xec, 0xd4, 0xca, 0xc9, 0x77, + 0xcb, 0x1a, 0xbf, 0x04, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x3d, 0x87, 0x5d, 0x54, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_upload.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_upload.pb.go new file mode 100644 index 0000000..8a3f950 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/resultstore_upload.pb.go @@ -0,0 +1,2197 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/resultstore_upload.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request passed into CreateInvocation +type CreateInvocationRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. If set, invocation_id must also be provided. + // Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The invocation ID. If left empty then a new unique ID will be + // assigned by the server. If populated, a RFC 4122-compliant v4 UUID is + // preferred, but v3 or v5 UUIDs are allowed too. 
+ InvocationId string `protobuf:"bytes,2,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The invocation to create. Its name field will be ignored, since the name + // will be derived from the id field above and assigned by the server. + Invocation *Invocation `protobuf:"bytes,3,opt,name=invocation,proto3" json:"invocation,omitempty"` + // This is a token to authorize upload access to this invocation. It must be + // set to a RFC 4122-compliant v3, v4, or v5 UUID. Once this is set in + // CreateInvocation, all other upload RPCs for that Invocation and any of its + // child resources must also include the exact same token, or they will be + // rejected. The generated token should be unique to this invocation, and it + // should be kept secret. + // + // The purpose of this field is to prevent other users and tools from + // clobbering your upload intentionally or accidentally. The standard way of + // using this token is to create a second v4 UUID when the invocation_id is + // created, and storing them together during the upload. Essentially, this is + // a "password" to the invocation. + AuthorizationToken string `protobuf:"bytes,4,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + // By default, Invocations are auto-finished if they are not modified for 24 + // hours. If you need auto-finish to happen sooner, set this field to the time + // you'd like auto-finish to occur. + AutoFinishTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=auto_finish_time,json=autoFinishTime,proto3" json:"auto_finish_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInvocationRequest) Reset() { *m = CreateInvocationRequest{} } +func (m *CreateInvocationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInvocationRequest) ProtoMessage() {} +func (*CreateInvocationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{0} +} +func (m *CreateInvocationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInvocationRequest.Unmarshal(m, b) +} +func (m *CreateInvocationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInvocationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInvocationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInvocationRequest.Merge(dst, src) +} +func (m *CreateInvocationRequest) XXX_Size() int { + return xxx_messageInfo_CreateInvocationRequest.Size(m) +} +func (m *CreateInvocationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInvocationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInvocationRequest proto.InternalMessageInfo + +func (m *CreateInvocationRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateInvocationRequest) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *CreateInvocationRequest) GetInvocation() *Invocation { + if m != nil { + return m.Invocation + } + return nil +} + +func (m *CreateInvocationRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +func (m *CreateInvocationRequest) GetAutoFinishTime() *timestamp.Timestamp { + if m != nil { + return m.AutoFinishTime + } + return nil +} + +// Request passed into UpdateInvocation +type 
UpdateInvocationRequest struct { + // Contains the name and the fields of the invocation to be updated. The + // name format must be: invocations/${INVOCATION_ID} + Invocation *Invocation `protobuf:"bytes,3,opt,name=invocation,proto3" json:"invocation,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. + AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInvocationRequest) Reset() { *m = UpdateInvocationRequest{} } +func (m *UpdateInvocationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateInvocationRequest) ProtoMessage() {} +func (*UpdateInvocationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{1} +} +func (m *UpdateInvocationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInvocationRequest.Unmarshal(m, b) +} +func (m *UpdateInvocationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInvocationRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateInvocationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInvocationRequest.Merge(dst, src) +} +func (m *UpdateInvocationRequest) XXX_Size() int { + return xxx_messageInfo_UpdateInvocationRequest.Size(m) +} +func (m *UpdateInvocationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInvocationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInvocationRequest proto.InternalMessageInfo + +func (m *UpdateInvocationRequest) GetInvocation() *Invocation { + if m != nil { + return m.Invocation + } + return nil +} + +func (m *UpdateInvocationRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateInvocationRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into FinishInvocation +type FinishInvocationRequest struct { + // The name of the invocation. Its format must be: + // invocations/${INVOCATION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,3,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishInvocationRequest) Reset() { *m = FinishInvocationRequest{} } +func (m *FinishInvocationRequest) String() string { return proto.CompactTextString(m) } +func (*FinishInvocationRequest) ProtoMessage() {} +func (*FinishInvocationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{2} +} +func (m *FinishInvocationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishInvocationRequest.Unmarshal(m, b) +} +func (m *FinishInvocationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishInvocationRequest.Marshal(b, m, deterministic) +} +func (dst *FinishInvocationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishInvocationRequest.Merge(dst, src) +} +func (m *FinishInvocationRequest) XXX_Size() int { + return xxx_messageInfo_FinishInvocationRequest.Size(m) +} +func (m *FinishInvocationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FinishInvocationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishInvocationRequest proto.InternalMessageInfo + +func (m *FinishInvocationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishInvocationRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Response returned from FinishInvocation +type FinishInvocationResponse struct { + // The name of the invocation. Its format will be: + // invocations/${INVOCATION_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Invocation. 
+ Id *Invocation_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishInvocationResponse) Reset() { *m = FinishInvocationResponse{} } +func (m *FinishInvocationResponse) String() string { return proto.CompactTextString(m) } +func (*FinishInvocationResponse) ProtoMessage() {} +func (*FinishInvocationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{3} +} +func (m *FinishInvocationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishInvocationResponse.Unmarshal(m, b) +} +func (m *FinishInvocationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishInvocationResponse.Marshal(b, m, deterministic) +} +func (dst *FinishInvocationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishInvocationResponse.Merge(dst, src) +} +func (m *FinishInvocationResponse) XXX_Size() int { + return xxx_messageInfo_FinishInvocationResponse.Size(m) +} +func (m *FinishInvocationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_FinishInvocationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishInvocationResponse proto.InternalMessageInfo + +func (m *FinishInvocationResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishInvocationResponse) GetId() *Invocation_Id { + if m != nil { + return m.Id + } + return nil +} + +// Request passed into CreateTarget +type CreateTargetRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The name of the parent invocation in which the target is created. + // Its format must be invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The target identifier. It can be any UTF-8 string up to 1024 bytes long + // except for the reserved id '-'. + TargetId string `protobuf:"bytes,3,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // The target to create. Its name field will be ignored, since the name will + // be derived from the id field above and assigned by the server. + Target *Target `protobuf:"bytes,4,opt,name=target,proto3" json:"target,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTargetRequest) Reset() { *m = CreateTargetRequest{} } +func (m *CreateTargetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTargetRequest) ProtoMessage() {} +func (*CreateTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{4} +} +func (m *CreateTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTargetRequest.Unmarshal(m, b) +} +func (m *CreateTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTargetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTargetRequest.Merge(dst, src) +} +func (m *CreateTargetRequest) XXX_Size() int { + return xxx_messageInfo_CreateTargetRequest.Size(m) +} +func (m *CreateTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTargetRequest proto.InternalMessageInfo + +func (m *CreateTargetRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateTargetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateTargetRequest) GetTargetId() string { + if m != nil { + return m.TargetId + } + return "" +} + +func (m *CreateTargetRequest) GetTarget() *Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *CreateTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into UpdateTarget +type UpdateTargetRequest struct { + // Contains the name and the fields of the target to be updated. The name + // format must be: invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Target *Target `protobuf:"bytes,3,opt,name=target,proto3" json:"target,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTargetRequest) Reset() { *m = UpdateTargetRequest{} } +func (m *UpdateTargetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTargetRequest) ProtoMessage() {} +func (*UpdateTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{5} +} +func (m *UpdateTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTargetRequest.Unmarshal(m, b) +} +func (m *UpdateTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTargetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTargetRequest.Merge(dst, src) +} +func (m *UpdateTargetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTargetRequest.Size(m) +} +func (m *UpdateTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTargetRequest proto.InternalMessageInfo + +func (m *UpdateTargetRequest) GetTarget() *Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *UpdateTargetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into FinishTarget +type FinishTargetRequest struct { + // The name of the target. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,3,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishTargetRequest) Reset() { *m = FinishTargetRequest{} } +func (m *FinishTargetRequest) String() string { return proto.CompactTextString(m) } +func (*FinishTargetRequest) ProtoMessage() {} +func (*FinishTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{6} +} +func (m *FinishTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishTargetRequest.Unmarshal(m, b) +} +func (m *FinishTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishTargetRequest.Marshal(b, m, deterministic) +} +func (dst *FinishTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishTargetRequest.Merge(dst, src) +} +func (m *FinishTargetRequest) XXX_Size() int { + return xxx_messageInfo_FinishTargetRequest.Size(m) +} +func (m *FinishTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FinishTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishTargetRequest proto.InternalMessageInfo + +func (m *FinishTargetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Response returned from FinishTarget +type FinishTargetResponse struct { + // The name of the target. Its format will be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Target. + Id *Target_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishTargetResponse) Reset() { *m = FinishTargetResponse{} } +func (m *FinishTargetResponse) String() string { return proto.CompactTextString(m) } +func (*FinishTargetResponse) ProtoMessage() {} +func (*FinishTargetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{7} +} +func (m *FinishTargetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishTargetResponse.Unmarshal(m, b) +} +func (m *FinishTargetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishTargetResponse.Marshal(b, m, deterministic) +} +func (dst *FinishTargetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishTargetResponse.Merge(dst, src) +} +func (m *FinishTargetResponse) XXX_Size() int { + return xxx_messageInfo_FinishTargetResponse.Size(m) +} +func (m *FinishTargetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_FinishTargetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishTargetResponse proto.InternalMessageInfo + +func (m *FinishTargetResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishTargetResponse) GetId() *Target_Id { + if m != nil { + return m.Id + } + return nil +} + +// Request passed into CreateConfiguredTarget +type CreateConfiguredTargetRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). 
Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The name of the parent target in which the configured target is created. + // Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The configuration identifier. This must match the ID of an existing + // Configuration under this Invocation. Cannot be the reserved id '-'. + ConfigId string `protobuf:"bytes,3,opt,name=config_id,json=configId,proto3" json:"config_id,omitempty"` + // The configured target to create. Its name field will be ignored, since the + // name will be derived from the id field above and assigned by the server. + ConfiguredTarget *ConfiguredTarget `protobuf:"bytes,4,opt,name=configured_target,json=configuredTarget,proto3" json:"configured_target,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. + AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateConfiguredTargetRequest) Reset() { *m = CreateConfiguredTargetRequest{} } +func (m *CreateConfiguredTargetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateConfiguredTargetRequest) ProtoMessage() {} +func (*CreateConfiguredTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{8} +} +func (m *CreateConfiguredTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateConfiguredTargetRequest.Unmarshal(m, b) +} +func (m *CreateConfiguredTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateConfiguredTargetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateConfiguredTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateConfiguredTargetRequest.Merge(dst, src) +} +func (m *CreateConfiguredTargetRequest) XXX_Size() int { + return xxx_messageInfo_CreateConfiguredTargetRequest.Size(m) +} +func (m *CreateConfiguredTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateConfiguredTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateConfiguredTargetRequest proto.InternalMessageInfo + +func (m *CreateConfiguredTargetRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateConfiguredTargetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateConfiguredTargetRequest) GetConfigId() string { + if m != nil { + return m.ConfigId + } + return "" +} + +func (m *CreateConfiguredTargetRequest) GetConfiguredTarget() *ConfiguredTarget { + if m != nil { + return m.ConfiguredTarget + } + return nil +} + +func (m *CreateConfiguredTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into UpdateConfiguredTarget +type UpdateConfiguredTargetRequest struct { + // Contains the name and the fields of the configured target to be updated. 
+ // The name format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID} + ConfiguredTarget *ConfiguredTarget `protobuf:"bytes,3,opt,name=configured_target,json=configuredTarget,proto3" json:"configured_target,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. + AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateConfiguredTargetRequest) Reset() { *m = UpdateConfiguredTargetRequest{} } +func (m *UpdateConfiguredTargetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateConfiguredTargetRequest) ProtoMessage() {} +func (*UpdateConfiguredTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{9} +} +func (m *UpdateConfiguredTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateConfiguredTargetRequest.Unmarshal(m, b) +} +func (m *UpdateConfiguredTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateConfiguredTargetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateConfiguredTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateConfiguredTargetRequest.Merge(dst, src) +} +func (m *UpdateConfiguredTargetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateConfiguredTargetRequest.Size(m) +} +func (m *UpdateConfiguredTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateConfiguredTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateConfiguredTargetRequest proto.InternalMessageInfo + +func (m *UpdateConfiguredTargetRequest) GetConfiguredTarget() *ConfiguredTarget { + if m != nil { + return m.ConfiguredTarget + } + return nil +} + +func (m *UpdateConfiguredTargetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateConfiguredTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into FinishConfiguredTarget +type FinishConfiguredTargetRequest struct { + // The name of the configured target. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,3,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishConfiguredTargetRequest) Reset() { *m = FinishConfiguredTargetRequest{} } +func (m *FinishConfiguredTargetRequest) String() string { return proto.CompactTextString(m) } +func (*FinishConfiguredTargetRequest) ProtoMessage() {} +func (*FinishConfiguredTargetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{10} +} +func (m *FinishConfiguredTargetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishConfiguredTargetRequest.Unmarshal(m, b) +} +func (m *FinishConfiguredTargetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishConfiguredTargetRequest.Marshal(b, m, deterministic) +} +func (dst *FinishConfiguredTargetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishConfiguredTargetRequest.Merge(dst, src) +} +func (m *FinishConfiguredTargetRequest) XXX_Size() int { + return xxx_messageInfo_FinishConfiguredTargetRequest.Size(m) +} +func (m *FinishConfiguredTargetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_FinishConfiguredTargetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishConfiguredTargetRequest proto.InternalMessageInfo + +func (m *FinishConfiguredTargetRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishConfiguredTargetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Response returned from FinishConfiguredTarget +type FinishConfiguredTargetResponse struct { + // The name of the configured target. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the ConfiguredTarget. 
+ Id *ConfiguredTarget_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FinishConfiguredTargetResponse) Reset() { *m = FinishConfiguredTargetResponse{} } +func (m *FinishConfiguredTargetResponse) String() string { return proto.CompactTextString(m) } +func (*FinishConfiguredTargetResponse) ProtoMessage() {} +func (*FinishConfiguredTargetResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{11} +} +func (m *FinishConfiguredTargetResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FinishConfiguredTargetResponse.Unmarshal(m, b) +} +func (m *FinishConfiguredTargetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FinishConfiguredTargetResponse.Marshal(b, m, deterministic) +} +func (dst *FinishConfiguredTargetResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_FinishConfiguredTargetResponse.Merge(dst, src) +} +func (m *FinishConfiguredTargetResponse) XXX_Size() int { + return xxx_messageInfo_FinishConfiguredTargetResponse.Size(m) +} +func (m *FinishConfiguredTargetResponse) XXX_DiscardUnknown() { + xxx_messageInfo_FinishConfiguredTargetResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_FinishConfiguredTargetResponse proto.InternalMessageInfo + +func (m *FinishConfiguredTargetResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *FinishConfiguredTargetResponse) GetId() *ConfiguredTarget_Id { + if m != nil { + return m.Id + } + return nil +} + +// Request passed into CreateAction +type CreateActionRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The name of the parent configured target in which the action is created. + // Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID} + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The action identifier. It can be any UTF-8 string up to 512 bytes long, + // except for the reserved id '-'. + ActionId string `protobuf:"bytes,3,opt,name=action_id,json=actionId,proto3" json:"action_id,omitempty"` + // The action to create. Its name field will be ignored, since the + // name will be derived from the id field above and assigned by the server. + Action *Action `protobuf:"bytes,4,opt,name=action,proto3" json:"action,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateActionRequest) Reset() { *m = CreateActionRequest{} } +func (m *CreateActionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateActionRequest) ProtoMessage() {} +func (*CreateActionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{12} +} +func (m *CreateActionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateActionRequest.Unmarshal(m, b) +} +func (m *CreateActionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateActionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateActionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateActionRequest.Merge(dst, src) +} +func (m *CreateActionRequest) XXX_Size() int { + return xxx_messageInfo_CreateActionRequest.Size(m) +} +func (m *CreateActionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateActionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateActionRequest proto.InternalMessageInfo + +func (m *CreateActionRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateActionRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateActionRequest) GetActionId() string { + if m != nil { + return m.ActionId + } + return "" +} + +func (m *CreateActionRequest) GetAction() *Action { + if m != nil { + return m.Action + } + return nil +} + +func (m *CreateActionRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into UpdateAction +type UpdateActionRequest struct { + // Contains the name and the fields of the action to be updated. The + // name format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID}/configuredTargets/${CONFIG_ID}/actions/${ACTION_ID} + Action *Action `protobuf:"bytes,3,opt,name=action,proto3" json:"action,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateActionRequest) Reset() { *m = UpdateActionRequest{} } +func (m *UpdateActionRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateActionRequest) ProtoMessage() {} +func (*UpdateActionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{13} +} +func (m *UpdateActionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateActionRequest.Unmarshal(m, b) +} +func (m *UpdateActionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateActionRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateActionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateActionRequest.Merge(dst, src) +} +func (m *UpdateActionRequest) XXX_Size() int { + return xxx_messageInfo_UpdateActionRequest.Size(m) +} +func (m *UpdateActionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateActionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateActionRequest proto.InternalMessageInfo + +func (m *UpdateActionRequest) GetAction() *Action { + if m != nil { + return m.Action + } + return nil +} + +func (m *UpdateActionRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateActionRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into CreateConfiguration +type CreateConfigurationRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The name of the parent invocation in which the configuration is created. + // Its format must be invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The configuration identifier. It can be any UTF-8 string up to 256 bytes + // long. The configuration ID of "default" should be preferred for the default + // configuration in a single-config invocation. Cannot be the reserved id '-'. + ConfigId string `protobuf:"bytes,3,opt,name=config_id,json=configId,proto3" json:"config_id,omitempty"` + // The configuration to create. Its name field will be ignored, since the name + // will be derived from the id field above and assigned by the server. + Configuration *Configuration `protobuf:"bytes,4,opt,name=configuration,proto3" json:"configuration,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateConfigurationRequest) Reset() { *m = CreateConfigurationRequest{} } +func (m *CreateConfigurationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateConfigurationRequest) ProtoMessage() {} +func (*CreateConfigurationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{14} +} +func (m *CreateConfigurationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateConfigurationRequest.Unmarshal(m, b) +} +func (m *CreateConfigurationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateConfigurationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateConfigurationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateConfigurationRequest.Merge(dst, src) +} +func (m *CreateConfigurationRequest) XXX_Size() int { + return xxx_messageInfo_CreateConfigurationRequest.Size(m) +} +func (m *CreateConfigurationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateConfigurationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateConfigurationRequest proto.InternalMessageInfo + +func (m *CreateConfigurationRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateConfigurationRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateConfigurationRequest) GetConfigId() string { + if m != nil { + return m.ConfigId + } + return "" +} + +func (m *CreateConfigurationRequest) GetConfiguration() *Configuration { + if m != nil { + return m.Configuration + } + return nil +} + +func (m *CreateConfigurationRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into UpdateConfiguration +type UpdateConfigurationRequest struct { + // Contains the name and fields of the configuration to be updated. The name + // format must be: invocations/${INVOCATION_ID}/configs/${CONFIG_ID} + Configuration *Configuration `protobuf:"bytes,3,opt,name=configuration,proto3" json:"configuration,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateConfigurationRequest) Reset() { *m = UpdateConfigurationRequest{} } +func (m *UpdateConfigurationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateConfigurationRequest) ProtoMessage() {} +func (*UpdateConfigurationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{15} +} +func (m *UpdateConfigurationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateConfigurationRequest.Unmarshal(m, b) +} +func (m *UpdateConfigurationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateConfigurationRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateConfigurationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateConfigurationRequest.Merge(dst, src) +} +func (m *UpdateConfigurationRequest) XXX_Size() int { + return xxx_messageInfo_UpdateConfigurationRequest.Size(m) +} +func (m *UpdateConfigurationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateConfigurationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateConfigurationRequest proto.InternalMessageInfo + +func (m *UpdateConfigurationRequest) GetConfiguration() *Configuration { + if m != nil { + return m.Configuration + } + return nil +} + +func (m *UpdateConfigurationRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateConfigurationRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into CreateFileSet +type CreateFileSetRequest struct { + // A unique identifier for this request. Must be set to a different value for + // each request that affects a given resource (eg. a random UUID). Required + // for the operation to be idempotent. This is achieved by ignoring this + // request if the last successful operation on the resource had the same + // request ID. Restricted to 36 utf-8 bytes. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // The name of the parent invocation in which the file set is created. + // Its format must be invocations/${INVOCATION_ID} + Parent string `protobuf:"bytes,2,opt,name=parent,proto3" json:"parent,omitempty"` + // The file set identifier. It can be any UTF-8 string up to 256 bytes long. + FileSetId string `protobuf:"bytes,3,opt,name=file_set_id,json=fileSetId,proto3" json:"file_set_id,omitempty"` + // The file set to create. Its name field will be ignored, since the name will + // be derived from the id field above and assigned by the server. + FileSet *FileSet `protobuf:"bytes,4,opt,name=file_set,json=fileSet,proto3" json:"file_set,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,5,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateFileSetRequest) Reset() { *m = CreateFileSetRequest{} } +func (m *CreateFileSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateFileSetRequest) ProtoMessage() {} +func (*CreateFileSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{16} +} +func (m *CreateFileSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateFileSetRequest.Unmarshal(m, b) +} +func (m *CreateFileSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateFileSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateFileSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateFileSetRequest.Merge(dst, src) +} +func (m *CreateFileSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateFileSetRequest.Size(m) +} +func (m *CreateFileSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateFileSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateFileSetRequest proto.InternalMessageInfo + +func (m *CreateFileSetRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *CreateFileSetRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateFileSetRequest) GetFileSetId() string { + if m != nil { + return m.FileSetId + } + return "" +} + +func (m *CreateFileSetRequest) GetFileSet() *FileSet { + if m != nil { + return m.FileSet + } + return nil +} + +func (m *CreateFileSetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +// Request passed into UpdateFileSet +type UpdateFileSetRequest struct { + // Contains the name and fields of the file set to be updated. The name format + // must be: invocations/${INVOCATION_ID}/fileSets/${FILE_SET_ID} + FileSet *FileSet `protobuf:"bytes,1,opt,name=file_set,json=fileSet,proto3" json:"file_set,omitempty"` + // Indicates which fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // This is a token to authorize access to this invocation. It must be set to + // the same value that was provided in the CreateInvocationRequest. 
+ AuthorizationToken string `protobuf:"bytes,3,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFileSetRequest) Reset() { *m = UpdateFileSetRequest{} } +func (m *UpdateFileSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFileSetRequest) ProtoMessage() {} +func (*UpdateFileSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_resultstore_upload_c44d7b55c8a904a0, []int{17} +} +func (m *UpdateFileSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFileSetRequest.Unmarshal(m, b) +} +func (m *UpdateFileSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFileSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFileSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFileSetRequest.Merge(dst, src) +} +func (m *UpdateFileSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFileSetRequest.Size(m) +} +func (m *UpdateFileSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFileSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFileSetRequest proto.InternalMessageInfo + +func (m *UpdateFileSetRequest) GetFileSet() *FileSet { + if m != nil { + return m.FileSet + } + return nil +} + +func (m *UpdateFileSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateFileSetRequest) GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +func init() { + proto.RegisterType((*CreateInvocationRequest)(nil), "google.devtools.resultstore.v2.CreateInvocationRequest") + proto.RegisterType((*UpdateInvocationRequest)(nil), "google.devtools.resultstore.v2.UpdateInvocationRequest") + proto.RegisterType((*FinishInvocationRequest)(nil), "google.devtools.resultstore.v2.FinishInvocationRequest") + proto.RegisterType((*FinishInvocationResponse)(nil), "google.devtools.resultstore.v2.FinishInvocationResponse") + proto.RegisterType((*CreateTargetRequest)(nil), "google.devtools.resultstore.v2.CreateTargetRequest") + proto.RegisterType((*UpdateTargetRequest)(nil), "google.devtools.resultstore.v2.UpdateTargetRequest") + proto.RegisterType((*FinishTargetRequest)(nil), "google.devtools.resultstore.v2.FinishTargetRequest") + proto.RegisterType((*FinishTargetResponse)(nil), "google.devtools.resultstore.v2.FinishTargetResponse") + proto.RegisterType((*CreateConfiguredTargetRequest)(nil), "google.devtools.resultstore.v2.CreateConfiguredTargetRequest") + proto.RegisterType((*UpdateConfiguredTargetRequest)(nil), "google.devtools.resultstore.v2.UpdateConfiguredTargetRequest") + proto.RegisterType((*FinishConfiguredTargetRequest)(nil), "google.devtools.resultstore.v2.FinishConfiguredTargetRequest") + proto.RegisterType((*FinishConfiguredTargetResponse)(nil), "google.devtools.resultstore.v2.FinishConfiguredTargetResponse") + proto.RegisterType((*CreateActionRequest)(nil), "google.devtools.resultstore.v2.CreateActionRequest") + proto.RegisterType((*UpdateActionRequest)(nil), "google.devtools.resultstore.v2.UpdateActionRequest") + proto.RegisterType((*CreateConfigurationRequest)(nil), "google.devtools.resultstore.v2.CreateConfigurationRequest") + proto.RegisterType((*UpdateConfigurationRequest)(nil), "google.devtools.resultstore.v2.UpdateConfigurationRequest") + 
proto.RegisterType((*CreateFileSetRequest)(nil), "google.devtools.resultstore.v2.CreateFileSetRequest") + proto.RegisterType((*UpdateFileSetRequest)(nil), "google.devtools.resultstore.v2.UpdateFileSetRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ResultStoreUploadClient is the client API for ResultStoreUpload service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ResultStoreUploadClient interface { + // Creates the given invocation. Generally, a unique ID will be assigned to + // the invocation's name field by the server. This is not an implicitly + // idempotent API, so a request id is required to make it idempotent. + // + // Returns an empty Invocation proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If an invocation with the same ID already exists. + CreateInvocation(ctx context.Context, in *CreateInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) + // Applies a standard update to the invocation identified by the given proto's + // name. For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. Fields that match the mask but aren't populated in the given + // invocation are cleared. This is an implicitly idempotent API. + // + // Returns an empty Invocation proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the invocation does not exist. + // - If the invocation is finished. + // - If no field mask was given. + UpdateInvocation(ctx context.Context, in *UpdateInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) + // Declares the invocation with the given name as finished and immutable. + // This is an implicitly idempotent API. + // + // If an Invocation is not updated for 24 hours, some time after that + // this will be called automatically. + // + // An error will be reported in the following cases: + // - If the invocation does not exist. + FinishInvocation(ctx context.Context, in *FinishInvocationRequest, opts ...grpc.CallOption) (*FinishInvocationResponse, error) + // Creates the given target under the given parent invocation. The given + // target ID is URL encoded, converted to the full resource name, and assigned + // to the target's name field. This is not an implicitly idempotent API, so a + // request id is required to make it idempotent. + // + // Returns an empty Target proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no target ID is provided. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + // - If a target with the same name already exists. + CreateTarget(ctx context.Context, in *CreateTargetRequest, opts ...grpc.CallOption) (*Target, error) + // Applies a standard update to the target identified by the given proto's + // name. For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. 
Fields that match the mask but aren't populated in the given + // target are cleared. This is an implicitly idempotent API. + // + // Returns an empty Target proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the target does not exist. + // - If the target or parent invocation is finished. + // - If no field mask was given. + UpdateTarget(ctx context.Context, in *UpdateTargetRequest, opts ...grpc.CallOption) (*Target, error) + // Declares the target with the given name as finished and immutable. + // This is an implicitly idempotent API. + // + // An error will be reported in the following cases: + // - If the target does not exist. + FinishTarget(ctx context.Context, in *FinishTargetRequest, opts ...grpc.CallOption) (*FinishTargetResponse, error) + // Creates the given configured target under the given parent target. + // The given configured target ID is URL encoded, converted to the full + // resource name, and assigned to the configured target's name field. + // This is not an implicitly idempotent API, so a request id is required + // to make it idempotent. + // + // Returns an empty ConfiguredTarget proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If no config ID is provided. + // - If a configured target with the same ID already exists. + // - If the parent target does not exist. + // - If the parent target or invocation is finished. + CreateConfiguredTarget(ctx context.Context, in *CreateConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) + // Applies a standard update to the configured target identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given + // field mask paths. Fields that match the mask but aren't populated in the + // given configured target are cleared. This is an implicitly idempotent API. + // + // Returns an empty ConfiguredTarget proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the configured target does not exist. + // - If the parent target or invocation is finished. + // - If no field mask was given. + UpdateConfiguredTarget(ctx context.Context, in *UpdateConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) + // Declares the configured target with the given name as finished and + // immutable. This is an implicitly idempotent API. + // + // An error will be reported in the following cases: + // - If the configured target does not exist. + FinishConfiguredTarget(ctx context.Context, in *FinishConfiguredTargetRequest, opts ...grpc.CallOption) (*FinishConfiguredTargetResponse, error) + // Creates the given action under the given configured target. The given + // action ID is URL encoded, converted to the full resource name, and + // assigned to the action's name field. This is not an implicitly + // idempotent API, so a request id is required to make it idempotent. + // + // Returns an empty Action proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no action ID provided. + // - If the parent configured target does not exist. + // - If the parent target or invocation is finished. + // - If an action with the same name already exists. 
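+	//
+	// Purely as an illustrative sketch (not part of the generated code), a caller
+	// holding a dialed *grpc.ClientConn named conn might invoke this method like:
+	//
+	//	client := NewResultStoreUploadClient(conn)
+	//	action, err := client.CreateAction(ctx, &CreateActionRequest{
+	//		// request id, parent configured target name, action ID and Action
+	//		// payload go here; see the field docs on CreateActionRequest.
+	//	})
+	//	if err != nil {
+	//		// handle the RPC error
+	//	}
+	//	_ = action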
+ CreateAction(ctx context.Context, in *CreateActionRequest, opts ...grpc.CallOption) (*Action, error) + // Applies a standard update to the action identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given + // field mask paths. Fields that match the mask but aren't populated in the + // given action are cleared. This is an implicitly idempotent API. + // + // Returns an empty Action proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the action does not exist. + // - If the parent target or invocation is finished. + // - If no field mask was given. + UpdateAction(ctx context.Context, in *UpdateActionRequest, opts ...grpc.CallOption) (*Action, error) + // Creates the given configuration under the given parent invocation. The + // given configuration ID is URL encoded, converted to the full resource name, + // and assigned to the configuration's name field. The configuration ID of + // "default" should be preferred for the default configuration in a + // single-config invocation. This is not an implicitly idempotent API, so a + // request id is required to make it idempotent. + // + // Returns an empty Configuration proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If no configuration ID is provided. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + // - If a configuration with the same name already exists. + CreateConfiguration(ctx context.Context, in *CreateConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) + // Applies a standard update to the configuration identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given field + // mask paths. Fields that match the mask but aren't populated in the given + // configuration are cleared. This is an implicitly idempotent API. + // + // Returns an empty Configuration proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the configuration does not exist. + // - If the parent invocation is finished. + // - If no field mask was given. + // - If a given field mask path is not valid. + UpdateConfiguration(ctx context.Context, in *UpdateConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) + // Creates the given file set under the given parent invocation. The given + // file set ID is URL encoded, converted to the full resource name, and + // assigned to the file set's name field. This is not an implicitly idempotent + // API, so a request id is required to make it idempotent. + // + // Returns an empty FileSet proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no file set ID is provided. + // - If a file set with the same name already exists. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + CreateFileSet(ctx context.Context, in *CreateFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) + // Applies a standard update to the file set identified by the given proto's + // name. 
For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. Fields that match the mask but aren't populated in the given + // configuration are cleared. This is an implicitly idempotent API. + // + // Returns an empty FileSet proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the file set does not exist. + // - If the parent invocation is finished. + // - If no field mask was given. + // - If a given field mask path is not valid. + UpdateFileSet(ctx context.Context, in *UpdateFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) +} + +type resultStoreUploadClient struct { + cc *grpc.ClientConn +} + +func NewResultStoreUploadClient(cc *grpc.ClientConn) ResultStoreUploadClient { + return &resultStoreUploadClient{cc} +} + +func (c *resultStoreUploadClient) CreateInvocation(ctx context.Context, in *CreateInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) { + out := new(Invocation) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateInvocation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateInvocation(ctx context.Context, in *UpdateInvocationRequest, opts ...grpc.CallOption) (*Invocation, error) { + out := new(Invocation) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateInvocation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) FinishInvocation(ctx context.Context, in *FinishInvocationRequest, opts ...grpc.CallOption) (*FinishInvocationResponse, error) { + out := new(FinishInvocationResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/FinishInvocation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) CreateTarget(ctx context.Context, in *CreateTargetRequest, opts ...grpc.CallOption) (*Target, error) { + out := new(Target) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateTarget(ctx context.Context, in *UpdateTargetRequest, opts ...grpc.CallOption) (*Target, error) { + out := new(Target) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) FinishTarget(ctx context.Context, in *FinishTargetRequest, opts ...grpc.CallOption) (*FinishTargetResponse, error) { + out := new(FinishTargetResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/FinishTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) CreateConfiguredTarget(ctx context.Context, in *CreateConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) { + out := new(ConfiguredTarget) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateConfiguredTarget", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateConfiguredTarget(ctx context.Context, in *UpdateConfiguredTargetRequest, opts ...grpc.CallOption) (*ConfiguredTarget, error) { + out := new(ConfiguredTarget) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateConfiguredTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) FinishConfiguredTarget(ctx context.Context, in *FinishConfiguredTargetRequest, opts ...grpc.CallOption) (*FinishConfiguredTargetResponse, error) { + out := new(FinishConfiguredTargetResponse) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/FinishConfiguredTarget", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) CreateAction(ctx context.Context, in *CreateActionRequest, opts ...grpc.CallOption) (*Action, error) { + out := new(Action) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateAction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateAction(ctx context.Context, in *UpdateActionRequest, opts ...grpc.CallOption) (*Action, error) { + out := new(Action) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateAction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) CreateConfiguration(ctx context.Context, in *CreateConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) { + out := new(Configuration) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateConfiguration", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateConfiguration(ctx context.Context, in *UpdateConfigurationRequest, opts ...grpc.CallOption) (*Configuration, error) { + out := new(Configuration) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateConfiguration", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) CreateFileSet(ctx context.Context, in *CreateFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) { + out := new(FileSet) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/CreateFileSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *resultStoreUploadClient) UpdateFileSet(ctx context.Context, in *UpdateFileSetRequest, opts ...grpc.CallOption) (*FileSet, error) { + out := new(FileSet) + err := c.cc.Invoke(ctx, "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateFileSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ResultStoreUploadServer is the server API for ResultStoreUpload service. +type ResultStoreUploadServer interface { + // Creates the given invocation. Generally, a unique ID will be assigned to + // the invocation's name field by the server. This is not an implicitly + // idempotent API, so a request id is required to make it idempotent. + // + // Returns an empty Invocation proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If an invocation with the same ID already exists. 
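+	//
+	// A minimal sketch of a server-side implementation (illustrative only; the
+	// uploadServer type and its storage are assumptions, not generated code):
+	//
+	//	func (s *uploadServer) CreateInvocation(ctx context.Context, req *CreateInvocationRequest) (*Invocation, error) {
+	//		// persist the invocation keyed by the request id, then return a
+	//		// proto with only the name and ID fields populated, as documented.
+	//		return &Invocation{ /* name and ID */ }, nil
+	//	}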
+ CreateInvocation(context.Context, *CreateInvocationRequest) (*Invocation, error) + // Applies a standard update to the invocation identified by the given proto's + // name. For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. Fields that match the mask but aren't populated in the given + // invocation are cleared. This is an implicitly idempotent API. + // + // Returns an empty Invocation proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the invocation does not exist. + // - If the invocation is finished. + // - If no field mask was given. + UpdateInvocation(context.Context, *UpdateInvocationRequest) (*Invocation, error) + // Declares the invocation with the given name as finished and immutable. + // This is an implicitly idempotent API. + // + // If an Invocation is not updated for 24 hours, some time after that + // this will be called automatically. + // + // An error will be reported in the following cases: + // - If the invocation does not exist. + FinishInvocation(context.Context, *FinishInvocationRequest) (*FinishInvocationResponse, error) + // Creates the given target under the given parent invocation. The given + // target ID is URL encoded, converted to the full resource name, and assigned + // to the target's name field. This is not an implicitly idempotent API, so a + // request id is required to make it idempotent. + // + // Returns an empty Target proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no target ID is provided. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + // - If a target with the same name already exists. + CreateTarget(context.Context, *CreateTargetRequest) (*Target, error) + // Applies a standard update to the target identified by the given proto's + // name. For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. Fields that match the mask but aren't populated in the given + // target are cleared. This is an implicitly idempotent API. + // + // Returns an empty Target proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the target does not exist. + // - If the target or parent invocation is finished. + // - If no field mask was given. + UpdateTarget(context.Context, *UpdateTargetRequest) (*Target, error) + // Declares the target with the given name as finished and immutable. + // This is an implicitly idempotent API. + // + // An error will be reported in the following cases: + // - If the target does not exist. + FinishTarget(context.Context, *FinishTargetRequest) (*FinishTargetResponse, error) + // Creates the given configured target under the given parent target. + // The given configured target ID is URL encoded, converted to the full + // resource name, and assigned to the configured target's name field. + // This is not an implicitly idempotent API, so a request id is required + // to make it idempotent. + // + // Returns an empty ConfiguredTarget proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If no config ID is provided. + // - If a configured target with the same ID already exists. 
+ // - If the parent target does not exist. + // - If the parent target or invocation is finished. + CreateConfiguredTarget(context.Context, *CreateConfiguredTargetRequest) (*ConfiguredTarget, error) + // Applies a standard update to the configured target identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given + // field mask paths. Fields that match the mask but aren't populated in the + // given configured target are cleared. This is an implicitly idempotent API. + // + // Returns an empty ConfiguredTarget proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the configured target does not exist. + // - If the parent target or invocation is finished. + // - If no field mask was given. + UpdateConfiguredTarget(context.Context, *UpdateConfiguredTargetRequest) (*ConfiguredTarget, error) + // Declares the configured target with the given name as finished and + // immutable. This is an implicitly idempotent API. + // + // An error will be reported in the following cases: + // - If the configured target does not exist. + FinishConfiguredTarget(context.Context, *FinishConfiguredTargetRequest) (*FinishConfiguredTargetResponse, error) + // Creates the given action under the given configured target. The given + // action ID is URL encoded, converted to the full resource name, and + // assigned to the action's name field. This is not an implicitly + // idempotent API, so a request id is required to make it idempotent. + // + // Returns an empty Action proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no action ID provided. + // - If the parent configured target does not exist. + // - If the parent target or invocation is finished. + // - If an action with the same name already exists. + CreateAction(context.Context, *CreateActionRequest) (*Action, error) + // Applies a standard update to the action identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given + // field mask paths. Fields that match the mask but aren't populated in the + // given action are cleared. This is an implicitly idempotent API. + // + // Returns an empty Action proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the action does not exist. + // - If the parent target or invocation is finished. + // - If no field mask was given. + UpdateAction(context.Context, *UpdateActionRequest) (*Action, error) + // Creates the given configuration under the given parent invocation. The + // given configuration ID is URL encoded, converted to the full resource name, + // and assigned to the configuration's name field. The configuration ID of + // "default" should be preferred for the default configuration in a + // single-config invocation. This is not an implicitly idempotent API, so a + // request id is required to make it idempotent. + // + // Returns an empty Configuration proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If no configuration ID is provided. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + // - If a configuration with the same name already exists. 
+ CreateConfiguration(context.Context, *CreateConfigurationRequest) (*Configuration, error) + // Applies a standard update to the configuration identified by the given + // proto's name. For all types of fields (primitive, message, or repeated), + // replaces them with the given proto fields if they are under the given field + // mask paths. Fields that match the mask but aren't populated in the given + // configuration are cleared. This is an implicitly idempotent API. + // + // Returns an empty Configuration proto with only the name and ID fields + // populated. + // + // An error will be reported in the following cases: + // - If the configuration does not exist. + // - If the parent invocation is finished. + // - If no field mask was given. + // - If a given field mask path is not valid. + UpdateConfiguration(context.Context, *UpdateConfigurationRequest) (*Configuration, error) + // Creates the given file set under the given parent invocation. The given + // file set ID is URL encoded, converted to the full resource name, and + // assigned to the file set's name field. This is not an implicitly idempotent + // API, so a request id is required to make it idempotent. + // + // Returns an empty FileSet proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If no file set ID is provided. + // - If a file set with the same name already exists. + // - If the parent invocation does not exist. + // - If the parent invocation is finished. + CreateFileSet(context.Context, *CreateFileSetRequest) (*FileSet, error) + // Applies a standard update to the file set identified by the given proto's + // name. For all types of fields (primitive, message, or repeated), replaces + // them with the given proto fields if they are under the given field mask + // paths. Fields that match the mask but aren't populated in the given + // configuration are cleared. This is an implicitly idempotent API. + // + // Returns an empty FileSet proto with only the name and ID fields populated. + // + // An error will be reported in the following cases: + // - If the file set does not exist. + // - If the parent invocation is finished. + // - If no field mask was given. + // - If a given field mask path is not valid. 
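+	//
+	// As a rough sketch of the expected request shape (illustrative; the field
+	// paths and the token variable are placeholders, not taken from this file):
+	//
+	//	req := &UpdateFileSetRequest{
+	//		FileSet:            &FileSet{ /* name plus the fields to change */ },
+	//		UpdateMask:         &field_mask.FieldMask{Paths: []string{ /* FileSet field paths */ }},
+	//		AuthorizationToken: token, // same token as in the CreateInvocationRequest
+	//	}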
+ UpdateFileSet(context.Context, *UpdateFileSetRequest) (*FileSet, error) +} + +func RegisterResultStoreUploadServer(s *grpc.Server, srv ResultStoreUploadServer) { + s.RegisterService(&_ResultStoreUpload_serviceDesc, srv) +} + +func _ResultStoreUpload_CreateInvocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInvocationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateInvocation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateInvocation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateInvocation(ctx, req.(*CreateInvocationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateInvocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateInvocationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateInvocation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateInvocation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateInvocation(ctx, req.(*UpdateInvocationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_FinishInvocation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FinishInvocationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).FinishInvocation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/FinishInvocation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).FinishInvocation(ctx, req.(*FinishInvocationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_CreateTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateTarget(ctx, req.(*CreateTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.devtools.resultstore.v2.ResultStoreUpload/UpdateTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateTarget(ctx, req.(*UpdateTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_FinishTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FinishTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).FinishTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/FinishTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).FinishTarget(ctx, req.(*FinishTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_CreateConfiguredTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateConfiguredTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateConfiguredTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateConfiguredTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateConfiguredTarget(ctx, req.(*CreateConfiguredTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateConfiguredTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateConfiguredTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateConfiguredTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateConfiguredTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateConfiguredTarget(ctx, req.(*UpdateConfiguredTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_FinishConfiguredTarget_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(FinishConfiguredTargetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).FinishConfiguredTarget(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/FinishConfiguredTarget", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).FinishConfiguredTarget(ctx, req.(*FinishConfiguredTargetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_CreateAction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateActionRequest) + if err := dec(in); err != nil { + return nil, 
err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateAction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateAction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateAction(ctx, req.(*CreateActionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateAction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateActionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateAction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateAction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateAction(ctx, req.(*UpdateActionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_CreateConfiguration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateConfigurationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateConfiguration(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateConfiguration", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateConfiguration(ctx, req.(*CreateConfigurationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateConfiguration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateConfigurationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateConfiguration(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateConfiguration", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateConfiguration(ctx, req.(*UpdateConfigurationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_CreateFileSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateFileSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).CreateFileSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/CreateFileSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).CreateFileSet(ctx, req.(*CreateFileSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ResultStoreUpload_UpdateFileSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(UpdateFileSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ResultStoreUploadServer).UpdateFileSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.resultstore.v2.ResultStoreUpload/UpdateFileSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ResultStoreUploadServer).UpdateFileSet(ctx, req.(*UpdateFileSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ResultStoreUpload_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.resultstore.v2.ResultStoreUpload", + HandlerType: (*ResultStoreUploadServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateInvocation", + Handler: _ResultStoreUpload_CreateInvocation_Handler, + }, + { + MethodName: "UpdateInvocation", + Handler: _ResultStoreUpload_UpdateInvocation_Handler, + }, + { + MethodName: "FinishInvocation", + Handler: _ResultStoreUpload_FinishInvocation_Handler, + }, + { + MethodName: "CreateTarget", + Handler: _ResultStoreUpload_CreateTarget_Handler, + }, + { + MethodName: "UpdateTarget", + Handler: _ResultStoreUpload_UpdateTarget_Handler, + }, + { + MethodName: "FinishTarget", + Handler: _ResultStoreUpload_FinishTarget_Handler, + }, + { + MethodName: "CreateConfiguredTarget", + Handler: _ResultStoreUpload_CreateConfiguredTarget_Handler, + }, + { + MethodName: "UpdateConfiguredTarget", + Handler: _ResultStoreUpload_UpdateConfiguredTarget_Handler, + }, + { + MethodName: "FinishConfiguredTarget", + Handler: _ResultStoreUpload_FinishConfiguredTarget_Handler, + }, + { + MethodName: "CreateAction", + Handler: _ResultStoreUpload_CreateAction_Handler, + }, + { + MethodName: "UpdateAction", + Handler: _ResultStoreUpload_UpdateAction_Handler, + }, + { + MethodName: "CreateConfiguration", + Handler: _ResultStoreUpload_CreateConfiguration_Handler, + }, + { + MethodName: "UpdateConfiguration", + Handler: _ResultStoreUpload_UpdateConfiguration_Handler, + }, + { + MethodName: "CreateFileSet", + Handler: _ResultStoreUpload_CreateFileSet_Handler, + }, + { + MethodName: "UpdateFileSet", + Handler: _ResultStoreUpload_UpdateFileSet_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/resultstore/v2/resultstore_upload.proto", +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/resultstore_upload.proto", fileDescriptor_resultstore_upload_c44d7b55c8a904a0) +} + +var fileDescriptor_resultstore_upload_c44d7b55c8a904a0 = []byte{ + // 1348 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x99, 0xcf, 0x6f, 0xdc, 0x44, + 0x14, 0xc7, 0x35, 0xbb, 0x6d, 0x69, 0x5e, 0x12, 0x48, 0x27, 0x51, 0x12, 0x19, 0x92, 0x46, 0x0e, + 0x82, 0x26, 0x34, 0xeb, 0xe2, 0x84, 0xa6, 0xdd, 0x2a, 0x11, 0x4d, 0x50, 0xda, 0x2d, 0x42, 0x6a, + 0x37, 0xa9, 0x90, 0x2a, 0x95, 0x95, 0xbb, 0x9e, 0xdd, 0x5a, 0xd9, 0xb5, 0xb7, 0x6b, 0x6f, 0x24, + 0x40, 0x5c, 0xe0, 0x4f, 0xe0, 0x8a, 0x04, 0x17, 0x38, 0xf2, 0x43, 0xe2, 0x86, 0x38, 0xc1, 0x01, + 0xce, 0xbd, 0x21, 0x24, 0x04, 0xe2, 0x88, 0x38, 0x72, 0x46, 0x9e, 0x19, 0xef, 0xce, 0xf8, 0xc7, + 0xda, 0xde, 0xae, 0x72, 0xb3, 0xbd, 0xf3, 0xec, 0xcf, 0x77, 0xde, 0xd7, 0x33, 0xef, 0x79, 0x61, + 0xbb, 0xe9, 0x38, 0xcd, 0x16, 0xd1, 0x4c, 0x72, 0xe2, 0x39, 0x4e, 0xcb, 0xd5, 0xba, 0xc4, 0xed, + 0xb5, 0x3c, 0xd7, 0x73, 0xba, 0x44, 0x3b, 0xd1, 0xc5, 0xd3, 0x5a, 0xaf, 0xd3, 0x72, 0x0c, 0xb3, + 0xd4, 0xe9, 0x3a, 0x9e, 0x83, 0x97, 0x59, 0x60, 0x29, 0x08, 0x2c, 0x09, 0x23, 
0x4b, 0x27, 0xba, + 0xf2, 0x12, 0xbf, 0xb1, 0xd1, 0xb1, 0x34, 0xc3, 0xb6, 0x1d, 0xcf, 0xf0, 0x2c, 0xc7, 0x76, 0x59, + 0xb4, 0xf2, 0x5a, 0xca, 0x63, 0x8d, 0xba, 0x3f, 0x9a, 0x0f, 0xd6, 0x53, 0x06, 0xd7, 0x1d, 0xbb, + 0x61, 0x35, 0x7b, 0x5d, 0x43, 0x88, 0xb9, 0x9a, 0x31, 0x86, 0x98, 0x35, 0xcf, 0xe8, 0x36, 0x89, + 0xc7, 0xe3, 0x36, 0x52, 0xe2, 0x1a, 0x56, 0x8b, 0xd4, 0xdc, 0xfe, 0x70, 0x2d, 0x65, 0xb8, 0x65, + 0x9f, 0x38, 0x75, 0x91, 0x2b, 0x4d, 0xb8, 0x04, 0xb3, 0xc2, 0x07, 0xd3, 0xb3, 0x47, 0xbd, 0x86, + 0xd6, 0xb0, 0x48, 0xcb, 0xac, 0xb5, 0x0d, 0xf7, 0x98, 0x8f, 0xb8, 0x18, 0x1e, 0xe1, 0x59, 0x6d, + 0xe2, 0x7a, 0x46, 0xbb, 0xc3, 0x06, 0xa8, 0x5f, 0x14, 0x60, 0x61, 0xbf, 0x4b, 0x0c, 0x8f, 0x54, + 0xfa, 0x28, 0x55, 0xf2, 0xa4, 0x47, 0x5c, 0x0f, 0x2f, 0x01, 0x74, 0xd9, 0x61, 0xcd, 0x32, 0x17, + 0xd1, 0x0a, 0xba, 0x34, 0x51, 0x9d, 0xe0, 0x57, 0x2a, 0x26, 0x5e, 0x85, 0xe9, 0x01, 0xbe, 0x3f, + 0xa2, 0x40, 0x47, 0x4c, 0x0d, 0x2e, 0x56, 0x4c, 0x7c, 0x07, 0x60, 0x70, 0xbe, 0x58, 0x5c, 0x41, + 0x97, 0x26, 0xf5, 0xf5, 0xd2, 0x70, 0x6f, 0x94, 0x04, 0x14, 0x21, 0x1a, 0x6b, 0x30, 0x6b, 0xf4, + 0xbc, 0xc7, 0x4e, 0xd7, 0xfa, 0x80, 0x3d, 0xd3, 0x73, 0x8e, 0x89, 0xbd, 0x78, 0x86, 0x3e, 0x16, + 0x4b, 0x3f, 0x1d, 0xf9, 0xbf, 0xe0, 0xb7, 0x60, 0xc6, 0xe8, 0x79, 0x4e, 0xad, 0x61, 0xd9, 0x96, + 0xfb, 0xb8, 0xe6, 0x6b, 0x5f, 0x3c, 0x4b, 0x11, 0x94, 0x00, 0x21, 0x98, 0x98, 0xd2, 0x51, 0x30, + 0x31, 0xd5, 0xe7, 0xfd, 0x98, 0x03, 0x1a, 0xe2, 0x5f, 0x54, 0x9f, 0x22, 0x58, 0xb8, 0xdf, 0x31, + 0x63, 0xa7, 0x68, 0x9c, 0xf2, 0x6e, 0xc0, 0x64, 0x8f, 0x3e, 0x86, 0x26, 0x90, 0xca, 0x8a, 0x03, + 0x3d, 0xf0, 0x73, 0xfc, 0x8e, 0xe1, 0x1e, 0x57, 0x81, 0x0d, 0xf7, 0x8f, 0x93, 0xe6, 0xe6, 0x6c, + 0xd2, 0xdc, 0xa8, 0xef, 0xc1, 0x02, 0xd3, 0x18, 0x15, 0x85, 0xe1, 0x8c, 0x6d, 0xb4, 0x09, 0xcf, + 0x38, 0x3d, 0x4e, 0xba, 0x7f, 0x31, 0xf1, 0xfe, 0x6d, 0x58, 0x8c, 0xde, 0xdf, 0xed, 0x38, 0xb6, + 0x4b, 0x62, 0x1f, 0xb0, 0x03, 0x05, 0x6e, 0xa1, 0x49, 0x7d, 0x23, 0xfb, 0x0c, 0x96, 0x2a, 0x66, + 0xb5, 0x60, 0x99, 0xea, 0x6f, 0x08, 0x66, 0x99, 0x8f, 0x8f, 0xe8, 0x1b, 0x92, 0xd1, 0xc3, 0xf3, + 0x70, 0xae, 0x63, 0x74, 0x89, 0xed, 0x71, 0xf3, 0xf2, 0x33, 0xfc, 0x22, 0x4c, 0xb0, 0x37, 0xcd, + 0x8f, 0x62, 0x22, 0xcf, 0xb3, 0x0b, 0x15, 0x13, 0xef, 0xc2, 0x39, 0x76, 0xcc, 0x73, 0xf4, 0x4a, + 0x1a, 0x2e, 0x47, 0xe2, 0x51, 0xf9, 0x73, 0xf5, 0x13, 0x82, 0x59, 0xe6, 0x40, 0x59, 0xdc, 0x00, + 0xa4, 0x38, 0x12, 0xc8, 0xe9, 0x3a, 0xee, 0x01, 0xcc, 0xf2, 0xb7, 0x4a, 0x12, 0x31, 0x16, 0xb7, + 0x11, 0x98, 0x93, 0xef, 0x3d, 0xc4, 0x69, 0xd7, 0x05, 0xa7, 0xad, 0x65, 0x9b, 0xb1, 0xc0, 0x65, + 0x9f, 0x14, 0x60, 0x89, 0xb9, 0x6c, 0xbf, 0xbf, 0x3f, 0x8c, 0xcb, 0x6f, 0x6c, 0xc7, 0x11, 0xfc, + 0xc6, 0x2e, 0x54, 0x4c, 0xfc, 0x10, 0x2e, 0x44, 0xb6, 0x23, 0x9e, 0xac, 0x2b, 0x69, 0xfc, 0x11, + 0xce, 0x99, 0x7a, 0xe8, 0x4a, 0xfe, 0x44, 0xfe, 0x83, 0x60, 0x89, 0xd9, 0x31, 0x69, 0x16, 0x62, + 0x89, 0x8b, 0x63, 0x23, 0x3e, 0x5d, 0xdf, 0x9a, 0xb0, 0xc4, 0xbc, 0x95, 0xa4, 0x76, 0x2c, 0x0e, + 0x7e, 0x1f, 0x96, 0x93, 0x9e, 0x32, 0xc4, 0xcb, 0xfb, 0x82, 0x97, 0x37, 0xf3, 0xce, 0x6c, 0x74, + 0xed, 0xbc, 0x59, 0xcf, 0xb1, 0xff, 0x0f, 0xf1, 0x32, 0x2b, 0xcf, 0x04, 0x2f, 0xb3, 0x0b, 0x6c, + 0xed, 0x64, 0xc7, 0x59, 0xd7, 0x4e, 0x8e, 0xc4, 0xa3, 0x9e, 0x65, 0xed, 0x94, 0xc5, 0x0d, 0x40, + 0x8a, 0x23, 0x81, 0x9c, 0xae, 0x07, 0xff, 0x43, 0xa0, 0xc8, 0x0b, 0x8f, 0x31, 0x9e, 0x4c, 0x25, + 0xaf, 0x3a, 0x87, 0x30, 0x2d, 0x15, 0xce, 0x5c, 0xe2, 0x46, 0x56, 0x97, 0x31, 0x40, 0xf9, 0x1e, + 0xf9, 0x85, 0xff, 0x81, 0x40, 0x91, 0xd7, 0x1a, 0x49, 0x78, 0x04, 0xb2, 0x38, 0x06, 0xc8, 0xd3, + 0x4d, 
0xed, 0x5f, 0x08, 0xe6, 0x58, 0x6a, 0x0f, 0xac, 0x16, 0x39, 0x7c, 0xe6, 0xad, 0x64, 0x19, + 0x26, 0x83, 0x26, 0x64, 0x90, 0xd6, 0x89, 0x06, 0xbb, 0x77, 0xc5, 0xc4, 0x7b, 0x70, 0x3e, 0xf8, + 0x9d, 0x4b, 0x7b, 0x35, 0x6d, 0xb6, 0x02, 0xb0, 0xe7, 0xf8, 0x5d, 0xf2, 0x8b, 0xfc, 0x05, 0xc1, + 0x1c, 0x4b, 0x63, 0x48, 0xa4, 0x48, 0x83, 0x46, 0xa4, 0x09, 0xe5, 0xab, 0x30, 0x8e, 0x7c, 0x25, + 0x2e, 0xd4, 0xfa, 0xf7, 0xf3, 0x70, 0xa1, 0x4a, 0x89, 0x0e, 0x7d, 0xa2, 0xfb, 0xb4, 0xe9, 0xc5, + 0x9f, 0x21, 0x98, 0x09, 0xf7, 0x51, 0x78, 0x3b, 0xd5, 0x86, 0xf1, 0x9d, 0x97, 0x92, 0xa3, 0x85, + 0x50, 0x57, 0x3f, 0x7e, 0xfa, 0xf7, 0xa7, 0x85, 0x25, 0xf5, 0x05, 0xb9, 0x9f, 0x74, 0xcb, 0x62, + 0x6f, 0xf1, 0x2d, 0x82, 0x99, 0x70, 0x0f, 0x93, 0x8e, 0x97, 0xd0, 0xf5, 0xe4, 0xc2, 0xdb, 0xa6, + 0x78, 0xaf, 0xeb, 0xab, 0x3e, 0xde, 0x87, 0x42, 0xbf, 0xeb, 0xef, 0x5f, 0x3b, 0x02, 0xaf, 0xb6, + 0xfe, 0x91, 0x84, 0xfc, 0x1d, 0x82, 0x99, 0x70, 0x07, 0x91, 0x8e, 0x9c, 0xd0, 0xd3, 0x28, 0xd7, + 0xf2, 0x07, 0xb2, 0x6d, 0x57, 0x5d, 0xa7, 0x02, 0x5e, 0x56, 0x2f, 0x52, 0x01, 0x71, 0xd4, 0xac, + 0xbd, 0x2c, 0xa3, 0x75, 0xfc, 0x39, 0x82, 0x29, 0xb1, 0x0b, 0xc1, 0x9b, 0xd9, 0x1c, 0x20, 0xd5, + 0x13, 0x4a, 0xc6, 0x32, 0x5e, 0xd5, 0x29, 0xd9, 0x65, 0x55, 0xa5, 0x64, 0xec, 0xf5, 0x0f, 0xb1, + 0xf1, 0xef, 0x05, 0x6e, 0x39, 0x28, 0xf9, 0xbf, 0x42, 0x30, 0x25, 0xb6, 0x12, 0xe9, 0x84, 0x31, + 0x8d, 0x47, 0x66, 0xc2, 0xeb, 0x94, 0x70, 0x53, 0x5f, 0xa3, 0x84, 0xfc, 0xdb, 0x45, 0x74, 0x0a, + 0x03, 0x4a, 0x7f, 0x32, 0x03, 0xd0, 0x6f, 0x10, 0x4c, 0x89, 0x25, 0x7d, 0x3a, 0x68, 0x4c, 0x73, + 0xa1, 0x6c, 0xe5, 0x0b, 0xe2, 0x29, 0xdf, 0xa2, 0xd8, 0x25, 0x75, 0x2d, 0x21, 0xe5, 0x22, 0xef, + 0x20, 0xf9, 0xbf, 0x23, 0x98, 0x8f, 0x6f, 0x0e, 0xf0, 0x4e, 0x36, 0x1b, 0x24, 0x14, 0x98, 0x4a, + 0xee, 0x9a, 0x59, 0xbd, 0x47, 0x15, 0xbc, 0xad, 0x5e, 0x4d, 0xb4, 0x86, 0xa0, 0x41, 0x0b, 0x17, + 0xd8, 0x6e, 0x39, 0x5a, 0xbe, 0xe3, 0x7f, 0x11, 0xcc, 0xc7, 0x57, 0xfd, 0xe9, 0xf2, 0x86, 0x76, + 0x0b, 0x23, 0xc8, 0xab, 0x53, 0x79, 0x0f, 0xf5, 0xdb, 0x54, 0x5e, 0xf4, 0x5b, 0xdd, 0xb0, 0x94, + 0x45, 0xd5, 0xfa, 0x69, 0x8c, 0x11, 0xfc, 0x27, 0x82, 0xf9, 0xf8, 0x92, 0x3c, 0x5d, 0xf0, 0xd0, + 0x86, 0x41, 0xd9, 0x1d, 0x35, 0x9c, 0xfb, 0xf3, 0x36, 0x95, 0xbf, 0xa7, 0xee, 0xa4, 0xfa, 0x33, + 0x5e, 0xec, 0xc0, 0xb3, 0x3f, 0xf6, 0x17, 0x2c, 0x56, 0xde, 0x66, 0x5d, 0xb0, 0xa4, 0x5a, 0x5a, + 0xc9, 0x58, 0x3b, 0xab, 0x77, 0x29, 0xf7, 0x1d, 0xf5, 0xcd, 0x0c, 0xae, 0x8c, 0x25, 0xe7, 0xdf, + 0x7d, 0xdd, 0x72, 0x50, 0x85, 0xff, 0xdc, 0x5f, 0xce, 0xb2, 0xf2, 0xc7, 0xf4, 0x02, 0x99, 0xf9, + 0xdf, 0xa5, 0xfc, 0xf7, 0xf4, 0x5b, 0x94, 0x9f, 0x7f, 0x83, 0xce, 0x3b, 0xfd, 0x81, 0x06, 0xba, + 0xd8, 0x71, 0x19, 0x3f, 0xf4, 0x3b, 0x30, 0xa9, 0x30, 0xc5, 0xe5, 0x7c, 0xeb, 0x86, 0xb4, 0xe3, + 0xe5, 0xab, 0x81, 0x83, 0xa5, 0x7a, 0xe8, 0x66, 0xc2, 0xd4, 0xb8, 0xe5, 0x50, 0xbd, 0xfc, 0x6b, + 0xbf, 0xc5, 0xca, 0x49, 0x9f, 0x5c, 0xd8, 0xe7, 0xa5, 0xbf, 0x45, 0xe9, 0x6f, 0xea, 0x57, 0xa4, + 0x05, 0x21, 0xa9, 0xd0, 0x08, 0x84, 0xf8, 0x29, 0x08, 0x69, 0xf9, 0x12, 0xc1, 0xb4, 0x54, 0x8d, + 0xe3, 0xad, 0x6c, 0x39, 0x90, 0xeb, 0x5a, 0x25, 0x6b, 0x15, 0xab, 0xbe, 0x41, 0xc9, 0x35, 0x75, + 0x75, 0xc8, 0xbc, 0xf3, 0x42, 0xd7, 0x2d, 0xf7, 0x6b, 0x65, 0xfc, 0x35, 0x82, 0x69, 0xa9, 0xa0, + 0x4e, 0xe7, 0x8c, 0xab, 0xbf, 0xb3, 0x73, 0xee, 0x52, 0xce, 0x6b, 0xfa, 0x65, 0xca, 0xd9, 0xff, + 0x9b, 0x23, 0x66, 0x72, 0x03, 0x5a, 0x7f, 0x76, 0xfb, 0xc0, 0x7b, 0x4f, 0x40, 0xad, 0x3b, 0xed, + 0x94, 0xa7, 0xdd, 0x45, 0x0f, 0x2a, 0x7c, 0x44, 0xd3, 0x69, 0x19, 0x76, 0xb3, 0xe4, 0x74, 0x9b, + 0x5a, 0x93, 0xd8, 0xb4, 0x88, 
0xe7, 0x7f, 0xa6, 0x18, 0x1d, 0xcb, 0x4d, 0xfa, 0x7f, 0xe4, 0x86, + 0x70, 0xfa, 0xe8, 0x1c, 0x8d, 0xda, 0xfc, 0x3f, 0x00, 0x00, 0xff, 0xff, 0x5e, 0xa6, 0x1a, 0x3d, + 0xc4, 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/target.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/target.pb.go new file mode 100644 index 0000000..a2085b8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/target.pb.go @@ -0,0 +1,434 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/target.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// These correspond to the suffix of the rule name. Eg cc_test has type TEST. +type TargetType int32 + +const ( + // Unspecified by the build system. + TargetType_TARGET_TYPE_UNSPECIFIED TargetType = 0 + // An application e.g. ios_application. + TargetType_APPLICATION TargetType = 1 + // A binary target e.g. cc_binary. + TargetType_BINARY TargetType = 2 + // A library target e.g. java_library + TargetType_LIBRARY TargetType = 3 + // A package + TargetType_PACKAGE TargetType = 4 + // Any test target, in bazel that means a rule with a '_test' suffix. + TargetType_TEST TargetType = 5 +) + +var TargetType_name = map[int32]string{ + 0: "TARGET_TYPE_UNSPECIFIED", + 1: "APPLICATION", + 2: "BINARY", + 3: "LIBRARY", + 4: "PACKAGE", + 5: "TEST", +} +var TargetType_value = map[string]int32{ + "TARGET_TYPE_UNSPECIFIED": 0, + "APPLICATION": 1, + "BINARY": 2, + "LIBRARY": 3, + "PACKAGE": 4, + "TEST": 5, +} + +func (x TargetType) String() string { + return proto.EnumName(TargetType_name, int32(x)) +} +func (TargetType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{0} +} + +// Indicates how big the user indicated the test action was. +type TestSize int32 + +const ( + // Unspecified by the user. + TestSize_TEST_SIZE_UNSPECIFIED TestSize = 0 + // Unit test taking less than 1 minute. + TestSize_SMALL TestSize = 1 + // Integration tests taking less than 5 minutes. + TestSize_MEDIUM TestSize = 2 + // End-to-end tests taking less than 15 minutes. + TestSize_LARGE TestSize = 3 + // Even bigger than LARGE. + TestSize_ENORMOUS TestSize = 4 + // Something that doesn't fit into the above categories. 
+ TestSize_OTHER_SIZE TestSize = 5 +) + +var TestSize_name = map[int32]string{ + 0: "TEST_SIZE_UNSPECIFIED", + 1: "SMALL", + 2: "MEDIUM", + 3: "LARGE", + 4: "ENORMOUS", + 5: "OTHER_SIZE", +} +var TestSize_value = map[string]int32{ + "TEST_SIZE_UNSPECIFIED": 0, + "SMALL": 1, + "MEDIUM": 2, + "LARGE": 3, + "ENORMOUS": 4, + "OTHER_SIZE": 5, +} + +func (x TestSize) String() string { + return proto.EnumName(TestSize_name, int32(x)) +} +func (TestSize) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{1} +} + +// Each Target represents data for a given target in a given Invocation. +// ConfiguredTarget and Action resources under each Target contain the bulk of +// the data. +type Target struct { + // The resource name. Its format must be: + // invocations/${INVOCATION_ID}/targets/${TARGET_ID} + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The resource ID components that identify the Target. They must match the + // resource name after proper encoding. + Id *Target_Id `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // This is the aggregate status of the target. + StatusAttributes *StatusAttributes `protobuf:"bytes,3,opt,name=status_attributes,json=statusAttributes,proto3" json:"status_attributes,omitempty"` + // When this target started and its duration. + Timing *Timing `protobuf:"bytes,4,opt,name=timing,proto3" json:"timing,omitempty"` + // Attributes that apply to all targets. + TargetAttributes *TargetAttributes `protobuf:"bytes,5,opt,name=target_attributes,json=targetAttributes,proto3" json:"target_attributes,omitempty"` + // Attributes that apply to all test actions under this target. + TestAttributes *TestAttributes `protobuf:"bytes,6,opt,name=test_attributes,json=testAttributes,proto3" json:"test_attributes,omitempty"` + // Arbitrary name-value pairs. + // This is implemented as a multi-map. Multiple properties are allowed with + // the same key. Properties will be returned in lexicographical order by key. + Properties []*Property `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty"` + // A list of file references for target level files. + // The file IDs must be unique within this list. Duplicate file IDs will + // result in an error. Files will be returned in lexicographical order by ID. + // Use this field to specify outputs not related to a configuration. + Files []*File `protobuf:"bytes,8,rep,name=files,proto3" json:"files,omitempty"` + // Provides a hint to clients as to whether to display the Target to users. + // If true then clients likely want to display the Target by default. + // Once set to true, this may not be set back to false. 
+ Visible bool `protobuf:"varint,10,opt,name=visible,proto3" json:"visible,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target) Reset() { *m = Target{} } +func (m *Target) String() string { return proto.CompactTextString(m) } +func (*Target) ProtoMessage() {} +func (*Target) Descriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{0} +} +func (m *Target) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target.Unmarshal(m, b) +} +func (m *Target) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target.Marshal(b, m, deterministic) +} +func (dst *Target) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target.Merge(dst, src) +} +func (m *Target) XXX_Size() int { + return xxx_messageInfo_Target.Size(m) +} +func (m *Target) XXX_DiscardUnknown() { + xxx_messageInfo_Target.DiscardUnknown(m) +} + +var xxx_messageInfo_Target proto.InternalMessageInfo + +func (m *Target) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Target) GetId() *Target_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *Target) GetStatusAttributes() *StatusAttributes { + if m != nil { + return m.StatusAttributes + } + return nil +} + +func (m *Target) GetTiming() *Timing { + if m != nil { + return m.Timing + } + return nil +} + +func (m *Target) GetTargetAttributes() *TargetAttributes { + if m != nil { + return m.TargetAttributes + } + return nil +} + +func (m *Target) GetTestAttributes() *TestAttributes { + if m != nil { + return m.TestAttributes + } + return nil +} + +func (m *Target) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *Target) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +func (m *Target) GetVisible() bool { + if m != nil { + return m.Visible + } + return false +} + +// The resource ID components that identify the Target. +type Target_Id struct { + // The Invocation ID. + InvocationId string `protobuf:"bytes,1,opt,name=invocation_id,json=invocationId,proto3" json:"invocation_id,omitempty"` + // The Target ID. 
+ TargetId string `protobuf:"bytes,2,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target_Id) Reset() { *m = Target_Id{} } +func (m *Target_Id) String() string { return proto.CompactTextString(m) } +func (*Target_Id) ProtoMessage() {} +func (*Target_Id) Descriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{0, 0} +} +func (m *Target_Id) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target_Id.Unmarshal(m, b) +} +func (m *Target_Id) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target_Id.Marshal(b, m, deterministic) +} +func (dst *Target_Id) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target_Id.Merge(dst, src) +} +func (m *Target_Id) XXX_Size() int { + return xxx_messageInfo_Target_Id.Size(m) +} +func (m *Target_Id) XXX_DiscardUnknown() { + xxx_messageInfo_Target_Id.DiscardUnknown(m) +} + +var xxx_messageInfo_Target_Id proto.InternalMessageInfo + +func (m *Target_Id) GetInvocationId() string { + if m != nil { + return m.InvocationId + } + return "" +} + +func (m *Target_Id) GetTargetId() string { + if m != nil { + return m.TargetId + } + return "" +} + +// Attributes that apply to all targets. +type TargetAttributes struct { + // If known, indicates the type of this target. In bazel this corresponds + // to the rule-suffix. + Type TargetType `protobuf:"varint,1,opt,name=type,proto3,enum=google.devtools.resultstore.v2.TargetType" json:"type,omitempty"` + // If known, the main language of this target, e.g. java, cc, python, etc. + Language Language `protobuf:"varint,2,opt,name=language,proto3,enum=google.devtools.resultstore.v2.Language" json:"language,omitempty"` + // The tags attribute of the build rule. These should be short, descriptive + // words, and there should only be a few of them. + // This is implemented as a set. All tags will be unique. Any duplicate tags + // will be ignored. Tags will be returned in lexicographical order. 
+ Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetAttributes) Reset() { *m = TargetAttributes{} } +func (m *TargetAttributes) String() string { return proto.CompactTextString(m) } +func (*TargetAttributes) ProtoMessage() {} +func (*TargetAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{1} +} +func (m *TargetAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetAttributes.Unmarshal(m, b) +} +func (m *TargetAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetAttributes.Marshal(b, m, deterministic) +} +func (dst *TargetAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetAttributes.Merge(dst, src) +} +func (m *TargetAttributes) XXX_Size() int { + return xxx_messageInfo_TargetAttributes.Size(m) +} +func (m *TargetAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_TargetAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetAttributes proto.InternalMessageInfo + +func (m *TargetAttributes) GetType() TargetType { + if m != nil { + return m.Type + } + return TargetType_TARGET_TYPE_UNSPECIFIED +} + +func (m *TargetAttributes) GetLanguage() Language { + if m != nil { + return m.Language + } + return Language_LANGUAGE_UNSPECIFIED +} + +func (m *TargetAttributes) GetTags() []string { + if m != nil { + return m.Tags + } + return nil +} + +// Attributes that apply only to test actions under this target. +type TestAttributes struct { + // Indicates how big the user indicated the test action was. + Size TestSize `protobuf:"varint,1,opt,name=size,proto3,enum=google.devtools.resultstore.v2.TestSize" json:"size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestAttributes) Reset() { *m = TestAttributes{} } +func (m *TestAttributes) String() string { return proto.CompactTextString(m) } +func (*TestAttributes) ProtoMessage() {} +func (*TestAttributes) Descriptor() ([]byte, []int) { + return fileDescriptor_target_3c0ccbffe81e229a, []int{2} +} +func (m *TestAttributes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestAttributes.Unmarshal(m, b) +} +func (m *TestAttributes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestAttributes.Marshal(b, m, deterministic) +} +func (dst *TestAttributes) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestAttributes.Merge(dst, src) +} +func (m *TestAttributes) XXX_Size() int { + return xxx_messageInfo_TestAttributes.Size(m) +} +func (m *TestAttributes) XXX_DiscardUnknown() { + xxx_messageInfo_TestAttributes.DiscardUnknown(m) +} + +var xxx_messageInfo_TestAttributes proto.InternalMessageInfo + +func (m *TestAttributes) GetSize() TestSize { + if m != nil { + return m.Size + } + return TestSize_TEST_SIZE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*Target)(nil), "google.devtools.resultstore.v2.Target") + proto.RegisterType((*Target_Id)(nil), "google.devtools.resultstore.v2.Target.Id") + proto.RegisterType((*TargetAttributes)(nil), "google.devtools.resultstore.v2.TargetAttributes") + proto.RegisterType((*TestAttributes)(nil), "google.devtools.resultstore.v2.TestAttributes") + proto.RegisterEnum("google.devtools.resultstore.v2.TargetType", TargetType_name, TargetType_value) + 
proto.RegisterEnum("google.devtools.resultstore.v2.TestSize", TestSize_name, TestSize_value) +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/target.proto", fileDescriptor_target_3c0ccbffe81e229a) +} + +var fileDescriptor_target_3c0ccbffe81e229a = []byte{ + // 628 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xed, 0x6a, 0xdb, 0x4a, + 0x10, 0xbd, 0xb2, 0x65, 0x47, 0x1e, 0xe7, 0x3a, 0xba, 0x0b, 0x97, 0xaa, 0x09, 0x14, 0xe3, 0x96, + 0xe2, 0xa4, 0x20, 0x17, 0xf7, 0x57, 0x3f, 0x08, 0x28, 0x89, 0x92, 0x2c, 0xf5, 0x87, 0x58, 0x29, + 0x94, 0x04, 0x8a, 0x51, 0xa2, 0xad, 0xd8, 0x22, 0x6b, 0x5d, 0xed, 0xda, 0x90, 0x3c, 0x48, 0x9f, + 0xa2, 0x0f, 0x59, 0xb4, 0xb2, 0x13, 0xc7, 0xd0, 0x2a, 0xff, 0x76, 0x46, 0xe7, 0x9c, 0x39, 0x9a, + 0x99, 0x5d, 0x78, 0x13, 0x73, 0x1e, 0x27, 0xb4, 0x17, 0xd1, 0x85, 0xe4, 0x3c, 0x11, 0xbd, 0x8c, + 0x8a, 0x79, 0x22, 0x85, 0xe4, 0x19, 0xed, 0x2d, 0xfa, 0x3d, 0x19, 0x66, 0x31, 0x95, 0xf6, 0x2c, + 0xe3, 0x92, 0xa3, 0x17, 0x05, 0xd8, 0x5e, 0x81, 0xed, 0x35, 0xb0, 0xbd, 0xe8, 0xef, 0x96, 0x89, + 0xdd, 0xf0, 0xe9, 0x94, 0xa7, 0x85, 0xd8, 0xee, 0x7e, 0x09, 0xf8, 0x1b, 0x4b, 0x68, 0x01, 0xed, + 0xfc, 0xac, 0x41, 0x3d, 0x50, 0x46, 0x10, 0x02, 0x3d, 0x0d, 0xa7, 0xd4, 0xd2, 0xda, 0x5a, 0xb7, + 0x41, 0xd4, 0x19, 0xbd, 0x87, 0x0a, 0x8b, 0xac, 0x4a, 0x5b, 0xeb, 0x36, 0xfb, 0xfb, 0xf6, 0xdf, + 0x3d, 0xda, 0x85, 0x8e, 0x8d, 0x23, 0x52, 0x61, 0x11, 0xfa, 0x0a, 0xff, 0x09, 0x19, 0xca, 0xb9, + 0x98, 0x84, 0x52, 0x66, 0xec, 0x7a, 0x2e, 0xa9, 0xb0, 0xaa, 0x4a, 0xe9, 0x6d, 0x99, 0x92, 0xaf, + 0x88, 0xce, 0x3d, 0x8f, 0x98, 0x62, 0x23, 0x83, 0x0e, 0xa1, 0x2e, 0xd9, 0x94, 0xa5, 0xb1, 0xa5, + 0x2b, 0xcd, 0xd7, 0xa5, 0xee, 0x14, 0x9a, 0x2c, 0x59, 0xb9, 0xbd, 0x62, 0x00, 0xeb, 0xf6, 0x6a, + 0x4f, 0xb3, 0x57, 0xfc, 0xe8, 0xba, 0x3d, 0xb9, 0x91, 0x41, 0x5f, 0x60, 0x47, 0x52, 0xf1, 0x48, + 0xbc, 0xae, 0xc4, 0xed, 0x52, 0x71, 0x2a, 0xd6, 0xa5, 0x5b, 0xf2, 0x51, 0x8c, 0xce, 0x01, 0x66, + 0x19, 0x9f, 0xd1, 0x4c, 0x32, 0x2a, 0xac, 0xad, 0x76, 0xb5, 0xdb, 0xec, 0x77, 0xcb, 0x34, 0xbd, + 0x82, 0x71, 0x4b, 0xd6, 0xb8, 0xe8, 0x03, 0xd4, 0xf2, 0x45, 0x10, 0x96, 0xa1, 0x44, 0x5e, 0x95, + 0x89, 0x9c, 0xb2, 0x84, 0x92, 0x82, 0x82, 0x2c, 0xd8, 0x5a, 0x30, 0xc1, 0xae, 0x13, 0x6a, 0x41, + 0x5b, 0xeb, 0x1a, 0x64, 0x15, 0xee, 0x9e, 0x42, 0x05, 0x47, 0xe8, 0x25, 0xfc, 0xcb, 0xd2, 0x05, + 0xbf, 0x09, 0x25, 0xe3, 0xe9, 0x84, 0x45, 0xcb, 0xa5, 0xda, 0x7e, 0x48, 0xe2, 0x08, 0xed, 0x41, + 0x63, 0x39, 0x82, 0xe5, 0x8e, 0x35, 0x88, 0x51, 0x24, 0x70, 0xd4, 0xf9, 0xa5, 0x81, 0xb9, 0xd9, + 0x67, 0x74, 0x08, 0xba, 0xbc, 0x9d, 0x15, 0x2b, 0xda, 0xea, 0x1f, 0x3c, 0x6d, 0x4e, 0xc1, 0xed, + 0x8c, 0x12, 0xc5, 0x43, 0x27, 0x60, 0x24, 0x61, 0x1a, 0xcf, 0xc3, 0x98, 0xaa, 0x82, 0xad, 0xf2, + 0xd6, 0x0d, 0x96, 0x78, 0x72, 0xcf, 0xcc, 0x2f, 0x8a, 0x0c, 0xe3, 0x7c, 0x99, 0xab, 0xf9, 0x45, + 0xc9, 0xcf, 0x9d, 0x11, 0xb4, 0x1e, 0x0f, 0x0e, 0x7d, 0x02, 0x5d, 0xb0, 0xbb, 0x95, 0xd7, 0xee, + 0x53, 0xc6, 0xee, 0xb3, 0x3b, 0x4a, 0x14, 0xeb, 0xe0, 0x3b, 0xc0, 0x83, 0x7b, 0xb4, 0x07, 0xcf, + 0x02, 0x87, 0x9c, 0xb9, 0xc1, 0x24, 0xb8, 0xf4, 0xdc, 0xc9, 0xc5, 0xc8, 0xf7, 0xdc, 0x63, 0x7c, + 0x8a, 0xdd, 0x13, 0xf3, 0x1f, 0xb4, 0x03, 0x4d, 0xc7, 0xf3, 0x06, 0xf8, 0xd8, 0x09, 0xf0, 0x78, + 0x64, 0x6a, 0x08, 0xa0, 0x7e, 0x84, 0x47, 0x0e, 0xb9, 0x34, 0x2b, 0xa8, 0x09, 0x5b, 0x03, 0x7c, + 0x44, 0xf2, 0xa0, 0x9a, 0x07, 0x9e, 0x73, 0xfc, 0xd9, 0x39, 0x73, 0x4d, 0x1d, 0x19, 0xa0, 0x07, + 0xae, 0x1f, 0x98, 0xb5, 0x03, 0x0a, 0xc6, 0xaa, 0x3a, 0x7a, 0x0e, 0xff, 0xe7, 0xd9, 
0x89, 0x8f, + 0xaf, 0x36, 0xeb, 0x34, 0xa0, 0xe6, 0x0f, 0x9d, 0xc1, 0xa0, 0xa8, 0x30, 0x74, 0x4f, 0xf0, 0xc5, + 0xd0, 0xac, 0xe4, 0xe9, 0x41, 0xee, 0xcd, 0xac, 0xa2, 0x6d, 0x30, 0xdc, 0xd1, 0x98, 0x0c, 0xc7, + 0x17, 0xbe, 0xa9, 0xa3, 0x16, 0xc0, 0x38, 0x38, 0x77, 0x89, 0xd2, 0x32, 0x6b, 0x47, 0x3f, 0xa0, + 0x73, 0xc3, 0xa7, 0x25, 0x7d, 0xf0, 0xb4, 0x2b, 0xbc, 0x44, 0xc4, 0x3c, 0xef, 0xb8, 0xcd, 0xb3, + 0xb8, 0x17, 0xd3, 0x54, 0x3d, 0x57, 0xbd, 0xe2, 0x53, 0x38, 0x63, 0xe2, 0x4f, 0x8f, 0xdb, 0xc7, + 0xb5, 0xf0, 0xba, 0xae, 0x58, 0xef, 0x7e, 0x07, 0x00, 0x00, 0xff, 0xff, 0xce, 0x70, 0x29, 0x31, + 0x8b, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/test_suite.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/test_suite.pb.go new file mode 100644 index 0000000..a04dfc7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/resultstore/v2/test_suite.pb.go @@ -0,0 +1,659 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/resultstore/v2/test_suite.proto + +package resultstore // import "google.golang.org/genproto/googleapis/devtools/resultstore/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The result of running a test case. +type TestCase_Result int32 + +const ( + // The implicit default enum value. Do not use. + TestCase_RESULT_UNSPECIFIED TestCase_Result = 0 + // Test case ran to completion. Look for failures or errors to determine + // whether it passed, failed, or errored. + TestCase_COMPLETED TestCase_Result = 1 + // Test case started but did not complete because the test harness received + // a signal and decided to stop running tests. + TestCase_INTERRUPTED TestCase_Result = 2 + // Test case was not started because the test harness received a SIGINT or + // timed out. + TestCase_CANCELLED TestCase_Result = 3 + // Test case was not run because the user or process running the test + // specified a filter that excluded this test case. + TestCase_FILTERED TestCase_Result = 4 + // Test case was not run to completion because the test case decided it + // should not be run (eg. due to a failed assumption in a JUnit4 test). + // Per-test setup or tear-down may or may not have run. + TestCase_SKIPPED TestCase_Result = 5 + // The test framework did not run the test case because it was labeled as + // suppressed. Eg. if someone temporarily disables a failing test. 
+	TestCase_SUPPRESSED TestCase_Result = 6
+)
+
+var TestCase_Result_name = map[int32]string{
+	0: "RESULT_UNSPECIFIED",
+	1: "COMPLETED",
+	2: "INTERRUPTED",
+	3: "CANCELLED",
+	4: "FILTERED",
+	5: "SKIPPED",
+	6: "SUPPRESSED",
+}
+var TestCase_Result_value = map[string]int32{
+	"RESULT_UNSPECIFIED": 0,
+	"COMPLETED":          1,
+	"INTERRUPTED":        2,
+	"CANCELLED":          3,
+	"FILTERED":           4,
+	"SKIPPED":            5,
+	"SUPPRESSED":         6,
+}
+
+func (x TestCase_Result) String() string {
+	return proto.EnumName(TestCase_Result_name, int32(x))
+}
+func (TestCase_Result) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_test_suite_48f943d641146787, []int{2, 0}
+}
+
+// The result of running a test suite, as reported in a <testsuite> element of
+// an XML log.
+type TestSuite struct {
+	// The full name of this suite, as reported in the name attribute. For Java
+	// tests, this is normally the fully qualified class name. Eg.
+	// "com.google.common.hash.BloomFilterTest".
+	SuiteName string `protobuf:"bytes,1,opt,name=suite_name,json=suiteName,proto3" json:"suite_name,omitempty"`
+	// The results of the test cases and test suites contained in this suite,
+	// as reported in the <testcase> and <testsuite> elements contained within
+	// this <testsuite>.
+	Tests []*Test `protobuf:"bytes,2,rep,name=tests,proto3" json:"tests,omitempty"`
+	// Failures reported in <failure> elements within this <testsuite>.
+	Failures []*TestFailure `protobuf:"bytes,3,rep,name=failures,proto3" json:"failures,omitempty"`
+	// Errors reported in <error> elements within this <testsuite>.
+	Errors []*TestError `protobuf:"bytes,4,rep,name=errors,proto3" json:"errors,omitempty"`
+	// The timing for the entire TestSuite, as reported by the time attribute.
+	Timing *Timing `protobuf:"bytes,6,opt,name=timing,proto3" json:"timing,omitempty"`
+	// Arbitrary name-value pairs, as reported in custom attributes or in a
+	// <properties> element within this <testsuite>. Multiple properties are
+	// allowed with the same key. Properties will be returned in lexicographical
+	// order by key.
+	Properties []*Property `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty"`
+	// Files produced by this test suite, as reported by undeclared output
+	// annotations.
+	// The file IDs must be unique within this list. Duplicate file IDs will
+	// result in an error. Files will be returned in lexicographical order by ID.
+ Files []*File `protobuf:"bytes,8,rep,name=files,proto3" json:"files,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestSuite) Reset() { *m = TestSuite{} } +func (m *TestSuite) String() string { return proto.CompactTextString(m) } +func (*TestSuite) ProtoMessage() {} +func (*TestSuite) Descriptor() ([]byte, []int) { + return fileDescriptor_test_suite_48f943d641146787, []int{0} +} +func (m *TestSuite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestSuite.Unmarshal(m, b) +} +func (m *TestSuite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestSuite.Marshal(b, m, deterministic) +} +func (dst *TestSuite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestSuite.Merge(dst, src) +} +func (m *TestSuite) XXX_Size() int { + return xxx_messageInfo_TestSuite.Size(m) +} +func (m *TestSuite) XXX_DiscardUnknown() { + xxx_messageInfo_TestSuite.DiscardUnknown(m) +} + +var xxx_messageInfo_TestSuite proto.InternalMessageInfo + +func (m *TestSuite) GetSuiteName() string { + if m != nil { + return m.SuiteName + } + return "" +} + +func (m *TestSuite) GetTests() []*Test { + if m != nil { + return m.Tests + } + return nil +} + +func (m *TestSuite) GetFailures() []*TestFailure { + if m != nil { + return m.Failures + } + return nil +} + +func (m *TestSuite) GetErrors() []*TestError { + if m != nil { + return m.Errors + } + return nil +} + +func (m *TestSuite) GetTiming() *Timing { + if m != nil { + return m.Timing + } + return nil +} + +func (m *TestSuite) GetProperties() []*Property { + if m != nil { + return m.Properties + } + return nil +} + +func (m *TestSuite) GetFiles() []*File { + if m != nil { + return m.Files + } + return nil +} + +// The result of running a test case or test suite. JUnit3 TestDecorators are +// represented as a TestSuite with a single test. 
+type Test struct { + // Either a TestCase of a TestSuite + // + // Types that are valid to be assigned to TestType: + // *Test_TestCase + // *Test_TestSuite + TestType isTest_TestType `protobuf_oneof:"test_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Test) Reset() { *m = Test{} } +func (m *Test) String() string { return proto.CompactTextString(m) } +func (*Test) ProtoMessage() {} +func (*Test) Descriptor() ([]byte, []int) { + return fileDescriptor_test_suite_48f943d641146787, []int{1} +} +func (m *Test) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Test.Unmarshal(m, b) +} +func (m *Test) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Test.Marshal(b, m, deterministic) +} +func (dst *Test) XXX_Merge(src proto.Message) { + xxx_messageInfo_Test.Merge(dst, src) +} +func (m *Test) XXX_Size() int { + return xxx_messageInfo_Test.Size(m) +} +func (m *Test) XXX_DiscardUnknown() { + xxx_messageInfo_Test.DiscardUnknown(m) +} + +var xxx_messageInfo_Test proto.InternalMessageInfo + +type isTest_TestType interface { + isTest_TestType() +} + +type Test_TestCase struct { + TestCase *TestCase `protobuf:"bytes,1,opt,name=test_case,json=testCase,proto3,oneof"` +} + +type Test_TestSuite struct { + TestSuite *TestSuite `protobuf:"bytes,2,opt,name=test_suite,json=testSuite,proto3,oneof"` +} + +func (*Test_TestCase) isTest_TestType() {} + +func (*Test_TestSuite) isTest_TestType() {} + +func (m *Test) GetTestType() isTest_TestType { + if m != nil { + return m.TestType + } + return nil +} + +func (m *Test) GetTestCase() *TestCase { + if x, ok := m.GetTestType().(*Test_TestCase); ok { + return x.TestCase + } + return nil +} + +func (m *Test) GetTestSuite() *TestSuite { + if x, ok := m.GetTestType().(*Test_TestSuite); ok { + return x.TestSuite + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Test) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+	return _Test_OneofMarshaler, _Test_OneofUnmarshaler, _Test_OneofSizer, []interface{}{
+		(*Test_TestCase)(nil),
+		(*Test_TestSuite)(nil),
+	}
+}
+
+func _Test_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+	m := msg.(*Test)
+	// test_type
+	switch x := m.TestType.(type) {
+	case *Test_TestCase:
+		b.EncodeVarint(1<<3 | proto.WireBytes)
+		if err := b.EncodeMessage(x.TestCase); err != nil {
+			return err
+		}
+	case *Test_TestSuite:
+		b.EncodeVarint(2<<3 | proto.WireBytes)
+		if err := b.EncodeMessage(x.TestSuite); err != nil {
+			return err
+		}
+	case nil:
+	default:
+		return fmt.Errorf("Test.TestType has unexpected type %T", x)
+	}
+	return nil
+}
+
+func _Test_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+	m := msg.(*Test)
+	switch tag {
+	case 1: // test_type.test_case
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		msg := new(TestCase)
+		err := b.DecodeMessage(msg)
+		m.TestType = &Test_TestCase{msg}
+		return true, err
+	case 2: // test_type.test_suite
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		msg := new(TestSuite)
+		err := b.DecodeMessage(msg)
+		m.TestType = &Test_TestSuite{msg}
+		return true, err
+	default:
+		return false, nil
+	}
+}
+
+func _Test_OneofSizer(msg proto.Message) (n int) {
+	m := msg.(*Test)
+	// test_type
+	switch x := m.TestType.(type) {
+	case *Test_TestCase:
+		s := proto.Size(x.TestCase)
+		n += 1 // tag and wire
+		n += proto.SizeVarint(uint64(s))
+		n += s
+	case *Test_TestSuite:
+		s := proto.Size(x.TestSuite)
+		n += 1 // tag and wire
+		n += proto.SizeVarint(uint64(s))
+		n += s
+	case nil:
+	default:
+		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+	}
+	return n
+}
+
+// The result of running a test case, as reported in a <testcase> element of
+// an XML log.
+type TestCase struct {
+	// The name of the test case, as reported in the name attribute. For Java,
+	// this is normally the method name. Eg. "testBasic".
+	CaseName string `protobuf:"bytes,1,opt,name=case_name,json=caseName,proto3" json:"case_name,omitempty"`
+	// The name of the class in which the test case was defined, as reported in
+	// the classname attribute. For Java, this is normally the fully qualified
+	// class name. Eg. "com.google.common.hash.BloomFilterTest".
+	ClassName string `protobuf:"bytes,2,opt,name=class_name,json=className,proto3" json:"class_name,omitempty"`
+	// An enum reported in the result attribute that is used in conjunction with
+	// failures and errors below to report the outcome.
+	Result TestCase_Result `protobuf:"varint,3,opt,name=result,proto3,enum=google.devtools.resultstore.v2.TestCase_Result" json:"result,omitempty"`
+	// Failures reported in <failure> elements within this <testcase>.
+	Failures []*TestFailure `protobuf:"bytes,4,rep,name=failures,proto3" json:"failures,omitempty"`
+	// Errors reported in <error> elements within this <testcase>.
+	Errors []*TestError `protobuf:"bytes,5,rep,name=errors,proto3" json:"errors,omitempty"`
+	// The timing for the TestCase, as reported by the time attribute.
+	Timing *Timing `protobuf:"bytes,7,opt,name=timing,proto3" json:"timing,omitempty"`
+	// Arbitrary name-value pairs, as reported in custom attributes or in a
+	// <properties> element within this <testcase>. Multiple properties are
+	// allowed with the same key. Properties will be returned in lexicographical
+	// order by key.
+	Properties []*Property `protobuf:"bytes,8,rep,name=properties,proto3" json:"properties,omitempty"`
+	// Files produced by this test case, as reported by undeclared output
+	// annotations.
+	// The file IDs must be unique within this list. Duplicate file IDs will
+	// result in an error. Files will be returned in lexicographical order by ID.
+	Files []*File `protobuf:"bytes,9,rep,name=files,proto3" json:"files,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TestCase) Reset()         { *m = TestCase{} }
+func (m *TestCase) String() string { return proto.CompactTextString(m) }
+func (*TestCase) ProtoMessage()    {}
+func (*TestCase) Descriptor() ([]byte, []int) {
+	return fileDescriptor_test_suite_48f943d641146787, []int{2}
+}
+func (m *TestCase) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TestCase.Unmarshal(m, b)
+}
+func (m *TestCase) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TestCase.Marshal(b, m, deterministic)
+}
+func (dst *TestCase) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TestCase.Merge(dst, src)
+}
+func (m *TestCase) XXX_Size() int {
+	return xxx_messageInfo_TestCase.Size(m)
+}
+func (m *TestCase) XXX_DiscardUnknown() {
+	xxx_messageInfo_TestCase.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TestCase proto.InternalMessageInfo
+
+func (m *TestCase) GetCaseName() string {
+	if m != nil {
+		return m.CaseName
+	}
+	return ""
+}
+
+func (m *TestCase) GetClassName() string {
+	if m != nil {
+		return m.ClassName
+	}
+	return ""
+}
+
+func (m *TestCase) GetResult() TestCase_Result {
+	if m != nil {
+		return m.Result
+	}
+	return TestCase_RESULT_UNSPECIFIED
+}
+
+func (m *TestCase) GetFailures() []*TestFailure {
+	if m != nil {
+		return m.Failures
+	}
+	return nil
+}
+
+func (m *TestCase) GetErrors() []*TestError {
+	if m != nil {
+		return m.Errors
+	}
+	return nil
+}
+
+func (m *TestCase) GetTiming() *Timing {
+	if m != nil {
+		return m.Timing
+	}
+	return nil
+}
+
+func (m *TestCase) GetProperties() []*Property {
+	if m != nil {
+		return m.Properties
+	}
+	return nil
+}
+
+func (m *TestCase) GetFiles() []*File {
+	if m != nil {
+		return m.Files
+	}
+	return nil
+}
+
+// Represents a violated assertion, as reported in a <failure> element within a
+// <testcase>. Some languages allow assertions to be made without stopping the
+// test case when they're violated, leading to multiple TestFailures. For Java,
+// multiple TestFailures are used to represent a chained exception.
+type TestFailure struct {
+	// The exception message reported in the message attribute. Typically short,
+	// but may be multi-line. Eg. "Expected 'foo' but was 'bar'".
+	FailureMessage string `protobuf:"bytes,1,opt,name=failure_message,json=failureMessage,proto3" json:"failure_message,omitempty"`
+	// The type of the exception being thrown, reported in the type attribute.
+	// Eg: "org.junit.ComparisonFailure"
+	ExceptionType string `protobuf:"bytes,2,opt,name=exception_type,json=exceptionType,proto3" json:"exception_type,omitempty"`
+	// The stack trace reported as the content of the <failure> element, often in
+	// a CDATA block. This contains one line for each stack frame, each including
+	// a method/function name, a class/file name, and a line number. Most recent
+	// call is usually first, but not for Python stack traces. May contain the
+	// exception_type and message.
+	StackTrace string `protobuf:"bytes,3,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"`
+	// The expected values.
+	//
+	// These values can be diffed against the actual values. Often, there is just
+	// one actual and one expected value. If there is more than one, they should
+	// be compared as an unordered collection.
+	Expected []string `protobuf:"bytes,4,rep,name=expected,proto3" json:"expected,omitempty"`
+	// The actual values.
+	//
+	// These values can be diffed against the expected values. Often, there is
+	// just one actual and one expected value. If there is more than one, they
+	// should be compared as an unordered collection.
+	Actual []string `protobuf:"bytes,5,rep,name=actual,proto3" json:"actual,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TestFailure) Reset()         { *m = TestFailure{} }
+func (m *TestFailure) String() string { return proto.CompactTextString(m) }
+func (*TestFailure) ProtoMessage()    {}
+func (*TestFailure) Descriptor() ([]byte, []int) {
+	return fileDescriptor_test_suite_48f943d641146787, []int{3}
+}
+func (m *TestFailure) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TestFailure.Unmarshal(m, b)
+}
+func (m *TestFailure) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TestFailure.Marshal(b, m, deterministic)
+}
+func (dst *TestFailure) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TestFailure.Merge(dst, src)
+}
+func (m *TestFailure) XXX_Size() int {
+	return xxx_messageInfo_TestFailure.Size(m)
+}
+func (m *TestFailure) XXX_DiscardUnknown() {
+	xxx_messageInfo_TestFailure.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TestFailure proto.InternalMessageInfo
+
+func (m *TestFailure) GetFailureMessage() string {
+	if m != nil {
+		return m.FailureMessage
+	}
+	return ""
+}
+
+func (m *TestFailure) GetExceptionType() string {
+	if m != nil {
+		return m.ExceptionType
+	}
+	return ""
+}
+
+func (m *TestFailure) GetStackTrace() string {
+	if m != nil {
+		return m.StackTrace
+	}
+	return ""
+}
+
+func (m *TestFailure) GetExpected() []string {
+	if m != nil {
+		return m.Expected
+	}
+	return nil
+}
+
+func (m *TestFailure) GetActual() []string {
+	if m != nil {
+		return m.Actual
+	}
+	return nil
+}
+
+// Represents an exception that prevented a test case from completing, as
+// reported in an <error> element within a <testcase>. For Java, multiple
+// TestErrors are used to represent a chained exception.
+type TestError struct {
+	// The exception message, as reported in the message attribute. Typically
+	// short, but may be multi-line. Eg. "argument cannot be null".
+	ErrorMessage string `protobuf:"bytes,1,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"`
+	// The type of the exception being thrown, reported in the type attribute.
+	// For Java, this is a fully qualified Throwable class name.
+	// Eg: "java.lang.IllegalArgumentException"
+	ExceptionType string `protobuf:"bytes,2,opt,name=exception_type,json=exceptionType,proto3" json:"exception_type,omitempty"`
+	// The stack trace reported as the content of the <error> element, often in
+	// a CDATA block. This contains one line for each stack frame, each including
+	// a method/function name, a class/file name, and a line number. Most recent
+	// call is usually first, but not for Python stack traces. May contain the
+	// exception_type and message.
+ StackTrace string `protobuf:"bytes,3,opt,name=stack_trace,json=stackTrace,proto3" json:"stack_trace,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestError) Reset() { *m = TestError{} } +func (m *TestError) String() string { return proto.CompactTextString(m) } +func (*TestError) ProtoMessage() {} +func (*TestError) Descriptor() ([]byte, []int) { + return fileDescriptor_test_suite_48f943d641146787, []int{4} +} +func (m *TestError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestError.Unmarshal(m, b) +} +func (m *TestError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestError.Marshal(b, m, deterministic) +} +func (dst *TestError) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestError.Merge(dst, src) +} +func (m *TestError) XXX_Size() int { + return xxx_messageInfo_TestError.Size(m) +} +func (m *TestError) XXX_DiscardUnknown() { + xxx_messageInfo_TestError.DiscardUnknown(m) +} + +var xxx_messageInfo_TestError proto.InternalMessageInfo + +func (m *TestError) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +func (m *TestError) GetExceptionType() string { + if m != nil { + return m.ExceptionType + } + return "" +} + +func (m *TestError) GetStackTrace() string { + if m != nil { + return m.StackTrace + } + return "" +} + +func init() { + proto.RegisterType((*TestSuite)(nil), "google.devtools.resultstore.v2.TestSuite") + proto.RegisterType((*Test)(nil), "google.devtools.resultstore.v2.Test") + proto.RegisterType((*TestCase)(nil), "google.devtools.resultstore.v2.TestCase") + proto.RegisterType((*TestFailure)(nil), "google.devtools.resultstore.v2.TestFailure") + proto.RegisterType((*TestError)(nil), "google.devtools.resultstore.v2.TestError") + proto.RegisterEnum("google.devtools.resultstore.v2.TestCase_Result", TestCase_Result_name, TestCase_Result_value) +} + +func init() { + proto.RegisterFile("google/devtools/resultstore/v2/test_suite.proto", fileDescriptor_test_suite_48f943d641146787) +} + +var fileDescriptor_test_suite_48f943d641146787 = []byte{ + // 671 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x95, 0x5f, 0x6f, 0xd3, 0x3a, + 0x18, 0xc6, 0x97, 0xb6, 0xcb, 0x92, 0x37, 0x5b, 0x57, 0xf9, 0x62, 0x8a, 0x76, 0xb4, 0x73, 0xaa, + 0x1e, 0xfe, 0x74, 0x9a, 0x94, 0x48, 0xe5, 0x0e, 0x24, 0xa4, 0xad, 0x75, 0xb7, 0x42, 0x57, 0x22, + 0x27, 0xbd, 0xe1, 0xa6, 0x0a, 0x99, 0x17, 0x45, 0x24, 0x75, 0x88, 0xdd, 0x69, 0xe3, 0x13, 0x71, + 0xcd, 0x15, 0x5f, 0x86, 0xef, 0x82, 0x62, 0x67, 0xa3, 0x20, 0x41, 0x8b, 0xe8, 0x5d, 0xfd, 0xd8, + 0xcf, 0xeb, 0x27, 0xef, 0xfb, 0x53, 0x0d, 0x6e, 0xcc, 0x58, 0x9c, 0x52, 0xf7, 0x8a, 0xde, 0x08, + 0xc6, 0x52, 0xee, 0x16, 0x94, 0x2f, 0x52, 0xc1, 0x05, 0x2b, 0xa8, 0x7b, 0xd3, 0x73, 0x05, 0xe5, + 0x62, 0xc6, 0x17, 0x89, 0xa0, 0x4e, 0x5e, 0x30, 0xc1, 0xd0, 0xbf, 0xca, 0xe0, 0xdc, 0x1b, 0x9c, + 0x25, 0x83, 0x73, 0xd3, 0x3b, 0x3c, 0x59, 0x51, 0x30, 0x62, 0x59, 0xc6, 0xe6, 0xaa, 0xd8, 0xe1, + 0xf1, 0x8a, 0xc3, 0xd7, 0x49, 0x5a, 0xdd, 0xdb, 0xf9, 0x52, 0x07, 0x33, 0xa0, 0x5c, 0xf8, 0x65, + 0x16, 0x74, 0x04, 0x20, 0x43, 0xcd, 0xe6, 0x61, 0x46, 0x6d, 0xad, 0xad, 0x75, 0x4d, 0x62, 0x4a, + 0x65, 0x12, 0x66, 0x14, 0x3d, 0x87, 0xed, 0x32, 0x38, 0xb7, 0x6b, 0xed, 0x7a, 0xd7, 0xea, 0x3d, + 0x72, 0x7e, 0x1f, 0xda, 0x29, 0x0b, 0x13, 0x65, 0x41, 0xe7, 0x60, 0x5c, 0x87, 0x49, 0xba, 0x28, + 0x28, 0xb7, 0xeb, 0xd2, 0x7e, 0xb2, 0x8e, 0x7d, 0xa8, 0x3c, 
0xe4, 0xc1, 0x8c, 0x4e, 0x41, 0xa7, + 0x45, 0xc1, 0x0a, 0x6e, 0x37, 0x64, 0x99, 0xe3, 0x75, 0xca, 0xe0, 0xd2, 0x41, 0x2a, 0x23, 0x7a, + 0x09, 0xba, 0x48, 0xb2, 0x64, 0x1e, 0xdb, 0x7a, 0x5b, 0xeb, 0x5a, 0xbd, 0x27, 0x2b, 0x4b, 0xc8, + 0xd3, 0xa4, 0x72, 0xa1, 0x0b, 0x80, 0xbc, 0x60, 0x39, 0x2d, 0x44, 0x42, 0xb9, 0xbd, 0x23, 0x63, + 0x74, 0x57, 0xd5, 0xf0, 0x94, 0xe3, 0x8e, 0x2c, 0x79, 0xcb, 0x8e, 0x96, 0xc3, 0xe0, 0xb6, 0xb1, + 0x5e, 0x47, 0x87, 0x49, 0x4a, 0x89, 0xb2, 0x74, 0x3e, 0x69, 0xd0, 0x28, 0xbf, 0x0d, 0x9d, 0x83, + 0x29, 0x79, 0x8a, 0x42, 0xae, 0x86, 0xb6, 0x46, 0x9a, 0xd2, 0xd8, 0x0f, 0x39, 0xbd, 0xd8, 0x22, + 0x86, 0xa8, 0x7e, 0xa3, 0x57, 0x00, 0xdf, 0xc1, 0xb4, 0x6b, 0xb2, 0xd2, 0x5a, 0xed, 0x95, 0xf4, + 0x5c, 0x6c, 0x11, 0x99, 0x43, 0x2e, 0xce, 0xac, 0x2a, 0x94, 0xb8, 0xcb, 0x69, 0xe7, 0x6b, 0x03, + 0x8c, 0xfb, 0x1b, 0xd1, 0x3f, 0x60, 0x96, 0x49, 0x97, 0x19, 0x33, 0x4a, 0x41, 0x22, 0x76, 0x04, + 0x10, 0xa5, 0x21, 0xe7, 0x6a, 0xb7, 0xa6, 0x08, 0x94, 0x8a, 0xdc, 0x3e, 0x07, 0x5d, 0x5d, 0x6f, + 0xd7, 0xdb, 0x5a, 0xb7, 0xd9, 0x73, 0xd7, 0xfd, 0x4e, 0x87, 0x48, 0x9d, 0x54, 0xf6, 0x1f, 0x70, + 0x6c, 0x6c, 0x06, 0xc7, 0xed, 0xbf, 0xc7, 0x71, 0x67, 0x03, 0x38, 0x1a, 0x9b, 0xc0, 0xd1, 0xfc, + 0x73, 0x1c, 0x3f, 0x82, 0xae, 0x7a, 0x8c, 0x0e, 0x00, 0x11, 0xec, 0x4f, 0xc7, 0xc1, 0x6c, 0x3a, + 0xf1, 0x3d, 0xdc, 0x1f, 0x0d, 0x47, 0x78, 0xd0, 0xda, 0x42, 0x7b, 0x60, 0xf6, 0xdf, 0x5c, 0x7a, + 0x63, 0x1c, 0xe0, 0x41, 0x4b, 0x43, 0xfb, 0x60, 0x8d, 0x26, 0x01, 0x26, 0x64, 0xea, 0x95, 0x42, + 0x4d, 0xee, 0x9f, 0x4e, 0xfa, 0x78, 0x3c, 0xc6, 0x83, 0x56, 0x1d, 0xed, 0x82, 0x31, 0x1c, 0x8d, + 0x03, 0x4c, 0xf0, 0xa0, 0xd5, 0x40, 0x16, 0xec, 0xf8, 0xaf, 0x47, 0x9e, 0x87, 0x07, 0xad, 0x6d, + 0xd4, 0x04, 0xf0, 0xa7, 0x9e, 0x47, 0xb0, 0xef, 0xe3, 0x41, 0x4b, 0xef, 0x7c, 0xd6, 0xc0, 0x5a, + 0x1a, 0x0f, 0x7a, 0x0a, 0xfb, 0xd5, 0x80, 0x66, 0x19, 0xe5, 0x3c, 0x8c, 0xef, 0x41, 0x6b, 0x56, + 0xf2, 0xa5, 0x52, 0xd1, 0x63, 0x68, 0xd2, 0xdb, 0x88, 0xe6, 0x22, 0x61, 0x73, 0x89, 0x6a, 0x85, + 0xdc, 0xde, 0x83, 0x1a, 0xdc, 0xe5, 0x14, 0xfd, 0x07, 0x16, 0x17, 0x61, 0xf4, 0x7e, 0x26, 0x8a, + 0x30, 0xa2, 0x92, 0x3d, 0x93, 0x80, 0x94, 0x82, 0x52, 0x41, 0x87, 0x60, 0xd0, 0xdb, 0x9c, 0x46, + 0x82, 0x5e, 0x49, 0x9c, 0x4c, 0xf2, 0xb0, 0x46, 0x07, 0xa0, 0x87, 0x91, 0x58, 0x84, 0xa9, 0x24, + 0xc4, 0x24, 0xd5, 0xaa, 0x73, 0xab, 0xfe, 0x79, 0x25, 0x0b, 0xe8, 0x7f, 0xd8, 0x93, 0x34, 0xfc, + 0x94, 0x77, 0x57, 0x8a, 0x1b, 0x4e, 0x7b, 0xf6, 0x01, 0x3a, 0x11, 0xcb, 0x56, 0x0c, 0xd7, 0xd3, + 0xde, 0x8e, 0xaa, 0x13, 0x31, 0x4b, 0xc3, 0x79, 0xec, 0xb0, 0x22, 0x76, 0x63, 0x3a, 0x97, 0x0f, + 0x47, 0xf5, 0xc0, 0x85, 0x79, 0xc2, 0x7f, 0xf5, 0xcc, 0xbc, 0x58, 0x5a, 0xbe, 0xd3, 0xa5, 0xeb, + 0xd9, 0xb7, 0x00, 0x00, 0x00, 0xff, 0xff, 0xb6, 0x4d, 0x9a, 0x00, 0x19, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/source/v1/source_context.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/source/v1/source_context.pb.go new file mode 100644 index 0000000..1e40993 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/source/v1/source_context.pb.go @@ -0,0 +1,1170 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/devtools/source/v1/source_context.proto + +package source // import "google.golang.org/genproto/googleapis/devtools/source/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of an Alias. +type AliasContext_Kind int32 + +const ( + // Do not use. + AliasContext_ANY AliasContext_Kind = 0 + // Git tag + AliasContext_FIXED AliasContext_Kind = 1 + // Git branch + AliasContext_MOVABLE AliasContext_Kind = 2 + // OTHER is used to specify non-standard aliases, those not of the kinds + // above. For example, if a Git repo has a ref named "refs/foo/bar", it + // is considered to be of kind OTHER. + AliasContext_OTHER AliasContext_Kind = 4 +) + +var AliasContext_Kind_name = map[int32]string{ + 0: "ANY", + 1: "FIXED", + 2: "MOVABLE", + 4: "OTHER", +} +var AliasContext_Kind_value = map[string]int32{ + "ANY": 0, + "FIXED": 1, + "MOVABLE": 2, + "OTHER": 4, +} + +func (x AliasContext_Kind) String() string { + return proto.EnumName(AliasContext_Kind_name, int32(x)) +} +func (AliasContext_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{2, 0} +} + +// A SourceContext is a reference to a tree of files. A SourceContext together +// with a path point to a unique revision of a single file or directory. +type SourceContext struct { + // A SourceContext can refer any one of the following types of repositories. 
+ // + // Types that are valid to be assigned to Context: + // *SourceContext_CloudRepo + // *SourceContext_CloudWorkspace + // *SourceContext_Gerrit + // *SourceContext_Git + Context isSourceContext_Context `protobuf_oneof:"context"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{0} +} +func (m *SourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceContext.Unmarshal(m, b) +} +func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceContext.Marshal(b, m, deterministic) +} +func (dst *SourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceContext.Merge(dst, src) +} +func (m *SourceContext) XXX_Size() int { + return xxx_messageInfo_SourceContext.Size(m) +} +func (m *SourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_SourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceContext proto.InternalMessageInfo + +type isSourceContext_Context interface { + isSourceContext_Context() +} + +type SourceContext_CloudRepo struct { + CloudRepo *CloudRepoSourceContext `protobuf:"bytes,1,opt,name=cloud_repo,json=cloudRepo,proto3,oneof"` +} + +type SourceContext_CloudWorkspace struct { + CloudWorkspace *CloudWorkspaceSourceContext `protobuf:"bytes,2,opt,name=cloud_workspace,json=cloudWorkspace,proto3,oneof"` +} + +type SourceContext_Gerrit struct { + Gerrit *GerritSourceContext `protobuf:"bytes,3,opt,name=gerrit,proto3,oneof"` +} + +type SourceContext_Git struct { + Git *GitSourceContext `protobuf:"bytes,6,opt,name=git,proto3,oneof"` +} + +func (*SourceContext_CloudRepo) isSourceContext_Context() {} + +func (*SourceContext_CloudWorkspace) isSourceContext_Context() {} + +func (*SourceContext_Gerrit) isSourceContext_Context() {} + +func (*SourceContext_Git) isSourceContext_Context() {} + +func (m *SourceContext) GetContext() isSourceContext_Context { + if m != nil { + return m.Context + } + return nil +} + +func (m *SourceContext) GetCloudRepo() *CloudRepoSourceContext { + if x, ok := m.GetContext().(*SourceContext_CloudRepo); ok { + return x.CloudRepo + } + return nil +} + +func (m *SourceContext) GetCloudWorkspace() *CloudWorkspaceSourceContext { + if x, ok := m.GetContext().(*SourceContext_CloudWorkspace); ok { + return x.CloudWorkspace + } + return nil +} + +func (m *SourceContext) GetGerrit() *GerritSourceContext { + if x, ok := m.GetContext().(*SourceContext_Gerrit); ok { + return x.Gerrit + } + return nil +} + +func (m *SourceContext) GetGit() *GitSourceContext { + if x, ok := m.GetContext().(*SourceContext_Git); ok { + return x.Git + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SourceContext_OneofMarshaler, _SourceContext_OneofUnmarshaler, _SourceContext_OneofSizer, []interface{}{ + (*SourceContext_CloudRepo)(nil), + (*SourceContext_CloudWorkspace)(nil), + (*SourceContext_Gerrit)(nil), + (*SourceContext_Git)(nil), + } +} + +func _SourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudRepo); err != nil { + return err + } + case *SourceContext_CloudWorkspace: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudWorkspace); err != nil { + return err + } + case *SourceContext_Gerrit: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Gerrit); err != nil { + return err + } + case *SourceContext_Git: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Git); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("SourceContext.Context has unexpected type %T", x) + } + return nil +} + +func _SourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SourceContext) + switch tag { + case 1: // context.cloud_repo + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudRepoSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_CloudRepo{msg} + return true, err + case 2: // context.cloud_workspace + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudWorkspaceSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_CloudWorkspace{msg} + return true, err + case 3: // context.gerrit + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GerritSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Gerrit{msg} + return true, err + case 6: // context.git + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GitSourceContext) + err := b.DecodeMessage(msg) + m.Context = &SourceContext_Git{msg} + return true, err + default: + return false, nil + } +} + +func _SourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SourceContext) + // context + switch x := m.Context.(type) { + case *SourceContext_CloudRepo: + s := proto.Size(x.CloudRepo) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_CloudWorkspace: + s := proto.Size(x.CloudWorkspace) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Gerrit: + s := proto.Size(x.Gerrit) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SourceContext_Git: + s := proto.Size(x.Git) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An ExtendedSourceContext is a SourceContext combined with additional +// details describing the context. +type ExtendedSourceContext struct { + // Any source context. + Context *SourceContext `protobuf:"bytes,1,opt,name=context,proto3" json:"context,omitempty"` + // Labels with user defined metadata. 
+ Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtendedSourceContext) Reset() { *m = ExtendedSourceContext{} } +func (m *ExtendedSourceContext) String() string { return proto.CompactTextString(m) } +func (*ExtendedSourceContext) ProtoMessage() {} +func (*ExtendedSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{1} +} +func (m *ExtendedSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtendedSourceContext.Unmarshal(m, b) +} +func (m *ExtendedSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtendedSourceContext.Marshal(b, m, deterministic) +} +func (dst *ExtendedSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtendedSourceContext.Merge(dst, src) +} +func (m *ExtendedSourceContext) XXX_Size() int { + return xxx_messageInfo_ExtendedSourceContext.Size(m) +} +func (m *ExtendedSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_ExtendedSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtendedSourceContext proto.InternalMessageInfo + +func (m *ExtendedSourceContext) GetContext() *SourceContext { + if m != nil { + return m.Context + } + return nil +} + +func (m *ExtendedSourceContext) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// An alias to a repo revision. +type AliasContext struct { + // The alias kind. + Kind AliasContext_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=google.devtools.source.v1.AliasContext_Kind" json:"kind,omitempty"` + // The alias name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AliasContext) Reset() { *m = AliasContext{} } +func (m *AliasContext) String() string { return proto.CompactTextString(m) } +func (*AliasContext) ProtoMessage() {} +func (*AliasContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{2} +} +func (m *AliasContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AliasContext.Unmarshal(m, b) +} +func (m *AliasContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AliasContext.Marshal(b, m, deterministic) +} +func (dst *AliasContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_AliasContext.Merge(dst, src) +} +func (m *AliasContext) XXX_Size() int { + return xxx_messageInfo_AliasContext.Size(m) +} +func (m *AliasContext) XXX_DiscardUnknown() { + xxx_messageInfo_AliasContext.DiscardUnknown(m) +} + +var xxx_messageInfo_AliasContext proto.InternalMessageInfo + +func (m *AliasContext) GetKind() AliasContext_Kind { + if m != nil { + return m.Kind + } + return AliasContext_ANY +} + +func (m *AliasContext) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A CloudRepoSourceContext denotes a particular revision in a cloud +// repo (a repo hosted by the Google Cloud Platform). +type CloudRepoSourceContext struct { + // The ID of the repo. 
+ RepoId *RepoId `protobuf:"bytes,1,opt,name=repo_id,json=repoId,proto3" json:"repo_id,omitempty"` + // A revision in a cloud repository can be identified by either its revision + // ID or its Alias. + // + // Types that are valid to be assigned to Revision: + // *CloudRepoSourceContext_RevisionId + // *CloudRepoSourceContext_AliasName + // *CloudRepoSourceContext_AliasContext + Revision isCloudRepoSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudRepoSourceContext) Reset() { *m = CloudRepoSourceContext{} } +func (m *CloudRepoSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudRepoSourceContext) ProtoMessage() {} +func (*CloudRepoSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{3} +} +func (m *CloudRepoSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudRepoSourceContext.Unmarshal(m, b) +} +func (m *CloudRepoSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudRepoSourceContext.Marshal(b, m, deterministic) +} +func (dst *CloudRepoSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudRepoSourceContext.Merge(dst, src) +} +func (m *CloudRepoSourceContext) XXX_Size() int { + return xxx_messageInfo_CloudRepoSourceContext.Size(m) +} +func (m *CloudRepoSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_CloudRepoSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudRepoSourceContext proto.InternalMessageInfo + +func (m *CloudRepoSourceContext) GetRepoId() *RepoId { + if m != nil { + return m.RepoId + } + return nil +} + +type isCloudRepoSourceContext_Revision interface { + isCloudRepoSourceContext_Revision() +} + +type CloudRepoSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type CloudRepoSourceContext_AliasName struct { + AliasName string `protobuf:"bytes,3,opt,name=alias_name,json=aliasName,proto3,oneof"` +} + +type CloudRepoSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,4,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*CloudRepoSourceContext_RevisionId) isCloudRepoSourceContext_Revision() {} + +func (*CloudRepoSourceContext_AliasName) isCloudRepoSourceContext_Revision() {} + +func (*CloudRepoSourceContext_AliasContext) isCloudRepoSourceContext_Revision() {} + +func (m *CloudRepoSourceContext) GetRevision() isCloudRepoSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *CloudRepoSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *CloudRepoSourceContext) GetAliasName() string { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_AliasName); ok { + return x.AliasName + } + return "" +} + +func (m *CloudRepoSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*CloudRepoSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CloudRepoSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CloudRepoSourceContext_OneofMarshaler, _CloudRepoSourceContext_OneofUnmarshaler, _CloudRepoSourceContext_OneofSizer, []interface{}{ + (*CloudRepoSourceContext_RevisionId)(nil), + (*CloudRepoSourceContext_AliasName)(nil), + (*CloudRepoSourceContext_AliasContext)(nil), + } +} + +func _CloudRepoSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *CloudRepoSourceContext_AliasName: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AliasName) + case *CloudRepoSourceContext_AliasContext: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CloudRepoSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _CloudRepoSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CloudRepoSourceContext) + switch tag { + case 2: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &CloudRepoSourceContext_RevisionId{x} + return true, err + case 3: // revision.alias_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &CloudRepoSourceContext_AliasName{x} + return true, err + case 4: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &CloudRepoSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _CloudRepoSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CloudRepoSourceContext) + // revision + switch x := m.Revision.(type) { + case *CloudRepoSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *CloudRepoSourceContext_AliasName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AliasName))) + n += len(x.AliasName) + case *CloudRepoSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A CloudWorkspaceSourceContext denotes a workspace at a particular snapshot. +type CloudWorkspaceSourceContext struct { + // The ID of the workspace. + WorkspaceId *CloudWorkspaceId `protobuf:"bytes,1,opt,name=workspace_id,json=workspaceId,proto3" json:"workspace_id,omitempty"` + // The ID of the snapshot. + // An empty snapshot_id refers to the most recent snapshot. 
+ SnapshotId string `protobuf:"bytes,2,opt,name=snapshot_id,json=snapshotId,proto3" json:"snapshot_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudWorkspaceSourceContext) Reset() { *m = CloudWorkspaceSourceContext{} } +func (m *CloudWorkspaceSourceContext) String() string { return proto.CompactTextString(m) } +func (*CloudWorkspaceSourceContext) ProtoMessage() {} +func (*CloudWorkspaceSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{4} +} +func (m *CloudWorkspaceSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudWorkspaceSourceContext.Unmarshal(m, b) +} +func (m *CloudWorkspaceSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudWorkspaceSourceContext.Marshal(b, m, deterministic) +} +func (dst *CloudWorkspaceSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudWorkspaceSourceContext.Merge(dst, src) +} +func (m *CloudWorkspaceSourceContext) XXX_Size() int { + return xxx_messageInfo_CloudWorkspaceSourceContext.Size(m) +} +func (m *CloudWorkspaceSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_CloudWorkspaceSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudWorkspaceSourceContext proto.InternalMessageInfo + +func (m *CloudWorkspaceSourceContext) GetWorkspaceId() *CloudWorkspaceId { + if m != nil { + return m.WorkspaceId + } + return nil +} + +func (m *CloudWorkspaceSourceContext) GetSnapshotId() string { + if m != nil { + return m.SnapshotId + } + return "" +} + +// A SourceContext referring to a Gerrit project. +type GerritSourceContext struct { + // The URI of a running Gerrit instance. + HostUri string `protobuf:"bytes,1,opt,name=host_uri,json=hostUri,proto3" json:"host_uri,omitempty"` + // The full project name within the host. Projects may be nested, so + // "project/subproject" is a valid project name. + // The "repo name" is hostURI/project. + GerritProject string `protobuf:"bytes,2,opt,name=gerrit_project,json=gerritProject,proto3" json:"gerrit_project,omitempty"` + // A revision in a Gerrit project can be identified by either its revision ID + // or its alias. 
+ // + // Types that are valid to be assigned to Revision: + // *GerritSourceContext_RevisionId + // *GerritSourceContext_AliasName + // *GerritSourceContext_AliasContext + Revision isGerritSourceContext_Revision `protobuf_oneof:"revision"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GerritSourceContext) Reset() { *m = GerritSourceContext{} } +func (m *GerritSourceContext) String() string { return proto.CompactTextString(m) } +func (*GerritSourceContext) ProtoMessage() {} +func (*GerritSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{5} +} +func (m *GerritSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GerritSourceContext.Unmarshal(m, b) +} +func (m *GerritSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GerritSourceContext.Marshal(b, m, deterministic) +} +func (dst *GerritSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GerritSourceContext.Merge(dst, src) +} +func (m *GerritSourceContext) XXX_Size() int { + return xxx_messageInfo_GerritSourceContext.Size(m) +} +func (m *GerritSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GerritSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GerritSourceContext proto.InternalMessageInfo + +func (m *GerritSourceContext) GetHostUri() string { + if m != nil { + return m.HostUri + } + return "" +} + +func (m *GerritSourceContext) GetGerritProject() string { + if m != nil { + return m.GerritProject + } + return "" +} + +type isGerritSourceContext_Revision interface { + isGerritSourceContext_Revision() +} + +type GerritSourceContext_RevisionId struct { + RevisionId string `protobuf:"bytes,3,opt,name=revision_id,json=revisionId,proto3,oneof"` +} + +type GerritSourceContext_AliasName struct { + AliasName string `protobuf:"bytes,4,opt,name=alias_name,json=aliasName,proto3,oneof"` +} + +type GerritSourceContext_AliasContext struct { + AliasContext *AliasContext `protobuf:"bytes,5,opt,name=alias_context,json=aliasContext,proto3,oneof"` +} + +func (*GerritSourceContext_RevisionId) isGerritSourceContext_Revision() {} + +func (*GerritSourceContext_AliasName) isGerritSourceContext_Revision() {} + +func (*GerritSourceContext_AliasContext) isGerritSourceContext_Revision() {} + +func (m *GerritSourceContext) GetRevision() isGerritSourceContext_Revision { + if m != nil { + return m.Revision + } + return nil +} + +func (m *GerritSourceContext) GetRevisionId() string { + if x, ok := m.GetRevision().(*GerritSourceContext_RevisionId); ok { + return x.RevisionId + } + return "" +} + +func (m *GerritSourceContext) GetAliasName() string { + if x, ok := m.GetRevision().(*GerritSourceContext_AliasName); ok { + return x.AliasName + } + return "" +} + +func (m *GerritSourceContext) GetAliasContext() *AliasContext { + if x, ok := m.GetRevision().(*GerritSourceContext_AliasContext); ok { + return x.AliasContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GerritSourceContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GerritSourceContext_OneofMarshaler, _GerritSourceContext_OneofUnmarshaler, _GerritSourceContext_OneofSizer, []interface{}{ + (*GerritSourceContext_RevisionId)(nil), + (*GerritSourceContext_AliasName)(nil), + (*GerritSourceContext_AliasContext)(nil), + } +} + +func _GerritSourceContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.RevisionId) + case *GerritSourceContext_AliasName: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AliasName) + case *GerritSourceContext_AliasContext: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AliasContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GerritSourceContext.Revision has unexpected type %T", x) + } + return nil +} + +func _GerritSourceContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GerritSourceContext) + switch tag { + case 3: // revision.revision_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &GerritSourceContext_RevisionId{x} + return true, err + case 4: // revision.alias_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Revision = &GerritSourceContext_AliasName{x} + return true, err + case 5: // revision.alias_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AliasContext) + err := b.DecodeMessage(msg) + m.Revision = &GerritSourceContext_AliasContext{msg} + return true, err + default: + return false, nil + } +} + +func _GerritSourceContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GerritSourceContext) + // revision + switch x := m.Revision.(type) { + case *GerritSourceContext_RevisionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.RevisionId))) + n += len(x.RevisionId) + case *GerritSourceContext_AliasName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AliasName))) + n += len(x.AliasName) + case *GerritSourceContext_AliasContext: + s := proto.Size(x.AliasContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A GitSourceContext denotes a particular revision in a third party Git +// repository (e.g. GitHub). +type GitSourceContext struct { + // Git repository URL. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // Git commit hash. + // required. 
+ RevisionId string `protobuf:"bytes,2,opt,name=revision_id,json=revisionId,proto3" json:"revision_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GitSourceContext) Reset() { *m = GitSourceContext{} } +func (m *GitSourceContext) String() string { return proto.CompactTextString(m) } +func (*GitSourceContext) ProtoMessage() {} +func (*GitSourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{6} +} +func (m *GitSourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GitSourceContext.Unmarshal(m, b) +} +func (m *GitSourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GitSourceContext.Marshal(b, m, deterministic) +} +func (dst *GitSourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_GitSourceContext.Merge(dst, src) +} +func (m *GitSourceContext) XXX_Size() int { + return xxx_messageInfo_GitSourceContext.Size(m) +} +func (m *GitSourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_GitSourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_GitSourceContext proto.InternalMessageInfo + +func (m *GitSourceContext) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *GitSourceContext) GetRevisionId() string { + if m != nil { + return m.RevisionId + } + return "" +} + +// A unique identifier for a cloud repo. +type RepoId struct { + // A cloud repository can be identified by either its project ID and + // repository name combination, or its globally unique identifier. + // + // Types that are valid to be assigned to Id: + // *RepoId_ProjectRepoId + // *RepoId_Uid + Id isRepoId_Id `protobuf_oneof:"id"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepoId) Reset() { *m = RepoId{} } +func (m *RepoId) String() string { return proto.CompactTextString(m) } +func (*RepoId) ProtoMessage() {} +func (*RepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{7} +} +func (m *RepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepoId.Unmarshal(m, b) +} +func (m *RepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepoId.Marshal(b, m, deterministic) +} +func (dst *RepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepoId.Merge(dst, src) +} +func (m *RepoId) XXX_Size() int { + return xxx_messageInfo_RepoId.Size(m) +} +func (m *RepoId) XXX_DiscardUnknown() { + xxx_messageInfo_RepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_RepoId proto.InternalMessageInfo + +type isRepoId_Id interface { + isRepoId_Id() +} + +type RepoId_ProjectRepoId struct { + ProjectRepoId *ProjectRepoId `protobuf:"bytes,1,opt,name=project_repo_id,json=projectRepoId,proto3,oneof"` +} + +type RepoId_Uid struct { + Uid string `protobuf:"bytes,2,opt,name=uid,proto3,oneof"` +} + +func (*RepoId_ProjectRepoId) isRepoId_Id() {} + +func (*RepoId_Uid) isRepoId_Id() {} + +func (m *RepoId) GetId() isRepoId_Id { + if m != nil { + return m.Id + } + return nil +} + +func (m *RepoId) GetProjectRepoId() *ProjectRepoId { + if x, ok := m.GetId().(*RepoId_ProjectRepoId); ok { + return x.ProjectRepoId + } + return nil +} + +func (m *RepoId) GetUid() string { + if x, ok := m.GetId().(*RepoId_Uid); ok { + return x.Uid + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RepoId) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RepoId_OneofMarshaler, _RepoId_OneofUnmarshaler, _RepoId_OneofSizer, []interface{}{ + (*RepoId_ProjectRepoId)(nil), + (*RepoId_Uid)(nil), + } +} + +func _RepoId_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProjectRepoId); err != nil { + return err + } + case *RepoId_Uid: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Uid) + case nil: + default: + return fmt.Errorf("RepoId.Id has unexpected type %T", x) + } + return nil +} + +func _RepoId_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RepoId) + switch tag { + case 1: // id.project_repo_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ProjectRepoId) + err := b.DecodeMessage(msg) + m.Id = &RepoId_ProjectRepoId{msg} + return true, err + case 2: // id.uid + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Id = &RepoId_Uid{x} + return true, err + default: + return false, nil + } +} + +func _RepoId_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RepoId) + // id + switch x := m.Id.(type) { + case *RepoId_ProjectRepoId: + s := proto.Size(x.ProjectRepoId) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RepoId_Uid: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Uid))) + n += len(x.Uid) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Selects a repo using a Google Cloud Platform project ID +// (e.g. winged-cargo-31) and a repo name within that project. +type ProjectRepoId struct { + // The ID of the project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The name of the repo. Leave empty for the default repo. 
+ RepoName string `protobuf:"bytes,2,opt,name=repo_name,json=repoName,proto3" json:"repo_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ProjectRepoId) Reset() { *m = ProjectRepoId{} } +func (m *ProjectRepoId) String() string { return proto.CompactTextString(m) } +func (*ProjectRepoId) ProtoMessage() {} +func (*ProjectRepoId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{8} +} +func (m *ProjectRepoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ProjectRepoId.Unmarshal(m, b) +} +func (m *ProjectRepoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ProjectRepoId.Marshal(b, m, deterministic) +} +func (dst *ProjectRepoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_ProjectRepoId.Merge(dst, src) +} +func (m *ProjectRepoId) XXX_Size() int { + return xxx_messageInfo_ProjectRepoId.Size(m) +} +func (m *ProjectRepoId) XXX_DiscardUnknown() { + xxx_messageInfo_ProjectRepoId.DiscardUnknown(m) +} + +var xxx_messageInfo_ProjectRepoId proto.InternalMessageInfo + +func (m *ProjectRepoId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ProjectRepoId) GetRepoName() string { + if m != nil { + return m.RepoName + } + return "" +} + +// A CloudWorkspaceId is a unique identifier for a cloud workspace. +// A cloud workspace is a place associated with a repo where modified files +// can be stored before they are committed. +type CloudWorkspaceId struct { + // The ID of the repo containing the workspace. + RepoId *RepoId `protobuf:"bytes,1,opt,name=repo_id,json=repoId,proto3" json:"repo_id,omitempty"` + // The unique name of the workspace within the repo. This is the name + // chosen by the client in the Source API's CreateWorkspace method. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudWorkspaceId) Reset() { *m = CloudWorkspaceId{} } +func (m *CloudWorkspaceId) String() string { return proto.CompactTextString(m) } +func (*CloudWorkspaceId) ProtoMessage() {} +func (*CloudWorkspaceId) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_95d7fc1394bb1b31, []int{9} +} +func (m *CloudWorkspaceId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudWorkspaceId.Unmarshal(m, b) +} +func (m *CloudWorkspaceId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudWorkspaceId.Marshal(b, m, deterministic) +} +func (dst *CloudWorkspaceId) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudWorkspaceId.Merge(dst, src) +} +func (m *CloudWorkspaceId) XXX_Size() int { + return xxx_messageInfo_CloudWorkspaceId.Size(m) +} +func (m *CloudWorkspaceId) XXX_DiscardUnknown() { + xxx_messageInfo_CloudWorkspaceId.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudWorkspaceId proto.InternalMessageInfo + +func (m *CloudWorkspaceId) GetRepoId() *RepoId { + if m != nil { + return m.RepoId + } + return nil +} + +func (m *CloudWorkspaceId) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*SourceContext)(nil), "google.devtools.source.v1.SourceContext") + proto.RegisterType((*ExtendedSourceContext)(nil), "google.devtools.source.v1.ExtendedSourceContext") + proto.RegisterMapType((map[string]string)(nil), "google.devtools.source.v1.ExtendedSourceContext.LabelsEntry") + proto.RegisterType((*AliasContext)(nil), "google.devtools.source.v1.AliasContext") + proto.RegisterType((*CloudRepoSourceContext)(nil), "google.devtools.source.v1.CloudRepoSourceContext") + proto.RegisterType((*CloudWorkspaceSourceContext)(nil), "google.devtools.source.v1.CloudWorkspaceSourceContext") + proto.RegisterType((*GerritSourceContext)(nil), "google.devtools.source.v1.GerritSourceContext") + proto.RegisterType((*GitSourceContext)(nil), "google.devtools.source.v1.GitSourceContext") + proto.RegisterType((*RepoId)(nil), "google.devtools.source.v1.RepoId") + proto.RegisterType((*ProjectRepoId)(nil), "google.devtools.source.v1.ProjectRepoId") + proto.RegisterType((*CloudWorkspaceId)(nil), "google.devtools.source.v1.CloudWorkspaceId") + proto.RegisterEnum("google.devtools.source.v1.AliasContext_Kind", AliasContext_Kind_name, AliasContext_Kind_value) +} + +func init() { + proto.RegisterFile("google/devtools/source/v1/source_context.proto", fileDescriptor_source_context_95d7fc1394bb1b31) +} + +var fileDescriptor_source_context_95d7fc1394bb1b31 = []byte{ + // 809 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x4e, 0xdb, 0x4a, + 0x14, 0x8e, 0x9d, 0x90, 0xe0, 0x13, 0x02, 0xd1, 0xdc, 0x1f, 0x05, 0xb8, 0x08, 0xf0, 0xd5, 0xd5, + 0x45, 0xa2, 0x72, 0x94, 0x54, 0xaa, 0x5a, 0x5a, 0x89, 0x62, 0x48, 0x49, 0x04, 0x0d, 0x68, 0x4a, + 0xe9, 0x8f, 0x22, 0x45, 0xc6, 0x1e, 0x19, 0x17, 0xe3, 0xb1, 0x6c, 0x27, 0xc0, 0x26, 0xfa, 0xdc, + 0x35, 0x74, 0x0f, 0xdd, 0x40, 0xd7, 0xd0, 0x25, 0x74, 0x01, 0x48, 0x7d, 0xa9, 0x66, 0xc6, 0x86, + 0x24, 0x04, 0x83, 0xd4, 0x3e, 0x79, 0xe6, 0xf8, 0xfb, 0xbe, 0x73, 0xe6, 0xfc, 0xcc, 0x80, 0x66, + 0x53, 0x6a, 0xbb, 0xa4, 0x6a, 0x91, 0x7e, 0x44, 0xa9, 0x1b, 0x56, 0x43, 0xda, 0x0b, 0x4c, 0x52, + 0xed, 0xd7, 0xe2, 0x55, 0xd7, 0xa4, 
0x5e, 0x44, 0xce, 0x23, 0xcd, 0x0f, 0x68, 0x44, 0xd1, 0xac, + 0xc0, 0x6b, 0x09, 0x5e, 0x13, 0x28, 0xad, 0x5f, 0x9b, 0xfb, 0x27, 0x96, 0x32, 0x7c, 0xa7, 0x6a, + 0x78, 0x1e, 0x8d, 0x8c, 0xc8, 0xa1, 0x5e, 0x28, 0x88, 0xea, 0x37, 0x19, 0x4a, 0xaf, 0x38, 0x76, + 0x53, 0x08, 0x22, 0x0c, 0x60, 0xba, 0xb4, 0x67, 0x75, 0x03, 0xe2, 0xd3, 0x8a, 0xb4, 0x24, 0xad, + 0x14, 0xeb, 0x35, 0xed, 0x56, 0x7d, 0x6d, 0x93, 0x81, 0x31, 0xf1, 0xe9, 0x90, 0x4c, 0x33, 0x83, + 0x15, 0x33, 0xf9, 0x83, 0x0c, 0x98, 0x11, 0x9a, 0x67, 0x34, 0x38, 0x09, 0x7d, 0xc3, 0x24, 0x15, + 0x99, 0x0b, 0x3f, 0xba, 0x4b, 0xf8, 0x4d, 0x42, 0x18, 0x55, 0x9f, 0x36, 0x87, 0x7e, 0xa3, 0x26, + 0xe4, 0x6d, 0x12, 0x04, 0x4e, 0x54, 0xc9, 0x72, 0x65, 0x2d, 0x45, 0x79, 0x9b, 0x03, 0x47, 0x15, + 0x63, 0x3e, 0x5a, 0x87, 0xac, 0xed, 0x44, 0x95, 0x3c, 0x97, 0x59, 0x4d, 0x93, 0xb9, 0xa9, 0xc1, + 0x98, 0xba, 0x02, 0x85, 0xb8, 0x3a, 0xea, 0x77, 0x09, 0xfe, 0x6a, 0x9c, 0x47, 0xc4, 0xb3, 0x88, + 0x35, 0x9c, 0x66, 0xfd, 0x0a, 0x14, 0xe7, 0x78, 0x25, 0xc5, 0xd3, 0x10, 0x15, 0x27, 0x44, 0x74, + 0x00, 0x79, 0xd7, 0x38, 0x22, 0x6e, 0x58, 0x91, 0x97, 0xb2, 0x2b, 0xc5, 0xfa, 0xb3, 0x14, 0x89, + 0xb1, 0x51, 0x68, 0xbb, 0x9c, 0xde, 0xf0, 0xa2, 0xe0, 0x02, 0xc7, 0x5a, 0x73, 0x4f, 0xa0, 0x38, + 0x60, 0x46, 0x65, 0xc8, 0x9e, 0x90, 0x0b, 0x1e, 0xa4, 0x82, 0xd9, 0x12, 0xfd, 0x09, 0x13, 0x7d, + 0xc3, 0xed, 0x89, 0x1a, 0x2a, 0x58, 0x6c, 0xd6, 0xe4, 0xc7, 0x92, 0xfa, 0x49, 0x82, 0xa9, 0x0d, + 0xd7, 0x31, 0xc2, 0xe4, 0x94, 0xcf, 0x21, 0x77, 0xe2, 0x78, 0x16, 0x67, 0x4f, 0xd7, 0x1f, 0xa4, + 0xc4, 0x37, 0x48, 0xd3, 0x76, 0x1c, 0xcf, 0xc2, 0x9c, 0x89, 0x10, 0xe4, 0x3c, 0xe3, 0x34, 0xf1, + 0xc5, 0xd7, 0x6a, 0x1d, 0x72, 0x0c, 0x81, 0x0a, 0x90, 0xdd, 0x68, 0xbf, 0x2b, 0x67, 0x90, 0x02, + 0x13, 0x2f, 0x5a, 0x6f, 0x1b, 0x5b, 0x65, 0x09, 0x15, 0xa1, 0xf0, 0x72, 0xef, 0x70, 0x43, 0xdf, + 0x6d, 0x94, 0x65, 0x66, 0xdf, 0x3b, 0x68, 0x36, 0x70, 0x39, 0xa7, 0x5e, 0x4a, 0xf0, 0xf7, 0xf8, + 0x56, 0x45, 0x6b, 0x50, 0x60, 0xbd, 0xde, 0x75, 0xac, 0xb8, 0x14, 0xcb, 0x29, 0x71, 0x32, 0x7a, + 0xcb, 0xc2, 0xf9, 0x80, 0x7f, 0xd1, 0x32, 0x14, 0x03, 0xd2, 0x77, 0x42, 0x87, 0x7a, 0x8c, 0xcf, + 0xa3, 0x6c, 0x66, 0x30, 0x24, 0xc6, 0x96, 0x85, 0xfe, 0x05, 0x30, 0xd8, 0xe1, 0xba, 0xfc, 0x1c, + 0xac, 0x3b, 0x15, 0x5d, 0xae, 0x48, 0x6c, 0x42, 0xb8, 0xbd, 0x6d, 0x9c, 0x12, 0xd4, 0x86, 0x92, + 0x00, 0x25, 0x4d, 0x91, 0xe3, 0x91, 0xfc, 0x7f, 0xcf, 0x8c, 0x35, 0x33, 0x78, 0xca, 0x18, 0xd8, + 0xeb, 0x00, 0x93, 0x49, 0x08, 0xea, 0x47, 0x09, 0xe6, 0x53, 0x86, 0x09, 0xb5, 0x61, 0xea, 0x6a, + 0x2e, 0xaf, 0x93, 0xb0, 0x7a, 0xef, 0xd1, 0x6c, 0x59, 0xb8, 0x78, 0x76, 0xbd, 0x41, 0x8b, 0x50, + 0x0c, 0x3d, 0xc3, 0x0f, 0x8f, 0x69, 0x74, 0x95, 0x13, 0x0c, 0x89, 0xa9, 0x65, 0xa9, 0x3f, 0x24, + 0xf8, 0x63, 0xcc, 0x0c, 0xa2, 0x59, 0x98, 0x3c, 0xa6, 0x61, 0xd4, 0xed, 0x05, 0x4e, 0xdc, 0x6f, + 0x05, 0xb6, 0x7f, 0x1d, 0x38, 0xe8, 0x3f, 0x98, 0x16, 0xe3, 0xd9, 0xf5, 0x03, 0xfa, 0x81, 0x98, + 0x51, 0x2c, 0x5b, 0x12, 0xd6, 0x7d, 0x61, 0x1c, 0x2d, 0x47, 0xf6, 0xce, 0x72, 0xe4, 0xee, 0x59, + 0x8e, 0x89, 0xdf, 0x57, 0x8e, 0x06, 0x94, 0x47, 0x6f, 0x0e, 0x36, 0x64, 0xbd, 0xc0, 0x4d, 0x86, + 0xac, 0x17, 0xb8, 0x2c, 0x89, 0x37, 0x1a, 0x6b, 0xf0, 0x1c, 0x6a, 0x1f, 0xf2, 0xa2, 0x17, 0x11, + 0x86, 0x99, 0x38, 0x29, 0xdd, 0xe1, 0x3e, 0x4e, 0xbb, 0x52, 0xe2, 0x8c, 0x09, 0x89, 0x66, 0x06, + 0x97, 0xfc, 0x41, 0x03, 0x42, 0x90, 0xed, 0x0d, 0xf4, 0x33, 0xdb, 0xe8, 0x39, 0x90, 0x1d, 0x4b, + 0xdd, 0x81, 0xd2, 0x10, 0x17, 0x2d, 0x00, 0x24, 0xee, 0x63, 0xcf, 0x0a, 0x56, 0x62, 0x4b, 0xcb, + 0x42, 0xf3, 0xa0, 0xf0, 0xa8, 0x06, 0xa6, 0x78, 0x92, 0x19, 
0x58, 0x9e, 0xd5, 0x23, 0x28, 0x8f, + 0xf6, 0xd2, 0x2f, 0x8d, 0xe3, 0x98, 0xdb, 0x42, 0xff, 0x22, 0xc1, 0x82, 0x49, 0x4f, 0x6f, 0x17, + 0xd1, 0xd1, 0x50, 0x31, 0xf6, 0xd9, 0xc3, 0xb8, 0x2f, 0xbd, 0x5f, 0x8f, 0x09, 0x36, 0x75, 0x0d, + 0xcf, 0xd6, 0x68, 0x60, 0x57, 0x6d, 0xe2, 0xf1, 0x67, 0xb3, 0x2a, 0x7e, 0x19, 0xbe, 0x13, 0x8e, + 0x79, 0xa2, 0x9f, 0x8a, 0xd5, 0xa5, 0x24, 0x7d, 0x96, 0x17, 0xb7, 0x85, 0x08, 0x3f, 0xa5, 0xb6, + 0x45, 0xfa, 0x07, 0xdc, 0xb7, 0x70, 0xa8, 0x1d, 0xd6, 0xbe, 0x26, 0x88, 0x0e, 0x47, 0x74, 0x12, + 0x44, 0x47, 0x20, 0x3a, 0x87, 0xb5, 0xa3, 0x3c, 0x77, 0xf9, 0xf0, 0x67, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x98, 0x86, 0xf5, 0x6f, 0x14, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/devtools/sourcerepo/v1/sourcerepo.pb.go b/vendor/google.golang.org/genproto/googleapis/devtools/sourcerepo/v1/sourcerepo.pb.go new file mode 100644 index 0000000..796b102 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/devtools/sourcerepo/v1/sourcerepo.pb.go @@ -0,0 +1,776 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/devtools/sourcerepo/v1/sourcerepo.proto + +package sourcerepo // import "google.golang.org/genproto/googleapis/devtools/sourcerepo/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A repository (or repo) is a Git repository storing versioned source content. +type Repo struct { + // Resource name of the repository, of the form + // `projects//repos/`. The repo name may contain slashes. + // eg, `projects/myproject/repos/name/with/slash` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The disk usage of the repo, in bytes. Read-only field. Size is only + // returned by GetRepo. + Size int64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` + // URL to clone the repository from Google Cloud Source Repositories. + // Read-only field. + Url string `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"` + // How this repository mirrors a repository managed by another service. + // Read-only field. 
+ MirrorConfig *MirrorConfig `protobuf:"bytes,4,opt,name=mirror_config,json=mirrorConfig,proto3" json:"mirror_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Repo) Reset() { *m = Repo{} } +func (m *Repo) String() string { return proto.CompactTextString(m) } +func (*Repo) ProtoMessage() {} +func (*Repo) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{0} +} +func (m *Repo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Repo.Unmarshal(m, b) +} +func (m *Repo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Repo.Marshal(b, m, deterministic) +} +func (dst *Repo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Repo.Merge(dst, src) +} +func (m *Repo) XXX_Size() int { + return xxx_messageInfo_Repo.Size(m) +} +func (m *Repo) XXX_DiscardUnknown() { + xxx_messageInfo_Repo.DiscardUnknown(m) +} + +var xxx_messageInfo_Repo proto.InternalMessageInfo + +func (m *Repo) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Repo) GetSize() int64 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *Repo) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *Repo) GetMirrorConfig() *MirrorConfig { + if m != nil { + return m.MirrorConfig + } + return nil +} + +// Configuration to automatically mirror a repository from another +// hosting service, for example GitHub or BitBucket. +type MirrorConfig struct { + // URL of the main repository at the other hosting service. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // ID of the webhook listening to updates to trigger mirroring. + // Removing this webhook from the other hosting service will stop + // Google Cloud Source Repositories from receiving notifications, + // and thereby disabling mirroring. + WebhookId string `protobuf:"bytes,2,opt,name=webhook_id,json=webhookId,proto3" json:"webhook_id,omitempty"` + // ID of the SSH deploy key at the other hosting service. + // Removing this key from the other service would deauthorize + // Google Cloud Source Repositories from mirroring. 
+ DeployKeyId string `protobuf:"bytes,3,opt,name=deploy_key_id,json=deployKeyId,proto3" json:"deploy_key_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MirrorConfig) Reset() { *m = MirrorConfig{} } +func (m *MirrorConfig) String() string { return proto.CompactTextString(m) } +func (*MirrorConfig) ProtoMessage() {} +func (*MirrorConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{1} +} +func (m *MirrorConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MirrorConfig.Unmarshal(m, b) +} +func (m *MirrorConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MirrorConfig.Marshal(b, m, deterministic) +} +func (dst *MirrorConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_MirrorConfig.Merge(dst, src) +} +func (m *MirrorConfig) XXX_Size() int { + return xxx_messageInfo_MirrorConfig.Size(m) +} +func (m *MirrorConfig) XXX_DiscardUnknown() { + xxx_messageInfo_MirrorConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_MirrorConfig proto.InternalMessageInfo + +func (m *MirrorConfig) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *MirrorConfig) GetWebhookId() string { + if m != nil { + return m.WebhookId + } + return "" +} + +func (m *MirrorConfig) GetDeployKeyId() string { + if m != nil { + return m.DeployKeyId + } + return "" +} + +// Request for GetRepo. +type GetRepoRequest struct { + // The name of the requested repository. Values are of the form + // `projects//repos/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRepoRequest) Reset() { *m = GetRepoRequest{} } +func (m *GetRepoRequest) String() string { return proto.CompactTextString(m) } +func (*GetRepoRequest) ProtoMessage() {} +func (*GetRepoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{2} +} +func (m *GetRepoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRepoRequest.Unmarshal(m, b) +} +func (m *GetRepoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRepoRequest.Marshal(b, m, deterministic) +} +func (dst *GetRepoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRepoRequest.Merge(dst, src) +} +func (m *GetRepoRequest) XXX_Size() int { + return xxx_messageInfo_GetRepoRequest.Size(m) +} +func (m *GetRepoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRepoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRepoRequest proto.InternalMessageInfo + +func (m *GetRepoRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request for ListRepos. +type ListReposRequest struct { + // The project ID whose repos should be listed. Values are of the form + // `projects/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Maximum number of repositories to return; between 1 and 500. + // If not set or zero, defaults to 100 at the server. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Resume listing repositories where a prior ListReposResponse + // left off. This is an opaque token that must be obtained from + // a recent, prior ListReposResponse's next_page_token field. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReposRequest) Reset() { *m = ListReposRequest{} } +func (m *ListReposRequest) String() string { return proto.CompactTextString(m) } +func (*ListReposRequest) ProtoMessage() {} +func (*ListReposRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{3} +} +func (m *ListReposRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReposRequest.Unmarshal(m, b) +} +func (m *ListReposRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReposRequest.Marshal(b, m, deterministic) +} +func (dst *ListReposRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReposRequest.Merge(dst, src) +} +func (m *ListReposRequest) XXX_Size() int { + return xxx_messageInfo_ListReposRequest.Size(m) +} +func (m *ListReposRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListReposRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReposRequest proto.InternalMessageInfo + +func (m *ListReposRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListReposRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListReposRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for ListRepos. The size is not set in the returned repositories. +type ListReposResponse struct { + // The listed repos. + Repos []*Repo `protobuf:"bytes,1,rep,name=repos,proto3" json:"repos,omitempty"` + // If non-empty, additional repositories exist within the project. These + // can be retrieved by including this value in the next ListReposRequest's + // page_token field. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListReposResponse) Reset() { *m = ListReposResponse{} } +func (m *ListReposResponse) String() string { return proto.CompactTextString(m) } +func (*ListReposResponse) ProtoMessage() {} +func (*ListReposResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{4} +} +func (m *ListReposResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListReposResponse.Unmarshal(m, b) +} +func (m *ListReposResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListReposResponse.Marshal(b, m, deterministic) +} +func (dst *ListReposResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListReposResponse.Merge(dst, src) +} +func (m *ListReposResponse) XXX_Size() int { + return xxx_messageInfo_ListReposResponse.Size(m) +} +func (m *ListReposResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListReposResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListReposResponse proto.InternalMessageInfo + +func (m *ListReposResponse) GetRepos() []*Repo { + if m != nil { + return m.Repos + } + return nil +} + +func (m *ListReposResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for CreateRepo +type CreateRepoRequest struct { + // The project in which to create the repo. Values are of the form + // `projects/`. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The repo to create. Only name should be set; setting other fields + // is an error. The project in the name should match the parent field. + Repo *Repo `protobuf:"bytes,2,opt,name=repo,proto3" json:"repo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateRepoRequest) Reset() { *m = CreateRepoRequest{} } +func (m *CreateRepoRequest) String() string { return proto.CompactTextString(m) } +func (*CreateRepoRequest) ProtoMessage() {} +func (*CreateRepoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{5} +} +func (m *CreateRepoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateRepoRequest.Unmarshal(m, b) +} +func (m *CreateRepoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateRepoRequest.Marshal(b, m, deterministic) +} +func (dst *CreateRepoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateRepoRequest.Merge(dst, src) +} +func (m *CreateRepoRequest) XXX_Size() int { + return xxx_messageInfo_CreateRepoRequest.Size(m) +} +func (m *CreateRepoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateRepoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateRepoRequest proto.InternalMessageInfo + +func (m *CreateRepoRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateRepoRequest) GetRepo() *Repo { + if m != nil { + return m.Repo + } + return nil +} + +// Request for DeleteRepo. +type DeleteRepoRequest struct { + // The name of the repo to delete. Values are of the form + // `projects//repos/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteRepoRequest) Reset() { *m = DeleteRepoRequest{} } +func (m *DeleteRepoRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteRepoRequest) ProtoMessage() {} +func (*DeleteRepoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_sourcerepo_96faf75821b8b3a1, []int{6} +} +func (m *DeleteRepoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteRepoRequest.Unmarshal(m, b) +} +func (m *DeleteRepoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteRepoRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteRepoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteRepoRequest.Merge(dst, src) +} +func (m *DeleteRepoRequest) XXX_Size() int { + return xxx_messageInfo_DeleteRepoRequest.Size(m) +} +func (m *DeleteRepoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteRepoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteRepoRequest proto.InternalMessageInfo + +func (m *DeleteRepoRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*Repo)(nil), "google.devtools.sourcerepo.v1.Repo") + proto.RegisterType((*MirrorConfig)(nil), "google.devtools.sourcerepo.v1.MirrorConfig") + proto.RegisterType((*GetRepoRequest)(nil), "google.devtools.sourcerepo.v1.GetRepoRequest") + proto.RegisterType((*ListReposRequest)(nil), "google.devtools.sourcerepo.v1.ListReposRequest") + proto.RegisterType((*ListReposResponse)(nil), "google.devtools.sourcerepo.v1.ListReposResponse") + 
proto.RegisterType((*CreateRepoRequest)(nil), "google.devtools.sourcerepo.v1.CreateRepoRequest") + proto.RegisterType((*DeleteRepoRequest)(nil), "google.devtools.sourcerepo.v1.DeleteRepoRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SourceRepoClient is the client API for SourceRepo service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SourceRepoClient interface { + // Returns all repos belonging to a project. The sizes of the repos are + // not set by ListRepos. To get the size of a repo, use GetRepo. + ListRepos(ctx context.Context, in *ListReposRequest, opts ...grpc.CallOption) (*ListReposResponse, error) + // Returns information about a repo. + GetRepo(ctx context.Context, in *GetRepoRequest, opts ...grpc.CallOption) (*Repo, error) + // Creates a repo in the given project with the given name. + // + // If the named repository already exists, `CreateRepo` returns + // `ALREADY_EXISTS`. + CreateRepo(ctx context.Context, in *CreateRepoRequest, opts ...grpc.CallOption) (*Repo, error) + // Deletes a repo. + DeleteRepo(ctx context.Context, in *DeleteRepoRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Sets the access control policy on the specified resource. Replaces any + // existing policy. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type sourceRepoClient struct { + cc *grpc.ClientConn +} + +func NewSourceRepoClient(cc *grpc.ClientConn) SourceRepoClient { + return &sourceRepoClient{cc} +} + +func (c *sourceRepoClient) ListRepos(ctx context.Context, in *ListReposRequest, opts ...grpc.CallOption) (*ListReposResponse, error) { + out := new(ListReposResponse) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/ListRepos", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) GetRepo(ctx context.Context, in *GetRepoRequest, opts ...grpc.CallOption) (*Repo, error) { + out := new(Repo) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/GetRepo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) CreateRepo(ctx context.Context, in *CreateRepoRequest, opts ...grpc.CallOption) (*Repo, error) { + out := new(Repo) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/CreateRepo", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) DeleteRepo(ctx context.Context, in *DeleteRepoRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/DeleteRepo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *sourceRepoClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.devtools.sourcerepo.v1.SourceRepo/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SourceRepoServer is the server API for SourceRepo service. +type SourceRepoServer interface { + // Returns all repos belonging to a project. The sizes of the repos are + // not set by ListRepos. To get the size of a repo, use GetRepo. + ListRepos(context.Context, *ListReposRequest) (*ListReposResponse, error) + // Returns information about a repo. + GetRepo(context.Context, *GetRepoRequest) (*Repo, error) + // Creates a repo in the given project with the given name. + // + // If the named repository already exists, `CreateRepo` returns + // `ALREADY_EXISTS`. + CreateRepo(context.Context, *CreateRepoRequest) (*Repo, error) + // Deletes a repo. + DeleteRepo(context.Context, *DeleteRepoRequest) (*empty.Empty, error) + // Sets the access control policy on the specified resource. Replaces any + // existing policy. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. 
+ TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterSourceRepoServer(s *grpc.Server, srv SourceRepoServer) { + s.RegisterService(&_SourceRepo_serviceDesc, srv) +} + +func _SourceRepo_ListRepos_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListReposRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).ListRepos(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/ListRepos", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).ListRepos(ctx, req.(*ListReposRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SourceRepo_GetRepo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRepoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).GetRepo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/GetRepo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).GetRepo(ctx, req.(*GetRepoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SourceRepo_CreateRepo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateRepoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).CreateRepo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/CreateRepo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).CreateRepo(ctx, req.(*CreateRepoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SourceRepo_DeleteRepo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRepoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).DeleteRepo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/DeleteRepo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).DeleteRepo(ctx, req.(*DeleteRepoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SourceRepo_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _SourceRepo_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SourceRepo_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SourceRepoServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.devtools.sourcerepo.v1.SourceRepo/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SourceRepoServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _SourceRepo_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.devtools.sourcerepo.v1.SourceRepo", + HandlerType: (*SourceRepoServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListRepos", + Handler: _SourceRepo_ListRepos_Handler, + }, + { + MethodName: "GetRepo", + Handler: _SourceRepo_GetRepo_Handler, + }, + { + MethodName: "CreateRepo", + Handler: _SourceRepo_CreateRepo_Handler, + }, + { + MethodName: "DeleteRepo", + Handler: _SourceRepo_DeleteRepo_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _SourceRepo_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _SourceRepo_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _SourceRepo_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/devtools/sourcerepo/v1/sourcerepo.proto", +} + +func init() { + proto.RegisterFile("google/devtools/sourcerepo/v1/sourcerepo.proto", fileDescriptor_sourcerepo_96faf75821b8b3a1) +} + +var fileDescriptor_sourcerepo_96faf75821b8b3a1 = []byte{ + // 743 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xd1, 0x6e, 0xd3, 0x4a, + 0x10, 0xd5, 0x36, 0x69, 0x7b, 0x33, 0x4d, 0x6f, 0xdb, 0x95, 0x6e, 0x15, 0xa5, 0x37, 0x55, 0xae, + 0x7b, 0x29, 0x21, 0x15, 0x36, 0x2d, 0xa0, 0x8a, 0x20, 0x24, 0xd4, 0x82, 0xa2, 0x0a, 0x90, 0xa2, + 0xb4, 0x4f, 0xbc, 0x44, 0x4e, 0x32, 0x35, 0xa6, 0xb6, 0xd7, 0x78, 0x37, 0x81, 0x80, 0x0a, 0x52, + 0xa5, 0xbe, 0x23, 0xfa, 0x19, 0x7c, 0x0e, 0xbf, 0xc0, 0x47, 0xf0, 0x88, 0x76, 0x6d, 0x37, 0x4e, + 0x13, 0x12, 0xbf, 0xed, 0xce, 0x9c, 0x99, 0x73, 0xf6, 0xec, 0x78, 0x0d, 0xba, 0xc5, 0x98, 0xe5, + 0xa0, 0xd1, 0xc5, 0xbe, 0x60, 0xcc, 0xe1, 0x06, 0x67, 0xbd, 0xa0, 0x83, 0x01, 0xfa, 0xcc, 0xe8, + 0xef, 0x26, 0x76, 0xba, 0x1f, 0x30, 0xc1, 0x68, 0x29, 0xc4, 0xeb, 0x31, 0x5e, 0x4f, 0x20, 0xfa, + 0xbb, 0xc5, 0x7f, 0xa3, 0x76, 0xa6, 0x6f, 0x1b, 0xa6, 0xe7, 0x31, 0x61, 0x0a, 0x9b, 0x79, 0x3c, + 0x2c, 0x2e, 0x6e, 0x46, 0x59, 0xdb, 0x74, 0x65, 0x73, 0xdb, 0x74, 0x5b, 0x3e, 0x73, 0xec, 0xce, + 0x20, 0xca, 0x17, 0x47, 0xf3, 0x23, 0xb9, 0x8d, 0x28, 
0xa7, 0x76, 0xed, 0xde, 0xa9, 0x81, 0xae, + 0x2f, 0xa2, 0xa4, 0xf6, 0x8d, 0x40, 0xb6, 0x89, 0x3e, 0xa3, 0x14, 0xb2, 0x9e, 0xe9, 0x62, 0x81, + 0x94, 0x49, 0x25, 0xd7, 0x54, 0x6b, 0x19, 0xe3, 0xf6, 0x47, 0x2c, 0xcc, 0x95, 0x49, 0x25, 0xd3, + 0x54, 0x6b, 0xba, 0x0a, 0x99, 0x5e, 0xe0, 0x14, 0x32, 0x0a, 0x26, 0x97, 0xb4, 0x01, 0xcb, 0xae, + 0x1d, 0x04, 0x2c, 0x68, 0x75, 0x98, 0x77, 0x6a, 0x5b, 0x85, 0x6c, 0x99, 0x54, 0x96, 0xf6, 0x76, + 0xf4, 0xa9, 0x07, 0xd6, 0x5f, 0xa9, 0x9a, 0x43, 0x55, 0xd2, 0xcc, 0xbb, 0x89, 0x9d, 0xd6, 0x81, + 0x7c, 0x32, 0x1b, 0x73, 0x92, 0x21, 0x67, 0x09, 0xe0, 0x3d, 0xb6, 0xdf, 0x30, 0x76, 0xd6, 0xb2, + 0xbb, 0x4a, 0x5f, 0xae, 0x99, 0x8b, 0x22, 0x47, 0x5d, 0xaa, 0xc1, 0x72, 0x17, 0x7d, 0x87, 0x0d, + 0x5a, 0x67, 0x38, 0x90, 0x88, 0x50, 0xee, 0x52, 0x18, 0x7c, 0x81, 0x83, 0xa3, 0xae, 0xf6, 0x3f, + 0xfc, 0x5d, 0x47, 0x21, 0xcf, 0xde, 0xc4, 0x77, 0x3d, 0xe4, 0x62, 0x92, 0x05, 0x5a, 0x1b, 0x56, + 0x5f, 0xda, 0x5c, 0xc1, 0xf8, 0x14, 0x1c, 0xdd, 0x80, 0x9c, 0x6f, 0x5a, 0xd8, 0xba, 0xf6, 0x6b, + 0xbe, 0xf9, 0x97, 0x0c, 0x1c, 0x4b, 0xcf, 0x4a, 0x00, 0x2a, 0x29, 0xd8, 0x19, 0x7a, 0x91, 0x16, + 0x05, 0x3f, 0x91, 0x01, 0xad, 0x0f, 0x6b, 0x09, 0x0e, 0xee, 0x33, 0x8f, 0x23, 0x7d, 0x04, 0xf3, + 0xd2, 0x29, 0x5e, 0x20, 0xe5, 0x4c, 0x65, 0x69, 0x6f, 0x6b, 0x86, 0x9b, 0xea, 0x1c, 0x61, 0x05, + 0xdd, 0x86, 0x15, 0x0f, 0x3f, 0x88, 0x56, 0x82, 0x33, 0x74, 0x68, 0x59, 0x86, 0x1b, 0xd7, 0xbc, + 0x5d, 0x58, 0x3b, 0x0c, 0xd0, 0x14, 0x98, 0x34, 0x61, 0x1d, 0x16, 0x7c, 0x33, 0x40, 0x4f, 0x44, + 0xc7, 0x8b, 0x76, 0x74, 0x1f, 0xb2, 0xb2, 0xbb, 0xea, 0x94, 0x52, 0x8e, 0x2a, 0xd0, 0x6e, 0xc3, + 0xda, 0x33, 0x74, 0x70, 0x94, 0x65, 0x82, 0x85, 0x7b, 0xbf, 0x16, 0x01, 0x8e, 0x55, 0x17, 0x35, + 0x90, 0x57, 0x04, 0x72, 0xd7, 0xb6, 0x50, 0x63, 0x06, 0xe1, 0xcd, 0x4b, 0x2a, 0xde, 0x4b, 0x5f, + 0x10, 0x3a, 0xae, 0x6d, 0x5d, 0xfc, 0xf8, 0x79, 0x35, 0x57, 0xa2, 0x1b, 0xf2, 0x0b, 0xfa, 0x24, + 0x25, 0x3d, 0xf1, 0x03, 0xf6, 0x16, 0x3b, 0x82, 0x1b, 0xd5, 0x73, 0x23, 0xf4, 0xf6, 0x92, 0xc0, + 0x62, 0x34, 0x36, 0xf4, 0xee, 0x0c, 0x8a, 0xd1, 0xf1, 0x2a, 0xa6, 0xf1, 0x4c, 0xdb, 0x56, 0x22, + 0xca, 0x74, 0x73, 0x92, 0x88, 0x50, 0x83, 0x51, 0xad, 0x9e, 0xd3, 0xaf, 0x04, 0x60, 0x78, 0x79, + 0x74, 0xd6, 0x69, 0xc7, 0xee, 0x39, 0x9d, 0x9a, 0x1d, 0xa5, 0xe6, 0x96, 0x56, 0x52, 0x6a, 0xc2, + 0x49, 0x18, 0x37, 0xa5, 0xa6, 0x2e, 0x9a, 0x7e, 0x06, 0x18, 0x5e, 0xf4, 0x4c, 0x45, 0x63, 0x33, + 0x51, 0x5c, 0x8f, 0x2b, 0xe2, 0x87, 0x4a, 0x7f, 0x2e, 0x1f, 0xaa, 0xd8, 0x92, 0xea, 0x2c, 0x4b, + 0x2e, 0x09, 0xe4, 0x8f, 0x51, 0x1c, 0x99, 0x6e, 0x43, 0x3d, 0x7f, 0x54, 0x8b, 0x1b, 0xda, 0xa6, + 0x2b, 0x29, 0x93, 0xc9, 0x98, 0xf4, 0x9f, 0x1b, 0x98, 0x30, 0xab, 0xd5, 0x14, 0xe7, 0x03, 0xcd, + 0x50, 0x9c, 0x01, 0x86, 0xda, 0x27, 0xf2, 0xd6, 0x78, 0xa2, 0x6d, 0x8d, 0x54, 0xe9, 0x05, 0x81, + 0x7c, 0x7d, 0x9a, 0x8e, 0x7a, 0x7a, 0x1d, 0xfb, 0x4a, 0xc7, 0x2e, 0x4d, 0xa3, 0xc3, 0x4a, 0x72, + 0x7e, 0x27, 0x40, 0x4f, 0x90, 0xab, 0x08, 0x06, 0xae, 0xcd, 0xb9, 0xfc, 0x9b, 0xd0, 0xca, 0x0d, + 0x9a, 0x71, 0x48, 0x2c, 0xe8, 0x4e, 0x0a, 0x64, 0xf4, 0xe1, 0x3c, 0x55, 0x22, 0x6b, 0xda, 0xc3, + 0x14, 0x22, 0xc5, 0x58, 0x9b, 0x1a, 0xa9, 0x1e, 0x7c, 0x81, 0xff, 0x3a, 0xcc, 0x9d, 0x3e, 0x31, + 0x07, 0x2b, 0xc3, 0xc7, 0xa1, 0x21, 0x27, 0xa4, 0x41, 0x5e, 0xd7, 0xa3, 0x0a, 0x8b, 0x39, 0xa6, + 0x67, 0xe9, 0x2c, 0xb0, 0x0c, 0x0b, 0x3d, 0x35, 0x3f, 0x46, 0x98, 0x32, 0x7d, 0x9b, 0xff, 0xe1, + 0x17, 0xfd, 0x78, 0xb8, 0x6b, 0x2f, 0xa8, 0x9a, 0xfb, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x31, + 0x75, 0x14, 0x03, 0xd5, 0x07, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/example/library/v1/library.pb.go b/vendor/google.golang.org/genproto/googleapis/example/library/v1/library.pb.go new file mode 100644 index 0000000..b18fb80 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/example/library/v1/library.pb.go @@ -0,0 +1,1303 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/example/library/v1/library.proto + +package library // import "google.golang.org/genproto/googleapis/example/library/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A single book in the library. +type Book struct { + // The resource name of the book. + // Book names have the form `shelves/{shelf_id}/books/{book_id}`. + // The name is ignored when creating a book. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the book author. + Author string `protobuf:"bytes,2,opt,name=author,proto3" json:"author,omitempty"` + // The title of the book. + Title string `protobuf:"bytes,3,opt,name=title,proto3" json:"title,omitempty"` + // Value indicating whether the book has been read. + Read bool `protobuf:"varint,4,opt,name=read,proto3" json:"read,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Book) Reset() { *m = Book{} } +func (m *Book) String() string { return proto.CompactTextString(m) } +func (*Book) ProtoMessage() {} +func (*Book) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{0} +} +func (m *Book) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Book.Unmarshal(m, b) +} +func (m *Book) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Book.Marshal(b, m, deterministic) +} +func (dst *Book) XXX_Merge(src proto.Message) { + xxx_messageInfo_Book.Merge(dst, src) +} +func (m *Book) XXX_Size() int { + return xxx_messageInfo_Book.Size(m) +} +func (m *Book) XXX_DiscardUnknown() { + xxx_messageInfo_Book.DiscardUnknown(m) +} + +var xxx_messageInfo_Book proto.InternalMessageInfo + +func (m *Book) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Book) GetAuthor() string { + if m != nil { + return m.Author + } + return "" +} + +func (m *Book) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Book) GetRead() bool { + if m != nil { + return m.Read + } + return false +} + +// A Shelf contains a collection of books with a theme. +type Shelf struct { + // The resource name of the shelf. + // Shelf names have the form `shelves/{shelf_id}`. + // The name is ignored when creating a shelf. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The theme of the shelf + Theme string `protobuf:"bytes,2,opt,name=theme,proto3" json:"theme,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Shelf) Reset() { *m = Shelf{} } +func (m *Shelf) String() string { return proto.CompactTextString(m) } +func (*Shelf) ProtoMessage() {} +func (*Shelf) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{1} +} +func (m *Shelf) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Shelf.Unmarshal(m, b) +} +func (m *Shelf) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Shelf.Marshal(b, m, deterministic) +} +func (dst *Shelf) XXX_Merge(src proto.Message) { + xxx_messageInfo_Shelf.Merge(dst, src) +} +func (m *Shelf) XXX_Size() int { + return xxx_messageInfo_Shelf.Size(m) +} +func (m *Shelf) XXX_DiscardUnknown() { + xxx_messageInfo_Shelf.DiscardUnknown(m) +} + +var xxx_messageInfo_Shelf proto.InternalMessageInfo + +func (m *Shelf) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Shelf) GetTheme() string { + if m != nil { + return m.Theme + } + return "" +} + +// Request message for LibraryService.CreateShelf. +type CreateShelfRequest struct { + // The shelf to create. + Shelf *Shelf `protobuf:"bytes,1,opt,name=shelf,proto3" json:"shelf,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateShelfRequest) Reset() { *m = CreateShelfRequest{} } +func (m *CreateShelfRequest) String() string { return proto.CompactTextString(m) } +func (*CreateShelfRequest) ProtoMessage() {} +func (*CreateShelfRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{2} +} +func (m *CreateShelfRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateShelfRequest.Unmarshal(m, b) +} +func (m *CreateShelfRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateShelfRequest.Marshal(b, m, deterministic) +} +func (dst *CreateShelfRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateShelfRequest.Merge(dst, src) +} +func (m *CreateShelfRequest) XXX_Size() int { + return xxx_messageInfo_CreateShelfRequest.Size(m) +} +func (m *CreateShelfRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateShelfRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateShelfRequest proto.InternalMessageInfo + +func (m *CreateShelfRequest) GetShelf() *Shelf { + if m != nil { + return m.Shelf + } + return nil +} + +// Request message for LibraryService.GetShelf. +type GetShelfRequest struct { + // The name of the shelf to retrieve. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetShelfRequest) Reset() { *m = GetShelfRequest{} } +func (m *GetShelfRequest) String() string { return proto.CompactTextString(m) } +func (*GetShelfRequest) ProtoMessage() {} +func (*GetShelfRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{3} +} +func (m *GetShelfRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetShelfRequest.Unmarshal(m, b) +} +func (m *GetShelfRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetShelfRequest.Marshal(b, m, deterministic) +} +func (dst *GetShelfRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetShelfRequest.Merge(dst, src) +} +func (m *GetShelfRequest) XXX_Size() int { + return xxx_messageInfo_GetShelfRequest.Size(m) +} +func (m *GetShelfRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetShelfRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetShelfRequest proto.InternalMessageInfo + +func (m *GetShelfRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for LibraryService.ListShelves. +type ListShelvesRequest struct { + // Requested page size. Server may return fewer shelves than requested. + // If unspecified, server will pick an appropriate default. + PageSize int32 `protobuf:"varint,1,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListShelvesResponse.next_page_token][google.example.library.v1.ListShelvesResponse.next_page_token] + // returned from the previous call to `ListShelves` method. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListShelvesRequest) Reset() { *m = ListShelvesRequest{} } +func (m *ListShelvesRequest) String() string { return proto.CompactTextString(m) } +func (*ListShelvesRequest) ProtoMessage() {} +func (*ListShelvesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{4} +} +func (m *ListShelvesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListShelvesRequest.Unmarshal(m, b) +} +func (m *ListShelvesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListShelvesRequest.Marshal(b, m, deterministic) +} +func (dst *ListShelvesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListShelvesRequest.Merge(dst, src) +} +func (m *ListShelvesRequest) XXX_Size() int { + return xxx_messageInfo_ListShelvesRequest.Size(m) +} +func (m *ListShelvesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListShelvesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListShelvesRequest proto.InternalMessageInfo + +func (m *ListShelvesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListShelvesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for LibraryService.ListShelves. +type ListShelvesResponse struct { + // The list of shelves. 
+ Shelves []*Shelf `protobuf:"bytes,1,rep,name=shelves,proto3" json:"shelves,omitempty"` + // A token to retrieve next page of results. + // Pass this value in the + // [ListShelvesRequest.page_token][google.example.library.v1.ListShelvesRequest.page_token] + // field in the subsequent call to `ListShelves` method to retrieve the next + // page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListShelvesResponse) Reset() { *m = ListShelvesResponse{} } +func (m *ListShelvesResponse) String() string { return proto.CompactTextString(m) } +func (*ListShelvesResponse) ProtoMessage() {} +func (*ListShelvesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{5} +} +func (m *ListShelvesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListShelvesResponse.Unmarshal(m, b) +} +func (m *ListShelvesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListShelvesResponse.Marshal(b, m, deterministic) +} +func (dst *ListShelvesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListShelvesResponse.Merge(dst, src) +} +func (m *ListShelvesResponse) XXX_Size() int { + return xxx_messageInfo_ListShelvesResponse.Size(m) +} +func (m *ListShelvesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListShelvesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListShelvesResponse proto.InternalMessageInfo + +func (m *ListShelvesResponse) GetShelves() []*Shelf { + if m != nil { + return m.Shelves + } + return nil +} + +func (m *ListShelvesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for LibraryService.DeleteShelf. +type DeleteShelfRequest struct { + // The name of the shelf to delete. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteShelfRequest) Reset() { *m = DeleteShelfRequest{} } +func (m *DeleteShelfRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteShelfRequest) ProtoMessage() {} +func (*DeleteShelfRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{6} +} +func (m *DeleteShelfRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteShelfRequest.Unmarshal(m, b) +} +func (m *DeleteShelfRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteShelfRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteShelfRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteShelfRequest.Merge(dst, src) +} +func (m *DeleteShelfRequest) XXX_Size() int { + return xxx_messageInfo_DeleteShelfRequest.Size(m) +} +func (m *DeleteShelfRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteShelfRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteShelfRequest proto.InternalMessageInfo + +func (m *DeleteShelfRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Describes the shelf being removed (other_shelf_name) and updated +// (name) in this merge. +type MergeShelvesRequest struct { + // The name of the shelf we're adding books to. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the shelf we're removing books from and deleting. + OtherShelfName string `protobuf:"bytes,2,opt,name=other_shelf_name,json=otherShelfName,proto3" json:"other_shelf_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MergeShelvesRequest) Reset() { *m = MergeShelvesRequest{} } +func (m *MergeShelvesRequest) String() string { return proto.CompactTextString(m) } +func (*MergeShelvesRequest) ProtoMessage() {} +func (*MergeShelvesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{7} +} +func (m *MergeShelvesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MergeShelvesRequest.Unmarshal(m, b) +} +func (m *MergeShelvesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MergeShelvesRequest.Marshal(b, m, deterministic) +} +func (dst *MergeShelvesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MergeShelvesRequest.Merge(dst, src) +} +func (m *MergeShelvesRequest) XXX_Size() int { + return xxx_messageInfo_MergeShelvesRequest.Size(m) +} +func (m *MergeShelvesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MergeShelvesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MergeShelvesRequest proto.InternalMessageInfo + +func (m *MergeShelvesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *MergeShelvesRequest) GetOtherShelfName() string { + if m != nil { + return m.OtherShelfName + } + return "" +} + +// Request message for LibraryService.CreateBook. +type CreateBookRequest struct { + // The name of the shelf in which the book is created. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The book to create. + Book *Book `protobuf:"bytes,2,opt,name=book,proto3" json:"book,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateBookRequest) Reset() { *m = CreateBookRequest{} } +func (m *CreateBookRequest) String() string { return proto.CompactTextString(m) } +func (*CreateBookRequest) ProtoMessage() {} +func (*CreateBookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{8} +} +func (m *CreateBookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateBookRequest.Unmarshal(m, b) +} +func (m *CreateBookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateBookRequest.Marshal(b, m, deterministic) +} +func (dst *CreateBookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateBookRequest.Merge(dst, src) +} +func (m *CreateBookRequest) XXX_Size() int { + return xxx_messageInfo_CreateBookRequest.Size(m) +} +func (m *CreateBookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateBookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateBookRequest proto.InternalMessageInfo + +func (m *CreateBookRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateBookRequest) GetBook() *Book { + if m != nil { + return m.Book + } + return nil +} + +// Request message for LibraryService.GetBook. +type GetBookRequest struct { + // The name of the book to retrieve. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetBookRequest) Reset() { *m = GetBookRequest{} } +func (m *GetBookRequest) String() string { return proto.CompactTextString(m) } +func (*GetBookRequest) ProtoMessage() {} +func (*GetBookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{9} +} +func (m *GetBookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetBookRequest.Unmarshal(m, b) +} +func (m *GetBookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetBookRequest.Marshal(b, m, deterministic) +} +func (dst *GetBookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetBookRequest.Merge(dst, src) +} +func (m *GetBookRequest) XXX_Size() int { + return xxx_messageInfo_GetBookRequest.Size(m) +} +func (m *GetBookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetBookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetBookRequest proto.InternalMessageInfo + +func (m *GetBookRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for LibraryService.ListBooks. +type ListBooksRequest struct { + // The name of the shelf whose books we'd like to list. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Requested page size. Server may return fewer books than requested. + // If unspecified, server will pick an appropriate default. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A token identifying a page of results the server should return. + // Typically, this is the value of + // [ListBooksResponse.next_page_token][google.example.library.v1.ListBooksResponse.next_page_token]. + // returned from the previous call to `ListBooks` method. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBooksRequest) Reset() { *m = ListBooksRequest{} } +func (m *ListBooksRequest) String() string { return proto.CompactTextString(m) } +func (*ListBooksRequest) ProtoMessage() {} +func (*ListBooksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{10} +} +func (m *ListBooksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBooksRequest.Unmarshal(m, b) +} +func (m *ListBooksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBooksRequest.Marshal(b, m, deterministic) +} +func (dst *ListBooksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBooksRequest.Merge(dst, src) +} +func (m *ListBooksRequest) XXX_Size() int { + return xxx_messageInfo_ListBooksRequest.Size(m) +} +func (m *ListBooksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBooksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBooksRequest proto.InternalMessageInfo + +func (m *ListBooksRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListBooksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListBooksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response message for LibraryService.ListBooks. 
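+
+// Usage sketch (illustrative only, not generated code): paging through every
+// book on a shelf with the generated client. The helper name and the page
+// size of 50 are assumptions, not part of the library API; the pagination
+// fields themselves (PageSize, PageToken, NextPageToken) come from the
+// request/response types above and below.
+func exampleListAllBooks(ctx context.Context, c LibraryServiceClient, shelf string) ([]*Book, error) {
+	var books []*Book
+	req := &ListBooksRequest{Name: shelf, PageSize: 50}
+	for {
+		// Each call returns one page plus a token for the next page.
+		resp, err := c.ListBooks(ctx, req)
+		if err != nil {
+			return nil, err
+		}
+		books = append(books, resp.GetBooks()...)
+		if resp.GetNextPageToken() == "" {
+			return books, nil
+		}
+		req.PageToken = resp.GetNextPageToken()
+	}
+}
+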
+type ListBooksResponse struct { + // The list of books. + Books []*Book `protobuf:"bytes,1,rep,name=books,proto3" json:"books,omitempty"` + // A token to retrieve next page of results. + // Pass this value in the + // [ListBooksRequest.page_token][google.example.library.v1.ListBooksRequest.page_token] + // field in the subsequent call to `ListBooks` method to retrieve the next + // page of results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBooksResponse) Reset() { *m = ListBooksResponse{} } +func (m *ListBooksResponse) String() string { return proto.CompactTextString(m) } +func (*ListBooksResponse) ProtoMessage() {} +func (*ListBooksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{11} +} +func (m *ListBooksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBooksResponse.Unmarshal(m, b) +} +func (m *ListBooksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBooksResponse.Marshal(b, m, deterministic) +} +func (dst *ListBooksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBooksResponse.Merge(dst, src) +} +func (m *ListBooksResponse) XXX_Size() int { + return xxx_messageInfo_ListBooksResponse.Size(m) +} +func (m *ListBooksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBooksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBooksResponse proto.InternalMessageInfo + +func (m *ListBooksResponse) GetBooks() []*Book { + if m != nil { + return m.Books + } + return nil +} + +func (m *ListBooksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for LibraryService.UpdateBook. +type UpdateBookRequest struct { + // The name of the book to update. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The book to update with. The name must match or be empty. 
+ Book *Book `protobuf:"bytes,2,opt,name=book,proto3" json:"book,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateBookRequest) Reset() { *m = UpdateBookRequest{} } +func (m *UpdateBookRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateBookRequest) ProtoMessage() {} +func (*UpdateBookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{12} +} +func (m *UpdateBookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateBookRequest.Unmarshal(m, b) +} +func (m *UpdateBookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateBookRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateBookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateBookRequest.Merge(dst, src) +} +func (m *UpdateBookRequest) XXX_Size() int { + return xxx_messageInfo_UpdateBookRequest.Size(m) +} +func (m *UpdateBookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateBookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateBookRequest proto.InternalMessageInfo + +func (m *UpdateBookRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateBookRequest) GetBook() *Book { + if m != nil { + return m.Book + } + return nil +} + +// Request message for LibraryService.DeleteBook. +type DeleteBookRequest struct { + // The name of the book to delete. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteBookRequest) Reset() { *m = DeleteBookRequest{} } +func (m *DeleteBookRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteBookRequest) ProtoMessage() {} +func (*DeleteBookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{13} +} +func (m *DeleteBookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteBookRequest.Unmarshal(m, b) +} +func (m *DeleteBookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteBookRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteBookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteBookRequest.Merge(dst, src) +} +func (m *DeleteBookRequest) XXX_Size() int { + return xxx_messageInfo_DeleteBookRequest.Size(m) +} +func (m *DeleteBookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteBookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteBookRequest proto.InternalMessageInfo + +func (m *DeleteBookRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Describes what book to move (name) and what shelf we're moving it +// to (other_shelf_name). +type MoveBookRequest struct { + // The name of the book to move. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the destination shelf. 
+ OtherShelfName string `protobuf:"bytes,2,opt,name=other_shelf_name,json=otherShelfName,proto3" json:"other_shelf_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MoveBookRequest) Reset() { *m = MoveBookRequest{} } +func (m *MoveBookRequest) String() string { return proto.CompactTextString(m) } +func (*MoveBookRequest) ProtoMessage() {} +func (*MoveBookRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_library_1838ee047f1c2e83, []int{14} +} +func (m *MoveBookRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MoveBookRequest.Unmarshal(m, b) +} +func (m *MoveBookRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MoveBookRequest.Marshal(b, m, deterministic) +} +func (dst *MoveBookRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MoveBookRequest.Merge(dst, src) +} +func (m *MoveBookRequest) XXX_Size() int { + return xxx_messageInfo_MoveBookRequest.Size(m) +} +func (m *MoveBookRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MoveBookRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MoveBookRequest proto.InternalMessageInfo + +func (m *MoveBookRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *MoveBookRequest) GetOtherShelfName() string { + if m != nil { + return m.OtherShelfName + } + return "" +} + +func init() { + proto.RegisterType((*Book)(nil), "google.example.library.v1.Book") + proto.RegisterType((*Shelf)(nil), "google.example.library.v1.Shelf") + proto.RegisterType((*CreateShelfRequest)(nil), "google.example.library.v1.CreateShelfRequest") + proto.RegisterType((*GetShelfRequest)(nil), "google.example.library.v1.GetShelfRequest") + proto.RegisterType((*ListShelvesRequest)(nil), "google.example.library.v1.ListShelvesRequest") + proto.RegisterType((*ListShelvesResponse)(nil), "google.example.library.v1.ListShelvesResponse") + proto.RegisterType((*DeleteShelfRequest)(nil), "google.example.library.v1.DeleteShelfRequest") + proto.RegisterType((*MergeShelvesRequest)(nil), "google.example.library.v1.MergeShelvesRequest") + proto.RegisterType((*CreateBookRequest)(nil), "google.example.library.v1.CreateBookRequest") + proto.RegisterType((*GetBookRequest)(nil), "google.example.library.v1.GetBookRequest") + proto.RegisterType((*ListBooksRequest)(nil), "google.example.library.v1.ListBooksRequest") + proto.RegisterType((*ListBooksResponse)(nil), "google.example.library.v1.ListBooksResponse") + proto.RegisterType((*UpdateBookRequest)(nil), "google.example.library.v1.UpdateBookRequest") + proto.RegisterType((*DeleteBookRequest)(nil), "google.example.library.v1.DeleteBookRequest") + proto.RegisterType((*MoveBookRequest)(nil), "google.example.library.v1.MoveBookRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LibraryServiceClient is the client API for LibraryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LibraryServiceClient interface { + // Creates a shelf, and returns the new Shelf. 
+ CreateShelf(ctx context.Context, in *CreateShelfRequest, opts ...grpc.CallOption) (*Shelf, error) + // Gets a shelf. Returns NOT_FOUND if the shelf does not exist. + GetShelf(ctx context.Context, in *GetShelfRequest, opts ...grpc.CallOption) (*Shelf, error) + // Lists shelves. The order is unspecified but deterministic. Newly created + // shelves will not necessarily be added to the end of this list. + ListShelves(ctx context.Context, in *ListShelvesRequest, opts ...grpc.CallOption) (*ListShelvesResponse, error) + // Deletes a shelf. Returns NOT_FOUND if the shelf does not exist. + DeleteShelf(ctx context.Context, in *DeleteShelfRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Merges two shelves by adding all books from the shelf named + // `other_shelf_name` to shelf `name`, and deletes + // `other_shelf_name`. Returns the updated shelf. + // The book ids of the moved books may not be the same as the original books. + // + // Returns NOT_FOUND if either shelf does not exist. + // This call is a no-op if the specified shelves are the same. + MergeShelves(ctx context.Context, in *MergeShelvesRequest, opts ...grpc.CallOption) (*Shelf, error) + // Creates a book, and returns the new Book. + CreateBook(ctx context.Context, in *CreateBookRequest, opts ...grpc.CallOption) (*Book, error) + // Gets a book. Returns NOT_FOUND if the book does not exist. + GetBook(ctx context.Context, in *GetBookRequest, opts ...grpc.CallOption) (*Book, error) + // Lists books in a shelf. The order is unspecified but deterministic. Newly + // created books will not necessarily be added to the end of this list. + // Returns NOT_FOUND if the shelf does not exist. + ListBooks(ctx context.Context, in *ListBooksRequest, opts ...grpc.CallOption) (*ListBooksResponse, error) + // Deletes a book. Returns NOT_FOUND if the book does not exist. + DeleteBook(ctx context.Context, in *DeleteBookRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates a book. Returns INVALID_ARGUMENT if the name of the book + // is non-empty and does not equal the existing name. + UpdateBook(ctx context.Context, in *UpdateBookRequest, opts ...grpc.CallOption) (*Book, error) + // Moves a book to another shelf, and returns the new book. The book + // id of the new book may not be the same as the original book. + MoveBook(ctx context.Context, in *MoveBookRequest, opts ...grpc.CallOption) (*Book, error) +} + +type libraryServiceClient struct { + cc *grpc.ClientConn +} + +func NewLibraryServiceClient(cc *grpc.ClientConn) LibraryServiceClient { + return &libraryServiceClient{cc} +} + +func (c *libraryServiceClient) CreateShelf(ctx context.Context, in *CreateShelfRequest, opts ...grpc.CallOption) (*Shelf, error) { + out := new(Shelf) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/CreateShelf", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) GetShelf(ctx context.Context, in *GetShelfRequest, opts ...grpc.CallOption) (*Shelf, error) { + out := new(Shelf) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/GetShelf", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) ListShelves(ctx context.Context, in *ListShelvesRequest, opts ...grpc.CallOption) (*ListShelvesResponse, error) { + out := new(ListShelvesResponse) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/ListShelves", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) DeleteShelf(ctx context.Context, in *DeleteShelfRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/DeleteShelf", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) MergeShelves(ctx context.Context, in *MergeShelvesRequest, opts ...grpc.CallOption) (*Shelf, error) { + out := new(Shelf) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/MergeShelves", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) CreateBook(ctx context.Context, in *CreateBookRequest, opts ...grpc.CallOption) (*Book, error) { + out := new(Book) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/CreateBook", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) GetBook(ctx context.Context, in *GetBookRequest, opts ...grpc.CallOption) (*Book, error) { + out := new(Book) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/GetBook", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) ListBooks(ctx context.Context, in *ListBooksRequest, opts ...grpc.CallOption) (*ListBooksResponse, error) { + out := new(ListBooksResponse) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/ListBooks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) DeleteBook(ctx context.Context, in *DeleteBookRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/DeleteBook", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) UpdateBook(ctx context.Context, in *UpdateBookRequest, opts ...grpc.CallOption) (*Book, error) { + out := new(Book) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/UpdateBook", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *libraryServiceClient) MoveBook(ctx context.Context, in *MoveBookRequest, opts ...grpc.CallOption) (*Book, error) { + out := new(Book) + err := c.cc.Invoke(ctx, "/google.example.library.v1.LibraryService/MoveBook", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LibraryServiceServer is the server API for LibraryService service. +type LibraryServiceServer interface { + // Creates a shelf, and returns the new Shelf. + CreateShelf(context.Context, *CreateShelfRequest) (*Shelf, error) + // Gets a shelf. Returns NOT_FOUND if the shelf does not exist. + GetShelf(context.Context, *GetShelfRequest) (*Shelf, error) + // Lists shelves. The order is unspecified but deterministic. Newly created + // shelves will not necessarily be added to the end of this list. + ListShelves(context.Context, *ListShelvesRequest) (*ListShelvesResponse, error) + // Deletes a shelf. Returns NOT_FOUND if the shelf does not exist. + DeleteShelf(context.Context, *DeleteShelfRequest) (*empty.Empty, error) + // Merges two shelves by adding all books from the shelf named + // `other_shelf_name` to shelf `name`, and deletes + // `other_shelf_name`. Returns the updated shelf. 
+ // The book ids of the moved books may not be the same as the original books. + // + // Returns NOT_FOUND if either shelf does not exist. + // This call is a no-op if the specified shelves are the same. + MergeShelves(context.Context, *MergeShelvesRequest) (*Shelf, error) + // Creates a book, and returns the new Book. + CreateBook(context.Context, *CreateBookRequest) (*Book, error) + // Gets a book. Returns NOT_FOUND if the book does not exist. + GetBook(context.Context, *GetBookRequest) (*Book, error) + // Lists books in a shelf. The order is unspecified but deterministic. Newly + // created books will not necessarily be added to the end of this list. + // Returns NOT_FOUND if the shelf does not exist. + ListBooks(context.Context, *ListBooksRequest) (*ListBooksResponse, error) + // Deletes a book. Returns NOT_FOUND if the book does not exist. + DeleteBook(context.Context, *DeleteBookRequest) (*empty.Empty, error) + // Updates a book. Returns INVALID_ARGUMENT if the name of the book + // is non-empty and does not equal the existing name. + UpdateBook(context.Context, *UpdateBookRequest) (*Book, error) + // Moves a book to another shelf, and returns the new book. The book + // id of the new book may not be the same as the original book. + MoveBook(context.Context, *MoveBookRequest) (*Book, error) +} + +func RegisterLibraryServiceServer(s *grpc.Server, srv LibraryServiceServer) { + s.RegisterService(&_LibraryService_serviceDesc, srv) +} + +func _LibraryService_CreateShelf_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateShelfRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).CreateShelf(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/CreateShelf", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).CreateShelf(ctx, req.(*CreateShelfRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_GetShelf_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetShelfRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).GetShelf(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/GetShelf", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).GetShelf(ctx, req.(*GetShelfRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_ListShelves_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListShelvesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).ListShelves(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/ListShelves", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).ListShelves(ctx, req.(*ListShelvesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_DeleteShelf_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteShelfRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).DeleteShelf(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/DeleteShelf", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).DeleteShelf(ctx, req.(*DeleteShelfRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_MergeShelves_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MergeShelvesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).MergeShelves(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/MergeShelves", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).MergeShelves(ctx, req.(*MergeShelvesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_CreateBook_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateBookRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).CreateBook(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/CreateBook", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).CreateBook(ctx, req.(*CreateBookRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_GetBook_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetBookRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).GetBook(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/GetBook", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).GetBook(ctx, req.(*GetBookRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_ListBooks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBooksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).ListBooks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/ListBooks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).ListBooks(ctx, req.(*ListBooksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_DeleteBook_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteBookRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).DeleteBook(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/DeleteBook", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).DeleteBook(ctx, req.(*DeleteBookRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_UpdateBook_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateBookRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).UpdateBook(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/UpdateBook", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).UpdateBook(ctx, req.(*UpdateBookRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LibraryService_MoveBook_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MoveBookRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LibraryServiceServer).MoveBook(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.example.library.v1.LibraryService/MoveBook", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LibraryServiceServer).MoveBook(ctx, req.(*MoveBookRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LibraryService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.example.library.v1.LibraryService", + HandlerType: (*LibraryServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateShelf", + Handler: _LibraryService_CreateShelf_Handler, + }, + { + MethodName: "GetShelf", + Handler: _LibraryService_GetShelf_Handler, + }, + { + MethodName: "ListShelves", + Handler: _LibraryService_ListShelves_Handler, + }, + { + MethodName: "DeleteShelf", + Handler: _LibraryService_DeleteShelf_Handler, + }, + { + MethodName: "MergeShelves", + Handler: _LibraryService_MergeShelves_Handler, + }, + { + MethodName: "CreateBook", + Handler: _LibraryService_CreateBook_Handler, + }, + { + MethodName: "GetBook", + Handler: _LibraryService_GetBook_Handler, + }, + { + MethodName: "ListBooks", + Handler: _LibraryService_ListBooks_Handler, + }, + { + MethodName: "DeleteBook", + Handler: _LibraryService_DeleteBook_Handler, + }, + { + MethodName: "UpdateBook", + Handler: _LibraryService_UpdateBook_Handler, + }, + { + MethodName: "MoveBook", + Handler: _LibraryService_MoveBook_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/example/library/v1/library.proto", +} + +func init() { + proto.RegisterFile("google/example/library/v1/library.proto", fileDescriptor_library_1838ee047f1c2e83) +} + +var fileDescriptor_library_1838ee047f1c2e83 = []byte{ + // 1041 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x57, 0x4b, 0x53, 0xdb, 0x56, + 0x14, 0x1e, 0x19, 0xf3, 0x3a, 0xe6, 0x11, 0x2e, 0x94, 0x38, 0x0e, 0x29, 0x46, 0x49, 0x69, 0x30, + 0x44, 0x9a, 0xd0, 0xa4, 0xe9, 0x28, 0xc3, 0x34, 0x72, 0xd3, 0xc9, 0x74, 0x86, 0x74, 0xa8, 0x69, + 0x37, 0xdd, 0x78, 
0x64, 0x73, 0xb0, 0x35, 0xc8, 0xba, 0x42, 0x12, 0x1e, 0x08, 0xc3, 0xa2, 0xf9, + 0x07, 0x6d, 0xd6, 0x6d, 0x7f, 0x54, 0x17, 0x9d, 0xf1, 0x2a, 0x8b, 0xae, 0xf2, 0x13, 0xba, 0xea, + 0xdc, 0x87, 0x40, 0x7e, 0x20, 0x89, 0x66, 0x65, 0xe9, 0xde, 0x73, 0xee, 0x77, 0x1e, 0xdf, 0xf9, + 0x74, 0x0d, 0x9f, 0xb7, 0x28, 0x6d, 0x39, 0xa8, 0xe3, 0xa9, 0xd5, 0xf1, 0x1c, 0xd4, 0x1d, 0xbb, + 0xe1, 0x5b, 0xfe, 0x99, 0xde, 0x7d, 0x1c, 0x3d, 0x6a, 0x9e, 0x4f, 0x43, 0x4a, 0xee, 0x08, 0x43, + 0x4d, 0x1a, 0x6a, 0xd1, 0x6e, 0xf7, 0x71, 0x69, 0x45, 0x9e, 0x61, 0x79, 0xb6, 0x6e, 0xb9, 0x2e, + 0x0d, 0xad, 0xd0, 0xa6, 0x6e, 0x20, 0x1c, 0x4b, 0xb7, 0x63, 0xbb, 0x4d, 0xc7, 0x46, 0x37, 0x94, + 0x1b, 0xab, 0xb1, 0x8d, 0x43, 0x1b, 0x9d, 0x83, 0x7a, 0x03, 0xdb, 0x56, 0xd7, 0xa6, 0xbe, 0x34, + 0xb8, 0x13, 0x33, 0xf0, 0x31, 0xa0, 0x27, 0x7e, 0x13, 0xe5, 0xd6, 0x5d, 0xb9, 0xc5, 0xdf, 0x1a, + 0x27, 0x87, 0x3a, 0x76, 0xbc, 0x50, 0x86, 0xaa, 0xfe, 0xad, 0x40, 0xbe, 0x4a, 0xe9, 0x11, 0xf9, + 0x1a, 0xf2, 0xae, 0xd5, 0xc1, 0xa2, 0x52, 0x56, 0x1e, 0x4e, 0x57, 0x37, 0xdf, 0x9b, 0xb9, 0x7f, + 0xcd, 0xcf, 0xe0, 0xbe, 0x0c, 0xfd, 0x51, 0x94, 0x8a, 0x38, 0xcb, 0xf2, 0xec, 0x40, 0x6b, 0xd2, + 0x8e, 0xce, 0x5c, 0x6b, 0xdc, 0x91, 0x2c, 0xc3, 0x84, 0x75, 0x12, 0xb6, 0xa9, 0x5f, 0xcc, 0xb1, + 0x23, 0x6a, 0xf2, 0x8d, 0x2c, 0xc1, 0x78, 0x68, 0x87, 0x0e, 0x16, 0xc7, 0xf8, 0xb2, 0x78, 0x21, + 0x04, 0xf2, 0x3e, 0x5a, 0x07, 0xc5, 0x7c, 0x59, 0x79, 0x38, 0x55, 0xe3, 0xcf, 0xc6, 0xee, 0x07, + 0xf3, 0xbb, 0x4c, 0x88, 0x44, 0x0d, 0xda, 0xe8, 0x74, 0x31, 0xd0, 0xcf, 0xd9, 0xc3, 0x61, 0xdd, + 0x3e, 0xb8, 0xd0, 0x1b, 0x94, 0x1e, 0x05, 0xfa, 0x39, 0xfb, 0x61, 0xef, 0xea, 0x1f, 0x0a, 0x8c, + 0xef, 0xb3, 0x5d, 0xf2, 0xa2, 0x2f, 0xb5, 0x2d, 0x9e, 0xda, 0x3a, 0x3c, 0x48, 0x01, 0xe2, 0xbe, + 0x32, 0x37, 0x96, 0x43, 0x1b, 0x3b, 0x28, 0x53, 0x13, 0x2f, 0xc6, 0xce, 0x07, 0xd3, 0xc8, 0x76, + 0x0c, 0x21, 0xc3, 0x01, 0xab, 0x3f, 0x00, 0xf9, 0xc6, 0x47, 0x2b, 0x44, 0x81, 0x84, 0xc7, 0x27, + 0x18, 0x84, 0xe4, 0x39, 0x8c, 0x73, 0x13, 0x1e, 0x6d, 0x61, 0xbb, 0xac, 0x5d, 0xcb, 0x25, 0x8d, + 0xfb, 0x55, 0xc7, 0xde, 0x9b, 0xb9, 0x9a, 0xf0, 0x51, 0x9f, 0xc0, 0xfc, 0x2b, 0x0c, 0xfb, 0xce, + 0x5b, 0xeb, 0x4b, 0x7e, 0x96, 0x27, 0x3f, 0x29, 0x2b, 0x23, 0xb2, 0x53, 0xf7, 0x80, 0xec, 0xda, + 0x01, 0x77, 0xeb, 0x62, 0x10, 0x39, 0xde, 0x85, 0x69, 0xcf, 0x6a, 0x61, 0x3d, 0xb0, 0xdf, 0x08, + 0xef, 0xf1, 0xda, 0x14, 0x5b, 0xd8, 0xb7, 0xdf, 0x20, 0xb9, 0x07, 0xc0, 0x37, 0x43, 0x7a, 0x84, + 0xae, 0xac, 0x0a, 0x37, 0xff, 0x91, 0x2d, 0xa8, 0x67, 0xb0, 0xd8, 0x77, 0x62, 0xe0, 0x51, 0x37, + 0x40, 0x62, 0xc0, 0xa4, 0xac, 0x43, 0x51, 0x29, 0x8f, 0x65, 0xc9, 0xae, 0x16, 0x39, 0x90, 0x75, + 0x98, 0x77, 0xf1, 0x34, 0xac, 0x0f, 0xc1, 0xce, 0xb2, 0xe5, 0xbd, 0x4b, 0xe8, 0x67, 0x40, 0x5e, + 0xa2, 0x83, 0x03, 0x55, 0xcd, 0x50, 0x85, 0x63, 0x58, 0x7c, 0x8d, 0x7e, 0x0b, 0x07, 0xca, 0x90, + 0xee, 0x49, 0x9e, 0xc1, 0x2d, 0x1a, 0xb6, 0xd1, 0xaf, 0x8b, 0xde, 0x72, 0xf3, 0xdc, 0x28, 0xf3, + 0x39, 0x6e, 0xc6, 0x9f, 0xbf, 0x67, 0x90, 0x1e, 0x2c, 0x08, 0x06, 0xf0, 0x31, 0xca, 0x0e, 0xf8, + 0x15, 0xe4, 0x19, 0xcd, 0x39, 0x48, 0x61, 0x7b, 0x35, 0xa1, 0x88, 0xec, 0x60, 0xc1, 0x10, 0xee, + 0xa1, 0x6e, 0xc3, 0xdc, 0x2b, 0x0c, 0xe3, 0x70, 0xe5, 0x3e, 0xb8, 0x19, 0x0e, 0x37, 0x21, 0x34, + 0xe1, 0xb2, 0x30, 0xb7, 0x58, 0x33, 0xd9, 0xca, 0x4d, 0xaa, 0xd2, 0xc7, 0x9f, 0x5c, 0x22, 0x7f, + 0xc6, 0x06, 0xf9, 0xe3, 0xc3, 0x42, 0x0c, 0x52, 0xb2, 0xe7, 0x29, 0x8c, 0xf3, 0x19, 0x97, 0xdc, + 0x49, 0x4b, 0xbb, 0x26, 0xac, 0x33, 0x13, 0xe7, 0x10, 0x16, 0x7e, 0xf2, 0x0e, 0x06, 0x9a, 0x71, + 0xbb, 0x2f, 0x4f, 0x51, 0xc8, 0x8f, 0x6c, 
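+
+// Usage sketch (illustrative only, not generated code): receiving messages on
+// the bidirectional Connect stream and acking each one so FCM stops
+// redelivering it. The helper name is an assumption, and credential metadata
+// (the "authorization" and X-Goog-Api-Key headers described below) is assumed
+// to have been attached to ctx by the caller.
+func exampleReceiveAndAck(ctx context.Context, client ConnectionApiClient) error {
+	stream, err := client.Connect(ctx)
+	if err != nil {
+		return err
+	}
+	for {
+		// Blocks until FCM pushes the next DownstreamResponse.
+		resp, err := stream.Recv()
+		if err != nil {
+			return err // callers may want to treat io.EOF as a clean shutdown
+		}
+		msg := resp.GetMessage()
+		if msg == nil {
+			continue
+		}
+		// Acknowledge receipt using the oneof wrapper type.
+		ack := &UpstreamRequest{
+			RequestType: &UpstreamRequest_Ack{Ack: &Ack{MessageId: msg.GetMessageId()}},
+		}
+		if err := stream.Send(ack); err != nil {
+			return err
+		}
+	}
+}
+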
0xc1, 0x53, 0x58, 0x10, 0x04, 0xbd, 0x59, 0x17, 0x1c, + 0x98, 0x7f, 0x4d, 0xbb, 0x37, 0x73, 0xfa, 0xdf, 0xcc, 0xdc, 0xfe, 0x6d, 0x06, 0xe6, 0x76, 0x45, + 0x0e, 0xfb, 0xe8, 0x77, 0xed, 0x26, 0x92, 0xb7, 0x0a, 0x14, 0x62, 0x7a, 0x45, 0x1e, 0x25, 0xe4, + 0x3c, 0xac, 0x6b, 0xa5, 0xd4, 0x51, 0x57, 0xd5, 0x9e, 0x29, 0x64, 0xec, 0xed, 0x5f, 0xff, 0xbc, + 0xcb, 0x2d, 0xa9, 0x05, 0xf6, 0x61, 0x95, 0xe3, 0x6f, 0x88, 0x1d, 0x72, 0x01, 0x53, 0x91, 0xc0, + 0x91, 0x4a, 0xc2, 0x89, 0x03, 0x2a, 0x98, 0x01, 0xfd, 0x7e, 0xcf, 0xe4, 0x35, 0xe3, 0xe0, 0xcb, + 0x64, 0x89, 0x81, 0x9f, 0xb3, 0x85, 0x9d, 0x48, 0xba, 0x2b, 0x17, 0xe4, 0x17, 0x05, 0x0a, 0x31, + 0x61, 0x4b, 0xac, 0xc1, 0xb0, 0xa4, 0x96, 0xb4, 0xac, 0xe6, 0x82, 0xf1, 0xea, 0x22, 0x0f, 0x66, + 0x96, 0xc4, 0x2b, 0x41, 0xce, 0xa0, 0x10, 0x13, 0xb8, 0xc4, 0x10, 0x86, 0x85, 0xb0, 0xb4, 0x1c, + 0x99, 0x47, 0xb7, 0x01, 0xed, 0x5b, 0x76, 0x1b, 0x18, 0x48, 0xbf, 0x32, 0x3a, 0xfd, 0x3f, 0x15, + 0x98, 0x89, 0x6b, 0x24, 0x49, 0x4a, 0x68, 0x84, 0x98, 0x66, 0x68, 0xc3, 0x4e, 0xcf, 0xfc, 0x84, + 0xc1, 0x6e, 0x0d, 0xf2, 0x96, 0x07, 0xb6, 0xaa, 0x96, 0x46, 0x05, 0x66, 0x74, 0x18, 0x96, 0xa1, + 0x54, 0xc8, 0xaf, 0x0a, 0xc0, 0x95, 0xa4, 0x92, 0xad, 0x54, 0x8e, 0xc6, 0xe6, 0xa9, 0x94, 0x36, + 0xc5, 0xea, 0x93, 0x9e, 0x39, 0xcd, 0x83, 0x63, 0x83, 0xcc, 0x03, 0x52, 0x47, 0x07, 0x24, 0x2e, + 0x23, 0x06, 0x1f, 0x78, 0x36, 0x38, 0x93, 0x52, 0x74, 0xc9, 0x46, 0x32, 0x67, 0x6f, 0x14, 0xcd, + 0x66, 0xbc, 0x65, 0x9f, 0x92, 0x95, 0x11, 0x81, 0xc8, 0x4b, 0x51, 0xe5, 0x82, 0x15, 0x66, 0xfa, + 0x52, 0x52, 0xc9, 0x66, 0x0a, 0x11, 0xe3, 0x5a, 0x5f, 0xda, 0xca, 0x66, 0x2c, 0x39, 0xbb, 0x11, + 0x8f, 0x6a, 0x85, 0x24, 0x94, 0x87, 0x9c, 0x02, 0x5c, 0x29, 0x61, 0x62, 0xaf, 0x86, 0x04, 0xf3, + 0x5a, 0x1e, 0x3f, 0x10, 0xd5, 0xa8, 0x24, 0x57, 0xe3, 0x9d, 0x02, 0x70, 0x25, 0xf6, 0x89, 0xd0, + 0x43, 0xdf, 0x84, 0xf4, 0xc6, 0x7c, 0xd9, 0x33, 0xf3, 0x97, 0x0c, 0xd9, 0x28, 0xad, 0xf1, 0x50, + 0xd8, 0x82, 0x76, 0x4d, 0x3c, 0x92, 0x28, 0xbf, 0x2b, 0x30, 0x15, 0x69, 0x7c, 0xa2, 0xba, 0x0d, + 0x7c, 0x08, 0xd2, 0x23, 0x7a, 0x99, 0x38, 0x55, 0xeb, 0xea, 0x5a, 0x52, 0xb5, 0x8c, 0x0e, 0xed, + 0xb2, 0xe1, 0xaa, 0x1e, 0xc3, 0xbd, 0x26, 0xed, 0x5c, 0x8f, 0x55, 0x9d, 0x91, 0x9f, 0x8c, 0x3d, + 0xd6, 0x93, 0x3d, 0xe5, 0xe7, 0x17, 0xd2, 0xb4, 0x45, 0x1d, 0xcb, 0x6d, 0x69, 0xd4, 0x6f, 0xe9, + 0x2d, 0x74, 0x79, 0xc7, 0xf4, 0xab, 0x6b, 0xf2, 0x88, 0xff, 0x53, 0xcf, 0xe5, 0x63, 0x63, 0x82, + 0x1b, 0x7f, 0xf1, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3a, 0x38, 0xed, 0x3a, 0x7b, 0x0d, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1/connection_api.pb.go b/vendor/google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1/connection_api.pb.go new file mode 100644 index 0000000..99a7b60 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1/connection_api.pb.go @@ -0,0 +1,541 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firebase/fcm/connection/v1alpha1/connection_api.proto + +package connection // import "google.golang.org/genproto/googleapis/firebase/fcm/connection/v1alpha1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request sent to FCM from the connected client. +type UpstreamRequest struct { + // The type of request the client is making to FCM. + // + // Types that are valid to be assigned to RequestType: + // *UpstreamRequest_Ack + RequestType isUpstreamRequest_RequestType `protobuf_oneof:"request_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpstreamRequest) Reset() { *m = UpstreamRequest{} } +func (m *UpstreamRequest) String() string { return proto.CompactTextString(m) } +func (*UpstreamRequest) ProtoMessage() {} +func (*UpstreamRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_connection_api_678fe704a2040da8, []int{0} +} +func (m *UpstreamRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpstreamRequest.Unmarshal(m, b) +} +func (m *UpstreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpstreamRequest.Marshal(b, m, deterministic) +} +func (dst *UpstreamRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpstreamRequest.Merge(dst, src) +} +func (m *UpstreamRequest) XXX_Size() int { + return xxx_messageInfo_UpstreamRequest.Size(m) +} +func (m *UpstreamRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpstreamRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpstreamRequest proto.InternalMessageInfo + +type isUpstreamRequest_RequestType interface { + isUpstreamRequest_RequestType() +} + +type UpstreamRequest_Ack struct { + Ack *Ack `protobuf:"bytes,1,opt,name=ack,proto3,oneof"` +} + +func (*UpstreamRequest_Ack) isUpstreamRequest_RequestType() {} + +func (m *UpstreamRequest) GetRequestType() isUpstreamRequest_RequestType { + if m != nil { + return m.RequestType + } + return nil +} + +func (m *UpstreamRequest) GetAck() *Ack { + if x, ok := m.GetRequestType().(*UpstreamRequest_Ack); ok { + return x.Ack + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*UpstreamRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UpstreamRequest_OneofMarshaler, _UpstreamRequest_OneofUnmarshaler, _UpstreamRequest_OneofSizer, []interface{}{ + (*UpstreamRequest_Ack)(nil), + } +} + +func _UpstreamRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UpstreamRequest) + // request_type + switch x := m.RequestType.(type) { + case *UpstreamRequest_Ack: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Ack); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UpstreamRequest.RequestType has unexpected type %T", x) + } + return nil +} + +func _UpstreamRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UpstreamRequest) + switch tag { + case 1: // request_type.ack + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Ack) + err := b.DecodeMessage(msg) + m.RequestType = &UpstreamRequest_Ack{msg} + return true, err + default: + return false, nil + } +} + +func _UpstreamRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UpstreamRequest) + // request_type + switch x := m.RequestType.(type) { + case *UpstreamRequest_Ack: + s := proto.Size(x.Ack) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response sent to the connected client from FCM. +type DownstreamResponse struct { + // The type of response FCM is sending to the client. + // + // Types that are valid to be assigned to ResponseType: + // *DownstreamResponse_Message + ResponseType isDownstreamResponse_ResponseType `protobuf_oneof:"response_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DownstreamResponse) Reset() { *m = DownstreamResponse{} } +func (m *DownstreamResponse) String() string { return proto.CompactTextString(m) } +func (*DownstreamResponse) ProtoMessage() {} +func (*DownstreamResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_connection_api_678fe704a2040da8, []int{1} +} +func (m *DownstreamResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DownstreamResponse.Unmarshal(m, b) +} +func (m *DownstreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DownstreamResponse.Marshal(b, m, deterministic) +} +func (dst *DownstreamResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DownstreamResponse.Merge(dst, src) +} +func (m *DownstreamResponse) XXX_Size() int { + return xxx_messageInfo_DownstreamResponse.Size(m) +} +func (m *DownstreamResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DownstreamResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DownstreamResponse proto.InternalMessageInfo + +type isDownstreamResponse_ResponseType interface { + isDownstreamResponse_ResponseType() +} + +type DownstreamResponse_Message struct { + Message *Message `protobuf:"bytes,1,opt,name=message,proto3,oneof"` +} + +func (*DownstreamResponse_Message) isDownstreamResponse_ResponseType() {} + +func (m *DownstreamResponse) GetResponseType() isDownstreamResponse_ResponseType { + if m != nil { + return m.ResponseType + } + return nil +} + +func (m *DownstreamResponse) GetMessage() *Message { + if x, ok := 
m.GetResponseType().(*DownstreamResponse_Message); ok { + return x.Message + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*DownstreamResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DownstreamResponse_OneofMarshaler, _DownstreamResponse_OneofUnmarshaler, _DownstreamResponse_OneofSizer, []interface{}{ + (*DownstreamResponse_Message)(nil), + } +} + +func _DownstreamResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DownstreamResponse) + // response_type + switch x := m.ResponseType.(type) { + case *DownstreamResponse_Message: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Message); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DownstreamResponse.ResponseType has unexpected type %T", x) + } + return nil +} + +func _DownstreamResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DownstreamResponse) + switch tag { + case 1: // response_type.message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Message) + err := b.DecodeMessage(msg) + m.ResponseType = &DownstreamResponse_Message{msg} + return true, err + default: + return false, nil + } +} + +func _DownstreamResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DownstreamResponse) + // response_type + switch x := m.ResponseType.(type) { + case *DownstreamResponse_Message: + s := proto.Size(x.Message) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Acknowledgement to indicate a client successfully received an FCM message. +// +// If a message is not acked, FCM will continously resend the message until +// it expires. Duplicate delivery in this case is working as intended. +type Ack struct { + // Id of message being acknowledged + MessageId string `protobuf:"bytes,1,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Ack) Reset() { *m = Ack{} } +func (m *Ack) String() string { return proto.CompactTextString(m) } +func (*Ack) ProtoMessage() {} +func (*Ack) Descriptor() ([]byte, []int) { + return fileDescriptor_connection_api_678fe704a2040da8, []int{2} +} +func (m *Ack) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Ack.Unmarshal(m, b) +} +func (m *Ack) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Ack.Marshal(b, m, deterministic) +} +func (dst *Ack) XXX_Merge(src proto.Message) { + xxx_messageInfo_Ack.Merge(dst, src) +} +func (m *Ack) XXX_Size() int { + return xxx_messageInfo_Ack.Size(m) +} +func (m *Ack) XXX_DiscardUnknown() { + xxx_messageInfo_Ack.DiscardUnknown(m) +} + +var xxx_messageInfo_Ack proto.InternalMessageInfo + +func (m *Ack) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +// Message created through the [Send +// API](https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#resource-message). +type Message struct { + // The identifier of the message. Used to ack the message. 
+ MessageId string `protobuf:"bytes,1,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + // Time the message was received in FCM. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Expiry time of the message. Currently it is always 4 weeks. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // The arbitrary payload set in the [Send + // API](https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#resource-message). + Data map[string]string `protobuf:"bytes,4,rep,name=data,proto3" json:"data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Message) Reset() { *m = Message{} } +func (m *Message) String() string { return proto.CompactTextString(m) } +func (*Message) ProtoMessage() {} +func (*Message) Descriptor() ([]byte, []int) { + return fileDescriptor_connection_api_678fe704a2040da8, []int{3} +} +func (m *Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Message.Unmarshal(m, b) +} +func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Message.Marshal(b, m, deterministic) +} +func (dst *Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Message.Merge(dst, src) +} +func (m *Message) XXX_Size() int { + return xxx_messageInfo_Message.Size(m) +} +func (m *Message) XXX_DiscardUnknown() { + xxx_messageInfo_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Message proto.InternalMessageInfo + +func (m *Message) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +func (m *Message) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Message) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func (m *Message) GetData() map[string]string { + if m != nil { + return m.Data + } + return nil +} + +func init() { + proto.RegisterType((*UpstreamRequest)(nil), "google.firebase.fcm.connection.v1alpha1.UpstreamRequest") + proto.RegisterType((*DownstreamResponse)(nil), "google.firebase.fcm.connection.v1alpha1.DownstreamResponse") + proto.RegisterType((*Ack)(nil), "google.firebase.fcm.connection.v1alpha1.Ack") + proto.RegisterType((*Message)(nil), "google.firebase.fcm.connection.v1alpha1.Message") + proto.RegisterMapType((map[string]string)(nil), "google.firebase.fcm.connection.v1alpha1.Message.DataEntry") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ConnectionApiClient is the client API for ConnectionApi service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ConnectionApiClient interface { + // Creates a streaming connection with FCM to send messages and their + // respective ACKs. + // + // The client credentials need to be passed in the [gRPC + // Metadata](https://grpc.io/docs/guides/concepts.html#metadata). 
The Format + // of the header is: + // Key: "authorization" + // Value: "Checkin [client_id:secret]" + // + // + // The project's API key also needs to be sent to authorize the project. + // That can be set in the X-Goog-Api-Key Metadata header. + Connect(ctx context.Context, opts ...grpc.CallOption) (ConnectionApi_ConnectClient, error) +} + +type connectionApiClient struct { + cc *grpc.ClientConn +} + +func NewConnectionApiClient(cc *grpc.ClientConn) ConnectionApiClient { + return &connectionApiClient{cc} +} + +func (c *connectionApiClient) Connect(ctx context.Context, opts ...grpc.CallOption) (ConnectionApi_ConnectClient, error) { + stream, err := c.cc.NewStream(ctx, &_ConnectionApi_serviceDesc.Streams[0], "/google.firebase.fcm.connection.v1alpha1.ConnectionApi/Connect", opts...) + if err != nil { + return nil, err + } + x := &connectionApiConnectClient{stream} + return x, nil +} + +type ConnectionApi_ConnectClient interface { + Send(*UpstreamRequest) error + Recv() (*DownstreamResponse, error) + grpc.ClientStream +} + +type connectionApiConnectClient struct { + grpc.ClientStream +} + +func (x *connectionApiConnectClient) Send(m *UpstreamRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *connectionApiConnectClient) Recv() (*DownstreamResponse, error) { + m := new(DownstreamResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// ConnectionApiServer is the server API for ConnectionApi service. +type ConnectionApiServer interface { + // Creates a streaming connection with FCM to send messages and their + // respective ACKs. + // + // The client credentials need to be passed in the [gRPC + // Metadata](https://grpc.io/docs/guides/concepts.html#metadata). The Format + // of the header is: + // Key: "authorization" + // Value: "Checkin [client_id:secret]" + // + // + // The project's API key also needs to be sent to authorize the project. + // That can be set in the X-Goog-Api-Key Metadata header. 
+ Connect(ConnectionApi_ConnectServer) error +} + +func RegisterConnectionApiServer(s *grpc.Server, srv ConnectionApiServer) { + s.RegisterService(&_ConnectionApi_serviceDesc, srv) +} + +func _ConnectionApi_Connect_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(ConnectionApiServer).Connect(&connectionApiConnectServer{stream}) +} + +type ConnectionApi_ConnectServer interface { + Send(*DownstreamResponse) error + Recv() (*UpstreamRequest, error) + grpc.ServerStream +} + +type connectionApiConnectServer struct { + grpc.ServerStream +} + +func (x *connectionApiConnectServer) Send(m *DownstreamResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *connectionApiConnectServer) Recv() (*UpstreamRequest, error) { + m := new(UpstreamRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _ConnectionApi_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firebase.fcm.connection.v1alpha1.ConnectionApi", + HandlerType: (*ConnectionApiServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Connect", + Handler: _ConnectionApi_Connect_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/firebase/fcm/connection/v1alpha1/connection_api.proto", +} + +func init() { + proto.RegisterFile("google/firebase/fcm/connection/v1alpha1/connection_api.proto", fileDescriptor_connection_api_678fe704a2040da8) +} + +var fileDescriptor_connection_api_678fe704a2040da8 = []byte{ + // 453 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xc1, 0x6e, 0x13, 0x31, + 0x10, 0x86, 0xb3, 0xd9, 0x42, 0x94, 0x09, 0xa5, 0xc8, 0xe2, 0x10, 0xad, 0x40, 0x54, 0x11, 0x12, + 0x91, 0x40, 0xde, 0x36, 0x1c, 0xa8, 0x1a, 0x0e, 0x24, 0x14, 0xa9, 0x48, 0x80, 0x60, 0x05, 0x17, + 0x2e, 0xd1, 0xc4, 0x71, 0x16, 0x2b, 0x59, 0xdb, 0xd8, 0x4e, 0x21, 0x57, 0x0e, 0x3c, 0x03, 0xef, + 0xc0, 0x4b, 0xa2, 0x5d, 0x7b, 0x5b, 0x04, 0x87, 0x6c, 0x6f, 0xf1, 0xcc, 0xff, 0xfd, 0xff, 0x78, + 0xe2, 0x85, 0xe7, 0xb9, 0x52, 0xf9, 0x9a, 0xa7, 0x4b, 0x61, 0xf8, 0x1c, 0x2d, 0x4f, 0x97, 0xac, + 0x48, 0x99, 0x92, 0x92, 0x33, 0x27, 0x94, 0x4c, 0x2f, 0x8e, 0x71, 0xad, 0xbf, 0xe0, 0xf1, 0x5f, + 0xb5, 0x19, 0x6a, 0x41, 0xb5, 0x51, 0x4e, 0x91, 0x47, 0x9e, 0xa6, 0x35, 0x4d, 0x97, 0xac, 0xa0, + 0x57, 0x4a, 0x5a, 0xd3, 0xc9, 0xbd, 0x10, 0x83, 0x5a, 0xa4, 0x28, 0xa5, 0x72, 0x58, 0xf6, 0xad, + 0xb7, 0x49, 0x1e, 0x84, 0x6e, 0x75, 0x9a, 0x6f, 0x96, 0xa9, 0x13, 0x05, 0xb7, 0x0e, 0x0b, 0xed, + 0x05, 0x03, 0x06, 0x07, 0x9f, 0xb4, 0x75, 0x86, 0x63, 0x91, 0xf1, 0xaf, 0x1b, 0x6e, 0x1d, 0x79, + 0x01, 0x31, 0xb2, 0x55, 0x3f, 0x3a, 0x8c, 0x86, 0xbd, 0xd1, 0x13, 0xda, 0x70, 0x10, 0x3a, 0x61, + 0xab, 0xf3, 0x56, 0x56, 0xa2, 0xd3, 0xdb, 0x70, 0xcb, 0x78, 0xb3, 0x99, 0xdb, 0x6a, 0x3e, 0xb0, + 0x40, 0xce, 0xd4, 0x37, 0x59, 0xc7, 0x58, 0xad, 0xa4, 0xe5, 0xe4, 0x0d, 0x74, 0x0a, 0x6e, 0x2d, + 0xe6, 0x3c, 0x64, 0x1d, 0x35, 0xce, 0x7a, 0xeb, 0xb9, 0xf3, 0x56, 0x56, 0x5b, 0x4c, 0x0f, 0x60, + 0xdf, 0x04, 0x67, 0x1f, 0xfa, 0x10, 0xe2, 0x09, 0x5b, 0x91, 0xfb, 0x00, 0x41, 0x32, 0x13, 0x8b, + 0x2a, 0xa8, 0x9b, 0x75, 0x43, 0xe5, 0xf5, 0x62, 0xf0, 0xbb, 0x0d, 0x9d, 0xe0, 0xb6, 0x43, 0x4a, + 0xc6, 0xd0, 0x63, 0x86, 0xa3, 0xe3, 0xb3, 0x72, 0x89, 0xfd, 0x76, 0x35, 0x73, 0x52, 0xcf, 0x5c, + 0x6f, 0x98, 0x7e, 0xac, 0x37, 0x9c, 0x81, 0x97, 0x97, 0x85, 0x12, 0xe6, 0xdf, 0xb5, 0x30, 0x01, + 0x8e, 0x77, 0xc3, 0x5e, 0x5e, 0xc1, 0xef, 0x60, 0x6f, 0x81, 0x0e, 0xfb, 0x7b, 0x87, 0xf1, 0xb0, + 0x37, 0x3a, 
0xbd, 0xee, 0x9a, 0xe8, 0x19, 0x3a, 0x7c, 0x25, 0x9d, 0xd9, 0x66, 0x95, 0x4f, 0xf2, + 0x0c, 0xba, 0x97, 0x25, 0x72, 0x07, 0xe2, 0x15, 0xdf, 0x86, 0xeb, 0x96, 0x3f, 0xc9, 0x5d, 0xb8, + 0x71, 0x81, 0xeb, 0x8d, 0xbf, 0x62, 0x37, 0xf3, 0x87, 0xd3, 0xf6, 0x49, 0x34, 0xfa, 0x15, 0xc1, + 0xfe, 0xcb, 0xcb, 0xa0, 0x89, 0x16, 0xe4, 0x67, 0x04, 0x9d, 0x50, 0x21, 0x27, 0x8d, 0x07, 0xfb, + 0xe7, 0xc9, 0x25, 0xe3, 0xc6, 0xe4, 0xff, 0xef, 0x68, 0xd0, 0x1a, 0x46, 0x47, 0xd1, 0xf4, 0x47, + 0x04, 0x8f, 0x99, 0x2a, 0x9a, 0x1a, 0xbd, 0x8f, 0x3e, 0x7f, 0x08, 0xd2, 0x5c, 0xad, 0x51, 0xe6, + 0x54, 0x99, 0x3c, 0xcd, 0xb9, 0xac, 0xfe, 0x8a, 0xd4, 0xb7, 0x50, 0x0b, 0xbb, 0xf3, 0xfb, 0x1d, + 0x5f, 0xd5, 0xe6, 0x37, 0x2b, 0xfa, 0xe9, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe4, 0x30, 0x40, + 0x1a, 0xfc, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/field.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/field.pb.go new file mode 100644 index 0000000..1a47eae --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/field.pb.go @@ -0,0 +1,213 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1/field.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a single field in the database. +// +// Fields are grouped by their "Collection Group", which represent all +// collections in the database with the same id. +type Field struct { + // A field name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}` + // + // A field path may be a simple field name, e.g. `address` or a path to fields + // within map_value , e.g. `address.city`, + // or a special field path. The only valid special field is `*`, which + // represents any field. + // + // Field paths may be quoted using ` (backtick). The only character that needs + // to be escaped within a quoted field path is the backtick character itself, + // escaped using a backslash. Special characters in field paths that + // must be quoted include: `*`, `.`, + // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters. + // + // Examples: + // (Note: Comments here are written in markdown syntax, so there is an + // additional layer of backticks to represent a code block) + // `\`address.city\`` represents a field named `address.city`, not the map key + // `city` in the field `address`. + // `\`*\`` represents a field named `*`, not any field. + // + // A special `Field` contains the default indexing settings for all fields. + // This field's resource name is: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*` + // Indexes defined on this `Field` will be applied to all fields which do not + // have their own `Field` index configuration. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The index configuration for this field. If unset, field indexing will + // revert to the configuration defined by the `ancestor_field`. To + // explicitly remove all indexes for this field, specify an index config + // with an empty list of indexes. + IndexConfig *Field_IndexConfig `protobuf:"bytes,2,opt,name=index_config,json=indexConfig,proto3" json:"index_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field) Reset() { *m = Field{} } +func (m *Field) String() string { return proto.CompactTextString(m) } +func (*Field) ProtoMessage() {} +func (*Field) Descriptor() ([]byte, []int) { + return fileDescriptor_field_f3c3d86fef574b48, []int{0} +} +func (m *Field) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field.Unmarshal(m, b) +} +func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field.Marshal(b, m, deterministic) +} +func (dst *Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field.Merge(dst, src) +} +func (m *Field) XXX_Size() int { + return xxx_messageInfo_Field.Size(m) +} +func (m *Field) XXX_DiscardUnknown() { + xxx_messageInfo_Field.DiscardUnknown(m) +} + +var xxx_messageInfo_Field proto.InternalMessageInfo + +func (m *Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Field) GetIndexConfig() *Field_IndexConfig { + if m != nil { + return m.IndexConfig + } + return nil +} + +// The index configuration for this field. +type Field_IndexConfig struct { + // The indexes supported for this field. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // Output only. + // When true, the `Field`'s index configuration is set from the + // configuration specified by the `ancestor_field`. + // When false, the `Field`'s index configuration is defined explicitly. + UsesAncestorConfig bool `protobuf:"varint,2,opt,name=uses_ancestor_config,json=usesAncestorConfig,proto3" json:"uses_ancestor_config,omitempty"` + // Output only. + // Specifies the resource name of the `Field` from which this field's + // index configuration is set (when `uses_ancestor_config` is true), + // or from which it *would* be set if this field had no index configuration + // (when `uses_ancestor_config` is false). + AncestorField string `protobuf:"bytes,3,opt,name=ancestor_field,json=ancestorField,proto3" json:"ancestor_field,omitempty"` + // Output only + // When true, the `Field`'s index configuration is in the process of being + // reverted. Once complete, the index config will transition to the same + // state as the field specified by `ancestor_field`, at which point + // `uses_ancestor_config` will be `true` and `reverting` will be `false`. 
+ Reverting bool `protobuf:"varint,4,opt,name=reverting,proto3" json:"reverting,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field_IndexConfig) Reset() { *m = Field_IndexConfig{} } +func (m *Field_IndexConfig) String() string { return proto.CompactTextString(m) } +func (*Field_IndexConfig) ProtoMessage() {} +func (*Field_IndexConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_field_f3c3d86fef574b48, []int{0, 0} +} +func (m *Field_IndexConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field_IndexConfig.Unmarshal(m, b) +} +func (m *Field_IndexConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field_IndexConfig.Marshal(b, m, deterministic) +} +func (dst *Field_IndexConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field_IndexConfig.Merge(dst, src) +} +func (m *Field_IndexConfig) XXX_Size() int { + return xxx_messageInfo_Field_IndexConfig.Size(m) +} +func (m *Field_IndexConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Field_IndexConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Field_IndexConfig proto.InternalMessageInfo + +func (m *Field_IndexConfig) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *Field_IndexConfig) GetUsesAncestorConfig() bool { + if m != nil { + return m.UsesAncestorConfig + } + return false +} + +func (m *Field_IndexConfig) GetAncestorField() string { + if m != nil { + return m.AncestorField + } + return "" +} + +func (m *Field_IndexConfig) GetReverting() bool { + if m != nil { + return m.Reverting + } + return false +} + +func init() { + proto.RegisterType((*Field)(nil), "google.firestore.admin.v1.Field") + proto.RegisterType((*Field_IndexConfig)(nil), "google.firestore.admin.v1.Field.IndexConfig") +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1/field.proto", fileDescriptor_field_f3c3d86fef574b48) +} + +var fileDescriptor_field_f3c3d86fef574b48 = []byte{ + // 352 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x4b, 0xc3, 0x30, + 0x18, 0xc6, 0x49, 0x37, 0xff, 0x2c, 0x55, 0x0f, 0xc1, 0x43, 0x1d, 0x13, 0x8b, 0x30, 0xd8, 0x41, + 0x12, 0x3b, 0x6f, 0x0a, 0xc2, 0x36, 0xd8, 0xf0, 0xe4, 0xa8, 0xb0, 0x83, 0x14, 0x46, 0x5c, 0xb3, + 0x10, 0xe8, 0x92, 0xd1, 0x76, 0xc3, 0xcf, 0xe3, 0xd1, 0x8b, 0x57, 0x8f, 0x9e, 0xfd, 0x54, 0xd2, + 0xb7, 0x7f, 0xb6, 0x83, 0xdb, 0x2d, 0xcd, 0xf3, 0x7b, 0x9f, 0xe7, 0xed, 0x13, 0xdc, 0x96, 0xc6, + 0xc8, 0x48, 0xb0, 0xb9, 0x8a, 0x45, 0x92, 0x9a, 0x58, 0x30, 0x1e, 0x2e, 0x94, 0x66, 0x6b, 0x8f, + 0xcd, 0x95, 0x88, 0x42, 0xba, 0x8c, 0x4d, 0x6a, 0xc8, 0x45, 0x8e, 0xd1, 0x0a, 0xa3, 0x80, 0xd1, + 0xb5, 0xd7, 0xdc, 0xe3, 0xa0, 0x74, 0x28, 0xde, 0x73, 0x87, 0x66, 0xab, 0xc0, 0xf8, 0x52, 0x31, + 0xae, 0xb5, 0x49, 0x79, 0xaa, 0x8c, 0x4e, 0x72, 0xf5, 0xfa, 0xcb, 0xc2, 0x07, 0xc3, 0x2c, 0x8f, + 0x10, 0x5c, 0xd7, 0x7c, 0x21, 0x1c, 0xe4, 0xa2, 0x4e, 0xc3, 0x87, 0x33, 0x79, 0xc6, 0x27, 0x60, + 0x35, 0x9d, 0x19, 0x3d, 0x57, 0xd2, 0xb1, 0x5c, 0xd4, 0xb1, 0xbb, 0x37, 0x74, 0xe7, 0x52, 0x14, + 0xbc, 0xe8, 0x53, 0x36, 0x34, 0x80, 0x19, 0xdf, 0x56, 0x9b, 0x8f, 0xe6, 0x0f, 0xc2, 0xf6, 0x96, + 0x48, 0xee, 0xf1, 0x11, 0xc8, 0x22, 0x71, 0x90, 0x5b, 0xeb, 0xd8, 0x5d, 0x77, 0x8f, 0x37, 0x0c, + 0xfa, 0xe5, 0x00, 0xb9, 0xc5, 0xe7, 0xab, 0x44, 0x24, 0x53, 0xae, 0x67, 0x40, 0x6e, 0x2f, 0x79, + 0xec, 0x93, 0x4c, 0xeb, 0x15, 0x52, 0x91, 0xd6, 0xc6, 0x67, 0x15, 0x0c, 0x25, 0x3b, 0x35, 
0xf8, + 0xd9, 0xd3, 0xf2, 0x36, 0x6f, 0xa2, 0x85, 0x1b, 0xb1, 0x58, 0x8b, 0x38, 0x55, 0x5a, 0x3a, 0x75, + 0x70, 0xdb, 0x5c, 0xf4, 0xbf, 0x11, 0xbe, 0x9c, 0x99, 0xc5, 0xee, 0x3d, 0xfb, 0x18, 0x6c, 0xc6, + 0x59, 0xbf, 0x63, 0xf4, 0xfa, 0x58, 0x80, 0xd2, 0x44, 0x5c, 0x4b, 0x6a, 0x62, 0xc9, 0xa4, 0xd0, + 0xd0, 0x3e, 0xcb, 0x25, 0xbe, 0x54, 0xc9, 0x3f, 0xaf, 0xf8, 0x00, 0x87, 0x0f, 0xab, 0x3e, 0x1a, + 0x0c, 0x5f, 0x3e, 0xad, 0xab, 0x51, 0xee, 0x33, 0x88, 0xcc, 0x2a, 0xa4, 0xc3, 0x2a, 0xb6, 0x07, + 0xb1, 0x13, 0xef, 0xb7, 0x24, 0x02, 0x20, 0x82, 0x8a, 0x08, 0x80, 0x08, 0x26, 0xde, 0xdb, 0x21, + 0xa4, 0xde, 0xfd, 0x05, 0x00, 0x00, 0xff, 0xff, 0x32, 0xcd, 0x52, 0xcd, 0x7c, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/firestore_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/firestore_admin.pb.go new file mode 100644 index 0000000..e96913f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/firestore_admin.pb.go @@ -0,0 +1,1120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1/firestore_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. +type CreateIndexRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The composite index to create. 
+ Index *Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIndexRequest) Reset() { *m = CreateIndexRequest{} } +func (m *CreateIndexRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIndexRequest) ProtoMessage() {} +func (*CreateIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{0} +} +func (m *CreateIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIndexRequest.Unmarshal(m, b) +} +func (m *CreateIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIndexRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIndexRequest.Merge(dst, src) +} +func (m *CreateIndexRequest) XXX_Size() int { + return xxx_messageInfo_CreateIndexRequest.Size(m) +} +func (m *CreateIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIndexRequest proto.InternalMessageInfo + +func (m *CreateIndexRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateIndexRequest) GetIndex() *Index { + if m != nil { + return m.Index + } + return nil +} + +// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. +type ListIndexesRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter to apply to list results. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token, returned from a previous call to + // [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes], that may be used to get the next + // page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesRequest) Reset() { *m = ListIndexesRequest{} } +func (m *ListIndexesRequest) String() string { return proto.CompactTextString(m) } +func (*ListIndexesRequest) ProtoMessage() {} +func (*ListIndexesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{1} +} +func (m *ListIndexesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesRequest.Unmarshal(m, b) +} +func (m *ListIndexesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesRequest.Marshal(b, m, deterministic) +} +func (dst *ListIndexesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesRequest.Merge(dst, src) +} +func (m *ListIndexesRequest) XXX_Size() int { + return xxx_messageInfo_ListIndexesRequest.Size(m) +} +func (m *ListIndexesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesRequest proto.InternalMessageInfo + +func (m *ListIndexesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListIndexesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListIndexesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIndexesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1.FirestoreAdmin.ListIndexes]. +type ListIndexesResponse struct { + // The requested indexes. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // A page token that may be used to request another page of results. If blank, + // this is the last page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesResponse) Reset() { *m = ListIndexesResponse{} } +func (m *ListIndexesResponse) String() string { return proto.CompactTextString(m) } +func (*ListIndexesResponse) ProtoMessage() {} +func (*ListIndexesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{2} +} +func (m *ListIndexesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesResponse.Unmarshal(m, b) +} +func (m *ListIndexesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesResponse.Marshal(b, m, deterministic) +} +func (dst *ListIndexesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesResponse.Merge(dst, src) +} +func (m *ListIndexesResponse) XXX_Size() int { + return xxx_messageInfo_ListIndexesResponse.Size(m) +} +func (m *ListIndexesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesResponse proto.InternalMessageInfo + +func (m *ListIndexesResponse) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *ListIndexesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1.FirestoreAdmin.GetIndex]. +type GetIndexRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIndexRequest) Reset() { *m = GetIndexRequest{} } +func (m *GetIndexRequest) String() string { return proto.CompactTextString(m) } +func (*GetIndexRequest) ProtoMessage() {} +func (*GetIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{3} +} +func (m *GetIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIndexRequest.Unmarshal(m, b) +} +func (m *GetIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIndexRequest.Marshal(b, m, deterministic) +} +func (dst *GetIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIndexRequest.Merge(dst, src) +} +func (m *GetIndexRequest) XXX_Size() int { + return xxx_messageInfo_GetIndexRequest.Size(m) +} +func (m *GetIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIndexRequest proto.InternalMessageInfo + +func (m *GetIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1.FirestoreAdmin.DeleteIndex]. 
+type DeleteIndexRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteIndexRequest) Reset() { *m = DeleteIndexRequest{} } +func (m *DeleteIndexRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteIndexRequest) ProtoMessage() {} +func (*DeleteIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{4} +} +func (m *DeleteIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteIndexRequest.Unmarshal(m, b) +} +func (m *DeleteIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteIndexRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteIndexRequest.Merge(dst, src) +} +func (m *DeleteIndexRequest) XXX_Size() int { + return xxx_messageInfo_DeleteIndexRequest.Size(m) +} +func (m *DeleteIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteIndexRequest proto.InternalMessageInfo + +func (m *DeleteIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. +type UpdateFieldRequest struct { + // The field to be updated. + Field *Field `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // A mask, relative to the field. If specified, only configuration specified + // by this field_mask will be updated in the field. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFieldRequest) Reset() { *m = UpdateFieldRequest{} } +func (m *UpdateFieldRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFieldRequest) ProtoMessage() {} +func (*UpdateFieldRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{5} +} +func (m *UpdateFieldRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFieldRequest.Unmarshal(m, b) +} +func (m *UpdateFieldRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFieldRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFieldRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFieldRequest.Merge(dst, src) +} +func (m *UpdateFieldRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFieldRequest.Size(m) +} +func (m *UpdateFieldRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFieldRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFieldRequest proto.InternalMessageInfo + +func (m *UpdateFieldRequest) GetField() *Field { + if m != nil { + return m.Field + } + return nil +} + +func (m *UpdateFieldRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request for [FirestoreAdmin.GetField][google.firestore.admin.v1.FirestoreAdmin.GetField]. 
+type GetFieldRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFieldRequest) Reset() { *m = GetFieldRequest{} } +func (m *GetFieldRequest) String() string { return proto.CompactTextString(m) } +func (*GetFieldRequest) ProtoMessage() {} +func (*GetFieldRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{6} +} +func (m *GetFieldRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFieldRequest.Unmarshal(m, b) +} +func (m *GetFieldRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFieldRequest.Marshal(b, m, deterministic) +} +func (dst *GetFieldRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFieldRequest.Merge(dst, src) +} +func (m *GetFieldRequest) XXX_Size() int { + return xxx_messageInfo_GetFieldRequest.Size(m) +} +func (m *GetFieldRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFieldRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFieldRequest proto.InternalMessageInfo + +func (m *GetFieldRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. +type ListFieldsRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter to apply to list results. Currently, + // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token, returned from a previous call to + // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields], that may be used to get the next + // page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFieldsRequest) Reset() { *m = ListFieldsRequest{} } +func (m *ListFieldsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFieldsRequest) ProtoMessage() {} +func (*ListFieldsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{7} +} +func (m *ListFieldsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFieldsRequest.Unmarshal(m, b) +} +func (m *ListFieldsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFieldsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFieldsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFieldsRequest.Merge(dst, src) +} +func (m *ListFieldsRequest) XXX_Size() int { + return xxx_messageInfo_ListFieldsRequest.Size(m) +} +func (m *ListFieldsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFieldsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFieldsRequest proto.InternalMessageInfo + +func (m *ListFieldsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFieldsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFieldsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListFieldsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields]. +type ListFieldsResponse struct { + // The requested fields. + Fields []*Field `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` + // A page token that may be used to request another page of results. If blank, + // this is the last page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFieldsResponse) Reset() { *m = ListFieldsResponse{} } +func (m *ListFieldsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFieldsResponse) ProtoMessage() {} +func (*ListFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{8} +} +func (m *ListFieldsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFieldsResponse.Unmarshal(m, b) +} +func (m *ListFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFieldsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFieldsResponse.Merge(dst, src) +} +func (m *ListFieldsResponse) XXX_Size() int { + return xxx_messageInfo_ListFieldsResponse.Size(m) +} +func (m *ListFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFieldsResponse proto.InternalMessageInfo + +func (m *ListFieldsResponse) GetFields() []*Field { + if m != nil { + return m.Fields + } + return nil +} + +func (m *ListFieldsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. +type ExportDocumentsRequest struct { + // Database to export. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to export. Unspecified means all collections. + CollectionIds []string `protobuf:"bytes,2,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The output URI. Currently only supports Google Cloud Storage URIs of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name + // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional + // Google Cloud Storage namespace path. When + // choosing a name, be sure to consider Google Cloud Storage naming + // guidelines: https://cloud.google.com/storage/docs/naming. + // If the URI is a bucket (without a namespace path), a prefix will be + // generated based on the start time. 
+ OutputUriPrefix string `protobuf:"bytes,3,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsRequest) Reset() { *m = ExportDocumentsRequest{} } +func (m *ExportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsRequest) ProtoMessage() {} +func (*ExportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{9} +} +func (m *ExportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsRequest.Unmarshal(m, b) +} +func (m *ExportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsRequest.Merge(dst, src) +} +func (m *ExportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsRequest.Size(m) +} +func (m *ExportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsRequest proto.InternalMessageInfo + +func (m *ExportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsRequest) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. +type ImportDocumentsRequest struct { + // Database to import into. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to import. Unspecified means all collections included + // in the import. + CollectionIds []string `protobuf:"bytes,2,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Location of the exported files. + // This must match the output_uri_prefix of an ExportDocumentsResponse from + // an export that has completed successfully. + // See: + // [google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1.ExportDocumentsResponse.output_uri_prefix]. 
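To make the export/import round trip described in these request comments concrete, a sketch under assumed resource names and bucket URIs; the caller is expected to supply an authenticated FirestoreAdminClient (declared just below), and the generated output prefix used for the import is a placeholder.

package adminexample

import (
	"context"
	"log"

	admin "google.golang.org/genproto/googleapis/firestore/admin/v1"
)

// exportThenImport exports one collection to Cloud Storage and later imports it
// back, following the URI rules spelled out in the request comments above.
func exportThenImport(ctx context.Context, client admin.FirestoreAdminClient) {
	exportOp, err := client.ExportDocuments(ctx, &admin.ExportDocumentsRequest{
		Name:            "projects/my-project/databases/(default)",
		CollectionIds:   []string{"orders"},       // leaving this empty would export all collections
		OutputUriPrefix: "gs://my-bucket/backups", // bucket-only URI: a prefix is generated from the start time
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = exportOp // a longrunning.Operation; poll it until the export is done

	// The import prefix must be the output_uri_prefix of an export that completed
	// successfully; the value here stands in for that generated prefix.
	importOp, err := client.ImportDocuments(ctx, &admin.ImportDocumentsRequest{
		Name:           "projects/my-project/databases/(default)",
		InputUriPrefix: "gs://my-bucket/backups/2019-05-11T00-00-00_12345",
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = importOp
}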
+ InputUriPrefix string `protobuf:"bytes,3,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsRequest) Reset() { *m = ImportDocumentsRequest{} } +func (m *ImportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsRequest) ProtoMessage() {} +func (*ImportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_5cf2286451a3a340, []int{10} +} +func (m *ImportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsRequest.Unmarshal(m, b) +} +func (m *ImportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsRequest.Merge(dst, src) +} +func (m *ImportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsRequest.Size(m) +} +func (m *ImportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsRequest proto.InternalMessageInfo + +func (m *ImportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ImportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsRequest) GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +func init() { + proto.RegisterType((*CreateIndexRequest)(nil), "google.firestore.admin.v1.CreateIndexRequest") + proto.RegisterType((*ListIndexesRequest)(nil), "google.firestore.admin.v1.ListIndexesRequest") + proto.RegisterType((*ListIndexesResponse)(nil), "google.firestore.admin.v1.ListIndexesResponse") + proto.RegisterType((*GetIndexRequest)(nil), "google.firestore.admin.v1.GetIndexRequest") + proto.RegisterType((*DeleteIndexRequest)(nil), "google.firestore.admin.v1.DeleteIndexRequest") + proto.RegisterType((*UpdateFieldRequest)(nil), "google.firestore.admin.v1.UpdateFieldRequest") + proto.RegisterType((*GetFieldRequest)(nil), "google.firestore.admin.v1.GetFieldRequest") + proto.RegisterType((*ListFieldsRequest)(nil), "google.firestore.admin.v1.ListFieldsRequest") + proto.RegisterType((*ListFieldsResponse)(nil), "google.firestore.admin.v1.ListFieldsResponse") + proto.RegisterType((*ExportDocumentsRequest)(nil), "google.firestore.admin.v1.ExportDocumentsRequest") + proto.RegisterType((*ImportDocumentsRequest)(nil), "google.firestore.admin.v1.ImportDocumentsRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FirestoreAdminClient is the client API for FirestoreAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FirestoreAdminClient interface { + // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] + // which may be used to track the status of the creation. 
The metadata for + // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists composite indexes. + ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) + // Gets a composite index. + GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) + // Deletes a composite index. + DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets the metadata and configuration for a Field. + GetField(ctx context.Context, in *GetFieldRequest, opts ...grpc.CallOption) (*Field, error) + // Updates a field configuration. Currently, field updates apply only to + // single field index configuration. However, calls to + // [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid + // changing any configuration that the caller isn't aware of. The field mask + // should be specified as: `{ paths: "index_config" }`. + // + // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to + // track the status of the field update. The metadata for + // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + // + // To configure the default field settings for the database, use + // the special `Field` with resource name: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`. + UpdateField(ctx context.Context, in *UpdateFieldRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists the field configuration and metadata for this database. + // + // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + ListFields(ctx context.Context, in *ListFieldsRequest, opts ...grpc.CallOption) (*ListFieldsResponse, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. 
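As a further illustration of the ListIndexes paging fields and of the `index_config` field mask that the UpdateField comment above calls for, a short sketch with placeholder project, collection-group, and field names; connection setup and authentication are again elided, and the client is assumed to be handed in.

package adminexample

import (
	"context"
	"log"

	admin "google.golang.org/genproto/googleapis/firestore/admin/v1"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

// listAndClearIndexes pages through the composite indexes of a collection group,
// then clears the single-field indexes of one field using the recommended mask.
func listAndClearIndexes(ctx context.Context, client admin.FirestoreAdminClient) {
	parent := "projects/my-project/databases/(default)/collectionGroups/orders"

	for token := ""; ; {
		resp, err := client.ListIndexes(ctx, &admin.ListIndexesRequest{Parent: parent, PageToken: token})
		if err != nil {
			log.Fatal(err)
		}
		for _, idx := range resp.GetIndexes() {
			log.Println(idx)
		}
		if token = resp.GetNextPageToken(); token == "" {
			break // an empty next_page_token means this was the last page
		}
	}

	// An empty IndexConfig removes all indexes for the field; the mask restricts
	// the update to index_config, as the UpdateField comment advises.
	op, err := client.UpdateField(ctx, &admin.UpdateFieldRequest{
		Field: &admin.Field{
			Name:        parent + "/fields/address.city",
			IndexConfig: &admin.Field_IndexConfig{},
		},
		UpdateMask: &field_mask.FieldMask{Paths: []string{"index_config"}},
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = op // a longrunning.Operation describing the index change
}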
+ ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type firestoreAdminClient struct { + cc *grpc.ClientConn +} + +func NewFirestoreAdminClient(cc *grpc.ClientConn) FirestoreAdminClient { + return &firestoreAdminClient{cc} +} + +func (c *firestoreAdminClient) CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) { + out := new(ListIndexesResponse) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) { + out := new(Index) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) GetField(ctx context.Context, in *GetFieldRequest, opts ...grpc.CallOption) (*Field, error) { + out := new(Field) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/GetField", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) UpdateField(ctx context.Context, in *UpdateFieldRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ListFields(ctx context.Context, in *ListFieldsRequest, opts ...grpc.CallOption) (*ListFieldsResponse, error) { + out := new(ListFieldsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/ListFields", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FirestoreAdminServer is the server API for FirestoreAdmin service. +type FirestoreAdminServer interface { + // Creates a composite index. 
This returns a [google.longrunning.Operation][google.longrunning.Operation] + // which may be used to track the status of the creation. The metadata for + // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. + CreateIndex(context.Context, *CreateIndexRequest) (*longrunning.Operation, error) + // Lists composite indexes. + ListIndexes(context.Context, *ListIndexesRequest) (*ListIndexesResponse, error) + // Gets a composite index. + GetIndex(context.Context, *GetIndexRequest) (*Index, error) + // Deletes a composite index. + DeleteIndex(context.Context, *DeleteIndexRequest) (*empty.Empty, error) + // Gets the metadata and configuration for a Field. + GetField(context.Context, *GetFieldRequest) (*Field, error) + // Updates a field configuration. Currently, field updates apply only to + // single field index configuration. However, calls to + // [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField] should provide a field mask to avoid + // changing any configuration that the caller isn't aware of. The field mask + // should be specified as: `{ paths: "index_config" }`. + // + // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to + // track the status of the field update. The metadata for + // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1.FieldOperationMetadata]. + // + // To configure the default field settings for the database, use + // the special `Field` with resource name: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`. + UpdateField(context.Context, *UpdateFieldRequest) (*longrunning.Operation, error) + // Lists the field configuration and metadata for this database. + // + // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + ListFields(context.Context, *ListFieldsRequest) (*ListFieldsResponse, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(context.Context, *ExportDocumentsRequest) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. 
+ ImportDocuments(context.Context, *ImportDocumentsRequest) (*longrunning.Operation, error) +} + +func RegisterFirestoreAdminServer(s *grpc.Server, srv FirestoreAdminServer) { + s.RegisterService(&_FirestoreAdmin_serviceDesc, srv) +} + +func _FirestoreAdmin_CreateIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).CreateIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/CreateIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).CreateIndex(ctx, req.(*CreateIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ListIndexes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIndexesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ListIndexes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/ListIndexes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ListIndexes(ctx, req.(*ListIndexesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_GetIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).GetIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/GetIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).GetIndex(ctx, req.(*GetIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_DeleteIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/DeleteIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, req.(*DeleteIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_GetField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFieldRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).GetField(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/GetField", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).GetField(ctx, 
req.(*GetFieldRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_UpdateField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFieldRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).UpdateField(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/UpdateField", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).UpdateField(ctx, req.(*UpdateFieldRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ListFields_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFieldsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ListFields(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/ListFields", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ListFields(ctx, req.(*ListFieldsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ExportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ExportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/ExportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ExportDocuments(ctx, req.(*ExportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ImportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1.FirestoreAdmin/ImportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, req.(*ImportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FirestoreAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firestore.admin.v1.FirestoreAdmin", + HandlerType: (*FirestoreAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateIndex", + Handler: _FirestoreAdmin_CreateIndex_Handler, + }, + { + MethodName: "ListIndexes", + Handler: _FirestoreAdmin_ListIndexes_Handler, + }, + { + MethodName: "GetIndex", + Handler: _FirestoreAdmin_GetIndex_Handler, + }, + { + MethodName: "DeleteIndex", + Handler: _FirestoreAdmin_DeleteIndex_Handler, + }, + { + MethodName: "GetField", + Handler: _FirestoreAdmin_GetField_Handler, + }, + { + MethodName: "UpdateField", + Handler: _FirestoreAdmin_UpdateField_Handler, + }, + { + 
MethodName: "ListFields", + Handler: _FirestoreAdmin_ListFields_Handler, + }, + { + MethodName: "ExportDocuments", + Handler: _FirestoreAdmin_ExportDocuments_Handler, + }, + { + MethodName: "ImportDocuments", + Handler: _FirestoreAdmin_ImportDocuments_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/firestore/admin/v1/firestore_admin.proto", +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1/firestore_admin.proto", fileDescriptor_firestore_admin_5cf2286451a3a340) +} + +var fileDescriptor_firestore_admin_5cf2286451a3a340 = []byte{ + // 944 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0x4b, 0x6f, 0x1c, 0x45, + 0x10, 0x56, 0xaf, 0x1f, 0xc4, 0xb5, 0x60, 0x93, 0x8e, 0xb4, 0x5a, 0x26, 0x58, 0xac, 0x06, 0x19, + 0xad, 0x56, 0x64, 0x46, 0x6b, 0x24, 0x0b, 0x6d, 0x48, 0x80, 0x38, 0xf6, 0xca, 0x52, 0x22, 0xac, + 0x0d, 0xc9, 0x01, 0x59, 0x5a, 0x8d, 0x77, 0xda, 0xa3, 0xc6, 0xb3, 0xdd, 0xc3, 0x74, 0x8f, 0xe5, + 0x04, 0x59, 0x41, 0x48, 0x1c, 0x38, 0x73, 0x45, 0xe2, 0x71, 0xcc, 0x05, 0x0e, 0xfc, 0x01, 0xae, + 0x5c, 0xf9, 0x0b, 0xfc, 0x10, 0xd4, 0x8f, 0x59, 0xef, 0x2b, 0x33, 0x4b, 0x12, 0x89, 0x5b, 0x4f, + 0xf5, 0x57, 0x5d, 0x5f, 0x57, 0x75, 0x7d, 0x35, 0xe0, 0x47, 0x9c, 0x47, 0x31, 0xf1, 0x4f, 0x68, + 0x4a, 0x84, 0xe4, 0x29, 0xf1, 0x83, 0x70, 0x48, 0x99, 0x7f, 0xd6, 0xbe, 0x34, 0xf5, 0xb5, 0xc9, + 0x4b, 0x52, 0x2e, 0x39, 0x7e, 0xcb, 0x38, 0x78, 0xa3, 0x5d, 0xcf, 0xec, 0x9e, 0xb5, 0x9d, 0xb7, + 0xed, 0x59, 0x41, 0x42, 0xfd, 0x80, 0x31, 0x2e, 0x03, 0x49, 0x39, 0x13, 0xc6, 0xd1, 0xd9, 0x2a, + 0x8a, 0x44, 0xe2, 0xb0, 0x1c, 0x46, 0x59, 0x48, 0xce, 0x2d, 0xec, 0x5d, 0x0b, 0x8b, 0x39, 0x8b, + 0xd2, 0x8c, 0x31, 0xca, 0x22, 0x9f, 0x27, 0x24, 0x9d, 0x08, 0x79, 0xdd, 0x82, 0xf4, 0xd7, 0x71, + 0x76, 0xe2, 0x93, 0x61, 0x22, 0x1f, 0xdb, 0xcd, 0xc6, 0xf4, 0xa6, 0x66, 0xd1, 0x1f, 0x06, 0xe2, + 0xd4, 0x20, 0xdc, 0x10, 0xf0, 0x6e, 0x4a, 0x02, 0x49, 0x0e, 0x54, 0xe0, 0x1e, 0xf9, 0x2a, 0x23, + 0x42, 0xe2, 0x1a, 0xac, 0x26, 0x41, 0x4a, 0x98, 0xac, 0xa3, 0x06, 0x6a, 0xae, 0xf5, 0xec, 0x17, + 0xde, 0x81, 0x15, 0x4d, 0xb0, 0x5e, 0x69, 0xa0, 0x66, 0x75, 0xbb, 0xe1, 0x3d, 0x37, 0x51, 0x9e, + 0x39, 0xcf, 0xc0, 0xdd, 0x6f, 0x10, 0xe0, 0x7b, 0x54, 0x48, 0x6d, 0x24, 0xa2, 0x2c, 0x4c, 0x0d, + 0x56, 0x4f, 0x68, 0x2c, 0x49, 0xaa, 0xe3, 0xac, 0xf5, 0xec, 0x17, 0xbe, 0x0e, 0x6b, 0x49, 0x10, + 0x91, 0xbe, 0xa0, 0x4f, 0x48, 0x7d, 0xa9, 0x81, 0x9a, 0x2b, 0xbd, 0x2b, 0xca, 0xf0, 0x80, 0x3e, + 0x21, 0x78, 0x13, 0x40, 0x6f, 0x4a, 0x7e, 0x4a, 0x58, 0x7d, 0x59, 0x3b, 0x6a, 0xf8, 0xe7, 0xca, + 0xe0, 0x3e, 0x86, 0x6b, 0x13, 0x0c, 0x44, 0xc2, 0x99, 0x20, 0xb8, 0x03, 0xaf, 0x51, 0x63, 0xaa, + 0xa3, 0xc6, 0xd2, 0x42, 0x77, 0xca, 0x1d, 0xf0, 0x7b, 0xb0, 0xc1, 0xc8, 0xb9, 0xec, 0x8f, 0x85, + 0x35, 0x7c, 0xdf, 0x50, 0xe6, 0xc3, 0x51, 0xe8, 0x2d, 0xd8, 0xe8, 0x12, 0x39, 0x91, 0x60, 0x0c, + 0xcb, 0x2c, 0x18, 0x12, 0x7b, 0x6f, 0xbd, 0x76, 0x9b, 0x80, 0xef, 0x92, 0x98, 0x4c, 0x95, 0x62, + 0x1e, 0xf2, 0x7b, 0x04, 0xf8, 0x61, 0x12, 0x06, 0x92, 0xec, 0xab, 0x7a, 0xe6, 0xd0, 0x1d, 0x58, + 0xd1, 0xf5, 0xd5, 0xd8, 0xe2, 0x9b, 0x18, 0x3f, 0x03, 0xc7, 0x37, 0xa1, 0x9a, 0xe9, 0xd3, 0xf4, + 0xc3, 0xb0, 0xb5, 0x75, 0x72, 0xef, 0xfc, 0xed, 0x18, 0x9f, 0xfb, 0x81, 0x38, 0xed, 0x81, 0x81, + 0xab, 0xb5, 0xbd, 0xdc, 0x04, 0x8f, 0x79, 0x94, 0x9f, 0xc2, 0x55, 0x95, 0x7e, 0x8d, 0xfb, 0x5f, + 0xea, 0x7f, 0x66, 0x5e, 0x60, 0x4e, 0xc0, 0x96, 0xff, 0x43, 0x15, 0x49, 0x59, 0x16, 0xa8, 0xbe, + 0xb9, 0xa3, 0xc5, 0x2f, 0x5c, 0xfc, 0xa7, 0x50, 0xdb, 0x3b, 0x4f, 0x78, 0x2a, 0xef, 
0xf2, 0x41, + 0x36, 0x24, 0x4c, 0x8a, 0x82, 0x34, 0xe1, 0x2d, 0x58, 0x1f, 0xf0, 0x38, 0x26, 0x03, 0xd5, 0xe2, + 0x7d, 0x1a, 0x8a, 0x7a, 0xa5, 0xb1, 0xa4, 0x0e, 0xbd, 0xb4, 0x1e, 0x84, 0x02, 0xb7, 0xe0, 0x2a, + 0xcf, 0x64, 0x92, 0xc9, 0x7e, 0x96, 0xd2, 0x7e, 0x92, 0x92, 0x13, 0x7a, 0xae, 0x13, 0xb2, 0xd6, + 0xdb, 0x30, 0x1b, 0x0f, 0x53, 0x7a, 0xa8, 0xcd, 0xee, 0x05, 0xd4, 0x0e, 0x86, 0xaf, 0x9a, 0x40, + 0x13, 0xde, 0xa4, 0x6c, 0x6e, 0xfc, 0x75, 0x6d, 0x1f, 0x85, 0xdf, 0xfe, 0xee, 0x75, 0x58, 0xdf, + 0xcf, 0x93, 0xf9, 0xa9, 0xca, 0x25, 0x7e, 0x86, 0xa0, 0x3a, 0x26, 0x3a, 0xf8, 0x46, 0x41, 0xd2, + 0x67, 0xc5, 0xc9, 0xd9, 0xcc, 0xe1, 0x63, 0xba, 0xe8, 0x7d, 0x96, 0xeb, 0xa2, 0x7b, 0xff, 0xdb, + 0xbf, 0xff, 0xf9, 0xa1, 0xd2, 0x75, 0x6f, 0x2b, 0x39, 0xfd, 0xda, 0xbc, 0xa8, 0x5b, 0x49, 0xca, + 0xbf, 0x24, 0x03, 0x29, 0xfc, 0x96, 0x1f, 0x06, 0x32, 0x38, 0x0e, 0x04, 0x51, 0xeb, 0xcb, 0x4b, + 0x75, 0x53, 0x9e, 0x25, 0xc2, 0x6f, 0x5d, 0xf8, 0xb6, 0xbb, 0x3b, 0x46, 0xba, 0xf0, 0x1f, 0x08, + 0xaa, 0x63, 0xc2, 0x51, 0x48, 0x76, 0x56, 0xe2, 0x1c, 0x6f, 0x51, 0xb8, 0x79, 0x90, 0xee, 0xbe, + 0x66, 0xff, 0x09, 0x7e, 0x49, 0xf6, 0xf8, 0x67, 0x04, 0x57, 0x72, 0xd1, 0xc1, 0xad, 0x02, 0x12, + 0x53, 0xca, 0xe4, 0x94, 0xea, 0xdf, 0x14, 0x45, 0xf5, 0x6a, 0x16, 0x27, 0x98, 0xf3, 0xf3, 0x5b, + 0x17, 0xf8, 0x47, 0x04, 0xd5, 0x31, 0xc1, 0x2b, 0xcc, 0xec, 0xac, 0x30, 0x3a, 0xb5, 0x19, 0x81, + 0xda, 0x53, 0x93, 0x2f, 0xa7, 0xd7, 0x7a, 0x59, 0x7a, 0x3f, 0x99, 0x0c, 0xea, 0xae, 0x2f, 0xcb, + 0xe0, 0xb8, 0xfc, 0x39, 0xa5, 0x1a, 0xe2, 0xee, 0x69, 0x8a, 0x1f, 0xe3, 0x5b, 0x2f, 0x42, 0xd1, + 0xe8, 0x8f, 0x62, 0xf8, 0x1b, 0x82, 0xea, 0xd8, 0x18, 0x28, 0x4c, 0xe0, 0xec, 0xb8, 0x28, 0xeb, + 0xa3, 0x9e, 0x26, 0x79, 0x6f, 0x7b, 0x57, 0x93, 0x34, 0xbf, 0x2f, 0x2f, 0x4a, 0xb5, 0x63, 0x27, + 0xcd, 0xef, 0x08, 0xe0, 0x52, 0x85, 0xf1, 0xfb, 0x25, 0xcd, 0x31, 0x31, 0x2d, 0x9c, 0x1b, 0x0b, + 0xa2, 0x6d, 0x27, 0x4d, 0x26, 0xf9, 0xbf, 0x77, 0x92, 0xd5, 0xf9, 0x5f, 0x10, 0x6c, 0x4c, 0x09, + 0x38, 0x6e, 0x17, 0x30, 0x99, 0x2f, 0xf6, 0x65, 0xc9, 0xfe, 0x48, 0x93, 0xdd, 0x71, 0xdb, 0x65, + 0x2f, 0xe2, 0xa2, 0x43, 0x26, 0x03, 0x74, 0x50, 0x4b, 0x73, 0x9c, 0xd2, 0xf8, 0x42, 0x8e, 0xf3, + 0xe7, 0xc1, 0x2b, 0xe4, 0x48, 0x87, 0xd3, 0x1c, 0xef, 0xfc, 0x89, 0x60, 0x73, 0xc0, 0x87, 0xcf, + 0x67, 0x75, 0xe7, 0xda, 0xe4, 0x98, 0x38, 0x54, 0x6d, 0x7d, 0x88, 0xbe, 0xb8, 0x6d, 0x3d, 0x22, + 0x1e, 0x07, 0x2c, 0xf2, 0x78, 0x1a, 0xf9, 0x11, 0x61, 0xba, 0xe9, 0xed, 0x8f, 0x7d, 0x90, 0x50, + 0x31, 0xe7, 0x5f, 0xfa, 0xa6, 0x5e, 0xfc, 0x5a, 0x59, 0xee, 0xee, 0xee, 0x3f, 0x78, 0x56, 0x79, + 0xa7, 0x6b, 0xce, 0xd9, 0x8d, 0x79, 0x16, 0x7a, 0xa3, 0x58, 0x9e, 0x0e, 0xe6, 0x3d, 0x6a, 0xff, + 0x95, 0x23, 0x8e, 0x34, 0xe2, 0x68, 0x84, 0x38, 0xd2, 0x88, 0xa3, 0x47, 0xed, 0xe3, 0x55, 0x1d, + 0xf5, 0x83, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0xde, 0x49, 0x90, 0x6c, 0x51, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/index.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/index.pb.go new file mode 100644 index 0000000..6b9bd42 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/index.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/firestore/admin/v1/index.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Query Scope defines the scope at which a query is run. This is specified on +// a StructuredQuery's `from` field. +type Index_QueryScope int32 + +const ( + // The query scope is unspecified. Not a valid option. + Index_QUERY_SCOPE_UNSPECIFIED Index_QueryScope = 0 + // Indexes with a collection query scope specified allow queries + // against a collection that is the child of a specific document, specified + // at query time, and that has the collection id specified by the index. + Index_COLLECTION Index_QueryScope = 1 +) + +var Index_QueryScope_name = map[int32]string{ + 0: "QUERY_SCOPE_UNSPECIFIED", + 1: "COLLECTION", +} +var Index_QueryScope_value = map[string]int32{ + "QUERY_SCOPE_UNSPECIFIED": 0, + "COLLECTION": 1, +} + +func (x Index_QueryScope) String() string { + return proto.EnumName(Index_QueryScope_name, int32(x)) +} +func (Index_QueryScope) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0, 0} +} + +// The state of an index. During index creation, an index will be in the +// `CREATING` state. If the index is created successfully, it will transition +// to the `READY` state. If the index creation encounters a problem, the index +// will transition to the `NEEDS_REPAIR` state. +type Index_State int32 + +const ( + // The state is unspecified. + Index_STATE_UNSPECIFIED Index_State = 0 + // The index is being created. + // There is an active long-running operation for the index. + // The index is updated when writing a document. + // Some index data may exist. + Index_CREATING Index_State = 1 + // The index is ready to be used. + // The index is updated when writing a document. + // The index is fully populated from all stored documents it applies to. + Index_READY Index_State = 2 + // The index was being created, but something went wrong. + // There is no active long-running operation for the index, + // and the most recently finished long-running operation failed. + // The index is not updated when writing a document. + // Some index data may exist. + // Use the google.longrunning.Operations API to determine why the operation + // that last attempted to create this index failed, then re-create the + // index. + Index_NEEDS_REPAIR Index_State = 3 +) + +var Index_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", + 3: "NEEDS_REPAIR", +} +var Index_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, + "NEEDS_REPAIR": 3, +} + +func (x Index_State) String() string { + return proto.EnumName(Index_State_name, int32(x)) +} +func (Index_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0, 1} +} + +// The supported orderings. 
+type Index_IndexField_Order int32 + +const ( + // The ordering is unspecified. Not a valid option. + Index_IndexField_ORDER_UNSPECIFIED Index_IndexField_Order = 0 + // The field is ordered by ascending field value. + Index_IndexField_ASCENDING Index_IndexField_Order = 1 + // The field is ordered by descending field value. + Index_IndexField_DESCENDING Index_IndexField_Order = 2 +) + +var Index_IndexField_Order_name = map[int32]string{ + 0: "ORDER_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var Index_IndexField_Order_value = map[string]int32{ + "ORDER_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x Index_IndexField_Order) String() string { + return proto.EnumName(Index_IndexField_Order_name, int32(x)) +} +func (Index_IndexField_Order) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0, 0, 0} +} + +// The supported array value configurations. +type Index_IndexField_ArrayConfig int32 + +const ( + // The index does not support additional array queries. + Index_IndexField_ARRAY_CONFIG_UNSPECIFIED Index_IndexField_ArrayConfig = 0 + // The index supports array containment queries. + Index_IndexField_CONTAINS Index_IndexField_ArrayConfig = 1 +) + +var Index_IndexField_ArrayConfig_name = map[int32]string{ + 0: "ARRAY_CONFIG_UNSPECIFIED", + 1: "CONTAINS", +} +var Index_IndexField_ArrayConfig_value = map[string]int32{ + "ARRAY_CONFIG_UNSPECIFIED": 0, + "CONTAINS": 1, +} + +func (x Index_IndexField_ArrayConfig) String() string { + return proto.EnumName(Index_IndexField_ArrayConfig_name, int32(x)) +} +func (Index_IndexField_ArrayConfig) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0, 0, 1} +} + +// Cloud Firestore indexes enable simple and complex queries against +// documents in a database. +type Index struct { + // Output only. + // A server defined name for this index. + // The form of this name for composite indexes will be: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}` + // For single field indexes, this field will be empty. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Indexes with a collection query scope specified allow queries + // against a collection that is the child of a specific document, specified at + // query time, and that has the same collection id. + // + // Indexes with a collection group query scope specified allow queries against + // all collections descended from a specific document, specified at query + // time, and that have the same collection id as this index. + QueryScope Index_QueryScope `protobuf:"varint,2,opt,name=query_scope,json=queryScope,proto3,enum=google.firestore.admin.v1.Index_QueryScope" json:"query_scope,omitempty"` + // The fields supported by this index. + // + // For composite indexes, this is always 2 or more fields. + // The last field entry is always for the field path `__name__`. If, on + // creation, `__name__` was not specified as the last field, it will be added + // automatically with the same direction as that of the last field defined. If + // the final field in a composite index is not directional, the `__name__` + // will be ordered ASCENDING (unless explicitly specified). + // + // For single field indexes, this will always be exactly one entry with a + // field path equal to the field path of the associated field. 
+ Fields []*Index_IndexField `protobuf:"bytes,3,rep,name=fields,proto3" json:"fields,omitempty"` + // Output only. + // The serving state of the index. + State Index_State `protobuf:"varint,4,opt,name=state,proto3,enum=google.firestore.admin.v1.Index_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index) Reset() { *m = Index{} } +func (m *Index) String() string { return proto.CompactTextString(m) } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index.Unmarshal(m, b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) +} +func (dst *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(dst, src) +} +func (m *Index) XXX_Size() int { + return xxx_messageInfo_Index.Size(m) +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Index) GetQueryScope() Index_QueryScope { + if m != nil { + return m.QueryScope + } + return Index_QUERY_SCOPE_UNSPECIFIED +} + +func (m *Index) GetFields() []*Index_IndexField { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Index) GetState() Index_State { + if m != nil { + return m.State + } + return Index_STATE_UNSPECIFIED +} + +// A field in an index. +// The field_path describes which field is indexed, the value_mode describes +// how the field value is indexed. +type Index_IndexField struct { + // Can be __name__. + // For single field indexes, this must match the name of the field or may + // be omitted. + FieldPath string `protobuf:"bytes,1,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + // How the field value is indexed. 
+ // + // Types that are valid to be assigned to ValueMode: + // *Index_IndexField_Order_ + // *Index_IndexField_ArrayConfig_ + ValueMode isIndex_IndexField_ValueMode `protobuf_oneof:"value_mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index_IndexField) Reset() { *m = Index_IndexField{} } +func (m *Index_IndexField) String() string { return proto.CompactTextString(m) } +func (*Index_IndexField) ProtoMessage() {} +func (*Index_IndexField) Descriptor() ([]byte, []int) { + return fileDescriptor_index_58e7fd4a61be5522, []int{0, 0} +} +func (m *Index_IndexField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index_IndexField.Unmarshal(m, b) +} +func (m *Index_IndexField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index_IndexField.Marshal(b, m, deterministic) +} +func (dst *Index_IndexField) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index_IndexField.Merge(dst, src) +} +func (m *Index_IndexField) XXX_Size() int { + return xxx_messageInfo_Index_IndexField.Size(m) +} +func (m *Index_IndexField) XXX_DiscardUnknown() { + xxx_messageInfo_Index_IndexField.DiscardUnknown(m) +} + +var xxx_messageInfo_Index_IndexField proto.InternalMessageInfo + +func (m *Index_IndexField) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +type isIndex_IndexField_ValueMode interface { + isIndex_IndexField_ValueMode() +} + +type Index_IndexField_Order_ struct { + Order Index_IndexField_Order `protobuf:"varint,2,opt,name=order,proto3,enum=google.firestore.admin.v1.Index_IndexField_Order,oneof"` +} + +type Index_IndexField_ArrayConfig_ struct { + ArrayConfig Index_IndexField_ArrayConfig `protobuf:"varint,3,opt,name=array_config,json=arrayConfig,proto3,enum=google.firestore.admin.v1.Index_IndexField_ArrayConfig,oneof"` +} + +func (*Index_IndexField_Order_) isIndex_IndexField_ValueMode() {} + +func (*Index_IndexField_ArrayConfig_) isIndex_IndexField_ValueMode() {} + +func (m *Index_IndexField) GetValueMode() isIndex_IndexField_ValueMode { + if m != nil { + return m.ValueMode + } + return nil +} + +func (m *Index_IndexField) GetOrder() Index_IndexField_Order { + if x, ok := m.GetValueMode().(*Index_IndexField_Order_); ok { + return x.Order + } + return Index_IndexField_ORDER_UNSPECIFIED +} + +func (m *Index_IndexField) GetArrayConfig() Index_IndexField_ArrayConfig { + if x, ok := m.GetValueMode().(*Index_IndexField_ArrayConfig_); ok { + return x.ArrayConfig + } + return Index_IndexField_ARRAY_CONFIG_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Index_IndexField) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Index_IndexField_OneofMarshaler, _Index_IndexField_OneofUnmarshaler, _Index_IndexField_OneofSizer, []interface{}{ + (*Index_IndexField_Order_)(nil), + (*Index_IndexField_ArrayConfig_)(nil), + } +} + +func _Index_IndexField_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Index_IndexField) + // value_mode + switch x := m.ValueMode.(type) { + case *Index_IndexField_Order_: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Order)) + case *Index_IndexField_ArrayConfig_: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ArrayConfig)) + case nil: + default: + return fmt.Errorf("Index_IndexField.ValueMode has unexpected type %T", x) + } + return nil +} + +func _Index_IndexField_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Index_IndexField) + switch tag { + case 2: // value_mode.order + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueMode = &Index_IndexField_Order_{Index_IndexField_Order(x)} + return true, err + case 3: // value_mode.array_config + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueMode = &Index_IndexField_ArrayConfig_{Index_IndexField_ArrayConfig(x)} + return true, err + default: + return false, nil + } +} + +func _Index_IndexField_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Index_IndexField) + // value_mode + switch x := m.ValueMode.(type) { + case *Index_IndexField_Order_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Order)) + case *Index_IndexField_ArrayConfig_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ArrayConfig)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Index)(nil), "google.firestore.admin.v1.Index") + proto.RegisterType((*Index_IndexField)(nil), "google.firestore.admin.v1.Index.IndexField") + proto.RegisterEnum("google.firestore.admin.v1.Index_QueryScope", Index_QueryScope_name, Index_QueryScope_value) + proto.RegisterEnum("google.firestore.admin.v1.Index_State", Index_State_name, Index_State_value) + proto.RegisterEnum("google.firestore.admin.v1.Index_IndexField_Order", Index_IndexField_Order_name, Index_IndexField_Order_value) + proto.RegisterEnum("google.firestore.admin.v1.Index_IndexField_ArrayConfig", Index_IndexField_ArrayConfig_name, Index_IndexField_ArrayConfig_value) +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1/index.proto", fileDescriptor_index_58e7fd4a61be5522) +} + +var fileDescriptor_index_58e7fd4a61be5522 = []byte{ + // 554 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xdf, 0x6a, 0xdb, 0x30, + 0x14, 0xc6, 0x6b, 0xa7, 0x2e, 0xcb, 0x49, 0x56, 0x3c, 0xc1, 0x98, 0xd7, 0xb5, 0xac, 0x04, 0x36, + 0x02, 0x03, 0x9b, 0x6c, 0x17, 0xa3, 0xec, 0x0f, 0x38, 0xb6, 0x92, 0x1a, 0x82, 0xed, 0xca, 0x69, + 0x21, 0x23, 0x60, 0xb4, 0x58, 0x71, 0x0d, 0x89, 0x95, 0xda, 0x4e, 0x58, 0x5f, 0x67, 0x97, 0x7b, + 0x82, 0x3d, 0xc3, 0xee, 0x76, 0xbf, 0x87, 0x19, 0x96, 0xb3, 0x64, 0x94, 0x96, 0xad, 0x37, 0xe6, + 0xc8, 0xfe, 0xbe, 0xdf, 0xa7, 0x63, 0xe9, 0xc0, 0x8b, 0x98, 0xf3, 0x78, 0xc6, 0x8c, 0x69, 0x92, + 
0xb1, 0xbc, 0xe0, 0x19, 0x33, 0x68, 0x34, 0x4f, 0x52, 0x63, 0xd5, 0x31, 0x92, 0x34, 0x62, 0x5f, + 0xf4, 0x45, 0xc6, 0x0b, 0x8e, 0x9e, 0x56, 0x32, 0x7d, 0x23, 0xd3, 0x85, 0x4c, 0x5f, 0x75, 0x0e, + 0x0e, 0xd7, 0x04, 0xba, 0x48, 0x0c, 0x9a, 0xa6, 0xbc, 0xa0, 0x45, 0xc2, 0xd3, 0xbc, 0x32, 0xb6, + 0x7e, 0x2a, 0xa0, 0x38, 0x25, 0x08, 0x21, 0xd8, 0x4d, 0xe9, 0x9c, 0x69, 0xd2, 0xb1, 0xd4, 0xae, + 0x13, 0x51, 0xa3, 0x01, 0x34, 0xae, 0x96, 0x2c, 0xbb, 0x0e, 0xf3, 0x09, 0x5f, 0x30, 0x4d, 0x3e, + 0x96, 0xda, 0xfb, 0xaf, 0x5f, 0xe9, 0x77, 0x86, 0xe9, 0x02, 0xa5, 0x9f, 0x95, 0x9e, 0xa0, 0xb4, + 0x10, 0xb8, 0xda, 0xd4, 0xc8, 0x82, 0xbd, 0x69, 0xc2, 0x66, 0x51, 0xae, 0xd5, 0x8e, 0x6b, 0xed, + 0xc6, 0x7f, 0x80, 0xc4, 0xb3, 0x57, 0x7a, 0xc8, 0xda, 0x8a, 0xde, 0x83, 0x92, 0x17, 0xb4, 0x60, + 0xda, 0xae, 0xd8, 0xcc, 0xcb, 0x7f, 0x32, 0x82, 0x52, 0x4d, 0x2a, 0xd3, 0xc1, 0x2f, 0x19, 0x60, + 0x0b, 0x45, 0x47, 0x00, 0x02, 0x1b, 0x2e, 0x68, 0x71, 0xb9, 0xee, 0xbc, 0x2e, 0xde, 0xf8, 0xb4, + 0xb8, 0x44, 0x0e, 0x28, 0x3c, 0x8b, 0x58, 0xb6, 0x6e, 0xbc, 0x73, 0x8f, 0xfd, 0xea, 0x5e, 0x69, + 0x3c, 0xdd, 0x21, 0x15, 0x01, 0x8d, 0xa1, 0x49, 0xb3, 0x8c, 0x5e, 0x87, 0x13, 0x9e, 0x4e, 0x93, + 0x58, 0xab, 0x09, 0xe2, 0xdb, 0xfb, 0x10, 0xcd, 0xd2, 0x6f, 0x09, 0xfb, 0xe9, 0x0e, 0x69, 0xd0, + 0xed, 0xb2, 0xf5, 0x01, 0x14, 0x91, 0x87, 0x1e, 0xc3, 0x23, 0x8f, 0xd8, 0x98, 0x84, 0xe7, 0x6e, + 0xe0, 0x63, 0xcb, 0xe9, 0x39, 0xd8, 0x56, 0x77, 0xd0, 0x43, 0xa8, 0x9b, 0x81, 0x85, 0x5d, 0xdb, + 0x71, 0xfb, 0xaa, 0x84, 0xf6, 0x01, 0x6c, 0xbc, 0x59, 0xcb, 0xad, 0x13, 0x68, 0xfc, 0x05, 0x47, + 0x87, 0xa0, 0x99, 0x84, 0x98, 0xa3, 0xd0, 0xf2, 0xdc, 0x9e, 0xd3, 0xbf, 0xc1, 0x6a, 0xc2, 0x03, + 0xcb, 0x73, 0x87, 0xa6, 0xe3, 0x06, 0xaa, 0xd4, 0x6d, 0x02, 0xac, 0xe8, 0x6c, 0xc9, 0xc2, 0x39, + 0x8f, 0x58, 0xeb, 0x04, 0x60, 0x7b, 0xf6, 0xe8, 0x19, 0x3c, 0x39, 0x3b, 0xc7, 0x64, 0x14, 0x06, + 0x96, 0xe7, 0xe3, 0x1b, 0x98, 0x7d, 0x00, 0xcb, 0x1b, 0x0c, 0xb0, 0x35, 0x74, 0x3c, 0x57, 0x95, + 0x5a, 0x0e, 0x28, 0xe2, 0xa4, 0xca, 0x16, 0x82, 0xa1, 0x39, 0xc4, 0xb7, 0xc4, 0x12, 0x6c, 0x0e, + 0xab, 0x0e, 0xea, 0xa0, 0x10, 0x6c, 0xda, 0x23, 0x55, 0x46, 0x2a, 0x34, 0x5d, 0x8c, 0xed, 0x20, + 0x24, 0xd8, 0x37, 0x1d, 0xa2, 0xd6, 0xba, 0xdf, 0x25, 0x38, 0x9a, 0xf0, 0xf9, 0xdd, 0xff, 0xb6, + 0x5b, 0xdd, 0x01, 0xbf, 0x9c, 0x00, 0x5f, 0xfa, 0xf4, 0x71, 0x2d, 0x8c, 0xf9, 0x8c, 0xa6, 0xb1, + 0xce, 0xb3, 0xd8, 0x88, 0x59, 0x2a, 0xe6, 0xc3, 0xa8, 0x3e, 0xd1, 0x45, 0x92, 0xdf, 0x32, 0x82, + 0xef, 0x44, 0xf1, 0x55, 0xde, 0xed, 0x5b, 0xbd, 0xe0, 0x9b, 0xfc, 0xbc, 0x5f, 0x71, 0xac, 0x19, + 0x5f, 0x46, 0x7a, 0x6f, 0x13, 0x6b, 0x8a, 0xd8, 0x8b, 0xce, 0x8f, 0x3f, 0x8a, 0xb1, 0x50, 0x8c, + 0x37, 0x8a, 0xb1, 0x50, 0x8c, 0x2f, 0x3a, 0x9f, 0xf7, 0x44, 0xea, 0x9b, 0xdf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0xb0, 0x5a, 0xfb, 0xcf, 0xf7, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/location.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/location.pb.go new file mode 100644 index 0000000..a25c5bc --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/location.pb.go @@ -0,0 +1,79 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/firestore/admin/v1/location.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. +type LocationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationMetadata) Reset() { *m = LocationMetadata{} } +func (m *LocationMetadata) String() string { return proto.CompactTextString(m) } +func (*LocationMetadata) ProtoMessage() {} +func (*LocationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_location_d0ba94c897950f24, []int{0} +} +func (m *LocationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationMetadata.Unmarshal(m, b) +} +func (m *LocationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationMetadata.Marshal(b, m, deterministic) +} +func (dst *LocationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationMetadata.Merge(dst, src) +} +func (m *LocationMetadata) XXX_Size() int { + return xxx_messageInfo_LocationMetadata.Size(m) +} +func (m *LocationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LocationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocationMetadata)(nil), "google.firestore.admin.v1.LocationMetadata") +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1/location.proto", fileDescriptor_location_d0ba94c897950f24) +} + +var fileDescriptor_location_d0ba94c897950f24 = []byte{ + // 230 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x4f, 0xcb, 0x2c, 0x4a, 0x2d, 0x2e, 0xc9, 0x2f, 0x4a, 0xd5, 0x4f, 0x4c, 0xc9, + 0xcd, 0xcc, 0xd3, 0x2f, 0x33, 0xd4, 0xcf, 0xc9, 0x4f, 0x4e, 0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x2b, + 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x92, 0x84, 0xa8, 0xd4, 0x83, 0xab, 0xd4, 0x03, 0xab, 0xd4, 0x2b, + 0x33, 0x94, 0x92, 0x80, 0x1a, 0x52, 0x52, 0x59, 0x90, 0xaa, 0x9f, 0x93, 0x58, 0x92, 0x93, 0x97, + 0x0e, 0xd1, 0x24, 0x25, 0x03, 0x95, 0x49, 0x2c, 0xc8, 0xd4, 0x4f, 0xcc, 0xcb, 0xcb, 0x2f, 0x01, + 0x9b, 0x58, 0x0c, 0x91, 0x55, 0x12, 0xe2, 0x12, 0xf0, 0x81, 0x5a, 0xe2, 0x9b, 0x5a, 0x92, 0x98, + 0x92, 0x58, 0x92, 0xe8, 0xb4, 0x9b, 0x91, 0x4b, 0x36, 0x39, 0x3f, 0x57, 0x0f, 0xa7, 0x6d, 0x4e, + 0xbc, 0x30, 0x3d, 0x01, 0x20, 0x43, 0x02, 0x18, 0xa3, 0xec, 0xa0, 0x6a, 0xd3, 0xf3, 0x73, 0x12, + 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0x56, 0xe8, 0x43, 0xa4, 0x12, + 0x0b, 0x32, 0x8b, 0xb1, 0x78, 0xd1, 0x1a, 0xcc, 0x58, 0xc4, 0xc4, 0xe2, 0xee, 0xec, 0x16, 0xbc, + 0x8a, 0x49, 0xde, 0x1d, 0x62, 0x8e, 0x73, 0x4e, 0x7e, 0x69, 0x8a, 
0x9e, 0x1b, 0xdc, 0x66, 0x47, + 0xb0, 0xcd, 0x61, 0x86, 0xa7, 0x60, 0x2a, 0x62, 0xc0, 0x2a, 0x62, 0xe0, 0x2a, 0x62, 0xc0, 0x2a, + 0x62, 0xc2, 0x0c, 0x93, 0xd8, 0xc0, 0xb6, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa3, 0xed, + 0xd1, 0x40, 0x57, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/operation.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/operation.pb.go new file mode 100644 index 0000000..b92bf93 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1/operation.pb.go @@ -0,0 +1,699 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1/operation.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the state of the operation. +type OperationState int32 + +const ( + // Unspecified. + OperationState_OPERATION_STATE_UNSPECIFIED OperationState = 0 + // Request is being prepared for processing. + OperationState_INITIALIZING OperationState = 1 + // Request is actively being processed. + OperationState_PROCESSING OperationState = 2 + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + OperationState_CANCELLING OperationState = 3 + // Request has been processed and is in its finalization stage. + OperationState_FINALIZING OperationState = 4 + // Request has completed successfully. + OperationState_SUCCESSFUL OperationState = 5 + // Request has finished being processed, but encountered an error. + OperationState_FAILED OperationState = 6 + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. + OperationState_CANCELLED OperationState = 7 +) + +var OperationState_name = map[int32]string{ + 0: "OPERATION_STATE_UNSPECIFIED", + 1: "INITIALIZING", + 2: "PROCESSING", + 3: "CANCELLING", + 4: "FINALIZING", + 5: "SUCCESSFUL", + 6: "FAILED", + 7: "CANCELLED", +} +var OperationState_value = map[string]int32{ + "OPERATION_STATE_UNSPECIFIED": 0, + "INITIALIZING": 1, + "PROCESSING": 2, + "CANCELLING": 3, + "FINALIZING": 4, + "SUCCESSFUL": 5, + "FAILED": 6, + "CANCELLED": 7, +} + +func (x OperationState) String() string { + return proto.EnumName(OperationState_name, int32(x)) +} +func (OperationState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{0} +} + +// Specifies how the index is changing. +type FieldOperationMetadata_IndexConfigDelta_ChangeType int32 + +const ( + // The type of change is not specified or known. + FieldOperationMetadata_IndexConfigDelta_CHANGE_TYPE_UNSPECIFIED FieldOperationMetadata_IndexConfigDelta_ChangeType = 0 + // The single field index is being added. 
+ FieldOperationMetadata_IndexConfigDelta_ADD FieldOperationMetadata_IndexConfigDelta_ChangeType = 1 + // The single field index is being removed. + FieldOperationMetadata_IndexConfigDelta_REMOVE FieldOperationMetadata_IndexConfigDelta_ChangeType = 2 +) + +var FieldOperationMetadata_IndexConfigDelta_ChangeType_name = map[int32]string{ + 0: "CHANGE_TYPE_UNSPECIFIED", + 1: "ADD", + 2: "REMOVE", +} +var FieldOperationMetadata_IndexConfigDelta_ChangeType_value = map[string]int32{ + "CHANGE_TYPE_UNSPECIFIED": 0, + "ADD": 1, + "REMOVE": 2, +} + +func (x FieldOperationMetadata_IndexConfigDelta_ChangeType) String() string { + return proto.EnumName(FieldOperationMetadata_IndexConfigDelta_ChangeType_name, int32(x)) +} +func (FieldOperationMetadata_IndexConfigDelta_ChangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{1, 0, 0} +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. +type IndexOperationMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The index resource that this operation is acting on. For example: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Index string `protobuf:"bytes,3,opt,name=index,proto3" json:"index,omitempty"` + // The state of the operation. + State OperationState `protobuf:"varint,4,opt,name=state,proto3,enum=google.firestore.admin.v1.OperationState" json:"state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,5,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. 
+ ProgressBytes *Progress `protobuf:"bytes,6,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IndexOperationMetadata) Reset() { *m = IndexOperationMetadata{} } +func (m *IndexOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*IndexOperationMetadata) ProtoMessage() {} +func (*IndexOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{0} +} +func (m *IndexOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IndexOperationMetadata.Unmarshal(m, b) +} +func (m *IndexOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IndexOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *IndexOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_IndexOperationMetadata.Merge(dst, src) +} +func (m *IndexOperationMetadata) XXX_Size() int { + return xxx_messageInfo_IndexOperationMetadata.Size(m) +} +func (m *IndexOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_IndexOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_IndexOperationMetadata proto.InternalMessageInfo + +func (m *IndexOperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *IndexOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *IndexOperationMetadata) GetIndex() string { + if m != nil { + return m.Index + } + return "" +} + +func (m *IndexOperationMetadata) GetState() OperationState { + if m != nil { + return m.State + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *IndexOperationMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *IndexOperationMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.UpdateField][google.firestore.admin.v1.FirestoreAdmin.UpdateField]. +type FieldOperationMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The field resource that this operation is acting on. For example: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}` + Field string `protobuf:"bytes,3,opt,name=field,proto3" json:"field,omitempty"` + // A list of [IndexConfigDelta][google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this + // operation. + IndexConfigDeltas []*FieldOperationMetadata_IndexConfigDelta `protobuf:"bytes,4,rep,name=index_config_deltas,json=indexConfigDeltas,proto3" json:"index_config_deltas,omitempty"` + // The state of the operation. + State OperationState `protobuf:"varint,5,opt,name=state,proto3,enum=google.firestore.admin.v1.OperationState" json:"state,omitempty"` + // The progress, in documents, of this operation. 
+ ProgressDocuments *Progress `protobuf:"bytes,6,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. + ProgressBytes *Progress `protobuf:"bytes,7,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldOperationMetadata) Reset() { *m = FieldOperationMetadata{} } +func (m *FieldOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*FieldOperationMetadata) ProtoMessage() {} +func (*FieldOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{1} +} +func (m *FieldOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOperationMetadata.Unmarshal(m, b) +} +func (m *FieldOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *FieldOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOperationMetadata.Merge(dst, src) +} +func (m *FieldOperationMetadata) XXX_Size() int { + return xxx_messageInfo_FieldOperationMetadata.Size(m) +} +func (m *FieldOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOperationMetadata proto.InternalMessageInfo + +func (m *FieldOperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *FieldOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *FieldOperationMetadata) GetField() string { + if m != nil { + return m.Field + } + return "" +} + +func (m *FieldOperationMetadata) GetIndexConfigDeltas() []*FieldOperationMetadata_IndexConfigDelta { + if m != nil { + return m.IndexConfigDeltas + } + return nil +} + +func (m *FieldOperationMetadata) GetState() OperationState { + if m != nil { + return m.State + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *FieldOperationMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *FieldOperationMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +// Information about an index configuration change. +type FieldOperationMetadata_IndexConfigDelta struct { + // Specifies how the index is changing. + ChangeType FieldOperationMetadata_IndexConfigDelta_ChangeType `protobuf:"varint,1,opt,name=change_type,json=changeType,proto3,enum=google.firestore.admin.v1.FieldOperationMetadata_IndexConfigDelta_ChangeType" json:"change_type,omitempty"` + // The index being changed. 
+ Index *Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldOperationMetadata_IndexConfigDelta) Reset() { + *m = FieldOperationMetadata_IndexConfigDelta{} +} +func (m *FieldOperationMetadata_IndexConfigDelta) String() string { return proto.CompactTextString(m) } +func (*FieldOperationMetadata_IndexConfigDelta) ProtoMessage() {} +func (*FieldOperationMetadata_IndexConfigDelta) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{1, 0} +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Unmarshal(m, b) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Marshal(b, m, deterministic) +} +func (dst *FieldOperationMetadata_IndexConfigDelta) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Merge(dst, src) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Size() int { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Size(m) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta proto.InternalMessageInfo + +func (m *FieldOperationMetadata_IndexConfigDelta) GetChangeType() FieldOperationMetadata_IndexConfigDelta_ChangeType { + if m != nil { + return m.ChangeType + } + return FieldOperationMetadata_IndexConfigDelta_CHANGE_TYPE_UNSPECIFIED +} + +func (m *FieldOperationMetadata_IndexConfigDelta) GetIndex() *Index { + if m != nil { + return m.Index + } + return nil +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1.FirestoreAdmin.ExportDocuments]. +type ExportDocumentsMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the export operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1.OperationState" json:"operation_state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. + ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being exported. + CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Where the entities are being exported to. 
+ OutputUriPrefix string `protobuf:"bytes,7,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsMetadata) Reset() { *m = ExportDocumentsMetadata{} } +func (m *ExportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsMetadata) ProtoMessage() {} +func (*ExportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{2} +} +func (m *ExportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ExportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsMetadata.Merge(dst, src) +} +func (m *ExportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsMetadata.Size(m) +} +func (m *ExportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsMetadata proto.InternalMessageInfo + +func (m *ExportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *ExportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ExportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ExportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1.FirestoreAdmin.ImportDocuments]. +type ImportDocumentsMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the import operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1.OperationState" json:"operation_state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. + ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being imported. 
+ CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The location of the documents being imported. + InputUriPrefix string `protobuf:"bytes,7,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsMetadata) Reset() { *m = ImportDocumentsMetadata{} } +func (m *ImportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsMetadata) ProtoMessage() {} +func (*ImportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{3} +} +func (m *ImportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ImportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsMetadata.Merge(dst, src) +} +func (m *ImportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsMetadata.Size(m) +} +func (m *ImportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsMetadata proto.InternalMessageInfo + +func (m *ImportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *ImportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ImportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ImportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsMetadata) GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field. +type ExportDocumentsResponse struct { + // Location of the output files. This can be used to begin an import + // into Cloud Firestore (this project or another project) after the operation + // completes successfully. 
+ OutputUriPrefix string `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsResponse) Reset() { *m = ExportDocumentsResponse{} } +func (m *ExportDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsResponse) ProtoMessage() {} +func (*ExportDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{4} +} +func (m *ExportDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsResponse.Unmarshal(m, b) +} +func (m *ExportDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsResponse.Merge(dst, src) +} +func (m *ExportDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsResponse.Size(m) +} +func (m *ExportDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsResponse proto.InternalMessageInfo + +func (m *ExportDocumentsResponse) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Describes the progress of the operation. +// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress] +// is used. +type Progress struct { + // The amount of work estimated. + EstimatedWork int64 `protobuf:"varint,1,opt,name=estimated_work,json=estimatedWork,proto3" json:"estimated_work,omitempty"` + // The amount of work completed. 
+ CompletedWork int64 `protobuf:"varint,2,opt,name=completed_work,json=completedWork,proto3" json:"completed_work,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Progress) Reset() { *m = Progress{} } +func (m *Progress) String() string { return proto.CompactTextString(m) } +func (*Progress) ProtoMessage() {} +func (*Progress) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_b0b923bddd463673, []int{5} +} +func (m *Progress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Progress.Unmarshal(m, b) +} +func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Progress.Marshal(b, m, deterministic) +} +func (dst *Progress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Progress.Merge(dst, src) +} +func (m *Progress) XXX_Size() int { + return xxx_messageInfo_Progress.Size(m) +} +func (m *Progress) XXX_DiscardUnknown() { + xxx_messageInfo_Progress.DiscardUnknown(m) +} + +var xxx_messageInfo_Progress proto.InternalMessageInfo + +func (m *Progress) GetEstimatedWork() int64 { + if m != nil { + return m.EstimatedWork + } + return 0 +} + +func (m *Progress) GetCompletedWork() int64 { + if m != nil { + return m.CompletedWork + } + return 0 +} + +func init() { + proto.RegisterType((*IndexOperationMetadata)(nil), "google.firestore.admin.v1.IndexOperationMetadata") + proto.RegisterType((*FieldOperationMetadata)(nil), "google.firestore.admin.v1.FieldOperationMetadata") + proto.RegisterType((*FieldOperationMetadata_IndexConfigDelta)(nil), "google.firestore.admin.v1.FieldOperationMetadata.IndexConfigDelta") + proto.RegisterType((*ExportDocumentsMetadata)(nil), "google.firestore.admin.v1.ExportDocumentsMetadata") + proto.RegisterType((*ImportDocumentsMetadata)(nil), "google.firestore.admin.v1.ImportDocumentsMetadata") + proto.RegisterType((*ExportDocumentsResponse)(nil), "google.firestore.admin.v1.ExportDocumentsResponse") + proto.RegisterType((*Progress)(nil), "google.firestore.admin.v1.Progress") + proto.RegisterEnum("google.firestore.admin.v1.OperationState", OperationState_name, OperationState_value) + proto.RegisterEnum("google.firestore.admin.v1.FieldOperationMetadata_IndexConfigDelta_ChangeType", FieldOperationMetadata_IndexConfigDelta_ChangeType_name, FieldOperationMetadata_IndexConfigDelta_ChangeType_value) +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1/operation.proto", fileDescriptor_operation_b0b923bddd463673) +} + +var fileDescriptor_operation_b0b923bddd463673 = []byte{ + // 850 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x56, 0x4f, 0x6f, 0xe3, 0x44, + 0x1c, 0xc5, 0x71, 0x92, 0x6e, 0x7e, 0xa5, 0x59, 0x77, 0x40, 0xdb, 0xd0, 0x05, 0x35, 0x0a, 0x5a, + 0x29, 0xbb, 0x07, 0x47, 0x2d, 0x02, 0x09, 0x21, 0x2d, 0x4a, 0x1d, 0xa7, 0x18, 0xa5, 0x89, 0xe5, + 0xa4, 0x05, 0x56, 0x95, 0x2c, 0x37, 0x9e, 0x98, 0xd1, 0xda, 0x1e, 0xcb, 0x33, 0x59, 0xda, 0x23, + 0x5f, 0x84, 0x03, 0x47, 0x3e, 0x03, 0x9f, 0x80, 0x13, 0xe2, 0xa3, 0x70, 0xe4, 0x84, 0x66, 0xfc, + 0x27, 0xda, 0x92, 0x14, 0x58, 0x2a, 0xb1, 0x87, 0xbd, 0xe5, 0x37, 0x7e, 0xef, 0xfd, 0x7e, 0x33, + 0xef, 0xcd, 0x28, 0xf0, 0x38, 0xa0, 0x34, 0x08, 0x71, 0x6f, 0x41, 0x52, 0xcc, 0x38, 0x4d, 0x71, + 0xcf, 0xf3, 0x23, 0x12, 0xf7, 0x5e, 0x1c, 0xf6, 0x68, 0x82, 0x53, 0x8f, 0x13, 0x1a, 0xeb, 0x49, + 0x4a, 0x39, 0x45, 0xef, 0x65, 0x50, 0xbd, 0x84, 0xea, 0x12, 0xaa, 0xbf, 0x38, 0xdc, 0x7f, 0xb4, + 0x59, 0x85, 0xc4, 0x3e, 0xbe, 
0xca, 0x14, 0xf6, 0x0f, 0x72, 0x98, 0xac, 0x2e, 0x97, 0x8b, 0x1e, + 0x27, 0x11, 0x66, 0xdc, 0x8b, 0x92, 0x1c, 0xf0, 0x7e, 0x0e, 0xf0, 0x12, 0xd2, 0xf3, 0xe2, 0x98, + 0x72, 0xd9, 0x9f, 0x65, 0x5f, 0x3b, 0xdf, 0xab, 0xf0, 0xc0, 0x12, 0x72, 0x93, 0x62, 0xb2, 0x53, + 0xcc, 0x3d, 0xdf, 0xe3, 0x1e, 0xfa, 0x14, 0x80, 0x71, 0x2f, 0xe5, 0xae, 0x50, 0x6c, 0x29, 0x6d, + 0xa5, 0xbb, 0x7d, 0xb4, 0xaf, 0xe7, 0x03, 0x17, 0xed, 0xf4, 0x59, 0xd1, 0xce, 0x69, 0x48, 0xb4, + 0xa8, 0xd1, 0xc7, 0x70, 0x0f, 0xc7, 0x7e, 0x46, 0xac, 0xfc, 0x2d, 0x71, 0x0b, 0xc7, 0xbe, 0xa4, + 0xbd, 0x0b, 0x35, 0xb9, 0xb5, 0x96, 0xda, 0x56, 0xba, 0x0d, 0x27, 0x2b, 0xd0, 0xe7, 0x50, 0x63, + 0xdc, 0xe3, 0xb8, 0x55, 0x6d, 0x2b, 0xdd, 0xe6, 0xd1, 0x63, 0x7d, 0xe3, 0x99, 0xe9, 0xe5, 0x26, + 0xa6, 0x82, 0xe0, 0x64, 0x3c, 0xe4, 0x00, 0x4a, 0x52, 0x1a, 0xa4, 0x98, 0x31, 0xd7, 0xa7, 0xf3, + 0x65, 0x84, 0x63, 0xce, 0x5a, 0x35, 0x39, 0xd7, 0x87, 0xb7, 0xa8, 0xd9, 0x39, 0xc9, 0xd9, 0x2d, + 0xe8, 0x83, 0x82, 0x8d, 0xbe, 0x84, 0x66, 0xa9, 0x79, 0x79, 0xcd, 0x31, 0x6b, 0xd5, 0xff, 0xb9, + 0xde, 0x4e, 0x41, 0x3d, 0x16, 0xcc, 0xce, 0xef, 0x35, 0x78, 0x30, 0x24, 0x38, 0xf4, 0x5f, 0x13, + 0x0f, 0x16, 0x62, 0x96, 0xc2, 0x03, 0x59, 0xa0, 0x14, 0xde, 0x91, 0x66, 0xb8, 0x73, 0x1a, 0x2f, + 0x48, 0xe0, 0xfa, 0x38, 0xe4, 0x1e, 0x6b, 0x55, 0xdb, 0x6a, 0x77, 0xfb, 0xe8, 0xf8, 0x96, 0x3d, + 0xaf, 0xdf, 0x97, 0x2e, 0x23, 0x67, 0x48, 0xad, 0x81, 0x90, 0x72, 0x76, 0xc9, 0x8d, 0x15, 0xb6, + 0xf2, 0xbd, 0x76, 0xa7, 0xbe, 0xd7, 0xef, 0xd8, 0xf7, 0xad, 0x57, 0xf5, 0x7d, 0xff, 0x0f, 0x05, + 0xb4, 0x9b, 0x07, 0x81, 0x62, 0xd8, 0x9e, 0x7f, 0xeb, 0xc5, 0x01, 0x76, 0xf9, 0x75, 0x92, 0x59, + 0xde, 0x3c, 0x3a, 0xfd, 0xef, 0x27, 0xac, 0x1b, 0x52, 0x75, 0x76, 0x9d, 0x60, 0x07, 0xe6, 0xe5, + 0x6f, 0xf4, 0x49, 0x71, 0xe7, 0xb2, 0x8c, 0xb4, 0x6f, 0xe9, 0x24, 0x25, 0xf3, 0x5b, 0xd9, 0x79, + 0x0a, 0xb0, 0x52, 0x44, 0x0f, 0x61, 0xcf, 0xf8, 0xa2, 0x3f, 0x3e, 0x31, 0xdd, 0xd9, 0x37, 0xb6, + 0xe9, 0x9e, 0x8d, 0xa7, 0xb6, 0x69, 0x58, 0x43, 0xcb, 0x1c, 0x68, 0x6f, 0xa1, 0x2d, 0x50, 0xfb, + 0x83, 0x81, 0xa6, 0x20, 0x80, 0xba, 0x63, 0x9e, 0x4e, 0xce, 0x4d, 0xad, 0xd2, 0xf9, 0x4d, 0x85, + 0x3d, 0xf3, 0x2a, 0xa1, 0x29, 0x2f, 0x0f, 0xf7, 0x7f, 0x4c, 0xbd, 0x03, 0xf7, 0xcb, 0xa7, 0xd9, + 0xcd, 0x52, 0xa7, 0xfe, 0xdb, 0xd4, 0x35, 0xe9, 0x4b, 0xf5, 0x86, 0xf8, 0x55, 0xef, 0x38, 0x7e, + 0xb5, 0x57, 0x8d, 0x1f, 0x7a, 0x04, 0xcd, 0x39, 0x0d, 0x43, 0x3c, 0x97, 0x9b, 0x26, 0xbe, 0xb8, + 0x1a, 0x6a, 0xb7, 0xe1, 0xec, 0xac, 0x56, 0x2d, 0x9f, 0xa1, 0x27, 0xb0, 0x4b, 0x97, 0x3c, 0x59, + 0x72, 0x77, 0x99, 0x12, 0x37, 0x49, 0xf1, 0x82, 0x5c, 0xc9, 0xd0, 0x37, 0x9c, 0xfb, 0xd9, 0x87, + 0xb3, 0x94, 0xd8, 0x72, 0xb9, 0xf3, 0xab, 0x0a, 0x7b, 0x56, 0xf4, 0xc6, 0xd4, 0xd7, 0xde, 0xd4, + 0x2e, 0x68, 0x24, 0x5e, 0xeb, 0x69, 0x53, 0xae, 0xaf, 0x2c, 0x35, 0xff, 0x72, 0x4d, 0x1d, 0xcc, + 0x12, 0x1a, 0x33, 0xbc, 0x3e, 0x19, 0xca, 0xfa, 0x64, 0x7c, 0x0d, 0xf7, 0x8a, 0x91, 0xc5, 0x8c, + 0x98, 0x71, 0x12, 0x79, 0x1c, 0xfb, 0xee, 0x77, 0x34, 0x7d, 0x2e, 0x49, 0xaa, 0xb3, 0x53, 0xae, + 0x7e, 0x45, 0xd3, 0xe7, 0xd9, 0x56, 0xa2, 0x24, 0xc4, 0x25, 0xac, 0x92, 0xc1, 0xca, 0x55, 0x01, + 0x7b, 0xf2, 0x83, 0x02, 0xcd, 0x97, 0x4d, 0x43, 0x07, 0xf0, 0x70, 0x62, 0x9b, 0x4e, 0x7f, 0x66, + 0x4d, 0xc6, 0xee, 0x74, 0xd6, 0x9f, 0xdd, 0x7c, 0x91, 0x34, 0x78, 0xdb, 0x1a, 0x5b, 0x33, 0xab, + 0x3f, 0xb2, 0x9e, 0x59, 0xe3, 0x13, 0x4d, 0x41, 0x4d, 0x00, 0xdb, 0x99, 0x18, 0xe6, 0x74, 0x2a, + 0xea, 0x8a, 0xa8, 0x8d, 0xfe, 0xd8, 0x30, 0x47, 0x23, 0x51, 0xab, 0xa2, 0x1e, 0x5a, 0xe3, 0x02, + 0x5f, 0x15, 0xf5, 0xf4, 0xcc, 0x10, 0xf8, 0xe1, 0xd9, 
0x48, 0xab, 0x89, 0xa7, 0x6d, 0xd8, 0xb7, + 0x46, 0xe6, 0x40, 0xab, 0xa3, 0x1d, 0x68, 0xe4, 0x5c, 0x73, 0xa0, 0x6d, 0x1d, 0xff, 0xac, 0xc0, + 0x07, 0x73, 0x1a, 0x6d, 0x36, 0xf3, 0x78, 0x35, 0xbf, 0x2d, 0xe2, 0x6c, 0x2b, 0xcf, 0x9e, 0xe6, + 0xe0, 0x80, 0x86, 0x5e, 0x1c, 0xe8, 0x34, 0x0d, 0x7a, 0x01, 0x8e, 0x65, 0xd8, 0x7b, 0xd9, 0x27, + 0x2f, 0x21, 0x6c, 0xcd, 0x7f, 0xc3, 0xcf, 0xe4, 0x8f, 0x1f, 0x2b, 0xd5, 0x13, 0x63, 0x38, 0xfd, + 0xa9, 0x72, 0x70, 0x92, 0xe9, 0x18, 0x21, 0x5d, 0xfa, 0xfa, 0xb0, 0x6c, 0xdd, 0x97, 0xad, 0xcf, + 0x0f, 0x7f, 0x29, 0x10, 0x17, 0x12, 0x71, 0x51, 0x22, 0x2e, 0x24, 0xe2, 0xe2, 0xfc, 0xf0, 0xb2, + 0x2e, 0xbb, 0x7e, 0xf4, 0x67, 0x00, 0x00, 0x00, 0xff, 0xff, 0x63, 0x18, 0x66, 0xe2, 0xd6, 0x0a, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/firestore_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/firestore_admin.pb.go new file mode 100644 index 0000000..1a00742 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/firestore_admin.pb.go @@ -0,0 +1,1252 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1beta1/firestore_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The various possible states for an ongoing Operation. +type OperationState int32 + +const ( + // Unspecified. + OperationState_STATE_UNSPECIFIED OperationState = 0 + // Request is being prepared for processing. + OperationState_INITIALIZING OperationState = 1 + // Request is actively being processed. + OperationState_PROCESSING OperationState = 2 + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + OperationState_CANCELLING OperationState = 3 + // Request has been processed and is in its finalization stage. + OperationState_FINALIZING OperationState = 4 + // Request has completed successfully. + OperationState_SUCCESSFUL OperationState = 5 + // Request has finished being processed, but encountered an error. + OperationState_FAILED OperationState = 6 + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. 
+ OperationState_CANCELLED OperationState = 7 +) + +var OperationState_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "INITIALIZING", + 2: "PROCESSING", + 3: "CANCELLING", + 4: "FINALIZING", + 5: "SUCCESSFUL", + 6: "FAILED", + 7: "CANCELLED", +} +var OperationState_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "INITIALIZING": 1, + "PROCESSING": 2, + "CANCELLING": 3, + "FINALIZING": 4, + "SUCCESSFUL": 5, + "FAILED": 6, + "CANCELLED": 7, +} + +func (x OperationState) String() string { + return proto.EnumName(OperationState_name, int32(x)) +} +func (OperationState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{0} +} + +// The type of index operation. +type IndexOperationMetadata_OperationType int32 + +const ( + // Unspecified. Never set by server. + IndexOperationMetadata_OPERATION_TYPE_UNSPECIFIED IndexOperationMetadata_OperationType = 0 + // The operation is creating the index. Initiated by a `CreateIndex` call. + IndexOperationMetadata_CREATING_INDEX IndexOperationMetadata_OperationType = 1 +) + +var IndexOperationMetadata_OperationType_name = map[int32]string{ + 0: "OPERATION_TYPE_UNSPECIFIED", + 1: "CREATING_INDEX", +} +var IndexOperationMetadata_OperationType_value = map[string]int32{ + "OPERATION_TYPE_UNSPECIFIED": 0, + "CREATING_INDEX": 1, +} + +func (x IndexOperationMetadata_OperationType) String() string { + return proto.EnumName(IndexOperationMetadata_OperationType_name, int32(x)) +} +func (IndexOperationMetadata_OperationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{0, 0} +} + +// Metadata for index operations. This metadata populates +// the metadata field of [google.longrunning.Operation][google.longrunning.Operation]. +type IndexOperationMetadata struct { + // The time that work began on the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the operation ended, either successfully or otherwise. Unset if + // the operation is still active. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The index resource that this operation is acting on. For example: + // `projects/{project_id}/databases/{database_id}/indexes/{index_id}` + Index string `protobuf:"bytes,3,opt,name=index,proto3" json:"index,omitempty"` + // The type of index operation. + OperationType IndexOperationMetadata_OperationType `protobuf:"varint,4,opt,name=operation_type,json=operationType,proto3,enum=google.firestore.admin.v1beta1.IndexOperationMetadata_OperationType" json:"operation_type,omitempty"` + // True if the [google.longrunning.Operation] was cancelled. If the + // cancellation is in progress, cancelled will be true but + // [google.longrunning.Operation.done][google.longrunning.Operation.done] will be false. + Cancelled bool `protobuf:"varint,5,opt,name=cancelled,proto3" json:"cancelled,omitempty"` + // Progress of the existing operation, measured in number of documents. 
+ DocumentProgress *Progress `protobuf:"bytes,6,opt,name=document_progress,json=documentProgress,proto3" json:"document_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IndexOperationMetadata) Reset() { *m = IndexOperationMetadata{} } +func (m *IndexOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*IndexOperationMetadata) ProtoMessage() {} +func (*IndexOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{0} +} +func (m *IndexOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IndexOperationMetadata.Unmarshal(m, b) +} +func (m *IndexOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IndexOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *IndexOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_IndexOperationMetadata.Merge(dst, src) +} +func (m *IndexOperationMetadata) XXX_Size() int { + return xxx_messageInfo_IndexOperationMetadata.Size(m) +} +func (m *IndexOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_IndexOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_IndexOperationMetadata proto.InternalMessageInfo + +func (m *IndexOperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *IndexOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *IndexOperationMetadata) GetIndex() string { + if m != nil { + return m.Index + } + return "" +} + +func (m *IndexOperationMetadata) GetOperationType() IndexOperationMetadata_OperationType { + if m != nil { + return m.OperationType + } + return IndexOperationMetadata_OPERATION_TYPE_UNSPECIFIED +} + +func (m *IndexOperationMetadata) GetCancelled() bool { + if m != nil { + return m.Cancelled + } + return false +} + +func (m *IndexOperationMetadata) GetDocumentProgress() *Progress { + if m != nil { + return m.DocumentProgress + } + return nil +} + +// Measures the progress of a particular metric. +type Progress struct { + // An estimate of how much work has been completed. Note that this may be + // greater than `work_estimated`. + WorkCompleted int64 `protobuf:"varint,1,opt,name=work_completed,json=workCompleted,proto3" json:"work_completed,omitempty"` + // An estimate of how much work needs to be performed. Zero if the + // work estimate is unavailable. May change as work progresses. 
+ WorkEstimated int64 `protobuf:"varint,2,opt,name=work_estimated,json=workEstimated,proto3" json:"work_estimated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Progress) Reset() { *m = Progress{} } +func (m *Progress) String() string { return proto.CompactTextString(m) } +func (*Progress) ProtoMessage() {} +func (*Progress) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{1} +} +func (m *Progress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Progress.Unmarshal(m, b) +} +func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Progress.Marshal(b, m, deterministic) +} +func (dst *Progress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Progress.Merge(dst, src) +} +func (m *Progress) XXX_Size() int { + return xxx_messageInfo_Progress.Size(m) +} +func (m *Progress) XXX_DiscardUnknown() { + xxx_messageInfo_Progress.DiscardUnknown(m) +} + +var xxx_messageInfo_Progress proto.InternalMessageInfo + +func (m *Progress) GetWorkCompleted() int64 { + if m != nil { + return m.WorkCompleted + } + return 0 +} + +func (m *Progress) GetWorkEstimated() int64 { + if m != nil { + return m.WorkEstimated + } + return 0 +} + +// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex]. +type CreateIndexRequest struct { + // The name of the database this index will apply to. For example: + // `projects/{project_id}/databases/{database_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The index to create. The name and state fields are output only and will be + // ignored. Certain single field indexes cannot be created or deleted. + Index *Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIndexRequest) Reset() { *m = CreateIndexRequest{} } +func (m *CreateIndexRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIndexRequest) ProtoMessage() {} +func (*CreateIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{2} +} +func (m *CreateIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIndexRequest.Unmarshal(m, b) +} +func (m *CreateIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIndexRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIndexRequest.Merge(dst, src) +} +func (m *CreateIndexRequest) XXX_Size() int { + return xxx_messageInfo_CreateIndexRequest.Size(m) +} +func (m *CreateIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIndexRequest proto.InternalMessageInfo + +func (m *CreateIndexRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateIndexRequest) GetIndex() *Index { + if m != nil { + return m.Index + } + return nil +} + +// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta1.FirestoreAdmin.GetIndex]. +type GetIndexRequest struct { + // The name of the index. 
For example: + // `projects/{project_id}/databases/{database_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIndexRequest) Reset() { *m = GetIndexRequest{} } +func (m *GetIndexRequest) String() string { return proto.CompactTextString(m) } +func (*GetIndexRequest) ProtoMessage() {} +func (*GetIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{3} +} +func (m *GetIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIndexRequest.Unmarshal(m, b) +} +func (m *GetIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIndexRequest.Marshal(b, m, deterministic) +} +func (dst *GetIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIndexRequest.Merge(dst, src) +} +func (m *GetIndexRequest) XXX_Size() int { + return xxx_messageInfo_GetIndexRequest.Size(m) +} +func (m *GetIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIndexRequest proto.InternalMessageInfo + +func (m *GetIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes]. +type ListIndexesRequest struct { + // The database name. For example: + // `projects/{project_id}/databases/{database_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The standard List page size. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The standard List page token. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesRequest) Reset() { *m = ListIndexesRequest{} } +func (m *ListIndexesRequest) String() string { return proto.CompactTextString(m) } +func (*ListIndexesRequest) ProtoMessage() {} +func (*ListIndexesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{4} +} +func (m *ListIndexesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesRequest.Unmarshal(m, b) +} +func (m *ListIndexesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesRequest.Marshal(b, m, deterministic) +} +func (dst *ListIndexesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesRequest.Merge(dst, src) +} +func (m *ListIndexesRequest) XXX_Size() int { + return xxx_messageInfo_ListIndexesRequest.Size(m) +} +func (m *ListIndexesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesRequest proto.InternalMessageInfo + +func (m *ListIndexesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListIndexesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListIndexesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIndexesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex]. +type DeleteIndexRequest struct { + // The index name. For example: + // `projects/{project_id}/databases/{database_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteIndexRequest) Reset() { *m = DeleteIndexRequest{} } +func (m *DeleteIndexRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteIndexRequest) ProtoMessage() {} +func (*DeleteIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{5} +} +func (m *DeleteIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteIndexRequest.Unmarshal(m, b) +} +func (m *DeleteIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteIndexRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteIndexRequest.Merge(dst, src) +} +func (m *DeleteIndexRequest) XXX_Size() int { + return xxx_messageInfo_DeleteIndexRequest.Size(m) +} +func (m *DeleteIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteIndexRequest proto.InternalMessageInfo + +func (m *DeleteIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta1.FirestoreAdmin.ListIndexes]. +type ListIndexesResponse struct { + // The indexes. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // The standard List next-page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesResponse) Reset() { *m = ListIndexesResponse{} } +func (m *ListIndexesResponse) String() string { return proto.CompactTextString(m) } +func (*ListIndexesResponse) ProtoMessage() {} +func (*ListIndexesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{6} +} +func (m *ListIndexesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesResponse.Unmarshal(m, b) +} +func (m *ListIndexesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesResponse.Marshal(b, m, deterministic) +} +func (dst *ListIndexesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesResponse.Merge(dst, src) +} +func (m *ListIndexesResponse) XXX_Size() int { + return xxx_messageInfo_ListIndexesResponse.Size(m) +} +func (m *ListIndexesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesResponse proto.InternalMessageInfo + +func (m *ListIndexesResponse) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *ListIndexesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ExportDocuments]. +type ExportDocumentsRequest struct { + // Database to export. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to export. Unspecified means all collections. + CollectionIds []string `protobuf:"bytes,3,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The output URI. Currently only supports Google Cloud Storage URIs of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name + // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional + // Google Cloud Storage namespace path. When + // choosing a name, be sure to consider Google Cloud Storage naming + // guidelines: https://cloud.google.com/storage/docs/naming. + // If the URI is a bucket (without a namespace path), a prefix will be + // generated based on the start time. 
+ OutputUriPrefix string `protobuf:"bytes,4,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsRequest) Reset() { *m = ExportDocumentsRequest{} } +func (m *ExportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsRequest) ProtoMessage() {} +func (*ExportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{7} +} +func (m *ExportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsRequest.Unmarshal(m, b) +} +func (m *ExportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsRequest.Merge(dst, src) +} +func (m *ExportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsRequest.Size(m) +} +func (m *ExportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsRequest proto.InternalMessageInfo + +func (m *ExportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsRequest) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta1.FirestoreAdmin.ImportDocuments]. +type ImportDocumentsRequest struct { + // Database to import into. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to import. Unspecified means all collections included + // in the import. + CollectionIds []string `protobuf:"bytes,3,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Location of the exported files. + // This must match the output_uri_prefix of an ExportDocumentsResponse from + // an export that has completed successfully. + // See: + // [google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1beta1.ExportDocumentsResponse.output_uri_prefix]. 
+ InputUriPrefix string `protobuf:"bytes,4,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsRequest) Reset() { *m = ImportDocumentsRequest{} } +func (m *ImportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsRequest) ProtoMessage() {} +func (*ImportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{8} +} +func (m *ImportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsRequest.Unmarshal(m, b) +} +func (m *ImportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsRequest.Merge(dst, src) +} +func (m *ImportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsRequest.Size(m) +} +func (m *ImportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsRequest proto.InternalMessageInfo + +func (m *ImportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ImportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsRequest) GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field. +type ExportDocumentsResponse struct { + // Location of the output files. This can be used to begin an import + // into Cloud Firestore (this project or another project) after the operation + // completes successfully. + OutputUriPrefix string `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsResponse) Reset() { *m = ExportDocumentsResponse{} } +func (m *ExportDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsResponse) ProtoMessage() {} +func (*ExportDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{9} +} +func (m *ExportDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsResponse.Unmarshal(m, b) +} +func (m *ExportDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsResponse.Merge(dst, src) +} +func (m *ExportDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsResponse.Size(m) +} +func (m *ExportDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsResponse proto.InternalMessageInfo + +func (m *ExportDocumentsResponse) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Metadata for ExportDocuments operations. 
+type ExportDocumentsMetadata struct { + // The time that work began on the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the operation ended, either successfully or otherwise. Unset if + // the operation is still active. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the export operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1beta1.OperationState" json:"operation_state,omitempty"` + // An estimate of the number of documents processed. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being exported. + CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Where the entities are being exported to. + OutputUriPrefix string `protobuf:"bytes,7,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsMetadata) Reset() { *m = ExportDocumentsMetadata{} } +func (m *ExportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsMetadata) ProtoMessage() {} +func (*ExportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{10} +} +func (m *ExportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ExportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsMetadata.Merge(dst, src) +} +func (m *ExportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsMetadata.Size(m) +} +func (m *ExportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsMetadata proto.InternalMessageInfo + +func (m *ExportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_STATE_UNSPECIFIED +} + +func (m *ExportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ExportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ExportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix 
+ } + return "" +} + +// Metadata for ImportDocuments operations. +type ImportDocumentsMetadata struct { + // The time that work began on the operation. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time the operation ended, either successfully or otherwise. Unset if + // the operation is still active. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the import operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1beta1.OperationState" json:"operation_state,omitempty"` + // An estimate of the number of documents processed. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // An estimate of the number of bytes processed. + ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being imported. + CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The location of the documents being imported. + InputUriPrefix string `protobuf:"bytes,7,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsMetadata) Reset() { *m = ImportDocumentsMetadata{} } +func (m *ImportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsMetadata) ProtoMessage() {} +func (*ImportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_aada04317d1f4b2a, []int{11} +} +func (m *ImportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ImportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsMetadata.Merge(dst, src) +} +func (m *ImportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsMetadata.Size(m) +} +func (m *ImportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsMetadata proto.InternalMessageInfo + +func (m *ImportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_STATE_UNSPECIFIED +} + +func (m *ImportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ImportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ImportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsMetadata) 
GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +func init() { + proto.RegisterType((*IndexOperationMetadata)(nil), "google.firestore.admin.v1beta1.IndexOperationMetadata") + proto.RegisterType((*Progress)(nil), "google.firestore.admin.v1beta1.Progress") + proto.RegisterType((*CreateIndexRequest)(nil), "google.firestore.admin.v1beta1.CreateIndexRequest") + proto.RegisterType((*GetIndexRequest)(nil), "google.firestore.admin.v1beta1.GetIndexRequest") + proto.RegisterType((*ListIndexesRequest)(nil), "google.firestore.admin.v1beta1.ListIndexesRequest") + proto.RegisterType((*DeleteIndexRequest)(nil), "google.firestore.admin.v1beta1.DeleteIndexRequest") + proto.RegisterType((*ListIndexesResponse)(nil), "google.firestore.admin.v1beta1.ListIndexesResponse") + proto.RegisterType((*ExportDocumentsRequest)(nil), "google.firestore.admin.v1beta1.ExportDocumentsRequest") + proto.RegisterType((*ImportDocumentsRequest)(nil), "google.firestore.admin.v1beta1.ImportDocumentsRequest") + proto.RegisterType((*ExportDocumentsResponse)(nil), "google.firestore.admin.v1beta1.ExportDocumentsResponse") + proto.RegisterType((*ExportDocumentsMetadata)(nil), "google.firestore.admin.v1beta1.ExportDocumentsMetadata") + proto.RegisterType((*ImportDocumentsMetadata)(nil), "google.firestore.admin.v1beta1.ImportDocumentsMetadata") + proto.RegisterEnum("google.firestore.admin.v1beta1.OperationState", OperationState_name, OperationState_value) + proto.RegisterEnum("google.firestore.admin.v1beta1.IndexOperationMetadata_OperationType", IndexOperationMetadata_OperationType_name, IndexOperationMetadata_OperationType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FirestoreAdminClient is the client API for FirestoreAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FirestoreAdminClient interface { + // Creates the specified index. + // A newly created index's initial state is `CREATING`. On completion of the + // returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. + // If the index already exists, the call will return an `ALREADY_EXISTS` + // status. + // + // During creation, the process could result in an error, in which case the + // index will move to the `ERROR` state. The process can be recovered by + // fixing the data that caused the error, removing the index with + // [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with + // [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex]. + // + // Indexes with a single field cannot be created. + CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists the indexes that match the specified filters. + ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) + // Gets an index. + GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) + // Deletes an index. 
+ DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. + ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type firestoreAdminClient struct { + cc *grpc.ClientConn +} + +func NewFirestoreAdminClient(cc *grpc.ClientConn) FirestoreAdminClient { + return &firestoreAdminClient{cc} +} + +func (c *firestoreAdminClient) CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) { + out := new(ListIndexesResponse) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) { + out := new(Index) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/ExportDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta1.FirestoreAdmin/ImportDocuments", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// FirestoreAdminServer is the server API for FirestoreAdmin service. +type FirestoreAdminServer interface { + // Creates the specified index. + // A newly created index's initial state is `CREATING`. On completion of the + // returned [google.longrunning.Operation][google.longrunning.Operation], the state will be `READY`. + // If the index already exists, the call will return an `ALREADY_EXISTS` + // status. + // + // During creation, the process could result in an error, in which case the + // index will move to the `ERROR` state. The process can be recovered by + // fixing the data that caused the error, removing the index with + // [delete][google.firestore.admin.v1beta1.FirestoreAdmin.DeleteIndex], then re-creating the index with + // [create][google.firestore.admin.v1beta1.FirestoreAdmin.CreateIndex]. + // + // Indexes with a single field cannot be created. + CreateIndex(context.Context, *CreateIndexRequest) (*longrunning.Operation, error) + // Lists the indexes that match the specified filters. + ListIndexes(context.Context, *ListIndexesRequest) (*ListIndexesResponse, error) + // Gets an index. + GetIndex(context.Context, *GetIndexRequest) (*Index, error) + // Deletes an index. + DeleteIndex(context.Context, *DeleteIndexRequest) (*empty.Empty, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(context.Context, *ExportDocumentsRequest) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. 
+ ImportDocuments(context.Context, *ImportDocumentsRequest) (*longrunning.Operation, error) +} + +func RegisterFirestoreAdminServer(s *grpc.Server, srv FirestoreAdminServer) { + s.RegisterService(&_FirestoreAdmin_serviceDesc, srv) +} + +func _FirestoreAdmin_CreateIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).CreateIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/CreateIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).CreateIndex(ctx, req.(*CreateIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ListIndexes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIndexesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ListIndexes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/ListIndexes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ListIndexes(ctx, req.(*ListIndexesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_GetIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).GetIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/GetIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).GetIndex(ctx, req.(*GetIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_DeleteIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/DeleteIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, req.(*DeleteIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ExportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ExportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/ExportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(FirestoreAdminServer).ExportDocuments(ctx, req.(*ExportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ImportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta1.FirestoreAdmin/ImportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, req.(*ImportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FirestoreAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firestore.admin.v1beta1.FirestoreAdmin", + HandlerType: (*FirestoreAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateIndex", + Handler: _FirestoreAdmin_CreateIndex_Handler, + }, + { + MethodName: "ListIndexes", + Handler: _FirestoreAdmin_ListIndexes_Handler, + }, + { + MethodName: "GetIndex", + Handler: _FirestoreAdmin_GetIndex_Handler, + }, + { + MethodName: "DeleteIndex", + Handler: _FirestoreAdmin_DeleteIndex_Handler, + }, + { + MethodName: "ExportDocuments", + Handler: _FirestoreAdmin_ExportDocuments_Handler, + }, + { + MethodName: "ImportDocuments", + Handler: _FirestoreAdmin_ImportDocuments_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/firestore/admin/v1beta1/firestore_admin.proto", +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta1/firestore_admin.proto", fileDescriptor_firestore_admin_aada04317d1f4b2a) +} + +var fileDescriptor_firestore_admin_aada04317d1f4b2a = []byte{ + // 1183 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x57, 0xdd, 0x6e, 0xe3, 0xc4, + 0x17, 0xff, 0x3b, 0x69, 0x93, 0xe6, 0xf4, 0x9f, 0x34, 0x9d, 0x85, 0x6c, 0x94, 0xfd, 0x20, 0x32, + 0x2c, 0x8a, 0x72, 0x61, 0xb3, 0x59, 0x58, 0xb1, 0x5b, 0xa1, 0x55, 0xea, 0x38, 0x95, 0xa5, 0x90, + 0x46, 0x4e, 0xca, 0x2e, 0x7b, 0x63, 0xb9, 0xc9, 0x34, 0x32, 0x4d, 0x3c, 0xc6, 0x33, 0x81, 0x76, + 0x51, 0x01, 0xf1, 0x0a, 0xcb, 0x25, 0x37, 0x70, 0x09, 0x12, 0xe2, 0x2d, 0x78, 0x00, 0x1e, 0x00, + 0x21, 0xf1, 0x10, 0x5c, 0xa2, 0x19, 0xdb, 0xf9, 0xde, 0x75, 0x2b, 0xad, 0x84, 0x90, 0xb8, 0xcb, + 0x39, 0x73, 0x3e, 0x7e, 0xe7, 0x23, 0xe7, 0x1c, 0xc3, 0xbb, 0x43, 0x42, 0x86, 0x23, 0xac, 0x9e, + 0x38, 0x3e, 0xa6, 0x8c, 0xf8, 0x58, 0xb5, 0x07, 0x63, 0xc7, 0x55, 0x3f, 0xbb, 0x7b, 0x8c, 0x99, + 0x7d, 0x77, 0xc6, 0xb7, 0x04, 0x5f, 0xf1, 0x7c, 0xc2, 0x08, 0xba, 0x1d, 0x68, 0x29, 0xd3, 0x57, + 0x25, 0x78, 0x0d, 0xb5, 0x4a, 0x37, 0x43, 0xab, 0xb6, 0xe7, 0xa8, 0xb6, 0xeb, 0x12, 0x66, 0x33, + 0x87, 0xb8, 0x34, 0xd0, 0x2e, 0x55, 0x63, 0x7c, 0x3a, 0xee, 0x00, 0x9f, 0x85, 0xb2, 0x6f, 0x86, + 0xb2, 0x23, 0xe2, 0x0e, 0xfd, 0x89, 0xeb, 0x3a, 0xee, 0x50, 0x25, 0x1e, 0xf6, 0x17, 0x0c, 0xde, + 0x08, 0x85, 0x04, 0x75, 0x3c, 0x39, 0x51, 0xf1, 0xd8, 0x63, 0xe7, 0xe1, 0xe3, 0x1b, 0xcb, 0x8f, + 0xcc, 0x19, 0x63, 0xca, 0xec, 0xb1, 0x17, 0x08, 0xc8, 0xbf, 0x26, 0xa1, 0x60, 0x70, 0x97, 0x87, + 0x91, 0xdd, 0x0f, 0x31, 0xb3, 0x07, 0x36, 0xb3, 0xd1, 0x03, 0x00, 0xca, 0x6c, 0x9f, 0x59, 0x5c, + 0xa7, 0x28, 0x95, 0xa5, 0xca, 0x76, 0xad, 0xa4, 0x84, 0xc1, 0x47, 0x06, 0x95, 0x5e, 0x64, 0xd0, + 0xcc, 0x08, 0x69, 0x4e, 0xa3, 0xf7, 0x60, 0x0b, 
0xbb, 0x83, 0x40, 0x31, 0x11, 0xab, 0x98, 0xc6, + 0xee, 0x40, 0xa8, 0xbd, 0x06, 0x9b, 0x22, 0xfc, 0x62, 0xb2, 0x2c, 0x55, 0x32, 0x66, 0x40, 0xa0, + 0x53, 0xc8, 0x4d, 0x83, 0xb6, 0xd8, 0xb9, 0x87, 0x8b, 0x1b, 0x65, 0xa9, 0x92, 0xab, 0x35, 0x94, + 0x97, 0x17, 0x42, 0x59, 0x1f, 0x97, 0x32, 0xe5, 0xf4, 0xce, 0x3d, 0x6c, 0x66, 0xc9, 0x3c, 0x89, + 0x6e, 0x42, 0xa6, 0x6f, 0xbb, 0x7d, 0x3c, 0x1a, 0xe1, 0x41, 0x71, 0xb3, 0x2c, 0x55, 0xb6, 0xcc, + 0x19, 0x03, 0x1d, 0xc1, 0xee, 0x80, 0xf4, 0x27, 0x63, 0xec, 0x32, 0xcb, 0xf3, 0xc9, 0xd0, 0xc7, + 0x94, 0x16, 0x53, 0x22, 0xc0, 0x4a, 0x1c, 0x9a, 0x4e, 0x28, 0x6f, 0xe6, 0x23, 0x13, 0x11, 0x47, + 0xd6, 0x20, 0xbb, 0x00, 0x0a, 0xdd, 0x86, 0xd2, 0x61, 0x47, 0x37, 0xeb, 0x3d, 0xe3, 0xb0, 0x6d, + 0xf5, 0x3e, 0xee, 0xe8, 0xd6, 0x51, 0xbb, 0xdb, 0xd1, 0x35, 0xa3, 0x69, 0xe8, 0x8d, 0xfc, 0xff, + 0x10, 0x82, 0x9c, 0x66, 0xea, 0xf5, 0x9e, 0xd1, 0x3e, 0xb0, 0x8c, 0x76, 0x43, 0x7f, 0x92, 0x97, + 0xe4, 0x27, 0xb0, 0x15, 0x19, 0x44, 0x77, 0x20, 0xf7, 0x39, 0xf1, 0x4f, 0xad, 0x3e, 0x19, 0x7b, + 0x23, 0xcc, 0xf0, 0x40, 0x94, 0x2f, 0x69, 0x66, 0x39, 0x57, 0x8b, 0x98, 0x53, 0x31, 0x4c, 0x99, + 0x33, 0xb6, 0xb9, 0x58, 0x62, 0x26, 0xa6, 0x47, 0x4c, 0xd9, 0x01, 0xa4, 0xf9, 0xd8, 0x66, 0x58, + 0x24, 0xd4, 0xc4, 0x9f, 0x4e, 0x30, 0x65, 0xa8, 0x00, 0x29, 0xcf, 0xf6, 0xb1, 0xcb, 0x84, 0xed, + 0x8c, 0x19, 0x52, 0x68, 0x2f, 0x2a, 0x62, 0x50, 0xf8, 0x3b, 0x97, 0xaa, 0x52, 0x58, 0x6b, 0xf9, + 0x0e, 0xec, 0x1c, 0x60, 0xb6, 0xe0, 0x07, 0xc1, 0x86, 0x6b, 0x87, 0x0d, 0x98, 0x31, 0xc5, 0x6f, + 0xf9, 0x6b, 0x09, 0x50, 0xcb, 0xa1, 0x81, 0x20, 0xa6, 0x71, 0x90, 0x0a, 0x90, 0x3a, 0x71, 0x46, + 0x0c, 0xfb, 0x02, 0x53, 0xc6, 0x0c, 0x29, 0x74, 0x03, 0x32, 0x9e, 0x3d, 0xc4, 0x16, 0x75, 0x9e, + 0x61, 0xd1, 0x73, 0x9b, 0xe6, 0x16, 0x67, 0x74, 0x9d, 0x67, 0x18, 0xdd, 0x02, 0x10, 0x8f, 0x8c, + 0x9c, 0x62, 0x57, 0xb4, 0x5c, 0xc6, 0x14, 0xe2, 0x3d, 0xce, 0x90, 0x2b, 0x80, 0x1a, 0x98, 0xa7, + 0x31, 0x16, 0xec, 0x97, 0x70, 0x6d, 0x01, 0x2b, 0xf5, 0x88, 0x4b, 0x31, 0x7a, 0x04, 0x69, 0x27, + 0x60, 0x15, 0xa5, 0x72, 0xf2, 0xf2, 0x99, 0x8a, 0xb4, 0xd0, 0xdb, 0xb0, 0xe3, 0xe2, 0x33, 0x66, + 0xcd, 0xa1, 0x0c, 0xc2, 0xcb, 0x72, 0x76, 0x67, 0x8a, 0xf4, 0x2b, 0x28, 0xe8, 0x67, 0x1e, 0xf1, + 0x59, 0x23, 0xec, 0x3b, 0xfa, 0x12, 0xb4, 0xbc, 0x27, 0xfa, 0x64, 0x34, 0xc2, 0x7d, 0xf1, 0x77, + 0x73, 0x06, 0xb4, 0x98, 0x2c, 0x27, 0xb9, 0xd1, 0x19, 0xd7, 0x18, 0x50, 0x54, 0x85, 0x5d, 0x32, + 0x61, 0xde, 0x84, 0x59, 0x13, 0xdf, 0xb1, 0x3c, 0x1f, 0x9f, 0x38, 0x67, 0x61, 0x92, 0x76, 0x82, + 0x87, 0x23, 0xdf, 0xe9, 0x08, 0xb6, 0x7c, 0x01, 0x05, 0x63, 0xfc, 0xaa, 0x01, 0x54, 0x20, 0xef, + 0xb8, 0x6b, 0xfd, 0xe7, 0x04, 0x7f, 0xe6, 0x5e, 0x87, 0xeb, 0x2b, 0xf1, 0x87, 0x35, 0x58, 0x1b, + 0x85, 0xb4, 0x3e, 0x8a, 0x3f, 0x92, 0x2b, 0x76, 0xfe, 0xc1, 0x51, 0xf9, 0x18, 0x76, 0x66, 0x43, + 0x91, 0x32, 0x9b, 0x05, 0x0d, 0x9c, 0xab, 0x29, 0x71, 0x5d, 0x34, 0x9d, 0x34, 0x5d, 0xae, 0x65, + 0xce, 0x66, 0xab, 0xa0, 0xd1, 0x63, 0x40, 0xd1, 0x64, 0xb3, 0xa2, 0x41, 0x45, 0x45, 0x66, 0xaf, + 0x32, 0xe3, 0x76, 0x23, 0x1b, 0xd3, 0x5c, 0xa1, 0x43, 0xc8, 0x4d, 0x0d, 0x1f, 0x9f, 0x33, 0x4c, + 0xc5, 0x78, 0xbd, 0x8a, 0xd1, 0x6c, 0xa4, 0xbf, 0xcf, 0xd5, 0xd7, 0x34, 0x4a, 0xea, 0xd2, 0x9d, + 0x9a, 0x5e, 0x5f, 0xe3, 0xdf, 0x93, 0x70, 0x7d, 0xa9, 0x55, 0xff, 0xab, 0xf1, 0xbf, 0xa0, 0xc6, + 0xeb, 0x86, 0x41, 0x7a, 0xdd, 0x30, 0xa8, 0x7e, 0x2b, 0x41, 0x6e, 0x31, 0x3b, 0xe8, 0x75, 0xd8, + 0xed, 0xf6, 0xea, 0xbd, 0xe5, 0x1d, 0x9b, 0x87, 0xff, 0x1b, 0x6d, 0xa3, 0x67, 0xd4, 0x5b, 0xc6, + 0x53, 0xa3, 0x7d, 0x90, 0x97, 0x50, 0x0e, 0xa0, 0x63, 0x1e, 0x6a, 0x7a, 
0xb7, 0xcb, 0xe9, 0x04, + 0xa7, 0xb5, 0x7a, 0x5b, 0xd3, 0x5b, 0x2d, 0x4e, 0x27, 0x39, 0xdd, 0x34, 0xda, 0x91, 0xfc, 0x06, + 0xa7, 0xbb, 0x47, 0x1a, 0x97, 0x6f, 0x1e, 0xb5, 0xf2, 0x9b, 0x08, 0x20, 0xd5, 0xac, 0x1b, 0x2d, + 0xbd, 0x91, 0x4f, 0xa1, 0x2c, 0x64, 0x42, 0x5d, 0xbd, 0x91, 0x4f, 0xd7, 0xfe, 0x4a, 0x43, 0xae, + 0x19, 0xe5, 0xa6, 0xce, 0x53, 0x83, 0xbe, 0x97, 0x60, 0x7b, 0x6e, 0xed, 0xa2, 0x5a, 0x5c, 0x0e, + 0x57, 0x77, 0x74, 0xe9, 0x56, 0xa4, 0x33, 0x77, 0x41, 0xce, 0x9a, 0x43, 0x7e, 0xf4, 0xcd, 0x6f, + 0x7f, 0x3e, 0x4f, 0x3c, 0x90, 0xdf, 0x99, 0x5e, 0x9f, 0x5f, 0x04, 0x1b, 0xf3, 0x03, 0xcf, 0x27, + 0x9f, 0xe0, 0x3e, 0xa3, 0x6a, 0x55, 0xe5, 0xcd, 0x7f, 0x6c, 0x53, 0x4c, 0xd5, 0xea, 0x85, 0x1a, + 0xee, 0x9e, 0x87, 0xe1, 0x69, 0xf6, 0x8b, 0x04, 0xdb, 0x73, 0xbb, 0x2d, 0x1e, 0xe3, 0xea, 0xd2, + 0x2e, 0xdd, 0xbb, 0x92, 0x4e, 0x30, 0xb8, 0xe5, 0xf7, 0x05, 0xf2, 0x1a, 0xba, 0x32, 0x72, 0xf4, + 0x9d, 0x04, 0x5b, 0xd1, 0x89, 0x81, 0xd4, 0x38, 0xdf, 0x4b, 0xc7, 0x48, 0xe9, 0x72, 0x3b, 0x7a, + 0x1d, 0x3c, 0xbe, 0xdb, 0x5e, 0x00, 0x2e, 0xc2, 0xa6, 0x56, 0x2f, 0xd0, 0x73, 0x09, 0xb6, 0xe7, + 0xee, 0x8a, 0xf8, 0x8c, 0xae, 0x1e, 0x21, 0xa5, 0xc2, 0xca, 0x70, 0xd1, 0xf9, 0x27, 0x41, 0x84, + 0xaa, 0x7a, 0x75, 0x54, 0x3f, 0x49, 0xb0, 0xb3, 0xb4, 0xfb, 0xd0, 0xfd, 0x38, 0x64, 0xeb, 0x8f, + 0x8e, 0xb8, 0x9e, 0xac, 0x0b, 0x90, 0x7b, 0xf2, 0xfd, 0x4b, 0x81, 0xbc, 0x78, 0x88, 0x17, 0xbd, + 0x3c, 0x94, 0xaa, 0x02, 0xed, 0xd2, 0x14, 0x8f, 0x47, 0xbb, 0xfe, 0x42, 0x79, 0xd5, 0x68, 0x9d, + 0xf1, 0x32, 0xda, 0xfd, 0x9f, 0x25, 0x90, 0xfb, 0x64, 0x1c, 0x83, 0x6f, 0xff, 0xda, 0xe2, 0x78, + 0xe8, 0xf0, 0xd2, 0x76, 0xa4, 0xa7, 0x5a, 0xa8, 0x36, 0x24, 0x23, 0xdb, 0x1d, 0x2a, 0xc4, 0x1f, + 0xaa, 0x43, 0xec, 0x8a, 0xc2, 0xab, 0xc1, 0x93, 0xed, 0x39, 0xf4, 0x45, 0x5f, 0x9b, 0x7b, 0x82, + 0xfa, 0x21, 0xb1, 0x71, 0xa0, 0x35, 0xbb, 0x3f, 0x26, 0xde, 0x3a, 0x08, 0x8c, 0x69, 0x23, 0x32, + 0x19, 0x28, 0x53, 0x87, 0x8a, 0xf0, 0xa8, 0x7c, 0x74, 0x77, 0x9f, 0xeb, 0x1c, 0xa7, 0x84, 0xf5, + 0x7b, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0xab, 0xb1, 0x04, 0xab, 0x3e, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/index.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/index.pb.go new file mode 100644 index 0000000..a4648a6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/index.pb.go @@ -0,0 +1,267 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1beta1/index.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The mode determines how a field is indexed. +type IndexField_Mode int32 + +const ( + // The mode is unspecified. + IndexField_MODE_UNSPECIFIED IndexField_Mode = 0 + // The field's values are indexed so as to support sequencing in + // ascending order and also query by <, >, <=, >=, and =. 
+ IndexField_ASCENDING IndexField_Mode = 2 + // The field's values are indexed so as to support sequencing in + // descending order and also query by <, >, <=, >=, and =. + IndexField_DESCENDING IndexField_Mode = 3 + // The field's array values are indexed so as to support membership using + // ARRAY_CONTAINS queries. + IndexField_ARRAY_CONTAINS IndexField_Mode = 4 +) + +var IndexField_Mode_name = map[int32]string{ + 0: "MODE_UNSPECIFIED", + 2: "ASCENDING", + 3: "DESCENDING", + 4: "ARRAY_CONTAINS", +} +var IndexField_Mode_value = map[string]int32{ + "MODE_UNSPECIFIED": 0, + "ASCENDING": 2, + "DESCENDING": 3, + "ARRAY_CONTAINS": 4, +} + +func (x IndexField_Mode) String() string { + return proto.EnumName(IndexField_Mode_name, int32(x)) +} +func (IndexField_Mode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6cc09157d7fa246c, []int{0, 0} +} + +// The state of an index. During index creation, an index will be in the +// `CREATING` state. If the index is created successfully, it will transition +// to the `READY` state. If the index is not able to be created, it will +// transition to the `ERROR` state. +type Index_State int32 + +const ( + // The state is unspecified. + Index_STATE_UNSPECIFIED Index_State = 0 + // The index is being created. + // There is an active long-running operation for the index. + // The index is updated when writing a document. + // Some index data may exist. + Index_CREATING Index_State = 3 + // The index is ready to be used. + // The index is updated when writing a document. + // The index is fully populated from all stored documents it applies to. + Index_READY Index_State = 2 + // The index was being created, but something went wrong. + // There is no active long-running operation for the index, + // and the most recently finished long-running operation failed. + // The index is not updated when writing a document. + // Some index data may exist. + Index_ERROR Index_State = 5 +) + +var Index_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 3: "CREATING", + 2: "READY", + 5: "ERROR", +} +var Index_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 3, + "READY": 2, + "ERROR": 5, +} + +func (x Index_State) String() string { + return proto.EnumName(Index_State_name, int32(x)) +} +func (Index_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6cc09157d7fa246c, []int{1, 0} +} + +// A field of an index. +type IndexField struct { + // The path of the field. Must match the field path specification described + // by [google.firestore.v1beta1.Document.fields][fields]. + // Special field path `__name__` may be used by itself or at the end of a + // path. `__type__` may be used only at the end of path. + FieldPath string `protobuf:"bytes,1,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + // The field's mode. 
+ Mode IndexField_Mode `protobuf:"varint,2,opt,name=mode,proto3,enum=google.firestore.admin.v1beta1.IndexField_Mode" json:"mode,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IndexField) Reset() { *m = IndexField{} } +func (m *IndexField) String() string { return proto.CompactTextString(m) } +func (*IndexField) ProtoMessage() {} +func (*IndexField) Descriptor() ([]byte, []int) { + return fileDescriptor_index_6cc09157d7fa246c, []int{0} +} +func (m *IndexField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IndexField.Unmarshal(m, b) +} +func (m *IndexField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IndexField.Marshal(b, m, deterministic) +} +func (dst *IndexField) XXX_Merge(src proto.Message) { + xxx_messageInfo_IndexField.Merge(dst, src) +} +func (m *IndexField) XXX_Size() int { + return xxx_messageInfo_IndexField.Size(m) +} +func (m *IndexField) XXX_DiscardUnknown() { + xxx_messageInfo_IndexField.DiscardUnknown(m) +} + +var xxx_messageInfo_IndexField proto.InternalMessageInfo + +func (m *IndexField) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +func (m *IndexField) GetMode() IndexField_Mode { + if m != nil { + return m.Mode + } + return IndexField_MODE_UNSPECIFIED +} + +// An index definition. +type Index struct { + // The resource name of the index. + // Output only. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The collection ID to which this index applies. Required. + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // The fields to index. + Fields []*IndexField `protobuf:"bytes,3,rep,name=fields,proto3" json:"fields,omitempty"` + // The state of the index. + // Output only. 
+ State Index_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.firestore.admin.v1beta1.Index_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index) Reset() { *m = Index{} } +func (m *Index) String() string { return proto.CompactTextString(m) } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_index_6cc09157d7fa246c, []int{1} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index.Unmarshal(m, b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) +} +func (dst *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(dst, src) +} +func (m *Index) XXX_Size() int { + return xxx_messageInfo_Index.Size(m) +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Index) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *Index) GetFields() []*IndexField { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Index) GetState() Index_State { + if m != nil { + return m.State + } + return Index_STATE_UNSPECIFIED +} + +func init() { + proto.RegisterType((*IndexField)(nil), "google.firestore.admin.v1beta1.IndexField") + proto.RegisterType((*Index)(nil), "google.firestore.admin.v1beta1.Index") + proto.RegisterEnum("google.firestore.admin.v1beta1.IndexField_Mode", IndexField_Mode_name, IndexField_Mode_value) + proto.RegisterEnum("google.firestore.admin.v1beta1.Index_State", Index_State_name, Index_State_value) +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta1/index.proto", fileDescriptor_index_6cc09157d7fa246c) +} + +var fileDescriptor_index_6cc09157d7fa246c = []byte{ + // 438 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x5d, 0x8b, 0xd3, 0x40, + 0x14, 0x35, 0x69, 0x52, 0xcc, 0x75, 0xb7, 0xc4, 0x41, 0xa1, 0x88, 0x4a, 0x89, 0x3e, 0x94, 0x15, + 0x26, 0x74, 0x7d, 0xf4, 0x69, 0xf2, 0xd1, 0x92, 0x87, 0x4d, 0xcb, 0xa4, 0x0a, 0xeb, 0x4b, 0x99, + 0x6d, 0x66, 0xb3, 0x81, 0x74, 0xa6, 0x34, 0xa3, 0xf8, 0x1f, 0xfc, 0x17, 0x82, 0x2f, 0xfe, 0x0b, + 0xff, 0x99, 0xcc, 0x24, 0x74, 0x41, 0xd0, 0xed, 0xdb, 0x3d, 0x93, 0x73, 0xce, 0x3d, 0xf7, 0xe6, + 0xc2, 0x45, 0x25, 0x65, 0xd5, 0xf0, 0xf0, 0xb6, 0x3e, 0xf0, 0x56, 0xc9, 0x03, 0x0f, 0x59, 0xb9, + 0xab, 0x45, 0xf8, 0x75, 0x76, 0xc3, 0x15, 0x9b, 0x85, 0xb5, 0x28, 0xf9, 0x37, 0xbc, 0x3f, 0x48, + 0x25, 0xd1, 0xeb, 0x8e, 0x8b, 0x8f, 0x5c, 0x6c, 0xb8, 0xb8, 0xe7, 0xbe, 0x78, 0xd9, 0x7b, 0xb1, + 0x7d, 0x1d, 0x32, 0x21, 0xa4, 0x62, 0xaa, 0x96, 0xa2, 0xed, 0xd4, 0xc1, 0x6f, 0x0b, 0x20, 0xd3, + 0x6e, 0xf3, 0x9a, 0x37, 0x25, 0x7a, 0x05, 0x70, 0xab, 0x8b, 0xcd, 0x9e, 0xa9, 0xbb, 0xb1, 0x35, + 0xb1, 0xa6, 0x1e, 0xf5, 0xcc, 0xcb, 0x8a, 0xa9, 0x3b, 0x14, 0x83, 0xb3, 0x93, 0x25, 0x1f, 0xdb, + 0x13, 0x6b, 0x3a, 0xba, 0x0c, 0xf1, 0xff, 0x5b, 0xe3, 0x7b, 0x63, 0x7c, 0x25, 0x4b, 0x4e, 0x8d, + 0x38, 0x58, 0x82, 0xa3, 0x11, 0x7a, 0x06, 0xfe, 0xd5, 0x32, 0x49, 0x37, 0x1f, 0xf3, 0x62, 0x95, + 0xc6, 0xd9, 0x3c, 0x4b, 0x13, 0xff, 0x11, 0x3a, 0x07, 0x8f, 0x14, 0x71, 0x9a, 0x27, 0x59, 0xbe, + 0xf0, 0x6d, 0x34, 0x02, 0x48, 0xd2, 0x23, 0x1e, 0x20, 0x04, 0x23, 0x42, 0x29, 0xb9, 
0xde, 0xc4, + 0xcb, 0x7c, 0x4d, 0xb2, 0xbc, 0xf0, 0x9d, 0xe0, 0xbb, 0x0d, 0xae, 0x69, 0x85, 0x10, 0x38, 0x82, + 0xed, 0x78, 0x1f, 0xdc, 0xd4, 0xe8, 0x0d, 0x9c, 0x6f, 0x65, 0xd3, 0xf0, 0xad, 0x1e, 0x7b, 0x53, + 0x97, 0x26, 0xbc, 0x47, 0xcf, 0xee, 0x1f, 0xb3, 0x12, 0x45, 0x30, 0x34, 0x53, 0xb6, 0xe3, 0xc1, + 0x64, 0x30, 0x7d, 0x72, 0x79, 0x71, 0xfa, 0x68, 0xb4, 0x57, 0x22, 0x02, 0x6e, 0xab, 0x98, 0xe2, + 0xe3, 0xa1, 0xd9, 0xce, 0xbb, 0x93, 0x2c, 0x70, 0xa1, 0x25, 0xb4, 0x53, 0x06, 0x11, 0xb8, 0x06, + 0xa3, 0xe7, 0xf0, 0xb4, 0x58, 0x93, 0xf5, 0xdf, 0xcb, 0x39, 0x83, 0xc7, 0x31, 0x4d, 0xc9, 0xba, + 0xdb, 0x85, 0x07, 0x2e, 0x4d, 0x49, 0x72, 0xed, 0xdb, 0xba, 0x4c, 0x29, 0x5d, 0x52, 0xdf, 0x8d, + 0x7e, 0x5a, 0x10, 0x6c, 0xe5, 0xee, 0x81, 0xee, 0x51, 0xf7, 0xd7, 0x57, 0xfa, 0x08, 0x56, 0xd6, + 0xe7, 0xb8, 0x67, 0x57, 0xb2, 0x61, 0xa2, 0xc2, 0xf2, 0x50, 0x85, 0x15, 0x17, 0xe6, 0x44, 0xc2, + 0xee, 0x13, 0xdb, 0xd7, 0xed, 0xbf, 0xee, 0xf1, 0x83, 0x41, 0x3f, 0x6c, 0x67, 0x11, 0xcf, 0x8b, + 0x5f, 0xf6, 0xdb, 0x45, 0x67, 0x16, 0x37, 0xf2, 0x4b, 0x89, 0xe7, 0xc7, 0x00, 0xc4, 0x04, 0xf8, + 0x34, 0x8b, 0xb4, 0xe6, 0x66, 0x68, 0xdc, 0xdf, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x0c, 0xbf, + 0x61, 0xbe, 0xec, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/location.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/location.pb.go new file mode 100644 index 0000000..8ae4a43 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta1/location.pb.go @@ -0,0 +1,78 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1beta1/location.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. 
+type LocationMetadata struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocationMetadata) Reset() { *m = LocationMetadata{} } +func (m *LocationMetadata) String() string { return proto.CompactTextString(m) } +func (*LocationMetadata) ProtoMessage() {} +func (*LocationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_location_c0b3dc6e559decde, []int{0} +} +func (m *LocationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocationMetadata.Unmarshal(m, b) +} +func (m *LocationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocationMetadata.Marshal(b, m, deterministic) +} +func (dst *LocationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocationMetadata.Merge(dst, src) +} +func (m *LocationMetadata) XXX_Size() int { + return xxx_messageInfo_LocationMetadata.Size(m) +} +func (m *LocationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_LocationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_LocationMetadata proto.InternalMessageInfo + +func init() { + proto.RegisterType((*LocationMetadata)(nil), "google.firestore.admin.v1beta1.LocationMetadata") +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta1/location.proto", fileDescriptor_location_c0b3dc6e559decde) +} + +var fileDescriptor_location_c0b3dc6e559decde = []byte{ + // 218 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x8f, 0x31, 0x4b, 0xc0, 0x30, + 0x10, 0x85, 0x69, 0x11, 0x87, 0x82, 0x20, 0x9d, 0xa4, 0x88, 0x43, 0x71, 0x35, 0x47, 0x71, 0x74, + 0xb2, 0x85, 0x76, 0x51, 0x28, 0x08, 0x0e, 0x6e, 0xd7, 0x36, 0x86, 0x40, 0x9a, 0x0b, 0xe9, 0x29, + 0xf8, 0x77, 0x9c, 0xc4, 0x5f, 0x29, 0x4d, 0x62, 0x37, 0x1d, 0xc3, 0x7b, 0xdf, 0xf7, 0x2e, 0xc5, + 0x8d, 0x22, 0x52, 0x46, 0xc2, 0xab, 0xf6, 0x72, 0x63, 0xf2, 0x12, 0x70, 0x59, 0xb5, 0x85, 0xf7, + 0x66, 0x92, 0x8c, 0x0d, 0x18, 0x9a, 0x91, 0x35, 0x59, 0xe1, 0x3c, 0x31, 0x95, 0x57, 0xb1, 0x2e, + 0x8e, 0xba, 0x08, 0x75, 0x91, 0xea, 0xd5, 0x45, 0xd2, 0xf1, 0x87, 0x93, 0x60, 0x90, 0x8d, 0x55, + 0x91, 0xac, 0x2e, 0x53, 0x82, 0x4e, 0x03, 0x5a, 0x4b, 0x1c, 0xb4, 0x5b, 0x4c, 0xeb, 0xb2, 0x38, + 0x7f, 0x48, 0x4b, 0x8f, 0x92, 0x71, 0x41, 0xc6, 0xf6, 0x2b, 0x2b, 0xea, 0x99, 0x56, 0xf1, 0xff, + 0x64, 0x7b, 0xf6, 0x0b, 0x8e, 0xbb, 0x69, 0xcc, 0x5e, 0xba, 0x04, 0x28, 0x32, 0x68, 0x95, 0x20, + 0xaf, 0x40, 0x49, 0x1b, 0x76, 0x20, 0x46, 0xe8, 0xf4, 0xf6, 0xd7, 0x8f, 0xef, 0xc2, 0xeb, 0x33, + 0x3f, 0x19, 0xba, 0xfe, 0xe9, 0x3b, 0xbf, 0x1e, 0xa2, 0xac, 0x33, 0xf4, 0xb6, 0x88, 0xfe, 0xb8, + 0xe1, 0x3e, 0xdc, 0xf0, 0xdc, 0xb4, 0x3b, 0x33, 0x9d, 0x06, 0xfb, 0xed, 0x4f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x76, 0x88, 0x2e, 0x88, 0x4e, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/field.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/field.pb.go new file mode 100644 index 0000000..b380814 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/field.pb.go @@ -0,0 +1,213 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/firestore/admin/v1beta2/field.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a single field in the database. +// +// Fields are grouped by their "Collection Group", which represent all +// collections in the database with the same id. +type Field struct { + // A field name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}` + // + // A field path may be a simple field name, e.g. `address` or a path to fields + // within map_value , e.g. `address.city`, + // or a special field path. The only valid special field is `*`, which + // represents any field. + // + // Field paths may be quoted using ` (backtick). The only character that needs + // to be escaped within a quoted field path is the backtick character itself, + // escaped using a backslash. Special characters in field paths that + // must be quoted include: `*`, `.`, + // ``` (backtick), `[`, `]`, as well as any ascii symbolic characters. + // + // Examples: + // (Note: Comments here are written in markdown syntax, so there is an + // additional layer of backticks to represent a code block) + // `\`address.city\`` represents a field named `address.city`, not the map key + // `city` in the field `address`. + // `\`*\`` represents a field named `*`, not any field. + // + // A special `Field` contains the default indexing settings for all fields. + // This field's resource name is: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*` + // Indexes defined on this `Field` will be applied to all fields which do not + // have their own `Field` index configuration. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The index configuration for this field. If unset, field indexing will + // revert to the configuration defined by the `ancestor_field`. To + // explicitly remove all indexes for this field, specify an index config + // with an empty list of indexes. 
+ IndexConfig *Field_IndexConfig `protobuf:"bytes,2,opt,name=index_config,json=indexConfig,proto3" json:"index_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field) Reset() { *m = Field{} } +func (m *Field) String() string { return proto.CompactTextString(m) } +func (*Field) ProtoMessage() {} +func (*Field) Descriptor() ([]byte, []int) { + return fileDescriptor_field_f272e136f5db2293, []int{0} +} +func (m *Field) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field.Unmarshal(m, b) +} +func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field.Marshal(b, m, deterministic) +} +func (dst *Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field.Merge(dst, src) +} +func (m *Field) XXX_Size() int { + return xxx_messageInfo_Field.Size(m) +} +func (m *Field) XXX_DiscardUnknown() { + xxx_messageInfo_Field.DiscardUnknown(m) +} + +var xxx_messageInfo_Field proto.InternalMessageInfo + +func (m *Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Field) GetIndexConfig() *Field_IndexConfig { + if m != nil { + return m.IndexConfig + } + return nil +} + +// The index configuration for this field. +type Field_IndexConfig struct { + // The indexes supported for this field. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // Output only. + // When true, the `Field`'s index configuration is set from the + // configuration specified by the `ancestor_field`. + // When false, the `Field`'s index configuration is defined explicitly. + UsesAncestorConfig bool `protobuf:"varint,2,opt,name=uses_ancestor_config,json=usesAncestorConfig,proto3" json:"uses_ancestor_config,omitempty"` + // Output only. + // Specifies the resource name of the `Field` from which this field's + // index configuration is set (when `uses_ancestor_config` is true), + // or from which it *would* be set if this field had no index configuration + // (when `uses_ancestor_config` is false). + AncestorField string `protobuf:"bytes,3,opt,name=ancestor_field,json=ancestorField,proto3" json:"ancestor_field,omitempty"` + // Output only + // When true, the `Field`'s index configuration is in the process of being + // reverted. Once complete, the index config will transition to the same + // state as the field specified by `ancestor_field`, at which point + // `uses_ancestor_config` will be `true` and `reverting` will be `false`. 
+ Reverting bool `protobuf:"varint,4,opt,name=reverting,proto3" json:"reverting,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field_IndexConfig) Reset() { *m = Field_IndexConfig{} } +func (m *Field_IndexConfig) String() string { return proto.CompactTextString(m) } +func (*Field_IndexConfig) ProtoMessage() {} +func (*Field_IndexConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_field_f272e136f5db2293, []int{0, 0} +} +func (m *Field_IndexConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field_IndexConfig.Unmarshal(m, b) +} +func (m *Field_IndexConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field_IndexConfig.Marshal(b, m, deterministic) +} +func (dst *Field_IndexConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field_IndexConfig.Merge(dst, src) +} +func (m *Field_IndexConfig) XXX_Size() int { + return xxx_messageInfo_Field_IndexConfig.Size(m) +} +func (m *Field_IndexConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Field_IndexConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Field_IndexConfig proto.InternalMessageInfo + +func (m *Field_IndexConfig) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *Field_IndexConfig) GetUsesAncestorConfig() bool { + if m != nil { + return m.UsesAncestorConfig + } + return false +} + +func (m *Field_IndexConfig) GetAncestorField() string { + if m != nil { + return m.AncestorField + } + return "" +} + +func (m *Field_IndexConfig) GetReverting() bool { + if m != nil { + return m.Reverting + } + return false +} + +func init() { + proto.RegisterType((*Field)(nil), "google.firestore.admin.v1beta2.Field") + proto.RegisterType((*Field_IndexConfig)(nil), "google.firestore.admin.v1beta2.Field.IndexConfig") +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta2/field.proto", fileDescriptor_field_f272e136f5db2293) +} + +var fileDescriptor_field_f272e136f5db2293 = []byte{ + // 346 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xcf, 0x4a, 0xf3, 0x40, + 0x14, 0xc5, 0x99, 0xb4, 0xdf, 0xa7, 0x9d, 0xa8, 0x8b, 0xc1, 0x45, 0x28, 0x45, 0x4a, 0xb1, 0x50, + 0x5c, 0xcc, 0xd8, 0xba, 0x74, 0x21, 0x6d, 0xa0, 0xc5, 0x5d, 0x89, 0xe2, 0xc2, 0x4d, 0x99, 0x36, + 0xd3, 0x61, 0x20, 0x9d, 0x5b, 0x92, 0xb4, 0xf8, 0x3c, 0x82, 0x1b, 0xdf, 0xc0, 0x07, 0xf0, 0xbd, + 0x24, 0x77, 0xd2, 0x3f, 0x1b, 0xcd, 0x2e, 0x33, 0xe7, 0x9c, 0xdf, 0x3d, 0xb9, 0x43, 0x6f, 0x34, + 0x80, 0x4e, 0x94, 0x58, 0x9a, 0x54, 0x65, 0x39, 0xa4, 0x4a, 0xc8, 0x78, 0x65, 0xac, 0xd8, 0xf6, + 0xe7, 0x2a, 0x97, 0x03, 0xb1, 0x34, 0x2a, 0x89, 0xf9, 0x3a, 0x85, 0x1c, 0xd8, 0x95, 0xf3, 0xf2, + 0xbd, 0x97, 0xa3, 0x97, 0x97, 0xde, 0x66, 0x15, 0xcb, 0xd8, 0x58, 0xbd, 0x39, 0x56, 0xb3, 0x55, + 0x7a, 0xe5, 0xda, 0x08, 0x69, 0x2d, 0xe4, 0x32, 0x37, 0x60, 0x33, 0xa7, 0x76, 0xbe, 0x3c, 0xfa, + 0x6f, 0x5c, 0x4c, 0x66, 0x8c, 0xd6, 0xad, 0x5c, 0xa9, 0x80, 0xb4, 0x49, 0xaf, 0x11, 0xe1, 0x37, + 0x7b, 0xa6, 0x67, 0x88, 0x9a, 0x2d, 0xc0, 0x2e, 0x8d, 0x0e, 0xbc, 0x36, 0xe9, 0xf9, 0x83, 0x3e, + 0xff, 0xbb, 0x1e, 0x47, 0x20, 0x7f, 0x2c, 0x92, 0x21, 0x06, 0x23, 0xdf, 0x1c, 0x0e, 0xcd, 0x6f, + 0x42, 0xfd, 0x23, 0x91, 0x3d, 0xd0, 0x13, 0x94, 0x55, 0x16, 0x90, 0x76, 0xad, 0xe7, 0x0f, 0xba, + 0x55, 0x03, 0x30, 0x1d, 0xed, 0x52, 0xec, 0x96, 0x5e, 0x6e, 0x32, 0x95, 0xcd, 0xa4, 0x5d, 0xa0, + 0xfd, 0xb8, 0xee, 0x69, 0xc4, 0x0a, 0x6d, 0x58, 0x4a, 0xe5, 0xc8, 0x2e, 0xbd, 
0xd8, 0x9b, 0x71, + 0xf1, 0x41, 0x0d, 0x7f, 0xfb, 0x7c, 0x77, 0xeb, 0x76, 0xd2, 0xa2, 0x8d, 0x54, 0x6d, 0x55, 0x9a, + 0x1b, 0xab, 0x83, 0x3a, 0xd2, 0x0e, 0x17, 0xa3, 0x0f, 0x42, 0x3b, 0x0b, 0x58, 0x55, 0x94, 0x1d, + 0x51, 0x64, 0x4d, 0x8b, 0x75, 0x4f, 0xc9, 0x6b, 0x58, 0xba, 0x35, 0x24, 0xd2, 0x6a, 0x0e, 0xa9, + 0x16, 0x5a, 0x59, 0x7c, 0x0c, 0xe1, 0x24, 0xb9, 0x36, 0xd9, 0x6f, 0x2f, 0x7b, 0x8f, 0xa7, 0x77, + 0xaf, 0x3e, 0x09, 0xc7, 0x4f, 0x9f, 0xde, 0xf5, 0xc4, 0xc1, 0xc2, 0x04, 0x36, 0x31, 0x1f, 0xef, + 0x0b, 0x0c, 0xb1, 0xc0, 0x4b, 0x7f, 0x54, 0x64, 0xe6, 0xff, 0x91, 0x7e, 0xf7, 0x13, 0x00, 0x00, + 0xff, 0xff, 0xd4, 0x92, 0xf0, 0x86, 0x82, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/firestore_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/firestore_admin.pb.go new file mode 100644 index 0000000..51ac15d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/firestore_admin.pb.go @@ -0,0 +1,1120 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/admin/v1beta2/firestore_admin.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request for [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta2.FirestoreAdmin.CreateIndex]. +type CreateIndexRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The composite index to create. 
+ Index *Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateIndexRequest) Reset() { *m = CreateIndexRequest{} } +func (m *CreateIndexRequest) String() string { return proto.CompactTextString(m) } +func (*CreateIndexRequest) ProtoMessage() {} +func (*CreateIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{0} +} +func (m *CreateIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateIndexRequest.Unmarshal(m, b) +} +func (m *CreateIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateIndexRequest.Marshal(b, m, deterministic) +} +func (dst *CreateIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateIndexRequest.Merge(dst, src) +} +func (m *CreateIndexRequest) XXX_Size() int { + return xxx_messageInfo_CreateIndexRequest.Size(m) +} +func (m *CreateIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateIndexRequest proto.InternalMessageInfo + +func (m *CreateIndexRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateIndexRequest) GetIndex() *Index { + if m != nil { + return m.Index + } + return nil +} + +// The request for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta2.FirestoreAdmin.ListIndexes]. +type ListIndexesRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter to apply to list results. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token, returned from a previous call to + // [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta2.FirestoreAdmin.ListIndexes], that may be used to get the next + // page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesRequest) Reset() { *m = ListIndexesRequest{} } +func (m *ListIndexesRequest) String() string { return proto.CompactTextString(m) } +func (*ListIndexesRequest) ProtoMessage() {} +func (*ListIndexesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{1} +} +func (m *ListIndexesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesRequest.Unmarshal(m, b) +} +func (m *ListIndexesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesRequest.Marshal(b, m, deterministic) +} +func (dst *ListIndexesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesRequest.Merge(dst, src) +} +func (m *ListIndexesRequest) XXX_Size() int { + return xxx_messageInfo_ListIndexesRequest.Size(m) +} +func (m *ListIndexesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesRequest proto.InternalMessageInfo + +func (m *ListIndexesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListIndexesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListIndexesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListIndexesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for [FirestoreAdmin.ListIndexes][google.firestore.admin.v1beta2.FirestoreAdmin.ListIndexes]. +type ListIndexesResponse struct { + // The requested indexes. + Indexes []*Index `protobuf:"bytes,1,rep,name=indexes,proto3" json:"indexes,omitempty"` + // A page token that may be used to request another page of results. If blank, + // this is the last page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListIndexesResponse) Reset() { *m = ListIndexesResponse{} } +func (m *ListIndexesResponse) String() string { return proto.CompactTextString(m) } +func (*ListIndexesResponse) ProtoMessage() {} +func (*ListIndexesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{2} +} +func (m *ListIndexesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListIndexesResponse.Unmarshal(m, b) +} +func (m *ListIndexesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListIndexesResponse.Marshal(b, m, deterministic) +} +func (dst *ListIndexesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListIndexesResponse.Merge(dst, src) +} +func (m *ListIndexesResponse) XXX_Size() int { + return xxx_messageInfo_ListIndexesResponse.Size(m) +} +func (m *ListIndexesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListIndexesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListIndexesResponse proto.InternalMessageInfo + +func (m *ListIndexesResponse) GetIndexes() []*Index { + if m != nil { + return m.Indexes + } + return nil +} + +func (m *ListIndexesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [FirestoreAdmin.GetIndex][google.firestore.admin.v1beta2.FirestoreAdmin.GetIndex]. +type GetIndexRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIndexRequest) Reset() { *m = GetIndexRequest{} } +func (m *GetIndexRequest) String() string { return proto.CompactTextString(m) } +func (*GetIndexRequest) ProtoMessage() {} +func (*GetIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{3} +} +func (m *GetIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIndexRequest.Unmarshal(m, b) +} +func (m *GetIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIndexRequest.Marshal(b, m, deterministic) +} +func (dst *GetIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIndexRequest.Merge(dst, src) +} +func (m *GetIndexRequest) XXX_Size() int { + return xxx_messageInfo_GetIndexRequest.Size(m) +} +func (m *GetIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIndexRequest proto.InternalMessageInfo + +func (m *GetIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.DeleteIndex][google.firestore.admin.v1beta2.FirestoreAdmin.DeleteIndex]. 
+type DeleteIndexRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteIndexRequest) Reset() { *m = DeleteIndexRequest{} } +func (m *DeleteIndexRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteIndexRequest) ProtoMessage() {} +func (*DeleteIndexRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{4} +} +func (m *DeleteIndexRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteIndexRequest.Unmarshal(m, b) +} +func (m *DeleteIndexRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteIndexRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteIndexRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteIndexRequest.Merge(dst, src) +} +func (m *DeleteIndexRequest) XXX_Size() int { + return xxx_messageInfo_DeleteIndexRequest.Size(m) +} +func (m *DeleteIndexRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteIndexRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteIndexRequest proto.InternalMessageInfo + +func (m *DeleteIndexRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField]. +type UpdateFieldRequest struct { + // The field to be updated. + Field *Field `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // A mask, relative to the field. If specified, only configuration specified + // by this field_mask will be updated in the field. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateFieldRequest) Reset() { *m = UpdateFieldRequest{} } +func (m *UpdateFieldRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateFieldRequest) ProtoMessage() {} +func (*UpdateFieldRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{5} +} +func (m *UpdateFieldRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateFieldRequest.Unmarshal(m, b) +} +func (m *UpdateFieldRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateFieldRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateFieldRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateFieldRequest.Merge(dst, src) +} +func (m *UpdateFieldRequest) XXX_Size() int { + return xxx_messageInfo_UpdateFieldRequest.Size(m) +} +func (m *UpdateFieldRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateFieldRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateFieldRequest proto.InternalMessageInfo + +func (m *UpdateFieldRequest) GetField() *Field { + if m != nil { + return m.Field + } + return nil +} + +func (m *UpdateFieldRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request for [FirestoreAdmin.GetField][google.firestore.admin.v1beta2.FirestoreAdmin.GetField]. 
+type GetFieldRequest struct { + // A name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_id}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetFieldRequest) Reset() { *m = GetFieldRequest{} } +func (m *GetFieldRequest) String() string { return proto.CompactTextString(m) } +func (*GetFieldRequest) ProtoMessage() {} +func (*GetFieldRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{6} +} +func (m *GetFieldRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetFieldRequest.Unmarshal(m, b) +} +func (m *GetFieldRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetFieldRequest.Marshal(b, m, deterministic) +} +func (dst *GetFieldRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetFieldRequest.Merge(dst, src) +} +func (m *GetFieldRequest) XXX_Size() int { + return xxx_messageInfo_GetFieldRequest.Size(m) +} +func (m *GetFieldRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetFieldRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetFieldRequest proto.InternalMessageInfo + +func (m *GetFieldRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields]. +type ListFieldsRequest struct { + // A parent name of the form + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The filter to apply to list results. Currently, + // [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token, returned from a previous call to + // [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields], that may be used to get the next + // page of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFieldsRequest) Reset() { *m = ListFieldsRequest{} } +func (m *ListFieldsRequest) String() string { return proto.CompactTextString(m) } +func (*ListFieldsRequest) ProtoMessage() {} +func (*ListFieldsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{7} +} +func (m *ListFieldsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFieldsRequest.Unmarshal(m, b) +} +func (m *ListFieldsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFieldsRequest.Marshal(b, m, deterministic) +} +func (dst *ListFieldsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFieldsRequest.Merge(dst, src) +} +func (m *ListFieldsRequest) XXX_Size() int { + return xxx_messageInfo_ListFieldsRequest.Size(m) +} +func (m *ListFieldsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListFieldsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFieldsRequest proto.InternalMessageInfo + +func (m *ListFieldsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListFieldsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListFieldsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListFieldsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields]. +type ListFieldsResponse struct { + // The requested fields. + Fields []*Field `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` + // A page token that may be used to request another page of results. If blank, + // this is the last page. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListFieldsResponse) Reset() { *m = ListFieldsResponse{} } +func (m *ListFieldsResponse) String() string { return proto.CompactTextString(m) } +func (*ListFieldsResponse) ProtoMessage() {} +func (*ListFieldsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{8} +} +func (m *ListFieldsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListFieldsResponse.Unmarshal(m, b) +} +func (m *ListFieldsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListFieldsResponse.Marshal(b, m, deterministic) +} +func (dst *ListFieldsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListFieldsResponse.Merge(dst, src) +} +func (m *ListFieldsResponse) XXX_Size() int { + return xxx_messageInfo_ListFieldsResponse.Size(m) +} +func (m *ListFieldsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListFieldsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListFieldsResponse proto.InternalMessageInfo + +func (m *ListFieldsResponse) GetFields() []*Field { + if m != nil { + return m.Fields + } + return nil +} + +func (m *ListFieldsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ExportDocuments]. +type ExportDocumentsRequest struct { + // Database to export. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to export. Unspecified means all collections. + CollectionIds []string `protobuf:"bytes,2,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The output URI. Currently only supports Google Cloud Storage URIs of the + // form: `gs://BUCKET_NAME[/NAMESPACE_PATH]`, where `BUCKET_NAME` is the name + // of the Google Cloud Storage bucket and `NAMESPACE_PATH` is an optional + // Google Cloud Storage namespace path. When + // choosing a name, be sure to consider Google Cloud Storage naming + // guidelines: https://cloud.google.com/storage/docs/naming. + // If the URI is a bucket (without a namespace path), a prefix will be + // generated based on the start time. 
+ OutputUriPrefix string `protobuf:"bytes,3,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsRequest) Reset() { *m = ExportDocumentsRequest{} } +func (m *ExportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsRequest) ProtoMessage() {} +func (*ExportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{9} +} +func (m *ExportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsRequest.Unmarshal(m, b) +} +func (m *ExportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsRequest.Merge(dst, src) +} +func (m *ExportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsRequest.Size(m) +} +func (m *ExportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsRequest proto.InternalMessageInfo + +func (m *ExportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ExportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsRequest) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// The request for [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ImportDocuments]. +type ImportDocumentsRequest struct { + // Database to import into. Should be of the form: + // `projects/{project_id}/databases/{database_id}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Which collection ids to import. Unspecified means all collections included + // in the import. + CollectionIds []string `protobuf:"bytes,2,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Location of the exported files. + // This must match the output_uri_prefix of an ExportDocumentsResponse from + // an export that has completed successfully. + // See: + // [google.firestore.admin.v1beta2.ExportDocumentsResponse.output_uri_prefix][google.firestore.admin.v1beta2.ExportDocumentsResponse.output_uri_prefix]. 
+ InputUriPrefix string `protobuf:"bytes,3,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsRequest) Reset() { *m = ImportDocumentsRequest{} } +func (m *ImportDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsRequest) ProtoMessage() {} +func (*ImportDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_admin_549fe3017350a705, []int{10} +} +func (m *ImportDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsRequest.Unmarshal(m, b) +} +func (m *ImportDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsRequest.Merge(dst, src) +} +func (m *ImportDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsRequest.Size(m) +} +func (m *ImportDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsRequest proto.InternalMessageInfo + +func (m *ImportDocumentsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ImportDocumentsRequest) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsRequest) GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +func init() { + proto.RegisterType((*CreateIndexRequest)(nil), "google.firestore.admin.v1beta2.CreateIndexRequest") + proto.RegisterType((*ListIndexesRequest)(nil), "google.firestore.admin.v1beta2.ListIndexesRequest") + proto.RegisterType((*ListIndexesResponse)(nil), "google.firestore.admin.v1beta2.ListIndexesResponse") + proto.RegisterType((*GetIndexRequest)(nil), "google.firestore.admin.v1beta2.GetIndexRequest") + proto.RegisterType((*DeleteIndexRequest)(nil), "google.firestore.admin.v1beta2.DeleteIndexRequest") + proto.RegisterType((*UpdateFieldRequest)(nil), "google.firestore.admin.v1beta2.UpdateFieldRequest") + proto.RegisterType((*GetFieldRequest)(nil), "google.firestore.admin.v1beta2.GetFieldRequest") + proto.RegisterType((*ListFieldsRequest)(nil), "google.firestore.admin.v1beta2.ListFieldsRequest") + proto.RegisterType((*ListFieldsResponse)(nil), "google.firestore.admin.v1beta2.ListFieldsResponse") + proto.RegisterType((*ExportDocumentsRequest)(nil), "google.firestore.admin.v1beta2.ExportDocumentsRequest") + proto.RegisterType((*ImportDocumentsRequest)(nil), "google.firestore.admin.v1beta2.ImportDocumentsRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FirestoreAdminClient is the client API for FirestoreAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FirestoreAdminClient interface { + // Creates a composite index. 
This returns a [google.longrunning.Operation][google.longrunning.Operation] + // which may be used to track the status of the creation. The metadata for + // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1beta2.IndexOperationMetadata]. + CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists composite indexes. + ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) + // Gets a composite index. + GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) + // Deletes a composite index. + DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets the metadata and configuration for a Field. + GetField(ctx context.Context, in *GetFieldRequest, opts ...grpc.CallOption) (*Field, error) + // Updates a field configuration. Currently, field updates apply only to + // single field index configuration. However, calls to + // [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField] should provide a field mask to avoid + // changing any configuration that the caller isn't aware of. The field mask + // should be specified as: `{ paths: "index_config" }`. + // + // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to + // track the status of the field update. The metadata for + // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1beta2.FieldOperationMetadata]. + // + // To configure the default field settings for the database, use + // the special `Field` with resource name: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`. + UpdateField(ctx context.Context, in *UpdateFieldRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Lists the field configuration and metadata for this database. + // + // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + ListFields(ctx context.Context, in *ListFieldsRequest, opts ...grpc.CallOption) (*ListFieldsResponse, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. 
If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. + ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) +} + +type firestoreAdminClient struct { + cc *grpc.ClientConn +} + +func NewFirestoreAdminClient(cc *grpc.ClientConn) FirestoreAdminClient { + return &firestoreAdminClient{cc} +} + +func (c *firestoreAdminClient) CreateIndex(ctx context.Context, in *CreateIndexRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/CreateIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ListIndexes(ctx context.Context, in *ListIndexesRequest, opts ...grpc.CallOption) (*ListIndexesResponse, error) { + out := new(ListIndexesResponse) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/ListIndexes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) GetIndex(ctx context.Context, in *GetIndexRequest, opts ...grpc.CallOption) (*Index, error) { + out := new(Index) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/GetIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) DeleteIndex(ctx context.Context, in *DeleteIndexRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/DeleteIndex", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) GetField(ctx context.Context, in *GetFieldRequest, opts ...grpc.CallOption) (*Field, error) { + out := new(Field) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/GetField", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) UpdateField(ctx context.Context, in *UpdateFieldRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/UpdateField", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ListFields(ctx context.Context, in *ListFieldsRequest, opts ...grpc.CallOption) (*ListFieldsResponse, error) { + out := new(ListFieldsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/ListFields", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ExportDocuments(ctx context.Context, in *ExportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/ExportDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreAdminClient) ImportDocuments(ctx context.Context, in *ImportDocumentsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.firestore.admin.v1beta2.FirestoreAdmin/ImportDocuments", in, out, opts...) 
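Aside for reviewers of this vendored client (not part of the generated file): a minimal sketch of how the FirestoreAdminClient defined above could be driven to start an import and report the resulting long-running operation. The endpoint wiring, the project/database name, and the bucket prefix are assumptions for illustration only, and real calls against firestore.googleapis.com would also need OAuth2 per-RPC credentials, which are omitted here.

    package main

    import (
        "context"
        "fmt"
        "log"

        admin "google.golang.org/genproto/googleapis/firestore/admin/v1beta2"
        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials"
    )

    func main() {
        ctx := context.Background()

        // TLS-only connection; per-RPC OAuth2 credentials are left out of this sketch.
        conn, err := grpc.Dial("firestore.googleapis.com:443",
            grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")))
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()

        // NewFirestoreAdminClient and ImportDocumentsRequest come from the generated
        // code in this patch; the resource names below are hypothetical.
        client := admin.NewFirestoreAdminClient(conn)
        op, err := client.ImportDocuments(ctx, &admin.ImportDocumentsRequest{
            Name:           "projects/my-project/databases/(default)",
            InputUriPrefix: "gs://my-bucket/my-export-prefix",
        })
        if err != nil {
            log.Fatal(err)
        }

        // ImportDocuments returns a google.longrunning.Operation whose name can be
        // polled through the Operations API to track completion.
        fmt.Println("started import operation:", op.GetName())
    }
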
+ if err != nil { + return nil, err + } + return out, nil +} + +// FirestoreAdminServer is the server API for FirestoreAdmin service. +type FirestoreAdminServer interface { + // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] + // which may be used to track the status of the creation. The metadata for + // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1beta2.IndexOperationMetadata]. + CreateIndex(context.Context, *CreateIndexRequest) (*longrunning.Operation, error) + // Lists composite indexes. + ListIndexes(context.Context, *ListIndexesRequest) (*ListIndexesResponse, error) + // Gets a composite index. + GetIndex(context.Context, *GetIndexRequest) (*Index, error) + // Deletes a composite index. + DeleteIndex(context.Context, *DeleteIndexRequest) (*empty.Empty, error) + // Gets the metadata and configuration for a Field. + GetField(context.Context, *GetFieldRequest) (*Field, error) + // Updates a field configuration. Currently, field updates apply only to + // single field index configuration. However, calls to + // [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField] should provide a field mask to avoid + // changing any configuration that the caller isn't aware of. The field mask + // should be specified as: `{ paths: "index_config" }`. + // + // This call returns a [google.longrunning.Operation][google.longrunning.Operation] which may be used to + // track the status of the field update. The metadata for + // the operation will be the type [FieldOperationMetadata][google.firestore.admin.v1beta2.FieldOperationMetadata]. + // + // To configure the default field settings for the database, use + // the special `Field` with resource name: + // `projects/{project_id}/databases/{database_id}/collectionGroups/__default__/fields/*`. + UpdateField(context.Context, *UpdateFieldRequest) (*longrunning.Operation, error) + // Lists the field configuration and metadata for this database. + // + // Currently, [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] only supports listing fields + // that have been explicitly overridden. To issue this query, call + // [FirestoreAdmin.ListFields][google.firestore.admin.v1beta2.FirestoreAdmin.ListFields] with the filter set to + // `indexConfig.usesAncestorConfig:false`. + ListFields(context.Context, *ListFieldsRequest) (*ListFieldsResponse, error) + // Exports a copy of all or a subset of documents from Google Cloud Firestore + // to another storage system, such as Google Cloud Storage. Recent updates to + // documents may not be reflected in the export. The export occurs in the + // background and its progress can be monitored and managed via the + // Operation resource that is created. The output of an export may only be + // used once the associated operation is done. If an export operation is + // cancelled before completion it may leave partial data behind in Google + // Cloud Storage. + ExportDocuments(context.Context, *ExportDocumentsRequest) (*longrunning.Operation, error) + // Imports documents into Google Cloud Firestore. Existing documents with the + // same name are overwritten. The import occurs in the background and its + // progress can be monitored and managed via the Operation resource that is + // created. If an ImportDocuments operation is cancelled, it is possible + // that a subset of the data has already been imported to Cloud Firestore. 
+ ImportDocuments(context.Context, *ImportDocumentsRequest) (*longrunning.Operation, error) +} + +func RegisterFirestoreAdminServer(s *grpc.Server, srv FirestoreAdminServer) { + s.RegisterService(&_FirestoreAdmin_serviceDesc, srv) +} + +func _FirestoreAdmin_CreateIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).CreateIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/CreateIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).CreateIndex(ctx, req.(*CreateIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ListIndexes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListIndexesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ListIndexes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/ListIndexes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ListIndexes(ctx, req.(*ListIndexesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_GetIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).GetIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/GetIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).GetIndex(ctx, req.(*GetIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_DeleteIndex_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteIndexRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/DeleteIndex", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).DeleteIndex(ctx, req.(*DeleteIndexRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_GetField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetFieldRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).GetField(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/GetField", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(FirestoreAdminServer).GetField(ctx, req.(*GetFieldRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_UpdateField_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateFieldRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).UpdateField(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/UpdateField", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).UpdateField(ctx, req.(*UpdateFieldRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ListFields_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListFieldsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ListFields(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/ListFields", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ListFields(ctx, req.(*ListFieldsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ExportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ExportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/ExportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ExportDocuments(ctx, req.(*ExportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _FirestoreAdmin_ImportDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.admin.v1beta2.FirestoreAdmin/ImportDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreAdminServer).ImportDocuments(ctx, req.(*ImportDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _FirestoreAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firestore.admin.v1beta2.FirestoreAdmin", + HandlerType: (*FirestoreAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateIndex", + Handler: _FirestoreAdmin_CreateIndex_Handler, + }, + { + MethodName: "ListIndexes", + Handler: _FirestoreAdmin_ListIndexes_Handler, + }, + { + MethodName: "GetIndex", + Handler: _FirestoreAdmin_GetIndex_Handler, + }, + { + MethodName: "DeleteIndex", + Handler: _FirestoreAdmin_DeleteIndex_Handler, + }, + { + MethodName: "GetField", + Handler: _FirestoreAdmin_GetField_Handler, + }, + { + MethodName: 
"UpdateField", + Handler: _FirestoreAdmin_UpdateField_Handler, + }, + { + MethodName: "ListFields", + Handler: _FirestoreAdmin_ListFields_Handler, + }, + { + MethodName: "ExportDocuments", + Handler: _FirestoreAdmin_ExportDocuments_Handler, + }, + { + MethodName: "ImportDocuments", + Handler: _FirestoreAdmin_ImportDocuments_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/firestore/admin/v1beta2/firestore_admin.proto", +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta2/firestore_admin.proto", fileDescriptor_firestore_admin_549fe3017350a705) +} + +var fileDescriptor_firestore_admin_549fe3017350a705 = []byte{ + // 940 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x56, 0xcb, 0x6e, 0x1c, 0x45, + 0x17, 0x56, 0x8d, 0x2f, 0x7f, 0x7c, 0xe6, 0xc7, 0x26, 0x15, 0x69, 0x34, 0xea, 0x10, 0x34, 0x6a, + 0x30, 0x1a, 0xcd, 0xa2, 0x5b, 0x9e, 0xa0, 0x2c, 0x3c, 0x8a, 0x50, 0x3c, 0xbe, 0x60, 0x08, 0xc2, + 0x4c, 0x08, 0x48, 0x6c, 0x46, 0xe5, 0x99, 0x72, 0xab, 0x70, 0x4f, 0x55, 0xd3, 0x55, 0x8d, 0x4c, + 0x22, 0x13, 0xc4, 0x03, 0xb0, 0xe1, 0x0d, 0x58, 0xb0, 0x00, 0x29, 0x6c, 0xd8, 0xb0, 0x62, 0xc1, + 0x23, 0xf0, 0x0a, 0xbc, 0x01, 0x2f, 0x80, 0xea, 0xd2, 0x73, 0x27, 0xdd, 0x8e, 0x2c, 0xb1, 0xeb, + 0x3a, 0x75, 0xbe, 0x3a, 0xdf, 0x39, 0x75, 0xea, 0x3b, 0x0d, 0x6f, 0x47, 0x42, 0x44, 0x31, 0x0d, + 0xcf, 0x58, 0x4a, 0xa5, 0x12, 0x29, 0x0d, 0xc9, 0x70, 0xc4, 0x78, 0xf8, 0xe5, 0xce, 0x29, 0x55, + 0xa4, 0x3d, 0xb1, 0xf7, 0x8d, 0x3d, 0x48, 0x52, 0xa1, 0x04, 0x7e, 0xdd, 0xa2, 0x82, 0xf1, 0x6e, + 0x60, 0x77, 0x1d, 0xca, 0x7b, 0xcd, 0x9d, 0x4a, 0x12, 0x16, 0x12, 0xce, 0x85, 0x22, 0x8a, 0x09, + 0x2e, 0x2d, 0xda, 0x6b, 0x15, 0xc6, 0xa4, 0xf1, 0xb0, 0xa4, 0x2f, 0xe3, 0x43, 0x7a, 0xe1, 0x7c, + 0xdf, 0x70, 0xbe, 0xb1, 0xe0, 0x51, 0x9a, 0x71, 0xce, 0x78, 0x14, 0x8a, 0x84, 0xa6, 0x33, 0xc1, + 0x6f, 0x3b, 0x27, 0xb3, 0x3a, 0xcd, 0xce, 0x42, 0x3a, 0x4a, 0xd4, 0x57, 0x6e, 0xb3, 0x31, 0xbf, + 0x69, 0xa8, 0xf4, 0x47, 0x44, 0x9e, 0x5b, 0x0f, 0x9f, 0x01, 0xee, 0xa6, 0x94, 0x28, 0x7a, 0xac, + 0x03, 0xf7, 0xe8, 0x17, 0x19, 0x95, 0x0a, 0xd7, 0x60, 0x3d, 0x21, 0x29, 0xe5, 0xaa, 0x8e, 0x1a, + 0xa8, 0xb9, 0xd1, 0x73, 0x2b, 0xdc, 0x81, 0x35, 0x43, 0xb0, 0x5e, 0x69, 0xa0, 0x66, 0xb5, 0xbd, + 0x1d, 0xbc, 0xb8, 0x6e, 0x81, 0x3d, 0xd4, 0x62, 0xfc, 0x6f, 0x10, 0xe0, 0x87, 0x4c, 0x2a, 0x63, + 0xa4, 0xb2, 0x28, 0x56, 0x0d, 0xd6, 0xcf, 0x58, 0xac, 0x68, 0x6a, 0x82, 0x6d, 0xf4, 0xdc, 0x0a, + 0xdf, 0x86, 0x8d, 0x84, 0x44, 0xb4, 0x2f, 0xd9, 0x13, 0x5a, 0x5f, 0x69, 0xa0, 0xe6, 0x5a, 0xef, + 0x86, 0x36, 0x3c, 0x62, 0x4f, 0x28, 0xbe, 0x03, 0x60, 0x36, 0x95, 0x38, 0xa7, 0xbc, 0xbe, 0x6a, + 0x80, 0xc6, 0xfd, 0x63, 0x6d, 0xf0, 0xbf, 0x86, 0x5b, 0x33, 0x0c, 0x64, 0x22, 0xb8, 0xa4, 0xf8, + 0x1d, 0xf8, 0x1f, 0xb3, 0xa6, 0x3a, 0x6a, 0xac, 0x94, 0x4f, 0x2c, 0x47, 0xe1, 0xb7, 0x60, 0x8b, + 0xd3, 0x0b, 0xd5, 0x9f, 0x8a, 0x6d, 0x49, 0xbf, 0xa2, 0xcd, 0x27, 0xe3, 0xf8, 0xdb, 0xb0, 0x75, + 0x44, 0xd5, 0x4c, 0xa9, 0x31, 0xac, 0x72, 0x32, 0xa2, 0x2e, 0x79, 0xf3, 0xed, 0x37, 0x01, 0xef, + 0xd3, 0x98, 0xce, 0x5d, 0xca, 0x32, 0xcf, 0xef, 0x10, 0xe0, 0xc7, 0xc9, 0x90, 0x28, 0x7a, 0xa8, + 0x6f, 0x36, 0x77, 0xed, 0xc0, 0x9a, 0xb9, 0x69, 0xe3, 0x5b, 0x22, 0x1d, 0x0b, 0xb6, 0x18, 0xdc, + 0x81, 0x6a, 0x66, 0x8e, 0x34, 0x7d, 0xe2, 0xae, 0xda, 0xcb, 0x8f, 0xc8, 0x5b, 0xc9, 0x62, 0x3e, + 0x20, 0xf2, 0xbc, 0x07, 0xd6, 0x5d, 0x7f, 0xbb, 0x0c, 0x67, 0xc8, 0x2c, 0xe3, 0xfd, 0x0c, 0x6e, + 0xea, 0x8b, 0x30, 0x7e, 0xff, 0x49, 0x27, 0x3c, 0xb5, 0xbd, 0x98, 0x13, 0x70, 0x8d, 0x70, 0x5f, + 
0x47, 0xd2, 0x96, 0xb2, 0x7d, 0x60, 0x13, 0x75, 0xa0, 0xd2, 0x6d, 0xf0, 0x0c, 0x6a, 0x07, 0x17, + 0x89, 0x48, 0xd5, 0xbe, 0x18, 0x64, 0x23, 0xca, 0x95, 0x7c, 0x41, 0xad, 0xf0, 0x36, 0x6c, 0x0e, + 0x44, 0x1c, 0xd3, 0x81, 0x7e, 0xf6, 0x7d, 0x36, 0x94, 0xf5, 0x4a, 0x63, 0x45, 0x1f, 0x3a, 0xb1, + 0x1e, 0x0f, 0x25, 0x6e, 0xc1, 0x4d, 0x91, 0xa9, 0x24, 0x53, 0xfd, 0x2c, 0x65, 0xfd, 0x24, 0xa5, + 0x67, 0xec, 0xc2, 0x54, 0x65, 0xa3, 0xb7, 0x65, 0x37, 0x1e, 0xa7, 0xec, 0xc4, 0x98, 0xfd, 0x4b, + 0xa8, 0x1d, 0x8f, 0xae, 0x9b, 0x40, 0x13, 0x5e, 0x65, 0x7c, 0x69, 0xfc, 0x4d, 0x63, 0x1f, 0x87, + 0x6f, 0xff, 0xfd, 0x7f, 0xd8, 0x3c, 0xcc, 0x2b, 0xfa, 0x40, 0x17, 0x14, 0xff, 0x8a, 0xa0, 0x3a, + 0x25, 0x44, 0xb8, 0x5d, 0x54, 0xf9, 0x45, 0xd5, 0xf2, 0xee, 0xe4, 0x98, 0x29, 0xc1, 0x0c, 0x3e, + 0xcc, 0x05, 0xd3, 0xef, 0x7d, 0xfb, 0xe7, 0x5f, 0xdf, 0x57, 0x1e, 0xfa, 0xdd, 0xb1, 0xd8, 0x3e, + 0xb5, 0x0d, 0x76, 0x3f, 0x49, 0xc5, 0xe7, 0x74, 0xa0, 0x64, 0xd8, 0x0a, 0x87, 0x44, 0x91, 0x53, + 0x22, 0xa9, 0xfe, 0x9e, 0xa4, 0x77, 0x94, 0x8a, 0x2c, 0x91, 0x61, 0xeb, 0x32, 0x74, 0x2f, 0x7e, + 0xd7, 0x6a, 0x1a, 0xfe, 0x03, 0x41, 0x75, 0x4a, 0x51, 0x8a, 0x69, 0x2f, 0x0a, 0xa0, 0x77, 0xf7, + 0x4a, 0x18, 0xdb, 0xa9, 0xfe, 0xfb, 0x26, 0x99, 0x03, 0x7c, 0x1d, 0xc9, 0xe0, 0x5f, 0x10, 0xdc, + 0xc8, 0x75, 0x09, 0x87, 0x45, 0x74, 0xe6, 0x14, 0xcc, 0x2b, 0x27, 0x96, 0xcb, 0x18, 0xeb, 0xee, + 0x2a, 0xcf, 0x37, 0xa7, 0x1b, 0xb6, 0x2e, 0xf1, 0x8f, 0x08, 0xaa, 0x53, 0x12, 0x59, 0x5c, 0xf7, + 0x45, 0x3d, 0xf5, 0x6a, 0x0b, 0x92, 0x76, 0xa0, 0x47, 0x67, 0x4e, 0xb4, 0x75, 0x2d, 0x44, 0x9f, + 0xdb, 0xd2, 0x1a, 0x9d, 0x28, 0x55, 0xda, 0x69, 0xe9, 0xf4, 0xca, 0xe9, 0x8f, 0xff, 0x9e, 0x61, + 0xbc, 0x8f, 0xf7, 0x5e, 0x9a, 0xb1, 0x15, 0x30, 0x4d, 0xf8, 0x37, 0x04, 0xd5, 0xa9, 0x89, 0x52, + 0x5c, 0xd9, 0xc5, 0xf1, 0x53, 0xf4, 0x10, 0x3f, 0x35, 0x74, 0x3f, 0x6a, 0xbf, 0x3b, 0xa1, 0x6b, + 0x7f, 0x91, 0x5e, 0x96, 0xf4, 0xae, 0x9b, 0x5c, 0xbf, 0x23, 0x80, 0x89, 0xaa, 0xe3, 0x9d, 0x32, + 0x0f, 0x6b, 0x66, 0x04, 0x79, 0xed, 0xab, 0x40, 0xdc, 0x53, 0x5c, 0x52, 0xfd, 0xab, 0x3f, 0x45, + 0x37, 0x41, 0x7e, 0x46, 0xb0, 0x35, 0x37, 0x1a, 0xf0, 0xbd, 0x22, 0x4e, 0xcb, 0x67, 0x49, 0xd1, + 0x2d, 0x3c, 0x30, 0xb4, 0x3b, 0xfe, 0xbd, 0x52, 0x4d, 0x73, 0xb9, 0x4b, 0x67, 0xa3, 0xec, 0xa2, + 0x96, 0x61, 0x3b, 0x37, 0x47, 0x8a, 0xd9, 0x2e, 0x1f, 0x3c, 0xd7, 0xcd, 0x96, 0x8d, 0xe6, 0xd9, + 0xee, 0x3d, 0x47, 0xe0, 0x0f, 0xc4, 0xa8, 0x80, 0xdf, 0xde, 0xad, 0xd9, 0xc9, 0x74, 0xa2, 0xc5, + 0xe1, 0x04, 0x7d, 0xd6, 0x75, 0xb0, 0x48, 0xc4, 0x84, 0x47, 0x81, 0x48, 0xa3, 0x30, 0xa2, 0xdc, + 0x48, 0x47, 0x68, 0xb7, 0x48, 0xc2, 0xe4, 0xbf, 0xfd, 0xd7, 0x77, 0xcc, 0xea, 0x87, 0xca, 0xea, + 0x51, 0xf7, 0xf0, 0xd1, 0x4f, 0x95, 0x37, 0x8f, 0xec, 0x61, 0xdd, 0x58, 0x64, 0xc3, 0x60, 0x1c, + 0x30, 0x30, 0x11, 0x83, 0x4f, 0x76, 0xf6, 0x34, 0xe6, 0x74, 0xdd, 0x9c, 0x7e, 0xf7, 0x9f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xc9, 0xcc, 0x19, 0xb3, 0xd4, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/index.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/index.pb.go new file mode 100644 index 0000000..7a4acf8 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/index.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/firestore/admin/v1beta2/index.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Query Scope defines the scope at which a query is run. This is specified on +// a StructuredQuery's `from` field. +type Index_QueryScope int32 + +const ( + // The query scope is unspecified. Not a valid option. + Index_QUERY_SCOPE_UNSPECIFIED Index_QueryScope = 0 + // Indexes with a collection query scope specified allow queries + // against a collection that is the child of a specific document, specified + // at query time, and that has the collection id specified by the index. + Index_COLLECTION Index_QueryScope = 1 +) + +var Index_QueryScope_name = map[int32]string{ + 0: "QUERY_SCOPE_UNSPECIFIED", + 1: "COLLECTION", +} +var Index_QueryScope_value = map[string]int32{ + "QUERY_SCOPE_UNSPECIFIED": 0, + "COLLECTION": 1, +} + +func (x Index_QueryScope) String() string { + return proto.EnumName(Index_QueryScope_name, int32(x)) +} +func (Index_QueryScope) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0, 0} +} + +// The state of an index. During index creation, an index will be in the +// `CREATING` state. If the index is created successfully, it will transition +// to the `READY` state. If the index creation encounters a problem, the index +// will transition to the `NEEDS_REPAIR` state. +type Index_State int32 + +const ( + // The state is unspecified. + Index_STATE_UNSPECIFIED Index_State = 0 + // The index is being created. + // There is an active long-running operation for the index. + // The index is updated when writing a document. + // Some index data may exist. + Index_CREATING Index_State = 1 + // The index is ready to be used. + // The index is updated when writing a document. + // The index is fully populated from all stored documents it applies to. + Index_READY Index_State = 2 + // The index was being created, but something went wrong. + // There is no active long-running operation for the index, + // and the most recently finished long-running operation failed. + // The index is not updated when writing a document. + // Some index data may exist. + // Use the google.longrunning.Operations API to determine why the operation + // that last attempted to create this index failed, then re-create the + // index. + Index_NEEDS_REPAIR Index_State = 3 +) + +var Index_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", + 3: "NEEDS_REPAIR", +} +var Index_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, + "NEEDS_REPAIR": 3, +} + +func (x Index_State) String() string { + return proto.EnumName(Index_State_name, int32(x)) +} +func (Index_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0, 1} +} + +// The supported orderings. 
+type Index_IndexField_Order int32 + +const ( + // The ordering is unspecified. Not a valid option. + Index_IndexField_ORDER_UNSPECIFIED Index_IndexField_Order = 0 + // The field is ordered by ascending field value. + Index_IndexField_ASCENDING Index_IndexField_Order = 1 + // The field is ordered by descending field value. + Index_IndexField_DESCENDING Index_IndexField_Order = 2 +) + +var Index_IndexField_Order_name = map[int32]string{ + 0: "ORDER_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var Index_IndexField_Order_value = map[string]int32{ + "ORDER_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x Index_IndexField_Order) String() string { + return proto.EnumName(Index_IndexField_Order_name, int32(x)) +} +func (Index_IndexField_Order) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0, 0, 0} +} + +// The supported array value configurations. +type Index_IndexField_ArrayConfig int32 + +const ( + // The index does not support additional array queries. + Index_IndexField_ARRAY_CONFIG_UNSPECIFIED Index_IndexField_ArrayConfig = 0 + // The index supports array containment queries. + Index_IndexField_CONTAINS Index_IndexField_ArrayConfig = 1 +) + +var Index_IndexField_ArrayConfig_name = map[int32]string{ + 0: "ARRAY_CONFIG_UNSPECIFIED", + 1: "CONTAINS", +} +var Index_IndexField_ArrayConfig_value = map[string]int32{ + "ARRAY_CONFIG_UNSPECIFIED": 0, + "CONTAINS": 1, +} + +func (x Index_IndexField_ArrayConfig) String() string { + return proto.EnumName(Index_IndexField_ArrayConfig_name, int32(x)) +} +func (Index_IndexField_ArrayConfig) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0, 0, 1} +} + +// Cloud Firestore indexes enable simple and complex queries against +// documents in a database. +type Index struct { + // Output only. + // A server defined name for this index. + // The form of this name for composite indexes will be: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}` + // For single field indexes, this field will be empty. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Indexes with a collection query scope specified allow queries + // against a collection that is the child of a specific document, specified at + // query time, and that has the same collection id. + // + // Indexes with a collection group query scope specified allow queries against + // all collections descended from a specific document, specified at query + // time, and that have the same collection id as this index. + QueryScope Index_QueryScope `protobuf:"varint,2,opt,name=query_scope,json=queryScope,proto3,enum=google.firestore.admin.v1beta2.Index_QueryScope" json:"query_scope,omitempty"` + // The fields supported by this index. + // + // For composite indexes, this is always 2 or more fields. + // The last field entry is always for the field path `__name__`. If, on + // creation, `__name__` was not specified as the last field, it will be added + // automatically with the same direction as that of the last field defined. If + // the final field in a composite index is not directional, the `__name__` + // will be ordered ASCENDING (unless explicitly specified). + // + // For single field indexes, this will always be exactly one entry with a + // field path equal to the field path of the associated field. 
+ Fields []*Index_IndexField `protobuf:"bytes,3,rep,name=fields,proto3" json:"fields,omitempty"` + // Output only. + // The serving state of the index. + State Index_State `protobuf:"varint,4,opt,name=state,proto3,enum=google.firestore.admin.v1beta2.Index_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index) Reset() { *m = Index{} } +func (m *Index) String() string { return proto.CompactTextString(m) } +func (*Index) ProtoMessage() {} +func (*Index) Descriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0} +} +func (m *Index) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index.Unmarshal(m, b) +} +func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index.Marshal(b, m, deterministic) +} +func (dst *Index) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index.Merge(dst, src) +} +func (m *Index) XXX_Size() int { + return xxx_messageInfo_Index.Size(m) +} +func (m *Index) XXX_DiscardUnknown() { + xxx_messageInfo_Index.DiscardUnknown(m) +} + +var xxx_messageInfo_Index proto.InternalMessageInfo + +func (m *Index) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Index) GetQueryScope() Index_QueryScope { + if m != nil { + return m.QueryScope + } + return Index_QUERY_SCOPE_UNSPECIFIED +} + +func (m *Index) GetFields() []*Index_IndexField { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Index) GetState() Index_State { + if m != nil { + return m.State + } + return Index_STATE_UNSPECIFIED +} + +// A field in an index. +// The field_path describes which field is indexed, the value_mode describes +// how the field value is indexed. +type Index_IndexField struct { + // Can be __name__. + // For single field indexes, this must match the name of the field or may + // be omitted. + FieldPath string `protobuf:"bytes,1,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + // How the field value is indexed. 
+ // + // Types that are valid to be assigned to ValueMode: + // *Index_IndexField_Order_ + // *Index_IndexField_ArrayConfig_ + ValueMode isIndex_IndexField_ValueMode `protobuf_oneof:"value_mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Index_IndexField) Reset() { *m = Index_IndexField{} } +func (m *Index_IndexField) String() string { return proto.CompactTextString(m) } +func (*Index_IndexField) ProtoMessage() {} +func (*Index_IndexField) Descriptor() ([]byte, []int) { + return fileDescriptor_index_6822d5f85dc4ef3b, []int{0, 0} +} +func (m *Index_IndexField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Index_IndexField.Unmarshal(m, b) +} +func (m *Index_IndexField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Index_IndexField.Marshal(b, m, deterministic) +} +func (dst *Index_IndexField) XXX_Merge(src proto.Message) { + xxx_messageInfo_Index_IndexField.Merge(dst, src) +} +func (m *Index_IndexField) XXX_Size() int { + return xxx_messageInfo_Index_IndexField.Size(m) +} +func (m *Index_IndexField) XXX_DiscardUnknown() { + xxx_messageInfo_Index_IndexField.DiscardUnknown(m) +} + +var xxx_messageInfo_Index_IndexField proto.InternalMessageInfo + +func (m *Index_IndexField) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +type isIndex_IndexField_ValueMode interface { + isIndex_IndexField_ValueMode() +} + +type Index_IndexField_Order_ struct { + Order Index_IndexField_Order `protobuf:"varint,2,opt,name=order,proto3,enum=google.firestore.admin.v1beta2.Index_IndexField_Order,oneof"` +} + +type Index_IndexField_ArrayConfig_ struct { + ArrayConfig Index_IndexField_ArrayConfig `protobuf:"varint,3,opt,name=array_config,json=arrayConfig,proto3,enum=google.firestore.admin.v1beta2.Index_IndexField_ArrayConfig,oneof"` +} + +func (*Index_IndexField_Order_) isIndex_IndexField_ValueMode() {} + +func (*Index_IndexField_ArrayConfig_) isIndex_IndexField_ValueMode() {} + +func (m *Index_IndexField) GetValueMode() isIndex_IndexField_ValueMode { + if m != nil { + return m.ValueMode + } + return nil +} + +func (m *Index_IndexField) GetOrder() Index_IndexField_Order { + if x, ok := m.GetValueMode().(*Index_IndexField_Order_); ok { + return x.Order + } + return Index_IndexField_ORDER_UNSPECIFIED +} + +func (m *Index_IndexField) GetArrayConfig() Index_IndexField_ArrayConfig { + if x, ok := m.GetValueMode().(*Index_IndexField_ArrayConfig_); ok { + return x.ArrayConfig + } + return Index_IndexField_ARRAY_CONFIG_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Index_IndexField) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Index_IndexField_OneofMarshaler, _Index_IndexField_OneofUnmarshaler, _Index_IndexField_OneofSizer, []interface{}{ + (*Index_IndexField_Order_)(nil), + (*Index_IndexField_ArrayConfig_)(nil), + } +} + +func _Index_IndexField_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Index_IndexField) + // value_mode + switch x := m.ValueMode.(type) { + case *Index_IndexField_Order_: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Order)) + case *Index_IndexField_ArrayConfig_: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.ArrayConfig)) + case nil: + default: + return fmt.Errorf("Index_IndexField.ValueMode has unexpected type %T", x) + } + return nil +} + +func _Index_IndexField_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Index_IndexField) + switch tag { + case 2: // value_mode.order + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueMode = &Index_IndexField_Order_{Index_IndexField_Order(x)} + return true, err + case 3: // value_mode.array_config + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueMode = &Index_IndexField_ArrayConfig_{Index_IndexField_ArrayConfig(x)} + return true, err + default: + return false, nil + } +} + +func _Index_IndexField_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Index_IndexField) + // value_mode + switch x := m.ValueMode.(type) { + case *Index_IndexField_Order_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Order)) + case *Index_IndexField_ArrayConfig_: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.ArrayConfig)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Index)(nil), "google.firestore.admin.v1beta2.Index") + proto.RegisterType((*Index_IndexField)(nil), "google.firestore.admin.v1beta2.Index.IndexField") + proto.RegisterEnum("google.firestore.admin.v1beta2.Index_QueryScope", Index_QueryScope_name, Index_QueryScope_value) + proto.RegisterEnum("google.firestore.admin.v1beta2.Index_State", Index_State_name, Index_State_value) + proto.RegisterEnum("google.firestore.admin.v1beta2.Index_IndexField_Order", Index_IndexField_Order_name, Index_IndexField_Order_value) + proto.RegisterEnum("google.firestore.admin.v1beta2.Index_IndexField_ArrayConfig", Index_IndexField_ArrayConfig_name, Index_IndexField_ArrayConfig_value) +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta2/index.proto", fileDescriptor_index_6822d5f85dc4ef3b) +} + +var fileDescriptor_index_6822d5f85dc4ef3b = []byte{ + // 545 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0x4f, 0x6b, 0xdb, 0x4c, + 0x10, 0xc6, 0x23, 0x39, 0x0a, 0xaf, 0xc7, 0x7e, 0x83, 0xba, 0x50, 0x2a, 0xd2, 0xb4, 0x18, 0xd1, + 0x83, 0x69, 0x41, 0x6a, 0x52, 0x28, 0x84, 0xb6, 0x07, 0x59, 0x5a, 0xdb, 0x82, 0x20, 0xc9, 0x2b, + 0xa7, 0xe0, 0x5e, 0xc4, 0xc6, 0x5a, 0x2b, 0x02, 0x5b, 0xeb, 0x48, 0x72, 0x68, 0xbe, 0x4e, 0xa1, + 0x97, 0x7e, 0xb0, 0x1e, 0xfa, 0x29, 0x8a, 0x56, 0xc2, 0x2e, 0xa1, 0x7f, 0x92, 0x8b, 0xd8, 0x91, + 0x9e, 0xe7, 0x37, 0xb3, 0xa3, 0x19, 0x78, 0x99, 0x70, 0x9e, 
0x2c, 0x99, 0xb9, 0x48, 0x73, 0x56, + 0x94, 0x3c, 0x67, 0x26, 0x8d, 0x57, 0x69, 0x66, 0xde, 0x9c, 0x5c, 0xb2, 0x92, 0x9e, 0x9a, 0x69, + 0x16, 0xb3, 0xcf, 0xc6, 0x3a, 0xe7, 0x25, 0x47, 0xcf, 0x6b, 0xad, 0xb1, 0xd5, 0x1a, 0x42, 0x6b, + 0x34, 0xda, 0xa3, 0xe3, 0x86, 0x45, 0xd7, 0xa9, 0x49, 0xb3, 0x8c, 0x97, 0xb4, 0x4c, 0x79, 0x56, + 0xd4, 0x6e, 0xfd, 0x87, 0x02, 0x8a, 0x5b, 0xd1, 0x10, 0x82, 0xfd, 0x8c, 0xae, 0x98, 0x26, 0xf5, + 0xa4, 0x7e, 0x9b, 0x88, 0x33, 0x9a, 0x40, 0xe7, 0x7a, 0xc3, 0xf2, 0xdb, 0xa8, 0x98, 0xf3, 0x35, + 0xd3, 0xe4, 0x9e, 0xd4, 0x3f, 0x3c, 0x7d, 0x6d, 0xfc, 0x3d, 0xa3, 0x21, 0x78, 0xc6, 0xa4, 0x32, + 0x86, 0x95, 0x8f, 0xc0, 0xf5, 0xf6, 0x8c, 0xc6, 0x70, 0xb0, 0x48, 0xd9, 0x32, 0x2e, 0xb4, 0x56, + 0xaf, 0xd5, 0xef, 0xdc, 0x97, 0x26, 0x9e, 0xc3, 0xca, 0x48, 0x1a, 0x3f, 0xb2, 0x40, 0x29, 0x4a, + 0x5a, 0x32, 0x6d, 0x5f, 0x94, 0xf5, 0xea, 0x7e, 0xa0, 0xb0, 0xb2, 0x90, 0xda, 0x79, 0xf4, 0x5d, + 0x06, 0xd8, 0x91, 0xd1, 0x33, 0x00, 0xc1, 0x8e, 0xd6, 0xb4, 0xbc, 0x6a, 0x1a, 0xd1, 0x16, 0x6f, + 0x02, 0x5a, 0x5e, 0x21, 0x0f, 0x14, 0x9e, 0xc7, 0x2c, 0x6f, 0xfa, 0xf0, 0xf6, 0xa1, 0x95, 0x1b, + 0x7e, 0xe5, 0x1e, 0xef, 0x91, 0x1a, 0x83, 0x28, 0x74, 0x69, 0x9e, 0xd3, 0xdb, 0x68, 0xce, 0xb3, + 0x45, 0x9a, 0x68, 0x2d, 0x81, 0x7d, 0xff, 0x60, 0xac, 0x55, 0x41, 0x6c, 0xc1, 0x18, 0xef, 0x91, + 0x0e, 0xdd, 0x85, 0xfa, 0x07, 0x50, 0x44, 0x52, 0xf4, 0x18, 0x1e, 0xf9, 0xc4, 0xc1, 0x24, 0xba, + 0xf0, 0xc2, 0x00, 0xdb, 0xee, 0xd0, 0xc5, 0x8e, 0xba, 0x87, 0xfe, 0x87, 0xb6, 0x15, 0xda, 0xd8, + 0x73, 0x5c, 0x6f, 0xa4, 0x4a, 0xe8, 0x10, 0xc0, 0xc1, 0xdb, 0x58, 0xd6, 0xcf, 0xa0, 0xf3, 0x0b, + 0x1c, 0x1d, 0x83, 0x66, 0x11, 0x62, 0xcd, 0x22, 0xdb, 0xf7, 0x86, 0xee, 0xe8, 0x0e, 0xab, 0x0b, + 0xff, 0xd9, 0xbe, 0x37, 0xb5, 0x5c, 0x2f, 0x54, 0xa5, 0x41, 0x17, 0xe0, 0x86, 0x2e, 0x37, 0x2c, + 0x5a, 0xf1, 0x98, 0xe9, 0x67, 0x00, 0xbb, 0x79, 0x40, 0x4f, 0xe1, 0xc9, 0xe4, 0x02, 0x93, 0x59, + 0x14, 0xda, 0x7e, 0x80, 0xef, 0x60, 0x0e, 0x01, 0x6c, 0xff, 0xfc, 0x1c, 0xdb, 0x53, 0xd7, 0xf7, + 0x54, 0x49, 0x77, 0x41, 0x11, 0xff, 0xac, 0xba, 0x42, 0x38, 0xb5, 0xa6, 0xf8, 0x37, 0x69, 0x09, + 0xb6, 0xa6, 0xf5, 0x0d, 0xda, 0xa0, 0x10, 0x6c, 0x39, 0x33, 0x55, 0x46, 0x2a, 0x74, 0x3d, 0x8c, + 0x9d, 0x30, 0x22, 0x38, 0xb0, 0x5c, 0xa2, 0xb6, 0x06, 0x5f, 0x25, 0xd0, 0xe7, 0x7c, 0xf5, 0x8f, + 0x06, 0x0f, 0xea, 0x91, 0x08, 0xaa, 0xfd, 0x08, 0xa4, 0x4f, 0x76, 0xa3, 0x4e, 0xf8, 0x92, 0x66, + 0x89, 0xc1, 0xf3, 0xc4, 0x4c, 0x58, 0x26, 0xb6, 0xc7, 0xac, 0x3f, 0xd1, 0x75, 0x5a, 0xfc, 0x69, + 0x55, 0xdf, 0x89, 0xe8, 0x8b, 0xbc, 0x3f, 0xb2, 0x87, 0xe1, 0x37, 0xf9, 0xc5, 0xa8, 0x86, 0xd9, + 0x4b, 0xbe, 0x89, 0x8d, 0xe1, 0xb6, 0x00, 0x4b, 0x14, 0xf0, 0xf1, 0x64, 0x50, 0x79, 0x2e, 0x0f, + 0x04, 0xfd, 0xcd, 0xcf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc9, 0xcd, 0x75, 0x16, 0x07, 0x04, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/operation.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/operation.pb.go new file mode 100644 index 0000000..3ee7a5c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/admin/v1beta2/operation.pb.go @@ -0,0 +1,699 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/firestore/admin/v1beta2/operation.proto + +package admin // import "google.golang.org/genproto/googleapis/firestore/admin/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the state of the operation. +type OperationState int32 + +const ( + // Unspecified. + OperationState_OPERATION_STATE_UNSPECIFIED OperationState = 0 + // Request is being prepared for processing. + OperationState_INITIALIZING OperationState = 1 + // Request is actively being processed. + OperationState_PROCESSING OperationState = 2 + // Request is in the process of being cancelled after user called + // google.longrunning.Operations.CancelOperation on the operation. + OperationState_CANCELLING OperationState = 3 + // Request has been processed and is in its finalization stage. + OperationState_FINALIZING OperationState = 4 + // Request has completed successfully. + OperationState_SUCCESSFUL OperationState = 5 + // Request has finished being processed, but encountered an error. + OperationState_FAILED OperationState = 6 + // Request has finished being cancelled after user called + // google.longrunning.Operations.CancelOperation. + OperationState_CANCELLED OperationState = 7 +) + +var OperationState_name = map[int32]string{ + 0: "OPERATION_STATE_UNSPECIFIED", + 1: "INITIALIZING", + 2: "PROCESSING", + 3: "CANCELLING", + 4: "FINALIZING", + 5: "SUCCESSFUL", + 6: "FAILED", + 7: "CANCELLED", +} +var OperationState_value = map[string]int32{ + "OPERATION_STATE_UNSPECIFIED": 0, + "INITIALIZING": 1, + "PROCESSING": 2, + "CANCELLING": 3, + "FINALIZING": 4, + "SUCCESSFUL": 5, + "FAILED": 6, + "CANCELLED": 7, +} + +func (x OperationState) String() string { + return proto.EnumName(OperationState_name, int32(x)) +} +func (OperationState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{0} +} + +// Specifies how the index is changing. +type FieldOperationMetadata_IndexConfigDelta_ChangeType int32 + +const ( + // The type of change is not specified or known. + FieldOperationMetadata_IndexConfigDelta_CHANGE_TYPE_UNSPECIFIED FieldOperationMetadata_IndexConfigDelta_ChangeType = 0 + // The single field index is being added. + FieldOperationMetadata_IndexConfigDelta_ADD FieldOperationMetadata_IndexConfigDelta_ChangeType = 1 + // The single field index is being removed. 
+ FieldOperationMetadata_IndexConfigDelta_REMOVE FieldOperationMetadata_IndexConfigDelta_ChangeType = 2 +) + +var FieldOperationMetadata_IndexConfigDelta_ChangeType_name = map[int32]string{ + 0: "CHANGE_TYPE_UNSPECIFIED", + 1: "ADD", + 2: "REMOVE", +} +var FieldOperationMetadata_IndexConfigDelta_ChangeType_value = map[string]int32{ + "CHANGE_TYPE_UNSPECIFIED": 0, + "ADD": 1, + "REMOVE": 2, +} + +func (x FieldOperationMetadata_IndexConfigDelta_ChangeType) String() string { + return proto.EnumName(FieldOperationMetadata_IndexConfigDelta_ChangeType_name, int32(x)) +} +func (FieldOperationMetadata_IndexConfigDelta_ChangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{1, 0, 0} +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.CreateIndex][google.firestore.admin.v1beta2.FirestoreAdmin.CreateIndex]. +type IndexOperationMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The index resource that this operation is acting on. For example: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{index_id}` + Index string `protobuf:"bytes,3,opt,name=index,proto3" json:"index,omitempty"` + // The state of the operation. + State OperationState `protobuf:"varint,4,opt,name=state,proto3,enum=google.firestore.admin.v1beta2.OperationState" json:"state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,5,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. 
+ ProgressBytes *Progress `protobuf:"bytes,6,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *IndexOperationMetadata) Reset() { *m = IndexOperationMetadata{} } +func (m *IndexOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*IndexOperationMetadata) ProtoMessage() {} +func (*IndexOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{0} +} +func (m *IndexOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_IndexOperationMetadata.Unmarshal(m, b) +} +func (m *IndexOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_IndexOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *IndexOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_IndexOperationMetadata.Merge(dst, src) +} +func (m *IndexOperationMetadata) XXX_Size() int { + return xxx_messageInfo_IndexOperationMetadata.Size(m) +} +func (m *IndexOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_IndexOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_IndexOperationMetadata proto.InternalMessageInfo + +func (m *IndexOperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *IndexOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *IndexOperationMetadata) GetIndex() string { + if m != nil { + return m.Index + } + return "" +} + +func (m *IndexOperationMetadata) GetState() OperationState { + if m != nil { + return m.State + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *IndexOperationMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *IndexOperationMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.UpdateField][google.firestore.admin.v1beta2.FirestoreAdmin.UpdateField]. +type FieldOperationMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The field resource that this operation is acting on. For example: + // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/fields/{field_path}` + Field string `protobuf:"bytes,3,opt,name=field,proto3" json:"field,omitempty"` + // A list of [IndexConfigDelta][google.firestore.admin.v1beta2.FieldOperationMetadata.IndexConfigDelta], which describe the intent of this + // operation. + IndexConfigDeltas []*FieldOperationMetadata_IndexConfigDelta `protobuf:"bytes,4,rep,name=index_config_deltas,json=indexConfigDeltas,proto3" json:"index_config_deltas,omitempty"` + // The state of the operation. + State OperationState `protobuf:"varint,5,opt,name=state,proto3,enum=google.firestore.admin.v1beta2.OperationState" json:"state,omitempty"` + // The progress, in documents, of this operation. 
+ DocumentProgress *Progress `protobuf:"bytes,6,opt,name=document_progress,json=documentProgress,proto3" json:"document_progress,omitempty"` + // The progress, in bytes, of this operation. + BytesProgress *Progress `protobuf:"bytes,7,opt,name=bytes_progress,json=bytesProgress,proto3" json:"bytes_progress,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldOperationMetadata) Reset() { *m = FieldOperationMetadata{} } +func (m *FieldOperationMetadata) String() string { return proto.CompactTextString(m) } +func (*FieldOperationMetadata) ProtoMessage() {} +func (*FieldOperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{1} +} +func (m *FieldOperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOperationMetadata.Unmarshal(m, b) +} +func (m *FieldOperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOperationMetadata.Marshal(b, m, deterministic) +} +func (dst *FieldOperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOperationMetadata.Merge(dst, src) +} +func (m *FieldOperationMetadata) XXX_Size() int { + return xxx_messageInfo_FieldOperationMetadata.Size(m) +} +func (m *FieldOperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOperationMetadata proto.InternalMessageInfo + +func (m *FieldOperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *FieldOperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *FieldOperationMetadata) GetField() string { + if m != nil { + return m.Field + } + return "" +} + +func (m *FieldOperationMetadata) GetIndexConfigDeltas() []*FieldOperationMetadata_IndexConfigDelta { + if m != nil { + return m.IndexConfigDeltas + } + return nil +} + +func (m *FieldOperationMetadata) GetState() OperationState { + if m != nil { + return m.State + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *FieldOperationMetadata) GetDocumentProgress() *Progress { + if m != nil { + return m.DocumentProgress + } + return nil +} + +func (m *FieldOperationMetadata) GetBytesProgress() *Progress { + if m != nil { + return m.BytesProgress + } + return nil +} + +// Information about an index configuration change. +type FieldOperationMetadata_IndexConfigDelta struct { + // Specifies how the index is changing. + ChangeType FieldOperationMetadata_IndexConfigDelta_ChangeType `protobuf:"varint,1,opt,name=change_type,json=changeType,proto3,enum=google.firestore.admin.v1beta2.FieldOperationMetadata_IndexConfigDelta_ChangeType" json:"change_type,omitempty"` + // The index being changed. 
+ Index *Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldOperationMetadata_IndexConfigDelta) Reset() { + *m = FieldOperationMetadata_IndexConfigDelta{} +} +func (m *FieldOperationMetadata_IndexConfigDelta) String() string { return proto.CompactTextString(m) } +func (*FieldOperationMetadata_IndexConfigDelta) ProtoMessage() {} +func (*FieldOperationMetadata_IndexConfigDelta) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{1, 0} +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Unmarshal(m, b) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Marshal(b, m, deterministic) +} +func (dst *FieldOperationMetadata_IndexConfigDelta) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Merge(dst, src) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_Size() int { + return xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.Size(m) +} +func (m *FieldOperationMetadata_IndexConfigDelta) XXX_DiscardUnknown() { + xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldOperationMetadata_IndexConfigDelta proto.InternalMessageInfo + +func (m *FieldOperationMetadata_IndexConfigDelta) GetChangeType() FieldOperationMetadata_IndexConfigDelta_ChangeType { + if m != nil { + return m.ChangeType + } + return FieldOperationMetadata_IndexConfigDelta_CHANGE_TYPE_UNSPECIFIED +} + +func (m *FieldOperationMetadata_IndexConfigDelta) GetIndex() *Index { + if m != nil { + return m.Index + } + return nil +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.ExportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ExportDocuments]. +type ExportDocumentsMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the export operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1beta2.OperationState" json:"operation_state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. + ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being exported. + CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // Where the entities are being exported to. 
+ OutputUriPrefix string `protobuf:"bytes,7,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsMetadata) Reset() { *m = ExportDocumentsMetadata{} } +func (m *ExportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsMetadata) ProtoMessage() {} +func (*ExportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{2} +} +func (m *ExportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ExportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsMetadata.Merge(dst, src) +} +func (m *ExportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsMetadata.Size(m) +} +func (m *ExportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsMetadata proto.InternalMessageInfo + +func (m *ExportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *ExportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ExportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ExportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ExportDocumentsMetadata) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from +// [FirestoreAdmin.ImportDocuments][google.firestore.admin.v1beta2.FirestoreAdmin.ImportDocuments]. +type ImportDocumentsMetadata struct { + // The time this operation started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time this operation completed. Will be unset if operation still in + // progress. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The state of the import operation. + OperationState OperationState `protobuf:"varint,3,opt,name=operation_state,json=operationState,proto3,enum=google.firestore.admin.v1beta2.OperationState" json:"operation_state,omitempty"` + // The progress, in documents, of this operation. + ProgressDocuments *Progress `protobuf:"bytes,4,opt,name=progress_documents,json=progressDocuments,proto3" json:"progress_documents,omitempty"` + // The progress, in bytes, of this operation. 
+ ProgressBytes *Progress `protobuf:"bytes,5,opt,name=progress_bytes,json=progressBytes,proto3" json:"progress_bytes,omitempty"` + // Which collection ids are being imported. + CollectionIds []string `protobuf:"bytes,6,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // The location of the documents being imported. + InputUriPrefix string `protobuf:"bytes,7,opt,name=input_uri_prefix,json=inputUriPrefix,proto3" json:"input_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportDocumentsMetadata) Reset() { *m = ImportDocumentsMetadata{} } +func (m *ImportDocumentsMetadata) String() string { return proto.CompactTextString(m) } +func (*ImportDocumentsMetadata) ProtoMessage() {} +func (*ImportDocumentsMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{3} +} +func (m *ImportDocumentsMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportDocumentsMetadata.Unmarshal(m, b) +} +func (m *ImportDocumentsMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportDocumentsMetadata.Marshal(b, m, deterministic) +} +func (dst *ImportDocumentsMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportDocumentsMetadata.Merge(dst, src) +} +func (m *ImportDocumentsMetadata) XXX_Size() int { + return xxx_messageInfo_ImportDocumentsMetadata.Size(m) +} +func (m *ImportDocumentsMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ImportDocumentsMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportDocumentsMetadata proto.InternalMessageInfo + +func (m *ImportDocumentsMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *ImportDocumentsMetadata) GetOperationState() OperationState { + if m != nil { + return m.OperationState + } + return OperationState_OPERATION_STATE_UNSPECIFIED +} + +func (m *ImportDocumentsMetadata) GetProgressDocuments() *Progress { + if m != nil { + return m.ProgressDocuments + } + return nil +} + +func (m *ImportDocumentsMetadata) GetProgressBytes() *Progress { + if m != nil { + return m.ProgressBytes + } + return nil +} + +func (m *ImportDocumentsMetadata) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ImportDocumentsMetadata) GetInputUriPrefix() string { + if m != nil { + return m.InputUriPrefix + } + return "" +} + +// Returned in the [google.longrunning.Operation][google.longrunning.Operation] response field. +type ExportDocumentsResponse struct { + // Location of the output files. This can be used to begin an import + // into Cloud Firestore (this project or another project) after the operation + // completes successfully. 
+ OutputUriPrefix string `protobuf:"bytes,1,opt,name=output_uri_prefix,json=outputUriPrefix,proto3" json:"output_uri_prefix,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportDocumentsResponse) Reset() { *m = ExportDocumentsResponse{} } +func (m *ExportDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ExportDocumentsResponse) ProtoMessage() {} +func (*ExportDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{4} +} +func (m *ExportDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportDocumentsResponse.Unmarshal(m, b) +} +func (m *ExportDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ExportDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportDocumentsResponse.Merge(dst, src) +} +func (m *ExportDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ExportDocumentsResponse.Size(m) +} +func (m *ExportDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExportDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportDocumentsResponse proto.InternalMessageInfo + +func (m *ExportDocumentsResponse) GetOutputUriPrefix() string { + if m != nil { + return m.OutputUriPrefix + } + return "" +} + +// Describes the progress of the operation. +// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1beta2.Progress] +// is used. +type Progress struct { + // The amount of work estimated. + EstimatedWork int64 `protobuf:"varint,1,opt,name=estimated_work,json=estimatedWork,proto3" json:"estimated_work,omitempty"` + // The amount of work completed. 
+ CompletedWork int64 `protobuf:"varint,2,opt,name=completed_work,json=completedWork,proto3" json:"completed_work,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Progress) Reset() { *m = Progress{} } +func (m *Progress) String() string { return proto.CompactTextString(m) } +func (*Progress) ProtoMessage() {} +func (*Progress) Descriptor() ([]byte, []int) { + return fileDescriptor_operation_1eae331224aa61a0, []int{5} +} +func (m *Progress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Progress.Unmarshal(m, b) +} +func (m *Progress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Progress.Marshal(b, m, deterministic) +} +func (dst *Progress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Progress.Merge(dst, src) +} +func (m *Progress) XXX_Size() int { + return xxx_messageInfo_Progress.Size(m) +} +func (m *Progress) XXX_DiscardUnknown() { + xxx_messageInfo_Progress.DiscardUnknown(m) +} + +var xxx_messageInfo_Progress proto.InternalMessageInfo + +func (m *Progress) GetEstimatedWork() int64 { + if m != nil { + return m.EstimatedWork + } + return 0 +} + +func (m *Progress) GetCompletedWork() int64 { + if m != nil { + return m.CompletedWork + } + return 0 +} + +func init() { + proto.RegisterType((*IndexOperationMetadata)(nil), "google.firestore.admin.v1beta2.IndexOperationMetadata") + proto.RegisterType((*FieldOperationMetadata)(nil), "google.firestore.admin.v1beta2.FieldOperationMetadata") + proto.RegisterType((*FieldOperationMetadata_IndexConfigDelta)(nil), "google.firestore.admin.v1beta2.FieldOperationMetadata.IndexConfigDelta") + proto.RegisterType((*ExportDocumentsMetadata)(nil), "google.firestore.admin.v1beta2.ExportDocumentsMetadata") + proto.RegisterType((*ImportDocumentsMetadata)(nil), "google.firestore.admin.v1beta2.ImportDocumentsMetadata") + proto.RegisterType((*ExportDocumentsResponse)(nil), "google.firestore.admin.v1beta2.ExportDocumentsResponse") + proto.RegisterType((*Progress)(nil), "google.firestore.admin.v1beta2.Progress") + proto.RegisterEnum("google.firestore.admin.v1beta2.OperationState", OperationState_name, OperationState_value) + proto.RegisterEnum("google.firestore.admin.v1beta2.FieldOperationMetadata_IndexConfigDelta_ChangeType", FieldOperationMetadata_IndexConfigDelta_ChangeType_name, FieldOperationMetadata_IndexConfigDelta_ChangeType_value) +} + +func init() { + proto.RegisterFile("google/firestore/admin/v1beta2/operation.proto", fileDescriptor_operation_1eae331224aa61a0) +} + +var fileDescriptor_operation_1eae331224aa61a0 = []byte{ + // 858 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x56, 0xcf, 0x6f, 0xe3, 0x44, + 0x14, 0xc6, 0x71, 0x92, 0x6e, 0x5e, 0xa9, 0xd7, 0x1d, 0xd0, 0x36, 0xea, 0x22, 0xb6, 0x8a, 0xa8, + 0x14, 0xf5, 0xe0, 0x68, 0x83, 0x38, 0xa0, 0x95, 0x90, 0x52, 0xc7, 0x09, 0x96, 0xb2, 0x49, 0xe4, + 0x24, 0x5b, 0xd8, 0x8b, 0x35, 0x8d, 0x27, 0x66, 0xb4, 0xb1, 0xc7, 0xf2, 0x4c, 0xd8, 0xf6, 0xce, + 0x19, 0xf1, 0x17, 0x70, 0xe0, 0x06, 0xff, 0x14, 0xe2, 0x3f, 0x41, 0x33, 0xfe, 0x11, 0x6d, 0xe9, + 0x12, 0x95, 0x82, 0x00, 0x69, 0x6f, 0x79, 0xcf, 0xdf, 0xf7, 0xcd, 0xbc, 0x79, 0xdf, 0x7b, 0x0a, + 0x58, 0x21, 0x63, 0xe1, 0x9a, 0x74, 0x56, 0x34, 0x25, 0x5c, 0xb0, 0x94, 0x74, 0x70, 0x10, 0xd1, + 0xb8, 0xf3, 0xed, 0xd3, 0x4b, 0x22, 0x70, 0xb7, 0xc3, 0x12, 0x92, 0x62, 0x41, 0x59, 0x6c, 0x25, + 0x29, 0x13, 0x0c, 0x7d, 0x9c, 0xe1, 0xad, 0x12, 0x6f, 0x29, 0xbc, 0x95, 0xe3, 
0x8f, 0xcf, 0x76, + 0xe8, 0xd1, 0x38, 0x20, 0x57, 0x99, 0xd6, 0xf1, 0x93, 0x1c, 0xab, 0xa2, 0xcb, 0xcd, 0xaa, 0x23, + 0x68, 0x44, 0xb8, 0xc0, 0x51, 0x92, 0x03, 0x3e, 0xca, 0x01, 0x38, 0xa1, 0x1d, 0x1c, 0xc7, 0x4c, + 0xa8, 0x9b, 0xf0, 0xec, 0x6b, 0xeb, 0x07, 0x1d, 0x1e, 0xb9, 0x52, 0x6e, 0x52, 0xdc, 0xf1, 0x39, + 0x11, 0x38, 0xc0, 0x02, 0xa3, 0xcf, 0x01, 0xb8, 0xc0, 0xa9, 0xf0, 0xa5, 0x62, 0x53, 0x3b, 0xd1, + 0xda, 0xfb, 0xdd, 0xe3, 0xbc, 0x54, 0xab, 0x38, 0xce, 0x9a, 0x17, 0xc7, 0x79, 0x0d, 0x85, 0x96, + 0x31, 0xfa, 0x0c, 0x1e, 0x90, 0x38, 0xc8, 0x88, 0x95, 0x9d, 0xc4, 0x3d, 0x12, 0x07, 0x8a, 0xf6, + 0x21, 0xd4, 0x54, 0x69, 0x4d, 0xfd, 0x44, 0x6b, 0x37, 0xbc, 0x2c, 0x40, 0x7d, 0xa8, 0x71, 0x81, + 0x05, 0x69, 0x56, 0x4f, 0xb4, 0xb6, 0xd1, 0xb5, 0xac, 0x3f, 0x7f, 0x3d, 0xab, 0xac, 0x64, 0x26, + 0x59, 0x5e, 0x46, 0x46, 0x17, 0x80, 0x92, 0x94, 0x85, 0x29, 0xe1, 0xdc, 0x0f, 0xd8, 0x72, 0x13, + 0x91, 0x58, 0xf0, 0x66, 0x4d, 0x5d, 0xae, 0xbd, 0x4b, 0x72, 0x9a, 0x33, 0xbd, 0xc3, 0x42, 0xa3, + 0x5f, 0x48, 0xa0, 0x09, 0x18, 0xa5, 0xf0, 0xe5, 0xb5, 0x20, 0xbc, 0x59, 0xbf, 0xa3, 0xe8, 0x41, + 0xc1, 0x3f, 0x97, 0xf4, 0xd6, 0xf7, 0x75, 0x78, 0x34, 0xa0, 0x64, 0x1d, 0xfc, 0x47, 0x5a, 0xb2, + 0x92, 0x77, 0x29, 0x5a, 0xa2, 0x02, 0xf4, 0x1a, 0x3e, 0x50, 0xbd, 0xf1, 0x97, 0x2c, 0x5e, 0xd1, + 0xd0, 0x0f, 0xc8, 0x5a, 0x60, 0xde, 0xac, 0x9e, 0xe8, 0xed, 0xfd, 0xee, 0x70, 0x57, 0xe1, 0xb7, + 0x17, 0x67, 0x29, 0x1b, 0xda, 0x4a, 0xb0, 0x2f, 0xf5, 0xbc, 0x43, 0x7a, 0x23, 0xc3, 0xb7, 0x5e, + 0xa8, 0xdd, 0xc7, 0x0b, 0x0b, 0x38, 0x2c, 0x2c, 0xe0, 0x17, 0x6f, 0x7f, 0xe7, 0xae, 0x99, 0x85, + 0x44, 0x91, 0x91, 0x4e, 0x50, 0x06, 0xd8, 0x6a, 0xee, 0xdd, 0xd5, 0x09, 0x8a, 0x5f, 0x84, 0xc7, + 0xdf, 0x55, 0xc0, 0xbc, 0xf9, 0x2a, 0x88, 0xc3, 0xfe, 0xf2, 0x1b, 0x1c, 0x87, 0xc4, 0x17, 0xd7, + 0x49, 0x66, 0x02, 0xa3, 0xeb, 0xfd, 0x4d, 0x6f, 0x6e, 0xd9, 0x4a, 0x7a, 0x7e, 0x9d, 0x10, 0x0f, + 0x96, 0xe5, 0x6f, 0xf4, 0xac, 0x98, 0xcc, 0xcc, 0x3a, 0xa7, 0xbb, 0x8e, 0x53, 0xba, 0xf9, 0x00, + 0xb7, 0xbe, 0x00, 0xd8, 0xca, 0xa2, 0xc7, 0x70, 0x64, 0x7f, 0xd9, 0x1b, 0x0f, 0x1d, 0x7f, 0xfe, + 0xf5, 0xd4, 0xf1, 0x17, 0xe3, 0xd9, 0xd4, 0xb1, 0xdd, 0x81, 0xeb, 0xf4, 0xcd, 0xf7, 0xd0, 0x1e, + 0xe8, 0xbd, 0x7e, 0xdf, 0xd4, 0x10, 0x40, 0xdd, 0x73, 0x9e, 0x4f, 0x5e, 0x38, 0x66, 0xa5, 0xf5, + 0x9b, 0x0e, 0x47, 0xce, 0x55, 0xc2, 0x52, 0x51, 0x4e, 0xdd, 0xbf, 0x38, 0x11, 0x17, 0xf0, 0xb0, + 0xdc, 0xe7, 0x7e, 0x66, 0x46, 0xfd, 0x2f, 0x99, 0xd1, 0x60, 0x6f, 0xc4, 0x6f, 0xd9, 0x50, 0xd5, + 0x7f, 0x62, 0x43, 0xd5, 0xee, 0xb5, 0xa1, 0xd0, 0x29, 0x18, 0x4b, 0xb6, 0x5e, 0x93, 0xa5, 0x7a, + 0x03, 0x1a, 0xc8, 0xe1, 0xd1, 0xdb, 0x0d, 0xef, 0x60, 0x9b, 0x75, 0x03, 0x8e, 0xce, 0xe0, 0x90, + 0x6d, 0x44, 0xb2, 0x11, 0xfe, 0x26, 0xa5, 0x7e, 0x92, 0x92, 0x15, 0xbd, 0x52, 0x23, 0xd1, 0xf0, + 0x1e, 0x66, 0x1f, 0x16, 0x29, 0x9d, 0xaa, 0x74, 0xeb, 0x57, 0x1d, 0x8e, 0xdc, 0xe8, 0x5d, 0x8f, + 0xff, 0x6f, 0x3d, 0x6e, 0x83, 0x49, 0xe3, 0x5b, 0x5b, 0x6c, 0xa8, 0xfc, 0xb6, 0xc3, 0xce, 0x1f, + 0x86, 0xd8, 0x23, 0x3c, 0x61, 0x31, 0x27, 0xb7, 0x1b, 0x45, 0xbb, 0xdd, 0x28, 0x5f, 0xc1, 0x83, + 0x72, 0xe1, 0x9e, 0x82, 0x41, 0xb8, 0xa0, 0x11, 0x16, 0x24, 0xf0, 0x5f, 0xb3, 0xf4, 0x95, 0x22, + 0xe9, 0xde, 0x41, 0x99, 0xbd, 0x60, 0xe9, 0xab, 0xac, 0x94, 0x28, 0x59, 0x93, 0x12, 0x56, 0xc9, + 0x60, 0x65, 0x56, 0xc2, 0xce, 0x7e, 0xd4, 0xc0, 0x78, 0xb3, 0x7d, 0xe8, 0x09, 0x3c, 0x9e, 0x4c, + 0x1d, 0xaf, 0x37, 0x77, 0x27, 0x63, 0x7f, 0x36, 0xef, 0xcd, 0x6f, 0xee, 0x2b, 0x13, 0xde, 0x77, + 0xc7, 0xee, 0xdc, 0xed, 0x8d, 0xdc, 0x97, 0xee, 0x78, 0x68, 0x6a, 0xc8, 0x00, 0x98, 0x7a, 0x13, + 0xdb, 
0x99, 0xcd, 0x64, 0x5c, 0x91, 0xb1, 0xdd, 0x1b, 0xdb, 0xce, 0x68, 0x24, 0x63, 0x5d, 0xc6, + 0x03, 0x77, 0x5c, 0xe0, 0xab, 0x32, 0x9e, 0x2d, 0x6c, 0x89, 0x1f, 0x2c, 0x46, 0x66, 0x4d, 0x2e, + 0xbe, 0x41, 0xcf, 0x1d, 0x39, 0x7d, 0xb3, 0x8e, 0x0e, 0xa0, 0x91, 0x73, 0x9d, 0xbe, 0xb9, 0x77, + 0xfe, 0xb3, 0x06, 0xad, 0x25, 0x8b, 0x76, 0x74, 0xf4, 0x7c, 0x5b, 0xc4, 0x54, 0x5a, 0x7c, 0xaa, + 0xbd, 0xb4, 0x73, 0x46, 0xc8, 0xd6, 0x38, 0x0e, 0x2d, 0x96, 0x86, 0x9d, 0x90, 0xc4, 0x6a, 0x00, + 0x3a, 0xd9, 0x27, 0x9c, 0x50, 0xfe, 0xb6, 0xbf, 0x9b, 0xcf, 0x54, 0xf4, 0x53, 0xa5, 0x3a, 0xb4, + 0x07, 0xb3, 0x5f, 0x2a, 0x9f, 0x0c, 0x33, 0x31, 0x7b, 0xcd, 0x36, 0x81, 0x35, 0x28, 0x2f, 0xd1, + 0x53, 0x97, 0x78, 0xf1, 0xf4, 0x5c, 0x72, 0x2e, 0xeb, 0x4a, 0xfd, 0xd3, 0xdf, 0x03, 0x00, 0x00, + 0xff, 0xff, 0xae, 0xa2, 0x85, 0xc8, 0x1b, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1/common.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1/common.pb.go new file mode 100644 index 0000000..2ba26db --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1/common.pb.go @@ -0,0 +1,562 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1/common.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A set of field paths on a document. +// Used to restrict a get or update operation on a document to a subset of its +// fields. +// This is different from standard field masks, as this is always scoped to a +// [Document][google.firestore.v1.Document], and takes in account the dynamic nature of [Value][google.firestore.v1.Value]. +type DocumentMask struct { + // The list of field paths in the mask. See [Document.fields][google.firestore.v1.Document.fields] for a field + // path syntax reference. 
+ FieldPaths []string `protobuf:"bytes,1,rep,name=field_paths,json=fieldPaths,proto3" json:"field_paths,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentMask) Reset() { *m = DocumentMask{} } +func (m *DocumentMask) String() string { return proto.CompactTextString(m) } +func (*DocumentMask) ProtoMessage() {} +func (*DocumentMask) Descriptor() ([]byte, []int) { + return fileDescriptor_common_dddbf67b55d1e5f0, []int{0} +} +func (m *DocumentMask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentMask.Unmarshal(m, b) +} +func (m *DocumentMask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentMask.Marshal(b, m, deterministic) +} +func (dst *DocumentMask) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentMask.Merge(dst, src) +} +func (m *DocumentMask) XXX_Size() int { + return xxx_messageInfo_DocumentMask.Size(m) +} +func (m *DocumentMask) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentMask.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentMask proto.InternalMessageInfo + +func (m *DocumentMask) GetFieldPaths() []string { + if m != nil { + return m.FieldPaths + } + return nil +} + +// A precondition on a document, used for conditional operations. +type Precondition struct { + // The type of precondition. + // + // Types that are valid to be assigned to ConditionType: + // *Precondition_Exists + // *Precondition_UpdateTime + ConditionType isPrecondition_ConditionType `protobuf_oneof:"condition_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Precondition) Reset() { *m = Precondition{} } +func (m *Precondition) String() string { return proto.CompactTextString(m) } +func (*Precondition) ProtoMessage() {} +func (*Precondition) Descriptor() ([]byte, []int) { + return fileDescriptor_common_dddbf67b55d1e5f0, []int{1} +} +func (m *Precondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Precondition.Unmarshal(m, b) +} +func (m *Precondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Precondition.Marshal(b, m, deterministic) +} +func (dst *Precondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_Precondition.Merge(dst, src) +} +func (m *Precondition) XXX_Size() int { + return xxx_messageInfo_Precondition.Size(m) +} +func (m *Precondition) XXX_DiscardUnknown() { + xxx_messageInfo_Precondition.DiscardUnknown(m) +} + +var xxx_messageInfo_Precondition proto.InternalMessageInfo + +type isPrecondition_ConditionType interface { + isPrecondition_ConditionType() +} + +type Precondition_Exists struct { + Exists bool `protobuf:"varint,1,opt,name=exists,proto3,oneof"` +} + +type Precondition_UpdateTime struct { + UpdateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=update_time,json=updateTime,proto3,oneof"` +} + +func (*Precondition_Exists) isPrecondition_ConditionType() {} + +func (*Precondition_UpdateTime) isPrecondition_ConditionType() {} + +func (m *Precondition) GetConditionType() isPrecondition_ConditionType { + if m != nil { + return m.ConditionType + } + return nil +} + +func (m *Precondition) GetExists() bool { + if x, ok := m.GetConditionType().(*Precondition_Exists); ok { + return x.Exists + } + return false +} + +func (m *Precondition) GetUpdateTime() *timestamp.Timestamp { + if x, ok := m.GetConditionType().(*Precondition_UpdateTime); ok { + return x.UpdateTime + } + return nil +} + 
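For context, a minimal sketch of how calling code might populate the generated DocumentMask and Precondition messages above — illustrative only, not part of the vendored file or of this patch; the field-path strings are hypothetical, and it assumes only the vendored github.com/golang/protobuf/ptypes/timestamp package already listed in the imports:

package main

import (
	"fmt"
	"time"

	timestamp "github.com/golang/protobuf/ptypes/timestamp"
	firestore "google.golang.org/genproto/googleapis/firestore/v1"
)

func main() {
	// Limit a get or update to two (hypothetical) field paths.
	mask := &firestore.DocumentMask{
		FieldPaths: []string{"name", "owner.display_name"},
	}

	// Precondition carries a oneof: exactly one of Exists or UpdateTime is set,
	// via the generated wrapper types Precondition_Exists / Precondition_UpdateTime.
	mustExist := &firestore.Precondition{
		ConditionType: &firestore.Precondition_Exists{Exists: true},
	}
	unchangedSince := &firestore.Precondition{
		ConditionType: &firestore.Precondition_UpdateTime{
			UpdateTime: &timestamp.Timestamp{Seconds: time.Now().Unix()},
		},
	}

	// The generated getters unwrap the oneof safely (returning zero values otherwise).
	fmt.Println(mask.GetFieldPaths(), mustExist.GetExists(), unchangedSince.GetUpdateTime())
}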
+// XXX_OneofFuncs is for the internal use of the proto package. +func (*Precondition) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Precondition_OneofMarshaler, _Precondition_OneofUnmarshaler, _Precondition_OneofSizer, []interface{}{ + (*Precondition_Exists)(nil), + (*Precondition_UpdateTime)(nil), + } +} + +func _Precondition_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Precondition) + // condition_type + switch x := m.ConditionType.(type) { + case *Precondition_Exists: + t := uint64(0) + if x.Exists { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Precondition_UpdateTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UpdateTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Precondition.ConditionType has unexpected type %T", x) + } + return nil +} + +func _Precondition_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Precondition) + switch tag { + case 1: // condition_type.exists + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConditionType = &Precondition_Exists{x != 0} + return true, err + case 2: // condition_type.update_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConditionType = &Precondition_UpdateTime{msg} + return true, err + default: + return false, nil + } +} + +func _Precondition_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Precondition) + // condition_type + switch x := m.ConditionType.(type) { + case *Precondition_Exists: + n += 1 // tag and wire + n += 1 + case *Precondition_UpdateTime: + s := proto.Size(x.UpdateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for creating a new transaction. +type TransactionOptions struct { + // The mode of the transaction. 
+ // + // Types that are valid to be assigned to Mode: + // *TransactionOptions_ReadOnly_ + // *TransactionOptions_ReadWrite_ + Mode isTransactionOptions_Mode `protobuf_oneof:"mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions) Reset() { *m = TransactionOptions{} } +func (m *TransactionOptions) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions) ProtoMessage() {} +func (*TransactionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_common_dddbf67b55d1e5f0, []int{2} +} +func (m *TransactionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions.Unmarshal(m, b) +} +func (m *TransactionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions.Merge(dst, src) +} +func (m *TransactionOptions) XXX_Size() int { + return xxx_messageInfo_TransactionOptions.Size(m) +} +func (m *TransactionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions proto.InternalMessageInfo + +type isTransactionOptions_Mode interface { + isTransactionOptions_Mode() +} + +type TransactionOptions_ReadOnly_ struct { + ReadOnly *TransactionOptions_ReadOnly `protobuf:"bytes,2,opt,name=read_only,json=readOnly,proto3,oneof"` +} + +type TransactionOptions_ReadWrite_ struct { + ReadWrite *TransactionOptions_ReadWrite `protobuf:"bytes,3,opt,name=read_write,json=readWrite,proto3,oneof"` +} + +func (*TransactionOptions_ReadOnly_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_ReadWrite_) isTransactionOptions_Mode() {} + +func (m *TransactionOptions) GetMode() isTransactionOptions_Mode { + if m != nil { + return m.Mode + } + return nil +} + +func (m *TransactionOptions) GetReadOnly() *TransactionOptions_ReadOnly { + if x, ok := m.GetMode().(*TransactionOptions_ReadOnly_); ok { + return x.ReadOnly + } + return nil +} + +func (m *TransactionOptions) GetReadWrite() *TransactionOptions_ReadWrite { + if x, ok := m.GetMode().(*TransactionOptions_ReadWrite_); ok { + return x.ReadWrite + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransactionOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_OneofMarshaler, _TransactionOptions_OneofUnmarshaler, _TransactionOptions_OneofSizer, []interface{}{ + (*TransactionOptions_ReadOnly_)(nil), + (*TransactionOptions_ReadWrite_)(nil), + } +} + +func _TransactionOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadOnly_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadOnly); err != nil { + return err + } + case *TransactionOptions_ReadWrite_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadWrite); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions.Mode has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions) + switch tag { + case 2: // mode.read_only + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadOnly) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadOnly_{msg} + return true, err + case 3: // mode.read_write + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadWrite) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadWrite_{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadOnly_: + s := proto.Size(x.ReadOnly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadWrite_: + s := proto.Size(x.ReadWrite) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for a transaction that can be used to read and write documents. +type TransactionOptions_ReadWrite struct { + // An optional transaction to retry. 
+ RetryTransaction []byte `protobuf:"bytes,1,opt,name=retry_transaction,json=retryTransaction,proto3" json:"retry_transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadWrite) Reset() { *m = TransactionOptions_ReadWrite{} } +func (m *TransactionOptions_ReadWrite) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadWrite) ProtoMessage() {} +func (*TransactionOptions_ReadWrite) Descriptor() ([]byte, []int) { + return fileDescriptor_common_dddbf67b55d1e5f0, []int{2, 0} +} +func (m *TransactionOptions_ReadWrite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadWrite.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadWrite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadWrite.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadWrite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadWrite.Merge(dst, src) +} +func (m *TransactionOptions_ReadWrite) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadWrite.Size(m) +} +func (m *TransactionOptions_ReadWrite) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadWrite.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadWrite proto.InternalMessageInfo + +func (m *TransactionOptions_ReadWrite) GetRetryTransaction() []byte { + if m != nil { + return m.RetryTransaction + } + return nil +} + +// Options for a transaction that can only be used to read documents. +type TransactionOptions_ReadOnly struct { + // The consistency mode for this transaction. If not set, defaults to strong + // consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *TransactionOptions_ReadOnly_ReadTime + ConsistencySelector isTransactionOptions_ReadOnly_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadOnly) Reset() { *m = TransactionOptions_ReadOnly{} } +func (m *TransactionOptions_ReadOnly) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadOnly) ProtoMessage() {} +func (*TransactionOptions_ReadOnly) Descriptor() ([]byte, []int) { + return fileDescriptor_common_dddbf67b55d1e5f0, []int{2, 1} +} +func (m *TransactionOptions_ReadOnly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadOnly.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadOnly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadOnly.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadOnly) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadOnly.Merge(dst, src) +} +func (m *TransactionOptions_ReadOnly) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadOnly.Size(m) +} +func (m *TransactionOptions_ReadOnly) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadOnly.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadOnly proto.InternalMessageInfo + +type isTransactionOptions_ReadOnly_ConsistencySelector interface { + isTransactionOptions_ReadOnly_ConsistencySelector() +} + +type TransactionOptions_ReadOnly_ReadTime struct { + ReadTime *timestamp.Timestamp 
`protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*TransactionOptions_ReadOnly_ReadTime) isTransactionOptions_ReadOnly_ConsistencySelector() {} + +func (m *TransactionOptions_ReadOnly) GetConsistencySelector() isTransactionOptions_ReadOnly_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*TransactionOptions_ReadOnly_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*TransactionOptions_ReadOnly) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_ReadOnly_OneofMarshaler, _TransactionOptions_ReadOnly_OneofUnmarshaler, _TransactionOptions_ReadOnly_OneofSizer, []interface{}{ + (*TransactionOptions_ReadOnly_ReadTime)(nil), + } +} + +func _TransactionOptions_ReadOnly_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions_ReadOnly) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *TransactionOptions_ReadOnly_ReadTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions_ReadOnly.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_ReadOnly_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions_ReadOnly) + switch tag { + case 2: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &TransactionOptions_ReadOnly_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_ReadOnly_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions_ReadOnly) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *TransactionOptions_ReadOnly_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*DocumentMask)(nil), "google.firestore.v1.DocumentMask") + proto.RegisterType((*Precondition)(nil), "google.firestore.v1.Precondition") + proto.RegisterType((*TransactionOptions)(nil), "google.firestore.v1.TransactionOptions") + proto.RegisterType((*TransactionOptions_ReadWrite)(nil), "google.firestore.v1.TransactionOptions.ReadWrite") + proto.RegisterType((*TransactionOptions_ReadOnly)(nil), "google.firestore.v1.TransactionOptions.ReadOnly") +} + +func init() { + proto.RegisterFile("google/firestore/v1/common.proto", fileDescriptor_common_dddbf67b55d1e5f0) +} + +var fileDescriptor_common_dddbf67b55d1e5f0 = []byte{ + // 458 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x5f, 0x8b, 0xd3, 0x40, + 0x10, 0x6f, 0x7a, 0x47, 0x69, 0xa7, 0x45, 0xce, 0x28, 0x5a, 0x83, 0x70, 0xa5, 0x4f, 0x05, 0x61, + 0x63, 0xf4, 0x45, 0x51, 0x5f, 0x5a, 0xb9, 0xeb, 0x8b, 0xb4, 0xc4, 0xe3, 0x04, 0xa9, 0x84, 0xbd, + 0x64, 0x1a, 0x17, 0x93, 0x9d, 0xb0, 0xbb, 
0x3d, 0xcd, 0xd7, 0x11, 0x7c, 0xf1, 0xa3, 0xf8, 0x11, + 0xfc, 0x34, 0x92, 0x4d, 0x1a, 0x15, 0xfb, 0xe0, 0xbd, 0xed, 0xcc, 0xef, 0xdf, 0xcc, 0xb0, 0x30, + 0x49, 0x89, 0xd2, 0x0c, 0xfd, 0xad, 0x50, 0xa8, 0x0d, 0x29, 0xf4, 0xaf, 0x03, 0x3f, 0xa6, 0x3c, + 0x27, 0xc9, 0x0a, 0x45, 0x86, 0xdc, 0x3b, 0x35, 0x83, 0xb5, 0x0c, 0x76, 0x1d, 0x78, 0xa7, 0x8d, + 0xcc, 0x52, 0xae, 0x76, 0x5b, 0xdf, 0x88, 0x1c, 0xb5, 0xe1, 0x79, 0x51, 0xab, 0xbc, 0x87, 0x0d, + 0x81, 0x17, 0xc2, 0xe7, 0x52, 0x92, 0xe1, 0x46, 0x90, 0xd4, 0x35, 0x3a, 0xf5, 0x61, 0xf4, 0x9a, + 0xe2, 0x5d, 0x8e, 0xd2, 0xbc, 0xe1, 0xfa, 0x93, 0x7b, 0x0a, 0xc3, 0xad, 0xc0, 0x2c, 0x89, 0x0a, + 0x6e, 0x3e, 0xea, 0xb1, 0x33, 0x39, 0x9a, 0x0d, 0x42, 0xb0, 0xad, 0x75, 0xd5, 0x99, 0x96, 0x30, + 0x5a, 0x2b, 0x8c, 0x49, 0x26, 0xa2, 0xf2, 0x71, 0xc7, 0xd0, 0xc3, 0x2f, 0x42, 0x9b, 0x8a, 0xeb, + 0xcc, 0xfa, 0xcb, 0x4e, 0xd8, 0xd4, 0xee, 0x2b, 0x18, 0xee, 0x8a, 0x84, 0x1b, 0x8c, 0xaa, 0x91, + 0xc6, 0xdd, 0x89, 0x33, 0x1b, 0x3e, 0xf1, 0x58, 0xb3, 0xc4, 0x7e, 0x5e, 0x76, 0xb1, 0x9f, 0x77, + 0xd9, 0x09, 0xa1, 0x16, 0x54, 0xad, 0xf9, 0x09, 0xdc, 0x6a, 0x53, 0x22, 0x53, 0x16, 0x38, 0xfd, + 0xd9, 0x05, 0xf7, 0x42, 0x71, 0xa9, 0x79, 0x5c, 0x35, 0x57, 0x85, 0x5d, 0xc4, 0x5d, 0xc1, 0x40, + 0x21, 0x4f, 0x22, 0x92, 0x59, 0xd9, 0xa4, 0x3c, 0x66, 0x07, 0x4e, 0xc5, 0xfe, 0xd5, 0xb2, 0x10, + 0x79, 0xb2, 0x92, 0x59, 0xb9, 0xec, 0x84, 0x7d, 0xd5, 0xbc, 0xdd, 0x10, 0xc0, 0x1a, 0x7e, 0x56, + 0xc2, 0xe0, 0xf8, 0xc8, 0x3a, 0x06, 0x37, 0x71, 0x7c, 0x57, 0x09, 0x97, 0x9d, 0xd0, 0xce, 0x65, + 0x0b, 0xef, 0x19, 0x0c, 0x5a, 0xc4, 0x7d, 0x04, 0xb7, 0x15, 0x1a, 0x55, 0x46, 0xe6, 0xb7, 0xde, + 0x9e, 0x6f, 0x14, 0x9e, 0x58, 0xe0, 0x0f, 0x5f, 0xef, 0x03, 0xf4, 0xf7, 0x53, 0xba, 0xcf, 0x9b, + 0x55, 0xff, 0xfb, 0xa0, 0x76, 0x29, 0x7b, 0xce, 0x7b, 0x70, 0x37, 0x26, 0xa9, 0x85, 0x36, 0x28, + 0xe3, 0x32, 0xd2, 0x98, 0x61, 0x6c, 0x48, 0xcd, 0x7b, 0x70, 0x9c, 0x53, 0x82, 0xf3, 0x6f, 0x0e, + 0xdc, 0x8f, 0x29, 0x3f, 0xb4, 0xe6, 0x7c, 0xb8, 0xb0, 0xdf, 0x70, 0x5d, 0x25, 0xac, 0x9d, 0xf7, + 0x2f, 0x1b, 0x4e, 0x4a, 0x19, 0x97, 0x29, 0x23, 0x95, 0xfa, 0x29, 0x4a, 0x9b, 0xef, 0xd7, 0x10, + 0x2f, 0x84, 0xfe, 0xeb, 0x23, 0xbf, 0x68, 0x8b, 0xaf, 0xdd, 0xe3, 0xf3, 0xc5, 0xd9, 0xdb, 0xef, + 0xdd, 0x07, 0xe7, 0xb5, 0xcb, 0x22, 0xa3, 0x5d, 0xc2, 0xce, 0xda, 0xbc, 0xcb, 0xe0, 0xc7, 0x1e, + 0xdb, 0x58, 0x6c, 0xd3, 0x62, 0x9b, 0xcb, 0xe0, 0xaa, 0x67, 0x73, 0x9e, 0xfe, 0x0a, 0x00, 0x00, + 0xff, 0xff, 0x26, 0x0b, 0x81, 0xaa, 0x2f, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1/document.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1/document.pb.go new file mode 100644 index 0000000..437d185 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1/document.pb.go @@ -0,0 +1,684 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1/document.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Firestore document. +// +// Must not exceed 1 MiB - 4 bytes. +type Document struct { + // The resource name of the document, for example + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The document's fields. + // + // The map keys represent field names. + // + // A simple field name contains only characters `a` to `z`, `A` to `Z`, + // `0` to `9`, or `_`, and must not start with `0` to `9`. For example, + // `foo_bar_17`. + // + // Field names matching the regular expression `__.*__` are reserved. Reserved + // field names are forbidden except in certain documented contexts. The map + // keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be + // empty. + // + // Field paths may be used in other contexts to refer to structured fields + // defined here. For `map_value`, the field path is represented by the simple + // or quoted field names of the containing fields, delimited by `.`. For + // example, the structured field + // `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be + // represented by the field path `foo.x&y`. + // + // Within a field path, a quoted field name starts and ends with `` ` `` and + // may contain any character. Some characters, including `` ` ``, must be + // escaped using a `\`. For example, `` `x&y` `` represents `x&y` and + // `` `bak\`tik` `` represents `` bak`tik ``. + Fields map[string]*Value `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. The time at which the document was created. + // + // This value increases monotonically when a document is deleted then + // recreated. It can also be compared to values from other documents and + // the `read_time` of a query. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time at which the document was last changed. + // + // This value is initially set to the `create_time` then increases + // monotonically with each change to the document. It can also be + // compared to values from other documents and the `read_time` of a query. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_document_36dad12edbf1593d, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Document) GetFields() map[string]*Value { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Document) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Document) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// A message that can hold any of the supported value types. +type Value struct { + // Must have a value set. + // + // Types that are valid to be assigned to ValueType: + // *Value_NullValue + // *Value_BooleanValue + // *Value_IntegerValue + // *Value_DoubleValue + // *Value_TimestampValue + // *Value_StringValue + // *Value_BytesValue + // *Value_ReferenceValue + // *Value_GeoPointValue + // *Value_ArrayValue + // *Value_MapValue + ValueType isValue_ValueType `protobuf_oneof:"value_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_document_36dad12edbf1593d, []int{1} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_ValueType interface { + isValue_ValueType() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,11,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,1,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,2,opt,name=integer_value,json=integerValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 
`protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,10,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,17,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BytesValue struct { + BytesValue []byte `protobuf:"bytes,18,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +type Value_ReferenceValue struct { + ReferenceValue string `protobuf:"bytes,5,opt,name=reference_value,json=referenceValue,proto3,oneof"` +} + +type Value_GeoPointValue struct { + GeoPointValue *latlng.LatLng `protobuf:"bytes,8,opt,name=geo_point_value,json=geoPointValue,proto3,oneof"` +} + +type Value_ArrayValue struct { + ArrayValue *ArrayValue `protobuf:"bytes,9,opt,name=array_value,json=arrayValue,proto3,oneof"` +} + +type Value_MapValue struct { + MapValue *MapValue `protobuf:"bytes,6,opt,name=map_value,json=mapValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_ValueType() {} + +func (*Value_BooleanValue) isValue_ValueType() {} + +func (*Value_IntegerValue) isValue_ValueType() {} + +func (*Value_DoubleValue) isValue_ValueType() {} + +func (*Value_TimestampValue) isValue_ValueType() {} + +func (*Value_StringValue) isValue_ValueType() {} + +func (*Value_BytesValue) isValue_ValueType() {} + +func (*Value_ReferenceValue) isValue_ValueType() {} + +func (*Value_GeoPointValue) isValue_ValueType() {} + +func (*Value_ArrayValue) isValue_ValueType() {} + +func (*Value_MapValue) isValue_ValueType() {} + +func (m *Value) GetValueType() isValue_ValueType { + if m != nil { + return m.ValueType + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetValueType().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBooleanValue() bool { + if x, ok := m.GetValueType().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetIntegerValue() int64 { + if x, ok := m.GetValueType().(*Value_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetValueType().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetValueType().(*Value_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetValueType().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBytesValue() []byte { + if x, ok := m.GetValueType().(*Value_BytesValue); ok { + return x.BytesValue + } + return nil +} + +func (m *Value) GetReferenceValue() string { + if x, ok := m.GetValueType().(*Value_ReferenceValue); ok { + return x.ReferenceValue + } + return "" +} + +func (m *Value) GetGeoPointValue() *latlng.LatLng { + if x, ok := m.GetValueType().(*Value_GeoPointValue); ok { + return x.GeoPointValue + } + return nil +} + +func (m *Value) GetArrayValue() *ArrayValue { + if x, ok := m.GetValueType().(*Value_ArrayValue); ok { + return x.ArrayValue + } + return nil +} + +func (m *Value) GetMapValue() *MapValue { + if x, ok := m.GetValueType().(*Value_MapValue); ok { + return x.MapValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BooleanValue)(nil), + (*Value_IntegerValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_TimestampValue)(nil), + (*Value_StringValue)(nil), + (*Value_BytesValue)(nil), + (*Value_ReferenceValue)(nil), + (*Value_GeoPointValue)(nil), + (*Value_ArrayValue)(nil), + (*Value_MapValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_IntegerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_TimestampValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return err + } + case *Value_StringValue: + b.EncodeVarint(17<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BytesValue: + b.EncodeVarint(18<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case *Value_ReferenceValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ReferenceValue) + case *Value_GeoPointValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GeoPointValue); err != nil { + return err + } + case *Value_ArrayValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayValue); err != nil { + return err + } + case *Value_MapValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Value.ValueType has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 11: // value_type.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 1: // value_type.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_BooleanValue{x != 0} + return true, err + case 2: // value_type.integer_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_IntegerValue{int64(x)} + return true, err + case 3: // value_type.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ValueType = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 10: // value_type.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ValueType = &Value_TimestampValue{msg} + return true, err + case 17: // value_type.string_value + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_StringValue{x} + return true, err + case 18: // value_type.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ValueType = &Value_BytesValue{x} + return true, err + case 5: // value_type.reference_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_ReferenceValue{x} + return true, err + case 8: // value_type.geo_point_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(latlng.LatLng) + err := b.DecodeMessage(msg) + m.ValueType = &Value_GeoPointValue{msg} + return true, err + case 9: // value_type.array_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_ArrayValue{msg} + return true, err + case 6: // value_type.map_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MapValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_MapValue{msg} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_IntegerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_TimestampValue: + s := proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_StringValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BytesValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case *Value_ReferenceValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ReferenceValue))) + n += len(x.ReferenceValue) + case *Value_GeoPointValue: + s := proto.Size(x.GeoPointValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ArrayValue: + s := proto.Size(x.ArrayValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_MapValue: + s := proto.Size(x.MapValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An array value. +type ArrayValue struct { + // Values in the array. 
+ Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArrayValue) Reset() { *m = ArrayValue{} } +func (m *ArrayValue) String() string { return proto.CompactTextString(m) } +func (*ArrayValue) ProtoMessage() {} +func (*ArrayValue) Descriptor() ([]byte, []int) { + return fileDescriptor_document_36dad12edbf1593d, []int{2} +} +func (m *ArrayValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArrayValue.Unmarshal(m, b) +} +func (m *ArrayValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArrayValue.Marshal(b, m, deterministic) +} +func (dst *ArrayValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArrayValue.Merge(dst, src) +} +func (m *ArrayValue) XXX_Size() int { + return xxx_messageInfo_ArrayValue.Size(m) +} +func (m *ArrayValue) XXX_DiscardUnknown() { + xxx_messageInfo_ArrayValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ArrayValue proto.InternalMessageInfo + +func (m *ArrayValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A map value. +type MapValue struct { + // The map's fields. + // + // The map keys represent field names. Field names matching the regular + // expression `__.*__` are reserved. Reserved field names are forbidden except + // in certain documented contexts. The map keys, represented as UTF-8, must + // not exceed 1,500 bytes and cannot be empty. + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue) Reset() { *m = MapValue{} } +func (m *MapValue) String() string { return proto.CompactTextString(m) } +func (*MapValue) ProtoMessage() {} +func (*MapValue) Descriptor() ([]byte, []int) { + return fileDescriptor_document_36dad12edbf1593d, []int{3} +} +func (m *MapValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue.Unmarshal(m, b) +} +func (m *MapValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue.Marshal(b, m, deterministic) +} +func (dst *MapValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue.Merge(dst, src) +} +func (m *MapValue) XXX_Size() int { + return xxx_messageInfo_MapValue.Size(m) +} +func (m *MapValue) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue proto.InternalMessageInfo + +func (m *MapValue) GetFields() map[string]*Value { + if m != nil { + return m.Fields + } + return nil +} + +func init() { + proto.RegisterType((*Document)(nil), "google.firestore.v1.Document") + proto.RegisterMapType((map[string]*Value)(nil), "google.firestore.v1.Document.FieldsEntry") + proto.RegisterType((*Value)(nil), "google.firestore.v1.Value") + proto.RegisterType((*ArrayValue)(nil), "google.firestore.v1.ArrayValue") + proto.RegisterType((*MapValue)(nil), "google.firestore.v1.MapValue") + proto.RegisterMapType((map[string]*Value)(nil), "google.firestore.v1.MapValue.FieldsEntry") +} + +func init() { + proto.RegisterFile("google/firestore/v1/document.proto", fileDescriptor_document_36dad12edbf1593d) +} + +var fileDescriptor_document_36dad12edbf1593d = []byte{ + // 650 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0x4f, 0x6f, 0xd3, 0x4c, + 0x10, 0xc6, 0xe3, 0x24, 0x8d, 0x92, 0x71, 0xda, 0xbe, 0xaf, 0x7b, 0x20, 0x44, 0xa0, 0x86, 0x20, + 0xa4, 0xf4, 0x62, 0x93, 0x72, 0x41, 0xb4, 0x48, 0x34, 0xa5, 0x6d, 0x0e, 0x05, 0x55, 0x06, 0x7a, + 0x40, 0x95, 0xa2, 0x4d, 0xb2, 0x59, 0x59, 0xac, 0x77, 0xad, 0xf5, 0xba, 0x52, 0xbe, 0x0e, 0x42, + 0x42, 0xe2, 0xc8, 0x27, 0xe0, 0xcc, 0xa7, 0x42, 0xfb, 0xcf, 0x6d, 0x50, 0xd4, 0x9e, 0xb8, 0xed, + 0xce, 0xfc, 0x9e, 0x99, 0x67, 0xd7, 0x3b, 0x86, 0x3e, 0xe1, 0x9c, 0x50, 0x1c, 0x2d, 0x12, 0x81, + 0x73, 0xc9, 0x05, 0x8e, 0xae, 0x87, 0xd1, 0x9c, 0xcf, 0x8a, 0x14, 0x33, 0x19, 0x66, 0x82, 0x4b, + 0x1e, 0xec, 0x18, 0x26, 0x2c, 0x99, 0xf0, 0x7a, 0xd8, 0x7d, 0x64, 0x85, 0x1a, 0x99, 0x16, 0x8b, + 0x28, 0x97, 0xa2, 0x98, 0x59, 0x49, 0x77, 0xf7, 0xef, 0xac, 0x4c, 0x52, 0x9c, 0x4b, 0x94, 0x66, + 0x16, 0xe8, 0x58, 0x40, 0x2e, 0x33, 0x1c, 0x51, 0x24, 0x29, 0x23, 0x36, 0xe3, 0x0a, 0xa3, 0x2c, + 0x89, 0x10, 0x63, 0x5c, 0x22, 0x99, 0x70, 0x96, 0x9b, 0x6c, 0xff, 0x67, 0x15, 0x9a, 0x6f, 0xad, + 0xbd, 0x20, 0x80, 0x3a, 0x43, 0x29, 0xee, 0x78, 0x3d, 0x6f, 0xd0, 0x8a, 0xf5, 0x3a, 0x38, 0x82, + 0xc6, 0x22, 0xc1, 0x74, 0x9e, 0x77, 0xaa, 0xbd, 0xda, 0xc0, 0xdf, 0xdf, 0x0b, 0xd7, 0xb8, 0x0f, + 0x5d, 0x89, 0xf0, 0x54, 0xb3, 0x27, 0x4c, 0x8a, 0x65, 0x6c, 0x85, 0xc1, 0x01, 0xf8, 0x33, 0x81, + 0x91, 0xc4, 0x13, 0xe5, 0xba, 0x53, 0xeb, 0x79, 0x03, 0x7f, 0xbf, 0xeb, 0xea, 0xb8, 0x23, 0x85, + 0x1f, 0xdd, 0x91, 0x62, 0x30, 0xb8, 0x0a, 0x28, 0x71, 0x91, 0xcd, 0x4b, 0x71, 0xfd, 0x7e, 0xb1, + 0xc1, 0x55, 0xa0, 0xfb, 0x09, 0xfc, 0x5b, 0x86, 0x82, 0xff, 0xa0, 0xf6, 0x05, 0x2f, 0xed, 0xf1, + 0xd4, 0x32, 0x78, 0x0e, 0x1b, 0xd7, 0x88, 0x16, 0xb8, 0x53, 0x5d, 0xad, 0xbb, 0x72, 0xb8, 0x4b, + 0x45, 0xc4, 0x06, 0x7c, 0x55, 0x7d, 0xe9, 0xf5, 0x7f, 0xd5, 0x61, 0x43, 0x07, 0x83, 0x03, 0x00, + 0x56, 0x50, 0x3a, 0x31, 0x45, 0xfc, 0x9e, 0x37, 0xd8, 0x5a, 0x63, 0xee, 0x7d, 0x41, 0xa9, 0xe6, + 0xc7, 0x95, 0xb8, 0xc5, 0xdc, 0x26, 0x78, 0x06, 0x9b, 0x53, 0xce, 0x29, 0x46, 0xcc, 0xea, 0x95, + 0xb1, 0xe6, 0xb8, 0x12, 0xb7, 0x6d, 0xb8, 0xc4, 0x12, 0x26, 0x31, 0xc1, 0x62, 0x72, 0xe3, 0xb5, + 0xa6, 0x30, 0x1b, 0x36, 0xd8, 0x53, 0x68, 0xcf, 0x79, 0x31, 0xa5, 0xd8, 0x52, 0xea, 0x9a, 0xbd, + 0x71, 0x25, 0xf6, 0x4d, 0xd4, 0x40, 0x27, 0xb0, 0x5d, 0xbe, 0x1c, 0xcb, 0xc1, 0x7d, 0x37, 0x3a, + 0xae, 0xc4, 0x5b, 0xa5, 0xa8, 0xec, 0x95, 0x4b, 0x91, 0x30, 0x62, 0x6b, 0xfc, 0xaf, 0x6e, 0x54, + 0xf5, 0x32, 0x51, 0x03, 0x3d, 0x01, 0x7f, 0xba, 0x94, 0x38, 0xb7, 0x4c, 0xd0, 0xf3, 0x06, 0xed, + 0x71, 0x25, 0x06, 0x1d, 0x34, 0xc8, 0x1e, 0x6c, 0x0b, 0xbc, 0xc0, 0x02, 0xb3, 0x99, 0xb3, 0xbd, + 0x61, 0x4b, 0x6d, 0x95, 0x09, 0x83, 0xbe, 0x86, 0x6d, 0x82, 0xf9, 0x24, 0xe3, 0x09, 0x93, 0x16, + 0x6d, 0x6a, 0xe7, 0x3b, 0xce, 0xb9, 0x7a, 0xfa, 0xe1, 0x39, 0x92, 0xe7, 0x8c, 0x8c, 0x2b, 0xf1, + 0x26, 0xc1, 0xfc, 0x42, 0xc1, 0x46, 0x3e, 0x02, 0x1f, 0x09, 0x81, 0x96, 0x56, 0xda, 0xd2, 0xd2, + 0xdd, 0xb5, 0x9f, 0xfb, 0x48, 0x71, 0xee, 0x73, 0x01, 0x2a, 0x77, 0xc1, 0x21, 0xb4, 0x52, 0xe4, + 0xae, 0xad, 0xa1, 0x2b, 0x3c, 0x5e, 0x5b, 0xe1, 0x1d, 0xca, 0x9c, 0xbe, 0x99, 0xda, 0xf5, 0xa8, + 0x0d, 0xa0, 0x95, 0x13, 0xe5, 0xb3, 0xff, 0x06, 0xe0, 0xa6, 0x4f, 0xb0, 0x0f, 0x0d, 0x9d, 0xcb, + 0x3b, 0x9e, 0x1e, 0xb2, 0xbb, 0xde, 0xa1, 0x25, 0xfb, 0xdf, 0x3c, 0x68, 0xba, 0x46, 0xb7, 0xa6, + 0xd4, 0xbb, 0x63, 0x4a, 0x1d, 0xbe, 0x6e, 0x4a, 0xff, 0xd1, 0xac, 0x8c, 0xbe, 0x7b, 0xf0, 0x60, + 0xc6, 0xd3, 0x75, 0xf0, 0x68, 0xd3, 0xfd, 0x36, 0x2e, 0xd4, 0xa3, 0xbb, 0xf0, 0x3e, 0x1f, 0x5a, + 0x8a, 0x70, 0x8a, 0x18, 0x09, 0xb9, 0x20, 0x11, 0xc1, 
0x4c, 0x3f, 0xc9, 0xc8, 0xa4, 0x50, 0x96, + 0xe4, 0x2b, 0x3f, 0xd7, 0x83, 0x72, 0xf3, 0xb5, 0x5a, 0x3f, 0x3b, 0x3e, 0xfd, 0xf0, 0xa3, 0xfa, + 0xf0, 0xcc, 0x54, 0x39, 0xa6, 0xbc, 0x98, 0x87, 0xa7, 0x65, 0xc7, 0xcb, 0xe1, 0x6f, 0x97, 0xbb, + 0xd2, 0xb9, 0xab, 0x32, 0x77, 0x75, 0x39, 0x9c, 0x36, 0x74, 0x9f, 0x17, 0x7f, 0x02, 0x00, 0x00, + 0xff, 0xff, 0x2d, 0x9a, 0x82, 0x39, 0xc3, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1/firestore.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1/firestore.pb.go new file mode 100644 index 0000000..45dd707 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1/firestore.pb.go @@ -0,0 +1,3721 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1/firestore.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of change. +type TargetChange_TargetChangeType int32 + +const ( + // No change has occurred. Used only to send an updated `resume_token`. + TargetChange_NO_CHANGE TargetChange_TargetChangeType = 0 + // The targets have been added. + TargetChange_ADD TargetChange_TargetChangeType = 1 + // The targets have been removed. + TargetChange_REMOVE TargetChange_TargetChangeType = 2 + // The targets reflect all changes committed before the targets were added + // to the stream. + // + // This will be sent after or with a `read_time` that is greater than or + // equal to the time at which the targets were added. + // + // Listeners can wait for this change if read-after-write semantics + // are desired. + TargetChange_CURRENT TargetChange_TargetChangeType = 3 + // The targets have been reset, and a new initial state for the targets + // will be returned in subsequent changes. + // + // After the initial state is complete, `CURRENT` will be returned even + // if the target was previously indicated to be `CURRENT`. + TargetChange_RESET TargetChange_TargetChangeType = 4 +) + +var TargetChange_TargetChangeType_name = map[int32]string{ + 0: "NO_CHANGE", + 1: "ADD", + 2: "REMOVE", + 3: "CURRENT", + 4: "RESET", +} +var TargetChange_TargetChangeType_value = map[string]int32{ + "NO_CHANGE": 0, + "ADD": 1, + "REMOVE": 2, + "CURRENT": 3, + "RESET": 4, +} + +func (x TargetChange_TargetChangeType) String() string { + return proto.EnumName(TargetChange_TargetChangeType_name, int32(x)) +} +func (TargetChange_TargetChangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{20, 0} +} + +// The request for [Firestore.GetDocument][google.firestore.v1.Firestore.GetDocument]. 
+type GetDocumentRequest struct { + // The resource name of the Document to get. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,2,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *GetDocumentRequest_Transaction + // *GetDocumentRequest_ReadTime + ConsistencySelector isGetDocumentRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDocumentRequest) Reset() { *m = GetDocumentRequest{} } +func (m *GetDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*GetDocumentRequest) ProtoMessage() {} +func (*GetDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{0} +} +func (m *GetDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDocumentRequest.Unmarshal(m, b) +} +func (m *GetDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *GetDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDocumentRequest.Merge(dst, src) +} +func (m *GetDocumentRequest) XXX_Size() int { + return xxx_messageInfo_GetDocumentRequest.Size(m) +} +func (m *GetDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDocumentRequest proto.InternalMessageInfo + +func (m *GetDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isGetDocumentRequest_ConsistencySelector interface { + isGetDocumentRequest_ConsistencySelector() +} + +type GetDocumentRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3,oneof"` +} + +type GetDocumentRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*GetDocumentRequest_Transaction) isGetDocumentRequest_ConsistencySelector() {} + +func (*GetDocumentRequest_ReadTime) isGetDocumentRequest_ConsistencySelector() {} + +func (m *GetDocumentRequest) GetConsistencySelector() isGetDocumentRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *GetDocumentRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*GetDocumentRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *GetDocumentRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*GetDocumentRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GetDocumentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GetDocumentRequest_OneofMarshaler, _GetDocumentRequest_OneofUnmarshaler, _GetDocumentRequest_OneofSizer, []interface{}{ + (*GetDocumentRequest_Transaction)(nil), + (*GetDocumentRequest_ReadTime)(nil), + } +} + +func _GetDocumentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GetDocumentRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *GetDocumentRequest_Transaction: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *GetDocumentRequest_ReadTime: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GetDocumentRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _GetDocumentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GetDocumentRequest) + switch tag { + case 3: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &GetDocumentRequest_Transaction{x} + return true, err + case 5: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &GetDocumentRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _GetDocumentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GetDocumentRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *GetDocumentRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *GetDocumentRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request for [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. +type ListDocumentsRequest struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The collection ID, relative to `parent`, to list. For example: `chatrooms` + // or `messages`. + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // The maximum number of documents to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The `next_page_token` value returned from a previous List request, if any. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The order to sort results by. For example: `priority desc, name`. 
+ OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If a document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,7,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *ListDocumentsRequest_Transaction + // *ListDocumentsRequest_ReadTime + ConsistencySelector isListDocumentsRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + // If the list should show missing documents. A missing document is a + // document that does not exist but has sub-documents. These documents will + // be returned with a key but will not have fields, [Document.create_time][google.firestore.v1.Document.create_time], + // or [Document.update_time][google.firestore.v1.Document.update_time] set. + // + // Requests with `show_missing` may not specify `where` or + // `order_by`. + ShowMissing bool `protobuf:"varint,12,opt,name=show_missing,json=showMissing,proto3" json:"show_missing,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsRequest) Reset() { *m = ListDocumentsRequest{} } +func (m *ListDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsRequest) ProtoMessage() {} +func (*ListDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{1} +} +func (m *ListDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsRequest.Unmarshal(m, b) +} +func (m *ListDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsRequest.Merge(dst, src) +} +func (m *ListDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ListDocumentsRequest.Size(m) +} +func (m *ListDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsRequest proto.InternalMessageInfo + +func (m *ListDocumentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDocumentsRequest) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *ListDocumentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDocumentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListDocumentsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListDocumentsRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isListDocumentsRequest_ConsistencySelector interface { + isListDocumentsRequest_ConsistencySelector() +} + +type ListDocumentsRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,8,opt,name=transaction,proto3,oneof"` +} + +type ListDocumentsRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*ListDocumentsRequest_Transaction) 
isListDocumentsRequest_ConsistencySelector() {} + +func (*ListDocumentsRequest_ReadTime) isListDocumentsRequest_ConsistencySelector() {} + +func (m *ListDocumentsRequest) GetConsistencySelector() isListDocumentsRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *ListDocumentsRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*ListDocumentsRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *ListDocumentsRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*ListDocumentsRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +func (m *ListDocumentsRequest) GetShowMissing() bool { + if m != nil { + return m.ShowMissing + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ListDocumentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListDocumentsRequest_OneofMarshaler, _ListDocumentsRequest_OneofUnmarshaler, _ListDocumentsRequest_OneofSizer, []interface{}{ + (*ListDocumentsRequest_Transaction)(nil), + (*ListDocumentsRequest_ReadTime)(nil), + } +} + +func _ListDocumentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *ListDocumentsRequest_Transaction: + b.EncodeVarint(8<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *ListDocumentsRequest_ReadTime: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ListDocumentsRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _ListDocumentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListDocumentsRequest) + switch tag { + case 8: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &ListDocumentsRequest_Transaction{x} + return true, err + case 10: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &ListDocumentsRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _ListDocumentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *ListDocumentsRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *ListDocumentsRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.ListDocuments][google.firestore.v1.Firestore.ListDocuments]. +type ListDocumentsResponse struct { + // The Documents found. + Documents []*Document `protobuf:"bytes,1,rep,name=documents,proto3" json:"documents,omitempty"` + // The next page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsResponse) Reset() { *m = ListDocumentsResponse{} } +func (m *ListDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsResponse) ProtoMessage() {} +func (*ListDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{2} +} +func (m *ListDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsResponse.Unmarshal(m, b) +} +func (m *ListDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsResponse.Merge(dst, src) +} +func (m *ListDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ListDocumentsResponse.Size(m) +} +func (m *ListDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsResponse proto.InternalMessageInfo + +func (m *ListDocumentsResponse) GetDocuments() []*Document { + if m != nil { + return m.Documents + } + return nil +} + +func (m *ListDocumentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [Firestore.CreateDocument][google.firestore.v1.Firestore.CreateDocument]. +type CreateDocumentRequest struct { + // The parent resource. For example: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The collection ID, relative to `parent`, to list. For example: `chatrooms`. + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // The client-assigned document ID to use for this document. + // + // Optional. If not specified, an ID will be assigned by the service. + DocumentId string `protobuf:"bytes,3,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` + // The document to create. `name` must not be set. + Document *Document `protobuf:"bytes,4,opt,name=document,proto3" json:"document,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. 
+ Mask *DocumentMask `protobuf:"bytes,5,opt,name=mask,proto3" json:"mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDocumentRequest) Reset() { *m = CreateDocumentRequest{} } +func (m *CreateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDocumentRequest) ProtoMessage() {} +func (*CreateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{3} +} +func (m *CreateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDocumentRequest.Unmarshal(m, b) +} +func (m *CreateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDocumentRequest.Merge(dst, src) +} +func (m *CreateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_CreateDocumentRequest.Size(m) +} +func (m *CreateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDocumentRequest proto.InternalMessageInfo + +func (m *CreateDocumentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDocumentRequest) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *CreateDocumentRequest) GetDocumentId() string { + if m != nil { + return m.DocumentId + } + return "" +} + +func (m *CreateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *CreateDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +// The request for [Firestore.UpdateDocument][google.firestore.v1.Firestore.UpdateDocument]. +type UpdateDocumentRequest struct { + // The updated document. + // Creates the document if it does not already exist. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The fields to update. + // None of the field paths in the mask may contain a reserved name. + // + // If the document exists on the server and has fields not referenced in the + // mask, they are left unchanged. + // Fields referenced in the mask, but not present in the input document, are + // deleted from the document on the server. + UpdateMask *DocumentMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,3,opt,name=mask,proto3" json:"mask,omitempty"` + // An optional precondition on the document. + // The request will fail if this is set and not met by the target document. 
+ CurrentDocument *Precondition `protobuf:"bytes,4,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDocumentRequest) Reset() { *m = UpdateDocumentRequest{} } +func (m *UpdateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDocumentRequest) ProtoMessage() {} +func (*UpdateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{4} +} +func (m *UpdateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDocumentRequest.Unmarshal(m, b) +} +func (m *UpdateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDocumentRequest.Merge(dst, src) +} +func (m *UpdateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDocumentRequest.Size(m) +} +func (m *UpdateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDocumentRequest proto.InternalMessageInfo + +func (m *UpdateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *UpdateDocumentRequest) GetUpdateMask() *DocumentMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +func (m *UpdateDocumentRequest) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// The request for [Firestore.DeleteDocument][google.firestore.v1.Firestore.DeleteDocument]. +type DeleteDocumentRequest struct { + // The resource name of the Document to delete. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional precondition on the document. + // The request will fail if this is set and not met by the target document. 
+ CurrentDocument *Precondition `protobuf:"bytes,2,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDocumentRequest) Reset() { *m = DeleteDocumentRequest{} } +func (m *DeleteDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDocumentRequest) ProtoMessage() {} +func (*DeleteDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{5} +} +func (m *DeleteDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDocumentRequest.Unmarshal(m, b) +} +func (m *DeleteDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDocumentRequest.Merge(dst, src) +} +func (m *DeleteDocumentRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDocumentRequest.Size(m) +} +func (m *DeleteDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDocumentRequest proto.InternalMessageInfo + +func (m *DeleteDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteDocumentRequest) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// The request for [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. +type BatchGetDocumentsRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The names of the documents to retrieve. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // The request will fail if any of the document is not a child resource of the + // given `database`. Duplicate names will be elided. + Documents []string `protobuf:"bytes,2,rep,name=documents,proto3" json:"documents,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If a document has a field that is not present in this mask, that field will + // not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,3,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. 
+ // + // Types that are valid to be assigned to ConsistencySelector: + // *BatchGetDocumentsRequest_Transaction + // *BatchGetDocumentsRequest_NewTransaction + // *BatchGetDocumentsRequest_ReadTime + ConsistencySelector isBatchGetDocumentsRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetDocumentsRequest) Reset() { *m = BatchGetDocumentsRequest{} } +func (m *BatchGetDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchGetDocumentsRequest) ProtoMessage() {} +func (*BatchGetDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{6} +} +func (m *BatchGetDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetDocumentsRequest.Unmarshal(m, b) +} +func (m *BatchGetDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchGetDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetDocumentsRequest.Merge(dst, src) +} +func (m *BatchGetDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchGetDocumentsRequest.Size(m) +} +func (m *BatchGetDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetDocumentsRequest proto.InternalMessageInfo + +func (m *BatchGetDocumentsRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *BatchGetDocumentsRequest) GetDocuments() []string { + if m != nil { + return m.Documents + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isBatchGetDocumentsRequest_ConsistencySelector interface { + isBatchGetDocumentsRequest_ConsistencySelector() +} + +type BatchGetDocumentsRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,4,opt,name=transaction,proto3,oneof"` +} + +type BatchGetDocumentsRequest_NewTransaction struct { + NewTransaction *TransactionOptions `protobuf:"bytes,5,opt,name=new_transaction,json=newTransaction,proto3,oneof"` +} + +type BatchGetDocumentsRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*BatchGetDocumentsRequest_Transaction) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (*BatchGetDocumentsRequest_NewTransaction) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (*BatchGetDocumentsRequest_ReadTime) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (m *BatchGetDocumentsRequest) GetConsistencySelector() isBatchGetDocumentsRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetNewTransaction() *TransactionOptions { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_NewTransaction); ok { + return x.NewTransaction + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_ReadTime); ok { + return x.ReadTime + } + return nil 
+} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BatchGetDocumentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchGetDocumentsRequest_OneofMarshaler, _BatchGetDocumentsRequest_OneofUnmarshaler, _BatchGetDocumentsRequest_OneofSizer, []interface{}{ + (*BatchGetDocumentsRequest_Transaction)(nil), + (*BatchGetDocumentsRequest_NewTransaction)(nil), + (*BatchGetDocumentsRequest_ReadTime)(nil), + } +} + +func _BatchGetDocumentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchGetDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *BatchGetDocumentsRequest_Transaction: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *BatchGetDocumentsRequest_NewTransaction: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NewTransaction); err != nil { + return err + } + case *BatchGetDocumentsRequest_ReadTime: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchGetDocumentsRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _BatchGetDocumentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchGetDocumentsRequest) + switch tag { + case 4: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &BatchGetDocumentsRequest_Transaction{x} + return true, err + case 5: // consistency_selector.new_transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &BatchGetDocumentsRequest_NewTransaction{msg} + return true, err + case 7: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &BatchGetDocumentsRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _BatchGetDocumentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchGetDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *BatchGetDocumentsRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *BatchGetDocumentsRequest_NewTransaction: + s := proto.Size(x.NewTransaction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchGetDocumentsRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1.Firestore.BatchGetDocuments]. +type BatchGetDocumentsResponse struct { + // A single result. + // This can be empty if the server is just returning a transaction. 
+ // + // Types that are valid to be assigned to Result: + // *BatchGetDocumentsResponse_Found + // *BatchGetDocumentsResponse_Missing + Result isBatchGetDocumentsResponse_Result `protobuf_oneof:"result"` + // The transaction that was started as part of this request. + // Will only be set in the first response, and only if + // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1.BatchGetDocumentsRequest.new_transaction] was set in the request. + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3" json:"transaction,omitempty"` + // The time at which the document was read. + // This may be monotically increasing, in this case the previous documents in + // the result stream are guaranteed not to have changed between their + // read_time and this one. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetDocumentsResponse) Reset() { *m = BatchGetDocumentsResponse{} } +func (m *BatchGetDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchGetDocumentsResponse) ProtoMessage() {} +func (*BatchGetDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{7} +} +func (m *BatchGetDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetDocumentsResponse.Unmarshal(m, b) +} +func (m *BatchGetDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchGetDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetDocumentsResponse.Merge(dst, src) +} +func (m *BatchGetDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_BatchGetDocumentsResponse.Size(m) +} +func (m *BatchGetDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetDocumentsResponse proto.InternalMessageInfo + +type isBatchGetDocumentsResponse_Result interface { + isBatchGetDocumentsResponse_Result() +} + +type BatchGetDocumentsResponse_Found struct { + Found *Document `protobuf:"bytes,1,opt,name=found,proto3,oneof"` +} + +type BatchGetDocumentsResponse_Missing struct { + Missing string `protobuf:"bytes,2,opt,name=missing,proto3,oneof"` +} + +func (*BatchGetDocumentsResponse_Found) isBatchGetDocumentsResponse_Result() {} + +func (*BatchGetDocumentsResponse_Missing) isBatchGetDocumentsResponse_Result() {} + +func (m *BatchGetDocumentsResponse) GetResult() isBatchGetDocumentsResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetFound() *Document { + if x, ok := m.GetResult().(*BatchGetDocumentsResponse_Found); ok { + return x.Found + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetMissing() string { + if x, ok := m.GetResult().(*BatchGetDocumentsResponse_Missing); ok { + return x.Missing + } + return "" +} + +func (m *BatchGetDocumentsResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BatchGetDocumentsResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchGetDocumentsResponse_OneofMarshaler, _BatchGetDocumentsResponse_OneofUnmarshaler, _BatchGetDocumentsResponse_OneofSizer, []interface{}{ + (*BatchGetDocumentsResponse_Found)(nil), + (*BatchGetDocumentsResponse_Missing)(nil), + } +} + +func _BatchGetDocumentsResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchGetDocumentsResponse) + // result + switch x := m.Result.(type) { + case *BatchGetDocumentsResponse_Found: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Found); err != nil { + return err + } + case *BatchGetDocumentsResponse_Missing: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Missing) + case nil: + default: + return fmt.Errorf("BatchGetDocumentsResponse.Result has unexpected type %T", x) + } + return nil +} + +func _BatchGetDocumentsResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchGetDocumentsResponse) + switch tag { + case 1: // result.found + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Document) + err := b.DecodeMessage(msg) + m.Result = &BatchGetDocumentsResponse_Found{msg} + return true, err + case 2: // result.missing + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &BatchGetDocumentsResponse_Missing{x} + return true, err + default: + return false, nil + } +} + +func _BatchGetDocumentsResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchGetDocumentsResponse) + // result + switch x := m.Result.(type) { + case *BatchGetDocumentsResponse_Found: + s := proto.Size(x.Found) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchGetDocumentsResponse_Missing: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Missing))) + n += len(x.Missing) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request for [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. +type BeginTransactionRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The options for the transaction. + // Defaults to a read-write transaction. 
+ Options *TransactionOptions `protobuf:"bytes,2,opt,name=options,proto3" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} } +func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionRequest) ProtoMessage() {} +func (*BeginTransactionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{8} +} +func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b) +} +func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionRequest.Merge(dst, src) +} +func (m *BeginTransactionRequest) XXX_Size() int { + return xxx_messageInfo_BeginTransactionRequest.Size(m) +} +func (m *BeginTransactionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo + +func (m *BeginTransactionRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *BeginTransactionRequest) GetOptions() *TransactionOptions { + if m != nil { + return m.Options + } + return nil +} + +// The response for [Firestore.BeginTransaction][google.firestore.v1.Firestore.BeginTransaction]. +type BeginTransactionResponse struct { + // The transaction that was started. + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionResponse) Reset() { *m = BeginTransactionResponse{} } +func (m *BeginTransactionResponse) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionResponse) ProtoMessage() {} +func (*BeginTransactionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{9} +} +func (m *BeginTransactionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionResponse.Unmarshal(m, b) +} +func (m *BeginTransactionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionResponse.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionResponse.Merge(dst, src) +} +func (m *BeginTransactionResponse) XXX_Size() int { + return xxx_messageInfo_BeginTransactionResponse.Size(m) +} +func (m *BeginTransactionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionResponse proto.InternalMessageInfo + +func (m *BeginTransactionResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Firestore.Commit][google.firestore.v1.Firestore.Commit]. +type CommitRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The writes to apply. + // + // Always executed atomically and in order. 
+ Writes []*Write `protobuf:"bytes,2,rep,name=writes,proto3" json:"writes,omitempty"` + // If set, applies all writes in this transaction, and commits it. + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitRequest) Reset() { *m = CommitRequest{} } +func (m *CommitRequest) String() string { return proto.CompactTextString(m) } +func (*CommitRequest) ProtoMessage() {} +func (*CommitRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{10} +} +func (m *CommitRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitRequest.Unmarshal(m, b) +} +func (m *CommitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitRequest.Marshal(b, m, deterministic) +} +func (dst *CommitRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitRequest.Merge(dst, src) +} +func (m *CommitRequest) XXX_Size() int { + return xxx_messageInfo_CommitRequest.Size(m) +} +func (m *CommitRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CommitRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitRequest proto.InternalMessageInfo + +func (m *CommitRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *CommitRequest) GetWrites() []*Write { + if m != nil { + return m.Writes + } + return nil +} + +func (m *CommitRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The response for [Firestore.Commit][google.firestore.v1.Firestore.Commit]. +type CommitResponse struct { + // The result of applying the writes. + // + // This i-th write result corresponds to the i-th write in the + // request. + WriteResults []*WriteResult `protobuf:"bytes,1,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"` + // The time at which the commit occurred. 
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitResponse) Reset() { *m = CommitResponse{} } +func (m *CommitResponse) String() string { return proto.CompactTextString(m) } +func (*CommitResponse) ProtoMessage() {} +func (*CommitResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{11} +} +func (m *CommitResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitResponse.Unmarshal(m, b) +} +func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic) +} +func (dst *CommitResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitResponse.Merge(dst, src) +} +func (m *CommitResponse) XXX_Size() int { + return xxx_messageInfo_CommitResponse.Size(m) +} +func (m *CommitResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CommitResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitResponse proto.InternalMessageInfo + +func (m *CommitResponse) GetWriteResults() []*WriteResult { + if m != nil { + return m.WriteResults + } + return nil +} + +func (m *CommitResponse) GetCommitTime() *timestamp.Timestamp { + if m != nil { + return m.CommitTime + } + return nil +} + +// The request for [Firestore.Rollback][google.firestore.v1.Firestore.Rollback]. +type RollbackRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The transaction to roll back. + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackRequest) Reset() { *m = RollbackRequest{} } +func (m *RollbackRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackRequest) ProtoMessage() {} +func (*RollbackRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{12} +} +func (m *RollbackRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackRequest.Unmarshal(m, b) +} +func (m *RollbackRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackRequest.Merge(dst, src) +} +func (m *RollbackRequest) XXX_Size() int { + return xxx_messageInfo_RollbackRequest.Size(m) +} +func (m *RollbackRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackRequest proto.InternalMessageInfo + +func (m *RollbackRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *RollbackRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. +type RunQueryRequest struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. 
+ // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The query to run. + // + // Types that are valid to be assigned to QueryType: + // *RunQueryRequest_StructuredQuery + QueryType isRunQueryRequest_QueryType `protobuf_oneof:"query_type"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *RunQueryRequest_Transaction + // *RunQueryRequest_NewTransaction + // *RunQueryRequest_ReadTime + ConsistencySelector isRunQueryRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryRequest) Reset() { *m = RunQueryRequest{} } +func (m *RunQueryRequest) String() string { return proto.CompactTextString(m) } +func (*RunQueryRequest) ProtoMessage() {} +func (*RunQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{13} +} +func (m *RunQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryRequest.Unmarshal(m, b) +} +func (m *RunQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryRequest.Marshal(b, m, deterministic) +} +func (dst *RunQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryRequest.Merge(dst, src) +} +func (m *RunQueryRequest) XXX_Size() int { + return xxx_messageInfo_RunQueryRequest.Size(m) +} +func (m *RunQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryRequest proto.InternalMessageInfo + +func (m *RunQueryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isRunQueryRequest_QueryType interface { + isRunQueryRequest_QueryType() +} + +type RunQueryRequest_StructuredQuery struct { + StructuredQuery *StructuredQuery `protobuf:"bytes,2,opt,name=structured_query,json=structuredQuery,proto3,oneof"` +} + +func (*RunQueryRequest_StructuredQuery) isRunQueryRequest_QueryType() {} + +func (m *RunQueryRequest) GetQueryType() isRunQueryRequest_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *RunQueryRequest) GetStructuredQuery() *StructuredQuery { + if x, ok := m.GetQueryType().(*RunQueryRequest_StructuredQuery); ok { + return x.StructuredQuery + } + return nil +} + +type isRunQueryRequest_ConsistencySelector interface { + isRunQueryRequest_ConsistencySelector() +} + +type RunQueryRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,5,opt,name=transaction,proto3,oneof"` +} + +type RunQueryRequest_NewTransaction struct { + NewTransaction *TransactionOptions `protobuf:"bytes,6,opt,name=new_transaction,json=newTransaction,proto3,oneof"` +} + +type RunQueryRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*RunQueryRequest_Transaction) isRunQueryRequest_ConsistencySelector() {} + +func (*RunQueryRequest_NewTransaction) isRunQueryRequest_ConsistencySelector() {} + +func (*RunQueryRequest_ReadTime) isRunQueryRequest_ConsistencySelector() {} + +func (m *RunQueryRequest) GetConsistencySelector() isRunQueryRequest_ConsistencySelector { + if m != nil { + return 
m.ConsistencySelector + } + return nil +} + +func (m *RunQueryRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *RunQueryRequest) GetNewTransaction() *TransactionOptions { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_NewTransaction); ok { + return x.NewTransaction + } + return nil +} + +func (m *RunQueryRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*RunQueryRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RunQueryRequest_OneofMarshaler, _RunQueryRequest_OneofUnmarshaler, _RunQueryRequest_OneofSizer, []interface{}{ + (*RunQueryRequest_StructuredQuery)(nil), + (*RunQueryRequest_Transaction)(nil), + (*RunQueryRequest_NewTransaction)(nil), + (*RunQueryRequest_ReadTime)(nil), + } +} + +func _RunQueryRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_StructuredQuery: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.QueryType has unexpected type %T", x) + } + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *RunQueryRequest_Transaction: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *RunQueryRequest_NewTransaction: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NewTransaction); err != nil { + return err + } + case *RunQueryRequest_ReadTime: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _RunQueryRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RunQueryRequest) + switch tag { + case 2: // query_type.structured_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_StructuredQuery{msg} + return true, err + case 5: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &RunQueryRequest_Transaction{x} + return true, err + case 6: // consistency_selector.new_transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &RunQueryRequest_NewTransaction{msg} + return true, err + case 7: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &RunQueryRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _RunQueryRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RunQueryRequest) + // query_type + switch x := 
m.QueryType.(type) { + case *RunQueryRequest_StructuredQuery: + s := proto.Size(x.StructuredQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *RunQueryRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *RunQueryRequest_NewTransaction: + s := proto.Size(x.NewTransaction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RunQueryRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.RunQuery][google.firestore.v1.Firestore.RunQuery]. +type RunQueryResponse struct { + // The transaction that was started as part of this request. + // Can only be set in the first response, and only if + // [RunQueryRequest.new_transaction][google.firestore.v1.RunQueryRequest.new_transaction] was set in the request. + // If set, no other fields will be set in this response. + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // A query result. + // Not set when reporting partial progress. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The time at which the document was read. This may be monotonically + // increasing; in this case, the previous documents in the result stream are + // guaranteed not to have changed between their `read_time` and this one. + // + // If the query returns no results, a response with `read_time` and no + // `document` will be sent, and this represents the time at which the query + // was run. + ReadTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // The number of results that have been skipped due to an offset between + // the last response and the current response. 
+ SkippedResults int32 `protobuf:"varint,4,opt,name=skipped_results,json=skippedResults,proto3" json:"skipped_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryResponse) Reset() { *m = RunQueryResponse{} } +func (m *RunQueryResponse) String() string { return proto.CompactTextString(m) } +func (*RunQueryResponse) ProtoMessage() {} +func (*RunQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{14} +} +func (m *RunQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryResponse.Unmarshal(m, b) +} +func (m *RunQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryResponse.Marshal(b, m, deterministic) +} +func (dst *RunQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryResponse.Merge(dst, src) +} +func (m *RunQueryResponse) XXX_Size() int { + return xxx_messageInfo_RunQueryResponse.Size(m) +} +func (m *RunQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryResponse proto.InternalMessageInfo + +func (m *RunQueryResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *RunQueryResponse) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *RunQueryResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *RunQueryResponse) GetSkippedResults() int32 { + if m != nil { + return m.SkippedResults + } + return 0 +} + +// The request for [Firestore.Write][google.firestore.v1.Firestore.Write]. +// +// The first request creates a stream, or resumes an existing one from a token. +// +// When creating a new stream, the server replies with a response containing +// only an ID and a token, to use in the next request. +// +// When resuming a stream, the server first streams any responses later than the +// given token, then a response containing only an up-to-date token, to use in +// the next request. +type WriteRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + // This is only required in the first message. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The ID of the write stream to resume. + // This may only be set in the first message. When left empty, a new write + // stream will be created. + StreamId string `protobuf:"bytes,2,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // The writes to apply. + // + // Always executed atomically and in order. + // This must be empty on the first request. + // This may be empty on the last request. + // This must not be empty on all other requests. + Writes []*Write `protobuf:"bytes,3,rep,name=writes,proto3" json:"writes,omitempty"` + // A stream token that was previously sent by the server. + // + // The client should set this field to the token from the most recent + // [WriteResponse][google.firestore.v1.WriteResponse] it has received. This acknowledges that the client has + // received responses up to this token. After sending this token, earlier + // tokens may not be used anymore. + // + // The server may close the stream if there are too many unacknowledged + // responses. + // + // Leave this field unset when creating a new stream. 
To resume a stream at + // a specific point, set this field and the `stream_id` field. + // + // Leave this field unset when creating a new stream. + StreamToken []byte `protobuf:"bytes,4,opt,name=stream_token,json=streamToken,proto3" json:"stream_token,omitempty"` + // Labels associated with this write request. + Labels map[string]string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteRequest) Reset() { *m = WriteRequest{} } +func (m *WriteRequest) String() string { return proto.CompactTextString(m) } +func (*WriteRequest) ProtoMessage() {} +func (*WriteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{15} +} +func (m *WriteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteRequest.Unmarshal(m, b) +} +func (m *WriteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteRequest.Marshal(b, m, deterministic) +} +func (dst *WriteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteRequest.Merge(dst, src) +} +func (m *WriteRequest) XXX_Size() int { + return xxx_messageInfo_WriteRequest.Size(m) +} +func (m *WriteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WriteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteRequest proto.InternalMessageInfo + +func (m *WriteRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *WriteRequest) GetStreamId() string { + if m != nil { + return m.StreamId + } + return "" +} + +func (m *WriteRequest) GetWrites() []*Write { + if m != nil { + return m.Writes + } + return nil +} + +func (m *WriteRequest) GetStreamToken() []byte { + if m != nil { + return m.StreamToken + } + return nil +} + +func (m *WriteRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// The response for [Firestore.Write][google.firestore.v1.Firestore.Write]. +type WriteResponse struct { + // The ID of the stream. + // Only set on the first message, when a new stream was created. + StreamId string `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // A token that represents the position of this response in the stream. + // This can be used by a client to resume the stream at this point. + // + // This field is always set. + StreamToken []byte `protobuf:"bytes,2,opt,name=stream_token,json=streamToken,proto3" json:"stream_token,omitempty"` + // The result of applying the writes. + // + // This i-th write result corresponds to the i-th write in the + // request. + WriteResults []*WriteResult `protobuf:"bytes,3,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"` + // The time at which the commit occurred. 
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteResponse) Reset() { *m = WriteResponse{} } +func (m *WriteResponse) String() string { return proto.CompactTextString(m) } +func (*WriteResponse) ProtoMessage() {} +func (*WriteResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{16} +} +func (m *WriteResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteResponse.Unmarshal(m, b) +} +func (m *WriteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteResponse.Marshal(b, m, deterministic) +} +func (dst *WriteResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteResponse.Merge(dst, src) +} +func (m *WriteResponse) XXX_Size() int { + return xxx_messageInfo_WriteResponse.Size(m) +} +func (m *WriteResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WriteResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteResponse proto.InternalMessageInfo + +func (m *WriteResponse) GetStreamId() string { + if m != nil { + return m.StreamId + } + return "" +} + +func (m *WriteResponse) GetStreamToken() []byte { + if m != nil { + return m.StreamToken + } + return nil +} + +func (m *WriteResponse) GetWriteResults() []*WriteResult { + if m != nil { + return m.WriteResults + } + return nil +} + +func (m *WriteResponse) GetCommitTime() *timestamp.Timestamp { + if m != nil { + return m.CommitTime + } + return nil +} + +// A request for [Firestore.Listen][google.firestore.v1.Firestore.Listen] +type ListenRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The supported target changes. + // + // Types that are valid to be assigned to TargetChange: + // *ListenRequest_AddTarget + // *ListenRequest_RemoveTarget + TargetChange isListenRequest_TargetChange `protobuf_oneof:"target_change"` + // Labels associated with this target change. 
+ Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListenRequest) Reset() { *m = ListenRequest{} } +func (m *ListenRequest) String() string { return proto.CompactTextString(m) } +func (*ListenRequest) ProtoMessage() {} +func (*ListenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{17} +} +func (m *ListenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListenRequest.Unmarshal(m, b) +} +func (m *ListenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListenRequest.Marshal(b, m, deterministic) +} +func (dst *ListenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListenRequest.Merge(dst, src) +} +func (m *ListenRequest) XXX_Size() int { + return xxx_messageInfo_ListenRequest.Size(m) +} +func (m *ListenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListenRequest proto.InternalMessageInfo + +func (m *ListenRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +type isListenRequest_TargetChange interface { + isListenRequest_TargetChange() +} + +type ListenRequest_AddTarget struct { + AddTarget *Target `protobuf:"bytes,2,opt,name=add_target,json=addTarget,proto3,oneof"` +} + +type ListenRequest_RemoveTarget struct { + RemoveTarget int32 `protobuf:"varint,3,opt,name=remove_target,json=removeTarget,proto3,oneof"` +} + +func (*ListenRequest_AddTarget) isListenRequest_TargetChange() {} + +func (*ListenRequest_RemoveTarget) isListenRequest_TargetChange() {} + +func (m *ListenRequest) GetTargetChange() isListenRequest_TargetChange { + if m != nil { + return m.TargetChange + } + return nil +} + +func (m *ListenRequest) GetAddTarget() *Target { + if x, ok := m.GetTargetChange().(*ListenRequest_AddTarget); ok { + return x.AddTarget + } + return nil +} + +func (m *ListenRequest) GetRemoveTarget() int32 { + if x, ok := m.GetTargetChange().(*ListenRequest_RemoveTarget); ok { + return x.RemoveTarget + } + return 0 +} + +func (m *ListenRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListenRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListenRequest_OneofMarshaler, _ListenRequest_OneofUnmarshaler, _ListenRequest_OneofSizer, []interface{}{ + (*ListenRequest_AddTarget)(nil), + (*ListenRequest_RemoveTarget)(nil), + } +} + +func _ListenRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListenRequest) + // target_change + switch x := m.TargetChange.(type) { + case *ListenRequest_AddTarget: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AddTarget); err != nil { + return err + } + case *ListenRequest_RemoveTarget: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RemoveTarget)) + case nil: + default: + return fmt.Errorf("ListenRequest.TargetChange has unexpected type %T", x) + } + return nil +} + +func _ListenRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListenRequest) + switch tag { + case 2: // target_change.add_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target) + err := b.DecodeMessage(msg) + m.TargetChange = &ListenRequest_AddTarget{msg} + return true, err + case 3: // target_change.remove_target + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TargetChange = &ListenRequest_RemoveTarget{int32(x)} + return true, err + default: + return false, nil + } +} + +func _ListenRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListenRequest) + // target_change + switch x := m.TargetChange.(type) { + case *ListenRequest_AddTarget: + s := proto.Size(x.AddTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenRequest_RemoveTarget: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RemoveTarget)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.Listen][google.firestore.v1.Firestore.Listen]. +type ListenResponse struct { + // The supported responses. 
+ // + // Types that are valid to be assigned to ResponseType: + // *ListenResponse_TargetChange + // *ListenResponse_DocumentChange + // *ListenResponse_DocumentDelete + // *ListenResponse_DocumentRemove + // *ListenResponse_Filter + ResponseType isListenResponse_ResponseType `protobuf_oneof:"response_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListenResponse) Reset() { *m = ListenResponse{} } +func (m *ListenResponse) String() string { return proto.CompactTextString(m) } +func (*ListenResponse) ProtoMessage() {} +func (*ListenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{18} +} +func (m *ListenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListenResponse.Unmarshal(m, b) +} +func (m *ListenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListenResponse.Marshal(b, m, deterministic) +} +func (dst *ListenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListenResponse.Merge(dst, src) +} +func (m *ListenResponse) XXX_Size() int { + return xxx_messageInfo_ListenResponse.Size(m) +} +func (m *ListenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListenResponse proto.InternalMessageInfo + +type isListenResponse_ResponseType interface { + isListenResponse_ResponseType() +} + +type ListenResponse_TargetChange struct { + TargetChange *TargetChange `protobuf:"bytes,2,opt,name=target_change,json=targetChange,proto3,oneof"` +} + +type ListenResponse_DocumentChange struct { + DocumentChange *DocumentChange `protobuf:"bytes,3,opt,name=document_change,json=documentChange,proto3,oneof"` +} + +type ListenResponse_DocumentDelete struct { + DocumentDelete *DocumentDelete `protobuf:"bytes,4,opt,name=document_delete,json=documentDelete,proto3,oneof"` +} + +type ListenResponse_DocumentRemove struct { + DocumentRemove *DocumentRemove `protobuf:"bytes,6,opt,name=document_remove,json=documentRemove,proto3,oneof"` +} + +type ListenResponse_Filter struct { + Filter *ExistenceFilter `protobuf:"bytes,5,opt,name=filter,proto3,oneof"` +} + +func (*ListenResponse_TargetChange) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentChange) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentDelete) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentRemove) isListenResponse_ResponseType() {} + +func (*ListenResponse_Filter) isListenResponse_ResponseType() {} + +func (m *ListenResponse) GetResponseType() isListenResponse_ResponseType { + if m != nil { + return m.ResponseType + } + return nil +} + +func (m *ListenResponse) GetTargetChange() *TargetChange { + if x, ok := m.GetResponseType().(*ListenResponse_TargetChange); ok { + return x.TargetChange + } + return nil +} + +func (m *ListenResponse) GetDocumentChange() *DocumentChange { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentChange); ok { + return x.DocumentChange + } + return nil +} + +func (m *ListenResponse) GetDocumentDelete() *DocumentDelete { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentDelete); ok { + return x.DocumentDelete + } + return nil +} + +func (m *ListenResponse) GetDocumentRemove() *DocumentRemove { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentRemove); ok { + return x.DocumentRemove + } + return nil +} + +func (m *ListenResponse) GetFilter() *ExistenceFilter { + if x, ok := 
m.GetResponseType().(*ListenResponse_Filter); ok { + return x.Filter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ListenResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListenResponse_OneofMarshaler, _ListenResponse_OneofUnmarshaler, _ListenResponse_OneofSizer, []interface{}{ + (*ListenResponse_TargetChange)(nil), + (*ListenResponse_DocumentChange)(nil), + (*ListenResponse_DocumentDelete)(nil), + (*ListenResponse_DocumentRemove)(nil), + (*ListenResponse_Filter)(nil), + } +} + +func _ListenResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListenResponse) + // response_type + switch x := m.ResponseType.(type) { + case *ListenResponse_TargetChange: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetChange); err != nil { + return err + } + case *ListenResponse_DocumentChange: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentChange); err != nil { + return err + } + case *ListenResponse_DocumentDelete: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentDelete); err != nil { + return err + } + case *ListenResponse_DocumentRemove: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentRemove); err != nil { + return err + } + case *ListenResponse_Filter: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Filter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ListenResponse.ResponseType has unexpected type %T", x) + } + return nil +} + +func _ListenResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListenResponse) + switch tag { + case 2: // response_type.target_change + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TargetChange) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_TargetChange{msg} + return true, err + case 3: // response_type.document_change + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentChange) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentChange{msg} + return true, err + case 4: // response_type.document_delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentDelete) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentDelete{msg} + return true, err + case 6: // response_type.document_remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentRemove) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentRemove{msg} + return true, err + case 5: // response_type.filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExistenceFilter) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_Filter{msg} + return true, err + default: + return false, nil + } +} + +func _ListenResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListenResponse) + // response_type + switch x := m.ResponseType.(type) { + case *ListenResponse_TargetChange: + s := proto.Size(x.TargetChange) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentChange: + s := proto.Size(x.DocumentChange) + n 
+= 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentDelete: + s := proto.Size(x.DocumentDelete) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentRemove: + s := proto.Size(x.DocumentRemove) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_Filter: + s := proto.Size(x.Filter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A specification of a set of documents to listen to. +type Target struct { + // The type of target to listen to. + // + // Types that are valid to be assigned to TargetType: + // *Target_Query + // *Target_Documents + TargetType isTarget_TargetType `protobuf_oneof:"target_type"` + // When to start listening. + // + // If not specified, all matching Documents are returned before any + // subsequent changes. + // + // Types that are valid to be assigned to ResumeType: + // *Target_ResumeToken + // *Target_ReadTime + ResumeType isTarget_ResumeType `protobuf_oneof:"resume_type"` + // A client provided target ID. + // + // If not set, the server will assign an ID for the target. + // + // Used for resuming a target without changing IDs. The IDs can either be + // client-assigned or be server-assigned in a previous stream. All targets + // with client provided IDs must be added before adding a target that needs + // a server-assigned id. + TargetId int32 `protobuf:"varint,5,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // If the target should be removed once it is current and consistent. + Once bool `protobuf:"varint,6,opt,name=once,proto3" json:"once,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target) Reset() { *m = Target{} } +func (m *Target) String() string { return proto.CompactTextString(m) } +func (*Target) ProtoMessage() {} +func (*Target) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{19} +} +func (m *Target) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target.Unmarshal(m, b) +} +func (m *Target) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target.Marshal(b, m, deterministic) +} +func (dst *Target) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target.Merge(dst, src) +} +func (m *Target) XXX_Size() int { + return xxx_messageInfo_Target.Size(m) +} +func (m *Target) XXX_DiscardUnknown() { + xxx_messageInfo_Target.DiscardUnknown(m) +} + +var xxx_messageInfo_Target proto.InternalMessageInfo + +type isTarget_TargetType interface { + isTarget_TargetType() +} + +type Target_Query struct { + Query *Target_QueryTarget `protobuf:"bytes,2,opt,name=query,proto3,oneof"` +} + +type Target_Documents struct { + Documents *Target_DocumentsTarget `protobuf:"bytes,3,opt,name=documents,proto3,oneof"` +} + +func (*Target_Query) isTarget_TargetType() {} + +func (*Target_Documents) isTarget_TargetType() {} + +func (m *Target) GetTargetType() isTarget_TargetType { + if m != nil { + return m.TargetType + } + return nil +} + +func (m *Target) GetQuery() *Target_QueryTarget { + if x, ok := m.GetTargetType().(*Target_Query); ok { + return x.Query + } + return nil +} + +func (m *Target) GetDocuments() *Target_DocumentsTarget { + if x, ok := m.GetTargetType().(*Target_Documents); ok { + return x.Documents + } + return nil +} 
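As an illustrative aside (not part of the vendored file or of this patch): the oneof fields above are set by wrapping the value in the generated single-field structs (Target_Query, Target_Documents, Target_ResumeToken, Target_ReadTime). A minimal sketch of building a Target over a fixed set of documents, assuming the usual genproto import path for this package:

// Illustrative sketch only -- not part of the vendored file or of this patch.
package example

import (
	firestorepb "google.golang.org/genproto/googleapis/firestore/v1" // assumed import path
)

// exampleTarget is a hypothetical helper that builds a Target watching two
// fixed documents. The oneof target_type is populated by wrapping the
// Target_DocumentsTarget value in the generated Target_Documents struct.
func exampleTarget() *firestorepb.Target {
	return &firestorepb.Target{
		TargetType: &firestorepb.Target_Documents{
			Documents: &firestorepb.Target_DocumentsTarget{
				Documents: []string{
					"projects/my-project/databases/my-database/documents/rooms/room-a",
					"projects/my-project/databases/my-database/documents/rooms/room-b",
				},
			},
		},
		// Client-provided target ID; when left at zero the server assigns one.
		TargetId: 1,
	}
}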
+ +type isTarget_ResumeType interface { + isTarget_ResumeType() +} + +type Target_ResumeToken struct { + ResumeToken []byte `protobuf:"bytes,4,opt,name=resume_token,json=resumeToken,proto3,oneof"` +} + +type Target_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*Target_ResumeToken) isTarget_ResumeType() {} + +func (*Target_ReadTime) isTarget_ResumeType() {} + +func (m *Target) GetResumeType() isTarget_ResumeType { + if m != nil { + return m.ResumeType + } + return nil +} + +func (m *Target) GetResumeToken() []byte { + if x, ok := m.GetResumeType().(*Target_ResumeToken); ok { + return x.ResumeToken + } + return nil +} + +func (m *Target) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetResumeType().(*Target_ReadTime); ok { + return x.ReadTime + } + return nil +} + +func (m *Target) GetTargetId() int32 { + if m != nil { + return m.TargetId + } + return 0 +} + +func (m *Target) GetOnce() bool { + if m != nil { + return m.Once + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Target) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Target_OneofMarshaler, _Target_OneofUnmarshaler, _Target_OneofSizer, []interface{}{ + (*Target_Query)(nil), + (*Target_Documents)(nil), + (*Target_ResumeToken)(nil), + (*Target_ReadTime)(nil), + } +} + +func _Target_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Target) + // target_type + switch x := m.TargetType.(type) { + case *Target_Query: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Query); err != nil { + return err + } + case *Target_Documents: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Documents); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target.TargetType has unexpected type %T", x) + } + // resume_type + switch x := m.ResumeType.(type) { + case *Target_ResumeToken: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ResumeToken) + case *Target_ReadTime: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target.ResumeType has unexpected type %T", x) + } + return nil +} + +func _Target_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Target) + switch tag { + case 2: // target_type.query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target_QueryTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Target_Query{msg} + return true, err + case 3: // target_type.documents + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target_DocumentsTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Target_Documents{msg} + return true, err + case 4: // resume_type.resume_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ResumeType = &Target_ResumeToken{x} + return true, err + case 11: // resume_type.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ResumeType = &Target_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func 
_Target_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Target) + // target_type + switch x := m.TargetType.(type) { + case *Target_Query: + s := proto.Size(x.Query) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Target_Documents: + s := proto.Size(x.Documents) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // resume_type + switch x := m.ResumeType.(type) { + case *Target_ResumeToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ResumeToken))) + n += len(x.ResumeToken) + case *Target_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A target specified by a set of documents names. +type Target_DocumentsTarget struct { + // The names of the documents to retrieve. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // The request will fail if any of the document is not a child resource of + // the given `database`. Duplicate names will be elided. + Documents []string `protobuf:"bytes,2,rep,name=documents,proto3" json:"documents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target_DocumentsTarget) Reset() { *m = Target_DocumentsTarget{} } +func (m *Target_DocumentsTarget) String() string { return proto.CompactTextString(m) } +func (*Target_DocumentsTarget) ProtoMessage() {} +func (*Target_DocumentsTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{19, 0} +} +func (m *Target_DocumentsTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target_DocumentsTarget.Unmarshal(m, b) +} +func (m *Target_DocumentsTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target_DocumentsTarget.Marshal(b, m, deterministic) +} +func (dst *Target_DocumentsTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target_DocumentsTarget.Merge(dst, src) +} +func (m *Target_DocumentsTarget) XXX_Size() int { + return xxx_messageInfo_Target_DocumentsTarget.Size(m) +} +func (m *Target_DocumentsTarget) XXX_DiscardUnknown() { + xxx_messageInfo_Target_DocumentsTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_Target_DocumentsTarget proto.InternalMessageInfo + +func (m *Target_DocumentsTarget) GetDocuments() []string { + if m != nil { + return m.Documents + } + return nil +} + +// A target specified by a query. +type Target_QueryTarget struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The query to run. 
+ // + // Types that are valid to be assigned to QueryType: + // *Target_QueryTarget_StructuredQuery + QueryType isTarget_QueryTarget_QueryType `protobuf_oneof:"query_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target_QueryTarget) Reset() { *m = Target_QueryTarget{} } +func (m *Target_QueryTarget) String() string { return proto.CompactTextString(m) } +func (*Target_QueryTarget) ProtoMessage() {} +func (*Target_QueryTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{19, 1} +} +func (m *Target_QueryTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target_QueryTarget.Unmarshal(m, b) +} +func (m *Target_QueryTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target_QueryTarget.Marshal(b, m, deterministic) +} +func (dst *Target_QueryTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target_QueryTarget.Merge(dst, src) +} +func (m *Target_QueryTarget) XXX_Size() int { + return xxx_messageInfo_Target_QueryTarget.Size(m) +} +func (m *Target_QueryTarget) XXX_DiscardUnknown() { + xxx_messageInfo_Target_QueryTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_Target_QueryTarget proto.InternalMessageInfo + +func (m *Target_QueryTarget) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isTarget_QueryTarget_QueryType interface { + isTarget_QueryTarget_QueryType() +} + +type Target_QueryTarget_StructuredQuery struct { + StructuredQuery *StructuredQuery `protobuf:"bytes,2,opt,name=structured_query,json=structuredQuery,proto3,oneof"` +} + +func (*Target_QueryTarget_StructuredQuery) isTarget_QueryTarget_QueryType() {} + +func (m *Target_QueryTarget) GetQueryType() isTarget_QueryTarget_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *Target_QueryTarget) GetStructuredQuery() *StructuredQuery { + if x, ok := m.GetQueryType().(*Target_QueryTarget_StructuredQuery); ok { + return x.StructuredQuery + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Target_QueryTarget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Target_QueryTarget_OneofMarshaler, _Target_QueryTarget_OneofUnmarshaler, _Target_QueryTarget_OneofSizer, []interface{}{ + (*Target_QueryTarget_StructuredQuery)(nil), + } +} + +func _Target_QueryTarget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Target_QueryTarget) + // query_type + switch x := m.QueryType.(type) { + case *Target_QueryTarget_StructuredQuery: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target_QueryTarget.QueryType has unexpected type %T", x) + } + return nil +} + +func _Target_QueryTarget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Target_QueryTarget) + switch tag { + case 2: // query_type.structured_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery) + err := b.DecodeMessage(msg) + m.QueryType = &Target_QueryTarget_StructuredQuery{msg} + return true, err + default: + return false, nil + } +} + +func _Target_QueryTarget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Target_QueryTarget) + // query_type + switch x := m.QueryType.(type) { + case *Target_QueryTarget_StructuredQuery: + s := proto.Size(x.StructuredQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Targets being watched have changed. +type TargetChange struct { + // The type of change that occurred. + TargetChangeType TargetChange_TargetChangeType `protobuf:"varint,1,opt,name=target_change_type,json=targetChangeType,proto3,enum=google.firestore.v1.TargetChange_TargetChangeType" json:"target_change_type,omitempty"` + // The target IDs of targets that have changed. + // + // If empty, the change applies to all targets. + // + // For `target_change_type=ADD`, the order of the target IDs matches the order + // of the requests to add the targets. This allows clients to unambiguously + // associate server-assigned target IDs with added targets. + // + // For other states, the order of the target IDs is not defined. + TargetIds []int32 `protobuf:"varint,2,rep,packed,name=target_ids,json=targetIds,proto3" json:"target_ids,omitempty"` + // The error that resulted in this change, if applicable. + Cause *status.Status `protobuf:"bytes,3,opt,name=cause,proto3" json:"cause,omitempty"` + // A token that can be used to resume the stream for the given `target_ids`, + // or all targets if `target_ids` is empty. + // + // Not set on every target change. + ResumeToken []byte `protobuf:"bytes,4,opt,name=resume_token,json=resumeToken,proto3" json:"resume_token,omitempty"` + // The consistent `read_time` for the given `target_ids` (omitted when the + // target_ids are not at a consistent snapshot). + // + // The stream is guaranteed to send a `read_time` with `target_ids` empty + // whenever the entire stream reaches a new consistent snapshot. ADD, + // CURRENT, and RESET messages are guaranteed to (eventually) result in a + // new consistent snapshot (while NO_CHANGE and REMOVE messages are not). + // + // For a given stream, `read_time` is guaranteed to be monotonically + // increasing. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetChange) Reset() { *m = TargetChange{} } +func (m *TargetChange) String() string { return proto.CompactTextString(m) } +func (*TargetChange) ProtoMessage() {} +func (*TargetChange) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{20} +} +func (m *TargetChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetChange.Unmarshal(m, b) +} +func (m *TargetChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetChange.Marshal(b, m, deterministic) +} +func (dst *TargetChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetChange.Merge(dst, src) +} +func (m *TargetChange) XXX_Size() int { + return xxx_messageInfo_TargetChange.Size(m) +} +func (m *TargetChange) XXX_DiscardUnknown() { + xxx_messageInfo_TargetChange.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetChange proto.InternalMessageInfo + +func (m *TargetChange) GetTargetChangeType() TargetChange_TargetChangeType { + if m != nil { + return m.TargetChangeType + } + return TargetChange_NO_CHANGE +} + +func (m *TargetChange) GetTargetIds() []int32 { + if m != nil { + return m.TargetIds + } + return nil +} + +func (m *TargetChange) GetCause() *status.Status { + if m != nil { + return m.Cause + } + return nil +} + +func (m *TargetChange) GetResumeToken() []byte { + if m != nil { + return m.ResumeToken + } + return nil +} + +func (m *TargetChange) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// The request for [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. +type ListCollectionIdsRequest struct { + // The parent document. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of results to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token. Must be a value from + // [ListCollectionIdsResponse][google.firestore.v1.ListCollectionIdsResponse]. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCollectionIdsRequest) Reset() { *m = ListCollectionIdsRequest{} } +func (m *ListCollectionIdsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCollectionIdsRequest) ProtoMessage() {} +func (*ListCollectionIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{21} +} +func (m *ListCollectionIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCollectionIdsRequest.Unmarshal(m, b) +} +func (m *ListCollectionIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCollectionIdsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCollectionIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCollectionIdsRequest.Merge(dst, src) +} +func (m *ListCollectionIdsRequest) XXX_Size() int { + return xxx_messageInfo_ListCollectionIdsRequest.Size(m) +} +func (m *ListCollectionIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCollectionIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCollectionIdsRequest proto.InternalMessageInfo + +func (m *ListCollectionIdsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListCollectionIdsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListCollectionIdsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response from [Firestore.ListCollectionIds][google.firestore.v1.Firestore.ListCollectionIds]. +type ListCollectionIdsResponse struct { + // The collection ids. + CollectionIds []string `protobuf:"bytes,1,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // A page token that may be used to continue the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCollectionIdsResponse) Reset() { *m = ListCollectionIdsResponse{} } +func (m *ListCollectionIdsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCollectionIdsResponse) ProtoMessage() {} +func (*ListCollectionIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_e04b661bc0002f16, []int{22} +} +func (m *ListCollectionIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCollectionIdsResponse.Unmarshal(m, b) +} +func (m *ListCollectionIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCollectionIdsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCollectionIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCollectionIdsResponse.Merge(dst, src) +} +func (m *ListCollectionIdsResponse) XXX_Size() int { + return xxx_messageInfo_ListCollectionIdsResponse.Size(m) +} +func (m *ListCollectionIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCollectionIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCollectionIdsResponse proto.InternalMessageInfo + +func (m *ListCollectionIdsResponse) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ListCollectionIdsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*GetDocumentRequest)(nil), "google.firestore.v1.GetDocumentRequest") + proto.RegisterType((*ListDocumentsRequest)(nil), "google.firestore.v1.ListDocumentsRequest") + proto.RegisterType((*ListDocumentsResponse)(nil), "google.firestore.v1.ListDocumentsResponse") + proto.RegisterType((*CreateDocumentRequest)(nil), "google.firestore.v1.CreateDocumentRequest") + proto.RegisterType((*UpdateDocumentRequest)(nil), "google.firestore.v1.UpdateDocumentRequest") + proto.RegisterType((*DeleteDocumentRequest)(nil), "google.firestore.v1.DeleteDocumentRequest") + proto.RegisterType((*BatchGetDocumentsRequest)(nil), "google.firestore.v1.BatchGetDocumentsRequest") + proto.RegisterType((*BatchGetDocumentsResponse)(nil), "google.firestore.v1.BatchGetDocumentsResponse") + proto.RegisterType((*BeginTransactionRequest)(nil), "google.firestore.v1.BeginTransactionRequest") + proto.RegisterType((*BeginTransactionResponse)(nil), "google.firestore.v1.BeginTransactionResponse") + proto.RegisterType((*CommitRequest)(nil), "google.firestore.v1.CommitRequest") + proto.RegisterType((*CommitResponse)(nil), "google.firestore.v1.CommitResponse") + proto.RegisterType((*RollbackRequest)(nil), "google.firestore.v1.RollbackRequest") + proto.RegisterType((*RunQueryRequest)(nil), "google.firestore.v1.RunQueryRequest") + proto.RegisterType((*RunQueryResponse)(nil), "google.firestore.v1.RunQueryResponse") + proto.RegisterType((*WriteRequest)(nil), "google.firestore.v1.WriteRequest") + proto.RegisterMapType((map[string]string)(nil), "google.firestore.v1.WriteRequest.LabelsEntry") + proto.RegisterType((*WriteResponse)(nil), "google.firestore.v1.WriteResponse") + proto.RegisterType((*ListenRequest)(nil), "google.firestore.v1.ListenRequest") + proto.RegisterMapType((map[string]string)(nil), "google.firestore.v1.ListenRequest.LabelsEntry") + proto.RegisterType((*ListenResponse)(nil), 
"google.firestore.v1.ListenResponse") + proto.RegisterType((*Target)(nil), "google.firestore.v1.Target") + proto.RegisterType((*Target_DocumentsTarget)(nil), "google.firestore.v1.Target.DocumentsTarget") + proto.RegisterType((*Target_QueryTarget)(nil), "google.firestore.v1.Target.QueryTarget") + proto.RegisterType((*TargetChange)(nil), "google.firestore.v1.TargetChange") + proto.RegisterType((*ListCollectionIdsRequest)(nil), "google.firestore.v1.ListCollectionIdsRequest") + proto.RegisterType((*ListCollectionIdsResponse)(nil), "google.firestore.v1.ListCollectionIdsResponse") + proto.RegisterEnum("google.firestore.v1.TargetChange_TargetChangeType", TargetChange_TargetChangeType_name, TargetChange_TargetChangeType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FirestoreClient is the client API for Firestore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FirestoreClient interface { + // Gets a single document. + GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Lists documents. + ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) + // Creates a new document. + CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Updates or inserts a document. + UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Deletes a document. + DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets multiple documents. + // + // Documents returned by this method are not guaranteed to be returned in the + // same order that they were requested. + BatchGetDocuments(ctx context.Context, in *BatchGetDocumentsRequest, opts ...grpc.CallOption) (Firestore_BatchGetDocumentsClient, error) + // Starts a new transaction. + BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) + // Commits a transaction, while optionally updating documents. + Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Runs a query. + RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (Firestore_RunQueryClient, error) + // Streams batches of document updates and deletes, in order. + Write(ctx context.Context, opts ...grpc.CallOption) (Firestore_WriteClient, error) + // Listens to changes. + Listen(ctx context.Context, opts ...grpc.CallOption) (Firestore_ListenClient, error) + // Lists all the collection IDs underneath a document. 
+ ListCollectionIds(ctx context.Context, in *ListCollectionIdsRequest, opts ...grpc.CallOption) (*ListCollectionIdsResponse, error) +} + +type firestoreClient struct { + cc *grpc.ClientConn +} + +func NewFirestoreClient(cc *grpc.ClientConn) FirestoreClient { + return &firestoreClient{cc} +} + +func (c *firestoreClient) GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/GetDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) { + out := new(ListDocumentsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/ListDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/CreateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/UpdateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/DeleteDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) BatchGetDocuments(ctx context.Context, in *BatchGetDocumentsRequest, opts ...grpc.CallOption) (Firestore_BatchGetDocumentsClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[0], "/google.firestore.v1.Firestore/BatchGetDocuments", opts...) + if err != nil { + return nil, err + } + x := &firestoreBatchGetDocumentsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Firestore_BatchGetDocumentsClient interface { + Recv() (*BatchGetDocumentsResponse, error) + grpc.ClientStream +} + +type firestoreBatchGetDocumentsClient struct { + grpc.ClientStream +} + +func (x *firestoreBatchGetDocumentsClient) Recv() (*BatchGetDocumentsResponse, error) { + m := new(BatchGetDocumentsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) { + out := new(BeginTransactionResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/BeginTransaction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) { + out := new(CommitResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/Commit", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/Rollback", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (Firestore_RunQueryClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[1], "/google.firestore.v1.Firestore/RunQuery", opts...) + if err != nil { + return nil, err + } + x := &firestoreRunQueryClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Firestore_RunQueryClient interface { + Recv() (*RunQueryResponse, error) + grpc.ClientStream +} + +type firestoreRunQueryClient struct { + grpc.ClientStream +} + +func (x *firestoreRunQueryClient) Recv() (*RunQueryResponse, error) { + m := new(RunQueryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) Write(ctx context.Context, opts ...grpc.CallOption) (Firestore_WriteClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[2], "/google.firestore.v1.Firestore/Write", opts...) + if err != nil { + return nil, err + } + x := &firestoreWriteClient{stream} + return x, nil +} + +type Firestore_WriteClient interface { + Send(*WriteRequest) error + Recv() (*WriteResponse, error) + grpc.ClientStream +} + +type firestoreWriteClient struct { + grpc.ClientStream +} + +func (x *firestoreWriteClient) Send(m *WriteRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *firestoreWriteClient) Recv() (*WriteResponse, error) { + m := new(WriteResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) Listen(ctx context.Context, opts ...grpc.CallOption) (Firestore_ListenClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[3], "/google.firestore.v1.Firestore/Listen", opts...) + if err != nil { + return nil, err + } + x := &firestoreListenClient{stream} + return x, nil +} + +type Firestore_ListenClient interface { + Send(*ListenRequest) error + Recv() (*ListenResponse, error) + grpc.ClientStream +} + +type firestoreListenClient struct { + grpc.ClientStream +} + +func (x *firestoreListenClient) Send(m *ListenRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *firestoreListenClient) Recv() (*ListenResponse, error) { + m := new(ListenResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) ListCollectionIds(ctx context.Context, in *ListCollectionIdsRequest, opts ...grpc.CallOption) (*ListCollectionIdsResponse, error) { + out := new(ListCollectionIdsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1.Firestore/ListCollectionIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FirestoreServer is the server API for Firestore service. +type FirestoreServer interface { + // Gets a single document. + GetDocument(context.Context, *GetDocumentRequest) (*Document, error) + // Lists documents. 
+ ListDocuments(context.Context, *ListDocumentsRequest) (*ListDocumentsResponse, error) + // Creates a new document. + CreateDocument(context.Context, *CreateDocumentRequest) (*Document, error) + // Updates or inserts a document. + UpdateDocument(context.Context, *UpdateDocumentRequest) (*Document, error) + // Deletes a document. + DeleteDocument(context.Context, *DeleteDocumentRequest) (*empty.Empty, error) + // Gets multiple documents. + // + // Documents returned by this method are not guaranteed to be returned in the + // same order that they were requested. + BatchGetDocuments(*BatchGetDocumentsRequest, Firestore_BatchGetDocumentsServer) error + // Starts a new transaction. + BeginTransaction(context.Context, *BeginTransactionRequest) (*BeginTransactionResponse, error) + // Commits a transaction, while optionally updating documents. + Commit(context.Context, *CommitRequest) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(context.Context, *RollbackRequest) (*empty.Empty, error) + // Runs a query. + RunQuery(*RunQueryRequest, Firestore_RunQueryServer) error + // Streams batches of document updates and deletes, in order. + Write(Firestore_WriteServer) error + // Listens to changes. + Listen(Firestore_ListenServer) error + // Lists all the collection IDs underneath a document. + ListCollectionIds(context.Context, *ListCollectionIdsRequest) (*ListCollectionIdsResponse, error) +} + +func RegisterFirestoreServer(s *grpc.Server, srv FirestoreServer) { + s.RegisterService(&_Firestore_serviceDesc, srv) +} + +func _Firestore_GetDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).GetDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/GetDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).GetDocument(ctx, req.(*GetDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_ListDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).ListDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/ListDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).ListDocuments(ctx, req.(*ListDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_CreateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).CreateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/CreateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).CreateDocument(ctx, req.(*CreateDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + 
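As an illustrative aside (not part of the vendored file or of this patch): a minimal sketch of consuming the server-streaming RunQuery RPC through the generated FirestoreClient above. The import path and the pre-built request value are assumptions; responses that carry no document are simply skipped.

// Illustrative sketch only -- not part of the vendored file or of this patch.
package example

import (
	"context"
	"io"

	firestorepb "google.golang.org/genproto/googleapis/firestore/v1" // assumed import path
	"google.golang.org/grpc"
)

// runQuery is a hypothetical helper that collects the documents returned by a
// RunQuery stream. Results are read with Recv until the stream ends with io.EOF.
func runQuery(ctx context.Context, conn *grpc.ClientConn, req *firestorepb.RunQueryRequest) ([]*firestorepb.Document, error) {
	client := firestorepb.NewFirestoreClient(conn)

	stream, err := client.RunQuery(ctx, req)
	if err != nil {
		return nil, err
	}

	var docs []*firestorepb.Document
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			// End of stream: all matching documents have been received.
			return docs, nil
		}
		if err != nil {
			return nil, err
		}
		// Some responses carry no document (e.g. transaction or progress
		// information only); those are skipped here.
		if d := resp.GetDocument(); d != nil {
			docs = append(docs, d)
		}
	}
}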
+func _Firestore_UpdateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).UpdateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/UpdateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).UpdateDocument(ctx, req.(*UpdateDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_DeleteDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).DeleteDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/DeleteDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).DeleteDocument(ctx, req.(*DeleteDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_BatchGetDocuments_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BatchGetDocumentsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(FirestoreServer).BatchGetDocuments(m, &firestoreBatchGetDocumentsServer{stream}) +} + +type Firestore_BatchGetDocumentsServer interface { + Send(*BatchGetDocumentsResponse) error + grpc.ServerStream +} + +type firestoreBatchGetDocumentsServer struct { + grpc.ServerStream +} + +func (x *firestoreBatchGetDocumentsServer) Send(m *BatchGetDocumentsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Firestore_BeginTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BeginTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).BeginTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/BeginTransaction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).BeginTransaction(ctx, req.(*BeginTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_Commit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).Commit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/Commit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).Commit(ctx, req.(*CommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_Rollback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { 
+ return srv.(FirestoreServer).Rollback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/Rollback", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).Rollback(ctx, req.(*RollbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_RunQuery_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(RunQueryRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(FirestoreServer).RunQuery(m, &firestoreRunQueryServer{stream}) +} + +type Firestore_RunQueryServer interface { + Send(*RunQueryResponse) error + grpc.ServerStream +} + +type firestoreRunQueryServer struct { + grpc.ServerStream +} + +func (x *firestoreRunQueryServer) Send(m *RunQueryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Firestore_Write_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FirestoreServer).Write(&firestoreWriteServer{stream}) +} + +type Firestore_WriteServer interface { + Send(*WriteResponse) error + Recv() (*WriteRequest, error) + grpc.ServerStream +} + +type firestoreWriteServer struct { + grpc.ServerStream +} + +func (x *firestoreWriteServer) Send(m *WriteResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *firestoreWriteServer) Recv() (*WriteRequest, error) { + m := new(WriteRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Firestore_Listen_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FirestoreServer).Listen(&firestoreListenServer{stream}) +} + +type Firestore_ListenServer interface { + Send(*ListenResponse) error + Recv() (*ListenRequest, error) + grpc.ServerStream +} + +type firestoreListenServer struct { + grpc.ServerStream +} + +func (x *firestoreListenServer) Send(m *ListenResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *firestoreListenServer) Recv() (*ListenRequest, error) { + m := new(ListenRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Firestore_ListCollectionIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCollectionIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).ListCollectionIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1.Firestore/ListCollectionIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).ListCollectionIds(ctx, req.(*ListCollectionIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Firestore_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firestore.v1.Firestore", + HandlerType: (*FirestoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDocument", + Handler: _Firestore_GetDocument_Handler, + }, + { + MethodName: "ListDocuments", + Handler: _Firestore_ListDocuments_Handler, + }, + { + MethodName: "CreateDocument", + Handler: _Firestore_CreateDocument_Handler, + }, + { + MethodName: "UpdateDocument", + Handler: _Firestore_UpdateDocument_Handler, + }, + { + MethodName: "DeleteDocument", + Handler: _Firestore_DeleteDocument_Handler, + }, + { + MethodName: "BeginTransaction", + Handler: _Firestore_BeginTransaction_Handler, + }, + { + 
MethodName: "Commit", + Handler: _Firestore_Commit_Handler, + }, + { + MethodName: "Rollback", + Handler: _Firestore_Rollback_Handler, + }, + { + MethodName: "ListCollectionIds", + Handler: _Firestore_ListCollectionIds_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "BatchGetDocuments", + Handler: _Firestore_BatchGetDocuments_Handler, + ServerStreams: true, + }, + { + StreamName: "RunQuery", + Handler: _Firestore_RunQuery_Handler, + ServerStreams: true, + }, + { + StreamName: "Write", + Handler: _Firestore_Write_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "Listen", + Handler: _Firestore_Listen_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/firestore/v1/firestore.proto", +} + +func init() { + proto.RegisterFile("google/firestore/v1/firestore.proto", fileDescriptor_firestore_e04b661bc0002f16) +} + +var fileDescriptor_firestore_e04b661bc0002f16 = []byte{ + // 2188 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcd, 0x6f, 0xdb, 0xc8, + 0x15, 0xd7, 0x50, 0x1f, 0x96, 0x9e, 0x3e, 0xac, 0x9d, 0xe6, 0x43, 0x51, 0x36, 0x58, 0x87, 0x6e, + 0x1a, 0x57, 0xed, 0x4a, 0xb1, 0x8b, 0xb4, 0x89, 0x9d, 0xed, 0x26, 0xb6, 0x95, 0xc8, 0x5d, 0xc7, + 0xc9, 0xd2, 0xde, 0x14, 0x58, 0x04, 0x50, 0x69, 0x72, 0xa2, 0x70, 0x2d, 0x91, 0x5c, 0x92, 0xb2, + 0xa3, 0xdd, 0xfa, 0xd0, 0x0f, 0xf4, 0x52, 0xb4, 0x7b, 0xe8, 0xa2, 0x28, 0xb0, 0x97, 0xa2, 0xe8, + 0x61, 0xd1, 0x4b, 0x81, 0xde, 0x0a, 0xf4, 0x2f, 0xe8, 0xad, 0x58, 0x74, 0xaf, 0xbd, 0xb4, 0xe8, + 0x1f, 0xd1, 0x43, 0x0b, 0xce, 0x0c, 0x29, 0x92, 0xa2, 0xbe, 0x9c, 0xa0, 0x37, 0xcd, 0xe3, 0x9b, + 0x37, 0xbf, 0xf7, 0x39, 0x6f, 0x9e, 0x60, 0xb9, 0x63, 0x18, 0x9d, 0x2e, 0x69, 0x3c, 0xd3, 0x2c, + 0x62, 0x3b, 0x86, 0x45, 0x1a, 0xc7, 0xab, 0xc3, 0x45, 0xdd, 0xb4, 0x0c, 0xc7, 0xc0, 0x5f, 0x61, + 0x4c, 0xf5, 0x21, 0xfd, 0x78, 0xb5, 0xfa, 0x3a, 0xdf, 0x29, 0x9b, 0x5a, 0x43, 0xd6, 0x75, 0xc3, + 0x91, 0x1d, 0xcd, 0xd0, 0x6d, 0xb6, 0xa5, 0xba, 0x14, 0x27, 0x57, 0x31, 0x7a, 0x3d, 0x43, 0xe7, + 0x1c, 0x62, 0x1c, 0x87, 0x6a, 0x28, 0xfd, 0x1e, 0xd1, 0x1d, 0xce, 0xf3, 0x46, 0x1c, 0xcf, 0x87, + 0x7d, 0x62, 0x0d, 0x26, 0x31, 0x9c, 0x58, 0x9a, 0xc3, 0xa1, 0x57, 0x2f, 0x73, 0x06, 0xba, 0x3a, + 0xec, 0x3f, 0x6b, 0x90, 0x9e, 0xe9, 0x44, 0x77, 0xfb, 0x1f, 0x1d, 0xad, 0x47, 0x6c, 0x47, 0xee, + 0x99, 0x9c, 0xe1, 0x22, 0x67, 0xb0, 0x4c, 0xa5, 0x61, 0x3b, 0xb2, 0xd3, 0xe7, 0xea, 0x89, 0x5f, + 0x22, 0xc0, 0x0f, 0x88, 0xb3, 0xcd, 0xe1, 0x4a, 0xe4, 0xc3, 0x3e, 0xb1, 0x1d, 0x8c, 0x21, 0xa5, + 0xcb, 0x3d, 0x52, 0x41, 0x4b, 0x68, 0x25, 0x27, 0xd1, 0xdf, 0xf8, 0x26, 0xa4, 0x7a, 0xb2, 0x7d, + 0x54, 0x11, 0x96, 0xd0, 0x4a, 0x7e, 0xed, 0x6a, 0x3d, 0xc6, 0x96, 0x75, 0x4f, 0xce, 0x43, 0xd9, + 0x3e, 0x92, 0x28, 0x3b, 0x16, 0x21, 0xef, 0x58, 0xb2, 0x6e, 0xcb, 0x8a, 0x6b, 0xd6, 0x4a, 0x72, + 0x09, 0xad, 0x14, 0x5a, 0x09, 0x29, 0x48, 0xc4, 0xb7, 0x21, 0x67, 0x11, 0x59, 0x6d, 0xbb, 0xb0, + 0x2b, 0x69, 0x2a, 0xbf, 0xea, 0xc9, 0xf7, 0x74, 0xaa, 0x1f, 0x78, 0x3a, 0xb5, 0x12, 0x52, 0xd6, + 0x65, 0x77, 0x09, 0x9b, 0x17, 0xe0, 0x9c, 0x62, 0xe8, 0xb6, 0x66, 0x3b, 0x44, 0x57, 0x06, 0x6d, + 0x9b, 0x74, 0x89, 0xe2, 0x18, 0x96, 0xf8, 0x1f, 0x01, 0xce, 0xed, 0x6a, 0xb6, 0xaf, 0x99, 0xed, + 0xa9, 0x76, 0x01, 0x32, 0xa6, 0x6c, 0x11, 0xdd, 0xe1, 0xca, 0xf1, 0x15, 0x5e, 0x86, 0xa2, 0x62, + 0x74, 0xdd, 0xdd, 0x9a, 0xa1, 0xb7, 0x35, 0x95, 0xea, 0x99, 0x93, 0x0a, 0x43, 0xe2, 0x8e, 0x8a, + 0x2f, 0x43, 0xce, 0x94, 0x3b, 0xa4, 0x6d, 0x6b, 0x1f, 0x11, 0xaa, 0x4a, 0x5a, 0xca, 0xba, 0x84, + 
0x7d, 0xed, 0x23, 0x82, 0xaf, 0x00, 0xd0, 0x8f, 0x8e, 0x71, 0x44, 0xf4, 0x4a, 0x8a, 0x6e, 0xa7, + 0xec, 0x07, 0x2e, 0x01, 0x5f, 0x82, 0xac, 0x61, 0xa9, 0xc4, 0x6a, 0x1f, 0x0e, 0x2a, 0x19, 0xfa, + 0x71, 0x81, 0xae, 0x37, 0x07, 0xbe, 0x69, 0x17, 0x5e, 0xca, 0xb4, 0xd9, 0xa9, 0xa6, 0x85, 0x79, + 0x4c, 0x8b, 0xaf, 0x42, 0xc1, 0x7e, 0x6e, 0x9c, 0xb4, 0x7b, 0x9a, 0x6d, 0x6b, 0x7a, 0xa7, 0x52, + 0x58, 0x42, 0x2b, 0x59, 0x29, 0xef, 0xd2, 0x1e, 0x32, 0xd2, 0x58, 0xeb, 0xff, 0x10, 0xce, 0x47, + 0x8c, 0x6f, 0x9b, 0x86, 0x6e, 0x13, 0xbc, 0x01, 0x39, 0x2f, 0x35, 0xec, 0x0a, 0x5a, 0x4a, 0xae, + 0xe4, 0xd7, 0xae, 0x4c, 0x54, 0x57, 0x1a, 0xf2, 0xe3, 0xaf, 0xc1, 0xa2, 0x4e, 0x5e, 0x38, 0xed, + 0x80, 0x95, 0x99, 0x93, 0x8a, 0x2e, 0xf9, 0xb1, 0x67, 0x69, 0xf1, 0xdf, 0x08, 0xce, 0x6f, 0x59, + 0x44, 0x76, 0x48, 0x34, 0xae, 0x5f, 0xca, 0xf9, 0x6f, 0x40, 0xde, 0xc3, 0xe2, 0xb2, 0x24, 0x29, + 0x0b, 0x78, 0xa4, 0x1d, 0x15, 0xdf, 0x86, 0xac, 0xb7, 0xa2, 0xee, 0x9f, 0xaa, 0x9b, 0xcf, 0xee, + 0x47, 0x40, 0x7a, 0xae, 0x08, 0x10, 0x3f, 0x13, 0xe0, 0xfc, 0x7b, 0xa6, 0x1a, 0xa3, 0x69, 0x10, + 0x0b, 0x9a, 0x0f, 0xcb, 0x26, 0xe4, 0xfb, 0x54, 0x66, 0x7b, 0xbe, 0x7c, 0x07, 0xb6, 0xcb, 0xfd, + 0xed, 0xeb, 0x93, 0x9c, 0x2f, 0xa2, 0x77, 0xa1, 0xac, 0xf4, 0x2d, 0xd7, 0x25, 0xed, 0x88, 0x25, + 0xe3, 0x45, 0x3c, 0xb6, 0x88, 0x62, 0xe8, 0xaa, 0xe6, 0x7a, 0x48, 0x5a, 0xe4, 0x5b, 0x3d, 0xb9, + 0xe2, 0x00, 0xce, 0x6f, 0x93, 0x2e, 0x19, 0x35, 0x4e, 0x5c, 0x79, 0x8b, 0x3b, 0x5a, 0x38, 0xf3, + 0xd1, 0x5f, 0x0a, 0x50, 0xd9, 0x94, 0x1d, 0xe5, 0x79, 0xa0, 0xb8, 0xfa, 0x25, 0xa8, 0x0a, 0x59, + 0x55, 0x76, 0xe4, 0x43, 0xd9, 0xf6, 0x20, 0xf8, 0x6b, 0xfc, 0x7a, 0x30, 0x41, 0x84, 0xa5, 0xa4, + 0x5b, 0x43, 0x86, 0x19, 0x70, 0x46, 0xb3, 0x46, 0x0a, 0x45, 0x2a, 0xae, 0x50, 0x48, 0x6e, 0x72, + 0x9d, 0xb4, 0x83, 0x7c, 0x2c, 0x18, 0xaf, 0xc7, 0x9e, 0x72, 0x30, 0xe4, 0x7b, 0x64, 0xd2, 0x0b, + 0xb3, 0x95, 0x90, 0x4a, 0x3a, 0x39, 0x39, 0x18, 0x57, 0x7c, 0x16, 0x5e, 0x49, 0x5d, 0xff, 0x02, + 0xc1, 0xa5, 0x18, 0xc3, 0xf2, 0xf2, 0x72, 0x13, 0xd2, 0xcf, 0x8c, 0xbe, 0xae, 0xce, 0x14, 0xf2, + 0xad, 0x84, 0xc4, 0xb8, 0x71, 0x15, 0x16, 0xbc, 0x22, 0x47, 0x13, 0xbf, 0x95, 0x90, 0x3c, 0x02, + 0x5e, 0x8a, 0xb9, 0xbf, 0xc2, 0x96, 0xfb, 0x4e, 0x50, 0xcb, 0xd4, 0x34, 0x2d, 0x03, 0x3a, 0x66, + 0x21, 0x63, 0x11, 0xbb, 0xdf, 0x75, 0xc4, 0x17, 0x70, 0x71, 0x93, 0x74, 0x34, 0x3d, 0x60, 0xbc, + 0x59, 0x82, 0xe5, 0x1e, 0x2c, 0x18, 0xcc, 0xf8, 0x3c, 0x54, 0x67, 0xf5, 0x95, 0xe4, 0xed, 0x13, + 0xef, 0x40, 0x65, 0xf4, 0x64, 0x6e, 0xcd, 0x88, 0xea, 0x68, 0x44, 0x75, 0xf1, 0x47, 0x08, 0x8a, + 0x5b, 0x46, 0xaf, 0xa7, 0x39, 0xb3, 0xc0, 0x5d, 0x83, 0x0c, 0x6d, 0x69, 0x58, 0x60, 0x07, 0xac, + 0x14, 0x42, 0xfb, 0x7d, 0x97, 0x45, 0xe2, 0x9c, 0xd3, 0xcd, 0x2f, 0x7e, 0x8a, 0xa0, 0xe4, 0x61, + 0xe0, 0xc0, 0x9b, 0x50, 0xa4, 0xdb, 0xdb, 0xcc, 0xbc, 0xde, 0x4d, 0xb3, 0x34, 0xe1, 0x3c, 0xca, + 0x28, 0x15, 0x4e, 0x86, 0x0b, 0x1b, 0x6f, 0x40, 0x5e, 0xa1, 0x82, 0x99, 0x6b, 0x85, 0xa9, 0xae, + 0x05, 0xc6, 0xee, 0x12, 0xc4, 0x47, 0xb0, 0x28, 0x19, 0xdd, 0xee, 0xa1, 0xac, 0x1c, 0xcd, 0x62, + 0x9b, 0x88, 0x9e, 0xc2, 0xa8, 0x9e, 0x7f, 0x17, 0x60, 0x51, 0xea, 0xeb, 0xef, 0xba, 0x5d, 0xe3, + 0xb4, 0xfb, 0xec, 0x5d, 0x28, 0xdb, 0x8e, 0xd5, 0x57, 0x9c, 0xbe, 0x45, 0xd4, 0x36, 0x6d, 0x34, + 0x39, 0xfc, 0xaf, 0xc6, 0xda, 0x60, 0xdf, 0x67, 0xa6, 0xe2, 0x5b, 0x09, 0x69, 0xd1, 0x0e, 0x93, + 0xa2, 0x35, 0x24, 0x4d, 0x6b, 0x08, 0x9a, 0x5a, 0x43, 0x32, 0xf3, 0xd5, 0x10, 0xf4, 0xb2, 0x35, + 0x04, 0x05, 0xf2, 0xab, 0x00, 0x40, 0x55, 0x6f, 0x3b, 0x03, 0x73, 0x7c, 0x45, 0xf9, 0x1b, 0x82, + 0xf2, 0xd0, 0xae, 0xf1, 
0xa1, 0x3f, 0xea, 0x8e, 0x97, 0xb9, 0x60, 0x43, 0x05, 0x23, 0x39, 0x7b, + 0xc1, 0xc0, 0xd7, 0x61, 0xd1, 0x3e, 0xd2, 0x4c, 0x93, 0xa8, 0x7e, 0x64, 0xa7, 0x68, 0x13, 0x5a, + 0xe2, 0x64, 0x1e, 0xb9, 0xe2, 0x6f, 0x05, 0x28, 0xf0, 0xb8, 0x9e, 0x1e, 0x7a, 0x97, 0x21, 0x67, + 0x3b, 0x16, 0x91, 0x7b, 0xc3, 0xc6, 0x27, 0xcb, 0x08, 0x3b, 0x6a, 0x20, 0x67, 0x93, 0x33, 0xe7, + 0xac, 0xdb, 0x38, 0x32, 0x81, 0xc3, 0x56, 0xb8, 0x20, 0xe5, 0x19, 0x8d, 0x35, 0xc3, 0x4d, 0xc8, + 0x74, 0xe5, 0x43, 0xd2, 0xb5, 0x2b, 0x69, 0x2a, 0xf6, 0xcd, 0x49, 0xa9, 0x49, 0x55, 0xa8, 0xef, + 0x52, 0xfe, 0xa6, 0xee, 0x58, 0x03, 0x89, 0x6f, 0xae, 0xde, 0x86, 0x7c, 0x80, 0x8c, 0xcb, 0x90, + 0x3c, 0x22, 0x03, 0xae, 0xa0, 0xfb, 0x13, 0x9f, 0x83, 0xf4, 0xb1, 0xdc, 0xed, 0x13, 0xae, 0x17, + 0x5b, 0xac, 0x0b, 0xb7, 0x90, 0x7b, 0x91, 0x14, 0xbd, 0xd4, 0x67, 0x3e, 0x0f, 0xd9, 0x01, 0x45, + 0xec, 0x10, 0xd5, 0x49, 0x88, 0xd3, 0x29, 0x52, 0x75, 0x92, 0xaf, 0xa2, 0xea, 0xa4, 0xe6, 0xaa, + 0x3a, 0xbf, 0x17, 0xa0, 0xb8, 0x4b, 0x43, 0x7c, 0x16, 0xcf, 0xdf, 0x01, 0x90, 0x55, 0xb5, 0xed, + 0xc8, 0x56, 0x87, 0x78, 0xdd, 0xce, 0xe5, 0xf8, 0x54, 0xa5, 0x2c, 0xad, 0x84, 0x94, 0x93, 0x55, + 0x95, 0x2d, 0xf0, 0x35, 0x28, 0x5a, 0xa4, 0x67, 0x1c, 0x13, 0x4f, 0x00, 0x7d, 0x10, 0xb5, 0x12, + 0x52, 0x81, 0x91, 0x39, 0xdb, 0x7d, 0xdf, 0xd5, 0x29, 0x6a, 0x8f, 0x7a, 0xec, 0x01, 0x21, 0xd0, + 0xaf, 0xd8, 0xd7, 0x9b, 0x8b, 0x50, 0x64, 0x10, 0xdb, 0xca, 0x73, 0x59, 0xef, 0x10, 0xf1, 0xb3, + 0x24, 0x94, 0xbc, 0x13, 0xb9, 0xf7, 0x5b, 0x11, 0x9e, 0x89, 0xcd, 0x1f, 0x53, 0x6d, 0x8b, 0x32, + 0xba, 0x0a, 0x3b, 0x81, 0x35, 0xde, 0x83, 0x45, 0xff, 0x9d, 0xc0, 0x65, 0xb1, 0x24, 0x5f, 0x9e, + 0x58, 0x20, 0x7c, 0x69, 0x25, 0x35, 0x44, 0x09, 0xc9, 0x53, 0x69, 0x3f, 0xcb, 0x83, 0x62, 0xb2, + 0x3c, 0xd6, 0xfa, 0x06, 0xe5, 0x31, 0x4a, 0x48, 0x1e, 0xf3, 0x14, 0xaf, 0xd2, 0x93, 0xe5, 0x49, + 0x94, 0x35, 0x28, 0x8f, 0x51, 0xf0, 0x77, 0x21, 0xf3, 0x4c, 0xeb, 0x3a, 0xc4, 0xe2, 0x0d, 0x63, + 0xfc, 0x15, 0xd3, 0x7c, 0xc1, 0x2a, 0x2f, 0xb9, 0x4f, 0x79, 0x5b, 0x09, 0x89, 0xef, 0x72, 0xbd, + 0x63, 0x71, 0x2f, 0xd0, 0x4a, 0x2d, 0xfe, 0x37, 0x09, 0x19, 0x1e, 0x3c, 0x6f, 0x43, 0x3a, 0x78, + 0x7b, 0x5d, 0x9f, 0xe0, 0x8d, 0x3a, 0xad, 0xe0, 0x7e, 0xa0, 0xb2, 0x7d, 0xf8, 0x9d, 0x60, 0x3f, + 0xcd, 0xdc, 0xf0, 0x8d, 0x49, 0x42, 0xfc, 0x9e, 0x72, 0x18, 0xf1, 0xc3, 0xf6, 0x7b, 0x19, 0x0a, + 0x6e, 0x6e, 0xf7, 0x82, 0x6f, 0x7c, 0x7a, 0x09, 0x32, 0x2a, 0x2b, 0x03, 0xa1, 0x0b, 0x2b, 0x3f, + 0xcf, 0x85, 0xe5, 0x56, 0x20, 0x1e, 0x83, 0x9a, 0x4a, 0x8d, 0x99, 0x96, 0xb2, 0x8c, 0xb0, 0xa3, + 0xba, 0x8f, 0x16, 0x43, 0x57, 0x98, 0xaf, 0xb2, 0x12, 0xfd, 0x5d, 0x6d, 0xc0, 0x62, 0x04, 0xf0, + 0xe4, 0x07, 0x44, 0xf5, 0x67, 0x08, 0xf2, 0x01, 0x3b, 0xfd, 0x1f, 0x1b, 0x88, 0xc8, 0x6d, 0x5c, + 0x84, 0x3c, 0x57, 0xd5, 0x5b, 0x7a, 0x96, 0x75, 0x23, 0xe0, 0x5f, 0x02, 0x14, 0x82, 0x39, 0x86, + 0x7f, 0x00, 0x38, 0x94, 0x9d, 0x94, 0x8d, 0x62, 0x2e, 0xad, 0xad, 0x4d, 0x4d, 0xd1, 0xd0, 0xe2, + 0x60, 0x60, 0x12, 0xa9, 0xec, 0x44, 0x28, 0xf8, 0x0a, 0x80, 0x6f, 0x7b, 0x66, 0xb8, 0xb4, 0x94, + 0xf3, 0x8c, 0x6f, 0xe3, 0x15, 0x48, 0x2b, 0x72, 0xdf, 0xf6, 0x52, 0x19, 0x7b, 0x67, 0x5a, 0xa6, + 0x52, 0xdf, 0xa7, 0x13, 0x35, 0x89, 0x31, 0xb8, 0x37, 0xc5, 0x68, 0x90, 0x84, 0x43, 0x24, 0xd4, + 0x00, 0x64, 0x66, 0x6f, 0x00, 0xc4, 0x3d, 0x28, 0x47, 0x55, 0xc1, 0x45, 0xc8, 0xed, 0x3d, 0x6a, + 0x6f, 0xb5, 0xee, 0xed, 0x3d, 0x68, 0x96, 0x13, 0x78, 0x01, 0x92, 0xf7, 0xb6, 0xb7, 0xcb, 0x08, + 0x03, 0x64, 0xa4, 0xe6, 0xc3, 0x47, 0x4f, 0x9a, 0x65, 0x01, 0xe7, 0x61, 0x61, 0xeb, 0x3d, 0x49, + 0x6a, 0xee, 0x1d, 0x94, 0x93, 0x38, 0x07, 0x69, 
0xa9, 0xb9, 0xdf, 0x3c, 0x28, 0xa7, 0x44, 0x1d, + 0x2a, 0x6e, 0x19, 0xdc, 0x0a, 0x8c, 0x39, 0xa6, 0x0e, 0xca, 0x42, 0x33, 0x30, 0x61, 0xe2, 0x0c, + 0x2c, 0x19, 0x99, 0x81, 0x89, 0x1f, 0xc0, 0xa5, 0x98, 0xf3, 0x78, 0x05, 0xbe, 0x06, 0xa5, 0xd0, + 0x10, 0x86, 0xb5, 0xed, 0x39, 0xa9, 0x18, 0x9c, 0xc2, 0xcc, 0x3c, 0x05, 0x5a, 0xfb, 0x29, 0x86, + 0xdc, 0x7d, 0x2f, 0x22, 0xf0, 0x27, 0x08, 0xf2, 0x81, 0x27, 0x23, 0x8e, 0xaf, 0x24, 0xa3, 0xa3, + 0xd0, 0xea, 0xe4, 0xae, 0x4e, 0xbc, 0xf5, 0xe3, 0x2f, 0xfe, 0xf9, 0x2b, 0x61, 0x0d, 0xdf, 0x68, + 0x1c, 0xaf, 0x36, 0x3e, 0xd6, 0xe5, 0x1e, 0x79, 0xcb, 0xb4, 0x8c, 0x0f, 0x88, 0xe2, 0xd8, 0x8d, + 0x5a, 0xc3, 0xbb, 0x69, 0xe9, 0x6f, 0x2f, 0x09, 0x1b, 0xb5, 0x46, 0xad, 0x76, 0x8a, 0xff, 0x84, + 0xd8, 0x55, 0xed, 0x27, 0x30, 0xfe, 0xfa, 0xd8, 0x9b, 0x31, 0x3a, 0x42, 0xa8, 0xd6, 0x66, 0x61, + 0x65, 0x76, 0x15, 0xbf, 0x47, 0x21, 0x6e, 0xe3, 0x4d, 0x0a, 0x91, 0x79, 0x71, 0x46, 0x90, 0x8d, + 0x8f, 0x43, 0x1e, 0x39, 0xc5, 0x7f, 0x74, 0x1f, 0x5b, 0xa1, 0xd1, 0x1a, 0x8e, 0x87, 0x12, 0x3b, + 0x7f, 0x9b, 0x66, 0x4c, 0x89, 0x22, 0xdd, 0x15, 0xef, 0xce, 0x87, 0x74, 0x14, 0xe7, 0xfa, 0xb0, + 0xd9, 0xfe, 0x1c, 0x41, 0x29, 0x3c, 0x22, 0x1b, 0x83, 0x38, 0x76, 0x8e, 0x36, 0x0d, 0xf1, 0x3b, + 0x14, 0x71, 0x73, 0xed, 0x36, 0x45, 0xec, 0x4f, 0xfd, 0xe7, 0x88, 0x83, 0x00, 0xd4, 0x5f, 0x22, + 0x28, 0x85, 0x07, 0x56, 0x63, 0xa0, 0xc6, 0x4e, 0xb5, 0xaa, 0x17, 0x46, 0x4a, 0x48, 0xb3, 0x67, + 0x3a, 0x03, 0x2f, 0x44, 0x6b, 0xf3, 0x87, 0xe8, 0x9f, 0x11, 0xbc, 0x36, 0x32, 0x6c, 0xc1, 0xf1, + 0xbd, 0xfa, 0xb8, 0x69, 0x57, 0xb5, 0x3e, 0x2b, 0x3b, 0x0f, 0xd7, 0x2d, 0x0a, 0xf7, 0x2d, 0xf1, + 0x16, 0x33, 0x29, 0x87, 0x37, 0x06, 0xf2, 0xe9, 0x10, 0xf3, 0xfa, 0x21, 0x17, 0xb9, 0x8e, 0x6a, + 0x37, 0x10, 0xfe, 0x0b, 0x82, 0x72, 0x74, 0xb2, 0x81, 0xbf, 0x19, 0x8f, 0x25, 0x7e, 0xf4, 0x52, + 0x7d, 0x73, 0x46, 0x6e, 0x0e, 0x9c, 0xc7, 0x02, 0x8f, 0xde, 0x79, 0x80, 0x47, 0x24, 0xae, 0xa3, + 0x1a, 0xfe, 0x14, 0x41, 0x86, 0x4d, 0x35, 0xb0, 0x18, 0x9f, 0x60, 0xc1, 0xb1, 0x4b, 0x75, 0x79, + 0x22, 0x0f, 0x07, 0x78, 0x8f, 0x02, 0xdc, 0x10, 0xbf, 0x3d, 0x2f, 0x40, 0xf6, 0xc0, 0x70, 0x61, + 0xfd, 0x02, 0x41, 0xd6, 0x1b, 0x6b, 0xe0, 0xf8, 0x56, 0x20, 0x32, 0xf5, 0x18, 0x1b, 0x96, 0x67, + 0xf6, 0xb3, 0xc5, 0x0f, 0x70, 0xf1, 0xfc, 0xc3, 0xc5, 0xc3, 0x1f, 0xef, 0xe3, 0xf0, 0x84, 0x67, + 0x26, 0xd5, 0x6b, 0x53, 0xb8, 0xb8, 0xb1, 0x7e, 0x82, 0x28, 0xbe, 0x53, 0x6e, 0xad, 0x19, 0x8b, + 0xd1, 0xe9, 0xba, 0xc5, 0x05, 0xad, 0xa3, 0xda, 0xfb, 0x77, 0xc5, 0x8d, 0xf9, 0x6b, 0x6e, 0x50, + 0xc2, 0x0d, 0xe4, 0x06, 0x42, 0x9a, 0x3e, 0x16, 0xf1, 0xd5, 0xa9, 0x6f, 0xe4, 0xaa, 0x38, 0xf1, + 0xad, 0xc9, 0x14, 0xbb, 0x4b, 0xf5, 0x5a, 0x17, 0x6f, 0xce, 0x6b, 0x77, 0xfa, 0x4a, 0x5d, 0x47, + 0xb5, 0x15, 0x74, 0x03, 0xe1, 0xdf, 0x20, 0xc8, 0xb0, 0x17, 0xd4, 0x98, 0xf8, 0x0c, 0x3d, 0xe8, + 0xc6, 0xc4, 0x67, 0xf8, 0x09, 0x76, 0xf6, 0xf8, 0xec, 0x52, 0x39, 0x1e, 0xb4, 0x9f, 0x0b, 0xf0, + 0xda, 0x48, 0x97, 0x31, 0xa6, 0x6a, 0x8d, 0xeb, 0x7e, 0xaa, 0xf5, 0x59, 0xd9, 0x39, 0xf6, 0x5f, + 0xb3, 0x70, 0xf9, 0x04, 0x89, 0x6f, 0xcf, 0x17, 0x2f, 0xdd, 0xa8, 0x48, 0x37, 0x70, 0x76, 0xc4, + 0xed, 0x33, 0x04, 0x4e, 0x9c, 0xa8, 0xcd, 0xcf, 0x11, 0x5c, 0x54, 0x8c, 0x5e, 0x9c, 0x3a, 0x9b, + 0x25, 0xbf, 0x3f, 0x7a, 0xec, 0xe6, 0xe6, 0x63, 0xf4, 0xfe, 0x1d, 0xce, 0xd6, 0x31, 0xba, 0xb2, + 0xde, 0xa9, 0x1b, 0x56, 0xa7, 0xd1, 0x21, 0x3a, 0xcd, 0xdc, 0x06, 0xfb, 0x24, 0x9b, 0x9a, 0x1d, + 0xfa, 0x9b, 0x7a, 0xc3, 0x5f, 0xfc, 0x4e, 0x48, 0x3d, 0xd8, 0xba, 0xbf, 0xff, 0x07, 0xe1, 0xd2, + 0x03, 0x26, 0x65, 0xab, 0x6b, 0xf4, 0xd5, 0xba, 0x7f, 0x48, 0xfd, 0xc9, 
0xea, 0x5f, 0xbd, 0x6f, + 0x4f, 0xe9, 0xb7, 0xa7, 0xfe, 0xb7, 0xa7, 0x4f, 0x56, 0x0f, 0x33, 0xf4, 0x9c, 0x6f, 0xfd, 0x2f, + 0x00, 0x00, 0xff, 0xff, 0x03, 0x8c, 0x0d, 0x98, 0xcc, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1/query.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1/query.pb.go new file mode 100644 index 0000000..1043040 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1/query.pb.go @@ -0,0 +1,1015 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1/query.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A sort direction. +type StructuredQuery_Direction int32 + +const ( + // Unspecified. + StructuredQuery_DIRECTION_UNSPECIFIED StructuredQuery_Direction = 0 + // Ascending. + StructuredQuery_ASCENDING StructuredQuery_Direction = 1 + // Descending. + StructuredQuery_DESCENDING StructuredQuery_Direction = 2 +) + +var StructuredQuery_Direction_name = map[int32]string{ + 0: "DIRECTION_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var StructuredQuery_Direction_value = map[string]int32{ + "DIRECTION_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x StructuredQuery_Direction) String() string { + return proto.EnumName(StructuredQuery_Direction_name, int32(x)) +} +func (StructuredQuery_Direction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 0} +} + +// A composite filter operator. +type StructuredQuery_CompositeFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + StructuredQuery_CompositeFilter_OPERATOR_UNSPECIFIED StructuredQuery_CompositeFilter_Operator = 0 + // The results are required to satisfy each of the combined filters. + StructuredQuery_CompositeFilter_AND StructuredQuery_CompositeFilter_Operator = 1 +) + +var StructuredQuery_CompositeFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "AND", +} +var StructuredQuery_CompositeFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "AND": 1, +} + +func (x StructuredQuery_CompositeFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_CompositeFilter_Operator_name, int32(x)) +} +func (StructuredQuery_CompositeFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 2, 0} +} + +// A field filter operator. +type StructuredQuery_FieldFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + StructuredQuery_FieldFilter_OPERATOR_UNSPECIFIED StructuredQuery_FieldFilter_Operator = 0 + // Less than. Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_LESS_THAN StructuredQuery_FieldFilter_Operator = 1 + // Less than or equal. 
Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_LESS_THAN_OR_EQUAL StructuredQuery_FieldFilter_Operator = 2 + // Greater than. Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_GREATER_THAN StructuredQuery_FieldFilter_Operator = 3 + // Greater than or equal. Requires that the field come first in + // `order_by`. + StructuredQuery_FieldFilter_GREATER_THAN_OR_EQUAL StructuredQuery_FieldFilter_Operator = 4 + // Equal. + StructuredQuery_FieldFilter_EQUAL StructuredQuery_FieldFilter_Operator = 5 + // Contains. Requires that the field is an array. + StructuredQuery_FieldFilter_ARRAY_CONTAINS StructuredQuery_FieldFilter_Operator = 7 +) + +var StructuredQuery_FieldFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "LESS_THAN", + 2: "LESS_THAN_OR_EQUAL", + 3: "GREATER_THAN", + 4: "GREATER_THAN_OR_EQUAL", + 5: "EQUAL", + 7: "ARRAY_CONTAINS", +} +var StructuredQuery_FieldFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "LESS_THAN": 1, + "LESS_THAN_OR_EQUAL": 2, + "GREATER_THAN": 3, + "GREATER_THAN_OR_EQUAL": 4, + "EQUAL": 5, + "ARRAY_CONTAINS": 7, +} + +func (x StructuredQuery_FieldFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_FieldFilter_Operator_name, int32(x)) +} +func (StructuredQuery_FieldFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 3, 0} +} + +// A unary operator. +type StructuredQuery_UnaryFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + StructuredQuery_UnaryFilter_OPERATOR_UNSPECIFIED StructuredQuery_UnaryFilter_Operator = 0 + // Test if a field is equal to NaN. + StructuredQuery_UnaryFilter_IS_NAN StructuredQuery_UnaryFilter_Operator = 2 + // Test if an exprestion evaluates to Null. + StructuredQuery_UnaryFilter_IS_NULL StructuredQuery_UnaryFilter_Operator = 3 +) + +var StructuredQuery_UnaryFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 2: "IS_NAN", + 3: "IS_NULL", +} +var StructuredQuery_UnaryFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "IS_NAN": 2, + "IS_NULL": 3, +} + +func (x StructuredQuery_UnaryFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_UnaryFilter_Operator_name, int32(x)) +} +func (StructuredQuery_UnaryFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 4, 0} +} + +// A Firestore query. +type StructuredQuery struct { + // The projection to return. + Select *StructuredQuery_Projection `protobuf:"bytes,1,opt,name=select,proto3" json:"select,omitempty"` + // The collections to query. + From []*StructuredQuery_CollectionSelector `protobuf:"bytes,2,rep,name=from,proto3" json:"from,omitempty"` + // The filter to apply. + Where *StructuredQuery_Filter `protobuf:"bytes,3,opt,name=where,proto3" json:"where,omitempty"` + // The order to apply to the query results. + // + // Firestore guarantees a stable ordering through the following rules: + // + // * Any field required to appear in `order_by`, that is not already + // specified in `order_by`, is appended to the order in field name order + // by default. + // * If an order on `__name__` is not specified, it is appended by default. + // + // Fields are appended with the same sort direction as the last order + // specified, or 'ASCENDING' if no order was specified. 
For example: + // + // * `SELECT * FROM Foo ORDER BY A` becomes + // `SELECT * FROM Foo ORDER BY A, __name__` + // * `SELECT * FROM Foo ORDER BY A DESC` becomes + // `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC` + // * `SELECT * FROM Foo WHERE A > 1` becomes + // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__` + OrderBy []*StructuredQuery_Order `protobuf:"bytes,4,rep,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // A starting point for the query results. + StartAt *Cursor `protobuf:"bytes,7,opt,name=start_at,json=startAt,proto3" json:"start_at,omitempty"` + // A end point for the query results. + EndAt *Cursor `protobuf:"bytes,8,opt,name=end_at,json=endAt,proto3" json:"end_at,omitempty"` + // The number of results to skip. + // + // Applies before limit, but after all other constraints. Must be >= 0 if + // specified. + Offset int32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` + // The maximum number of results to return. + // + // Applies after all other constraints. + // Must be >= 0 if specified. + Limit *wrappers.Int32Value `protobuf:"bytes,5,opt,name=limit,proto3" json:"limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery) Reset() { *m = StructuredQuery{} } +func (m *StructuredQuery) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery) ProtoMessage() {} +func (*StructuredQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0} +} +func (m *StructuredQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery.Unmarshal(m, b) +} +func (m *StructuredQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery.Merge(dst, src) +} +func (m *StructuredQuery) XXX_Size() int { + return xxx_messageInfo_StructuredQuery.Size(m) +} +func (m *StructuredQuery) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery proto.InternalMessageInfo + +func (m *StructuredQuery) GetSelect() *StructuredQuery_Projection { + if m != nil { + return m.Select + } + return nil +} + +func (m *StructuredQuery) GetFrom() []*StructuredQuery_CollectionSelector { + if m != nil { + return m.From + } + return nil +} + +func (m *StructuredQuery) GetWhere() *StructuredQuery_Filter { + if m != nil { + return m.Where + } + return nil +} + +func (m *StructuredQuery) GetOrderBy() []*StructuredQuery_Order { + if m != nil { + return m.OrderBy + } + return nil +} + +func (m *StructuredQuery) GetStartAt() *Cursor { + if m != nil { + return m.StartAt + } + return nil +} + +func (m *StructuredQuery) GetEndAt() *Cursor { + if m != nil { + return m.EndAt + } + return nil +} + +func (m *StructuredQuery) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *StructuredQuery) GetLimit() *wrappers.Int32Value { + if m != nil { + return m.Limit + } + return nil +} + +// A selection of a collection, such as `messages as m1`. +type StructuredQuery_CollectionSelector struct { + // The collection ID. + // When set, selects only collections with this ID. 
+ CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // When false, selects only collections that are immediate children of + // the `parent` specified in the containing `RunQueryRequest`. + // When true, selects all descendant collections. + AllDescendants bool `protobuf:"varint,3,opt,name=all_descendants,json=allDescendants,proto3" json:"all_descendants,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_CollectionSelector) Reset() { *m = StructuredQuery_CollectionSelector{} } +func (m *StructuredQuery_CollectionSelector) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_CollectionSelector) ProtoMessage() {} +func (*StructuredQuery_CollectionSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 0} +} +func (m *StructuredQuery_CollectionSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Unmarshal(m, b) +} +func (m *StructuredQuery_CollectionSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_CollectionSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_CollectionSelector.Merge(dst, src) +} +func (m *StructuredQuery_CollectionSelector) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Size(m) +} +func (m *StructuredQuery_CollectionSelector) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_CollectionSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_CollectionSelector proto.InternalMessageInfo + +func (m *StructuredQuery_CollectionSelector) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *StructuredQuery_CollectionSelector) GetAllDescendants() bool { + if m != nil { + return m.AllDescendants + } + return false +} + +// A filter. +type StructuredQuery_Filter struct { + // The type of filter. 
+ // + // Types that are valid to be assigned to FilterType: + // *StructuredQuery_Filter_CompositeFilter + // *StructuredQuery_Filter_FieldFilter + // *StructuredQuery_Filter_UnaryFilter + FilterType isStructuredQuery_Filter_FilterType `protobuf_oneof:"filter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Filter) Reset() { *m = StructuredQuery_Filter{} } +func (m *StructuredQuery_Filter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Filter) ProtoMessage() {} +func (*StructuredQuery_Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 1} +} +func (m *StructuredQuery_Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Filter.Unmarshal(m, b) +} +func (m *StructuredQuery_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Filter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Filter.Merge(dst, src) +} +func (m *StructuredQuery_Filter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Filter.Size(m) +} +func (m *StructuredQuery_Filter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Filter proto.InternalMessageInfo + +type isStructuredQuery_Filter_FilterType interface { + isStructuredQuery_Filter_FilterType() +} + +type StructuredQuery_Filter_CompositeFilter struct { + CompositeFilter *StructuredQuery_CompositeFilter `protobuf:"bytes,1,opt,name=composite_filter,json=compositeFilter,proto3,oneof"` +} + +type StructuredQuery_Filter_FieldFilter struct { + FieldFilter *StructuredQuery_FieldFilter `protobuf:"bytes,2,opt,name=field_filter,json=fieldFilter,proto3,oneof"` +} + +type StructuredQuery_Filter_UnaryFilter struct { + UnaryFilter *StructuredQuery_UnaryFilter `protobuf:"bytes,3,opt,name=unary_filter,json=unaryFilter,proto3,oneof"` +} + +func (*StructuredQuery_Filter_CompositeFilter) isStructuredQuery_Filter_FilterType() {} + +func (*StructuredQuery_Filter_FieldFilter) isStructuredQuery_Filter_FilterType() {} + +func (*StructuredQuery_Filter_UnaryFilter) isStructuredQuery_Filter_FilterType() {} + +func (m *StructuredQuery_Filter) GetFilterType() isStructuredQuery_Filter_FilterType { + if m != nil { + return m.FilterType + } + return nil +} + +func (m *StructuredQuery_Filter) GetCompositeFilter() *StructuredQuery_CompositeFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_CompositeFilter); ok { + return x.CompositeFilter + } + return nil +} + +func (m *StructuredQuery_Filter) GetFieldFilter() *StructuredQuery_FieldFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_FieldFilter); ok { + return x.FieldFilter + } + return nil +} + +func (m *StructuredQuery_Filter) GetUnaryFilter() *StructuredQuery_UnaryFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_UnaryFilter); ok { + return x.UnaryFilter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StructuredQuery_Filter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StructuredQuery_Filter_OneofMarshaler, _StructuredQuery_Filter_OneofUnmarshaler, _StructuredQuery_Filter_OneofSizer, []interface{}{ + (*StructuredQuery_Filter_CompositeFilter)(nil), + (*StructuredQuery_Filter_FieldFilter)(nil), + (*StructuredQuery_Filter_UnaryFilter)(nil), + } +} + +func _StructuredQuery_Filter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StructuredQuery_Filter) + // filter_type + switch x := m.FilterType.(type) { + case *StructuredQuery_Filter_CompositeFilter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CompositeFilter); err != nil { + return err + } + case *StructuredQuery_Filter_FieldFilter: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FieldFilter); err != nil { + return err + } + case *StructuredQuery_Filter_UnaryFilter: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UnaryFilter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StructuredQuery_Filter.FilterType has unexpected type %T", x) + } + return nil +} + +func _StructuredQuery_Filter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StructuredQuery_Filter) + switch tag { + case 1: // filter_type.composite_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_CompositeFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_CompositeFilter{msg} + return true, err + case 2: // filter_type.field_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_FieldFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_FieldFilter{msg} + return true, err + case 3: // filter_type.unary_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_UnaryFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_UnaryFilter{msg} + return true, err + default: + return false, nil + } +} + +func _StructuredQuery_Filter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StructuredQuery_Filter) + // filter_type + switch x := m.FilterType.(type) { + case *StructuredQuery_Filter_CompositeFilter: + s := proto.Size(x.CompositeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StructuredQuery_Filter_FieldFilter: + s := proto.Size(x.FieldFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StructuredQuery_Filter_UnaryFilter: + s := proto.Size(x.UnaryFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A filter that merges multiple other filters using the given operator. +type StructuredQuery_CompositeFilter struct { + // The operator for combining multiple filters. + Op StructuredQuery_CompositeFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.firestore.v1.StructuredQuery_CompositeFilter_Operator" json:"op,omitempty"` + // The list of filters to combine. + // Must contain at least one filter. 
+ Filters []*StructuredQuery_Filter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_CompositeFilter) Reset() { *m = StructuredQuery_CompositeFilter{} } +func (m *StructuredQuery_CompositeFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_CompositeFilter) ProtoMessage() {} +func (*StructuredQuery_CompositeFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 2} +} +func (m *StructuredQuery_CompositeFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_CompositeFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_CompositeFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_CompositeFilter.Merge(dst, src) +} +func (m *StructuredQuery_CompositeFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Size(m) +} +func (m *StructuredQuery_CompositeFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_CompositeFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_CompositeFilter proto.InternalMessageInfo + +func (m *StructuredQuery_CompositeFilter) GetOp() StructuredQuery_CompositeFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_CompositeFilter_OPERATOR_UNSPECIFIED +} + +func (m *StructuredQuery_CompositeFilter) GetFilters() []*StructuredQuery_Filter { + if m != nil { + return m.Filters + } + return nil +} + +// A filter on a specific field. +type StructuredQuery_FieldFilter struct { + // The field to filter by. + Field *StructuredQuery_FieldReference `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // The operator to filter by. + Op StructuredQuery_FieldFilter_Operator `protobuf:"varint,2,opt,name=op,proto3,enum=google.firestore.v1.StructuredQuery_FieldFilter_Operator" json:"op,omitempty"` + // The value to compare to. 
+ Value *Value `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_FieldFilter) Reset() { *m = StructuredQuery_FieldFilter{} } +func (m *StructuredQuery_FieldFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_FieldFilter) ProtoMessage() {} +func (*StructuredQuery_FieldFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 3} +} +func (m *StructuredQuery_FieldFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_FieldFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_FieldFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_FieldFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_FieldFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_FieldFilter.Merge(dst, src) +} +func (m *StructuredQuery_FieldFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_FieldFilter.Size(m) +} +func (m *StructuredQuery_FieldFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_FieldFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_FieldFilter proto.InternalMessageInfo + +func (m *StructuredQuery_FieldFilter) GetField() *StructuredQuery_FieldReference { + if m != nil { + return m.Field + } + return nil +} + +func (m *StructuredQuery_FieldFilter) GetOp() StructuredQuery_FieldFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_FieldFilter_OPERATOR_UNSPECIFIED +} + +func (m *StructuredQuery_FieldFilter) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +// A filter with a single operand. +type StructuredQuery_UnaryFilter struct { + // The unary operator to apply. + Op StructuredQuery_UnaryFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.firestore.v1.StructuredQuery_UnaryFilter_Operator" json:"op,omitempty"` + // The argument to the filter. 
+ // + // Types that are valid to be assigned to OperandType: + // *StructuredQuery_UnaryFilter_Field + OperandType isStructuredQuery_UnaryFilter_OperandType `protobuf_oneof:"operand_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_UnaryFilter) Reset() { *m = StructuredQuery_UnaryFilter{} } +func (m *StructuredQuery_UnaryFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_UnaryFilter) ProtoMessage() {} +func (*StructuredQuery_UnaryFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 4} +} +func (m *StructuredQuery_UnaryFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_UnaryFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_UnaryFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_UnaryFilter.Merge(dst, src) +} +func (m *StructuredQuery_UnaryFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Size(m) +} +func (m *StructuredQuery_UnaryFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_UnaryFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_UnaryFilter proto.InternalMessageInfo + +func (m *StructuredQuery_UnaryFilter) GetOp() StructuredQuery_UnaryFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_UnaryFilter_OPERATOR_UNSPECIFIED +} + +type isStructuredQuery_UnaryFilter_OperandType interface { + isStructuredQuery_UnaryFilter_OperandType() +} + +type StructuredQuery_UnaryFilter_Field struct { + Field *StructuredQuery_FieldReference `protobuf:"bytes,2,opt,name=field,proto3,oneof"` +} + +func (*StructuredQuery_UnaryFilter_Field) isStructuredQuery_UnaryFilter_OperandType() {} + +func (m *StructuredQuery_UnaryFilter) GetOperandType() isStructuredQuery_UnaryFilter_OperandType { + if m != nil { + return m.OperandType + } + return nil +} + +func (m *StructuredQuery_UnaryFilter) GetField() *StructuredQuery_FieldReference { + if x, ok := m.GetOperandType().(*StructuredQuery_UnaryFilter_Field); ok { + return x.Field + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StructuredQuery_UnaryFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StructuredQuery_UnaryFilter_OneofMarshaler, _StructuredQuery_UnaryFilter_OneofUnmarshaler, _StructuredQuery_UnaryFilter_OneofSizer, []interface{}{ + (*StructuredQuery_UnaryFilter_Field)(nil), + } +} + +func _StructuredQuery_UnaryFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StructuredQuery_UnaryFilter) + // operand_type + switch x := m.OperandType.(type) { + case *StructuredQuery_UnaryFilter_Field: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Field); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StructuredQuery_UnaryFilter.OperandType has unexpected type %T", x) + } + return nil +} + +func _StructuredQuery_UnaryFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StructuredQuery_UnaryFilter) + switch tag { + case 2: // operand_type.field + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_FieldReference) + err := b.DecodeMessage(msg) + m.OperandType = &StructuredQuery_UnaryFilter_Field{msg} + return true, err + default: + return false, nil + } +} + +func _StructuredQuery_UnaryFilter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StructuredQuery_UnaryFilter) + // operand_type + switch x := m.OperandType.(type) { + case *StructuredQuery_UnaryFilter_Field: + s := proto.Size(x.Field) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A reference to a field, such as `max(messages.time) as max_time`. +type StructuredQuery_FieldReference struct { + FieldPath string `protobuf:"bytes,2,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_FieldReference) Reset() { *m = StructuredQuery_FieldReference{} } +func (m *StructuredQuery_FieldReference) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_FieldReference) ProtoMessage() {} +func (*StructuredQuery_FieldReference) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 5} +} +func (m *StructuredQuery_FieldReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_FieldReference.Unmarshal(m, b) +} +func (m *StructuredQuery_FieldReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_FieldReference.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_FieldReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_FieldReference.Merge(dst, src) +} +func (m *StructuredQuery_FieldReference) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_FieldReference.Size(m) +} +func (m *StructuredQuery_FieldReference) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_FieldReference.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_FieldReference proto.InternalMessageInfo + +func (m *StructuredQuery_FieldReference) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +// An order on a field. 
+type StructuredQuery_Order struct { + // The field to order by. + Field *StructuredQuery_FieldReference `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // The direction to order by. Defaults to `ASCENDING`. + Direction StructuredQuery_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=google.firestore.v1.StructuredQuery_Direction" json:"direction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Order) Reset() { *m = StructuredQuery_Order{} } +func (m *StructuredQuery_Order) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Order) ProtoMessage() {} +func (*StructuredQuery_Order) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 6} +} +func (m *StructuredQuery_Order) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Order.Unmarshal(m, b) +} +func (m *StructuredQuery_Order) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Order.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Order) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Order.Merge(dst, src) +} +func (m *StructuredQuery_Order) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Order.Size(m) +} +func (m *StructuredQuery_Order) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Order.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Order proto.InternalMessageInfo + +func (m *StructuredQuery_Order) GetField() *StructuredQuery_FieldReference { + if m != nil { + return m.Field + } + return nil +} + +func (m *StructuredQuery_Order) GetDirection() StructuredQuery_Direction { + if m != nil { + return m.Direction + } + return StructuredQuery_DIRECTION_UNSPECIFIED +} + +// The projection of document's fields to return. +type StructuredQuery_Projection struct { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. 
+ Fields []*StructuredQuery_FieldReference `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Projection) Reset() { *m = StructuredQuery_Projection{} } +func (m *StructuredQuery_Projection) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Projection) ProtoMessage() {} +func (*StructuredQuery_Projection) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{0, 7} +} +func (m *StructuredQuery_Projection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Projection.Unmarshal(m, b) +} +func (m *StructuredQuery_Projection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Projection.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Projection) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Projection.Merge(dst, src) +} +func (m *StructuredQuery_Projection) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Projection.Size(m) +} +func (m *StructuredQuery_Projection) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Projection.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Projection proto.InternalMessageInfo + +func (m *StructuredQuery_Projection) GetFields() []*StructuredQuery_FieldReference { + if m != nil { + return m.Fields + } + return nil +} + +// A position in a query result set. +type Cursor struct { + // The values that represent a position, in the order they appear in + // the order by clause of a query. + // + // Can contain fewer values than specified in the order by clause. + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + // If the position is just before or just after the given values, relative + // to the sort order defined by the query. 
+ Before bool `protobuf:"varint,2,opt,name=before,proto3" json:"before,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cursor) Reset() { *m = Cursor{} } +func (m *Cursor) String() string { return proto.CompactTextString(m) } +func (*Cursor) ProtoMessage() {} +func (*Cursor) Descriptor() ([]byte, []int) { + return fileDescriptor_query_24e5c79c1727d2c0, []int{1} +} +func (m *Cursor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cursor.Unmarshal(m, b) +} +func (m *Cursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cursor.Marshal(b, m, deterministic) +} +func (dst *Cursor) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cursor.Merge(dst, src) +} +func (m *Cursor) XXX_Size() int { + return xxx_messageInfo_Cursor.Size(m) +} +func (m *Cursor) XXX_DiscardUnknown() { + xxx_messageInfo_Cursor.DiscardUnknown(m) +} + +var xxx_messageInfo_Cursor proto.InternalMessageInfo + +func (m *Cursor) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +func (m *Cursor) GetBefore() bool { + if m != nil { + return m.Before + } + return false +} + +func init() { + proto.RegisterType((*StructuredQuery)(nil), "google.firestore.v1.StructuredQuery") + proto.RegisterType((*StructuredQuery_CollectionSelector)(nil), "google.firestore.v1.StructuredQuery.CollectionSelector") + proto.RegisterType((*StructuredQuery_Filter)(nil), "google.firestore.v1.StructuredQuery.Filter") + proto.RegisterType((*StructuredQuery_CompositeFilter)(nil), "google.firestore.v1.StructuredQuery.CompositeFilter") + proto.RegisterType((*StructuredQuery_FieldFilter)(nil), "google.firestore.v1.StructuredQuery.FieldFilter") + proto.RegisterType((*StructuredQuery_UnaryFilter)(nil), "google.firestore.v1.StructuredQuery.UnaryFilter") + proto.RegisterType((*StructuredQuery_FieldReference)(nil), "google.firestore.v1.StructuredQuery.FieldReference") + proto.RegisterType((*StructuredQuery_Order)(nil), "google.firestore.v1.StructuredQuery.Order") + proto.RegisterType((*StructuredQuery_Projection)(nil), "google.firestore.v1.StructuredQuery.Projection") + proto.RegisterType((*Cursor)(nil), "google.firestore.v1.Cursor") + proto.RegisterEnum("google.firestore.v1.StructuredQuery_Direction", StructuredQuery_Direction_name, StructuredQuery_Direction_value) + proto.RegisterEnum("google.firestore.v1.StructuredQuery_CompositeFilter_Operator", StructuredQuery_CompositeFilter_Operator_name, StructuredQuery_CompositeFilter_Operator_value) + proto.RegisterEnum("google.firestore.v1.StructuredQuery_FieldFilter_Operator", StructuredQuery_FieldFilter_Operator_name, StructuredQuery_FieldFilter_Operator_value) + proto.RegisterEnum("google.firestore.v1.StructuredQuery_UnaryFilter_Operator", StructuredQuery_UnaryFilter_Operator_name, StructuredQuery_UnaryFilter_Operator_value) +} + +func init() { + proto.RegisterFile("google/firestore/v1/query.proto", fileDescriptor_query_24e5c79c1727d2c0) +} + +var fileDescriptor_query_24e5c79c1727d2c0 = []byte{ + // 976 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xdd, 0x6e, 0xe3, 0xc4, + 0x17, 0xaf, 0x9d, 0xe6, 0xeb, 0xa4, 0x4d, 0xad, 0xf9, 0xff, 0x59, 0xb2, 0xe6, 0xab, 0x0a, 0x17, + 0x54, 0x20, 0x9c, 0x6d, 0x8b, 0x40, 0x08, 0xf6, 0xc2, 0x4d, 0xdc, 0xd6, 0x6a, 0x71, 0xb2, 0x93, + 0xb4, 0xd2, 0xa2, 0x4a, 0x96, 0x6b, 0x8f, 0x53, 0x23, 0xd7, 0x63, 0xc6, 0xe3, 0xae, 0xfa, 0x24, + 0xdc, 0x22, 0xc4, 0x05, 0xe2, 
0x86, 0xf7, 0xe0, 0x01, 0x78, 0x0d, 0x6e, 0x78, 0x00, 0xe4, 0xf1, + 0x24, 0x69, 0x76, 0x2b, 0x48, 0x2b, 0xee, 0x72, 0x3e, 0x7e, 0xbf, 0x39, 0xfe, 0x9d, 0x73, 0x66, + 0x02, 0x1f, 0x4c, 0x29, 0x9d, 0xc6, 0xa4, 0x17, 0x46, 0x8c, 0x64, 0x9c, 0x32, 0xd2, 0xbb, 0xd9, + 0xed, 0x7d, 0x9f, 0x13, 0x76, 0x6b, 0xa4, 0x8c, 0x72, 0x8a, 0xfe, 0x57, 0x26, 0x18, 0xf3, 0x04, + 0xe3, 0x66, 0x57, 0xef, 0xde, 0x87, 0x0a, 0xa8, 0x9f, 0x5f, 0x93, 0x84, 0x97, 0x40, 0xfd, 0x7d, + 0x99, 0x23, 0xac, 0xcb, 0x3c, 0xec, 0xbd, 0x62, 0x5e, 0x9a, 0x12, 0x96, 0xc9, 0xf8, 0xbb, 0x32, + 0xee, 0xa5, 0x51, 0xcf, 0x4b, 0x12, 0xca, 0x3d, 0x1e, 0xd1, 0x44, 0x46, 0xbb, 0xbf, 0x69, 0xb0, + 0x35, 0xe6, 0x2c, 0xf7, 0x79, 0xce, 0x48, 0xf0, 0xa2, 0x28, 0x08, 0x1d, 0x41, 0x2d, 0x23, 0x31, + 0xf1, 0x79, 0x47, 0xd9, 0x56, 0x76, 0x5a, 0x7b, 0x3d, 0xe3, 0x9e, 0xda, 0x8c, 0xd7, 0x50, 0xc6, + 0x88, 0xd1, 0xef, 0x88, 0x5f, 0x70, 0x63, 0x09, 0x47, 0x27, 0xb0, 0x1e, 0x32, 0x7a, 0xdd, 0x51, + 0xb7, 0x2b, 0x3b, 0xad, 0xbd, 0x2f, 0x56, 0xa2, 0xe9, 0xd3, 0x38, 0x2e, 0x69, 0xc6, 0x82, 0x84, + 0x32, 0x2c, 0x48, 0x90, 0x09, 0xd5, 0x57, 0x57, 0x84, 0x91, 0x4e, 0x45, 0x14, 0xf5, 0xc9, 0x4a, + 0x6c, 0x87, 0x51, 0xcc, 0x09, 0xc3, 0x25, 0x12, 0x59, 0xd0, 0xa0, 0x2c, 0x20, 0xcc, 0xbd, 0xbc, + 0xed, 0xac, 0x8b, 0x9a, 0x3e, 0x5e, 0x89, 0x65, 0x58, 0x80, 0x70, 0x5d, 0x60, 0x0f, 0x6e, 0xd1, + 0xe7, 0xd0, 0xc8, 0xb8, 0xc7, 0xb8, 0xeb, 0xf1, 0x4e, 0x5d, 0x14, 0xf3, 0xce, 0xbd, 0x34, 0xfd, + 0x9c, 0x65, 0x94, 0xe1, 0xba, 0x48, 0x36, 0x39, 0xda, 0x83, 0x1a, 0x49, 0x82, 0x02, 0xd5, 0xf8, + 0x77, 0x54, 0x95, 0x24, 0x81, 0xc9, 0xd1, 0x13, 0xa8, 0xd1, 0x30, 0xcc, 0x08, 0xef, 0xd4, 0xb6, + 0x95, 0x9d, 0x2a, 0x96, 0x16, 0xda, 0x85, 0x6a, 0x1c, 0x5d, 0x47, 0xbc, 0x53, 0x5d, 0xa6, 0x9a, + 0x4d, 0x81, 0x61, 0x27, 0x7c, 0x7f, 0xef, 0xdc, 0x8b, 0x73, 0x82, 0xcb, 0x4c, 0xfd, 0x12, 0xd0, + 0x9b, 0xe2, 0xa2, 0x0f, 0x61, 0xd3, 0x9f, 0x7b, 0xdd, 0x28, 0xe8, 0xa8, 0xdb, 0xca, 0x4e, 0x13, + 0x6f, 0x2c, 0x9c, 0x76, 0x80, 0x3e, 0x82, 0x2d, 0x2f, 0x8e, 0xdd, 0x80, 0x64, 0x3e, 0x49, 0x02, + 0x2f, 0xe1, 0x99, 0xe8, 0x42, 0x03, 0xb7, 0xbd, 0x38, 0x1e, 0x2c, 0xbc, 0xfa, 0x2f, 0x2a, 0xd4, + 0x4a, 0xcd, 0x91, 0x07, 0x9a, 0x4f, 0xaf, 0x53, 0x9a, 0x45, 0x9c, 0xb8, 0xa1, 0xf0, 0xc9, 0x79, + 0xfa, 0x6c, 0xc5, 0x41, 0x90, 0xe0, 0x92, 0xef, 0x78, 0x0d, 0x6f, 0xf9, 0xcb, 0x2e, 0x74, 0x06, + 0x1b, 0x61, 0x44, 0xe2, 0x60, 0x46, 0xaf, 0x0a, 0xfa, 0x67, 0x2b, 0x4e, 0x06, 0x89, 0x83, 0x39, + 0x75, 0x2b, 0x5c, 0x98, 0x05, 0x6d, 0x9e, 0x78, 0xec, 0x76, 0x46, 0x5b, 0x79, 0x00, 0xed, 0x59, + 0x01, 0x5c, 0xd0, 0xe6, 0x0b, 0xf3, 0x60, 0x13, 0x5a, 0x25, 0xa1, 0xcb, 0x6f, 0x53, 0xa2, 0xff, + 0xa1, 0xc0, 0xd6, 0x6b, 0xdf, 0x88, 0xbe, 0x01, 0x95, 0xa6, 0x42, 0xa5, 0xf6, 0xde, 0xf3, 0xc7, + 0xa8, 0x64, 0x0c, 0x53, 0xc2, 0xbc, 0x62, 0x69, 0x54, 0x9a, 0x22, 0x0b, 0xea, 0xe5, 0x89, 0x99, + 0x5c, 0xc1, 0x07, 0x2d, 0xcd, 0x0c, 0xdb, 0xfd, 0x14, 0x1a, 0x33, 0x5a, 0xd4, 0x81, 0xff, 0x0f, + 0x47, 0x16, 0x36, 0x27, 0x43, 0xec, 0x9e, 0x39, 0xe3, 0x91, 0xd5, 0xb7, 0x0f, 0x6d, 0x6b, 0xa0, + 0xad, 0xa1, 0x3a, 0x54, 0x4c, 0x67, 0xa0, 0x29, 0xfa, 0x9f, 0x2a, 0xb4, 0xee, 0xa8, 0x8b, 0x6c, + 0xa8, 0x0a, 0x75, 0x65, 0xf7, 0xf7, 0x57, 0x6f, 0x0f, 0x26, 0x21, 0x61, 0x24, 0xf1, 0x09, 0x2e, + 0x19, 0x90, 0x2d, 0xf4, 0x51, 0x85, 0x3e, 0x5f, 0x3e, 0xb4, 0xcd, 0xcb, 0xda, 0x3c, 0x83, 0xea, + 0x4d, 0xb1, 0x1d, 0xb2, 0xbb, 0xfa, 0xbd, 0x6c, 0x72, 0x7f, 0x44, 0x62, 0xf7, 0x07, 0x65, 0x25, + 0x1d, 0x36, 0xa1, 0x79, 0x6a, 0x8d, 0xc7, 0xee, 0xe4, 0xd8, 0x74, 0x34, 0x05, 0x3d, 0x01, 0x34, + 0x37, 0xdd, 0x21, 0x76, 0xad, 0x17, 0x67, 0xe6, 0xa9, 
0xa6, 0x22, 0x0d, 0x36, 0x8e, 0xb0, 0x65, + 0x4e, 0x2c, 0x5c, 0x66, 0x56, 0xd0, 0x53, 0x78, 0xeb, 0xae, 0x67, 0x91, 0xbc, 0x8e, 0x9a, 0x50, + 0x2d, 0x7f, 0x56, 0x11, 0x82, 0xb6, 0x89, 0xb1, 0xf9, 0xd2, 0xed, 0x0f, 0x9d, 0x89, 0x69, 0x3b, + 0x63, 0xad, 0xae, 0xff, 0xa5, 0x40, 0xeb, 0xce, 0xe0, 0x49, 0x99, 0x94, 0x07, 0xc8, 0x74, 0x07, + 0xbd, 0x2c, 0xd3, 0xc9, 0xac, 0x79, 0xea, 0xa3, 0x9b, 0x77, 0xbc, 0x26, 0xdb, 0xd7, 0x7d, 0xbe, + 0x92, 0x80, 0x00, 0x35, 0x7b, 0xec, 0x3a, 0xa6, 0xa3, 0xa9, 0xa8, 0x05, 0xf5, 0xe2, 0xf7, 0xd9, + 0xe9, 0xa9, 0x56, 0x39, 0x68, 0xc3, 0x06, 0x2d, 0xe0, 0x49, 0x50, 0x6e, 0x50, 0x0f, 0xda, 0xcb, + 0x27, 0xa1, 0xf7, 0x00, 0xca, 0x0b, 0x21, 0xf5, 0xf8, 0x95, 0xbc, 0xc9, 0x9a, 0xc2, 0x33, 0xf2, + 0xf8, 0x95, 0xfe, 0xa3, 0x02, 0x55, 0x71, 0x97, 0xff, 0x97, 0x33, 0x79, 0x0a, 0xcd, 0x20, 0x62, + 0xe5, 0x55, 0x29, 0x47, 0xd3, 0x58, 0x89, 0x6e, 0x30, 0x43, 0xe1, 0x05, 0x81, 0xfe, 0x12, 0x60, + 0xf1, 0x90, 0xa2, 0x13, 0xa8, 0x89, 0x43, 0x66, 0xfb, 0xfb, 0xa8, 0x3a, 0x25, 0x45, 0xd7, 0x82, + 0xe6, 0xfc, 0xc8, 0x62, 0xd8, 0x06, 0x36, 0xb6, 0xfa, 0x13, 0x7b, 0xe8, 0xbc, 0x39, 0xc0, 0xe6, + 0xb8, 0x6f, 0x39, 0x03, 0xdb, 0x39, 0xd2, 0x14, 0xd4, 0x06, 0x18, 0x58, 0x73, 0x5b, 0xed, 0x4e, + 0xa0, 0x56, 0x3e, 0x51, 0xc5, 0x7b, 0x26, 0x36, 0x23, 0xeb, 0x28, 0xa2, 0xba, 0x7f, 0xda, 0x21, + 0x99, 0x59, 0xbc, 0x67, 0x97, 0x24, 0xa4, 0x8c, 0x08, 0xa9, 0x1a, 0x58, 0x5a, 0x07, 0x3f, 0x2b, + 0xf0, 0xb6, 0x4f, 0xaf, 0xef, 0x63, 0x38, 0x00, 0xf1, 0x59, 0xa3, 0xe2, 0x65, 0x1b, 0x29, 0xdf, + 0x7e, 0x2d, 0x53, 0xa6, 0x34, 0xf6, 0x92, 0xa9, 0x41, 0xd9, 0xb4, 0x37, 0x25, 0x89, 0x78, 0xf7, + 0x7a, 0x65, 0xc8, 0x4b, 0xa3, 0x6c, 0xe9, 0x2f, 0xd3, 0x57, 0x73, 0xe3, 0x27, 0x75, 0xfd, 0xa8, + 0x7f, 0x38, 0xfe, 0x55, 0x7d, 0x7a, 0x54, 0xb2, 0xf4, 0x63, 0x9a, 0x07, 0xc6, 0xe1, 0xfc, 0xb8, + 0xf3, 0xdd, 0xdf, 0x67, 0xb1, 0x0b, 0x11, 0xbb, 0x98, 0xc7, 0x2e, 0xce, 0x77, 0x2f, 0x6b, 0xe2, + 0x9c, 0xfd, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xae, 0xbb, 0x56, 0xcb, 0xcf, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1/write.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1/write.pb.go new file mode 100644 index 0000000..6ac7544 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1/write.pb.go @@ -0,0 +1,973 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1/write.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A value that is calculated by the server. +type DocumentTransform_FieldTransform_ServerValue int32 + +const ( + // Unspecified. This value must not be used. + DocumentTransform_FieldTransform_SERVER_VALUE_UNSPECIFIED DocumentTransform_FieldTransform_ServerValue = 0 + // The time at which the server processed the request, with millisecond + // precision. 
+ DocumentTransform_FieldTransform_REQUEST_TIME DocumentTransform_FieldTransform_ServerValue = 1 +) + +var DocumentTransform_FieldTransform_ServerValue_name = map[int32]string{ + 0: "SERVER_VALUE_UNSPECIFIED", + 1: "REQUEST_TIME", +} +var DocumentTransform_FieldTransform_ServerValue_value = map[string]int32{ + "SERVER_VALUE_UNSPECIFIED": 0, + "REQUEST_TIME": 1, +} + +func (x DocumentTransform_FieldTransform_ServerValue) String() string { + return proto.EnumName(DocumentTransform_FieldTransform_ServerValue_name, int32(x)) +} +func (DocumentTransform_FieldTransform_ServerValue) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{1, 0, 0} +} + +// A write on a document. +type Write struct { + // The operation to execute. + // + // Types that are valid to be assigned to Operation: + // *Write_Update + // *Write_Delete + // *Write_Transform + Operation isWrite_Operation `protobuf_oneof:"operation"` + // The fields to update in this write. + // + // This field can be set only when the operation is `update`. + // If the mask is not set for an `update` and the document exists, any + // existing data will be overwritten. + // If the mask is set and the document on the server has fields not covered by + // the mask, they are left unchanged. + // Fields referenced in the mask, but not present in the input document, are + // deleted from the document on the server. + // The field paths in this mask must not contain a reserved field name. + UpdateMask *DocumentMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // An optional precondition on the document. + // + // The write will fail if this is set and not met by the target document. + CurrentDocument *Precondition `protobuf:"bytes,4,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Write) Reset() { *m = Write{} } +func (m *Write) String() string { return proto.CompactTextString(m) } +func (*Write) ProtoMessage() {} +func (*Write) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{0} +} +func (m *Write) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Write.Unmarshal(m, b) +} +func (m *Write) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Write.Marshal(b, m, deterministic) +} +func (dst *Write) XXX_Merge(src proto.Message) { + xxx_messageInfo_Write.Merge(dst, src) +} +func (m *Write) XXX_Size() int { + return xxx_messageInfo_Write.Size(m) +} +func (m *Write) XXX_DiscardUnknown() { + xxx_messageInfo_Write.DiscardUnknown(m) +} + +var xxx_messageInfo_Write proto.InternalMessageInfo + +type isWrite_Operation interface { + isWrite_Operation() +} + +type Write_Update struct { + Update *Document `protobuf:"bytes,1,opt,name=update,proto3,oneof"` +} + +type Write_Delete struct { + Delete string `protobuf:"bytes,2,opt,name=delete,proto3,oneof"` +} + +type Write_Transform struct { + Transform *DocumentTransform `protobuf:"bytes,6,opt,name=transform,proto3,oneof"` +} + +func (*Write_Update) isWrite_Operation() {} + +func (*Write_Delete) isWrite_Operation() {} + +func (*Write_Transform) isWrite_Operation() {} + +func (m *Write) GetOperation() isWrite_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Write) GetUpdate() *Document { + if x, ok := m.GetOperation().(*Write_Update); ok { + return x.Update + } + 
return nil +} + +func (m *Write) GetDelete() string { + if x, ok := m.GetOperation().(*Write_Delete); ok { + return x.Delete + } + return "" +} + +func (m *Write) GetTransform() *DocumentTransform { + if x, ok := m.GetOperation().(*Write_Transform); ok { + return x.Transform + } + return nil +} + +func (m *Write) GetUpdateMask() *DocumentMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *Write) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Write) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Write_OneofMarshaler, _Write_OneofUnmarshaler, _Write_OneofSizer, []interface{}{ + (*Write_Update)(nil), + (*Write_Delete)(nil), + (*Write_Transform)(nil), + } +} + +func _Write_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Write) + // operation + switch x := m.Operation.(type) { + case *Write_Update: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *Write_Delete: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Delete) + case *Write_Transform: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Transform); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Write.Operation has unexpected type %T", x) + } + return nil +} + +func _Write_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Write) + switch tag { + case 1: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Document) + err := b.DecodeMessage(msg) + m.Operation = &Write_Update{msg} + return true, err + case 2: // operation.delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &Write_Delete{x} + return true, err + case 6: // operation.transform + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentTransform) + err := b.DecodeMessage(msg) + m.Operation = &Write_Transform{msg} + return true, err + default: + return false, nil + } +} + +func _Write_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Write) + // operation + switch x := m.Operation.(type) { + case *Write_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Write_Delete: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Delete))) + n += len(x.Delete) + case *Write_Transform: + s := proto.Size(x.Transform) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A transformation of a document. +type DocumentTransform struct { + // The name of the document to transform. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The list of transformations to apply to the fields of the document, in + // order. + // This must not be empty. 
+ FieldTransforms []*DocumentTransform_FieldTransform `protobuf:"bytes,2,rep,name=field_transforms,json=fieldTransforms,proto3" json:"field_transforms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentTransform) Reset() { *m = DocumentTransform{} } +func (m *DocumentTransform) String() string { return proto.CompactTextString(m) } +func (*DocumentTransform) ProtoMessage() {} +func (*DocumentTransform) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{1} +} +func (m *DocumentTransform) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentTransform.Unmarshal(m, b) +} +func (m *DocumentTransform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentTransform.Marshal(b, m, deterministic) +} +func (dst *DocumentTransform) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentTransform.Merge(dst, src) +} +func (m *DocumentTransform) XXX_Size() int { + return xxx_messageInfo_DocumentTransform.Size(m) +} +func (m *DocumentTransform) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentTransform.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentTransform proto.InternalMessageInfo + +func (m *DocumentTransform) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentTransform) GetFieldTransforms() []*DocumentTransform_FieldTransform { + if m != nil { + return m.FieldTransforms + } + return nil +} + +// A transformation of a field of the document. +type DocumentTransform_FieldTransform struct { + // The path of the field. See [Document.fields][google.firestore.v1.Document.fields] for the field path syntax + // reference. + FieldPath string `protobuf:"bytes,1,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + // The transformation to apply on the field. 
+ // + // Types that are valid to be assigned to TransformType: + // *DocumentTransform_FieldTransform_SetToServerValue + // *DocumentTransform_FieldTransform_Increment + // *DocumentTransform_FieldTransform_Maximum + // *DocumentTransform_FieldTransform_Minimum + // *DocumentTransform_FieldTransform_AppendMissingElements + // *DocumentTransform_FieldTransform_RemoveAllFromArray + TransformType isDocumentTransform_FieldTransform_TransformType `protobuf_oneof:"transform_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentTransform_FieldTransform) Reset() { *m = DocumentTransform_FieldTransform{} } +func (m *DocumentTransform_FieldTransform) String() string { return proto.CompactTextString(m) } +func (*DocumentTransform_FieldTransform) ProtoMessage() {} +func (*DocumentTransform_FieldTransform) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{1, 0} +} +func (m *DocumentTransform_FieldTransform) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentTransform_FieldTransform.Unmarshal(m, b) +} +func (m *DocumentTransform_FieldTransform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentTransform_FieldTransform.Marshal(b, m, deterministic) +} +func (dst *DocumentTransform_FieldTransform) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentTransform_FieldTransform.Merge(dst, src) +} +func (m *DocumentTransform_FieldTransform) XXX_Size() int { + return xxx_messageInfo_DocumentTransform_FieldTransform.Size(m) +} +func (m *DocumentTransform_FieldTransform) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentTransform_FieldTransform.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentTransform_FieldTransform proto.InternalMessageInfo + +func (m *DocumentTransform_FieldTransform) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +type isDocumentTransform_FieldTransform_TransformType interface { + isDocumentTransform_FieldTransform_TransformType() +} + +type DocumentTransform_FieldTransform_SetToServerValue struct { + SetToServerValue DocumentTransform_FieldTransform_ServerValue `protobuf:"varint,2,opt,name=set_to_server_value,json=setToServerValue,proto3,enum=google.firestore.v1.DocumentTransform_FieldTransform_ServerValue,oneof"` +} + +type DocumentTransform_FieldTransform_Increment struct { + Increment *Value `protobuf:"bytes,3,opt,name=increment,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_Maximum struct { + Maximum *Value `protobuf:"bytes,4,opt,name=maximum,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_Minimum struct { + Minimum *Value `protobuf:"bytes,5,opt,name=minimum,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_AppendMissingElements struct { + AppendMissingElements *ArrayValue `protobuf:"bytes,6,opt,name=append_missing_elements,json=appendMissingElements,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_RemoveAllFromArray struct { + RemoveAllFromArray *ArrayValue `protobuf:"bytes,7,opt,name=remove_all_from_array,json=removeAllFromArray,proto3,oneof"` +} + +func (*DocumentTransform_FieldTransform_SetToServerValue) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_Increment) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_Maximum) isDocumentTransform_FieldTransform_TransformType() {} + +func (*DocumentTransform_FieldTransform_Minimum) 
isDocumentTransform_FieldTransform_TransformType() {} + +func (*DocumentTransform_FieldTransform_AppendMissingElements) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_RemoveAllFromArray) isDocumentTransform_FieldTransform_TransformType() { +} + +func (m *DocumentTransform_FieldTransform) GetTransformType() isDocumentTransform_FieldTransform_TransformType { + if m != nil { + return m.TransformType + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetSetToServerValue() DocumentTransform_FieldTransform_ServerValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_SetToServerValue); ok { + return x.SetToServerValue + } + return DocumentTransform_FieldTransform_SERVER_VALUE_UNSPECIFIED +} + +func (m *DocumentTransform_FieldTransform) GetIncrement() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Increment); ok { + return x.Increment + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetMaximum() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Maximum); ok { + return x.Maximum + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetMinimum() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Minimum); ok { + return x.Minimum + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetAppendMissingElements() *ArrayValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_AppendMissingElements); ok { + return x.AppendMissingElements + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetRemoveAllFromArray() *ArrayValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_RemoveAllFromArray); ok { + return x.RemoveAllFromArray + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DocumentTransform_FieldTransform) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DocumentTransform_FieldTransform_OneofMarshaler, _DocumentTransform_FieldTransform_OneofUnmarshaler, _DocumentTransform_FieldTransform_OneofSizer, []interface{}{ + (*DocumentTransform_FieldTransform_SetToServerValue)(nil), + (*DocumentTransform_FieldTransform_Increment)(nil), + (*DocumentTransform_FieldTransform_Maximum)(nil), + (*DocumentTransform_FieldTransform_Minimum)(nil), + (*DocumentTransform_FieldTransform_AppendMissingElements)(nil), + (*DocumentTransform_FieldTransform_RemoveAllFromArray)(nil), + } +} + +func _DocumentTransform_FieldTransform_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DocumentTransform_FieldTransform) + // transform_type + switch x := m.TransformType.(type) { + case *DocumentTransform_FieldTransform_SetToServerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SetToServerValue)) + case *DocumentTransform_FieldTransform_Increment: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Increment); err != nil { + return err + } + case *DocumentTransform_FieldTransform_Maximum: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Maximum); err != nil { + return err + } + case *DocumentTransform_FieldTransform_Minimum: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Minimum); err != nil { + return err + } + case *DocumentTransform_FieldTransform_AppendMissingElements: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppendMissingElements); err != nil { + return err + } + case *DocumentTransform_FieldTransform_RemoveAllFromArray: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RemoveAllFromArray); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DocumentTransform_FieldTransform.TransformType has unexpected type %T", x) + } + return nil +} + +func _DocumentTransform_FieldTransform_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DocumentTransform_FieldTransform) + switch tag { + case 2: // transform_type.set_to_server_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TransformType = &DocumentTransform_FieldTransform_SetToServerValue{DocumentTransform_FieldTransform_ServerValue(x)} + return true, err + case 3: // transform_type.increment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Increment{msg} + return true, err + case 4: // transform_type.maximum + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Maximum{msg} + return true, err + case 5: // transform_type.minimum + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Minimum{msg} + return true, err + case 6: // transform_type.append_missing_elements + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.TransformType = 
&DocumentTransform_FieldTransform_AppendMissingElements{msg} + return true, err + case 7: // transform_type.remove_all_from_array + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_RemoveAllFromArray{msg} + return true, err + default: + return false, nil + } +} + +func _DocumentTransform_FieldTransform_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DocumentTransform_FieldTransform) + // transform_type + switch x := m.TransformType.(type) { + case *DocumentTransform_FieldTransform_SetToServerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.SetToServerValue)) + case *DocumentTransform_FieldTransform_Increment: + s := proto.Size(x.Increment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_Maximum: + s := proto.Size(x.Maximum) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_Minimum: + s := proto.Size(x.Minimum) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_AppendMissingElements: + s := proto.Size(x.AppendMissingElements) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_RemoveAllFromArray: + s := proto.Size(x.RemoveAllFromArray) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The result of applying a write. +type WriteResult struct { + // The last update time of the document after applying the write. Not set + // after a `delete`. + // + // If the write did not actually change the document, this will be the + // previous update_time. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1.DocumentTransform.FieldTransform], in the + // same order. 
+ TransformResults []*Value `protobuf:"bytes,2,rep,name=transform_results,json=transformResults,proto3" json:"transform_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteResult) Reset() { *m = WriteResult{} } +func (m *WriteResult) String() string { return proto.CompactTextString(m) } +func (*WriteResult) ProtoMessage() {} +func (*WriteResult) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{2} +} +func (m *WriteResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteResult.Unmarshal(m, b) +} +func (m *WriteResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteResult.Marshal(b, m, deterministic) +} +func (dst *WriteResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteResult.Merge(dst, src) +} +func (m *WriteResult) XXX_Size() int { + return xxx_messageInfo_WriteResult.Size(m) +} +func (m *WriteResult) XXX_DiscardUnknown() { + xxx_messageInfo_WriteResult.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteResult proto.InternalMessageInfo + +func (m *WriteResult) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *WriteResult) GetTransformResults() []*Value { + if m != nil { + return m.TransformResults + } + return nil +} + +// A [Document][google.firestore.v1.Document] has changed. +// +// May be the result of multiple [writes][google.firestore.v1.Write], including deletes, that +// ultimately resulted in a new value for the [Document][google.firestore.v1.Document]. +// +// Multiple [DocumentChange][google.firestore.v1.DocumentChange] messages may be returned for the same logical +// change, if multiple targets are affected. +type DocumentChange struct { + // The new state of the [Document][google.firestore.v1.Document]. + // + // If `mask` is set, contains only fields that were updated or added. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs of targets that match this document. + TargetIds []int32 `protobuf:"varint,5,rep,packed,name=target_ids,json=targetIds,proto3" json:"target_ids,omitempty"` + // A set of target IDs for targets that no longer match this document. 
+ RemovedTargetIds []int32 `protobuf:"varint,6,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentChange) Reset() { *m = DocumentChange{} } +func (m *DocumentChange) String() string { return proto.CompactTextString(m) } +func (*DocumentChange) ProtoMessage() {} +func (*DocumentChange) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{3} +} +func (m *DocumentChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentChange.Unmarshal(m, b) +} +func (m *DocumentChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentChange.Marshal(b, m, deterministic) +} +func (dst *DocumentChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentChange.Merge(dst, src) +} +func (m *DocumentChange) XXX_Size() int { + return xxx_messageInfo_DocumentChange.Size(m) +} +func (m *DocumentChange) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentChange.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentChange proto.InternalMessageInfo + +func (m *DocumentChange) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *DocumentChange) GetTargetIds() []int32 { + if m != nil { + return m.TargetIds + } + return nil +} + +func (m *DocumentChange) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +// A [Document][google.firestore.v1.Document] has been deleted. +// +// May be the result of multiple [writes][google.firestore.v1.Write], including updates, the +// last of which deleted the [Document][google.firestore.v1.Document]. +// +// Multiple [DocumentDelete][google.firestore.v1.DocumentDelete] messages may be returned for the same logical +// delete, if multiple targets are affected. +type DocumentDelete struct { + // The resource name of the [Document][google.firestore.v1.Document] that was deleted. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs for targets that previously matched this entity. + RemovedTargetIds []int32 `protobuf:"varint,6,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + // The read timestamp at which the delete was observed. + // + // Greater or equal to the `commit_time` of the delete. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentDelete) Reset() { *m = DocumentDelete{} } +func (m *DocumentDelete) String() string { return proto.CompactTextString(m) } +func (*DocumentDelete) ProtoMessage() {} +func (*DocumentDelete) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{4} +} +func (m *DocumentDelete) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentDelete.Unmarshal(m, b) +} +func (m *DocumentDelete) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentDelete.Marshal(b, m, deterministic) +} +func (dst *DocumentDelete) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentDelete.Merge(dst, src) +} +func (m *DocumentDelete) XXX_Size() int { + return xxx_messageInfo_DocumentDelete.Size(m) +} +func (m *DocumentDelete) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentDelete.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentDelete proto.InternalMessageInfo + +func (m *DocumentDelete) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentDelete) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +func (m *DocumentDelete) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// A [Document][google.firestore.v1.Document] has been removed from the view of the targets. +// +// Sent if the document is no longer relevant to a target and is out of view. +// Can be sent instead of a DocumentDelete or a DocumentChange if the server +// can not send the new value of the document. +// +// Multiple [DocumentRemove][google.firestore.v1.DocumentRemove] messages may be returned for the same logical +// write or delete, if multiple targets are affected. +type DocumentRemove struct { + // The resource name of the [Document][google.firestore.v1.Document] that has gone out of view. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs for targets that previously matched this document. + RemovedTargetIds []int32 `protobuf:"varint,2,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + // The read timestamp at which the remove was observed. + // + // Greater or equal to the `commit_time` of the change/delete/remove. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentRemove) Reset() { *m = DocumentRemove{} } +func (m *DocumentRemove) String() string { return proto.CompactTextString(m) } +func (*DocumentRemove) ProtoMessage() {} +func (*DocumentRemove) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{5} +} +func (m *DocumentRemove) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentRemove.Unmarshal(m, b) +} +func (m *DocumentRemove) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentRemove.Marshal(b, m, deterministic) +} +func (dst *DocumentRemove) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentRemove.Merge(dst, src) +} +func (m *DocumentRemove) XXX_Size() int { + return xxx_messageInfo_DocumentRemove.Size(m) +} +func (m *DocumentRemove) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentRemove.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentRemove proto.InternalMessageInfo + +func (m *DocumentRemove) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentRemove) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +func (m *DocumentRemove) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// A digest of all the documents that match a given target. +type ExistenceFilter struct { + // The target ID to which this filter applies. + TargetId int32 `protobuf:"varint,1,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // The total count of documents that match [target_id][google.firestore.v1.ExistenceFilter.target_id]. + // + // If different from the count of documents in the client that match, the + // client must manually determine which documents no longer match the target. 
+ Count int32 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExistenceFilter) Reset() { *m = ExistenceFilter{} } +func (m *ExistenceFilter) String() string { return proto.CompactTextString(m) } +func (*ExistenceFilter) ProtoMessage() {} +func (*ExistenceFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_write_e73df969af87efc4, []int{6} +} +func (m *ExistenceFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExistenceFilter.Unmarshal(m, b) +} +func (m *ExistenceFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExistenceFilter.Marshal(b, m, deterministic) +} +func (dst *ExistenceFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExistenceFilter.Merge(dst, src) +} +func (m *ExistenceFilter) XXX_Size() int { + return xxx_messageInfo_ExistenceFilter.Size(m) +} +func (m *ExistenceFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ExistenceFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ExistenceFilter proto.InternalMessageInfo + +func (m *ExistenceFilter) GetTargetId() int32 { + if m != nil { + return m.TargetId + } + return 0 +} + +func (m *ExistenceFilter) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +func init() { + proto.RegisterType((*Write)(nil), "google.firestore.v1.Write") + proto.RegisterType((*DocumentTransform)(nil), "google.firestore.v1.DocumentTransform") + proto.RegisterType((*DocumentTransform_FieldTransform)(nil), "google.firestore.v1.DocumentTransform.FieldTransform") + proto.RegisterType((*WriteResult)(nil), "google.firestore.v1.WriteResult") + proto.RegisterType((*DocumentChange)(nil), "google.firestore.v1.DocumentChange") + proto.RegisterType((*DocumentDelete)(nil), "google.firestore.v1.DocumentDelete") + proto.RegisterType((*DocumentRemove)(nil), "google.firestore.v1.DocumentRemove") + proto.RegisterType((*ExistenceFilter)(nil), "google.firestore.v1.ExistenceFilter") + proto.RegisterEnum("google.firestore.v1.DocumentTransform_FieldTransform_ServerValue", DocumentTransform_FieldTransform_ServerValue_name, DocumentTransform_FieldTransform_ServerValue_value) +} + +func init() { + proto.RegisterFile("google/firestore/v1/write.proto", fileDescriptor_write_e73df969af87efc4) +} + +var fileDescriptor_write_e73df969af87efc4 = []byte{ + // 853 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x41, 0x6f, 0xe3, 0x44, + 0x14, 0x6e, 0xd2, 0x26, 0x5b, 0xbf, 0xa0, 0xd6, 0x3b, 0xcb, 0x6a, 0x4d, 0xd8, 0x6a, 0x43, 0x0e, + 0xa8, 0x07, 0xe4, 0xa8, 0x45, 0xb0, 0x82, 0x85, 0x43, 0xd3, 0x3a, 0x6d, 0xa5, 0x2d, 0x0a, 0x4e, + 0x1a, 0x04, 0xaa, 0x34, 0xcc, 0xda, 0x13, 0xd7, 0x5a, 0x7b, 0xc6, 0x9a, 0x19, 0x87, 0xdd, 0xdf, + 0xc1, 0x85, 0x33, 0xe2, 0xc4, 0xbf, 0xe0, 0xca, 0x8d, 0x1f, 0xc1, 0xff, 0x40, 0x9e, 0xb1, 0xdd, + 0x06, 0xa2, 0x6c, 0x59, 0xed, 0x2d, 0x6f, 0xde, 0xf7, 0x7d, 0xef, 0xf3, 0x7b, 0x33, 0x2f, 0xf0, + 0x24, 0xe2, 0x3c, 0x4a, 0xe8, 0x60, 0x1e, 0x0b, 0x2a, 0x15, 0x17, 0x74, 0xb0, 0x38, 0x18, 0xfc, + 0x24, 0x62, 0x45, 0xdd, 0x4c, 0x70, 0xc5, 0xd1, 0x03, 0x03, 0x70, 0x6b, 0x80, 0xbb, 0x38, 0xe8, + 0xf6, 0x56, 0xb1, 0x02, 0x9e, 0xa6, 0x9c, 0x19, 0x5a, 0xb7, 0xbf, 0x0a, 0x11, 0xf2, 0x20, 0x4f, + 0x29, 0x53, 0x25, 0xa6, 0xaa, 0xad, 0xa3, 0x17, 0xf9, 0x7c, 0xa0, 0xe2, 0x94, 0x4a, 0x45, 0xd2, + 0xac, 0x04, 0x3c, 0x2e, 0x01, 0x24, 0x8b, 0x07, 0x84, 0x31, 0xae, 0x88, 0x8a, 0x39, 0x93, 0x26, 
+ 0xdb, 0xff, 0xa3, 0x09, 0xad, 0xef, 0x0a, 0xa7, 0xe8, 0x29, 0xb4, 0xf3, 0x2c, 0x24, 0x8a, 0x3a, + 0x8d, 0x5e, 0x63, 0xbf, 0x73, 0xb8, 0xe7, 0xae, 0x30, 0xed, 0x9e, 0x94, 0xd5, 0xcf, 0x36, 0xfc, + 0x12, 0x8e, 0x1c, 0x68, 0x87, 0x34, 0xa1, 0x8a, 0x3a, 0xcd, 0x5e, 0x63, 0xdf, 0x2a, 0x32, 0x26, + 0x46, 0x23, 0xb0, 0x94, 0x20, 0x4c, 0xce, 0xb9, 0x48, 0x9d, 0xb6, 0x56, 0xfd, 0x78, 0xad, 0xea, + 0xb4, 0x42, 0x9f, 0x6d, 0xf8, 0x37, 0x54, 0x34, 0x84, 0x8e, 0xa9, 0x85, 0x53, 0x22, 0x5f, 0x3a, + 0x9b, 0x5a, 0xe9, 0xa3, 0xb5, 0x4a, 0x17, 0x44, 0xbe, 0xf4, 0xc1, 0xb0, 0x8a, 0xdf, 0xe8, 0x39, + 0xd8, 0x41, 0x2e, 0x04, 0x65, 0x0a, 0x57, 0x1d, 0x74, 0xb6, 0xd6, 0x08, 0x8d, 0x05, 0x0d, 0x38, + 0x0b, 0xe3, 0xa2, 0x59, 0xfe, 0x6e, 0x49, 0xad, 0xd4, 0x87, 0x1d, 0xb0, 0x78, 0x46, 0x85, 0x6e, + 0x65, 0xff, 0xef, 0x16, 0xdc, 0xff, 0xcf, 0x17, 0xa0, 0x2e, 0x6c, 0xd7, 0x85, 0x8a, 0x8e, 0x5a, + 0x7e, 0x1d, 0xa3, 0x1f, 0xc1, 0x9e, 0xc7, 0x34, 0x09, 0x71, 0xfd, 0x8d, 0xd2, 0x69, 0xf6, 0x36, + 0xf7, 0x3b, 0x87, 0x9f, 0xdd, 0xad, 0x3f, 0xee, 0xa8, 0xa0, 0xd7, 0xa1, 0xbf, 0x3b, 0x5f, 0x8a, + 0x65, 0xf7, 0xaf, 0x2d, 0xd8, 0x59, 0xc6, 0xa0, 0x3d, 0x00, 0x53, 0x34, 0x23, 0xea, 0xba, 0xb4, + 0x64, 0xe9, 0x93, 0x31, 0x51, 0xd7, 0x48, 0xc0, 0x03, 0x49, 0x15, 0x56, 0x1c, 0x4b, 0x2a, 0x16, + 0x54, 0xe0, 0x05, 0x49, 0x72, 0x33, 0xd3, 0x9d, 0xc3, 0xa3, 0xb7, 0xb2, 0xe5, 0x4e, 0xb4, 0xd2, + 0xac, 0x10, 0x3a, 0xdb, 0xf0, 0x6d, 0x49, 0xd5, 0x94, 0xdf, 0x3a, 0x43, 0x5f, 0x82, 0x15, 0xb3, + 0x40, 0x50, 0xdd, 0x24, 0x33, 0xd6, 0xee, 0xca, 0x4a, 0x95, 0xc4, 0x0d, 0x1c, 0x7d, 0x0e, 0xf7, + 0x52, 0xf2, 0x2a, 0x4e, 0xf3, 0xb4, 0x9c, 0xe3, 0x7a, 0x66, 0x05, 0xd6, 0xbc, 0x98, 0x69, 0x5e, + 0xeb, 0x4e, 0x3c, 0x03, 0x46, 0xdf, 0xc3, 0x23, 0x92, 0x65, 0x94, 0x85, 0x38, 0x8d, 0xa5, 0x8c, + 0x59, 0x84, 0x69, 0xa2, 0x9d, 0xc8, 0xf2, 0x6a, 0x3f, 0x59, 0xa9, 0x73, 0x24, 0x04, 0x79, 0x5d, + 0x89, 0x3d, 0x34, 0x0a, 0x17, 0x46, 0xc0, 0x2b, 0xf9, 0x68, 0x0a, 0x0f, 0x05, 0x4d, 0xf9, 0x82, + 0x62, 0x92, 0x24, 0x78, 0x2e, 0x78, 0x8a, 0x49, 0x41, 0x73, 0xee, 0xdd, 0x55, 0x18, 0x19, 0xfe, + 0x51, 0x92, 0x8c, 0x04, 0x4f, 0x75, 0xaa, 0xff, 0x35, 0x74, 0x6e, 0xf7, 0xfa, 0x31, 0x38, 0x13, + 0xcf, 0x9f, 0x79, 0x3e, 0x9e, 0x1d, 0x3d, 0xbf, 0xf4, 0xf0, 0xe5, 0x37, 0x93, 0xb1, 0x77, 0x7c, + 0x3e, 0x3a, 0xf7, 0x4e, 0xec, 0x0d, 0x64, 0xc3, 0x7b, 0xbe, 0xf7, 0xed, 0xa5, 0x37, 0x99, 0xe2, + 0xe9, 0xf9, 0x85, 0x67, 0x37, 0x86, 0x36, 0xec, 0xd4, 0xb7, 0x13, 0xab, 0xd7, 0x19, 0xed, 0xff, + 0xdc, 0x80, 0x8e, 0xde, 0x15, 0x3e, 0x95, 0x79, 0xa2, 0xd0, 0xb3, 0xfa, 0x59, 0x16, 0x3b, 0xa7, + 0x5c, 0x1b, 0x75, 0x37, 0xab, 0x85, 0xe4, 0x4e, 0xab, 0x85, 0x54, 0xbd, 0xc7, 0xe2, 0x00, 0x9d, + 0xc2, 0xfd, 0x1b, 0x79, 0xa1, 0x05, 0xab, 0x37, 0xb0, 0x66, 0x20, 0xbe, 0x5d, 0x93, 0x8c, 0x09, + 0xd9, 0xff, 0xa5, 0x01, 0x3b, 0xd5, 0x45, 0x3c, 0xbe, 0x26, 0x2c, 0xa2, 0xe8, 0x8b, 0x7f, 0x3d, + 0xbd, 0x37, 0x2d, 0xb3, 0x5b, 0x2f, 0x73, 0x0f, 0x40, 0x11, 0x11, 0x51, 0x85, 0xe3, 0x50, 0x3a, + 0xad, 0xde, 0xe6, 0x7e, 0xcb, 0xb7, 0xcc, 0xc9, 0x79, 0x28, 0xd1, 0x27, 0x50, 0x76, 0x3a, 0xc4, + 0xb7, 0x60, 0x6d, 0x0d, 0xb3, 0xcb, 0xcc, 0xb4, 0x42, 0x17, 0x0d, 0xab, 0xad, 0x9d, 0x98, 0x95, + 0xb8, 0x6e, 0x2b, 0xfc, 0x2f, 0x71, 0xf4, 0x14, 0x2c, 0x41, 0x49, 0x68, 0x7a, 0xbf, 0xf5, 0xc6, + 0xde, 0x6f, 0x17, 0xe0, 0x22, 0x5c, 0x72, 0xe5, 0x6b, 0xd5, 0xb7, 0x70, 0xd5, 0x7c, 0xd7, 0xae, + 0x4e, 0x60, 0xd7, 0x7b, 0x15, 0x4b, 0x45, 0x59, 0x40, 0x47, 0x71, 0xa2, 0xa8, 0x40, 0x1f, 0x82, + 0x55, 0x57, 0xd4, 0xb6, 0x5a, 0xfe, 0x76, 0x35, 0x0a, 0xf4, 0x3e, 0xb4, 0x02, 0x9e, 0x33, 0xa5, + 0x17, 0x54, 0xcb, 0x37, 
0xc1, 0xf0, 0xb7, 0x06, 0x3c, 0x0a, 0x78, 0xba, 0x6a, 0xda, 0x43, 0xd0, + 0x77, 0x77, 0x5c, 0x98, 0x18, 0x37, 0x7e, 0xf8, 0xaa, 0x84, 0x44, 0x3c, 0x21, 0x2c, 0x72, 0xb9, + 0x88, 0x06, 0x11, 0x65, 0xda, 0xe2, 0xc0, 0xa4, 0x48, 0x16, 0xcb, 0xa5, 0xbf, 0xde, 0x67, 0x75, + 0xf0, 0x6b, 0x73, 0xeb, 0xf4, 0x78, 0x34, 0xf9, 0xbd, 0xf9, 0xc1, 0xa9, 0x51, 0x39, 0x4e, 0x78, + 0x1e, 0xba, 0xa3, 0xba, 0xdc, 0xec, 0xe0, 0xcf, 0x2a, 0x77, 0xa5, 0x73, 0x57, 0x75, 0xee, 0x6a, + 0x76, 0xf0, 0xa2, 0xad, 0xeb, 0x7c, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x69, 0x45, 0xd0, + 0x55, 0x39, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/common.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/common.pb.go new file mode 100644 index 0000000..98051d9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/common.pb.go @@ -0,0 +1,563 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1beta1/common.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A set of field paths on a document. +// Used to restrict a get or update operation on a document to a subset of its +// fields. +// This is different from standard field masks, as this is always scoped to a +// [Document][google.firestore.v1beta1.Document], and takes in account the dynamic nature of [Value][google.firestore.v1beta1.Value]. +type DocumentMask struct { + // The list of field paths in the mask. See [Document.fields][google.firestore.v1beta1.Document.fields] for a field + // path syntax reference. 
+ FieldPaths []string `protobuf:"bytes,1,rep,name=field_paths,json=fieldPaths,proto3" json:"field_paths,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentMask) Reset() { *m = DocumentMask{} } +func (m *DocumentMask) String() string { return proto.CompactTextString(m) } +func (*DocumentMask) ProtoMessage() {} +func (*DocumentMask) Descriptor() ([]byte, []int) { + return fileDescriptor_common_33e7705eedc91e86, []int{0} +} +func (m *DocumentMask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentMask.Unmarshal(m, b) +} +func (m *DocumentMask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentMask.Marshal(b, m, deterministic) +} +func (dst *DocumentMask) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentMask.Merge(dst, src) +} +func (m *DocumentMask) XXX_Size() int { + return xxx_messageInfo_DocumentMask.Size(m) +} +func (m *DocumentMask) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentMask.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentMask proto.InternalMessageInfo + +func (m *DocumentMask) GetFieldPaths() []string { + if m != nil { + return m.FieldPaths + } + return nil +} + +// A precondition on a document, used for conditional operations. +type Precondition struct { + // The type of precondition. + // + // Types that are valid to be assigned to ConditionType: + // *Precondition_Exists + // *Precondition_UpdateTime + ConditionType isPrecondition_ConditionType `protobuf_oneof:"condition_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Precondition) Reset() { *m = Precondition{} } +func (m *Precondition) String() string { return proto.CompactTextString(m) } +func (*Precondition) ProtoMessage() {} +func (*Precondition) Descriptor() ([]byte, []int) { + return fileDescriptor_common_33e7705eedc91e86, []int{1} +} +func (m *Precondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Precondition.Unmarshal(m, b) +} +func (m *Precondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Precondition.Marshal(b, m, deterministic) +} +func (dst *Precondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_Precondition.Merge(dst, src) +} +func (m *Precondition) XXX_Size() int { + return xxx_messageInfo_Precondition.Size(m) +} +func (m *Precondition) XXX_DiscardUnknown() { + xxx_messageInfo_Precondition.DiscardUnknown(m) +} + +var xxx_messageInfo_Precondition proto.InternalMessageInfo + +type isPrecondition_ConditionType interface { + isPrecondition_ConditionType() +} + +type Precondition_Exists struct { + Exists bool `protobuf:"varint,1,opt,name=exists,proto3,oneof"` +} + +type Precondition_UpdateTime struct { + UpdateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=update_time,json=updateTime,proto3,oneof"` +} + +func (*Precondition_Exists) isPrecondition_ConditionType() {} + +func (*Precondition_UpdateTime) isPrecondition_ConditionType() {} + +func (m *Precondition) GetConditionType() isPrecondition_ConditionType { + if m != nil { + return m.ConditionType + } + return nil +} + +func (m *Precondition) GetExists() bool { + if x, ok := m.GetConditionType().(*Precondition_Exists); ok { + return x.Exists + } + return false +} + +func (m *Precondition) GetUpdateTime() *timestamp.Timestamp { + if x, ok := m.GetConditionType().(*Precondition_UpdateTime); ok { + return x.UpdateTime + } + return nil +} + 
+// XXX_OneofFuncs is for the internal use of the proto package. +func (*Precondition) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Precondition_OneofMarshaler, _Precondition_OneofUnmarshaler, _Precondition_OneofSizer, []interface{}{ + (*Precondition_Exists)(nil), + (*Precondition_UpdateTime)(nil), + } +} + +func _Precondition_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Precondition) + // condition_type + switch x := m.ConditionType.(type) { + case *Precondition_Exists: + t := uint64(0) + if x.Exists { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Precondition_UpdateTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UpdateTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Precondition.ConditionType has unexpected type %T", x) + } + return nil +} + +func _Precondition_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Precondition) + switch tag { + case 1: // condition_type.exists + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ConditionType = &Precondition_Exists{x != 0} + return true, err + case 2: // condition_type.update_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConditionType = &Precondition_UpdateTime{msg} + return true, err + default: + return false, nil + } +} + +func _Precondition_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Precondition) + // condition_type + switch x := m.ConditionType.(type) { + case *Precondition_Exists: + n += 1 // tag and wire + n += 1 + case *Precondition_UpdateTime: + s := proto.Size(x.UpdateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for creating a new transaction. +type TransactionOptions struct { + // The mode of the transaction. 
+ // + // Types that are valid to be assigned to Mode: + // *TransactionOptions_ReadOnly_ + // *TransactionOptions_ReadWrite_ + Mode isTransactionOptions_Mode `protobuf_oneof:"mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions) Reset() { *m = TransactionOptions{} } +func (m *TransactionOptions) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions) ProtoMessage() {} +func (*TransactionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_common_33e7705eedc91e86, []int{2} +} +func (m *TransactionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions.Unmarshal(m, b) +} +func (m *TransactionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions.Merge(dst, src) +} +func (m *TransactionOptions) XXX_Size() int { + return xxx_messageInfo_TransactionOptions.Size(m) +} +func (m *TransactionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions proto.InternalMessageInfo + +type isTransactionOptions_Mode interface { + isTransactionOptions_Mode() +} + +type TransactionOptions_ReadOnly_ struct { + ReadOnly *TransactionOptions_ReadOnly `protobuf:"bytes,2,opt,name=read_only,json=readOnly,proto3,oneof"` +} + +type TransactionOptions_ReadWrite_ struct { + ReadWrite *TransactionOptions_ReadWrite `protobuf:"bytes,3,opt,name=read_write,json=readWrite,proto3,oneof"` +} + +func (*TransactionOptions_ReadOnly_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_ReadWrite_) isTransactionOptions_Mode() {} + +func (m *TransactionOptions) GetMode() isTransactionOptions_Mode { + if m != nil { + return m.Mode + } + return nil +} + +func (m *TransactionOptions) GetReadOnly() *TransactionOptions_ReadOnly { + if x, ok := m.GetMode().(*TransactionOptions_ReadOnly_); ok { + return x.ReadOnly + } + return nil +} + +func (m *TransactionOptions) GetReadWrite() *TransactionOptions_ReadWrite { + if x, ok := m.GetMode().(*TransactionOptions_ReadWrite_); ok { + return x.ReadWrite + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransactionOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_OneofMarshaler, _TransactionOptions_OneofUnmarshaler, _TransactionOptions_OneofSizer, []interface{}{ + (*TransactionOptions_ReadOnly_)(nil), + (*TransactionOptions_ReadWrite_)(nil), + } +} + +func _TransactionOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadOnly_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadOnly); err != nil { + return err + } + case *TransactionOptions_ReadWrite_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadWrite); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions.Mode has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions) + switch tag { + case 2: // mode.read_only + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadOnly) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadOnly_{msg} + return true, err + case 3: // mode.read_write + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadWrite) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadWrite_{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadOnly_: + s := proto.Size(x.ReadOnly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadWrite_: + s := proto.Size(x.ReadWrite) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Options for a transaction that can be used to read and write documents. +type TransactionOptions_ReadWrite struct { + // An optional transaction to retry. 
+ RetryTransaction []byte `protobuf:"bytes,1,opt,name=retry_transaction,json=retryTransaction,proto3" json:"retry_transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadWrite) Reset() { *m = TransactionOptions_ReadWrite{} } +func (m *TransactionOptions_ReadWrite) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadWrite) ProtoMessage() {} +func (*TransactionOptions_ReadWrite) Descriptor() ([]byte, []int) { + return fileDescriptor_common_33e7705eedc91e86, []int{2, 0} +} +func (m *TransactionOptions_ReadWrite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadWrite.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadWrite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadWrite.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadWrite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadWrite.Merge(dst, src) +} +func (m *TransactionOptions_ReadWrite) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadWrite.Size(m) +} +func (m *TransactionOptions_ReadWrite) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadWrite.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadWrite proto.InternalMessageInfo + +func (m *TransactionOptions_ReadWrite) GetRetryTransaction() []byte { + if m != nil { + return m.RetryTransaction + } + return nil +} + +// Options for a transaction that can only be used to read documents. +type TransactionOptions_ReadOnly struct { + // The consistency mode for this transaction. If not set, defaults to strong + // consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *TransactionOptions_ReadOnly_ReadTime + ConsistencySelector isTransactionOptions_ReadOnly_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadOnly) Reset() { *m = TransactionOptions_ReadOnly{} } +func (m *TransactionOptions_ReadOnly) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadOnly) ProtoMessage() {} +func (*TransactionOptions_ReadOnly) Descriptor() ([]byte, []int) { + return fileDescriptor_common_33e7705eedc91e86, []int{2, 1} +} +func (m *TransactionOptions_ReadOnly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadOnly.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadOnly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadOnly.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadOnly) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadOnly.Merge(dst, src) +} +func (m *TransactionOptions_ReadOnly) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadOnly.Size(m) +} +func (m *TransactionOptions_ReadOnly) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadOnly.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadOnly proto.InternalMessageInfo + +type isTransactionOptions_ReadOnly_ConsistencySelector interface { + isTransactionOptions_ReadOnly_ConsistencySelector() +} + +type TransactionOptions_ReadOnly_ReadTime struct { + ReadTime *timestamp.Timestamp 
`protobuf:"bytes,2,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*TransactionOptions_ReadOnly_ReadTime) isTransactionOptions_ReadOnly_ConsistencySelector() {} + +func (m *TransactionOptions_ReadOnly) GetConsistencySelector() isTransactionOptions_ReadOnly_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*TransactionOptions_ReadOnly_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*TransactionOptions_ReadOnly) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_ReadOnly_OneofMarshaler, _TransactionOptions_ReadOnly_OneofUnmarshaler, _TransactionOptions_ReadOnly_OneofSizer, []interface{}{ + (*TransactionOptions_ReadOnly_ReadTime)(nil), + } +} + +func _TransactionOptions_ReadOnly_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions_ReadOnly) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *TransactionOptions_ReadOnly_ReadTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions_ReadOnly.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_ReadOnly_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions_ReadOnly) + switch tag { + case 2: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &TransactionOptions_ReadOnly_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_ReadOnly_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions_ReadOnly) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *TransactionOptions_ReadOnly_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*DocumentMask)(nil), "google.firestore.v1beta1.DocumentMask") + proto.RegisterType((*Precondition)(nil), "google.firestore.v1beta1.Precondition") + proto.RegisterType((*TransactionOptions)(nil), "google.firestore.v1beta1.TransactionOptions") + proto.RegisterType((*TransactionOptions_ReadWrite)(nil), "google.firestore.v1beta1.TransactionOptions.ReadWrite") + proto.RegisterType((*TransactionOptions_ReadOnly)(nil), "google.firestore.v1beta1.TransactionOptions.ReadOnly") +} + +func init() { + proto.RegisterFile("google/firestore/v1beta1/common.proto", fileDescriptor_common_33e7705eedc91e86) +} + +var fileDescriptor_common_33e7705eedc91e86 = []byte{ + // 470 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0xe1, 0x8a, 0xd3, 0x40, + 0x10, 0xc7, 0x9b, 0xde, 0x71, 0xb4, 0xd3, 0x22, 0x67, 0x10, 0x09, 0xe1, 0xf0, 0x8e, 0x82, 0x50, + 0x10, 0x36, 0x54, 0x51, 0x14, 0xf1, 0x83, 0xa9, 0xdc, 0xf5, 0x8b, 0x5c, 0x89, 0xe5, 0x0e, 0xa4, + 0x12, 0xb6, 
0xc9, 0x34, 0x2e, 0x26, 0x3b, 0x61, 0x77, 0xab, 0xe6, 0x75, 0xfc, 0xe8, 0x1b, 0xf8, + 0x0a, 0x3e, 0x87, 0x0f, 0x22, 0xd9, 0xa4, 0x51, 0x38, 0x0e, 0xf4, 0x5b, 0x32, 0xf3, 0x9b, 0xff, + 0xfc, 0xff, 0xc3, 0xc2, 0xc3, 0x8c, 0x28, 0xcb, 0x31, 0xd8, 0x0a, 0x85, 0xda, 0x90, 0xc2, 0xe0, + 0xf3, 0x6c, 0x83, 0x86, 0xcf, 0x82, 0x84, 0x8a, 0x82, 0x24, 0x2b, 0x15, 0x19, 0x72, 0xbd, 0x06, + 0x63, 0x1d, 0xc6, 0x5a, 0xcc, 0x3f, 0x6d, 0x05, 0x2c, 0xb7, 0xd9, 0x6d, 0x03, 0x23, 0x0a, 0xd4, + 0x86, 0x17, 0x65, 0x33, 0xea, 0x9f, 0xb4, 0x00, 0x2f, 0x45, 0xc0, 0xa5, 0x24, 0xc3, 0x8d, 0x20, + 0xa9, 0x9b, 0xee, 0x24, 0x80, 0xf1, 0x1b, 0x4a, 0x76, 0x05, 0x4a, 0xf3, 0x96, 0xeb, 0x4f, 0xee, + 0x29, 0x8c, 0xb6, 0x02, 0xf3, 0x34, 0x2e, 0xb9, 0xf9, 0xa8, 0x3d, 0xe7, 0xec, 0x60, 0x3a, 0x8c, + 0xc0, 0x96, 0x96, 0x75, 0x65, 0x52, 0xc1, 0x78, 0xa9, 0x30, 0x21, 0x99, 0x8a, 0x5a, 0xc7, 0xf5, + 0xe0, 0x08, 0xbf, 0x0a, 0x6d, 0x6a, 0xd6, 0x99, 0x0e, 0x16, 0xbd, 0xa8, 0xfd, 0x77, 0x5f, 0xc1, + 0x68, 0x57, 0xa6, 0xdc, 0x60, 0x5c, 0x5b, 0xf2, 0xfa, 0x67, 0xce, 0x74, 0xf4, 0xd8, 0x67, 0x6d, + 0x92, 0xbd, 0x5f, 0xb6, 0xda, 0xfb, 0x5d, 0xf4, 0x22, 0x68, 0x06, 0xea, 0x52, 0x78, 0x0c, 0x77, + 0xba, 0x2d, 0xb1, 0xa9, 0x4a, 0x9c, 0xfc, 0xea, 0x83, 0xbb, 0x52, 0x5c, 0x6a, 0x9e, 0xd4, 0xc5, + 0xcb, 0xd2, 0x06, 0x71, 0x57, 0x30, 0x54, 0xc8, 0xd3, 0x98, 0x64, 0x5e, 0xb5, 0x5b, 0x9e, 0xb2, + 0xdb, 0xee, 0xc5, 0x6e, 0x0a, 0xb0, 0x08, 0x79, 0x7a, 0x29, 0xf3, 0x6a, 0xd1, 0x8b, 0x06, 0xaa, + 0xfd, 0x76, 0xaf, 0x01, 0xac, 0xea, 0x17, 0x25, 0x0c, 0x7a, 0x07, 0x56, 0xf6, 0xd9, 0x7f, 0xcb, + 0x5e, 0xd7, 0xd3, 0x8b, 0x5e, 0x64, 0x1d, 0xda, 0x1f, 0xff, 0x39, 0x0c, 0xbb, 0x8e, 0xfb, 0x08, + 0xee, 0x2a, 0x34, 0xaa, 0x8a, 0xcd, 0x9f, 0x79, 0x7b, 0xc8, 0x71, 0x74, 0x6c, 0x1b, 0x7f, 0xe9, + 0xfa, 0x1f, 0x60, 0xb0, 0xb7, 0xea, 0xbe, 0x68, 0x43, 0xff, 0xf3, 0x69, 0x6d, 0x32, 0x7b, 0xd8, + 0xfb, 0x70, 0x2f, 0x21, 0xa9, 0x85, 0x36, 0x28, 0x93, 0x2a, 0xd6, 0x98, 0x63, 0x62, 0x48, 0x85, + 0x47, 0x70, 0x58, 0x50, 0x8a, 0xe1, 0x0f, 0x07, 0x4e, 0x12, 0x2a, 0x6e, 0xcd, 0x1a, 0x8e, 0xe6, + 0xf6, 0x69, 0x2e, 0xeb, 0x35, 0x4b, 0xe7, 0xfd, 0xeb, 0x16, 0xcc, 0x28, 0xe7, 0x32, 0x63, 0xa4, + 0xb2, 0x20, 0x43, 0x69, 0x4d, 0x04, 0x4d, 0x8b, 0x97, 0x42, 0xdf, 0x7c, 0xe1, 0x2f, 0xbb, 0xca, + 0xb7, 0xfe, 0xe1, 0xc5, 0xfc, 0xfc, 0xdd, 0xf7, 0xfe, 0x83, 0x8b, 0x46, 0x6a, 0x9e, 0xd3, 0x2e, + 0x65, 0xe7, 0xdd, 0xe6, 0xab, 0x59, 0x58, 0x4f, 0xfc, 0xdc, 0x03, 0x6b, 0x0b, 0xac, 0x3b, 0x60, + 0x7d, 0xd5, 0x48, 0x6e, 0x8e, 0xec, 0xda, 0x27, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe5, 0x64, + 0x00, 0xd8, 0x57, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/document.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/document.pb.go new file mode 100644 index 0000000..de0a652 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/document.pb.go @@ -0,0 +1,684 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1beta1/document.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Firestore document. +// +// Must not exceed 1 MiB - 4 bytes. +type Document struct { + // The resource name of the document, for example + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The document's fields. + // + // The map keys represent field names. + // + // A simple field name contains only characters `a` to `z`, `A` to `Z`, + // `0` to `9`, or `_`, and must not start with `0` to `9`. For example, + // `foo_bar_17`. + // + // Field names matching the regular expression `__.*__` are reserved. Reserved + // field names are forbidden except in certain documented contexts. The map + // keys, represented as UTF-8, must not exceed 1,500 bytes and cannot be + // empty. + // + // Field paths may be used in other contexts to refer to structured fields + // defined here. For `map_value`, the field path is represented by the simple + // or quoted field names of the containing fields, delimited by `.`. For + // example, the structured field + // `"foo" : { map_value: { "x&y" : { string_value: "hello" }}}` would be + // represented by the field path `foo.x&y`. + // + // Within a field path, a quoted field name starts and ends with `` ` `` and + // may contain any character. Some characters, including `` ` ``, must be + // escaped using a `\`. For example, `` `x&y` `` represents `x&y` and + // `` `bak\`tik` `` represents `` bak`tik ``. + Fields map[string]*Value `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. The time at which the document was created. + // + // This value increases monotonically when a document is deleted then + // recreated. It can also be compared to values from other documents and + // the `read_time` of a query. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The time at which the document was last changed. + // + // This value is initially set to the `create_time` then increases + // monotonically with each change to the document. It can also be + // compared to values from other documents and the `read_time` of a query. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Document) Reset() { *m = Document{} } +func (m *Document) String() string { return proto.CompactTextString(m) } +func (*Document) ProtoMessage() {} +func (*Document) Descriptor() ([]byte, []int) { + return fileDescriptor_document_8735c50e5ee5c345, []int{0} +} +func (m *Document) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Document.Unmarshal(m, b) +} +func (m *Document) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Document.Marshal(b, m, deterministic) +} +func (dst *Document) XXX_Merge(src proto.Message) { + xxx_messageInfo_Document.Merge(dst, src) +} +func (m *Document) XXX_Size() int { + return xxx_messageInfo_Document.Size(m) +} +func (m *Document) XXX_DiscardUnknown() { + xxx_messageInfo_Document.DiscardUnknown(m) +} + +var xxx_messageInfo_Document proto.InternalMessageInfo + +func (m *Document) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Document) GetFields() map[string]*Value { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Document) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Document) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +// A message that can hold any of the supported value types. +type Value struct { + // Must have a value set. + // + // Types that are valid to be assigned to ValueType: + // *Value_NullValue + // *Value_BooleanValue + // *Value_IntegerValue + // *Value_DoubleValue + // *Value_TimestampValue + // *Value_StringValue + // *Value_BytesValue + // *Value_ReferenceValue + // *Value_GeoPointValue + // *Value_ArrayValue + // *Value_MapValue + ValueType isValue_ValueType `protobuf_oneof:"value_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_document_8735c50e5ee5c345, []int{1} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_ValueType interface { + isValue_ValueType() +} + +type Value_NullValue struct { + NullValue _struct.NullValue `protobuf:"varint,11,opt,name=null_value,json=nullValue,proto3,enum=google.protobuf.NullValue,oneof"` +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,1,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,2,opt,name=integer_value,json=integerValue,proto3,oneof"` +} + +type Value_DoubleValue struct { + DoubleValue float64 
`protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type Value_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,10,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,17,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BytesValue struct { + BytesValue []byte `protobuf:"bytes,18,opt,name=bytes_value,json=bytesValue,proto3,oneof"` +} + +type Value_ReferenceValue struct { + ReferenceValue string `protobuf:"bytes,5,opt,name=reference_value,json=referenceValue,proto3,oneof"` +} + +type Value_GeoPointValue struct { + GeoPointValue *latlng.LatLng `protobuf:"bytes,8,opt,name=geo_point_value,json=geoPointValue,proto3,oneof"` +} + +type Value_ArrayValue struct { + ArrayValue *ArrayValue `protobuf:"bytes,9,opt,name=array_value,json=arrayValue,proto3,oneof"` +} + +type Value_MapValue struct { + MapValue *MapValue `protobuf:"bytes,6,opt,name=map_value,json=mapValue,proto3,oneof"` +} + +func (*Value_NullValue) isValue_ValueType() {} + +func (*Value_BooleanValue) isValue_ValueType() {} + +func (*Value_IntegerValue) isValue_ValueType() {} + +func (*Value_DoubleValue) isValue_ValueType() {} + +func (*Value_TimestampValue) isValue_ValueType() {} + +func (*Value_StringValue) isValue_ValueType() {} + +func (*Value_BytesValue) isValue_ValueType() {} + +func (*Value_ReferenceValue) isValue_ValueType() {} + +func (*Value_GeoPointValue) isValue_ValueType() {} + +func (*Value_ArrayValue) isValue_ValueType() {} + +func (*Value_MapValue) isValue_ValueType() {} + +func (m *Value) GetValueType() isValue_ValueType { + if m != nil { + return m.ValueType + } + return nil +} + +func (m *Value) GetNullValue() _struct.NullValue { + if x, ok := m.GetValueType().(*Value_NullValue); ok { + return x.NullValue + } + return _struct.NullValue_NULL_VALUE +} + +func (m *Value) GetBooleanValue() bool { + if x, ok := m.GetValueType().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetIntegerValue() int64 { + if x, ok := m.GetValueType().(*Value_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Value) GetDoubleValue() float64 { + if x, ok := m.GetValueType().(*Value_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *Value) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetValueType().(*Value_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +func (m *Value) GetStringValue() string { + if x, ok := m.GetValueType().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBytesValue() []byte { + if x, ok := m.GetValueType().(*Value_BytesValue); ok { + return x.BytesValue + } + return nil +} + +func (m *Value) GetReferenceValue() string { + if x, ok := m.GetValueType().(*Value_ReferenceValue); ok { + return x.ReferenceValue + } + return "" +} + +func (m *Value) GetGeoPointValue() *latlng.LatLng { + if x, ok := m.GetValueType().(*Value_GeoPointValue); ok { + return x.GeoPointValue + } + return nil +} + +func (m *Value) GetArrayValue() *ArrayValue { + if x, ok := m.GetValueType().(*Value_ArrayValue); ok { + return x.ArrayValue + } + return nil +} + +func (m *Value) GetMapValue() *MapValue { + if x, ok := m.GetValueType().(*Value_MapValue); ok { + return x.MapValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_NullValue)(nil), + (*Value_BooleanValue)(nil), + (*Value_IntegerValue)(nil), + (*Value_DoubleValue)(nil), + (*Value_TimestampValue)(nil), + (*Value_StringValue)(nil), + (*Value_BytesValue)(nil), + (*Value_ReferenceValue)(nil), + (*Value_GeoPointValue)(nil), + (*Value_ArrayValue)(nil), + (*Value_MapValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_IntegerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *Value_TimestampValue: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return err + } + case *Value_StringValue: + b.EncodeVarint(17<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BytesValue: + b.EncodeVarint(18<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BytesValue) + case *Value_ReferenceValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ReferenceValue) + case *Value_GeoPointValue: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GeoPointValue); err != nil { + return err + } + case *Value_ArrayValue: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ArrayValue); err != nil { + return err + } + case *Value_MapValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MapValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Value.ValueType has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 11: // value_type.null_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_NullValue{_struct.NullValue(x)} + return true, err + case 1: // value_type.boolean_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_BooleanValue{x != 0} + return true, err + case 2: // value_type.integer_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ValueType = &Value_IntegerValue{int64(x)} + return true, err + case 3: // value_type.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.ValueType = &Value_DoubleValue{math.Float64frombits(x)} + return true, err + case 10: // value_type.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ValueType = &Value_TimestampValue{msg} + return true, err + case 17: // value_type.string_value + if wire != 
proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_StringValue{x} + return true, err + case 18: // value_type.bytes_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ValueType = &Value_BytesValue{x} + return true, err + case 5: // value_type.reference_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ValueType = &Value_ReferenceValue{x} + return true, err + case 8: // value_type.geo_point_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(latlng.LatLng) + err := b.DecodeMessage(msg) + m.ValueType = &Value_GeoPointValue{msg} + return true, err + case 9: // value_type.array_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_ArrayValue{msg} + return true, err + case 6: // value_type.map_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MapValue) + err := b.DecodeMessage(msg) + m.ValueType = &Value_MapValue{msg} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // value_type + switch x := m.ValueType.(type) { + case *Value_NullValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.NullValue)) + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_IntegerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Value_DoubleValue: + n += 1 // tag and wire + n += 8 + case *Value_TimestampValue: + s := proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_StringValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BytesValue: + n += 2 // tag and wire + n += proto.SizeVarint(uint64(len(x.BytesValue))) + n += len(x.BytesValue) + case *Value_ReferenceValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ReferenceValue))) + n += len(x.ReferenceValue) + case *Value_GeoPointValue: + s := proto.Size(x.GeoPointValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_ArrayValue: + s := proto.Size(x.ArrayValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_MapValue: + s := proto.Size(x.MapValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An array value. +type ArrayValue struct { + // Values in the array. 
+ Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ArrayValue) Reset() { *m = ArrayValue{} } +func (m *ArrayValue) String() string { return proto.CompactTextString(m) } +func (*ArrayValue) ProtoMessage() {} +func (*ArrayValue) Descriptor() ([]byte, []int) { + return fileDescriptor_document_8735c50e5ee5c345, []int{2} +} +func (m *ArrayValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ArrayValue.Unmarshal(m, b) +} +func (m *ArrayValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ArrayValue.Marshal(b, m, deterministic) +} +func (dst *ArrayValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_ArrayValue.Merge(dst, src) +} +func (m *ArrayValue) XXX_Size() int { + return xxx_messageInfo_ArrayValue.Size(m) +} +func (m *ArrayValue) XXX_DiscardUnknown() { + xxx_messageInfo_ArrayValue.DiscardUnknown(m) +} + +var xxx_messageInfo_ArrayValue proto.InternalMessageInfo + +func (m *ArrayValue) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// A map value. +type MapValue struct { + // The map's fields. + // + // The map keys represent field names. Field names matching the regular + // expression `__.*__` are reserved. Reserved field names are forbidden except + // in certain documented contexts. The map keys, represented as UTF-8, must + // not exceed 1,500 bytes and cannot be empty. + Fields map[string]*Value `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapValue) Reset() { *m = MapValue{} } +func (m *MapValue) String() string { return proto.CompactTextString(m) } +func (*MapValue) ProtoMessage() {} +func (*MapValue) Descriptor() ([]byte, []int) { + return fileDescriptor_document_8735c50e5ee5c345, []int{3} +} +func (m *MapValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapValue.Unmarshal(m, b) +} +func (m *MapValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapValue.Marshal(b, m, deterministic) +} +func (dst *MapValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapValue.Merge(dst, src) +} +func (m *MapValue) XXX_Size() int { + return xxx_messageInfo_MapValue.Size(m) +} +func (m *MapValue) XXX_DiscardUnknown() { + xxx_messageInfo_MapValue.DiscardUnknown(m) +} + +var xxx_messageInfo_MapValue proto.InternalMessageInfo + +func (m *MapValue) GetFields() map[string]*Value { + if m != nil { + return m.Fields + } + return nil +} + +func init() { + proto.RegisterType((*Document)(nil), "google.firestore.v1beta1.Document") + proto.RegisterMapType((map[string]*Value)(nil), "google.firestore.v1beta1.Document.FieldsEntry") + proto.RegisterType((*Value)(nil), "google.firestore.v1beta1.Value") + proto.RegisterType((*ArrayValue)(nil), "google.firestore.v1beta1.ArrayValue") + proto.RegisterType((*MapValue)(nil), "google.firestore.v1beta1.MapValue") + proto.RegisterMapType((map[string]*Value)(nil), "google.firestore.v1beta1.MapValue.FieldsEntry") +} + +func init() { + proto.RegisterFile("google/firestore/v1beta1/document.proto", fileDescriptor_document_8735c50e5ee5c345) +} + +var fileDescriptor_document_8735c50e5ee5c345 = []byte{ + // 655 bytes of a gzipped FileDescriptorProto + 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0xcf, 0x6e, 0xd3, 0x4e, + 0x10, 0xc7, 0xe3, 0x24, 0x8d, 0x92, 0x71, 0xda, 0xfe, 0x7e, 0xe6, 0x12, 0x45, 0x15, 0x0d, 0x01, + 0x44, 0xb8, 0xd8, 0x6a, 0x11, 0x02, 0x51, 0x71, 0x68, 0x4a, 0xd3, 0x1c, 0x0a, 0xaa, 0x0c, 0xea, + 0xa1, 0xaa, 0x14, 0xad, 0x93, 0xcd, 0xca, 0x62, 0xbd, 0x6b, 0xad, 0xd7, 0x95, 0xf2, 0x3a, 0x1c, + 0x39, 0xf0, 0x02, 0xf0, 0x04, 0x7d, 0x2a, 0xb4, 0xff, 0xdc, 0x0a, 0x1a, 0xe5, 0xc4, 0xcd, 0x9e, + 0xf9, 0x7c, 0xbf, 0x33, 0xb3, 0xde, 0x31, 0xbc, 0x20, 0x9c, 0x13, 0x8a, 0xa3, 0x65, 0x2a, 0x70, + 0x21, 0xb9, 0xc0, 0xd1, 0xcd, 0x41, 0x82, 0x25, 0x3a, 0x88, 0x16, 0x7c, 0x5e, 0x66, 0x98, 0xc9, + 0x30, 0x17, 0x5c, 0xf2, 0xa0, 0x67, 0xc0, 0xb0, 0x02, 0x43, 0x0b, 0xf6, 0xf7, 0xac, 0x85, 0xe6, + 0x92, 0x72, 0x19, 0x15, 0x52, 0x94, 0x73, 0xab, 0xeb, 0xef, 0xff, 0x99, 0x95, 0x69, 0x86, 0x0b, + 0x89, 0xb2, 0xdc, 0x02, 0xd6, 0x38, 0x92, 0xab, 0x1c, 0x47, 0x14, 0x49, 0xca, 0x88, 0xcd, 0x38, + 0x63, 0x94, 0xa7, 0x11, 0x62, 0x8c, 0x4b, 0x24, 0x53, 0xce, 0x0a, 0x93, 0x1d, 0xfe, 0xaa, 0x43, + 0xfb, 0x83, 0xed, 0x31, 0x08, 0xa0, 0xc9, 0x50, 0x86, 0x7b, 0xde, 0xc0, 0x1b, 0x75, 0x62, 0xfd, + 0x1c, 0x4c, 0xa0, 0xb5, 0x4c, 0x31, 0x5d, 0x14, 0xbd, 0xfa, 0xa0, 0x31, 0xf2, 0x0f, 0xc3, 0x70, + 0xdd, 0x08, 0xa1, 0xf3, 0x09, 0x27, 0x5a, 0x70, 0xca, 0xa4, 0x58, 0xc5, 0x56, 0x1d, 0x1c, 0x81, + 0x3f, 0x17, 0x18, 0x49, 0x3c, 0x53, 0xad, 0xf7, 0x1a, 0x03, 0x6f, 0xe4, 0x1f, 0xf6, 0x9d, 0x99, + 0x9b, 0x2b, 0xfc, 0xe2, 0xe6, 0x8a, 0xc1, 0xe0, 0x2a, 0xa0, 0xc4, 0x65, 0xbe, 0xa8, 0xc4, 0xcd, + 0xcd, 0x62, 0x83, 0xab, 0x40, 0xff, 0x0a, 0xfc, 0x7b, 0x0d, 0x05, 0xff, 0x41, 0xe3, 0x2b, 0x5e, + 0xd9, 0x19, 0xd5, 0x63, 0xf0, 0x1a, 0xb6, 0x6e, 0x10, 0x2d, 0x71, 0xaf, 0xae, 0x7d, 0xf7, 0xd7, + 0x4f, 0x78, 0xa9, 0xb0, 0xd8, 0xd0, 0xef, 0xea, 0x6f, 0xbd, 0xe1, 0x6d, 0x13, 0xb6, 0x74, 0x30, + 0x38, 0x02, 0x60, 0x25, 0xa5, 0x33, 0xe3, 0xe4, 0x0f, 0xbc, 0xd1, 0xce, 0x03, 0x1d, 0x7e, 0x2a, + 0x29, 0xd5, 0xfc, 0xb4, 0x16, 0x77, 0x98, 0x7b, 0x09, 0x9e, 0xc3, 0x76, 0xc2, 0x39, 0xc5, 0x88, + 0x59, 0xbd, 0xea, 0xae, 0x3d, 0xad, 0xc5, 0x5d, 0x1b, 0xae, 0xb0, 0x94, 0x49, 0x4c, 0xb0, 0x98, + 0xdd, 0x35, 0xdc, 0x50, 0x98, 0x0d, 0x1b, 0xec, 0x29, 0x74, 0x17, 0xbc, 0x4c, 0x28, 0xb6, 0x94, + 0x3a, 0x6b, 0x6f, 0x5a, 0x8b, 0x7d, 0x13, 0x35, 0xd0, 0x29, 0xec, 0x56, 0x77, 0xc8, 0x72, 0xb0, + 0xe9, 0x58, 0xa7, 0xb5, 0x78, 0xa7, 0x12, 0x55, 0xb5, 0x0a, 0x29, 0x52, 0x46, 0xac, 0xc7, 0xff, + 0xea, 0x58, 0x55, 0x2d, 0x13, 0x35, 0xd0, 0x13, 0xf0, 0x93, 0x95, 0xc4, 0x85, 0x65, 0x82, 0x81, + 0x37, 0xea, 0x4e, 0x6b, 0x31, 0xe8, 0xa0, 0x41, 0x5e, 0xc2, 0xae, 0xc0, 0x4b, 0x2c, 0x30, 0x9b, + 0xbb, 0xb6, 0xb7, 0xac, 0xd5, 0x4e, 0x95, 0x30, 0xe8, 0x7b, 0xd8, 0x25, 0x98, 0xcf, 0x72, 0x9e, + 0x32, 0x69, 0xd1, 0xb6, 0xee, 0xfc, 0x91, 0xeb, 0x5c, 0x2d, 0x41, 0x78, 0x8e, 0xe4, 0x39, 0x23, + 0xd3, 0x5a, 0xbc, 0x4d, 0x30, 0xbf, 0x50, 0xb0, 0x91, 0x9f, 0x81, 0x8f, 0x84, 0x40, 0x2b, 0x2b, + 0xed, 0x68, 0xe9, 0xb3, 0xf5, 0xdf, 0xfc, 0x58, 0xc1, 0xee, 0x9b, 0x01, 0xaa, 0xde, 0x82, 0x63, + 0xe8, 0x64, 0xc8, 0x9d, 0x5d, 0x4b, 0xdb, 0x0c, 0xd7, 0xdb, 0x7c, 0x44, 0xb9, 0x33, 0x69, 0x67, + 0xf6, 0x79, 0xdc, 0x05, 0xd0, 0xf2, 0x99, 0xea, 0x78, 0x78, 0x0a, 0x70, 0x57, 0x2c, 0x78, 0x03, + 0x2d, 0x9d, 0x2b, 0x7a, 0x9e, 0x5e, 0xbc, 0x8d, 0xd7, 0xd2, 0xe2, 0xc3, 0x1f, 0x1e, 0xb4, 0x5d, + 0xb5, 0x7b, 0xeb, 0xeb, 0x6d, 0x5a, 0x5f, 0xa7, 0x79, 0x68, 0x7d, 0xff, 0xe5, 0x12, 0x8d, 0x7f, + 0x7a, 0xb0, 0x37, 0xe7, 0xd9, 0x5a, 0xc5, 0x78, 0xdb, 0xfd, 0x59, 0x2e, 0xd4, 0x95, 0xbc, 0xf0, + 0xae, 0x8e, 0x2d, 0x4a, 
0x38, 0x45, 0x8c, 0x84, 0x5c, 0x90, 0x88, 0x60, 0xa6, 0x2f, 0x6c, 0x64, + 0x52, 0x28, 0x4f, 0x8b, 0xbf, 0x7f, 0xc7, 0x47, 0x55, 0xe4, 0x5b, 0xbd, 0x79, 0x76, 0x32, 0xf9, + 0xfc, 0xbd, 0xfe, 0xf8, 0xcc, 0x58, 0x9d, 0x50, 0x5e, 0x2e, 0xc2, 0x49, 0x55, 0xfb, 0xf2, 0x60, + 0xac, 0x14, 0xb7, 0x0e, 0xb8, 0xd6, 0xc0, 0x75, 0x05, 0x5c, 0x5f, 0x1a, 0xcb, 0xa4, 0xa5, 0xcb, + 0xbe, 0xfa, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x00, 0x4b, 0xd9, 0xd6, 0x04, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/firestore.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/firestore.pb.go new file mode 100644 index 0000000..561b856 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/firestore.pb.go @@ -0,0 +1,3723 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1beta1/firestore.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of change. +type TargetChange_TargetChangeType int32 + +const ( + // No change has occurred. Used only to send an updated `resume_token`. + TargetChange_NO_CHANGE TargetChange_TargetChangeType = 0 + // The targets have been added. + TargetChange_ADD TargetChange_TargetChangeType = 1 + // The targets have been removed. + TargetChange_REMOVE TargetChange_TargetChangeType = 2 + // The targets reflect all changes committed before the targets were added + // to the stream. + // + // This will be sent after or with a `read_time` that is greater than or + // equal to the time at which the targets were added. + // + // Listeners can wait for this change if read-after-write semantics + // are desired. + TargetChange_CURRENT TargetChange_TargetChangeType = 3 + // The targets have been reset, and a new initial state for the targets + // will be returned in subsequent changes. + // + // After the initial state is complete, `CURRENT` will be returned even + // if the target was previously indicated to be `CURRENT`. 
+ TargetChange_RESET TargetChange_TargetChangeType = 4 +) + +var TargetChange_TargetChangeType_name = map[int32]string{ + 0: "NO_CHANGE", + 1: "ADD", + 2: "REMOVE", + 3: "CURRENT", + 4: "RESET", +} +var TargetChange_TargetChangeType_value = map[string]int32{ + "NO_CHANGE": 0, + "ADD": 1, + "REMOVE": 2, + "CURRENT": 3, + "RESET": 4, +} + +func (x TargetChange_TargetChangeType) String() string { + return proto.EnumName(TargetChange_TargetChangeType_name, int32(x)) +} +func (TargetChange_TargetChangeType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{20, 0} +} + +// The request for [Firestore.GetDocument][google.firestore.v1beta1.Firestore.GetDocument]. +type GetDocumentRequest struct { + // The resource name of the Document to get. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,2,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *GetDocumentRequest_Transaction + // *GetDocumentRequest_ReadTime + ConsistencySelector isGetDocumentRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDocumentRequest) Reset() { *m = GetDocumentRequest{} } +func (m *GetDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*GetDocumentRequest) ProtoMessage() {} +func (*GetDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{0} +} +func (m *GetDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDocumentRequest.Unmarshal(m, b) +} +func (m *GetDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *GetDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDocumentRequest.Merge(dst, src) +} +func (m *GetDocumentRequest) XXX_Size() int { + return xxx_messageInfo_GetDocumentRequest.Size(m) +} +func (m *GetDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDocumentRequest proto.InternalMessageInfo + +func (m *GetDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isGetDocumentRequest_ConsistencySelector interface { + isGetDocumentRequest_ConsistencySelector() +} + +type GetDocumentRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3,oneof"` +} + +type GetDocumentRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*GetDocumentRequest_Transaction) isGetDocumentRequest_ConsistencySelector() {} + +func (*GetDocumentRequest_ReadTime) isGetDocumentRequest_ConsistencySelector() {} + +func (m *GetDocumentRequest) GetConsistencySelector() 
isGetDocumentRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *GetDocumentRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*GetDocumentRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *GetDocumentRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*GetDocumentRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GetDocumentRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GetDocumentRequest_OneofMarshaler, _GetDocumentRequest_OneofUnmarshaler, _GetDocumentRequest_OneofSizer, []interface{}{ + (*GetDocumentRequest_Transaction)(nil), + (*GetDocumentRequest_ReadTime)(nil), + } +} + +func _GetDocumentRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GetDocumentRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *GetDocumentRequest_Transaction: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *GetDocumentRequest_ReadTime: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GetDocumentRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _GetDocumentRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GetDocumentRequest) + switch tag { + case 3: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &GetDocumentRequest_Transaction{x} + return true, err + case 5: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &GetDocumentRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _GetDocumentRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GetDocumentRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *GetDocumentRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *GetDocumentRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. +type ListDocumentsRequest struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The collection ID, relative to `parent`, to list. For example: `chatrooms` + // or `messages`. 
+ CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // The maximum number of documents to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The `next_page_token` value returned from a previous List request, if any. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The order to sort results by. For example: `priority desc, name`. + OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If a document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,7,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *ListDocumentsRequest_Transaction + // *ListDocumentsRequest_ReadTime + ConsistencySelector isListDocumentsRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + // If the list should show missing documents. A missing document is a + // document that does not exist but has sub-documents. These documents will + // be returned with a key but will not have fields, [Document.create_time][google.firestore.v1beta1.Document.create_time], + // or [Document.update_time][google.firestore.v1beta1.Document.update_time] set. + // + // Requests with `show_missing` may not specify `where` or + // `order_by`. + ShowMissing bool `protobuf:"varint,12,opt,name=show_missing,json=showMissing,proto3" json:"show_missing,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsRequest) Reset() { *m = ListDocumentsRequest{} } +func (m *ListDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsRequest) ProtoMessage() {} +func (*ListDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{1} +} +func (m *ListDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsRequest.Unmarshal(m, b) +} +func (m *ListDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsRequest.Merge(dst, src) +} +func (m *ListDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_ListDocumentsRequest.Size(m) +} +func (m *ListDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsRequest proto.InternalMessageInfo + +func (m *ListDocumentsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDocumentsRequest) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *ListDocumentsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDocumentsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListDocumentsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m 
*ListDocumentsRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isListDocumentsRequest_ConsistencySelector interface { + isListDocumentsRequest_ConsistencySelector() +} + +type ListDocumentsRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,8,opt,name=transaction,proto3,oneof"` +} + +type ListDocumentsRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*ListDocumentsRequest_Transaction) isListDocumentsRequest_ConsistencySelector() {} + +func (*ListDocumentsRequest_ReadTime) isListDocumentsRequest_ConsistencySelector() {} + +func (m *ListDocumentsRequest) GetConsistencySelector() isListDocumentsRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *ListDocumentsRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*ListDocumentsRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *ListDocumentsRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*ListDocumentsRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +func (m *ListDocumentsRequest) GetShowMissing() bool { + if m != nil { + return m.ShowMissing + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ListDocumentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListDocumentsRequest_OneofMarshaler, _ListDocumentsRequest_OneofUnmarshaler, _ListDocumentsRequest_OneofSizer, []interface{}{ + (*ListDocumentsRequest_Transaction)(nil), + (*ListDocumentsRequest_ReadTime)(nil), + } +} + +func _ListDocumentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *ListDocumentsRequest_Transaction: + b.EncodeVarint(8<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *ListDocumentsRequest_ReadTime: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ListDocumentsRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _ListDocumentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListDocumentsRequest) + switch tag { + case 8: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &ListDocumentsRequest_Transaction{x} + return true, err + case 10: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &ListDocumentsRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _ListDocumentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *ListDocumentsRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *ListDocumentsRequest_ReadTime: + s := proto.Size(x.ReadTime) + 
n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.ListDocuments][google.firestore.v1beta1.Firestore.ListDocuments]. +type ListDocumentsResponse struct { + // The Documents found. + Documents []*Document `protobuf:"bytes,1,rep,name=documents,proto3" json:"documents,omitempty"` + // The next page token. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDocumentsResponse) Reset() { *m = ListDocumentsResponse{} } +func (m *ListDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDocumentsResponse) ProtoMessage() {} +func (*ListDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{2} +} +func (m *ListDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDocumentsResponse.Unmarshal(m, b) +} +func (m *ListDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDocumentsResponse.Merge(dst, src) +} +func (m *ListDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_ListDocumentsResponse.Size(m) +} +func (m *ListDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDocumentsResponse proto.InternalMessageInfo + +func (m *ListDocumentsResponse) GetDocuments() []*Document { + if m != nil { + return m.Documents + } + return nil +} + +func (m *ListDocumentsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [Firestore.CreateDocument][google.firestore.v1beta1.Firestore.CreateDocument]. +type CreateDocumentRequest struct { + // The parent resource. For example: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/chatrooms/{chatroom_id}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The collection ID, relative to `parent`, to list. For example: `chatrooms`. + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // The client-assigned document ID to use for this document. + // + // Optional. If not specified, an ID will be assigned by the service. + DocumentId string `protobuf:"bytes,3,opt,name=document_id,json=documentId,proto3" json:"document_id,omitempty"` + // The document to create. `name` must not be set. + Document *Document `protobuf:"bytes,4,opt,name=document,proto3" json:"document,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. 
+ Mask *DocumentMask `protobuf:"bytes,5,opt,name=mask,proto3" json:"mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDocumentRequest) Reset() { *m = CreateDocumentRequest{} } +func (m *CreateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDocumentRequest) ProtoMessage() {} +func (*CreateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{3} +} +func (m *CreateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDocumentRequest.Unmarshal(m, b) +} +func (m *CreateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDocumentRequest.Merge(dst, src) +} +func (m *CreateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_CreateDocumentRequest.Size(m) +} +func (m *CreateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDocumentRequest proto.InternalMessageInfo + +func (m *CreateDocumentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDocumentRequest) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *CreateDocumentRequest) GetDocumentId() string { + if m != nil { + return m.DocumentId + } + return "" +} + +func (m *CreateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *CreateDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +// The request for [Firestore.UpdateDocument][google.firestore.v1beta1.Firestore.UpdateDocument]. +type UpdateDocumentRequest struct { + // The updated document. + // Creates the document if it does not already exist. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The fields to update. + // None of the field paths in the mask may contain a reserved name. + // + // If the document exists on the server and has fields not referenced in the + // mask, they are left unchanged. + // Fields referenced in the mask, but not present in the input document, are + // deleted from the document on the server. + UpdateMask *DocumentMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If the document has a field that is not present in this mask, that field + // will not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,3,opt,name=mask,proto3" json:"mask,omitempty"` + // An optional precondition on the document. + // The request will fail if this is set and not met by the target document. 
+ CurrentDocument *Precondition `protobuf:"bytes,4,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDocumentRequest) Reset() { *m = UpdateDocumentRequest{} } +func (m *UpdateDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDocumentRequest) ProtoMessage() {} +func (*UpdateDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{4} +} +func (m *UpdateDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDocumentRequest.Unmarshal(m, b) +} +func (m *UpdateDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDocumentRequest.Merge(dst, src) +} +func (m *UpdateDocumentRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDocumentRequest.Size(m) +} +func (m *UpdateDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDocumentRequest proto.InternalMessageInfo + +func (m *UpdateDocumentRequest) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *UpdateDocumentRequest) GetUpdateMask() *DocumentMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateDocumentRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +func (m *UpdateDocumentRequest) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// The request for [Firestore.DeleteDocument][google.firestore.v1beta1.Firestore.DeleteDocument]. +type DeleteDocumentRequest struct { + // The resource name of the Document to delete. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // An optional precondition on the document. + // The request will fail if this is set and not met by the target document. 
+ CurrentDocument *Precondition `protobuf:"bytes,2,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDocumentRequest) Reset() { *m = DeleteDocumentRequest{} } +func (m *DeleteDocumentRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDocumentRequest) ProtoMessage() {} +func (*DeleteDocumentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{5} +} +func (m *DeleteDocumentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDocumentRequest.Unmarshal(m, b) +} +func (m *DeleteDocumentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDocumentRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDocumentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDocumentRequest.Merge(dst, src) +} +func (m *DeleteDocumentRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDocumentRequest.Size(m) +} +func (m *DeleteDocumentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDocumentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDocumentRequest proto.InternalMessageInfo + +func (m *DeleteDocumentRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteDocumentRequest) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// The request for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. +type BatchGetDocumentsRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The names of the documents to retrieve. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // The request will fail if any of the document is not a child resource of the + // given `database`. Duplicate names will be elided. + Documents []string `protobuf:"bytes,2,rep,name=documents,proto3" json:"documents,omitempty"` + // The fields to return. If not set, returns all fields. + // + // If a document has a field that is not present in this mask, that field will + // not be returned in the response. + Mask *DocumentMask `protobuf:"bytes,3,opt,name=mask,proto3" json:"mask,omitempty"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. 
+ // + // Types that are valid to be assigned to ConsistencySelector: + // *BatchGetDocumentsRequest_Transaction + // *BatchGetDocumentsRequest_NewTransaction + // *BatchGetDocumentsRequest_ReadTime + ConsistencySelector isBatchGetDocumentsRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetDocumentsRequest) Reset() { *m = BatchGetDocumentsRequest{} } +func (m *BatchGetDocumentsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchGetDocumentsRequest) ProtoMessage() {} +func (*BatchGetDocumentsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{6} +} +func (m *BatchGetDocumentsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetDocumentsRequest.Unmarshal(m, b) +} +func (m *BatchGetDocumentsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetDocumentsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchGetDocumentsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetDocumentsRequest.Merge(dst, src) +} +func (m *BatchGetDocumentsRequest) XXX_Size() int { + return xxx_messageInfo_BatchGetDocumentsRequest.Size(m) +} +func (m *BatchGetDocumentsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetDocumentsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetDocumentsRequest proto.InternalMessageInfo + +func (m *BatchGetDocumentsRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *BatchGetDocumentsRequest) GetDocuments() []string { + if m != nil { + return m.Documents + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetMask() *DocumentMask { + if m != nil { + return m.Mask + } + return nil +} + +type isBatchGetDocumentsRequest_ConsistencySelector interface { + isBatchGetDocumentsRequest_ConsistencySelector() +} + +type BatchGetDocumentsRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,4,opt,name=transaction,proto3,oneof"` +} + +type BatchGetDocumentsRequest_NewTransaction struct { + NewTransaction *TransactionOptions `protobuf:"bytes,5,opt,name=new_transaction,json=newTransaction,proto3,oneof"` +} + +type BatchGetDocumentsRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*BatchGetDocumentsRequest_Transaction) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (*BatchGetDocumentsRequest_NewTransaction) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (*BatchGetDocumentsRequest_ReadTime) isBatchGetDocumentsRequest_ConsistencySelector() {} + +func (m *BatchGetDocumentsRequest) GetConsistencySelector() isBatchGetDocumentsRequest_ConsistencySelector { + if m != nil { + return m.ConsistencySelector + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetNewTransaction() *TransactionOptions { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_NewTransaction); ok { + return x.NewTransaction + } + return nil +} + +func (m *BatchGetDocumentsRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*BatchGetDocumentsRequest_ReadTime); ok { + return x.ReadTime + } + return nil 
+} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BatchGetDocumentsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchGetDocumentsRequest_OneofMarshaler, _BatchGetDocumentsRequest_OneofUnmarshaler, _BatchGetDocumentsRequest_OneofSizer, []interface{}{ + (*BatchGetDocumentsRequest_Transaction)(nil), + (*BatchGetDocumentsRequest_NewTransaction)(nil), + (*BatchGetDocumentsRequest_ReadTime)(nil), + } +} + +func _BatchGetDocumentsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchGetDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *BatchGetDocumentsRequest_Transaction: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *BatchGetDocumentsRequest_NewTransaction: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NewTransaction); err != nil { + return err + } + case *BatchGetDocumentsRequest_ReadTime: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("BatchGetDocumentsRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _BatchGetDocumentsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchGetDocumentsRequest) + switch tag { + case 4: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &BatchGetDocumentsRequest_Transaction{x} + return true, err + case 5: // consistency_selector.new_transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &BatchGetDocumentsRequest_NewTransaction{msg} + return true, err + case 7: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &BatchGetDocumentsRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _BatchGetDocumentsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchGetDocumentsRequest) + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *BatchGetDocumentsRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *BatchGetDocumentsRequest_NewTransaction: + s := proto.Size(x.NewTransaction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchGetDocumentsRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The streamed response for [Firestore.BatchGetDocuments][google.firestore.v1beta1.Firestore.BatchGetDocuments]. +type BatchGetDocumentsResponse struct { + // A single result. + // This can be empty if the server is just returning a transaction. 
+ // + // Types that are valid to be assigned to Result: + // *BatchGetDocumentsResponse_Found + // *BatchGetDocumentsResponse_Missing + Result isBatchGetDocumentsResponse_Result `protobuf_oneof:"result"` + // The transaction that was started as part of this request. + // Will only be set in the first response, and only if + // [BatchGetDocumentsRequest.new_transaction][google.firestore.v1beta1.BatchGetDocumentsRequest.new_transaction] was set in the request. + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3" json:"transaction,omitempty"` + // The time at which the document was read. + // This may be monotically increasing, in this case the previous documents in + // the result stream are guaranteed not to have changed between their + // read_time and this one. + ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetDocumentsResponse) Reset() { *m = BatchGetDocumentsResponse{} } +func (m *BatchGetDocumentsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchGetDocumentsResponse) ProtoMessage() {} +func (*BatchGetDocumentsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{7} +} +func (m *BatchGetDocumentsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetDocumentsResponse.Unmarshal(m, b) +} +func (m *BatchGetDocumentsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetDocumentsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchGetDocumentsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetDocumentsResponse.Merge(dst, src) +} +func (m *BatchGetDocumentsResponse) XXX_Size() int { + return xxx_messageInfo_BatchGetDocumentsResponse.Size(m) +} +func (m *BatchGetDocumentsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetDocumentsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetDocumentsResponse proto.InternalMessageInfo + +type isBatchGetDocumentsResponse_Result interface { + isBatchGetDocumentsResponse_Result() +} + +type BatchGetDocumentsResponse_Found struct { + Found *Document `protobuf:"bytes,1,opt,name=found,proto3,oneof"` +} + +type BatchGetDocumentsResponse_Missing struct { + Missing string `protobuf:"bytes,2,opt,name=missing,proto3,oneof"` +} + +func (*BatchGetDocumentsResponse_Found) isBatchGetDocumentsResponse_Result() {} + +func (*BatchGetDocumentsResponse_Missing) isBatchGetDocumentsResponse_Result() {} + +func (m *BatchGetDocumentsResponse) GetResult() isBatchGetDocumentsResponse_Result { + if m != nil { + return m.Result + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetFound() *Document { + if x, ok := m.GetResult().(*BatchGetDocumentsResponse_Found); ok { + return x.Found + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetMissing() string { + if x, ok := m.GetResult().(*BatchGetDocumentsResponse_Missing); ok { + return x.Missing + } + return "" +} + +func (m *BatchGetDocumentsResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *BatchGetDocumentsResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*BatchGetDocumentsResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BatchGetDocumentsResponse_OneofMarshaler, _BatchGetDocumentsResponse_OneofUnmarshaler, _BatchGetDocumentsResponse_OneofSizer, []interface{}{ + (*BatchGetDocumentsResponse_Found)(nil), + (*BatchGetDocumentsResponse_Missing)(nil), + } +} + +func _BatchGetDocumentsResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BatchGetDocumentsResponse) + // result + switch x := m.Result.(type) { + case *BatchGetDocumentsResponse_Found: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Found); err != nil { + return err + } + case *BatchGetDocumentsResponse_Missing: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Missing) + case nil: + default: + return fmt.Errorf("BatchGetDocumentsResponse.Result has unexpected type %T", x) + } + return nil +} + +func _BatchGetDocumentsResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BatchGetDocumentsResponse) + switch tag { + case 1: // result.found + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Document) + err := b.DecodeMessage(msg) + m.Result = &BatchGetDocumentsResponse_Found{msg} + return true, err + case 2: // result.missing + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Result = &BatchGetDocumentsResponse_Missing{x} + return true, err + default: + return false, nil + } +} + +func _BatchGetDocumentsResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BatchGetDocumentsResponse) + // result + switch x := m.Result.(type) { + case *BatchGetDocumentsResponse_Found: + s := proto.Size(x.Found) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *BatchGetDocumentsResponse_Missing: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Missing))) + n += len(x.Missing) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. +type BeginTransactionRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The options for the transaction. + // Defaults to a read-write transaction. 
+ Options *TransactionOptions `protobuf:"bytes,2,opt,name=options,proto3" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} } +func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionRequest) ProtoMessage() {} +func (*BeginTransactionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{8} +} +func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b) +} +func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionRequest.Merge(dst, src) +} +func (m *BeginTransactionRequest) XXX_Size() int { + return xxx_messageInfo_BeginTransactionRequest.Size(m) +} +func (m *BeginTransactionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo + +func (m *BeginTransactionRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *BeginTransactionRequest) GetOptions() *TransactionOptions { + if m != nil { + return m.Options + } + return nil +} + +// The response for [Firestore.BeginTransaction][google.firestore.v1beta1.Firestore.BeginTransaction]. +type BeginTransactionResponse struct { + // The transaction that was started. + Transaction []byte `protobuf:"bytes,1,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionResponse) Reset() { *m = BeginTransactionResponse{} } +func (m *BeginTransactionResponse) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionResponse) ProtoMessage() {} +func (*BeginTransactionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{9} +} +func (m *BeginTransactionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionResponse.Unmarshal(m, b) +} +func (m *BeginTransactionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionResponse.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionResponse.Merge(dst, src) +} +func (m *BeginTransactionResponse) XXX_Size() int { + return xxx_messageInfo_BeginTransactionResponse.Size(m) +} +func (m *BeginTransactionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionResponse proto.InternalMessageInfo + +func (m *BeginTransactionResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. +type CommitRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The writes to apply. 
+ // + // Always executed atomically and in order. + Writes []*Write `protobuf:"bytes,2,rep,name=writes,proto3" json:"writes,omitempty"` + // If set, applies all writes in this transaction, and commits it. + Transaction []byte `protobuf:"bytes,3,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitRequest) Reset() { *m = CommitRequest{} } +func (m *CommitRequest) String() string { return proto.CompactTextString(m) } +func (*CommitRequest) ProtoMessage() {} +func (*CommitRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{10} +} +func (m *CommitRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitRequest.Unmarshal(m, b) +} +func (m *CommitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitRequest.Marshal(b, m, deterministic) +} +func (dst *CommitRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitRequest.Merge(dst, src) +} +func (m *CommitRequest) XXX_Size() int { + return xxx_messageInfo_CommitRequest.Size(m) +} +func (m *CommitRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CommitRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitRequest proto.InternalMessageInfo + +func (m *CommitRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *CommitRequest) GetWrites() []*Write { + if m != nil { + return m.Writes + } + return nil +} + +func (m *CommitRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The response for [Firestore.Commit][google.firestore.v1beta1.Firestore.Commit]. +type CommitResponse struct { + // The result of applying the writes. + // + // This i-th write result corresponds to the i-th write in the + // request. + WriteResults []*WriteResult `protobuf:"bytes,1,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"` + // The time at which the commit occurred. 
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitResponse) Reset() { *m = CommitResponse{} } +func (m *CommitResponse) String() string { return proto.CompactTextString(m) } +func (*CommitResponse) ProtoMessage() {} +func (*CommitResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{11} +} +func (m *CommitResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitResponse.Unmarshal(m, b) +} +func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic) +} +func (dst *CommitResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitResponse.Merge(dst, src) +} +func (m *CommitResponse) XXX_Size() int { + return xxx_messageInfo_CommitResponse.Size(m) +} +func (m *CommitResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CommitResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitResponse proto.InternalMessageInfo + +func (m *CommitResponse) GetWriteResults() []*WriteResult { + if m != nil { + return m.WriteResults + } + return nil +} + +func (m *CommitResponse) GetCommitTime() *timestamp.Timestamp { + if m != nil { + return m.CommitTime + } + return nil +} + +// The request for [Firestore.Rollback][google.firestore.v1beta1.Firestore.Rollback]. +type RollbackRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The transaction to roll back. + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackRequest) Reset() { *m = RollbackRequest{} } +func (m *RollbackRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackRequest) ProtoMessage() {} +func (*RollbackRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{12} +} +func (m *RollbackRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackRequest.Unmarshal(m, b) +} +func (m *RollbackRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackRequest.Merge(dst, src) +} +func (m *RollbackRequest) XXX_Size() int { + return xxx_messageInfo_RollbackRequest.Size(m) +} +func (m *RollbackRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackRequest proto.InternalMessageInfo + +func (m *RollbackRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *RollbackRequest) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. +type RunQueryRequest struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. 
+ // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The query to run. + // + // Types that are valid to be assigned to QueryType: + // *RunQueryRequest_StructuredQuery + QueryType isRunQueryRequest_QueryType `protobuf_oneof:"query_type"` + // The consistency mode for this transaction. + // If not set, defaults to strong consistency. + // + // Types that are valid to be assigned to ConsistencySelector: + // *RunQueryRequest_Transaction + // *RunQueryRequest_NewTransaction + // *RunQueryRequest_ReadTime + ConsistencySelector isRunQueryRequest_ConsistencySelector `protobuf_oneof:"consistency_selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryRequest) Reset() { *m = RunQueryRequest{} } +func (m *RunQueryRequest) String() string { return proto.CompactTextString(m) } +func (*RunQueryRequest) ProtoMessage() {} +func (*RunQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{13} +} +func (m *RunQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryRequest.Unmarshal(m, b) +} +func (m *RunQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryRequest.Marshal(b, m, deterministic) +} +func (dst *RunQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryRequest.Merge(dst, src) +} +func (m *RunQueryRequest) XXX_Size() int { + return xxx_messageInfo_RunQueryRequest.Size(m) +} +func (m *RunQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryRequest proto.InternalMessageInfo + +func (m *RunQueryRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isRunQueryRequest_QueryType interface { + isRunQueryRequest_QueryType() +} + +type RunQueryRequest_StructuredQuery struct { + StructuredQuery *StructuredQuery `protobuf:"bytes,2,opt,name=structured_query,json=structuredQuery,proto3,oneof"` +} + +func (*RunQueryRequest_StructuredQuery) isRunQueryRequest_QueryType() {} + +func (m *RunQueryRequest) GetQueryType() isRunQueryRequest_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *RunQueryRequest) GetStructuredQuery() *StructuredQuery { + if x, ok := m.GetQueryType().(*RunQueryRequest_StructuredQuery); ok { + return x.StructuredQuery + } + return nil +} + +type isRunQueryRequest_ConsistencySelector interface { + isRunQueryRequest_ConsistencySelector() +} + +type RunQueryRequest_Transaction struct { + Transaction []byte `protobuf:"bytes,5,opt,name=transaction,proto3,oneof"` +} + +type RunQueryRequest_NewTransaction struct { + NewTransaction *TransactionOptions `protobuf:"bytes,6,opt,name=new_transaction,json=newTransaction,proto3,oneof"` +} + +type RunQueryRequest_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*RunQueryRequest_Transaction) isRunQueryRequest_ConsistencySelector() {} + +func (*RunQueryRequest_NewTransaction) isRunQueryRequest_ConsistencySelector() {} + +func (*RunQueryRequest_ReadTime) isRunQueryRequest_ConsistencySelector() {} + +func (m *RunQueryRequest) GetConsistencySelector() isRunQueryRequest_ConsistencySelector { + if m != nil { + return 
m.ConsistencySelector + } + return nil +} + +func (m *RunQueryRequest) GetTransaction() []byte { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_Transaction); ok { + return x.Transaction + } + return nil +} + +func (m *RunQueryRequest) GetNewTransaction() *TransactionOptions { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_NewTransaction); ok { + return x.NewTransaction + } + return nil +} + +func (m *RunQueryRequest) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetConsistencySelector().(*RunQueryRequest_ReadTime); ok { + return x.ReadTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*RunQueryRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RunQueryRequest_OneofMarshaler, _RunQueryRequest_OneofUnmarshaler, _RunQueryRequest_OneofSizer, []interface{}{ + (*RunQueryRequest_StructuredQuery)(nil), + (*RunQueryRequest_Transaction)(nil), + (*RunQueryRequest_NewTransaction)(nil), + (*RunQueryRequest_ReadTime)(nil), + } +} + +func _RunQueryRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RunQueryRequest) + // query_type + switch x := m.QueryType.(type) { + case *RunQueryRequest_StructuredQuery: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.QueryType has unexpected type %T", x) + } + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *RunQueryRequest_Transaction: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Transaction) + case *RunQueryRequest_NewTransaction: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NewTransaction); err != nil { + return err + } + case *RunQueryRequest_ReadTime: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunQueryRequest.ConsistencySelector has unexpected type %T", x) + } + return nil +} + +func _RunQueryRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RunQueryRequest) + switch tag { + case 2: // query_type.structured_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery) + err := b.DecodeMessage(msg) + m.QueryType = &RunQueryRequest_StructuredQuery{msg} + return true, err + case 5: // consistency_selector.transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ConsistencySelector = &RunQueryRequest_Transaction{x} + return true, err + case 6: // consistency_selector.new_transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &RunQueryRequest_NewTransaction{msg} + return true, err + case 7: // consistency_selector.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ConsistencySelector = &RunQueryRequest_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func _RunQueryRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RunQueryRequest) + // query_type + switch x := 
m.QueryType.(type) { + case *RunQueryRequest_StructuredQuery: + s := proto.Size(x.StructuredQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // consistency_selector + switch x := m.ConsistencySelector.(type) { + case *RunQueryRequest_Transaction: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Transaction))) + n += len(x.Transaction) + case *RunQueryRequest_NewTransaction: + s := proto.Size(x.NewTransaction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RunQueryRequest_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.RunQuery][google.firestore.v1beta1.Firestore.RunQuery]. +type RunQueryResponse struct { + // The transaction that was started as part of this request. + // Can only be set in the first response, and only if + // [RunQueryRequest.new_transaction][google.firestore.v1beta1.RunQueryRequest.new_transaction] was set in the request. + // If set, no other fields will be set in this response. + Transaction []byte `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // A query result. + // Not set when reporting partial progress. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The time at which the document was read. This may be monotonically + // increasing; in this case, the previous documents in the result stream are + // guaranteed not to have changed between their `read_time` and this one. + // + // If the query returns no results, a response with `read_time` and no + // `document` will be sent, and this represents the time at which the query + // was run. + ReadTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + // The number of results that have been skipped due to an offset between + // the last response and the current response. 
+ SkippedResults int32 `protobuf:"varint,4,opt,name=skipped_results,json=skippedResults,proto3" json:"skipped_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunQueryResponse) Reset() { *m = RunQueryResponse{} } +func (m *RunQueryResponse) String() string { return proto.CompactTextString(m) } +func (*RunQueryResponse) ProtoMessage() {} +func (*RunQueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{14} +} +func (m *RunQueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunQueryResponse.Unmarshal(m, b) +} +func (m *RunQueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunQueryResponse.Marshal(b, m, deterministic) +} +func (dst *RunQueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunQueryResponse.Merge(dst, src) +} +func (m *RunQueryResponse) XXX_Size() int { + return xxx_messageInfo_RunQueryResponse.Size(m) +} +func (m *RunQueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RunQueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RunQueryResponse proto.InternalMessageInfo + +func (m *RunQueryResponse) GetTransaction() []byte { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *RunQueryResponse) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *RunQueryResponse) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +func (m *RunQueryResponse) GetSkippedResults() int32 { + if m != nil { + return m.SkippedResults + } + return 0 +} + +// The request for [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. +// +// The first request creates a stream, or resumes an existing one from a token. +// +// When creating a new stream, the server replies with a response containing +// only an ID and a token, to use in the next request. +// +// When resuming a stream, the server first streams any responses later than the +// given token, then a response containing only an up-to-date token, to use in +// the next request. +type WriteRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + // This is only required in the first message. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The ID of the write stream to resume. + // This may only be set in the first message. When left empty, a new write + // stream will be created. + StreamId string `protobuf:"bytes,2,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // The writes to apply. + // + // Always executed atomically and in order. + // This must be empty on the first request. + // This may be empty on the last request. + // This must not be empty on all other requests. + Writes []*Write `protobuf:"bytes,3,rep,name=writes,proto3" json:"writes,omitempty"` + // A stream token that was previously sent by the server. + // + // The client should set this field to the token from the most recent + // [WriteResponse][google.firestore.v1beta1.WriteResponse] it has received. This acknowledges that the client has + // received responses up to this token. After sending this token, earlier + // tokens may not be used anymore. + // + // The server may close the stream if there are too many unacknowledged + // responses. + // + // Leave this field unset when creating a new stream. 
To resume a stream at + // a specific point, set this field and the `stream_id` field. + // + // Leave this field unset when creating a new stream. + StreamToken []byte `protobuf:"bytes,4,opt,name=stream_token,json=streamToken,proto3" json:"stream_token,omitempty"` + // Labels associated with this write request. + Labels map[string]string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteRequest) Reset() { *m = WriteRequest{} } +func (m *WriteRequest) String() string { return proto.CompactTextString(m) } +func (*WriteRequest) ProtoMessage() {} +func (*WriteRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{15} +} +func (m *WriteRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteRequest.Unmarshal(m, b) +} +func (m *WriteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteRequest.Marshal(b, m, deterministic) +} +func (dst *WriteRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteRequest.Merge(dst, src) +} +func (m *WriteRequest) XXX_Size() int { + return xxx_messageInfo_WriteRequest.Size(m) +} +func (m *WriteRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WriteRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteRequest proto.InternalMessageInfo + +func (m *WriteRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *WriteRequest) GetStreamId() string { + if m != nil { + return m.StreamId + } + return "" +} + +func (m *WriteRequest) GetWrites() []*Write { + if m != nil { + return m.Writes + } + return nil +} + +func (m *WriteRequest) GetStreamToken() []byte { + if m != nil { + return m.StreamToken + } + return nil +} + +func (m *WriteRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// The response for [Firestore.Write][google.firestore.v1beta1.Firestore.Write]. +type WriteResponse struct { + // The ID of the stream. + // Only set on the first message, when a new stream was created. + StreamId string `protobuf:"bytes,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // A token that represents the position of this response in the stream. + // This can be used by a client to resume the stream at this point. + // + // This field is always set. + StreamToken []byte `protobuf:"bytes,2,opt,name=stream_token,json=streamToken,proto3" json:"stream_token,omitempty"` + // The result of applying the writes. + // + // This i-th write result corresponds to the i-th write in the + // request. + WriteResults []*WriteResult `protobuf:"bytes,3,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"` + // The time at which the commit occurred. 
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteResponse) Reset() { *m = WriteResponse{} } +func (m *WriteResponse) String() string { return proto.CompactTextString(m) } +func (*WriteResponse) ProtoMessage() {} +func (*WriteResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{16} +} +func (m *WriteResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteResponse.Unmarshal(m, b) +} +func (m *WriteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteResponse.Marshal(b, m, deterministic) +} +func (dst *WriteResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteResponse.Merge(dst, src) +} +func (m *WriteResponse) XXX_Size() int { + return xxx_messageInfo_WriteResponse.Size(m) +} +func (m *WriteResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WriteResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteResponse proto.InternalMessageInfo + +func (m *WriteResponse) GetStreamId() string { + if m != nil { + return m.StreamId + } + return "" +} + +func (m *WriteResponse) GetStreamToken() []byte { + if m != nil { + return m.StreamToken + } + return nil +} + +func (m *WriteResponse) GetWriteResults() []*WriteResult { + if m != nil { + return m.WriteResults + } + return nil +} + +func (m *WriteResponse) GetCommitTime() *timestamp.Timestamp { + if m != nil { + return m.CommitTime + } + return nil +} + +// A request for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen] +type ListenRequest struct { + // The database name. In the format: + // `projects/{project_id}/databases/{database_id}`. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The supported target changes. + // + // Types that are valid to be assigned to TargetChange: + // *ListenRequest_AddTarget + // *ListenRequest_RemoveTarget + TargetChange isListenRequest_TargetChange `protobuf_oneof:"target_change"` + // Labels associated with this target change. 
+ Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListenRequest) Reset() { *m = ListenRequest{} } +func (m *ListenRequest) String() string { return proto.CompactTextString(m) } +func (*ListenRequest) ProtoMessage() {} +func (*ListenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{17} +} +func (m *ListenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListenRequest.Unmarshal(m, b) +} +func (m *ListenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListenRequest.Marshal(b, m, deterministic) +} +func (dst *ListenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListenRequest.Merge(dst, src) +} +func (m *ListenRequest) XXX_Size() int { + return xxx_messageInfo_ListenRequest.Size(m) +} +func (m *ListenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListenRequest proto.InternalMessageInfo + +func (m *ListenRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +type isListenRequest_TargetChange interface { + isListenRequest_TargetChange() +} + +type ListenRequest_AddTarget struct { + AddTarget *Target `protobuf:"bytes,2,opt,name=add_target,json=addTarget,proto3,oneof"` +} + +type ListenRequest_RemoveTarget struct { + RemoveTarget int32 `protobuf:"varint,3,opt,name=remove_target,json=removeTarget,proto3,oneof"` +} + +func (*ListenRequest_AddTarget) isListenRequest_TargetChange() {} + +func (*ListenRequest_RemoveTarget) isListenRequest_TargetChange() {} + +func (m *ListenRequest) GetTargetChange() isListenRequest_TargetChange { + if m != nil { + return m.TargetChange + } + return nil +} + +func (m *ListenRequest) GetAddTarget() *Target { + if x, ok := m.GetTargetChange().(*ListenRequest_AddTarget); ok { + return x.AddTarget + } + return nil +} + +func (m *ListenRequest) GetRemoveTarget() int32 { + if x, ok := m.GetTargetChange().(*ListenRequest_RemoveTarget); ok { + return x.RemoveTarget + } + return 0 +} + +func (m *ListenRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListenRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListenRequest_OneofMarshaler, _ListenRequest_OneofUnmarshaler, _ListenRequest_OneofSizer, []interface{}{ + (*ListenRequest_AddTarget)(nil), + (*ListenRequest_RemoveTarget)(nil), + } +} + +func _ListenRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListenRequest) + // target_change + switch x := m.TargetChange.(type) { + case *ListenRequest_AddTarget: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AddTarget); err != nil { + return err + } + case *ListenRequest_RemoveTarget: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RemoveTarget)) + case nil: + default: + return fmt.Errorf("ListenRequest.TargetChange has unexpected type %T", x) + } + return nil +} + +func _ListenRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListenRequest) + switch tag { + case 2: // target_change.add_target + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target) + err := b.DecodeMessage(msg) + m.TargetChange = &ListenRequest_AddTarget{msg} + return true, err + case 3: // target_change.remove_target + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TargetChange = &ListenRequest_RemoveTarget{int32(x)} + return true, err + default: + return false, nil + } +} + +func _ListenRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListenRequest) + // target_change + switch x := m.TargetChange.(type) { + case *ListenRequest_AddTarget: + s := proto.Size(x.AddTarget) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenRequest_RemoveTarget: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RemoveTarget)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Firestore.Listen][google.firestore.v1beta1.Firestore.Listen]. +type ListenResponse struct { + // The supported responses. 
+ // + // Types that are valid to be assigned to ResponseType: + // *ListenResponse_TargetChange + // *ListenResponse_DocumentChange + // *ListenResponse_DocumentDelete + // *ListenResponse_DocumentRemove + // *ListenResponse_Filter + ResponseType isListenResponse_ResponseType `protobuf_oneof:"response_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListenResponse) Reset() { *m = ListenResponse{} } +func (m *ListenResponse) String() string { return proto.CompactTextString(m) } +func (*ListenResponse) ProtoMessage() {} +func (*ListenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{18} +} +func (m *ListenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListenResponse.Unmarshal(m, b) +} +func (m *ListenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListenResponse.Marshal(b, m, deterministic) +} +func (dst *ListenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListenResponse.Merge(dst, src) +} +func (m *ListenResponse) XXX_Size() int { + return xxx_messageInfo_ListenResponse.Size(m) +} +func (m *ListenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListenResponse proto.InternalMessageInfo + +type isListenResponse_ResponseType interface { + isListenResponse_ResponseType() +} + +type ListenResponse_TargetChange struct { + TargetChange *TargetChange `protobuf:"bytes,2,opt,name=target_change,json=targetChange,proto3,oneof"` +} + +type ListenResponse_DocumentChange struct { + DocumentChange *DocumentChange `protobuf:"bytes,3,opt,name=document_change,json=documentChange,proto3,oneof"` +} + +type ListenResponse_DocumentDelete struct { + DocumentDelete *DocumentDelete `protobuf:"bytes,4,opt,name=document_delete,json=documentDelete,proto3,oneof"` +} + +type ListenResponse_DocumentRemove struct { + DocumentRemove *DocumentRemove `protobuf:"bytes,6,opt,name=document_remove,json=documentRemove,proto3,oneof"` +} + +type ListenResponse_Filter struct { + Filter *ExistenceFilter `protobuf:"bytes,5,opt,name=filter,proto3,oneof"` +} + +func (*ListenResponse_TargetChange) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentChange) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentDelete) isListenResponse_ResponseType() {} + +func (*ListenResponse_DocumentRemove) isListenResponse_ResponseType() {} + +func (*ListenResponse_Filter) isListenResponse_ResponseType() {} + +func (m *ListenResponse) GetResponseType() isListenResponse_ResponseType { + if m != nil { + return m.ResponseType + } + return nil +} + +func (m *ListenResponse) GetTargetChange() *TargetChange { + if x, ok := m.GetResponseType().(*ListenResponse_TargetChange); ok { + return x.TargetChange + } + return nil +} + +func (m *ListenResponse) GetDocumentChange() *DocumentChange { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentChange); ok { + return x.DocumentChange + } + return nil +} + +func (m *ListenResponse) GetDocumentDelete() *DocumentDelete { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentDelete); ok { + return x.DocumentDelete + } + return nil +} + +func (m *ListenResponse) GetDocumentRemove() *DocumentRemove { + if x, ok := m.GetResponseType().(*ListenResponse_DocumentRemove); ok { + return x.DocumentRemove + } + return nil +} + +func (m *ListenResponse) GetFilter() *ExistenceFilter { + if x, ok := 
m.GetResponseType().(*ListenResponse_Filter); ok { + return x.Filter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ListenResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListenResponse_OneofMarshaler, _ListenResponse_OneofUnmarshaler, _ListenResponse_OneofSizer, []interface{}{ + (*ListenResponse_TargetChange)(nil), + (*ListenResponse_DocumentChange)(nil), + (*ListenResponse_DocumentDelete)(nil), + (*ListenResponse_DocumentRemove)(nil), + (*ListenResponse_Filter)(nil), + } +} + +func _ListenResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListenResponse) + // response_type + switch x := m.ResponseType.(type) { + case *ListenResponse_TargetChange: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TargetChange); err != nil { + return err + } + case *ListenResponse_DocumentChange: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentChange); err != nil { + return err + } + case *ListenResponse_DocumentDelete: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentDelete); err != nil { + return err + } + case *ListenResponse_DocumentRemove: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentRemove); err != nil { + return err + } + case *ListenResponse_Filter: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Filter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ListenResponse.ResponseType has unexpected type %T", x) + } + return nil +} + +func _ListenResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListenResponse) + switch tag { + case 2: // response_type.target_change + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TargetChange) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_TargetChange{msg} + return true, err + case 3: // response_type.document_change + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentChange) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentChange{msg} + return true, err + case 4: // response_type.document_delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentDelete) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentDelete{msg} + return true, err + case 6: // response_type.document_remove + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentRemove) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_DocumentRemove{msg} + return true, err + case 5: // response_type.filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExistenceFilter) + err := b.DecodeMessage(msg) + m.ResponseType = &ListenResponse_Filter{msg} + return true, err + default: + return false, nil + } +} + +func _ListenResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListenResponse) + // response_type + switch x := m.ResponseType.(type) { + case *ListenResponse_TargetChange: + s := proto.Size(x.TargetChange) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentChange: + s := proto.Size(x.DocumentChange) + n 
+= 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentDelete: + s := proto.Size(x.DocumentDelete) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_DocumentRemove: + s := proto.Size(x.DocumentRemove) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ListenResponse_Filter: + s := proto.Size(x.Filter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A specification of a set of documents to listen to. +type Target struct { + // The type of target to listen to. + // + // Types that are valid to be assigned to TargetType: + // *Target_Query + // *Target_Documents + TargetType isTarget_TargetType `protobuf_oneof:"target_type"` + // When to start listening. + // + // If not specified, all matching Documents are returned before any + // subsequent changes. + // + // Types that are valid to be assigned to ResumeType: + // *Target_ResumeToken + // *Target_ReadTime + ResumeType isTarget_ResumeType `protobuf_oneof:"resume_type"` + // A client provided target ID. + // + // If not set, the server will assign an ID for the target. + // + // Used for resuming a target without changing IDs. The IDs can either be + // client-assigned or be server-assigned in a previous stream. All targets + // with client provided IDs must be added before adding a target that needs + // a server-assigned id. + TargetId int32 `protobuf:"varint,5,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // If the target should be removed once it is current and consistent. + Once bool `protobuf:"varint,6,opt,name=once,proto3" json:"once,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target) Reset() { *m = Target{} } +func (m *Target) String() string { return proto.CompactTextString(m) } +func (*Target) ProtoMessage() {} +func (*Target) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{19} +} +func (m *Target) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target.Unmarshal(m, b) +} +func (m *Target) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target.Marshal(b, m, deterministic) +} +func (dst *Target) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target.Merge(dst, src) +} +func (m *Target) XXX_Size() int { + return xxx_messageInfo_Target.Size(m) +} +func (m *Target) XXX_DiscardUnknown() { + xxx_messageInfo_Target.DiscardUnknown(m) +} + +var xxx_messageInfo_Target proto.InternalMessageInfo + +type isTarget_TargetType interface { + isTarget_TargetType() +} + +type Target_Query struct { + Query *Target_QueryTarget `protobuf:"bytes,2,opt,name=query,proto3,oneof"` +} + +type Target_Documents struct { + Documents *Target_DocumentsTarget `protobuf:"bytes,3,opt,name=documents,proto3,oneof"` +} + +func (*Target_Query) isTarget_TargetType() {} + +func (*Target_Documents) isTarget_TargetType() {} + +func (m *Target) GetTargetType() isTarget_TargetType { + if m != nil { + return m.TargetType + } + return nil +} + +func (m *Target) GetQuery() *Target_QueryTarget { + if x, ok := m.GetTargetType().(*Target_Query); ok { + return x.Query + } + return nil +} + +func (m *Target) GetDocuments() *Target_DocumentsTarget { + if x, ok := m.GetTargetType().(*Target_Documents); ok { + return x.Documents + } + return nil +} 
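// --- Illustrative sketch (editor's note; not part of the vendored file or of this
// commit). It shows how the generated oneof support above is typically used: the
// caller assigns exactly one wrapper struct to the interface-typed field and reads
// it back through the generated Get* accessors. Written as if it lived in this
// generated package, so the fmt and proto imports already present in the file are
// reused; the project and document names are hypothetical.
func exampleListenTargetUsage() {
	req := &ListenRequest{
		Database: "projects/my-project/databases/(default)",
		// TargetChange is a oneof; a single wrapper type selects the active case.
		TargetChange: &ListenRequest_AddTarget{
			AddTarget: &Target{
				// TargetType is itself a oneof: here, listen to a fixed set of documents.
				TargetType: &Target_Documents{
					Documents: &Target_DocumentsTarget{
						Documents: []string{
							"projects/my-project/databases/(default)/documents/chatrooms/my-chatroom",
						},
					},
				},
				TargetId: 1,
			},
		},
	}

	// The generated getters return the zero value when a different oneof case
	// (or none) is set, so they are safe to call unconditionally.
	if t := req.GetAddTarget(); t != nil {
		fmt.Println("target", t.GetTargetId(), "documents", t.GetDocuments().GetDocuments())
	}

	// proto.Marshal consults the oneof marshaler/sizer helpers defined above to
	// encode whichever case is populated.
	if b, err := proto.Marshal(req); err != nil {
		fmt.Println("marshal error:", err)
	} else {
		fmt.Println("encoded", len(b), "bytes")
	}
}
// --- end of illustrative sketch.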
+ +type isTarget_ResumeType interface { + isTarget_ResumeType() +} + +type Target_ResumeToken struct { + ResumeToken []byte `protobuf:"bytes,4,opt,name=resume_token,json=resumeToken,proto3,oneof"` +} + +type Target_ReadTime struct { + ReadTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=read_time,json=readTime,proto3,oneof"` +} + +func (*Target_ResumeToken) isTarget_ResumeType() {} + +func (*Target_ReadTime) isTarget_ResumeType() {} + +func (m *Target) GetResumeType() isTarget_ResumeType { + if m != nil { + return m.ResumeType + } + return nil +} + +func (m *Target) GetResumeToken() []byte { + if x, ok := m.GetResumeType().(*Target_ResumeToken); ok { + return x.ResumeToken + } + return nil +} + +func (m *Target) GetReadTime() *timestamp.Timestamp { + if x, ok := m.GetResumeType().(*Target_ReadTime); ok { + return x.ReadTime + } + return nil +} + +func (m *Target) GetTargetId() int32 { + if m != nil { + return m.TargetId + } + return 0 +} + +func (m *Target) GetOnce() bool { + if m != nil { + return m.Once + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Target) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Target_OneofMarshaler, _Target_OneofUnmarshaler, _Target_OneofSizer, []interface{}{ + (*Target_Query)(nil), + (*Target_Documents)(nil), + (*Target_ResumeToken)(nil), + (*Target_ReadTime)(nil), + } +} + +func _Target_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Target) + // target_type + switch x := m.TargetType.(type) { + case *Target_Query: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Query); err != nil { + return err + } + case *Target_Documents: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Documents); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target.TargetType has unexpected type %T", x) + } + // resume_type + switch x := m.ResumeType.(type) { + case *Target_ResumeToken: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeRawBytes(x.ResumeToken) + case *Target_ReadTime: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target.ResumeType has unexpected type %T", x) + } + return nil +} + +func _Target_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Target) + switch tag { + case 2: // target_type.query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target_QueryTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Target_Query{msg} + return true, err + case 3: // target_type.documents + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Target_DocumentsTarget) + err := b.DecodeMessage(msg) + m.TargetType = &Target_Documents{msg} + return true, err + case 4: // resume_type.resume_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.ResumeType = &Target_ResumeToken{x} + return true, err + case 11: // resume_type.read_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.ResumeType = &Target_ReadTime{msg} + return true, err + default: + return false, nil + } +} + +func 
_Target_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Target) + // target_type + switch x := m.TargetType.(type) { + case *Target_Query: + s := proto.Size(x.Query) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Target_Documents: + s := proto.Size(x.Documents) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // resume_type + switch x := m.ResumeType.(type) { + case *Target_ResumeToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ResumeToken))) + n += len(x.ResumeToken) + case *Target_ReadTime: + s := proto.Size(x.ReadTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A target specified by a set of documents names. +type Target_DocumentsTarget struct { + // The names of the documents to retrieve. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // The request will fail if any of the document is not a child resource of + // the given `database`. Duplicate names will be elided. + Documents []string `protobuf:"bytes,2,rep,name=documents,proto3" json:"documents,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target_DocumentsTarget) Reset() { *m = Target_DocumentsTarget{} } +func (m *Target_DocumentsTarget) String() string { return proto.CompactTextString(m) } +func (*Target_DocumentsTarget) ProtoMessage() {} +func (*Target_DocumentsTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{19, 0} +} +func (m *Target_DocumentsTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target_DocumentsTarget.Unmarshal(m, b) +} +func (m *Target_DocumentsTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target_DocumentsTarget.Marshal(b, m, deterministic) +} +func (dst *Target_DocumentsTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target_DocumentsTarget.Merge(dst, src) +} +func (m *Target_DocumentsTarget) XXX_Size() int { + return xxx_messageInfo_Target_DocumentsTarget.Size(m) +} +func (m *Target_DocumentsTarget) XXX_DiscardUnknown() { + xxx_messageInfo_Target_DocumentsTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_Target_DocumentsTarget proto.InternalMessageInfo + +func (m *Target_DocumentsTarget) GetDocuments() []string { + if m != nil { + return m.Documents + } + return nil +} + +// A target specified by a query. +type Target_QueryTarget struct { + // The parent resource name. In the format: + // `projects/{project_id}/databases/{database_id}/documents` or + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents` or + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The query to run. 
+ // + // Types that are valid to be assigned to QueryType: + // *Target_QueryTarget_StructuredQuery + QueryType isTarget_QueryTarget_QueryType `protobuf_oneof:"query_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Target_QueryTarget) Reset() { *m = Target_QueryTarget{} } +func (m *Target_QueryTarget) String() string { return proto.CompactTextString(m) } +func (*Target_QueryTarget) ProtoMessage() {} +func (*Target_QueryTarget) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{19, 1} +} +func (m *Target_QueryTarget) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Target_QueryTarget.Unmarshal(m, b) +} +func (m *Target_QueryTarget) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Target_QueryTarget.Marshal(b, m, deterministic) +} +func (dst *Target_QueryTarget) XXX_Merge(src proto.Message) { + xxx_messageInfo_Target_QueryTarget.Merge(dst, src) +} +func (m *Target_QueryTarget) XXX_Size() int { + return xxx_messageInfo_Target_QueryTarget.Size(m) +} +func (m *Target_QueryTarget) XXX_DiscardUnknown() { + xxx_messageInfo_Target_QueryTarget.DiscardUnknown(m) +} + +var xxx_messageInfo_Target_QueryTarget proto.InternalMessageInfo + +func (m *Target_QueryTarget) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isTarget_QueryTarget_QueryType interface { + isTarget_QueryTarget_QueryType() +} + +type Target_QueryTarget_StructuredQuery struct { + StructuredQuery *StructuredQuery `protobuf:"bytes,2,opt,name=structured_query,json=structuredQuery,proto3,oneof"` +} + +func (*Target_QueryTarget_StructuredQuery) isTarget_QueryTarget_QueryType() {} + +func (m *Target_QueryTarget) GetQueryType() isTarget_QueryTarget_QueryType { + if m != nil { + return m.QueryType + } + return nil +} + +func (m *Target_QueryTarget) GetStructuredQuery() *StructuredQuery { + if x, ok := m.GetQueryType().(*Target_QueryTarget_StructuredQuery); ok { + return x.StructuredQuery + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Target_QueryTarget) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Target_QueryTarget_OneofMarshaler, _Target_QueryTarget_OneofUnmarshaler, _Target_QueryTarget_OneofSizer, []interface{}{ + (*Target_QueryTarget_StructuredQuery)(nil), + } +} + +func _Target_QueryTarget_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Target_QueryTarget) + // query_type + switch x := m.QueryType.(type) { + case *Target_QueryTarget_StructuredQuery: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StructuredQuery); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Target_QueryTarget.QueryType has unexpected type %T", x) + } + return nil +} + +func _Target_QueryTarget_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Target_QueryTarget) + switch tag { + case 2: // query_type.structured_query + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery) + err := b.DecodeMessage(msg) + m.QueryType = &Target_QueryTarget_StructuredQuery{msg} + return true, err + default: + return false, nil + } +} + +func _Target_QueryTarget_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Target_QueryTarget) + // query_type + switch x := m.QueryType.(type) { + case *Target_QueryTarget_StructuredQuery: + s := proto.Size(x.StructuredQuery) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Targets being watched have changed. +type TargetChange struct { + // The type of change that occurred. + TargetChangeType TargetChange_TargetChangeType `protobuf:"varint,1,opt,name=target_change_type,json=targetChangeType,proto3,enum=google.firestore.v1beta1.TargetChange_TargetChangeType" json:"target_change_type,omitempty"` + // The target IDs of targets that have changed. + // + // If empty, the change applies to all targets. + // + // For `target_change_type=ADD`, the order of the target IDs matches the order + // of the requests to add the targets. This allows clients to unambiguously + // associate server-assigned target IDs with added targets. + // + // For other states, the order of the target IDs is not defined. + TargetIds []int32 `protobuf:"varint,2,rep,packed,name=target_ids,json=targetIds,proto3" json:"target_ids,omitempty"` + // The error that resulted in this change, if applicable. + Cause *status.Status `protobuf:"bytes,3,opt,name=cause,proto3" json:"cause,omitempty"` + // A token that can be used to resume the stream for the given `target_ids`, + // or all targets if `target_ids` is empty. + // + // Not set on every target change. + ResumeToken []byte `protobuf:"bytes,4,opt,name=resume_token,json=resumeToken,proto3" json:"resume_token,omitempty"` + // The consistent `read_time` for the given `target_ids` (omitted when the + // target_ids are not at a consistent snapshot). + // + // The stream is guaranteed to send a `read_time` with `target_ids` empty + // whenever the entire stream reaches a new consistent snapshot. ADD, + // CURRENT, and RESET messages are guaranteed to (eventually) result in a + // new consistent snapshot (while NO_CHANGE and REMOVE messages are not). + // + // For a given stream, `read_time` is guaranteed to be monotonically + // increasing. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TargetChange) Reset() { *m = TargetChange{} } +func (m *TargetChange) String() string { return proto.CompactTextString(m) } +func (*TargetChange) ProtoMessage() {} +func (*TargetChange) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{20} +} +func (m *TargetChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TargetChange.Unmarshal(m, b) +} +func (m *TargetChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TargetChange.Marshal(b, m, deterministic) +} +func (dst *TargetChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_TargetChange.Merge(dst, src) +} +func (m *TargetChange) XXX_Size() int { + return xxx_messageInfo_TargetChange.Size(m) +} +func (m *TargetChange) XXX_DiscardUnknown() { + xxx_messageInfo_TargetChange.DiscardUnknown(m) +} + +var xxx_messageInfo_TargetChange proto.InternalMessageInfo + +func (m *TargetChange) GetTargetChangeType() TargetChange_TargetChangeType { + if m != nil { + return m.TargetChangeType + } + return TargetChange_NO_CHANGE +} + +func (m *TargetChange) GetTargetIds() []int32 { + if m != nil { + return m.TargetIds + } + return nil +} + +func (m *TargetChange) GetCause() *status.Status { + if m != nil { + return m.Cause + } + return nil +} + +func (m *TargetChange) GetResumeToken() []byte { + if m != nil { + return m.ResumeToken + } + return nil +} + +func (m *TargetChange) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// The request for [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. +type ListCollectionIdsRequest struct { + // The parent document. In the format: + // `projects/{project_id}/databases/{database_id}/documents/{document_path}`. + // For example: + // `projects/my-project/databases/my-database/documents/chatrooms/my-chatroom` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of results to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // A page token. Must be a value from + // [ListCollectionIdsResponse][google.firestore.v1beta1.ListCollectionIdsResponse]. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCollectionIdsRequest) Reset() { *m = ListCollectionIdsRequest{} } +func (m *ListCollectionIdsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCollectionIdsRequest) ProtoMessage() {} +func (*ListCollectionIdsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{21} +} +func (m *ListCollectionIdsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCollectionIdsRequest.Unmarshal(m, b) +} +func (m *ListCollectionIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCollectionIdsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCollectionIdsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCollectionIdsRequest.Merge(dst, src) +} +func (m *ListCollectionIdsRequest) XXX_Size() int { + return xxx_messageInfo_ListCollectionIdsRequest.Size(m) +} +func (m *ListCollectionIdsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCollectionIdsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCollectionIdsRequest proto.InternalMessageInfo + +func (m *ListCollectionIdsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListCollectionIdsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListCollectionIdsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response from [Firestore.ListCollectionIds][google.firestore.v1beta1.Firestore.ListCollectionIds]. +type ListCollectionIdsResponse struct { + // The collection ids. + CollectionIds []string `protobuf:"bytes,1,rep,name=collection_ids,json=collectionIds,proto3" json:"collection_ids,omitempty"` + // A page token that may be used to continue the list. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCollectionIdsResponse) Reset() { *m = ListCollectionIdsResponse{} } +func (m *ListCollectionIdsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCollectionIdsResponse) ProtoMessage() {} +func (*ListCollectionIdsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_firestore_d225c1066b9de1b1, []int{22} +} +func (m *ListCollectionIdsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCollectionIdsResponse.Unmarshal(m, b) +} +func (m *ListCollectionIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCollectionIdsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCollectionIdsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCollectionIdsResponse.Merge(dst, src) +} +func (m *ListCollectionIdsResponse) XXX_Size() int { + return xxx_messageInfo_ListCollectionIdsResponse.Size(m) +} +func (m *ListCollectionIdsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCollectionIdsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCollectionIdsResponse proto.InternalMessageInfo + +func (m *ListCollectionIdsResponse) GetCollectionIds() []string { + if m != nil { + return m.CollectionIds + } + return nil +} + +func (m *ListCollectionIdsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*GetDocumentRequest)(nil), "google.firestore.v1beta1.GetDocumentRequest") + proto.RegisterType((*ListDocumentsRequest)(nil), "google.firestore.v1beta1.ListDocumentsRequest") + proto.RegisterType((*ListDocumentsResponse)(nil), "google.firestore.v1beta1.ListDocumentsResponse") + proto.RegisterType((*CreateDocumentRequest)(nil), "google.firestore.v1beta1.CreateDocumentRequest") + proto.RegisterType((*UpdateDocumentRequest)(nil), "google.firestore.v1beta1.UpdateDocumentRequest") + proto.RegisterType((*DeleteDocumentRequest)(nil), "google.firestore.v1beta1.DeleteDocumentRequest") + proto.RegisterType((*BatchGetDocumentsRequest)(nil), "google.firestore.v1beta1.BatchGetDocumentsRequest") + proto.RegisterType((*BatchGetDocumentsResponse)(nil), "google.firestore.v1beta1.BatchGetDocumentsResponse") + proto.RegisterType((*BeginTransactionRequest)(nil), "google.firestore.v1beta1.BeginTransactionRequest") + proto.RegisterType((*BeginTransactionResponse)(nil), "google.firestore.v1beta1.BeginTransactionResponse") + proto.RegisterType((*CommitRequest)(nil), "google.firestore.v1beta1.CommitRequest") + proto.RegisterType((*CommitResponse)(nil), "google.firestore.v1beta1.CommitResponse") + proto.RegisterType((*RollbackRequest)(nil), "google.firestore.v1beta1.RollbackRequest") + proto.RegisterType((*RunQueryRequest)(nil), "google.firestore.v1beta1.RunQueryRequest") + proto.RegisterType((*RunQueryResponse)(nil), "google.firestore.v1beta1.RunQueryResponse") + proto.RegisterType((*WriteRequest)(nil), "google.firestore.v1beta1.WriteRequest") + proto.RegisterMapType((map[string]string)(nil), "google.firestore.v1beta1.WriteRequest.LabelsEntry") + proto.RegisterType((*WriteResponse)(nil), "google.firestore.v1beta1.WriteResponse") + proto.RegisterType((*ListenRequest)(nil), "google.firestore.v1beta1.ListenRequest") + proto.RegisterMapType((map[string]string)(nil), 
"google.firestore.v1beta1.ListenRequest.LabelsEntry") + proto.RegisterType((*ListenResponse)(nil), "google.firestore.v1beta1.ListenResponse") + proto.RegisterType((*Target)(nil), "google.firestore.v1beta1.Target") + proto.RegisterType((*Target_DocumentsTarget)(nil), "google.firestore.v1beta1.Target.DocumentsTarget") + proto.RegisterType((*Target_QueryTarget)(nil), "google.firestore.v1beta1.Target.QueryTarget") + proto.RegisterType((*TargetChange)(nil), "google.firestore.v1beta1.TargetChange") + proto.RegisterType((*ListCollectionIdsRequest)(nil), "google.firestore.v1beta1.ListCollectionIdsRequest") + proto.RegisterType((*ListCollectionIdsResponse)(nil), "google.firestore.v1beta1.ListCollectionIdsResponse") + proto.RegisterEnum("google.firestore.v1beta1.TargetChange_TargetChangeType", TargetChange_TargetChangeType_name, TargetChange_TargetChangeType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// FirestoreClient is the client API for Firestore service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type FirestoreClient interface { + // Gets a single document. + GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Lists documents. + ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) + // Creates a new document. + CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Updates or inserts a document. + UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*Document, error) + // Deletes a document. + DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets multiple documents. + // + // Documents returned by this method are not guaranteed to be returned in the + // same order that they were requested. + BatchGetDocuments(ctx context.Context, in *BatchGetDocumentsRequest, opts ...grpc.CallOption) (Firestore_BatchGetDocumentsClient, error) + // Starts a new transaction. + BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) + // Commits a transaction, while optionally updating documents. + Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Runs a query. + RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (Firestore_RunQueryClient, error) + // Streams batches of document updates and deletes, in order. + Write(ctx context.Context, opts ...grpc.CallOption) (Firestore_WriteClient, error) + // Listens to changes. + Listen(ctx context.Context, opts ...grpc.CallOption) (Firestore_ListenClient, error) + // Lists all the collection IDs underneath a document. 
+ ListCollectionIds(ctx context.Context, in *ListCollectionIdsRequest, opts ...grpc.CallOption) (*ListCollectionIdsResponse, error) +} + +type firestoreClient struct { + cc *grpc.ClientConn +} + +func NewFirestoreClient(cc *grpc.ClientConn) FirestoreClient { + return &firestoreClient{cc} +} + +func (c *firestoreClient) GetDocument(ctx context.Context, in *GetDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/GetDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) ListDocuments(ctx context.Context, in *ListDocumentsRequest, opts ...grpc.CallOption) (*ListDocumentsResponse, error) { + out := new(ListDocumentsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/ListDocuments", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) CreateDocument(ctx context.Context, in *CreateDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/CreateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) UpdateDocument(ctx context.Context, in *UpdateDocumentRequest, opts ...grpc.CallOption) (*Document, error) { + out := new(Document) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/UpdateDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) DeleteDocument(ctx context.Context, in *DeleteDocumentRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/DeleteDocument", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) BatchGetDocuments(ctx context.Context, in *BatchGetDocumentsRequest, opts ...grpc.CallOption) (Firestore_BatchGetDocumentsClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[0], "/google.firestore.v1beta1.Firestore/BatchGetDocuments", opts...) + if err != nil { + return nil, err + } + x := &firestoreBatchGetDocumentsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Firestore_BatchGetDocumentsClient interface { + Recv() (*BatchGetDocumentsResponse, error) + grpc.ClientStream +} + +type firestoreBatchGetDocumentsClient struct { + grpc.ClientStream +} + +func (x *firestoreBatchGetDocumentsClient) Recv() (*BatchGetDocumentsResponse, error) { + m := new(BatchGetDocumentsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*BeginTransactionResponse, error) { + out := new(BeginTransactionResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/BeginTransaction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) { + out := new(CommitResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/Commit", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/Rollback", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *firestoreClient) RunQuery(ctx context.Context, in *RunQueryRequest, opts ...grpc.CallOption) (Firestore_RunQueryClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[1], "/google.firestore.v1beta1.Firestore/RunQuery", opts...) + if err != nil { + return nil, err + } + x := &firestoreRunQueryClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Firestore_RunQueryClient interface { + Recv() (*RunQueryResponse, error) + grpc.ClientStream +} + +type firestoreRunQueryClient struct { + grpc.ClientStream +} + +func (x *firestoreRunQueryClient) Recv() (*RunQueryResponse, error) { + m := new(RunQueryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) Write(ctx context.Context, opts ...grpc.CallOption) (Firestore_WriteClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[2], "/google.firestore.v1beta1.Firestore/Write", opts...) + if err != nil { + return nil, err + } + x := &firestoreWriteClient{stream} + return x, nil +} + +type Firestore_WriteClient interface { + Send(*WriteRequest) error + Recv() (*WriteResponse, error) + grpc.ClientStream +} + +type firestoreWriteClient struct { + grpc.ClientStream +} + +func (x *firestoreWriteClient) Send(m *WriteRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *firestoreWriteClient) Recv() (*WriteResponse, error) { + m := new(WriteResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) Listen(ctx context.Context, opts ...grpc.CallOption) (Firestore_ListenClient, error) { + stream, err := c.cc.NewStream(ctx, &_Firestore_serviceDesc.Streams[3], "/google.firestore.v1beta1.Firestore/Listen", opts...) + if err != nil { + return nil, err + } + x := &firestoreListenClient{stream} + return x, nil +} + +type Firestore_ListenClient interface { + Send(*ListenRequest) error + Recv() (*ListenResponse, error) + grpc.ClientStream +} + +type firestoreListenClient struct { + grpc.ClientStream +} + +func (x *firestoreListenClient) Send(m *ListenRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *firestoreListenClient) Recv() (*ListenResponse, error) { + m := new(ListenResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *firestoreClient) ListCollectionIds(ctx context.Context, in *ListCollectionIdsRequest, opts ...grpc.CallOption) (*ListCollectionIdsResponse, error) { + out := new(ListCollectionIdsResponse) + err := c.cc.Invoke(ctx, "/google.firestore.v1beta1.Firestore/ListCollectionIds", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// FirestoreServer is the server API for Firestore service. +type FirestoreServer interface { + // Gets a single document. + GetDocument(context.Context, *GetDocumentRequest) (*Document, error) + // Lists documents. 
+ ListDocuments(context.Context, *ListDocumentsRequest) (*ListDocumentsResponse, error) + // Creates a new document. + CreateDocument(context.Context, *CreateDocumentRequest) (*Document, error) + // Updates or inserts a document. + UpdateDocument(context.Context, *UpdateDocumentRequest) (*Document, error) + // Deletes a document. + DeleteDocument(context.Context, *DeleteDocumentRequest) (*empty.Empty, error) + // Gets multiple documents. + // + // Documents returned by this method are not guaranteed to be returned in the + // same order that they were requested. + BatchGetDocuments(*BatchGetDocumentsRequest, Firestore_BatchGetDocumentsServer) error + // Starts a new transaction. + BeginTransaction(context.Context, *BeginTransactionRequest) (*BeginTransactionResponse, error) + // Commits a transaction, while optionally updating documents. + Commit(context.Context, *CommitRequest) (*CommitResponse, error) + // Rolls back a transaction. + Rollback(context.Context, *RollbackRequest) (*empty.Empty, error) + // Runs a query. + RunQuery(*RunQueryRequest, Firestore_RunQueryServer) error + // Streams batches of document updates and deletes, in order. + Write(Firestore_WriteServer) error + // Listens to changes. + Listen(Firestore_ListenServer) error + // Lists all the collection IDs underneath a document. + ListCollectionIds(context.Context, *ListCollectionIdsRequest) (*ListCollectionIdsResponse, error) +} + +func RegisterFirestoreServer(s *grpc.Server, srv FirestoreServer) { + s.RegisterService(&_Firestore_serviceDesc, srv) +} + +func _Firestore_GetDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).GetDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/GetDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).GetDocument(ctx, req.(*GetDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_ListDocuments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDocumentsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).ListDocuments(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/ListDocuments", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).ListDocuments(ctx, req.(*ListDocumentsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_CreateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).CreateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/CreateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).CreateDocument(ctx, req.(*CreateDocumentRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _Firestore_UpdateDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).UpdateDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/UpdateDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).UpdateDocument(ctx, req.(*UpdateDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_DeleteDocument_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDocumentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).DeleteDocument(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/DeleteDocument", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).DeleteDocument(ctx, req.(*DeleteDocumentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_BatchGetDocuments_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BatchGetDocumentsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(FirestoreServer).BatchGetDocuments(m, &firestoreBatchGetDocumentsServer{stream}) +} + +type Firestore_BatchGetDocumentsServer interface { + Send(*BatchGetDocumentsResponse) error + grpc.ServerStream +} + +type firestoreBatchGetDocumentsServer struct { + grpc.ServerStream +} + +func (x *firestoreBatchGetDocumentsServer) Send(m *BatchGetDocumentsResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Firestore_BeginTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BeginTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).BeginTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/BeginTransaction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).BeginTransaction(ctx, req.(*BeginTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_Commit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).Commit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/Commit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).Commit(ctx, req.(*CommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_Rollback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackRequest) + if err := dec(in); err != nil { + return nil, 
err + } + if interceptor == nil { + return srv.(FirestoreServer).Rollback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/Rollback", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).Rollback(ctx, req.(*RollbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Firestore_RunQuery_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(RunQueryRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(FirestoreServer).RunQuery(m, &firestoreRunQueryServer{stream}) +} + +type Firestore_RunQueryServer interface { + Send(*RunQueryResponse) error + grpc.ServerStream +} + +type firestoreRunQueryServer struct { + grpc.ServerStream +} + +func (x *firestoreRunQueryServer) Send(m *RunQueryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Firestore_Write_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FirestoreServer).Write(&firestoreWriteServer{stream}) +} + +type Firestore_WriteServer interface { + Send(*WriteResponse) error + Recv() (*WriteRequest, error) + grpc.ServerStream +} + +type firestoreWriteServer struct { + grpc.ServerStream +} + +func (x *firestoreWriteServer) Send(m *WriteResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *firestoreWriteServer) Recv() (*WriteRequest, error) { + m := new(WriteRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Firestore_Listen_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(FirestoreServer).Listen(&firestoreListenServer{stream}) +} + +type Firestore_ListenServer interface { + Send(*ListenResponse) error + Recv() (*ListenRequest, error) + grpc.ServerStream +} + +type firestoreListenServer struct { + grpc.ServerStream +} + +func (x *firestoreListenServer) Send(m *ListenResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *firestoreListenServer) Recv() (*ListenRequest, error) { + m := new(ListenRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Firestore_ListCollectionIds_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCollectionIdsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(FirestoreServer).ListCollectionIds(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.firestore.v1beta1.Firestore/ListCollectionIds", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(FirestoreServer).ListCollectionIds(ctx, req.(*ListCollectionIdsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Firestore_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.firestore.v1beta1.Firestore", + HandlerType: (*FirestoreServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetDocument", + Handler: _Firestore_GetDocument_Handler, + }, + { + MethodName: "ListDocuments", + Handler: _Firestore_ListDocuments_Handler, + }, + { + MethodName: "CreateDocument", + Handler: _Firestore_CreateDocument_Handler, + }, + { + MethodName: "UpdateDocument", + Handler: _Firestore_UpdateDocument_Handler, + }, + { + MethodName: "DeleteDocument", + Handler: _Firestore_DeleteDocument_Handler, + }, + { + MethodName: "BeginTransaction", + Handler: 
_Firestore_BeginTransaction_Handler, + }, + { + MethodName: "Commit", + Handler: _Firestore_Commit_Handler, + }, + { + MethodName: "Rollback", + Handler: _Firestore_Rollback_Handler, + }, + { + MethodName: "ListCollectionIds", + Handler: _Firestore_ListCollectionIds_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "BatchGetDocuments", + Handler: _Firestore_BatchGetDocuments_Handler, + ServerStreams: true, + }, + { + StreamName: "RunQuery", + Handler: _Firestore_RunQuery_Handler, + ServerStreams: true, + }, + { + StreamName: "Write", + Handler: _Firestore_Write_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "Listen", + Handler: _Firestore_Listen_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/firestore/v1beta1/firestore.proto", +} + +func init() { + proto.RegisterFile("google/firestore/v1beta1/firestore.proto", fileDescriptor_firestore_d225c1066b9de1b1) +} + +var fileDescriptor_firestore_d225c1066b9de1b1 = []byte{ + // 2214 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x5a, 0xcd, 0x8f, 0x1b, 0x49, + 0x15, 0x77, 0xb5, 0x3f, 0xc6, 0x7e, 0xfe, 0x18, 0x6f, 0x91, 0x64, 0x1d, 0x27, 0x4b, 0x86, 0x5e, + 0x92, 0x18, 0x6b, 0x65, 0x27, 0x13, 0xa1, 0xb0, 0x0e, 0x59, 0x92, 0x99, 0xf1, 0x8c, 0x27, 0x24, + 0x33, 0x93, 0x9e, 0xd9, 0x44, 0x5a, 0x45, 0xb2, 0x7a, 0xba, 0x2b, 0x4e, 0xef, 0xd8, 0xdd, 0xde, + 0xee, 0x76, 0x66, 0x67, 0x57, 0x83, 0x80, 0x03, 0x1c, 0x90, 0xf6, 0xc2, 0x01, 0xf6, 0x02, 0x08, + 0x0e, 0x48, 0x7b, 0x80, 0x0b, 0x12, 0x17, 0x84, 0xc4, 0x6d, 0x05, 0x47, 0x24, 0x84, 0xc4, 0x01, + 0x0e, 0x9c, 0xb8, 0x21, 0xf1, 0x07, 0xa0, 0xae, 0xaa, 0x6e, 0x77, 0xb7, 0xbf, 0xda, 0x9e, 0x68, + 0x6f, 0x5d, 0xcf, 0xaf, 0x5e, 0xbd, 0x8f, 0xdf, 0x7b, 0xf5, 0xea, 0xcd, 0x40, 0xa5, 0x63, 0x18, + 0x9d, 0x2e, 0xa9, 0x3f, 0xd7, 0x4c, 0x62, 0xd9, 0x86, 0x49, 0xea, 0x2f, 0x6f, 0x1e, 0x12, 0x5b, + 0xbe, 0x39, 0xa4, 0xd4, 0xfa, 0xa6, 0x61, 0x1b, 0xb8, 0xc4, 0x38, 0x6b, 0x43, 0x3a, 0xe7, 0x2c, + 0x5f, 0xe6, 0x32, 0xe4, 0xbe, 0x56, 0x97, 0x75, 0xdd, 0xb0, 0x65, 0x5b, 0x33, 0x74, 0x8b, 0xed, + 0x2b, 0x5f, 0x9d, 0x78, 0x82, 0x62, 0xf4, 0x7a, 0x86, 0xce, 0xd9, 0xae, 0x4f, 0x64, 0x53, 0x0d, + 0x65, 0xd0, 0x23, 0xba, 0xcd, 0x19, 0xbf, 0x3a, 0x91, 0xf1, 0x83, 0x01, 0x31, 0x4f, 0x66, 0x72, + 0x1d, 0x9b, 0x9a, 0xcd, 0x6d, 0x2a, 0x5f, 0xe2, 0x5c, 0x74, 0x75, 0x38, 0x78, 0x5e, 0x27, 0xbd, + 0xbe, 0xed, 0x8a, 0xb8, 0x12, 0xfe, 0xd1, 0xd6, 0x7a, 0xc4, 0xb2, 0xe5, 0x5e, 0x9f, 0x33, 0xbc, + 0xce, 0x19, 0xcc, 0xbe, 0x52, 0xb7, 0x6c, 0xd9, 0x1e, 0x70, 0x93, 0xc5, 0x7f, 0x20, 0xc0, 0x5b, + 0xc4, 0xde, 0xe0, 0x8a, 0x4b, 0xe4, 0x83, 0x01, 0xb1, 0x6c, 0x8c, 0x21, 0xa1, 0xcb, 0x3d, 0x52, + 0x42, 0x2b, 0xa8, 0x92, 0x91, 0xe8, 0x37, 0x6e, 0x40, 0xa2, 0x27, 0x5b, 0x47, 0x25, 0x61, 0x05, + 0x55, 0xb2, 0xab, 0xd7, 0x6a, 0x93, 0x9c, 0x5c, 0x73, 0x85, 0x3d, 0x92, 0xad, 0x23, 0x89, 0xee, + 0xc1, 0x22, 0x64, 0x6d, 0x53, 0xd6, 0x2d, 0x59, 0x71, 0xfc, 0x5d, 0x8a, 0xaf, 0xa0, 0x4a, 0xae, + 0x15, 0x93, 0xfc, 0x44, 0xfc, 0x36, 0x64, 0x4c, 0x22, 0xab, 0x6d, 0x47, 0xf7, 0x52, 0x92, 0x1e, + 0x52, 0x76, 0x0f, 0x71, 0x0d, 0xab, 0x1d, 0xb8, 0x86, 0xb5, 0x62, 0x52, 0xda, 0x61, 0x77, 0x08, + 0x6b, 0x17, 0xe0, 0x9c, 0x62, 0xe8, 0x96, 0x66, 0xd9, 0x44, 0x57, 0x4e, 0xda, 0x16, 0xe9, 0x12, + 0xc5, 0x36, 0x4c, 0xf1, 0xbb, 0x71, 0x38, 0xf7, 0x50, 0xb3, 0x3c, 0xf3, 0x2c, 0xd7, 0xbe, 0x0b, + 0x90, 0xea, 0xcb, 0x26, 0xd1, 0x6d, 0x6e, 0x21, 0x5f, 0xe1, 0x37, 0x21, 0xaf, 0x18, 0x5d, 0x67, + 0xb7, 0x66, 0xe8, 0x6d, 0x4d, 0xa5, 
0xc6, 0x66, 0xa4, 0xdc, 0x90, 0xb8, 0xad, 0xe2, 0x4b, 0x90, + 0xe9, 0xcb, 0x1d, 0xd2, 0xb6, 0xb4, 0x8f, 0x08, 0x35, 0x25, 0x29, 0xa5, 0x1d, 0xc2, 0xbe, 0xf6, + 0x11, 0xc1, 0x6f, 0x00, 0xd0, 0x1f, 0x6d, 0xe3, 0x88, 0xe8, 0xa5, 0x04, 0xdd, 0x4e, 0xd9, 0x0f, + 0x1c, 0x02, 0xbe, 0x08, 0x69, 0xc3, 0x54, 0x89, 0xd9, 0x3e, 0x3c, 0x29, 0xa5, 0xe8, 0x8f, 0x4b, + 0x74, 0xbd, 0x76, 0xe2, 0xf9, 0x77, 0xe9, 0xec, 0xfe, 0x4d, 0xcf, 0xf4, 0x2f, 0xcc, 0xe3, 0x5f, + 0xfc, 0x15, 0xc8, 0x59, 0x2f, 0x8c, 0xe3, 0x76, 0x4f, 0xb3, 0x2c, 0x4d, 0xef, 0x94, 0x72, 0x2b, + 0xa8, 0x92, 0x96, 0xb2, 0x0e, 0xed, 0x11, 0x23, 0x4d, 0x0c, 0xc1, 0xf7, 0x10, 0x9c, 0x0f, 0x85, + 0xc0, 0xea, 0x1b, 0xba, 0x45, 0xf0, 0x3d, 0xc8, 0xb8, 0xf9, 0x62, 0x95, 0xd0, 0x4a, 0xbc, 0x92, + 0x5d, 0x15, 0x67, 0x1b, 0x2d, 0x0d, 0x37, 0xe1, 0x6b, 0xb0, 0xac, 0x93, 0x0f, 0xed, 0xb6, 0xcf, + 0xe1, 0x2c, 0x5e, 0x79, 0x87, 0xbc, 0xe7, 0x3a, 0x5d, 0xfc, 0x2f, 0x82, 0xf3, 0xeb, 0x26, 0x91, + 0x6d, 0x12, 0xc6, 0xf9, 0x99, 0x70, 0x70, 0x05, 0xb2, 0xae, 0x2e, 0x0e, 0x4b, 0x9c, 0xb2, 0x80, + 0x4b, 0xda, 0x56, 0xf1, 0x3b, 0x90, 0x76, 0x57, 0x14, 0x09, 0xd1, 0x0c, 0xf4, 0xf6, 0x78, 0x88, + 0x48, 0xce, 0x8f, 0x08, 0xf1, 0xb7, 0x02, 0x9c, 0x7f, 0xb7, 0xaf, 0x8e, 0xb1, 0xd9, 0xaf, 0x15, + 0x5a, 0x40, 0xab, 0x2d, 0xc8, 0x0e, 0xa8, 0xe0, 0xf6, 0x02, 0xe5, 0x00, 0xd8, 0x56, 0xe7, 0xdb, + 0x33, 0x2f, 0xbe, 0x00, 0xe0, 0x1f, 0x43, 0x51, 0x19, 0x98, 0x4e, 0xac, 0xda, 0x21, 0x17, 0x4f, + 0x91, 0xb3, 0x67, 0x12, 0xc5, 0xd0, 0x55, 0xcd, 0x89, 0x9f, 0xb4, 0xcc, 0xf7, 0xbb, 0xc2, 0xc5, + 0xef, 0xc0, 0xf9, 0x0d, 0xd2, 0x25, 0xa3, 0x0e, 0x1b, 0x57, 0x0c, 0xc7, 0x9d, 0x2f, 0x9c, 0xed, + 0xfc, 0x7f, 0x09, 0x50, 0x5a, 0x93, 0x6d, 0xe5, 0x85, 0xaf, 0x1e, 0x7b, 0x05, 0xab, 0x0c, 0x69, + 0x55, 0xb6, 0xe5, 0x43, 0xd9, 0x72, 0xf5, 0xf0, 0xd6, 0xf8, 0xb2, 0x3f, 0x91, 0x84, 0x95, 0xb8, + 0x53, 0x71, 0x86, 0x49, 0x72, 0x16, 0x2f, 0x87, 0xca, 0x4a, 0x62, 0x5c, 0x59, 0x79, 0xea, 0x24, + 0xe1, 0x71, 0xdb, 0xcf, 0xc7, 0xf0, 0xfa, 0xd6, 0xe4, 0xa3, 0x0e, 0x86, 0xcc, 0xbb, 0x7d, 0x7a, + 0x03, 0xb7, 0x62, 0x52, 0x41, 0x27, 0xc7, 0x07, 0x93, 0xea, 0xd5, 0xd2, 0x2b, 0xb9, 0x0f, 0xfe, + 0x8e, 0xe0, 0xe2, 0x18, 0x17, 0xf3, 0x82, 0xd4, 0x80, 0xe4, 0x73, 0x63, 0xa0, 0xab, 0xd1, 0xb3, + 0xa2, 0x15, 0x93, 0xd8, 0x16, 0x5c, 0x86, 0x25, 0xb7, 0x38, 0xd2, 0x52, 0xd1, 0x8a, 0x49, 0x2e, + 0x01, 0xaf, 0x8c, 0xb9, 0xfc, 0x82, 0x3e, 0xbc, 0xed, 0x37, 0x35, 0x31, 0xcb, 0x54, 0x9f, 0xa1, + 0x69, 0x48, 0x99, 0xc4, 0x1a, 0x74, 0x6d, 0xf1, 0x14, 0x5e, 0x5f, 0x23, 0x1d, 0x4d, 0xf7, 0x79, + 0x30, 0x0a, 0x76, 0x36, 0x61, 0xc9, 0x60, 0x11, 0xe0, 0xf0, 0x9d, 0x2b, 0x6a, 0x92, 0xbb, 0x59, + 0xfc, 0x26, 0x94, 0x46, 0x8f, 0xe7, 0x7e, 0x0d, 0xd9, 0x8f, 0x46, 0xec, 0x17, 0x7f, 0x80, 0x20, + 0xbf, 0x6e, 0xf4, 0x7a, 0x9a, 0x1d, 0x45, 0xe7, 0xdb, 0x90, 0xa2, 0x9d, 0x11, 0x03, 0x7b, 0x76, + 0xf5, 0xca, 0x64, 0x95, 0x9f, 0x3a, 0x7c, 0x12, 0x67, 0x9f, 0x1d, 0x08, 0xf1, 0x53, 0x04, 0x05, + 0x57, 0x11, 0xae, 0xfd, 0x03, 0xc8, 0xd3, 0xed, 0x6d, 0xe6, 0x68, 0xf7, 0xaa, 0xba, 0x3a, 0xeb, + 0x50, 0xca, 0x2d, 0xe5, 0x8e, 0x87, 0x0b, 0x0b, 0xdf, 0x81, 0xac, 0x42, 0xa5, 0xb3, 0x48, 0x0b, + 0x33, 0x23, 0x0d, 0x8c, 0xdd, 0x21, 0x88, 0xbb, 0xb0, 0x2c, 0x19, 0xdd, 0xee, 0xa1, 0xac, 0x1c, + 0x45, 0xf1, 0x52, 0xc8, 0x58, 0x61, 0xd4, 0xd8, 0x7f, 0x0a, 0xb0, 0x2c, 0x0d, 0xf4, 0xc7, 0x4e, + 0x2f, 0x3a, 0xeb, 0x42, 0x7c, 0x02, 0x45, 0xcb, 0x36, 0x07, 0x8a, 0x3d, 0x30, 0x89, 0xda, 0xa6, + 0xed, 0x2b, 0x57, 0xff, 0x6b, 0x93, 0x1d, 0xb1, 0xef, 0xed, 0xa0, 0x67, 0xb4, 0x62, 0xd2, 0xb2, + 0x15, 0x24, 0x85, 0x2b, 0x4c, 0x92, 0x56, 0x18, 0x34, 0xb3, 
0xc2, 0xa4, 0x16, 0xa8, 0x30, 0xe8, + 0xac, 0x15, 0x06, 0xf9, 0x12, 0x2f, 0x07, 0x40, 0x9d, 0xd0, 0xb6, 0x4f, 0xfa, 0x93, 0xeb, 0xcd, + 0xdf, 0x10, 0x14, 0x87, 0x1e, 0x1e, 0x9f, 0x0e, 0xa3, 0x81, 0x39, 0xf3, 0x0d, 0x1d, 0x28, 0x27, + 0xf1, 0xe8, 0xe5, 0x04, 0x5f, 0x87, 0x65, 0xeb, 0x48, 0xeb, 0xf7, 0x89, 0xea, 0xa1, 0x3d, 0x41, + 0xfb, 0xdb, 0x02, 0x27, 0x73, 0x20, 0x8b, 0x9f, 0x09, 0x90, 0xe3, 0x30, 0x9f, 0x8d, 0xc4, 0x4b, + 0x90, 0xb1, 0x6c, 0x93, 0xc8, 0xbd, 0x61, 0x23, 0x95, 0x66, 0x84, 0x6d, 0xd5, 0x97, 0xcc, 0xf1, + 0xf9, 0x92, 0xd9, 0xe9, 0x49, 0x99, 0xd4, 0x61, 0xab, 0x9d, 0x93, 0xb2, 0x8c, 0xc6, 0x9a, 0xed, + 0x07, 0x90, 0xea, 0xca, 0x87, 0xa4, 0x6b, 0x95, 0x92, 0x54, 0xf6, 0xea, 0xcc, 0x9c, 0xa5, 0xc6, + 0xd4, 0x1e, 0xd2, 0x4d, 0x4d, 0xdd, 0x36, 0x4f, 0x24, 0x2e, 0xa1, 0xfc, 0x36, 0x64, 0x7d, 0x64, + 0x5c, 0x84, 0xf8, 0x11, 0x39, 0xe1, 0xa6, 0x3a, 0x9f, 0xf8, 0x1c, 0x24, 0x5f, 0xca, 0xdd, 0x01, + 0xe1, 0x16, 0xb2, 0x45, 0x43, 0xf8, 0x06, 0x72, 0x6e, 0x9d, 0xbc, 0x5b, 0x13, 0x18, 0x04, 0x02, + 0x1e, 0x41, 0x21, 0x8f, 0x84, 0x0d, 0x13, 0xc6, 0x19, 0x16, 0xaa, 0x49, 0xf1, 0x57, 0x56, 0x93, + 0x12, 0x73, 0xd5, 0xa4, 0xdf, 0x08, 0x90, 0x7f, 0x48, 0x61, 0x1f, 0x05, 0x08, 0xf7, 0x01, 0x64, + 0x55, 0x6d, 0xdb, 0xb2, 0xd9, 0x21, 0x6e, 0xbb, 0xb4, 0x32, 0x25, 0x87, 0x29, 0x5f, 0x2b, 0x26, + 0x65, 0x64, 0x55, 0x65, 0x0b, 0x7c, 0x15, 0xf2, 0x26, 0xe9, 0x19, 0x2f, 0x89, 0x2b, 0x85, 0xbe, + 0xbf, 0x5a, 0x31, 0x29, 0xc7, 0xc8, 0x9c, 0xed, 0xdb, 0x5e, 0xe4, 0x13, 0xd4, 0x33, 0xb7, 0x26, + 0x9f, 0x12, 0x50, 0xff, 0x15, 0x87, 0x7e, 0x6d, 0x19, 0xf2, 0x4c, 0xcf, 0xb6, 0xf2, 0x42, 0xd6, + 0x3b, 0x44, 0xfc, 0x5d, 0x1c, 0x0a, 0xee, 0x89, 0x1c, 0x0c, 0x8f, 0x42, 0x3c, 0xb3, 0xfb, 0x48, + 0x66, 0xe4, 0x3a, 0xe5, 0x76, 0x4c, 0xb7, 0x7d, 0x6b, 0xbc, 0x0f, 0xcb, 0xde, 0xab, 0x84, 0x0b, + 0x64, 0x25, 0xa0, 0x32, 0xbb, 0x86, 0x78, 0x22, 0x0b, 0x6a, 0x80, 0x12, 0x10, 0xaa, 0xd2, 0x26, + 0x99, 0x03, 0x25, 0x82, 0x50, 0xd6, 0x54, 0xfb, 0x85, 0x32, 0x4a, 0x40, 0x28, 0x8b, 0x1e, 0xaf, + 0xeb, 0x11, 0x84, 0x4a, 0x94, 0xdf, 0x2f, 0x94, 0x51, 0xf0, 0x3a, 0xa4, 0x9e, 0x6b, 0x5d, 0x9b, + 0x98, 0xbc, 0x0b, 0x9d, 0x72, 0x3d, 0x35, 0x3f, 0x64, 0x05, 0x9b, 0x6c, 0xd2, 0x0d, 0xad, 0x98, + 0xc4, 0xb7, 0x3a, 0x61, 0x33, 0x79, 0x78, 0x68, 0x81, 0x17, 0x3f, 0x49, 0x40, 0x8a, 0x43, 0x6b, + 0x03, 0x92, 0xfe, 0xeb, 0xef, 0xad, 0x59, 0x61, 0xaa, 0xd1, 0xea, 0xef, 0x61, 0x99, 0x6d, 0xc6, + 0x7b, 0xfe, 0x9e, 0x9d, 0xc5, 0xe7, 0xc6, 0x4c, 0x49, 0x5e, 0xcb, 0x3a, 0xcc, 0x8c, 0x61, 0x9f, + 0xff, 0x26, 0xe4, 0x9c, 0x6a, 0xd0, 0xf3, 0x8f, 0x1e, 0xe8, 0x55, 0xca, 0xa8, 0xac, 0x70, 0x04, + 0x6e, 0xbc, 0xec, 0x3c, 0x37, 0x9e, 0x53, 0xb3, 0x38, 0x4c, 0x35, 0x95, 0xfa, 0x36, 0x29, 0xa5, + 0x19, 0x61, 0x5b, 0x75, 0x9e, 0x48, 0x86, 0xae, 0xb0, 0xf8, 0xa5, 0x25, 0xfa, 0x5d, 0xae, 0xc3, + 0x72, 0x48, 0xe1, 0xe9, 0x2f, 0x95, 0xf2, 0x8f, 0x10, 0x64, 0x7d, 0xce, 0xfa, 0xa2, 0x7b, 0x91, + 0xd0, 0x9d, 0x9e, 0x87, 0x2c, 0xb7, 0xd7, 0x5d, 0xba, 0xee, 0x75, 0x00, 0xf1, 0x1f, 0x01, 0x72, + 0xfe, 0x34, 0xc4, 0x04, 0x70, 0x20, 0x8b, 0x29, 0x1b, 0x55, 0xbc, 0xb0, 0x7a, 0x3b, 0x5a, 0x2a, + 0x07, 0x16, 0x07, 0x27, 0x7d, 0x22, 0x15, 0xed, 0x10, 0x05, 0xbf, 0x01, 0xe0, 0x45, 0x81, 0xb9, + 0x30, 0x29, 0x65, 0xdc, 0x30, 0x58, 0xb8, 0x02, 0x49, 0x45, 0x1e, 0x58, 0x6e, 0xca, 0x63, 0xf7, + 0x60, 0xb3, 0xaf, 0xd4, 0xf6, 0xe9, 0xdc, 0x4f, 0x62, 0x0c, 0xce, 0x2d, 0x33, 0x0a, 0x97, 0x20, + 0x58, 0x02, 0x6d, 0x44, 0x2a, 0x7a, 0x1b, 0x21, 0xee, 0x40, 0x31, 0x6c, 0x0a, 0xce, 0x43, 0x66, + 0x67, 0xb7, 0xbd, 0xde, 0xba, 0xbf, 0xb3, 0xd5, 0x2c, 0xc6, 0xf0, 0x12, 0xc4, 0xef, 
0x6f, 0x6c, + 0x14, 0x11, 0x06, 0x48, 0x49, 0xcd, 0x47, 0xbb, 0x4f, 0x9a, 0x45, 0x01, 0x67, 0x61, 0x69, 0xfd, + 0x5d, 0x49, 0x6a, 0xee, 0x1c, 0x14, 0xe3, 0x38, 0x03, 0x49, 0xa9, 0xb9, 0xdf, 0x3c, 0x28, 0x26, + 0x44, 0x1d, 0x4a, 0x4e, 0xcd, 0x5c, 0xf7, 0x0d, 0x5f, 0x66, 0x4e, 0xf2, 0x02, 0x43, 0x3a, 0x61, + 0xea, 0x90, 0x2e, 0x1e, 0x1a, 0xd2, 0x89, 0xef, 0xc3, 0xc5, 0x31, 0xe7, 0xf1, 0x72, 0x7d, 0x15, + 0x0a, 0x81, 0xd1, 0x10, 0x7b, 0x10, 0x64, 0xa4, 0xbc, 0x7f, 0x36, 0x14, 0x79, 0x36, 0xb5, 0xfa, + 0xf9, 0x97, 0x20, 0xb3, 0xe9, 0xc2, 0x02, 0xff, 0x0c, 0x41, 0xd6, 0xf7, 0x36, 0xc5, 0x53, 0xaa, + 0xcb, 0xe8, 0xd4, 0xb6, 0x1c, 0xa1, 0x4b, 0x14, 0xef, 0x7e, 0xff, 0xaf, 0xff, 0xfe, 0xb1, 0x70, + 0x1b, 0x7f, 0xdd, 0x9b, 0x32, 0x7f, 0xac, 0xcb, 0x3d, 0x72, 0xb7, 0x6f, 0x1a, 0xef, 0x13, 0xc5, + 0xb6, 0xea, 0xd5, 0xba, 0x7b, 0x5f, 0xd3, 0x6f, 0x37, 0x3b, 0xeb, 0xd5, 0x7a, 0xb5, 0x7a, 0x8a, + 0xff, 0x84, 0xd8, 0x85, 0xef, 0x65, 0x36, 0xae, 0x4d, 0xbf, 0x5a, 0xc3, 0x93, 0x8c, 0x72, 0x3d, + 0x32, 0x3f, 0x73, 0xb8, 0xb8, 0x4b, 0x35, 0xde, 0xc6, 0x5b, 0x43, 0x8d, 0x59, 0x8c, 0x23, 0xea, + 0x5c, 0xff, 0x38, 0x10, 0xaf, 0x53, 0xfc, 0x47, 0xe7, 0x91, 0x17, 0x18, 0x07, 0xe2, 0x29, 0x4a, + 0x8d, 0x1d, 0x1c, 0x46, 0x72, 0xf5, 0x53, 0xaa, 0xf8, 0x63, 0xb1, 0xb9, 0x80, 0xe2, 0xa3, 0x6a, + 0x37, 0x86, 0xfd, 0xfd, 0xef, 0x11, 0x14, 0x82, 0xb3, 0xbd, 0x69, 0x06, 0x8c, 0x9d, 0x02, 0x46, + 0x32, 0x60, 0x8f, 0x1a, 0xf0, 0x60, 0xf5, 0x9d, 0xa1, 0x01, 0xde, 0x5f, 0x38, 0xe6, 0x00, 0x8d, + 0x4f, 0xf3, 0x9f, 0x22, 0x28, 0x04, 0x87, 0x6c, 0xd3, 0x34, 0x1f, 0x3b, 0x8e, 0x2b, 0x5f, 0x18, + 0xa9, 0x41, 0xcd, 0x5e, 0xdf, 0x3e, 0x71, 0x91, 0x5d, 0x5d, 0x10, 0xd9, 0x9f, 0x23, 0x78, 0x6d, + 0x64, 0x36, 0x84, 0xa7, 0x3c, 0x19, 0x26, 0xcd, 0xea, 0xca, 0xb7, 0xe6, 0xda, 0xc3, 0x51, 0xde, + 0xa2, 0xda, 0xaf, 0x89, 0x77, 0x7d, 0xbe, 0xe6, 0xda, 0x4e, 0xb0, 0xe0, 0x74, 0x68, 0x42, 0xe3, + 0x90, 0xcb, 0x6d, 0xa0, 0xea, 0x0d, 0x84, 0xff, 0x82, 0xa0, 0x18, 0x9e, 0xc6, 0xe0, 0x9b, 0x53, + 0xb4, 0x1a, 0x3f, 0x38, 0x2a, 0xaf, 0xce, 0xb3, 0x85, 0xdb, 0xc1, 0x31, 0xe3, 0x07, 0xfd, 0x3c, + 0x76, 0x84, 0xc4, 0x36, 0x50, 0x15, 0xff, 0x0a, 0x41, 0x8a, 0xcd, 0x64, 0xf0, 0xf5, 0x29, 0x69, + 0xea, 0x1f, 0x1f, 0x95, 0x2b, 0xb3, 0x19, 0xb9, 0xbe, 0x9b, 0x54, 0xdf, 0x7b, 0xe2, 0x9d, 0x85, + 0xf4, 0x65, 0x4f, 0x21, 0x47, 0xcb, 0x9f, 0x20, 0x48, 0xbb, 0xe3, 0x19, 0x3c, 0xa5, 0x0f, 0x09, + 0x8d, 0x70, 0x26, 0xa2, 0xf9, 0x6c, 0x78, 0x30, 0xf9, 0x29, 0x8e, 0x66, 0xff, 0x73, 0x34, 0xe3, + 0x43, 0x88, 0xa9, 0x9a, 0x05, 0x47, 0x41, 0xe5, 0x6a, 0x14, 0x56, 0xee, 0xc5, 0x4f, 0x10, 0x55, + 0xf7, 0x87, 0xc8, 0xef, 0xc7, 0x88, 0xc5, 0xee, 0xb4, 0x61, 0x72, 0x71, 0x0d, 0x54, 0x7d, 0xaf, + 0x29, 0xde, 0x5b, 0xb0, 0xce, 0xfb, 0xc5, 0xdc, 0x40, 0x0e, 0x6c, 0x92, 0xf4, 0xd5, 0x8b, 0xaf, + 0x45, 0x7b, 0xf6, 0x97, 0xaf, 0xcf, 0x7e, 0x3e, 0x33, 0x6b, 0x9b, 0xd4, 0xd8, 0x6f, 0x89, 0x8d, + 0x85, 0x62, 0x43, 0x5f, 0xdf, 0x0d, 0x54, 0xad, 0xa0, 0x1b, 0x08, 0xff, 0x1a, 0x41, 0x8a, 0xbd, + 0x07, 0xa7, 0x81, 0x3b, 0xf0, 0x46, 0x9d, 0x06, 0xee, 0xe0, 0xd3, 0xf2, 0x8c, 0xe0, 0xee, 0x52, + 0x61, 0xae, 0xa6, 0x3f, 0x17, 0xe0, 0xb5, 0x91, 0xae, 0x68, 0x5a, 0x7d, 0x9c, 0xd4, 0xb2, 0x95, + 0x6f, 0xcd, 0xb5, 0x87, 0x9b, 0xf2, 0x0b, 0x86, 0xb0, 0x4f, 0x91, 0xb8, 0xb1, 0x00, 0xc2, 0xba, + 0x61, 0xb9, 0x0e, 0xd4, 0x76, 0xc4, 0xed, 0x45, 0xa1, 0x36, 0x4e, 0xde, 0xda, 0x1f, 0x10, 0x5c, + 0x56, 0x8c, 0xde, 0x44, 0xeb, 0xd6, 0x0a, 0x5e, 0xa3, 0xb7, 0xe7, 0x24, 0xfb, 0x1e, 0x7a, 0xef, + 0x3e, 0xe7, 0xed, 0x18, 0x5d, 0x59, 0xef, 0xd4, 0x0c, 0xb3, 0x53, 0xef, 0x10, 0x9d, 0x96, 0x82, + 0x3a, 0xfb, 
0x49, 0xee, 0x6b, 0xd6, 0xe8, 0xbf, 0x06, 0xdc, 0xf1, 0x28, 0xbf, 0x14, 0x12, 0x5b, + 0xeb, 0x9b, 0xfb, 0x9f, 0x09, 0x5f, 0xde, 0x62, 0xa2, 0xd6, 0xbb, 0xc6, 0x40, 0xad, 0x79, 0x27, + 0xd5, 0x9e, 0xdc, 0x5c, 0x73, 0x76, 0xfc, 0xd9, 0x65, 0x78, 0x46, 0x19, 0x9e, 0x79, 0x0c, 0xcf, + 0x9e, 0x30, 0x91, 0x87, 0x29, 0x7a, 0xec, 0xad, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0x53, 0x38, + 0xf0, 0x00, 0x68, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/query.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/query.pb.go new file mode 100644 index 0000000..aa053f1 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/query.pb.go @@ -0,0 +1,1016 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1beta1/query.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A sort direction. +type StructuredQuery_Direction int32 + +const ( + // Unspecified. + StructuredQuery_DIRECTION_UNSPECIFIED StructuredQuery_Direction = 0 + // Ascending. + StructuredQuery_ASCENDING StructuredQuery_Direction = 1 + // Descending. + StructuredQuery_DESCENDING StructuredQuery_Direction = 2 +) + +var StructuredQuery_Direction_name = map[int32]string{ + 0: "DIRECTION_UNSPECIFIED", + 1: "ASCENDING", + 2: "DESCENDING", +} +var StructuredQuery_Direction_value = map[string]int32{ + "DIRECTION_UNSPECIFIED": 0, + "ASCENDING": 1, + "DESCENDING": 2, +} + +func (x StructuredQuery_Direction) String() string { + return proto.EnumName(StructuredQuery_Direction_name, int32(x)) +} +func (StructuredQuery_Direction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 0} +} + +// A composite filter operator. +type StructuredQuery_CompositeFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + StructuredQuery_CompositeFilter_OPERATOR_UNSPECIFIED StructuredQuery_CompositeFilter_Operator = 0 + // The results are required to satisfy each of the combined filters. + StructuredQuery_CompositeFilter_AND StructuredQuery_CompositeFilter_Operator = 1 +) + +var StructuredQuery_CompositeFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "AND", +} +var StructuredQuery_CompositeFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "AND": 1, +} + +func (x StructuredQuery_CompositeFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_CompositeFilter_Operator_name, int32(x)) +} +func (StructuredQuery_CompositeFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 2, 0} +} + +// A field filter operator. +type StructuredQuery_FieldFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. 
+ StructuredQuery_FieldFilter_OPERATOR_UNSPECIFIED StructuredQuery_FieldFilter_Operator = 0 + // Less than. Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_LESS_THAN StructuredQuery_FieldFilter_Operator = 1 + // Less than or equal. Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_LESS_THAN_OR_EQUAL StructuredQuery_FieldFilter_Operator = 2 + // Greater than. Requires that the field come first in `order_by`. + StructuredQuery_FieldFilter_GREATER_THAN StructuredQuery_FieldFilter_Operator = 3 + // Greater than or equal. Requires that the field come first in + // `order_by`. + StructuredQuery_FieldFilter_GREATER_THAN_OR_EQUAL StructuredQuery_FieldFilter_Operator = 4 + // Equal. + StructuredQuery_FieldFilter_EQUAL StructuredQuery_FieldFilter_Operator = 5 + // Contains. Requires that the field is an array. + StructuredQuery_FieldFilter_ARRAY_CONTAINS StructuredQuery_FieldFilter_Operator = 7 +) + +var StructuredQuery_FieldFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 1: "LESS_THAN", + 2: "LESS_THAN_OR_EQUAL", + 3: "GREATER_THAN", + 4: "GREATER_THAN_OR_EQUAL", + 5: "EQUAL", + 7: "ARRAY_CONTAINS", +} +var StructuredQuery_FieldFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "LESS_THAN": 1, + "LESS_THAN_OR_EQUAL": 2, + "GREATER_THAN": 3, + "GREATER_THAN_OR_EQUAL": 4, + "EQUAL": 5, + "ARRAY_CONTAINS": 7, +} + +func (x StructuredQuery_FieldFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_FieldFilter_Operator_name, int32(x)) +} +func (StructuredQuery_FieldFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 3, 0} +} + +// A unary operator. +type StructuredQuery_UnaryFilter_Operator int32 + +const ( + // Unspecified. This value must not be used. + StructuredQuery_UnaryFilter_OPERATOR_UNSPECIFIED StructuredQuery_UnaryFilter_Operator = 0 + // Test if a field is equal to NaN. + StructuredQuery_UnaryFilter_IS_NAN StructuredQuery_UnaryFilter_Operator = 2 + // Test if an expression evaluates to Null. + StructuredQuery_UnaryFilter_IS_NULL StructuredQuery_UnaryFilter_Operator = 3 +) + +var StructuredQuery_UnaryFilter_Operator_name = map[int32]string{ + 0: "OPERATOR_UNSPECIFIED", + 2: "IS_NAN", + 3: "IS_NULL", +} +var StructuredQuery_UnaryFilter_Operator_value = map[string]int32{ + "OPERATOR_UNSPECIFIED": 0, + "IS_NAN": 2, + "IS_NULL": 3, +} + +func (x StructuredQuery_UnaryFilter_Operator) String() string { + return proto.EnumName(StructuredQuery_UnaryFilter_Operator_name, int32(x)) +} +func (StructuredQuery_UnaryFilter_Operator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 5, 0} +} + +// A Firestore query. +type StructuredQuery struct { + // The projection to return. + Select *StructuredQuery_Projection `protobuf:"bytes,1,opt,name=select,proto3" json:"select,omitempty"` + // The collections to query. + From []*StructuredQuery_CollectionSelector `protobuf:"bytes,2,rep,name=from,proto3" json:"from,omitempty"` + // The filter to apply. + Where *StructuredQuery_Filter `protobuf:"bytes,3,opt,name=where,proto3" json:"where,omitempty"` + // The order to apply to the query results. + // + // Firestore guarantees a stable ordering through the following rules: + // + // * Any field required to appear in `order_by`, that is not already + // specified in `order_by`, is appended to the order in field name order + // by default.
+ // * If an order on `__name__` is not specified, it is appended by default. + // + // Fields are appended with the same sort direction as the last order + // specified, or 'ASCENDING' if no order was specified. For example: + // + // * `SELECT * FROM Foo ORDER BY A` becomes + // `SELECT * FROM Foo ORDER BY A, __name__` + // * `SELECT * FROM Foo ORDER BY A DESC` becomes + // `SELECT * FROM Foo ORDER BY A DESC, __name__ DESC` + // * `SELECT * FROM Foo WHERE A > 1` becomes + // `SELECT * FROM Foo WHERE A > 1 ORDER BY A, __name__` + OrderBy []*StructuredQuery_Order `protobuf:"bytes,4,rep,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // A starting point for the query results. + StartAt *Cursor `protobuf:"bytes,7,opt,name=start_at,json=startAt,proto3" json:"start_at,omitempty"` + // An end point for the query results. + EndAt *Cursor `protobuf:"bytes,8,opt,name=end_at,json=endAt,proto3" json:"end_at,omitempty"` + // The number of results to skip. + // + // Applies before limit, but after all other constraints. Must be >= 0 if + // specified. + Offset int32 `protobuf:"varint,6,opt,name=offset,proto3" json:"offset,omitempty"` + // The maximum number of results to return. + // + // Applies after all other constraints. + // Must be >= 0 if specified. + Limit *wrappers.Int32Value `protobuf:"bytes,5,opt,name=limit,proto3" json:"limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery) Reset() { *m = StructuredQuery{} } +func (m *StructuredQuery) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery) ProtoMessage() {} +func (*StructuredQuery) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0} +} +func (m *StructuredQuery) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery.Unmarshal(m, b) +} +func (m *StructuredQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery.Merge(dst, src) +} +func (m *StructuredQuery) XXX_Size() int { + return xxx_messageInfo_StructuredQuery.Size(m) +} +func (m *StructuredQuery) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery proto.InternalMessageInfo + +func (m *StructuredQuery) GetSelect() *StructuredQuery_Projection { + if m != nil { + return m.Select + } + return nil +} + +func (m *StructuredQuery) GetFrom() []*StructuredQuery_CollectionSelector { + if m != nil { + return m.From + } + return nil +} + +func (m *StructuredQuery) GetWhere() *StructuredQuery_Filter { + if m != nil { + return m.Where + } + return nil +} + +func (m *StructuredQuery) GetOrderBy() []*StructuredQuery_Order { + if m != nil { + return m.OrderBy + } + return nil +} + +func (m *StructuredQuery) GetStartAt() *Cursor { + if m != nil { + return m.StartAt + } + return nil +} + +func (m *StructuredQuery) GetEndAt() *Cursor { + if m != nil { + return m.EndAt + } + return nil +} + +func (m *StructuredQuery) GetOffset() int32 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *StructuredQuery) GetLimit() *wrappers.Int32Value { + if m != nil { + return m.Limit + } + return nil +} + +// A selection of a collection, such as `messages as m1`. +type StructuredQuery_CollectionSelector struct { + // The collection ID.
+ // When set, selects only collections with this ID. + CollectionId string `protobuf:"bytes,2,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"` + // When false, selects only collections that are immediate children of + // the `parent` specified in the containing `RunQueryRequest`. + // When true, selects all descendant collections. + AllDescendants bool `protobuf:"varint,3,opt,name=all_descendants,json=allDescendants,proto3" json:"all_descendants,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_CollectionSelector) Reset() { *m = StructuredQuery_CollectionSelector{} } +func (m *StructuredQuery_CollectionSelector) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_CollectionSelector) ProtoMessage() {} +func (*StructuredQuery_CollectionSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 0} +} +func (m *StructuredQuery_CollectionSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Unmarshal(m, b) +} +func (m *StructuredQuery_CollectionSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_CollectionSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_CollectionSelector.Merge(dst, src) +} +func (m *StructuredQuery_CollectionSelector) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_CollectionSelector.Size(m) +} +func (m *StructuredQuery_CollectionSelector) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_CollectionSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_CollectionSelector proto.InternalMessageInfo + +func (m *StructuredQuery_CollectionSelector) GetCollectionId() string { + if m != nil { + return m.CollectionId + } + return "" +} + +func (m *StructuredQuery_CollectionSelector) GetAllDescendants() bool { + if m != nil { + return m.AllDescendants + } + return false +} + +// A filter. +type StructuredQuery_Filter struct { + // The type of filter. 
+ // + // Types that are valid to be assigned to FilterType: + // *StructuredQuery_Filter_CompositeFilter + // *StructuredQuery_Filter_FieldFilter + // *StructuredQuery_Filter_UnaryFilter + FilterType isStructuredQuery_Filter_FilterType `protobuf_oneof:"filter_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Filter) Reset() { *m = StructuredQuery_Filter{} } +func (m *StructuredQuery_Filter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Filter) ProtoMessage() {} +func (*StructuredQuery_Filter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 1} +} +func (m *StructuredQuery_Filter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Filter.Unmarshal(m, b) +} +func (m *StructuredQuery_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Filter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Filter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Filter.Merge(dst, src) +} +func (m *StructuredQuery_Filter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Filter.Size(m) +} +func (m *StructuredQuery_Filter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Filter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Filter proto.InternalMessageInfo + +type isStructuredQuery_Filter_FilterType interface { + isStructuredQuery_Filter_FilterType() +} + +type StructuredQuery_Filter_CompositeFilter struct { + CompositeFilter *StructuredQuery_CompositeFilter `protobuf:"bytes,1,opt,name=composite_filter,json=compositeFilter,proto3,oneof"` +} + +type StructuredQuery_Filter_FieldFilter struct { + FieldFilter *StructuredQuery_FieldFilter `protobuf:"bytes,2,opt,name=field_filter,json=fieldFilter,proto3,oneof"` +} + +type StructuredQuery_Filter_UnaryFilter struct { + UnaryFilter *StructuredQuery_UnaryFilter `protobuf:"bytes,3,opt,name=unary_filter,json=unaryFilter,proto3,oneof"` +} + +func (*StructuredQuery_Filter_CompositeFilter) isStructuredQuery_Filter_FilterType() {} + +func (*StructuredQuery_Filter_FieldFilter) isStructuredQuery_Filter_FilterType() {} + +func (*StructuredQuery_Filter_UnaryFilter) isStructuredQuery_Filter_FilterType() {} + +func (m *StructuredQuery_Filter) GetFilterType() isStructuredQuery_Filter_FilterType { + if m != nil { + return m.FilterType + } + return nil +} + +func (m *StructuredQuery_Filter) GetCompositeFilter() *StructuredQuery_CompositeFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_CompositeFilter); ok { + return x.CompositeFilter + } + return nil +} + +func (m *StructuredQuery_Filter) GetFieldFilter() *StructuredQuery_FieldFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_FieldFilter); ok { + return x.FieldFilter + } + return nil +} + +func (m *StructuredQuery_Filter) GetUnaryFilter() *StructuredQuery_UnaryFilter { + if x, ok := m.GetFilterType().(*StructuredQuery_Filter_UnaryFilter); ok { + return x.UnaryFilter + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StructuredQuery_Filter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StructuredQuery_Filter_OneofMarshaler, _StructuredQuery_Filter_OneofUnmarshaler, _StructuredQuery_Filter_OneofSizer, []interface{}{ + (*StructuredQuery_Filter_CompositeFilter)(nil), + (*StructuredQuery_Filter_FieldFilter)(nil), + (*StructuredQuery_Filter_UnaryFilter)(nil), + } +} + +func _StructuredQuery_Filter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StructuredQuery_Filter) + // filter_type + switch x := m.FilterType.(type) { + case *StructuredQuery_Filter_CompositeFilter: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CompositeFilter); err != nil { + return err + } + case *StructuredQuery_Filter_FieldFilter: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FieldFilter); err != nil { + return err + } + case *StructuredQuery_Filter_UnaryFilter: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UnaryFilter); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StructuredQuery_Filter.FilterType has unexpected type %T", x) + } + return nil +} + +func _StructuredQuery_Filter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StructuredQuery_Filter) + switch tag { + case 1: // filter_type.composite_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_CompositeFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_CompositeFilter{msg} + return true, err + case 2: // filter_type.field_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_FieldFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_FieldFilter{msg} + return true, err + case 3: // filter_type.unary_filter + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_UnaryFilter) + err := b.DecodeMessage(msg) + m.FilterType = &StructuredQuery_Filter_UnaryFilter{msg} + return true, err + default: + return false, nil + } +} + +func _StructuredQuery_Filter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StructuredQuery_Filter) + // filter_type + switch x := m.FilterType.(type) { + case *StructuredQuery_Filter_CompositeFilter: + s := proto.Size(x.CompositeFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StructuredQuery_Filter_FieldFilter: + s := proto.Size(x.FieldFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StructuredQuery_Filter_UnaryFilter: + s := proto.Size(x.UnaryFilter) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A filter that merges multiple other filters using the given operator. +type StructuredQuery_CompositeFilter struct { + // The operator for combining multiple filters. + Op StructuredQuery_CompositeFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.firestore.v1beta1.StructuredQuery_CompositeFilter_Operator" json:"op,omitempty"` + // The list of filters to combine. + // Must contain at least one filter. 
+ Filters []*StructuredQuery_Filter `protobuf:"bytes,2,rep,name=filters,proto3" json:"filters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_CompositeFilter) Reset() { *m = StructuredQuery_CompositeFilter{} } +func (m *StructuredQuery_CompositeFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_CompositeFilter) ProtoMessage() {} +func (*StructuredQuery_CompositeFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 2} +} +func (m *StructuredQuery_CompositeFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_CompositeFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_CompositeFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_CompositeFilter.Merge(dst, src) +} +func (m *StructuredQuery_CompositeFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_CompositeFilter.Size(m) +} +func (m *StructuredQuery_CompositeFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_CompositeFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_CompositeFilter proto.InternalMessageInfo + +func (m *StructuredQuery_CompositeFilter) GetOp() StructuredQuery_CompositeFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_CompositeFilter_OPERATOR_UNSPECIFIED +} + +func (m *StructuredQuery_CompositeFilter) GetFilters() []*StructuredQuery_Filter { + if m != nil { + return m.Filters + } + return nil +} + +// A filter on a specific field. +type StructuredQuery_FieldFilter struct { + // The field to filter by. + Field *StructuredQuery_FieldReference `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // The operator to filter by. + Op StructuredQuery_FieldFilter_Operator `protobuf:"varint,2,opt,name=op,proto3,enum=google.firestore.v1beta1.StructuredQuery_FieldFilter_Operator" json:"op,omitempty"` + // The value to compare to. 
+ Value *Value `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_FieldFilter) Reset() { *m = StructuredQuery_FieldFilter{} } +func (m *StructuredQuery_FieldFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_FieldFilter) ProtoMessage() {} +func (*StructuredQuery_FieldFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 3} +} +func (m *StructuredQuery_FieldFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_FieldFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_FieldFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_FieldFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_FieldFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_FieldFilter.Merge(dst, src) +} +func (m *StructuredQuery_FieldFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_FieldFilter.Size(m) +} +func (m *StructuredQuery_FieldFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_FieldFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_FieldFilter proto.InternalMessageInfo + +func (m *StructuredQuery_FieldFilter) GetField() *StructuredQuery_FieldReference { + if m != nil { + return m.Field + } + return nil +} + +func (m *StructuredQuery_FieldFilter) GetOp() StructuredQuery_FieldFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_FieldFilter_OPERATOR_UNSPECIFIED +} + +func (m *StructuredQuery_FieldFilter) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +// The projection of document's fields to return. +type StructuredQuery_Projection struct { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. 
+ Fields []*StructuredQuery_FieldReference `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Projection) Reset() { *m = StructuredQuery_Projection{} } +func (m *StructuredQuery_Projection) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Projection) ProtoMessage() {} +func (*StructuredQuery_Projection) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 4} +} +func (m *StructuredQuery_Projection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Projection.Unmarshal(m, b) +} +func (m *StructuredQuery_Projection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Projection.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Projection) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Projection.Merge(dst, src) +} +func (m *StructuredQuery_Projection) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Projection.Size(m) +} +func (m *StructuredQuery_Projection) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Projection.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Projection proto.InternalMessageInfo + +func (m *StructuredQuery_Projection) GetFields() []*StructuredQuery_FieldReference { + if m != nil { + return m.Fields + } + return nil +} + +// A filter with a single operand. +type StructuredQuery_UnaryFilter struct { + // The unary operator to apply. + Op StructuredQuery_UnaryFilter_Operator `protobuf:"varint,1,opt,name=op,proto3,enum=google.firestore.v1beta1.StructuredQuery_UnaryFilter_Operator" json:"op,omitempty"` + // The argument to the filter. 
+ // + // Types that are valid to be assigned to OperandType: + // *StructuredQuery_UnaryFilter_Field + OperandType isStructuredQuery_UnaryFilter_OperandType `protobuf_oneof:"operand_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_UnaryFilter) Reset() { *m = StructuredQuery_UnaryFilter{} } +func (m *StructuredQuery_UnaryFilter) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_UnaryFilter) ProtoMessage() {} +func (*StructuredQuery_UnaryFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 5} +} +func (m *StructuredQuery_UnaryFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Unmarshal(m, b) +} +func (m *StructuredQuery_UnaryFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_UnaryFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_UnaryFilter.Merge(dst, src) +} +func (m *StructuredQuery_UnaryFilter) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_UnaryFilter.Size(m) +} +func (m *StructuredQuery_UnaryFilter) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_UnaryFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_UnaryFilter proto.InternalMessageInfo + +func (m *StructuredQuery_UnaryFilter) GetOp() StructuredQuery_UnaryFilter_Operator { + if m != nil { + return m.Op + } + return StructuredQuery_UnaryFilter_OPERATOR_UNSPECIFIED +} + +type isStructuredQuery_UnaryFilter_OperandType interface { + isStructuredQuery_UnaryFilter_OperandType() +} + +type StructuredQuery_UnaryFilter_Field struct { + Field *StructuredQuery_FieldReference `protobuf:"bytes,2,opt,name=field,proto3,oneof"` +} + +func (*StructuredQuery_UnaryFilter_Field) isStructuredQuery_UnaryFilter_OperandType() {} + +func (m *StructuredQuery_UnaryFilter) GetOperandType() isStructuredQuery_UnaryFilter_OperandType { + if m != nil { + return m.OperandType + } + return nil +} + +func (m *StructuredQuery_UnaryFilter) GetField() *StructuredQuery_FieldReference { + if x, ok := m.GetOperandType().(*StructuredQuery_UnaryFilter_Field); ok { + return x.Field + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StructuredQuery_UnaryFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StructuredQuery_UnaryFilter_OneofMarshaler, _StructuredQuery_UnaryFilter_OneofUnmarshaler, _StructuredQuery_UnaryFilter_OneofSizer, []interface{}{ + (*StructuredQuery_UnaryFilter_Field)(nil), + } +} + +func _StructuredQuery_UnaryFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StructuredQuery_UnaryFilter) + // operand_type + switch x := m.OperandType.(type) { + case *StructuredQuery_UnaryFilter_Field: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Field); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StructuredQuery_UnaryFilter.OperandType has unexpected type %T", x) + } + return nil +} + +func _StructuredQuery_UnaryFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StructuredQuery_UnaryFilter) + switch tag { + case 2: // operand_type.field + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StructuredQuery_FieldReference) + err := b.DecodeMessage(msg) + m.OperandType = &StructuredQuery_UnaryFilter_Field{msg} + return true, err + default: + return false, nil + } +} + +func _StructuredQuery_UnaryFilter_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StructuredQuery_UnaryFilter) + // operand_type + switch x := m.OperandType.(type) { + case *StructuredQuery_UnaryFilter_Field: + s := proto.Size(x.Field) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An order on a field. +type StructuredQuery_Order struct { + // The field to order by. + Field *StructuredQuery_FieldReference `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // The direction to order by. Defaults to `ASCENDING`. 
+ Direction StructuredQuery_Direction `protobuf:"varint,2,opt,name=direction,proto3,enum=google.firestore.v1beta1.StructuredQuery_Direction" json:"direction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_Order) Reset() { *m = StructuredQuery_Order{} } +func (m *StructuredQuery_Order) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_Order) ProtoMessage() {} +func (*StructuredQuery_Order) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 6} +} +func (m *StructuredQuery_Order) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_Order.Unmarshal(m, b) +} +func (m *StructuredQuery_Order) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_Order.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_Order) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_Order.Merge(dst, src) +} +func (m *StructuredQuery_Order) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_Order.Size(m) +} +func (m *StructuredQuery_Order) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_Order.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_Order proto.InternalMessageInfo + +func (m *StructuredQuery_Order) GetField() *StructuredQuery_FieldReference { + if m != nil { + return m.Field + } + return nil +} + +func (m *StructuredQuery_Order) GetDirection() StructuredQuery_Direction { + if m != nil { + return m.Direction + } + return StructuredQuery_DIRECTION_UNSPECIFIED +} + +// A reference to a field, such as `max(messages.time) as max_time`. +type StructuredQuery_FieldReference struct { + FieldPath string `protobuf:"bytes,2,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructuredQuery_FieldReference) Reset() { *m = StructuredQuery_FieldReference{} } +func (m *StructuredQuery_FieldReference) String() string { return proto.CompactTextString(m) } +func (*StructuredQuery_FieldReference) ProtoMessage() {} +func (*StructuredQuery_FieldReference) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{0, 7} +} +func (m *StructuredQuery_FieldReference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructuredQuery_FieldReference.Unmarshal(m, b) +} +func (m *StructuredQuery_FieldReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructuredQuery_FieldReference.Marshal(b, m, deterministic) +} +func (dst *StructuredQuery_FieldReference) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructuredQuery_FieldReference.Merge(dst, src) +} +func (m *StructuredQuery_FieldReference) XXX_Size() int { + return xxx_messageInfo_StructuredQuery_FieldReference.Size(m) +} +func (m *StructuredQuery_FieldReference) XXX_DiscardUnknown() { + xxx_messageInfo_StructuredQuery_FieldReference.DiscardUnknown(m) +} + +var xxx_messageInfo_StructuredQuery_FieldReference proto.InternalMessageInfo + +func (m *StructuredQuery_FieldReference) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +// A position in a query result set. +type Cursor struct { + // The values that represent a position, in the order they appear in + // the order by clause of a query. 
+ // + // Can contain fewer values than specified in the order by clause. + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + // If the position is just before or just after the given values, relative + // to the sort order defined by the query. + Before bool `protobuf:"varint,2,opt,name=before,proto3" json:"before,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cursor) Reset() { *m = Cursor{} } +func (m *Cursor) String() string { return proto.CompactTextString(m) } +func (*Cursor) ProtoMessage() {} +func (*Cursor) Descriptor() ([]byte, []int) { + return fileDescriptor_query_849f012e957b5105, []int{1} +} +func (m *Cursor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cursor.Unmarshal(m, b) +} +func (m *Cursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cursor.Marshal(b, m, deterministic) +} +func (dst *Cursor) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cursor.Merge(dst, src) +} +func (m *Cursor) XXX_Size() int { + return xxx_messageInfo_Cursor.Size(m) +} +func (m *Cursor) XXX_DiscardUnknown() { + xxx_messageInfo_Cursor.DiscardUnknown(m) +} + +var xxx_messageInfo_Cursor proto.InternalMessageInfo + +func (m *Cursor) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +func (m *Cursor) GetBefore() bool { + if m != nil { + return m.Before + } + return false +} + +func init() { + proto.RegisterType((*StructuredQuery)(nil), "google.firestore.v1beta1.StructuredQuery") + proto.RegisterType((*StructuredQuery_CollectionSelector)(nil), "google.firestore.v1beta1.StructuredQuery.CollectionSelector") + proto.RegisterType((*StructuredQuery_Filter)(nil), "google.firestore.v1beta1.StructuredQuery.Filter") + proto.RegisterType((*StructuredQuery_CompositeFilter)(nil), "google.firestore.v1beta1.StructuredQuery.CompositeFilter") + proto.RegisterType((*StructuredQuery_FieldFilter)(nil), "google.firestore.v1beta1.StructuredQuery.FieldFilter") + proto.RegisterType((*StructuredQuery_Projection)(nil), "google.firestore.v1beta1.StructuredQuery.Projection") + proto.RegisterType((*StructuredQuery_UnaryFilter)(nil), "google.firestore.v1beta1.StructuredQuery.UnaryFilter") + proto.RegisterType((*StructuredQuery_Order)(nil), "google.firestore.v1beta1.StructuredQuery.Order") + proto.RegisterType((*StructuredQuery_FieldReference)(nil), "google.firestore.v1beta1.StructuredQuery.FieldReference") + proto.RegisterType((*Cursor)(nil), "google.firestore.v1beta1.Cursor") + proto.RegisterEnum("google.firestore.v1beta1.StructuredQuery_Direction", StructuredQuery_Direction_name, StructuredQuery_Direction_value) + proto.RegisterEnum("google.firestore.v1beta1.StructuredQuery_CompositeFilter_Operator", StructuredQuery_CompositeFilter_Operator_name, StructuredQuery_CompositeFilter_Operator_value) + proto.RegisterEnum("google.firestore.v1beta1.StructuredQuery_FieldFilter_Operator", StructuredQuery_FieldFilter_Operator_name, StructuredQuery_FieldFilter_Operator_value) + proto.RegisterEnum("google.firestore.v1beta1.StructuredQuery_UnaryFilter_Operator", StructuredQuery_UnaryFilter_Operator_name, StructuredQuery_UnaryFilter_Operator_value) +} + +func init() { + proto.RegisterFile("google/firestore/v1beta1/query.proto", fileDescriptor_query_849f012e957b5105) +} + +var fileDescriptor_query_849f012e957b5105 = []byte{ + // 984 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 
0x96, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0xc7, 0x6b, 0xa7, 0xf9, 0x3a, 0x69, 0xd3, 0x30, 0x82, 0x95, 0x09, 0xcb, 0x52, 0x05, 0xa4, + 0xed, 0x0d, 0x0e, 0x6d, 0x59, 0x01, 0x5a, 0x40, 0x72, 0x12, 0xb7, 0xcd, 0xaa, 0x72, 0xd2, 0x49, + 0x5b, 0x69, 0x57, 0x15, 0x96, 0x63, 0x8f, 0x53, 0x23, 0xd7, 0x63, 0xc6, 0xe3, 0x5d, 0xf5, 0x25, + 0xb8, 0xe5, 0x9e, 0xcb, 0x7d, 0x01, 0x78, 0x06, 0x9e, 0x82, 0x27, 0x59, 0x21, 0x8f, 0xc7, 0x49, + 0xbb, 0x55, 0x45, 0x52, 0xb8, 0xcb, 0x9c, 0x39, 0xe7, 0x77, 0xa6, 0xe7, 0xfc, 0x8f, 0x4f, 0xe1, + 0x8b, 0x19, 0xa5, 0xb3, 0x90, 0x74, 0xfd, 0x80, 0x91, 0x84, 0x53, 0x46, 0xba, 0xaf, 0x77, 0xa7, + 0x84, 0x3b, 0xbb, 0xdd, 0x5f, 0x52, 0xc2, 0xae, 0xf5, 0x98, 0x51, 0x4e, 0x91, 0x96, 0x7b, 0xe9, + 0x73, 0x2f, 0x5d, 0x7a, 0xb5, 0x9f, 0xde, 0x1b, 0xef, 0x51, 0x37, 0xbd, 0x22, 0x11, 0xcf, 0x11, + 0xed, 0x27, 0xd2, 0x51, 0x9c, 0xa6, 0xa9, 0xdf, 0x7d, 0xc3, 0x9c, 0x38, 0x26, 0x2c, 0x91, 0xf7, + 0x8f, 0xe5, 0xbd, 0x13, 0x07, 0x5d, 0x27, 0x8a, 0x28, 0x77, 0x78, 0x40, 0x23, 0x79, 0xdb, 0xf9, + 0xf5, 0x03, 0xd8, 0x9a, 0x70, 0x96, 0xba, 0x3c, 0x65, 0xc4, 0x3b, 0xc9, 0x9e, 0x86, 0x8e, 0xa1, + 0x92, 0x90, 0x90, 0xb8, 0x5c, 0x53, 0xb6, 0x95, 0x9d, 0xc6, 0xde, 0xd7, 0xfa, 0x7d, 0xaf, 0xd4, + 0xdf, 0x0b, 0xd5, 0xc7, 0x8c, 0xfe, 0x4c, 0xdc, 0x2c, 0x01, 0x96, 0x0c, 0x34, 0x86, 0x75, 0x9f, + 0xd1, 0x2b, 0x4d, 0xdd, 0x2e, 0xed, 0x34, 0xf6, 0xbe, 0x5f, 0x9e, 0xd5, 0xa7, 0x61, 0x98, 0xb3, + 0x26, 0x82, 0x44, 0x19, 0x16, 0x24, 0x74, 0x00, 0xe5, 0x37, 0x97, 0x84, 0x11, 0xad, 0x24, 0x9e, + 0xf7, 0xd5, 0xf2, 0xc8, 0x83, 0x20, 0xe4, 0x84, 0xe1, 0x3c, 0x1c, 0xbd, 0x80, 0x1a, 0x65, 0x1e, + 0x61, 0xf6, 0xf4, 0x5a, 0x5b, 0x17, 0xaf, 0xeb, 0x2e, 0x8f, 0x1a, 0x65, 0x91, 0xb8, 0x2a, 0x00, + 0xbd, 0x6b, 0xf4, 0x1c, 0x6a, 0x09, 0x77, 0x18, 0xb7, 0x1d, 0xae, 0x55, 0xc5, 0xb3, 0xb6, 0xef, + 0x67, 0xf5, 0x53, 0x96, 0x50, 0x86, 0xab, 0x22, 0xc2, 0xe0, 0xe8, 0x1b, 0xa8, 0x90, 0xc8, 0xcb, + 0x42, 0x6b, 0x4b, 0x86, 0x96, 0x49, 0xe4, 0x19, 0x1c, 0x3d, 0x82, 0x0a, 0xf5, 0xfd, 0x84, 0x70, + 0xad, 0xb2, 0xad, 0xec, 0x94, 0xb1, 0x3c, 0xa1, 0x5d, 0x28, 0x87, 0xc1, 0x55, 0xc0, 0xb5, 0xb2, + 0xe0, 0x7d, 0x52, 0xf0, 0x0a, 0x8d, 0xe8, 0xc3, 0x88, 0xef, 0xef, 0x9d, 0x3b, 0x61, 0x4a, 0x70, + 0xee, 0xd9, 0x9e, 0x02, 0xba, 0x5b, 0x70, 0xf4, 0x39, 0x6c, 0xba, 0x73, 0xab, 0x1d, 0x78, 0x9a, + 0xba, 0xad, 0xec, 0xd4, 0xf1, 0xc6, 0xc2, 0x38, 0xf4, 0xd0, 0x53, 0xd8, 0x72, 0xc2, 0xd0, 0xf6, + 0x48, 0xe2, 0x92, 0xc8, 0x73, 0x22, 0x9e, 0x88, 0xce, 0xd4, 0x70, 0xd3, 0x09, 0xc3, 0xc1, 0xc2, + 0xda, 0xfe, 0x43, 0x85, 0x4a, 0xde, 0x02, 0xe4, 0x43, 0xcb, 0xa5, 0x57, 0x31, 0x4d, 0x02, 0x4e, + 0x6c, 0x5f, 0xd8, 0xa4, 0xda, 0xbe, 0x5b, 0x45, 0x21, 0x92, 0x90, 0x43, 0x8f, 0xd6, 0xf0, 0x96, + 0x7b, 0xdb, 0x84, 0x5e, 0xc1, 0x86, 0x1f, 0x90, 0xd0, 0x2b, 0x72, 0xa8, 0x22, 0xc7, 0xb3, 0x55, + 0x24, 0x43, 0x42, 0x6f, 0xce, 0x6f, 0xf8, 0x8b, 0x63, 0xc6, 0x4e, 0x23, 0x87, 0x5d, 0x17, 0xec, + 0xd2, 0xaa, 0xec, 0xb3, 0x2c, 0x7a, 0xc1, 0x4e, 0x17, 0xc7, 0xde, 0x26, 0x34, 0x72, 0xaa, 0xcd, + 0xaf, 0x63, 0xd2, 0xfe, 0x5b, 0x81, 0xad, 0xf7, 0xfe, 0x5a, 0x84, 0x41, 0xa5, 0xb1, 0x28, 0x5a, + 0x73, 0xaf, 0xf7, 0xe0, 0xa2, 0xe9, 0xa3, 0x98, 0x30, 0x27, 0x1b, 0x2e, 0x95, 0xc6, 0xe8, 0x05, + 0x54, 0xf3, 0xb4, 0x89, 0x9c, 0xd7, 0xd5, 0x87, 0xab, 0x00, 0x74, 0xbe, 0x84, 0x5a, 0xc1, 0x46, + 0x1a, 0x7c, 0x38, 0x1a, 0x9b, 0xd8, 0x38, 0x1d, 0x61, 0xfb, 0xcc, 0x9a, 0x8c, 0xcd, 0xfe, 0xf0, + 0x60, 0x68, 0x0e, 0x5a, 0x6b, 0xa8, 0x0a, 0x25, 0xc3, 0x1a, 0xb4, 0x94, 0xf6, 0x3b, 0x15, 0x1a, + 0x37, 0x8a, 0x8d, 0x2c, 0x28, 0x8b, 0x62, 0x4b, 0x59, 0x7c, 0xbb, 0x62, 0xcb, 0x30, 0xf1, 
0x09, + 0x23, 0x91, 0x4b, 0x70, 0x8e, 0x41, 0x96, 0x28, 0x97, 0x2a, 0xca, 0xf5, 0xe3, 0x83, 0xfa, 0x7f, + 0xbb, 0x54, 0xcf, 0xa0, 0xfc, 0x3a, 0x1b, 0x20, 0xd9, 0xf6, 0xcf, 0xee, 0x47, 0xca, 0x39, 0x13, + 0xde, 0x9d, 0xdf, 0x94, 0xa5, 0xca, 0xb2, 0x09, 0xf5, 0x63, 0x73, 0x32, 0xb1, 0x4f, 0x8f, 0x0c, + 0xab, 0xa5, 0xa0, 0x47, 0x80, 0xe6, 0x47, 0x7b, 0x84, 0x6d, 0xf3, 0xe4, 0xcc, 0x38, 0x6e, 0xa9, + 0xa8, 0x05, 0x1b, 0x87, 0xd8, 0x34, 0x4e, 0x4d, 0x9c, 0x7b, 0x96, 0xd0, 0xc7, 0xf0, 0xd1, 0x4d, + 0xcb, 0xc2, 0x79, 0x1d, 0xd5, 0xa1, 0x9c, 0xff, 0x2c, 0x23, 0x04, 0x4d, 0x03, 0x63, 0xe3, 0xa5, + 0xdd, 0x1f, 0x59, 0xa7, 0xc6, 0xd0, 0x9a, 0xb4, 0xaa, 0xed, 0x9f, 0x00, 0x16, 0x9f, 0x6f, 0x34, + 0x86, 0x8a, 0xa8, 0x5b, 0x21, 0x84, 0x87, 0xd7, 0x5f, 0x72, 0xda, 0xef, 0x14, 0x68, 0xdc, 0x50, + 0xbc, 0x6c, 0x88, 0xb2, 0x6a, 0x43, 0x6e, 0x20, 0x6e, 0x37, 0x64, 0x5c, 0x08, 0x46, 0xfd, 0x6f, + 0x82, 0x39, 0x5a, 0x93, 0x92, 0xe9, 0xfc, 0xb0, 0x54, 0xab, 0x00, 0x2a, 0xc3, 0x89, 0x6d, 0x19, + 0x56, 0x4b, 0x45, 0x0d, 0xa8, 0x66, 0xbf, 0xcf, 0x8e, 0x8f, 0x5b, 0xa5, 0x5e, 0x13, 0x36, 0x68, + 0x16, 0x1e, 0x79, 0xf9, 0x10, 0xbf, 0x55, 0xa0, 0x2c, 0xd6, 0xc6, 0xff, 0xae, 0xed, 0x13, 0xa8, + 0x7b, 0x01, 0xcb, 0x3b, 0x27, 0x25, 0xbe, 0xbf, 0x3c, 0x73, 0x50, 0x84, 0xe2, 0x05, 0xa5, 0xdd, + 0x85, 0xe6, 0xed, 0x5c, 0xe8, 0x53, 0x80, 0xfc, 0x53, 0x1a, 0x3b, 0xfc, 0x52, 0x2e, 0x82, 0xba, + 0xb0, 0x8c, 0x1d, 0x7e, 0xd9, 0x31, 0xa1, 0x3e, 0x07, 0x65, 0x2a, 0x1c, 0x0c, 0xb1, 0xd9, 0x3f, + 0x1d, 0x8e, 0xac, 0xbb, 0xca, 0x36, 0x26, 0x7d, 0xd3, 0x1a, 0x0c, 0xad, 0xc3, 0x96, 0x82, 0x9a, + 0x00, 0x03, 0x73, 0x7e, 0x56, 0x3b, 0x2f, 0xa1, 0x92, 0xef, 0xb8, 0x6c, 0x2b, 0x8a, 0x91, 0x49, + 0x34, 0x45, 0x28, 0xf0, 0x5f, 0x27, 0x4c, 0xba, 0x67, 0x5b, 0x71, 0x4a, 0x7c, 0xca, 0x88, 0x78, + 0x64, 0x0d, 0xcb, 0x53, 0xef, 0x4f, 0x05, 0x1e, 0xbb, 0xf4, 0xea, 0x5e, 0x4c, 0x0f, 0x44, 0x3d, + 0xc6, 0xd9, 0x92, 0x1c, 0x2b, 0xaf, 0x0c, 0xe9, 0x37, 0xa3, 0xa1, 0x13, 0xcd, 0x74, 0xca, 0x66, + 0xdd, 0x19, 0x89, 0xc4, 0x0a, 0xed, 0xe6, 0x57, 0x4e, 0x1c, 0x24, 0x77, 0xff, 0x41, 0x7b, 0x3e, + 0xb7, 0xfc, 0xae, 0xae, 0x1f, 0xf6, 0x0f, 0x26, 0x6f, 0xd5, 0x27, 0x87, 0x39, 0xaa, 0x1f, 0xd2, + 0xd4, 0xd3, 0x0f, 0xe6, 0x89, 0xcf, 0x77, 0x7b, 0x59, 0xc4, 0x5f, 0x85, 0xc3, 0x85, 0x70, 0xb8, + 0x98, 0x3b, 0x5c, 0x9c, 0xe7, 0xc8, 0x69, 0x45, 0xa4, 0xdd, 0xff, 0x27, 0x00, 0x00, 0xff, 0xff, + 0x4e, 0xa5, 0xf8, 0x45, 0x56, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/write.pb.go b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/write.pb.go new file mode 100644 index 0000000..4595741 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/firestore/v1beta1/write.pb.go @@ -0,0 +1,974 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/firestore/v1beta1/write.proto + +package firestore // import "google.golang.org/genproto/googleapis/firestore/v1beta1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A value that is calculated by the server. +type DocumentTransform_FieldTransform_ServerValue int32 + +const ( + // Unspecified. This value must not be used. + DocumentTransform_FieldTransform_SERVER_VALUE_UNSPECIFIED DocumentTransform_FieldTransform_ServerValue = 0 + // The time at which the server processed the request, with millisecond + // precision. + DocumentTransform_FieldTransform_REQUEST_TIME DocumentTransform_FieldTransform_ServerValue = 1 +) + +var DocumentTransform_FieldTransform_ServerValue_name = map[int32]string{ + 0: "SERVER_VALUE_UNSPECIFIED", + 1: "REQUEST_TIME", +} +var DocumentTransform_FieldTransform_ServerValue_value = map[string]int32{ + "SERVER_VALUE_UNSPECIFIED": 0, + "REQUEST_TIME": 1, +} + +func (x DocumentTransform_FieldTransform_ServerValue) String() string { + return proto.EnumName(DocumentTransform_FieldTransform_ServerValue_name, int32(x)) +} +func (DocumentTransform_FieldTransform_ServerValue) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{1, 0, 0} +} + +// A write on a document. +type Write struct { + // The operation to execute. + // + // Types that are valid to be assigned to Operation: + // *Write_Update + // *Write_Delete + // *Write_Transform + Operation isWrite_Operation `protobuf_oneof:"operation"` + // The fields to update in this write. + // + // This field can be set only when the operation is `update`. + // If the mask is not set for an `update` and the document exists, any + // existing data will be overwritten. + // If the mask is set and the document on the server has fields not covered by + // the mask, they are left unchanged. + // Fields referenced in the mask, but not present in the input document, are + // deleted from the document on the server. + // The field paths in this mask must not contain a reserved field name. + UpdateMask *DocumentMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // An optional precondition on the document. + // + // The write will fail if this is set and not met by the target document. 
+ CurrentDocument *Precondition `protobuf:"bytes,4,opt,name=current_document,json=currentDocument,proto3" json:"current_document,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Write) Reset() { *m = Write{} } +func (m *Write) String() string { return proto.CompactTextString(m) } +func (*Write) ProtoMessage() {} +func (*Write) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{0} +} +func (m *Write) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Write.Unmarshal(m, b) +} +func (m *Write) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Write.Marshal(b, m, deterministic) +} +func (dst *Write) XXX_Merge(src proto.Message) { + xxx_messageInfo_Write.Merge(dst, src) +} +func (m *Write) XXX_Size() int { + return xxx_messageInfo_Write.Size(m) +} +func (m *Write) XXX_DiscardUnknown() { + xxx_messageInfo_Write.DiscardUnknown(m) +} + +var xxx_messageInfo_Write proto.InternalMessageInfo + +type isWrite_Operation interface { + isWrite_Operation() +} + +type Write_Update struct { + Update *Document `protobuf:"bytes,1,opt,name=update,proto3,oneof"` +} + +type Write_Delete struct { + Delete string `protobuf:"bytes,2,opt,name=delete,proto3,oneof"` +} + +type Write_Transform struct { + Transform *DocumentTransform `protobuf:"bytes,6,opt,name=transform,proto3,oneof"` +} + +func (*Write_Update) isWrite_Operation() {} + +func (*Write_Delete) isWrite_Operation() {} + +func (*Write_Transform) isWrite_Operation() {} + +func (m *Write) GetOperation() isWrite_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Write) GetUpdate() *Document { + if x, ok := m.GetOperation().(*Write_Update); ok { + return x.Update + } + return nil +} + +func (m *Write) GetDelete() string { + if x, ok := m.GetOperation().(*Write_Delete); ok { + return x.Delete + } + return "" +} + +func (m *Write) GetTransform() *DocumentTransform { + if x, ok := m.GetOperation().(*Write_Transform); ok { + return x.Transform + } + return nil +} + +func (m *Write) GetUpdateMask() *DocumentMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *Write) GetCurrentDocument() *Precondition { + if m != nil { + return m.CurrentDocument + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Write) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Write_OneofMarshaler, _Write_OneofUnmarshaler, _Write_OneofSizer, []interface{}{ + (*Write_Update)(nil), + (*Write_Delete)(nil), + (*Write_Transform)(nil), + } +} + +func _Write_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Write) + // operation + switch x := m.Operation.(type) { + case *Write_Update: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *Write_Delete: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Delete) + case *Write_Transform: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Transform); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Write.Operation has unexpected type %T", x) + } + return nil +} + +func _Write_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Write) + switch tag { + case 1: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Document) + err := b.DecodeMessage(msg) + m.Operation = &Write_Update{msg} + return true, err + case 2: // operation.delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Operation = &Write_Delete{x} + return true, err + case 6: // operation.transform + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentTransform) + err := b.DecodeMessage(msg) + m.Operation = &Write_Transform{msg} + return true, err + default: + return false, nil + } +} + +func _Write_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Write) + // operation + switch x := m.Operation.(type) { + case *Write_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Write_Delete: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Delete))) + n += len(x.Delete) + case *Write_Transform: + s := proto.Size(x.Transform) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A transformation of a document. +type DocumentTransform struct { + // The name of the document to transform. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // The list of transformations to apply to the fields of the document, in + // order. + // This must not be empty. 
+ FieldTransforms []*DocumentTransform_FieldTransform `protobuf:"bytes,2,rep,name=field_transforms,json=fieldTransforms,proto3" json:"field_transforms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentTransform) Reset() { *m = DocumentTransform{} } +func (m *DocumentTransform) String() string { return proto.CompactTextString(m) } +func (*DocumentTransform) ProtoMessage() {} +func (*DocumentTransform) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{1} +} +func (m *DocumentTransform) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentTransform.Unmarshal(m, b) +} +func (m *DocumentTransform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentTransform.Marshal(b, m, deterministic) +} +func (dst *DocumentTransform) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentTransform.Merge(dst, src) +} +func (m *DocumentTransform) XXX_Size() int { + return xxx_messageInfo_DocumentTransform.Size(m) +} +func (m *DocumentTransform) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentTransform.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentTransform proto.InternalMessageInfo + +func (m *DocumentTransform) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentTransform) GetFieldTransforms() []*DocumentTransform_FieldTransform { + if m != nil { + return m.FieldTransforms + } + return nil +} + +// A transformation of a field of the document. +type DocumentTransform_FieldTransform struct { + // The path of the field. See [Document.fields][google.firestore.v1beta1.Document.fields] for the field path syntax + // reference. + FieldPath string `protobuf:"bytes,1,opt,name=field_path,json=fieldPath,proto3" json:"field_path,omitempty"` + // The transformation to apply on the field. 
+ // + // Types that are valid to be assigned to TransformType: + // *DocumentTransform_FieldTransform_SetToServerValue + // *DocumentTransform_FieldTransform_Increment + // *DocumentTransform_FieldTransform_Maximum + // *DocumentTransform_FieldTransform_Minimum + // *DocumentTransform_FieldTransform_AppendMissingElements + // *DocumentTransform_FieldTransform_RemoveAllFromArray + TransformType isDocumentTransform_FieldTransform_TransformType `protobuf_oneof:"transform_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentTransform_FieldTransform) Reset() { *m = DocumentTransform_FieldTransform{} } +func (m *DocumentTransform_FieldTransform) String() string { return proto.CompactTextString(m) } +func (*DocumentTransform_FieldTransform) ProtoMessage() {} +func (*DocumentTransform_FieldTransform) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{1, 0} +} +func (m *DocumentTransform_FieldTransform) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentTransform_FieldTransform.Unmarshal(m, b) +} +func (m *DocumentTransform_FieldTransform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentTransform_FieldTransform.Marshal(b, m, deterministic) +} +func (dst *DocumentTransform_FieldTransform) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentTransform_FieldTransform.Merge(dst, src) +} +func (m *DocumentTransform_FieldTransform) XXX_Size() int { + return xxx_messageInfo_DocumentTransform_FieldTransform.Size(m) +} +func (m *DocumentTransform_FieldTransform) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentTransform_FieldTransform.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentTransform_FieldTransform proto.InternalMessageInfo + +func (m *DocumentTransform_FieldTransform) GetFieldPath() string { + if m != nil { + return m.FieldPath + } + return "" +} + +type isDocumentTransform_FieldTransform_TransformType interface { + isDocumentTransform_FieldTransform_TransformType() +} + +type DocumentTransform_FieldTransform_SetToServerValue struct { + SetToServerValue DocumentTransform_FieldTransform_ServerValue `protobuf:"varint,2,opt,name=set_to_server_value,json=setToServerValue,proto3,enum=google.firestore.v1beta1.DocumentTransform_FieldTransform_ServerValue,oneof"` +} + +type DocumentTransform_FieldTransform_Increment struct { + Increment *Value `protobuf:"bytes,3,opt,name=increment,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_Maximum struct { + Maximum *Value `protobuf:"bytes,4,opt,name=maximum,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_Minimum struct { + Minimum *Value `protobuf:"bytes,5,opt,name=minimum,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_AppendMissingElements struct { + AppendMissingElements *ArrayValue `protobuf:"bytes,6,opt,name=append_missing_elements,json=appendMissingElements,proto3,oneof"` +} + +type DocumentTransform_FieldTransform_RemoveAllFromArray struct { + RemoveAllFromArray *ArrayValue `protobuf:"bytes,7,opt,name=remove_all_from_array,json=removeAllFromArray,proto3,oneof"` +} + +func (*DocumentTransform_FieldTransform_SetToServerValue) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_Increment) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_Maximum) isDocumentTransform_FieldTransform_TransformType() {} + +func 
(*DocumentTransform_FieldTransform_Minimum) isDocumentTransform_FieldTransform_TransformType() {} + +func (*DocumentTransform_FieldTransform_AppendMissingElements) isDocumentTransform_FieldTransform_TransformType() { +} + +func (*DocumentTransform_FieldTransform_RemoveAllFromArray) isDocumentTransform_FieldTransform_TransformType() { +} + +func (m *DocumentTransform_FieldTransform) GetTransformType() isDocumentTransform_FieldTransform_TransformType { + if m != nil { + return m.TransformType + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetSetToServerValue() DocumentTransform_FieldTransform_ServerValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_SetToServerValue); ok { + return x.SetToServerValue + } + return DocumentTransform_FieldTransform_SERVER_VALUE_UNSPECIFIED +} + +func (m *DocumentTransform_FieldTransform) GetIncrement() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Increment); ok { + return x.Increment + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetMaximum() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Maximum); ok { + return x.Maximum + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetMinimum() *Value { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_Minimum); ok { + return x.Minimum + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetAppendMissingElements() *ArrayValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_AppendMissingElements); ok { + return x.AppendMissingElements + } + return nil +} + +func (m *DocumentTransform_FieldTransform) GetRemoveAllFromArray() *ArrayValue { + if x, ok := m.GetTransformType().(*DocumentTransform_FieldTransform_RemoveAllFromArray); ok { + return x.RemoveAllFromArray + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DocumentTransform_FieldTransform) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DocumentTransform_FieldTransform_OneofMarshaler, _DocumentTransform_FieldTransform_OneofUnmarshaler, _DocumentTransform_FieldTransform_OneofSizer, []interface{}{ + (*DocumentTransform_FieldTransform_SetToServerValue)(nil), + (*DocumentTransform_FieldTransform_Increment)(nil), + (*DocumentTransform_FieldTransform_Maximum)(nil), + (*DocumentTransform_FieldTransform_Minimum)(nil), + (*DocumentTransform_FieldTransform_AppendMissingElements)(nil), + (*DocumentTransform_FieldTransform_RemoveAllFromArray)(nil), + } +} + +func _DocumentTransform_FieldTransform_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DocumentTransform_FieldTransform) + // transform_type + switch x := m.TransformType.(type) { + case *DocumentTransform_FieldTransform_SetToServerValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.SetToServerValue)) + case *DocumentTransform_FieldTransform_Increment: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Increment); err != nil { + return err + } + case *DocumentTransform_FieldTransform_Maximum: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Maximum); err != nil { + return err + } + case *DocumentTransform_FieldTransform_Minimum: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Minimum); err != nil { + return err + } + case *DocumentTransform_FieldTransform_AppendMissingElements: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AppendMissingElements); err != nil { + return err + } + case *DocumentTransform_FieldTransform_RemoveAllFromArray: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RemoveAllFromArray); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DocumentTransform_FieldTransform.TransformType has unexpected type %T", x) + } + return nil +} + +func _DocumentTransform_FieldTransform_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DocumentTransform_FieldTransform) + switch tag { + case 2: // transform_type.set_to_server_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TransformType = &DocumentTransform_FieldTransform_SetToServerValue{DocumentTransform_FieldTransform_ServerValue(x)} + return true, err + case 3: // transform_type.increment + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Increment{msg} + return true, err + case 4: // transform_type.maximum + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Maximum{msg} + return true, err + case 5: // transform_type.minimum + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Value) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_Minimum{msg} + return true, err + case 6: // transform_type.append_missing_elements + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.TransformType = 
&DocumentTransform_FieldTransform_AppendMissingElements{msg} + return true, err + case 7: // transform_type.remove_all_from_array + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ArrayValue) + err := b.DecodeMessage(msg) + m.TransformType = &DocumentTransform_FieldTransform_RemoveAllFromArray{msg} + return true, err + default: + return false, nil + } +} + +func _DocumentTransform_FieldTransform_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DocumentTransform_FieldTransform) + // transform_type + switch x := m.TransformType.(type) { + case *DocumentTransform_FieldTransform_SetToServerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.SetToServerValue)) + case *DocumentTransform_FieldTransform_Increment: + s := proto.Size(x.Increment) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_Maximum: + s := proto.Size(x.Maximum) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_Minimum: + s := proto.Size(x.Minimum) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_AppendMissingElements: + s := proto.Size(x.AppendMissingElements) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DocumentTransform_FieldTransform_RemoveAllFromArray: + s := proto.Size(x.RemoveAllFromArray) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The result of applying a write. +type WriteResult struct { + // The last update time of the document after applying the write. Not set + // after a `delete`. + // + // If the write did not actually change the document, this will be the + // previous update_time. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The results of applying each [DocumentTransform.FieldTransform][google.firestore.v1beta1.DocumentTransform.FieldTransform], in the + // same order. 
+ TransformResults []*Value `protobuf:"bytes,2,rep,name=transform_results,json=transformResults,proto3" json:"transform_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteResult) Reset() { *m = WriteResult{} } +func (m *WriteResult) String() string { return proto.CompactTextString(m) } +func (*WriteResult) ProtoMessage() {} +func (*WriteResult) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{2} +} +func (m *WriteResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteResult.Unmarshal(m, b) +} +func (m *WriteResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteResult.Marshal(b, m, deterministic) +} +func (dst *WriteResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteResult.Merge(dst, src) +} +func (m *WriteResult) XXX_Size() int { + return xxx_messageInfo_WriteResult.Size(m) +} +func (m *WriteResult) XXX_DiscardUnknown() { + xxx_messageInfo_WriteResult.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteResult proto.InternalMessageInfo + +func (m *WriteResult) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *WriteResult) GetTransformResults() []*Value { + if m != nil { + return m.TransformResults + } + return nil +} + +// A [Document][google.firestore.v1beta1.Document] has changed. +// +// May be the result of multiple [writes][google.firestore.v1beta1.Write], including deletes, that +// ultimately resulted in a new value for the [Document][google.firestore.v1beta1.Document]. +// +// Multiple [DocumentChange][google.firestore.v1beta1.DocumentChange] messages may be returned for the same logical +// change, if multiple targets are affected. +type DocumentChange struct { + // The new state of the [Document][google.firestore.v1beta1.Document]. + // + // If `mask` is set, contains only fields that were updated or added. + Document *Document `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs of targets that match this document. + TargetIds []int32 `protobuf:"varint,5,rep,packed,name=target_ids,json=targetIds,proto3" json:"target_ids,omitempty"` + // A set of target IDs for targets that no longer match this document. 
+ RemovedTargetIds []int32 `protobuf:"varint,6,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentChange) Reset() { *m = DocumentChange{} } +func (m *DocumentChange) String() string { return proto.CompactTextString(m) } +func (*DocumentChange) ProtoMessage() {} +func (*DocumentChange) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{3} +} +func (m *DocumentChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentChange.Unmarshal(m, b) +} +func (m *DocumentChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentChange.Marshal(b, m, deterministic) +} +func (dst *DocumentChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentChange.Merge(dst, src) +} +func (m *DocumentChange) XXX_Size() int { + return xxx_messageInfo_DocumentChange.Size(m) +} +func (m *DocumentChange) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentChange.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentChange proto.InternalMessageInfo + +func (m *DocumentChange) GetDocument() *Document { + if m != nil { + return m.Document + } + return nil +} + +func (m *DocumentChange) GetTargetIds() []int32 { + if m != nil { + return m.TargetIds + } + return nil +} + +func (m *DocumentChange) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +// A [Document][google.firestore.v1beta1.Document] has been deleted. +// +// May be the result of multiple [writes][google.firestore.v1beta1.Write], including updates, the +// last of which deleted the [Document][google.firestore.v1beta1.Document]. +// +// Multiple [DocumentDelete][google.firestore.v1beta1.DocumentDelete] messages may be returned for the same logical +// delete, if multiple targets are affected. +type DocumentDelete struct { + // The resource name of the [Document][google.firestore.v1beta1.Document] that was deleted. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs for targets that previously matched this entity. + RemovedTargetIds []int32 `protobuf:"varint,6,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + // The read timestamp at which the delete was observed. + // + // Greater or equal to the `commit_time` of the delete. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentDelete) Reset() { *m = DocumentDelete{} } +func (m *DocumentDelete) String() string { return proto.CompactTextString(m) } +func (*DocumentDelete) ProtoMessage() {} +func (*DocumentDelete) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{4} +} +func (m *DocumentDelete) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentDelete.Unmarshal(m, b) +} +func (m *DocumentDelete) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentDelete.Marshal(b, m, deterministic) +} +func (dst *DocumentDelete) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentDelete.Merge(dst, src) +} +func (m *DocumentDelete) XXX_Size() int { + return xxx_messageInfo_DocumentDelete.Size(m) +} +func (m *DocumentDelete) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentDelete.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentDelete proto.InternalMessageInfo + +func (m *DocumentDelete) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentDelete) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +func (m *DocumentDelete) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// A [Document][google.firestore.v1beta1.Document] has been removed from the view of the targets. +// +// Sent if the document is no longer relevant to a target and is out of view. +// Can be sent instead of a DocumentDelete or a DocumentChange if the server +// can not send the new value of the document. +// +// Multiple [DocumentRemove][google.firestore.v1beta1.DocumentRemove] messages may be returned for the same logical +// write or delete, if multiple targets are affected. +type DocumentRemove struct { + // The resource name of the [Document][google.firestore.v1beta1.Document] that has gone out of view. + Document string `protobuf:"bytes,1,opt,name=document,proto3" json:"document,omitempty"` + // A set of target IDs for targets that previously matched this document. + RemovedTargetIds []int32 `protobuf:"varint,2,rep,packed,name=removed_target_ids,json=removedTargetIds,proto3" json:"removed_target_ids,omitempty"` + // The read timestamp at which the remove was observed. + // + // Greater or equal to the `commit_time` of the change/delete/remove. 
+ ReadTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_time,json=readTime,proto3" json:"read_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentRemove) Reset() { *m = DocumentRemove{} } +func (m *DocumentRemove) String() string { return proto.CompactTextString(m) } +func (*DocumentRemove) ProtoMessage() {} +func (*DocumentRemove) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{5} +} +func (m *DocumentRemove) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentRemove.Unmarshal(m, b) +} +func (m *DocumentRemove) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentRemove.Marshal(b, m, deterministic) +} +func (dst *DocumentRemove) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentRemove.Merge(dst, src) +} +func (m *DocumentRemove) XXX_Size() int { + return xxx_messageInfo_DocumentRemove.Size(m) +} +func (m *DocumentRemove) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentRemove.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentRemove proto.InternalMessageInfo + +func (m *DocumentRemove) GetDocument() string { + if m != nil { + return m.Document + } + return "" +} + +func (m *DocumentRemove) GetRemovedTargetIds() []int32 { + if m != nil { + return m.RemovedTargetIds + } + return nil +} + +func (m *DocumentRemove) GetReadTime() *timestamp.Timestamp { + if m != nil { + return m.ReadTime + } + return nil +} + +// A digest of all the documents that match a given target. +type ExistenceFilter struct { + // The target ID to which this filter applies. + TargetId int32 `protobuf:"varint,1,opt,name=target_id,json=targetId,proto3" json:"target_id,omitempty"` + // The total count of documents that match [target_id][google.firestore.v1beta1.ExistenceFilter.target_id]. + // + // If different from the count of documents in the client that match, the + // client must manually determine which documents no longer match the target. 
+ Count int32 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExistenceFilter) Reset() { *m = ExistenceFilter{} } +func (m *ExistenceFilter) String() string { return proto.CompactTextString(m) } +func (*ExistenceFilter) ProtoMessage() {} +func (*ExistenceFilter) Descriptor() ([]byte, []int) { + return fileDescriptor_write_514106e2c3bd8657, []int{6} +} +func (m *ExistenceFilter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExistenceFilter.Unmarshal(m, b) +} +func (m *ExistenceFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExistenceFilter.Marshal(b, m, deterministic) +} +func (dst *ExistenceFilter) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExistenceFilter.Merge(dst, src) +} +func (m *ExistenceFilter) XXX_Size() int { + return xxx_messageInfo_ExistenceFilter.Size(m) +} +func (m *ExistenceFilter) XXX_DiscardUnknown() { + xxx_messageInfo_ExistenceFilter.DiscardUnknown(m) +} + +var xxx_messageInfo_ExistenceFilter proto.InternalMessageInfo + +func (m *ExistenceFilter) GetTargetId() int32 { + if m != nil { + return m.TargetId + } + return 0 +} + +func (m *ExistenceFilter) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +func init() { + proto.RegisterType((*Write)(nil), "google.firestore.v1beta1.Write") + proto.RegisterType((*DocumentTransform)(nil), "google.firestore.v1beta1.DocumentTransform") + proto.RegisterType((*DocumentTransform_FieldTransform)(nil), "google.firestore.v1beta1.DocumentTransform.FieldTransform") + proto.RegisterType((*WriteResult)(nil), "google.firestore.v1beta1.WriteResult") + proto.RegisterType((*DocumentChange)(nil), "google.firestore.v1beta1.DocumentChange") + proto.RegisterType((*DocumentDelete)(nil), "google.firestore.v1beta1.DocumentDelete") + proto.RegisterType((*DocumentRemove)(nil), "google.firestore.v1beta1.DocumentRemove") + proto.RegisterType((*ExistenceFilter)(nil), "google.firestore.v1beta1.ExistenceFilter") + proto.RegisterEnum("google.firestore.v1beta1.DocumentTransform_FieldTransform_ServerValue", DocumentTransform_FieldTransform_ServerValue_name, DocumentTransform_FieldTransform_ServerValue_value) +} + +func init() { + proto.RegisterFile("google/firestore/v1beta1/write.proto", fileDescriptor_write_514106e2c3bd8657) +} + +var fileDescriptor_write_514106e2c3bd8657 = []byte{ + // 867 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x6e, 0xe3, 0x44, + 0x14, 0x6e, 0xd2, 0x26, 0xdb, 0x9c, 0xa0, 0xd6, 0x3b, 0xb0, 0xc2, 0x0a, 0x5d, 0xb6, 0x8a, 0x16, + 0xa8, 0x04, 0x72, 0xd4, 0x72, 0x81, 0xc4, 0xf2, 0xa3, 0xa6, 0x75, 0xda, 0x8a, 0x2d, 0xca, 0x3a, + 0x69, 0x10, 0xa8, 0xc2, 0x9a, 0xc6, 0x27, 0xae, 0xb5, 0xf6, 0x8c, 0x35, 0x33, 0xee, 0xee, 0xbe, + 0x01, 0xf7, 0xdc, 0x70, 0xc3, 0x0d, 0x97, 0x3c, 0x01, 0xcf, 0xc0, 0x3d, 0xcf, 0xc1, 0x2b, 0x20, + 0xcf, 0xd8, 0x6e, 0xca, 0x2a, 0xa4, 0x5d, 0x71, 0x97, 0x33, 0xf3, 0x7d, 0xdf, 0xf9, 0x7c, 0xce, + 0x9c, 0x13, 0x78, 0x1c, 0x72, 0x1e, 0xc6, 0xd8, 0x9b, 0x45, 0x02, 0xa5, 0xe2, 0x02, 0x7b, 0x57, + 0xbb, 0x17, 0xa8, 0xe8, 0x6e, 0xef, 0x85, 0x88, 0x14, 0x3a, 0xa9, 0xe0, 0x8a, 0x13, 0xdb, 0xa0, + 0x9c, 0x0a, 0xe5, 0x14, 0xa8, 0xce, 0x07, 0x0b, 0xf9, 0x53, 0x9e, 0x24, 0x9c, 0x19, 0x81, 0xce, + 0x47, 0x0b, 0x61, 0x01, 0x9f, 0x66, 0x09, 0x32, 0x55, 0x00, 0x1f, 0x15, 0x40, 0x1d, 0x5d, 0x64, + 0xb3, 0x9e, 0x8a, 0x12, 0x94, 0x8a, 0x26, 
0x69, 0x01, 0xd8, 0x2a, 0x00, 0x34, 0x8d, 0x7a, 0x94, + 0x31, 0xae, 0xa8, 0x8a, 0x38, 0x93, 0xe6, 0xb6, 0xfb, 0x57, 0x1d, 0x1a, 0xdf, 0xe5, 0xc6, 0xc9, + 0x17, 0xd0, 0xcc, 0xd2, 0x80, 0x2a, 0xb4, 0x6b, 0xdb, 0xb5, 0x9d, 0xf6, 0x5e, 0xd7, 0x59, 0xf4, + 0x0d, 0xce, 0x61, 0x61, 0xe1, 0x78, 0xc5, 0x2b, 0x38, 0xc4, 0x86, 0x66, 0x80, 0x31, 0x2a, 0xb4, + 0xeb, 0xdb, 0xb5, 0x9d, 0x56, 0x7e, 0x63, 0x62, 0xf2, 0x0d, 0xb4, 0x94, 0xa0, 0x4c, 0xce, 0xb8, + 0x48, 0xec, 0xa6, 0x96, 0xfe, 0x78, 0xb9, 0xf4, 0xb8, 0xa4, 0x1c, 0xaf, 0x78, 0xd7, 0x7c, 0x72, + 0x04, 0x6d, 0x93, 0xd0, 0x4f, 0xa8, 0x7c, 0x6e, 0xaf, 0x6a, 0xb9, 0x0f, 0x97, 0xcb, 0x9d, 0x52, + 0xf9, 0xdc, 0x03, 0x43, 0xcd, 0x7f, 0x93, 0x67, 0x60, 0x4d, 0x33, 0x21, 0x90, 0x29, 0xbf, 0x2c, + 0xa8, 0xbd, 0xb6, 0x4c, 0x6d, 0x28, 0x70, 0xca, 0x59, 0x10, 0xe5, 0x05, 0xf4, 0x36, 0x0b, 0x7e, + 0x99, 0xa2, 0xdf, 0x86, 0x16, 0x4f, 0x51, 0xe8, 0xf2, 0x76, 0x7f, 0x6a, 0xc2, 0xfd, 0xd7, 0xbe, + 0x85, 0x74, 0x60, 0xbd, 0xca, 0x96, 0x57, 0xb9, 0xe5, 0x55, 0x31, 0x41, 0xb0, 0x66, 0x11, 0xc6, + 0x81, 0x5f, 0x7d, 0xad, 0xb4, 0xeb, 0xdb, 0xab, 0x3b, 0xed, 0xbd, 0xcf, 0xef, 0x50, 0x2e, 0x67, + 0x90, 0x6b, 0x54, 0xa1, 0xb7, 0x39, 0xbb, 0x11, 0xcb, 0xce, 0xdf, 0x6b, 0xb0, 0x71, 0x13, 0x43, + 0x1e, 0x02, 0x98, 0xcc, 0x29, 0x55, 0x97, 0x85, 0xaf, 0x96, 0x3e, 0x19, 0x52, 0x75, 0x49, 0x5e, + 0xc0, 0xdb, 0x12, 0x95, 0xaf, 0xb8, 0x2f, 0x51, 0x5c, 0xa1, 0xf0, 0xaf, 0x68, 0x9c, 0x99, 0x3e, + 0x6f, 0xec, 0x0d, 0xde, 0xdc, 0x9b, 0x33, 0xd2, 0x72, 0x93, 0x5c, 0xed, 0x78, 0xc5, 0xb3, 0x24, + 0xaa, 0x31, 0x9f, 0x3b, 0x23, 0x5f, 0x43, 0x2b, 0x62, 0x53, 0x81, 0xba, 0x5c, 0xa6, 0xd5, 0x8f, + 0x16, 0xa7, 0x2b, 0x75, 0xae, 0x39, 0xe4, 0x09, 0xdc, 0x4b, 0xe8, 0xcb, 0x28, 0xc9, 0x92, 0xa2, + 0xb7, 0xb7, 0xa0, 0x97, 0x0c, 0x4d, 0x8e, 0x98, 0x26, 0x37, 0x6e, 0x4f, 0x36, 0x0c, 0xf2, 0x23, + 0xbc, 0x4b, 0xd3, 0x14, 0x59, 0xe0, 0x27, 0x91, 0x94, 0x11, 0x0b, 0x7d, 0x8c, 0xb5, 0x27, 0x59, + 0x8c, 0xc0, 0xe3, 0xc5, 0x62, 0xfb, 0x42, 0xd0, 0x57, 0xa5, 0xe2, 0x03, 0x23, 0x73, 0x6a, 0x54, + 0xdc, 0x42, 0x84, 0x7c, 0x0f, 0x0f, 0x04, 0x26, 0xfc, 0x0a, 0x7d, 0x1a, 0xc7, 0xfe, 0x4c, 0xf0, + 0xc4, 0xa7, 0x39, 0xcd, 0xbe, 0x77, 0x27, 0x75, 0x62, 0x44, 0xf6, 0xe3, 0x78, 0x20, 0x78, 0xa2, + 0xaf, 0xba, 0x5f, 0x42, 0x7b, 0xbe, 0x09, 0x5b, 0x60, 0x8f, 0x5c, 0x6f, 0xe2, 0x7a, 0xfe, 0x64, + 0xff, 0xe9, 0x99, 0xeb, 0x9f, 0x7d, 0x3b, 0x1a, 0xba, 0x07, 0x27, 0x83, 0x13, 0xf7, 0xd0, 0x5a, + 0x21, 0x16, 0xbc, 0xe5, 0xb9, 0xcf, 0xce, 0xdc, 0xd1, 0xd8, 0x1f, 0x9f, 0x9c, 0xba, 0x56, 0xad, + 0x6f, 0xc1, 0x46, 0xf5, 0x80, 0x7d, 0xf5, 0x2a, 0xc5, 0xee, 0x2f, 0x35, 0x68, 0xeb, 0x15, 0xe3, + 0xa1, 0xcc, 0xe2, 0xbc, 0x2b, 0xe5, 0x0c, 0xe7, 0xab, 0xaa, 0xd8, 0x36, 0x9d, 0xd2, 0x71, 0xb9, + 0xc7, 0x9c, 0x71, 0xb9, 0xc7, 0xca, 0xb9, 0xcd, 0x0f, 0xc8, 0x53, 0xb8, 0x7f, 0x2d, 0x2f, 0xb4, + 0x60, 0x39, 0x26, 0xcb, 0xfa, 0xe3, 0x59, 0x15, 0xd3, 0x38, 0x91, 0xdd, 0x5f, 0x6b, 0xb0, 0x51, + 0x3e, 0xd3, 0x83, 0x4b, 0xca, 0x42, 0x24, 0x5f, 0xfd, 0x6b, 0x44, 0x6f, 0xb5, 0x08, 0xe7, 0xc6, + 0xf8, 0x21, 0x80, 0xa2, 0x22, 0x44, 0xe5, 0x47, 0x81, 0xb4, 0x1b, 0xdb, 0xab, 0x3b, 0x0d, 0xaf, + 0x65, 0x4e, 0x4e, 0x02, 0x49, 0x3e, 0x81, 0xa2, 0xe6, 0x81, 0x3f, 0x07, 0x6b, 0x6a, 0x98, 0x55, + 0xdc, 0x8c, 0x4b, 0x74, 0xf7, 0xe7, 0x39, 0x7f, 0x87, 0x66, 0x9d, 0xfe, 0xd7, 0x0a, 0xb9, 0x93, + 0x38, 0xf9, 0x0c, 0x5a, 0x02, 0x69, 0x60, 0xba, 0xb0, 0xb6, 0xb4, 0x0b, 0xeb, 0x39, 0x38, 0x0f, + 0x6f, 0xb8, 0xf2, 0xb4, 0xea, 0x1b, 0xb8, 0xaa, 0xff, 0xdf, 0xae, 0x0e, 0x61, 0xd3, 0x7d, 0x19, + 0x49, 0x85, 0x6c, 0x8a, 0x83, 0x28, 0x56, 0x28, 0xc8, 0x7b, 0xd0, 
0xaa, 0x32, 0x6a, 0x5b, 0x0d, + 0x6f, 0xbd, 0x6c, 0x05, 0x79, 0x07, 0x1a, 0x53, 0x9e, 0x31, 0xa5, 0x17, 0x59, 0xc3, 0x33, 0x41, + 0xff, 0x8f, 0x1a, 0x6c, 0x4d, 0x79, 0xb2, 0xb0, 0xe5, 0x7d, 0xd0, 0x4f, 0x79, 0x98, 0x3b, 0x19, + 0xd6, 0x7e, 0xd8, 0x2f, 0x70, 0x21, 0x8f, 0x29, 0x0b, 0x1d, 0x2e, 0xc2, 0x5e, 0x88, 0x4c, 0xfb, + 0xec, 0x99, 0x2b, 0x9a, 0x46, 0xf2, 0xf5, 0x7f, 0xf1, 0x27, 0xd5, 0xc9, 0x6f, 0xf5, 0xb5, 0xa3, + 0x83, 0xc1, 0xe8, 0xf7, 0xfa, 0xfb, 0x47, 0x46, 0xea, 0x20, 0xe6, 0x59, 0xe0, 0x0c, 0xaa, 0xc4, + 0x93, 0xdd, 0x7e, 0xce, 0xf8, 0xb3, 0x04, 0x9c, 0x6b, 0xc0, 0x79, 0x05, 0x38, 0x9f, 0x18, 0xc9, + 0x8b, 0xa6, 0x4e, 0xfb, 0xe9, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdd, 0x64, 0x4b, 0x89, 0xa2, + 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go new file mode 100644 index 0000000..8ad87b9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/annotations.pb.go @@ -0,0 +1,2633 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/annotations.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// When an [Annotation][google.genomics.v1.Annotation] or +// [AnnotationSet][google.genomics.v1.AnnotationSet] is created, if `type` is +// not specified it will be set to `GENERIC`. +type AnnotationType int32 + +const ( + AnnotationType_ANNOTATION_TYPE_UNSPECIFIED AnnotationType = 0 + // A `GENERIC` annotation type should be used when no other annotation + // type will suffice. This represents an untyped annotation of the reference + // genome. + AnnotationType_GENERIC AnnotationType = 1 + // A `VARIANT` annotation type. + AnnotationType_VARIANT AnnotationType = 2 + // A `GENE` annotation type represents the existence of a gene at the + // associated reference coordinates. The start coordinate is typically the + // gene's transcription start site and the end is typically the end of the + // gene's last exon. + AnnotationType_GENE AnnotationType = 3 + // A `TRANSCRIPT` annotation type represents the assertion that a + // particular region of the reference genome may be transcribed as RNA. 
+ AnnotationType_TRANSCRIPT AnnotationType = 4 +) + +var AnnotationType_name = map[int32]string{ + 0: "ANNOTATION_TYPE_UNSPECIFIED", + 1: "GENERIC", + 2: "VARIANT", + 3: "GENE", + 4: "TRANSCRIPT", +} +var AnnotationType_value = map[string]int32{ + "ANNOTATION_TYPE_UNSPECIFIED": 0, + "GENERIC": 1, + "VARIANT": 2, + "GENE": 3, + "TRANSCRIPT": 4, +} + +func (x AnnotationType) String() string { + return proto.EnumName(AnnotationType_name, int32(x)) +} +func (AnnotationType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{0} +} + +type VariantAnnotation_Type int32 + +const ( + VariantAnnotation_TYPE_UNSPECIFIED VariantAnnotation_Type = 0 + // `TYPE_OTHER` should be used when no other Type will suffice. + // Further explanation of the variant type may be included in the + // [info][google.genomics.v1.Annotation.info] field. + VariantAnnotation_TYPE_OTHER VariantAnnotation_Type = 1 + // `INSERTION` indicates an insertion. + VariantAnnotation_INSERTION VariantAnnotation_Type = 2 + // `DELETION` indicates a deletion. + VariantAnnotation_DELETION VariantAnnotation_Type = 3 + // `SUBSTITUTION` indicates a block substitution of + // two or more nucleotides. + VariantAnnotation_SUBSTITUTION VariantAnnotation_Type = 4 + // `SNP` indicates a single nucleotide polymorphism. + VariantAnnotation_SNP VariantAnnotation_Type = 5 + // `STRUCTURAL` indicates a large structural variant, + // including chromosomal fusions, inversions, etc. + VariantAnnotation_STRUCTURAL VariantAnnotation_Type = 6 + // `CNV` indicates a variation in copy number. + VariantAnnotation_CNV VariantAnnotation_Type = 7 +) + +var VariantAnnotation_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "TYPE_OTHER", + 2: "INSERTION", + 3: "DELETION", + 4: "SUBSTITUTION", + 5: "SNP", + 6: "STRUCTURAL", + 7: "CNV", +} +var VariantAnnotation_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "TYPE_OTHER": 1, + "INSERTION": 2, + "DELETION": 3, + "SUBSTITUTION": 4, + "SNP": 5, + "STRUCTURAL": 6, + "CNV": 7, +} + +func (x VariantAnnotation_Type) String() string { + return proto.EnumName(VariantAnnotation_Type_name, int32(x)) +} +func (VariantAnnotation_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{2, 0} +} + +type VariantAnnotation_Effect int32 + +const ( + VariantAnnotation_EFFECT_UNSPECIFIED VariantAnnotation_Effect = 0 + // `EFFECT_OTHER` should be used when no other Effect + // will suffice. + VariantAnnotation_EFFECT_OTHER VariantAnnotation_Effect = 1 + // `FRAMESHIFT` indicates a mutation in which the insertion or + // deletion of nucleotides resulted in a frameshift change. + VariantAnnotation_FRAMESHIFT VariantAnnotation_Effect = 2 + // `FRAME_PRESERVING_INDEL` indicates a mutation in which a + // multiple of three nucleotides has been inserted or deleted, resulting + // in no change to the reading frame of the coding sequence. + VariantAnnotation_FRAME_PRESERVING_INDEL VariantAnnotation_Effect = 3 + // `SYNONYMOUS_SNP` indicates a single nucleotide polymorphism + // mutation that results in no amino acid change. + VariantAnnotation_SYNONYMOUS_SNP VariantAnnotation_Effect = 4 + // `NONSYNONYMOUS_SNP` indicates a single nucleotide + // polymorphism mutation that results in an amino acid change. + VariantAnnotation_NONSYNONYMOUS_SNP VariantAnnotation_Effect = 5 + // `STOP_GAIN` indicates a mutation that leads to the creation + // of a stop codon at the variant site. 
Frameshift mutations creating + // downstream stop codons do not count as `STOP_GAIN`. + VariantAnnotation_STOP_GAIN VariantAnnotation_Effect = 6 + // `STOP_LOSS` indicates a mutation that eliminates a + // stop codon at the variant site. + VariantAnnotation_STOP_LOSS VariantAnnotation_Effect = 7 + // `SPLICE_SITE_DISRUPTION` indicates that this variant is + // found in a splice site for the associated transcript, and alters the + // normal splicing pattern. + VariantAnnotation_SPLICE_SITE_DISRUPTION VariantAnnotation_Effect = 8 +) + +var VariantAnnotation_Effect_name = map[int32]string{ + 0: "EFFECT_UNSPECIFIED", + 1: "EFFECT_OTHER", + 2: "FRAMESHIFT", + 3: "FRAME_PRESERVING_INDEL", + 4: "SYNONYMOUS_SNP", + 5: "NONSYNONYMOUS_SNP", + 6: "STOP_GAIN", + 7: "STOP_LOSS", + 8: "SPLICE_SITE_DISRUPTION", +} +var VariantAnnotation_Effect_value = map[string]int32{ + "EFFECT_UNSPECIFIED": 0, + "EFFECT_OTHER": 1, + "FRAMESHIFT": 2, + "FRAME_PRESERVING_INDEL": 3, + "SYNONYMOUS_SNP": 4, + "NONSYNONYMOUS_SNP": 5, + "STOP_GAIN": 6, + "STOP_LOSS": 7, + "SPLICE_SITE_DISRUPTION": 8, +} + +func (x VariantAnnotation_Effect) String() string { + return proto.EnumName(VariantAnnotation_Effect_name, int32(x)) +} +func (VariantAnnotation_Effect) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{2, 1} +} + +type VariantAnnotation_ClinicalSignificance int32 + +const ( + VariantAnnotation_CLINICAL_SIGNIFICANCE_UNSPECIFIED VariantAnnotation_ClinicalSignificance = 0 + // `OTHER` should be used when no other clinical significance + // value will suffice. + VariantAnnotation_CLINICAL_SIGNIFICANCE_OTHER VariantAnnotation_ClinicalSignificance = 1 + VariantAnnotation_UNCERTAIN VariantAnnotation_ClinicalSignificance = 2 + VariantAnnotation_BENIGN VariantAnnotation_ClinicalSignificance = 3 + VariantAnnotation_LIKELY_BENIGN VariantAnnotation_ClinicalSignificance = 4 + VariantAnnotation_LIKELY_PATHOGENIC VariantAnnotation_ClinicalSignificance = 5 + VariantAnnotation_PATHOGENIC VariantAnnotation_ClinicalSignificance = 6 + VariantAnnotation_DRUG_RESPONSE VariantAnnotation_ClinicalSignificance = 7 + VariantAnnotation_HISTOCOMPATIBILITY VariantAnnotation_ClinicalSignificance = 8 + VariantAnnotation_CONFERS_SENSITIVITY VariantAnnotation_ClinicalSignificance = 9 + VariantAnnotation_RISK_FACTOR VariantAnnotation_ClinicalSignificance = 10 + VariantAnnotation_ASSOCIATION VariantAnnotation_ClinicalSignificance = 11 + VariantAnnotation_PROTECTIVE VariantAnnotation_ClinicalSignificance = 12 + // `MULTIPLE_REPORTED` should be used when multiple clinical + // signficances are reported for a variant. The original clinical + // significance values may be provided in the `info` field. 
+ VariantAnnotation_MULTIPLE_REPORTED VariantAnnotation_ClinicalSignificance = 13 +) + +var VariantAnnotation_ClinicalSignificance_name = map[int32]string{ + 0: "CLINICAL_SIGNIFICANCE_UNSPECIFIED", + 1: "CLINICAL_SIGNIFICANCE_OTHER", + 2: "UNCERTAIN", + 3: "BENIGN", + 4: "LIKELY_BENIGN", + 5: "LIKELY_PATHOGENIC", + 6: "PATHOGENIC", + 7: "DRUG_RESPONSE", + 8: "HISTOCOMPATIBILITY", + 9: "CONFERS_SENSITIVITY", + 10: "RISK_FACTOR", + 11: "ASSOCIATION", + 12: "PROTECTIVE", + 13: "MULTIPLE_REPORTED", +} +var VariantAnnotation_ClinicalSignificance_value = map[string]int32{ + "CLINICAL_SIGNIFICANCE_UNSPECIFIED": 0, + "CLINICAL_SIGNIFICANCE_OTHER": 1, + "UNCERTAIN": 2, + "BENIGN": 3, + "LIKELY_BENIGN": 4, + "LIKELY_PATHOGENIC": 5, + "PATHOGENIC": 6, + "DRUG_RESPONSE": 7, + "HISTOCOMPATIBILITY": 8, + "CONFERS_SENSITIVITY": 9, + "RISK_FACTOR": 10, + "ASSOCIATION": 11, + "PROTECTIVE": 12, + "MULTIPLE_REPORTED": 13, +} + +func (x VariantAnnotation_ClinicalSignificance) String() string { + return proto.EnumName(VariantAnnotation_ClinicalSignificance_name, int32(x)) +} +func (VariantAnnotation_ClinicalSignificance) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{2, 2} +} + +// An annotation set is a logical grouping of annotations that share consistent +// type information and provenance. Examples of annotation sets include 'all +// genes from refseq', and 'all variant annotations from ClinVar'. +type AnnotationSet struct { + // The server-generated annotation set ID, unique across all annotation sets. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The dataset to which this annotation set belongs. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The ID of the reference set that defines the coordinate space for this + // set's annotations. + ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // The display name for this annotation set. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The source URI describing the file from which this annotation set was + // generated, if any. + SourceUri string `protobuf:"bytes,5,opt,name=source_uri,json=sourceUri,proto3" json:"source_uri,omitempty"` + // The type of annotations contained within this set. + Type AnnotationType `protobuf:"varint,6,opt,name=type,proto3,enum=google.genomics.v1.AnnotationType" json:"type,omitempty"` + // A map of additional read alignment information. This must be of the form + // map (string key mapping to a list of string values). 
+ Info map[string]*_struct.ListValue `protobuf:"bytes,17,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnnotationSet) Reset() { *m = AnnotationSet{} } +func (m *AnnotationSet) String() string { return proto.CompactTextString(m) } +func (*AnnotationSet) ProtoMessage() {} +func (*AnnotationSet) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{0} +} +func (m *AnnotationSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnnotationSet.Unmarshal(m, b) +} +func (m *AnnotationSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnnotationSet.Marshal(b, m, deterministic) +} +func (dst *AnnotationSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnnotationSet.Merge(dst, src) +} +func (m *AnnotationSet) XXX_Size() int { + return xxx_messageInfo_AnnotationSet.Size(m) +} +func (m *AnnotationSet) XXX_DiscardUnknown() { + xxx_messageInfo_AnnotationSet.DiscardUnknown(m) +} + +var xxx_messageInfo_AnnotationSet proto.InternalMessageInfo + +func (m *AnnotationSet) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *AnnotationSet) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *AnnotationSet) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *AnnotationSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AnnotationSet) GetSourceUri() string { + if m != nil { + return m.SourceUri + } + return "" +} + +func (m *AnnotationSet) GetType() AnnotationType { + if m != nil { + return m.Type + } + return AnnotationType_ANNOTATION_TYPE_UNSPECIFIED +} + +func (m *AnnotationSet) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +// An annotation describes a region of reference genome. The value of an +// annotation may be one of several canonical types, supplemented by arbitrary +// info tags. An annotation is not inherently associated with a specific +// sample or individual (though a client could choose to use annotations in +// this way). Example canonical annotation types are `GENE` and +// `VARIANT`. +type Annotation struct { + // The server-generated annotation ID, unique across all annotations. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The annotation set to which this annotation belongs. + AnnotationSetId string `protobuf:"bytes,2,opt,name=annotation_set_id,json=annotationSetId,proto3" json:"annotation_set_id,omitempty"` + // The display name of this annotation. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The ID of the Google Genomics reference associated with this range. + ReferenceId string `protobuf:"bytes,4,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"` + // The display name corresponding to the reference specified by + // `referenceId`, for example `chr1`, `1`, or `chrX`. + ReferenceName string `protobuf:"bytes,5,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The start position of the range on the reference, 0-based inclusive. 
+ Start int64 `protobuf:"varint,6,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. + End int64 `protobuf:"varint,7,opt,name=end,proto3" json:"end,omitempty"` + // Whether this range refers to the reverse strand, as opposed to the forward + // strand. Note that regardless of this field, the start/end position of the + // range always refer to the forward strand. + ReverseStrand bool `protobuf:"varint,8,opt,name=reverse_strand,json=reverseStrand,proto3" json:"reverse_strand,omitempty"` + // The data type for this annotation. Must match the containing annotation + // set's type. + Type AnnotationType `protobuf:"varint,9,opt,name=type,proto3,enum=google.genomics.v1.AnnotationType" json:"type,omitempty"` + // Types that are valid to be assigned to Value: + // *Annotation_Variant + // *Annotation_Transcript + Value isAnnotation_Value `protobuf_oneof:"value"` + // A map of additional read alignment information. This must be of the form + // map (string key mapping to a list of string values). + Info map[string]*_struct.ListValue `protobuf:"bytes,12,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Annotation) Reset() { *m = Annotation{} } +func (m *Annotation) String() string { return proto.CompactTextString(m) } +func (*Annotation) ProtoMessage() {} +func (*Annotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{1} +} +func (m *Annotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Annotation.Unmarshal(m, b) +} +func (m *Annotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Annotation.Marshal(b, m, deterministic) +} +func (dst *Annotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Annotation.Merge(dst, src) +} +func (m *Annotation) XXX_Size() int { + return xxx_messageInfo_Annotation.Size(m) +} +func (m *Annotation) XXX_DiscardUnknown() { + xxx_messageInfo_Annotation.DiscardUnknown(m) +} + +var xxx_messageInfo_Annotation proto.InternalMessageInfo + +func (m *Annotation) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Annotation) GetAnnotationSetId() string { + if m != nil { + return m.AnnotationSetId + } + return "" +} + +func (m *Annotation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Annotation) GetReferenceId() string { + if m != nil { + return m.ReferenceId + } + return "" +} + +func (m *Annotation) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *Annotation) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Annotation) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *Annotation) GetReverseStrand() bool { + if m != nil { + return m.ReverseStrand + } + return false +} + +func (m *Annotation) GetType() AnnotationType { + if m != nil { + return m.Type + } + return AnnotationType_ANNOTATION_TYPE_UNSPECIFIED +} + +type isAnnotation_Value interface { + isAnnotation_Value() +} + +type Annotation_Variant struct { + Variant *VariantAnnotation `protobuf:"bytes,10,opt,name=variant,proto3,oneof"` +} + +type Annotation_Transcript struct { + Transcript *Transcript `protobuf:"bytes,11,opt,name=transcript,proto3,oneof"` +} + +func 
(*Annotation_Variant) isAnnotation_Value() {} + +func (*Annotation_Transcript) isAnnotation_Value() {} + +func (m *Annotation) GetValue() isAnnotation_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *Annotation) GetVariant() *VariantAnnotation { + if x, ok := m.GetValue().(*Annotation_Variant); ok { + return x.Variant + } + return nil +} + +func (m *Annotation) GetTranscript() *Transcript { + if x, ok := m.GetValue().(*Annotation_Transcript); ok { + return x.Transcript + } + return nil +} + +func (m *Annotation) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Annotation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Annotation_OneofMarshaler, _Annotation_OneofUnmarshaler, _Annotation_OneofSizer, []interface{}{ + (*Annotation_Variant)(nil), + (*Annotation_Transcript)(nil), + } +} + +func _Annotation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Annotation) + // value + switch x := m.Value.(type) { + case *Annotation_Variant: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Variant); err != nil { + return err + } + case *Annotation_Transcript: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Transcript); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Annotation.Value has unexpected type %T", x) + } + return nil +} + +func _Annotation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Annotation) + switch tag { + case 10: // value.variant + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(VariantAnnotation) + err := b.DecodeMessage(msg) + m.Value = &Annotation_Variant{msg} + return true, err + case 11: // value.transcript + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Transcript) + err := b.DecodeMessage(msg) + m.Value = &Annotation_Transcript{msg} + return true, err + default: + return false, nil + } +} + +func _Annotation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Annotation) + // value + switch x := m.Value.(type) { + case *Annotation_Variant: + s := proto.Size(x.Variant) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Annotation_Transcript: + s := proto.Size(x.Transcript) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type VariantAnnotation struct { + // Type has been adapted from ClinVar's list of variant types. + Type VariantAnnotation_Type `protobuf:"varint,1,opt,name=type,proto3,enum=google.genomics.v1.VariantAnnotation_Type" json:"type,omitempty"` + // Effect of the variant on the coding sequence. + Effect VariantAnnotation_Effect `protobuf:"varint,2,opt,name=effect,proto3,enum=google.genomics.v1.VariantAnnotation_Effect" json:"effect,omitempty"` + // The alternate allele for this variant. If multiple alternate alleles + // exist at this location, create a separate variant for each one, as they + // may represent distinct conditions. 
+ AlternateBases string `protobuf:"bytes,3,opt,name=alternate_bases,json=alternateBases,proto3" json:"alternate_bases,omitempty"` + // Google annotation ID of the gene affected by this variant. This should + // be provided when the variant is created. + GeneId string `protobuf:"bytes,4,opt,name=gene_id,json=geneId,proto3" json:"gene_id,omitempty"` + // Google annotation IDs of the transcripts affected by this variant. These + // should be provided when the variant is created. + TranscriptIds []string `protobuf:"bytes,5,rep,name=transcript_ids,json=transcriptIds,proto3" json:"transcript_ids,omitempty"` + // The set of conditions associated with this variant. + // A condition describes the way a variant influences human health. + Conditions []*VariantAnnotation_ClinicalCondition `protobuf:"bytes,6,rep,name=conditions,proto3" json:"conditions,omitempty"` + // Describes the clinical significance of a variant. + // It is adapted from the ClinVar controlled vocabulary for clinical + // significance described at: + // http://www.ncbi.nlm.nih.gov/clinvar/docs/clinsig/ + ClinicalSignificance VariantAnnotation_ClinicalSignificance `protobuf:"varint,7,opt,name=clinical_significance,json=clinicalSignificance,proto3,enum=google.genomics.v1.VariantAnnotation_ClinicalSignificance" json:"clinical_significance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VariantAnnotation) Reset() { *m = VariantAnnotation{} } +func (m *VariantAnnotation) String() string { return proto.CompactTextString(m) } +func (*VariantAnnotation) ProtoMessage() {} +func (*VariantAnnotation) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{2} +} +func (m *VariantAnnotation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VariantAnnotation.Unmarshal(m, b) +} +func (m *VariantAnnotation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VariantAnnotation.Marshal(b, m, deterministic) +} +func (dst *VariantAnnotation) XXX_Merge(src proto.Message) { + xxx_messageInfo_VariantAnnotation.Merge(dst, src) +} +func (m *VariantAnnotation) XXX_Size() int { + return xxx_messageInfo_VariantAnnotation.Size(m) +} +func (m *VariantAnnotation) XXX_DiscardUnknown() { + xxx_messageInfo_VariantAnnotation.DiscardUnknown(m) +} + +var xxx_messageInfo_VariantAnnotation proto.InternalMessageInfo + +func (m *VariantAnnotation) GetType() VariantAnnotation_Type { + if m != nil { + return m.Type + } + return VariantAnnotation_TYPE_UNSPECIFIED +} + +func (m *VariantAnnotation) GetEffect() VariantAnnotation_Effect { + if m != nil { + return m.Effect + } + return VariantAnnotation_EFFECT_UNSPECIFIED +} + +func (m *VariantAnnotation) GetAlternateBases() string { + if m != nil { + return m.AlternateBases + } + return "" +} + +func (m *VariantAnnotation) GetGeneId() string { + if m != nil { + return m.GeneId + } + return "" +} + +func (m *VariantAnnotation) GetTranscriptIds() []string { + if m != nil { + return m.TranscriptIds + } + return nil +} + +func (m *VariantAnnotation) GetConditions() []*VariantAnnotation_ClinicalCondition { + if m != nil { + return m.Conditions + } + return nil +} + +func (m *VariantAnnotation) GetClinicalSignificance() VariantAnnotation_ClinicalSignificance { + if m != nil { + return m.ClinicalSignificance + } + return VariantAnnotation_CLINICAL_SIGNIFICANCE_UNSPECIFIED +} + +type VariantAnnotation_ClinicalCondition struct { + // A set of names for the condition. 
+ Names []string `protobuf:"bytes,1,rep,name=names,proto3" json:"names,omitempty"` + // The set of external IDs for this condition. + ExternalIds []*ExternalId `protobuf:"bytes,2,rep,name=external_ids,json=externalIds,proto3" json:"external_ids,omitempty"` + // The MedGen concept id associated with this gene. + // Search for these IDs at http://www.ncbi.nlm.nih.gov/medgen/ + ConceptId string `protobuf:"bytes,3,opt,name=concept_id,json=conceptId,proto3" json:"concept_id,omitempty"` + // The OMIM id for this condition. + // Search for these IDs at http://omim.org/ + OmimId string `protobuf:"bytes,4,opt,name=omim_id,json=omimId,proto3" json:"omim_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VariantAnnotation_ClinicalCondition) Reset() { *m = VariantAnnotation_ClinicalCondition{} } +func (m *VariantAnnotation_ClinicalCondition) String() string { return proto.CompactTextString(m) } +func (*VariantAnnotation_ClinicalCondition) ProtoMessage() {} +func (*VariantAnnotation_ClinicalCondition) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{2, 0} +} +func (m *VariantAnnotation_ClinicalCondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VariantAnnotation_ClinicalCondition.Unmarshal(m, b) +} +func (m *VariantAnnotation_ClinicalCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VariantAnnotation_ClinicalCondition.Marshal(b, m, deterministic) +} +func (dst *VariantAnnotation_ClinicalCondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_VariantAnnotation_ClinicalCondition.Merge(dst, src) +} +func (m *VariantAnnotation_ClinicalCondition) XXX_Size() int { + return xxx_messageInfo_VariantAnnotation_ClinicalCondition.Size(m) +} +func (m *VariantAnnotation_ClinicalCondition) XXX_DiscardUnknown() { + xxx_messageInfo_VariantAnnotation_ClinicalCondition.DiscardUnknown(m) +} + +var xxx_messageInfo_VariantAnnotation_ClinicalCondition proto.InternalMessageInfo + +func (m *VariantAnnotation_ClinicalCondition) GetNames() []string { + if m != nil { + return m.Names + } + return nil +} + +func (m *VariantAnnotation_ClinicalCondition) GetExternalIds() []*ExternalId { + if m != nil { + return m.ExternalIds + } + return nil +} + +func (m *VariantAnnotation_ClinicalCondition) GetConceptId() string { + if m != nil { + return m.ConceptId + } + return "" +} + +func (m *VariantAnnotation_ClinicalCondition) GetOmimId() string { + if m != nil { + return m.OmimId + } + return "" +} + +// A transcript represents the assertion that a particular region of the +// reference genome may be transcribed as RNA. +type Transcript struct { + // The annotation ID of the gene from which this transcript is transcribed. + GeneId string `protobuf:"bytes,1,opt,name=gene_id,json=geneId,proto3" json:"gene_id,omitempty"` + // The exons that compose + // this transcript. This field should be unset for genomes where transcript + // splicing does not occur, for example prokaryotes. + // + // Introns are regions of the transcript that are not included in the + // spliced RNA product. Though not explicitly modeled here, intron ranges can + // be deduced; all regions of this transcript that are not exons are introns. + // + // Exonic sequences do not necessarily code for a translational product + // (amino acids). 
Only the regions of exons bounded by the + // [codingSequence][google.genomics.v1.Transcript.coding_sequence] correspond + // to coding DNA sequence. + // + // Exons are ordered by start position and may not overlap. + Exons []*Transcript_Exon `protobuf:"bytes,2,rep,name=exons,proto3" json:"exons,omitempty"` + // The range of the coding sequence for this transcript, if any. To determine + // the exact ranges of coding sequence, intersect this range with those of the + // [exons][google.genomics.v1.Transcript.exons], if any. If there are any + // [exons][google.genomics.v1.Transcript.exons], the + // [codingSequence][google.genomics.v1.Transcript.coding_sequence] must start + // and end within them. + // + // Note that in some cases, the reference genome will not exactly match the + // observed mRNA transcript e.g. due to variance in the source genome from + // reference. In these cases, + // [exon.frame][google.genomics.v1.Transcript.Exon.frame] will not necessarily + // match the expected reference reading frame and coding exon reference bases + // cannot necessarily be concatenated to produce the original transcript mRNA. + CodingSequence *Transcript_CodingSequence `protobuf:"bytes,3,opt,name=coding_sequence,json=codingSequence,proto3" json:"coding_sequence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Transcript) Reset() { *m = Transcript{} } +func (m *Transcript) String() string { return proto.CompactTextString(m) } +func (*Transcript) ProtoMessage() {} +func (*Transcript) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{3} +} +func (m *Transcript) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Transcript.Unmarshal(m, b) +} +func (m *Transcript) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Transcript.Marshal(b, m, deterministic) +} +func (dst *Transcript) XXX_Merge(src proto.Message) { + xxx_messageInfo_Transcript.Merge(dst, src) +} +func (m *Transcript) XXX_Size() int { + return xxx_messageInfo_Transcript.Size(m) +} +func (m *Transcript) XXX_DiscardUnknown() { + xxx_messageInfo_Transcript.DiscardUnknown(m) +} + +var xxx_messageInfo_Transcript proto.InternalMessageInfo + +func (m *Transcript) GetGeneId() string { + if m != nil { + return m.GeneId + } + return "" +} + +func (m *Transcript) GetExons() []*Transcript_Exon { + if m != nil { + return m.Exons + } + return nil +} + +func (m *Transcript) GetCodingSequence() *Transcript_CodingSequence { + if m != nil { + return m.CodingSequence + } + return nil +} + +type Transcript_Exon struct { + // The start position of the exon on this annotation's reference sequence, + // 0-based inclusive. Note that this is relative to the reference start, and + // **not** the containing annotation start. + Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the exon on this annotation's reference sequence, + // 0-based exclusive. Note that this is relative to the reference start, and + // *not* the containing annotation start. + End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + // The frame of this exon. Contains a value of 0, 1, or 2, which indicates + // the offset of the first coding base of the exon within the reading frame + // of the coding DNA sequence, if any. 
This field is dependent on the + // strandedness of this annotation (see + // [Annotation.reverse_strand][google.genomics.v1.Annotation.reverse_strand]). + // For forward stranded annotations, this offset is relative to the + // [exon.start][google.genomics.v1.Transcript.Exon.start]. For reverse + // strand annotations, this offset is relative to the + // [exon.end][google.genomics.v1.Transcript.Exon.end] `- 1`. + // + // Unset if this exon does not intersect the coding sequence. Upon creation + // of a transcript, the frame must be populated for all or none of the + // coding exons. + Frame *wrappers.Int32Value `protobuf:"bytes,3,opt,name=frame,proto3" json:"frame,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Transcript_Exon) Reset() { *m = Transcript_Exon{} } +func (m *Transcript_Exon) String() string { return proto.CompactTextString(m) } +func (*Transcript_Exon) ProtoMessage() {} +func (*Transcript_Exon) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{3, 0} +} +func (m *Transcript_Exon) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Transcript_Exon.Unmarshal(m, b) +} +func (m *Transcript_Exon) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Transcript_Exon.Marshal(b, m, deterministic) +} +func (dst *Transcript_Exon) XXX_Merge(src proto.Message) { + xxx_messageInfo_Transcript_Exon.Merge(dst, src) +} +func (m *Transcript_Exon) XXX_Size() int { + return xxx_messageInfo_Transcript_Exon.Size(m) +} +func (m *Transcript_Exon) XXX_DiscardUnknown() { + xxx_messageInfo_Transcript_Exon.DiscardUnknown(m) +} + +var xxx_messageInfo_Transcript_Exon proto.InternalMessageInfo + +func (m *Transcript_Exon) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Transcript_Exon) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *Transcript_Exon) GetFrame() *wrappers.Int32Value { + if m != nil { + return m.Frame + } + return nil +} + +type Transcript_CodingSequence struct { + // The start of the coding sequence on this annotation's reference sequence, + // 0-based inclusive. Note that this position is relative to the reference + // start, and *not* the containing annotation start. + Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + // The end of the coding sequence on this annotation's reference sequence, + // 0-based exclusive. Note that this position is relative to the reference + // start, and *not* the containing annotation start. 
+ End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Transcript_CodingSequence) Reset() { *m = Transcript_CodingSequence{} } +func (m *Transcript_CodingSequence) String() string { return proto.CompactTextString(m) } +func (*Transcript_CodingSequence) ProtoMessage() {} +func (*Transcript_CodingSequence) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{3, 1} +} +func (m *Transcript_CodingSequence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Transcript_CodingSequence.Unmarshal(m, b) +} +func (m *Transcript_CodingSequence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Transcript_CodingSequence.Marshal(b, m, deterministic) +} +func (dst *Transcript_CodingSequence) XXX_Merge(src proto.Message) { + xxx_messageInfo_Transcript_CodingSequence.Merge(dst, src) +} +func (m *Transcript_CodingSequence) XXX_Size() int { + return xxx_messageInfo_Transcript_CodingSequence.Size(m) +} +func (m *Transcript_CodingSequence) XXX_DiscardUnknown() { + xxx_messageInfo_Transcript_CodingSequence.DiscardUnknown(m) +} + +var xxx_messageInfo_Transcript_CodingSequence proto.InternalMessageInfo + +func (m *Transcript_CodingSequence) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Transcript_CodingSequence) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +type ExternalId struct { + // The name of the source of this data. + SourceName string `protobuf:"bytes,1,opt,name=source_name,json=sourceName,proto3" json:"source_name,omitempty"` + // The id used by the source of this data. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExternalId) Reset() { *m = ExternalId{} } +func (m *ExternalId) String() string { return proto.CompactTextString(m) } +func (*ExternalId) ProtoMessage() {} +func (*ExternalId) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{4} +} +func (m *ExternalId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExternalId.Unmarshal(m, b) +} +func (m *ExternalId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExternalId.Marshal(b, m, deterministic) +} +func (dst *ExternalId) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExternalId.Merge(dst, src) +} +func (m *ExternalId) XXX_Size() int { + return xxx_messageInfo_ExternalId.Size(m) +} +func (m *ExternalId) XXX_DiscardUnknown() { + xxx_messageInfo_ExternalId.DiscardUnknown(m) +} + +var xxx_messageInfo_ExternalId proto.InternalMessageInfo + +func (m *ExternalId) GetSourceName() string { + if m != nil { + return m.SourceName + } + return "" +} + +func (m *ExternalId) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +type CreateAnnotationSetRequest struct { + // The annotation set to create. 
+ AnnotationSet *AnnotationSet `protobuf:"bytes,1,opt,name=annotation_set,json=annotationSet,proto3" json:"annotation_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAnnotationSetRequest) Reset() { *m = CreateAnnotationSetRequest{} } +func (m *CreateAnnotationSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAnnotationSetRequest) ProtoMessage() {} +func (*CreateAnnotationSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{5} +} +func (m *CreateAnnotationSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAnnotationSetRequest.Unmarshal(m, b) +} +func (m *CreateAnnotationSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAnnotationSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAnnotationSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAnnotationSetRequest.Merge(dst, src) +} +func (m *CreateAnnotationSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateAnnotationSetRequest.Size(m) +} +func (m *CreateAnnotationSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAnnotationSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAnnotationSetRequest proto.InternalMessageInfo + +func (m *CreateAnnotationSetRequest) GetAnnotationSet() *AnnotationSet { + if m != nil { + return m.AnnotationSet + } + return nil +} + +type GetAnnotationSetRequest struct { + // The ID of the annotation set to be retrieved. + AnnotationSetId string `protobuf:"bytes,1,opt,name=annotation_set_id,json=annotationSetId,proto3" json:"annotation_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAnnotationSetRequest) Reset() { *m = GetAnnotationSetRequest{} } +func (m *GetAnnotationSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetAnnotationSetRequest) ProtoMessage() {} +func (*GetAnnotationSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{6} +} +func (m *GetAnnotationSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAnnotationSetRequest.Unmarshal(m, b) +} +func (m *GetAnnotationSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAnnotationSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetAnnotationSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAnnotationSetRequest.Merge(dst, src) +} +func (m *GetAnnotationSetRequest) XXX_Size() int { + return xxx_messageInfo_GetAnnotationSetRequest.Size(m) +} +func (m *GetAnnotationSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAnnotationSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAnnotationSetRequest proto.InternalMessageInfo + +func (m *GetAnnotationSetRequest) GetAnnotationSetId() string { + if m != nil { + return m.AnnotationSetId + } + return "" +} + +type UpdateAnnotationSetRequest struct { + // The ID of the annotation set to be updated. + AnnotationSetId string `protobuf:"bytes,1,opt,name=annotation_set_id,json=annotationSetId,proto3" json:"annotation_set_id,omitempty"` + // The new annotation set. + AnnotationSet *AnnotationSet `protobuf:"bytes,2,opt,name=annotation_set,json=annotationSet,proto3" json:"annotation_set,omitempty"` + // An optional mask specifying which fields to update. 
Mutable fields are + // [name][google.genomics.v1.AnnotationSet.name], + // [source_uri][google.genomics.v1.AnnotationSet.source_uri], and + // [info][google.genomics.v1.AnnotationSet.info]. If unspecified, all + // mutable fields will be updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAnnotationSetRequest) Reset() { *m = UpdateAnnotationSetRequest{} } +func (m *UpdateAnnotationSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateAnnotationSetRequest) ProtoMessage() {} +func (*UpdateAnnotationSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{7} +} +func (m *UpdateAnnotationSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAnnotationSetRequest.Unmarshal(m, b) +} +func (m *UpdateAnnotationSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAnnotationSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateAnnotationSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAnnotationSetRequest.Merge(dst, src) +} +func (m *UpdateAnnotationSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateAnnotationSetRequest.Size(m) +} +func (m *UpdateAnnotationSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAnnotationSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAnnotationSetRequest proto.InternalMessageInfo + +func (m *UpdateAnnotationSetRequest) GetAnnotationSetId() string { + if m != nil { + return m.AnnotationSetId + } + return "" +} + +func (m *UpdateAnnotationSetRequest) GetAnnotationSet() *AnnotationSet { + if m != nil { + return m.AnnotationSet + } + return nil +} + +func (m *UpdateAnnotationSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteAnnotationSetRequest struct { + // The ID of the annotation set to be deleted. 
+ AnnotationSetId string `protobuf:"bytes,1,opt,name=annotation_set_id,json=annotationSetId,proto3" json:"annotation_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAnnotationSetRequest) Reset() { *m = DeleteAnnotationSetRequest{} } +func (m *DeleteAnnotationSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAnnotationSetRequest) ProtoMessage() {} +func (*DeleteAnnotationSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{8} +} +func (m *DeleteAnnotationSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAnnotationSetRequest.Unmarshal(m, b) +} +func (m *DeleteAnnotationSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAnnotationSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAnnotationSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAnnotationSetRequest.Merge(dst, src) +} +func (m *DeleteAnnotationSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAnnotationSetRequest.Size(m) +} +func (m *DeleteAnnotationSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAnnotationSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAnnotationSetRequest proto.InternalMessageInfo + +func (m *DeleteAnnotationSetRequest) GetAnnotationSetId() string { + if m != nil { + return m.AnnotationSetId + } + return "" +} + +type SearchAnnotationSetsRequest struct { + // Required. The dataset IDs to search within. Caller must have `READ` access + // to these datasets. + DatasetIds []string `protobuf:"bytes,1,rep,name=dataset_ids,json=datasetIds,proto3" json:"dataset_ids,omitempty"` + // If specified, only annotation sets associated with the given reference set + // are returned. + ReferenceSetId string `protobuf:"bytes,2,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // Only return annotations sets for which a substring of the name matches this + // string (case insensitive). + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // If specified, only annotation sets that have any of these types are + // returned. + Types []AnnotationType `protobuf:"varint,4,rep,packed,name=types,proto3,enum=google.genomics.v1.AnnotationType" json:"types,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 128. The maximum value is 1024. 
+ PageSize int32 `protobuf:"varint,6,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAnnotationSetsRequest) Reset() { *m = SearchAnnotationSetsRequest{} } +func (m *SearchAnnotationSetsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchAnnotationSetsRequest) ProtoMessage() {} +func (*SearchAnnotationSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{9} +} +func (m *SearchAnnotationSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAnnotationSetsRequest.Unmarshal(m, b) +} +func (m *SearchAnnotationSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAnnotationSetsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchAnnotationSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAnnotationSetsRequest.Merge(dst, src) +} +func (m *SearchAnnotationSetsRequest) XXX_Size() int { + return xxx_messageInfo_SearchAnnotationSetsRequest.Size(m) +} +func (m *SearchAnnotationSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAnnotationSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAnnotationSetsRequest proto.InternalMessageInfo + +func (m *SearchAnnotationSetsRequest) GetDatasetIds() []string { + if m != nil { + return m.DatasetIds + } + return nil +} + +func (m *SearchAnnotationSetsRequest) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *SearchAnnotationSetsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SearchAnnotationSetsRequest) GetTypes() []AnnotationType { + if m != nil { + return m.Types + } + return nil +} + +func (m *SearchAnnotationSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchAnnotationSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type SearchAnnotationSetsResponse struct { + // The matching annotation sets. + AnnotationSets []*AnnotationSet `protobuf:"bytes,1,rep,name=annotation_sets,json=annotationSets,proto3" json:"annotation_sets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAnnotationSetsResponse) Reset() { *m = SearchAnnotationSetsResponse{} } +func (m *SearchAnnotationSetsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchAnnotationSetsResponse) ProtoMessage() {} +func (*SearchAnnotationSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{10} +} +func (m *SearchAnnotationSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAnnotationSetsResponse.Unmarshal(m, b) +} +func (m *SearchAnnotationSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAnnotationSetsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchAnnotationSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAnnotationSetsResponse.Merge(dst, src) +} +func (m *SearchAnnotationSetsResponse) XXX_Size() int { + return xxx_messageInfo_SearchAnnotationSetsResponse.Size(m) +} +func (m *SearchAnnotationSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAnnotationSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAnnotationSetsResponse proto.InternalMessageInfo + +func (m *SearchAnnotationSetsResponse) GetAnnotationSets() []*AnnotationSet { + if m != nil { + return m.AnnotationSets + } + return nil +} + +func (m *SearchAnnotationSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type CreateAnnotationRequest struct { + // The annotation to be created. + Annotation *Annotation `protobuf:"bytes,1,opt,name=annotation,proto3" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAnnotationRequest) Reset() { *m = CreateAnnotationRequest{} } +func (m *CreateAnnotationRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAnnotationRequest) ProtoMessage() {} +func (*CreateAnnotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{11} +} +func (m *CreateAnnotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAnnotationRequest.Unmarshal(m, b) +} +func (m *CreateAnnotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAnnotationRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAnnotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAnnotationRequest.Merge(dst, src) +} +func (m *CreateAnnotationRequest) XXX_Size() int { + return xxx_messageInfo_CreateAnnotationRequest.Size(m) +} +func (m *CreateAnnotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAnnotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAnnotationRequest proto.InternalMessageInfo + +func (m *CreateAnnotationRequest) GetAnnotation() *Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +type BatchCreateAnnotationsRequest struct { + // The annotations to be created. At most 4096 can be specified in a single + // request. + Annotations []*Annotation `protobuf:"bytes,1,rep,name=annotations,proto3" json:"annotations,omitempty"` + // A unique request ID which enables the server to detect duplicated requests. 
+ // If provided, duplicated requests will result in the same response; if not + // provided, duplicated requests may result in duplicated data. For a given + // annotation set, callers should not reuse `request_id`s when writing + // different batches of annotations - behavior in this case is undefined. + // A common approach is to use a UUID. For batch jobs where worker crashes are + // a possibility, consider using some unique variant of a worker or run ID. + RequestId string `protobuf:"bytes,2,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateAnnotationsRequest) Reset() { *m = BatchCreateAnnotationsRequest{} } +func (m *BatchCreateAnnotationsRequest) String() string { return proto.CompactTextString(m) } +func (*BatchCreateAnnotationsRequest) ProtoMessage() {} +func (*BatchCreateAnnotationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{12} +} +func (m *BatchCreateAnnotationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateAnnotationsRequest.Unmarshal(m, b) +} +func (m *BatchCreateAnnotationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateAnnotationsRequest.Marshal(b, m, deterministic) +} +func (dst *BatchCreateAnnotationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateAnnotationsRequest.Merge(dst, src) +} +func (m *BatchCreateAnnotationsRequest) XXX_Size() int { + return xxx_messageInfo_BatchCreateAnnotationsRequest.Size(m) +} +func (m *BatchCreateAnnotationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateAnnotationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateAnnotationsRequest proto.InternalMessageInfo + +func (m *BatchCreateAnnotationsRequest) GetAnnotations() []*Annotation { + if m != nil { + return m.Annotations + } + return nil +} + +func (m *BatchCreateAnnotationsRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +type BatchCreateAnnotationsResponse struct { + // The resulting per-annotation entries, ordered consistently with the + // original request. 
+ Entries []*BatchCreateAnnotationsResponse_Entry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateAnnotationsResponse) Reset() { *m = BatchCreateAnnotationsResponse{} } +func (m *BatchCreateAnnotationsResponse) String() string { return proto.CompactTextString(m) } +func (*BatchCreateAnnotationsResponse) ProtoMessage() {} +func (*BatchCreateAnnotationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{13} +} +func (m *BatchCreateAnnotationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateAnnotationsResponse.Unmarshal(m, b) +} +func (m *BatchCreateAnnotationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateAnnotationsResponse.Marshal(b, m, deterministic) +} +func (dst *BatchCreateAnnotationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateAnnotationsResponse.Merge(dst, src) +} +func (m *BatchCreateAnnotationsResponse) XXX_Size() int { + return xxx_messageInfo_BatchCreateAnnotationsResponse.Size(m) +} +func (m *BatchCreateAnnotationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateAnnotationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateAnnotationsResponse proto.InternalMessageInfo + +func (m *BatchCreateAnnotationsResponse) GetEntries() []*BatchCreateAnnotationsResponse_Entry { + if m != nil { + return m.Entries + } + return nil +} + +type BatchCreateAnnotationsResponse_Entry struct { + // The creation status. + Status *status.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` + // The created annotation, if creation was successful. 
+ Annotation *Annotation `protobuf:"bytes,2,opt,name=annotation,proto3" json:"annotation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchCreateAnnotationsResponse_Entry) Reset() { *m = BatchCreateAnnotationsResponse_Entry{} } +func (m *BatchCreateAnnotationsResponse_Entry) String() string { return proto.CompactTextString(m) } +func (*BatchCreateAnnotationsResponse_Entry) ProtoMessage() {} +func (*BatchCreateAnnotationsResponse_Entry) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{13, 0} +} +func (m *BatchCreateAnnotationsResponse_Entry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchCreateAnnotationsResponse_Entry.Unmarshal(m, b) +} +func (m *BatchCreateAnnotationsResponse_Entry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchCreateAnnotationsResponse_Entry.Marshal(b, m, deterministic) +} +func (dst *BatchCreateAnnotationsResponse_Entry) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchCreateAnnotationsResponse_Entry.Merge(dst, src) +} +func (m *BatchCreateAnnotationsResponse_Entry) XXX_Size() int { + return xxx_messageInfo_BatchCreateAnnotationsResponse_Entry.Size(m) +} +func (m *BatchCreateAnnotationsResponse_Entry) XXX_DiscardUnknown() { + xxx_messageInfo_BatchCreateAnnotationsResponse_Entry.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchCreateAnnotationsResponse_Entry proto.InternalMessageInfo + +func (m *BatchCreateAnnotationsResponse_Entry) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *BatchCreateAnnotationsResponse_Entry) GetAnnotation() *Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +type GetAnnotationRequest struct { + // The ID of the annotation to be retrieved. + AnnotationId string `protobuf:"bytes,1,opt,name=annotation_id,json=annotationId,proto3" json:"annotation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAnnotationRequest) Reset() { *m = GetAnnotationRequest{} } +func (m *GetAnnotationRequest) String() string { return proto.CompactTextString(m) } +func (*GetAnnotationRequest) ProtoMessage() {} +func (*GetAnnotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{14} +} +func (m *GetAnnotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAnnotationRequest.Unmarshal(m, b) +} +func (m *GetAnnotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAnnotationRequest.Marshal(b, m, deterministic) +} +func (dst *GetAnnotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAnnotationRequest.Merge(dst, src) +} +func (m *GetAnnotationRequest) XXX_Size() int { + return xxx_messageInfo_GetAnnotationRequest.Size(m) +} +func (m *GetAnnotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAnnotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAnnotationRequest proto.InternalMessageInfo + +func (m *GetAnnotationRequest) GetAnnotationId() string { + if m != nil { + return m.AnnotationId + } + return "" +} + +type UpdateAnnotationRequest struct { + // The ID of the annotation to be updated. + AnnotationId string `protobuf:"bytes,1,opt,name=annotation_id,json=annotationId,proto3" json:"annotation_id,omitempty"` + // The new annotation. 
+ Annotation *Annotation `protobuf:"bytes,2,opt,name=annotation,proto3" json:"annotation,omitempty"` + // An optional mask specifying which fields to update. Mutable fields are + // [name][google.genomics.v1.Annotation.name], + // [variant][google.genomics.v1.Annotation.variant], + // [transcript][google.genomics.v1.Annotation.transcript], and + // [info][google.genomics.v1.Annotation.info]. If unspecified, all mutable + // fields will be updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAnnotationRequest) Reset() { *m = UpdateAnnotationRequest{} } +func (m *UpdateAnnotationRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateAnnotationRequest) ProtoMessage() {} +func (*UpdateAnnotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{15} +} +func (m *UpdateAnnotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAnnotationRequest.Unmarshal(m, b) +} +func (m *UpdateAnnotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAnnotationRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateAnnotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAnnotationRequest.Merge(dst, src) +} +func (m *UpdateAnnotationRequest) XXX_Size() int { + return xxx_messageInfo_UpdateAnnotationRequest.Size(m) +} +func (m *UpdateAnnotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAnnotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAnnotationRequest proto.InternalMessageInfo + +func (m *UpdateAnnotationRequest) GetAnnotationId() string { + if m != nil { + return m.AnnotationId + } + return "" +} + +func (m *UpdateAnnotationRequest) GetAnnotation() *Annotation { + if m != nil { + return m.Annotation + } + return nil +} + +func (m *UpdateAnnotationRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteAnnotationRequest struct { + // The ID of the annotation to be deleted. 
+ AnnotationId string `protobuf:"bytes,1,opt,name=annotation_id,json=annotationId,proto3" json:"annotation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAnnotationRequest) Reset() { *m = DeleteAnnotationRequest{} } +func (m *DeleteAnnotationRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAnnotationRequest) ProtoMessage() {} +func (*DeleteAnnotationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{16} +} +func (m *DeleteAnnotationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAnnotationRequest.Unmarshal(m, b) +} +func (m *DeleteAnnotationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAnnotationRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAnnotationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAnnotationRequest.Merge(dst, src) +} +func (m *DeleteAnnotationRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAnnotationRequest.Size(m) +} +func (m *DeleteAnnotationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAnnotationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAnnotationRequest proto.InternalMessageInfo + +func (m *DeleteAnnotationRequest) GetAnnotationId() string { + if m != nil { + return m.AnnotationId + } + return "" +} + +type SearchAnnotationsRequest struct { + // Required. The annotation sets to search within. The caller must have + // `READ` access to these annotation sets. + // All queried annotation sets must have the same type. + AnnotationSetIds []string `protobuf:"bytes,1,rep,name=annotation_set_ids,json=annotationSetIds,proto3" json:"annotation_set_ids,omitempty"` + // Required. `reference_id` or `reference_name` must be set. + // + // Types that are valid to be assigned to Reference: + // *SearchAnnotationsRequest_ReferenceId + // *SearchAnnotationsRequest_ReferenceName + Reference isSearchAnnotationsRequest_Reference `protobuf_oneof:"reference"` + // The start position of the range on the reference, 0-based inclusive. If + // specified, + // [referenceId][google.genomics.v1.SearchAnnotationsRequest.reference_id] or + // [referenceName][google.genomics.v1.SearchAnnotationsRequest.reference_name] + // must be specified. Defaults to 0. + Start int64 `protobuf:"varint,4,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. If + // [referenceId][google.genomics.v1.SearchAnnotationsRequest.reference_id] or + // [referenceName][google.genomics.v1.SearchAnnotationsRequest.reference_name] + // must be specified, Defaults to the length of the reference. + End int64 `protobuf:"varint,5,opt,name=end,proto3" json:"end,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 256. The maximum value is 2048. 
+ PageSize int32 `protobuf:"varint,7,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAnnotationsRequest) Reset() { *m = SearchAnnotationsRequest{} } +func (m *SearchAnnotationsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchAnnotationsRequest) ProtoMessage() {} +func (*SearchAnnotationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{17} +} +func (m *SearchAnnotationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAnnotationsRequest.Unmarshal(m, b) +} +func (m *SearchAnnotationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAnnotationsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchAnnotationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAnnotationsRequest.Merge(dst, src) +} +func (m *SearchAnnotationsRequest) XXX_Size() int { + return xxx_messageInfo_SearchAnnotationsRequest.Size(m) +} +func (m *SearchAnnotationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAnnotationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAnnotationsRequest proto.InternalMessageInfo + +func (m *SearchAnnotationsRequest) GetAnnotationSetIds() []string { + if m != nil { + return m.AnnotationSetIds + } + return nil +} + +type isSearchAnnotationsRequest_Reference interface { + isSearchAnnotationsRequest_Reference() +} + +type SearchAnnotationsRequest_ReferenceId struct { + ReferenceId string `protobuf:"bytes,2,opt,name=reference_id,json=referenceId,proto3,oneof"` +} + +type SearchAnnotationsRequest_ReferenceName struct { + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3,oneof"` +} + +func (*SearchAnnotationsRequest_ReferenceId) isSearchAnnotationsRequest_Reference() {} + +func (*SearchAnnotationsRequest_ReferenceName) isSearchAnnotationsRequest_Reference() {} + +func (m *SearchAnnotationsRequest) GetReference() isSearchAnnotationsRequest_Reference { + if m != nil { + return m.Reference + } + return nil +} + +func (m *SearchAnnotationsRequest) GetReferenceId() string { + if x, ok := m.GetReference().(*SearchAnnotationsRequest_ReferenceId); ok { + return x.ReferenceId + } + return "" +} + +func (m *SearchAnnotationsRequest) GetReferenceName() string { + if x, ok := m.GetReference().(*SearchAnnotationsRequest_ReferenceName); ok { + return x.ReferenceName + } + return "" +} + +func (m *SearchAnnotationsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *SearchAnnotationsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *SearchAnnotationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchAnnotationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SearchAnnotationsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SearchAnnotationsRequest_OneofMarshaler, _SearchAnnotationsRequest_OneofUnmarshaler, _SearchAnnotationsRequest_OneofSizer, []interface{}{ + (*SearchAnnotationsRequest_ReferenceId)(nil), + (*SearchAnnotationsRequest_ReferenceName)(nil), + } +} + +func _SearchAnnotationsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SearchAnnotationsRequest) + // reference + switch x := m.Reference.(type) { + case *SearchAnnotationsRequest_ReferenceId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ReferenceId) + case *SearchAnnotationsRequest_ReferenceName: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ReferenceName) + case nil: + default: + return fmt.Errorf("SearchAnnotationsRequest.Reference has unexpected type %T", x) + } + return nil +} + +func _SearchAnnotationsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SearchAnnotationsRequest) + switch tag { + case 2: // reference.reference_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Reference = &SearchAnnotationsRequest_ReferenceId{x} + return true, err + case 3: // reference.reference_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Reference = &SearchAnnotationsRequest_ReferenceName{x} + return true, err + default: + return false, nil + } +} + +func _SearchAnnotationsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SearchAnnotationsRequest) + // reference + switch x := m.Reference.(type) { + case *SearchAnnotationsRequest_ReferenceId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ReferenceId))) + n += len(x.ReferenceId) + case *SearchAnnotationsRequest_ReferenceName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ReferenceName))) + n += len(x.ReferenceName) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SearchAnnotationsResponse struct { + // The matching annotations. + Annotations []*Annotation `protobuf:"bytes,1,rep,name=annotations,proto3" json:"annotations,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchAnnotationsResponse) Reset() { *m = SearchAnnotationsResponse{} } +func (m *SearchAnnotationsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchAnnotationsResponse) ProtoMessage() {} +func (*SearchAnnotationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_annotations_257ef94ec8d5dfc4, []int{18} +} +func (m *SearchAnnotationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchAnnotationsResponse.Unmarshal(m, b) +} +func (m *SearchAnnotationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchAnnotationsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchAnnotationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchAnnotationsResponse.Merge(dst, src) +} +func (m *SearchAnnotationsResponse) XXX_Size() int { + return xxx_messageInfo_SearchAnnotationsResponse.Size(m) +} +func (m *SearchAnnotationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchAnnotationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchAnnotationsResponse proto.InternalMessageInfo + +func (m *SearchAnnotationsResponse) GetAnnotations() []*Annotation { + if m != nil { + return m.Annotations + } + return nil +} + +func (m *SearchAnnotationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*AnnotationSet)(nil), "google.genomics.v1.AnnotationSet") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.AnnotationSet.InfoEntry") + proto.RegisterType((*Annotation)(nil), "google.genomics.v1.Annotation") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.Annotation.InfoEntry") + proto.RegisterType((*VariantAnnotation)(nil), "google.genomics.v1.VariantAnnotation") + proto.RegisterType((*VariantAnnotation_ClinicalCondition)(nil), "google.genomics.v1.VariantAnnotation.ClinicalCondition") + proto.RegisterType((*Transcript)(nil), "google.genomics.v1.Transcript") + proto.RegisterType((*Transcript_Exon)(nil), "google.genomics.v1.Transcript.Exon") + proto.RegisterType((*Transcript_CodingSequence)(nil), "google.genomics.v1.Transcript.CodingSequence") + proto.RegisterType((*ExternalId)(nil), "google.genomics.v1.ExternalId") + proto.RegisterType((*CreateAnnotationSetRequest)(nil), "google.genomics.v1.CreateAnnotationSetRequest") + proto.RegisterType((*GetAnnotationSetRequest)(nil), "google.genomics.v1.GetAnnotationSetRequest") + proto.RegisterType((*UpdateAnnotationSetRequest)(nil), "google.genomics.v1.UpdateAnnotationSetRequest") + proto.RegisterType((*DeleteAnnotationSetRequest)(nil), "google.genomics.v1.DeleteAnnotationSetRequest") + proto.RegisterType((*SearchAnnotationSetsRequest)(nil), "google.genomics.v1.SearchAnnotationSetsRequest") + proto.RegisterType((*SearchAnnotationSetsResponse)(nil), "google.genomics.v1.SearchAnnotationSetsResponse") + proto.RegisterType((*CreateAnnotationRequest)(nil), "google.genomics.v1.CreateAnnotationRequest") + proto.RegisterType((*BatchCreateAnnotationsRequest)(nil), "google.genomics.v1.BatchCreateAnnotationsRequest") + proto.RegisterType((*BatchCreateAnnotationsResponse)(nil), "google.genomics.v1.BatchCreateAnnotationsResponse") + 
proto.RegisterType((*BatchCreateAnnotationsResponse_Entry)(nil), "google.genomics.v1.BatchCreateAnnotationsResponse.Entry") + proto.RegisterType((*GetAnnotationRequest)(nil), "google.genomics.v1.GetAnnotationRequest") + proto.RegisterType((*UpdateAnnotationRequest)(nil), "google.genomics.v1.UpdateAnnotationRequest") + proto.RegisterType((*DeleteAnnotationRequest)(nil), "google.genomics.v1.DeleteAnnotationRequest") + proto.RegisterType((*SearchAnnotationsRequest)(nil), "google.genomics.v1.SearchAnnotationsRequest") + proto.RegisterType((*SearchAnnotationsResponse)(nil), "google.genomics.v1.SearchAnnotationsResponse") + proto.RegisterEnum("google.genomics.v1.AnnotationType", AnnotationType_name, AnnotationType_value) + proto.RegisterEnum("google.genomics.v1.VariantAnnotation_Type", VariantAnnotation_Type_name, VariantAnnotation_Type_value) + proto.RegisterEnum("google.genomics.v1.VariantAnnotation_Effect", VariantAnnotation_Effect_name, VariantAnnotation_Effect_value) + proto.RegisterEnum("google.genomics.v1.VariantAnnotation_ClinicalSignificance", VariantAnnotation_ClinicalSignificance_name, VariantAnnotation_ClinicalSignificance_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AnnotationServiceV1Client is the client API for AnnotationServiceV1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AnnotationServiceV1Client interface { + // Creates a new annotation set. Caller must have WRITE permission for the + // associated dataset. + // + // The following fields are required: + // + // * [datasetId][google.genomics.v1.AnnotationSet.dataset_id] + // * [referenceSetId][google.genomics.v1.AnnotationSet.reference_set_id] + // + // All other fields may be optionally specified, unless documented as being + // server-generated (for example, the `id` field). + CreateAnnotationSet(ctx context.Context, in *CreateAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) + // Gets an annotation set. Caller must have READ permission for + // the associated dataset. + GetAnnotationSet(ctx context.Context, in *GetAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) + // Updates an annotation set. The update must respect all mutability + // restrictions and other invariants described on the annotation set resource. + // Caller must have WRITE permission for the associated dataset. + UpdateAnnotationSet(ctx context.Context, in *UpdateAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) + // Deletes an annotation set. Caller must have WRITE permission + // for the associated annotation set. + DeleteAnnotationSet(ctx context.Context, in *DeleteAnnotationSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Searches for annotation sets that match the given criteria. Annotation sets + // are returned in an unspecified order. This order is consistent, such that + // two queries for the same content (regardless of page size) yield annotation + // sets in the same order across their respective streams of paginated + // responses. Caller must have READ permission for the queried datasets. 
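+ // A minimal, illustrative paging sketch for this call (not part of the
+ // generated API; it assumes a connected client, a context ctx, and the fmt
+ // package, and the dataset ID is hypothetical):
+ //
+ //	req := &SearchAnnotationSetsRequest{DatasetIds: []string{"my-dataset-id"}}
+ //	for {
+ //		resp, err := client.SearchAnnotationSets(ctx, req)
+ //		if err != nil {
+ //			return err
+ //		}
+ //		for _, as := range resp.AnnotationSets {
+ //			fmt.Println(as.GetId(), as.GetName())
+ //		}
+ //		if resp.NextPageToken == "" {
+ //			break
+ //		}
+ //		req.PageToken = resp.NextPageToken
+ //	}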
+ SearchAnnotationSets(ctx context.Context, in *SearchAnnotationSetsRequest, opts ...grpc.CallOption) (*SearchAnnotationSetsResponse, error) + // Creates a new annotation. Caller must have WRITE permission + // for the associated annotation set. + // + // The following fields are required: + // + // * [annotationSetId][google.genomics.v1.Annotation.annotation_set_id] + // * [referenceName][google.genomics.v1.Annotation.reference_name] or + // [referenceId][google.genomics.v1.Annotation.reference_id] + // + // ### Transcripts + // + // For annotations of type TRANSCRIPT, the following fields of + // [transcript][google.genomics.v1.Annotation.transcript] must be provided: + // + // * [exons.start][google.genomics.v1.Transcript.Exon.start] + // * [exons.end][google.genomics.v1.Transcript.Exon.end] + // + // All other fields may be optionally specified, unless documented as being + // server-generated (for example, the `id` field). The annotated + // range must be no longer than 100Mbp (mega base pairs). See the + // [Annotation resource][google.genomics.v1.Annotation] + // for additional restrictions on each field. + CreateAnnotation(ctx context.Context, in *CreateAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) + // Creates one or more new annotations atomically. All annotations must + // belong to the same annotation set. Caller must have WRITE + // permission for this annotation set. For optimal performance, batch + // positionally adjacent annotations together. + // + // If the request has a systemic issue, such as an attempt to write to + // an inaccessible annotation set, the entire RPC will fail accordingly. For + // lesser data issues, when possible an error will be isolated to the + // corresponding batch entry in the response; the remaining well formed + // annotations will be created normally. + // + // For details on the requirements for each individual annotation resource, + // see + // [CreateAnnotation][google.genomics.v1.AnnotationServiceV1.CreateAnnotation]. + BatchCreateAnnotations(ctx context.Context, in *BatchCreateAnnotationsRequest, opts ...grpc.CallOption) (*BatchCreateAnnotationsResponse, error) + // Gets an annotation. Caller must have READ permission + // for the associated annotation set. + GetAnnotation(ctx context.Context, in *GetAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) + // Updates an annotation. Caller must have + // WRITE permission for the associated dataset. + UpdateAnnotation(ctx context.Context, in *UpdateAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) + // Deletes an annotation. Caller must have WRITE permission for + // the associated annotation set. + DeleteAnnotation(ctx context.Context, in *DeleteAnnotationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Searches for annotations that match the given criteria. Results are + // ordered by genomic coordinate (by reference sequence, then position). + // Annotations with equivalent genomic coordinates are returned in an + // unspecified order. This order is consistent, such that two queries for the + // same content (regardless of page size) yield annotations in the same order + // across their respective streams of paginated responses. Caller must have + // READ permission for the queried annotation sets. 
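+ // A minimal, illustrative range-query sketch for this call (not part of the
+ // generated API; it assumes a connected client and a context ctx, and the
+ // annotation set ID, reference name, and coordinates are hypothetical):
+ //
+ //	resp, err := client.SearchAnnotations(ctx, &SearchAnnotationsRequest{
+ //		AnnotationSetIds: []string{"my-annotation-set-id"},
+ //		Reference:        &SearchAnnotationsRequest_ReferenceName{ReferenceName: "chr1"},
+ //		Start:            1000000,
+ //		End:              2000000,
+ //	})
+ //	if err != nil {
+ //		return err
+ //	}
+ //	for _, a := range resp.Annotations {
+ //		fmt.Println(a.GetId())
+ //	}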
+ SearchAnnotations(ctx context.Context, in *SearchAnnotationsRequest, opts ...grpc.CallOption) (*SearchAnnotationsResponse, error) +} + +type annotationServiceV1Client struct { + cc *grpc.ClientConn +} + +func NewAnnotationServiceV1Client(cc *grpc.ClientConn) AnnotationServiceV1Client { + return &annotationServiceV1Client{cc} +} + +func (c *annotationServiceV1Client) CreateAnnotationSet(ctx context.Context, in *CreateAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) { + out := new(AnnotationSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/CreateAnnotationSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) GetAnnotationSet(ctx context.Context, in *GetAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) { + out := new(AnnotationSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/GetAnnotationSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) UpdateAnnotationSet(ctx context.Context, in *UpdateAnnotationSetRequest, opts ...grpc.CallOption) (*AnnotationSet, error) { + out := new(AnnotationSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/UpdateAnnotationSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) DeleteAnnotationSet(ctx context.Context, in *DeleteAnnotationSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/DeleteAnnotationSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) SearchAnnotationSets(ctx context.Context, in *SearchAnnotationSetsRequest, opts ...grpc.CallOption) (*SearchAnnotationSetsResponse, error) { + out := new(SearchAnnotationSetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/SearchAnnotationSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) CreateAnnotation(ctx context.Context, in *CreateAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) { + out := new(Annotation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/CreateAnnotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) BatchCreateAnnotations(ctx context.Context, in *BatchCreateAnnotationsRequest, opts ...grpc.CallOption) (*BatchCreateAnnotationsResponse, error) { + out := new(BatchCreateAnnotationsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/BatchCreateAnnotations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) GetAnnotation(ctx context.Context, in *GetAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) { + out := new(Annotation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/GetAnnotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) UpdateAnnotation(ctx context.Context, in *UpdateAnnotationRequest, opts ...grpc.CallOption) (*Annotation, error) { + out := new(Annotation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/UpdateAnnotation", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) DeleteAnnotation(ctx context.Context, in *DeleteAnnotationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/DeleteAnnotation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *annotationServiceV1Client) SearchAnnotations(ctx context.Context, in *SearchAnnotationsRequest, opts ...grpc.CallOption) (*SearchAnnotationsResponse, error) { + out := new(SearchAnnotationsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.AnnotationServiceV1/SearchAnnotations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AnnotationServiceV1Server is the server API for AnnotationServiceV1 service. +type AnnotationServiceV1Server interface { + // Creates a new annotation set. Caller must have WRITE permission for the + // associated dataset. + // + // The following fields are required: + // + // * [datasetId][google.genomics.v1.AnnotationSet.dataset_id] + // * [referenceSetId][google.genomics.v1.AnnotationSet.reference_set_id] + // + // All other fields may be optionally specified, unless documented as being + // server-generated (for example, the `id` field). + CreateAnnotationSet(context.Context, *CreateAnnotationSetRequest) (*AnnotationSet, error) + // Gets an annotation set. Caller must have READ permission for + // the associated dataset. + GetAnnotationSet(context.Context, *GetAnnotationSetRequest) (*AnnotationSet, error) + // Updates an annotation set. The update must respect all mutability + // restrictions and other invariants described on the annotation set resource. + // Caller must have WRITE permission for the associated dataset. + UpdateAnnotationSet(context.Context, *UpdateAnnotationSetRequest) (*AnnotationSet, error) + // Deletes an annotation set. Caller must have WRITE permission + // for the associated annotation set. + DeleteAnnotationSet(context.Context, *DeleteAnnotationSetRequest) (*empty.Empty, error) + // Searches for annotation sets that match the given criteria. Annotation sets + // are returned in an unspecified order. This order is consistent, such that + // two queries for the same content (regardless of page size) yield annotation + // sets in the same order across their respective streams of paginated + // responses. Caller must have READ permission for the queried datasets. + SearchAnnotationSets(context.Context, *SearchAnnotationSetsRequest) (*SearchAnnotationSetsResponse, error) + // Creates a new annotation. Caller must have WRITE permission + // for the associated annotation set. + // + // The following fields are required: + // + // * [annotationSetId][google.genomics.v1.Annotation.annotation_set_id] + // * [referenceName][google.genomics.v1.Annotation.reference_name] or + // [referenceId][google.genomics.v1.Annotation.reference_id] + // + // ### Transcripts + // + // For annotations of type TRANSCRIPT, the following fields of + // [transcript][google.genomics.v1.Annotation.transcript] must be provided: + // + // * [exons.start][google.genomics.v1.Transcript.Exon.start] + // * [exons.end][google.genomics.v1.Transcript.Exon.end] + // + // All other fields may be optionally specified, unless documented as being + // server-generated (for example, the `id` field). The annotated + // range must be no longer than 100Mbp (mega base pairs). 
See the + // [Annotation resource][google.genomics.v1.Annotation] + // for additional restrictions on each field. + CreateAnnotation(context.Context, *CreateAnnotationRequest) (*Annotation, error) + // Creates one or more new annotations atomically. All annotations must + // belong to the same annotation set. Caller must have WRITE + // permission for this annotation set. For optimal performance, batch + // positionally adjacent annotations together. + // + // If the request has a systemic issue, such as an attempt to write to + // an inaccessible annotation set, the entire RPC will fail accordingly. For + // lesser data issues, when possible an error will be isolated to the + // corresponding batch entry in the response; the remaining well formed + // annotations will be created normally. + // + // For details on the requirements for each individual annotation resource, + // see + // [CreateAnnotation][google.genomics.v1.AnnotationServiceV1.CreateAnnotation]. + BatchCreateAnnotations(context.Context, *BatchCreateAnnotationsRequest) (*BatchCreateAnnotationsResponse, error) + // Gets an annotation. Caller must have READ permission + // for the associated annotation set. + GetAnnotation(context.Context, *GetAnnotationRequest) (*Annotation, error) + // Updates an annotation. Caller must have + // WRITE permission for the associated dataset. + UpdateAnnotation(context.Context, *UpdateAnnotationRequest) (*Annotation, error) + // Deletes an annotation. Caller must have WRITE permission for + // the associated annotation set. + DeleteAnnotation(context.Context, *DeleteAnnotationRequest) (*empty.Empty, error) + // Searches for annotations that match the given criteria. Results are + // ordered by genomic coordinate (by reference sequence, then position). + // Annotations with equivalent genomic coordinates are returned in an + // unspecified order. This order is consistent, such that two queries for the + // same content (regardless of page size) yield annotations in the same order + // across their respective streams of paginated responses. Caller must have + // READ permission for the queried annotation sets. 
+ SearchAnnotations(context.Context, *SearchAnnotationsRequest) (*SearchAnnotationsResponse, error) +} + +func RegisterAnnotationServiceV1Server(s *grpc.Server, srv AnnotationServiceV1Server) { + s.RegisterService(&_AnnotationServiceV1_serviceDesc, srv) +} + +func _AnnotationServiceV1_CreateAnnotationSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAnnotationSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).CreateAnnotationSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/CreateAnnotationSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).CreateAnnotationSet(ctx, req.(*CreateAnnotationSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_GetAnnotationSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAnnotationSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).GetAnnotationSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/GetAnnotationSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).GetAnnotationSet(ctx, req.(*GetAnnotationSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_UpdateAnnotationSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAnnotationSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).UpdateAnnotationSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/UpdateAnnotationSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).UpdateAnnotationSet(ctx, req.(*UpdateAnnotationSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_DeleteAnnotationSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAnnotationSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).DeleteAnnotationSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/DeleteAnnotationSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).DeleteAnnotationSet(ctx, req.(*DeleteAnnotationSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_SearchAnnotationSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchAnnotationSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(AnnotationServiceV1Server).SearchAnnotationSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/SearchAnnotationSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).SearchAnnotationSets(ctx, req.(*SearchAnnotationSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_CreateAnnotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAnnotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).CreateAnnotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/CreateAnnotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).CreateAnnotation(ctx, req.(*CreateAnnotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_BatchCreateAnnotations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchCreateAnnotationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).BatchCreateAnnotations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/BatchCreateAnnotations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).BatchCreateAnnotations(ctx, req.(*BatchCreateAnnotationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_GetAnnotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAnnotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).GetAnnotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/GetAnnotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).GetAnnotation(ctx, req.(*GetAnnotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_UpdateAnnotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAnnotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).UpdateAnnotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/UpdateAnnotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).UpdateAnnotation(ctx, req.(*UpdateAnnotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_DeleteAnnotation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(DeleteAnnotationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).DeleteAnnotation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/DeleteAnnotation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).DeleteAnnotation(ctx, req.(*DeleteAnnotationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AnnotationServiceV1_SearchAnnotations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchAnnotationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AnnotationServiceV1Server).SearchAnnotations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.AnnotationServiceV1/SearchAnnotations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AnnotationServiceV1Server).SearchAnnotations(ctx, req.(*SearchAnnotationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AnnotationServiceV1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.AnnotationServiceV1", + HandlerType: (*AnnotationServiceV1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateAnnotationSet", + Handler: _AnnotationServiceV1_CreateAnnotationSet_Handler, + }, + { + MethodName: "GetAnnotationSet", + Handler: _AnnotationServiceV1_GetAnnotationSet_Handler, + }, + { + MethodName: "UpdateAnnotationSet", + Handler: _AnnotationServiceV1_UpdateAnnotationSet_Handler, + }, + { + MethodName: "DeleteAnnotationSet", + Handler: _AnnotationServiceV1_DeleteAnnotationSet_Handler, + }, + { + MethodName: "SearchAnnotationSets", + Handler: _AnnotationServiceV1_SearchAnnotationSets_Handler, + }, + { + MethodName: "CreateAnnotation", + Handler: _AnnotationServiceV1_CreateAnnotation_Handler, + }, + { + MethodName: "BatchCreateAnnotations", + Handler: _AnnotationServiceV1_BatchCreateAnnotations_Handler, + }, + { + MethodName: "GetAnnotation", + Handler: _AnnotationServiceV1_GetAnnotation_Handler, + }, + { + MethodName: "UpdateAnnotation", + Handler: _AnnotationServiceV1_UpdateAnnotation_Handler, + }, + { + MethodName: "DeleteAnnotation", + Handler: _AnnotationServiceV1_DeleteAnnotation_Handler, + }, + { + MethodName: "SearchAnnotations", + Handler: _AnnotationServiceV1_SearchAnnotations_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1/annotations.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1/annotations.proto", fileDescriptor_annotations_257ef94ec8d5dfc4) +} + +var fileDescriptor_annotations_257ef94ec8d5dfc4 = []byte{ + // 2188 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x59, 0xcd, 0x6f, 0xdb, 0xc8, + 0x15, 0x0f, 0xf5, 0x69, 0x3f, 0xd9, 0x32, 0x3d, 0xf1, 0xda, 0x5a, 0x39, 0x1f, 0x0e, 0xf3, 0x65, + 0xb8, 0x89, 0xbc, 0x51, 0x80, 0x36, 0x75, 0xda, 0x74, 0x65, 0x9a, 0xb6, 0xa7, 0xab, 0x50, 0xc2, + 0x90, 0x72, 0xe1, 0x5e, 0x08, 0x86, 0x1a, 0x7b, 0x89, 0xd8, 0x94, 0x4a, 0xd2, 0x6e, 0xbc, 0xc5, + 0x02, 0x8b, 0xc5, 0x16, 0x3d, 0xf5, 0xb2, 0xbb, 0xf7, 0x5e, 0x0a, 0xb4, 0xff, 0x43, 0x2f, 0x05, + 0x7a, 0x6f, 0x0f, 0x45, 0xff, 0x83, 0x5e, 0xf6, 0xda, 0x53, 0x81, 0x5e, 0x8a, 0x19, 0x92, 0x12, + 0x45, 0x51, 0xb6, 0xdc, 
0x00, 0xbd, 0x71, 0xde, 0xbc, 0xf7, 0xe6, 0x37, 0xbf, 0x99, 0xf7, 0x31, + 0x12, 0x3c, 0x38, 0xee, 0xf5, 0x8e, 0x4f, 0xe8, 0xe6, 0x31, 0x75, 0x7a, 0xa7, 0xb6, 0xe5, 0x6d, + 0x9e, 0x3f, 0xdb, 0x34, 0x1d, 0xa7, 0xe7, 0x9b, 0xbe, 0xdd, 0x73, 0xbc, 0x5a, 0xdf, 0xed, 0xf9, + 0x3d, 0x84, 0x02, 0xad, 0x5a, 0xa4, 0x55, 0x3b, 0x7f, 0x56, 0xbd, 0x15, 0x5a, 0x9a, 0x7d, 0x7b, + 0xdc, 0xa2, 0xba, 0x1a, 0xce, 0xf2, 0xd1, 0x9b, 0xb3, 0xa3, 0x4d, 0x7a, 0xda, 0xf7, 0x2f, 0xc2, + 0xc9, 0xb5, 0xe4, 0xe4, 0x91, 0x4d, 0x4f, 0xba, 0xc6, 0xa9, 0xe9, 0xbd, 0x0d, 0x35, 0x6e, 0x25, + 0x35, 0x3c, 0xdf, 0x3d, 0xb3, 0xfc, 0x70, 0xf6, 0x4e, 0x72, 0xf6, 0x97, 0xae, 0xd9, 0xef, 0x53, + 0x37, 0x5a, 0x7c, 0x25, 0x9c, 0x77, 0xfb, 0xd6, 0xa6, 0xe7, 0x9b, 0xfe, 0x59, 0x38, 0x21, 0x7d, + 0x97, 0x81, 0xf9, 0xc6, 0x00, 0xab, 0x46, 0x7d, 0x54, 0x86, 0x8c, 0xdd, 0xad, 0x08, 0x6b, 0xc2, + 0xfa, 0x2c, 0xc9, 0xd8, 0x5d, 0x74, 0x1b, 0xa0, 0x6b, 0xfa, 0xa6, 0x47, 0x7d, 0xc3, 0xee, 0x56, + 0x32, 0x5c, 0x3e, 0x1b, 0x4a, 0x70, 0x17, 0xad, 0x83, 0xe8, 0xd2, 0x23, 0xea, 0x52, 0xc7, 0xa2, + 0x46, 0xa8, 0x94, 0xe5, 0x4a, 0xe5, 0x81, 0x5c, 0xe3, 0x9a, 0x08, 0x72, 0x8e, 0x79, 0x4a, 0x2b, + 0x39, 0x3e, 0xcb, 0xbf, 0x99, 0x73, 0xaf, 0x77, 0xe6, 0x5a, 0xd4, 0x38, 0x73, 0xed, 0x4a, 0x3e, + 0x70, 0x1e, 0x48, 0x3a, 0xae, 0x8d, 0xbe, 0x0f, 0x39, 0xff, 0xa2, 0x4f, 0x2b, 0x85, 0x35, 0x61, + 0xbd, 0x5c, 0x97, 0x6a, 0xe3, 0xa4, 0xd7, 0x86, 0xe0, 0xf5, 0x8b, 0x3e, 0x25, 0x5c, 0x1f, 0xfd, + 0x04, 0x72, 0xb6, 0x73, 0xd4, 0xab, 0x2c, 0xae, 0x65, 0xd7, 0x4b, 0xf5, 0xef, 0x5d, 0x6e, 0xa7, + 0x51, 0xbf, 0x86, 0x9d, 0xa3, 0x9e, 0xe2, 0xf8, 0xee, 0x05, 0xe1, 0x86, 0x55, 0x0d, 0x66, 0x07, + 0x22, 0x24, 0x42, 0xf6, 0x2d, 0xbd, 0x08, 0x29, 0x61, 0x9f, 0xe8, 0x23, 0xc8, 0x9f, 0x9b, 0x27, + 0x67, 0x94, 0xd3, 0x51, 0xaa, 0x57, 0xa3, 0x05, 0x22, 0xfa, 0x6b, 0x4d, 0xdb, 0xf3, 0x0f, 0x98, + 0x06, 0x09, 0x14, 0xb7, 0x32, 0x2f, 0x04, 0xe9, 0xaf, 0x39, 0x80, 0xe1, 0xb2, 0x63, 0x44, 0x6f, + 0xc0, 0xe2, 0xf0, 0xd6, 0x18, 0x23, 0x7c, 0x2f, 0x98, 0x71, 0xb4, 0x31, 0x2e, 0xb3, 0x31, 0x2e, + 0xef, 0xc1, 0xdc, 0xf0, 0x24, 0xec, 0x6e, 0xc8, 0x73, 0x69, 0x20, 0xc3, 0x5d, 0xf4, 0x10, 0x86, + 0x87, 0x62, 0x70, 0x07, 0x01, 0xe5, 0xf3, 0x03, 0xa9, 0xca, 0x3c, 0x2d, 0x41, 0xde, 0xf3, 0x4d, + 0xd7, 0xe7, 0xbc, 0x67, 0x49, 0x30, 0x60, 0x34, 0x50, 0xa7, 0x5b, 0x29, 0x72, 0x19, 0xfb, 0x0c, + 0xdc, 0x9d, 0x53, 0xd7, 0xa3, 0x86, 0xe7, 0xbb, 0xa6, 0xd3, 0xad, 0xcc, 0xac, 0x09, 0xeb, 0x33, + 0xcc, 0x1d, 0x97, 0x6a, 0x5c, 0x38, 0x38, 0xc5, 0xd9, 0x6b, 0x9e, 0x62, 0x03, 0x8a, 0xe7, 0xa6, + 0x6b, 0x9b, 0x8e, 0x5f, 0x01, 0xce, 0xf3, 0xc3, 0x34, 0xd3, 0x83, 0x40, 0x65, 0xe8, 0x61, 0xff, + 0x06, 0x89, 0xec, 0xd0, 0xc7, 0x00, 0x0c, 0x83, 0x67, 0xb9, 0x76, 0xdf, 0xaf, 0x94, 0xb8, 0x97, + 0x3b, 0x69, 0x5e, 0xf4, 0x81, 0xd6, 0xfe, 0x0d, 0x12, 0xb3, 0x41, 0x3f, 0x0a, 0xaf, 0xd2, 0x1c, + 0xbf, 0x4a, 0xeb, 0x97, 0x83, 0xff, 0xbf, 0xdc, 0xa3, 0xed, 0x62, 0x68, 0x25, 0xfd, 0x67, 0x16, + 0x16, 0xc7, 0xb6, 0x8f, 0x5e, 0x85, 0x74, 0x0b, 0x9c, 0xee, 0x8d, 0xa9, 0x38, 0xab, 0xc5, 0x68, + 0xdf, 0x81, 0x02, 0x3d, 0x3a, 0xa2, 0x96, 0xcf, 0x51, 0x95, 0xeb, 0x4f, 0xa6, 0xf3, 0xa0, 0x70, + 0x1b, 0x12, 0xda, 0xa2, 0xc7, 0xb0, 0x60, 0x9e, 0xf8, 0xd4, 0x75, 0x4c, 0x9f, 0x1a, 0x6f, 0x4c, + 0x8f, 0x7a, 0x51, 0x5a, 0x18, 0x88, 0xb7, 0x99, 0x14, 0xad, 0x40, 0xf1, 0x98, 0x3a, 0xb1, 0x1b, + 0x5b, 0x60, 0xc3, 0xe0, 0xb2, 0x0e, 0xcf, 0xc1, 0xb0, 0xbb, 0x5e, 0x25, 0xbf, 0x96, 0x65, 0x97, + 0x75, 0x28, 0xc5, 0x5d, 0x0f, 0xfd, 0x0c, 0xc0, 0xea, 0x39, 0x5d, 0x9b, 0xe7, 0xda, 0x4a, 0x81, + 0x1f, 0xd3, 0x0f, 0xa6, 0x83, 0x2c, 0x9f, 0xd8, 
0x8e, 0x6d, 0x99, 0x27, 0x72, 0x64, 0x4f, 0x62, + 0xae, 0x50, 0x0f, 0x3e, 0xb0, 0x42, 0x05, 0xc3, 0xb3, 0x8f, 0x1d, 0xfb, 0xc8, 0xb6, 0x4c, 0xc7, + 0xa2, 0x3c, 0x02, 0xca, 0xf5, 0xad, 0xeb, 0xad, 0xa1, 0xc5, 0x3c, 0x90, 0x25, 0x2b, 0x45, 0x5a, + 0xfd, 0xbd, 0x00, 0x8b, 0x63, 0x90, 0x58, 0x30, 0xb2, 0x48, 0xf5, 0x2a, 0x02, 0xdf, 0x7d, 0x30, + 0x40, 0x0d, 0x98, 0xa3, 0xef, 0x38, 0x8f, 0x27, 0x9c, 0x9a, 0x0c, 0xdf, 0x77, 0xea, 0xd5, 0x56, + 0x42, 0x3d, 0xdc, 0x25, 0x25, 0x3a, 0xf8, 0xf6, 0x58, 0xee, 0xb5, 0x7a, 0x8e, 0x45, 0xfb, 0xb1, + 0x9c, 0x3d, 0x1b, 0x4a, 0x70, 0x97, 0x9d, 0x4b, 0xef, 0xd4, 0x3e, 0x8d, 0x9d, 0x0b, 0x1b, 0xe2, + 0xae, 0xf4, 0x39, 0xe4, 0xd8, 0x6d, 0x41, 0x4b, 0x20, 0xea, 0x87, 0x6d, 0xc5, 0xe8, 0xa8, 0x5a, + 0x5b, 0x91, 0xf1, 0x2e, 0x56, 0x76, 0xc4, 0x1b, 0xa8, 0x0c, 0xc0, 0xa5, 0x2d, 0x7d, 0x5f, 0x21, + 0xa2, 0x80, 0xe6, 0x61, 0x16, 0xab, 0x9a, 0x42, 0x74, 0xdc, 0x52, 0xc5, 0x0c, 0x9a, 0x83, 0x99, + 0x1d, 0xa5, 0xa9, 0xf0, 0x51, 0x16, 0x89, 0x30, 0xa7, 0x75, 0xb6, 0x35, 0x1d, 0xeb, 0x1d, 0x2e, + 0xc9, 0xa1, 0x22, 0x64, 0x35, 0xb5, 0x2d, 0xe6, 0x99, 0x1f, 0x4d, 0x27, 0x1d, 0x59, 0xef, 0x90, + 0x46, 0x53, 0x2c, 0xb0, 0x09, 0x59, 0x3d, 0x10, 0x8b, 0xd2, 0x5f, 0x04, 0x28, 0x04, 0x77, 0x0d, + 0x2d, 0x03, 0x52, 0x76, 0x77, 0x15, 0x59, 0x4f, 0x60, 0x10, 0x61, 0x2e, 0x94, 0x47, 0x28, 0xca, + 0x00, 0xbb, 0xa4, 0xf1, 0x5a, 0xd1, 0xf6, 0xf1, 0xae, 0x2e, 0x66, 0x50, 0x15, 0x96, 0xf9, 0xd8, + 0x68, 0x13, 0x45, 0x53, 0xc8, 0x01, 0x56, 0xf7, 0x0c, 0xac, 0xee, 0x28, 0x4d, 0x31, 0x8b, 0x10, + 0x94, 0xb5, 0x43, 0xb5, 0xa5, 0x1e, 0xbe, 0x6e, 0x75, 0x34, 0x83, 0xa1, 0xc9, 0xa1, 0x0f, 0x60, + 0x51, 0x6d, 0xa9, 0x09, 0x71, 0x9e, 0x6d, 0x4e, 0xd3, 0x5b, 0x6d, 0x63, 0xaf, 0x81, 0x55, 0xb1, + 0x30, 0x18, 0x36, 0x5b, 0x9a, 0x26, 0x16, 0xd9, 0x22, 0x5a, 0xbb, 0x89, 0x65, 0xc5, 0xd0, 0xb0, + 0xae, 0x18, 0x3b, 0x58, 0x23, 0x9d, 0x36, 0xdf, 0xe7, 0x8c, 0xf4, 0xe7, 0x0c, 0x2c, 0xa5, 0x5d, + 0x0d, 0xf4, 0x10, 0xee, 0xc9, 0x4d, 0xac, 0x62, 0xb9, 0xd1, 0x34, 0x34, 0xbc, 0xa7, 0xe2, 0x5d, + 0x2c, 0x37, 0x54, 0x39, 0x49, 0xf3, 0x5d, 0x58, 0x4d, 0x57, 0x8b, 0xf1, 0xde, 0x51, 0x65, 0x85, + 0xe8, 0x0c, 0x5a, 0x06, 0x01, 0x14, 0xb6, 0x15, 0x15, 0xef, 0x31, 0xd6, 0x17, 0x61, 0xbe, 0x89, + 0x3f, 0x51, 0x9a, 0x87, 0x46, 0x28, 0xe2, 0xfb, 0x0b, 0x45, 0xed, 0x86, 0xbe, 0xdf, 0xda, 0x53, + 0x54, 0x2c, 0x07, 0x87, 0x10, 0x1b, 0x17, 0x98, 0xe5, 0x0e, 0xe9, 0xec, 0x19, 0x44, 0xd1, 0xda, + 0x2d, 0x55, 0x53, 0xc4, 0x22, 0x3b, 0x83, 0x7d, 0xac, 0xe9, 0x2d, 0xb9, 0xf5, 0xba, 0xdd, 0xd0, + 0xf1, 0x36, 0x6e, 0x62, 0xfd, 0x50, 0x9c, 0x41, 0x2b, 0x70, 0x53, 0x6e, 0xa9, 0xbb, 0x0a, 0xd1, + 0x0c, 0x4d, 0x51, 0x35, 0xac, 0xe3, 0x03, 0x36, 0x31, 0x8b, 0x16, 0xa0, 0x44, 0xb0, 0xf6, 0x89, + 0xb1, 0xdb, 0x90, 0xf5, 0x16, 0x11, 0x81, 0x09, 0x1a, 0x9a, 0xd6, 0x92, 0x71, 0x83, 0x73, 0x53, + 0xe2, 0xab, 0x92, 0x96, 0xae, 0xc8, 0x3a, 0x3e, 0x50, 0xc4, 0x39, 0x06, 0xee, 0x75, 0xa7, 0xa9, + 0xe3, 0x76, 0x53, 0x31, 0x88, 0xd2, 0x6e, 0x11, 0x5d, 0xd9, 0x11, 0xe7, 0xa5, 0x7f, 0x64, 0x00, + 0x86, 0x69, 0x3b, 0x9e, 0x47, 0x84, 0x91, 0x3c, 0xf2, 0x43, 0xc8, 0xd3, 0x77, 0x2c, 0x37, 0x04, + 0x31, 0x72, 0xff, 0xf2, 0xf4, 0x5f, 0x53, 0xde, 0xf5, 0x1c, 0x12, 0x58, 0xa0, 0x03, 0x58, 0xb0, + 0x7a, 0x5d, 0xdb, 0x39, 0x36, 0x3c, 0xfa, 0x8b, 0x33, 0x56, 0x1f, 0x79, 0x9c, 0x94, 0xea, 0x4f, + 0xaf, 0x70, 0x22, 0x73, 0x2b, 0x2d, 0x34, 0x22, 0x65, 0x6b, 0x64, 0x5c, 0x35, 0x21, 0xc7, 0x96, + 0x19, 0x16, 0x5a, 0x21, 0xa5, 0xd0, 0x66, 0x86, 0x85, 0xf6, 0x19, 0xe4, 0x8f, 0xdc, 0xa8, 0xde, + 0x97, 0xea, 0xab, 0x63, 0x75, 0x02, 0x3b, 0xfe, 0xf3, 0x7a, 0x58, 0x28, 
0xb8, 0x66, 0xf5, 0x05, + 0x94, 0x47, 0x41, 0x4c, 0xbb, 0x98, 0xf4, 0x63, 0x80, 0x61, 0xca, 0x40, 0x77, 0xa1, 0x14, 0x76, + 0x68, 0xbc, 0x5f, 0x08, 0xa8, 0x0d, 0x9b, 0x36, 0xde, 0x2c, 0x04, 0x6d, 0x4c, 0x26, 0x6a, 0x63, + 0xa4, 0x23, 0xa8, 0xca, 0x2e, 0x35, 0x7d, 0x3a, 0xd2, 0x61, 0x11, 0x86, 0xc2, 0xf3, 0xd1, 0x3e, + 0x94, 0x47, 0x9b, 0x1c, 0xee, 0xb1, 0x54, 0xbf, 0x77, 0x65, 0x8f, 0x46, 0xe6, 0x47, 0x9a, 0x20, + 0x49, 0x81, 0x95, 0x3d, 0xea, 0xa7, 0x2e, 0x92, 0xda, 0x49, 0x09, 0xa9, 0x9d, 0x94, 0xf4, 0x37, + 0x01, 0xaa, 0x9d, 0x7e, 0x77, 0x12, 0xde, 0x6b, 0xb8, 0x4a, 0xd9, 0x5b, 0xe6, 0x7f, 0xdb, 0x1b, + 0x7a, 0x09, 0xa5, 0x33, 0x8e, 0x89, 0xbf, 0x00, 0xc2, 0x53, 0x1f, 0xef, 0x0e, 0x76, 0xd9, 0x23, + 0xe1, 0xb5, 0xe9, 0xbd, 0x25, 0x10, 0xa8, 0xb3, 0x6f, 0x69, 0x1f, 0xaa, 0x3b, 0xf4, 0x84, 0xbe, + 0xff, 0x86, 0xa4, 0x7f, 0x09, 0xb0, 0xaa, 0x51, 0xd3, 0xb5, 0x3e, 0x1d, 0x71, 0xe5, 0x45, 0xbe, + 0xee, 0x42, 0x69, 0xf8, 0x34, 0x88, 0x0a, 0x14, 0x0c, 0xde, 0x06, 0x5e, 0xea, 0xe3, 0x20, 0x73, + 0xe9, 0xe3, 0x20, 0xde, 0xd0, 0xbe, 0x80, 0x3c, 0x6b, 0x48, 0xbc, 0x4a, 0x6e, 0x2d, 0x3b, 0x65, + 0xe3, 0x18, 0x18, 0xb0, 0xd2, 0xd6, 0x37, 0x8f, 0xa9, 0xe1, 0xf7, 0xde, 0x52, 0x27, 0x7a, 0x56, + 0x30, 0x89, 0xce, 0x04, 0x68, 0x15, 0xf8, 0xc0, 0xf0, 0xec, 0xcf, 0x82, 0xb7, 0x45, 0x9e, 0xcc, + 0x30, 0x81, 0x66, 0x7f, 0x46, 0xa5, 0xaf, 0x05, 0xb8, 0x95, 0xbe, 0x69, 0xaf, 0xdf, 0x73, 0x3c, + 0x8a, 0x7e, 0x0a, 0x0b, 0xa3, 0x0c, 0x06, 0x3b, 0x9f, 0xea, 0x9c, 0xcb, 0x23, 0x14, 0x7b, 0xe8, + 0x11, 0x2c, 0x38, 0xf4, 0x9d, 0x6f, 0xc4, 0xd0, 0x06, 0xfc, 0xcc, 0x33, 0x71, 0x3b, 0x42, 0x2c, + 0x1d, 0xc2, 0x4a, 0x32, 0xa8, 0xa2, 0x43, 0x78, 0x05, 0x30, 0x74, 0x1a, 0x46, 0xd3, 0x9d, 0xcb, + 0x91, 0x90, 0x98, 0x85, 0xf4, 0x85, 0x00, 0xb7, 0xb7, 0x4d, 0xdf, 0xfa, 0x34, 0xb9, 0xc0, 0xe0, + 0x98, 0x3f, 0x86, 0x52, 0xec, 0x39, 0x1b, 0x6e, 0xf6, 0xaa, 0x25, 0xe2, 0x26, 0xec, 0x3c, 0xdc, + 0xc0, 0x59, 0xec, 0x0d, 0x19, 0x4a, 0x70, 0x57, 0xfa, 0x4e, 0x80, 0x3b, 0x93, 0x20, 0x84, 0xa4, + 0x13, 0x28, 0x52, 0xc7, 0x77, 0x6d, 0x1a, 0xad, 0xff, 0x22, 0x6d, 0xfd, 0xcb, 0x9d, 0xd4, 0x82, + 0xce, 0x3c, 0x72, 0x54, 0xf5, 0x20, 0x1f, 0x34, 0xe6, 0x1b, 0x50, 0x08, 0x1e, 0xc5, 0x21, 0x7d, + 0x28, 0xf2, 0xed, 0xf6, 0xad, 0x9a, 0xc6, 0x67, 0x48, 0xa8, 0x91, 0xa0, 0x3b, 0x73, 0x6d, 0xba, + 0x5f, 0xc2, 0xd2, 0x48, 0xda, 0x8a, 0x48, 0xbe, 0x0f, 0xb1, 0x1c, 0x30, 0x8c, 0xc9, 0xb9, 0xa1, + 0x10, 0x77, 0xa5, 0x3f, 0x09, 0xb0, 0x92, 0x4c, 0x56, 0xd7, 0x71, 0xf0, 0xbe, 0xe8, 0xdf, 0x2f, + 0x31, 0xbd, 0x82, 0x95, 0x64, 0x62, 0xba, 0xd6, 0xee, 0x7f, 0x93, 0x81, 0x4a, 0x32, 0x32, 0x07, + 0x97, 0xf4, 0x09, 0xa0, 0xb1, 0xbc, 0x16, 0xa5, 0x24, 0x31, 0x91, 0xd8, 0x3c, 0x74, 0x3f, 0xf1, + 0x56, 0xe6, 0x57, 0x72, 0xff, 0xc6, 0xe8, 0x6b, 0xf9, 0xf1, 0xd8, 0x6b, 0x39, 0x1b, 0xaa, 0x4d, + 0x7a, 0x2f, 0xe7, 0x52, 0x2a, 0x6b, 0x7e, 0x58, 0xc6, 0x47, 0xd3, 0x52, 0xe1, 0xd2, 0xb4, 0x54, + 0x1c, 0x4d, 0x4b, 0xdb, 0x25, 0x98, 0x1d, 0x2c, 0x2a, 0xfd, 0x5a, 0x80, 0x0f, 0x53, 0x98, 0x08, + 0x63, 0xe5, 0xfd, 0xe3, 0x75, 0xca, 0xb4, 0xb4, 0x41, 0xa1, 0x3c, 0x9a, 0x80, 0x59, 0x5f, 0xda, + 0x50, 0xd5, 0x96, 0xce, 0x7b, 0x39, 0x23, 0xe5, 0x7d, 0x50, 0x82, 0xe2, 0x9e, 0xa2, 0x2a, 0x04, + 0xcb, 0xa2, 0xc0, 0x06, 0x07, 0x0d, 0x82, 0x1b, 0x2a, 0xeb, 0xc9, 0x67, 0x20, 0xc7, 0x66, 0xc4, + 0x2c, 0x7f, 0x43, 0x90, 0x86, 0xaa, 0xc9, 0x04, 0xb7, 0x75, 0x31, 0x57, 0xff, 0x6a, 0x1e, 0x6e, + 0xc6, 0xf3, 0xa8, 0x7b, 0x6e, 0x5b, 0xf4, 0xe0, 0x19, 0xfa, 0x56, 0x80, 0x9b, 0x29, 0xbd, 0x06, + 0xaa, 0xa5, 0xed, 0x75, 0x72, 0x53, 0x52, 0xbd, 0x3a, 0x71, 0x4b, 0x1b, 0x5f, 0xfe, 0xfd, 0x9f, + 
0xdf, 0x64, 0x1e, 0x48, 0x28, 0xf1, 0x73, 0x20, 0xf5, 0xbd, 0xad, 0x44, 0xd5, 0x47, 0x5f, 0x0b, + 0x20, 0x26, 0x5b, 0x13, 0x94, 0xfa, 0x23, 0xd4, 0x84, 0x06, 0x66, 0x1a, 0x40, 0x35, 0x0e, 0x68, + 0x1d, 0x3d, 0x1a, 0x07, 0xb4, 0xf9, 0xab, 0xb1, 0x48, 0xf8, 0x1c, 0xfd, 0x51, 0x80, 0x9b, 0x29, + 0x7d, 0x4e, 0x3a, 0x57, 0x93, 0x1b, 0xa2, 0x69, 0xa0, 0xbd, 0xe2, 0xd0, 0x5e, 0x54, 0xa7, 0x84, + 0x36, 0xc6, 0xdf, 0x6f, 0x05, 0xb8, 0x99, 0xd2, 0xc1, 0xa4, 0x43, 0x9d, 0xdc, 0xea, 0x54, 0x97, + 0xc7, 0xf2, 0x92, 0x72, 0xda, 0xf7, 0x2f, 0x22, 0xea, 0x36, 0xa6, 0xa5, 0xee, 0x77, 0x02, 0x2c, + 0xa5, 0x75, 0x04, 0x68, 0x33, 0x0d, 0xd0, 0x25, 0x0d, 0x53, 0xf5, 0xa3, 0xe9, 0x0d, 0x82, 0x58, + 0x96, 0x1e, 0x70, 0xac, 0x77, 0xa4, 0x0f, 0x53, 0xb0, 0x7a, 0xdc, 0x70, 0x4b, 0xd8, 0x40, 0x5f, + 0x09, 0x20, 0x26, 0xef, 0x77, 0xfa, 0x8d, 0x9b, 0xd0, 0x45, 0x54, 0xaf, 0x48, 0x0f, 0xd2, 0x7d, + 0x8e, 0xe3, 0xb6, 0xb4, 0x90, 0xc0, 0xb1, 0x15, 0xaf, 0x0e, 0x7f, 0x10, 0x60, 0x39, 0xbd, 0x04, + 0xa3, 0x67, 0xd7, 0x29, 0xd7, 0x01, 0xa4, 0xfa, 0xf5, 0x2b, 0xbc, 0xf4, 0x88, 0xc3, 0x5c, 0x93, + 0x56, 0x93, 0x30, 0xdf, 0x0c, 0xed, 0x18, 0x61, 0x5f, 0x0a, 0x30, 0x3f, 0x12, 0x7c, 0x68, 0xfd, + 0xca, 0xf8, 0x9c, 0x96, 0xaa, 0xc7, 0x1c, 0xc3, 0x3d, 0x74, 0x37, 0x81, 0x61, 0xe4, 0x6e, 0xb1, + 0x7b, 0xf5, 0xad, 0x00, 0x62, 0x32, 0xd2, 0xd2, 0x4f, 0x6d, 0x42, 0xcd, 0xbf, 0x12, 0xca, 0x73, + 0x0e, 0xe5, 0x69, 0xf5, 0x2a, 0x28, 0x23, 0xa7, 0xf8, 0x85, 0x00, 0x62, 0x32, 0xaa, 0xd2, 0x61, + 0x4d, 0xa8, 0xe6, 0x13, 0x03, 0x2f, 0x64, 0x66, 0xe3, 0x4a, 0x66, 0xbe, 0x11, 0x60, 0x71, 0xac, + 0xbe, 0xa1, 0x27, 0xd3, 0x44, 0xcf, 0xe0, 0xfa, 0x3c, 0x9d, 0x52, 0x3b, 0xbc, 0x39, 0xf7, 0x38, + 0xb6, 0x55, 0x69, 0x39, 0x89, 0x6d, 0x10, 0x65, 0xdb, 0x6f, 0x61, 0xd9, 0xea, 0x9d, 0xa6, 0xb8, + 0xdd, 0x16, 0x63, 0x1e, 0xdb, 0x6c, 0xcf, 0x6d, 0xe1, 0xe7, 0x5b, 0x91, 0x5e, 0xef, 0xc4, 0x74, + 0x8e, 0x6b, 0x3d, 0xf7, 0x78, 0xf3, 0x98, 0x3a, 0x9c, 0x91, 0xcd, 0x60, 0xca, 0xec, 0xdb, 0x5e, + 0xfc, 0x6f, 0xa6, 0x97, 0xd1, 0xf7, 0xbf, 0x05, 0xe1, 0x4d, 0x81, 0x6b, 0x3e, 0xff, 0x6f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xf4, 0x94, 0x75, 0x44, 0x8f, 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go new file mode 100644 index 0000000..012ec90 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/cigar.pb.go @@ -0,0 +1,200 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/cigar.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes the different types of CIGAR alignment operations that exist. +// Used wherever CIGAR alignments are used. 
+type CigarUnit_Operation int32 + +const ( + CigarUnit_OPERATION_UNSPECIFIED CigarUnit_Operation = 0 + // An alignment match indicates that a sequence can be aligned to the + // reference without evidence of an INDEL. Unlike the + // `SEQUENCE_MATCH` and `SEQUENCE_MISMATCH` operators, + // the `ALIGNMENT_MATCH` operator does not indicate whether the + // reference and read sequences are an exact match. This operator is + // equivalent to SAM's `M`. + CigarUnit_ALIGNMENT_MATCH CigarUnit_Operation = 1 + // The insert operator indicates that the read contains evidence of bases + // being inserted into the reference. This operator is equivalent to SAM's + // `I`. + CigarUnit_INSERT CigarUnit_Operation = 2 + // The delete operator indicates that the read contains evidence of bases + // being deleted from the reference. This operator is equivalent to SAM's + // `D`. + CigarUnit_DELETE CigarUnit_Operation = 3 + // The skip operator indicates that this read skips a long segment of the + // reference, but the bases have not been deleted. This operator is commonly + // used when working with RNA-seq data, where reads may skip long segments + // of the reference between exons. This operator is equivalent to SAM's + // `N`. + CigarUnit_SKIP CigarUnit_Operation = 4 + // The soft clip operator indicates that bases at the start/end of a read + // have not been considered during alignment. This may occur if the majority + // of a read maps, except for low quality bases at the start/end of a read. + // This operator is equivalent to SAM's `S`. Bases that are soft + // clipped will still be stored in the read. + CigarUnit_CLIP_SOFT CigarUnit_Operation = 5 + // The hard clip operator indicates that bases at the start/end of a read + // have been omitted from this alignment. This may occur if this linear + // alignment is part of a chimeric alignment, or if the read has been + // trimmed (for example, during error correction or to trim poly-A tails for + // RNA-seq). This operator is equivalent to SAM's `H`. + CigarUnit_CLIP_HARD CigarUnit_Operation = 6 + // The pad operator indicates that there is padding in an alignment. This + // operator is equivalent to SAM's `P`. + CigarUnit_PAD CigarUnit_Operation = 7 + // This operator indicates that this portion of the aligned sequence exactly + // matches the reference. This operator is equivalent to SAM's `=`. + CigarUnit_SEQUENCE_MATCH CigarUnit_Operation = 8 + // This operator indicates that this portion of the aligned sequence is an + // alignment match to the reference, but a sequence mismatch. This can + // indicate a SNP or a read error. This operator is equivalent to SAM's + // `X`. + CigarUnit_SEQUENCE_MISMATCH CigarUnit_Operation = 9 +) + +var CigarUnit_Operation_name = map[int32]string{ + 0: "OPERATION_UNSPECIFIED", + 1: "ALIGNMENT_MATCH", + 2: "INSERT", + 3: "DELETE", + 4: "SKIP", + 5: "CLIP_SOFT", + 6: "CLIP_HARD", + 7: "PAD", + 8: "SEQUENCE_MATCH", + 9: "SEQUENCE_MISMATCH", +} +var CigarUnit_Operation_value = map[string]int32{ + "OPERATION_UNSPECIFIED": 0, + "ALIGNMENT_MATCH": 1, + "INSERT": 2, + "DELETE": 3, + "SKIP": 4, + "CLIP_SOFT": 5, + "CLIP_HARD": 6, + "PAD": 7, + "SEQUENCE_MATCH": 8, + "SEQUENCE_MISMATCH": 9, +} + +func (x CigarUnit_Operation) String() string { + return proto.EnumName(CigarUnit_Operation_name, int32(x)) +} +func (CigarUnit_Operation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cigar_2258d7e5d1ecd4a5, []int{0, 0} +} + +// A single CIGAR operation. 
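The operation values above correspond one-to-one to the single-character SAM CIGAR operators named in their comments. A minimal sketch of rendering a compact CIGAR string from the CigarUnit message defined just below (the samOp map, renderCigar helper, and main program are illustrative and not part of the generated package; the import path is the one declared at the top of this file):

package main

import (
    "fmt"
    "strings"

    genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// samOp maps each CigarUnit_Operation value to the SAM operator character
// mentioned in its doc comment.
var samOp = map[genomics.CigarUnit_Operation]string{
    genomics.CigarUnit_ALIGNMENT_MATCH:   "M",
    genomics.CigarUnit_INSERT:            "I",
    genomics.CigarUnit_DELETE:            "D",
    genomics.CigarUnit_SKIP:              "N",
    genomics.CigarUnit_CLIP_SOFT:         "S",
    genomics.CigarUnit_CLIP_HARD:         "H",
    genomics.CigarUnit_PAD:               "P",
    genomics.CigarUnit_SEQUENCE_MATCH:    "=",
    genomics.CigarUnit_SEQUENCE_MISMATCH: "X",
}

// renderCigar joins a slice of CigarUnit messages into a compact CIGAR
// string such as "76M2I24M".
func renderCigar(units []*genomics.CigarUnit) string {
    var b strings.Builder
    for _, u := range units {
        fmt.Fprintf(&b, "%d%s", u.GetOperationLength(), samOp[u.GetOperation()])
    }
    return b.String()
}

func main() {
    units := []*genomics.CigarUnit{
        {Operation: genomics.CigarUnit_ALIGNMENT_MATCH, OperationLength: 76},
        {Operation: genomics.CigarUnit_INSERT, OperationLength: 2},
    }
    fmt.Println(renderCigar(units)) // prints "76M2I"
}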
+type CigarUnit struct { + Operation CigarUnit_Operation `protobuf:"varint,1,opt,name=operation,proto3,enum=google.genomics.v1.CigarUnit_Operation" json:"operation,omitempty"` + // The number of genomic bases that the operation runs for. Required. + OperationLength int64 `protobuf:"varint,2,opt,name=operation_length,json=operationLength,proto3" json:"operation_length,omitempty"` + // `referenceSequence` is only used at mismatches + // (`SEQUENCE_MISMATCH`) and deletions (`DELETE`). + // Filling this field replaces SAM's MD tag. If the relevant information is + // not available, this field is unset. + ReferenceSequence string `protobuf:"bytes,3,opt,name=reference_sequence,json=referenceSequence,proto3" json:"reference_sequence,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CigarUnit) Reset() { *m = CigarUnit{} } +func (m *CigarUnit) String() string { return proto.CompactTextString(m) } +func (*CigarUnit) ProtoMessage() {} +func (*CigarUnit) Descriptor() ([]byte, []int) { + return fileDescriptor_cigar_2258d7e5d1ecd4a5, []int{0} +} +func (m *CigarUnit) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CigarUnit.Unmarshal(m, b) +} +func (m *CigarUnit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CigarUnit.Marshal(b, m, deterministic) +} +func (dst *CigarUnit) XXX_Merge(src proto.Message) { + xxx_messageInfo_CigarUnit.Merge(dst, src) +} +func (m *CigarUnit) XXX_Size() int { + return xxx_messageInfo_CigarUnit.Size(m) +} +func (m *CigarUnit) XXX_DiscardUnknown() { + xxx_messageInfo_CigarUnit.DiscardUnknown(m) +} + +var xxx_messageInfo_CigarUnit proto.InternalMessageInfo + +func (m *CigarUnit) GetOperation() CigarUnit_Operation { + if m != nil { + return m.Operation + } + return CigarUnit_OPERATION_UNSPECIFIED +} + +func (m *CigarUnit) GetOperationLength() int64 { + if m != nil { + return m.OperationLength + } + return 0 +} + +func (m *CigarUnit) GetReferenceSequence() string { + if m != nil { + return m.ReferenceSequence + } + return "" +} + +func init() { + proto.RegisterType((*CigarUnit)(nil), "google.genomics.v1.CigarUnit") + proto.RegisterEnum("google.genomics.v1.CigarUnit_Operation", CigarUnit_Operation_name, CigarUnit_Operation_value) +} + +func init() { + proto.RegisterFile("google/genomics/v1/cigar.proto", fileDescriptor_cigar_2258d7e5d1ecd4a5) +} + +var fileDescriptor_cigar_2258d7e5d1ecd4a5 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0xcf, 0x0e, 0x93, 0x30, + 0x1c, 0xb6, 0x63, 0x6e, 0xe3, 0x97, 0xb8, 0x75, 0x35, 0x33, 0xd3, 0x18, 0xb3, 0xec, 0xe2, 0x3c, + 0x08, 0x99, 0xde, 0xf4, 0xc4, 0xa0, 0x73, 0x8d, 0x0c, 0x10, 0xd8, 0xc5, 0x0b, 0x41, 0x52, 0x91, + 0x64, 0x6b, 0x11, 0x70, 0xaf, 0xe5, 0x1b, 0xf9, 0x1c, 0x1e, 0x0d, 0x30, 0x98, 0x89, 0xde, 0xbe, + 0x7e, 0xff, 0x9a, 0xfc, 0x3e, 0x78, 0x91, 0x4a, 0x99, 0x9e, 0xb9, 0x9e, 0x72, 0x21, 0x2f, 0x59, + 0x52, 0xea, 0xd7, 0xad, 0x9e, 0x64, 0x69, 0x5c, 0x68, 0x79, 0x21, 0x2b, 0x49, 0x48, 0xab, 0x6b, + 0x9d, 0xae, 0x5d, 0xb7, 0xcf, 0x9e, 0xdf, 0x32, 0x71, 0x9e, 0xe9, 0xb1, 0x10, 0xb2, 0x8a, 0xab, + 0x4c, 0x8a, 0xb2, 0x4d, 0xac, 0x7f, 0x0d, 0x40, 0x35, 0xeb, 0x86, 0x93, 0xc8, 0x2a, 0x42, 0x41, + 0x95, 0x39, 0x2f, 0x1a, 0xc7, 0x12, 0xad, 0xd0, 0x66, 0xfa, 0xe6, 0xa5, 0xf6, 0x6f, 0xa7, 0xd6, + 0x27, 0x34, 0xb7, 0xb3, 0xfb, 0xf7, 0x24, 0x79, 0x05, 0xb8, 0x7f, 0x44, 0x67, 0x2e, 0xd2, 0xea, + 0xdb, 0x72, 0xb0, 0x42, 0x1b, 0xc5, 
0x9f, 0xf5, 0xbc, 0xdd, 0xd0, 0xe4, 0x35, 0x90, 0x82, 0x7f, + 0xe5, 0x05, 0x17, 0x09, 0x8f, 0x4a, 0xfe, 0xfd, 0x47, 0x0d, 0x96, 0xca, 0x0a, 0x6d, 0x54, 0x7f, + 0xde, 0x2b, 0xc1, 0x4d, 0x58, 0xff, 0x44, 0xa0, 0xf6, 0x5f, 0x92, 0xa7, 0xb0, 0x70, 0x3d, 0xea, + 0x1b, 0x21, 0x73, 0x9d, 0xe8, 0xe4, 0x04, 0x1e, 0x35, 0xd9, 0x9e, 0x51, 0x0b, 0x3f, 0x20, 0x8f, + 0x61, 0x66, 0xd8, 0xec, 0x83, 0x73, 0xa4, 0x4e, 0x18, 0x1d, 0x8d, 0xd0, 0x3c, 0x60, 0x44, 0x00, + 0x46, 0xcc, 0x09, 0xa8, 0x1f, 0xe2, 0x41, 0x8d, 0x2d, 0x6a, 0xd3, 0x90, 0x62, 0x85, 0x4c, 0x60, + 0x18, 0x7c, 0x64, 0x1e, 0x1e, 0x92, 0x47, 0xa0, 0x9a, 0x36, 0xf3, 0xa2, 0xc0, 0xdd, 0x87, 0xf8, + 0x61, 0xff, 0x3c, 0x18, 0xbe, 0x85, 0x47, 0x64, 0x0c, 0x8a, 0x67, 0x58, 0x78, 0x4c, 0x08, 0x4c, + 0x03, 0xfa, 0xe9, 0x44, 0x1d, 0x93, 0xde, 0xca, 0x27, 0x64, 0x01, 0xf3, 0x3b, 0xc7, 0x82, 0x96, + 0x56, 0x77, 0x1c, 0x9e, 0x24, 0xf2, 0xf2, 0x9f, 0x23, 0xee, 0xa0, 0xb9, 0xa2, 0x57, 0xcf, 0xe0, + 0xa1, 0xcf, 0xef, 0x3a, 0x87, 0x3c, 0xc7, 0x22, 0xd5, 0x64, 0x91, 0xd6, 0x2b, 0x37, 0x23, 0xe9, + 0xad, 0x14, 0xe7, 0x59, 0xf9, 0xf7, 0xf2, 0xef, 0x3b, 0xfc, 0x1b, 0xa1, 0x2f, 0xa3, 0xc6, 0xf9, + 0xf6, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x98, 0xcc, 0xce, 0xde, 0x22, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go new file mode 100644 index 0000000..90925d6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/datasets.pb.go @@ -0,0 +1,960 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/datasets.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A Dataset is a collection of genomic data. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type Dataset struct { + // The server-generated dataset ID, unique across all datasets. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The Google Cloud project ID that this dataset belongs to. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The dataset name. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The time this dataset was created, in seconds from the epoch. 
+ CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Dataset) Reset() { *m = Dataset{} } +func (m *Dataset) String() string { return proto.CompactTextString(m) } +func (*Dataset) ProtoMessage() {} +func (*Dataset) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{0} +} +func (m *Dataset) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Dataset.Unmarshal(m, b) +} +func (m *Dataset) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Dataset.Marshal(b, m, deterministic) +} +func (dst *Dataset) XXX_Merge(src proto.Message) { + xxx_messageInfo_Dataset.Merge(dst, src) +} +func (m *Dataset) XXX_Size() int { + return xxx_messageInfo_Dataset.Size(m) +} +func (m *Dataset) XXX_DiscardUnknown() { + xxx_messageInfo_Dataset.DiscardUnknown(m) +} + +var xxx_messageInfo_Dataset proto.InternalMessageInfo + +func (m *Dataset) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Dataset) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Dataset) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Dataset) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +// The dataset list request. +type ListDatasetsRequest struct { + // Required. The Google Cloud project ID to list datasets for. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 50. The maximum value is 1024. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsRequest) Reset() { *m = ListDatasetsRequest{} } +func (m *ListDatasetsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsRequest) ProtoMessage() {} +func (*ListDatasetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{1} +} +func (m *ListDatasetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsRequest.Unmarshal(m, b) +} +func (m *ListDatasetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsRequest.Merge(dst, src) +} +func (m *ListDatasetsRequest) XXX_Size() int { + return xxx_messageInfo_ListDatasetsRequest.Size(m) +} +func (m *ListDatasetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsRequest proto.InternalMessageInfo + +func (m *ListDatasetsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListDatasetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDatasetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The dataset list response. +type ListDatasetsResponse struct { + // The list of matching Datasets. + Datasets []*Dataset `protobuf:"bytes,1,rep,name=datasets,proto3" json:"datasets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatasetsResponse) Reset() { *m = ListDatasetsResponse{} } +func (m *ListDatasetsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDatasetsResponse) ProtoMessage() {} +func (*ListDatasetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{2} +} +func (m *ListDatasetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatasetsResponse.Unmarshal(m, b) +} +func (m *ListDatasetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatasetsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDatasetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatasetsResponse.Merge(dst, src) +} +func (m *ListDatasetsResponse) XXX_Size() int { + return xxx_messageInfo_ListDatasetsResponse.Size(m) +} +func (m *ListDatasetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatasetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatasetsResponse proto.InternalMessageInfo + +func (m *ListDatasetsResponse) GetDatasets() []*Dataset { + if m != nil { + return m.Datasets + } + return nil +} + +func (m *ListDatasetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type CreateDatasetRequest struct { + // The dataset to be created. Must contain projectId and name. + Dataset *Dataset `protobuf:"bytes,1,opt,name=dataset,proto3" json:"dataset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDatasetRequest) Reset() { *m = CreateDatasetRequest{} } +func (m *CreateDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDatasetRequest) ProtoMessage() {} +func (*CreateDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{3} +} +func (m *CreateDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDatasetRequest.Unmarshal(m, b) +} +func (m *CreateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDatasetRequest.Merge(dst, src) +} +func (m *CreateDatasetRequest) XXX_Size() int { + return xxx_messageInfo_CreateDatasetRequest.Size(m) +} +func (m *CreateDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDatasetRequest proto.InternalMessageInfo + +func (m *CreateDatasetRequest) GetDataset() *Dataset { + if m != nil { + return m.Dataset + } + return nil +} + +type UpdateDatasetRequest struct { + // The ID of the dataset to be updated. + DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The new dataset data. + Dataset *Dataset `protobuf:"bytes,2,opt,name=dataset,proto3" json:"dataset,omitempty"` + // An optional mask specifying which fields to update. At this time, the only + // mutable field is [name][google.genomics.v1.Dataset.name]. The only + // acceptable value is "name". If unspecified, all mutable fields will be + // updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDatasetRequest) Reset() { *m = UpdateDatasetRequest{} } +func (m *UpdateDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDatasetRequest) ProtoMessage() {} +func (*UpdateDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{4} +} +func (m *UpdateDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDatasetRequest.Unmarshal(m, b) +} +func (m *UpdateDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDatasetRequest.Merge(dst, src) +} +func (m *UpdateDatasetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDatasetRequest.Size(m) +} +func (m *UpdateDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDatasetRequest proto.InternalMessageInfo + +func (m *UpdateDatasetRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *UpdateDatasetRequest) GetDataset() *Dataset { + if m != nil { + return m.Dataset + } + return nil +} + +func (m *UpdateDatasetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteDatasetRequest struct { + // The ID of the dataset to be deleted. + DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDatasetRequest) Reset() { *m = DeleteDatasetRequest{} } +func (m *DeleteDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDatasetRequest) ProtoMessage() {} +func (*DeleteDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{5} +} +func (m *DeleteDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDatasetRequest.Unmarshal(m, b) +} +func (m *DeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDatasetRequest.Merge(dst, src) +} +func (m *DeleteDatasetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDatasetRequest.Size(m) +} +func (m *DeleteDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDatasetRequest proto.InternalMessageInfo + +func (m *DeleteDatasetRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +type UndeleteDatasetRequest struct { + // The ID of the dataset to be undeleted. 
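A hedged sketch of the patch semantics spelled out on UpdateDatasetRequest above: since "name" is the only mutable field, the update mask lists just that path. The renameDataset helper, its package name, and the caller-supplied IDs are assumptions of this example; the client type is the DatasetServiceV1Client generated further down in this file.

package datasetexample

import (
    "context"

    genomics "google.golang.org/genproto/googleapis/genomics/v1"
    field_mask "google.golang.org/genproto/protobuf/field_mask"
)

// renameDataset updates only the dataset name, which is the sole mutable
// field according to the UpdateDatasetRequest documentation above.
func renameDataset(ctx context.Context, client genomics.DatasetServiceV1Client, datasetID, newName string) (*genomics.Dataset, error) {
    return client.UpdateDataset(ctx, &genomics.UpdateDatasetRequest{
        DatasetId:  datasetID,
        Dataset:    &genomics.Dataset{Name: newName},
        UpdateMask: &field_mask.FieldMask{Paths: []string{"name"}},
    })
}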
+ DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteDatasetRequest) Reset() { *m = UndeleteDatasetRequest{} } +func (m *UndeleteDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*UndeleteDatasetRequest) ProtoMessage() {} +func (*UndeleteDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{6} +} +func (m *UndeleteDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteDatasetRequest.Unmarshal(m, b) +} +func (m *UndeleteDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *UndeleteDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteDatasetRequest.Merge(dst, src) +} +func (m *UndeleteDatasetRequest) XXX_Size() int { + return xxx_messageInfo_UndeleteDatasetRequest.Size(m) +} +func (m *UndeleteDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteDatasetRequest proto.InternalMessageInfo + +func (m *UndeleteDatasetRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +type GetDatasetRequest struct { + // The ID of the dataset. + DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatasetRequest) Reset() { *m = GetDatasetRequest{} } +func (m *GetDatasetRequest) String() string { return proto.CompactTextString(m) } +func (*GetDatasetRequest) ProtoMessage() {} +func (*GetDatasetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_datasets_17c76b380aa3a6d9, []int{7} +} +func (m *GetDatasetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatasetRequest.Unmarshal(m, b) +} +func (m *GetDatasetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatasetRequest.Marshal(b, m, deterministic) +} +func (dst *GetDatasetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatasetRequest.Merge(dst, src) +} +func (m *GetDatasetRequest) XXX_Size() int { + return xxx_messageInfo_GetDatasetRequest.Size(m) +} +func (m *GetDatasetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatasetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatasetRequest proto.InternalMessageInfo + +func (m *GetDatasetRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func init() { + proto.RegisterType((*Dataset)(nil), "google.genomics.v1.Dataset") + proto.RegisterType((*ListDatasetsRequest)(nil), "google.genomics.v1.ListDatasetsRequest") + proto.RegisterType((*ListDatasetsResponse)(nil), "google.genomics.v1.ListDatasetsResponse") + proto.RegisterType((*CreateDatasetRequest)(nil), "google.genomics.v1.CreateDatasetRequest") + proto.RegisterType((*UpdateDatasetRequest)(nil), "google.genomics.v1.UpdateDatasetRequest") + proto.RegisterType((*DeleteDatasetRequest)(nil), "google.genomics.v1.DeleteDatasetRequest") + proto.RegisterType((*UndeleteDatasetRequest)(nil), "google.genomics.v1.UndeleteDatasetRequest") + proto.RegisterType((*GetDatasetRequest)(nil), "google.genomics.v1.GetDatasetRequest") 
+} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatasetServiceV1Client is the client API for DatasetServiceV1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatasetServiceV1Client interface { + // Lists datasets within a project. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) + // Creates a new dataset. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Gets a dataset by ID. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Updates a dataset. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. + UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Deletes a dataset and all of its contents (all read group sets, + // reference sets, variant sets, call sets, annotation sets, etc.) + // This is reversible (up to one week after the deletion) via + // the + // [datasets.undelete][google.genomics.v1.DatasetServiceV1.UndeleteDataset] + // operation. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Undeletes a dataset by restoring a dataset which was deleted via this API. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This operation is only possible for a week after the deletion occurred. + UndeleteDataset(ctx context.Context, in *UndeleteDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) + // Sets the access control policy on the specified dataset. Replaces any + // existing policy. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // See Setting a + // Policy for more information. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for the dataset. This is empty if the + // policy or resource does not exist. 
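A sketch of driving ListDatasets through the pagination contract described above (page_size, page_token, next_page_token). The project ID is a placeholder, and conn is assumed to be an already-dialed, authenticated gRPC connection to the Genomics endpoint; the constructor and request/response types are the ones generated just below in this file.

package datasetexample

import (
    "context"

    genomics "google.golang.org/genproto/googleapis/genomics/v1"
    "google.golang.org/grpc"
)

// listAllDatasets walks every page of results for one Google Cloud project.
func listAllDatasets(ctx context.Context, conn *grpc.ClientConn, projectID string) ([]*genomics.Dataset, error) {
    client := genomics.NewDatasetServiceV1Client(conn)
    req := &genomics.ListDatasetsRequest{ProjectId: projectID, PageSize: 50}
    var all []*genomics.Dataset
    for {
        resp, err := client.ListDatasets(ctx, req)
        if err != nil {
            return nil, err
        }
        all = append(all, resp.GetDatasets()...)
        // An empty next_page_token means there are no further pages.
        if resp.GetNextPageToken() == "" {
            return all, nil
        }
        req.PageToken = resp.GetNextPageToken()
    }
}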
+ // + // See Getting a + // Policy for more information. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // See Testing + // Permissions for more information. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type datasetServiceV1Client struct { + cc *grpc.ClientConn +} + +func NewDatasetServiceV1Client(cc *grpc.ClientConn) DatasetServiceV1Client { + return &datasetServiceV1Client{cc} +} + +func (c *datasetServiceV1Client) ListDatasets(ctx context.Context, in *ListDatasetsRequest, opts ...grpc.CallOption) (*ListDatasetsResponse, error) { + out := new(ListDatasetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/ListDatasets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) CreateDataset(ctx context.Context, in *CreateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/CreateDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) GetDataset(ctx context.Context, in *GetDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/GetDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) UpdateDataset(ctx context.Context, in *UpdateDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/UpdateDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) DeleteDataset(ctx context.Context, in *DeleteDatasetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/DeleteDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) UndeleteDataset(ctx context.Context, in *UndeleteDatasetRequest, opts ...grpc.CallOption) (*Dataset, error) { + out := new(Dataset) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/UndeleteDataset", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/GetIamPolicy", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *datasetServiceV1Client) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.DatasetServiceV1/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatasetServiceV1Server is the server API for DatasetServiceV1 service. +type DatasetServiceV1Server interface { + // Lists datasets within a project. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + ListDatasets(context.Context, *ListDatasetsRequest) (*ListDatasetsResponse, error) + // Creates a new dataset. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateDataset(context.Context, *CreateDatasetRequest) (*Dataset, error) + // Gets a dataset by ID. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetDataset(context.Context, *GetDatasetRequest) (*Dataset, error) + // Updates a dataset. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. + UpdateDataset(context.Context, *UpdateDatasetRequest) (*Dataset, error) + // Deletes a dataset and all of its contents (all read group sets, + // reference sets, variant sets, call sets, annotation sets, etc.) + // This is reversible (up to one week after the deletion) via + // the + // [datasets.undelete][google.genomics.v1.DatasetServiceV1.UndeleteDataset] + // operation. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteDataset(context.Context, *DeleteDatasetRequest) (*empty.Empty, error) + // Undeletes a dataset by restoring a dataset which was deleted via this API. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This operation is only possible for a week after the deletion occurred. + UndeleteDataset(context.Context, *UndeleteDatasetRequest) (*Dataset, error) + // Sets the access control policy on the specified dataset. Replaces any + // existing policy. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // See Setting a + // Policy for more information. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for the dataset. This is empty if the + // policy or resource does not exist. + // + // See Getting a + // Policy for more information. 
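A sketch of the IAM helpers declared above, checking whether the caller holds a given permission on a dataset. The "datasets/<id>" resource form and the permission string are illustrative placeholders, not values confirmed by this file; the request and response types come from the vendored google.golang.org/genproto/googleapis/iam/v1 package imported as v1 at the top of this file.

package datasetexample

import (
    "context"

    genomics "google.golang.org/genproto/googleapis/genomics/v1"
    iampb "google.golang.org/genproto/googleapis/iam/v1"
)

// callerCan reports whether the caller holds the named permission on a
// dataset, by asking the service and scanning the returned subset.
func callerCan(ctx context.Context, client genomics.DatasetServiceV1Client, datasetID, permission string) (bool, error) {
    resp, err := client.TestIamPermissions(ctx, &iampb.TestIamPermissionsRequest{
        Resource:    "datasets/" + datasetID, // placeholder resource path
        Permissions: []string{permission},
    })
    if err != nil {
        return false, err
    }
    for _, p := range resp.GetPermissions() {
        if p == permission {
            return true, nil
        }
    }
    return false, nil
}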
+ // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that a caller has on the specified resource. + // See Testing + // Permissions for more information. + // + // For the definitions of datasets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterDatasetServiceV1Server(s *grpc.Server, srv DatasetServiceV1Server) { + s.RegisterService(&_DatasetServiceV1_serviceDesc, srv) +} + +func _DatasetServiceV1_ListDatasets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDatasetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).ListDatasets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/ListDatasets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).ListDatasets(ctx, req.(*ListDatasetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_CreateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).CreateDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/CreateDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).CreateDataset(ctx, req.(*CreateDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_GetDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).GetDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/GetDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).GetDataset(ctx, req.(*GetDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_UpdateDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).UpdateDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/UpdateDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).UpdateDataset(ctx, req.(*UpdateDatasetRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_DeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).DeleteDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/DeleteDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).DeleteDataset(ctx, req.(*DeleteDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_UndeleteDataset_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeleteDatasetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).UndeleteDataset(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/UndeleteDataset", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).UndeleteDataset(ctx, req.(*UndeleteDatasetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatasetServiceV1_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatasetServiceV1Server).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.DatasetServiceV1/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatasetServiceV1Server).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return 
interceptor(ctx, in, info, handler) +} + +var _DatasetServiceV1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.DatasetServiceV1", + HandlerType: (*DatasetServiceV1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListDatasets", + Handler: _DatasetServiceV1_ListDatasets_Handler, + }, + { + MethodName: "CreateDataset", + Handler: _DatasetServiceV1_CreateDataset_Handler, + }, + { + MethodName: "GetDataset", + Handler: _DatasetServiceV1_GetDataset_Handler, + }, + { + MethodName: "UpdateDataset", + Handler: _DatasetServiceV1_UpdateDataset_Handler, + }, + { + MethodName: "DeleteDataset", + Handler: _DatasetServiceV1_DeleteDataset_Handler, + }, + { + MethodName: "UndeleteDataset", + Handler: _DatasetServiceV1_UndeleteDataset_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _DatasetServiceV1_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _DatasetServiceV1_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _DatasetServiceV1_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1/datasets.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1/datasets.proto", fileDescriptor_datasets_17c76b380aa3a6d9) +} + +var fileDescriptor_datasets_17c76b380aa3a6d9 = []byte{ + // 786 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xd1, 0x4e, 0x13, 0x4d, + 0x14, 0xce, 0x16, 0xfe, 0x1f, 0x7a, 0xa0, 0xa0, 0x63, 0xc5, 0xda, 0x8a, 0x96, 0x8d, 0x42, 0xad, + 0xba, 0x4d, 0x6b, 0x08, 0x49, 0x89, 0x37, 0x88, 0x12, 0x12, 0x49, 0x9a, 0x02, 0x5e, 0x78, 0xd3, + 0x0c, 0xdd, 0xa1, 0x8e, 0x74, 0x77, 0xd6, 0x9d, 0x29, 0x28, 0xc8, 0x0d, 0x77, 0x5c, 0xfb, 0x00, + 0x26, 0xde, 0xf9, 0x3c, 0xbe, 0x82, 0x0f, 0xe1, 0xa5, 0x99, 0xd9, 0xd9, 0x76, 0xdb, 0x2e, 0x05, + 0x8c, 0x77, 0xdb, 0x73, 0xbe, 0x73, 0xbe, 0xef, 0xcc, 0xf9, 0x76, 0xba, 0xb0, 0xd0, 0x62, 0xac, + 0xd5, 0x26, 0xa5, 0x16, 0x71, 0x99, 0x43, 0x9b, 0xbc, 0x74, 0x58, 0x2e, 0xd9, 0x58, 0x60, 0x4e, + 0x04, 0xb7, 0x3c, 0x9f, 0x09, 0x86, 0x50, 0x00, 0xb1, 0x42, 0x88, 0x75, 0x58, 0xce, 0xde, 0xd3, + 0x65, 0xd8, 0xa3, 0x25, 0xec, 0xba, 0x4c, 0x60, 0x41, 0x99, 0xab, 0x2b, 0xb2, 0xf7, 0x75, 0x96, + 0x62, 0x47, 0xf6, 0xa3, 0xd8, 0x69, 0x78, 0xac, 0x4d, 0x9b, 0x9f, 0x75, 0x3e, 0xdb, 0x9f, 0xef, + 0xcb, 0xe5, 0x74, 0x4e, 0xfd, 0xda, 0xeb, 0xec, 0x97, 0x88, 0xe3, 0x89, 0x30, 0x99, 0x1f, 0x4c, + 0xee, 0x53, 0xd2, 0xb6, 0x1b, 0x0e, 0xe6, 0x07, 0x1a, 0xf1, 0x60, 0x10, 0x21, 0xa8, 0x43, 0xb8, + 0xc0, 0x8e, 0x17, 0x00, 0xcc, 0x73, 0x03, 0x26, 0xd6, 0x83, 0x01, 0xd1, 0x0c, 0x24, 0xa8, 0x9d, + 0x31, 0xf2, 0x46, 0x21, 0x59, 0x4f, 0x50, 0x1b, 0xcd, 0x03, 0x78, 0x3e, 0xfb, 0x40, 0x9a, 0xa2, + 0x41, 0xed, 0x4c, 0x42, 0xc5, 0x93, 0x3a, 0xb2, 0x69, 0x23, 0x04, 0xe3, 0x2e, 0x76, 0x48, 0x66, + 0x4c, 0x25, 0xd4, 0x33, 0x5a, 0x85, 0xa9, 0xa6, 0x4f, 0xb0, 0x20, 0x0d, 0x49, 0x94, 0x19, 0xcf, + 0x1b, 0x85, 0xa9, 0x4a, 0xd6, 0xd2, 0x47, 0x16, 0xaa, 0xb0, 0x76, 0x42, 0x15, 0x75, 0x08, 0xe0, + 0x32, 0x60, 0x7a, 0x70, 0xeb, 0x0d, 0xe5, 0x42, 0xcb, 0xe1, 0x75, 0xf2, 0xb1, 0x43, 0xb8, 0x18, + 0x90, 0x61, 0x0c, 0xca, 0xc8, 0x41, 0xd2, 0xc3, 0x2d, 0xd2, 0xe0, 0xf4, 0x98, 0x28, 0x91, 0xff, + 0xd5, 0x27, 0x65, 0x60, 0x9b, 0x1e, 0x13, 0x55, 0x2b, 0x93, 0x82, 0x1d, 0x10, 0x57, 0x2b, 0x55, + 0xf0, 0x1d, 0x19, 0x30, 0x8f, 0x20, 0xdd, 0xcf, 0xc8, 0x3d, 0xe6, 0x72, 0x82, 0x56, 0x60, 0x32, + 0xdc, 0x7a, 0xc6, 0xc8, 0x8f, 0x15, 0xa6, 0x2a, 0x39, 0x6b, 0x78, 0xed, 0x96, 0xae, 0xab, 0x77, + 0xc1, 0x68, 0x11, 0x66, 
0x5d, 0xf2, 0x49, 0x34, 0x22, 0xa4, 0xc1, 0xb9, 0xa5, 0x64, 0xb8, 0xd6, + 0x25, 0xde, 0x82, 0xf4, 0x4b, 0x35, 0x78, 0xd8, 0x42, 0xcf, 0xba, 0x0c, 0x13, 0xba, 0x97, 0x1a, + 0xf4, 0x12, 0xde, 0x10, 0x6b, 0xfe, 0x30, 0x20, 0xbd, 0xeb, 0xd9, 0xc3, 0xfd, 0xe6, 0x01, 0x34, + 0x26, 0x72, 0x76, 0x3a, 0xb2, 0x69, 0x47, 0xe9, 0x12, 0x57, 0xa7, 0x93, 0x5b, 0xee, 0x28, 0x36, + 0x65, 0x35, 0x75, 0xac, 0x71, 0x5b, 0x7e, 0x2d, 0xdd, 0xb8, 0x85, 0xf9, 0x41, 0x1d, 0x02, 0xb8, + 0x7c, 0x36, 0x97, 0x21, 0xbd, 0x4e, 0xda, 0xe4, 0x9a, 0x52, 0xcd, 0x15, 0x98, 0xdb, 0x75, 0xed, + 0xbf, 0x28, 0xac, 0xc0, 0xcd, 0x0d, 0x22, 0xae, 0x55, 0x53, 0xf9, 0x96, 0x84, 0x1b, 0xba, 0x62, + 0x9b, 0xf8, 0x87, 0xb4, 0x49, 0xde, 0x96, 0xd1, 0x11, 0x4c, 0x47, 0xcd, 0x82, 0x96, 0xe2, 0xce, + 0x2a, 0xc6, 0xc0, 0xd9, 0xc2, 0xe5, 0xc0, 0xc0, 0x77, 0x66, 0xfa, 0xec, 0xe7, 0xaf, 0xaf, 0x89, + 0x19, 0x34, 0x1d, 0xbd, 0x77, 0x50, 0x07, 0x52, 0x7d, 0x66, 0x41, 0xb1, 0x0d, 0xe3, 0xfc, 0x94, + 0x1d, 0xb5, 0x4f, 0x73, 0x5e, 0xb1, 0xdd, 0x31, 0xfb, 0xd8, 0xaa, 0xdd, 0x2d, 0x73, 0x80, 0xde, + 0xc1, 0xa1, 0x47, 0x71, 0x9d, 0x86, 0x0e, 0x76, 0x34, 0xe1, 0x82, 0x22, 0xcc, 0xa1, 0xbb, 0x51, + 0xc2, 0xd2, 0x49, 0x6f, 0x13, 0xa7, 0xe8, 0xcc, 0x80, 0x54, 0x9f, 0x93, 0xe3, 0x87, 0x8d, 0x33, + 0xfb, 0x68, 0xee, 0xa2, 0xe2, 0x7e, 0x58, 0xb9, 0x98, 0xbb, 0x37, 0xb9, 0x80, 0x54, 0x9f, 0x45, + 0xe3, 0x35, 0xc4, 0xb9, 0x38, 0x3b, 0x37, 0xf4, 0x16, 0xbc, 0x92, 0x17, 0x76, 0x38, 0x7a, 0x71, + 0xc4, 0xe8, 0xe7, 0x06, 0xcc, 0x0e, 0x58, 0x1c, 0x15, 0x63, 0x87, 0x8f, 0x7d, 0x0f, 0x46, 0x8f, + 0xff, 0x4c, 0xf1, 0x2f, 0x99, 0xe6, 0xc5, 0xe3, 0x77, 0x74, 0xdb, 0xaa, 0x51, 0x44, 0x5f, 0x60, + 0x7a, 0x9b, 0x88, 0x4d, 0xec, 0xd4, 0xd4, 0x9f, 0x11, 0x32, 0xc3, 0xde, 0x14, 0x3b, 0xb2, 0x6d, + 0x34, 0x19, 0xf2, 0xdf, 0x1e, 0xc0, 0x04, 0x59, 0xb3, 0xac, 0x98, 0x9f, 0x98, 0x8b, 0x92, 0xf9, + 0xc4, 0x27, 0x9c, 0x75, 0xfc, 0x26, 0x79, 0xd1, 0xd5, 0x50, 0x3c, 0xad, 0xf2, 0x48, 0x37, 0xcd, + 0xbe, 0x31, 0x8a, 0x7d, 0xe3, 0x9f, 0xb2, 0xb7, 0x06, 0xd8, 0xbf, 0x1b, 0x80, 0x76, 0x08, 0x57, + 0x41, 0xe2, 0x3b, 0x94, 0x73, 0xf9, 0x5f, 0xde, 0xf3, 0x80, 0x26, 0x18, 0x86, 0x84, 0x52, 0x1e, + 0x5f, 0x01, 0xa9, 0x5f, 0xf8, 0x15, 0x25, 0xaf, 0x6c, 0x3e, 0xbd, 0x58, 0x9e, 0x18, 0xaa, 0xae, + 0x1a, 0xc5, 0xb5, 0xf7, 0x30, 0xd7, 0x64, 0x4e, 0xcc, 0xc6, 0xd7, 0x52, 0xe1, 0xad, 0x52, 0x93, + 0x0e, 0xac, 0x19, 0xef, 0xaa, 0x21, 0x88, 0xb5, 0xb1, 0xdb, 0xb2, 0x98, 0xdf, 0x92, 0x9f, 0x37, + 0xca, 0x9f, 0xa5, 0x20, 0x85, 0x3d, 0xca, 0xa3, 0x9f, 0x3c, 0xab, 0xe1, 0xf3, 0x6f, 0xc3, 0xd8, + 0xfb, 0x5f, 0x21, 0x9f, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x87, 0x48, 0x07, 0xbb, 0x1b, 0x09, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go new file mode 100644 index 0000000..e86c89f --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/operations.pb.go @@ -0,0 +1,242 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/operations.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Metadata describing an [Operation][google.longrunning.Operation]. +type OperationMetadata struct { + // The Google Cloud Project in which the job is scoped. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The time at which the job was submitted to the Genomics service. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The time at which the job began to run. + StartTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time at which the job stopped running. + EndTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // The original request that started the operation. Note that this will be in + // current version of the API. If the operation was started with v1beta2 API + // and a GetOperation is performed on v1 API, a v1 request will be returned. + Request *any.Any `protobuf:"bytes,5,opt,name=request,proto3" json:"request,omitempty"` + // Optional event messages that were generated during the job's execution. + // This also contains any warnings that were generated during import + // or export. + Events []*OperationEvent `protobuf:"bytes,6,rep,name=events,proto3" json:"events,omitempty"` + // This field is deprecated. Use `labels` instead. Optionally provided by the + // caller when submitting the request that creates the operation. + ClientId string `protobuf:"bytes,7,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + // Runtime metadata on this Operation. + RuntimeMetadata *any.Any `protobuf:"bytes,8,opt,name=runtime_metadata,json=runtimeMetadata,proto3" json:"runtime_metadata,omitempty"` + // Optionally provided by the caller when submitting the request that creates + // the operation. 
+ Labels map[string]string `protobuf:"bytes,9,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationMetadata) Reset() { *m = OperationMetadata{} } +func (m *OperationMetadata) String() string { return proto.CompactTextString(m) } +func (*OperationMetadata) ProtoMessage() {} +func (*OperationMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_74a16f9c10fa9d67, []int{0} +} +func (m *OperationMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationMetadata.Unmarshal(m, b) +} +func (m *OperationMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationMetadata.Marshal(b, m, deterministic) +} +func (dst *OperationMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationMetadata.Merge(dst, src) +} +func (m *OperationMetadata) XXX_Size() int { + return xxx_messageInfo_OperationMetadata.Size(m) +} +func (m *OperationMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_OperationMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationMetadata proto.InternalMessageInfo + +func (m *OperationMetadata) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *OperationMetadata) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *OperationMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *OperationMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *OperationMetadata) GetRequest() *any.Any { + if m != nil { + return m.Request + } + return nil +} + +func (m *OperationMetadata) GetEvents() []*OperationEvent { + if m != nil { + return m.Events + } + return nil +} + +func (m *OperationMetadata) GetClientId() string { + if m != nil { + return m.ClientId + } + return "" +} + +func (m *OperationMetadata) GetRuntimeMetadata() *any.Any { + if m != nil { + return m.RuntimeMetadata + } + return nil +} + +func (m *OperationMetadata) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// An event that occurred during an [Operation][google.longrunning.Operation]. +type OperationEvent struct { + // Optional time of when event started. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Optional time of when event finished. An event can have a start time and no + // finish time. If an event has a finish time, there must be a start time. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Required description of event. 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationEvent) Reset() { *m = OperationEvent{} } +func (m *OperationEvent) String() string { return proto.CompactTextString(m) } +func (*OperationEvent) ProtoMessage() {} +func (*OperationEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_74a16f9c10fa9d67, []int{1} +} +func (m *OperationEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationEvent.Unmarshal(m, b) +} +func (m *OperationEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationEvent.Marshal(b, m, deterministic) +} +func (dst *OperationEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationEvent.Merge(dst, src) +} +func (m *OperationEvent) XXX_Size() int { + return xxx_messageInfo_OperationEvent.Size(m) +} +func (m *OperationEvent) XXX_DiscardUnknown() { + xxx_messageInfo_OperationEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationEvent proto.InternalMessageInfo + +func (m *OperationEvent) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *OperationEvent) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *OperationEvent) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func init() { + proto.RegisterType((*OperationMetadata)(nil), "google.genomics.v1.OperationMetadata") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1.OperationMetadata.LabelsEntry") + proto.RegisterType((*OperationEvent)(nil), "google.genomics.v1.OperationEvent") +} + +func init() { + proto.RegisterFile("google/genomics/v1/operations.proto", fileDescriptor_operations_74a16f9c10fa9d67) +} + +var fileDescriptor_operations_74a16f9c10fa9d67 = []byte{ + // 456 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x41, 0x6f, 0xd3, 0x30, + 0x14, 0xc7, 0xe5, 0x76, 0x6b, 0x9b, 0x17, 0x89, 0x0d, 0x6b, 0x42, 0xa1, 0x80, 0xa8, 0xca, 0xa5, + 0x27, 0x47, 0x1d, 0x42, 0x62, 0xdd, 0x01, 0x31, 0x69, 0x87, 0x4a, 0x20, 0xa6, 0x88, 0x13, 0x97, + 0xca, 0x4d, 0x1e, 0x51, 0x46, 0x62, 0x07, 0xdb, 0xad, 0xd4, 0xef, 0xc3, 0x17, 0xe0, 0xdb, 0x71, + 0x44, 0xb1, 0x9d, 0x2a, 0x6c, 0x68, 0x45, 0xdc, 0xec, 0xf7, 0xfe, 0x3f, 0xfb, 0x9f, 0xf7, 0x8f, + 0xe1, 0x55, 0x2e, 0x65, 0x5e, 0x62, 0x9c, 0xa3, 0x90, 0x55, 0x91, 0xea, 0x78, 0x3b, 0x8f, 0x65, + 0x8d, 0x8a, 0x9b, 0x42, 0x0a, 0xcd, 0x6a, 0x25, 0x8d, 0xa4, 0xd4, 0x89, 0x58, 0x2b, 0x62, 0xdb, + 0xf9, 0xf8, 0xb9, 0x07, 0x79, 0x5d, 0xc4, 0x5c, 0x08, 0x69, 0xba, 0xc4, 0xf8, 0xa9, 0xef, 0xda, + 0xdd, 0x7a, 0xf3, 0x35, 0xe6, 0x62, 0xe7, 0x5b, 0x2f, 0xef, 0xb6, 0x4c, 0x51, 0xa1, 0x36, 0xbc, + 0xaa, 0x9d, 0x60, 0xfa, 0xf3, 0x08, 0x1e, 0x7f, 0x6a, 0x2d, 0x7c, 0x44, 0xc3, 0x33, 0x6e, 0x38, + 0x7d, 0x01, 0x50, 0x2b, 0x79, 0x8b, 0xa9, 0x59, 0x15, 0x59, 0x44, 0x26, 0x64, 0x16, 0x24, 0x81, + 0xaf, 0x2c, 0x33, 0x7a, 0x09, 0x61, 0xaa, 0x90, 0x1b, 0x5c, 0x35, 0xc7, 0x45, 0xbd, 0x09, 0x99, + 0x85, 0xe7, 0x63, 0xe6, 0x8d, 0xb7, 0x77, 0xb1, 0xcf, 0xed, 0x5d, 0x09, 0x38, 0x79, 0x53, 0xa0, + 0x17, 0x00, 0xda, 0x70, 0x65, 0x1c, 0xdb, 0x3f, 0xc8, 0x06, 0x56, 0x6d, 0xd1, 0x37, 0x30, 0x42, + 0x91, 0x39, 0xf0, 0xe8, 0x20, 0x38, 0x44, 0x91, 0x59, 0x8c, 0xc1, 0x50, 0xe1, 0xf7, 0x0d, 0x6a, + 0x13, 0x1d, 0x5b, 0xea, 0xec, 0x1e, 
0xf5, 0x5e, 0xec, 0x92, 0x56, 0x44, 0x17, 0x30, 0xc0, 0x2d, + 0x0a, 0xa3, 0xa3, 0xc1, 0xa4, 0x3f, 0x0b, 0xcf, 0xa7, 0xec, 0x7e, 0x24, 0x6c, 0x3f, 0xb4, 0xeb, + 0x46, 0x9a, 0x78, 0x82, 0x3e, 0x83, 0x20, 0x2d, 0x0b, 0x14, 0x76, 0x70, 0x43, 0x3b, 0xb8, 0x91, + 0x2b, 0x2c, 0x33, 0xfa, 0x0e, 0x4e, 0xd5, 0x46, 0x34, 0xf6, 0x57, 0x95, 0x1f, 0x75, 0x34, 0x7a, + 0xc0, 0xd1, 0x89, 0x57, 0xef, 0x73, 0x59, 0xc2, 0xa0, 0xe4, 0x6b, 0x2c, 0x75, 0x14, 0x58, 0x67, + 0xf3, 0x07, 0x9d, 0xb5, 0x18, 0xfb, 0x60, 0x99, 0x6b, 0x61, 0xd4, 0x2e, 0xf1, 0x07, 0x8c, 0x2f, + 0x20, 0xec, 0x94, 0xe9, 0x29, 0xf4, 0xbf, 0xe1, 0xce, 0x47, 0xdd, 0x2c, 0xe9, 0x19, 0x1c, 0x6f, + 0x79, 0xb9, 0x71, 0xf1, 0x06, 0x89, 0xdb, 0x2c, 0x7a, 0x6f, 0xc9, 0xf4, 0x07, 0x81, 0x47, 0x7f, + 0x7e, 0xfe, 0x9d, 0x50, 0xc9, 0xff, 0x86, 0xda, 0xfb, 0xf7, 0x50, 0x27, 0x10, 0x66, 0xa8, 0x53, + 0x55, 0xd4, 0x8d, 0x0b, 0xfb, 0x1f, 0x05, 0x49, 0xb7, 0x74, 0x75, 0x0b, 0x4f, 0x52, 0x59, 0xfd, + 0x65, 0x42, 0x57, 0x27, 0x7b, 0xf7, 0xfa, 0xa6, 0xb9, 0xe2, 0x86, 0x7c, 0x59, 0xb4, 0x32, 0x59, + 0x72, 0x91, 0x33, 0xa9, 0xf2, 0xe6, 0x95, 0x5a, 0x03, 0xb1, 0x6b, 0xf1, 0xba, 0xd0, 0xdd, 0x97, + 0x7b, 0xd9, 0xae, 0x7f, 0x11, 0xb2, 0x1e, 0x58, 0xe5, 0xeb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x6a, 0xf6, 0xa8, 0x9a, 0xe2, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go new file mode 100644 index 0000000..95f7494 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/position.pb.go @@ -0,0 +1,108 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/position.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An abstraction for referring to a genomic position, in relation to some +// already known reference. For now, represents a genomic position as a +// reference name, a base number on that reference (0-based), and a +// determination of forward or reverse strand. +type Position struct { + // The name of the reference in whatever reference set is being used. + ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The 0-based offset from the start of the forward strand for that reference. + Position int64 `protobuf:"varint,2,opt,name=position,proto3" json:"position,omitempty"` + // Whether this position is on the reverse strand, as opposed to the forward + // strand. 
+ ReverseStrand bool `protobuf:"varint,3,opt,name=reverse_strand,json=reverseStrand,proto3" json:"reverse_strand,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Position) Reset() { *m = Position{} } +func (m *Position) String() string { return proto.CompactTextString(m) } +func (*Position) ProtoMessage() {} +func (*Position) Descriptor() ([]byte, []int) { + return fileDescriptor_position_a77d4b065ce2a236, []int{0} +} +func (m *Position) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Position.Unmarshal(m, b) +} +func (m *Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Position.Marshal(b, m, deterministic) +} +func (dst *Position) XXX_Merge(src proto.Message) { + xxx_messageInfo_Position.Merge(dst, src) +} +func (m *Position) XXX_Size() int { + return xxx_messageInfo_Position.Size(m) +} +func (m *Position) XXX_DiscardUnknown() { + xxx_messageInfo_Position.DiscardUnknown(m) +} + +var xxx_messageInfo_Position proto.InternalMessageInfo + +func (m *Position) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *Position) GetPosition() int64 { + if m != nil { + return m.Position + } + return 0 +} + +func (m *Position) GetReverseStrand() bool { + if m != nil { + return m.ReverseStrand + } + return false +} + +func init() { + proto.RegisterType((*Position)(nil), "google.genomics.v1.Position") +} + +func init() { + proto.RegisterFile("google/genomics/v1/position.proto", fileDescriptor_position_a77d4b065ce2a236) +} + +var fileDescriptor_position_a77d4b065ce2a236 = []byte{ + // 223 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x41, 0x4b, 0x03, 0x31, + 0x14, 0x84, 0x89, 0x05, 0x59, 0x03, 0xf5, 0xb0, 0x07, 0x59, 0x8a, 0x87, 0x55, 0x10, 0xf6, 0x94, + 0x50, 0xbc, 0xe9, 0xad, 0x3f, 0x40, 0x96, 0x7a, 0xf3, 0x52, 0x9e, 0xeb, 0x33, 0x06, 0xba, 0xef, + 0x85, 0x24, 0xec, 0x6f, 0xf7, 0x28, 0x49, 0x9a, 0x22, 0xf4, 0x96, 0x4c, 0x66, 0x26, 0x1f, 0x23, + 0x1f, 0x0c, 0xb3, 0x39, 0xa2, 0x36, 0x48, 0x3c, 0xdb, 0x29, 0xe8, 0x65, 0xab, 0x1d, 0x07, 0x1b, + 0x2d, 0x93, 0x72, 0x9e, 0x23, 0xb7, 0x6d, 0xb1, 0xa8, 0x6a, 0x51, 0xcb, 0x76, 0x73, 0x7f, 0x8a, + 0x81, 0xb3, 0x1a, 0x88, 0x38, 0x42, 0x0a, 0x84, 0x92, 0x78, 0x8c, 0xb2, 0x19, 0x4f, 0x1d, 0xed, + 0x93, 0xbc, 0xf5, 0xf8, 0x8d, 0x1e, 0x69, 0xc2, 0x03, 0xc1, 0x8c, 0x9d, 0xe8, 0xc5, 0x70, 0xb3, + 0x5f, 0x9f, 0xd5, 0x37, 0x98, 0xb1, 0xdd, 0xc8, 0xa6, 0x7e, 0xdb, 0x5d, 0xf5, 0x62, 0x58, 0xed, + 0xcf, 0xf7, 0x52, 0xb1, 0xa0, 0x0f, 0x78, 0x08, 0xd1, 0x03, 0x7d, 0x75, 0xab, 0x5e, 0x0c, 0x4d, + 0xaa, 0xc8, 0xea, 0x7b, 0x16, 0x77, 0x3f, 0xf2, 0x6e, 0xe2, 0x59, 0x5d, 0xd2, 0xee, 0xd6, 0x95, + 0x66, 0x4c, 0x78, 0xa3, 0xf8, 0x78, 0xa9, 0x26, 0x3e, 0x02, 0x19, 0xc5, 0xde, 0xa4, 0x01, 0x32, + 0xbc, 0x2e, 0x4f, 0xe0, 0x6c, 0xf8, 0x3f, 0xca, 0x6b, 0x3d, 0xff, 0x0a, 0xf1, 0x79, 0x9d, 0x9d, + 0xcf, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5c, 0xc6, 0x22, 0xea, 0x3d, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go new file mode 100644 index 0000000..e9baeca --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/range.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/genomics/v1/range.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A 0-based half-open genomic coordinate range for search requests. +type Range struct { + // The reference sequence name, for example `chr1`, + // `1`, or `chrX`. + ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The start position of the range on the reference, 0-based inclusive. + Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. + End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Range) Reset() { *m = Range{} } +func (m *Range) String() string { return proto.CompactTextString(m) } +func (*Range) ProtoMessage() {} +func (*Range) Descriptor() ([]byte, []int) { + return fileDescriptor_range_b9d65b625aacf976, []int{0} +} +func (m *Range) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Range.Unmarshal(m, b) +} +func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Range.Marshal(b, m, deterministic) +} +func (dst *Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Range.Merge(dst, src) +} +func (m *Range) XXX_Size() int { + return xxx_messageInfo_Range.Size(m) +} +func (m *Range) XXX_DiscardUnknown() { + xxx_messageInfo_Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Range proto.InternalMessageInfo + +func (m *Range) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *Range) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Range) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func init() { + proto.RegisterType((*Range)(nil), "google.genomics.v1.Range") +} + +func init() { + proto.RegisterFile("google/genomics/v1/range.proto", fileDescriptor_range_b9d65b625aacf976) +} + +var fileDescriptor_range_b9d65b625aacf976 = []byte{ + // 209 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x4f, 0x4d, 0x4b, 0xc4, 0x30, + 0x10, 0x25, 0x96, 0x15, 0x0c, 0x28, 0x12, 0x44, 0x8a, 0x88, 0x2c, 0x82, 0xb0, 0xa7, 0x84, 0xe2, + 0x4d, 0x6f, 0xfd, 0x01, 0x52, 0x7a, 0xf0, 0xe0, 0x45, 0xc6, 0x3a, 0x86, 0x40, 0x33, 0x53, 0x92, + 0xd0, 0xdf, 0xee, 0x51, 0x92, 0x58, 0x11, 0xf6, 0x36, 0x79, 0x1f, 0x79, 0xef, 0xc9, 0x3b, 0xcb, + 0x6c, 0x67, 0x34, 0x16, 0x89, 0xbd, 0x9b, 0xa2, 0x59, 0x3b, 0x13, 0x80, 0x2c, 0xea, 0x25, 0x70, + 0x62, 0xa5, 0x2a, 0xaf, 0x37, 0x5e, 0xaf, 0xdd, 0xcd, 0xed, 0xaf, 0x07, 0x16, 0x67, 0x80, 0x88, + 0x13, 0x24, 0xc7, 0x14, 0xab, 0xe3, 0xfe, 0x55, 0xee, 0xc6, 0xfc, 0x81, 0x7a, 0x90, 0x17, 0x01, + 0xbf, 0x30, 0x20, 0x4d, 
0xf8, 0x4e, 0xe0, 0xb1, 0x15, 0x7b, 0x71, 0x38, 0x1b, 0xcf, 0xff, 0xd0, + 0x17, 0xf0, 0xa8, 0xae, 0xe4, 0x2e, 0x26, 0x08, 0xa9, 0x3d, 0xd9, 0x8b, 0x43, 0x33, 0xd6, 0x87, + 0xba, 0x94, 0x0d, 0xd2, 0x67, 0xdb, 0x14, 0x2c, 0x9f, 0x3d, 0xca, 0xeb, 0x89, 0xbd, 0x3e, 0xee, + 0xd3, 0xcb, 0x92, 0x37, 0xe4, 0xf4, 0x41, 0xbc, 0x3d, 0x6d, 0x0a, 0x9e, 0x81, 0xac, 0xe6, 0x60, + 0xf3, 0xb8, 0xd2, 0xcd, 0x54, 0x0a, 0x16, 0x17, 0xff, 0x0f, 0x7e, 0xde, 0xee, 0x6f, 0x21, 0x3e, + 0x4e, 0x8b, 0xf2, 0xf1, 0x27, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x3e, 0xf1, 0x62, 0x19, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go new file mode 100644 index 0000000..17aff05 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readalignment.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/readalignment.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A linear alignment can be represented by one CIGAR string. Describes the +// mapped position and local alignment of the read to the reference. +type LinearAlignment struct { + // The position of this alignment. + Position *Position `protobuf:"bytes,1,opt,name=position,proto3" json:"position,omitempty"` + // The mapping quality of this alignment. Represents how likely + // the read maps to this position as opposed to other locations. + // + // Specifically, this is -10 log10 Pr(mapping position is wrong), rounded to + // the nearest integer. + MappingQuality int32 `protobuf:"varint,2,opt,name=mapping_quality,json=mappingQuality,proto3" json:"mapping_quality,omitempty"` + // Represents the local alignment of this sequence (alignment matches, indels, + // etc) against the reference. 
+ Cigar []*CigarUnit `protobuf:"bytes,3,rep,name=cigar,proto3" json:"cigar,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LinearAlignment) Reset() { *m = LinearAlignment{} } +func (m *LinearAlignment) String() string { return proto.CompactTextString(m) } +func (*LinearAlignment) ProtoMessage() {} +func (*LinearAlignment) Descriptor() ([]byte, []int) { + return fileDescriptor_readalignment_631bd8edb248b735, []int{0} +} +func (m *LinearAlignment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LinearAlignment.Unmarshal(m, b) +} +func (m *LinearAlignment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LinearAlignment.Marshal(b, m, deterministic) +} +func (dst *LinearAlignment) XXX_Merge(src proto.Message) { + xxx_messageInfo_LinearAlignment.Merge(dst, src) +} +func (m *LinearAlignment) XXX_Size() int { + return xxx_messageInfo_LinearAlignment.Size(m) +} +func (m *LinearAlignment) XXX_DiscardUnknown() { + xxx_messageInfo_LinearAlignment.DiscardUnknown(m) +} + +var xxx_messageInfo_LinearAlignment proto.InternalMessageInfo + +func (m *LinearAlignment) GetPosition() *Position { + if m != nil { + return m.Position + } + return nil +} + +func (m *LinearAlignment) GetMappingQuality() int32 { + if m != nil { + return m.MappingQuality + } + return 0 +} + +func (m *LinearAlignment) GetCigar() []*CigarUnit { + if m != nil { + return m.Cigar + } + return nil +} + +// A read alignment describes a linear alignment of a string of DNA to a +// [reference sequence][google.genomics.v1.Reference], in addition to metadata +// about the fragment (the molecule of DNA sequenced) and the read (the bases +// which were read by the sequencer). A read is equivalent to a line in a SAM +// file. A read belongs to exactly one read group and exactly one +// [read group set][google.genomics.v1.ReadGroupSet]. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +// +// ### Reverse-stranded reads +// +// Mapped reads (reads having a non-null `alignment`) can be aligned to either +// the forward or the reverse strand of their associated reference. Strandedness +// of a mapped read is encoded by `alignment.position.reverseStrand`. +// +// If we consider the reference to be a forward-stranded coordinate space of +// `[0, reference.length)` with `0` as the left-most position and +// `reference.length` as the right-most position, reads are always aligned left +// to right. That is, `alignment.position.position` always refers to the +// left-most reference coordinate and `alignment.cigar` describes the alignment +// of this read to the reference from left to right. All per-base fields such as +// `alignedSequence` and `alignedQuality` share this same left-to-right +// orientation; this is true of reads which are aligned to either strand. For +// reverse-stranded reads, this means that `alignedSequence` is the reverse +// complement of the bases that were originally reported by the sequencing +// machine. +// +// ### Generating a reference-aligned sequence string +// +// When interacting with mapped reads, it's often useful to produce a string +// representing the local alignment of the read to reference. 
The following +// pseudocode demonstrates one way of doing this: +// +// out = "" +// offset = 0 +// for c in read.alignment.cigar { +// switch c.operation { +// case "ALIGNMENT_MATCH", "SEQUENCE_MATCH", "SEQUENCE_MISMATCH": +// out += read.alignedSequence[offset:offset+c.operationLength] +// offset += c.operationLength +// break +// case "CLIP_SOFT", "INSERT": +// offset += c.operationLength +// break +// case "PAD": +// out += repeat("*", c.operationLength) +// break +// case "DELETE": +// out += repeat("-", c.operationLength) +// break +// case "SKIP": +// out += repeat(" ", c.operationLength) +// break +// case "CLIP_HARD": +// break +// } +// } +// return out +// +// ### Converting to SAM's CIGAR string +// +// The following pseudocode generates a SAM CIGAR string from the +// `cigar` field. Note that this is a lossy conversion +// (`cigar.referenceSequence` is lost). +// +// cigarMap = { +// "ALIGNMENT_MATCH": "M", +// "INSERT": "I", +// "DELETE": "D", +// "SKIP": "N", +// "CLIP_SOFT": "S", +// "CLIP_HARD": "H", +// "PAD": "P", +// "SEQUENCE_MATCH": "=", +// "SEQUENCE_MISMATCH": "X", +// } +// cigarStr = "" +// for c in read.alignment.cigar { +// cigarStr += c.operationLength + cigarMap[c.operation] +// } +// return cigarStr +type Read struct { + // The server-generated read ID, unique across all reads. This is different + // from the `fragmentName`. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The ID of the read group this read belongs to. A read belongs to exactly + // one read group. This is a server-generated ID which is distinct from SAM's + // RG tag (for that value, see + // [ReadGroup.name][google.genomics.v1.ReadGroup.name]). + ReadGroupId string `protobuf:"bytes,2,opt,name=read_group_id,json=readGroupId,proto3" json:"read_group_id,omitempty"` + // The ID of the read group set this read belongs to. A read belongs to + // exactly one read group set. + ReadGroupSetId string `protobuf:"bytes,3,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + // The fragment name. Equivalent to QNAME (query template name) in SAM. + FragmentName string `protobuf:"bytes,4,opt,name=fragment_name,json=fragmentName,proto3" json:"fragment_name,omitempty"` + // The orientation and the distance between reads from the fragment are + // consistent with the sequencing protocol (SAM flag 0x2). + ProperPlacement bool `protobuf:"varint,5,opt,name=proper_placement,json=properPlacement,proto3" json:"proper_placement,omitempty"` + // The fragment is a PCR or optical duplicate (SAM flag 0x400). + DuplicateFragment bool `protobuf:"varint,6,opt,name=duplicate_fragment,json=duplicateFragment,proto3" json:"duplicate_fragment,omitempty"` + // The observed length of the fragment, equivalent to TLEN in SAM. + FragmentLength int32 `protobuf:"varint,7,opt,name=fragment_length,json=fragmentLength,proto3" json:"fragment_length,omitempty"` + // The read number in sequencing. 0-based and less than numberReads. This + // field replaces SAM flag 0x40 and 0x80. + ReadNumber int32 `protobuf:"varint,8,opt,name=read_number,json=readNumber,proto3" json:"read_number,omitempty"` + // The number of reads in the fragment (extension to SAM flag 0x1). + NumberReads int32 `protobuf:"varint,9,opt,name=number_reads,json=numberReads,proto3" json:"number_reads,omitempty"` + // Whether this read did not pass filters, such as platform or vendor quality + // controls (SAM flag 0x200). 
+ FailedVendorQualityChecks bool `protobuf:"varint,10,opt,name=failed_vendor_quality_checks,json=failedVendorQualityChecks,proto3" json:"failed_vendor_quality_checks,omitempty"` + // The linear alignment for this alignment record. This field is null for + // unmapped reads. + Alignment *LinearAlignment `protobuf:"bytes,11,opt,name=alignment,proto3" json:"alignment,omitempty"` + // Whether this alignment is secondary. Equivalent to SAM flag 0x100. + // A secondary alignment represents an alternative to the primary alignment + // for this read. Aligners may return secondary alignments if a read can map + // ambiguously to multiple coordinates in the genome. By convention, each read + // has one and only one alignment where both `secondaryAlignment` + // and `supplementaryAlignment` are false. + SecondaryAlignment bool `protobuf:"varint,12,opt,name=secondary_alignment,json=secondaryAlignment,proto3" json:"secondary_alignment,omitempty"` + // Whether this alignment is supplementary. Equivalent to SAM flag 0x800. + // Supplementary alignments are used in the representation of a chimeric + // alignment. In a chimeric alignment, a read is split into multiple + // linear alignments that map to different reference contigs. The first + // linear alignment in the read will be designated as the representative + // alignment; the remaining linear alignments will be designated as + // supplementary alignments. These alignments may have different mapping + // quality scores. In each linear alignment in a chimeric alignment, the read + // will be hard clipped. The `alignedSequence` and + // `alignedQuality` fields in the alignment record will only + // represent the bases for its respective linear alignment. + SupplementaryAlignment bool `protobuf:"varint,13,opt,name=supplementary_alignment,json=supplementaryAlignment,proto3" json:"supplementary_alignment,omitempty"` + // The bases of the read sequence contained in this alignment record, + // **without CIGAR operations applied** (equivalent to SEQ in SAM). + // `alignedSequence` and `alignedQuality` may be + // shorter than the full read sequence and quality. This will occur if the + // alignment is part of a chimeric alignment, or if the read was trimmed. When + // this occurs, the CIGAR for this read will begin/end with a hard clip + // operator that will indicate the length of the excised sequence. + AlignedSequence string `protobuf:"bytes,14,opt,name=aligned_sequence,json=alignedSequence,proto3" json:"aligned_sequence,omitempty"` + // The quality of the read sequence contained in this alignment record + // (equivalent to QUAL in SAM). + // `alignedSequence` and `alignedQuality` may be shorter than the full read + // sequence and quality. This will occur if the alignment is part of a + // chimeric alignment, or if the read was trimmed. When this occurs, the CIGAR + // for this read will begin/end with a hard clip operator that will indicate + // the length of the excised sequence. + AlignedQuality []int32 `protobuf:"varint,15,rep,packed,name=aligned_quality,json=alignedQuality,proto3" json:"aligned_quality,omitempty"` + // The mapping of the primary alignment of the + // `(readNumber+1)%numberReads` read in the fragment. It replaces + // mate position and mate strand in SAM. + NextMatePosition *Position `protobuf:"bytes,16,opt,name=next_mate_position,json=nextMatePosition,proto3" json:"next_mate_position,omitempty"` + // A map of additional read alignment information. This must be of the form + // map (string key mapping to a list of string values). 
+ Info map[string]*_struct.ListValue `protobuf:"bytes,17,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Read) Reset() { *m = Read{} } +func (m *Read) String() string { return proto.CompactTextString(m) } +func (*Read) ProtoMessage() {} +func (*Read) Descriptor() ([]byte, []int) { + return fileDescriptor_readalignment_631bd8edb248b735, []int{1} +} +func (m *Read) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Read.Unmarshal(m, b) +} +func (m *Read) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Read.Marshal(b, m, deterministic) +} +func (dst *Read) XXX_Merge(src proto.Message) { + xxx_messageInfo_Read.Merge(dst, src) +} +func (m *Read) XXX_Size() int { + return xxx_messageInfo_Read.Size(m) +} +func (m *Read) XXX_DiscardUnknown() { + xxx_messageInfo_Read.DiscardUnknown(m) +} + +var xxx_messageInfo_Read proto.InternalMessageInfo + +func (m *Read) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Read) GetReadGroupId() string { + if m != nil { + return m.ReadGroupId + } + return "" +} + +func (m *Read) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +func (m *Read) GetFragmentName() string { + if m != nil { + return m.FragmentName + } + return "" +} + +func (m *Read) GetProperPlacement() bool { + if m != nil { + return m.ProperPlacement + } + return false +} + +func (m *Read) GetDuplicateFragment() bool { + if m != nil { + return m.DuplicateFragment + } + return false +} + +func (m *Read) GetFragmentLength() int32 { + if m != nil { + return m.FragmentLength + } + return 0 +} + +func (m *Read) GetReadNumber() int32 { + if m != nil { + return m.ReadNumber + } + return 0 +} + +func (m *Read) GetNumberReads() int32 { + if m != nil { + return m.NumberReads + } + return 0 +} + +func (m *Read) GetFailedVendorQualityChecks() bool { + if m != nil { + return m.FailedVendorQualityChecks + } + return false +} + +func (m *Read) GetAlignment() *LinearAlignment { + if m != nil { + return m.Alignment + } + return nil +} + +func (m *Read) GetSecondaryAlignment() bool { + if m != nil { + return m.SecondaryAlignment + } + return false +} + +func (m *Read) GetSupplementaryAlignment() bool { + if m != nil { + return m.SupplementaryAlignment + } + return false +} + +func (m *Read) GetAlignedSequence() string { + if m != nil { + return m.AlignedSequence + } + return "" +} + +func (m *Read) GetAlignedQuality() []int32 { + if m != nil { + return m.AlignedQuality + } + return nil +} + +func (m *Read) GetNextMatePosition() *Position { + if m != nil { + return m.NextMatePosition + } + return nil +} + +func (m *Read) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +func init() { + proto.RegisterType((*LinearAlignment)(nil), "google.genomics.v1.LinearAlignment") + proto.RegisterType((*Read)(nil), "google.genomics.v1.Read") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.Read.InfoEntry") +} + +func init() { + proto.RegisterFile("google/genomics/v1/readalignment.proto", fileDescriptor_readalignment_631bd8edb248b735) +} + +var fileDescriptor_readalignment_631bd8edb248b735 = []byte{ + // 683 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xcd, 0x4e, 
0xdb, 0x4a, + 0x14, 0xc7, 0xe5, 0x84, 0x70, 0xc9, 0x09, 0x24, 0x61, 0xae, 0xc4, 0xf5, 0x8d, 0xb8, 0xb7, 0x21, + 0x48, 0x6d, 0x58, 0xd4, 0x2e, 0x20, 0xb5, 0x88, 0x2e, 0x2a, 0x40, 0x6d, 0x45, 0x45, 0x51, 0x6a, + 0x54, 0x16, 0xdd, 0x58, 0x83, 0x7d, 0x62, 0x46, 0xd8, 0x33, 0xc6, 0x1e, 0x47, 0xcd, 0x23, 0xf5, + 0xdd, 0xfa, 0x00, 0x5d, 0x56, 0x33, 0xf6, 0x38, 0xd0, 0x66, 0xd1, 0x5d, 0xf2, 0x3f, 0xbf, 0xf3, + 0xe1, 0xf3, 0x31, 0xf0, 0x34, 0x12, 0x22, 0x8a, 0xd1, 0x8d, 0x90, 0x8b, 0x84, 0x05, 0xb9, 0x3b, + 0xdb, 0x77, 0x33, 0xa4, 0x21, 0x8d, 0x59, 0xc4, 0x13, 0xe4, 0xd2, 0x49, 0x33, 0x21, 0x05, 0x21, + 0x25, 0xe7, 0x18, 0xce, 0x99, 0xed, 0x0f, 0xb6, 0x2b, 0x5f, 0x9a, 0x32, 0x97, 0x72, 0x2e, 0x24, + 0x95, 0x4c, 0xf0, 0xbc, 0xf4, 0x18, 0xfc, 0xbf, 0x24, 0x72, 0xc0, 0x22, 0x9a, 0x55, 0xf6, 0x9d, + 0x25, 0xf6, 0x54, 0xe4, 0x4c, 0xc5, 0xa8, 0x10, 0x93, 0x40, 0xff, 0xbb, 0x29, 0xa6, 0x6e, 0x2e, + 0xb3, 0x22, 0xa8, 0x4a, 0x1a, 0x7d, 0xb3, 0xa0, 0x77, 0xc1, 0x38, 0xd2, 0xec, 0xc4, 0x14, 0x4b, + 0x8e, 0x60, 0xcd, 0xc4, 0xb0, 0xad, 0xa1, 0x35, 0xee, 0x1c, 0x6c, 0x3b, 0xbf, 0x57, 0xee, 0x4c, + 0x2a, 0xc6, 0xab, 0x69, 0xf2, 0x0c, 0x7a, 0x09, 0x4d, 0x53, 0xc6, 0x23, 0xff, 0xbe, 0xa0, 0x31, + 0x93, 0x73, 0xbb, 0x31, 0xb4, 0xc6, 0x2d, 0xaf, 0x5b, 0xc9, 0x9f, 0x4a, 0x95, 0x1c, 0x42, 0x4b, + 0x7f, 0x86, 0xdd, 0x1c, 0x36, 0xc7, 0x9d, 0x83, 0xff, 0x96, 0xc5, 0x3f, 0x53, 0xc0, 0x67, 0xce, + 0xa4, 0x57, 0xb2, 0xa3, 0xef, 0xab, 0xb0, 0xe2, 0x21, 0x0d, 0x49, 0x17, 0x1a, 0x2c, 0xd4, 0xa5, + 0xb5, 0xbd, 0x06, 0x0b, 0xc9, 0x08, 0x36, 0x54, 0xbb, 0xfd, 0x28, 0x13, 0x45, 0xea, 0xb3, 0x50, + 0x27, 0x6d, 0x7b, 0x1d, 0x25, 0xbe, 0x57, 0xda, 0x79, 0x48, 0xf6, 0x60, 0xf3, 0x01, 0x93, 0xa3, + 0x54, 0x5c, 0x53, 0x73, 0xdd, 0x9a, 0xbb, 0x42, 0x79, 0x1e, 0x92, 0x5d, 0xd8, 0x98, 0x66, 0x34, + 0x52, 0xbd, 0xf0, 0x39, 0x4d, 0xd0, 0x5e, 0xd1, 0xd8, 0xba, 0x11, 0x2f, 0x69, 0x82, 0x64, 0x0f, + 0xfa, 0x69, 0x26, 0x52, 0xcc, 0xfc, 0x34, 0xa6, 0x01, 0x2a, 0xdd, 0x6e, 0x0d, 0xad, 0xf1, 0x9a, + 0xd7, 0x2b, 0xf5, 0x89, 0x91, 0xc9, 0x73, 0x20, 0x61, 0x91, 0xc6, 0x2c, 0xa0, 0x12, 0x7d, 0x13, + 0xc4, 0x5e, 0xd5, 0xf0, 0x66, 0x6d, 0x79, 0x57, 0x19, 0x54, 0x13, 0xeb, 0xf4, 0x31, 0xf2, 0x48, + 0xde, 0xda, 0x7f, 0x95, 0x4d, 0x34, 0xf2, 0x85, 0x56, 0xc9, 0x13, 0xd0, 0x5f, 0xe8, 0xf3, 0x22, + 0xb9, 0xc1, 0xcc, 0x5e, 0xd3, 0x10, 0x28, 0xe9, 0x52, 0x2b, 0x64, 0x07, 0xd6, 0x4b, 0x9b, 0xaf, + 0xc4, 0xdc, 0x6e, 0x6b, 0xa2, 0x53, 0x6a, 0xaa, 0x93, 0x39, 0x79, 0x03, 0xdb, 0x53, 0xca, 0x62, + 0x0c, 0xfd, 0x19, 0xf2, 0x50, 0x64, 0x66, 0x6e, 0x7e, 0x70, 0x8b, 0xc1, 0x5d, 0x6e, 0x83, 0xae, + 0xf2, 0xdf, 0x92, 0xb9, 0xd6, 0x48, 0x35, 0xc3, 0x33, 0x0d, 0x90, 0x13, 0x68, 0xd7, 0x6b, 0x6e, + 0x77, 0xf4, 0xb6, 0xec, 0x2e, 0x9b, 0xe6, 0x2f, 0x4b, 0xe6, 0x2d, 0xbc, 0x88, 0x0b, 0x7f, 0xe7, + 0x18, 0x08, 0x1e, 0xd2, 0x6c, 0xee, 0x2f, 0x82, 0xad, 0xeb, 0xd4, 0xa4, 0x36, 0x2d, 0x16, 0xf4, + 0x15, 0xfc, 0x93, 0x17, 0x69, 0x1a, 0xeb, 0xf6, 0x3e, 0x76, 0xda, 0xd0, 0x4e, 0x5b, 0x8f, 0xcc, + 0x0b, 0xc7, 0x3d, 0xe8, 0x6b, 0x14, 0x43, 0x3f, 0xc7, 0xfb, 0x02, 0x79, 0x80, 0x76, 0x57, 0x0f, + 0xb7, 0x57, 0xe9, 0x57, 0x95, 0xac, 0xa6, 0x60, 0x50, 0xb3, 0xca, 0xbd, 0x61, 0x53, 0x4d, 0xa1, + 0x92, 0xcd, 0x2a, 0x7f, 0x00, 0xc2, 0xf1, 0xab, 0xf4, 0x13, 0x35, 0xdd, 0xfa, 0x6e, 0xfa, 0x7f, + 0x70, 0x37, 0x7d, 0xe5, 0xf7, 0x91, 0x4a, 0x34, 0x0a, 0x79, 0x09, 0x2b, 0x8c, 0x4f, 0x85, 0xbd, + 0xa9, 0xaf, 0x62, 0xb4, 0xcc, 0x5b, 0x8d, 0xcd, 0x39, 0xe7, 0x53, 0xf1, 0x96, 0xcb, 0x6c, 0xee, + 0x69, 0x7e, 0x70, 0x05, 0xed, 0x5a, 0x22, 0x7d, 0x68, 0xde, 0xe1, 0xbc, 0x3a, 0x0f, 0xf5, 0x93, + 0xbc, 0x80, 
0xd6, 0x8c, 0xc6, 0x05, 0xea, 0xbb, 0xe8, 0x1c, 0x0c, 0x4c, 0x5c, 0xf3, 0x24, 0x38, + 0x17, 0x2c, 0x97, 0xd7, 0x8a, 0xf0, 0x4a, 0xf0, 0xb8, 0x71, 0x64, 0x9d, 0x26, 0xb0, 0x15, 0x88, + 0x64, 0x49, 0x0d, 0xa7, 0x44, 0x15, 0x51, 0x77, 0x75, 0xa2, 0xa2, 0x4c, 0xac, 0x2f, 0xc7, 0x86, + 0x14, 0x31, 0xe5, 0x91, 0x23, 0xb2, 0x48, 0x3d, 0x4b, 0x3a, 0x87, 0x5b, 0x9a, 0x68, 0xca, 0xf2, + 0x87, 0x4f, 0xd5, 0x6b, 0xf3, 0xfb, 0x87, 0x65, 0xdd, 0xac, 0x6a, 0xf2, 0xf0, 0x67, 0x00, 0x00, + 0x00, 0xff, 0xff, 0xd0, 0xe1, 0xf6, 0x57, 0x4d, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go new file mode 100644 index 0000000..800691a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroup.pb.go @@ -0,0 +1,350 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/readgroup.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A read group is all the data that's processed the same way by the sequencer. +type ReadGroup struct { + // The server-generated read group ID, unique for all read groups. + // Note: This is different than the @RG ID field in the SAM spec. For that + // value, see [name][google.genomics.v1.ReadGroup.name]. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The dataset to which this read group belongs. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The read group name. This corresponds to the @RG ID field in the SAM spec. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // A free-form text description of this read group. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // A client-supplied sample identifier for the reads in this read group. + SampleId string `protobuf:"bytes,5,opt,name=sample_id,json=sampleId,proto3" json:"sample_id,omitempty"` + // The experiment used to generate this read group. + Experiment *ReadGroup_Experiment `protobuf:"bytes,6,opt,name=experiment,proto3" json:"experiment,omitempty"` + // The predicted insert size of this read group. The insert size is the length + // the sequenced DNA fragment from end-to-end, not including the adapters. + PredictedInsertSize int32 `protobuf:"varint,7,opt,name=predicted_insert_size,json=predictedInsertSize,proto3" json:"predicted_insert_size,omitempty"` + // The programs used to generate this read group. Programs are always + // identical for all read groups within a read group set. For this reason, + // only the first read group in a returned set will have this field + // populated. 
+ Programs []*ReadGroup_Program `protobuf:"bytes,10,rep,name=programs,proto3" json:"programs,omitempty"` + // The reference set the reads in this read group are aligned to. + ReferenceSetId string `protobuf:"bytes,11,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // A map of additional read group information. This must be of the form + // map (string key mapping to a list of string values). + Info map[string]*_struct.ListValue `protobuf:"bytes,12,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadGroup) Reset() { *m = ReadGroup{} } +func (m *ReadGroup) String() string { return proto.CompactTextString(m) } +func (*ReadGroup) ProtoMessage() {} +func (*ReadGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_readgroup_f15cee86d9a45532, []int{0} +} +func (m *ReadGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadGroup.Unmarshal(m, b) +} +func (m *ReadGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadGroup.Marshal(b, m, deterministic) +} +func (dst *ReadGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadGroup.Merge(dst, src) +} +func (m *ReadGroup) XXX_Size() int { + return xxx_messageInfo_ReadGroup.Size(m) +} +func (m *ReadGroup) XXX_DiscardUnknown() { + xxx_messageInfo_ReadGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadGroup proto.InternalMessageInfo + +func (m *ReadGroup) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *ReadGroup) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *ReadGroup) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReadGroup) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ReadGroup) GetSampleId() string { + if m != nil { + return m.SampleId + } + return "" +} + +func (m *ReadGroup) GetExperiment() *ReadGroup_Experiment { + if m != nil { + return m.Experiment + } + return nil +} + +func (m *ReadGroup) GetPredictedInsertSize() int32 { + if m != nil { + return m.PredictedInsertSize + } + return 0 +} + +func (m *ReadGroup) GetPrograms() []*ReadGroup_Program { + if m != nil { + return m.Programs + } + return nil +} + +func (m *ReadGroup) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *ReadGroup) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +type ReadGroup_Experiment struct { + // A client-supplied library identifier; a library is a collection of DNA + // fragments which have been prepared for sequencing from a sample. This + // field is important for quality control as error or bias can be introduced + // during sample preparation. + LibraryId string `protobuf:"bytes,1,opt,name=library_id,json=libraryId,proto3" json:"library_id,omitempty"` + // The platform unit used as part of this experiment, for example + // flowcell-barcode.lane for Illumina or slide for SOLiD. Corresponds to the + // @RG PU field in the SAM spec. + PlatformUnit string `protobuf:"bytes,2,opt,name=platform_unit,json=platformUnit,proto3" json:"platform_unit,omitempty"` + // The sequencing center used as part of this experiment. 
+ SequencingCenter string `protobuf:"bytes,3,opt,name=sequencing_center,json=sequencingCenter,proto3" json:"sequencing_center,omitempty"` + // The instrument model used as part of this experiment. This maps to + // sequencing technology in the SAM spec. + InstrumentModel string `protobuf:"bytes,4,opt,name=instrument_model,json=instrumentModel,proto3" json:"instrument_model,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadGroup_Experiment) Reset() { *m = ReadGroup_Experiment{} } +func (m *ReadGroup_Experiment) String() string { return proto.CompactTextString(m) } +func (*ReadGroup_Experiment) ProtoMessage() {} +func (*ReadGroup_Experiment) Descriptor() ([]byte, []int) { + return fileDescriptor_readgroup_f15cee86d9a45532, []int{0, 0} +} +func (m *ReadGroup_Experiment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadGroup_Experiment.Unmarshal(m, b) +} +func (m *ReadGroup_Experiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadGroup_Experiment.Marshal(b, m, deterministic) +} +func (dst *ReadGroup_Experiment) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadGroup_Experiment.Merge(dst, src) +} +func (m *ReadGroup_Experiment) XXX_Size() int { + return xxx_messageInfo_ReadGroup_Experiment.Size(m) +} +func (m *ReadGroup_Experiment) XXX_DiscardUnknown() { + xxx_messageInfo_ReadGroup_Experiment.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadGroup_Experiment proto.InternalMessageInfo + +func (m *ReadGroup_Experiment) GetLibraryId() string { + if m != nil { + return m.LibraryId + } + return "" +} + +func (m *ReadGroup_Experiment) GetPlatformUnit() string { + if m != nil { + return m.PlatformUnit + } + return "" +} + +func (m *ReadGroup_Experiment) GetSequencingCenter() string { + if m != nil { + return m.SequencingCenter + } + return "" +} + +func (m *ReadGroup_Experiment) GetInstrumentModel() string { + if m != nil { + return m.InstrumentModel + } + return "" +} + +type ReadGroup_Program struct { + // The command line used to run this program. + CommandLine string `protobuf:"bytes,1,opt,name=command_line,json=commandLine,proto3" json:"command_line,omitempty"` + // The user specified locally unique ID of the program. Used along with + // `prevProgramId` to define an ordering between programs. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The display name of the program. This is typically the colloquial name of + // the tool used, for example 'bwa' or 'picard'. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The ID of the program run before this one. + PrevProgramId string `protobuf:"bytes,4,opt,name=prev_program_id,json=prevProgramId,proto3" json:"prev_program_id,omitempty"` + // The version of the program run. 
+ Version string `protobuf:"bytes,5,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadGroup_Program) Reset() { *m = ReadGroup_Program{} } +func (m *ReadGroup_Program) String() string { return proto.CompactTextString(m) } +func (*ReadGroup_Program) ProtoMessage() {} +func (*ReadGroup_Program) Descriptor() ([]byte, []int) { + return fileDescriptor_readgroup_f15cee86d9a45532, []int{0, 1} +} +func (m *ReadGroup_Program) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadGroup_Program.Unmarshal(m, b) +} +func (m *ReadGroup_Program) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadGroup_Program.Marshal(b, m, deterministic) +} +func (dst *ReadGroup_Program) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadGroup_Program.Merge(dst, src) +} +func (m *ReadGroup_Program) XXX_Size() int { + return xxx_messageInfo_ReadGroup_Program.Size(m) +} +func (m *ReadGroup_Program) XXX_DiscardUnknown() { + xxx_messageInfo_ReadGroup_Program.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadGroup_Program proto.InternalMessageInfo + +func (m *ReadGroup_Program) GetCommandLine() string { + if m != nil { + return m.CommandLine + } + return "" +} + +func (m *ReadGroup_Program) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *ReadGroup_Program) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReadGroup_Program) GetPrevProgramId() string { + if m != nil { + return m.PrevProgramId + } + return "" +} + +func (m *ReadGroup_Program) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func init() { + proto.RegisterType((*ReadGroup)(nil), "google.genomics.v1.ReadGroup") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroup.InfoEntry") + proto.RegisterType((*ReadGroup_Experiment)(nil), "google.genomics.v1.ReadGroup.Experiment") + proto.RegisterType((*ReadGroup_Program)(nil), "google.genomics.v1.ReadGroup.Program") +} + +func init() { + proto.RegisterFile("google/genomics/v1/readgroup.proto", fileDescriptor_readgroup_f15cee86d9a45532) +} + +var fileDescriptor_readgroup_f15cee86d9a45532 = []byte{ + // 585 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xcb, 0x6e, 0xd4, 0x30, + 0x14, 0x55, 0xa6, 0xcf, 0xb9, 0xd3, 0xc7, 0x60, 0x04, 0x8a, 0x06, 0x90, 0x86, 0x22, 0x60, 0x10, + 0x52, 0x42, 0x87, 0x0d, 0x6a, 0x57, 0x14, 0x55, 0x10, 0xa9, 0x48, 0x55, 0x2a, 0x58, 0xb0, 0x89, + 0xdc, 0xf8, 0x4e, 0x64, 0x91, 0xd8, 0xc1, 0x76, 0x46, 0xb4, 0x9f, 0xc1, 0x57, 0xf0, 0x2d, 0x7c, + 0x11, 0x4b, 0x64, 0xc7, 0x49, 0x47, 0xa2, 0xea, 0xce, 0x39, 0xe7, 0x5c, 0xdf, 0xc7, 0xb9, 0x0e, + 0x1c, 0x14, 0x52, 0x16, 0x25, 0xc6, 0x05, 0x0a, 0x59, 0xf1, 0x5c, 0xc7, 0xcb, 0xc3, 0x58, 0x21, + 0x65, 0x85, 0x92, 0x4d, 0x1d, 0xd5, 0x4a, 0x1a, 0x49, 0x48, 0xab, 0x89, 0x3a, 0x4d, 0xb4, 0x3c, + 0x9c, 0x3c, 0xf6, 0x71, 0xb4, 0xe6, 0x31, 0x15, 0x42, 0x1a, 0x6a, 0xb8, 0x14, 0xba, 0x8d, 0xe8, + 0x59, 0xf7, 0x75, 0xd9, 0x2c, 0x62, 0x6d, 0x54, 0x93, 0x9b, 0x96, 0x3d, 0xf8, 0xb3, 0x09, 0xc3, + 0x14, 0x29, 0xfb, 0x68, 0x73, 0x90, 0x3d, 0x18, 0x70, 0x16, 0x06, 0xd3, 0x60, 0x36, 0x4c, 0x07, + 0x9c, 0x91, 0x27, 0x00, 0x8c, 0x1a, 0xaa, 0xd1, 0x64, 0x9c, 0x85, 0x03, 0x87, 0x0f, 0x3d, 0x92, + 0x30, 0x42, 0x60, 0x5d, 0xd0, 0x0a, 0xc3, 0x35, 0x47, 0xb8, 0x33, 0x99, 0xc2, 0x88, 0xa1, 0xce, + 0x15, 0xaf, 0x6d, 0x11, 
0xe1, 0xba, 0xa3, 0x56, 0x21, 0xf2, 0x08, 0x86, 0x9a, 0x56, 0x75, 0x89, + 0xf6, 0xce, 0x0d, 0xc7, 0x6f, 0xb7, 0x40, 0xc2, 0xc8, 0x27, 0x00, 0xfc, 0x59, 0xa3, 0xe2, 0x15, + 0x0a, 0x13, 0x6e, 0x4e, 0x83, 0xd9, 0x68, 0x3e, 0x8b, 0xfe, 0x6f, 0x3a, 0xea, 0x8b, 0x8e, 0x4e, + 0x7b, 0x7d, 0xba, 0x12, 0x4b, 0xe6, 0xf0, 0xa0, 0x56, 0xc8, 0x78, 0x6e, 0x90, 0x65, 0x5c, 0x68, + 0x54, 0x26, 0xd3, 0xfc, 0x1a, 0xc3, 0xad, 0x69, 0x30, 0xdb, 0x48, 0xef, 0xf7, 0x64, 0xe2, 0xb8, + 0x0b, 0x7e, 0x8d, 0xe4, 0x3d, 0x6c, 0xd7, 0x4a, 0x16, 0x8a, 0x56, 0x3a, 0x84, 0xe9, 0xda, 0x6c, + 0x34, 0x7f, 0x7e, 0x77, 0xee, 0xf3, 0x56, 0x9d, 0xf6, 0x61, 0x64, 0x06, 0x63, 0x85, 0x0b, 0x54, + 0x28, 0x72, 0xcc, 0xfc, 0xe0, 0x46, 0xae, 0xc9, 0xbd, 0x1e, 0xbf, 0x70, 0xd3, 0x3b, 0x86, 0x75, + 0x2e, 0x16, 0x32, 0xdc, 0x71, 0x89, 0x5e, 0xde, 0x9d, 0x28, 0x11, 0x0b, 0x79, 0x2a, 0x8c, 0xba, + 0x4a, 0x5d, 0xd0, 0xe4, 0x77, 0x00, 0x70, 0xd3, 0xb8, 0x35, 0xaa, 0xe4, 0x97, 0x8a, 0xaa, 0xab, + 0xac, 0x37, 0x70, 0xe8, 0x91, 0x84, 0x91, 0x67, 0xb0, 0x5b, 0x97, 0xd4, 0x2c, 0xa4, 0xaa, 0xb2, + 0x46, 0x70, 0xe3, 0xad, 0xdc, 0xe9, 0xc0, 0x2f, 0x82, 0x1b, 0xf2, 0x1a, 0xee, 0x69, 0xfc, 0xd1, + 0xa0, 0xc8, 0xb9, 0x28, 0xb2, 0x1c, 0x85, 0x41, 0xe5, 0xad, 0x1d, 0xdf, 0x10, 0x1f, 0x1c, 0x4e, + 0x5e, 0xc1, 0x98, 0x0b, 0xbb, 0x49, 0x36, 0x7d, 0x56, 0x49, 0x86, 0xa5, 0xf7, 0x7a, 0xff, 0x06, + 0xff, 0x6c, 0xe1, 0xc9, 0xaf, 0x00, 0xb6, 0xfc, 0x9c, 0xc8, 0x53, 0xd8, 0xc9, 0x65, 0x55, 0x51, + 0xc1, 0xb2, 0x92, 0x0b, 0xf4, 0x95, 0x8e, 0x3c, 0x76, 0xc6, 0x05, 0xfa, 0x1d, 0x1c, 0xf4, 0x3b, + 0x78, 0xdb, 0x92, 0xbd, 0x80, 0xfd, 0x5a, 0xe1, 0x32, 0xf3, 0x53, 0xb7, 0x3d, 0xb7, 0xc9, 0x77, + 0x2d, 0xec, 0x93, 0x25, 0x8c, 0x84, 0xb0, 0xb5, 0x44, 0xa5, 0xed, 0x22, 0xb6, 0x8b, 0xd6, 0x7d, + 0x4e, 0x2e, 0x60, 0xd8, 0x8f, 0x94, 0x8c, 0x61, 0xed, 0x3b, 0x5e, 0xf9, 0x62, 0xec, 0x91, 0xbc, + 0x81, 0x8d, 0x25, 0x2d, 0x1b, 0x74, 0x75, 0x8c, 0xe6, 0x93, 0xce, 0x9c, 0xee, 0x11, 0x45, 0x67, + 0x5c, 0x9b, 0xaf, 0x56, 0x91, 0xb6, 0xc2, 0xa3, 0xc1, 0xbb, 0xe0, 0x84, 0xc3, 0xc3, 0x5c, 0x56, + 0xb7, 0x18, 0x79, 0xb2, 0xd7, 0x3b, 0x79, 0x6e, 0x6f, 0x38, 0x0f, 0xbe, 0x1d, 0x75, 0x2a, 0x59, + 0x52, 0x51, 0x44, 0x52, 0x15, 0xf6, 0xdd, 0xbb, 0xfb, 0xe3, 0x96, 0xa2, 0x35, 0xd7, 0xab, 0xff, + 0x82, 0xe3, 0xee, 0xfc, 0x37, 0x08, 0x2e, 0x37, 0x9d, 0xf2, 0xed, 0xbf, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x37, 0xed, 0xaa, 0xaa, 0x34, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go new file mode 100644 index 0000000..ceb0c46 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/readgroupset.pb.go @@ -0,0 +1,163 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/readgroupset.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A read group set is a logical collection of read groups, which are +// collections of reads produced by a sequencer. A read group set typically +// models reads corresponding to one sample, sequenced one way, and aligned one +// way. +// +// * A read group set belongs to one dataset. +// * A read group belongs to one read group set. +// * A read belongs to one read group. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type ReadGroupSet struct { + // The server-generated read group set ID, unique for all read group sets. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The dataset to which this read group set belongs. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The reference set to which the reads in this read group set are aligned. + ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // The read group set name. By default this will be initialized to the sample + // name of the sequenced data contained in this set. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The filename of the original source file for this read group set, if any. + Filename string `protobuf:"bytes,5,opt,name=filename,proto3" json:"filename,omitempty"` + // The read groups in this set. There are typically 1-10 read groups in a read + // group set. + ReadGroups []*ReadGroup `protobuf:"bytes,6,rep,name=read_groups,json=readGroups,proto3" json:"read_groups,omitempty"` + // A map of additional read group set information. 
+ Info map[string]*_struct.ListValue `protobuf:"bytes,7,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadGroupSet) Reset() { *m = ReadGroupSet{} } +func (m *ReadGroupSet) String() string { return proto.CompactTextString(m) } +func (*ReadGroupSet) ProtoMessage() {} +func (*ReadGroupSet) Descriptor() ([]byte, []int) { + return fileDescriptor_readgroupset_b9edcdfdbb1584f9, []int{0} +} +func (m *ReadGroupSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadGroupSet.Unmarshal(m, b) +} +func (m *ReadGroupSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadGroupSet.Marshal(b, m, deterministic) +} +func (dst *ReadGroupSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadGroupSet.Merge(dst, src) +} +func (m *ReadGroupSet) XXX_Size() int { + return xxx_messageInfo_ReadGroupSet.Size(m) +} +func (m *ReadGroupSet) XXX_DiscardUnknown() { + xxx_messageInfo_ReadGroupSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadGroupSet proto.InternalMessageInfo + +func (m *ReadGroupSet) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *ReadGroupSet) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *ReadGroupSet) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *ReadGroupSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ReadGroupSet) GetFilename() string { + if m != nil { + return m.Filename + } + return "" +} + +func (m *ReadGroupSet) GetReadGroups() []*ReadGroup { + if m != nil { + return m.ReadGroups + } + return nil +} + +func (m *ReadGroupSet) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +func init() { + proto.RegisterType((*ReadGroupSet)(nil), "google.genomics.v1.ReadGroupSet") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.ReadGroupSet.InfoEntry") +} + +func init() { + proto.RegisterFile("google/genomics/v1/readgroupset.proto", fileDescriptor_readgroupset_b9edcdfdbb1584f9) +} + +var fileDescriptor_readgroupset_b9edcdfdbb1584f9 = []byte{ + // 367 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x8b, 0xdb, 0x30, + 0x10, 0xc5, 0xb1, 0xf3, 0xa7, 0xcd, 0xa4, 0x84, 0x54, 0x87, 0x62, 0x4c, 0x03, 0x21, 0x50, 0x08, + 0x3d, 0xc8, 0x4d, 0x7a, 0x29, 0x29, 0xe4, 0x10, 0x28, 0x25, 0xb0, 0x87, 0x60, 0xc3, 0x1e, 0xf6, + 0x12, 0x14, 0x7b, 0x6c, 0xc4, 0x3a, 0x92, 0x91, 0xe4, 0x40, 0xbe, 0xf3, 0x7e, 0x80, 0x3d, 0x2e, + 0x96, 0xff, 0x10, 0xd8, 0x25, 0xb7, 0xd1, 0xd3, 0xef, 0x8d, 0x46, 0x6f, 0xe0, 0x47, 0x26, 0x65, + 0x96, 0x63, 0x90, 0xa1, 0x90, 0x67, 0x1e, 0xeb, 0xe0, 0xb2, 0x0a, 0x14, 0xb2, 0x24, 0x53, 0xb2, + 0x2c, 0x34, 0x1a, 0x5a, 0x28, 0x69, 0x24, 0x21, 0x35, 0x46, 0x5b, 0x8c, 0x5e, 0x56, 0xfe, 0xf7, + 0xc6, 0xca, 0x0a, 0x1e, 0x30, 0x21, 0xa4, 0x61, 0x86, 0x4b, 0xa1, 0x6b, 0x87, 0xbf, 0xb8, 0xd7, + 0xb8, 0x61, 0xda, 0x0e, 0xf6, 0x74, 0x2a, 0xd3, 0x40, 0x1b, 0x55, 0xc6, 0xcd, 0x9b, 0x8b, 0x17, + 0x17, 0xbe, 0x84, 0xc8, 0x92, 0xff, 0x95, 0x23, 0x42, 0x43, 0x26, 0xe0, 0xf2, 0xc4, 0x73, 0xe6, + 0xce, 0x72, 0x14, 0xba, 0x3c, 0x21, 0x33, 0x80, 0x84, 0x19, 0xa6, 0xd1, 0x1c, 0x79, 0xe2, 0xb9, + 0x56, 0x1f, 0x35, 0xca, 0x3e, 0x21, 0x4b, 0x98, 0x2a, 
0x4c, 0x51, 0xa1, 0x88, 0xf1, 0xd8, 0x40, + 0x3d, 0x0b, 0x4d, 0x3a, 0x3d, 0xb2, 0x24, 0x81, 0xbe, 0x60, 0x67, 0xf4, 0xfa, 0xf6, 0xd6, 0xd6, + 0xc4, 0x87, 0xcf, 0x29, 0xcf, 0xd1, 0xea, 0x03, 0xab, 0x77, 0x67, 0xb2, 0x85, 0x71, 0xf5, 0x95, + 0x63, 0x1d, 0x92, 0x37, 0x9c, 0xf7, 0x96, 0xe3, 0xf5, 0x8c, 0xbe, 0xcf, 0x88, 0x76, 0xf3, 0x87, + 0xa0, 0xda, 0x52, 0x93, 0x2d, 0xf4, 0xb9, 0x48, 0xa5, 0xf7, 0xc9, 0x1a, 0x7f, 0xde, 0x35, 0x46, + 0x68, 0xe8, 0x5e, 0xa4, 0xf2, 0x9f, 0x30, 0xea, 0x1a, 0x5a, 0x9f, 0x1f, 0xc1, 0xa8, 0x93, 0xc8, + 0x14, 0x7a, 0xcf, 0x78, 0x6d, 0x62, 0xa9, 0x4a, 0xf2, 0x0b, 0x06, 0x17, 0x96, 0x97, 0x68, 0x23, + 0x19, 0xaf, 0xfd, 0xb6, 0x7f, 0x1b, 0x33, 0x7d, 0xe0, 0xda, 0x3c, 0x56, 0x44, 0x58, 0x83, 0x1b, + 0xf7, 0x8f, 0xb3, 0xcb, 0xe1, 0x5b, 0x2c, 0xcf, 0x1f, 0xcc, 0xb2, 0xfb, 0x7a, 0x3b, 0xcc, 0xa1, + 0x6a, 0x72, 0x70, 0x9e, 0x36, 0x2d, 0x28, 0x73, 0x26, 0x32, 0x2a, 0x55, 0x56, 0xad, 0xda, 0x3e, + 0x11, 0xd4, 0x57, 0xac, 0xe0, 0xfa, 0x76, 0xfd, 0x7f, 0xdb, 0xfa, 0xd5, 0x71, 0x4e, 0x43, 0x4b, + 0xfe, 0x7e, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1f, 0xa9, 0x2f, 0xa5, 0x80, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go new file mode 100644 index 0000000..5e6a7e2 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/reads.pb.go @@ -0,0 +1,1752 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/reads.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ImportReadGroupSetsRequest_PartitionStrategy int32 + +const ( + ImportReadGroupSetsRequest_PARTITION_STRATEGY_UNSPECIFIED ImportReadGroupSetsRequest_PartitionStrategy = 0 + // In most cases, this strategy yields one read group set per file. This is + // the default behavior. + // + // Allocate one read group set per file per sample. For BAM files, read + // groups are considered to share a sample if they have identical sample + // names. Furthermore, all reads for each file which do not belong to a read + // group, if any, will be grouped into a single read group set per-file. + ImportReadGroupSetsRequest_PER_FILE_PER_SAMPLE ImportReadGroupSetsRequest_PartitionStrategy = 1 + // Includes all read groups in all imported files into a single read group + // set. Requires that the headers for all imported files are equivalent. All + // reads which do not belong to a read group, if any, will be grouped into a + // separate read group set. 
+ ImportReadGroupSetsRequest_MERGE_ALL ImportReadGroupSetsRequest_PartitionStrategy = 2 +) + +var ImportReadGroupSetsRequest_PartitionStrategy_name = map[int32]string{ + 0: "PARTITION_STRATEGY_UNSPECIFIED", + 1: "PER_FILE_PER_SAMPLE", + 2: "MERGE_ALL", +} +var ImportReadGroupSetsRequest_PartitionStrategy_value = map[string]int32{ + "PARTITION_STRATEGY_UNSPECIFIED": 0, + "PER_FILE_PER_SAMPLE": 1, + "MERGE_ALL": 2, +} + +func (x ImportReadGroupSetsRequest_PartitionStrategy) String() string { + return proto.EnumName(ImportReadGroupSetsRequest_PartitionStrategy_name, int32(x)) +} +func (ImportReadGroupSetsRequest_PartitionStrategy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{2, 0} +} + +// The read group set search request. +type SearchReadGroupSetsRequest struct { + // Restricts this query to read group sets within the given datasets. At least + // one ID must be provided. + DatasetIds []string `protobuf:"bytes,1,rep,name=dataset_ids,json=datasetIds,proto3" json:"dataset_ids,omitempty"` + // Only return read group sets for which a substring of the name matches this + // string. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 256. The maximum value is 1024. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReadGroupSetsRequest) Reset() { *m = SearchReadGroupSetsRequest{} } +func (m *SearchReadGroupSetsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchReadGroupSetsRequest) ProtoMessage() {} +func (*SearchReadGroupSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{0} +} +func (m *SearchReadGroupSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReadGroupSetsRequest.Unmarshal(m, b) +} +func (m *SearchReadGroupSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReadGroupSetsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchReadGroupSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReadGroupSetsRequest.Merge(dst, src) +} +func (m *SearchReadGroupSetsRequest) XXX_Size() int { + return xxx_messageInfo_SearchReadGroupSetsRequest.Size(m) +} +func (m *SearchReadGroupSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReadGroupSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReadGroupSetsRequest proto.InternalMessageInfo + +func (m *SearchReadGroupSetsRequest) GetDatasetIds() []string { + if m != nil { + return m.DatasetIds + } + return nil +} + +func (m *SearchReadGroupSetsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SearchReadGroupSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchReadGroupSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// The read group set search response. 
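
The pageToken/pageSize/nextPageToken fields above follow the usual list-paging pattern for this API. As a minimal sketch only: paging through read group sets with the generated ReadServiceV1 client declared further down in this file might look like the following, where the endpoint, dial options, and dataset ID are placeholder assumptions rather than values taken from this patch (a real connection would need TLS and OAuth2 credentials).

package main

import (
	"context"
	"fmt"
	"log"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
	"google.golang.org/grpc"
)

func main() {
	// Placeholder endpoint and dial options; the real service requires
	// transport security and per-RPC OAuth2 credentials.
	conn, err := grpc.Dial("genomics.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := genomics.NewReadServiceV1Client(conn)
	req := &genomics.SearchReadGroupSetsRequest{
		DatasetIds: []string{"example-dataset-id"}, // placeholder dataset ID
		PageSize:   256,
	}
	for {
		resp, err := client.SearchReadGroupSets(context.Background(), req)
		if err != nil {
			log.Fatal(err)
		}
		for _, rgs := range resp.ReadGroupSets {
			fmt.Println(rgs.Id, rgs.Name)
		}
		// An empty nextPageToken signals the last page of results.
		if resp.NextPageToken == "" {
			break
		}
		req.PageToken = resp.NextPageToken
	}
}
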
+type SearchReadGroupSetsResponse struct { + // The list of matching read group sets. + ReadGroupSets []*ReadGroupSet `protobuf:"bytes,1,rep,name=read_group_sets,json=readGroupSets,proto3" json:"read_group_sets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReadGroupSetsResponse) Reset() { *m = SearchReadGroupSetsResponse{} } +func (m *SearchReadGroupSetsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchReadGroupSetsResponse) ProtoMessage() {} +func (*SearchReadGroupSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{1} +} +func (m *SearchReadGroupSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReadGroupSetsResponse.Unmarshal(m, b) +} +func (m *SearchReadGroupSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReadGroupSetsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchReadGroupSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReadGroupSetsResponse.Merge(dst, src) +} +func (m *SearchReadGroupSetsResponse) XXX_Size() int { + return xxx_messageInfo_SearchReadGroupSetsResponse.Size(m) +} +func (m *SearchReadGroupSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReadGroupSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReadGroupSetsResponse proto.InternalMessageInfo + +func (m *SearchReadGroupSetsResponse) GetReadGroupSets() []*ReadGroupSet { + if m != nil { + return m.ReadGroupSets + } + return nil +} + +func (m *SearchReadGroupSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The read group set import request. +type ImportReadGroupSetsRequest struct { + // Required. The ID of the dataset these read group sets will belong to. The + // caller must have WRITE permissions to this dataset. + DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The reference set to which the imported read group sets are aligned to, if + // any. The reference names of this reference set must be a superset of those + // found in the imported file headers. If no reference set id is provided, a + // best effort is made to associate with a matching reference set. + ReferenceSetId string `protobuf:"bytes,4,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // A list of URIs pointing at [BAM + // files](https://samtools.github.io/hts-specs/SAMv1.pdf) + // in Google Cloud Storage. + // Those URIs can include wildcards (*), but do not add or remove + // matching files before import has completed. + // + // Note that Google Cloud Storage object listing is only eventually + // consistent: files added may be not be immediately visible to + // everyone. Thus, if using a wildcard it is preferable not to start + // the import immediately after the files are created. 
+ SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"` + // The partition strategy describes how read groups are partitioned into read + // group sets. + PartitionStrategy ImportReadGroupSetsRequest_PartitionStrategy `protobuf:"varint,5,opt,name=partition_strategy,json=partitionStrategy,proto3,enum=google.genomics.v1.ImportReadGroupSetsRequest_PartitionStrategy" json:"partition_strategy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportReadGroupSetsRequest) Reset() { *m = ImportReadGroupSetsRequest{} } +func (m *ImportReadGroupSetsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportReadGroupSetsRequest) ProtoMessage() {} +func (*ImportReadGroupSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{2} +} +func (m *ImportReadGroupSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportReadGroupSetsRequest.Unmarshal(m, b) +} +func (m *ImportReadGroupSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportReadGroupSetsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportReadGroupSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportReadGroupSetsRequest.Merge(dst, src) +} +func (m *ImportReadGroupSetsRequest) XXX_Size() int { + return xxx_messageInfo_ImportReadGroupSetsRequest.Size(m) +} +func (m *ImportReadGroupSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportReadGroupSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportReadGroupSetsRequest proto.InternalMessageInfo + +func (m *ImportReadGroupSetsRequest) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *ImportReadGroupSetsRequest) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *ImportReadGroupSetsRequest) GetSourceUris() []string { + if m != nil { + return m.SourceUris + } + return nil +} + +func (m *ImportReadGroupSetsRequest) GetPartitionStrategy() ImportReadGroupSetsRequest_PartitionStrategy { + if m != nil { + return m.PartitionStrategy + } + return ImportReadGroupSetsRequest_PARTITION_STRATEGY_UNSPECIFIED +} + +// The read group set import response. +type ImportReadGroupSetsResponse struct { + // IDs of the read group sets that were created. 
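
The import request above only needs a dataset ID, source URIs, and a partition strategy; the RPC returns a google.longrunning.Operation that completes asynchronously. A minimal sketch, with the package name, dataset ID, and gs:// URIs as placeholder assumptions:

package readsexample

import (
	"context"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// importBAMs asks the service to import BAM files from Cloud Storage into the
// given dataset and returns the name of the long-running operation to poll.
func importBAMs(ctx context.Context, c genomics.ReadServiceV1Client, datasetID string, bamURIs []string) (string, error) {
	op, err := c.ImportReadGroupSets(ctx, &genomics.ImportReadGroupSetsRequest{
		DatasetId:         datasetID,
		SourceUris:        bamURIs, // e.g. wildcarded gs:// URIs (placeholders)
		PartitionStrategy: genomics.ImportReadGroupSetsRequest_PER_FILE_PER_SAMPLE,
	})
	if err != nil {
		return "", err
	}
	// The import runs asynchronously; the operation name can be polled via the
	// Operations API until it reports completion.
	return op.Name, nil
}
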
+ ReadGroupSetIds []string `protobuf:"bytes,1,rep,name=read_group_set_ids,json=readGroupSetIds,proto3" json:"read_group_set_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportReadGroupSetsResponse) Reset() { *m = ImportReadGroupSetsResponse{} } +func (m *ImportReadGroupSetsResponse) String() string { return proto.CompactTextString(m) } +func (*ImportReadGroupSetsResponse) ProtoMessage() {} +func (*ImportReadGroupSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{3} +} +func (m *ImportReadGroupSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportReadGroupSetsResponse.Unmarshal(m, b) +} +func (m *ImportReadGroupSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportReadGroupSetsResponse.Marshal(b, m, deterministic) +} +func (dst *ImportReadGroupSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportReadGroupSetsResponse.Merge(dst, src) +} +func (m *ImportReadGroupSetsResponse) XXX_Size() int { + return xxx_messageInfo_ImportReadGroupSetsResponse.Size(m) +} +func (m *ImportReadGroupSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportReadGroupSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportReadGroupSetsResponse proto.InternalMessageInfo + +func (m *ImportReadGroupSetsResponse) GetReadGroupSetIds() []string { + if m != nil { + return m.ReadGroupSetIds + } + return nil +} + +// The read group set export request. +type ExportReadGroupSetRequest struct { + // Required. The Google Cloud project ID that owns this + // export. The caller must have WRITE access to this project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. A Google Cloud Storage URI for the exported BAM file. + // The currently authenticated user must have write access to the new file. + // An error will be returned if the URI already contains data. + ExportUri string `protobuf:"bytes,2,opt,name=export_uri,json=exportUri,proto3" json:"export_uri,omitempty"` + // Required. The ID of the read group set to export. The caller must have + // READ access to this read group set. + ReadGroupSetId string `protobuf:"bytes,3,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + // The reference names to export. If this is not specified, all reference + // sequences, including unmapped reads, are exported. + // Use `*` to export only unmapped reads. 
+ ReferenceNames []string `protobuf:"bytes,4,rep,name=reference_names,json=referenceNames,proto3" json:"reference_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportReadGroupSetRequest) Reset() { *m = ExportReadGroupSetRequest{} } +func (m *ExportReadGroupSetRequest) String() string { return proto.CompactTextString(m) } +func (*ExportReadGroupSetRequest) ProtoMessage() {} +func (*ExportReadGroupSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{4} +} +func (m *ExportReadGroupSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportReadGroupSetRequest.Unmarshal(m, b) +} +func (m *ExportReadGroupSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportReadGroupSetRequest.Marshal(b, m, deterministic) +} +func (dst *ExportReadGroupSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportReadGroupSetRequest.Merge(dst, src) +} +func (m *ExportReadGroupSetRequest) XXX_Size() int { + return xxx_messageInfo_ExportReadGroupSetRequest.Size(m) +} +func (m *ExportReadGroupSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportReadGroupSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportReadGroupSetRequest proto.InternalMessageInfo + +func (m *ExportReadGroupSetRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ExportReadGroupSetRequest) GetExportUri() string { + if m != nil { + return m.ExportUri + } + return "" +} + +func (m *ExportReadGroupSetRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +func (m *ExportReadGroupSetRequest) GetReferenceNames() []string { + if m != nil { + return m.ReferenceNames + } + return nil +} + +type UpdateReadGroupSetRequest struct { + // The ID of the read group set to be updated. The caller must have WRITE + // permissions to the dataset associated with this read group set. + ReadGroupSetId string `protobuf:"bytes,1,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + // The new read group set data. See `updateMask` for details on mutability of + // fields. + ReadGroupSet *ReadGroupSet `protobuf:"bytes,2,opt,name=read_group_set,json=readGroupSet,proto3" json:"read_group_set,omitempty"` + // An optional mask specifying which fields to update. Supported fields: + // + // * [name][google.genomics.v1.ReadGroupSet.name]. + // * [referenceSetId][google.genomics.v1.ReadGroupSet.reference_set_id]. + // + // Leaving `updateMask` unset is equivalent to specifying all mutable + // fields. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateReadGroupSetRequest) Reset() { *m = UpdateReadGroupSetRequest{} } +func (m *UpdateReadGroupSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateReadGroupSetRequest) ProtoMessage() {} +func (*UpdateReadGroupSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{5} +} +func (m *UpdateReadGroupSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateReadGroupSetRequest.Unmarshal(m, b) +} +func (m *UpdateReadGroupSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateReadGroupSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateReadGroupSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateReadGroupSetRequest.Merge(dst, src) +} +func (m *UpdateReadGroupSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateReadGroupSetRequest.Size(m) +} +func (m *UpdateReadGroupSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateReadGroupSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateReadGroupSetRequest proto.InternalMessageInfo + +func (m *UpdateReadGroupSetRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +func (m *UpdateReadGroupSetRequest) GetReadGroupSet() *ReadGroupSet { + if m != nil { + return m.ReadGroupSet + } + return nil +} + +func (m *UpdateReadGroupSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteReadGroupSetRequest struct { + // The ID of the read group set to be deleted. The caller must have WRITE + // permissions to the dataset associated with this read group set. + ReadGroupSetId string `protobuf:"bytes,1,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteReadGroupSetRequest) Reset() { *m = DeleteReadGroupSetRequest{} } +func (m *DeleteReadGroupSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteReadGroupSetRequest) ProtoMessage() {} +func (*DeleteReadGroupSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{6} +} +func (m *DeleteReadGroupSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteReadGroupSetRequest.Unmarshal(m, b) +} +func (m *DeleteReadGroupSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteReadGroupSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteReadGroupSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteReadGroupSetRequest.Merge(dst, src) +} +func (m *DeleteReadGroupSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteReadGroupSetRequest.Size(m) +} +func (m *DeleteReadGroupSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteReadGroupSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteReadGroupSetRequest proto.InternalMessageInfo + +func (m *DeleteReadGroupSetRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +type GetReadGroupSetRequest struct { + // The ID of the read group set. 
+ ReadGroupSetId string `protobuf:"bytes,1,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReadGroupSetRequest) Reset() { *m = GetReadGroupSetRequest{} } +func (m *GetReadGroupSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetReadGroupSetRequest) ProtoMessage() {} +func (*GetReadGroupSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{7} +} +func (m *GetReadGroupSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReadGroupSetRequest.Unmarshal(m, b) +} +func (m *GetReadGroupSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReadGroupSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetReadGroupSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReadGroupSetRequest.Merge(dst, src) +} +func (m *GetReadGroupSetRequest) XXX_Size() int { + return xxx_messageInfo_GetReadGroupSetRequest.Size(m) +} +func (m *GetReadGroupSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReadGroupSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReadGroupSetRequest proto.InternalMessageInfo + +func (m *GetReadGroupSetRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +type ListCoverageBucketsRequest struct { + // Required. The ID of the read group set over which coverage is requested. + ReadGroupSetId string `protobuf:"bytes,1,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + // The name of the reference to query, within the reference set associated + // with this query. Optional. + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The start position of the range on the reference, 0-based inclusive. If + // specified, `referenceName` must also be specified. Defaults to 0. + Start int64 `protobuf:"varint,4,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. If + // specified, `referenceName` must also be specified. If unset or 0, defaults + // to the length of the reference. + End int64 `protobuf:"varint,5,opt,name=end,proto3" json:"end,omitempty"` + // The desired width of each reported coverage bucket in base pairs. This + // will be rounded down to the nearest precomputed bucket width; the value + // of which is returned as `bucketWidth` in the response. Defaults + // to infinity (each bucket spans an entire reference sequence) or the length + // of the target range, if specified. The smallest precomputed + // `bucketWidth` is currently 2048 base pairs; this is subject to + // change. + TargetBucketWidth int64 `protobuf:"varint,6,opt,name=target_bucket_width,json=targetBucketWidth,proto3" json:"target_bucket_width,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 1024. The maximum value is 2048. 
+ PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCoverageBucketsRequest) Reset() { *m = ListCoverageBucketsRequest{} } +func (m *ListCoverageBucketsRequest) String() string { return proto.CompactTextString(m) } +func (*ListCoverageBucketsRequest) ProtoMessage() {} +func (*ListCoverageBucketsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{8} +} +func (m *ListCoverageBucketsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCoverageBucketsRequest.Unmarshal(m, b) +} +func (m *ListCoverageBucketsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCoverageBucketsRequest.Marshal(b, m, deterministic) +} +func (dst *ListCoverageBucketsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCoverageBucketsRequest.Merge(dst, src) +} +func (m *ListCoverageBucketsRequest) XXX_Size() int { + return xxx_messageInfo_ListCoverageBucketsRequest.Size(m) +} +func (m *ListCoverageBucketsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListCoverageBucketsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCoverageBucketsRequest proto.InternalMessageInfo + +func (m *ListCoverageBucketsRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +func (m *ListCoverageBucketsRequest) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *ListCoverageBucketsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *ListCoverageBucketsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *ListCoverageBucketsRequest) GetTargetBucketWidth() int64 { + if m != nil { + return m.TargetBucketWidth + } + return 0 +} + +func (m *ListCoverageBucketsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListCoverageBucketsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// A bucket over which read coverage has been precomputed. A bucket corresponds +// to a specific range of the reference sequence. +type CoverageBucket struct { + // The genomic coordinate range spanned by this bucket. + Range *Range `protobuf:"bytes,1,opt,name=range,proto3" json:"range,omitempty"` + // The average number of reads which are aligned to each individual + // reference base in this bucket. 
+ MeanCoverage float32 `protobuf:"fixed32,2,opt,name=mean_coverage,json=meanCoverage,proto3" json:"mean_coverage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CoverageBucket) Reset() { *m = CoverageBucket{} } +func (m *CoverageBucket) String() string { return proto.CompactTextString(m) } +func (*CoverageBucket) ProtoMessage() {} +func (*CoverageBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{9} +} +func (m *CoverageBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CoverageBucket.Unmarshal(m, b) +} +func (m *CoverageBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CoverageBucket.Marshal(b, m, deterministic) +} +func (dst *CoverageBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoverageBucket.Merge(dst, src) +} +func (m *CoverageBucket) XXX_Size() int { + return xxx_messageInfo_CoverageBucket.Size(m) +} +func (m *CoverageBucket) XXX_DiscardUnknown() { + xxx_messageInfo_CoverageBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_CoverageBucket proto.InternalMessageInfo + +func (m *CoverageBucket) GetRange() *Range { + if m != nil { + return m.Range + } + return nil +} + +func (m *CoverageBucket) GetMeanCoverage() float32 { + if m != nil { + return m.MeanCoverage + } + return 0 +} + +type ListCoverageBucketsResponse struct { + // The length of each coverage bucket in base pairs. Note that buckets at the + // end of a reference sequence may be shorter. This value is omitted if the + // bucket width is infinity (the default behaviour, with no range or + // `targetBucketWidth`). + BucketWidth int64 `protobuf:"varint,1,opt,name=bucket_width,json=bucketWidth,proto3" json:"bucket_width,omitempty"` + // The coverage buckets. The list of buckets is sparse; a bucket with 0 + // overlapping reads is not returned. A bucket never crosses more than one + // reference sequence. Each bucket has width `bucketWidth`, unless + // its end is the end of the reference sequence. + CoverageBuckets []*CoverageBucket `protobuf:"bytes,2,rep,name=coverage_buckets,json=coverageBuckets,proto3" json:"coverage_buckets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListCoverageBucketsResponse) Reset() { *m = ListCoverageBucketsResponse{} } +func (m *ListCoverageBucketsResponse) String() string { return proto.CompactTextString(m) } +func (*ListCoverageBucketsResponse) ProtoMessage() {} +func (*ListCoverageBucketsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{10} +} +func (m *ListCoverageBucketsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListCoverageBucketsResponse.Unmarshal(m, b) +} +func (m *ListCoverageBucketsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListCoverageBucketsResponse.Marshal(b, m, deterministic) +} +func (dst *ListCoverageBucketsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListCoverageBucketsResponse.Merge(dst, src) +} +func (m *ListCoverageBucketsResponse) XXX_Size() int { + return xxx_messageInfo_ListCoverageBucketsResponse.Size(m) +} +func (m *ListCoverageBucketsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListCoverageBucketsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListCoverageBucketsResponse proto.InternalMessageInfo + +func (m *ListCoverageBucketsResponse) GetBucketWidth() int64 { + if m != nil { + return m.BucketWidth + } + return 0 +} + +func (m *ListCoverageBucketsResponse) GetCoverageBuckets() []*CoverageBucket { + if m != nil { + return m.CoverageBuckets + } + return nil +} + +func (m *ListCoverageBucketsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The read search request. +type SearchReadsRequest struct { + // The IDs of the read groups sets within which to search for reads. All + // specified read group sets must be aligned against a common set of reference + // sequences; this defines the genomic coordinates for the query. Must specify + // one of `readGroupSetIds` or `readGroupIds`. + ReadGroupSetIds []string `protobuf:"bytes,1,rep,name=read_group_set_ids,json=readGroupSetIds,proto3" json:"read_group_set_ids,omitempty"` + // The IDs of the read groups within which to search for reads. All specified + // read groups must belong to the same read group sets. Must specify one of + // `readGroupSetIds` or `readGroupIds`. + ReadGroupIds []string `protobuf:"bytes,5,rep,name=read_group_ids,json=readGroupIds,proto3" json:"read_group_ids,omitempty"` + // The reference sequence name, for example `chr1`, `1`, or `chrX`. If set to + // `*`, only unmapped reads are returned. If unspecified, all reads (mapped + // and unmapped) are returned. + ReferenceName string `protobuf:"bytes,7,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The start position of the range on the reference, 0-based inclusive. If + // specified, `referenceName` must also be specified. + Start int64 `protobuf:"varint,8,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. If + // specified, `referenceName` must also be specified. + End int64 `protobuf:"varint,9,opt,name=end,proto3" json:"end,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. 
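
The coverage-bucket request and response messages above page the same way as the other list calls. A minimal sketch, with the reference name, region, and bucket width chosen arbitrarily as assumptions:

package readsexample

import (
	"context"
	"fmt"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// printCoverage lists precomputed coverage buckets over a region of one read
// group set and prints the mean coverage per bucket.
func printCoverage(ctx context.Context, c genomics.ReadServiceV1Client, readGroupSetID string) error {
	req := &genomics.ListCoverageBucketsRequest{
		ReadGroupSetId:    readGroupSetID,
		ReferenceName:     "chr1",  // placeholder reference
		Start:             0,
		End:               1000000, // placeholder region
		TargetBucketWidth: 2048,    // rounded down by the server to a precomputed width
	}
	for {
		resp, err := c.ListCoverageBuckets(ctx, req)
		if err != nil {
			return err
		}
		fmt.Println("bucket width (bp):", resp.BucketWidth)
		for _, b := range resp.CoverageBuckets {
			fmt.Printf("mean coverage %.2f over %v\n", b.MeanCoverage, b.Range)
		}
		if resp.NextPageToken == "" {
			return nil
		}
		req.PageToken = resp.NextPageToken
	}
}
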
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 256. The maximum value is 2048. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReadsRequest) Reset() { *m = SearchReadsRequest{} } +func (m *SearchReadsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchReadsRequest) ProtoMessage() {} +func (*SearchReadsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{11} +} +func (m *SearchReadsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReadsRequest.Unmarshal(m, b) +} +func (m *SearchReadsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReadsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchReadsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReadsRequest.Merge(dst, src) +} +func (m *SearchReadsRequest) XXX_Size() int { + return xxx_messageInfo_SearchReadsRequest.Size(m) +} +func (m *SearchReadsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReadsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReadsRequest proto.InternalMessageInfo + +func (m *SearchReadsRequest) GetReadGroupSetIds() []string { + if m != nil { + return m.ReadGroupSetIds + } + return nil +} + +func (m *SearchReadsRequest) GetReadGroupIds() []string { + if m != nil { + return m.ReadGroupIds + } + return nil +} + +func (m *SearchReadsRequest) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *SearchReadsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *SearchReadsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *SearchReadsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchReadsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// The read search response. +type SearchReadsResponse struct { + // The list of matching alignments sorted by mapped genomic coordinate, + // if any, ascending in position within the same reference. Unmapped reads, + // which have no position, are returned contiguously and are sorted in + // ascending lexicographic order by fragment name. + Alignments []*Read `protobuf:"bytes,1,rep,name=alignments,proto3" json:"alignments,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReadsResponse) Reset() { *m = SearchReadsResponse{} } +func (m *SearchReadsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchReadsResponse) ProtoMessage() {} +func (*SearchReadsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{12} +} +func (m *SearchReadsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReadsResponse.Unmarshal(m, b) +} +func (m *SearchReadsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReadsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchReadsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReadsResponse.Merge(dst, src) +} +func (m *SearchReadsResponse) XXX_Size() int { + return xxx_messageInfo_SearchReadsResponse.Size(m) +} +func (m *SearchReadsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReadsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReadsResponse proto.InternalMessageInfo + +func (m *SearchReadsResponse) GetAlignments() []*Read { + if m != nil { + return m.Alignments + } + return nil +} + +func (m *SearchReadsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The stream reads request. +type StreamReadsRequest struct { + // The Google Cloud project ID which will be billed + // for this access. The caller must have WRITE access to this project. + // Required. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The ID of the read group set from which to stream reads. + ReadGroupSetId string `protobuf:"bytes,2,opt,name=read_group_set_id,json=readGroupSetId,proto3" json:"read_group_set_id,omitempty"` + // The reference sequence name, for example `chr1`, + // `1`, or `chrX`. If set to *, only unmapped reads are + // returned. + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The start position of the range on the reference, 0-based inclusive. If + // specified, `referenceName` must also be specified. + Start int64 `protobuf:"varint,4,opt,name=start,proto3" json:"start,omitempty"` + // The end position of the range on the reference, 0-based exclusive. If + // specified, `referenceName` must also be specified. + End int64 `protobuf:"varint,5,opt,name=end,proto3" json:"end,omitempty"` + // Restricts results to a shard containing approximately `1/totalShards` + // of the normal response payload for this query. Results from a sharded + // request are disjoint from those returned by all queries which differ only + // in their shard parameter. A shard may yield 0 results; this is especially + // likely for large values of `totalShards`. + // + // Valid values are `[0, totalShards)`. + Shard int32 `protobuf:"varint,6,opt,name=shard,proto3" json:"shard,omitempty"` + // Specifying `totalShards` causes a disjoint subset of the normal response + // payload to be returned for each query with a unique `shard` parameter + // specified. A best effort is made to yield equally sized shards. 
Sharding + // can be used to distribute processing amongst workers, where each worker is + // assigned a unique `shard` number and all workers specify the same + // `totalShards` number. The union of reads returned for all sharded queries + // `[0, totalShards)` is equal to those returned by a single unsharded query. + // + // Queries for different values of `totalShards` with common divisors will + // share shard boundaries. For example, streaming `shard` 2 of 5 + // `totalShards` yields the same results as streaming `shard`s 4 and 5 of 10 + // `totalShards`. This property can be leveraged for adaptive retries. + TotalShards int32 `protobuf:"varint,7,opt,name=total_shards,json=totalShards,proto3" json:"total_shards,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamReadsRequest) Reset() { *m = StreamReadsRequest{} } +func (m *StreamReadsRequest) String() string { return proto.CompactTextString(m) } +func (*StreamReadsRequest) ProtoMessage() {} +func (*StreamReadsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{13} +} +func (m *StreamReadsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamReadsRequest.Unmarshal(m, b) +} +func (m *StreamReadsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamReadsRequest.Marshal(b, m, deterministic) +} +func (dst *StreamReadsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamReadsRequest.Merge(dst, src) +} +func (m *StreamReadsRequest) XXX_Size() int { + return xxx_messageInfo_StreamReadsRequest.Size(m) +} +func (m *StreamReadsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamReadsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamReadsRequest proto.InternalMessageInfo + +func (m *StreamReadsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *StreamReadsRequest) GetReadGroupSetId() string { + if m != nil { + return m.ReadGroupSetId + } + return "" +} + +func (m *StreamReadsRequest) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *StreamReadsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *StreamReadsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *StreamReadsRequest) GetShard() int32 { + if m != nil { + return m.Shard + } + return 0 +} + +func (m *StreamReadsRequest) GetTotalShards() int32 { + if m != nil { + return m.TotalShards + } + return 0 +} + +type StreamReadsResponse struct { + Alignments []*Read `protobuf:"bytes,1,rep,name=alignments,proto3" json:"alignments,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamReadsResponse) Reset() { *m = StreamReadsResponse{} } +func (m *StreamReadsResponse) String() string { return proto.CompactTextString(m) } +func (*StreamReadsResponse) ProtoMessage() {} +func (*StreamReadsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reads_fd372a563c554464, []int{14} +} +func (m *StreamReadsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamReadsResponse.Unmarshal(m, b) +} +func (m *StreamReadsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamReadsResponse.Marshal(b, m, deterministic) +} +func (dst *StreamReadsResponse) XXX_Merge(src 
proto.Message) { + xxx_messageInfo_StreamReadsResponse.Merge(dst, src) +} +func (m *StreamReadsResponse) XXX_Size() int { + return xxx_messageInfo_StreamReadsResponse.Size(m) +} +func (m *StreamReadsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamReadsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamReadsResponse proto.InternalMessageInfo + +func (m *StreamReadsResponse) GetAlignments() []*Read { + if m != nil { + return m.Alignments + } + return nil +} + +func init() { + proto.RegisterType((*SearchReadGroupSetsRequest)(nil), "google.genomics.v1.SearchReadGroupSetsRequest") + proto.RegisterType((*SearchReadGroupSetsResponse)(nil), "google.genomics.v1.SearchReadGroupSetsResponse") + proto.RegisterType((*ImportReadGroupSetsRequest)(nil), "google.genomics.v1.ImportReadGroupSetsRequest") + proto.RegisterType((*ImportReadGroupSetsResponse)(nil), "google.genomics.v1.ImportReadGroupSetsResponse") + proto.RegisterType((*ExportReadGroupSetRequest)(nil), "google.genomics.v1.ExportReadGroupSetRequest") + proto.RegisterType((*UpdateReadGroupSetRequest)(nil), "google.genomics.v1.UpdateReadGroupSetRequest") + proto.RegisterType((*DeleteReadGroupSetRequest)(nil), "google.genomics.v1.DeleteReadGroupSetRequest") + proto.RegisterType((*GetReadGroupSetRequest)(nil), "google.genomics.v1.GetReadGroupSetRequest") + proto.RegisterType((*ListCoverageBucketsRequest)(nil), "google.genomics.v1.ListCoverageBucketsRequest") + proto.RegisterType((*CoverageBucket)(nil), "google.genomics.v1.CoverageBucket") + proto.RegisterType((*ListCoverageBucketsResponse)(nil), "google.genomics.v1.ListCoverageBucketsResponse") + proto.RegisterType((*SearchReadsRequest)(nil), "google.genomics.v1.SearchReadsRequest") + proto.RegisterType((*SearchReadsResponse)(nil), "google.genomics.v1.SearchReadsResponse") + proto.RegisterType((*StreamReadsRequest)(nil), "google.genomics.v1.StreamReadsRequest") + proto.RegisterType((*StreamReadsResponse)(nil), "google.genomics.v1.StreamReadsResponse") + proto.RegisterEnum("google.genomics.v1.ImportReadGroupSetsRequest_PartitionStrategy", ImportReadGroupSetsRequest_PartitionStrategy_name, ImportReadGroupSetsRequest_PartitionStrategy_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// StreamingReadServiceClient is the client API for StreamingReadService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type StreamingReadServiceClient interface { + // Returns a stream of all the reads matching the search request, ordered + // by reference name, position, and ID. + StreamReads(ctx context.Context, in *StreamReadsRequest, opts ...grpc.CallOption) (StreamingReadService_StreamReadsClient, error) +} + +type streamingReadServiceClient struct { + cc *grpc.ClientConn +} + +func NewStreamingReadServiceClient(cc *grpc.ClientConn) StreamingReadServiceClient { + return &streamingReadServiceClient{cc} +} + +func (c *streamingReadServiceClient) StreamReads(ctx context.Context, in *StreamReadsRequest, opts ...grpc.CallOption) (StreamingReadService_StreamReadsClient, error) { + stream, err := c.cc.NewStream(ctx, &_StreamingReadService_serviceDesc.Streams[0], "/google.genomics.v1.StreamingReadService/StreamReads", opts...) 
+ if err != nil { + return nil, err + } + x := &streamingReadServiceStreamReadsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type StreamingReadService_StreamReadsClient interface { + Recv() (*StreamReadsResponse, error) + grpc.ClientStream +} + +type streamingReadServiceStreamReadsClient struct { + grpc.ClientStream +} + +func (x *streamingReadServiceStreamReadsClient) Recv() (*StreamReadsResponse, error) { + m := new(StreamReadsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// StreamingReadServiceServer is the server API for StreamingReadService service. +type StreamingReadServiceServer interface { + // Returns a stream of all the reads matching the search request, ordered + // by reference name, position, and ID. + StreamReads(*StreamReadsRequest, StreamingReadService_StreamReadsServer) error +} + +func RegisterStreamingReadServiceServer(s *grpc.Server, srv StreamingReadServiceServer) { + s.RegisterService(&_StreamingReadService_serviceDesc, srv) +} + +func _StreamingReadService_StreamReads_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(StreamReadsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(StreamingReadServiceServer).StreamReads(m, &streamingReadServiceStreamReadsServer{stream}) +} + +type StreamingReadService_StreamReadsServer interface { + Send(*StreamReadsResponse) error + grpc.ServerStream +} + +type streamingReadServiceStreamReadsServer struct { + grpc.ServerStream +} + +func (x *streamingReadServiceStreamReadsServer) Send(m *StreamReadsResponse) error { + return x.ServerStream.SendMsg(m) +} + +var _StreamingReadService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.StreamingReadService", + HandlerType: (*StreamingReadServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamReads", + Handler: _StreamingReadService_StreamReads_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/genomics/v1/reads.proto", +} + +// ReadServiceV1Client is the client API for ReadServiceV1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ReadServiceV1Client interface { + // Creates read group sets by asynchronously importing the provided + // information. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The caller must have WRITE permissions to the dataset. + // + // ## Notes on [BAM](https://samtools.github.io/hts-specs/SAMv1.pdf) import + // + // - Tags will be converted to strings - tag types are not preserved + // - Comments (`@CO`) in the input file header will not be preserved + // - Original header order of references (`@SQ`) will not be preserved + // - Any reverse stranded unmapped reads will be reverse complemented, and + // their qualities (also the "BQ" and "OQ" tags, if any) will be reversed + // - Unmapped reads will be stripped of positional information (reference name + // and position) + ImportReadGroupSets(ctx context.Context, in *ImportReadGroupSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Exports a read group set to a BAM file in Google Cloud Storage. 
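
The streaming client above delivers StreamReadsResponse batches until the server closes the stream. A minimal sketch of draining one shard, assuming a placeholder billing project ID and an already-constructed StreamingReadServiceClient:

package readsexample

import (
	"context"
	"io"

	genomics "google.golang.org/genproto/googleapis/genomics/v1"
)

// streamShard reads one shard's worth of alignments from a read group set.
func streamShard(ctx context.Context, c genomics.StreamingReadServiceClient, readGroupSetID string, shard, totalShards int32) error {
	stream, err := c.StreamReads(ctx, &genomics.StreamReadsRequest{
		ProjectId:      "example-project", // placeholder billing project
		ReadGroupSetId: readGroupSetID,
		Shard:          shard,
		TotalShards:    totalShards,
	})
	if err != nil {
		return err
	}
	for {
		resp, err := stream.Recv()
		if err == io.EOF {
			return nil // server has sent the last batch
		}
		if err != nil {
			return err
		}
		for _, read := range resp.Alignments {
			_ = read // process each *genomics.Read here
		}
	}
}
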
+ // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Note that currently there may be some differences between exported BAM + // files and the original BAM file at the time of import. See + // [ImportReadGroupSets][google.genomics.v1.ReadServiceV1.ImportReadGroupSets] + // for caveats. + ExportReadGroupSet(ctx context.Context, in *ExportReadGroupSetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Searches for read group sets matching the criteria. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReadGroupSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/readmethods.avdl#L135). + SearchReadGroupSets(ctx context.Context, in *SearchReadGroupSetsRequest, opts ...grpc.CallOption) (*SearchReadGroupSetsResponse, error) + // Updates a read group set. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. + UpdateReadGroupSet(ctx context.Context, in *UpdateReadGroupSetRequest, opts ...grpc.CallOption) (*ReadGroupSet, error) + // Deletes a read group set. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteReadGroupSet(ctx context.Context, in *DeleteReadGroupSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a read group set by ID. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetReadGroupSet(ctx context.Context, in *GetReadGroupSetRequest, opts ...grpc.CallOption) (*ReadGroupSet, error) + // Lists fixed width coverage buckets for a read group set, each of which + // correspond to a range of a reference sequence. Each bucket summarizes + // coverage information across its corresponding genomic range. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Coverage is defined as the number of reads which are aligned to a given + // base in the reference sequence. Coverage buckets are available at several + // precomputed bucket widths, enabling retrieval of various coverage 'zoom + // levels'. The caller must have READ permissions for the target read group + // set. + ListCoverageBuckets(ctx context.Context, in *ListCoverageBucketsRequest, opts ...grpc.CallOption) (*ListCoverageBucketsResponse, error) + // Gets a list of reads for one or more read group sets. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Reads search operates over a genomic coordinate space of reference sequence + // & position defined over the reference sequences to which the requested + // read group sets are aligned. 
+ // + // If a target positional range is specified, search returns all reads whose + // alignment to the reference genome overlap the range. A query which + // specifies only read group set IDs yields all reads in those read group + // sets, including unmapped reads. + // + // All reads returned (including reads on subsequent pages) are ordered by + // genomic coordinate (by reference sequence, then position). Reads with + // equivalent genomic coordinates are returned in an unspecified order. This + // order is consistent, such that two queries for the same content (regardless + // of page size) yield reads in the same order across their respective streams + // of paginated responses. + // + // Implements + // [GlobalAllianceApi.searchReads](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/readmethods.avdl#L85). + SearchReads(ctx context.Context, in *SearchReadsRequest, opts ...grpc.CallOption) (*SearchReadsResponse, error) +} + +type readServiceV1Client struct { + cc *grpc.ClientConn +} + +func NewReadServiceV1Client(cc *grpc.ClientConn) ReadServiceV1Client { + return &readServiceV1Client{cc} +} + +func (c *readServiceV1Client) ImportReadGroupSets(ctx context.Context, in *ImportReadGroupSetsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/ImportReadGroupSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) ExportReadGroupSet(ctx context.Context, in *ExportReadGroupSetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/ExportReadGroupSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) SearchReadGroupSets(ctx context.Context, in *SearchReadGroupSetsRequest, opts ...grpc.CallOption) (*SearchReadGroupSetsResponse, error) { + out := new(SearchReadGroupSetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/SearchReadGroupSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) UpdateReadGroupSet(ctx context.Context, in *UpdateReadGroupSetRequest, opts ...grpc.CallOption) (*ReadGroupSet, error) { + out := new(ReadGroupSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/UpdateReadGroupSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) DeleteReadGroupSet(ctx context.Context, in *DeleteReadGroupSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/DeleteReadGroupSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) GetReadGroupSet(ctx context.Context, in *GetReadGroupSetRequest, opts ...grpc.CallOption) (*ReadGroupSet, error) { + out := new(ReadGroupSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/GetReadGroupSet", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) ListCoverageBuckets(ctx context.Context, in *ListCoverageBucketsRequest, opts ...grpc.CallOption) (*ListCoverageBucketsResponse, error) { + out := new(ListCoverageBucketsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/ListCoverageBuckets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *readServiceV1Client) SearchReads(ctx context.Context, in *SearchReadsRequest, opts ...grpc.CallOption) (*SearchReadsResponse, error) { + out := new(SearchReadsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReadServiceV1/SearchReads", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ReadServiceV1Server is the server API for ReadServiceV1 service. +type ReadServiceV1Server interface { + // Creates read group sets by asynchronously importing the provided + // information. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The caller must have WRITE permissions to the dataset. + // + // ## Notes on [BAM](https://samtools.github.io/hts-specs/SAMv1.pdf) import + // + // - Tags will be converted to strings - tag types are not preserved + // - Comments (`@CO`) in the input file header will not be preserved + // - Original header order of references (`@SQ`) will not be preserved + // - Any reverse stranded unmapped reads will be reverse complemented, and + // their qualities (also the "BQ" and "OQ" tags, if any) will be reversed + // - Unmapped reads will be stripped of positional information (reference name + // and position) + ImportReadGroupSets(context.Context, *ImportReadGroupSetsRequest) (*longrunning.Operation, error) + // Exports a read group set to a BAM file in Google Cloud Storage. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Note that currently there may be some differences between exported BAM + // files and the original BAM file at the time of import. See + // [ImportReadGroupSets][google.genomics.v1.ReadServiceV1.ImportReadGroupSets] + // for caveats. + ExportReadGroupSet(context.Context, *ExportReadGroupSetRequest) (*longrunning.Operation, error) + // Searches for read group sets matching the criteria. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReadGroupSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/readmethods.avdl#L135). + SearchReadGroupSets(context.Context, *SearchReadGroupSetsRequest) (*SearchReadGroupSetsResponse, error) + // Updates a read group set. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. + UpdateReadGroupSet(context.Context, *UpdateReadGroupSetRequest) (*ReadGroupSet, error) + // Deletes a read group set. 
+ // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteReadGroupSet(context.Context, *DeleteReadGroupSetRequest) (*empty.Empty, error) + // Gets a read group set by ID. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetReadGroupSet(context.Context, *GetReadGroupSetRequest) (*ReadGroupSet, error) + // Lists fixed width coverage buckets for a read group set, each of which + // correspond to a range of a reference sequence. Each bucket summarizes + // coverage information across its corresponding genomic range. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Coverage is defined as the number of reads which are aligned to a given + // base in the reference sequence. Coverage buckets are available at several + // precomputed bucket widths, enabling retrieval of various coverage 'zoom + // levels'. The caller must have READ permissions for the target read group + // set. + ListCoverageBuckets(context.Context, *ListCoverageBucketsRequest) (*ListCoverageBucketsResponse, error) + // Gets a list of reads for one or more read group sets. + // + // For the definitions of read group sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Reads search operates over a genomic coordinate space of reference sequence + // & position defined over the reference sequences to which the requested + // read group sets are aligned. + // + // If a target positional range is specified, search returns all reads whose + // alignment to the reference genome overlap the range. A query which + // specifies only read group set IDs yields all reads in those read group + // sets, including unmapped reads. + // + // All reads returned (including reads on subsequent pages) are ordered by + // genomic coordinate (by reference sequence, then position). Reads with + // equivalent genomic coordinates are returned in an unspecified order. This + // order is consistent, such that two queries for the same content (regardless + // of page size) yield reads in the same order across their respective streams + // of paginated responses. + // + // Implements + // [GlobalAllianceApi.searchReads](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/readmethods.avdl#L85). 
+ SearchReads(context.Context, *SearchReadsRequest) (*SearchReadsResponse, error) +} + +func RegisterReadServiceV1Server(s *grpc.Server, srv ReadServiceV1Server) { + s.RegisterService(&_ReadServiceV1_serviceDesc, srv) +} + +func _ReadServiceV1_ImportReadGroupSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportReadGroupSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).ImportReadGroupSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/ImportReadGroupSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).ImportReadGroupSets(ctx, req.(*ImportReadGroupSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_ExportReadGroupSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportReadGroupSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).ExportReadGroupSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/ExportReadGroupSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).ExportReadGroupSet(ctx, req.(*ExportReadGroupSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_SearchReadGroupSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchReadGroupSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).SearchReadGroupSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/SearchReadGroupSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).SearchReadGroupSets(ctx, req.(*SearchReadGroupSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_UpdateReadGroupSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateReadGroupSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).UpdateReadGroupSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/UpdateReadGroupSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).UpdateReadGroupSet(ctx, req.(*UpdateReadGroupSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_DeleteReadGroupSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteReadGroupSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).DeleteReadGroupSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.genomics.v1.ReadServiceV1/DeleteReadGroupSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).DeleteReadGroupSet(ctx, req.(*DeleteReadGroupSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_GetReadGroupSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReadGroupSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).GetReadGroupSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/GetReadGroupSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).GetReadGroupSet(ctx, req.(*GetReadGroupSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_ListCoverageBuckets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListCoverageBucketsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).ListCoverageBuckets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/ListCoverageBuckets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).ListCoverageBuckets(ctx, req.(*ListCoverageBucketsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReadServiceV1_SearchReads_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchReadsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReadServiceV1Server).SearchReads(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReadServiceV1/SearchReads", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReadServiceV1Server).SearchReads(ctx, req.(*SearchReadsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ReadServiceV1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.ReadServiceV1", + HandlerType: (*ReadServiceV1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ImportReadGroupSets", + Handler: _ReadServiceV1_ImportReadGroupSets_Handler, + }, + { + MethodName: "ExportReadGroupSet", + Handler: _ReadServiceV1_ExportReadGroupSet_Handler, + }, + { + MethodName: "SearchReadGroupSets", + Handler: _ReadServiceV1_SearchReadGroupSets_Handler, + }, + { + MethodName: "UpdateReadGroupSet", + Handler: _ReadServiceV1_UpdateReadGroupSet_Handler, + }, + { + MethodName: "DeleteReadGroupSet", + Handler: _ReadServiceV1_DeleteReadGroupSet_Handler, + }, + { + MethodName: "GetReadGroupSet", + Handler: _ReadServiceV1_GetReadGroupSet_Handler, + }, + { + MethodName: "ListCoverageBuckets", + Handler: _ReadServiceV1_ListCoverageBuckets_Handler, + }, + { + MethodName: "SearchReads", + Handler: _ReadServiceV1_SearchReads_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1/reads.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1/reads.proto", fileDescriptor_reads_fd372a563c554464) +} + +var 
fileDescriptor_reads_fd372a563c554464 = []byte{ + // 1333 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0xcf, 0x6f, 0x1b, 0xc5, + 0x17, 0xff, 0x8e, 0x1d, 0xb7, 0xcd, 0x73, 0x93, 0x38, 0xe3, 0x7e, 0x8b, 0xe3, 0x90, 0x36, 0x6c, + 0x69, 0x1b, 0x02, 0xb5, 0x89, 0x11, 0x2a, 0x4a, 0x85, 0x44, 0xda, 0x3a, 0xc1, 0x28, 0x69, 0xad, + 0x75, 0x02, 0x82, 0xcb, 0x6a, 0x62, 0x4f, 0xb6, 0x4b, 0xec, 0xdd, 0x65, 0x66, 0x9c, 0xfe, 0x52, + 0x2f, 0xbd, 0x81, 0x04, 0x1c, 0x10, 0x27, 0xae, 0x5c, 0x39, 0x22, 0xfe, 0x08, 0x4e, 0x88, 0x0b, + 0x7f, 0x00, 0xe2, 0x0f, 0xe0, 0xc4, 0x11, 0xcd, 0xec, 0x6e, 0xbc, 0xeb, 0x9d, 0x6d, 0x1c, 0x55, + 0xe2, 0xb6, 0xfb, 0xe6, 0xb3, 0x6f, 0x3e, 0xef, 0xf7, 0x5b, 0xb8, 0x64, 0x7b, 0x9e, 0xdd, 0xa7, + 0x75, 0x9b, 0xba, 0xde, 0xc0, 0xe9, 0xf2, 0xfa, 0xd1, 0x5a, 0x9d, 0x51, 0xd2, 0xe3, 0x35, 0x9f, + 0x79, 0xc2, 0xc3, 0x38, 0x38, 0xaf, 0x45, 0xe7, 0xb5, 0xa3, 0xb5, 0xea, 0xab, 0xe1, 0x37, 0xc4, + 0x77, 0xea, 0xc4, 0x75, 0x3d, 0x41, 0x84, 0xe3, 0xb9, 0xe1, 0x17, 0x55, 0xad, 0x46, 0xe2, 0xda, + 0x34, 0x3c, 0xbf, 0x96, 0x71, 0x23, 0xe9, 0x3b, 0xb6, 0x3b, 0xa0, 0xae, 0x08, 0x71, 0x57, 0x33, + 0x70, 0x36, 0xf3, 0x86, 0x3e, 0xa7, 0x11, 0xec, 0x4a, 0x08, 0xeb, 0x7b, 0xae, 0xcd, 0x86, 0xae, + 0xeb, 0xb8, 0x76, 0xdd, 0xf3, 0x29, 0x4b, 0x70, 0x5a, 0x0c, 0x41, 0xea, 0x6d, 0x7f, 0x78, 0x50, + 0xa7, 0x03, 0x5f, 0x3c, 0x0e, 0x0f, 0x97, 0xc7, 0x0f, 0x0f, 0x1c, 0xda, 0xef, 0x59, 0x03, 0xc2, + 0x0f, 0x03, 0x84, 0xf1, 0x35, 0x82, 0x6a, 0x87, 0x12, 0xd6, 0x7d, 0x60, 0x52, 0xd2, 0xdb, 0x92, + 0x04, 0x3a, 0x54, 0x70, 0x93, 0x7e, 0x31, 0xa4, 0x5c, 0xe0, 0xcb, 0x50, 0xec, 0x11, 0x41, 0x38, + 0x15, 0x96, 0xd3, 0xe3, 0x15, 0xb4, 0x9c, 0x5f, 0x99, 0x36, 0x21, 0x14, 0xb5, 0x7a, 0x1c, 0x63, + 0x98, 0x72, 0xc9, 0x80, 0x56, 0xf2, 0xcb, 0x68, 0x65, 0xda, 0x54, 0xcf, 0x78, 0x09, 0xc0, 0x27, + 0x36, 0xb5, 0x84, 0x77, 0x48, 0xdd, 0x4a, 0x4e, 0x9d, 0x4c, 0x4b, 0xc9, 0xae, 0x14, 0xe0, 0x45, + 0x50, 0x2f, 0x16, 0x77, 0x9e, 0xd0, 0xca, 0xd4, 0x32, 0x5a, 0x29, 0x98, 0xe7, 0xa4, 0xa0, 0xe3, + 0x3c, 0xa1, 0xc6, 0xb7, 0x08, 0x16, 0xb5, 0x7c, 0xb8, 0xef, 0xb9, 0x9c, 0xe2, 0x0f, 0x61, 0x4e, + 0x7a, 0xca, 0x52, 0xae, 0xb2, 0x38, 0x15, 0x01, 0xa9, 0x62, 0x63, 0xb9, 0x96, 0x0e, 0x67, 0x2d, + 0xae, 0xc3, 0x9c, 0x61, 0x71, 0x8d, 0xf8, 0x1a, 0xcc, 0xb9, 0xf4, 0x91, 0xb0, 0x52, 0x54, 0x67, + 0xa4, 0xb8, 0x1d, 0xd1, 0x35, 0xfe, 0xc8, 0x41, 0xb5, 0x35, 0xf0, 0x3d, 0x26, 0xb4, 0x1e, 0x5a, + 0x02, 0x18, 0x79, 0xa8, 0x82, 0x02, 0x63, 0x8f, 0x1d, 0x84, 0x57, 0xa0, 0xc4, 0xe8, 0x01, 0x65, + 0xd4, 0xed, 0x52, 0x2b, 0x04, 0x4d, 0x29, 0xd0, 0xec, 0xb1, 0xbc, 0xa3, 0x90, 0x97, 0xa1, 0xc8, + 0xbd, 0x21, 0xeb, 0x52, 0x6b, 0xc8, 0x1c, 0x5e, 0xc9, 0x05, 0xae, 0x0e, 0x44, 0x7b, 0xcc, 0xe1, + 0xd8, 0x03, 0xec, 0x13, 0x26, 0x1c, 0x19, 0x7d, 0x8b, 0x0b, 0x46, 0x04, 0xb5, 0x1f, 0x57, 0x0a, + 0xcb, 0x68, 0x65, 0xb6, 0xf1, 0x81, 0xce, 0xfa, 0x6c, 0xd6, 0xb5, 0x76, 0xa4, 0xa8, 0x13, 0xea, + 0x31, 0xe7, 0xfd, 0x71, 0x91, 0x61, 0xc1, 0x7c, 0x0a, 0x87, 0x0d, 0xb8, 0xd4, 0xde, 0x30, 0x77, + 0x5b, 0xbb, 0xad, 0xfb, 0xf7, 0xac, 0xce, 0xae, 0xb9, 0xb1, 0xdb, 0xdc, 0xfa, 0xd4, 0xda, 0xbb, + 0xd7, 0x69, 0x37, 0xef, 0xb4, 0x36, 0x5b, 0xcd, 0xbb, 0xa5, 0xff, 0xe1, 0x57, 0xa0, 0xdc, 0x6e, + 0x9a, 0xd6, 0x66, 0x6b, 0xbb, 0x69, 0xc9, 0x87, 0xce, 0xc6, 0x4e, 0x7b, 0xbb, 0x59, 0x42, 0x78, + 0x06, 0xa6, 0x77, 0x9a, 0xe6, 0x56, 0xd3, 0xda, 0xd8, 0xde, 0x2e, 0xe5, 0x8c, 0x8f, 0x60, 0x51, + 0xcb, 0x31, 0x8c, 0xf5, 0x9b, 0x80, 0x93, 0xb1, 0x8e, 0xe5, 0xe0, 0x5c, 0x3c, 0x98, 0xad, 0x1e, + 0x37, 0x7e, 0x42, 0xb0, 
0xd0, 0x7c, 0x34, 0xae, 0x2c, 0x16, 0x25, 0x9f, 0x79, 0x9f, 0xd3, 0x6e, + 0x3c, 0x4a, 0xa1, 0xa4, 0xd5, 0x93, 0xc7, 0x54, 0x7d, 0x2b, 0x7d, 0x1f, 0x65, 0x6c, 0x20, 0xd9, + 0x63, 0x0e, 0x7e, 0x03, 0xe6, 0x53, 0x44, 0xc2, 0x8c, 0x9f, 0x4d, 0xf2, 0xc0, 0xd7, 0x65, 0x7e, + 0x46, 0xf1, 0x96, 0xd5, 0xc0, 0x2b, 0x53, 0x8a, 0xf0, 0x28, 0xdc, 0xf7, 0xa4, 0xd4, 0xf8, 0x15, + 0xc1, 0xc2, 0x9e, 0xdf, 0x23, 0x82, 0xea, 0xf8, 0x6a, 0x6f, 0x44, 0xda, 0x1b, 0x37, 0x61, 0x36, + 0x09, 0x55, 0xfc, 0x27, 0x29, 0x88, 0xf3, 0x71, 0x4d, 0xf8, 0x16, 0x14, 0x87, 0x8a, 0x8f, 0x6a, + 0x0f, 0xca, 0xbc, 0x62, 0xa3, 0x1a, 0x29, 0x89, 0x3a, 0x48, 0x6d, 0x53, 0x76, 0x90, 0x1d, 0xc2, + 0x0f, 0x4d, 0x08, 0xe0, 0xf2, 0xd9, 0xd8, 0x84, 0x85, 0xbb, 0xb4, 0x4f, 0x5f, 0xd6, 0x18, 0xe3, + 0x0e, 0x5c, 0xdc, 0xa2, 0xe2, 0x25, 0x95, 0x3c, 0xcf, 0x41, 0x75, 0xdb, 0xe1, 0xe2, 0x8e, 0x77, + 0x44, 0x19, 0xb1, 0xe9, 0xed, 0x61, 0xf7, 0x30, 0x56, 0xb1, 0xa7, 0xf0, 0xed, 0x55, 0x98, 0x4d, + 0x46, 0x33, 0x8c, 0xfa, 0x4c, 0x22, 0x98, 0xf8, 0x02, 0x14, 0xb8, 0x20, 0x4c, 0xa8, 0xca, 0xce, + 0x9b, 0xc1, 0x0b, 0x2e, 0x41, 0x9e, 0xba, 0x3d, 0x55, 0xa0, 0x79, 0x53, 0x3e, 0xe2, 0x1a, 0x94, + 0x05, 0x61, 0x36, 0x15, 0xd6, 0xbe, 0xa2, 0x64, 0x3d, 0x74, 0x7a, 0xe2, 0x41, 0xe5, 0x8c, 0x42, + 0xcc, 0x07, 0x47, 0x01, 0xd9, 0x4f, 0xe4, 0xc1, 0x58, 0x23, 0x3d, 0xfb, 0xc2, 0x46, 0x7a, 0x6e, + 0xac, 0x91, 0x1e, 0xc0, 0x6c, 0xd2, 0x7e, 0x5c, 0x87, 0x82, 0x1a, 0x56, 0xca, 0xd6, 0x62, 0x63, + 0x41, 0x9b, 0x1f, 0x12, 0x60, 0x06, 0x38, 0x7c, 0x05, 0x66, 0x06, 0x94, 0xb8, 0x56, 0x37, 0xd4, + 0xa3, 0x12, 0x2b, 0x67, 0x9e, 0x97, 0xc2, 0x48, 0xb7, 0xf1, 0x0b, 0x82, 0x45, 0xad, 0xb3, 0xc3, + 0x22, 0x7e, 0x0d, 0xce, 0x27, 0x8c, 0x45, 0xca, 0xd8, 0xe2, 0x7e, 0xcc, 0xcc, 0x1d, 0x28, 0x45, + 0x57, 0x84, 0x8e, 0x09, 0xda, 0x5f, 0xb1, 0x61, 0xe8, 0x38, 0x26, 0x6f, 0x32, 0xe7, 0xba, 0xc9, + 0x9b, 0x75, 0x8d, 0x3d, 0xaf, 0x6b, 0xec, 0x7f, 0x23, 0xc0, 0xa3, 0x51, 0x73, 0x9c, 0x1e, 0xa7, + 0xe9, 0x3a, 0xf8, 0xf5, 0x44, 0xf1, 0x49, 0x60, 0x41, 0x01, 0x47, 0xa5, 0x25, 0x51, 0xe9, 0x34, + 0x3a, 0xfb, 0xc2, 0x34, 0x3a, 0xa7, 0x49, 0xa3, 0xe9, 0x51, 0x1a, 0x25, 0xd3, 0x22, 0x7f, 0xaa, + 0xf9, 0xfa, 0x10, 0xca, 0x09, 0x9b, 0xc3, 0x28, 0xbd, 0x07, 0x70, 0xbc, 0xa4, 0x44, 0x13, 0xb5, + 0x92, 0xd5, 0x40, 0xcc, 0x18, 0x76, 0xe2, 0x31, 0xfa, 0x97, 0xf4, 0xb6, 0x60, 0x94, 0x0c, 0x12, + 0xde, 0x3e, 0xa1, 0x31, 0x6b, 0x6b, 0x35, 0xf7, 0x5f, 0xd4, 0xaa, 0xc4, 0x3d, 0x20, 0xac, 0xa7, + 0xaa, 0xb3, 0x60, 0x06, 0x2f, 0x32, 0x9b, 0x85, 0x27, 0x48, 0xdf, 0x52, 0xaf, 0x5c, 0xc5, 0xb1, + 0x60, 0x16, 0x95, 0xac, 0xa3, 0x44, 0xc6, 0x7d, 0x28, 0x27, 0xec, 0x7c, 0x59, 0x0f, 0x37, 0xbe, + 0x47, 0x70, 0x21, 0xd0, 0xe8, 0xb8, 0xb6, 0x3c, 0xed, 0x50, 0x76, 0xe4, 0x74, 0x29, 0x7e, 0x06, + 0xc5, 0xd8, 0x4d, 0xf8, 0x9a, 0x4e, 0x5b, 0xda, 0xe5, 0xd5, 0xeb, 0x27, 0xe2, 0x02, 0xca, 0xc6, + 0xe2, 0xf3, 0xdf, 0xff, 0xfc, 0x2e, 0xf7, 0x7f, 0xa3, 0x74, 0xbc, 0x39, 0xaf, 0x73, 0x05, 0x5b, + 0x47, 0xab, 0x6f, 0xa3, 0xc6, 0x6f, 0xd3, 0x30, 0x13, 0xa3, 0xf3, 0xf1, 0x1a, 0xfe, 0x12, 0x41, + 0x59, 0x33, 0xd0, 0x71, 0xed, 0x74, 0xdb, 0x49, 0x75, 0x29, 0xc2, 0xc7, 0x36, 0xdf, 0xda, 0xfd, + 0x68, 0xf3, 0x35, 0xae, 0x28, 0x5e, 0x4b, 0x46, 0x65, 0x7c, 0x6f, 0xe6, 0xeb, 0x8e, 0x52, 0xba, + 0x8e, 0x56, 0xf1, 0x0f, 0x08, 0x70, 0x7a, 0x1f, 0xc0, 0x37, 0x74, 0x54, 0x32, 0xf7, 0x86, 0x93, + 0x98, 0xdc, 0x54, 0x4c, 0xd6, 0x8c, 0xb7, 0x52, 0x4c, 0xea, 0x4f, 0x53, 0x79, 0xfb, 0x6c, 0x3d, + 0xd8, 0x28, 0x42, 0x76, 0x65, 0xcd, 0x9a, 0xab, 0xf7, 0x54, 0xf6, 0x7e, 0x5e, 0xad, 0x4f, 0x8c, + 0x0f, 0x63, 0x9a, 0xed, 0xbb, 0x3a, 0x57, 0x9f, 
0x49, 0x76, 0x3f, 0x22, 0xc0, 0xe9, 0xdd, 0x44, + 0xef, 0xbb, 0xcc, 0x1d, 0xa6, 0x7a, 0xe2, 0x02, 0x62, 0xbc, 0xaf, 0xc8, 0xdc, 0x6c, 0x5c, 0x9d, + 0xcc, 0x7d, 0x63, 0x7b, 0x0e, 0xfe, 0x0a, 0x01, 0x4e, 0x6f, 0x1d, 0x7a, 0x9a, 0x99, 0xdb, 0x49, + 0xf5, 0x62, 0x6a, 0xc5, 0x69, 0xca, 0x3f, 0x28, 0xe3, 0x86, 0x22, 0x77, 0x7d, 0x75, 0x32, 0x72, + 0xf8, 0x1b, 0x04, 0x73, 0x63, 0xab, 0x0b, 0x5e, 0xd5, 0x31, 0xd1, 0xef, 0x37, 0x13, 0x78, 0x2b, + 0x24, 0x84, 0x27, 0x24, 0xf4, 0x33, 0x82, 0xb2, 0x66, 0x30, 0xeb, 0x53, 0x2c, 0x7b, 0x5d, 0xd2, + 0xa7, 0xd8, 0x0b, 0x26, 0x7e, 0x14, 0x55, 0xfc, 0xee, 0x44, 0x3c, 0xeb, 0xd1, 0xf8, 0x0e, 0x27, + 0x3f, 0x7e, 0x0a, 0xc5, 0xd8, 0x84, 0xca, 0xe8, 0x6a, 0xa9, 0xb1, 0x9d, 0xd1, 0xd5, 0xd2, 0xa3, + 0x4e, 0xd3, 0xd5, 0x46, 0x99, 0x7f, 0x9b, 0xc2, 0xc5, 0xae, 0x37, 0xd0, 0xa8, 0xba, 0x0d, 0x4a, + 0x4b, 0x5b, 0xe6, 0x48, 0x1b, 0x7d, 0xb6, 0x1e, 0x21, 0xbc, 0x3e, 0x71, 0xed, 0x9a, 0xc7, 0x6c, + 0xf9, 0x33, 0xaf, 0x32, 0xa8, 0x1e, 0x1c, 0x11, 0xdf, 0xe1, 0xf1, 0x1f, 0xfc, 0x5b, 0xd1, 0xf3, + 0x3f, 0x08, 0xed, 0x9f, 0x51, 0xc8, 0x77, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x1b, 0x9b, 0xce, + 0x6e, 0xa3, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go new file mode 100644 index 0000000..744addd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/references.pb.go @@ -0,0 +1,1092 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/references.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A reference is a canonical assembled DNA sequence, intended to act as a +// reference coordinate space for other genomic annotations. A single reference +// might represent the human chromosome 1 or mitochandrial DNA, for instance. A +// reference belongs to one or more reference sets. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type Reference struct { + // The server-generated reference ID, unique across all references. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The length of this reference's sequence. + Length int64 `protobuf:"varint,2,opt,name=length,proto3" json:"length,omitempty"` + // MD5 of the upper-case sequence excluding all whitespace characters (this + // is equivalent to SQ:M5 in SAM). This value is represented in lower case + // hexadecimal format. + Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum,proto3" json:"md5checksum,omitempty"` + // The name of this reference, for example `22`. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The URI from which the sequence was obtained. Typically specifies a FASTA + // format file. + SourceUri string `protobuf:"bytes,5,opt,name=source_uri,json=sourceUri,proto3" json:"source_uri,omitempty"` + // All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally + // with a version number, for example `GCF_000001405.26`. + SourceAccessions []string `protobuf:"bytes,6,rep,name=source_accessions,json=sourceAccessions,proto3" json:"source_accessions,omitempty"` + // ID from http://www.ncbi.nlm.nih.gov/taxonomy. For example, 9606 for human. + NcbiTaxonId int32 `protobuf:"varint,7,opt,name=ncbi_taxon_id,json=ncbiTaxonId,proto3" json:"ncbi_taxon_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Reference) Reset() { *m = Reference{} } +func (m *Reference) String() string { return proto.CompactTextString(m) } +func (*Reference) ProtoMessage() {} +func (*Reference) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{0} +} +func (m *Reference) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Reference.Unmarshal(m, b) +} +func (m *Reference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Reference.Marshal(b, m, deterministic) +} +func (dst *Reference) XXX_Merge(src proto.Message) { + xxx_messageInfo_Reference.Merge(dst, src) +} +func (m *Reference) XXX_Size() int { + return xxx_messageInfo_Reference.Size(m) +} +func (m *Reference) XXX_DiscardUnknown() { + xxx_messageInfo_Reference.DiscardUnknown(m) +} + +var xxx_messageInfo_Reference proto.InternalMessageInfo + +func (m *Reference) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Reference) GetLength() int64 { + if m != nil { + return m.Length + } + return 0 +} + +func (m *Reference) GetMd5Checksum() string { + if m != nil { + return m.Md5Checksum + } + return "" +} + +func (m *Reference) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Reference) GetSourceUri() string { + if m != nil { + return m.SourceUri + } + return "" +} + +func (m *Reference) GetSourceAccessions() []string { + if m != nil { + return m.SourceAccessions + } + return nil +} + +func (m *Reference) GetNcbiTaxonId() int32 { + if m != nil { + return m.NcbiTaxonId + } + return 0 +} + +// A reference set is a set of references which typically comprise a reference +// assembly for a species, such as `GRCh38` which is representative +// of the human genome. A reference set defines a common coordinate space for +// comparing reference-aligned experimental data. A reference set contains 1 or +// more references. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type ReferenceSet struct { + // The server-generated reference set ID, unique across all reference sets. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The IDs of the reference objects that are part of this set. + // `Reference.md5checksum` must be unique within this set. + ReferenceIds []string `protobuf:"bytes,2,rep,name=reference_ids,json=referenceIds,proto3" json:"reference_ids,omitempty"` + // Order-independent MD5 checksum which identifies this reference set. 
The + // checksum is computed by sorting all lower case hexidecimal string + // `reference.md5checksum` (for all reference in this set) in + // ascending lexicographic order, concatenating, and taking the MD5 of that + // value. The resulting value is represented in lower case hexadecimal format. + Md5Checksum string `protobuf:"bytes,3,opt,name=md5checksum,proto3" json:"md5checksum,omitempty"` + // ID from http://www.ncbi.nlm.nih.gov/taxonomy (for example, 9606 for human) + // indicating the species which this reference set is intended to model. Note + // that contained references may specify a different `ncbiTaxonId`, as + // assemblies may contain reference sequences which do not belong to the + // modeled species, for example EBV in a human reference genome. + NcbiTaxonId int32 `protobuf:"varint,4,opt,name=ncbi_taxon_id,json=ncbiTaxonId,proto3" json:"ncbi_taxon_id,omitempty"` + // Free text description of this reference set. + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` + // Public id of this reference set, such as `GRCh37`. + AssemblyId string `protobuf:"bytes,6,opt,name=assembly_id,json=assemblyId,proto3" json:"assembly_id,omitempty"` + // The URI from which the references were obtained. + SourceUri string `protobuf:"bytes,7,opt,name=source_uri,json=sourceUri,proto3" json:"source_uri,omitempty"` + // All known corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally + // with a version number, for example `NC_000001.11`. + SourceAccessions []string `protobuf:"bytes,8,rep,name=source_accessions,json=sourceAccessions,proto3" json:"source_accessions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReferenceSet) Reset() { *m = ReferenceSet{} } +func (m *ReferenceSet) String() string { return proto.CompactTextString(m) } +func (*ReferenceSet) ProtoMessage() {} +func (*ReferenceSet) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{1} +} +func (m *ReferenceSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReferenceSet.Unmarshal(m, b) +} +func (m *ReferenceSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReferenceSet.Marshal(b, m, deterministic) +} +func (dst *ReferenceSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReferenceSet.Merge(dst, src) +} +func (m *ReferenceSet) XXX_Size() int { + return xxx_messageInfo_ReferenceSet.Size(m) +} +func (m *ReferenceSet) XXX_DiscardUnknown() { + xxx_messageInfo_ReferenceSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ReferenceSet proto.InternalMessageInfo + +func (m *ReferenceSet) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *ReferenceSet) GetReferenceIds() []string { + if m != nil { + return m.ReferenceIds + } + return nil +} + +func (m *ReferenceSet) GetMd5Checksum() string { + if m != nil { + return m.Md5Checksum + } + return "" +} + +func (m *ReferenceSet) GetNcbiTaxonId() int32 { + if m != nil { + return m.NcbiTaxonId + } + return 0 +} + +func (m *ReferenceSet) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ReferenceSet) GetAssemblyId() string { + if m != nil { + return m.AssemblyId + } + return "" +} + +func (m *ReferenceSet) GetSourceUri() string { + if m != nil { + return m.SourceUri + } + return "" +} + +func (m *ReferenceSet) GetSourceAccessions() []string { + if m != nil { + return m.SourceAccessions + } + return nil 
+} + +type SearchReferenceSetsRequest struct { + // If present, return reference sets for which the + // [md5checksum][google.genomics.v1.ReferenceSet.md5checksum] matches exactly. + Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums,proto3" json:"md5checksums,omitempty"` + // If present, return reference sets for which a prefix of any of + // [sourceAccessions][google.genomics.v1.ReferenceSet.source_accessions] + // match any of these strings. Accession numbers typically have a main number + // and a version, for example `NC_000001.11`. + Accessions []string `protobuf:"bytes,2,rep,name=accessions,proto3" json:"accessions,omitempty"` + // If present, return reference sets for which a substring of their + // `assemblyId` matches this string (case insensitive). + AssemblyId string `protobuf:"bytes,3,opt,name=assembly_id,json=assemblyId,proto3" json:"assembly_id,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 1024. The maximum value is 4096. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReferenceSetsRequest) Reset() { *m = SearchReferenceSetsRequest{} } +func (m *SearchReferenceSetsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchReferenceSetsRequest) ProtoMessage() {} +func (*SearchReferenceSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{2} +} +func (m *SearchReferenceSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReferenceSetsRequest.Unmarshal(m, b) +} +func (m *SearchReferenceSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReferenceSetsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchReferenceSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReferenceSetsRequest.Merge(dst, src) +} +func (m *SearchReferenceSetsRequest) XXX_Size() int { + return xxx_messageInfo_SearchReferenceSetsRequest.Size(m) +} +func (m *SearchReferenceSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReferenceSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReferenceSetsRequest proto.InternalMessageInfo + +func (m *SearchReferenceSetsRequest) GetMd5Checksums() []string { + if m != nil { + return m.Md5Checksums + } + return nil +} + +func (m *SearchReferenceSetsRequest) GetAccessions() []string { + if m != nil { + return m.Accessions + } + return nil +} + +func (m *SearchReferenceSetsRequest) GetAssemblyId() string { + if m != nil { + return m.AssemblyId + } + return "" +} + +func (m *SearchReferenceSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchReferenceSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type SearchReferenceSetsResponse struct { + // The matching references sets. 
+ ReferenceSets []*ReferenceSet `protobuf:"bytes,1,rep,name=reference_sets,json=referenceSets,proto3" json:"reference_sets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReferenceSetsResponse) Reset() { *m = SearchReferenceSetsResponse{} } +func (m *SearchReferenceSetsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchReferenceSetsResponse) ProtoMessage() {} +func (*SearchReferenceSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{3} +} +func (m *SearchReferenceSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReferenceSetsResponse.Unmarshal(m, b) +} +func (m *SearchReferenceSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReferenceSetsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchReferenceSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReferenceSetsResponse.Merge(dst, src) +} +func (m *SearchReferenceSetsResponse) XXX_Size() int { + return xxx_messageInfo_SearchReferenceSetsResponse.Size(m) +} +func (m *SearchReferenceSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReferenceSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReferenceSetsResponse proto.InternalMessageInfo + +func (m *SearchReferenceSetsResponse) GetReferenceSets() []*ReferenceSet { + if m != nil { + return m.ReferenceSets + } + return nil +} + +func (m *SearchReferenceSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type GetReferenceSetRequest struct { + // The ID of the reference set. 
+ ReferenceSetId string `protobuf:"bytes,1,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReferenceSetRequest) Reset() { *m = GetReferenceSetRequest{} } +func (m *GetReferenceSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetReferenceSetRequest) ProtoMessage() {} +func (*GetReferenceSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{4} +} +func (m *GetReferenceSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReferenceSetRequest.Unmarshal(m, b) +} +func (m *GetReferenceSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReferenceSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetReferenceSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReferenceSetRequest.Merge(dst, src) +} +func (m *GetReferenceSetRequest) XXX_Size() int { + return xxx_messageInfo_GetReferenceSetRequest.Size(m) +} +func (m *GetReferenceSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReferenceSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReferenceSetRequest proto.InternalMessageInfo + +func (m *GetReferenceSetRequest) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +type SearchReferencesRequest struct { + // If present, return references for which the + // [md5checksum][google.genomics.v1.Reference.md5checksum] matches exactly. + Md5Checksums []string `protobuf:"bytes,1,rep,name=md5checksums,proto3" json:"md5checksums,omitempty"` + // If present, return references for which a prefix of any of + // [sourceAccessions][google.genomics.v1.Reference.source_accessions] match + // any of these strings. Accession numbers typically have a main number and a + // version, for example `GCF_000001405.26`. + Accessions []string `protobuf:"bytes,2,rep,name=accessions,proto3" json:"accessions,omitempty"` + // If present, return only references which belong to this reference set. + ReferenceSetId string `protobuf:"bytes,3,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 1024. The maximum value is 4096. 
+ PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReferencesRequest) Reset() { *m = SearchReferencesRequest{} } +func (m *SearchReferencesRequest) String() string { return proto.CompactTextString(m) } +func (*SearchReferencesRequest) ProtoMessage() {} +func (*SearchReferencesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{5} +} +func (m *SearchReferencesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReferencesRequest.Unmarshal(m, b) +} +func (m *SearchReferencesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReferencesRequest.Marshal(b, m, deterministic) +} +func (dst *SearchReferencesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReferencesRequest.Merge(dst, src) +} +func (m *SearchReferencesRequest) XXX_Size() int { + return xxx_messageInfo_SearchReferencesRequest.Size(m) +} +func (m *SearchReferencesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReferencesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReferencesRequest proto.InternalMessageInfo + +func (m *SearchReferencesRequest) GetMd5Checksums() []string { + if m != nil { + return m.Md5Checksums + } + return nil +} + +func (m *SearchReferencesRequest) GetAccessions() []string { + if m != nil { + return m.Accessions + } + return nil +} + +func (m *SearchReferencesRequest) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *SearchReferencesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchReferencesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type SearchReferencesResponse struct { + // The matching references. + References []*Reference `protobuf:"bytes,1,rep,name=references,proto3" json:"references,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchReferencesResponse) Reset() { *m = SearchReferencesResponse{} } +func (m *SearchReferencesResponse) String() string { return proto.CompactTextString(m) } +func (*SearchReferencesResponse) ProtoMessage() {} +func (*SearchReferencesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{6} +} +func (m *SearchReferencesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchReferencesResponse.Unmarshal(m, b) +} +func (m *SearchReferencesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchReferencesResponse.Marshal(b, m, deterministic) +} +func (dst *SearchReferencesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchReferencesResponse.Merge(dst, src) +} +func (m *SearchReferencesResponse) XXX_Size() int { + return xxx_messageInfo_SearchReferencesResponse.Size(m) +} +func (m *SearchReferencesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchReferencesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchReferencesResponse proto.InternalMessageInfo + +func (m *SearchReferencesResponse) GetReferences() []*Reference { + if m != nil { + return m.References + } + return nil +} + +func (m *SearchReferencesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type GetReferenceRequest struct { + // The ID of the reference. + ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetReferenceRequest) Reset() { *m = GetReferenceRequest{} } +func (m *GetReferenceRequest) String() string { return proto.CompactTextString(m) } +func (*GetReferenceRequest) ProtoMessage() {} +func (*GetReferenceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{7} +} +func (m *GetReferenceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetReferenceRequest.Unmarshal(m, b) +} +func (m *GetReferenceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetReferenceRequest.Marshal(b, m, deterministic) +} +func (dst *GetReferenceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetReferenceRequest.Merge(dst, src) +} +func (m *GetReferenceRequest) XXX_Size() int { + return xxx_messageInfo_GetReferenceRequest.Size(m) +} +func (m *GetReferenceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetReferenceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetReferenceRequest proto.InternalMessageInfo + +func (m *GetReferenceRequest) GetReferenceId() string { + if m != nil { + return m.ReferenceId + } + return "" +} + +type ListBasesRequest struct { + // The ID of the reference. + ReferenceId string `protobuf:"bytes,1,opt,name=reference_id,json=referenceId,proto3" json:"reference_id,omitempty"` + // The start position (0-based) of this query. Defaults to 0. + Start int64 `protobuf:"varint,2,opt,name=start,proto3" json:"start,omitempty"` + // The end position (0-based, exclusive) of this query. Defaults to the length + // of this reference. 
+ End int64 `protobuf:"varint,3,opt,name=end,proto3" json:"end,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of bases to return in a single page. If unspecified, + // defaults to 200Kbp (kilo base pairs). The maximum value is 10Mbp (mega base + // pairs). + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBasesRequest) Reset() { *m = ListBasesRequest{} } +func (m *ListBasesRequest) String() string { return proto.CompactTextString(m) } +func (*ListBasesRequest) ProtoMessage() {} +func (*ListBasesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{8} +} +func (m *ListBasesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBasesRequest.Unmarshal(m, b) +} +func (m *ListBasesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBasesRequest.Marshal(b, m, deterministic) +} +func (dst *ListBasesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBasesRequest.Merge(dst, src) +} +func (m *ListBasesRequest) XXX_Size() int { + return xxx_messageInfo_ListBasesRequest.Size(m) +} +func (m *ListBasesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListBasesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBasesRequest proto.InternalMessageInfo + +func (m *ListBasesRequest) GetReferenceId() string { + if m != nil { + return m.ReferenceId + } + return "" +} + +func (m *ListBasesRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *ListBasesRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *ListBasesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListBasesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +type ListBasesResponse struct { + // The offset position (0-based) of the given `sequence` from the + // start of this `Reference`. This value will differ for each page + // in a paginated request. + Offset int64 `protobuf:"varint,1,opt,name=offset,proto3" json:"offset,omitempty"` + // A substring of the bases that make up this reference. + Sequence string `protobuf:"bytes,2,opt,name=sequence,proto3" json:"sequence,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,3,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListBasesResponse) Reset() { *m = ListBasesResponse{} } +func (m *ListBasesResponse) String() string { return proto.CompactTextString(m) } +func (*ListBasesResponse) ProtoMessage() {} +func (*ListBasesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_references_4bc5034e4d222e4d, []int{9} +} +func (m *ListBasesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListBasesResponse.Unmarshal(m, b) +} +func (m *ListBasesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListBasesResponse.Marshal(b, m, deterministic) +} +func (dst *ListBasesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListBasesResponse.Merge(dst, src) +} +func (m *ListBasesResponse) XXX_Size() int { + return xxx_messageInfo_ListBasesResponse.Size(m) +} +func (m *ListBasesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListBasesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListBasesResponse proto.InternalMessageInfo + +func (m *ListBasesResponse) GetOffset() int64 { + if m != nil { + return m.Offset + } + return 0 +} + +func (m *ListBasesResponse) GetSequence() string { + if m != nil { + return m.Sequence + } + return "" +} + +func (m *ListBasesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*Reference)(nil), "google.genomics.v1.Reference") + proto.RegisterType((*ReferenceSet)(nil), "google.genomics.v1.ReferenceSet") + proto.RegisterType((*SearchReferenceSetsRequest)(nil), "google.genomics.v1.SearchReferenceSetsRequest") + proto.RegisterType((*SearchReferenceSetsResponse)(nil), "google.genomics.v1.SearchReferenceSetsResponse") + proto.RegisterType((*GetReferenceSetRequest)(nil), "google.genomics.v1.GetReferenceSetRequest") + proto.RegisterType((*SearchReferencesRequest)(nil), "google.genomics.v1.SearchReferencesRequest") + proto.RegisterType((*SearchReferencesResponse)(nil), "google.genomics.v1.SearchReferencesResponse") + proto.RegisterType((*GetReferenceRequest)(nil), "google.genomics.v1.GetReferenceRequest") + proto.RegisterType((*ListBasesRequest)(nil), "google.genomics.v1.ListBasesRequest") + proto.RegisterType((*ListBasesResponse)(nil), "google.genomics.v1.ListBasesResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ReferenceServiceV1Client is the client API for ReferenceServiceV1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ReferenceServiceV1Client interface { + // Searches for reference sets which match the given criteria. 
+ // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReferenceSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L71) + SearchReferenceSets(ctx context.Context, in *SearchReferenceSetsRequest, opts ...grpc.CallOption) (*SearchReferenceSetsResponse, error) + // Gets a reference set. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReferenceSet](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L83). + GetReferenceSet(ctx context.Context, in *GetReferenceSetRequest, opts ...grpc.CallOption) (*ReferenceSet, error) + // Searches for references which match the given criteria. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReferences](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L146). + SearchReferences(ctx context.Context, in *SearchReferencesRequest, opts ...grpc.CallOption) (*SearchReferencesResponse, error) + // Gets a reference. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReference](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L158). + GetReference(ctx context.Context, in *GetReferenceRequest, opts ...grpc.CallOption) (*Reference, error) + // Lists the bases in a reference, optionally restricted to a range. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReferenceBases](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L221). + ListBases(ctx context.Context, in *ListBasesRequest, opts ...grpc.CallOption) (*ListBasesResponse, error) +} + +type referenceServiceV1Client struct { + cc *grpc.ClientConn +} + +func NewReferenceServiceV1Client(cc *grpc.ClientConn) ReferenceServiceV1Client { + return &referenceServiceV1Client{cc} +} + +func (c *referenceServiceV1Client) SearchReferenceSets(ctx context.Context, in *SearchReferenceSetsRequest, opts ...grpc.CallOption) (*SearchReferenceSetsResponse, error) { + out := new(SearchReferenceSetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferenceSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceV1Client) GetReferenceSet(ctx context.Context, in *GetReferenceSetRequest, opts ...grpc.CallOption) (*ReferenceSet, error) { + out := new(ReferenceSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReferenceSet", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceV1Client) SearchReferences(ctx context.Context, in *SearchReferencesRequest, opts ...grpc.CallOption) (*SearchReferencesResponse, error) { + out := new(SearchReferencesResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/SearchReferences", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceV1Client) GetReference(ctx context.Context, in *GetReferenceRequest, opts ...grpc.CallOption) (*Reference, error) { + out := new(Reference) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/GetReference", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *referenceServiceV1Client) ListBases(ctx context.Context, in *ListBasesRequest, opts ...grpc.CallOption) (*ListBasesResponse, error) { + out := new(ListBasesResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.ReferenceServiceV1/ListBases", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ReferenceServiceV1Server is the server API for ReferenceServiceV1 service. +type ReferenceServiceV1Server interface { + // Searches for reference sets which match the given criteria. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReferenceSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L71) + SearchReferenceSets(context.Context, *SearchReferenceSetsRequest) (*SearchReferenceSetsResponse, error) + // Gets a reference set. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReferenceSet](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L83). + GetReferenceSet(context.Context, *GetReferenceSetRequest) (*ReferenceSet, error) + // Searches for references which match the given criteria. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchReferences](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L146). + SearchReferences(context.Context, *SearchReferencesRequest) (*SearchReferencesResponse, error) + // Gets a reference. + // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReference](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L158). + GetReference(context.Context, *GetReferenceRequest) (*Reference, error) + // Lists the bases in a reference, optionally restricted to a range. 
+ // + // For the definitions of references and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.getReferenceBases](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/referencemethods.avdl#L221). + ListBases(context.Context, *ListBasesRequest) (*ListBasesResponse, error) +} + +func RegisterReferenceServiceV1Server(s *grpc.Server, srv ReferenceServiceV1Server) { + s.RegisterService(&_ReferenceServiceV1_serviceDesc, srv) +} + +func _ReferenceServiceV1_SearchReferenceSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchReferenceSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceV1Server).SearchReferenceSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReferenceServiceV1/SearchReferenceSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceV1Server).SearchReferenceSets(ctx, req.(*SearchReferenceSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceServiceV1_GetReferenceSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReferenceSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceV1Server).GetReferenceSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReferenceServiceV1/GetReferenceSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceV1Server).GetReferenceSet(ctx, req.(*GetReferenceSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceServiceV1_SearchReferences_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchReferencesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceV1Server).SearchReferences(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReferenceServiceV1/SearchReferences", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceV1Server).SearchReferences(ctx, req.(*SearchReferencesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceServiceV1_GetReference_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetReferenceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceV1Server).GetReference(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReferenceServiceV1/GetReference", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceV1Server).GetReference(ctx, req.(*GetReferenceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _ReferenceServiceV1_ListBases_Handler(srv interface{}, ctx context.Context, dec 
func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListBasesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ReferenceServiceV1Server).ListBases(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.ReferenceServiceV1/ListBases", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ReferenceServiceV1Server).ListBases(ctx, req.(*ListBasesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ReferenceServiceV1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.ReferenceServiceV1", + HandlerType: (*ReferenceServiceV1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SearchReferenceSets", + Handler: _ReferenceServiceV1_SearchReferenceSets_Handler, + }, + { + MethodName: "GetReferenceSet", + Handler: _ReferenceServiceV1_GetReferenceSet_Handler, + }, + { + MethodName: "SearchReferences", + Handler: _ReferenceServiceV1_SearchReferences_Handler, + }, + { + MethodName: "GetReference", + Handler: _ReferenceServiceV1_GetReference_Handler, + }, + { + MethodName: "ListBases", + Handler: _ReferenceServiceV1_ListBases_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1/references.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1/references.proto", fileDescriptor_references_4bc5034e4d222e4d) +} + +var fileDescriptor_references_4bc5034e4d222e4d = []byte{ + // 851 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x41, 0x6f, 0x1b, 0x45, + 0x14, 0xd6, 0x78, 0x63, 0x37, 0x7e, 0x76, 0x12, 0xf7, 0x15, 0xc2, 0xca, 0x25, 0xd4, 0x6c, 0x9a, + 0x62, 0x35, 0x95, 0x57, 0x29, 0x42, 0x42, 0x45, 0x1c, 0xc8, 0xa5, 0x8a, 0xc4, 0x21, 0xda, 0x14, + 0x0e, 0x5c, 0x56, 0x9b, 0xdd, 0x89, 0x33, 0x34, 0xde, 0x31, 0x3b, 0x93, 0xa8, 0xb4, 0xca, 0x01, + 0x24, 0x8e, 0xc0, 0x81, 0x0b, 0x88, 0xdf, 0xc2, 0x89, 0x9f, 0xc0, 0x09, 0x71, 0xe5, 0x47, 0x70, + 0x44, 0x33, 0x3b, 0xbb, 0x1e, 0xaf, 0x97, 0xd8, 0x52, 0xb9, 0xed, 0x7c, 0xf3, 0xe6, 0xcd, 0xf7, + 0x7d, 0x6f, 0xde, 0xec, 0xc0, 0xee, 0x98, 0xf3, 0xf1, 0x05, 0xf5, 0xc7, 0x34, 0xe5, 0x13, 0x16, + 0x0b, 0xff, 0xea, 0xc0, 0xcf, 0xe8, 0x19, 0xcd, 0x68, 0x1a, 0x53, 0x31, 0x9a, 0x66, 0x5c, 0x72, + 0xc4, 0x3c, 0x68, 0x54, 0x04, 0x8d, 0xae, 0x0e, 0xfa, 0x6f, 0x9b, 0x85, 0xd1, 0x94, 0xf9, 0x51, + 0x9a, 0x72, 0x19, 0x49, 0xc6, 0x53, 0xb3, 0xc2, 0xfb, 0x93, 0x40, 0x3b, 0x28, 0xd2, 0xe0, 0x26, + 0x34, 0x58, 0xe2, 0x92, 0x01, 0x19, 0xb6, 0x83, 0x06, 0x4b, 0x70, 0x1b, 0x5a, 0x17, 0x34, 0x1d, + 0xcb, 0x73, 0xb7, 0x31, 0x20, 0x43, 0x27, 0x30, 0x23, 0x1c, 0x40, 0x67, 0x92, 0x7c, 0x10, 0x9f, + 0xd3, 0xf8, 0xb9, 0xb8, 0x9c, 0xb8, 0x8e, 0x5e, 0x60, 0x43, 0x88, 0xb0, 0x96, 0x46, 0x13, 0xea, + 0xae, 0xe9, 0x29, 0xfd, 0x8d, 0x3b, 0x00, 0x82, 0x5f, 0x66, 0x31, 0x0d, 0x2f, 0x33, 0xe6, 0x36, + 0xf5, 0x4c, 0x3b, 0x47, 0x3e, 0xcb, 0x18, 0xee, 0xc3, 0x6d, 0x33, 0x1d, 0xc5, 0x31, 0x15, 0x42, + 0xb1, 0x74, 0x5b, 0x03, 0x67, 0xd8, 0x0e, 0x7a, 0xf9, 0xc4, 0x27, 0x25, 0x8e, 0x1e, 0x6c, 0xa4, + 0xf1, 0x29, 0x0b, 0x65, 0xf4, 0x82, 0xa7, 0x21, 0x4b, 0xdc, 0x5b, 0x03, 0x32, 0x6c, 0x06, 0x1d, + 0x05, 0x3e, 0x53, 0xd8, 0x51, 0xe2, 0xfd, 0xdc, 0x80, 0x6e, 0xa9, 0xed, 0x84, 0xca, 0x05, 0x79, + 0xbb, 0xb0, 0x51, 0x5a, 0x18, 0xb2, 0x44, 0xb8, 0x0d, 0xbd, 0x5b, 0xb7, 0x04, 0x8f, 0x12, 0xb1, + 0x82, 0xd6, 0x05, 0x2e, 0x6b, 0x0b, 0x5c, 0x54, 0x96, 0x84, 0x8a, 0x38, 0x63, 0x53, 0xe5, 0xbe, + 
0x11, 0x6f, 0x43, 0x78, 0x0f, 0x3a, 0x91, 0x10, 0x74, 0x72, 0x7a, 0xf1, 0xb5, 0xca, 0xd1, 0xd2, + 0x11, 0x50, 0x40, 0x47, 0x49, 0xc5, 0xbe, 0x5b, 0x2b, 0xd9, 0xb7, 0x5e, 0x6f, 0x9f, 0xf7, 0x1b, + 0x81, 0xfe, 0x09, 0x8d, 0xb2, 0xf8, 0xdc, 0x36, 0x48, 0x04, 0xf4, 0xab, 0x4b, 0x2a, 0x24, 0x7a, + 0xd0, 0xb5, 0x04, 0x0a, 0x97, 0xe4, 0xbe, 0xd8, 0x18, 0xbe, 0x03, 0x60, 0x6d, 0x94, 0x3b, 0x67, + 0x21, 0x55, 0x3d, 0x4e, 0x9d, 0x9e, 0x69, 0x34, 0xa6, 0xa1, 0xe4, 0xcf, 0x69, 0x6a, 0x0e, 0x4a, + 0x5b, 0x21, 0xcf, 0x14, 0x80, 0x77, 0x41, 0x0f, 0x42, 0xc1, 0x5e, 0x52, 0xed, 0x57, 0x33, 0x58, + 0x57, 0xc0, 0x09, 0x7b, 0x49, 0xbd, 0x1f, 0x08, 0xdc, 0xad, 0xe5, 0x2f, 0xa6, 0x3c, 0x15, 0x14, + 0x9f, 0xc2, 0xe6, 0xac, 0xb2, 0x82, 0xca, 0x5c, 0x42, 0xe7, 0xf1, 0x60, 0xb4, 0xd8, 0x21, 0x23, + 0x3b, 0x45, 0x30, 0x3b, 0x11, 0x2a, 0x21, 0x3e, 0x80, 0xad, 0x94, 0xbe, 0x90, 0xa1, 0xc5, 0xb4, + 0xa1, 0x99, 0x6e, 0x28, 0xf8, 0xb8, 0x60, 0xeb, 0x1d, 0xc2, 0xf6, 0x53, 0x2a, 0xe7, 0x32, 0x19, + 0x2f, 0x87, 0xd0, 0x9b, 0xa3, 0x12, 0x96, 0x47, 0x70, 0xd3, 0xde, 0xea, 0x28, 0xf1, 0x7e, 0x27, + 0xf0, 0x56, 0x45, 0xd4, 0xff, 0x5a, 0x91, 0x3a, 0x26, 0x4e, 0x1d, 0x93, 0xd7, 0x2a, 0xcd, 0x37, + 0x04, 0xdc, 0x45, 0x15, 0xa6, 0x2e, 0x1f, 0x03, 0xcc, 0x2e, 0x2d, 0x53, 0x93, 0x9d, 0x1b, 0x6b, + 0x12, 0x58, 0x0b, 0x56, 0xae, 0xc6, 0x87, 0x70, 0xc7, 0xae, 0x46, 0x61, 0xe2, 0xbb, 0xd0, 0xb5, + 0xfb, 0xdd, 0x94, 0xa1, 0x63, 0xb5, 0xbb, 0xf7, 0x0b, 0x81, 0xde, 0xa7, 0x4c, 0xc8, 0xc3, 0x48, + 0xcc, 0xcc, 0x5f, 0xbe, 0x0e, 0xdf, 0x80, 0xa6, 0x90, 0x51, 0x26, 0xcd, 0x45, 0x99, 0x0f, 0xb0, + 0x07, 0x0e, 0x4d, 0x73, 0x93, 0x9d, 0x40, 0x7d, 0xbe, 0x96, 0xb3, 0x1c, 0x6e, 0x5b, 0xd4, 0x8c, + 0xa3, 0xdb, 0xd0, 0xe2, 0x67, 0x67, 0x82, 0x4a, 0xcd, 0xca, 0x09, 0xcc, 0x08, 0xfb, 0xb0, 0x2e, + 0x14, 0xfd, 0x34, 0xa6, 0xc6, 0xa3, 0x72, 0x5c, 0x67, 0xa3, 0x53, 0x63, 0xe3, 0xe3, 0xbf, 0x9a, + 0x80, 0xd6, 0x91, 0xce, 0xae, 0x58, 0x4c, 0x3f, 0x3f, 0xc0, 0x5f, 0x09, 0xdc, 0xa9, 0x69, 0x3e, + 0x1c, 0xd5, 0x15, 0xf2, 0xbf, 0x6f, 0x99, 0xbe, 0xbf, 0x72, 0x7c, 0xae, 0xd5, 0xdb, 0xfd, 0xf6, + 0x8f, 0xbf, 0x7f, 0x6a, 0xec, 0x78, 0xee, 0xfc, 0xcf, 0x8f, 0x4a, 0xe1, 0x0b, 0xbd, 0xec, 0x09, + 0x79, 0x88, 0xdf, 0x13, 0xd8, 0xaa, 0xb4, 0x22, 0x3e, 0xac, 0xdb, 0xa9, 0xbe, 0x5f, 0xfb, 0x4b, + 0xaf, 0x08, 0xef, 0x91, 0xa6, 0xf1, 0x00, 0xef, 0x2f, 0xd2, 0x78, 0x55, 0x6d, 0xb0, 0x6b, 0xfc, + 0x91, 0x40, 0xaf, 0xda, 0x0f, 0xb8, 0xbf, 0x82, 0xf4, 0xd2, 0xa7, 0x47, 0xab, 0x05, 0x1b, 0x93, + 0x06, 0x9a, 0x5d, 0xdf, 0x7b, 0x73, 0x9e, 0x9d, 0xe5, 0xd0, 0x35, 0x74, 0x6d, 0xed, 0xf8, 0xde, + 0x32, 0x77, 0x0a, 0x22, 0x37, 0x77, 0xaa, 0xb7, 0xa7, 0x77, 0xbe, 0x87, 0x3b, 0x95, 0x9d, 0x5f, + 0xd9, 0xcd, 0x73, 0x8d, 0xdf, 0x11, 0x68, 0x97, 0xe7, 0x18, 0xef, 0xd7, 0xe5, 0xac, 0x76, 0x60, + 0x7f, 0x6f, 0x49, 0x94, 0xd1, 0xbe, 0xaf, 0x19, 0xec, 0xe1, 0xee, 0x8d, 0x0c, 0xfc, 0x53, 0xb5, + 0xe8, 0xf0, 0x4b, 0xd8, 0x8e, 0xf9, 0xa4, 0x26, 0xf1, 0xe1, 0xd6, 0xcc, 0xd6, 0x63, 0xf5, 0x4a, + 0x3a, 0x26, 0x5f, 0x3c, 0x29, 0xc2, 0xf8, 0x45, 0x94, 0x8e, 0x47, 0x3c, 0x1b, 0xab, 0x97, 0x98, + 0x7e, 0x43, 0xf9, 0xf9, 0x54, 0x34, 0x65, 0xc2, 0x7e, 0x9d, 0x7d, 0x54, 0x7c, 0xff, 0x43, 0xc8, + 0x69, 0x4b, 0x47, 0xbe, 0xff, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xd7, 0x9f, 0xb6, 0x11, 0xc6, + 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go new file mode 100644 index 0000000..ab9a130 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1/variants.pb.go @@ 
-0,0 +1,3486 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1/variants.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Operations to be performed during import on Variant info fields. +// These operations are set for each info field in the info_merge_config +// map of ImportVariantsRequest, which is plumbed down to the +// MergeVariantRequests generated by the import job. +type InfoMergeOperation int32 + +const ( + InfoMergeOperation_INFO_MERGE_OPERATION_UNSPECIFIED InfoMergeOperation = 0 + // By default, Variant info fields are persisted if the Variant doesn't + // already exist in the variantset. If the Variant is equivalent to a + // Variant already in the variantset, the incoming Variant's info field + // is ignored in favor of that of the already persisted Variant. + InfoMergeOperation_IGNORE_NEW InfoMergeOperation = 1 + // This operation removes an info field from the incoming Variant + // and persists this info field in each of the incoming Variant's Calls. 
+ InfoMergeOperation_MOVE_TO_CALLS InfoMergeOperation = 2 +) + +var InfoMergeOperation_name = map[int32]string{ + 0: "INFO_MERGE_OPERATION_UNSPECIFIED", + 1: "IGNORE_NEW", + 2: "MOVE_TO_CALLS", +} +var InfoMergeOperation_value = map[string]int32{ + "INFO_MERGE_OPERATION_UNSPECIFIED": 0, + "IGNORE_NEW": 1, + "MOVE_TO_CALLS": 2, +} + +func (x InfoMergeOperation) String() string { + return proto.EnumName(InfoMergeOperation_name, int32(x)) +} +func (InfoMergeOperation) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{0} +} + +type VariantSetMetadata_Type int32 + +const ( + VariantSetMetadata_TYPE_UNSPECIFIED VariantSetMetadata_Type = 0 + VariantSetMetadata_INTEGER VariantSetMetadata_Type = 1 + VariantSetMetadata_FLOAT VariantSetMetadata_Type = 2 + VariantSetMetadata_FLAG VariantSetMetadata_Type = 3 + VariantSetMetadata_CHARACTER VariantSetMetadata_Type = 4 + VariantSetMetadata_STRING VariantSetMetadata_Type = 5 +) + +var VariantSetMetadata_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "INTEGER", + 2: "FLOAT", + 3: "FLAG", + 4: "CHARACTER", + 5: "STRING", +} +var VariantSetMetadata_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "INTEGER": 1, + "FLOAT": 2, + "FLAG": 3, + "CHARACTER": 4, + "STRING": 5, +} + +func (x VariantSetMetadata_Type) String() string { + return proto.EnumName(VariantSetMetadata_Type_name, int32(x)) +} +func (VariantSetMetadata_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{0, 0} +} + +type ImportVariantsRequest_Format int32 + +const ( + ImportVariantsRequest_FORMAT_UNSPECIFIED ImportVariantsRequest_Format = 0 + // VCF (Variant Call Format). The VCF files may be gzip compressed. gVCF is + // also supported. + ImportVariantsRequest_FORMAT_VCF ImportVariantsRequest_Format = 1 + // Complete Genomics masterVarBeta format. The masterVarBeta files may + // be bzip2 compressed. + ImportVariantsRequest_FORMAT_COMPLETE_GENOMICS ImportVariantsRequest_Format = 2 +) + +var ImportVariantsRequest_Format_name = map[int32]string{ + 0: "FORMAT_UNSPECIFIED", + 1: "FORMAT_VCF", + 2: "FORMAT_COMPLETE_GENOMICS", +} +var ImportVariantsRequest_Format_value = map[string]int32{ + "FORMAT_UNSPECIFIED": 0, + "FORMAT_VCF": 1, + "FORMAT_COMPLETE_GENOMICS": 2, +} + +func (x ImportVariantsRequest_Format) String() string { + return proto.EnumName(ImportVariantsRequest_Format_name, int32(x)) +} +func (ImportVariantsRequest_Format) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{6, 0} +} + +type ExportVariantSetRequest_Format int32 + +const ( + ExportVariantSetRequest_FORMAT_UNSPECIFIED ExportVariantSetRequest_Format = 0 + // Export the data to Google BigQuery. + ExportVariantSetRequest_FORMAT_BIGQUERY ExportVariantSetRequest_Format = 1 +) + +var ExportVariantSetRequest_Format_name = map[int32]string{ + 0: "FORMAT_UNSPECIFIED", + 1: "FORMAT_BIGQUERY", +} +var ExportVariantSetRequest_Format_value = map[string]int32{ + "FORMAT_UNSPECIFIED": 0, + "FORMAT_BIGQUERY": 1, +} + +func (x ExportVariantSetRequest_Format) String() string { + return proto.EnumName(ExportVariantSetRequest_Format_name, int32(x)) +} +func (ExportVariantSetRequest_Format) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{9, 0} +} + +// Metadata describes a single piece of variant call metadata. +// These data include a top level key and either a single value string (value) +// or a list of key-value pairs (info.) 
+// Value and info are mutually exclusive. +type VariantSetMetadata struct { + // The top-level key. + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + // The value field for simple metadata + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // User-provided ID field, not enforced by this API. + // Two or more pieces of structured metadata with identical + // id and key fields are considered equivalent. + Id string `protobuf:"bytes,4,opt,name=id,proto3" json:"id,omitempty"` + // The type of data. Possible types include: Integer, Float, + // Flag, Character, and String. + Type VariantSetMetadata_Type `protobuf:"varint,5,opt,name=type,proto3,enum=google.genomics.v1.VariantSetMetadata_Type" json:"type,omitempty"` + // The number of values that can be included in a field described by this + // metadata. + Number string `protobuf:"bytes,8,opt,name=number,proto3" json:"number,omitempty"` + // A textual description of this metadata. + Description string `protobuf:"bytes,7,opt,name=description,proto3" json:"description,omitempty"` + // Remaining structured metadata key-value pairs. This must be of the form + // map (string key mapping to a list of string values). + Info map[string]*_struct.ListValue `protobuf:"bytes,3,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VariantSetMetadata) Reset() { *m = VariantSetMetadata{} } +func (m *VariantSetMetadata) String() string { return proto.CompactTextString(m) } +func (*VariantSetMetadata) ProtoMessage() {} +func (*VariantSetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{0} +} +func (m *VariantSetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VariantSetMetadata.Unmarshal(m, b) +} +func (m *VariantSetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VariantSetMetadata.Marshal(b, m, deterministic) +} +func (dst *VariantSetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_VariantSetMetadata.Merge(dst, src) +} +func (m *VariantSetMetadata) XXX_Size() int { + return xxx_messageInfo_VariantSetMetadata.Size(m) +} +func (m *VariantSetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_VariantSetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_VariantSetMetadata proto.InternalMessageInfo + +func (m *VariantSetMetadata) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *VariantSetMetadata) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *VariantSetMetadata) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *VariantSetMetadata) GetType() VariantSetMetadata_Type { + if m != nil { + return m.Type + } + return VariantSetMetadata_TYPE_UNSPECIFIED +} + +func (m *VariantSetMetadata) GetNumber() string { + if m != nil { + return m.Number + } + return "" +} + +func (m *VariantSetMetadata) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *VariantSetMetadata) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +// A variant set is a collection of call sets and variants. It contains summary +// statistics of those contents. A variant set belongs to a dataset. 
+// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type VariantSet struct { + // The dataset to which this variant set belongs. + DatasetId string `protobuf:"bytes,1,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // The server-generated variant set ID, unique across all variant sets. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // The reference set to which the variant set is mapped. The reference set + // describes the alignment provenance of the variant set, while the + // `referenceBounds` describe the shape of the actual variant data. The + // reference set's reference names are a superset of those found in the + // `referenceBounds`. + // + // For example, given a variant set that is mapped to the GRCh38 reference set + // and contains a single variant on reference 'X', `referenceBounds` would + // contain only an entry for 'X', while the associated reference set + // enumerates all possible references: '1', '2', 'X', 'Y', 'MT', etc. + ReferenceSetId string `protobuf:"bytes,6,opt,name=reference_set_id,json=referenceSetId,proto3" json:"reference_set_id,omitempty"` + // A list of all references used by the variants in a variant set + // with associated coordinate upper bounds for each one. + ReferenceBounds []*ReferenceBound `protobuf:"bytes,5,rep,name=reference_bounds,json=referenceBounds,proto3" json:"reference_bounds,omitempty"` + // The metadata associated with this variant set. + Metadata []*VariantSetMetadata `protobuf:"bytes,4,rep,name=metadata,proto3" json:"metadata,omitempty"` + // User-specified, mutable name. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + // A textual description of this variant set. 
+ Description string `protobuf:"bytes,8,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VariantSet) Reset() { *m = VariantSet{} } +func (m *VariantSet) String() string { return proto.CompactTextString(m) } +func (*VariantSet) ProtoMessage() {} +func (*VariantSet) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{1} +} +func (m *VariantSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VariantSet.Unmarshal(m, b) +} +func (m *VariantSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VariantSet.Marshal(b, m, deterministic) +} +func (dst *VariantSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_VariantSet.Merge(dst, src) +} +func (m *VariantSet) XXX_Size() int { + return xxx_messageInfo_VariantSet.Size(m) +} +func (m *VariantSet) XXX_DiscardUnknown() { + xxx_messageInfo_VariantSet.DiscardUnknown(m) +} + +var xxx_messageInfo_VariantSet proto.InternalMessageInfo + +func (m *VariantSet) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *VariantSet) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *VariantSet) GetReferenceSetId() string { + if m != nil { + return m.ReferenceSetId + } + return "" +} + +func (m *VariantSet) GetReferenceBounds() []*ReferenceBound { + if m != nil { + return m.ReferenceBounds + } + return nil +} + +func (m *VariantSet) GetMetadata() []*VariantSetMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *VariantSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VariantSet) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// A variant represents a change in DNA sequence relative to a reference +// sequence. For example, a variant could represent a SNP or an insertion. +// Variants belong to a variant set. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +// +// Each of the calls on a variant represent a determination of genotype with +// respect to that variant. For example, a call might assign probability of 0.32 +// to the occurrence of a SNP named rs1234 in a sample named NA12345. A call +// belongs to a call set, which contains related calls typically from one +// sample. +type Variant struct { + // The ID of the variant set this variant belongs to. + VariantSetId string `protobuf:"bytes,15,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // The server-generated variant ID, unique across all variants. + Id string `protobuf:"bytes,2,opt,name=id,proto3" json:"id,omitempty"` + // Names for the variant, for example a RefSNP ID. + Names []string `protobuf:"bytes,3,rep,name=names,proto3" json:"names,omitempty"` + // The date this variant was created, in milliseconds from the epoch. + Created int64 `protobuf:"varint,12,opt,name=created,proto3" json:"created,omitempty"` + // The reference on which this variant occurs. + // (such as `chr20` or `X`) + ReferenceName string `protobuf:"bytes,14,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The position at which this variant occurs (0-based). + // This corresponds to the first base of the string of reference bases. 
+ Start int64 `protobuf:"varint,16,opt,name=start,proto3" json:"start,omitempty"` + // The end position (0-based) of this variant. This corresponds to the first + // base after the last base in the reference allele. So, the length of + // the reference allele is (end - start). This is useful for variants + // that don't explicitly give alternate bases, for example large deletions. + End int64 `protobuf:"varint,13,opt,name=end,proto3" json:"end,omitempty"` + // The reference bases for this variant. They start at the given + // position. + ReferenceBases string `protobuf:"bytes,6,opt,name=reference_bases,json=referenceBases,proto3" json:"reference_bases,omitempty"` + // The bases that appear instead of the reference bases. + AlternateBases []string `protobuf:"bytes,7,rep,name=alternate_bases,json=alternateBases,proto3" json:"alternate_bases,omitempty"` + // A measure of how likely this variant is to be real. + // A higher value is better. + Quality float64 `protobuf:"fixed64,8,opt,name=quality,proto3" json:"quality,omitempty"` + // A list of filters (normally quality filters) this variant has failed. + // `PASS` indicates this variant has passed all filters. + Filter []string `protobuf:"bytes,9,rep,name=filter,proto3" json:"filter,omitempty"` + // A map of additional variant information. This must be of the form + // map (string key mapping to a list of string values). + Info map[string]*_struct.ListValue `protobuf:"bytes,10,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The variant calls for this particular variant. Each one represents the + // determination of genotype with respect to this variant. + Calls []*VariantCall `protobuf:"bytes,11,rep,name=calls,proto3" json:"calls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Variant) Reset() { *m = Variant{} } +func (m *Variant) String() string { return proto.CompactTextString(m) } +func (*Variant) ProtoMessage() {} +func (*Variant) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{2} +} +func (m *Variant) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Variant.Unmarshal(m, b) +} +func (m *Variant) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Variant.Marshal(b, m, deterministic) +} +func (dst *Variant) XXX_Merge(src proto.Message) { + xxx_messageInfo_Variant.Merge(dst, src) +} +func (m *Variant) XXX_Size() int { + return xxx_messageInfo_Variant.Size(m) +} +func (m *Variant) XXX_DiscardUnknown() { + xxx_messageInfo_Variant.DiscardUnknown(m) +} + +var xxx_messageInfo_Variant proto.InternalMessageInfo + +func (m *Variant) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *Variant) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Variant) GetNames() []string { + if m != nil { + return m.Names + } + return nil +} + +func (m *Variant) GetCreated() int64 { + if m != nil { + return m.Created + } + return 0 +} + +func (m *Variant) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *Variant) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Variant) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *Variant) GetReferenceBases() string { + if m != nil { + return m.ReferenceBases + } + return "" +} + +func (m 
*Variant) GetAlternateBases() []string { + if m != nil { + return m.AlternateBases + } + return nil +} + +func (m *Variant) GetQuality() float64 { + if m != nil { + return m.Quality + } + return 0 +} + +func (m *Variant) GetFilter() []string { + if m != nil { + return m.Filter + } + return nil +} + +func (m *Variant) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +func (m *Variant) GetCalls() []*VariantCall { + if m != nil { + return m.Calls + } + return nil +} + +// A call represents the determination of genotype with respect to a particular +// variant. It may include associated information such as quality and phasing. +// For example, a call might assign a probability of 0.32 to the occurrence of +// a SNP named rs1234 in a call set with the name NA12345. +type VariantCall struct { + // The ID of the call set this variant call belongs to. + CallSetId string `protobuf:"bytes,8,opt,name=call_set_id,json=callSetId,proto3" json:"call_set_id,omitempty"` + // The name of the call set this variant call belongs to. + CallSetName string `protobuf:"bytes,9,opt,name=call_set_name,json=callSetName,proto3" json:"call_set_name,omitempty"` + // The genotype of this variant call. Each value represents either the value + // of the `referenceBases` field or a 1-based index into + // `alternateBases`. If a variant had a `referenceBases` + // value of `T` and an `alternateBases` + // value of `["A", "C"]`, and the `genotype` was + // `[2, 1]`, that would mean the call + // represented the heterozygous value `CA` for this variant. + // If the `genotype` was instead `[0, 1]`, the + // represented value would be `TA`. Ordering of the + // genotype values is important if the `phaseset` is present. + // If a genotype is not called (that is, a `.` is present in the + // GT string) -1 is returned. + Genotype []int32 `protobuf:"varint,7,rep,packed,name=genotype,proto3" json:"genotype,omitempty"` + // If this field is present, this variant call's genotype ordering implies + // the phase of the bases and is consistent with any other variant calls in + // the same reference sequence which have the same phaseset value. + // When importing data from VCF, if the genotype data was phased but no + // phase set was specified this field will be set to `*`. + Phaseset string `protobuf:"bytes,5,opt,name=phaseset,proto3" json:"phaseset,omitempty"` + // The genotype likelihoods for this variant call. Each array entry + // represents how likely a specific genotype is for this call. The value + // ordering is defined by the GL tag in the VCF spec. + // If Phred-scaled genotype likelihood scores (PL) are available and + // log10(P) genotype likelihood scores (GL) are not, PL scores are converted + // to GL scores. If both are available, PL scores are stored in `info`. + GenotypeLikelihood []float64 `protobuf:"fixed64,6,rep,packed,name=genotype_likelihood,json=genotypeLikelihood,proto3" json:"genotype_likelihood,omitempty"` + // A map of additional variant call information. This must be of the form + // map (string key mapping to a list of string values). 
+ Info map[string]*_struct.ListValue `protobuf:"bytes,2,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VariantCall) Reset() { *m = VariantCall{} } +func (m *VariantCall) String() string { return proto.CompactTextString(m) } +func (*VariantCall) ProtoMessage() {} +func (*VariantCall) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{3} +} +func (m *VariantCall) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VariantCall.Unmarshal(m, b) +} +func (m *VariantCall) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VariantCall.Marshal(b, m, deterministic) +} +func (dst *VariantCall) XXX_Merge(src proto.Message) { + xxx_messageInfo_VariantCall.Merge(dst, src) +} +func (m *VariantCall) XXX_Size() int { + return xxx_messageInfo_VariantCall.Size(m) +} +func (m *VariantCall) XXX_DiscardUnknown() { + xxx_messageInfo_VariantCall.DiscardUnknown(m) +} + +var xxx_messageInfo_VariantCall proto.InternalMessageInfo + +func (m *VariantCall) GetCallSetId() string { + if m != nil { + return m.CallSetId + } + return "" +} + +func (m *VariantCall) GetCallSetName() string { + if m != nil { + return m.CallSetName + } + return "" +} + +func (m *VariantCall) GetGenotype() []int32 { + if m != nil { + return m.Genotype + } + return nil +} + +func (m *VariantCall) GetPhaseset() string { + if m != nil { + return m.Phaseset + } + return "" +} + +func (m *VariantCall) GetGenotypeLikelihood() []float64 { + if m != nil { + return m.GenotypeLikelihood + } + return nil +} + +func (m *VariantCall) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +// A call set is a collection of variant calls, typically for one sample. It +// belongs to a variant set. +// +// For more genomics resource definitions, see [Fundamentals of Google +// Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) +type CallSet struct { + // The server-generated call set ID, unique across all call sets. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // The call set name. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The sample ID this call set corresponds to. + SampleId string `protobuf:"bytes,7,opt,name=sample_id,json=sampleId,proto3" json:"sample_id,omitempty"` + // The IDs of the variant sets this call set belongs to. This field must + // have exactly length one, as a call set belongs to a single variant set. + // This field is repeated for compatibility with the + // [GA4GH 0.5.1 + // API](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variants.avdl#L76). + VariantSetIds []string `protobuf:"bytes,6,rep,name=variant_set_ids,json=variantSetIds,proto3" json:"variant_set_ids,omitempty"` + // The date this call set was created in milliseconds from the epoch. + Created int64 `protobuf:"varint,5,opt,name=created,proto3" json:"created,omitempty"` + // A map of additional call set information. This must be of the form + // map (string key mapping to a list of string values). 
+ Info map[string]*_struct.ListValue `protobuf:"bytes,4,rep,name=info,proto3" json:"info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CallSet) Reset() { *m = CallSet{} } +func (m *CallSet) String() string { return proto.CompactTextString(m) } +func (*CallSet) ProtoMessage() {} +func (*CallSet) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{4} +} +func (m *CallSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CallSet.Unmarshal(m, b) +} +func (m *CallSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CallSet.Marshal(b, m, deterministic) +} +func (dst *CallSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_CallSet.Merge(dst, src) +} +func (m *CallSet) XXX_Size() int { + return xxx_messageInfo_CallSet.Size(m) +} +func (m *CallSet) XXX_DiscardUnknown() { + xxx_messageInfo_CallSet.DiscardUnknown(m) +} + +var xxx_messageInfo_CallSet proto.InternalMessageInfo + +func (m *CallSet) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *CallSet) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CallSet) GetSampleId() string { + if m != nil { + return m.SampleId + } + return "" +} + +func (m *CallSet) GetVariantSetIds() []string { + if m != nil { + return m.VariantSetIds + } + return nil +} + +func (m *CallSet) GetCreated() int64 { + if m != nil { + return m.Created + } + return 0 +} + +func (m *CallSet) GetInfo() map[string]*_struct.ListValue { + if m != nil { + return m.Info + } + return nil +} + +// ReferenceBound records an upper bound for the starting coordinate of +// variants in a particular reference. +type ReferenceBound struct { + // The name of the reference associated with this reference bound. + ReferenceName string `protobuf:"bytes,1,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // An upper bound (inclusive) on the starting coordinate of any + // variant in the reference sequence. 
+ UpperBound int64 `protobuf:"varint,2,opt,name=upper_bound,json=upperBound,proto3" json:"upper_bound,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReferenceBound) Reset() { *m = ReferenceBound{} } +func (m *ReferenceBound) String() string { return proto.CompactTextString(m) } +func (*ReferenceBound) ProtoMessage() {} +func (*ReferenceBound) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{5} +} +func (m *ReferenceBound) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReferenceBound.Unmarshal(m, b) +} +func (m *ReferenceBound) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReferenceBound.Marshal(b, m, deterministic) +} +func (dst *ReferenceBound) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReferenceBound.Merge(dst, src) +} +func (m *ReferenceBound) XXX_Size() int { + return xxx_messageInfo_ReferenceBound.Size(m) +} +func (m *ReferenceBound) XXX_DiscardUnknown() { + xxx_messageInfo_ReferenceBound.DiscardUnknown(m) +} + +var xxx_messageInfo_ReferenceBound proto.InternalMessageInfo + +func (m *ReferenceBound) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *ReferenceBound) GetUpperBound() int64 { + if m != nil { + return m.UpperBound + } + return 0 +} + +// The variant data import request. +type ImportVariantsRequest struct { + // Required. The variant set to which variant data should be imported. + VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // A list of URIs referencing variant files in Google Cloud Storage. URIs can + // include wildcards [as described + // here](https://cloud.google.com/storage/docs/gsutil/addlhelp/WildcardNames). + // Note that recursive wildcards ('**') are not supported. + SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"` + // The format of the variant data being imported. If unspecified, defaults to + // to `VCF`. + Format ImportVariantsRequest_Format `protobuf:"varint,3,opt,name=format,proto3,enum=google.genomics.v1.ImportVariantsRequest_Format" json:"format,omitempty"` + // Convert reference names to the canonical representation. + // hg19 haploytypes (those reference names containing "_hap") + // are not modified in any way. + // All other reference names are modified according to the following rules: + // The reference name is capitalized. + // The "chr" prefix is dropped for all autosomes and sex chromsomes. + // For example "chr17" becomes "17" and "chrX" becomes "X". + // All mitochondrial chromosomes ("chrM", "chrMT", etc) become "MT". + NormalizeReferenceNames bool `protobuf:"varint,5,opt,name=normalize_reference_names,json=normalizeReferenceNames,proto3" json:"normalize_reference_names,omitempty"` + // A mapping between info field keys and the InfoMergeOperations to + // be performed on them. This is plumbed down to the MergeVariantRequests + // generated by the resulting import job. 
+ InfoMergeConfig map[string]InfoMergeOperation `protobuf:"bytes,6,rep,name=info_merge_config,json=infoMergeConfig,proto3" json:"info_merge_config,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.genomics.v1.InfoMergeOperation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportVariantsRequest) Reset() { *m = ImportVariantsRequest{} } +func (m *ImportVariantsRequest) String() string { return proto.CompactTextString(m) } +func (*ImportVariantsRequest) ProtoMessage() {} +func (*ImportVariantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{6} +} +func (m *ImportVariantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportVariantsRequest.Unmarshal(m, b) +} +func (m *ImportVariantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportVariantsRequest.Marshal(b, m, deterministic) +} +func (dst *ImportVariantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportVariantsRequest.Merge(dst, src) +} +func (m *ImportVariantsRequest) XXX_Size() int { + return xxx_messageInfo_ImportVariantsRequest.Size(m) +} +func (m *ImportVariantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ImportVariantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportVariantsRequest proto.InternalMessageInfo + +func (m *ImportVariantsRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *ImportVariantsRequest) GetSourceUris() []string { + if m != nil { + return m.SourceUris + } + return nil +} + +func (m *ImportVariantsRequest) GetFormat() ImportVariantsRequest_Format { + if m != nil { + return m.Format + } + return ImportVariantsRequest_FORMAT_UNSPECIFIED +} + +func (m *ImportVariantsRequest) GetNormalizeReferenceNames() bool { + if m != nil { + return m.NormalizeReferenceNames + } + return false +} + +func (m *ImportVariantsRequest) GetInfoMergeConfig() map[string]InfoMergeOperation { + if m != nil { + return m.InfoMergeConfig + } + return nil +} + +// The variant data import response. +type ImportVariantsResponse struct { + // IDs of the call sets created during the import. 
+ CallSetIds []string `protobuf:"bytes,1,rep,name=call_set_ids,json=callSetIds,proto3" json:"call_set_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImportVariantsResponse) Reset() { *m = ImportVariantsResponse{} } +func (m *ImportVariantsResponse) String() string { return proto.CompactTextString(m) } +func (*ImportVariantsResponse) ProtoMessage() {} +func (*ImportVariantsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{7} +} +func (m *ImportVariantsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImportVariantsResponse.Unmarshal(m, b) +} +func (m *ImportVariantsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImportVariantsResponse.Marshal(b, m, deterministic) +} +func (dst *ImportVariantsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImportVariantsResponse.Merge(dst, src) +} +func (m *ImportVariantsResponse) XXX_Size() int { + return xxx_messageInfo_ImportVariantsResponse.Size(m) +} +func (m *ImportVariantsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ImportVariantsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ImportVariantsResponse proto.InternalMessageInfo + +func (m *ImportVariantsResponse) GetCallSetIds() []string { + if m != nil { + return m.CallSetIds + } + return nil +} + +// The CreateVariantSet request +type CreateVariantSetRequest struct { + // Required. The variant set to be created. Must have a valid `datasetId`. + VariantSet *VariantSet `protobuf:"bytes,1,opt,name=variant_set,json=variantSet,proto3" json:"variant_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVariantSetRequest) Reset() { *m = CreateVariantSetRequest{} } +func (m *CreateVariantSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateVariantSetRequest) ProtoMessage() {} +func (*CreateVariantSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{8} +} +func (m *CreateVariantSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVariantSetRequest.Unmarshal(m, b) +} +func (m *CreateVariantSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVariantSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateVariantSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVariantSetRequest.Merge(dst, src) +} +func (m *CreateVariantSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateVariantSetRequest.Size(m) +} +func (m *CreateVariantSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVariantSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVariantSetRequest proto.InternalMessageInfo + +func (m *CreateVariantSetRequest) GetVariantSet() *VariantSet { + if m != nil { + return m.VariantSet + } + return nil +} + +// The variant data export request. +type ExportVariantSetRequest struct { + // Required. The ID of the variant set that contains variant data which + // should be exported. The caller must have READ access to this variant set. + VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // If provided, only variant call information from the specified call sets + // will be exported. By default all variant calls are exported. 
+ CallSetIds []string `protobuf:"bytes,2,rep,name=call_set_ids,json=callSetIds,proto3" json:"call_set_ids,omitempty"` + // Required. The Google Cloud project ID that owns the destination + // BigQuery dataset. The caller must have WRITE access to this project. This + // project will also own the resulting export job. + ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The format for the exported data. + Format ExportVariantSetRequest_Format `protobuf:"varint,4,opt,name=format,proto3,enum=google.genomics.v1.ExportVariantSetRequest_Format" json:"format,omitempty"` + // Required. The BigQuery dataset to export data to. This dataset must already + // exist. Note that this is distinct from the Genomics concept of "dataset". + BigqueryDataset string `protobuf:"bytes,5,opt,name=bigquery_dataset,json=bigqueryDataset,proto3" json:"bigquery_dataset,omitempty"` + // Required. The BigQuery table to export data to. + // If the table doesn't exist, it will be created. If it already exists, it + // will be overwritten. + BigqueryTable string `protobuf:"bytes,6,opt,name=bigquery_table,json=bigqueryTable,proto3" json:"bigquery_table,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExportVariantSetRequest) Reset() { *m = ExportVariantSetRequest{} } +func (m *ExportVariantSetRequest) String() string { return proto.CompactTextString(m) } +func (*ExportVariantSetRequest) ProtoMessage() {} +func (*ExportVariantSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{9} +} +func (m *ExportVariantSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExportVariantSetRequest.Unmarshal(m, b) +} +func (m *ExportVariantSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExportVariantSetRequest.Marshal(b, m, deterministic) +} +func (dst *ExportVariantSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExportVariantSetRequest.Merge(dst, src) +} +func (m *ExportVariantSetRequest) XXX_Size() int { + return xxx_messageInfo_ExportVariantSetRequest.Size(m) +} +func (m *ExportVariantSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExportVariantSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExportVariantSetRequest proto.InternalMessageInfo + +func (m *ExportVariantSetRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *ExportVariantSetRequest) GetCallSetIds() []string { + if m != nil { + return m.CallSetIds + } + return nil +} + +func (m *ExportVariantSetRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ExportVariantSetRequest) GetFormat() ExportVariantSetRequest_Format { + if m != nil { + return m.Format + } + return ExportVariantSetRequest_FORMAT_UNSPECIFIED +} + +func (m *ExportVariantSetRequest) GetBigqueryDataset() string { + if m != nil { + return m.BigqueryDataset + } + return "" +} + +func (m *ExportVariantSetRequest) GetBigqueryTable() string { + if m != nil { + return m.BigqueryTable + } + return "" +} + +// The variant set request. +type GetVariantSetRequest struct { + // Required. The ID of the variant set. 
+ VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVariantSetRequest) Reset() { *m = GetVariantSetRequest{} } +func (m *GetVariantSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetVariantSetRequest) ProtoMessage() {} +func (*GetVariantSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{10} +} +func (m *GetVariantSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVariantSetRequest.Unmarshal(m, b) +} +func (m *GetVariantSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVariantSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetVariantSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVariantSetRequest.Merge(dst, src) +} +func (m *GetVariantSetRequest) XXX_Size() int { + return xxx_messageInfo_GetVariantSetRequest.Size(m) +} +func (m *GetVariantSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVariantSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVariantSetRequest proto.InternalMessageInfo + +func (m *GetVariantSetRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +// The search variant sets request. +type SearchVariantSetsRequest struct { + // Exactly one dataset ID must be provided here. Only variant sets which + // belong to this dataset will be returned. + DatasetIds []string `protobuf:"bytes,1,rep,name=dataset_ids,json=datasetIds,proto3" json:"dataset_ids,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 1024. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchVariantSetsRequest) Reset() { *m = SearchVariantSetsRequest{} } +func (m *SearchVariantSetsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchVariantSetsRequest) ProtoMessage() {} +func (*SearchVariantSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{11} +} +func (m *SearchVariantSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchVariantSetsRequest.Unmarshal(m, b) +} +func (m *SearchVariantSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchVariantSetsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchVariantSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchVariantSetsRequest.Merge(dst, src) +} +func (m *SearchVariantSetsRequest) XXX_Size() int { + return xxx_messageInfo_SearchVariantSetsRequest.Size(m) +} +func (m *SearchVariantSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchVariantSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchVariantSetsRequest proto.InternalMessageInfo + +func (m *SearchVariantSetsRequest) GetDatasetIds() []string { + if m != nil { + return m.DatasetIds + } + return nil +} + +func (m *SearchVariantSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchVariantSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// The search variant sets response. +type SearchVariantSetsResponse struct { + // The variant sets belonging to the requested dataset. + VariantSets []*VariantSet `protobuf:"bytes,1,rep,name=variant_sets,json=variantSets,proto3" json:"variant_sets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchVariantSetsResponse) Reset() { *m = SearchVariantSetsResponse{} } +func (m *SearchVariantSetsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchVariantSetsResponse) ProtoMessage() {} +func (*SearchVariantSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{12} +} +func (m *SearchVariantSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchVariantSetsResponse.Unmarshal(m, b) +} +func (m *SearchVariantSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchVariantSetsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchVariantSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchVariantSetsResponse.Merge(dst, src) +} +func (m *SearchVariantSetsResponse) XXX_Size() int { + return xxx_messageInfo_SearchVariantSetsResponse.Size(m) +} +func (m *SearchVariantSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchVariantSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchVariantSetsResponse proto.InternalMessageInfo + +func (m *SearchVariantSetsResponse) GetVariantSets() []*VariantSet { + if m != nil { + return m.VariantSets + } + return nil +} + +func (m *SearchVariantSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The delete variant set request. +type DeleteVariantSetRequest struct { + // The ID of the variant set to be deleted. + VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteVariantSetRequest) Reset() { *m = DeleteVariantSetRequest{} } +func (m *DeleteVariantSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteVariantSetRequest) ProtoMessage() {} +func (*DeleteVariantSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{13} +} +func (m *DeleteVariantSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteVariantSetRequest.Unmarshal(m, b) +} +func (m *DeleteVariantSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteVariantSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteVariantSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteVariantSetRequest.Merge(dst, src) +} +func (m *DeleteVariantSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteVariantSetRequest.Size(m) +} +func (m *DeleteVariantSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteVariantSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteVariantSetRequest proto.InternalMessageInfo + +func (m *DeleteVariantSetRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +type UpdateVariantSetRequest struct { + // The ID of the variant to be updated (must already exist). + VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // The new variant data. Only the variant_set.metadata will be considered + // for update. 
+ VariantSet *VariantSet `protobuf:"bytes,2,opt,name=variant_set,json=variantSet,proto3" json:"variant_set,omitempty"` + // An optional mask specifying which fields to update. Supported fields: + // + // * [metadata][google.genomics.v1.VariantSet.metadata]. + // * [name][google.genomics.v1.VariantSet.name]. + // * [description][google.genomics.v1.VariantSet.description]. + // + // Leaving `updateMask` unset is equivalent to specifying all mutable + // fields. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateVariantSetRequest) Reset() { *m = UpdateVariantSetRequest{} } +func (m *UpdateVariantSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateVariantSetRequest) ProtoMessage() {} +func (*UpdateVariantSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{14} +} +func (m *UpdateVariantSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateVariantSetRequest.Unmarshal(m, b) +} +func (m *UpdateVariantSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateVariantSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateVariantSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateVariantSetRequest.Merge(dst, src) +} +func (m *UpdateVariantSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateVariantSetRequest.Size(m) +} +func (m *UpdateVariantSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateVariantSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateVariantSetRequest proto.InternalMessageInfo + +func (m *UpdateVariantSetRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *UpdateVariantSetRequest) GetVariantSet() *VariantSet { + if m != nil { + return m.VariantSet + } + return nil +} + +func (m *UpdateVariantSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The variant search request. +type SearchVariantsRequest struct { + // At most one variant set ID must be provided. Only variants from this + // variant set will be returned. If omitted, a call set id must be included in + // the request. + VariantSetIds []string `protobuf:"bytes,1,rep,name=variant_set_ids,json=variantSetIds,proto3" json:"variant_set_ids,omitempty"` + // Only return variants which have exactly this name. + VariantName string `protobuf:"bytes,2,opt,name=variant_name,json=variantName,proto3" json:"variant_name,omitempty"` + // Only return variant calls which belong to call sets with these ids. + // Leaving this blank returns all variant calls. If a variant has no + // calls belonging to any of these call sets, it won't be returned at all. + CallSetIds []string `protobuf:"bytes,3,rep,name=call_set_ids,json=callSetIds,proto3" json:"call_set_ids,omitempty"` + // Required. Only return variants in this reference sequence. + ReferenceName string `protobuf:"bytes,4,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The beginning of the window (0-based, inclusive) for which + // overlapping variants should be returned. If unspecified, defaults to 0. + Start int64 `protobuf:"varint,5,opt,name=start,proto3" json:"start,omitempty"` + // The end of the window, 0-based exclusive. 
If unspecified or 0, defaults to + // the length of the reference. + End int64 `protobuf:"varint,6,opt,name=end,proto3" json:"end,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,7,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of variants to return in a single page. If unspecified, + // defaults to 5000. The maximum value is 10000. + PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The maximum number of calls to return in a single page. Note that this + // limit may be exceeded in the event that a matching variant contains more + // calls than the requested maximum. If unspecified, defaults to 5000. The + // maximum value is 10000. + MaxCalls int32 `protobuf:"varint,9,opt,name=max_calls,json=maxCalls,proto3" json:"max_calls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchVariantsRequest) Reset() { *m = SearchVariantsRequest{} } +func (m *SearchVariantsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchVariantsRequest) ProtoMessage() {} +func (*SearchVariantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{15} +} +func (m *SearchVariantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchVariantsRequest.Unmarshal(m, b) +} +func (m *SearchVariantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchVariantsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchVariantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchVariantsRequest.Merge(dst, src) +} +func (m *SearchVariantsRequest) XXX_Size() int { + return xxx_messageInfo_SearchVariantsRequest.Size(m) +} +func (m *SearchVariantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchVariantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchVariantsRequest proto.InternalMessageInfo + +func (m *SearchVariantsRequest) GetVariantSetIds() []string { + if m != nil { + return m.VariantSetIds + } + return nil +} + +func (m *SearchVariantsRequest) GetVariantName() string { + if m != nil { + return m.VariantName + } + return "" +} + +func (m *SearchVariantsRequest) GetCallSetIds() []string { + if m != nil { + return m.CallSetIds + } + return nil +} + +func (m *SearchVariantsRequest) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *SearchVariantsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *SearchVariantsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +func (m *SearchVariantsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchVariantsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *SearchVariantsRequest) GetMaxCalls() int32 { + if m != nil { + return m.MaxCalls + } + return 0 +} + +// The variant search response. +type SearchVariantsResponse struct { + // The list of matching Variants. + Variants []*Variant `protobuf:"bytes,1,rep,name=variants,proto3" json:"variants,omitempty"` + // The continuation token, which is used to page through large result sets. 
+ // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchVariantsResponse) Reset() { *m = SearchVariantsResponse{} } +func (m *SearchVariantsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchVariantsResponse) ProtoMessage() {} +func (*SearchVariantsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{16} +} +func (m *SearchVariantsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchVariantsResponse.Unmarshal(m, b) +} +func (m *SearchVariantsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchVariantsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchVariantsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchVariantsResponse.Merge(dst, src) +} +func (m *SearchVariantsResponse) XXX_Size() int { + return xxx_messageInfo_SearchVariantsResponse.Size(m) +} +func (m *SearchVariantsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchVariantsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchVariantsResponse proto.InternalMessageInfo + +func (m *SearchVariantsResponse) GetVariants() []*Variant { + if m != nil { + return m.Variants + } + return nil +} + +func (m *SearchVariantsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type CreateVariantRequest struct { + // The variant to be created. + Variant *Variant `protobuf:"bytes,1,opt,name=variant,proto3" json:"variant,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateVariantRequest) Reset() { *m = CreateVariantRequest{} } +func (m *CreateVariantRequest) String() string { return proto.CompactTextString(m) } +func (*CreateVariantRequest) ProtoMessage() {} +func (*CreateVariantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{17} +} +func (m *CreateVariantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateVariantRequest.Unmarshal(m, b) +} +func (m *CreateVariantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateVariantRequest.Marshal(b, m, deterministic) +} +func (dst *CreateVariantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateVariantRequest.Merge(dst, src) +} +func (m *CreateVariantRequest) XXX_Size() int { + return xxx_messageInfo_CreateVariantRequest.Size(m) +} +func (m *CreateVariantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateVariantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateVariantRequest proto.InternalMessageInfo + +func (m *CreateVariantRequest) GetVariant() *Variant { + if m != nil { + return m.Variant + } + return nil +} + +type UpdateVariantRequest struct { + // The ID of the variant to be updated. + VariantId string `protobuf:"bytes,1,opt,name=variant_id,json=variantId,proto3" json:"variant_id,omitempty"` + // The new variant data. + Variant *Variant `protobuf:"bytes,2,opt,name=variant,proto3" json:"variant,omitempty"` + // An optional mask specifying which fields to update. 
At this time, mutable + // fields are [names][google.genomics.v1.Variant.names] and + // [info][google.genomics.v1.Variant.info]. Acceptable values are "names" and + // "info". If unspecified, all mutable fields will be updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateVariantRequest) Reset() { *m = UpdateVariantRequest{} } +func (m *UpdateVariantRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateVariantRequest) ProtoMessage() {} +func (*UpdateVariantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{18} +} +func (m *UpdateVariantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateVariantRequest.Unmarshal(m, b) +} +func (m *UpdateVariantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateVariantRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateVariantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateVariantRequest.Merge(dst, src) +} +func (m *UpdateVariantRequest) XXX_Size() int { + return xxx_messageInfo_UpdateVariantRequest.Size(m) +} +func (m *UpdateVariantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateVariantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateVariantRequest proto.InternalMessageInfo + +func (m *UpdateVariantRequest) GetVariantId() string { + if m != nil { + return m.VariantId + } + return "" +} + +func (m *UpdateVariantRequest) GetVariant() *Variant { + if m != nil { + return m.Variant + } + return nil +} + +func (m *UpdateVariantRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteVariantRequest struct { + // The ID of the variant to be deleted. + VariantId string `protobuf:"bytes,1,opt,name=variant_id,json=variantId,proto3" json:"variant_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteVariantRequest) Reset() { *m = DeleteVariantRequest{} } +func (m *DeleteVariantRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteVariantRequest) ProtoMessage() {} +func (*DeleteVariantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{19} +} +func (m *DeleteVariantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteVariantRequest.Unmarshal(m, b) +} +func (m *DeleteVariantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteVariantRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteVariantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteVariantRequest.Merge(dst, src) +} +func (m *DeleteVariantRequest) XXX_Size() int { + return xxx_messageInfo_DeleteVariantRequest.Size(m) +} +func (m *DeleteVariantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteVariantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteVariantRequest proto.InternalMessageInfo + +func (m *DeleteVariantRequest) GetVariantId() string { + if m != nil { + return m.VariantId + } + return "" +} + +type GetVariantRequest struct { + // The ID of the variant. 
+ VariantId string `protobuf:"bytes,1,opt,name=variant_id,json=variantId,proto3" json:"variant_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetVariantRequest) Reset() { *m = GetVariantRequest{} } +func (m *GetVariantRequest) String() string { return proto.CompactTextString(m) } +func (*GetVariantRequest) ProtoMessage() {} +func (*GetVariantRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{20} +} +func (m *GetVariantRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetVariantRequest.Unmarshal(m, b) +} +func (m *GetVariantRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetVariantRequest.Marshal(b, m, deterministic) +} +func (dst *GetVariantRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetVariantRequest.Merge(dst, src) +} +func (m *GetVariantRequest) XXX_Size() int { + return xxx_messageInfo_GetVariantRequest.Size(m) +} +func (m *GetVariantRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetVariantRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetVariantRequest proto.InternalMessageInfo + +func (m *GetVariantRequest) GetVariantId() string { + if m != nil { + return m.VariantId + } + return "" +} + +type MergeVariantsRequest struct { + // The destination variant set. + VariantSetId string `protobuf:"bytes,1,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // The variants to be merged with existing variants. + Variants []*Variant `protobuf:"bytes,2,rep,name=variants,proto3" json:"variants,omitempty"` + // A mapping between info field keys and the InfoMergeOperations to + // be performed on them. + InfoMergeConfig map[string]InfoMergeOperation `protobuf:"bytes,3,rep,name=info_merge_config,json=infoMergeConfig,proto3" json:"info_merge_config,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.genomics.v1.InfoMergeOperation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MergeVariantsRequest) Reset() { *m = MergeVariantsRequest{} } +func (m *MergeVariantsRequest) String() string { return proto.CompactTextString(m) } +func (*MergeVariantsRequest) ProtoMessage() {} +func (*MergeVariantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{21} +} +func (m *MergeVariantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MergeVariantsRequest.Unmarshal(m, b) +} +func (m *MergeVariantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MergeVariantsRequest.Marshal(b, m, deterministic) +} +func (dst *MergeVariantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_MergeVariantsRequest.Merge(dst, src) +} +func (m *MergeVariantsRequest) XXX_Size() int { + return xxx_messageInfo_MergeVariantsRequest.Size(m) +} +func (m *MergeVariantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_MergeVariantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_MergeVariantsRequest proto.InternalMessageInfo + +func (m *MergeVariantsRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *MergeVariantsRequest) GetVariants() []*Variant { + if m != nil { + return m.Variants + } + return nil +} + +func (m *MergeVariantsRequest) GetInfoMergeConfig() map[string]InfoMergeOperation { 
+ if m != nil { + return m.InfoMergeConfig + } + return nil +} + +// The call set search request. +type SearchCallSetsRequest struct { + // Restrict the query to call sets within the given variant sets. At least one + // ID must be provided. + VariantSetIds []string `protobuf:"bytes,1,rep,name=variant_set_ids,json=variantSetIds,proto3" json:"variant_set_ids,omitempty"` + // Only return call sets for which a substring of the name matches this + // string. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // The continuation token, which is used to page through large result sets. + // To get the next page of results, set this parameter to the value of + // `nextPageToken` from the previous response. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The maximum number of results to return in a single page. If unspecified, + // defaults to 1024. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchCallSetsRequest) Reset() { *m = SearchCallSetsRequest{} } +func (m *SearchCallSetsRequest) String() string { return proto.CompactTextString(m) } +func (*SearchCallSetsRequest) ProtoMessage() {} +func (*SearchCallSetsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{22} +} +func (m *SearchCallSetsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchCallSetsRequest.Unmarshal(m, b) +} +func (m *SearchCallSetsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchCallSetsRequest.Marshal(b, m, deterministic) +} +func (dst *SearchCallSetsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchCallSetsRequest.Merge(dst, src) +} +func (m *SearchCallSetsRequest) XXX_Size() int { + return xxx_messageInfo_SearchCallSetsRequest.Size(m) +} +func (m *SearchCallSetsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchCallSetsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchCallSetsRequest proto.InternalMessageInfo + +func (m *SearchCallSetsRequest) GetVariantSetIds() []string { + if m != nil { + return m.VariantSetIds + } + return nil +} + +func (m *SearchCallSetsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SearchCallSetsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *SearchCallSetsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// The call set search response. +type SearchCallSetsResponse struct { + // The list of matching call sets. + CallSets []*CallSet `protobuf:"bytes,1,rep,name=call_sets,json=callSets,proto3" json:"call_sets,omitempty"` + // The continuation token, which is used to page through large result sets. + // Provide this value in a subsequent request to return the next page of + // results. This field will be empty if there aren't any additional results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchCallSetsResponse) Reset() { *m = SearchCallSetsResponse{} } +func (m *SearchCallSetsResponse) String() string { return proto.CompactTextString(m) } +func (*SearchCallSetsResponse) ProtoMessage() {} +func (*SearchCallSetsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{23} +} +func (m *SearchCallSetsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchCallSetsResponse.Unmarshal(m, b) +} +func (m *SearchCallSetsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchCallSetsResponse.Marshal(b, m, deterministic) +} +func (dst *SearchCallSetsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchCallSetsResponse.Merge(dst, src) +} +func (m *SearchCallSetsResponse) XXX_Size() int { + return xxx_messageInfo_SearchCallSetsResponse.Size(m) +} +func (m *SearchCallSetsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchCallSetsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchCallSetsResponse proto.InternalMessageInfo + +func (m *SearchCallSetsResponse) GetCallSets() []*CallSet { + if m != nil { + return m.CallSets + } + return nil +} + +func (m *SearchCallSetsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +type CreateCallSetRequest struct { + // The call set to be created. + CallSet *CallSet `protobuf:"bytes,1,opt,name=call_set,json=callSet,proto3" json:"call_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateCallSetRequest) Reset() { *m = CreateCallSetRequest{} } +func (m *CreateCallSetRequest) String() string { return proto.CompactTextString(m) } +func (*CreateCallSetRequest) ProtoMessage() {} +func (*CreateCallSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{24} +} +func (m *CreateCallSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateCallSetRequest.Unmarshal(m, b) +} +func (m *CreateCallSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateCallSetRequest.Marshal(b, m, deterministic) +} +func (dst *CreateCallSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateCallSetRequest.Merge(dst, src) +} +func (m *CreateCallSetRequest) XXX_Size() int { + return xxx_messageInfo_CreateCallSetRequest.Size(m) +} +func (m *CreateCallSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateCallSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateCallSetRequest proto.InternalMessageInfo + +func (m *CreateCallSetRequest) GetCallSet() *CallSet { + if m != nil { + return m.CallSet + } + return nil +} + +type UpdateCallSetRequest struct { + // The ID of the call set to be updated. + CallSetId string `protobuf:"bytes,1,opt,name=call_set_id,json=callSetId,proto3" json:"call_set_id,omitempty"` + // The new call set data. + CallSet *CallSet `protobuf:"bytes,2,opt,name=call_set,json=callSet,proto3" json:"call_set,omitempty"` + // An optional mask specifying which fields to update. At this time, the only + // mutable field is [name][google.genomics.v1.CallSet.name]. The only + // acceptable value is "name". 
If unspecified, all mutable fields will be + // updated. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateCallSetRequest) Reset() { *m = UpdateCallSetRequest{} } +func (m *UpdateCallSetRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateCallSetRequest) ProtoMessage() {} +func (*UpdateCallSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{25} +} +func (m *UpdateCallSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateCallSetRequest.Unmarshal(m, b) +} +func (m *UpdateCallSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateCallSetRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateCallSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateCallSetRequest.Merge(dst, src) +} +func (m *UpdateCallSetRequest) XXX_Size() int { + return xxx_messageInfo_UpdateCallSetRequest.Size(m) +} +func (m *UpdateCallSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateCallSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateCallSetRequest proto.InternalMessageInfo + +func (m *UpdateCallSetRequest) GetCallSetId() string { + if m != nil { + return m.CallSetId + } + return "" +} + +func (m *UpdateCallSetRequest) GetCallSet() *CallSet { + if m != nil { + return m.CallSet + } + return nil +} + +func (m *UpdateCallSetRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +type DeleteCallSetRequest struct { + // The ID of the call set to be deleted. + CallSetId string `protobuf:"bytes,1,opt,name=call_set_id,json=callSetId,proto3" json:"call_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteCallSetRequest) Reset() { *m = DeleteCallSetRequest{} } +func (m *DeleteCallSetRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteCallSetRequest) ProtoMessage() {} +func (*DeleteCallSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{26} +} +func (m *DeleteCallSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteCallSetRequest.Unmarshal(m, b) +} +func (m *DeleteCallSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteCallSetRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteCallSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteCallSetRequest.Merge(dst, src) +} +func (m *DeleteCallSetRequest) XXX_Size() int { + return xxx_messageInfo_DeleteCallSetRequest.Size(m) +} +func (m *DeleteCallSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteCallSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteCallSetRequest proto.InternalMessageInfo + +func (m *DeleteCallSetRequest) GetCallSetId() string { + if m != nil { + return m.CallSetId + } + return "" +} + +type GetCallSetRequest struct { + // The ID of the call set. 
+ CallSetId string `protobuf:"bytes,1,opt,name=call_set_id,json=callSetId,proto3" json:"call_set_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetCallSetRequest) Reset() { *m = GetCallSetRequest{} } +func (m *GetCallSetRequest) String() string { return proto.CompactTextString(m) } +func (*GetCallSetRequest) ProtoMessage() {} +func (*GetCallSetRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{27} +} +func (m *GetCallSetRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetCallSetRequest.Unmarshal(m, b) +} +func (m *GetCallSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetCallSetRequest.Marshal(b, m, deterministic) +} +func (dst *GetCallSetRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetCallSetRequest.Merge(dst, src) +} +func (m *GetCallSetRequest) XXX_Size() int { + return xxx_messageInfo_GetCallSetRequest.Size(m) +} +func (m *GetCallSetRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetCallSetRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetCallSetRequest proto.InternalMessageInfo + +func (m *GetCallSetRequest) GetCallSetId() string { + if m != nil { + return m.CallSetId + } + return "" +} + +// The stream variants request. +type StreamVariantsRequest struct { + // The Google Cloud project ID which will be billed + // for this access. The caller must have WRITE access to this project. + // Required. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The variant set ID from which to stream variants. + VariantSetId string `protobuf:"bytes,2,opt,name=variant_set_id,json=variantSetId,proto3" json:"variant_set_id,omitempty"` + // Only return variant calls which belong to call sets with these IDs. + // Leaving this blank returns all variant calls. + CallSetIds []string `protobuf:"bytes,3,rep,name=call_set_ids,json=callSetIds,proto3" json:"call_set_ids,omitempty"` + // Required. Only return variants in this reference sequence. + ReferenceName string `protobuf:"bytes,4,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + // The beginning of the window (0-based, inclusive) for which + // overlapping variants should be returned. + Start int64 `protobuf:"varint,5,opt,name=start,proto3" json:"start,omitempty"` + // The end of the window (0-based, exclusive) for which overlapping + // variants should be returned. 
+ End int64 `protobuf:"varint,6,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamVariantsRequest) Reset() { *m = StreamVariantsRequest{} } +func (m *StreamVariantsRequest) String() string { return proto.CompactTextString(m) } +func (*StreamVariantsRequest) ProtoMessage() {} +func (*StreamVariantsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{28} +} +func (m *StreamVariantsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamVariantsRequest.Unmarshal(m, b) +} +func (m *StreamVariantsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamVariantsRequest.Marshal(b, m, deterministic) +} +func (dst *StreamVariantsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamVariantsRequest.Merge(dst, src) +} +func (m *StreamVariantsRequest) XXX_Size() int { + return xxx_messageInfo_StreamVariantsRequest.Size(m) +} +func (m *StreamVariantsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamVariantsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamVariantsRequest proto.InternalMessageInfo + +func (m *StreamVariantsRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *StreamVariantsRequest) GetVariantSetId() string { + if m != nil { + return m.VariantSetId + } + return "" +} + +func (m *StreamVariantsRequest) GetCallSetIds() []string { + if m != nil { + return m.CallSetIds + } + return nil +} + +func (m *StreamVariantsRequest) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *StreamVariantsRequest) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *StreamVariantsRequest) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +type StreamVariantsResponse struct { + Variants []*Variant `protobuf:"bytes,1,rep,name=variants,proto3" json:"variants,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamVariantsResponse) Reset() { *m = StreamVariantsResponse{} } +func (m *StreamVariantsResponse) String() string { return proto.CompactTextString(m) } +func (*StreamVariantsResponse) ProtoMessage() {} +func (*StreamVariantsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_variants_b9a43810f0f9306f, []int{29} +} +func (m *StreamVariantsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamVariantsResponse.Unmarshal(m, b) +} +func (m *StreamVariantsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamVariantsResponse.Marshal(b, m, deterministic) +} +func (dst *StreamVariantsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamVariantsResponse.Merge(dst, src) +} +func (m *StreamVariantsResponse) XXX_Size() int { + return xxx_messageInfo_StreamVariantsResponse.Size(m) +} +func (m *StreamVariantsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamVariantsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamVariantsResponse proto.InternalMessageInfo + +func (m *StreamVariantsResponse) GetVariants() []*Variant { + if m != nil { + return m.Variants + } + return nil +} + +func init() { + proto.RegisterType((*VariantSetMetadata)(nil), "google.genomics.v1.VariantSetMetadata") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), 
"google.genomics.v1.VariantSetMetadata.InfoEntry") + proto.RegisterType((*VariantSet)(nil), "google.genomics.v1.VariantSet") + proto.RegisterType((*Variant)(nil), "google.genomics.v1.Variant") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.Variant.InfoEntry") + proto.RegisterType((*VariantCall)(nil), "google.genomics.v1.VariantCall") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.VariantCall.InfoEntry") + proto.RegisterType((*CallSet)(nil), "google.genomics.v1.CallSet") + proto.RegisterMapType((map[string]*_struct.ListValue)(nil), "google.genomics.v1.CallSet.InfoEntry") + proto.RegisterType((*ReferenceBound)(nil), "google.genomics.v1.ReferenceBound") + proto.RegisterType((*ImportVariantsRequest)(nil), "google.genomics.v1.ImportVariantsRequest") + proto.RegisterMapType((map[string]InfoMergeOperation)(nil), "google.genomics.v1.ImportVariantsRequest.InfoMergeConfigEntry") + proto.RegisterType((*ImportVariantsResponse)(nil), "google.genomics.v1.ImportVariantsResponse") + proto.RegisterType((*CreateVariantSetRequest)(nil), "google.genomics.v1.CreateVariantSetRequest") + proto.RegisterType((*ExportVariantSetRequest)(nil), "google.genomics.v1.ExportVariantSetRequest") + proto.RegisterType((*GetVariantSetRequest)(nil), "google.genomics.v1.GetVariantSetRequest") + proto.RegisterType((*SearchVariantSetsRequest)(nil), "google.genomics.v1.SearchVariantSetsRequest") + proto.RegisterType((*SearchVariantSetsResponse)(nil), "google.genomics.v1.SearchVariantSetsResponse") + proto.RegisterType((*DeleteVariantSetRequest)(nil), "google.genomics.v1.DeleteVariantSetRequest") + proto.RegisterType((*UpdateVariantSetRequest)(nil), "google.genomics.v1.UpdateVariantSetRequest") + proto.RegisterType((*SearchVariantsRequest)(nil), "google.genomics.v1.SearchVariantsRequest") + proto.RegisterType((*SearchVariantsResponse)(nil), "google.genomics.v1.SearchVariantsResponse") + proto.RegisterType((*CreateVariantRequest)(nil), "google.genomics.v1.CreateVariantRequest") + proto.RegisterType((*UpdateVariantRequest)(nil), "google.genomics.v1.UpdateVariantRequest") + proto.RegisterType((*DeleteVariantRequest)(nil), "google.genomics.v1.DeleteVariantRequest") + proto.RegisterType((*GetVariantRequest)(nil), "google.genomics.v1.GetVariantRequest") + proto.RegisterType((*MergeVariantsRequest)(nil), "google.genomics.v1.MergeVariantsRequest") + proto.RegisterMapType((map[string]InfoMergeOperation)(nil), "google.genomics.v1.MergeVariantsRequest.InfoMergeConfigEntry") + proto.RegisterType((*SearchCallSetsRequest)(nil), "google.genomics.v1.SearchCallSetsRequest") + proto.RegisterType((*SearchCallSetsResponse)(nil), "google.genomics.v1.SearchCallSetsResponse") + proto.RegisterType((*CreateCallSetRequest)(nil), "google.genomics.v1.CreateCallSetRequest") + proto.RegisterType((*UpdateCallSetRequest)(nil), "google.genomics.v1.UpdateCallSetRequest") + proto.RegisterType((*DeleteCallSetRequest)(nil), "google.genomics.v1.DeleteCallSetRequest") + proto.RegisterType((*GetCallSetRequest)(nil), "google.genomics.v1.GetCallSetRequest") + proto.RegisterType((*StreamVariantsRequest)(nil), "google.genomics.v1.StreamVariantsRequest") + proto.RegisterType((*StreamVariantsResponse)(nil), "google.genomics.v1.StreamVariantsResponse") + proto.RegisterEnum("google.genomics.v1.InfoMergeOperation", InfoMergeOperation_name, InfoMergeOperation_value) + proto.RegisterEnum("google.genomics.v1.VariantSetMetadata_Type", VariantSetMetadata_Type_name, VariantSetMetadata_Type_value) + 
proto.RegisterEnum("google.genomics.v1.ImportVariantsRequest_Format", ImportVariantsRequest_Format_name, ImportVariantsRequest_Format_value) + proto.RegisterEnum("google.genomics.v1.ExportVariantSetRequest_Format", ExportVariantSetRequest_Format_name, ExportVariantSetRequest_Format_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// StreamingVariantServiceClient is the client API for StreamingVariantService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type StreamingVariantServiceClient interface { + // Returns a stream of all the variants matching the search request, ordered + // by reference name, position, and ID. + StreamVariants(ctx context.Context, in *StreamVariantsRequest, opts ...grpc.CallOption) (StreamingVariantService_StreamVariantsClient, error) +} + +type streamingVariantServiceClient struct { + cc *grpc.ClientConn +} + +func NewStreamingVariantServiceClient(cc *grpc.ClientConn) StreamingVariantServiceClient { + return &streamingVariantServiceClient{cc} +} + +func (c *streamingVariantServiceClient) StreamVariants(ctx context.Context, in *StreamVariantsRequest, opts ...grpc.CallOption) (StreamingVariantService_StreamVariantsClient, error) { + stream, err := c.cc.NewStream(ctx, &_StreamingVariantService_serviceDesc.Streams[0], "/google.genomics.v1.StreamingVariantService/StreamVariants", opts...) + if err != nil { + return nil, err + } + x := &streamingVariantServiceStreamVariantsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type StreamingVariantService_StreamVariantsClient interface { + Recv() (*StreamVariantsResponse, error) + grpc.ClientStream +} + +type streamingVariantServiceStreamVariantsClient struct { + grpc.ClientStream +} + +func (x *streamingVariantServiceStreamVariantsClient) Recv() (*StreamVariantsResponse, error) { + m := new(StreamVariantsResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// StreamingVariantServiceServer is the server API for StreamingVariantService service. +type StreamingVariantServiceServer interface { + // Returns a stream of all the variants matching the search request, ordered + // by reference name, position, and ID. 
+ StreamVariants(*StreamVariantsRequest, StreamingVariantService_StreamVariantsServer) error +} + +func RegisterStreamingVariantServiceServer(s *grpc.Server, srv StreamingVariantServiceServer) { + s.RegisterService(&_StreamingVariantService_serviceDesc, srv) +} + +func _StreamingVariantService_StreamVariants_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(StreamVariantsRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(StreamingVariantServiceServer).StreamVariants(m, &streamingVariantServiceStreamVariantsServer{stream}) +} + +type StreamingVariantService_StreamVariantsServer interface { + Send(*StreamVariantsResponse) error + grpc.ServerStream +} + +type streamingVariantServiceStreamVariantsServer struct { + grpc.ServerStream +} + +func (x *streamingVariantServiceStreamVariantsServer) Send(m *StreamVariantsResponse) error { + return x.ServerStream.SendMsg(m) +} + +var _StreamingVariantService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.StreamingVariantService", + HandlerType: (*StreamingVariantServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamVariants", + Handler: _StreamingVariantService_StreamVariants_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/genomics/v1/variants.proto", +} + +// VariantServiceV1Client is the client API for VariantServiceV1 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type VariantServiceV1Client interface { + // Creates variant data by asynchronously importing the provided information. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The variants for import will be merged with any existing variant that + // matches its reference sequence, start, end, reference bases, and + // alternative bases. If no such variant exists, a new one will be created. + // + // When variants are merged, the call information from the new variant + // is added to the existing variant, and Variant info fields are merged + // as specified in + // [infoMergeConfig][google.genomics.v1.ImportVariantsRequest.info_merge_config]. + // As a special case, for single-sample VCF files, QUAL and FILTER fields will + // be moved to the call level; these are sometimes interpreted in a + // call-specific context. + // Imported VCF headers are appended to the metadata already in a variant set. + ImportVariants(ctx context.Context, in *ImportVariantsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Creates a new variant set. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The provided variant set must have a valid `datasetId` set - all other + // fields are optional. Note that the `id` field will be ignored, as this is + // assigned by the server. + CreateVariantSet(ctx context.Context, in *CreateVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) + // Exports variant set data to an external destination. 
+ // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + ExportVariantSet(ctx context.Context, in *ExportVariantSetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets a variant set by ID. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetVariantSet(ctx context.Context, in *GetVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) + // Returns a list of all variant sets matching search criteria. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchVariantSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L49). + SearchVariantSets(ctx context.Context, in *SearchVariantSetsRequest, opts ...grpc.CallOption) (*SearchVariantSetsResponse, error) + // Deletes a variant set including all variants, call sets, and calls within. + // This is not reversible. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteVariantSet(ctx context.Context, in *DeleteVariantSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates a variant set using patch semantics. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + UpdateVariantSet(ctx context.Context, in *UpdateVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) + // Gets a list of variants matching the criteria. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchVariants](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L126). + SearchVariants(ctx context.Context, in *SearchVariantsRequest, opts ...grpc.CallOption) (*SearchVariantsResponse, error) + // Creates a new variant. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateVariant(ctx context.Context, in *CreateVariantRequest, opts ...grpc.CallOption) (*Variant, error) + // Updates a variant. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. Returns the modified variant without + // its calls. + UpdateVariant(ctx context.Context, in *UpdateVariantRequest, opts ...grpc.CallOption) (*Variant, error) + // Deletes a variant. 
+ // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteVariant(ctx context.Context, in *DeleteVariantRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a variant by ID. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetVariant(ctx context.Context, in *GetVariantRequest, opts ...grpc.CallOption) (*Variant, error) + // Merges the given variants with existing variants. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Each variant will be + // merged with an existing variant that matches its reference sequence, + // start, end, reference bases, and alternative bases. If no such variant + // exists, a new one will be created. + // + // When variants are merged, the call information from the new variant + // is added to the existing variant. Variant info fields are merged as + // specified in the + // [infoMergeConfig][google.genomics.v1.MergeVariantsRequest.info_merge_config] + // field of the MergeVariantsRequest. + // + // Please exercise caution when using this method! It is easy to introduce + // mistakes in existing variants and difficult to back out of them. For + // example, + // suppose you were trying to merge a new variant with an existing one and + // both + // variants contain calls that belong to callsets with the same callset ID. + // + // // Existing variant - irrelevant fields trimmed for clarity + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 0, + // 1 + // ], + // } + // ] + // } + // + // // New variant with conflicting call information + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 1, + // 1 + // ], + // } + // ] + // } + // + // The resulting merged variant would overwrite the existing calls with those + // from the new variant: + // + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 1, + // 1 + // ], + // } + // ] + // } + // + // This may be the desired outcome, but it is up to the user to determine if + // if that is indeed the case. + MergeVariants(ctx context.Context, in *MergeVariantsRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a list of call sets matching the criteria. 
+ // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchCallSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L178). + SearchCallSets(ctx context.Context, in *SearchCallSetsRequest, opts ...grpc.CallOption) (*SearchCallSetsResponse, error) + // Creates a new call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateCallSet(ctx context.Context, in *CreateCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) + // Updates a call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. + UpdateCallSet(ctx context.Context, in *UpdateCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) + // Deletes a call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteCallSet(ctx context.Context, in *DeleteCallSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets a call set by ID. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetCallSet(ctx context.Context, in *GetCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) +} + +type variantServiceV1Client struct { + cc *grpc.ClientConn +} + +func NewVariantServiceV1Client(cc *grpc.ClientConn) VariantServiceV1Client { + return &variantServiceV1Client{cc} +} + +func (c *variantServiceV1Client) ImportVariants(ctx context.Context, in *ImportVariantsRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/ImportVariants", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) CreateVariantSet(ctx context.Context, in *CreateVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) { + out := new(VariantSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/CreateVariantSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) ExportVariantSet(ctx context.Context, in *ExportVariantSetRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/ExportVariantSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) GetVariantSet(ctx context.Context, in *GetVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) { + out := new(VariantSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/GetVariantSet", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) SearchVariantSets(ctx context.Context, in *SearchVariantSetsRequest, opts ...grpc.CallOption) (*SearchVariantSetsResponse, error) { + out := new(SearchVariantSetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/SearchVariantSets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) DeleteVariantSet(ctx context.Context, in *DeleteVariantSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/DeleteVariantSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) UpdateVariantSet(ctx context.Context, in *UpdateVariantSetRequest, opts ...grpc.CallOption) (*VariantSet, error) { + out := new(VariantSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/UpdateVariantSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) SearchVariants(ctx context.Context, in *SearchVariantsRequest, opts ...grpc.CallOption) (*SearchVariantsResponse, error) { + out := new(SearchVariantsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/SearchVariants", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) CreateVariant(ctx context.Context, in *CreateVariantRequest, opts ...grpc.CallOption) (*Variant, error) { + out := new(Variant) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/CreateVariant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) UpdateVariant(ctx context.Context, in *UpdateVariantRequest, opts ...grpc.CallOption) (*Variant, error) { + out := new(Variant) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/UpdateVariant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) DeleteVariant(ctx context.Context, in *DeleteVariantRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/DeleteVariant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) GetVariant(ctx context.Context, in *GetVariantRequest, opts ...grpc.CallOption) (*Variant, error) { + out := new(Variant) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/GetVariant", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) MergeVariants(ctx context.Context, in *MergeVariantsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/MergeVariants", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) SearchCallSets(ctx context.Context, in *SearchCallSetsRequest, opts ...grpc.CallOption) (*SearchCallSetsResponse, error) { + out := new(SearchCallSetsResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/SearchCallSets", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) CreateCallSet(ctx context.Context, in *CreateCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) { + out := new(CallSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/CreateCallSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) UpdateCallSet(ctx context.Context, in *UpdateCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) { + out := new(CallSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/UpdateCallSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) DeleteCallSet(ctx context.Context, in *DeleteCallSetRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/DeleteCallSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *variantServiceV1Client) GetCallSet(ctx context.Context, in *GetCallSetRequest, opts ...grpc.CallOption) (*CallSet, error) { + out := new(CallSet) + err := c.cc.Invoke(ctx, "/google.genomics.v1.VariantServiceV1/GetCallSet", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// VariantServiceV1Server is the server API for VariantServiceV1 service. +type VariantServiceV1Server interface { + // Creates variant data by asynchronously importing the provided information. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The variants for import will be merged with any existing variant that + // matches its reference sequence, start, end, reference bases, and + // alternative bases. If no such variant exists, a new one will be created. + // + // When variants are merged, the call information from the new variant + // is added to the existing variant, and Variant info fields are merged + // as specified in + // [infoMergeConfig][google.genomics.v1.ImportVariantsRequest.info_merge_config]. + // As a special case, for single-sample VCF files, QUAL and FILTER fields will + // be moved to the call level; these are sometimes interpreted in a + // call-specific context. + // Imported VCF headers are appended to the metadata already in a variant set. + ImportVariants(context.Context, *ImportVariantsRequest) (*longrunning.Operation, error) + // Creates a new variant set. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // The provided variant set must have a valid `datasetId` set - all other + // fields are optional. Note that the `id` field will be ignored, as this is + // assigned by the server. + CreateVariantSet(context.Context, *CreateVariantSetRequest) (*VariantSet, error) + // Exports variant set data to an external destination. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + ExportVariantSet(context.Context, *ExportVariantSetRequest) (*longrunning.Operation, error) + // Gets a variant set by ID. 
+ // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetVariantSet(context.Context, *GetVariantSetRequest) (*VariantSet, error) + // Returns a list of all variant sets matching search criteria. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchVariantSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L49). + SearchVariantSets(context.Context, *SearchVariantSetsRequest) (*SearchVariantSetsResponse, error) + // Deletes a variant set including all variants, call sets, and calls within. + // This is not reversible. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteVariantSet(context.Context, *DeleteVariantSetRequest) (*empty.Empty, error) + // Updates a variant set using patch semantics. + // + // For the definitions of variant sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + UpdateVariantSet(context.Context, *UpdateVariantSetRequest) (*VariantSet, error) + // Gets a list of variants matching the criteria. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchVariants](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L126). + SearchVariants(context.Context, *SearchVariantsRequest) (*SearchVariantsResponse, error) + // Creates a new variant. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateVariant(context.Context, *CreateVariantRequest) (*Variant, error) + // Updates a variant. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. Returns the modified variant without + // its calls. + UpdateVariant(context.Context, *UpdateVariantRequest) (*Variant, error) + // Deletes a variant. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteVariant(context.Context, *DeleteVariantRequest) (*empty.Empty, error) + // Gets a variant by ID. + // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetVariant(context.Context, *GetVariantRequest) (*Variant, error) + // Merges the given variants with existing variants. 
+ // + // For the definitions of variants and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Each variant will be + // merged with an existing variant that matches its reference sequence, + // start, end, reference bases, and alternative bases. If no such variant + // exists, a new one will be created. + // + // When variants are merged, the call information from the new variant + // is added to the existing variant. Variant info fields are merged as + // specified in the + // [infoMergeConfig][google.genomics.v1.MergeVariantsRequest.info_merge_config] + // field of the MergeVariantsRequest. + // + // Please exercise caution when using this method! It is easy to introduce + // mistakes in existing variants and difficult to back out of them. For + // example, + // suppose you were trying to merge a new variant with an existing one and + // both + // variants contain calls that belong to callsets with the same callset ID. + // + // // Existing variant - irrelevant fields trimmed for clarity + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 0, + // 1 + // ], + // } + // ] + // } + // + // // New variant with conflicting call information + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 1, + // 1 + // ], + // } + // ] + // } + // + // The resulting merged variant would overwrite the existing calls with those + // from the new variant: + // + // { + // "variantSetId": "10473108253681171589", + // "referenceName": "1", + // "start": "10582", + // "referenceBases": "G", + // "alternateBases": [ + // "A" + // ], + // "calls": [ + // { + // "callSetId": "10473108253681171589-0", + // "callSetName": "CALLSET0", + // "genotype": [ + // 1, + // 1 + // ], + // } + // ] + // } + // + // This may be the desired outcome, but it is up to the user to determine if + // if that is indeed the case. + MergeVariants(context.Context, *MergeVariantsRequest) (*empty.Empty, error) + // Gets a list of call sets matching the criteria. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // Implements + // [GlobalAllianceApi.searchCallSets](https://github.com/ga4gh/schemas/blob/v0.5.1/src/main/resources/avro/variantmethods.avdl#L178). + SearchCallSets(context.Context, *SearchCallSetsRequest) (*SearchCallSetsResponse, error) + // Creates a new call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + CreateCallSet(context.Context, *CreateCallSetRequest) (*CallSet, error) + // Updates a call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + // + // This method supports patch semantics. 
+ UpdateCallSet(context.Context, *UpdateCallSetRequest) (*CallSet, error) + // Deletes a call set. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + DeleteCallSet(context.Context, *DeleteCallSetRequest) (*empty.Empty, error) + // Gets a call set by ID. + // + // For the definitions of call sets and other genomics resources, see + // [Fundamentals of Google + // Genomics](https://cloud.google.com/genomics/fundamentals-of-google-genomics) + GetCallSet(context.Context, *GetCallSetRequest) (*CallSet, error) +} + +func RegisterVariantServiceV1Server(s *grpc.Server, srv VariantServiceV1Server) { + s.RegisterService(&_VariantServiceV1_serviceDesc, srv) +} + +func _VariantServiceV1_ImportVariants_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ImportVariantsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).ImportVariants(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/ImportVariants", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).ImportVariants(ctx, req.(*ImportVariantsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_CreateVariantSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVariantSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).CreateVariantSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/CreateVariantSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).CreateVariantSet(ctx, req.(*CreateVariantSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_ExportVariantSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExportVariantSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).ExportVariantSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/ExportVariantSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).ExportVariantSet(ctx, req.(*ExportVariantSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_GetVariantSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVariantSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).GetVariantSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/GetVariantSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).GetVariantSet(ctx, 
req.(*GetVariantSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_SearchVariantSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchVariantSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).SearchVariantSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/SearchVariantSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).SearchVariantSets(ctx, req.(*SearchVariantSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_DeleteVariantSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteVariantSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).DeleteVariantSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/DeleteVariantSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).DeleteVariantSet(ctx, req.(*DeleteVariantSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_UpdateVariantSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateVariantSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).UpdateVariantSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/UpdateVariantSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).UpdateVariantSet(ctx, req.(*UpdateVariantSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_SearchVariants_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchVariantsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).SearchVariants(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/SearchVariants", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).SearchVariants(ctx, req.(*SearchVariantsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_CreateVariant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateVariantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).CreateVariant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/CreateVariant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).CreateVariant(ctx, 
req.(*CreateVariantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_UpdateVariant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateVariantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).UpdateVariant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/UpdateVariant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).UpdateVariant(ctx, req.(*UpdateVariantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_DeleteVariant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteVariantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).DeleteVariant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/DeleteVariant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).DeleteVariant(ctx, req.(*DeleteVariantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_GetVariant_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetVariantRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).GetVariant(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/GetVariant", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).GetVariant(ctx, req.(*GetVariantRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_MergeVariants_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(MergeVariantsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).MergeVariants(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/MergeVariants", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).MergeVariants(ctx, req.(*MergeVariantsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_SearchCallSets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchCallSetsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).SearchCallSets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/SearchCallSets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).SearchCallSets(ctx, req.(*SearchCallSetsRequest)) + } + return interceptor(ctx, in, info, handler) +} 
+ +func _VariantServiceV1_CreateCallSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateCallSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).CreateCallSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/CreateCallSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).CreateCallSet(ctx, req.(*CreateCallSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_UpdateCallSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateCallSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).UpdateCallSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/UpdateCallSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).UpdateCallSet(ctx, req.(*UpdateCallSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_DeleteCallSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteCallSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).DeleteCallSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/DeleteCallSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).DeleteCallSet(ctx, req.(*DeleteCallSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _VariantServiceV1_GetCallSet_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetCallSetRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(VariantServiceV1Server).GetCallSet(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1.VariantServiceV1/GetCallSet", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(VariantServiceV1Server).GetCallSet(ctx, req.(*GetCallSetRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _VariantServiceV1_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1.VariantServiceV1", + HandlerType: (*VariantServiceV1Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ImportVariants", + Handler: _VariantServiceV1_ImportVariants_Handler, + }, + { + MethodName: "CreateVariantSet", + Handler: _VariantServiceV1_CreateVariantSet_Handler, + }, + { + MethodName: "ExportVariantSet", + Handler: _VariantServiceV1_ExportVariantSet_Handler, + }, + { + MethodName: "GetVariantSet", + Handler: _VariantServiceV1_GetVariantSet_Handler, + }, + { + MethodName: "SearchVariantSets", + Handler: _VariantServiceV1_SearchVariantSets_Handler, + }, + { + MethodName: "DeleteVariantSet", + Handler: _VariantServiceV1_DeleteVariantSet_Handler, + }, + { + 
MethodName: "UpdateVariantSet", + Handler: _VariantServiceV1_UpdateVariantSet_Handler, + }, + { + MethodName: "SearchVariants", + Handler: _VariantServiceV1_SearchVariants_Handler, + }, + { + MethodName: "CreateVariant", + Handler: _VariantServiceV1_CreateVariant_Handler, + }, + { + MethodName: "UpdateVariant", + Handler: _VariantServiceV1_UpdateVariant_Handler, + }, + { + MethodName: "DeleteVariant", + Handler: _VariantServiceV1_DeleteVariant_Handler, + }, + { + MethodName: "GetVariant", + Handler: _VariantServiceV1_GetVariant_Handler, + }, + { + MethodName: "MergeVariants", + Handler: _VariantServiceV1_MergeVariants_Handler, + }, + { + MethodName: "SearchCallSets", + Handler: _VariantServiceV1_SearchCallSets_Handler, + }, + { + MethodName: "CreateCallSet", + Handler: _VariantServiceV1_CreateCallSet_Handler, + }, + { + MethodName: "UpdateCallSet", + Handler: _VariantServiceV1_UpdateCallSet_Handler, + }, + { + MethodName: "DeleteCallSet", + Handler: _VariantServiceV1_DeleteCallSet_Handler, + }, + { + MethodName: "GetCallSet", + Handler: _VariantServiceV1_GetCallSet_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1/variants.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1/variants.proto", fileDescriptor_variants_b9a43810f0f9306f) +} + +var fileDescriptor_variants_b9a43810f0f9306f = []byte{ + // 2348 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0xdd, 0x6e, 0x1b, 0xc7, + 0xf5, 0xff, 0xef, 0x92, 0x94, 0xc8, 0x43, 0x91, 0x5a, 0x4f, 0x14, 0x69, 0x43, 0x7f, 0xc9, 0xfb, + 0xb7, 0x1d, 0x45, 0x75, 0x45, 0x9b, 0x81, 0xd3, 0x54, 0x49, 0x6a, 0x48, 0x34, 0xa5, 0xb0, 0x90, + 0x48, 0x65, 0x45, 0xbb, 0x75, 0x80, 0x82, 0x58, 0x91, 0x23, 0x7a, 0x6d, 0x72, 0x97, 0xde, 0x5d, + 0xaa, 0x96, 0x0d, 0x5f, 0x34, 0xfd, 0x42, 0x80, 0x02, 0x05, 0x1a, 0xa0, 0x57, 0xbd, 0xed, 0x45, + 0xd1, 0xa2, 0x6f, 0xe0, 0x37, 0x68, 0x7b, 0x53, 0xf4, 0x0d, 0xfa, 0x10, 0xbd, 0x2c, 0x66, 0x76, + 0x66, 0xb9, 0xbb, 0x1c, 0xae, 0x28, 0x07, 0x09, 0x7a, 0xc7, 0x39, 0x73, 0x66, 0xce, 0xd7, 0xef, + 0x9c, 0x39, 0x67, 0x25, 0xb8, 0xd6, 0xb3, 0xed, 0x5e, 0x1f, 0x97, 0x7b, 0xd8, 0xb2, 0x07, 0x66, + 0xc7, 0x2d, 0x9f, 0xdc, 0x29, 0x9f, 0x18, 0x8e, 0x69, 0x58, 0x9e, 0xbb, 0x31, 0x74, 0x6c, 0xcf, + 0x46, 0xc8, 0x67, 0xd9, 0xe0, 0x2c, 0x1b, 0x27, 0x77, 0x4a, 0x97, 0xd8, 0x31, 0x63, 0x68, 0x96, + 0x0d, 0xcb, 0xb2, 0x3d, 0xc3, 0x33, 0x6d, 0x8b, 0x9d, 0x28, 0xfd, 0x3f, 0xdb, 0xed, 0xdb, 0x56, + 0xcf, 0x19, 0x59, 0x96, 0x69, 0xf5, 0xca, 0xf6, 0x10, 0x3b, 0x11, 0xa6, 0x8b, 0x8c, 0x89, 0xae, + 0x8e, 0x46, 0xc7, 0x65, 0x3c, 0x18, 0x7a, 0xa7, 0x6c, 0x73, 0x35, 0xbe, 0x79, 0x6c, 0xe2, 0x7e, + 0xb7, 0x3d, 0x30, 0xdc, 0xa7, 0x8c, 0xe3, 0x52, 0x9c, 0xc3, 0xf5, 0x9c, 0x51, 0xc7, 0xf3, 0x77, + 0xb5, 0xd7, 0x29, 0x40, 0x0f, 0x7d, 0x33, 0x0e, 0xb1, 0xb7, 0x8f, 0x3d, 0xa3, 0x6b, 0x78, 0x06, + 0x52, 0x20, 0xf5, 0x14, 0x9f, 0xaa, 0xd2, 0xaa, 0xb4, 0x96, 0xd3, 0xc9, 0x4f, 0xb4, 0x04, 0x99, + 0x13, 0xa3, 0x3f, 0xc2, 0xaa, 0x4c, 0x69, 0xfe, 0x02, 0x15, 0x41, 0x36, 0xbb, 0x6a, 0x9a, 0x92, + 0x64, 0xb3, 0x8b, 0xee, 0x41, 0xda, 0x3b, 0x1d, 0x62, 0x35, 0xb3, 0x2a, 0xad, 0x15, 0x2b, 0xdf, + 0xd9, 0x98, 0xf4, 0xc8, 0xc6, 0xa4, 0xb4, 0x8d, 0xd6, 0xe9, 0x10, 0xeb, 0xf4, 0x20, 0x5a, 0x86, + 0x39, 0x6b, 0x34, 0x38, 0xc2, 0x8e, 0x9a, 0xa5, 0x97, 0xb2, 0x15, 0x5a, 0x85, 0x7c, 0x17, 0xbb, + 0x1d, 0xc7, 0x1c, 0x12, 0xd7, 0xa8, 0xf3, 0x74, 0x33, 0x4c, 0x42, 0xf7, 0x21, 0x6d, 0x5a, 0xc7, + 0xb6, 0x9a, 0x5a, 0x4d, 0xad, 0xe5, 0x2b, 0xb7, 0x67, 0x14, 0x5d, 0xb7, 0x8e, 
0xed, 0x9a, 0xe5, + 0x39, 0xa7, 0x3a, 0x3d, 0x5d, 0x3a, 0x84, 0x5c, 0x40, 0x12, 0x78, 0xe1, 0x76, 0xd8, 0x0b, 0xf9, + 0x4a, 0x89, 0x4b, 0xe1, 0xce, 0xdd, 0xd8, 0x33, 0x5d, 0xef, 0x21, 0xe1, 0x60, 0x1e, 0xda, 0x94, + 0x3f, 0x94, 0xb4, 0x47, 0x90, 0x26, 0x26, 0xa2, 0x25, 0x50, 0x5a, 0x8f, 0x0e, 0x6a, 0xed, 0x07, + 0x8d, 0xc3, 0x83, 0x5a, 0xb5, 0xbe, 0x53, 0xaf, 0xdd, 0x57, 0xfe, 0x0f, 0xe5, 0x61, 0xbe, 0xde, + 0x68, 0xd5, 0x76, 0x6b, 0xba, 0x22, 0xa1, 0x1c, 0x64, 0x76, 0xf6, 0x9a, 0x5b, 0x2d, 0x45, 0x46, + 0x59, 0x48, 0xef, 0xec, 0x6d, 0xed, 0x2a, 0x29, 0x54, 0x80, 0x5c, 0xf5, 0xd3, 0x2d, 0x7d, 0xab, + 0xda, 0xaa, 0xe9, 0x4a, 0x1a, 0x01, 0xcc, 0x1d, 0xb6, 0xf4, 0x7a, 0x63, 0x57, 0xc9, 0x68, 0x7f, + 0x95, 0x01, 0xc6, 0x66, 0xa1, 0xcb, 0x00, 0xc4, 0x2c, 0x17, 0x7b, 0x6d, 0xb3, 0xcb, 0x14, 0xcf, + 0x31, 0x4a, 0xbd, 0xcb, 0xc2, 0x25, 0x07, 0xe1, 0x5a, 0x03, 0xc5, 0xc1, 0xc7, 0xd8, 0xc1, 0x56, + 0x07, 0xb7, 0xd9, 0xa1, 0x39, 0xba, 0x5b, 0x0c, 0xe8, 0x87, 0xf4, 0xe4, 0x7e, 0x98, 0xf3, 0xc8, + 0x1e, 0x59, 0x5d, 0x57, 0xcd, 0x50, 0x4f, 0x6b, 0x22, 0x4f, 0xeb, 0x9c, 0x77, 0x9b, 0xb0, 0xea, + 0x8b, 0x4e, 0x64, 0xed, 0xa2, 0x6d, 0xc8, 0x0e, 0x58, 0x08, 0xd4, 0x34, 0xbd, 0xe6, 0xe6, 0x6c, + 0x01, 0xd3, 0x83, 0x73, 0x08, 0x41, 0xda, 0x32, 0x06, 0x98, 0x61, 0x81, 0xfe, 0x8e, 0xc3, 0x24, + 0x3b, 0x01, 0x13, 0xed, 0xcb, 0x34, 0xcc, 0xb3, 0x6b, 0xd1, 0x75, 0x28, 0xb2, 0x14, 0xe6, 0xc6, + 0x2f, 0xd2, 0x03, 0x0b, 0x27, 0x81, 0x5c, 0x81, 0xd3, 0x96, 0x20, 0x43, 0x64, 0xb9, 0x14, 0x69, + 0x39, 0xdd, 0x5f, 0x20, 0x15, 0xe6, 0x3b, 0x0e, 0x36, 0x3c, 0xdc, 0x55, 0x17, 0x56, 0xa5, 0xb5, + 0x94, 0xce, 0x97, 0xe8, 0x06, 0x8c, 0x9d, 0xd9, 0xa6, 0x1a, 0x17, 0xe9, 0x5d, 0x85, 0x80, 0xda, + 0x20, 0xaa, 0x2f, 0x41, 0xc6, 0xf5, 0x0c, 0xc7, 0x53, 0x15, 0x7a, 0xdc, 0x5f, 0x10, 0x08, 0x62, + 0xab, 0xab, 0x16, 0x28, 0x8d, 0xfc, 0x44, 0xef, 0xc2, 0x62, 0x28, 0x12, 0x86, 0x8b, 0xdd, 0x89, + 0x90, 0x6d, 0x13, 0x2a, 0x61, 0x34, 0xfa, 0x1e, 0x76, 0x2c, 0xc3, 0xe3, 0x8c, 0xf3, 0x54, 0xe3, + 0x62, 0x40, 0xf6, 0x19, 0x55, 0x98, 0x7f, 0x36, 0x32, 0xfa, 0xa6, 0x77, 0x4a, 0x1d, 0x26, 0xe9, + 0x7c, 0x49, 0xb2, 0xf1, 0xd8, 0x24, 0xcc, 0x6a, 0x8e, 0x9e, 0x64, 0x2b, 0xf4, 0x7d, 0x96, 0x6b, + 0x40, 0x43, 0x77, 0x23, 0x21, 0x74, 0xf1, 0x04, 0x43, 0x77, 0x21, 0xd3, 0x31, 0xfa, 0x7d, 0x57, + 0xcd, 0xd3, 0xb3, 0x57, 0x13, 0xce, 0x56, 0x8d, 0x7e, 0x5f, 0xf7, 0xb9, 0xbf, 0x99, 0xbc, 0xfc, + 0x87, 0x0c, 0xf9, 0x90, 0x2c, 0x74, 0x05, 0xf2, 0x44, 0x1a, 0x07, 0x83, 0x8f, 0x9e, 0x1c, 0x21, + 0xf9, 0x48, 0xd0, 0xa0, 0x10, 0xec, 0xd3, 0x40, 0xe6, 0x7c, 0x7c, 0x31, 0x0e, 0x1a, 0xc6, 0x12, + 0x64, 0x89, 0x29, 0xb4, 0x0a, 0x12, 0x77, 0x67, 0xf4, 0x60, 0x4d, 0xf6, 0x86, 0x8f, 0x89, 0xcb, + 0xb1, 0x47, 0x2b, 0x64, 0x4e, 0x0f, 0xd6, 0xa8, 0x0c, 0x6f, 0x71, 0xbe, 0x76, 0xdf, 0x7c, 0x8a, + 0xfb, 0xe6, 0x63, 0xdb, 0x26, 0xd9, 0x98, 0x5a, 0x93, 0x74, 0xc4, 0xb7, 0xf6, 0x82, 0x1d, 0xf4, + 0x09, 0x8b, 0x81, 0x4c, 0xfd, 0xf8, 0xde, 0x19, 0x7e, 0xfc, 0x76, 0x0a, 0xdd, 0x1f, 0x64, 0x98, + 0xaf, 0xfa, 0xce, 0x60, 0x69, 0x23, 0x05, 0x69, 0xc3, 0xd3, 0x55, 0x0e, 0xa5, 0xeb, 0x45, 0xc8, + 0xb9, 0xc6, 0x60, 0xd8, 0xc7, 0xc4, 0xdd, 0x7e, 0x1e, 0x67, 0x7d, 0x42, 0xbd, 0x8b, 0x6e, 0xc2, + 0x62, 0x34, 0x3b, 0x5d, 0xea, 0x8d, 0x9c, 0x5e, 0x08, 0xa7, 0x67, 0x24, 0xf3, 0x32, 0xd1, 0xcc, + 0xe3, 0x30, 0x4d, 0x4f, 0x87, 0x29, 0xd3, 0xf6, 0xdb, 0x71, 0xcf, 0x8f, 0xa1, 0x18, 0x2d, 0x8c, + 0x82, 0xda, 0x20, 0x89, 0x6a, 0xc3, 0x55, 0xc8, 0x8f, 0x86, 0x43, 0xec, 0xf8, 0x95, 0x97, 0x0a, + 0x4d, 0xe9, 0x40, 0x49, 0xf4, 0x1e, 0xed, 0x37, 0x69, 0x78, 0xbb, 0x3e, 0x18, 0xda, 0x8e, 0xc7, + 0x62, 
0xee, 0xea, 0xf8, 0xd9, 0x08, 0xbb, 0xa2, 0x1a, 0x27, 0x09, 0x6a, 0xdc, 0x55, 0xc8, 0xbb, + 0xf6, 0xc8, 0xe9, 0xe0, 0xf6, 0xc8, 0x31, 0x5d, 0x8a, 0xa9, 0x9c, 0x0e, 0x3e, 0xe9, 0x81, 0x63, + 0xba, 0xe8, 0x53, 0x98, 0x3b, 0xb6, 0x9d, 0x81, 0xe1, 0xa9, 0x29, 0xfa, 0xb4, 0x0b, 0xdf, 0x57, + 0xa1, 0x06, 0x1b, 0x3b, 0xf4, 0x9c, 0xce, 0xce, 0xa3, 0x4d, 0x78, 0xc7, 0x22, 0xbf, 0xfa, 0xe6, + 0x0b, 0xdc, 0x8e, 0x1a, 0xef, 0xd2, 0x00, 0x66, 0xf5, 0x95, 0x80, 0x41, 0x0f, 0xbb, 0xc1, 0x45, + 0x4f, 0xe0, 0x02, 0x89, 0x4e, 0x7b, 0x80, 0x9d, 0x1e, 0x6e, 0x77, 0x6c, 0xeb, 0xd8, 0xec, 0x51, + 0x50, 0xe4, 0x2b, 0x3f, 0x98, 0x5d, 0x21, 0x12, 0xd8, 0x7d, 0x72, 0x43, 0x95, 0x5e, 0xe0, 0x87, + 0x7d, 0xd1, 0x8c, 0x52, 0x4b, 0x4f, 0x60, 0x49, 0xc4, 0x28, 0x00, 0xc3, 0xc7, 0x61, 0x30, 0x14, + 0xc5, 0x2f, 0x59, 0x70, 0x55, 0x93, 0xb7, 0x77, 0x61, 0x60, 0x34, 0x60, 0xce, 0xf7, 0x12, 0x5a, + 0x06, 0xb4, 0xd3, 0xd4, 0xf7, 0xb7, 0x5a, 0xb1, 0x26, 0xa1, 0x08, 0xc0, 0xe8, 0x0f, 0xab, 0x3b, + 0x8a, 0x84, 0x2e, 0x81, 0xca, 0xd6, 0xd5, 0xe6, 0xfe, 0xc1, 0x5e, 0xad, 0x55, 0x6b, 0xef, 0xd6, + 0x1a, 0xcd, 0xfd, 0x7a, 0xf5, 0x50, 0x91, 0xb5, 0x4d, 0x58, 0x8e, 0x9b, 0xee, 0x0e, 0x6d, 0xcb, + 0x25, 0x0f, 0xe4, 0x42, 0xa8, 0xc4, 0xb9, 0xaa, 0xe4, 0x47, 0x3a, 0xa8, 0x71, 0xae, 0xf6, 0x39, + 0xac, 0x54, 0x69, 0xfe, 0x8c, 0x1f, 0x5f, 0x8e, 0xa5, 0x7b, 0x90, 0x0f, 0x61, 0x89, 0xba, 0x20, + 0x5f, 0xb9, 0x92, 0xfc, 0x70, 0xeb, 0x30, 0x06, 0x9a, 0xf6, 0x2f, 0x19, 0x56, 0x6a, 0xcf, 0x43, + 0x8a, 0x85, 0x2e, 0x9f, 0x0d, 0xa8, 0x71, 0xfd, 0xe5, 0xb8, 0xfe, 0xa4, 0x05, 0x1a, 0x3a, 0xf6, + 0x13, 0xdc, 0xa1, 0x77, 0xa4, 0xfc, 0x1a, 0xce, 0x28, 0xf5, 0x2e, 0xfa, 0x61, 0x00, 0xe4, 0x34, + 0x8d, 0x56, 0x45, 0xa4, 0xfe, 0x14, 0x1d, 0xe3, 0x50, 0x7e, 0x0f, 0x94, 0x23, 0xb3, 0xf7, 0x6c, + 0x84, 0x9d, 0xd3, 0x36, 0x6b, 0xb2, 0x58, 0x5d, 0x5f, 0xe4, 0xf4, 0xfb, 0x3e, 0x99, 0x24, 0x7a, + 0xc0, 0xea, 0x19, 0x47, 0x7d, 0xcc, 0x1e, 0xed, 0x02, 0xa7, 0xb6, 0x08, 0x51, 0xbb, 0x7b, 0x26, + 0x10, 0xde, 0x82, 0x45, 0x46, 0xdf, 0xae, 0xef, 0x7e, 0xf6, 0xa0, 0xa6, 0x3f, 0x52, 0x24, 0xed, + 0x63, 0x58, 0xda, 0xc5, 0x6f, 0xea, 0x53, 0xed, 0xa7, 0xa0, 0x1e, 0x62, 0xc3, 0xe9, 0x3c, 0x1e, + 0x5f, 0x10, 0x94, 0x8f, 0xab, 0x90, 0x1f, 0x37, 0x94, 0x01, 0x5c, 0x82, 0x8e, 0xd2, 0x77, 0xb7, + 0xd1, 0xc3, 0x6d, 0xcf, 0x7e, 0x8a, 0x2d, 0x56, 0xdc, 0x73, 0x84, 0xd2, 0x22, 0x04, 0x52, 0xe1, + 0xe9, 0xb6, 0x6b, 0xbe, 0xc0, 0x34, 0x18, 0x19, 0x3d, 0x4b, 0x08, 0x87, 0xe6, 0x0b, 0xac, 0xfd, + 0x4a, 0x82, 0x77, 0x04, 0x92, 0x19, 0x54, 0xb7, 0x60, 0x21, 0xa4, 0xbc, 0x2f, 0xfb, 0x6c, 0xb8, + 0xe5, 0xc7, 0xa6, 0xb9, 0xe4, 0x09, 0xb1, 0xf0, 0x73, 0xaf, 0x3d, 0xa1, 0x61, 0x81, 0x90, 0x0f, + 0xb8, 0x96, 0xda, 0x3d, 0x58, 0xb9, 0x8f, 0xfb, 0x58, 0x84, 0xf9, 0xd9, 0x5c, 0xf8, 0x5a, 0x82, + 0x95, 0x07, 0xc3, 0xae, 0xf1, 0xc6, 0x37, 0xc4, 0x73, 0x4b, 0x3e, 0x6f, 0x6e, 0xa1, 0x8f, 0xc8, + 0x1b, 0x41, 0x34, 0xa0, 0xc3, 0x1f, 0xc5, 0xa1, 0xe8, 0x61, 0xda, 0x21, 0xf3, 0xe1, 0xbe, 0xe1, + 0x3e, 0x25, 0xef, 0x07, 0x61, 0x27, 0xbf, 0xb5, 0xbf, 0xc8, 0xf0, 0x76, 0x24, 0x12, 0x01, 0x00, + 0x04, 0xaf, 0xb0, 0x24, 0x7a, 0x85, 0xaf, 0x8d, 0xa3, 0x15, 0x7a, 0xe6, 0xb9, 0x4d, 0x0d, 0xbf, + 0x39, 0x8f, 0xe6, 0x6e, 0x6a, 0x22, 0x77, 0x27, 0x9f, 0xc3, 0x74, 0x62, 0xab, 0x9c, 0x11, 0xb4, + 0xca, 0x73, 0xe3, 0x56, 0x39, 0x8a, 0xcd, 0xf9, 0x44, 0x6c, 0x66, 0xa3, 0xd8, 0x24, 0x9b, 0x03, + 0xe3, 0x79, 0xdb, 0xef, 0x55, 0x73, 0xfe, 0xe6, 0xc0, 0x78, 0x4e, 0xfa, 0x05, 0x57, 0x3b, 0x85, + 0xe5, 0xb8, 0xb7, 0x18, 0x68, 0xbf, 0x07, 0x59, 0xfe, 0x55, 0x80, 0x01, 0xf6, 0x62, 0x42, 0x0c, + 0xf5, 0x80, 0x79, 0x66, 0xa8, 
0xee, 0xc3, 0x52, 0xa4, 0x3c, 0xf3, 0x38, 0xdd, 0x85, 0x79, 0x76, + 0x17, 0xab, 0xcb, 0x89, 0x72, 0x39, 0xaf, 0xf6, 0x27, 0x09, 0x96, 0x22, 0xc0, 0xe5, 0xf7, 0x5d, + 0x06, 0x0e, 0xae, 0xd0, 0x24, 0xc9, 0x28, 0xf5, 0x6e, 0x58, 0x9c, 0x3c, 0xbb, 0xb8, 0x38, 0x48, + 0x53, 0xe7, 0x02, 0xe9, 0x5d, 0x58, 0x8a, 0x64, 0xe9, 0x6c, 0xaa, 0x6a, 0x15, 0xb8, 0x30, 0x2e, + 0x8e, 0x33, 0x9e, 0xf9, 0x9b, 0x0c, 0x4b, 0xf4, 0xb9, 0x7e, 0xb3, 0x76, 0x2a, 0x8c, 0x02, 0xf9, + 0x3c, 0x28, 0x30, 0x45, 0x0d, 0x8e, 0xff, 0x45, 0xe3, 0x13, 0xd1, 0x0d, 0x22, 0x1d, 0xff, 0x07, + 0xfb, 0x9b, 0xdf, 0x4a, 0xbc, 0xbc, 0xb0, 0x7e, 0xfb, 0xdc, 0xe5, 0x45, 0x34, 0x3d, 0x44, 0xd3, + 0x3b, 0x95, 0x98, 0xde, 0xe9, 0xd8, 0xd3, 0xf3, 0x82, 0x67, 0xf0, 0x58, 0x21, 0x96, 0xc1, 0x1f, + 0x42, 0x8e, 0x57, 0xa9, 0xc4, 0x14, 0x66, 0x07, 0xf5, 0x2c, 0xab, 0x5f, 0xb3, 0xa7, 0x70, 0x83, + 0xa7, 0x30, 0xbf, 0x82, 0xf9, 0xe2, 0x03, 0xc8, 0x72, 0xc9, 0x49, 0x39, 0xcc, 0x4f, 0xcd, 0x33, + 0xc1, 0xda, 0x9f, 0x83, 0x1c, 0x8e, 0x5d, 0x18, 0x9b, 0x67, 0xa5, 0xf8, 0x3c, 0x1b, 0x16, 0x28, + 0xcf, 0x2e, 0xf0, 0xeb, 0x65, 0xf1, 0x07, 0x3c, 0x8b, 0xcf, 0xa7, 0xac, 0xf6, 0x3e, 0x4d, 0xe3, + 0x73, 0x1e, 0xfa, 0x3b, 0x01, 0x9e, 0xe7, 0x60, 0x63, 0x10, 0x4f, 0xe4, 0x68, 0x9b, 0x28, 0xc5, + 0xdb, 0xc4, 0xc9, 0x3c, 0x97, 0x67, 0xe8, 0x46, 0xbf, 0xe9, 0x17, 0x4d, 0xfb, 0x0c, 0x96, 0xe3, + 0xe6, 0x7c, 0xcd, 0x87, 0x67, 0xfd, 0x27, 0x80, 0x26, 0x93, 0x17, 0x5d, 0x87, 0xd5, 0x7a, 0x63, + 0xa7, 0xd9, 0xde, 0xaf, 0xe9, 0xbb, 0xb5, 0x76, 0xf3, 0xa0, 0xa6, 0x6f, 0xb5, 0xea, 0xcd, 0xc6, + 0xe4, 0x54, 0x52, 0xdf, 0x6d, 0x34, 0xf5, 0x5a, 0xbb, 0x51, 0xfb, 0x91, 0x22, 0xa1, 0x0b, 0x50, + 0xd8, 0x6f, 0x3e, 0xac, 0xb5, 0x5b, 0xcd, 0x76, 0x75, 0x6b, 0x6f, 0xef, 0x50, 0x91, 0x2b, 0x7f, + 0x94, 0x60, 0xc5, 0x57, 0xd9, 0xb4, 0x7a, 0x41, 0xeb, 0xe2, 0x9c, 0x98, 0x1d, 0x8c, 0xbe, 0x94, + 0xa0, 0x18, 0x35, 0x07, 0x09, 0xbf, 0x63, 0x08, 0x23, 0x58, 0x5a, 0x9f, 0x85, 0xd5, 0xf7, 0x8e, + 0x76, 0xe5, 0x8b, 0x7f, 0xfe, 0xfb, 0x2b, 0x59, 0xd5, 0xde, 0x0a, 0x7f, 0xb6, 0xdf, 0x74, 0x29, + 0xf3, 0xa6, 0xb4, 0x7e, 0x5b, 0xaa, 0xbc, 0x46, 0xa0, 0x44, 0xd5, 0x7b, 0x78, 0x07, 0xbd, 0x80, + 0x62, 0x74, 0x8e, 0x12, 0xeb, 0x27, 0x1c, 0x33, 0x4b, 0x97, 0x39, 0x6b, 0xe8, 0xeb, 0xfe, 0x46, + 0xe0, 0xe1, 0x29, 0x2a, 0x99, 0xf4, 0xaa, 0x4d, 0x69, 0x1d, 0xfd, 0x52, 0x02, 0x25, 0x3e, 0x88, + 0x21, 0xe1, 0x17, 0xf5, 0x29, 0xe3, 0x5a, 0xe9, 0x8c, 0xee, 0x51, 0xbb, 0x4e, 0x35, 0xb8, 0xa2, + 0x2d, 0x86, 0x35, 0xc0, 0x9e, 0xbb, 0x19, 0xee, 0x44, 0xd1, 0xef, 0x24, 0x50, 0xe2, 0xf3, 0x90, + 0x58, 0x8f, 0x29, 0x53, 0xd3, 0x59, 0x8e, 0xa8, 0x50, 0x35, 0x6e, 0x69, 0xef, 0xc6, 0xd4, 0x28, + 0xbf, 0x8c, 0x66, 0xe0, 0xab, 0x4d, 0xfc, 0x9c, 0x3b, 0xe7, 0xe7, 0x12, 0x14, 0x22, 0x13, 0x0f, + 0x5a, 0x13, 0x69, 0x24, 0x1a, 0x8a, 0xce, 0x74, 0xcb, 0x1a, 0xd5, 0x47, 0x43, 0xab, 0x67, 0xe9, + 0x83, 0xbe, 0x92, 0xe0, 0xc2, 0xc4, 0xfc, 0x82, 0x6e, 0x09, 0x71, 0x39, 0x65, 0xc0, 0x2a, 0x7d, + 0x77, 0x46, 0x6e, 0x06, 0xe4, 0x6b, 0x54, 0xb9, 0x8b, 0xda, 0x72, 0x5c, 0x39, 0x97, 0x1e, 0x21, + 0xbe, 0xf9, 0x99, 0x04, 0x4a, 0x7c, 0x9a, 0x11, 0x07, 0x6c, 0xca, 0xcc, 0x53, 0x5a, 0x9e, 0x28, + 0xe5, 0xb5, 0xc1, 0xd0, 0x3b, 0xe5, 0x9e, 0x59, 0x3f, 0xdb, 0x33, 0xbf, 0x97, 0x40, 0x89, 0xcf, + 0x43, 0x62, 0x1d, 0xa6, 0x4c, 0x4d, 0x67, 0x46, 0xe9, 0x2e, 0xd5, 0xa5, 0x5c, 0x39, 0x53, 0x97, + 0x28, 0x9a, 0x7f, 0x4d, 0x4a, 0x4e, 0xa4, 0x75, 0x9f, 0x52, 0x72, 0x44, 0xc3, 0xd0, 0x94, 0x92, + 0x23, 0x9c, 0x04, 0xc4, 0xf9, 0x1d, 0x0a, 0xd3, 0x08, 0x0a, 0x91, 0xc4, 0x15, 0x23, 0x58, 0xd4, + 0xeb, 0x97, 0x92, 0x2a, 0xbb, 0x76, 0x99, 0xca, 0x5d, 
0xd1, 0x16, 0x22, 0x75, 0x25, 0xe8, 0xc0, + 0xbf, 0x90, 0xa0, 0x10, 0xf1, 0xb9, 0x58, 0xae, 0x68, 0x26, 0x48, 0x96, 0xbb, 0x4e, 0xe5, 0x5e, + 0xaf, 0xbc, 0x13, 0xb1, 0xf7, 0xe5, 0xb8, 0xcb, 0x7e, 0x35, 0x56, 0xc2, 0x83, 0x42, 0x04, 0x7b, + 0x62, 0x1d, 0x44, 0xcd, 0xfe, 0x54, 0x6c, 0xb2, 0xc4, 0x58, 0x9f, 0x2e, 0x1e, 0xb9, 0x00, 0xe3, + 0x82, 0x80, 0x6e, 0x24, 0x17, 0x8c, 0x99, 0x6c, 0x66, 0x42, 0x51, 0x82, 0xd0, 0x21, 0x14, 0x22, + 0x4d, 0xba, 0xd8, 0x54, 0x51, 0x1f, 0x3f, 0xd5, 0x54, 0x1e, 0x61, 0x14, 0x89, 0x30, 0x1d, 0x19, + 0x08, 0xb0, 0xc6, 0x10, 0xe7, 0xbd, 0x6d, 0x12, 0xc4, 0x63, 0x0d, 0x79, 0x12, 0xc4, 0xe3, 0xad, + 0x72, 0x14, 0xe2, 0x74, 0x52, 0x8e, 0x56, 0xa2, 0x13, 0x0e, 0x71, 0xfe, 0x37, 0x81, 0x04, 0x88, + 0x47, 0x1b, 0xbb, 0x52, 0x52, 0x23, 0x1a, 0xc8, 0x5d, 0x08, 0xcb, 0xdd, 0x0c, 0x7a, 0x59, 0xf4, + 0x8b, 0x00, 0xe3, 0x89, 0x82, 0x45, 0x3d, 0x73, 0xb2, 0xe0, 0x5b, 0x54, 0xf0, 0xcd, 0x4a, 0x29, + 0x62, 0xf0, 0xcb, 0x50, 0x0f, 0xf8, 0x2a, 0xa4, 0xc6, 0x88, 0xa3, 0x3c, 0x51, 0x0b, 0x51, 0x33, + 0x3c, 0x35, 0xf4, 0x1a, 0x55, 0xe0, 0xd2, 0x7a, 0x82, 0x02, 0xc8, 0xa3, 0x30, 0xe7, 0x32, 0xa7, + 0xc1, 0xfc, 0x3c, 0x66, 0x33, 0xa9, 0x28, 0x41, 0xea, 0xf6, 0x63, 0x58, 0xee, 0xd8, 0x03, 0xc1, + 0x2d, 0xdb, 0x05, 0x8e, 0xeb, 0x03, 0x62, 0xcb, 0x81, 0xf4, 0xf9, 0x26, 0x67, 0xb2, 0xfb, 0x86, + 0xd5, 0xdb, 0xb0, 0x9d, 0x5e, 0xb9, 0x87, 0x2d, 0x6a, 0x69, 0xd9, 0xdf, 0x32, 0x86, 0xa6, 0x1b, + 0xfe, 0x4f, 0x8b, 0x8f, 0xf8, 0xef, 0xff, 0x48, 0xd2, 0xd1, 0x1c, 0xe5, 0x7c, 0xff, 0xbf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x7e, 0x5e, 0x37, 0xc0, 0x92, 0x21, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go b/vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go new file mode 100644 index 0000000..ff7089a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/genomics/v1alpha2/pipelines.pb.go @@ -0,0 +1,2304 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/genomics/v1alpha2/pipelines.proto + +package genomics // import "google.golang.org/genproto/googleapis/genomics/v1alpha2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import code "google.golang.org/genproto/googleapis/rpc/code" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The types of disks that may be attached to VMs. +type PipelineResources_Disk_Type int32 + +const ( + // Default disk type. Use one of the other options below. + PipelineResources_Disk_TYPE_UNSPECIFIED PipelineResources_Disk_Type = 0 + // Specifies a Google Compute Engine persistent hard disk. See + // https://cloud.google.com/compute/docs/disks/#pdspecs for details. 
+ PipelineResources_Disk_PERSISTENT_HDD PipelineResources_Disk_Type = 1 + // Specifies a Google Compute Engine persistent solid-state disk. See + // https://cloud.google.com/compute/docs/disks/#pdspecs for details. + PipelineResources_Disk_PERSISTENT_SSD PipelineResources_Disk_Type = 2 + // Specifies a Google Compute Engine local SSD. + // See https://cloud.google.com/compute/docs/disks/local-ssd for details. + PipelineResources_Disk_LOCAL_SSD PipelineResources_Disk_Type = 3 +) + +var PipelineResources_Disk_Type_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "PERSISTENT_HDD", + 2: "PERSISTENT_SSD", + 3: "LOCAL_SSD", +} +var PipelineResources_Disk_Type_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "PERSISTENT_HDD": 1, + "PERSISTENT_SSD": 2, + "LOCAL_SSD": 3, +} + +func (x PipelineResources_Disk_Type) String() string { + return proto.EnumName(PipelineResources_Disk_Type_name, int32(x)) +} +func (PipelineResources_Disk_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{16, 0, 0} +} + +// Describes a Compute Engine resource that is being managed by a running +// [pipeline][google.genomics.v1alpha2.Pipeline]. +type ComputeEngine struct { + // The instance on which the operation is running. + InstanceName string `protobuf:"bytes,1,opt,name=instance_name,json=instanceName,proto3" json:"instance_name,omitempty"` + // The availability zone in which the instance resides. + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + // The machine type of the instance. + MachineType string `protobuf:"bytes,3,opt,name=machine_type,json=machineType,proto3" json:"machine_type,omitempty"` + // The names of the disks that were created for this pipeline. + DiskNames []string `protobuf:"bytes,4,rep,name=disk_names,json=diskNames,proto3" json:"disk_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComputeEngine) Reset() { *m = ComputeEngine{} } +func (m *ComputeEngine) String() string { return proto.CompactTextString(m) } +func (*ComputeEngine) ProtoMessage() {} +func (*ComputeEngine) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{0} +} +func (m *ComputeEngine) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComputeEngine.Unmarshal(m, b) +} +func (m *ComputeEngine) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComputeEngine.Marshal(b, m, deterministic) +} +func (dst *ComputeEngine) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComputeEngine.Merge(dst, src) +} +func (m *ComputeEngine) XXX_Size() int { + return xxx_messageInfo_ComputeEngine.Size(m) +} +func (m *ComputeEngine) XXX_DiscardUnknown() { + xxx_messageInfo_ComputeEngine.DiscardUnknown(m) +} + +var xxx_messageInfo_ComputeEngine proto.InternalMessageInfo + +func (m *ComputeEngine) GetInstanceName() string { + if m != nil { + return m.InstanceName + } + return "" +} + +func (m *ComputeEngine) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *ComputeEngine) GetMachineType() string { + if m != nil { + return m.MachineType + } + return "" +} + +func (m *ComputeEngine) GetDiskNames() []string { + if m != nil { + return m.DiskNames + } + return nil +} + +// Runtime metadata that will be populated in the +// [runtimeMetadata][google.genomics.v1.OperationMetadata.runtime_metadata] +// field of the Operation associated with a RunPipeline execution. 
+type RuntimeMetadata struct { + // Execution information specific to Google Compute Engine. + ComputeEngine *ComputeEngine `protobuf:"bytes,1,opt,name=compute_engine,json=computeEngine,proto3" json:"compute_engine,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RuntimeMetadata) Reset() { *m = RuntimeMetadata{} } +func (m *RuntimeMetadata) String() string { return proto.CompactTextString(m) } +func (*RuntimeMetadata) ProtoMessage() {} +func (*RuntimeMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{1} +} +func (m *RuntimeMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RuntimeMetadata.Unmarshal(m, b) +} +func (m *RuntimeMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RuntimeMetadata.Marshal(b, m, deterministic) +} +func (dst *RuntimeMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_RuntimeMetadata.Merge(dst, src) +} +func (m *RuntimeMetadata) XXX_Size() int { + return xxx_messageInfo_RuntimeMetadata.Size(m) +} +func (m *RuntimeMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_RuntimeMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_RuntimeMetadata proto.InternalMessageInfo + +func (m *RuntimeMetadata) GetComputeEngine() *ComputeEngine { + if m != nil { + return m.ComputeEngine + } + return nil +} + +// The pipeline object. Represents a transformation from a set of input +// parameters to a set of output parameters. The transformation is defined +// as a docker image and command to run within that image. Each pipeline +// is run on a Google Compute Engine VM. A pipeline can be created with the +// `create` method and then later run with the `run` method, or a pipeline can +// be defined and run all at once with the `run` method. +type Pipeline struct { + // Required. The project in which to create the pipeline. The caller must have + // WRITE access. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Required. A user specified pipeline name that does not have to be unique. + // This name can be used for filtering Pipelines in ListPipelines. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // User-specified description. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Input parameters of the pipeline. + InputParameters []*PipelineParameter `protobuf:"bytes,8,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty"` + // Output parameters of the pipeline. + OutputParameters []*PipelineParameter `protobuf:"bytes,9,rep,name=output_parameters,json=outputParameters,proto3" json:"output_parameters,omitempty"` + // Required. The executor indicates in which environment the pipeline runs. + // + // Types that are valid to be assigned to Executor: + // *Pipeline_Docker + Executor isPipeline_Executor `protobuf_oneof:"executor"` + // Required. Specifies resource requirements for the pipeline run. + // Required fields: + // + // * + // [minimumCpuCores][google.genomics.v1alpha2.PipelineResources.minimum_cpu_cores] + // + // * + // [minimumRamGb][google.genomics.v1alpha2.PipelineResources.minimum_ram_gb] + Resources *PipelineResources `protobuf:"bytes,6,opt,name=resources,proto3" json:"resources,omitempty"` + // Unique pipeline id that is generated by the service when CreatePipeline + // is called. 
Cannot be specified in the Pipeline used in the + // CreatePipelineRequest, and will be populated in the response to + // CreatePipeline and all subsequent Get and List calls. Indicates that the + // service has registered this pipeline. + PipelineId string `protobuf:"bytes,7,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Pipeline) Reset() { *m = Pipeline{} } +func (m *Pipeline) String() string { return proto.CompactTextString(m) } +func (*Pipeline) ProtoMessage() {} +func (*Pipeline) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{2} +} +func (m *Pipeline) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Pipeline.Unmarshal(m, b) +} +func (m *Pipeline) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Pipeline.Marshal(b, m, deterministic) +} +func (dst *Pipeline) XXX_Merge(src proto.Message) { + xxx_messageInfo_Pipeline.Merge(dst, src) +} +func (m *Pipeline) XXX_Size() int { + return xxx_messageInfo_Pipeline.Size(m) +} +func (m *Pipeline) XXX_DiscardUnknown() { + xxx_messageInfo_Pipeline.DiscardUnknown(m) +} + +var xxx_messageInfo_Pipeline proto.InternalMessageInfo + +func (m *Pipeline) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *Pipeline) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Pipeline) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Pipeline) GetInputParameters() []*PipelineParameter { + if m != nil { + return m.InputParameters + } + return nil +} + +func (m *Pipeline) GetOutputParameters() []*PipelineParameter { + if m != nil { + return m.OutputParameters + } + return nil +} + +type isPipeline_Executor interface { + isPipeline_Executor() +} + +type Pipeline_Docker struct { + Docker *DockerExecutor `protobuf:"bytes,5,opt,name=docker,proto3,oneof"` +} + +func (*Pipeline_Docker) isPipeline_Executor() {} + +func (m *Pipeline) GetExecutor() isPipeline_Executor { + if m != nil { + return m.Executor + } + return nil +} + +func (m *Pipeline) GetDocker() *DockerExecutor { + if x, ok := m.GetExecutor().(*Pipeline_Docker); ok { + return x.Docker + } + return nil +} + +func (m *Pipeline) GetResources() *PipelineResources { + if m != nil { + return m.Resources + } + return nil +} + +func (m *Pipeline) GetPipelineId() string { + if m != nil { + return m.PipelineId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Pipeline) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Pipeline_OneofMarshaler, _Pipeline_OneofUnmarshaler, _Pipeline_OneofSizer, []interface{}{ + (*Pipeline_Docker)(nil), + } +} + +func _Pipeline_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Pipeline) + // executor + switch x := m.Executor.(type) { + case *Pipeline_Docker: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Docker); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Pipeline.Executor has unexpected type %T", x) + } + return nil +} + +func _Pipeline_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Pipeline) + switch tag { + case 5: // executor.docker + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DockerExecutor) + err := b.DecodeMessage(msg) + m.Executor = &Pipeline_Docker{msg} + return true, err + default: + return false, nil + } +} + +func _Pipeline_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Pipeline) + // executor + switch x := m.Executor.(type) { + case *Pipeline_Docker: + s := proto.Size(x.Docker) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request to create a pipeline. The pipeline field here should not have +// `pipelineId` populated, as that will be populated by the server. +type CreatePipelineRequest struct { + // The pipeline to create. Should not have `pipelineId` populated. + Pipeline *Pipeline `protobuf:"bytes,1,opt,name=pipeline,proto3" json:"pipeline,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreatePipelineRequest) Reset() { *m = CreatePipelineRequest{} } +func (m *CreatePipelineRequest) String() string { return proto.CompactTextString(m) } +func (*CreatePipelineRequest) ProtoMessage() {} +func (*CreatePipelineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{3} +} +func (m *CreatePipelineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreatePipelineRequest.Unmarshal(m, b) +} +func (m *CreatePipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreatePipelineRequest.Marshal(b, m, deterministic) +} +func (dst *CreatePipelineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreatePipelineRequest.Merge(dst, src) +} +func (m *CreatePipelineRequest) XXX_Size() int { + return xxx_messageInfo_CreatePipelineRequest.Size(m) +} +func (m *CreatePipelineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreatePipelineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreatePipelineRequest proto.InternalMessageInfo + +func (m *CreatePipelineRequest) GetPipeline() *Pipeline { + if m != nil { + return m.Pipeline + } + return nil +} + +// The pipeline run arguments. +type RunPipelineArgs struct { + // Required. The project in which to run the pipeline. The caller must have + // WRITER access to all Google Cloud services and resources (e.g. Google + // Compute Engine) will be used. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Pipeline input arguments; keys are defined in the pipeline documentation. + // All input parameters that do not have default values must be specified. + // If parameters with defaults are specified here, the defaults will be + // overridden. + Inputs map[string]string `protobuf:"bytes,2,rep,name=inputs,proto3" json:"inputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Pipeline output arguments; keys are defined in the pipeline + // documentation. All output parameters of without default values + // must be specified. If parameters with defaults are specified + // here, the defaults will be overridden. + Outputs map[string]string `protobuf:"bytes,3,rep,name=outputs,proto3" json:"outputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // The Google Cloud Service Account that will be used to access data and + // services. By default, the compute service account associated with + // `projectId` is used. + ServiceAccount *ServiceAccount `protobuf:"bytes,4,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + // This field is deprecated. Use `labels` instead. Client-specified pipeline + // operation identifier. + ClientId string `protobuf:"bytes,5,opt,name=client_id,json=clientId,proto3" json:"client_id,omitempty"` + // Specifies resource requirements/overrides for the pipeline run. + Resources *PipelineResources `protobuf:"bytes,6,opt,name=resources,proto3" json:"resources,omitempty"` + // Required. Logging options. Used by the service to communicate results + // to the user. + Logging *LoggingOptions `protobuf:"bytes,7,opt,name=logging,proto3" json:"logging,omitempty"` + // How long to keep the VM up after a failure (for example docker command + // failed, copying input or output files failed, etc). While the VM is up, one + // can ssh into the VM to debug. Default is 0; maximum allowed value is 1 day. + KeepVmAliveOnFailureDuration *duration.Duration `protobuf:"bytes,8,opt,name=keep_vm_alive_on_failure_duration,json=keepVmAliveOnFailureDuration,proto3" json:"keep_vm_alive_on_failure_duration,omitempty"` + // Labels to apply to this pipeline run. Labels will also be applied to + // compute resources (VM, disks) created by this pipeline run. When listing + // operations, operations can [filtered by labels] + // [google.longrunning.ListOperationsRequest.filter]. + // Label keys may not be empty; label values may be empty. Non-empty labels + // must be 1-63 characters long, and comply with [RFC1035] + // (https://www.ietf.org/rfc/rfc1035.txt). + // Specifically, the name must be 1-63 characters long and match the regular + // expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first + // character must be a lowercase letter, and all following characters must be + // a dash, lowercase letter, or digit, except the last character, which cannot + // be a dash. 
+ Labels map[string]string `protobuf:"bytes,9,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunPipelineArgs) Reset() { *m = RunPipelineArgs{} } +func (m *RunPipelineArgs) String() string { return proto.CompactTextString(m) } +func (*RunPipelineArgs) ProtoMessage() {} +func (*RunPipelineArgs) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{4} +} +func (m *RunPipelineArgs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunPipelineArgs.Unmarshal(m, b) +} +func (m *RunPipelineArgs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunPipelineArgs.Marshal(b, m, deterministic) +} +func (dst *RunPipelineArgs) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunPipelineArgs.Merge(dst, src) +} +func (m *RunPipelineArgs) XXX_Size() int { + return xxx_messageInfo_RunPipelineArgs.Size(m) +} +func (m *RunPipelineArgs) XXX_DiscardUnknown() { + xxx_messageInfo_RunPipelineArgs.DiscardUnknown(m) +} + +var xxx_messageInfo_RunPipelineArgs proto.InternalMessageInfo + +func (m *RunPipelineArgs) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *RunPipelineArgs) GetInputs() map[string]string { + if m != nil { + return m.Inputs + } + return nil +} + +func (m *RunPipelineArgs) GetOutputs() map[string]string { + if m != nil { + return m.Outputs + } + return nil +} + +func (m *RunPipelineArgs) GetServiceAccount() *ServiceAccount { + if m != nil { + return m.ServiceAccount + } + return nil +} + +func (m *RunPipelineArgs) GetClientId() string { + if m != nil { + return m.ClientId + } + return "" +} + +func (m *RunPipelineArgs) GetResources() *PipelineResources { + if m != nil { + return m.Resources + } + return nil +} + +func (m *RunPipelineArgs) GetLogging() *LoggingOptions { + if m != nil { + return m.Logging + } + return nil +} + +func (m *RunPipelineArgs) GetKeepVmAliveOnFailureDuration() *duration.Duration { + if m != nil { + return m.KeepVmAliveOnFailureDuration + } + return nil +} + +func (m *RunPipelineArgs) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// The request to run a pipeline. If `pipelineId` is specified, it +// refers to a saved pipeline created with CreatePipeline and set as +// the `pipelineId` of the returned Pipeline object. If +// `ephemeralPipeline` is specified, that pipeline is run once +// with the given args and not saved. It is an error to specify both +// `pipelineId` and `ephemeralPipeline`. `pipelineArgs` +// must be specified. +type RunPipelineRequest struct { + // Types that are valid to be assigned to Pipeline: + // *RunPipelineRequest_PipelineId + // *RunPipelineRequest_EphemeralPipeline + Pipeline isRunPipelineRequest_Pipeline `protobuf_oneof:"pipeline"` + // The arguments to use when running this pipeline. 
+ PipelineArgs *RunPipelineArgs `protobuf:"bytes,3,opt,name=pipeline_args,json=pipelineArgs,proto3" json:"pipeline_args,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RunPipelineRequest) Reset() { *m = RunPipelineRequest{} } +func (m *RunPipelineRequest) String() string { return proto.CompactTextString(m) } +func (*RunPipelineRequest) ProtoMessage() {} +func (*RunPipelineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{5} +} +func (m *RunPipelineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RunPipelineRequest.Unmarshal(m, b) +} +func (m *RunPipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RunPipelineRequest.Marshal(b, m, deterministic) +} +func (dst *RunPipelineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RunPipelineRequest.Merge(dst, src) +} +func (m *RunPipelineRequest) XXX_Size() int { + return xxx_messageInfo_RunPipelineRequest.Size(m) +} +func (m *RunPipelineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RunPipelineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RunPipelineRequest proto.InternalMessageInfo + +type isRunPipelineRequest_Pipeline interface { + isRunPipelineRequest_Pipeline() +} + +type RunPipelineRequest_PipelineId struct { + PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3,oneof"` +} + +type RunPipelineRequest_EphemeralPipeline struct { + EphemeralPipeline *Pipeline `protobuf:"bytes,2,opt,name=ephemeral_pipeline,json=ephemeralPipeline,proto3,oneof"` +} + +func (*RunPipelineRequest_PipelineId) isRunPipelineRequest_Pipeline() {} + +func (*RunPipelineRequest_EphemeralPipeline) isRunPipelineRequest_Pipeline() {} + +func (m *RunPipelineRequest) GetPipeline() isRunPipelineRequest_Pipeline { + if m != nil { + return m.Pipeline + } + return nil +} + +func (m *RunPipelineRequest) GetPipelineId() string { + if x, ok := m.GetPipeline().(*RunPipelineRequest_PipelineId); ok { + return x.PipelineId + } + return "" +} + +func (m *RunPipelineRequest) GetEphemeralPipeline() *Pipeline { + if x, ok := m.GetPipeline().(*RunPipelineRequest_EphemeralPipeline); ok { + return x.EphemeralPipeline + } + return nil +} + +func (m *RunPipelineRequest) GetPipelineArgs() *RunPipelineArgs { + if m != nil { + return m.PipelineArgs + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
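+// Application code does not go through these helpers; it chooses which saved
+// or ephemeral pipeline to run by assigning one of the generated wrapper types
+// to the Pipeline field. A minimal sketch (editor's illustration only; the
+// literal values are assumptions, not part of the generated API):
+//
+//   req := &RunPipelineRequest{
+//           // Run a previously saved pipeline by its id...
+//           Pipeline: &RunPipelineRequest_PipelineId{PipelineId: "my-pipeline-id"},
+//           // ...with the required runtime arguments.
+//           PipelineArgs: &RunPipelineArgs{
+//                   ProjectId: "my-project",
+//                   Logging:   &LoggingOptions{GcsPath: "gs://my-bucket/logs/"},
+//           },
+//   }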
+func (*RunPipelineRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RunPipelineRequest_OneofMarshaler, _RunPipelineRequest_OneofUnmarshaler, _RunPipelineRequest_OneofSizer, []interface{}{ + (*RunPipelineRequest_PipelineId)(nil), + (*RunPipelineRequest_EphemeralPipeline)(nil), + } +} + +func _RunPipelineRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RunPipelineRequest) + // pipeline + switch x := m.Pipeline.(type) { + case *RunPipelineRequest_PipelineId: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.PipelineId) + case *RunPipelineRequest_EphemeralPipeline: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EphemeralPipeline); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RunPipelineRequest.Pipeline has unexpected type %T", x) + } + return nil +} + +func _RunPipelineRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RunPipelineRequest) + switch tag { + case 1: // pipeline.pipeline_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Pipeline = &RunPipelineRequest_PipelineId{x} + return true, err + case 2: // pipeline.ephemeral_pipeline + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Pipeline) + err := b.DecodeMessage(msg) + m.Pipeline = &RunPipelineRequest_EphemeralPipeline{msg} + return true, err + default: + return false, nil + } +} + +func _RunPipelineRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RunPipelineRequest) + // pipeline + switch x := m.Pipeline.(type) { + case *RunPipelineRequest_PipelineId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.PipelineId))) + n += len(x.PipelineId) + case *RunPipelineRequest_EphemeralPipeline: + s := proto.Size(x.EphemeralPipeline) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A request to get a saved pipeline by id. +type GetPipelineRequest struct { + // Caller must have READ access to the project in which this pipeline + // is defined. 
+ PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPipelineRequest) Reset() { *m = GetPipelineRequest{} } +func (m *GetPipelineRequest) String() string { return proto.CompactTextString(m) } +func (*GetPipelineRequest) ProtoMessage() {} +func (*GetPipelineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{6} +} +func (m *GetPipelineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPipelineRequest.Unmarshal(m, b) +} +func (m *GetPipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPipelineRequest.Marshal(b, m, deterministic) +} +func (dst *GetPipelineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPipelineRequest.Merge(dst, src) +} +func (m *GetPipelineRequest) XXX_Size() int { + return xxx_messageInfo_GetPipelineRequest.Size(m) +} +func (m *GetPipelineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPipelineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPipelineRequest proto.InternalMessageInfo + +func (m *GetPipelineRequest) GetPipelineId() string { + if m != nil { + return m.PipelineId + } + return "" +} + +// A request to list pipelines in a given project. Pipelines can be +// filtered by name using `namePrefix`: all pipelines with names that +// begin with `namePrefix` will be returned. Uses standard pagination: +// `pageSize` indicates how many pipelines to return, and +// `pageToken` comes from a previous ListPipelinesResponse to +// indicate offset. +type ListPipelinesRequest struct { + // Required. The name of the project to search for pipelines. Caller + // must have READ access to this project. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Pipelines with names that match this prefix should be + // returned. If unspecified, all pipelines in the project, up to + // `pageSize`, will be returned. + NamePrefix string `protobuf:"bytes,2,opt,name=name_prefix,json=namePrefix,proto3" json:"name_prefix,omitempty"` + // Number of pipelines to return at once. Defaults to 256, and max + // is 2048. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Token to use to indicate where to start getting results. + // If unspecified, returns the first page of results. 
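+ // A paging sketch (editor's illustration only; `client` and `ctx` are
+ // assumed to exist and error handling is omitted):
+ //
+ //   req := &ListPipelinesRequest{ProjectId: "my-project", PageSize: 256}
+ //   for {
+ //           resp, _ := client.ListPipelines(ctx, req)
+ //           // ...use resp.GetPipelines()...
+ //           if resp.GetNextPageToken() == "" {
+ //                   break
+ //           }
+ //           req.PageToken = resp.GetNextPageToken()
+ //   }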
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelinesRequest) Reset() { *m = ListPipelinesRequest{} } +func (m *ListPipelinesRequest) String() string { return proto.CompactTextString(m) } +func (*ListPipelinesRequest) ProtoMessage() {} +func (*ListPipelinesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{7} +} +func (m *ListPipelinesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelinesRequest.Unmarshal(m, b) +} +func (m *ListPipelinesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelinesRequest.Marshal(b, m, deterministic) +} +func (dst *ListPipelinesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPipelinesRequest.Merge(dst, src) +} +func (m *ListPipelinesRequest) XXX_Size() int { + return xxx_messageInfo_ListPipelinesRequest.Size(m) +} +func (m *ListPipelinesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListPipelinesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelinesRequest proto.InternalMessageInfo + +func (m *ListPipelinesRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ListPipelinesRequest) GetNamePrefix() string { + if m != nil { + return m.NamePrefix + } + return "" +} + +func (m *ListPipelinesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListPipelinesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response of ListPipelines. Contains at most `pageSize` +// pipelines. If it contains `pageSize` pipelines, and more pipelines +// exist, then `nextPageToken` will be populated and should be +// used as the `pageToken` argument to a subsequent ListPipelines +// request. +type ListPipelinesResponse struct { + // The matched pipelines. + Pipelines []*Pipeline `protobuf:"bytes,1,rep,name=pipelines,proto3" json:"pipelines,omitempty"` + // The token to use to get the next page of results. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPipelinesResponse) Reset() { *m = ListPipelinesResponse{} } +func (m *ListPipelinesResponse) String() string { return proto.CompactTextString(m) } +func (*ListPipelinesResponse) ProtoMessage() {} +func (*ListPipelinesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{8} +} +func (m *ListPipelinesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPipelinesResponse.Unmarshal(m, b) +} +func (m *ListPipelinesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPipelinesResponse.Marshal(b, m, deterministic) +} +func (dst *ListPipelinesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPipelinesResponse.Merge(dst, src) +} +func (m *ListPipelinesResponse) XXX_Size() int { + return xxx_messageInfo_ListPipelinesResponse.Size(m) +} +func (m *ListPipelinesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListPipelinesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPipelinesResponse proto.InternalMessageInfo + +func (m *ListPipelinesResponse) GetPipelines() []*Pipeline { + if m != nil { + return m.Pipelines + } + return nil +} + +func (m *ListPipelinesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request to delete a saved pipeline by ID. +type DeletePipelineRequest struct { + // Caller must have WRITE access to the project in which this pipeline + // is defined. + PipelineId string `protobuf:"bytes,1,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePipelineRequest) Reset() { *m = DeletePipelineRequest{} } +func (m *DeletePipelineRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePipelineRequest) ProtoMessage() {} +func (*DeletePipelineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{9} +} +func (m *DeletePipelineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePipelineRequest.Unmarshal(m, b) +} +func (m *DeletePipelineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePipelineRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePipelineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePipelineRequest.Merge(dst, src) +} +func (m *DeletePipelineRequest) XXX_Size() int { + return xxx_messageInfo_DeletePipelineRequest.Size(m) +} +func (m *DeletePipelineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePipelineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePipelineRequest proto.InternalMessageInfo + +func (m *DeletePipelineRequest) GetPipelineId() string { + if m != nil { + return m.PipelineId + } + return "" +} + +// Request to get controller configuation. Should only be used +// by VMs created by the Pipelines Service and not by end users. +type GetControllerConfigRequest struct { + // The operation to retrieve controller configuration for. 
+ OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + ValidationToken uint64 `protobuf:"varint,2,opt,name=validation_token,json=validationToken,proto3" json:"validation_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetControllerConfigRequest) Reset() { *m = GetControllerConfigRequest{} } +func (m *GetControllerConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetControllerConfigRequest) ProtoMessage() {} +func (*GetControllerConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{10} +} +func (m *GetControllerConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetControllerConfigRequest.Unmarshal(m, b) +} +func (m *GetControllerConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetControllerConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetControllerConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetControllerConfigRequest.Merge(dst, src) +} +func (m *GetControllerConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetControllerConfigRequest.Size(m) +} +func (m *GetControllerConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetControllerConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetControllerConfigRequest proto.InternalMessageInfo + +func (m *GetControllerConfigRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *GetControllerConfigRequest) GetValidationToken() uint64 { + if m != nil { + return m.ValidationToken + } + return 0 +} + +// Stores the information that the controller will fetch from the +// server in order to run. Should only be used by VMs created by the +// Pipelines Service and not by end users. 
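+//
+// A fetch-and-use sketch (editor's illustration only; `client`, `ctx`, the
+// operation name, the `token` value, and the `run` helper are assumptions):
+//
+//   cfg, err := client.GetControllerConfig(ctx, &GetControllerConfigRequest{
+//           OperationId:     "operations/abc123",
+//           ValidationToken: token,
+//   })
+//   if err == nil {
+//           run(cfg.GetImage(), cfg.GetCmd(), cfg.GetVars())
+//   }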
+type ControllerConfig struct { + Image string `protobuf:"bytes,1,opt,name=image,proto3" json:"image,omitempty"` + Cmd string `protobuf:"bytes,2,opt,name=cmd,proto3" json:"cmd,omitempty"` + GcsLogPath string `protobuf:"bytes,3,opt,name=gcs_log_path,json=gcsLogPath,proto3" json:"gcs_log_path,omitempty"` + MachineType string `protobuf:"bytes,4,opt,name=machine_type,json=machineType,proto3" json:"machine_type,omitempty"` + Vars map[string]string `protobuf:"bytes,5,rep,name=vars,proto3" json:"vars,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Disks map[string]string `protobuf:"bytes,6,rep,name=disks,proto3" json:"disks,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + GcsSources map[string]*ControllerConfig_RepeatedString `protobuf:"bytes,7,rep,name=gcs_sources,json=gcsSources,proto3" json:"gcs_sources,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + GcsSinks map[string]*ControllerConfig_RepeatedString `protobuf:"bytes,8,rep,name=gcs_sinks,json=gcsSinks,proto3" json:"gcs_sinks,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ControllerConfig) Reset() { *m = ControllerConfig{} } +func (m *ControllerConfig) String() string { return proto.CompactTextString(m) } +func (*ControllerConfig) ProtoMessage() {} +func (*ControllerConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{11} +} +func (m *ControllerConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ControllerConfig.Unmarshal(m, b) +} +func (m *ControllerConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ControllerConfig.Marshal(b, m, deterministic) +} +func (dst *ControllerConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ControllerConfig.Merge(dst, src) +} +func (m *ControllerConfig) XXX_Size() int { + return xxx_messageInfo_ControllerConfig.Size(m) +} +func (m *ControllerConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ControllerConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ControllerConfig proto.InternalMessageInfo + +func (m *ControllerConfig) GetImage() string { + if m != nil { + return m.Image + } + return "" +} + +func (m *ControllerConfig) GetCmd() string { + if m != nil { + return m.Cmd + } + return "" +} + +func (m *ControllerConfig) GetGcsLogPath() string { + if m != nil { + return m.GcsLogPath + } + return "" +} + +func (m *ControllerConfig) GetMachineType() string { + if m != nil { + return m.MachineType + } + return "" +} + +func (m *ControllerConfig) GetVars() map[string]string { + if m != nil { + return m.Vars + } + return nil +} + +func (m *ControllerConfig) GetDisks() map[string]string { + if m != nil { + return m.Disks + } + return nil +} + +func (m *ControllerConfig) GetGcsSources() map[string]*ControllerConfig_RepeatedString { + if m != nil { + return m.GcsSources + } + return nil +} + +func (m *ControllerConfig) GetGcsSinks() map[string]*ControllerConfig_RepeatedString { + if m != nil { + return m.GcsSinks + } + return nil +} + +type ControllerConfig_RepeatedString struct { + Values []string `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func 
(m *ControllerConfig_RepeatedString) Reset() { *m = ControllerConfig_RepeatedString{} } +func (m *ControllerConfig_RepeatedString) String() string { return proto.CompactTextString(m) } +func (*ControllerConfig_RepeatedString) ProtoMessage() {} +func (*ControllerConfig_RepeatedString) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{11, 0} +} +func (m *ControllerConfig_RepeatedString) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ControllerConfig_RepeatedString.Unmarshal(m, b) +} +func (m *ControllerConfig_RepeatedString) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ControllerConfig_RepeatedString.Marshal(b, m, deterministic) +} +func (dst *ControllerConfig_RepeatedString) XXX_Merge(src proto.Message) { + xxx_messageInfo_ControllerConfig_RepeatedString.Merge(dst, src) +} +func (m *ControllerConfig_RepeatedString) XXX_Size() int { + return xxx_messageInfo_ControllerConfig_RepeatedString.Size(m) +} +func (m *ControllerConfig_RepeatedString) XXX_DiscardUnknown() { + xxx_messageInfo_ControllerConfig_RepeatedString.DiscardUnknown(m) +} + +var xxx_messageInfo_ControllerConfig_RepeatedString proto.InternalMessageInfo + +func (m *ControllerConfig_RepeatedString) GetValues() []string { + if m != nil { + return m.Values + } + return nil +} + +// Stores the list of events and times they occured for major events in job +// execution. +type TimestampEvent struct { + // String indicating the type of event + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // The time this event occured. + Timestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampEvent) Reset() { *m = TimestampEvent{} } +func (m *TimestampEvent) String() string { return proto.CompactTextString(m) } +func (*TimestampEvent) ProtoMessage() {} +func (*TimestampEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{12} +} +func (m *TimestampEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampEvent.Unmarshal(m, b) +} +func (m *TimestampEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampEvent.Marshal(b, m, deterministic) +} +func (dst *TimestampEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampEvent.Merge(dst, src) +} +func (m *TimestampEvent) XXX_Size() int { + return xxx_messageInfo_TimestampEvent.Size(m) +} +func (m *TimestampEvent) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampEvent proto.InternalMessageInfo + +func (m *TimestampEvent) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TimestampEvent) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +// Request to set operation status. Should only be used by VMs +// created by the Pipelines Service and not by end users. 
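+//
+// A construction sketch (editor's illustration only; the literal values are
+// assumptions, and ptypes refers to the github.com/golang/protobuf/ptypes
+// helper package):
+//
+//   req := &SetOperationStatusRequest{
+//           OperationId: "operations/abc123",
+//           TimestampEvents: []*TimestampEvent{
+//                   {Description: "copied inputs", Timestamp: ptypes.TimestampNow()},
+//           },
+//           ErrorCode: code.Code_OK,
+//   }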
+type SetOperationStatusRequest struct { + OperationId string `protobuf:"bytes,1,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + TimestampEvents []*TimestampEvent `protobuf:"bytes,2,rep,name=timestamp_events,json=timestampEvents,proto3" json:"timestamp_events,omitempty"` + ErrorCode code.Code `protobuf:"varint,3,opt,name=error_code,json=errorCode,proto3,enum=google.rpc.Code" json:"error_code,omitempty"` + ErrorMessage string `protobuf:"bytes,4,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + ValidationToken uint64 `protobuf:"varint,5,opt,name=validation_token,json=validationToken,proto3" json:"validation_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetOperationStatusRequest) Reset() { *m = SetOperationStatusRequest{} } +func (m *SetOperationStatusRequest) String() string { return proto.CompactTextString(m) } +func (*SetOperationStatusRequest) ProtoMessage() {} +func (*SetOperationStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{13} +} +func (m *SetOperationStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetOperationStatusRequest.Unmarshal(m, b) +} +func (m *SetOperationStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetOperationStatusRequest.Marshal(b, m, deterministic) +} +func (dst *SetOperationStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetOperationStatusRequest.Merge(dst, src) +} +func (m *SetOperationStatusRequest) XXX_Size() int { + return xxx_messageInfo_SetOperationStatusRequest.Size(m) +} +func (m *SetOperationStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetOperationStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetOperationStatusRequest proto.InternalMessageInfo + +func (m *SetOperationStatusRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +func (m *SetOperationStatusRequest) GetTimestampEvents() []*TimestampEvent { + if m != nil { + return m.TimestampEvents + } + return nil +} + +func (m *SetOperationStatusRequest) GetErrorCode() code.Code { + if m != nil { + return m.ErrorCode + } + return code.Code_OK +} + +func (m *SetOperationStatusRequest) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +func (m *SetOperationStatusRequest) GetValidationToken() uint64 { + if m != nil { + return m.ValidationToken + } + return 0 +} + +// A Google Cloud Service Account. +type ServiceAccount struct { + // Email address of the service account. Defaults to `default`, + // which uses the compute service account associated with the project. + Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email,omitempty"` + // List of scopes to be enabled for this service account on the VM. 
+ // The following scopes are automatically included: + // + // * https://www.googleapis.com/auth/compute + // * https://www.googleapis.com/auth/devstorage.full_control + // * https://www.googleapis.com/auth/genomics + // * https://www.googleapis.com/auth/logging.write + // * https://www.googleapis.com/auth/monitoring.write + Scopes []string `protobuf:"bytes,2,rep,name=scopes,proto3" json:"scopes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceAccount) Reset() { *m = ServiceAccount{} } +func (m *ServiceAccount) String() string { return proto.CompactTextString(m) } +func (*ServiceAccount) ProtoMessage() {} +func (*ServiceAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{14} +} +func (m *ServiceAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceAccount.Unmarshal(m, b) +} +func (m *ServiceAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceAccount.Marshal(b, m, deterministic) +} +func (dst *ServiceAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceAccount.Merge(dst, src) +} +func (m *ServiceAccount) XXX_Size() int { + return xxx_messageInfo_ServiceAccount.Size(m) +} +func (m *ServiceAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceAccount proto.InternalMessageInfo + +func (m *ServiceAccount) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +func (m *ServiceAccount) GetScopes() []string { + if m != nil { + return m.Scopes + } + return nil +} + +// The logging options for the pipeline run. +type LoggingOptions struct { + // The location in Google Cloud Storage to which the pipeline logs + // will be copied. Can be specified as a fully qualified directory + // path, in which case logs will be output with a unique identifier + // as the filename in that directory, or as a fully specified path, + // which must end in `.log`, in which case that path will be + // used, and the user must ensure that logs are not + // overwritten. Stdout and stderr logs from the run are also + // generated and output as `-stdout.log` and `-stderr.log`. 
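+ //
+ // For example (editor's illustration; the bucket name is an assumption), a
+ // directory-style value such as
+ //
+ //   Logging: &LoggingOptions{GcsPath: "gs://my-bucket/pipeline-logs/"}
+ //
+ // lets the service choose a unique log filename inside that directory.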
+ GcsPath string `protobuf:"bytes,1,opt,name=gcs_path,json=gcsPath,proto3" json:"gcs_path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoggingOptions) Reset() { *m = LoggingOptions{} } +func (m *LoggingOptions) String() string { return proto.CompactTextString(m) } +func (*LoggingOptions) ProtoMessage() {} +func (*LoggingOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{15} +} +func (m *LoggingOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoggingOptions.Unmarshal(m, b) +} +func (m *LoggingOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoggingOptions.Marshal(b, m, deterministic) +} +func (dst *LoggingOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoggingOptions.Merge(dst, src) +} +func (m *LoggingOptions) XXX_Size() int { + return xxx_messageInfo_LoggingOptions.Size(m) +} +func (m *LoggingOptions) XXX_DiscardUnknown() { + xxx_messageInfo_LoggingOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_LoggingOptions proto.InternalMessageInfo + +func (m *LoggingOptions) GetGcsPath() string { + if m != nil { + return m.GcsPath + } + return "" +} + +// The system resources for the pipeline run. +type PipelineResources struct { + // The minimum number of cores to use. Defaults to 1. + MinimumCpuCores int32 `protobuf:"varint,1,opt,name=minimum_cpu_cores,json=minimumCpuCores,proto3" json:"minimum_cpu_cores,omitempty"` + // Whether to use preemptible VMs. Defaults to `false`. In order to use this, + // must be true for both create time and run time. Cannot be true at run time + // if false at create time. + Preemptible bool `protobuf:"varint,2,opt,name=preemptible,proto3" json:"preemptible,omitempty"` + // The minimum amount of RAM to use. Defaults to 3.75 (GB) + MinimumRamGb float64 `protobuf:"fixed64,3,opt,name=minimum_ram_gb,json=minimumRamGb,proto3" json:"minimum_ram_gb,omitempty"` + // Disks to attach. + Disks []*PipelineResources_Disk `protobuf:"bytes,4,rep,name=disks,proto3" json:"disks,omitempty"` + // List of Google Compute Engine availability zones to which resource + // creation will restricted. If empty, any zone may be chosen. + Zones []string `protobuf:"bytes,5,rep,name=zones,proto3" json:"zones,omitempty"` + // The size of the boot disk. Defaults to 10 (GB). + BootDiskSizeGb int32 `protobuf:"varint,6,opt,name=boot_disk_size_gb,json=bootDiskSizeGb,proto3" json:"boot_disk_size_gb,omitempty"` + // Whether to assign an external IP to the instance. This is an experimental + // feature that may go away. Defaults to false. + // Corresponds to `--no_address` flag for [gcloud compute instances create] + // (https://cloud.google.com/sdk/gcloud/reference/compute/instances/create). + // In order to use this, must be true for both create time and run time. + // Cannot be true at run time if false at create time. If you need to ssh into + // a private IP VM for debugging, you can ssh to a public VM and then ssh into + // the private VM's Internal IP. If noAddress is set, this pipeline run may + // only load docker images from Google Container Registry and not Docker Hub. 
+ // ** Note: To use this option, your project must be in Google Access for + // Private IPs Early Access Program.** + NoAddress bool `protobuf:"varint,7,opt,name=no_address,json=noAddress,proto3" json:"no_address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineResources) Reset() { *m = PipelineResources{} } +func (m *PipelineResources) String() string { return proto.CompactTextString(m) } +func (*PipelineResources) ProtoMessage() {} +func (*PipelineResources) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{16} +} +func (m *PipelineResources) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PipelineResources.Unmarshal(m, b) +} +func (m *PipelineResources) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineResources.Marshal(b, m, deterministic) +} +func (dst *PipelineResources) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineResources.Merge(dst, src) +} +func (m *PipelineResources) XXX_Size() int { + return xxx_messageInfo_PipelineResources.Size(m) +} +func (m *PipelineResources) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineResources.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineResources proto.InternalMessageInfo + +func (m *PipelineResources) GetMinimumCpuCores() int32 { + if m != nil { + return m.MinimumCpuCores + } + return 0 +} + +func (m *PipelineResources) GetPreemptible() bool { + if m != nil { + return m.Preemptible + } + return false +} + +func (m *PipelineResources) GetMinimumRamGb() float64 { + if m != nil { + return m.MinimumRamGb + } + return 0 +} + +func (m *PipelineResources) GetDisks() []*PipelineResources_Disk { + if m != nil { + return m.Disks + } + return nil +} + +func (m *PipelineResources) GetZones() []string { + if m != nil { + return m.Zones + } + return nil +} + +func (m *PipelineResources) GetBootDiskSizeGb() int32 { + if m != nil { + return m.BootDiskSizeGb + } + return 0 +} + +func (m *PipelineResources) GetNoAddress() bool { + if m != nil { + return m.NoAddress + } + return false +} + +// A Google Compute Engine disk resource specification. +type PipelineResources_Disk struct { + // Required. The name of the disk that can be used in the pipeline + // parameters. Must be 1 - 63 characters. + // The name "boot" is reserved for system use. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The type of the disk to create. + Type PipelineResources_Disk_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.genomics.v1alpha2.PipelineResources_Disk_Type" json:"type,omitempty"` + // The size of the disk. Defaults to 500 (GB). + // This field is not applicable for local SSD. + SizeGb int32 `protobuf:"varint,3,opt,name=size_gb,json=sizeGb,proto3" json:"size_gb,omitempty"` + // The full or partial URL of the persistent disk to attach. See + // https://cloud.google.com/compute/docs/reference/latest/instances#resource + // and + // https://cloud.google.com/compute/docs/disks/persistent-disks#snapshots + // for more details. + Source string `protobuf:"bytes,4,opt,name=source,proto3" json:"source,omitempty"` + // Deprecated. Disks created by the Pipelines API will be deleted at the end + // of the pipeline run, regardless of what this field is set to. 
+ AutoDelete bool `protobuf:"varint,6,opt,name=auto_delete,json=autoDelete,proto3" json:"auto_delete,omitempty"` + // Required at create time and cannot be overridden at run time. + // Specifies the path in the docker container where files on + // this disk should be located. For example, if `mountPoint` + // is `/mnt/disk`, and the parameter has `localPath` + // `inputs/file.txt`, the docker container can access the data at + // `/mnt/disk/inputs/file.txt`. + MountPoint string `protobuf:"bytes,8,opt,name=mount_point,json=mountPoint,proto3" json:"mount_point,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineResources_Disk) Reset() { *m = PipelineResources_Disk{} } +func (m *PipelineResources_Disk) String() string { return proto.CompactTextString(m) } +func (*PipelineResources_Disk) ProtoMessage() {} +func (*PipelineResources_Disk) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{16, 0} +} +func (m *PipelineResources_Disk) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PipelineResources_Disk.Unmarshal(m, b) +} +func (m *PipelineResources_Disk) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineResources_Disk.Marshal(b, m, deterministic) +} +func (dst *PipelineResources_Disk) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineResources_Disk.Merge(dst, src) +} +func (m *PipelineResources_Disk) XXX_Size() int { + return xxx_messageInfo_PipelineResources_Disk.Size(m) +} +func (m *PipelineResources_Disk) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineResources_Disk.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineResources_Disk proto.InternalMessageInfo + +func (m *PipelineResources_Disk) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PipelineResources_Disk) GetType() PipelineResources_Disk_Type { + if m != nil { + return m.Type + } + return PipelineResources_Disk_TYPE_UNSPECIFIED +} + +func (m *PipelineResources_Disk) GetSizeGb() int32 { + if m != nil { + return m.SizeGb + } + return 0 +} + +func (m *PipelineResources_Disk) GetSource() string { + if m != nil { + return m.Source + } + return "" +} + +func (m *PipelineResources_Disk) GetAutoDelete() bool { + if m != nil { + return m.AutoDelete + } + return false +} + +func (m *PipelineResources_Disk) GetMountPoint() string { + if m != nil { + return m.MountPoint + } + return "" +} + +// Parameters facilitate setting and delivering data into the +// pipeline's execution environment. They are defined at create time, +// with optional defaults, and can be overridden at run time. +// +// If `localCopy` is unset, then the parameter specifies a string that +// is passed as-is into the pipeline, as the value of the environment +// variable with the given name. A default value can be optionally +// specified at create time. The default can be overridden at run time +// using the inputs map. If no default is given, a value must be +// supplied at runtime. +// +// If `localCopy` is defined, then the parameter specifies a data +// source or sink, both in Google Cloud Storage and on the Docker container +// where the pipeline computation is run. The [service account associated with +// the Pipeline][google.genomics.v1alpha2.RunPipelineArgs.service_account] (by +// default the project's Compute Engine service account) must have access to the +// Google Cloud Storage paths. 
+// +// At run time, the Google Cloud Storage paths can be overridden if a default +// was provided at create time, or must be set otherwise. The pipeline runner +// should add a key/value pair to either the inputs or outputs map. The +// indicated data copies will be carried out before/after pipeline execution, +// just as if the corresponding arguments were provided to `gsutil cp`. +// +// For example: Given the following `PipelineParameter`, specified +// in the `inputParameters` list: +// +// ``` +// {name: "input_file", localCopy: {path: "file.txt", disk: "pd1"}} +// ``` +// +// where `disk` is defined in the `PipelineResources` object as: +// +// ``` +// {name: "pd1", mountPoint: "/mnt/disk/"} +// ``` +// +// We create a disk named `pd1`, mount it on the host VM, and map +// `/mnt/pd1` to `/mnt/disk` in the docker container. At +// runtime, an entry for `input_file` would be required in the inputs +// map, such as: +// +// ``` +// inputs["input_file"] = "gs://my-bucket/bar.txt" +// ``` +// +// This would generate the following gsutil call: +// +// ``` +// gsutil cp gs://my-bucket/bar.txt /mnt/pd1/file.txt +// ``` +// +// The file `/mnt/pd1/file.txt` maps to `/mnt/disk/file.txt` in the +// Docker container. Acceptable paths are: +// +// +// +// +// +// +// +// +// +//
+//   Google Cloud storage path | Local path
+//   file                      | file
+//   glob                      | directory
+//
+// For outputs, the direction of the copy is reversed:
+//
+// ```
+// gsutil cp /mnt/disk/file.txt gs://my-bucket/bar.txt
+// ```
+//
+// Acceptable paths are:
+//
+//   Local path | Google Cloud Storage path
+//   file       | file
+//   file       | directory - directory must already exist
+//   glob       | directory - directory will be created if it doesn't exist
+// +// One restriction due to docker limitations, is that for outputs that are found +// on the boot disk, the local path cannot be a glob and must be a file. +type PipelineParameter struct { + // Required. Name of the parameter - the pipeline runner uses this string + // as the key to the input and output maps in RunPipeline. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Human-readable description. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The default value for this parameter. Can be overridden at runtime. + // If `localCopy` is present, then this must be a Google Cloud Storage path + // beginning with `gs://`. + DefaultValue string `protobuf:"bytes,5,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + // If present, this parameter is marked for copying to and from the VM. + // `LocalCopy` indicates where on the VM the file should be. The value + // given to this parameter (either at runtime or using `defaultValue`) + // must be the remote path where the file should be. + LocalCopy *PipelineParameter_LocalCopy `protobuf:"bytes,6,opt,name=local_copy,json=localCopy,proto3" json:"local_copy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineParameter) Reset() { *m = PipelineParameter{} } +func (m *PipelineParameter) String() string { return proto.CompactTextString(m) } +func (*PipelineParameter) ProtoMessage() {} +func (*PipelineParameter) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{17} +} +func (m *PipelineParameter) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PipelineParameter.Unmarshal(m, b) +} +func (m *PipelineParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineParameter.Marshal(b, m, deterministic) +} +func (dst *PipelineParameter) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineParameter.Merge(dst, src) +} +func (m *PipelineParameter) XXX_Size() int { + return xxx_messageInfo_PipelineParameter.Size(m) +} +func (m *PipelineParameter) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineParameter.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineParameter proto.InternalMessageInfo + +func (m *PipelineParameter) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *PipelineParameter) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *PipelineParameter) GetDefaultValue() string { + if m != nil { + return m.DefaultValue + } + return "" +} + +func (m *PipelineParameter) GetLocalCopy() *PipelineParameter_LocalCopy { + if m != nil { + return m.LocalCopy + } + return nil +} + +// LocalCopy defines how a remote file should be copied to and from the VM. +type PipelineParameter_LocalCopy struct { + // Required. The path within the user's docker container where + // this input should be localized to and from, relative to the specified + // disk's mount point. For example: file.txt, + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + // Required. The name of the disk where this parameter is + // located. Can be the name of one of the disks specified in the + // Resources field, or "boot", which represents the Docker + // instance's boot disk and has a mount point of `/`. 
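+ //
+ // For example (editor's illustration, reusing the `pd1` disk from the
+ // comment above):
+ //
+ //   LocalCopy: &PipelineParameter_LocalCopy{Path: "file.txt", Disk: "pd1"}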
+ Disk string `protobuf:"bytes,2,opt,name=disk,proto3" json:"disk,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PipelineParameter_LocalCopy) Reset() { *m = PipelineParameter_LocalCopy{} } +func (m *PipelineParameter_LocalCopy) String() string { return proto.CompactTextString(m) } +func (*PipelineParameter_LocalCopy) ProtoMessage() {} +func (*PipelineParameter_LocalCopy) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{17, 0} +} +func (m *PipelineParameter_LocalCopy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PipelineParameter_LocalCopy.Unmarshal(m, b) +} +func (m *PipelineParameter_LocalCopy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PipelineParameter_LocalCopy.Marshal(b, m, deterministic) +} +func (dst *PipelineParameter_LocalCopy) XXX_Merge(src proto.Message) { + xxx_messageInfo_PipelineParameter_LocalCopy.Merge(dst, src) +} +func (m *PipelineParameter_LocalCopy) XXX_Size() int { + return xxx_messageInfo_PipelineParameter_LocalCopy.Size(m) +} +func (m *PipelineParameter_LocalCopy) XXX_DiscardUnknown() { + xxx_messageInfo_PipelineParameter_LocalCopy.DiscardUnknown(m) +} + +var xxx_messageInfo_PipelineParameter_LocalCopy proto.InternalMessageInfo + +func (m *PipelineParameter_LocalCopy) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *PipelineParameter_LocalCopy) GetDisk() string { + if m != nil { + return m.Disk + } + return "" +} + +// The Docker execuctor specification. +type DockerExecutor struct { + // Required. Image name from either Docker Hub or Google Container Registry. + // Users that run pipelines must have READ access to the image. + ImageName string `protobuf:"bytes,1,opt,name=image_name,json=imageName,proto3" json:"image_name,omitempty"` + // Required. The command or newline delimited script to run. The command + // string will be executed within a bash shell. + // + // If the command exits with a non-zero exit code, output parameter + // de-localization will be skipped and the pipeline operation's + // [`error`][google.longrunning.Operation.error] field will be populated. + // + // Maximum command string length is 16384. 
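+ //
+ // For example (editor's illustration; the image and paths are assumptions):
+ //
+ //   &DockerExecutor{
+ //           ImageName: "ubuntu:16.04",
+ //           Cmd:       "cat /mnt/disk/input.txt > /mnt/disk/output.txt",
+ //   }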
+ Cmd string `protobuf:"bytes,2,opt,name=cmd,proto3" json:"cmd,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DockerExecutor) Reset() { *m = DockerExecutor{} } +func (m *DockerExecutor) String() string { return proto.CompactTextString(m) } +func (*DockerExecutor) ProtoMessage() {} +func (*DockerExecutor) Descriptor() ([]byte, []int) { + return fileDescriptor_pipelines_64ae929b596fc0b5, []int{18} +} +func (m *DockerExecutor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DockerExecutor.Unmarshal(m, b) +} +func (m *DockerExecutor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DockerExecutor.Marshal(b, m, deterministic) +} +func (dst *DockerExecutor) XXX_Merge(src proto.Message) { + xxx_messageInfo_DockerExecutor.Merge(dst, src) +} +func (m *DockerExecutor) XXX_Size() int { + return xxx_messageInfo_DockerExecutor.Size(m) +} +func (m *DockerExecutor) XXX_DiscardUnknown() { + xxx_messageInfo_DockerExecutor.DiscardUnknown(m) +} + +var xxx_messageInfo_DockerExecutor proto.InternalMessageInfo + +func (m *DockerExecutor) GetImageName() string { + if m != nil { + return m.ImageName + } + return "" +} + +func (m *DockerExecutor) GetCmd() string { + if m != nil { + return m.Cmd + } + return "" +} + +func init() { + proto.RegisterType((*ComputeEngine)(nil), "google.genomics.v1alpha2.ComputeEngine") + proto.RegisterType((*RuntimeMetadata)(nil), "google.genomics.v1alpha2.RuntimeMetadata") + proto.RegisterType((*Pipeline)(nil), "google.genomics.v1alpha2.Pipeline") + proto.RegisterType((*CreatePipelineRequest)(nil), "google.genomics.v1alpha2.CreatePipelineRequest") + proto.RegisterType((*RunPipelineArgs)(nil), "google.genomics.v1alpha2.RunPipelineArgs") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1alpha2.RunPipelineArgs.InputsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1alpha2.RunPipelineArgs.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1alpha2.RunPipelineArgs.OutputsEntry") + proto.RegisterType((*RunPipelineRequest)(nil), "google.genomics.v1alpha2.RunPipelineRequest") + proto.RegisterType((*GetPipelineRequest)(nil), "google.genomics.v1alpha2.GetPipelineRequest") + proto.RegisterType((*ListPipelinesRequest)(nil), "google.genomics.v1alpha2.ListPipelinesRequest") + proto.RegisterType((*ListPipelinesResponse)(nil), "google.genomics.v1alpha2.ListPipelinesResponse") + proto.RegisterType((*DeletePipelineRequest)(nil), "google.genomics.v1alpha2.DeletePipelineRequest") + proto.RegisterType((*GetControllerConfigRequest)(nil), "google.genomics.v1alpha2.GetControllerConfigRequest") + proto.RegisterType((*ControllerConfig)(nil), "google.genomics.v1alpha2.ControllerConfig") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1alpha2.ControllerConfig.DisksEntry") + proto.RegisterMapType((map[string]*ControllerConfig_RepeatedString)(nil), "google.genomics.v1alpha2.ControllerConfig.GcsSinksEntry") + proto.RegisterMapType((map[string]*ControllerConfig_RepeatedString)(nil), "google.genomics.v1alpha2.ControllerConfig.GcsSourcesEntry") + proto.RegisterMapType((map[string]string)(nil), "google.genomics.v1alpha2.ControllerConfig.VarsEntry") + proto.RegisterType((*ControllerConfig_RepeatedString)(nil), "google.genomics.v1alpha2.ControllerConfig.RepeatedString") + proto.RegisterType((*TimestampEvent)(nil), "google.genomics.v1alpha2.TimestampEvent") + 
proto.RegisterType((*SetOperationStatusRequest)(nil), "google.genomics.v1alpha2.SetOperationStatusRequest") + proto.RegisterType((*ServiceAccount)(nil), "google.genomics.v1alpha2.ServiceAccount") + proto.RegisterType((*LoggingOptions)(nil), "google.genomics.v1alpha2.LoggingOptions") + proto.RegisterType((*PipelineResources)(nil), "google.genomics.v1alpha2.PipelineResources") + proto.RegisterType((*PipelineResources_Disk)(nil), "google.genomics.v1alpha2.PipelineResources.Disk") + proto.RegisterType((*PipelineParameter)(nil), "google.genomics.v1alpha2.PipelineParameter") + proto.RegisterType((*PipelineParameter_LocalCopy)(nil), "google.genomics.v1alpha2.PipelineParameter.LocalCopy") + proto.RegisterType((*DockerExecutor)(nil), "google.genomics.v1alpha2.DockerExecutor") + proto.RegisterEnum("google.genomics.v1alpha2.PipelineResources_Disk_Type", PipelineResources_Disk_Type_name, PipelineResources_Disk_Type_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PipelinesV1Alpha2Client is the client API for PipelinesV1Alpha2 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PipelinesV1Alpha2Client interface { + // Creates a pipeline that can be run later. Create takes a Pipeline that + // has all fields other than `pipelineId` populated, and then returns + // the same pipeline with `pipelineId` populated. This id can be used + // to run the pipeline. + // + // Caller must have WRITE permission to the project. + CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Runs a pipeline. If `pipelineId` is specified in the request, then + // run a saved pipeline. If `ephemeralPipeline` is specified, then run + // that pipeline once without saving a copy. + // + // The caller must have READ permission to the project where the pipeline + // is stored and WRITE permission to the project where the pipeline will be + // run, as VMs will be created and storage will be used. + RunPipeline(ctx context.Context, in *RunPipelineRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Retrieves a pipeline based on ID. + // + // Caller must have READ permission to the project. + GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) + // Lists pipelines. + // + // Caller must have READ permission to the project. + ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) + // Deletes a pipeline based on ID. + // + // Caller must have WRITE permission to the project. + DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets controller configuration information. Should only be called + // by VMs created by the Pipelines Service and not by end users. + GetControllerConfig(ctx context.Context, in *GetControllerConfigRequest, opts ...grpc.CallOption) (*ControllerConfig, error) + // Sets status of a given operation. Any new timestamps (as determined by + // description) are appended to TimestampEvents. Should only be called by VMs + // created by the Pipelines Service and not by end users. 
+ SetOperationStatus(ctx context.Context, in *SetOperationStatusRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type pipelinesV1Alpha2Client struct { + cc *grpc.ClientConn +} + +func NewPipelinesV1Alpha2Client(cc *grpc.ClientConn) PipelinesV1Alpha2Client { + return &pipelinesV1Alpha2Client{cc} +} + +func (c *pipelinesV1Alpha2Client) CreatePipeline(ctx context.Context, in *CreatePipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + out := new(Pipeline) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/CreatePipeline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) RunPipeline(ctx context.Context, in *RunPipelineRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/RunPipeline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) GetPipeline(ctx context.Context, in *GetPipelineRequest, opts ...grpc.CallOption) (*Pipeline, error) { + out := new(Pipeline) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/GetPipeline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) ListPipelines(ctx context.Context, in *ListPipelinesRequest, opts ...grpc.CallOption) (*ListPipelinesResponse, error) { + out := new(ListPipelinesResponse) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/ListPipelines", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) DeletePipeline(ctx context.Context, in *DeletePipelineRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/DeletePipeline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) GetControllerConfig(ctx context.Context, in *GetControllerConfigRequest, opts ...grpc.CallOption) (*ControllerConfig, error) { + out := new(ControllerConfig) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/GetControllerConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *pipelinesV1Alpha2Client) SetOperationStatus(ctx context.Context, in *SetOperationStatusRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.genomics.v1alpha2.PipelinesV1Alpha2/SetOperationStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PipelinesV1Alpha2Server is the server API for PipelinesV1Alpha2 service. +type PipelinesV1Alpha2Server interface { + // Creates a pipeline that can be run later. Create takes a Pipeline that + // has all fields other than `pipelineId` populated, and then returns + // the same pipeline with `pipelineId` populated. This id can be used + // to run the pipeline. + // + // Caller must have WRITE permission to the project. + CreatePipeline(context.Context, *CreatePipelineRequest) (*Pipeline, error) + // Runs a pipeline. If `pipelineId` is specified in the request, then + // run a saved pipeline. If `ephemeralPipeline` is specified, then run + // that pipeline once without saving a copy. 
+ // + // The caller must have READ permission to the project where the pipeline + // is stored and WRITE permission to the project where the pipeline will be + // run, as VMs will be created and storage will be used. + RunPipeline(context.Context, *RunPipelineRequest) (*longrunning.Operation, error) + // Retrieves a pipeline based on ID. + // + // Caller must have READ permission to the project. + GetPipeline(context.Context, *GetPipelineRequest) (*Pipeline, error) + // Lists pipelines. + // + // Caller must have READ permission to the project. + ListPipelines(context.Context, *ListPipelinesRequest) (*ListPipelinesResponse, error) + // Deletes a pipeline based on ID. + // + // Caller must have WRITE permission to the project. + DeletePipeline(context.Context, *DeletePipelineRequest) (*empty.Empty, error) + // Gets controller configuration information. Should only be called + // by VMs created by the Pipelines Service and not by end users. + GetControllerConfig(context.Context, *GetControllerConfigRequest) (*ControllerConfig, error) + // Sets status of a given operation. Any new timestamps (as determined by + // description) are appended to TimestampEvents. Should only be called by VMs + // created by the Pipelines Service and not by end users. + SetOperationStatus(context.Context, *SetOperationStatusRequest) (*empty.Empty, error) +} + +func RegisterPipelinesV1Alpha2Server(s *grpc.Server, srv PipelinesV1Alpha2Server) { + s.RegisterService(&_PipelinesV1Alpha2_serviceDesc, srv) +} + +func _PipelinesV1Alpha2_CreatePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).CreatePipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/CreatePipeline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).CreatePipeline(ctx, req.(*CreatePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_RunPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RunPipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).RunPipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/RunPipeline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).RunPipeline(ctx, req.(*RunPipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_GetPipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).GetPipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/GetPipeline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).GetPipeline(ctx, req.(*GetPipelineRequest)) 
+ } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_ListPipelines_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPipelinesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).ListPipelines(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/ListPipelines", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).ListPipelines(ctx, req.(*ListPipelinesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_DeletePipeline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePipelineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).DeletePipeline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/DeletePipeline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).DeletePipeline(ctx, req.(*DeletePipelineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_GetControllerConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetControllerConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).GetControllerConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/GetControllerConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).GetControllerConfig(ctx, req.(*GetControllerConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _PipelinesV1Alpha2_SetOperationStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetOperationStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PipelinesV1Alpha2Server).SetOperationStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.genomics.v1alpha2.PipelinesV1Alpha2/SetOperationStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PipelinesV1Alpha2Server).SetOperationStatus(ctx, req.(*SetOperationStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _PipelinesV1Alpha2_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.genomics.v1alpha2.PipelinesV1Alpha2", + HandlerType: (*PipelinesV1Alpha2Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreatePipeline", + Handler: _PipelinesV1Alpha2_CreatePipeline_Handler, + }, + { + MethodName: "RunPipeline", + Handler: _PipelinesV1Alpha2_RunPipeline_Handler, + }, + { + MethodName: "GetPipeline", + Handler: _PipelinesV1Alpha2_GetPipeline_Handler, + }, + { + MethodName: "ListPipelines", + Handler: _PipelinesV1Alpha2_ListPipelines_Handler, + }, + { + MethodName: "DeletePipeline", + 
Handler: _PipelinesV1Alpha2_DeletePipeline_Handler, + }, + { + MethodName: "GetControllerConfig", + Handler: _PipelinesV1Alpha2_GetControllerConfig_Handler, + }, + { + MethodName: "SetOperationStatus", + Handler: _PipelinesV1Alpha2_SetOperationStatus_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/genomics/v1alpha2/pipelines.proto", +} + +func init() { + proto.RegisterFile("google/genomics/v1alpha2/pipelines.proto", fileDescriptor_pipelines_64ae929b596fc0b5) +} + +var fileDescriptor_pipelines_64ae929b596fc0b5 = []byte{ + // 2065 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0x4d, 0x73, 0xdb, 0xc8, + 0xd1, 0x36, 0x28, 0x4a, 0x22, 0x9a, 0x12, 0x45, 0xcf, 0xda, 0x6b, 0x9a, 0xf6, 0xbe, 0xb6, 0xe1, + 0x37, 0xbb, 0xb2, 0x9c, 0x22, 0x63, 0x79, 0x9d, 0xc8, 0x4a, 0xd5, 0xd6, 0x4a, 0x14, 0x2d, 0xb1, + 0x22, 0x4b, 0x0c, 0xa8, 0x55, 0xbe, 0x0e, 0xa8, 0x11, 0x38, 0x82, 0xb0, 0x02, 0x30, 0x08, 0x06, + 0x50, 0x59, 0x4e, 0x25, 0x55, 0x49, 0xe5, 0x90, 0xda, 0x4a, 0x2e, 0xc9, 0xfe, 0x88, 0x5c, 0x72, + 0xcc, 0xcf, 0xc8, 0x29, 0xa7, 0x9c, 0x72, 0xc9, 0x21, 0x3f, 0x21, 0xb9, 0xa5, 0x66, 0x06, 0x03, + 0x82, 0x1f, 0x92, 0xc8, 0xaa, 0x54, 0x6e, 0x33, 0x3d, 0xdd, 0x0f, 0x9e, 0xe9, 0xe9, 0xe9, 0xe9, + 0x06, 0xac, 0x3a, 0x94, 0x3a, 0x1e, 0x69, 0x3a, 0x24, 0xa0, 0xbe, 0x6b, 0xb3, 0xe6, 0xc5, 0x0b, + 0xec, 0x85, 0x67, 0x78, 0xbd, 0x19, 0xba, 0x21, 0xf1, 0xdc, 0x80, 0xb0, 0x46, 0x18, 0xd1, 0x98, + 0xa2, 0x9a, 0xd4, 0x6c, 0x28, 0xcd, 0x86, 0xd2, 0xac, 0x3f, 0x4c, 0x31, 0x70, 0xe8, 0x36, 0x71, + 0x10, 0xd0, 0x18, 0xc7, 0x2e, 0x0d, 0x52, 0xbb, 0xfa, 0xd3, 0x74, 0xd5, 0xa3, 0x81, 0x13, 0x25, + 0x41, 0xe0, 0x06, 0x4e, 0x93, 0x86, 0x24, 0x1a, 0x52, 0xfa, 0xbf, 0x54, 0x49, 0xcc, 0x4e, 0x92, + 0xd3, 0x66, 0x3f, 0x91, 0x0a, 0xe9, 0xfa, 0x83, 0xd1, 0x75, 0xe2, 0x87, 0xf1, 0x65, 0xba, 0xf8, + 0x68, 0x74, 0x31, 0x76, 0x7d, 0xc2, 0x62, 0xec, 0x87, 0xa9, 0xc2, 0xdd, 0x54, 0x21, 0x0a, 0xed, + 0xa6, 0x4d, 0xfb, 0x44, 0x8a, 0x8d, 0xaf, 0x34, 0x58, 0x6e, 0x51, 0x3f, 0x4c, 0x62, 0xd2, 0x0e, + 0x1c, 0x37, 0x20, 0xe8, 0x29, 0x2c, 0xbb, 0x01, 0x8b, 0x71, 0x60, 0x13, 0x2b, 0xc0, 0x3e, 0xa9, + 0x69, 0x8f, 0xb5, 0x55, 0xdd, 0x5c, 0x52, 0xc2, 0x03, 0xec, 0x13, 0x84, 0xa0, 0xf8, 0x9e, 0x06, + 0xa4, 0x56, 0x10, 0x6b, 0x62, 0x8c, 0x9e, 0xc0, 0x92, 0x8f, 0xed, 0x33, 0x37, 0x20, 0x56, 0x7c, + 0x19, 0x92, 0xda, 0x9c, 0x58, 0x2b, 0xa7, 0xb2, 0xa3, 0xcb, 0x90, 0xa0, 0x8f, 0x00, 0xfa, 0x2e, + 0x3b, 0x17, 0xb8, 0xac, 0x56, 0x7c, 0x3c, 0xb7, 0xaa, 0x9b, 0x3a, 0x97, 0x70, 0x50, 0x66, 0x60, + 0x58, 0x31, 0x93, 0x80, 0x33, 0x7f, 0x4b, 0x62, 0xdc, 0xc7, 0x31, 0x46, 0x07, 0x50, 0xb1, 0x25, + 0x3d, 0x8b, 0x08, 0x7e, 0x82, 0x4e, 0x79, 0xfd, 0x93, 0xc6, 0x55, 0x47, 0xd1, 0x18, 0xda, 0x8e, + 0xb9, 0x6c, 0xe7, 0xa7, 0xc6, 0x5f, 0xe6, 0xa0, 0xd4, 0x4d, 0x4f, 0x95, 0xd3, 0x09, 0x23, 0xfa, + 0x25, 0xb1, 0x63, 0xcb, 0xed, 0xa7, 0xfb, 0xd4, 0x53, 0x49, 0xa7, 0xcf, 0x37, 0x29, 0x1c, 0x90, + 0x6e, 0x92, 0x8f, 0xd1, 0x63, 0x28, 0xf7, 0x09, 0xb3, 0x23, 0x37, 0xe4, 0x27, 0xa3, 0xf6, 0x98, + 0x13, 0xa1, 0x63, 0xa8, 0xba, 0x41, 0x98, 0xc4, 0x56, 0x88, 0x23, 0xec, 0x93, 0x98, 0x44, 0xac, + 0x56, 0x7a, 0x3c, 0xb7, 0x5a, 0x5e, 0x7f, 0x7e, 0x35, 0x67, 0x45, 0xa9, 0xab, 0x6c, 0xcc, 0x15, + 0x01, 0x92, 0xcd, 0x19, 0xfa, 0x21, 0xdc, 0xa6, 0x49, 0x3c, 0x02, 0xac, 0xcf, 0x0e, 0x5c, 0x95, + 0x28, 0x39, 0xe4, 0x6d, 0x58, 0xe8, 0x53, 0xfb, 0x9c, 0x44, 0xb5, 0x79, 0xe1, 0xdb, 0xd5, 0xab, + 0xe1, 0x76, 0x84, 0x5e, 0xfb, 0x1d, 0xb1, 0x93, 0x98, 0x46, 0x7b, 0xb7, 0xcc, 0xd4, 0x12, 0x75, + 0x40, 0x8f, 
0x08, 0xa3, 0x49, 0x64, 0x13, 0x56, 0x5b, 0x10, 0x30, 0x53, 0xb0, 0x32, 0x95, 0x89, + 0x39, 0xb0, 0x46, 0x8f, 0xa0, 0xac, 0xee, 0x1d, 0x3f, 0x96, 0x45, 0xe1, 0x62, 0x50, 0xa2, 0x4e, + 0x7f, 0x1b, 0xa0, 0x44, 0x52, 0x06, 0xc6, 0x0f, 0xe0, 0x6e, 0x2b, 0x22, 0x38, 0x26, 0x03, 0xc8, + 0x9f, 0x26, 0x84, 0xc5, 0xe8, 0x33, 0x28, 0x29, 0x93, 0x34, 0x64, 0x8c, 0x29, 0xf8, 0x64, 0x36, + 0xc6, 0x9f, 0x17, 0x44, 0x30, 0xaa, 0x95, 0xad, 0xc8, 0x61, 0x37, 0xc5, 0xcb, 0x5b, 0x58, 0x10, + 0x87, 0xc6, 0x6a, 0x05, 0x71, 0x2c, 0xaf, 0xae, 0xfe, 0xe0, 0x08, 0x72, 0xa3, 0x23, 0xec, 0xda, + 0x41, 0x1c, 0x5d, 0x9a, 0x29, 0x08, 0xea, 0xc2, 0xa2, 0x3c, 0x2a, 0x56, 0x9b, 0x13, 0x78, 0xdf, + 0x9e, 0x1e, 0xef, 0x50, 0x1a, 0x4a, 0x40, 0x05, 0x83, 0xbe, 0x0f, 0x2b, 0x8c, 0x44, 0x17, 0xae, + 0x4d, 0x2c, 0x6c, 0xdb, 0x34, 0x09, 0xe2, 0x5a, 0xf1, 0xa6, 0x13, 0xef, 0x49, 0x83, 0x2d, 0xa9, + 0x6f, 0x56, 0xd8, 0xd0, 0x1c, 0x3d, 0x00, 0xdd, 0xf6, 0x5c, 0x12, 0x08, 0x8f, 0xcc, 0x0b, 0x8f, + 0x94, 0xa4, 0xa0, 0xd3, 0xff, 0x6f, 0x06, 0xc5, 0x36, 0x2c, 0x7a, 0xd4, 0x71, 0xdc, 0xc0, 0x11, + 0x01, 0x71, 0x2d, 0xe5, 0x7d, 0xa9, 0x78, 0x28, 0xee, 0x23, 0x33, 0x95, 0x21, 0x3a, 0x81, 0x27, + 0xe7, 0x84, 0x84, 0xd6, 0x85, 0x6f, 0x61, 0xcf, 0xbd, 0x20, 0x16, 0x0d, 0xac, 0x53, 0xec, 0x7a, + 0x49, 0x44, 0x2c, 0x95, 0x6b, 0x6b, 0x25, 0x81, 0x7e, 0x5f, 0xa1, 0xab, 0x7c, 0xda, 0xd8, 0x49, + 0x15, 0xcc, 0x87, 0x1c, 0xe3, 0xd8, 0xdf, 0xe2, 0x08, 0x87, 0xc1, 0x1b, 0x69, 0xaf, 0x56, 0x79, + 0x0c, 0x78, 0xf8, 0x84, 0x78, 0xea, 0x6a, 0xce, 0x10, 0x03, 0xfb, 0xc2, 0x2e, 0x8d, 0x01, 0x09, + 0x52, 0x7f, 0x0d, 0xe5, 0x5c, 0x68, 0xa0, 0x2a, 0xcc, 0x9d, 0x93, 0xcb, 0x34, 0xf2, 0xf8, 0x10, + 0xdd, 0x81, 0xf9, 0x0b, 0xec, 0x25, 0x2a, 0x49, 0xc9, 0xc9, 0x66, 0x61, 0x43, 0xab, 0x6f, 0xc2, + 0x52, 0x3e, 0x0a, 0x66, 0xb2, 0x7d, 0x0d, 0xe5, 0x1c, 0x9b, 0x59, 0x4c, 0x8d, 0x7f, 0x6a, 0x80, + 0x72, 0x3b, 0x53, 0xd7, 0xf1, 0xc9, 0xf0, 0xa5, 0x16, 0x50, 0x7b, 0xb7, 0xf2, 0xd7, 0x1a, 0xf5, + 0x00, 0x91, 0xf0, 0x8c, 0xf8, 0x24, 0xc2, 0x9e, 0x95, 0xdd, 0xdd, 0xc2, 0xb4, 0x77, 0x77, 0xef, + 0x96, 0x79, 0x3b, 0xb3, 0xcf, 0x52, 0xfc, 0x01, 0x2c, 0x67, 0xdf, 0xc5, 0x91, 0xc3, 0x44, 0xc6, + 0x2e, 0xaf, 0x3f, 0x9b, 0xfa, 0x58, 0xcc, 0xa5, 0x30, 0x37, 0xe3, 0xb9, 0x27, 0x4b, 0x11, 0xaf, + 0x00, 0xed, 0x92, 0x78, 0x74, 0xa7, 0x8f, 0x26, 0xec, 0x34, 0xbf, 0x4f, 0xe3, 0xf7, 0x1a, 0xdc, + 0xd9, 0x77, 0x59, 0x66, 0xc8, 0x94, 0xe5, 0x0d, 0xe9, 0xe5, 0x11, 0x94, 0xf9, 0x13, 0x64, 0x85, + 0x11, 0x39, 0x75, 0xdf, 0xa5, 0x9e, 0x07, 0x2e, 0xea, 0x0a, 0x09, 0xbf, 0x8b, 0x21, 0x76, 0x88, + 0xc5, 0xdc, 0xf7, 0xf2, 0xf5, 0x9d, 0x37, 0x4b, 0x5c, 0xd0, 0x73, 0xdf, 0xcb, 0xb7, 0x8e, 0x2f, + 0xc6, 0xf4, 0x9c, 0x04, 0xe2, 0xda, 0x73, 0x70, 0xec, 0x90, 0x23, 0x2e, 0x30, 0x7e, 0xa9, 0xc1, + 0xdd, 0x11, 0x52, 0x2c, 0xa4, 0x01, 0x23, 0xe8, 0x73, 0xd0, 0xb3, 0x32, 0xa8, 0xa6, 0x89, 0xa0, + 0x9e, 0x26, 0x93, 0x0e, 0x8c, 0xd0, 0xc7, 0xb0, 0x12, 0x90, 0x77, 0xfc, 0xdd, 0xca, 0xbe, 0x2f, + 0xc9, 0x2f, 0x73, 0x71, 0x37, 0xe3, 0xb0, 0x01, 0x77, 0x77, 0x88, 0x47, 0xc6, 0x73, 0xf9, 0x8d, + 0x2e, 0xfd, 0x12, 0xea, 0xbb, 0x24, 0x6e, 0xd1, 0x20, 0x8e, 0xa8, 0xe7, 0x91, 0xa8, 0x45, 0x83, + 0x53, 0xd7, 0x19, 0xc4, 0xde, 0x52, 0x56, 0x6c, 0x0d, 0xec, 0xcb, 0x99, 0xac, 0xd3, 0x47, 0xcf, + 0xa0, 0x7a, 0x81, 0x3d, 0xb7, 0x2f, 0x75, 0x06, 0x1c, 0x8b, 0xe6, 0xca, 0x40, 0x2e, 0x59, 0xfe, + 0x6d, 0x01, 0xaa, 0xa3, 0x5f, 0xe2, 0xf7, 0xc1, 0xf5, 0xb1, 0xa3, 0x8a, 0x25, 0x39, 0xe1, 0xf7, + 0xc6, 0xf6, 0xfb, 0xe9, 0x66, 0xf9, 0x10, 0x3d, 0x86, 0x25, 0xc7, 0x66, 0x96, 0x47, 0x1d, 0x2b, + 0xc4, 0xf1, 0x59, 0x5a, 0x3f, 0x80, 
0x63, 0xb3, 0x7d, 0xea, 0x74, 0x71, 0x7c, 0x36, 0x56, 0x45, + 0x15, 0xc7, 0xab, 0xa8, 0x3d, 0x28, 0x5e, 0xe0, 0x88, 0xd5, 0xe6, 0xc5, 0x61, 0x7c, 0x7a, 0x5d, + 0x25, 0x34, 0x4c, 0xb3, 0x71, 0x8c, 0xa3, 0x34, 0xc1, 0x08, 0x04, 0xf4, 0x3d, 0x98, 0xe7, 0xd5, + 0x17, 0x4f, 0xce, 0x37, 0x24, 0xab, 0x31, 0xa8, 0x1d, 0x6e, 0x27, 0xb1, 0x24, 0x06, 0xfa, 0x09, + 0x94, 0xf9, 0xde, 0x54, 0xbe, 0x5f, 0x14, 0x90, 0x9b, 0x33, 0x40, 0xee, 0xda, 0xac, 0x27, 0x8d, + 0x25, 0x2e, 0x77, 0x4b, 0x2a, 0x40, 0x5f, 0x80, 0x2e, 0xc0, 0xdd, 0xe0, 0x5c, 0x95, 0x53, 0x1b, + 0x33, 0x42, 0x73, 0x53, 0x09, 0x5c, 0x72, 0xd2, 0x69, 0x7d, 0x15, 0x2a, 0x26, 0x09, 0x79, 0xfd, + 0xd0, 0xef, 0xc5, 0x11, 0x7f, 0x24, 0x3e, 0x84, 0x05, 0x91, 0xcc, 0x64, 0xac, 0xeb, 0x66, 0x3a, + 0xab, 0x7f, 0x07, 0xf4, 0xcc, 0x7b, 0x33, 0xe5, 0xd2, 0x0d, 0x80, 0x81, 0xaf, 0x66, 0xb2, 0x7c, + 0x07, 0x2b, 0x23, 0x2e, 0x99, 0x60, 0x7e, 0x98, 0x37, 0x2f, 0xaf, 0xbf, 0x9e, 0xc1, 0x29, 0xc3, + 0x3b, 0xcf, 0x7f, 0xf9, 0x02, 0x96, 0x87, 0x3c, 0xf6, 0x3f, 0xfa, 0xae, 0xe1, 0x41, 0xe5, 0x48, + 0xf5, 0x2d, 0xed, 0x0b, 0x12, 0xc4, 0xa3, 0xf5, 0xb6, 0x36, 0x5e, 0x6f, 0x6f, 0x80, 0x9e, 0xf5, + 0x3a, 0x29, 0x99, 0xfa, 0xd8, 0xeb, 0x9d, 0xa1, 0x9a, 0x03, 0x65, 0xe3, 0xeb, 0x02, 0xdc, 0xef, + 0x91, 0xf8, 0x50, 0xe5, 0x81, 0x5e, 0x8c, 0xe3, 0x84, 0xcd, 0x90, 0x35, 0x7a, 0x50, 0xcd, 0xd0, + 0x2c, 0xc2, 0xf9, 0xaa, 0xd2, 0xef, 0x9a, 0xea, 0x64, 0x78, 0x83, 0xe6, 0x4a, 0x3c, 0x34, 0x67, + 0xa8, 0x09, 0x40, 0xa2, 0x88, 0x46, 0x16, 0xef, 0xd2, 0x44, 0x82, 0xa8, 0xac, 0x57, 0x15, 0x5c, + 0x14, 0xda, 0x8d, 0x16, 0xed, 0x13, 0x53, 0x17, 0x3a, 0x7c, 0xc8, 0x1b, 0x36, 0x69, 0xe0, 0x13, + 0xc6, 0x78, 0x0e, 0x92, 0x29, 0x63, 0x49, 0x08, 0xdf, 0x4a, 0xd9, 0xc4, 0x04, 0x37, 0x3f, 0x39, + 0xc1, 0x7d, 0x06, 0x95, 0xe1, 0xa2, 0x8f, 0x87, 0x28, 0xf1, 0xb1, 0xeb, 0xa9, 0xec, 0x26, 0x26, + 0xfc, 0xa6, 0x30, 0x9b, 0x86, 0x44, 0xee, 0x59, 0x37, 0xd3, 0x99, 0xf1, 0x1c, 0x2a, 0xc3, 0x15, + 0x18, 0xba, 0x0f, 0xfc, 0xc6, 0xc9, 0x8c, 0x27, 0x21, 0x16, 0x1d, 0x9b, 0xf1, 0x74, 0x67, 0xfc, + 0xbd, 0x08, 0xb7, 0xc7, 0x0a, 0x3f, 0xb4, 0x06, 0xb7, 0x7d, 0x37, 0x70, 0xfd, 0xc4, 0xb7, 0xec, + 0x30, 0xb1, 0x6c, 0x1a, 0x89, 0xfb, 0xc8, 0x5f, 0xb4, 0x95, 0x74, 0xa1, 0x15, 0x26, 0x2d, 0x2e, + 0xe6, 0x11, 0x12, 0x46, 0x84, 0xf7, 0xc2, 0xee, 0x89, 0x27, 0xc3, 0xb1, 0x64, 0xe6, 0x45, 0xe8, + 0xff, 0xa1, 0xa2, 0xd0, 0x22, 0xec, 0x5b, 0xce, 0x89, 0xf0, 0xaa, 0x66, 0x2e, 0xa5, 0x52, 0x13, + 0xfb, 0xbb, 0x27, 0xe8, 0x8d, 0xca, 0x85, 0x45, 0x71, 0x82, 0xdf, 0x9a, 0xa1, 0x50, 0x15, 0xc9, + 0x50, 0xa5, 0xc1, 0x3b, 0x30, 0xcf, 0xdb, 0x61, 0x99, 0x9e, 0x75, 0x53, 0x4e, 0xd0, 0x33, 0xb8, + 0x7d, 0x42, 0x69, 0x6c, 0x89, 0xf6, 0x97, 0x3f, 0xd0, 0x9c, 0xc6, 0x82, 0xd8, 0x51, 0x85, 0x2f, + 0x70, 0x04, 0xfe, 0x4e, 0xef, 0x9e, 0xf0, 0x97, 0x3a, 0xa0, 0x16, 0xee, 0xf7, 0x23, 0xc2, 0x98, + 0xa8, 0x76, 0x4b, 0xa6, 0x1e, 0xd0, 0x2d, 0x29, 0xa8, 0xff, 0xa9, 0x00, 0x45, 0xae, 0x9d, 0xb5, + 0xa7, 0x5a, 0xae, 0x3d, 0xed, 0x40, 0x51, 0xbc, 0x1a, 0x05, 0x11, 0x36, 0xaf, 0x66, 0xdd, 0x43, + 0x83, 0xbf, 0x2f, 0xa6, 0x80, 0x40, 0xf7, 0x60, 0x51, 0xf1, 0x94, 0xb5, 0xc4, 0x02, 0x93, 0xfc, + 0xf8, 0xb9, 0x0b, 0x9b, 0x34, 0xd0, 0xd2, 0x19, 0x7f, 0xa5, 0x71, 0x12, 0x53, 0xab, 0x2f, 0xde, + 0x70, 0xb1, 0xb9, 0x92, 0x09, 0x5c, 0x24, 0x5f, 0x75, 0xae, 0xe0, 0xf3, 0x78, 0xb2, 0x42, 0xea, + 0x06, 0xb1, 0xa8, 0xb4, 0x75, 0x13, 0x84, 0xa8, 0xcb, 0x25, 0x46, 0x0f, 0x8a, 0xe2, 0x81, 0xbb, + 0x03, 0xd5, 0xa3, 0x1f, 0x75, 0xdb, 0xd6, 0x17, 0x07, 0xbd, 0x6e, 0xbb, 0xd5, 0x79, 0xd3, 0x69, + 0xef, 0x54, 0x6f, 0x21, 0x04, 0x95, 0x6e, 0xdb, 0xec, 0x75, 
0x7a, 0x47, 0xed, 0x83, 0x23, 0x6b, + 0x6f, 0x67, 0xa7, 0xaa, 0x8d, 0xc8, 0x7a, 0xbd, 0x9d, 0x6a, 0x01, 0x2d, 0x83, 0xbe, 0x7f, 0xd8, + 0xda, 0xda, 0x17, 0xd3, 0x39, 0xe3, 0xdf, 0xda, 0x20, 0xc2, 0xb2, 0xa6, 0x77, 0xa2, 0xf3, 0x46, + 0x72, 0x4d, 0x61, 0x3c, 0xd7, 0x3c, 0x85, 0xe5, 0x3e, 0x39, 0xc5, 0x89, 0x17, 0x5b, 0x32, 0xf9, + 0xc9, 0x8e, 0x67, 0x29, 0x15, 0x1e, 0x73, 0x19, 0x3a, 0x02, 0xf0, 0xa8, 0x8d, 0x3d, 0xcb, 0xa6, + 0xe1, 0x65, 0xda, 0xf6, 0xbc, 0x9a, 0xa1, 0x43, 0x6f, 0xec, 0x73, 0xeb, 0x16, 0x0d, 0x2f, 0x4d, + 0xdd, 0x53, 0xc3, 0xfa, 0x4b, 0xd0, 0x33, 0x39, 0x67, 0x9f, 0xbb, 0x4c, 0x62, 0xcc, 0x65, 0x3c, + 0xb8, 0xd4, 0xdf, 0x0a, 0x3e, 0x36, 0xb6, 0xa0, 0x32, 0xdc, 0xb1, 0xf3, 0xe0, 0x12, 0xb5, 0x49, + 0xfe, 0xd7, 0x8e, 0x2e, 0x24, 0xe2, 0xbf, 0xce, 0x58, 0xc5, 0xb2, 0xfe, 0x9b, 0xd2, 0xc0, 0x7d, + 0xec, 0xf8, 0xc5, 0x96, 0x20, 0x8d, 0x7e, 0xab, 0x41, 0x65, 0xb8, 0xef, 0x46, 0xcd, 0x6b, 0x5e, + 0x80, 0x49, 0x1d, 0x7a, 0x7d, 0x8a, 0x2a, 0xd2, 0xf8, 0xc6, 0xaf, 0xfe, 0xfa, 0x8f, 0x3f, 0x14, + 0x1e, 0x19, 0x1f, 0x4c, 0xf8, 0x27, 0xb7, 0x99, 0x55, 0xe2, 0xe8, 0x17, 0x50, 0xce, 0x95, 0xed, + 0xe8, 0x9b, 0x53, 0x55, 0xf7, 0x8a, 0xc7, 0x47, 0x4a, 0x3b, 0xf7, 0x77, 0xae, 0x91, 0x3d, 0x0a, + 0x86, 0x21, 0x28, 0x3c, 0x34, 0xee, 0x4d, 0xa2, 0x10, 0x25, 0xc1, 0xa6, 0xb6, 0x86, 0xbe, 0xd2, + 0xa0, 0x9c, 0x6b, 0x05, 0xae, 0x23, 0x30, 0xde, 0x31, 0x4c, 0xe5, 0x88, 0x67, 0x82, 0xc5, 0x53, + 0xf4, 0x64, 0x02, 0x8b, 0xe6, 0xcf, 0x72, 0xd5, 0xf1, 0xcf, 0xd1, 0xef, 0x34, 0x58, 0x1e, 0x2a, + 0xe5, 0x51, 0xe3, 0x9a, 0x5e, 0x79, 0x42, 0x23, 0x52, 0x6f, 0x4e, 0xad, 0x2f, 0x7b, 0x04, 0xe3, + 0x81, 0x60, 0x77, 0x17, 0x4d, 0x3a, 0x26, 0xf4, 0x6b, 0x0d, 0x2a, 0xc3, 0x75, 0xfd, 0x75, 0xb1, + 0x32, 0xb1, 0x03, 0xa8, 0x7f, 0x38, 0xf6, 0xa2, 0xb7, 0xfd, 0x30, 0xbe, 0x54, 0x6e, 0x59, 0x9b, + 0xc2, 0x2d, 0x7f, 0xd4, 0xe0, 0x83, 0x09, 0x4d, 0x02, 0xfa, 0xf4, 0xda, 0xb3, 0xba, 0xa2, 0xa7, + 0xa8, 0xaf, 0x4d, 0x5f, 0xef, 0x18, 0x4d, 0x41, 0xf2, 0x19, 0xfa, 0x64, 0x52, 0x04, 0x39, 0x13, + 0x28, 0x7d, 0xad, 0x01, 0x1a, 0x2f, 0x4c, 0xd0, 0xcb, 0xeb, 0xfe, 0xd2, 0x5c, 0x51, 0xc6, 0x5c, + 0xe9, 0xb9, 0x17, 0x82, 0xd4, 0xf3, 0xfa, 0xc7, 0x93, 0x48, 0xb1, 0x31, 0xb8, 0x4d, 0x6d, 0x6d, + 0x3b, 0x84, 0x7b, 0x36, 0xf5, 0x27, 0x91, 0xd8, 0xae, 0x64, 0x31, 0xd1, 0xe5, 0x9f, 0xe9, 0x6a, + 0x3f, 0xfe, 0x5c, 0xa9, 0x51, 0x0f, 0x07, 0x4e, 0x83, 0x46, 0x4e, 0xd3, 0x21, 0x81, 0x20, 0xd1, + 0x94, 0x4b, 0x38, 0x74, 0xd9, 0xf8, 0x3f, 0xf7, 0xef, 0x2a, 0xc9, 0xbf, 0x34, 0xed, 0x64, 0x41, + 0xe8, 0xbf, 0xfc, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x13, 0x10, 0x96, 0x1d, 0xa2, 0x17, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/home/graph/v1/device.pb.go b/vendor/google.golang.org/genproto/googleapis/home/graph/v1/device.pb.go new file mode 100644 index 0000000..92e83f4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/home/graph/v1/device.pb.go @@ -0,0 +1,316 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/home/graph/v1/device.proto + +package graph // import "google.golang.org/genproto/googleapis/home/graph/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Third-party partner's device definition. +type Device struct { + // Third-party partner's device ID. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Hardware type of the device (e.g. light, outlet, etc). + Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + // Traits supported by the device. + Traits []string `protobuf:"bytes,3,rep,name=traits,proto3" json:"traits,omitempty"` + // Name of the device given by the third party. This includes names given to + // the device via third party device manufacturer's app, model names for the + // device, etc. + Name *DeviceNames `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // Indicates whether the state of this device is being reported to Google + // through ReportStateAndNotification call. + WillReportState bool `protobuf:"varint,5,opt,name=will_report_state,json=willReportState,proto3" json:"will_report_state,omitempty"` + // If the third-party partner's cloud configuration includes placing devices + // in rooms, the name of the room can be provided here. + RoomHint string `protobuf:"bytes,6,opt,name=room_hint,json=roomHint,proto3" json:"room_hint,omitempty"` + // As in roomHint, for structures that users set up in the partner's system. + StructureHint string `protobuf:"bytes,7,opt,name=structure_hint,json=structureHint,proto3" json:"structure_hint,omitempty"` + // Device manufacturer, model, hardware version, and software version. + DeviceInfo *DeviceInfo `protobuf:"bytes,8,opt,name=device_info,json=deviceInfo,proto3" json:"device_info,omitempty"` + // Attributes for the traits supported by the device. + Attributes *_struct.Struct `protobuf:"bytes,9,opt,name=attributes,proto3" json:"attributes,omitempty"` + // Custom JSON data provided by the manufacturer and attached to QUERY and + // EXECUTE requests in AoG. 
+ CustomData string `protobuf:"bytes,10,opt,name=custom_data,json=customData,proto3" json:"custom_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Device) Reset() { *m = Device{} } +func (m *Device) String() string { return proto.CompactTextString(m) } +func (*Device) ProtoMessage() {} +func (*Device) Descriptor() ([]byte, []int) { + return fileDescriptor_device_0de6da35c00d86c7, []int{0} +} +func (m *Device) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Device.Unmarshal(m, b) +} +func (m *Device) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Device.Marshal(b, m, deterministic) +} +func (dst *Device) XXX_Merge(src proto.Message) { + xxx_messageInfo_Device.Merge(dst, src) +} +func (m *Device) XXX_Size() int { + return xxx_messageInfo_Device.Size(m) +} +func (m *Device) XXX_DiscardUnknown() { + xxx_messageInfo_Device.DiscardUnknown(m) +} + +var xxx_messageInfo_Device proto.InternalMessageInfo + +func (m *Device) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Device) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *Device) GetTraits() []string { + if m != nil { + return m.Traits + } + return nil +} + +func (m *Device) GetName() *DeviceNames { + if m != nil { + return m.Name + } + return nil +} + +func (m *Device) GetWillReportState() bool { + if m != nil { + return m.WillReportState + } + return false +} + +func (m *Device) GetRoomHint() string { + if m != nil { + return m.RoomHint + } + return "" +} + +func (m *Device) GetStructureHint() string { + if m != nil { + return m.StructureHint + } + return "" +} + +func (m *Device) GetDeviceInfo() *DeviceInfo { + if m != nil { + return m.DeviceInfo + } + return nil +} + +func (m *Device) GetAttributes() *_struct.Struct { + if m != nil { + return m.Attributes + } + return nil +} + +func (m *Device) GetCustomData() string { + if m != nil { + return m.CustomData + } + return "" +} + +// Different names for the device. +type DeviceNames struct { + // Primary name of the device, generally provided by the user. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Additional names provided by the user for the device. + Nicknames []string `protobuf:"bytes,2,rep,name=nicknames,proto3" json:"nicknames,omitempty"` + // List of names provided by the partner rather than the user, often + // manufacturer names, SKUs, etc. 
+ DefaultNames []string `protobuf:"bytes,3,rep,name=default_names,json=defaultNames,proto3" json:"default_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceNames) Reset() { *m = DeviceNames{} } +func (m *DeviceNames) String() string { return proto.CompactTextString(m) } +func (*DeviceNames) ProtoMessage() {} +func (*DeviceNames) Descriptor() ([]byte, []int) { + return fileDescriptor_device_0de6da35c00d86c7, []int{1} +} +func (m *DeviceNames) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceNames.Unmarshal(m, b) +} +func (m *DeviceNames) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceNames.Marshal(b, m, deterministic) +} +func (dst *DeviceNames) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceNames.Merge(dst, src) +} +func (m *DeviceNames) XXX_Size() int { + return xxx_messageInfo_DeviceNames.Size(m) +} +func (m *DeviceNames) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceNames.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceNames proto.InternalMessageInfo + +func (m *DeviceNames) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeviceNames) GetNicknames() []string { + if m != nil { + return m.Nicknames + } + return nil +} + +func (m *DeviceNames) GetDefaultNames() []string { + if m != nil { + return m.DefaultNames + } + return nil +} + +// Device information. +type DeviceInfo struct { + // Device manufacturer. + Manufacturer string `protobuf:"bytes,1,opt,name=manufacturer,proto3" json:"manufacturer,omitempty"` + // Device model. + Model string `protobuf:"bytes,2,opt,name=model,proto3" json:"model,omitempty"` + // Device hardware version. + HwVersion string `protobuf:"bytes,3,opt,name=hw_version,json=hwVersion,proto3" json:"hw_version,omitempty"` + // Device software version. 
+ SwVersion string `protobuf:"bytes,4,opt,name=sw_version,json=swVersion,proto3" json:"sw_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeviceInfo) Reset() { *m = DeviceInfo{} } +func (m *DeviceInfo) String() string { return proto.CompactTextString(m) } +func (*DeviceInfo) ProtoMessage() {} +func (*DeviceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_device_0de6da35c00d86c7, []int{2} +} +func (m *DeviceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeviceInfo.Unmarshal(m, b) +} +func (m *DeviceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeviceInfo.Marshal(b, m, deterministic) +} +func (dst *DeviceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeviceInfo.Merge(dst, src) +} +func (m *DeviceInfo) XXX_Size() int { + return xxx_messageInfo_DeviceInfo.Size(m) +} +func (m *DeviceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DeviceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DeviceInfo proto.InternalMessageInfo + +func (m *DeviceInfo) GetManufacturer() string { + if m != nil { + return m.Manufacturer + } + return "" +} + +func (m *DeviceInfo) GetModel() string { + if m != nil { + return m.Model + } + return "" +} + +func (m *DeviceInfo) GetHwVersion() string { + if m != nil { + return m.HwVersion + } + return "" +} + +func (m *DeviceInfo) GetSwVersion() string { + if m != nil { + return m.SwVersion + } + return "" +} + +func init() { + proto.RegisterType((*Device)(nil), "google.home.graph.v1.Device") + proto.RegisterType((*DeviceNames)(nil), "google.home.graph.v1.DeviceNames") + proto.RegisterType((*DeviceInfo)(nil), "google.home.graph.v1.DeviceInfo") +} + +func init() { + proto.RegisterFile("google/home/graph/v1/device.proto", fileDescriptor_device_0de6da35c00d86c7) +} + +var fileDescriptor_device_0de6da35c00d86c7 = []byte{ + // 470 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xc1, 0x6f, 0xd4, 0x3c, + 0x10, 0xc5, 0x95, 0xdd, 0xed, 0x7e, 0x9b, 0xd9, 0xb6, 0x9f, 0xb0, 0x2a, 0xb0, 0xa0, 0x88, 0x74, + 0x11, 0xd2, 0x8a, 0x43, 0xa2, 0x82, 0x10, 0x42, 0x9c, 0xa8, 0x7a, 0x80, 0x0b, 0x42, 0xa9, 0xc4, + 0x81, 0x4b, 0xe4, 0x4d, 0x9c, 0xc4, 0x22, 0xb1, 0x23, 0x7b, 0xb2, 0x2b, 0xee, 0x1c, 0xf8, 0xb3, + 0x51, 0xc6, 0xd9, 0x6e, 0x2b, 0x55, 0xdc, 0xc6, 0xef, 0xfd, 0x3c, 0x1e, 0x3f, 0x27, 0x70, 0x51, + 0x19, 0x53, 0x35, 0x32, 0xa9, 0x4d, 0x2b, 0x93, 0xca, 0x8a, 0xae, 0x4e, 0xb6, 0x97, 0x49, 0x21, + 0xb7, 0x2a, 0x97, 0x71, 0x67, 0x0d, 0x1a, 0x76, 0xe6, 0x91, 0x78, 0x40, 0x62, 0x42, 0xe2, 0xed, + 0xe5, 0xd3, 0xf3, 0x71, 0x23, 0x31, 0x9b, 0xbe, 0x4c, 0x1c, 0xda, 0x3e, 0x47, 0xbf, 0x67, 0xf5, + 0x67, 0x0a, 0xf3, 0x6b, 0x6a, 0xc2, 0x4e, 0x61, 0xa2, 0x0a, 0x1e, 0x44, 0xc1, 0x3a, 0x4c, 0x27, + 0xaa, 0x60, 0x0c, 0x66, 0xf8, 0xab, 0x93, 0x7c, 0x42, 0x0a, 0xd5, 0xec, 0x31, 0xcc, 0xd1, 0x0a, + 0x85, 0x8e, 0x4f, 0xa3, 0xe9, 0x3a, 0x4c, 0xc7, 0x15, 0x7b, 0x07, 0x33, 0x2d, 0x5a, 0xc9, 0x67, + 0x51, 0xb0, 0x5e, 0xbe, 0xb9, 0x88, 0x1f, 0x9a, 0x24, 0xf6, 0xe7, 0x7c, 0x15, 0xad, 0x74, 0x29, + 0xe1, 0xec, 0x35, 0x3c, 0xda, 0xa9, 0xa6, 0xc9, 0xac, 0xec, 0x8c, 0xc5, 0xcc, 0xa1, 0x40, 0xc9, + 0x8f, 0xa2, 0x60, 0xbd, 0x48, 0xff, 0x1f, 0x8c, 0x94, 0xf4, 0x9b, 0x41, 0x66, 0xcf, 0x20, 0xb4, + 0xc6, 0xb4, 0x59, 0xad, 0x34, 0xf2, 0x39, 0xcd, 0xb4, 0x18, 0x84, 0xcf, 0x4a, 0x23, 0x7b, 0x05, + 0xa7, 0xfe, 0x5a, 0xbd, 0x95, 0x9e, 0xf8, 0x8f, 0x88, 0x93, 0x5b, 0x95, 0xb0, 0x4f, 0xb0, 0xf4, 
+ 0x89, 0x65, 0x4a, 0x97, 0x86, 0x2f, 0x68, 0xda, 0xe8, 0x5f, 0xd3, 0x7e, 0xd1, 0xa5, 0x49, 0xa1, + 0xb8, 0xad, 0xd9, 0x7b, 0x00, 0x81, 0x68, 0xd5, 0xa6, 0x47, 0xe9, 0x78, 0x48, 0x1d, 0x9e, 0xec, + 0x3b, 0xec, 0x33, 0x8e, 0x6f, 0xe8, 0xd8, 0xf4, 0x0e, 0xca, 0x5e, 0xc0, 0x32, 0xef, 0x1d, 0x9a, + 0x36, 0x2b, 0x04, 0x0a, 0x0e, 0x34, 0x1f, 0x78, 0xe9, 0x5a, 0xa0, 0x58, 0x15, 0xb0, 0xbc, 0x93, + 0xd0, 0x10, 0x3f, 0x45, 0xea, 0x1f, 0xc4, 0xe7, 0x75, 0x0e, 0xa1, 0x56, 0xf9, 0xcf, 0xa1, 0x76, + 0x7c, 0x42, 0x2f, 0x70, 0x10, 0xd8, 0x4b, 0x38, 0x29, 0x64, 0x29, 0xfa, 0x06, 0x33, 0x4f, 0xf8, + 0x37, 0x3a, 0x1e, 0x45, 0x6a, 0xbb, 0xfa, 0x1d, 0x00, 0x1c, 0xae, 0xc6, 0x56, 0x70, 0xdc, 0x0a, + 0xdd, 0x97, 0x82, 0x42, 0xb2, 0xe3, 0x69, 0xf7, 0x34, 0x76, 0x06, 0x47, 0xad, 0x29, 0x64, 0x33, + 0x7e, 0x09, 0x7e, 0xc1, 0x9e, 0x03, 0xd4, 0xbb, 0x6c, 0x2b, 0xad, 0x53, 0x46, 0xf3, 0x29, 0x59, + 0x61, 0xbd, 0xfb, 0xee, 0x85, 0xc1, 0x76, 0x07, 0x7b, 0xe6, 0x6d, 0xb7, 0xb7, 0xaf, 0x36, 0xc0, + 0x73, 0xd3, 0x3e, 0x98, 0xfc, 0xd5, 0x18, 0xc3, 0xb7, 0x21, 0xcc, 0x1f, 0x1f, 0x46, 0xa4, 0x32, + 0x8d, 0xd0, 0x55, 0x6c, 0x6c, 0x95, 0x54, 0x52, 0x53, 0xd0, 0x89, 0xb7, 0x44, 0xa7, 0xdc, 0xfd, + 0xdf, 0xe2, 0x23, 0x15, 0x9b, 0x39, 0x51, 0x6f, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x2a, + 0xc2, 0xaf, 0x3b, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/home/graph/v1/homegraph.pb.go b/vendor/google.golang.org/genproto/googleapis/home/graph/v1/homegraph.pb.go new file mode 100644 index 0000000..f35341a --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/home/graph/v1/homegraph.pb.go @@ -0,0 +1,1259 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/home/graph/v1/homegraph.proto + +package graph // import "google.golang.org/genproto/googleapis/home/graph/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request type for RequestSyncDevices call. +type RequestSyncDevicesRequest struct { + // Required. Third-party user id issued by agent's third-party identity + // provider. + AgentUserId string `protobuf:"bytes,1,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + // Optional. If set, the request will be added to a queue and a response will + // be returned immediately. The queue allows for de-duplication of + // simultaneous requests. 
+ Async bool `protobuf:"varint,2,opt,name=async,proto3" json:"async,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestSyncDevicesRequest) Reset() { *m = RequestSyncDevicesRequest{} } +func (m *RequestSyncDevicesRequest) String() string { return proto.CompactTextString(m) } +func (*RequestSyncDevicesRequest) ProtoMessage() {} +func (*RequestSyncDevicesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{0} +} +func (m *RequestSyncDevicesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestSyncDevicesRequest.Unmarshal(m, b) +} +func (m *RequestSyncDevicesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestSyncDevicesRequest.Marshal(b, m, deterministic) +} +func (dst *RequestSyncDevicesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestSyncDevicesRequest.Merge(dst, src) +} +func (m *RequestSyncDevicesRequest) XXX_Size() int { + return xxx_messageInfo_RequestSyncDevicesRequest.Size(m) +} +func (m *RequestSyncDevicesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RequestSyncDevicesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestSyncDevicesRequest proto.InternalMessageInfo + +func (m *RequestSyncDevicesRequest) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +func (m *RequestSyncDevicesRequest) GetAsync() bool { + if m != nil { + return m.Async + } + return false +} + +// Response type for RequestSyncDevices call. Intentionally empty upon success. +// An HTTP response code is returned with more details upon failure. +type RequestSyncDevicesResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestSyncDevicesResponse) Reset() { *m = RequestSyncDevicesResponse{} } +func (m *RequestSyncDevicesResponse) String() string { return proto.CompactTextString(m) } +func (*RequestSyncDevicesResponse) ProtoMessage() {} +func (*RequestSyncDevicesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{1} +} +func (m *RequestSyncDevicesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestSyncDevicesResponse.Unmarshal(m, b) +} +func (m *RequestSyncDevicesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestSyncDevicesResponse.Marshal(b, m, deterministic) +} +func (dst *RequestSyncDevicesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestSyncDevicesResponse.Merge(dst, src) +} +func (m *RequestSyncDevicesResponse) XXX_Size() int { + return xxx_messageInfo_RequestSyncDevicesResponse.Size(m) +} +func (m *RequestSyncDevicesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RequestSyncDevicesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestSyncDevicesResponse proto.InternalMessageInfo + +// Sample ReportStateAndNotificationRequest, with states and notifications +// defined per device_id (eg: "123" and "456" in the following example): +// { +// "requestId": "ff36a3cc-ec34-11e6-b1a0-64510650abcf", +// "agentUserId": "1234", +// "payload": { +// "devices": { +// "states": { +// "123": { +// "on": true +// }, +// "456": { +// "on": true, +// "brightness": 10 +// } +// }, +// "notifications": { +// "123": { +// "ObjectDetected": { +// "priority": 0, +// "objects": { +// "NAMED": ["Alice", "Bob", "Carol", "Eve"] +// } +// }, 
+// "DoorUnlocked": { +// "priority": 0, +// "keyUsed": { +// "keyName": "Wife's key" +// } +// } +// }, +// "456": { +// "SprinklersOn": { +// "priority": 0, +// "timeStarted": "1513792702" +// } +// } +// } +// } +// } +// } +// Request type for ReportStateAndNotification call. It may include States, +// Notifications, or both. This request uses globally unique flattened state +// names instead of namespaces based on traits to align with the existing QUERY +// and EXECUTE APIs implemented by 90+ Smart Home partners. +// Next tag: 6. +type ReportStateAndNotificationRequest struct { + // Request id used for debugging. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Unique identifier per event (eg: doorbell press). + EventId string `protobuf:"bytes,4,opt,name=event_id,json=eventId,proto3" json:"event_id,omitempty"` + // Required. Third-party user id. + AgentUserId string `protobuf:"bytes,2,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + // Token to maintain state in the follow up notification response. + FollowUpToken string `protobuf:"bytes,5,opt,name=follow_up_token,json=followUpToken,proto3" json:"follow_up_token,omitempty"` + // State of devices to update and notification metadata for devices. For + // example, if a user turns a light on manually, a State update should be + // sent so that the information is always the current status of the device. + // Notifications are independent from the state and its piece of the payload + // should contain everything necessary to notify the user. Although it may be + // related to a state change, it does not need to be. For example, if a + // device can turn on/off and change temperature, the states reported would + // include both "on" and "70 degrees" but the 3p may choose not to send any + // notification for that, or to only say that the "the room is heating up", + // keeping state and notification independent. 
+ Payload *StateAndNotificationPayload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportStateAndNotificationRequest) Reset() { *m = ReportStateAndNotificationRequest{} } +func (m *ReportStateAndNotificationRequest) String() string { return proto.CompactTextString(m) } +func (*ReportStateAndNotificationRequest) ProtoMessage() {} +func (*ReportStateAndNotificationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{2} +} +func (m *ReportStateAndNotificationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportStateAndNotificationRequest.Unmarshal(m, b) +} +func (m *ReportStateAndNotificationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportStateAndNotificationRequest.Marshal(b, m, deterministic) +} +func (dst *ReportStateAndNotificationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportStateAndNotificationRequest.Merge(dst, src) +} +func (m *ReportStateAndNotificationRequest) XXX_Size() int { + return xxx_messageInfo_ReportStateAndNotificationRequest.Size(m) +} +func (m *ReportStateAndNotificationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReportStateAndNotificationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportStateAndNotificationRequest proto.InternalMessageInfo + +func (m *ReportStateAndNotificationRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *ReportStateAndNotificationRequest) GetEventId() string { + if m != nil { + return m.EventId + } + return "" +} + +func (m *ReportStateAndNotificationRequest) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +func (m *ReportStateAndNotificationRequest) GetFollowUpToken() string { + if m != nil { + return m.FollowUpToken + } + return "" +} + +func (m *ReportStateAndNotificationRequest) GetPayload() *StateAndNotificationPayload { + if m != nil { + return m.Payload + } + return nil +} + +// Response type for ReportStateAndNotification call. +type ReportStateAndNotificationResponse struct { + // Request id copied from ReportStateAndNotificationRequest. 
+ RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportStateAndNotificationResponse) Reset() { *m = ReportStateAndNotificationResponse{} } +func (m *ReportStateAndNotificationResponse) String() string { return proto.CompactTextString(m) } +func (*ReportStateAndNotificationResponse) ProtoMessage() {} +func (*ReportStateAndNotificationResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{3} +} +func (m *ReportStateAndNotificationResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportStateAndNotificationResponse.Unmarshal(m, b) +} +func (m *ReportStateAndNotificationResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportStateAndNotificationResponse.Marshal(b, m, deterministic) +} +func (dst *ReportStateAndNotificationResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportStateAndNotificationResponse.Merge(dst, src) +} +func (m *ReportStateAndNotificationResponse) XXX_Size() int { + return xxx_messageInfo_ReportStateAndNotificationResponse.Size(m) +} +func (m *ReportStateAndNotificationResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReportStateAndNotificationResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportStateAndNotificationResponse proto.InternalMessageInfo + +func (m *ReportStateAndNotificationResponse) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +// Payload containing the State and Notification information for devices. +type StateAndNotificationPayload struct { + // The devices for updating State and sending Notifications. + Devices *ReportStateAndNotificationDevice `protobuf:"bytes,1,opt,name=devices,proto3" json:"devices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StateAndNotificationPayload) Reset() { *m = StateAndNotificationPayload{} } +func (m *StateAndNotificationPayload) String() string { return proto.CompactTextString(m) } +func (*StateAndNotificationPayload) ProtoMessage() {} +func (*StateAndNotificationPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{4} +} +func (m *StateAndNotificationPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StateAndNotificationPayload.Unmarshal(m, b) +} +func (m *StateAndNotificationPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StateAndNotificationPayload.Marshal(b, m, deterministic) +} +func (dst *StateAndNotificationPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_StateAndNotificationPayload.Merge(dst, src) +} +func (m *StateAndNotificationPayload) XXX_Size() int { + return xxx_messageInfo_StateAndNotificationPayload.Size(m) +} +func (m *StateAndNotificationPayload) XXX_DiscardUnknown() { + xxx_messageInfo_StateAndNotificationPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_StateAndNotificationPayload proto.InternalMessageInfo + +func (m *StateAndNotificationPayload) GetDevices() *ReportStateAndNotificationDevice { + if m != nil { + return m.Devices + } + return nil +} + +// The States and Notifications specific to a device. +type ReportStateAndNotificationDevice struct { + // States of devices to update. 
+ States *_struct.Struct `protobuf:"bytes,1,opt,name=states,proto3" json:"states,omitempty"` + // Notifications metadata for devices. + Notifications *_struct.Struct `protobuf:"bytes,2,opt,name=notifications,proto3" json:"notifications,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReportStateAndNotificationDevice) Reset() { *m = ReportStateAndNotificationDevice{} } +func (m *ReportStateAndNotificationDevice) String() string { return proto.CompactTextString(m) } +func (*ReportStateAndNotificationDevice) ProtoMessage() {} +func (*ReportStateAndNotificationDevice) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{5} +} +func (m *ReportStateAndNotificationDevice) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReportStateAndNotificationDevice.Unmarshal(m, b) +} +func (m *ReportStateAndNotificationDevice) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReportStateAndNotificationDevice.Marshal(b, m, deterministic) +} +func (dst *ReportStateAndNotificationDevice) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReportStateAndNotificationDevice.Merge(dst, src) +} +func (m *ReportStateAndNotificationDevice) XXX_Size() int { + return xxx_messageInfo_ReportStateAndNotificationDevice.Size(m) +} +func (m *ReportStateAndNotificationDevice) XXX_DiscardUnknown() { + xxx_messageInfo_ReportStateAndNotificationDevice.DiscardUnknown(m) +} + +var xxx_messageInfo_ReportStateAndNotificationDevice proto.InternalMessageInfo + +func (m *ReportStateAndNotificationDevice) GetStates() *_struct.Struct { + if m != nil { + return m.States + } + return nil +} + +func (m *ReportStateAndNotificationDevice) GetNotifications() *_struct.Struct { + if m != nil { + return m.Notifications + } + return nil +} + +// Request type for DeleteAgentUser call. +type DeleteAgentUserRequest struct { + // Request id used for debugging. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Required. Third-party user id. 
+ AgentUserId string `protobuf:"bytes,2,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAgentUserRequest) Reset() { *m = DeleteAgentUserRequest{} } +func (m *DeleteAgentUserRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAgentUserRequest) ProtoMessage() {} +func (*DeleteAgentUserRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{6} +} +func (m *DeleteAgentUserRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAgentUserRequest.Unmarshal(m, b) +} +func (m *DeleteAgentUserRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAgentUserRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAgentUserRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAgentUserRequest.Merge(dst, src) +} +func (m *DeleteAgentUserRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAgentUserRequest.Size(m) +} +func (m *DeleteAgentUserRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAgentUserRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAgentUserRequest proto.InternalMessageInfo + +func (m *DeleteAgentUserRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *DeleteAgentUserRequest) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +// Request type for Query call. This should be the same format as the AoG +// action.devices.QUERY request +// (https://developers.google.com/actions/smarthome/create-app#actiondevicesquery) +// with the exception of the extra "agent_user_id" and no "intent" and +// "customData" field. +type QueryRequest struct { + // Request ID used for debugging. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Required. Third-party user ID. + AgentUserId string `protobuf:"bytes,2,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + // Required. Inputs containing third-party partner's device IDs for which to + // get the device states. 
+ Inputs []*QueryRequestInput `protobuf:"bytes,3,rep,name=inputs,proto3" json:"inputs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryRequest) Reset() { *m = QueryRequest{} } +func (m *QueryRequest) String() string { return proto.CompactTextString(m) } +func (*QueryRequest) ProtoMessage() {} +func (*QueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{7} +} +func (m *QueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryRequest.Unmarshal(m, b) +} +func (m *QueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryRequest.Marshal(b, m, deterministic) +} +func (dst *QueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryRequest.Merge(dst, src) +} +func (m *QueryRequest) XXX_Size() int { + return xxx_messageInfo_QueryRequest.Size(m) +} +func (m *QueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryRequest proto.InternalMessageInfo + +func (m *QueryRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *QueryRequest) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +func (m *QueryRequest) GetInputs() []*QueryRequestInput { + if m != nil { + return m.Inputs + } + return nil +} + +// Device ID inputs to QueryRequest. +type QueryRequestInput struct { + // Payload containing third-party partner's device IDs. + Payload *QueryRequestPayload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryRequestInput) Reset() { *m = QueryRequestInput{} } +func (m *QueryRequestInput) String() string { return proto.CompactTextString(m) } +func (*QueryRequestInput) ProtoMessage() {} +func (*QueryRequestInput) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{8} +} +func (m *QueryRequestInput) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryRequestInput.Unmarshal(m, b) +} +func (m *QueryRequestInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryRequestInput.Marshal(b, m, deterministic) +} +func (dst *QueryRequestInput) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryRequestInput.Merge(dst, src) +} +func (m *QueryRequestInput) XXX_Size() int { + return xxx_messageInfo_QueryRequestInput.Size(m) +} +func (m *QueryRequestInput) XXX_DiscardUnknown() { + xxx_messageInfo_QueryRequestInput.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryRequestInput proto.InternalMessageInfo + +func (m *QueryRequestInput) GetPayload() *QueryRequestPayload { + if m != nil { + return m.Payload + } + return nil +} + +// Payload containing device IDs. +type QueryRequestPayload struct { + // Third-party partner's device IDs to get device states for. 
+ Devices []*AgentDeviceId `protobuf:"bytes,1,rep,name=devices,proto3" json:"devices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryRequestPayload) Reset() { *m = QueryRequestPayload{} } +func (m *QueryRequestPayload) String() string { return proto.CompactTextString(m) } +func (*QueryRequestPayload) ProtoMessage() {} +func (*QueryRequestPayload) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{9} +} +func (m *QueryRequestPayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryRequestPayload.Unmarshal(m, b) +} +func (m *QueryRequestPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryRequestPayload.Marshal(b, m, deterministic) +} +func (dst *QueryRequestPayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryRequestPayload.Merge(dst, src) +} +func (m *QueryRequestPayload) XXX_Size() int { + return xxx_messageInfo_QueryRequestPayload.Size(m) +} +func (m *QueryRequestPayload) XXX_DiscardUnknown() { + xxx_messageInfo_QueryRequestPayload.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryRequestPayload proto.InternalMessageInfo + +func (m *QueryRequestPayload) GetDevices() []*AgentDeviceId { + if m != nil { + return m.Devices + } + return nil +} + +// Third-party partner's device ID for one device. +type AgentDeviceId struct { + // Third-party partner's device ID. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AgentDeviceId) Reset() { *m = AgentDeviceId{} } +func (m *AgentDeviceId) String() string { return proto.CompactTextString(m) } +func (*AgentDeviceId) ProtoMessage() {} +func (*AgentDeviceId) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{10} +} +func (m *AgentDeviceId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AgentDeviceId.Unmarshal(m, b) +} +func (m *AgentDeviceId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AgentDeviceId.Marshal(b, m, deterministic) +} +func (dst *AgentDeviceId) XXX_Merge(src proto.Message) { + xxx_messageInfo_AgentDeviceId.Merge(dst, src) +} +func (m *AgentDeviceId) XXX_Size() int { + return xxx_messageInfo_AgentDeviceId.Size(m) +} +func (m *AgentDeviceId) XXX_DiscardUnknown() { + xxx_messageInfo_AgentDeviceId.DiscardUnknown(m) +} + +var xxx_messageInfo_AgentDeviceId proto.InternalMessageInfo + +func (m *AgentDeviceId) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Response type for Query call. This should follow the same format as AoG +// action.devices.QUERY response +// (https://developers.google.com/actions/smarthome/create-app#actiondevicesquery). +type QueryResponse struct { + // Request ID used for debugging. Copied from the request. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Device states for the devices given in the request. 
+ Payload *QueryResponsePayload `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResponse) Reset() { *m = QueryResponse{} } +func (m *QueryResponse) String() string { return proto.CompactTextString(m) } +func (*QueryResponse) ProtoMessage() {} +func (*QueryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{11} +} +func (m *QueryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResponse.Unmarshal(m, b) +} +func (m *QueryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResponse.Marshal(b, m, deterministic) +} +func (dst *QueryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResponse.Merge(dst, src) +} +func (m *QueryResponse) XXX_Size() int { + return xxx_messageInfo_QueryResponse.Size(m) +} +func (m *QueryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResponse proto.InternalMessageInfo + +func (m *QueryResponse) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *QueryResponse) GetPayload() *QueryResponsePayload { + if m != nil { + return m.Payload + } + return nil +} + +// Payload containing device states information. +type QueryResponsePayload struct { + // States of the devices. Map of third-party device ID to struct of device + // states. + Devices map[string]*_struct.Struct `protobuf:"bytes,1,rep,name=devices,proto3" json:"devices,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryResponsePayload) Reset() { *m = QueryResponsePayload{} } +func (m *QueryResponsePayload) String() string { return proto.CompactTextString(m) } +func (*QueryResponsePayload) ProtoMessage() {} +func (*QueryResponsePayload) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{12} +} +func (m *QueryResponsePayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryResponsePayload.Unmarshal(m, b) +} +func (m *QueryResponsePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryResponsePayload.Marshal(b, m, deterministic) +} +func (dst *QueryResponsePayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryResponsePayload.Merge(dst, src) +} +func (m *QueryResponsePayload) XXX_Size() int { + return xxx_messageInfo_QueryResponsePayload.Size(m) +} +func (m *QueryResponsePayload) XXX_DiscardUnknown() { + xxx_messageInfo_QueryResponsePayload.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryResponsePayload proto.InternalMessageInfo + +func (m *QueryResponsePayload) GetDevices() map[string]*_struct.Struct { + if m != nil { + return m.Devices + } + return nil +} + +// Request type for Sync call. This should follow the same format as AoG +// action.devices.SYNC request +// (https://developers.google.com/actions/smarthome/create-app#actiondevicessync) +// with the exception of the extra "agent_user_id" and no "intent" field. +type SyncRequest struct { + // Request ID used for debugging. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Required. Third-party user ID. 
+ AgentUserId string `protobuf:"bytes,2,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SyncRequest) Reset() { *m = SyncRequest{} } +func (m *SyncRequest) String() string { return proto.CompactTextString(m) } +func (*SyncRequest) ProtoMessage() {} +func (*SyncRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{13} +} +func (m *SyncRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SyncRequest.Unmarshal(m, b) +} +func (m *SyncRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SyncRequest.Marshal(b, m, deterministic) +} +func (dst *SyncRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SyncRequest.Merge(dst, src) +} +func (m *SyncRequest) XXX_Size() int { + return xxx_messageInfo_SyncRequest.Size(m) +} +func (m *SyncRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SyncRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SyncRequest proto.InternalMessageInfo + +func (m *SyncRequest) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *SyncRequest) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +// Example SyncResponse: +// { +// "requestId": "ff36a3cc-ec34-11e6-b1a0-64510650abcf", +// "payload": { +// "agentUserId": "1836.15267389", +// "devices": [{ +// "id": "123", +// "type": "action.devices.types.OUTLET", +// "traits": [ +// "action.devices.traits.OnOff" +// ], +// "name": { +// "defaultNames": ["My Outlet 1234"], +// "name": "Night light", +// "nicknames": ["wall plug"] +// }, +// "willReportState": false, +// "deviceInfo": { +// "manufacturer": "lights-out-inc", +// "model": "hs1234", +// "hwVersion": "3.2", +// "swVersion": "11.4" +// }, +// "customData": { +// "fooValue": 74, +// "barValue": true, +// "bazValue": "foo" +// } +// }] +// } +// } +// +// Response type for Sync call. This should follow the same format as AoG +// action.devices.SYNC response +// (https://developers.google.com/actions/smarthome/create-app#actiondevicessync). +type SyncResponse struct { + // Request ID used for debugging. Copied from the request. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Devices associated with the third-party user. 
+ Payload *SyncResponsePayload `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SyncResponse) Reset() { *m = SyncResponse{} } +func (m *SyncResponse) String() string { return proto.CompactTextString(m) } +func (*SyncResponse) ProtoMessage() {} +func (*SyncResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{14} +} +func (m *SyncResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SyncResponse.Unmarshal(m, b) +} +func (m *SyncResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SyncResponse.Marshal(b, m, deterministic) +} +func (dst *SyncResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SyncResponse.Merge(dst, src) +} +func (m *SyncResponse) XXX_Size() int { + return xxx_messageInfo_SyncResponse.Size(m) +} +func (m *SyncResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SyncResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SyncResponse proto.InternalMessageInfo + +func (m *SyncResponse) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *SyncResponse) GetPayload() *SyncResponsePayload { + if m != nil { + return m.Payload + } + return nil +} + +// Payload containing device information. +type SyncResponsePayload struct { + // Third-party user ID + AgentUserId string `protobuf:"bytes,1,opt,name=agent_user_id,json=agentUserId,proto3" json:"agent_user_id,omitempty"` + // Devices associated with the third-party user. + Devices []*Device `protobuf:"bytes,2,rep,name=devices,proto3" json:"devices,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SyncResponsePayload) Reset() { *m = SyncResponsePayload{} } +func (m *SyncResponsePayload) String() string { return proto.CompactTextString(m) } +func (*SyncResponsePayload) ProtoMessage() {} +func (*SyncResponsePayload) Descriptor() ([]byte, []int) { + return fileDescriptor_homegraph_b39498a40939fff1, []int{15} +} +func (m *SyncResponsePayload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SyncResponsePayload.Unmarshal(m, b) +} +func (m *SyncResponsePayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SyncResponsePayload.Marshal(b, m, deterministic) +} +func (dst *SyncResponsePayload) XXX_Merge(src proto.Message) { + xxx_messageInfo_SyncResponsePayload.Merge(dst, src) +} +func (m *SyncResponsePayload) XXX_Size() int { + return xxx_messageInfo_SyncResponsePayload.Size(m) +} +func (m *SyncResponsePayload) XXX_DiscardUnknown() { + xxx_messageInfo_SyncResponsePayload.DiscardUnknown(m) +} + +var xxx_messageInfo_SyncResponsePayload proto.InternalMessageInfo + +func (m *SyncResponsePayload) GetAgentUserId() string { + if m != nil { + return m.AgentUserId + } + return "" +} + +func (m *SyncResponsePayload) GetDevices() []*Device { + if m != nil { + return m.Devices + } + return nil +} + +func init() { + proto.RegisterType((*RequestSyncDevicesRequest)(nil), "google.home.graph.v1.RequestSyncDevicesRequest") + proto.RegisterType((*RequestSyncDevicesResponse)(nil), "google.home.graph.v1.RequestSyncDevicesResponse") + proto.RegisterType((*ReportStateAndNotificationRequest)(nil), "google.home.graph.v1.ReportStateAndNotificationRequest") + proto.RegisterType((*ReportStateAndNotificationResponse)(nil), 
"google.home.graph.v1.ReportStateAndNotificationResponse") + proto.RegisterType((*StateAndNotificationPayload)(nil), "google.home.graph.v1.StateAndNotificationPayload") + proto.RegisterType((*ReportStateAndNotificationDevice)(nil), "google.home.graph.v1.ReportStateAndNotificationDevice") + proto.RegisterType((*DeleteAgentUserRequest)(nil), "google.home.graph.v1.DeleteAgentUserRequest") + proto.RegisterType((*QueryRequest)(nil), "google.home.graph.v1.QueryRequest") + proto.RegisterType((*QueryRequestInput)(nil), "google.home.graph.v1.QueryRequestInput") + proto.RegisterType((*QueryRequestPayload)(nil), "google.home.graph.v1.QueryRequestPayload") + proto.RegisterType((*AgentDeviceId)(nil), "google.home.graph.v1.AgentDeviceId") + proto.RegisterType((*QueryResponse)(nil), "google.home.graph.v1.QueryResponse") + proto.RegisterType((*QueryResponsePayload)(nil), "google.home.graph.v1.QueryResponsePayload") + proto.RegisterMapType((map[string]*_struct.Struct)(nil), "google.home.graph.v1.QueryResponsePayload.DevicesEntry") + proto.RegisterType((*SyncRequest)(nil), "google.home.graph.v1.SyncRequest") + proto.RegisterType((*SyncResponse)(nil), "google.home.graph.v1.SyncResponse") + proto.RegisterType((*SyncResponsePayload)(nil), "google.home.graph.v1.SyncResponsePayload") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// HomeGraphApiServiceClient is the client API for HomeGraphApiService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type HomeGraphApiServiceClient interface { + // Requests a Sync call from Google to a 3p partner's home control agent for + // a user. + // + // + // Third-party user's identity is passed in as agent_user_id. + // (see + // [RequestSyncDevicesRequest][google.home.graph.v1.RequestSyncDevicesRequest]) + // and forwarded back to the agent. Agent is identified by the API key or JWT + // signed by the partner's service account. + RequestSyncDevices(ctx context.Context, in *RequestSyncDevicesRequest, opts ...grpc.CallOption) (*RequestSyncDevicesResponse, error) + // Reports device state and optionally sends device notifications. Called by + // an agent when the device state of a third-party changes or the agent wants + // to send a notification about the device. + // This method updates a predefined set of States for a device, which all + // devices have (for example a light will have OnOff, Color, Brightness). + // A new State may not be created and an INVALID_ARGUMENT code will be thrown + // if so. It also optionally takes in a list of Notifications that may be + // created, which are associated to this State change. + // + // Third-party user's identity is passed in as agent_user_id. + // Agent is identified by the JWT signed by the partner's service account. + ReportStateAndNotification(ctx context.Context, in *ReportStateAndNotificationRequest, opts ...grpc.CallOption) (*ReportStateAndNotificationResponse, error) + // Unlink an agent user from Google. As result, all data related to this user + // will be deleted. + // + // Here is how the agent user is created in Google: + // When users open their Google Home App, they can begin linking a 3p + // partner. User is guided through the OAuth process. 
After entering the 3p + // credentials, Google gets the 3p OAuth token, and uses it to make a + // Sync call to the 3p partner and gets back all the user's data, including + // agent_user_id and devices. + // Google then creates the agent user and stores a mapping from the + // agent_user_id -> Google ID mapping. Google also stores all user's devices + // under that Google ID. + // The mapping from agent_user_id -> Google ID is many to many, since one + // Google user can have multiple 3p accounts, and multiple Google users can + // map to one agent_user_id (e.g. husband and wife share one Nest account + // username/password). + // + // Third-party user's identity is passed in as agent_user_id + // Agent is identified by the JWT signed by the partner's service account. + // + // Note: Special characters (except "/") in agent_user_id must be URL encoded. + DeleteAgentUser(ctx context.Context, in *DeleteAgentUserRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets the device states for the devices in QueryRequest. + // Third-party user's identity is passed in as agent_user_id. Agent is + // identified by the JWT signed by the third-party partner's service account. + Query(ctx context.Context, in *QueryRequest, opts ...grpc.CallOption) (*QueryResponse, error) + // Gets all the devices associated with the given third-party user. + // Third-party user's identity is passed in as agent_user_id. Agent is + // identified by the JWT signed by the third-party partner's service account. + Sync(ctx context.Context, in *SyncRequest, opts ...grpc.CallOption) (*SyncResponse, error) +} + +type homeGraphApiServiceClient struct { + cc *grpc.ClientConn +} + +func NewHomeGraphApiServiceClient(cc *grpc.ClientConn) HomeGraphApiServiceClient { + return &homeGraphApiServiceClient{cc} +} + +func (c *homeGraphApiServiceClient) RequestSyncDevices(ctx context.Context, in *RequestSyncDevicesRequest, opts ...grpc.CallOption) (*RequestSyncDevicesResponse, error) { + out := new(RequestSyncDevicesResponse) + err := c.cc.Invoke(ctx, "/google.home.graph.v1.HomeGraphApiService/RequestSyncDevices", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *homeGraphApiServiceClient) ReportStateAndNotification(ctx context.Context, in *ReportStateAndNotificationRequest, opts ...grpc.CallOption) (*ReportStateAndNotificationResponse, error) { + out := new(ReportStateAndNotificationResponse) + err := c.cc.Invoke(ctx, "/google.home.graph.v1.HomeGraphApiService/ReportStateAndNotification", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *homeGraphApiServiceClient) DeleteAgentUser(ctx context.Context, in *DeleteAgentUserRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.home.graph.v1.HomeGraphApiService/DeleteAgentUser", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *homeGraphApiServiceClient) Query(ctx context.Context, in *QueryRequest, opts ...grpc.CallOption) (*QueryResponse, error) { + out := new(QueryResponse) + err := c.cc.Invoke(ctx, "/google.home.graph.v1.HomeGraphApiService/Query", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *homeGraphApiServiceClient) Sync(ctx context.Context, in *SyncRequest, opts ...grpc.CallOption) (*SyncResponse, error) { + out := new(SyncResponse) + err := c.cc.Invoke(ctx, "/google.home.graph.v1.HomeGraphApiService/Sync", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// HomeGraphApiServiceServer is the server API for HomeGraphApiService service. +type HomeGraphApiServiceServer interface { + // Requests a Sync call from Google to a 3p partner's home control agent for + // a user. + // + // + // Third-party user's identity is passed in as agent_user_id. + // (see + // [RequestSyncDevicesRequest][google.home.graph.v1.RequestSyncDevicesRequest]) + // and forwarded back to the agent. Agent is identified by the API key or JWT + // signed by the partner's service account. + RequestSyncDevices(context.Context, *RequestSyncDevicesRequest) (*RequestSyncDevicesResponse, error) + // Reports device state and optionally sends device notifications. Called by + // an agent when the device state of a third-party changes or the agent wants + // to send a notification about the device. + // This method updates a predefined set of States for a device, which all + // devices have (for example a light will have OnOff, Color, Brightness). + // A new State may not be created and an INVALID_ARGUMENT code will be thrown + // if so. It also optionally takes in a list of Notifications that may be + // created, which are associated to this State change. + // + // Third-party user's identity is passed in as agent_user_id. + // Agent is identified by the JWT signed by the partner's service account. + ReportStateAndNotification(context.Context, *ReportStateAndNotificationRequest) (*ReportStateAndNotificationResponse, error) + // Unlink an agent user from Google. As result, all data related to this user + // will be deleted. + // + // Here is how the agent user is created in Google: + // When users open their Google Home App, they can begin linking a 3p + // partner. User is guided through the OAuth process. After entering the 3p + // credentials, Google gets the 3p OAuth token, and uses it to make a + // Sync call to the 3p partner and gets back all the user's data, including + // agent_user_id and devices. + // Google then creates the agent user and stores a mapping from the + // agent_user_id -> Google ID mapping. Google also stores all user's devices + // under that Google ID. + // The mapping from agent_user_id -> Google ID is many to many, since one + // Google user can have multiple 3p accounts, and multiple Google users can + // map to one agent_user_id (e.g. husband and wife share one Nest account + // username/password). + // + // Third-party user's identity is passed in as agent_user_id + // Agent is identified by the JWT signed by the partner's service account. + // + // Note: Special characters (except "/") in agent_user_id must be URL encoded. + DeleteAgentUser(context.Context, *DeleteAgentUserRequest) (*empty.Empty, error) + // Gets the device states for the devices in QueryRequest. + // Third-party user's identity is passed in as agent_user_id. Agent is + // identified by the JWT signed by the third-party partner's service account. + Query(context.Context, *QueryRequest) (*QueryResponse, error) + // Gets all the devices associated with the given third-party user. + // Third-party user's identity is passed in as agent_user_id. Agent is + // identified by the JWT signed by the third-party partner's service account. 
+ Sync(context.Context, *SyncRequest) (*SyncResponse, error) +} + +func RegisterHomeGraphApiServiceServer(s *grpc.Server, srv HomeGraphApiServiceServer) { + s.RegisterService(&_HomeGraphApiService_serviceDesc, srv) +} + +func _HomeGraphApiService_RequestSyncDevices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RequestSyncDevicesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HomeGraphApiServiceServer).RequestSyncDevices(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.home.graph.v1.HomeGraphApiService/RequestSyncDevices", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HomeGraphApiServiceServer).RequestSyncDevices(ctx, req.(*RequestSyncDevicesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _HomeGraphApiService_ReportStateAndNotification_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReportStateAndNotificationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HomeGraphApiServiceServer).ReportStateAndNotification(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.home.graph.v1.HomeGraphApiService/ReportStateAndNotification", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HomeGraphApiServiceServer).ReportStateAndNotification(ctx, req.(*ReportStateAndNotificationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _HomeGraphApiService_DeleteAgentUser_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAgentUserRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HomeGraphApiServiceServer).DeleteAgentUser(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.home.graph.v1.HomeGraphApiService/DeleteAgentUser", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HomeGraphApiServiceServer).DeleteAgentUser(ctx, req.(*DeleteAgentUserRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _HomeGraphApiService_Query_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HomeGraphApiServiceServer).Query(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.home.graph.v1.HomeGraphApiService/Query", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HomeGraphApiServiceServer).Query(ctx, req.(*QueryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _HomeGraphApiService_Sync_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SyncRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HomeGraphApiServiceServer).Sync(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.home.graph.v1.HomeGraphApiService/Sync", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HomeGraphApiServiceServer).Sync(ctx, req.(*SyncRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _HomeGraphApiService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.home.graph.v1.HomeGraphApiService", + HandlerType: (*HomeGraphApiServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "RequestSyncDevices", + Handler: _HomeGraphApiService_RequestSyncDevices_Handler, + }, + { + MethodName: "ReportStateAndNotification", + Handler: _HomeGraphApiService_ReportStateAndNotification_Handler, + }, + { + MethodName: "DeleteAgentUser", + Handler: _HomeGraphApiService_DeleteAgentUser_Handler, + }, + { + MethodName: "Query", + Handler: _HomeGraphApiService_Query_Handler, + }, + { + MethodName: "Sync", + Handler: _HomeGraphApiService_Sync_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/home/graph/v1/homegraph.proto", +} + +func init() { + proto.RegisterFile("google/home/graph/v1/homegraph.proto", fileDescriptor_homegraph_b39498a40939fff1) +} + +var fileDescriptor_homegraph_b39498a40939fff1 = []byte{ + // 879 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4f, 0x8f, 0xdb, 0x44, + 0x14, 0x97, 0x93, 0x66, 0xb7, 0x7d, 0xd9, 0xd0, 0x76, 0x76, 0xd9, 0xba, 0xd9, 0x20, 0x92, 0x59, + 0x54, 0xd2, 0x00, 0x36, 0x09, 0x52, 0x5b, 0x16, 0xad, 0xd0, 0x76, 0xb7, 0x82, 0x08, 0x09, 0xa5, + 0x4e, 0x57, 0x42, 0x70, 0x88, 0xdc, 0x78, 0xd6, 0x35, 0x4d, 0x3c, 0x5e, 0xcf, 0x38, 0x28, 0x42, + 0x5c, 0xfa, 0x11, 0xe0, 0xc8, 0x77, 0xe1, 0xc0, 0x57, 0xe0, 0xc4, 0x9d, 0xcf, 0xc0, 0xb9, 0x9a, + 0x19, 0x7b, 0xf3, 0xc7, 0x4e, 0xe2, 0x4a, 0x7b, 0xf3, 0xcc, 0xfb, 0xbd, 0xdf, 0x7b, 0x6f, 0xde, + 0x6f, 0xde, 0x18, 0x3e, 0x72, 0x29, 0x75, 0x47, 0xc4, 0x7c, 0x45, 0xc7, 0xc4, 0x74, 0x43, 0x3b, + 0x78, 0x65, 0x4e, 0xda, 0x72, 0x25, 0x17, 0x46, 0x10, 0x52, 0x4e, 0xd1, 0x9e, 0x42, 0x19, 0x62, + 0xdf, 0x50, 0x86, 0x49, 0xbb, 0x5a, 0x8b, 0x7d, 0xed, 0xc0, 0x33, 0x6d, 0xdf, 0xa7, 0xdc, 0xe6, + 0x1e, 0xf5, 0x99, 0xf2, 0xa9, 0x36, 0x32, 0x99, 0x1d, 0x32, 0xf1, 0x86, 0x24, 0x86, 0x1c, 0xc4, + 0x10, 0xb9, 0x7a, 0x19, 0x5d, 0x98, 0x64, 0x1c, 0xf0, 0x69, 0x6c, 0xac, 0x2d, 0x1b, 0x19, 0x0f, + 0xa3, 0x21, 0x57, 0x56, 0x7c, 0x0e, 0xf7, 0x2d, 0x72, 0x19, 0x11, 0xc6, 0xfb, 0x53, 0x7f, 0x78, + 0x26, 0x59, 0x59, 0xbc, 0x83, 0x30, 0x54, 0x6c, 0x97, 0xf8, 0x7c, 0x10, 0x31, 0x12, 0x0e, 0x3c, + 0x47, 0xd7, 0xea, 0x5a, 0xf3, 0x96, 0x55, 0x96, 0x9b, 0xe7, 0x8c, 0x84, 0x5d, 0x07, 0xed, 0x41, + 0xc9, 0x66, 0x53, 0x7f, 0xa8, 0x17, 0xea, 0x5a, 0xf3, 0xa6, 0xa5, 0x16, 0xb8, 0x06, 0xd5, 0x2c, + 0x5a, 0x16, 0x50, 0x9f, 0x11, 0xfc, 0xbf, 0x06, 0x0d, 0x8b, 0x04, 0x34, 0xe4, 0x7d, 0x6e, 0x73, + 0x72, 0xe2, 0x3b, 0xdf, 0x53, 0xee, 0x5d, 0x78, 0x43, 0x59, 0x78, 0x12, 0xfd, 0x03, 0x80, 0x50, + 0x7d, 0xce, 0x42, 0xdf, 0x8a, 0x77, 0xba, 0x0e, 0xba, 0x0f, 0x37, 0xc9, 0x44, 0x24, 0xe7, 0x39, + 0xfa, 0x0d, 0x69, 0xdc, 0x96, 0xeb, 0xae, 0x93, 0xce, 0xbb, 0x90, 0xce, 0xfb, 0x01, 0xdc, 0xbe, + 0xa0, 0xa3, 0x11, 0xfd, 0x65, 0x10, 0x05, 0x03, 0x4e, 0x5f, 0x13, 0x5f, 0x2f, 0x49, 0x54, 0x45, + 0x6d, 0x9f, 0x07, 0x2f, 0xc4, 0x26, 0xfa, 0x0e, 0xb6, 0x03, 0x7b, 0x3a, 0xa2, 0xb6, 0xa3, 0x17, + 0xeb, 0x5a, 0xb3, 0xdc, 0x69, 0x1b, 0x59, 0x4d, 0x34, 0xb2, 0x2a, 0xe9, 0x29, 0x47, 0x2b, 0x61, + 0xc0, 0xa7, 0x80, 0xd7, 0xd5, 0xad, 0x8e, 0x67, 0x43, 0xe1, 0x98, 0xc2, 0xc1, 0x9a, 0x60, 0xa8, + 0x07, 0xdb, 0x4a, 0x1c, 0x4c, 0xba, 
0x96, 0x3b, 0x8f, 0xb2, 0x13, 0x5e, 0x9d, 0x88, 0x6a, 0x97, + 0x95, 0xd0, 0xe0, 0xdf, 0x35, 0xa8, 0x6f, 0x42, 0x23, 0x13, 0xb6, 0x98, 0xb0, 0x26, 0x51, 0xef, + 0x25, 0x51, 0x13, 0xdd, 0x19, 0x7d, 0xa9, 0x3b, 0x2b, 0x86, 0xa1, 0x63, 0xa8, 0xf8, 0x73, 0x34, + 0x4c, 0x36, 0x69, 0x8d, 0xdf, 0x22, 0x1a, 0xff, 0x04, 0xfb, 0x67, 0x64, 0x44, 0x38, 0x39, 0x49, + 0x9a, 0x9a, 0x53, 0x37, 0x39, 0xc4, 0x21, 0x2a, 0xde, 0x79, 0x1e, 0x91, 0x70, 0x7a, 0x7d, 0x9c, + 0xe8, 0x6b, 0xd8, 0xf2, 0xfc, 0x20, 0xe2, 0x4c, 0x2f, 0xd6, 0x8b, 0xcd, 0x72, 0xe7, 0xe3, 0xec, + 0xb6, 0xcc, 0x87, 0xed, 0x0a, 0xbc, 0x15, 0xbb, 0xe1, 0x1f, 0xe0, 0x6e, 0xca, 0x88, 0x4e, 0x67, + 0xf2, 0x54, 0xe7, 0xfe, 0x70, 0x33, 0x6d, 0x4a, 0x96, 0x2f, 0x60, 0x37, 0xc3, 0x8e, 0x8e, 0xe7, + 0x95, 0x24, 0x52, 0x3e, 0xcc, 0xe6, 0x96, 0x1d, 0x50, 0x32, 0xe8, 0x3a, 0x33, 0xd9, 0x7c, 0x08, + 0x95, 0x05, 0x0b, 0x7a, 0x0f, 0x0a, 0x57, 0x87, 0x57, 0xf0, 0x1c, 0xcc, 0xa1, 0x12, 0x87, 0xcd, + 0x25, 0x7c, 0x74, 0x36, 0xab, 0x55, 0x69, 0xa5, 0xb5, 0xb6, 0x56, 0x45, 0x9a, 0x2a, 0xf6, 0x2f, + 0x0d, 0xf6, 0xb2, 0x10, 0xe8, 0xf9, 0x72, 0xb9, 0x8f, 0xf3, 0xd3, 0x1b, 0xf1, 0x88, 0x7b, 0xe6, + 0xf3, 0x70, 0x7a, 0x75, 0x04, 0xd5, 0x3e, 0xec, 0xcc, 0x1b, 0xd0, 0x1d, 0x28, 0xbe, 0x26, 0xd3, + 0xb8, 0x32, 0xf1, 0x89, 0x3e, 0x83, 0xd2, 0xc4, 0x1e, 0x45, 0x64, 0x93, 0xfa, 0x15, 0xea, 0xa8, + 0xf0, 0x44, 0xc3, 0x3d, 0x28, 0x8b, 0xa1, 0x7a, 0x8d, 0x72, 0x0f, 0x61, 0x47, 0x31, 0xe6, 0xeb, + 0xc3, 0xe9, 0x72, 0x1f, 0x56, 0x68, 0x6e, 0x9e, 0x33, 0xd5, 0x86, 0x4b, 0xd8, 0xcd, 0xb0, 0xe7, + 0x7a, 0x72, 0x1e, 0xcd, 0x1a, 0x55, 0x90, 0x8d, 0xaa, 0x65, 0xc7, 0x5f, 0x9a, 0x63, 0x9d, 0x7f, + 0x4b, 0xb0, 0xfb, 0x2d, 0x1d, 0x93, 0x6f, 0x04, 0xe0, 0x24, 0xf0, 0xfa, 0x24, 0x94, 0xa3, 0xeb, + 0x4f, 0x0d, 0x50, 0xfa, 0xb5, 0x42, 0xe6, 0xaa, 0xb9, 0xb9, 0xe2, 0xb9, 0xac, 0x7e, 0x9e, 0xdf, + 0x21, 0x7e, 0x08, 0xf1, 0x9b, 0x7f, 0xfe, 0xfb, 0xa3, 0x50, 0xc3, 0xf7, 0x66, 0x4f, 0x3a, 0x3b, + 0x0a, 0x67, 0xf8, 0x23, 0xad, 0x85, 0xfe, 0xd6, 0xc4, 0x5b, 0xba, 0x6a, 0xfa, 0xa2, 0xc7, 0xef, + 0x3a, 0xdd, 0x93, 0x6c, 0x9f, 0xbc, 0xbb, 0x63, 0x9c, 0x75, 0x5b, 0x66, 0xfd, 0x09, 0x7e, 0xb0, + 0x98, 0xf5, 0x2a, 0x3f, 0x51, 0xc4, 0x1b, 0x0d, 0x6e, 0x2f, 0x8d, 0x6b, 0xf4, 0xe9, 0xaa, 0xae, + 0x65, 0x4d, 0xf5, 0xea, 0x7e, 0xea, 0x66, 0x3c, 0x13, 0x3f, 0x39, 0xf8, 0xa1, 0x4c, 0xe6, 0xb0, + 0xd5, 0x10, 0xc9, 0xfc, 0xba, 0xa0, 0x9d, 0xe3, 0x2b, 0xd1, 0x30, 0xb3, 0xd5, 0xfa, 0x0d, 0xf9, + 0x50, 0x92, 0x77, 0x17, 0xe1, 0xcd, 0x33, 0xb2, 0x7a, 0x98, 0xe3, 0xf2, 0xe3, 0x9a, 0x0c, 0xbe, + 0x8f, 0xef, 0xce, 0x9f, 0xc4, 0xa5, 0x80, 0x88, 0xa2, 0x7f, 0x86, 0x1b, 0xa2, 0x89, 0xa8, 0xb1, + 0xee, 0x7a, 0xa8, 0x68, 0x78, 0xf3, 0x0d, 0xc2, 0x07, 0x32, 0xd8, 0xfb, 0xf8, 0xce, 0x7c, 0x30, + 0xa6, 0x54, 0xf2, 0x94, 0x82, 0x3e, 0xa4, 0xe3, 0x4c, 0x96, 0xa7, 0x7a, 0x86, 0xe8, 0x7b, 0xe2, + 0x14, 0x7f, 0xfc, 0x32, 0xc6, 0xbb, 0x74, 0x64, 0xfb, 0xae, 0x41, 0x43, 0xd7, 0x74, 0x89, 0x2f, + 0x4f, 0xd8, 0x54, 0x26, 0x3b, 0xf0, 0xd8, 0xe2, 0xaf, 0xe7, 0x57, 0xf2, 0xe3, 0xe5, 0x96, 0x44, + 0x7d, 0xf1, 0x36, 0x00, 0x00, 0xff, 0xff, 0x5b, 0x87, 0x36, 0x9c, 0xf9, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/admin/v1/iam.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/admin/v1/iam.pb.go new file mode 100644 index 0000000..fad9db5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/admin/v1/iam.pb.go @@ -0,0 +1,3129 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/iam/admin/v1/iam.proto + +package admin // import "google.golang.org/genproto/googleapis/iam/admin/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Supported key algorithms. +type ServiceAccountKeyAlgorithm int32 + +const ( + // An unspecified key algorithm. + ServiceAccountKeyAlgorithm_KEY_ALG_UNSPECIFIED ServiceAccountKeyAlgorithm = 0 + // 1k RSA Key. + ServiceAccountKeyAlgorithm_KEY_ALG_RSA_1024 ServiceAccountKeyAlgorithm = 1 + // 2k RSA Key. + ServiceAccountKeyAlgorithm_KEY_ALG_RSA_2048 ServiceAccountKeyAlgorithm = 2 +) + +var ServiceAccountKeyAlgorithm_name = map[int32]string{ + 0: "KEY_ALG_UNSPECIFIED", + 1: "KEY_ALG_RSA_1024", + 2: "KEY_ALG_RSA_2048", +} +var ServiceAccountKeyAlgorithm_value = map[string]int32{ + "KEY_ALG_UNSPECIFIED": 0, + "KEY_ALG_RSA_1024": 1, + "KEY_ALG_RSA_2048": 2, +} + +func (x ServiceAccountKeyAlgorithm) String() string { + return proto.EnumName(ServiceAccountKeyAlgorithm_name, int32(x)) +} +func (ServiceAccountKeyAlgorithm) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{0} +} + +// Supported private key output formats. +type ServiceAccountPrivateKeyType int32 + +const ( + // Unspecified. Equivalent to `TYPE_GOOGLE_CREDENTIALS_FILE`. + ServiceAccountPrivateKeyType_TYPE_UNSPECIFIED ServiceAccountPrivateKeyType = 0 + // PKCS12 format. + // The password for the PKCS12 file is `notasecret`. + // For more information, see https://tools.ietf.org/html/rfc7292. + ServiceAccountPrivateKeyType_TYPE_PKCS12_FILE ServiceAccountPrivateKeyType = 1 + // Google Credentials File format. + ServiceAccountPrivateKeyType_TYPE_GOOGLE_CREDENTIALS_FILE ServiceAccountPrivateKeyType = 2 +) + +var ServiceAccountPrivateKeyType_name = map[int32]string{ + 0: "TYPE_UNSPECIFIED", + 1: "TYPE_PKCS12_FILE", + 2: "TYPE_GOOGLE_CREDENTIALS_FILE", +} +var ServiceAccountPrivateKeyType_value = map[string]int32{ + "TYPE_UNSPECIFIED": 0, + "TYPE_PKCS12_FILE": 1, + "TYPE_GOOGLE_CREDENTIALS_FILE": 2, +} + +func (x ServiceAccountPrivateKeyType) String() string { + return proto.EnumName(ServiceAccountPrivateKeyType_name, int32(x)) +} +func (ServiceAccountPrivateKeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{1} +} + +// Supported public key output formats. +type ServiceAccountPublicKeyType int32 + +const ( + // Unspecified. Returns nothing here. + ServiceAccountPublicKeyType_TYPE_NONE ServiceAccountPublicKeyType = 0 + // X509 PEM format. + ServiceAccountPublicKeyType_TYPE_X509_PEM_FILE ServiceAccountPublicKeyType = 1 + // Raw public key. 
+ ServiceAccountPublicKeyType_TYPE_RAW_PUBLIC_KEY ServiceAccountPublicKeyType = 2 +) + +var ServiceAccountPublicKeyType_name = map[int32]string{ + 0: "TYPE_NONE", + 1: "TYPE_X509_PEM_FILE", + 2: "TYPE_RAW_PUBLIC_KEY", +} +var ServiceAccountPublicKeyType_value = map[string]int32{ + "TYPE_NONE": 0, + "TYPE_X509_PEM_FILE": 1, + "TYPE_RAW_PUBLIC_KEY": 2, +} + +func (x ServiceAccountPublicKeyType) String() string { + return proto.EnumName(ServiceAccountPublicKeyType_name, int32(x)) +} +func (ServiceAccountPublicKeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{2} +} + +// A view for Role objects. +type RoleView int32 + +const ( + // Omits the `included_permissions` field. + // This is the default value. + RoleView_BASIC RoleView = 0 + // Returns all fields. + RoleView_FULL RoleView = 1 +) + +var RoleView_name = map[int32]string{ + 0: "BASIC", + 1: "FULL", +} +var RoleView_value = map[string]int32{ + "BASIC": 0, + "FULL": 1, +} + +func (x RoleView) String() string { + return proto.EnumName(RoleView_name, int32(x)) +} +func (RoleView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{3} +} + +// `KeyType` filters to selectively retrieve certain varieties +// of keys. +type ListServiceAccountKeysRequest_KeyType int32 + +const ( + // Unspecified key type. The presence of this in the + // message will immediately result in an error. + ListServiceAccountKeysRequest_KEY_TYPE_UNSPECIFIED ListServiceAccountKeysRequest_KeyType = 0 + // User-managed keys (managed and rotated by the user). + ListServiceAccountKeysRequest_USER_MANAGED ListServiceAccountKeysRequest_KeyType = 1 + // System-managed keys (managed and rotated by Google). + ListServiceAccountKeysRequest_SYSTEM_MANAGED ListServiceAccountKeysRequest_KeyType = 2 +) + +var ListServiceAccountKeysRequest_KeyType_name = map[int32]string{ + 0: "KEY_TYPE_UNSPECIFIED", + 1: "USER_MANAGED", + 2: "SYSTEM_MANAGED", +} +var ListServiceAccountKeysRequest_KeyType_value = map[string]int32{ + "KEY_TYPE_UNSPECIFIED": 0, + "USER_MANAGED": 1, + "SYSTEM_MANAGED": 2, +} + +func (x ListServiceAccountKeysRequest_KeyType) String() string { + return proto.EnumName(ListServiceAccountKeysRequest_KeyType_name, int32(x)) +} +func (ListServiceAccountKeysRequest_KeyType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{6, 0} +} + +// A stage representing a role's lifecycle phase. +type Role_RoleLaunchStage int32 + +const ( + // The user has indicated this role is currently in an alpha phase. + Role_ALPHA Role_RoleLaunchStage = 0 + // The user has indicated this role is currently in a beta phase. + Role_BETA Role_RoleLaunchStage = 1 + // The user has indicated this role is generally available. + Role_GA Role_RoleLaunchStage = 2 + // The user has indicated this role is being deprecated. + Role_DEPRECATED Role_RoleLaunchStage = 4 + // This role is disabled and will not contribute permissions to any members + // it is granted to in policies. + Role_DISABLED Role_RoleLaunchStage = 5 + // The user has indicated this role is currently in an eap phase. 
+ Role_EAP Role_RoleLaunchStage = 6 +) + +var Role_RoleLaunchStage_name = map[int32]string{ + 0: "ALPHA", + 1: "BETA", + 2: "GA", + 4: "DEPRECATED", + 5: "DISABLED", + 6: "EAP", +} +var Role_RoleLaunchStage_value = map[string]int32{ + "ALPHA": 0, + "BETA": 1, + "GA": 2, + "DEPRECATED": 4, + "DISABLED": 5, + "EAP": 6, +} + +func (x Role_RoleLaunchStage) String() string { + return proto.EnumName(Role_RoleLaunchStage_name, int32(x)) +} +func (Role_RoleLaunchStage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{16, 0} +} + +// A stage representing a permission's lifecycle phase. +type Permission_PermissionLaunchStage int32 + +const ( + // The permission is currently in an alpha phase. + Permission_ALPHA Permission_PermissionLaunchStage = 0 + // The permission is currently in a beta phase. + Permission_BETA Permission_PermissionLaunchStage = 1 + // The permission is generally available. + Permission_GA Permission_PermissionLaunchStage = 2 + // The permission is being deprecated. + Permission_DEPRECATED Permission_PermissionLaunchStage = 3 +) + +var Permission_PermissionLaunchStage_name = map[int32]string{ + 0: "ALPHA", + 1: "BETA", + 2: "GA", + 3: "DEPRECATED", +} +var Permission_PermissionLaunchStage_value = map[string]int32{ + "ALPHA": 0, + "BETA": 1, + "GA": 2, + "DEPRECATED": 3, +} + +func (x Permission_PermissionLaunchStage) String() string { + return proto.EnumName(Permission_PermissionLaunchStage_name, int32(x)) +} +func (Permission_PermissionLaunchStage) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{26, 0} +} + +// The state of the permission with regards to custom roles. +type Permission_CustomRolesSupportLevel int32 + +const ( + // Permission is fully supported for custom role use. + Permission_SUPPORTED Permission_CustomRolesSupportLevel = 0 + // Permission is being tested to check custom role compatibility. + Permission_TESTING Permission_CustomRolesSupportLevel = 1 + // Permission is not supported for custom role use. + Permission_NOT_SUPPORTED Permission_CustomRolesSupportLevel = 2 +) + +var Permission_CustomRolesSupportLevel_name = map[int32]string{ + 0: "SUPPORTED", + 1: "TESTING", + 2: "NOT_SUPPORTED", +} +var Permission_CustomRolesSupportLevel_value = map[string]int32{ + "SUPPORTED": 0, + "TESTING": 1, + "NOT_SUPPORTED": 2, +} + +func (x Permission_CustomRolesSupportLevel) String() string { + return proto.EnumName(Permission_CustomRolesSupportLevel_name, int32(x)) +} +func (Permission_CustomRolesSupportLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{26, 1} +} + +// A service account in the Identity and Access Management API. +// +// To create a service account, specify the `project_id` and the `account_id` +// for the account. The `account_id` is unique within the project, and is used +// to generate the service account email address and a stable +// `unique_id`. +// +// If the account already exists, the account's resource name is returned +// in util::Status's ResourceInfo.resource_name in the format of +// projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}. The caller can +// use the name in other methods to access the account. +// +// All other methods can identify the service account using the format +// `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. +// Using `-` as a wildcard for the project will infer the project from +// the account. 
The `account` value can be the `email` address or the +// `unique_id` of the service account. +type ServiceAccount struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // + // Requests using `-` as a wildcard for the project will infer the project + // from the `account` and the `account` value can be the `email` address or + // the `unique_id` of the service account. + // + // In responses the resource name will always be in the format + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // @OutputOnly The id of the project that owns the service account. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // @OutputOnly The unique and stable id of the service account. + UniqueId string `protobuf:"bytes,4,opt,name=unique_id,json=uniqueId,proto3" json:"unique_id,omitempty"` + // @OutputOnly The email address of the service account. + Email string `protobuf:"bytes,5,opt,name=email,proto3" json:"email,omitempty"` + // Optional. A user-specified description of the service account. Must be + // fewer than 100 UTF-8 bytes. + DisplayName string `protobuf:"bytes,6,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Used to perform a consistent read-modify-write. + Etag []byte `protobuf:"bytes,7,opt,name=etag,proto3" json:"etag,omitempty"` + // @OutputOnly. The OAuth2 client id for the service account. + // This is used in conjunction with the OAuth2 clientconfig API to make + // three legged OAuth2 (3LO) flows to access the data of Google users. + Oauth2ClientId string `protobuf:"bytes,9,opt,name=oauth2_client_id,json=oauth2ClientId,proto3" json:"oauth2_client_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceAccount) Reset() { *m = ServiceAccount{} } +func (m *ServiceAccount) String() string { return proto.CompactTextString(m) } +func (*ServiceAccount) ProtoMessage() {} +func (*ServiceAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{0} +} +func (m *ServiceAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceAccount.Unmarshal(m, b) +} +func (m *ServiceAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceAccount.Marshal(b, m, deterministic) +} +func (dst *ServiceAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceAccount.Merge(dst, src) +} +func (m *ServiceAccount) XXX_Size() int { + return xxx_messageInfo_ServiceAccount.Size(m) +} +func (m *ServiceAccount) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceAccount proto.InternalMessageInfo + +func (m *ServiceAccount) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ServiceAccount) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *ServiceAccount) GetUniqueId() string { + if m != nil { + return m.UniqueId + } + return "" +} + +func (m *ServiceAccount) GetEmail() string { + if m != nil { + return m.Email + } + return "" +} + +func (m *ServiceAccount) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *ServiceAccount) GetEtag() []byte { + if m != nil { + return m.Etag + } + 
return nil +} + +func (m *ServiceAccount) GetOauth2ClientId() string { + if m != nil { + return m.Oauth2ClientId + } + return "" +} + +// The service account create request. +type CreateServiceAccountRequest struct { + // Required. The resource name of the project associated with the service + // accounts, such as `projects/my-project-123`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The account id that is used to generate the service account + // email address and a stable unique id. It is unique within a project, + // must be 6-30 characters long, and match the regular expression + // `[a-z]([-a-z0-9]*[a-z0-9])` to comply with RFC1035. + AccountId string `protobuf:"bytes,2,opt,name=account_id,json=accountId,proto3" json:"account_id,omitempty"` + // The [ServiceAccount][google.iam.admin.v1.ServiceAccount] resource to + // create. Currently, only the following values are user assignable: + // `display_name` . + ServiceAccount *ServiceAccount `protobuf:"bytes,3,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateServiceAccountRequest) Reset() { *m = CreateServiceAccountRequest{} } +func (m *CreateServiceAccountRequest) String() string { return proto.CompactTextString(m) } +func (*CreateServiceAccountRequest) ProtoMessage() {} +func (*CreateServiceAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{1} +} +func (m *CreateServiceAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateServiceAccountRequest.Unmarshal(m, b) +} +func (m *CreateServiceAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateServiceAccountRequest.Marshal(b, m, deterministic) +} +func (dst *CreateServiceAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateServiceAccountRequest.Merge(dst, src) +} +func (m *CreateServiceAccountRequest) XXX_Size() int { + return xxx_messageInfo_CreateServiceAccountRequest.Size(m) +} +func (m *CreateServiceAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateServiceAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateServiceAccountRequest proto.InternalMessageInfo + +func (m *CreateServiceAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateServiceAccountRequest) GetAccountId() string { + if m != nil { + return m.AccountId + } + return "" +} + +func (m *CreateServiceAccountRequest) GetServiceAccount() *ServiceAccount { + if m != nil { + return m.ServiceAccount + } + return nil +} + +// The service account list request. +type ListServiceAccountsRequest struct { + // Required. The resource name of the project associated with the service + // accounts, such as `projects/my-project-123`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional limit on the number of service accounts to include in the + // response. Further accounts can subsequently be obtained by including the + // [ListServiceAccountsResponse.next_page_token][google.iam.admin.v1.ListServiceAccountsResponse.next_page_token] + // in a subsequent request. 
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional pagination token returned in an earlier + // [ListServiceAccountsResponse.next_page_token][google.iam.admin.v1.ListServiceAccountsResponse.next_page_token]. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceAccountsRequest) Reset() { *m = ListServiceAccountsRequest{} } +func (m *ListServiceAccountsRequest) String() string { return proto.CompactTextString(m) } +func (*ListServiceAccountsRequest) ProtoMessage() {} +func (*ListServiceAccountsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{2} +} +func (m *ListServiceAccountsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceAccountsRequest.Unmarshal(m, b) +} +func (m *ListServiceAccountsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceAccountsRequest.Marshal(b, m, deterministic) +} +func (dst *ListServiceAccountsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceAccountsRequest.Merge(dst, src) +} +func (m *ListServiceAccountsRequest) XXX_Size() int { + return xxx_messageInfo_ListServiceAccountsRequest.Size(m) +} +func (m *ListServiceAccountsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceAccountsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceAccountsRequest proto.InternalMessageInfo + +func (m *ListServiceAccountsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListServiceAccountsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListServiceAccountsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The service account list response. +type ListServiceAccountsResponse struct { + // The list of matching service accounts. + Accounts []*ServiceAccount `protobuf:"bytes,1,rep,name=accounts,proto3" json:"accounts,omitempty"` + // To retrieve the next page of results, set + // [ListServiceAccountsRequest.page_token][google.iam.admin.v1.ListServiceAccountsRequest.page_token] + // to this value. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceAccountsResponse) Reset() { *m = ListServiceAccountsResponse{} } +func (m *ListServiceAccountsResponse) String() string { return proto.CompactTextString(m) } +func (*ListServiceAccountsResponse) ProtoMessage() {} +func (*ListServiceAccountsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{3} +} +func (m *ListServiceAccountsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceAccountsResponse.Unmarshal(m, b) +} +func (m *ListServiceAccountsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceAccountsResponse.Marshal(b, m, deterministic) +} +func (dst *ListServiceAccountsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceAccountsResponse.Merge(dst, src) +} +func (m *ListServiceAccountsResponse) XXX_Size() int { + return xxx_messageInfo_ListServiceAccountsResponse.Size(m) +} +func (m *ListServiceAccountsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceAccountsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceAccountsResponse proto.InternalMessageInfo + +func (m *ListServiceAccountsResponse) GetAccounts() []*ServiceAccount { + if m != nil { + return m.Accounts + } + return nil +} + +func (m *ListServiceAccountsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The service account get request. +type GetServiceAccountRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceAccountRequest) Reset() { *m = GetServiceAccountRequest{} } +func (m *GetServiceAccountRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceAccountRequest) ProtoMessage() {} +func (*GetServiceAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{4} +} +func (m *GetServiceAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceAccountRequest.Unmarshal(m, b) +} +func (m *GetServiceAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceAccountRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceAccountRequest.Merge(dst, src) +} +func (m *GetServiceAccountRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceAccountRequest.Size(m) +} +func (m *GetServiceAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceAccountRequest proto.InternalMessageInfo + +func (m *GetServiceAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The service account delete request. 
+type DeleteServiceAccountRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteServiceAccountRequest) Reset() { *m = DeleteServiceAccountRequest{} } +func (m *DeleteServiceAccountRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteServiceAccountRequest) ProtoMessage() {} +func (*DeleteServiceAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{5} +} +func (m *DeleteServiceAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteServiceAccountRequest.Unmarshal(m, b) +} +func (m *DeleteServiceAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteServiceAccountRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteServiceAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteServiceAccountRequest.Merge(dst, src) +} +func (m *DeleteServiceAccountRequest) XXX_Size() int { + return xxx_messageInfo_DeleteServiceAccountRequest.Size(m) +} +func (m *DeleteServiceAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteServiceAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteServiceAccountRequest proto.InternalMessageInfo + +func (m *DeleteServiceAccountRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The service account keys list request. +type ListServiceAccountKeysRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // + // Using `-` as a wildcard for the project, will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Filters the types of keys the user wants to include in the list + // response. Duplicate key types are not allowed. If no key type + // is provided, all keys are returned. 
+ KeyTypes []ListServiceAccountKeysRequest_KeyType `protobuf:"varint,2,rep,packed,name=key_types,json=keyTypes,proto3,enum=google.iam.admin.v1.ListServiceAccountKeysRequest_KeyType" json:"key_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceAccountKeysRequest) Reset() { *m = ListServiceAccountKeysRequest{} } +func (m *ListServiceAccountKeysRequest) String() string { return proto.CompactTextString(m) } +func (*ListServiceAccountKeysRequest) ProtoMessage() {} +func (*ListServiceAccountKeysRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{6} +} +func (m *ListServiceAccountKeysRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceAccountKeysRequest.Unmarshal(m, b) +} +func (m *ListServiceAccountKeysRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceAccountKeysRequest.Marshal(b, m, deterministic) +} +func (dst *ListServiceAccountKeysRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceAccountKeysRequest.Merge(dst, src) +} +func (m *ListServiceAccountKeysRequest) XXX_Size() int { + return xxx_messageInfo_ListServiceAccountKeysRequest.Size(m) +} +func (m *ListServiceAccountKeysRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceAccountKeysRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceAccountKeysRequest proto.InternalMessageInfo + +func (m *ListServiceAccountKeysRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListServiceAccountKeysRequest) GetKeyTypes() []ListServiceAccountKeysRequest_KeyType { + if m != nil { + return m.KeyTypes + } + return nil +} + +// The service account keys list response. +type ListServiceAccountKeysResponse struct { + // The public keys for the service account. + Keys []*ServiceAccountKey `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceAccountKeysResponse) Reset() { *m = ListServiceAccountKeysResponse{} } +func (m *ListServiceAccountKeysResponse) String() string { return proto.CompactTextString(m) } +func (*ListServiceAccountKeysResponse) ProtoMessage() {} +func (*ListServiceAccountKeysResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{7} +} +func (m *ListServiceAccountKeysResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceAccountKeysResponse.Unmarshal(m, b) +} +func (m *ListServiceAccountKeysResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceAccountKeysResponse.Marshal(b, m, deterministic) +} +func (dst *ListServiceAccountKeysResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceAccountKeysResponse.Merge(dst, src) +} +func (m *ListServiceAccountKeysResponse) XXX_Size() int { + return xxx_messageInfo_ListServiceAccountKeysResponse.Size(m) +} +func (m *ListServiceAccountKeysResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceAccountKeysResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceAccountKeysResponse proto.InternalMessageInfo + +func (m *ListServiceAccountKeysResponse) GetKeys() []*ServiceAccountKey { + if m != nil { + return m.Keys + } + return nil +} + +// The service account key get by id request. 
+type GetServiceAccountKeyRequest struct { + // The resource name of the service account key in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}/keys/{key}`. + // + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The output format of the public key requested. + // X509_PEM is the default output format. + PublicKeyType ServiceAccountPublicKeyType `protobuf:"varint,2,opt,name=public_key_type,json=publicKeyType,proto3,enum=google.iam.admin.v1.ServiceAccountPublicKeyType" json:"public_key_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServiceAccountKeyRequest) Reset() { *m = GetServiceAccountKeyRequest{} } +func (m *GetServiceAccountKeyRequest) String() string { return proto.CompactTextString(m) } +func (*GetServiceAccountKeyRequest) ProtoMessage() {} +func (*GetServiceAccountKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{8} +} +func (m *GetServiceAccountKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServiceAccountKeyRequest.Unmarshal(m, b) +} +func (m *GetServiceAccountKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServiceAccountKeyRequest.Marshal(b, m, deterministic) +} +func (dst *GetServiceAccountKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServiceAccountKeyRequest.Merge(dst, src) +} +func (m *GetServiceAccountKeyRequest) XXX_Size() int { + return xxx_messageInfo_GetServiceAccountKeyRequest.Size(m) +} +func (m *GetServiceAccountKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServiceAccountKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServiceAccountKeyRequest proto.InternalMessageInfo + +func (m *GetServiceAccountKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetServiceAccountKeyRequest) GetPublicKeyType() ServiceAccountPublicKeyType { + if m != nil { + return m.PublicKeyType + } + return ServiceAccountPublicKeyType_TYPE_NONE +} + +// Represents a service account key. +// +// A service account has two sets of key-pairs: user-managed, and +// system-managed. +// +// User-managed key-pairs can be created and deleted by users. Users are +// responsible for rotating these keys periodically to ensure security of +// their service accounts. Users retain the private key of these key-pairs, +// and Google retains ONLY the public key. +// +// System-managed key-pairs are managed automatically by Google, and rotated +// daily without user intervention. The private key never leaves Google's +// servers to maximize security. +// +// Public keys for all service accounts are also published at the OAuth2 +// Service Account API. +type ServiceAccountKey struct { + // The resource name of the service account key in the following format + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}/keys/{key}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The output format for the private key. + // Only provided in `CreateServiceAccountKey` responses, not + // in `GetServiceAccountKey` or `ListServiceAccountKey` responses. 
+ // + // Google never exposes system-managed private keys, and never retains + // user-managed private keys. + PrivateKeyType ServiceAccountPrivateKeyType `protobuf:"varint,2,opt,name=private_key_type,json=privateKeyType,proto3,enum=google.iam.admin.v1.ServiceAccountPrivateKeyType" json:"private_key_type,omitempty"` + // Specifies the algorithm (and possibly key size) for the key. + KeyAlgorithm ServiceAccountKeyAlgorithm `protobuf:"varint,8,opt,name=key_algorithm,json=keyAlgorithm,proto3,enum=google.iam.admin.v1.ServiceAccountKeyAlgorithm" json:"key_algorithm,omitempty"` + // The private key data. Only provided in `CreateServiceAccountKey` + // responses. Make sure to keep the private key data secure because it + // allows for the assertion of the service account identity. + // When decoded, the private key data can be used to authenticate with + // Google API client libraries and with + // gcloud + // auth activate-service-account. + PrivateKeyData []byte `protobuf:"bytes,3,opt,name=private_key_data,json=privateKeyData,proto3" json:"private_key_data,omitempty"` + // The public key data. Only provided in `GetServiceAccountKey` responses. + PublicKeyData []byte `protobuf:"bytes,7,opt,name=public_key_data,json=publicKeyData,proto3" json:"public_key_data,omitempty"` + // The key can be used after this timestamp. + ValidAfterTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=valid_after_time,json=validAfterTime,proto3" json:"valid_after_time,omitempty"` + // The key can be used before this timestamp. + ValidBeforeTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=valid_before_time,json=validBeforeTime,proto3" json:"valid_before_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceAccountKey) Reset() { *m = ServiceAccountKey{} } +func (m *ServiceAccountKey) String() string { return proto.CompactTextString(m) } +func (*ServiceAccountKey) ProtoMessage() {} +func (*ServiceAccountKey) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{9} +} +func (m *ServiceAccountKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceAccountKey.Unmarshal(m, b) +} +func (m *ServiceAccountKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceAccountKey.Marshal(b, m, deterministic) +} +func (dst *ServiceAccountKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceAccountKey.Merge(dst, src) +} +func (m *ServiceAccountKey) XXX_Size() int { + return xxx_messageInfo_ServiceAccountKey.Size(m) +} +func (m *ServiceAccountKey) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceAccountKey.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceAccountKey proto.InternalMessageInfo + +func (m *ServiceAccountKey) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ServiceAccountKey) GetPrivateKeyType() ServiceAccountPrivateKeyType { + if m != nil { + return m.PrivateKeyType + } + return ServiceAccountPrivateKeyType_TYPE_UNSPECIFIED +} + +func (m *ServiceAccountKey) GetKeyAlgorithm() ServiceAccountKeyAlgorithm { + if m != nil { + return m.KeyAlgorithm + } + return ServiceAccountKeyAlgorithm_KEY_ALG_UNSPECIFIED +} + +func (m *ServiceAccountKey) GetPrivateKeyData() []byte { + if m != nil { + return m.PrivateKeyData + } + return nil +} + +func (m *ServiceAccountKey) GetPublicKeyData() []byte { + if m != nil { + return m.PublicKeyData + } + return nil +} + +func (m *ServiceAccountKey) GetValidAfterTime() 
*timestamp.Timestamp { + if m != nil { + return m.ValidAfterTime + } + return nil +} + +func (m *ServiceAccountKey) GetValidBeforeTime() *timestamp.Timestamp { + if m != nil { + return m.ValidBeforeTime + } + return nil +} + +// The service account key create request. +type CreateServiceAccountKeyRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the + // default output format. + PrivateKeyType ServiceAccountPrivateKeyType `protobuf:"varint,2,opt,name=private_key_type,json=privateKeyType,proto3,enum=google.iam.admin.v1.ServiceAccountPrivateKeyType" json:"private_key_type,omitempty"` + // Which type of key and algorithm to use for the key. + // The default is currently a 2K RSA key. However this may change in the + // future. + KeyAlgorithm ServiceAccountKeyAlgorithm `protobuf:"varint,3,opt,name=key_algorithm,json=keyAlgorithm,proto3,enum=google.iam.admin.v1.ServiceAccountKeyAlgorithm" json:"key_algorithm,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateServiceAccountKeyRequest) Reset() { *m = CreateServiceAccountKeyRequest{} } +func (m *CreateServiceAccountKeyRequest) String() string { return proto.CompactTextString(m) } +func (*CreateServiceAccountKeyRequest) ProtoMessage() {} +func (*CreateServiceAccountKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{10} +} +func (m *CreateServiceAccountKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateServiceAccountKeyRequest.Unmarshal(m, b) +} +func (m *CreateServiceAccountKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateServiceAccountKeyRequest.Marshal(b, m, deterministic) +} +func (dst *CreateServiceAccountKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateServiceAccountKeyRequest.Merge(dst, src) +} +func (m *CreateServiceAccountKeyRequest) XXX_Size() int { + return xxx_messageInfo_CreateServiceAccountKeyRequest.Size(m) +} +func (m *CreateServiceAccountKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateServiceAccountKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateServiceAccountKeyRequest proto.InternalMessageInfo + +func (m *CreateServiceAccountKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateServiceAccountKeyRequest) GetPrivateKeyType() ServiceAccountPrivateKeyType { + if m != nil { + return m.PrivateKeyType + } + return ServiceAccountPrivateKeyType_TYPE_UNSPECIFIED +} + +func (m *CreateServiceAccountKeyRequest) GetKeyAlgorithm() ServiceAccountKeyAlgorithm { + if m != nil { + return m.KeyAlgorithm + } + return ServiceAccountKeyAlgorithm_KEY_ALG_UNSPECIFIED +} + +// The service account key delete request. +type DeleteServiceAccountKeyRequest struct { + // The resource name of the service account key in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}/keys/{key}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. 
The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteServiceAccountKeyRequest) Reset() { *m = DeleteServiceAccountKeyRequest{} } +func (m *DeleteServiceAccountKeyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteServiceAccountKeyRequest) ProtoMessage() {} +func (*DeleteServiceAccountKeyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{11} +} +func (m *DeleteServiceAccountKeyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteServiceAccountKeyRequest.Unmarshal(m, b) +} +func (m *DeleteServiceAccountKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteServiceAccountKeyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteServiceAccountKeyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteServiceAccountKeyRequest.Merge(dst, src) +} +func (m *DeleteServiceAccountKeyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteServiceAccountKeyRequest.Size(m) +} +func (m *DeleteServiceAccountKeyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteServiceAccountKeyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteServiceAccountKeyRequest proto.InternalMessageInfo + +func (m *DeleteServiceAccountKeyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The service account sign blob request. +type SignBlobRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The bytes to sign. 
+ BytesToSign []byte `protobuf:"bytes,2,opt,name=bytes_to_sign,json=bytesToSign,proto3" json:"bytes_to_sign,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignBlobRequest) Reset() { *m = SignBlobRequest{} } +func (m *SignBlobRequest) String() string { return proto.CompactTextString(m) } +func (*SignBlobRequest) ProtoMessage() {} +func (*SignBlobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{12} +} +func (m *SignBlobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignBlobRequest.Unmarshal(m, b) +} +func (m *SignBlobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignBlobRequest.Marshal(b, m, deterministic) +} +func (dst *SignBlobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignBlobRequest.Merge(dst, src) +} +func (m *SignBlobRequest) XXX_Size() int { + return xxx_messageInfo_SignBlobRequest.Size(m) +} +func (m *SignBlobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SignBlobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SignBlobRequest proto.InternalMessageInfo + +func (m *SignBlobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SignBlobRequest) GetBytesToSign() []byte { + if m != nil { + return m.BytesToSign + } + return nil +} + +// The service account sign blob response. +type SignBlobResponse struct { + // The id of the key used to sign the blob. + KeyId string `protobuf:"bytes,1,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The signed blob. + Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignBlobResponse) Reset() { *m = SignBlobResponse{} } +func (m *SignBlobResponse) String() string { return proto.CompactTextString(m) } +func (*SignBlobResponse) ProtoMessage() {} +func (*SignBlobResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{13} +} +func (m *SignBlobResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignBlobResponse.Unmarshal(m, b) +} +func (m *SignBlobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignBlobResponse.Marshal(b, m, deterministic) +} +func (dst *SignBlobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignBlobResponse.Merge(dst, src) +} +func (m *SignBlobResponse) XXX_Size() int { + return xxx_messageInfo_SignBlobResponse.Size(m) +} +func (m *SignBlobResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SignBlobResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SignBlobResponse proto.InternalMessageInfo + +func (m *SignBlobResponse) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *SignBlobResponse) GetSignature() []byte { + if m != nil { + return m.Signature + } + return nil +} + +// The service account sign JWT request. +type SignJwtRequest struct { + // The resource name of the service account in the following format: + // `projects/{PROJECT_ID}/serviceAccounts/{SERVICE_ACCOUNT_EMAIL}`. + // Using `-` as a wildcard for the project will infer the project from + // the account. The `account` value can be the `email` address or the + // `unique_id` of the service account. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The JWT payload to sign, a JSON JWT Claim set. + Payload string `protobuf:"bytes,2,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignJwtRequest) Reset() { *m = SignJwtRequest{} } +func (m *SignJwtRequest) String() string { return proto.CompactTextString(m) } +func (*SignJwtRequest) ProtoMessage() {} +func (*SignJwtRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{14} +} +func (m *SignJwtRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignJwtRequest.Unmarshal(m, b) +} +func (m *SignJwtRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignJwtRequest.Marshal(b, m, deterministic) +} +func (dst *SignJwtRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignJwtRequest.Merge(dst, src) +} +func (m *SignJwtRequest) XXX_Size() int { + return xxx_messageInfo_SignJwtRequest.Size(m) +} +func (m *SignJwtRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SignJwtRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SignJwtRequest proto.InternalMessageInfo + +func (m *SignJwtRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SignJwtRequest) GetPayload() string { + if m != nil { + return m.Payload + } + return "" +} + +// The service account sign JWT response. +type SignJwtResponse struct { + // The id of the key used to sign the JWT. + KeyId string `protobuf:"bytes,1,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The signed JWT. + SignedJwt string `protobuf:"bytes,2,opt,name=signed_jwt,json=signedJwt,proto3" json:"signed_jwt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignJwtResponse) Reset() { *m = SignJwtResponse{} } +func (m *SignJwtResponse) String() string { return proto.CompactTextString(m) } +func (*SignJwtResponse) ProtoMessage() {} +func (*SignJwtResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{15} +} +func (m *SignJwtResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignJwtResponse.Unmarshal(m, b) +} +func (m *SignJwtResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignJwtResponse.Marshal(b, m, deterministic) +} +func (dst *SignJwtResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignJwtResponse.Merge(dst, src) +} +func (m *SignJwtResponse) XXX_Size() int { + return xxx_messageInfo_SignJwtResponse.Size(m) +} +func (m *SignJwtResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SignJwtResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SignJwtResponse proto.InternalMessageInfo + +func (m *SignJwtResponse) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *SignJwtResponse) GetSignedJwt() string { + if m != nil { + return m.SignedJwt + } + return "" +} + +// A role in the Identity and Access Management API. +type Role struct { + // The name of the role. + // + // When Role is used in CreateRole, the role name must not be set. + // + // When Role is used in output and other input such as UpdateRole, the role + // name is the complete path, e.g., roles/logging.viewer for curated roles + // and organizations/{ORGANIZATION_ID}/roles/logging.viewer for custom roles. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. A human-readable title for the role. Typically this + // is limited to 100 UTF-8 bytes. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // Optional. A human-readable description for the role. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The names of the permissions this role grants when bound in an IAM policy. + IncludedPermissions []string `protobuf:"bytes,7,rep,name=included_permissions,json=includedPermissions,proto3" json:"included_permissions,omitempty"` + // The current launch stage of the role. + Stage Role_RoleLaunchStage `protobuf:"varint,8,opt,name=stage,proto3,enum=google.iam.admin.v1.Role_RoleLaunchStage" json:"stage,omitempty"` + // Used to perform a consistent read-modify-write. + Etag []byte `protobuf:"bytes,9,opt,name=etag,proto3" json:"etag,omitempty"` + // The current deleted state of the role. This field is read only. + // It will be ignored in calls to CreateRole and UpdateRole. + Deleted bool `protobuf:"varint,11,opt,name=deleted,proto3" json:"deleted,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Role) Reset() { *m = Role{} } +func (m *Role) String() string { return proto.CompactTextString(m) } +func (*Role) ProtoMessage() {} +func (*Role) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{16} +} +func (m *Role) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Role.Unmarshal(m, b) +} +func (m *Role) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Role.Marshal(b, m, deterministic) +} +func (dst *Role) XXX_Merge(src proto.Message) { + xxx_messageInfo_Role.Merge(dst, src) +} +func (m *Role) XXX_Size() int { + return xxx_messageInfo_Role.Size(m) +} +func (m *Role) XXX_DiscardUnknown() { + xxx_messageInfo_Role.DiscardUnknown(m) +} + +var xxx_messageInfo_Role proto.InternalMessageInfo + +func (m *Role) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Role) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Role) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Role) GetIncludedPermissions() []string { + if m != nil { + return m.IncludedPermissions + } + return nil +} + +func (m *Role) GetStage() Role_RoleLaunchStage { + if m != nil { + return m.Stage + } + return Role_ALPHA +} + +func (m *Role) GetEtag() []byte { + if m != nil { + return m.Etag + } + return nil +} + +func (m *Role) GetDeleted() bool { + if m != nil { + return m.Deleted + } + return false +} + +// The grantable role query request. +type QueryGrantableRolesRequest struct { + // Required. The full resource name to query from the list of grantable roles. + // + // The name follows the Google Cloud Platform resource format. + // For example, a Cloud Platform project with id `my-project` will be named + // `//cloudresourcemanager.googleapis.com/projects/my-project`. + FullResourceName string `protobuf:"bytes,1,opt,name=full_resource_name,json=fullResourceName,proto3" json:"full_resource_name,omitempty"` + View RoleView `protobuf:"varint,2,opt,name=view,proto3,enum=google.iam.admin.v1.RoleView" json:"view,omitempty"` + // Optional limit on the number of roles to include in the response. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional pagination token returned in an earlier + // QueryGrantableRolesResponse. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryGrantableRolesRequest) Reset() { *m = QueryGrantableRolesRequest{} } +func (m *QueryGrantableRolesRequest) String() string { return proto.CompactTextString(m) } +func (*QueryGrantableRolesRequest) ProtoMessage() {} +func (*QueryGrantableRolesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{17} +} +func (m *QueryGrantableRolesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryGrantableRolesRequest.Unmarshal(m, b) +} +func (m *QueryGrantableRolesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryGrantableRolesRequest.Marshal(b, m, deterministic) +} +func (dst *QueryGrantableRolesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryGrantableRolesRequest.Merge(dst, src) +} +func (m *QueryGrantableRolesRequest) XXX_Size() int { + return xxx_messageInfo_QueryGrantableRolesRequest.Size(m) +} +func (m *QueryGrantableRolesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueryGrantableRolesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryGrantableRolesRequest proto.InternalMessageInfo + +func (m *QueryGrantableRolesRequest) GetFullResourceName() string { + if m != nil { + return m.FullResourceName + } + return "" +} + +func (m *QueryGrantableRolesRequest) GetView() RoleView { + if m != nil { + return m.View + } + return RoleView_BASIC +} + +func (m *QueryGrantableRolesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *QueryGrantableRolesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The grantable role query response. +type QueryGrantableRolesResponse struct { + // The list of matching roles. + Roles []*Role `protobuf:"bytes,1,rep,name=roles,proto3" json:"roles,omitempty"` + // To retrieve the next page of results, set + // `QueryGrantableRolesRequest.page_token` to this value. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryGrantableRolesResponse) Reset() { *m = QueryGrantableRolesResponse{} } +func (m *QueryGrantableRolesResponse) String() string { return proto.CompactTextString(m) } +func (*QueryGrantableRolesResponse) ProtoMessage() {} +func (*QueryGrantableRolesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{18} +} +func (m *QueryGrantableRolesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryGrantableRolesResponse.Unmarshal(m, b) +} +func (m *QueryGrantableRolesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryGrantableRolesResponse.Marshal(b, m, deterministic) +} +func (dst *QueryGrantableRolesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryGrantableRolesResponse.Merge(dst, src) +} +func (m *QueryGrantableRolesResponse) XXX_Size() int { + return xxx_messageInfo_QueryGrantableRolesResponse.Size(m) +} +func (m *QueryGrantableRolesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueryGrantableRolesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryGrantableRolesResponse proto.InternalMessageInfo + +func (m *QueryGrantableRolesResponse) GetRoles() []*Role { + if m != nil { + return m.Roles + } + return nil +} + +func (m *QueryGrantableRolesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request to get all roles defined under a resource. +type ListRolesRequest struct { + // The resource name of the parent resource in one of the following formats: + // `` (empty string) -- this refers to curated roles. + // `organizations/{ORGANIZATION_ID}` + // `projects/{PROJECT_ID}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional limit on the number of roles to include in the response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional pagination token returned in an earlier ListRolesResponse. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional view for the returned Role objects. + View RoleView `protobuf:"varint,4,opt,name=view,proto3,enum=google.iam.admin.v1.RoleView" json:"view,omitempty"` + // Include Roles that have been deleted. 
+ ShowDeleted bool `protobuf:"varint,6,opt,name=show_deleted,json=showDeleted,proto3" json:"show_deleted,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListRolesRequest) Reset() { *m = ListRolesRequest{} } +func (m *ListRolesRequest) String() string { return proto.CompactTextString(m) } +func (*ListRolesRequest) ProtoMessage() {} +func (*ListRolesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{19} +} +func (m *ListRolesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListRolesRequest.Unmarshal(m, b) +} +func (m *ListRolesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListRolesRequest.Marshal(b, m, deterministic) +} +func (dst *ListRolesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListRolesRequest.Merge(dst, src) +} +func (m *ListRolesRequest) XXX_Size() int { + return xxx_messageInfo_ListRolesRequest.Size(m) +} +func (m *ListRolesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListRolesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListRolesRequest proto.InternalMessageInfo + +func (m *ListRolesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListRolesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListRolesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListRolesRequest) GetView() RoleView { + if m != nil { + return m.View + } + return RoleView_BASIC +} + +func (m *ListRolesRequest) GetShowDeleted() bool { + if m != nil { + return m.ShowDeleted + } + return false +} + +// The response containing the roles defined under a resource. +type ListRolesResponse struct { + // The Roles defined on this resource. + Roles []*Role `protobuf:"bytes,1,rep,name=roles,proto3" json:"roles,omitempty"` + // To retrieve the next page of results, set + // `ListRolesRequest.page_token` to this value. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListRolesResponse) Reset() { *m = ListRolesResponse{} } +func (m *ListRolesResponse) String() string { return proto.CompactTextString(m) } +func (*ListRolesResponse) ProtoMessage() {} +func (*ListRolesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{20} +} +func (m *ListRolesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListRolesResponse.Unmarshal(m, b) +} +func (m *ListRolesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListRolesResponse.Marshal(b, m, deterministic) +} +func (dst *ListRolesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListRolesResponse.Merge(dst, src) +} +func (m *ListRolesResponse) XXX_Size() int { + return xxx_messageInfo_ListRolesResponse.Size(m) +} +func (m *ListRolesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListRolesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListRolesResponse proto.InternalMessageInfo + +func (m *ListRolesResponse) GetRoles() []*Role { + if m != nil { + return m.Roles + } + return nil +} + +func (m *ListRolesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request to get the definition of an existing role. +type GetRoleRequest struct { + // The resource name of the role in one of the following formats: + // `roles/{ROLE_NAME}` + // `organizations/{ORGANIZATION_ID}/roles/{ROLE_NAME}` + // `projects/{PROJECT_ID}/roles/{ROLE_NAME}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRoleRequest) Reset() { *m = GetRoleRequest{} } +func (m *GetRoleRequest) String() string { return proto.CompactTextString(m) } +func (*GetRoleRequest) ProtoMessage() {} +func (*GetRoleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{21} +} +func (m *GetRoleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRoleRequest.Unmarshal(m, b) +} +func (m *GetRoleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRoleRequest.Marshal(b, m, deterministic) +} +func (dst *GetRoleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRoleRequest.Merge(dst, src) +} +func (m *GetRoleRequest) XXX_Size() int { + return xxx_messageInfo_GetRoleRequest.Size(m) +} +func (m *GetRoleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRoleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRoleRequest proto.InternalMessageInfo + +func (m *GetRoleRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request to create a new role. +type CreateRoleRequest struct { + // The resource name of the parent resource in one of the following formats: + // `organizations/{ORGANIZATION_ID}` + // `projects/{PROJECT_ID}` + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The role id to use for this role. + RoleId string `protobuf:"bytes,2,opt,name=role_id,json=roleId,proto3" json:"role_id,omitempty"` + // The Role resource to create. 
+ Role *Role `protobuf:"bytes,3,opt,name=role,proto3" json:"role,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateRoleRequest) Reset() { *m = CreateRoleRequest{} } +func (m *CreateRoleRequest) String() string { return proto.CompactTextString(m) } +func (*CreateRoleRequest) ProtoMessage() {} +func (*CreateRoleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{22} +} +func (m *CreateRoleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateRoleRequest.Unmarshal(m, b) +} +func (m *CreateRoleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateRoleRequest.Marshal(b, m, deterministic) +} +func (dst *CreateRoleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateRoleRequest.Merge(dst, src) +} +func (m *CreateRoleRequest) XXX_Size() int { + return xxx_messageInfo_CreateRoleRequest.Size(m) +} +func (m *CreateRoleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateRoleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateRoleRequest proto.InternalMessageInfo + +func (m *CreateRoleRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateRoleRequest) GetRoleId() string { + if m != nil { + return m.RoleId + } + return "" +} + +func (m *CreateRoleRequest) GetRole() *Role { + if m != nil { + return m.Role + } + return nil +} + +// The request to update a role. +type UpdateRoleRequest struct { + // The resource name of the role in one of the following formats: + // `roles/{ROLE_NAME}` + // `organizations/{ORGANIZATION_ID}/roles/{ROLE_NAME}` + // `projects/{PROJECT_ID}/roles/{ROLE_NAME}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The updated role. + Role *Role `protobuf:"bytes,2,opt,name=role,proto3" json:"role,omitempty"` + // A mask describing which fields in the Role have changed. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateRoleRequest) Reset() { *m = UpdateRoleRequest{} } +func (m *UpdateRoleRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateRoleRequest) ProtoMessage() {} +func (*UpdateRoleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{23} +} +func (m *UpdateRoleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateRoleRequest.Unmarshal(m, b) +} +func (m *UpdateRoleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateRoleRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateRoleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateRoleRequest.Merge(dst, src) +} +func (m *UpdateRoleRequest) XXX_Size() int { + return xxx_messageInfo_UpdateRoleRequest.Size(m) +} +func (m *UpdateRoleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateRoleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateRoleRequest proto.InternalMessageInfo + +func (m *UpdateRoleRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateRoleRequest) GetRole() *Role { + if m != nil { + return m.Role + } + return nil +} + +func (m *UpdateRoleRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The request to delete an existing role. +type DeleteRoleRequest struct { + // The resource name of the role in one of the following formats: + // `organizations/{ORGANIZATION_ID}/roles/{ROLE_NAME}` + // `projects/{PROJECT_ID}/roles/{ROLE_NAME}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Used to perform a consistent read-modify-write. + Etag []byte `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteRoleRequest) Reset() { *m = DeleteRoleRequest{} } +func (m *DeleteRoleRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteRoleRequest) ProtoMessage() {} +func (*DeleteRoleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{24} +} +func (m *DeleteRoleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteRoleRequest.Unmarshal(m, b) +} +func (m *DeleteRoleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteRoleRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteRoleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteRoleRequest.Merge(dst, src) +} +func (m *DeleteRoleRequest) XXX_Size() int { + return xxx_messageInfo_DeleteRoleRequest.Size(m) +} +func (m *DeleteRoleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteRoleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteRoleRequest proto.InternalMessageInfo + +func (m *DeleteRoleRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteRoleRequest) GetEtag() []byte { + if m != nil { + return m.Etag + } + return nil +} + +// The request to undelete an existing role. 
+type UndeleteRoleRequest struct { + // The resource name of the role in one of the following formats: + // `organizations/{ORGANIZATION_ID}/roles/{ROLE_NAME}` + // `projects/{PROJECT_ID}/roles/{ROLE_NAME}` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Used to perform a consistent read-modify-write. + Etag []byte `protobuf:"bytes,2,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UndeleteRoleRequest) Reset() { *m = UndeleteRoleRequest{} } +func (m *UndeleteRoleRequest) String() string { return proto.CompactTextString(m) } +func (*UndeleteRoleRequest) ProtoMessage() {} +func (*UndeleteRoleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{25} +} +func (m *UndeleteRoleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UndeleteRoleRequest.Unmarshal(m, b) +} +func (m *UndeleteRoleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UndeleteRoleRequest.Marshal(b, m, deterministic) +} +func (dst *UndeleteRoleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UndeleteRoleRequest.Merge(dst, src) +} +func (m *UndeleteRoleRequest) XXX_Size() int { + return xxx_messageInfo_UndeleteRoleRequest.Size(m) +} +func (m *UndeleteRoleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UndeleteRoleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UndeleteRoleRequest proto.InternalMessageInfo + +func (m *UndeleteRoleRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UndeleteRoleRequest) GetEtag() []byte { + if m != nil { + return m.Etag + } + return nil +} + +// A permission which can be included by a role. +type Permission struct { + // The name of this Permission. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The title of this Permission. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // A brief description of what this Permission is used for. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // This permission can ONLY be used in predefined roles. + OnlyInPredefinedRoles bool `protobuf:"varint,4,opt,name=only_in_predefined_roles,json=onlyInPredefinedRoles,proto3" json:"only_in_predefined_roles,omitempty"` + // The current launch stage of the permission. + Stage Permission_PermissionLaunchStage `protobuf:"varint,5,opt,name=stage,proto3,enum=google.iam.admin.v1.Permission_PermissionLaunchStage" json:"stage,omitempty"` + // The current custom role support level. 
+ CustomRolesSupportLevel Permission_CustomRolesSupportLevel `protobuf:"varint,6,opt,name=custom_roles_support_level,json=customRolesSupportLevel,proto3,enum=google.iam.admin.v1.Permission_CustomRolesSupportLevel" json:"custom_roles_support_level,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Permission) Reset() { *m = Permission{} } +func (m *Permission) String() string { return proto.CompactTextString(m) } +func (*Permission) ProtoMessage() {} +func (*Permission) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{26} +} +func (m *Permission) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Permission.Unmarshal(m, b) +} +func (m *Permission) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Permission.Marshal(b, m, deterministic) +} +func (dst *Permission) XXX_Merge(src proto.Message) { + xxx_messageInfo_Permission.Merge(dst, src) +} +func (m *Permission) XXX_Size() int { + return xxx_messageInfo_Permission.Size(m) +} +func (m *Permission) XXX_DiscardUnknown() { + xxx_messageInfo_Permission.DiscardUnknown(m) +} + +var xxx_messageInfo_Permission proto.InternalMessageInfo + +func (m *Permission) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Permission) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Permission) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Permission) GetOnlyInPredefinedRoles() bool { + if m != nil { + return m.OnlyInPredefinedRoles + } + return false +} + +func (m *Permission) GetStage() Permission_PermissionLaunchStage { + if m != nil { + return m.Stage + } + return Permission_ALPHA +} + +func (m *Permission) GetCustomRolesSupportLevel() Permission_CustomRolesSupportLevel { + if m != nil { + return m.CustomRolesSupportLevel + } + return Permission_SUPPORTED +} + +// A request to get permissions which can be tested on a resource. +type QueryTestablePermissionsRequest struct { + // Required. The full resource name to query from the list of testable + // permissions. + // + // The name follows the Google Cloud Platform resource format. + // For example, a Cloud Platform project with id `my-project` will be named + // `//cloudresourcemanager.googleapis.com/projects/my-project`. + FullResourceName string `protobuf:"bytes,1,opt,name=full_resource_name,json=fullResourceName,proto3" json:"full_resource_name,omitempty"` + // Optional limit on the number of permissions to include in the response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional pagination token returned in an earlier + // QueryTestablePermissionsRequest. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryTestablePermissionsRequest) Reset() { *m = QueryTestablePermissionsRequest{} } +func (m *QueryTestablePermissionsRequest) String() string { return proto.CompactTextString(m) } +func (*QueryTestablePermissionsRequest) ProtoMessage() {} +func (*QueryTestablePermissionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{27} +} +func (m *QueryTestablePermissionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryTestablePermissionsRequest.Unmarshal(m, b) +} +func (m *QueryTestablePermissionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryTestablePermissionsRequest.Marshal(b, m, deterministic) +} +func (dst *QueryTestablePermissionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryTestablePermissionsRequest.Merge(dst, src) +} +func (m *QueryTestablePermissionsRequest) XXX_Size() int { + return xxx_messageInfo_QueryTestablePermissionsRequest.Size(m) +} +func (m *QueryTestablePermissionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueryTestablePermissionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryTestablePermissionsRequest proto.InternalMessageInfo + +func (m *QueryTestablePermissionsRequest) GetFullResourceName() string { + if m != nil { + return m.FullResourceName + } + return "" +} + +func (m *QueryTestablePermissionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *QueryTestablePermissionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response containing permissions which can be tested on a resource. +type QueryTestablePermissionsResponse struct { + // The Permissions testable on the requested resource. + Permissions []*Permission `protobuf:"bytes,1,rep,name=permissions,proto3" json:"permissions,omitempty"` + // To retrieve the next page of results, set + // `QueryTestableRolesRequest.page_token` to this value. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryTestablePermissionsResponse) Reset() { *m = QueryTestablePermissionsResponse{} } +func (m *QueryTestablePermissionsResponse) String() string { return proto.CompactTextString(m) } +func (*QueryTestablePermissionsResponse) ProtoMessage() {} +func (*QueryTestablePermissionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_153c5ceb8e95593c, []int{28} +} +func (m *QueryTestablePermissionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryTestablePermissionsResponse.Unmarshal(m, b) +} +func (m *QueryTestablePermissionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryTestablePermissionsResponse.Marshal(b, m, deterministic) +} +func (dst *QueryTestablePermissionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryTestablePermissionsResponse.Merge(dst, src) +} +func (m *QueryTestablePermissionsResponse) XXX_Size() int { + return xxx_messageInfo_QueryTestablePermissionsResponse.Size(m) +} +func (m *QueryTestablePermissionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueryTestablePermissionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryTestablePermissionsResponse proto.InternalMessageInfo + +func (m *QueryTestablePermissionsResponse) GetPermissions() []*Permission { + if m != nil { + return m.Permissions + } + return nil +} + +func (m *QueryTestablePermissionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*ServiceAccount)(nil), "google.iam.admin.v1.ServiceAccount") + proto.RegisterType((*CreateServiceAccountRequest)(nil), "google.iam.admin.v1.CreateServiceAccountRequest") + proto.RegisterType((*ListServiceAccountsRequest)(nil), "google.iam.admin.v1.ListServiceAccountsRequest") + proto.RegisterType((*ListServiceAccountsResponse)(nil), "google.iam.admin.v1.ListServiceAccountsResponse") + proto.RegisterType((*GetServiceAccountRequest)(nil), "google.iam.admin.v1.GetServiceAccountRequest") + proto.RegisterType((*DeleteServiceAccountRequest)(nil), "google.iam.admin.v1.DeleteServiceAccountRequest") + proto.RegisterType((*ListServiceAccountKeysRequest)(nil), "google.iam.admin.v1.ListServiceAccountKeysRequest") + proto.RegisterType((*ListServiceAccountKeysResponse)(nil), "google.iam.admin.v1.ListServiceAccountKeysResponse") + proto.RegisterType((*GetServiceAccountKeyRequest)(nil), "google.iam.admin.v1.GetServiceAccountKeyRequest") + proto.RegisterType((*ServiceAccountKey)(nil), "google.iam.admin.v1.ServiceAccountKey") + proto.RegisterType((*CreateServiceAccountKeyRequest)(nil), "google.iam.admin.v1.CreateServiceAccountKeyRequest") + proto.RegisterType((*DeleteServiceAccountKeyRequest)(nil), "google.iam.admin.v1.DeleteServiceAccountKeyRequest") + proto.RegisterType((*SignBlobRequest)(nil), "google.iam.admin.v1.SignBlobRequest") + proto.RegisterType((*SignBlobResponse)(nil), "google.iam.admin.v1.SignBlobResponse") + proto.RegisterType((*SignJwtRequest)(nil), "google.iam.admin.v1.SignJwtRequest") + proto.RegisterType((*SignJwtResponse)(nil), "google.iam.admin.v1.SignJwtResponse") + proto.RegisterType((*Role)(nil), "google.iam.admin.v1.Role") + proto.RegisterType((*QueryGrantableRolesRequest)(nil), "google.iam.admin.v1.QueryGrantableRolesRequest") + 
proto.RegisterType((*QueryGrantableRolesResponse)(nil), "google.iam.admin.v1.QueryGrantableRolesResponse") + proto.RegisterType((*ListRolesRequest)(nil), "google.iam.admin.v1.ListRolesRequest") + proto.RegisterType((*ListRolesResponse)(nil), "google.iam.admin.v1.ListRolesResponse") + proto.RegisterType((*GetRoleRequest)(nil), "google.iam.admin.v1.GetRoleRequest") + proto.RegisterType((*CreateRoleRequest)(nil), "google.iam.admin.v1.CreateRoleRequest") + proto.RegisterType((*UpdateRoleRequest)(nil), "google.iam.admin.v1.UpdateRoleRequest") + proto.RegisterType((*DeleteRoleRequest)(nil), "google.iam.admin.v1.DeleteRoleRequest") + proto.RegisterType((*UndeleteRoleRequest)(nil), "google.iam.admin.v1.UndeleteRoleRequest") + proto.RegisterType((*Permission)(nil), "google.iam.admin.v1.Permission") + proto.RegisterType((*QueryTestablePermissionsRequest)(nil), "google.iam.admin.v1.QueryTestablePermissionsRequest") + proto.RegisterType((*QueryTestablePermissionsResponse)(nil), "google.iam.admin.v1.QueryTestablePermissionsResponse") + proto.RegisterEnum("google.iam.admin.v1.ServiceAccountKeyAlgorithm", ServiceAccountKeyAlgorithm_name, ServiceAccountKeyAlgorithm_value) + proto.RegisterEnum("google.iam.admin.v1.ServiceAccountPrivateKeyType", ServiceAccountPrivateKeyType_name, ServiceAccountPrivateKeyType_value) + proto.RegisterEnum("google.iam.admin.v1.ServiceAccountPublicKeyType", ServiceAccountPublicKeyType_name, ServiceAccountPublicKeyType_value) + proto.RegisterEnum("google.iam.admin.v1.RoleView", RoleView_name, RoleView_value) + proto.RegisterEnum("google.iam.admin.v1.ListServiceAccountKeysRequest_KeyType", ListServiceAccountKeysRequest_KeyType_name, ListServiceAccountKeysRequest_KeyType_value) + proto.RegisterEnum("google.iam.admin.v1.Role_RoleLaunchStage", Role_RoleLaunchStage_name, Role_RoleLaunchStage_value) + proto.RegisterEnum("google.iam.admin.v1.Permission_PermissionLaunchStage", Permission_PermissionLaunchStage_name, Permission_PermissionLaunchStage_value) + proto.RegisterEnum("google.iam.admin.v1.Permission_CustomRolesSupportLevel", Permission_CustomRolesSupportLevel_name, Permission_CustomRolesSupportLevel_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// IAMClient is the client API for IAM service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type IAMClient interface { + // Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project. + ListServiceAccounts(ctx context.Context, in *ListServiceAccountsRequest, opts ...grpc.CallOption) (*ListServiceAccountsResponse, error) + // Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + GetServiceAccount(ctx context.Context, in *GetServiceAccountRequest, opts ...grpc.CallOption) (*ServiceAccount, error) + // Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount] + // and returns it. + CreateServiceAccount(ctx context.Context, in *CreateServiceAccountRequest, opts ...grpc.CallOption) (*ServiceAccount, error) + // Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + // + // Currently, only the following fields are updatable: + // `display_name` . + // The `etag` is mandatory. 
+ UpdateServiceAccount(ctx context.Context, in *ServiceAccount, opts ...grpc.CallOption) (*ServiceAccount, error) + // Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + DeleteServiceAccount(ctx context.Context, in *DeleteServiceAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey]. + ListServiceAccountKeys(ctx context.Context, in *ListServiceAccountKeysRequest, opts ...grpc.CallOption) (*ListServiceAccountKeysResponse, error) + // Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey] + // by key id. + GetServiceAccountKey(ctx context.Context, in *GetServiceAccountKeyRequest, opts ...grpc.CallOption) (*ServiceAccountKey, error) + // Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey] + // and returns it. + CreateServiceAccountKey(ctx context.Context, in *CreateServiceAccountKeyRequest, opts ...grpc.CallOption) (*ServiceAccountKey, error) + // Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]. + DeleteServiceAccountKey(ctx context.Context, in *DeleteServiceAccountKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Signs a blob using a service account's system-managed private key. + SignBlob(ctx context.Context, in *SignBlobRequest, opts ...grpc.CallOption) (*SignBlobResponse, error) + // Signs a JWT using a service account's system-managed private key. + // + // If no expiry time (`exp`) is provided in the `SignJwtRequest`, IAM sets an + // an expiry time of one hour by default. If you request an expiry time of + // more than one hour, the request will fail. + SignJwt(ctx context.Context, in *SignJwtRequest, opts ...grpc.CallOption) (*SignJwtResponse, error) + // Returns the IAM access control policy for a + // [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Sets the IAM access control policy for a + // [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Tests the specified permissions against the IAM access control policy + // for a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) + // Queries roles that can be granted on a particular resource. + // A role is grantable if it can be used as the role in a binding for a policy + // for that resource. + QueryGrantableRoles(ctx context.Context, in *QueryGrantableRolesRequest, opts ...grpc.CallOption) (*QueryGrantableRolesResponse, error) + // Lists the Roles defined on a resource. + ListRoles(ctx context.Context, in *ListRolesRequest, opts ...grpc.CallOption) (*ListRolesResponse, error) + // Gets a Role definition. + GetRole(ctx context.Context, in *GetRoleRequest, opts ...grpc.CallOption) (*Role, error) + // Creates a new Role. + CreateRole(ctx context.Context, in *CreateRoleRequest, opts ...grpc.CallOption) (*Role, error) + // Updates a Role definition. + UpdateRole(ctx context.Context, in *UpdateRoleRequest, opts ...grpc.CallOption) (*Role, error) + // Soft deletes a role. The role is suspended and cannot be used to create new + // IAM Policy Bindings. + // The Role will not be included in `ListRoles()` unless `show_deleted` is set + // in the `ListRolesRequest`. The Role contains the deleted boolean set. 
+ // Existing Bindings remains, but are inactive. The Role can be undeleted + // within 7 days. After 7 days the Role is deleted and all Bindings associated + // with the role are removed. + DeleteRole(ctx context.Context, in *DeleteRoleRequest, opts ...grpc.CallOption) (*Role, error) + // Undelete a Role, bringing it back in its previous state. + UndeleteRole(ctx context.Context, in *UndeleteRoleRequest, opts ...grpc.CallOption) (*Role, error) + // Lists the permissions testable on a resource. + // A permission is testable if it can be tested for an identity on a resource. + QueryTestablePermissions(ctx context.Context, in *QueryTestablePermissionsRequest, opts ...grpc.CallOption) (*QueryTestablePermissionsResponse, error) +} + +type iAMClient struct { + cc *grpc.ClientConn +} + +func NewIAMClient(cc *grpc.ClientConn) IAMClient { + return &iAMClient{cc} +} + +func (c *iAMClient) ListServiceAccounts(ctx context.Context, in *ListServiceAccountsRequest, opts ...grpc.CallOption) (*ListServiceAccountsResponse, error) { + out := new(ListServiceAccountsResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/ListServiceAccounts", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) GetServiceAccount(ctx context.Context, in *GetServiceAccountRequest, opts ...grpc.CallOption) (*ServiceAccount, error) { + out := new(ServiceAccount) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/GetServiceAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) CreateServiceAccount(ctx context.Context, in *CreateServiceAccountRequest, opts ...grpc.CallOption) (*ServiceAccount, error) { + out := new(ServiceAccount) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/CreateServiceAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) UpdateServiceAccount(ctx context.Context, in *ServiceAccount, opts ...grpc.CallOption) (*ServiceAccount, error) { + out := new(ServiceAccount) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/UpdateServiceAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) DeleteServiceAccount(ctx context.Context, in *DeleteServiceAccountRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/DeleteServiceAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) ListServiceAccountKeys(ctx context.Context, in *ListServiceAccountKeysRequest, opts ...grpc.CallOption) (*ListServiceAccountKeysResponse, error) { + out := new(ListServiceAccountKeysResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/ListServiceAccountKeys", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) GetServiceAccountKey(ctx context.Context, in *GetServiceAccountKeyRequest, opts ...grpc.CallOption) (*ServiceAccountKey, error) { + out := new(ServiceAccountKey) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/GetServiceAccountKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) CreateServiceAccountKey(ctx context.Context, in *CreateServiceAccountKeyRequest, opts ...grpc.CallOption) (*ServiceAccountKey, error) { + out := new(ServiceAccountKey) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/CreateServiceAccountKey", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) DeleteServiceAccountKey(ctx context.Context, in *DeleteServiceAccountKeyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/DeleteServiceAccountKey", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) SignBlob(ctx context.Context, in *SignBlobRequest, opts ...grpc.CallOption) (*SignBlobResponse, error) { + out := new(SignBlobResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/SignBlob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) SignJwt(ctx context.Context, in *SignJwtRequest, opts ...grpc.CallOption) (*SignJwtResponse, error) { + out := new(SignJwtResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/SignJwt", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) QueryGrantableRoles(ctx context.Context, in *QueryGrantableRolesRequest, opts ...grpc.CallOption) (*QueryGrantableRolesResponse, error) { + out := new(QueryGrantableRolesResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/QueryGrantableRoles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) ListRoles(ctx context.Context, in *ListRolesRequest, opts ...grpc.CallOption) (*ListRolesResponse, error) { + out := new(ListRolesResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/ListRoles", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) GetRole(ctx context.Context, in *GetRoleRequest, opts ...grpc.CallOption) (*Role, error) { + out := new(Role) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/GetRole", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) CreateRole(ctx context.Context, in *CreateRoleRequest, opts ...grpc.CallOption) (*Role, error) { + out := new(Role) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/CreateRole", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) UpdateRole(ctx context.Context, in *UpdateRoleRequest, opts ...grpc.CallOption) (*Role, error) { + out := new(Role) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/UpdateRole", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) DeleteRole(ctx context.Context, in *DeleteRoleRequest, opts ...grpc.CallOption) (*Role, error) { + out := new(Role) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/DeleteRole", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) UndeleteRole(ctx context.Context, in *UndeleteRoleRequest, opts ...grpc.CallOption) (*Role, error) { + out := new(Role) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/UndeleteRole", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMClient) QueryTestablePermissions(ctx context.Context, in *QueryTestablePermissionsRequest, opts ...grpc.CallOption) (*QueryTestablePermissionsResponse, error) { + out := new(QueryTestablePermissionsResponse) + err := c.cc.Invoke(ctx, "/google.iam.admin.v1.IAM/QueryTestablePermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// IAMServer is the server API for IAM service. +type IAMServer interface { + // Lists [ServiceAccounts][google.iam.admin.v1.ServiceAccount] for a project. + ListServiceAccounts(context.Context, *ListServiceAccountsRequest) (*ListServiceAccountsResponse, error) + // Gets a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + GetServiceAccount(context.Context, *GetServiceAccountRequest) (*ServiceAccount, error) + // Creates a [ServiceAccount][google.iam.admin.v1.ServiceAccount] + // and returns it. + CreateServiceAccount(context.Context, *CreateServiceAccountRequest) (*ServiceAccount, error) + // Updates a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + // + // Currently, only the following fields are updatable: + // `display_name` . + // The `etag` is mandatory. + UpdateServiceAccount(context.Context, *ServiceAccount) (*ServiceAccount, error) + // Deletes a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + DeleteServiceAccount(context.Context, *DeleteServiceAccountRequest) (*empty.Empty, error) + // Lists [ServiceAccountKeys][google.iam.admin.v1.ServiceAccountKey]. + ListServiceAccountKeys(context.Context, *ListServiceAccountKeysRequest) (*ListServiceAccountKeysResponse, error) + // Gets the [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey] + // by key id. + GetServiceAccountKey(context.Context, *GetServiceAccountKeyRequest) (*ServiceAccountKey, error) + // Creates a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey] + // and returns it. + CreateServiceAccountKey(context.Context, *CreateServiceAccountKeyRequest) (*ServiceAccountKey, error) + // Deletes a [ServiceAccountKey][google.iam.admin.v1.ServiceAccountKey]. + DeleteServiceAccountKey(context.Context, *DeleteServiceAccountKeyRequest) (*empty.Empty, error) + // Signs a blob using a service account's system-managed private key. + SignBlob(context.Context, *SignBlobRequest) (*SignBlobResponse, error) + // Signs a JWT using a service account's system-managed private key. + // + // If no expiry time (`exp`) is provided in the `SignJwtRequest`, IAM sets an + // an expiry time of one hour by default. If you request an expiry time of + // more than one hour, the request will fail. + SignJwt(context.Context, *SignJwtRequest) (*SignJwtResponse, error) + // Returns the IAM access control policy for a + // [ServiceAccount][google.iam.admin.v1.ServiceAccount]. 
+ GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Sets the IAM access control policy for a + // [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Tests the specified permissions against the IAM access control policy + // for a [ServiceAccount][google.iam.admin.v1.ServiceAccount]. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) + // Queries roles that can be granted on a particular resource. + // A role is grantable if it can be used as the role in a binding for a policy + // for that resource. + QueryGrantableRoles(context.Context, *QueryGrantableRolesRequest) (*QueryGrantableRolesResponse, error) + // Lists the Roles defined on a resource. + ListRoles(context.Context, *ListRolesRequest) (*ListRolesResponse, error) + // Gets a Role definition. + GetRole(context.Context, *GetRoleRequest) (*Role, error) + // Creates a new Role. + CreateRole(context.Context, *CreateRoleRequest) (*Role, error) + // Updates a Role definition. + UpdateRole(context.Context, *UpdateRoleRequest) (*Role, error) + // Soft deletes a role. The role is suspended and cannot be used to create new + // IAM Policy Bindings. + // The Role will not be included in `ListRoles()` unless `show_deleted` is set + // in the `ListRolesRequest`. The Role contains the deleted boolean set. + // Existing Bindings remains, but are inactive. The Role can be undeleted + // within 7 days. After 7 days the Role is deleted and all Bindings associated + // with the role are removed. + DeleteRole(context.Context, *DeleteRoleRequest) (*Role, error) + // Undelete a Role, bringing it back in its previous state. + UndeleteRole(context.Context, *UndeleteRoleRequest) (*Role, error) + // Lists the permissions testable on a resource. + // A permission is testable if it can be tested for an identity on a resource. 
+ QueryTestablePermissions(context.Context, *QueryTestablePermissionsRequest) (*QueryTestablePermissionsResponse, error) +} + +func RegisterIAMServer(s *grpc.Server, srv IAMServer) { + s.RegisterService(&_IAM_serviceDesc, srv) +} + +func _IAM_ListServiceAccounts_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServiceAccountsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).ListServiceAccounts(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/ListServiceAccounts", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).ListServiceAccounts(ctx, req.(*ListServiceAccountsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_GetServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).GetServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/GetServiceAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).GetServiceAccount(ctx, req.(*GetServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_CreateServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).CreateServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/CreateServiceAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).CreateServiceAccount(ctx, req.(*CreateServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_UpdateServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ServiceAccount) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).UpdateServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/UpdateServiceAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).UpdateServiceAccount(ctx, req.(*ServiceAccount)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_DeleteServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).DeleteServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/DeleteServiceAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).DeleteServiceAccount(ctx, 
req.(*DeleteServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_ListServiceAccountKeys_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListServiceAccountKeysRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).ListServiceAccountKeys(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/ListServiceAccountKeys", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).ListServiceAccountKeys(ctx, req.(*ListServiceAccountKeysRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_GetServiceAccountKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServiceAccountKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).GetServiceAccountKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/GetServiceAccountKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).GetServiceAccountKey(ctx, req.(*GetServiceAccountKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_CreateServiceAccountKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateServiceAccountKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).CreateServiceAccountKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/CreateServiceAccountKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).CreateServiceAccountKey(ctx, req.(*CreateServiceAccountKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_DeleteServiceAccountKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteServiceAccountKeyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).DeleteServiceAccountKey(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/DeleteServiceAccountKey", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).DeleteServiceAccountKey(ctx, req.(*DeleteServiceAccountKeyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_SignBlob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignBlobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).SignBlob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/SignBlob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).SignBlob(ctx, req.(*SignBlobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_SignJwt_Handler(srv interface{}, ctx 
context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignJwtRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).SignJwt(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/SignJwt", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).SignJwt(ctx, req.(*SignJwtRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_QueryGrantableRoles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryGrantableRolesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).QueryGrantableRoles(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/QueryGrantableRoles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).QueryGrantableRoles(ctx, req.(*QueryGrantableRolesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_ListRoles_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListRolesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).ListRoles(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/ListRoles", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).ListRoles(ctx, req.(*ListRolesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_GetRole_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRoleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).GetRole(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/GetRole", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).GetRole(ctx, req.(*GetRoleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_CreateRole_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateRoleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).CreateRole(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/CreateRole", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).CreateRole(ctx, req.(*CreateRoleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_UpdateRole_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateRoleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).UpdateRole(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/UpdateRole", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).UpdateRole(ctx, req.(*UpdateRoleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_DeleteRole_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteRoleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).DeleteRole(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/DeleteRole", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).DeleteRole(ctx, req.(*DeleteRoleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_UndeleteRole_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UndeleteRoleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).UndeleteRole(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/UndeleteRole", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).UndeleteRole(ctx, req.(*UndeleteRoleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAM_QueryTestablePermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryTestablePermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMServer).QueryTestablePermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.admin.v1.IAM/QueryTestablePermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMServer).QueryTestablePermissions(ctx, req.(*QueryTestablePermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _IAM_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.iam.admin.v1.IAM", + HandlerType: (*IAMServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListServiceAccounts", + Handler: _IAM_ListServiceAccounts_Handler, + }, + { + MethodName: "GetServiceAccount", + Handler: _IAM_GetServiceAccount_Handler, + }, + { + MethodName: "CreateServiceAccount", + Handler: _IAM_CreateServiceAccount_Handler, + }, + { + MethodName: "UpdateServiceAccount", + Handler: _IAM_UpdateServiceAccount_Handler, + }, + { + MethodName: "DeleteServiceAccount", + Handler: _IAM_DeleteServiceAccount_Handler, + }, + { + MethodName: "ListServiceAccountKeys", + Handler: _IAM_ListServiceAccountKeys_Handler, + }, + { + MethodName: "GetServiceAccountKey", + Handler: _IAM_GetServiceAccountKey_Handler, + }, + { + MethodName: "CreateServiceAccountKey", + Handler: _IAM_CreateServiceAccountKey_Handler, + }, + { + MethodName: "DeleteServiceAccountKey", + Handler: _IAM_DeleteServiceAccountKey_Handler, + }, + { + MethodName: "SignBlob", + Handler: _IAM_SignBlob_Handler, + }, + { + MethodName: "SignJwt", + Handler: _IAM_SignJwt_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _IAM_GetIamPolicy_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _IAM_SetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _IAM_TestIamPermissions_Handler, + }, + { + MethodName: "QueryGrantableRoles", + Handler: _IAM_QueryGrantableRoles_Handler, + }, + { + MethodName: "ListRoles", + Handler: _IAM_ListRoles_Handler, + }, + { + MethodName: "GetRole", + Handler: _IAM_GetRole_Handler, + }, + { + MethodName: "CreateRole", + Handler: _IAM_CreateRole_Handler, + }, + { + MethodName: "UpdateRole", + Handler: _IAM_UpdateRole_Handler, + }, + { + MethodName: "DeleteRole", + Handler: _IAM_DeleteRole_Handler, + }, + { + MethodName: "UndeleteRole", + Handler: _IAM_UndeleteRole_Handler, + }, + { + MethodName: "QueryTestablePermissions", + Handler: _IAM_QueryTestablePermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/iam/admin/v1/iam.proto", +} + +func init() { proto.RegisterFile("google/iam/admin/v1/iam.proto", fileDescriptor_iam_153c5ceb8e95593c) } + +var fileDescriptor_iam_153c5ceb8e95593c = []byte{ + // 2430 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x5a, 0x4f, 0x73, 0xdb, 0xc6, + 0x15, 0x37, 0x28, 0xea, 0x0f, 0x9f, 0x24, 0x0a, 0x5a, 0xc9, 0x16, 0x4b, 0x59, 0xb6, 0xb2, 0xb5, + 0x1d, 0x99, 0xb5, 0x45, 0x89, 0x96, 0x6b, 0x57, 0x1e, 0x27, 0xa5, 0x44, 0x9a, 0xa1, 0x45, 0xcb, + 0x2c, 0x48, 0x35, 0x71, 0xff, 0x0c, 0x06, 0x22, 0x56, 0x34, 0x22, 0x10, 0x80, 0x01, 0x50, 0x2a, + 0x9d, 0x49, 0x67, 0xda, 0x43, 0x2f, 0x99, 0x76, 0xda, 0x49, 0x0e, 0x39, 0xa5, 0x33, 0xbd, 0xb4, + 0xb7, 0x5e, 0x3a, 0xd3, 0x69, 0x27, 0xdf, 0xa0, 0xc7, 0x1e, 0xfa, 0x05, 0x32, 0xd3, 0xaf, 0xd0, + 0x63, 0x67, 0x17, 0x80, 0x08, 0x92, 0x00, 0x04, 0x39, 0x69, 0x2f, 0x1a, 0xec, 
0xfb, 0xfb, 0xdb, + 0xb7, 0xbb, 0x6f, 0xdf, 0x3e, 0x0a, 0x56, 0xda, 0xba, 0xde, 0x56, 0x49, 0x5e, 0x91, 0x3a, 0x79, + 0x49, 0xee, 0x28, 0x5a, 0xfe, 0x64, 0x93, 0x0e, 0xd6, 0x0d, 0x53, 0xb7, 0x75, 0xb4, 0xe0, 0xb0, + 0xd7, 0x29, 0x85, 0xb1, 0xd7, 0x4f, 0x36, 0xb3, 0x57, 0x5d, 0x1d, 0xc9, 0x50, 0xf2, 0x92, 0xa6, + 0xe9, 0xb6, 0x64, 0x2b, 0xba, 0x66, 0x39, 0x2a, 0xd9, 0x6b, 0x3e, 0x8b, 0x8e, 0x2d, 0xd1, 0xd0, + 0x55, 0xa5, 0xd5, 0x73, 0xf9, 0xd9, 0x41, 0xfe, 0x00, 0x6f, 0xd9, 0xe5, 0xb1, 0xd1, 0x61, 0xf7, + 0x28, 0x4f, 0x3a, 0x86, 0xed, 0x31, 0x57, 0x87, 0x99, 0x47, 0x0a, 0x51, 0x65, 0xb1, 0x23, 0x59, + 0xc7, 0xae, 0xc4, 0xf5, 0x61, 0x09, 0x5b, 0xe9, 0x10, 0xcb, 0x96, 0x3a, 0x86, 0x23, 0x80, 0xff, + 0xc5, 0x41, 0xba, 0x41, 0xcc, 0x13, 0xa5, 0x45, 0x8a, 0xad, 0x96, 0xde, 0xd5, 0x6c, 0x84, 0x20, + 0xa9, 0x49, 0x1d, 0x92, 0xe1, 0x56, 0xb9, 0xb5, 0x94, 0xc0, 0xbe, 0xd1, 0x0a, 0x80, 0x61, 0xea, + 0x1f, 0x92, 0x96, 0x2d, 0x2a, 0x72, 0x26, 0xc1, 0x38, 0x29, 0x97, 0x52, 0x95, 0xd1, 0x32, 0xa4, + 0xba, 0x9a, 0xf2, 0xaa, 0x4b, 0x28, 0x37, 0xc9, 0xb8, 0x53, 0x0e, 0xa1, 0x2a, 0xa3, 0x45, 0x18, + 0x27, 0x1d, 0x49, 0x51, 0x33, 0xe3, 0x8c, 0xe1, 0x0c, 0xd0, 0x5b, 0x30, 0x23, 0x2b, 0x96, 0xa1, + 0x4a, 0x3d, 0x91, 0x79, 0x9b, 0x60, 0xcc, 0x69, 0x97, 0xb6, 0x4f, 0x9d, 0x22, 0x48, 0x12, 0x5b, + 0x6a, 0x67, 0x26, 0x57, 0xb9, 0xb5, 0x19, 0x81, 0x7d, 0xa3, 0x35, 0xe0, 0x75, 0xa9, 0x6b, 0xbf, + 0x2c, 0x88, 0x2d, 0x55, 0x21, 0x1a, 0x83, 0x93, 0x62, 0xaa, 0x69, 0x87, 0xbe, 0xcb, 0xc8, 0x55, + 0x19, 0x7f, 0xc1, 0xc1, 0xf2, 0xae, 0x49, 0x24, 0x9b, 0x0c, 0xce, 0x4f, 0x20, 0xaf, 0xba, 0xc4, + 0x0a, 0x9d, 0xa6, 0xe4, 0x48, 0xf9, 0xa6, 0xe9, 0x52, 0xaa, 0x32, 0xaa, 0xc1, 0x9c, 0xe5, 0xd8, + 0x12, 0x5d, 0x62, 0x66, 0x6c, 0x95, 0x5b, 0x9b, 0x2e, 0x7c, 0x7b, 0x3d, 0x60, 0x57, 0xac, 0x0f, + 0xf9, 0x4d, 0x5b, 0x03, 0x63, 0xac, 0x42, 0xb6, 0xa6, 0x58, 0xf6, 0xa0, 0x94, 0x15, 0x05, 0x6f, + 0x19, 0x52, 0x86, 0xd4, 0x26, 0xa2, 0xa5, 0xbc, 0x26, 0x0c, 0xdd, 0xb8, 0x30, 0x45, 0x09, 0x0d, + 0xe5, 0xb5, 0xb3, 0x44, 0x94, 0x69, 0xeb, 0xc7, 0x44, 0x63, 0xb8, 0xe8, 0x12, 0x49, 0x6d, 0xd2, + 0xa4, 0x04, 0xfc, 0x2b, 0x0e, 0x96, 0x03, 0xdd, 0x59, 0x86, 0xae, 0x59, 0x04, 0xbd, 0x0b, 0x53, + 0xee, 0x9c, 0xac, 0x0c, 0xb7, 0x3a, 0x16, 0x77, 0x52, 0x67, 0x4a, 0xe8, 0x16, 0xcc, 0x69, 0xe4, + 0x67, 0xb6, 0xe8, 0x03, 0xe1, 0x04, 0x70, 0x96, 0x92, 0xeb, 0x67, 0x40, 0xd6, 0x21, 0x53, 0x21, + 0x76, 0xec, 0x35, 0xc1, 0x9b, 0xb0, 0x5c, 0x22, 0x2a, 0xb9, 0xc0, 0x32, 0xd2, 0x4d, 0xbd, 0x32, + 0x3a, 0xd7, 0x3d, 0xd2, 0x8b, 0x8c, 0xee, 0xfb, 0x90, 0x3a, 0x26, 0x3d, 0xd1, 0xee, 0x19, 0xc4, + 0xca, 0x24, 0x56, 0xc7, 0xd6, 0xd2, 0x85, 0xed, 0xc0, 0x10, 0x44, 0x9a, 0x5e, 0xdf, 0x23, 0xbd, + 0x66, 0xcf, 0x20, 0xc2, 0xd4, 0xb1, 0xf3, 0x61, 0xe1, 0x2a, 0x4c, 0xba, 0x44, 0x94, 0x81, 0xc5, + 0xbd, 0xf2, 0x0b, 0xb1, 0xf9, 0xa2, 0x5e, 0x16, 0x0f, 0xf6, 0x1b, 0xf5, 0xf2, 0x6e, 0xf5, 0x49, + 0xb5, 0x5c, 0xe2, 0x2f, 0x21, 0x1e, 0x66, 0x0e, 0x1a, 0x65, 0x41, 0x7c, 0x56, 0xdc, 0x2f, 0x56, + 0xca, 0x25, 0x9e, 0x43, 0x08, 0xd2, 0x8d, 0x17, 0x8d, 0x66, 0xf9, 0xd9, 0x19, 0x2d, 0x81, 0x7f, + 0x02, 0xd7, 0xc2, 0xbc, 0xbb, 0xeb, 0xb8, 0x0d, 0xc9, 0x63, 0xd2, 0xf3, 0xd6, 0xf0, 0x56, 0x8c, + 0x35, 0xdc, 0x23, 0x3d, 0x81, 0xe9, 0xe0, 0x4f, 0x38, 0x58, 0x1e, 0x59, 0x1b, 0xca, 0x8e, 0x88, + 0xda, 0x07, 0x30, 0x67, 0x74, 0x0f, 0x55, 0xa5, 0x25, 0x7a, 0xc1, 0x63, 0xcb, 0x9e, 0x2e, 0x6c, + 0xc4, 0x70, 0x5d, 0x67, 0x9a, 0x5e, 0xc4, 0x66, 0x0d, 0xff, 0x10, 0xff, 0x7d, 0x0c, 0xe6, 0x47, + 0xa0, 0x04, 0x62, 0xf8, 0x31, 0xf0, 0x86, 0xa9, 0x9c, 0x48, 0x36, 0x19, 0x06, 0xb1, 0x19, 0x07, + 0x84, 
0xa3, 0xea, 0xa1, 0x48, 0x1b, 0x03, 0x63, 0xd4, 0x84, 0x59, 0x6a, 0x54, 0x52, 0xdb, 0xba, + 0xa9, 0xd8, 0x2f, 0x3b, 0x99, 0x29, 0x66, 0x39, 0x1f, 0x2f, 0xb2, 0x45, 0x4f, 0x4d, 0x98, 0x39, + 0xf6, 0x8d, 0x68, 0x1e, 0xf3, 0x43, 0x96, 0x25, 0x5b, 0x62, 0x67, 0x76, 0xc6, 0xef, 0xbf, 0x24, + 0xd9, 0x12, 0x3d, 0x57, 0xbe, 0x00, 0x33, 0x41, 0x27, 0x21, 0xf6, 0xc3, 0xc5, 0xe4, 0x4a, 0xc0, + 0x9f, 0x48, 0xaa, 0x22, 0x8b, 0xd2, 0x91, 0x4d, 0x4c, 0x91, 0x26, 0x7a, 0x96, 0x8a, 0xa7, 0x0b, + 0x59, 0x0f, 0xaa, 0x77, 0x0b, 0xac, 0x37, 0xbd, 0x5b, 0x40, 0x48, 0x33, 0x9d, 0x22, 0x55, 0xa1, + 0x44, 0xf4, 0x04, 0xe6, 0x1d, 0x2b, 0x87, 0xe4, 0x48, 0x37, 0x89, 0x63, 0x66, 0xfc, 0x5c, 0x33, + 0x73, 0x4c, 0x69, 0x87, 0xe9, 0x50, 0x2a, 0xfe, 0x37, 0x07, 0xd7, 0x82, 0xb2, 0xef, 0x39, 0xbb, + 0xe9, 0xff, 0xbb, 0x92, 0x63, 0xdf, 0xc0, 0x4a, 0xe2, 0x2d, 0xb8, 0x16, 0x94, 0x9f, 0xa2, 0x27, + 0x8a, 0xab, 0x30, 0xd7, 0x50, 0xda, 0xda, 0x8e, 0xaa, 0x1f, 0x46, 0xc5, 0x03, 0xc3, 0xec, 0x61, + 0xcf, 0x26, 0x96, 0x68, 0xeb, 0xa2, 0xa5, 0xb4, 0x9d, 0x94, 0x3a, 0x23, 0x4c, 0x33, 0x62, 0x53, + 0xa7, 0x26, 0x70, 0x05, 0xf8, 0xbe, 0x29, 0x37, 0x0b, 0x5c, 0x86, 0x09, 0x3a, 0x55, 0x45, 0x76, + 0xad, 0x8d, 0x1f, 0x93, 0x5e, 0x55, 0x46, 0x57, 0x21, 0x45, 0xad, 0x48, 0x76, 0xd7, 0x24, 0xae, + 0xa9, 0x3e, 0x01, 0xbf, 0x03, 0x69, 0x6a, 0xe8, 0xe9, 0x69, 0xe4, 0x1d, 0x99, 0x81, 0x49, 0x43, + 0xea, 0xa9, 0xba, 0xe4, 0x5d, 0x90, 0xde, 0x10, 0x57, 0x9c, 0x39, 0x31, 0xfd, 0x68, 0x1c, 0x2b, + 0x00, 0xd4, 0x2d, 0x91, 0xc5, 0x0f, 0x4f, 0x6d, 0xef, 0x9e, 0x75, 0x28, 0x4f, 0x4f, 0x6d, 0xfc, + 0x8f, 0x04, 0x24, 0x05, 0x5d, 0x25, 0x81, 0xfe, 0x17, 0x61, 0xdc, 0x56, 0x6c, 0x95, 0xb8, 0x6a, + 0xce, 0x00, 0xad, 0xc2, 0xb4, 0x4c, 0xac, 0x96, 0xa9, 0x18, 0xb4, 0xf2, 0x72, 0xaf, 0x3f, 0x3f, + 0x09, 0x6d, 0xc2, 0xa2, 0xa2, 0xb5, 0xd4, 0xae, 0x4c, 0x64, 0xd1, 0x20, 0x66, 0x47, 0xb1, 0x2c, + 0x5a, 0xa3, 0x65, 0x26, 0x57, 0xc7, 0xd6, 0x52, 0xc2, 0x82, 0xc7, 0xab, 0xf7, 0x59, 0xe8, 0x5d, + 0x18, 0xb7, 0x6c, 0xa9, 0x4d, 0xdc, 0x23, 0x7f, 0x3b, 0x70, 0xa3, 0x50, 0xa0, 0xec, 0x4f, 0x4d, + 0xea, 0x6a, 0xad, 0x97, 0x0d, 0xaa, 0x20, 0x38, 0x7a, 0x67, 0x15, 0x4c, 0xca, 0x57, 0xc1, 0x64, + 0x60, 0x52, 0x66, 0xfb, 0x45, 0xce, 0x4c, 0xaf, 0x72, 0x6b, 0x53, 0x82, 0x37, 0xc4, 0x07, 0x30, + 0x37, 0x64, 0x07, 0xa5, 0x60, 0xbc, 0x58, 0xab, 0xbf, 0x57, 0xe4, 0x2f, 0xa1, 0x29, 0x48, 0xee, + 0x94, 0x9b, 0x45, 0x9e, 0x43, 0x13, 0x90, 0xa8, 0x14, 0xf9, 0x04, 0x4a, 0x03, 0x94, 0xca, 0x75, + 0xa1, 0xbc, 0x5b, 0x6c, 0x96, 0x4b, 0x7c, 0x12, 0xcd, 0xc0, 0x54, 0xa9, 0xda, 0x28, 0xee, 0xd4, + 0xca, 0x25, 0x7e, 0x1c, 0x4d, 0xc2, 0x58, 0xb9, 0x58, 0xe7, 0x27, 0xf0, 0xdf, 0x38, 0xc8, 0xfe, + 0xa0, 0x4b, 0xcc, 0x5e, 0xc5, 0x94, 0x34, 0x5b, 0x3a, 0x54, 0x09, 0xf5, 0x72, 0x76, 0x15, 0xde, + 0x01, 0x74, 0xd4, 0x55, 0x55, 0xd1, 0x24, 0x96, 0xde, 0x35, 0x5b, 0x44, 0xf4, 0x45, 0x9c, 0xa7, + 0x1c, 0xc1, 0x65, 0xb0, 0x9a, 0x6c, 0x13, 0x92, 0x27, 0x0a, 0x39, 0x75, 0x0f, 0xe5, 0x4a, 0x68, + 0x44, 0x7e, 0xa8, 0x90, 0x53, 0x81, 0x89, 0x0e, 0x56, 0x2d, 0x63, 0x91, 0x55, 0x4b, 0x72, 0xb8, + 0x6a, 0x39, 0x81, 0xe5, 0x40, 0xe8, 0xee, 0xf6, 0xca, 0xc3, 0xb8, 0x49, 0x09, 0xee, 0x6d, 0xf7, + 0xad, 0x50, 0x38, 0x82, 0x23, 0x17, 0xbb, 0x48, 0xf9, 0x92, 0x03, 0x9e, 0x5e, 0xb4, 0x03, 0x91, + 0xba, 0x02, 0x13, 0x86, 0x64, 0x12, 0xcd, 0x76, 0xa3, 0xe3, 0x8e, 0xbe, 0x4e, 0x59, 0x76, 0x16, + 0xcf, 0x64, 0xfc, 0x78, 0xbe, 0x05, 0x33, 0xd6, 0x4b, 0xfd, 0x54, 0xf4, 0x76, 0xd1, 0x04, 0xdb, + 0x45, 0xd3, 0x94, 0x56, 0x72, 0x77, 0x92, 0x0a, 0xf3, 0x3e, 0xf4, 0xff, 0xeb, 0x60, 0xdd, 0x80, + 0x74, 0x85, 0x30, 0x67, 0x51, 
0x19, 0xcf, 0x82, 0x79, 0xe7, 0x42, 0xf0, 0x0b, 0x86, 0x85, 0x74, + 0x09, 0x26, 0x29, 0x86, 0x7e, 0x15, 0x3e, 0x41, 0x87, 0x55, 0x19, 0xdd, 0x85, 0x24, 0xfd, 0x72, + 0xeb, 0xee, 0x88, 0x39, 0x30, 0x31, 0xfc, 0x29, 0x07, 0xf3, 0x07, 0x86, 0x3c, 0xe4, 0x35, 0x28, + 0xad, 0x78, 0x86, 0x13, 0xb1, 0x0c, 0xa3, 0x47, 0x30, 0xdd, 0x65, 0x76, 0xd9, 0x6b, 0xcb, 0x85, + 0x33, 0x7a, 0x43, 0x3e, 0xa1, 0x0f, 0xb2, 0x67, 0x92, 0x75, 0x2c, 0x80, 0x23, 0x4e, 0xbf, 0xf1, + 0x23, 0x98, 0x77, 0x56, 0xea, 0x3c, 0x50, 0x5e, 0xfe, 0x48, 0xf4, 0xf3, 0x07, 0x7e, 0x0c, 0x0b, + 0x07, 0x9a, 0xfc, 0xc6, 0xea, 0x5f, 0x8d, 0x01, 0xf4, 0x73, 0xdc, 0x37, 0x9a, 0x61, 0x1f, 0x40, + 0x46, 0xd7, 0xd4, 0x9e, 0xa8, 0x68, 0xa2, 0x61, 0x12, 0x99, 0x1c, 0x29, 0x34, 0xc3, 0x3b, 0x7b, + 0x2e, 0xc9, 0x36, 0xe9, 0x65, 0xca, 0xaf, 0x6a, 0xf5, 0x33, 0x2e, 0xdb, 0xa1, 0x68, 0xcf, 0xcb, + 0xb3, 0xe3, 0xec, 0x14, 0xdc, 0x0f, 0x0c, 0x7e, 0x1f, 0xb4, 0xef, 0x33, 0x20, 0xe7, 0xda, 0x90, + 0x6d, 0x75, 0x2d, 0x5b, 0xef, 0x38, 0x9e, 0x45, 0xab, 0x6b, 0x18, 0xba, 0x69, 0x8b, 0x2a, 0x39, + 0x21, 0x2a, 0x3b, 0x2c, 0xe9, 0xc2, 0x83, 0xf3, 0x3c, 0xec, 0x32, 0x0b, 0x0c, 0x5d, 0xc3, 0xd1, + 0xaf, 0x51, 0x75, 0x61, 0xa9, 0x15, 0xcc, 0xc0, 0x25, 0xb8, 0x1c, 0x88, 0xea, 0x22, 0x19, 0x7c, + 0x0c, 0xbf, 0x07, 0x4b, 0x21, 0x9e, 0xd1, 0x2c, 0xa4, 0x1a, 0x07, 0xf5, 0xfa, 0x73, 0xa1, 0xc9, + 0x9e, 0x0b, 0xd3, 0x30, 0xd9, 0x2c, 0x37, 0x9a, 0xd5, 0xfd, 0x0a, 0xcf, 0xa1, 0x79, 0x98, 0xdd, + 0x7f, 0xde, 0x14, 0xfb, 0xfc, 0x04, 0x2d, 0xe5, 0xaf, 0xb3, 0xcc, 0xd9, 0xa4, 0x15, 0xda, 0xa1, + 0x4a, 0x7c, 0xf7, 0xda, 0x9b, 0x65, 0xfe, 0xaf, 0xf3, 0xf8, 0xfc, 0x35, 0x07, 0xab, 0xe1, 0x68, + 0xdc, 0xfc, 0x54, 0x84, 0x69, 0xff, 0xbd, 0xec, 0x64, 0xa9, 0xeb, 0xe7, 0xac, 0x94, 0xe0, 0xd7, + 0x89, 0x9b, 0xb1, 0x72, 0x12, 0x64, 0xc3, 0xeb, 0x3b, 0xb4, 0x04, 0x0b, 0xf4, 0x91, 0x56, 0xac, + 0x55, 0x86, 0xde, 0x68, 0x8b, 0xc0, 0x7b, 0x0c, 0xa1, 0x51, 0x14, 0x37, 0x37, 0x0a, 0x5b, 0x3c, + 0x37, 0x4c, 0x2d, 0x6c, 0x6c, 0x3d, 0xe4, 0x13, 0x39, 0x15, 0xae, 0x46, 0x15, 0xa7, 0x54, 0x2b, + 0xe0, 0x15, 0xe8, 0x51, 0xeb, 0x7b, 0xbb, 0x8d, 0xcd, 0x82, 0xf8, 0xa4, 0x5a, 0x2b, 0xf3, 0x1c, + 0x5a, 0x85, 0xab, 0x8c, 0x5a, 0x79, 0xfe, 0xbc, 0x52, 0x2b, 0x8b, 0xbb, 0x42, 0xb9, 0x54, 0xde, + 0x6f, 0x56, 0x8b, 0xb5, 0x86, 0x23, 0x91, 0xc8, 0xfd, 0x14, 0x96, 0x23, 0x5e, 0x56, 0x74, 0xf3, + 0x30, 0x03, 0xfb, 0xcf, 0xf7, 0xcb, 0xfc, 0x25, 0x74, 0x05, 0x10, 0x1b, 0x7e, 0x70, 0x7f, 0xe3, + 0x7b, 0x62, 0xbd, 0xfc, 0xcc, 0xf3, 0xb3, 0x04, 0x0b, 0x8c, 0x2e, 0x14, 0xdf, 0x17, 0xeb, 0x07, + 0x3b, 0xb5, 0xea, 0xae, 0xb8, 0x57, 0x7e, 0xc1, 0x27, 0x72, 0xd7, 0x61, 0xca, 0xbb, 0x84, 0xe8, + 0x86, 0xde, 0x29, 0x36, 0xaa, 0xbb, 0xce, 0x86, 0x7e, 0x72, 0x50, 0xab, 0xf1, 0x5c, 0xe1, 0xf3, + 0x2c, 0x8c, 0x55, 0x8b, 0xcf, 0xd0, 0x1f, 0x39, 0x58, 0x08, 0xe8, 0x32, 0xa0, 0x7c, 0xcc, 0x87, + 0xb4, 0xb7, 0x37, 0xb3, 0x1b, 0xf1, 0x15, 0x9c, 0xed, 0x83, 0xef, 0xfe, 0xf2, 0x9f, 0x5f, 0x7d, + 0x9a, 0x78, 0x1b, 0xdd, 0xcc, 0x9f, 0x6c, 0xe6, 0x3f, 0xa2, 0x5b, 0xf9, 0xb1, 0xdb, 0xa0, 0xb2, + 0xf2, 0xb9, 0x8f, 0xf3, 0xd6, 0x10, 0xa2, 0xcf, 0x39, 0x98, 0x1f, 0x79, 0xeb, 0xa2, 0xbb, 0x81, + 0x6e, 0xc3, 0xfa, 0x15, 0xd9, 0x38, 0x2d, 0x12, 0x9c, 0x67, 0xc0, 0x6e, 0xa3, 0xb7, 0x83, 0x80, + 0x0d, 0xe3, 0xca, 0xe7, 0x3e, 0x46, 0xbf, 0xe7, 0x60, 0x31, 0xe8, 0xed, 0x84, 0x82, 0x83, 0x12, + 0xd1, 0xe4, 0x8a, 0x07, 0x70, 0x83, 0x01, 0xcc, 0xe1, 0x78, 0x91, 0xdb, 0xe6, 0x72, 0xe8, 0x33, + 0x0e, 0x16, 0x9d, 0x6b, 0x75, 0x08, 0x61, 0x1c, 0x7f, 0xf1, 0x40, 0x15, 0x18, 0xa8, 0x3b, 0xd9, + 0xb8, 0x51, 0xa3, 0xb0, 0x7e, 0xc7, 0xc1, 0x62, 0xd0, 
0x5b, 0x2c, 0x24, 0x70, 0x11, 0x6d, 0xa5, + 0xec, 0x95, 0x91, 0xab, 0xbc, 0xdc, 0x31, 0xec, 0x9e, 0xb7, 0x98, 0xb9, 0xd8, 0x8b, 0xf9, 0x57, + 0x0e, 0xae, 0x04, 0xb7, 0x6c, 0x50, 0xe1, 0xe2, 0xdd, 0xa5, 0xec, 0xbd, 0x0b, 0xe9, 0xb8, 0x47, + 0x63, 0x8b, 0x81, 0x5e, 0x47, 0x77, 0x62, 0x82, 0xce, 0x1f, 0x53, 0x78, 0x7f, 0xe2, 0x60, 0x31, + 0xa8, 0x1b, 0x14, 0x12, 0xcd, 0x88, 0xc6, 0x51, 0x36, 0x66, 0x1b, 0x0a, 0x7f, 0x97, 0x01, 0xdd, + 0x40, 0xeb, 0xf1, 0x80, 0x32, 0x9c, 0x34, 0xc8, 0x7f, 0xe6, 0x60, 0x29, 0xa4, 0xdb, 0x80, 0xee, + 0xc5, 0x3e, 0x34, 0x6f, 0x00, 0xf8, 0x01, 0x03, 0xbc, 0x89, 0x2f, 0x14, 0x59, 0xba, 0x55, 0xbf, + 0xe0, 0x60, 0x29, 0xa4, 0x6d, 0x10, 0x82, 0x38, 0xba, 0xc9, 0x10, 0xba, 0x61, 0xdd, 0x90, 0xe6, + 0x2e, 0x1a, 0xd2, 0xcf, 0x38, 0x98, 0xf2, 0xda, 0x0a, 0xe8, 0x46, 0x70, 0x38, 0x06, 0x1b, 0x18, + 0xd9, 0x9b, 0xe7, 0x48, 0xb9, 0xbb, 0xf1, 0x11, 0x43, 0x74, 0x1f, 0x6f, 0xc4, 0x3d, 0xd9, 0x96, + 0x6b, 0x81, 0xc6, 0xed, 0xb7, 0x1c, 0x4c, 0xba, 0x4d, 0x86, 0xb0, 0x64, 0x33, 0xd0, 0xc2, 0xc8, + 0xde, 0x88, 0x16, 0x72, 0x31, 0x6d, 0x33, 0x4c, 0x5b, 0x38, 0x7f, 0x11, 0x4c, 0x4f, 0x4f, 0x6d, + 0x0a, 0xe9, 0x13, 0x0e, 0x66, 0x2a, 0xc4, 0xae, 0x4a, 0x9d, 0x3a, 0xfb, 0xe5, 0x06, 0x61, 0xbf, + 0x4b, 0xe7, 0x64, 0x9c, 0x31, 0x3d, 0x58, 0x97, 0x87, 0x64, 0x1c, 0x2e, 0xfe, 0x3e, 0xc3, 0xb1, + 0x8d, 0x1f, 0x32, 0x1c, 0x5e, 0x61, 0x76, 0x0e, 0x96, 0xb6, 0xdf, 0xf9, 0x6f, 0x38, 0x98, 0x69, + 0x44, 0xa1, 0x69, 0xc4, 0x47, 0xb3, 0xcb, 0xd0, 0x3c, 0xbe, 0x18, 0x1a, 0xcb, 0x67, 0x9f, 0x86, + 0xe7, 0x2f, 0x1c, 0x20, 0x5a, 0xf6, 0x51, 0xa2, 0xaf, 0x54, 0x5b, 0x1b, 0x72, 0x39, 0x2a, 0xe2, + 0x81, 0xbb, 0x1d, 0x43, 0xd2, 0x5d, 0xc6, 0x2a, 0x03, 0xbc, 0x8b, 0xdf, 0xb9, 0x08, 0x60, 0x7b, + 0xc4, 0x1e, 0x85, 0xfd, 0x07, 0x0e, 0x16, 0x02, 0x5a, 0x0f, 0x21, 0x95, 0x4c, 0x78, 0x7f, 0x25, + 0xa4, 0x92, 0x89, 0xe8, 0x6a, 0xe0, 0x35, 0x36, 0x0b, 0x8c, 0x57, 0xe8, 0x2c, 0xd8, 0x23, 0x66, + 0xfb, 0xd5, 0xa8, 0x38, 0x05, 0xd9, 0x81, 0xd4, 0xd9, 0x3b, 0x1f, 0xdd, 0x0c, 0xbd, 0x1a, 0x06, + 0xf0, 0xdc, 0x3a, 0x4f, 0xcc, 0x45, 0x31, 0xcf, 0x50, 0x4c, 0xa3, 0xd4, 0x19, 0x0a, 0x44, 0x60, + 0xd2, 0x7d, 0xe8, 0x87, 0x9c, 0xbd, 0xc1, 0x36, 0x40, 0x36, 0xfc, 0x15, 0x8d, 0xb3, 0xcc, 0xfa, + 0x22, 0x42, 0xfd, 0x03, 0xc7, 0x7c, 0xd0, 0xd4, 0xf3, 0x73, 0x80, 0x7e, 0xa7, 0x00, 0xdd, 0x8a, + 0xc8, 0xdf, 0x31, 0x9d, 0xb9, 0xa5, 0x21, 0xc6, 0xcc, 0x99, 0xd3, 0x62, 0x78, 0xac, 0x9b, 0x6d, + 0x49, 0x53, 0x5e, 0x3b, 0xbf, 0xd4, 0xd2, 0xd4, 0x6c, 0x7a, 0x51, 0xfd, 0x05, 0x07, 0xd0, 0x6f, + 0x1a, 0x84, 0x00, 0x18, 0xe9, 0x2a, 0x44, 0x01, 0x70, 0x2b, 0xac, 0x02, 0xee, 0xcf, 0x76, 0xc8, + 0x7d, 0xde, 0x9b, 0xfd, 0xb6, 0xd3, 0x5f, 0xf8, 0x08, 0xa0, 0xdf, 0x22, 0x08, 0x81, 0x30, 0xd2, + 0x43, 0x88, 0x82, 0x90, 0x63, 0x10, 0x6e, 0xe4, 0x62, 0x40, 0x60, 0x19, 0xcd, 0xdf, 0x63, 0x18, + 0x3c, 0xac, 0xfd, 0x10, 0x8c, 0xb6, 0x21, 0xa2, 0x10, 0xb8, 0x37, 0x11, 0xfe, 0x4e, 0x8c, 0x20, + 0x74, 0x5d, 0xd3, 0x74, 0x39, 0xbe, 0xe4, 0x20, 0x13, 0xf6, 0x78, 0x44, 0x5b, 0xe1, 0xa7, 0x2b, + 0xfc, 0xe5, 0x9b, 0xbd, 0x7f, 0x41, 0x2d, 0xf7, 0x48, 0xdc, 0x63, 0x33, 0xb8, 0x8b, 0xd7, 0xd8, + 0xcf, 0xf4, 0xbe, 0x7c, 0xf1, 0x2a, 0x44, 0x73, 0x9b, 0xcb, 0xed, 0x1c, 0xc2, 0x52, 0x4b, 0xef, + 0x04, 0x39, 0xdc, 0x99, 0xa2, 0x69, 0x87, 0x5e, 0xd7, 0x75, 0xee, 0x47, 0x0f, 0x5d, 0x81, 0xb6, + 0xae, 0x4a, 0x5a, 0x7b, 0x5d, 0x37, 0xdb, 0xf9, 0x36, 0xd1, 0xd8, 0x65, 0x9e, 0x77, 0x58, 0x92, + 0xa1, 0x58, 0x03, 0xff, 0x95, 0xf0, 0x88, 0x7d, 0xfc, 0x87, 0xe3, 0x0e, 0x27, 0x98, 0xdc, 0xbd, + 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x75, 0xcc, 0x0e, 0xa5, 0xbc, 0x20, 0x00, 
0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/common.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/common.pb.go new file mode 100644 index 0000000..cbd5192 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/common.pb.go @@ -0,0 +1,717 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/iam/credentials/v1/common.proto + +package credentials // import "google.golang.org/genproto/googleapis/iam/credentials/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type GenerateAccessTokenRequest struct { + // The resource name of the service account for which the credentials + // are requested, in the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The sequence of service accounts in a delegation chain. Each service + // account must be granted the `roles/iam.serviceAccountTokenCreator` role + // on its next service account in the chain. The last service account in the + // chain must be granted the `roles/iam.serviceAccountTokenCreator` role + // on the service account that is specified in the `name` field of the + // request. + // + // The delegates must have the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}` + Delegates []string `protobuf:"bytes,2,rep,name=delegates,proto3" json:"delegates,omitempty"` + // Code to identify the scopes to be included in the OAuth 2.0 access token. + // See https://developers.google.com/identity/protocols/googlescopes for more + // information. + // At least one value required. + Scope []string `protobuf:"bytes,4,rep,name=scope,proto3" json:"scope,omitempty"` + // The desired lifetime duration of the access token in seconds. + // Must be set to a value less than or equal to 3600 (1 hour). If a value is + // not specified, the token's lifetime will be set to a default value of one + // hour. 
+ Lifetime *duration.Duration `protobuf:"bytes,7,opt,name=lifetime,proto3" json:"lifetime,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateAccessTokenRequest) Reset() { *m = GenerateAccessTokenRequest{} } +func (m *GenerateAccessTokenRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateAccessTokenRequest) ProtoMessage() {} +func (*GenerateAccessTokenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{0} +} +func (m *GenerateAccessTokenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateAccessTokenRequest.Unmarshal(m, b) +} +func (m *GenerateAccessTokenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateAccessTokenRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateAccessTokenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateAccessTokenRequest.Merge(dst, src) +} +func (m *GenerateAccessTokenRequest) XXX_Size() int { + return xxx_messageInfo_GenerateAccessTokenRequest.Size(m) +} +func (m *GenerateAccessTokenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateAccessTokenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateAccessTokenRequest proto.InternalMessageInfo + +func (m *GenerateAccessTokenRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GenerateAccessTokenRequest) GetDelegates() []string { + if m != nil { + return m.Delegates + } + return nil +} + +func (m *GenerateAccessTokenRequest) GetScope() []string { + if m != nil { + return m.Scope + } + return nil +} + +func (m *GenerateAccessTokenRequest) GetLifetime() *duration.Duration { + if m != nil { + return m.Lifetime + } + return nil +} + +type GenerateAccessTokenResponse struct { + // The OAuth 2.0 access token. + AccessToken string `protobuf:"bytes,1,opt,name=access_token,json=accessToken,proto3" json:"access_token,omitempty"` + // Token expiration time. + // The expiration time is always set. 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateAccessTokenResponse) Reset() { *m = GenerateAccessTokenResponse{} } +func (m *GenerateAccessTokenResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateAccessTokenResponse) ProtoMessage() {} +func (*GenerateAccessTokenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{1} +} +func (m *GenerateAccessTokenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateAccessTokenResponse.Unmarshal(m, b) +} +func (m *GenerateAccessTokenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateAccessTokenResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateAccessTokenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateAccessTokenResponse.Merge(dst, src) +} +func (m *GenerateAccessTokenResponse) XXX_Size() int { + return xxx_messageInfo_GenerateAccessTokenResponse.Size(m) +} +func (m *GenerateAccessTokenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateAccessTokenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateAccessTokenResponse proto.InternalMessageInfo + +func (m *GenerateAccessTokenResponse) GetAccessToken() string { + if m != nil { + return m.AccessToken + } + return "" +} + +func (m *GenerateAccessTokenResponse) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +type SignBlobRequest struct { + // The resource name of the service account for which the credentials + // are requested, in the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The sequence of service accounts in a delegation chain. Each service + // account must be granted the `roles/iam.serviceAccountTokenCreator` role + // on its next service account in the chain. The last service account in the + // chain must be granted the `roles/iam.serviceAccountTokenCreator` role + // on the service account that is specified in the `name` field of the + // request. + // + // The delegates must have the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}` + Delegates []string `protobuf:"bytes,3,rep,name=delegates,proto3" json:"delegates,omitempty"` + // The bytes to sign. 
+ Payload []byte `protobuf:"bytes,5,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignBlobRequest) Reset() { *m = SignBlobRequest{} } +func (m *SignBlobRequest) String() string { return proto.CompactTextString(m) } +func (*SignBlobRequest) ProtoMessage() {} +func (*SignBlobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{2} +} +func (m *SignBlobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignBlobRequest.Unmarshal(m, b) +} +func (m *SignBlobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignBlobRequest.Marshal(b, m, deterministic) +} +func (dst *SignBlobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignBlobRequest.Merge(dst, src) +} +func (m *SignBlobRequest) XXX_Size() int { + return xxx_messageInfo_SignBlobRequest.Size(m) +} +func (m *SignBlobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SignBlobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SignBlobRequest proto.InternalMessageInfo + +func (m *SignBlobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SignBlobRequest) GetDelegates() []string { + if m != nil { + return m.Delegates + } + return nil +} + +func (m *SignBlobRequest) GetPayload() []byte { + if m != nil { + return m.Payload + } + return nil +} + +type SignBlobResponse struct { + // The ID of the key used to sign the blob. + KeyId string `protobuf:"bytes,1,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The signed blob. + SignedBlob []byte `protobuf:"bytes,4,opt,name=signed_blob,json=signedBlob,proto3" json:"signed_blob,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignBlobResponse) Reset() { *m = SignBlobResponse{} } +func (m *SignBlobResponse) String() string { return proto.CompactTextString(m) } +func (*SignBlobResponse) ProtoMessage() {} +func (*SignBlobResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{3} +} +func (m *SignBlobResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignBlobResponse.Unmarshal(m, b) +} +func (m *SignBlobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignBlobResponse.Marshal(b, m, deterministic) +} +func (dst *SignBlobResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignBlobResponse.Merge(dst, src) +} +func (m *SignBlobResponse) XXX_Size() int { + return xxx_messageInfo_SignBlobResponse.Size(m) +} +func (m *SignBlobResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SignBlobResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SignBlobResponse proto.InternalMessageInfo + +func (m *SignBlobResponse) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *SignBlobResponse) GetSignedBlob() []byte { + if m != nil { + return m.SignedBlob + } + return nil +} + +type SignJwtRequest struct { + // The resource name of the service account for which the credentials + // are requested, in the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The sequence of service accounts in a delegation chain. 
Each service + // account must be granted the `roles/iam.serviceAccountTokenCreator` role + // on its next service account in the chain. The last service account in the + // chain must be granted the `roles/iam.serviceAccountTokenCreator` role + // on the service account that is specified in the `name` field of the + // request. + // + // The delegates must have the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}` + Delegates []string `protobuf:"bytes,3,rep,name=delegates,proto3" json:"delegates,omitempty"` + // The JWT payload to sign: a JSON object that contains a JWT Claims Set. + Payload string `protobuf:"bytes,5,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignJwtRequest) Reset() { *m = SignJwtRequest{} } +func (m *SignJwtRequest) String() string { return proto.CompactTextString(m) } +func (*SignJwtRequest) ProtoMessage() {} +func (*SignJwtRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{4} +} +func (m *SignJwtRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignJwtRequest.Unmarshal(m, b) +} +func (m *SignJwtRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignJwtRequest.Marshal(b, m, deterministic) +} +func (dst *SignJwtRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignJwtRequest.Merge(dst, src) +} +func (m *SignJwtRequest) XXX_Size() int { + return xxx_messageInfo_SignJwtRequest.Size(m) +} +func (m *SignJwtRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SignJwtRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SignJwtRequest proto.InternalMessageInfo + +func (m *SignJwtRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SignJwtRequest) GetDelegates() []string { + if m != nil { + return m.Delegates + } + return nil +} + +func (m *SignJwtRequest) GetPayload() string { + if m != nil { + return m.Payload + } + return "" +} + +type SignJwtResponse struct { + // The ID of the key used to sign the JWT. + KeyId string `protobuf:"bytes,1,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` + // The signed JWT. 
+ SignedJwt string `protobuf:"bytes,2,opt,name=signed_jwt,json=signedJwt,proto3" json:"signed_jwt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SignJwtResponse) Reset() { *m = SignJwtResponse{} } +func (m *SignJwtResponse) String() string { return proto.CompactTextString(m) } +func (*SignJwtResponse) ProtoMessage() {} +func (*SignJwtResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{5} +} +func (m *SignJwtResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SignJwtResponse.Unmarshal(m, b) +} +func (m *SignJwtResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SignJwtResponse.Marshal(b, m, deterministic) +} +func (dst *SignJwtResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SignJwtResponse.Merge(dst, src) +} +func (m *SignJwtResponse) XXX_Size() int { + return xxx_messageInfo_SignJwtResponse.Size(m) +} +func (m *SignJwtResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SignJwtResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SignJwtResponse proto.InternalMessageInfo + +func (m *SignJwtResponse) GetKeyId() string { + if m != nil { + return m.KeyId + } + return "" +} + +func (m *SignJwtResponse) GetSignedJwt() string { + if m != nil { + return m.SignedJwt + } + return "" +} + +type GenerateIdTokenRequest struct { + // The resource name of the service account for which the credentials + // are requested, in the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The sequence of service accounts in a delegation chain. Each service + // account must be granted the `roles/iam.serviceAccountTokenCreator` role + // on its next service account in the chain. The last service account in the + // chain must be granted the `roles/iam.serviceAccountTokenCreator` role + // on the service account that is specified in the `name` field of the + // request. + // + // The delegates must have the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}` + Delegates []string `protobuf:"bytes,2,rep,name=delegates,proto3" json:"delegates,omitempty"` + // The audience for the token, such as the API or account that this token + // grants access to. + Audience string `protobuf:"bytes,3,opt,name=audience,proto3" json:"audience,omitempty"` + // Include the service account email in the token. If set to `true`, the + // token will contain `email` and `email_verified` claims. 
+ IncludeEmail bool `protobuf:"varint,4,opt,name=include_email,json=includeEmail,proto3" json:"include_email,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateIdTokenRequest) Reset() { *m = GenerateIdTokenRequest{} } +func (m *GenerateIdTokenRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateIdTokenRequest) ProtoMessage() {} +func (*GenerateIdTokenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{6} +} +func (m *GenerateIdTokenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateIdTokenRequest.Unmarshal(m, b) +} +func (m *GenerateIdTokenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateIdTokenRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateIdTokenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateIdTokenRequest.Merge(dst, src) +} +func (m *GenerateIdTokenRequest) XXX_Size() int { + return xxx_messageInfo_GenerateIdTokenRequest.Size(m) +} +func (m *GenerateIdTokenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateIdTokenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateIdTokenRequest proto.InternalMessageInfo + +func (m *GenerateIdTokenRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GenerateIdTokenRequest) GetDelegates() []string { + if m != nil { + return m.Delegates + } + return nil +} + +func (m *GenerateIdTokenRequest) GetAudience() string { + if m != nil { + return m.Audience + } + return "" +} + +func (m *GenerateIdTokenRequest) GetIncludeEmail() bool { + if m != nil { + return m.IncludeEmail + } + return false +} + +type GenerateIdTokenResponse struct { + // The OpenId Connect ID token. + Token string `protobuf:"bytes,1,opt,name=token,proto3" json:"token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateIdTokenResponse) Reset() { *m = GenerateIdTokenResponse{} } +func (m *GenerateIdTokenResponse) String() string { return proto.CompactTextString(m) } +func (*GenerateIdTokenResponse) ProtoMessage() {} +func (*GenerateIdTokenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{7} +} +func (m *GenerateIdTokenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateIdTokenResponse.Unmarshal(m, b) +} +func (m *GenerateIdTokenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateIdTokenResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateIdTokenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateIdTokenResponse.Merge(dst, src) +} +func (m *GenerateIdTokenResponse) XXX_Size() int { + return xxx_messageInfo_GenerateIdTokenResponse.Size(m) +} +func (m *GenerateIdTokenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateIdTokenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateIdTokenResponse proto.InternalMessageInfo + +func (m *GenerateIdTokenResponse) GetToken() string { + if m != nil { + return m.Token + } + return "" +} + +type GenerateIdentityBindingAccessTokenRequest struct { + // The resource name of the service account for which the credentials + // are requested, in the following format: + // `projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Code to identify the scopes to be included in the OAuth 2.0 access token. + // See https://developers.google.com/identity/protocols/googlescopes for more + // information. + // At least one value required. + Scope []string `protobuf:"bytes,2,rep,name=scope,proto3" json:"scope,omitempty"` + // Required. Input token. + // Must be in JWT format according to + // RFC7523 (https://tools.ietf.org/html/rfc7523) + // and must have 'kid' field in the header. + // Supported signing algorithms: RS256 (RS512, ES256, ES512 coming soon). + // Mandatory payload fields (along the lines of RFC 7523, section 3): + // - iss: issuer of the token. Must provide a discovery document at + // $iss/.well-known/openid-configuration . The document needs to be + // formatted according to section 4.2 of the OpenID Connect Discovery + // 1.0 specification. + // - iat: Issue time in seconds since epoch. Must be in the past. + // - exp: Expiration time in seconds since epoch. Must be less than 48 hours + // after iat. We recommend creating tokens that last shorter than 6 + // hours to improve security unless business reasons mandate longer + // expiration times. Shorter token lifetimes are generally more secure + // since tokens that have been exfiltrated by attackers can be used for + // a shorter time. You can configure the maximum lifetime of the + // incoming token in the configuration of the mapper. + // The resulting Google token will expire within an hour or at "exp", + // whichever is earlier. + // - sub: JWT subject, identity asserted in the JWT. + // - aud: Configured in the mapper policy. By default the service account + // email. + // + // Claims from the incoming token can be transferred into the output token + // according to the mapper configuration. The outgoing claim size is limited. + // Outgoing claims size must be less than 4kB serialized as JSON without + // whitespace. 
+ // + // Example header: + // { + // "alg": "RS256", + // "kid": "92a4265e14ab04d4d228a48d10d4ca31610936f8" + // } + // Example payload: + // { + // "iss": "https://accounts.google.com", + // "iat": 1517963104, + // "exp": 1517966704, + // "aud": "https://iamcredentials.googleapis.com/", + // "sub": "113475438248934895348", + // "my_claims": { + // "additional_claim": "value" + // } + // } + Jwt string `protobuf:"bytes,3,opt,name=jwt,proto3" json:"jwt,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateIdentityBindingAccessTokenRequest) Reset() { + *m = GenerateIdentityBindingAccessTokenRequest{} +} +func (m *GenerateIdentityBindingAccessTokenRequest) String() string { return proto.CompactTextString(m) } +func (*GenerateIdentityBindingAccessTokenRequest) ProtoMessage() {} +func (*GenerateIdentityBindingAccessTokenRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{8} +} +func (m *GenerateIdentityBindingAccessTokenRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest.Unmarshal(m, b) +} +func (m *GenerateIdentityBindingAccessTokenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest.Marshal(b, m, deterministic) +} +func (dst *GenerateIdentityBindingAccessTokenRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest.Merge(dst, src) +} +func (m *GenerateIdentityBindingAccessTokenRequest) XXX_Size() int { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest.Size(m) +} +func (m *GenerateIdentityBindingAccessTokenRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateIdentityBindingAccessTokenRequest proto.InternalMessageInfo + +func (m *GenerateIdentityBindingAccessTokenRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GenerateIdentityBindingAccessTokenRequest) GetScope() []string { + if m != nil { + return m.Scope + } + return nil +} + +func (m *GenerateIdentityBindingAccessTokenRequest) GetJwt() string { + if m != nil { + return m.Jwt + } + return "" +} + +type GenerateIdentityBindingAccessTokenResponse struct { + // The OAuth 2.0 access token. + AccessToken string `protobuf:"bytes,1,opt,name=access_token,json=accessToken,proto3" json:"access_token,omitempty"` + // Token expiration time. + // The expiration time is always set. 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GenerateIdentityBindingAccessTokenResponse) Reset() { + *m = GenerateIdentityBindingAccessTokenResponse{} +} +func (m *GenerateIdentityBindingAccessTokenResponse) String() string { + return proto.CompactTextString(m) +} +func (*GenerateIdentityBindingAccessTokenResponse) ProtoMessage() {} +func (*GenerateIdentityBindingAccessTokenResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_common_f41ae9f35d063a8f, []int{9} +} +func (m *GenerateIdentityBindingAccessTokenResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse.Unmarshal(m, b) +} +func (m *GenerateIdentityBindingAccessTokenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse.Marshal(b, m, deterministic) +} +func (dst *GenerateIdentityBindingAccessTokenResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse.Merge(dst, src) +} +func (m *GenerateIdentityBindingAccessTokenResponse) XXX_Size() int { + return xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse.Size(m) +} +func (m *GenerateIdentityBindingAccessTokenResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GenerateIdentityBindingAccessTokenResponse proto.InternalMessageInfo + +func (m *GenerateIdentityBindingAccessTokenResponse) GetAccessToken() string { + if m != nil { + return m.AccessToken + } + return "" +} + +func (m *GenerateIdentityBindingAccessTokenResponse) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func init() { + proto.RegisterType((*GenerateAccessTokenRequest)(nil), "google.iam.credentials.v1.GenerateAccessTokenRequest") + proto.RegisterType((*GenerateAccessTokenResponse)(nil), "google.iam.credentials.v1.GenerateAccessTokenResponse") + proto.RegisterType((*SignBlobRequest)(nil), "google.iam.credentials.v1.SignBlobRequest") + proto.RegisterType((*SignBlobResponse)(nil), "google.iam.credentials.v1.SignBlobResponse") + proto.RegisterType((*SignJwtRequest)(nil), "google.iam.credentials.v1.SignJwtRequest") + proto.RegisterType((*SignJwtResponse)(nil), "google.iam.credentials.v1.SignJwtResponse") + proto.RegisterType((*GenerateIdTokenRequest)(nil), "google.iam.credentials.v1.GenerateIdTokenRequest") + proto.RegisterType((*GenerateIdTokenResponse)(nil), "google.iam.credentials.v1.GenerateIdTokenResponse") + proto.RegisterType((*GenerateIdentityBindingAccessTokenRequest)(nil), "google.iam.credentials.v1.GenerateIdentityBindingAccessTokenRequest") + proto.RegisterType((*GenerateIdentityBindingAccessTokenResponse)(nil), "google.iam.credentials.v1.GenerateIdentityBindingAccessTokenResponse") +} + +func init() { + proto.RegisterFile("google/iam/credentials/v1/common.proto", fileDescriptor_common_f41ae9f35d063a8f) +} + +var fileDescriptor_common_f41ae9f35d063a8f = []byte{ + // 563 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0x96, 0x93, 0xa6, 0x8d, 0x27, 0x01, 0x2a, 0xab, 0x80, 0x13, 0x7e, 0x1a, 0x5c, 0x09, 0x05, + 0x0e, 0xb6, 0x0a, 0xe2, 0xd4, 0x53, 0xd3, 0xa2, 0x2a, 0x91, 0x90, 0x2a, 0xd3, 
0x13, 0x02, 0x59, + 0x1b, 0xef, 0xd4, 0x5a, 0x62, 0xef, 0x1a, 0x7b, 0xdd, 0x90, 0x03, 0x4f, 0x50, 0xde, 0x80, 0x17, + 0xe5, 0x88, 0xbc, 0xb6, 0xe3, 0xa8, 0x8d, 0x50, 0xf8, 0xb9, 0xed, 0x7c, 0xf3, 0xcd, 0xcc, 0x37, + 0xb3, 0x3b, 0x0b, 0xcf, 0x03, 0x21, 0x82, 0x10, 0x1d, 0x46, 0x22, 0xc7, 0x4f, 0x90, 0x22, 0x97, + 0x8c, 0x84, 0xa9, 0x73, 0x75, 0xe8, 0xf8, 0x22, 0x8a, 0x04, 0xb7, 0xe3, 0x44, 0x48, 0x61, 0xf4, + 0x0a, 0x9e, 0xcd, 0x48, 0x64, 0xaf, 0xf0, 0xec, 0xab, 0xc3, 0xfe, 0xd3, 0x32, 0x85, 0x22, 0x4e, + 0xb3, 0x4b, 0x87, 0x66, 0x09, 0x91, 0xac, 0x0a, 0xed, 0xef, 0xdf, 0xf4, 0x4b, 0x16, 0x61, 0x2a, + 0x49, 0x14, 0x17, 0x04, 0xeb, 0x87, 0x06, 0xfd, 0x33, 0xe4, 0x98, 0x10, 0x89, 0xc7, 0xbe, 0x8f, + 0x69, 0x7a, 0x21, 0x66, 0xc8, 0x5d, 0xfc, 0x92, 0x61, 0x2a, 0x0d, 0x03, 0xb6, 0x38, 0x89, 0xd0, + 0xd4, 0x06, 0xda, 0x50, 0x77, 0xd5, 0xd9, 0x78, 0x0c, 0x3a, 0xc5, 0x10, 0x03, 0x22, 0x31, 0x35, + 0x1b, 0x83, 0xe6, 0x50, 0x77, 0x6b, 0xc0, 0xd8, 0x83, 0x56, 0xea, 0x8b, 0x18, 0xcd, 0x2d, 0xe5, + 0x29, 0x0c, 0xe3, 0x0d, 0xb4, 0x43, 0x76, 0x89, 0x79, 0x75, 0x73, 0x67, 0xa0, 0x0d, 0x3b, 0xaf, + 0x7a, 0x76, 0xd9, 0x55, 0x25, 0xcd, 0x3e, 0x2d, 0xa5, 0xbb, 0x4b, 0xaa, 0xf5, 0x0d, 0x1e, 0xad, + 0x15, 0x97, 0xc6, 0x82, 0xa7, 0x68, 0x3c, 0x83, 0x2e, 0x51, 0xb0, 0x27, 0x73, 0xbc, 0x54, 0xd9, + 0x21, 0x35, 0xd5, 0x38, 0x82, 0x0e, 0x7e, 0x8d, 0x59, 0x82, 0x9e, 0xaa, 0xdd, 0x54, 0xb5, 0xfb, + 0xb7, 0x6a, 0x5f, 0x54, 0x63, 0x71, 0xa1, 0xa0, 0xe7, 0x80, 0xf5, 0x09, 0xee, 0xbd, 0x67, 0x01, + 0x1f, 0x85, 0x62, 0xba, 0xf1, 0x40, 0x9a, 0x37, 0x07, 0x62, 0xc2, 0x4e, 0x4c, 0x16, 0xa1, 0x20, + 0xd4, 0x6c, 0x0d, 0xb4, 0x61, 0xd7, 0xad, 0x4c, 0x6b, 0x02, 0xbb, 0x75, 0xfa, 0xb2, 0xa5, 0xfb, + 0xb0, 0x3d, 0xc3, 0x85, 0xc7, 0x68, 0x59, 0xa1, 0x35, 0xc3, 0xc5, 0x98, 0x1a, 0xfb, 0xd0, 0x49, + 0x59, 0xc0, 0x91, 0x7a, 0xd3, 0x50, 0x4c, 0xcd, 0x2d, 0x95, 0x08, 0x0a, 0x28, 0x8f, 0xb7, 0x3e, + 0xc2, 0xdd, 0x3c, 0xd7, 0x64, 0x2e, 0xff, 0x9b, 0x52, 0xbd, 0x56, 0x7a, 0x56, 0x0c, 0x42, 0x65, + 0xff, 0xbd, 0xd0, 0x27, 0x50, 0xaa, 0xf2, 0x3e, 0xcf, 0xa5, 0xd9, 0x50, 0x2e, 0xbd, 0x40, 0x26, + 0x73, 0x69, 0x5d, 0x6b, 0xf0, 0xa0, 0xba, 0xd1, 0x31, 0xfd, 0xc7, 0xa7, 0xd6, 0x87, 0x36, 0xc9, + 0x28, 0x43, 0xee, 0x17, 0x17, 0xab, 0xbb, 0x4b, 0xdb, 0x38, 0x80, 0x3b, 0x8c, 0xfb, 0x61, 0x46, + 0xd1, 0xc3, 0x88, 0xb0, 0x50, 0x8d, 0xac, 0xed, 0x76, 0x4b, 0xf0, 0x6d, 0x8e, 0x59, 0x0e, 0x3c, + 0xbc, 0x25, 0xa6, 0x6c, 0x6f, 0x0f, 0x5a, 0xab, 0x6f, 0xaa, 0x30, 0xac, 0x00, 0x5e, 0xd4, 0x01, + 0xf9, 0x1a, 0xca, 0xc5, 0x88, 0x71, 0xca, 0x78, 0xb0, 0xe1, 0xee, 0x2c, 0xb7, 0xa3, 0xb1, 0xba, + 0x1d, 0xbb, 0xd0, 0xcc, 0xa7, 0x55, 0xf4, 0x90, 0x1f, 0xad, 0xef, 0x1a, 0xbc, 0xdc, 0xa4, 0xd2, + 0x5f, 0x2f, 0x42, 0xe3, 0x4f, 0x16, 0x61, 0x74, 0xad, 0xc1, 0x81, 0x2f, 0xa2, 0x8a, 0xed, 0x87, + 0x22, 0xa3, 0x6b, 0xbe, 0xa3, 0x51, 0x6f, 0x7c, 0xfc, 0xee, 0xa4, 0x86, 0x4e, 0xd4, 0x2f, 0x76, + 0x9e, 0xe7, 0x3e, 0xd7, 0x3e, 0x9c, 0x96, 0xd1, 0x81, 0x08, 0x09, 0x0f, 0x6c, 0x91, 0x04, 0x4e, + 0x80, 0x5c, 0x55, 0x76, 0x0a, 0x17, 0x89, 0x59, 0xba, 0xe6, 0x37, 0x3c, 0x5a, 0x31, 0x7f, 0x6a, + 0xda, 0x74, 0x5b, 0xc5, 0xbc, 0xfe, 0x15, 0x00, 0x00, 0xff, 0xff, 0x97, 0xa7, 0x4c, 0xe2, 0x40, + 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/iamcredentials.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/iamcredentials.pb.go new file mode 100644 index 0000000..c0ab0f4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/credentials/v1/iamcredentials.pb.go @@ -0,0 +1,277 @@ +// Code generated by 
protoc-gen-go. DO NOT EDIT. +// source: google/iam/credentials/v1/iamcredentials.proto + +package credentials // import "google.golang.org/genproto/googleapis/iam/credentials/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// IAMCredentialsClient is the client API for IAMCredentials service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type IAMCredentialsClient interface { + // Generates an OAuth 2.0 access token for a service account. + GenerateAccessToken(ctx context.Context, in *GenerateAccessTokenRequest, opts ...grpc.CallOption) (*GenerateAccessTokenResponse, error) + // Generates an OpenID Connect ID token for a service account. + GenerateIdToken(ctx context.Context, in *GenerateIdTokenRequest, opts ...grpc.CallOption) (*GenerateIdTokenResponse, error) + // Signs a blob using a service account's system-managed private key. + SignBlob(ctx context.Context, in *SignBlobRequest, opts ...grpc.CallOption) (*SignBlobResponse, error) + // Signs a JWT using a service account's system-managed private key. + SignJwt(ctx context.Context, in *SignJwtRequest, opts ...grpc.CallOption) (*SignJwtResponse, error) + // Exchange a JWT signed by third party identity provider to an OAuth 2.0 + // access token + GenerateIdentityBindingAccessToken(ctx context.Context, in *GenerateIdentityBindingAccessTokenRequest, opts ...grpc.CallOption) (*GenerateIdentityBindingAccessTokenResponse, error) +} + +type iAMCredentialsClient struct { + cc *grpc.ClientConn +} + +func NewIAMCredentialsClient(cc *grpc.ClientConn) IAMCredentialsClient { + return &iAMCredentialsClient{cc} +} + +func (c *iAMCredentialsClient) GenerateAccessToken(ctx context.Context, in *GenerateAccessTokenRequest, opts ...grpc.CallOption) (*GenerateAccessTokenResponse, error) { + out := new(GenerateAccessTokenResponse) + err := c.cc.Invoke(ctx, "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMCredentialsClient) GenerateIdToken(ctx context.Context, in *GenerateIdTokenRequest, opts ...grpc.CallOption) (*GenerateIdTokenResponse, error) { + out := new(GenerateIdTokenResponse) + err := c.cc.Invoke(ctx, "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMCredentialsClient) SignBlob(ctx context.Context, in *SignBlobRequest, opts ...grpc.CallOption) (*SignBlobResponse, error) { + out := new(SignBlobResponse) + err := c.cc.Invoke(ctx, "/google.iam.credentials.v1.IAMCredentials/SignBlob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMCredentialsClient) SignJwt(ctx context.Context, in *SignJwtRequest, opts ...grpc.CallOption) (*SignJwtResponse, error) { + out := new(SignJwtResponse) + err := c.cc.Invoke(ctx, "/google.iam.credentials.v1.IAMCredentials/SignJwt", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMCredentialsClient) GenerateIdentityBindingAccessToken(ctx context.Context, in *GenerateIdentityBindingAccessTokenRequest, opts ...grpc.CallOption) (*GenerateIdentityBindingAccessTokenResponse, error) { + out := new(GenerateIdentityBindingAccessTokenResponse) + err := c.cc.Invoke(ctx, "/google.iam.credentials.v1.IAMCredentials/GenerateIdentityBindingAccessToken", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// IAMCredentialsServer is the server API for IAMCredentials service. +type IAMCredentialsServer interface { + // Generates an OAuth 2.0 access token for a service account. + GenerateAccessToken(context.Context, *GenerateAccessTokenRequest) (*GenerateAccessTokenResponse, error) + // Generates an OpenID Connect ID token for a service account. + GenerateIdToken(context.Context, *GenerateIdTokenRequest) (*GenerateIdTokenResponse, error) + // Signs a blob using a service account's system-managed private key. + SignBlob(context.Context, *SignBlobRequest) (*SignBlobResponse, error) + // Signs a JWT using a service account's system-managed private key. 
+ SignJwt(context.Context, *SignJwtRequest) (*SignJwtResponse, error) + // Exchange a JWT signed by third party identity provider to an OAuth 2.0 + // access token + GenerateIdentityBindingAccessToken(context.Context, *GenerateIdentityBindingAccessTokenRequest) (*GenerateIdentityBindingAccessTokenResponse, error) +} + +func RegisterIAMCredentialsServer(s *grpc.Server, srv IAMCredentialsServer) { + s.RegisterService(&_IAMCredentials_serviceDesc, srv) +} + +func _IAMCredentials_GenerateAccessToken_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateAccessTokenRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMCredentialsServer).GenerateAccessToken(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMCredentialsServer).GenerateAccessToken(ctx, req.(*GenerateAccessTokenRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMCredentials_GenerateIdToken_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateIdTokenRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMCredentialsServer).GenerateIdToken(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.credentials.v1.IAMCredentials/GenerateIdToken", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMCredentialsServer).GenerateIdToken(ctx, req.(*GenerateIdTokenRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMCredentials_SignBlob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignBlobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMCredentialsServer).SignBlob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.credentials.v1.IAMCredentials/SignBlob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMCredentialsServer).SignBlob(ctx, req.(*SignBlobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMCredentials_SignJwt_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SignJwtRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMCredentialsServer).SignJwt(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.credentials.v1.IAMCredentials/SignJwt", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMCredentialsServer).SignJwt(ctx, req.(*SignJwtRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMCredentials_GenerateIdentityBindingAccessToken_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GenerateIdentityBindingAccessTokenRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == 
nil { + return srv.(IAMCredentialsServer).GenerateIdentityBindingAccessToken(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.credentials.v1.IAMCredentials/GenerateIdentityBindingAccessToken", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMCredentialsServer).GenerateIdentityBindingAccessToken(ctx, req.(*GenerateIdentityBindingAccessTokenRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _IAMCredentials_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.iam.credentials.v1.IAMCredentials", + HandlerType: (*IAMCredentialsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GenerateAccessToken", + Handler: _IAMCredentials_GenerateAccessToken_Handler, + }, + { + MethodName: "GenerateIdToken", + Handler: _IAMCredentials_GenerateIdToken_Handler, + }, + { + MethodName: "SignBlob", + Handler: _IAMCredentials_SignBlob_Handler, + }, + { + MethodName: "SignJwt", + Handler: _IAMCredentials_SignJwt_Handler, + }, + { + MethodName: "GenerateIdentityBindingAccessToken", + Handler: _IAMCredentials_GenerateIdentityBindingAccessToken_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/iam/credentials/v1/iamcredentials.proto", +} + +func init() { + proto.RegisterFile("google/iam/credentials/v1/iamcredentials.proto", fileDescriptor_iamcredentials_03d0b597c179fd49) +} + +var fileDescriptor_iamcredentials_03d0b597c179fd49 = []byte{ + // 443 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x94, 0xcf, 0xaa, 0xd4, 0x30, + 0x18, 0xc5, 0xc9, 0x5d, 0x78, 0x25, 0x0b, 0x85, 0xde, 0x95, 0x83, 0xab, 0x0a, 0x82, 0x15, 0x1a, + 0x67, 0x74, 0x14, 0x3a, 0x0a, 0x4e, 0x1d, 0x95, 0x29, 0x88, 0x83, 0x7f, 0x36, 0xee, 0x32, 0x69, + 0x08, 0xd1, 0x36, 0x5f, 0x6d, 0x32, 0x33, 0x88, 0xb8, 0x11, 0x04, 0xf7, 0x6e, 0x5d, 0xf8, 0x20, + 0x3e, 0x82, 0x3b, 0x5f, 0xc1, 0x9d, 0x2f, 0xe0, 0x52, 0xd2, 0xa6, 0x4c, 0xc5, 0xa9, 0xb6, 0xdc, + 0x65, 0xd3, 0x73, 0xce, 0xf7, 0x3b, 0x24, 0x7c, 0x38, 0x14, 0x00, 0x22, 0xe3, 0x44, 0xd2, 0x9c, + 0xb0, 0x92, 0xa7, 0x5c, 0x19, 0x49, 0x33, 0x4d, 0xb6, 0x63, 0x7b, 0xd4, 0x3a, 0x09, 0x8b, 0x12, + 0x0c, 0x78, 0x17, 0x6a, 0x7d, 0x28, 0x69, 0x1e, 0xb6, 0xff, 0x6e, 0xc7, 0xa3, 0x8b, 0x2e, 0x8a, + 0x16, 0x92, 0x50, 0xa5, 0xc0, 0x50, 0x23, 0x41, 0x39, 0xe3, 0xe8, 0x72, 0xf7, 0x20, 0x06, 0x79, + 0x0e, 0xaa, 0xd6, 0x4d, 0x7e, 0x1e, 0xe3, 0x73, 0xcb, 0xf9, 0xa3, 0x7b, 0x7b, 0x89, 0xf7, 0x0d, + 0xe1, 0x93, 0x87, 0x5c, 0xf1, 0x92, 0x1a, 0x3e, 0x67, 0x8c, 0x6b, 0xfd, 0x0c, 0x5e, 0x71, 0xe5, + 0x4d, 0xc3, 0x4e, 0x98, 0xf0, 0x80, 0xfe, 0x09, 0x7f, 0xbd, 0xe1, 0xda, 0x8c, 0x6e, 0x0e, 0xb5, + 0xe9, 0x02, 0x94, 0xe6, 0xfe, 0x83, 0xf7, 0xdf, 0x7f, 0x7c, 0x3a, 0xba, 0xeb, 0xcf, 0x2c, 0xf3, + 0x5b, 0x45, 0x73, 0x7e, 0xa7, 0x28, 0xe1, 0x25, 0x67, 0x46, 0x93, 0x80, 0x68, 0x5e, 0x6e, 0x25, + 0xb3, 0x46, 0xd8, 0x28, 0x7b, 0xf2, 0x2e, 0x12, 0x7f, 0x87, 0x45, 0x28, 0xf0, 0xbe, 0x22, 0x7c, + 0xbe, 0x99, 0xb3, 0x4c, 0xeb, 0x2a, 0xe3, 0x1e, 0x4c, 0x4e, 0xdb, 0xd4, 0x98, 0x0c, 0xb1, 0xb8, + 0x0a, 0x71, 0x55, 0xe1, 0xb6, 0x7f, 0x6b, 0x68, 0x05, 0x17, 0x64, 0xf1, 0xbf, 0x20, 0x7c, 0xf6, + 0xa9, 0x14, 0x2a, 0xce, 0x60, 0xed, 0x05, 0xff, 0x80, 0x68, 0x44, 0x0d, 0xf0, 0xd5, 0x5e, 0x5a, + 0x47, 0x3a, 0xab, 0x48, 0xa7, 0xfe, 0xb5, 0xbe, 0xa4, 0xda, 0x25, 0x58, 0xc4, 0xcf, 0x08, 0x1f, + 0xdb, 0xc4, 0x64, 0x67, 0xbc, 0x2b, 0xff, 0x99, 0x9a, 0xec, 0x4c, 0x03, 0x18, 0xf4, 0x91, 0x3a, + 0xbe, 0xa8, 0xe2, 0xbb, 0xe1, 0x93, 0x21, 0x7c, 0xc9, 0xce, 0x58, 0xbc, 
0x8f, 0x47, 0xd8, 0xdf, + 0xdf, 0x90, 0x1d, 0x62, 0xde, 0xc4, 0x52, 0xa5, 0x52, 0x89, 0xf6, 0xf3, 0x5e, 0xf4, 0xba, 0xe0, + 0x2e, 0x7b, 0x53, 0xea, 0xfe, 0x29, 0x53, 0x5c, 0xdf, 0xe7, 0x55, 0xdf, 0xc7, 0x7e, 0x32, 0xfc, + 0xe5, 0x74, 0x65, 0x47, 0x28, 0x88, 0x3f, 0x20, 0x7c, 0x89, 0x41, 0xde, 0x30, 0xb2, 0x0c, 0x36, + 0xe9, 0x01, 0xd2, 0xf8, 0xe4, 0xcf, 0x95, 0xb0, 0xb2, 0xab, 0x62, 0x85, 0x5e, 0x2c, 0x9c, 0x4f, + 0x40, 0x46, 0x95, 0x08, 0xa1, 0x14, 0x44, 0x70, 0x55, 0x2d, 0x12, 0x52, 0xff, 0xa2, 0x85, 0xd4, + 0x07, 0x76, 0xce, 0xac, 0xf5, 0xf9, 0x0b, 0xa1, 0xf5, 0x99, 0xca, 0x73, 0xfd, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x2e, 0x2e, 0xa2, 0x0d, 0x0f, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/v1/iam_policy.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/v1/iam_policy.pb.go new file mode 100644 index 0000000..1207018 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/v1/iam_policy.pb.go @@ -0,0 +1,417 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/iam/v1/iam_policy.proto + +package iam // import "google.golang.org/genproto/googleapis/iam/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request message for `SetIamPolicy` method. +type SetIamPolicyRequest struct { + // REQUIRED: The resource for which the policy is being specified. + // See the operation documentation for the appropriate value for this field. + Resource string `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + // REQUIRED: The complete policy to be applied to the `resource`. The size of + // the policy is limited to a few 10s of KB. An empty policy is a + // valid policy but certain Cloud Platform services (such as Projects) + // might reject them. 
+ Policy *Policy `protobuf:"bytes,2,opt,name=policy,proto3" json:"policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SetIamPolicyRequest) Reset() { *m = SetIamPolicyRequest{} } +func (m *SetIamPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*SetIamPolicyRequest) ProtoMessage() {} +func (*SetIamPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_policy_b56603532236728f, []int{0} +} +func (m *SetIamPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SetIamPolicyRequest.Unmarshal(m, b) +} +func (m *SetIamPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SetIamPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *SetIamPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SetIamPolicyRequest.Merge(dst, src) +} +func (m *SetIamPolicyRequest) XXX_Size() int { + return xxx_messageInfo_SetIamPolicyRequest.Size(m) +} +func (m *SetIamPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SetIamPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SetIamPolicyRequest proto.InternalMessageInfo + +func (m *SetIamPolicyRequest) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +func (m *SetIamPolicyRequest) GetPolicy() *Policy { + if m != nil { + return m.Policy + } + return nil +} + +// Request message for `GetIamPolicy` method. +type GetIamPolicyRequest struct { + // REQUIRED: The resource for which the policy is being requested. + // See the operation documentation for the appropriate value for this field. + Resource string `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetIamPolicyRequest) Reset() { *m = GetIamPolicyRequest{} } +func (m *GetIamPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*GetIamPolicyRequest) ProtoMessage() {} +func (*GetIamPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_policy_b56603532236728f, []int{1} +} +func (m *GetIamPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetIamPolicyRequest.Unmarshal(m, b) +} +func (m *GetIamPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetIamPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *GetIamPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetIamPolicyRequest.Merge(dst, src) +} +func (m *GetIamPolicyRequest) XXX_Size() int { + return xxx_messageInfo_GetIamPolicyRequest.Size(m) +} +func (m *GetIamPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetIamPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetIamPolicyRequest proto.InternalMessageInfo + +func (m *GetIamPolicyRequest) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +// Request message for `TestIamPermissions` method. +type TestIamPermissionsRequest struct { + // REQUIRED: The resource for which the policy detail is being requested. + // See the operation documentation for the appropriate value for this field. + Resource string `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource,omitempty"` + // The set of permissions to check for the `resource`. Permissions with + // wildcards (such as '*' or 'storage.*') are not allowed. 
For more + // information see + // [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions). + Permissions []string `protobuf:"bytes,2,rep,name=permissions,proto3" json:"permissions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestIamPermissionsRequest) Reset() { *m = TestIamPermissionsRequest{} } +func (m *TestIamPermissionsRequest) String() string { return proto.CompactTextString(m) } +func (*TestIamPermissionsRequest) ProtoMessage() {} +func (*TestIamPermissionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_policy_b56603532236728f, []int{2} +} +func (m *TestIamPermissionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestIamPermissionsRequest.Unmarshal(m, b) +} +func (m *TestIamPermissionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestIamPermissionsRequest.Marshal(b, m, deterministic) +} +func (dst *TestIamPermissionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestIamPermissionsRequest.Merge(dst, src) +} +func (m *TestIamPermissionsRequest) XXX_Size() int { + return xxx_messageInfo_TestIamPermissionsRequest.Size(m) +} +func (m *TestIamPermissionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TestIamPermissionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TestIamPermissionsRequest proto.InternalMessageInfo + +func (m *TestIamPermissionsRequest) GetResource() string { + if m != nil { + return m.Resource + } + return "" +} + +func (m *TestIamPermissionsRequest) GetPermissions() []string { + if m != nil { + return m.Permissions + } + return nil +} + +// Response message for `TestIamPermissions` method. +type TestIamPermissionsResponse struct { + // A subset of `TestPermissionsRequest.permissions` that the caller is + // allowed. 
+ Permissions []string `protobuf:"bytes,1,rep,name=permissions,proto3" json:"permissions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TestIamPermissionsResponse) Reset() { *m = TestIamPermissionsResponse{} } +func (m *TestIamPermissionsResponse) String() string { return proto.CompactTextString(m) } +func (*TestIamPermissionsResponse) ProtoMessage() {} +func (*TestIamPermissionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_iam_policy_b56603532236728f, []int{3} +} +func (m *TestIamPermissionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TestIamPermissionsResponse.Unmarshal(m, b) +} +func (m *TestIamPermissionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TestIamPermissionsResponse.Marshal(b, m, deterministic) +} +func (dst *TestIamPermissionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TestIamPermissionsResponse.Merge(dst, src) +} +func (m *TestIamPermissionsResponse) XXX_Size() int { + return xxx_messageInfo_TestIamPermissionsResponse.Size(m) +} +func (m *TestIamPermissionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TestIamPermissionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TestIamPermissionsResponse proto.InternalMessageInfo + +func (m *TestIamPermissionsResponse) GetPermissions() []string { + if m != nil { + return m.Permissions + } + return nil +} + +func init() { + proto.RegisterType((*SetIamPolicyRequest)(nil), "google.iam.v1.SetIamPolicyRequest") + proto.RegisterType((*GetIamPolicyRequest)(nil), "google.iam.v1.GetIamPolicyRequest") + proto.RegisterType((*TestIamPermissionsRequest)(nil), "google.iam.v1.TestIamPermissionsRequest") + proto.RegisterType((*TestIamPermissionsResponse)(nil), "google.iam.v1.TestIamPermissionsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// IAMPolicyClient is the client API for IAMPolicy service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type IAMPolicyClient interface { + // Sets the access control policy on the specified resource. Replaces any + // existing policy. + SetIamPolicy(ctx context.Context, in *SetIamPolicyRequest, opts ...grpc.CallOption) (*Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(ctx context.Context, in *GetIamPolicyRequest, opts ...grpc.CallOption) (*Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. 
+ TestIamPermissions(ctx context.Context, in *TestIamPermissionsRequest, opts ...grpc.CallOption) (*TestIamPermissionsResponse, error) +} + +type iAMPolicyClient struct { + cc *grpc.ClientConn +} + +func NewIAMPolicyClient(cc *grpc.ClientConn) IAMPolicyClient { + return &iAMPolicyClient{cc} +} + +func (c *iAMPolicyClient) SetIamPolicy(ctx context.Context, in *SetIamPolicyRequest, opts ...grpc.CallOption) (*Policy, error) { + out := new(Policy) + err := c.cc.Invoke(ctx, "/google.iam.v1.IAMPolicy/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMPolicyClient) GetIamPolicy(ctx context.Context, in *GetIamPolicyRequest, opts ...grpc.CallOption) (*Policy, error) { + out := new(Policy) + err := c.cc.Invoke(ctx, "/google.iam.v1.IAMPolicy/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *iAMPolicyClient) TestIamPermissions(ctx context.Context, in *TestIamPermissionsRequest, opts ...grpc.CallOption) (*TestIamPermissionsResponse, error) { + out := new(TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.iam.v1.IAMPolicy/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// IAMPolicyServer is the server API for IAMPolicy service. +type IAMPolicyServer interface { + // Sets the access control policy on the specified resource. Replaces any + // existing policy. + SetIamPolicy(context.Context, *SetIamPolicyRequest) (*Policy, error) + // Gets the access control policy for a resource. + // Returns an empty policy if the resource exists and does not have a policy + // set. + GetIamPolicy(context.Context, *GetIamPolicyRequest) (*Policy, error) + // Returns permissions that a caller has on the specified resource. + // If the resource does not exist, this will return an empty set of + // permissions, not a NOT_FOUND error. + // + // Note: This operation is designed to be used for building permission-aware + // UIs and command-line tools, not for authorization checking. This operation + // may "fail open" without warning. 
+ TestIamPermissions(context.Context, *TestIamPermissionsRequest) (*TestIamPermissionsResponse, error) +} + +func RegisterIAMPolicyServer(s *grpc.Server, srv IAMPolicyServer) { + s.RegisterService(&_IAMPolicy_serviceDesc, srv) +} + +func _IAMPolicy_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMPolicyServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.v1.IAMPolicy/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMPolicyServer).SetIamPolicy(ctx, req.(*SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMPolicy_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMPolicyServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.v1.IAMPolicy/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMPolicyServer).GetIamPolicy(ctx, req.(*GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _IAMPolicy_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(IAMPolicyServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.iam.v1.IAMPolicy/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(IAMPolicyServer).TestIamPermissions(ctx, req.(*TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _IAMPolicy_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.iam.v1.IAMPolicy", + HandlerType: (*IAMPolicyServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SetIamPolicy", + Handler: _IAMPolicy_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _IAMPolicy_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _IAMPolicy_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/iam/v1/iam_policy.proto", +} + +func init() { + proto.RegisterFile("google/iam/v1/iam_policy.proto", fileDescriptor_iam_policy_b56603532236728f) +} + +var fileDescriptor_iam_policy_b56603532236728f = []byte{ + // 419 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0xcf, 0x4c, 0xcc, 0xd5, 0x2f, 0x33, 0x04, 0x51, 0xf1, 0x05, 0xf9, 0x39, 0x99, + 0xc9, 0x95, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, 0xbc, 0x10, 0x79, 0xbd, 0xcc, 0xc4, 0x5c, + 0xbd, 0x32, 0x43, 0x29, 0x49, 0xa8, 0xf2, 0xc4, 0x82, 0x4c, 0xfd, 0xa2, 0xd4, 0xe2, 0xfc, 0xd2, + 0xa2, 0xe4, 0x54, 0x88, 0x4a, 0x29, 0x29, 0x54, 0x93, 0x90, 0x4d, 0x91, 0x92, 0x41, 0xd2, 0x96, + 0x98, 0x97, 0x97, 0x5f, 0x92, 0x58, 0x92, 0x99, 0x9f, 0x57, 0x0c, 0x91, 0x55, 0x4a, 
0xe0, 0x12, + 0x0e, 0x4e, 0x2d, 0xf1, 0x4c, 0xcc, 0x0d, 0x00, 0xeb, 0x09, 0x4a, 0x2d, 0x2c, 0x4d, 0x2d, 0x2e, + 0x11, 0x92, 0xe2, 0xe2, 0x80, 0x59, 0x21, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x19, 0x04, 0xe7, 0x0b, + 0xe9, 0x72, 0xb1, 0x41, 0x2c, 0x90, 0x60, 0x52, 0x60, 0xd4, 0xe0, 0x36, 0x12, 0xd5, 0x43, 0x71, + 0xa7, 0x1e, 0xd4, 0x24, 0xa8, 0x22, 0x25, 0x43, 0x2e, 0x61, 0x77, 0xd2, 0x6c, 0x50, 0x8a, 0xe4, + 0x92, 0x0c, 0x49, 0x2d, 0x06, 0xeb, 0x49, 0x2d, 0xca, 0xcd, 0x2c, 0x2e, 0x06, 0x39, 0x98, 0x18, + 0xa7, 0x29, 0x70, 0x71, 0x17, 0x20, 0x74, 0x48, 0x30, 0x29, 0x30, 0x6b, 0x70, 0x06, 0x21, 0x0b, + 0x29, 0xd9, 0x71, 0x49, 0x61, 0x33, 0xba, 0xb8, 0x20, 0x3f, 0xaf, 0x18, 0x43, 0x3f, 0x23, 0x86, + 0x7e, 0xa3, 0x29, 0xcc, 0x5c, 0x9c, 0x9e, 0x8e, 0xbe, 0x10, 0xbf, 0x08, 0x95, 0x70, 0xf1, 0x20, + 0x87, 0x9e, 0x90, 0x12, 0x5a, 0x50, 0x60, 0x09, 0x5a, 0x29, 0xec, 0xc1, 0xa5, 0xa4, 0xd9, 0x74, + 0xf9, 0xc9, 0x64, 0x26, 0x65, 0x25, 0x39, 0x50, 0x04, 0x56, 0xc3, 0x7c, 0x64, 0xab, 0xa5, 0x55, + 0x6b, 0x55, 0x8c, 0x64, 0x8a, 0x15, 0xa3, 0x16, 0xc8, 0x56, 0x77, 0x7c, 0xb6, 0xba, 0x53, 0xc5, + 0xd6, 0x74, 0x34, 0x5b, 0x67, 0x31, 0x72, 0x09, 0x61, 0x06, 0x9d, 0x90, 0x06, 0x9a, 0xc1, 0x38, + 0x23, 0x4e, 0x4a, 0x93, 0x08, 0x95, 0x90, 0x78, 0x50, 0xd2, 0x07, 0x3b, 0x4b, 0x53, 0x49, 0x05, + 0xd3, 0x59, 0x25, 0x18, 0xba, 0xac, 0x18, 0xb5, 0x9c, 0xda, 0x18, 0xb9, 0x04, 0x93, 0xf3, 0x73, + 0x51, 0x6d, 0x70, 0xe2, 0x83, 0x7b, 0x20, 0x00, 0x94, 0xd8, 0x03, 0x18, 0xa3, 0x0c, 0xa0, 0x0a, + 0xd2, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0x59, + 0x41, 0x1f, 0x22, 0x95, 0x58, 0x90, 0x59, 0x0c, 0xcd, 0x47, 0xd6, 0x99, 0x89, 0xb9, 0x3f, 0x18, + 0x19, 0x57, 0x31, 0x09, 0xbb, 0x43, 0x74, 0x39, 0xe7, 0xe4, 0x97, 0xa6, 0xe8, 0x79, 0x26, 0xe6, + 0xea, 0x85, 0x19, 0x9e, 0x82, 0x89, 0xc6, 0x80, 0x45, 0x63, 0x3c, 0x13, 0x73, 0x63, 0xc2, 0x0c, + 0x93, 0xd8, 0xc0, 0x66, 0x19, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0xba, 0x05, 0xa3, 0xc3, 0xdc, + 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/v1/logging/audit_data.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/v1/logging/audit_data.pb.go new file mode 100644 index 0000000..a60eb17 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/v1/logging/audit_data.pb.go @@ -0,0 +1,90 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/iam/v1/logging/audit_data.proto + +package logging // import "google.golang.org/genproto/googleapis/iam/v1/logging" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Audit log information specific to Cloud IAM. This message is serialized +// as an `Any` type in the `ServiceData` message of an +// `AuditLog` message. +type AuditData struct { + // Policy delta between the original policy and the newly set policy. 
+ PolicyDelta *v1.PolicyDelta `protobuf:"bytes,2,opt,name=policy_delta,json=policyDelta,proto3" json:"policy_delta,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditData) Reset() { *m = AuditData{} } +func (m *AuditData) String() string { return proto.CompactTextString(m) } +func (*AuditData) ProtoMessage() {} +func (*AuditData) Descriptor() ([]byte, []int) { + return fileDescriptor_audit_data_25c24f9d71f1cfd7, []int{0} +} +func (m *AuditData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditData.Unmarshal(m, b) +} +func (m *AuditData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditData.Marshal(b, m, deterministic) +} +func (dst *AuditData) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditData.Merge(dst, src) +} +func (m *AuditData) XXX_Size() int { + return xxx_messageInfo_AuditData.Size(m) +} +func (m *AuditData) XXX_DiscardUnknown() { + xxx_messageInfo_AuditData.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditData proto.InternalMessageInfo + +func (m *AuditData) GetPolicyDelta() *v1.PolicyDelta { + if m != nil { + return m.PolicyDelta + } + return nil +} + +func init() { + proto.RegisterType((*AuditData)(nil), "google.iam.v1.logging.AuditData") +} + +func init() { + proto.RegisterFile("google/iam/v1/logging/audit_data.proto", fileDescriptor_audit_data_25c24f9d71f1cfd7) +} + +var fileDescriptor_audit_data_25c24f9d71f1cfd7 = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xb1, 0x4a, 0x04, 0x31, + 0x10, 0x86, 0xd9, 0x2b, 0x04, 0x73, 0x62, 0x71, 0x20, 0x68, 0xb4, 0x10, 0x0b, 0xb1, 0x9a, 0xb0, + 0x5a, 0xaa, 0x85, 0xe7, 0x81, 0x28, 0x16, 0x8b, 0x85, 0x85, 0xcd, 0x31, 0x5e, 0x96, 0x61, 0x20, + 0xc9, 0x84, 0xbb, 0xdc, 0x82, 0x8f, 0xe0, 0xab, 0xf8, 0x94, 0xb2, 0x9b, 0xa0, 0xac, 0x58, 0x85, + 0xf0, 0x7f, 0xff, 0x7c, 0xc3, 0xa8, 0x73, 0x12, 0x21, 0xd7, 0x1a, 0x46, 0x6f, 0xba, 0xda, 0x38, + 0x21, 0xe2, 0x40, 0x06, 0xb7, 0x96, 0xd3, 0xd2, 0x62, 0x42, 0x88, 0x6b, 0x49, 0x32, 0x3b, 0xc8, + 0x1c, 0x30, 0x7a, 0xe8, 0x6a, 0x28, 0x9c, 0x3e, 0x29, 0x75, 0x8c, 0x6c, 0x30, 0x04, 0x49, 0x98, + 0x58, 0xc2, 0x26, 0x97, 0xb4, 0x1e, 0x0f, 0x8f, 0xe2, 0x78, 0xf5, 0x91, 0xb3, 0xb3, 0x27, 0xb5, + 0x7b, 0xd7, 0x4b, 0x16, 0x98, 0x70, 0x76, 0xab, 0xf6, 0x72, 0xb8, 0xb4, 0xad, 0x4b, 0x78, 0x38, + 0x39, 0xad, 0x2e, 0xa6, 0x97, 0x1a, 0xc6, 0xd2, 0x66, 0x40, 0x16, 0x3d, 0xf1, 0x32, 0x8d, 0xbf, + 0x9f, 0xf9, 0x67, 0xa5, 0x8e, 0x56, 0xe2, 0xe1, 0xdf, 0x1d, 0xe7, 0xfb, 0x3f, 0x9e, 0xa6, 0x37, + 0x37, 0xd5, 0xdb, 0x4d, 0x01, 0x49, 0x1c, 0x06, 0x02, 0x59, 0x93, 0xa1, 0x36, 0x0c, 0x7b, 0x99, + 0x1c, 0x61, 0xe4, 0xcd, 0x9f, 0x9b, 0x5c, 0x97, 0xf7, 0x6b, 0x72, 0xfc, 0x90, 0xeb, 0xf7, 0x4e, + 0xb6, 0x16, 0x1e, 0xd1, 0xc3, 0x6b, 0x0d, 0xcf, 0x39, 0x7d, 0xdf, 0x19, 0xc6, 0x5c, 0x7d, 0x07, + 0x00, 0x00, 0xff, 0xff, 0x29, 0xf1, 0xcb, 0x3a, 0x59, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/iam/v1/policy.pb.go b/vendor/google.golang.org/genproto/googleapis/iam/v1/policy.pb.go new file mode 100644 index 0000000..9100ef9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/iam/v1/policy.pb.go @@ -0,0 +1,528 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/iam/v1/policy.proto + +package iam // import "google.golang.org/genproto/googleapis/iam/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import expr "google.golang.org/genproto/googleapis/type/expr" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of action performed on a Binding in a policy. +type BindingDelta_Action int32 + +const ( + // Unspecified. + BindingDelta_ACTION_UNSPECIFIED BindingDelta_Action = 0 + // Addition of a Binding. + BindingDelta_ADD BindingDelta_Action = 1 + // Removal of a Binding. + BindingDelta_REMOVE BindingDelta_Action = 2 +) + +var BindingDelta_Action_name = map[int32]string{ + 0: "ACTION_UNSPECIFIED", + 1: "ADD", + 2: "REMOVE", +} +var BindingDelta_Action_value = map[string]int32{ + "ACTION_UNSPECIFIED": 0, + "ADD": 1, + "REMOVE": 2, +} + +func (x BindingDelta_Action) String() string { + return proto.EnumName(BindingDelta_Action_name, int32(x)) +} +func (BindingDelta_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{3, 0} +} + +// The type of action performed on an audit configuration in a policy. +type AuditConfigDelta_Action int32 + +const ( + // Unspecified. + AuditConfigDelta_ACTION_UNSPECIFIED AuditConfigDelta_Action = 0 + // Addition of an audit configuration. + AuditConfigDelta_ADD AuditConfigDelta_Action = 1 + // Removal of an audit configuration. + AuditConfigDelta_REMOVE AuditConfigDelta_Action = 2 +) + +var AuditConfigDelta_Action_name = map[int32]string{ + 0: "ACTION_UNSPECIFIED", + 1: "ADD", + 2: "REMOVE", +} +var AuditConfigDelta_Action_value = map[string]int32{ + "ACTION_UNSPECIFIED": 0, + "ADD": 1, + "REMOVE": 2, +} + +func (x AuditConfigDelta_Action) String() string { + return proto.EnumName(AuditConfigDelta_Action_name, int32(x)) +} +func (AuditConfigDelta_Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{4, 0} +} + +// Defines an Identity and Access Management (IAM) policy. It is used to +// specify access control policies for Cloud Platform resources. +// +// +// A `Policy` consists of a list of `bindings`. A `binding` binds a list of +// `members` to a `role`, where the members can be user accounts, Google groups, +// Google domains, and service accounts. A `role` is a named list of permissions +// defined by IAM. 
+// +// **JSON Example** +// +// { +// "bindings": [ +// { +// "role": "roles/owner", +// "members": [ +// "user:mike@example.com", +// "group:admins@example.com", +// "domain:google.com", +// "serviceAccount:my-other-app@appspot.gserviceaccount.com" +// ] +// }, +// { +// "role": "roles/viewer", +// "members": ["user:sean@example.com"] +// } +// ] +// } +// +// **YAML Example** +// +// bindings: +// - members: +// - user:mike@example.com +// - group:admins@example.com +// - domain:google.com +// - serviceAccount:my-other-app@appspot.gserviceaccount.com +// role: roles/owner +// - members: +// - user:sean@example.com +// role: roles/viewer +// +// +// For a description of IAM and its features, see the +// [IAM developer's guide](https://cloud.google.com/iam/docs). +type Policy struct { + // Deprecated. + Version int32 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + // Associates a list of `members` to a `role`. + // `bindings` with no members will result in an error. + Bindings []*Binding `protobuf:"bytes,4,rep,name=bindings,proto3" json:"bindings,omitempty"` + // `etag` is used for optimistic concurrency control as a way to help + // prevent simultaneous updates of a policy from overwriting each other. + // It is strongly suggested that systems make use of the `etag` in the + // read-modify-write cycle to perform policy updates in order to avoid race + // conditions: An `etag` is returned in the response to `getIamPolicy`, and + // systems are expected to put that etag in the request to `setIamPolicy` to + // ensure that their change will be applied to the same version of the policy. + // + // If no `etag` is provided in the call to `setIamPolicy`, then the existing + // policy is overwritten blindly. + Etag []byte `protobuf:"bytes,3,opt,name=etag,proto3" json:"etag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Policy) Reset() { *m = Policy{} } +func (m *Policy) String() string { return proto.CompactTextString(m) } +func (*Policy) ProtoMessage() {} +func (*Policy) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{0} +} +func (m *Policy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Policy.Unmarshal(m, b) +} +func (m *Policy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Policy.Marshal(b, m, deterministic) +} +func (dst *Policy) XXX_Merge(src proto.Message) { + xxx_messageInfo_Policy.Merge(dst, src) +} +func (m *Policy) XXX_Size() int { + return xxx_messageInfo_Policy.Size(m) +} +func (m *Policy) XXX_DiscardUnknown() { + xxx_messageInfo_Policy.DiscardUnknown(m) +} + +var xxx_messageInfo_Policy proto.InternalMessageInfo + +func (m *Policy) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *Policy) GetBindings() []*Binding { + if m != nil { + return m.Bindings + } + return nil +} + +func (m *Policy) GetEtag() []byte { + if m != nil { + return m.Etag + } + return nil +} + +// Associates `members` with a `role`. +type Binding struct { + // Role that is assigned to `members`. + // For example, `roles/viewer`, `roles/editor`, or `roles/owner`. + Role string `protobuf:"bytes,1,opt,name=role,proto3" json:"role,omitempty"` + // Specifies the identities requesting access for a Cloud Platform resource. 
+ // `members` can have the following values: + // + // * `allUsers`: A special identifier that represents anyone who is + // on the internet; with or without a Google account. + // + // * `allAuthenticatedUsers`: A special identifier that represents anyone + // who is authenticated with a Google account or a service account. + // + // * `user:{emailid}`: An email address that represents a specific Google + // account. For example, `alice@gmail.com` . + // + // + // * `serviceAccount:{emailid}`: An email address that represents a service + // account. For example, `my-other-app@appspot.gserviceaccount.com`. + // + // * `group:{emailid}`: An email address that represents a Google group. + // For example, `admins@example.com`. + // + // + // * `domain:{domain}`: The G Suite domain (primary) that represents all the + // users of that domain. For example, `google.com` or `example.com`. + // + // + Members []string `protobuf:"bytes,2,rep,name=members,proto3" json:"members,omitempty"` + // The condition that is associated with this binding. + // NOTE: An unsatisfied condition will not allow user access via current + // binding. Different bindings, including their conditions, are examined + // independently. + Condition *expr.Expr `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Binding) Reset() { *m = Binding{} } +func (m *Binding) String() string { return proto.CompactTextString(m) } +func (*Binding) ProtoMessage() {} +func (*Binding) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{1} +} +func (m *Binding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Binding.Unmarshal(m, b) +} +func (m *Binding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Binding.Marshal(b, m, deterministic) +} +func (dst *Binding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Binding.Merge(dst, src) +} +func (m *Binding) XXX_Size() int { + return xxx_messageInfo_Binding.Size(m) +} +func (m *Binding) XXX_DiscardUnknown() { + xxx_messageInfo_Binding.DiscardUnknown(m) +} + +var xxx_messageInfo_Binding proto.InternalMessageInfo + +func (m *Binding) GetRole() string { + if m != nil { + return m.Role + } + return "" +} + +func (m *Binding) GetMembers() []string { + if m != nil { + return m.Members + } + return nil +} + +func (m *Binding) GetCondition() *expr.Expr { + if m != nil { + return m.Condition + } + return nil +} + +// The difference delta between two policies. +type PolicyDelta struct { + // The delta for Bindings between two policies. + BindingDeltas []*BindingDelta `protobuf:"bytes,1,rep,name=binding_deltas,json=bindingDeltas,proto3" json:"binding_deltas,omitempty"` + // The delta for AuditConfigs between two policies. 
+ AuditConfigDeltas []*AuditConfigDelta `protobuf:"bytes,2,rep,name=audit_config_deltas,json=auditConfigDeltas,proto3" json:"audit_config_deltas,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PolicyDelta) Reset() { *m = PolicyDelta{} } +func (m *PolicyDelta) String() string { return proto.CompactTextString(m) } +func (*PolicyDelta) ProtoMessage() {} +func (*PolicyDelta) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{2} +} +func (m *PolicyDelta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PolicyDelta.Unmarshal(m, b) +} +func (m *PolicyDelta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PolicyDelta.Marshal(b, m, deterministic) +} +func (dst *PolicyDelta) XXX_Merge(src proto.Message) { + xxx_messageInfo_PolicyDelta.Merge(dst, src) +} +func (m *PolicyDelta) XXX_Size() int { + return xxx_messageInfo_PolicyDelta.Size(m) +} +func (m *PolicyDelta) XXX_DiscardUnknown() { + xxx_messageInfo_PolicyDelta.DiscardUnknown(m) +} + +var xxx_messageInfo_PolicyDelta proto.InternalMessageInfo + +func (m *PolicyDelta) GetBindingDeltas() []*BindingDelta { + if m != nil { + return m.BindingDeltas + } + return nil +} + +func (m *PolicyDelta) GetAuditConfigDeltas() []*AuditConfigDelta { + if m != nil { + return m.AuditConfigDeltas + } + return nil +} + +// One delta entry for Binding. Each individual change (only one member in each +// entry) to a binding will be a separate entry. +type BindingDelta struct { + // The action that was performed on a Binding. + // Required + Action BindingDelta_Action `protobuf:"varint,1,opt,name=action,proto3,enum=google.iam.v1.BindingDelta_Action" json:"action,omitempty"` + // Role that is assigned to `members`. + // For example, `roles/viewer`, `roles/editor`, or `roles/owner`. + // Required + Role string `protobuf:"bytes,2,opt,name=role,proto3" json:"role,omitempty"` + // A single identity requesting access for a Cloud Platform resource. + // Follows the same format of Binding.members. + // Required + Member string `protobuf:"bytes,3,opt,name=member,proto3" json:"member,omitempty"` + // Unimplemented. The condition that is associated with this binding. + // This field is logged only for Cloud Audit Logging. 
+ Condition *expr.Expr `protobuf:"bytes,4,opt,name=condition,proto3" json:"condition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BindingDelta) Reset() { *m = BindingDelta{} } +func (m *BindingDelta) String() string { return proto.CompactTextString(m) } +func (*BindingDelta) ProtoMessage() {} +func (*BindingDelta) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{3} +} +func (m *BindingDelta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BindingDelta.Unmarshal(m, b) +} +func (m *BindingDelta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BindingDelta.Marshal(b, m, deterministic) +} +func (dst *BindingDelta) XXX_Merge(src proto.Message) { + xxx_messageInfo_BindingDelta.Merge(dst, src) +} +func (m *BindingDelta) XXX_Size() int { + return xxx_messageInfo_BindingDelta.Size(m) +} +func (m *BindingDelta) XXX_DiscardUnknown() { + xxx_messageInfo_BindingDelta.DiscardUnknown(m) +} + +var xxx_messageInfo_BindingDelta proto.InternalMessageInfo + +func (m *BindingDelta) GetAction() BindingDelta_Action { + if m != nil { + return m.Action + } + return BindingDelta_ACTION_UNSPECIFIED +} + +func (m *BindingDelta) GetRole() string { + if m != nil { + return m.Role + } + return "" +} + +func (m *BindingDelta) GetMember() string { + if m != nil { + return m.Member + } + return "" +} + +func (m *BindingDelta) GetCondition() *expr.Expr { + if m != nil { + return m.Condition + } + return nil +} + +// One delta entry for AuditConfig. Each individual change (only one +// exempted_member in each entry) to a AuditConfig will be a separate entry. +type AuditConfigDelta struct { + // The action that was performed on an audit configuration in a policy. + // Required + Action AuditConfigDelta_Action `protobuf:"varint,1,opt,name=action,proto3,enum=google.iam.v1.AuditConfigDelta_Action" json:"action,omitempty"` + // Specifies a service that was configured for Cloud Audit Logging. + // For example, `storage.googleapis.com`, `cloudsql.googleapis.com`. + // `allServices` is a special value that covers all services. + // Required + Service string `protobuf:"bytes,2,opt,name=service,proto3" json:"service,omitempty"` + // A single identity that is exempted from "data access" audit + // logging for the `service` specified above. + // Follows the same format of Binding.members. + ExemptedMember string `protobuf:"bytes,3,opt,name=exempted_member,json=exemptedMember,proto3" json:"exempted_member,omitempty"` + // Specifies the log_type that was be enabled. ADMIN_ACTIVITY is always + // enabled, and cannot be configured. 
+ // Required + LogType string `protobuf:"bytes,4,opt,name=log_type,json=logType,proto3" json:"log_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AuditConfigDelta) Reset() { *m = AuditConfigDelta{} } +func (m *AuditConfigDelta) String() string { return proto.CompactTextString(m) } +func (*AuditConfigDelta) ProtoMessage() {} +func (*AuditConfigDelta) Descriptor() ([]byte, []int) { + return fileDescriptor_policy_178c0196ea84d022, []int{4} +} +func (m *AuditConfigDelta) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AuditConfigDelta.Unmarshal(m, b) +} +func (m *AuditConfigDelta) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AuditConfigDelta.Marshal(b, m, deterministic) +} +func (dst *AuditConfigDelta) XXX_Merge(src proto.Message) { + xxx_messageInfo_AuditConfigDelta.Merge(dst, src) +} +func (m *AuditConfigDelta) XXX_Size() int { + return xxx_messageInfo_AuditConfigDelta.Size(m) +} +func (m *AuditConfigDelta) XXX_DiscardUnknown() { + xxx_messageInfo_AuditConfigDelta.DiscardUnknown(m) +} + +var xxx_messageInfo_AuditConfigDelta proto.InternalMessageInfo + +func (m *AuditConfigDelta) GetAction() AuditConfigDelta_Action { + if m != nil { + return m.Action + } + return AuditConfigDelta_ACTION_UNSPECIFIED +} + +func (m *AuditConfigDelta) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +func (m *AuditConfigDelta) GetExemptedMember() string { + if m != nil { + return m.ExemptedMember + } + return "" +} + +func (m *AuditConfigDelta) GetLogType() string { + if m != nil { + return m.LogType + } + return "" +} + +func init() { + proto.RegisterType((*Policy)(nil), "google.iam.v1.Policy") + proto.RegisterType((*Binding)(nil), "google.iam.v1.Binding") + proto.RegisterType((*PolicyDelta)(nil), "google.iam.v1.PolicyDelta") + proto.RegisterType((*BindingDelta)(nil), "google.iam.v1.BindingDelta") + proto.RegisterType((*AuditConfigDelta)(nil), "google.iam.v1.AuditConfigDelta") + proto.RegisterEnum("google.iam.v1.BindingDelta_Action", BindingDelta_Action_name, BindingDelta_Action_value) + proto.RegisterEnum("google.iam.v1.AuditConfigDelta_Action", AuditConfigDelta_Action_name, AuditConfigDelta_Action_value) +} + +func init() { proto.RegisterFile("google/iam/v1/policy.proto", fileDescriptor_policy_178c0196ea84d022) } + +var fileDescriptor_policy_178c0196ea84d022 = []byte{ + // 550 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0xcf, 0xae, 0xd2, 0x4e, + 0x14, 0xc7, 0x7f, 0x03, 0xfc, 0xca, 0xe5, 0x70, 0x2f, 0xc2, 0xdc, 0x84, 0x54, 0x34, 0x91, 0x74, + 0xa1, 0xac, 0x5a, 0xc1, 0xb8, 0xd1, 0xc4, 0x84, 0x7f, 0x1a, 0x16, 0xf7, 0x42, 0xc6, 0x2b, 0x0b, + 0x43, 0x42, 0x86, 0x76, 0xac, 0x63, 0xda, 0x4e, 0xd3, 0xf6, 0x12, 0x58, 0xfb, 0x26, 0x2e, 0x7d, + 0x14, 0x1f, 0xc2, 0xad, 0xaf, 0xe0, 0xd2, 0x74, 0xa6, 0x45, 0x68, 0x8c, 0x1a, 0x77, 0x73, 0xce, + 0xf9, 0xce, 0x39, 0xe7, 0xf3, 0x6d, 0x07, 0x3a, 0xae, 0x10, 0xae, 0xc7, 0x2c, 0x4e, 0x7d, 0x6b, + 0xdb, 0xb7, 0x42, 0xe1, 0x71, 0x7b, 0x6f, 0x86, 0x91, 0x48, 0x04, 0xbe, 0x50, 0x35, 0x93, 0x53, + 0xdf, 0xdc, 0xf6, 0x3b, 0xed, 0x4c, 0x9a, 0xec, 0x43, 0x66, 0xb1, 0x5d, 0x18, 0x29, 0x59, 0xe7, + 0x7e, 0x96, 0xa7, 0x21, 0xb7, 0x68, 0x10, 0x88, 0x84, 0x26, 0x5c, 0x04, 0xb1, 0xaa, 0x1a, 0x1f, + 0x40, 0x5b, 0xc8, 0xa6, 0x58, 0x87, 0xea, 0x96, 0x45, 0x31, 0x17, 0x81, 0x8e, 0xba, 0xa8, 0xf7, + 0x3f, 0xc9, 0x43, 0x3c, 0x80, 0xb3, 0x0d, 0x0f, 0x1c, 0x1e, 
0xb8, 0xb1, 0x5e, 0xe9, 0x96, 0x7b, + 0xf5, 0x41, 0xdb, 0x3c, 0x99, 0x6d, 0x8e, 0x54, 0x99, 0x1c, 0x74, 0x18, 0x43, 0x85, 0x25, 0xd4, + 0xd5, 0xcb, 0x5d, 0xd4, 0x3b, 0x27, 0xf2, 0x6c, 0xbc, 0x87, 0x6a, 0x26, 0x4c, 0xcb, 0x91, 0xf0, + 0x98, 0x9c, 0x54, 0x23, 0xf2, 0x9c, 0x2e, 0xe0, 0x33, 0x7f, 0xc3, 0xa2, 0x58, 0x2f, 0x75, 0xcb, + 0xbd, 0x1a, 0xc9, 0x43, 0x6c, 0x41, 0xcd, 0x16, 0x81, 0xc3, 0xd3, 0xc5, 0x65, 0xc7, 0xfa, 0xa0, + 0x95, 0x6f, 0x90, 0xe2, 0x9a, 0xd3, 0x5d, 0x18, 0x91, 0x9f, 0x1a, 0xe3, 0x13, 0x82, 0xba, 0xc2, + 0x9a, 0x30, 0x2f, 0xa1, 0x78, 0x04, 0x8d, 0x6c, 0xb3, 0xb5, 0x93, 0x26, 0x62, 0x1d, 0x49, 0x8e, + 0x7b, 0xbf, 0xe6, 0x90, 0x97, 0xc8, 0xc5, 0xe6, 0x28, 0x8a, 0xf1, 0x1c, 0x2e, 0xe9, 0xad, 0xc3, + 0x93, 0xb5, 0x2d, 0x82, 0x77, 0xfc, 0xd0, 0xa8, 0x24, 0x1b, 0x3d, 0x28, 0x34, 0x1a, 0xa6, 0xca, + 0xb1, 0x14, 0xaa, 0x66, 0x2d, 0x5a, 0xc8, 0xc4, 0xc6, 0x57, 0x04, 0xe7, 0xc7, 0x03, 0xf1, 0x33, + 0xd0, 0xa8, 0x9d, 0xe4, 0x1f, 0xa0, 0x31, 0x30, 0x7e, 0xb3, 0x9d, 0x39, 0x94, 0x4a, 0x92, 0xdd, + 0x38, 0x18, 0x5a, 0x3a, 0x32, 0xb4, 0x0d, 0x9a, 0x72, 0x50, 0x7a, 0x56, 0x23, 0x59, 0x74, 0x6a, + 0x67, 0xe5, 0x2f, 0xec, 0x7c, 0x0a, 0x9a, 0x1a, 0x87, 0xdb, 0x80, 0x87, 0xe3, 0x9b, 0xd9, 0xfc, + 0x7a, 0xfd, 0xe6, 0xfa, 0xf5, 0x62, 0x3a, 0x9e, 0xbd, 0x9c, 0x4d, 0x27, 0xcd, 0xff, 0x70, 0x15, + 0xca, 0xc3, 0xc9, 0xa4, 0x89, 0x30, 0x80, 0x46, 0xa6, 0x57, 0xf3, 0xe5, 0xb4, 0x59, 0x32, 0xbe, + 0x21, 0x68, 0x16, 0x8d, 0xc0, 0x2f, 0x0a, 0x90, 0x0f, 0xff, 0xe0, 0x5c, 0x11, 0x54, 0x87, 0x6a, + 0xcc, 0xa2, 0x2d, 0xb7, 0x73, 0xd6, 0x3c, 0xc4, 0x8f, 0xe0, 0x0e, 0xdb, 0x31, 0x3f, 0x4c, 0x98, + 0xb3, 0x3e, 0xe1, 0x6e, 0xe4, 0xe9, 0x2b, 0xc5, 0x7f, 0x17, 0xce, 0x3c, 0xe1, 0xae, 0x53, 0x54, + 0x89, 0x5f, 0x23, 0x55, 0x4f, 0xb8, 0x37, 0xfb, 0x90, 0xfd, 0x23, 0xe9, 0xe8, 0x23, 0x82, 0x96, + 0x2d, 0xfc, 0x53, 0x94, 0x51, 0xf6, 0x0b, 0x2e, 0xd2, 0x87, 0xb6, 0x40, 0x6f, 0x1f, 0x67, 0x55, + 0x57, 0x78, 0x34, 0x70, 0x4d, 0x11, 0xb9, 0x96, 0xcb, 0x02, 0xf9, 0x0c, 0x2d, 0x55, 0xa2, 0x21, + 0x8f, 0xb3, 0xa7, 0xfe, 0x9c, 0x53, 0xff, 0x3b, 0x42, 0x9f, 0x4b, 0x97, 0xaf, 0xd4, 0xad, 0xb1, + 0x27, 0x6e, 0x1d, 0x73, 0x46, 0x7d, 0x73, 0xd9, 0xff, 0x92, 0x67, 0x57, 0x32, 0xbb, 0x9a, 0x51, + 0x7f, 0xb5, 0xec, 0x6f, 0x34, 0xd9, 0xeb, 0xc9, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x29, 0x86, + 0x8f, 0x3e, 0x35, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/logging/type/http_request.pb.go b/vendor/google.golang.org/genproto/googleapis/logging/type/http_request.pb.go new file mode 100644 index 0000000..2645d3b --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/logging/type/http_request.pb.go @@ -0,0 +1,249 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/logging/type/http_request.proto + +package ltype // import "google.golang.org/genproto/googleapis/logging/type" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
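As an aside, a minimal sketch of how the `Binding` message documented above can be populated from application code. It assumes the conventional import path `google.golang.org/genproto/googleapis/iam/v1` for the vendored policy.pb.go; the member strings follow the formats listed on the `members` field.

package main

import (
	"fmt"

	iam "google.golang.org/genproto/googleapis/iam/v1"
)

func main() {
	// Grant roles/viewer to a user, a service account, and a G Suite domain,
	// using the member formats documented on Binding.members.
	b := &iam.Binding{
		Role: "roles/viewer",
		Members: []string{
			"user:alice@gmail.com",
			"serviceAccount:my-other-app@appspot.gserviceaccount.com",
			"domain:example.com",
		},
	}
	// The generated getters are nil-safe, so they can be called even on a nil Binding.
	fmt.Println(b.GetRole(), b.GetMembers())
}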
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A common proto for logging HTTP requests. Only contains semantics +// defined by the HTTP specification. Product-specific logging +// information MUST be defined in a separate message. +type HttpRequest struct { + // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. + RequestMethod string `protobuf:"bytes,1,opt,name=request_method,json=requestMethod,proto3" json:"request_method,omitempty"` + // The scheme (http, https), the host name, the path and the query + // portion of the URL that was requested. + // Example: `"http://example.com/some/info?color=red"`. + RequestUrl string `protobuf:"bytes,2,opt,name=request_url,json=requestUrl,proto3" json:"request_url,omitempty"` + // The size of the HTTP request message in bytes, including the request + // headers and the request body. + RequestSize int64 `protobuf:"varint,3,opt,name=request_size,json=requestSize,proto3" json:"request_size,omitempty"` + // The response code indicating the status of response. + // Examples: 200, 404. + Status int32 `protobuf:"varint,4,opt,name=status,proto3" json:"status,omitempty"` + // The size of the HTTP response message sent back to the client, in bytes, + // including the response headers and the response body. + ResponseSize int64 `protobuf:"varint,5,opt,name=response_size,json=responseSize,proto3" json:"response_size,omitempty"` + // The user agent sent by the client. Example: + // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET + // CLR 1.0.3705)"`. + UserAgent string `protobuf:"bytes,6,opt,name=user_agent,json=userAgent,proto3" json:"user_agent,omitempty"` + // The IP address (IPv4 or IPv6) of the client that issued the HTTP + // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. + RemoteIp string `protobuf:"bytes,7,opt,name=remote_ip,json=remoteIp,proto3" json:"remote_ip,omitempty"` + // The IP address (IPv4 or IPv6) of the origin server that the request was + // sent to. + ServerIp string `protobuf:"bytes,13,opt,name=server_ip,json=serverIp,proto3" json:"server_ip,omitempty"` + // The referer URL of the request, as defined in + // [HTTP/1.1 Header Field + // Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). + Referer string `protobuf:"bytes,8,opt,name=referer,proto3" json:"referer,omitempty"` + // The request processing latency on the server, from the time the request was + // received until the response was sent. + Latency *duration.Duration `protobuf:"bytes,14,opt,name=latency,proto3" json:"latency,omitempty"` + // Whether or not a cache lookup was attempted. + CacheLookup bool `protobuf:"varint,11,opt,name=cache_lookup,json=cacheLookup,proto3" json:"cache_lookup,omitempty"` + // Whether or not an entity was served from cache + // (with or without validation). + CacheHit bool `protobuf:"varint,9,opt,name=cache_hit,json=cacheHit,proto3" json:"cache_hit,omitempty"` + // Whether or not the response was validated with the origin server before + // being served from cache. This field is only meaningful if `cache_hit` is + // True. + CacheValidatedWithOriginServer bool `protobuf:"varint,10,opt,name=cache_validated_with_origin_server,json=cacheValidatedWithOriginServer,proto3" json:"cache_validated_with_origin_server,omitempty"` + // The number of HTTP response bytes inserted into cache. Set only when a + // cache fill was attempted. 
+ CacheFillBytes int64 `protobuf:"varint,12,opt,name=cache_fill_bytes,json=cacheFillBytes,proto3" json:"cache_fill_bytes,omitempty"` + // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" + Protocol string `protobuf:"bytes,15,opt,name=protocol,proto3" json:"protocol,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpRequest) Reset() { *m = HttpRequest{} } +func (m *HttpRequest) String() string { return proto.CompactTextString(m) } +func (*HttpRequest) ProtoMessage() {} +func (*HttpRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_http_request_d38264e6a7ae2dc7, []int{0} +} +func (m *HttpRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpRequest.Unmarshal(m, b) +} +func (m *HttpRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpRequest.Marshal(b, m, deterministic) +} +func (dst *HttpRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpRequest.Merge(dst, src) +} +func (m *HttpRequest) XXX_Size() int { + return xxx_messageInfo_HttpRequest.Size(m) +} +func (m *HttpRequest) XXX_DiscardUnknown() { + xxx_messageInfo_HttpRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpRequest proto.InternalMessageInfo + +func (m *HttpRequest) GetRequestMethod() string { + if m != nil { + return m.RequestMethod + } + return "" +} + +func (m *HttpRequest) GetRequestUrl() string { + if m != nil { + return m.RequestUrl + } + return "" +} + +func (m *HttpRequest) GetRequestSize() int64 { + if m != nil { + return m.RequestSize + } + return 0 +} + +func (m *HttpRequest) GetStatus() int32 { + if m != nil { + return m.Status + } + return 0 +} + +func (m *HttpRequest) GetResponseSize() int64 { + if m != nil { + return m.ResponseSize + } + return 0 +} + +func (m *HttpRequest) GetUserAgent() string { + if m != nil { + return m.UserAgent + } + return "" +} + +func (m *HttpRequest) GetRemoteIp() string { + if m != nil { + return m.RemoteIp + } + return "" +} + +func (m *HttpRequest) GetServerIp() string { + if m != nil { + return m.ServerIp + } + return "" +} + +func (m *HttpRequest) GetReferer() string { + if m != nil { + return m.Referer + } + return "" +} + +func (m *HttpRequest) GetLatency() *duration.Duration { + if m != nil { + return m.Latency + } + return nil +} + +func (m *HttpRequest) GetCacheLookup() bool { + if m != nil { + return m.CacheLookup + } + return false +} + +func (m *HttpRequest) GetCacheHit() bool { + if m != nil { + return m.CacheHit + } + return false +} + +func (m *HttpRequest) GetCacheValidatedWithOriginServer() bool { + if m != nil { + return m.CacheValidatedWithOriginServer + } + return false +} + +func (m *HttpRequest) GetCacheFillBytes() int64 { + if m != nil { + return m.CacheFillBytes + } + return 0 +} + +func (m *HttpRequest) GetProtocol() string { + if m != nil { + return m.Protocol + } + return "" +} + +func init() { + proto.RegisterType((*HttpRequest)(nil), "google.logging.type.HttpRequest") +} + +func init() { + proto.RegisterFile("google/logging/type/http_request.proto", fileDescriptor_http_request_d38264e6a7ae2dc7) +} + +var fileDescriptor_http_request_d38264e6a7ae2dc7 = []byte{ + // 511 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0x5b, 0x6b, 0x14, 0x31, + 0x18, 0x86, 0x99, 0x1e, 0xf6, 0x90, 0x3d, 0x58, 0x22, 0x68, 0xba, 0x6a, 0x5d, 0x2b, 0xca, 0x5c, + 0xcd, 0x80, 0xbd, 0x11, 0xbc, 0x72, 0x15, 0x6d, 0xa5, 
0x62, 0x99, 0x7a, 0x00, 0x59, 0x18, 0x66, + 0x77, 0xbf, 0x9d, 0x09, 0x66, 0x27, 0x31, 0xc9, 0x54, 0xb6, 0x7f, 0xc6, 0x7b, 0x6f, 0xfc, 0x1f, + 0xfe, 0x2a, 0xc9, 0x97, 0x0c, 0x28, 0xf4, 0x66, 0x21, 0xef, 0xf3, 0xbc, 0x49, 0xf6, 0x9b, 0x90, + 0xa7, 0xa5, 0x94, 0xa5, 0x80, 0x54, 0xc8, 0xb2, 0xe4, 0x75, 0x99, 0xda, 0xad, 0x82, 0xb4, 0xb2, + 0x56, 0xe5, 0x1a, 0xbe, 0x37, 0x60, 0x6c, 0xa2, 0xb4, 0xb4, 0x92, 0xde, 0xf6, 0x5e, 0x12, 0xbc, + 0xc4, 0x79, 0x93, 0xfb, 0xa1, 0x5c, 0x28, 0x9e, 0x16, 0x75, 0x2d, 0x6d, 0x61, 0xb9, 0xac, 0x8d, + 0xaf, 0x4c, 0x8e, 0x02, 0xc5, 0xd5, 0xa2, 0x59, 0xa7, 0xab, 0x46, 0xa3, 0xe0, 0xf9, 0xf1, 0xef, + 0x3d, 0x32, 0x38, 0xb5, 0x56, 0x65, 0xfe, 0x20, 0xfa, 0x84, 0x8c, 0xc3, 0x99, 0xf9, 0x06, 0x6c, + 0x25, 0x57, 0x2c, 0x9a, 0x46, 0x71, 0x3f, 0x1b, 0x85, 0xf4, 0x3d, 0x86, 0xf4, 0x21, 0x19, 0xb4, + 0x5a, 0xa3, 0x05, 0xdb, 0x41, 0x87, 0x84, 0xe8, 0x93, 0x16, 0xf4, 0x11, 0x19, 0xb6, 0x82, 0xe1, + 0xd7, 0xc0, 0x76, 0xa7, 0x51, 0xbc, 0x9b, 0xb5, 0xa5, 0x4b, 0x7e, 0x0d, 0xf4, 0x0e, 0xe9, 0x18, + 0x5b, 0xd8, 0xc6, 0xb0, 0xbd, 0x69, 0x14, 0xef, 0x67, 0x61, 0x45, 0x1f, 0x93, 0x91, 0x06, 0xa3, + 0x64, 0x6d, 0xc0, 0x77, 0xf7, 0xb1, 0x3b, 0x6c, 0x43, 0x2c, 0x3f, 0x20, 0xa4, 0x31, 0xa0, 0xf3, + 0xa2, 0x84, 0xda, 0xb2, 0x0e, 0x9e, 0xdf, 0x77, 0xc9, 0x4b, 0x17, 0xd0, 0x7b, 0xa4, 0xaf, 0x61, + 0x23, 0x2d, 0xe4, 0x5c, 0xb1, 0x2e, 0xd2, 0x9e, 0x0f, 0xce, 0x94, 0x83, 0x06, 0xf4, 0x15, 0x68, + 0x07, 0x47, 0x1e, 0xfa, 0xe0, 0x4c, 0x51, 0x46, 0xba, 0x1a, 0xd6, 0xa0, 0x41, 0xb3, 0x1e, 0xa2, + 0x76, 0x49, 0x4f, 0x48, 0x57, 0x14, 0x16, 0xea, 0xe5, 0x96, 0x8d, 0xa7, 0x51, 0x3c, 0x78, 0x76, + 0x98, 0x84, 0xef, 0xd1, 0x0e, 0x37, 0x79, 0x1d, 0x86, 0x9b, 0xb5, 0xa6, 0x9b, 0xc3, 0xb2, 0x58, + 0x56, 0x90, 0x0b, 0x29, 0xbf, 0x35, 0x8a, 0x0d, 0xa6, 0x51, 0xdc, 0xcb, 0x06, 0x98, 0x9d, 0x63, + 0xe4, 0xae, 0xe3, 0x95, 0x8a, 0x5b, 0xd6, 0x47, 0xde, 0xc3, 0xe0, 0x94, 0x5b, 0xfa, 0x8e, 0x1c, + 0x7b, 0x78, 0x55, 0x08, 0xbe, 0x2a, 0x2c, 0xac, 0xf2, 0x1f, 0xdc, 0x56, 0xb9, 0xd4, 0xbc, 0xe4, + 0x75, 0xee, 0xaf, 0xcd, 0x08, 0xb6, 0x8e, 0xd0, 0xfc, 0xdc, 0x8a, 0x5f, 0xb8, 0xad, 0x3e, 0xa0, + 0x76, 0x89, 0x16, 0x8d, 0xc9, 0x81, 0xdf, 0x6b, 0xcd, 0x85, 0xc8, 0x17, 0x5b, 0x0b, 0x86, 0x0d, + 0x71, 0xb6, 0x63, 0xcc, 0xdf, 0x70, 0x21, 0x66, 0x2e, 0xa5, 0x13, 0xd2, 0xc3, 0xff, 0xb4, 0x94, + 0x82, 0xdd, 0xf2, 0x03, 0x6a, 0xd7, 0xb3, 0x9f, 0x11, 0xb9, 0xbb, 0x94, 0x9b, 0xe4, 0x86, 0xb7, + 0x38, 0x3b, 0xf8, 0xe7, 0x29, 0x5d, 0xb8, 0xc2, 0x45, 0xf4, 0xf5, 0x79, 0x10, 0x4b, 0x29, 0x8a, + 0xba, 0x4c, 0xa4, 0x2e, 0xd3, 0x12, 0x6a, 0xdc, 0x2e, 0xf5, 0xa8, 0x50, 0xdc, 0xfc, 0xf7, 0xf6, + 0x5f, 0x08, 0xf7, 0xfb, 0x6b, 0xe7, 0xf0, 0xad, 0xaf, 0xbe, 0x12, 0xb2, 0x59, 0x25, 0xe7, 0xe1, + 0xa4, 0x8f, 0x5b, 0x05, 0x7f, 0x5a, 0x36, 0x47, 0x36, 0x0f, 0x6c, 0xee, 0xd8, 0xa2, 0x83, 0x9b, + 0x9f, 0xfc, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x7d, 0xa3, 0x36, 0xbb, 0x57, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/logging/type/log_severity.pb.go b/vendor/google.golang.org/genproto/googleapis/logging/type/log_severity.pb.go new file mode 100644 index 0000000..9a4464d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/logging/type/log_severity.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
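For reference, a small sketch of filling in the `HttpRequest` message defined above, assuming the vendored `ltype` package path shown in that file and the `ptypes.DurationProto` helper from github.com/golang/protobuf (which this patch already depends on for the Duration type). The literal values mirror the examples given in the field comments.

package main

import (
	"fmt"
	"time"

	"github.com/golang/protobuf/ptypes"
	ltype "google.golang.org/genproto/googleapis/logging/type"
)

func main() {
	// Latency is the well-known Duration message, so it is built with
	// ptypes.DurationProto rather than assigned a time.Duration directly.
	req := &ltype.HttpRequest{
		RequestMethod: "GET",
		RequestUrl:    "http://example.com/some/info?color=red",
		Status:        200,
		RemoteIp:      "192.168.1.1",
		Latency:       ptypes.DurationProto(125 * time.Millisecond),
		CacheHit:      false,
		Protocol:      "HTTP/1.1",
	}
	fmt.Println(req.GetRequestMethod(), req.GetStatus(), req.GetLatency())
}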
+// source: google/logging/type/log_severity.proto + +package ltype // import "google.golang.org/genproto/googleapis/logging/type" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The severity of the event described in a log entry, expressed as one of the +// standard severity levels listed below. For your reference, the levels are +// assigned the listed numeric values. The effect of using numeric values other +// than those listed is undefined. +// +// You can filter for log entries by severity. For example, the following +// filter expression will match log entries with severities `INFO`, `NOTICE`, +// and `WARNING`: +// +// severity > DEBUG AND severity <= WARNING +// +// If you are writing log entries, you should map other severity encodings to +// one of these standard levels. For example, you might map all of Java's FINE, +// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the +// original severity level in the log entry payload if you wish. +type LogSeverity int32 + +const ( + // (0) The log entry has no assigned severity level. + LogSeverity_DEFAULT LogSeverity = 0 + // (100) Debug or trace information. + LogSeverity_DEBUG LogSeverity = 100 + // (200) Routine information, such as ongoing status or performance. + LogSeverity_INFO LogSeverity = 200 + // (300) Normal but significant events, such as start up, shut down, or + // a configuration change. + LogSeverity_NOTICE LogSeverity = 300 + // (400) Warning events might cause problems. + LogSeverity_WARNING LogSeverity = 400 + // (500) Error events are likely to cause problems. + LogSeverity_ERROR LogSeverity = 500 + // (600) Critical events cause more severe problems or outages. + LogSeverity_CRITICAL LogSeverity = 600 + // (700) A person must take an action immediately. + LogSeverity_ALERT LogSeverity = 700 + // (800) One or more systems are unusable. 
+ LogSeverity_EMERGENCY LogSeverity = 800 +) + +var LogSeverity_name = map[int32]string{ + 0: "DEFAULT", + 100: "DEBUG", + 200: "INFO", + 300: "NOTICE", + 400: "WARNING", + 500: "ERROR", + 600: "CRITICAL", + 700: "ALERT", + 800: "EMERGENCY", +} +var LogSeverity_value = map[string]int32{ + "DEFAULT": 0, + "DEBUG": 100, + "INFO": 200, + "NOTICE": 300, + "WARNING": 400, + "ERROR": 500, + "CRITICAL": 600, + "ALERT": 700, + "EMERGENCY": 800, +} + +func (x LogSeverity) String() string { + return proto.EnumName(LogSeverity_name, int32(x)) +} +func (LogSeverity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_log_severity_03d6cc62dd9e1ef9, []int{0} +} + +func init() { + proto.RegisterEnum("google.logging.type.LogSeverity", LogSeverity_name, LogSeverity_value) +} + +func init() { + proto.RegisterFile("google/logging/type/log_severity.proto", fileDescriptor_log_severity_03d6cc62dd9e1ef9) +} + +var fileDescriptor_log_severity_03d6cc62dd9e1ef9 = []byte{ + // 309 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0xcf, 0xc9, 0x4f, 0x4f, 0xcf, 0xcc, 0x4b, 0xd7, 0x2f, 0xa9, 0x2c, 0x00, 0x73, + 0xe2, 0x8b, 0x53, 0xcb, 0x52, 0x8b, 0x32, 0x4b, 0x2a, 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, + 0x84, 0x21, 0xea, 0xf4, 0xa0, 0xea, 0xf4, 0x40, 0xea, 0xa4, 0x64, 0xa0, 0x9a, 0x13, 0x0b, 0x32, + 0xf5, 0x13, 0xf3, 0xf2, 0xf2, 0x4b, 0x12, 0x4b, 0x32, 0xf3, 0xf3, 0x8a, 0x21, 0x5a, 0xb4, 0x9a, + 0x18, 0xb9, 0xb8, 0x7d, 0xf2, 0xd3, 0x83, 0xa1, 0x06, 0x09, 0x71, 0x73, 0xb1, 0xbb, 0xb8, 0xba, + 0x39, 0x86, 0xfa, 0x84, 0x08, 0x30, 0x08, 0x71, 0x72, 0xb1, 0xba, 0xb8, 0x3a, 0x85, 0xba, 0x0b, + 0xa4, 0x08, 0x71, 0x72, 0xb1, 0x78, 0xfa, 0xb9, 0xf9, 0x0b, 0x9c, 0x60, 0x14, 0xe2, 0xe6, 0x62, + 0xf3, 0xf3, 0x0f, 0xf1, 0x74, 0x76, 0x15, 0x58, 0xc3, 0x24, 0xc4, 0xc3, 0xc5, 0x1e, 0xee, 0x18, + 0xe4, 0xe7, 0xe9, 0xe7, 0x2e, 0x30, 0x81, 0x59, 0x88, 0x8b, 0x8b, 0xd5, 0x35, 0x28, 0xc8, 0x3f, + 0x48, 0xe0, 0x0b, 0xb3, 0x10, 0x2f, 0x17, 0x87, 0x73, 0x90, 0x67, 0x88, 0xa7, 0xb3, 0xa3, 0x8f, + 0xc0, 0x0d, 0x16, 0x90, 0x94, 0xa3, 0x8f, 0x6b, 0x50, 0x88, 0xc0, 0x1e, 0x56, 0x21, 0x3e, 0x2e, + 0x4e, 0x57, 0x5f, 0xd7, 0x20, 0x77, 0x57, 0x3f, 0xe7, 0x48, 0x81, 0x05, 0x6c, 0x4e, 0xf3, 0x19, + 0xb9, 0xc4, 0x93, 0xf3, 0x73, 0xf5, 0xb0, 0x38, 0xdf, 0x49, 0x00, 0xc9, 0x75, 0x01, 0x20, 0x27, + 0x07, 0x30, 0x46, 0x59, 0x40, 0x15, 0xa6, 0xe7, 0xe7, 0x24, 0xe6, 0xa5, 0xeb, 0xe5, 0x17, 0xa5, + 0xeb, 0xa7, 0xa7, 0xe6, 0x81, 0x3d, 0xa4, 0x0f, 0x91, 0x4a, 0x2c, 0xc8, 0x2c, 0x46, 0x09, 0x2e, + 0xeb, 0x1c, 0x10, 0xb9, 0x8a, 0x49, 0xd2, 0x1d, 0xa2, 0xd5, 0x39, 0x27, 0xbf, 0x34, 0x45, 0xcf, + 0x07, 0x6a, 0x53, 0x48, 0x65, 0x41, 0xea, 0x29, 0x98, 0x5c, 0x0c, 0x58, 0x2e, 0x06, 0x2a, 0x17, + 0x03, 0x92, 0x4b, 0x62, 0x03, 0x1b, 0x6e, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xea, 0x8a, 0xa7, + 0x20, 0x8a, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/logging/v2/log_entry.pb.go b/vendor/google.golang.org/genproto/googleapis/logging/v2/log_entry.pb.go new file mode 100644 index 0000000..d4c792d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/logging/v2/log_entry.pb.go @@ -0,0 +1,588 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
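The comment above suggests mapping non-standard severity encodings onto these levels (for example Java's FINE/FINER/FINEST onto DEBUG); a brief sketch of such a mapping using the generated constants and the `String()` method backed by `LogSeverity_name`. The level names on the switch arms are illustrative.

package main

import (
	"fmt"

	ltype "google.golang.org/genproto/googleapis/logging/type"
)

// toSeverity maps an application-level level name onto one of the standard
// LogSeverity values.
func toSeverity(level string) ltype.LogSeverity {
	switch level {
	case "FINE", "FINER", "FINEST":
		return ltype.LogSeverity_DEBUG
	case "WARN":
		return ltype.LogSeverity_WARNING
	case "FATAL":
		return ltype.LogSeverity_CRITICAL
	default:
		return ltype.LogSeverity_DEFAULT
	}
}

func main() {
	s := toSeverity("FINER")
	// String() resolves through LogSeverity_name, so this prints "DEBUG" and 100.
	fmt.Println(s.String(), int32(s))
}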
+// source: google/logging/v2/log_entry.proto + +package logging // import "google.golang.org/genproto/googleapis/logging/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" +import _type "google.golang.org/genproto/googleapis/logging/type" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An individual entry in a log. +type LogEntry struct { + // Required. The resource name of the log to which this log entry belongs: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // A project number may optionally be used in place of PROJECT_ID. The + // project number is translated to its corresponding PROJECT_ID internally + // and the `log_name` field will contain PROJECT_ID in queries and exports. + // + // `[LOG_ID]` must be URL-encoded within `log_name`. Example: + // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. + // `[LOG_ID]` must be less than 512 characters long and can only include the + // following characters: upper and lower case alphanumeric characters, + // forward-slash, underscore, hyphen, and period. + // + // For backward compatibility, if `log_name` begins with a forward-slash, such + // as `/projects/...`, then the log entry is ingested as usual but the + // forward-slash is removed. Listing the log entry will not show the leading + // slash and filtering for a log name with a leading slash will never return + // any results. + LogName string `protobuf:"bytes,12,opt,name=log_name,json=logName,proto3" json:"log_name,omitempty"` + // Required. The primary monitored resource associated with this log entry. + // Example: a log entry that reports a database error would be + // associated with the monitored resource designating the particular + // database that reported the error. + Resource *monitoredres.MonitoredResource `protobuf:"bytes,8,opt,name=resource,proto3" json:"resource,omitempty"` + // Optional. The log entry payload, which can be one of multiple types. + // + // Types that are valid to be assigned to Payload: + // *LogEntry_ProtoPayload + // *LogEntry_TextPayload + // *LogEntry_JsonPayload + Payload isLogEntry_Payload `protobuf_oneof:"payload"` + // Optional. The time the event described by the log entry occurred. + // This time is used to compute the log entry's age and to enforce + // the logs retention period. If this field is omitted in a new log + // entry, then Logging assigns it the current time. + // Timestamps have nanosecond accuracy, but trailing zeros in the fractional + // seconds might be omitted when the timestamp is displayed. 
+ // + // Incoming log entries should have timestamps that are no more than + // the [logs retention period](/logging/quotas) in the past, + // and no more than 24 hours in the future. Log entries outside those time + // boundaries will not be available when calling `entries.list`, but + // those log entries can still be exported with + // [LogSinks](/logging/docs/api/tasks/exporting-logs). + Timestamp *timestamp.Timestamp `protobuf:"bytes,9,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // Output only. The time the log entry was received by Logging. + ReceiveTimestamp *timestamp.Timestamp `protobuf:"bytes,24,opt,name=receive_timestamp,json=receiveTimestamp,proto3" json:"receive_timestamp,omitempty"` + // Optional. The severity of the log entry. The default value is + // `LogSeverity.DEFAULT`. + Severity _type.LogSeverity `protobuf:"varint,10,opt,name=severity,proto3,enum=google.logging.type.LogSeverity" json:"severity,omitempty"` + // Optional. A unique identifier for the log entry. If you provide a value, + // then Logging considers other log entries in the same project, + // with the same `timestamp`, and with the same `insert_id` to be duplicates + // which can be removed. If omitted in new log entries, then + // Logging assigns its own unique identifier. The `insert_id` is also used + // to order log entries that have the same `timestamp` value. + InsertId string `protobuf:"bytes,4,opt,name=insert_id,json=insertId,proto3" json:"insert_id,omitempty"` + // Optional. Information about the HTTP request associated with this + // log entry, if applicable. + HttpRequest *_type.HttpRequest `protobuf:"bytes,7,opt,name=http_request,json=httpRequest,proto3" json:"http_request,omitempty"` + // Optional. A set of user-defined (key, value) data that provides additional + // information about the log entry. + Labels map[string]string `protobuf:"bytes,11,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. Additional metadata about the monitored resource. + // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have + // this field populated. + Metadata *monitoredres.MonitoredResourceMetadata `protobuf:"bytes,25,opt,name=metadata,proto3" json:"metadata,omitempty"` + // Optional. Information about an operation associated with the log entry, if + // applicable. + Operation *LogEntryOperation `protobuf:"bytes,15,opt,name=operation,proto3" json:"operation,omitempty"` + // Optional. Resource name of the trace associated with the log entry, if any. + // If it contains a relative resource name, the name is assumed to be relative + // to `//tracing.googleapis.com`. Example: + // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` + Trace string `protobuf:"bytes,22,opt,name=trace,proto3" json:"trace,omitempty"` + // Optional. The span ID within the trace associated with the log entry. + // For Trace spans, this is the same format that the Trace + // API v2 uses: a 16-character hexadecimal encoding of an 8-byte array, such + // as "000000000000004a". + SpanId string `protobuf:"bytes,27,opt,name=span_id,json=spanId,proto3" json:"span_id,omitempty"` + // Optional. The sampling decision of the trace associated with the log entry. + // True means that the trace resource name in the `trace` field was sampled + // for storage in a trace backend. 
False means that the trace was not sampled + // for storage when this log entry was written, or the sampling decision was + // unknown at the time. A non-sampled `trace` value is still useful as a + // request correlation identifier. The default is False. + TraceSampled bool `protobuf:"varint,30,opt,name=trace_sampled,json=traceSampled,proto3" json:"trace_sampled,omitempty"` + // Optional. Source code location information associated with the log entry, + // if any. + SourceLocation *LogEntrySourceLocation `protobuf:"bytes,23,opt,name=source_location,json=sourceLocation,proto3" json:"source_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogEntry) Reset() { *m = LogEntry{} } +func (m *LogEntry) String() string { return proto.CompactTextString(m) } +func (*LogEntry) ProtoMessage() {} +func (*LogEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_log_entry_8cde5d9d784a97a7, []int{0} +} +func (m *LogEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogEntry.Unmarshal(m, b) +} +func (m *LogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogEntry.Marshal(b, m, deterministic) +} +func (dst *LogEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogEntry.Merge(dst, src) +} +func (m *LogEntry) XXX_Size() int { + return xxx_messageInfo_LogEntry.Size(m) +} +func (m *LogEntry) XXX_DiscardUnknown() { + xxx_messageInfo_LogEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_LogEntry proto.InternalMessageInfo + +func (m *LogEntry) GetLogName() string { + if m != nil { + return m.LogName + } + return "" +} + +func (m *LogEntry) GetResource() *monitoredres.MonitoredResource { + if m != nil { + return m.Resource + } + return nil +} + +type isLogEntry_Payload interface { + isLogEntry_Payload() +} + +type LogEntry_ProtoPayload struct { + ProtoPayload *any.Any `protobuf:"bytes,2,opt,name=proto_payload,json=protoPayload,proto3,oneof"` +} + +type LogEntry_TextPayload struct { + TextPayload string `protobuf:"bytes,3,opt,name=text_payload,json=textPayload,proto3,oneof"` +} + +type LogEntry_JsonPayload struct { + JsonPayload *_struct.Struct `protobuf:"bytes,6,opt,name=json_payload,json=jsonPayload,proto3,oneof"` +} + +func (*LogEntry_ProtoPayload) isLogEntry_Payload() {} + +func (*LogEntry_TextPayload) isLogEntry_Payload() {} + +func (*LogEntry_JsonPayload) isLogEntry_Payload() {} + +func (m *LogEntry) GetPayload() isLogEntry_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *LogEntry) GetProtoPayload() *any.Any { + if x, ok := m.GetPayload().(*LogEntry_ProtoPayload); ok { + return x.ProtoPayload + } + return nil +} + +func (m *LogEntry) GetTextPayload() string { + if x, ok := m.GetPayload().(*LogEntry_TextPayload); ok { + return x.TextPayload + } + return "" +} + +func (m *LogEntry) GetJsonPayload() *_struct.Struct { + if x, ok := m.GetPayload().(*LogEntry_JsonPayload); ok { + return x.JsonPayload + } + return nil +} + +func (m *LogEntry) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +func (m *LogEntry) GetReceiveTimestamp() *timestamp.Timestamp { + if m != nil { + return m.ReceiveTimestamp + } + return nil +} + +func (m *LogEntry) GetSeverity() _type.LogSeverity { + if m != nil { + return m.Severity + } + return _type.LogSeverity_DEFAULT +} + +func (m *LogEntry) GetInsertId() string { + if m != nil { + return m.InsertId + } + return "" +} + +func (m *LogEntry) 
GetHttpRequest() *_type.HttpRequest { + if m != nil { + return m.HttpRequest + } + return nil +} + +func (m *LogEntry) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *LogEntry) GetMetadata() *monitoredres.MonitoredResourceMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *LogEntry) GetOperation() *LogEntryOperation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *LogEntry) GetTrace() string { + if m != nil { + return m.Trace + } + return "" +} + +func (m *LogEntry) GetSpanId() string { + if m != nil { + return m.SpanId + } + return "" +} + +func (m *LogEntry) GetTraceSampled() bool { + if m != nil { + return m.TraceSampled + } + return false +} + +func (m *LogEntry) GetSourceLocation() *LogEntrySourceLocation { + if m != nil { + return m.SourceLocation + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LogEntry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LogEntry_OneofMarshaler, _LogEntry_OneofUnmarshaler, _LogEntry_OneofSizer, []interface{}{ + (*LogEntry_ProtoPayload)(nil), + (*LogEntry_TextPayload)(nil), + (*LogEntry_JsonPayload)(nil), + } +} + +func _LogEntry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LogEntry) + // payload + switch x := m.Payload.(type) { + case *LogEntry_ProtoPayload: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ProtoPayload); err != nil { + return err + } + case *LogEntry_TextPayload: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.TextPayload) + case *LogEntry_JsonPayload: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JsonPayload); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LogEntry.Payload has unexpected type %T", x) + } + return nil +} + +func _LogEntry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LogEntry) + switch tag { + case 2: // payload.proto_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Payload = &LogEntry_ProtoPayload{msg} + return true, err + case 3: // payload.text_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Payload = &LogEntry_TextPayload{x} + return true, err + case 6: // payload.json_payload + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.Payload = &LogEntry_JsonPayload{msg} + return true, err + default: + return false, nil + } +} + +func _LogEntry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LogEntry) + // payload + switch x := m.Payload.(type) { + case *LogEntry_ProtoPayload: + s := proto.Size(x.ProtoPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LogEntry_TextPayload: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TextPayload))) + n += len(x.TextPayload) + case *LogEntry_JsonPayload: + s := proto.Size(x.JsonPayload) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Additional information about a potentially long-running 
operation with which +// a log entry is associated. +type LogEntryOperation struct { + // Optional. An arbitrary operation identifier. Log entries with the + // same identifier are assumed to be part of the same operation. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Optional. An arbitrary producer identifier. The combination of + // `id` and `producer` must be globally unique. Examples for `producer`: + // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. + Producer string `protobuf:"bytes,2,opt,name=producer,proto3" json:"producer,omitempty"` + // Optional. Set this to True if this is the first log entry in the operation. + First bool `protobuf:"varint,3,opt,name=first,proto3" json:"first,omitempty"` + // Optional. Set this to True if this is the last log entry in the operation. + Last bool `protobuf:"varint,4,opt,name=last,proto3" json:"last,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogEntryOperation) Reset() { *m = LogEntryOperation{} } +func (m *LogEntryOperation) String() string { return proto.CompactTextString(m) } +func (*LogEntryOperation) ProtoMessage() {} +func (*LogEntryOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_log_entry_8cde5d9d784a97a7, []int{1} +} +func (m *LogEntryOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogEntryOperation.Unmarshal(m, b) +} +func (m *LogEntryOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogEntryOperation.Marshal(b, m, deterministic) +} +func (dst *LogEntryOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogEntryOperation.Merge(dst, src) +} +func (m *LogEntryOperation) XXX_Size() int { + return xxx_messageInfo_LogEntryOperation.Size(m) +} +func (m *LogEntryOperation) XXX_DiscardUnknown() { + xxx_messageInfo_LogEntryOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_LogEntryOperation proto.InternalMessageInfo + +func (m *LogEntryOperation) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *LogEntryOperation) GetProducer() string { + if m != nil { + return m.Producer + } + return "" +} + +func (m *LogEntryOperation) GetFirst() bool { + if m != nil { + return m.First + } + return false +} + +func (m *LogEntryOperation) GetLast() bool { + if m != nil { + return m.Last + } + return false +} + +// Additional information about the source code location that produced the log +// entry. +type LogEntrySourceLocation struct { + // Optional. Source file name. Depending on the runtime environment, this + // might be a simple name or a fully-qualified name. + File string `protobuf:"bytes,1,opt,name=file,proto3" json:"file,omitempty"` + // Optional. Line within the source file. 1-based; 0 indicates no line number + // available. + Line int64 `protobuf:"varint,2,opt,name=line,proto3" json:"line,omitempty"` + // Optional. Human-readable name of the function or method being invoked, with + // optional context such as the class or package name. This information may be + // used in contexts such as the logs viewer, where a file and line number are + // less meaningful. The format can vary by language. For example: + // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` + // (Python). 
+ Function string `protobuf:"bytes,3,opt,name=function,proto3" json:"function,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogEntrySourceLocation) Reset() { *m = LogEntrySourceLocation{} } +func (m *LogEntrySourceLocation) String() string { return proto.CompactTextString(m) } +func (*LogEntrySourceLocation) ProtoMessage() {} +func (*LogEntrySourceLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_log_entry_8cde5d9d784a97a7, []int{2} +} +func (m *LogEntrySourceLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogEntrySourceLocation.Unmarshal(m, b) +} +func (m *LogEntrySourceLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogEntrySourceLocation.Marshal(b, m, deterministic) +} +func (dst *LogEntrySourceLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogEntrySourceLocation.Merge(dst, src) +} +func (m *LogEntrySourceLocation) XXX_Size() int { + return xxx_messageInfo_LogEntrySourceLocation.Size(m) +} +func (m *LogEntrySourceLocation) XXX_DiscardUnknown() { + xxx_messageInfo_LogEntrySourceLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_LogEntrySourceLocation proto.InternalMessageInfo + +func (m *LogEntrySourceLocation) GetFile() string { + if m != nil { + return m.File + } + return "" +} + +func (m *LogEntrySourceLocation) GetLine() int64 { + if m != nil { + return m.Line + } + return 0 +} + +func (m *LogEntrySourceLocation) GetFunction() string { + if m != nil { + return m.Function + } + return "" +} + +func init() { + proto.RegisterType((*LogEntry)(nil), "google.logging.v2.LogEntry") + proto.RegisterMapType((map[string]string)(nil), "google.logging.v2.LogEntry.LabelsEntry") + proto.RegisterType((*LogEntryOperation)(nil), "google.logging.v2.LogEntryOperation") + proto.RegisterType((*LogEntrySourceLocation)(nil), "google.logging.v2.LogEntrySourceLocation") +} + +func init() { + proto.RegisterFile("google/logging/v2/log_entry.proto", fileDescriptor_log_entry_8cde5d9d784a97a7) +} + +var fileDescriptor_log_entry_8cde5d9d784a97a7 = []byte{ + // 772 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x55, 0xdd, 0x8e, 0xdb, 0x44, + 0x14, 0xae, 0x93, 0xb2, 0x71, 0x26, 0xd9, 0x6d, 0x77, 0x54, 0x1a, 0x6f, 0x5a, 0x20, 0x6c, 0xf9, + 0x09, 0x37, 0x8e, 0x14, 0x6e, 0xb6, 0xb4, 0x12, 0x6a, 0x2a, 0xd4, 0xad, 0x94, 0x42, 0x35, 0x41, + 0xbd, 0x40, 0x91, 0xac, 0x59, 0x7b, 0xe2, 0x0e, 0xd8, 0x33, 0x66, 0x3c, 0x8e, 0xc8, 0xa3, 0xf0, + 0x0a, 0x3c, 0x0a, 0x6f, 0xc0, 0x9b, 0x70, 0x89, 0xe6, 0xcc, 0xd8, 0x09, 0xc9, 0x2a, 0xbd, 0x3b, + 0x3f, 0xdf, 0x77, 0xce, 0xe7, 0x33, 0xe7, 0x24, 0xe8, 0xf3, 0x54, 0xca, 0x34, 0x63, 0x93, 0x4c, + 0xa6, 0x29, 0x17, 0xe9, 0x64, 0x3d, 0x35, 0x66, 0xc4, 0x84, 0x56, 0x9b, 0xb0, 0x50, 0x52, 0x4b, + 0x7c, 0x6e, 0x21, 0xa1, 0x83, 0x84, 0xeb, 0xe9, 0xf0, 0xb1, 0x63, 0xd1, 0x82, 0x4f, 0xa8, 0x10, + 0x52, 0x53, 0xcd, 0xa5, 0x28, 0x2d, 0x61, 0xf8, 0x64, 0x27, 0x9b, 0x4b, 0xc1, 0xb5, 0x54, 0x2c, + 0x89, 0x14, 0x2b, 0x65, 0xa5, 0x62, 0xe6, 0x40, 0x5f, 0xed, 0x35, 0xd6, 0x9b, 0x82, 0x4d, 0xde, + 0x6b, 0x5d, 0x44, 0x8a, 0xfd, 0x5e, 0xb1, 0x52, 0x1f, 0xc3, 0x19, 0x89, 0x25, 0x5b, 0x33, 0xc5, + 0xb5, 0x53, 0x39, 0xbc, 0x70, 0x38, 0xf0, 0x6e, 0xaa, 0xd5, 0x84, 0x8a, 0x3a, 0xf5, 0x78, 0x3f, + 0x55, 0x6a, 0x55, 0xc5, 0x75, 0x83, 0xcf, 0xf6, 0xb3, 0x9a, 0xe7, 0xac, 0xd4, 0x34, 0x2f, 0x2c, + 0xe0, 0xf2, 0x9f, 0x0e, 0xf2, 0xe7, 0x32, 0xfd, 0xc1, 0x8c, 0x04, 0x5f, 0x20, 
0xdf, 0x34, 0x17, + 0x34, 0x67, 0x41, 0x7f, 0xe4, 0x8d, 0xbb, 0xa4, 0x93, 0xc9, 0xf4, 0x47, 0x9a, 0x33, 0xfc, 0x14, + 0xf9, 0xf5, 0x37, 0x06, 0xfe, 0xc8, 0x1b, 0xf7, 0xa6, 0x9f, 0x84, 0x6e, 0x74, 0xb4, 0xe0, 0xe1, + 0x9b, 0x7a, 0x12, 0xc4, 0x81, 0x48, 0x03, 0xc7, 0xcf, 0xd0, 0x29, 0xf4, 0x8a, 0x0a, 0xba, 0xc9, + 0x24, 0x4d, 0x82, 0x16, 0xf0, 0x1f, 0xd4, 0xfc, 0x5a, 0x5b, 0xf8, 0x42, 0x6c, 0xae, 0xef, 0x90, + 0x3e, 0xf8, 0x6f, 0x2d, 0x16, 0x3f, 0x41, 0x7d, 0xcd, 0xfe, 0xd0, 0x0d, 0xb7, 0x6d, 0x64, 0x5d, + 0xdf, 0x21, 0x3d, 0x13, 0xad, 0x41, 0xcf, 0x51, 0xff, 0xd7, 0x52, 0x8a, 0x06, 0x74, 0x02, 0x0d, + 0x06, 0x07, 0x0d, 0x16, 0x30, 0x1a, 0xc3, 0x36, 0xf0, 0x9a, 0x7d, 0x85, 0xba, 0xcd, 0x54, 0x82, + 0x2e, 0x50, 0x87, 0x07, 0xd4, 0x9f, 0x6b, 0x04, 0xd9, 0x82, 0xf1, 0x2b, 0x74, 0xae, 0x58, 0xcc, + 0xf8, 0x9a, 0x45, 0xdb, 0x0a, 0xc1, 0x07, 0x2b, 0xdc, 0x77, 0xa4, 0x26, 0x82, 0x9f, 0x23, 0xbf, + 0x7e, 0xf1, 0x00, 0x8d, 0xbc, 0xf1, 0xd9, 0x74, 0x14, 0xee, 0x2d, 0xa6, 0x59, 0x8d, 0x70, 0x2e, + 0xd3, 0x85, 0xc3, 0x91, 0x86, 0x81, 0x1f, 0xa1, 0x2e, 0x17, 0x25, 0x53, 0x3a, 0xe2, 0x49, 0x70, + 0x17, 0xde, 0xcd, 0xb7, 0x81, 0xd7, 0x09, 0x7e, 0x89, 0xfa, 0xbb, 0x8b, 0x17, 0x74, 0x40, 0xde, + 0xed, 0xe5, 0xaf, 0xb5, 0x2e, 0x88, 0xc5, 0x91, 0xde, 0xfb, 0xad, 0x83, 0xbf, 0x47, 0x27, 0x19, + 0xbd, 0x61, 0x59, 0x19, 0xf4, 0x46, 0xed, 0x71, 0x6f, 0xfa, 0x75, 0x78, 0x70, 0x36, 0x61, 0xbd, + 0x45, 0xe1, 0x1c, 0x90, 0x60, 0x13, 0x47, 0xc3, 0x2f, 0x90, 0x9f, 0x33, 0x4d, 0x13, 0xaa, 0x69, + 0x70, 0x01, 0x0a, 0xbe, 0x3c, 0xba, 0x3e, 0x6f, 0x1c, 0x98, 0x34, 0x34, 0x3c, 0x43, 0x5d, 0x59, + 0x30, 0x05, 0xc7, 0x18, 0xdc, 0x83, 0x1a, 0x5f, 0x1c, 0x91, 0xf1, 0x53, 0x8d, 0x25, 0x5b, 0x1a, + 0x7e, 0x80, 0x3e, 0xd2, 0x8a, 0xc6, 0x2c, 0x78, 0x08, 0x53, 0xb2, 0x0e, 0x1e, 0xa0, 0x4e, 0x59, + 0x50, 0x61, 0xa6, 0xf7, 0x08, 0xe2, 0x27, 0xc6, 0x7d, 0x6d, 0x96, 0xef, 0x14, 0x10, 0x51, 0x49, + 0xf3, 0x22, 0x63, 0x49, 0xf0, 0xe9, 0xc8, 0x1b, 0xfb, 0xa4, 0x0f, 0xc1, 0x85, 0x8d, 0x61, 0x82, + 0xee, 0x59, 0xcd, 0x51, 0x26, 0x63, 0xab, 0x6e, 0x00, 0xea, 0xbe, 0x39, 0xa2, 0x6e, 0x01, 0x8c, + 0xb9, 0x23, 0x90, 0xb3, 0xf2, 0x7f, 0xfe, 0xf0, 0x29, 0xea, 0xed, 0x4c, 0x11, 0xdf, 0x47, 0xed, + 0xdf, 0xd8, 0x26, 0xf0, 0x40, 0x9c, 0x31, 0xcd, 0x87, 0xac, 0x69, 0x56, 0x31, 0xb8, 0xa5, 0x2e, + 0xb1, 0xce, 0x77, 0xad, 0x2b, 0x6f, 0xd6, 0x45, 0x1d, 0x77, 0x06, 0x97, 0x1c, 0x9d, 0x1f, 0x4c, + 0x03, 0x9f, 0xa1, 0x16, 0x4f, 0x5c, 0xa9, 0x16, 0x4f, 0xf0, 0x10, 0xf9, 0x85, 0x92, 0x49, 0x15, + 0x33, 0xe5, 0x8a, 0x35, 0xbe, 0xe9, 0xb2, 0xe2, 0xaa, 0xd4, 0x70, 0x75, 0x3e, 0xb1, 0x0e, 0xc6, + 0xe8, 0x6e, 0x46, 0x4b, 0x0d, 0x9b, 0xe6, 0x13, 0xb0, 0x2f, 0x97, 0xe8, 0xe1, 0xed, 0x9f, 0x66, + 0xd0, 0x2b, 0x9e, 0x31, 0xd7, 0x11, 0x6c, 0xa8, 0xc0, 0x85, 0x15, 0xdf, 0x26, 0x60, 0x1b, 0x1d, + 0xab, 0x4a, 0xc4, 0x30, 0xbf, 0xb6, 0xd5, 0x51, 0xfb, 0xb3, 0x3f, 0x3d, 0xf4, 0x71, 0x2c, 0xf3, + 0xc3, 0x79, 0xce, 0x4e, 0xeb, 0xae, 0x6f, 0xe1, 0x47, 0xc3, 0xfb, 0xe5, 0xca, 0x61, 0x52, 0x99, + 0x51, 0x91, 0x86, 0x52, 0xa5, 0x93, 0x94, 0x09, 0x38, 0xc2, 0x89, 0x4d, 0xd1, 0x82, 0x97, 0x3b, + 0xff, 0x08, 0xcf, 0x9c, 0xf9, 0xaf, 0xe7, 0xfd, 0xd5, 0x1a, 0xbc, 0xb2, 0xec, 0x97, 0x99, 0xac, + 0x12, 0xf3, 0x58, 0xd0, 0xe7, 0xdd, 0xf4, 0xef, 0x3a, 0xb3, 0x84, 0xcc, 0xd2, 0x65, 0x96, 0xef, + 0xa6, 0x37, 0x27, 0x50, 0xfb, 0xdb, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x7b, 0xdc, 0xd5, + 0x6c, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/logging/v2/logging.pb.go b/vendor/google.golang.org/genproto/googleapis/logging/v2/logging.pb.go new file mode 100644 
index 0000000..3534427 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/logging/v2/logging.pb.go @@ -0,0 +1,1031 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/logging/v2/logging.proto + +package logging // import "google.golang.org/genproto/googleapis/logging/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The parameters to DeleteLog. +type DeleteLogRequest struct { + // Required. The resource name of the log to delete: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // `[LOG_ID]` must be URL-encoded. For example, + // `"projects/my-project-id/logs/syslog"`, + // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. + // For more information about log names, see + // [LogEntry][google.logging.v2.LogEntry]. + LogName string `protobuf:"bytes,1,opt,name=log_name,json=logName,proto3" json:"log_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteLogRequest) Reset() { *m = DeleteLogRequest{} } +func (m *DeleteLogRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteLogRequest) ProtoMessage() {} +func (*DeleteLogRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{0} +} +func (m *DeleteLogRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteLogRequest.Unmarshal(m, b) +} +func (m *DeleteLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteLogRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteLogRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteLogRequest.Merge(dst, src) +} +func (m *DeleteLogRequest) XXX_Size() int { + return xxx_messageInfo_DeleteLogRequest.Size(m) +} +func (m *DeleteLogRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteLogRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteLogRequest proto.InternalMessageInfo + +func (m *DeleteLogRequest) GetLogName() string { + if m != nil { + return m.LogName + } + return "" +} + +// The parameters to WriteLogEntries. +type WriteLogEntriesRequest struct { + // Optional. 
A default log resource name that is assigned to all log entries + // in `entries` that do not specify a value for `log_name`: + // + // "projects/[PROJECT_ID]/logs/[LOG_ID]" + // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + // "folders/[FOLDER_ID]/logs/[LOG_ID]" + // + // `[LOG_ID]` must be URL-encoded. For example: + // + // "projects/my-project-id/logs/syslog" + // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + // + // The permission logging.logEntries.create is needed on each + // project, organization, billing account, or folder that is receiving + // new log entries, whether the resource is specified in + // logName or in an individual log entry. + LogName string `protobuf:"bytes,1,opt,name=log_name,json=logName,proto3" json:"log_name,omitempty"` + // Optional. A default monitored resource object that is assigned to all log + // entries in `entries` that do not specify a value for `resource`. Example: + // + // { "type": "gce_instance", + // "labels": { + // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + // + // See [LogEntry][google.logging.v2.LogEntry]. + Resource *monitoredres.MonitoredResource `protobuf:"bytes,2,opt,name=resource,proto3" json:"resource,omitempty"` + // Optional. Default labels that are added to the `labels` field of all log + // entries in `entries`. If a log entry already has a label with the same key + // as a label in this parameter, then the log entry's label is not changed. + // See [LogEntry][google.logging.v2.LogEntry]. + Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Required. The log entries to send to Logging. The order of log + // entries in this list does not matter. Values supplied in this method's + // `log_name`, `resource`, and `labels` fields are copied into those log + // entries in this list that do not include values for their corresponding + // fields. For more information, see the + // [LogEntry][google.logging.v2.LogEntry] type. + // + // If the `timestamp` or `insert_id` fields are missing in log entries, then + // this method supplies the current time or a unique identifier, respectively. + // The supplied values are chosen so that, among the log entries that did not + // supply their own values, the entries earlier in the list will sort before + // the entries later in the list. See the `entries.list` method. + // + // Log entries with timestamps that are more than the + // [logs retention period](/logging/quota-policy) in the past or more than + // 24 hours in the future will not be available when calling `entries.list`. + // However, those log entries can still be exported with + // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // + // To improve throughput and to avoid exceeding the + // [quota limit](/logging/quota-policy) for calls to `entries.write`, + // you should try to include several log entries in this list, + // rather than calling this method for each individual log entry. + Entries []*LogEntry `protobuf:"bytes,4,rep,name=entries,proto3" json:"entries,omitempty"` + // Optional. Whether valid entries should be written even if some other + // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. 
If any + // entry is not written, then the response status is the error associated + // with one of the failed entries and the response includes error details + // keyed by the entries' zero-based index in the `entries.write` method. + PartialSuccess bool `protobuf:"varint,5,opt,name=partial_success,json=partialSuccess,proto3" json:"partial_success,omitempty"` + // Optional. If true, the request should expect normal response, but the + // entries won't be persisted nor exported. Useful for checking whether the + // logging API endpoints are working properly before sending valuable data. + DryRun bool `protobuf:"varint,6,opt,name=dry_run,json=dryRun,proto3" json:"dry_run,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteLogEntriesRequest) Reset() { *m = WriteLogEntriesRequest{} } +func (m *WriteLogEntriesRequest) String() string { return proto.CompactTextString(m) } +func (*WriteLogEntriesRequest) ProtoMessage() {} +func (*WriteLogEntriesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{1} +} +func (m *WriteLogEntriesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteLogEntriesRequest.Unmarshal(m, b) +} +func (m *WriteLogEntriesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteLogEntriesRequest.Marshal(b, m, deterministic) +} +func (dst *WriteLogEntriesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteLogEntriesRequest.Merge(dst, src) +} +func (m *WriteLogEntriesRequest) XXX_Size() int { + return xxx_messageInfo_WriteLogEntriesRequest.Size(m) +} +func (m *WriteLogEntriesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WriteLogEntriesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteLogEntriesRequest proto.InternalMessageInfo + +func (m *WriteLogEntriesRequest) GetLogName() string { + if m != nil { + return m.LogName + } + return "" +} + +func (m *WriteLogEntriesRequest) GetResource() *monitoredres.MonitoredResource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *WriteLogEntriesRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *WriteLogEntriesRequest) GetEntries() []*LogEntry { + if m != nil { + return m.Entries + } + return nil +} + +func (m *WriteLogEntriesRequest) GetPartialSuccess() bool { + if m != nil { + return m.PartialSuccess + } + return false +} + +func (m *WriteLogEntriesRequest) GetDryRun() bool { + if m != nil { + return m.DryRun + } + return false +} + +// Result returned from WriteLogEntries. 
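To tie the pieces together, an illustrative sketch of building a `LogEntry` and a `WriteLogEntriesRequest` as described above. The log name and monitored-resource values are taken from the examples in the field comments; the label key and payload text are made up, and `ptypes.TimestampNow` is assumed to be available from github.com/golang/protobuf.

package main

import (
	"fmt"

	"github.com/golang/protobuf/ptypes"
	monitoredres "google.golang.org/genproto/googleapis/api/monitoredres"
	ltype "google.golang.org/genproto/googleapis/logging/type"
	logging "google.golang.org/genproto/googleapis/logging/v2"
)

func main() {
	// A single entry with a text payload; the Payload oneof is set by
	// assigning one of the generated wrapper types.
	entry := &logging.LogEntry{
		Severity:  ltype.LogSeverity_INFO,
		Timestamp: ptypes.TimestampNow(),
		Payload:   &logging.LogEntry_TextPayload{TextPayload: "workflow applied"},
		Labels:    map[string]string{"component": "lyra"},
	}

	// Request-level defaults (log_name, resource, labels) are copied into
	// entries that do not set them, as described on WriteLogEntriesRequest.
	req := &logging.WriteLogEntriesRequest{
		LogName: "projects/my-project-id/logs/syslog",
		Resource: &monitoredres.MonitoredResource{
			Type:   "gce_instance",
			Labels: map[string]string{"zone": "us-central1-a", "instance_id": "00000000000000000000"},
		},
		Entries:        []*logging.LogEntry{entry},
		PartialSuccess: true,
	}
	fmt.Println(req.GetLogName(), len(req.GetEntries()), entry.GetTextPayload())
}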
+// empty +type WriteLogEntriesResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteLogEntriesResponse) Reset() { *m = WriteLogEntriesResponse{} } +func (m *WriteLogEntriesResponse) String() string { return proto.CompactTextString(m) } +func (*WriteLogEntriesResponse) ProtoMessage() {} +func (*WriteLogEntriesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{2} +} +func (m *WriteLogEntriesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteLogEntriesResponse.Unmarshal(m, b) +} +func (m *WriteLogEntriesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteLogEntriesResponse.Marshal(b, m, deterministic) +} +func (dst *WriteLogEntriesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteLogEntriesResponse.Merge(dst, src) +} +func (m *WriteLogEntriesResponse) XXX_Size() int { + return xxx_messageInfo_WriteLogEntriesResponse.Size(m) +} +func (m *WriteLogEntriesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_WriteLogEntriesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteLogEntriesResponse proto.InternalMessageInfo + +// Error details for WriteLogEntries with partial success. +type WriteLogEntriesPartialErrors struct { + // When `WriteLogEntriesRequest.partial_success` is true, records the error + // status for entries that were not written due to a permanent error, keyed + // by the entry's zero-based index in `WriteLogEntriesRequest.entries`. + // + // Failed requests for which no entries are written will not include + // per-entry errors. + LogEntryErrors map[int32]*status.Status `protobuf:"bytes,1,rep,name=log_entry_errors,json=logEntryErrors,proto3" json:"log_entry_errors,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WriteLogEntriesPartialErrors) Reset() { *m = WriteLogEntriesPartialErrors{} } +func (m *WriteLogEntriesPartialErrors) String() string { return proto.CompactTextString(m) } +func (*WriteLogEntriesPartialErrors) ProtoMessage() {} +func (*WriteLogEntriesPartialErrors) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{3} +} +func (m *WriteLogEntriesPartialErrors) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WriteLogEntriesPartialErrors.Unmarshal(m, b) +} +func (m *WriteLogEntriesPartialErrors) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WriteLogEntriesPartialErrors.Marshal(b, m, deterministic) +} +func (dst *WriteLogEntriesPartialErrors) XXX_Merge(src proto.Message) { + xxx_messageInfo_WriteLogEntriesPartialErrors.Merge(dst, src) +} +func (m *WriteLogEntriesPartialErrors) XXX_Size() int { + return xxx_messageInfo_WriteLogEntriesPartialErrors.Size(m) +} +func (m *WriteLogEntriesPartialErrors) XXX_DiscardUnknown() { + xxx_messageInfo_WriteLogEntriesPartialErrors.DiscardUnknown(m) +} + +var xxx_messageInfo_WriteLogEntriesPartialErrors proto.InternalMessageInfo + +func (m *WriteLogEntriesPartialErrors) GetLogEntryErrors() map[int32]*status.Status { + if m != nil { + return m.LogEntryErrors + } + return nil +} + +// The parameters to `ListLogEntries`. +type ListLogEntriesRequest struct { + // Deprecated. Use `resource_names` instead. 
One or more project identifiers + // or project numbers from which to retrieve log entries. Example: + // `"my-project-1A"`. If present, these project identifiers are converted to + // resource name format and added to the list of resources in + // `resource_names`. + ProjectIds []string `protobuf:"bytes,1,rep,name=project_ids,json=projectIds,proto3" json:"project_ids,omitempty"` // Deprecated: Do not use. + // Required. Names of one or more parent resources from which to + // retrieve log entries: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + // + // Projects listed in the `project_ids` field are added to this list. + ResourceNames []string `protobuf:"bytes,8,rep,name=resource_names,json=resourceNames,proto3" json:"resource_names,omitempty"` + // Optional. A filter that chooses which log entries to return. See [Advanced + // Logs Filters](/logging/docs/view/advanced_filters). Only log entries that + // match the filter are returned. An empty filter matches all log entries in + // the resources listed in `resource_names`. Referencing a parent resource + // that is not listed in `resource_names` will cause the filter to return no + // results. + // The maximum length of the filter is 20000 characters. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. How the results should be sorted. Presently, the only permitted + // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first + // option returns entries in order of increasing values of + // `LogEntry.timestamp` (oldest first), and the second option returns entries + // in order of decreasing timestamps (newest first). Entries with equal + // timestamps are returned in order of their `insert_id` values. + OrderBy string `protobuf:"bytes,3,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `next_page_token` in the + // response indicates that more results might be available. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `page_token` must be the value of + // `next_page_token` from the previous response. The values of other method + // parameters should be identical to those in the previous call. 
+ PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogEntriesRequest) Reset() { *m = ListLogEntriesRequest{} } +func (m *ListLogEntriesRequest) String() string { return proto.CompactTextString(m) } +func (*ListLogEntriesRequest) ProtoMessage() {} +func (*ListLogEntriesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{4} +} +func (m *ListLogEntriesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogEntriesRequest.Unmarshal(m, b) +} +func (m *ListLogEntriesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogEntriesRequest.Marshal(b, m, deterministic) +} +func (dst *ListLogEntriesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogEntriesRequest.Merge(dst, src) +} +func (m *ListLogEntriesRequest) XXX_Size() int { + return xxx_messageInfo_ListLogEntriesRequest.Size(m) +} +func (m *ListLogEntriesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogEntriesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogEntriesRequest proto.InternalMessageInfo + +// Deprecated: Do not use. +func (m *ListLogEntriesRequest) GetProjectIds() []string { + if m != nil { + return m.ProjectIds + } + return nil +} + +func (m *ListLogEntriesRequest) GetResourceNames() []string { + if m != nil { + return m.ResourceNames + } + return nil +} + +func (m *ListLogEntriesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListLogEntriesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListLogEntriesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListLogEntriesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Result returned from `ListLogEntries`. +type ListLogEntriesResponse struct { + // A list of log entries. If `entries` is empty, `nextPageToken` may still be + // returned, indicating that more entries may exist. See `nextPageToken` for + // more information. + Entries []*LogEntry `protobuf:"bytes,1,rep,name=entries,proto3" json:"entries,omitempty"` + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. + // + // If a value for `next_page_token` appears and the `entries` field is empty, + // it means that the search found no log entries so far but it did not have + // time to search all the possible log entries. Retry the method with this + // value for `page_token` to continue the search. Alternatively, consider + // speeding up the search by changing your filter to specify a single log name + // or resource type, or to narrow the time range of the search. 
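+ //
+ // A minimal paging sketch (illustrative only; `client`, `ctx`, and
+ // `process` are placeholders, not part of this package): keep calling
+ // ListLogEntries until `next_page_token` comes back empty, even when a
+ // page contains no entries:
+ //
+ //   req := &ListLogEntriesRequest{
+ //       ResourceNames: []string{"projects/my-project-id"},
+ //       OrderBy:       "timestamp asc",
+ //   }
+ //   for {
+ //       resp, err := client.ListLogEntries(ctx, req)
+ //       if err != nil {
+ //           break // handle the error in real code
+ //       }
+ //       process(resp.Entries) // may be empty while the search continues
+ //       if resp.NextPageToken == "" {
+ //           break
+ //       }
+ //       req.PageToken = resp.NextPageToken
+ //   }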
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogEntriesResponse) Reset() { *m = ListLogEntriesResponse{} } +func (m *ListLogEntriesResponse) String() string { return proto.CompactTextString(m) } +func (*ListLogEntriesResponse) ProtoMessage() {} +func (*ListLogEntriesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{5} +} +func (m *ListLogEntriesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogEntriesResponse.Unmarshal(m, b) +} +func (m *ListLogEntriesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogEntriesResponse.Marshal(b, m, deterministic) +} +func (dst *ListLogEntriesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogEntriesResponse.Merge(dst, src) +} +func (m *ListLogEntriesResponse) XXX_Size() int { + return xxx_messageInfo_ListLogEntriesResponse.Size(m) +} +func (m *ListLogEntriesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogEntriesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogEntriesResponse proto.InternalMessageInfo + +func (m *ListLogEntriesResponse) GetEntries() []*LogEntry { + if m != nil { + return m.Entries + } + return nil +} + +func (m *ListLogEntriesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The parameters to ListMonitoredResourceDescriptors +type ListMonitoredResourceDescriptorsRequest struct { + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + PageSize int32 `protobuf:"varint,1,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. 
+ PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMonitoredResourceDescriptorsRequest) Reset() { + *m = ListMonitoredResourceDescriptorsRequest{} +} +func (m *ListMonitoredResourceDescriptorsRequest) String() string { return proto.CompactTextString(m) } +func (*ListMonitoredResourceDescriptorsRequest) ProtoMessage() {} +func (*ListMonitoredResourceDescriptorsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{6} +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Unmarshal(m, b) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Marshal(b, m, deterministic) +} +func (dst *ListMonitoredResourceDescriptorsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Merge(dst, src) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Size() int { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Size(m) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMonitoredResourceDescriptorsRequest proto.InternalMessageInfo + +func (m *ListMonitoredResourceDescriptorsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListMonitoredResourceDescriptorsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Result returned from ListMonitoredResourceDescriptors. +type ListMonitoredResourceDescriptorsResponse struct { + // A list of resource descriptors. + ResourceDescriptors []*monitoredres.MonitoredResourceDescriptor `protobuf:"bytes,1,rep,name=resource_descriptors,json=resourceDescriptors,proto3" json:"resource_descriptors,omitempty"` + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMonitoredResourceDescriptorsResponse) Reset() { + *m = ListMonitoredResourceDescriptorsResponse{} +} +func (m *ListMonitoredResourceDescriptorsResponse) String() string { return proto.CompactTextString(m) } +func (*ListMonitoredResourceDescriptorsResponse) ProtoMessage() {} +func (*ListMonitoredResourceDescriptorsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{7} +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Unmarshal(m, b) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Marshal(b, m, deterministic) +} +func (dst *ListMonitoredResourceDescriptorsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Merge(dst, src) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Size() int { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Size(m) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMonitoredResourceDescriptorsResponse proto.InternalMessageInfo + +func (m *ListMonitoredResourceDescriptorsResponse) GetResourceDescriptors() []*monitoredres.MonitoredResourceDescriptor { + if m != nil { + return m.ResourceDescriptors + } + return nil +} + +func (m *ListMonitoredResourceDescriptorsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The parameters to ListLogs. +type ListLogsRequest struct { + // Required. The resource name that owns the logs: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogsRequest) Reset() { *m = ListLogsRequest{} } +func (m *ListLogsRequest) String() string { return proto.CompactTextString(m) } +func (*ListLogsRequest) ProtoMessage() {} +func (*ListLogsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{8} +} +func (m *ListLogsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogsRequest.Unmarshal(m, b) +} +func (m *ListLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogsRequest.Marshal(b, m, deterministic) +} +func (dst *ListLogsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogsRequest.Merge(dst, src) +} +func (m *ListLogsRequest) XXX_Size() int { + return xxx_messageInfo_ListLogsRequest.Size(m) +} +func (m *ListLogsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogsRequest proto.InternalMessageInfo + +func (m *ListLogsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListLogsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListLogsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Result returned from ListLogs. +type ListLogsResponse struct { + // A list of log names. For example, + // `"projects/my-project/syslog"` or + // `"organizations/123/cloudresourcemanager.googleapis.com%2Factivity"`. + LogNames []string `protobuf:"bytes,3,rep,name=log_names,json=logNames,proto3" json:"log_names,omitempty"` + // If there might be more results than those appearing in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. 
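+ //
+ // Sketch of a single ListLogs call (illustrative only; `client` and `ctx`
+ // are assumed to exist):
+ //
+ //   resp, err := client.ListLogs(ctx, &ListLogsRequest{
+ //       Parent:   "projects/my-project-id",
+ //       PageSize: 100,
+ //   })
+ //   if err == nil {
+ //       // resp.LogNames holds this page of log names; pass
+ //       // resp.NextPageToken as PageToken to fetch the next page.
+ //   }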
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogsResponse) Reset() { *m = ListLogsResponse{} } +func (m *ListLogsResponse) String() string { return proto.CompactTextString(m) } +func (*ListLogsResponse) ProtoMessage() {} +func (*ListLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_edbca3a2edcc1759, []int{9} +} +func (m *ListLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogsResponse.Unmarshal(m, b) +} +func (m *ListLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogsResponse.Marshal(b, m, deterministic) +} +func (dst *ListLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogsResponse.Merge(dst, src) +} +func (m *ListLogsResponse) XXX_Size() int { + return xxx_messageInfo_ListLogsResponse.Size(m) +} +func (m *ListLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogsResponse proto.InternalMessageInfo + +func (m *ListLogsResponse) GetLogNames() []string { + if m != nil { + return m.LogNames + } + return nil +} + +func (m *ListLogsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*DeleteLogRequest)(nil), "google.logging.v2.DeleteLogRequest") + proto.RegisterType((*WriteLogEntriesRequest)(nil), "google.logging.v2.WriteLogEntriesRequest") + proto.RegisterMapType((map[string]string)(nil), "google.logging.v2.WriteLogEntriesRequest.LabelsEntry") + proto.RegisterType((*WriteLogEntriesResponse)(nil), "google.logging.v2.WriteLogEntriesResponse") + proto.RegisterType((*WriteLogEntriesPartialErrors)(nil), "google.logging.v2.WriteLogEntriesPartialErrors") + proto.RegisterMapType((map[int32]*status.Status)(nil), "google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry") + proto.RegisterType((*ListLogEntriesRequest)(nil), "google.logging.v2.ListLogEntriesRequest") + proto.RegisterType((*ListLogEntriesResponse)(nil), "google.logging.v2.ListLogEntriesResponse") + proto.RegisterType((*ListMonitoredResourceDescriptorsRequest)(nil), "google.logging.v2.ListMonitoredResourceDescriptorsRequest") + proto.RegisterType((*ListMonitoredResourceDescriptorsResponse)(nil), "google.logging.v2.ListMonitoredResourceDescriptorsResponse") + proto.RegisterType((*ListLogsRequest)(nil), "google.logging.v2.ListLogsRequest") + proto.RegisterType((*ListLogsResponse)(nil), "google.logging.v2.ListLogsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LoggingServiceV2Client is the client API for LoggingServiceV2 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LoggingServiceV2Client interface { + // Deletes all the log entries in a log. + // The log reappears if it receives new entries. + // Log entries written shortly before the delete operation might not be + // deleted. 
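+ //
+ // For example (sketch only; `conn` is an existing *grpc.ClientConn, `ctx`
+ // a context.Context, and the log name must be URL-encoded as described on
+ // DeleteLogRequest):
+ //
+ //   client := NewLoggingServiceV2Client(conn)
+ //   _, err := client.DeleteLog(ctx, &DeleteLogRequest{
+ //       LogName: "projects/my-project-id/logs/syslog",
+ //   })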
+ DeleteLog(ctx context.Context, in *DeleteLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Writes log entries to Logging. This API method is the + // only way to send log entries to Logging. This method + // is used, directly or indirectly, by the Logging agent + // (fluentd) and all logging libraries configured to use Logging. + // A single request may contain log entries for a maximum of 1000 + // different resources (projects, organizations, billing accounts or + // folders) + WriteLogEntries(ctx context.Context, in *WriteLogEntriesRequest, opts ...grpc.CallOption) (*WriteLogEntriesResponse, error) + // Lists log entries. Use this method to retrieve log entries from + // Logging. For ways to export log entries, see + // [Exporting Logs](/logging/docs/export). + ListLogEntries(ctx context.Context, in *ListLogEntriesRequest, opts ...grpc.CallOption) (*ListLogEntriesResponse, error) + // Lists the descriptors for monitored resource types used by Logging. + ListMonitoredResourceDescriptors(ctx context.Context, in *ListMonitoredResourceDescriptorsRequest, opts ...grpc.CallOption) (*ListMonitoredResourceDescriptorsResponse, error) + // Lists the logs in projects, organizations, folders, or billing accounts. + // Only logs that have entries are listed. + ListLogs(ctx context.Context, in *ListLogsRequest, opts ...grpc.CallOption) (*ListLogsResponse, error) +} + +type loggingServiceV2Client struct { + cc *grpc.ClientConn +} + +func NewLoggingServiceV2Client(cc *grpc.ClientConn) LoggingServiceV2Client { + return &loggingServiceV2Client{cc} +} + +func (c *loggingServiceV2Client) DeleteLog(ctx context.Context, in *DeleteLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.logging.v2.LoggingServiceV2/DeleteLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *loggingServiceV2Client) WriteLogEntries(ctx context.Context, in *WriteLogEntriesRequest, opts ...grpc.CallOption) (*WriteLogEntriesResponse, error) { + out := new(WriteLogEntriesResponse) + err := c.cc.Invoke(ctx, "/google.logging.v2.LoggingServiceV2/WriteLogEntries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *loggingServiceV2Client) ListLogEntries(ctx context.Context, in *ListLogEntriesRequest, opts ...grpc.CallOption) (*ListLogEntriesResponse, error) { + out := new(ListLogEntriesResponse) + err := c.cc.Invoke(ctx, "/google.logging.v2.LoggingServiceV2/ListLogEntries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *loggingServiceV2Client) ListMonitoredResourceDescriptors(ctx context.Context, in *ListMonitoredResourceDescriptorsRequest, opts ...grpc.CallOption) (*ListMonitoredResourceDescriptorsResponse, error) { + out := new(ListMonitoredResourceDescriptorsResponse) + err := c.cc.Invoke(ctx, "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *loggingServiceV2Client) ListLogs(ctx context.Context, in *ListLogsRequest, opts ...grpc.CallOption) (*ListLogsResponse, error) { + out := new(ListLogsResponse) + err := c.cc.Invoke(ctx, "/google.logging.v2.LoggingServiceV2/ListLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// LoggingServiceV2Server is the server API for LoggingServiceV2 service. +type LoggingServiceV2Server interface { + // Deletes all the log entries in a log. 
+ // The log reappears if it receives new entries. + // Log entries written shortly before the delete operation might not be + // deleted. + DeleteLog(context.Context, *DeleteLogRequest) (*empty.Empty, error) + // Writes log entries to Logging. This API method is the + // only way to send log entries to Logging. This method + // is used, directly or indirectly, by the Logging agent + // (fluentd) and all logging libraries configured to use Logging. + // A single request may contain log entries for a maximum of 1000 + // different resources (projects, organizations, billing accounts or + // folders) + WriteLogEntries(context.Context, *WriteLogEntriesRequest) (*WriteLogEntriesResponse, error) + // Lists log entries. Use this method to retrieve log entries from + // Logging. For ways to export log entries, see + // [Exporting Logs](/logging/docs/export). + ListLogEntries(context.Context, *ListLogEntriesRequest) (*ListLogEntriesResponse, error) + // Lists the descriptors for monitored resource types used by Logging. + ListMonitoredResourceDescriptors(context.Context, *ListMonitoredResourceDescriptorsRequest) (*ListMonitoredResourceDescriptorsResponse, error) + // Lists the logs in projects, organizations, folders, or billing accounts. + // Only logs that have entries are listed. + ListLogs(context.Context, *ListLogsRequest) (*ListLogsResponse, error) +} + +func RegisterLoggingServiceV2Server(s *grpc.Server, srv LoggingServiceV2Server) { + s.RegisterService(&_LoggingServiceV2_serviceDesc, srv) +} + +func _LoggingServiceV2_DeleteLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LoggingServiceV2Server).DeleteLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.LoggingServiceV2/DeleteLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LoggingServiceV2Server).DeleteLog(ctx, req.(*DeleteLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LoggingServiceV2_WriteLogEntries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WriteLogEntriesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LoggingServiceV2Server).WriteLogEntries(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.LoggingServiceV2/WriteLogEntries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LoggingServiceV2Server).WriteLogEntries(ctx, req.(*WriteLogEntriesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LoggingServiceV2_ListLogEntries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListLogEntriesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LoggingServiceV2Server).ListLogEntries(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.LoggingServiceV2/ListLogEntries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LoggingServiceV2Server).ListLogEntries(ctx, req.(*ListLogEntriesRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +func _LoggingServiceV2_ListMonitoredResourceDescriptors_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListMonitoredResourceDescriptorsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LoggingServiceV2Server).ListMonitoredResourceDescriptors(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LoggingServiceV2Server).ListMonitoredResourceDescriptors(ctx, req.(*ListMonitoredResourceDescriptorsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _LoggingServiceV2_ListLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListLogsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(LoggingServiceV2Server).ListLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.LoggingServiceV2/ListLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(LoggingServiceV2Server).ListLogs(ctx, req.(*ListLogsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _LoggingServiceV2_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.logging.v2.LoggingServiceV2", + HandlerType: (*LoggingServiceV2Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "DeleteLog", + Handler: _LoggingServiceV2_DeleteLog_Handler, + }, + { + MethodName: "WriteLogEntries", + Handler: _LoggingServiceV2_WriteLogEntries_Handler, + }, + { + MethodName: "ListLogEntries", + Handler: _LoggingServiceV2_ListLogEntries_Handler, + }, + { + MethodName: "ListMonitoredResourceDescriptors", + Handler: _LoggingServiceV2_ListMonitoredResourceDescriptors_Handler, + }, + { + MethodName: "ListLogs", + Handler: _LoggingServiceV2_ListLogs_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/logging/v2/logging.proto", +} + +func init() { + proto.RegisterFile("google/logging/v2/logging.proto", fileDescriptor_logging_edbca3a2edcc1759) +} + +var fileDescriptor_logging_edbca3a2edcc1759 = []byte{ + // 1093 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0x4f, 0x6f, 0xdc, 0x44, + 0x14, 0xd7, 0x6c, 0x9a, 0xcd, 0xee, 0x84, 0x26, 0xdb, 0x69, 0xb3, 0x71, 0x77, 0x93, 0x66, 0xb3, + 0xab, 0x90, 0x8d, 0x25, 0x6c, 0x69, 0x51, 0xa5, 0x36, 0x55, 0x0f, 0xa4, 0x8d, 0x10, 0x52, 0x8a, + 0x22, 0x07, 0x52, 0x29, 0x8a, 0xb4, 0x72, 0xec, 0x89, 0x35, 0xd4, 0xeb, 0x31, 0x33, 0xe3, 0x94, + 0x0d, 0xea, 0xa5, 0x07, 0xbe, 0x00, 0x17, 0xb8, 0x21, 0x6e, 0x70, 0xe0, 0x5b, 0x70, 0xe1, 0x08, + 0x17, 0x3e, 0x40, 0x6f, 0x9c, 0xb8, 0x71, 0x03, 0x79, 0x66, 0xec, 0x78, 0xff, 0x34, 0x49, 0xb9, + 0x79, 0xde, 0xfb, 0xcd, 0xbc, 0xf7, 0x7b, 0xf3, 0x7b, 0x6f, 0x0c, 0xd7, 0x02, 0x4a, 0x83, 0x10, + 0xdb, 0x21, 0x0d, 0x02, 0x12, 0x05, 0xf6, 0x59, 0x2f, 0xfb, 0xb4, 0x62, 0x46, 0x05, 0x45, 0xb7, + 0x14, 0xc0, 0xca, 0xac, 0x67, 0xbd, 0xc6, 0x8a, 0xde, 0xe3, 0xc6, 0xc4, 0x76, 0xa3, 0x88, 0x0a, + 0x57, 0x10, 0x1a, 0x71, 0xb5, 0xa1, 0xd1, 0x29, 0x78, 0x07, 0x34, 0x22, 0x82, 0x32, 0xec, 0xf7, + 0x19, 0xe6, 0x34, 0x61, 0x1e, 0xd6, 0xa0, 0xf5, 0xa9, 0x61, 0xfb, 
0x38, 0x12, 0x6c, 0xa8, 0x21, + 0xf7, 0x34, 0x44, 0xae, 0x4e, 0x92, 0x53, 0xdb, 0x4f, 0x98, 0x0c, 0xa4, 0xfd, 0xcd, 0x71, 0x3f, + 0x1e, 0xc4, 0x22, 0xdb, 0xbc, 0x36, 0xee, 0x14, 0x64, 0x80, 0xb9, 0x70, 0x07, 0xb1, 0x06, 0x2c, + 0x6b, 0x00, 0x8b, 0x3d, 0x9b, 0x0b, 0x57, 0x24, 0x3a, 0xfd, 0xf6, 0x07, 0xb0, 0xf6, 0x14, 0x87, + 0x58, 0xe0, 0x3d, 0x1a, 0x38, 0xf8, 0xcb, 0x04, 0x73, 0x81, 0xee, 0xc2, 0x4a, 0x9a, 0x5d, 0xe4, + 0x0e, 0xb0, 0x01, 0x5a, 0xa0, 0x5b, 0x75, 0xe6, 0x42, 0x1a, 0x7c, 0xea, 0x0e, 0x70, 0xfb, 0xef, + 0x12, 0xac, 0x3f, 0x67, 0x44, 0xc2, 0x77, 0x23, 0xc1, 0x08, 0xe6, 0x57, 0xef, 0x42, 0x0f, 0x61, + 0x25, 0x2b, 0x88, 0x51, 0x6a, 0x81, 0xee, 0x7c, 0x6f, 0xd5, 0xd2, 0x75, 0x76, 0x63, 0x62, 0x3d, + 0xcb, 0xca, 0xe6, 0x68, 0x90, 0x93, 0xc3, 0xd1, 0x33, 0x58, 0x0e, 0xdd, 0x13, 0x1c, 0x72, 0x63, + 0xa6, 0x35, 0xd3, 0x9d, 0xef, 0xdd, 0xb7, 0x26, 0x2e, 0xc8, 0x9a, 0x9e, 0x90, 0xb5, 0x27, 0xf7, + 0xa5, 0xc6, 0xa1, 0xa3, 0x0f, 0x41, 0xf7, 0xe1, 0x1c, 0x56, 0x28, 0xe3, 0x86, 0x3c, 0xaf, 0x39, + 0xe5, 0x3c, 0x7d, 0xd4, 0xd0, 0xc9, 0xb0, 0x68, 0x13, 0x2e, 0xc6, 0x2e, 0x13, 0xc4, 0x0d, 0xfb, + 0x3c, 0xf1, 0x3c, 0xcc, 0xb9, 0x31, 0xdb, 0x02, 0xdd, 0x8a, 0xb3, 0xa0, 0xcd, 0x07, 0xca, 0x8a, + 0x96, 0xe1, 0x9c, 0xcf, 0x86, 0x7d, 0x96, 0x44, 0x46, 0x59, 0x02, 0xca, 0x3e, 0x1b, 0x3a, 0x49, + 0xd4, 0x78, 0x08, 0xe7, 0x0b, 0xf9, 0xa0, 0x1a, 0x9c, 0x79, 0x81, 0x87, 0xba, 0x4e, 0xe9, 0x27, + 0xba, 0x03, 0x67, 0xcf, 0xdc, 0x30, 0x51, 0x05, 0xaa, 0x3a, 0x6a, 0xb1, 0x5d, 0x7a, 0x00, 0xda, + 0x77, 0xe1, 0xf2, 0x04, 0x43, 0x1e, 0xd3, 0x88, 0xe3, 0xf6, 0x1b, 0x00, 0x57, 0xc6, 0x7c, 0xfb, + 0x2a, 0xa1, 0x5d, 0xc6, 0x28, 0xe3, 0x68, 0x00, 0x6b, 0xb9, 0xd0, 0xfa, 0x58, 0xda, 0x0c, 0x20, + 0x89, 0x3f, 0xb9, 0xba, 0x90, 0x23, 0x47, 0xe5, 0x55, 0x51, 0x4b, 0x55, 0xa0, 0x85, 0x70, 0xc4, + 0xd8, 0xf8, 0x1c, 0xde, 0x9e, 0x02, 0x2b, 0xb2, 0x9d, 0x55, 0x6c, 0xbb, 0x45, 0xb6, 0xf3, 0x3d, + 0x94, 0x25, 0xc3, 0x62, 0xcf, 0x3a, 0x90, 0xfa, 0x2c, 0x56, 0xe0, 0x77, 0x00, 0x97, 0xf6, 0x08, + 0x17, 0x93, 0xa2, 0xeb, 0xc0, 0xf9, 0x98, 0xd1, 0x2f, 0xb0, 0x27, 0xfa, 0xc4, 0x57, 0xd4, 0xaa, + 0x3b, 0x25, 0x03, 0x38, 0x50, 0x9b, 0x3f, 0xf1, 0x39, 0xda, 0x80, 0x0b, 0x99, 0x9e, 0xa4, 0x3c, + 0xb9, 0x51, 0x49, 0x71, 0xce, 0xcd, 0xcc, 0x9a, 0x8a, 0x94, 0xa3, 0x3a, 0x2c, 0x9f, 0x92, 0x50, + 0x60, 0xa6, 0xaf, 0x40, 0xaf, 0x52, 0x61, 0x53, 0xe6, 0x63, 0xd6, 0x3f, 0x19, 0x1a, 0x33, 0x4a, + 0xd8, 0x72, 0xbd, 0x33, 0x44, 0x4d, 0x58, 0x8d, 0xdd, 0x00, 0xf7, 0x39, 0x39, 0xc7, 0xc6, 0x0d, + 0x49, 0xaf, 0x92, 0x1a, 0x0e, 0xc8, 0x39, 0x46, 0xab, 0x10, 0x4a, 0xa7, 0xa0, 0x2f, 0x70, 0x24, + 0xf5, 0x52, 0x75, 0x24, 0xfc, 0xb3, 0xd4, 0xd0, 0x7e, 0x09, 0xeb, 0xe3, 0x9c, 0xd4, 0xad, 0x16, + 0x45, 0x0a, 0xde, 0x41, 0xa4, 0xef, 0xc3, 0xc5, 0x08, 0x7f, 0x25, 0xfa, 0x85, 0xa0, 0x8a, 0xc8, + 0xcd, 0xd4, 0xbc, 0x9f, 0x07, 0xc6, 0x70, 0x33, 0x0d, 0x3c, 0xd1, 0x75, 0x4f, 0x31, 0xf7, 0x18, + 0x89, 0x05, 0x65, 0x79, 0x79, 0x47, 0xf8, 0x81, 0x4b, 0xf9, 0x95, 0xc6, 0xf9, 0xfd, 0x02, 0x60, + 0xf7, 0xea, 0x38, 0x9a, 0xf2, 0x11, 0xbc, 0x93, 0x5f, 0x91, 0x7f, 0xe1, 0xd7, 0xfc, 0x37, 0x2f, + 0x9d, 0x16, 0x17, 0xe7, 0x39, 0xb7, 0xd9, 0x64, 0x8c, 0x77, 0xa8, 0xcb, 0xa2, 0xbe, 0x90, 0x9c, + 0x7f, 0x1d, 0x96, 0x63, 0x97, 0xe1, 0x48, 0xe8, 0x4e, 0xd5, 0xab, 0xd1, 0xba, 0x94, 0x2e, 0xad, + 0xcb, 0xcc, 0x78, 0x5d, 0x9e, 0xc3, 0xda, 0x45, 0x18, 0x4d, 0xbf, 0x09, 0xab, 0xd9, 0xec, 0x54, + 0x83, 0xae, 0xea, 0x54, 0xf4, 0xf0, 0xbc, 0x76, 0xfe, 0xbd, 0x3f, 0xe7, 0x60, 0x6d, 0x4f, 0x09, + 0xe4, 0x00, 0xb3, 0x33, 0xe2, 0xe1, 0xc3, 0x1e, 0xfa, 0x0b, 0xc0, 0x6a, 0x3e, 0xe0, 0x51, 
0x67, + 0x8a, 0x90, 0xc6, 0xc7, 0x7f, 0xa3, 0x9e, 0x81, 0xb2, 0xd7, 0xc4, 0xda, 0x4d, 0x9f, 0x9a, 0xf6, + 0x0f, 0xe0, 0xf5, 0x1f, 0x6f, 0xbe, 0x2d, 0x7d, 0x0f, 0xcc, 0x56, 0xfa, 0x80, 0x7d, 0x9d, 0x65, + 0xfc, 0x58, 0xb7, 0x1a, 0xb7, 0xcd, 0xf4, 0x55, 0xe3, 0xb6, 0xf9, 0xea, 0x68, 0xd3, 0xdc, 0x18, + 0xc5, 0x50, 0x16, 0xb8, 0x11, 0x39, 0x57, 0x4f, 0x67, 0x01, 0xb8, 0x6e, 0xae, 0x8d, 0x02, 0x4f, + 0x69, 0xe8, 0x63, 0x56, 0x84, 0x6c, 0x99, 0x9b, 0xa3, 0x90, 0x13, 0x12, 0x86, 0x24, 0x0a, 0x3e, + 0xf2, 0x3c, 0x9a, 0x44, 0xc5, 0xb0, 0xe8, 0x1b, 0x00, 0x17, 0xc7, 0x86, 0x18, 0xda, 0xba, 0xf6, + 0x8b, 0xd1, 0x30, 0xaf, 0x03, 0xd5, 0xa3, 0x77, 0x45, 0x16, 0xa3, 0xde, 0xbe, 0x95, 0xe6, 0xa6, + 0x5b, 0x70, 0xfb, 0x65, 0x0a, 0xde, 0x06, 0x26, 0x7a, 0x0d, 0xe0, 0xc2, 0x68, 0x77, 0xa3, 0xee, + 0xb4, 0x26, 0x9e, 0x36, 0xd4, 0x1a, 0x5b, 0xd7, 0x40, 0xea, 0x2c, 0x9a, 0x32, 0x8b, 0xa5, 0x76, + 0xad, 0x98, 0x45, 0x48, 0xb8, 0x48, 0x93, 0xf8, 0x15, 0xc0, 0xd6, 0x55, 0x1d, 0x88, 0xb6, 0xdf, + 0x12, 0xec, 0x1a, 0xe3, 0xa1, 0xf1, 0xe8, 0x7f, 0xed, 0xd5, 0xa9, 0x77, 0x65, 0xea, 0x6d, 0x24, + 0xc5, 0x34, 0xb8, 0x2c, 0xc5, 0x7f, 0x01, 0xac, 0x64, 0x2d, 0x83, 0xda, 0x6f, 0x2f, 0x4e, 0x9e, + 0x57, 0xe7, 0x52, 0x8c, 0x8e, 0xff, 0x93, 0x92, 0xf3, 0x8f, 0x00, 0x2d, 0x49, 0x79, 0xa9, 0xd6, + 0x7e, 0x6c, 0xda, 0xe6, 0x2b, 0xa9, 0xa7, 0xa3, 0x7b, 0x68, 0xa5, 0xe8, 0xb8, 0x50, 0xb9, 0xf6, + 0x77, 0xd0, 0x7a, 0xd1, 0x3f, 0xa6, 0x70, 0x0d, 0x5a, 0x45, 0xcd, 0x22, 0x28, 0x57, 0xb7, 0x76, + 0x6f, 0xa0, 0x4e, 0xd1, 0x3d, 0xa1, 0x6c, 0x05, 0xdb, 0xf9, 0x0e, 0xc0, 0x25, 0x8f, 0x0e, 0x26, + 0x69, 0xed, 0xbc, 0xa7, 0x3b, 0x7e, 0x3f, 0xed, 0xd5, 0x7d, 0x70, 0xf4, 0x40, 0x43, 0x02, 0x1a, + 0xba, 0x51, 0x60, 0x51, 0x16, 0xd8, 0x01, 0x8e, 0x64, 0x27, 0xdb, 0xca, 0xe5, 0xc6, 0x84, 0x17, + 0x7e, 0x44, 0x1f, 0xe9, 0xcf, 0x7f, 0x00, 0xf8, 0xb9, 0xb4, 0xfc, 0xb1, 0xda, 0xfd, 0x24, 0xa4, + 0x89, 0x6f, 0xe9, 0xa3, 0xad, 0xc3, 0xde, 0x6f, 0x99, 0xe7, 0x58, 0x7a, 0x8e, 0xb5, 0xe7, 0xf8, + 0xb0, 0x77, 0x52, 0x96, 0x67, 0x7f, 0xf8, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc7, 0xec, 0x91, + 0x2d, 0x5a, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/logging/v2/logging_config.pb.go b/vendor/google.golang.org/genproto/googleapis/logging/v2/logging_config.pb.go new file mode 100644 index 0000000..76f6131 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/logging/v2/logging_config.pb.go @@ -0,0 +1,1557 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/logging/v2/logging_config.proto + +package logging // import "google.golang.org/genproto/googleapis/logging/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Available log entry formats. 
Log entries can be written to +// Logging in either format and can be exported in either format. +// Version 2 is the preferred format. +type LogSink_VersionFormat int32 + +const ( + // An unspecified format version that will default to V2. + LogSink_VERSION_FORMAT_UNSPECIFIED LogSink_VersionFormat = 0 + // `LogEntry` version 2 format. + LogSink_V2 LogSink_VersionFormat = 1 + // `LogEntry` version 1 format. + LogSink_V1 LogSink_VersionFormat = 2 +) + +var LogSink_VersionFormat_name = map[int32]string{ + 0: "VERSION_FORMAT_UNSPECIFIED", + 1: "V2", + 2: "V1", +} +var LogSink_VersionFormat_value = map[string]int32{ + "VERSION_FORMAT_UNSPECIFIED": 0, + "V2": 1, + "V1": 2, +} + +func (x LogSink_VersionFormat) String() string { + return proto.EnumName(LogSink_VersionFormat_name, int32(x)) +} +func (LogSink_VersionFormat) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{0, 0} +} + +// Describes a sink used to export log entries to one of the following +// destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a +// Cloud Pub/Sub topic. A logs filter controls which log entries are +// exported. The sink must be created within a project, organization, billing +// account, or folder. +type LogSink struct { + // Required. The client-assigned sink identifier, unique within the + // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are + // limited to 100 characters and can include only the following characters: + // upper and lower-case alphanumeric characters, underscores, hyphens, and + // periods. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The export destination: + // + // "storage.googleapis.com/[GCS_BUCKET]" + // "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + // "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + // + // The sink's `writer_identity`, set when the sink is created, must + // have permission to write to the destination or else the log + // entries are not exported. For more information, see + // [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs). + Destination string `protobuf:"bytes,3,opt,name=destination,proto3" json:"destination,omitempty"` + // Optional. + // An [advanced logs filter](/logging/docs/view/advanced_filters). The only + // exported log entries are those that are in the resource owning the sink and + // that match the filter. For example: + // + // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // Deprecated. The log entry format to use for this sink's exported log + // entries. The v2 format is used by default and cannot be changed. + OutputVersionFormat LogSink_VersionFormat `protobuf:"varint,6,opt,name=output_version_format,json=outputVersionFormat,proto3,enum=google.logging.v2.LogSink_VersionFormat" json:"output_version_format,omitempty"` // Deprecated: Do not use. + // Output only. An IAM identity—a service account or group—under + // which Logging writes the exported log entries to the sink's + // destination. This field is set by + // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) + // and + // [sinks.update](/logging/docs/api/reference/rest/v2/projects.sinks/update), + // based on the setting of `unique_writer_identity` in those methods. 
+ // + // Until you grant this identity write-access to the destination, log entry + // exports from this sink will fail. For more information, + // see [Granting access for a + // resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). + // Consult the destination service's documentation to determine the + // appropriate IAM roles to assign to the identity. + WriterIdentity string `protobuf:"bytes,8,opt,name=writer_identity,json=writerIdentity,proto3" json:"writer_identity,omitempty"` + // Optional. This field applies only to sinks owned by organizations and + // folders. If the field is false, the default, only the logs owned by the + // sink's parent resource are available for export. If the field is true, then + // logs from all the projects, folders, and billing accounts contained in the + // sink's parent resource are also available for export. Whether a particular + // log entry from the children is exported depends on the sink's filter + // expression. For example, if this field is true, then the filter + // `resource.type=gce_instance` would export all Compute Engine VM instance + // log entries from all projects in the sink's parent. To only export entries + // from certain child projects, filter on the project part of the log name: + // + // logName:("projects/test-project1/" OR "projects/test-project2/") AND + // resource.type=gce_instance + IncludeChildren bool `protobuf:"varint,9,opt,name=include_children,json=includeChildren,proto3" json:"include_children,omitempty"` + // Deprecated. This field is ignored when creating or updating sinks. + StartTime *timestamp.Timestamp `protobuf:"bytes,10,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` // Deprecated: Do not use. + // Deprecated. This field is ignored when creating or updating sinks. + EndTime *timestamp.Timestamp `protobuf:"bytes,11,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` // Deprecated: Do not use. + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogSink) Reset() { *m = LogSink{} } +func (m *LogSink) String() string { return proto.CompactTextString(m) } +func (*LogSink) ProtoMessage() {} +func (*LogSink) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{0} +} +func (m *LogSink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogSink.Unmarshal(m, b) +} +func (m *LogSink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogSink.Marshal(b, m, deterministic) +} +func (dst *LogSink) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogSink.Merge(dst, src) +} +func (m *LogSink) XXX_Size() int { + return xxx_messageInfo_LogSink.Size(m) +} +func (m *LogSink) XXX_DiscardUnknown() { + xxx_messageInfo_LogSink.DiscardUnknown(m) +} + +var xxx_messageInfo_LogSink proto.InternalMessageInfo + +func (m *LogSink) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LogSink) GetDestination() string { + if m != nil { + return m.Destination + } + return "" +} + +func (m *LogSink) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Deprecated: Do not use. 
+func (m *LogSink) GetOutputVersionFormat() LogSink_VersionFormat { + if m != nil { + return m.OutputVersionFormat + } + return LogSink_VERSION_FORMAT_UNSPECIFIED +} + +func (m *LogSink) GetWriterIdentity() string { + if m != nil { + return m.WriterIdentity + } + return "" +} + +func (m *LogSink) GetIncludeChildren() bool { + if m != nil { + return m.IncludeChildren + } + return false +} + +// Deprecated: Do not use. +func (m *LogSink) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +// Deprecated: Do not use. +func (m *LogSink) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// The parameters to `ListSinks`. +type ListSinksRequest struct { + // Required. The parent resource whose sinks are to be listed: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSinksRequest) Reset() { *m = ListSinksRequest{} } +func (m *ListSinksRequest) String() string { return proto.CompactTextString(m) } +func (*ListSinksRequest) ProtoMessage() {} +func (*ListSinksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{1} +} +func (m *ListSinksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSinksRequest.Unmarshal(m, b) +} +func (m *ListSinksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSinksRequest.Marshal(b, m, deterministic) +} +func (dst *ListSinksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSinksRequest.Merge(dst, src) +} +func (m *ListSinksRequest) XXX_Size() int { + return xxx_messageInfo_ListSinksRequest.Size(m) +} +func (m *ListSinksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSinksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSinksRequest proto.InternalMessageInfo + +func (m *ListSinksRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListSinksRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListSinksRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Result returned from `ListSinks`. +type ListSinksResponse struct { + // A list of sinks. + Sinks []*LogSink `protobuf:"bytes,1,rep,name=sinks,proto3" json:"sinks,omitempty"` + // If there might be more results than appear in this response, then + // `nextPageToken` is included. 
To get the next set of results, call the same + // method again using the value of `nextPageToken` as `pageToken`. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSinksResponse) Reset() { *m = ListSinksResponse{} } +func (m *ListSinksResponse) String() string { return proto.CompactTextString(m) } +func (*ListSinksResponse) ProtoMessage() {} +func (*ListSinksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{2} +} +func (m *ListSinksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSinksResponse.Unmarshal(m, b) +} +func (m *ListSinksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSinksResponse.Marshal(b, m, deterministic) +} +func (dst *ListSinksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSinksResponse.Merge(dst, src) +} +func (m *ListSinksResponse) XXX_Size() int { + return xxx_messageInfo_ListSinksResponse.Size(m) +} +func (m *ListSinksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSinksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSinksResponse proto.InternalMessageInfo + +func (m *ListSinksResponse) GetSinks() []*LogSink { + if m != nil { + return m.Sinks + } + return nil +} + +func (m *ListSinksResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The parameters to `GetSink`. +type GetSinkRequest struct { + // Required. The resource name of the sink: + // + // "projects/[PROJECT_ID]/sinks/[SINK_ID]" + // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + // "folders/[FOLDER_ID]/sinks/[SINK_ID]" + // + // Example: `"projects/my-project-id/sinks/my-sink-id"`. + SinkName string `protobuf:"bytes,1,opt,name=sink_name,json=sinkName,proto3" json:"sink_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSinkRequest) Reset() { *m = GetSinkRequest{} } +func (m *GetSinkRequest) String() string { return proto.CompactTextString(m) } +func (*GetSinkRequest) ProtoMessage() {} +func (*GetSinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{3} +} +func (m *GetSinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSinkRequest.Unmarshal(m, b) +} +func (m *GetSinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSinkRequest.Marshal(b, m, deterministic) +} +func (dst *GetSinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSinkRequest.Merge(dst, src) +} +func (m *GetSinkRequest) XXX_Size() int { + return xxx_messageInfo_GetSinkRequest.Size(m) +} +func (m *GetSinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSinkRequest proto.InternalMessageInfo + +func (m *GetSinkRequest) GetSinkName() string { + if m != nil { + return m.SinkName + } + return "" +} + +// The parameters to `CreateSink`. +type CreateSinkRequest struct { + // Required. 
The resource in which to create the sink: + // + // "projects/[PROJECT_ID]" + // "organizations/[ORGANIZATION_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]" + // "folders/[FOLDER_ID]" + // + // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The new sink, whose `name` parameter is a sink identifier that + // is not already in use. + Sink *LogSink `protobuf:"bytes,2,opt,name=sink,proto3" json:"sink,omitempty"` + // Optional. Determines the kind of IAM identity returned as `writer_identity` + // in the new sink. If this value is omitted or set to false, and if the + // sink's parent is a project, then the value returned as `writer_identity` is + // the same group or service account used by Logging before the + // addition of writer identities to this API. The sink's destination must be + // in the same project as the sink itself. + // + // If this field is set to true, or if the sink is owned by a non-project + // resource such as an organization, then the value of `writer_identity` will + // be a unique service account used only for exports from the new sink. For + // more information, see `writer_identity` in + // [LogSink][google.logging.v2.LogSink]. + UniqueWriterIdentity bool `protobuf:"varint,3,opt,name=unique_writer_identity,json=uniqueWriterIdentity,proto3" json:"unique_writer_identity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSinkRequest) Reset() { *m = CreateSinkRequest{} } +func (m *CreateSinkRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSinkRequest) ProtoMessage() {} +func (*CreateSinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{4} +} +func (m *CreateSinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSinkRequest.Unmarshal(m, b) +} +func (m *CreateSinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSinkRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSinkRequest.Merge(dst, src) +} +func (m *CreateSinkRequest) XXX_Size() int { + return xxx_messageInfo_CreateSinkRequest.Size(m) +} +func (m *CreateSinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSinkRequest proto.InternalMessageInfo + +func (m *CreateSinkRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateSinkRequest) GetSink() *LogSink { + if m != nil { + return m.Sink + } + return nil +} + +func (m *CreateSinkRequest) GetUniqueWriterIdentity() bool { + if m != nil { + return m.UniqueWriterIdentity + } + return false +} + +// The parameters to `UpdateSink`. +type UpdateSinkRequest struct { + // Required. The full resource name of the sink to update, including the + // parent resource and the sink identifier: + // + // "projects/[PROJECT_ID]/sinks/[SINK_ID]" + // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + // "folders/[FOLDER_ID]/sinks/[SINK_ID]" + // + // Example: `"projects/my-project-id/sinks/my-sink-id"`. + SinkName string `protobuf:"bytes,1,opt,name=sink_name,json=sinkName,proto3" json:"sink_name,omitempty"` + // Required. 
The updated sink, whose name is the same identifier that appears + // as part of `sink_name`. + Sink *LogSink `protobuf:"bytes,2,opt,name=sink,proto3" json:"sink,omitempty"` + // Optional. See + // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) + // for a description of this field. When updating a sink, the effect of this + // field on the value of `writer_identity` in the updated sink depends on both + // the old and new values of this field: + // + // + If the old and new values of this field are both false or both true, + // then there is no change to the sink's `writer_identity`. + // + If the old value is false and the new value is true, then + // `writer_identity` is changed to a unique service account. + // + It is an error if the old value is true and the new value is + // set to false or defaulted to false. + UniqueWriterIdentity bool `protobuf:"varint,3,opt,name=unique_writer_identity,json=uniqueWriterIdentity,proto3" json:"unique_writer_identity,omitempty"` + // Optional. Field mask that specifies the fields in `sink` that need + // an update. A sink field will be overwritten if, and only if, it is + // in the update mask. `name` and output only fields cannot be updated. + // + // An empty updateMask is temporarily treated as using the following mask + // for backwards compatibility purposes: + // destination,filter,includeChildren + // At some point in the future, behavior will be removed and specifying an + // empty updateMask will be an error. + // + // For a detailed `FieldMask` definition, see + // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + // + // Example: `updateMask=filter`. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSinkRequest) Reset() { *m = UpdateSinkRequest{} } +func (m *UpdateSinkRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSinkRequest) ProtoMessage() {} +func (*UpdateSinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{5} +} +func (m *UpdateSinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSinkRequest.Unmarshal(m, b) +} +func (m *UpdateSinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSinkRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSinkRequest.Merge(dst, src) +} +func (m *UpdateSinkRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSinkRequest.Size(m) +} +func (m *UpdateSinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSinkRequest proto.InternalMessageInfo + +func (m *UpdateSinkRequest) GetSinkName() string { + if m != nil { + return m.SinkName + } + return "" +} + +func (m *UpdateSinkRequest) GetSink() *LogSink { + if m != nil { + return m.Sink + } + return nil +} + +func (m *UpdateSinkRequest) GetUniqueWriterIdentity() bool { + if m != nil { + return m.UniqueWriterIdentity + } + return false +} + +func (m *UpdateSinkRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// The parameters to `DeleteSink`. +type DeleteSinkRequest struct { + // Required. 
The full resource name of the sink to delete, including the + // parent resource and the sink identifier: + // + // "projects/[PROJECT_ID]/sinks/[SINK_ID]" + // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + // "folders/[FOLDER_ID]/sinks/[SINK_ID]" + // + // Example: `"projects/my-project-id/sinks/my-sink-id"`. + SinkName string `protobuf:"bytes,1,opt,name=sink_name,json=sinkName,proto3" json:"sink_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSinkRequest) Reset() { *m = DeleteSinkRequest{} } +func (m *DeleteSinkRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSinkRequest) ProtoMessage() {} +func (*DeleteSinkRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_config_eaca28b770adee30, []int{6} +} +func (m *DeleteSinkRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSinkRequest.Unmarshal(m, b) +} +func (m *DeleteSinkRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSinkRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSinkRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSinkRequest.Merge(dst, src) +} +func (m *DeleteSinkRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSinkRequest.Size(m) +} +func (m *DeleteSinkRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSinkRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSinkRequest proto.InternalMessageInfo + +func (m *DeleteSinkRequest) GetSinkName() string { + if m != nil { + return m.SinkName + } + return "" +} + +// Specifies a set of log entries that are not to be stored in +// Logging. If your project receives a large volume of logs, you might be able +// to use exclusions to reduce your chargeable logs. Exclusions are processed +// after log sinks, so you can export log entries before they are excluded. +// Audit log entries and log entries from Amazon Web Services are never +// excluded. +type LogExclusion struct { + // Required. A client-assigned identifier, such as + // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and + // can include only letters, digits, underscores, hyphens, and periods. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Optional. A description of this exclusion. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Required. + // An [advanced logs filter](/logging/docs/view/advanced_filters) + // that matches the log entries to be excluded. By using the + // [sample function](/logging/docs/view/advanced_filters#sample), + // you can exclude less than 100% of the matching log entries. + // For example, the following filter matches 99% of low-severity log + // entries from load balancers: + // + // `"resource.type=http_load_balancer severity=ERROR" + // + // The maximum length of the filter is 20000 characters. + Filter string `protobuf:"bytes,3,opt,name=filter,proto3" json:"filter,omitempty"` + // Optional. The metric descriptor associated with the logs-based metric. + // If unspecified, it uses a default metric descriptor with a DELTA metric + // kind, INT64 value type, with no labels and a unit of "1". Such a metric + // counts the number of log entries matching the `filter` expression. 
+ // + // The `name`, `type`, and `description` fields in the `metric_descriptor` + // are output only, and is constructed using the `name` and `description` + // field in the LogMetric. + // + // To create a logs-based metric that records a distribution of log values, a + // DELTA metric kind with a DISTRIBUTION value type must be used along with + // a `value_extractor` expression in the LogMetric. + // + // Each label in the metric descriptor must have a matching label + // name as the key and an extractor expression as the value in the + // `label_extractors` map. + // + // The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot + // be updated once initially configured. New labels can be added in the + // `metric_descriptor`, but existing labels cannot be modified except for + // their description. + MetricDescriptor *metric.MetricDescriptor `protobuf:"bytes,5,opt,name=metric_descriptor,json=metricDescriptor,proto3" json:"metric_descriptor,omitempty"` + // Optional. A `value_extractor` is required when using a distribution + // logs-based metric to extract the values to record from a log entry. + // Two functions are supported for value extraction: `EXTRACT(field)` or + // `REGEXP_EXTRACT(field, regex)`. The argument are: + // 1. field: The name of the log entry field from which the value is to be + // extracted. + // 2. regex: A regular expression using the Google RE2 syntax + // (https://github.com/google/re2/wiki/Syntax) with a single capture + // group to extract data from the specified log entry field. The value + // of the field is converted to a string before applying the regex. + // It is an error to specify a regex that does not include exactly one + // capture group. + // + // The result of the extraction must be convertible to a double type, as the + // distribution always records double values. If either the extraction or + // the conversion to double fails, then those values are not recorded in the + // distribution. + // + // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")` + ValueExtractor string `protobuf:"bytes,6,opt,name=value_extractor,json=valueExtractor,proto3" json:"value_extractor,omitempty"` + // Optional. A map from a label key string to an extractor expression which is + // used to extract data from a log entry field and assign as the label value. + // Each label key specified in the LabelDescriptor must have an associated + // extractor expression in this map. The syntax of the extractor expression + // is the same as for the `value_extractor` field. + // + // The extracted value is converted to the type defined in the label + // descriptor. If the either the extraction or the type conversion fails, + // the label will have a default value. The default value for a string + // label is an empty string, for an integer label its 0, and for a boolean + // label its `false`. + // + // Note that there are upper bounds on the maximum number of labels and the + // number of active time series that are allowed in a project. + LabelExtractors map[string]string `protobuf:"bytes,7,rep,name=label_extractors,json=labelExtractors,proto3" json:"label_extractors,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Optional. The `bucket_options` are required when the logs-based metric is + // using a DISTRIBUTION value type and it describes the bucket boundaries + // used to create a histogram of the extracted values. 
+ BucketOptions *distribution.Distribution_BucketOptions `protobuf:"bytes,8,opt,name=bucket_options,json=bucketOptions,proto3" json:"bucket_options,omitempty"` + // Deprecated. The API version that created or updated this metric. + // The v2 format is used by default and cannot be changed. + Version LogMetric_ApiVersion `protobuf:"varint,4,opt,name=version,proto3,enum=google.logging.v2.LogMetric_ApiVersion" json:"version,omitempty"` // Deprecated: Do not use. + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LogMetric) Reset() { *m = LogMetric{} } +func (m *LogMetric) String() string { return proto.CompactTextString(m) } +func (*LogMetric) ProtoMessage() {} +func (*LogMetric) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{0} +} +func (m *LogMetric) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LogMetric.Unmarshal(m, b) +} +func (m *LogMetric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LogMetric.Marshal(b, m, deterministic) +} +func (dst *LogMetric) XXX_Merge(src proto.Message) { + xxx_messageInfo_LogMetric.Merge(dst, src) +} +func (m *LogMetric) XXX_Size() int { + return xxx_messageInfo_LogMetric.Size(m) +} +func (m *LogMetric) XXX_DiscardUnknown() { + xxx_messageInfo_LogMetric.DiscardUnknown(m) +} + +var xxx_messageInfo_LogMetric proto.InternalMessageInfo + +func (m *LogMetric) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *LogMetric) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *LogMetric) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *LogMetric) GetMetricDescriptor() *metric.MetricDescriptor { + if m != nil { + return m.MetricDescriptor + } + return nil +} + +func (m *LogMetric) GetValueExtractor() string { + if m != nil { + return m.ValueExtractor + } + return "" +} + +func (m *LogMetric) GetLabelExtractors() map[string]string { + if m != nil { + return m.LabelExtractors + } + return nil +} + +func (m *LogMetric) GetBucketOptions() *distribution.Distribution_BucketOptions { + if m != nil { + return m.BucketOptions + } + return nil +} + +// Deprecated: Do not use. +func (m *LogMetric) GetVersion() LogMetric_ApiVersion { + if m != nil { + return m.Version + } + return LogMetric_V2 +} + +// The parameters to ListLogMetrics. +type ListLogMetricsRequest struct { + // Required. The name of the project containing the metrics: + // + // "projects/[PROJECT_ID]" + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. If present, then retrieve the next batch of results from the + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method + // parameters should be identical to those in the previous call. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. The presence of `nextPageToken` in the + // response indicates that more results might be available. 
+ PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogMetricsRequest) Reset() { *m = ListLogMetricsRequest{} } +func (m *ListLogMetricsRequest) String() string { return proto.CompactTextString(m) } +func (*ListLogMetricsRequest) ProtoMessage() {} +func (*ListLogMetricsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{1} +} +func (m *ListLogMetricsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogMetricsRequest.Unmarshal(m, b) +} +func (m *ListLogMetricsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogMetricsRequest.Marshal(b, m, deterministic) +} +func (dst *ListLogMetricsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogMetricsRequest.Merge(dst, src) +} +func (m *ListLogMetricsRequest) XXX_Size() int { + return xxx_messageInfo_ListLogMetricsRequest.Size(m) +} +func (m *ListLogMetricsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogMetricsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogMetricsRequest proto.InternalMessageInfo + +func (m *ListLogMetricsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListLogMetricsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListLogMetricsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +// Result returned from ListLogMetrics. +type ListLogMetricsResponse struct { + // A list of logs-based metrics. + Metrics []*LogMetric `protobuf:"bytes,1,rep,name=metrics,proto3" json:"metrics,omitempty"` + // If there might be more results than appear in this response, then + // `nextPageToken` is included. To get the next set of results, call this + // method again using the value of `nextPageToken` as `pageToken`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListLogMetricsResponse) Reset() { *m = ListLogMetricsResponse{} } +func (m *ListLogMetricsResponse) String() string { return proto.CompactTextString(m) } +func (*ListLogMetricsResponse) ProtoMessage() {} +func (*ListLogMetricsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{2} +} +func (m *ListLogMetricsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListLogMetricsResponse.Unmarshal(m, b) +} +func (m *ListLogMetricsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListLogMetricsResponse.Marshal(b, m, deterministic) +} +func (dst *ListLogMetricsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListLogMetricsResponse.Merge(dst, src) +} +func (m *ListLogMetricsResponse) XXX_Size() int { + return xxx_messageInfo_ListLogMetricsResponse.Size(m) +} +func (m *ListLogMetricsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListLogMetricsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListLogMetricsResponse proto.InternalMessageInfo + +func (m *ListLogMetricsResponse) GetMetrics() []*LogMetric { + if m != nil { + return m.Metrics + } + return nil +} + +func (m *ListLogMetricsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The parameters to GetLogMetric. +type GetLogMetricRequest struct { + // The resource name of the desired metric: + // + // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLogMetricRequest) Reset() { *m = GetLogMetricRequest{} } +func (m *GetLogMetricRequest) String() string { return proto.CompactTextString(m) } +func (*GetLogMetricRequest) ProtoMessage() {} +func (*GetLogMetricRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{3} +} +func (m *GetLogMetricRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLogMetricRequest.Unmarshal(m, b) +} +func (m *GetLogMetricRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLogMetricRequest.Marshal(b, m, deterministic) +} +func (dst *GetLogMetricRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLogMetricRequest.Merge(dst, src) +} +func (m *GetLogMetricRequest) XXX_Size() int { + return xxx_messageInfo_GetLogMetricRequest.Size(m) +} +func (m *GetLogMetricRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetLogMetricRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLogMetricRequest proto.InternalMessageInfo + +func (m *GetLogMetricRequest) GetMetricName() string { + if m != nil { + return m.MetricName + } + return "" +} + +// The parameters to CreateLogMetric. +type CreateLogMetricRequest struct { + // The resource name of the project in which to create the metric: + // + // "projects/[PROJECT_ID]" + // + // The new metric must be provided in the request. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The new logs-based metric, which must not have an identifier that + // already exists. 
+ Metric *LogMetric `protobuf:"bytes,2,opt,name=metric,proto3" json:"metric,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateLogMetricRequest) Reset() { *m = CreateLogMetricRequest{} } +func (m *CreateLogMetricRequest) String() string { return proto.CompactTextString(m) } +func (*CreateLogMetricRequest) ProtoMessage() {} +func (*CreateLogMetricRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{4} +} +func (m *CreateLogMetricRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateLogMetricRequest.Unmarshal(m, b) +} +func (m *CreateLogMetricRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateLogMetricRequest.Marshal(b, m, deterministic) +} +func (dst *CreateLogMetricRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateLogMetricRequest.Merge(dst, src) +} +func (m *CreateLogMetricRequest) XXX_Size() int { + return xxx_messageInfo_CreateLogMetricRequest.Size(m) +} +func (m *CreateLogMetricRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateLogMetricRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateLogMetricRequest proto.InternalMessageInfo + +func (m *CreateLogMetricRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateLogMetricRequest) GetMetric() *LogMetric { + if m != nil { + return m.Metric + } + return nil +} + +// The parameters to UpdateLogMetric. +type UpdateLogMetricRequest struct { + // The resource name of the metric to update: + // + // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + // + // The updated metric must be provided in the request and it's + // `name` field must be the same as `[METRIC_ID]` If the metric + // does not exist in `[PROJECT_ID]`, then a new metric is created. + MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"` + // The updated metric. 
+ Metric *LogMetric `protobuf:"bytes,2,opt,name=metric,proto3" json:"metric,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateLogMetricRequest) Reset() { *m = UpdateLogMetricRequest{} } +func (m *UpdateLogMetricRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateLogMetricRequest) ProtoMessage() {} +func (*UpdateLogMetricRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{5} +} +func (m *UpdateLogMetricRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateLogMetricRequest.Unmarshal(m, b) +} +func (m *UpdateLogMetricRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateLogMetricRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateLogMetricRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateLogMetricRequest.Merge(dst, src) +} +func (m *UpdateLogMetricRequest) XXX_Size() int { + return xxx_messageInfo_UpdateLogMetricRequest.Size(m) +} +func (m *UpdateLogMetricRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateLogMetricRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateLogMetricRequest proto.InternalMessageInfo + +func (m *UpdateLogMetricRequest) GetMetricName() string { + if m != nil { + return m.MetricName + } + return "" +} + +func (m *UpdateLogMetricRequest) GetMetric() *LogMetric { + if m != nil { + return m.Metric + } + return nil +} + +// The parameters to DeleteLogMetric. +type DeleteLogMetricRequest struct { + // The resource name of the metric to delete: + // + // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteLogMetricRequest) Reset() { *m = DeleteLogMetricRequest{} } +func (m *DeleteLogMetricRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteLogMetricRequest) ProtoMessage() {} +func (*DeleteLogMetricRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_logging_metrics_29fe293c1106d204, []int{6} +} +func (m *DeleteLogMetricRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteLogMetricRequest.Unmarshal(m, b) +} +func (m *DeleteLogMetricRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteLogMetricRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteLogMetricRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteLogMetricRequest.Merge(dst, src) +} +func (m *DeleteLogMetricRequest) XXX_Size() int { + return xxx_messageInfo_DeleteLogMetricRequest.Size(m) +} +func (m *DeleteLogMetricRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteLogMetricRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteLogMetricRequest proto.InternalMessageInfo + +func (m *DeleteLogMetricRequest) GetMetricName() string { + if m != nil { + return m.MetricName + } + return "" +} + +func init() { + proto.RegisterType((*LogMetric)(nil), "google.logging.v2.LogMetric") + proto.RegisterMapType((map[string]string)(nil), "google.logging.v2.LogMetric.LabelExtractorsEntry") + proto.RegisterType((*ListLogMetricsRequest)(nil), "google.logging.v2.ListLogMetricsRequest") + proto.RegisterType((*ListLogMetricsResponse)(nil), "google.logging.v2.ListLogMetricsResponse") + 
proto.RegisterType((*GetLogMetricRequest)(nil), "google.logging.v2.GetLogMetricRequest") + proto.RegisterType((*CreateLogMetricRequest)(nil), "google.logging.v2.CreateLogMetricRequest") + proto.RegisterType((*UpdateLogMetricRequest)(nil), "google.logging.v2.UpdateLogMetricRequest") + proto.RegisterType((*DeleteLogMetricRequest)(nil), "google.logging.v2.DeleteLogMetricRequest") + proto.RegisterEnum("google.logging.v2.LogMetric_ApiVersion", LogMetric_ApiVersion_name, LogMetric_ApiVersion_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetricsServiceV2Client is the client API for MetricsServiceV2 service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MetricsServiceV2Client interface { + // Lists logs-based metrics. + ListLogMetrics(ctx context.Context, in *ListLogMetricsRequest, opts ...grpc.CallOption) (*ListLogMetricsResponse, error) + // Gets a logs-based metric. + GetLogMetric(ctx context.Context, in *GetLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) + // Creates a logs-based metric. + CreateLogMetric(ctx context.Context, in *CreateLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) + // Creates or updates a logs-based metric. + UpdateLogMetric(ctx context.Context, in *UpdateLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) + // Deletes a logs-based metric. + DeleteLogMetric(ctx context.Context, in *DeleteLogMetricRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type metricsServiceV2Client struct { + cc *grpc.ClientConn +} + +func NewMetricsServiceV2Client(cc *grpc.ClientConn) MetricsServiceV2Client { + return &metricsServiceV2Client{cc} +} + +func (c *metricsServiceV2Client) ListLogMetrics(ctx context.Context, in *ListLogMetricsRequest, opts ...grpc.CallOption) (*ListLogMetricsResponse, error) { + out := new(ListLogMetricsResponse) + err := c.cc.Invoke(ctx, "/google.logging.v2.MetricsServiceV2/ListLogMetrics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricsServiceV2Client) GetLogMetric(ctx context.Context, in *GetLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) { + out := new(LogMetric) + err := c.cc.Invoke(ctx, "/google.logging.v2.MetricsServiceV2/GetLogMetric", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricsServiceV2Client) CreateLogMetric(ctx context.Context, in *CreateLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) { + out := new(LogMetric) + err := c.cc.Invoke(ctx, "/google.logging.v2.MetricsServiceV2/CreateLogMetric", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricsServiceV2Client) UpdateLogMetric(ctx context.Context, in *UpdateLogMetricRequest, opts ...grpc.CallOption) (*LogMetric, error) { + out := new(LogMetric) + err := c.cc.Invoke(ctx, "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricsServiceV2Client) DeleteLogMetric(ctx context.Context, in *DeleteLogMetricRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MetricsServiceV2Server is the server API for MetricsServiceV2 service. +type MetricsServiceV2Server interface { + // Lists logs-based metrics. + ListLogMetrics(context.Context, *ListLogMetricsRequest) (*ListLogMetricsResponse, error) + // Gets a logs-based metric. + GetLogMetric(context.Context, *GetLogMetricRequest) (*LogMetric, error) + // Creates a logs-based metric. + CreateLogMetric(context.Context, *CreateLogMetricRequest) (*LogMetric, error) + // Creates or updates a logs-based metric. + UpdateLogMetric(context.Context, *UpdateLogMetricRequest) (*LogMetric, error) + // Deletes a logs-based metric. + DeleteLogMetric(context.Context, *DeleteLogMetricRequest) (*empty.Empty, error) +} + +func RegisterMetricsServiceV2Server(s *grpc.Server, srv MetricsServiceV2Server) { + s.RegisterService(&_MetricsServiceV2_serviceDesc, srv) +} + +func _MetricsServiceV2_ListLogMetrics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListLogMetricsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceV2Server).ListLogMetrics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.MetricsServiceV2/ListLogMetrics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceV2Server).ListLogMetrics(ctx, req.(*ListLogMetricsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricsServiceV2_GetLogMetric_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetLogMetricRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceV2Server).GetLogMetric(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.MetricsServiceV2/GetLogMetric", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceV2Server).GetLogMetric(ctx, req.(*GetLogMetricRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricsServiceV2_CreateLogMetric_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateLogMetricRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceV2Server).CreateLogMetric(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.MetricsServiceV2/CreateLogMetric", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceV2Server).CreateLogMetric(ctx, req.(*CreateLogMetricRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricsServiceV2_UpdateLogMetric_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(UpdateLogMetricRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceV2Server).UpdateLogMetric(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceV2Server).UpdateLogMetric(ctx, req.(*UpdateLogMetricRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricsServiceV2_DeleteLogMetric_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteLogMetricRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceV2Server).DeleteLogMetric(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceV2Server).DeleteLogMetric(ctx, req.(*DeleteLogMetricRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetricsServiceV2_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.logging.v2.MetricsServiceV2", + HandlerType: (*MetricsServiceV2Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListLogMetrics", + Handler: _MetricsServiceV2_ListLogMetrics_Handler, + }, + { + MethodName: "GetLogMetric", + Handler: _MetricsServiceV2_GetLogMetric_Handler, + }, + { + MethodName: "CreateLogMetric", + Handler: _MetricsServiceV2_CreateLogMetric_Handler, + }, + { + MethodName: "UpdateLogMetric", + Handler: _MetricsServiceV2_UpdateLogMetric_Handler, + }, + { + MethodName: "DeleteLogMetric", + Handler: _MetricsServiceV2_DeleteLogMetric_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/logging/v2/logging_metrics.proto", +} + +func init() { + proto.RegisterFile("google/logging/v2/logging_metrics.proto", fileDescriptor_logging_metrics_29fe293c1106d204) +} + +var fileDescriptor_logging_metrics_29fe293c1106d204 = []byte{ + // 861 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0x67, 0x9c, 0xc4, 0x69, 0x5e, 0x68, 0xec, 0x4e, 0x5b, 0xd7, 0x72, 0x53, 0xc5, 0xec, 0x21, + 0x71, 0x73, 0xd8, 0xa5, 0x0b, 0x8a, 0x4a, 0x11, 0x07, 0xdc, 0x44, 0x15, 0x52, 0x0a, 0x91, 0x0b, + 0x3e, 0xa0, 0x48, 0xab, 0xb1, 0xfd, 0xb2, 0x1a, 0xbc, 0xbb, 0xb3, 0xec, 0x8c, 0xad, 0xa4, 0xa8, + 0x17, 0xd4, 0x1b, 0x12, 0x07, 0xf8, 0x00, 0x1c, 0xb8, 0xf1, 0x51, 0xb8, 0xc2, 0x47, 0xe0, 0x43, + 0x70, 0x44, 0x3b, 0x3b, 0xeb, 0x6c, 0xed, 0x25, 0x8e, 0x72, 0xca, 0xcc, 0xfb, 0xfd, 0xde, 0xfc, + 0x7e, 0xef, 0x8f, 0x63, 0xc3, 0x9e, 0x2f, 0x84, 0x1f, 0xa0, 0x13, 0x08, 0xdf, 0xe7, 0x91, 0xef, + 0x4c, 0xdd, 0xfc, 0xe8, 0x85, 0xa8, 0x12, 0x3e, 0x94, 0x76, 0x9c, 0x08, 0x25, 0xe8, 0x9d, 0x8c, + 0x68, 0x1b, 0xd4, 0x9e, 0xba, 0xad, 0x6d, 0x93, 0xcb, 0x62, 0xee, 0xb0, 0x28, 0x12, 0x8a, 0x29, + 0x2e, 0x22, 0x93, 0xd0, 0x7a, 0x54, 0x40, 0x47, 0x5c, 0xaa, 0x84, 0x0f, 0x26, 0x29, 0x6e, 0xe0, + 0x07, 0x05, 0x38, 0x53, 0x32, 0xc0, 0x43, 0x03, 0xe8, 0xdb, 0x60, 0x72, 0xe6, 0x60, 0x18, 0xab, + 0x0b, 0x03, 0xb6, 0xe7, 0xc1, 0x33, 0x8e, 0xc1, 0xc8, 0x0b, 0x99, 0x1c, 0x1b, 0xc6, 0xce, 0x3c, + 0x43, 0xf1, 0x10, 0xa5, 0x62, 0x61, 0x9c, 0x11, 0xac, 0xdf, 0x57, 0x61, 0xe3, 0x58, 0xf8, 0x2f, + 0xb5, 0x26, 0xa5, 0xb0, 0x1a, 0xb1, 0x10, 0x9b, 
0xa4, 0x4d, 0x3a, 0x1b, 0x3d, 0x7d, 0xa6, 0x6d, + 0xd8, 0x1c, 0xa1, 0x1c, 0x26, 0x3c, 0x4e, 0xfd, 0x36, 0x2b, 0x1a, 0x2a, 0x86, 0x68, 0x03, 0xaa, + 0x67, 0x3c, 0x50, 0x98, 0x34, 0x57, 0x34, 0x68, 0x6e, 0xf4, 0x0b, 0xb8, 0x93, 0xd5, 0xe2, 0xe5, + 0x6c, 0x91, 0x34, 0xd7, 0xda, 0xa4, 0xb3, 0xe9, 0x6e, 0xdb, 0xa6, 0x81, 0x2c, 0xe6, 0x76, 0x26, + 0x7e, 0x38, 0xe3, 0xf4, 0xea, 0xe1, 0x5c, 0x84, 0xee, 0x41, 0x6d, 0xca, 0x82, 0x09, 0x7a, 0x78, + 0xae, 0x12, 0x36, 0x4c, 0x1f, 0xaa, 0x6a, 0xad, 0x2d, 0x1d, 0x3e, 0xca, 0xa3, 0xf4, 0x14, 0xea, + 0x01, 0x1b, 0x60, 0x70, 0x49, 0x94, 0xcd, 0xf5, 0xf6, 0x4a, 0x67, 0xd3, 0x7d, 0x62, 0x2f, 0xcc, + 0xcc, 0x9e, 0x55, 0x6e, 0x1f, 0xa7, 0x49, 0xb3, 0x67, 0xe4, 0x51, 0xa4, 0x92, 0x8b, 0x5e, 0x2d, + 0x78, 0x37, 0x4a, 0x5f, 0xc2, 0xd6, 0x60, 0x32, 0x1c, 0xa3, 0xf2, 0x84, 0x2e, 0x5d, 0x36, 0x6f, + 0xe9, 0x72, 0x76, 0x8b, 0xe5, 0x1c, 0x16, 0xc7, 0xdb, 0xd5, 0xf4, 0xaf, 0x32, 0x76, 0xef, 0xf6, + 0xa0, 0x78, 0xa5, 0x47, 0xb0, 0x3e, 0xc5, 0x44, 0xa6, 0x6d, 0x5d, 0x6d, 0x93, 0xce, 0x96, 0xbb, + 0x77, 0xa5, 0xc7, 0xcf, 0x63, 0xde, 0xcf, 0xe8, 0xdd, 0x4a, 0x93, 0xf4, 0xf2, 0xdc, 0x56, 0x17, + 0xee, 0x95, 0xd9, 0xa7, 0x75, 0x58, 0x19, 0xe3, 0x85, 0x19, 0x66, 0x7a, 0xa4, 0xf7, 0x60, 0x4d, + 0xf7, 0xcb, 0x4c, 0x31, 0xbb, 0x3c, 0xab, 0x3c, 0x25, 0xd6, 0x36, 0xc0, 0xe5, 0xf3, 0xb4, 0x0a, + 0x95, 0xbe, 0x5b, 0x7f, 0x4f, 0xff, 0x7d, 0x52, 0x27, 0xd6, 0x18, 0xee, 0x1f, 0x73, 0xa9, 0x66, + 0x56, 0x64, 0x0f, 0xbf, 0x9f, 0xa0, 0x54, 0xe9, 0xe8, 0x63, 0x96, 0x60, 0xa4, 0x8c, 0x8a, 0xb9, + 0xd1, 0x47, 0x00, 0x31, 0xf3, 0xd1, 0x53, 0x62, 0x8c, 0xf9, 0xce, 0x6c, 0xa4, 0x91, 0xaf, 0xd3, + 0x00, 0x7d, 0x08, 0xfa, 0xe2, 0x49, 0xfe, 0x1a, 0xf5, 0xd2, 0xac, 0xf5, 0x6e, 0xa5, 0x81, 0x57, + 0xfc, 0x35, 0x5a, 0xe7, 0xd0, 0x98, 0x17, 0x93, 0xb1, 0x88, 0x24, 0xd2, 0x03, 0x58, 0x37, 0x1f, + 0xc3, 0x26, 0xd1, 0x33, 0xdd, 0xbe, 0xaa, 0x5f, 0xbd, 0x9c, 0x4c, 0x77, 0xa1, 0x16, 0xe1, 0xb9, + 0xf2, 0x16, 0x2c, 0xdd, 0x4e, 0xc3, 0x27, 0xb9, 0x2d, 0xeb, 0x00, 0xee, 0xbe, 0xc0, 0x4b, 0xe1, + 0xbc, 0xc8, 0x1d, 0xd8, 0x34, 0x7b, 0x5c, 0xf8, 0x70, 0x40, 0x16, 0xfa, 0x92, 0x85, 0x68, 0x9d, + 0x41, 0xe3, 0x79, 0x82, 0x4c, 0xe1, 0x42, 0xea, 0xff, 0xf5, 0xe7, 0x63, 0xa8, 0x66, 0xf9, 0xda, + 0xc8, 0xb2, 0x42, 0x0c, 0xd7, 0x12, 0xd0, 0xf8, 0x26, 0x1e, 0x95, 0xe9, 0x2c, 0xb3, 0x78, 0x43, + 0xc1, 0x4f, 0xa0, 0x71, 0x88, 0x01, 0xde, 0x40, 0xd0, 0xfd, 0x7b, 0x0d, 0xea, 0x66, 0x7e, 0xaf, + 0x30, 0x99, 0xf2, 0x21, 0xf6, 0x5d, 0xfa, 0x33, 0x81, 0xad, 0x77, 0x67, 0x4b, 0x3b, 0x65, 0x46, + 0xca, 0x76, 0xad, 0xf5, 0xf8, 0x1a, 0xcc, 0x6c, 0x51, 0xac, 0xbd, 0x1f, 0xff, 0xfa, 0xe7, 0xd7, + 0xca, 0x07, 0x74, 0x27, 0xfd, 0x0f, 0xfe, 0x43, 0xd6, 0xf3, 0xcf, 0xe2, 0x44, 0x7c, 0x87, 0x43, + 0x25, 0x9d, 0xfd, 0x37, 0x4e, 0xbe, 0x19, 0x6f, 0x09, 0xbc, 0x5f, 0x1c, 0x39, 0xdd, 0x2d, 0x11, + 0x29, 0xd9, 0x89, 0xd6, 0x95, 0xfd, 0xb3, 0x6c, 0xad, 0xdf, 0xa1, 0xbb, 0x5a, 0xbf, 0xd0, 0xa8, + 0x82, 0x89, 0xdc, 0x83, 0xb3, 0xff, 0x86, 0xfe, 0x44, 0xa0, 0x36, 0xb7, 0x41, 0xb4, 0xac, 0xdc, + 0xf2, 0x2d, 0x5b, 0x62, 0xc6, 0xd1, 0x66, 0x1e, 0x5b, 0xcb, 0x9a, 0xf1, 0xcc, 0x4c, 0x9d, 0xfe, + 0x42, 0xa0, 0x36, 0xb7, 0x67, 0xa5, 0x6e, 0xca, 0x77, 0x71, 0x89, 0x9b, 0x03, 0xed, 0xe6, 0xc3, + 0xd6, 0x35, 0x5b, 0x33, 0x33, 0xf5, 0x96, 0x40, 0x6d, 0x6e, 0x17, 0x4b, 0x4d, 0x95, 0xef, 0x6b, + 0xab, 0x91, 0x53, 0xf3, 0x6f, 0x42, 0xfb, 0x28, 0xfd, 0x22, 0xcd, 0x27, 0xb5, 0x7f, 0x4d, 0x3b, + 0xdd, 0xdf, 0x08, 0xdc, 0x1f, 0x8a, 0x70, 0x51, 0xb8, 0x7b, 0xf7, 0x38, 0x3b, 0x9b, 0x5d, 0x3c, + 0x49, 0x75, 0x4e, 0xc8, 0xb7, 0x4f, 0x0d, 0xd3, 0x17, 0x01, 0x8b, 0x7c, 
0x5b, 0x24, 0xbe, 0xe3, + 0x63, 0xa4, 0x5d, 0x38, 0x19, 0xc4, 0x62, 0x2e, 0x0b, 0xbf, 0x38, 0x3e, 0x35, 0xc7, 0x7f, 0x09, + 0xf9, 0xa3, 0xf2, 0xe0, 0x45, 0x96, 0xfd, 0x3c, 0x10, 0x93, 0x91, 0x6d, 0x14, 0xec, 0xbe, 0xfb, + 0x67, 0x8e, 0x9c, 0x6a, 0xe4, 0xd4, 0x20, 0xa7, 0x7d, 0x77, 0x50, 0xd5, 0x6f, 0x7f, 0xf4, 0x5f, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xca, 0x84, 0x19, 0x3d, 0xcc, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/longrunning/operations.pb.go b/vendor/google.golang.org/genproto/googleapis/longrunning/operations.pb.go new file mode 100644 index 0000000..b725657 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/longrunning/operations.pb.go @@ -0,0 +1,926 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/longrunning/operations.proto + +package longrunning // import "google.golang.org/genproto/googleapis/longrunning" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// This resource represents a long-running operation that is the result of a +// network API call. +type Operation struct { + // The server-assigned name, which is only unique within the same service that + // originally returns it. If you use the default HTTP mapping, the + // `name` should have the format of `operations/some/unique/name`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Service-specific metadata associated with the operation. It typically + // contains progress information and common metadata such as create time. + // Some services might not provide such metadata. Any method that returns a + // long-running operation should document the metadata type, if any. + Metadata *any.Any `protobuf:"bytes,2,opt,name=metadata,proto3" json:"metadata,omitempty"` + // If the value is `false`, it means the operation is still in progress. + // If `true`, the operation is completed, and either `error` or `response` is + // available. + Done bool `protobuf:"varint,3,opt,name=done,proto3" json:"done,omitempty"` + // The operation result, which can be either an `error` or a valid `response`. + // If `done` == `false`, neither `error` nor `response` is set. + // If `done` == `true`, exactly one of `error` or `response` is set. 
+ // + // Types that are valid to be assigned to Result: + // *Operation_Error + // *Operation_Response + Result isOperation_Result `protobuf_oneof:"result"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Operation) Reset() { *m = Operation{} } +func (m *Operation) String() string { return proto.CompactTextString(m) } +func (*Operation) ProtoMessage() {} +func (*Operation) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{0} +} +func (m *Operation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Operation.Unmarshal(m, b) +} +func (m *Operation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Operation.Marshal(b, m, deterministic) +} +func (dst *Operation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Operation.Merge(dst, src) +} +func (m *Operation) XXX_Size() int { + return xxx_messageInfo_Operation.Size(m) +} +func (m *Operation) XXX_DiscardUnknown() { + xxx_messageInfo_Operation.DiscardUnknown(m) +} + +var xxx_messageInfo_Operation proto.InternalMessageInfo + +func (m *Operation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Operation) GetMetadata() *any.Any { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Operation) GetDone() bool { + if m != nil { + return m.Done + } + return false +} + +type isOperation_Result interface { + isOperation_Result() +} + +type Operation_Error struct { + Error *status.Status `protobuf:"bytes,4,opt,name=error,proto3,oneof"` +} + +type Operation_Response struct { + Response *any.Any `protobuf:"bytes,5,opt,name=response,proto3,oneof"` +} + +func (*Operation_Error) isOperation_Result() {} + +func (*Operation_Response) isOperation_Result() {} + +func (m *Operation) GetResult() isOperation_Result { + if m != nil { + return m.Result + } + return nil +} + +func (m *Operation) GetError() *status.Status { + if x, ok := m.GetResult().(*Operation_Error); ok { + return x.Error + } + return nil +} + +func (m *Operation) GetResponse() *any.Any { + if x, ok := m.GetResult().(*Operation_Response); ok { + return x.Response + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Operation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Operation_OneofMarshaler, _Operation_OneofUnmarshaler, _Operation_OneofSizer, []interface{}{ + (*Operation_Error)(nil), + (*Operation_Response)(nil), + } +} + +func _Operation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Operation) + // result + switch x := m.Result.(type) { + case *Operation_Error: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Error); err != nil { + return err + } + case *Operation_Response: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Response); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Operation.Result has unexpected type %T", x) + } + return nil +} + +func _Operation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Operation) + switch tag { + case 4: // result.error + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(status.Status) + err := b.DecodeMessage(msg) + m.Result = &Operation_Error{msg} + return true, err + case 5: // result.response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.Result = &Operation_Response{msg} + return true, err + default: + return false, nil + } +} + +func _Operation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Operation) + // result + switch x := m.Result.(type) { + case *Operation_Error: + s := proto.Size(x.Error) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Operation_Response: + s := proto.Size(x.Response) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation]. +type GetOperationRequest struct { + // The name of the operation resource. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetOperationRequest) Reset() { *m = GetOperationRequest{} } +func (m *GetOperationRequest) String() string { return proto.CompactTextString(m) } +func (*GetOperationRequest) ProtoMessage() {} +func (*GetOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{1} +} +func (m *GetOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetOperationRequest.Unmarshal(m, b) +} +func (m *GetOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetOperationRequest.Marshal(b, m, deterministic) +} +func (dst *GetOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetOperationRequest.Merge(dst, src) +} +func (m *GetOperationRequest) XXX_Size() int { + return xxx_messageInfo_GetOperationRequest.Size(m) +} +func (m *GetOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetOperationRequest proto.InternalMessageInfo + +func (m *GetOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +type ListOperationsRequest struct { + // The name of the operation's parent resource. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The standard list filter. + Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + // The standard list page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The standard list page token. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsRequest) Reset() { *m = ListOperationsRequest{} } +func (m *ListOperationsRequest) String() string { return proto.CompactTextString(m) } +func (*ListOperationsRequest) ProtoMessage() {} +func (*ListOperationsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{2} +} +func (m *ListOperationsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsRequest.Unmarshal(m, b) +} +func (m *ListOperationsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsRequest.Marshal(b, m, deterministic) +} +func (dst *ListOperationsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsRequest.Merge(dst, src) +} +func (m *ListOperationsRequest) XXX_Size() int { + return xxx_messageInfo_ListOperationsRequest.Size(m) +} +func (m *ListOperationsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsRequest proto.InternalMessageInfo + +func (m *ListOperationsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListOperationsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListOperationsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListOperationsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +type ListOperationsResponse struct { + // A list of operations that matches the specified filter in the request. + Operations []*Operation `protobuf:"bytes,1,rep,name=operations,proto3" json:"operations,omitempty"` + // The standard List next-page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListOperationsResponse) Reset() { *m = ListOperationsResponse{} } +func (m *ListOperationsResponse) String() string { return proto.CompactTextString(m) } +func (*ListOperationsResponse) ProtoMessage() {} +func (*ListOperationsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{3} +} +func (m *ListOperationsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListOperationsResponse.Unmarshal(m, b) +} +func (m *ListOperationsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListOperationsResponse.Marshal(b, m, deterministic) +} +func (dst *ListOperationsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListOperationsResponse.Merge(dst, src) +} +func (m *ListOperationsResponse) XXX_Size() int { + return xxx_messageInfo_ListOperationsResponse.Size(m) +} +func (m *ListOperationsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListOperationsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListOperationsResponse proto.InternalMessageInfo + +func (m *ListOperationsResponse) GetOperations() []*Operation { + if m != nil { + return m.Operations + } + return nil +} + +func (m *ListOperationsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]. +type CancelOperationRequest struct { + // The name of the operation resource to be cancelled. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelOperationRequest) Reset() { *m = CancelOperationRequest{} } +func (m *CancelOperationRequest) String() string { return proto.CompactTextString(m) } +func (*CancelOperationRequest) ProtoMessage() {} +func (*CancelOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{4} +} +func (m *CancelOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelOperationRequest.Unmarshal(m, b) +} +func (m *CancelOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelOperationRequest.Marshal(b, m, deterministic) +} +func (dst *CancelOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelOperationRequest.Merge(dst, src) +} +func (m *CancelOperationRequest) XXX_Size() int { + return xxx_messageInfo_CancelOperationRequest.Size(m) +} +func (m *CancelOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelOperationRequest proto.InternalMessageInfo + +func (m *CancelOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation]. +type DeleteOperationRequest struct { + // The name of the operation resource to be deleted. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteOperationRequest) Reset() { *m = DeleteOperationRequest{} } +func (m *DeleteOperationRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteOperationRequest) ProtoMessage() {} +func (*DeleteOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{5} +} +func (m *DeleteOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteOperationRequest.Unmarshal(m, b) +} +func (m *DeleteOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteOperationRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteOperationRequest.Merge(dst, src) +} +func (m *DeleteOperationRequest) XXX_Size() int { + return xxx_messageInfo_DeleteOperationRequest.Size(m) +} +func (m *DeleteOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteOperationRequest proto.InternalMessageInfo + +func (m *DeleteOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for [Operations.WaitOperation][google.longrunning.Operations.WaitOperation]. +type WaitOperationRequest struct { + // The name of the operation resource to wait on. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The maximum duration to wait before timing out. If left blank, the wait + // will be at most the time permitted by the underlying HTTP/RPC protocol. + // If RPC context deadline is also specified, the shorter one will be used. + Timeout *duration.Duration `protobuf:"bytes,2,opt,name=timeout,proto3" json:"timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WaitOperationRequest) Reset() { *m = WaitOperationRequest{} } +func (m *WaitOperationRequest) String() string { return proto.CompactTextString(m) } +func (*WaitOperationRequest) ProtoMessage() {} +func (*WaitOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{6} +} +func (m *WaitOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WaitOperationRequest.Unmarshal(m, b) +} +func (m *WaitOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WaitOperationRequest.Marshal(b, m, deterministic) +} +func (dst *WaitOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_WaitOperationRequest.Merge(dst, src) +} +func (m *WaitOperationRequest) XXX_Size() int { + return xxx_messageInfo_WaitOperationRequest.Size(m) +} +func (m *WaitOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_WaitOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_WaitOperationRequest proto.InternalMessageInfo + +func (m *WaitOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WaitOperationRequest) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +// A message representing the message types used by a long-running operation. 
+// +// Example: +// +// rpc LongRunningRecognize(LongRunningRecognizeRequest) +// returns (google.longrunning.Operation) { +// option (google.longrunning.operation_info) = { +// response_type: "LongRunningRecognizeResponse" +// metadata_type: "LongRunningRecognizeMetadata" +// }; +// } +type OperationInfo struct { + // Required. The message name of the primary return type for this + // long-running operation. + // This type will be used to deserialize the LRO's response. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + ResponseType string `protobuf:"bytes,1,opt,name=response_type,json=responseType,proto3" json:"response_type,omitempty"` + // Required. The message name of the metadata type for this long-running + // operation. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + MetadataType string `protobuf:"bytes,2,opt,name=metadata_type,json=metadataType,proto3" json:"metadata_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OperationInfo) Reset() { *m = OperationInfo{} } +func (m *OperationInfo) String() string { return proto.CompactTextString(m) } +func (*OperationInfo) ProtoMessage() {} +func (*OperationInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_operations_6d5856629ff294fa, []int{7} +} +func (m *OperationInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OperationInfo.Unmarshal(m, b) +} +func (m *OperationInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OperationInfo.Marshal(b, m, deterministic) +} +func (dst *OperationInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_OperationInfo.Merge(dst, src) +} +func (m *OperationInfo) XXX_Size() int { + return xxx_messageInfo_OperationInfo.Size(m) +} +func (m *OperationInfo) XXX_DiscardUnknown() { + xxx_messageInfo_OperationInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_OperationInfo proto.InternalMessageInfo + +func (m *OperationInfo) GetResponseType() string { + if m != nil { + return m.ResponseType + } + return "" +} + +func (m *OperationInfo) GetMetadataType() string { + if m != nil { + return m.MetadataType + } + return "" +} + +var E_OperationInfo = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.MethodOptions)(nil), + ExtensionType: (*OperationInfo)(nil), + Field: 1049, + Name: "google.longrunning.operation_info", + Tag: "bytes,1049,opt,name=operation_info,json=operationInfo", + Filename: "google/longrunning/operations.proto", +} + +func init() { + proto.RegisterType((*Operation)(nil), "google.longrunning.Operation") + proto.RegisterType((*GetOperationRequest)(nil), "google.longrunning.GetOperationRequest") + proto.RegisterType((*ListOperationsRequest)(nil), "google.longrunning.ListOperationsRequest") + proto.RegisterType((*ListOperationsResponse)(nil), "google.longrunning.ListOperationsResponse") + proto.RegisterType((*CancelOperationRequest)(nil), "google.longrunning.CancelOperationRequest") + proto.RegisterType((*DeleteOperationRequest)(nil), "google.longrunning.DeleteOperationRequest") + proto.RegisterType((*WaitOperationRequest)(nil), "google.longrunning.WaitOperationRequest") + proto.RegisterType((*OperationInfo)(nil), 
"google.longrunning.OperationInfo") + proto.RegisterExtension(E_OperationInfo) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// OperationsClient is the client API for Operations service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OperationsClient interface { + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns `UNIMPLEMENTED`. + // + // NOTE: the `name` binding allows API services to override the binding + // to use different resource name schemes, such as `users/*/operations`. To + // override the binding, API services can add a binding such as + // `"/v1/{name=users/*}/operations"` to their service configuration. + // For backwards compatibility, the default name includes the operations + // collection id, however overriding users must ensure the name binding + // is the parent resource, without the operations collection id. + ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) + // Gets the latest state of a long-running operation. Clients can use this + // method to poll the operation result at intervals as recommended by the API + // service. + GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) + // Deletes a long-running operation. This method indicates that the client is + // no longer interested in the operation result. It does not cancel the + // operation. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + DeleteOperation(ctx context.Context, in *DeleteOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. Clients can use + // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or + // other methods to check whether the cancellation succeeded or whether the + // operation completed despite cancellation. On successful cancellation, + // the operation is not deleted; instead, it becomes an operation with + // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1, + // corresponding to `Code.CANCELLED`. + CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Waits for the specified long-running operation until it is done or reaches + // at most a specified timeout, returning the latest state. If the operation + // is already done, the latest state is immediately returned. If the timeout + // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + // timeout is used. If the server does not support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + // Note that this method is on a best-effort basis. 
It may return the latest + // state before the specified timeout (including immediately), meaning even an + // immediate response is no guarantee that the operation is done. + WaitOperation(ctx context.Context, in *WaitOperationRequest, opts ...grpc.CallOption) (*Operation, error) +} + +type operationsClient struct { + cc *grpc.ClientConn +} + +func NewOperationsClient(cc *grpc.ClientConn) OperationsClient { + return &operationsClient{cc} +} + +func (c *operationsClient) ListOperations(ctx context.Context, in *ListOperationsRequest, opts ...grpc.CallOption) (*ListOperationsResponse, error) { + out := new(ListOperationsResponse) + err := c.cc.Invoke(ctx, "/google.longrunning.Operations/ListOperations", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *operationsClient) GetOperation(ctx context.Context, in *GetOperationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.longrunning.Operations/GetOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *operationsClient) DeleteOperation(ctx context.Context, in *DeleteOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.longrunning.Operations/DeleteOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *operationsClient) CancelOperation(ctx context.Context, in *CancelOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.longrunning.Operations/CancelOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *operationsClient) WaitOperation(ctx context.Context, in *WaitOperationRequest, opts ...grpc.CallOption) (*Operation, error) { + out := new(Operation) + err := c.cc.Invoke(ctx, "/google.longrunning.Operations/WaitOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// OperationsServer is the server API for Operations service. +type OperationsServer interface { + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns `UNIMPLEMENTED`. + // + // NOTE: the `name` binding allows API services to override the binding + // to use different resource name schemes, such as `users/*/operations`. To + // override the binding, API services can add a binding such as + // `"/v1/{name=users/*}/operations"` to their service configuration. + // For backwards compatibility, the default name includes the operations + // collection id, however overriding users must ensure the name binding + // is the parent resource, without the operations collection id. + ListOperations(context.Context, *ListOperationsRequest) (*ListOperationsResponse, error) + // Gets the latest state of a long-running operation. Clients can use this + // method to poll the operation result at intervals as recommended by the API + // service. + GetOperation(context.Context, *GetOperationRequest) (*Operation, error) + // Deletes a long-running operation. This method indicates that the client is + // no longer interested in the operation result. It does not cancel the + // operation. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. 
+ DeleteOperation(context.Context, *DeleteOperationRequest) (*empty.Empty, error) + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. Clients can use + // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or + // other methods to check whether the cancellation succeeded or whether the + // operation completed despite cancellation. On successful cancellation, + // the operation is not deleted; instead, it becomes an operation with + // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1, + // corresponding to `Code.CANCELLED`. + CancelOperation(context.Context, *CancelOperationRequest) (*empty.Empty, error) + // Waits for the specified long-running operation until it is done or reaches + // at most a specified timeout, returning the latest state. If the operation + // is already done, the latest state is immediately returned. If the timeout + // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + // timeout is used. If the server does not support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + // Note that this method is on a best-effort basis. It may return the latest + // state before the specified timeout (including immediately), meaning even an + // immediate response is no guarantee that the operation is done. + WaitOperation(context.Context, *WaitOperationRequest) (*Operation, error) +} + +func RegisterOperationsServer(s *grpc.Server, srv OperationsServer) { + s.RegisterService(&_Operations_serviceDesc, srv) +} + +func _Operations_ListOperations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListOperationsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperationsServer).ListOperations(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.longrunning.Operations/ListOperations", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperationsServer).ListOperations(ctx, req.(*ListOperationsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Operations_GetOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperationsServer).GetOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.longrunning.Operations/GetOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperationsServer).GetOperation(ctx, req.(*GetOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Operations_DeleteOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperationsServer).DeleteOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.longrunning.Operations/DeleteOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperationsServer).DeleteOperation(ctx, req.(*DeleteOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Operations_CancelOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperationsServer).CancelOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.longrunning.Operations/CancelOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperationsServer).CancelOperation(ctx, req.(*CancelOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Operations_WaitOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WaitOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(OperationsServer).WaitOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.longrunning.Operations/WaitOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(OperationsServer).WaitOperation(ctx, req.(*WaitOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Operations_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.longrunning.Operations", + HandlerType: (*OperationsServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListOperations", + Handler: _Operations_ListOperations_Handler, + }, + { + MethodName: "GetOperation", + Handler: _Operations_GetOperation_Handler, + }, + { + MethodName: "DeleteOperation", + Handler: _Operations_DeleteOperation_Handler, + }, + { + MethodName: "CancelOperation", + Handler: _Operations_CancelOperation_Handler, + }, + { + MethodName: "WaitOperation", + Handler: _Operations_WaitOperation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/longrunning/operations.proto", +} + +func init() { + proto.RegisterFile("google/longrunning/operations.proto", fileDescriptor_operations_6d5856629ff294fa) +} + +var fileDescriptor_operations_6d5856629ff294fa = []byte{ + // 741 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0xcd, 0x4e, 0x13, 0x51, + 0x14, 0x66, 0x4a, 0xc1, 0xf6, 0x40, 0x69, 0x72, 0x85, 0x52, 0x8a, 0x60, 0x1d, 0x8c, 0x96, 0x86, + 0xcc, 0x68, 0xd9, 0x61, 0x58, 0x88, 0x18, 0x30, 0xc1, 0x40, 0x06, 0x12, 0x23, 0x31, 0x69, 0x86, + 0xf6, 0x30, 0x4e, 0x6c, 0xef, 0x1d, 0xef, 0xdc, 0x51, 0x8a, 0x21, 0x44, 0x17, 0xbe, 0x80, 0x0b, + 0xe3, 0x2b, 0xf8, 0x28, 0x6e, 0x5c, 0xf8, 0x0a, 0x2e, 0x7c, 0x04, 0x97, 0x66, 0xee, 0xcc, 0xb4, + 0x43, 0x3b, 0xc5, 0xee, 0xe6, 0x9e, 0xf3, 0xdd, 0xef, 0x3b, 0x3f, 0xdf, 0x6d, 0x61, 0xc5, 0x62, + 0xcc, 0x6a, 0xa1, 0xde, 0x62, 0xd4, 0xe2, 0x1e, 0xa5, 0x36, 0xb5, 0x74, 0xe6, 0x20, 0x37, 0x85, + 0xcd, 0xa8, 0xab, 0x39, 0x9c, 0x09, 0x46, 0x48, 0x00, 0xd2, 0x62, 0xa0, 0xd2, 0xad, 0xf0, 0xa2, + 0xe9, 0xd8, 0xba, 0x49, 0x29, 0x13, 0xf1, 0x1b, 0xa5, 0x85, 0x30, 0x2b, 0x4f, 0x27, 0xde, 0xa9, + 0x6e, 0xd2, 0x4e, 0x98, 0x5a, 0xee, 0x4f, 0x35, 0xbd, 0x40, 0x2d, 0xcc, 0x2f, 0xf6, 0xe7, 0xb1, + 0xed, 0x88, 
0xe8, 0xf2, 0x7c, 0x98, 0xe4, 0x4e, 0x43, 0x77, 0x85, 0x29, 0xbc, 0x48, 0xb0, 0x3c, + 0xc0, 0x8a, 0x6e, 0x83, 0xdb, 0x8e, 0x60, 0x3c, 0x40, 0xa8, 0x3f, 0x15, 0xc8, 0xee, 0x47, 0x9d, + 0x11, 0x02, 0x69, 0x6a, 0xb6, 0xb1, 0xa8, 0x94, 0x95, 0x4a, 0xd6, 0x90, 0xdf, 0xe4, 0x01, 0x64, + 0xda, 0x28, 0xcc, 0xa6, 0x29, 0xcc, 0x62, 0xaa, 0xac, 0x54, 0xa6, 0x6a, 0xb3, 0x5a, 0xd8, 0x79, + 0x44, 0xab, 0x3d, 0xa6, 0x1d, 0xa3, 0x8b, 0xf2, 0x59, 0x9a, 0x8c, 0x62, 0x71, 0xbc, 0xac, 0x54, + 0x32, 0x86, 0xfc, 0x26, 0x55, 0x98, 0x40, 0xce, 0x19, 0x2f, 0xa6, 0x25, 0x05, 0x89, 0x28, 0xb8, + 0xd3, 0xd0, 0x0e, 0x65, 0xc9, 0xbb, 0x63, 0x46, 0x00, 0x21, 0x35, 0xc8, 0x70, 0x74, 0x1d, 0x46, + 0x5d, 0x2c, 0x4e, 0x0c, 0x57, 0xdc, 0x1d, 0x33, 0xba, 0xb8, 0xad, 0x0c, 0x4c, 0x72, 0x74, 0xbd, + 0x96, 0x50, 0x57, 0xe1, 0xe6, 0x0e, 0x8a, 0x6e, 0x4f, 0x06, 0xbe, 0xf5, 0xd0, 0x15, 0x49, 0xad, + 0xa9, 0x97, 0x30, 0xb7, 0x67, 0xbb, 0x3d, 0xac, 0xdb, 0x0f, 0x4e, 0xc7, 0xe6, 0x50, 0x80, 0xc9, + 0x53, 0xbb, 0x25, 0x90, 0x87, 0x14, 0xe1, 0x89, 0x2c, 0x42, 0xd6, 0x31, 0x2d, 0xac, 0xbb, 0xf6, + 0x39, 0xca, 0x01, 0x4d, 0x18, 0x19, 0x3f, 0x70, 0x68, 0x9f, 0x23, 0x59, 0x02, 0x90, 0x49, 0xc1, + 0xde, 0x20, 0x95, 0x03, 0xc9, 0x1a, 0x12, 0x7e, 0xe4, 0x07, 0xd4, 0x4b, 0x28, 0xf4, 0x17, 0x10, + 0xf4, 0x43, 0x36, 0x01, 0x7a, 0x86, 0x2b, 0x2a, 0xe5, 0xf1, 0xca, 0x54, 0x6d, 0x49, 0x1b, 0x74, + 0x9c, 0xd6, 0x6b, 0x34, 0x76, 0x81, 0xdc, 0x83, 0x3c, 0xc5, 0x33, 0x51, 0x8f, 0x89, 0xa7, 0xa4, + 0x78, 0xce, 0x0f, 0x1f, 0x74, 0x0b, 0x58, 0x83, 0xc2, 0x13, 0x93, 0x36, 0xb0, 0x35, 0xd2, 0xbc, + 0xd6, 0xa0, 0xb0, 0x8d, 0x2d, 0x14, 0x38, 0x12, 0xba, 0x0e, 0xb3, 0x2f, 0x4c, 0x7b, 0xa4, 0x4d, + 0x90, 0x75, 0xb8, 0x21, 0xec, 0x36, 0x32, 0x4f, 0x84, 0x1e, 0x5b, 0x18, 0xd8, 0xf8, 0x76, 0xf8, + 0x20, 0x8c, 0x08, 0xa9, 0xbe, 0x84, 0x5c, 0x97, 0xfc, 0x19, 0x3d, 0x65, 0x64, 0x05, 0x72, 0x91, + 0x21, 0xea, 0xa2, 0xe3, 0x44, 0x12, 0xd3, 0x51, 0xf0, 0xa8, 0xe3, 0xa0, 0x0f, 0x8a, 0x9c, 0x1a, + 0x80, 0x82, 0xc1, 0x4c, 0x47, 0x41, 0x1f, 0x54, 0xfb, 0x93, 0x06, 0xe8, 0x6d, 0x85, 0x7c, 0x56, + 0x60, 0xe6, 0xea, 0xa2, 0xc8, 0x6a, 0xd2, 0x32, 0x12, 0xdd, 0x54, 0xaa, 0x8e, 0x02, 0x0d, 0x2a, + 0x54, 0x97, 0x3e, 0xfd, 0xfa, 0xfd, 0x25, 0x35, 0x4f, 0xe6, 0xf4, 0x77, 0x0f, 0xf5, 0x0f, 0xfe, + 0x6c, 0x36, 0x7b, 0x6b, 0xbd, 0x20, 0x67, 0x30, 0x1d, 0x37, 0x37, 0xb9, 0x9f, 0x44, 0x9d, 0x60, + 0xff, 0xd2, 0xf5, 0xde, 0x51, 0xcb, 0x52, 0xb6, 0x44, 0x8a, 0x49, 0xb2, 0x7a, 0xb5, 0x7a, 0x41, + 0xde, 0x43, 0xbe, 0x6f, 0xf7, 0x24, 0xb1, 0xaf, 0x64, 0x83, 0x94, 0x0a, 0x03, 0xfb, 0x7c, 0xea, + 0xff, 0x80, 0x45, 0xc2, 0xd5, 0xe1, 0xc2, 0x1f, 0x15, 0xc8, 0xf7, 0x79, 0x34, 0x59, 0x39, 0xd9, + 0xc8, 0x43, 0x95, 0xab, 0x52, 0xf9, 0xae, 0x7a, 0x7b, 0x98, 0xf2, 0x46, 0x43, 0x12, 0x6e, 0x28, + 0x55, 0x72, 0x0c, 0xb9, 0x2b, 0x56, 0x26, 0x95, 0xa4, 0x02, 0x92, 0xdc, 0xfe, 0xbf, 0xc1, 0x8f, + 0x6d, 0xd8, 0x30, 0xd3, 0xd5, 0xad, 0xdb, 0xbe, 0x8d, 0x97, 0x07, 0x2a, 0x7e, 0x8e, 0xe2, 0x35, + 0x6b, 0xee, 0x3b, 0xc1, 0xaf, 0xc1, 0xb7, 0x8c, 0x7c, 0x22, 0x77, 0xae, 0x65, 0xf6, 0x1f, 0x84, + 0x91, 0x63, 0xf1, 0xe3, 0xd6, 0x57, 0x05, 0x0a, 0x0d, 0xd6, 0x4e, 0xb8, 0xb7, 0x95, 0xef, 0x79, + 0xf1, 0xc0, 0x57, 0x3d, 0x50, 0x8e, 0x37, 0x43, 0x98, 0xc5, 0x5a, 0x26, 0xb5, 0x34, 0xc6, 0x2d, + 0xdd, 0x42, 0x2a, 0x6b, 0xd2, 0x83, 0x94, 0xe9, 0xd8, 0x6e, 0xfc, 0x3f, 0xf2, 0x51, 0xec, 0xfb, + 0xaf, 0xa2, 0x7c, 0x4f, 0x91, 0x9d, 0x80, 0x62, 0x8f, 0x51, 0xcb, 0x08, 0xe2, 0x3f, 0xa2, 0xe0, + 0xab, 0x58, 0xf0, 0x64, 0x52, 0xd2, 0xae, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x06, 0x86, 0x63, + 0xe5, 0x79, 0x07, 0x00, 0x00, +} diff 
--git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert.pb.go new file mode 100644 index 0000000..a2af65c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert.pb.go @@ -0,0 +1,962 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/alert.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Operators for combining conditions. +type AlertPolicy_ConditionCombinerType int32 + +const ( + // An unspecified combiner. + AlertPolicy_COMBINE_UNSPECIFIED AlertPolicy_ConditionCombinerType = 0 + // Combine conditions using the logical `AND` operator. An + // incident is created only if all conditions are met + // simultaneously. This combiner is satisfied if all conditions are + // met, even if they are met on completely different resources. + AlertPolicy_AND AlertPolicy_ConditionCombinerType = 1 + // Combine conditions using the logical `OR` operator. An incident + // is created if any of the listed conditions is met. + AlertPolicy_OR AlertPolicy_ConditionCombinerType = 2 + // Combine conditions using logical `AND` operator, but unlike the regular + // `AND` option, an incident is created only if all conditions are met + // simultaneously on at least one resource. + AlertPolicy_AND_WITH_MATCHING_RESOURCE AlertPolicy_ConditionCombinerType = 3 +) + +var AlertPolicy_ConditionCombinerType_name = map[int32]string{ + 0: "COMBINE_UNSPECIFIED", + 1: "AND", + 2: "OR", + 3: "AND_WITH_MATCHING_RESOURCE", +} +var AlertPolicy_ConditionCombinerType_value = map[string]int32{ + "COMBINE_UNSPECIFIED": 0, + "AND": 1, + "OR": 2, + "AND_WITH_MATCHING_RESOURCE": 3, +} + +func (x AlertPolicy_ConditionCombinerType) String() string { + return proto.EnumName(AlertPolicy_ConditionCombinerType_name, int32(x)) +} +func (AlertPolicy_ConditionCombinerType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 0} +} + +// A description of the conditions under which some aspect of your system is +// considered to be "unhealthy" and the ways to notify people or services about +// this state. For an overview of alert policies, see +// [Introduction to Alerting](/monitoring/alerts/). +type AlertPolicy struct { + // Required if the policy exists. The resource name for this policy. The + // syntax is: + // + // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] + // + // `[ALERT_POLICY_ID]` is assigned by Stackdriver Monitoring when the policy + // is created. When calling the + // [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] + // method, do not include the `name` field in the alerting policy passed as + // part of the request. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A short name or phrase used to identify the policy in dashboards, + // notifications, and incidents. To avoid confusion, don't use the same + // display name for multiple policies in the same project. The name is + // limited to 512 Unicode characters. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Documentation that is included with notifications and incidents related to + // this policy. Best practice is for the documentation to include information + // to help responders understand, mitigate, escalate, and correct the + // underlying problems detected by the alerting policy. Notification channels + // that have limited capacity might not show this documentation. + Documentation *AlertPolicy_Documentation `protobuf:"bytes,13,opt,name=documentation,proto3" json:"documentation,omitempty"` + // User-supplied key/value data to be used for organizing and + // identifying the `AlertPolicy` objects. + // + // The field can contain up to 64 entries. Each key and value is limited to + // 63 Unicode characters or 128 bytes, whichever is smaller. Labels and + // values can contain only lowercase letters, numerals, underscores, and + // dashes. Keys must begin with a letter. + UserLabels map[string]string `protobuf:"bytes,16,rep,name=user_labels,json=userLabels,proto3" json:"user_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // A list of conditions for the policy. The conditions are combined by AND or + // OR according to the `combiner` field. If the combined conditions evaluate + // to true, then an incident is created. A policy can have from one to six + // conditions. + Conditions []*AlertPolicy_Condition `protobuf:"bytes,12,rep,name=conditions,proto3" json:"conditions,omitempty"` + // How to combine the results of multiple conditions + // to determine if an incident should be opened. + Combiner AlertPolicy_ConditionCombinerType `protobuf:"varint,6,opt,name=combiner,proto3,enum=google.monitoring.v3.AlertPolicy_ConditionCombinerType" json:"combiner,omitempty"` + // Whether or not the policy is enabled. On write, the default interpretation + // if unset is that the policy is enabled. On read, clients should not make + // any assumption about the state if it has not been populated. The + // field should always be populated on List and Get operations, unless + // a field projection has been specified that strips it out. + Enabled *wrappers.BoolValue `protobuf:"bytes,17,opt,name=enabled,proto3" json:"enabled,omitempty"` + // Identifies the notification channels to which notifications should be sent + // when incidents are opened or closed or when new violations occur on + // an already opened incident. Each element of this array corresponds to + // the `name` field in each of the + // [`NotificationChannel`][google.monitoring.v3.NotificationChannel] + // objects that are returned from the [`ListNotificationChannels`] + // [google.monitoring.v3.NotificationChannelService.ListNotificationChannels] + // method. The syntax of the entries in this field is: + // + // projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] + NotificationChannels []string `protobuf:"bytes,14,rep,name=notification_channels,json=notificationChannels,proto3" json:"notification_channels,omitempty"` + // A read-only record of the creation of the alerting policy. 
If provided + // in a call to create or update, this field will be ignored. + CreationRecord *MutationRecord `protobuf:"bytes,10,opt,name=creation_record,json=creationRecord,proto3" json:"creation_record,omitempty"` + // A read-only record of the most recent change to the alerting policy. If + // provided in a call to create or update, this field will be ignored. + MutationRecord *MutationRecord `protobuf:"bytes,11,opt,name=mutation_record,json=mutationRecord,proto3" json:"mutation_record,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy) Reset() { *m = AlertPolicy{} } +func (m *AlertPolicy) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy) ProtoMessage() {} +func (*AlertPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0} +} +func (m *AlertPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy.Unmarshal(m, b) +} +func (m *AlertPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy.Merge(dst, src) +} +func (m *AlertPolicy) XXX_Size() int { + return xxx_messageInfo_AlertPolicy.Size(m) +} +func (m *AlertPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy proto.InternalMessageInfo + +func (m *AlertPolicy) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AlertPolicy) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *AlertPolicy) GetDocumentation() *AlertPolicy_Documentation { + if m != nil { + return m.Documentation + } + return nil +} + +func (m *AlertPolicy) GetUserLabels() map[string]string { + if m != nil { + return m.UserLabels + } + return nil +} + +func (m *AlertPolicy) GetConditions() []*AlertPolicy_Condition { + if m != nil { + return m.Conditions + } + return nil +} + +func (m *AlertPolicy) GetCombiner() AlertPolicy_ConditionCombinerType { + if m != nil { + return m.Combiner + } + return AlertPolicy_COMBINE_UNSPECIFIED +} + +func (m *AlertPolicy) GetEnabled() *wrappers.BoolValue { + if m != nil { + return m.Enabled + } + return nil +} + +func (m *AlertPolicy) GetNotificationChannels() []string { + if m != nil { + return m.NotificationChannels + } + return nil +} + +func (m *AlertPolicy) GetCreationRecord() *MutationRecord { + if m != nil { + return m.CreationRecord + } + return nil +} + +func (m *AlertPolicy) GetMutationRecord() *MutationRecord { + if m != nil { + return m.MutationRecord + } + return nil +} + +// A content string and a MIME type that describes the content string's +// format. +type AlertPolicy_Documentation struct { + // The text of the documentation, interpreted according to `mime_type`. + // The content may not exceed 8,192 Unicode characters and may not exceed + // more than 10,240 bytes when encoded in UTF-8 format, whichever is + // smaller. + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + // The format of the `content` field. Presently, only the value + // `"text/markdown"` is supported. See + // [Markdown](https://en.wikipedia.org/wiki/Markdown) for more information. 
+ MimeType string `protobuf:"bytes,2,opt,name=mime_type,json=mimeType,proto3" json:"mime_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy_Documentation) Reset() { *m = AlertPolicy_Documentation{} } +func (m *AlertPolicy_Documentation) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy_Documentation) ProtoMessage() {} +func (*AlertPolicy_Documentation) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 0} +} +func (m *AlertPolicy_Documentation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy_Documentation.Unmarshal(m, b) +} +func (m *AlertPolicy_Documentation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy_Documentation.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy_Documentation) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy_Documentation.Merge(dst, src) +} +func (m *AlertPolicy_Documentation) XXX_Size() int { + return xxx_messageInfo_AlertPolicy_Documentation.Size(m) +} +func (m *AlertPolicy_Documentation) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy_Documentation.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy_Documentation proto.InternalMessageInfo + +func (m *AlertPolicy_Documentation) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +func (m *AlertPolicy_Documentation) GetMimeType() string { + if m != nil { + return m.MimeType + } + return "" +} + +// A condition is a true/false test that determines when an alerting policy +// should open an incident. If a condition evaluates to true, it signifies +// that something is wrong. +type AlertPolicy_Condition struct { + // Required if the condition exists. The unique resource name for this + // condition. Its syntax is: + // + // projects/[PROJECT_ID]/alertPolicies/[POLICY_ID]/conditions/[CONDITION_ID] + // + // `[CONDITION_ID]` is assigned by Stackdriver Monitoring when the + // condition is created as part of a new or updated alerting policy. + // + // When calling the + // [alertPolicies.create][google.monitoring.v3.AlertPolicyService.CreateAlertPolicy] + // method, do not include the `name` field in the conditions of the + // requested alerting policy. Stackdriver Monitoring creates the + // condition identifiers and includes them in the new policy. + // + // When calling the + // [alertPolicies.update][google.monitoring.v3.AlertPolicyService.UpdateAlertPolicy] + // method to update a policy, including a condition `name` causes the + // existing condition to be updated. Conditions without names are added to + // the updated policy. Existing conditions are deleted if they are not + // updated. + // + // Best practice is to preserve `[CONDITION_ID]` if you make only small + // changes, such as those to condition thresholds, durations, or trigger + // values. Otherwise, treat the change as a new condition and let the + // existing condition be deleted. + Name string `protobuf:"bytes,12,opt,name=name,proto3" json:"name,omitempty"` + // A short name or phrase used to identify the condition in dashboards, + // notifications, and incidents. To avoid confusion, don't use the same + // display name for multiple conditions in the same policy. + DisplayName string `protobuf:"bytes,6,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Only one of the following condition types will be specified. 
+ // + // Types that are valid to be assigned to Condition: + // *AlertPolicy_Condition_ConditionThreshold + // *AlertPolicy_Condition_ConditionAbsent + Condition isAlertPolicy_Condition_Condition `protobuf_oneof:"condition"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy_Condition) Reset() { *m = AlertPolicy_Condition{} } +func (m *AlertPolicy_Condition) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy_Condition) ProtoMessage() {} +func (*AlertPolicy_Condition) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 1} +} +func (m *AlertPolicy_Condition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy_Condition.Unmarshal(m, b) +} +func (m *AlertPolicy_Condition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy_Condition.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy_Condition) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy_Condition.Merge(dst, src) +} +func (m *AlertPolicy_Condition) XXX_Size() int { + return xxx_messageInfo_AlertPolicy_Condition.Size(m) +} +func (m *AlertPolicy_Condition) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy_Condition.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy_Condition proto.InternalMessageInfo + +func (m *AlertPolicy_Condition) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *AlertPolicy_Condition) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +type isAlertPolicy_Condition_Condition interface { + isAlertPolicy_Condition_Condition() +} + +type AlertPolicy_Condition_ConditionThreshold struct { + ConditionThreshold *AlertPolicy_Condition_MetricThreshold `protobuf:"bytes,1,opt,name=condition_threshold,json=conditionThreshold,proto3,oneof"` +} + +type AlertPolicy_Condition_ConditionAbsent struct { + ConditionAbsent *AlertPolicy_Condition_MetricAbsence `protobuf:"bytes,2,opt,name=condition_absent,json=conditionAbsent,proto3,oneof"` +} + +func (*AlertPolicy_Condition_ConditionThreshold) isAlertPolicy_Condition_Condition() {} + +func (*AlertPolicy_Condition_ConditionAbsent) isAlertPolicy_Condition_Condition() {} + +func (m *AlertPolicy_Condition) GetCondition() isAlertPolicy_Condition_Condition { + if m != nil { + return m.Condition + } + return nil +} + +func (m *AlertPolicy_Condition) GetConditionThreshold() *AlertPolicy_Condition_MetricThreshold { + if x, ok := m.GetCondition().(*AlertPolicy_Condition_ConditionThreshold); ok { + return x.ConditionThreshold + } + return nil +} + +func (m *AlertPolicy_Condition) GetConditionAbsent() *AlertPolicy_Condition_MetricAbsence { + if x, ok := m.GetCondition().(*AlertPolicy_Condition_ConditionAbsent); ok { + return x.ConditionAbsent + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AlertPolicy_Condition) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AlertPolicy_Condition_OneofMarshaler, _AlertPolicy_Condition_OneofUnmarshaler, _AlertPolicy_Condition_OneofSizer, []interface{}{ + (*AlertPolicy_Condition_ConditionThreshold)(nil), + (*AlertPolicy_Condition_ConditionAbsent)(nil), + } +} + +func _AlertPolicy_Condition_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AlertPolicy_Condition) + // condition + switch x := m.Condition.(type) { + case *AlertPolicy_Condition_ConditionThreshold: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConditionThreshold); err != nil { + return err + } + case *AlertPolicy_Condition_ConditionAbsent: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ConditionAbsent); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AlertPolicy_Condition.Condition has unexpected type %T", x) + } + return nil +} + +func _AlertPolicy_Condition_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AlertPolicy_Condition) + switch tag { + case 1: // condition.condition_threshold + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AlertPolicy_Condition_MetricThreshold) + err := b.DecodeMessage(msg) + m.Condition = &AlertPolicy_Condition_ConditionThreshold{msg} + return true, err + case 2: // condition.condition_absent + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AlertPolicy_Condition_MetricAbsence) + err := b.DecodeMessage(msg) + m.Condition = &AlertPolicy_Condition_ConditionAbsent{msg} + return true, err + default: + return false, nil + } +} + +func _AlertPolicy_Condition_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AlertPolicy_Condition) + // condition + switch x := m.Condition.(type) { + case *AlertPolicy_Condition_ConditionThreshold: + s := proto.Size(x.ConditionThreshold) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AlertPolicy_Condition_ConditionAbsent: + s := proto.Size(x.ConditionAbsent) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Specifies how many time series must fail a predicate to trigger a +// condition. If not specified, then a `{count: 1}` trigger is used. +type AlertPolicy_Condition_Trigger struct { + // A type of trigger. 
+ // + // Types that are valid to be assigned to Type: + // *AlertPolicy_Condition_Trigger_Count + // *AlertPolicy_Condition_Trigger_Percent + Type isAlertPolicy_Condition_Trigger_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy_Condition_Trigger) Reset() { *m = AlertPolicy_Condition_Trigger{} } +func (m *AlertPolicy_Condition_Trigger) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy_Condition_Trigger) ProtoMessage() {} +func (*AlertPolicy_Condition_Trigger) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 1, 0} +} +func (m *AlertPolicy_Condition_Trigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy_Condition_Trigger.Unmarshal(m, b) +} +func (m *AlertPolicy_Condition_Trigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy_Condition_Trigger.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy_Condition_Trigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy_Condition_Trigger.Merge(dst, src) +} +func (m *AlertPolicy_Condition_Trigger) XXX_Size() int { + return xxx_messageInfo_AlertPolicy_Condition_Trigger.Size(m) +} +func (m *AlertPolicy_Condition_Trigger) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy_Condition_Trigger.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy_Condition_Trigger proto.InternalMessageInfo + +type isAlertPolicy_Condition_Trigger_Type interface { + isAlertPolicy_Condition_Trigger_Type() +} + +type AlertPolicy_Condition_Trigger_Count struct { + Count int32 `protobuf:"varint,1,opt,name=count,proto3,oneof"` +} + +type AlertPolicy_Condition_Trigger_Percent struct { + Percent float64 `protobuf:"fixed64,2,opt,name=percent,proto3,oneof"` +} + +func (*AlertPolicy_Condition_Trigger_Count) isAlertPolicy_Condition_Trigger_Type() {} + +func (*AlertPolicy_Condition_Trigger_Percent) isAlertPolicy_Condition_Trigger_Type() {} + +func (m *AlertPolicy_Condition_Trigger) GetType() isAlertPolicy_Condition_Trigger_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *AlertPolicy_Condition_Trigger) GetCount() int32 { + if x, ok := m.GetType().(*AlertPolicy_Condition_Trigger_Count); ok { + return x.Count + } + return 0 +} + +func (m *AlertPolicy_Condition_Trigger) GetPercent() float64 { + if x, ok := m.GetType().(*AlertPolicy_Condition_Trigger_Percent); ok { + return x.Percent + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*AlertPolicy_Condition_Trigger) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AlertPolicy_Condition_Trigger_OneofMarshaler, _AlertPolicy_Condition_Trigger_OneofUnmarshaler, _AlertPolicy_Condition_Trigger_OneofSizer, []interface{}{ + (*AlertPolicy_Condition_Trigger_Count)(nil), + (*AlertPolicy_Condition_Trigger_Percent)(nil), + } +} + +func _AlertPolicy_Condition_Trigger_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AlertPolicy_Condition_Trigger) + // type + switch x := m.Type.(type) { + case *AlertPolicy_Condition_Trigger_Count: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Count)) + case *AlertPolicy_Condition_Trigger_Percent: + b.EncodeVarint(2<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.Percent)) + case nil: + default: + return fmt.Errorf("AlertPolicy_Condition_Trigger.Type has unexpected type %T", x) + } + return nil +} + +func _AlertPolicy_Condition_Trigger_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AlertPolicy_Condition_Trigger) + switch tag { + case 1: // type.count + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Type = &AlertPolicy_Condition_Trigger_Count{int32(x)} + return true, err + case 2: // type.percent + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Type = &AlertPolicy_Condition_Trigger_Percent{math.Float64frombits(x)} + return true, err + default: + return false, nil + } +} + +func _AlertPolicy_Condition_Trigger_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AlertPolicy_Condition_Trigger) + // type + switch x := m.Type.(type) { + case *AlertPolicy_Condition_Trigger_Count: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Count)) + case *AlertPolicy_Condition_Trigger_Percent: + n += 1 // tag and wire + n += 8 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A condition type that compares a collection of time series +// against a threshold. +type AlertPolicy_Condition_MetricThreshold struct { + // A [filter](/monitoring/api/v3/filters) that + // identifies which time series should be compared with the threshold. + // + // The filter is similar to the one that is specified in the + // [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that + // call is useful to verify the time series that will be retrieved / + // processed) and must specify the metric type and optionally may contain + // restrictions on resource type, resource labels, and metric labels. + // This field may not exceed 2048 Unicode characters in length. + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // Specifies the alignment of data points in individual time series as + // well as how to combine the retrieved time series together (such as + // when aggregating multiple streams on each resource to a single + // stream for each resource or when aggregating streams across all + // members of a group of resrouces). Multiple aggregations + // are applied in the order specified. + // + // This field is similar to the one in the + // [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). 
It + // is advisable to use the `ListTimeSeries` method when debugging this + // field. + Aggregations []*Aggregation `protobuf:"bytes,8,rep,name=aggregations,proto3" json:"aggregations,omitempty"` + // A [filter](/monitoring/api/v3/filters) that identifies a time + // series that should be used as the denominator of a ratio that will be + // compared with the threshold. If a `denominator_filter` is specified, + // the time series specified by the `filter` field will be used as the + // numerator. + // + // The filter is similar to the one that is specified in the + // [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that + // call is useful to verify the time series that will be retrieved / + // processed) and must specify the metric type and optionally may contain + // restrictions on resource type, resource labels, and metric labels. + // This field may not exceed 2048 Unicode characters in length. + DenominatorFilter string `protobuf:"bytes,9,opt,name=denominator_filter,json=denominatorFilter,proto3" json:"denominator_filter,omitempty"` + // Specifies the alignment of data points in individual time series + // selected by `denominatorFilter` as + // well as how to combine the retrieved time series together (such as + // when aggregating multiple streams on each resource to a single + // stream for each resource or when aggregating streams across all + // members of a group of resources). + // + // When computing ratios, the `aggregations` and + // `denominator_aggregations` fields must use the same alignment period + // and produce time series that have the same periodicity and labels. + // + // This field is similar to the one in the + // [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It + // is advisable to use the `ListTimeSeries` method when debugging this + // field. + DenominatorAggregations []*Aggregation `protobuf:"bytes,10,rep,name=denominator_aggregations,json=denominatorAggregations,proto3" json:"denominator_aggregations,omitempty"` + // The comparison to apply between the time series (indicated by `filter` + // and `aggregation`) and the threshold (indicated by `threshold_value`). + // The comparison is applied on each time series, with the time series + // on the left-hand side and the threshold on the right-hand side. + // + // Only `COMPARISON_LT` and `COMPARISON_GT` are supported currently. + Comparison ComparisonType `protobuf:"varint,4,opt,name=comparison,proto3,enum=google.monitoring.v3.ComparisonType" json:"comparison,omitempty"` + // A value against which to compare the time series. + ThresholdValue float64 `protobuf:"fixed64,5,opt,name=threshold_value,json=thresholdValue,proto3" json:"threshold_value,omitempty"` + // The amount of time that a time series must violate the + // threshold to be considered failing. Currently, only values + // that are a multiple of a minute--e.g., 0, 60, 120, or 300 + // seconds--are supported. If an invalid value is given, an + // error will be returned. When choosing a duration, it is useful to + // keep in mind the frequency of the underlying time series data + // (which may also be affected by any alignments specified in the + // `aggregations` field); a good duration is long enough so that a single + // outlier does not generate spurious alerts, but short enough that + // unhealthy states are detected and alerted on quickly. 
+ Duration *duration.Duration `protobuf:"bytes,6,opt,name=duration,proto3" json:"duration,omitempty"` + // The number/percent of time series for which the comparison must hold + // in order for the condition to trigger. If unspecified, then the + // condition will trigger if the comparison is true for any of the + // time series that have been identified by `filter` and `aggregations`, + // or by the ratio, if `denominator_filter` and `denominator_aggregations` + // are specified. + Trigger *AlertPolicy_Condition_Trigger `protobuf:"bytes,7,opt,name=trigger,proto3" json:"trigger,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy_Condition_MetricThreshold) Reset() { *m = AlertPolicy_Condition_MetricThreshold{} } +func (m *AlertPolicy_Condition_MetricThreshold) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy_Condition_MetricThreshold) ProtoMessage() {} +func (*AlertPolicy_Condition_MetricThreshold) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 1, 1} +} +func (m *AlertPolicy_Condition_MetricThreshold) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy_Condition_MetricThreshold.Unmarshal(m, b) +} +func (m *AlertPolicy_Condition_MetricThreshold) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy_Condition_MetricThreshold.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy_Condition_MetricThreshold) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy_Condition_MetricThreshold.Merge(dst, src) +} +func (m *AlertPolicy_Condition_MetricThreshold) XXX_Size() int { + return xxx_messageInfo_AlertPolicy_Condition_MetricThreshold.Size(m) +} +func (m *AlertPolicy_Condition_MetricThreshold) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy_Condition_MetricThreshold.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy_Condition_MetricThreshold proto.InternalMessageInfo + +func (m *AlertPolicy_Condition_MetricThreshold) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetAggregations() []*Aggregation { + if m != nil { + return m.Aggregations + } + return nil +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetDenominatorFilter() string { + if m != nil { + return m.DenominatorFilter + } + return "" +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetDenominatorAggregations() []*Aggregation { + if m != nil { + return m.DenominatorAggregations + } + return nil +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetComparison() ComparisonType { + if m != nil { + return m.Comparison + } + return ComparisonType_COMPARISON_UNSPECIFIED +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetThresholdValue() float64 { + if m != nil { + return m.ThresholdValue + } + return 0 +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +func (m *AlertPolicy_Condition_MetricThreshold) GetTrigger() *AlertPolicy_Condition_Trigger { + if m != nil { + return m.Trigger + } + return nil +} + +// A condition type that checks that monitored resources +// are reporting data. The configuration defines a metric and +// a set of monitored resources. 
The predicate is considered in violation +// when a time series for the specified metric of a monitored +// resource does not include any data in the specified `duration`. +type AlertPolicy_Condition_MetricAbsence struct { + // A [filter](/monitoring/api/v3/filters) that + // identifies which time series should be compared with the threshold. + // + // The filter is similar to the one that is specified in the + // [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) (that + // call is useful to verify the time series that will be retrieved / + // processed) and must specify the metric type and optionally may contain + // restrictions on resource type, resource labels, and metric labels. + // This field may not exceed 2048 Unicode characters in length. + Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + // Specifies the alignment of data points in individual time series as + // well as how to combine the retrieved time series together (such as + // when aggregating multiple streams on each resource to a single + // stream for each resource or when aggregating streams across all + // members of a group of resrouces). Multiple aggregations + // are applied in the order specified. + // + // This field is similar to the + // one in the [`MetricService.ListTimeSeries` + // request](/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list). It + // is advisable to use the `ListTimeSeries` method when debugging this + // field. + Aggregations []*Aggregation `protobuf:"bytes,5,rep,name=aggregations,proto3" json:"aggregations,omitempty"` + // The amount of time that a time series must fail to report new + // data to be considered failing. Currently, only values that + // are a multiple of a minute--e.g. 60, 120, or 300 + // seconds--are supported. If an invalid value is given, an + // error will be returned. The `Duration.nanos` field is + // ignored. + Duration *duration.Duration `protobuf:"bytes,2,opt,name=duration,proto3" json:"duration,omitempty"` + // The number/percent of time series for which the comparison must hold + // in order for the condition to trigger. If unspecified, then the + // condition will trigger if the comparison is true for any of the + // time series that have been identified by `filter` and `aggregations`. 
+ Trigger *AlertPolicy_Condition_Trigger `protobuf:"bytes,3,opt,name=trigger,proto3" json:"trigger,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AlertPolicy_Condition_MetricAbsence) Reset() { *m = AlertPolicy_Condition_MetricAbsence{} } +func (m *AlertPolicy_Condition_MetricAbsence) String() string { return proto.CompactTextString(m) } +func (*AlertPolicy_Condition_MetricAbsence) ProtoMessage() {} +func (*AlertPolicy_Condition_MetricAbsence) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_9493c79d6a8e0cc9, []int{0, 1, 2} +} +func (m *AlertPolicy_Condition_MetricAbsence) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AlertPolicy_Condition_MetricAbsence.Unmarshal(m, b) +} +func (m *AlertPolicy_Condition_MetricAbsence) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AlertPolicy_Condition_MetricAbsence.Marshal(b, m, deterministic) +} +func (dst *AlertPolicy_Condition_MetricAbsence) XXX_Merge(src proto.Message) { + xxx_messageInfo_AlertPolicy_Condition_MetricAbsence.Merge(dst, src) +} +func (m *AlertPolicy_Condition_MetricAbsence) XXX_Size() int { + return xxx_messageInfo_AlertPolicy_Condition_MetricAbsence.Size(m) +} +func (m *AlertPolicy_Condition_MetricAbsence) XXX_DiscardUnknown() { + xxx_messageInfo_AlertPolicy_Condition_MetricAbsence.DiscardUnknown(m) +} + +var xxx_messageInfo_AlertPolicy_Condition_MetricAbsence proto.InternalMessageInfo + +func (m *AlertPolicy_Condition_MetricAbsence) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *AlertPolicy_Condition_MetricAbsence) GetAggregations() []*Aggregation { + if m != nil { + return m.Aggregations + } + return nil +} + +func (m *AlertPolicy_Condition_MetricAbsence) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +func (m *AlertPolicy_Condition_MetricAbsence) GetTrigger() *AlertPolicy_Condition_Trigger { + if m != nil { + return m.Trigger + } + return nil +} + +func init() { + proto.RegisterType((*AlertPolicy)(nil), "google.monitoring.v3.AlertPolicy") + proto.RegisterMapType((map[string]string)(nil), "google.monitoring.v3.AlertPolicy.UserLabelsEntry") + proto.RegisterType((*AlertPolicy_Documentation)(nil), "google.monitoring.v3.AlertPolicy.Documentation") + proto.RegisterType((*AlertPolicy_Condition)(nil), "google.monitoring.v3.AlertPolicy.Condition") + proto.RegisterType((*AlertPolicy_Condition_Trigger)(nil), "google.monitoring.v3.AlertPolicy.Condition.Trigger") + proto.RegisterType((*AlertPolicy_Condition_MetricThreshold)(nil), "google.monitoring.v3.AlertPolicy.Condition.MetricThreshold") + proto.RegisterType((*AlertPolicy_Condition_MetricAbsence)(nil), "google.monitoring.v3.AlertPolicy.Condition.MetricAbsence") + proto.RegisterEnum("google.monitoring.v3.AlertPolicy_ConditionCombinerType", AlertPolicy_ConditionCombinerType_name, AlertPolicy_ConditionCombinerType_value) +} + +func init() { + proto.RegisterFile("google/monitoring/v3/alert.proto", fileDescriptor_alert_9493c79d6a8e0cc9) +} + +var fileDescriptor_alert_9493c79d6a8e0cc9 = []byte{ + // 941 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xeb, 0x6e, 0xe3, 0x44, + 0x14, 0xae, 0x93, 0xe6, 0x76, 0xd2, 0x36, 0xd9, 0xd9, 0xee, 0xae, 0x31, 0x68, 0x95, 0xae, 0x90, + 0x88, 0x40, 0x38, 0x22, 0x01, 0x71, 0x59, 0x81, 0x94, 0x5b, 0x37, 0x11, 0x24, 0xad, 0xa6, 0x69, + 0x91, 0x50, 0x25, 0xcb, 0x71, 
0xa6, 0xae, 0x85, 0x3d, 0x63, 0x4d, 0xec, 0xa2, 0xbc, 0x0e, 0x3f, + 0x79, 0x14, 0x1e, 0x81, 0x7f, 0xbc, 0x02, 0xe2, 0x01, 0x90, 0xc7, 0x63, 0xc7, 0xe9, 0xa6, 0xbb, + 0x64, 0xf7, 0x5f, 0xce, 0x9c, 0xef, 0x7c, 0xe7, 0xf6, 0xcd, 0x38, 0xd0, 0xb0, 0x19, 0xb3, 0x5d, + 0xd2, 0xf2, 0x18, 0x75, 0x02, 0xc6, 0x1d, 0x6a, 0xb7, 0xee, 0x3a, 0x2d, 0xd3, 0x25, 0x3c, 0xd0, + 0x7d, 0xce, 0x02, 0x86, 0x8e, 0x63, 0x84, 0xbe, 0x46, 0xe8, 0x77, 0x1d, 0xed, 0x23, 0x19, 0x67, + 0xfa, 0x4e, 0xcb, 0xa4, 0x94, 0x05, 0x66, 0xe0, 0x30, 0xba, 0x8c, 0x63, 0xb4, 0x93, 0xad, 0xac, + 0x16, 0xf3, 0x3c, 0x46, 0x25, 0xe4, 0xd3, 0xad, 0x10, 0x2f, 0x8c, 0x89, 0x0c, 0x4e, 0x2c, 0xc6, + 0x17, 0x12, 0xfb, 0x5c, 0x62, 0x85, 0x35, 0x0f, 0x6f, 0x5a, 0x8b, 0x90, 0x0b, 0xd8, 0x43, 0xfe, + 0xdf, 0xb8, 0xe9, 0xfb, 0x84, 0xcb, 0x72, 0x5e, 0xfc, 0x5d, 0x83, 0x6a, 0x37, 0x6a, 0xe9, 0x9c, + 0xb9, 0x8e, 0xb5, 0x42, 0x08, 0xf6, 0xa9, 0xe9, 0x11, 0x55, 0x69, 0x28, 0xcd, 0x0a, 0x16, 0xbf, + 0xd1, 0x09, 0x1c, 0x2c, 0x9c, 0xa5, 0xef, 0x9a, 0x2b, 0x43, 0xf8, 0x72, 0xc2, 0x57, 0x95, 0x67, + 0xd3, 0x08, 0x72, 0x09, 0x87, 0x0b, 0x66, 0x85, 0x1e, 0xa1, 0x71, 0x91, 0xea, 0x61, 0x43, 0x69, + 0x56, 0xdb, 0x2d, 0x7d, 0xdb, 0x84, 0xf4, 0x4c, 0x42, 0x7d, 0x90, 0x0d, 0xc3, 0x9b, 0x2c, 0x08, + 0x43, 0x35, 0x5c, 0x12, 0x6e, 0xb8, 0xe6, 0x9c, 0xb8, 0x4b, 0xb5, 0xde, 0xc8, 0x37, 0xab, 0xed, + 0x2f, 0xde, 0x4e, 0x7a, 0xb9, 0x24, 0xfc, 0x27, 0x11, 0x33, 0xa4, 0x01, 0x5f, 0x61, 0x08, 0xd3, + 0x03, 0xf4, 0x23, 0x80, 0xc5, 0xe8, 0xc2, 0x11, 0x4b, 0x51, 0x0f, 0x04, 0xe5, 0x67, 0x6f, 0xa7, + 0xec, 0x27, 0x31, 0x38, 0x13, 0x8e, 0x2e, 0xa0, 0x6c, 0x31, 0x6f, 0xee, 0x50, 0xc2, 0xd5, 0x62, + 0x43, 0x69, 0x1e, 0xb5, 0xbf, 0xde, 0x81, 0xaa, 0x2f, 0x43, 0x67, 0x2b, 0x9f, 0xe0, 0x94, 0x08, + 0x7d, 0x09, 0x25, 0x42, 0xcd, 0xb9, 0x4b, 0x16, 0xea, 0x23, 0x31, 0x46, 0x2d, 0xe1, 0x4c, 0xb6, + 0xa8, 0xf7, 0x18, 0x73, 0xaf, 0x4c, 0x37, 0x24, 0x38, 0x81, 0xa2, 0x0e, 0x3c, 0xa1, 0x2c, 0x70, + 0x6e, 0x1c, 0x2b, 0x96, 0x89, 0x75, 0x6b, 0x52, 0x1a, 0x4d, 0xed, 0xa8, 0x91, 0x6f, 0x56, 0xf0, + 0x71, 0xd6, 0xd9, 0x97, 0x3e, 0x34, 0x81, 0x9a, 0xc5, 0x49, 0x56, 0x57, 0x2a, 0x88, 0x94, 0x1f, + 0x6f, 0x6f, 0x63, 0x22, 0x45, 0x88, 0x05, 0x16, 0x1f, 0x25, 0xc1, 0xb1, 0x1d, 0xd1, 0xdd, 0x93, + 0xa9, 0x5a, 0xdd, 0x85, 0xce, 0xdb, 0xb0, 0xb5, 0x53, 0x38, 0xdc, 0x90, 0x07, 0x52, 0xa1, 0x64, + 0x31, 0x1a, 0x10, 0x1a, 0x48, 0x81, 0x26, 0x26, 0xfa, 0x10, 0x2a, 0x9e, 0xe3, 0x11, 0x23, 0x58, + 0xf9, 0x89, 0x40, 0xcb, 0xd1, 0x41, 0x34, 0x5a, 0xed, 0xaf, 0x32, 0x54, 0xd2, 0xa1, 0xa7, 0x12, + 0x3f, 0x78, 0x83, 0xc4, 0x8b, 0xaf, 0x4b, 0x9c, 0xc2, 0xe3, 0x74, 0xf1, 0x46, 0x70, 0xcb, 0xc9, + 0xf2, 0x96, 0xb9, 0x0b, 0x51, 0x47, 0xb5, 0xfd, 0x72, 0x87, 0xad, 0xeb, 0x13, 0x12, 0x70, 0xc7, + 0x9a, 0x25, 0x14, 0xa3, 0x3d, 0x8c, 0x52, 0xe6, 0xf4, 0x14, 0xdd, 0x40, 0x7d, 0x9d, 0xcf, 0x9c, + 0x2f, 0xa3, 0xa6, 0x73, 0x22, 0xd9, 0xb7, 0xbb, 0x27, 0xeb, 0x46, 0xf1, 0x16, 0x19, 0xed, 0xe1, + 0x5a, 0x4a, 0x2a, 0xce, 0x02, 0x6d, 0x08, 0xa5, 0x19, 0x77, 0x6c, 0x9b, 0x70, 0xf4, 0x14, 0x0a, + 0x16, 0x0b, 0xe5, 0x70, 0x0b, 0xa3, 0x3d, 0x1c, 0x9b, 0x48, 0x83, 0x92, 0x4f, 0xb8, 0x95, 0x54, + 0xa0, 0x8c, 0xf6, 0x70, 0x72, 0xd0, 0x2b, 0xc2, 0x7e, 0x34, 0x73, 0xed, 0x9f, 0x3c, 0xd4, 0xee, + 0x35, 0x86, 0x9e, 0x42, 0xf1, 0xc6, 0x71, 0x03, 0xc2, 0xe5, 0x46, 0xa4, 0x85, 0x86, 0x70, 0x60, + 0xda, 0x36, 0x27, 0x76, 0xfc, 0x32, 0xaa, 0x65, 0x71, 0x09, 0x4f, 0x1e, 0x68, 0x6b, 0x8d, 0xc4, + 0x1b, 0x61, 0xe8, 0x73, 0x40, 0x0b, 0x42, 0x99, 0xe7, 0x50, 0x33, 0x60, 0xdc, 0x90, 0xa9, 0x2a, + 0x22, 0xd5, 0xa3, 0x8c, 0xe7, 0x34, 0xce, 0x7a, 0x0d, 
0x6a, 0x16, 0xbe, 0x51, 0x01, 0xfc, 0xdf, + 0x0a, 0x9e, 0x65, 0x28, 0xba, 0xd9, 0x62, 0x06, 0xd1, 0xb3, 0xe2, 0xf9, 0x26, 0x77, 0x96, 0x8c, + 0xaa, 0xfb, 0xe2, 0x2d, 0x78, 0x40, 0xf5, 0xfd, 0x14, 0x27, 0x2e, 0x7e, 0x26, 0x0e, 0x7d, 0x02, + 0xb5, 0x54, 0x5a, 0xc6, 0x5d, 0x74, 0xc1, 0xd5, 0x42, 0x34, 0x71, 0x7c, 0x94, 0x1e, 0x8b, 0x6b, + 0x8f, 0xbe, 0x82, 0x72, 0xf2, 0xd2, 0x0b, 0xb1, 0x56, 0xdb, 0x1f, 0xbc, 0xf6, 0x48, 0x0c, 0x24, + 0x00, 0xa7, 0x50, 0x34, 0x81, 0x52, 0x10, 0x2f, 0x5b, 0x2d, 0x89, 0xa8, 0xce, 0x2e, 0x5a, 0x92, + 0x3a, 0xc1, 0x09, 0x87, 0xf6, 0xaf, 0x02, 0x87, 0x1b, 0x02, 0xcb, 0xac, 0x5c, 0x79, 0xe3, 0xca, + 0x0b, 0xef, 0xb6, 0xf2, 0x6c, 0xdb, 0xb9, 0x77, 0x6a, 0x3b, 0xff, 0xfe, 0x6d, 0xf7, 0xaa, 0x50, + 0x49, 0x6f, 0x91, 0xf6, 0x3d, 0xd4, 0xee, 0x7d, 0x6e, 0x50, 0x1d, 0xf2, 0xbf, 0x92, 0x95, 0x9c, + 0x40, 0xf4, 0x13, 0x1d, 0x43, 0x21, 0xde, 0x66, 0x7c, 0x11, 0x62, 0xe3, 0xbb, 0xdc, 0x37, 0xca, + 0x0b, 0x13, 0x9e, 0x6c, 0xfd, 0x1e, 0xa0, 0x67, 0xf0, 0xb8, 0x7f, 0x36, 0xe9, 0x8d, 0xa7, 0x43, + 0xe3, 0x72, 0x7a, 0x71, 0x3e, 0xec, 0x8f, 0x4f, 0xc7, 0xc3, 0x41, 0x7d, 0x0f, 0x95, 0x20, 0xdf, + 0x9d, 0x0e, 0xea, 0x0a, 0x2a, 0x42, 0xee, 0x0c, 0xd7, 0x73, 0xe8, 0x39, 0x68, 0xdd, 0xe9, 0xc0, + 0xf8, 0x79, 0x3c, 0x1b, 0x19, 0x93, 0xee, 0xac, 0x3f, 0x1a, 0x4f, 0x5f, 0x19, 0x78, 0x78, 0x71, + 0x76, 0x89, 0xfb, 0xc3, 0x7a, 0xbe, 0xf7, 0xbb, 0x02, 0xaa, 0xc5, 0xbc, 0xad, 0x2d, 0xf7, 0x20, + 0xee, 0x39, 0x1a, 0xde, 0xb9, 0xf2, 0xcb, 0x0f, 0x12, 0x63, 0x33, 0xd7, 0xa4, 0xb6, 0xce, 0xb8, + 0xdd, 0xb2, 0x09, 0x15, 0xa3, 0x6d, 0xc5, 0x2e, 0xd3, 0x77, 0x96, 0x9b, 0xff, 0x4c, 0x5e, 0xae, + 0xad, 0x3f, 0x72, 0xda, 0xab, 0x98, 0xa0, 0xef, 0xb2, 0x70, 0xa1, 0x4f, 0xd6, 0xa9, 0xae, 0x3a, + 0x7f, 0x26, 0xce, 0x6b, 0xe1, 0xbc, 0x5e, 0x3b, 0xaf, 0xaf, 0x3a, 0xf3, 0xa2, 0x48, 0xd2, 0xf9, + 0x2f, 0x00, 0x00, 0xff, 0xff, 0x66, 0xb5, 0x16, 0x64, 0x76, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert_service.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert_service.pb.go new file mode 100644 index 0000000..b1f27ca --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/alert_service.pb.go @@ -0,0 +1,667 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/alert_service.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The protocol for the `CreateAlertPolicy` request. +type CreateAlertPolicyRequest struct { + // The project in which to create the alerting policy. The format is + // `projects/[PROJECT_ID]`. 
+ // + // Note that this field names the parent container in which the alerting + // policy will be written, not the name of the created policy. The alerting + // policy that is returned will have a name that contains a normalized + // representation of this name as a prefix but adds a suffix of the form + // `/alertPolicies/[POLICY_ID]`, identifying the policy in the container. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The requested alerting policy. You should omit the `name` field in this + // policy. The name will be returned in the new policy, including + // a new [ALERT_POLICY_ID] value. + AlertPolicy *AlertPolicy `protobuf:"bytes,2,opt,name=alert_policy,json=alertPolicy,proto3" json:"alert_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateAlertPolicyRequest) Reset() { *m = CreateAlertPolicyRequest{} } +func (m *CreateAlertPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*CreateAlertPolicyRequest) ProtoMessage() {} +func (*CreateAlertPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{0} +} +func (m *CreateAlertPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateAlertPolicyRequest.Unmarshal(m, b) +} +func (m *CreateAlertPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateAlertPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *CreateAlertPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateAlertPolicyRequest.Merge(dst, src) +} +func (m *CreateAlertPolicyRequest) XXX_Size() int { + return xxx_messageInfo_CreateAlertPolicyRequest.Size(m) +} +func (m *CreateAlertPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateAlertPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateAlertPolicyRequest proto.InternalMessageInfo + +func (m *CreateAlertPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateAlertPolicyRequest) GetAlertPolicy() *AlertPolicy { + if m != nil { + return m.AlertPolicy + } + return nil +} + +// The protocol for the `GetAlertPolicy` request. +type GetAlertPolicyRequest struct { + // The alerting policy to retrieve. 
The format is + // + // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetAlertPolicyRequest) Reset() { *m = GetAlertPolicyRequest{} } +func (m *GetAlertPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*GetAlertPolicyRequest) ProtoMessage() {} +func (*GetAlertPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{1} +} +func (m *GetAlertPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetAlertPolicyRequest.Unmarshal(m, b) +} +func (m *GetAlertPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetAlertPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *GetAlertPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetAlertPolicyRequest.Merge(dst, src) +} +func (m *GetAlertPolicyRequest) XXX_Size() int { + return xxx_messageInfo_GetAlertPolicyRequest.Size(m) +} +func (m *GetAlertPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetAlertPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetAlertPolicyRequest proto.InternalMessageInfo + +func (m *GetAlertPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The protocol for the `ListAlertPolicies` request. +type ListAlertPoliciesRequest struct { + // The project whose alert policies are to be listed. The format is + // + // projects/[PROJECT_ID] + // + // Note that this field names the parent container in which the alerting + // policies to be listed are stored. To retrieve a single alerting policy + // by name, use the + // [GetAlertPolicy][google.monitoring.v3.AlertPolicyService.GetAlertPolicy] + // operation, instead. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // If provided, this field specifies the criteria that must be met by + // alert policies to be included in the response. + // + // For more details, see [sorting and + // filtering](/monitoring/api/v3/sorting-and-filtering). + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // A comma-separated list of fields by which to sort the result. Supports + // the same set of field references as the `filter` field. Entries can be + // prefixed with a minus sign to sort by the field in descending order. + // + // For more details, see [sorting and + // filtering](/monitoring/api/v3/sorting-and-filtering). + OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // The maximum number of results to return in a single response. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return more results from the previous method call. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAlertPoliciesRequest) Reset() { *m = ListAlertPoliciesRequest{} } +func (m *ListAlertPoliciesRequest) String() string { return proto.CompactTextString(m) } +func (*ListAlertPoliciesRequest) ProtoMessage() {} +func (*ListAlertPoliciesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{2} +} +func (m *ListAlertPoliciesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAlertPoliciesRequest.Unmarshal(m, b) +} +func (m *ListAlertPoliciesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAlertPoliciesRequest.Marshal(b, m, deterministic) +} +func (dst *ListAlertPoliciesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAlertPoliciesRequest.Merge(dst, src) +} +func (m *ListAlertPoliciesRequest) XXX_Size() int { + return xxx_messageInfo_ListAlertPoliciesRequest.Size(m) +} +func (m *ListAlertPoliciesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListAlertPoliciesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAlertPoliciesRequest proto.InternalMessageInfo + +func (m *ListAlertPoliciesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListAlertPoliciesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListAlertPoliciesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListAlertPoliciesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListAlertPoliciesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The protocol for the `ListAlertPolicies` response. +type ListAlertPoliciesResponse struct { + // The returned alert policies. + AlertPolicies []*AlertPolicy `protobuf:"bytes,3,rep,name=alert_policies,json=alertPolicies,proto3" json:"alert_policies,omitempty"` + // If there might be more results than were returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `pageToken` in the next call to this method. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListAlertPoliciesResponse) Reset() { *m = ListAlertPoliciesResponse{} } +func (m *ListAlertPoliciesResponse) String() string { return proto.CompactTextString(m) } +func (*ListAlertPoliciesResponse) ProtoMessage() {} +func (*ListAlertPoliciesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{3} +} +func (m *ListAlertPoliciesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListAlertPoliciesResponse.Unmarshal(m, b) +} +func (m *ListAlertPoliciesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListAlertPoliciesResponse.Marshal(b, m, deterministic) +} +func (dst *ListAlertPoliciesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListAlertPoliciesResponse.Merge(dst, src) +} +func (m *ListAlertPoliciesResponse) XXX_Size() int { + return xxx_messageInfo_ListAlertPoliciesResponse.Size(m) +} +func (m *ListAlertPoliciesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListAlertPoliciesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListAlertPoliciesResponse proto.InternalMessageInfo + +func (m *ListAlertPoliciesResponse) GetAlertPolicies() []*AlertPolicy { + if m != nil { + return m.AlertPolicies + } + return nil +} + +func (m *ListAlertPoliciesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The protocol for the `UpdateAlertPolicy` request. +type UpdateAlertPolicyRequest struct { + // Optional. A list of alerting policy field names. If this field is not + // empty, each listed field in the existing alerting policy is set to the + // value of the corresponding field in the supplied policy (`alert_policy`), + // or to the field's default value if the field is not in the supplied + // alerting policy. Fields not listed retain their previous value. + // + // Examples of valid field masks include `display_name`, `documentation`, + // `documentation.content`, `documentation.mime_type`, `user_labels`, + // `user_label.nameofkey`, `enabled`, `conditions`, `combiner`, etc. + // + // If this field is empty, then the supplied alerting policy replaces the + // existing policy. It is the same as deleting the existing policy and + // adding the supplied policy, except for the following: + // + // + The new policy will have the same `[ALERT_POLICY_ID]` as the former + // policy. This gives you continuity with the former policy in your + // notifications and incidents. + // + Conditions in the new policy will keep their former `[CONDITION_ID]` if + // the supplied condition includes the `name` field with that + // `[CONDITION_ID]`. If the supplied condition omits the `name` field, + // then a new `[CONDITION_ID]` is created. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Required. The updated alerting policy or the updated values for the + // fields listed in `update_mask`. + // If `update_mask` is not empty, any fields in this policy that are + // not in `update_mask` are ignored. 
+ AlertPolicy *AlertPolicy `protobuf:"bytes,3,opt,name=alert_policy,json=alertPolicy,proto3" json:"alert_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateAlertPolicyRequest) Reset() { *m = UpdateAlertPolicyRequest{} } +func (m *UpdateAlertPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateAlertPolicyRequest) ProtoMessage() {} +func (*UpdateAlertPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{4} +} +func (m *UpdateAlertPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateAlertPolicyRequest.Unmarshal(m, b) +} +func (m *UpdateAlertPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateAlertPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateAlertPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateAlertPolicyRequest.Merge(dst, src) +} +func (m *UpdateAlertPolicyRequest) XXX_Size() int { + return xxx_messageInfo_UpdateAlertPolicyRequest.Size(m) +} +func (m *UpdateAlertPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateAlertPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateAlertPolicyRequest proto.InternalMessageInfo + +func (m *UpdateAlertPolicyRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateAlertPolicyRequest) GetAlertPolicy() *AlertPolicy { + if m != nil { + return m.AlertPolicy + } + return nil +} + +// The protocol for the `DeleteAlertPolicy` request. +type DeleteAlertPolicyRequest struct { + // The alerting policy to delete. The format is: + // + // projects/[PROJECT_ID]/alertPolicies/[ALERT_POLICY_ID] + // + // For more information, see [AlertPolicy][google.monitoring.v3.AlertPolicy]. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteAlertPolicyRequest) Reset() { *m = DeleteAlertPolicyRequest{} } +func (m *DeleteAlertPolicyRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteAlertPolicyRequest) ProtoMessage() {} +func (*DeleteAlertPolicyRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_alert_service_d83b47a0e5c8ce7d, []int{5} +} +func (m *DeleteAlertPolicyRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteAlertPolicyRequest.Unmarshal(m, b) +} +func (m *DeleteAlertPolicyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteAlertPolicyRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteAlertPolicyRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteAlertPolicyRequest.Merge(dst, src) +} +func (m *DeleteAlertPolicyRequest) XXX_Size() int { + return xxx_messageInfo_DeleteAlertPolicyRequest.Size(m) +} +func (m *DeleteAlertPolicyRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteAlertPolicyRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteAlertPolicyRequest proto.InternalMessageInfo + +func (m *DeleteAlertPolicyRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*CreateAlertPolicyRequest)(nil), "google.monitoring.v3.CreateAlertPolicyRequest") + proto.RegisterType((*GetAlertPolicyRequest)(nil), "google.monitoring.v3.GetAlertPolicyRequest") + proto.RegisterType((*ListAlertPoliciesRequest)(nil), "google.monitoring.v3.ListAlertPoliciesRequest") + proto.RegisterType((*ListAlertPoliciesResponse)(nil), "google.monitoring.v3.ListAlertPoliciesResponse") + proto.RegisterType((*UpdateAlertPolicyRequest)(nil), "google.monitoring.v3.UpdateAlertPolicyRequest") + proto.RegisterType((*DeleteAlertPolicyRequest)(nil), "google.monitoring.v3.DeleteAlertPolicyRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AlertPolicyServiceClient is the client API for AlertPolicyService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AlertPolicyServiceClient interface { + // Lists the existing alerting policies for the project. + ListAlertPolicies(ctx context.Context, in *ListAlertPoliciesRequest, opts ...grpc.CallOption) (*ListAlertPoliciesResponse, error) + // Gets a single alerting policy. + GetAlertPolicy(ctx context.Context, in *GetAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) + // Creates a new alerting policy. + CreateAlertPolicy(ctx context.Context, in *CreateAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) + // Deletes an alerting policy. + DeleteAlertPolicy(ctx context.Context, in *DeleteAlertPolicyRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Updates an alerting policy. You can either replace the entire policy with + // a new one or replace only certain fields in the current alerting policy by + // specifying the fields to be updated via `updateMask`. Returns the + // updated alerting policy. 
+ UpdateAlertPolicy(ctx context.Context, in *UpdateAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) +} + +type alertPolicyServiceClient struct { + cc *grpc.ClientConn +} + +func NewAlertPolicyServiceClient(cc *grpc.ClientConn) AlertPolicyServiceClient { + return &alertPolicyServiceClient{cc} +} + +func (c *alertPolicyServiceClient) ListAlertPolicies(ctx context.Context, in *ListAlertPoliciesRequest, opts ...grpc.CallOption) (*ListAlertPoliciesResponse, error) { + out := new(ListAlertPoliciesResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.AlertPolicyService/ListAlertPolicies", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *alertPolicyServiceClient) GetAlertPolicy(ctx context.Context, in *GetAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) { + out := new(AlertPolicy) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.AlertPolicyService/GetAlertPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *alertPolicyServiceClient) CreateAlertPolicy(ctx context.Context, in *CreateAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) { + out := new(AlertPolicy) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *alertPolicyServiceClient) DeleteAlertPolicy(ctx context.Context, in *DeleteAlertPolicyRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *alertPolicyServiceClient) UpdateAlertPolicy(ctx context.Context, in *UpdateAlertPolicyRequest, opts ...grpc.CallOption) (*AlertPolicy, error) { + out := new(AlertPolicy) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// AlertPolicyServiceServer is the server API for AlertPolicyService service. +type AlertPolicyServiceServer interface { + // Lists the existing alerting policies for the project. + ListAlertPolicies(context.Context, *ListAlertPoliciesRequest) (*ListAlertPoliciesResponse, error) + // Gets a single alerting policy. + GetAlertPolicy(context.Context, *GetAlertPolicyRequest) (*AlertPolicy, error) + // Creates a new alerting policy. + CreateAlertPolicy(context.Context, *CreateAlertPolicyRequest) (*AlertPolicy, error) + // Deletes an alerting policy. + DeleteAlertPolicy(context.Context, *DeleteAlertPolicyRequest) (*empty.Empty, error) + // Updates an alerting policy. You can either replace the entire policy with + // a new one or replace only certain fields in the current alerting policy by + // specifying the fields to be updated via `updateMask`. Returns the + // updated alerting policy. 
+ UpdateAlertPolicy(context.Context, *UpdateAlertPolicyRequest) (*AlertPolicy, error) +} + +func RegisterAlertPolicyServiceServer(s *grpc.Server, srv AlertPolicyServiceServer) { + s.RegisterService(&_AlertPolicyService_serviceDesc, srv) +} + +func _AlertPolicyService_ListAlertPolicies_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListAlertPoliciesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AlertPolicyServiceServer).ListAlertPolicies(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.AlertPolicyService/ListAlertPolicies", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AlertPolicyServiceServer).ListAlertPolicies(ctx, req.(*ListAlertPoliciesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AlertPolicyService_GetAlertPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetAlertPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AlertPolicyServiceServer).GetAlertPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.AlertPolicyService/GetAlertPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AlertPolicyServiceServer).GetAlertPolicy(ctx, req.(*GetAlertPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AlertPolicyService_CreateAlertPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateAlertPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AlertPolicyServiceServer).CreateAlertPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.AlertPolicyService/CreateAlertPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AlertPolicyServiceServer).CreateAlertPolicy(ctx, req.(*CreateAlertPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AlertPolicyService_DeleteAlertPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteAlertPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AlertPolicyServiceServer).DeleteAlertPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.AlertPolicyService/DeleteAlertPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AlertPolicyServiceServer).DeleteAlertPolicy(ctx, req.(*DeleteAlertPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _AlertPolicyService_UpdateAlertPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateAlertPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(AlertPolicyServiceServer).UpdateAlertPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ 
+ Server: srv, + FullMethod: "/google.monitoring.v3.AlertPolicyService/UpdateAlertPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(AlertPolicyServiceServer).UpdateAlertPolicy(ctx, req.(*UpdateAlertPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _AlertPolicyService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.monitoring.v3.AlertPolicyService", + HandlerType: (*AlertPolicyServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListAlertPolicies", + Handler: _AlertPolicyService_ListAlertPolicies_Handler, + }, + { + MethodName: "GetAlertPolicy", + Handler: _AlertPolicyService_GetAlertPolicy_Handler, + }, + { + MethodName: "CreateAlertPolicy", + Handler: _AlertPolicyService_CreateAlertPolicy_Handler, + }, + { + MethodName: "DeleteAlertPolicy", + Handler: _AlertPolicyService_DeleteAlertPolicy_Handler, + }, + { + MethodName: "UpdateAlertPolicy", + Handler: _AlertPolicyService_UpdateAlertPolicy_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/monitoring/v3/alert_service.proto", +} + +func init() { + proto.RegisterFile("google/monitoring/v3/alert_service.proto", fileDescriptor_alert_service_d83b47a0e5c8ce7d) +} + +var fileDescriptor_alert_service_d83b47a0e5c8ce7d = []byte{ + // 656 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0x41, 0x6f, 0xd3, 0x4c, + 0x10, 0x95, 0x93, 0x36, 0x5f, 0xbb, 0xfd, 0x5a, 0x94, 0x15, 0x54, 0xae, 0x0b, 0x52, 0x30, 0x2a, + 0x54, 0xad, 0xb0, 0xa5, 0xf8, 0x04, 0x15, 0x48, 0xa4, 0x85, 0xf6, 0x40, 0xa5, 0x28, 0x85, 0x1e, + 0x50, 0xa4, 0x68, 0x93, 0x4c, 0xac, 0x25, 0x8e, 0xd7, 0x78, 0x37, 0x11, 0x29, 0xea, 0x85, 0x23, + 0x12, 0xe2, 0xc0, 0x99, 0x03, 0x47, 0x38, 0x20, 0x7e, 0x07, 0x57, 0xfe, 0x02, 0x3f, 0x04, 0x79, + 0xed, 0x34, 0x76, 0x6d, 0xab, 0x16, 0xb7, 0xcc, 0xce, 0xdb, 0x99, 0xb7, 0x6f, 0xde, 0x38, 0x68, + 0xdb, 0x66, 0xcc, 0x76, 0xc0, 0x1c, 0x31, 0x97, 0x0a, 0xe6, 0x53, 0xd7, 0x36, 0x27, 0x96, 0x49, + 0x1c, 0xf0, 0x45, 0x87, 0x83, 0x3f, 0xa1, 0x3d, 0x30, 0x3c, 0x9f, 0x09, 0x86, 0xaf, 0x87, 0x48, + 0x63, 0x8e, 0x34, 0x26, 0x96, 0x76, 0x33, 0xba, 0x4f, 0x3c, 0x6a, 0x12, 0xd7, 0x65, 0x82, 0x08, + 0xca, 0x5c, 0x1e, 0xde, 0xd1, 0x6a, 0xf9, 0xd5, 0x23, 0xc4, 0x66, 0x84, 0x90, 0x51, 0x77, 0x3c, + 0x30, 0x61, 0xe4, 0x89, 0xe9, 0xa5, 0xeb, 0x17, 0xc9, 0x01, 0x05, 0xa7, 0xdf, 0x19, 0x11, 0x3e, + 0x0c, 0x11, 0xba, 0x40, 0xea, 0xbe, 0x0f, 0x44, 0xc0, 0x93, 0xa0, 0x66, 0x93, 0x39, 0xb4, 0x37, + 0x6d, 0xc1, 0x9b, 0x31, 0x70, 0x81, 0x31, 0x5a, 0x70, 0xc9, 0x08, 0xd4, 0x72, 0x4d, 0xd9, 0x5e, + 0x6e, 0xc9, 0xdf, 0xf8, 0x00, 0xfd, 0x1f, 0xbe, 0xcd, 0x93, 0x50, 0xb5, 0x54, 0x53, 0xb6, 0x57, + 0xea, 0xb7, 0x8d, 0xac, 0xb7, 0x19, 0xf1, 0x9a, 0x2b, 0x64, 0x1e, 0xe8, 0xbb, 0xe8, 0xc6, 0x21, + 0x88, 0x62, 0x2d, 0xf5, 0x2f, 0x0a, 0x52, 0x9f, 0x53, 0x1e, 0x83, 0x53, 0xe0, 0x97, 0x2f, 0x2c, + 0xc4, 0x38, 0xae, 0xa3, 0xca, 0x80, 0x3a, 0x02, 0x7c, 0x75, 0x51, 0x9e, 0x46, 0x11, 0xde, 0x40, + 0x4b, 0xcc, 0xef, 0x83, 0xdf, 0xe9, 0x4e, 0xd5, 0x8a, 0xcc, 0xfc, 0x27, 0xe3, 0xc6, 0x14, 0x6f, + 0xa2, 0x65, 0x8f, 0xd8, 0xd0, 0xe1, 0xf4, 0x0c, 0xe4, 0x9b, 0x16, 0x5b, 0x4b, 0xc1, 0xc1, 0x09, + 0x3d, 0x03, 0x7c, 0x0b, 0x21, 0x99, 0x14, 0x6c, 0x08, 0x6e, 0x44, 0x4d, 0xc2, 0x5f, 0x04, 0x07, + 0xfa, 0x47, 0x05, 0x6d, 0x64, 0xf0, 0xe3, 0x1e, 0x73, 0x39, 0xe0, 0x23, 0xb4, 0x16, 0x13, 0x8c, + 0x02, 0x57, 0xcb, 0xb5, 0x72, 0x31, 0xc9, 0x56, 0x49, 0xbc, 0x22, 0xbe, 0x8b, 0xae, 0xb9, 0xf0, + 0x56, 0x74, 0x62, 0x5c, 0x4a, 0x92, 
0xcb, 0x6a, 0x70, 0xdc, 0xbc, 0xe0, 0x13, 0xe8, 0xf5, 0xd2, + 0xeb, 0x67, 0xcf, 0x74, 0x0f, 0xad, 0x8c, 0x65, 0x4e, 0x9a, 0x20, 0x1a, 0x9f, 0x36, 0xe3, 0x32, + 0xf3, 0x89, 0xf1, 0x2c, 0xf0, 0xc9, 0x31, 0xe1, 0xc3, 0x16, 0x0a, 0xe1, 0xc1, 0xef, 0xd4, 0xf0, + 0xcb, 0xff, 0x34, 0x7c, 0x03, 0xa9, 0x07, 0xe0, 0x40, 0x51, 0xcb, 0xd5, 0x7f, 0x54, 0x10, 0x8e, + 0x41, 0x4f, 0xc2, 0xa5, 0xc2, 0x5f, 0x15, 0x54, 0x4d, 0xc9, 0x8e, 0x8d, 0x6c, 0x32, 0x79, 0xfe, + 0xd1, 0xcc, 0xc2, 0xf8, 0x70, 0x9e, 0xfa, 0xee, 0xfb, 0xdf, 0x7f, 0x3e, 0x97, 0xb6, 0xf0, 0x9d, + 0x60, 0x11, 0xdf, 0x05, 0x04, 0x1f, 0x79, 0x3e, 0x7b, 0x0d, 0x3d, 0xc1, 0xcd, 0x9d, 0x73, 0x33, + 0x39, 0xb2, 0x4f, 0x0a, 0x5a, 0x4b, 0x1a, 0x1d, 0xef, 0x66, 0x37, 0xcc, 0x5c, 0x07, 0xed, 0x6a, + 0x69, 0xf5, 0xfb, 0x92, 0xcf, 0x3d, 0xbc, 0x95, 0xc5, 0x27, 0x49, 0xc7, 0xdc, 0x39, 0x97, 0xaa, + 0xa5, 0x16, 0x3e, 0x4f, 0xb5, 0xbc, 0x2f, 0x43, 0x11, 0x5e, 0x0f, 0x24, 0x2f, 0x4b, 0x2f, 0xa2, + 0xd3, 0xc3, 0x84, 0xad, 0xf0, 0x07, 0x05, 0x55, 0x53, 0x0e, 0xc9, 0xe3, 0x98, 0x67, 0x25, 0x6d, + 0x3d, 0x65, 0xea, 0xa7, 0xc1, 0x97, 0x71, 0x26, 0xd8, 0x4e, 0x41, 0xc1, 0x7e, 0x2a, 0xa8, 0x9a, + 0xda, 0xa6, 0x3c, 0x32, 0x79, 0x6b, 0x57, 0x44, 0xb0, 0x23, 0xc9, 0xab, 0x51, 0xaf, 0x4b, 0x5e, + 0x71, 0x41, 0x8c, 0xab, 0x48, 0x26, 0xf5, 0x6b, 0x7c, 0x53, 0x90, 0xda, 0x63, 0xa3, 0xcc, 0x96, + 0x8d, 0xaa, 0xec, 0x19, 0x2d, 0x51, 0x33, 0x90, 0xa6, 0xa9, 0xbc, 0x7a, 0x1c, 0x41, 0x6d, 0xe6, + 0x10, 0xd7, 0x36, 0x98, 0x6f, 0x9b, 0x36, 0xb8, 0x52, 0x38, 0x33, 0x4c, 0x11, 0x8f, 0xf2, 0xe4, + 0xbf, 0xd0, 0xde, 0x3c, 0xfa, 0x5e, 0xd2, 0x0e, 0xc3, 0x02, 0xfb, 0x0e, 0x1b, 0xf7, 0x8d, 0xe3, + 0x79, 0xc7, 0x53, 0xeb, 0xd7, 0x2c, 0xd9, 0x96, 0xc9, 0xf6, 0x3c, 0xd9, 0x3e, 0xb5, 0xba, 0x15, + 0xd9, 0xc4, 0xfa, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x6f, 0x1f, 0xe6, 0xf0, 0x47, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/common.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/common.pb.go new file mode 100644 index 0000000..33b2999 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/common.pb.go @@ -0,0 +1,890 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/common.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import distribution "google.golang.org/genproto/googleapis/api/distribution" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies an ordering relationship on two arguments, here called left and +// right. +type ComparisonType int32 + +const ( + // No ordering relationship is specified. + ComparisonType_COMPARISON_UNSPECIFIED ComparisonType = 0 + // The left argument is greater than the right argument. 
+ ComparisonType_COMPARISON_GT ComparisonType = 1 + // The left argument is greater than or equal to the right argument. + ComparisonType_COMPARISON_GE ComparisonType = 2 + // The left argument is less than the right argument. + ComparisonType_COMPARISON_LT ComparisonType = 3 + // The left argument is less than or equal to the right argument. + ComparisonType_COMPARISON_LE ComparisonType = 4 + // The left argument is equal to the right argument. + ComparisonType_COMPARISON_EQ ComparisonType = 5 + // The left argument is not equal to the right argument. + ComparisonType_COMPARISON_NE ComparisonType = 6 +) + +var ComparisonType_name = map[int32]string{ + 0: "COMPARISON_UNSPECIFIED", + 1: "COMPARISON_GT", + 2: "COMPARISON_GE", + 3: "COMPARISON_LT", + 4: "COMPARISON_LE", + 5: "COMPARISON_EQ", + 6: "COMPARISON_NE", +} +var ComparisonType_value = map[string]int32{ + "COMPARISON_UNSPECIFIED": 0, + "COMPARISON_GT": 1, + "COMPARISON_GE": 2, + "COMPARISON_LT": 3, + "COMPARISON_LE": 4, + "COMPARISON_EQ": 5, + "COMPARISON_NE": 6, +} + +func (x ComparisonType) String() string { + return proto.EnumName(ComparisonType_name, int32(x)) +} +func (ComparisonType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{0} +} + +// The tier of service for a Workspace. Please see the +// [service tiers +// documentation](https://cloud.google.com/monitoring/workspaces/tiers) for more +// details. +type ServiceTier int32 // Deprecated: Do not use. +const ( + // An invalid sentinel value, used to indicate that a tier has not + // been provided explicitly. + ServiceTier_SERVICE_TIER_UNSPECIFIED ServiceTier = 0 + // The Stackdriver Basic tier, a free tier of service that provides basic + // features, a moderate allotment of logs, and access to built-in metrics. + // A number of features are not available in this tier. For more details, + // see [the service tiers + // documentation](https://cloud.google.com/monitoring/workspaces/tiers). + ServiceTier_SERVICE_TIER_BASIC ServiceTier = 1 + // The Stackdriver Premium tier, a higher, more expensive tier of service + // that provides access to all Stackdriver features, lets you use Stackdriver + // with AWS accounts, and has a larger allotments for logs and metrics. For + // more details, see [the service tiers + // documentation](https://cloud.google.com/monitoring/workspaces/tiers). + ServiceTier_SERVICE_TIER_PREMIUM ServiceTier = 2 +) + +var ServiceTier_name = map[int32]string{ + 0: "SERVICE_TIER_UNSPECIFIED", + 1: "SERVICE_TIER_BASIC", + 2: "SERVICE_TIER_PREMIUM", +} +var ServiceTier_value = map[string]int32{ + "SERVICE_TIER_UNSPECIFIED": 0, + "SERVICE_TIER_BASIC": 1, + "SERVICE_TIER_PREMIUM": 2, +} + +func (x ServiceTier) String() string { + return proto.EnumName(ServiceTier_name, int32(x)) +} +func (ServiceTier) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{1} +} + +// The Aligner describes how to bring the data points in a single +// time series into temporal alignment. +type Aggregation_Aligner int32 + +const ( + // No alignment. Raw data is returned. Not valid if cross-time + // series reduction is requested. The value type of the result is + // the same as the value type of the input. + Aggregation_ALIGN_NONE Aggregation_Aligner = 0 + // Align and convert to delta metric type. This alignment is valid + // for cumulative metrics and delta metrics. Aligning an existing + // delta metric to a delta metric requires that the alignment + // period be increased. 
The value type of the result is the same + // as the value type of the input. + // + // One can think of this aligner as a rate but without time units; that + // is, the output is conceptually (second_point - first_point). + Aggregation_ALIGN_DELTA Aggregation_Aligner = 1 + // Align and convert to a rate. This alignment is valid for + // cumulative metrics and delta metrics with numeric values. The output is a + // gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + // + // One can think of this aligner as conceptually providing the slope of + // the line that passes through the value at the start and end of the + // window. In other words, this is conceptually ((y1 - y0)/(t1 - t0)), + // and the output unit is one that has a "/time" dimension. + // + // If, by rate, you are looking for percentage change, see the + // `ALIGN_PERCENT_CHANGE` aligner option. + Aggregation_ALIGN_RATE Aggregation_Aligner = 2 + // Align by interpolating between adjacent points around the + // period boundary. This alignment is valid for gauge + // metrics with numeric values. The value type of the result is the same + // as the value type of the input. + Aggregation_ALIGN_INTERPOLATE Aggregation_Aligner = 3 + // Align by shifting the oldest data point before the period + // boundary to the boundary. This alignment is valid for gauge + // metrics. The value type of the result is the same as the + // value type of the input. + Aggregation_ALIGN_NEXT_OLDER Aggregation_Aligner = 4 + // Align time series via aggregation. The resulting data point in + // the alignment period is the minimum of all data points in the + // period. This alignment is valid for gauge and delta metrics with numeric + // values. The value type of the result is the same as the value + // type of the input. + Aggregation_ALIGN_MIN Aggregation_Aligner = 10 + // Align time series via aggregation. The resulting data point in + // the alignment period is the maximum of all data points in the + // period. This alignment is valid for gauge and delta metrics with numeric + // values. The value type of the result is the same as the value + // type of the input. + Aggregation_ALIGN_MAX Aggregation_Aligner = 11 + // Align time series via aggregation. The resulting data point in + // the alignment period is the average or arithmetic mean of all + // data points in the period. This alignment is valid for gauge and delta + // metrics with numeric values. The value type of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_MEAN Aggregation_Aligner = 12 + // Align time series via aggregation. The resulting data point in + // the alignment period is the count of all data points in the + // period. This alignment is valid for gauge and delta metrics with numeric + // or Boolean values. The value type of the output is + // [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_ALIGN_COUNT Aggregation_Aligner = 13 + // Align time series via aggregation. The resulting data point in + // the alignment period is the sum of all data points in the + // period. This alignment is valid for gauge and delta metrics with numeric + // and distribution values. The value type of the output is the + // same as the value type of the input. + Aggregation_ALIGN_SUM Aggregation_Aligner = 14 + // Align time series via aggregation. The resulting data point in + // the alignment period is the standard deviation of all data + // points in the period. 
This alignment is valid for gauge and delta metrics + // with numeric values. The value type of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_STDDEV Aggregation_Aligner = 15 + // Align time series via aggregation. The resulting data point in + // the alignment period is the count of True-valued data points in the + // period. This alignment is valid for gauge metrics with + // Boolean values. The value type of the output is + // [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_ALIGN_COUNT_TRUE Aggregation_Aligner = 16 + // Align time series via aggregation. The resulting data point in + // the alignment period is the count of False-valued data points in the + // period. This alignment is valid for gauge metrics with + // Boolean values. The value type of the output is + // [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_ALIGN_COUNT_FALSE Aggregation_Aligner = 24 + // Align time series via aggregation. The resulting data point in + // the alignment period is the fraction of True-valued data points in the + // period. This alignment is valid for gauge metrics with Boolean values. + // The output value is in the range [0, 1] and has value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_FRACTION_TRUE Aggregation_Aligner = 17 + // Align time series via aggregation. The resulting data point in + // the alignment period is the 99th percentile of all data + // points in the period. This alignment is valid for gauge and delta metrics + // with distribution values. The output is a gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_PERCENTILE_99 Aggregation_Aligner = 18 + // Align time series via aggregation. The resulting data point in + // the alignment period is the 95th percentile of all data + // points in the period. This alignment is valid for gauge and delta metrics + // with distribution values. The output is a gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_PERCENTILE_95 Aggregation_Aligner = 19 + // Align time series via aggregation. The resulting data point in + // the alignment period is the 50th percentile of all data + // points in the period. This alignment is valid for gauge and delta metrics + // with distribution values. The output is a gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_PERCENTILE_50 Aggregation_Aligner = 20 + // Align time series via aggregation. The resulting data point in + // the alignment period is the 5th percentile of all data + // points in the period. This alignment is valid for gauge and delta metrics + // with distribution values. The output is a gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_PERCENTILE_05 Aggregation_Aligner = 21 + // Align and convert to a percentage change. This alignment is valid for + // gauge and delta metrics with numeric values. This alignment conceptually + // computes the equivalent of "((current - previous)/previous)*100" + // where previous value is determined based on the alignmentPeriod. + // In the event that previous is 0 the calculated value is infinity with the + // exception that if both (current - previous) and previous are 0 the + // calculated value is 0. 
+ // A 10 minute moving mean is computed at each point of the time window + // prior to the above calculation to smooth the metric and prevent false + // positives from very short lived spikes. + // Only applicable for data that is >= 0. Any values < 0 are treated as + // no data. While delta metrics are accepted by this alignment special care + // should be taken that the values for the metric will always be positive. + // The output is a gauge metric with value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_ALIGN_PERCENT_CHANGE Aggregation_Aligner = 23 +) + +var Aggregation_Aligner_name = map[int32]string{ + 0: "ALIGN_NONE", + 1: "ALIGN_DELTA", + 2: "ALIGN_RATE", + 3: "ALIGN_INTERPOLATE", + 4: "ALIGN_NEXT_OLDER", + 10: "ALIGN_MIN", + 11: "ALIGN_MAX", + 12: "ALIGN_MEAN", + 13: "ALIGN_COUNT", + 14: "ALIGN_SUM", + 15: "ALIGN_STDDEV", + 16: "ALIGN_COUNT_TRUE", + 24: "ALIGN_COUNT_FALSE", + 17: "ALIGN_FRACTION_TRUE", + 18: "ALIGN_PERCENTILE_99", + 19: "ALIGN_PERCENTILE_95", + 20: "ALIGN_PERCENTILE_50", + 21: "ALIGN_PERCENTILE_05", + 23: "ALIGN_PERCENT_CHANGE", +} +var Aggregation_Aligner_value = map[string]int32{ + "ALIGN_NONE": 0, + "ALIGN_DELTA": 1, + "ALIGN_RATE": 2, + "ALIGN_INTERPOLATE": 3, + "ALIGN_NEXT_OLDER": 4, + "ALIGN_MIN": 10, + "ALIGN_MAX": 11, + "ALIGN_MEAN": 12, + "ALIGN_COUNT": 13, + "ALIGN_SUM": 14, + "ALIGN_STDDEV": 15, + "ALIGN_COUNT_TRUE": 16, + "ALIGN_COUNT_FALSE": 24, + "ALIGN_FRACTION_TRUE": 17, + "ALIGN_PERCENTILE_99": 18, + "ALIGN_PERCENTILE_95": 19, + "ALIGN_PERCENTILE_50": 20, + "ALIGN_PERCENTILE_05": 21, + "ALIGN_PERCENT_CHANGE": 23, +} + +func (x Aggregation_Aligner) String() string { + return proto.EnumName(Aggregation_Aligner_name, int32(x)) +} +func (Aggregation_Aligner) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{2, 0} +} + +// A Reducer describes how to aggregate data points from multiple +// time series into a single time series. +type Aggregation_Reducer int32 + +const ( + // No cross-time series reduction. The output of the aligner is + // returned. + Aggregation_REDUCE_NONE Aggregation_Reducer = 0 + // Reduce by computing the mean across time series for each + // alignment period. This reducer is valid for delta and + // gauge metrics with numeric or distribution values. The value type of the + // output is [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_REDUCE_MEAN Aggregation_Reducer = 1 + // Reduce by computing the minimum across time series for each + // alignment period. This reducer is valid for delta and + // gauge metrics with numeric values. The value type of the output + // is the same as the value type of the input. + Aggregation_REDUCE_MIN Aggregation_Reducer = 2 + // Reduce by computing the maximum across time series for each + // alignment period. This reducer is valid for delta and + // gauge metrics with numeric values. The value type of the output + // is the same as the value type of the input. + Aggregation_REDUCE_MAX Aggregation_Reducer = 3 + // Reduce by computing the sum across time series for each + // alignment period. This reducer is valid for delta and + // gauge metrics with numeric and distribution values. The value type of + // the output is the same as the value type of the input. + Aggregation_REDUCE_SUM Aggregation_Reducer = 4 + // Reduce by computing the standard deviation across time series + // for each alignment period. This reducer is valid for delta + // and gauge metrics with numeric or distribution values. 
The value type of + // the output is [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_REDUCE_STDDEV Aggregation_Reducer = 5 + // Reduce by computing the count of data points across time series + // for each alignment period. This reducer is valid for delta + // and gauge metrics of numeric, Boolean, distribution, and string value + // type. The value type of the output is + // [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_REDUCE_COUNT Aggregation_Reducer = 6 + // Reduce by computing the count of True-valued data points across time + // series for each alignment period. This reducer is valid for delta + // and gauge metrics of Boolean value type. The value type of + // the output is [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_REDUCE_COUNT_TRUE Aggregation_Reducer = 7 + // Reduce by computing the count of False-valued data points across time + // series for each alignment period. This reducer is valid for delta + // and gauge metrics of Boolean value type. The value type of + // the output is [INT64][google.api.MetricDescriptor.ValueType.INT64]. + Aggregation_REDUCE_COUNT_FALSE Aggregation_Reducer = 15 + // Reduce by computing the fraction of True-valued data points across time + // series for each alignment period. This reducer is valid for delta + // and gauge metrics of Boolean value type. The output value is in the + // range [0, 1] and has value type + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + Aggregation_REDUCE_FRACTION_TRUE Aggregation_Reducer = 8 + // Reduce by computing 99th percentile of data points across time series + // for each alignment period. This reducer is valid for gauge and delta + // metrics of numeric and distribution type. The value of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] + Aggregation_REDUCE_PERCENTILE_99 Aggregation_Reducer = 9 + // Reduce by computing 95th percentile of data points across time series + // for each alignment period. This reducer is valid for gauge and delta + // metrics of numeric and distribution type. The value of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] + Aggregation_REDUCE_PERCENTILE_95 Aggregation_Reducer = 10 + // Reduce by computing 50th percentile of data points across time series + // for each alignment period. This reducer is valid for gauge and delta + // metrics of numeric and distribution type. The value of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] + Aggregation_REDUCE_PERCENTILE_50 Aggregation_Reducer = 11 + // Reduce by computing 5th percentile of data points across time series + // for each alignment period. This reducer is valid for gauge and delta + // metrics of numeric and distribution type. 
The value of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE] + Aggregation_REDUCE_PERCENTILE_05 Aggregation_Reducer = 12 +) + +var Aggregation_Reducer_name = map[int32]string{ + 0: "REDUCE_NONE", + 1: "REDUCE_MEAN", + 2: "REDUCE_MIN", + 3: "REDUCE_MAX", + 4: "REDUCE_SUM", + 5: "REDUCE_STDDEV", + 6: "REDUCE_COUNT", + 7: "REDUCE_COUNT_TRUE", + 15: "REDUCE_COUNT_FALSE", + 8: "REDUCE_FRACTION_TRUE", + 9: "REDUCE_PERCENTILE_99", + 10: "REDUCE_PERCENTILE_95", + 11: "REDUCE_PERCENTILE_50", + 12: "REDUCE_PERCENTILE_05", +} +var Aggregation_Reducer_value = map[string]int32{ + "REDUCE_NONE": 0, + "REDUCE_MEAN": 1, + "REDUCE_MIN": 2, + "REDUCE_MAX": 3, + "REDUCE_SUM": 4, + "REDUCE_STDDEV": 5, + "REDUCE_COUNT": 6, + "REDUCE_COUNT_TRUE": 7, + "REDUCE_COUNT_FALSE": 15, + "REDUCE_FRACTION_TRUE": 8, + "REDUCE_PERCENTILE_99": 9, + "REDUCE_PERCENTILE_95": 10, + "REDUCE_PERCENTILE_50": 11, + "REDUCE_PERCENTILE_05": 12, +} + +func (x Aggregation_Reducer) String() string { + return proto.EnumName(Aggregation_Reducer_name, int32(x)) +} +func (Aggregation_Reducer) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{2, 1} +} + +// A single strongly-typed value. +type TypedValue struct { + // The typed value field. + // + // Types that are valid to be assigned to Value: + // *TypedValue_BoolValue + // *TypedValue_Int64Value + // *TypedValue_DoubleValue + // *TypedValue_StringValue + // *TypedValue_DistributionValue + Value isTypedValue_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TypedValue) Reset() { *m = TypedValue{} } +func (m *TypedValue) String() string { return proto.CompactTextString(m) } +func (*TypedValue) ProtoMessage() {} +func (*TypedValue) Descriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{0} +} +func (m *TypedValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TypedValue.Unmarshal(m, b) +} +func (m *TypedValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TypedValue.Marshal(b, m, deterministic) +} +func (dst *TypedValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_TypedValue.Merge(dst, src) +} +func (m *TypedValue) XXX_Size() int { + return xxx_messageInfo_TypedValue.Size(m) +} +func (m *TypedValue) XXX_DiscardUnknown() { + xxx_messageInfo_TypedValue.DiscardUnknown(m) +} + +var xxx_messageInfo_TypedValue proto.InternalMessageInfo + +type isTypedValue_Value interface { + isTypedValue_Value() +} + +type TypedValue_BoolValue struct { + BoolValue bool `protobuf:"varint,1,opt,name=bool_value,json=boolValue,proto3,oneof"` +} + +type TypedValue_Int64Value struct { + Int64Value int64 `protobuf:"varint,2,opt,name=int64_value,json=int64Value,proto3,oneof"` +} + +type TypedValue_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type TypedValue_StringValue struct { + StringValue string `protobuf:"bytes,4,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type TypedValue_DistributionValue struct { + DistributionValue *distribution.Distribution `protobuf:"bytes,5,opt,name=distribution_value,json=distributionValue,proto3,oneof"` +} + +func (*TypedValue_BoolValue) isTypedValue_Value() {} + +func (*TypedValue_Int64Value) isTypedValue_Value() {} + +func (*TypedValue_DoubleValue) isTypedValue_Value() {} + +func (*TypedValue_StringValue) isTypedValue_Value() {} + +func 
(*TypedValue_DistributionValue) isTypedValue_Value() {} + +func (m *TypedValue) GetValue() isTypedValue_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *TypedValue) GetBoolValue() bool { + if x, ok := m.GetValue().(*TypedValue_BoolValue); ok { + return x.BoolValue + } + return false +} + +func (m *TypedValue) GetInt64Value() int64 { + if x, ok := m.GetValue().(*TypedValue_Int64Value); ok { + return x.Int64Value + } + return 0 +} + +func (m *TypedValue) GetDoubleValue() float64 { + if x, ok := m.GetValue().(*TypedValue_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *TypedValue) GetStringValue() string { + if x, ok := m.GetValue().(*TypedValue_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *TypedValue) GetDistributionValue() *distribution.Distribution { + if x, ok := m.GetValue().(*TypedValue_DistributionValue); ok { + return x.DistributionValue + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*TypedValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TypedValue_OneofMarshaler, _TypedValue_OneofUnmarshaler, _TypedValue_OneofSizer, []interface{}{ + (*TypedValue_BoolValue)(nil), + (*TypedValue_Int64Value)(nil), + (*TypedValue_DoubleValue)(nil), + (*TypedValue_StringValue)(nil), + (*TypedValue_DistributionValue)(nil), + } +} + +func _TypedValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TypedValue) + // value + switch x := m.Value.(type) { + case *TypedValue_BoolValue: + t := uint64(0) + if x.BoolValue { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *TypedValue_Int64Value: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int64Value)) + case *TypedValue_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *TypedValue_StringValue: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *TypedValue_DistributionValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DistributionValue); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TypedValue.Value has unexpected type %T", x) + } + return nil +} + +func _TypedValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TypedValue) + switch tag { + case 1: // value.bool_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &TypedValue_BoolValue{x != 0} + return true, err + case 2: // value.int64_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &TypedValue_Int64Value{int64(x)} + return true, err + case 3: // value.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Value = &TypedValue_DoubleValue{math.Float64frombits(x)} + return true, err + case 4: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &TypedValue_StringValue{x} + return true, err + case 5: // value.distribution_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(distribution.Distribution) + err := 
b.DecodeMessage(msg) + m.Value = &TypedValue_DistributionValue{msg} + return true, err + default: + return false, nil + } +} + +func _TypedValue_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TypedValue) + // value + switch x := m.Value.(type) { + case *TypedValue_BoolValue: + n += 1 // tag and wire + n += 1 + case *TypedValue_Int64Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int64Value)) + case *TypedValue_DoubleValue: + n += 1 // tag and wire + n += 8 + case *TypedValue_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *TypedValue_DistributionValue: + s := proto.Size(x.DistributionValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A time interval extending just after a start time through an end time. +// If the start time is the same as the end time, then the interval +// represents a single point in time. +type TimeInterval struct { + // Required. The end of the time interval. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Optional. The beginning of the time interval. The default value + // for the start time is the end time. The start time must not be + // later than the end time. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeInterval) Reset() { *m = TimeInterval{} } +func (m *TimeInterval) String() string { return proto.CompactTextString(m) } +func (*TimeInterval) ProtoMessage() {} +func (*TimeInterval) Descriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{1} +} +func (m *TimeInterval) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeInterval.Unmarshal(m, b) +} +func (m *TimeInterval) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeInterval.Marshal(b, m, deterministic) +} +func (dst *TimeInterval) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeInterval.Merge(dst, src) +} +func (m *TimeInterval) XXX_Size() int { + return xxx_messageInfo_TimeInterval.Size(m) +} +func (m *TimeInterval) XXX_DiscardUnknown() { + xxx_messageInfo_TimeInterval.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeInterval proto.InternalMessageInfo + +func (m *TimeInterval) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *TimeInterval) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +// Describes how to combine multiple time series to provide different views of +// the data. Aggregation consists of an alignment step on individual time +// series (`alignment_period` and `per_series_aligner`) followed by an optional +// reduction step of the data across the aligned time series +// (`cross_series_reducer` and `group_by_fields`). For more details, see +// [Aggregation](/monitoring/api/learn_more#aggregation). +type Aggregation struct { + // The alignment period for per-[time series][google.monitoring.v3.TimeSeries] + // alignment. If present, `alignmentPeriod` must be at least 60 + // seconds. After per-time series alignment, each time series will + // contain data points only on the period boundaries. 
If + // `perSeriesAligner` is not specified or equals `ALIGN_NONE`, then + // this field is ignored. If `perSeriesAligner` is specified and + // does not equal `ALIGN_NONE`, then this field must be defined; + // otherwise an error is returned. + AlignmentPeriod *duration.Duration `protobuf:"bytes,1,opt,name=alignment_period,json=alignmentPeriod,proto3" json:"alignment_period,omitempty"` + // The approach to be used to align individual time series. Not all + // alignment functions may be applied to all time series, depending + // on the metric type and value type of the original time + // series. Alignment may change the metric type or the value type of + // the time series. + // + // Time series data must be aligned in order to perform cross-time + // series reduction. If `crossSeriesReducer` is specified, then + // `perSeriesAligner` must be specified and not equal `ALIGN_NONE` + // and `alignmentPeriod` must be specified; otherwise, an error is + // returned. + PerSeriesAligner Aggregation_Aligner `protobuf:"varint,2,opt,name=per_series_aligner,json=perSeriesAligner,proto3,enum=google.monitoring.v3.Aggregation_Aligner" json:"per_series_aligner,omitempty"` + // The approach to be used to combine time series. Not all reducer + // functions may be applied to all time series, depending on the + // metric type and the value type of the original time + // series. Reduction may change the metric type of value type of the + // time series. + // + // Time series data must be aligned in order to perform cross-time + // series reduction. If `crossSeriesReducer` is specified, then + // `perSeriesAligner` must be specified and not equal `ALIGN_NONE` + // and `alignmentPeriod` must be specified; otherwise, an error is + // returned. + CrossSeriesReducer Aggregation_Reducer `protobuf:"varint,4,opt,name=cross_series_reducer,json=crossSeriesReducer,proto3,enum=google.monitoring.v3.Aggregation_Reducer" json:"cross_series_reducer,omitempty"` + // The set of fields to preserve when `crossSeriesReducer` is + // specified. The `groupByFields` determine how the time series are + // partitioned into subsets prior to applying the aggregation + // function. Each subset contains time series that have the same + // value for each of the grouping fields. Each individual time + // series is a member of exactly one subset. The + // `crossSeriesReducer` is applied to each subset of time series. + // It is not possible to reduce across different resource types, so + // this field implicitly contains `resource.type`. Fields not + // specified in `groupByFields` are aggregated away. If + // `groupByFields` is not specified and all the time series have + // the same resource type, then the time series are aggregated into + // a single output time series. If `crossSeriesReducer` is not + // defined, this field is ignored. 
+ GroupByFields []string `protobuf:"bytes,5,rep,name=group_by_fields,json=groupByFields,proto3" json:"group_by_fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Aggregation) Reset() { *m = Aggregation{} } +func (m *Aggregation) String() string { return proto.CompactTextString(m) } +func (*Aggregation) ProtoMessage() {} +func (*Aggregation) Descriptor() ([]byte, []int) { + return fileDescriptor_common_a64b14621f6e1d51, []int{2} +} +func (m *Aggregation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Aggregation.Unmarshal(m, b) +} +func (m *Aggregation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Aggregation.Marshal(b, m, deterministic) +} +func (dst *Aggregation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Aggregation.Merge(dst, src) +} +func (m *Aggregation) XXX_Size() int { + return xxx_messageInfo_Aggregation.Size(m) +} +func (m *Aggregation) XXX_DiscardUnknown() { + xxx_messageInfo_Aggregation.DiscardUnknown(m) +} + +var xxx_messageInfo_Aggregation proto.InternalMessageInfo + +func (m *Aggregation) GetAlignmentPeriod() *duration.Duration { + if m != nil { + return m.AlignmentPeriod + } + return nil +} + +func (m *Aggregation) GetPerSeriesAligner() Aggregation_Aligner { + if m != nil { + return m.PerSeriesAligner + } + return Aggregation_ALIGN_NONE +} + +func (m *Aggregation) GetCrossSeriesReducer() Aggregation_Reducer { + if m != nil { + return m.CrossSeriesReducer + } + return Aggregation_REDUCE_NONE +} + +func (m *Aggregation) GetGroupByFields() []string { + if m != nil { + return m.GroupByFields + } + return nil +} + +func init() { + proto.RegisterType((*TypedValue)(nil), "google.monitoring.v3.TypedValue") + proto.RegisterType((*TimeInterval)(nil), "google.monitoring.v3.TimeInterval") + proto.RegisterType((*Aggregation)(nil), "google.monitoring.v3.Aggregation") + proto.RegisterEnum("google.monitoring.v3.ComparisonType", ComparisonType_name, ComparisonType_value) + proto.RegisterEnum("google.monitoring.v3.ServiceTier", ServiceTier_name, ServiceTier_value) + proto.RegisterEnum("google.monitoring.v3.Aggregation_Aligner", Aggregation_Aligner_name, Aggregation_Aligner_value) + proto.RegisterEnum("google.monitoring.v3.Aggregation_Reducer", Aggregation_Reducer_name, Aggregation_Reducer_value) +} + +func init() { + proto.RegisterFile("google/monitoring/v3/common.proto", fileDescriptor_common_a64b14621f6e1d51) +} + +var fileDescriptor_common_a64b14621f6e1d51 = []byte{ + // 957 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x95, 0xc1, 0x6e, 0xe3, 0x44, + 0x18, 0xc7, 0xe3, 0x64, 0xdb, 0x34, 0x9f, 0xdb, 0x66, 0x3a, 0xdb, 0xed, 0x86, 0x68, 0x61, 0xb3, + 0x45, 0x42, 0x61, 0x0f, 0x4e, 0xd5, 0x12, 0xa4, 0x0a, 0x09, 0xc9, 0x75, 0xa6, 0xad, 0xa5, 0xc4, + 0x09, 0x13, 0xa7, 0x54, 0x50, 0xc9, 0x72, 0x9a, 0x59, 0xcb, 0x52, 0xe2, 0xb1, 0x6c, 0xa7, 0x52, + 0x6f, 0xdc, 0x79, 0x07, 0x2e, 0xdc, 0xb8, 0xf1, 0x1a, 0x3c, 0x0c, 0x17, 0x5e, 0x00, 0x79, 0xc6, + 0x59, 0x3b, 0x21, 0x08, 0x8e, 0xdf, 0xef, 0xff, 0xff, 0xbe, 0x99, 0xf9, 0x8f, 0x35, 0x86, 0x77, + 0x1e, 0xe7, 0xde, 0x9c, 0x75, 0x16, 0x3c, 0xf0, 0x13, 0x1e, 0xf9, 0x81, 0xd7, 0x79, 0xba, 0xe8, + 0x3c, 0xf2, 0xc5, 0x82, 0x07, 0x5a, 0x18, 0xf1, 0x84, 0xe3, 0x63, 0x69, 0xd1, 0x72, 0x8b, 0xf6, + 0x74, 0xd1, 0x7c, 0x93, 0x35, 0xba, 0xa1, 0xdf, 0x71, 0x83, 0x80, 0x27, 0x6e, 0xe2, 0xf3, 0x20, + 0x96, 0x3d, 0xcd, 0x4f, 0x0b, 0xea, 0xcc, 0x8f, 0x93, 0xc8, 
0x9f, 0x2e, 0x53, 0x3d, 0x93, 0x3f, + 0xcb, 0x64, 0x51, 0x4d, 0x97, 0x1f, 0x3a, 0xb3, 0x65, 0xe4, 0x16, 0xf4, 0xb7, 0x9b, 0x7a, 0xe2, + 0x2f, 0x58, 0x9c, 0xb8, 0x8b, 0x50, 0x1a, 0x4e, 0xff, 0x54, 0x00, 0xec, 0xe7, 0x90, 0xcd, 0xee, + 0xdc, 0xf9, 0x92, 0xe1, 0xb7, 0x00, 0x53, 0xce, 0xe7, 0xce, 0x53, 0x5a, 0x35, 0x94, 0x96, 0xd2, + 0xde, 0xbb, 0x2d, 0xd1, 0x5a, 0xca, 0xa4, 0xe1, 0x1d, 0xa8, 0x7e, 0x90, 0x7c, 0xfd, 0x55, 0xe6, + 0x28, 0xb7, 0x94, 0x76, 0xe5, 0xb6, 0x44, 0x41, 0x40, 0x69, 0xf9, 0x1c, 0xf6, 0x67, 0x7c, 0x39, + 0x9d, 0xb3, 0xcc, 0x53, 0x69, 0x29, 0x6d, 0xe5, 0xb6, 0x44, 0x55, 0x49, 0x3f, 0x9a, 0xd2, 0xc3, + 0x04, 0x5e, 0x66, 0x7a, 0xd1, 0x52, 0xda, 0xb5, 0xd4, 0x24, 0xa9, 0x34, 0x99, 0x80, 0x8b, 0x67, + 0xce, 0xac, 0x3b, 0x2d, 0xa5, 0xad, 0x9e, 0x37, 0xb4, 0x2c, 0x4d, 0x37, 0xf4, 0xb5, 0x5e, 0xc1, + 0x75, 0x5b, 0xa2, 0x47, 0xc5, 0x2e, 0x31, 0xea, 0xaa, 0x0a, 0x3b, 0xa2, 0xfb, 0xf4, 0x27, 0x05, + 0xf6, 0x6d, 0x7f, 0xc1, 0xcc, 0x20, 0x61, 0xd1, 0x93, 0x3b, 0xc7, 0x5d, 0xd8, 0x63, 0xc1, 0xcc, + 0x49, 0x83, 0x11, 0xc7, 0x51, 0xcf, 0x9b, 0xab, 0xd1, 0xab, 0xd4, 0x34, 0x7b, 0x95, 0x1a, 0xad, + 0xb2, 0x60, 0x96, 0x56, 0xf8, 0x12, 0x20, 0x4e, 0xdc, 0x28, 0x91, 0x8d, 0xca, 0x7f, 0x36, 0xd6, + 0x84, 0x3b, 0xad, 0x4f, 0xff, 0xaa, 0x82, 0xaa, 0x7b, 0x5e, 0xc4, 0x3c, 0x71, 0x55, 0xb8, 0x07, + 0xc8, 0x9d, 0xfb, 0x5e, 0xb0, 0x60, 0x41, 0xe2, 0x84, 0x2c, 0xf2, 0xf9, 0x2c, 0x1b, 0xf8, 0xc9, + 0x3f, 0x06, 0xf6, 0xb2, 0xfb, 0xa5, 0xf5, 0x8f, 0x2d, 0x23, 0xd1, 0x81, 0xbf, 0x07, 0x1c, 0xb2, + 0xc8, 0x89, 0x59, 0xe4, 0xb3, 0xd8, 0x11, 0x2a, 0x8b, 0xc4, 0x89, 0x0e, 0xcf, 0xbf, 0xd4, 0xb6, + 0x7d, 0x7a, 0x5a, 0x61, 0x13, 0x9a, 0x2e, 0x1b, 0x28, 0x0a, 0x59, 0x34, 0x16, 0x33, 0x32, 0x82, + 0x7f, 0x84, 0xe3, 0xc7, 0x88, 0xc7, 0xf1, 0x6a, 0x74, 0xc4, 0x66, 0xcb, 0x47, 0x16, 0x89, 0x2b, + 0xfb, 0x5f, 0xa3, 0xa9, 0x6c, 0xa0, 0x58, 0x8c, 0x91, 0xc3, 0x33, 0x86, 0xbf, 0x80, 0xba, 0x17, + 0xf1, 0x65, 0xe8, 0x4c, 0x9f, 0x9d, 0x0f, 0x3e, 0x9b, 0xcf, 0xe2, 0xc6, 0x4e, 0xab, 0xd2, 0xae, + 0xd1, 0x03, 0x81, 0xaf, 0x9e, 0xaf, 0x05, 0x3c, 0xfd, 0xb9, 0x02, 0xd5, 0xd5, 0x86, 0x0e, 0x01, + 0xf4, 0xbe, 0x79, 0x63, 0x39, 0xd6, 0xd0, 0x22, 0xa8, 0x84, 0xeb, 0xa0, 0xca, 0xba, 0x47, 0xfa, + 0xb6, 0x8e, 0x94, 0xdc, 0x40, 0x75, 0x9b, 0xa0, 0x32, 0x7e, 0x05, 0x47, 0xb2, 0x36, 0x2d, 0x9b, + 0xd0, 0xd1, 0xb0, 0x9f, 0xe2, 0x0a, 0x3e, 0x06, 0x94, 0xcd, 0x21, 0xf7, 0xb6, 0x33, 0xec, 0xf7, + 0x08, 0x45, 0x2f, 0xf0, 0x01, 0xd4, 0x24, 0x1d, 0x98, 0x16, 0x82, 0x42, 0xa9, 0xdf, 0x23, 0x35, + 0x1f, 0x3d, 0x20, 0xba, 0x85, 0xf6, 0xf3, 0xb5, 0x8d, 0xe1, 0xc4, 0xb2, 0xd1, 0x41, 0xee, 0x1f, + 0x4f, 0x06, 0xe8, 0x10, 0x23, 0xd8, 0xcf, 0x4a, 0xbb, 0xd7, 0x23, 0x77, 0xa8, 0x9e, 0xaf, 0x2a, + 0x3a, 0x1c, 0x9b, 0x4e, 0x08, 0x42, 0xf9, 0x16, 0x25, 0xbd, 0xd6, 0xfb, 0x63, 0x82, 0x1a, 0xf8, + 0x35, 0xbc, 0x94, 0xf8, 0x9a, 0xea, 0x86, 0x6d, 0x0e, 0x2d, 0xe9, 0x3f, 0xca, 0x85, 0x11, 0xa1, + 0x06, 0xb1, 0x6c, 0xb3, 0x4f, 0x9c, 0xcb, 0x4b, 0x84, 0xb7, 0x0b, 0x5d, 0xf4, 0x72, 0xab, 0xd0, + 0x3d, 0x43, 0xc7, 0x5b, 0x85, 0xb3, 0x2e, 0x7a, 0x85, 0x1b, 0x70, 0xbc, 0x26, 0x38, 0xc6, 0xad, + 0x6e, 0xdd, 0x10, 0xf4, 0xfa, 0xf4, 0xf7, 0x32, 0x54, 0x57, 0x37, 0x58, 0x07, 0x95, 0x92, 0xde, + 0xc4, 0x20, 0x85, 0xeb, 0xc8, 0x80, 0xc8, 0x48, 0x5c, 0xc7, 0x0a, 0x98, 0x16, 0x2a, 0x17, 0x6b, + 0xfd, 0x1e, 0x55, 0x0a, 0x75, 0x9a, 0xd9, 0x0b, 0x7c, 0x04, 0x07, 0xab, 0x5a, 0x86, 0xb6, 0x93, + 0xc6, 0x98, 0x21, 0x99, 0xf3, 0x6e, 0x1a, 0x58, 0x91, 0xc8, 0x5c, 0xaa, 0xf8, 0x04, 0xf0, 0x1a, + 0x96, 0x41, 0xd6, 0xd3, 0xb3, 0x64, 0x7c, 0x3d, 0xc9, 0xbd, 0x82, 0xb2, 0x1e, 0x65, 
0xed, 0x5f, + 0x94, 0x2e, 0x82, 0xed, 0x4a, 0xf7, 0x0c, 0xa9, 0xdb, 0x95, 0xb3, 0x2e, 0xda, 0x7f, 0xff, 0x8b, + 0x02, 0x87, 0x06, 0x5f, 0x84, 0x6e, 0xe4, 0xc7, 0x3c, 0x48, 0xdf, 0x5c, 0xdc, 0x84, 0x13, 0x63, + 0x38, 0x18, 0xe9, 0xd4, 0x1c, 0x0f, 0x2d, 0x67, 0x62, 0x8d, 0x47, 0xc4, 0x30, 0xaf, 0x4d, 0xd2, + 0x43, 0xa5, 0x34, 0x84, 0x82, 0x76, 0x63, 0x23, 0x65, 0x13, 0xa5, 0x5f, 0xf6, 0x3a, 0xea, 0xdb, + 0xa8, 0xb2, 0x89, 0x88, 0x0c, 0xb4, 0x80, 0xc8, 0x77, 0x68, 0x67, 0x03, 0x59, 0x04, 0xed, 0xbe, + 0x77, 0x41, 0x1d, 0xb3, 0xe8, 0xc9, 0x7f, 0x64, 0xb6, 0xcf, 0x22, 0xfc, 0x06, 0x1a, 0x63, 0x42, + 0xef, 0x4c, 0x83, 0x38, 0xb6, 0x49, 0xe8, 0xc6, 0xf6, 0x4e, 0x00, 0xaf, 0xa9, 0x57, 0xfa, 0xd8, + 0x34, 0x90, 0x92, 0x9e, 0x7f, 0x8d, 0x8f, 0x28, 0x19, 0x98, 0x93, 0x01, 0x2a, 0x37, 0xcb, 0x0d, + 0xe5, 0xea, 0x57, 0x05, 0x1a, 0x8f, 0x7c, 0xb1, 0xf5, 0xc9, 0xb8, 0x52, 0x0d, 0xf1, 0xb3, 0x1c, + 0xa5, 0x4f, 0xdd, 0x48, 0xf9, 0xe1, 0xdb, 0xcc, 0xe4, 0xf1, 0xb9, 0x1b, 0x78, 0x1a, 0x8f, 0xbc, + 0x8e, 0xc7, 0x02, 0xf1, 0x10, 0x76, 0xa4, 0xe4, 0x86, 0x7e, 0xbc, 0xfe, 0xbf, 0xfd, 0x26, 0xaf, + 0x7e, 0x2b, 0x37, 0x6f, 0xe4, 0x00, 0x63, 0xce, 0x97, 0x33, 0x6d, 0x90, 0xaf, 0x75, 0x77, 0xf1, + 0xc7, 0x4a, 0x7c, 0x10, 0xe2, 0x43, 0x2e, 0x3e, 0xdc, 0x5d, 0x4c, 0x77, 0xc5, 0x22, 0x17, 0x7f, + 0x07, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x78, 0xd9, 0x96, 0xd3, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/dropped_labels.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/dropped_labels.pb.go new file mode 100644 index 0000000..f633357 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/dropped_labels.pb.go @@ -0,0 +1,102 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/dropped_labels.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A set of (label, value) pairs which were dropped during aggregation, attached +// to google.api.Distribution.Exemplars in google.api.Distribution values during +// aggregation. +// +// These values are used in combination with the label values that remain on the +// aggregated Distribution timeseries to construct the full label set for the +// exemplar values. The resulting full label set may be used to identify the +// specific task/job/instance (for example) which may be contributing to a +// long-tail, while allowing the storage savings of only storing aggregated +// distribution values for a large group. +// +// Note that there are no guarantees on ordering of the labels from +// exemplar-to-exemplar and from distribution-to-distribution in the same +// stream, and there may be duplicates. It is up to clients to resolve any +// ambiguities. +type DroppedLabels struct { + // Map from label to its value, for all labels dropped in any aggregation. 
+ Label map[string]string `protobuf:"bytes,1,rep,name=label,proto3" json:"label,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DroppedLabels) Reset() { *m = DroppedLabels{} } +func (m *DroppedLabels) String() string { return proto.CompactTextString(m) } +func (*DroppedLabels) ProtoMessage() {} +func (*DroppedLabels) Descriptor() ([]byte, []int) { + return fileDescriptor_dropped_labels_1340d25b5d2a29b0, []int{0} +} +func (m *DroppedLabels) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DroppedLabels.Unmarshal(m, b) +} +func (m *DroppedLabels) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DroppedLabels.Marshal(b, m, deterministic) +} +func (dst *DroppedLabels) XXX_Merge(src proto.Message) { + xxx_messageInfo_DroppedLabels.Merge(dst, src) +} +func (m *DroppedLabels) XXX_Size() int { + return xxx_messageInfo_DroppedLabels.Size(m) +} +func (m *DroppedLabels) XXX_DiscardUnknown() { + xxx_messageInfo_DroppedLabels.DiscardUnknown(m) +} + +var xxx_messageInfo_DroppedLabels proto.InternalMessageInfo + +func (m *DroppedLabels) GetLabel() map[string]string { + if m != nil { + return m.Label + } + return nil +} + +func init() { + proto.RegisterType((*DroppedLabels)(nil), "google.monitoring.v3.DroppedLabels") + proto.RegisterMapType((map[string]string)(nil), "google.monitoring.v3.DroppedLabels.LabelEntry") +} + +func init() { + proto.RegisterFile("google/monitoring/v3/dropped_labels.proto", fileDescriptor_dropped_labels_1340d25b5d2a29b0) +} + +var fileDescriptor_dropped_labels_1340d25b5d2a29b0 = []byte{ + // 235 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0x31, 0x4b, 0x43, 0x31, + 0x10, 0xc7, 0x49, 0x4b, 0x05, 0x4f, 0x04, 0x09, 0x1d, 0x42, 0x71, 0x28, 0x4e, 0x75, 0xb9, 0x80, + 0x6f, 0x29, 0x0a, 0x0e, 0xa5, 0x6e, 0x0e, 0xa5, 0xa3, 0x8b, 0xa4, 0x36, 0x84, 0x60, 0x7a, 0x17, + 0xd2, 0xf8, 0xa0, 0x9f, 0xc2, 0xaf, 0x5c, 0x9a, 0x3c, 0x78, 0x3c, 0xe8, 0x94, 0xdc, 0xe5, 0xf7, + 0xff, 0x1d, 0x17, 0x78, 0x76, 0xcc, 0x2e, 0x58, 0x7d, 0x60, 0xf2, 0x99, 0x93, 0x27, 0xa7, 0xdb, + 0x46, 0xef, 0x13, 0xc7, 0x68, 0xf7, 0xdf, 0xc1, 0xec, 0x6c, 0x38, 0x62, 0x4c, 0x9c, 0x59, 0x4e, + 0x2b, 0x8a, 0x3d, 0x8a, 0x6d, 0x33, 0x7b, 0xec, 0x04, 0x26, 0x7a, 0x6d, 0x88, 0x38, 0x9b, 0xec, + 0x99, 0xba, 0xcc, 0xd3, 0xbf, 0x80, 0xfb, 0x75, 0x95, 0x7d, 0x16, 0x97, 0x5c, 0xc3, 0xa4, 0x58, + 0x95, 0x98, 0x8f, 0x17, 0x77, 0x2f, 0x88, 0xd7, 0xac, 0x38, 0xc8, 0x60, 0x39, 0x3e, 0x28, 0xa7, + 0xd3, 0xb6, 0x86, 0x67, 0x4b, 0x80, 0xbe, 0x29, 0x1f, 0x60, 0xfc, 0x6b, 0x4f, 0x4a, 0xcc, 0xc5, + 0xe2, 0x76, 0x7b, 0xb9, 0xca, 0x29, 0x4c, 0x5a, 0x13, 0xfe, 0xac, 0x1a, 0x95, 0x5e, 0x2d, 0x5e, + 0x47, 0x4b, 0xb1, 0x8a, 0xa0, 0x7e, 0xf8, 0x70, 0x75, 0xea, 0x4a, 0x0e, 0xc6, 0x6e, 0x2e, 0x1b, + 0x6c, 0xc4, 0xd7, 0x7b, 0xc7, 0x3a, 0x0e, 0x86, 0x1c, 0x72, 0x72, 0xda, 0x59, 0x2a, 0xfb, 0xe9, + 0xfa, 0x64, 0xa2, 0x3f, 0x0e, 0x7f, 0xf0, 0xad, 0xaf, 0x76, 0x37, 0x05, 0x6d, 0xce, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x41, 0xd0, 0x8d, 0xe9, 0x6b, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group.pb.go new file mode 100644 index 0000000..014ab23 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group.pb.go @@ -0,0 +1,157 @@ +// Code 
generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/group.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The description of a dynamic collection of monitored resources. Each group +// has a filter that is matched against monitored resources and their associated +// metadata. If a group's filter matches an available monitored resource, then +// that resource is a member of that group. Groups can contain any number of +// monitored resources, and each monitored resource can be a member of any +// number of groups. +// +// Groups can be nested in parent-child hierarchies. The `parentName` field +// identifies an optional parent for each group. If a group has a parent, then +// the only monitored resources available to be matched by the group's filter +// are the resources contained in the parent group. In other words, a group +// contains the monitored resources that match its filter and the filters of all +// the group's ancestors. A group without a parent can contain any monitored +// resource. +// +// For example, consider an infrastructure running a set of instances with two +// user-defined tags: `"environment"` and `"role"`. A parent group has a filter, +// `environment="production"`. A child of that parent group has a filter, +// `role="transcoder"`. The parent group contains all instances in the +// production environment, regardless of their roles. The child group contains +// instances that have the transcoder role *and* are in the production +// environment. +// +// The monitored resources contained in a group can change at any moment, +// depending on what resources exist and what filters are associated with the +// group and its ancestors. +type Group struct { + // Output only. The name of this group. The format is + // `"projects/{project_id_or_number}/groups/{group_id}"`. + // When creating a group, this field is ignored and a new name is created + // consisting of the project specified in the call to `CreateGroup` + // and a unique `{group_id}` that is generated automatically. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A user-assigned name for this group, used only for display purposes. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The name of the group's parent, if it has one. + // The format is `"projects/{project_id_or_number}/groups/{group_id}"`. + // For groups with no parent, `parentName` is the empty string, `""`. + ParentName string `protobuf:"bytes,3,opt,name=parent_name,json=parentName,proto3" json:"parent_name,omitempty"` + // The filter used to determine which monitored resources belong to this + // group. + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // If true, the members of this group are considered to be a cluster. + // The system can perform additional analysis on groups that are clusters. 
+ IsCluster bool `protobuf:"varint,6,opt,name=is_cluster,json=isCluster,proto3" json:"is_cluster,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Group) Reset() { *m = Group{} } +func (m *Group) String() string { return proto.CompactTextString(m) } +func (*Group) ProtoMessage() {} +func (*Group) Descriptor() ([]byte, []int) { + return fileDescriptor_group_cf79a7f2e933706a, []int{0} +} +func (m *Group) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Group.Unmarshal(m, b) +} +func (m *Group) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Group.Marshal(b, m, deterministic) +} +func (dst *Group) XXX_Merge(src proto.Message) { + xxx_messageInfo_Group.Merge(dst, src) +} +func (m *Group) XXX_Size() int { + return xxx_messageInfo_Group.Size(m) +} +func (m *Group) XXX_DiscardUnknown() { + xxx_messageInfo_Group.DiscardUnknown(m) +} + +var xxx_messageInfo_Group proto.InternalMessageInfo + +func (m *Group) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Group) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Group) GetParentName() string { + if m != nil { + return m.ParentName + } + return "" +} + +func (m *Group) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *Group) GetIsCluster() bool { + if m != nil { + return m.IsCluster + } + return false +} + +func init() { + proto.RegisterType((*Group)(nil), "google.monitoring.v3.Group") +} + +func init() { + proto.RegisterFile("google/monitoring/v3/group.proto", fileDescriptor_group_cf79a7f2e933706a) +} + +var fileDescriptor_group_cf79a7f2e933706a = []byte{ + // 261 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x90, 0xcf, 0x4a, 0x2b, 0x31, + 0x14, 0x87, 0x49, 0xef, 0xed, 0x60, 0x4f, 0x5d, 0x0d, 0x22, 0x83, 0x20, 0x8e, 0xae, 0xba, 0xca, + 0x2c, 0xb2, 0x14, 0x5c, 0xb4, 0x8b, 0xae, 0x94, 0xd2, 0x45, 0x17, 0x32, 0x50, 0x62, 0x1b, 0x43, + 0x20, 0x93, 0x13, 0x92, 0x99, 0x82, 0x2f, 0xe2, 0x03, 0xb8, 0xf4, 0x51, 0x7c, 0x2a, 0x99, 0x93, + 0x91, 0x41, 0x70, 0x97, 0xf3, 0xfb, 0x3e, 0x72, 0xfe, 0x40, 0xa9, 0x11, 0xb5, 0x55, 0x55, 0x83, + 0xce, 0xb4, 0x18, 0x8c, 0xd3, 0xd5, 0x49, 0x54, 0x3a, 0x60, 0xe7, 0xb9, 0x0f, 0xd8, 0x62, 0x7e, + 0x91, 0x0c, 0x3e, 0x1a, 0xfc, 0x24, 0xee, 0xde, 0x19, 0x4c, 0xd7, 0xbd, 0x95, 0xe7, 0xf0, 0xdf, + 0xc9, 0x46, 0x15, 0xac, 0x64, 0x8b, 0xd9, 0x96, 0xde, 0xf9, 0x2d, 0x9c, 0x1f, 0x4d, 0xf4, 0x56, + 0xbe, 0xed, 0x89, 0x4d, 0x88, 0xcd, 0x87, 0xec, 0xa9, 0x57, 0x6e, 0x60, 0xee, 0x65, 0x50, 0xae, + 0x4d, 0xc6, 0x3f, 0x32, 0x20, 0x45, 0x24, 0x5c, 0x42, 0xf6, 0x6a, 0x6c, 0xab, 0x42, 0x31, 0x25, + 0x36, 0x54, 0xf9, 0x35, 0x80, 0x89, 0xfb, 0x83, 0xed, 0x62, 0xcf, 0xb2, 0x92, 0x2d, 0xce, 0xb6, + 0x33, 0x13, 0x57, 0x29, 0x58, 0x7e, 0x30, 0x28, 0x0e, 0xd8, 0xf0, 0xbf, 0xa6, 0x5e, 0x02, 0x8d, + 0xbc, 0xe9, 0xf7, 0xda, 0xb0, 0xe7, 0x87, 0xc1, 0xd1, 0x68, 0xa5, 0xd3, 0x1c, 0x83, 0xae, 0xb4, + 0x72, 0xb4, 0x75, 0x95, 0x90, 0xf4, 0x26, 0xfe, 0x3e, 0xcd, 0xfd, 0x58, 0x7d, 0x4e, 0xae, 0xd6, + 0xe9, 0x83, 0x95, 0xc5, 0xee, 0xc8, 0x1f, 0xc7, 0x56, 0x3b, 0xf1, 0xf5, 0x03, 0x6b, 0x82, 0xf5, + 0x08, 0xeb, 0x9d, 0x78, 0xc9, 0xa8, 0x89, 0xf8, 0x0e, 0x00, 0x00, 0xff, 0xff, 0x95, 0xd1, 0xa1, + 0x34, 0x7e, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group_service.pb.go 
b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group_service.pb.go new file mode 100644 index 0000000..40e6309 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/group_service.pb.go @@ -0,0 +1,941 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/group_service.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The `ListGroup` request. +type ListGroupsRequest struct { + // The project whose groups are to be listed. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + // An optional filter consisting of a single group name. The filters limit + // the groups returned based on their parent-child relationship with the + // specified group. If no filter is specified, all groups are returned. + // + // Types that are valid to be assigned to Filter: + // *ListGroupsRequest_ChildrenOfGroup + // *ListGroupsRequest_AncestorsOfGroup + // *ListGroupsRequest_DescendantsOfGroup + Filter isListGroupsRequest_Filter `protobuf_oneof:"filter"` + // A positive number that is the maximum number of results to return. + PageSize int32 `protobuf:"varint,5,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. 
+ PageToken string `protobuf:"bytes,6,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupsRequest) Reset() { *m = ListGroupsRequest{} } +func (m *ListGroupsRequest) String() string { return proto.CompactTextString(m) } +func (*ListGroupsRequest) ProtoMessage() {} +func (*ListGroupsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{0} +} +func (m *ListGroupsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupsRequest.Unmarshal(m, b) +} +func (m *ListGroupsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupsRequest.Marshal(b, m, deterministic) +} +func (dst *ListGroupsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupsRequest.Merge(dst, src) +} +func (m *ListGroupsRequest) XXX_Size() int { + return xxx_messageInfo_ListGroupsRequest.Size(m) +} +func (m *ListGroupsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupsRequest proto.InternalMessageInfo + +func (m *ListGroupsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isListGroupsRequest_Filter interface { + isListGroupsRequest_Filter() +} + +type ListGroupsRequest_ChildrenOfGroup struct { + ChildrenOfGroup string `protobuf:"bytes,2,opt,name=children_of_group,json=childrenOfGroup,proto3,oneof"` +} + +type ListGroupsRequest_AncestorsOfGroup struct { + AncestorsOfGroup string `protobuf:"bytes,3,opt,name=ancestors_of_group,json=ancestorsOfGroup,proto3,oneof"` +} + +type ListGroupsRequest_DescendantsOfGroup struct { + DescendantsOfGroup string `protobuf:"bytes,4,opt,name=descendants_of_group,json=descendantsOfGroup,proto3,oneof"` +} + +func (*ListGroupsRequest_ChildrenOfGroup) isListGroupsRequest_Filter() {} + +func (*ListGroupsRequest_AncestorsOfGroup) isListGroupsRequest_Filter() {} + +func (*ListGroupsRequest_DescendantsOfGroup) isListGroupsRequest_Filter() {} + +func (m *ListGroupsRequest) GetFilter() isListGroupsRequest_Filter { + if m != nil { + return m.Filter + } + return nil +} + +func (m *ListGroupsRequest) GetChildrenOfGroup() string { + if x, ok := m.GetFilter().(*ListGroupsRequest_ChildrenOfGroup); ok { + return x.ChildrenOfGroup + } + return "" +} + +func (m *ListGroupsRequest) GetAncestorsOfGroup() string { + if x, ok := m.GetFilter().(*ListGroupsRequest_AncestorsOfGroup); ok { + return x.AncestorsOfGroup + } + return "" +} + +func (m *ListGroupsRequest) GetDescendantsOfGroup() string { + if x, ok := m.GetFilter().(*ListGroupsRequest_DescendantsOfGroup); ok { + return x.DescendantsOfGroup + } + return "" +} + +func (m *ListGroupsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListGroupsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ListGroupsRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ListGroupsRequest_OneofMarshaler, _ListGroupsRequest_OneofUnmarshaler, _ListGroupsRequest_OneofSizer, []interface{}{ + (*ListGroupsRequest_ChildrenOfGroup)(nil), + (*ListGroupsRequest_AncestorsOfGroup)(nil), + (*ListGroupsRequest_DescendantsOfGroup)(nil), + } +} + +func _ListGroupsRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ListGroupsRequest) + // filter + switch x := m.Filter.(type) { + case *ListGroupsRequest_ChildrenOfGroup: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ChildrenOfGroup) + case *ListGroupsRequest_AncestorsOfGroup: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AncestorsOfGroup) + case *ListGroupsRequest_DescendantsOfGroup: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.DescendantsOfGroup) + case nil: + default: + return fmt.Errorf("ListGroupsRequest.Filter has unexpected type %T", x) + } + return nil +} + +func _ListGroupsRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ListGroupsRequest) + switch tag { + case 2: // filter.children_of_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &ListGroupsRequest_ChildrenOfGroup{x} + return true, err + case 3: // filter.ancestors_of_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &ListGroupsRequest_AncestorsOfGroup{x} + return true, err + case 4: // filter.descendants_of_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Filter = &ListGroupsRequest_DescendantsOfGroup{x} + return true, err + default: + return false, nil + } +} + +func _ListGroupsRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ListGroupsRequest) + // filter + switch x := m.Filter.(type) { + case *ListGroupsRequest_ChildrenOfGroup: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ChildrenOfGroup))) + n += len(x.ChildrenOfGroup) + case *ListGroupsRequest_AncestorsOfGroup: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AncestorsOfGroup))) + n += len(x.AncestorsOfGroup) + case *ListGroupsRequest_DescendantsOfGroup: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.DescendantsOfGroup))) + n += len(x.DescendantsOfGroup) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The `ListGroups` response. +type ListGroupsResponse struct { + // The groups that match the specified filters. + Group []*Group `protobuf:"bytes,1,rep,name=group,proto3" json:"group,omitempty"` + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `pageToken` in the next call to this method. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupsResponse) Reset() { *m = ListGroupsResponse{} } +func (m *ListGroupsResponse) String() string { return proto.CompactTextString(m) } +func (*ListGroupsResponse) ProtoMessage() {} +func (*ListGroupsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{1} +} +func (m *ListGroupsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupsResponse.Unmarshal(m, b) +} +func (m *ListGroupsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupsResponse.Marshal(b, m, deterministic) +} +func (dst *ListGroupsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupsResponse.Merge(dst, src) +} +func (m *ListGroupsResponse) XXX_Size() int { + return xxx_messageInfo_ListGroupsResponse.Size(m) +} +func (m *ListGroupsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupsResponse proto.InternalMessageInfo + +func (m *ListGroupsResponse) GetGroup() []*Group { + if m != nil { + return m.Group + } + return nil +} + +func (m *ListGroupsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The `GetGroup` request. +type GetGroupRequest struct { + // The group to retrieve. The format is + // `"projects/{project_id_or_number}/groups/{group_id}"`. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGroupRequest) Reset() { *m = GetGroupRequest{} } +func (m *GetGroupRequest) String() string { return proto.CompactTextString(m) } +func (*GetGroupRequest) ProtoMessage() {} +func (*GetGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{2} +} +func (m *GetGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGroupRequest.Unmarshal(m, b) +} +func (m *GetGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGroupRequest.Marshal(b, m, deterministic) +} +func (dst *GetGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGroupRequest.Merge(dst, src) +} +func (m *GetGroupRequest) XXX_Size() int { + return xxx_messageInfo_GetGroupRequest.Size(m) +} +func (m *GetGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGroupRequest proto.InternalMessageInfo + +func (m *GetGroupRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `CreateGroup` request. +type CreateGroupRequest struct { + // The project in which to create the group. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // A group definition. It is an error to define the `name` field because + // the system assigns the name. + Group *Group `protobuf:"bytes,2,opt,name=group,proto3" json:"group,omitempty"` + // If true, validate this request but do not create the group. 
+ ValidateOnly bool `protobuf:"varint,3,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateGroupRequest) Reset() { *m = CreateGroupRequest{} } +func (m *CreateGroupRequest) String() string { return proto.CompactTextString(m) } +func (*CreateGroupRequest) ProtoMessage() {} +func (*CreateGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{3} +} +func (m *CreateGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateGroupRequest.Unmarshal(m, b) +} +func (m *CreateGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateGroupRequest.Marshal(b, m, deterministic) +} +func (dst *CreateGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateGroupRequest.Merge(dst, src) +} +func (m *CreateGroupRequest) XXX_Size() int { + return xxx_messageInfo_CreateGroupRequest.Size(m) +} +func (m *CreateGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateGroupRequest proto.InternalMessageInfo + +func (m *CreateGroupRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateGroupRequest) GetGroup() *Group { + if m != nil { + return m.Group + } + return nil +} + +func (m *CreateGroupRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// The `UpdateGroup` request. +type UpdateGroupRequest struct { + // The new definition of the group. All fields of the existing group, + // excepting `name`, are replaced with the corresponding fields of this group. + Group *Group `protobuf:"bytes,2,opt,name=group,proto3" json:"group,omitempty"` + // If true, validate this request but do not update the existing group. + ValidateOnly bool `protobuf:"varint,3,opt,name=validate_only,json=validateOnly,proto3" json:"validate_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateGroupRequest) Reset() { *m = UpdateGroupRequest{} } +func (m *UpdateGroupRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateGroupRequest) ProtoMessage() {} +func (*UpdateGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{4} +} +func (m *UpdateGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateGroupRequest.Unmarshal(m, b) +} +func (m *UpdateGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateGroupRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateGroupRequest.Merge(dst, src) +} +func (m *UpdateGroupRequest) XXX_Size() int { + return xxx_messageInfo_UpdateGroupRequest.Size(m) +} +func (m *UpdateGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateGroupRequest proto.InternalMessageInfo + +func (m *UpdateGroupRequest) GetGroup() *Group { + if m != nil { + return m.Group + } + return nil +} + +func (m *UpdateGroupRequest) GetValidateOnly() bool { + if m != nil { + return m.ValidateOnly + } + return false +} + +// The `DeleteGroup` request. You can only delete a group if it has no children. 
+type DeleteGroupRequest struct { + // The group to delete. The format is + // `"projects/{project_id_or_number}/groups/{group_id}"`. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteGroupRequest) Reset() { *m = DeleteGroupRequest{} } +func (m *DeleteGroupRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteGroupRequest) ProtoMessage() {} +func (*DeleteGroupRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{5} +} +func (m *DeleteGroupRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteGroupRequest.Unmarshal(m, b) +} +func (m *DeleteGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteGroupRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteGroupRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteGroupRequest.Merge(dst, src) +} +func (m *DeleteGroupRequest) XXX_Size() int { + return xxx_messageInfo_DeleteGroupRequest.Size(m) +} +func (m *DeleteGroupRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteGroupRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteGroupRequest proto.InternalMessageInfo + +func (m *DeleteGroupRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `ListGroupMembers` request. +type ListGroupMembersRequest struct { + // The group whose members are listed. The format is + // `"projects/{project_id_or_number}/groups/{group_id}"`. + Name string `protobuf:"bytes,7,opt,name=name,proto3" json:"name,omitempty"` + // A positive number that is the maximum number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // An optional [list filter](/monitoring/api/learn_more#filtering) describing + // the members to be returned. The filter may reference the type, labels, and + // metadata of monitored resources that comprise the group. + // For example, to return only resources representing Compute Engine VM + // instances, use this filter: + // + // resource.type = "gce_instance" + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + // An optional time interval for which results should be returned. Only + // members that were part of the group during the specified interval are + // included in the response. If no interval is provided then the group + // membership over the last minute is returned. 
+ Interval *TimeInterval `protobuf:"bytes,6,opt,name=interval,proto3" json:"interval,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupMembersRequest) Reset() { *m = ListGroupMembersRequest{} } +func (m *ListGroupMembersRequest) String() string { return proto.CompactTextString(m) } +func (*ListGroupMembersRequest) ProtoMessage() {} +func (*ListGroupMembersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{6} +} +func (m *ListGroupMembersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupMembersRequest.Unmarshal(m, b) +} +func (m *ListGroupMembersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupMembersRequest.Marshal(b, m, deterministic) +} +func (dst *ListGroupMembersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupMembersRequest.Merge(dst, src) +} +func (m *ListGroupMembersRequest) XXX_Size() int { + return xxx_messageInfo_ListGroupMembersRequest.Size(m) +} +func (m *ListGroupMembersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupMembersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupMembersRequest proto.InternalMessageInfo + +func (m *ListGroupMembersRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListGroupMembersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListGroupMembersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListGroupMembersRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListGroupMembersRequest) GetInterval() *TimeInterval { + if m != nil { + return m.Interval + } + return nil +} + +// The `ListGroupMembers` response. +type ListGroupMembersResponse struct { + // A set of monitored resources in the group. + Members []*monitoredres.MonitoredResource `protobuf:"bytes,1,rep,name=members,proto3" json:"members,omitempty"` + // If there are more results than have been returned, then this field is + // set to a non-empty value. To see the additional results, use that value as + // `pageToken` in the next call to this method. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of elements matching this request. 
+ TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListGroupMembersResponse) Reset() { *m = ListGroupMembersResponse{} } +func (m *ListGroupMembersResponse) String() string { return proto.CompactTextString(m) } +func (*ListGroupMembersResponse) ProtoMessage() {} +func (*ListGroupMembersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_group_service_990a70a97332ba2c, []int{7} +} +func (m *ListGroupMembersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListGroupMembersResponse.Unmarshal(m, b) +} +func (m *ListGroupMembersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListGroupMembersResponse.Marshal(b, m, deterministic) +} +func (dst *ListGroupMembersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListGroupMembersResponse.Merge(dst, src) +} +func (m *ListGroupMembersResponse) XXX_Size() int { + return xxx_messageInfo_ListGroupMembersResponse.Size(m) +} +func (m *ListGroupMembersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListGroupMembersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListGroupMembersResponse proto.InternalMessageInfo + +func (m *ListGroupMembersResponse) GetMembers() []*monitoredres.MonitoredResource { + if m != nil { + return m.Members + } + return nil +} + +func (m *ListGroupMembersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListGroupMembersResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +func init() { + proto.RegisterType((*ListGroupsRequest)(nil), "google.monitoring.v3.ListGroupsRequest") + proto.RegisterType((*ListGroupsResponse)(nil), "google.monitoring.v3.ListGroupsResponse") + proto.RegisterType((*GetGroupRequest)(nil), "google.monitoring.v3.GetGroupRequest") + proto.RegisterType((*CreateGroupRequest)(nil), "google.monitoring.v3.CreateGroupRequest") + proto.RegisterType((*UpdateGroupRequest)(nil), "google.monitoring.v3.UpdateGroupRequest") + proto.RegisterType((*DeleteGroupRequest)(nil), "google.monitoring.v3.DeleteGroupRequest") + proto.RegisterType((*ListGroupMembersRequest)(nil), "google.monitoring.v3.ListGroupMembersRequest") + proto.RegisterType((*ListGroupMembersResponse)(nil), "google.monitoring.v3.ListGroupMembersResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GroupServiceClient is the client API for GroupService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type GroupServiceClient interface { + // Lists the existing groups. + ListGroups(ctx context.Context, in *ListGroupsRequest, opts ...grpc.CallOption) (*ListGroupsResponse, error) + // Gets a single group. + GetGroup(ctx context.Context, in *GetGroupRequest, opts ...grpc.CallOption) (*Group, error) + // Creates a new group. + CreateGroup(ctx context.Context, in *CreateGroupRequest, opts ...grpc.CallOption) (*Group, error) + // Updates an existing group. + // You can change any group attributes except `name`. 
+ UpdateGroup(ctx context.Context, in *UpdateGroupRequest, opts ...grpc.CallOption) (*Group, error) + // Deletes an existing group. + DeleteGroup(ctx context.Context, in *DeleteGroupRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists the monitored resources that are members of a group. + ListGroupMembers(ctx context.Context, in *ListGroupMembersRequest, opts ...grpc.CallOption) (*ListGroupMembersResponse, error) +} + +type groupServiceClient struct { + cc *grpc.ClientConn +} + +func NewGroupServiceClient(cc *grpc.ClientConn) GroupServiceClient { + return &groupServiceClient{cc} +} + +func (c *groupServiceClient) ListGroups(ctx context.Context, in *ListGroupsRequest, opts ...grpc.CallOption) (*ListGroupsResponse, error) { + out := new(ListGroupsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/ListGroups", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *groupServiceClient) GetGroup(ctx context.Context, in *GetGroupRequest, opts ...grpc.CallOption) (*Group, error) { + out := new(Group) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/GetGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *groupServiceClient) CreateGroup(ctx context.Context, in *CreateGroupRequest, opts ...grpc.CallOption) (*Group, error) { + out := new(Group) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/CreateGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *groupServiceClient) UpdateGroup(ctx context.Context, in *UpdateGroupRequest, opts ...grpc.CallOption) (*Group, error) { + out := new(Group) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/UpdateGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *groupServiceClient) DeleteGroup(ctx context.Context, in *DeleteGroupRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/DeleteGroup", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *groupServiceClient) ListGroupMembers(ctx context.Context, in *ListGroupMembersRequest, opts ...grpc.CallOption) (*ListGroupMembersResponse, error) { + out := new(ListGroupMembersResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.GroupService/ListGroupMembers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GroupServiceServer is the server API for GroupService service. +type GroupServiceServer interface { + // Lists the existing groups. + ListGroups(context.Context, *ListGroupsRequest) (*ListGroupsResponse, error) + // Gets a single group. + GetGroup(context.Context, *GetGroupRequest) (*Group, error) + // Creates a new group. + CreateGroup(context.Context, *CreateGroupRequest) (*Group, error) + // Updates an existing group. + // You can change any group attributes except `name`. + UpdateGroup(context.Context, *UpdateGroupRequest) (*Group, error) + // Deletes an existing group. + DeleteGroup(context.Context, *DeleteGroupRequest) (*empty.Empty, error) + // Lists the monitored resources that are members of a group. 
+ ListGroupMembers(context.Context, *ListGroupMembersRequest) (*ListGroupMembersResponse, error) +} + +func RegisterGroupServiceServer(s *grpc.Server, srv GroupServiceServer) { + s.RegisterService(&_GroupService_serviceDesc, srv) +} + +func _GroupService_ListGroups_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListGroupsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).ListGroups(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/ListGroups", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).ListGroups(ctx, req.(*ListGroupsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GroupService_GetGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).GetGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/GetGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).GetGroup(ctx, req.(*GetGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GroupService_CreateGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).CreateGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/CreateGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).CreateGroup(ctx, req.(*CreateGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GroupService_UpdateGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).UpdateGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/UpdateGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).UpdateGroup(ctx, req.(*UpdateGroupRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _GroupService_DeleteGroup_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteGroupRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).DeleteGroup(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/DeleteGroup", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).DeleteGroup(ctx, req.(*DeleteGroupRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _GroupService_ListGroupMembers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListGroupMembersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GroupServiceServer).ListGroupMembers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.GroupService/ListGroupMembers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GroupServiceServer).ListGroupMembers(ctx, req.(*ListGroupMembersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _GroupService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.monitoring.v3.GroupService", + HandlerType: (*GroupServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListGroups", + Handler: _GroupService_ListGroups_Handler, + }, + { + MethodName: "GetGroup", + Handler: _GroupService_GetGroup_Handler, + }, + { + MethodName: "CreateGroup", + Handler: _GroupService_CreateGroup_Handler, + }, + { + MethodName: "UpdateGroup", + Handler: _GroupService_UpdateGroup_Handler, + }, + { + MethodName: "DeleteGroup", + Handler: _GroupService_DeleteGroup_Handler, + }, + { + MethodName: "ListGroupMembers", + Handler: _GroupService_ListGroupMembers_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/monitoring/v3/group_service.proto", +} + +func init() { + proto.RegisterFile("google/monitoring/v3/group_service.proto", fileDescriptor_group_service_990a70a97332ba2c) +} + +var fileDescriptor_group_service_990a70a97332ba2c = []byte{ + // 826 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4d, 0x6f, 0xd3, 0x4c, + 0x10, 0x7e, 0xdd, 0xa4, 0x69, 0xb2, 0x69, 0xd5, 0x76, 0x55, 0xf5, 0x8d, 0xdc, 0x0f, 0x05, 0xf7, + 0x83, 0xa8, 0x50, 0x5b, 0x24, 0x07, 0x24, 0x10, 0x3d, 0xb4, 0xa0, 0x82, 0x44, 0xd5, 0xca, 0x2d, + 0x3d, 0xa0, 0x4a, 0x91, 0x9b, 0x4c, 0x8c, 0xc1, 0xde, 0x35, 0xf6, 0x26, 0xd0, 0xa2, 0x4a, 0x80, + 0xc4, 0x81, 0x33, 0x37, 0x6e, 0x1c, 0xe1, 0x2f, 0x70, 0xe2, 0xca, 0x95, 0xbf, 0xc0, 0xff, 0x00, + 0x79, 0xbd, 0x9b, 0x38, 0x9f, 0xed, 0x85, 0x5b, 0xb2, 0xf3, 0x8c, 0x9f, 0x67, 0x66, 0x9f, 0x99, + 0x45, 0x25, 0x9b, 0x52, 0xdb, 0x05, 0xc3, 0xa3, 0xc4, 0x61, 0x34, 0x70, 0x88, 0x6d, 0xb4, 0x2a, + 0x86, 0x1d, 0xd0, 0xa6, 0x5f, 0x0d, 0x21, 0x68, 0x39, 0x35, 0xd0, 0xfd, 0x80, 0x32, 0x8a, 0xe7, + 0x62, 0xa4, 0xde, 0x41, 0xea, 0xad, 0x8a, 0xba, 0x28, 0xf2, 0x2d, 0xdf, 0x31, 0x2c, 0x42, 0x28, + 0xb3, 0x98, 0x43, 0x49, 0x18, 0xe7, 0xa8, 0x2b, 0x89, 0xa8, 0xc8, 0x83, 0x7a, 0x35, 0x80, 0x90, + 0x36, 0x03, 0xf9, 0x61, 0xf5, 0xda, 0x40, 0x09, 0x35, 0xea, 0x79, 0x94, 0x08, 0x48, 0x71, 0xb8, + 0x4a, 0x81, 0x58, 0x10, 0x08, 0xfe, 0xef, 0xb4, 0xd9, 0x30, 0xc0, 0xf3, 0xd9, 0x59, 0x1c, 0xd4, + 0xfe, 0x28, 0x68, 0xf6, 0xb1, 0x13, 0xb2, 0xdd, 0x28, 0x21, 0x34, 0xe1, 0x65, 0x13, 0x42, 0x86, + 0x31, 0x4a, 0x13, 0xcb, 0x83, 0xc2, 0x44, 0x51, 0x29, 0xe5, 0x4c, 0xfe, 0x1b, 0xdf, 0x44, 0xb3, + 0xb5, 0x67, 0x8e, 0x5b, 0x0f, 0x80, 0x54, 0x69, 0xa3, 0xca, 0x19, 0x0a, 0x63, 0x11, 0xe0, 0xe1, + 0x7f, 0xe6, 0xb4, 0x0c, 0xed, 0x37, 0xf8, 0x97, 0xb0, 0x8e, 0xb0, 0x45, 0x6a, 0x10, 0x32, 0x1a, + 0x84, 0x1d, 0x78, 0x4a, 0xc0, 0x67, 0xda, 0x31, 0x89, 0x2f, 0xa3, 0xb9, 0x3a, 0x84, 0x35, 0x20, + 0x75, 0x8b, 0xb0, 0x44, 0x46, 0x5a, 0x64, 0xe0, 0x44, 0x54, 0xe6, 0x2c, 0xa0, 0x9c, 0x6f, 0xd9, + 0x50, 0x0d, 0x9d, 0x73, 0x28, 0x8c, 0x17, 0x95, 
0xd2, 0xb8, 0x99, 0x8d, 0x0e, 0x0e, 0x9d, 0x73, + 0xc0, 0x4b, 0x08, 0xf1, 0x20, 0xa3, 0x2f, 0x80, 0x14, 0x32, 0xbc, 0x10, 0x0e, 0x3f, 0x8a, 0x0e, + 0xb6, 0xb3, 0x28, 0xd3, 0x70, 0x5c, 0x06, 0x81, 0x46, 0x11, 0x4e, 0x36, 0x20, 0xf4, 0x29, 0x09, + 0x01, 0xdf, 0x42, 0xe3, 0xb1, 0x00, 0xa5, 0x98, 0x2a, 0xe5, 0xcb, 0x0b, 0xfa, 0xa0, 0x2b, 0xd6, + 0x79, 0x92, 0x19, 0x23, 0xf1, 0x3a, 0x9a, 0x26, 0xf0, 0x9a, 0x55, 0x13, 0xb4, 0xbc, 0x3d, 0xe6, + 0x54, 0x74, 0x7c, 0x20, 0xa9, 0xb5, 0x35, 0x34, 0xbd, 0x0b, 0x31, 0x5f, 0x6f, 0xbf, 0x53, 0x9d, + 0x7e, 0x6b, 0x6f, 0x15, 0x84, 0x77, 0x02, 0xb0, 0x18, 0x0c, 0x84, 0xa6, 0x13, 0x57, 0xd3, 0x16, + 0x1b, 0xf1, 0x5d, 0x4d, 0xec, 0x0a, 0x9a, 0x6a, 0x59, 0xae, 0x53, 0xb7, 0x18, 0x54, 0x29, 0x71, + 0xcf, 0x38, 0x75, 0xd6, 0x9c, 0x94, 0x87, 0xfb, 0xc4, 0x3d, 0xd3, 0x5c, 0x84, 0x9f, 0xf8, 0xf5, + 0x5e, 0x05, 0xff, 0x8a, 0xad, 0x84, 0xf0, 0x7d, 0x70, 0x61, 0x48, 0xbd, 0xc9, 0xd6, 0xfc, 0x50, + 0xd0, 0xff, 0xed, 0x3b, 0xdb, 0x03, 0xef, 0x14, 0x82, 0x91, 0xd6, 0xed, 0x32, 0x4a, 0x6a, 0xa4, + 0x51, 0xd2, 0x3d, 0x46, 0xc1, 0xf3, 0xd2, 0x28, 0xdc, 0x61, 0x39, 0x53, 0xfc, 0xc3, 0x5b, 0x28, + 0xeb, 0x10, 0x06, 0x41, 0xcb, 0x72, 0xb9, 0xbb, 0xf2, 0x65, 0x6d, 0x70, 0x23, 0x8e, 0x1c, 0x0f, + 0x1e, 0x09, 0xa4, 0xd9, 0xce, 0xd1, 0x3e, 0x2b, 0xa8, 0xd0, 0x5f, 0x83, 0x70, 0xdf, 0x6d, 0x34, + 0xe1, 0xc5, 0x47, 0xc2, 0x7f, 0x4b, 0xf2, 0xdb, 0x96, 0xef, 0xe8, 0x7b, 0x72, 0x5d, 0x98, 0x62, + 0x5b, 0x98, 0x12, 0x7d, 0x55, 0x0f, 0x46, 0x45, 0x33, 0xca, 0x2c, 0x37, 0xd9, 0x92, 0x1c, 0x3f, + 0x89, 0x7a, 0x52, 0xfe, 0x9e, 0x41, 0x93, 0x5c, 0xd8, 0x61, 0xbc, 0xe7, 0xf0, 0x07, 0x05, 0xa1, + 0xce, 0x94, 0xe0, 0xeb, 0x83, 0x4b, 0xed, 0x5b, 0x24, 0x6a, 0xe9, 0x72, 0x60, 0x5c, 0xb2, 0xb6, + 0xfa, 0xfe, 0xd7, 0xef, 0x4f, 0x63, 0xcb, 0x78, 0x31, 0x5a, 0x5f, 0x6f, 0xa2, 0x6b, 0xbb, 0xe7, + 0x07, 0xf4, 0x39, 0xd4, 0x58, 0x68, 0x6c, 0x5c, 0xc4, 0x0b, 0x2d, 0xc4, 0x2d, 0x94, 0x95, 0xb3, + 0x83, 0xd7, 0x86, 0x18, 0xaf, 0x7b, 0xb6, 0xd4, 0x51, 0xfe, 0xd4, 0xd6, 0x39, 0x6b, 0x11, 0x2f, + 0x0f, 0x62, 0x15, 0xa4, 0xc6, 0xc6, 0x05, 0x7e, 0xa7, 0xa0, 0x7c, 0x62, 0x18, 0xf1, 0x90, 0xba, + 0xfa, 0xe7, 0x75, 0x34, 0xfd, 0x0d, 0x4e, 0xbf, 0xa6, 0x8d, 0x2c, 0xfa, 0x8e, 0x18, 0xa2, 0x8f, + 0x0a, 0xca, 0x27, 0xc6, 0x71, 0x98, 0x86, 0xfe, 0x89, 0x1d, 0xad, 0xa1, 0xc2, 0x35, 0x6c, 0xaa, + 0xab, 0x5c, 0x43, 0xfc, 0x70, 0x0c, 0x6d, 0x84, 0xd4, 0xf2, 0x0a, 0xe5, 0x13, 0xb3, 0x3a, 0x4c, + 0x4a, 0xff, 0x38, 0xab, 0xf3, 0x12, 0x29, 0x5f, 0x23, 0xfd, 0x41, 0xf4, 0x1a, 0xc9, 0x8b, 0xd8, + 0xb8, 0xec, 0x22, 0xbe, 0x28, 0x68, 0xa6, 0x77, 0x6c, 0xf0, 0xe6, 0x25, 0x2e, 0xeb, 0x5e, 0x11, + 0xaa, 0x7e, 0x55, 0xb8, 0xb0, 0xa6, 0xce, 0xb5, 0x95, 0xf0, 0xfa, 0x68, 0x6d, 0x86, 0x18, 0xc2, + 0xed, 0xaf, 0x0a, 0x2a, 0xd4, 0xa8, 0x37, 0x90, 0x65, 0x7b, 0x36, 0x39, 0x57, 0x07, 0x51, 0x13, + 0x0e, 0x94, 0xa7, 0x5b, 0x02, 0x6a, 0x53, 0xd7, 0x22, 0xb6, 0x4e, 0x03, 0xdb, 0xb0, 0x81, 0xf0, + 0x16, 0x19, 0x71, 0xc8, 0xf2, 0x9d, 0xb0, 0xfb, 0x8d, 0xbf, 0xdb, 0xf9, 0xf7, 0x6d, 0x4c, 0xdd, + 0x8d, 0x3f, 0xb0, 0xe3, 0xd2, 0x66, 0x5d, 0x2e, 0x88, 0x88, 0xf1, 0xb8, 0xf2, 0x53, 0x06, 0x4f, + 0x78, 0xf0, 0xa4, 0x13, 0x3c, 0x39, 0xae, 0x9c, 0x66, 0x38, 0x49, 0xe5, 0x6f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x86, 0x94, 0xf2, 0xde, 0xed, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric.pb.go new file mode 100644 index 0000000..bf80222 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric.pb.go @@ -0,0 
+1,232 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/metric.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import _ "google.golang.org/genproto/googleapis/api/label" +import metric "google.golang.org/genproto/googleapis/api/metric" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A single data point in a time series. +type Point struct { + // The time interval to which the data point applies. For `GAUGE` metrics, + // only the end time of the interval is used. For `DELTA` metrics, the start + // and end time should specify a non-zero interval, with subsequent points + // specifying contiguous and non-overlapping intervals. For `CUMULATIVE` + // metrics, the start and end time should specify a non-zero interval, with + // subsequent points specifying the same start time and increasing end times, + // until an event resets the cumulative value to zero and sets a new start + // time for the following points. + Interval *TimeInterval `protobuf:"bytes,1,opt,name=interval,proto3" json:"interval,omitempty"` + // The value of the data point. + Value *TypedValue `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Point) Reset() { *m = Point{} } +func (m *Point) String() string { return proto.CompactTextString(m) } +func (*Point) ProtoMessage() {} +func (*Point) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_640ded4d066f50b5, []int{0} +} +func (m *Point) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Point.Unmarshal(m, b) +} +func (m *Point) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Point.Marshal(b, m, deterministic) +} +func (dst *Point) XXX_Merge(src proto.Message) { + xxx_messageInfo_Point.Merge(dst, src) +} +func (m *Point) XXX_Size() int { + return xxx_messageInfo_Point.Size(m) +} +func (m *Point) XXX_DiscardUnknown() { + xxx_messageInfo_Point.DiscardUnknown(m) +} + +var xxx_messageInfo_Point proto.InternalMessageInfo + +func (m *Point) GetInterval() *TimeInterval { + if m != nil { + return m.Interval + } + return nil +} + +func (m *Point) GetValue() *TypedValue { + if m != nil { + return m.Value + } + return nil +} + +// A collection of data points that describes the time-varying values +// of a metric. A time series is identified by a combination of a +// fully-specified monitored resource and a fully-specified metric. +// This type is used for both listing and creating time series. +type TimeSeries struct { + // The associated metric. A fully-specified metric used to identify the time + // series. + Metric *metric.Metric `protobuf:"bytes,1,opt,name=metric,proto3" json:"metric,omitempty"` + // The associated monitored resource. 
Custom metrics can use only certain + // monitored resource types in their time series data. + Resource *monitoredres.MonitoredResource `protobuf:"bytes,2,opt,name=resource,proto3" json:"resource,omitempty"` + // Output only. The associated monitored resource metadata. When reading + // a timeseries, this field will include metadata labels that are explicitly + // named in the reduction. When creating a timeseries, this field is ignored. + Metadata *monitoredres.MonitoredResourceMetadata `protobuf:"bytes,7,opt,name=metadata,proto3" json:"metadata,omitempty"` + // The metric kind of the time series. When listing time series, this metric + // kind might be different from the metric kind of the associated metric if + // this time series is an alignment or reduction of other time series. + // + // When creating a time series, this field is optional. If present, it must be + // the same as the metric kind of the associated metric. If the associated + // metric's descriptor must be auto-created, then this field specifies the + // metric kind of the new descriptor and must be either `GAUGE` (the default) + // or `CUMULATIVE`. + MetricKind metric.MetricDescriptor_MetricKind `protobuf:"varint,3,opt,name=metric_kind,json=metricKind,proto3,enum=google.api.MetricDescriptor_MetricKind" json:"metric_kind,omitempty"` + // The value type of the time series. When listing time series, this value + // type might be different from the value type of the associated metric if + // this time series is an alignment or reduction of other time series. + // + // When creating a time series, this field is optional. If present, it must be + // the same as the type of the data in the `points` field. + ValueType metric.MetricDescriptor_ValueType `protobuf:"varint,4,opt,name=value_type,json=valueType,proto3,enum=google.api.MetricDescriptor_ValueType" json:"value_type,omitempty"` + // The data points of this time series. When listing time series, points are + // returned in reverse time order. + // + // When creating a time series, this field must contain exactly one point and + // the point's type must be the same as the value type of the associated + // metric. If the associated metric's descriptor must be auto-created, then + // the value type of the descriptor is determined by the point's type, which + // must be `BOOL`, `INT64`, `DOUBLE`, or `DISTRIBUTION`. 
+ Points []*Point `protobuf:"bytes,5,rep,name=points,proto3" json:"points,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeSeries) Reset() { *m = TimeSeries{} } +func (m *TimeSeries) String() string { return proto.CompactTextString(m) } +func (*TimeSeries) ProtoMessage() {} +func (*TimeSeries) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_640ded4d066f50b5, []int{1} +} +func (m *TimeSeries) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeSeries.Unmarshal(m, b) +} +func (m *TimeSeries) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeSeries.Marshal(b, m, deterministic) +} +func (dst *TimeSeries) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeSeries.Merge(dst, src) +} +func (m *TimeSeries) XXX_Size() int { + return xxx_messageInfo_TimeSeries.Size(m) +} +func (m *TimeSeries) XXX_DiscardUnknown() { + xxx_messageInfo_TimeSeries.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeSeries proto.InternalMessageInfo + +func (m *TimeSeries) GetMetric() *metric.Metric { + if m != nil { + return m.Metric + } + return nil +} + +func (m *TimeSeries) GetResource() *monitoredres.MonitoredResource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *TimeSeries) GetMetadata() *monitoredres.MonitoredResourceMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *TimeSeries) GetMetricKind() metric.MetricDescriptor_MetricKind { + if m != nil { + return m.MetricKind + } + return metric.MetricDescriptor_METRIC_KIND_UNSPECIFIED +} + +func (m *TimeSeries) GetValueType() metric.MetricDescriptor_ValueType { + if m != nil { + return m.ValueType + } + return metric.MetricDescriptor_VALUE_TYPE_UNSPECIFIED +} + +func (m *TimeSeries) GetPoints() []*Point { + if m != nil { + return m.Points + } + return nil +} + +func init() { + proto.RegisterType((*Point)(nil), "google.monitoring.v3.Point") + proto.RegisterType((*TimeSeries)(nil), "google.monitoring.v3.TimeSeries") +} + +func init() { + proto.RegisterFile("google/monitoring/v3/metric.proto", fileDescriptor_metric_640ded4d066f50b5) +} + +var fileDescriptor_metric_640ded4d066f50b5 = []byte{ + // 441 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x51, 0xab, 0xd3, 0x30, + 0x14, 0xc7, 0xe9, 0xae, 0x9b, 0x33, 0x03, 0x1f, 0x82, 0x68, 0x99, 0x0a, 0x73, 0xa2, 0x0e, 0x1f, + 0x5a, 0x58, 0x41, 0x10, 0xe1, 0x82, 0x57, 0x45, 0x45, 0x2e, 0x8c, 0x28, 0x7b, 0x90, 0xc1, 0xc8, + 0x6d, 0x0f, 0x25, 0xd8, 0xe4, 0x84, 0x34, 0x2b, 0xdc, 0x27, 0x3f, 0x8c, 0x6f, 0x7e, 0x14, 0x3f, + 0x93, 0x0f, 0xd2, 0x24, 0xdd, 0x76, 0xb1, 0xf7, 0xbe, 0xb5, 0xf9, 0xff, 0xfe, 0xe7, 0x7f, 0x72, + 0x72, 0xc8, 0x93, 0x12, 0xb1, 0xac, 0x20, 0x95, 0xa8, 0x84, 0x45, 0x23, 0x54, 0x99, 0x36, 0x59, + 0x2a, 0xc1, 0x1a, 0x91, 0x27, 0xda, 0xa0, 0x45, 0x7a, 0xcf, 0x23, 0xc9, 0x01, 0x49, 0x9a, 0x6c, + 0xfa, 0x28, 0x18, 0xb9, 0x16, 0x29, 0x57, 0x0a, 0x2d, 0xb7, 0x02, 0x55, 0xed, 0x3d, 0xd3, 0xfb, + 0x47, 0x6a, 0xc5, 0x2f, 0xa0, 0x0a, 0xe7, 0x0f, 0x8e, 0xce, 0x8f, 0x43, 0xa6, 0x4f, 0x8f, 0x05, + 0x1f, 0x04, 0xc5, 0xd6, 0x40, 0x8d, 0x3b, 0x93, 0x43, 0x80, 0xfa, 0x9b, 0xcd, 0x51, 0x4a, 0x54, + 0x1e, 0x99, 0xff, 0x24, 0xc3, 0x15, 0x0a, 0x65, 0xe9, 0x29, 0x19, 0x0b, 0x65, 0xc1, 0x34, 0xbc, + 0x8a, 0xa3, 0x59, 0xb4, 0x98, 0x2c, 0xe7, 0x49, 0xdf, 0x45, 0x92, 0x6f, 0x42, 0xc2, 0xe7, 0x40, + 0xb2, 0xbd, 0x87, 0xbe, 0x22, 0xc3, 0x86, 0x57, 0x3b, 0x88, 0x07, 0xce, 0x3c, 
0xbb, 0xc6, 0x7c, + 0xa9, 0xa1, 0x58, 0xb7, 0x1c, 0xf3, 0xf8, 0xfc, 0xef, 0x80, 0x90, 0xb6, 0xe4, 0x57, 0x30, 0x02, + 0x6a, 0xfa, 0x92, 0x8c, 0xfc, 0x3d, 0x43, 0x13, 0xb4, 0xab, 0xc3, 0xb5, 0x48, 0xce, 0x9d, 0xc2, + 0x02, 0x41, 0x5f, 0x93, 0x71, 0x77, 0xe1, 0x90, 0xfa, 0xf8, 0x0a, 0xdd, 0x8d, 0x85, 0x05, 0x88, + 0xed, 0x71, 0xfa, 0x96, 0x8c, 0x25, 0x58, 0x5e, 0x70, 0xcb, 0xe3, 0xdb, 0xce, 0xfa, 0xec, 0x46, + 0xeb, 0x79, 0x80, 0xd9, 0xde, 0x46, 0x3f, 0x91, 0x89, 0xef, 0x63, 0xfb, 0x43, 0xa8, 0x22, 0x3e, + 0x99, 0x45, 0x8b, 0xbb, 0xcb, 0x17, 0xff, 0xb7, 0xfb, 0x1e, 0xea, 0xdc, 0x08, 0x6d, 0xd1, 0x84, + 0x83, 0x2f, 0x42, 0x15, 0x8c, 0xc8, 0xfd, 0x37, 0xfd, 0x40, 0x88, 0x9b, 0xc5, 0xd6, 0x5e, 0x6a, + 0x88, 0x6f, 0xb9, 0x42, 0xcf, 0x6f, 0x2c, 0xe4, 0x26, 0xd8, 0xce, 0x92, 0xdd, 0x69, 0xba, 0x4f, + 0x9a, 0x91, 0x91, 0x6e, 0x9f, 0xb2, 0x8e, 0x87, 0xb3, 0x93, 0xc5, 0x64, 0xf9, 0xb0, 0xff, 0x09, + 0xdc, 0x73, 0xb3, 0x80, 0x9e, 0xfd, 0x8a, 0x48, 0x9c, 0xa3, 0xec, 0x45, 0xcf, 0x26, 0x3e, 0x78, + 0xd5, 0x6e, 0xca, 0x2a, 0xfa, 0x7e, 0x1a, 0xa0, 0x12, 0x2b, 0xae, 0xca, 0x04, 0x4d, 0x99, 0x96, + 0xa0, 0xdc, 0x1e, 0xa5, 0x5e, 0xe2, 0x5a, 0xd4, 0x57, 0xb7, 0xed, 0xcd, 0xe1, 0xef, 0xf7, 0x60, + 0xfa, 0xd1, 0x17, 0x78, 0x57, 0xe1, 0xae, 0xe8, 0x86, 0xdc, 0x66, 0xad, 0xb3, 0x3f, 0x9d, 0xb8, + 0x71, 0xe2, 0xe6, 0x20, 0x6e, 0xd6, 0xd9, 0xc5, 0xc8, 0x85, 0x64, 0xff, 0x02, 0x00, 0x00, 0xff, + 0xff, 0x5a, 0x88, 0xc9, 0x0b, 0x7e, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric_service.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric_service.pb.go new file mode 100644 index 0000000..730d285 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/metric_service.pb.go @@ -0,0 +1,1218 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/metric_service.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import metric "google.golang.org/genproto/googleapis/api/metric" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Controls which fields are returned by `ListTimeSeries`. +type ListTimeSeriesRequest_TimeSeriesView int32 + +const ( + // Returns the identity of the metric(s), the time series, + // and the time series data. + ListTimeSeriesRequest_FULL ListTimeSeriesRequest_TimeSeriesView = 0 + // Returns the identity of the metric and the time series resource, + // but not the time series data. 
+ ListTimeSeriesRequest_HEADERS ListTimeSeriesRequest_TimeSeriesView = 1 +) + +var ListTimeSeriesRequest_TimeSeriesView_name = map[int32]string{ + 0: "FULL", + 1: "HEADERS", +} +var ListTimeSeriesRequest_TimeSeriesView_value = map[string]int32{ + "FULL": 0, + "HEADERS": 1, +} + +func (x ListTimeSeriesRequest_TimeSeriesView) String() string { + return proto.EnumName(ListTimeSeriesRequest_TimeSeriesView_name, int32(x)) +} +func (ListTimeSeriesRequest_TimeSeriesView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{8, 0} +} + +// The `ListMonitoredResourceDescriptors` request. +type ListMonitoredResourceDescriptorsRequest struct { + // The project on which to execute the request. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + // An optional [filter](/monitoring/api/v3/filters) describing + // the descriptors to be returned. The filter can reference + // the descriptor's type and labels. For example, the + // following filter returns only Google Compute Engine descriptors + // that have an `id` label: + // + // resource.type = starts_with("gce_") AND resource.label:id + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // A positive number that is the maximum number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. + PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMonitoredResourceDescriptorsRequest) Reset() { + *m = ListMonitoredResourceDescriptorsRequest{} +} +func (m *ListMonitoredResourceDescriptorsRequest) String() string { return proto.CompactTextString(m) } +func (*ListMonitoredResourceDescriptorsRequest) ProtoMessage() {} +func (*ListMonitoredResourceDescriptorsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{0} +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Unmarshal(m, b) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Marshal(b, m, deterministic) +} +func (dst *ListMonitoredResourceDescriptorsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Merge(dst, src) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_Size() int { + return xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.Size(m) +} +func (m *ListMonitoredResourceDescriptorsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListMonitoredResourceDescriptorsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMonitoredResourceDescriptorsRequest proto.InternalMessageInfo + +func (m *ListMonitoredResourceDescriptorsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListMonitoredResourceDescriptorsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m 
*ListMonitoredResourceDescriptorsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListMonitoredResourceDescriptorsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The `ListMonitoredResourceDescriptors` response. +type ListMonitoredResourceDescriptorsResponse struct { + // The monitored resource descriptors that are available to this project + // and that match `filter`, if present. + ResourceDescriptors []*monitoredres.MonitoredResourceDescriptor `protobuf:"bytes,1,rep,name=resource_descriptors,json=resourceDescriptors,proto3" json:"resource_descriptors,omitempty"` + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `pageToken` in the next call to this method. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMonitoredResourceDescriptorsResponse) Reset() { + *m = ListMonitoredResourceDescriptorsResponse{} +} +func (m *ListMonitoredResourceDescriptorsResponse) String() string { return proto.CompactTextString(m) } +func (*ListMonitoredResourceDescriptorsResponse) ProtoMessage() {} +func (*ListMonitoredResourceDescriptorsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{1} +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Unmarshal(m, b) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Marshal(b, m, deterministic) +} +func (dst *ListMonitoredResourceDescriptorsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Merge(dst, src) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_Size() int { + return xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.Size(m) +} +func (m *ListMonitoredResourceDescriptorsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListMonitoredResourceDescriptorsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMonitoredResourceDescriptorsResponse proto.InternalMessageInfo + +func (m *ListMonitoredResourceDescriptorsResponse) GetResourceDescriptors() []*monitoredres.MonitoredResourceDescriptor { + if m != nil { + return m.ResourceDescriptors + } + return nil +} + +func (m *ListMonitoredResourceDescriptorsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The `GetMonitoredResourceDescriptor` request. +type GetMonitoredResourceDescriptorRequest struct { + // The monitored resource descriptor to get. The format is + // `"projects/{project_id_or_number}/monitoredResourceDescriptors/{resource_type}"`. + // The `{resource_type}` is a predefined type, such as + // `cloudsql_database`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMonitoredResourceDescriptorRequest) Reset() { *m = GetMonitoredResourceDescriptorRequest{} } +func (m *GetMonitoredResourceDescriptorRequest) String() string { return proto.CompactTextString(m) } +func (*GetMonitoredResourceDescriptorRequest) ProtoMessage() {} +func (*GetMonitoredResourceDescriptorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{2} +} +func (m *GetMonitoredResourceDescriptorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMonitoredResourceDescriptorRequest.Unmarshal(m, b) +} +func (m *GetMonitoredResourceDescriptorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMonitoredResourceDescriptorRequest.Marshal(b, m, deterministic) +} +func (dst *GetMonitoredResourceDescriptorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMonitoredResourceDescriptorRequest.Merge(dst, src) +} +func (m *GetMonitoredResourceDescriptorRequest) XXX_Size() int { + return xxx_messageInfo_GetMonitoredResourceDescriptorRequest.Size(m) +} +func (m *GetMonitoredResourceDescriptorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMonitoredResourceDescriptorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMonitoredResourceDescriptorRequest proto.InternalMessageInfo + +func (m *GetMonitoredResourceDescriptorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `ListMetricDescriptors` request. +type ListMetricDescriptorsRequest struct { + // The project on which to execute the request. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + // If this field is empty, all custom and + // system-defined metric descriptors are returned. + // Otherwise, the [filter](/monitoring/api/v3/filters) + // specifies which metric descriptors are to be + // returned. For example, the following filter matches all + // [custom metrics](/monitoring/custom-metrics): + // + // metric.type = starts_with("custom.googleapis.com/") + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // A positive number that is the maximum number of results to return. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMetricDescriptorsRequest) Reset() { *m = ListMetricDescriptorsRequest{} } +func (m *ListMetricDescriptorsRequest) String() string { return proto.CompactTextString(m) } +func (*ListMetricDescriptorsRequest) ProtoMessage() {} +func (*ListMetricDescriptorsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{3} +} +func (m *ListMetricDescriptorsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMetricDescriptorsRequest.Unmarshal(m, b) +} +func (m *ListMetricDescriptorsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMetricDescriptorsRequest.Marshal(b, m, deterministic) +} +func (dst *ListMetricDescriptorsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMetricDescriptorsRequest.Merge(dst, src) +} +func (m *ListMetricDescriptorsRequest) XXX_Size() int { + return xxx_messageInfo_ListMetricDescriptorsRequest.Size(m) +} +func (m *ListMetricDescriptorsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListMetricDescriptorsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMetricDescriptorsRequest proto.InternalMessageInfo + +func (m *ListMetricDescriptorsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListMetricDescriptorsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListMetricDescriptorsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListMetricDescriptorsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The `ListMetricDescriptors` response. +type ListMetricDescriptorsResponse struct { + // The metric descriptors that are available to the project + // and that match the value of `filter`, if present. + MetricDescriptors []*metric.MetricDescriptor `protobuf:"bytes,1,rep,name=metric_descriptors,json=metricDescriptors,proto3" json:"metric_descriptors,omitempty"` + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `pageToken` in the next call to this method. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListMetricDescriptorsResponse) Reset() { *m = ListMetricDescriptorsResponse{} } +func (m *ListMetricDescriptorsResponse) String() string { return proto.CompactTextString(m) } +func (*ListMetricDescriptorsResponse) ProtoMessage() {} +func (*ListMetricDescriptorsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{4} +} +func (m *ListMetricDescriptorsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListMetricDescriptorsResponse.Unmarshal(m, b) +} +func (m *ListMetricDescriptorsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListMetricDescriptorsResponse.Marshal(b, m, deterministic) +} +func (dst *ListMetricDescriptorsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListMetricDescriptorsResponse.Merge(dst, src) +} +func (m *ListMetricDescriptorsResponse) XXX_Size() int { + return xxx_messageInfo_ListMetricDescriptorsResponse.Size(m) +} +func (m *ListMetricDescriptorsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListMetricDescriptorsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListMetricDescriptorsResponse proto.InternalMessageInfo + +func (m *ListMetricDescriptorsResponse) GetMetricDescriptors() []*metric.MetricDescriptor { + if m != nil { + return m.MetricDescriptors + } + return nil +} + +func (m *ListMetricDescriptorsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The `GetMetricDescriptor` request. +type GetMetricDescriptorRequest struct { + // The metric descriptor on which to execute the request. The format is + // `"projects/{project_id_or_number}/metricDescriptors/{metric_id}"`. + // An example value of `{metric_id}` is + // `"compute.googleapis.com/instance/disk/read_bytes_count"`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetMetricDescriptorRequest) Reset() { *m = GetMetricDescriptorRequest{} } +func (m *GetMetricDescriptorRequest) String() string { return proto.CompactTextString(m) } +func (*GetMetricDescriptorRequest) ProtoMessage() {} +func (*GetMetricDescriptorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{5} +} +func (m *GetMetricDescriptorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetMetricDescriptorRequest.Unmarshal(m, b) +} +func (m *GetMetricDescriptorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetMetricDescriptorRequest.Marshal(b, m, deterministic) +} +func (dst *GetMetricDescriptorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetMetricDescriptorRequest.Merge(dst, src) +} +func (m *GetMetricDescriptorRequest) XXX_Size() int { + return xxx_messageInfo_GetMetricDescriptorRequest.Size(m) +} +func (m *GetMetricDescriptorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetMetricDescriptorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetMetricDescriptorRequest proto.InternalMessageInfo + +func (m *GetMetricDescriptorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `CreateMetricDescriptor` request. +type CreateMetricDescriptorRequest struct { + // The project on which to execute the request. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The new [custom metric](/monitoring/custom-metrics) + // descriptor. 
+ MetricDescriptor *metric.MetricDescriptor `protobuf:"bytes,2,opt,name=metric_descriptor,json=metricDescriptor,proto3" json:"metric_descriptor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateMetricDescriptorRequest) Reset() { *m = CreateMetricDescriptorRequest{} } +func (m *CreateMetricDescriptorRequest) String() string { return proto.CompactTextString(m) } +func (*CreateMetricDescriptorRequest) ProtoMessage() {} +func (*CreateMetricDescriptorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{6} +} +func (m *CreateMetricDescriptorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateMetricDescriptorRequest.Unmarshal(m, b) +} +func (m *CreateMetricDescriptorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateMetricDescriptorRequest.Marshal(b, m, deterministic) +} +func (dst *CreateMetricDescriptorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateMetricDescriptorRequest.Merge(dst, src) +} +func (m *CreateMetricDescriptorRequest) XXX_Size() int { + return xxx_messageInfo_CreateMetricDescriptorRequest.Size(m) +} +func (m *CreateMetricDescriptorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateMetricDescriptorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateMetricDescriptorRequest proto.InternalMessageInfo + +func (m *CreateMetricDescriptorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateMetricDescriptorRequest) GetMetricDescriptor() *metric.MetricDescriptor { + if m != nil { + return m.MetricDescriptor + } + return nil +} + +// The `DeleteMetricDescriptor` request. +type DeleteMetricDescriptorRequest struct { + // The metric descriptor on which to execute the request. The format is + // `"projects/{project_id_or_number}/metricDescriptors/{metric_id}"`. + // An example of `{metric_id}` is: + // `"custom.googleapis.com/my_test_metric"`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteMetricDescriptorRequest) Reset() { *m = DeleteMetricDescriptorRequest{} } +func (m *DeleteMetricDescriptorRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteMetricDescriptorRequest) ProtoMessage() {} +func (*DeleteMetricDescriptorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{7} +} +func (m *DeleteMetricDescriptorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteMetricDescriptorRequest.Unmarshal(m, b) +} +func (m *DeleteMetricDescriptorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteMetricDescriptorRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteMetricDescriptorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteMetricDescriptorRequest.Merge(dst, src) +} +func (m *DeleteMetricDescriptorRequest) XXX_Size() int { + return xxx_messageInfo_DeleteMetricDescriptorRequest.Size(m) +} +func (m *DeleteMetricDescriptorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteMetricDescriptorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteMetricDescriptorRequest proto.InternalMessageInfo + +func (m *DeleteMetricDescriptorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `ListTimeSeries` request. +type ListTimeSeriesRequest struct { + // The project on which to execute the request. The format is + // "projects/{project_id_or_number}". + Name string `protobuf:"bytes,10,opt,name=name,proto3" json:"name,omitempty"` + // A [monitoring filter](/monitoring/api/v3/filters) that specifies which time + // series should be returned. The filter must specify a single metric type, + // and can additionally specify metric labels and other information. For + // example: + // + // metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND + // metric.label.instance_name = "my-instance-name" + Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + // The time interval for which results should be returned. Only time series + // that contain data points in the specified interval are included + // in the response. + Interval *TimeInterval `protobuf:"bytes,4,opt,name=interval,proto3" json:"interval,omitempty"` + // By default, the raw time series data is returned. + // Use this field to combine multiple time series for different + // views of the data. + Aggregation *Aggregation `protobuf:"bytes,5,opt,name=aggregation,proto3" json:"aggregation,omitempty"` + // Unsupported: must be left blank. The points in each time series are + // returned in reverse time order. + OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Specifies which information is returned about the time series. + View ListTimeSeriesRequest_TimeSeriesView `protobuf:"varint,7,opt,name=view,proto3,enum=google.monitoring.v3.ListTimeSeriesRequest_TimeSeriesView" json:"view,omitempty"` + // A positive number that is the maximum number of results to return. If + // `page_size` is empty or more than 100,000 results, the effective + // `page_size` is 100,000 results. If `view` is set to `FULL`, this is the + // maximum number of `Points` returned. If `view` is set to `HEADERS`, this is + // the maximum number of `TimeSeries` returned. 
+ PageSize int32 `protobuf:"varint,8,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. + PageToken string `protobuf:"bytes,9,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTimeSeriesRequest) Reset() { *m = ListTimeSeriesRequest{} } +func (m *ListTimeSeriesRequest) String() string { return proto.CompactTextString(m) } +func (*ListTimeSeriesRequest) ProtoMessage() {} +func (*ListTimeSeriesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{8} +} +func (m *ListTimeSeriesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTimeSeriesRequest.Unmarshal(m, b) +} +func (m *ListTimeSeriesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTimeSeriesRequest.Marshal(b, m, deterministic) +} +func (dst *ListTimeSeriesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTimeSeriesRequest.Merge(dst, src) +} +func (m *ListTimeSeriesRequest) XXX_Size() int { + return xxx_messageInfo_ListTimeSeriesRequest.Size(m) +} +func (m *ListTimeSeriesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTimeSeriesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTimeSeriesRequest proto.InternalMessageInfo + +func (m *ListTimeSeriesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListTimeSeriesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListTimeSeriesRequest) GetInterval() *TimeInterval { + if m != nil { + return m.Interval + } + return nil +} + +func (m *ListTimeSeriesRequest) GetAggregation() *Aggregation { + if m != nil { + return m.Aggregation + } + return nil +} + +func (m *ListTimeSeriesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListTimeSeriesRequest) GetView() ListTimeSeriesRequest_TimeSeriesView { + if m != nil { + return m.View + } + return ListTimeSeriesRequest_FULL +} + +func (m *ListTimeSeriesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTimeSeriesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The `ListTimeSeries` response. +type ListTimeSeriesResponse struct { + // One or more time series that match the filter included in the request. + TimeSeries []*TimeSeries `protobuf:"bytes,1,rep,name=time_series,json=timeSeries,proto3" json:"time_series,omitempty"` + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `pageToken` in the next call to this method. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // Query execution errors that may have caused the time series data returned + // to be incomplete. 
+ ExecutionErrors []*status.Status `protobuf:"bytes,3,rep,name=execution_errors,json=executionErrors,proto3" json:"execution_errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTimeSeriesResponse) Reset() { *m = ListTimeSeriesResponse{} } +func (m *ListTimeSeriesResponse) String() string { return proto.CompactTextString(m) } +func (*ListTimeSeriesResponse) ProtoMessage() {} +func (*ListTimeSeriesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{9} +} +func (m *ListTimeSeriesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTimeSeriesResponse.Unmarshal(m, b) +} +func (m *ListTimeSeriesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTimeSeriesResponse.Marshal(b, m, deterministic) +} +func (dst *ListTimeSeriesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTimeSeriesResponse.Merge(dst, src) +} +func (m *ListTimeSeriesResponse) XXX_Size() int { + return xxx_messageInfo_ListTimeSeriesResponse.Size(m) +} +func (m *ListTimeSeriesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTimeSeriesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTimeSeriesResponse proto.InternalMessageInfo + +func (m *ListTimeSeriesResponse) GetTimeSeries() []*TimeSeries { + if m != nil { + return m.TimeSeries + } + return nil +} + +func (m *ListTimeSeriesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListTimeSeriesResponse) GetExecutionErrors() []*status.Status { + if m != nil { + return m.ExecutionErrors + } + return nil +} + +// The `CreateTimeSeries` request. +type CreateTimeSeriesRequest struct { + // The project on which to execute the request. The format is + // `"projects/{project_id_or_number}"`. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The new data to be added to a list of time series. + // Adds at most one data point to each of several time series. The new data + // point must be more recent than any other point in its time series. Each + // `TimeSeries` value must fully specify a unique time series by supplying + // all label values for the metric and the monitored resource. 
+ TimeSeries []*TimeSeries `protobuf:"bytes,2,rep,name=time_series,json=timeSeries,proto3" json:"time_series,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTimeSeriesRequest) Reset() { *m = CreateTimeSeriesRequest{} } +func (m *CreateTimeSeriesRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTimeSeriesRequest) ProtoMessage() {} +func (*CreateTimeSeriesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{10} +} +func (m *CreateTimeSeriesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTimeSeriesRequest.Unmarshal(m, b) +} +func (m *CreateTimeSeriesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTimeSeriesRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTimeSeriesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTimeSeriesRequest.Merge(dst, src) +} +func (m *CreateTimeSeriesRequest) XXX_Size() int { + return xxx_messageInfo_CreateTimeSeriesRequest.Size(m) +} +func (m *CreateTimeSeriesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTimeSeriesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTimeSeriesRequest proto.InternalMessageInfo + +func (m *CreateTimeSeriesRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateTimeSeriesRequest) GetTimeSeries() []*TimeSeries { + if m != nil { + return m.TimeSeries + } + return nil +} + +// Describes the result of a failed request to write data to a time series. +type CreateTimeSeriesError struct { + // The time series, including the `Metric`, `MonitoredResource`, + // and `Point`s (including timestamp and value) that resulted + // in the error. This field provides all of the context that + // would be needed to retry the operation. + TimeSeries *TimeSeries `protobuf:"bytes,1,opt,name=time_series,json=timeSeries,proto3" json:"time_series,omitempty"` + // The status of the requested write operation. 
+ Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTimeSeriesError) Reset() { *m = CreateTimeSeriesError{} } +func (m *CreateTimeSeriesError) String() string { return proto.CompactTextString(m) } +func (*CreateTimeSeriesError) ProtoMessage() {} +func (*CreateTimeSeriesError) Descriptor() ([]byte, []int) { + return fileDescriptor_metric_service_1409540f97d9ae0c, []int{11} +} +func (m *CreateTimeSeriesError) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTimeSeriesError.Unmarshal(m, b) +} +func (m *CreateTimeSeriesError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTimeSeriesError.Marshal(b, m, deterministic) +} +func (dst *CreateTimeSeriesError) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTimeSeriesError.Merge(dst, src) +} +func (m *CreateTimeSeriesError) XXX_Size() int { + return xxx_messageInfo_CreateTimeSeriesError.Size(m) +} +func (m *CreateTimeSeriesError) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTimeSeriesError.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTimeSeriesError proto.InternalMessageInfo + +func (m *CreateTimeSeriesError) GetTimeSeries() *TimeSeries { + if m != nil { + return m.TimeSeries + } + return nil +} + +func (m *CreateTimeSeriesError) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func init() { + proto.RegisterType((*ListMonitoredResourceDescriptorsRequest)(nil), "google.monitoring.v3.ListMonitoredResourceDescriptorsRequest") + proto.RegisterType((*ListMonitoredResourceDescriptorsResponse)(nil), "google.monitoring.v3.ListMonitoredResourceDescriptorsResponse") + proto.RegisterType((*GetMonitoredResourceDescriptorRequest)(nil), "google.monitoring.v3.GetMonitoredResourceDescriptorRequest") + proto.RegisterType((*ListMetricDescriptorsRequest)(nil), "google.monitoring.v3.ListMetricDescriptorsRequest") + proto.RegisterType((*ListMetricDescriptorsResponse)(nil), "google.monitoring.v3.ListMetricDescriptorsResponse") + proto.RegisterType((*GetMetricDescriptorRequest)(nil), "google.monitoring.v3.GetMetricDescriptorRequest") + proto.RegisterType((*CreateMetricDescriptorRequest)(nil), "google.monitoring.v3.CreateMetricDescriptorRequest") + proto.RegisterType((*DeleteMetricDescriptorRequest)(nil), "google.monitoring.v3.DeleteMetricDescriptorRequest") + proto.RegisterType((*ListTimeSeriesRequest)(nil), "google.monitoring.v3.ListTimeSeriesRequest") + proto.RegisterType((*ListTimeSeriesResponse)(nil), "google.monitoring.v3.ListTimeSeriesResponse") + proto.RegisterType((*CreateTimeSeriesRequest)(nil), "google.monitoring.v3.CreateTimeSeriesRequest") + proto.RegisterType((*CreateTimeSeriesError)(nil), "google.monitoring.v3.CreateTimeSeriesError") + proto.RegisterEnum("google.monitoring.v3.ListTimeSeriesRequest_TimeSeriesView", ListTimeSeriesRequest_TimeSeriesView_name, ListTimeSeriesRequest_TimeSeriesView_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetricServiceClient is the client API for MetricService service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MetricServiceClient interface { + // Lists monitored resource descriptors that match a filter. This method does + // not require a Stackdriver account. + ListMonitoredResourceDescriptors(ctx context.Context, in *ListMonitoredResourceDescriptorsRequest, opts ...grpc.CallOption) (*ListMonitoredResourceDescriptorsResponse, error) + // Gets a single monitored resource descriptor. This method does not require a + // Stackdriver account. + GetMonitoredResourceDescriptor(ctx context.Context, in *GetMonitoredResourceDescriptorRequest, opts ...grpc.CallOption) (*monitoredres.MonitoredResourceDescriptor, error) + // Lists metric descriptors that match a filter. This method does not require + // a Stackdriver account. + ListMetricDescriptors(ctx context.Context, in *ListMetricDescriptorsRequest, opts ...grpc.CallOption) (*ListMetricDescriptorsResponse, error) + // Gets a single metric descriptor. This method does not require a Stackdriver + // account. + GetMetricDescriptor(ctx context.Context, in *GetMetricDescriptorRequest, opts ...grpc.CallOption) (*metric.MetricDescriptor, error) + // Creates a new metric descriptor. + // User-created metric descriptors define + // [custom metrics](/monitoring/custom-metrics). + CreateMetricDescriptor(ctx context.Context, in *CreateMetricDescriptorRequest, opts ...grpc.CallOption) (*metric.MetricDescriptor, error) + // Deletes a metric descriptor. Only user-created + // [custom metrics](/monitoring/custom-metrics) can be deleted. + DeleteMetricDescriptor(ctx context.Context, in *DeleteMetricDescriptorRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Lists time series that match a filter. This method does not require a + // Stackdriver account. + ListTimeSeries(ctx context.Context, in *ListTimeSeriesRequest, opts ...grpc.CallOption) (*ListTimeSeriesResponse, error) + // Creates or adds data to one or more time series. + // The response is empty if all time series in the request were written. + // If any time series could not be written, a corresponding failure message is + // included in the error response. + CreateTimeSeries(ctx context.Context, in *CreateTimeSeriesRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type metricServiceClient struct { + cc *grpc.ClientConn +} + +func NewMetricServiceClient(cc *grpc.ClientConn) MetricServiceClient { + return &metricServiceClient{cc} +} + +func (c *metricServiceClient) ListMonitoredResourceDescriptors(ctx context.Context, in *ListMonitoredResourceDescriptorsRequest, opts ...grpc.CallOption) (*ListMonitoredResourceDescriptorsResponse, error) { + out := new(ListMonitoredResourceDescriptorsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) GetMonitoredResourceDescriptor(ctx context.Context, in *GetMonitoredResourceDescriptorRequest, opts ...grpc.CallOption) (*monitoredres.MonitoredResourceDescriptor, error) { + out := new(monitoredres.MonitoredResourceDescriptor) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) ListMetricDescriptors(ctx context.Context, in *ListMetricDescriptorsRequest, opts ...grpc.CallOption) (*ListMetricDescriptorsResponse, error) { + out := new(ListMetricDescriptorsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/ListMetricDescriptors", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) GetMetricDescriptor(ctx context.Context, in *GetMetricDescriptorRequest, opts ...grpc.CallOption) (*metric.MetricDescriptor, error) { + out := new(metric.MetricDescriptor) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/GetMetricDescriptor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) CreateMetricDescriptor(ctx context.Context, in *CreateMetricDescriptorRequest, opts ...grpc.CallOption) (*metric.MetricDescriptor, error) { + out := new(metric.MetricDescriptor) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/CreateMetricDescriptor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) DeleteMetricDescriptor(ctx context.Context, in *DeleteMetricDescriptorRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/DeleteMetricDescriptor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) ListTimeSeries(ctx context.Context, in *ListTimeSeriesRequest, opts ...grpc.CallOption) (*ListTimeSeriesResponse, error) { + out := new(ListTimeSeriesResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/ListTimeSeries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metricServiceClient) CreateTimeSeries(ctx context.Context, in *CreateTimeSeriesRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.MetricService/CreateTimeSeries", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MetricServiceServer is the server API for MetricService service. +type MetricServiceServer interface { + // Lists monitored resource descriptors that match a filter. This method does + // not require a Stackdriver account. + ListMonitoredResourceDescriptors(context.Context, *ListMonitoredResourceDescriptorsRequest) (*ListMonitoredResourceDescriptorsResponse, error) + // Gets a single monitored resource descriptor. This method does not require a + // Stackdriver account. + GetMonitoredResourceDescriptor(context.Context, *GetMonitoredResourceDescriptorRequest) (*monitoredres.MonitoredResourceDescriptor, error) + // Lists metric descriptors that match a filter. This method does not require + // a Stackdriver account. + ListMetricDescriptors(context.Context, *ListMetricDescriptorsRequest) (*ListMetricDescriptorsResponse, error) + // Gets a single metric descriptor. This method does not require a Stackdriver + // account. + GetMetricDescriptor(context.Context, *GetMetricDescriptorRequest) (*metric.MetricDescriptor, error) + // Creates a new metric descriptor. + // User-created metric descriptors define + // [custom metrics](/monitoring/custom-metrics). + CreateMetricDescriptor(context.Context, *CreateMetricDescriptorRequest) (*metric.MetricDescriptor, error) + // Deletes a metric descriptor. 
Only user-created + // [custom metrics](/monitoring/custom-metrics) can be deleted. + DeleteMetricDescriptor(context.Context, *DeleteMetricDescriptorRequest) (*empty.Empty, error) + // Lists time series that match a filter. This method does not require a + // Stackdriver account. + ListTimeSeries(context.Context, *ListTimeSeriesRequest) (*ListTimeSeriesResponse, error) + // Creates or adds data to one or more time series. + // The response is empty if all time series in the request were written. + // If any time series could not be written, a corresponding failure message is + // included in the error response. + CreateTimeSeries(context.Context, *CreateTimeSeriesRequest) (*empty.Empty, error) +} + +func RegisterMetricServiceServer(s *grpc.Server, srv MetricServiceServer) { + s.RegisterService(&_MetricService_serviceDesc, srv) +} + +func _MetricService_ListMonitoredResourceDescriptors_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListMonitoredResourceDescriptorsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).ListMonitoredResourceDescriptors(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).ListMonitoredResourceDescriptors(ctx, req.(*ListMonitoredResourceDescriptorsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_GetMonitoredResourceDescriptor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMonitoredResourceDescriptorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).GetMonitoredResourceDescriptor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).GetMonitoredResourceDescriptor(ctx, req.(*GetMonitoredResourceDescriptorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_ListMetricDescriptors_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListMetricDescriptorsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).ListMetricDescriptors(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/ListMetricDescriptors", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).ListMetricDescriptors(ctx, req.(*ListMetricDescriptorsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_GetMetricDescriptor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetMetricDescriptorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).GetMetricDescriptor(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/GetMetricDescriptor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).GetMetricDescriptor(ctx, req.(*GetMetricDescriptorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_CreateMetricDescriptor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateMetricDescriptorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).CreateMetricDescriptor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/CreateMetricDescriptor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).CreateMetricDescriptor(ctx, req.(*CreateMetricDescriptorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_DeleteMetricDescriptor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteMetricDescriptorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).DeleteMetricDescriptor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/DeleteMetricDescriptor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).DeleteMetricDescriptor(ctx, req.(*DeleteMetricDescriptorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_ListTimeSeries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTimeSeriesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).ListTimeSeries(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/ListTimeSeries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).ListTimeSeries(ctx, req.(*ListTimeSeriesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetricService_CreateTimeSeries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTimeSeriesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricServiceServer).CreateTimeSeries(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.MetricService/CreateTimeSeries", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricServiceServer).CreateTimeSeries(ctx, req.(*CreateTimeSeriesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetricService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.monitoring.v3.MetricService", + HandlerType: (*MetricServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListMonitoredResourceDescriptors", + Handler: _MetricService_ListMonitoredResourceDescriptors_Handler, + }, + { + MethodName: 
"GetMonitoredResourceDescriptor", + Handler: _MetricService_GetMonitoredResourceDescriptor_Handler, + }, + { + MethodName: "ListMetricDescriptors", + Handler: _MetricService_ListMetricDescriptors_Handler, + }, + { + MethodName: "GetMetricDescriptor", + Handler: _MetricService_GetMetricDescriptor_Handler, + }, + { + MethodName: "CreateMetricDescriptor", + Handler: _MetricService_CreateMetricDescriptor_Handler, + }, + { + MethodName: "DeleteMetricDescriptor", + Handler: _MetricService_DeleteMetricDescriptor_Handler, + }, + { + MethodName: "ListTimeSeries", + Handler: _MetricService_ListTimeSeries_Handler, + }, + { + MethodName: "CreateTimeSeries", + Handler: _MetricService_CreateTimeSeries_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/monitoring/v3/metric_service.proto", +} + +func init() { + proto.RegisterFile("google/monitoring/v3/metric_service.proto", fileDescriptor_metric_service_1409540f97d9ae0c) +} + +var fileDescriptor_metric_service_1409540f97d9ae0c = []byte{ + // 1049 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xc4, 0x57, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0x67, 0xe2, 0x34, 0x71, 0x9e, 0xd5, 0xd4, 0x9d, 0xb6, 0xae, 0xd9, 0x26, 0x95, 0xbb, 0xa8, + 0xc4, 0x75, 0xcb, 0x6e, 0x65, 0x57, 0x1c, 0x92, 0x26, 0x52, 0xfe, 0x51, 0x2a, 0x02, 0x8a, 0xd6, + 0x25, 0x87, 0x2a, 0x92, 0xb5, 0xb1, 0xa7, 0xab, 0x01, 0xef, 0xce, 0x32, 0x3b, 0x76, 0x9b, 0xa2, + 0x70, 0xe0, 0xd0, 0x3b, 0x02, 0x24, 0xf8, 0x0a, 0x39, 0x80, 0xf8, 0x0a, 0x88, 0x13, 0x57, 0xce, + 0xdc, 0xf8, 0x0a, 0xdc, 0xd1, 0xce, 0xee, 0xc6, 0xf6, 0x7a, 0x77, 0x6d, 0x73, 0xe9, 0xcd, 0x3b, + 0xef, 0xcd, 0x7b, 0xbf, 0xf7, 0x9b, 0x79, 0xef, 0x37, 0x86, 0x7b, 0x16, 0x63, 0x56, 0x97, 0xe8, + 0x36, 0x73, 0xa8, 0x60, 0x9c, 0x3a, 0x96, 0xde, 0x6f, 0xe8, 0x36, 0x11, 0x9c, 0xb6, 0x5b, 0x1e, + 0xe1, 0x7d, 0xda, 0x26, 0x9a, 0xcb, 0x99, 0x60, 0xf8, 0x7a, 0xe0, 0xaa, 0x0d, 0x5c, 0xb5, 0x7e, + 0x43, 0x59, 0x09, 0x03, 0x98, 0x2e, 0xd5, 0x4d, 0xc7, 0x61, 0xc2, 0x14, 0x94, 0x39, 0x5e, 0xb0, + 0x47, 0xb9, 0x39, 0x64, 0x0d, 0x82, 0x86, 0x86, 0xf7, 0x86, 0x0d, 0x41, 0x40, 0xd2, 0x69, 0x71, + 0xe2, 0xb1, 0x1e, 0x8f, 0x32, 0x2a, 0x77, 0x12, 0xc1, 0xb5, 0x99, 0x6d, 0x33, 0x27, 0xd3, 0x65, + 0x24, 0xd5, 0xed, 0xd0, 0x45, 0x7e, 0x9d, 0xf4, 0x5e, 0xe8, 0x9d, 0x1e, 0x97, 0x20, 0x43, 0xfb, + 0xad, 0xb8, 0x9d, 0xd8, 0xae, 0x38, 0x8d, 0x15, 0xc0, 0xdd, 0xb6, 0xee, 0x09, 0x53, 0xf4, 0xc2, + 0xca, 0xd4, 0xef, 0x10, 0xac, 0x1d, 0x50, 0x4f, 0x7c, 0x1a, 0x81, 0x37, 0x42, 0xec, 0x7b, 0xc4, + 0x6b, 0x73, 0xea, 0x0a, 0xc6, 0x3d, 0x83, 0x7c, 0xd5, 0x23, 0x9e, 0xc0, 0x18, 0xe6, 0x1d, 0xd3, + 0x26, 0xe5, 0x4b, 0x15, 0x54, 0x5d, 0x32, 0xe4, 0x6f, 0x5c, 0x82, 0x85, 0x17, 0xb4, 0x2b, 0x08, + 0x2f, 0xcf, 0xc9, 0xd5, 0xf0, 0x0b, 0xdf, 0x82, 0x25, 0xd7, 0xb4, 0x48, 0xcb, 0xa3, 0xaf, 0x49, + 0x39, 0x57, 0x41, 0xd5, 0x4b, 0x46, 0xde, 0x5f, 0x68, 0xd2, 0xd7, 0x04, 0xaf, 0x02, 0x48, 0xa3, + 0x60, 0x5f, 0x12, 0xa7, 0x3c, 0x2f, 0x37, 0x4a, 0xf7, 0x67, 0xfe, 0x82, 0xfa, 0x0b, 0x82, 0xea, + 0x64, 0x4c, 0x9e, 0xcb, 0x1c, 0x8f, 0xe0, 0xe7, 0x70, 0x3d, 0xa2, 0xbb, 0xd5, 0x19, 0xd8, 0xcb, + 0xa8, 0x92, 0xab, 0x16, 0xea, 0x6b, 0x5a, 0x78, 0xda, 0xa6, 0x4b, 0xb5, 0x8c, 0x78, 0xc6, 0x35, + 0x3e, 0x9e, 0x03, 0xbf, 0x0f, 0x57, 0x1c, 0xf2, 0x4a, 0xb4, 0x86, 0xc0, 0x06, 0x55, 0x5e, 0xf6, + 0x97, 0x0f, 0x2f, 0x00, 0x6f, 0xc0, 0xdd, 0x27, 0x24, 0x0b, 0x6e, 0x9c, 0xc1, 0xdc, 0x80, 0x41, + 0xf5, 0x0d, 0x82, 0x15, 0x59, 0xad, 0x3c, 0xec, 0xb7, 0x48, 0xfb, 0x0f, 0x08, 0x56, 0x53, 0x80, + 0x84, 0x5c, 0x7f, 0x02, 0x38, 0x6c, 0xa9, 0x71, 
0xa6, 0x57, 0x46, 0x98, 0x8e, 0x85, 0x30, 0xae, + 0xda, 0xf1, 0xa0, 0x53, 0x93, 0xfb, 0x10, 0x14, 0x9f, 0xdc, 0x78, 0xc4, 0x0c, 0x46, 0xbf, 0x81, + 0xd5, 0x5d, 0x4e, 0x4c, 0x41, 0x66, 0xd8, 0x84, 0x9f, 0xc2, 0xd5, 0xb1, 0xda, 0x24, 0xa0, 0x49, + 0xa5, 0x15, 0xe3, 0xa5, 0xa9, 0x0d, 0x58, 0xdd, 0x23, 0x5d, 0x32, 0x53, 0x7e, 0xf5, 0xa7, 0x1c, + 0xdc, 0xf0, 0xd9, 0x7f, 0x46, 0x6d, 0xd2, 0x24, 0x9c, 0x92, 0xb1, 0xf3, 0x87, 0x29, 0xce, 0x7f, + 0x0b, 0xf2, 0xd4, 0x11, 0x84, 0xf7, 0xcd, 0xae, 0x3c, 0xe0, 0x42, 0x5d, 0xd5, 0x92, 0xe6, 0x9d, + 0xe6, 0xa7, 0x79, 0x1a, 0x7a, 0x1a, 0x17, 0x7b, 0xf0, 0x2e, 0x14, 0x4c, 0xcb, 0xe2, 0xc4, 0x92, + 0x93, 0x45, 0x5e, 0xb9, 0x42, 0xfd, 0x4e, 0x72, 0x88, 0xed, 0x81, 0xa3, 0x31, 0xbc, 0x0b, 0xbf, + 0x0b, 0x79, 0xc6, 0x3b, 0x84, 0xb7, 0x4e, 0x4e, 0xcb, 0x0b, 0x12, 0xde, 0xa2, 0xfc, 0xde, 0x39, + 0xc5, 0x9f, 0xc1, 0x7c, 0x9f, 0x92, 0x97, 0xe5, 0xc5, 0x0a, 0xaa, 0x2e, 0xd7, 0xd7, 0x93, 0x03, + 0x27, 0xd2, 0xa0, 0x0d, 0x56, 0x8e, 0x28, 0x79, 0x69, 0xc8, 0x38, 0xa3, 0xf7, 0x3d, 0x9f, 0x79, + 0xdf, 0x97, 0xe2, 0xf7, 0x7d, 0x0d, 0x96, 0x47, 0x63, 0xe2, 0x3c, 0xcc, 0x7f, 0xf4, 0xf9, 0xc1, + 0x41, 0xf1, 0x1d, 0x5c, 0x80, 0xc5, 0x8f, 0xf7, 0xb7, 0xf7, 0xf6, 0x8d, 0x66, 0x11, 0xa9, 0xbf, + 0x23, 0x28, 0xc5, 0x31, 0x85, 0x1d, 0xb1, 0x0d, 0x05, 0x41, 0x6d, 0xe2, 0x4b, 0x0c, 0x25, 0x51, + 0x2b, 0x54, 0xd2, 0x29, 0x0f, 0xb7, 0x83, 0xb8, 0xf8, 0x3d, 0x6d, 0x1f, 0xe0, 0x4d, 0x28, 0x92, + 0x57, 0xa4, 0xdd, 0xf3, 0x29, 0x6e, 0x11, 0xce, 0xfd, 0xd6, 0xcb, 0xc9, 0x7c, 0x38, 0xca, 0xc7, + 0xdd, 0xb6, 0xd6, 0x94, 0xd3, 0xdd, 0xb8, 0x72, 0xe1, 0xbb, 0x2f, 0x5d, 0x55, 0x17, 0x6e, 0x06, + 0x4d, 0x91, 0x7e, 0xc1, 0x86, 0xdb, 0x21, 0x56, 0xd8, 0xdc, 0xec, 0x85, 0xf9, 0x83, 0xed, 0x46, + 0x3c, 0xa5, 0x04, 0x33, 0xce, 0x1a, 0x9a, 0x99, 0xb5, 0x1a, 0x2c, 0x04, 0x3a, 0x16, 0xf6, 0x68, + 0x12, 0x07, 0xa1, 0x47, 0xfd, 0x5f, 0x80, 0xcb, 0x41, 0x2b, 0x36, 0x83, 0x97, 0x00, 0xfe, 0x1b, + 0x41, 0x65, 0x92, 0xc2, 0xe0, 0xcd, 0xf4, 0xdb, 0x39, 0x85, 0x5a, 0x2a, 0x5b, 0xff, 0x77, 0x7b, + 0x70, 0xb5, 0xd4, 0xf5, 0x6f, 0xff, 0xfa, 0xe7, 0xfb, 0xb9, 0x47, 0xb8, 0xee, 0xbf, 0x04, 0xbe, + 0xf6, 0x0f, 0x65, 0xd3, 0xe5, 0xec, 0x0b, 0xd2, 0x16, 0x9e, 0x5e, 0x3b, 0x1b, 0xbc, 0x36, 0x92, + 0xa0, 0xff, 0x81, 0xe0, 0x76, 0xb6, 0x22, 0xe1, 0x8d, 0x64, 0x78, 0x53, 0xe9, 0x98, 0x32, 0xad, + 0xac, 0xaa, 0x8f, 0x65, 0x11, 0x1f, 0xe2, 0x47, 0x49, 0x45, 0x64, 0xd6, 0xa0, 0xd7, 0xce, 0xf0, + 0x6f, 0x28, 0x98, 0x89, 0x63, 0x8a, 0x84, 0xeb, 0x19, 0xe4, 0xa6, 0xe8, 0xa8, 0xd2, 0x98, 0x69, + 0x4f, 0x78, 0x0a, 0xba, 0x2c, 0xe0, 0x1e, 0x5e, 0x4b, 0x39, 0x85, 0x31, 0x64, 0x3f, 0x23, 0xb8, + 0x96, 0xa0, 0x57, 0xf8, 0x61, 0x3a, 0xdf, 0xc9, 0x2a, 0xa1, 0x64, 0xca, 0x8e, 0x5a, 0x97, 0xc0, + 0x1e, 0xe0, 0x5a, 0x32, 0xb3, 0x71, 0x5c, 0x7a, 0xad, 0x76, 0x86, 0x7f, 0x45, 0x50, 0x4a, 0x56, + 0x46, 0x9c, 0x42, 0x4e, 0xa6, 0x8e, 0x4e, 0x40, 0xb8, 0x23, 0x11, 0x3e, 0x56, 0xa7, 0xa5, 0x6e, + 0x7d, 0x5c, 0x80, 0x7d, 0x36, 0x4b, 0xc9, 0x5a, 0x9a, 0x86, 0x38, 0x53, 0x79, 0x95, 0x52, 0xb4, + 0x29, 0x7a, 0x25, 0x6b, 0xfb, 0xfe, 0x2b, 0x39, 0x62, 0xb3, 0x36, 0x0b, 0x9b, 0x3f, 0x22, 0x58, + 0x1e, 0x95, 0x05, 0x7c, 0x7f, 0x06, 0x41, 0x53, 0x1e, 0x4c, 0xe7, 0x1c, 0x5e, 0xc4, 0xaa, 0x44, + 0xa8, 0xe2, 0x4a, 0x32, 0x9b, 0x43, 0xa3, 0xf1, 0x0d, 0x82, 0x62, 0x7c, 0xee, 0xe2, 0x0f, 0xb2, + 0xce, 0x77, 0x1c, 0x5b, 0x1a, 0x4f, 0xf7, 0x25, 0x8a, 0xbb, 0xea, 0x44, 0x14, 0xeb, 0xa8, 0xb6, + 0x73, 0x8e, 0xa0, 0xdc, 0x66, 0x76, 0x62, 0xe6, 0x1d, 0x3c, 0x32, 0x91, 0x0f, 0xfd, 0x34, 0x87, + 0xe8, 0xf9, 0x56, 0xe8, 0x6b, 0xb1, 0xae, 0xe9, 0x58, 0x1a, 0xe3, 0x96, 
0x6e, 0x11, 0x47, 0x82, + 0xd0, 0x03, 0x93, 0xe9, 0x52, 0x6f, 0xf4, 0x6f, 0xd2, 0xc6, 0xe0, 0xeb, 0x7c, 0x4e, 0x79, 0x12, + 0x04, 0xd8, 0xed, 0xb2, 0x5e, 0x27, 0x1a, 0x4d, 0x7e, 0xca, 0xa3, 0xc6, 0x9f, 0x91, 0xf1, 0x58, + 0x1a, 0x8f, 0x07, 0xc6, 0xe3, 0xa3, 0xc6, 0xc9, 0x82, 0x4c, 0xd2, 0xf8, 0x2f, 0x00, 0x00, 0xff, + 0xff, 0x79, 0x2b, 0x3b, 0x90, 0x4a, 0x0e, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/mutation_record.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/mutation_record.pb.go new file mode 100644 index 0000000..a8885a5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/mutation_record.pb.go @@ -0,0 +1,97 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/mutation_record.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes a change made to a configuration. +type MutationRecord struct { + // When the change occurred. + MutateTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=mutate_time,json=mutateTime,proto3" json:"mutate_time,omitempty"` + // The email address of the user making the change. 
+ MutatedBy string `protobuf:"bytes,2,opt,name=mutated_by,json=mutatedBy,proto3" json:"mutated_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MutationRecord) Reset() { *m = MutationRecord{} } +func (m *MutationRecord) String() string { return proto.CompactTextString(m) } +func (*MutationRecord) ProtoMessage() {} +func (*MutationRecord) Descriptor() ([]byte, []int) { + return fileDescriptor_mutation_record_909f578c988d9451, []int{0} +} +func (m *MutationRecord) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MutationRecord.Unmarshal(m, b) +} +func (m *MutationRecord) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MutationRecord.Marshal(b, m, deterministic) +} +func (dst *MutationRecord) XXX_Merge(src proto.Message) { + xxx_messageInfo_MutationRecord.Merge(dst, src) +} +func (m *MutationRecord) XXX_Size() int { + return xxx_messageInfo_MutationRecord.Size(m) +} +func (m *MutationRecord) XXX_DiscardUnknown() { + xxx_messageInfo_MutationRecord.DiscardUnknown(m) +} + +var xxx_messageInfo_MutationRecord proto.InternalMessageInfo + +func (m *MutationRecord) GetMutateTime() *timestamp.Timestamp { + if m != nil { + return m.MutateTime + } + return nil +} + +func (m *MutationRecord) GetMutatedBy() string { + if m != nil { + return m.MutatedBy + } + return "" +} + +func init() { + proto.RegisterType((*MutationRecord)(nil), "google.monitoring.v3.MutationRecord") +} + +func init() { + proto.RegisterFile("google/monitoring/v3/mutation_record.proto", fileDescriptor_mutation_record_909f578c988d9451) +} + +var fileDescriptor_mutation_record_909f578c988d9451 = []byte{ + // 251 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0xcf, 0xcd, 0xcf, 0xcb, 0x2c, 0xc9, 0x2f, 0xca, 0xcc, 0x4b, 0xd7, 0x2f, 0x33, + 0xd6, 0xcf, 0x2d, 0x2d, 0x49, 0x2c, 0xc9, 0xcc, 0xcf, 0x8b, 0x2f, 0x4a, 0x4d, 0xce, 0x2f, 0x4a, + 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x81, 0xa8, 0xd5, 0x43, 0xa8, 0xd5, 0x2b, 0x33, + 0x96, 0x92, 0x87, 0x9a, 0x00, 0x56, 0x93, 0x54, 0x9a, 0xa6, 0x5f, 0x92, 0x99, 0x9b, 0x5a, 0x5c, + 0x92, 0x98, 0x5b, 0x00, 0xd1, 0xa6, 0x94, 0xc3, 0xc5, 0xe7, 0x0b, 0x35, 0x2f, 0x08, 0x6c, 0x9c, + 0x90, 0x35, 0x17, 0x37, 0xd8, 0x86, 0xd4, 0x78, 0x90, 0x5a, 0x09, 0x46, 0x05, 0x46, 0x0d, 0x6e, + 0x23, 0x29, 0x3d, 0xa8, 0xf1, 0x30, 0x83, 0xf4, 0x42, 0x60, 0x06, 0x05, 0x71, 0x41, 0x94, 0x83, + 0x04, 0x84, 0x64, 0xb9, 0xa0, 0xbc, 0x94, 0xf8, 0xa4, 0x4a, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xce, + 0x20, 0x4e, 0xa8, 0x88, 0x53, 0xa5, 0xd3, 0x6a, 0x46, 0x2e, 0x89, 0xe4, 0xfc, 0x5c, 0x3d, 0x6c, + 0x6e, 0x75, 0x12, 0x46, 0x75, 0x48, 0x00, 0xc8, 0xa6, 0x00, 0xc6, 0x28, 0x3b, 0xa8, 0xe2, 0xf4, + 0xfc, 0x9c, 0xc4, 0xbc, 0x74, 0xbd, 0xfc, 0xa2, 0x74, 0xfd, 0xf4, 0xd4, 0x3c, 0xb0, 0x3b, 0xf4, + 0x21, 0x52, 0x89, 0x05, 0x99, 0xc5, 0xa8, 0x61, 0x64, 0x8d, 0xe0, 0xad, 0x62, 0x92, 0x72, 0x87, + 0x18, 0xe0, 0x9c, 0x93, 0x5f, 0x9a, 0xa2, 0xe7, 0x8b, 0xb0, 0x33, 0xcc, 0xf8, 0x14, 0x4c, 0x32, + 0x06, 0x2c, 0x19, 0x83, 0x90, 0x8c, 0x09, 0x33, 0x4e, 0x62, 0x03, 0x5b, 0x62, 0x0c, 0x08, 0x00, + 0x00, 0xff, 0xff, 0x95, 0xa7, 0xf3, 0xbd, 0x87, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification.pb.go new file mode 100644 index 0000000..e9460c4 --- /dev/null +++ 
b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification.pb.go @@ -0,0 +1,370 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/notification.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import label "google.golang.org/genproto/googleapis/api/label" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates whether the channel has been verified or not. It is illegal +// to specify this field in a +// [`CreateNotificationChannel`][google.monitoring.v3.NotificationChannelService.CreateNotificationChannel] +// or an +// [`UpdateNotificationChannel`][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] +// operation. +type NotificationChannel_VerificationStatus int32 + +const ( + // Sentinel value used to indicate that the state is unknown, omitted, or + // is not applicable (as in the case of channels that neither support + // nor require verification in order to function). + NotificationChannel_VERIFICATION_STATUS_UNSPECIFIED NotificationChannel_VerificationStatus = 0 + // The channel has yet to be verified and requires verification to function. + // Note that this state also applies to the case where the verification + // process has been initiated by sending a verification code but where + // the verification code has not been submitted to complete the process. + NotificationChannel_UNVERIFIED NotificationChannel_VerificationStatus = 1 + // It has been proven that notifications can be received on this + // notification channel and that someone on the project has access + // to messages that are delivered to that channel. + NotificationChannel_VERIFIED NotificationChannel_VerificationStatus = 2 +) + +var NotificationChannel_VerificationStatus_name = map[int32]string{ + 0: "VERIFICATION_STATUS_UNSPECIFIED", + 1: "UNVERIFIED", + 2: "VERIFIED", +} +var NotificationChannel_VerificationStatus_value = map[string]int32{ + "VERIFICATION_STATUS_UNSPECIFIED": 0, + "UNVERIFIED": 1, + "VERIFIED": 2, +} + +func (x NotificationChannel_VerificationStatus) String() string { + return proto.EnumName(NotificationChannel_VerificationStatus_name, int32(x)) +} +func (NotificationChannel_VerificationStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_notification_bcd7cd313cd7d38f, []int{1, 0} +} + +// A description of a notification channel. The descriptor includes +// the properties of the channel and the set of labels or fields that +// must be specified to configure channels of a given type. +type NotificationChannelDescriptor struct { + // The full REST resource name for this descriptor. The syntax is: + // + // projects/[PROJECT_ID]/notificationChannelDescriptors/[TYPE] + // + // In the above, `[TYPE]` is the value of the `type` field. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + // The type of notification channel, such as "email", "sms", etc. + // Notification channel types are globally unique. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // A human-readable name for the notification channel type. This + // form of the name is suitable for a user interface. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // A human-readable description of the notification channel + // type. The description may include a description of the properties + // of the channel and pointers to external documentation. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The set of labels that must be defined to identify a particular + // channel of the corresponding type. Each label includes a + // description for how that field should be populated. + Labels []*label.LabelDescriptor `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty"` + // The tiers that support this notification channel; the project service tier + // must be one of the supported_tiers. + SupportedTiers []ServiceTier `protobuf:"varint,5,rep,packed,name=supported_tiers,json=supportedTiers,proto3,enum=google.monitoring.v3.ServiceTier" json:"supported_tiers,omitempty"` // Deprecated: Do not use. + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NotificationChannelDescriptor) Reset() { *m = NotificationChannelDescriptor{} } +func (m *NotificationChannelDescriptor) String() string { return proto.CompactTextString(m) } +func (*NotificationChannelDescriptor) ProtoMessage() {} +func (*NotificationChannelDescriptor) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_bcd7cd313cd7d38f, []int{0} +} +func (m *NotificationChannelDescriptor) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NotificationChannelDescriptor.Unmarshal(m, b) +} +func (m *NotificationChannelDescriptor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NotificationChannelDescriptor.Marshal(b, m, deterministic) +} +func (dst *NotificationChannelDescriptor) XXX_Merge(src proto.Message) { + xxx_messageInfo_NotificationChannelDescriptor.Merge(dst, src) +} +func (m *NotificationChannelDescriptor) XXX_Size() int { + return xxx_messageInfo_NotificationChannelDescriptor.Size(m) +} +func (m *NotificationChannelDescriptor) XXX_DiscardUnknown() { + xxx_messageInfo_NotificationChannelDescriptor.DiscardUnknown(m) +} + +var xxx_messageInfo_NotificationChannelDescriptor proto.InternalMessageInfo + +func (m *NotificationChannelDescriptor) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NotificationChannelDescriptor) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *NotificationChannelDescriptor) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *NotificationChannelDescriptor) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *NotificationChannelDescriptor) GetLabels() []*label.LabelDescriptor { + if m != nil { + return m.Labels + } + return nil +} + +// Deprecated: Do not use. 
+func (m *NotificationChannelDescriptor) GetSupportedTiers() []ServiceTier { + if m != nil { + return m.SupportedTiers + } + return nil +} + +// A `NotificationChannel` is a medium through which an alert is +// delivered when a policy violation is detected. Examples of channels +// include email, SMS, and third-party messaging applications. Fields +// containing sensitive information like authentication tokens or +// contact info are only partially populated on retrieval. +type NotificationChannel struct { + // The type of the notification channel. This field matches the + // value of the + // [NotificationChannelDescriptor.type][google.monitoring.v3.NotificationChannelDescriptor.type] + // field. + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The full REST resource name for this channel. The syntax is: + // + // projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID] + // + // The `[CHANNEL_ID]` is automatically assigned by the server on creation. + Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + // An optional human-readable name for this notification channel. It is + // recommended that you specify a non-empty and unique name in order to + // make it easier to identify the channels in your project, though this is + // not enforced. The display name is limited to 512 Unicode characters. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // An optional human-readable description of this notification channel. This + // description may provide additional details, beyond the display + // name, for the channel. This may not exceeed 1024 Unicode characters. + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + // Configuration fields that define the channel and its behavior. The + // permissible and required labels are specified in the + // [NotificationChannelDescriptor.labels][google.monitoring.v3.NotificationChannelDescriptor.labels] + // of the `NotificationChannelDescriptor` corresponding to the `type` field. + Labels map[string]string `protobuf:"bytes,5,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // User-supplied key/value data that does not need to conform to + // the corresponding `NotificationChannelDescriptor`'s schema, unlike + // the `labels` field. This field is intended to be used for organizing + // and identifying the `NotificationChannel` objects. + // + // The field can contain up to 64 entries. Each key and value is limited to + // 63 Unicode characters or 128 bytes, whichever is smaller. Labels and + // values can contain only lowercase letters, numerals, underscores, and + // dashes. Keys must begin with a letter. + UserLabels map[string]string `protobuf:"bytes,8,rep,name=user_labels,json=userLabels,proto3" json:"user_labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Indicates whether this channel has been verified or not. On a + // [`ListNotificationChannels`][google.monitoring.v3.NotificationChannelService.ListNotificationChannels] + // or + // [`GetNotificationChannel`][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + // operation, this field is expected to be populated. 
+ // + // If the value is `UNVERIFIED`, then it indicates that the channel is + // non-functioning (it both requires verification and lacks verification); + // otherwise, it is assumed that the channel works. + // + // If the channel is neither `VERIFIED` nor `UNVERIFIED`, it implies that + // the channel is of a type that does not require verification or that + // this specific channel has been exempted from verification because it was + // created prior to verification being required for channels of this type. + // + // This field cannot be modified using a standard + // [`UpdateNotificationChannel`][google.monitoring.v3.NotificationChannelService.UpdateNotificationChannel] + // operation. To change the value of this field, you must call + // [`VerifyNotificationChannel`][google.monitoring.v3.NotificationChannelService.VerifyNotificationChannel]. + VerificationStatus NotificationChannel_VerificationStatus `protobuf:"varint,9,opt,name=verification_status,json=verificationStatus,proto3,enum=google.monitoring.v3.NotificationChannel_VerificationStatus" json:"verification_status,omitempty"` + // Whether notifications are forwarded to the described channel. This makes + // it possible to disable delivery of notifications to a particular channel + // without removing the channel from all alerting policies that reference + // the channel. This is a more convenient approach when the change is + // temporary and you want to receive notifications from the same set + // of alerting policies on the channel at some point in the future. + Enabled *wrappers.BoolValue `protobuf:"bytes,11,opt,name=enabled,proto3" json:"enabled,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NotificationChannel) Reset() { *m = NotificationChannel{} } +func (m *NotificationChannel) String() string { return proto.CompactTextString(m) } +func (*NotificationChannel) ProtoMessage() {} +func (*NotificationChannel) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_bcd7cd313cd7d38f, []int{1} +} +func (m *NotificationChannel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NotificationChannel.Unmarshal(m, b) +} +func (m *NotificationChannel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NotificationChannel.Marshal(b, m, deterministic) +} +func (dst *NotificationChannel) XXX_Merge(src proto.Message) { + xxx_messageInfo_NotificationChannel.Merge(dst, src) +} +func (m *NotificationChannel) XXX_Size() int { + return xxx_messageInfo_NotificationChannel.Size(m) +} +func (m *NotificationChannel) XXX_DiscardUnknown() { + xxx_messageInfo_NotificationChannel.DiscardUnknown(m) +} + +var xxx_messageInfo_NotificationChannel proto.InternalMessageInfo + +func (m *NotificationChannel) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *NotificationChannel) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *NotificationChannel) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *NotificationChannel) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *NotificationChannel) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *NotificationChannel) GetUserLabels() map[string]string { + if m != nil { + return m.UserLabels + } + return nil +} + +func (m *NotificationChannel) GetVerificationStatus() 
NotificationChannel_VerificationStatus { + if m != nil { + return m.VerificationStatus + } + return NotificationChannel_VERIFICATION_STATUS_UNSPECIFIED +} + +func (m *NotificationChannel) GetEnabled() *wrappers.BoolValue { + if m != nil { + return m.Enabled + } + return nil +} + +func init() { + proto.RegisterType((*NotificationChannelDescriptor)(nil), "google.monitoring.v3.NotificationChannelDescriptor") + proto.RegisterType((*NotificationChannel)(nil), "google.monitoring.v3.NotificationChannel") + proto.RegisterMapType((map[string]string)(nil), "google.monitoring.v3.NotificationChannel.LabelsEntry") + proto.RegisterMapType((map[string]string)(nil), "google.monitoring.v3.NotificationChannel.UserLabelsEntry") + proto.RegisterEnum("google.monitoring.v3.NotificationChannel_VerificationStatus", NotificationChannel_VerificationStatus_name, NotificationChannel_VerificationStatus_value) +} + +func init() { + proto.RegisterFile("google/monitoring/v3/notification.proto", fileDescriptor_notification_bcd7cd313cd7d38f) +} + +var fileDescriptor_notification_bcd7cd313cd7d38f = []byte{ + // 602 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x6d, 0x6b, 0xdb, 0x3c, + 0x14, 0x7d, 0x9c, 0x34, 0x7d, 0x5a, 0xb9, 0xa4, 0x9d, 0x5a, 0x86, 0xf1, 0xde, 0xd2, 0xee, 0xc3, + 0xf2, 0xc9, 0x86, 0x64, 0x83, 0x75, 0x6f, 0xd0, 0xa4, 0xe9, 0x08, 0xac, 0x59, 0xc9, 0xdb, 0xa0, + 0x14, 0x82, 0x92, 0xa8, 0x9e, 0x98, 0x2d, 0x19, 0x49, 0xf6, 0xc8, 0xcf, 0xd8, 0x8f, 0xd8, 0x87, + 0xed, 0xa7, 0xec, 0x57, 0x0d, 0xcb, 0x8a, 0xed, 0xb5, 0x86, 0x75, 0xdf, 0x74, 0xcf, 0x3d, 0xe7, + 0xdc, 0x7b, 0x4f, 0x4c, 0xc0, 0x33, 0x8f, 0x31, 0xcf, 0xc7, 0x6e, 0xc0, 0x28, 0x91, 0x8c, 0x13, + 0xea, 0xb9, 0x71, 0xdb, 0xa5, 0x4c, 0x92, 0x6b, 0xb2, 0x40, 0x92, 0x30, 0xea, 0x84, 0x9c, 0x49, + 0x06, 0x0f, 0x52, 0xa2, 0x93, 0x13, 0x9d, 0xb8, 0x6d, 0x3f, 0xd4, 0x72, 0x14, 0x12, 0x17, 0x51, + 0xca, 0xa4, 0x92, 0x88, 0x54, 0x63, 0xdf, 0x2f, 0x74, 0x7d, 0x34, 0xc7, 0xbe, 0xc6, 0x0f, 0x4b, + 0x87, 0x2e, 0x58, 0x10, 0xac, 0xc7, 0xd9, 0x8f, 0x35, 0x45, 0x55, 0xf3, 0xe8, 0xda, 0xfd, 0xca, + 0x51, 0x18, 0x62, 0xae, 0xad, 0x8f, 0xbe, 0x55, 0xc0, 0xa3, 0x41, 0x61, 0xcb, 0xee, 0x67, 0x44, + 0x29, 0xf6, 0x4f, 0xb1, 0x58, 0x70, 0x12, 0x4a, 0xc6, 0x21, 0x04, 0x1b, 0x14, 0x05, 0xd8, 0xda, + 0x6c, 0x18, 0xcd, 0xed, 0xa1, 0x7a, 0x27, 0x98, 0x5c, 0x85, 0xd8, 0x32, 0x52, 0x2c, 0x79, 0xc3, + 0x43, 0xb0, 0xb3, 0x24, 0x22, 0xf4, 0xd1, 0x6a, 0xa6, 0xf8, 0x15, 0xd5, 0x33, 0x35, 0x36, 0x48, + 0x64, 0x0d, 0x60, 0x2e, 0xb5, 0x31, 0x61, 0xd4, 0xaa, 0x6a, 0x46, 0x0e, 0xc1, 0x36, 0xd8, 0x54, + 0x07, 0x0a, 0x6b, 0xa3, 0x51, 0x6d, 0x9a, 0xad, 0x07, 0x8e, 0x8e, 0x0b, 0x85, 0xc4, 0xf9, 0x90, + 0x74, 0xf2, 0xcd, 0x86, 0x9a, 0x0a, 0x07, 0x60, 0x57, 0x44, 0x61, 0xc8, 0xb8, 0xc4, 0xcb, 0x99, + 0x24, 0x98, 0x0b, 0xab, 0xd6, 0xa8, 0x36, 0xeb, 0xad, 0x43, 0xa7, 0x2c, 0x6c, 0x67, 0x84, 0x79, + 0x4c, 0x16, 0x78, 0x4c, 0x30, 0xef, 0x54, 0x2c, 0x63, 0x58, 0xcf, 0xd4, 0x09, 0x24, 0x8e, 0xbe, + 0xd7, 0xc0, 0x7e, 0x49, 0x26, 0xa5, 0x57, 0x97, 0xa5, 0x73, 0x33, 0x89, 0xea, 0x5f, 0x93, 0xd8, + 0xb8, 0x9d, 0xc4, 0x79, 0x96, 0x44, 0x4d, 0x25, 0xf1, 0xa2, 0xfc, 0x96, 0x92, 0x3d, 0xd3, 0x9c, + 0x44, 0x8f, 0x4a, 0xbe, 0xca, 0x32, 0xba, 0x04, 0x66, 0x24, 0x30, 0x9f, 0x69, 0xcf, 0x2d, 0xe5, + 0x79, 0x7c, 0x77, 0xcf, 0x89, 0xc0, 0xbc, 0xe8, 0x0b, 0xa2, 0x0c, 0x80, 0x01, 0xd8, 0x8f, 0x31, + 0xcf, 0x24, 0x33, 0x21, 0x91, 0x8c, 0x84, 0xb5, 0xdd, 0x30, 0x9a, 0xf5, 0xd6, 0x9b, 0xbb, 0xcf, + 0x98, 0x16, 0x4c, 0x46, 0xca, 0x63, 0x08, 0xe3, 0x5b, 0x18, 
0x7c, 0x0e, 0xfe, 0xc7, 0x14, 0xcd, + 0x7d, 0xbc, 0xb4, 0xcc, 0x86, 0xd1, 0x34, 0x5b, 0xf6, 0x7a, 0xc4, 0xfa, 0x23, 0x77, 0x3a, 0x8c, + 0xf9, 0x53, 0xe4, 0x47, 0x78, 0xb8, 0xa6, 0xda, 0xc7, 0xc0, 0x2c, 0xec, 0x0f, 0xf7, 0x40, 0xf5, + 0x0b, 0x5e, 0xe9, 0x9f, 0x32, 0x79, 0xc2, 0x03, 0x50, 0x8b, 0x13, 0x89, 0xfe, 0x70, 0xd3, 0xe2, + 0x55, 0xe5, 0xa5, 0x61, 0xbf, 0x05, 0xbb, 0x37, 0xce, 0xff, 0x17, 0xf9, 0xd1, 0x27, 0x00, 0x6f, + 0x5f, 0x06, 0x9f, 0x82, 0x27, 0xd3, 0xde, 0xb0, 0x7f, 0xd6, 0xef, 0x9e, 0x8c, 0xfb, 0x1f, 0x07, + 0xb3, 0xd1, 0xf8, 0x64, 0x3c, 0x19, 0xcd, 0x26, 0x83, 0xd1, 0x45, 0xaf, 0xdb, 0x3f, 0xeb, 0xf7, + 0x4e, 0xf7, 0xfe, 0x83, 0x75, 0x00, 0x26, 0x83, 0x94, 0xd6, 0x3b, 0xdd, 0x33, 0xe0, 0x0e, 0xd8, + 0xca, 0xaa, 0x4a, 0xe7, 0x87, 0x01, 0xac, 0x05, 0x0b, 0x4a, 0x03, 0xee, 0xdc, 0x2b, 0x26, 0x7c, + 0x91, 0x04, 0x73, 0x61, 0x5c, 0xbe, 0xd3, 0x54, 0x8f, 0xf9, 0x88, 0x7a, 0x0e, 0xe3, 0x9e, 0xeb, + 0x61, 0xaa, 0x62, 0x73, 0xd3, 0x16, 0x0a, 0x89, 0xf8, 0xf3, 0xff, 0xe4, 0x75, 0x5e, 0xfd, 0xac, + 0xd8, 0xef, 0x53, 0x83, 0xae, 0xcf, 0xa2, 0xa5, 0x73, 0x9e, 0x4f, 0x9c, 0xb6, 0x7f, 0xad, 0x9b, + 0x57, 0xaa, 0x79, 0x95, 0x37, 0xaf, 0xa6, 0xed, 0xf9, 0xa6, 0x1a, 0xd2, 0xfe, 0x1d, 0x00, 0x00, + 0xff, 0xff, 0xf7, 0x1b, 0x09, 0x21, 0x28, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification_service.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification_service.pb.go new file mode 100644 index 0000000..58b8b97 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/notification_service.pb.go @@ -0,0 +1,1309 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/notification_service.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The `ListNotificationChannelDescriptors` request. +type ListNotificationChannelDescriptorsRequest struct { + // The REST resource name of the parent from which to retrieve + // the notification channel descriptors. The expected syntax is: + // + // projects/[PROJECT_ID] + // + // Note that this names the parent container in which to look for the + // descriptors; to retrieve a single descriptor by name, use the + // [GetNotificationChannelDescriptor][google.monitoring.v3.NotificationChannelService.GetNotificationChannelDescriptor] + // operation, instead. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The maximum number of results to return in a single response. 
If + // not set to a positive number, a reasonable value will be chosen by the + // service. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` must contain a value returned as the + // `next_page_token` in a previous response to request the next set + // of results. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotificationChannelDescriptorsRequest) Reset() { + *m = ListNotificationChannelDescriptorsRequest{} +} +func (m *ListNotificationChannelDescriptorsRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotificationChannelDescriptorsRequest) ProtoMessage() {} +func (*ListNotificationChannelDescriptorsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{0} +} +func (m *ListNotificationChannelDescriptorsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotificationChannelDescriptorsRequest.Unmarshal(m, b) +} +func (m *ListNotificationChannelDescriptorsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotificationChannelDescriptorsRequest.Marshal(b, m, deterministic) +} +func (dst *ListNotificationChannelDescriptorsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotificationChannelDescriptorsRequest.Merge(dst, src) +} +func (m *ListNotificationChannelDescriptorsRequest) XXX_Size() int { + return xxx_messageInfo_ListNotificationChannelDescriptorsRequest.Size(m) +} +func (m *ListNotificationChannelDescriptorsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotificationChannelDescriptorsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotificationChannelDescriptorsRequest proto.InternalMessageInfo + +func (m *ListNotificationChannelDescriptorsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNotificationChannelDescriptorsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNotificationChannelDescriptorsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The `ListNotificationChannelDescriptors` response. +type ListNotificationChannelDescriptorsResponse struct { + // The monitored resource descriptors supported for the specified + // project, optionally filtered. + ChannelDescriptors []*NotificationChannelDescriptor `protobuf:"bytes,1,rep,name=channel_descriptors,json=channelDescriptors,proto3" json:"channel_descriptors,omitempty"` + // If not empty, indicates that there may be more results that match + // the request. Use the value in the `page_token` field in a + // subsequent request to fetch the next set of results. If empty, + // all results have been returned. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotificationChannelDescriptorsResponse) Reset() { + *m = ListNotificationChannelDescriptorsResponse{} +} +func (m *ListNotificationChannelDescriptorsResponse) String() string { + return proto.CompactTextString(m) +} +func (*ListNotificationChannelDescriptorsResponse) ProtoMessage() {} +func (*ListNotificationChannelDescriptorsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{1} +} +func (m *ListNotificationChannelDescriptorsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotificationChannelDescriptorsResponse.Unmarshal(m, b) +} +func (m *ListNotificationChannelDescriptorsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotificationChannelDescriptorsResponse.Marshal(b, m, deterministic) +} +func (dst *ListNotificationChannelDescriptorsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotificationChannelDescriptorsResponse.Merge(dst, src) +} +func (m *ListNotificationChannelDescriptorsResponse) XXX_Size() int { + return xxx_messageInfo_ListNotificationChannelDescriptorsResponse.Size(m) +} +func (m *ListNotificationChannelDescriptorsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotificationChannelDescriptorsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotificationChannelDescriptorsResponse proto.InternalMessageInfo + +func (m *ListNotificationChannelDescriptorsResponse) GetChannelDescriptors() []*NotificationChannelDescriptor { + if m != nil { + return m.ChannelDescriptors + } + return nil +} + +func (m *ListNotificationChannelDescriptorsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The `GetNotificationChannelDescriptor` response. +type GetNotificationChannelDescriptorRequest struct { + // The channel type for which to execute the request. The format is + // `projects/[PROJECT_ID]/notificationChannelDescriptors/{channel_type}`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNotificationChannelDescriptorRequest) Reset() { + *m = GetNotificationChannelDescriptorRequest{} +} +func (m *GetNotificationChannelDescriptorRequest) String() string { return proto.CompactTextString(m) } +func (*GetNotificationChannelDescriptorRequest) ProtoMessage() {} +func (*GetNotificationChannelDescriptorRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{2} +} +func (m *GetNotificationChannelDescriptorRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNotificationChannelDescriptorRequest.Unmarshal(m, b) +} +func (m *GetNotificationChannelDescriptorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNotificationChannelDescriptorRequest.Marshal(b, m, deterministic) +} +func (dst *GetNotificationChannelDescriptorRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNotificationChannelDescriptorRequest.Merge(dst, src) +} +func (m *GetNotificationChannelDescriptorRequest) XXX_Size() int { + return xxx_messageInfo_GetNotificationChannelDescriptorRequest.Size(m) +} +func (m *GetNotificationChannelDescriptorRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNotificationChannelDescriptorRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNotificationChannelDescriptorRequest proto.InternalMessageInfo + +func (m *GetNotificationChannelDescriptorRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `CreateNotificationChannel` request. +type CreateNotificationChannelRequest struct { + // The project on which to execute the request. The format is: + // + // projects/[PROJECT_ID] + // + // Note that this names the container into which the channel will be + // written. This does not name the newly created channel. The resulting + // channel's name will have a normalized version of this field as a prefix, + // but will add `/notificationChannels/[CHANNEL_ID]` to identify the channel. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // The definition of the `NotificationChannel` to create. 
+ NotificationChannel *NotificationChannel `protobuf:"bytes,2,opt,name=notification_channel,json=notificationChannel,proto3" json:"notification_channel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateNotificationChannelRequest) Reset() { *m = CreateNotificationChannelRequest{} } +func (m *CreateNotificationChannelRequest) String() string { return proto.CompactTextString(m) } +func (*CreateNotificationChannelRequest) ProtoMessage() {} +func (*CreateNotificationChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{3} +} +func (m *CreateNotificationChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateNotificationChannelRequest.Unmarshal(m, b) +} +func (m *CreateNotificationChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateNotificationChannelRequest.Marshal(b, m, deterministic) +} +func (dst *CreateNotificationChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateNotificationChannelRequest.Merge(dst, src) +} +func (m *CreateNotificationChannelRequest) XXX_Size() int { + return xxx_messageInfo_CreateNotificationChannelRequest.Size(m) +} +func (m *CreateNotificationChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateNotificationChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateNotificationChannelRequest proto.InternalMessageInfo + +func (m *CreateNotificationChannelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateNotificationChannelRequest) GetNotificationChannel() *NotificationChannel { + if m != nil { + return m.NotificationChannel + } + return nil +} + +// The `ListNotificationChannels` request. +type ListNotificationChannelsRequest struct { + // The project on which to execute the request. The format is + // `projects/[PROJECT_ID]`. That is, this names the container + // in which to look for the notification channels; it does not name a + // specific channel. To query a specific channel by REST resource name, use + // the + // [`GetNotificationChannel`][google.monitoring.v3.NotificationChannelService.GetNotificationChannel] + // operation. + Name string `protobuf:"bytes,5,opt,name=name,proto3" json:"name,omitempty"` + // If provided, this field specifies the criteria that must be met by + // notification channels to be included in the response. + // + // For more details, see [sorting and + // filtering](/monitoring/api/v3/sorting-and-filtering). + Filter string `protobuf:"bytes,6,opt,name=filter,proto3" json:"filter,omitempty"` + // A comma-separated list of fields by which to sort the result. Supports + // the same set of fields as in `filter`. Entries can be prefixed with + // a minus sign to sort in descending rather than ascending order. + // + // For more details, see [sorting and + // filtering](/monitoring/api/v3/sorting-and-filtering). + OrderBy string `protobuf:"bytes,7,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // The maximum number of results to return in a single response. If + // not set to a positive number, a reasonable value will be chosen by the + // service. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` must contain a value returned as the + // `next_page_token` in a previous response to request the next set + // of results. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotificationChannelsRequest) Reset() { *m = ListNotificationChannelsRequest{} } +func (m *ListNotificationChannelsRequest) String() string { return proto.CompactTextString(m) } +func (*ListNotificationChannelsRequest) ProtoMessage() {} +func (*ListNotificationChannelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{4} +} +func (m *ListNotificationChannelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotificationChannelsRequest.Unmarshal(m, b) +} +func (m *ListNotificationChannelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotificationChannelsRequest.Marshal(b, m, deterministic) +} +func (dst *ListNotificationChannelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotificationChannelsRequest.Merge(dst, src) +} +func (m *ListNotificationChannelsRequest) XXX_Size() int { + return xxx_messageInfo_ListNotificationChannelsRequest.Size(m) +} +func (m *ListNotificationChannelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotificationChannelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotificationChannelsRequest proto.InternalMessageInfo + +func (m *ListNotificationChannelsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ListNotificationChannelsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListNotificationChannelsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListNotificationChannelsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListNotificationChannelsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The `ListNotificationChannels` response. +type ListNotificationChannelsResponse struct { + // The notification channels defined for the specified project. + NotificationChannels []*NotificationChannel `protobuf:"bytes,3,rep,name=notification_channels,json=notificationChannels,proto3" json:"notification_channels,omitempty"` + // If not empty, indicates that there may be more results that match + // the request. Use the value in the `page_token` field in a + // subsequent request to fetch the next set of results. If empty, + // all results have been returned. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListNotificationChannelsResponse) Reset() { *m = ListNotificationChannelsResponse{} } +func (m *ListNotificationChannelsResponse) String() string { return proto.CompactTextString(m) } +func (*ListNotificationChannelsResponse) ProtoMessage() {} +func (*ListNotificationChannelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{5} +} +func (m *ListNotificationChannelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListNotificationChannelsResponse.Unmarshal(m, b) +} +func (m *ListNotificationChannelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListNotificationChannelsResponse.Marshal(b, m, deterministic) +} +func (dst *ListNotificationChannelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListNotificationChannelsResponse.Merge(dst, src) +} +func (m *ListNotificationChannelsResponse) XXX_Size() int { + return xxx_messageInfo_ListNotificationChannelsResponse.Size(m) +} +func (m *ListNotificationChannelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListNotificationChannelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListNotificationChannelsResponse proto.InternalMessageInfo + +func (m *ListNotificationChannelsResponse) GetNotificationChannels() []*NotificationChannel { + if m != nil { + return m.NotificationChannels + } + return nil +} + +func (m *ListNotificationChannelsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The `GetNotificationChannel` request. +type GetNotificationChannelRequest struct { + // The channel for which to execute the request. The format is + // `projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]`. 
+ Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNotificationChannelRequest) Reset() { *m = GetNotificationChannelRequest{} } +func (m *GetNotificationChannelRequest) String() string { return proto.CompactTextString(m) } +func (*GetNotificationChannelRequest) ProtoMessage() {} +func (*GetNotificationChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{6} +} +func (m *GetNotificationChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNotificationChannelRequest.Unmarshal(m, b) +} +func (m *GetNotificationChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNotificationChannelRequest.Marshal(b, m, deterministic) +} +func (dst *GetNotificationChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNotificationChannelRequest.Merge(dst, src) +} +func (m *GetNotificationChannelRequest) XXX_Size() int { + return xxx_messageInfo_GetNotificationChannelRequest.Size(m) +} +func (m *GetNotificationChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNotificationChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNotificationChannelRequest proto.InternalMessageInfo + +func (m *GetNotificationChannelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `UpdateNotificationChannel` request. +type UpdateNotificationChannelRequest struct { + // The fields to update. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // A description of the changes to be applied to the specified + // notification channel. The description must provide a definition for + // fields to be updated; the names of these fields should also be + // included in the `update_mask`. 
+ NotificationChannel *NotificationChannel `protobuf:"bytes,3,opt,name=notification_channel,json=notificationChannel,proto3" json:"notification_channel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateNotificationChannelRequest) Reset() { *m = UpdateNotificationChannelRequest{} } +func (m *UpdateNotificationChannelRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateNotificationChannelRequest) ProtoMessage() {} +func (*UpdateNotificationChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{7} +} +func (m *UpdateNotificationChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateNotificationChannelRequest.Unmarshal(m, b) +} +func (m *UpdateNotificationChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateNotificationChannelRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateNotificationChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateNotificationChannelRequest.Merge(dst, src) +} +func (m *UpdateNotificationChannelRequest) XXX_Size() int { + return xxx_messageInfo_UpdateNotificationChannelRequest.Size(m) +} +func (m *UpdateNotificationChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateNotificationChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateNotificationChannelRequest proto.InternalMessageInfo + +func (m *UpdateNotificationChannelRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateNotificationChannelRequest) GetNotificationChannel() *NotificationChannel { + if m != nil { + return m.NotificationChannel + } + return nil +} + +// The `DeleteNotificationChannel` request. +type DeleteNotificationChannelRequest struct { + // The channel for which to execute the request. The format is + // `projects/[PROJECT_ID]/notificationChannels/[CHANNEL_ID]`. + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + // If true, the notification channel will be deleted regardless of its + // use in alert policies (the policies will be updated to remove the + // channel). If false, channels that are still referenced by an existing + // alerting policy will fail to be deleted in a delete operation. 
+ Force bool `protobuf:"varint,5,opt,name=force,proto3" json:"force,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteNotificationChannelRequest) Reset() { *m = DeleteNotificationChannelRequest{} } +func (m *DeleteNotificationChannelRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteNotificationChannelRequest) ProtoMessage() {} +func (*DeleteNotificationChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{8} +} +func (m *DeleteNotificationChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteNotificationChannelRequest.Unmarshal(m, b) +} +func (m *DeleteNotificationChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteNotificationChannelRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteNotificationChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteNotificationChannelRequest.Merge(dst, src) +} +func (m *DeleteNotificationChannelRequest) XXX_Size() int { + return xxx_messageInfo_DeleteNotificationChannelRequest.Size(m) +} +func (m *DeleteNotificationChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteNotificationChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteNotificationChannelRequest proto.InternalMessageInfo + +func (m *DeleteNotificationChannelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeleteNotificationChannelRequest) GetForce() bool { + if m != nil { + return m.Force + } + return false +} + +// The `SendNotificationChannelVerificationCode` request. +type SendNotificationChannelVerificationCodeRequest struct { + // The notification channel to which to send a verification code. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SendNotificationChannelVerificationCodeRequest) Reset() { + *m = SendNotificationChannelVerificationCodeRequest{} +} +func (m *SendNotificationChannelVerificationCodeRequest) String() string { + return proto.CompactTextString(m) +} +func (*SendNotificationChannelVerificationCodeRequest) ProtoMessage() {} +func (*SendNotificationChannelVerificationCodeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{9} +} +func (m *SendNotificationChannelVerificationCodeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SendNotificationChannelVerificationCodeRequest.Unmarshal(m, b) +} +func (m *SendNotificationChannelVerificationCodeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SendNotificationChannelVerificationCodeRequest.Marshal(b, m, deterministic) +} +func (dst *SendNotificationChannelVerificationCodeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SendNotificationChannelVerificationCodeRequest.Merge(dst, src) +} +func (m *SendNotificationChannelVerificationCodeRequest) XXX_Size() int { + return xxx_messageInfo_SendNotificationChannelVerificationCodeRequest.Size(m) +} +func (m *SendNotificationChannelVerificationCodeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SendNotificationChannelVerificationCodeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SendNotificationChannelVerificationCodeRequest proto.InternalMessageInfo + +func (m *SendNotificationChannelVerificationCodeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The `GetNotificationChannelVerificationCode` request. +type GetNotificationChannelVerificationCodeRequest struct { + // The notification channel for which a verification code is to be generated + // and retrieved. This must name a channel that is already verified; if + // the specified channel is not verified, the request will fail. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The desired expiration time. If specified, the API will guarantee that + // the returned code will not be valid after the specified timestamp; + // however, the API cannot guarantee that the returned code will be + // valid for at least as long as the requested time (the API puts an upper + // bound on the amount of time for which a code may be valid). If omitted, + // a default expiration will be used, which may be less than the max + // permissible expiration (so specifying an expiration may extend the + // code's lifetime over omitting an expiration, even though the API does + // impose an upper limit on the maximum expiration that is permitted). 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNotificationChannelVerificationCodeRequest) Reset() { + *m = GetNotificationChannelVerificationCodeRequest{} +} +func (m *GetNotificationChannelVerificationCodeRequest) String() string { + return proto.CompactTextString(m) +} +func (*GetNotificationChannelVerificationCodeRequest) ProtoMessage() {} +func (*GetNotificationChannelVerificationCodeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{10} +} +func (m *GetNotificationChannelVerificationCodeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNotificationChannelVerificationCodeRequest.Unmarshal(m, b) +} +func (m *GetNotificationChannelVerificationCodeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNotificationChannelVerificationCodeRequest.Marshal(b, m, deterministic) +} +func (dst *GetNotificationChannelVerificationCodeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNotificationChannelVerificationCodeRequest.Merge(dst, src) +} +func (m *GetNotificationChannelVerificationCodeRequest) XXX_Size() int { + return xxx_messageInfo_GetNotificationChannelVerificationCodeRequest.Size(m) +} +func (m *GetNotificationChannelVerificationCodeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetNotificationChannelVerificationCodeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNotificationChannelVerificationCodeRequest proto.InternalMessageInfo + +func (m *GetNotificationChannelVerificationCodeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetNotificationChannelVerificationCodeRequest) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +// The `GetNotificationChannelVerificationCode` request. +type GetNotificationChannelVerificationCodeResponse struct { + // The verification code, which may be used to verify other channels + // that have an equivalent identity (i.e. other channels of the same + // type with the same fingerprint such as other email channels with + // the same email address or other sms channels with the same number). + Code string `protobuf:"bytes,1,opt,name=code,proto3" json:"code,omitempty"` + // The expiration time associated with the code that was returned. If + // an expiration was provided in the request, this is the minimum of the + // requested expiration in the request and the max permitted expiration. 
+ ExpireTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetNotificationChannelVerificationCodeResponse) Reset() { + *m = GetNotificationChannelVerificationCodeResponse{} +} +func (m *GetNotificationChannelVerificationCodeResponse) String() string { + return proto.CompactTextString(m) +} +func (*GetNotificationChannelVerificationCodeResponse) ProtoMessage() {} +func (*GetNotificationChannelVerificationCodeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{11} +} +func (m *GetNotificationChannelVerificationCodeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetNotificationChannelVerificationCodeResponse.Unmarshal(m, b) +} +func (m *GetNotificationChannelVerificationCodeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetNotificationChannelVerificationCodeResponse.Marshal(b, m, deterministic) +} +func (dst *GetNotificationChannelVerificationCodeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetNotificationChannelVerificationCodeResponse.Merge(dst, src) +} +func (m *GetNotificationChannelVerificationCodeResponse) XXX_Size() int { + return xxx_messageInfo_GetNotificationChannelVerificationCodeResponse.Size(m) +} +func (m *GetNotificationChannelVerificationCodeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetNotificationChannelVerificationCodeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetNotificationChannelVerificationCodeResponse proto.InternalMessageInfo + +func (m *GetNotificationChannelVerificationCodeResponse) GetCode() string { + if m != nil { + return m.Code + } + return "" +} + +func (m *GetNotificationChannelVerificationCodeResponse) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +// The `VerifyNotificationChannel` request. +type VerifyNotificationChannelRequest struct { + // The notification channel to verify. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The verification code that was delivered to the channel as + // a result of invoking the `SendNotificationChannelVerificationCode` API + // method or that was retrieved from a verified channel via + // `GetNotificationChannelVerificationCode`. For example, one might have + // "G-123456" or "TKNZGhhd2EyN3I1MnRnMjRv" (in general, one is only + // guaranteed that the code is valid UTF-8; one should not + // make any assumptions regarding the structure or format of the code). 
+ Code string `protobuf:"bytes,2,opt,name=code,proto3" json:"code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *VerifyNotificationChannelRequest) Reset() { *m = VerifyNotificationChannelRequest{} } +func (m *VerifyNotificationChannelRequest) String() string { return proto.CompactTextString(m) } +func (*VerifyNotificationChannelRequest) ProtoMessage() {} +func (*VerifyNotificationChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_notification_service_82acdfb78337ea2a, []int{12} +} +func (m *VerifyNotificationChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_VerifyNotificationChannelRequest.Unmarshal(m, b) +} +func (m *VerifyNotificationChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_VerifyNotificationChannelRequest.Marshal(b, m, deterministic) +} +func (dst *VerifyNotificationChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_VerifyNotificationChannelRequest.Merge(dst, src) +} +func (m *VerifyNotificationChannelRequest) XXX_Size() int { + return xxx_messageInfo_VerifyNotificationChannelRequest.Size(m) +} +func (m *VerifyNotificationChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_VerifyNotificationChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_VerifyNotificationChannelRequest proto.InternalMessageInfo + +func (m *VerifyNotificationChannelRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *VerifyNotificationChannelRequest) GetCode() string { + if m != nil { + return m.Code + } + return "" +} + +func init() { + proto.RegisterType((*ListNotificationChannelDescriptorsRequest)(nil), "google.monitoring.v3.ListNotificationChannelDescriptorsRequest") + proto.RegisterType((*ListNotificationChannelDescriptorsResponse)(nil), "google.monitoring.v3.ListNotificationChannelDescriptorsResponse") + proto.RegisterType((*GetNotificationChannelDescriptorRequest)(nil), "google.monitoring.v3.GetNotificationChannelDescriptorRequest") + proto.RegisterType((*CreateNotificationChannelRequest)(nil), "google.monitoring.v3.CreateNotificationChannelRequest") + proto.RegisterType((*ListNotificationChannelsRequest)(nil), "google.monitoring.v3.ListNotificationChannelsRequest") + proto.RegisterType((*ListNotificationChannelsResponse)(nil), "google.monitoring.v3.ListNotificationChannelsResponse") + proto.RegisterType((*GetNotificationChannelRequest)(nil), "google.monitoring.v3.GetNotificationChannelRequest") + proto.RegisterType((*UpdateNotificationChannelRequest)(nil), "google.monitoring.v3.UpdateNotificationChannelRequest") + proto.RegisterType((*DeleteNotificationChannelRequest)(nil), "google.monitoring.v3.DeleteNotificationChannelRequest") + proto.RegisterType((*SendNotificationChannelVerificationCodeRequest)(nil), "google.monitoring.v3.SendNotificationChannelVerificationCodeRequest") + proto.RegisterType((*GetNotificationChannelVerificationCodeRequest)(nil), "google.monitoring.v3.GetNotificationChannelVerificationCodeRequest") + proto.RegisterType((*GetNotificationChannelVerificationCodeResponse)(nil), "google.monitoring.v3.GetNotificationChannelVerificationCodeResponse") + proto.RegisterType((*VerifyNotificationChannelRequest)(nil), "google.monitoring.v3.VerifyNotificationChannelRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// NotificationChannelServiceClient is the client API for NotificationChannelService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type NotificationChannelServiceClient interface { + // Lists the descriptors for supported channel types. The use of descriptors + // makes it possible for new channel types to be dynamically added. + ListNotificationChannelDescriptors(ctx context.Context, in *ListNotificationChannelDescriptorsRequest, opts ...grpc.CallOption) (*ListNotificationChannelDescriptorsResponse, error) + // Gets a single channel descriptor. The descriptor indicates which fields + // are expected / permitted for a notification channel of the given type. + GetNotificationChannelDescriptor(ctx context.Context, in *GetNotificationChannelDescriptorRequest, opts ...grpc.CallOption) (*NotificationChannelDescriptor, error) + // Lists the notification channels that have been created for the project. + ListNotificationChannels(ctx context.Context, in *ListNotificationChannelsRequest, opts ...grpc.CallOption) (*ListNotificationChannelsResponse, error) + // Gets a single notification channel. The channel includes the relevant + // configuration details with which the channel was created. However, the + // response may truncate or omit passwords, API keys, or other private key + // matter and thus the response may not be 100% identical to the information + // that was supplied in the call to the create method. + GetNotificationChannel(ctx context.Context, in *GetNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) + // Creates a new notification channel, representing a single notification + // endpoint such as an email address, SMS number, or PagerDuty service. + CreateNotificationChannel(ctx context.Context, in *CreateNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) + // Updates a notification channel. Fields not specified in the field mask + // remain unchanged. + UpdateNotificationChannel(ctx context.Context, in *UpdateNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) + // Deletes a notification channel. + DeleteNotificationChannel(ctx context.Context, in *DeleteNotificationChannelRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Causes a verification code to be delivered to the channel. The code + // can then be supplied in `VerifyNotificationChannel` to verify the channel. + SendNotificationChannelVerificationCode(ctx context.Context, in *SendNotificationChannelVerificationCodeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Requests a verification code for an already verified channel that can then + // be used in a call to VerifyNotificationChannel() on a different channel + // with an equivalent identity in the same or in a different project. This + // makes it possible to copy a channel between projects without requiring + // manual reverification of the channel. 
If the channel is not in the + // verified state, this method will fail (in other words, this may only be + // used if the SendNotificationChannelVerificationCode and + // VerifyNotificationChannel paths have already been used to put the given + // channel into the verified state). + // + // There is no guarantee that the verification codes returned by this method + // will be of a similar structure or form as the ones that are delivered + // to the channel via SendNotificationChannelVerificationCode; while + // VerifyNotificationChannel() will recognize both the codes delivered via + // SendNotificationChannelVerificationCode() and returned from + // GetNotificationChannelVerificationCode(), it is typically the case that + // the verification codes delivered via + // SendNotificationChannelVerificationCode() will be shorter and also + // have a shorter expiration (e.g. codes such as "G-123456") whereas + // GetVerificationCode() will typically return a much longer, websafe base + // 64 encoded string that has a longer expiration time. + GetNotificationChannelVerificationCode(ctx context.Context, in *GetNotificationChannelVerificationCodeRequest, opts ...grpc.CallOption) (*GetNotificationChannelVerificationCodeResponse, error) + // Verifies a `NotificationChannel` by proving receipt of the code + // delivered to the channel as a result of calling + // `SendNotificationChannelVerificationCode`. + VerifyNotificationChannel(ctx context.Context, in *VerifyNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) +} + +type notificationChannelServiceClient struct { + cc *grpc.ClientConn +} + +func NewNotificationChannelServiceClient(cc *grpc.ClientConn) NotificationChannelServiceClient { + return &notificationChannelServiceClient{cc} +} + +func (c *notificationChannelServiceClient) ListNotificationChannelDescriptors(ctx context.Context, in *ListNotificationChannelDescriptorsRequest, opts ...grpc.CallOption) (*ListNotificationChannelDescriptorsResponse, error) { + out := new(ListNotificationChannelDescriptorsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) GetNotificationChannelDescriptor(ctx context.Context, in *GetNotificationChannelDescriptorRequest, opts ...grpc.CallOption) (*NotificationChannelDescriptor, error) { + out := new(NotificationChannelDescriptor) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) ListNotificationChannels(ctx context.Context, in *ListNotificationChannelsRequest, opts ...grpc.CallOption) (*ListNotificationChannelsResponse, error) { + out := new(ListNotificationChannelsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/ListNotificationChannels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) GetNotificationChannel(ctx context.Context, in *GetNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) { + out := new(NotificationChannel) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/GetNotificationChannel", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) CreateNotificationChannel(ctx context.Context, in *CreateNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) { + out := new(NotificationChannel) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) UpdateNotificationChannel(ctx context.Context, in *UpdateNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) { + out := new(NotificationChannel) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) DeleteNotificationChannel(ctx context.Context, in *DeleteNotificationChannelRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) SendNotificationChannelVerificationCode(ctx context.Context, in *SendNotificationChannelVerificationCodeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) GetNotificationChannelVerificationCode(ctx context.Context, in *GetNotificationChannelVerificationCodeRequest, opts ...grpc.CallOption) (*GetNotificationChannelVerificationCodeResponse, error) { + out := new(GetNotificationChannelVerificationCodeResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *notificationChannelServiceClient) VerifyNotificationChannel(ctx context.Context, in *VerifyNotificationChannelRequest, opts ...grpc.CallOption) (*NotificationChannel, error) { + out := new(NotificationChannel) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// NotificationChannelServiceServer is the server API for NotificationChannelService service. +type NotificationChannelServiceServer interface { + // Lists the descriptors for supported channel types. The use of descriptors + // makes it possible for new channel types to be dynamically added. + ListNotificationChannelDescriptors(context.Context, *ListNotificationChannelDescriptorsRequest) (*ListNotificationChannelDescriptorsResponse, error) + // Gets a single channel descriptor. The descriptor indicates which fields + // are expected / permitted for a notification channel of the given type. + GetNotificationChannelDescriptor(context.Context, *GetNotificationChannelDescriptorRequest) (*NotificationChannelDescriptor, error) + // Lists the notification channels that have been created for the project. 
+ ListNotificationChannels(context.Context, *ListNotificationChannelsRequest) (*ListNotificationChannelsResponse, error) + // Gets a single notification channel. The channel includes the relevant + // configuration details with which the channel was created. However, the + // response may truncate or omit passwords, API keys, or other private key + // matter and thus the response may not be 100% identical to the information + // that was supplied in the call to the create method. + GetNotificationChannel(context.Context, *GetNotificationChannelRequest) (*NotificationChannel, error) + // Creates a new notification channel, representing a single notification + // endpoint such as an email address, SMS number, or PagerDuty service. + CreateNotificationChannel(context.Context, *CreateNotificationChannelRequest) (*NotificationChannel, error) + // Updates a notification channel. Fields not specified in the field mask + // remain unchanged. + UpdateNotificationChannel(context.Context, *UpdateNotificationChannelRequest) (*NotificationChannel, error) + // Deletes a notification channel. + DeleteNotificationChannel(context.Context, *DeleteNotificationChannelRequest) (*empty.Empty, error) + // Causes a verification code to be delivered to the channel. The code + // can then be supplied in `VerifyNotificationChannel` to verify the channel. + SendNotificationChannelVerificationCode(context.Context, *SendNotificationChannelVerificationCodeRequest) (*empty.Empty, error) + // Requests a verification code for an already verified channel that can then + // be used in a call to VerifyNotificationChannel() on a different channel + // with an equivalent identity in the same or in a different project. This + // makes it possible to copy a channel between projects without requiring + // manual reverification of the channel. If the channel is not in the + // verified state, this method will fail (in other words, this may only be + // used if the SendNotificationChannelVerificationCode and + // VerifyNotificationChannel paths have already been used to put the given + // channel into the verified state). + // + // There is no guarantee that the verification codes returned by this method + // will be of a similar structure or form as the ones that are delivered + // to the channel via SendNotificationChannelVerificationCode; while + // VerifyNotificationChannel() will recognize both the codes delivered via + // SendNotificationChannelVerificationCode() and returned from + // GetNotificationChannelVerificationCode(), it is typically the case that + // the verification codes delivered via + // SendNotificationChannelVerificationCode() will be shorter and also + // have a shorter expiration (e.g. codes such as "G-123456") whereas + // GetVerificationCode() will typically return a much longer, websafe base + // 64 encoded string that has a longer expiration time. + GetNotificationChannelVerificationCode(context.Context, *GetNotificationChannelVerificationCodeRequest) (*GetNotificationChannelVerificationCodeResponse, error) + // Verifies a `NotificationChannel` by proving receipt of the code + // delivered to the channel as a result of calling + // `SendNotificationChannelVerificationCode`. 
+ VerifyNotificationChannel(context.Context, *VerifyNotificationChannelRequest) (*NotificationChannel, error) +} + +func RegisterNotificationChannelServiceServer(s *grpc.Server, srv NotificationChannelServiceServer) { + s.RegisterService(&_NotificationChannelService_serviceDesc, srv) +} + +func _NotificationChannelService_ListNotificationChannelDescriptors_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNotificationChannelDescriptorsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).ListNotificationChannelDescriptors(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/ListNotificationChannelDescriptors", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).ListNotificationChannelDescriptors(ctx, req.(*ListNotificationChannelDescriptorsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_GetNotificationChannelDescriptor_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNotificationChannelDescriptorRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).GetNotificationChannelDescriptor(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelDescriptor", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).GetNotificationChannelDescriptor(ctx, req.(*GetNotificationChannelDescriptorRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_ListNotificationChannels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListNotificationChannelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).ListNotificationChannels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/ListNotificationChannels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).ListNotificationChannels(ctx, req.(*ListNotificationChannelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_GetNotificationChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNotificationChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).GetNotificationChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/GetNotificationChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).GetNotificationChannel(ctx, req.(*GetNotificationChannelRequest)) + 
} + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_CreateNotificationChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateNotificationChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).CreateNotificationChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/CreateNotificationChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).CreateNotificationChannel(ctx, req.(*CreateNotificationChannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_UpdateNotificationChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateNotificationChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).UpdateNotificationChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/UpdateNotificationChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).UpdateNotificationChannel(ctx, req.(*UpdateNotificationChannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_DeleteNotificationChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteNotificationChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).DeleteNotificationChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/DeleteNotificationChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).DeleteNotificationChannel(ctx, req.(*DeleteNotificationChannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_SendNotificationChannelVerificationCode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SendNotificationChannelVerificationCodeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).SendNotificationChannelVerificationCode(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/SendNotificationChannelVerificationCode", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).SendNotificationChannelVerificationCode(ctx, req.(*SendNotificationChannelVerificationCodeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_GetNotificationChannelVerificationCode_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetNotificationChannelVerificationCodeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).GetNotificationChannelVerificationCode(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/GetNotificationChannelVerificationCode", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).GetNotificationChannelVerificationCode(ctx, req.(*GetNotificationChannelVerificationCodeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _NotificationChannelService_VerifyNotificationChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(VerifyNotificationChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(NotificationChannelServiceServer).VerifyNotificationChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.NotificationChannelService/VerifyNotificationChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(NotificationChannelServiceServer).VerifyNotificationChannel(ctx, req.(*VerifyNotificationChannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _NotificationChannelService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.monitoring.v3.NotificationChannelService", + HandlerType: (*NotificationChannelServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListNotificationChannelDescriptors", + Handler: _NotificationChannelService_ListNotificationChannelDescriptors_Handler, + }, + { + MethodName: "GetNotificationChannelDescriptor", + Handler: _NotificationChannelService_GetNotificationChannelDescriptor_Handler, + }, + { + MethodName: "ListNotificationChannels", + Handler: _NotificationChannelService_ListNotificationChannels_Handler, + }, + { + MethodName: "GetNotificationChannel", + Handler: _NotificationChannelService_GetNotificationChannel_Handler, + }, + { + MethodName: "CreateNotificationChannel", + Handler: _NotificationChannelService_CreateNotificationChannel_Handler, + }, + { + MethodName: "UpdateNotificationChannel", + Handler: _NotificationChannelService_UpdateNotificationChannel_Handler, + }, + { + MethodName: "DeleteNotificationChannel", + Handler: _NotificationChannelService_DeleteNotificationChannel_Handler, + }, + { + MethodName: "SendNotificationChannelVerificationCode", + Handler: _NotificationChannelService_SendNotificationChannelVerificationCode_Handler, + }, + { + MethodName: "GetNotificationChannelVerificationCode", + Handler: _NotificationChannelService_GetNotificationChannelVerificationCode_Handler, + }, + { + MethodName: "VerifyNotificationChannel", + Handler: _NotificationChannelService_VerifyNotificationChannel_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/monitoring/v3/notification_service.proto", +} + +func init() { + proto.RegisterFile("google/monitoring/v3/notification_service.proto", fileDescriptor_notification_service_82acdfb78337ea2a) +} + +var fileDescriptor_notification_service_82acdfb78337ea2a = []byte{ + // 1020 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x57, 0x41, 0x6f, 
0xdc, 0x44, + 0x14, 0xd6, 0xec, 0x26, 0x69, 0xfa, 0x22, 0x04, 0x9a, 0x86, 0xc8, 0xd9, 0xb6, 0xaa, 0xe5, 0x43, + 0x93, 0xae, 0x8a, 0x2d, 0xad, 0x4b, 0x84, 0x52, 0x52, 0xda, 0x64, 0xdb, 0x22, 0x48, 0x51, 0xb4, + 0x29, 0x91, 0x40, 0x11, 0x2b, 0xc7, 0x9e, 0x35, 0x26, 0xbb, 0x33, 0xc6, 0x33, 0x1b, 0x35, 0xad, + 0x2a, 0x15, 0xfe, 0x02, 0xfc, 0x01, 0x24, 0x4e, 0x3d, 0x20, 0xce, 0xa0, 0x72, 0x46, 0x5c, 0x11, + 0x5c, 0xb9, 0xc0, 0xff, 0x40, 0x1e, 0xcf, 0x66, 0x37, 0x9b, 0xf1, 0xae, 0xdd, 0xf6, 0xe6, 0x99, + 0x37, 0xf3, 0xde, 0xf7, 0xbe, 0xf7, 0xf9, 0x3d, 0x1b, 0x9c, 0x90, 0xb1, 0xb0, 0x4b, 0x9c, 0x1e, + 0xa3, 0x91, 0x60, 0x49, 0x44, 0x43, 0xe7, 0xc8, 0x75, 0x28, 0x13, 0x51, 0x27, 0xf2, 0x3d, 0x11, + 0x31, 0xda, 0xe6, 0x24, 0x39, 0x8a, 0x7c, 0x62, 0xc7, 0x09, 0x13, 0x0c, 0x2f, 0x66, 0x17, 0xec, + 0xe1, 0x05, 0xfb, 0xc8, 0xad, 0x5d, 0x52, 0x6e, 0xbc, 0x38, 0x72, 0x3c, 0x4a, 0x99, 0x90, 0x57, + 0x79, 0x76, 0xa7, 0xb6, 0x32, 0x35, 0x88, 0x3a, 0x78, 0x51, 0x1d, 0x94, 0xab, 0x83, 0x7e, 0xc7, + 0x21, 0xbd, 0x58, 0x1c, 0x2b, 0xa3, 0x39, 0x6e, 0xec, 0x44, 0xa4, 0x1b, 0xb4, 0x7b, 0x1e, 0x3f, + 0x54, 0x27, 0x2e, 0x8d, 0x9f, 0xe0, 0x22, 0xe9, 0xfb, 0x42, 0x59, 0xaf, 0x8c, 0x5b, 0x45, 0xd4, + 0x23, 0x5c, 0x78, 0xbd, 0x38, 0x3b, 0x60, 0x3d, 0x81, 0x6b, 0xdb, 0x11, 0x17, 0x9f, 0x8c, 0xe0, + 0xda, 0xfa, 0xd2, 0xa3, 0x94, 0x74, 0x9b, 0x84, 0xfb, 0x49, 0x14, 0x0b, 0x96, 0xf0, 0x16, 0xf9, + 0xba, 0x4f, 0xb8, 0xc0, 0x18, 0x66, 0xa8, 0xd7, 0x23, 0xc6, 0x8c, 0x89, 0x56, 0xcf, 0xb7, 0xe4, + 0x33, 0xbe, 0x08, 0xe7, 0x63, 0x2f, 0x24, 0x6d, 0x1e, 0x3d, 0x26, 0x46, 0xc5, 0x44, 0xab, 0xb3, + 0xad, 0xf9, 0x74, 0x63, 0x37, 0x7a, 0x4c, 0xf0, 0x65, 0x00, 0x69, 0x14, 0xec, 0x90, 0x50, 0xa3, + 0x2a, 0xaf, 0xc9, 0xe3, 0x0f, 0xd3, 0x0d, 0xeb, 0x17, 0x04, 0xf5, 0x22, 0xd1, 0x79, 0xcc, 0x28, + 0x27, 0x38, 0x80, 0x0b, 0x7e, 0x66, 0x6d, 0x07, 0x43, 0xb3, 0x81, 0xcc, 0xea, 0xea, 0x42, 0xc3, + 0xb5, 0x75, 0x45, 0xb2, 0x27, 0xba, 0x6e, 0x61, 0xff, 0x4c, 0x34, 0x7c, 0x15, 0xde, 0xa4, 0xe4, + 0x91, 0x68, 0x8f, 0x00, 0xaf, 0x48, 0xe0, 0x6f, 0xa4, 0xdb, 0x3b, 0x27, 0xe0, 0x37, 0x60, 0xe5, + 0x3e, 0x99, 0x0c, 0x7d, 0x9c, 0xb7, 0xea, 0x90, 0x37, 0xeb, 0x7b, 0x04, 0xe6, 0x56, 0x42, 0x3c, + 0x41, 0x34, 0x2e, 0x26, 0x5c, 0xc4, 0xfb, 0xb0, 0x78, 0x4a, 0xaa, 0x2a, 0x05, 0x09, 0x72, 0xa1, + 0x71, 0xad, 0x30, 0x0d, 0xad, 0x0b, 0xf4, 0xec, 0xa6, 0xf5, 0x23, 0x82, 0x2b, 0x39, 0x25, 0x39, + 0x23, 0x83, 0xd9, 0x11, 0x54, 0x4b, 0x30, 0xd7, 0x89, 0xba, 0x82, 0x24, 0xc6, 0x9c, 0xdc, 0x55, + 0x2b, 0xbc, 0x0c, 0xf3, 0x2c, 0x09, 0x48, 0xd2, 0x3e, 0x38, 0x36, 0xce, 0x49, 0xcb, 0x39, 0xb9, + 0xde, 0x3c, 0x3e, 0xad, 0x9c, 0xea, 0x44, 0xe5, 0xcc, 0x8c, 0x2b, 0xe7, 0x39, 0x02, 0x33, 0x1f, + 0xa6, 0xd2, 0xcb, 0x17, 0xf0, 0xb6, 0x8e, 0x29, 0x6e, 0x54, 0xa5, 0x62, 0x4a, 0x50, 0xb5, 0xa8, + 0xa1, 0xaa, 0xb8, 0x52, 0x5c, 0xb8, 0xac, 0x57, 0xca, 0x24, 0x7d, 0xbc, 0x40, 0x60, 0x7e, 0x1a, + 0x07, 0x93, 0xf5, 0x71, 0x13, 0x16, 0xfa, 0xf2, 0x8c, 0xec, 0x08, 0x4a, 0x02, 0xb5, 0x41, 0x5e, + 0x83, 0x97, 0xde, 0xbe, 0x97, 0x36, 0x8d, 0x07, 0x1e, 0x3f, 0x6c, 0x41, 0x76, 0x3c, 0x7d, 0xce, + 0x15, 0x52, 0xf5, 0xb5, 0x08, 0x69, 0x1b, 0xcc, 0x26, 0xe9, 0x92, 0xd2, 0xf2, 0x5e, 0x84, 0xd9, + 0x0e, 0x4b, 0xfc, 0x4c, 0x5d, 0xf3, 0xad, 0x6c, 0x61, 0x35, 0xc1, 0xde, 0x25, 0x34, 0xd0, 0xf8, + 0xda, 0x23, 0xc9, 0x70, 0x8b, 0x05, 0x64, 0xdc, 0x37, 0x1a, 0xe1, 0xf4, 0x19, 0x82, 0x77, 0xf4, + 0x95, 0x28, 0xe1, 0x25, 0x25, 0x9d, 0x3c, 0x8a, 0xa3, 0x84, 0xb4, 0xd3, 0x66, 0x9a, 0x4b, 0xfa, + 0xc3, 0x41, 0xa7, 0x6d, 0x41, 0x76, 0x3c, 0xdd, 0xb0, 0xbe, 0x41, 0x60, 0x17, 0x85, 0xa0, 0x64, + 0x8c, 0x61, 
0xc6, 0x67, 0xc1, 0x09, 0x86, 0xf4, 0xf9, 0xd5, 0x30, 0x7c, 0x04, 0xa6, 0x0c, 0x76, + 0x5c, 0xa0, 0x34, 0xa3, 0x89, 0x0f, 0x80, 0x54, 0x86, 0x40, 0x1a, 0xbf, 0xbe, 0x05, 0x35, 0x8d, + 0x9b, 0xdd, 0x6c, 0x7e, 0xe2, 0xff, 0x10, 0x58, 0xd3, 0x3b, 0x3c, 0xfe, 0x40, 0x2f, 0xb6, 0xc2, + 0x93, 0xa9, 0x76, 0xfb, 0xe5, 0x1d, 0x64, 0x2c, 0x5b, 0xef, 0x7f, 0xfb, 0xe7, 0xbf, 0xdf, 0x55, + 0xd6, 0xf0, 0x8d, 0x74, 0x4c, 0x3f, 0x49, 0xf3, 0xdd, 0x88, 0x13, 0xf6, 0x15, 0xf1, 0x05, 0x77, + 0xea, 0x4f, 0x1d, 0x3a, 0x39, 0x81, 0xbf, 0x11, 0x98, 0xd3, 0xa6, 0x01, 0xde, 0xd0, 0x83, 0x2c, + 0x38, 0x45, 0x6a, 0x2f, 0x33, 0xe1, 0xac, 0x5b, 0x32, 0xad, 0xf7, 0xf0, 0x9a, 0x2e, 0xad, 0x29, + 0x59, 0x39, 0xf5, 0xa7, 0xf8, 0x05, 0x02, 0x23, 0xaf, 0xd1, 0xe2, 0x77, 0x4b, 0xb1, 0x7e, 0x52, + 0xac, 0xb5, 0xb2, 0xd7, 0x54, 0x89, 0x1a, 0x32, 0x97, 0xeb, 0xb8, 0x5e, 0xb8, 0x44, 0x1c, 0xff, + 0x84, 0x60, 0x49, 0x4f, 0x30, 0x76, 0xcb, 0x94, 0x63, 0x80, 0xbd, 0x78, 0x5b, 0xb4, 0x6e, 0x48, + 0xb8, 0x36, 0xbe, 0x5e, 0x94, 0x7a, 0x49, 0xf8, 0xef, 0x08, 0x96, 0x73, 0xbf, 0x0b, 0x70, 0x0e, + 0x75, 0xd3, 0x3e, 0x24, 0xca, 0xc0, 0xfe, 0x50, 0xc2, 0xde, 0xb4, 0x4a, 0xb0, 0xbc, 0xae, 0x1d, + 0x24, 0xf8, 0x1f, 0x04, 0xcb, 0xb9, 0x23, 0x2c, 0x2f, 0x95, 0x69, 0x33, 0xaf, 0x4c, 0x2a, 0x6d, + 0x99, 0xca, 0x67, 0x8d, 0x3b, 0x59, 0x2a, 0x1a, 0x8c, 0x76, 0xc1, 0xb2, 0xe4, 0x64, 0xf8, 0x03, + 0x82, 0xe5, 0xdc, 0x29, 0x97, 0x97, 0xe1, 0xb4, 0xb1, 0x58, 0x5b, 0x3a, 0xd3, 0xc7, 0xef, 0xa6, + 0xbf, 0x04, 0x03, 0x41, 0xd5, 0xcb, 0x09, 0xea, 0x2f, 0x04, 0x2b, 0x05, 0x67, 0x27, 0x6e, 0xea, + 0x11, 0x97, 0x1b, 0xbd, 0xb9, 0xf8, 0xb7, 0x25, 0xfe, 0x7b, 0xd6, 0x9d, 0x32, 0xf8, 0xd7, 0x39, + 0xa1, 0xc1, 0x78, 0xa4, 0x75, 0x54, 0xc7, 0xcf, 0x2a, 0x70, 0xb5, 0xd8, 0x24, 0xc5, 0x5b, 0x65, + 0xde, 0xf4, 0xbc, 0xac, 0x9a, 0xaf, 0xe6, 0x44, 0xf5, 0xb0, 0x8f, 0x25, 0x07, 0x77, 0xad, 0xdb, + 0xa5, 0x38, 0x08, 0x89, 0xd0, 0x51, 0xf0, 0x1b, 0x82, 0xe5, 0xdc, 0x49, 0x9e, 0x27, 0xbf, 0x69, + 0xa3, 0xbf, 0xcc, 0x0b, 0xa6, 0xa6, 0x8b, 0xe5, 0x96, 0xca, 0xe6, 0x48, 0x22, 0x58, 0x47, 0xf5, + 0xcd, 0x9f, 0x11, 0x18, 0x3e, 0xeb, 0x69, 0x03, 0x6e, 0x1a, 0xa3, 0x11, 0xd5, 0x07, 0xc5, 0x4e, + 0xaa, 0xa8, 0x1d, 0xf4, 0xf9, 0x2d, 0x75, 0x23, 0x64, 0x5d, 0x8f, 0x86, 0x36, 0x4b, 0x42, 0x27, + 0x24, 0x54, 0xea, 0x4d, 0xfd, 0xdd, 0x7b, 0x71, 0xc4, 0x4f, 0xff, 0x7c, 0xdf, 0x1c, 0xae, 0x9e, + 0x57, 0x6a, 0xf7, 0x33, 0x07, 0x5b, 0x5d, 0xd6, 0x0f, 0xec, 0x07, 0xc3, 0xc0, 0x7b, 0xee, 0x1f, + 0x03, 0xe3, 0xbe, 0x34, 0xee, 0x0f, 0x8d, 0xfb, 0x7b, 0xee, 0xc1, 0x9c, 0x0c, 0xe2, 0xfe, 0x1f, + 0x00, 0x00, 0xff, 0xff, 0x57, 0x01, 0xd1, 0x1c, 0x45, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/span_context.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/span_context.pb.go new file mode 100644 index 0000000..68740c9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/span_context.pb.go @@ -0,0 +1,96 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/span_context.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The context of a span, attached to google.api.Distribution.Exemplars +// in google.api.Distribution values during aggregation. +// +// It contains the name of a span with format: +// projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] +type SpanContext struct { + // The resource name of the span in the following format: + // + // projects/[PROJECT_ID]/traces/[TRACE_ID]/spans/[SPAN_ID] + // + // [TRACE_ID] is a unique identifier for a trace within a project; + // it is a 32-character hexadecimal encoding of a 16-byte array. + // + // [SPAN_ID] is a unique identifier for a span within a trace; it + // is a 16-character hexadecimal encoding of an 8-byte array. + SpanName string `protobuf:"bytes,1,opt,name=span_name,json=spanName,proto3" json:"span_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SpanContext) Reset() { *m = SpanContext{} } +func (m *SpanContext) String() string { return proto.CompactTextString(m) } +func (*SpanContext) ProtoMessage() {} +func (*SpanContext) Descriptor() ([]byte, []int) { + return fileDescriptor_span_context_4c30f2e57596ce1c, []int{0} +} +func (m *SpanContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SpanContext.Unmarshal(m, b) +} +func (m *SpanContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SpanContext.Marshal(b, m, deterministic) +} +func (dst *SpanContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SpanContext.Merge(dst, src) +} +func (m *SpanContext) XXX_Size() int { + return xxx_messageInfo_SpanContext.Size(m) +} +func (m *SpanContext) XXX_DiscardUnknown() { + xxx_messageInfo_SpanContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SpanContext proto.InternalMessageInfo + +func (m *SpanContext) GetSpanName() string { + if m != nil { + return m.SpanName + } + return "" +} + +func init() { + proto.RegisterType((*SpanContext)(nil), "google.monitoring.v3.SpanContext") +} + +func init() { + proto.RegisterFile("google/monitoring/v3/span_context.proto", fileDescriptor_span_context_4c30f2e57596ce1c) +} + +var fileDescriptor_span_context_4c30f2e57596ce1c = []byte{ + // 197 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0xcf, 0xcd, 0xcf, 0xcb, 0x2c, 0xc9, 0x2f, 0xca, 0xcc, 0x4b, 0xd7, 0x2f, 0x33, + 0xd6, 0x2f, 0x2e, 0x48, 0xcc, 0x8b, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xad, 0x28, 0xd1, 0x2b, 0x28, + 0xca, 0x2f, 0xc9, 0x17, 0x12, 0x81, 0x28, 0xd4, 0x43, 0x28, 0xd4, 0x2b, 0x33, 0x56, 0xd2, 0xe2, + 0xe2, 0x0e, 0x2e, 0x48, 0xcc, 0x73, 0x86, 0x28, 0x15, 0x92, 0xe6, 0xe2, 0x04, 0x6b, 0xcd, 0x4b, + 0xcc, 0x4d, 0x95, 0x60, 0x54, 0x60, 0xd4, 0xe0, 0x0c, 0xe2, 0x00, 0x09, 0xf8, 0x25, 0xe6, 0xa6, + 0x3a, 0xad, 0x60, 0xe4, 0x92, 0x48, 0xce, 0xcf, 0xd5, 0xc3, 0x66, 0x90, 0x93, 0x00, 0x92, 0x31, + 0x01, 0x20, 0x0b, 0x03, 0x18, 0xa3, 0xec, 0xa0, 0x2a, 0xd3, 0xf3, 0x73, 0x12, 0xf3, 0xd2, 0xf5, + 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0xce, 0xd1, 0x87, 0x48, 0x25, 0x16, 0x64, 0x16, + 0xa3, 0x3a, 0xdd, 0x1a, 0xc1, 0x5b, 0xc5, 0x24, 0xe5, 0x0e, 0x31, 0xc0, 0x39, 0x27, 0xbf, 0x34, + 0x45, 0xcf, 0x17, 0x61, 0x61, 0x98, 0xf1, 0x29, 0x98, 0x64, 0x0c, 0x58, 0x32, 0x06, 0x21, 0x19, + 0x13, 0x66, 0x9c, 0xc4, 0x06, 0xb6, 0xc4, 0x18, 
0x10, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x19, 0x01, + 0xcb, 0x1e, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime.pb.go new file mode 100644 index 0000000..794918e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime.pb.go @@ -0,0 +1,969 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/monitoring/v3/uptime.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import monitoredres "google.golang.org/genproto/googleapis/api/monitoredres" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The regions from which an uptime check can be run. +type UptimeCheckRegion int32 + +const ( + // Default value if no region is specified. Will result in uptime checks + // running from all regions. + UptimeCheckRegion_REGION_UNSPECIFIED UptimeCheckRegion = 0 + // Allows checks to run from locations within the United States of America. + UptimeCheckRegion_USA UptimeCheckRegion = 1 + // Allows checks to run from locations within the continent of Europe. + UptimeCheckRegion_EUROPE UptimeCheckRegion = 2 + // Allows checks to run from locations within the continent of South + // America. + UptimeCheckRegion_SOUTH_AMERICA UptimeCheckRegion = 3 + // Allows checks to run from locations within the Asia Pacific area (ex: + // Singapore). + UptimeCheckRegion_ASIA_PACIFIC UptimeCheckRegion = 4 +) + +var UptimeCheckRegion_name = map[int32]string{ + 0: "REGION_UNSPECIFIED", + 1: "USA", + 2: "EUROPE", + 3: "SOUTH_AMERICA", + 4: "ASIA_PACIFIC", +} +var UptimeCheckRegion_value = map[string]int32{ + "REGION_UNSPECIFIED": 0, + "USA": 1, + "EUROPE": 2, + "SOUTH_AMERICA": 3, + "ASIA_PACIFIC": 4, +} + +func (x UptimeCheckRegion) String() string { + return proto.EnumName(UptimeCheckRegion_name, int32(x)) +} +func (UptimeCheckRegion) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{0} +} + +// The supported resource types that can be used as values of +// `group_resource.resource_type`. +// `INSTANCE` includes `gce_instance` and `aws_ec2_instance` resource types. +// The resource types `gae_app` and `uptime_url` are not valid here because +// group checks on App Engine modules and URLs are not allowed. +type GroupResourceType int32 + +const ( + // Default value (not valid). + GroupResourceType_RESOURCE_TYPE_UNSPECIFIED GroupResourceType = 0 + // A group of instances from Google Cloud Platform (GCP) or + // Amazon Web Services (AWS). + GroupResourceType_INSTANCE GroupResourceType = 1 + // A group of Amazon ELB load balancers. 
+ GroupResourceType_AWS_ELB_LOAD_BALANCER GroupResourceType = 2 +) + +var GroupResourceType_name = map[int32]string{ + 0: "RESOURCE_TYPE_UNSPECIFIED", + 1: "INSTANCE", + 2: "AWS_ELB_LOAD_BALANCER", +} +var GroupResourceType_value = map[string]int32{ + "RESOURCE_TYPE_UNSPECIFIED": 0, + "INSTANCE": 1, + "AWS_ELB_LOAD_BALANCER": 2, +} + +func (x GroupResourceType) String() string { + return proto.EnumName(GroupResourceType_name, int32(x)) +} +func (GroupResourceType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1} +} + +// An internal checker allows uptime checks to run on private/internal GCP +// resources. +type InternalChecker struct { + // A unique resource name for this InternalChecker. The format is: + // + // + // `projects/[PROJECT_ID]/internalCheckers/[INTERNAL_CHECKER_ID]`. + // + // PROJECT_ID is the stackdriver workspace project for the + // uptime check config associated with the internal checker. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The checker's human-readable name. The display name + // should be unique within a Stackdriver Workspace in order to make it easier + // to identify; however, uniqueness is not enforced. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The [GCP VPC network](https://cloud.google.com/vpc/docs/vpc) where the + // internal resource lives (ex: "default"). + Network string `protobuf:"bytes,3,opt,name=network,proto3" json:"network,omitempty"` + // The GCP zone the uptime check should egress from. Only respected for + // internal uptime checks, where internal_network is specified. + GcpZone string `protobuf:"bytes,4,opt,name=gcp_zone,json=gcpZone,proto3" json:"gcp_zone,omitempty"` + // The GCP project_id where the internal checker lives. Not necessary + // the same as the workspace project. 
+ PeerProjectId string `protobuf:"bytes,6,opt,name=peer_project_id,json=peerProjectId,proto3" json:"peer_project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InternalChecker) Reset() { *m = InternalChecker{} } +func (m *InternalChecker) String() string { return proto.CompactTextString(m) } +func (*InternalChecker) ProtoMessage() {} +func (*InternalChecker) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{0} +} +func (m *InternalChecker) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InternalChecker.Unmarshal(m, b) +} +func (m *InternalChecker) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InternalChecker.Marshal(b, m, deterministic) +} +func (dst *InternalChecker) XXX_Merge(src proto.Message) { + xxx_messageInfo_InternalChecker.Merge(dst, src) +} +func (m *InternalChecker) XXX_Size() int { + return xxx_messageInfo_InternalChecker.Size(m) +} +func (m *InternalChecker) XXX_DiscardUnknown() { + xxx_messageInfo_InternalChecker.DiscardUnknown(m) +} + +var xxx_messageInfo_InternalChecker proto.InternalMessageInfo + +func (m *InternalChecker) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InternalChecker) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *InternalChecker) GetNetwork() string { + if m != nil { + return m.Network + } + return "" +} + +func (m *InternalChecker) GetGcpZone() string { + if m != nil { + return m.GcpZone + } + return "" +} + +func (m *InternalChecker) GetPeerProjectId() string { + if m != nil { + return m.PeerProjectId + } + return "" +} + +// This message configures which resources and services to monitor for +// availability. +type UptimeCheckConfig struct { + // A unique resource name for this UptimeCheckConfig. The format is: + // + // + // `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. + // + // This field should be omitted when creating the uptime check configuration; + // on create, the resource name is assigned by the server and included in the + // response. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A human-friendly name for the uptime check configuration. The display name + // should be unique within a Stackdriver Workspace in order to make it easier + // to identify; however, uniqueness is not enforced. Required. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // The resource the check is checking. Required. + // + // Types that are valid to be assigned to Resource: + // *UptimeCheckConfig_MonitoredResource + // *UptimeCheckConfig_ResourceGroup_ + Resource isUptimeCheckConfig_Resource `protobuf_oneof:"resource"` + // The type of uptime check request. + // + // Types that are valid to be assigned to CheckRequestType: + // *UptimeCheckConfig_HttpCheck_ + // *UptimeCheckConfig_TcpCheck_ + CheckRequestType isUptimeCheckConfig_CheckRequestType `protobuf_oneof:"check_request_type"` + // How often, in seconds, the uptime check is performed. + // Currently, the only supported values are `60s` (1 minute), `300s` + // (5 minutes), `600s` (10 minutes), and `900s` (15 minutes). Optional, + // defaults to `300s`. 
+ Period *duration.Duration `protobuf:"bytes,7,opt,name=period,proto3" json:"period,omitempty"` + // The maximum amount of time to wait for the request to complete (must be + // between 1 and 60 seconds). Required. + Timeout *duration.Duration `protobuf:"bytes,8,opt,name=timeout,proto3" json:"timeout,omitempty"` + // The expected content on the page the check is run against. + // Currently, only the first entry in the list is supported, and other entries + // will be ignored. The server will look for an exact match of the string in + // the page response's content. This field is optional and should only be + // specified if a content match is required. + ContentMatchers []*UptimeCheckConfig_ContentMatcher `protobuf:"bytes,9,rep,name=content_matchers,json=contentMatchers,proto3" json:"content_matchers,omitempty"` + // The list of regions from which the check will be run. + // Some regions contain one location, and others contain more than one. + // If this field is specified, enough regions to include a minimum of + // 3 locations must be provided, or an error message is returned. + // Not specifying this field will result in uptime checks running from all + // regions. + SelectedRegions []UptimeCheckRegion `protobuf:"varint,10,rep,packed,name=selected_regions,json=selectedRegions,proto3,enum=google.monitoring.v3.UptimeCheckRegion" json:"selected_regions,omitempty"` + // If this is true, then checks are made only from the 'internal_checkers'. + // If it is false, then checks are made only from the 'selected_regions'. + // It is an error to provide 'selected_regions' when is_internal is true, + // or to provide 'internal_checkers' when is_internal is false. + IsInternal bool `protobuf:"varint,15,opt,name=is_internal,json=isInternal,proto3" json:"is_internal,omitempty"` + // The internal checkers that this check will egress from. If `is_internal` is + // true and this list is empty, the check will egress from all the + // InternalCheckers configured for the project that owns this CheckConfig. 
+ InternalCheckers []*InternalChecker `protobuf:"bytes,14,rep,name=internal_checkers,json=internalCheckers,proto3" json:"internal_checkers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig) Reset() { *m = UptimeCheckConfig{} } +func (m *UptimeCheckConfig) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckConfig) ProtoMessage() {} +func (*UptimeCheckConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1} +} +func (m *UptimeCheckConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig.Unmarshal(m, b) +} +func (m *UptimeCheckConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig.Merge(dst, src) +} +func (m *UptimeCheckConfig) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig.Size(m) +} +func (m *UptimeCheckConfig) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig proto.InternalMessageInfo + +func (m *UptimeCheckConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UptimeCheckConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +type isUptimeCheckConfig_Resource interface { + isUptimeCheckConfig_Resource() +} + +type UptimeCheckConfig_MonitoredResource struct { + MonitoredResource *monitoredres.MonitoredResource `protobuf:"bytes,3,opt,name=monitored_resource,json=monitoredResource,proto3,oneof"` +} + +type UptimeCheckConfig_ResourceGroup_ struct { + ResourceGroup *UptimeCheckConfig_ResourceGroup `protobuf:"bytes,4,opt,name=resource_group,json=resourceGroup,proto3,oneof"` +} + +func (*UptimeCheckConfig_MonitoredResource) isUptimeCheckConfig_Resource() {} + +func (*UptimeCheckConfig_ResourceGroup_) isUptimeCheckConfig_Resource() {} + +func (m *UptimeCheckConfig) GetResource() isUptimeCheckConfig_Resource { + if m != nil { + return m.Resource + } + return nil +} + +func (m *UptimeCheckConfig) GetMonitoredResource() *monitoredres.MonitoredResource { + if x, ok := m.GetResource().(*UptimeCheckConfig_MonitoredResource); ok { + return x.MonitoredResource + } + return nil +} + +func (m *UptimeCheckConfig) GetResourceGroup() *UptimeCheckConfig_ResourceGroup { + if x, ok := m.GetResource().(*UptimeCheckConfig_ResourceGroup_); ok { + return x.ResourceGroup + } + return nil +} + +type isUptimeCheckConfig_CheckRequestType interface { + isUptimeCheckConfig_CheckRequestType() +} + +type UptimeCheckConfig_HttpCheck_ struct { + HttpCheck *UptimeCheckConfig_HttpCheck `protobuf:"bytes,5,opt,name=http_check,json=httpCheck,proto3,oneof"` +} + +type UptimeCheckConfig_TcpCheck_ struct { + TcpCheck *UptimeCheckConfig_TcpCheck `protobuf:"bytes,6,opt,name=tcp_check,json=tcpCheck,proto3,oneof"` +} + +func (*UptimeCheckConfig_HttpCheck_) isUptimeCheckConfig_CheckRequestType() {} + +func (*UptimeCheckConfig_TcpCheck_) isUptimeCheckConfig_CheckRequestType() {} + +func (m *UptimeCheckConfig) GetCheckRequestType() isUptimeCheckConfig_CheckRequestType { + if m != nil { + return m.CheckRequestType + } + return nil +} + +func (m *UptimeCheckConfig) GetHttpCheck() *UptimeCheckConfig_HttpCheck { + if x, ok := m.GetCheckRequestType().(*UptimeCheckConfig_HttpCheck_); ok { + return x.HttpCheck + } + 
return nil +} + +func (m *UptimeCheckConfig) GetTcpCheck() *UptimeCheckConfig_TcpCheck { + if x, ok := m.GetCheckRequestType().(*UptimeCheckConfig_TcpCheck_); ok { + return x.TcpCheck + } + return nil +} + +func (m *UptimeCheckConfig) GetPeriod() *duration.Duration { + if m != nil { + return m.Period + } + return nil +} + +func (m *UptimeCheckConfig) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *UptimeCheckConfig) GetContentMatchers() []*UptimeCheckConfig_ContentMatcher { + if m != nil { + return m.ContentMatchers + } + return nil +} + +func (m *UptimeCheckConfig) GetSelectedRegions() []UptimeCheckRegion { + if m != nil { + return m.SelectedRegions + } + return nil +} + +func (m *UptimeCheckConfig) GetIsInternal() bool { + if m != nil { + return m.IsInternal + } + return false +} + +func (m *UptimeCheckConfig) GetInternalCheckers() []*InternalChecker { + if m != nil { + return m.InternalCheckers + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*UptimeCheckConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _UptimeCheckConfig_OneofMarshaler, _UptimeCheckConfig_OneofUnmarshaler, _UptimeCheckConfig_OneofSizer, []interface{}{ + (*UptimeCheckConfig_MonitoredResource)(nil), + (*UptimeCheckConfig_ResourceGroup_)(nil), + (*UptimeCheckConfig_HttpCheck_)(nil), + (*UptimeCheckConfig_TcpCheck_)(nil), + } +} + +func _UptimeCheckConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*UptimeCheckConfig) + // resource + switch x := m.Resource.(type) { + case *UptimeCheckConfig_MonitoredResource: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MonitoredResource); err != nil { + return err + } + case *UptimeCheckConfig_ResourceGroup_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ResourceGroup); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UptimeCheckConfig.Resource has unexpected type %T", x) + } + // check_request_type + switch x := m.CheckRequestType.(type) { + case *UptimeCheckConfig_HttpCheck_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpCheck); err != nil { + return err + } + case *UptimeCheckConfig_TcpCheck_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TcpCheck); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("UptimeCheckConfig.CheckRequestType has unexpected type %T", x) + } + return nil +} + +func _UptimeCheckConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*UptimeCheckConfig) + switch tag { + case 3: // resource.monitored_resource + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(monitoredres.MonitoredResource) + err := b.DecodeMessage(msg) + m.Resource = &UptimeCheckConfig_MonitoredResource{msg} + return true, err + case 4: // resource.resource_group + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UptimeCheckConfig_ResourceGroup) + err := b.DecodeMessage(msg) + m.Resource = &UptimeCheckConfig_ResourceGroup_{msg} + return true, err + case 5: // check_request_type.http_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UptimeCheckConfig_HttpCheck) + err := b.DecodeMessage(msg) + 
m.CheckRequestType = &UptimeCheckConfig_HttpCheck_{msg} + return true, err + case 6: // check_request_type.tcp_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UptimeCheckConfig_TcpCheck) + err := b.DecodeMessage(msg) + m.CheckRequestType = &UptimeCheckConfig_TcpCheck_{msg} + return true, err + default: + return false, nil + } +} + +func _UptimeCheckConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*UptimeCheckConfig) + // resource + switch x := m.Resource.(type) { + case *UptimeCheckConfig_MonitoredResource: + s := proto.Size(x.MonitoredResource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UptimeCheckConfig_ResourceGroup_: + s := proto.Size(x.ResourceGroup) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // check_request_type + switch x := m.CheckRequestType.(type) { + case *UptimeCheckConfig_HttpCheck_: + s := proto.Size(x.HttpCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *UptimeCheckConfig_TcpCheck_: + s := proto.Size(x.TcpCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The resource submessage for group checks. It can be used instead of a +// monitored resource, when multiple resources are being monitored. +type UptimeCheckConfig_ResourceGroup struct { + // The group of resources being monitored. Should be only the + // group_id, not projects//groups/. + GroupId string `protobuf:"bytes,1,opt,name=group_id,json=groupId,proto3" json:"group_id,omitempty"` + // The resource type of the group members. 
+ ResourceType GroupResourceType `protobuf:"varint,2,opt,name=resource_type,json=resourceType,proto3,enum=google.monitoring.v3.GroupResourceType" json:"resource_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig_ResourceGroup) Reset() { *m = UptimeCheckConfig_ResourceGroup{} } +func (m *UptimeCheckConfig_ResourceGroup) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckConfig_ResourceGroup) ProtoMessage() {} +func (*UptimeCheckConfig_ResourceGroup) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1, 0} +} +func (m *UptimeCheckConfig_ResourceGroup) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig_ResourceGroup.Unmarshal(m, b) +} +func (m *UptimeCheckConfig_ResourceGroup) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig_ResourceGroup.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig_ResourceGroup) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig_ResourceGroup.Merge(dst, src) +} +func (m *UptimeCheckConfig_ResourceGroup) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig_ResourceGroup.Size(m) +} +func (m *UptimeCheckConfig_ResourceGroup) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig_ResourceGroup.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig_ResourceGroup proto.InternalMessageInfo + +func (m *UptimeCheckConfig_ResourceGroup) GetGroupId() string { + if m != nil { + return m.GroupId + } + return "" +} + +func (m *UptimeCheckConfig_ResourceGroup) GetResourceType() GroupResourceType { + if m != nil { + return m.ResourceType + } + return GroupResourceType_RESOURCE_TYPE_UNSPECIFIED +} + +// Information involved in an HTTP/HTTPS uptime check request. +type UptimeCheckConfig_HttpCheck struct { + // If true, use HTTPS instead of HTTP to run the check. + UseSsl bool `protobuf:"varint,1,opt,name=use_ssl,json=useSsl,proto3" json:"use_ssl,omitempty"` + // The path to the page to run the check against. Will be combined with the + // host (specified within the MonitoredResource) and port to construct the + // full URL. Optional (defaults to "/"). + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + // The port to the page to run the check against. Will be combined with host + // (specified within the MonitoredResource) and path to construct the full + // URL. Optional (defaults to 80 without SSL, or 443 with SSL). + Port int32 `protobuf:"varint,3,opt,name=port,proto3" json:"port,omitempty"` + // The authentication information. Optional when creating an HTTP check; + // defaults to empty. + AuthInfo *UptimeCheckConfig_HttpCheck_BasicAuthentication `protobuf:"bytes,4,opt,name=auth_info,json=authInfo,proto3" json:"auth_info,omitempty"` + // Boolean specifiying whether to encrypt the header information. + // Encryption should be specified for any headers related to authentication + // that you do not wish to be seen when retrieving the configuration. The + // server will be responsible for encrypting the headers. + // On Get/List calls, if mask_headers is set to True then the headers + // will be obscured with ******. + MaskHeaders bool `protobuf:"varint,5,opt,name=mask_headers,json=maskHeaders,proto3" json:"mask_headers,omitempty"` + // The list of headers to send as part of the uptime check request. 
+ // If two headers have the same key and different values, they should + // be entered as a single header, with the value being a comma-separated + // list of all the desired values as described at + // https://www.w3.org/Protocols/rfc2616/rfc2616.txt (page 31). + // Entering two separate headers with the same key in a Create call will + // cause the first to be overwritten by the second. + // The maximum number of headers allowed is 100. + Headers map[string]string `protobuf:"bytes,6,rep,name=headers,proto3" json:"headers,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig_HttpCheck) Reset() { *m = UptimeCheckConfig_HttpCheck{} } +func (m *UptimeCheckConfig_HttpCheck) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckConfig_HttpCheck) ProtoMessage() {} +func (*UptimeCheckConfig_HttpCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1, 1} +} +func (m *UptimeCheckConfig_HttpCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck.Unmarshal(m, b) +} +func (m *UptimeCheckConfig_HttpCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig_HttpCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig_HttpCheck.Merge(dst, src) +} +func (m *UptimeCheckConfig_HttpCheck) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck.Size(m) +} +func (m *UptimeCheckConfig_HttpCheck) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig_HttpCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig_HttpCheck proto.InternalMessageInfo + +func (m *UptimeCheckConfig_HttpCheck) GetUseSsl() bool { + if m != nil { + return m.UseSsl + } + return false +} + +func (m *UptimeCheckConfig_HttpCheck) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *UptimeCheckConfig_HttpCheck) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *UptimeCheckConfig_HttpCheck) GetAuthInfo() *UptimeCheckConfig_HttpCheck_BasicAuthentication { + if m != nil { + return m.AuthInfo + } + return nil +} + +func (m *UptimeCheckConfig_HttpCheck) GetMaskHeaders() bool { + if m != nil { + return m.MaskHeaders + } + return false +} + +func (m *UptimeCheckConfig_HttpCheck) GetHeaders() map[string]string { + if m != nil { + return m.Headers + } + return nil +} + +// A type of authentication to perform against the specified resource or URL +// that uses username and password. +// Currently, only Basic authentication is supported in Uptime Monitoring. +type UptimeCheckConfig_HttpCheck_BasicAuthentication struct { + // The username to authenticate. + Username string `protobuf:"bytes,1,opt,name=username,proto3" json:"username,omitempty"` + // The password to authenticate. 
+ Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) Reset() { + *m = UptimeCheckConfig_HttpCheck_BasicAuthentication{} +} +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) String() string { + return proto.CompactTextString(m) +} +func (*UptimeCheckConfig_HttpCheck_BasicAuthentication) ProtoMessage() {} +func (*UptimeCheckConfig_HttpCheck_BasicAuthentication) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1, 1, 0} +} +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication.Unmarshal(m, b) +} +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig_HttpCheck_BasicAuthentication) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication.Merge(dst, src) +} +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication.Size(m) +} +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig_HttpCheck_BasicAuthentication proto.InternalMessageInfo + +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *UptimeCheckConfig_HttpCheck_BasicAuthentication) GetPassword() string { + if m != nil { + return m.Password + } + return "" +} + +// Information required for a TCP uptime check request. +type UptimeCheckConfig_TcpCheck struct { + // The port to the page to run the check against. Will be combined with host + // (specified within the MonitoredResource) to construct the full URL. + // Required. 
+ Port int32 `protobuf:"varint,1,opt,name=port,proto3" json:"port,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig_TcpCheck) Reset() { *m = UptimeCheckConfig_TcpCheck{} } +func (m *UptimeCheckConfig_TcpCheck) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckConfig_TcpCheck) ProtoMessage() {} +func (*UptimeCheckConfig_TcpCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1, 2} +} +func (m *UptimeCheckConfig_TcpCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig_TcpCheck.Unmarshal(m, b) +} +func (m *UptimeCheckConfig_TcpCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig_TcpCheck.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig_TcpCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig_TcpCheck.Merge(dst, src) +} +func (m *UptimeCheckConfig_TcpCheck) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig_TcpCheck.Size(m) +} +func (m *UptimeCheckConfig_TcpCheck) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig_TcpCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig_TcpCheck proto.InternalMessageInfo + +func (m *UptimeCheckConfig_TcpCheck) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +// Used to perform string matching. It allows substring and regular +// expressions, together with their negations. +type UptimeCheckConfig_ContentMatcher struct { + // String or regex content to match (max 1024 bytes) + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckConfig_ContentMatcher) Reset() { *m = UptimeCheckConfig_ContentMatcher{} } +func (m *UptimeCheckConfig_ContentMatcher) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckConfig_ContentMatcher) ProtoMessage() {} +func (*UptimeCheckConfig_ContentMatcher) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{1, 3} +} +func (m *UptimeCheckConfig_ContentMatcher) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckConfig_ContentMatcher.Unmarshal(m, b) +} +func (m *UptimeCheckConfig_ContentMatcher) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckConfig_ContentMatcher.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckConfig_ContentMatcher) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckConfig_ContentMatcher.Merge(dst, src) +} +func (m *UptimeCheckConfig_ContentMatcher) XXX_Size() int { + return xxx_messageInfo_UptimeCheckConfig_ContentMatcher.Size(m) +} +func (m *UptimeCheckConfig_ContentMatcher) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckConfig_ContentMatcher.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckConfig_ContentMatcher proto.InternalMessageInfo + +func (m *UptimeCheckConfig_ContentMatcher) GetContent() string { + if m != nil { + return m.Content + } + return "" +} + +// Contains the region, location, and list of IP +// addresses where checkers in the location run from. +type UptimeCheckIp struct { + // A broad region category in which the IP address is located. 
+ Region UptimeCheckRegion `protobuf:"varint,1,opt,name=region,proto3,enum=google.monitoring.v3.UptimeCheckRegion" json:"region,omitempty"` + // A more specific location within the region that typically encodes + // a particular city/town/metro (and its containing state/province or country) + // within the broader umbrella region category. + Location string `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + // The IP address from which the uptime check originates. This is a full + // IP address (not an IP address range). Most IP addresses, as of this + // publication, are in IPv4 format; however, one should not rely on the + // IP addresses being in IPv4 format indefinitely and should support + // interpreting this field in either IPv4 or IPv6 format. + IpAddress string `protobuf:"bytes,3,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UptimeCheckIp) Reset() { *m = UptimeCheckIp{} } +func (m *UptimeCheckIp) String() string { return proto.CompactTextString(m) } +func (*UptimeCheckIp) ProtoMessage() {} +func (*UptimeCheckIp) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_bdbf1570f8b9a7cb, []int{2} +} +func (m *UptimeCheckIp) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UptimeCheckIp.Unmarshal(m, b) +} +func (m *UptimeCheckIp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UptimeCheckIp.Marshal(b, m, deterministic) +} +func (dst *UptimeCheckIp) XXX_Merge(src proto.Message) { + xxx_messageInfo_UptimeCheckIp.Merge(dst, src) +} +func (m *UptimeCheckIp) XXX_Size() int { + return xxx_messageInfo_UptimeCheckIp.Size(m) +} +func (m *UptimeCheckIp) XXX_DiscardUnknown() { + xxx_messageInfo_UptimeCheckIp.DiscardUnknown(m) +} + +var xxx_messageInfo_UptimeCheckIp proto.InternalMessageInfo + +func (m *UptimeCheckIp) GetRegion() UptimeCheckRegion { + if m != nil { + return m.Region + } + return UptimeCheckRegion_REGION_UNSPECIFIED +} + +func (m *UptimeCheckIp) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func (m *UptimeCheckIp) GetIpAddress() string { + if m != nil { + return m.IpAddress + } + return "" +} + +func init() { + proto.RegisterType((*InternalChecker)(nil), "google.monitoring.v3.InternalChecker") + proto.RegisterType((*UptimeCheckConfig)(nil), "google.monitoring.v3.UptimeCheckConfig") + proto.RegisterType((*UptimeCheckConfig_ResourceGroup)(nil), "google.monitoring.v3.UptimeCheckConfig.ResourceGroup") + proto.RegisterType((*UptimeCheckConfig_HttpCheck)(nil), "google.monitoring.v3.UptimeCheckConfig.HttpCheck") + proto.RegisterMapType((map[string]string)(nil), "google.monitoring.v3.UptimeCheckConfig.HttpCheck.HeadersEntry") + proto.RegisterType((*UptimeCheckConfig_HttpCheck_BasicAuthentication)(nil), "google.monitoring.v3.UptimeCheckConfig.HttpCheck.BasicAuthentication") + proto.RegisterType((*UptimeCheckConfig_TcpCheck)(nil), "google.monitoring.v3.UptimeCheckConfig.TcpCheck") + proto.RegisterType((*UptimeCheckConfig_ContentMatcher)(nil), "google.monitoring.v3.UptimeCheckConfig.ContentMatcher") + proto.RegisterType((*UptimeCheckIp)(nil), "google.monitoring.v3.UptimeCheckIp") + proto.RegisterEnum("google.monitoring.v3.UptimeCheckRegion", UptimeCheckRegion_name, UptimeCheckRegion_value) + proto.RegisterEnum("google.monitoring.v3.GroupResourceType", GroupResourceType_name, GroupResourceType_value) +} + +func init() { + 
proto.RegisterFile("google/monitoring/v3/uptime.proto", fileDescriptor_uptime_bdbf1570f8b9a7cb) +} + +var fileDescriptor_uptime_bdbf1570f8b9a7cb = []byte{ + // 1036 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xed, 0x6e, 0xe3, 0x44, + 0x17, 0xae, 0x9b, 0x36, 0x1f, 0x27, 0xfd, 0x70, 0xe7, 0xed, 0x0b, 0x6e, 0xa4, 0x2e, 0xdd, 0x22, + 0xa0, 0xea, 0x8f, 0x84, 0x6d, 0x04, 0x42, 0x8b, 0xb4, 0xc8, 0x49, 0x4d, 0x13, 0xa9, 0x4d, 0xa2, + 0x49, 0xb3, 0xc0, 0x52, 0x31, 0x72, 0xed, 0xa9, 0x63, 0x9a, 0x78, 0x8c, 0x67, 0xdc, 0xa5, 0xdc, + 0x02, 0x97, 0x81, 0xf8, 0xc3, 0x15, 0x70, 0x0d, 0x5c, 0x00, 0xd7, 0x83, 0x66, 0xec, 0x49, 0x9b, + 0xb6, 0x68, 0x5b, 0xfe, 0xcd, 0x73, 0x3e, 0x9e, 0x39, 0x73, 0xe6, 0x3c, 0x63, 0xc3, 0xf3, 0x80, + 0xb1, 0x60, 0x42, 0x1b, 0x53, 0x16, 0x85, 0x82, 0x25, 0x61, 0x14, 0x34, 0xae, 0x9a, 0x8d, 0x34, + 0x16, 0xe1, 0x94, 0xd6, 0xe3, 0x84, 0x09, 0x86, 0x36, 0xb3, 0x90, 0xfa, 0x4d, 0x48, 0xfd, 0xaa, + 0x59, 0xfb, 0x30, 0x4f, 0x74, 0xe3, 0x50, 0x27, 0x53, 0x9f, 0x24, 0x94, 0xb3, 0x34, 0xf1, 0xf2, + 0xd4, 0xda, 0xb3, 0x3c, 0x48, 0xa1, 0xf3, 0xf4, 0xa2, 0xe1, 0xa7, 0x89, 0x2b, 0x42, 0x16, 0x65, + 0xfe, 0xdd, 0xdf, 0x0d, 0x58, 0xef, 0x46, 0x82, 0x26, 0x91, 0x3b, 0x69, 0x8f, 0xa9, 0x77, 0x49, + 0x13, 0x84, 0x60, 0x29, 0x72, 0xa7, 0xd4, 0x32, 0x76, 0x8c, 0xbd, 0x0a, 0x56, 0x6b, 0xf4, 0x1c, + 0x56, 0xfc, 0x90, 0xc7, 0x13, 0xf7, 0x9a, 0x28, 0xdf, 0xa2, 0xf2, 0x55, 0x73, 0x5b, 0x4f, 0x86, + 0x58, 0x50, 0x8a, 0xa8, 0x78, 0xcb, 0x92, 0x4b, 0xab, 0xa0, 0xbc, 0x1a, 0xa2, 0x2d, 0x28, 0x07, + 0x5e, 0x4c, 0x7e, 0x61, 0x11, 0xb5, 0x96, 0x32, 0x57, 0xe0, 0xc5, 0x6f, 0x58, 0x44, 0xd1, 0xc7, + 0xb0, 0x1e, 0x53, 0x9a, 0x90, 0x38, 0x61, 0x3f, 0x52, 0x4f, 0x90, 0xd0, 0xb7, 0x8a, 0x2a, 0x62, + 0x55, 0x9a, 0x07, 0x99, 0xb5, 0xeb, 0xef, 0xfe, 0x5d, 0x85, 0x8d, 0x91, 0xea, 0x89, 0xaa, 0xb2, + 0xcd, 0xa2, 0x8b, 0x30, 0xf8, 0xaf, 0x95, 0xf6, 0x00, 0xdd, 0x6f, 0x98, 0x2a, 0xba, 0x7a, 0xb0, + 0x5d, 0xcf, 0x9b, 0xed, 0xc6, 0x61, 0xfd, 0x44, 0x47, 0xe1, 0x3c, 0xa8, 0xb3, 0x80, 0x37, 0xa6, + 0x77, 0x8d, 0xe8, 0x07, 0x58, 0xd3, 0x2c, 0x24, 0x48, 0x58, 0x1a, 0xab, 0x53, 0x56, 0x0f, 0x3e, + 0xab, 0x3f, 0x74, 0x71, 0xf5, 0x7b, 0xe7, 0xa8, 0x6b, 0xa6, 0x23, 0x99, 0xdc, 0x59, 0xc0, 0xab, + 0xc9, 0x6d, 0x03, 0xc2, 0x00, 0x63, 0x21, 0x62, 0xe2, 0xc9, 0x14, 0x6b, 0x59, 0x71, 0xbf, 0x78, + 0x2c, 0x77, 0x47, 0x88, 0x58, 0xe1, 0x8e, 0x81, 0x2b, 0x63, 0x0d, 0x50, 0x1f, 0x2a, 0xc2, 0xd3, + 0x94, 0x45, 0x45, 0xf9, 0xe9, 0x63, 0x29, 0x4f, 0xbd, 0x19, 0x63, 0x59, 0xe4, 0x6b, 0xf4, 0x02, + 0x8a, 0x31, 0x4d, 0x42, 0xe6, 0x5b, 0x25, 0xc5, 0xb6, 0xa5, 0xd9, 0xf4, 0xe8, 0xd5, 0x0f, 0xf3, + 0xd1, 0xc3, 0x79, 0x20, 0x6a, 0x42, 0x49, 0x52, 0xb3, 0x54, 0x58, 0xe5, 0x77, 0xe5, 0xe8, 0x48, + 0xe4, 0x82, 0xe9, 0xb1, 0x48, 0xd0, 0x48, 0x90, 0xa9, 0x2b, 0xbc, 0x31, 0x4d, 0xb8, 0x55, 0xd9, + 0x29, 0xec, 0x55, 0x0f, 0x3e, 0x7f, 0x6c, 0xfd, 0xed, 0x2c, 0xff, 0x24, 0x4b, 0xc7, 0xeb, 0xde, + 0x1c, 0xe6, 0x08, 0x83, 0xc9, 0xe9, 0x84, 0x7a, 0x42, 0x8d, 0x47, 0x10, 0xb2, 0x88, 0x5b, 0xb0, + 0x53, 0xd8, 0x5b, 0x3b, 0xf8, 0xe4, 0x9d, 0x5b, 0x60, 0x15, 0x8f, 0xd7, 0x35, 0x41, 0x86, 0x39, + 0xfa, 0x00, 0xaa, 0x21, 0x27, 0x61, 0x2e, 0x35, 0x6b, 0x7d, 0xc7, 0xd8, 0x2b, 0x63, 0x08, 0xb9, + 0x16, 0x1f, 0xc2, 0xb0, 0xa1, 0xbd, 0xd9, 0xad, 0xc8, 0x83, 0xad, 0xa9, 0x83, 0x7d, 0xf4, 0xf0, + 0xae, 0x77, 0x74, 0x8b, 0xcd, 0x70, 0xde, 0xc0, 0x6b, 0x3f, 0xc3, 0xea, 0xdc, 0x68, 0x29, 0x25, + 0xca, 0x85, 0xd4, 0x99, 0x91, 0x2b, 0x51, 0xe2, 0xae, 0x8f, 0x8e, 0x61, 0x36, 0x75, 0x44, 0x5c, + 0xc7, 0x99, 
0x70, 0xfe, 0xf5, 0xc4, 0x8a, 0x4e, 0x73, 0x9f, 0x5e, 0xc7, 0x14, 0xaf, 0x24, 0xb7, + 0x50, 0xed, 0xcf, 0x02, 0x54, 0x66, 0x93, 0x87, 0xde, 0x87, 0x52, 0xca, 0x29, 0xe1, 0x7c, 0xa2, + 0x76, 0x2d, 0xe3, 0x62, 0xca, 0xe9, 0x90, 0x4f, 0xa4, 0x80, 0x63, 0x57, 0x8c, 0x73, 0x91, 0xaa, + 0xb5, 0xb2, 0xb1, 0x44, 0x28, 0x3d, 0x2e, 0x63, 0xb5, 0x46, 0xe7, 0x50, 0x71, 0x53, 0x31, 0x26, + 0x61, 0x74, 0xc1, 0x72, 0x71, 0x39, 0x4f, 0x16, 0x40, 0xbd, 0xe5, 0xf2, 0xd0, 0xb3, 0x53, 0x31, + 0xa6, 0x91, 0x08, 0xbd, 0x6c, 0xae, 0xca, 0x92, 0xb7, 0x1b, 0x5d, 0x30, 0xf9, 0x70, 0x4c, 0x5d, + 0x7e, 0x49, 0xc6, 0xd4, 0xf5, 0x65, 0xef, 0x97, 0x55, 0xa5, 0x55, 0x69, 0xeb, 0x64, 0x26, 0xf4, + 0x2d, 0x94, 0xb4, 0xb7, 0xa8, 0x6e, 0xe6, 0xd5, 0xd3, 0x8b, 0xc8, 0xb9, 0x9c, 0x48, 0x24, 0xd7, + 0x58, 0xd3, 0xd5, 0x4e, 0xe0, 0x7f, 0x0f, 0x54, 0x87, 0x6a, 0x50, 0x4e, 0xb9, 0xbc, 0xd3, 0xd9, + 0x23, 0x37, 0xc3, 0xd2, 0x17, 0xbb, 0x9c, 0xbf, 0x65, 0x89, 0x9f, 0xf7, 0x6f, 0x86, 0x6b, 0x2f, + 0x61, 0xe5, 0xf6, 0x3e, 0xc8, 0x84, 0xc2, 0x25, 0xbd, 0xce, 0x29, 0xe4, 0x12, 0x6d, 0xc2, 0xf2, + 0x95, 0x3b, 0x49, 0xf5, 0xfb, 0x98, 0x81, 0x97, 0x8b, 0x5f, 0x18, 0xb5, 0x67, 0x50, 0xd6, 0x02, + 0x9f, 0xdd, 0x85, 0x71, 0x73, 0x17, 0xb5, 0x7d, 0x58, 0x9b, 0x17, 0x90, 0x7c, 0xf9, 0x73, 0x09, + 0xe9, 0xa1, 0xca, 0x61, 0x0b, 0xa0, 0xac, 0xc7, 0xa2, 0xb5, 0x09, 0x48, 0xcd, 0x35, 0x49, 0xe8, + 0x4f, 0x29, 0xe5, 0x42, 0x4d, 0xd9, 0xee, 0xaf, 0x06, 0xac, 0xde, 0x6a, 0x57, 0x37, 0x46, 0x5f, + 0x41, 0x31, 0x13, 0x9d, 0x22, 0x7b, 0x82, 0xe6, 0xf2, 0x34, 0xd9, 0x98, 0x09, 0xcb, 0x1a, 0xa8, + 0x1b, 0xa3, 0x31, 0xda, 0x06, 0x08, 0x63, 0xe2, 0xfa, 0x7e, 0x42, 0x39, 0xcf, 0xbf, 0x53, 0x95, + 0x30, 0xb6, 0x33, 0xc3, 0x3e, 0x9d, 0xfb, 0xca, 0x64, 0xbc, 0xe8, 0x3d, 0x40, 0xd8, 0x39, 0xea, + 0xf6, 0x7b, 0x64, 0xd4, 0x1b, 0x0e, 0x9c, 0x76, 0xf7, 0xeb, 0xae, 0x73, 0x68, 0x2e, 0xa0, 0x12, + 0x14, 0x46, 0x43, 0xdb, 0x34, 0x10, 0x40, 0xd1, 0x19, 0xe1, 0xfe, 0xc0, 0x31, 0x17, 0xd1, 0x06, + 0xac, 0x0e, 0xfb, 0xa3, 0xd3, 0x0e, 0xb1, 0x4f, 0x1c, 0xdc, 0x6d, 0xdb, 0x66, 0x01, 0x99, 0xb0, + 0x62, 0x0f, 0xbb, 0x36, 0x19, 0xd8, 0x32, 0xb5, 0x6d, 0x2e, 0xed, 0x7f, 0x0f, 0x1b, 0xf7, 0x04, + 0x84, 0xb6, 0x61, 0x0b, 0x3b, 0xc3, 0xfe, 0x08, 0xb7, 0x1d, 0x72, 0xfa, 0xdd, 0xc0, 0xb9, 0xb3, + 0xdb, 0x0a, 0x94, 0xbb, 0xbd, 0xe1, 0xa9, 0xdd, 0x6b, 0x3b, 0xa6, 0x81, 0xb6, 0xe0, 0xff, 0xf6, + 0x37, 0x43, 0xe2, 0x1c, 0xb7, 0xc8, 0x71, 0xdf, 0x3e, 0x24, 0x2d, 0xfb, 0x58, 0x7a, 0xb0, 0xb9, + 0xd8, 0xfa, 0xcd, 0x00, 0xcb, 0x63, 0xd3, 0x07, 0xbb, 0xd6, 0xaa, 0x66, 0xc7, 0x1b, 0xc8, 0xf7, + 0x75, 0x60, 0xbc, 0x79, 0x95, 0x07, 0x05, 0x6c, 0xe2, 0x46, 0x41, 0x9d, 0x25, 0x41, 0x23, 0xa0, + 0x91, 0x7a, 0x7d, 0x1b, 0x99, 0xcb, 0x8d, 0x43, 0x3e, 0xff, 0x6f, 0xf2, 0xe5, 0x0d, 0xfa, 0x63, + 0xb1, 0x76, 0x94, 0x11, 0xb4, 0x27, 0x2c, 0xf5, 0xf5, 0xf7, 0x52, 0xee, 0xf5, 0xba, 0xf9, 0x97, + 0x76, 0x9e, 0x29, 0xe7, 0xd9, 0x8d, 0xf3, 0xec, 0x75, 0xf3, 0xbc, 0xa8, 0x36, 0x69, 0xfe, 0x13, + 0x00, 0x00, 0xff, 0xff, 0x0a, 0x0a, 0xbb, 0x6b, 0xff, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime_service.pb.go b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime_service.pb.go new file mode 100644 index 0000000..b29c771 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/monitoring/v3/uptime_service.pb.go @@ -0,0 +1,786 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
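The UptimeCheckConfig message generated above models both its resource and its check type as oneof fields, so a caller selects a branch through the generated wrapper structs (UptimeCheckConfig_MonitoredResource, UptimeCheckConfig_HttpCheck_, and so on) rather than assigning the inner message directly. A minimal sketch of populating such a config follows; the display name, host, path, and the uptime_url label keys are illustrative assumptions, not values taken from this file.

package main

import (
	"fmt"

	duration "github.com/golang/protobuf/ptypes/duration"
	monitoredres "google.golang.org/genproto/googleapis/api/monitoredres"
	monitoring "google.golang.org/genproto/googleapis/monitoring/v3"
)

func main() {
	cfg := &monitoring.UptimeCheckConfig{
		DisplayName: "example-http-check",
		// Resource is a oneof: wrapping the MonitoredResource in the generated
		// UptimeCheckConfig_MonitoredResource struct selects that branch.
		Resource: &monitoring.UptimeCheckConfig_MonitoredResource{
			MonitoredResource: &monitoredres.MonitoredResource{
				Type:   "uptime_url", // resource type and label keys are assumptions for illustration
				Labels: map[string]string{"project_id": "my-project", "host": "example.com"},
			},
		},
		// CheckRequestType is the second oneof; here the HTTP branch is chosen.
		CheckRequestType: &monitoring.UptimeCheckConfig_HttpCheck_{
			HttpCheck: &monitoring.UptimeCheckConfig_HttpCheck{
				UseSsl: true,
				Path:   "/healthz",
				Port:   443,
			},
		},
		// Per the field comment, only 60s, 300s, 600s and 900s are accepted periods.
		Period:  &duration.Duration{Seconds: 300},
		Timeout: &duration.Duration{Seconds: 10},
		ContentMatchers: []*monitoring.UptimeCheckConfig_ContentMatcher{
			{Content: "ok"},
		},
	}

	// The oneof getters return nil unless their branch is the active one.
	fmt.Println(cfg.GetDisplayName(), cfg.GetHttpCheck().GetPath(), cfg.GetTcpCheck() == nil)
}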
+// source: google/monitoring/v3/uptime_service.proto + +package monitoring // import "google.golang.org/genproto/googleapis/monitoring/v3" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The protocol for the `ListUptimeCheckConfigs` request. +type ListUptimeCheckConfigsRequest struct { + // The project whose uptime check configurations are listed. The format + // is `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The maximum number of results to return in a single response. The server + // may further constrain the maximum number of results returned in a single + // page. If the page_size is <=0, the server will decide the number of results + // to be returned. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return more results from the previous method call. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUptimeCheckConfigsRequest) Reset() { *m = ListUptimeCheckConfigsRequest{} } +func (m *ListUptimeCheckConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListUptimeCheckConfigsRequest) ProtoMessage() {} +func (*ListUptimeCheckConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{0} +} +func (m *ListUptimeCheckConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUptimeCheckConfigsRequest.Unmarshal(m, b) +} +func (m *ListUptimeCheckConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUptimeCheckConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListUptimeCheckConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUptimeCheckConfigsRequest.Merge(dst, src) +} +func (m *ListUptimeCheckConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListUptimeCheckConfigsRequest.Size(m) +} +func (m *ListUptimeCheckConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListUptimeCheckConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUptimeCheckConfigsRequest proto.InternalMessageInfo + +func (m *ListUptimeCheckConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListUptimeCheckConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListUptimeCheckConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The protocol for the `ListUptimeCheckConfigs` response. +type ListUptimeCheckConfigsResponse struct { + // The returned uptime check configurations. + UptimeCheckConfigs []*UptimeCheckConfig `protobuf:"bytes,1,rep,name=uptime_check_configs,json=uptimeCheckConfigs,proto3" json:"uptime_check_configs,omitempty"` + // This field represents the pagination token to retrieve the next page of + // results. If the value is empty, it means no further results for the + // request. To retrieve the next page of results, the value of the + // next_page_token is passed to the subsequent List method call (in the + // request message's page_token field). + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + // The total number of uptime check configurations for the project, + // irrespective of any pagination. 
+ TotalSize int32 `protobuf:"varint,3,opt,name=total_size,json=totalSize,proto3" json:"total_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUptimeCheckConfigsResponse) Reset() { *m = ListUptimeCheckConfigsResponse{} } +func (m *ListUptimeCheckConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListUptimeCheckConfigsResponse) ProtoMessage() {} +func (*ListUptimeCheckConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{1} +} +func (m *ListUptimeCheckConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUptimeCheckConfigsResponse.Unmarshal(m, b) +} +func (m *ListUptimeCheckConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUptimeCheckConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListUptimeCheckConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUptimeCheckConfigsResponse.Merge(dst, src) +} +func (m *ListUptimeCheckConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListUptimeCheckConfigsResponse.Size(m) +} +func (m *ListUptimeCheckConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListUptimeCheckConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUptimeCheckConfigsResponse proto.InternalMessageInfo + +func (m *ListUptimeCheckConfigsResponse) GetUptimeCheckConfigs() []*UptimeCheckConfig { + if m != nil { + return m.UptimeCheckConfigs + } + return nil +} + +func (m *ListUptimeCheckConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func (m *ListUptimeCheckConfigsResponse) GetTotalSize() int32 { + if m != nil { + return m.TotalSize + } + return 0 +} + +// The protocol for the `GetUptimeCheckConfig` request. +type GetUptimeCheckConfigRequest struct { + // The uptime check configuration to retrieve. The format + // is `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetUptimeCheckConfigRequest) Reset() { *m = GetUptimeCheckConfigRequest{} } +func (m *GetUptimeCheckConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetUptimeCheckConfigRequest) ProtoMessage() {} +func (*GetUptimeCheckConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{2} +} +func (m *GetUptimeCheckConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetUptimeCheckConfigRequest.Unmarshal(m, b) +} +func (m *GetUptimeCheckConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetUptimeCheckConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetUptimeCheckConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetUptimeCheckConfigRequest.Merge(dst, src) +} +func (m *GetUptimeCheckConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetUptimeCheckConfigRequest.Size(m) +} +func (m *GetUptimeCheckConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetUptimeCheckConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetUptimeCheckConfigRequest proto.InternalMessageInfo + +func (m *GetUptimeCheckConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The protocol for the `CreateUptimeCheckConfig` request. +type CreateUptimeCheckConfigRequest struct { + // The project in which to create the uptime check. The format + // is `projects/[PROJECT_ID]`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The new uptime check configuration. 
+ UptimeCheckConfig *UptimeCheckConfig `protobuf:"bytes,2,opt,name=uptime_check_config,json=uptimeCheckConfig,proto3" json:"uptime_check_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateUptimeCheckConfigRequest) Reset() { *m = CreateUptimeCheckConfigRequest{} } +func (m *CreateUptimeCheckConfigRequest) String() string { return proto.CompactTextString(m) } +func (*CreateUptimeCheckConfigRequest) ProtoMessage() {} +func (*CreateUptimeCheckConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{3} +} +func (m *CreateUptimeCheckConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateUptimeCheckConfigRequest.Unmarshal(m, b) +} +func (m *CreateUptimeCheckConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateUptimeCheckConfigRequest.Marshal(b, m, deterministic) +} +func (dst *CreateUptimeCheckConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateUptimeCheckConfigRequest.Merge(dst, src) +} +func (m *CreateUptimeCheckConfigRequest) XXX_Size() int { + return xxx_messageInfo_CreateUptimeCheckConfigRequest.Size(m) +} +func (m *CreateUptimeCheckConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateUptimeCheckConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateUptimeCheckConfigRequest proto.InternalMessageInfo + +func (m *CreateUptimeCheckConfigRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateUptimeCheckConfigRequest) GetUptimeCheckConfig() *UptimeCheckConfig { + if m != nil { + return m.UptimeCheckConfig + } + return nil +} + +// The protocol for the `UpdateUptimeCheckConfig` request. +type UpdateUptimeCheckConfigRequest struct { + // Optional. If present, only the listed fields in the current uptime check + // configuration are updated with values from the new configuration. If this + // field is empty, then the current configuration is completely replaced with + // the new configuration. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + // Required. If an `"updateMask"` has been specified, this field gives + // the values for the set of fields mentioned in the `"updateMask"`. If an + // `"updateMask"` has not been given, this uptime check configuration replaces + // the current configuration. If a field is mentioned in `"updateMask"` but + // the corresonding field is omitted in this partial uptime check + // configuration, it has the effect of deleting/clearing the field from the + // configuration on the server. + // + // The following fields can be updated: `display_name`, + // `http_check`, `tcp_check`, `timeout`, `content_matchers`, and + // `selected_regions`. 
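A short sketch of building a partial update with UpdateMask, under the semantics described in the comment above: only the masked paths are replaced and every other stored field keeps its current value. The resource name is a placeholder, and the masked paths (display_name, http_check) are taken from the list of updatable fields documented above.

package main

import (
	"fmt"

	monitoring "google.golang.org/genproto/googleapis/monitoring/v3"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

func main() {
	// Only display_name and http_check are replaced on the server; the rest
	// of the stored configuration is left untouched. IDs are placeholders.
	req := &monitoring.UpdateUptimeCheckConfigRequest{
		UpdateMask: &field_mask.FieldMask{Paths: []string{"display_name", "http_check"}},
		UptimeCheckConfig: &monitoring.UptimeCheckConfig{
			Name:        "projects/my-project/uptimeCheckConfigs/my-check",
			DisplayName: "renamed-check",
			CheckRequestType: &monitoring.UptimeCheckConfig_HttpCheck_{
				HttpCheck: &monitoring.UptimeCheckConfig_HttpCheck{UseSsl: true, Port: 443, Path: "/status"},
			},
		},
	}
	fmt.Println(req.GetUpdateMask().Paths, req.GetUptimeCheckConfig().GetDisplayName())
}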
+ UptimeCheckConfig *UptimeCheckConfig `protobuf:"bytes,3,opt,name=uptime_check_config,json=uptimeCheckConfig,proto3" json:"uptime_check_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateUptimeCheckConfigRequest) Reset() { *m = UpdateUptimeCheckConfigRequest{} } +func (m *UpdateUptimeCheckConfigRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateUptimeCheckConfigRequest) ProtoMessage() {} +func (*UpdateUptimeCheckConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{4} +} +func (m *UpdateUptimeCheckConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateUptimeCheckConfigRequest.Unmarshal(m, b) +} +func (m *UpdateUptimeCheckConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateUptimeCheckConfigRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateUptimeCheckConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateUptimeCheckConfigRequest.Merge(dst, src) +} +func (m *UpdateUptimeCheckConfigRequest) XXX_Size() int { + return xxx_messageInfo_UpdateUptimeCheckConfigRequest.Size(m) +} +func (m *UpdateUptimeCheckConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateUptimeCheckConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateUptimeCheckConfigRequest proto.InternalMessageInfo + +func (m *UpdateUptimeCheckConfigRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +func (m *UpdateUptimeCheckConfigRequest) GetUptimeCheckConfig() *UptimeCheckConfig { + if m != nil { + return m.UptimeCheckConfig + } + return nil +} + +// The protocol for the `DeleteUptimeCheckConfig` request. +type DeleteUptimeCheckConfigRequest struct { + // The uptime check configuration to delete. The format + // is `projects/[PROJECT_ID]/uptimeCheckConfigs/[UPTIME_CHECK_ID]`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteUptimeCheckConfigRequest) Reset() { *m = DeleteUptimeCheckConfigRequest{} } +func (m *DeleteUptimeCheckConfigRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteUptimeCheckConfigRequest) ProtoMessage() {} +func (*DeleteUptimeCheckConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{5} +} +func (m *DeleteUptimeCheckConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteUptimeCheckConfigRequest.Unmarshal(m, b) +} +func (m *DeleteUptimeCheckConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteUptimeCheckConfigRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteUptimeCheckConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteUptimeCheckConfigRequest.Merge(dst, src) +} +func (m *DeleteUptimeCheckConfigRequest) XXX_Size() int { + return xxx_messageInfo_DeleteUptimeCheckConfigRequest.Size(m) +} +func (m *DeleteUptimeCheckConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteUptimeCheckConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteUptimeCheckConfigRequest proto.InternalMessageInfo + +func (m *DeleteUptimeCheckConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The protocol for the `ListUptimeCheckIps` request. +type ListUptimeCheckIpsRequest struct { + // The maximum number of results to return in a single response. The server + // may further constrain the maximum number of results returned in a single + // page. If the page_size is <=0, the server will decide the number of results + // to be returned. + // NOTE: this field is not yet implemented + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return more results from the previous method call. 
+ // NOTE: this field is not yet implemented + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUptimeCheckIpsRequest) Reset() { *m = ListUptimeCheckIpsRequest{} } +func (m *ListUptimeCheckIpsRequest) String() string { return proto.CompactTextString(m) } +func (*ListUptimeCheckIpsRequest) ProtoMessage() {} +func (*ListUptimeCheckIpsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{6} +} +func (m *ListUptimeCheckIpsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUptimeCheckIpsRequest.Unmarshal(m, b) +} +func (m *ListUptimeCheckIpsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUptimeCheckIpsRequest.Marshal(b, m, deterministic) +} +func (dst *ListUptimeCheckIpsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUptimeCheckIpsRequest.Merge(dst, src) +} +func (m *ListUptimeCheckIpsRequest) XXX_Size() int { + return xxx_messageInfo_ListUptimeCheckIpsRequest.Size(m) +} +func (m *ListUptimeCheckIpsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListUptimeCheckIpsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUptimeCheckIpsRequest proto.InternalMessageInfo + +func (m *ListUptimeCheckIpsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListUptimeCheckIpsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The protocol for the `ListUptimeCheckIps` response. +type ListUptimeCheckIpsResponse struct { + // The returned list of IP addresses (including region and location) that the + // checkers run from. + UptimeCheckIps []*UptimeCheckIp `protobuf:"bytes,1,rep,name=uptime_check_ips,json=uptimeCheckIps,proto3" json:"uptime_check_ips,omitempty"` + // This field represents the pagination token to retrieve the next page of + // results. If the value is empty, it means no further results for the + // request. To retrieve the next page of results, the value of the + // next_page_token is passed to the subsequent List method call (in the + // request message's page_token field). 
+ // NOTE: this field is not yet implemented + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListUptimeCheckIpsResponse) Reset() { *m = ListUptimeCheckIpsResponse{} } +func (m *ListUptimeCheckIpsResponse) String() string { return proto.CompactTextString(m) } +func (*ListUptimeCheckIpsResponse) ProtoMessage() {} +func (*ListUptimeCheckIpsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_uptime_service_d74c53bfae98dff8, []int{7} +} +func (m *ListUptimeCheckIpsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListUptimeCheckIpsResponse.Unmarshal(m, b) +} +func (m *ListUptimeCheckIpsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListUptimeCheckIpsResponse.Marshal(b, m, deterministic) +} +func (dst *ListUptimeCheckIpsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListUptimeCheckIpsResponse.Merge(dst, src) +} +func (m *ListUptimeCheckIpsResponse) XXX_Size() int { + return xxx_messageInfo_ListUptimeCheckIpsResponse.Size(m) +} +func (m *ListUptimeCheckIpsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListUptimeCheckIpsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListUptimeCheckIpsResponse proto.InternalMessageInfo + +func (m *ListUptimeCheckIpsResponse) GetUptimeCheckIps() []*UptimeCheckIp { + if m != nil { + return m.UptimeCheckIps + } + return nil +} + +func (m *ListUptimeCheckIpsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +func init() { + proto.RegisterType((*ListUptimeCheckConfigsRequest)(nil), "google.monitoring.v3.ListUptimeCheckConfigsRequest") + proto.RegisterType((*ListUptimeCheckConfigsResponse)(nil), "google.monitoring.v3.ListUptimeCheckConfigsResponse") + proto.RegisterType((*GetUptimeCheckConfigRequest)(nil), "google.monitoring.v3.GetUptimeCheckConfigRequest") + proto.RegisterType((*CreateUptimeCheckConfigRequest)(nil), "google.monitoring.v3.CreateUptimeCheckConfigRequest") + proto.RegisterType((*UpdateUptimeCheckConfigRequest)(nil), "google.monitoring.v3.UpdateUptimeCheckConfigRequest") + proto.RegisterType((*DeleteUptimeCheckConfigRequest)(nil), "google.monitoring.v3.DeleteUptimeCheckConfigRequest") + proto.RegisterType((*ListUptimeCheckIpsRequest)(nil), "google.monitoring.v3.ListUptimeCheckIpsRequest") + proto.RegisterType((*ListUptimeCheckIpsResponse)(nil), "google.monitoring.v3.ListUptimeCheckIpsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// UptimeCheckServiceClient is the client API for UptimeCheckService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type UptimeCheckServiceClient interface { + // Lists the existing valid uptime check configurations for the project, + // leaving out any invalid configurations. + ListUptimeCheckConfigs(ctx context.Context, in *ListUptimeCheckConfigsRequest, opts ...grpc.CallOption) (*ListUptimeCheckConfigsResponse, error) + // Gets a single uptime check configuration. 
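A hedged sketch of driving the pagination protocol described in the request/response comments above with the generated client: each response's NextPageToken is fed back as the next request's PageToken until it comes back empty. It assumes a client already obtained from NewUptimeCheckServiceClient over an authenticated gRPC connection; the project ID and page size are placeholders.

package example

import (
	"context"
	"fmt"

	monitoring "google.golang.org/genproto/googleapis/monitoring/v3"
)

// listAllUptimeChecks pages through ListUptimeCheckConfigs by passing each
// response's NextPageToken back as the following request's PageToken.
func listAllUptimeChecks(ctx context.Context, client monitoring.UptimeCheckServiceClient, projectID string) error {
	req := &monitoring.ListUptimeCheckConfigsRequest{
		Parent:   "projects/" + projectID, // format documented on the Parent field
		PageSize: 50,                      // the server may return fewer per page
	}
	for {
		resp, err := client.ListUptimeCheckConfigs(ctx, req)
		if err != nil {
			return err
		}
		for _, cfg := range resp.GetUptimeCheckConfigs() {
			fmt.Println(cfg.GetName(), cfg.GetDisplayName())
		}
		if resp.GetNextPageToken() == "" {
			return nil // an empty token means there are no further pages
		}
		req.PageToken = resp.GetNextPageToken()
	}
}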
+ GetUptimeCheckConfig(ctx context.Context, in *GetUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) + // Creates a new uptime check configuration. + CreateUptimeCheckConfig(ctx context.Context, in *CreateUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) + // Updates an uptime check configuration. You can either replace the entire + // configuration with a new one or replace only certain fields in the current + // configuration by specifying the fields to be updated via `"updateMask"`. + // Returns the updated configuration. + UpdateUptimeCheckConfig(ctx context.Context, in *UpdateUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) + // Deletes an uptime check configuration. Note that this method will fail + // if the uptime check configuration is referenced by an alert policy or + // other dependent configs that would be rendered invalid by the deletion. + DeleteUptimeCheckConfig(ctx context.Context, in *DeleteUptimeCheckConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns the list of IPs that checkers run from + ListUptimeCheckIps(ctx context.Context, in *ListUptimeCheckIpsRequest, opts ...grpc.CallOption) (*ListUptimeCheckIpsResponse, error) +} + +type uptimeCheckServiceClient struct { + cc *grpc.ClientConn +} + +func NewUptimeCheckServiceClient(cc *grpc.ClientConn) UptimeCheckServiceClient { + return &uptimeCheckServiceClient{cc} +} + +func (c *uptimeCheckServiceClient) ListUptimeCheckConfigs(ctx context.Context, in *ListUptimeCheckConfigsRequest, opts ...grpc.CallOption) (*ListUptimeCheckConfigsResponse, error) { + out := new(ListUptimeCheckConfigsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *uptimeCheckServiceClient) GetUptimeCheckConfig(ctx context.Context, in *GetUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) { + out := new(UptimeCheckConfig) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *uptimeCheckServiceClient) CreateUptimeCheckConfig(ctx context.Context, in *CreateUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) { + out := new(UptimeCheckConfig) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *uptimeCheckServiceClient) UpdateUptimeCheckConfig(ctx context.Context, in *UpdateUptimeCheckConfigRequest, opts ...grpc.CallOption) (*UptimeCheckConfig, error) { + out := new(UptimeCheckConfig) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *uptimeCheckServiceClient) DeleteUptimeCheckConfig(ctx context.Context, in *DeleteUptimeCheckConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *uptimeCheckServiceClient) ListUptimeCheckIps(ctx context.Context, in *ListUptimeCheckIpsRequest, opts ...grpc.CallOption) (*ListUptimeCheckIpsResponse, error) { + out := new(ListUptimeCheckIpsResponse) + err := c.cc.Invoke(ctx, "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// UptimeCheckServiceServer is the server API for UptimeCheckService service. +type UptimeCheckServiceServer interface { + // Lists the existing valid uptime check configurations for the project, + // leaving out any invalid configurations. + ListUptimeCheckConfigs(context.Context, *ListUptimeCheckConfigsRequest) (*ListUptimeCheckConfigsResponse, error) + // Gets a single uptime check configuration. + GetUptimeCheckConfig(context.Context, *GetUptimeCheckConfigRequest) (*UptimeCheckConfig, error) + // Creates a new uptime check configuration. + CreateUptimeCheckConfig(context.Context, *CreateUptimeCheckConfigRequest) (*UptimeCheckConfig, error) + // Updates an uptime check configuration. You can either replace the entire + // configuration with a new one or replace only certain fields in the current + // configuration by specifying the fields to be updated via `"updateMask"`. + // Returns the updated configuration. + UpdateUptimeCheckConfig(context.Context, *UpdateUptimeCheckConfigRequest) (*UptimeCheckConfig, error) + // Deletes an uptime check configuration. Note that this method will fail + // if the uptime check configuration is referenced by an alert policy or + // other dependent configs that would be rendered invalid by the deletion. + DeleteUptimeCheckConfig(context.Context, *DeleteUptimeCheckConfigRequest) (*empty.Empty, error) + // Returns the list of IPs that checkers run from + ListUptimeCheckIps(context.Context, *ListUptimeCheckIpsRequest) (*ListUptimeCheckIpsResponse, error) +} + +func RegisterUptimeCheckServiceServer(s *grpc.Server, srv UptimeCheckServiceServer) { + s.RegisterService(&_UptimeCheckService_serviceDesc, srv) +} + +func _UptimeCheckService_ListUptimeCheckConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListUptimeCheckConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).ListUptimeCheckConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).ListUptimeCheckConfigs(ctx, req.(*ListUptimeCheckConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UptimeCheckService_GetUptimeCheckConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetUptimeCheckConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).GetUptimeCheckConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/GetUptimeCheckConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).GetUptimeCheckConfig(ctx, 
req.(*GetUptimeCheckConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UptimeCheckService_CreateUptimeCheckConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateUptimeCheckConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).CreateUptimeCheckConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/CreateUptimeCheckConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).CreateUptimeCheckConfig(ctx, req.(*CreateUptimeCheckConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UptimeCheckService_UpdateUptimeCheckConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateUptimeCheckConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).UpdateUptimeCheckConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/UpdateUptimeCheckConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).UpdateUptimeCheckConfig(ctx, req.(*UpdateUptimeCheckConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UptimeCheckService_DeleteUptimeCheckConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteUptimeCheckConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).DeleteUptimeCheckConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/DeleteUptimeCheckConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).DeleteUptimeCheckConfig(ctx, req.(*DeleteUptimeCheckConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _UptimeCheckService_ListUptimeCheckIps_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListUptimeCheckIpsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UptimeCheckServiceServer).ListUptimeCheckIps(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.monitoring.v3.UptimeCheckService/ListUptimeCheckIps", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UptimeCheckServiceServer).ListUptimeCheckIps(ctx, req.(*ListUptimeCheckIpsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _UptimeCheckService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.monitoring.v3.UptimeCheckService", + HandlerType: (*UptimeCheckServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListUptimeCheckConfigs", + Handler: _UptimeCheckService_ListUptimeCheckConfigs_Handler, + }, + { + MethodName: "GetUptimeCheckConfig", + Handler: _UptimeCheckService_GetUptimeCheckConfig_Handler, + }, + { + 
MethodName: "CreateUptimeCheckConfig", + Handler: _UptimeCheckService_CreateUptimeCheckConfig_Handler, + }, + { + MethodName: "UpdateUptimeCheckConfig", + Handler: _UptimeCheckService_UpdateUptimeCheckConfig_Handler, + }, + { + MethodName: "DeleteUptimeCheckConfig", + Handler: _UptimeCheckService_DeleteUptimeCheckConfig_Handler, + }, + { + MethodName: "ListUptimeCheckIps", + Handler: _UptimeCheckService_ListUptimeCheckIps_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/monitoring/v3/uptime_service.proto", +} + +func init() { + proto.RegisterFile("google/monitoring/v3/uptime_service.proto", fileDescriptor_uptime_service_d74c53bfae98dff8) +} + +var fileDescriptor_uptime_service_d74c53bfae98dff8 = []byte{ + // 747 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xcd, 0x6e, 0xd3, 0x4a, + 0x14, 0xd6, 0x24, 0xbd, 0x55, 0x7b, 0xaa, 0x7b, 0x2f, 0x0c, 0x51, 0x1b, 0x5c, 0x1a, 0x05, 0x23, + 0x41, 0x89, 0x90, 0x4d, 0x93, 0xae, 0xa8, 0xa8, 0x44, 0x03, 0x54, 0x95, 0xa8, 0x54, 0xa5, 0xb4, + 0x15, 0x50, 0x29, 0x72, 0xd3, 0xa9, 0x31, 0x49, 0x3c, 0xc6, 0x33, 0xae, 0xa0, 0xa8, 0x1b, 0xde, + 0x00, 0x75, 0xc9, 0x9e, 0x45, 0x1f, 0x00, 0xd6, 0xb0, 0x41, 0x62, 0x8b, 0x78, 0x03, 0x1e, 0x04, + 0x79, 0x3c, 0x26, 0x7f, 0x63, 0xe3, 0x88, 0x5d, 0x3c, 0xe7, 0xcc, 0x39, 0xdf, 0xf9, 0xfc, 0x9d, + 0x2f, 0x86, 0x9b, 0x36, 0xa5, 0x76, 0x87, 0x98, 0x5d, 0xea, 0x3a, 0x9c, 0xfa, 0x8e, 0x6b, 0x9b, + 0xc7, 0x35, 0x33, 0xf0, 0xb8, 0xd3, 0x25, 0x4d, 0x46, 0xfc, 0x63, 0xa7, 0x45, 0x0c, 0xcf, 0xa7, + 0x9c, 0xe2, 0x42, 0x94, 0x6a, 0xf4, 0x52, 0x8d, 0xe3, 0x9a, 0x76, 0x45, 0x16, 0xb0, 0x3c, 0xc7, + 0xb4, 0x5c, 0x97, 0x72, 0x8b, 0x3b, 0xd4, 0x65, 0xd1, 0x1d, 0xed, 0x6a, 0x4a, 0x79, 0x99, 0x32, + 0x2f, 0x53, 0xc4, 0xd3, 0x41, 0x70, 0x64, 0x92, 0xae, 0xc7, 0x5f, 0xcb, 0x60, 0x79, 0x38, 0x78, + 0xe4, 0x90, 0xce, 0x61, 0xb3, 0x6b, 0xb1, 0x76, 0x94, 0xa1, 0x33, 0x58, 0x78, 0xe4, 0x30, 0xbe, + 0x23, 0x4a, 0xd6, 0x9f, 0x93, 0x56, 0xbb, 0x4e, 0xdd, 0x23, 0xc7, 0x66, 0x0d, 0xf2, 0x32, 0x20, + 0x8c, 0xe3, 0x59, 0x98, 0xf4, 0x2c, 0x9f, 0xb8, 0xbc, 0x88, 0xca, 0x68, 0x71, 0xba, 0x21, 0x9f, + 0xf0, 0x3c, 0x4c, 0x7b, 0x96, 0x4d, 0x9a, 0xcc, 0x39, 0x21, 0xc5, 0x7c, 0x19, 0x2d, 0xfe, 0xd3, + 0x98, 0x0a, 0x0f, 0xb6, 0x9d, 0x13, 0x82, 0x17, 0x00, 0x44, 0x90, 0xd3, 0x36, 0x71, 0x8b, 0x13, + 0xe2, 0xa2, 0x48, 0x7f, 0x1c, 0x1e, 0xe8, 0x5f, 0x10, 0x94, 0x92, 0xba, 0x32, 0x8f, 0xba, 0x8c, + 0xe0, 0x27, 0x50, 0x90, 0x2c, 0xb6, 0xc2, 0x70, 0xb3, 0x15, 0xc5, 0x8b, 0xa8, 0x9c, 0x5f, 0x9c, + 0xa9, 0xde, 0x30, 0x54, 0x64, 0x1a, 0x23, 0xf5, 0x1a, 0x38, 0x18, 0x69, 0x81, 0xaf, 0xc3, 0xff, + 0x2e, 0x79, 0xc5, 0x9b, 0x7d, 0x08, 0x73, 0x02, 0xe1, 0xbf, 0xe1, 0xf1, 0x56, 0x8c, 0x32, 0x1c, + 0x82, 0x53, 0x6e, 0x75, 0xfa, 0x47, 0x9c, 0x16, 0x27, 0xe1, 0x8c, 0xfa, 0x12, 0xcc, 0xaf, 0x93, + 0xd1, 0x11, 0x62, 0xde, 0x30, 0x4c, 0xb8, 0x56, 0x97, 0x48, 0xd6, 0xc4, 0x6f, 0xfd, 0x1d, 0x82, + 0x52, 0xdd, 0x27, 0x16, 0x27, 0x89, 0xd7, 0x92, 0xe8, 0xde, 0x83, 0x4b, 0x0a, 0x3e, 0x04, 0xf0, + 0x31, 0xe8, 0xb8, 0x38, 0x42, 0x87, 0xfe, 0x11, 0x41, 0x69, 0xc7, 0x3b, 0x4c, 0xc3, 0xb4, 0x02, + 0x33, 0x81, 0xc8, 0x10, 0xc2, 0x91, 0x3d, 0xb5, 0xb8, 0x67, 0xac, 0x2d, 0xe3, 0x61, 0xa8, 0xad, + 0x4d, 0x8b, 0xb5, 0x1b, 0x10, 0xa5, 0x87, 0xbf, 0x93, 0x80, 0xe7, 0xff, 0x1a, 0xf8, 0x32, 0x94, + 0xee, 0x93, 0x0e, 0x49, 0xc1, 0xad, 0x7a, 0x05, 0x7b, 0x70, 0x79, 0x48, 0x79, 0x1b, 0xde, 0x6f, + 0xad, 0x0f, 0x68, 0x3a, 0x97, 0xaa, 0xe9, 0xfc, 0xb0, 0xa6, 0xcf, 0x10, 0x68, 0xaa, 0xca, 0x52, + 0xcf, 0x9b, 0x70, 0x61, 
0x80, 0x06, 0xc7, 0x8b, 0xb5, 0x7c, 0xed, 0x8f, 0x1c, 0x6c, 0x78, 0x8d, + 0xff, 0x82, 0x81, 0xb2, 0x59, 0x35, 0x5c, 0xfd, 0x3a, 0x05, 0xb8, 0xaf, 0xd2, 0x76, 0xe4, 0x48, + 0xf8, 0x13, 0x82, 0x59, 0xf5, 0x02, 0xe2, 0x9a, 0x1a, 0x4e, 0xaa, 0x49, 0x68, 0xcb, 0xe3, 0x5d, + 0x8a, 0x38, 0xd1, 0xab, 0x6f, 0xbf, 0xff, 0x3c, 0xcb, 0xdd, 0xc2, 0x95, 0xd0, 0xd4, 0xde, 0x44, + 0x42, 0xbf, 0xeb, 0xf9, 0xf4, 0x05, 0x69, 0x71, 0x66, 0x56, 0x4e, 0x4d, 0xc5, 0xf2, 0x7e, 0x40, + 0x50, 0x50, 0xad, 0x1d, 0x5e, 0x52, 0x43, 0x48, 0x59, 0x51, 0x2d, 0xab, 0xfa, 0x86, 0x80, 0x86, + 0x3a, 0xea, 0x83, 0xa9, 0x40, 0x69, 0x56, 0x4e, 0xf1, 0x67, 0x04, 0x73, 0x09, 0xbb, 0x8e, 0x13, + 0xe8, 0x4a, 0xb7, 0x86, 0xec, 0x70, 0xd7, 0x05, 0xdc, 0x7b, 0xfa, 0x18, 0xbc, 0xde, 0x51, 0x2d, + 0x29, 0xfe, 0x81, 0x60, 0x2e, 0xc1, 0x1b, 0x92, 0x66, 0x48, 0xb7, 0x92, 0xec, 0x33, 0x3c, 0x13, + 0x33, 0xec, 0x54, 0x57, 0xc5, 0x0c, 0x0a, 0x70, 0x46, 0xa6, 0xd7, 0xa0, 0x9e, 0xeb, 0x3d, 0x82, + 0xb9, 0x04, 0xef, 0x48, 0x9a, 0x2b, 0xdd, 0x6a, 0xb4, 0xd9, 0x11, 0x37, 0x7c, 0x10, 0xfe, 0x0d, + 0xc7, 0xca, 0xa9, 0x8c, 0xa3, 0x9c, 0x33, 0x04, 0x78, 0xd4, 0x49, 0xb0, 0x99, 0x69, 0xc7, 0x7a, + 0x6e, 0xa6, 0xdd, 0xce, 0x7e, 0x41, 0x2e, 0xa4, 0x26, 0xd0, 0x16, 0x30, 0xee, 0x7d, 0x65, 0xc4, + 0x39, 0x6b, 0xe7, 0x08, 0x8a, 0x2d, 0xda, 0x55, 0xd6, 0x5c, 0x93, 0x1e, 0x23, 0xed, 0x65, 0x2b, + 0xe4, 0x60, 0x0b, 0x3d, 0x5d, 0x95, 0xb9, 0x36, 0xed, 0x58, 0xae, 0x6d, 0x50, 0xdf, 0x36, 0x6d, + 0xe2, 0x0a, 0x86, 0xcc, 0x28, 0x64, 0x79, 0x0e, 0x1b, 0xfc, 0xb8, 0x59, 0xe9, 0x3d, 0x9d, 0xe7, + 0xb4, 0xf5, 0xa8, 0x40, 0xbd, 0x43, 0x83, 0x43, 0x63, 0xb3, 0xd7, 0x72, 0xb7, 0xf6, 0x2d, 0x0e, + 0xee, 0x8b, 0xe0, 0x7e, 0x2f, 0xb8, 0xbf, 0x5b, 0x3b, 0x98, 0x14, 0x4d, 0x6a, 0xbf, 0x02, 0x00, + 0x00, 0xff, 0xff, 0x27, 0xb8, 0x65, 0x92, 0x9f, 0x09, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/dlp.pb.go b/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/dlp.pb.go new file mode 100644 index 0000000..b7bdf13 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/dlp.pb.go @@ -0,0 +1,15267 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/privacy/dlp/v2/dlp.proto + +package dlp // import "google.golang.org/genproto/googleapis/privacy/dlp/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" +import date "google.golang.org/genproto/googleapis/type/date" +import dayofweek "google.golang.org/genproto/googleapis/type/dayofweek" +import timeofday "google.golang.org/genproto/googleapis/type/timeofday" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Options describing which parts of the provided content should be scanned. +type ContentOption int32 + +const ( + // Includes entire content of a file or a data stream. + ContentOption_CONTENT_UNSPECIFIED ContentOption = 0 + // Text content within the data, excluding any metadata. + ContentOption_CONTENT_TEXT ContentOption = 1 + // Images found in the data. + ContentOption_CONTENT_IMAGE ContentOption = 2 +) + +var ContentOption_name = map[int32]string{ + 0: "CONTENT_UNSPECIFIED", + 1: "CONTENT_TEXT", + 2: "CONTENT_IMAGE", +} +var ContentOption_value = map[string]int32{ + "CONTENT_UNSPECIFIED": 0, + "CONTENT_TEXT": 1, + "CONTENT_IMAGE": 2, +} + +func (x ContentOption) String() string { + return proto.EnumName(ContentOption_name, int32(x)) +} +func (ContentOption) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{0} +} + +// Type of the match which can be applied to different ways of matching, like +// Dictionary, regular expression and intersecting with findings of another +// info type. +type MatchingType int32 + +const ( + // Invalid. + MatchingType_MATCHING_TYPE_UNSPECIFIED MatchingType = 0 + // Full match. + // + // - Dictionary: join of Dictionary results matched complete finding quote + // - Regex: all regex matches fill a finding quote start to end + // - Exclude info type: completely inside affecting info types findings + MatchingType_MATCHING_TYPE_FULL_MATCH MatchingType = 1 + // Partial match. + // + // - Dictionary: at least one of the tokens in the finding matches + // - Regex: substring of the finding matches + // - Exclude info type: intersects with affecting info types findings + MatchingType_MATCHING_TYPE_PARTIAL_MATCH MatchingType = 2 + // Inverse match. + // + // - Dictionary: no tokens in the finding match the dictionary + // - Regex: finding doesn't match the regex + // - Exclude info type: no intersection with affecting info types findings + MatchingType_MATCHING_TYPE_INVERSE_MATCH MatchingType = 3 +) + +var MatchingType_name = map[int32]string{ + 0: "MATCHING_TYPE_UNSPECIFIED", + 1: "MATCHING_TYPE_FULL_MATCH", + 2: "MATCHING_TYPE_PARTIAL_MATCH", + 3: "MATCHING_TYPE_INVERSE_MATCH", +} +var MatchingType_value = map[string]int32{ + "MATCHING_TYPE_UNSPECIFIED": 0, + "MATCHING_TYPE_FULL_MATCH": 1, + "MATCHING_TYPE_PARTIAL_MATCH": 2, + "MATCHING_TYPE_INVERSE_MATCH": 3, +} + +func (x MatchingType) String() string { + return proto.EnumName(MatchingType_name, int32(x)) +} +func (MatchingType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{1} +} + +// Parts of the APIs which use certain infoTypes. +type InfoTypeSupportedBy int32 + +const ( + InfoTypeSupportedBy_ENUM_TYPE_UNSPECIFIED InfoTypeSupportedBy = 0 + // Supported by the inspect operations. + InfoTypeSupportedBy_INSPECT InfoTypeSupportedBy = 1 + // Supported by the risk analysis operations. 
+ InfoTypeSupportedBy_RISK_ANALYSIS InfoTypeSupportedBy = 2 +) + +var InfoTypeSupportedBy_name = map[int32]string{ + 0: "ENUM_TYPE_UNSPECIFIED", + 1: "INSPECT", + 2: "RISK_ANALYSIS", +} +var InfoTypeSupportedBy_value = map[string]int32{ + "ENUM_TYPE_UNSPECIFIED": 0, + "INSPECT": 1, + "RISK_ANALYSIS": 2, +} + +func (x InfoTypeSupportedBy) String() string { + return proto.EnumName(InfoTypeSupportedBy_name, int32(x)) +} +func (InfoTypeSupportedBy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{2} +} + +// Operators available for comparing the value of fields. +type RelationalOperator int32 + +const ( + RelationalOperator_RELATIONAL_OPERATOR_UNSPECIFIED RelationalOperator = 0 + // Equal. Attempts to match even with incompatible types. + RelationalOperator_EQUAL_TO RelationalOperator = 1 + // Not equal to. Attempts to match even with incompatible types. + RelationalOperator_NOT_EQUAL_TO RelationalOperator = 2 + // Greater than. + RelationalOperator_GREATER_THAN RelationalOperator = 3 + // Less than. + RelationalOperator_LESS_THAN RelationalOperator = 4 + // Greater than or equals. + RelationalOperator_GREATER_THAN_OR_EQUALS RelationalOperator = 5 + // Less than or equals. + RelationalOperator_LESS_THAN_OR_EQUALS RelationalOperator = 6 + // Exists + RelationalOperator_EXISTS RelationalOperator = 7 +) + +var RelationalOperator_name = map[int32]string{ + 0: "RELATIONAL_OPERATOR_UNSPECIFIED", + 1: "EQUAL_TO", + 2: "NOT_EQUAL_TO", + 3: "GREATER_THAN", + 4: "LESS_THAN", + 5: "GREATER_THAN_OR_EQUALS", + 6: "LESS_THAN_OR_EQUALS", + 7: "EXISTS", +} +var RelationalOperator_value = map[string]int32{ + "RELATIONAL_OPERATOR_UNSPECIFIED": 0, + "EQUAL_TO": 1, + "NOT_EQUAL_TO": 2, + "GREATER_THAN": 3, + "LESS_THAN": 4, + "GREATER_THAN_OR_EQUALS": 5, + "LESS_THAN_OR_EQUALS": 6, + "EXISTS": 7, +} + +func (x RelationalOperator) String() string { + return proto.EnumName(RelationalOperator_name, int32(x)) +} +func (RelationalOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{3} +} + +// An enum to represent the various type of DLP jobs. +type DlpJobType int32 + +const ( + DlpJobType_DLP_JOB_TYPE_UNSPECIFIED DlpJobType = 0 + // The job inspected Google Cloud for sensitive data. + DlpJobType_INSPECT_JOB DlpJobType = 1 + // The job executed a Risk Analysis computation. + DlpJobType_RISK_ANALYSIS_JOB DlpJobType = 2 +) + +var DlpJobType_name = map[int32]string{ + 0: "DLP_JOB_TYPE_UNSPECIFIED", + 1: "INSPECT_JOB", + 2: "RISK_ANALYSIS_JOB", +} +var DlpJobType_value = map[string]int32{ + "DLP_JOB_TYPE_UNSPECIFIED": 0, + "INSPECT_JOB": 1, + "RISK_ANALYSIS_JOB": 2, +} + +func (x DlpJobType) String() string { + return proto.EnumName(DlpJobType_name, int32(x)) +} +func (DlpJobType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{4} +} + +// State of a StoredInfoType version. +type StoredInfoTypeState int32 + +const ( + StoredInfoTypeState_STORED_INFO_TYPE_STATE_UNSPECIFIED StoredInfoTypeState = 0 + // StoredInfoType version is being created. + StoredInfoTypeState_PENDING StoredInfoTypeState = 1 + // StoredInfoType version is ready for use. + StoredInfoTypeState_READY StoredInfoTypeState = 2 + // StoredInfoType creation failed. All relevant error messages are returned in + // the `StoredInfoTypeVersion` message. + StoredInfoTypeState_FAILED StoredInfoTypeState = 3 + // StoredInfoType is no longer valid because artifacts stored in + // user-controlled storage were modified. 
To fix an invalid StoredInfoType, + // use the `UpdateStoredInfoType` method to create a new version. + StoredInfoTypeState_INVALID StoredInfoTypeState = 4 +) + +var StoredInfoTypeState_name = map[int32]string{ + 0: "STORED_INFO_TYPE_STATE_UNSPECIFIED", + 1: "PENDING", + 2: "READY", + 3: "FAILED", + 4: "INVALID", +} +var StoredInfoTypeState_value = map[string]int32{ + "STORED_INFO_TYPE_STATE_UNSPECIFIED": 0, + "PENDING": 1, + "READY": 2, + "FAILED": 3, + "INVALID": 4, +} + +func (x StoredInfoTypeState) String() string { + return proto.EnumName(StoredInfoTypeState_name, int32(x)) +} +func (StoredInfoTypeState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{5} +} + +type ByteContentItem_BytesType int32 + +const ( + ByteContentItem_BYTES_TYPE_UNSPECIFIED ByteContentItem_BytesType = 0 + ByteContentItem_IMAGE ByteContentItem_BytesType = 6 + ByteContentItem_IMAGE_JPEG ByteContentItem_BytesType = 1 + ByteContentItem_IMAGE_BMP ByteContentItem_BytesType = 2 + ByteContentItem_IMAGE_PNG ByteContentItem_BytesType = 3 + ByteContentItem_IMAGE_SVG ByteContentItem_BytesType = 4 + ByteContentItem_TEXT_UTF8 ByteContentItem_BytesType = 5 +) + +var ByteContentItem_BytesType_name = map[int32]string{ + 0: "BYTES_TYPE_UNSPECIFIED", + 6: "IMAGE", + 1: "IMAGE_JPEG", + 2: "IMAGE_BMP", + 3: "IMAGE_PNG", + 4: "IMAGE_SVG", + 5: "TEXT_UTF8", +} +var ByteContentItem_BytesType_value = map[string]int32{ + "BYTES_TYPE_UNSPECIFIED": 0, + "IMAGE": 6, + "IMAGE_JPEG": 1, + "IMAGE_BMP": 2, + "IMAGE_PNG": 3, + "IMAGE_SVG": 4, + "TEXT_UTF8": 5, +} + +func (x ByteContentItem_BytesType) String() string { + return proto.EnumName(ByteContentItem_BytesType_name, int32(x)) +} +func (ByteContentItem_BytesType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{5, 0} +} + +// Predefined schemas for storing findings. +type OutputStorageConfig_OutputSchema int32 + +const ( + OutputStorageConfig_OUTPUT_SCHEMA_UNSPECIFIED OutputStorageConfig_OutputSchema = 0 + // Basic schema including only `info_type`, `quote`, `certainty`, and + // `timestamp`. + OutputStorageConfig_BASIC_COLUMNS OutputStorageConfig_OutputSchema = 1 + // Schema tailored to findings from scanning Google Cloud Storage. + OutputStorageConfig_GCS_COLUMNS OutputStorageConfig_OutputSchema = 2 + // Schema tailored to findings from scanning Google Datastore. + OutputStorageConfig_DATASTORE_COLUMNS OutputStorageConfig_OutputSchema = 3 + // Schema tailored to findings from scanning Google BigQuery. + OutputStorageConfig_BIG_QUERY_COLUMNS OutputStorageConfig_OutputSchema = 4 + // Schema containing all columns. 
+ OutputStorageConfig_ALL_COLUMNS OutputStorageConfig_OutputSchema = 5 +) + +var OutputStorageConfig_OutputSchema_name = map[int32]string{ + 0: "OUTPUT_SCHEMA_UNSPECIFIED", + 1: "BASIC_COLUMNS", + 2: "GCS_COLUMNS", + 3: "DATASTORE_COLUMNS", + 4: "BIG_QUERY_COLUMNS", + 5: "ALL_COLUMNS", +} +var OutputStorageConfig_OutputSchema_value = map[string]int32{ + "OUTPUT_SCHEMA_UNSPECIFIED": 0, + "BASIC_COLUMNS": 1, + "GCS_COLUMNS": 2, + "DATASTORE_COLUMNS": 3, + "BIG_QUERY_COLUMNS": 4, + "ALL_COLUMNS": 5, +} + +func (x OutputStorageConfig_OutputSchema) String() string { + return proto.EnumName(OutputStorageConfig_OutputSchema_name, int32(x)) +} +func (OutputStorageConfig_OutputSchema) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{27, 0} +} + +type TimePartConfig_TimePart int32 + +const ( + TimePartConfig_TIME_PART_UNSPECIFIED TimePartConfig_TimePart = 0 + // [0-9999] + TimePartConfig_YEAR TimePartConfig_TimePart = 1 + // [1-12] + TimePartConfig_MONTH TimePartConfig_TimePart = 2 + // [1-31] + TimePartConfig_DAY_OF_MONTH TimePartConfig_TimePart = 3 + // [1-7] + TimePartConfig_DAY_OF_WEEK TimePartConfig_TimePart = 4 + // [1-52] + TimePartConfig_WEEK_OF_YEAR TimePartConfig_TimePart = 5 + // [0-23] + TimePartConfig_HOUR_OF_DAY TimePartConfig_TimePart = 6 +) + +var TimePartConfig_TimePart_name = map[int32]string{ + 0: "TIME_PART_UNSPECIFIED", + 1: "YEAR", + 2: "MONTH", + 3: "DAY_OF_MONTH", + 4: "DAY_OF_WEEK", + 5: "WEEK_OF_YEAR", + 6: "HOUR_OF_DAY", +} +var TimePartConfig_TimePart_value = map[string]int32{ + "TIME_PART_UNSPECIFIED": 0, + "YEAR": 1, + "MONTH": 2, + "DAY_OF_MONTH": 3, + "DAY_OF_WEEK": 4, + "WEEK_OF_YEAR": 5, + "HOUR_OF_DAY": 6, +} + +func (x TimePartConfig_TimePart) String() string { + return proto.EnumName(TimePartConfig_TimePart_name, int32(x)) +} +func (TimePartConfig_TimePart) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{44, 0} +} + +type CharsToIgnore_CommonCharsToIgnore int32 + +const ( + CharsToIgnore_COMMON_CHARS_TO_IGNORE_UNSPECIFIED CharsToIgnore_CommonCharsToIgnore = 0 + // 0-9 + CharsToIgnore_NUMERIC CharsToIgnore_CommonCharsToIgnore = 1 + // A-Z + CharsToIgnore_ALPHA_UPPER_CASE CharsToIgnore_CommonCharsToIgnore = 2 + // a-z + CharsToIgnore_ALPHA_LOWER_CASE CharsToIgnore_CommonCharsToIgnore = 3 + // US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ + CharsToIgnore_PUNCTUATION CharsToIgnore_CommonCharsToIgnore = 4 + // Whitespace character, one of [ \t\n\x0B\f\r] + CharsToIgnore_WHITESPACE CharsToIgnore_CommonCharsToIgnore = 5 +) + +var CharsToIgnore_CommonCharsToIgnore_name = map[int32]string{ + 0: "COMMON_CHARS_TO_IGNORE_UNSPECIFIED", + 1: "NUMERIC", + 2: "ALPHA_UPPER_CASE", + 3: "ALPHA_LOWER_CASE", + 4: "PUNCTUATION", + 5: "WHITESPACE", +} +var CharsToIgnore_CommonCharsToIgnore_value = map[string]int32{ + "COMMON_CHARS_TO_IGNORE_UNSPECIFIED": 0, + "NUMERIC": 1, + "ALPHA_UPPER_CASE": 2, + "ALPHA_LOWER_CASE": 3, + "PUNCTUATION": 4, + "WHITESPACE": 5, +} + +func (x CharsToIgnore_CommonCharsToIgnore) String() string { + return proto.EnumName(CharsToIgnore_CommonCharsToIgnore_name, int32(x)) +} +func (CharsToIgnore_CommonCharsToIgnore) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{50, 0} +} + +// These are commonly used subsets of the alphabet that the FFX mode +// natively supports. In the algorithm, the alphabet is selected using +// the "radix". Therefore each corresponds to particular radix. 
+type CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet int32 + +const ( + CryptoReplaceFfxFpeConfig_FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet = 0 + // [0-9] (radix of 10) + CryptoReplaceFfxFpeConfig_NUMERIC CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet = 1 + // [0-9A-F] (radix of 16) + CryptoReplaceFfxFpeConfig_HEXADECIMAL CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet = 2 + // [0-9A-Z] (radix of 36) + CryptoReplaceFfxFpeConfig_UPPER_CASE_ALPHA_NUMERIC CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet = 3 + // [0-9A-Za-z] (radix of 62) + CryptoReplaceFfxFpeConfig_ALPHA_NUMERIC CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet = 4 +) + +var CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet_name = map[int32]string{ + 0: "FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED", + 1: "NUMERIC", + 2: "HEXADECIMAL", + 3: "UPPER_CASE_ALPHA_NUMERIC", + 4: "ALPHA_NUMERIC", +} +var CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet_value = map[string]int32{ + "FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED": 0, + "NUMERIC": 1, + "HEXADECIMAL": 2, + "UPPER_CASE_ALPHA_NUMERIC": 3, + "ALPHA_NUMERIC": 4, +} + +func (x CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet) String() string { + return proto.EnumName(CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet_name, int32(x)) +} +func (CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{54, 0} +} + +type RecordCondition_Expressions_LogicalOperator int32 + +const ( + RecordCondition_Expressions_LOGICAL_OPERATOR_UNSPECIFIED RecordCondition_Expressions_LogicalOperator = 0 + RecordCondition_Expressions_AND RecordCondition_Expressions_LogicalOperator = 1 +) + +var RecordCondition_Expressions_LogicalOperator_name = map[int32]string{ + 0: "LOGICAL_OPERATOR_UNSPECIFIED", + 1: "AND", +} +var RecordCondition_Expressions_LogicalOperator_value = map[string]int32{ + "LOGICAL_OPERATOR_UNSPECIFIED": 0, + "AND": 1, +} + +func (x RecordCondition_Expressions_LogicalOperator) String() string { + return proto.EnumName(RecordCondition_Expressions_LogicalOperator_name, int32(x)) +} +func (RecordCondition_Expressions_LogicalOperator) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{64, 2, 0} +} + +// Possible outcomes of transformations. +type TransformationSummary_TransformationResultCode int32 + +const ( + TransformationSummary_TRANSFORMATION_RESULT_CODE_UNSPECIFIED TransformationSummary_TransformationResultCode = 0 + TransformationSummary_SUCCESS TransformationSummary_TransformationResultCode = 1 + TransformationSummary_ERROR TransformationSummary_TransformationResultCode = 2 +) + +var TransformationSummary_TransformationResultCode_name = map[int32]string{ + 0: "TRANSFORMATION_RESULT_CODE_UNSPECIFIED", + 1: "SUCCESS", + 2: "ERROR", +} +var TransformationSummary_TransformationResultCode_value = map[string]int32{ + "TRANSFORMATION_RESULT_CODE_UNSPECIFIED": 0, + "SUCCESS": 1, + "ERROR": 2, +} + +func (x TransformationSummary_TransformationResultCode) String() string { + return proto.EnumName(TransformationSummary_TransformationResultCode_name, int32(x)) +} +func (TransformationSummary_TransformationResultCode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{66, 0} +} + +// Whether the trigger is currently active. If PAUSED or CANCELLED, no jobs +// will be created with this configuration. The service may automatically +// pause triggers experiencing frequent errors. 
To restart a job, set the +// status to HEALTHY after correcting user errors. +type JobTrigger_Status int32 + +const ( + JobTrigger_STATUS_UNSPECIFIED JobTrigger_Status = 0 + // Trigger is healthy. + JobTrigger_HEALTHY JobTrigger_Status = 1 + // Trigger is temporarily paused. + JobTrigger_PAUSED JobTrigger_Status = 2 + // Trigger is cancelled and can not be resumed. + JobTrigger_CANCELLED JobTrigger_Status = 3 +) + +var JobTrigger_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "HEALTHY", + 2: "PAUSED", + 3: "CANCELLED", +} +var JobTrigger_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "HEALTHY": 1, + "PAUSED": 2, + "CANCELLED": 3, +} + +func (x JobTrigger_Status) String() string { + return proto.EnumName(JobTrigger_Status_name, int32(x)) +} +func (JobTrigger_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{71, 0} +} + +type DlpJob_JobState int32 + +const ( + DlpJob_JOB_STATE_UNSPECIFIED DlpJob_JobState = 0 + // The job has not yet started. + DlpJob_PENDING DlpJob_JobState = 1 + // The job is currently running. + DlpJob_RUNNING DlpJob_JobState = 2 + // The job is no longer running. + DlpJob_DONE DlpJob_JobState = 3 + // The job was canceled before it could complete. + DlpJob_CANCELED DlpJob_JobState = 4 + // The job had an error and did not complete. + DlpJob_FAILED DlpJob_JobState = 5 +) + +var DlpJob_JobState_name = map[int32]string{ + 0: "JOB_STATE_UNSPECIFIED", + 1: "PENDING", + 2: "RUNNING", + 3: "DONE", + 4: "CANCELED", + 5: "FAILED", +} +var DlpJob_JobState_value = map[string]int32{ + "JOB_STATE_UNSPECIFIED": 0, + "PENDING": 1, + "RUNNING": 2, + "DONE": 3, + "CANCELED": 4, + "FAILED": 5, +} + +func (x DlpJob_JobState) String() string { + return proto.EnumName(DlpJob_JobState_name, int32(x)) +} +func (DlpJob_JobState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{88, 0} +} + +// List of exclude infoTypes. +type ExcludeInfoTypes struct { + // InfoType list in ExclusionRule rule drops a finding when it overlaps or + // contained within with a finding of an infoType from this list. For + // example, for `InspectionRuleSet.info_types` containing "PHONE_NUMBER"` and + // `exclusion_rule` containing `exclude_info_types.info_types` with + // "EMAIL_ADDRESS" the phone number findings are dropped if they overlap + // with EMAIL_ADDRESS finding. + // That leads to "555-222-2222@example.org" to generate only a single + // finding, namely email address. 
+ InfoTypes []*InfoType `protobuf:"bytes,1,rep,name=info_types,json=infoTypes,proto3" json:"info_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExcludeInfoTypes) Reset() { *m = ExcludeInfoTypes{} } +func (m *ExcludeInfoTypes) String() string { return proto.CompactTextString(m) } +func (*ExcludeInfoTypes) ProtoMessage() {} +func (*ExcludeInfoTypes) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{0} +} +func (m *ExcludeInfoTypes) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExcludeInfoTypes.Unmarshal(m, b) +} +func (m *ExcludeInfoTypes) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExcludeInfoTypes.Marshal(b, m, deterministic) +} +func (dst *ExcludeInfoTypes) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExcludeInfoTypes.Merge(dst, src) +} +func (m *ExcludeInfoTypes) XXX_Size() int { + return xxx_messageInfo_ExcludeInfoTypes.Size(m) +} +func (m *ExcludeInfoTypes) XXX_DiscardUnknown() { + xxx_messageInfo_ExcludeInfoTypes.DiscardUnknown(m) +} + +var xxx_messageInfo_ExcludeInfoTypes proto.InternalMessageInfo + +func (m *ExcludeInfoTypes) GetInfoTypes() []*InfoType { + if m != nil { + return m.InfoTypes + } + return nil +} + +// The rule that specifies conditions when findings of infoTypes specified in +// `InspectionRuleSet` are removed from results. +type ExclusionRule struct { + // Types that are valid to be assigned to Type: + // *ExclusionRule_Dictionary + // *ExclusionRule_Regex + // *ExclusionRule_ExcludeInfoTypes + Type isExclusionRule_Type `protobuf_oneof:"type"` + // How the rule is applied, see MatchingType documentation for details. + MatchingType MatchingType `protobuf:"varint,4,opt,name=matching_type,json=matchingType,proto3,enum=google.privacy.dlp.v2.MatchingType" json:"matching_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExclusionRule) Reset() { *m = ExclusionRule{} } +func (m *ExclusionRule) String() string { return proto.CompactTextString(m) } +func (*ExclusionRule) ProtoMessage() {} +func (*ExclusionRule) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{1} +} +func (m *ExclusionRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExclusionRule.Unmarshal(m, b) +} +func (m *ExclusionRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExclusionRule.Marshal(b, m, deterministic) +} +func (dst *ExclusionRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExclusionRule.Merge(dst, src) +} +func (m *ExclusionRule) XXX_Size() int { + return xxx_messageInfo_ExclusionRule.Size(m) +} +func (m *ExclusionRule) XXX_DiscardUnknown() { + xxx_messageInfo_ExclusionRule.DiscardUnknown(m) +} + +var xxx_messageInfo_ExclusionRule proto.InternalMessageInfo + +type isExclusionRule_Type interface { + isExclusionRule_Type() +} + +type ExclusionRule_Dictionary struct { + Dictionary *CustomInfoType_Dictionary `protobuf:"bytes,1,opt,name=dictionary,proto3,oneof"` +} + +type ExclusionRule_Regex struct { + Regex *CustomInfoType_Regex `protobuf:"bytes,2,opt,name=regex,proto3,oneof"` +} + +type ExclusionRule_ExcludeInfoTypes struct { + ExcludeInfoTypes *ExcludeInfoTypes `protobuf:"bytes,3,opt,name=exclude_info_types,json=excludeInfoTypes,proto3,oneof"` +} + +func (*ExclusionRule_Dictionary) isExclusionRule_Type() {} + +func 
(*ExclusionRule_Regex) isExclusionRule_Type() {} + +func (*ExclusionRule_ExcludeInfoTypes) isExclusionRule_Type() {} + +func (m *ExclusionRule) GetType() isExclusionRule_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *ExclusionRule) GetDictionary() *CustomInfoType_Dictionary { + if x, ok := m.GetType().(*ExclusionRule_Dictionary); ok { + return x.Dictionary + } + return nil +} + +func (m *ExclusionRule) GetRegex() *CustomInfoType_Regex { + if x, ok := m.GetType().(*ExclusionRule_Regex); ok { + return x.Regex + } + return nil +} + +func (m *ExclusionRule) GetExcludeInfoTypes() *ExcludeInfoTypes { + if x, ok := m.GetType().(*ExclusionRule_ExcludeInfoTypes); ok { + return x.ExcludeInfoTypes + } + return nil +} + +func (m *ExclusionRule) GetMatchingType() MatchingType { + if m != nil { + return m.MatchingType + } + return MatchingType_MATCHING_TYPE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ExclusionRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ExclusionRule_OneofMarshaler, _ExclusionRule_OneofUnmarshaler, _ExclusionRule_OneofSizer, []interface{}{ + (*ExclusionRule_Dictionary)(nil), + (*ExclusionRule_Regex)(nil), + (*ExclusionRule_ExcludeInfoTypes)(nil), + } +} + +func _ExclusionRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ExclusionRule) + // type + switch x := m.Type.(type) { + case *ExclusionRule_Dictionary: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Dictionary); err != nil { + return err + } + case *ExclusionRule_Regex: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Regex); err != nil { + return err + } + case *ExclusionRule_ExcludeInfoTypes: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExcludeInfoTypes); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ExclusionRule.Type has unexpected type %T", x) + } + return nil +} + +func _ExclusionRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ExclusionRule) + switch tag { + case 1: // type.dictionary + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_Dictionary) + err := b.DecodeMessage(msg) + m.Type = &ExclusionRule_Dictionary{msg} + return true, err + case 2: // type.regex + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_Regex) + err := b.DecodeMessage(msg) + m.Type = &ExclusionRule_Regex{msg} + return true, err + case 3: // type.exclude_info_types + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExcludeInfoTypes) + err := b.DecodeMessage(msg) + m.Type = &ExclusionRule_ExcludeInfoTypes{msg} + return true, err + default: + return false, nil + } +} + +func _ExclusionRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ExclusionRule) + // type + switch x := m.Type.(type) { + case *ExclusionRule_Dictionary: + s := proto.Size(x.Dictionary) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExclusionRule_Regex: + s := proto.Size(x.Regex) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ExclusionRule_ExcludeInfoTypes: + s := proto.Size(x.ExcludeInfoTypes) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: 
+ default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A single inspection rule to be applied to infoTypes, specified in +// `InspectionRuleSet`. +type InspectionRule struct { + // Types that are valid to be assigned to Type: + // *InspectionRule_HotwordRule + // *InspectionRule_ExclusionRule + Type isInspectionRule_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectionRule) Reset() { *m = InspectionRule{} } +func (m *InspectionRule) String() string { return proto.CompactTextString(m) } +func (*InspectionRule) ProtoMessage() {} +func (*InspectionRule) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{2} +} +func (m *InspectionRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectionRule.Unmarshal(m, b) +} +func (m *InspectionRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectionRule.Marshal(b, m, deterministic) +} +func (dst *InspectionRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectionRule.Merge(dst, src) +} +func (m *InspectionRule) XXX_Size() int { + return xxx_messageInfo_InspectionRule.Size(m) +} +func (m *InspectionRule) XXX_DiscardUnknown() { + xxx_messageInfo_InspectionRule.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectionRule proto.InternalMessageInfo + +type isInspectionRule_Type interface { + isInspectionRule_Type() +} + +type InspectionRule_HotwordRule struct { + HotwordRule *CustomInfoType_DetectionRule_HotwordRule `protobuf:"bytes,1,opt,name=hotword_rule,json=hotwordRule,proto3,oneof"` +} + +type InspectionRule_ExclusionRule struct { + ExclusionRule *ExclusionRule `protobuf:"bytes,2,opt,name=exclusion_rule,json=exclusionRule,proto3,oneof"` +} + +func (*InspectionRule_HotwordRule) isInspectionRule_Type() {} + +func (*InspectionRule_ExclusionRule) isInspectionRule_Type() {} + +func (m *InspectionRule) GetType() isInspectionRule_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *InspectionRule) GetHotwordRule() *CustomInfoType_DetectionRule_HotwordRule { + if x, ok := m.GetType().(*InspectionRule_HotwordRule); ok { + return x.HotwordRule + } + return nil +} + +func (m *InspectionRule) GetExclusionRule() *ExclusionRule { + if x, ok := m.GetType().(*InspectionRule_ExclusionRule); ok { + return x.ExclusionRule + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*InspectionRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _InspectionRule_OneofMarshaler, _InspectionRule_OneofUnmarshaler, _InspectionRule_OneofSizer, []interface{}{ + (*InspectionRule_HotwordRule)(nil), + (*InspectionRule_ExclusionRule)(nil), + } +} + +func _InspectionRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*InspectionRule) + // type + switch x := m.Type.(type) { + case *InspectionRule_HotwordRule: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotwordRule); err != nil { + return err + } + case *InspectionRule_ExclusionRule: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExclusionRule); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("InspectionRule.Type has unexpected type %T", x) + } + return nil +} + +func _InspectionRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*InspectionRule) + switch tag { + case 1: // type.hotword_rule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_DetectionRule_HotwordRule) + err := b.DecodeMessage(msg) + m.Type = &InspectionRule_HotwordRule{msg} + return true, err + case 2: // type.exclusion_rule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExclusionRule) + err := b.DecodeMessage(msg) + m.Type = &InspectionRule_ExclusionRule{msg} + return true, err + default: + return false, nil + } +} + +func _InspectionRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*InspectionRule) + // type + switch x := m.Type.(type) { + case *InspectionRule_HotwordRule: + s := proto.Size(x.HotwordRule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *InspectionRule_ExclusionRule: + s := proto.Size(x.ExclusionRule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Rule set for modifying a set of infoTypes to alter behavior under certain +// circumstances, depending on the specific details of the rules within the set. +type InspectionRuleSet struct { + // List of infoTypes this rule set is applied to. + InfoTypes []*InfoType `protobuf:"bytes,1,rep,name=info_types,json=infoTypes,proto3" json:"info_types,omitempty"` + // Set of rules to be applied to infoTypes. The rules are applied in order. 
+ Rules []*InspectionRule `protobuf:"bytes,2,rep,name=rules,proto3" json:"rules,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectionRuleSet) Reset() { *m = InspectionRuleSet{} } +func (m *InspectionRuleSet) String() string { return proto.CompactTextString(m) } +func (*InspectionRuleSet) ProtoMessage() {} +func (*InspectionRuleSet) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{3} +} +func (m *InspectionRuleSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectionRuleSet.Unmarshal(m, b) +} +func (m *InspectionRuleSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectionRuleSet.Marshal(b, m, deterministic) +} +func (dst *InspectionRuleSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectionRuleSet.Merge(dst, src) +} +func (m *InspectionRuleSet) XXX_Size() int { + return xxx_messageInfo_InspectionRuleSet.Size(m) +} +func (m *InspectionRuleSet) XXX_DiscardUnknown() { + xxx_messageInfo_InspectionRuleSet.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectionRuleSet proto.InternalMessageInfo + +func (m *InspectionRuleSet) GetInfoTypes() []*InfoType { + if m != nil { + return m.InfoTypes + } + return nil +} + +func (m *InspectionRuleSet) GetRules() []*InspectionRule { + if m != nil { + return m.Rules + } + return nil +} + +// Configuration description of the scanning process. +// When used with redactContent only info_types and min_likelihood are currently +// used. +type InspectConfig struct { + // Restricts what info_types to look for. The values must correspond to + // InfoType values returned by ListInfoTypes or listed at + // https://cloud.google.com/dlp/docs/infotypes-reference. + // + // When no InfoTypes or CustomInfoTypes are specified in a request, the + // system may automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + // + // The special InfoType name "ALL_BASIC" can be used to trigger all detectors, + // but may change over time as new InfoTypes are added. If you need precise + // control and predictability as to what detectors are run you should specify + // specific InfoTypes listed in the reference. + InfoTypes []*InfoType `protobuf:"bytes,1,rep,name=info_types,json=infoTypes,proto3" json:"info_types,omitempty"` + // Only returns findings equal or above this threshold. The default is + // POSSIBLE. + // See https://cloud.google.com/dlp/docs/likelihood to learn more. + MinLikelihood Likelihood `protobuf:"varint,2,opt,name=min_likelihood,json=minLikelihood,proto3,enum=google.privacy.dlp.v2.Likelihood" json:"min_likelihood,omitempty"` + Limits *InspectConfig_FindingLimits `protobuf:"bytes,3,opt,name=limits,proto3" json:"limits,omitempty"` + // When true, a contextual quote from the data that triggered a finding is + // included in the response; see Finding.quote. + IncludeQuote bool `protobuf:"varint,4,opt,name=include_quote,json=includeQuote,proto3" json:"include_quote,omitempty"` + // When true, excludes type information of the findings. + ExcludeInfoTypes bool `protobuf:"varint,5,opt,name=exclude_info_types,json=excludeInfoTypes,proto3" json:"exclude_info_types,omitempty"` + // CustomInfoTypes provided by the user. See + // https://cloud.google.com/dlp/docs/creating-custom-infotypes to learn more. 
+ CustomInfoTypes []*CustomInfoType `protobuf:"bytes,6,rep,name=custom_info_types,json=customInfoTypes,proto3" json:"custom_info_types,omitempty"` + // List of options defining data content to scan. + // If empty, text, images, and other content will be included. + ContentOptions []ContentOption `protobuf:"varint,8,rep,packed,name=content_options,json=contentOptions,proto3,enum=google.privacy.dlp.v2.ContentOption" json:"content_options,omitempty"` + // Set of rules to apply to the findings for this InspectConfig. + // Exclusion rules, contained in the set are executed in the end, other + // rules are executed in the order they are specified for each info type. + RuleSet []*InspectionRuleSet `protobuf:"bytes,10,rep,name=rule_set,json=ruleSet,proto3" json:"rule_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectConfig) Reset() { *m = InspectConfig{} } +func (m *InspectConfig) String() string { return proto.CompactTextString(m) } +func (*InspectConfig) ProtoMessage() {} +func (*InspectConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{4} +} +func (m *InspectConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectConfig.Unmarshal(m, b) +} +func (m *InspectConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectConfig.Marshal(b, m, deterministic) +} +func (dst *InspectConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectConfig.Merge(dst, src) +} +func (m *InspectConfig) XXX_Size() int { + return xxx_messageInfo_InspectConfig.Size(m) +} +func (m *InspectConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InspectConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectConfig proto.InternalMessageInfo + +func (m *InspectConfig) GetInfoTypes() []*InfoType { + if m != nil { + return m.InfoTypes + } + return nil +} + +func (m *InspectConfig) GetMinLikelihood() Likelihood { + if m != nil { + return m.MinLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +func (m *InspectConfig) GetLimits() *InspectConfig_FindingLimits { + if m != nil { + return m.Limits + } + return nil +} + +func (m *InspectConfig) GetIncludeQuote() bool { + if m != nil { + return m.IncludeQuote + } + return false +} + +func (m *InspectConfig) GetExcludeInfoTypes() bool { + if m != nil { + return m.ExcludeInfoTypes + } + return false +} + +func (m *InspectConfig) GetCustomInfoTypes() []*CustomInfoType { + if m != nil { + return m.CustomInfoTypes + } + return nil +} + +func (m *InspectConfig) GetContentOptions() []ContentOption { + if m != nil { + return m.ContentOptions + } + return nil +} + +func (m *InspectConfig) GetRuleSet() []*InspectionRuleSet { + if m != nil { + return m.RuleSet + } + return nil +} + +type InspectConfig_FindingLimits struct { + // Max number of findings that will be returned for each item scanned. + // When set within `InspectDataSourceRequest`, + // the maximum returned is 2000 regardless if this is set higher. + // When set within `InspectContentRequest`, this field is ignored. + MaxFindingsPerItem int32 `protobuf:"varint,1,opt,name=max_findings_per_item,json=maxFindingsPerItem,proto3" json:"max_findings_per_item,omitempty"` + // Max number of findings that will be returned per request/job. + // When set within `InspectContentRequest`, the maximum returned is 2000 + // regardless if this is set higher. 
+ MaxFindingsPerRequest int32 `protobuf:"varint,2,opt,name=max_findings_per_request,json=maxFindingsPerRequest,proto3" json:"max_findings_per_request,omitempty"` + // Configuration of findings limit given for specified infoTypes. + MaxFindingsPerInfoType []*InspectConfig_FindingLimits_InfoTypeLimit `protobuf:"bytes,3,rep,name=max_findings_per_info_type,json=maxFindingsPerInfoType,proto3" json:"max_findings_per_info_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectConfig_FindingLimits) Reset() { *m = InspectConfig_FindingLimits{} } +func (m *InspectConfig_FindingLimits) String() string { return proto.CompactTextString(m) } +func (*InspectConfig_FindingLimits) ProtoMessage() {} +func (*InspectConfig_FindingLimits) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{4, 0} +} +func (m *InspectConfig_FindingLimits) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectConfig_FindingLimits.Unmarshal(m, b) +} +func (m *InspectConfig_FindingLimits) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectConfig_FindingLimits.Marshal(b, m, deterministic) +} +func (dst *InspectConfig_FindingLimits) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectConfig_FindingLimits.Merge(dst, src) +} +func (m *InspectConfig_FindingLimits) XXX_Size() int { + return xxx_messageInfo_InspectConfig_FindingLimits.Size(m) +} +func (m *InspectConfig_FindingLimits) XXX_DiscardUnknown() { + xxx_messageInfo_InspectConfig_FindingLimits.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectConfig_FindingLimits proto.InternalMessageInfo + +func (m *InspectConfig_FindingLimits) GetMaxFindingsPerItem() int32 { + if m != nil { + return m.MaxFindingsPerItem + } + return 0 +} + +func (m *InspectConfig_FindingLimits) GetMaxFindingsPerRequest() int32 { + if m != nil { + return m.MaxFindingsPerRequest + } + return 0 +} + +func (m *InspectConfig_FindingLimits) GetMaxFindingsPerInfoType() []*InspectConfig_FindingLimits_InfoTypeLimit { + if m != nil { + return m.MaxFindingsPerInfoType + } + return nil +} + +// Max findings configuration per infoType, per content item or long +// running DlpJob. +type InspectConfig_FindingLimits_InfoTypeLimit struct { + // Type of information the findings limit applies to. Only one limit per + // info_type should be provided. If InfoTypeLimit does not have an + // info_type, the DLP API applies the limit against all info_types that + // are found but not specified in another InfoTypeLimit. + InfoType *InfoType `protobuf:"bytes,1,opt,name=info_type,json=infoType,proto3" json:"info_type,omitempty"` + // Max findings limit for the given infoType. 
+ MaxFindings int32 `protobuf:"varint,2,opt,name=max_findings,json=maxFindings,proto3" json:"max_findings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectConfig_FindingLimits_InfoTypeLimit) Reset() { + *m = InspectConfig_FindingLimits_InfoTypeLimit{} +} +func (m *InspectConfig_FindingLimits_InfoTypeLimit) String() string { return proto.CompactTextString(m) } +func (*InspectConfig_FindingLimits_InfoTypeLimit) ProtoMessage() {} +func (*InspectConfig_FindingLimits_InfoTypeLimit) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{4, 0, 0} +} +func (m *InspectConfig_FindingLimits_InfoTypeLimit) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit.Unmarshal(m, b) +} +func (m *InspectConfig_FindingLimits_InfoTypeLimit) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit.Marshal(b, m, deterministic) +} +func (dst *InspectConfig_FindingLimits_InfoTypeLimit) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit.Merge(dst, src) +} +func (m *InspectConfig_FindingLimits_InfoTypeLimit) XXX_Size() int { + return xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit.Size(m) +} +func (m *InspectConfig_FindingLimits_InfoTypeLimit) XXX_DiscardUnknown() { + xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectConfig_FindingLimits_InfoTypeLimit proto.InternalMessageInfo + +func (m *InspectConfig_FindingLimits_InfoTypeLimit) GetInfoType() *InfoType { + if m != nil { + return m.InfoType + } + return nil +} + +func (m *InspectConfig_FindingLimits_InfoTypeLimit) GetMaxFindings() int32 { + if m != nil { + return m.MaxFindings + } + return 0 +} + +// Container for bytes to inspect or redact. +type ByteContentItem struct { + // The type of data stored in the bytes string. Default will be TEXT_UTF8. + Type ByteContentItem_BytesType `protobuf:"varint,1,opt,name=type,proto3,enum=google.privacy.dlp.v2.ByteContentItem_BytesType" json:"type,omitempty"` + // Content data to inspect or redact. 
+ Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ByteContentItem) Reset() { *m = ByteContentItem{} } +func (m *ByteContentItem) String() string { return proto.CompactTextString(m) } +func (*ByteContentItem) ProtoMessage() {} +func (*ByteContentItem) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{5} +} +func (m *ByteContentItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ByteContentItem.Unmarshal(m, b) +} +func (m *ByteContentItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ByteContentItem.Marshal(b, m, deterministic) +} +func (dst *ByteContentItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_ByteContentItem.Merge(dst, src) +} +func (m *ByteContentItem) XXX_Size() int { + return xxx_messageInfo_ByteContentItem.Size(m) +} +func (m *ByteContentItem) XXX_DiscardUnknown() { + xxx_messageInfo_ByteContentItem.DiscardUnknown(m) +} + +var xxx_messageInfo_ByteContentItem proto.InternalMessageInfo + +func (m *ByteContentItem) GetType() ByteContentItem_BytesType { + if m != nil { + return m.Type + } + return ByteContentItem_BYTES_TYPE_UNSPECIFIED +} + +func (m *ByteContentItem) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// Container structure for the content to inspect. +type ContentItem struct { + // Data of the item either in the byte array or UTF-8 string form, or table. + // + // Types that are valid to be assigned to DataItem: + // *ContentItem_Value + // *ContentItem_Table + // *ContentItem_ByteItem + DataItem isContentItem_DataItem `protobuf_oneof:"data_item"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContentItem) Reset() { *m = ContentItem{} } +func (m *ContentItem) String() string { return proto.CompactTextString(m) } +func (*ContentItem) ProtoMessage() {} +func (*ContentItem) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{6} +} +func (m *ContentItem) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContentItem.Unmarshal(m, b) +} +func (m *ContentItem) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContentItem.Marshal(b, m, deterministic) +} +func (dst *ContentItem) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContentItem.Merge(dst, src) +} +func (m *ContentItem) XXX_Size() int { + return xxx_messageInfo_ContentItem.Size(m) +} +func (m *ContentItem) XXX_DiscardUnknown() { + xxx_messageInfo_ContentItem.DiscardUnknown(m) +} + +var xxx_messageInfo_ContentItem proto.InternalMessageInfo + +type isContentItem_DataItem interface { + isContentItem_DataItem() +} + +type ContentItem_Value struct { + Value string `protobuf:"bytes,3,opt,name=value,proto3,oneof"` +} + +type ContentItem_Table struct { + Table *Table `protobuf:"bytes,4,opt,name=table,proto3,oneof"` +} + +type ContentItem_ByteItem struct { + ByteItem *ByteContentItem `protobuf:"bytes,5,opt,name=byte_item,json=byteItem,proto3,oneof"` +} + +func (*ContentItem_Value) isContentItem_DataItem() {} + +func (*ContentItem_Table) isContentItem_DataItem() {} + +func (*ContentItem_ByteItem) isContentItem_DataItem() {} + +func (m *ContentItem) GetDataItem() isContentItem_DataItem { + if m != nil { + return m.DataItem + } + return nil +} + +func (m *ContentItem) GetValue() string { + if x, ok := 
m.GetDataItem().(*ContentItem_Value); ok { + return x.Value + } + return "" +} + +func (m *ContentItem) GetTable() *Table { + if x, ok := m.GetDataItem().(*ContentItem_Table); ok { + return x.Table + } + return nil +} + +func (m *ContentItem) GetByteItem() *ByteContentItem { + if x, ok := m.GetDataItem().(*ContentItem_ByteItem); ok { + return x.ByteItem + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ContentItem) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ContentItem_OneofMarshaler, _ContentItem_OneofUnmarshaler, _ContentItem_OneofSizer, []interface{}{ + (*ContentItem_Value)(nil), + (*ContentItem_Table)(nil), + (*ContentItem_ByteItem)(nil), + } +} + +func _ContentItem_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ContentItem) + // data_item + switch x := m.DataItem.(type) { + case *ContentItem_Value: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Value) + case *ContentItem_Table: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Table); err != nil { + return err + } + case *ContentItem_ByteItem: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ByteItem); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ContentItem.DataItem has unexpected type %T", x) + } + return nil +} + +func _ContentItem_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ContentItem) + switch tag { + case 3: // data_item.value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.DataItem = &ContentItem_Value{x} + return true, err + case 4: // data_item.table + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Table) + err := b.DecodeMessage(msg) + m.DataItem = &ContentItem_Table{msg} + return true, err + case 5: // data_item.byte_item + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ByteContentItem) + err := b.DecodeMessage(msg) + m.DataItem = &ContentItem_ByteItem{msg} + return true, err + default: + return false, nil + } +} + +func _ContentItem_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ContentItem) + // data_item + switch x := m.DataItem.(type) { + case *ContentItem_Value: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Value))) + n += len(x.Value) + case *ContentItem_Table: + s := proto.Size(x.Table) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ContentItem_ByteItem: + s := proto.Size(x.ByteItem) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Structured content to inspect. Up to 50,000 `Value`s per request allowed. +// See https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table to +// learn more. 
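+//
+// Illustrative sketch only, not part of the generated code: assuming the
+// FieldId.Name field and the Value_StringValue oneof wrapper defined elsewhere
+// in this package, a one-row table could be built roughly as
+//
+//	tbl := &Table{
+//		Headers: []*FieldId{{Name: "comment"}},
+//		Rows: []*Table_Row{
+//			{Values: []*Value{{Type: &Value_StringValue{StringValue: "john@example.com"}}}},
+//		},
+//	}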
+type Table struct { + Headers []*FieldId `protobuf:"bytes,1,rep,name=headers,proto3" json:"headers,omitempty"` + Rows []*Table_Row `protobuf:"bytes,2,rep,name=rows,proto3" json:"rows,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table) Reset() { *m = Table{} } +func (m *Table) String() string { return proto.CompactTextString(m) } +func (*Table) ProtoMessage() {} +func (*Table) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{7} +} +func (m *Table) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table.Unmarshal(m, b) +} +func (m *Table) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table.Marshal(b, m, deterministic) +} +func (dst *Table) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table.Merge(dst, src) +} +func (m *Table) XXX_Size() int { + return xxx_messageInfo_Table.Size(m) +} +func (m *Table) XXX_DiscardUnknown() { + xxx_messageInfo_Table.DiscardUnknown(m) +} + +var xxx_messageInfo_Table proto.InternalMessageInfo + +func (m *Table) GetHeaders() []*FieldId { + if m != nil { + return m.Headers + } + return nil +} + +func (m *Table) GetRows() []*Table_Row { + if m != nil { + return m.Rows + } + return nil +} + +type Table_Row struct { + Values []*Value `protobuf:"bytes,1,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Table_Row) Reset() { *m = Table_Row{} } +func (m *Table_Row) String() string { return proto.CompactTextString(m) } +func (*Table_Row) ProtoMessage() {} +func (*Table_Row) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{7, 0} +} +func (m *Table_Row) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Table_Row.Unmarshal(m, b) +} +func (m *Table_Row) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Table_Row.Marshal(b, m, deterministic) +} +func (dst *Table_Row) XXX_Merge(src proto.Message) { + xxx_messageInfo_Table_Row.Merge(dst, src) +} +func (m *Table_Row) XXX_Size() int { + return xxx_messageInfo_Table_Row.Size(m) +} +func (m *Table_Row) XXX_DiscardUnknown() { + xxx_messageInfo_Table_Row.DiscardUnknown(m) +} + +var xxx_messageInfo_Table_Row proto.InternalMessageInfo + +func (m *Table_Row) GetValues() []*Value { + if m != nil { + return m.Values + } + return nil +} + +// All the findings for a single scanned item. +type InspectResult struct { + // List of findings for an item. + Findings []*Finding `protobuf:"bytes,1,rep,name=findings,proto3" json:"findings,omitempty"` + // If true, then this item might have more findings than were returned, + // and the findings returned are an arbitrary subset of all findings. + // The findings list might be truncated because the input items were too + // large, or because the server reached the maximum amount of resources + // allowed for a single API call. For best results, divide the input into + // smaller batches. 
+ FindingsTruncated bool `protobuf:"varint,2,opt,name=findings_truncated,json=findingsTruncated,proto3" json:"findings_truncated,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectResult) Reset() { *m = InspectResult{} } +func (m *InspectResult) String() string { return proto.CompactTextString(m) } +func (*InspectResult) ProtoMessage() {} +func (*InspectResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{8} +} +func (m *InspectResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectResult.Unmarshal(m, b) +} +func (m *InspectResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectResult.Marshal(b, m, deterministic) +} +func (dst *InspectResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectResult.Merge(dst, src) +} +func (m *InspectResult) XXX_Size() int { + return xxx_messageInfo_InspectResult.Size(m) +} +func (m *InspectResult) XXX_DiscardUnknown() { + xxx_messageInfo_InspectResult.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectResult proto.InternalMessageInfo + +func (m *InspectResult) GetFindings() []*Finding { + if m != nil { + return m.Findings + } + return nil +} + +func (m *InspectResult) GetFindingsTruncated() bool { + if m != nil { + return m.FindingsTruncated + } + return false +} + +// Represents a piece of potentially sensitive content. +type Finding struct { + // The content that was found. Even if the content is not textual, it + // may be converted to a textual representation here. + // Provided if `include_quote` is true and the finding is + // less than or equal to 4096 bytes long. If the finding exceeds 4096 bytes + // in length, the quote may be omitted. + Quote string `protobuf:"bytes,1,opt,name=quote,proto3" json:"quote,omitempty"` + // The type of content that might have been found. + // Provided if `excluded_types` is false. + InfoType *InfoType `protobuf:"bytes,2,opt,name=info_type,json=infoType,proto3" json:"info_type,omitempty"` + // Confidence of how likely it is that the `info_type` is correct. + Likelihood Likelihood `protobuf:"varint,3,opt,name=likelihood,proto3,enum=google.privacy.dlp.v2.Likelihood" json:"likelihood,omitempty"` + // Where the content was found. + Location *Location `protobuf:"bytes,4,opt,name=location,proto3" json:"location,omitempty"` + // Timestamp when finding was detected. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Contains data parsed from quotes. Only populated if include_quote was set + // to true and a supported infoType was requested. Currently supported + // infoTypes: DATE, DATE_OF_BIRTH and TIME. 
+ QuoteInfo *QuoteInfo `protobuf:"bytes,7,opt,name=quote_info,json=quoteInfo,proto3" json:"quote_info,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Finding) Reset() { *m = Finding{} } +func (m *Finding) String() string { return proto.CompactTextString(m) } +func (*Finding) ProtoMessage() {} +func (*Finding) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{9} +} +func (m *Finding) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Finding.Unmarshal(m, b) +} +func (m *Finding) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Finding.Marshal(b, m, deterministic) +} +func (dst *Finding) XXX_Merge(src proto.Message) { + xxx_messageInfo_Finding.Merge(dst, src) +} +func (m *Finding) XXX_Size() int { + return xxx_messageInfo_Finding.Size(m) +} +func (m *Finding) XXX_DiscardUnknown() { + xxx_messageInfo_Finding.DiscardUnknown(m) +} + +var xxx_messageInfo_Finding proto.InternalMessageInfo + +func (m *Finding) GetQuote() string { + if m != nil { + return m.Quote + } + return "" +} + +func (m *Finding) GetInfoType() *InfoType { + if m != nil { + return m.InfoType + } + return nil +} + +func (m *Finding) GetLikelihood() Likelihood { + if m != nil { + return m.Likelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +func (m *Finding) GetLocation() *Location { + if m != nil { + return m.Location + } + return nil +} + +func (m *Finding) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Finding) GetQuoteInfo() *QuoteInfo { + if m != nil { + return m.QuoteInfo + } + return nil +} + +// Specifies the location of the finding. +type Location struct { + // Zero-based byte offsets delimiting the finding. + // These are relative to the finding's containing element. + // Note that when the content is not textual, this references + // the UTF-8 encoded textual representation of the content. + // Omitted if content is an image. + ByteRange *Range `protobuf:"bytes,1,opt,name=byte_range,json=byteRange,proto3" json:"byte_range,omitempty"` + // Unicode character offsets delimiting the finding. + // These are relative to the finding's containing element. + // Provided when the content is text. + CodepointRange *Range `protobuf:"bytes,2,opt,name=codepoint_range,json=codepointRange,proto3" json:"codepoint_range,omitempty"` + // List of nested objects pointing to the precise location of the finding + // within the file or record. 
+ ContentLocations []*ContentLocation `protobuf:"bytes,7,rep,name=content_locations,json=contentLocations,proto3" json:"content_locations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Location) Reset() { *m = Location{} } +func (m *Location) String() string { return proto.CompactTextString(m) } +func (*Location) ProtoMessage() {} +func (*Location) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{10} +} +func (m *Location) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Location.Unmarshal(m, b) +} +func (m *Location) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Location.Marshal(b, m, deterministic) +} +func (dst *Location) XXX_Merge(src proto.Message) { + xxx_messageInfo_Location.Merge(dst, src) +} +func (m *Location) XXX_Size() int { + return xxx_messageInfo_Location.Size(m) +} +func (m *Location) XXX_DiscardUnknown() { + xxx_messageInfo_Location.DiscardUnknown(m) +} + +var xxx_messageInfo_Location proto.InternalMessageInfo + +func (m *Location) GetByteRange() *Range { + if m != nil { + return m.ByteRange + } + return nil +} + +func (m *Location) GetCodepointRange() *Range { + if m != nil { + return m.CodepointRange + } + return nil +} + +func (m *Location) GetContentLocations() []*ContentLocation { + if m != nil { + return m.ContentLocations + } + return nil +} + +// Findings container location data. +type ContentLocation struct { + // Name of the container where the finding is located. + // The top level name is the source file name or table name. Names of some + // common storage containers are formatted as follows: + // + // * BigQuery tables: `:.` + // * Cloud Storage files: `gs:///` + // * Datastore namespace: + // + // Nested names could be absent if the embedded object has no string + // identifier (for an example an image contained within a document). + ContainerName string `protobuf:"bytes,1,opt,name=container_name,json=containerName,proto3" json:"container_name,omitempty"` + // Type of the container within the file with location of the finding. + // + // Types that are valid to be assigned to Location: + // *ContentLocation_RecordLocation + // *ContentLocation_ImageLocation + // *ContentLocation_DocumentLocation + Location isContentLocation_Location `protobuf_oneof:"location"` + // Findings container modification timestamp, if applicable. + // For Google Cloud Storage contains last file modification timestamp. + // For BigQuery table contains last_modified_time property. + // For Datastore - not populated. + ContainerTimestamp *timestamp.Timestamp `protobuf:"bytes,6,opt,name=container_timestamp,json=containerTimestamp,proto3" json:"container_timestamp,omitempty"` + // Findings container version, if available + // ("generation" for Google Cloud Storage). 
+ ContainerVersion string `protobuf:"bytes,7,opt,name=container_version,json=containerVersion,proto3" json:"container_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ContentLocation) Reset() { *m = ContentLocation{} } +func (m *ContentLocation) String() string { return proto.CompactTextString(m) } +func (*ContentLocation) ProtoMessage() {} +func (*ContentLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{11} +} +func (m *ContentLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ContentLocation.Unmarshal(m, b) +} +func (m *ContentLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ContentLocation.Marshal(b, m, deterministic) +} +func (dst *ContentLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ContentLocation.Merge(dst, src) +} +func (m *ContentLocation) XXX_Size() int { + return xxx_messageInfo_ContentLocation.Size(m) +} +func (m *ContentLocation) XXX_DiscardUnknown() { + xxx_messageInfo_ContentLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_ContentLocation proto.InternalMessageInfo + +func (m *ContentLocation) GetContainerName() string { + if m != nil { + return m.ContainerName + } + return "" +} + +type isContentLocation_Location interface { + isContentLocation_Location() +} + +type ContentLocation_RecordLocation struct { + RecordLocation *RecordLocation `protobuf:"bytes,2,opt,name=record_location,json=recordLocation,proto3,oneof"` +} + +type ContentLocation_ImageLocation struct { + ImageLocation *ImageLocation `protobuf:"bytes,3,opt,name=image_location,json=imageLocation,proto3,oneof"` +} + +type ContentLocation_DocumentLocation struct { + DocumentLocation *DocumentLocation `protobuf:"bytes,5,opt,name=document_location,json=documentLocation,proto3,oneof"` +} + +func (*ContentLocation_RecordLocation) isContentLocation_Location() {} + +func (*ContentLocation_ImageLocation) isContentLocation_Location() {} + +func (*ContentLocation_DocumentLocation) isContentLocation_Location() {} + +func (m *ContentLocation) GetLocation() isContentLocation_Location { + if m != nil { + return m.Location + } + return nil +} + +func (m *ContentLocation) GetRecordLocation() *RecordLocation { + if x, ok := m.GetLocation().(*ContentLocation_RecordLocation); ok { + return x.RecordLocation + } + return nil +} + +func (m *ContentLocation) GetImageLocation() *ImageLocation { + if x, ok := m.GetLocation().(*ContentLocation_ImageLocation); ok { + return x.ImageLocation + } + return nil +} + +func (m *ContentLocation) GetDocumentLocation() *DocumentLocation { + if x, ok := m.GetLocation().(*ContentLocation_DocumentLocation); ok { + return x.DocumentLocation + } + return nil +} + +func (m *ContentLocation) GetContainerTimestamp() *timestamp.Timestamp { + if m != nil { + return m.ContainerTimestamp + } + return nil +} + +func (m *ContentLocation) GetContainerVersion() string { + if m != nil { + return m.ContainerVersion + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
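+//
+// Illustrative note, not part of the generated code: callers normally do not
+// use these oneof functions directly. The Location oneof above is set by
+// assigning one of its wrapper types and read back through the typed getters,
+// for example:
+//
+//	cl := &ContentLocation{
+//		Location: &ContentLocation_RecordLocation{RecordLocation: &RecordLocation{}},
+//	}
+//	if rl := cl.GetRecordLocation(); rl != nil {
+//		// handle the record location
+//	}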
+func (*ContentLocation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ContentLocation_OneofMarshaler, _ContentLocation_OneofUnmarshaler, _ContentLocation_OneofSizer, []interface{}{ + (*ContentLocation_RecordLocation)(nil), + (*ContentLocation_ImageLocation)(nil), + (*ContentLocation_DocumentLocation)(nil), + } +} + +func _ContentLocation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ContentLocation) + // location + switch x := m.Location.(type) { + case *ContentLocation_RecordLocation: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RecordLocation); err != nil { + return err + } + case *ContentLocation_ImageLocation: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ImageLocation); err != nil { + return err + } + case *ContentLocation_DocumentLocation: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DocumentLocation); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ContentLocation.Location has unexpected type %T", x) + } + return nil +} + +func _ContentLocation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ContentLocation) + switch tag { + case 2: // location.record_location + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RecordLocation) + err := b.DecodeMessage(msg) + m.Location = &ContentLocation_RecordLocation{msg} + return true, err + case 3: // location.image_location + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ImageLocation) + err := b.DecodeMessage(msg) + m.Location = &ContentLocation_ImageLocation{msg} + return true, err + case 5: // location.document_location + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DocumentLocation) + err := b.DecodeMessage(msg) + m.Location = &ContentLocation_DocumentLocation{msg} + return true, err + default: + return false, nil + } +} + +func _ContentLocation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ContentLocation) + // location + switch x := m.Location.(type) { + case *ContentLocation_RecordLocation: + s := proto.Size(x.RecordLocation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ContentLocation_ImageLocation: + s := proto.Size(x.ImageLocation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ContentLocation_DocumentLocation: + s := proto.Size(x.DocumentLocation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Location of a finding within a document. +type DocumentLocation struct { + // Offset of the line, from the beginning of the file, where the finding + // is located. 
+ FileOffset int64 `protobuf:"varint,1,opt,name=file_offset,json=fileOffset,proto3" json:"file_offset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DocumentLocation) Reset() { *m = DocumentLocation{} } +func (m *DocumentLocation) String() string { return proto.CompactTextString(m) } +func (*DocumentLocation) ProtoMessage() {} +func (*DocumentLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{12} +} +func (m *DocumentLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DocumentLocation.Unmarshal(m, b) +} +func (m *DocumentLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DocumentLocation.Marshal(b, m, deterministic) +} +func (dst *DocumentLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_DocumentLocation.Merge(dst, src) +} +func (m *DocumentLocation) XXX_Size() int { + return xxx_messageInfo_DocumentLocation.Size(m) +} +func (m *DocumentLocation) XXX_DiscardUnknown() { + xxx_messageInfo_DocumentLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_DocumentLocation proto.InternalMessageInfo + +func (m *DocumentLocation) GetFileOffset() int64 { + if m != nil { + return m.FileOffset + } + return 0 +} + +// Location of a finding within a row or record. +type RecordLocation struct { + // Key of the finding. + RecordKey *RecordKey `protobuf:"bytes,1,opt,name=record_key,json=recordKey,proto3" json:"record_key,omitempty"` + // Field id of the field containing the finding. + FieldId *FieldId `protobuf:"bytes,2,opt,name=field_id,json=fieldId,proto3" json:"field_id,omitempty"` + // Location within a `ContentItem.Table`. + TableLocation *TableLocation `protobuf:"bytes,3,opt,name=table_location,json=tableLocation,proto3" json:"table_location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordLocation) Reset() { *m = RecordLocation{} } +func (m *RecordLocation) String() string { return proto.CompactTextString(m) } +func (*RecordLocation) ProtoMessage() {} +func (*RecordLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{13} +} +func (m *RecordLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordLocation.Unmarshal(m, b) +} +func (m *RecordLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordLocation.Marshal(b, m, deterministic) +} +func (dst *RecordLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordLocation.Merge(dst, src) +} +func (m *RecordLocation) XXX_Size() int { + return xxx_messageInfo_RecordLocation.Size(m) +} +func (m *RecordLocation) XXX_DiscardUnknown() { + xxx_messageInfo_RecordLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordLocation proto.InternalMessageInfo + +func (m *RecordLocation) GetRecordKey() *RecordKey { + if m != nil { + return m.RecordKey + } + return nil +} + +func (m *RecordLocation) GetFieldId() *FieldId { + if m != nil { + return m.FieldId + } + return nil +} + +func (m *RecordLocation) GetTableLocation() *TableLocation { + if m != nil { + return m.TableLocation + } + return nil +} + +// Location of a finding within a table. +type TableLocation struct { + // The zero-based index of the row where the finding is located. 
+ RowIndex int64 `protobuf:"varint,1,opt,name=row_index,json=rowIndex,proto3" json:"row_index,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TableLocation) Reset() { *m = TableLocation{} } +func (m *TableLocation) String() string { return proto.CompactTextString(m) } +func (*TableLocation) ProtoMessage() {} +func (*TableLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{14} +} +func (m *TableLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TableLocation.Unmarshal(m, b) +} +func (m *TableLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TableLocation.Marshal(b, m, deterministic) +} +func (dst *TableLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TableLocation.Merge(dst, src) +} +func (m *TableLocation) XXX_Size() int { + return xxx_messageInfo_TableLocation.Size(m) +} +func (m *TableLocation) XXX_DiscardUnknown() { + xxx_messageInfo_TableLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_TableLocation proto.InternalMessageInfo + +func (m *TableLocation) GetRowIndex() int64 { + if m != nil { + return m.RowIndex + } + return 0 +} + +// Generic half-open interval [start, end) +type Range struct { + // Index of the first character of the range (inclusive). + Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + // Index of the last character of the range (exclusive). + End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Range) Reset() { *m = Range{} } +func (m *Range) String() string { return proto.CompactTextString(m) } +func (*Range) ProtoMessage() {} +func (*Range) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{15} +} +func (m *Range) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Range.Unmarshal(m, b) +} +func (m *Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Range.Marshal(b, m, deterministic) +} +func (dst *Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Range.Merge(dst, src) +} +func (m *Range) XXX_Size() int { + return xxx_messageInfo_Range.Size(m) +} +func (m *Range) XXX_DiscardUnknown() { + xxx_messageInfo_Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Range proto.InternalMessageInfo + +func (m *Range) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Range) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +// Location of the finding within an image. +type ImageLocation struct { + // Bounding boxes locating the pixels within the image containing the finding. 
+ BoundingBoxes []*BoundingBox `protobuf:"bytes,1,rep,name=bounding_boxes,json=boundingBoxes,proto3" json:"bounding_boxes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ImageLocation) Reset() { *m = ImageLocation{} } +func (m *ImageLocation) String() string { return proto.CompactTextString(m) } +func (*ImageLocation) ProtoMessage() {} +func (*ImageLocation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{16} +} +func (m *ImageLocation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ImageLocation.Unmarshal(m, b) +} +func (m *ImageLocation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ImageLocation.Marshal(b, m, deterministic) +} +func (dst *ImageLocation) XXX_Merge(src proto.Message) { + xxx_messageInfo_ImageLocation.Merge(dst, src) +} +func (m *ImageLocation) XXX_Size() int { + return xxx_messageInfo_ImageLocation.Size(m) +} +func (m *ImageLocation) XXX_DiscardUnknown() { + xxx_messageInfo_ImageLocation.DiscardUnknown(m) +} + +var xxx_messageInfo_ImageLocation proto.InternalMessageInfo + +func (m *ImageLocation) GetBoundingBoxes() []*BoundingBox { + if m != nil { + return m.BoundingBoxes + } + return nil +} + +// Bounding box encompassing detected text within an image. +type BoundingBox struct { + // Top coordinate of the bounding box. (0,0) is upper left. + Top int32 `protobuf:"varint,1,opt,name=top,proto3" json:"top,omitempty"` + // Left coordinate of the bounding box. (0,0) is upper left. + Left int32 `protobuf:"varint,2,opt,name=left,proto3" json:"left,omitempty"` + // Width of the bounding box in pixels. + Width int32 `protobuf:"varint,3,opt,name=width,proto3" json:"width,omitempty"` + // Height of the bounding box in pixels. + Height int32 `protobuf:"varint,4,opt,name=height,proto3" json:"height,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoundingBox) Reset() { *m = BoundingBox{} } +func (m *BoundingBox) String() string { return proto.CompactTextString(m) } +func (*BoundingBox) ProtoMessage() {} +func (*BoundingBox) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{17} +} +func (m *BoundingBox) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoundingBox.Unmarshal(m, b) +} +func (m *BoundingBox) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoundingBox.Marshal(b, m, deterministic) +} +func (dst *BoundingBox) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoundingBox.Merge(dst, src) +} +func (m *BoundingBox) XXX_Size() int { + return xxx_messageInfo_BoundingBox.Size(m) +} +func (m *BoundingBox) XXX_DiscardUnknown() { + xxx_messageInfo_BoundingBox.DiscardUnknown(m) +} + +var xxx_messageInfo_BoundingBox proto.InternalMessageInfo + +func (m *BoundingBox) GetTop() int32 { + if m != nil { + return m.Top + } + return 0 +} + +func (m *BoundingBox) GetLeft() int32 { + if m != nil { + return m.Left + } + return 0 +} + +func (m *BoundingBox) GetWidth() int32 { + if m != nil { + return m.Width + } + return 0 +} + +func (m *BoundingBox) GetHeight() int32 { + if m != nil { + return m.Height + } + return 0 +} + +// Request to search for potentially sensitive info in an image and redact it +// by covering it with a colored rectangle. +type RedactImageRequest struct { + // The parent resource name, for example projects/my-project-id. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Configuration for the inspector. + InspectConfig *InspectConfig `protobuf:"bytes,2,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + // The configuration for specifying what content to redact from images. + ImageRedactionConfigs []*RedactImageRequest_ImageRedactionConfig `protobuf:"bytes,5,rep,name=image_redaction_configs,json=imageRedactionConfigs,proto3" json:"image_redaction_configs,omitempty"` + // Whether the response should include findings along with the redacted + // image. + IncludeFindings bool `protobuf:"varint,6,opt,name=include_findings,json=includeFindings,proto3" json:"include_findings,omitempty"` + // The content must be PNG, JPEG, SVG or BMP. + ByteItem *ByteContentItem `protobuf:"bytes,7,opt,name=byte_item,json=byteItem,proto3" json:"byte_item,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RedactImageRequest) Reset() { *m = RedactImageRequest{} } +func (m *RedactImageRequest) String() string { return proto.CompactTextString(m) } +func (*RedactImageRequest) ProtoMessage() {} +func (*RedactImageRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{18} +} +func (m *RedactImageRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RedactImageRequest.Unmarshal(m, b) +} +func (m *RedactImageRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RedactImageRequest.Marshal(b, m, deterministic) +} +func (dst *RedactImageRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedactImageRequest.Merge(dst, src) +} +func (m *RedactImageRequest) XXX_Size() int { + return xxx_messageInfo_RedactImageRequest.Size(m) +} +func (m *RedactImageRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RedactImageRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RedactImageRequest proto.InternalMessageInfo + +func (m *RedactImageRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *RedactImageRequest) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +func (m *RedactImageRequest) GetImageRedactionConfigs() []*RedactImageRequest_ImageRedactionConfig { + if m != nil { + return m.ImageRedactionConfigs + } + return nil +} + +func (m *RedactImageRequest) GetIncludeFindings() bool { + if m != nil { + return m.IncludeFindings + } + return false +} + +func (m *RedactImageRequest) GetByteItem() *ByteContentItem { + if m != nil { + return m.ByteItem + } + return nil +} + +// Configuration for determining how redaction of images should occur. +type RedactImageRequest_ImageRedactionConfig struct { + // Type of information to redact from images. + // + // Types that are valid to be assigned to Target: + // *RedactImageRequest_ImageRedactionConfig_InfoType + // *RedactImageRequest_ImageRedactionConfig_RedactAllText + Target isRedactImageRequest_ImageRedactionConfig_Target `protobuf_oneof:"target"` + // The color to use when redacting content from an image. If not specified, + // the default is black. 
+ RedactionColor *Color `protobuf:"bytes,3,opt,name=redaction_color,json=redactionColor,proto3" json:"redaction_color,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RedactImageRequest_ImageRedactionConfig) Reset() { + *m = RedactImageRequest_ImageRedactionConfig{} +} +func (m *RedactImageRequest_ImageRedactionConfig) String() string { return proto.CompactTextString(m) } +func (*RedactImageRequest_ImageRedactionConfig) ProtoMessage() {} +func (*RedactImageRequest_ImageRedactionConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{18, 0} +} +func (m *RedactImageRequest_ImageRedactionConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RedactImageRequest_ImageRedactionConfig.Unmarshal(m, b) +} +func (m *RedactImageRequest_ImageRedactionConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RedactImageRequest_ImageRedactionConfig.Marshal(b, m, deterministic) +} +func (dst *RedactImageRequest_ImageRedactionConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedactImageRequest_ImageRedactionConfig.Merge(dst, src) +} +func (m *RedactImageRequest_ImageRedactionConfig) XXX_Size() int { + return xxx_messageInfo_RedactImageRequest_ImageRedactionConfig.Size(m) +} +func (m *RedactImageRequest_ImageRedactionConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RedactImageRequest_ImageRedactionConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RedactImageRequest_ImageRedactionConfig proto.InternalMessageInfo + +type isRedactImageRequest_ImageRedactionConfig_Target interface { + isRedactImageRequest_ImageRedactionConfig_Target() +} + +type RedactImageRequest_ImageRedactionConfig_InfoType struct { + InfoType *InfoType `protobuf:"bytes,1,opt,name=info_type,json=infoType,proto3,oneof"` +} + +type RedactImageRequest_ImageRedactionConfig_RedactAllText struct { + RedactAllText bool `protobuf:"varint,2,opt,name=redact_all_text,json=redactAllText,proto3,oneof"` +} + +func (*RedactImageRequest_ImageRedactionConfig_InfoType) isRedactImageRequest_ImageRedactionConfig_Target() { +} + +func (*RedactImageRequest_ImageRedactionConfig_RedactAllText) isRedactImageRequest_ImageRedactionConfig_Target() { +} + +func (m *RedactImageRequest_ImageRedactionConfig) GetTarget() isRedactImageRequest_ImageRedactionConfig_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *RedactImageRequest_ImageRedactionConfig) GetInfoType() *InfoType { + if x, ok := m.GetTarget().(*RedactImageRequest_ImageRedactionConfig_InfoType); ok { + return x.InfoType + } + return nil +} + +func (m *RedactImageRequest_ImageRedactionConfig) GetRedactAllText() bool { + if x, ok := m.GetTarget().(*RedactImageRequest_ImageRedactionConfig_RedactAllText); ok { + return x.RedactAllText + } + return false +} + +func (m *RedactImageRequest_ImageRedactionConfig) GetRedactionColor() *Color { + if m != nil { + return m.RedactionColor + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
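+//
+// Illustrative note, not part of the generated code: the Target oneof above is
+// populated with exactly one of its wrapper types. Assuming the InfoType.Name
+// field defined earlier in this package, a sketch might look like:
+//
+//	byType := &RedactImageRequest_ImageRedactionConfig{
+//		Target: &RedactImageRequest_ImageRedactionConfig_InfoType{InfoType: &InfoType{Name: "EMAIL_ADDRESS"}},
+//	}
+//	allText := &RedactImageRequest_ImageRedactionConfig{
+//		Target: &RedactImageRequest_ImageRedactionConfig_RedactAllText{RedactAllText: true},
+//	}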
+func (*RedactImageRequest_ImageRedactionConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RedactImageRequest_ImageRedactionConfig_OneofMarshaler, _RedactImageRequest_ImageRedactionConfig_OneofUnmarshaler, _RedactImageRequest_ImageRedactionConfig_OneofSizer, []interface{}{ + (*RedactImageRequest_ImageRedactionConfig_InfoType)(nil), + (*RedactImageRequest_ImageRedactionConfig_RedactAllText)(nil), + } +} + +func _RedactImageRequest_ImageRedactionConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RedactImageRequest_ImageRedactionConfig) + // target + switch x := m.Target.(type) { + case *RedactImageRequest_ImageRedactionConfig_InfoType: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InfoType); err != nil { + return err + } + case *RedactImageRequest_ImageRedactionConfig_RedactAllText: + t := uint64(0) + if x.RedactAllText { + t = 1 + } + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("RedactImageRequest_ImageRedactionConfig.Target has unexpected type %T", x) + } + return nil +} + +func _RedactImageRequest_ImageRedactionConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RedactImageRequest_ImageRedactionConfig) + switch tag { + case 1: // target.info_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InfoType) + err := b.DecodeMessage(msg) + m.Target = &RedactImageRequest_ImageRedactionConfig_InfoType{msg} + return true, err + case 2: // target.redact_all_text + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Target = &RedactImageRequest_ImageRedactionConfig_RedactAllText{x != 0} + return true, err + default: + return false, nil + } +} + +func _RedactImageRequest_ImageRedactionConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RedactImageRequest_ImageRedactionConfig) + // target + switch x := m.Target.(type) { + case *RedactImageRequest_ImageRedactionConfig_InfoType: + s := proto.Size(x.InfoType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RedactImageRequest_ImageRedactionConfig_RedactAllText: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Represents a color in the RGB color space. +type Color struct { + // The amount of red in the color as a value in the interval [0, 1]. + Red float32 `protobuf:"fixed32,1,opt,name=red,proto3" json:"red,omitempty"` + // The amount of green in the color as a value in the interval [0, 1]. + Green float32 `protobuf:"fixed32,2,opt,name=green,proto3" json:"green,omitempty"` + // The amount of blue in the color as a value in the interval [0, 1]. 
+ Blue float32 `protobuf:"fixed32,3,opt,name=blue,proto3" json:"blue,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Color) Reset() { *m = Color{} } +func (m *Color) String() string { return proto.CompactTextString(m) } +func (*Color) ProtoMessage() {} +func (*Color) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{19} +} +func (m *Color) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Color.Unmarshal(m, b) +} +func (m *Color) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Color.Marshal(b, m, deterministic) +} +func (dst *Color) XXX_Merge(src proto.Message) { + xxx_messageInfo_Color.Merge(dst, src) +} +func (m *Color) XXX_Size() int { + return xxx_messageInfo_Color.Size(m) +} +func (m *Color) XXX_DiscardUnknown() { + xxx_messageInfo_Color.DiscardUnknown(m) +} + +var xxx_messageInfo_Color proto.InternalMessageInfo + +func (m *Color) GetRed() float32 { + if m != nil { + return m.Red + } + return 0 +} + +func (m *Color) GetGreen() float32 { + if m != nil { + return m.Green + } + return 0 +} + +func (m *Color) GetBlue() float32 { + if m != nil { + return m.Blue + } + return 0 +} + +// Results of redacting an image. +type RedactImageResponse struct { + // The redacted image. The type will be the same as the original image. + RedactedImage []byte `protobuf:"bytes,1,opt,name=redacted_image,json=redactedImage,proto3" json:"redacted_image,omitempty"` + // If an image was being inspected and the InspectConfig's include_quote was + // set to true, then this field will include all text, if any, that was found + // in the image. + ExtractedText string `protobuf:"bytes,2,opt,name=extracted_text,json=extractedText,proto3" json:"extracted_text,omitempty"` + // The findings. Populated when include_findings in the request is true. 
+ InspectResult *InspectResult `protobuf:"bytes,3,opt,name=inspect_result,json=inspectResult,proto3" json:"inspect_result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RedactImageResponse) Reset() { *m = RedactImageResponse{} } +func (m *RedactImageResponse) String() string { return proto.CompactTextString(m) } +func (*RedactImageResponse) ProtoMessage() {} +func (*RedactImageResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{20} +} +func (m *RedactImageResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RedactImageResponse.Unmarshal(m, b) +} +func (m *RedactImageResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RedactImageResponse.Marshal(b, m, deterministic) +} +func (dst *RedactImageResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedactImageResponse.Merge(dst, src) +} +func (m *RedactImageResponse) XXX_Size() int { + return xxx_messageInfo_RedactImageResponse.Size(m) +} +func (m *RedactImageResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RedactImageResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RedactImageResponse proto.InternalMessageInfo + +func (m *RedactImageResponse) GetRedactedImage() []byte { + if m != nil { + return m.RedactedImage + } + return nil +} + +func (m *RedactImageResponse) GetExtractedText() string { + if m != nil { + return m.ExtractedText + } + return "" +} + +func (m *RedactImageResponse) GetInspectResult() *InspectResult { + if m != nil { + return m.InspectResult + } + return nil +} + +// Request to de-identify a list of items. +type DeidentifyContentRequest struct { + // The parent resource name, for example projects/my-project-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Configuration for the de-identification of the content item. + // Items specified here will override the template referenced by the + // deidentify_template_name argument. + DeidentifyConfig *DeidentifyConfig `protobuf:"bytes,2,opt,name=deidentify_config,json=deidentifyConfig,proto3" json:"deidentify_config,omitempty"` + // Configuration for the inspector. + // Items specified here will override the template referenced by the + // inspect_template_name argument. + InspectConfig *InspectConfig `protobuf:"bytes,3,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + // The item to de-identify. Will be treated as text. + Item *ContentItem `protobuf:"bytes,4,opt,name=item,proto3" json:"item,omitempty"` + // Optional template to use. Any configuration directly specified in + // inspect_config will override those set in the template. Singular fields + // that are set in this request will replace their corresponding fields in the + // template. Repeated fields are appended. Singular sub-messages and groups + // are recursively merged. + InspectTemplateName string `protobuf:"bytes,5,opt,name=inspect_template_name,json=inspectTemplateName,proto3" json:"inspect_template_name,omitempty"` + // Optional template to use. Any configuration directly specified in + // deidentify_config will override those set in the template. Singular fields + // that are set in this request will replace their corresponding fields in the + // template. Repeated fields are appended. Singular sub-messages and groups + // are recursively merged. 
+ DeidentifyTemplateName string `protobuf:"bytes,6,opt,name=deidentify_template_name,json=deidentifyTemplateName,proto3" json:"deidentify_template_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeidentifyContentRequest) Reset() { *m = DeidentifyContentRequest{} } +func (m *DeidentifyContentRequest) String() string { return proto.CompactTextString(m) } +func (*DeidentifyContentRequest) ProtoMessage() {} +func (*DeidentifyContentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{21} +} +func (m *DeidentifyContentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeidentifyContentRequest.Unmarshal(m, b) +} +func (m *DeidentifyContentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeidentifyContentRequest.Marshal(b, m, deterministic) +} +func (dst *DeidentifyContentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeidentifyContentRequest.Merge(dst, src) +} +func (m *DeidentifyContentRequest) XXX_Size() int { + return xxx_messageInfo_DeidentifyContentRequest.Size(m) +} +func (m *DeidentifyContentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeidentifyContentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeidentifyContentRequest proto.InternalMessageInfo + +func (m *DeidentifyContentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *DeidentifyContentRequest) GetDeidentifyConfig() *DeidentifyConfig { + if m != nil { + return m.DeidentifyConfig + } + return nil +} + +func (m *DeidentifyContentRequest) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +func (m *DeidentifyContentRequest) GetItem() *ContentItem { + if m != nil { + return m.Item + } + return nil +} + +func (m *DeidentifyContentRequest) GetInspectTemplateName() string { + if m != nil { + return m.InspectTemplateName + } + return "" +} + +func (m *DeidentifyContentRequest) GetDeidentifyTemplateName() string { + if m != nil { + return m.DeidentifyTemplateName + } + return "" +} + +// Results of de-identifying a ContentItem. +type DeidentifyContentResponse struct { + // The de-identified item. + Item *ContentItem `protobuf:"bytes,1,opt,name=item,proto3" json:"item,omitempty"` + // An overview of the changes that were made on the `item`. 
+ Overview *TransformationOverview `protobuf:"bytes,2,opt,name=overview,proto3" json:"overview,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeidentifyContentResponse) Reset() { *m = DeidentifyContentResponse{} } +func (m *DeidentifyContentResponse) String() string { return proto.CompactTextString(m) } +func (*DeidentifyContentResponse) ProtoMessage() {} +func (*DeidentifyContentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{22} +} +func (m *DeidentifyContentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeidentifyContentResponse.Unmarshal(m, b) +} +func (m *DeidentifyContentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeidentifyContentResponse.Marshal(b, m, deterministic) +} +func (dst *DeidentifyContentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeidentifyContentResponse.Merge(dst, src) +} +func (m *DeidentifyContentResponse) XXX_Size() int { + return xxx_messageInfo_DeidentifyContentResponse.Size(m) +} +func (m *DeidentifyContentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeidentifyContentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeidentifyContentResponse proto.InternalMessageInfo + +func (m *DeidentifyContentResponse) GetItem() *ContentItem { + if m != nil { + return m.Item + } + return nil +} + +func (m *DeidentifyContentResponse) GetOverview() *TransformationOverview { + if m != nil { + return m.Overview + } + return nil +} + +// Request to re-identify an item. +type ReidentifyContentRequest struct { + // The parent resource name. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Configuration for the re-identification of the content item. + // This field shares the same proto message type that is used for + // de-identification, however its usage here is for the reversal of the + // previous de-identification. Re-identification is performed by examining + // the transformations used to de-identify the items and executing the + // reverse. This requires that only reversible transformations + // be provided here. The reversible transformations are: + // + // - `CryptoReplaceFfxFpeConfig` + ReidentifyConfig *DeidentifyConfig `protobuf:"bytes,2,opt,name=reidentify_config,json=reidentifyConfig,proto3" json:"reidentify_config,omitempty"` + // Configuration for the inspector. + InspectConfig *InspectConfig `protobuf:"bytes,3,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + // The item to re-identify. Will be treated as text. + Item *ContentItem `protobuf:"bytes,4,opt,name=item,proto3" json:"item,omitempty"` + // Optional template to use. Any configuration directly specified in + // `inspect_config` will override those set in the template. Singular fields + // that are set in this request will replace their corresponding fields in the + // template. Repeated fields are appended. Singular sub-messages and groups + // are recursively merged. + InspectTemplateName string `protobuf:"bytes,5,opt,name=inspect_template_name,json=inspectTemplateName,proto3" json:"inspect_template_name,omitempty"` + // Optional template to use. References an instance of `DeidentifyTemplate`. + // Any configuration directly specified in `reidentify_config` or + // `inspect_config` will override those set in the template. 
Singular fields + // that are set in this request will replace their corresponding fields in the + // template. Repeated fields are appended. Singular sub-messages and groups + // are recursively merged. + ReidentifyTemplateName string `protobuf:"bytes,6,opt,name=reidentify_template_name,json=reidentifyTemplateName,proto3" json:"reidentify_template_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReidentifyContentRequest) Reset() { *m = ReidentifyContentRequest{} } +func (m *ReidentifyContentRequest) String() string { return proto.CompactTextString(m) } +func (*ReidentifyContentRequest) ProtoMessage() {} +func (*ReidentifyContentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{23} +} +func (m *ReidentifyContentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReidentifyContentRequest.Unmarshal(m, b) +} +func (m *ReidentifyContentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReidentifyContentRequest.Marshal(b, m, deterministic) +} +func (dst *ReidentifyContentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReidentifyContentRequest.Merge(dst, src) +} +func (m *ReidentifyContentRequest) XXX_Size() int { + return xxx_messageInfo_ReidentifyContentRequest.Size(m) +} +func (m *ReidentifyContentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReidentifyContentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReidentifyContentRequest proto.InternalMessageInfo + +func (m *ReidentifyContentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ReidentifyContentRequest) GetReidentifyConfig() *DeidentifyConfig { + if m != nil { + return m.ReidentifyConfig + } + return nil +} + +func (m *ReidentifyContentRequest) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +func (m *ReidentifyContentRequest) GetItem() *ContentItem { + if m != nil { + return m.Item + } + return nil +} + +func (m *ReidentifyContentRequest) GetInspectTemplateName() string { + if m != nil { + return m.InspectTemplateName + } + return "" +} + +func (m *ReidentifyContentRequest) GetReidentifyTemplateName() string { + if m != nil { + return m.ReidentifyTemplateName + } + return "" +} + +// Results of re-identifying a item. +type ReidentifyContentResponse struct { + // The re-identified item. + Item *ContentItem `protobuf:"bytes,1,opt,name=item,proto3" json:"item,omitempty"` + // An overview of the changes that were made to the `item`. 
+ Overview *TransformationOverview `protobuf:"bytes,2,opt,name=overview,proto3" json:"overview,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReidentifyContentResponse) Reset() { *m = ReidentifyContentResponse{} } +func (m *ReidentifyContentResponse) String() string { return proto.CompactTextString(m) } +func (*ReidentifyContentResponse) ProtoMessage() {} +func (*ReidentifyContentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{24} +} +func (m *ReidentifyContentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReidentifyContentResponse.Unmarshal(m, b) +} +func (m *ReidentifyContentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReidentifyContentResponse.Marshal(b, m, deterministic) +} +func (dst *ReidentifyContentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReidentifyContentResponse.Merge(dst, src) +} +func (m *ReidentifyContentResponse) XXX_Size() int { + return xxx_messageInfo_ReidentifyContentResponse.Size(m) +} +func (m *ReidentifyContentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ReidentifyContentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ReidentifyContentResponse proto.InternalMessageInfo + +func (m *ReidentifyContentResponse) GetItem() *ContentItem { + if m != nil { + return m.Item + } + return nil +} + +func (m *ReidentifyContentResponse) GetOverview() *TransformationOverview { + if m != nil { + return m.Overview + } + return nil +} + +// Request to search for potentially sensitive info in a ContentItem. +type InspectContentRequest struct { + // The parent resource name, for example projects/my-project-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Configuration for the inspector. What specified here will override + // the template referenced by the inspect_template_name argument. + InspectConfig *InspectConfig `protobuf:"bytes,2,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + // The item to inspect. + Item *ContentItem `protobuf:"bytes,3,opt,name=item,proto3" json:"item,omitempty"` + // Optional template to use. Any configuration directly specified in + // inspect_config will override those set in the template. Singular fields + // that are set in this request will replace their corresponding fields in the + // template. Repeated fields are appended. Singular sub-messages and groups + // are recursively merged. 
+ InspectTemplateName string `protobuf:"bytes,4,opt,name=inspect_template_name,json=inspectTemplateName,proto3" json:"inspect_template_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectContentRequest) Reset() { *m = InspectContentRequest{} } +func (m *InspectContentRequest) String() string { return proto.CompactTextString(m) } +func (*InspectContentRequest) ProtoMessage() {} +func (*InspectContentRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{25} +} +func (m *InspectContentRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectContentRequest.Unmarshal(m, b) +} +func (m *InspectContentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectContentRequest.Marshal(b, m, deterministic) +} +func (dst *InspectContentRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectContentRequest.Merge(dst, src) +} +func (m *InspectContentRequest) XXX_Size() int { + return xxx_messageInfo_InspectContentRequest.Size(m) +} +func (m *InspectContentRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InspectContentRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectContentRequest proto.InternalMessageInfo + +func (m *InspectContentRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *InspectContentRequest) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +func (m *InspectContentRequest) GetItem() *ContentItem { + if m != nil { + return m.Item + } + return nil +} + +func (m *InspectContentRequest) GetInspectTemplateName() string { + if m != nil { + return m.InspectTemplateName + } + return "" +} + +// Results of inspecting an item. +type InspectContentResponse struct { + // The findings. + Result *InspectResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectContentResponse) Reset() { *m = InspectContentResponse{} } +func (m *InspectContentResponse) String() string { return proto.CompactTextString(m) } +func (*InspectContentResponse) ProtoMessage() {} +func (*InspectContentResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{26} +} +func (m *InspectContentResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectContentResponse.Unmarshal(m, b) +} +func (m *InspectContentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectContentResponse.Marshal(b, m, deterministic) +} +func (dst *InspectContentResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectContentResponse.Merge(dst, src) +} +func (m *InspectContentResponse) XXX_Size() int { + return xxx_messageInfo_InspectContentResponse.Size(m) +} +func (m *InspectContentResponse) XXX_DiscardUnknown() { + xxx_messageInfo_InspectContentResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectContentResponse proto.InternalMessageInfo + +func (m *InspectContentResponse) GetResult() *InspectResult { + if m != nil { + return m.Result + } + return nil +} + +// Cloud repository for storing output. 
+type OutputStorageConfig struct { + // Types that are valid to be assigned to Type: + // *OutputStorageConfig_Table + Type isOutputStorageConfig_Type `protobuf_oneof:"type"` + // Schema used for writing the findings for Inspect jobs. This field is only + // used for Inspect and must be unspecified for Risk jobs. Columns are derived + // from the `Finding` object. If appending to an existing table, any columns + // from the predefined schema that are missing will be added. No columns in + // the existing table will be deleted. + // + // If unspecified, then all available columns will be used for a new table or + // an (existing) table with no schema, and no changes will be made to an + // existing table that has a schema. + OutputSchema OutputStorageConfig_OutputSchema `protobuf:"varint,3,opt,name=output_schema,json=outputSchema,proto3,enum=google.privacy.dlp.v2.OutputStorageConfig_OutputSchema" json:"output_schema,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputStorageConfig) Reset() { *m = OutputStorageConfig{} } +func (m *OutputStorageConfig) String() string { return proto.CompactTextString(m) } +func (*OutputStorageConfig) ProtoMessage() {} +func (*OutputStorageConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{27} +} +func (m *OutputStorageConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputStorageConfig.Unmarshal(m, b) +} +func (m *OutputStorageConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputStorageConfig.Marshal(b, m, deterministic) +} +func (dst *OutputStorageConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputStorageConfig.Merge(dst, src) +} +func (m *OutputStorageConfig) XXX_Size() int { + return xxx_messageInfo_OutputStorageConfig.Size(m) +} +func (m *OutputStorageConfig) XXX_DiscardUnknown() { + xxx_messageInfo_OutputStorageConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputStorageConfig proto.InternalMessageInfo + +type isOutputStorageConfig_Type interface { + isOutputStorageConfig_Type() +} + +type OutputStorageConfig_Table struct { + Table *BigQueryTable `protobuf:"bytes,1,opt,name=table,proto3,oneof"` +} + +func (*OutputStorageConfig_Table) isOutputStorageConfig_Type() {} + +func (m *OutputStorageConfig) GetType() isOutputStorageConfig_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *OutputStorageConfig) GetTable() *BigQueryTable { + if x, ok := m.GetType().(*OutputStorageConfig_Table); ok { + return x.Table + } + return nil +} + +func (m *OutputStorageConfig) GetOutputSchema() OutputStorageConfig_OutputSchema { + if m != nil { + return m.OutputSchema + } + return OutputStorageConfig_OUTPUT_SCHEMA_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*OutputStorageConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _OutputStorageConfig_OneofMarshaler, _OutputStorageConfig_OneofUnmarshaler, _OutputStorageConfig_OneofSizer, []interface{}{ + (*OutputStorageConfig_Table)(nil), + } +} + +func _OutputStorageConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*OutputStorageConfig) + // type + switch x := m.Type.(type) { + case *OutputStorageConfig_Table: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Table); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("OutputStorageConfig.Type has unexpected type %T", x) + } + return nil +} + +func _OutputStorageConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*OutputStorageConfig) + switch tag { + case 1: // type.table + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryTable) + err := b.DecodeMessage(msg) + m.Type = &OutputStorageConfig_Table{msg} + return true, err + default: + return false, nil + } +} + +func _OutputStorageConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*OutputStorageConfig) + // type + switch x := m.Type.(type) { + case *OutputStorageConfig_Table: + s := proto.Size(x.Table) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Statistics regarding a specific InfoType. +type InfoTypeStats struct { + // The type of finding this stat is for. + InfoType *InfoType `protobuf:"bytes,1,opt,name=info_type,json=infoType,proto3" json:"info_type,omitempty"` + // Number of findings for this infoType. + Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InfoTypeStats) Reset() { *m = InfoTypeStats{} } +func (m *InfoTypeStats) String() string { return proto.CompactTextString(m) } +func (*InfoTypeStats) ProtoMessage() {} +func (*InfoTypeStats) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{28} +} +func (m *InfoTypeStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InfoTypeStats.Unmarshal(m, b) +} +func (m *InfoTypeStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InfoTypeStats.Marshal(b, m, deterministic) +} +func (dst *InfoTypeStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_InfoTypeStats.Merge(dst, src) +} +func (m *InfoTypeStats) XXX_Size() int { + return xxx_messageInfo_InfoTypeStats.Size(m) +} +func (m *InfoTypeStats) XXX_DiscardUnknown() { + xxx_messageInfo_InfoTypeStats.DiscardUnknown(m) +} + +var xxx_messageInfo_InfoTypeStats proto.InternalMessageInfo + +func (m *InfoTypeStats) GetInfoType() *InfoType { + if m != nil { + return m.InfoType + } + return nil +} + +func (m *InfoTypeStats) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// The results of an inspect DataSource job. +type InspectDataSourceDetails struct { + // The configuration used for this job. 
+ RequestedOptions *InspectDataSourceDetails_RequestedOptions `protobuf:"bytes,2,opt,name=requested_options,json=requestedOptions,proto3" json:"requested_options,omitempty"` + // A summary of the outcome of this inspect job. + Result *InspectDataSourceDetails_Result `protobuf:"bytes,3,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectDataSourceDetails) Reset() { *m = InspectDataSourceDetails{} } +func (m *InspectDataSourceDetails) String() string { return proto.CompactTextString(m) } +func (*InspectDataSourceDetails) ProtoMessage() {} +func (*InspectDataSourceDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{29} +} +func (m *InspectDataSourceDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectDataSourceDetails.Unmarshal(m, b) +} +func (m *InspectDataSourceDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectDataSourceDetails.Marshal(b, m, deterministic) +} +func (dst *InspectDataSourceDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectDataSourceDetails.Merge(dst, src) +} +func (m *InspectDataSourceDetails) XXX_Size() int { + return xxx_messageInfo_InspectDataSourceDetails.Size(m) +} +func (m *InspectDataSourceDetails) XXX_DiscardUnknown() { + xxx_messageInfo_InspectDataSourceDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectDataSourceDetails proto.InternalMessageInfo + +func (m *InspectDataSourceDetails) GetRequestedOptions() *InspectDataSourceDetails_RequestedOptions { + if m != nil { + return m.RequestedOptions + } + return nil +} + +func (m *InspectDataSourceDetails) GetResult() *InspectDataSourceDetails_Result { + if m != nil { + return m.Result + } + return nil +} + +type InspectDataSourceDetails_RequestedOptions struct { + // If run with an InspectTemplate, a snapshot of its state at the time of + // this run. 
+ SnapshotInspectTemplate *InspectTemplate `protobuf:"bytes,1,opt,name=snapshot_inspect_template,json=snapshotInspectTemplate,proto3" json:"snapshot_inspect_template,omitempty"` + JobConfig *InspectJobConfig `protobuf:"bytes,3,opt,name=job_config,json=jobConfig,proto3" json:"job_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectDataSourceDetails_RequestedOptions) Reset() { + *m = InspectDataSourceDetails_RequestedOptions{} +} +func (m *InspectDataSourceDetails_RequestedOptions) String() string { return proto.CompactTextString(m) } +func (*InspectDataSourceDetails_RequestedOptions) ProtoMessage() {} +func (*InspectDataSourceDetails_RequestedOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{29, 0} +} +func (m *InspectDataSourceDetails_RequestedOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectDataSourceDetails_RequestedOptions.Unmarshal(m, b) +} +func (m *InspectDataSourceDetails_RequestedOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectDataSourceDetails_RequestedOptions.Marshal(b, m, deterministic) +} +func (dst *InspectDataSourceDetails_RequestedOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectDataSourceDetails_RequestedOptions.Merge(dst, src) +} +func (m *InspectDataSourceDetails_RequestedOptions) XXX_Size() int { + return xxx_messageInfo_InspectDataSourceDetails_RequestedOptions.Size(m) +} +func (m *InspectDataSourceDetails_RequestedOptions) XXX_DiscardUnknown() { + xxx_messageInfo_InspectDataSourceDetails_RequestedOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectDataSourceDetails_RequestedOptions proto.InternalMessageInfo + +func (m *InspectDataSourceDetails_RequestedOptions) GetSnapshotInspectTemplate() *InspectTemplate { + if m != nil { + return m.SnapshotInspectTemplate + } + return nil +} + +func (m *InspectDataSourceDetails_RequestedOptions) GetJobConfig() *InspectJobConfig { + if m != nil { + return m.JobConfig + } + return nil +} + +// All result fields mentioned below are updated while the job is processing. +type InspectDataSourceDetails_Result struct { + // Total size in bytes that were processed. + ProcessedBytes int64 `protobuf:"varint,1,opt,name=processed_bytes,json=processedBytes,proto3" json:"processed_bytes,omitempty"` + // Estimate of the number of bytes to process. + TotalEstimatedBytes int64 `protobuf:"varint,2,opt,name=total_estimated_bytes,json=totalEstimatedBytes,proto3" json:"total_estimated_bytes,omitempty"` + // Statistics of how many instances of each info type were found during + // inspect job. 
+ InfoTypeStats []*InfoTypeStats `protobuf:"bytes,3,rep,name=info_type_stats,json=infoTypeStats,proto3" json:"info_type_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectDataSourceDetails_Result) Reset() { *m = InspectDataSourceDetails_Result{} } +func (m *InspectDataSourceDetails_Result) String() string { return proto.CompactTextString(m) } +func (*InspectDataSourceDetails_Result) ProtoMessage() {} +func (*InspectDataSourceDetails_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{29, 1} +} +func (m *InspectDataSourceDetails_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectDataSourceDetails_Result.Unmarshal(m, b) +} +func (m *InspectDataSourceDetails_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectDataSourceDetails_Result.Marshal(b, m, deterministic) +} +func (dst *InspectDataSourceDetails_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectDataSourceDetails_Result.Merge(dst, src) +} +func (m *InspectDataSourceDetails_Result) XXX_Size() int { + return xxx_messageInfo_InspectDataSourceDetails_Result.Size(m) +} +func (m *InspectDataSourceDetails_Result) XXX_DiscardUnknown() { + xxx_messageInfo_InspectDataSourceDetails_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectDataSourceDetails_Result proto.InternalMessageInfo + +func (m *InspectDataSourceDetails_Result) GetProcessedBytes() int64 { + if m != nil { + return m.ProcessedBytes + } + return 0 +} + +func (m *InspectDataSourceDetails_Result) GetTotalEstimatedBytes() int64 { + if m != nil { + return m.TotalEstimatedBytes + } + return 0 +} + +func (m *InspectDataSourceDetails_Result) GetInfoTypeStats() []*InfoTypeStats { + if m != nil { + return m.InfoTypeStats + } + return nil +} + +// InfoType description. +type InfoTypeDescription struct { + // Internal name of the infoType. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Human readable form of the infoType name. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Which parts of the API supports this InfoType. + SupportedBy []InfoTypeSupportedBy `protobuf:"varint,3,rep,packed,name=supported_by,json=supportedBy,proto3,enum=google.privacy.dlp.v2.InfoTypeSupportedBy" json:"supported_by,omitempty"` + // Description of the infotype. Translated when language is provided in the + // request. 
+ Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InfoTypeDescription) Reset() { *m = InfoTypeDescription{} } +func (m *InfoTypeDescription) String() string { return proto.CompactTextString(m) } +func (*InfoTypeDescription) ProtoMessage() {} +func (*InfoTypeDescription) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{30} +} +func (m *InfoTypeDescription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InfoTypeDescription.Unmarshal(m, b) +} +func (m *InfoTypeDescription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InfoTypeDescription.Marshal(b, m, deterministic) +} +func (dst *InfoTypeDescription) XXX_Merge(src proto.Message) { + xxx_messageInfo_InfoTypeDescription.Merge(dst, src) +} +func (m *InfoTypeDescription) XXX_Size() int { + return xxx_messageInfo_InfoTypeDescription.Size(m) +} +func (m *InfoTypeDescription) XXX_DiscardUnknown() { + xxx_messageInfo_InfoTypeDescription.DiscardUnknown(m) +} + +var xxx_messageInfo_InfoTypeDescription proto.InternalMessageInfo + +func (m *InfoTypeDescription) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InfoTypeDescription) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *InfoTypeDescription) GetSupportedBy() []InfoTypeSupportedBy { + if m != nil { + return m.SupportedBy + } + return nil +} + +func (m *InfoTypeDescription) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Request for the list of infoTypes. +type ListInfoTypesRequest struct { + // Optional BCP-47 language code for localized infoType friendly + // names. If omitted, or if localized strings are not available, + // en-US strings will be returned. + LanguageCode string `protobuf:"bytes,1,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional filter to only return infoTypes supported by certain parts of the + // API. Defaults to supported_by=INSPECT. 
+ Filter string `protobuf:"bytes,2,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInfoTypesRequest) Reset() { *m = ListInfoTypesRequest{} } +func (m *ListInfoTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInfoTypesRequest) ProtoMessage() {} +func (*ListInfoTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{31} +} +func (m *ListInfoTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInfoTypesRequest.Unmarshal(m, b) +} +func (m *ListInfoTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInfoTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInfoTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInfoTypesRequest.Merge(dst, src) +} +func (m *ListInfoTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListInfoTypesRequest.Size(m) +} +func (m *ListInfoTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInfoTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInfoTypesRequest proto.InternalMessageInfo + +func (m *ListInfoTypesRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *ListInfoTypesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response to the ListInfoTypes request. +type ListInfoTypesResponse struct { + // Set of sensitive infoTypes. + InfoTypes []*InfoTypeDescription `protobuf:"bytes,1,rep,name=info_types,json=infoTypes,proto3" json:"info_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInfoTypesResponse) Reset() { *m = ListInfoTypesResponse{} } +func (m *ListInfoTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInfoTypesResponse) ProtoMessage() {} +func (*ListInfoTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{32} +} +func (m *ListInfoTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInfoTypesResponse.Unmarshal(m, b) +} +func (m *ListInfoTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInfoTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInfoTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInfoTypesResponse.Merge(dst, src) +} +func (m *ListInfoTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListInfoTypesResponse.Size(m) +} +func (m *ListInfoTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInfoTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInfoTypesResponse proto.InternalMessageInfo + +func (m *ListInfoTypesResponse) GetInfoTypes() []*InfoTypeDescription { + if m != nil { + return m.InfoTypes + } + return nil +} + +// Configuration for a risk analysis job. See +// https://cloud.google.com/dlp/docs/concepts-risk-analysis to learn more. +type RiskAnalysisJobConfig struct { + // Privacy metric to compute. + PrivacyMetric *PrivacyMetric `protobuf:"bytes,1,opt,name=privacy_metric,json=privacyMetric,proto3" json:"privacy_metric,omitempty"` + // Input dataset to compute metrics over. 
+ SourceTable *BigQueryTable `protobuf:"bytes,2,opt,name=source_table,json=sourceTable,proto3" json:"source_table,omitempty"` + // Actions to execute at the completion of the job. Are executed in the order + // provided. + Actions []*Action `protobuf:"bytes,3,rep,name=actions,proto3" json:"actions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RiskAnalysisJobConfig) Reset() { *m = RiskAnalysisJobConfig{} } +func (m *RiskAnalysisJobConfig) String() string { return proto.CompactTextString(m) } +func (*RiskAnalysisJobConfig) ProtoMessage() {} +func (*RiskAnalysisJobConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{33} +} +func (m *RiskAnalysisJobConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RiskAnalysisJobConfig.Unmarshal(m, b) +} +func (m *RiskAnalysisJobConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RiskAnalysisJobConfig.Marshal(b, m, deterministic) +} +func (dst *RiskAnalysisJobConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RiskAnalysisJobConfig.Merge(dst, src) +} +func (m *RiskAnalysisJobConfig) XXX_Size() int { + return xxx_messageInfo_RiskAnalysisJobConfig.Size(m) +} +func (m *RiskAnalysisJobConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RiskAnalysisJobConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RiskAnalysisJobConfig proto.InternalMessageInfo + +func (m *RiskAnalysisJobConfig) GetPrivacyMetric() *PrivacyMetric { + if m != nil { + return m.PrivacyMetric + } + return nil +} + +func (m *RiskAnalysisJobConfig) GetSourceTable() *BigQueryTable { + if m != nil { + return m.SourceTable + } + return nil +} + +func (m *RiskAnalysisJobConfig) GetActions() []*Action { + if m != nil { + return m.Actions + } + return nil +} + +// A column with a semantic tag attached. +type QuasiId struct { + // Identifies the column. [required] + Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // Semantic tag that identifies what a column contains, to determine which + // statistical model to use to estimate the reidentifiability of each + // value. 
[required] + // + // Types that are valid to be assigned to Tag: + // *QuasiId_InfoType + // *QuasiId_CustomTag + // *QuasiId_Inferred + Tag isQuasiId_Tag `protobuf_oneof:"tag"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuasiId) Reset() { *m = QuasiId{} } +func (m *QuasiId) String() string { return proto.CompactTextString(m) } +func (*QuasiId) ProtoMessage() {} +func (*QuasiId) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{34} +} +func (m *QuasiId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuasiId.Unmarshal(m, b) +} +func (m *QuasiId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuasiId.Marshal(b, m, deterministic) +} +func (dst *QuasiId) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuasiId.Merge(dst, src) +} +func (m *QuasiId) XXX_Size() int { + return xxx_messageInfo_QuasiId.Size(m) +} +func (m *QuasiId) XXX_DiscardUnknown() { + xxx_messageInfo_QuasiId.DiscardUnknown(m) +} + +var xxx_messageInfo_QuasiId proto.InternalMessageInfo + +func (m *QuasiId) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +type isQuasiId_Tag interface { + isQuasiId_Tag() +} + +type QuasiId_InfoType struct { + InfoType *InfoType `protobuf:"bytes,2,opt,name=info_type,json=infoType,proto3,oneof"` +} + +type QuasiId_CustomTag struct { + CustomTag string `protobuf:"bytes,3,opt,name=custom_tag,json=customTag,proto3,oneof"` +} + +type QuasiId_Inferred struct { + Inferred *empty.Empty `protobuf:"bytes,4,opt,name=inferred,proto3,oneof"` +} + +func (*QuasiId_InfoType) isQuasiId_Tag() {} + +func (*QuasiId_CustomTag) isQuasiId_Tag() {} + +func (*QuasiId_Inferred) isQuasiId_Tag() {} + +func (m *QuasiId) GetTag() isQuasiId_Tag { + if m != nil { + return m.Tag + } + return nil +} + +func (m *QuasiId) GetInfoType() *InfoType { + if x, ok := m.GetTag().(*QuasiId_InfoType); ok { + return x.InfoType + } + return nil +} + +func (m *QuasiId) GetCustomTag() string { + if x, ok := m.GetTag().(*QuasiId_CustomTag); ok { + return x.CustomTag + } + return "" +} + +func (m *QuasiId) GetInferred() *empty.Empty { + if x, ok := m.GetTag().(*QuasiId_Inferred); ok { + return x.Inferred + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*QuasiId) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _QuasiId_OneofMarshaler, _QuasiId_OneofUnmarshaler, _QuasiId_OneofSizer, []interface{}{ + (*QuasiId_InfoType)(nil), + (*QuasiId_CustomTag)(nil), + (*QuasiId_Inferred)(nil), + } +} + +func _QuasiId_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*QuasiId) + // tag + switch x := m.Tag.(type) { + case *QuasiId_InfoType: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InfoType); err != nil { + return err + } + case *QuasiId_CustomTag: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CustomTag) + case *QuasiId_Inferred: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Inferred); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("QuasiId.Tag has unexpected type %T", x) + } + return nil +} + +func _QuasiId_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*QuasiId) + switch tag { + case 2: // tag.info_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InfoType) + err := b.DecodeMessage(msg) + m.Tag = &QuasiId_InfoType{msg} + return true, err + case 3: // tag.custom_tag + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Tag = &QuasiId_CustomTag{x} + return true, err + case 4: // tag.inferred + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.Tag = &QuasiId_Inferred{msg} + return true, err + default: + return false, nil + } +} + +func _QuasiId_OneofSizer(msg proto.Message) (n int) { + m := msg.(*QuasiId) + // tag + switch x := m.Tag.(type) { + case *QuasiId_InfoType: + s := proto.Size(x.InfoType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *QuasiId_CustomTag: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CustomTag))) + n += len(x.CustomTag) + case *QuasiId_Inferred: + s := proto.Size(x.Inferred) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An auxiliary table containing statistical information on the relative +// frequency of different quasi-identifiers values. It has one or several +// quasi-identifiers columns, and one column that indicates the relative +// frequency of each quasi-identifier tuple. +// If a tuple is present in the data but not in the auxiliary table, the +// corresponding relative frequency is assumed to be zero (and thus, the +// tuple is highly reidentifiable). +type StatisticalTable struct { + // Auxiliary table location. [required] + Table *BigQueryTable `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // Quasi-identifier columns. [required] + QuasiIds []*StatisticalTable_QuasiIdentifierField `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // The relative frequency column must contain a floating-point number + // between 0 and 1 (inclusive). Null values are assumed to be zero. 
+ // [required] + RelativeFrequency *FieldId `protobuf:"bytes,2,opt,name=relative_frequency,json=relativeFrequency,proto3" json:"relative_frequency,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StatisticalTable) Reset() { *m = StatisticalTable{} } +func (m *StatisticalTable) String() string { return proto.CompactTextString(m) } +func (*StatisticalTable) ProtoMessage() {} +func (*StatisticalTable) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{35} +} +func (m *StatisticalTable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StatisticalTable.Unmarshal(m, b) +} +func (m *StatisticalTable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StatisticalTable.Marshal(b, m, deterministic) +} +func (dst *StatisticalTable) XXX_Merge(src proto.Message) { + xxx_messageInfo_StatisticalTable.Merge(dst, src) +} +func (m *StatisticalTable) XXX_Size() int { + return xxx_messageInfo_StatisticalTable.Size(m) +} +func (m *StatisticalTable) XXX_DiscardUnknown() { + xxx_messageInfo_StatisticalTable.DiscardUnknown(m) +} + +var xxx_messageInfo_StatisticalTable proto.InternalMessageInfo + +func (m *StatisticalTable) GetTable() *BigQueryTable { + if m != nil { + return m.Table + } + return nil +} + +func (m *StatisticalTable) GetQuasiIds() []*StatisticalTable_QuasiIdentifierField { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *StatisticalTable) GetRelativeFrequency() *FieldId { + if m != nil { + return m.RelativeFrequency + } + return nil +} + +// A quasi-identifier column has a custom_tag, used to know which column +// in the data corresponds to which column in the statistical model. 
+type StatisticalTable_QuasiIdentifierField struct { + Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + CustomTag string `protobuf:"bytes,2,opt,name=custom_tag,json=customTag,proto3" json:"custom_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StatisticalTable_QuasiIdentifierField) Reset() { *m = StatisticalTable_QuasiIdentifierField{} } +func (m *StatisticalTable_QuasiIdentifierField) String() string { return proto.CompactTextString(m) } +func (*StatisticalTable_QuasiIdentifierField) ProtoMessage() {} +func (*StatisticalTable_QuasiIdentifierField) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{35, 0} +} +func (m *StatisticalTable_QuasiIdentifierField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StatisticalTable_QuasiIdentifierField.Unmarshal(m, b) +} +func (m *StatisticalTable_QuasiIdentifierField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StatisticalTable_QuasiIdentifierField.Marshal(b, m, deterministic) +} +func (dst *StatisticalTable_QuasiIdentifierField) XXX_Merge(src proto.Message) { + xxx_messageInfo_StatisticalTable_QuasiIdentifierField.Merge(dst, src) +} +func (m *StatisticalTable_QuasiIdentifierField) XXX_Size() int { + return xxx_messageInfo_StatisticalTable_QuasiIdentifierField.Size(m) +} +func (m *StatisticalTable_QuasiIdentifierField) XXX_DiscardUnknown() { + xxx_messageInfo_StatisticalTable_QuasiIdentifierField.DiscardUnknown(m) +} + +var xxx_messageInfo_StatisticalTable_QuasiIdentifierField proto.InternalMessageInfo + +func (m *StatisticalTable_QuasiIdentifierField) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +func (m *StatisticalTable_QuasiIdentifierField) GetCustomTag() string { + if m != nil { + return m.CustomTag + } + return "" +} + +// Privacy metric to compute for reidentification risk analysis. 
+type PrivacyMetric struct { + // Types that are valid to be assigned to Type: + // *PrivacyMetric_NumericalStatsConfig_ + // *PrivacyMetric_CategoricalStatsConfig_ + // *PrivacyMetric_KAnonymityConfig_ + // *PrivacyMetric_LDiversityConfig_ + // *PrivacyMetric_KMapEstimationConfig_ + // *PrivacyMetric_DeltaPresenceEstimationConfig_ + Type isPrivacyMetric_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric) Reset() { *m = PrivacyMetric{} } +func (m *PrivacyMetric) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric) ProtoMessage() {} +func (*PrivacyMetric) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36} +} +func (m *PrivacyMetric) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric.Unmarshal(m, b) +} +func (m *PrivacyMetric) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric.Merge(dst, src) +} +func (m *PrivacyMetric) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric.Size(m) +} +func (m *PrivacyMetric) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric proto.InternalMessageInfo + +type isPrivacyMetric_Type interface { + isPrivacyMetric_Type() +} + +type PrivacyMetric_NumericalStatsConfig_ struct { + NumericalStatsConfig *PrivacyMetric_NumericalStatsConfig `protobuf:"bytes,1,opt,name=numerical_stats_config,json=numericalStatsConfig,proto3,oneof"` +} + +type PrivacyMetric_CategoricalStatsConfig_ struct { + CategoricalStatsConfig *PrivacyMetric_CategoricalStatsConfig `protobuf:"bytes,2,opt,name=categorical_stats_config,json=categoricalStatsConfig,proto3,oneof"` +} + +type PrivacyMetric_KAnonymityConfig_ struct { + KAnonymityConfig *PrivacyMetric_KAnonymityConfig `protobuf:"bytes,3,opt,name=k_anonymity_config,json=kAnonymityConfig,proto3,oneof"` +} + +type PrivacyMetric_LDiversityConfig_ struct { + LDiversityConfig *PrivacyMetric_LDiversityConfig `protobuf:"bytes,4,opt,name=l_diversity_config,json=lDiversityConfig,proto3,oneof"` +} + +type PrivacyMetric_KMapEstimationConfig_ struct { + KMapEstimationConfig *PrivacyMetric_KMapEstimationConfig `protobuf:"bytes,5,opt,name=k_map_estimation_config,json=kMapEstimationConfig,proto3,oneof"` +} + +type PrivacyMetric_DeltaPresenceEstimationConfig_ struct { + DeltaPresenceEstimationConfig *PrivacyMetric_DeltaPresenceEstimationConfig `protobuf:"bytes,6,opt,name=delta_presence_estimation_config,json=deltaPresenceEstimationConfig,proto3,oneof"` +} + +func (*PrivacyMetric_NumericalStatsConfig_) isPrivacyMetric_Type() {} + +func (*PrivacyMetric_CategoricalStatsConfig_) isPrivacyMetric_Type() {} + +func (*PrivacyMetric_KAnonymityConfig_) isPrivacyMetric_Type() {} + +func (*PrivacyMetric_LDiversityConfig_) isPrivacyMetric_Type() {} + +func (*PrivacyMetric_KMapEstimationConfig_) isPrivacyMetric_Type() {} + +func (*PrivacyMetric_DeltaPresenceEstimationConfig_) isPrivacyMetric_Type() {} + +func (m *PrivacyMetric) GetType() isPrivacyMetric_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *PrivacyMetric) GetNumericalStatsConfig() *PrivacyMetric_NumericalStatsConfig { + if x, ok := m.GetType().(*PrivacyMetric_NumericalStatsConfig_); ok { + return x.NumericalStatsConfig + } + return nil +} + +func (m *PrivacyMetric) 
GetCategoricalStatsConfig() *PrivacyMetric_CategoricalStatsConfig { + if x, ok := m.GetType().(*PrivacyMetric_CategoricalStatsConfig_); ok { + return x.CategoricalStatsConfig + } + return nil +} + +func (m *PrivacyMetric) GetKAnonymityConfig() *PrivacyMetric_KAnonymityConfig { + if x, ok := m.GetType().(*PrivacyMetric_KAnonymityConfig_); ok { + return x.KAnonymityConfig + } + return nil +} + +func (m *PrivacyMetric) GetLDiversityConfig() *PrivacyMetric_LDiversityConfig { + if x, ok := m.GetType().(*PrivacyMetric_LDiversityConfig_); ok { + return x.LDiversityConfig + } + return nil +} + +func (m *PrivacyMetric) GetKMapEstimationConfig() *PrivacyMetric_KMapEstimationConfig { + if x, ok := m.GetType().(*PrivacyMetric_KMapEstimationConfig_); ok { + return x.KMapEstimationConfig + } + return nil +} + +func (m *PrivacyMetric) GetDeltaPresenceEstimationConfig() *PrivacyMetric_DeltaPresenceEstimationConfig { + if x, ok := m.GetType().(*PrivacyMetric_DeltaPresenceEstimationConfig_); ok { + return x.DeltaPresenceEstimationConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PrivacyMetric) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PrivacyMetric_OneofMarshaler, _PrivacyMetric_OneofUnmarshaler, _PrivacyMetric_OneofSizer, []interface{}{ + (*PrivacyMetric_NumericalStatsConfig_)(nil), + (*PrivacyMetric_CategoricalStatsConfig_)(nil), + (*PrivacyMetric_KAnonymityConfig_)(nil), + (*PrivacyMetric_LDiversityConfig_)(nil), + (*PrivacyMetric_KMapEstimationConfig_)(nil), + (*PrivacyMetric_DeltaPresenceEstimationConfig_)(nil), + } +} + +func _PrivacyMetric_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PrivacyMetric) + // type + switch x := m.Type.(type) { + case *PrivacyMetric_NumericalStatsConfig_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NumericalStatsConfig); err != nil { + return err + } + case *PrivacyMetric_CategoricalStatsConfig_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CategoricalStatsConfig); err != nil { + return err + } + case *PrivacyMetric_KAnonymityConfig_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KAnonymityConfig); err != nil { + return err + } + case *PrivacyMetric_LDiversityConfig_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LDiversityConfig); err != nil { + return err + } + case *PrivacyMetric_KMapEstimationConfig_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KMapEstimationConfig); err != nil { + return err + } + case *PrivacyMetric_DeltaPresenceEstimationConfig_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeltaPresenceEstimationConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PrivacyMetric.Type has unexpected type %T", x) + } + return nil +} + +func _PrivacyMetric_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PrivacyMetric) + switch tag { + case 1: // type.numerical_stats_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_NumericalStatsConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_NumericalStatsConfig_{msg} + return true, err + case 2: // type.categorical_stats_config + if wire != proto.WireBytes { + return true, 
proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_CategoricalStatsConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_CategoricalStatsConfig_{msg} + return true, err + case 3: // type.k_anonymity_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_KAnonymityConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_KAnonymityConfig_{msg} + return true, err + case 4: // type.l_diversity_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_LDiversityConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_LDiversityConfig_{msg} + return true, err + case 5: // type.k_map_estimation_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_KMapEstimationConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_KMapEstimationConfig_{msg} + return true, err + case 6: // type.delta_presence_estimation_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrivacyMetric_DeltaPresenceEstimationConfig) + err := b.DecodeMessage(msg) + m.Type = &PrivacyMetric_DeltaPresenceEstimationConfig_{msg} + return true, err + default: + return false, nil + } +} + +func _PrivacyMetric_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PrivacyMetric) + // type + switch x := m.Type.(type) { + case *PrivacyMetric_NumericalStatsConfig_: + s := proto.Size(x.NumericalStatsConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_CategoricalStatsConfig_: + s := proto.Size(x.CategoricalStatsConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_KAnonymityConfig_: + s := proto.Size(x.KAnonymityConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_LDiversityConfig_: + s := proto.Size(x.LDiversityConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_KMapEstimationConfig_: + s := proto.Size(x.KMapEstimationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_DeltaPresenceEstimationConfig_: + s := proto.Size(x.DeltaPresenceEstimationConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Compute numerical stats over an individual column, including +// min, max, and quantiles. +type PrivacyMetric_NumericalStatsConfig struct { + // Field to compute numerical stats on. Supported types are + // integer, float, date, datetime, timestamp, time. 
+ Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_NumericalStatsConfig) Reset() { *m = PrivacyMetric_NumericalStatsConfig{} } +func (m *PrivacyMetric_NumericalStatsConfig) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric_NumericalStatsConfig) ProtoMessage() {} +func (*PrivacyMetric_NumericalStatsConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 0} +} +func (m *PrivacyMetric_NumericalStatsConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_NumericalStatsConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_NumericalStatsConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_NumericalStatsConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_NumericalStatsConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_NumericalStatsConfig.Merge(dst, src) +} +func (m *PrivacyMetric_NumericalStatsConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_NumericalStatsConfig.Size(m) +} +func (m *PrivacyMetric_NumericalStatsConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_NumericalStatsConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_NumericalStatsConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_NumericalStatsConfig) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +// Compute numerical stats over an individual column, including +// number of distinct values and value count distribution. +type PrivacyMetric_CategoricalStatsConfig struct { + // Field to compute categorical stats on. All column types are + // supported except for arrays and structs. However, it may be more + // informative to use NumericalStats when the field type is supported, + // depending on the data. 
+ Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_CategoricalStatsConfig) Reset() { *m = PrivacyMetric_CategoricalStatsConfig{} } +func (m *PrivacyMetric_CategoricalStatsConfig) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric_CategoricalStatsConfig) ProtoMessage() {} +func (*PrivacyMetric_CategoricalStatsConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 1} +} +func (m *PrivacyMetric_CategoricalStatsConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_CategoricalStatsConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_CategoricalStatsConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig.Merge(dst, src) +} +func (m *PrivacyMetric_CategoricalStatsConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig.Size(m) +} +func (m *PrivacyMetric_CategoricalStatsConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_CategoricalStatsConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_CategoricalStatsConfig) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +// k-anonymity metric, used for analysis of reidentification risk. +type PrivacyMetric_KAnonymityConfig struct { + // Set of fields to compute k-anonymity over. When multiple fields are + // specified, they are considered a single composite key. Structs and + // repeated data types are not supported; however, nested fields are + // supported so long as they are not structs themselves or nested within + // a repeated field. + QuasiIds []*FieldId `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // Optional message indicating that multiple rows might be associated to a + // single individual. If the same entity_id is associated to multiple + // quasi-identifier tuples over distinct rows, we consider the entire + // collection of tuples as the composite quasi-identifier. This collection + // is a multiset: the order in which the different tuples appear in the + // dataset is ignored, but their frequency is taken into account. + // + // Important note: a maximum of 1000 rows can be associated to a single + // entity ID. If more rows are associated with the same entity ID, some + // might be ignored. 
+ EntityId *EntityId `protobuf:"bytes,2,opt,name=entity_id,json=entityId,proto3" json:"entity_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_KAnonymityConfig) Reset() { *m = PrivacyMetric_KAnonymityConfig{} } +func (m *PrivacyMetric_KAnonymityConfig) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric_KAnonymityConfig) ProtoMessage() {} +func (*PrivacyMetric_KAnonymityConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 2} +} +func (m *PrivacyMetric_KAnonymityConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_KAnonymityConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_KAnonymityConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_KAnonymityConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_KAnonymityConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_KAnonymityConfig.Merge(dst, src) +} +func (m *PrivacyMetric_KAnonymityConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_KAnonymityConfig.Size(m) +} +func (m *PrivacyMetric_KAnonymityConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_KAnonymityConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_KAnonymityConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_KAnonymityConfig) GetQuasiIds() []*FieldId { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *PrivacyMetric_KAnonymityConfig) GetEntityId() *EntityId { + if m != nil { + return m.EntityId + } + return nil +} + +// l-diversity metric, used for analysis of reidentification risk. +type PrivacyMetric_LDiversityConfig struct { + // Set of quasi-identifiers indicating how equivalence classes are + // defined for the l-diversity computation. When multiple fields are + // specified, they are considered a single composite key. + QuasiIds []*FieldId `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // Sensitive field for computing the l-value. 
+ SensitiveAttribute *FieldId `protobuf:"bytes,2,opt,name=sensitive_attribute,json=sensitiveAttribute,proto3" json:"sensitive_attribute,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_LDiversityConfig) Reset() { *m = PrivacyMetric_LDiversityConfig{} } +func (m *PrivacyMetric_LDiversityConfig) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric_LDiversityConfig) ProtoMessage() {} +func (*PrivacyMetric_LDiversityConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 3} +} +func (m *PrivacyMetric_LDiversityConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_LDiversityConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_LDiversityConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_LDiversityConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_LDiversityConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_LDiversityConfig.Merge(dst, src) +} +func (m *PrivacyMetric_LDiversityConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_LDiversityConfig.Size(m) +} +func (m *PrivacyMetric_LDiversityConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_LDiversityConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_LDiversityConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_LDiversityConfig) GetQuasiIds() []*FieldId { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *PrivacyMetric_LDiversityConfig) GetSensitiveAttribute() *FieldId { + if m != nil { + return m.SensitiveAttribute + } + return nil +} + +// Reidentifiability metric. This corresponds to a risk model similar to what +// is called "journalist risk" in the literature, except the attack dataset is +// statistically modeled instead of being perfectly known. This can be done +// using publicly available data (like the US Census), or using a custom +// statistical model (indicated as one or several BigQuery tables), or by +// extrapolating from the distribution of values in the input dataset. +// A column with a semantic tag attached. +type PrivacyMetric_KMapEstimationConfig struct { + // Fields considered to be quasi-identifiers. No two columns can have the + // same tag. [required] + QuasiIds []*PrivacyMetric_KMapEstimationConfig_TaggedField `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // ISO 3166-1 alpha-2 region code to use in the statistical modeling. + // Required if no column is tagged with a region-specific InfoType (like + // US_ZIP_5) or a region code. + RegionCode string `protobuf:"bytes,2,opt,name=region_code,json=regionCode,proto3" json:"region_code,omitempty"` + // Several auxiliary tables can be used in the analysis. Each custom_tag + // used to tag a quasi-identifiers column must appear in exactly one column + // of one auxiliary table. 
+ AuxiliaryTables []*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable `protobuf:"bytes,3,rep,name=auxiliary_tables,json=auxiliaryTables,proto3" json:"auxiliary_tables,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_KMapEstimationConfig) Reset() { *m = PrivacyMetric_KMapEstimationConfig{} } +func (m *PrivacyMetric_KMapEstimationConfig) String() string { return proto.CompactTextString(m) } +func (*PrivacyMetric_KMapEstimationConfig) ProtoMessage() {} +func (*PrivacyMetric_KMapEstimationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 4} +} +func (m *PrivacyMetric_KMapEstimationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_KMapEstimationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_KMapEstimationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig.Merge(dst, src) +} +func (m *PrivacyMetric_KMapEstimationConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig.Size(m) +} +func (m *PrivacyMetric_KMapEstimationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_KMapEstimationConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_KMapEstimationConfig) GetQuasiIds() []*PrivacyMetric_KMapEstimationConfig_TaggedField { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig) GetRegionCode() string { + if m != nil { + return m.RegionCode + } + return "" +} + +func (m *PrivacyMetric_KMapEstimationConfig) GetAuxiliaryTables() []*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable { + if m != nil { + return m.AuxiliaryTables + } + return nil +} + +type PrivacyMetric_KMapEstimationConfig_TaggedField struct { + // Identifies the column. [required] + Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // Semantic tag that identifies what a column contains, to determine which + // statistical model to use to estimate the reidentifiability of each + // value. 
[required] + // + // Types that are valid to be assigned to Tag: + // *PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType + // *PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag + // *PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred + Tag isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag `protobuf_oneof:"tag"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) Reset() { + *m = PrivacyMetric_KMapEstimationConfig_TaggedField{} +} +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) String() string { + return proto.CompactTextString(m) +} +func (*PrivacyMetric_KMapEstimationConfig_TaggedField) ProtoMessage() {} +func (*PrivacyMetric_KMapEstimationConfig_TaggedField) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 4, 0} +} +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField.Unmarshal(m, b) +} +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField.Merge(dst, src) +} +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField.Size(m) +} +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_TaggedField proto.InternalMessageInfo + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +type isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag interface { + isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag() +} + +type PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType struct { + InfoType *InfoType `protobuf:"bytes,2,opt,name=info_type,json=infoType,proto3,oneof"` +} + +type PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag struct { + CustomTag string `protobuf:"bytes,3,opt,name=custom_tag,json=customTag,proto3,oneof"` +} + +type PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred struct { + Inferred *empty.Empty `protobuf:"bytes,4,opt,name=inferred,proto3,oneof"` +} + +func (*PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType) isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag() { +} + +func (*PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag) isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag() { +} + +func (*PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred) isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag() { +} + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) GetTag() isPrivacyMetric_KMapEstimationConfig_TaggedField_Tag { + if m != nil { + return m.Tag + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) GetInfoType() *InfoType { + if x, ok := m.GetTag().(*PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType); ok { + return x.InfoType + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) GetCustomTag() string { + if x, ok := 
m.GetTag().(*PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag); ok { + return x.CustomTag + } + return "" +} + +func (m *PrivacyMetric_KMapEstimationConfig_TaggedField) GetInferred() *empty.Empty { + if x, ok := m.GetTag().(*PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred); ok { + return x.Inferred + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PrivacyMetric_KMapEstimationConfig_TaggedField) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofMarshaler, _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofUnmarshaler, _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofSizer, []interface{}{ + (*PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType)(nil), + (*PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag)(nil), + (*PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred)(nil), + } +} + +func _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PrivacyMetric_KMapEstimationConfig_TaggedField) + // tag + switch x := m.Tag.(type) { + case *PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InfoType); err != nil { + return err + } + case *PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CustomTag) + case *PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Inferred); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PrivacyMetric_KMapEstimationConfig_TaggedField.Tag has unexpected type %T", x) + } + return nil +} + +func _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PrivacyMetric_KMapEstimationConfig_TaggedField) + switch tag { + case 2: // tag.info_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InfoType) + err := b.DecodeMessage(msg) + m.Tag = &PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType{msg} + return true, err + case 3: // tag.custom_tag + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Tag = &PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag{x} + return true, err + case 4: // tag.inferred + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.Tag = &PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred{msg} + return true, err + default: + return false, nil + } +} + +func _PrivacyMetric_KMapEstimationConfig_TaggedField_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PrivacyMetric_KMapEstimationConfig_TaggedField) + // tag + switch x := m.Tag.(type) { + case *PrivacyMetric_KMapEstimationConfig_TaggedField_InfoType: + s := proto.Size(x.InfoType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrivacyMetric_KMapEstimationConfig_TaggedField_CustomTag: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CustomTag))) + n += len(x.CustomTag) + case *PrivacyMetric_KMapEstimationConfig_TaggedField_Inferred: + s := proto.Size(x.Inferred) + n += 1 // tag and wire + 
n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// An auxiliary table contains statistical information on the relative +// frequency of different quasi-identifiers values. It has one or several +// quasi-identifiers columns, and one column that indicates the relative +// frequency of each quasi-identifier tuple. +// If a tuple is present in the data but not in the auxiliary table, the +// corresponding relative frequency is assumed to be zero (and thus, the +// tuple is highly reidentifiable). +type PrivacyMetric_KMapEstimationConfig_AuxiliaryTable struct { + // Auxiliary table location. [required] + Table *BigQueryTable `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // Quasi-identifier columns. [required] + QuasiIds []*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // The relative frequency column must contain a floating-point number + // between 0 and 1 (inclusive). Null values are assumed to be zero. + // [required] + RelativeFrequency *FieldId `protobuf:"bytes,2,opt,name=relative_frequency,json=relativeFrequency,proto3" json:"relative_frequency,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) Reset() { + *m = PrivacyMetric_KMapEstimationConfig_AuxiliaryTable{} +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) String() string { + return proto.CompactTextString(m) +} +func (*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) ProtoMessage() {} +func (*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 4, 1} +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable.Unmarshal(m, b) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable.Merge(dst, src) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable.Size(m) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable proto.InternalMessageInfo + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) GetTable() *BigQueryTable { + if m != nil { + return m.Table + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) GetQuasiIds() []*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable) GetRelativeFrequency() *FieldId { + if m != nil { + return m.RelativeFrequency + } + return nil +} + +// A quasi-identifier column has a custom_tag, used to know which column +// in the data corresponds to 
which column in the statistical model. +type PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField struct { + Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + CustomTag string `protobuf:"bytes,2,opt,name=custom_tag,json=customTag,proto3" json:"custom_tag,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) Reset() { + *m = PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField{} +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) String() string { + return proto.CompactTextString(m) +} +func (*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) ProtoMessage() {} +func (*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 4, 1, 0} +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField.Unmarshal(m, b) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField.Merge(dst, src) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField.Size(m) +} +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField proto.InternalMessageInfo + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +func (m *PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField) GetCustomTag() string { + if m != nil { + return m.CustomTag + } + return "" +} + +// δ-presence metric, used to estimate how likely it is for an attacker to +// figure out that one given individual appears in a de-identified dataset. +// Similarly to the k-map metric, we cannot compute δ-presence exactly without +// knowing the attack dataset, so we use a statistical model instead. +type PrivacyMetric_DeltaPresenceEstimationConfig struct { + // Fields considered to be quasi-identifiers. No two fields can have the + // same tag. [required] + QuasiIds []*QuasiId `protobuf:"bytes,1,rep,name=quasi_ids,json=quasiIds,proto3" json:"quasi_ids,omitempty"` + // ISO 3166-1 alpha-2 region code to use in the statistical modeling. + // Required if no column is tagged with a region-specific InfoType (like + // US_ZIP_5) or a region code. + RegionCode string `protobuf:"bytes,2,opt,name=region_code,json=regionCode,proto3" json:"region_code,omitempty"` + // Several auxiliary tables can be used in the analysis. Each custom_tag + // used to tag a quasi-identifiers field must appear in exactly one + // field of one auxiliary table. 
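+
+ // Illustrative sketch, not part of the upstream generated code: wiring a
+ // k-map auxiliary table (defined above) to the scanned data. The custom_tag
+ // ties a column of the auxiliary table to the quasi-identifier column that
+ // carries the same tag in the request. BigQueryTable is assumed to expose
+ // ProjectId/DatasetId/TableId and FieldId a Name field, as elsewhere in this
+ // package.
+ //
+ //    aux := &PrivacyMetric_KMapEstimationConfig_AuxiliaryTable{
+ //        Table: &BigQueryTable{ProjectId: "my-project", DatasetId: "census", TableId: "job_stats"},
+ //        QuasiIds: []*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField{
+ //            {Field: &FieldId{Name: "job_title"}, CustomTag: "job"},
+ //        },
+ //        RelativeFrequency: &FieldId{Name: "relative_freq"}, // float in [0, 1]
+ //    }
+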
+ AuxiliaryTables []*StatisticalTable `protobuf:"bytes,3,rep,name=auxiliary_tables,json=auxiliaryTables,proto3" json:"auxiliary_tables,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) Reset() { + *m = PrivacyMetric_DeltaPresenceEstimationConfig{} +} +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) String() string { + return proto.CompactTextString(m) +} +func (*PrivacyMetric_DeltaPresenceEstimationConfig) ProtoMessage() {} +func (*PrivacyMetric_DeltaPresenceEstimationConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{36, 5} +} +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig.Unmarshal(m, b) +} +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig.Marshal(b, m, deterministic) +} +func (dst *PrivacyMetric_DeltaPresenceEstimationConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig.Merge(dst, src) +} +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) XXX_Size() int { + return xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig.Size(m) +} +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PrivacyMetric_DeltaPresenceEstimationConfig proto.InternalMessageInfo + +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) GetQuasiIds() []*QuasiId { + if m != nil { + return m.QuasiIds + } + return nil +} + +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) GetRegionCode() string { + if m != nil { + return m.RegionCode + } + return "" +} + +func (m *PrivacyMetric_DeltaPresenceEstimationConfig) GetAuxiliaryTables() []*StatisticalTable { + if m != nil { + return m.AuxiliaryTables + } + return nil +} + +// Result of a risk analysis operation request. +type AnalyzeDataSourceRiskDetails struct { + // Privacy metric to compute. + RequestedPrivacyMetric *PrivacyMetric `protobuf:"bytes,1,opt,name=requested_privacy_metric,json=requestedPrivacyMetric,proto3" json:"requested_privacy_metric,omitempty"` + // Input dataset to compute metrics over. + RequestedSourceTable *BigQueryTable `protobuf:"bytes,2,opt,name=requested_source_table,json=requestedSourceTable,proto3" json:"requested_source_table,omitempty"` + // Values associated with this metric. 
+ // + // Types that are valid to be assigned to Result: + // *AnalyzeDataSourceRiskDetails_NumericalStatsResult_ + // *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_ + // *AnalyzeDataSourceRiskDetails_KAnonymityResult_ + // *AnalyzeDataSourceRiskDetails_LDiversityResult_ + // *AnalyzeDataSourceRiskDetails_KMapEstimationResult_ + // *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_ + Result isAnalyzeDataSourceRiskDetails_Result `protobuf_oneof:"result"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails) Reset() { *m = AnalyzeDataSourceRiskDetails{} } +func (m *AnalyzeDataSourceRiskDetails) String() string { return proto.CompactTextString(m) } +func (*AnalyzeDataSourceRiskDetails) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37} +} +func (m *AnalyzeDataSourceRiskDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails) GetRequestedPrivacyMetric() *PrivacyMetric { + if m != nil { + return m.RequestedPrivacyMetric + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetRequestedSourceTable() *BigQueryTable { + if m != nil { + return m.RequestedSourceTable + } + return nil +} + +type isAnalyzeDataSourceRiskDetails_Result interface { + isAnalyzeDataSourceRiskDetails_Result() +} + +type AnalyzeDataSourceRiskDetails_NumericalStatsResult_ struct { + NumericalStatsResult *AnalyzeDataSourceRiskDetails_NumericalStatsResult `protobuf:"bytes,3,opt,name=numerical_stats_result,json=numericalStatsResult,proto3,oneof"` +} + +type AnalyzeDataSourceRiskDetails_CategoricalStatsResult_ struct { + CategoricalStatsResult *AnalyzeDataSourceRiskDetails_CategoricalStatsResult `protobuf:"bytes,4,opt,name=categorical_stats_result,json=categoricalStatsResult,proto3,oneof"` +} + +type AnalyzeDataSourceRiskDetails_KAnonymityResult_ struct { + KAnonymityResult *AnalyzeDataSourceRiskDetails_KAnonymityResult `protobuf:"bytes,5,opt,name=k_anonymity_result,json=kAnonymityResult,proto3,oneof"` +} + +type AnalyzeDataSourceRiskDetails_LDiversityResult_ struct { + LDiversityResult *AnalyzeDataSourceRiskDetails_LDiversityResult `protobuf:"bytes,6,opt,name=l_diversity_result,json=lDiversityResult,proto3,oneof"` +} + +type AnalyzeDataSourceRiskDetails_KMapEstimationResult_ struct { + KMapEstimationResult *AnalyzeDataSourceRiskDetails_KMapEstimationResult `protobuf:"bytes,7,opt,name=k_map_estimation_result,json=kMapEstimationResult,proto3,oneof"` +} + +type AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_ struct { + DeltaPresenceEstimationResult *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult 
`protobuf:"bytes,9,opt,name=delta_presence_estimation_result,json=deltaPresenceEstimationResult,proto3,oneof"` +} + +func (*AnalyzeDataSourceRiskDetails_NumericalStatsResult_) isAnalyzeDataSourceRiskDetails_Result() {} + +func (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_) isAnalyzeDataSourceRiskDetails_Result() {} + +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult_) isAnalyzeDataSourceRiskDetails_Result() {} + +func (*AnalyzeDataSourceRiskDetails_LDiversityResult_) isAnalyzeDataSourceRiskDetails_Result() {} + +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_) isAnalyzeDataSourceRiskDetails_Result() {} + +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_) isAnalyzeDataSourceRiskDetails_Result() { +} + +func (m *AnalyzeDataSourceRiskDetails) GetResult() isAnalyzeDataSourceRiskDetails_Result { + if m != nil { + return m.Result + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetNumericalStatsResult() *AnalyzeDataSourceRiskDetails_NumericalStatsResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_NumericalStatsResult_); ok { + return x.NumericalStatsResult + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetCategoricalStatsResult() *AnalyzeDataSourceRiskDetails_CategoricalStatsResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_); ok { + return x.CategoricalStatsResult + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetKAnonymityResult() *AnalyzeDataSourceRiskDetails_KAnonymityResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_KAnonymityResult_); ok { + return x.KAnonymityResult + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetLDiversityResult() *AnalyzeDataSourceRiskDetails_LDiversityResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_LDiversityResult_); ok { + return x.LDiversityResult + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetKMapEstimationResult() *AnalyzeDataSourceRiskDetails_KMapEstimationResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_KMapEstimationResult_); ok { + return x.KMapEstimationResult + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails) GetDeltaPresenceEstimationResult() *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult { + if x, ok := m.GetResult().(*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_); ok { + return x.DeltaPresenceEstimationResult + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
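+
+ // Illustrative sketch, not part of the upstream generated code: callers can
+ // branch on the Result oneof either through the typed getters above
+ // (GetKAnonymityResult and friends return nil when another variant is set)
+ // or with a type switch over GetResult(). The inspect* handlers below are
+ // hypothetical placeholders.
+ //
+ //    switch r := details.GetResult().(type) {
+ //    case *AnalyzeDataSourceRiskDetails_KAnonymityResult_:
+ //        inspectKAnonymity(r.KAnonymityResult) // hypothetical handler
+ //    case *AnalyzeDataSourceRiskDetails_LDiversityResult_:
+ //        inspectLDiversity(r.LDiversityResult) // hypothetical handler
+ //    default:
+ //        // numerical/categorical stats, k-map or delta-presence results
+ //    }
+ //
+ // where details is the *AnalyzeDataSourceRiskDetails received from the API.
+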
+func (*AnalyzeDataSourceRiskDetails) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _AnalyzeDataSourceRiskDetails_OneofMarshaler, _AnalyzeDataSourceRiskDetails_OneofUnmarshaler, _AnalyzeDataSourceRiskDetails_OneofSizer, []interface{}{ + (*AnalyzeDataSourceRiskDetails_NumericalStatsResult_)(nil), + (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_)(nil), + (*AnalyzeDataSourceRiskDetails_KAnonymityResult_)(nil), + (*AnalyzeDataSourceRiskDetails_LDiversityResult_)(nil), + (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_)(nil), + (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_)(nil), + } +} + +func _AnalyzeDataSourceRiskDetails_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*AnalyzeDataSourceRiskDetails) + // result + switch x := m.Result.(type) { + case *AnalyzeDataSourceRiskDetails_NumericalStatsResult_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.NumericalStatsResult); err != nil { + return err + } + case *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CategoricalStatsResult); err != nil { + return err + } + case *AnalyzeDataSourceRiskDetails_KAnonymityResult_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KAnonymityResult); err != nil { + return err + } + case *AnalyzeDataSourceRiskDetails_LDiversityResult_: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LDiversityResult); err != nil { + return err + } + case *AnalyzeDataSourceRiskDetails_KMapEstimationResult_: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KMapEstimationResult); err != nil { + return err + } + case *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DeltaPresenceEstimationResult); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("AnalyzeDataSourceRiskDetails.Result has unexpected type %T", x) + } + return nil +} + +func _AnalyzeDataSourceRiskDetails_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*AnalyzeDataSourceRiskDetails) + switch tag { + case 3: // result.numerical_stats_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_NumericalStatsResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_NumericalStatsResult_{msg} + return true, err + case 4: // result.categorical_stats_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_CategoricalStatsResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_CategoricalStatsResult_{msg} + return true, err + case 5: // result.k_anonymity_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_KAnonymityResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_KAnonymityResult_{msg} + return true, err + case 6: // result.l_diversity_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_LDiversityResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_LDiversityResult_{msg} + 
return true, err + case 7: // result.k_map_estimation_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_KMapEstimationResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_KMapEstimationResult_{msg} + return true, err + case 9: // result.delta_presence_estimation_result + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) + err := b.DecodeMessage(msg) + m.Result = &AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_{msg} + return true, err + default: + return false, nil + } +} + +func _AnalyzeDataSourceRiskDetails_OneofSizer(msg proto.Message) (n int) { + m := msg.(*AnalyzeDataSourceRiskDetails) + // result + switch x := m.Result.(type) { + case *AnalyzeDataSourceRiskDetails_NumericalStatsResult_: + s := proto.Size(x.NumericalStatsResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_: + s := proto.Size(x.CategoricalStatsResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnalyzeDataSourceRiskDetails_KAnonymityResult_: + s := proto.Size(x.KAnonymityResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnalyzeDataSourceRiskDetails_LDiversityResult_: + s := proto.Size(x.LDiversityResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnalyzeDataSourceRiskDetails_KMapEstimationResult_: + s := proto.Size(x.KMapEstimationResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_: + s := proto.Size(x.DeltaPresenceEstimationResult) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Result of the numerical stats computation. +type AnalyzeDataSourceRiskDetails_NumericalStatsResult struct { + // Minimum value appearing in the column. + MinValue *Value `protobuf:"bytes,1,opt,name=min_value,json=minValue,proto3" json:"min_value,omitempty"` + // Maximum value appearing in the column. + MaxValue *Value `protobuf:"bytes,2,opt,name=max_value,json=maxValue,proto3" json:"max_value,omitempty"` + // List of 99 values that partition the set of field values into 100 equal + // sized buckets. 
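+
+ // Illustrative note, not part of the upstream generated code: with 99
+ // partition values, QuantileValues[i] approximates the (i+1)-th percentile,
+ // so index 49 is roughly the median and index 98 the 99th percentile.
+ //
+ //    if q := stats.GetQuantileValues(); len(q) == 99 {
+ //        median, p99 := q[49], q[98]
+ //        _, _ = median, p99
+ //    }
+ //
+ // where stats is the *AnalyzeDataSourceRiskDetails_NumericalStatsResult
+ // obtained from GetNumericalStatsResult().
+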
+ QuantileValues []*Value `protobuf:"bytes,4,rep,name=quantile_values,json=quantileValues,proto3" json:"quantile_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_NumericalStatsResult{} +} +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_NumericalStatsResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_NumericalStatsResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 0} +} +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_NumericalStatsResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_NumericalStatsResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) GetMinValue() *Value { + if m != nil { + return m.MinValue + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) GetMaxValue() *Value { + if m != nil { + return m.MaxValue + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_NumericalStatsResult) GetQuantileValues() []*Value { + if m != nil { + return m.QuantileValues + } + return nil +} + +// Result of the categorical stats computation. +type AnalyzeDataSourceRiskDetails_CategoricalStatsResult struct { + // Histogram of value frequencies in the column. 
+ ValueFrequencyHistogramBuckets []*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket `protobuf:"bytes,5,rep,name=value_frequency_histogram_buckets,json=valueFrequencyHistogramBuckets,proto3" json:"value_frequency_histogram_buckets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_CategoricalStatsResult{} +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 1} +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult) GetValueFrequencyHistogramBuckets() []*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket { + if m != nil { + return m.ValueFrequencyHistogramBuckets + } + return nil +} + +type AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket struct { + // Lower bound on the value frequency of the values in this bucket. + ValueFrequencyLowerBound int64 `protobuf:"varint,1,opt,name=value_frequency_lower_bound,json=valueFrequencyLowerBound,proto3" json:"value_frequency_lower_bound,omitempty"` + // Upper bound on the value frequency of the values in this bucket. + ValueFrequencyUpperBound int64 `protobuf:"varint,2,opt,name=value_frequency_upper_bound,json=valueFrequencyUpperBound,proto3" json:"value_frequency_upper_bound,omitempty"` + // Total number of values in this bucket. + BucketSize int64 `protobuf:"varint,3,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + // Sample of value frequencies in this bucket. The total number of + // values returned per bucket is capped at 20. + BucketValues []*ValueFrequency `protobuf:"bytes,4,rep,name=bucket_values,json=bucketValues,proto3" json:"bucket_values,omitempty"` + // Total number of distinct values in this bucket. 
+ BucketValueCount int64 `protobuf:"varint,5,opt,name=bucket_value_count,json=bucketValueCount,proto3" json:"bucket_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) Reset() { + *m = AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket{} +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) ProtoMessage() { +} +func (*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 1, 0} +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) GetValueFrequencyLowerBound() int64 { + if m != nil { + return m.ValueFrequencyLowerBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) GetValueFrequencyUpperBound() int64 { + if m != nil { + return m.ValueFrequencyUpperBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) GetBucketSize() int64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) GetBucketValues() []*ValueFrequency { + if m != nil { + return m.BucketValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket) GetBucketValueCount() int64 { + if m != nil { + return m.BucketValueCount + } + return 0 +} + +// Result of the k-anonymity computation. +type AnalyzeDataSourceRiskDetails_KAnonymityResult struct { + // Histogram of k-anonymity equivalence classes. 
+ EquivalenceClassHistogramBuckets []*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket `protobuf:"bytes,5,rep,name=equivalence_class_histogram_buckets,json=equivalenceClassHistogramBuckets,proto3" json:"equivalence_class_histogram_buckets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_KAnonymityResult{} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 2} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KAnonymityResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult) GetEquivalenceClassHistogramBuckets() []*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket { + if m != nil { + return m.EquivalenceClassHistogramBuckets + } + return nil +} + +// The set of columns' values that share the same ldiversity value +type AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass struct { + // Set of values defining the equivalence class. One value per + // quasi-identifier column in the original KAnonymity metric message. + // The order is always the same as the original request. + QuasiIdsValues []*Value `protobuf:"bytes,1,rep,name=quasi_ids_values,json=quasiIdsValues,proto3" json:"quasi_ids_values,omitempty"` + // Size of the equivalence class, for example number of rows with the + // above set of values. 
+ EquivalenceClassSize int64 `protobuf:"varint,2,opt,name=equivalence_class_size,json=equivalenceClassSize,proto3" json:"equivalence_class_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) Reset() { + *m = AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass{} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 2, 0} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) GetQuasiIdsValues() []*Value { + if m != nil { + return m.QuasiIdsValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass) GetEquivalenceClassSize() int64 { + if m != nil { + return m.EquivalenceClassSize + } + return 0 +} + +type AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket struct { + // Lower bound on the size of the equivalence classes in this bucket. + EquivalenceClassSizeLowerBound int64 `protobuf:"varint,1,opt,name=equivalence_class_size_lower_bound,json=equivalenceClassSizeLowerBound,proto3" json:"equivalence_class_size_lower_bound,omitempty"` + // Upper bound on the size of the equivalence classes in this bucket. + EquivalenceClassSizeUpperBound int64 `protobuf:"varint,2,opt,name=equivalence_class_size_upper_bound,json=equivalenceClassSizeUpperBound,proto3" json:"equivalence_class_size_upper_bound,omitempty"` + // Total number of equivalence classes in this bucket. + BucketSize int64 `protobuf:"varint,3,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + // Sample of equivalence classes in this bucket. The total number of + // classes returned per bucket is capped at 20. 
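+
+ // Illustrative sketch, not part of the upstream generated code: assuming the
+ // size intervals of these buckets do not overlap (as documented for the
+ // k-map histogram later in this file), a caller can count the equivalence
+ // classes that fall below a k threshold of its choosing:
+ //
+ //    var below int64
+ //    for _, b := range kres.GetEquivalenceClassHistogramBuckets() {
+ //        if b.GetEquivalenceClassSizeUpperBound() < k {
+ //            below += b.GetBucketSize() // equivalence classes in this bucket
+ //        }
+ //    }
+ //
+ // where kres is an *AnalyzeDataSourceRiskDetails_KAnonymityResult and k is a
+ // caller-chosen int64 threshold.
+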
+ BucketValues []*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass `protobuf:"bytes,4,rep,name=bucket_values,json=bucketValues,proto3" json:"bucket_values,omitempty"` + // Total number of distinct equivalence classes in this bucket. + BucketValueCount int64 `protobuf:"varint,5,opt,name=bucket_value_count,json=bucketValueCount,proto3" json:"bucket_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) Reset() { + *m = AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket{} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 2, 1} +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) GetEquivalenceClassSizeLowerBound() int64 { + if m != nil { + return m.EquivalenceClassSizeLowerBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) GetEquivalenceClassSizeUpperBound() int64 { + if m != nil { + return m.EquivalenceClassSizeUpperBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) GetBucketSize() int64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) GetBucketValues() []*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass { + if m != nil { + return m.BucketValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket) GetBucketValueCount() int64 { + if m != nil { + return m.BucketValueCount + } + return 0 +} + +// Result of the l-diversity computation. 
+type AnalyzeDataSourceRiskDetails_LDiversityResult struct { + // Histogram of l-diversity equivalence class sensitive value frequencies. + SensitiveValueFrequencyHistogramBuckets []*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket `protobuf:"bytes,5,rep,name=sensitive_value_frequency_histogram_buckets,json=sensitiveValueFrequencyHistogramBuckets,proto3" json:"sensitive_value_frequency_histogram_buckets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_LDiversityResult{} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 3} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_LDiversityResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult) GetSensitiveValueFrequencyHistogramBuckets() []*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket { + if m != nil { + return m.SensitiveValueFrequencyHistogramBuckets + } + return nil +} + +// The set of columns' values that share the same ldiversity value. +type AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass struct { + // Quasi-identifier values defining the k-anonymity equivalence + // class. The order is always the same as the original request. + QuasiIdsValues []*Value `protobuf:"bytes,1,rep,name=quasi_ids_values,json=quasiIdsValues,proto3" json:"quasi_ids_values,omitempty"` + // Size of the k-anonymity equivalence class. + EquivalenceClassSize int64 `protobuf:"varint,2,opt,name=equivalence_class_size,json=equivalenceClassSize,proto3" json:"equivalence_class_size,omitempty"` + // Number of distinct sensitive values in this equivalence class. + NumDistinctSensitiveValues int64 `protobuf:"varint,3,opt,name=num_distinct_sensitive_values,json=numDistinctSensitiveValues,proto3" json:"num_distinct_sensitive_values,omitempty"` + // Estimated frequencies of top sensitive values. 
+ TopSensitiveValues []*ValueFrequency `protobuf:"bytes,4,rep,name=top_sensitive_values,json=topSensitiveValues,proto3" json:"top_sensitive_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) Reset() { + *m = AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass{} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 3, 0} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) GetQuasiIdsValues() []*Value { + if m != nil { + return m.QuasiIdsValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) GetEquivalenceClassSize() int64 { + if m != nil { + return m.EquivalenceClassSize + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) GetNumDistinctSensitiveValues() int64 { + if m != nil { + return m.NumDistinctSensitiveValues + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass) GetTopSensitiveValues() []*ValueFrequency { + if m != nil { + return m.TopSensitiveValues + } + return nil +} + +type AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket struct { + // Lower bound on the sensitive value frequencies of the equivalence + // classes in this bucket. + SensitiveValueFrequencyLowerBound int64 `protobuf:"varint,1,opt,name=sensitive_value_frequency_lower_bound,json=sensitiveValueFrequencyLowerBound,proto3" json:"sensitive_value_frequency_lower_bound,omitempty"` + // Upper bound on the sensitive value frequencies of the equivalence + // classes in this bucket. 
+ SensitiveValueFrequencyUpperBound int64 `protobuf:"varint,2,opt,name=sensitive_value_frequency_upper_bound,json=sensitiveValueFrequencyUpperBound,proto3" json:"sensitive_value_frequency_upper_bound,omitempty"` + // Total number of equivalence classes in this bucket. + BucketSize int64 `protobuf:"varint,3,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + // Sample of equivalence classes in this bucket. The total number of + // classes returned per bucket is capped at 20. + BucketValues []*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass `protobuf:"bytes,4,rep,name=bucket_values,json=bucketValues,proto3" json:"bucket_values,omitempty"` + // Total number of distinct equivalence classes in this bucket. + BucketValueCount int64 `protobuf:"varint,5,opt,name=bucket_value_count,json=bucketValueCount,proto3" json:"bucket_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) Reset() { + *m = AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket{} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 3, 1} +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) GetSensitiveValueFrequencyLowerBound() int64 { + if m != nil { + return m.SensitiveValueFrequencyLowerBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) GetSensitiveValueFrequencyUpperBound() int64 { + if m != nil { + return m.SensitiveValueFrequencyUpperBound + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) GetBucketSize() int64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +func (m 
*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) GetBucketValues() []*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass { + if m != nil { + return m.BucketValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket) GetBucketValueCount() int64 { + if m != nil { + return m.BucketValueCount + } + return 0 +} + +// Result of the reidentifiability analysis. Note that these results are an +// estimation, not exact values. +type AnalyzeDataSourceRiskDetails_KMapEstimationResult struct { + // The intervals [min_anonymity, max_anonymity] do not overlap. If a value + // doesn't correspond to any such interval, the associated frequency is + // zero. For example, the following records: + // {min_anonymity: 1, max_anonymity: 1, frequency: 17} + // {min_anonymity: 2, max_anonymity: 3, frequency: 42} + // {min_anonymity: 5, max_anonymity: 10, frequency: 99} + // mean that there are no record with an estimated anonymity of 4, 5, or + // larger than 10. + KMapEstimationHistogram []*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket `protobuf:"bytes,1,rep,name=k_map_estimation_histogram,json=kMapEstimationHistogram,proto3" json:"k_map_estimation_histogram,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_KMapEstimationResult{} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 4} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KMapEstimationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult) GetKMapEstimationHistogram() []*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket { + if m != nil { + return m.KMapEstimationHistogram + } + return nil +} + +// A tuple of values for the quasi-identifier columns. +type AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues struct { + // The quasi-identifier values. 
+ QuasiIdsValues []*Value `protobuf:"bytes,1,rep,name=quasi_ids_values,json=quasiIdsValues,proto3" json:"quasi_ids_values,omitempty"` + // The estimated anonymity for these quasi-identifier values. + EstimatedAnonymity int64 `protobuf:"varint,2,opt,name=estimated_anonymity,json=estimatedAnonymity,proto3" json:"estimated_anonymity,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) Reset() { + *m = AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues{} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 4, 0} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) GetQuasiIdsValues() []*Value { + if m != nil { + return m.QuasiIdsValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues) GetEstimatedAnonymity() int64 { + if m != nil { + return m.EstimatedAnonymity + } + return 0 +} + +// A KMapEstimationHistogramBucket message with the following values: +// min_anonymity: 3 +// max_anonymity: 5 +// frequency: 42 +// means that there are 42 records whose quasi-identifier values correspond +// to 3, 4 or 5 people in the overlying population. An important particular +// case is when min_anonymity = max_anonymity = 1: the frequency field then +// corresponds to the number of uniquely identifiable records. +type AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket struct { + // Always positive. + MinAnonymity int64 `protobuf:"varint,1,opt,name=min_anonymity,json=minAnonymity,proto3" json:"min_anonymity,omitempty"` + // Always greater than or equal to min_anonymity. 
+ MaxAnonymity int64 `protobuf:"varint,2,opt,name=max_anonymity,json=maxAnonymity,proto3" json:"max_anonymity,omitempty"` + // Number of records within these anonymity bounds. + BucketSize int64 `protobuf:"varint,5,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + // Sample of quasi-identifier tuple values in this bucket. The total + // number of classes returned per bucket is capped at 20. + BucketValues []*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues `protobuf:"bytes,6,rep,name=bucket_values,json=bucketValues,proto3" json:"bucket_values,omitempty"` + // Total number of distinct quasi-identifier tuple values in this bucket. + BucketValueCount int64 `protobuf:"varint,7,opt,name=bucket_value_count,json=bucketValueCount,proto3" json:"bucket_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) Reset() { + *m = AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket{} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) ProtoMessage() { +} +func (*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 4, 1} +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) GetMinAnonymity() int64 { + if m != nil { + return m.MinAnonymity + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) GetMaxAnonymity() int64 { + if m != nil { + return m.MaxAnonymity + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) GetBucketSize() int64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +func (m 
*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) GetBucketValues() []*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues { + if m != nil { + return m.BucketValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket) GetBucketValueCount() int64 { + if m != nil { + return m.BucketValueCount + } + return 0 +} + +// Result of the δ-presence computation. Note that these results are an +// estimation, not exact values. +type AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult struct { + // The intervals [min_probability, max_probability) do not overlap. If a + // value doesn't correspond to any such interval, the associated frequency + // is zero. For example, the following records: + // {min_probability: 0, max_probability: 0.1, frequency: 17} + // {min_probability: 0.2, max_probability: 0.3, frequency: 42} + // {min_probability: 0.3, max_probability: 0.4, frequency: 99} + // mean that there are no record with an estimated probability in [0.1, 0.2) + // nor larger or equal to 0.4. + DeltaPresenceEstimationHistogram []*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket `protobuf:"bytes,1,rep,name=delta_presence_estimation_histogram,json=deltaPresenceEstimationHistogram,proto3" json:"delta_presence_estimation_histogram,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) Reset() { + *m = AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult{} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) ProtoMessage() {} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 5} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult) GetDeltaPresenceEstimationHistogram() []*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket { + if m != nil { + return m.DeltaPresenceEstimationHistogram + } + return nil +} + +// A tuple of values for the quasi-identifier columns. 
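+// As an illustrative sketch only (the `bucket` variable below is hypothetical,
+// standing in for a DeltaPresenceEstimationHistogramBucket), these tuples are
+// read through the generated getters:
+//
+//	for _, v := range bucket.GetBucketValues() {
+//		_ = v.GetQuasiIdsValues()       // the quasi-identifier tuple
+//		_ = v.GetEstimatedProbability() // the estimated δ for that tuple
+//	}
+//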
+type AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues struct { + // The quasi-identifier values. + QuasiIdsValues []*Value `protobuf:"bytes,1,rep,name=quasi_ids_values,json=quasiIdsValues,proto3" json:"quasi_ids_values,omitempty"` + // The estimated probability that a given individual sharing these + // quasi-identifier values is in the dataset. This value, typically called + // δ, is the ratio between the number of records in the dataset with these + // quasi-identifier values, and the total number of individuals (inside + // *and* outside the dataset) with these quasi-identifier values. + // For example, if there are 15 individuals in the dataset who share the + // same quasi-identifier values, and an estimated 100 people in the entire + // population with these values, then δ is 0.15. + EstimatedProbability float64 `protobuf:"fixed64,2,opt,name=estimated_probability,json=estimatedProbability,proto3" json:"estimated_probability,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) Reset() { + *m = AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues{} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) ProtoMessage() { +} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 5, 0} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues.Merge(dst, src) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) GetQuasiIdsValues() []*Value { + if m != nil { + return 
m.QuasiIdsValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues) GetEstimatedProbability() float64 { + if m != nil { + return m.EstimatedProbability + } + return 0 +} + +// A DeltaPresenceEstimationHistogramBucket message with the following +// values: +// min_probability: 0.1 +// max_probability: 0.2 +// frequency: 42 +// means that there are 42 records for which δ is in [0.1, 0.2). An +// important particular case is when min_probability = max_probability = 1: +// then, every individual who shares this quasi-identifier combination is in +// the dataset. +type AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket struct { + // Between 0 and 1. + MinProbability float64 `protobuf:"fixed64,1,opt,name=min_probability,json=minProbability,proto3" json:"min_probability,omitempty"` + // Always greater than or equal to min_probability. + MaxProbability float64 `protobuf:"fixed64,2,opt,name=max_probability,json=maxProbability,proto3" json:"max_probability,omitempty"` + // Number of records within these probability bounds. + BucketSize int64 `protobuf:"varint,5,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + // Sample of quasi-identifier tuple values in this bucket. The total + // number of classes returned per bucket is capped at 20. + BucketValues []*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues `protobuf:"bytes,6,rep,name=bucket_values,json=bucketValues,proto3" json:"bucket_values,omitempty"` + // Total number of distinct quasi-identifier tuple values in this bucket. + BucketValueCount int64 `protobuf:"varint,7,opt,name=bucket_value_count,json=bucketValueCount,proto3" json:"bucket_value_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) Reset() { + *m = AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket{} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) String() string { + return proto.CompactTextString(m) +} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) ProtoMessage() { +} +func (*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{37, 5, 1} +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket.Unmarshal(m, b) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket.Marshal(b, m, deterministic) +} +func (dst *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket.Merge(dst, src) 
+} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) XXX_Size() int { + return xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket.Size(m) +} +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) XXX_DiscardUnknown() { + xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket.DiscardUnknown(m) +} + +var xxx_messageInfo_AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket proto.InternalMessageInfo + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) GetMinProbability() float64 { + if m != nil { + return m.MinProbability + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) GetMaxProbability() float64 { + if m != nil { + return m.MaxProbability + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) GetBucketSize() int64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) GetBucketValues() []*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues { + if m != nil { + return m.BucketValues + } + return nil +} + +func (m *AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket) GetBucketValueCount() int64 { + if m != nil { + return m.BucketValueCount + } + return 0 +} + +// A value of a field, including its frequency. +type ValueFrequency struct { + // A value contained in the field in question. + Value *Value `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"` + // How many times the value is contained in the field. + Count int64 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ValueFrequency) Reset() { *m = ValueFrequency{} } +func (m *ValueFrequency) String() string { return proto.CompactTextString(m) } +func (*ValueFrequency) ProtoMessage() {} +func (*ValueFrequency) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{38} +} +func (m *ValueFrequency) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ValueFrequency.Unmarshal(m, b) +} +func (m *ValueFrequency) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ValueFrequency.Marshal(b, m, deterministic) +} +func (dst *ValueFrequency) XXX_Merge(src proto.Message) { + xxx_messageInfo_ValueFrequency.Merge(dst, src) +} +func (m *ValueFrequency) XXX_Size() int { + return xxx_messageInfo_ValueFrequency.Size(m) +} +func (m *ValueFrequency) XXX_DiscardUnknown() { + xxx_messageInfo_ValueFrequency.DiscardUnknown(m) +} + +var xxx_messageInfo_ValueFrequency proto.InternalMessageInfo + +func (m *ValueFrequency) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *ValueFrequency) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +// Set of primitive values supported by the system. 
+// Note that for the purposes of inspection or transformation, the number +// of bytes considered to comprise a 'Value' is based on its representation +// as a UTF-8 encoded string. For example, if 'integer_value' is set to +// 123456789, the number of bytes would be counted as 9, even though an +// int64 only holds up to 8 bytes of data. +type Value struct { + // Types that are valid to be assigned to Type: + // *Value_IntegerValue + // *Value_FloatValue + // *Value_StringValue + // *Value_BooleanValue + // *Value_TimestampValue + // *Value_TimeValue + // *Value_DateValue + // *Value_DayOfWeekValue + Type isValue_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Value) Reset() { *m = Value{} } +func (m *Value) String() string { return proto.CompactTextString(m) } +func (*Value) ProtoMessage() {} +func (*Value) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{39} +} +func (m *Value) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Value.Unmarshal(m, b) +} +func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Value.Marshal(b, m, deterministic) +} +func (dst *Value) XXX_Merge(src proto.Message) { + xxx_messageInfo_Value.Merge(dst, src) +} +func (m *Value) XXX_Size() int { + return xxx_messageInfo_Value.Size(m) +} +func (m *Value) XXX_DiscardUnknown() { + xxx_messageInfo_Value.DiscardUnknown(m) +} + +var xxx_messageInfo_Value proto.InternalMessageInfo + +type isValue_Type interface { + isValue_Type() +} + +type Value_IntegerValue struct { + IntegerValue int64 `protobuf:"varint,1,opt,name=integer_value,json=integerValue,proto3,oneof"` +} + +type Value_FloatValue struct { + FloatValue float64 `protobuf:"fixed64,2,opt,name=float_value,json=floatValue,proto3,oneof"` +} + +type Value_StringValue struct { + StringValue string `protobuf:"bytes,3,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +type Value_BooleanValue struct { + BooleanValue bool `protobuf:"varint,4,opt,name=boolean_value,json=booleanValue,proto3,oneof"` +} + +type Value_TimestampValue struct { + TimestampValue *timestamp.Timestamp `protobuf:"bytes,5,opt,name=timestamp_value,json=timestampValue,proto3,oneof"` +} + +type Value_TimeValue struct { + TimeValue *timeofday.TimeOfDay `protobuf:"bytes,6,opt,name=time_value,json=timeValue,proto3,oneof"` +} + +type Value_DateValue struct { + DateValue *date.Date `protobuf:"bytes,7,opt,name=date_value,json=dateValue,proto3,oneof"` +} + +type Value_DayOfWeekValue struct { + DayOfWeekValue dayofweek.DayOfWeek `protobuf:"varint,8,opt,name=day_of_week_value,json=dayOfWeekValue,proto3,enum=google.type.DayOfWeek,oneof"` +} + +func (*Value_IntegerValue) isValue_Type() {} + +func (*Value_FloatValue) isValue_Type() {} + +func (*Value_StringValue) isValue_Type() {} + +func (*Value_BooleanValue) isValue_Type() {} + +func (*Value_TimestampValue) isValue_Type() {} + +func (*Value_TimeValue) isValue_Type() {} + +func (*Value_DateValue) isValue_Type() {} + +func (*Value_DayOfWeekValue) isValue_Type() {} + +func (m *Value) GetType() isValue_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *Value) GetIntegerValue() int64 { + if x, ok := m.GetType().(*Value_IntegerValue); ok { + return x.IntegerValue + } + return 0 +} + +func (m *Value) GetFloatValue() float64 { + if x, ok := m.GetType().(*Value_FloatValue); ok { + return x.FloatValue + } + return 0 +} + +func (m *Value) 
GetStringValue() string { + if x, ok := m.GetType().(*Value_StringValue); ok { + return x.StringValue + } + return "" +} + +func (m *Value) GetBooleanValue() bool { + if x, ok := m.GetType().(*Value_BooleanValue); ok { + return x.BooleanValue + } + return false +} + +func (m *Value) GetTimestampValue() *timestamp.Timestamp { + if x, ok := m.GetType().(*Value_TimestampValue); ok { + return x.TimestampValue + } + return nil +} + +func (m *Value) GetTimeValue() *timeofday.TimeOfDay { + if x, ok := m.GetType().(*Value_TimeValue); ok { + return x.TimeValue + } + return nil +} + +func (m *Value) GetDateValue() *date.Date { + if x, ok := m.GetType().(*Value_DateValue); ok { + return x.DateValue + } + return nil +} + +func (m *Value) GetDayOfWeekValue() dayofweek.DayOfWeek { + if x, ok := m.GetType().(*Value_DayOfWeekValue); ok { + return x.DayOfWeekValue + } + return dayofweek.DayOfWeek_DAY_OF_WEEK_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Value_OneofMarshaler, _Value_OneofUnmarshaler, _Value_OneofSizer, []interface{}{ + (*Value_IntegerValue)(nil), + (*Value_FloatValue)(nil), + (*Value_StringValue)(nil), + (*Value_BooleanValue)(nil), + (*Value_TimestampValue)(nil), + (*Value_TimeValue)(nil), + (*Value_DateValue)(nil), + (*Value_DayOfWeekValue)(nil), + } +} + +func _Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Value) + // type + switch x := m.Type.(type) { + case *Value_IntegerValue: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntegerValue)) + case *Value_FloatValue: + b.EncodeVarint(2<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.FloatValue)) + case *Value_StringValue: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case *Value_BooleanValue: + t := uint64(0) + if x.BooleanValue { + t = 1 + } + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *Value_TimestampValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimestampValue); err != nil { + return err + } + case *Value_TimeValue: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimeValue); err != nil { + return err + } + case *Value_DateValue: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateValue); err != nil { + return err + } + case *Value_DayOfWeekValue: + b.EncodeVarint(8<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.DayOfWeekValue)) + case nil: + default: + return fmt.Errorf("Value.Type has unexpected type %T", x) + } + return nil +} + +func _Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Value) + switch tag { + case 1: // type.integer_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Type = &Value_IntegerValue{int64(x)} + return true, err + case 2: // type.float_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Type = &Value_FloatValue{math.Float64frombits(x)} + return true, err + case 3: // type.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Type = &Value_StringValue{x} + return true, err + case 4: // type.boolean_value + 
if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Type = &Value_BooleanValue{x != 0} + return true, err + case 5: // type.timestamp_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.Type = &Value_TimestampValue{msg} + return true, err + case 6: // type.time_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timeofday.TimeOfDay) + err := b.DecodeMessage(msg) + m.Type = &Value_TimeValue{msg} + return true, err + case 7: // type.date_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(date.Date) + err := b.DecodeMessage(msg) + m.Type = &Value_DateValue{msg} + return true, err + case 8: // type.day_of_week_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Type = &Value_DayOfWeekValue{dayofweek.DayOfWeek(x)} + return true, err + default: + return false, nil + } +} + +func _Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Value) + // type + switch x := m.Type.(type) { + case *Value_IntegerValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntegerValue)) + case *Value_FloatValue: + n += 1 // tag and wire + n += 8 + case *Value_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case *Value_BooleanValue: + n += 1 // tag and wire + n += 1 + case *Value_TimestampValue: + s := proto.Size(x.TimestampValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_TimeValue: + s := proto.Size(x.TimeValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_DateValue: + s := proto.Size(x.DateValue) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Value_DayOfWeekValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.DayOfWeekValue)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message for infoType-dependent details parsed from quote. +type QuoteInfo struct { + // Object representation of the quote. 
+ // + // Types that are valid to be assigned to ParsedQuote: + // *QuoteInfo_DateTime + ParsedQuote isQuoteInfo_ParsedQuote `protobuf_oneof:"parsed_quote"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuoteInfo) Reset() { *m = QuoteInfo{} } +func (m *QuoteInfo) String() string { return proto.CompactTextString(m) } +func (*QuoteInfo) ProtoMessage() {} +func (*QuoteInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{40} +} +func (m *QuoteInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuoteInfo.Unmarshal(m, b) +} +func (m *QuoteInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuoteInfo.Marshal(b, m, deterministic) +} +func (dst *QuoteInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuoteInfo.Merge(dst, src) +} +func (m *QuoteInfo) XXX_Size() int { + return xxx_messageInfo_QuoteInfo.Size(m) +} +func (m *QuoteInfo) XXX_DiscardUnknown() { + xxx_messageInfo_QuoteInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_QuoteInfo proto.InternalMessageInfo + +type isQuoteInfo_ParsedQuote interface { + isQuoteInfo_ParsedQuote() +} + +type QuoteInfo_DateTime struct { + DateTime *DateTime `protobuf:"bytes,2,opt,name=date_time,json=dateTime,proto3,oneof"` +} + +func (*QuoteInfo_DateTime) isQuoteInfo_ParsedQuote() {} + +func (m *QuoteInfo) GetParsedQuote() isQuoteInfo_ParsedQuote { + if m != nil { + return m.ParsedQuote + } + return nil +} + +func (m *QuoteInfo) GetDateTime() *DateTime { + if x, ok := m.GetParsedQuote().(*QuoteInfo_DateTime); ok { + return x.DateTime + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*QuoteInfo) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _QuoteInfo_OneofMarshaler, _QuoteInfo_OneofUnmarshaler, _QuoteInfo_OneofSizer, []interface{}{ + (*QuoteInfo_DateTime)(nil), + } +} + +func _QuoteInfo_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*QuoteInfo) + // parsed_quote + switch x := m.ParsedQuote.(type) { + case *QuoteInfo_DateTime: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateTime); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("QuoteInfo.ParsedQuote has unexpected type %T", x) + } + return nil +} + +func _QuoteInfo_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*QuoteInfo) + switch tag { + case 2: // parsed_quote.date_time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DateTime) + err := b.DecodeMessage(msg) + m.ParsedQuote = &QuoteInfo_DateTime{msg} + return true, err + default: + return false, nil + } +} + +func _QuoteInfo_OneofSizer(msg proto.Message) (n int) { + m := msg.(*QuoteInfo) + // parsed_quote + switch x := m.ParsedQuote.(type) { + case *QuoteInfo_DateTime: + s := proto.Size(x.DateTime) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message for a date time object. +// e.g. 2018-01-01, 5th August. +type DateTime struct { + // One or more of the following must be set. All fields are optional, but + // when set must be valid date or time values. 
+ Date *date.Date `protobuf:"bytes,1,opt,name=date,proto3" json:"date,omitempty"` + DayOfWeek dayofweek.DayOfWeek `protobuf:"varint,2,opt,name=day_of_week,json=dayOfWeek,proto3,enum=google.type.DayOfWeek" json:"day_of_week,omitempty"` + Time *timeofday.TimeOfDay `protobuf:"bytes,3,opt,name=time,proto3" json:"time,omitempty"` + TimeZone *DateTime_TimeZone `protobuf:"bytes,4,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateTime) Reset() { *m = DateTime{} } +func (m *DateTime) String() string { return proto.CompactTextString(m) } +func (*DateTime) ProtoMessage() {} +func (*DateTime) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{41} +} +func (m *DateTime) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateTime.Unmarshal(m, b) +} +func (m *DateTime) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateTime.Marshal(b, m, deterministic) +} +func (dst *DateTime) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateTime.Merge(dst, src) +} +func (m *DateTime) XXX_Size() int { + return xxx_messageInfo_DateTime.Size(m) +} +func (m *DateTime) XXX_DiscardUnknown() { + xxx_messageInfo_DateTime.DiscardUnknown(m) +} + +var xxx_messageInfo_DateTime proto.InternalMessageInfo + +func (m *DateTime) GetDate() *date.Date { + if m != nil { + return m.Date + } + return nil +} + +func (m *DateTime) GetDayOfWeek() dayofweek.DayOfWeek { + if m != nil { + return m.DayOfWeek + } + return dayofweek.DayOfWeek_DAY_OF_WEEK_UNSPECIFIED +} + +func (m *DateTime) GetTime() *timeofday.TimeOfDay { + if m != nil { + return m.Time + } + return nil +} + +func (m *DateTime) GetTimeZone() *DateTime_TimeZone { + if m != nil { + return m.TimeZone + } + return nil +} + +type DateTime_TimeZone struct { + // Set only if the offset can be determined. Positive for time ahead of UTC. + // E.g. For "UTC-9", this value is -540. + OffsetMinutes int32 `protobuf:"varint,1,opt,name=offset_minutes,json=offsetMinutes,proto3" json:"offset_minutes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateTime_TimeZone) Reset() { *m = DateTime_TimeZone{} } +func (m *DateTime_TimeZone) String() string { return proto.CompactTextString(m) } +func (*DateTime_TimeZone) ProtoMessage() {} +func (*DateTime_TimeZone) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{41, 0} +} +func (m *DateTime_TimeZone) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateTime_TimeZone.Unmarshal(m, b) +} +func (m *DateTime_TimeZone) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateTime_TimeZone.Marshal(b, m, deterministic) +} +func (dst *DateTime_TimeZone) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateTime_TimeZone.Merge(dst, src) +} +func (m *DateTime_TimeZone) XXX_Size() int { + return xxx_messageInfo_DateTime_TimeZone.Size(m) +} +func (m *DateTime_TimeZone) XXX_DiscardUnknown() { + xxx_messageInfo_DateTime_TimeZone.DiscardUnknown(m) +} + +var xxx_messageInfo_DateTime_TimeZone proto.InternalMessageInfo + +func (m *DateTime_TimeZone) GetOffsetMinutes() int32 { + if m != nil { + return m.OffsetMinutes + } + return 0 +} + +// The configuration that controls how the data will change. 
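+// As an illustrative sketch only (field values elided), exactly one of the
+// oneof wrapper types defined below is assigned to Transformation, e.g.:
+//
+//	cfg := &DeidentifyConfig{
+//		Transformation: &DeidentifyConfig_InfoTypeTransformations{
+//			InfoTypeTransformations: &InfoTypeTransformations{},
+//		},
+//	}
+//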
+type DeidentifyConfig struct { + // Types that are valid to be assigned to Transformation: + // *DeidentifyConfig_InfoTypeTransformations + // *DeidentifyConfig_RecordTransformations + Transformation isDeidentifyConfig_Transformation `protobuf_oneof:"transformation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeidentifyConfig) Reset() { *m = DeidentifyConfig{} } +func (m *DeidentifyConfig) String() string { return proto.CompactTextString(m) } +func (*DeidentifyConfig) ProtoMessage() {} +func (*DeidentifyConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{42} +} +func (m *DeidentifyConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeidentifyConfig.Unmarshal(m, b) +} +func (m *DeidentifyConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeidentifyConfig.Marshal(b, m, deterministic) +} +func (dst *DeidentifyConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeidentifyConfig.Merge(dst, src) +} +func (m *DeidentifyConfig) XXX_Size() int { + return xxx_messageInfo_DeidentifyConfig.Size(m) +} +func (m *DeidentifyConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DeidentifyConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DeidentifyConfig proto.InternalMessageInfo + +type isDeidentifyConfig_Transformation interface { + isDeidentifyConfig_Transformation() +} + +type DeidentifyConfig_InfoTypeTransformations struct { + InfoTypeTransformations *InfoTypeTransformations `protobuf:"bytes,1,opt,name=info_type_transformations,json=infoTypeTransformations,proto3,oneof"` +} + +type DeidentifyConfig_RecordTransformations struct { + RecordTransformations *RecordTransformations `protobuf:"bytes,2,opt,name=record_transformations,json=recordTransformations,proto3,oneof"` +} + +func (*DeidentifyConfig_InfoTypeTransformations) isDeidentifyConfig_Transformation() {} + +func (*DeidentifyConfig_RecordTransformations) isDeidentifyConfig_Transformation() {} + +func (m *DeidentifyConfig) GetTransformation() isDeidentifyConfig_Transformation { + if m != nil { + return m.Transformation + } + return nil +} + +func (m *DeidentifyConfig) GetInfoTypeTransformations() *InfoTypeTransformations { + if x, ok := m.GetTransformation().(*DeidentifyConfig_InfoTypeTransformations); ok { + return x.InfoTypeTransformations + } + return nil +} + +func (m *DeidentifyConfig) GetRecordTransformations() *RecordTransformations { + if x, ok := m.GetTransformation().(*DeidentifyConfig_RecordTransformations); ok { + return x.RecordTransformations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*DeidentifyConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DeidentifyConfig_OneofMarshaler, _DeidentifyConfig_OneofUnmarshaler, _DeidentifyConfig_OneofSizer, []interface{}{ + (*DeidentifyConfig_InfoTypeTransformations)(nil), + (*DeidentifyConfig_RecordTransformations)(nil), + } +} + +func _DeidentifyConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DeidentifyConfig) + // transformation + switch x := m.Transformation.(type) { + case *DeidentifyConfig_InfoTypeTransformations: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InfoTypeTransformations); err != nil { + return err + } + case *DeidentifyConfig_RecordTransformations: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RecordTransformations); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DeidentifyConfig.Transformation has unexpected type %T", x) + } + return nil +} + +func _DeidentifyConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DeidentifyConfig) + switch tag { + case 1: // transformation.info_type_transformations + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InfoTypeTransformations) + err := b.DecodeMessage(msg) + m.Transformation = &DeidentifyConfig_InfoTypeTransformations{msg} + return true, err + case 2: // transformation.record_transformations + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RecordTransformations) + err := b.DecodeMessage(msg) + m.Transformation = &DeidentifyConfig_RecordTransformations{msg} + return true, err + default: + return false, nil + } +} + +func _DeidentifyConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DeidentifyConfig) + // transformation + switch x := m.Transformation.(type) { + case *DeidentifyConfig_InfoTypeTransformations: + s := proto.Size(x.InfoTypeTransformations) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DeidentifyConfig_RecordTransformations: + s := proto.Size(x.RecordTransformations) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A rule for transforming a value. 
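+// As an illustrative sketch only (configuration fields elided), the rule is
+// chosen by assigning exactly one of the oneof wrapper types defined below,
+// for example a value replacement:
+//
+//	pt := &PrimitiveTransformation{
+//		Transformation: &PrimitiveTransformation_ReplaceConfig{
+//			ReplaceConfig: &ReplaceValueConfig{},
+//		},
+//	}
+//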
+type PrimitiveTransformation struct { + // Types that are valid to be assigned to Transformation: + // *PrimitiveTransformation_ReplaceConfig + // *PrimitiveTransformation_RedactConfig + // *PrimitiveTransformation_CharacterMaskConfig + // *PrimitiveTransformation_CryptoReplaceFfxFpeConfig + // *PrimitiveTransformation_FixedSizeBucketingConfig + // *PrimitiveTransformation_BucketingConfig + // *PrimitiveTransformation_ReplaceWithInfoTypeConfig + // *PrimitiveTransformation_TimePartConfig + // *PrimitiveTransformation_CryptoHashConfig + // *PrimitiveTransformation_DateShiftConfig + // *PrimitiveTransformation_CryptoDeterministicConfig + Transformation isPrimitiveTransformation_Transformation `protobuf_oneof:"transformation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PrimitiveTransformation) Reset() { *m = PrimitiveTransformation{} } +func (m *PrimitiveTransformation) String() string { return proto.CompactTextString(m) } +func (*PrimitiveTransformation) ProtoMessage() {} +func (*PrimitiveTransformation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{43} +} +func (m *PrimitiveTransformation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PrimitiveTransformation.Unmarshal(m, b) +} +func (m *PrimitiveTransformation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PrimitiveTransformation.Marshal(b, m, deterministic) +} +func (dst *PrimitiveTransformation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PrimitiveTransformation.Merge(dst, src) +} +func (m *PrimitiveTransformation) XXX_Size() int { + return xxx_messageInfo_PrimitiveTransformation.Size(m) +} +func (m *PrimitiveTransformation) XXX_DiscardUnknown() { + xxx_messageInfo_PrimitiveTransformation.DiscardUnknown(m) +} + +var xxx_messageInfo_PrimitiveTransformation proto.InternalMessageInfo + +type isPrimitiveTransformation_Transformation interface { + isPrimitiveTransformation_Transformation() +} + +type PrimitiveTransformation_ReplaceConfig struct { + ReplaceConfig *ReplaceValueConfig `protobuf:"bytes,1,opt,name=replace_config,json=replaceConfig,proto3,oneof"` +} + +type PrimitiveTransformation_RedactConfig struct { + RedactConfig *RedactConfig `protobuf:"bytes,2,opt,name=redact_config,json=redactConfig,proto3,oneof"` +} + +type PrimitiveTransformation_CharacterMaskConfig struct { + CharacterMaskConfig *CharacterMaskConfig `protobuf:"bytes,3,opt,name=character_mask_config,json=characterMaskConfig,proto3,oneof"` +} + +type PrimitiveTransformation_CryptoReplaceFfxFpeConfig struct { + CryptoReplaceFfxFpeConfig *CryptoReplaceFfxFpeConfig `protobuf:"bytes,4,opt,name=crypto_replace_ffx_fpe_config,json=cryptoReplaceFfxFpeConfig,proto3,oneof"` +} + +type PrimitiveTransformation_FixedSizeBucketingConfig struct { + FixedSizeBucketingConfig *FixedSizeBucketingConfig `protobuf:"bytes,5,opt,name=fixed_size_bucketing_config,json=fixedSizeBucketingConfig,proto3,oneof"` +} + +type PrimitiveTransformation_BucketingConfig struct { + BucketingConfig *BucketingConfig `protobuf:"bytes,6,opt,name=bucketing_config,json=bucketingConfig,proto3,oneof"` +} + +type PrimitiveTransformation_ReplaceWithInfoTypeConfig struct { + ReplaceWithInfoTypeConfig *ReplaceWithInfoTypeConfig `protobuf:"bytes,7,opt,name=replace_with_info_type_config,json=replaceWithInfoTypeConfig,proto3,oneof"` +} + +type PrimitiveTransformation_TimePartConfig struct { + TimePartConfig *TimePartConfig 
`protobuf:"bytes,8,opt,name=time_part_config,json=timePartConfig,proto3,oneof"` +} + +type PrimitiveTransformation_CryptoHashConfig struct { + CryptoHashConfig *CryptoHashConfig `protobuf:"bytes,9,opt,name=crypto_hash_config,json=cryptoHashConfig,proto3,oneof"` +} + +type PrimitiveTransformation_DateShiftConfig struct { + DateShiftConfig *DateShiftConfig `protobuf:"bytes,11,opt,name=date_shift_config,json=dateShiftConfig,proto3,oneof"` +} + +type PrimitiveTransformation_CryptoDeterministicConfig struct { + CryptoDeterministicConfig *CryptoDeterministicConfig `protobuf:"bytes,12,opt,name=crypto_deterministic_config,json=cryptoDeterministicConfig,proto3,oneof"` +} + +func (*PrimitiveTransformation_ReplaceConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_RedactConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_CharacterMaskConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_CryptoReplaceFfxFpeConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_FixedSizeBucketingConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_BucketingConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_ReplaceWithInfoTypeConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_TimePartConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_CryptoHashConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_DateShiftConfig) isPrimitiveTransformation_Transformation() {} + +func (*PrimitiveTransformation_CryptoDeterministicConfig) isPrimitiveTransformation_Transformation() {} + +func (m *PrimitiveTransformation) GetTransformation() isPrimitiveTransformation_Transformation { + if m != nil { + return m.Transformation + } + return nil +} + +func (m *PrimitiveTransformation) GetReplaceConfig() *ReplaceValueConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_ReplaceConfig); ok { + return x.ReplaceConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetRedactConfig() *RedactConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_RedactConfig); ok { + return x.RedactConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetCharacterMaskConfig() *CharacterMaskConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_CharacterMaskConfig); ok { + return x.CharacterMaskConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetCryptoReplaceFfxFpeConfig() *CryptoReplaceFfxFpeConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_CryptoReplaceFfxFpeConfig); ok { + return x.CryptoReplaceFfxFpeConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetFixedSizeBucketingConfig() *FixedSizeBucketingConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_FixedSizeBucketingConfig); ok { + return x.FixedSizeBucketingConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetBucketingConfig() *BucketingConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_BucketingConfig); ok { + return x.BucketingConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetReplaceWithInfoTypeConfig() *ReplaceWithInfoTypeConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_ReplaceWithInfoTypeConfig); ok { + return x.ReplaceWithInfoTypeConfig + } + return nil +} + +func (m *PrimitiveTransformation) 
GetTimePartConfig() *TimePartConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_TimePartConfig); ok { + return x.TimePartConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetCryptoHashConfig() *CryptoHashConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_CryptoHashConfig); ok { + return x.CryptoHashConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetDateShiftConfig() *DateShiftConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_DateShiftConfig); ok { + return x.DateShiftConfig + } + return nil +} + +func (m *PrimitiveTransformation) GetCryptoDeterministicConfig() *CryptoDeterministicConfig { + if x, ok := m.GetTransformation().(*PrimitiveTransformation_CryptoDeterministicConfig); ok { + return x.CryptoDeterministicConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PrimitiveTransformation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PrimitiveTransformation_OneofMarshaler, _PrimitiveTransformation_OneofUnmarshaler, _PrimitiveTransformation_OneofSizer, []interface{}{ + (*PrimitiveTransformation_ReplaceConfig)(nil), + (*PrimitiveTransformation_RedactConfig)(nil), + (*PrimitiveTransformation_CharacterMaskConfig)(nil), + (*PrimitiveTransformation_CryptoReplaceFfxFpeConfig)(nil), + (*PrimitiveTransformation_FixedSizeBucketingConfig)(nil), + (*PrimitiveTransformation_BucketingConfig)(nil), + (*PrimitiveTransformation_ReplaceWithInfoTypeConfig)(nil), + (*PrimitiveTransformation_TimePartConfig)(nil), + (*PrimitiveTransformation_CryptoHashConfig)(nil), + (*PrimitiveTransformation_DateShiftConfig)(nil), + (*PrimitiveTransformation_CryptoDeterministicConfig)(nil), + } +} + +func _PrimitiveTransformation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PrimitiveTransformation) + // transformation + switch x := m.Transformation.(type) { + case *PrimitiveTransformation_ReplaceConfig: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReplaceConfig); err != nil { + return err + } + case *PrimitiveTransformation_RedactConfig: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RedactConfig); err != nil { + return err + } + case *PrimitiveTransformation_CharacterMaskConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CharacterMaskConfig); err != nil { + return err + } + case *PrimitiveTransformation_CryptoReplaceFfxFpeConfig: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CryptoReplaceFfxFpeConfig); err != nil { + return err + } + case *PrimitiveTransformation_FixedSizeBucketingConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FixedSizeBucketingConfig); err != nil { + return err + } + case *PrimitiveTransformation_BucketingConfig: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BucketingConfig); err != nil { + return err + } + case *PrimitiveTransformation_ReplaceWithInfoTypeConfig: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReplaceWithInfoTypeConfig); err != nil { + return err + } + case *PrimitiveTransformation_TimePartConfig: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TimePartConfig); err != nil { + return err + } + case *PrimitiveTransformation_CryptoHashConfig: + b.EncodeVarint(9<<3 | 
proto.WireBytes) + if err := b.EncodeMessage(x.CryptoHashConfig); err != nil { + return err + } + case *PrimitiveTransformation_DateShiftConfig: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DateShiftConfig); err != nil { + return err + } + case *PrimitiveTransformation_CryptoDeterministicConfig: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CryptoDeterministicConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PrimitiveTransformation.Transformation has unexpected type %T", x) + } + return nil +} + +func _PrimitiveTransformation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PrimitiveTransformation) + switch tag { + case 1: // transformation.replace_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ReplaceValueConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_ReplaceConfig{msg} + return true, err + case 2: // transformation.redact_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RedactConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_RedactConfig{msg} + return true, err + case 3: // transformation.character_mask_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CharacterMaskConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_CharacterMaskConfig{msg} + return true, err + case 4: // transformation.crypto_replace_ffx_fpe_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CryptoReplaceFfxFpeConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_CryptoReplaceFfxFpeConfig{msg} + return true, err + case 5: // transformation.fixed_size_bucketing_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FixedSizeBucketingConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_FixedSizeBucketingConfig{msg} + return true, err + case 6: // transformation.bucketing_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BucketingConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_BucketingConfig{msg} + return true, err + case 7: // transformation.replace_with_info_type_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ReplaceWithInfoTypeConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_ReplaceWithInfoTypeConfig{msg} + return true, err + case 8: // transformation.time_part_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TimePartConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_TimePartConfig{msg} + return true, err + case 9: // transformation.crypto_hash_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CryptoHashConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_CryptoHashConfig{msg} + return true, err + case 11: // transformation.date_shift_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DateShiftConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_DateShiftConfig{msg} + return true, 
err + case 12: // transformation.crypto_deterministic_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CryptoDeterministicConfig) + err := b.DecodeMessage(msg) + m.Transformation = &PrimitiveTransformation_CryptoDeterministicConfig{msg} + return true, err + default: + return false, nil + } +} + +func _PrimitiveTransformation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PrimitiveTransformation) + // transformation + switch x := m.Transformation.(type) { + case *PrimitiveTransformation_ReplaceConfig: + s := proto.Size(x.ReplaceConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_RedactConfig: + s := proto.Size(x.RedactConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_CharacterMaskConfig: + s := proto.Size(x.CharacterMaskConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_CryptoReplaceFfxFpeConfig: + s := proto.Size(x.CryptoReplaceFfxFpeConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_FixedSizeBucketingConfig: + s := proto.Size(x.FixedSizeBucketingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_BucketingConfig: + s := proto.Size(x.BucketingConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_ReplaceWithInfoTypeConfig: + s := proto.Size(x.ReplaceWithInfoTypeConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_TimePartConfig: + s := proto.Size(x.TimePartConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_CryptoHashConfig: + s := proto.Size(x.CryptoHashConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_DateShiftConfig: + s := proto.Size(x.DateShiftConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PrimitiveTransformation_CryptoDeterministicConfig: + s := proto.Size(x.CryptoDeterministicConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// For use with `Date`, `Timestamp`, and `TimeOfDay`, extract or preserve a +// portion of the value. 
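[Editor's note, illustrative only and not part of the generated file: the oneof getters shown above (GetTimePartConfig, GetCryptoHashConfig, and friends) return the typed config only when that variant is set, and nil otherwise. A minimal sketch of populating and reading the oneof, assuming the usual genproto import path google.golang.org/genproto/googleapis/privacy/dlp/v2 (package dlp); TIME_PART_UNSPECIFIED is used purely as a placeholder value.]

package main

import (
	"fmt"

	dlp "google.golang.org/genproto/googleapis/privacy/dlp/v2"
)

func main() {
	// Wrap a TimePartConfig in the PrimitiveTransformation oneof.
	pt := &dlp.PrimitiveTransformation{
		Transformation: &dlp.PrimitiveTransformation_TimePartConfig{
			TimePartConfig: &dlp.TimePartConfig{
				// Placeholder; a real config would pick the part to keep.
				PartToExtract: dlp.TimePartConfig_TIME_PART_UNSPECIFIED,
			},
		},
	}
	// The generated getter unwraps the oneof; getters for the other
	// variants return nil for this value.
	if cfg := pt.GetTimePartConfig(); cfg != nil {
		fmt.Println(cfg.GetPartToExtract())
	}
}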
+type TimePartConfig struct { + PartToExtract TimePartConfig_TimePart `protobuf:"varint,1,opt,name=part_to_extract,json=partToExtract,proto3,enum=google.privacy.dlp.v2.TimePartConfig_TimePart" json:"part_to_extract,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimePartConfig) Reset() { *m = TimePartConfig{} } +func (m *TimePartConfig) String() string { return proto.CompactTextString(m) } +func (*TimePartConfig) ProtoMessage() {} +func (*TimePartConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{44} +} +func (m *TimePartConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimePartConfig.Unmarshal(m, b) +} +func (m *TimePartConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimePartConfig.Marshal(b, m, deterministic) +} +func (dst *TimePartConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimePartConfig.Merge(dst, src) +} +func (m *TimePartConfig) XXX_Size() int { + return xxx_messageInfo_TimePartConfig.Size(m) +} +func (m *TimePartConfig) XXX_DiscardUnknown() { + xxx_messageInfo_TimePartConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_TimePartConfig proto.InternalMessageInfo + +func (m *TimePartConfig) GetPartToExtract() TimePartConfig_TimePart { + if m != nil { + return m.PartToExtract + } + return TimePartConfig_TIME_PART_UNSPECIFIED +} + +// Pseudonymization method that generates surrogates via cryptographic hashing. +// Uses SHA-256. +// The key size must be either 32 or 64 bytes. +// Outputs a base64 encoded representation of the hashed output +// (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). +// Currently, only string and integer values can be hashed. +// See https://cloud.google.com/dlp/docs/pseudonymization to learn more. +type CryptoHashConfig struct { + // The key used by the hash function. + CryptoKey *CryptoKey `protobuf:"bytes,1,opt,name=crypto_key,json=cryptoKey,proto3" json:"crypto_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CryptoHashConfig) Reset() { *m = CryptoHashConfig{} } +func (m *CryptoHashConfig) String() string { return proto.CompactTextString(m) } +func (*CryptoHashConfig) ProtoMessage() {} +func (*CryptoHashConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{45} +} +func (m *CryptoHashConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CryptoHashConfig.Unmarshal(m, b) +} +func (m *CryptoHashConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CryptoHashConfig.Marshal(b, m, deterministic) +} +func (dst *CryptoHashConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_CryptoHashConfig.Merge(dst, src) +} +func (m *CryptoHashConfig) XXX_Size() int { + return xxx_messageInfo_CryptoHashConfig.Size(m) +} +func (m *CryptoHashConfig) XXX_DiscardUnknown() { + xxx_messageInfo_CryptoHashConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_CryptoHashConfig proto.InternalMessageInfo + +func (m *CryptoHashConfig) GetCryptoKey() *CryptoKey { + if m != nil { + return m.CryptoKey + } + return nil +} + +// Pseudonymization method that generates deterministic encryption for the given +// input. Outputs a base64 encoded representation of the encrypted output. +// Uses AES-SIV based on the RFC https://tools.ietf.org/html/rfc5297. 
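[Editor's note, illustrative only and not part of the generated file: a sketch of wiring the CryptoHashConfig message defined above to an inline (unwrapped) key via the CryptoKey oneof defined later in this file. The import path is assumed to be google.golang.org/genproto/googleapis/privacy/dlp/v2, and the zeroed 32-byte key is a placeholder only.]

package main

import dlp "google.golang.org/genproto/googleapis/privacy/dlp/v2"

func main() {
	// Placeholder 32-byte key; per the CryptoHashConfig comment, the key
	// must be 32 or 64 bytes. A real key must come from a secure source.
	key := make([]byte, 32)
	pt := &dlp.PrimitiveTransformation{
		Transformation: &dlp.PrimitiveTransformation_CryptoHashConfig{
			CryptoHashConfig: &dlp.CryptoHashConfig{
				CryptoKey: &dlp.CryptoKey{
					Source: &dlp.CryptoKey_Unwrapped{
						Unwrapped: &dlp.UnwrappedCryptoKey{Key: key},
					},
				},
			},
		},
	}
	_ = pt
}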
+type CryptoDeterministicConfig struct { + // The key used by the encryption function. + CryptoKey *CryptoKey `protobuf:"bytes,1,opt,name=crypto_key,json=cryptoKey,proto3" json:"crypto_key,omitempty"` + // The custom info type to annotate the surrogate with. + // This annotation will be applied to the surrogate by prefixing it with + // the name of the custom info type followed by the number of + // characters comprising the surrogate. The following scheme defines the + // format: (): + // + // For example, if the name of custom info type is 'MY_TOKEN_INFO_TYPE' and + // the surrogate is 'abc', the full replacement value + // will be: 'MY_TOKEN_INFO_TYPE(3):abc' + // + // This annotation identifies the surrogate when inspecting content using the + // custom info type 'Surrogate'. This facilitates reversal of the + // surrogate when it occurs in free text. + // + // In order for inspection to work properly, the name of this info type must + // not occur naturally anywhere in your data; otherwise, inspection may either + // + // - reverse a surrogate that does not correspond to an actual identifier + // - be unable to parse the surrogate and result in an error + // + // Therefore, choose your custom info type name carefully after considering + // what your data looks like. One way to select a name that has a high chance + // of yielding reliable detection is to include one or more unicode characters + // that are highly improbable to exist in your data. + // For example, assuming your data is entered from a regular ASCII keyboard, + // the symbol with the hex code point 29DD might be used like so: + // ⧝MY_TOKEN_TYPE + SurrogateInfoType *InfoType `protobuf:"bytes,2,opt,name=surrogate_info_type,json=surrogateInfoType,proto3" json:"surrogate_info_type,omitempty"` + // Optional. A context may be used for higher security and maintaining + // referential integrity such that the same identifier in two different + // contexts will be given a distinct surrogate. The context is appended to + // plaintext value being encrypted. On decryption the provided context is + // validated against the value used during encryption. If a context was + // provided during encryption, same context must be provided during decryption + // as well. + // + // If the context is not set, plaintext would be used as is for encryption. + // If the context is set but: + // + // 1. there is no record present when transforming a given value or + // 2. the field is not present when transforming a given value, + // + // plaintext would be used as is for encryption. + // + // Note that case (1) is expected when an `InfoTypeTransformation` is + // applied to both structured and non-structured `ContentItem`s. 
+ Context *FieldId `protobuf:"bytes,3,opt,name=context,proto3" json:"context,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CryptoDeterministicConfig) Reset() { *m = CryptoDeterministicConfig{} } +func (m *CryptoDeterministicConfig) String() string { return proto.CompactTextString(m) } +func (*CryptoDeterministicConfig) ProtoMessage() {} +func (*CryptoDeterministicConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{46} +} +func (m *CryptoDeterministicConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CryptoDeterministicConfig.Unmarshal(m, b) +} +func (m *CryptoDeterministicConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CryptoDeterministicConfig.Marshal(b, m, deterministic) +} +func (dst *CryptoDeterministicConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_CryptoDeterministicConfig.Merge(dst, src) +} +func (m *CryptoDeterministicConfig) XXX_Size() int { + return xxx_messageInfo_CryptoDeterministicConfig.Size(m) +} +func (m *CryptoDeterministicConfig) XXX_DiscardUnknown() { + xxx_messageInfo_CryptoDeterministicConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_CryptoDeterministicConfig proto.InternalMessageInfo + +func (m *CryptoDeterministicConfig) GetCryptoKey() *CryptoKey { + if m != nil { + return m.CryptoKey + } + return nil +} + +func (m *CryptoDeterministicConfig) GetSurrogateInfoType() *InfoType { + if m != nil { + return m.SurrogateInfoType + } + return nil +} + +func (m *CryptoDeterministicConfig) GetContext() *FieldId { + if m != nil { + return m.Context + } + return nil +} + +// Replace each input value with a given `Value`. +type ReplaceValueConfig struct { + // Value to replace it with. + NewValue *Value `protobuf:"bytes,1,opt,name=new_value,json=newValue,proto3" json:"new_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReplaceValueConfig) Reset() { *m = ReplaceValueConfig{} } +func (m *ReplaceValueConfig) String() string { return proto.CompactTextString(m) } +func (*ReplaceValueConfig) ProtoMessage() {} +func (*ReplaceValueConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{47} +} +func (m *ReplaceValueConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReplaceValueConfig.Unmarshal(m, b) +} +func (m *ReplaceValueConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReplaceValueConfig.Marshal(b, m, deterministic) +} +func (dst *ReplaceValueConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReplaceValueConfig.Merge(dst, src) +} +func (m *ReplaceValueConfig) XXX_Size() int { + return xxx_messageInfo_ReplaceValueConfig.Size(m) +} +func (m *ReplaceValueConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ReplaceValueConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ReplaceValueConfig proto.InternalMessageInfo + +func (m *ReplaceValueConfig) GetNewValue() *Value { + if m != nil { + return m.NewValue + } + return nil +} + +// Replace each matching finding with the name of the info_type. 
+type ReplaceWithInfoTypeConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReplaceWithInfoTypeConfig) Reset() { *m = ReplaceWithInfoTypeConfig{} } +func (m *ReplaceWithInfoTypeConfig) String() string { return proto.CompactTextString(m) } +func (*ReplaceWithInfoTypeConfig) ProtoMessage() {} +func (*ReplaceWithInfoTypeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{48} +} +func (m *ReplaceWithInfoTypeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReplaceWithInfoTypeConfig.Unmarshal(m, b) +} +func (m *ReplaceWithInfoTypeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReplaceWithInfoTypeConfig.Marshal(b, m, deterministic) +} +func (dst *ReplaceWithInfoTypeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReplaceWithInfoTypeConfig.Merge(dst, src) +} +func (m *ReplaceWithInfoTypeConfig) XXX_Size() int { + return xxx_messageInfo_ReplaceWithInfoTypeConfig.Size(m) +} +func (m *ReplaceWithInfoTypeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ReplaceWithInfoTypeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ReplaceWithInfoTypeConfig proto.InternalMessageInfo + +// Redact a given value. For example, if used with an `InfoTypeTransformation` +// transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the +// output would be 'My phone number is '. +type RedactConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RedactConfig) Reset() { *m = RedactConfig{} } +func (m *RedactConfig) String() string { return proto.CompactTextString(m) } +func (*RedactConfig) ProtoMessage() {} +func (*RedactConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{49} +} +func (m *RedactConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RedactConfig.Unmarshal(m, b) +} +func (m *RedactConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RedactConfig.Marshal(b, m, deterministic) +} +func (dst *RedactConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_RedactConfig.Merge(dst, src) +} +func (m *RedactConfig) XXX_Size() int { + return xxx_messageInfo_RedactConfig.Size(m) +} +func (m *RedactConfig) XXX_DiscardUnknown() { + xxx_messageInfo_RedactConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_RedactConfig proto.InternalMessageInfo + +// Characters to skip when doing deidentification of a value. These will be left +// alone and skipped. 
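[Editor's note, illustrative only and not part of the generated file: ReplaceWithInfoTypeConfig and RedactConfig above carry no fields, so selecting them as the oneof variant is the entire configuration. A minimal sketch, assuming the usual genproto import path; the generated getters follow the same pattern as those shown earlier in this file.]

package main

import (
	"fmt"

	dlp "google.golang.org/genproto/googleapis/privacy/dlp/v2"
)

func main() {
	// An empty RedactConfig is enough: choosing this oneof variant asks
	// the service to remove each matching finding.
	pt := &dlp.PrimitiveTransformation{
		Transformation: &dlp.PrimitiveTransformation_RedactConfig{
			RedactConfig: &dlp.RedactConfig{},
		},
	}
	fmt.Println(pt.GetRedactConfig() != nil) // true: this variant is set
	fmt.Println(pt.GetTimePartConfig())      // <nil>: other variants' getters return nil
}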
+type CharsToIgnore struct { + // Types that are valid to be assigned to Characters: + // *CharsToIgnore_CharactersToSkip + // *CharsToIgnore_CommonCharactersToIgnore + Characters isCharsToIgnore_Characters `protobuf_oneof:"characters"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CharsToIgnore) Reset() { *m = CharsToIgnore{} } +func (m *CharsToIgnore) String() string { return proto.CompactTextString(m) } +func (*CharsToIgnore) ProtoMessage() {} +func (*CharsToIgnore) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{50} +} +func (m *CharsToIgnore) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CharsToIgnore.Unmarshal(m, b) +} +func (m *CharsToIgnore) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CharsToIgnore.Marshal(b, m, deterministic) +} +func (dst *CharsToIgnore) XXX_Merge(src proto.Message) { + xxx_messageInfo_CharsToIgnore.Merge(dst, src) +} +func (m *CharsToIgnore) XXX_Size() int { + return xxx_messageInfo_CharsToIgnore.Size(m) +} +func (m *CharsToIgnore) XXX_DiscardUnknown() { + xxx_messageInfo_CharsToIgnore.DiscardUnknown(m) +} + +var xxx_messageInfo_CharsToIgnore proto.InternalMessageInfo + +type isCharsToIgnore_Characters interface { + isCharsToIgnore_Characters() +} + +type CharsToIgnore_CharactersToSkip struct { + CharactersToSkip string `protobuf:"bytes,1,opt,name=characters_to_skip,json=charactersToSkip,proto3,oneof"` +} + +type CharsToIgnore_CommonCharactersToIgnore struct { + CommonCharactersToIgnore CharsToIgnore_CommonCharsToIgnore `protobuf:"varint,2,opt,name=common_characters_to_ignore,json=commonCharactersToIgnore,proto3,enum=google.privacy.dlp.v2.CharsToIgnore_CommonCharsToIgnore,oneof"` +} + +func (*CharsToIgnore_CharactersToSkip) isCharsToIgnore_Characters() {} + +func (*CharsToIgnore_CommonCharactersToIgnore) isCharsToIgnore_Characters() {} + +func (m *CharsToIgnore) GetCharacters() isCharsToIgnore_Characters { + if m != nil { + return m.Characters + } + return nil +} + +func (m *CharsToIgnore) GetCharactersToSkip() string { + if x, ok := m.GetCharacters().(*CharsToIgnore_CharactersToSkip); ok { + return x.CharactersToSkip + } + return "" +} + +func (m *CharsToIgnore) GetCommonCharactersToIgnore() CharsToIgnore_CommonCharsToIgnore { + if x, ok := m.GetCharacters().(*CharsToIgnore_CommonCharactersToIgnore); ok { + return x.CommonCharactersToIgnore + } + return CharsToIgnore_COMMON_CHARS_TO_IGNORE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CharsToIgnore) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CharsToIgnore_OneofMarshaler, _CharsToIgnore_OneofUnmarshaler, _CharsToIgnore_OneofSizer, []interface{}{ + (*CharsToIgnore_CharactersToSkip)(nil), + (*CharsToIgnore_CommonCharactersToIgnore)(nil), + } +} + +func _CharsToIgnore_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CharsToIgnore) + // characters + switch x := m.Characters.(type) { + case *CharsToIgnore_CharactersToSkip: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CharactersToSkip) + case *CharsToIgnore_CommonCharactersToIgnore: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CommonCharactersToIgnore)) + case nil: + default: + return fmt.Errorf("CharsToIgnore.Characters has unexpected type %T", x) + } + return nil +} + +func _CharsToIgnore_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CharsToIgnore) + switch tag { + case 1: // characters.characters_to_skip + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Characters = &CharsToIgnore_CharactersToSkip{x} + return true, err + case 2: // characters.common_characters_to_ignore + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Characters = &CharsToIgnore_CommonCharactersToIgnore{CharsToIgnore_CommonCharsToIgnore(x)} + return true, err + default: + return false, nil + } +} + +func _CharsToIgnore_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CharsToIgnore) + // characters + switch x := m.Characters.(type) { + case *CharsToIgnore_CharactersToSkip: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CharactersToSkip))) + n += len(x.CharactersToSkip) + case *CharsToIgnore_CommonCharactersToIgnore: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CommonCharactersToIgnore)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Partially mask a string by replacing a given number of characters with a +// fixed character. Masking can start from the beginning or end of the string. +// This can be used on data of any type (numbers, longs, and so on) and when +// de-identifying structured data we'll attempt to preserve the original data's +// type. (This allows you to take a long like 123 and modify it to a string like +// **3. +type CharacterMaskConfig struct { + // Character to mask the sensitive values—for example, "*" for an + // alphabetic string such as name, or "0" for a numeric string such as ZIP + // code or credit card number. String must have length 1. If not supplied, we + // will default to "*" for strings, 0 for digits. + MaskingCharacter string `protobuf:"bytes,1,opt,name=masking_character,json=maskingCharacter,proto3" json:"masking_character,omitempty"` + // Number of characters to mask. If not set, all matching chars will be + // masked. Skipped characters do not count towards this tally. + NumberToMask int32 `protobuf:"varint,2,opt,name=number_to_mask,json=numberToMask,proto3" json:"number_to_mask,omitempty"` + // Mask characters in reverse order. 
For example, if `masking_character` is + // '0', number_to_mask is 14, and `reverse_order` is false, then + // 1234-5678-9012-3456 -> 00000000000000-3456 + // If `masking_character` is '*', `number_to_mask` is 3, and `reverse_order` + // is true, then 12345 -> 12*** + ReverseOrder bool `protobuf:"varint,3,opt,name=reverse_order,json=reverseOrder,proto3" json:"reverse_order,omitempty"` + // When masking a string, items in this list will be skipped when replacing. + // For example, if your string is 555-555-5555 and you ask us to skip `-` and + // mask 5 chars with * we would produce ***-*55-5555. + CharactersToIgnore []*CharsToIgnore `protobuf:"bytes,4,rep,name=characters_to_ignore,json=charactersToIgnore,proto3" json:"characters_to_ignore,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CharacterMaskConfig) Reset() { *m = CharacterMaskConfig{} } +func (m *CharacterMaskConfig) String() string { return proto.CompactTextString(m) } +func (*CharacterMaskConfig) ProtoMessage() {} +func (*CharacterMaskConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{51} +} +func (m *CharacterMaskConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CharacterMaskConfig.Unmarshal(m, b) +} +func (m *CharacterMaskConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CharacterMaskConfig.Marshal(b, m, deterministic) +} +func (dst *CharacterMaskConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_CharacterMaskConfig.Merge(dst, src) +} +func (m *CharacterMaskConfig) XXX_Size() int { + return xxx_messageInfo_CharacterMaskConfig.Size(m) +} +func (m *CharacterMaskConfig) XXX_DiscardUnknown() { + xxx_messageInfo_CharacterMaskConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_CharacterMaskConfig proto.InternalMessageInfo + +func (m *CharacterMaskConfig) GetMaskingCharacter() string { + if m != nil { + return m.MaskingCharacter + } + return "" +} + +func (m *CharacterMaskConfig) GetNumberToMask() int32 { + if m != nil { + return m.NumberToMask + } + return 0 +} + +func (m *CharacterMaskConfig) GetReverseOrder() bool { + if m != nil { + return m.ReverseOrder + } + return false +} + +func (m *CharacterMaskConfig) GetCharactersToIgnore() []*CharsToIgnore { + if m != nil { + return m.CharactersToIgnore + } + return nil +} + +// Buckets values based on fixed size ranges. The +// Bucketing transformation can provide all of this functionality, +// but requires more configuration. This message is provided as a convenience to +// the user for simple bucketing strategies. +// +// The transformed value will be a hyphenated string of +// -, i.e if lower_bound = 10 and upper_bound = 20 +// all values that are within this bucket will be replaced with "10-20". +// +// This can be used on data of type: double, long. +// +// If the bound Value type differs from the type of data +// being transformed, we will first attempt converting the type of the data to +// be transformed to match the type of the bound before comparing. +// +// See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. +type FixedSizeBucketingConfig struct { + // Lower bound value of buckets. All values less than `lower_bound` are + // grouped together into a single bucket; for example if `lower_bound` = 10, + // then all values less than 10 are replaced with the value “-10”. [Required]. 
+ LowerBound *Value `protobuf:"bytes,1,opt,name=lower_bound,json=lowerBound,proto3" json:"lower_bound,omitempty"` + // Upper bound value of buckets. All values greater than upper_bound are + // grouped together into a single bucket; for example if `upper_bound` = 89, + // then all values greater than 89 are replaced with the value “89+”. + // [Required]. + UpperBound *Value `protobuf:"bytes,2,opt,name=upper_bound,json=upperBound,proto3" json:"upper_bound,omitempty"` + // Size of each bucket (except for minimum and maximum buckets). So if + // `lower_bound` = 10, `upper_bound` = 89, and `bucket_size` = 10, then the + // following buckets would be used: -10, 10-20, 20-30, 30-40, 40-50, 50-60, + // 60-70, 70-80, 80-89, 89+. Precision up to 2 decimals works. [Required]. + BucketSize float64 `protobuf:"fixed64,3,opt,name=bucket_size,json=bucketSize,proto3" json:"bucket_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FixedSizeBucketingConfig) Reset() { *m = FixedSizeBucketingConfig{} } +func (m *FixedSizeBucketingConfig) String() string { return proto.CompactTextString(m) } +func (*FixedSizeBucketingConfig) ProtoMessage() {} +func (*FixedSizeBucketingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{52} +} +func (m *FixedSizeBucketingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FixedSizeBucketingConfig.Unmarshal(m, b) +} +func (m *FixedSizeBucketingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FixedSizeBucketingConfig.Marshal(b, m, deterministic) +} +func (dst *FixedSizeBucketingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_FixedSizeBucketingConfig.Merge(dst, src) +} +func (m *FixedSizeBucketingConfig) XXX_Size() int { + return xxx_messageInfo_FixedSizeBucketingConfig.Size(m) +} +func (m *FixedSizeBucketingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_FixedSizeBucketingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_FixedSizeBucketingConfig proto.InternalMessageInfo + +func (m *FixedSizeBucketingConfig) GetLowerBound() *Value { + if m != nil { + return m.LowerBound + } + return nil +} + +func (m *FixedSizeBucketingConfig) GetUpperBound() *Value { + if m != nil { + return m.UpperBound + } + return nil +} + +func (m *FixedSizeBucketingConfig) GetBucketSize() float64 { + if m != nil { + return m.BucketSize + } + return 0 +} + +// Generalization function that buckets values based on ranges. The ranges and +// replacement values are dynamically provided by the user for custom behavior, +// such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> HIGH +// This can be used on +// data of type: number, long, string, timestamp. +// If the bound `Value` type differs from the type of data being transformed, we +// will first attempt converting the type of the data to be transformed to match +// the type of the bound before comparing. +// See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. +type BucketingConfig struct { + // Set of buckets. Ranges must be non-overlapping. 
+ Buckets []*BucketingConfig_Bucket `protobuf:"bytes,1,rep,name=buckets,proto3" json:"buckets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BucketingConfig) Reset() { *m = BucketingConfig{} } +func (m *BucketingConfig) String() string { return proto.CompactTextString(m) } +func (*BucketingConfig) ProtoMessage() {} +func (*BucketingConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{53} +} +func (m *BucketingConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BucketingConfig.Unmarshal(m, b) +} +func (m *BucketingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BucketingConfig.Marshal(b, m, deterministic) +} +func (dst *BucketingConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_BucketingConfig.Merge(dst, src) +} +func (m *BucketingConfig) XXX_Size() int { + return xxx_messageInfo_BucketingConfig.Size(m) +} +func (m *BucketingConfig) XXX_DiscardUnknown() { + xxx_messageInfo_BucketingConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_BucketingConfig proto.InternalMessageInfo + +func (m *BucketingConfig) GetBuckets() []*BucketingConfig_Bucket { + if m != nil { + return m.Buckets + } + return nil +} + +// Bucket is represented as a range, along with replacement values. +type BucketingConfig_Bucket struct { + // Lower bound of the range, inclusive. Type should be the same as max if + // used. + Min *Value `protobuf:"bytes,1,opt,name=min,proto3" json:"min,omitempty"` + // Upper bound of the range, exclusive; type must match min. + Max *Value `protobuf:"bytes,2,opt,name=max,proto3" json:"max,omitempty"` + // Replacement value for this bucket. If not provided + // the default behavior will be to hyphenate the min-max range. 
+ ReplacementValue *Value `protobuf:"bytes,3,opt,name=replacement_value,json=replacementValue,proto3" json:"replacement_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BucketingConfig_Bucket) Reset() { *m = BucketingConfig_Bucket{} } +func (m *BucketingConfig_Bucket) String() string { return proto.CompactTextString(m) } +func (*BucketingConfig_Bucket) ProtoMessage() {} +func (*BucketingConfig_Bucket) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{53, 0} +} +func (m *BucketingConfig_Bucket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BucketingConfig_Bucket.Unmarshal(m, b) +} +func (m *BucketingConfig_Bucket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BucketingConfig_Bucket.Marshal(b, m, deterministic) +} +func (dst *BucketingConfig_Bucket) XXX_Merge(src proto.Message) { + xxx_messageInfo_BucketingConfig_Bucket.Merge(dst, src) +} +func (m *BucketingConfig_Bucket) XXX_Size() int { + return xxx_messageInfo_BucketingConfig_Bucket.Size(m) +} +func (m *BucketingConfig_Bucket) XXX_DiscardUnknown() { + xxx_messageInfo_BucketingConfig_Bucket.DiscardUnknown(m) +} + +var xxx_messageInfo_BucketingConfig_Bucket proto.InternalMessageInfo + +func (m *BucketingConfig_Bucket) GetMin() *Value { + if m != nil { + return m.Min + } + return nil +} + +func (m *BucketingConfig_Bucket) GetMax() *Value { + if m != nil { + return m.Max + } + return nil +} + +func (m *BucketingConfig_Bucket) GetReplacementValue() *Value { + if m != nil { + return m.ReplacementValue + } + return nil +} + +// Replaces an identifier with a surrogate using Format Preserving Encryption +// (FPE) with the FFX mode of operation; however when used in the +// `ReidentifyContent` API method, it serves the opposite function by reversing +// the surrogate back into the original identifier. The identifier must be +// encoded as ASCII. For a given crypto key and context, the same identifier +// will be replaced with the same surrogate. Identifiers must be at least two +// characters long. In the case that the identifier is the empty string, it will +// be skipped. See https://cloud.google.com/dlp/docs/pseudonymization to learn +// more. +// +// Note: We recommend using CryptoDeterministicConfig for all use cases which +// do not require preserving the input alphabet space and size, plus warrant +// referential integrity. +type CryptoReplaceFfxFpeConfig struct { + // The key used by the encryption algorithm. [required] + CryptoKey *CryptoKey `protobuf:"bytes,1,opt,name=crypto_key,json=cryptoKey,proto3" json:"crypto_key,omitempty"` + // The 'tweak', a context may be used for higher security since the same + // identifier in two different contexts won't be given the same surrogate. If + // the context is not set, a default tweak will be used. + // + // If the context is set but: + // + // 1. there is no record present when transforming a given value or + // 1. the field is not present when transforming a given value, + // + // a default tweak will be used. + // + // Note that case (1) is expected when an `InfoTypeTransformation` is + // applied to both structured and non-structured `ContentItem`s. + // Currently, the referenced field may be of value type integer or string. 
+ // + // The tweak is constructed as a sequence of bytes in big endian byte order + // such that: + // + // - a 64 bit integer is encoded followed by a single byte of value 1 + // - a string is encoded in UTF-8 format followed by a single byte of value 2 + Context *FieldId `protobuf:"bytes,2,opt,name=context,proto3" json:"context,omitempty"` + // Types that are valid to be assigned to Alphabet: + // *CryptoReplaceFfxFpeConfig_CommonAlphabet + // *CryptoReplaceFfxFpeConfig_CustomAlphabet + // *CryptoReplaceFfxFpeConfig_Radix + Alphabet isCryptoReplaceFfxFpeConfig_Alphabet `protobuf_oneof:"alphabet"` + // The custom infoType to annotate the surrogate with. + // This annotation will be applied to the surrogate by prefixing it with + // the name of the custom infoType followed by the number of + // characters comprising the surrogate. The following scheme defines the + // format: info_type_name(surrogate_character_count):surrogate + // + // For example, if the name of custom infoType is 'MY_TOKEN_INFO_TYPE' and + // the surrogate is 'abc', the full replacement value + // will be: 'MY_TOKEN_INFO_TYPE(3):abc' + // + // This annotation identifies the surrogate when inspecting content using the + // custom infoType + // [`SurrogateType`](/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). + // This facilitates reversal of the surrogate when it occurs in free text. + // + // In order for inspection to work properly, the name of this infoType must + // not occur naturally anywhere in your data; otherwise, inspection may + // find a surrogate that does not correspond to an actual identifier. + // Therefore, choose your custom infoType name carefully after considering + // what your data looks like. One way to select a name that has a high chance + // of yielding reliable detection is to include one or more unicode characters + // that are highly improbable to exist in your data. 
+ // For example, assuming your data is entered from a regular ASCII keyboard, + // the symbol with the hex code point 29DD might be used like so: + // ⧝MY_TOKEN_TYPE + SurrogateInfoType *InfoType `protobuf:"bytes,8,opt,name=surrogate_info_type,json=surrogateInfoType,proto3" json:"surrogate_info_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CryptoReplaceFfxFpeConfig) Reset() { *m = CryptoReplaceFfxFpeConfig{} } +func (m *CryptoReplaceFfxFpeConfig) String() string { return proto.CompactTextString(m) } +func (*CryptoReplaceFfxFpeConfig) ProtoMessage() {} +func (*CryptoReplaceFfxFpeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{54} +} +func (m *CryptoReplaceFfxFpeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CryptoReplaceFfxFpeConfig.Unmarshal(m, b) +} +func (m *CryptoReplaceFfxFpeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CryptoReplaceFfxFpeConfig.Marshal(b, m, deterministic) +} +func (dst *CryptoReplaceFfxFpeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_CryptoReplaceFfxFpeConfig.Merge(dst, src) +} +func (m *CryptoReplaceFfxFpeConfig) XXX_Size() int { + return xxx_messageInfo_CryptoReplaceFfxFpeConfig.Size(m) +} +func (m *CryptoReplaceFfxFpeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_CryptoReplaceFfxFpeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_CryptoReplaceFfxFpeConfig proto.InternalMessageInfo + +func (m *CryptoReplaceFfxFpeConfig) GetCryptoKey() *CryptoKey { + if m != nil { + return m.CryptoKey + } + return nil +} + +func (m *CryptoReplaceFfxFpeConfig) GetContext() *FieldId { + if m != nil { + return m.Context + } + return nil +} + +type isCryptoReplaceFfxFpeConfig_Alphabet interface { + isCryptoReplaceFfxFpeConfig_Alphabet() +} + +type CryptoReplaceFfxFpeConfig_CommonAlphabet struct { + CommonAlphabet CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet `protobuf:"varint,4,opt,name=common_alphabet,json=commonAlphabet,proto3,enum=google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet,oneof"` +} + +type CryptoReplaceFfxFpeConfig_CustomAlphabet struct { + CustomAlphabet string `protobuf:"bytes,5,opt,name=custom_alphabet,json=customAlphabet,proto3,oneof"` +} + +type CryptoReplaceFfxFpeConfig_Radix struct { + Radix int32 `protobuf:"varint,6,opt,name=radix,proto3,oneof"` +} + +func (*CryptoReplaceFfxFpeConfig_CommonAlphabet) isCryptoReplaceFfxFpeConfig_Alphabet() {} + +func (*CryptoReplaceFfxFpeConfig_CustomAlphabet) isCryptoReplaceFfxFpeConfig_Alphabet() {} + +func (*CryptoReplaceFfxFpeConfig_Radix) isCryptoReplaceFfxFpeConfig_Alphabet() {} + +func (m *CryptoReplaceFfxFpeConfig) GetAlphabet() isCryptoReplaceFfxFpeConfig_Alphabet { + if m != nil { + return m.Alphabet + } + return nil +} + +func (m *CryptoReplaceFfxFpeConfig) GetCommonAlphabet() CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet { + if x, ok := m.GetAlphabet().(*CryptoReplaceFfxFpeConfig_CommonAlphabet); ok { + return x.CommonAlphabet + } + return CryptoReplaceFfxFpeConfig_FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED +} + +func (m *CryptoReplaceFfxFpeConfig) GetCustomAlphabet() string { + if x, ok := m.GetAlphabet().(*CryptoReplaceFfxFpeConfig_CustomAlphabet); ok { + return x.CustomAlphabet + } + return "" +} + +func (m *CryptoReplaceFfxFpeConfig) GetRadix() int32 { + if x, ok := m.GetAlphabet().(*CryptoReplaceFfxFpeConfig_Radix); ok { + return x.Radix + } + return 0 +} + +func (m 
*CryptoReplaceFfxFpeConfig) GetSurrogateInfoType() *InfoType { + if m != nil { + return m.SurrogateInfoType + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CryptoReplaceFfxFpeConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CryptoReplaceFfxFpeConfig_OneofMarshaler, _CryptoReplaceFfxFpeConfig_OneofUnmarshaler, _CryptoReplaceFfxFpeConfig_OneofSizer, []interface{}{ + (*CryptoReplaceFfxFpeConfig_CommonAlphabet)(nil), + (*CryptoReplaceFfxFpeConfig_CustomAlphabet)(nil), + (*CryptoReplaceFfxFpeConfig_Radix)(nil), + } +} + +func _CryptoReplaceFfxFpeConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CryptoReplaceFfxFpeConfig) + // alphabet + switch x := m.Alphabet.(type) { + case *CryptoReplaceFfxFpeConfig_CommonAlphabet: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.CommonAlphabet)) + case *CryptoReplaceFfxFpeConfig_CustomAlphabet: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.CustomAlphabet) + case *CryptoReplaceFfxFpeConfig_Radix: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Radix)) + case nil: + default: + return fmt.Errorf("CryptoReplaceFfxFpeConfig.Alphabet has unexpected type %T", x) + } + return nil +} + +func _CryptoReplaceFfxFpeConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CryptoReplaceFfxFpeConfig) + switch tag { + case 4: // alphabet.common_alphabet + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Alphabet = &CryptoReplaceFfxFpeConfig_CommonAlphabet{CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet(x)} + return true, err + case 5: // alphabet.custom_alphabet + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Alphabet = &CryptoReplaceFfxFpeConfig_CustomAlphabet{x} + return true, err + case 6: // alphabet.radix + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Alphabet = &CryptoReplaceFfxFpeConfig_Radix{int32(x)} + return true, err + default: + return false, nil + } +} + +func _CryptoReplaceFfxFpeConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CryptoReplaceFfxFpeConfig) + // alphabet + switch x := m.Alphabet.(type) { + case *CryptoReplaceFfxFpeConfig_CommonAlphabet: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.CommonAlphabet)) + case *CryptoReplaceFfxFpeConfig_CustomAlphabet: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.CustomAlphabet))) + n += len(x.CustomAlphabet) + case *CryptoReplaceFfxFpeConfig_Radix: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Radix)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// This is a data encryption key (DEK) (as opposed to +// a key encryption key (KEK) stored by KMS). +// When using KMS to wrap/unwrap DEKs, be sure to set an appropriate +// IAM policy on the KMS CryptoKey (KEK) to ensure an attacker cannot +// unwrap the data crypto key. 
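[Editor's note, illustrative only and not part of the generated file: a sketch of the CryptoReplaceFfxFpeConfig message above keyed by a KMS-wrapped DEK, using the CryptoKey and KmsWrappedCryptoKey messages defined just below. The KMS resource name and wrapped-key bytes are hypothetical placeholders; import path assumed as in the earlier sketches.]

package main

import dlp "google.golang.org/genproto/googleapis/privacy/dlp/v2"

func main() {
	// wrappedDEK stands in for the ciphertext produced by wrapping a data
	// encryption key with the KMS key named below (both are placeholders).
	wrappedDEK := []byte("...")
	ffx := &dlp.CryptoReplaceFfxFpeConfig{
		CryptoKey: &dlp.CryptoKey{
			Source: &dlp.CryptoKey_KmsWrapped{
				KmsWrapped: &dlp.KmsWrappedCryptoKey{
					WrappedKey:    wrappedDEK,
					CryptoKeyName: "projects/p/locations/l/keyRings/r/cryptoKeys/k",
				},
			},
		},
		// The alphabet oneof accepts a common alphabet, a custom alphabet,
		// or a radix; radix 10 means the surrogate uses digits 0-9.
		Alphabet: &dlp.CryptoReplaceFfxFpeConfig_Radix{Radix: 10},
	}
	_ = ffx
}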
+type CryptoKey struct { + // Types that are valid to be assigned to Source: + // *CryptoKey_Transient + // *CryptoKey_Unwrapped + // *CryptoKey_KmsWrapped + Source isCryptoKey_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CryptoKey) Reset() { *m = CryptoKey{} } +func (m *CryptoKey) String() string { return proto.CompactTextString(m) } +func (*CryptoKey) ProtoMessage() {} +func (*CryptoKey) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{55} +} +func (m *CryptoKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CryptoKey.Unmarshal(m, b) +} +func (m *CryptoKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CryptoKey.Marshal(b, m, deterministic) +} +func (dst *CryptoKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_CryptoKey.Merge(dst, src) +} +func (m *CryptoKey) XXX_Size() int { + return xxx_messageInfo_CryptoKey.Size(m) +} +func (m *CryptoKey) XXX_DiscardUnknown() { + xxx_messageInfo_CryptoKey.DiscardUnknown(m) +} + +var xxx_messageInfo_CryptoKey proto.InternalMessageInfo + +type isCryptoKey_Source interface { + isCryptoKey_Source() +} + +type CryptoKey_Transient struct { + Transient *TransientCryptoKey `protobuf:"bytes,1,opt,name=transient,proto3,oneof"` +} + +type CryptoKey_Unwrapped struct { + Unwrapped *UnwrappedCryptoKey `protobuf:"bytes,2,opt,name=unwrapped,proto3,oneof"` +} + +type CryptoKey_KmsWrapped struct { + KmsWrapped *KmsWrappedCryptoKey `protobuf:"bytes,3,opt,name=kms_wrapped,json=kmsWrapped,proto3,oneof"` +} + +func (*CryptoKey_Transient) isCryptoKey_Source() {} + +func (*CryptoKey_Unwrapped) isCryptoKey_Source() {} + +func (*CryptoKey_KmsWrapped) isCryptoKey_Source() {} + +func (m *CryptoKey) GetSource() isCryptoKey_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *CryptoKey) GetTransient() *TransientCryptoKey { + if x, ok := m.GetSource().(*CryptoKey_Transient); ok { + return x.Transient + } + return nil +} + +func (m *CryptoKey) GetUnwrapped() *UnwrappedCryptoKey { + if x, ok := m.GetSource().(*CryptoKey_Unwrapped); ok { + return x.Unwrapped + } + return nil +} + +func (m *CryptoKey) GetKmsWrapped() *KmsWrappedCryptoKey { + if x, ok := m.GetSource().(*CryptoKey_KmsWrapped); ok { + return x.KmsWrapped + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CryptoKey) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CryptoKey_OneofMarshaler, _CryptoKey_OneofUnmarshaler, _CryptoKey_OneofSizer, []interface{}{ + (*CryptoKey_Transient)(nil), + (*CryptoKey_Unwrapped)(nil), + (*CryptoKey_KmsWrapped)(nil), + } +} + +func _CryptoKey_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CryptoKey) + // source + switch x := m.Source.(type) { + case *CryptoKey_Transient: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Transient); err != nil { + return err + } + case *CryptoKey_Unwrapped: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Unwrapped); err != nil { + return err + } + case *CryptoKey_KmsWrapped: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.KmsWrapped); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CryptoKey.Source has unexpected type %T", x) + } + return nil +} + +func _CryptoKey_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CryptoKey) + switch tag { + case 1: // source.transient + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransientCryptoKey) + err := b.DecodeMessage(msg) + m.Source = &CryptoKey_Transient{msg} + return true, err + case 2: // source.unwrapped + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UnwrappedCryptoKey) + err := b.DecodeMessage(msg) + m.Source = &CryptoKey_Unwrapped{msg} + return true, err + case 3: // source.kms_wrapped + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(KmsWrappedCryptoKey) + err := b.DecodeMessage(msg) + m.Source = &CryptoKey_KmsWrapped{msg} + return true, err + default: + return false, nil + } +} + +func _CryptoKey_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CryptoKey) + // source + switch x := m.Source.(type) { + case *CryptoKey_Transient: + s := proto.Size(x.Transient) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CryptoKey_Unwrapped: + s := proto.Size(x.Unwrapped) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CryptoKey_KmsWrapped: + s := proto.Size(x.KmsWrapped) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Use this to have a random data crypto key generated. +// It will be discarded after the request finishes. +type TransientCryptoKey struct { + // Name of the key. [required] + // This is an arbitrary string used to differentiate different keys. + // A unique key is generated per name: two separate `TransientCryptoKey` + // protos share the same generated key if their names are the same. + // When the data crypto key is generated, this name is not used in any way + // (repeating the api call will result in a different key being generated). 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransientCryptoKey) Reset() { *m = TransientCryptoKey{} } +func (m *TransientCryptoKey) String() string { return proto.CompactTextString(m) } +func (*TransientCryptoKey) ProtoMessage() {} +func (*TransientCryptoKey) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{56} +} +func (m *TransientCryptoKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransientCryptoKey.Unmarshal(m, b) +} +func (m *TransientCryptoKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransientCryptoKey.Marshal(b, m, deterministic) +} +func (dst *TransientCryptoKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransientCryptoKey.Merge(dst, src) +} +func (m *TransientCryptoKey) XXX_Size() int { + return xxx_messageInfo_TransientCryptoKey.Size(m) +} +func (m *TransientCryptoKey) XXX_DiscardUnknown() { + xxx_messageInfo_TransientCryptoKey.DiscardUnknown(m) +} + +var xxx_messageInfo_TransientCryptoKey proto.InternalMessageInfo + +func (m *TransientCryptoKey) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Using raw keys is prone to security risks due to accidentally +// leaking the key. Choose another type of key if possible. +type UnwrappedCryptoKey struct { + // A 128/192/256 bit key. [required] + Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UnwrappedCryptoKey) Reset() { *m = UnwrappedCryptoKey{} } +func (m *UnwrappedCryptoKey) String() string { return proto.CompactTextString(m) } +func (*UnwrappedCryptoKey) ProtoMessage() {} +func (*UnwrappedCryptoKey) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{57} +} +func (m *UnwrappedCryptoKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UnwrappedCryptoKey.Unmarshal(m, b) +} +func (m *UnwrappedCryptoKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UnwrappedCryptoKey.Marshal(b, m, deterministic) +} +func (dst *UnwrappedCryptoKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_UnwrappedCryptoKey.Merge(dst, src) +} +func (m *UnwrappedCryptoKey) XXX_Size() int { + return xxx_messageInfo_UnwrappedCryptoKey.Size(m) +} +func (m *UnwrappedCryptoKey) XXX_DiscardUnknown() { + xxx_messageInfo_UnwrappedCryptoKey.DiscardUnknown(m) +} + +var xxx_messageInfo_UnwrappedCryptoKey proto.InternalMessageInfo + +func (m *UnwrappedCryptoKey) GetKey() []byte { + if m != nil { + return m.Key + } + return nil +} + +// Include to use an existing data crypto key wrapped by KMS. +// The wrapped key must be a 128/192/256 bit key. +// Authorization requires the following IAM permissions when sending a request +// to perform a crypto transformation using a kms-wrapped crypto key: +// dlp.kms.encrypt +type KmsWrappedCryptoKey struct { + // The wrapped data crypto key. [required] + WrappedKey []byte `protobuf:"bytes,1,opt,name=wrapped_key,json=wrappedKey,proto3" json:"wrapped_key,omitempty"` + // The resource name of the KMS CryptoKey to use for unwrapping. 
[required] + CryptoKeyName string `protobuf:"bytes,2,opt,name=crypto_key_name,json=cryptoKeyName,proto3" json:"crypto_key_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KmsWrappedCryptoKey) Reset() { *m = KmsWrappedCryptoKey{} } +func (m *KmsWrappedCryptoKey) String() string { return proto.CompactTextString(m) } +func (*KmsWrappedCryptoKey) ProtoMessage() {} +func (*KmsWrappedCryptoKey) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{58} +} +func (m *KmsWrappedCryptoKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KmsWrappedCryptoKey.Unmarshal(m, b) +} +func (m *KmsWrappedCryptoKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KmsWrappedCryptoKey.Marshal(b, m, deterministic) +} +func (dst *KmsWrappedCryptoKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_KmsWrappedCryptoKey.Merge(dst, src) +} +func (m *KmsWrappedCryptoKey) XXX_Size() int { + return xxx_messageInfo_KmsWrappedCryptoKey.Size(m) +} +func (m *KmsWrappedCryptoKey) XXX_DiscardUnknown() { + xxx_messageInfo_KmsWrappedCryptoKey.DiscardUnknown(m) +} + +var xxx_messageInfo_KmsWrappedCryptoKey proto.InternalMessageInfo + +func (m *KmsWrappedCryptoKey) GetWrappedKey() []byte { + if m != nil { + return m.WrappedKey + } + return nil +} + +func (m *KmsWrappedCryptoKey) GetCryptoKeyName() string { + if m != nil { + return m.CryptoKeyName + } + return "" +} + +// Shifts dates by random number of days, with option to be consistent for the +// same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting +// to learn more. +type DateShiftConfig struct { + // Range of shift in days. Actual shift will be selected at random within this + // range (inclusive ends). Negative means shift to earlier in time. Must not + // be more than 365250 days (1000 years) each direction. + // + // For example, 3 means shift date to at most 3 days into the future. + // [Required] + UpperBoundDays int32 `protobuf:"varint,1,opt,name=upper_bound_days,json=upperBoundDays,proto3" json:"upper_bound_days,omitempty"` + // For example, -5 means shift date to at most 5 days back in the past. + // [Required] + LowerBoundDays int32 `protobuf:"varint,2,opt,name=lower_bound_days,json=lowerBoundDays,proto3" json:"lower_bound_days,omitempty"` + // Points to the field that contains the context, for example, an entity id. + // If set, must also set method. If set, shift will be consistent for the + // given context. + Context *FieldId `protobuf:"bytes,3,opt,name=context,proto3" json:"context,omitempty"` + // Method for calculating shift that takes context into consideration. If + // set, must also set context. Can only be applied to table items. 
+ // + // Types that are valid to be assigned to Method: + // *DateShiftConfig_CryptoKey + Method isDateShiftConfig_Method `protobuf_oneof:"method"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DateShiftConfig) Reset() { *m = DateShiftConfig{} } +func (m *DateShiftConfig) String() string { return proto.CompactTextString(m) } +func (*DateShiftConfig) ProtoMessage() {} +func (*DateShiftConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{59} +} +func (m *DateShiftConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DateShiftConfig.Unmarshal(m, b) +} +func (m *DateShiftConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DateShiftConfig.Marshal(b, m, deterministic) +} +func (dst *DateShiftConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_DateShiftConfig.Merge(dst, src) +} +func (m *DateShiftConfig) XXX_Size() int { + return xxx_messageInfo_DateShiftConfig.Size(m) +} +func (m *DateShiftConfig) XXX_DiscardUnknown() { + xxx_messageInfo_DateShiftConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_DateShiftConfig proto.InternalMessageInfo + +func (m *DateShiftConfig) GetUpperBoundDays() int32 { + if m != nil { + return m.UpperBoundDays + } + return 0 +} + +func (m *DateShiftConfig) GetLowerBoundDays() int32 { + if m != nil { + return m.LowerBoundDays + } + return 0 +} + +func (m *DateShiftConfig) GetContext() *FieldId { + if m != nil { + return m.Context + } + return nil +} + +type isDateShiftConfig_Method interface { + isDateShiftConfig_Method() +} + +type DateShiftConfig_CryptoKey struct { + CryptoKey *CryptoKey `protobuf:"bytes,4,opt,name=crypto_key,json=cryptoKey,proto3,oneof"` +} + +func (*DateShiftConfig_CryptoKey) isDateShiftConfig_Method() {} + +func (m *DateShiftConfig) GetMethod() isDateShiftConfig_Method { + if m != nil { + return m.Method + } + return nil +} + +func (m *DateShiftConfig) GetCryptoKey() *CryptoKey { + if x, ok := m.GetMethod().(*DateShiftConfig_CryptoKey); ok { + return x.CryptoKey + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
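[Editor's note, illustrative only and not part of the generated file: a sketch of the DateShiftConfig message above that shifts dates by at most 30 days in either direction and keeps the shift consistent per record by pairing a context field with a transient crypto key. The field name "user_id" is a hypothetical example; import path assumed as in the earlier sketches.]

package main

import dlp "google.golang.org/genproto/googleapis/privacy/dlp/v2"

func main() {
	shift := &dlp.DateShiftConfig{
		// Shift each date by a random amount between -30 and +30 days.
		UpperBoundDays: 30,
		LowerBoundDays: -30,
		// Context plus a crypto key makes the shift deterministic per
		// entity; "user_id" is a hypothetical field name.
		Context: &dlp.FieldId{Name: "user_id"},
		Method: &dlp.DateShiftConfig_CryptoKey{
			CryptoKey: &dlp.CryptoKey{
				Source: &dlp.CryptoKey_Transient{
					Transient: &dlp.TransientCryptoKey{Name: "date-shift-key"},
				},
			},
		},
	}
	_ = shift
}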
+func (*DateShiftConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DateShiftConfig_OneofMarshaler, _DateShiftConfig_OneofUnmarshaler, _DateShiftConfig_OneofSizer, []interface{}{ + (*DateShiftConfig_CryptoKey)(nil), + } +} + +func _DateShiftConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DateShiftConfig) + // method + switch x := m.Method.(type) { + case *DateShiftConfig_CryptoKey: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CryptoKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DateShiftConfig.Method has unexpected type %T", x) + } + return nil +} + +func _DateShiftConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DateShiftConfig) + switch tag { + case 4: // method.crypto_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CryptoKey) + err := b.DecodeMessage(msg) + m.Method = &DateShiftConfig_CryptoKey{msg} + return true, err + default: + return false, nil + } +} + +func _DateShiftConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DateShiftConfig) + // method + switch x := m.Method.(type) { + case *DateShiftConfig_CryptoKey: + s := proto.Size(x.CryptoKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A type of transformation that will scan unstructured text and +// apply various `PrimitiveTransformation`s to each finding, where the +// transformation is applied to only values that were identified as a specific +// info_type. +type InfoTypeTransformations struct { + // Transformation for each infoType. Cannot specify more than one + // for a given infoType. 
[required] + Transformations []*InfoTypeTransformations_InfoTypeTransformation `protobuf:"bytes,1,rep,name=transformations,proto3" json:"transformations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InfoTypeTransformations) Reset() { *m = InfoTypeTransformations{} } +func (m *InfoTypeTransformations) String() string { return proto.CompactTextString(m) } +func (*InfoTypeTransformations) ProtoMessage() {} +func (*InfoTypeTransformations) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{60} +} +func (m *InfoTypeTransformations) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InfoTypeTransformations.Unmarshal(m, b) +} +func (m *InfoTypeTransformations) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InfoTypeTransformations.Marshal(b, m, deterministic) +} +func (dst *InfoTypeTransformations) XXX_Merge(src proto.Message) { + xxx_messageInfo_InfoTypeTransformations.Merge(dst, src) +} +func (m *InfoTypeTransformations) XXX_Size() int { + return xxx_messageInfo_InfoTypeTransformations.Size(m) +} +func (m *InfoTypeTransformations) XXX_DiscardUnknown() { + xxx_messageInfo_InfoTypeTransformations.DiscardUnknown(m) +} + +var xxx_messageInfo_InfoTypeTransformations proto.InternalMessageInfo + +func (m *InfoTypeTransformations) GetTransformations() []*InfoTypeTransformations_InfoTypeTransformation { + if m != nil { + return m.Transformations + } + return nil +} + +// A transformation to apply to text that is identified as a specific +// info_type. +type InfoTypeTransformations_InfoTypeTransformation struct { + // InfoTypes to apply the transformation to. An empty list will cause + // this transformation to apply to all findings that correspond to + // infoTypes that were requested in `InspectConfig`. + InfoTypes []*InfoType `protobuf:"bytes,1,rep,name=info_types,json=infoTypes,proto3" json:"info_types,omitempty"` + // Primitive transformation to apply to the infoType. 
[required] + PrimitiveTransformation *PrimitiveTransformation `protobuf:"bytes,2,opt,name=primitive_transformation,json=primitiveTransformation,proto3" json:"primitive_transformation,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InfoTypeTransformations_InfoTypeTransformation) Reset() { + *m = InfoTypeTransformations_InfoTypeTransformation{} +} +func (m *InfoTypeTransformations_InfoTypeTransformation) String() string { + return proto.CompactTextString(m) +} +func (*InfoTypeTransformations_InfoTypeTransformation) ProtoMessage() {} +func (*InfoTypeTransformations_InfoTypeTransformation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{60, 0} +} +func (m *InfoTypeTransformations_InfoTypeTransformation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation.Unmarshal(m, b) +} +func (m *InfoTypeTransformations_InfoTypeTransformation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation.Marshal(b, m, deterministic) +} +func (dst *InfoTypeTransformations_InfoTypeTransformation) XXX_Merge(src proto.Message) { + xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation.Merge(dst, src) +} +func (m *InfoTypeTransformations_InfoTypeTransformation) XXX_Size() int { + return xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation.Size(m) +} +func (m *InfoTypeTransformations_InfoTypeTransformation) XXX_DiscardUnknown() { + xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation.DiscardUnknown(m) +} + +var xxx_messageInfo_InfoTypeTransformations_InfoTypeTransformation proto.InternalMessageInfo + +func (m *InfoTypeTransformations_InfoTypeTransformation) GetInfoTypes() []*InfoType { + if m != nil { + return m.InfoTypes + } + return nil +} + +func (m *InfoTypeTransformations_InfoTypeTransformation) GetPrimitiveTransformation() *PrimitiveTransformation { + if m != nil { + return m.PrimitiveTransformation + } + return nil +} + +// The transformation to apply to the field. +type FieldTransformation struct { + // Input field(s) to apply the transformation to. [required] + Fields []*FieldId `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` + // Only apply the transformation if the condition evaluates to true for the + // given `RecordCondition`. The conditions are allowed to reference fields + // that are not used in the actual transformation. [optional] + // + // Example Use Cases: + // + // - Apply a different bucket transformation to an age column if the zip code + // column for the same record is within a specific range. + // - Redact a field if the date of birth field is greater than 85. + Condition *RecordCondition `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` + // Transformation to apply. 
[required] + // + // Types that are valid to be assigned to Transformation: + // *FieldTransformation_PrimitiveTransformation + // *FieldTransformation_InfoTypeTransformations + Transformation isFieldTransformation_Transformation `protobuf_oneof:"transformation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldTransformation) Reset() { *m = FieldTransformation{} } +func (m *FieldTransformation) String() string { return proto.CompactTextString(m) } +func (*FieldTransformation) ProtoMessage() {} +func (*FieldTransformation) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{61} +} +func (m *FieldTransformation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldTransformation.Unmarshal(m, b) +} +func (m *FieldTransformation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldTransformation.Marshal(b, m, deterministic) +} +func (dst *FieldTransformation) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldTransformation.Merge(dst, src) +} +func (m *FieldTransformation) XXX_Size() int { + return xxx_messageInfo_FieldTransformation.Size(m) +} +func (m *FieldTransformation) XXX_DiscardUnknown() { + xxx_messageInfo_FieldTransformation.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldTransformation proto.InternalMessageInfo + +func (m *FieldTransformation) GetFields() []*FieldId { + if m != nil { + return m.Fields + } + return nil +} + +func (m *FieldTransformation) GetCondition() *RecordCondition { + if m != nil { + return m.Condition + } + return nil +} + +type isFieldTransformation_Transformation interface { + isFieldTransformation_Transformation() +} + +type FieldTransformation_PrimitiveTransformation struct { + PrimitiveTransformation *PrimitiveTransformation `protobuf:"bytes,4,opt,name=primitive_transformation,json=primitiveTransformation,proto3,oneof"` +} + +type FieldTransformation_InfoTypeTransformations struct { + InfoTypeTransformations *InfoTypeTransformations `protobuf:"bytes,5,opt,name=info_type_transformations,json=infoTypeTransformations,proto3,oneof"` +} + +func (*FieldTransformation_PrimitiveTransformation) isFieldTransformation_Transformation() {} + +func (*FieldTransformation_InfoTypeTransformations) isFieldTransformation_Transformation() {} + +func (m *FieldTransformation) GetTransformation() isFieldTransformation_Transformation { + if m != nil { + return m.Transformation + } + return nil +} + +func (m *FieldTransformation) GetPrimitiveTransformation() *PrimitiveTransformation { + if x, ok := m.GetTransformation().(*FieldTransformation_PrimitiveTransformation); ok { + return x.PrimitiveTransformation + } + return nil +} + +func (m *FieldTransformation) GetInfoTypeTransformations() *InfoTypeTransformations { + if x, ok := m.GetTransformation().(*FieldTransformation_InfoTypeTransformations); ok { + return x.InfoTypeTransformations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
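+
+// Illustrative sketch (not part of the generated code): a FieldTransformation
+// that applies a primitive transformation to a single column, guarded by a
+// record condition. The PrimitiveTransformation and RecordCondition contents
+// are elided; see those message types for the available options.
+//
+//    ft := &FieldTransformation{
+//        Fields:    []*FieldId{{Name: "ssn"}},
+//        Condition: &RecordCondition{ /* e.g. only when another field matches */ },
+//        Transformation: &FieldTransformation_PrimitiveTransformation{
+//            PrimitiveTransformation: &PrimitiveTransformation{ /* e.g. redact or replace */ },
+//        },
+//    }
+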
+func (*FieldTransformation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FieldTransformation_OneofMarshaler, _FieldTransformation_OneofUnmarshaler, _FieldTransformation_OneofSizer, []interface{}{ + (*FieldTransformation_PrimitiveTransformation)(nil), + (*FieldTransformation_InfoTypeTransformations)(nil), + } +} + +func _FieldTransformation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FieldTransformation) + // transformation + switch x := m.Transformation.(type) { + case *FieldTransformation_PrimitiveTransformation: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PrimitiveTransformation); err != nil { + return err + } + case *FieldTransformation_InfoTypeTransformations: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InfoTypeTransformations); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("FieldTransformation.Transformation has unexpected type %T", x) + } + return nil +} + +func _FieldTransformation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FieldTransformation) + switch tag { + case 4: // transformation.primitive_transformation + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PrimitiveTransformation) + err := b.DecodeMessage(msg) + m.Transformation = &FieldTransformation_PrimitiveTransformation{msg} + return true, err + case 5: // transformation.info_type_transformations + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InfoTypeTransformations) + err := b.DecodeMessage(msg) + m.Transformation = &FieldTransformation_InfoTypeTransformations{msg} + return true, err + default: + return false, nil + } +} + +func _FieldTransformation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FieldTransformation) + // transformation + switch x := m.Transformation.(type) { + case *FieldTransformation_PrimitiveTransformation: + s := proto.Size(x.PrimitiveTransformation) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldTransformation_InfoTypeTransformations: + s := proto.Size(x.InfoTypeTransformations) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A type of transformation that is applied over structured data such as a +// table. +type RecordTransformations struct { + // Transform the record by applying various field transformations. + FieldTransformations []*FieldTransformation `protobuf:"bytes,1,rep,name=field_transformations,json=fieldTransformations,proto3" json:"field_transformations,omitempty"` + // Configuration defining which records get suppressed entirely. Records that + // match any suppression rule are omitted from the output [optional]. 
+ RecordSuppressions []*RecordSuppression `protobuf:"bytes,2,rep,name=record_suppressions,json=recordSuppressions,proto3" json:"record_suppressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordTransformations) Reset() { *m = RecordTransformations{} } +func (m *RecordTransformations) String() string { return proto.CompactTextString(m) } +func (*RecordTransformations) ProtoMessage() {} +func (*RecordTransformations) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{62} +} +func (m *RecordTransformations) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordTransformations.Unmarshal(m, b) +} +func (m *RecordTransformations) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordTransformations.Marshal(b, m, deterministic) +} +func (dst *RecordTransformations) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordTransformations.Merge(dst, src) +} +func (m *RecordTransformations) XXX_Size() int { + return xxx_messageInfo_RecordTransformations.Size(m) +} +func (m *RecordTransformations) XXX_DiscardUnknown() { + xxx_messageInfo_RecordTransformations.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordTransformations proto.InternalMessageInfo + +func (m *RecordTransformations) GetFieldTransformations() []*FieldTransformation { + if m != nil { + return m.FieldTransformations + } + return nil +} + +func (m *RecordTransformations) GetRecordSuppressions() []*RecordSuppression { + if m != nil { + return m.RecordSuppressions + } + return nil +} + +// Configuration to suppress records whose suppression conditions evaluate to +// true. +type RecordSuppression struct { + // A condition that when it evaluates to true will result in the record being + // evaluated to be suppressed from the transformed content. + Condition *RecordCondition `protobuf:"bytes,1,opt,name=condition,proto3" json:"condition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordSuppression) Reset() { *m = RecordSuppression{} } +func (m *RecordSuppression) String() string { return proto.CompactTextString(m) } +func (*RecordSuppression) ProtoMessage() {} +func (*RecordSuppression) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{63} +} +func (m *RecordSuppression) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordSuppression.Unmarshal(m, b) +} +func (m *RecordSuppression) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordSuppression.Marshal(b, m, deterministic) +} +func (dst *RecordSuppression) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordSuppression.Merge(dst, src) +} +func (m *RecordSuppression) XXX_Size() int { + return xxx_messageInfo_RecordSuppression.Size(m) +} +func (m *RecordSuppression) XXX_DiscardUnknown() { + xxx_messageInfo_RecordSuppression.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordSuppression proto.InternalMessageInfo + +func (m *RecordSuppression) GetCondition() *RecordCondition { + if m != nil { + return m.Condition + } + return nil +} + +// A condition for determining whether a transformation should be applied to +// a field. +type RecordCondition struct { + // An expression. 
+ Expressions *RecordCondition_Expressions `protobuf:"bytes,3,opt,name=expressions,proto3" json:"expressions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordCondition) Reset() { *m = RecordCondition{} } +func (m *RecordCondition) String() string { return proto.CompactTextString(m) } +func (*RecordCondition) ProtoMessage() {} +func (*RecordCondition) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{64} +} +func (m *RecordCondition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordCondition.Unmarshal(m, b) +} +func (m *RecordCondition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordCondition.Marshal(b, m, deterministic) +} +func (dst *RecordCondition) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordCondition.Merge(dst, src) +} +func (m *RecordCondition) XXX_Size() int { + return xxx_messageInfo_RecordCondition.Size(m) +} +func (m *RecordCondition) XXX_DiscardUnknown() { + xxx_messageInfo_RecordCondition.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordCondition proto.InternalMessageInfo + +func (m *RecordCondition) GetExpressions() *RecordCondition_Expressions { + if m != nil { + return m.Expressions + } + return nil +} + +// The field type of `value` and `field` do not need to match to be +// considered equal, but not all comparisons are possible. +// EQUAL_TO and NOT_EQUAL_TO attempt to compare even with incompatible types, +// but all other comparisons are invalid with incompatible types. +// A `value` of type: +// +// - `string` can be compared against all other types +// - `boolean` can only be compared against other booleans +// - `integer` can be compared against doubles or a string if the string value +// can be parsed as an integer. +// - `double` can be compared against integers or a string if the string can +// be parsed as a double. +// - `Timestamp` can be compared against strings in RFC 3339 date string +// format. +// - `TimeOfDay` can be compared against timestamps and strings in the format +// of 'HH:mm:ss'. +// +// If we fail to compare do to type mismatch, a warning will be given and +// the condition will evaluate to false. +type RecordCondition_Condition struct { + // Field within the record this condition is evaluated against. [required] + Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // Operator used to compare the field or infoType to the value. [required] + Operator RelationalOperator `protobuf:"varint,3,opt,name=operator,proto3,enum=google.privacy.dlp.v2.RelationalOperator" json:"operator,omitempty"` + // Value to compare against. [Required, except for `EXISTS` tests.] 
+ Value *Value `protobuf:"bytes,4,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordCondition_Condition) Reset() { *m = RecordCondition_Condition{} } +func (m *RecordCondition_Condition) String() string { return proto.CompactTextString(m) } +func (*RecordCondition_Condition) ProtoMessage() {} +func (*RecordCondition_Condition) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{64, 0} +} +func (m *RecordCondition_Condition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordCondition_Condition.Unmarshal(m, b) +} +func (m *RecordCondition_Condition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordCondition_Condition.Marshal(b, m, deterministic) +} +func (dst *RecordCondition_Condition) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordCondition_Condition.Merge(dst, src) +} +func (m *RecordCondition_Condition) XXX_Size() int { + return xxx_messageInfo_RecordCondition_Condition.Size(m) +} +func (m *RecordCondition_Condition) XXX_DiscardUnknown() { + xxx_messageInfo_RecordCondition_Condition.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordCondition_Condition proto.InternalMessageInfo + +func (m *RecordCondition_Condition) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +func (m *RecordCondition_Condition) GetOperator() RelationalOperator { + if m != nil { + return m.Operator + } + return RelationalOperator_RELATIONAL_OPERATOR_UNSPECIFIED +} + +func (m *RecordCondition_Condition) GetValue() *Value { + if m != nil { + return m.Value + } + return nil +} + +// A collection of conditions. +type RecordCondition_Conditions struct { + Conditions []*RecordCondition_Condition `protobuf:"bytes,1,rep,name=conditions,proto3" json:"conditions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordCondition_Conditions) Reset() { *m = RecordCondition_Conditions{} } +func (m *RecordCondition_Conditions) String() string { return proto.CompactTextString(m) } +func (*RecordCondition_Conditions) ProtoMessage() {} +func (*RecordCondition_Conditions) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{64, 1} +} +func (m *RecordCondition_Conditions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordCondition_Conditions.Unmarshal(m, b) +} +func (m *RecordCondition_Conditions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordCondition_Conditions.Marshal(b, m, deterministic) +} +func (dst *RecordCondition_Conditions) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordCondition_Conditions.Merge(dst, src) +} +func (m *RecordCondition_Conditions) XXX_Size() int { + return xxx_messageInfo_RecordCondition_Conditions.Size(m) +} +func (m *RecordCondition_Conditions) XXX_DiscardUnknown() { + xxx_messageInfo_RecordCondition_Conditions.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordCondition_Conditions proto.InternalMessageInfo + +func (m *RecordCondition_Conditions) GetConditions() []*RecordCondition_Condition { + if m != nil { + return m.Conditions + } + return nil +} + +// An expression, consisting or an operator and conditions. +type RecordCondition_Expressions struct { + // The operator to apply to the result of conditions. Default and currently + // only supported value is `AND`. 
+ LogicalOperator RecordCondition_Expressions_LogicalOperator `protobuf:"varint,1,opt,name=logical_operator,json=logicalOperator,proto3,enum=google.privacy.dlp.v2.RecordCondition_Expressions_LogicalOperator" json:"logical_operator,omitempty"` + // Types that are valid to be assigned to Type: + // *RecordCondition_Expressions_Conditions + Type isRecordCondition_Expressions_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordCondition_Expressions) Reset() { *m = RecordCondition_Expressions{} } +func (m *RecordCondition_Expressions) String() string { return proto.CompactTextString(m) } +func (*RecordCondition_Expressions) ProtoMessage() {} +func (*RecordCondition_Expressions) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{64, 2} +} +func (m *RecordCondition_Expressions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordCondition_Expressions.Unmarshal(m, b) +} +func (m *RecordCondition_Expressions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordCondition_Expressions.Marshal(b, m, deterministic) +} +func (dst *RecordCondition_Expressions) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordCondition_Expressions.Merge(dst, src) +} +func (m *RecordCondition_Expressions) XXX_Size() int { + return xxx_messageInfo_RecordCondition_Expressions.Size(m) +} +func (m *RecordCondition_Expressions) XXX_DiscardUnknown() { + xxx_messageInfo_RecordCondition_Expressions.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordCondition_Expressions proto.InternalMessageInfo + +func (m *RecordCondition_Expressions) GetLogicalOperator() RecordCondition_Expressions_LogicalOperator { + if m != nil { + return m.LogicalOperator + } + return RecordCondition_Expressions_LOGICAL_OPERATOR_UNSPECIFIED +} + +type isRecordCondition_Expressions_Type interface { + isRecordCondition_Expressions_Type() +} + +type RecordCondition_Expressions_Conditions struct { + Conditions *RecordCondition_Conditions `protobuf:"bytes,3,opt,name=conditions,proto3,oneof"` +} + +func (*RecordCondition_Expressions_Conditions) isRecordCondition_Expressions_Type() {} + +func (m *RecordCondition_Expressions) GetType() isRecordCondition_Expressions_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *RecordCondition_Expressions) GetConditions() *RecordCondition_Conditions { + if x, ok := m.GetType().(*RecordCondition_Expressions_Conditions); ok { + return x.Conditions + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
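+
+// Illustrative sketch (not part of the generated code): a RecordCondition that
+// ANDs two field comparisons. Such a condition can be attached to a
+// FieldTransformation (Condition) or to a RecordSuppression. The AND,
+// EQUAL_TO and GREATER_THAN constant names are assumed from the enums declared
+// earlier in this file, and the Value contents are elided.
+//
+//    cond := &RecordCondition{
+//        Expressions: &RecordCondition_Expressions{
+//            LogicalOperator: RecordCondition_Expressions_AND,
+//            Type: &RecordCondition_Expressions_Conditions{
+//                Conditions: &RecordCondition_Conditions{
+//                    Conditions: []*RecordCondition_Condition{
+//                        {Field: &FieldId{Name: "country"}, Operator: RelationalOperator_EQUAL_TO, Value: &Value{ /* "US" */ }},
+//                        {Field: &FieldId{Name: "age"}, Operator: RelationalOperator_GREATER_THAN, Value: &Value{ /* 85 */ }},
+//                    },
+//                },
+//            },
+//        },
+//    }
+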
+func (*RecordCondition_Expressions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RecordCondition_Expressions_OneofMarshaler, _RecordCondition_Expressions_OneofUnmarshaler, _RecordCondition_Expressions_OneofSizer, []interface{}{ + (*RecordCondition_Expressions_Conditions)(nil), + } +} + +func _RecordCondition_Expressions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RecordCondition_Expressions) + // type + switch x := m.Type.(type) { + case *RecordCondition_Expressions_Conditions: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Conditions); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RecordCondition_Expressions.Type has unexpected type %T", x) + } + return nil +} + +func _RecordCondition_Expressions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RecordCondition_Expressions) + switch tag { + case 3: // type.conditions + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RecordCondition_Conditions) + err := b.DecodeMessage(msg) + m.Type = &RecordCondition_Expressions_Conditions{msg} + return true, err + default: + return false, nil + } +} + +func _RecordCondition_Expressions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RecordCondition_Expressions) + // type + switch x := m.Type.(type) { + case *RecordCondition_Expressions_Conditions: + s := proto.Size(x.Conditions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Overview of the modifications that occurred. +type TransformationOverview struct { + // Total size in bytes that were transformed in some way. + TransformedBytes int64 `protobuf:"varint,2,opt,name=transformed_bytes,json=transformedBytes,proto3" json:"transformed_bytes,omitempty"` + // Transformations applied to the dataset. 
+ TransformationSummaries []*TransformationSummary `protobuf:"bytes,3,rep,name=transformation_summaries,json=transformationSummaries,proto3" json:"transformation_summaries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransformationOverview) Reset() { *m = TransformationOverview{} } +func (m *TransformationOverview) String() string { return proto.CompactTextString(m) } +func (*TransformationOverview) ProtoMessage() {} +func (*TransformationOverview) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{65} +} +func (m *TransformationOverview) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransformationOverview.Unmarshal(m, b) +} +func (m *TransformationOverview) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransformationOverview.Marshal(b, m, deterministic) +} +func (dst *TransformationOverview) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransformationOverview.Merge(dst, src) +} +func (m *TransformationOverview) XXX_Size() int { + return xxx_messageInfo_TransformationOverview.Size(m) +} +func (m *TransformationOverview) XXX_DiscardUnknown() { + xxx_messageInfo_TransformationOverview.DiscardUnknown(m) +} + +var xxx_messageInfo_TransformationOverview proto.InternalMessageInfo + +func (m *TransformationOverview) GetTransformedBytes() int64 { + if m != nil { + return m.TransformedBytes + } + return 0 +} + +func (m *TransformationOverview) GetTransformationSummaries() []*TransformationSummary { + if m != nil { + return m.TransformationSummaries + } + return nil +} + +// Summary of a single transformation. +// Only one of 'transformation', 'field_transformation', or 'record_suppress' +// will be set. +type TransformationSummary struct { + // Set if the transformation was limited to a specific InfoType. + InfoType *InfoType `protobuf:"bytes,1,opt,name=info_type,json=infoType,proto3" json:"info_type,omitempty"` + // Set if the transformation was limited to a specific FieldId. + Field *FieldId `protobuf:"bytes,2,opt,name=field,proto3" json:"field,omitempty"` + // The specific transformation these stats apply to. + Transformation *PrimitiveTransformation `protobuf:"bytes,3,opt,name=transformation,proto3" json:"transformation,omitempty"` + // The field transformation that was applied. + // If multiple field transformations are requested for a single field, + // this list will contain all of them; otherwise, only one is supplied. + FieldTransformations []*FieldTransformation `protobuf:"bytes,5,rep,name=field_transformations,json=fieldTransformations,proto3" json:"field_transformations,omitempty"` + // The specific suppression option these stats apply to. + RecordSuppress *RecordSuppression `protobuf:"bytes,6,opt,name=record_suppress,json=recordSuppress,proto3" json:"record_suppress,omitempty"` + Results []*TransformationSummary_SummaryResult `protobuf:"bytes,4,rep,name=results,proto3" json:"results,omitempty"` + // Total size in bytes that were transformed in some way. 
+ TransformedBytes int64 `protobuf:"varint,7,opt,name=transformed_bytes,json=transformedBytes,proto3" json:"transformed_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransformationSummary) Reset() { *m = TransformationSummary{} } +func (m *TransformationSummary) String() string { return proto.CompactTextString(m) } +func (*TransformationSummary) ProtoMessage() {} +func (*TransformationSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{66} +} +func (m *TransformationSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransformationSummary.Unmarshal(m, b) +} +func (m *TransformationSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransformationSummary.Marshal(b, m, deterministic) +} +func (dst *TransformationSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransformationSummary.Merge(dst, src) +} +func (m *TransformationSummary) XXX_Size() int { + return xxx_messageInfo_TransformationSummary.Size(m) +} +func (m *TransformationSummary) XXX_DiscardUnknown() { + xxx_messageInfo_TransformationSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_TransformationSummary proto.InternalMessageInfo + +func (m *TransformationSummary) GetInfoType() *InfoType { + if m != nil { + return m.InfoType + } + return nil +} + +func (m *TransformationSummary) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +func (m *TransformationSummary) GetTransformation() *PrimitiveTransformation { + if m != nil { + return m.Transformation + } + return nil +} + +func (m *TransformationSummary) GetFieldTransformations() []*FieldTransformation { + if m != nil { + return m.FieldTransformations + } + return nil +} + +func (m *TransformationSummary) GetRecordSuppress() *RecordSuppression { + if m != nil { + return m.RecordSuppress + } + return nil +} + +func (m *TransformationSummary) GetResults() []*TransformationSummary_SummaryResult { + if m != nil { + return m.Results + } + return nil +} + +func (m *TransformationSummary) GetTransformedBytes() int64 { + if m != nil { + return m.TransformedBytes + } + return 0 +} + +// A collection that informs the user the number of times a particular +// `TransformationResultCode` and error details occurred. +type TransformationSummary_SummaryResult struct { + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + Code TransformationSummary_TransformationResultCode `protobuf:"varint,2,opt,name=code,proto3,enum=google.privacy.dlp.v2.TransformationSummary_TransformationResultCode" json:"code,omitempty"` + // A place for warnings or errors to show up if a transformation didn't + // work as expected. 
+ Details string `protobuf:"bytes,3,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransformationSummary_SummaryResult) Reset() { *m = TransformationSummary_SummaryResult{} } +func (m *TransformationSummary_SummaryResult) String() string { return proto.CompactTextString(m) } +func (*TransformationSummary_SummaryResult) ProtoMessage() {} +func (*TransformationSummary_SummaryResult) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{66, 0} +} +func (m *TransformationSummary_SummaryResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransformationSummary_SummaryResult.Unmarshal(m, b) +} +func (m *TransformationSummary_SummaryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransformationSummary_SummaryResult.Marshal(b, m, deterministic) +} +func (dst *TransformationSummary_SummaryResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransformationSummary_SummaryResult.Merge(dst, src) +} +func (m *TransformationSummary_SummaryResult) XXX_Size() int { + return xxx_messageInfo_TransformationSummary_SummaryResult.Size(m) +} +func (m *TransformationSummary_SummaryResult) XXX_DiscardUnknown() { + xxx_messageInfo_TransformationSummary_SummaryResult.DiscardUnknown(m) +} + +var xxx_messageInfo_TransformationSummary_SummaryResult proto.InternalMessageInfo + +func (m *TransformationSummary_SummaryResult) GetCount() int64 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *TransformationSummary_SummaryResult) GetCode() TransformationSummary_TransformationResultCode { + if m != nil { + return m.Code + } + return TransformationSummary_TRANSFORMATION_RESULT_CODE_UNSPECIFIED +} + +func (m *TransformationSummary_SummaryResult) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +// Schedule for triggeredJobs. 
+type Schedule struct { + // Types that are valid to be assigned to Option: + // *Schedule_RecurrencePeriodDuration + Option isSchedule_Option `protobuf_oneof:"option"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Schedule) Reset() { *m = Schedule{} } +func (m *Schedule) String() string { return proto.CompactTextString(m) } +func (*Schedule) ProtoMessage() {} +func (*Schedule) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{67} +} +func (m *Schedule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Schedule.Unmarshal(m, b) +} +func (m *Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Schedule.Marshal(b, m, deterministic) +} +func (dst *Schedule) XXX_Merge(src proto.Message) { + xxx_messageInfo_Schedule.Merge(dst, src) +} +func (m *Schedule) XXX_Size() int { + return xxx_messageInfo_Schedule.Size(m) +} +func (m *Schedule) XXX_DiscardUnknown() { + xxx_messageInfo_Schedule.DiscardUnknown(m) +} + +var xxx_messageInfo_Schedule proto.InternalMessageInfo + +type isSchedule_Option interface { + isSchedule_Option() +} + +type Schedule_RecurrencePeriodDuration struct { + RecurrencePeriodDuration *duration.Duration `protobuf:"bytes,1,opt,name=recurrence_period_duration,json=recurrencePeriodDuration,proto3,oneof"` +} + +func (*Schedule_RecurrencePeriodDuration) isSchedule_Option() {} + +func (m *Schedule) GetOption() isSchedule_Option { + if m != nil { + return m.Option + } + return nil +} + +func (m *Schedule) GetRecurrencePeriodDuration() *duration.Duration { + if x, ok := m.GetOption().(*Schedule_RecurrencePeriodDuration); ok { + return x.RecurrencePeriodDuration + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
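+
+// Illustrative sketch (not part of the generated code): a Schedule that fires
+// once every 24 hours, expressed through the recurrence_period_duration oneof.
+// duration.Duration is the protobuf Duration type already imported by this
+// file.
+//
+//    daily := &Schedule{
+//        Option: &Schedule_RecurrencePeriodDuration{
+//            RecurrencePeriodDuration: &duration.Duration{Seconds: 24 * 60 * 60},
+//        },
+//    }
+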
+func (*Schedule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Schedule_OneofMarshaler, _Schedule_OneofUnmarshaler, _Schedule_OneofSizer, []interface{}{ + (*Schedule_RecurrencePeriodDuration)(nil), + } +} + +func _Schedule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Schedule) + // option + switch x := m.Option.(type) { + case *Schedule_RecurrencePeriodDuration: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RecurrencePeriodDuration); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Schedule.Option has unexpected type %T", x) + } + return nil +} + +func _Schedule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Schedule) + switch tag { + case 1: // option.recurrence_period_duration + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.Option = &Schedule_RecurrencePeriodDuration{msg} + return true, err + default: + return false, nil + } +} + +func _Schedule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Schedule) + // option + switch x := m.Option.(type) { + case *Schedule_RecurrencePeriodDuration: + s := proto.Size(x.RecurrencePeriodDuration) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The inspectTemplate contains a configuration (set of types of sensitive data +// to be detected) to be used anywhere you otherwise would normally specify +// InspectConfig. See https://cloud.google.com/dlp/docs/concepts-templates +// to learn more. +type InspectTemplate struct { + // The template name. Output only. + // + // The template will have one of the following formats: + // `projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID` OR + // `organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Display name (max 256 chars). + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Short description (max 256 chars). + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The creation timestamp of a inspectTemplate, output only field. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The last update timestamp of a inspectTemplate, output only field. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The core content of the template. Configuration of the scanning process. 
+ InspectConfig *InspectConfig `protobuf:"bytes,6,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectTemplate) Reset() { *m = InspectTemplate{} } +func (m *InspectTemplate) String() string { return proto.CompactTextString(m) } +func (*InspectTemplate) ProtoMessage() {} +func (*InspectTemplate) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{68} +} +func (m *InspectTemplate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectTemplate.Unmarshal(m, b) +} +func (m *InspectTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectTemplate.Marshal(b, m, deterministic) +} +func (dst *InspectTemplate) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectTemplate.Merge(dst, src) +} +func (m *InspectTemplate) XXX_Size() int { + return xxx_messageInfo_InspectTemplate.Size(m) +} +func (m *InspectTemplate) XXX_DiscardUnknown() { + xxx_messageInfo_InspectTemplate.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectTemplate proto.InternalMessageInfo + +func (m *InspectTemplate) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InspectTemplate) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *InspectTemplate) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *InspectTemplate) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *InspectTemplate) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *InspectTemplate) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +// The DeidentifyTemplates contains instructions on how to deidentify content. +// See https://cloud.google.com/dlp/docs/concepts-templates to learn more. +type DeidentifyTemplate struct { + // The template name. Output only. + // + // The template will have one of the following formats: + // `projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID` OR + // `organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Display name (max 256 chars). + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Short description (max 256 chars). + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The creation timestamp of a inspectTemplate, output only field. + CreateTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The last update timestamp of a inspectTemplate, output only field. 
+ UpdateTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // ///////////// // The core content of the template // /////////////// + DeidentifyConfig *DeidentifyConfig `protobuf:"bytes,6,opt,name=deidentify_config,json=deidentifyConfig,proto3" json:"deidentify_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeidentifyTemplate) Reset() { *m = DeidentifyTemplate{} } +func (m *DeidentifyTemplate) String() string { return proto.CompactTextString(m) } +func (*DeidentifyTemplate) ProtoMessage() {} +func (*DeidentifyTemplate) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{69} +} +func (m *DeidentifyTemplate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeidentifyTemplate.Unmarshal(m, b) +} +func (m *DeidentifyTemplate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeidentifyTemplate.Marshal(b, m, deterministic) +} +func (dst *DeidentifyTemplate) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeidentifyTemplate.Merge(dst, src) +} +func (m *DeidentifyTemplate) XXX_Size() int { + return xxx_messageInfo_DeidentifyTemplate.Size(m) +} +func (m *DeidentifyTemplate) XXX_DiscardUnknown() { + xxx_messageInfo_DeidentifyTemplate.DiscardUnknown(m) +} + +var xxx_messageInfo_DeidentifyTemplate proto.InternalMessageInfo + +func (m *DeidentifyTemplate) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DeidentifyTemplate) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *DeidentifyTemplate) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *DeidentifyTemplate) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *DeidentifyTemplate) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *DeidentifyTemplate) GetDeidentifyConfig() *DeidentifyConfig { + if m != nil { + return m.DeidentifyConfig + } + return nil +} + +// Details information about an error encountered during job execution or +// the results of an unsuccessful activation of the JobTrigger. +// Output only field. +type Error struct { + Details *status.Status `protobuf:"bytes,1,opt,name=details,proto3" json:"details,omitempty"` + // The times the error occurred. 
+ Timestamps []*timestamp.Timestamp `protobuf:"bytes,2,rep,name=timestamps,proto3" json:"timestamps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Error) Reset() { *m = Error{} } +func (m *Error) String() string { return proto.CompactTextString(m) } +func (*Error) ProtoMessage() {} +func (*Error) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{70} +} +func (m *Error) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Error.Unmarshal(m, b) +} +func (m *Error) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Error.Marshal(b, m, deterministic) +} +func (dst *Error) XXX_Merge(src proto.Message) { + xxx_messageInfo_Error.Merge(dst, src) +} +func (m *Error) XXX_Size() int { + return xxx_messageInfo_Error.Size(m) +} +func (m *Error) XXX_DiscardUnknown() { + xxx_messageInfo_Error.DiscardUnknown(m) +} + +var xxx_messageInfo_Error proto.InternalMessageInfo + +func (m *Error) GetDetails() *status.Status { + if m != nil { + return m.Details + } + return nil +} + +func (m *Error) GetTimestamps() []*timestamp.Timestamp { + if m != nil { + return m.Timestamps + } + return nil +} + +// Contains a configuration to make dlp api calls on a repeating basis. +// See https://cloud.google.com/dlp/docs/concepts-job-triggers to learn more. +type JobTrigger struct { + // Unique resource name for the triggeredJob, assigned by the service when the + // triggeredJob is created, for example + // `projects/dlp-test-project/triggeredJobs/53234423`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Display name (max 100 chars) + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // User provided description (max 256 chars) + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // The configuration details for the specific type of job to run. + // + // Types that are valid to be assigned to Job: + // *JobTrigger_InspectJob + Job isJobTrigger_Job `protobuf_oneof:"job"` + // A list of triggers which will be OR'ed together. Only one in the list + // needs to trigger for a job to be started. The list may contain only + // a single Schedule trigger and must have at least one object. + Triggers []*JobTrigger_Trigger `protobuf:"bytes,5,rep,name=triggers,proto3" json:"triggers,omitempty"` + // A stream of errors encountered when the trigger was activated. Repeated + // errors may result in the JobTrigger automatically being paused. + // Will return the last 100 errors. Whenever the JobTrigger is modified + // this list will be cleared. Output only field. + Errors []*Error `protobuf:"bytes,6,rep,name=errors,proto3" json:"errors,omitempty"` + // The creation timestamp of a triggeredJob, output only field. + CreateTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // The last update timestamp of a triggeredJob, output only field. + UpdateTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + // The timestamp of the last time this trigger executed, output only field. + LastRunTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=last_run_time,json=lastRunTime,proto3" json:"last_run_time,omitempty"` + // A status for this trigger. 
[required] + Status JobTrigger_Status `protobuf:"varint,10,opt,name=status,proto3,enum=google.privacy.dlp.v2.JobTrigger_Status" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobTrigger) Reset() { *m = JobTrigger{} } +func (m *JobTrigger) String() string { return proto.CompactTextString(m) } +func (*JobTrigger) ProtoMessage() {} +func (*JobTrigger) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{71} +} +func (m *JobTrigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobTrigger.Unmarshal(m, b) +} +func (m *JobTrigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobTrigger.Marshal(b, m, deterministic) +} +func (dst *JobTrigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobTrigger.Merge(dst, src) +} +func (m *JobTrigger) XXX_Size() int { + return xxx_messageInfo_JobTrigger.Size(m) +} +func (m *JobTrigger) XXX_DiscardUnknown() { + xxx_messageInfo_JobTrigger.DiscardUnknown(m) +} + +var xxx_messageInfo_JobTrigger proto.InternalMessageInfo + +func (m *JobTrigger) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *JobTrigger) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *JobTrigger) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isJobTrigger_Job interface { + isJobTrigger_Job() +} + +type JobTrigger_InspectJob struct { + InspectJob *InspectJobConfig `protobuf:"bytes,4,opt,name=inspect_job,json=inspectJob,proto3,oneof"` +} + +func (*JobTrigger_InspectJob) isJobTrigger_Job() {} + +func (m *JobTrigger) GetJob() isJobTrigger_Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *JobTrigger) GetInspectJob() *InspectJobConfig { + if x, ok := m.GetJob().(*JobTrigger_InspectJob); ok { + return x.InspectJob + } + return nil +} + +func (m *JobTrigger) GetTriggers() []*JobTrigger_Trigger { + if m != nil { + return m.Triggers + } + return nil +} + +func (m *JobTrigger) GetErrors() []*Error { + if m != nil { + return m.Errors + } + return nil +} + +func (m *JobTrigger) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *JobTrigger) GetUpdateTime() *timestamp.Timestamp { + if m != nil { + return m.UpdateTime + } + return nil +} + +func (m *JobTrigger) GetLastRunTime() *timestamp.Timestamp { + if m != nil { + return m.LastRunTime + } + return nil +} + +func (m *JobTrigger) GetStatus() JobTrigger_Status { + if m != nil { + return m.Status + } + return JobTrigger_STATUS_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
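+
+// Illustrative sketch (not part of the generated code): a JobTrigger that runs
+// an inspect job on the daily Schedule sketched above. InspectJobConfig is
+// assumed from the rest of this file and its contents (storage and inspect
+// configuration, actions) are elided; the Status field is omitted for brevity.
+//
+//    trigger := &JobTrigger{
+//        DisplayName: "nightly-scan",
+//        Description: "Inspect the staging bucket every night",
+//        Job: &JobTrigger_InspectJob{
+//            InspectJob: &InspectJobConfig{ /* storage_config, inspect_config, actions */ },
+//        },
+//        Triggers: []*JobTrigger_Trigger{
+//            {Trigger: &JobTrigger_Trigger_Schedule{Schedule: daily /* see Schedule sketch above */}},
+//        },
+//    }
+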
+func (*JobTrigger) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _JobTrigger_OneofMarshaler, _JobTrigger_OneofUnmarshaler, _JobTrigger_OneofSizer, []interface{}{ + (*JobTrigger_InspectJob)(nil), + } +} + +func _JobTrigger_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*JobTrigger) + // job + switch x := m.Job.(type) { + case *JobTrigger_InspectJob: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InspectJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("JobTrigger.Job has unexpected type %T", x) + } + return nil +} + +func _JobTrigger_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*JobTrigger) + switch tag { + case 4: // job.inspect_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InspectJobConfig) + err := b.DecodeMessage(msg) + m.Job = &JobTrigger_InspectJob{msg} + return true, err + default: + return false, nil + } +} + +func _JobTrigger_OneofSizer(msg proto.Message) (n int) { + m := msg.(*JobTrigger) + // job + switch x := m.Job.(type) { + case *JobTrigger_InspectJob: + s := proto.Size(x.InspectJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// What event needs to occur for a new job to be started. +type JobTrigger_Trigger struct { + // Types that are valid to be assigned to Trigger: + // *JobTrigger_Trigger_Schedule + Trigger isJobTrigger_Trigger_Trigger `protobuf_oneof:"trigger"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *JobTrigger_Trigger) Reset() { *m = JobTrigger_Trigger{} } +func (m *JobTrigger_Trigger) String() string { return proto.CompactTextString(m) } +func (*JobTrigger_Trigger) ProtoMessage() {} +func (*JobTrigger_Trigger) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{71, 0} +} +func (m *JobTrigger_Trigger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_JobTrigger_Trigger.Unmarshal(m, b) +} +func (m *JobTrigger_Trigger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_JobTrigger_Trigger.Marshal(b, m, deterministic) +} +func (dst *JobTrigger_Trigger) XXX_Merge(src proto.Message) { + xxx_messageInfo_JobTrigger_Trigger.Merge(dst, src) +} +func (m *JobTrigger_Trigger) XXX_Size() int { + return xxx_messageInfo_JobTrigger_Trigger.Size(m) +} +func (m *JobTrigger_Trigger) XXX_DiscardUnknown() { + xxx_messageInfo_JobTrigger_Trigger.DiscardUnknown(m) +} + +var xxx_messageInfo_JobTrigger_Trigger proto.InternalMessageInfo + +type isJobTrigger_Trigger_Trigger interface { + isJobTrigger_Trigger_Trigger() +} + +type JobTrigger_Trigger_Schedule struct { + Schedule *Schedule `protobuf:"bytes,1,opt,name=schedule,proto3,oneof"` +} + +func (*JobTrigger_Trigger_Schedule) isJobTrigger_Trigger_Trigger() {} + +func (m *JobTrigger_Trigger) GetTrigger() isJobTrigger_Trigger_Trigger { + if m != nil { + return m.Trigger + } + return nil +} + +func (m *JobTrigger_Trigger) GetSchedule() *Schedule { + if x, ok := m.GetTrigger().(*JobTrigger_Trigger_Schedule); ok { + return x.Schedule + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*JobTrigger_Trigger) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _JobTrigger_Trigger_OneofMarshaler, _JobTrigger_Trigger_OneofUnmarshaler, _JobTrigger_Trigger_OneofSizer, []interface{}{ + (*JobTrigger_Trigger_Schedule)(nil), + } +} + +func _JobTrigger_Trigger_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*JobTrigger_Trigger) + // trigger + switch x := m.Trigger.(type) { + case *JobTrigger_Trigger_Schedule: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Schedule); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("JobTrigger_Trigger.Trigger has unexpected type %T", x) + } + return nil +} + +func _JobTrigger_Trigger_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*JobTrigger_Trigger) + switch tag { + case 1: // trigger.schedule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Schedule) + err := b.DecodeMessage(msg) + m.Trigger = &JobTrigger_Trigger_Schedule{msg} + return true, err + default: + return false, nil + } +} + +func _JobTrigger_Trigger_OneofSizer(msg proto.Message) (n int) { + m := msg.(*JobTrigger_Trigger) + // trigger + switch x := m.Trigger.(type) { + case *JobTrigger_Trigger_Schedule: + s := proto.Size(x.Schedule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A task to execute on the completion of a job. +// See https://cloud.google.com/dlp/docs/concepts-actions to learn more. +type Action struct { + // Types that are valid to be assigned to Action: + // *Action_SaveFindings_ + // *Action_PubSub + // *Action_PublishSummaryToCscc_ + // *Action_JobNotificationEmails_ + Action isAction_Action `protobuf_oneof:"action"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action) Reset() { *m = Action{} } +func (m *Action) String() string { return proto.CompactTextString(m) } +func (*Action) ProtoMessage() {} +func (*Action) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{72} +} +func (m *Action) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action.Unmarshal(m, b) +} +func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action.Marshal(b, m, deterministic) +} +func (dst *Action) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action.Merge(dst, src) +} +func (m *Action) XXX_Size() int { + return xxx_messageInfo_Action.Size(m) +} +func (m *Action) XXX_DiscardUnknown() { + xxx_messageInfo_Action.DiscardUnknown(m) +} + +var xxx_messageInfo_Action proto.InternalMessageInfo + +type isAction_Action interface { + isAction_Action() +} + +type Action_SaveFindings_ struct { + SaveFindings *Action_SaveFindings `protobuf:"bytes,1,opt,name=save_findings,json=saveFindings,proto3,oneof"` +} + +type Action_PubSub struct { + PubSub *Action_PublishToPubSub `protobuf:"bytes,2,opt,name=pub_sub,json=pubSub,proto3,oneof"` +} + +type Action_PublishSummaryToCscc_ struct { + PublishSummaryToCscc *Action_PublishSummaryToCscc `protobuf:"bytes,3,opt,name=publish_summary_to_cscc,json=publishSummaryToCscc,proto3,oneof"` +} + +type Action_JobNotificationEmails_ struct { + JobNotificationEmails 
*Action_JobNotificationEmails `protobuf:"bytes,8,opt,name=job_notification_emails,json=jobNotificationEmails,proto3,oneof"` +} + +func (*Action_SaveFindings_) isAction_Action() {} + +func (*Action_PubSub) isAction_Action() {} + +func (*Action_PublishSummaryToCscc_) isAction_Action() {} + +func (*Action_JobNotificationEmails_) isAction_Action() {} + +func (m *Action) GetAction() isAction_Action { + if m != nil { + return m.Action + } + return nil +} + +func (m *Action) GetSaveFindings() *Action_SaveFindings { + if x, ok := m.GetAction().(*Action_SaveFindings_); ok { + return x.SaveFindings + } + return nil +} + +func (m *Action) GetPubSub() *Action_PublishToPubSub { + if x, ok := m.GetAction().(*Action_PubSub); ok { + return x.PubSub + } + return nil +} + +func (m *Action) GetPublishSummaryToCscc() *Action_PublishSummaryToCscc { + if x, ok := m.GetAction().(*Action_PublishSummaryToCscc_); ok { + return x.PublishSummaryToCscc + } + return nil +} + +func (m *Action) GetJobNotificationEmails() *Action_JobNotificationEmails { + if x, ok := m.GetAction().(*Action_JobNotificationEmails_); ok { + return x.JobNotificationEmails + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Action) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Action_OneofMarshaler, _Action_OneofUnmarshaler, _Action_OneofSizer, []interface{}{ + (*Action_SaveFindings_)(nil), + (*Action_PubSub)(nil), + (*Action_PublishSummaryToCscc_)(nil), + (*Action_JobNotificationEmails_)(nil), + } +} + +func _Action_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Action) + // action + switch x := m.Action.(type) { + case *Action_SaveFindings_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SaveFindings); err != nil { + return err + } + case *Action_PubSub: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PubSub); err != nil { + return err + } + case *Action_PublishSummaryToCscc_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PublishSummaryToCscc); err != nil { + return err + } + case *Action_JobNotificationEmails_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.JobNotificationEmails); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Action.Action has unexpected type %T", x) + } + return nil +} + +func _Action_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Action) + switch tag { + case 1: // action.save_findings + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Action_SaveFindings) + err := b.DecodeMessage(msg) + m.Action = &Action_SaveFindings_{msg} + return true, err + case 2: // action.pub_sub + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Action_PublishToPubSub) + err := b.DecodeMessage(msg) + m.Action = &Action_PubSub{msg} + return true, err + case 3: // action.publish_summary_to_cscc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Action_PublishSummaryToCscc) + err := b.DecodeMessage(msg) + m.Action = &Action_PublishSummaryToCscc_{msg} + return true, err + case 8: // action.job_notification_emails + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Action_JobNotificationEmails) + err := 
b.DecodeMessage(msg) + m.Action = &Action_JobNotificationEmails_{msg} + return true, err + default: + return false, nil + } +} + +func _Action_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Action) + // action + switch x := m.Action.(type) { + case *Action_SaveFindings_: + s := proto.Size(x.SaveFindings) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Action_PubSub: + s := proto.Size(x.PubSub) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Action_PublishSummaryToCscc_: + s := proto.Size(x.PublishSummaryToCscc) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Action_JobNotificationEmails_: + s := proto.Size(x.JobNotificationEmails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// If set, the detailed findings will be persisted to the specified +// OutputStorageConfig. Only a single instance of this action can be +// specified. +// Compatible with: Inspect, Risk +type Action_SaveFindings struct { + OutputConfig *OutputStorageConfig `protobuf:"bytes,1,opt,name=output_config,json=outputConfig,proto3" json:"output_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action_SaveFindings) Reset() { *m = Action_SaveFindings{} } +func (m *Action_SaveFindings) String() string { return proto.CompactTextString(m) } +func (*Action_SaveFindings) ProtoMessage() {} +func (*Action_SaveFindings) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{72, 0} +} +func (m *Action_SaveFindings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action_SaveFindings.Unmarshal(m, b) +} +func (m *Action_SaveFindings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action_SaveFindings.Marshal(b, m, deterministic) +} +func (dst *Action_SaveFindings) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action_SaveFindings.Merge(dst, src) +} +func (m *Action_SaveFindings) XXX_Size() int { + return xxx_messageInfo_Action_SaveFindings.Size(m) +} +func (m *Action_SaveFindings) XXX_DiscardUnknown() { + xxx_messageInfo_Action_SaveFindings.DiscardUnknown(m) +} + +var xxx_messageInfo_Action_SaveFindings proto.InternalMessageInfo + +func (m *Action_SaveFindings) GetOutputConfig() *OutputStorageConfig { + if m != nil { + return m.OutputConfig + } + return nil +} + +// Publish the results of a DlpJob to a pub sub channel. +// Compatible with: Inspect, Risk +type Action_PublishToPubSub struct { + // Cloud Pub/Sub topic to send notifications to. The topic must have given + // publishing access rights to the DLP API service account executing + // the long running DlpJob sending the notifications. + // Format is projects/{project}/topics/{topic}. 
+ Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action_PublishToPubSub) Reset() { *m = Action_PublishToPubSub{} } +func (m *Action_PublishToPubSub) String() string { return proto.CompactTextString(m) } +func (*Action_PublishToPubSub) ProtoMessage() {} +func (*Action_PublishToPubSub) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{72, 1} +} +func (m *Action_PublishToPubSub) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action_PublishToPubSub.Unmarshal(m, b) +} +func (m *Action_PublishToPubSub) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action_PublishToPubSub.Marshal(b, m, deterministic) +} +func (dst *Action_PublishToPubSub) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action_PublishToPubSub.Merge(dst, src) +} +func (m *Action_PublishToPubSub) XXX_Size() int { + return xxx_messageInfo_Action_PublishToPubSub.Size(m) +} +func (m *Action_PublishToPubSub) XXX_DiscardUnknown() { + xxx_messageInfo_Action_PublishToPubSub.DiscardUnknown(m) +} + +var xxx_messageInfo_Action_PublishToPubSub proto.InternalMessageInfo + +func (m *Action_PublishToPubSub) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +// Publish the result summary of a DlpJob to the Cloud Security +// Command Center (CSCC Alpha). +// This action is only available for projects which are parts of +// an organization and whitelisted for the alpha Cloud Security Command +// Center. +// The action will publish count of finding instances and their info types. +// The summary of findings will be persisted in CSCC and are governed by CSCC +// service-specific policy, see https://cloud.google.com/terms/service-terms +// Only a single instance of this action can be specified. +// Compatible with: Inspect +type Action_PublishSummaryToCscc struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action_PublishSummaryToCscc) Reset() { *m = Action_PublishSummaryToCscc{} } +func (m *Action_PublishSummaryToCscc) String() string { return proto.CompactTextString(m) } +func (*Action_PublishSummaryToCscc) ProtoMessage() {} +func (*Action_PublishSummaryToCscc) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{72, 2} +} +func (m *Action_PublishSummaryToCscc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action_PublishSummaryToCscc.Unmarshal(m, b) +} +func (m *Action_PublishSummaryToCscc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action_PublishSummaryToCscc.Marshal(b, m, deterministic) +} +func (dst *Action_PublishSummaryToCscc) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action_PublishSummaryToCscc.Merge(dst, src) +} +func (m *Action_PublishSummaryToCscc) XXX_Size() int { + return xxx_messageInfo_Action_PublishSummaryToCscc.Size(m) +} +func (m *Action_PublishSummaryToCscc) XXX_DiscardUnknown() { + xxx_messageInfo_Action_PublishSummaryToCscc.DiscardUnknown(m) +} + +var xxx_messageInfo_Action_PublishSummaryToCscc proto.InternalMessageInfo + +// Enable email notification to project owners and editors on jobs's +// completion/failure. 
+type Action_JobNotificationEmails struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Action_JobNotificationEmails) Reset() { *m = Action_JobNotificationEmails{} } +func (m *Action_JobNotificationEmails) String() string { return proto.CompactTextString(m) } +func (*Action_JobNotificationEmails) ProtoMessage() {} +func (*Action_JobNotificationEmails) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{72, 3} +} +func (m *Action_JobNotificationEmails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Action_JobNotificationEmails.Unmarshal(m, b) +} +func (m *Action_JobNotificationEmails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Action_JobNotificationEmails.Marshal(b, m, deterministic) +} +func (dst *Action_JobNotificationEmails) XXX_Merge(src proto.Message) { + xxx_messageInfo_Action_JobNotificationEmails.Merge(dst, src) +} +func (m *Action_JobNotificationEmails) XXX_Size() int { + return xxx_messageInfo_Action_JobNotificationEmails.Size(m) +} +func (m *Action_JobNotificationEmails) XXX_DiscardUnknown() { + xxx_messageInfo_Action_JobNotificationEmails.DiscardUnknown(m) +} + +var xxx_messageInfo_Action_JobNotificationEmails proto.InternalMessageInfo + +// Request message for CreateInspectTemplate. +type CreateInspectTemplateRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The InspectTemplate to create. + InspectTemplate *InspectTemplate `protobuf:"bytes,2,opt,name=inspect_template,json=inspectTemplate,proto3" json:"inspect_template,omitempty"` + // The template id can contain uppercase and lowercase letters, + // numbers, and hyphens; that is, it must match the regular + // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // characters. Can be empty to allow the system to generate one. 
+ TemplateId string `protobuf:"bytes,3,opt,name=template_id,json=templateId,proto3" json:"template_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInspectTemplateRequest) Reset() { *m = CreateInspectTemplateRequest{} } +func (m *CreateInspectTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInspectTemplateRequest) ProtoMessage() {} +func (*CreateInspectTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{73} +} +func (m *CreateInspectTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInspectTemplateRequest.Unmarshal(m, b) +} +func (m *CreateInspectTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInspectTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInspectTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInspectTemplateRequest.Merge(dst, src) +} +func (m *CreateInspectTemplateRequest) XXX_Size() int { + return xxx_messageInfo_CreateInspectTemplateRequest.Size(m) +} +func (m *CreateInspectTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInspectTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInspectTemplateRequest proto.InternalMessageInfo + +func (m *CreateInspectTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInspectTemplateRequest) GetInspectTemplate() *InspectTemplate { + if m != nil { + return m.InspectTemplate + } + return nil +} + +func (m *CreateInspectTemplateRequest) GetTemplateId() string { + if m != nil { + return m.TemplateId + } + return "" +} + +// Request message for UpdateInspectTemplate. +type UpdateInspectTemplateRequest struct { + // Resource name of organization and inspectTemplate to be updated, for + // example `organizations/433245324/inspectTemplates/432452342` or + // projects/project-id/inspectTemplates/432452342. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // New InspectTemplate value. + InspectTemplate *InspectTemplate `protobuf:"bytes,2,opt,name=inspect_template,json=inspectTemplate,proto3" json:"inspect_template,omitempty"` + // Mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInspectTemplateRequest) Reset() { *m = UpdateInspectTemplateRequest{} } +func (m *UpdateInspectTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateInspectTemplateRequest) ProtoMessage() {} +func (*UpdateInspectTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{74} +} +func (m *UpdateInspectTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInspectTemplateRequest.Unmarshal(m, b) +} +func (m *UpdateInspectTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInspectTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateInspectTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInspectTemplateRequest.Merge(dst, src) +} +func (m *UpdateInspectTemplateRequest) XXX_Size() int { + return xxx_messageInfo_UpdateInspectTemplateRequest.Size(m) +} +func (m *UpdateInspectTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInspectTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInspectTemplateRequest proto.InternalMessageInfo + +func (m *UpdateInspectTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateInspectTemplateRequest) GetInspectTemplate() *InspectTemplate { + if m != nil { + return m.InspectTemplate + } + return nil +} + +func (m *UpdateInspectTemplateRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for GetInspectTemplate. +type GetInspectTemplateRequest struct { + // Resource name of the organization and inspectTemplate to be read, for + // example `organizations/433245324/inspectTemplates/432452342` or + // projects/project-id/inspectTemplates/432452342. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInspectTemplateRequest) Reset() { *m = GetInspectTemplateRequest{} } +func (m *GetInspectTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetInspectTemplateRequest) ProtoMessage() {} +func (*GetInspectTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{75} +} +func (m *GetInspectTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInspectTemplateRequest.Unmarshal(m, b) +} +func (m *GetInspectTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInspectTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetInspectTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInspectTemplateRequest.Merge(dst, src) +} +func (m *GetInspectTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetInspectTemplateRequest.Size(m) +} +func (m *GetInspectTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInspectTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInspectTemplateRequest proto.InternalMessageInfo + +func (m *GetInspectTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListInspectTemplates. +type ListInspectTemplatesRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional page token to continue retrieval. Comes from previous call + // to `ListInspectTemplates`. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional size of the page, can be limited by server. If zero server returns + // a page of max size 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional comma separated list of fields to order by, + // followed by `asc` or `desc` postfix. This list is case-insensitive, + // default sorting order is ascending, redundant space characters are + // insignificant. + // + // Example: `name asc,update_time, create_time desc` + // + // Supported fields are: + // + // - `create_time`: corresponds to time the template was created. + // - `update_time`: corresponds to time the template was last updated. + // - `name`: corresponds to template's name. + // - `display_name`: corresponds to template's display name. 
+ OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInspectTemplatesRequest) Reset() { *m = ListInspectTemplatesRequest{} } +func (m *ListInspectTemplatesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInspectTemplatesRequest) ProtoMessage() {} +func (*ListInspectTemplatesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{76} +} +func (m *ListInspectTemplatesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInspectTemplatesRequest.Unmarshal(m, b) +} +func (m *ListInspectTemplatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInspectTemplatesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInspectTemplatesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInspectTemplatesRequest.Merge(dst, src) +} +func (m *ListInspectTemplatesRequest) XXX_Size() int { + return xxx_messageInfo_ListInspectTemplatesRequest.Size(m) +} +func (m *ListInspectTemplatesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInspectTemplatesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInspectTemplatesRequest proto.InternalMessageInfo + +func (m *ListInspectTemplatesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInspectTemplatesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListInspectTemplatesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInspectTemplatesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +// Response message for ListInspectTemplates. +type ListInspectTemplatesResponse struct { + // List of inspectTemplates, up to page_size in ListInspectTemplatesRequest. + InspectTemplates []*InspectTemplate `protobuf:"bytes,1,rep,name=inspect_templates,json=inspectTemplates,proto3" json:"inspect_templates,omitempty"` + // If the next page is available then the next page token to be used + // in following ListInspectTemplates request. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInspectTemplatesResponse) Reset() { *m = ListInspectTemplatesResponse{} } +func (m *ListInspectTemplatesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInspectTemplatesResponse) ProtoMessage() {} +func (*ListInspectTemplatesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{77} +} +func (m *ListInspectTemplatesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInspectTemplatesResponse.Unmarshal(m, b) +} +func (m *ListInspectTemplatesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInspectTemplatesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInspectTemplatesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInspectTemplatesResponse.Merge(dst, src) +} +func (m *ListInspectTemplatesResponse) XXX_Size() int { + return xxx_messageInfo_ListInspectTemplatesResponse.Size(m) +} +func (m *ListInspectTemplatesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInspectTemplatesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInspectTemplatesResponse proto.InternalMessageInfo + +func (m *ListInspectTemplatesResponse) GetInspectTemplates() []*InspectTemplate { + if m != nil { + return m.InspectTemplates + } + return nil +} + +func (m *ListInspectTemplatesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteInspectTemplate. +type DeleteInspectTemplateRequest struct { + // Resource name of the organization and inspectTemplate to be deleted, for + // example `organizations/433245324/inspectTemplates/432452342` or + // projects/project-id/inspectTemplates/432452342. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInspectTemplateRequest) Reset() { *m = DeleteInspectTemplateRequest{} } +func (m *DeleteInspectTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInspectTemplateRequest) ProtoMessage() {} +func (*DeleteInspectTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{78} +} +func (m *DeleteInspectTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInspectTemplateRequest.Unmarshal(m, b) +} +func (m *DeleteInspectTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInspectTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInspectTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInspectTemplateRequest.Merge(dst, src) +} +func (m *DeleteInspectTemplateRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInspectTemplateRequest.Size(m) +} +func (m *DeleteInspectTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInspectTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInspectTemplateRequest proto.InternalMessageInfo + +func (m *DeleteInspectTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for CreateJobTrigger. 
+type CreateJobTriggerRequest struct { + // The parent resource name, for example projects/my-project-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The JobTrigger to create. + JobTrigger *JobTrigger `protobuf:"bytes,2,opt,name=job_trigger,json=jobTrigger,proto3" json:"job_trigger,omitempty"` + // The trigger id can contain uppercase and lowercase letters, + // numbers, and hyphens; that is, it must match the regular + // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // characters. Can be empty to allow the system to generate one. + TriggerId string `protobuf:"bytes,3,opt,name=trigger_id,json=triggerId,proto3" json:"trigger_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateJobTriggerRequest) Reset() { *m = CreateJobTriggerRequest{} } +func (m *CreateJobTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*CreateJobTriggerRequest) ProtoMessage() {} +func (*CreateJobTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{79} +} +func (m *CreateJobTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateJobTriggerRequest.Unmarshal(m, b) +} +func (m *CreateJobTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateJobTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *CreateJobTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateJobTriggerRequest.Merge(dst, src) +} +func (m *CreateJobTriggerRequest) XXX_Size() int { + return xxx_messageInfo_CreateJobTriggerRequest.Size(m) +} +func (m *CreateJobTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateJobTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateJobTriggerRequest proto.InternalMessageInfo + +func (m *CreateJobTriggerRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateJobTriggerRequest) GetJobTrigger() *JobTrigger { + if m != nil { + return m.JobTrigger + } + return nil +} + +func (m *CreateJobTriggerRequest) GetTriggerId() string { + if m != nil { + return m.TriggerId + } + return "" +} + +// Request message for ActivateJobTrigger. +type ActivateJobTriggerRequest struct { + // Resource name of the trigger to activate, for example + // `projects/dlp-test-project/jobTriggers/53234423`. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ActivateJobTriggerRequest) Reset() { *m = ActivateJobTriggerRequest{} } +func (m *ActivateJobTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*ActivateJobTriggerRequest) ProtoMessage() {} +func (*ActivateJobTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{80} +} +func (m *ActivateJobTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ActivateJobTriggerRequest.Unmarshal(m, b) +} +func (m *ActivateJobTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ActivateJobTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *ActivateJobTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ActivateJobTriggerRequest.Merge(dst, src) +} +func (m *ActivateJobTriggerRequest) XXX_Size() int { + return xxx_messageInfo_ActivateJobTriggerRequest.Size(m) +} +func (m *ActivateJobTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ActivateJobTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ActivateJobTriggerRequest proto.InternalMessageInfo + +func (m *ActivateJobTriggerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for UpdateJobTrigger. +type UpdateJobTriggerRequest struct { + // Resource name of the project and the triggeredJob, for example + // `projects/dlp-test-project/jobTriggers/53234423`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // New JobTrigger value. + JobTrigger *JobTrigger `protobuf:"bytes,2,opt,name=job_trigger,json=jobTrigger,proto3" json:"job_trigger,omitempty"` + // Mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateJobTriggerRequest) Reset() { *m = UpdateJobTriggerRequest{} } +func (m *UpdateJobTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateJobTriggerRequest) ProtoMessage() {} +func (*UpdateJobTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{81} +} +func (m *UpdateJobTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateJobTriggerRequest.Unmarshal(m, b) +} +func (m *UpdateJobTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateJobTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateJobTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateJobTriggerRequest.Merge(dst, src) +} +func (m *UpdateJobTriggerRequest) XXX_Size() int { + return xxx_messageInfo_UpdateJobTriggerRequest.Size(m) +} +func (m *UpdateJobTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateJobTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateJobTriggerRequest proto.InternalMessageInfo + +func (m *UpdateJobTriggerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateJobTriggerRequest) GetJobTrigger() *JobTrigger { + if m != nil { + return m.JobTrigger + } + return nil +} + +func (m *UpdateJobTriggerRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for GetJobTrigger. +type GetJobTriggerRequest struct { + // Resource name of the project and the triggeredJob, for example + // `projects/dlp-test-project/jobTriggers/53234423`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetJobTriggerRequest) Reset() { *m = GetJobTriggerRequest{} } +func (m *GetJobTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*GetJobTriggerRequest) ProtoMessage() {} +func (*GetJobTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{82} +} +func (m *GetJobTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetJobTriggerRequest.Unmarshal(m, b) +} +func (m *GetJobTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetJobTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *GetJobTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetJobTriggerRequest.Merge(dst, src) +} +func (m *GetJobTriggerRequest) XXX_Size() int { + return xxx_messageInfo_GetJobTriggerRequest.Size(m) +} +func (m *GetJobTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetJobTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetJobTriggerRequest proto.InternalMessageInfo + +func (m *GetJobTriggerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for CreateDlpJobRequest. Used to initiate long running +// jobs such as calculating risk metrics or inspecting Google Cloud +// Storage. +type CreateDlpJobRequest struct { + // The parent resource name, for example projects/my-project-id. 
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The configuration details for the specific type of job to run. + // + // Types that are valid to be assigned to Job: + // *CreateDlpJobRequest_InspectJob + // *CreateDlpJobRequest_RiskJob + Job isCreateDlpJobRequest_Job `protobuf_oneof:"job"` + // The job id can contain uppercase and lowercase letters, + // numbers, and hyphens; that is, it must match the regular + // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // characters. Can be empty to allow the system to generate one. + JobId string `protobuf:"bytes,4,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDlpJobRequest) Reset() { *m = CreateDlpJobRequest{} } +func (m *CreateDlpJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDlpJobRequest) ProtoMessage() {} +func (*CreateDlpJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{83} +} +func (m *CreateDlpJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDlpJobRequest.Unmarshal(m, b) +} +func (m *CreateDlpJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDlpJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDlpJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDlpJobRequest.Merge(dst, src) +} +func (m *CreateDlpJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateDlpJobRequest.Size(m) +} +func (m *CreateDlpJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDlpJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDlpJobRequest proto.InternalMessageInfo + +func (m *CreateDlpJobRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +type isCreateDlpJobRequest_Job interface { + isCreateDlpJobRequest_Job() +} + +type CreateDlpJobRequest_InspectJob struct { + InspectJob *InspectJobConfig `protobuf:"bytes,2,opt,name=inspect_job,json=inspectJob,proto3,oneof"` +} + +type CreateDlpJobRequest_RiskJob struct { + RiskJob *RiskAnalysisJobConfig `protobuf:"bytes,3,opt,name=risk_job,json=riskJob,proto3,oneof"` +} + +func (*CreateDlpJobRequest_InspectJob) isCreateDlpJobRequest_Job() {} + +func (*CreateDlpJobRequest_RiskJob) isCreateDlpJobRequest_Job() {} + +func (m *CreateDlpJobRequest) GetJob() isCreateDlpJobRequest_Job { + if m != nil { + return m.Job + } + return nil +} + +func (m *CreateDlpJobRequest) GetInspectJob() *InspectJobConfig { + if x, ok := m.GetJob().(*CreateDlpJobRequest_InspectJob); ok { + return x.InspectJob + } + return nil +} + +func (m *CreateDlpJobRequest) GetRiskJob() *RiskAnalysisJobConfig { + if x, ok := m.GetJob().(*CreateDlpJobRequest_RiskJob); ok { + return x.RiskJob + } + return nil +} + +func (m *CreateDlpJobRequest) GetJobId() string { + if m != nil { + return m.JobId + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
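The Job oneof of CreateDlpJobRequest is populated the same way: exactly one wrapper struct is assigned, and the typed getters above (GetInspectJob, GetRiskJob) return nil for any case that is not set. A hedged sketch reusing the illustrative dlppb alias; the helper name, the empty InspectJobConfig, and the parent value are assumptions made for the example, not part of this patch:

// assumes: import dlppb "google.golang.org/genproto/googleapis/privacy/dlp/v2"

// newInspectJobRequest builds a CreateDlpJobRequest with the inspect_job
// case of the Job oneof selected; CreateDlpJobRequest_RiskJob would be the
// alternative case of the same oneof.
func newInspectJobRequest(parent string) *dlppb.CreateDlpJobRequest {
	return &dlppb.CreateDlpJobRequest{
		Parent: parent,
		Job: &dlppb.CreateDlpJobRequest_InspectJob{
			InspectJob: &dlppb.InspectJobConfig{},
		},
		// JobId may be left empty to let the service generate one.
	}
}

Reading it back, GetInspectJob returns the config while GetRiskJob returns nil; the XXX_OneofFuncs helpers that follow switch on the same wrapper types when encoding and decoding the field.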
+func (*CreateDlpJobRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CreateDlpJobRequest_OneofMarshaler, _CreateDlpJobRequest_OneofUnmarshaler, _CreateDlpJobRequest_OneofSizer, []interface{}{ + (*CreateDlpJobRequest_InspectJob)(nil), + (*CreateDlpJobRequest_RiskJob)(nil), + } +} + +func _CreateDlpJobRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CreateDlpJobRequest) + // job + switch x := m.Job.(type) { + case *CreateDlpJobRequest_InspectJob: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InspectJob); err != nil { + return err + } + case *CreateDlpJobRequest_RiskJob: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RiskJob); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CreateDlpJobRequest.Job has unexpected type %T", x) + } + return nil +} + +func _CreateDlpJobRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CreateDlpJobRequest) + switch tag { + case 2: // job.inspect_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InspectJobConfig) + err := b.DecodeMessage(msg) + m.Job = &CreateDlpJobRequest_InspectJob{msg} + return true, err + case 3: // job.risk_job + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RiskAnalysisJobConfig) + err := b.DecodeMessage(msg) + m.Job = &CreateDlpJobRequest_RiskJob{msg} + return true, err + default: + return false, nil + } +} + +func _CreateDlpJobRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CreateDlpJobRequest) + // job + switch x := m.Job.(type) { + case *CreateDlpJobRequest_InspectJob: + s := proto.Size(x.InspectJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CreateDlpJobRequest_RiskJob: + s := proto.Size(x.RiskJob) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message for ListJobTriggers. +type ListJobTriggersRequest struct { + // The parent resource name, for example `projects/my-project-id`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional page token to continue retrieval. Comes from previous call + // to ListJobTriggers. `order_by` field must not + // change for subsequent calls. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional size of the page, can be limited by a server. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional comma separated list of triggeredJob fields to order by, + // followed by `asc` or `desc` postfix. This list is case-insensitive, + // default sorting order is ascending, redundant space characters are + // insignificant. + // + // Example: `name asc,update_time, create_time desc` + // + // Supported fields are: + // + // - `create_time`: corresponds to time the JobTrigger was created. + // - `update_time`: corresponds to time the JobTrigger was last updated. + // - `last_run_time`: corresponds to the last time the JobTrigger ran. + // - `name`: corresponds to JobTrigger's name. + // - `display_name`: corresponds to JobTrigger's display name. 
+ // - `status`: corresponds to JobTrigger's status. + OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + // Optional. Allows filtering. + // + // Supported syntax: + // + // * Filter expressions are made up of one or more restrictions. + // * Restrictions can be combined by `AND` or `OR` logical operators. A + // sequence of restrictions implicitly uses `AND`. + // * A restriction has the form of `<field> <operator> <value>`. + // * Supported fields/values for inspect jobs: + // - `status` - HEALTHY|PAUSED|CANCELLED + // - `inspected_storage` - DATASTORE|CLOUD_STORAGE|BIGQUERY + // - 'last_run_time` - RFC 3339 formatted timestamp, surrounded by + // quotation marks. Nanoseconds are ignored. + // - 'error_count' - Number of errors that have occurred while running. + // * The operator must be `=` or `!=` for status and inspected_storage. + // + // Examples: + // + // * inspected_storage = cloud_storage AND status = HEALTHY + // * inspected_storage = cloud_storage OR inspected_storage = bigquery + // * inspected_storage = cloud_storage AND (state = PAUSED OR state = HEALTHY) + // * last_run_time > \"2017-12-12T00:00:00+00:00\" + // + // The length of this field should be no more than 500 characters. + Filter string `protobuf:"bytes,5,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobTriggersRequest) Reset() { *m = ListJobTriggersRequest{} } +func (m *ListJobTriggersRequest) String() string { return proto.CompactTextString(m) } +func (*ListJobTriggersRequest) ProtoMessage() {} +func (*ListJobTriggersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{84} +} +func (m *ListJobTriggersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobTriggersRequest.Unmarshal(m, b) +} +func (m *ListJobTriggersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobTriggersRequest.Marshal(b, m, deterministic) +} +func (dst *ListJobTriggersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobTriggersRequest.Merge(dst, src) +} +func (m *ListJobTriggersRequest) XXX_Size() int { + return xxx_messageInfo_ListJobTriggersRequest.Size(m) +} +func (m *ListJobTriggersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobTriggersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobTriggersRequest proto.InternalMessageInfo + +func (m *ListJobTriggersRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListJobTriggersRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListJobTriggersRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListJobTriggersRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +func (m *ListJobTriggersRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// Response message for ListJobTriggers. +type ListJobTriggersResponse struct { + // List of triggeredJobs, up to page_size in ListJobTriggersRequest. + JobTriggers []*JobTrigger `protobuf:"bytes,1,rep,name=job_triggers,json=jobTriggers,proto3" json:"job_triggers,omitempty"` + // If the next page is available then the next page token to be used + // in following ListJobTriggers request. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListJobTriggersResponse) Reset() { *m = ListJobTriggersResponse{} } +func (m *ListJobTriggersResponse) String() string { return proto.CompactTextString(m) } +func (*ListJobTriggersResponse) ProtoMessage() {} +func (*ListJobTriggersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{85} +} +func (m *ListJobTriggersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListJobTriggersResponse.Unmarshal(m, b) +} +func (m *ListJobTriggersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListJobTriggersResponse.Marshal(b, m, deterministic) +} +func (dst *ListJobTriggersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListJobTriggersResponse.Merge(dst, src) +} +func (m *ListJobTriggersResponse) XXX_Size() int { + return xxx_messageInfo_ListJobTriggersResponse.Size(m) +} +func (m *ListJobTriggersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListJobTriggersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListJobTriggersResponse proto.InternalMessageInfo + +func (m *ListJobTriggersResponse) GetJobTriggers() []*JobTrigger { + if m != nil { + return m.JobTriggers + } + return nil +} + +func (m *ListJobTriggersResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteJobTrigger. +type DeleteJobTriggerRequest struct { + // Resource name of the project and the triggeredJob, for example + // `projects/dlp-test-project/jobTriggers/53234423`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteJobTriggerRequest) Reset() { *m = DeleteJobTriggerRequest{} } +func (m *DeleteJobTriggerRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteJobTriggerRequest) ProtoMessage() {} +func (*DeleteJobTriggerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{86} +} +func (m *DeleteJobTriggerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteJobTriggerRequest.Unmarshal(m, b) +} +func (m *DeleteJobTriggerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteJobTriggerRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteJobTriggerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteJobTriggerRequest.Merge(dst, src) +} +func (m *DeleteJobTriggerRequest) XXX_Size() int { + return xxx_messageInfo_DeleteJobTriggerRequest.Size(m) +} +func (m *DeleteJobTriggerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteJobTriggerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteJobTriggerRequest proto.InternalMessageInfo + +func (m *DeleteJobTriggerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type InspectJobConfig struct { + // The data to scan. + StorageConfig *StorageConfig `protobuf:"bytes,1,opt,name=storage_config,json=storageConfig,proto3" json:"storage_config,omitempty"` + // How and what to scan for. 
+ InspectConfig *InspectConfig `protobuf:"bytes,2,opt,name=inspect_config,json=inspectConfig,proto3" json:"inspect_config,omitempty"` + // If provided, will be used as the default for all values in InspectConfig. + // `inspect_config` will be merged into the values persisted as part of the + // template. + InspectTemplateName string `protobuf:"bytes,3,opt,name=inspect_template_name,json=inspectTemplateName,proto3" json:"inspect_template_name,omitempty"` + // Actions to execute at the completion of the job. Are executed in the order + // provided. + Actions []*Action `protobuf:"bytes,4,rep,name=actions,proto3" json:"actions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InspectJobConfig) Reset() { *m = InspectJobConfig{} } +func (m *InspectJobConfig) String() string { return proto.CompactTextString(m) } +func (*InspectJobConfig) ProtoMessage() {} +func (*InspectJobConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{87} +} +func (m *InspectJobConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InspectJobConfig.Unmarshal(m, b) +} +func (m *InspectJobConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InspectJobConfig.Marshal(b, m, deterministic) +} +func (dst *InspectJobConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InspectJobConfig.Merge(dst, src) +} +func (m *InspectJobConfig) XXX_Size() int { + return xxx_messageInfo_InspectJobConfig.Size(m) +} +func (m *InspectJobConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InspectJobConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InspectJobConfig proto.InternalMessageInfo + +func (m *InspectJobConfig) GetStorageConfig() *StorageConfig { + if m != nil { + return m.StorageConfig + } + return nil +} + +func (m *InspectJobConfig) GetInspectConfig() *InspectConfig { + if m != nil { + return m.InspectConfig + } + return nil +} + +func (m *InspectJobConfig) GetInspectTemplateName() string { + if m != nil { + return m.InspectTemplateName + } + return "" +} + +func (m *InspectJobConfig) GetActions() []*Action { + if m != nil { + return m.Actions + } + return nil +} + +// Combines all of the information about a DLP job. +type DlpJob struct { + // The server-assigned name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The type of job. + Type DlpJobType `protobuf:"varint,2,opt,name=type,proto3,enum=google.privacy.dlp.v2.DlpJobType" json:"type,omitempty"` + // State of a job. + State DlpJob_JobState `protobuf:"varint,3,opt,name=state,proto3,enum=google.privacy.dlp.v2.DlpJob_JobState" json:"state,omitempty"` + // Types that are valid to be assigned to Details: + // *DlpJob_RiskDetails + // *DlpJob_InspectDetails + Details isDlpJob_Details `protobuf_oneof:"details"` + // Time when the job was created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Time when the job started. + StartTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Time when the job finished. + EndTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // If created by a job trigger, the resource name of the trigger that + // instantiated the job. 
+ JobTriggerName string `protobuf:"bytes,10,opt,name=job_trigger_name,json=jobTriggerName,proto3" json:"job_trigger_name,omitempty"` + // A stream of errors encountered running the job. + Errors []*Error `protobuf:"bytes,11,rep,name=errors,proto3" json:"errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DlpJob) Reset() { *m = DlpJob{} } +func (m *DlpJob) String() string { return proto.CompactTextString(m) } +func (*DlpJob) ProtoMessage() {} +func (*DlpJob) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{88} +} +func (m *DlpJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DlpJob.Unmarshal(m, b) +} +func (m *DlpJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DlpJob.Marshal(b, m, deterministic) +} +func (dst *DlpJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_DlpJob.Merge(dst, src) +} +func (m *DlpJob) XXX_Size() int { + return xxx_messageInfo_DlpJob.Size(m) +} +func (m *DlpJob) XXX_DiscardUnknown() { + xxx_messageInfo_DlpJob.DiscardUnknown(m) +} + +var xxx_messageInfo_DlpJob proto.InternalMessageInfo + +func (m *DlpJob) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *DlpJob) GetType() DlpJobType { + if m != nil { + return m.Type + } + return DlpJobType_DLP_JOB_TYPE_UNSPECIFIED +} + +func (m *DlpJob) GetState() DlpJob_JobState { + if m != nil { + return m.State + } + return DlpJob_JOB_STATE_UNSPECIFIED +} + +type isDlpJob_Details interface { + isDlpJob_Details() +} + +type DlpJob_RiskDetails struct { + RiskDetails *AnalyzeDataSourceRiskDetails `protobuf:"bytes,4,opt,name=risk_details,json=riskDetails,proto3,oneof"` +} + +type DlpJob_InspectDetails struct { + InspectDetails *InspectDataSourceDetails `protobuf:"bytes,5,opt,name=inspect_details,json=inspectDetails,proto3,oneof"` +} + +func (*DlpJob_RiskDetails) isDlpJob_Details() {} + +func (*DlpJob_InspectDetails) isDlpJob_Details() {} + +func (m *DlpJob) GetDetails() isDlpJob_Details { + if m != nil { + return m.Details + } + return nil +} + +func (m *DlpJob) GetRiskDetails() *AnalyzeDataSourceRiskDetails { + if x, ok := m.GetDetails().(*DlpJob_RiskDetails); ok { + return x.RiskDetails + } + return nil +} + +func (m *DlpJob) GetInspectDetails() *InspectDataSourceDetails { + if x, ok := m.GetDetails().(*DlpJob_InspectDetails); ok { + return x.InspectDetails + } + return nil +} + +func (m *DlpJob) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *DlpJob) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *DlpJob) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *DlpJob) GetJobTriggerName() string { + if m != nil { + return m.JobTriggerName + } + return "" +} + +func (m *DlpJob) GetErrors() []*Error { + if m != nil { + return m.Errors + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
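On the consuming side, the Details oneof of DlpJob is typically examined either through the typed getters above or with a type switch over GetDetails. A small illustrative helper under the same assumed dlppb import; the function name is an assumption, not part of the generated API:

// assumes: import dlppb "google.golang.org/genproto/googleapis/privacy/dlp/v2"

// describeDlpJobDetails reports which case of the Details oneof is set.
func describeDlpJobDetails(job *dlppb.DlpJob) string {
	switch d := job.GetDetails().(type) {
	case *dlppb.DlpJob_RiskDetails:
		_ = d.RiskDetails // *dlppb.AnalyzeDataSourceRiskDetails
		return "risk analysis details"
	case *dlppb.DlpJob_InspectDetails:
		_ = d.InspectDetails // *dlppb.InspectDataSourceDetails
		return "inspect details"
	default:
		return "no details set"
	}
}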
+func (*DlpJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DlpJob_OneofMarshaler, _DlpJob_OneofUnmarshaler, _DlpJob_OneofSizer, []interface{}{ + (*DlpJob_RiskDetails)(nil), + (*DlpJob_InspectDetails)(nil), + } +} + +func _DlpJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DlpJob) + // details + switch x := m.Details.(type) { + case *DlpJob_RiskDetails: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RiskDetails); err != nil { + return err + } + case *DlpJob_InspectDetails: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InspectDetails); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DlpJob.Details has unexpected type %T", x) + } + return nil +} + +func _DlpJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DlpJob) + switch tag { + case 4: // details.risk_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnalyzeDataSourceRiskDetails) + err := b.DecodeMessage(msg) + m.Details = &DlpJob_RiskDetails{msg} + return true, err + case 5: // details.inspect_details + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InspectDataSourceDetails) + err := b.DecodeMessage(msg) + m.Details = &DlpJob_InspectDetails{msg} + return true, err + default: + return false, nil + } +} + +func _DlpJob_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DlpJob) + // details + switch x := m.Details.(type) { + case *DlpJob_RiskDetails: + s := proto.Size(x.RiskDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DlpJob_InspectDetails: + s := proto.Size(x.InspectDetails) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The request message for [DlpJobs.GetDlpJob][]. +type GetDlpJobRequest struct { + // The name of the DlpJob resource. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDlpJobRequest) Reset() { *m = GetDlpJobRequest{} } +func (m *GetDlpJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetDlpJobRequest) ProtoMessage() {} +func (*GetDlpJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{89} +} +func (m *GetDlpJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDlpJobRequest.Unmarshal(m, b) +} +func (m *GetDlpJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDlpJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetDlpJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDlpJobRequest.Merge(dst, src) +} +func (m *GetDlpJobRequest) XXX_Size() int { + return xxx_messageInfo_GetDlpJobRequest.Size(m) +} +func (m *GetDlpJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDlpJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDlpJobRequest proto.InternalMessageInfo + +func (m *GetDlpJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for listing DLP jobs. 
+type ListDlpJobsRequest struct { + // The parent resource name, for example projects/my-project-id. + Parent string `protobuf:"bytes,4,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional. Allows filtering. + // + // Supported syntax: + // + // * Filter expressions are made up of one or more restrictions. + // * Restrictions can be combined by `AND` or `OR` logical operators. A + // sequence of restrictions implicitly uses `AND`. + // * A restriction has the form of `<field> <operator> <value>`. + // * Supported fields/values for inspect jobs: + // - `state` - PENDING|RUNNING|CANCELED|FINISHED|FAILED + // - `inspected_storage` - DATASTORE|CLOUD_STORAGE|BIGQUERY + // - `trigger_name` - The resource name of the trigger that created job. + // * Supported fields for risk analysis jobs: + // - `state` - RUNNING|CANCELED|FINISHED|FAILED + // * The operator must be `=` or `!=`. + // + // Examples: + // + // * inspected_storage = cloud_storage AND state = done + // * inspected_storage = cloud_storage OR inspected_storage = bigquery + // * inspected_storage = cloud_storage AND (state = done OR state = canceled) + // + // The length of this field should be no more than 500 characters. + Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + // The standard list page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The standard list page token. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The type of job. Defaults to `DlpJobType.INSPECT` + Type DlpJobType `protobuf:"varint,5,opt,name=type,proto3,enum=google.privacy.dlp.v2.DlpJobType" json:"type,omitempty"` + // Optional comma separated list of fields to order by, + // followed by `asc` or `desc` postfix. This list is case-insensitive, + // default sorting order is ascending, redundant space characters are + // insignificant. + // + // Example: `name asc, end_time asc, create_time desc` + // + // Supported fields are: + // + // - `create_time`: corresponds to time the job was created. + // - `end_time`: corresponds to time the job ended. + // - `name`: corresponds to job's name. 
+ // - `state`: corresponds to `state` + OrderBy string `protobuf:"bytes,6,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDlpJobsRequest) Reset() { *m = ListDlpJobsRequest{} } +func (m *ListDlpJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListDlpJobsRequest) ProtoMessage() {} +func (*ListDlpJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{90} +} +func (m *ListDlpJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDlpJobsRequest.Unmarshal(m, b) +} +func (m *ListDlpJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDlpJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListDlpJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDlpJobsRequest.Merge(dst, src) +} +func (m *ListDlpJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListDlpJobsRequest.Size(m) +} +func (m *ListDlpJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDlpJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDlpJobsRequest proto.InternalMessageInfo + +func (m *ListDlpJobsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDlpJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListDlpJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDlpJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListDlpJobsRequest) GetType() DlpJobType { + if m != nil { + return m.Type + } + return DlpJobType_DLP_JOB_TYPE_UNSPECIFIED +} + +func (m *ListDlpJobsRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +// The response message for listing DLP jobs. +type ListDlpJobsResponse struct { + // A list of DlpJobs that matches the specified filter in the request. + Jobs []*DlpJob `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"` + // The standard List next-page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDlpJobsResponse) Reset() { *m = ListDlpJobsResponse{} } +func (m *ListDlpJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListDlpJobsResponse) ProtoMessage() {} +func (*ListDlpJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{91} +} +func (m *ListDlpJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDlpJobsResponse.Unmarshal(m, b) +} +func (m *ListDlpJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDlpJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListDlpJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDlpJobsResponse.Merge(dst, src) +} +func (m *ListDlpJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListDlpJobsResponse.Size(m) +} +func (m *ListDlpJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDlpJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDlpJobsResponse proto.InternalMessageInfo + +func (m *ListDlpJobsResponse) GetJobs() []*DlpJob { + if m != nil { + return m.Jobs + } + return nil +} + +func (m *ListDlpJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request message for canceling a DLP job. +type CancelDlpJobRequest struct { + // The name of the DlpJob resource to be cancelled. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CancelDlpJobRequest) Reset() { *m = CancelDlpJobRequest{} } +func (m *CancelDlpJobRequest) String() string { return proto.CompactTextString(m) } +func (*CancelDlpJobRequest) ProtoMessage() {} +func (*CancelDlpJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{92} +} +func (m *CancelDlpJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CancelDlpJobRequest.Unmarshal(m, b) +} +func (m *CancelDlpJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CancelDlpJobRequest.Marshal(b, m, deterministic) +} +func (dst *CancelDlpJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CancelDlpJobRequest.Merge(dst, src) +} +func (m *CancelDlpJobRequest) XXX_Size() int { + return xxx_messageInfo_CancelDlpJobRequest.Size(m) +} +func (m *CancelDlpJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CancelDlpJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CancelDlpJobRequest proto.InternalMessageInfo + +func (m *CancelDlpJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request message for deleting a DLP job. +type DeleteDlpJobRequest struct { + // The name of the DlpJob resource to be deleted. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDlpJobRequest) Reset() { *m = DeleteDlpJobRequest{} } +func (m *DeleteDlpJobRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDlpJobRequest) ProtoMessage() {} +func (*DeleteDlpJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{93} +} +func (m *DeleteDlpJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDlpJobRequest.Unmarshal(m, b) +} +func (m *DeleteDlpJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDlpJobRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDlpJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDlpJobRequest.Merge(dst, src) +} +func (m *DeleteDlpJobRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDlpJobRequest.Size(m) +} +func (m *DeleteDlpJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDlpJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDlpJobRequest proto.InternalMessageInfo + +func (m *DeleteDlpJobRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for CreateDeidentifyTemplate. +type CreateDeidentifyTemplateRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // The DeidentifyTemplate to create. + DeidentifyTemplate *DeidentifyTemplate `protobuf:"bytes,2,opt,name=deidentify_template,json=deidentifyTemplate,proto3" json:"deidentify_template,omitempty"` + // The template id can contain uppercase and lowercase letters, + // numbers, and hyphens; that is, it must match the regular + // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // characters. Can be empty to allow the system to generate one. 
+ TemplateId string `protobuf:"bytes,3,opt,name=template_id,json=templateId,proto3" json:"template_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDeidentifyTemplateRequest) Reset() { *m = CreateDeidentifyTemplateRequest{} } +func (m *CreateDeidentifyTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDeidentifyTemplateRequest) ProtoMessage() {} +func (*CreateDeidentifyTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{94} +} +func (m *CreateDeidentifyTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDeidentifyTemplateRequest.Unmarshal(m, b) +} +func (m *CreateDeidentifyTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDeidentifyTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDeidentifyTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDeidentifyTemplateRequest.Merge(dst, src) +} +func (m *CreateDeidentifyTemplateRequest) XXX_Size() int { + return xxx_messageInfo_CreateDeidentifyTemplateRequest.Size(m) +} +func (m *CreateDeidentifyTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDeidentifyTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDeidentifyTemplateRequest proto.InternalMessageInfo + +func (m *CreateDeidentifyTemplateRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDeidentifyTemplateRequest) GetDeidentifyTemplate() *DeidentifyTemplate { + if m != nil { + return m.DeidentifyTemplate + } + return nil +} + +func (m *CreateDeidentifyTemplateRequest) GetTemplateId() string { + if m != nil { + return m.TemplateId + } + return "" +} + +// Request message for UpdateDeidentifyTemplate. +type UpdateDeidentifyTemplateRequest struct { + // Resource name of organization and deidentify template to be updated, for + // example `organizations/433245324/deidentifyTemplates/432452342` or + // projects/project-id/deidentifyTemplates/432452342. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // New DeidentifyTemplate value. + DeidentifyTemplate *DeidentifyTemplate `protobuf:"bytes,2,opt,name=deidentify_template,json=deidentifyTemplate,proto3" json:"deidentify_template,omitempty"` + // Mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDeidentifyTemplateRequest) Reset() { *m = UpdateDeidentifyTemplateRequest{} } +func (m *UpdateDeidentifyTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDeidentifyTemplateRequest) ProtoMessage() {} +func (*UpdateDeidentifyTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{95} +} +func (m *UpdateDeidentifyTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDeidentifyTemplateRequest.Unmarshal(m, b) +} +func (m *UpdateDeidentifyTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDeidentifyTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDeidentifyTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDeidentifyTemplateRequest.Merge(dst, src) +} +func (m *UpdateDeidentifyTemplateRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDeidentifyTemplateRequest.Size(m) +} +func (m *UpdateDeidentifyTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDeidentifyTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDeidentifyTemplateRequest proto.InternalMessageInfo + +func (m *UpdateDeidentifyTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateDeidentifyTemplateRequest) GetDeidentifyTemplate() *DeidentifyTemplate { + if m != nil { + return m.DeidentifyTemplate + } + return nil +} + +func (m *UpdateDeidentifyTemplateRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for GetDeidentifyTemplate. +type GetDeidentifyTemplateRequest struct { + // Resource name of the organization and deidentify template to be read, for + // example `organizations/433245324/deidentifyTemplates/432452342` or + // projects/project-id/deidentifyTemplates/432452342. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDeidentifyTemplateRequest) Reset() { *m = GetDeidentifyTemplateRequest{} } +func (m *GetDeidentifyTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*GetDeidentifyTemplateRequest) ProtoMessage() {} +func (*GetDeidentifyTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{96} +} +func (m *GetDeidentifyTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDeidentifyTemplateRequest.Unmarshal(m, b) +} +func (m *GetDeidentifyTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDeidentifyTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *GetDeidentifyTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDeidentifyTemplateRequest.Merge(dst, src) +} +func (m *GetDeidentifyTemplateRequest) XXX_Size() int { + return xxx_messageInfo_GetDeidentifyTemplateRequest.Size(m) +} +func (m *GetDeidentifyTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDeidentifyTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDeidentifyTemplateRequest proto.InternalMessageInfo + +func (m *GetDeidentifyTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListDeidentifyTemplates. +type ListDeidentifyTemplatesRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional page token to continue retrieval. Comes from previous call + // to `ListDeidentifyTemplates`. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional size of the page, can be limited by server. If zero server returns + // a page of max size 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional comma separated list of fields to order by, + // followed by `asc` or `desc` postfix. This list is case-insensitive, + // default sorting order is ascending, redundant space characters are + // insignificant. + // + // Example: `name asc,update_time, create_time desc` + // + // Supported fields are: + // + // - `create_time`: corresponds to time the template was created. + // - `update_time`: corresponds to time the template was last updated. + // - `name`: corresponds to template's name. + // - `display_name`: corresponds to template's display name. 
+ OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeidentifyTemplatesRequest) Reset() { *m = ListDeidentifyTemplatesRequest{} } +func (m *ListDeidentifyTemplatesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDeidentifyTemplatesRequest) ProtoMessage() {} +func (*ListDeidentifyTemplatesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{97} +} +func (m *ListDeidentifyTemplatesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeidentifyTemplatesRequest.Unmarshal(m, b) +} +func (m *ListDeidentifyTemplatesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeidentifyTemplatesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDeidentifyTemplatesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeidentifyTemplatesRequest.Merge(dst, src) +} +func (m *ListDeidentifyTemplatesRequest) XXX_Size() int { + return xxx_messageInfo_ListDeidentifyTemplatesRequest.Size(m) +} +func (m *ListDeidentifyTemplatesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeidentifyTemplatesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeidentifyTemplatesRequest proto.InternalMessageInfo + +func (m *ListDeidentifyTemplatesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDeidentifyTemplatesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListDeidentifyTemplatesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDeidentifyTemplatesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +// Response message for ListDeidentifyTemplates. +type ListDeidentifyTemplatesResponse struct { + // List of deidentify templates, up to page_size in + // ListDeidentifyTemplatesRequest. + DeidentifyTemplates []*DeidentifyTemplate `protobuf:"bytes,1,rep,name=deidentify_templates,json=deidentifyTemplates,proto3" json:"deidentify_templates,omitempty"` + // If the next page is available then the next page token to be used + // in following ListDeidentifyTemplates request. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDeidentifyTemplatesResponse) Reset() { *m = ListDeidentifyTemplatesResponse{} } +func (m *ListDeidentifyTemplatesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDeidentifyTemplatesResponse) ProtoMessage() {} +func (*ListDeidentifyTemplatesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{98} +} +func (m *ListDeidentifyTemplatesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDeidentifyTemplatesResponse.Unmarshal(m, b) +} +func (m *ListDeidentifyTemplatesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDeidentifyTemplatesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDeidentifyTemplatesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDeidentifyTemplatesResponse.Merge(dst, src) +} +func (m *ListDeidentifyTemplatesResponse) XXX_Size() int { + return xxx_messageInfo_ListDeidentifyTemplatesResponse.Size(m) +} +func (m *ListDeidentifyTemplatesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDeidentifyTemplatesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDeidentifyTemplatesResponse proto.InternalMessageInfo + +func (m *ListDeidentifyTemplatesResponse) GetDeidentifyTemplates() []*DeidentifyTemplate { + if m != nil { + return m.DeidentifyTemplates + } + return nil +} + +func (m *ListDeidentifyTemplatesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteDeidentifyTemplate. +type DeleteDeidentifyTemplateRequest struct { + // Resource name of the organization and deidentify template to be deleted, + // for example `organizations/433245324/deidentifyTemplates/432452342` or + // projects/project-id/deidentifyTemplates/432452342. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteDeidentifyTemplateRequest) Reset() { *m = DeleteDeidentifyTemplateRequest{} } +func (m *DeleteDeidentifyTemplateRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteDeidentifyTemplateRequest) ProtoMessage() {} +func (*DeleteDeidentifyTemplateRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{99} +} +func (m *DeleteDeidentifyTemplateRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteDeidentifyTemplateRequest.Unmarshal(m, b) +} +func (m *DeleteDeidentifyTemplateRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteDeidentifyTemplateRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteDeidentifyTemplateRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteDeidentifyTemplateRequest.Merge(dst, src) +} +func (m *DeleteDeidentifyTemplateRequest) XXX_Size() int { + return xxx_messageInfo_DeleteDeidentifyTemplateRequest.Size(m) +} +func (m *DeleteDeidentifyTemplateRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteDeidentifyTemplateRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteDeidentifyTemplateRequest proto.InternalMessageInfo + +func (m *DeleteDeidentifyTemplateRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Configuration for a custom dictionary created from a data source of any size +// up to the maximum size defined in the +// [limits](https://cloud.google.com/dlp/limits) page. The artifacts of +// dictionary creation are stored in the specified Google Cloud Storage +// location. Consider using `CustomInfoType.Dictionary` for smaller dictionaries +// that satisfy the size requirements. +type LargeCustomDictionaryConfig struct { + // Location to store dictionary artifacts in Google Cloud Storage. These files + // will only be accessible by project owners and the DLP API. If any of these + // artifacts are modified, the dictionary is considered invalid and can no + // longer be used. 
+ OutputPath *CloudStoragePath `protobuf:"bytes,1,opt,name=output_path,json=outputPath,proto3" json:"output_path,omitempty"` + // Types that are valid to be assigned to Source: + // *LargeCustomDictionaryConfig_CloudStorageFileSet + // *LargeCustomDictionaryConfig_BigQueryField + Source isLargeCustomDictionaryConfig_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LargeCustomDictionaryConfig) Reset() { *m = LargeCustomDictionaryConfig{} } +func (m *LargeCustomDictionaryConfig) String() string { return proto.CompactTextString(m) } +func (*LargeCustomDictionaryConfig) ProtoMessage() {} +func (*LargeCustomDictionaryConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{100} +} +func (m *LargeCustomDictionaryConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LargeCustomDictionaryConfig.Unmarshal(m, b) +} +func (m *LargeCustomDictionaryConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LargeCustomDictionaryConfig.Marshal(b, m, deterministic) +} +func (dst *LargeCustomDictionaryConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_LargeCustomDictionaryConfig.Merge(dst, src) +} +func (m *LargeCustomDictionaryConfig) XXX_Size() int { + return xxx_messageInfo_LargeCustomDictionaryConfig.Size(m) +} +func (m *LargeCustomDictionaryConfig) XXX_DiscardUnknown() { + xxx_messageInfo_LargeCustomDictionaryConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_LargeCustomDictionaryConfig proto.InternalMessageInfo + +func (m *LargeCustomDictionaryConfig) GetOutputPath() *CloudStoragePath { + if m != nil { + return m.OutputPath + } + return nil +} + +type isLargeCustomDictionaryConfig_Source interface { + isLargeCustomDictionaryConfig_Source() +} + +type LargeCustomDictionaryConfig_CloudStorageFileSet struct { + CloudStorageFileSet *CloudStorageFileSet `protobuf:"bytes,2,opt,name=cloud_storage_file_set,json=cloudStorageFileSet,proto3,oneof"` +} + +type LargeCustomDictionaryConfig_BigQueryField struct { + BigQueryField *BigQueryField `protobuf:"bytes,3,opt,name=big_query_field,json=bigQueryField,proto3,oneof"` +} + +func (*LargeCustomDictionaryConfig_CloudStorageFileSet) isLargeCustomDictionaryConfig_Source() {} + +func (*LargeCustomDictionaryConfig_BigQueryField) isLargeCustomDictionaryConfig_Source() {} + +func (m *LargeCustomDictionaryConfig) GetSource() isLargeCustomDictionaryConfig_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *LargeCustomDictionaryConfig) GetCloudStorageFileSet() *CloudStorageFileSet { + if x, ok := m.GetSource().(*LargeCustomDictionaryConfig_CloudStorageFileSet); ok { + return x.CloudStorageFileSet + } + return nil +} + +func (m *LargeCustomDictionaryConfig) GetBigQueryField() *BigQueryField { + if x, ok := m.GetSource().(*LargeCustomDictionaryConfig_BigQueryField); ok { + return x.BigQueryField + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LargeCustomDictionaryConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LargeCustomDictionaryConfig_OneofMarshaler, _LargeCustomDictionaryConfig_OneofUnmarshaler, _LargeCustomDictionaryConfig_OneofSizer, []interface{}{ + (*LargeCustomDictionaryConfig_CloudStorageFileSet)(nil), + (*LargeCustomDictionaryConfig_BigQueryField)(nil), + } +} + +func _LargeCustomDictionaryConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LargeCustomDictionaryConfig) + // source + switch x := m.Source.(type) { + case *LargeCustomDictionaryConfig_CloudStorageFileSet: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudStorageFileSet); err != nil { + return err + } + case *LargeCustomDictionaryConfig_BigQueryField: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigQueryField); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LargeCustomDictionaryConfig.Source has unexpected type %T", x) + } + return nil +} + +func _LargeCustomDictionaryConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LargeCustomDictionaryConfig) + switch tag { + case 2: // source.cloud_storage_file_set + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudStorageFileSet) + err := b.DecodeMessage(msg) + m.Source = &LargeCustomDictionaryConfig_CloudStorageFileSet{msg} + return true, err + case 3: // source.big_query_field + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryField) + err := b.DecodeMessage(msg) + m.Source = &LargeCustomDictionaryConfig_BigQueryField{msg} + return true, err + default: + return false, nil + } +} + +func _LargeCustomDictionaryConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LargeCustomDictionaryConfig) + // source + switch x := m.Source.(type) { + case *LargeCustomDictionaryConfig_CloudStorageFileSet: + s := proto.Size(x.CloudStorageFileSet) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LargeCustomDictionaryConfig_BigQueryField: + s := proto.Size(x.BigQueryField) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Configuration for a StoredInfoType. +type StoredInfoTypeConfig struct { + // Display name of the StoredInfoType (max 256 characters). + DisplayName string `protobuf:"bytes,1,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Description of the StoredInfoType (max 256 characters). 
+ Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // Types that are valid to be assigned to Type: + // *StoredInfoTypeConfig_LargeCustomDictionary + Type isStoredInfoTypeConfig_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StoredInfoTypeConfig) Reset() { *m = StoredInfoTypeConfig{} } +func (m *StoredInfoTypeConfig) String() string { return proto.CompactTextString(m) } +func (*StoredInfoTypeConfig) ProtoMessage() {} +func (*StoredInfoTypeConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{101} +} +func (m *StoredInfoTypeConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StoredInfoTypeConfig.Unmarshal(m, b) +} +func (m *StoredInfoTypeConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StoredInfoTypeConfig.Marshal(b, m, deterministic) +} +func (dst *StoredInfoTypeConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StoredInfoTypeConfig.Merge(dst, src) +} +func (m *StoredInfoTypeConfig) XXX_Size() int { + return xxx_messageInfo_StoredInfoTypeConfig.Size(m) +} +func (m *StoredInfoTypeConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StoredInfoTypeConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StoredInfoTypeConfig proto.InternalMessageInfo + +func (m *StoredInfoTypeConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *StoredInfoTypeConfig) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +type isStoredInfoTypeConfig_Type interface { + isStoredInfoTypeConfig_Type() +} + +type StoredInfoTypeConfig_LargeCustomDictionary struct { + LargeCustomDictionary *LargeCustomDictionaryConfig `protobuf:"bytes,3,opt,name=large_custom_dictionary,json=largeCustomDictionary,proto3,oneof"` +} + +func (*StoredInfoTypeConfig_LargeCustomDictionary) isStoredInfoTypeConfig_Type() {} + +func (m *StoredInfoTypeConfig) GetType() isStoredInfoTypeConfig_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *StoredInfoTypeConfig) GetLargeCustomDictionary() *LargeCustomDictionaryConfig { + if x, ok := m.GetType().(*StoredInfoTypeConfig_LargeCustomDictionary); ok { + return x.LargeCustomDictionary + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StoredInfoTypeConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StoredInfoTypeConfig_OneofMarshaler, _StoredInfoTypeConfig_OneofUnmarshaler, _StoredInfoTypeConfig_OneofSizer, []interface{}{ + (*StoredInfoTypeConfig_LargeCustomDictionary)(nil), + } +} + +func _StoredInfoTypeConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StoredInfoTypeConfig) + // type + switch x := m.Type.(type) { + case *StoredInfoTypeConfig_LargeCustomDictionary: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LargeCustomDictionary); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StoredInfoTypeConfig.Type has unexpected type %T", x) + } + return nil +} + +func _StoredInfoTypeConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StoredInfoTypeConfig) + switch tag { + case 3: // type.large_custom_dictionary + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(LargeCustomDictionaryConfig) + err := b.DecodeMessage(msg) + m.Type = &StoredInfoTypeConfig_LargeCustomDictionary{msg} + return true, err + default: + return false, nil + } +} + +func _StoredInfoTypeConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StoredInfoTypeConfig) + // type + switch x := m.Type.(type) { + case *StoredInfoTypeConfig_LargeCustomDictionary: + s := proto.Size(x.LargeCustomDictionary) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Version of a StoredInfoType, including the configuration used to build it, +// create timestamp, and current state. +type StoredInfoTypeVersion struct { + // StoredInfoType configuration. + Config *StoredInfoTypeConfig `protobuf:"bytes,1,opt,name=config,proto3" json:"config,omitempty"` + // Create timestamp of the version. Read-only, determined by the system + // when the version is created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Stored info type version state. Read-only, updated by the system + // during dictionary creation. + State StoredInfoTypeState `protobuf:"varint,3,opt,name=state,proto3,enum=google.privacy.dlp.v2.StoredInfoTypeState" json:"state,omitempty"` + // Errors that occurred when creating this storedInfoType version, or + // anomalies detected in the storedInfoType data that render it unusable. Only + // the five most recent errors will be displayed, with the most recent error + // appearing first. + //
For example, some of the data for stored custom dictionaries is put in + // the user's Google Cloud Storage bucket, and if this data is modified or + // deleted by the user or another system, the dictionary becomes invalid. + //
If any errors occur, fix the problem indicated by the error message and + // use the UpdateStoredInfoType API method to create another version of the + // storedInfoType to continue using it, reusing the same `config` if it was + // not the source of the error. + Errors []*Error `protobuf:"bytes,4,rep,name=errors,proto3" json:"errors,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StoredInfoTypeVersion) Reset() { *m = StoredInfoTypeVersion{} } +func (m *StoredInfoTypeVersion) String() string { return proto.CompactTextString(m) } +func (*StoredInfoTypeVersion) ProtoMessage() {} +func (*StoredInfoTypeVersion) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{102} +} +func (m *StoredInfoTypeVersion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StoredInfoTypeVersion.Unmarshal(m, b) +} +func (m *StoredInfoTypeVersion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StoredInfoTypeVersion.Marshal(b, m, deterministic) +} +func (dst *StoredInfoTypeVersion) XXX_Merge(src proto.Message) { + xxx_messageInfo_StoredInfoTypeVersion.Merge(dst, src) +} +func (m *StoredInfoTypeVersion) XXX_Size() int { + return xxx_messageInfo_StoredInfoTypeVersion.Size(m) +} +func (m *StoredInfoTypeVersion) XXX_DiscardUnknown() { + xxx_messageInfo_StoredInfoTypeVersion.DiscardUnknown(m) +} + +var xxx_messageInfo_StoredInfoTypeVersion proto.InternalMessageInfo + +func (m *StoredInfoTypeVersion) GetConfig() *StoredInfoTypeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *StoredInfoTypeVersion) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *StoredInfoTypeVersion) GetState() StoredInfoTypeState { + if m != nil { + return m.State + } + return StoredInfoTypeState_STORED_INFO_TYPE_STATE_UNSPECIFIED +} + +func (m *StoredInfoTypeVersion) GetErrors() []*Error { + if m != nil { + return m.Errors + } + return nil +} + +// StoredInfoType resource message that contains information about the current +// version and any pending updates. +type StoredInfoType struct { + // Resource name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Current version of the stored info type. + CurrentVersion *StoredInfoTypeVersion `protobuf:"bytes,2,opt,name=current_version,json=currentVersion,proto3" json:"current_version,omitempty"` + // Pending versions of the stored info type. Empty if no versions are + // pending. 
+ PendingVersions []*StoredInfoTypeVersion `protobuf:"bytes,3,rep,name=pending_versions,json=pendingVersions,proto3" json:"pending_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StoredInfoType) Reset() { *m = StoredInfoType{} } +func (m *StoredInfoType) String() string { return proto.CompactTextString(m) } +func (*StoredInfoType) ProtoMessage() {} +func (*StoredInfoType) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{103} +} +func (m *StoredInfoType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StoredInfoType.Unmarshal(m, b) +} +func (m *StoredInfoType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StoredInfoType.Marshal(b, m, deterministic) +} +func (dst *StoredInfoType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StoredInfoType.Merge(dst, src) +} +func (m *StoredInfoType) XXX_Size() int { + return xxx_messageInfo_StoredInfoType.Size(m) +} +func (m *StoredInfoType) XXX_DiscardUnknown() { + xxx_messageInfo_StoredInfoType.DiscardUnknown(m) +} + +var xxx_messageInfo_StoredInfoType proto.InternalMessageInfo + +func (m *StoredInfoType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StoredInfoType) GetCurrentVersion() *StoredInfoTypeVersion { + if m != nil { + return m.CurrentVersion + } + return nil +} + +func (m *StoredInfoType) GetPendingVersions() []*StoredInfoTypeVersion { + if m != nil { + return m.PendingVersions + } + return nil +} + +// Request message for CreateStoredInfoType. +type CreateStoredInfoTypeRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Configuration of the storedInfoType to create. + Config *StoredInfoTypeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // The storedInfoType ID can contain uppercase and lowercase letters, + // numbers, and hyphens; that is, it must match the regular + // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // characters. Can be empty to allow the system to generate one. 
+ StoredInfoTypeId string `protobuf:"bytes,3,opt,name=stored_info_type_id,json=storedInfoTypeId,proto3" json:"stored_info_type_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateStoredInfoTypeRequest) Reset() { *m = CreateStoredInfoTypeRequest{} } +func (m *CreateStoredInfoTypeRequest) String() string { return proto.CompactTextString(m) } +func (*CreateStoredInfoTypeRequest) ProtoMessage() {} +func (*CreateStoredInfoTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{104} +} +func (m *CreateStoredInfoTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateStoredInfoTypeRequest.Unmarshal(m, b) +} +func (m *CreateStoredInfoTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateStoredInfoTypeRequest.Marshal(b, m, deterministic) +} +func (dst *CreateStoredInfoTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateStoredInfoTypeRequest.Merge(dst, src) +} +func (m *CreateStoredInfoTypeRequest) XXX_Size() int { + return xxx_messageInfo_CreateStoredInfoTypeRequest.Size(m) +} +func (m *CreateStoredInfoTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateStoredInfoTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateStoredInfoTypeRequest proto.InternalMessageInfo + +func (m *CreateStoredInfoTypeRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateStoredInfoTypeRequest) GetConfig() *StoredInfoTypeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *CreateStoredInfoTypeRequest) GetStoredInfoTypeId() string { + if m != nil { + return m.StoredInfoTypeId + } + return "" +} + +// Request message for UpdateStoredInfoType. +type UpdateStoredInfoTypeRequest struct { + // Resource name of organization and storedInfoType to be updated, for + // example `organizations/433245324/storedInfoTypes/432452342` or + // projects/project-id/storedInfoTypes/432452342. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Updated configuration for the storedInfoType. If not provided, a new + // version of the storedInfoType will be created with the existing + // configuration. + Config *StoredInfoTypeConfig `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // Mask to control which fields get updated. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateStoredInfoTypeRequest) Reset() { *m = UpdateStoredInfoTypeRequest{} } +func (m *UpdateStoredInfoTypeRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateStoredInfoTypeRequest) ProtoMessage() {} +func (*UpdateStoredInfoTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{105} +} +func (m *UpdateStoredInfoTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateStoredInfoTypeRequest.Unmarshal(m, b) +} +func (m *UpdateStoredInfoTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateStoredInfoTypeRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateStoredInfoTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateStoredInfoTypeRequest.Merge(dst, src) +} +func (m *UpdateStoredInfoTypeRequest) XXX_Size() int { + return xxx_messageInfo_UpdateStoredInfoTypeRequest.Size(m) +} +func (m *UpdateStoredInfoTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateStoredInfoTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateStoredInfoTypeRequest proto.InternalMessageInfo + +func (m *UpdateStoredInfoTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *UpdateStoredInfoTypeRequest) GetConfig() *StoredInfoTypeConfig { + if m != nil { + return m.Config + } + return nil +} + +func (m *UpdateStoredInfoTypeRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request message for GetStoredInfoType. +type GetStoredInfoTypeRequest struct { + // Resource name of the organization and storedInfoType to be read, for + // example `organizations/433245324/storedInfoTypes/432452342` or + // projects/project-id/storedInfoTypes/432452342. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetStoredInfoTypeRequest) Reset() { *m = GetStoredInfoTypeRequest{} } +func (m *GetStoredInfoTypeRequest) String() string { return proto.CompactTextString(m) } +func (*GetStoredInfoTypeRequest) ProtoMessage() {} +func (*GetStoredInfoTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{106} +} +func (m *GetStoredInfoTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetStoredInfoTypeRequest.Unmarshal(m, b) +} +func (m *GetStoredInfoTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetStoredInfoTypeRequest.Marshal(b, m, deterministic) +} +func (dst *GetStoredInfoTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetStoredInfoTypeRequest.Merge(dst, src) +} +func (m *GetStoredInfoTypeRequest) XXX_Size() int { + return xxx_messageInfo_GetStoredInfoTypeRequest.Size(m) +} +func (m *GetStoredInfoTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetStoredInfoTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetStoredInfoTypeRequest proto.InternalMessageInfo + +func (m *GetStoredInfoTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request message for ListStoredInfoTypes. 
+type ListStoredInfoTypesRequest struct { + // The parent resource name, for example projects/my-project-id or + // organizations/my-org-id. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Optional page token to continue retrieval. Comes from previous call + // to `ListStoredInfoTypes`. + PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // Optional size of the page, can be limited by server. If zero server returns + // a page of max size 100. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // Optional comma separated list of fields to order by, + // followed by `asc` or `desc` postfix. This list is case-insensitive, + // default sorting order is ascending, redundant space characters are + // insignificant. + // + // Example: `name asc, display_name, create_time desc` + // + // Supported fields are: + // + // - `create_time`: corresponds to time the most recent version of the + // resource was created. + // - `state`: corresponds to the state of the resource. + // - `name`: corresponds to resource name. + // - `display_name`: corresponds to info type's display name. + OrderBy string `protobuf:"bytes,4,opt,name=order_by,json=orderBy,proto3" json:"order_by,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListStoredInfoTypesRequest) Reset() { *m = ListStoredInfoTypesRequest{} } +func (m *ListStoredInfoTypesRequest) String() string { return proto.CompactTextString(m) } +func (*ListStoredInfoTypesRequest) ProtoMessage() {} +func (*ListStoredInfoTypesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{107} +} +func (m *ListStoredInfoTypesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListStoredInfoTypesRequest.Unmarshal(m, b) +} +func (m *ListStoredInfoTypesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListStoredInfoTypesRequest.Marshal(b, m, deterministic) +} +func (dst *ListStoredInfoTypesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListStoredInfoTypesRequest.Merge(dst, src) +} +func (m *ListStoredInfoTypesRequest) XXX_Size() int { + return xxx_messageInfo_ListStoredInfoTypesRequest.Size(m) +} +func (m *ListStoredInfoTypesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListStoredInfoTypesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListStoredInfoTypesRequest proto.InternalMessageInfo + +func (m *ListStoredInfoTypesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListStoredInfoTypesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListStoredInfoTypesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListStoredInfoTypesRequest) GetOrderBy() string { + if m != nil { + return m.OrderBy + } + return "" +} + +// Response message for ListStoredInfoTypes. +type ListStoredInfoTypesResponse struct { + // List of storedInfoTypes, up to page_size in ListStoredInfoTypesRequest. + StoredInfoTypes []*StoredInfoType `protobuf:"bytes,1,rep,name=stored_info_types,json=storedInfoTypes,proto3" json:"stored_info_types,omitempty"` + // If the next page is available then the next page token to be used + // in following ListStoredInfoTypes request. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListStoredInfoTypesResponse) Reset() { *m = ListStoredInfoTypesResponse{} } +func (m *ListStoredInfoTypesResponse) String() string { return proto.CompactTextString(m) } +func (*ListStoredInfoTypesResponse) ProtoMessage() {} +func (*ListStoredInfoTypesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{108} +} +func (m *ListStoredInfoTypesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListStoredInfoTypesResponse.Unmarshal(m, b) +} +func (m *ListStoredInfoTypesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListStoredInfoTypesResponse.Marshal(b, m, deterministic) +} +func (dst *ListStoredInfoTypesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListStoredInfoTypesResponse.Merge(dst, src) +} +func (m *ListStoredInfoTypesResponse) XXX_Size() int { + return xxx_messageInfo_ListStoredInfoTypesResponse.Size(m) +} +func (m *ListStoredInfoTypesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListStoredInfoTypesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListStoredInfoTypesResponse proto.InternalMessageInfo + +func (m *ListStoredInfoTypesResponse) GetStoredInfoTypes() []*StoredInfoType { + if m != nil { + return m.StoredInfoTypes + } + return nil +} + +func (m *ListStoredInfoTypesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request message for DeleteStoredInfoType. +type DeleteStoredInfoTypeRequest struct { + // Resource name of the organization and storedInfoType to be deleted, for + // example `organizations/433245324/storedInfoTypes/432452342` or + // projects/project-id/storedInfoTypes/432452342. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteStoredInfoTypeRequest) Reset() { *m = DeleteStoredInfoTypeRequest{} } +func (m *DeleteStoredInfoTypeRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteStoredInfoTypeRequest) ProtoMessage() {} +func (*DeleteStoredInfoTypeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_dlp_1aebf9c18c267d70, []int{109} +} +func (m *DeleteStoredInfoTypeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteStoredInfoTypeRequest.Unmarshal(m, b) +} +func (m *DeleteStoredInfoTypeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteStoredInfoTypeRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteStoredInfoTypeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteStoredInfoTypeRequest.Merge(dst, src) +} +func (m *DeleteStoredInfoTypeRequest) XXX_Size() int { + return xxx_messageInfo_DeleteStoredInfoTypeRequest.Size(m) +} +func (m *DeleteStoredInfoTypeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteStoredInfoTypeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteStoredInfoTypeRequest proto.InternalMessageInfo + +func (m *DeleteStoredInfoTypeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*ExcludeInfoTypes)(nil), "google.privacy.dlp.v2.ExcludeInfoTypes") + proto.RegisterType((*ExclusionRule)(nil), "google.privacy.dlp.v2.ExclusionRule") + proto.RegisterType((*InspectionRule)(nil), "google.privacy.dlp.v2.InspectionRule") + proto.RegisterType((*InspectionRuleSet)(nil), "google.privacy.dlp.v2.InspectionRuleSet") + proto.RegisterType((*InspectConfig)(nil), "google.privacy.dlp.v2.InspectConfig") + proto.RegisterType((*InspectConfig_FindingLimits)(nil), "google.privacy.dlp.v2.InspectConfig.FindingLimits") + proto.RegisterType((*InspectConfig_FindingLimits_InfoTypeLimit)(nil), "google.privacy.dlp.v2.InspectConfig.FindingLimits.InfoTypeLimit") + proto.RegisterType((*ByteContentItem)(nil), "google.privacy.dlp.v2.ByteContentItem") + proto.RegisterType((*ContentItem)(nil), "google.privacy.dlp.v2.ContentItem") + proto.RegisterType((*Table)(nil), "google.privacy.dlp.v2.Table") + proto.RegisterType((*Table_Row)(nil), "google.privacy.dlp.v2.Table.Row") + proto.RegisterType((*InspectResult)(nil), "google.privacy.dlp.v2.InspectResult") + proto.RegisterType((*Finding)(nil), "google.privacy.dlp.v2.Finding") + proto.RegisterType((*Location)(nil), "google.privacy.dlp.v2.Location") + proto.RegisterType((*ContentLocation)(nil), "google.privacy.dlp.v2.ContentLocation") + proto.RegisterType((*DocumentLocation)(nil), "google.privacy.dlp.v2.DocumentLocation") + proto.RegisterType((*RecordLocation)(nil), "google.privacy.dlp.v2.RecordLocation") + proto.RegisterType((*TableLocation)(nil), "google.privacy.dlp.v2.TableLocation") + proto.RegisterType((*Range)(nil), "google.privacy.dlp.v2.Range") + proto.RegisterType((*ImageLocation)(nil), "google.privacy.dlp.v2.ImageLocation") + proto.RegisterType((*BoundingBox)(nil), "google.privacy.dlp.v2.BoundingBox") + proto.RegisterType((*RedactImageRequest)(nil), "google.privacy.dlp.v2.RedactImageRequest") + proto.RegisterType((*RedactImageRequest_ImageRedactionConfig)(nil), "google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig") + proto.RegisterType((*Color)(nil), 
"google.privacy.dlp.v2.Color") + proto.RegisterType((*RedactImageResponse)(nil), "google.privacy.dlp.v2.RedactImageResponse") + proto.RegisterType((*DeidentifyContentRequest)(nil), "google.privacy.dlp.v2.DeidentifyContentRequest") + proto.RegisterType((*DeidentifyContentResponse)(nil), "google.privacy.dlp.v2.DeidentifyContentResponse") + proto.RegisterType((*ReidentifyContentRequest)(nil), "google.privacy.dlp.v2.ReidentifyContentRequest") + proto.RegisterType((*ReidentifyContentResponse)(nil), "google.privacy.dlp.v2.ReidentifyContentResponse") + proto.RegisterType((*InspectContentRequest)(nil), "google.privacy.dlp.v2.InspectContentRequest") + proto.RegisterType((*InspectContentResponse)(nil), "google.privacy.dlp.v2.InspectContentResponse") + proto.RegisterType((*OutputStorageConfig)(nil), "google.privacy.dlp.v2.OutputStorageConfig") + proto.RegisterType((*InfoTypeStats)(nil), "google.privacy.dlp.v2.InfoTypeStats") + proto.RegisterType((*InspectDataSourceDetails)(nil), "google.privacy.dlp.v2.InspectDataSourceDetails") + proto.RegisterType((*InspectDataSourceDetails_RequestedOptions)(nil), "google.privacy.dlp.v2.InspectDataSourceDetails.RequestedOptions") + proto.RegisterType((*InspectDataSourceDetails_Result)(nil), "google.privacy.dlp.v2.InspectDataSourceDetails.Result") + proto.RegisterType((*InfoTypeDescription)(nil), "google.privacy.dlp.v2.InfoTypeDescription") + proto.RegisterType((*ListInfoTypesRequest)(nil), "google.privacy.dlp.v2.ListInfoTypesRequest") + proto.RegisterType((*ListInfoTypesResponse)(nil), "google.privacy.dlp.v2.ListInfoTypesResponse") + proto.RegisterType((*RiskAnalysisJobConfig)(nil), "google.privacy.dlp.v2.RiskAnalysisJobConfig") + proto.RegisterType((*QuasiId)(nil), "google.privacy.dlp.v2.QuasiId") + proto.RegisterType((*StatisticalTable)(nil), "google.privacy.dlp.v2.StatisticalTable") + proto.RegisterType((*StatisticalTable_QuasiIdentifierField)(nil), "google.privacy.dlp.v2.StatisticalTable.QuasiIdentifierField") + proto.RegisterType((*PrivacyMetric)(nil), "google.privacy.dlp.v2.PrivacyMetric") + proto.RegisterType((*PrivacyMetric_NumericalStatsConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.NumericalStatsConfig") + proto.RegisterType((*PrivacyMetric_CategoricalStatsConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig") + proto.RegisterType((*PrivacyMetric_KAnonymityConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfig") + proto.RegisterType((*PrivacyMetric_LDiversityConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig") + proto.RegisterType((*PrivacyMetric_KMapEstimationConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig") + proto.RegisterType((*PrivacyMetric_KMapEstimationConfig_TaggedField)(nil), "google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField") + proto.RegisterType((*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable)(nil), "google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable") + proto.RegisterType((*PrivacyMetric_KMapEstimationConfig_AuxiliaryTable_QuasiIdField)(nil), "google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField") + proto.RegisterType((*PrivacyMetric_DeltaPresenceEstimationConfig)(nil), "google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfig") + proto.RegisterType((*AnalyzeDataSourceRiskDetails)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_NumericalStatsResult)(nil), 
"google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_CategoricalStatsResult)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_CategoricalStatsResult_CategoricalStatsHistogramBucket)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KAnonymityResult)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityEquivalenceClass)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KAnonymityResult_KAnonymityHistogramBucket)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_LDiversityResult)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityEquivalenceClass)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_LDiversityResult_LDiversityHistogramBucket)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KMapEstimationResult)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationQuasiIdValues)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_KMapEstimationResult_KMapEstimationHistogramBucket)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationQuasiIdValues)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues") + proto.RegisterType((*AnalyzeDataSourceRiskDetails_DeltaPresenceEstimationResult_DeltaPresenceEstimationHistogramBucket)(nil), "google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket") + proto.RegisterType((*ValueFrequency)(nil), "google.privacy.dlp.v2.ValueFrequency") + proto.RegisterType((*Value)(nil), "google.privacy.dlp.v2.Value") + proto.RegisterType((*QuoteInfo)(nil), "google.privacy.dlp.v2.QuoteInfo") + proto.RegisterType((*DateTime)(nil), "google.privacy.dlp.v2.DateTime") + proto.RegisterType((*DateTime_TimeZone)(nil), "google.privacy.dlp.v2.DateTime.TimeZone") + proto.RegisterType((*DeidentifyConfig)(nil), "google.privacy.dlp.v2.DeidentifyConfig") + proto.RegisterType((*PrimitiveTransformation)(nil), "google.privacy.dlp.v2.PrimitiveTransformation") + proto.RegisterType((*TimePartConfig)(nil), "google.privacy.dlp.v2.TimePartConfig") + proto.RegisterType((*CryptoHashConfig)(nil), 
"google.privacy.dlp.v2.CryptoHashConfig") + proto.RegisterType((*CryptoDeterministicConfig)(nil), "google.privacy.dlp.v2.CryptoDeterministicConfig") + proto.RegisterType((*ReplaceValueConfig)(nil), "google.privacy.dlp.v2.ReplaceValueConfig") + proto.RegisterType((*ReplaceWithInfoTypeConfig)(nil), "google.privacy.dlp.v2.ReplaceWithInfoTypeConfig") + proto.RegisterType((*RedactConfig)(nil), "google.privacy.dlp.v2.RedactConfig") + proto.RegisterType((*CharsToIgnore)(nil), "google.privacy.dlp.v2.CharsToIgnore") + proto.RegisterType((*CharacterMaskConfig)(nil), "google.privacy.dlp.v2.CharacterMaskConfig") + proto.RegisterType((*FixedSizeBucketingConfig)(nil), "google.privacy.dlp.v2.FixedSizeBucketingConfig") + proto.RegisterType((*BucketingConfig)(nil), "google.privacy.dlp.v2.BucketingConfig") + proto.RegisterType((*BucketingConfig_Bucket)(nil), "google.privacy.dlp.v2.BucketingConfig.Bucket") + proto.RegisterType((*CryptoReplaceFfxFpeConfig)(nil), "google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig") + proto.RegisterType((*CryptoKey)(nil), "google.privacy.dlp.v2.CryptoKey") + proto.RegisterType((*TransientCryptoKey)(nil), "google.privacy.dlp.v2.TransientCryptoKey") + proto.RegisterType((*UnwrappedCryptoKey)(nil), "google.privacy.dlp.v2.UnwrappedCryptoKey") + proto.RegisterType((*KmsWrappedCryptoKey)(nil), "google.privacy.dlp.v2.KmsWrappedCryptoKey") + proto.RegisterType((*DateShiftConfig)(nil), "google.privacy.dlp.v2.DateShiftConfig") + proto.RegisterType((*InfoTypeTransformations)(nil), "google.privacy.dlp.v2.InfoTypeTransformations") + proto.RegisterType((*InfoTypeTransformations_InfoTypeTransformation)(nil), "google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformation") + proto.RegisterType((*FieldTransformation)(nil), "google.privacy.dlp.v2.FieldTransformation") + proto.RegisterType((*RecordTransformations)(nil), "google.privacy.dlp.v2.RecordTransformations") + proto.RegisterType((*RecordSuppression)(nil), "google.privacy.dlp.v2.RecordSuppression") + proto.RegisterType((*RecordCondition)(nil), "google.privacy.dlp.v2.RecordCondition") + proto.RegisterType((*RecordCondition_Condition)(nil), "google.privacy.dlp.v2.RecordCondition.Condition") + proto.RegisterType((*RecordCondition_Conditions)(nil), "google.privacy.dlp.v2.RecordCondition.Conditions") + proto.RegisterType((*RecordCondition_Expressions)(nil), "google.privacy.dlp.v2.RecordCondition.Expressions") + proto.RegisterType((*TransformationOverview)(nil), "google.privacy.dlp.v2.TransformationOverview") + proto.RegisterType((*TransformationSummary)(nil), "google.privacy.dlp.v2.TransformationSummary") + proto.RegisterType((*TransformationSummary_SummaryResult)(nil), "google.privacy.dlp.v2.TransformationSummary.SummaryResult") + proto.RegisterType((*Schedule)(nil), "google.privacy.dlp.v2.Schedule") + proto.RegisterType((*InspectTemplate)(nil), "google.privacy.dlp.v2.InspectTemplate") + proto.RegisterType((*DeidentifyTemplate)(nil), "google.privacy.dlp.v2.DeidentifyTemplate") + proto.RegisterType((*Error)(nil), "google.privacy.dlp.v2.Error") + proto.RegisterType((*JobTrigger)(nil), "google.privacy.dlp.v2.JobTrigger") + proto.RegisterType((*JobTrigger_Trigger)(nil), "google.privacy.dlp.v2.JobTrigger.Trigger") + proto.RegisterType((*Action)(nil), "google.privacy.dlp.v2.Action") + proto.RegisterType((*Action_SaveFindings)(nil), "google.privacy.dlp.v2.Action.SaveFindings") + proto.RegisterType((*Action_PublishToPubSub)(nil), "google.privacy.dlp.v2.Action.PublishToPubSub") + proto.RegisterType((*Action_PublishSummaryToCscc)(nil), 
"google.privacy.dlp.v2.Action.PublishSummaryToCscc") + proto.RegisterType((*Action_JobNotificationEmails)(nil), "google.privacy.dlp.v2.Action.JobNotificationEmails") + proto.RegisterType((*CreateInspectTemplateRequest)(nil), "google.privacy.dlp.v2.CreateInspectTemplateRequest") + proto.RegisterType((*UpdateInspectTemplateRequest)(nil), "google.privacy.dlp.v2.UpdateInspectTemplateRequest") + proto.RegisterType((*GetInspectTemplateRequest)(nil), "google.privacy.dlp.v2.GetInspectTemplateRequest") + proto.RegisterType((*ListInspectTemplatesRequest)(nil), "google.privacy.dlp.v2.ListInspectTemplatesRequest") + proto.RegisterType((*ListInspectTemplatesResponse)(nil), "google.privacy.dlp.v2.ListInspectTemplatesResponse") + proto.RegisterType((*DeleteInspectTemplateRequest)(nil), "google.privacy.dlp.v2.DeleteInspectTemplateRequest") + proto.RegisterType((*CreateJobTriggerRequest)(nil), "google.privacy.dlp.v2.CreateJobTriggerRequest") + proto.RegisterType((*ActivateJobTriggerRequest)(nil), "google.privacy.dlp.v2.ActivateJobTriggerRequest") + proto.RegisterType((*UpdateJobTriggerRequest)(nil), "google.privacy.dlp.v2.UpdateJobTriggerRequest") + proto.RegisterType((*GetJobTriggerRequest)(nil), "google.privacy.dlp.v2.GetJobTriggerRequest") + proto.RegisterType((*CreateDlpJobRequest)(nil), "google.privacy.dlp.v2.CreateDlpJobRequest") + proto.RegisterType((*ListJobTriggersRequest)(nil), "google.privacy.dlp.v2.ListJobTriggersRequest") + proto.RegisterType((*ListJobTriggersResponse)(nil), "google.privacy.dlp.v2.ListJobTriggersResponse") + proto.RegisterType((*DeleteJobTriggerRequest)(nil), "google.privacy.dlp.v2.DeleteJobTriggerRequest") + proto.RegisterType((*InspectJobConfig)(nil), "google.privacy.dlp.v2.InspectJobConfig") + proto.RegisterType((*DlpJob)(nil), "google.privacy.dlp.v2.DlpJob") + proto.RegisterType((*GetDlpJobRequest)(nil), "google.privacy.dlp.v2.GetDlpJobRequest") + proto.RegisterType((*ListDlpJobsRequest)(nil), "google.privacy.dlp.v2.ListDlpJobsRequest") + proto.RegisterType((*ListDlpJobsResponse)(nil), "google.privacy.dlp.v2.ListDlpJobsResponse") + proto.RegisterType((*CancelDlpJobRequest)(nil), "google.privacy.dlp.v2.CancelDlpJobRequest") + proto.RegisterType((*DeleteDlpJobRequest)(nil), "google.privacy.dlp.v2.DeleteDlpJobRequest") + proto.RegisterType((*CreateDeidentifyTemplateRequest)(nil), "google.privacy.dlp.v2.CreateDeidentifyTemplateRequest") + proto.RegisterType((*UpdateDeidentifyTemplateRequest)(nil), "google.privacy.dlp.v2.UpdateDeidentifyTemplateRequest") + proto.RegisterType((*GetDeidentifyTemplateRequest)(nil), "google.privacy.dlp.v2.GetDeidentifyTemplateRequest") + proto.RegisterType((*ListDeidentifyTemplatesRequest)(nil), "google.privacy.dlp.v2.ListDeidentifyTemplatesRequest") + proto.RegisterType((*ListDeidentifyTemplatesResponse)(nil), "google.privacy.dlp.v2.ListDeidentifyTemplatesResponse") + proto.RegisterType((*DeleteDeidentifyTemplateRequest)(nil), "google.privacy.dlp.v2.DeleteDeidentifyTemplateRequest") + proto.RegisterType((*LargeCustomDictionaryConfig)(nil), "google.privacy.dlp.v2.LargeCustomDictionaryConfig") + proto.RegisterType((*StoredInfoTypeConfig)(nil), "google.privacy.dlp.v2.StoredInfoTypeConfig") + proto.RegisterType((*StoredInfoTypeVersion)(nil), "google.privacy.dlp.v2.StoredInfoTypeVersion") + proto.RegisterType((*StoredInfoType)(nil), "google.privacy.dlp.v2.StoredInfoType") + proto.RegisterType((*CreateStoredInfoTypeRequest)(nil), "google.privacy.dlp.v2.CreateStoredInfoTypeRequest") + proto.RegisterType((*UpdateStoredInfoTypeRequest)(nil), 
"google.privacy.dlp.v2.UpdateStoredInfoTypeRequest") + proto.RegisterType((*GetStoredInfoTypeRequest)(nil), "google.privacy.dlp.v2.GetStoredInfoTypeRequest") + proto.RegisterType((*ListStoredInfoTypesRequest)(nil), "google.privacy.dlp.v2.ListStoredInfoTypesRequest") + proto.RegisterType((*ListStoredInfoTypesResponse)(nil), "google.privacy.dlp.v2.ListStoredInfoTypesResponse") + proto.RegisterType((*DeleteStoredInfoTypeRequest)(nil), "google.privacy.dlp.v2.DeleteStoredInfoTypeRequest") + proto.RegisterEnum("google.privacy.dlp.v2.ContentOption", ContentOption_name, ContentOption_value) + proto.RegisterEnum("google.privacy.dlp.v2.MatchingType", MatchingType_name, MatchingType_value) + proto.RegisterEnum("google.privacy.dlp.v2.InfoTypeSupportedBy", InfoTypeSupportedBy_name, InfoTypeSupportedBy_value) + proto.RegisterEnum("google.privacy.dlp.v2.RelationalOperator", RelationalOperator_name, RelationalOperator_value) + proto.RegisterEnum("google.privacy.dlp.v2.DlpJobType", DlpJobType_name, DlpJobType_value) + proto.RegisterEnum("google.privacy.dlp.v2.StoredInfoTypeState", StoredInfoTypeState_name, StoredInfoTypeState_value) + proto.RegisterEnum("google.privacy.dlp.v2.ByteContentItem_BytesType", ByteContentItem_BytesType_name, ByteContentItem_BytesType_value) + proto.RegisterEnum("google.privacy.dlp.v2.OutputStorageConfig_OutputSchema", OutputStorageConfig_OutputSchema_name, OutputStorageConfig_OutputSchema_value) + proto.RegisterEnum("google.privacy.dlp.v2.TimePartConfig_TimePart", TimePartConfig_TimePart_name, TimePartConfig_TimePart_value) + proto.RegisterEnum("google.privacy.dlp.v2.CharsToIgnore_CommonCharsToIgnore", CharsToIgnore_CommonCharsToIgnore_name, CharsToIgnore_CommonCharsToIgnore_value) + proto.RegisterEnum("google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet", CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet_name, CryptoReplaceFfxFpeConfig_FfxCommonNativeAlphabet_value) + proto.RegisterEnum("google.privacy.dlp.v2.RecordCondition_Expressions_LogicalOperator", RecordCondition_Expressions_LogicalOperator_name, RecordCondition_Expressions_LogicalOperator_value) + proto.RegisterEnum("google.privacy.dlp.v2.TransformationSummary_TransformationResultCode", TransformationSummary_TransformationResultCode_name, TransformationSummary_TransformationResultCode_value) + proto.RegisterEnum("google.privacy.dlp.v2.JobTrigger_Status", JobTrigger_Status_name, JobTrigger_Status_value) + proto.RegisterEnum("google.privacy.dlp.v2.DlpJob_JobState", DlpJob_JobState_name, DlpJob_JobState_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DlpServiceClient is the client API for DlpService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DlpServiceClient interface { + // Finds potentially sensitive info in content. + // This method has limits on input size, processing time, and output size. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. 
+ // + // For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images + // and https://cloud.google.com/dlp/docs/inspecting-text, + InspectContent(ctx context.Context, in *InspectContentRequest, opts ...grpc.CallOption) (*InspectContentResponse, error) + // Redacts potentially sensitive info from an image. + // This method has limits on input size, processing time, and output size. + // See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to + // learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + RedactImage(ctx context.Context, in *RedactImageRequest, opts ...grpc.CallOption) (*RedactImageResponse, error) + // De-identifies potentially sensitive info from a ContentItem. + // This method has limits on input size and output size. + // See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to + // learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + DeidentifyContent(ctx context.Context, in *DeidentifyContentRequest, opts ...grpc.CallOption) (*DeidentifyContentResponse, error) + // Re-identifies content that has been de-identified. + // See + // https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + // to learn more. + ReidentifyContent(ctx context.Context, in *ReidentifyContentRequest, opts ...grpc.CallOption) (*ReidentifyContentResponse, error) + // Returns a list of the sensitive information types that the DLP API + // supports. See https://cloud.google.com/dlp/docs/infotypes-reference to + // learn more. + ListInfoTypes(ctx context.Context, in *ListInfoTypesRequest, opts ...grpc.CallOption) (*ListInfoTypesResponse, error) + // Creates an InspectTemplate for re-using frequently used configuration + // for inspecting content, images, and storage. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + CreateInspectTemplate(ctx context.Context, in *CreateInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) + // Updates the InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + UpdateInspectTemplate(ctx context.Context, in *UpdateInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) + // Gets an InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + GetInspectTemplate(ctx context.Context, in *GetInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) + // Lists InspectTemplates. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + ListInspectTemplates(ctx context.Context, in *ListInspectTemplatesRequest, opts ...grpc.CallOption) (*ListInspectTemplatesResponse, error) + // Deletes an InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + DeleteInspectTemplate(ctx context.Context, in *DeleteInspectTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a DeidentifyTemplate for re-using frequently used configuration + // for de-identifying content, images, and storage. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. 
+ CreateDeidentifyTemplate(ctx context.Context, in *CreateDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) + // Updates the DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + UpdateDeidentifyTemplate(ctx context.Context, in *UpdateDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) + // Gets a DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + GetDeidentifyTemplate(ctx context.Context, in *GetDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) + // Lists DeidentifyTemplates. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + ListDeidentifyTemplates(ctx context.Context, in *ListDeidentifyTemplatesRequest, opts ...grpc.CallOption) (*ListDeidentifyTemplatesResponse, error) + // Deletes a DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + DeleteDeidentifyTemplate(ctx context.Context, in *DeleteDeidentifyTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a job trigger to run DLP actions such as scanning storage for + // sensitive information on a set schedule. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + CreateJobTrigger(ctx context.Context, in *CreateJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) + // Updates a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + UpdateJobTrigger(ctx context.Context, in *UpdateJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) + // Gets a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + GetJobTrigger(ctx context.Context, in *GetJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) + // Lists job triggers. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + ListJobTriggers(ctx context.Context, in *ListJobTriggersRequest, opts ...grpc.CallOption) (*ListJobTriggersResponse, error) + // Deletes a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + DeleteJobTrigger(ctx context.Context, in *DeleteJobTriggerRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Activate a job trigger. Causes the immediate execute of a trigger + // instead of waiting on the trigger event to occur. + ActivateJobTrigger(ctx context.Context, in *ActivateJobTriggerRequest, opts ...grpc.CallOption) (*DlpJob, error) + // Creates a new job to inspect storage or calculate risk metrics. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + CreateDlpJob(ctx context.Context, in *CreateDlpJobRequest, opts ...grpc.CallOption) (*DlpJob, error) + // Lists DlpJobs that match the specified filter in the request. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + ListDlpJobs(ctx context.Context, in *ListDlpJobsRequest, opts ...grpc.CallOption) (*ListDlpJobsResponse, error) + // Gets the latest state of a long-running DlpJob. 
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + GetDlpJob(ctx context.Context, in *GetDlpJobRequest, opts ...grpc.CallOption) (*DlpJob, error) + // Deletes a long-running DlpJob. This method indicates that the client is + // no longer interested in the DlpJob result. The job will be cancelled if + // possible. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + DeleteDlpJob(ctx context.Context, in *DeleteDlpJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Starts asynchronous cancellation on a long-running DlpJob. The server + // makes a best effort to cancel the DlpJob, but success is not + // guaranteed. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + CancelDlpJob(ctx context.Context, in *CancelDlpJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a pre-built stored infoType to be used for inspection. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + CreateStoredInfoType(ctx context.Context, in *CreateStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) + // Updates the stored infoType by creating a new version. The existing version + // will continue to be used until the new version is ready. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + UpdateStoredInfoType(ctx context.Context, in *UpdateStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) + // Gets a stored infoType. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + GetStoredInfoType(ctx context.Context, in *GetStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) + // Lists stored infoTypes. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + ListStoredInfoTypes(ctx context.Context, in *ListStoredInfoTypesRequest, opts ...grpc.CallOption) (*ListStoredInfoTypesResponse, error) + // Deletes a stored infoType. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + DeleteStoredInfoType(ctx context.Context, in *DeleteStoredInfoTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type dlpServiceClient struct { + cc *grpc.ClientConn +} + +func NewDlpServiceClient(cc *grpc.ClientConn) DlpServiceClient { + return &dlpServiceClient{cc} +} + +func (c *dlpServiceClient) InspectContent(ctx context.Context, in *InspectContentRequest, opts ...grpc.CallOption) (*InspectContentResponse, error) { + out := new(InspectContentResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/InspectContent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) RedactImage(ctx context.Context, in *RedactImageRequest, opts ...grpc.CallOption) (*RedactImageResponse, error) { + out := new(RedactImageResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/RedactImage", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeidentifyContent(ctx context.Context, in *DeidentifyContentRequest, opts ...grpc.CallOption) (*DeidentifyContentResponse, error) { + out := new(DeidentifyContentResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeidentifyContent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ReidentifyContent(ctx context.Context, in *ReidentifyContentRequest, opts ...grpc.CallOption) (*ReidentifyContentResponse, error) { + out := new(ReidentifyContentResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ReidentifyContent", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListInfoTypes(ctx context.Context, in *ListInfoTypesRequest, opts ...grpc.CallOption) (*ListInfoTypesResponse, error) { + out := new(ListInfoTypesResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListInfoTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CreateInspectTemplate(ctx context.Context, in *CreateInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) { + out := new(InspectTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) UpdateInspectTemplate(ctx context.Context, in *UpdateInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) { + out := new(InspectTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) GetInspectTemplate(ctx context.Context, in *GetInspectTemplateRequest, opts ...grpc.CallOption) (*InspectTemplate, error) { + out := new(InspectTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListInspectTemplates(ctx context.Context, in *ListInspectTemplatesRequest, opts ...grpc.CallOption) (*ListInspectTemplatesResponse, error) { + out := new(ListInspectTemplatesResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeleteInspectTemplate(ctx context.Context, in *DeleteInspectTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CreateDeidentifyTemplate(ctx context.Context, in *CreateDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) { + out := new(DeidentifyTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) UpdateDeidentifyTemplate(ctx context.Context, in *UpdateDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) { + out := new(DeidentifyTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) GetDeidentifyTemplate(ctx context.Context, in *GetDeidentifyTemplateRequest, opts ...grpc.CallOption) (*DeidentifyTemplate, error) { + out := new(DeidentifyTemplate) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListDeidentifyTemplates(ctx context.Context, in *ListDeidentifyTemplatesRequest, opts ...grpc.CallOption) (*ListDeidentifyTemplatesResponse, error) { + out := new(ListDeidentifyTemplatesResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeleteDeidentifyTemplate(ctx context.Context, in *DeleteDeidentifyTemplateRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CreateJobTrigger(ctx context.Context, in *CreateJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) { + out := new(JobTrigger) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) UpdateJobTrigger(ctx context.Context, in *UpdateJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) { + out := new(JobTrigger) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) GetJobTrigger(ctx context.Context, in *GetJobTriggerRequest, opts ...grpc.CallOption) (*JobTrigger, error) { + out := new(JobTrigger) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/GetJobTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListJobTriggers(ctx context.Context, in *ListJobTriggersRequest, opts ...grpc.CallOption) (*ListJobTriggersResponse, error) { + out := new(ListJobTriggersResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListJobTriggers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeleteJobTrigger(ctx context.Context, in *DeleteJobTriggerRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ActivateJobTrigger(ctx context.Context, in *ActivateJobTriggerRequest, opts ...grpc.CallOption) (*DlpJob, error) { + out := new(DlpJob) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CreateDlpJob(ctx context.Context, in *CreateDlpJobRequest, opts ...grpc.CallOption) (*DlpJob, error) { + out := new(DlpJob) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CreateDlpJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListDlpJobs(ctx context.Context, in *ListDlpJobsRequest, opts ...grpc.CallOption) (*ListDlpJobsResponse, error) { + out := new(ListDlpJobsResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListDlpJobs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) GetDlpJob(ctx context.Context, in *GetDlpJobRequest, opts ...grpc.CallOption) (*DlpJob, error) { + out := new(DlpJob) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/GetDlpJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeleteDlpJob(ctx context.Context, in *DeleteDlpJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CancelDlpJob(ctx context.Context, in *CancelDlpJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CancelDlpJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) CreateStoredInfoType(ctx context.Context, in *CreateStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) { + out := new(StoredInfoType) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) UpdateStoredInfoType(ctx context.Context, in *UpdateStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) { + out := new(StoredInfoType) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) GetStoredInfoType(ctx context.Context, in *GetStoredInfoTypeRequest, opts ...grpc.CallOption) (*StoredInfoType, error) { + out := new(StoredInfoType) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) ListStoredInfoTypes(ctx context.Context, in *ListStoredInfoTypesRequest, opts ...grpc.CallOption) (*ListStoredInfoTypesResponse, error) { + out := new(ListStoredInfoTypesResponse) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *dlpServiceClient) DeleteStoredInfoType(ctx context.Context, in *DeleteStoredInfoTypeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DlpServiceServer is the server API for DlpService service. +type DlpServiceServer interface { + // Finds potentially sensitive info in content. 
+ // This method has limits on input size, processing time, and output size. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + // + // For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images + // and https://cloud.google.com/dlp/docs/inspecting-text, + InspectContent(context.Context, *InspectContentRequest) (*InspectContentResponse, error) + // Redacts potentially sensitive info from an image. + // This method has limits on input size, processing time, and output size. + // See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to + // learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + RedactImage(context.Context, *RedactImageRequest) (*RedactImageResponse, error) + // De-identifies potentially sensitive info from a ContentItem. + // This method has limits on input size and output size. + // See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to + // learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in this request, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + DeidentifyContent(context.Context, *DeidentifyContentRequest) (*DeidentifyContentResponse, error) + // Re-identifies content that has been de-identified. + // See + // https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + // to learn more. + ReidentifyContent(context.Context, *ReidentifyContentRequest) (*ReidentifyContentResponse, error) + // Returns a list of the sensitive information types that the DLP API + // supports. See https://cloud.google.com/dlp/docs/infotypes-reference to + // learn more. + ListInfoTypes(context.Context, *ListInfoTypesRequest) (*ListInfoTypesResponse, error) + // Creates an InspectTemplate for re-using frequently used configuration + // for inspecting content, images, and storage. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + CreateInspectTemplate(context.Context, *CreateInspectTemplateRequest) (*InspectTemplate, error) + // Updates the InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + UpdateInspectTemplate(context.Context, *UpdateInspectTemplateRequest) (*InspectTemplate, error) + // Gets an InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + GetInspectTemplate(context.Context, *GetInspectTemplateRequest) (*InspectTemplate, error) + // Lists InspectTemplates. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + ListInspectTemplates(context.Context, *ListInspectTemplatesRequest) (*ListInspectTemplatesResponse, error) + // Deletes an InspectTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates to learn more. + DeleteInspectTemplate(context.Context, *DeleteInspectTemplateRequest) (*empty.Empty, error) + // Creates a DeidentifyTemplate for re-using frequently used configuration + // for de-identifying content, images, and storage. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. 
+ CreateDeidentifyTemplate(context.Context, *CreateDeidentifyTemplateRequest) (*DeidentifyTemplate, error) + // Updates the DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + UpdateDeidentifyTemplate(context.Context, *UpdateDeidentifyTemplateRequest) (*DeidentifyTemplate, error) + // Gets a DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + GetDeidentifyTemplate(context.Context, *GetDeidentifyTemplateRequest) (*DeidentifyTemplate, error) + // Lists DeidentifyTemplates. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + ListDeidentifyTemplates(context.Context, *ListDeidentifyTemplatesRequest) (*ListDeidentifyTemplatesResponse, error) + // Deletes a DeidentifyTemplate. + // See https://cloud.google.com/dlp/docs/creating-templates-deid to learn + // more. + DeleteDeidentifyTemplate(context.Context, *DeleteDeidentifyTemplateRequest) (*empty.Empty, error) + // Creates a job trigger to run DLP actions such as scanning storage for + // sensitive information on a set schedule. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + CreateJobTrigger(context.Context, *CreateJobTriggerRequest) (*JobTrigger, error) + // Updates a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + UpdateJobTrigger(context.Context, *UpdateJobTriggerRequest) (*JobTrigger, error) + // Gets a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + GetJobTrigger(context.Context, *GetJobTriggerRequest) (*JobTrigger, error) + // Lists job triggers. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + ListJobTriggers(context.Context, *ListJobTriggersRequest) (*ListJobTriggersResponse, error) + // Deletes a job trigger. + // See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. + DeleteJobTrigger(context.Context, *DeleteJobTriggerRequest) (*empty.Empty, error) + // Activate a job trigger. Causes the immediate execute of a trigger + // instead of waiting on the trigger event to occur. + ActivateJobTrigger(context.Context, *ActivateJobTriggerRequest) (*DlpJob, error) + // Creates a new job to inspect storage or calculate risk metrics. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + // + // When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the + // system will automatically choose what detectors to run. By default this may + // be all types, but may change over time as detectors are updated. + CreateDlpJob(context.Context, *CreateDlpJobRequest) (*DlpJob, error) + // Lists DlpJobs that match the specified filter in the request. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + ListDlpJobs(context.Context, *ListDlpJobsRequest) (*ListDlpJobsResponse, error) + // Gets the latest state of a long-running DlpJob. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + GetDlpJob(context.Context, *GetDlpJobRequest) (*DlpJob, error) + // Deletes a long-running DlpJob. This method indicates that the client is + // no longer interested in the DlpJob result. The job will be cancelled if + // possible. 
+ // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + DeleteDlpJob(context.Context, *DeleteDlpJobRequest) (*empty.Empty, error) + // Starts asynchronous cancellation on a long-running DlpJob. The server + // makes a best effort to cancel the DlpJob, but success is not + // guaranteed. + // See https://cloud.google.com/dlp/docs/inspecting-storage and + // https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. + CancelDlpJob(context.Context, *CancelDlpJobRequest) (*empty.Empty, error) + // Creates a pre-built stored infoType to be used for inspection. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + CreateStoredInfoType(context.Context, *CreateStoredInfoTypeRequest) (*StoredInfoType, error) + // Updates the stored infoType by creating a new version. The existing version + // will continue to be used until the new version is ready. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + UpdateStoredInfoType(context.Context, *UpdateStoredInfoTypeRequest) (*StoredInfoType, error) + // Gets a stored infoType. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + GetStoredInfoType(context.Context, *GetStoredInfoTypeRequest) (*StoredInfoType, error) + // Lists stored infoTypes. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + ListStoredInfoTypes(context.Context, *ListStoredInfoTypesRequest) (*ListStoredInfoTypesResponse, error) + // Deletes a stored infoType. + // See https://cloud.google.com/dlp/docs/creating-stored-infotypes to + // learn more. + DeleteStoredInfoType(context.Context, *DeleteStoredInfoTypeRequest) (*empty.Empty, error) +} + +func RegisterDlpServiceServer(s *grpc.Server, srv DlpServiceServer) { + s.RegisterService(&_DlpService_serviceDesc, srv) +} + +func _DlpService_InspectContent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(InspectContentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).InspectContent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/InspectContent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).InspectContent(ctx, req.(*InspectContentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_RedactImage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RedactImageRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).RedactImage(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/RedactImage", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).RedactImage(ctx, req.(*RedactImageRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeidentifyContent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeidentifyContentRequest) + if err := dec(in); err != nil { + return 
nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeidentifyContent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeidentifyContent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeidentifyContent(ctx, req.(*DeidentifyContentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ReidentifyContent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReidentifyContentRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ReidentifyContent(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ReidentifyContent", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ReidentifyContent(ctx, req.(*ReidentifyContentRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListInfoTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInfoTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ListInfoTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListInfoTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListInfoTypes(ctx, req.(*ListInfoTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CreateInspectTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInspectTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CreateInspectTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CreateInspectTemplate(ctx, req.(*CreateInspectTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_UpdateInspectTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateInspectTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).UpdateInspectTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).UpdateInspectTemplate(ctx, req.(*UpdateInspectTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_GetInspectTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInspectTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(DlpServiceServer).GetInspectTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).GetInspectTemplate(ctx, req.(*GetInspectTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListInspectTemplates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInspectTemplatesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ListInspectTemplates(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListInspectTemplates(ctx, req.(*ListInspectTemplatesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeleteInspectTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInspectTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeleteInspectTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeleteInspectTemplate(ctx, req.(*DeleteInspectTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CreateDeidentifyTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDeidentifyTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CreateDeidentifyTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CreateDeidentifyTemplate(ctx, req.(*CreateDeidentifyTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_UpdateDeidentifyTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDeidentifyTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).UpdateDeidentifyTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).UpdateDeidentifyTemplate(ctx, req.(*UpdateDeidentifyTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_GetDeidentifyTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := 
new(GetDeidentifyTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).GetDeidentifyTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).GetDeidentifyTemplate(ctx, req.(*GetDeidentifyTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListDeidentifyTemplates_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDeidentifyTemplatesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ListDeidentifyTemplates(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListDeidentifyTemplates(ctx, req.(*ListDeidentifyTemplatesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeleteDeidentifyTemplate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDeidentifyTemplateRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeleteDeidentifyTemplate(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeleteDeidentifyTemplate(ctx, req.(*DeleteDeidentifyTemplateRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CreateJobTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateJobTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CreateJobTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CreateJobTrigger(ctx, req.(*CreateJobTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_UpdateJobTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateJobTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).UpdateJobTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).UpdateJobTrigger(ctx, req.(*UpdateJobTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_GetJobTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) 
(interface{}, error) { + in := new(GetJobTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).GetJobTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/GetJobTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).GetJobTrigger(ctx, req.(*GetJobTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListJobTriggers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListJobTriggersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ListJobTriggers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListJobTriggers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListJobTriggers(ctx, req.(*ListJobTriggersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeleteJobTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteJobTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeleteJobTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeleteJobTrigger(ctx, req.(*DeleteJobTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ActivateJobTrigger_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ActivateJobTriggerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ActivateJobTrigger(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ActivateJobTrigger(ctx, req.(*ActivateJobTriggerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CreateDlpJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDlpJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CreateDlpJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CreateDlpJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CreateDlpJob(ctx, req.(*CreateDlpJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListDlpJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDlpJobsRequest) + if err := dec(in); err != nil { + return nil, err + } + if 
interceptor == nil { + return srv.(DlpServiceServer).ListDlpJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListDlpJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListDlpJobs(ctx, req.(*ListDlpJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_GetDlpJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDlpJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).GetDlpJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/GetDlpJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).GetDlpJob(ctx, req.(*GetDlpJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeleteDlpJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteDlpJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeleteDlpJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeleteDlpJob(ctx, req.(*DeleteDlpJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CancelDlpJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CancelDlpJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CancelDlpJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CancelDlpJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CancelDlpJob(ctx, req.(*CancelDlpJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_CreateStoredInfoType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateStoredInfoTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).CreateStoredInfoType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).CreateStoredInfoType(ctx, req.(*CreateStoredInfoTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_UpdateStoredInfoType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateStoredInfoTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).UpdateStoredInfoType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).UpdateStoredInfoType(ctx, req.(*UpdateStoredInfoTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_GetStoredInfoType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetStoredInfoTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).GetStoredInfoType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).GetStoredInfoType(ctx, req.(*GetStoredInfoTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_ListStoredInfoTypes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListStoredInfoTypesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).ListStoredInfoTypes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).ListStoredInfoTypes(ctx, req.(*ListStoredInfoTypesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DlpService_DeleteStoredInfoType_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteStoredInfoTypeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DlpServiceServer).DeleteStoredInfoType(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DlpServiceServer).DeleteStoredInfoType(ctx, req.(*DeleteStoredInfoTypeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DlpService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.privacy.dlp.v2.DlpService", + HandlerType: (*DlpServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "InspectContent", + Handler: _DlpService_InspectContent_Handler, + }, + { + MethodName: "RedactImage", + Handler: _DlpService_RedactImage_Handler, + }, + { + MethodName: "DeidentifyContent", + Handler: _DlpService_DeidentifyContent_Handler, + }, + { + MethodName: "ReidentifyContent", + Handler: _DlpService_ReidentifyContent_Handler, + }, + { + MethodName: "ListInfoTypes", + Handler: _DlpService_ListInfoTypes_Handler, + }, + { + MethodName: "CreateInspectTemplate", + Handler: _DlpService_CreateInspectTemplate_Handler, + }, + { + MethodName: "UpdateInspectTemplate", + Handler: _DlpService_UpdateInspectTemplate_Handler, + }, + { + MethodName: "GetInspectTemplate", + Handler: _DlpService_GetInspectTemplate_Handler, + }, + { + MethodName: "ListInspectTemplates", + Handler: _DlpService_ListInspectTemplates_Handler, + }, + { + MethodName: "DeleteInspectTemplate", + Handler: _DlpService_DeleteInspectTemplate_Handler, + }, + { + MethodName: 
"CreateDeidentifyTemplate", + Handler: _DlpService_CreateDeidentifyTemplate_Handler, + }, + { + MethodName: "UpdateDeidentifyTemplate", + Handler: _DlpService_UpdateDeidentifyTemplate_Handler, + }, + { + MethodName: "GetDeidentifyTemplate", + Handler: _DlpService_GetDeidentifyTemplate_Handler, + }, + { + MethodName: "ListDeidentifyTemplates", + Handler: _DlpService_ListDeidentifyTemplates_Handler, + }, + { + MethodName: "DeleteDeidentifyTemplate", + Handler: _DlpService_DeleteDeidentifyTemplate_Handler, + }, + { + MethodName: "CreateJobTrigger", + Handler: _DlpService_CreateJobTrigger_Handler, + }, + { + MethodName: "UpdateJobTrigger", + Handler: _DlpService_UpdateJobTrigger_Handler, + }, + { + MethodName: "GetJobTrigger", + Handler: _DlpService_GetJobTrigger_Handler, + }, + { + MethodName: "ListJobTriggers", + Handler: _DlpService_ListJobTriggers_Handler, + }, + { + MethodName: "DeleteJobTrigger", + Handler: _DlpService_DeleteJobTrigger_Handler, + }, + { + MethodName: "ActivateJobTrigger", + Handler: _DlpService_ActivateJobTrigger_Handler, + }, + { + MethodName: "CreateDlpJob", + Handler: _DlpService_CreateDlpJob_Handler, + }, + { + MethodName: "ListDlpJobs", + Handler: _DlpService_ListDlpJobs_Handler, + }, + { + MethodName: "GetDlpJob", + Handler: _DlpService_GetDlpJob_Handler, + }, + { + MethodName: "DeleteDlpJob", + Handler: _DlpService_DeleteDlpJob_Handler, + }, + { + MethodName: "CancelDlpJob", + Handler: _DlpService_CancelDlpJob_Handler, + }, + { + MethodName: "CreateStoredInfoType", + Handler: _DlpService_CreateStoredInfoType_Handler, + }, + { + MethodName: "UpdateStoredInfoType", + Handler: _DlpService_UpdateStoredInfoType_Handler, + }, + { + MethodName: "GetStoredInfoType", + Handler: _DlpService_GetStoredInfoType_Handler, + }, + { + MethodName: "ListStoredInfoTypes", + Handler: _DlpService_ListStoredInfoTypes_Handler, + }, + { + MethodName: "DeleteStoredInfoType", + Handler: _DlpService_DeleteStoredInfoType_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/privacy/dlp/v2/dlp.proto", +} + +func init() { + proto.RegisterFile("google/privacy/dlp/v2/dlp.proto", fileDescriptor_dlp_1aebf9c18c267d70) +} + +var fileDescriptor_dlp_1aebf9c18c267d70 = []byte{ + // 9320 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x7d, 0x5b, 0x8c, 0x1b, 0x59, + 0x76, 0x58, 0x17, 0xd9, 0xec, 0x66, 0x1f, 0x36, 0xd9, 0xec, 0xdb, 0x4f, 0x51, 0xd2, 0x4a, 0x53, + 0x9a, 0x91, 0x34, 0x2d, 0xa9, 0x7b, 0xa6, 0xe7, 0xfd, 0xd8, 0xf1, 0xb2, 0x49, 0x4a, 0xa4, 0xa6, + 0x5f, 0x2a, 0xb2, 0xa5, 0x91, 0x3c, 0x98, 0x4a, 0x35, 0x79, 0x9b, 0x5d, 0x6a, 0x92, 0x45, 0x55, + 0x15, 0xa5, 0xee, 0xb1, 0x1d, 0xac, 0x83, 0x24, 0x86, 0x11, 0x0f, 0xbc, 0x81, 0xb3, 0xd9, 0xc4, + 0x76, 0x36, 0x4e, 0x1c, 0x20, 0x88, 0x01, 0x3b, 0x09, 0x1c, 0x3b, 0x48, 0x62, 0xe7, 0xc3, 0x0e, + 0x6c, 0xc0, 0x70, 0x82, 0xdd, 0x4c, 0x10, 0x24, 0x08, 0x16, 0x48, 0x36, 0x71, 0x6c, 0x60, 0xfc, + 0x13, 0x20, 0x3f, 0x4e, 0x7e, 0x82, 0xfb, 0xa8, 0x27, 0xab, 0xc8, 0x62, 0xb7, 0x26, 0x6b, 0xec, + 0x17, 0x79, 0xef, 0x3d, 0xe7, 0xdc, 0x73, 0xcf, 0x39, 0xf7, 0xdc, 0x73, 0x9f, 0x05, 0x97, 0x9a, + 0x9a, 0xd6, 0x6c, 0xe1, 0xb5, 0xae, 0xae, 0x3e, 0x55, 0xea, 0x27, 0x6b, 0x8d, 0x56, 0x77, 0xed, + 0xe9, 0x3a, 0xf9, 0x59, 0xed, 0xea, 0x9a, 0xa9, 0xa1, 0x05, 0x06, 0xb0, 0xca, 0x01, 0x56, 0x49, + 0xc9, 0xd3, 0xf5, 0xdc, 0x05, 0x8e, 0xa7, 0x74, 0xd5, 0x35, 0xa5, 0xd3, 0xd1, 0x4c, 0xc5, 0x54, + 0xb5, 0x8e, 0xc1, 0x90, 0x72, 0xe7, 0x5c, 0xa5, 0x3a, 0x36, 0xb4, 0x9e, 0x5e, 0xc7, 0xbc, 0xe8, + 0x4a, 
0x70, 0x85, 0x86, 0xa9, 0xe9, 0x4a, 0xd3, 0x02, 0xfa, 0x8a, 0x0d, 0xa4, 0x99, 0xda, 0x7e, + 0xef, 0x60, 0xad, 0xd1, 0xd3, 0x69, 0x05, 0xbc, 0xfc, 0xbc, 0xbf, 0x1c, 0xb7, 0xbb, 0xe6, 0x09, + 0x2f, 0xbc, 0xec, 0x2f, 0x3c, 0x50, 0x71, 0xab, 0x21, 0xb7, 0x15, 0xe3, 0x88, 0x43, 0x5c, 0xf2, + 0x43, 0x98, 0x6a, 0x1b, 0x1b, 0xa6, 0xd2, 0xe6, 0x8d, 0xce, 0x2d, 0x71, 0x00, 0xbd, 0x5b, 0x5f, + 0x33, 0x4c, 0xc5, 0xec, 0x59, 0x0d, 0x5b, 0xe4, 0x05, 0xe6, 0x49, 0x17, 0xaf, 0x35, 0x14, 0x13, + 0xfb, 0x18, 0xe2, 0xf9, 0x27, 0xda, 0xc1, 0x33, 0x8c, 0x8f, 0x82, 0x0a, 0x49, 0x55, 0xda, 0x41, + 0x43, 0xe1, 0xdc, 0x8a, 0x12, 0x64, 0x4b, 0xc7, 0xf5, 0x56, 0xaf, 0x81, 0x2b, 0x9d, 0x03, 0xad, + 0x76, 0xd2, 0xc5, 0x06, 0xfa, 0x00, 0x40, 0xed, 0x1c, 0x68, 0x32, 0x41, 0x30, 0x96, 0x85, 0xcb, + 0xf1, 0xeb, 0xa9, 0xf5, 0x4b, 0xab, 0x81, 0x8a, 0x58, 0xb5, 0xb0, 0xa4, 0x29, 0xd5, 0xc2, 0x17, + 0x3f, 0x8f, 0x41, 0x9a, 0x12, 0x35, 0x54, 0xad, 0x23, 0xf5, 0x5a, 0x18, 0x49, 0x00, 0x0d, 0xb5, + 0x4e, 0x24, 0xa8, 0xe8, 0x27, 0xcb, 0xc2, 0x65, 0xe1, 0x7a, 0x6a, 0xfd, 0x95, 0x10, 0x8a, 0x85, + 0x9e, 0x61, 0x6a, 0x6d, 0x8b, 0xee, 0x6a, 0xd1, 0xc6, 0x2b, 0x8f, 0x49, 0x2e, 0x2a, 0xa8, 0x00, + 0x09, 0x1d, 0x37, 0xf1, 0xf1, 0x72, 0x8c, 0x92, 0xbb, 0x11, 0x8d, 0x9c, 0x44, 0x50, 0xca, 0x63, + 0x12, 0xc3, 0x45, 0x0f, 0x00, 0x61, 0xd6, 0x7c, 0xd9, 0xd5, 0xe4, 0x38, 0xa5, 0x78, 0x2d, 0x84, + 0xa2, 0x5f, 0x5e, 0xe5, 0x31, 0x29, 0x8b, 0xfd, 0x32, 0x2c, 0x43, 0xba, 0xad, 0x98, 0xf5, 0x43, + 0xb5, 0xd3, 0xa4, 0x44, 0x97, 0xc7, 0x2f, 0x0b, 0xd7, 0x33, 0xeb, 0x57, 0x42, 0x68, 0x6e, 0x71, + 0x58, 0x2a, 0xca, 0xe9, 0xb6, 0x2b, 0xb5, 0x31, 0x01, 0xe3, 0x84, 0x80, 0xf8, 0x6f, 0x05, 0xc8, + 0x54, 0x3a, 0x46, 0x17, 0x53, 0x09, 0x50, 0xb1, 0x36, 0x60, 0xfa, 0x50, 0x33, 0x9f, 0x69, 0x7a, + 0x43, 0xd6, 0x7b, 0x2d, 0xcc, 0x05, 0xfb, 0x23, 0x11, 0x05, 0x8b, 0x4d, 0x87, 0xd4, 0x6a, 0x99, + 0xd1, 0x21, 0xff, 0xcb, 0x63, 0x52, 0xea, 0xd0, 0x49, 0xa2, 0x2d, 0xc8, 0x60, 0x4b, 0x9b, 0xac, + 0x1e, 0x26, 0xf1, 0x17, 0x07, 0xc9, 0xc7, 0x52, 0x7d, 0x79, 0x4c, 0x4a, 0x63, 0x77, 0x86, 0xdd, + 0x9e, 0x6f, 0x08, 0x30, 0xeb, 0x6d, 0x4f, 0x15, 0x9b, 0x67, 0xb5, 0x3d, 0xf4, 0x1e, 0x24, 0x08, + 0x8b, 0xc6, 0x72, 0x8c, 0xa2, 0xbe, 0x14, 0x8a, 0xea, 0xae, 0x58, 0x62, 0x38, 0xe2, 0x67, 0x93, + 0x90, 0xe6, 0x25, 0x05, 0xad, 0x73, 0xa0, 0x36, 0xcf, 0xcc, 0x4e, 0x19, 0x32, 0x6d, 0xb5, 0x23, + 0xb7, 0xd4, 0x23, 0xdc, 0x52, 0x0f, 0x35, 0xad, 0x41, 0x65, 0x97, 0x59, 0x7f, 0x21, 0x84, 0xc6, + 0xa6, 0x0d, 0x28, 0xa5, 0xdb, 0x6a, 0xc7, 0x49, 0xa2, 0xbb, 0x30, 0xd1, 0x52, 0xdb, 0xaa, 0x69, + 0x59, 0xe7, 0xfa, 0xe0, 0x96, 0x31, 0xfe, 0x57, 0x6f, 0xab, 0x9d, 0x86, 0xda, 0x69, 0x6e, 0x52, + 0x4c, 0x89, 0x53, 0x40, 0x57, 0x20, 0xad, 0x76, 0x98, 0xd5, 0x3f, 0xe9, 0x69, 0x26, 0x33, 0xce, + 0xa4, 0x34, 0xcd, 0x33, 0xef, 0x91, 0x3c, 0x74, 0x33, 0xb0, 0x6b, 0x24, 0x28, 0x64, 0xbf, 0xbd, + 0xdf, 0x83, 0xd9, 0x3a, 0xb5, 0x2f, 0x37, 0xf0, 0xc4, 0x40, 0x1d, 0x78, 0xed, 0x51, 0x9a, 0xa9, + 0x7b, 0xd2, 0x06, 0xda, 0x82, 0x99, 0xba, 0xd6, 0x31, 0x71, 0xc7, 0x94, 0xb5, 0x2e, 0x75, 0xef, + 0xcb, 0xc9, 0xcb, 0xf1, 0xeb, 0x99, 0x50, 0xc3, 0x2b, 0x30, 0xe8, 0x1d, 0x0a, 0x2c, 0x65, 0xea, + 0xee, 0xa4, 0x81, 0x0a, 0x90, 0x24, 0x5a, 0x96, 0x0d, 0x6c, 0x2e, 0x03, 0x65, 0xec, 0x7a, 0x24, + 0xe3, 0xa8, 0x62, 0x53, 0x9a, 0xd4, 0xd9, 0x9f, 0xdc, 0x17, 0x31, 0x48, 0x7b, 0x64, 0x8a, 0x5e, + 0x85, 0x85, 0xb6, 0x72, 0x2c, 0x1f, 0xb0, 0x4c, 0x43, 0xee, 0x62, 0x5d, 0x56, 0x4d, 0xdc, 0xa6, + 0x9d, 0x31, 0x21, 0xa1, 0xb6, 0x72, 0xcc, 0x11, 0x8c, 0x5d, 0xac, 0x57, 0x4c, 0xdc, 0x46, 0x6f, + 0xc1, 0x72, 0x1f, 0x8a, 0x8e, 
0x9f, 0xf4, 0xb0, 0x61, 0x52, 0xf3, 0x48, 0x48, 0x0b, 0x5e, 0x2c, + 0x89, 0x15, 0xa2, 0x1f, 0x87, 0x5c, 0x7f, 0x5d, 0x96, 0xb8, 0x97, 0xe3, 0xb4, 0x51, 0x5f, 0x1b, + 0xdd, 0x2e, 0x6c, 0xcb, 0xa5, 0x49, 0x69, 0xd1, 0xc7, 0x32, 0x2f, 0xcc, 0x75, 0x49, 0xe7, 0x70, + 0x01, 0xa2, 0xf7, 0x61, 0xca, 0xa9, 0x9d, 0xf9, 0x9e, 0xa1, 0x7d, 0x23, 0x69, 0xf5, 0x0d, 0xf4, + 0x02, 0x4c, 0xbb, 0x1b, 0xc3, 0x5b, 0x9e, 0x72, 0x55, 0x2e, 0x7e, 0x21, 0xc0, 0xcc, 0xc6, 0x89, + 0x89, 0xb9, 0x62, 0xa9, 0xf0, 0x8a, 0xcc, 0x7d, 0xd0, 0xfa, 0x32, 0xa1, 0x83, 0x88, 0x0f, 0x8b, + 0xa6, 0x0d, 0xca, 0x00, 0xc5, 0x46, 0x08, 0xc6, 0x1b, 0x8a, 0xa9, 0xd0, 0x4a, 0xa7, 0x25, 0xfa, + 0x5f, 0xfc, 0x8b, 0x30, 0x65, 0x83, 0xa1, 0x1c, 0x2c, 0x6e, 0x3c, 0xac, 0x95, 0xaa, 0x72, 0xed, + 0xe1, 0x6e, 0x49, 0xde, 0xdb, 0xae, 0xee, 0x96, 0x0a, 0x95, 0xdb, 0x95, 0x52, 0x31, 0x3b, 0x86, + 0xa6, 0x20, 0x51, 0xd9, 0xca, 0xdf, 0x29, 0x65, 0x27, 0x50, 0x06, 0x80, 0xfe, 0x95, 0xef, 0xee, + 0x96, 0xee, 0x64, 0x05, 0x94, 0x86, 0x29, 0x96, 0xde, 0xd8, 0xda, 0xcd, 0xc6, 0x9c, 0xe4, 0xee, + 0xf6, 0x9d, 0x6c, 0xdc, 0x49, 0x56, 0xef, 0xdf, 0xc9, 0x8e, 0x93, 0x64, 0xad, 0xf4, 0x51, 0x4d, + 0xde, 0xab, 0xdd, 0x7e, 0x3b, 0x9b, 0x10, 0xff, 0xb1, 0x00, 0x29, 0x77, 0x4b, 0x17, 0x21, 0xf1, + 0x54, 0x69, 0xf5, 0x30, 0xed, 0xf0, 0x53, 0x64, 0xcc, 0xa2, 0x49, 0xf4, 0x3a, 0x24, 0x4c, 0x65, + 0xbf, 0xc5, 0x7a, 0x6d, 0x6a, 0xfd, 0x42, 0x88, 0x08, 0x6a, 0x04, 0x86, 0x60, 0x51, 0x60, 0x54, + 0x82, 0xa9, 0xfd, 0x13, 0x13, 0x33, 0xdb, 0x4c, 0x50, 0xcc, 0xab, 0xd1, 0x84, 0x57, 0x1e, 0x93, + 0x92, 0x04, 0x95, 0xfc, 0xdf, 0x48, 0xc1, 0x14, 0x11, 0x16, 0x25, 0x23, 0xfe, 0x33, 0x01, 0x12, + 0xb4, 0x1a, 0xf4, 0x36, 0x4c, 0x1e, 0x62, 0xa5, 0x81, 0x75, 0xcb, 0x49, 0x7e, 0x25, 0x84, 0xf6, + 0x6d, 0x12, 0x0c, 0x55, 0x1a, 0x92, 0x05, 0x8e, 0x5e, 0x87, 0x71, 0x5d, 0x7b, 0x66, 0xf9, 0xeb, + 0xcb, 0x83, 0x1a, 0xb3, 0x2a, 0x69, 0xcf, 0x24, 0x0a, 0x9d, 0x7b, 0x0f, 0xe2, 0x92, 0xf6, 0x0c, + 0xbd, 0x0e, 0x13, 0x54, 0x26, 0x56, 0xad, 0x61, 0xb2, 0xb8, 0x4f, 0x80, 0x24, 0x0e, 0x2b, 0x7e, + 0x6a, 0x7b, 0x79, 0x09, 0x1b, 0xbd, 0x96, 0x89, 0xde, 0x85, 0xa4, 0x6d, 0x86, 0xc3, 0xd8, 0xa7, + 0x60, 0x92, 0x0d, 0x8f, 0x6e, 0x01, 0xb2, 0xfb, 0xa3, 0xa9, 0xf7, 0x3a, 0x75, 0xc5, 0xc4, 0xcc, + 0xcb, 0x27, 0xa5, 0x59, 0xab, 0xa4, 0x66, 0x15, 0x88, 0xff, 0x25, 0x06, 0x93, 0x9c, 0x08, 0x9a, + 0x87, 0x04, 0x73, 0xbf, 0xc4, 0x96, 0xa7, 0x24, 0x96, 0xf0, 0xf6, 0xaa, 0xd8, 0xa8, 0xbd, 0x2a, + 0x0f, 0xe0, 0x1a, 0x6c, 0xe2, 0x51, 0x07, 0x1b, 0x17, 0x12, 0x7a, 0x0f, 0x92, 0x2d, 0xad, 0x4e, + 0xe3, 0x5d, 0x6e, 0x62, 0x61, 0xf5, 0x6f, 0x72, 0x30, 0xc9, 0x46, 0x40, 0xef, 0x41, 0xaa, 0xae, + 0x63, 0xc5, 0xc4, 0x32, 0x89, 0x34, 0x97, 0x27, 0x28, 0x7e, 0xce, 0xc1, 0x67, 0x11, 0xef, 0x6a, + 0xcd, 0x8a, 0x78, 0x25, 0x60, 0xe0, 0x24, 0x03, 0xfd, 0x08, 0x00, 0x95, 0x01, 0x75, 0x6a, 0xcb, + 0x93, 0x14, 0x37, 0xcc, 0x22, 0xe8, 0x20, 0x45, 0x04, 0x20, 0x4d, 0x3d, 0xb1, 0xfe, 0x8a, 0xff, + 0x5d, 0x80, 0xe4, 0xa6, 0xc3, 0x0a, 0x50, 0x8b, 0xd7, 0x95, 0x4e, 0xd3, 0xf2, 0x4f, 0x61, 0x06, + 0x22, 0x11, 0x18, 0x89, 0xf6, 0x10, 0xfa, 0x17, 0x95, 0xc8, 0xe0, 0xd3, 0xc0, 0x5d, 0x4d, 0xed, + 0x98, 0x9c, 0x42, 0x2c, 0x02, 0x85, 0x8c, 0x8d, 0xc4, 0xc8, 0x54, 0x61, 0xd6, 0x1a, 0xc3, 0x2c, + 0x11, 0x19, 0xcb, 0x93, 0xd4, 0xc4, 0xae, 0x0e, 0x1e, 0xc5, 0x6c, 0xd9, 0x66, 0xeb, 0xde, 0x0c, + 0x43, 0xfc, 0xe7, 0x71, 0x98, 0xf1, 0x41, 0xa1, 0x97, 0x80, 0x8e, 0x77, 0x8a, 0xda, 0xc1, 0xba, + 0xdc, 0x51, 0xda, 0x96, 0x51, 0xa5, 0xed, 0xdc, 0x6d, 0xa5, 0x8d, 0xd1, 0x2e, 0xcc, 0xe8, 0xb8, + 0x4e, 0x02, 0x46, 0x5b, 0xc5, 0xac, 0x59, 0x61, 0x83, 
0xb4, 0x44, 0xa1, 0xad, 0x6a, 0xca, 0x63, + 0x52, 0x46, 0xf7, 0xe4, 0x90, 0xe8, 0x50, 0x6d, 0x2b, 0x4d, 0xec, 0x10, 0x8c, 0x0f, 0x8c, 0x0e, + 0x2b, 0x04, 0xd8, 0x45, 0x2f, 0xad, 0xba, 0x33, 0xd0, 0x7d, 0x98, 0x6d, 0x68, 0xf5, 0x5e, 0xdb, + 0x2d, 0x31, 0xee, 0xae, 0xc2, 0xe2, 0xf1, 0x22, 0x87, 0x77, 0x11, 0xcd, 0x36, 0x7c, 0x79, 0xe8, + 0x43, 0x98, 0x73, 0xe4, 0x63, 0xcf, 0xb7, 0x22, 0xd8, 0x27, 0xb2, 0xd1, 0xec, 0x3c, 0x74, 0x83, + 0x69, 0x95, 0x11, 0x7b, 0x8a, 0x75, 0x12, 0xdb, 0x52, 0x73, 0x9d, 0x62, 0xda, 0xa2, 0x05, 0xf7, + 0x59, 0xfe, 0x06, 0x38, 0xdd, 0x49, 0x7c, 0x0d, 0xb2, 0x7e, 0x6e, 0xd1, 0x25, 0x48, 0x1d, 0xa8, + 0x2d, 0x2c, 0x6b, 0x07, 0x07, 0x24, 0x34, 0x21, 0x6a, 0x8b, 0x4b, 0x40, 0xb2, 0x76, 0x68, 0x8e, + 0xf8, 0x9f, 0x05, 0xc8, 0x78, 0xd5, 0x40, 0x3a, 0x0a, 0x57, 0xe3, 0x11, 0xb6, 0xe6, 0x53, 0x97, + 0x07, 0x6a, 0xf0, 0x43, 0x7c, 0x22, 0x4d, 0xe9, 0xd6, 0x5f, 0xf4, 0x0e, 0xf1, 0x78, 0x64, 0x5a, + 0xaa, 0x36, 0xb8, 0x01, 0x0c, 0x75, 0xd8, 0x07, 0xec, 0x0f, 0xfa, 0x10, 0x32, 0x74, 0x44, 0x89, + 0xaa, 0x70, 0xea, 0xba, 0x6d, 0x6b, 0x4e, 0x9b, 0xee, 0xa4, 0x78, 0x13, 0xd2, 0x9e, 0x72, 0x74, + 0x1e, 0xa6, 0x74, 0xed, 0x99, 0xac, 0x76, 0x1a, 0xf8, 0x98, 0xcb, 0x22, 0xa9, 0x6b, 0xcf, 0x2a, + 0x24, 0x2d, 0xae, 0x41, 0x82, 0x75, 0xab, 0x79, 0x48, 0x18, 0xa6, 0xa2, 0x5b, 0xd2, 0x62, 0x09, + 0x94, 0x85, 0x38, 0xee, 0xb0, 0xf6, 0xc4, 0x25, 0xf2, 0x57, 0x7c, 0x04, 0x69, 0x8f, 0xbd, 0xa1, + 0x0a, 0x64, 0xf6, 0xb5, 0x1e, 0x75, 0xbf, 0xf2, 0xbe, 0x76, 0x6c, 0x0f, 0x1c, 0x62, 0xd8, 0x50, + 0xc8, 0x81, 0x37, 0xb4, 0x63, 0x29, 0xbd, 0xef, 0x24, 0xb0, 0x21, 0x2a, 0x90, 0x72, 0x95, 0x92, + 0xca, 0x4d, 0xad, 0xcb, 0xa3, 0x3e, 0xf2, 0x97, 0xc4, 0x18, 0x2d, 0x7c, 0x60, 0x85, 0x74, 0xf4, + 0x3f, 0x61, 0xfc, 0x99, 0xda, 0x30, 0x0f, 0xa9, 0xcc, 0x12, 0x12, 0x4b, 0xa0, 0x45, 0x98, 0x38, + 0xc4, 0x6a, 0xf3, 0xd0, 0xa4, 0xfe, 0x36, 0x21, 0xf1, 0x94, 0xf8, 0x1b, 0xe3, 0x80, 0x24, 0xdc, + 0x50, 0xea, 0x26, 0x6d, 0x85, 0x15, 0x06, 0x2e, 0xc2, 0x44, 0x57, 0xd1, 0x71, 0xc7, 0xe4, 0x7d, + 0x9c, 0xa7, 0x88, 0x66, 0x54, 0x36, 0xae, 0xc9, 0x75, 0x1a, 0xe6, 0x0d, 0x99, 0xa8, 0x79, 0x42, + 0x42, 0x29, 0xad, 0x7a, 0x66, 0x3e, 0x4f, 0x61, 0x89, 0xf5, 0x6b, 0x9d, 0x32, 0x40, 0xe6, 0x7e, + 0x8c, 0x28, 0x99, 0x03, 0x10, 0x91, 0x7d, 0x10, 0x6a, 0x6f, 0x7e, 0x86, 0x57, 0x79, 0x82, 0xd3, + 0xe1, 0xf5, 0x2d, 0xa8, 0x01, 0xb9, 0x06, 0x7a, 0x19, 0xb2, 0xd6, 0xdc, 0xc4, 0x1e, 0x93, 0x27, + 0xe8, 0x68, 0x3a, 0xc3, 0xf3, 0xad, 0xf0, 0x10, 0x15, 0xdc, 0x21, 0xcd, 0xe4, 0x28, 0x21, 0x8d, + 0x13, 0xd0, 0xe4, 0xbe, 0x2b, 0xc0, 0x7c, 0x10, 0x7f, 0xe8, 0x83, 0xd1, 0xa3, 0x5b, 0x12, 0x29, + 0xd9, 0x23, 0xf1, 0x75, 0xe2, 0x6a, 0x09, 0x49, 0x59, 0x69, 0xb5, 0x64, 0x13, 0x1f, 0x33, 0x4b, + 0x48, 0x12, 0x9f, 0xc7, 0x0a, 0xf2, 0xad, 0x56, 0x0d, 0x1f, 0x9b, 0x64, 0xac, 0x71, 0x0b, 0xb9, + 0xa5, 0xe9, 0xbc, 0x4b, 0x5d, 0x08, 0x1d, 0x22, 0x5a, 0x9a, 0x4e, 0x3c, 0xb1, 0xcd, 0x71, 0x4b, + 0xd3, 0x37, 0x92, 0x30, 0x61, 0x2a, 0x7a, 0x13, 0x9b, 0x62, 0x01, 0x12, 0x34, 0x8b, 0x18, 0xa5, + 0x8e, 0x1b, 0x94, 0xfb, 0x98, 0x44, 0xfe, 0x12, 0x03, 0x6c, 0xea, 0x18, 0x33, 0xb7, 0x1f, 0x93, + 0x58, 0x82, 0x98, 0xea, 0xbe, 0x15, 0x69, 0xc6, 0x24, 0xfa, 0x5f, 0xfc, 0x27, 0x02, 0xcc, 0x79, + 0x74, 0x69, 0x74, 0xb5, 0x8e, 0x81, 0xc9, 0x48, 0xc3, 0x2a, 0xc6, 0x0d, 0x99, 0xaa, 0x90, 0x92, + 0x9f, 0xb6, 0x1a, 0x85, 0x1b, 0x14, 0x9c, 0x80, 0xe1, 0x63, 0x53, 0x67, 0x70, 0x76, 0xeb, 0xa7, + 0xa4, 0xb4, 0x9d, 0x4b, 0xdb, 0xee, 0xb2, 0x59, 0x9d, 0x06, 0x63, 0xc3, 0x86, 0x0f, 0x77, 0xe0, + 0x66, 0xdb, 0x2c, 0x4b, 0x8a, 0xff, 0x37, 0x06, 0xcb, 0x45, 0xac, 0x36, 0x70, 
0xc7, 0x54, 0x0f, + 0x4e, 0xb8, 0xbe, 0x87, 0xf5, 0x9a, 0x1a, 0xcc, 0x36, 0x6c, 0x1c, 0x6f, 0xc7, 0x09, 0x1d, 0x71, + 0xdc, 0x75, 0x10, 0x5b, 0xce, 0x36, 0x7c, 0x39, 0x01, 0x7d, 0x31, 0x7e, 0xfa, 0xbe, 0xf8, 0x26, + 0x8c, 0x53, 0x1b, 0x67, 0xd1, 0x98, 0x38, 0x38, 0x70, 0xa0, 0xf6, 0x4d, 0xe1, 0xd1, 0x3a, 0x2c, + 0x58, 0x4c, 0x98, 0xb8, 0xdd, 0x6d, 0x91, 0xb0, 0x8c, 0xc6, 0x06, 0x09, 0x2a, 0x81, 0x39, 0x5e, + 0x58, 0xe3, 0x65, 0x34, 0x42, 0x78, 0x1b, 0x96, 0x5d, 0xe2, 0xf0, 0xa2, 0x4d, 0x50, 0xb4, 0x45, + 0xa7, 0xdc, 0x8d, 0x29, 0x7e, 0x5b, 0x80, 0x73, 0x01, 0xd2, 0xe7, 0x66, 0x63, 0xb5, 0x41, 0x18, + 0xb1, 0x0d, 0x15, 0x48, 0x6a, 0x4f, 0xb1, 0xfe, 0x54, 0xc5, 0xcf, 0xb8, 0x56, 0x6e, 0x85, 0x0d, + 0x34, 0xba, 0xd2, 0x31, 0x0e, 0x34, 0xbd, 0x4d, 0x5d, 0xfd, 0x0e, 0x47, 0x92, 0x6c, 0x74, 0x6a, + 0x1e, 0xd2, 0x29, 0xcc, 0x43, 0x3f, 0xb3, 0x79, 0xe8, 0x3f, 0x4c, 0xe6, 0xa1, 0x0f, 0x31, 0x0f, + 0x3d, 0xdc, 0x3c, 0xa4, 0x3f, 0xcf, 0xe6, 0xf1, 0x27, 0x02, 0x2c, 0x38, 0x72, 0x8e, 0x62, 0x1b, + 0xcf, 0x75, 0xc0, 0xb5, 0x24, 0x10, 0x7f, 0x5e, 0x5a, 0x1c, 0x0f, 0xd5, 0xa2, 0x78, 0x1f, 0x16, + 0xfd, 0x2d, 0xe5, 0x7a, 0x78, 0x1f, 0x26, 0xb8, 0x1f, 0x16, 0x46, 0xf0, 0xc3, 0x1c, 0x47, 0xfc, + 0xc3, 0x18, 0xcc, 0xed, 0xf4, 0xcc, 0x6e, 0xcf, 0xac, 0xb2, 0x0d, 0x15, 0xde, 0xb6, 0xf7, 0xad, + 0x25, 0x8b, 0xc1, 0x44, 0x37, 0xd4, 0xe6, 0xbd, 0x1e, 0xd6, 0x4f, 0x7c, 0x4b, 0x17, 0x1f, 0x43, + 0x5a, 0xa3, 0x44, 0x65, 0xa3, 0x7e, 0x88, 0xdb, 0x0a, 0x9f, 0xd6, 0xbe, 0x15, 0x42, 0x25, 0x80, + 0x01, 0x2b, 0x8f, 0xa2, 0x4b, 0xd3, 0x9a, 0x2b, 0x25, 0x7e, 0x43, 0x80, 0x69, 0x77, 0x31, 0xba, + 0x08, 0xe7, 0x76, 0xf6, 0x6a, 0xbb, 0x7b, 0x35, 0xb9, 0x5a, 0x28, 0x97, 0xb6, 0xf2, 0xbe, 0xd5, + 0x9f, 0x59, 0x48, 0x6f, 0xe4, 0xab, 0x95, 0x82, 0x5c, 0xd8, 0xd9, 0xdc, 0xdb, 0xda, 0xae, 0x66, + 0x05, 0x34, 0x03, 0xa9, 0x3b, 0x85, 0xaa, 0x9d, 0x11, 0x43, 0x0b, 0x30, 0x5b, 0xcc, 0xd7, 0xf2, + 0xd5, 0xda, 0x8e, 0x54, 0xb2, 0xb3, 0xe3, 0x24, 0x7b, 0xa3, 0x72, 0x47, 0xbe, 0xb7, 0x57, 0x92, + 0x1e, 0xda, 0xd9, 0xe3, 0x04, 0x3d, 0xbf, 0xb9, 0x69, 0x67, 0x24, 0xec, 0x25, 0xf2, 0xba, 0xb3, + 0xe2, 0x56, 0x35, 0x15, 0xd3, 0x38, 0xe3, 0x8a, 0xdb, 0x3c, 0x24, 0xea, 0x5a, 0xaf, 0x63, 0xf2, + 0x08, 0x99, 0x25, 0xc4, 0xef, 0x8c, 0xc3, 0x32, 0xd7, 0x66, 0x51, 0x31, 0x95, 0x2a, 0xdd, 0x2d, + 0x2b, 0x62, 0x53, 0x51, 0x5b, 0x06, 0x6a, 0x13, 0xef, 0x47, 0x3b, 0x01, 0x6e, 0xd8, 0xab, 0xb0, + 0xcc, 0xc8, 0x87, 0x2c, 0x34, 0xf6, 0xd1, 0x5a, 0x95, 0x2c, 0x42, 0x7c, 0x45, 0x96, 0xb8, 0x45, + 0x6f, 0x0e, 0xda, 0xb6, 0xad, 0x8f, 0xf5, 0x82, 0x37, 0x47, 0xaf, 0xc3, 0x6d, 0x8f, 0xb9, 0x7f, + 0x2d, 0x40, 0xd6, 0x5f, 0x2d, 0xda, 0x87, 0x73, 0x46, 0x47, 0xe9, 0x1a, 0x87, 0x9a, 0x29, 0xfb, + 0x7b, 0x0e, 0x17, 0xea, 0xd5, 0xc1, 0xf5, 0x5a, 0x7d, 0x49, 0x5a, 0xb2, 0x08, 0xf9, 0x0a, 0xd0, + 0x6d, 0x80, 0xc7, 0xda, 0xbe, 0xd7, 0xb7, 0x5f, 0x1b, 0x4c, 0xf4, 0xae, 0xb6, 0xcf, 0x1d, 0xc3, + 0xd4, 0x63, 0xeb, 0x6f, 0xee, 0xd7, 0x05, 0x98, 0xe0, 0x8b, 0x54, 0xd7, 0x60, 0xa6, 0xab, 0x6b, + 0x75, 0x6c, 0x18, 0xb8, 0x21, 0x93, 0xf0, 0xd5, 0xe0, 0xb3, 0x9f, 0x8c, 0x9d, 0x4d, 0x97, 0x2f, + 0x89, 0x43, 0x30, 0x35, 0x53, 0x69, 0xc9, 0xd8, 0x30, 0xd5, 0xb6, 0x62, 0xda, 0xe0, 0x4c, 0xed, + 0x73, 0xb4, 0xb0, 0x64, 0x95, 0x31, 0x9c, 0x4d, 0x98, 0xb1, 0x0d, 0x4b, 0x36, 0x88, 0xad, 0xf1, + 0xe5, 0xe4, 0x17, 0x87, 0x98, 0x17, 0xb5, 0x4b, 0xe2, 0xca, 0x5c, 0x49, 0xf1, 0xb7, 0x05, 0x98, + 0xb3, 0x00, 0x8a, 0xd8, 0xa8, 0xeb, 0x2a, 0x15, 0x3d, 0x09, 0x33, 0x5d, 0x4b, 0x13, 0xf4, 0x3f, + 0x7a, 0x01, 0xa6, 0x1b, 0xaa, 0xd1, 0x6d, 0x29, 0x27, 0xcc, 0x6b, 0xb1, 0x28, 0x31, 0xc5, 0xf3, + 0xe8, 
0x98, 0xb3, 0x05, 0xd3, 0x46, 0xaf, 0xdb, 0xd5, 0x74, 0xd6, 0x14, 0xca, 0x59, 0x66, 0x7d, + 0x65, 0x18, 0x67, 0x16, 0xca, 0xc6, 0x89, 0x94, 0x32, 0x9c, 0x04, 0xba, 0x0c, 0xa9, 0x86, 0xc3, + 0x14, 0x77, 0x93, 0xee, 0x2c, 0xb1, 0x0a, 0xf3, 0x9b, 0xaa, 0x61, 0xda, 0x5b, 0x11, 0xd6, 0x38, + 0x70, 0x05, 0xd2, 0x2d, 0xa5, 0xd3, 0xec, 0x91, 0x69, 0x51, 0x5d, 0x6b, 0x58, 0x0d, 0x99, 0xb6, + 0x32, 0x0b, 0x5a, 0x03, 0x93, 0xc1, 0xe2, 0x40, 0x6d, 0x99, 0x58, 0xe7, 0x4d, 0xe1, 0x29, 0x71, + 0x1f, 0x16, 0x7c, 0x44, 0xb9, 0xcb, 0xad, 0x04, 0xec, 0x31, 0x0d, 0x6b, 0x9c, 0x4b, 0xaa, 0xee, + 0x9d, 0xd7, 0xff, 0x29, 0xc0, 0x82, 0xa4, 0x1a, 0x47, 0xf9, 0x8e, 0xd2, 0x3a, 0x31, 0x54, 0xc3, + 0xb6, 0x29, 0x32, 0x54, 0x71, 0x52, 0x72, 0x1b, 0x9b, 0xba, 0x5a, 0x1f, 0xe2, 0x8a, 0x77, 0x59, + 0x72, 0x8b, 0xc2, 0x4a, 0xe9, 0xae, 0x3b, 0x89, 0xee, 0xc0, 0x34, 0xdb, 0x54, 0x97, 0x99, 0x57, + 0x8f, 0x45, 0xf7, 0xea, 0x52, 0x8a, 0x61, 0xb2, 0x65, 0xe3, 0xb7, 0x60, 0x92, 0xcd, 0x60, 0x2c, + 0x73, 0xbb, 0x18, 0x42, 0x23, 0xcf, 0xb6, 0x63, 0x2c, 0x68, 0xf1, 0x7b, 0x02, 0x4c, 0xde, 0xeb, + 0x29, 0x86, 0x5a, 0x69, 0xa0, 0xd7, 0x21, 0x41, 0xd7, 0x26, 0x78, 0x8b, 0x86, 0x2d, 0x64, 0x30, + 0x60, 0xef, 0xf4, 0x2e, 0x36, 0xfa, 0xf4, 0xee, 0x12, 0x00, 0xdf, 0xf0, 0x32, 0x95, 0xa6, 0xbd, + 0x44, 0x3f, 0xc5, 0xf2, 0x6a, 0x4a, 0x13, 0xbd, 0x0e, 0x04, 0x18, 0xeb, 0x64, 0x02, 0xc6, 0x22, + 0xb3, 0xc5, 0xbe, 0x65, 0xa6, 0x52, 0xbb, 0x6b, 0x9e, 0x70, 0xb2, 0x14, 0x72, 0x23, 0x01, 0x71, + 0x53, 0x69, 0x8a, 0x7f, 0x14, 0x83, 0x2c, 0xe9, 0x4b, 0xaa, 0x61, 0xaa, 0x75, 0xa5, 0xc5, 0xa4, + 0xf5, 0xae, 0x35, 0x8a, 0xc6, 0x47, 0x90, 0x37, 0x1f, 0x43, 0x1f, 0xc2, 0xd4, 0x13, 0x22, 0x2f, + 0x59, 0x6d, 0x58, 0x36, 0xf6, 0x7e, 0x08, 0xbe, 0xbf, 0xde, 0x55, 0x2e, 0x68, 0x1a, 0xb8, 0xa9, + 0x58, 0xa7, 0x82, 0x94, 0x92, 0x4f, 0x58, 0xae, 0x81, 0xb6, 0x00, 0xe9, 0xb8, 0xa5, 0x98, 0xea, + 0x53, 0x2c, 0x1f, 0x50, 0x97, 0xde, 0xa9, 0x9f, 0x44, 0x5c, 0x55, 0x9a, 0xb5, 0x30, 0x6f, 0x5b, + 0x88, 0xb9, 0x23, 0x98, 0x0f, 0xaa, 0xf0, 0x94, 0x6a, 0xbe, 0xe8, 0x51, 0x13, 0xeb, 0x91, 0x8e, + 0x92, 0xc4, 0x3f, 0x99, 0x85, 0xb4, 0xc7, 0xd4, 0xd1, 0x13, 0x58, 0xec, 0xf4, 0xda, 0x58, 0x27, + 0xcd, 0x67, 0x9e, 0xd0, 0xf2, 0xe2, 0xac, 0xde, 0x77, 0xa2, 0x74, 0x98, 0xd5, 0x6d, 0x8b, 0x04, + 0x75, 0x88, 0xac, 0x0f, 0x96, 0xc7, 0xa4, 0xf9, 0x4e, 0x40, 0x3e, 0x7a, 0x06, 0xcb, 0x75, 0xc5, + 0xc4, 0x4d, 0x2d, 0xa0, 0x52, 0x26, 0xc6, 0xf7, 0x22, 0x55, 0x5a, 0x70, 0x88, 0x78, 0xab, 0x5d, + 0xac, 0x07, 0x96, 0x20, 0x0c, 0xe8, 0x48, 0x56, 0x3a, 0x5a, 0xe7, 0xa4, 0xad, 0x9a, 0x27, 0xde, + 0xd1, 0xea, 0x8d, 0x48, 0x55, 0x7e, 0x98, 0xb7, 0xb0, 0xed, 0xca, 0xb2, 0x47, 0xbe, 0x3c, 0x52, + 0x4d, 0x4b, 0x6e, 0xa8, 0x74, 0xa1, 0xd4, 0xa9, 0x66, 0x7c, 0x84, 0x6a, 0x36, 0x8b, 0x16, 0xb6, + 0x53, 0x4d, 0xcb, 0x97, 0x87, 0x74, 0x58, 0x3a, 0x92, 0xdb, 0x4a, 0xd7, 0x1a, 0xf7, 0x9c, 0x25, + 0x2b, 0xbe, 0x80, 0x1c, 0x4d, 0x75, 0x1f, 0x6e, 0x29, 0xdd, 0x92, 0x4d, 0xc1, 0x51, 0xdd, 0x51, + 0x40, 0x3e, 0xfa, 0x4c, 0x80, 0xcb, 0x0d, 0xdc, 0x32, 0x15, 0xb9, 0xab, 0x63, 0x03, 0x77, 0xea, + 0x38, 0xa0, 0x76, 0xb6, 0xc8, 0xbc, 0x11, 0xa9, 0xf6, 0x22, 0x21, 0xb6, 0xcb, 0x69, 0x05, 0xb0, + 0x71, 0xb1, 0x31, 0x08, 0x20, 0xb7, 0x09, 0xf3, 0x41, 0xa6, 0x77, 0xba, 0xce, 0x93, 0xdb, 0x86, + 0xc5, 0x60, 0x9b, 0x3a, 0x25, 0xbd, 0xcf, 0x04, 0xc8, 0xfa, 0x2d, 0x06, 0xbd, 0xd7, 0xef, 0x99, + 0x86, 0x91, 0x73, 0x7c, 0xcf, 0xfb, 0x30, 0x45, 0xbc, 0x84, 0x79, 0xe2, 0x2c, 0x64, 0x87, 0x79, + 0xf1, 0x12, 0x85, 0x23, 0xd8, 0x98, 0xff, 0xcb, 0xfd, 0x92, 0x00, 0x59, 0xbf, 0x69, 0x9d, 0x8d, + 0x9f, 0x1d, 0x98, 0x33, 0x70, 
0xc7, 0x50, 0xa9, 0x33, 0x54, 0x4c, 0x53, 0x57, 0xf7, 0x7b, 0x26, + 0x8e, 0xe8, 0x0c, 0x91, 0x8d, 0x9a, 0xb7, 0x30, 0x73, 0x5f, 0x4c, 0xc0, 0x7c, 0x90, 0x45, 0xa2, + 0xfd, 0x7e, 0x36, 0x4b, 0xa7, 0xb6, 0xef, 0xd5, 0x9a, 0xd2, 0x6c, 0xe2, 0x86, 0xdf, 0xb3, 0x5f, + 0x82, 0x94, 0x8e, 0x9b, 0xcc, 0x92, 0x1b, 0x56, 0x68, 0x06, 0x2c, 0x8b, 0xc6, 0x3a, 0x06, 0x64, + 0x95, 0xde, 0xb1, 0xda, 0x52, 0x15, 0xfd, 0x84, 0xc5, 0x02, 0xd6, 0x40, 0x5e, 0x3e, 0x3d, 0x2f, + 0x79, 0x8b, 0x22, 0x1b, 0xc0, 0x66, 0x14, 0x4f, 0xda, 0xc8, 0xfd, 0x37, 0x01, 0x52, 0x2e, 0x7e, + 0x7f, 0x18, 0xc7, 0xff, 0xdc, 0xff, 0x8e, 0x41, 0xc6, 0x2b, 0x87, 0x33, 0x8d, 0xfe, 0x7a, 0xbf, + 0xb1, 0xec, 0x3d, 0x2f, 0x05, 0x59, 0xe1, 0xc1, 0x97, 0x1c, 0x16, 0xd4, 0x61, 0xda, 0x5d, 0xd1, + 0x97, 0x12, 0x0e, 0xe4, 0xbe, 0x2b, 0xc0, 0xc5, 0x81, 0x1e, 0x78, 0x14, 0xef, 0xc0, 0xd9, 0x1d, + 0xa5, 0x3f, 0x49, 0xa1, 0xfd, 0xe9, 0x5a, 0xc4, 0x60, 0xad, 0xaf, 0xbb, 0xd8, 0x8b, 0x09, 0x3f, + 0x77, 0x15, 0x2e, 0xd0, 0x79, 0xc1, 0xa7, 0xd8, 0x99, 0x37, 0x93, 0xc9, 0x82, 0x35, 0xd7, 0xff, + 0x04, 0x96, 0x9d, 0xb9, 0xfe, 0x19, 0x26, 0x0b, 0x8b, 0x36, 0x15, 0x6f, 0x64, 0xf5, 0x08, 0x9c, + 0x12, 0xf9, 0xd4, 0xf3, 0x87, 0x79, 0x9b, 0x46, 0xd5, 0x35, 0x91, 0xf8, 0xba, 0xd0, 0x1f, 0xb6, + 0x79, 0x56, 0x12, 0xc2, 0xfc, 0xd1, 0x20, 0x89, 0xf8, 0xa2, 0x38, 0x36, 0x0f, 0xef, 0x8f, 0xe2, + 0xf8, 0xfc, 0xfc, 0xaf, 0x0a, 0x41, 0x61, 0x1c, 0x67, 0x82, 0x39, 0x80, 0xbb, 0xa7, 0x61, 0xc2, + 0x3f, 0x02, 0xdb, 0x6c, 0xf4, 0x45, 0x75, 0x9c, 0x11, 0xd3, 0x1b, 0xd5, 0x71, 0x0e, 0x58, 0x08, + 0x54, 0x3c, 0x0d, 0x07, 0xce, 0x90, 0x6d, 0xd7, 0xed, 0x0a, 0xf2, 0x9c, 0x5a, 0xdd, 0x41, 0x1e, + 0xaf, 0x75, 0xe2, 0xf4, 0xb5, 0x3a, 0x03, 0xb3, 0x53, 0x6b, 0xcb, 0x97, 0x87, 0x7e, 0x52, 0x08, + 0x08, 0xfa, 0x78, 0xdd, 0x93, 0xa7, 0x57, 0xbc, 0xd7, 0xed, 0x39, 0x8a, 0x3f, 0x0a, 0xc8, 0x47, + 0xbf, 0x38, 0x30, 0x06, 0xe4, 0xcc, 0x4c, 0x51, 0x66, 0xee, 0x9d, 0x86, 0x99, 0x10, 0x87, 0x64, + 0x73, 0x15, 0x16, 0x12, 0x32, 0x80, 0xdc, 0xe7, 0x82, 0x3f, 0x26, 0xe4, 0x7c, 0xbf, 0x03, 0x53, + 0x6d, 0xb5, 0x23, 0xb3, 0x33, 0x66, 0x83, 0x8f, 0xc7, 0xb0, 0xf3, 0x53, 0xc9, 0xb6, 0xda, 0xa1, + 0xff, 0x28, 0xaa, 0x72, 0xcc, 0x51, 0x63, 0x91, 0x50, 0x95, 0x63, 0x86, 0x5a, 0x82, 0x99, 0x27, + 0x3d, 0xa5, 0x63, 0xaa, 0x2d, 0x2c, 0xf3, 0xb3, 0x5b, 0xe3, 0x11, 0xce, 0x6e, 0x65, 0x2c, 0x24, + 0x9a, 0x34, 0x72, 0x9f, 0x8d, 0xf7, 0xc7, 0xa6, 0xbc, 0x5d, 0xbf, 0x21, 0xc0, 0x0b, 0x94, 0xb2, + 0x33, 0xec, 0xc8, 0x87, 0xaa, 0x61, 0x6a, 0x4d, 0x5d, 0x69, 0xcb, 0xfb, 0xbd, 0xfa, 0x11, 0x36, + 0xad, 0x4d, 0xec, 0xc7, 0xcf, 0xaf, 0x47, 0xf6, 0x65, 0x97, 0xad, 0x3a, 0x37, 0x68, 0x95, 0xd2, + 0x57, 0x28, 0x53, 0xf6, 0x88, 0xe6, 0x2b, 0x36, 0x72, 0xff, 0x32, 0x06, 0x97, 0x86, 0xd0, 0x40, + 0x5f, 0x85, 0xf3, 0xfe, 0xa6, 0xb5, 0xb4, 0x67, 0x58, 0x97, 0xe9, 0xd9, 0x04, 0xbe, 0x20, 0xb8, + 0xec, 0xad, 0x68, 0x93, 0x00, 0xd0, 0xa3, 0x0a, 0x41, 0xe8, 0xbd, 0x6e, 0xd7, 0x46, 0x8f, 0x05, + 0xa1, 0xef, 0x11, 0x00, 0x86, 0x7e, 0x09, 0x52, 0x4c, 0x7c, 0xb2, 0xa1, 0x7e, 0xca, 0x02, 0x91, + 0xb8, 0x04, 0x2c, 0xab, 0xaa, 0x7e, 0x8a, 0xd1, 0x5d, 0x48, 0x73, 0x00, 0x8f, 0x6a, 0x5f, 0x1a, + 0xa4, 0x5a, 0xbb, 0x22, 0x69, 0x9a, 0xe1, 0x32, 0x0d, 0xa3, 0x9b, 0x80, 0xdc, 0xb4, 0x64, 0xb6, + 0x74, 0x9d, 0xa0, 0x75, 0x66, 0x5d, 0x90, 0x05, 0x92, 0x9f, 0xfb, 0x22, 0xe1, 0x9e, 0x5a, 0x70, + 0x4b, 0xf8, 0x55, 0x01, 0xae, 0xe0, 0x27, 0x3d, 0xf5, 0xa9, 0xd2, 0xa2, 0xdd, 0xb2, 0xde, 0x52, + 0x0c, 0x23, 0xd4, 0x16, 0x3e, 0x79, 0x1e, 0xbe, 0xd1, 0x95, 0xe1, 0xd7, 0xff, 0x65, 0x17, 0x2b, + 0x05, 0xc2, 0x49, 0x9f, 0x05, 0xfc, 0xbc, 0x00, 0x39, 
0x07, 0xbf, 0xe4, 0x03, 0x47, 0xb7, 0x21, + 0x6b, 0xc7, 0x1e, 0xf2, 0x08, 0xe7, 0x1e, 0x33, 0x56, 0x00, 0xc2, 0x25, 0xfb, 0x3a, 0x2c, 0xf6, + 0x4b, 0x85, 0x6a, 0x94, 0x19, 0xc0, 0xbc, 0x9f, 0x51, 0xa2, 0xdb, 0xdc, 0x4f, 0xc7, 0xe1, 0x5c, + 0x68, 0xe3, 0xd0, 0x5d, 0x10, 0x83, 0x69, 0x06, 0xd8, 0xe7, 0x57, 0x82, 0xe8, 0xbb, 0xac, 0x34, + 0x9c, 0x56, 0xbf, 0xb1, 0x06, 0xd2, 0x1a, 0xc5, 0x64, 0xff, 0xb2, 0x10, 0x6c, 0xb3, 0xf2, 0x73, + 0xb6, 0x06, 0xbf, 0x36, 0xcf, 0x64, 0xed, 0x7f, 0x7f, 0xd2, 0x3d, 0x71, 0xe5, 0xd6, 0xfe, 0x5b, + 0x02, 0xdc, 0x70, 0x26, 0x9f, 0x51, 0x3d, 0xe0, 0x27, 0xcf, 0x63, 0x6c, 0x76, 0x65, 0xf8, 0xad, + 0xfe, 0x9a, 0xcd, 0xd2, 0xfd, 0xc1, 0xee, 0xef, 0x37, 0x63, 0x90, 0x73, 0xc8, 0xfc, 0xf9, 0x32, + 0x7e, 0x94, 0x87, 0x8b, 0x9d, 0x5e, 0x5b, 0x6e, 0x90, 0x50, 0xbb, 0x53, 0x37, 0x65, 0x9f, 0x9c, + 0x0d, 0x6e, 0x58, 0xb9, 0x4e, 0xaf, 0x5d, 0xe4, 0x30, 0x55, 0x4f, 0xbb, 0x0d, 0xf4, 0x00, 0xe6, + 0x4d, 0xad, 0xdb, 0x8f, 0x39, 0x92, 0x8b, 0x44, 0xa6, 0xd6, 0xf5, 0x11, 0xce, 0x7d, 0x33, 0x0e, + 0xe7, 0x42, 0xe5, 0x8f, 0x76, 0xe1, 0xa5, 0x70, 0xa3, 0xe8, 0xef, 0x9b, 0x2f, 0x84, 0xa8, 0xcb, + 0xd5, 0x3d, 0x07, 0x52, 0xec, 0xef, 0xa1, 0x61, 0x14, 0x7f, 0x60, 0x9d, 0x74, 0x80, 0xf1, 0x3e, + 0xd7, 0x4e, 0xfa, 0x8b, 0x09, 0xff, 0xd2, 0x0d, 0xef, 0xa8, 0xbf, 0x2c, 0x40, 0xae, 0x2f, 0x68, + 0xb5, 0xfb, 0x27, 0xb7, 0xea, 0x83, 0xe7, 0x15, 0xb7, 0xfa, 0x32, 0xfd, 0xfd, 0x73, 0xe9, 0x28, + 0xb8, 0x38, 0xf7, 0x37, 0x05, 0x38, 0xef, 0x45, 0xe5, 0x13, 0x5a, 0x2e, 0x8c, 0xe7, 0xd5, 0x21, + 0xd7, 0x60, 0xce, 0xd9, 0xa8, 0xb4, 0xa7, 0x2d, 0xdc, 0x78, 0x90, 0x5d, 0x64, 0x3b, 0xd2, 0xdc, + 0xbf, 0x89, 0xc1, 0xc5, 0x81, 0x6d, 0x42, 0x57, 0x20, 0x4d, 0x22, 0x5b, 0x87, 0x18, 0xb3, 0xed, + 0xe9, 0xb6, 0xda, 0xb1, 0xc9, 0x50, 0x20, 0xe5, 0xb8, 0xaf, 0xc6, 0xe9, 0xb6, 0x72, 0xec, 0x00, + 0xf9, 0x2c, 0x33, 0xd1, 0x67, 0x99, 0x3f, 0xdd, 0x67, 0x99, 0xec, 0xd2, 0x53, 0xe3, 0x4b, 0x52, + 0x9f, 0x47, 0x07, 0x91, 0xcc, 0x73, 0x32, 0xc4, 0x3c, 0xff, 0x2c, 0x11, 0xba, 0xd6, 0xc1, 0xed, + 0xf4, 0xf7, 0x05, 0xb8, 0x12, 0x3e, 0xb1, 0xf1, 0x1b, 0xec, 0xf1, 0x73, 0x9f, 0xdb, 0x84, 0x95, + 0xf6, 0x05, 0x56, 0x8d, 0x21, 0x70, 0xb9, 0x5f, 0x16, 0xe0, 0xc5, 0x10, 0x62, 0x5f, 0x8e, 0x51, + 0xbf, 0x06, 0x0b, 0x8e, 0x51, 0x77, 0x75, 0x6d, 0x5f, 0xd9, 0x57, 0x5b, 0x96, 0x91, 0x09, 0xd2, + 0xbc, 0x5d, 0xb8, 0xeb, 0x94, 0xe5, 0xfe, 0x38, 0x06, 0x57, 0xa3, 0x35, 0x19, 0x5d, 0x83, 0x19, + 0x62, 0xe1, 0x6e, 0xca, 0x02, 0xa5, 0x9c, 0x69, 0xab, 0x1d, 0x17, 0x4d, 0x0a, 0xa8, 0x1c, 0x07, + 0xb0, 0x90, 0x69, 0x2b, 0xc7, 0x6e, 0xc0, 0xa1, 0x96, 0xfe, 0x0b, 0x21, 0x96, 0xfe, 0xf4, 0xff, + 0x9b, 0xde, 0x9f, 0x9b, 0xed, 0x6f, 0x24, 0xad, 0x73, 0x26, 0xe2, 0x23, 0xc8, 0x78, 0x87, 0x25, + 0xb4, 0x6e, 0x5d, 0xbb, 0x8a, 0x32, 0x25, 0xe6, 0x57, 0xb2, 0x82, 0x4f, 0xd6, 0x7c, 0x3b, 0x0e, + 0x09, 0x36, 0xe9, 0x7d, 0x09, 0xd2, 0x6a, 0xc7, 0xc4, 0x4d, 0xac, 0xbb, 0xa6, 0xdb, 0xf1, 0xf2, + 0x98, 0x34, 0xcd, 0xb3, 0x19, 0xd8, 0x0b, 0x90, 0x3a, 0x68, 0x69, 0x8a, 0xe9, 0x9a, 0x58, 0x0b, + 0xe5, 0x31, 0x09, 0x68, 0x26, 0x03, 0xb9, 0x02, 0xd3, 0x86, 0xa9, 0xab, 0x9d, 0xa6, 0xec, 0xbd, + 0x1b, 0x96, 0x62, 0xb9, 0x76, 0x75, 0xfb, 0x9a, 0xd6, 0xc2, 0x8a, 0x35, 0xbb, 0x1f, 0xe7, 0x07, + 0x8f, 0xa7, 0x79, 0xb6, 0x3d, 0x15, 0xb7, 0x6f, 0x42, 0x70, 0xc0, 0xc4, 0xb0, 0xfb, 0x10, 0xe5, + 0x31, 0x29, 0x63, 0x23, 0x31, 0x32, 0x6f, 0x01, 0x90, 0x1c, 0x4e, 0x61, 0xc2, 0xbb, 0xd4, 0x6d, + 0x9e, 0x74, 0x31, 0xc5, 0xde, 0x39, 0x28, 0x2a, 0x27, 0xe5, 0x31, 0x69, 0x8a, 0xc0, 0x32, 0xc4, + 0x75, 0x80, 0x86, 0x62, 0x5a, 0x88, 0x6c, 0xb9, 0x66, 0xd6, 0x83, 0x58, 0x54, 
0x4c, 0x4c, 0x70, + 0x08, 0x18, 0xc3, 0x29, 0xc0, 0x6c, 0x43, 0x39, 0x91, 0xb5, 0x03, 0xf9, 0x19, 0xc6, 0x47, 0x1c, + 0x35, 0x49, 0xcf, 0x83, 0x2d, 0xfa, 0x50, 0x4f, 0x76, 0x0e, 0x1e, 0x60, 0x7c, 0x44, 0x38, 0x6e, + 0x58, 0x09, 0x4a, 0xc4, 0x5e, 0x12, 0xfd, 0x51, 0x98, 0xb2, 0xaf, 0x11, 0xa1, 0x0f, 0xe8, 0xcd, + 0x36, 0x7e, 0x6f, 0x69, 0xf0, 0x86, 0x40, 0x91, 0x5f, 0x58, 0x2a, 0x8f, 0x49, 0xc9, 0x06, 0xff, + 0xbf, 0x91, 0x81, 0xe9, 0xae, 0xa2, 0x1b, 0xb8, 0xc1, 0xee, 0xd4, 0x8a, 0x3f, 0x1b, 0x83, 0xa4, + 0x05, 0x88, 0x5e, 0xa2, 0xf7, 0x0d, 0x2d, 0x9b, 0xea, 0x6f, 0x24, 0xbd, 0x82, 0x88, 0xd1, 0x9b, + 0x90, 0x72, 0xb5, 0x8e, 0xdf, 0x15, 0x0e, 0x69, 0x17, 0x91, 0x0a, 0xff, 0x8b, 0x56, 0x60, 0x9c, + 0xb2, 0x1d, 0x1f, 0x24, 0x7c, 0x89, 0xc2, 0xa0, 0x12, 0x50, 0x15, 0xc8, 0x9f, 0x6a, 0x1d, 0xeb, + 0x0a, 0xe1, 0xf5, 0x21, 0xed, 0xa4, 0x34, 0x1e, 0x69, 0x1d, 0x2c, 0x25, 0x4d, 0xfe, 0x2f, 0xf7, + 0x2a, 0x24, 0xad, 0x5c, 0xf4, 0x12, 0x64, 0xd8, 0xed, 0x15, 0xb9, 0xad, 0x76, 0x7a, 0xd6, 0xc9, + 0xa4, 0x84, 0x94, 0x66, 0xb9, 0x5b, 0x2c, 0x53, 0xfc, 0x33, 0x01, 0xb2, 0xfe, 0xb3, 0xb1, 0xa8, + 0x05, 0xe7, 0x9c, 0x93, 0x47, 0xa6, 0xe7, 0x8c, 0xa6, 0xc1, 0xc5, 0xb5, 0x3a, 0x64, 0x5f, 0xc6, + 0x7b, 0xb2, 0xd3, 0x28, 0x8f, 0x49, 0x4b, 0x6a, 0x70, 0x11, 0xc2, 0xb0, 0xc8, 0x2f, 0xce, 0xf8, + 0xab, 0x62, 0x1a, 0xbf, 0x39, 0xf0, 0x12, 0x4d, 0x7f, 0x45, 0x0b, 0x7a, 0x50, 0xc1, 0x46, 0x16, + 0x32, 0x5e, 0xfa, 0xe2, 0x7f, 0x4c, 0xc2, 0xd2, 0xae, 0xae, 0xb6, 0x69, 0x20, 0xec, 0x05, 0x47, + 0x12, 0x64, 0x74, 0xdc, 0x6d, 0x29, 0x64, 0x3a, 0xe2, 0x3e, 0x6a, 0xf0, 0x72, 0x28, 0x33, 0x14, + 0x98, 0xfb, 0x33, 0xbe, 0x31, 0x9c, 0xe6, 0x24, 0xb8, 0x58, 0xef, 0x02, 0x3f, 0x8f, 0xef, 0x3d, + 0x48, 0x70, 0x65, 0xe0, 0xa5, 0x0d, 0x9b, 0xd8, 0xb4, 0xee, 0x4a, 0xa3, 0xbf, 0x00, 0x0b, 0xf5, + 0x43, 0x85, 0x1e, 0xda, 0xd7, 0xe9, 0x3b, 0x16, 0xde, 0x93, 0x02, 0x61, 0x67, 0x95, 0x0a, 0x16, + 0xce, 0x96, 0x62, 0x1c, 0xd9, 0xa4, 0xe7, 0xea, 0xfd, 0xd9, 0xc8, 0x84, 0x8b, 0x75, 0xfd, 0xa4, + 0x6b, 0x6a, 0xb2, 0x25, 0x88, 0x83, 0x83, 0x63, 0xf9, 0xa0, 0x8b, 0xbd, 0x87, 0x05, 0x42, 0x9f, + 0x8c, 0xa0, 0xb8, 0x5c, 0x2c, 0xb7, 0x0f, 0x8e, 0x6f, 0x77, 0x1d, 0xb9, 0x9c, 0xab, 0x87, 0x15, + 0xa2, 0x2e, 0x9c, 0x3f, 0x50, 0x8f, 0x71, 0x83, 0xad, 0x2d, 0xb0, 0x41, 0x82, 0x78, 0x56, 0xcf, + 0xa1, 0x81, 0xb5, 0xd0, 0x8d, 0xa5, 0x63, 0xdc, 0x20, 0x03, 0xe3, 0x86, 0x85, 0x67, 0x57, 0xb9, + 0x7c, 0x10, 0x52, 0x86, 0xaa, 0x90, 0xed, 0xab, 0x66, 0x62, 0xf0, 0xc5, 0x95, 0x3e, 0xea, 0x33, + 0xfb, 0x3e, 0xa2, 0x26, 0x5c, 0xb4, 0xa4, 0xf6, 0x4c, 0x35, 0x0f, 0x9d, 0x1b, 0xe1, 0x56, 0x0d, + 0x93, 0x03, 0x85, 0xc7, 0x25, 0xf3, 0x40, 0x35, 0x0f, 0xad, 0x0e, 0xe5, 0x08, 0x4f, 0x0f, 0x2b, + 0x44, 0xf7, 0x20, 0x4b, 0xdd, 0x48, 0x57, 0xd1, 0x6d, 0x1b, 0x4b, 0x0e, 0xbc, 0x4a, 0x48, 0xdc, + 0xc5, 0xae, 0xa2, 0x3b, 0x56, 0x46, 0x07, 0x12, 0x27, 0x07, 0x3d, 0x00, 0xc4, 0xad, 0xe0, 0x50, + 0x31, 0x0e, 0x2d, 0xa2, 0x53, 0x03, 0x0f, 0x4f, 0x32, 0xd5, 0x97, 0x15, 0xe3, 0xd0, 0x39, 0x19, + 0x52, 0xf7, 0xe5, 0xd1, 0x2b, 0x1e, 0xc4, 0xb5, 0x1b, 0x87, 0xea, 0x81, 0xcd, 0x6c, 0x6a, 0xa0, + 0xdc, 0x89, 0xeb, 0xab, 0x12, 0x70, 0x47, 0xee, 0x0d, 0x6f, 0x16, 0xd2, 0xe1, 0x3c, 0x67, 0xb7, + 0x81, 0x4d, 0xac, 0xb7, 0xd5, 0x0e, 0xdd, 0x88, 0xb3, 0xe8, 0x4f, 0x47, 0x30, 0xd9, 0xa2, 0x1b, + 0xd1, 0x6f, 0xb2, 0x01, 0x85, 0x01, 0x8e, 0xe5, 0x0b, 0x01, 0x32, 0x5e, 0xc9, 0xa2, 0xfb, 0x30, + 0x43, 0xb5, 0x62, 0x6a, 0x32, 0xbf, 0x6c, 0xc3, 0x6f, 0xcb, 0xaf, 0x46, 0xd2, 0x8c, 0x9d, 0x94, + 0xd2, 0x84, 0x4c, 0x4d, 0x2b, 0x31, 0x22, 0xe2, 0xd7, 0x05, 0xe6, 0xf3, 0x49, 0x19, 0x3a, 0x07, + 0x0b, 
0xb5, 0xca, 0x56, 0x49, 0xde, 0xcd, 0x4b, 0x35, 0xdf, 0x09, 0xe9, 0x24, 0x8c, 0x3f, 0x2c, + 0xe5, 0xa5, 0xac, 0x80, 0xa6, 0x20, 0xb1, 0xb5, 0xb3, 0x5d, 0x2b, 0x67, 0x63, 0x28, 0x0b, 0xd3, + 0xc5, 0xfc, 0x43, 0x79, 0xe7, 0xb6, 0xcc, 0x72, 0xe2, 0x68, 0x06, 0x52, 0x3c, 0xe7, 0x41, 0xa9, + 0xf4, 0x61, 0x76, 0x9c, 0x80, 0x90, 0x7f, 0x24, 0x87, 0xe2, 0x27, 0x08, 0x48, 0x79, 0x67, 0x4f, + 0x22, 0x39, 0xc5, 0xfc, 0xc3, 0xec, 0x84, 0x58, 0x85, 0xac, 0x5f, 0xe3, 0xe8, 0x47, 0x00, 0xb8, + 0x1e, 0x86, 0x5f, 0x86, 0x64, 0xc8, 0xf4, 0x32, 0x64, 0xdd, 0xfa, 0x2b, 0xfe, 0xb1, 0x00, 0xe7, + 0x42, 0xf5, 0x71, 0x66, 0xf2, 0xf4, 0x4c, 0x48, 0x4f, 0xd7, 0xb5, 0xa6, 0x62, 0xba, 0x9e, 0xd2, + 0x88, 0x7a, 0xb5, 0x7b, 0xd6, 0xc6, 0xb5, 0xb2, 0xd0, 0xdb, 0x30, 0x49, 0xef, 0x04, 0x1f, 0x5b, + 0x9b, 0x9b, 0x43, 0xef, 0x6e, 0x72, 0x70, 0x71, 0x07, 0x50, 0xff, 0xe0, 0x81, 0xde, 0x81, 0xa9, + 0x0e, 0x7e, 0x36, 0xca, 0x46, 0x50, 0x07, 0x3f, 0xa3, 0xff, 0xc4, 0xf3, 0x70, 0x2e, 0xd4, 0x7f, + 0x88, 0x19, 0x98, 0x76, 0x8f, 0x2b, 0xe2, 0xf7, 0x63, 0x90, 0x26, 0x83, 0x82, 0x51, 0xd3, 0x2a, + 0xcd, 0x8e, 0xa6, 0x63, 0xb4, 0x0a, 0xc8, 0x1e, 0x0e, 0x0c, 0x62, 0xaf, 0xc6, 0x91, 0xca, 0xee, + 0x50, 0x4e, 0xd1, 0x8e, 0x6c, 0x97, 0xd5, 0xb4, 0xea, 0x91, 0xda, 0x45, 0x27, 0x70, 0xbe, 0xae, + 0xb5, 0xdb, 0x5a, 0x47, 0xf6, 0xa2, 0xa9, 0x94, 0x1c, 0x8f, 0x97, 0xde, 0x1e, 0x30, 0x1e, 0xd9, + 0x55, 0xaf, 0x16, 0x28, 0x1d, 0x4f, 0x1e, 0x71, 0xdd, 0x75, 0x3b, 0xdb, 0xaa, 0x98, 0x95, 0x89, + 0xdf, 0x12, 0x60, 0x2e, 0x00, 0x07, 0x5d, 0x05, 0xb1, 0xb0, 0xb3, 0xb5, 0xb5, 0xb3, 0x2d, 0x17, + 0xca, 0x79, 0xa9, 0x2a, 0xd7, 0x76, 0xe4, 0xca, 0x9d, 0xed, 0x1d, 0xc9, 0xff, 0x68, 0x44, 0x0a, + 0x26, 0xb7, 0xf7, 0xb6, 0x4a, 0x52, 0xa5, 0x90, 0x15, 0xd0, 0x3c, 0x64, 0xf3, 0x9b, 0xbb, 0xe5, + 0xbc, 0xbc, 0xb7, 0xbb, 0x5b, 0x92, 0xe4, 0x42, 0xbe, 0x5a, 0xca, 0xc6, 0x9c, 0xdc, 0xcd, 0x9d, + 0x07, 0x56, 0x2e, 0xed, 0x26, 0xbb, 0x7b, 0xdb, 0x85, 0xda, 0x5e, 0xbe, 0x56, 0xd9, 0xd9, 0xce, + 0x8e, 0xa3, 0x0c, 0xc0, 0x83, 0x72, 0xa5, 0x56, 0xaa, 0xee, 0xe6, 0x0b, 0xa5, 0x6c, 0x62, 0x63, + 0x1a, 0xc0, 0x91, 0x86, 0xf8, 0x47, 0x84, 0xcf, 0x80, 0x21, 0xf6, 0x06, 0xcc, 0x92, 0xa1, 0x9b, + 0x0e, 0x3c, 0x56, 0x31, 0x3f, 0xbf, 0x9c, 0xe5, 0x05, 0x36, 0x1a, 0x7a, 0x11, 0x32, 0x9d, 0x5e, + 0x7b, 0x1f, 0xeb, 0x44, 0xb8, 0xa4, 0x94, 0x5f, 0x62, 0x9d, 0x66, 0xb9, 0x35, 0x8d, 0x10, 0x46, + 0x57, 0x48, 0x8c, 0xf1, 0x14, 0xeb, 0x06, 0x96, 0x35, 0xbd, 0x81, 0xd9, 0xad, 0xc5, 0x24, 0x09, + 0x1e, 0x68, 0xe6, 0x0e, 0xc9, 0x43, 0xf7, 0x61, 0x3e, 0x50, 0x57, 0xe3, 0x03, 0x8f, 0x97, 0x7b, + 0x64, 0x2c, 0xa1, 0x7a, 0xbf, 0x3e, 0xfe, 0x85, 0x00, 0xcb, 0x61, 0x63, 0x30, 0xfa, 0x2a, 0xa4, + 0xfc, 0x4b, 0x9b, 0xc3, 0x6c, 0x1a, 0x5a, 0xee, 0x6d, 0xb2, 0x94, 0x7f, 0x1d, 0x73, 0x28, 0x7a, + 0x6f, 0xe0, 0x72, 0xa6, 0xe0, 0x9e, 0x4a, 0x8b, 0xdf, 0x88, 0xc1, 0x8c, 0x9f, 0xe5, 0x3b, 0x30, + 0x69, 0x2d, 0xcc, 0xb3, 0x05, 0x87, 0x5b, 0xd1, 0x22, 0x02, 0x9e, 0x96, 0x2c, 0x6c, 0x7a, 0x65, + 0x80, 0xaf, 0x12, 0xac, 0x42, 0xbc, 0xad, 0x76, 0x22, 0x35, 0x9f, 0x00, 0x52, 0x78, 0xe5, 0x38, + 0x52, 0x7b, 0x09, 0x20, 0xaa, 0xc0, 0x2c, 0x0f, 0x10, 0xe8, 0x7d, 0x7d, 0x67, 0x46, 0x3a, 0x0c, + 0x3b, 0xeb, 0x42, 0x63, 0x8e, 0xe4, 0x77, 0xc6, 0x2d, 0x1f, 0x1c, 0x14, 0xa9, 0x9d, 0xd9, 0x07, + 0xbb, 0x5c, 0x66, 0x6c, 0x24, 0x97, 0x89, 0x0c, 0x98, 0xe1, 0x2e, 0x47, 0x69, 0x75, 0x0f, 0x95, + 0x7d, 0x6c, 0xf2, 0xa7, 0xbc, 0xca, 0xa3, 0x06, 0xa3, 0xab, 0xb7, 0x0f, 0x8e, 0x99, 0x07, 0xd9, + 0xa6, 0x67, 0x9b, 0xf2, 0x9c, 0x1e, 0x89, 0x84, 0x58, 0x15, 0x56, 0x0e, 0x7a, 0x19, 0xf8, 0x6b, + 0x48, 0x4e, 0xa5, 0x09, 0xee, 
0x14, 0x33, 0xac, 0xc0, 0x06, 0x5d, 0x84, 0x84, 0xae, 0x34, 0xd4, + 0x63, 0x1a, 0x47, 0x26, 0xe8, 0xcb, 0x66, 0x24, 0x19, 0x36, 0xea, 0x24, 0x4f, 0x3b, 0xea, 0x88, + 0x7f, 0x43, 0x80, 0xa5, 0x90, 0x16, 0xa0, 0x15, 0xb8, 0x7a, 0xfb, 0xf6, 0x47, 0x32, 0x77, 0x84, + 0xdb, 0xf9, 0x5a, 0xe5, 0x7e, 0x49, 0xa6, 0xbe, 0x6c, 0xa3, 0x54, 0x1b, 0xe4, 0x08, 0xc9, 0x00, + 0x5f, 0xfa, 0x28, 0x5f, 0x2c, 0x15, 0x2a, 0x5b, 0xf9, 0xcd, 0x6c, 0x0c, 0x5d, 0x80, 0x65, 0xc7, + 0x27, 0x32, 0x12, 0xb2, 0x05, 0x1e, 0x47, 0xb3, 0x90, 0xf6, 0x66, 0x8d, 0x6f, 0x00, 0x24, 0x2d, + 0x19, 0x89, 0xff, 0x47, 0x80, 0x29, 0x5b, 0xfd, 0xa8, 0x02, 0x53, 0x34, 0x56, 0x52, 0xad, 0x8b, + 0x7b, 0xe1, 0x33, 0xaa, 0x9a, 0x05, 0x67, 0x63, 0xd3, 0x95, 0x0a, 0x2b, 0x97, 0x90, 0xea, 0x75, + 0x9e, 0xe9, 0x4a, 0xb7, 0x8b, 0x2d, 0x77, 0x10, 0x46, 0x6a, 0xcf, 0x82, 0xf3, 0x90, 0xb2, 0xb1, + 0xd1, 0x16, 0xa4, 0x8e, 0xda, 0x86, 0x6c, 0x11, 0x1b, 0x3c, 0x85, 0xfa, 0xb0, 0x6d, 0x3c, 0xe8, + 0xa7, 0x06, 0x47, 0x76, 0xf6, 0x46, 0x12, 0x26, 0xd8, 0x51, 0x2a, 0xf1, 0x3a, 0xa0, 0xfe, 0x66, + 0x04, 0x5d, 0xb9, 0x11, 0xaf, 0x02, 0xea, 0xe7, 0x12, 0x65, 0x21, 0x6e, 0x75, 0xae, 0x69, 0x89, + 0xfc, 0x15, 0x3f, 0x81, 0xb9, 0x00, 0x06, 0x88, 0x7b, 0xe3, 0xc8, 0xb2, 0x83, 0x00, 0x3c, 0x8b, + 0x00, 0x5c, 0x85, 0x19, 0xa7, 0xb7, 0xba, 0x6f, 0xf5, 0xa4, 0xed, 0x0e, 0x49, 0x6f, 0x21, 0xfe, + 0xa9, 0x00, 0x33, 0xbe, 0x38, 0x1b, 0x5d, 0x87, 0xac, 0xcb, 0xf5, 0xca, 0x0d, 0xe5, 0xc4, 0x5a, + 0x4c, 0xc8, 0x38, 0x1e, 0xb6, 0xa8, 0x9c, 0x18, 0x04, 0xd2, 0xe5, 0xe3, 0x19, 0x24, 0x1b, 0xa5, + 0x32, 0x8e, 0x2b, 0xa7, 0x90, 0xa7, 0x8e, 0x97, 0x50, 0xde, 0xe3, 0x77, 0xc6, 0xa3, 0xf9, 0x1d, + 0x7a, 0x0c, 0xd4, 0x4a, 0x10, 0x05, 0xb5, 0xb1, 0x79, 0xa8, 0x35, 0xc4, 0xef, 0xc4, 0x60, 0x29, + 0x64, 0xc9, 0x02, 0x69, 0x30, 0xd3, 0xbf, 0xf6, 0x31, 0xe8, 0x4c, 0x6f, 0x08, 0xa1, 0x90, 0x7c, + 0xc9, 0x4f, 0x3d, 0xf7, 0xbb, 0x02, 0x2c, 0x06, 0xc3, 0x9e, 0xf9, 0xcd, 0x3b, 0x15, 0x96, 0xbb, + 0xd6, 0x4a, 0x87, 0x6f, 0x99, 0x85, 0xf7, 0x9d, 0xd5, 0xf0, 0x73, 0x84, 0x41, 0x0b, 0x24, 0xd2, + 0x52, 0x37, 0xb8, 0x40, 0xfc, 0x7a, 0x1c, 0xe6, 0xa8, 0xd2, 0x7c, 0x4d, 0x78, 0x13, 0x26, 0xe8, + 0x79, 0xcf, 0xa8, 0x07, 0xb8, 0x39, 0x34, 0x2a, 0xc2, 0x54, 0x5d, 0xeb, 0x34, 0x54, 0xd7, 0xb3, + 0x26, 0x57, 0x07, 0xae, 0x08, 0x15, 0x2c, 0x68, 0xc9, 0x41, 0x44, 0x47, 0x03, 0x04, 0x30, 0x7e, + 0x1a, 0x01, 0x94, 0xc7, 0x42, 0x45, 0x30, 0x78, 0xfd, 0x2c, 0xf1, 0x9c, 0xd7, 0xcf, 0x02, 0xe6, + 0x9f, 0x9f, 0x0b, 0xb0, 0x10, 0xb8, 0x3a, 0x86, 0x64, 0x58, 0x60, 0x6f, 0xcc, 0x04, 0x5b, 0xf6, + 0xca, 0x20, 0x9d, 0xf8, 0x0c, 0x60, 0xfe, 0xa0, 0x3f, 0xd3, 0x40, 0x0f, 0x61, 0x8e, 0x2f, 0xe6, + 0x19, 0xbd, 0x6e, 0x57, 0xc7, 0x86, 0xc1, 0x57, 0xf2, 0x06, 0x3d, 0xee, 0xc7, 0x78, 0xad, 0x3a, + 0x08, 0x12, 0xd2, 0xfd, 0x59, 0x86, 0xf8, 0x10, 0x66, 0xfb, 0x00, 0xbd, 0xd6, 0x21, 0x9c, 0xd2, + 0x3a, 0xc4, 0x5f, 0x4a, 0xc0, 0x8c, 0xaf, 0x18, 0xd5, 0x20, 0x85, 0x8f, 0x9d, 0x16, 0x0c, 0x7e, + 0xe1, 0xd1, 0x87, 0xbc, 0x5a, 0x72, 0x30, 0x25, 0x37, 0x99, 0xdc, 0x6f, 0x91, 0xe1, 0xd0, 0xae, + 0xe3, 0x74, 0x07, 0xa6, 0x4b, 0x90, 0xd4, 0xba, 0x58, 0x57, 0x4c, 0xfe, 0x28, 0x49, 0x66, 0xc0, + 0xaa, 0x64, 0x8b, 0xea, 0x45, 0x69, 0xed, 0x70, 0x04, 0xc9, 0x46, 0x75, 0x36, 0x55, 0xc6, 0x23, + 0x6f, 0xaa, 0xe4, 0x3e, 0x01, 0xb0, 0xb9, 0x37, 0xd0, 0x2e, 0x80, 0x2d, 0x43, 0xcb, 0x84, 0x5e, + 0x89, 0x28, 0x21, 0x47, 0x0f, 0x2e, 0x1a, 0xb9, 0x6f, 0xc5, 0x20, 0xe5, 0x92, 0x1d, 0x6a, 0x93, + 0x01, 0xa5, 0x49, 0xcf, 0xee, 0xda, 0x4d, 0x66, 0xeb, 0x26, 0x1b, 0xa3, 0x6b, 0x62, 0x75, 0x93, + 0x91, 0xb2, 0x65, 0x31, 0xd3, 0xf2, 0x66, 0xa0, 0xaa, 
0xa7, 0x41, 0x4c, 0xe5, 0xaf, 0x8e, 0xda, + 0x20, 0xd2, 0x59, 0x5d, 0x64, 0xc4, 0xf7, 0x61, 0xc6, 0x57, 0x31, 0xba, 0x0c, 0x17, 0x36, 0x77, + 0xee, 0x54, 0x0a, 0xf9, 0x4d, 0x79, 0x67, 0xb7, 0x24, 0xe5, 0x6b, 0x3b, 0x92, 0x2f, 0x22, 0x9b, + 0x84, 0x78, 0x7e, 0xbb, 0x98, 0x15, 0xec, 0x7d, 0x91, 0x5f, 0x13, 0x60, 0x31, 0xf8, 0xb9, 0x04, + 0x32, 0x8d, 0xb4, 0xbb, 0xb3, 0xef, 0x62, 0x71, 0xd6, 0x55, 0xc0, 0x6e, 0x15, 0x37, 0x61, 0xd9, + 0xdb, 0xf7, 0x65, 0xa3, 0xd7, 0x6e, 0x2b, 0xba, 0x6a, 0x1f, 0x6b, 0xbf, 0x19, 0xe9, 0xb1, 0x86, + 0x2a, 0xc5, 0x3a, 0x91, 0x96, 0xcc, 0x80, 0x6c, 0x15, 0x1b, 0xe2, 0xb7, 0x26, 0x60, 0x21, 0x10, + 0xe5, 0x8c, 0x37, 0xe6, 0xed, 0x3e, 0x13, 0x1b, 0xa5, 0xcf, 0xdc, 0xf7, 0x3b, 0x49, 0xae, 0xdd, + 0x51, 0x87, 0x3d, 0x1f, 0x95, 0x70, 0x87, 0x9a, 0x78, 0x4e, 0x0e, 0xf5, 0x9e, 0xfd, 0x3a, 0x9c, + 0xe5, 0x50, 0xf9, 0xea, 0x74, 0x74, 0x67, 0x9a, 0xf1, 0x3a, 0x53, 0x54, 0x83, 0x49, 0xb6, 0xd3, + 0x6a, 0x1d, 0xd9, 0x79, 0x77, 0x14, 0x8d, 0xaf, 0x5a, 0x9a, 0x67, 0xd7, 0xfa, 0x2d, 0x52, 0xc1, + 0x56, 0x38, 0x19, 0x6c, 0x85, 0xb9, 0x9f, 0x17, 0x20, 0xed, 0xa1, 0xe3, 0x6c, 0xd7, 0x0a, 0xae, + 0xed, 0x5a, 0xf4, 0x10, 0xc6, 0xed, 0x6b, 0x19, 0x99, 0xd0, 0xc0, 0x2b, 0x98, 0x4f, 0x9f, 0x78, + 0x69, 0x35, 0x05, 0xad, 0x81, 0x25, 0x4a, 0x12, 0x2d, 0xc3, 0x64, 0x83, 0xed, 0x7c, 0xb3, 0x0d, + 0x5b, 0xc9, 0x4a, 0x8a, 0x9f, 0xc0, 0x72, 0x18, 0x2e, 0x99, 0x55, 0xd5, 0xa4, 0xfc, 0x76, 0xf5, + 0xf6, 0x8e, 0xb4, 0x45, 0x17, 0x7f, 0x64, 0xa9, 0x54, 0xdd, 0xdb, 0xac, 0xc9, 0x85, 0x9d, 0x62, + 0xc0, 0xf2, 0x52, 0x75, 0xaf, 0x50, 0x28, 0x55, 0xab, 0x6c, 0xd9, 0xb5, 0x24, 0x49, 0x3b, 0x52, + 0x36, 0x26, 0x6a, 0x90, 0xac, 0xd6, 0x0f, 0x71, 0xa3, 0x47, 0xef, 0x00, 0xe7, 0x74, 0x5c, 0xef, + 0xe9, 0x3a, 0x3d, 0xca, 0xd1, 0xc5, 0xba, 0xaa, 0x35, 0x64, 0xeb, 0x69, 0x73, 0xde, 0x39, 0xce, + 0xf5, 0x6d, 0xfd, 0x16, 0x39, 0x40, 0x79, 0x4c, 0x5a, 0x76, 0xd0, 0x77, 0x29, 0xb6, 0x55, 0x46, + 0xa2, 0x5c, 0xf6, 0x3a, 0x84, 0xf8, 0x4f, 0x63, 0x30, 0xe3, 0x7f, 0x0d, 0xe1, 0x94, 0xf7, 0xfe, + 0x7d, 0x17, 0xf5, 0xe3, 0x7d, 0x17, 0xf5, 0xfd, 0xaf, 0x4d, 0x8e, 0x8f, 0xf4, 0xda, 0xe4, 0x7b, + 0x90, 0xea, 0x75, 0x9d, 0x2d, 0xdf, 0xc4, 0x70, 0x64, 0x06, 0x4e, 0x91, 0xfb, 0x9f, 0x7e, 0x99, + 0x38, 0xf5, 0xd3, 0x2f, 0xe2, 0x6f, 0xc7, 0x00, 0x15, 0xfb, 0x5e, 0xcd, 0xf9, 0x61, 0x14, 0x5b, + 0xe0, 0x63, 0x5b, 0x13, 0x67, 0x7c, 0x6c, 0x4b, 0x7c, 0x02, 0x89, 0x92, 0xae, 0x6b, 0x3a, 0xba, + 0xe9, 0xf4, 0x33, 0x66, 0xce, 0xc8, 0x22, 0xaa, 0x77, 0xeb, 0xf4, 0xae, 0x54, 0xcf, 0xb0, 0xfb, + 0x1e, 0x7a, 0x97, 0x1d, 0x5c, 0xa0, 0x5c, 0x5a, 0x61, 0xe3, 0xc0, 0x86, 0x38, 0xd0, 0xe2, 0x77, + 0x12, 0x00, 0x77, 0xb5, 0xfd, 0x9a, 0xae, 0x36, 0x9b, 0x58, 0xff, 0xf2, 0x54, 0x75, 0x17, 0x52, + 0x96, 0x9d, 0x3d, 0xd6, 0xf6, 0xb9, 0xaa, 0xa2, 0xbe, 0x24, 0x42, 0x82, 0x03, 0xd5, 0xce, 0x23, + 0xb1, 0x9c, 0xc9, 0xf8, 0xb5, 0x86, 0x8c, 0xb0, 0x58, 0xce, 0x69, 0xd9, 0x2a, 0xff, 0x95, 0x6c, + 0x54, 0xf4, 0x3a, 0x4c, 0x60, 0x22, 0x6d, 0xeb, 0x00, 0x50, 0x58, 0x30, 0x47, 0x55, 0x22, 0x71, + 0x58, 0xbf, 0xcd, 0x4d, 0x9e, 0xc5, 0xe6, 0x92, 0x23, 0xd9, 0xdc, 0x07, 0x90, 0x6e, 0x29, 0x86, + 0x29, 0xeb, 0xbd, 0x0e, 0x43, 0x9f, 0x1a, 0x8a, 0x9e, 0x22, 0x08, 0x52, 0xaf, 0x43, 0xf1, 0xbf, + 0x06, 0x13, 0xec, 0x23, 0x0c, 0xcb, 0x40, 0x47, 0x86, 0xeb, 0xc3, 0x85, 0xc6, 0x2d, 0x8d, 0xe3, + 0xe5, 0xaa, 0x30, 0x69, 0x19, 0xca, 0x57, 0x21, 0x69, 0x70, 0x7f, 0x3c, 0x24, 0x1c, 0xb1, 0xdc, + 0x76, 0x79, 0x4c, 0xb2, 0x51, 0x36, 0xa6, 0x60, 0x92, 0xeb, 0x41, 0x2c, 0xc3, 0x04, 0xab, 0x06, + 0x2d, 0x02, 0xaa, 0xd6, 0xf2, 0xb5, 0xbd, 0x6a, 0xff, 0x98, 0x50, 0x2e, 0xe5, 
0x37, 0x6b, 0xe5, + 0x87, 0x59, 0x01, 0x01, 0x4c, 0xec, 0xe6, 0xf7, 0xaa, 0xa5, 0x22, 0x7b, 0x96, 0xba, 0x90, 0xdf, + 0x2e, 0x94, 0x36, 0x37, 0x4b, 0xc5, 0x6c, 0x7c, 0x23, 0x01, 0xf1, 0xc7, 0xda, 0xbe, 0xf8, 0x7b, + 0xe3, 0x30, 0xc1, 0xde, 0xd9, 0x40, 0xf7, 0x20, 0x6d, 0x28, 0x4f, 0x5d, 0x2f, 0x30, 0x0a, 0x03, + 0x97, 0xa9, 0x18, 0xd6, 0x6a, 0x55, 0x79, 0x6a, 0x3f, 0xce, 0x58, 0x1e, 0x93, 0xa6, 0x0d, 0x57, + 0x1a, 0x95, 0x61, 0xb2, 0xdb, 0xdb, 0x97, 0x8d, 0xde, 0xfe, 0x90, 0x77, 0xba, 0x38, 0xb1, 0xdd, + 0xde, 0x7e, 0x4b, 0x35, 0x0e, 0x6b, 0xda, 0x6e, 0x6f, 0xbf, 0xda, 0xdb, 0x2f, 0x8f, 0x49, 0x13, + 0x5d, 0xfa, 0x0f, 0x1d, 0xc1, 0x52, 0x97, 0x15, 0xf2, 0x70, 0xf2, 0x44, 0x36, 0x35, 0xb9, 0x6e, + 0xd4, 0xeb, 0x43, 0x26, 0x4e, 0x5e, 0xca, 0x7c, 0xe8, 0xae, 0x69, 0x05, 0xa3, 0x5e, 0x2f, 0x8f, + 0x49, 0xf3, 0xdd, 0x80, 0x7c, 0xd4, 0x86, 0xa5, 0xc7, 0xda, 0xbe, 0xdc, 0xd1, 0x4c, 0xf5, 0x40, + 0x65, 0x8f, 0x88, 0xca, 0xb8, 0x4d, 0x3d, 0x0c, 0xb3, 0xc2, 0xd7, 0x06, 0x57, 0x76, 0x57, 0xdb, + 0xdf, 0x76, 0xe1, 0x96, 0x28, 0x6a, 0x79, 0x4c, 0x5a, 0x78, 0x1c, 0x54, 0x90, 0x93, 0x61, 0xda, + 0x2d, 0x45, 0xb4, 0x63, 0x3f, 0x7d, 0xe5, 0x39, 0x19, 0xb2, 0x12, 0xfd, 0xe9, 0x2b, 0xeb, 0xb5, + 0x2b, 0xfe, 0x40, 0xc0, 0x35, 0x98, 0xf1, 0x49, 0x96, 0x44, 0x43, 0xa6, 0xd6, 0xe5, 0x97, 0x3c, + 0xa7, 0x24, 0x96, 0xc8, 0x2d, 0xc2, 0x7c, 0x90, 0xa0, 0x72, 0x4b, 0xb0, 0x10, 0xd8, 0x26, 0x12, + 0x02, 0xb0, 0xd7, 0x59, 0xc4, 0x5f, 0x11, 0xe0, 0x42, 0x81, 0x76, 0x5e, 0xff, 0x7b, 0x49, 0x43, + 0xde, 0x53, 0xbb, 0x07, 0xd9, 0xbe, 0x07, 0x99, 0x62, 0x23, 0x3d, 0xc8, 0x34, 0xe3, 0x7b, 0xed, + 0x0c, 0x5d, 0x82, 0x94, 0xfd, 0x2a, 0x9a, 0xda, 0xe0, 0x1e, 0x16, 0xac, 0xac, 0x4a, 0x43, 0xfc, + 0x1d, 0x01, 0x2e, 0xec, 0x51, 0x67, 0x11, 0xc2, 0x6c, 0x90, 0x6b, 0xff, 0x12, 0x18, 0x75, 0x5c, + 0x1c, 0xdd, 0x6f, 0x8b, 0x87, 0xf8, 0x28, 0x1a, 0xcc, 0x6f, 0x29, 0xc6, 0x91, 0xe5, 0xe2, 0xc8, + 0x7f, 0x71, 0x0d, 0xce, 0xdd, 0xc1, 0x66, 0xf4, 0x06, 0x88, 0x3f, 0x23, 0xc0, 0x79, 0xf6, 0x1a, + 0x91, 0x07, 0xc5, 0x18, 0xa6, 0xa1, 0x8b, 0x00, 0x5d, 0xa5, 0x89, 0x65, 0x53, 0x3b, 0xe2, 0x6f, + 0x88, 0x4e, 0x49, 0x53, 0x24, 0xa7, 0x46, 0x32, 0xd0, 0x79, 0xa0, 0x09, 0x67, 0xdf, 0x2b, 0x21, + 0x25, 0x49, 0x06, 0x3d, 0x40, 0x7a, 0x0e, 0x92, 0x74, 0x9b, 0x50, 0xde, 0x3f, 0xe1, 0x8f, 0x2e, + 0x4d, 0xd2, 0xf4, 0xc6, 0x89, 0xf8, 0x0b, 0x02, 0x5c, 0x08, 0x66, 0x87, 0xbf, 0x91, 0x54, 0x85, + 0x59, 0xbf, 0xc0, 0xad, 0x45, 0x80, 0xa8, 0x12, 0xcf, 0xfa, 0x24, 0x6e, 0xa0, 0xab, 0x30, 0xd3, + 0xc1, 0xc7, 0xa6, 0xdc, 0xd7, 0xa2, 0x34, 0xc9, 0xde, 0xb5, 0x5a, 0x25, 0xae, 0xc3, 0x85, 0x22, + 0x6e, 0xe1, 0x51, 0x2c, 0x84, 0xee, 0x96, 0xb0, 0x3e, 0xe0, 0x0c, 0x0b, 0xc3, 0x84, 0xbb, 0x01, + 0x29, 0xe2, 0x6b, 0xb8, 0x83, 0xe7, 0x06, 0xf5, 0xc2, 0xd0, 0xd1, 0x46, 0x82, 0xc7, 0x4e, 0x20, + 0x72, 0x11, 0x80, 0xe3, 0x3b, 0xe6, 0x3e, 0xc5, 0x73, 0x2a, 0x0d, 0x62, 0x28, 0xc4, 0x31, 0x3d, + 0x0d, 0xe4, 0x2b, 0xa8, 0x1d, 0xbf, 0x26, 0xc0, 0x12, 0xeb, 0x1e, 0x91, 0xe0, 0x9f, 0x4b, 0x1b, + 0xce, 0xd4, 0x15, 0x56, 0x60, 0xfe, 0x0e, 0x36, 0xa3, 0x35, 0xee, 0xfb, 0x02, 0xcc, 0x31, 0x25, + 0x15, 0x5b, 0xdd, 0xbb, 0xda, 0xfe, 0x30, 0x05, 0xf9, 0x82, 0xb1, 0xd8, 0x59, 0x82, 0xb1, 0x0a, + 0x24, 0x75, 0xd5, 0x38, 0xa2, 0x84, 0xe2, 0x83, 0xcf, 0x1e, 0x06, 0x3d, 0xe8, 0x55, 0x1e, 0x93, + 0x26, 0x09, 0x3e, 0x21, 0xb5, 0x00, 0x13, 0x44, 0xe6, 0x6a, 0x83, 0x77, 0xab, 0xc4, 0x63, 0x6d, + 0xbf, 0xd2, 0xb0, 0x86, 0xf5, 0xbf, 0x2b, 0xc0, 0x22, 0xe9, 0x5b, 0x8e, 0x48, 0x7e, 0x40, 0xbd, + 0xdc, 0xf5, 0x32, 0x5a, 0xc2, 0xf3, 0x32, 0xda, 0x4f, 0x09, 0xb0, 0xd4, 0xc7, 0x21, 0xef, 0xf8, + 0x45, 
0x98, 0x76, 0xd9, 0x93, 0xd5, 0xe7, 0x23, 0x18, 0x54, 0xca, 0x31, 0xa8, 0xe8, 0x3d, 0xfd, + 0x16, 0x2c, 0xb1, 0x9e, 0x1e, 0xcd, 0x7e, 0xfe, 0x76, 0x0c, 0xb2, 0x7e, 0x35, 0x93, 0x99, 0x21, + 0xff, 0x9a, 0x98, 0x77, 0xcc, 0x7e, 0x31, 0xf4, 0x05, 0x07, 0xf7, 0x68, 0x9d, 0x36, 0x3c, 0x0f, + 0x67, 0x3e, 0xd7, 0x17, 0x46, 0x43, 0x5f, 0x0a, 0x8d, 0x87, 0xbf, 0xf7, 0xea, 0x7a, 0xa1, 0x6d, + 0x7c, 0xa4, 0x17, 0xda, 0x7e, 0x2f, 0x01, 0x13, 0xac, 0x57, 0x05, 0xfa, 0x89, 0x37, 0xf8, 0x67, + 0x5c, 0x06, 0x7f, 0x0e, 0x89, 0x11, 0x70, 0x7d, 0xb7, 0xe5, 0x7d, 0xfa, 0xf0, 0xbb, 0x89, 0xf9, + 0x5a, 0xf4, 0xd5, 0x81, 0x78, 0xc4, 0x1c, 0x48, 0x88, 0x8c, 0x25, 0x86, 0x84, 0x3e, 0x82, 0x69, + 0xda, 0xe7, 0xac, 0x39, 0xe2, 0xf8, 0xe0, 0x08, 0x6e, 0xc0, 0x05, 0x86, 0xf2, 0x98, 0x94, 0xd2, + 0x5d, 0x6f, 0x67, 0x3c, 0x02, 0x6b, 0x40, 0xb7, 0x89, 0x0f, 0x3e, 0x3e, 0x1a, 0xf6, 0x82, 0x65, + 0x79, 0x4c, 0xb2, 0x34, 0x6e, 0xd1, 0x3e, 0xd3, 0x27, 0x35, 0xde, 0x01, 0xa0, 0x8f, 0xe3, 0x47, + 0x9d, 0x75, 0x4d, 0x51, 0x68, 0x8a, 0xfa, 0x06, 0x24, 0x71, 0xa7, 0x11, 0x75, 0xc6, 0x35, 0x89, + 0x3b, 0x0d, 0x8a, 0x76, 0x1d, 0xb2, 0xae, 0x1e, 0xcb, 0x0c, 0x0c, 0xa8, 0xe6, 0x33, 0x4e, 0x97, + 0xa4, 0xb6, 0xe5, 0x4c, 0x24, 0x53, 0xd1, 0x27, 0x92, 0x62, 0x1d, 0x92, 0x96, 0x5e, 0xd1, 0x39, + 0x58, 0xb8, 0xbb, 0xb3, 0x21, 0x93, 0xd9, 0x4f, 0xc0, 0x82, 0xd8, 0x6e, 0x69, 0xbb, 0x58, 0xd9, + 0xbe, 0x93, 0x15, 0x48, 0x42, 0xda, 0xdb, 0xde, 0x26, 0x89, 0x18, 0x4a, 0xc2, 0x78, 0x71, 0x67, + 0xbb, 0x94, 0x8d, 0xa3, 0x69, 0x48, 0xb2, 0x79, 0x50, 0xa9, 0x98, 0x1d, 0x27, 0x33, 0xa4, 0xdb, + 0xf9, 0x0a, 0xf9, 0x9f, 0x20, 0xf3, 0x2c, 0x6b, 0x85, 0xee, 0x2a, 0x64, 0xef, 0x60, 0xd3, 0x3b, + 0x40, 0x04, 0x39, 0x83, 0x7f, 0x2f, 0x00, 0x22, 0x5e, 0x8c, 0x41, 0x06, 0xf8, 0xd8, 0x71, 0x8f, + 0x8f, 0x75, 0x9c, 0xa1, 0xe0, 0x76, 0x86, 0x5e, 0xe7, 0x1a, 0xf3, 0x39, 0x57, 0xaf, 0x63, 0x8e, + 0xfb, 0x1d, 0xb3, 0xd5, 0xa9, 0x12, 0xa3, 0x75, 0x2a, 0xb7, 0xcb, 0x9e, 0xf0, 0x06, 0x66, 0x5d, + 0x98, 0xf3, 0xb4, 0x89, 0x7b, 0xe5, 0x57, 0x61, 0xfc, 0xb1, 0xb6, 0x6f, 0x79, 0xe3, 0x8b, 0x03, + 0x2b, 0x92, 0x28, 0x68, 0x64, 0x17, 0xfc, 0x32, 0xcc, 0x15, 0x94, 0x4e, 0x1d, 0xb7, 0x86, 0x4b, + 0xfc, 0x65, 0x98, 0x63, 0xde, 0x7a, 0x38, 0xe8, 0x6f, 0x0a, 0x70, 0x89, 0x8f, 0xf4, 0x7d, 0xeb, + 0x6c, 0xc3, 0x46, 0xc3, 0x47, 0x30, 0x17, 0xf0, 0x22, 0xfa, 0x90, 0x63, 0x20, 0x01, 0xd5, 0xa0, + 0xfe, 0x77, 0xd3, 0x87, 0x4f, 0x4f, 0xfe, 0x9d, 0x00, 0x97, 0x58, 0xfc, 0x15, 0xce, 0x78, 0x90, + 0x7f, 0xfd, 0x32, 0x99, 0x3e, 0x53, 0x7c, 0xb6, 0x0e, 0x17, 0x48, 0x77, 0x1a, 0xa5, 0x31, 0xe2, + 0xcf, 0x0a, 0xf0, 0x15, 0x6a, 0x86, 0x7d, 0x58, 0x3f, 0xa8, 0x09, 0xcb, 0x3f, 0x14, 0xe0, 0x52, + 0x28, 0x47, 0xbc, 0x93, 0x7c, 0x0c, 0xf3, 0x01, 0x2a, 0xb0, 0x3a, 0xcd, 0x08, 0x3a, 0x98, 0xeb, + 0xd7, 0x41, 0xf4, 0xfe, 0xf4, 0x06, 0x5c, 0xe2, 0x9d, 0x64, 0x24, 0x91, 0xff, 0x83, 0x18, 0x9c, + 0xdf, 0x54, 0xf4, 0x26, 0x66, 0x5f, 0xe9, 0x73, 0xbe, 0xc2, 0xc9, 0x63, 0x89, 0x32, 0xa4, 0xf8, + 0xc2, 0x44, 0x57, 0x31, 0x0f, 0x79, 0x88, 0x13, 0x7a, 0x48, 0xbf, 0xa5, 0xf5, 0x1a, 0x3c, 0xce, + 0xd9, 0x55, 0xcc, 0x43, 0x09, 0x18, 0x2e, 0xf9, 0x8f, 0x14, 0x58, 0xac, 0x93, 0x72, 0xd9, 0x8a, + 0x9a, 0xe8, 0xd7, 0x70, 0x0c, 0x6c, 0x9d, 0xd4, 0x5b, 0x89, 0x40, 0xf4, 0xb6, 0x4a, 0x3f, 0xcf, + 0x47, 0xaf, 0x97, 0xf4, 0x67, 0xa3, 0x6d, 0x98, 0xd9, 0x57, 0x9b, 0xf2, 0x93, 0x1e, 0xd6, 0x4f, + 0x64, 0xb6, 0xa1, 0x17, 0xed, 0x11, 0x35, 0x6a, 0xc2, 0xe5, 0x31, 0x29, 0xbd, 0xef, 0xce, 0x70, + 0x1d, 0xba, 0xfa, 0xae, 0x00, 0xf3, 0xa4, 0x32, 0xdc, 0xf0, 0x5d, 0x8f, 0xf0, 0x2f, 0xfe, 0x0a, + 0x43, 0x17, 0x7f, 0x63, 0xfd, 
0x8b, 0xbf, 0x2d, 0x58, 0x6a, 0x11, 0x1d, 0xc8, 0xfc, 0x30, 0xa0, + 0xeb, 0x1b, 0xaa, 0x83, 0x57, 0xba, 0x06, 0x68, 0xae, 0x3c, 0x26, 0x2d, 0xb4, 0x82, 0x8a, 0xed, + 0xdd, 0xdf, 0xbf, 0x1e, 0x83, 0x05, 0x6f, 0x9b, 0xf8, 0xa7, 0x8c, 0x50, 0x01, 0x26, 0x3c, 0x21, + 0xed, 0x8d, 0x01, 0x21, 0xad, 0x5f, 0x22, 0x12, 0x47, 0xf5, 0x87, 0x33, 0xb1, 0x91, 0xc2, 0x99, + 0xaf, 0x79, 0xe3, 0xbf, 0x95, 0x48, 0x0c, 0x78, 0x62, 0x40, 0x27, 0xe8, 0x18, 0x1f, 0x21, 0xe8, + 0xf8, 0x0f, 0x02, 0x64, 0xbc, 0x44, 0x03, 0xbd, 0xee, 0x1e, 0xcc, 0xb0, 0x0d, 0x32, 0xd3, 0xfe, + 0x2c, 0xd4, 0xe0, 0x7b, 0x65, 0x81, 0x72, 0x96, 0x32, 0x9c, 0x88, 0x25, 0xf7, 0x07, 0x90, 0xed, + 0x62, 0xf6, 0xd1, 0x22, 0x4e, 0x76, 0xd8, 0xfe, 0x79, 0x30, 0xdd, 0x19, 0x4e, 0x85, 0xa7, 0x0d, + 0xf1, 0x1f, 0x09, 0x70, 0x9e, 0x0d, 0x8b, 0x5e, 0x84, 0x61, 0x5e, 0xd5, 0x31, 0x84, 0xd8, 0xe9, + 0x0d, 0xe1, 0x16, 0xcc, 0x19, 0xb4, 0xdc, 0x75, 0x63, 0xc9, 0x1e, 0x03, 0xb3, 0x86, 0x07, 0xb5, + 0xd2, 0x10, 0x7f, 0x5d, 0x80, 0xf3, 0x6c, 0x24, 0x0c, 0xe6, 0x35, 0x48, 0x1f, 0xcf, 0x85, 0xcf, + 0x33, 0x0d, 0x77, 0xab, 0xb0, 0x7c, 0x07, 0x9b, 0x91, 0x39, 0x16, 0xff, 0x9a, 0x00, 0x39, 0x32, + 0xb0, 0x78, 0x31, 0x7e, 0x50, 0xc3, 0xdc, 0xdf, 0xe2, 0xcb, 0x84, 0x7d, 0xdc, 0xf0, 0x21, 0xee, + 0x1e, 0xcc, 0xfa, 0x55, 0x68, 0x8d, 0x6f, 0x2f, 0x45, 0x12, 0xb5, 0x34, 0xe3, 0xd5, 0x73, 0xf4, + 0x71, 0xed, 0x55, 0x38, 0xcf, 0xc6, 0xb5, 0xc8, 0xb2, 0x5d, 0xd9, 0x82, 0xb4, 0xe7, 0x13, 0xb1, + 0x68, 0x09, 0xe6, 0x0a, 0x3b, 0xdb, 0xb5, 0xd2, 0xb6, 0xff, 0x8c, 0x72, 0x16, 0xa6, 0xad, 0x82, + 0x5a, 0xe9, 0xa3, 0x5a, 0x56, 0x40, 0xb3, 0x90, 0xb6, 0x72, 0xd8, 0xb7, 0x3f, 0x63, 0x2b, 0x9f, + 0x09, 0x30, 0xed, 0xfe, 0x6e, 0x33, 0xba, 0x08, 0xe7, 0xb6, 0xf2, 0xb5, 0x42, 0xb9, 0xb2, 0x7d, + 0x27, 0xe8, 0xb3, 0xa1, 0x17, 0x60, 0xd9, 0x5b, 0x7c, 0x7b, 0x6f, 0x73, 0x53, 0xa6, 0x59, 0x59, + 0x01, 0x5d, 0x82, 0xf3, 0xde, 0xd2, 0xdd, 0xbc, 0x54, 0xab, 0xe4, 0x2d, 0x80, 0x58, 0x3f, 0x40, + 0x65, 0xfb, 0x7e, 0x49, 0xaa, 0x96, 0x38, 0x40, 0x7c, 0x65, 0xd7, 0x79, 0x74, 0xdf, 0xf5, 0xf6, + 0x3d, 0x99, 0x23, 0x95, 0xb6, 0xf7, 0xb6, 0x82, 0x38, 0x4a, 0xc1, 0x64, 0x85, 0x66, 0xf0, 0x16, + 0x4a, 0x95, 0xea, 0x87, 0x72, 0x7e, 0x3b, 0xbf, 0xf9, 0xb0, 0x5a, 0xa9, 0x66, 0x63, 0x2b, 0xff, + 0x4a, 0x00, 0xd4, 0x7f, 0xac, 0x0b, 0x5d, 0x81, 0x4b, 0x52, 0x69, 0x93, 0x9e, 0x48, 0x08, 0x3f, + 0x54, 0x34, 0x0d, 0xc9, 0xd2, 0xbd, 0xbd, 0xfc, 0xa6, 0x5c, 0xdb, 0xc9, 0x0a, 0x44, 0xa0, 0xdb, + 0x3b, 0x35, 0xd9, 0xce, 0xa1, 0xf7, 0xc1, 0xee, 0x48, 0xa5, 0x7c, 0xad, 0x24, 0xc9, 0xb5, 0x72, + 0x7e, 0x9b, 0x7d, 0x1d, 0x75, 0xb3, 0x54, 0xad, 0xb2, 0xe4, 0x38, 0xca, 0xc1, 0xa2, 0x1b, 0x40, + 0xde, 0x91, 0x18, 0x7a, 0x35, 0x9b, 0x20, 0x8a, 0xb3, 0x41, 0x5d, 0x05, 0x13, 0x64, 0x0e, 0x57, + 0xfa, 0xa8, 0x52, 0xad, 0x55, 0xb3, 0x93, 0x2b, 0x12, 0x80, 0x33, 0xd5, 0x21, 0xd2, 0x2f, 0x6e, + 0xee, 0xca, 0x64, 0xba, 0x18, 0x20, 0x89, 0x19, 0x48, 0x71, 0x49, 0x10, 0x88, 0xac, 0x80, 0x16, + 0x60, 0xd6, 0x23, 0x0d, 0x9a, 0x1d, 0x5b, 0xe9, 0xc0, 0x5c, 0xc0, 0xd8, 0x82, 0xae, 0x82, 0x48, + 0xbf, 0xf5, 0x51, 0x94, 0x2b, 0xdb, 0xb7, 0x77, 0x58, 0x05, 0x43, 0x27, 0xa5, 0x53, 0x90, 0x90, + 0x4a, 0xf9, 0xe2, 0xc3, 0x6c, 0xcc, 0x35, 0xf5, 0x8c, 0x33, 0xa5, 0xdc, 0xcf, 0x6f, 0x56, 0x8a, + 0xd9, 0xf1, 0xf5, 0x3f, 0xb8, 0x49, 0x1b, 0x51, 0xc5, 0xfa, 0x53, 0xb5, 0x8e, 0xd1, 0xdf, 0x73, + 0xbe, 0x01, 0xce, 0x2d, 0x19, 0xdd, 0x1c, 0xba, 0x12, 0xe4, 0xfa, 0x92, 0x4d, 0xee, 0x56, 0x44, + 0x68, 0xd6, 0xbf, 0xc5, 0xf5, 0xbf, 0xf4, 0xf9, 0xff, 0xf8, 0xb9, 0xd8, 0x4d, 0xf1, 0xda, 0xda, + 0xd3, 0xf5, 0xb5, 0x1f, 0x63, 0xbe, 0xe6, 0xab, 0x5d, 
0x5d, 0x7b, 0x8c, 0xeb, 0xa6, 0xb1, 0xb6, + 0xf2, 0x13, 0x6b, 0xfc, 0xdb, 0x94, 0xef, 0xf2, 0x25, 0x8b, 0x77, 0x85, 0x15, 0xf4, 0x4d, 0x01, + 0x52, 0xae, 0xef, 0x86, 0xa1, 0x97, 0x23, 0x7f, 0x27, 0x2e, 0xb7, 0x12, 0x05, 0x94, 0xb3, 0xb6, + 0x46, 0x59, 0x7b, 0x59, 0x7c, 0x31, 0x8c, 0x35, 0xfa, 0x6d, 0xb2, 0x77, 0xd9, 0x45, 0x66, 0xc2, + 0xd7, 0xaf, 0x0a, 0x30, 0xdb, 0xf7, 0x79, 0x2a, 0xb4, 0x16, 0xe5, 0xd4, 0x81, 0x5b, 0x82, 0xaf, + 0x44, 0x47, 0xe0, 0x9c, 0xbe, 0x41, 0x39, 0x5d, 0x13, 0x57, 0x86, 0x09, 0xd1, 0x09, 0xf3, 0x2d, + 0x7e, 0xa5, 0xc8, 0xfc, 0x4a, 0xa3, 0xf2, 0x2b, 0x9d, 0x9d, 0x5f, 0xdd, 0xc3, 0xef, 0x4f, 0x0a, + 0x90, 0xf6, 0x7c, 0xe0, 0x02, 0xdd, 0x08, 0xfd, 0xee, 0x6c, 0xff, 0xb7, 0x35, 0x72, 0x37, 0xa3, + 0x01, 0x73, 0x1e, 0x17, 0x28, 0x8f, 0x33, 0x28, 0x4d, 0x78, 0x74, 0x8e, 0x9e, 0x7f, 0x4f, 0x80, + 0x85, 0xc0, 0x9d, 0x47, 0xf4, 0x5a, 0xe8, 0xa9, 0xfd, 0xf0, 0x7d, 0xca, 0x5c, 0xc4, 0xad, 0x25, + 0xb1, 0x49, 0xb9, 0x51, 0xc4, 0x5b, 0x6e, 0x89, 0x69, 0x7a, 0x53, 0xe9, 0xa8, 0x9f, 0xb2, 0xa3, + 0x75, 0xd4, 0x20, 0x7d, 0xfb, 0x4f, 0xef, 0x0a, 0x2b, 0x8f, 0x6e, 0x89, 0xd7, 0x43, 0xed, 0xb7, + 0x1f, 0x9c, 0xb6, 0x2f, 0x70, 0xb3, 0x32, 0xb4, 0x7d, 0x83, 0xb6, 0x36, 0x47, 0x6d, 0xdf, 0x3a, + 0x6b, 0x1f, 0x19, 0x4b, 0xfd, 0xad, 0xeb, 0xe3, 0x76, 0x6d, 0xe5, 0x27, 0x68, 0xfb, 0xd6, 0xaf, + 0x3b, 0x38, 0x4e, 0xeb, 0x42, 0xc0, 0xd1, 0x77, 0x05, 0x40, 0xfd, 0x1b, 0x99, 0x28, 0xcc, 0x86, + 0x43, 0xf7, 0x3c, 0x23, 0xb7, 0x4c, 0xa1, 0x2d, 0xfb, 0x51, 0x34, 0x5a, 0xcb, 0x1e, 0xad, 0xa0, + 0xc8, 0xcd, 0x42, 0xdf, 0x17, 0xac, 0xaf, 0xc9, 0xf8, 0xb6, 0x1f, 0xd7, 0x07, 0x5a, 0x7c, 0xe0, + 0xbe, 0x6c, 0xee, 0xb5, 0x91, 0x70, 0x78, 0x67, 0xf1, 0x36, 0x32, 0xaa, 0x79, 0xda, 0x8d, 0x8c, + 0x60, 0x9b, 0xe8, 0xf7, 0x05, 0x58, 0x08, 0xdc, 0x23, 0x0d, 0x35, 0xcc, 0x41, 0x3b, 0xaa, 0xb9, + 0x90, 0x97, 0xd2, 0xad, 0x96, 0xac, 0x8c, 0xaa, 0xae, 0x95, 0xe8, 0xea, 0xfa, 0x53, 0x01, 0x96, + 0xc3, 0x56, 0x0a, 0xd1, 0x9b, 0x03, 0xbd, 0x48, 0xe8, 0x0a, 0x4b, 0x2e, 0xfa, 0x62, 0x8f, 0xd8, + 0xa6, 0x4d, 0x6c, 0x8a, 0xaf, 0x0c, 0x54, 0x56, 0xc0, 0x8a, 0x10, 0xe9, 0x6e, 0xaf, 0x88, 0x37, + 0xc2, 0x54, 0x16, 0x8c, 0x41, 0x9b, 0x1b, 0xb6, 0xbe, 0x18, 0xda, 0xdc, 0x21, 0x0b, 0x92, 0xa7, + 0x68, 0xee, 0xfa, 0x2b, 0xe1, 0x1a, 0x0d, 0xe0, 0x9c, 0x7b, 0x97, 0x57, 0xd6, 0x6f, 0x04, 0xea, + 0x35, 0x14, 0x03, 0xfd, 0x57, 0x01, 0x16, 0x02, 0x97, 0x1f, 0x43, 0xed, 0x74, 0xd0, 0x62, 0xe5, + 0x28, 0x0d, 0xe5, 0x3e, 0x14, 0x8d, 0xdc, 0xd0, 0x47, 0xb7, 0xd0, 0x28, 0xad, 0x44, 0xff, 0x8b, + 0xef, 0xa6, 0x06, 0x2c, 0x4d, 0xa2, 0x37, 0x06, 0xb8, 0x8f, 0xf0, 0xc5, 0xd5, 0xdc, 0x9b, 0xa3, + 0xa2, 0x71, 0xc7, 0xe3, 0x6d, 0xf3, 0x08, 0xb6, 0x6c, 0xb7, 0x39, 0x9a, 0x21, 0xa3, 0xcf, 0x05, + 0x58, 0x0e, 0x5b, 0xe5, 0x0c, 0xb5, 0xe2, 0x21, 0xcb, 0xa2, 0xa1, 0x4e, 0x88, 0xb7, 0x6a, 0xe5, + 0x14, 0x9a, 0x5c, 0x19, 0x49, 0x93, 0xdf, 0x14, 0x20, 0xeb, 0x3f, 0x43, 0x82, 0x56, 0x07, 0xba, + 0xa0, 0xbe, 0x7d, 0xeb, 0xdc, 0xf0, 0xad, 0x72, 0x71, 0x95, 0x36, 0xe8, 0xba, 0x78, 0x25, 0x4c, + 0xe4, 0xae, 0xad, 0x74, 0x1e, 0xe1, 0x67, 0xfd, 0x67, 0x42, 0x42, 0xf9, 0x0a, 0x39, 0x3c, 0x32, + 0x02, 0x5f, 0xeb, 0x57, 0x02, 0x85, 0xe6, 0x62, 0x8a, 0x77, 0xee, 0x9f, 0x11, 0x20, 0xed, 0x39, + 0xfb, 0x11, 0x1a, 0x81, 0x06, 0x9d, 0x10, 0x89, 0xc2, 0xd1, 0x0d, 0xca, 0xd1, 0x4b, 0x28, 0x0a, + 0x47, 0xe8, 0xef, 0x08, 0x30, 0xe3, 0x3b, 0xd6, 0x80, 0x6e, 0x0d, 0xe8, 0x49, 0xfd, 0x07, 0x34, + 0x72, 0xab, 0x51, 0xc1, 0x79, 0x87, 0xf3, 0xf2, 0x37, 0x58, 0x93, 0xe8, 0xa7, 0xe8, 0x73, 0x5c, + 0xde, 0xd3, 0x0e, 0xa1, 0x6a, 0x0c, 0x39, 0x16, 0x11, 0xda, 0x49, 0x38, 0x27, 
0x2b, 0x91, 0x24, + 0xf5, 0x6d, 0x01, 0x50, 0xff, 0xb1, 0xa4, 0xd0, 0xb0, 0x2f, 0xf4, 0x04, 0x53, 0x6e, 0xf0, 0x4e, + 0xa4, 0xf8, 0x16, 0x65, 0xea, 0x55, 0xf1, 0x66, 0x14, 0x83, 0x52, 0x78, 0x2d, 0xc4, 0xb2, 0xfe, + 0x8a, 0x00, 0xd3, 0xee, 0x83, 0x42, 0x68, 0x65, 0x70, 0x20, 0xe0, 0xde, 0x8f, 0x1c, 0xc6, 0xd4, + 0x0a, 0x65, 0xea, 0x45, 0xf1, 0x52, 0xa8, 0xc3, 0x63, 0x9b, 0xaf, 0x84, 0x8f, 0xcf, 0x04, 0x48, + 0xb9, 0xf6, 0x63, 0x43, 0xe7, 0xd6, 0xfd, 0xfb, 0xd0, 0xa1, 0x73, 0xeb, 0x80, 0xed, 0x5d, 0xf1, + 0x1a, 0x65, 0xe9, 0x05, 0x34, 0x8c, 0x25, 0xf4, 0xe3, 0x30, 0x65, 0xef, 0x8d, 0xa3, 0x6b, 0x03, + 0x46, 0xd0, 0x51, 0x04, 0xe2, 0xad, 0xbd, 0xcf, 0x57, 0xb2, 0xaa, 0x89, 0xd9, 0xfc, 0x18, 0x4c, + 0xbb, 0xf7, 0x7f, 0x43, 0x95, 0x12, 0xb0, 0x49, 0x1c, 0x6a, 0xb7, 0xbc, 0xf2, 0x95, 0xa1, 0x95, + 0x53, 0x93, 0x70, 0x6d, 0x54, 0x87, 0x9b, 0x44, 0xff, 0x6e, 0x76, 0x68, 0xed, 0xaf, 0xd2, 0xda, + 0x6f, 0x88, 0x57, 0x87, 0xd4, 0xfe, 0x6e, 0x9d, 0x12, 0x25, 0x26, 0xf1, 0x9f, 0x04, 0x98, 0x0f, + 0x5a, 0xc2, 0x0f, 0x9d, 0x5e, 0x0c, 0x58, 0xef, 0xcf, 0x45, 0x5b, 0xb4, 0x15, 0x31, 0x65, 0x53, + 0xe6, 0xfd, 0x28, 0x6c, 0x5c, 0xf7, 0xad, 0xec, 0x92, 0x80, 0x6d, 0xc0, 0x4a, 0x52, 0x3f, 0x34, + 0x6d, 0x5a, 0xd0, 0x8a, 0x7f, 0x68, 0xd3, 0x06, 0x6c, 0x0f, 0x8c, 0xd8, 0xb4, 0xf5, 0x9b, 0xe1, + 0x83, 0xbb, 0x8f, 0x53, 0x1e, 0x8b, 0xde, 0x5c, 0xbf, 0x16, 0xa8, 0xb4, 0x40, 0x68, 0xf4, 0x87, + 0x02, 0xcc, 0xf6, 0xed, 0x0b, 0x84, 0x2e, 0xee, 0x84, 0xed, 0x20, 0x44, 0x6d, 0x94, 0x4c, 0x1b, + 0xf5, 0x10, 0x8d, 0xd4, 0xa8, 0x47, 0x2f, 0xa3, 0xa8, 0x2d, 0x42, 0xdf, 0x13, 0xd8, 0x39, 0x11, + 0xdf, 0x3e, 0x01, 0x7a, 0x75, 0x80, 0xd3, 0x09, 0xde, 0xe1, 0xc8, 0xad, 0x8f, 0x82, 0xc2, 0xfd, + 0x95, 0xb7, 0x7d, 0x11, 0xed, 0xd1, 0x6e, 0xdf, 0x70, 0x63, 0x44, 0xbf, 0x2b, 0xc0, 0x7c, 0xd0, + 0x6e, 0x43, 0xa8, 0x25, 0x0e, 0xd8, 0x9a, 0x08, 0xed, 0xfc, 0xbc, 0x15, 0x2b, 0x23, 0x6a, 0x69, + 0x25, 0xaa, 0x96, 0x36, 0x3e, 0x13, 0xe0, 0x5c, 0x5d, 0x6b, 0x07, 0xb3, 0xbc, 0x91, 0x2c, 0xb6, + 0xba, 0xbb, 0x84, 0xa3, 0x5d, 0xe1, 0xd1, 0xdb, 0x1c, 0xa4, 0xa9, 0xb5, 0x94, 0x4e, 0x73, 0x55, + 0xd3, 0x9b, 0x6b, 0x4d, 0xdc, 0xa1, 0xfc, 0xae, 0xb1, 0x22, 0xa5, 0xab, 0x1a, 0x6b, 0x9c, 0x02, + 0x71, 0x4c, 0x6b, 0x4f, 0xd7, 0xdf, 0x6b, 0xb4, 0xba, 0xbf, 0x12, 0x9b, 0xbb, 0xc3, 0x50, 0xe9, + 0xf6, 0x3c, 0x71, 0xe9, 0xab, 0xf7, 0xd7, 0xff, 0xc0, 0xca, 0xfd, 0x98, 0xe6, 0x7e, 0x5c, 0x6c, + 0x75, 0x3f, 0xbe, 0xbf, 0xbe, 0x3f, 0x41, 0x09, 0xbe, 0xf6, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, + 0x8e, 0x7f, 0xf9, 0x3f, 0x05, 0x96, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/storage.pb.go b/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/storage.pb.go new file mode 100644 index 0000000..978259e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/privacy/dlp/v2/storage.pb.go @@ -0,0 +1,2903 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/privacy/dlp/v2/storage.proto + +package dlp // import "google.golang.org/genproto/googleapis/privacy/dlp/v2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Categorization of results based on how likely they are to represent a match, +// based on the number of elements they contain which imply a match. +type Likelihood int32 + +const ( + // Default value; same as POSSIBLE. + Likelihood_LIKELIHOOD_UNSPECIFIED Likelihood = 0 + // Few matching elements. + Likelihood_VERY_UNLIKELY Likelihood = 1 + Likelihood_UNLIKELY Likelihood = 2 + // Some matching elements. + Likelihood_POSSIBLE Likelihood = 3 + Likelihood_LIKELY Likelihood = 4 + // Many matching elements. + Likelihood_VERY_LIKELY Likelihood = 5 +) + +var Likelihood_name = map[int32]string{ + 0: "LIKELIHOOD_UNSPECIFIED", + 1: "VERY_UNLIKELY", + 2: "UNLIKELY", + 3: "POSSIBLE", + 4: "LIKELY", + 5: "VERY_LIKELY", +} +var Likelihood_value = map[string]int32{ + "LIKELIHOOD_UNSPECIFIED": 0, + "VERY_UNLIKELY": 1, + "UNLIKELY": 2, + "POSSIBLE": 3, + "LIKELY": 4, + "VERY_LIKELY": 5, +} + +func (x Likelihood) String() string { + return proto.EnumName(Likelihood_name, int32(x)) +} +func (Likelihood) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{0} +} + +// Definitions of file type groups to scan. +type FileType int32 + +const ( + // Includes all files. + FileType_FILE_TYPE_UNSPECIFIED FileType = 0 + // Includes all file extensions not covered by text file types. + FileType_BINARY_FILE FileType = 1 + // Included file extensions: + // asc, brf, c, cc, cpp, csv, cxx, c++, cs, css, dart, eml, go, h, hh, hpp, + // hxx, h++, hs, html, htm, shtml, shtm, xhtml, lhs, ini, java, js, json, + // ocaml, md, mkd, markdown, m, ml, mli, pl, pm, php, phtml, pht, py, pyw, + // rb, rbw, rs, rc, scala, sh, sql, tex, txt, text, tsv, vcard, vcs, wml, + // xml, xsl, xsd, yml, yaml. + FileType_TEXT_FILE FileType = 2 + // Included file extensions: + // bmp, gif, jpg, jpeg, jpe, png. + // bytes_limit_per_file has no effect on image files. + FileType_IMAGE FileType = 3 +) + +var FileType_name = map[int32]string{ + 0: "FILE_TYPE_UNSPECIFIED", + 1: "BINARY_FILE", + 2: "TEXT_FILE", + 3: "IMAGE", +} +var FileType_value = map[string]int32{ + "FILE_TYPE_UNSPECIFIED": 0, + "BINARY_FILE": 1, + "TEXT_FILE": 2, + "IMAGE": 3, +} + +func (x FileType) String() string { + return proto.EnumName(FileType_name, int32(x)) +} +func (FileType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{1} +} + +type CustomInfoType_ExclusionType int32 + +const ( + // A finding of this custom info type will not be excluded from results. + CustomInfoType_EXCLUSION_TYPE_UNSPECIFIED CustomInfoType_ExclusionType = 0 + // A finding of this custom info type will be excluded from final results, + // but can still affect rule execution. + CustomInfoType_EXCLUSION_TYPE_EXCLUDE CustomInfoType_ExclusionType = 1 +) + +var CustomInfoType_ExclusionType_name = map[int32]string{ + 0: "EXCLUSION_TYPE_UNSPECIFIED", + 1: "EXCLUSION_TYPE_EXCLUDE", +} +var CustomInfoType_ExclusionType_value = map[string]int32{ + "EXCLUSION_TYPE_UNSPECIFIED": 0, + "EXCLUSION_TYPE_EXCLUDE": 1, +} + +func (x CustomInfoType_ExclusionType) String() string { + return proto.EnumName(CustomInfoType_ExclusionType_name, int32(x)) +} +func (CustomInfoType_ExclusionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 0} +} + +// How to sample bytes if not all bytes are scanned. 
Meaningful only when used +// in conjunction with bytes_limit_per_file. If not specified, scanning would +// start from the top. +type CloudStorageOptions_SampleMethod int32 + +const ( + CloudStorageOptions_SAMPLE_METHOD_UNSPECIFIED CloudStorageOptions_SampleMethod = 0 + // Scan from the top (default). + CloudStorageOptions_TOP CloudStorageOptions_SampleMethod = 1 + // For each file larger than bytes_limit_per_file, randomly pick the offset + // to start scanning. The scanned bytes are contiguous. + CloudStorageOptions_RANDOM_START CloudStorageOptions_SampleMethod = 2 +) + +var CloudStorageOptions_SampleMethod_name = map[int32]string{ + 0: "SAMPLE_METHOD_UNSPECIFIED", + 1: "TOP", + 2: "RANDOM_START", +} +var CloudStorageOptions_SampleMethod_value = map[string]int32{ + "SAMPLE_METHOD_UNSPECIFIED": 0, + "TOP": 1, + "RANDOM_START": 2, +} + +func (x CloudStorageOptions_SampleMethod) String() string { + return proto.EnumName(CloudStorageOptions_SampleMethod_name, int32(x)) +} +func (CloudStorageOptions_SampleMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{8, 0} +} + +// How to sample rows if not all rows are scanned. Meaningful only when used +// in conjunction with either rows_limit or rows_limit_percent. If not +// specified, scanning would start from the top. +type BigQueryOptions_SampleMethod int32 + +const ( + BigQueryOptions_SAMPLE_METHOD_UNSPECIFIED BigQueryOptions_SampleMethod = 0 + // Scan from the top (default). + BigQueryOptions_TOP BigQueryOptions_SampleMethod = 1 + // Randomly pick the row to start scanning. The scanned rows are contiguous. + BigQueryOptions_RANDOM_START BigQueryOptions_SampleMethod = 2 +) + +var BigQueryOptions_SampleMethod_name = map[int32]string{ + 0: "SAMPLE_METHOD_UNSPECIFIED", + 1: "TOP", + 2: "RANDOM_START", +} +var BigQueryOptions_SampleMethod_value = map[string]int32{ + "SAMPLE_METHOD_UNSPECIFIED": 0, + "TOP": 1, + "RANDOM_START": 2, +} + +func (x BigQueryOptions_SampleMethod) String() string { + return proto.EnumName(BigQueryOptions_SampleMethod_name, int32(x)) +} +func (BigQueryOptions_SampleMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{11, 0} +} + +// Type of information detected by the API. +type InfoType struct { + // Name of the information type. Either a name of your choosing when + // creating a CustomInfoType, or one of the names listed + // at https://cloud.google.com/dlp/docs/infotypes-reference when specifying + // a built-in type. InfoType names should conform to the pattern + // [a-zA-Z0-9_]{1,64}. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InfoType) Reset() { *m = InfoType{} } +func (m *InfoType) String() string { return proto.CompactTextString(m) } +func (*InfoType) ProtoMessage() {} +func (*InfoType) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{0} +} +func (m *InfoType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InfoType.Unmarshal(m, b) +} +func (m *InfoType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InfoType.Marshal(b, m, deterministic) +} +func (dst *InfoType) XXX_Merge(src proto.Message) { + xxx_messageInfo_InfoType.Merge(dst, src) +} +func (m *InfoType) XXX_Size() int { + return xxx_messageInfo_InfoType.Size(m) +} +func (m *InfoType) XXX_DiscardUnknown() { + xxx_messageInfo_InfoType.DiscardUnknown(m) +} + +var xxx_messageInfo_InfoType proto.InternalMessageInfo + +func (m *InfoType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A reference to a StoredInfoType to use with scanning. +type StoredType struct { + // Resource name of the requested `StoredInfoType`, for example + // `organizations/433245324/storedInfoTypes/432452342` or + // `projects/project-id/storedInfoTypes/432452342`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Timestamp indicating when the version of the `StoredInfoType` used for + // inspection was created. Output-only field, populated by the system. + CreateTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StoredType) Reset() { *m = StoredType{} } +func (m *StoredType) String() string { return proto.CompactTextString(m) } +func (*StoredType) ProtoMessage() {} +func (*StoredType) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{1} +} +func (m *StoredType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StoredType.Unmarshal(m, b) +} +func (m *StoredType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StoredType.Marshal(b, m, deterministic) +} +func (dst *StoredType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StoredType.Merge(dst, src) +} +func (m *StoredType) XXX_Size() int { + return xxx_messageInfo_StoredType.Size(m) +} +func (m *StoredType) XXX_DiscardUnknown() { + xxx_messageInfo_StoredType.DiscardUnknown(m) +} + +var xxx_messageInfo_StoredType proto.InternalMessageInfo + +func (m *StoredType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StoredType) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +// Custom information type provided by the user. Used to find domain-specific +// sensitive information configurable to the data in question. +type CustomInfoType struct { + // CustomInfoType can either be a new infoType, or an extension of built-in + // infoType, when the name matches one of existing infoTypes and that infoType + // is specified in `InspectContent.info_types` field. Specifying the latter + // adds findings to the one detected by the system. 
If built-in info type is + // not specified in `InspectContent.info_types` list then the name is treated + // as a custom info type. + InfoType *InfoType `protobuf:"bytes,1,opt,name=info_type,json=infoType,proto3" json:"info_type,omitempty"` + // Likelihood to return for this CustomInfoType. This base value can be + // altered by a detection rule if the finding meets the criteria specified by + // the rule. Defaults to `VERY_LIKELY` if not specified. + Likelihood Likelihood `protobuf:"varint,6,opt,name=likelihood,proto3,enum=google.privacy.dlp.v2.Likelihood" json:"likelihood,omitempty"` + // Types that are valid to be assigned to Type: + // *CustomInfoType_Dictionary_ + // *CustomInfoType_Regex_ + // *CustomInfoType_SurrogateType_ + // *CustomInfoType_StoredType + Type isCustomInfoType_Type `protobuf_oneof:"type"` + // Set of detection rules to apply to all findings of this CustomInfoType. + // Rules are applied in order that they are specified. Not supported for the + // `surrogate_type` CustomInfoType. + DetectionRules []*CustomInfoType_DetectionRule `protobuf:"bytes,7,rep,name=detection_rules,json=detectionRules,proto3" json:"detection_rules,omitempty"` + // If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause a finding + // to be returned. It still can be used for rules matching. + ExclusionType CustomInfoType_ExclusionType `protobuf:"varint,8,opt,name=exclusion_type,json=exclusionType,proto3,enum=google.privacy.dlp.v2.CustomInfoType_ExclusionType" json:"exclusion_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType) Reset() { *m = CustomInfoType{} } +func (m *CustomInfoType) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType) ProtoMessage() {} +func (*CustomInfoType) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2} +} +func (m *CustomInfoType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType.Unmarshal(m, b) +} +func (m *CustomInfoType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType.Merge(dst, src) +} +func (m *CustomInfoType) XXX_Size() int { + return xxx_messageInfo_CustomInfoType.Size(m) +} +func (m *CustomInfoType) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType proto.InternalMessageInfo + +func (m *CustomInfoType) GetInfoType() *InfoType { + if m != nil { + return m.InfoType + } + return nil +} + +func (m *CustomInfoType) GetLikelihood() Likelihood { + if m != nil { + return m.Likelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +type isCustomInfoType_Type interface { + isCustomInfoType_Type() +} + +type CustomInfoType_Dictionary_ struct { + Dictionary *CustomInfoType_Dictionary `protobuf:"bytes,2,opt,name=dictionary,proto3,oneof"` +} + +type CustomInfoType_Regex_ struct { + Regex *CustomInfoType_Regex `protobuf:"bytes,3,opt,name=regex,proto3,oneof"` +} + +type CustomInfoType_SurrogateType_ struct { + SurrogateType *CustomInfoType_SurrogateType `protobuf:"bytes,4,opt,name=surrogate_type,json=surrogateType,proto3,oneof"` +} + +type CustomInfoType_StoredType struct { + StoredType *StoredType `protobuf:"bytes,5,opt,name=stored_type,json=storedType,proto3,oneof"` +} + +func (*CustomInfoType_Dictionary_) isCustomInfoType_Type() {} + 
+func (*CustomInfoType_Regex_) isCustomInfoType_Type() {} + +func (*CustomInfoType_SurrogateType_) isCustomInfoType_Type() {} + +func (*CustomInfoType_StoredType) isCustomInfoType_Type() {} + +func (m *CustomInfoType) GetType() isCustomInfoType_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *CustomInfoType) GetDictionary() *CustomInfoType_Dictionary { + if x, ok := m.GetType().(*CustomInfoType_Dictionary_); ok { + return x.Dictionary + } + return nil +} + +func (m *CustomInfoType) GetRegex() *CustomInfoType_Regex { + if x, ok := m.GetType().(*CustomInfoType_Regex_); ok { + return x.Regex + } + return nil +} + +func (m *CustomInfoType) GetSurrogateType() *CustomInfoType_SurrogateType { + if x, ok := m.GetType().(*CustomInfoType_SurrogateType_); ok { + return x.SurrogateType + } + return nil +} + +func (m *CustomInfoType) GetStoredType() *StoredType { + if x, ok := m.GetType().(*CustomInfoType_StoredType); ok { + return x.StoredType + } + return nil +} + +func (m *CustomInfoType) GetDetectionRules() []*CustomInfoType_DetectionRule { + if m != nil { + return m.DetectionRules + } + return nil +} + +func (m *CustomInfoType) GetExclusionType() CustomInfoType_ExclusionType { + if m != nil { + return m.ExclusionType + } + return CustomInfoType_EXCLUSION_TYPE_UNSPECIFIED +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CustomInfoType) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomInfoType_OneofMarshaler, _CustomInfoType_OneofUnmarshaler, _CustomInfoType_OneofSizer, []interface{}{ + (*CustomInfoType_Dictionary_)(nil), + (*CustomInfoType_Regex_)(nil), + (*CustomInfoType_SurrogateType_)(nil), + (*CustomInfoType_StoredType)(nil), + } +} + +func _CustomInfoType_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomInfoType) + // type + switch x := m.Type.(type) { + case *CustomInfoType_Dictionary_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Dictionary); err != nil { + return err + } + case *CustomInfoType_Regex_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Regex); err != nil { + return err + } + case *CustomInfoType_SurrogateType_: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SurrogateType); err != nil { + return err + } + case *CustomInfoType_StoredType: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StoredType); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomInfoType.Type has unexpected type %T", x) + } + return nil +} + +func _CustomInfoType_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomInfoType) + switch tag { + case 2: // type.dictionary + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_Dictionary) + err := b.DecodeMessage(msg) + m.Type = &CustomInfoType_Dictionary_{msg} + return true, err + case 3: // type.regex + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_Regex) + err := b.DecodeMessage(msg) + m.Type = &CustomInfoType_Regex_{msg} + return true, err + case 4: // type.surrogate_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_SurrogateType) + err := b.DecodeMessage(msg) + m.Type = 
&CustomInfoType_SurrogateType_{msg} + return true, err + case 5: // type.stored_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StoredType) + err := b.DecodeMessage(msg) + m.Type = &CustomInfoType_StoredType{msg} + return true, err + default: + return false, nil + } +} + +func _CustomInfoType_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomInfoType) + // type + switch x := m.Type.(type) { + case *CustomInfoType_Dictionary_: + s := proto.Size(x.Dictionary) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomInfoType_Regex_: + s := proto.Size(x.Regex) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomInfoType_SurrogateType_: + s := proto.Size(x.SurrogateType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomInfoType_StoredType: + s := proto.Size(x.StoredType) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Custom information type based on a dictionary of words or phrases. This can +// be used to match sensitive information specific to the data, such as a list +// of employee IDs or job titles. +// +// Dictionary words are case-insensitive and all characters other than letters +// and digits in the unicode [Basic Multilingual +// Plane](https://en.wikipedia.org/wiki/Plane_%28Unicode%29#Basic_Multilingual_Plane) +// will be replaced with whitespace when scanning for matches, so the +// dictionary phrase "Sam Johnson" will match all three phrases "sam johnson", +// "Sam, Johnson", and "Sam (Johnson)". Additionally, the characters +// surrounding any match must be of a different type than the adjacent +// characters within the word, so letters must be next to non-letters and +// digits next to non-digits. For example, the dictionary word "jen" will +// match the first three letters of the text "jen123" but will return no +// matches for "jennifer". +// +// Dictionary words containing a large number of characters that are not +// letters or digits may result in unexpected findings because such characters +// are treated as whitespace. The +// [limits](https://cloud.google.com/dlp/limits) page contains details about +// the size limits of dictionaries. For dictionaries that do not fit within +// these constraints, consider using `LargeCustomDictionaryConfig` in the +// `StoredInfoType` API. 
+type CustomInfoType_Dictionary struct { + // Types that are valid to be assigned to Source: + // *CustomInfoType_Dictionary_WordList_ + // *CustomInfoType_Dictionary_CloudStoragePath + Source isCustomInfoType_Dictionary_Source `protobuf_oneof:"source"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_Dictionary) Reset() { *m = CustomInfoType_Dictionary{} } +func (m *CustomInfoType_Dictionary) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_Dictionary) ProtoMessage() {} +func (*CustomInfoType_Dictionary) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 0} +} +func (m *CustomInfoType_Dictionary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_Dictionary.Unmarshal(m, b) +} +func (m *CustomInfoType_Dictionary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_Dictionary.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_Dictionary) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_Dictionary.Merge(dst, src) +} +func (m *CustomInfoType_Dictionary) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_Dictionary.Size(m) +} +func (m *CustomInfoType_Dictionary) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_Dictionary.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_Dictionary proto.InternalMessageInfo + +type isCustomInfoType_Dictionary_Source interface { + isCustomInfoType_Dictionary_Source() +} + +type CustomInfoType_Dictionary_WordList_ struct { + WordList *CustomInfoType_Dictionary_WordList `protobuf:"bytes,1,opt,name=word_list,json=wordList,proto3,oneof"` +} + +type CustomInfoType_Dictionary_CloudStoragePath struct { + CloudStoragePath *CloudStoragePath `protobuf:"bytes,3,opt,name=cloud_storage_path,json=cloudStoragePath,proto3,oneof"` +} + +func (*CustomInfoType_Dictionary_WordList_) isCustomInfoType_Dictionary_Source() {} + +func (*CustomInfoType_Dictionary_CloudStoragePath) isCustomInfoType_Dictionary_Source() {} + +func (m *CustomInfoType_Dictionary) GetSource() isCustomInfoType_Dictionary_Source { + if m != nil { + return m.Source + } + return nil +} + +func (m *CustomInfoType_Dictionary) GetWordList() *CustomInfoType_Dictionary_WordList { + if x, ok := m.GetSource().(*CustomInfoType_Dictionary_WordList_); ok { + return x.WordList + } + return nil +} + +func (m *CustomInfoType_Dictionary) GetCloudStoragePath() *CloudStoragePath { + if x, ok := m.GetSource().(*CustomInfoType_Dictionary_CloudStoragePath); ok { + return x.CloudStoragePath + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomInfoType_Dictionary) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomInfoType_Dictionary_OneofMarshaler, _CustomInfoType_Dictionary_OneofUnmarshaler, _CustomInfoType_Dictionary_OneofSizer, []interface{}{ + (*CustomInfoType_Dictionary_WordList_)(nil), + (*CustomInfoType_Dictionary_CloudStoragePath)(nil), + } +} + +func _CustomInfoType_Dictionary_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomInfoType_Dictionary) + // source + switch x := m.Source.(type) { + case *CustomInfoType_Dictionary_WordList_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.WordList); err != nil { + return err + } + case *CustomInfoType_Dictionary_CloudStoragePath: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudStoragePath); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomInfoType_Dictionary.Source has unexpected type %T", x) + } + return nil +} + +func _CustomInfoType_Dictionary_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomInfoType_Dictionary) + switch tag { + case 1: // source.word_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_Dictionary_WordList) + err := b.DecodeMessage(msg) + m.Source = &CustomInfoType_Dictionary_WordList_{msg} + return true, err + case 3: // source.cloud_storage_path + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudStoragePath) + err := b.DecodeMessage(msg) + m.Source = &CustomInfoType_Dictionary_CloudStoragePath{msg} + return true, err + default: + return false, nil + } +} + +func _CustomInfoType_Dictionary_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomInfoType_Dictionary) + // source + switch x := m.Source.(type) { + case *CustomInfoType_Dictionary_WordList_: + s := proto.Size(x.WordList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CustomInfoType_Dictionary_CloudStoragePath: + s := proto.Size(x.CloudStoragePath) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message defining a list of words or phrases to search for in the data. +type CustomInfoType_Dictionary_WordList struct { + // Words or phrases defining the dictionary. The dictionary must contain + // at least one phrase and every phrase must contain at least 2 characters + // that are letters or digits. 
[required] + Words []string `protobuf:"bytes,1,rep,name=words,proto3" json:"words,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_Dictionary_WordList) Reset() { *m = CustomInfoType_Dictionary_WordList{} } +func (m *CustomInfoType_Dictionary_WordList) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_Dictionary_WordList) ProtoMessage() {} +func (*CustomInfoType_Dictionary_WordList) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 0, 0} +} +func (m *CustomInfoType_Dictionary_WordList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_Dictionary_WordList.Unmarshal(m, b) +} +func (m *CustomInfoType_Dictionary_WordList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_Dictionary_WordList.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_Dictionary_WordList) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_Dictionary_WordList.Merge(dst, src) +} +func (m *CustomInfoType_Dictionary_WordList) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_Dictionary_WordList.Size(m) +} +func (m *CustomInfoType_Dictionary_WordList) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_Dictionary_WordList.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_Dictionary_WordList proto.InternalMessageInfo + +func (m *CustomInfoType_Dictionary_WordList) GetWords() []string { + if m != nil { + return m.Words + } + return nil +} + +// Message defining a custom regular expression. +type CustomInfoType_Regex struct { + // Pattern defining the regular expression. Its syntax + // (https://github.com/google/re2/wiki/Syntax) can be found under the + // google/re2 repository on GitHub. + Pattern string `protobuf:"bytes,1,opt,name=pattern,proto3" json:"pattern,omitempty"` + // The index of the submatch to extract as findings. When not + // specified, the entire match is returned. No more than 3 may be included. 
+ GroupIndexes []int32 `protobuf:"varint,2,rep,packed,name=group_indexes,json=groupIndexes,proto3" json:"group_indexes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_Regex) Reset() { *m = CustomInfoType_Regex{} } +func (m *CustomInfoType_Regex) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_Regex) ProtoMessage() {} +func (*CustomInfoType_Regex) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 1} +} +func (m *CustomInfoType_Regex) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_Regex.Unmarshal(m, b) +} +func (m *CustomInfoType_Regex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_Regex.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_Regex) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_Regex.Merge(dst, src) +} +func (m *CustomInfoType_Regex) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_Regex.Size(m) +} +func (m *CustomInfoType_Regex) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_Regex.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_Regex proto.InternalMessageInfo + +func (m *CustomInfoType_Regex) GetPattern() string { + if m != nil { + return m.Pattern + } + return "" +} + +func (m *CustomInfoType_Regex) GetGroupIndexes() []int32 { + if m != nil { + return m.GroupIndexes + } + return nil +} + +// Message for detecting output from deidentification transformations +// such as +// [`CryptoReplaceFfxFpeConfig`](/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig). +// These types of transformations are +// those that perform pseudonymization, thereby producing a "surrogate" as +// output. This should be used in conjunction with a field on the +// transformation such as `surrogate_info_type`. This CustomInfoType does +// not support the use of `detection_rules`. +type CustomInfoType_SurrogateType struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_SurrogateType) Reset() { *m = CustomInfoType_SurrogateType{} } +func (m *CustomInfoType_SurrogateType) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_SurrogateType) ProtoMessage() {} +func (*CustomInfoType_SurrogateType) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 2} +} +func (m *CustomInfoType_SurrogateType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_SurrogateType.Unmarshal(m, b) +} +func (m *CustomInfoType_SurrogateType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_SurrogateType.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_SurrogateType) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_SurrogateType.Merge(dst, src) +} +func (m *CustomInfoType_SurrogateType) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_SurrogateType.Size(m) +} +func (m *CustomInfoType_SurrogateType) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_SurrogateType.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_SurrogateType proto.InternalMessageInfo + +// Rule for modifying a CustomInfoType to alter behavior under certain +// circumstances, depending on the specific details of the rule. 
Not supported +// for the `surrogate_type` custom info type. +type CustomInfoType_DetectionRule struct { + // Types that are valid to be assigned to Type: + // *CustomInfoType_DetectionRule_HotwordRule_ + Type isCustomInfoType_DetectionRule_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_DetectionRule) Reset() { *m = CustomInfoType_DetectionRule{} } +func (m *CustomInfoType_DetectionRule) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_DetectionRule) ProtoMessage() {} +func (*CustomInfoType_DetectionRule) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 3} +} +func (m *CustomInfoType_DetectionRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_DetectionRule.Unmarshal(m, b) +} +func (m *CustomInfoType_DetectionRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_DetectionRule.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_DetectionRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_DetectionRule.Merge(dst, src) +} +func (m *CustomInfoType_DetectionRule) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_DetectionRule.Size(m) +} +func (m *CustomInfoType_DetectionRule) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_DetectionRule.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_DetectionRule proto.InternalMessageInfo + +type isCustomInfoType_DetectionRule_Type interface { + isCustomInfoType_DetectionRule_Type() +} + +type CustomInfoType_DetectionRule_HotwordRule_ struct { + HotwordRule *CustomInfoType_DetectionRule_HotwordRule `protobuf:"bytes,1,opt,name=hotword_rule,json=hotwordRule,proto3,oneof"` +} + +func (*CustomInfoType_DetectionRule_HotwordRule_) isCustomInfoType_DetectionRule_Type() {} + +func (m *CustomInfoType_DetectionRule) GetType() isCustomInfoType_DetectionRule_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *CustomInfoType_DetectionRule) GetHotwordRule() *CustomInfoType_DetectionRule_HotwordRule { + if x, ok := m.GetType().(*CustomInfoType_DetectionRule_HotwordRule_); ok { + return x.HotwordRule + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CustomInfoType_DetectionRule) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomInfoType_DetectionRule_OneofMarshaler, _CustomInfoType_DetectionRule_OneofUnmarshaler, _CustomInfoType_DetectionRule_OneofSizer, []interface{}{ + (*CustomInfoType_DetectionRule_HotwordRule_)(nil), + } +} + +func _CustomInfoType_DetectionRule_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomInfoType_DetectionRule) + // type + switch x := m.Type.(type) { + case *CustomInfoType_DetectionRule_HotwordRule_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HotwordRule); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CustomInfoType_DetectionRule.Type has unexpected type %T", x) + } + return nil +} + +func _CustomInfoType_DetectionRule_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomInfoType_DetectionRule) + switch tag { + case 1: // type.hotword_rule + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CustomInfoType_DetectionRule_HotwordRule) + err := b.DecodeMessage(msg) + m.Type = &CustomInfoType_DetectionRule_HotwordRule_{msg} + return true, err + default: + return false, nil + } +} + +func _CustomInfoType_DetectionRule_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomInfoType_DetectionRule) + // type + switch x := m.Type.(type) { + case *CustomInfoType_DetectionRule_HotwordRule_: + s := proto.Size(x.HotwordRule) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message for specifying a window around a finding to apply a detection +// rule. +type CustomInfoType_DetectionRule_Proximity struct { + // Number of characters before the finding to consider. + WindowBefore int32 `protobuf:"varint,1,opt,name=window_before,json=windowBefore,proto3" json:"window_before,omitempty"` + // Number of characters after the finding to consider. 
+ WindowAfter int32 `protobuf:"varint,2,opt,name=window_after,json=windowAfter,proto3" json:"window_after,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_DetectionRule_Proximity) Reset() { + *m = CustomInfoType_DetectionRule_Proximity{} +} +func (m *CustomInfoType_DetectionRule_Proximity) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_DetectionRule_Proximity) ProtoMessage() {} +func (*CustomInfoType_DetectionRule_Proximity) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 3, 0} +} +func (m *CustomInfoType_DetectionRule_Proximity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_DetectionRule_Proximity.Unmarshal(m, b) +} +func (m *CustomInfoType_DetectionRule_Proximity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_DetectionRule_Proximity.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_DetectionRule_Proximity) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_DetectionRule_Proximity.Merge(dst, src) +} +func (m *CustomInfoType_DetectionRule_Proximity) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_DetectionRule_Proximity.Size(m) +} +func (m *CustomInfoType_DetectionRule_Proximity) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_DetectionRule_Proximity.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_DetectionRule_Proximity proto.InternalMessageInfo + +func (m *CustomInfoType_DetectionRule_Proximity) GetWindowBefore() int32 { + if m != nil { + return m.WindowBefore + } + return 0 +} + +func (m *CustomInfoType_DetectionRule_Proximity) GetWindowAfter() int32 { + if m != nil { + return m.WindowAfter + } + return 0 +} + +// Message for specifying an adjustment to the likelihood of a finding as +// part of a detection rule. 
+type CustomInfoType_DetectionRule_LikelihoodAdjustment struct { + // Types that are valid to be assigned to Adjustment: + // *CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood + // *CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood + Adjustment isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment `protobuf_oneof:"adjustment"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) Reset() { + *m = CustomInfoType_DetectionRule_LikelihoodAdjustment{} +} +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) String() string { + return proto.CompactTextString(m) +} +func (*CustomInfoType_DetectionRule_LikelihoodAdjustment) ProtoMessage() {} +func (*CustomInfoType_DetectionRule_LikelihoodAdjustment) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 3, 1} +} +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment.Unmarshal(m, b) +} +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment.Merge(dst, src) +} +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment.Size(m) +} +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_DetectionRule_LikelihoodAdjustment proto.InternalMessageInfo + +type isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment interface { + isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment() +} + +type CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood struct { + FixedLikelihood Likelihood `protobuf:"varint,1,opt,name=fixed_likelihood,json=fixedLikelihood,proto3,enum=google.privacy.dlp.v2.Likelihood,oneof"` +} + +type CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood struct { + RelativeLikelihood int32 `protobuf:"varint,2,opt,name=relative_likelihood,json=relativeLikelihood,proto3,oneof"` +} + +func (*CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood) isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment() { +} + +func (*CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood) isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment() { +} + +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) GetAdjustment() isCustomInfoType_DetectionRule_LikelihoodAdjustment_Adjustment { + if m != nil { + return m.Adjustment + } + return nil +} + +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) GetFixedLikelihood() Likelihood { + if x, ok := m.GetAdjustment().(*CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood); ok { + return x.FixedLikelihood + } + return Likelihood_LIKELIHOOD_UNSPECIFIED +} + +func (m *CustomInfoType_DetectionRule_LikelihoodAdjustment) GetRelativeLikelihood() int32 { + if x, ok := 
m.GetAdjustment().(*CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood); ok { + return x.RelativeLikelihood + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*CustomInfoType_DetectionRule_LikelihoodAdjustment) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofMarshaler, _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofUnmarshaler, _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofSizer, []interface{}{ + (*CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood)(nil), + (*CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood)(nil), + } +} + +func _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CustomInfoType_DetectionRule_LikelihoodAdjustment) + // adjustment + switch x := m.Adjustment.(type) { + case *CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood: + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.FixedLikelihood)) + case *CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RelativeLikelihood)) + case nil: + default: + return fmt.Errorf("CustomInfoType_DetectionRule_LikelihoodAdjustment.Adjustment has unexpected type %T", x) + } + return nil +} + +func _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CustomInfoType_DetectionRule_LikelihoodAdjustment) + switch tag { + case 1: // adjustment.fixed_likelihood + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Adjustment = &CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood{Likelihood(x)} + return true, err + case 2: // adjustment.relative_likelihood + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Adjustment = &CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood{int32(x)} + return true, err + default: + return false, nil + } +} + +func _CustomInfoType_DetectionRule_LikelihoodAdjustment_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CustomInfoType_DetectionRule_LikelihoodAdjustment) + // adjustment + switch x := m.Adjustment.(type) { + case *CustomInfoType_DetectionRule_LikelihoodAdjustment_FixedLikelihood: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.FixedLikelihood)) + case *CustomInfoType_DetectionRule_LikelihoodAdjustment_RelativeLikelihood: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RelativeLikelihood)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The rule that adjusts the likelihood of findings within a certain +// proximity of hotwords. +type CustomInfoType_DetectionRule_HotwordRule struct { + // Regular expression pattern defining what qualifies as a hotword. + HotwordRegex *CustomInfoType_Regex `protobuf:"bytes,1,opt,name=hotword_regex,json=hotwordRegex,proto3" json:"hotword_regex,omitempty"` + // Proximity of the finding within which the entire hotword must reside. + // The total length of the window cannot exceed 1000 characters. 
Note that + // the finding itself will be included in the window, so that hotwords may + // be used to match substrings of the finding itself. For example, the + // certainty of a phone number regex "\(\d{3}\) \d{3}-\d{4}" could be + // adjusted upwards if the area code is known to be the local area code of + // a company office using the hotword regex "\(xxx\)", where "xxx" + // is the area code in question. + Proximity *CustomInfoType_DetectionRule_Proximity `protobuf:"bytes,2,opt,name=proximity,proto3" json:"proximity,omitempty"` + // Likelihood adjustment to apply to all matching findings. + LikelihoodAdjustment *CustomInfoType_DetectionRule_LikelihoodAdjustment `protobuf:"bytes,3,opt,name=likelihood_adjustment,json=likelihoodAdjustment,proto3" json:"likelihood_adjustment,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CustomInfoType_DetectionRule_HotwordRule) Reset() { + *m = CustomInfoType_DetectionRule_HotwordRule{} +} +func (m *CustomInfoType_DetectionRule_HotwordRule) String() string { return proto.CompactTextString(m) } +func (*CustomInfoType_DetectionRule_HotwordRule) ProtoMessage() {} +func (*CustomInfoType_DetectionRule_HotwordRule) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{2, 3, 2} +} +func (m *CustomInfoType_DetectionRule_HotwordRule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule.Unmarshal(m, b) +} +func (m *CustomInfoType_DetectionRule_HotwordRule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule.Marshal(b, m, deterministic) +} +func (dst *CustomInfoType_DetectionRule_HotwordRule) XXX_Merge(src proto.Message) { + xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule.Merge(dst, src) +} +func (m *CustomInfoType_DetectionRule_HotwordRule) XXX_Size() int { + return xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule.Size(m) +} +func (m *CustomInfoType_DetectionRule_HotwordRule) XXX_DiscardUnknown() { + xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule.DiscardUnknown(m) +} + +var xxx_messageInfo_CustomInfoType_DetectionRule_HotwordRule proto.InternalMessageInfo + +func (m *CustomInfoType_DetectionRule_HotwordRule) GetHotwordRegex() *CustomInfoType_Regex { + if m != nil { + return m.HotwordRegex + } + return nil +} + +func (m *CustomInfoType_DetectionRule_HotwordRule) GetProximity() *CustomInfoType_DetectionRule_Proximity { + if m != nil { + return m.Proximity + } + return nil +} + +func (m *CustomInfoType_DetectionRule_HotwordRule) GetLikelihoodAdjustment() *CustomInfoType_DetectionRule_LikelihoodAdjustment { + if m != nil { + return m.LikelihoodAdjustment + } + return nil +} + +// General identifier of a data field in a storage service. +type FieldId struct { + // Name describing the field. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldId) Reset() { *m = FieldId{} } +func (m *FieldId) String() string { return proto.CompactTextString(m) } +func (*FieldId) ProtoMessage() {} +func (*FieldId) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{3} +} +func (m *FieldId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldId.Unmarshal(m, b) +} +func (m *FieldId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldId.Marshal(b, m, deterministic) +} +func (dst *FieldId) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldId.Merge(dst, src) +} +func (m *FieldId) XXX_Size() int { + return xxx_messageInfo_FieldId.Size(m) +} +func (m *FieldId) XXX_DiscardUnknown() { + xxx_messageInfo_FieldId.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldId proto.InternalMessageInfo + +func (m *FieldId) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Datastore partition ID. +// A partition ID identifies a grouping of entities. The grouping is always +// by project and namespace, however the namespace ID may be empty. +// +// A partition ID contains several dimensions: +// project ID and namespace ID. +type PartitionId struct { + // The ID of the project to which the entities belong. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // If not empty, the ID of the namespace to which the entities belong. + NamespaceId string `protobuf:"bytes,4,opt,name=namespace_id,json=namespaceId,proto3" json:"namespace_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionId) Reset() { *m = PartitionId{} } +func (m *PartitionId) String() string { return proto.CompactTextString(m) } +func (*PartitionId) ProtoMessage() {} +func (*PartitionId) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{4} +} +func (m *PartitionId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionId.Unmarshal(m, b) +} +func (m *PartitionId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionId.Marshal(b, m, deterministic) +} +func (dst *PartitionId) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionId.Merge(dst, src) +} +func (m *PartitionId) XXX_Size() int { + return xxx_messageInfo_PartitionId.Size(m) +} +func (m *PartitionId) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionId.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionId proto.InternalMessageInfo + +func (m *PartitionId) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *PartitionId) GetNamespaceId() string { + if m != nil { + return m.NamespaceId + } + return "" +} + +// A representation of a Datastore kind. +type KindExpression struct { + // The name of the kind. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KindExpression) Reset() { *m = KindExpression{} } +func (m *KindExpression) String() string { return proto.CompactTextString(m) } +func (*KindExpression) ProtoMessage() {} +func (*KindExpression) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{5} +} +func (m *KindExpression) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KindExpression.Unmarshal(m, b) +} +func (m *KindExpression) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KindExpression.Marshal(b, m, deterministic) +} +func (dst *KindExpression) XXX_Merge(src proto.Message) { + xxx_messageInfo_KindExpression.Merge(dst, src) +} +func (m *KindExpression) XXX_Size() int { + return xxx_messageInfo_KindExpression.Size(m) +} +func (m *KindExpression) XXX_DiscardUnknown() { + xxx_messageInfo_KindExpression.DiscardUnknown(m) +} + +var xxx_messageInfo_KindExpression proto.InternalMessageInfo + +func (m *KindExpression) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Options defining a data set within Google Cloud Datastore. +type DatastoreOptions struct { + // A partition ID identifies a grouping of entities. The grouping is always + // by project and namespace, however the namespace ID may be empty. + PartitionId *PartitionId `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The kind to process. + Kind *KindExpression `protobuf:"bytes,2,opt,name=kind,proto3" json:"kind,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatastoreOptions) Reset() { *m = DatastoreOptions{} } +func (m *DatastoreOptions) String() string { return proto.CompactTextString(m) } +func (*DatastoreOptions) ProtoMessage() {} +func (*DatastoreOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{6} +} +func (m *DatastoreOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatastoreOptions.Unmarshal(m, b) +} +func (m *DatastoreOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatastoreOptions.Marshal(b, m, deterministic) +} +func (dst *DatastoreOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatastoreOptions.Merge(dst, src) +} +func (m *DatastoreOptions) XXX_Size() int { + return xxx_messageInfo_DatastoreOptions.Size(m) +} +func (m *DatastoreOptions) XXX_DiscardUnknown() { + xxx_messageInfo_DatastoreOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_DatastoreOptions proto.InternalMessageInfo + +func (m *DatastoreOptions) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *DatastoreOptions) GetKind() *KindExpression { + if m != nil { + return m.Kind + } + return nil +} + +// Message representing a set of files in a Cloud Storage bucket. Regular +// expressions are used to allow fine-grained control over which files in the +// bucket to include. +// +// Included files are those that match at least one item in `include_regex` and +// do not match any items in `exclude_regex`. Note that a file that matches +// items from both lists will _not_ be included. 
For a match to occur, the +// entire file path (i.e., everything in the url after the bucket name) must +// match the regular expression. +// +// For example, given the input `{bucket_name: "mybucket", include_regex: +// ["directory1/.*"], exclude_regex: +// ["directory1/excluded.*"]}`: +// +// * `gs://mybucket/directory1/myfile` will be included +// * `gs://mybucket/directory1/directory2/myfile` will be included (`.*` matches +// across `/`) +// * `gs://mybucket/directory0/directory1/myfile` will _not_ be included (the +// full path doesn't match any items in `include_regex`) +// * `gs://mybucket/directory1/excludedfile` will _not_ be included (the path +// matches an item in `exclude_regex`) +// +// If `include_regex` is left empty, it will match all files by default +// (this is equivalent to setting `include_regex: [".*"]`). +// +// Some other common use cases: +// +// * `{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}` will include all +// files in `mybucket` except for .pdf files +// * `{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}` will +// include all files directly under `gs://mybucket/directory/`, without matching +// across `/` +type CloudStorageRegexFileSet struct { + // The name of a Cloud Storage bucket. Required. + BucketName string `protobuf:"bytes,1,opt,name=bucket_name,json=bucketName,proto3" json:"bucket_name,omitempty"` + // A list of regular expressions matching file paths to include. All files in + // the bucket that match at least one of these regular expressions will be + // included in the set of files, except for those that also match an item in + // `exclude_regex`. Leaving this field empty will match all files by default + // (this is equivalent to including `.*` in the list). + // + // Regular expressions use RE2 + // [syntax](https://github.com/google/re2/wiki/Syntax); a guide can be found + // under the google/re2 repository on GitHub. + IncludeRegex []string `protobuf:"bytes,2,rep,name=include_regex,json=includeRegex,proto3" json:"include_regex,omitempty"` + // A list of regular expressions matching file paths to exclude. All files in + // the bucket that match at least one of these regular expressions will be + // excluded from the scan. + // + // Regular expressions use RE2 + // [syntax](https://github.com/google/re2/wiki/Syntax); a guide can be found + // under the google/re2 repository on GitHub. 
+ ExcludeRegex []string `protobuf:"bytes,3,rep,name=exclude_regex,json=excludeRegex,proto3" json:"exclude_regex,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudStorageRegexFileSet) Reset() { *m = CloudStorageRegexFileSet{} } +func (m *CloudStorageRegexFileSet) String() string { return proto.CompactTextString(m) } +func (*CloudStorageRegexFileSet) ProtoMessage() {} +func (*CloudStorageRegexFileSet) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{7} +} +func (m *CloudStorageRegexFileSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudStorageRegexFileSet.Unmarshal(m, b) +} +func (m *CloudStorageRegexFileSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudStorageRegexFileSet.Marshal(b, m, deterministic) +} +func (dst *CloudStorageRegexFileSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudStorageRegexFileSet.Merge(dst, src) +} +func (m *CloudStorageRegexFileSet) XXX_Size() int { + return xxx_messageInfo_CloudStorageRegexFileSet.Size(m) +} +func (m *CloudStorageRegexFileSet) XXX_DiscardUnknown() { + xxx_messageInfo_CloudStorageRegexFileSet.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudStorageRegexFileSet proto.InternalMessageInfo + +func (m *CloudStorageRegexFileSet) GetBucketName() string { + if m != nil { + return m.BucketName + } + return "" +} + +func (m *CloudStorageRegexFileSet) GetIncludeRegex() []string { + if m != nil { + return m.IncludeRegex + } + return nil +} + +func (m *CloudStorageRegexFileSet) GetExcludeRegex() []string { + if m != nil { + return m.ExcludeRegex + } + return nil +} + +// Options defining a file or a set of files within a Google Cloud Storage +// bucket. +type CloudStorageOptions struct { + // The set of one or more files to scan. + FileSet *CloudStorageOptions_FileSet `protobuf:"bytes,1,opt,name=file_set,json=fileSet,proto3" json:"file_set,omitempty"` + // Max number of bytes to scan from a file. If a scanned file's size is bigger + // than this value then the rest of the bytes are omitted. Only one + // of bytes_limit_per_file and bytes_limit_per_file_percent can be specified. + BytesLimitPerFile int64 `protobuf:"varint,4,opt,name=bytes_limit_per_file,json=bytesLimitPerFile,proto3" json:"bytes_limit_per_file,omitempty"` + // Max percentage of bytes to scan from a file. The rest are omitted. The + // number of bytes scanned is rounded down. Must be between 0 and 100, + // inclusively. Both 0 and 100 means no limit. Defaults to 0. Only one + // of bytes_limit_per_file and bytes_limit_per_file_percent can be specified. + BytesLimitPerFilePercent int32 `protobuf:"varint,8,opt,name=bytes_limit_per_file_percent,json=bytesLimitPerFilePercent,proto3" json:"bytes_limit_per_file_percent,omitempty"` + // List of file type groups to include in the scan. + // If empty, all files are scanned and available data format processors + // are applied. In addition, the binary content of the selected files + // is always scanned as well. + FileTypes []FileType `protobuf:"varint,5,rep,packed,name=file_types,json=fileTypes,proto3,enum=google.privacy.dlp.v2.FileType" json:"file_types,omitempty"` + SampleMethod CloudStorageOptions_SampleMethod `protobuf:"varint,6,opt,name=sample_method,json=sampleMethod,proto3,enum=google.privacy.dlp.v2.CloudStorageOptions_SampleMethod" json:"sample_method,omitempty"` + // Limits the number of files to scan to this percentage of the input FileSet. 
+ // Number of files scanned is rounded down. Must be between 0 and 100, + // inclusively. Both 0 and 100 means no limit. Defaults to 0. + FilesLimitPercent int32 `protobuf:"varint,7,opt,name=files_limit_percent,json=filesLimitPercent,proto3" json:"files_limit_percent,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudStorageOptions) Reset() { *m = CloudStorageOptions{} } +func (m *CloudStorageOptions) String() string { return proto.CompactTextString(m) } +func (*CloudStorageOptions) ProtoMessage() {} +func (*CloudStorageOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{8} +} +func (m *CloudStorageOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudStorageOptions.Unmarshal(m, b) +} +func (m *CloudStorageOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudStorageOptions.Marshal(b, m, deterministic) +} +func (dst *CloudStorageOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudStorageOptions.Merge(dst, src) +} +func (m *CloudStorageOptions) XXX_Size() int { + return xxx_messageInfo_CloudStorageOptions.Size(m) +} +func (m *CloudStorageOptions) XXX_DiscardUnknown() { + xxx_messageInfo_CloudStorageOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudStorageOptions proto.InternalMessageInfo + +func (m *CloudStorageOptions) GetFileSet() *CloudStorageOptions_FileSet { + if m != nil { + return m.FileSet + } + return nil +} + +func (m *CloudStorageOptions) GetBytesLimitPerFile() int64 { + if m != nil { + return m.BytesLimitPerFile + } + return 0 +} + +func (m *CloudStorageOptions) GetBytesLimitPerFilePercent() int32 { + if m != nil { + return m.BytesLimitPerFilePercent + } + return 0 +} + +func (m *CloudStorageOptions) GetFileTypes() []FileType { + if m != nil { + return m.FileTypes + } + return nil +} + +func (m *CloudStorageOptions) GetSampleMethod() CloudStorageOptions_SampleMethod { + if m != nil { + return m.SampleMethod + } + return CloudStorageOptions_SAMPLE_METHOD_UNSPECIFIED +} + +func (m *CloudStorageOptions) GetFilesLimitPercent() int32 { + if m != nil { + return m.FilesLimitPercent + } + return 0 +} + +// Set of files to scan. +type CloudStorageOptions_FileSet struct { + // The Cloud Storage url of the file(s) to scan, in the format + // `gs:///`. Trailing wildcard in the path is allowed. + // + // If the url ends in a trailing slash, the bucket or directory represented + // by the url will be scanned non-recursively (content in sub-directories + // will not be scanned). This means that `gs://mybucket/` is equivalent to + // `gs://mybucket/*`, and `gs://mybucket/directory/` is equivalent to + // `gs://mybucket/directory/*`. + // + // Exactly one of `url` or `regex_file_set` must be set. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // The regex-filtered set of files to scan. Exactly one of `url` or + // `regex_file_set` must be set. 
+ RegexFileSet *CloudStorageRegexFileSet `protobuf:"bytes,2,opt,name=regex_file_set,json=regexFileSet,proto3" json:"regex_file_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudStorageOptions_FileSet) Reset() { *m = CloudStorageOptions_FileSet{} } +func (m *CloudStorageOptions_FileSet) String() string { return proto.CompactTextString(m) } +func (*CloudStorageOptions_FileSet) ProtoMessage() {} +func (*CloudStorageOptions_FileSet) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{8, 0} +} +func (m *CloudStorageOptions_FileSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudStorageOptions_FileSet.Unmarshal(m, b) +} +func (m *CloudStorageOptions_FileSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudStorageOptions_FileSet.Marshal(b, m, deterministic) +} +func (dst *CloudStorageOptions_FileSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudStorageOptions_FileSet.Merge(dst, src) +} +func (m *CloudStorageOptions_FileSet) XXX_Size() int { + return xxx_messageInfo_CloudStorageOptions_FileSet.Size(m) +} +func (m *CloudStorageOptions_FileSet) XXX_DiscardUnknown() { + xxx_messageInfo_CloudStorageOptions_FileSet.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudStorageOptions_FileSet proto.InternalMessageInfo + +func (m *CloudStorageOptions_FileSet) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *CloudStorageOptions_FileSet) GetRegexFileSet() *CloudStorageRegexFileSet { + if m != nil { + return m.RegexFileSet + } + return nil +} + +// Message representing a set of files in Cloud Storage. +type CloudStorageFileSet struct { + // The url, in the format `gs:///`. Trailing wildcard in the + // path is allowed. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudStorageFileSet) Reset() { *m = CloudStorageFileSet{} } +func (m *CloudStorageFileSet) String() string { return proto.CompactTextString(m) } +func (*CloudStorageFileSet) ProtoMessage() {} +func (*CloudStorageFileSet) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{9} +} +func (m *CloudStorageFileSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudStorageFileSet.Unmarshal(m, b) +} +func (m *CloudStorageFileSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudStorageFileSet.Marshal(b, m, deterministic) +} +func (dst *CloudStorageFileSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudStorageFileSet.Merge(dst, src) +} +func (m *CloudStorageFileSet) XXX_Size() int { + return xxx_messageInfo_CloudStorageFileSet.Size(m) +} +func (m *CloudStorageFileSet) XXX_DiscardUnknown() { + xxx_messageInfo_CloudStorageFileSet.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudStorageFileSet proto.InternalMessageInfo + +func (m *CloudStorageFileSet) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// Message representing a single file or path in Cloud Storage. +type CloudStoragePath struct { + // A url representing a file or path (no wildcards) in Cloud Storage. 
+ // Example: gs://[BUCKET_NAME]/dictionary.txt + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CloudStoragePath) Reset() { *m = CloudStoragePath{} } +func (m *CloudStoragePath) String() string { return proto.CompactTextString(m) } +func (*CloudStoragePath) ProtoMessage() {} +func (*CloudStoragePath) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{10} +} +func (m *CloudStoragePath) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CloudStoragePath.Unmarshal(m, b) +} +func (m *CloudStoragePath) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CloudStoragePath.Marshal(b, m, deterministic) +} +func (dst *CloudStoragePath) XXX_Merge(src proto.Message) { + xxx_messageInfo_CloudStoragePath.Merge(dst, src) +} +func (m *CloudStoragePath) XXX_Size() int { + return xxx_messageInfo_CloudStoragePath.Size(m) +} +func (m *CloudStoragePath) XXX_DiscardUnknown() { + xxx_messageInfo_CloudStoragePath.DiscardUnknown(m) +} + +var xxx_messageInfo_CloudStoragePath proto.InternalMessageInfo + +func (m *CloudStoragePath) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +// Options defining BigQuery table and row identifiers. +type BigQueryOptions struct { + // Complete BigQuery table reference. + TableReference *BigQueryTable `protobuf:"bytes,1,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"` + // References to fields uniquely identifying rows within the table. + // Nested fields in the format, like `person.birthdate.year`, are allowed. + IdentifyingFields []*FieldId `protobuf:"bytes,2,rep,name=identifying_fields,json=identifyingFields,proto3" json:"identifying_fields,omitempty"` + // Max number of rows to scan. If the table has more rows than this value, the + // rest of the rows are omitted. If not set, or if set to 0, all rows will be + // scanned. Only one of rows_limit and rows_limit_percent can be specified. + // Cannot be used in conjunction with TimespanConfig. + RowsLimit int64 `protobuf:"varint,3,opt,name=rows_limit,json=rowsLimit,proto3" json:"rows_limit,omitempty"` + // Max percentage of rows to scan. The rest are omitted. The number of rows + // scanned is rounded down. Must be between 0 and 100, inclusively. Both 0 and + // 100 means no limit. Defaults to 0. Only one of rows_limit and + // rows_limit_percent can be specified. Cannot be used in conjunction with + // TimespanConfig. + RowsLimitPercent int32 `protobuf:"varint,6,opt,name=rows_limit_percent,json=rowsLimitPercent,proto3" json:"rows_limit_percent,omitempty"` + SampleMethod BigQueryOptions_SampleMethod `protobuf:"varint,4,opt,name=sample_method,json=sampleMethod,proto3,enum=google.privacy.dlp.v2.BigQueryOptions_SampleMethod" json:"sample_method,omitempty"` + // References to fields excluded from scanning. This allows you to skip + // inspection of entire columns which you know have no findings. 
+ ExcludedFields []*FieldId `protobuf:"bytes,5,rep,name=excluded_fields,json=excludedFields,proto3" json:"excluded_fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryOptions) Reset() { *m = BigQueryOptions{} } +func (m *BigQueryOptions) String() string { return proto.CompactTextString(m) } +func (*BigQueryOptions) ProtoMessage() {} +func (*BigQueryOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{11} +} +func (m *BigQueryOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryOptions.Unmarshal(m, b) +} +func (m *BigQueryOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryOptions.Marshal(b, m, deterministic) +} +func (dst *BigQueryOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryOptions.Merge(dst, src) +} +func (m *BigQueryOptions) XXX_Size() int { + return xxx_messageInfo_BigQueryOptions.Size(m) +} +func (m *BigQueryOptions) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryOptions proto.InternalMessageInfo + +func (m *BigQueryOptions) GetTableReference() *BigQueryTable { + if m != nil { + return m.TableReference + } + return nil +} + +func (m *BigQueryOptions) GetIdentifyingFields() []*FieldId { + if m != nil { + return m.IdentifyingFields + } + return nil +} + +func (m *BigQueryOptions) GetRowsLimit() int64 { + if m != nil { + return m.RowsLimit + } + return 0 +} + +func (m *BigQueryOptions) GetRowsLimitPercent() int32 { + if m != nil { + return m.RowsLimitPercent + } + return 0 +} + +func (m *BigQueryOptions) GetSampleMethod() BigQueryOptions_SampleMethod { + if m != nil { + return m.SampleMethod + } + return BigQueryOptions_SAMPLE_METHOD_UNSPECIFIED +} + +func (m *BigQueryOptions) GetExcludedFields() []*FieldId { + if m != nil { + return m.ExcludedFields + } + return nil +} + +// Shared message indicating Cloud storage type. 
+type StorageConfig struct { + // Types that are valid to be assigned to Type: + // *StorageConfig_DatastoreOptions + // *StorageConfig_CloudStorageOptions + // *StorageConfig_BigQueryOptions + Type isStorageConfig_Type `protobuf_oneof:"type"` + TimespanConfig *StorageConfig_TimespanConfig `protobuf:"bytes,6,opt,name=timespan_config,json=timespanConfig,proto3" json:"timespan_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StorageConfig) Reset() { *m = StorageConfig{} } +func (m *StorageConfig) String() string { return proto.CompactTextString(m) } +func (*StorageConfig) ProtoMessage() {} +func (*StorageConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{12} +} +func (m *StorageConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StorageConfig.Unmarshal(m, b) +} +func (m *StorageConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StorageConfig.Marshal(b, m, deterministic) +} +func (dst *StorageConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageConfig.Merge(dst, src) +} +func (m *StorageConfig) XXX_Size() int { + return xxx_messageInfo_StorageConfig.Size(m) +} +func (m *StorageConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StorageConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StorageConfig proto.InternalMessageInfo + +type isStorageConfig_Type interface { + isStorageConfig_Type() +} + +type StorageConfig_DatastoreOptions struct { + DatastoreOptions *DatastoreOptions `protobuf:"bytes,2,opt,name=datastore_options,json=datastoreOptions,proto3,oneof"` +} + +type StorageConfig_CloudStorageOptions struct { + CloudStorageOptions *CloudStorageOptions `protobuf:"bytes,3,opt,name=cloud_storage_options,json=cloudStorageOptions,proto3,oneof"` +} + +type StorageConfig_BigQueryOptions struct { + BigQueryOptions *BigQueryOptions `protobuf:"bytes,4,opt,name=big_query_options,json=bigQueryOptions,proto3,oneof"` +} + +func (*StorageConfig_DatastoreOptions) isStorageConfig_Type() {} + +func (*StorageConfig_CloudStorageOptions) isStorageConfig_Type() {} + +func (*StorageConfig_BigQueryOptions) isStorageConfig_Type() {} + +func (m *StorageConfig) GetType() isStorageConfig_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *StorageConfig) GetDatastoreOptions() *DatastoreOptions { + if x, ok := m.GetType().(*StorageConfig_DatastoreOptions); ok { + return x.DatastoreOptions + } + return nil +} + +func (m *StorageConfig) GetCloudStorageOptions() *CloudStorageOptions { + if x, ok := m.GetType().(*StorageConfig_CloudStorageOptions); ok { + return x.CloudStorageOptions + } + return nil +} + +func (m *StorageConfig) GetBigQueryOptions() *BigQueryOptions { + if x, ok := m.GetType().(*StorageConfig_BigQueryOptions); ok { + return x.BigQueryOptions + } + return nil +} + +func (m *StorageConfig) GetTimespanConfig() *StorageConfig_TimespanConfig { + if m != nil { + return m.TimespanConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StorageConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StorageConfig_OneofMarshaler, _StorageConfig_OneofUnmarshaler, _StorageConfig_OneofSizer, []interface{}{ + (*StorageConfig_DatastoreOptions)(nil), + (*StorageConfig_CloudStorageOptions)(nil), + (*StorageConfig_BigQueryOptions)(nil), + } +} + +func _StorageConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StorageConfig) + // type + switch x := m.Type.(type) { + case *StorageConfig_DatastoreOptions: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatastoreOptions); err != nil { + return err + } + case *StorageConfig_CloudStorageOptions: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CloudStorageOptions); err != nil { + return err + } + case *StorageConfig_BigQueryOptions: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigQueryOptions); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("StorageConfig.Type has unexpected type %T", x) + } + return nil +} + +func _StorageConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StorageConfig) + switch tag { + case 2: // type.datastore_options + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatastoreOptions) + err := b.DecodeMessage(msg) + m.Type = &StorageConfig_DatastoreOptions{msg} + return true, err + case 3: // type.cloud_storage_options + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CloudStorageOptions) + err := b.DecodeMessage(msg) + m.Type = &StorageConfig_CloudStorageOptions{msg} + return true, err + case 4: // type.big_query_options + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryOptions) + err := b.DecodeMessage(msg) + m.Type = &StorageConfig_BigQueryOptions{msg} + return true, err + default: + return false, nil + } +} + +func _StorageConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StorageConfig) + // type + switch x := m.Type.(type) { + case *StorageConfig_DatastoreOptions: + s := proto.Size(x.DatastoreOptions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StorageConfig_CloudStorageOptions: + s := proto.Size(x.CloudStorageOptions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *StorageConfig_BigQueryOptions: + s := proto.Size(x.BigQueryOptions) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Configuration of the timespan of the items to include in scanning. +// Currently only supported when inspecting Google Cloud Storage and BigQuery. +type StorageConfig_TimespanConfig struct { + // Exclude files or rows older than this value. + StartTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // Exclude files or rows newer than this value. + // If set to zero, no upper time limit is applied. + EndTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Specification of the field containing the timestamp of scanned items. + // Used for data sources like Datastore or BigQuery. 
+ // If not specified for BigQuery, table last modification timestamp + // is checked against given time span. + // The valid data types of the timestamp field are: + // for BigQuery - timestamp, date, datetime; + // for Datastore - timestamp. + // Datastore entity will be scanned if the timestamp property does not exist + // or its value is empty or invalid. + TimestampField *FieldId `protobuf:"bytes,3,opt,name=timestamp_field,json=timestampField,proto3" json:"timestamp_field,omitempty"` + // When the job is started by a JobTrigger we will automatically figure out + // a valid start_time to avoid scanning files that have not been modified + // since the last time the JobTrigger executed. This will be based on the + // time of the execution of the last run of the JobTrigger. + EnableAutoPopulationOfTimespanConfig bool `protobuf:"varint,4,opt,name=enable_auto_population_of_timespan_config,json=enableAutoPopulationOfTimespanConfig,proto3" json:"enable_auto_population_of_timespan_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StorageConfig_TimespanConfig) Reset() { *m = StorageConfig_TimespanConfig{} } +func (m *StorageConfig_TimespanConfig) String() string { return proto.CompactTextString(m) } +func (*StorageConfig_TimespanConfig) ProtoMessage() {} +func (*StorageConfig_TimespanConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{12, 0} +} +func (m *StorageConfig_TimespanConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StorageConfig_TimespanConfig.Unmarshal(m, b) +} +func (m *StorageConfig_TimespanConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StorageConfig_TimespanConfig.Marshal(b, m, deterministic) +} +func (dst *StorageConfig_TimespanConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_StorageConfig_TimespanConfig.Merge(dst, src) +} +func (m *StorageConfig_TimespanConfig) XXX_Size() int { + return xxx_messageInfo_StorageConfig_TimespanConfig.Size(m) +} +func (m *StorageConfig_TimespanConfig) XXX_DiscardUnknown() { + xxx_messageInfo_StorageConfig_TimespanConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_StorageConfig_TimespanConfig proto.InternalMessageInfo + +func (m *StorageConfig_TimespanConfig) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *StorageConfig_TimespanConfig) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *StorageConfig_TimespanConfig) GetTimestampField() *FieldId { + if m != nil { + return m.TimestampField + } + return nil +} + +func (m *StorageConfig_TimespanConfig) GetEnableAutoPopulationOfTimespanConfig() bool { + if m != nil { + return m.EnableAutoPopulationOfTimespanConfig + } + return false +} + +// Row key for identifying a record in BigQuery table. +type BigQueryKey struct { + // Complete BigQuery table reference. + TableReference *BigQueryTable `protobuf:"bytes,1,opt,name=table_reference,json=tableReference,proto3" json:"table_reference,omitempty"` + // Absolute number of the row from the beginning of the table at the time + // of scanning. 
+ RowNumber int64 `protobuf:"varint,2,opt,name=row_number,json=rowNumber,proto3" json:"row_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryKey) Reset() { *m = BigQueryKey{} } +func (m *BigQueryKey) String() string { return proto.CompactTextString(m) } +func (*BigQueryKey) ProtoMessage() {} +func (*BigQueryKey) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{13} +} +func (m *BigQueryKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryKey.Unmarshal(m, b) +} +func (m *BigQueryKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryKey.Marshal(b, m, deterministic) +} +func (dst *BigQueryKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryKey.Merge(dst, src) +} +func (m *BigQueryKey) XXX_Size() int { + return xxx_messageInfo_BigQueryKey.Size(m) +} +func (m *BigQueryKey) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryKey.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryKey proto.InternalMessageInfo + +func (m *BigQueryKey) GetTableReference() *BigQueryTable { + if m != nil { + return m.TableReference + } + return nil +} + +func (m *BigQueryKey) GetRowNumber() int64 { + if m != nil { + return m.RowNumber + } + return 0 +} + +// Record key for a finding in Cloud Datastore. +type DatastoreKey struct { + // Datastore entity key. + EntityKey *Key `protobuf:"bytes,1,opt,name=entity_key,json=entityKey,proto3" json:"entity_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DatastoreKey) Reset() { *m = DatastoreKey{} } +func (m *DatastoreKey) String() string { return proto.CompactTextString(m) } +func (*DatastoreKey) ProtoMessage() {} +func (*DatastoreKey) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{14} +} +func (m *DatastoreKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DatastoreKey.Unmarshal(m, b) +} +func (m *DatastoreKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DatastoreKey.Marshal(b, m, deterministic) +} +func (dst *DatastoreKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_DatastoreKey.Merge(dst, src) +} +func (m *DatastoreKey) XXX_Size() int { + return xxx_messageInfo_DatastoreKey.Size(m) +} +func (m *DatastoreKey) XXX_DiscardUnknown() { + xxx_messageInfo_DatastoreKey.DiscardUnknown(m) +} + +var xxx_messageInfo_DatastoreKey proto.InternalMessageInfo + +func (m *DatastoreKey) GetEntityKey() *Key { + if m != nil { + return m.EntityKey + } + return nil +} + +// A unique identifier for a Datastore entity. +// If a key's partition ID or any of its path kinds or names are +// reserved/read-only, the key is reserved/read-only. +// A reserved/read-only key is forbidden in certain documented contexts. +type Key struct { + // Entities are partitioned into subsets, currently identified by a project + // ID and namespace ID. + // Queries are scoped to a single partition. + PartitionId *PartitionId `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"` + // The entity path. + // An entity path consists of one or more elements composed of a kind and a + // string or numerical identifier, which identify entities. 
The first + // element identifies a _root entity_, the second element identifies + // a _child_ of the root entity, the third element identifies a child of the + // second entity, and so forth. The entities identified by all prefixes of + // the path are called the element's _ancestors_. + // + // A path can never be empty, and a path can have at most 100 elements. + Path []*Key_PathElement `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key) Reset() { *m = Key{} } +func (m *Key) String() string { return proto.CompactTextString(m) } +func (*Key) ProtoMessage() {} +func (*Key) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{15} +} +func (m *Key) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key.Unmarshal(m, b) +} +func (m *Key) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key.Marshal(b, m, deterministic) +} +func (dst *Key) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key.Merge(dst, src) +} +func (m *Key) XXX_Size() int { + return xxx_messageInfo_Key.Size(m) +} +func (m *Key) XXX_DiscardUnknown() { + xxx_messageInfo_Key.DiscardUnknown(m) +} + +var xxx_messageInfo_Key proto.InternalMessageInfo + +func (m *Key) GetPartitionId() *PartitionId { + if m != nil { + return m.PartitionId + } + return nil +} + +func (m *Key) GetPath() []*Key_PathElement { + if m != nil { + return m.Path + } + return nil +} + +// A (kind, ID/name) pair used to construct a key path. +// +// If either name or ID is set, the element is complete. +// If neither is set, the element is incomplete. +type Key_PathElement struct { + // The kind of the entity. + // A kind matching regex `__.*__` is reserved/read-only. + // A kind must not contain more than 1500 bytes when UTF-8 encoded. + // Cannot be `""`. + Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` + // The type of ID. 
+ // + // Types that are valid to be assigned to IdType: + // *Key_PathElement_Id + // *Key_PathElement_Name + IdType isKey_PathElement_IdType `protobuf_oneof:"id_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Key_PathElement) Reset() { *m = Key_PathElement{} } +func (m *Key_PathElement) String() string { return proto.CompactTextString(m) } +func (*Key_PathElement) ProtoMessage() {} +func (*Key_PathElement) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{15, 0} +} +func (m *Key_PathElement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Key_PathElement.Unmarshal(m, b) +} +func (m *Key_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Key_PathElement.Marshal(b, m, deterministic) +} +func (dst *Key_PathElement) XXX_Merge(src proto.Message) { + xxx_messageInfo_Key_PathElement.Merge(dst, src) +} +func (m *Key_PathElement) XXX_Size() int { + return xxx_messageInfo_Key_PathElement.Size(m) +} +func (m *Key_PathElement) XXX_DiscardUnknown() { + xxx_messageInfo_Key_PathElement.DiscardUnknown(m) +} + +var xxx_messageInfo_Key_PathElement proto.InternalMessageInfo + +func (m *Key_PathElement) GetKind() string { + if m != nil { + return m.Kind + } + return "" +} + +type isKey_PathElement_IdType interface { + isKey_PathElement_IdType() +} + +type Key_PathElement_Id struct { + Id int64 `protobuf:"varint,2,opt,name=id,proto3,oneof"` +} + +type Key_PathElement_Name struct { + Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"` +} + +func (*Key_PathElement_Id) isKey_PathElement_IdType() {} + +func (*Key_PathElement_Name) isKey_PathElement_IdType() {} + +func (m *Key_PathElement) GetIdType() isKey_PathElement_IdType { + if m != nil { + return m.IdType + } + return nil +} + +func (m *Key_PathElement) GetId() int64 { + if x, ok := m.GetIdType().(*Key_PathElement_Id); ok { + return x.Id + } + return 0 +} + +func (m *Key_PathElement) GetName() string { + if x, ok := m.GetIdType().(*Key_PathElement_Name); ok { + return x.Name + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Key_PathElement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Key_PathElement_OneofMarshaler, _Key_PathElement_OneofUnmarshaler, _Key_PathElement_OneofSizer, []interface{}{ + (*Key_PathElement_Id)(nil), + (*Key_PathElement_Name)(nil), + } +} + +func _Key_PathElement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Name) + case nil: + default: + return fmt.Errorf("Key_PathElement.IdType has unexpected type %T", x) + } + return nil +} + +func _Key_PathElement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Key_PathElement) + switch tag { + case 2: // id_type.id + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.IdType = &Key_PathElement_Id{int64(x)} + return true, err + case 3: // id_type.name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IdType = &Key_PathElement_Name{x} + return true, err + default: + return false, nil + } +} + +func _Key_PathElement_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Key_PathElement) + // id_type + switch x := m.IdType.(type) { + case *Key_PathElement_Id: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Id)) + case *Key_PathElement_Name: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Name))) + n += len(x.Name) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message for a unique key indicating a record that contains a finding. +type RecordKey struct { + // Types that are valid to be assigned to Type: + // *RecordKey_DatastoreKey + // *RecordKey_BigQueryKey + Type isRecordKey_Type `protobuf_oneof:"type"` + // Values of identifying columns in the given row. Order of values matches + // the order of field identifiers specified in the scanning request. 
+ IdValues []string `protobuf:"bytes,5,rep,name=id_values,json=idValues,proto3" json:"id_values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RecordKey) Reset() { *m = RecordKey{} } +func (m *RecordKey) String() string { return proto.CompactTextString(m) } +func (*RecordKey) ProtoMessage() {} +func (*RecordKey) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{16} +} +func (m *RecordKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RecordKey.Unmarshal(m, b) +} +func (m *RecordKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RecordKey.Marshal(b, m, deterministic) +} +func (dst *RecordKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_RecordKey.Merge(dst, src) +} +func (m *RecordKey) XXX_Size() int { + return xxx_messageInfo_RecordKey.Size(m) +} +func (m *RecordKey) XXX_DiscardUnknown() { + xxx_messageInfo_RecordKey.DiscardUnknown(m) +} + +var xxx_messageInfo_RecordKey proto.InternalMessageInfo + +type isRecordKey_Type interface { + isRecordKey_Type() +} + +type RecordKey_DatastoreKey struct { + DatastoreKey *DatastoreKey `protobuf:"bytes,2,opt,name=datastore_key,json=datastoreKey,proto3,oneof"` +} + +type RecordKey_BigQueryKey struct { + BigQueryKey *BigQueryKey `protobuf:"bytes,3,opt,name=big_query_key,json=bigQueryKey,proto3,oneof"` +} + +func (*RecordKey_DatastoreKey) isRecordKey_Type() {} + +func (*RecordKey_BigQueryKey) isRecordKey_Type() {} + +func (m *RecordKey) GetType() isRecordKey_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *RecordKey) GetDatastoreKey() *DatastoreKey { + if x, ok := m.GetType().(*RecordKey_DatastoreKey); ok { + return x.DatastoreKey + } + return nil +} + +func (m *RecordKey) GetBigQueryKey() *BigQueryKey { + if x, ok := m.GetType().(*RecordKey_BigQueryKey); ok { + return x.BigQueryKey + } + return nil +} + +func (m *RecordKey) GetIdValues() []string { + if m != nil { + return m.IdValues + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*RecordKey) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _RecordKey_OneofMarshaler, _RecordKey_OneofUnmarshaler, _RecordKey_OneofSizer, []interface{}{ + (*RecordKey_DatastoreKey)(nil), + (*RecordKey_BigQueryKey)(nil), + } +} + +func _RecordKey_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*RecordKey) + // type + switch x := m.Type.(type) { + case *RecordKey_DatastoreKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.DatastoreKey); err != nil { + return err + } + case *RecordKey_BigQueryKey: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BigQueryKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("RecordKey.Type has unexpected type %T", x) + } + return nil +} + +func _RecordKey_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*RecordKey) + switch tag { + case 2: // type.datastore_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DatastoreKey) + err := b.DecodeMessage(msg) + m.Type = &RecordKey_DatastoreKey{msg} + return true, err + case 3: // type.big_query_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BigQueryKey) + err := b.DecodeMessage(msg) + m.Type = &RecordKey_BigQueryKey{msg} + return true, err + default: + return false, nil + } +} + +func _RecordKey_OneofSizer(msg proto.Message) (n int) { + m := msg.(*RecordKey) + // type + switch x := m.Type.(type) { + case *RecordKey_DatastoreKey: + s := proto.Size(x.DatastoreKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *RecordKey_BigQueryKey: + s := proto.Size(x.BigQueryKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message defining the location of a BigQuery table. A table is uniquely +// identified by its project_id, dataset_id, and table_name. Within a query +// a table is often referenced with a string in the format of: +// `:.` or +// `..`. +type BigQueryTable struct { + // The Google Cloud Platform project ID of the project containing the table. + // If omitted, project ID is inferred from the API call. + ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Dataset ID of the table. + DatasetId string `protobuf:"bytes,2,opt,name=dataset_id,json=datasetId,proto3" json:"dataset_id,omitempty"` + // Name of the table. 
+ TableId string `protobuf:"bytes,3,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryTable) Reset() { *m = BigQueryTable{} } +func (m *BigQueryTable) String() string { return proto.CompactTextString(m) } +func (*BigQueryTable) ProtoMessage() {} +func (*BigQueryTable) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{17} +} +func (m *BigQueryTable) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryTable.Unmarshal(m, b) +} +func (m *BigQueryTable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryTable.Marshal(b, m, deterministic) +} +func (dst *BigQueryTable) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryTable.Merge(dst, src) +} +func (m *BigQueryTable) XXX_Size() int { + return xxx_messageInfo_BigQueryTable.Size(m) +} +func (m *BigQueryTable) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryTable.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryTable proto.InternalMessageInfo + +func (m *BigQueryTable) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *BigQueryTable) GetDatasetId() string { + if m != nil { + return m.DatasetId + } + return "" +} + +func (m *BigQueryTable) GetTableId() string { + if m != nil { + return m.TableId + } + return "" +} + +// Message defining a field of a BigQuery table. +type BigQueryField struct { + // Source table of the field. + Table *BigQueryTable `protobuf:"bytes,1,opt,name=table,proto3" json:"table,omitempty"` + // Designated field in the BigQuery table. + Field *FieldId `protobuf:"bytes,2,opt,name=field,proto3" json:"field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BigQueryField) Reset() { *m = BigQueryField{} } +func (m *BigQueryField) String() string { return proto.CompactTextString(m) } +func (*BigQueryField) ProtoMessage() {} +func (*BigQueryField) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{18} +} +func (m *BigQueryField) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BigQueryField.Unmarshal(m, b) +} +func (m *BigQueryField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BigQueryField.Marshal(b, m, deterministic) +} +func (dst *BigQueryField) XXX_Merge(src proto.Message) { + xxx_messageInfo_BigQueryField.Merge(dst, src) +} +func (m *BigQueryField) XXX_Size() int { + return xxx_messageInfo_BigQueryField.Size(m) +} +func (m *BigQueryField) XXX_DiscardUnknown() { + xxx_messageInfo_BigQueryField.DiscardUnknown(m) +} + +var xxx_messageInfo_BigQueryField proto.InternalMessageInfo + +func (m *BigQueryField) GetTable() *BigQueryTable { + if m != nil { + return m.Table + } + return nil +} + +func (m *BigQueryField) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +// An entity in a dataset is a field or set of fields that correspond to a +// single person. For example, in medical records the `EntityId` might be a +// patient identifier, or for financial records it might be an account +// identifier. This message is used when generalizations or analysis must take +// into account that multiple rows correspond to the same entity. +type EntityId struct { + // Composite key indicating which field contains the entity identifier. 
+ Field *FieldId `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EntityId) Reset() { *m = EntityId{} } +func (m *EntityId) String() string { return proto.CompactTextString(m) } +func (*EntityId) ProtoMessage() {} +func (*EntityId) Descriptor() ([]byte, []int) { + return fileDescriptor_storage_0642a70333f19630, []int{19} +} +func (m *EntityId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EntityId.Unmarshal(m, b) +} +func (m *EntityId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EntityId.Marshal(b, m, deterministic) +} +func (dst *EntityId) XXX_Merge(src proto.Message) { + xxx_messageInfo_EntityId.Merge(dst, src) +} +func (m *EntityId) XXX_Size() int { + return xxx_messageInfo_EntityId.Size(m) +} +func (m *EntityId) XXX_DiscardUnknown() { + xxx_messageInfo_EntityId.DiscardUnknown(m) +} + +var xxx_messageInfo_EntityId proto.InternalMessageInfo + +func (m *EntityId) GetField() *FieldId { + if m != nil { + return m.Field + } + return nil +} + +func init() { + proto.RegisterType((*InfoType)(nil), "google.privacy.dlp.v2.InfoType") + proto.RegisterType((*StoredType)(nil), "google.privacy.dlp.v2.StoredType") + proto.RegisterType((*CustomInfoType)(nil), "google.privacy.dlp.v2.CustomInfoType") + proto.RegisterType((*CustomInfoType_Dictionary)(nil), "google.privacy.dlp.v2.CustomInfoType.Dictionary") + proto.RegisterType((*CustomInfoType_Dictionary_WordList)(nil), "google.privacy.dlp.v2.CustomInfoType.Dictionary.WordList") + proto.RegisterType((*CustomInfoType_Regex)(nil), "google.privacy.dlp.v2.CustomInfoType.Regex") + proto.RegisterType((*CustomInfoType_SurrogateType)(nil), "google.privacy.dlp.v2.CustomInfoType.SurrogateType") + proto.RegisterType((*CustomInfoType_DetectionRule)(nil), "google.privacy.dlp.v2.CustomInfoType.DetectionRule") + proto.RegisterType((*CustomInfoType_DetectionRule_Proximity)(nil), "google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity") + proto.RegisterType((*CustomInfoType_DetectionRule_LikelihoodAdjustment)(nil), "google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment") + proto.RegisterType((*CustomInfoType_DetectionRule_HotwordRule)(nil), "google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule") + proto.RegisterType((*FieldId)(nil), "google.privacy.dlp.v2.FieldId") + proto.RegisterType((*PartitionId)(nil), "google.privacy.dlp.v2.PartitionId") + proto.RegisterType((*KindExpression)(nil), "google.privacy.dlp.v2.KindExpression") + proto.RegisterType((*DatastoreOptions)(nil), "google.privacy.dlp.v2.DatastoreOptions") + proto.RegisterType((*CloudStorageRegexFileSet)(nil), "google.privacy.dlp.v2.CloudStorageRegexFileSet") + proto.RegisterType((*CloudStorageOptions)(nil), "google.privacy.dlp.v2.CloudStorageOptions") + proto.RegisterType((*CloudStorageOptions_FileSet)(nil), "google.privacy.dlp.v2.CloudStorageOptions.FileSet") + proto.RegisterType((*CloudStorageFileSet)(nil), "google.privacy.dlp.v2.CloudStorageFileSet") + proto.RegisterType((*CloudStoragePath)(nil), "google.privacy.dlp.v2.CloudStoragePath") + proto.RegisterType((*BigQueryOptions)(nil), "google.privacy.dlp.v2.BigQueryOptions") + proto.RegisterType((*StorageConfig)(nil), "google.privacy.dlp.v2.StorageConfig") + proto.RegisterType((*StorageConfig_TimespanConfig)(nil), "google.privacy.dlp.v2.StorageConfig.TimespanConfig") + proto.RegisterType((*BigQueryKey)(nil), 
"google.privacy.dlp.v2.BigQueryKey") + proto.RegisterType((*DatastoreKey)(nil), "google.privacy.dlp.v2.DatastoreKey") + proto.RegisterType((*Key)(nil), "google.privacy.dlp.v2.Key") + proto.RegisterType((*Key_PathElement)(nil), "google.privacy.dlp.v2.Key.PathElement") + proto.RegisterType((*RecordKey)(nil), "google.privacy.dlp.v2.RecordKey") + proto.RegisterType((*BigQueryTable)(nil), "google.privacy.dlp.v2.BigQueryTable") + proto.RegisterType((*BigQueryField)(nil), "google.privacy.dlp.v2.BigQueryField") + proto.RegisterType((*EntityId)(nil), "google.privacy.dlp.v2.EntityId") + proto.RegisterEnum("google.privacy.dlp.v2.Likelihood", Likelihood_name, Likelihood_value) + proto.RegisterEnum("google.privacy.dlp.v2.FileType", FileType_name, FileType_value) + proto.RegisterEnum("google.privacy.dlp.v2.CustomInfoType_ExclusionType", CustomInfoType_ExclusionType_name, CustomInfoType_ExclusionType_value) + proto.RegisterEnum("google.privacy.dlp.v2.CloudStorageOptions_SampleMethod", CloudStorageOptions_SampleMethod_name, CloudStorageOptions_SampleMethod_value) + proto.RegisterEnum("google.privacy.dlp.v2.BigQueryOptions_SampleMethod", BigQueryOptions_SampleMethod_name, BigQueryOptions_SampleMethod_value) +} + +func init() { + proto.RegisterFile("google/privacy/dlp/v2/storage.proto", fileDescriptor_storage_0642a70333f19630) +} + +var fileDescriptor_storage_0642a70333f19630 = []byte{ + // 2075 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x58, 0xdd, 0x72, 0x1b, 0x49, + 0xf5, 0xd7, 0x48, 0x96, 0x2d, 0x1d, 0x7d, 0x58, 0xee, 0x24, 0x5b, 0xb2, 0xfe, 0x9b, 0xc4, 0x2b, + 0xe7, 0x9f, 0x35, 0x81, 0x92, 0xc0, 0x0b, 0x05, 0xd9, 0x85, 0x05, 0xcb, 0x92, 0xa3, 0xc1, 0x5f, + 0xa2, 0xa5, 0x38, 0xc9, 0x62, 0x6a, 0x18, 0x69, 0x5a, 0xf2, 0x6c, 0x46, 0x33, 0xc3, 0x4c, 0x8f, + 0x6d, 0x15, 0xc5, 0x05, 0x17, 0x5c, 0x73, 0xc3, 0x23, 0x50, 0x45, 0x15, 0xc5, 0x13, 0xc0, 0x3d, + 0x17, 0x3c, 0x04, 0x55, 0x7b, 0x49, 0xf1, 0x12, 0x54, 0x7f, 0xcc, 0x68, 0xa4, 0x58, 0x89, 0x9c, + 0xca, 0xdd, 0xf4, 0xe9, 0xf3, 0x3b, 0xdd, 0xe7, 0x9c, 0x5f, 0x9f, 0x3e, 0x3d, 0xb0, 0x3d, 0x72, + 0x9c, 0x91, 0x45, 0xea, 0xae, 0x67, 0x5e, 0xea, 0x83, 0x49, 0xdd, 0xb0, 0xdc, 0xfa, 0xe5, 0x6e, + 0xdd, 0xa7, 0x8e, 0xa7, 0x8f, 0x48, 0xcd, 0xf5, 0x1c, 0xea, 0xa0, 0x7b, 0x42, 0xa9, 0x26, 0x95, + 0x6a, 0x86, 0xe5, 0xd6, 0x2e, 0x77, 0x2b, 0x1f, 0x4b, 0xac, 0xee, 0x9a, 0x75, 0xdd, 0xb6, 0x1d, + 0xaa, 0x53, 0xd3, 0xb1, 0x7d, 0x01, 0xaa, 0x6c, 0xc6, 0x66, 0x3d, 0xe2, 0x3b, 0x81, 0x37, 0x90, + 0xf6, 0x2a, 0x0f, 0xa3, 0x45, 0x1d, 0xea, 0xf4, 0x83, 0x61, 0x9d, 0x9a, 0x63, 0xe2, 0x53, 0x7d, + 0xec, 0x0a, 0x85, 0xea, 0x03, 0xc8, 0xa8, 0xf6, 0xd0, 0xe9, 0x4d, 0x5c, 0x82, 0x10, 0xac, 0xd8, + 0xfa, 0x98, 0x94, 0x95, 0x2d, 0x65, 0x27, 0x8b, 0xf9, 0x77, 0xf5, 0x57, 0x00, 0x5d, 0xea, 0x78, + 0xc4, 0x58, 0xa4, 0x81, 0xbe, 0x80, 0xdc, 0xc0, 0x23, 0x3a, 0x25, 0x1a, 0xb3, 0x5d, 0x4e, 0x6e, + 0x29, 0x3b, 0xb9, 0xdd, 0x4a, 0x2d, 0x72, 0x44, 0x2c, 0x5c, 0xeb, 0x85, 0x0b, 0x63, 0x10, 0xea, + 0x4c, 0x50, 0xfd, 0xa6, 0x00, 0xc5, 0xfd, 0xc0, 0xa7, 0xce, 0x38, 0xda, 0xc5, 0x8f, 0x21, 0x6b, + 0xda, 0x43, 0x47, 0xa3, 0x13, 0x57, 0x2c, 0x94, 0xdb, 0x7d, 0x58, 0xbb, 0x31, 0x2c, 0xb5, 0x10, + 0x83, 0x33, 0x66, 0x88, 0xde, 0x03, 0xb0, 0xcc, 0xd7, 0xc4, 0x32, 0x2f, 0x1c, 0xc7, 0x28, 0xaf, + 0x6e, 0x29, 0x3b, 0xc5, 0xdd, 0x4f, 0x16, 0xc0, 0x8f, 0x22, 0x45, 0x1c, 0x03, 0x21, 0x0c, 0x60, + 0x98, 0x03, 0x16, 0x60, 0xdd, 0x9b, 0x48, 0x7f, 0xbe, 0xbb, 0xc0, 0xc4, 0xec, 0xde, 0x6b, 0xcd, + 0x08, 0xd7, 0x4e, 0xe0, 0x98, 0x15, 0xb4, 0x0f, 0x69, 
0x8f, 0x8c, 0xc8, 0x75, 0x39, 0xc5, 0xcd, + 0x7d, 0x7b, 0x39, 0x73, 0x98, 0x41, 0xda, 0x09, 0x2c, 0xb0, 0xe8, 0x1c, 0x8a, 0x7e, 0xe0, 0x79, + 0xce, 0x88, 0x07, 0x9b, 0x85, 0x67, 0x85, 0x5b, 0xfb, 0x6c, 0x39, 0x6b, 0xdd, 0x10, 0xcb, 0x46, + 0xed, 0x04, 0x2e, 0xf8, 0x71, 0x01, 0x6a, 0x42, 0xce, 0xe7, 0x99, 0x16, 0xa6, 0xd3, 0xdc, 0xf4, + 0xa2, 0xd0, 0x4d, 0x39, 0xc1, 0x1c, 0xf5, 0xa7, 0x0c, 0x39, 0x87, 0x75, 0x83, 0x50, 0xc2, 0x1d, + 0xd7, 0xbc, 0xc0, 0x22, 0x7e, 0x79, 0x6d, 0x2b, 0xb5, 0xfc, 0x26, 0x9b, 0x21, 0x18, 0x07, 0x16, + 0xc1, 0x45, 0x23, 0x3e, 0xf4, 0xd1, 0x57, 0x50, 0x24, 0xd7, 0x03, 0x2b, 0xf0, 0x99, 0x75, 0xbe, + 0xcd, 0x0c, 0xcf, 0xf0, 0x92, 0xc6, 0x5b, 0x21, 0x96, 0x93, 0xa6, 0x40, 0xe2, 0xc3, 0xca, 0x7f, + 0x15, 0x80, 0x69, 0xfe, 0xd0, 0x4b, 0xc8, 0x5e, 0x39, 0x9e, 0xa1, 0x59, 0xa6, 0x4f, 0x25, 0x0d, + 0x9f, 0xde, 0x96, 0x04, 0xb5, 0x17, 0x8e, 0x67, 0x1c, 0x99, 0x3e, 0x6d, 0x27, 0x70, 0xe6, 0x4a, + 0x7e, 0xa3, 0x17, 0x80, 0x06, 0x96, 0x13, 0x18, 0x9a, 0x3c, 0xfa, 0x9a, 0xab, 0xd3, 0x0b, 0x49, + 0x8c, 0x4f, 0x17, 0x2d, 0xc1, 0x00, 0x5d, 0xa1, 0xdf, 0xd1, 0xe9, 0x45, 0x3b, 0x81, 0x4b, 0x83, + 0x39, 0x59, 0x65, 0x0b, 0x32, 0xe1, 0x82, 0xe8, 0x2e, 0xa4, 0xd9, 0x82, 0x7e, 0x59, 0xd9, 0x4a, + 0xed, 0x64, 0xb1, 0x18, 0x34, 0x32, 0xb0, 0x2a, 0xca, 0x43, 0xe5, 0x00, 0xd2, 0x9c, 0x5d, 0xa8, + 0x0c, 0x6b, 0xae, 0x4e, 0x29, 0xf1, 0x6c, 0x79, 0xaa, 0xc3, 0x21, 0xda, 0x86, 0xc2, 0xc8, 0x73, + 0x02, 0x57, 0x33, 0x6d, 0x83, 0x5c, 0x13, 0xbf, 0x9c, 0xdc, 0x4a, 0xed, 0xa4, 0x71, 0x9e, 0x0b, + 0x55, 0x21, 0xab, 0xac, 0x43, 0x61, 0x86, 0x57, 0x95, 0xbf, 0xa7, 0xa1, 0x30, 0x93, 0x44, 0x64, + 0x40, 0xfe, 0xc2, 0xa1, 0x3c, 0x98, 0x8c, 0x10, 0x32, 0x98, 0x3f, 0x7d, 0x0f, 0x3e, 0xd4, 0xda, + 0xc2, 0x0e, 0xfb, 0x6e, 0x27, 0x70, 0xee, 0x62, 0x3a, 0xac, 0x74, 0x21, 0xdb, 0xf1, 0x9c, 0x6b, + 0x73, 0x6c, 0xd2, 0x09, 0xdb, 0xfa, 0x95, 0x69, 0x1b, 0xce, 0x95, 0xd6, 0x27, 0x43, 0xc7, 0x13, + 0x6b, 0xa6, 0x71, 0x5e, 0x08, 0x1b, 0x5c, 0x86, 0x3e, 0x01, 0x39, 0xd6, 0xf4, 0x21, 0x25, 0x1e, + 0x3f, 0xe9, 0x69, 0x9c, 0x13, 0xb2, 0x3d, 0x26, 0xaa, 0xfc, 0x45, 0x81, 0xbb, 0xd3, 0x2a, 0xb1, + 0x67, 0x7c, 0x1d, 0xf8, 0x74, 0x4c, 0x6c, 0x8a, 0x4e, 0xa0, 0x34, 0x34, 0xaf, 0x09, 0xa3, 0x47, + 0x54, 0x6c, 0x94, 0x25, 0x8b, 0x4d, 0x3b, 0x81, 0xd7, 0x39, 0x78, 0x2a, 0x42, 0xdf, 0x83, 0x3b, + 0x1e, 0xb1, 0x74, 0x6a, 0x5e, 0x92, 0xb8, 0x49, 0xbe, 0xa5, 0x76, 0x02, 0xa3, 0x70, 0x72, 0x0a, + 0x69, 0xe4, 0x01, 0xf4, 0x68, 0x43, 0x95, 0x7f, 0x24, 0x21, 0x17, 0x8b, 0x0e, 0xea, 0x40, 0x21, + 0x0a, 0x3a, 0x2f, 0x3c, 0xca, 0xad, 0x0b, 0x0f, 0x0e, 0xd3, 0x26, 0x88, 0xf2, 0x4b, 0xc8, 0xba, + 0x61, 0x80, 0x65, 0x55, 0xfc, 0xc9, 0xfb, 0xe4, 0x30, 0xca, 0x12, 0x9e, 0xda, 0x43, 0xbf, 0x83, + 0x7b, 0x53, 0xb7, 0xb5, 0xa9, 0x5f, 0xf2, 0x58, 0xb4, 0xdf, 0x67, 0xa1, 0x9b, 0x12, 0x87, 0xef, + 0x5a, 0x37, 0x48, 0x1b, 0xab, 0xb0, 0xc2, 0xaa, 0x49, 0xf5, 0x10, 0x0a, 0x33, 0x35, 0x02, 0x3d, + 0x80, 0x4a, 0xeb, 0xe5, 0xfe, 0xd1, 0xf3, 0xae, 0x7a, 0x7a, 0xa2, 0xf5, 0x5e, 0x75, 0x5a, 0xda, + 0xf3, 0x93, 0x6e, 0xa7, 0xb5, 0xaf, 0x1e, 0xa8, 0xad, 0x66, 0x29, 0x81, 0x2a, 0xf0, 0xd1, 0xdc, + 0x3c, 0x1f, 0x36, 0x5b, 0x25, 0x25, 0x32, 0x7a, 0x1f, 0xd6, 0x0e, 0x4c, 0x62, 0x19, 0xaa, 0x71, + 0xe3, 0x0d, 0x7b, 0x0a, 0xb9, 0x8e, 0xee, 0x51, 0x93, 0xed, 0x5b, 0x35, 0xd0, 0x7d, 0x00, 0xd7, + 0x73, 0xbe, 0x26, 0x03, 0xaa, 0x99, 0x82, 0x00, 0x59, 0x1e, 0x28, 0x26, 0x51, 0x0d, 0x46, 0x5a, + 0x86, 0xf2, 0x5d, 0x7d, 0x40, 0x98, 0xc2, 0x0a, 0x57, 0xc8, 0x45, 0x32, 0xd5, 0xa8, 0x3e, 0x82, + 0xe2, 0xa1, 0x69, 0x1b, 0xad, 0x6b, 0xd7, 0x23, 0x3e, 0xf3, 0xe4, 0xc6, 0x65, 
0xff, 0xa4, 0x40, + 0xa9, 0xa9, 0x53, 0x9d, 0xd7, 0xee, 0x53, 0x97, 0xf7, 0x13, 0xa8, 0x05, 0x79, 0x37, 0xdc, 0x0b, + 0xb3, 0x2e, 0x48, 0x53, 0x5d, 0x10, 0xfd, 0xd8, 0xb6, 0x71, 0xce, 0x8d, 0xf9, 0xf0, 0x14, 0x56, + 0x5e, 0x9b, 0xb6, 0x21, 0x59, 0xf2, 0xff, 0x0b, 0xe0, 0xb3, 0x9b, 0xc4, 0x1c, 0x52, 0xfd, 0x83, + 0x02, 0xe5, 0x78, 0xb1, 0xe3, 0xdc, 0x3b, 0x30, 0x2d, 0xd2, 0x25, 0x14, 0x3d, 0x84, 0x5c, 0x3f, + 0x18, 0xbc, 0x26, 0x54, 0x8b, 0xb9, 0x03, 0x42, 0x74, 0xc2, 0x7a, 0x91, 0x6d, 0x28, 0x98, 0xf6, + 0xc0, 0x0a, 0x0c, 0x22, 0x59, 0x9f, 0xe4, 0xd5, 0x2f, 0x2f, 0x85, 0x82, 0xc8, 0xdb, 0x20, 0x2a, + 0x7f, 0xa4, 0x94, 0x12, 0x4a, 0x52, 0xc8, 0x95, 0xaa, 0xdf, 0xac, 0xc0, 0x9d, 0xf8, 0x3e, 0xc2, + 0x08, 0x1d, 0x43, 0x66, 0x68, 0x5a, 0x44, 0xf3, 0x49, 0x78, 0x2b, 0xec, 0x2e, 0x51, 0xb2, 0x25, + 0xba, 0x26, 0x1d, 0xc1, 0x6b, 0x43, 0xe9, 0x51, 0x1d, 0xee, 0xf6, 0x27, 0x94, 0xf8, 0x9a, 0xc5, + 0xce, 0x81, 0xe6, 0x12, 0x4f, 0x63, 0x53, 0x3c, 0xad, 0x29, 0xbc, 0xc1, 0xe7, 0x8e, 0xd8, 0x54, + 0x87, 0x78, 0x0c, 0x8c, 0xbe, 0x84, 0x8f, 0x6f, 0x02, 0xb0, 0x8f, 0x01, 0x3b, 0x2f, 0x19, 0x5e, + 0xc4, 0xca, 0x6f, 0x00, 0x3b, 0x62, 0x1e, 0x7d, 0x09, 0xc0, 0xf5, 0x19, 0x33, 0xfd, 0x72, 0x7a, + 0x2b, 0xb5, 0x53, 0x5c, 0xd8, 0x5e, 0x31, 0x1c, 0xbf, 0x29, 0xb3, 0x43, 0xf9, 0xe5, 0xa3, 0x73, + 0x28, 0xf8, 0xfa, 0xd8, 0xb5, 0x88, 0x36, 0x26, 0xf4, 0x22, 0x6a, 0xb1, 0x7e, 0x78, 0x8b, 0x20, + 0x74, 0x39, 0xfe, 0x98, 0xc3, 0x71, 0xde, 0x8f, 0x8d, 0x50, 0x0d, 0xee, 0xb0, 0xa5, 0x62, 0xde, + 0x71, 0xa7, 0xd6, 0xb8, 0x53, 0x1b, 0x7c, 0x2a, 0x74, 0x8a, 0x4d, 0x54, 0x3c, 0x76, 0xb4, 0x44, + 0x24, 0x4b, 0x90, 0x0a, 0x3c, 0x4b, 0x72, 0x82, 0x7d, 0xa2, 0xe7, 0x50, 0xe4, 0xf9, 0xd5, 0xa2, + 0x84, 0x09, 0x3e, 0xd6, 0x97, 0xd8, 0x6b, 0x9c, 0x76, 0x38, 0xef, 0xc5, 0x46, 0xd5, 0x36, 0xe4, + 0xe3, 0x1e, 0xa0, 0xfb, 0xb0, 0xd9, 0xdd, 0x3b, 0xee, 0x1c, 0xb5, 0xb4, 0xe3, 0x56, 0xaf, 0x7d, + 0xda, 0x9c, 0xab, 0x10, 0x6b, 0x90, 0xea, 0x9d, 0x76, 0x4a, 0x0a, 0x2a, 0x41, 0x1e, 0xef, 0x9d, + 0x34, 0x4f, 0x8f, 0xb5, 0x6e, 0x6f, 0x0f, 0xf7, 0x4a, 0xc9, 0xea, 0xa7, 0xb3, 0x14, 0x5b, 0xe8, + 0x49, 0xf5, 0x31, 0x94, 0xe6, 0x1b, 0x00, 0x76, 0xa6, 0x79, 0xdf, 0x20, 0xcf, 0x34, 0xfb, 0xae, + 0xfe, 0x27, 0x05, 0xeb, 0x0d, 0x73, 0xf4, 0x8b, 0x80, 0x78, 0x93, 0x29, 0x61, 0xd7, 0xa9, 0xde, + 0xb7, 0x18, 0xd7, 0x87, 0xc4, 0x23, 0xf6, 0x20, 0xbc, 0x80, 0x1f, 0x2d, 0x08, 0x43, 0x68, 0xa0, + 0xc7, 0x50, 0xb8, 0xc8, 0xc1, 0x38, 0xc4, 0xa2, 0x63, 0x40, 0xa6, 0x41, 0x6c, 0x6a, 0x0e, 0x27, + 0xa6, 0x3d, 0xd2, 0x86, 0xac, 0xb0, 0x89, 0xce, 0x20, 0xb7, 0xfb, 0x60, 0x21, 0x8f, 0x78, 0xf5, + 0xc3, 0x1b, 0x31, 0x24, 0x97, 0xf9, 0xac, 0xda, 0x79, 0xce, 0x95, 0xcc, 0x37, 0x2f, 0xf6, 0x29, + 0x9c, 0x65, 0x12, 0x9e, 0x66, 0xf4, 0x1d, 0x40, 0xd3, 0xe9, 0x88, 0x0e, 0xab, 0x9c, 0x0e, 0xa5, + 0x48, 0x2d, 0xe4, 0xf6, 0xcb, 0x79, 0x6e, 0xae, 0xbc, 0xb5, 0x39, 0x9c, 0x8b, 0xd4, 0xdb, 0x78, + 0xf9, 0x0c, 0xd6, 0x65, 0x75, 0x30, 0x42, 0x97, 0xd3, 0x4b, 0xb9, 0x5c, 0x0c, 0x61, 0xc2, 0xdf, + 0x0f, 0x48, 0x9e, 0xbf, 0xa5, 0xa1, 0x20, 0xf9, 0xb0, 0xef, 0xd8, 0x43, 0x73, 0x84, 0xce, 0x60, + 0xc3, 0x08, 0x0b, 0xba, 0xe6, 0x08, 0xa7, 0x24, 0xe5, 0x17, 0xb5, 0x95, 0xf3, 0x17, 0x00, 0x6b, + 0x2b, 0x8d, 0xf9, 0x4b, 0xe1, 0xd7, 0x70, 0x6f, 0xb6, 0x5f, 0x0d, 0x6d, 0x8b, 0xbb, 0xf9, 0xc9, + 0xf2, 0x47, 0xbf, 0x9d, 0xc0, 0x77, 0x06, 0x37, 0x14, 0xd5, 0x1e, 0x6c, 0xf4, 0xcd, 0x91, 0xf6, + 0x1b, 0x96, 0x8d, 0xc8, 0xba, 0x78, 0xdb, 0x3c, 0x5e, 0x2e, 0x79, 0xac, 0xa7, 0xea, 0xcf, 0x31, + 0xff, 0x1c, 0xd6, 0xf9, 0x6b, 0xd7, 0xd5, 0x6d, 0x6d, 0xc0, 0x43, 0xc4, 0x99, 0xb3, 0xf8, 0x29, + 0x32, 
0x13, 0x4e, 0xf1, 0x60, 0x75, 0x75, 0x5b, 0x0c, 0x71, 0x91, 0xce, 0x8c, 0x2b, 0x7f, 0x4e, + 0x42, 0x71, 0x56, 0x05, 0x3d, 0x05, 0xf0, 0xa9, 0xee, 0x51, 0xf1, 0x10, 0x56, 0xde, 0xf9, 0x10, + 0xce, 0x72, 0x6d, 0x36, 0x46, 0x3f, 0x80, 0x0c, 0xb1, 0x8d, 0x65, 0x5f, 0xd0, 0x6b, 0xc4, 0x36, + 0x38, 0xec, 0x99, 0x74, 0x91, 0x49, 0x05, 0x31, 0x65, 0x52, 0xde, 0xc9, 0xcb, 0x08, 0xc6, 0x25, + 0xe8, 0x05, 0x7c, 0x8b, 0xd8, 0xbc, 0x4c, 0xe8, 0x01, 0x75, 0x34, 0xd7, 0x71, 0x03, 0x8b, 0xff, + 0x63, 0xd0, 0x9c, 0xa1, 0x36, 0x1f, 0x45, 0x96, 0x99, 0x0c, 0x7e, 0x24, 0x00, 0x7b, 0x01, 0x75, + 0x3a, 0x91, 0xfa, 0xe9, 0x70, 0x36, 0x26, 0x51, 0x13, 0xf4, 0x5b, 0xc8, 0x85, 0x29, 0x3b, 0x24, + 0x93, 0x0f, 0x5d, 0x95, 0x44, 0x19, 0xd1, 0xec, 0x60, 0xdc, 0x97, 0x8d, 0xbc, 0x28, 0x23, 0x27, + 0x5c, 0x50, 0x55, 0x21, 0x1f, 0x31, 0x9d, 0xad, 0xfe, 0x14, 0x80, 0x15, 0x22, 0x3a, 0xd1, 0x5e, + 0x93, 0xc9, 0x9b, 0x89, 0x9a, 0xed, 0x52, 0xc8, 0x04, 0x67, 0x85, 0xf6, 0x21, 0x99, 0x54, 0xff, + 0xad, 0x40, 0x8a, 0x99, 0xf8, 0x40, 0x9d, 0xd2, 0xe7, 0xb2, 0x8a, 0x8b, 0x02, 0xfa, 0x78, 0xf1, + 0x1e, 0x6a, 0xac, 0xe8, 0xb7, 0x2c, 0xc2, 0x9b, 0x58, 0x8e, 0xa9, 0xf4, 0x58, 0xe3, 0x18, 0x09, + 0xd9, 0x85, 0xc0, 0x9b, 0x2e, 0x79, 0x21, 0xb0, 0x6f, 0x54, 0x82, 0xa4, 0x6c, 0x22, 0x53, 0xed, + 0x04, 0x4e, 0x9a, 0x06, 0xba, 0x2b, 0x5b, 0x41, 0x46, 0x93, 0x6c, 0x3b, 0x21, 0x9a, 0xc1, 0x46, + 0x16, 0xd6, 0x4c, 0xf1, 0xee, 0xaf, 0xfe, 0x53, 0x81, 0x2c, 0x26, 0x03, 0xc7, 0x33, 0x98, 0x9b, + 0x3f, 0x87, 0xc2, 0xb4, 0xa6, 0xb0, 0x60, 0x09, 0x72, 0x6e, 0xbf, 0xab, 0x9e, 0x1c, 0x92, 0x49, + 0x3b, 0x81, 0xf3, 0x46, 0x3c, 0xea, 0x6d, 0x28, 0x4c, 0x4f, 0x39, 0xb3, 0x95, 0x7a, 0x6b, 0xcc, + 0x62, 0x74, 0x61, 0x6f, 0xbd, 0x7e, 0x8c, 0x3d, 0xff, 0x07, 0x59, 0xd3, 0xd0, 0x2e, 0x75, 0x2b, + 0x90, 0x3d, 0x4c, 0x16, 0x67, 0x4c, 0xe3, 0x8c, 0x8f, 0x23, 0xc6, 0x5d, 0x40, 0x61, 0x86, 0x34, + 0x73, 0x9d, 0xb5, 0x32, 0xdf, 0x59, 0xdf, 0x07, 0xe0, 0xdb, 0x25, 0xf1, 0xc6, 0x5b, 0x4a, 0x54, + 0x03, 0x6d, 0x42, 0x46, 0x30, 0xd6, 0x14, 0x67, 0x2c, 0x8b, 0xd7, 0xf8, 0x58, 0x35, 0xaa, 0xbf, + 0x57, 0xa6, 0x4b, 0x89, 0xe3, 0xf4, 0x39, 0xa4, 0xf9, 0xe4, 0xad, 0x48, 0x2d, 0x20, 0xe8, 0xfb, + 0x90, 0x16, 0x27, 0x39, 0xb9, 0xd4, 0x49, 0x16, 0xca, 0xd5, 0x9f, 0x41, 0xa6, 0xc5, 0x49, 0xaa, + 0x1a, 0x53, 0x0b, 0xca, 0x2d, 0x2c, 0x3c, 0xa1, 0x00, 0xb1, 0x07, 0x69, 0x05, 0x3e, 0x3a, 0x52, + 0x0f, 0x5b, 0x47, 0x6a, 0xfb, 0xf4, 0x8d, 0x5b, 0x69, 0x03, 0x0a, 0x67, 0x2d, 0xfc, 0x4a, 0x7b, + 0x7e, 0xc2, 0x55, 0x5e, 0x95, 0x14, 0x94, 0x87, 0x4c, 0x34, 0x4a, 0xb2, 0x51, 0xe7, 0xb4, 0xdb, + 0x55, 0x1b, 0x47, 0xad, 0x52, 0x0a, 0x01, 0xac, 0xca, 0x99, 0x15, 0xb4, 0x0e, 0x39, 0x0e, 0x95, + 0x82, 0xf4, 0x93, 0x0e, 0x64, 0xc2, 0x36, 0x13, 0x6d, 0xc2, 0xbd, 0x03, 0xf5, 0xa8, 0x75, 0xd3, + 0x3b, 0x6b, 0x1d, 0x72, 0x0d, 0xf5, 0x64, 0x0f, 0xbf, 0xd2, 0x98, 0x46, 0x49, 0x41, 0x05, 0xc8, + 0xf6, 0x5a, 0x2f, 0x7b, 0x62, 0x98, 0x44, 0x59, 0x48, 0xab, 0xc7, 0x7b, 0xcf, 0x5a, 0xa5, 0x54, + 0xe3, 0x8f, 0x0a, 0x6c, 0x0e, 0x9c, 0xf1, 0xcd, 0x4e, 0x37, 0xa0, 0x69, 0xb9, 0x61, 0x1b, 0xa5, + 0x7c, 0xf5, 0x23, 0xa9, 0x34, 0x72, 0x2c, 0xdd, 0x1e, 0xd5, 0x1c, 0x6f, 0x54, 0x1f, 0x11, 0x9b, + 0x57, 0xdc, 0xba, 0x98, 0xd2, 0x5d, 0xd3, 0x9f, 0xfb, 0x65, 0xfb, 0x85, 0x61, 0xb9, 0x7f, 0x4d, + 0xde, 0x79, 0x26, 0xa0, 0xfc, 0x9a, 0xab, 0x35, 0x2d, 0xb7, 0x76, 0xb6, 0xfb, 0xaf, 0x50, 0x7a, + 0xce, 0xa5, 0xe7, 0x4d, 0xcb, 0x3d, 0x3f, 0xdb, 0xed, 0xaf, 0x72, 0x83, 0x9f, 0xfd, 0x2f, 0x00, + 0x00, 0xff, 0xff, 0xdc, 0x22, 0xe3, 0x78, 0x02, 0x16, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/pubsub/v1/pubsub.pb.go b/vendor/google.golang.org/genproto/googleapis/pubsub/v1/pubsub.pb.go new file mode 100644 index 0000000..ff8f3a3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/pubsub/v1/pubsub.pb.go @@ -0,0 +1,3888 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/pubsub/v1/pubsub.proto + +package pubsub // import "google.golang.org/genproto/googleapis/pubsub/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type MessageStoragePolicy struct { + // The list of GCP region IDs where messages that are published to the topic + // may be persisted in storage. Messages published by publishers running in + // non-allowed GCP regions (or running outside of GCP altogether) will be + // routed for storage in one of the allowed regions. An empty list indicates a + // misconfiguration at the project or organization level, which will result in + // all Publish operations failing. + AllowedPersistenceRegions []string `protobuf:"bytes,1,rep,name=allowed_persistence_regions,json=allowedPersistenceRegions,proto3" json:"allowed_persistence_regions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageStoragePolicy) Reset() { *m = MessageStoragePolicy{} } +func (m *MessageStoragePolicy) String() string { return proto.CompactTextString(m) } +func (*MessageStoragePolicy) ProtoMessage() {} +func (*MessageStoragePolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{0} +} +func (m *MessageStoragePolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageStoragePolicy.Unmarshal(m, b) +} +func (m *MessageStoragePolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageStoragePolicy.Marshal(b, m, deterministic) +} +func (dst *MessageStoragePolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageStoragePolicy.Merge(dst, src) +} +func (m *MessageStoragePolicy) XXX_Size() int { + return xxx_messageInfo_MessageStoragePolicy.Size(m) +} +func (m *MessageStoragePolicy) XXX_DiscardUnknown() { + xxx_messageInfo_MessageStoragePolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageStoragePolicy proto.InternalMessageInfo + +func (m *MessageStoragePolicy) GetAllowedPersistenceRegions() []string { + if m != nil { + return m.AllowedPersistenceRegions + } + return nil +} + +// A topic resource. +type Topic struct { + // The name of the topic. It must have the format + // `"projects/{project}/topics/{topic}"`. 
`{topic}` must start with a letter, + // and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), + // underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent + // signs (`%`). It must be between 3 and 255 characters in length, and it + // must not start with `"goog"`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // See Creating and + // managing labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Policy constraining how messages published to the topic may be stored. It + // is determined when the topic is created based on the policy configured at + // the project level. It must not be set by the caller in the request to + // CreateTopic or to UpdateTopic. This field will be populated in the + // responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the + // response, then no constraints are in effect. + MessageStoragePolicy *MessageStoragePolicy `protobuf:"bytes,3,opt,name=message_storage_policy,json=messageStoragePolicy,proto3" json:"message_storage_policy,omitempty"` + // The resource name of the Cloud KMS CryptoKey to be used to protect access + // to messages published on this topic. + // + // The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + KmsKeyName string `protobuf:"bytes,5,opt,name=kms_key_name,json=kmsKeyName,proto3" json:"kms_key_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Topic) Reset() { *m = Topic{} } +func (m *Topic) String() string { return proto.CompactTextString(m) } +func (*Topic) ProtoMessage() {} +func (*Topic) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{1} +} +func (m *Topic) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Topic.Unmarshal(m, b) +} +func (m *Topic) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Topic.Marshal(b, m, deterministic) +} +func (dst *Topic) XXX_Merge(src proto.Message) { + xxx_messageInfo_Topic.Merge(dst, src) +} +func (m *Topic) XXX_Size() int { + return xxx_messageInfo_Topic.Size(m) +} +func (m *Topic) XXX_DiscardUnknown() { + xxx_messageInfo_Topic.DiscardUnknown(m) +} + +var xxx_messageInfo_Topic proto.InternalMessageInfo + +func (m *Topic) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Topic) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Topic) GetMessageStoragePolicy() *MessageStoragePolicy { + if m != nil { + return m.MessageStoragePolicy + } + return nil +} + +func (m *Topic) GetKmsKeyName() string { + if m != nil { + return m.KmsKeyName + } + return "" +} + +// A message that is published by publishers and consumed by subscribers. The +// message must contain either a non-empty data field or at least one attribute. +// Note that client libraries represent this object differently +// depending on the language. See the corresponding +// client +// library documentation for more information. See +// Quotas and limits +// for more information about message limits. 
+type PubsubMessage struct { + // The message data field. If this field is empty, the message must contain + // at least one attribute. + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + // Optional attributes for this message. + Attributes map[string]string `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // ID of this message, assigned by the server when the message is published. + // Guaranteed to be unique within the topic. This value may be read by a + // subscriber that receives a `PubsubMessage` via a `Pull` call or a push + // delivery. It must not be populated by the publisher in a `Publish` call. + MessageId string `protobuf:"bytes,3,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + // The time at which the message was published, populated by the server when + // it receives the `Publish` call. It must not be populated by the + // publisher in a `Publish` call. + PublishTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=publish_time,json=publishTime,proto3" json:"publish_time,omitempty"` + // Identifies related messages for which publish order should be respected. + // If a `Subscription` has `enable_message_ordering` set to `true`, messages + // published with the same `ordering_key` value will be delivered to + // subscribers in the order in which they are received by the Pub/Sub system. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + OrderingKey string `protobuf:"bytes,5,opt,name=ordering_key,json=orderingKey,proto3" json:"ordering_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PubsubMessage) Reset() { *m = PubsubMessage{} } +func (m *PubsubMessage) String() string { return proto.CompactTextString(m) } +func (*PubsubMessage) ProtoMessage() {} +func (*PubsubMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{2} +} +func (m *PubsubMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PubsubMessage.Unmarshal(m, b) +} +func (m *PubsubMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PubsubMessage.Marshal(b, m, deterministic) +} +func (dst *PubsubMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_PubsubMessage.Merge(dst, src) +} +func (m *PubsubMessage) XXX_Size() int { + return xxx_messageInfo_PubsubMessage.Size(m) +} +func (m *PubsubMessage) XXX_DiscardUnknown() { + xxx_messageInfo_PubsubMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_PubsubMessage proto.InternalMessageInfo + +func (m *PubsubMessage) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *PubsubMessage) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +func (m *PubsubMessage) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +func (m *PubsubMessage) GetPublishTime() *timestamp.Timestamp { + if m != nil { + return m.PublishTime + } + return nil +} + +func (m *PubsubMessage) GetOrderingKey() string { + if m != nil { + return m.OrderingKey + } + return "" +} + +// Request for the GetTopic method. +type GetTopicRequest struct { + // The name of the topic to get. 
+ // Format is `projects/{project}/topics/{topic}`. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopicRequest) Reset() { *m = GetTopicRequest{} } +func (m *GetTopicRequest) String() string { return proto.CompactTextString(m) } +func (*GetTopicRequest) ProtoMessage() {} +func (*GetTopicRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{3} +} +func (m *GetTopicRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopicRequest.Unmarshal(m, b) +} +func (m *GetTopicRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopicRequest.Marshal(b, m, deterministic) +} +func (dst *GetTopicRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopicRequest.Merge(dst, src) +} +func (m *GetTopicRequest) XXX_Size() int { + return xxx_messageInfo_GetTopicRequest.Size(m) +} +func (m *GetTopicRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopicRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopicRequest proto.InternalMessageInfo + +func (m *GetTopicRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +// Request for the UpdateTopic method. +type UpdateTopicRequest struct { + // The updated topic object. + Topic *Topic `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // Indicates which fields in the provided topic to update. Must be specified + // and non-empty. Note that if `update_mask` contains + // "message_storage_policy" then the new value will be determined based on the + // policy configured at the project or organization level. The + // `message_storage_policy` must not be set in the `topic` provided above. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTopicRequest) Reset() { *m = UpdateTopicRequest{} } +func (m *UpdateTopicRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTopicRequest) ProtoMessage() {} +func (*UpdateTopicRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{4} +} +func (m *UpdateTopicRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTopicRequest.Unmarshal(m, b) +} +func (m *UpdateTopicRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTopicRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTopicRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTopicRequest.Merge(dst, src) +} +func (m *UpdateTopicRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTopicRequest.Size(m) +} +func (m *UpdateTopicRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTopicRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTopicRequest proto.InternalMessageInfo + +func (m *UpdateTopicRequest) GetTopic() *Topic { + if m != nil { + return m.Topic + } + return nil +} + +func (m *UpdateTopicRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request for the Publish method. +type PublishRequest struct { + // The messages in the request will be published on this topic. + // Format is `projects/{project}/topics/{topic}`. 
+ Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // The messages to publish. + Messages []*PubsubMessage `protobuf:"bytes,2,rep,name=messages,proto3" json:"messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishRequest) Reset() { *m = PublishRequest{} } +func (m *PublishRequest) String() string { return proto.CompactTextString(m) } +func (*PublishRequest) ProtoMessage() {} +func (*PublishRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{5} +} +func (m *PublishRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishRequest.Unmarshal(m, b) +} +func (m *PublishRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishRequest.Marshal(b, m, deterministic) +} +func (dst *PublishRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishRequest.Merge(dst, src) +} +func (m *PublishRequest) XXX_Size() int { + return xxx_messageInfo_PublishRequest.Size(m) +} +func (m *PublishRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PublishRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishRequest proto.InternalMessageInfo + +func (m *PublishRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *PublishRequest) GetMessages() []*PubsubMessage { + if m != nil { + return m.Messages + } + return nil +} + +// Response for the `Publish` method. +type PublishResponse struct { + // The server-assigned ID of each published message, in the same order as + // the messages in the request. IDs are guaranteed to be unique within + // the topic. + MessageIds []string `protobuf:"bytes,1,rep,name=message_ids,json=messageIds,proto3" json:"message_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishResponse) Reset() { *m = PublishResponse{} } +func (m *PublishResponse) String() string { return proto.CompactTextString(m) } +func (*PublishResponse) ProtoMessage() {} +func (*PublishResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{6} +} +func (m *PublishResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishResponse.Unmarshal(m, b) +} +func (m *PublishResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishResponse.Marshal(b, m, deterministic) +} +func (dst *PublishResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishResponse.Merge(dst, src) +} +func (m *PublishResponse) XXX_Size() int { + return xxx_messageInfo_PublishResponse.Size(m) +} +func (m *PublishResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PublishResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishResponse proto.InternalMessageInfo + +func (m *PublishResponse) GetMessageIds() []string { + if m != nil { + return m.MessageIds + } + return nil +} + +// Request for the `ListTopics` method. +type ListTopicsRequest struct { + // The name of the project in which to list topics. + // Format is `projects/{project-id}`. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Maximum number of topics to return. 
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListTopicsResponse`; indicates that this is + // a continuation of a prior `ListTopics` call, and that the system should + // return the next page of data. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicsRequest) Reset() { *m = ListTopicsRequest{} } +func (m *ListTopicsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTopicsRequest) ProtoMessage() {} +func (*ListTopicsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{7} +} +func (m *ListTopicsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicsRequest.Unmarshal(m, b) +} +func (m *ListTopicsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTopicsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicsRequest.Merge(dst, src) +} +func (m *ListTopicsRequest) XXX_Size() int { + return xxx_messageInfo_ListTopicsRequest.Size(m) +} +func (m *ListTopicsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicsRequest proto.InternalMessageInfo + +func (m *ListTopicsRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListTopicsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTopicsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListTopics` method. +type ListTopicsResponse struct { + // The resulting topics. + Topics []*Topic `protobuf:"bytes,1,rep,name=topics,proto3" json:"topics,omitempty"` + // If not empty, indicates that there may be more topics that match the + // request; this value should be passed in a new `ListTopicsRequest`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicsResponse) Reset() { *m = ListTopicsResponse{} } +func (m *ListTopicsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTopicsResponse) ProtoMessage() {} +func (*ListTopicsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{8} +} +func (m *ListTopicsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicsResponse.Unmarshal(m, b) +} +func (m *ListTopicsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTopicsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicsResponse.Merge(dst, src) +} +func (m *ListTopicsResponse) XXX_Size() int { + return xxx_messageInfo_ListTopicsResponse.Size(m) +} +func (m *ListTopicsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicsResponse proto.InternalMessageInfo + +func (m *ListTopicsResponse) GetTopics() []*Topic { + if m != nil { + return m.Topics + } + return nil +} + +func (m *ListTopicsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `ListTopicSubscriptions` method. +type ListTopicSubscriptionsRequest struct { + // The name of the topic that subscriptions are attached to. + // Format is `projects/{project}/topics/{topic}`. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // Maximum number of subscription names to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListTopicSubscriptionsResponse`; indicates + // that this is a continuation of a prior `ListTopicSubscriptions` call, and + // that the system should return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSubscriptionsRequest) Reset() { *m = ListTopicSubscriptionsRequest{} } +func (m *ListTopicSubscriptionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTopicSubscriptionsRequest) ProtoMessage() {} +func (*ListTopicSubscriptionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{9} +} +func (m *ListTopicSubscriptionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Unmarshal(m, b) +} +func (m *ListTopicSubscriptionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTopicSubscriptionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSubscriptionsRequest.Merge(dst, src) +} +func (m *ListTopicSubscriptionsRequest) XXX_Size() int { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Size(m) +} +func (m *ListTopicSubscriptionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSubscriptionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSubscriptionsRequest proto.InternalMessageInfo + +func (m *ListTopicSubscriptionsRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *ListTopicSubscriptionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTopicSubscriptionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListTopicSubscriptions` method. +type ListTopicSubscriptionsResponse struct { + // The names of the subscriptions that match the request. + Subscriptions []string `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new + // `ListTopicSubscriptionsRequest` to get more subscriptions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSubscriptionsResponse) Reset() { *m = ListTopicSubscriptionsResponse{} } +func (m *ListTopicSubscriptionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTopicSubscriptionsResponse) ProtoMessage() {} +func (*ListTopicSubscriptionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{10} +} +func (m *ListTopicSubscriptionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Unmarshal(m, b) +} +func (m *ListTopicSubscriptionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTopicSubscriptionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSubscriptionsResponse.Merge(dst, src) +} +func (m *ListTopicSubscriptionsResponse) XXX_Size() int { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Size(m) +} +func (m *ListTopicSubscriptionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSubscriptionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSubscriptionsResponse proto.InternalMessageInfo + +func (m *ListTopicSubscriptionsResponse) GetSubscriptions() []string { + if m != nil { + return m.Subscriptions + } + return nil +} + +func (m *ListTopicSubscriptionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `ListTopicSnapshots` method. +type ListTopicSnapshotsRequest struct { + // The name of the topic that snapshots are attached to. + // Format is `projects/{project}/topics/{topic}`. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // Maximum number of snapshot names to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListTopicSnapshotsResponse`; indicates + // that this is a continuation of a prior `ListTopicSnapshots` call, and + // that the system should return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSnapshotsRequest) Reset() { *m = ListTopicSnapshotsRequest{} } +func (m *ListTopicSnapshotsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTopicSnapshotsRequest) ProtoMessage() {} +func (*ListTopicSnapshotsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{11} +} +func (m *ListTopicSnapshotsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSnapshotsRequest.Unmarshal(m, b) +} +func (m *ListTopicSnapshotsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSnapshotsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTopicSnapshotsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSnapshotsRequest.Merge(dst, src) +} +func (m *ListTopicSnapshotsRequest) XXX_Size() int { + return xxx_messageInfo_ListTopicSnapshotsRequest.Size(m) +} +func (m *ListTopicSnapshotsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSnapshotsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSnapshotsRequest proto.InternalMessageInfo + +func (m *ListTopicSnapshotsRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *ListTopicSnapshotsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTopicSnapshotsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListTopicSnapshots` method. +type ListTopicSnapshotsResponse struct { + // The names of the snapshots that match the request. + Snapshots []string `protobuf:"bytes,1,rep,name=snapshots,proto3" json:"snapshots,omitempty"` + // If not empty, indicates that there may be more snapshots that match + // the request; this value should be passed in a new + // `ListTopicSnapshotsRequest` to get more snapshots. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSnapshotsResponse) Reset() { *m = ListTopicSnapshotsResponse{} } +func (m *ListTopicSnapshotsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTopicSnapshotsResponse) ProtoMessage() {} +func (*ListTopicSnapshotsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{12} +} +func (m *ListTopicSnapshotsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSnapshotsResponse.Unmarshal(m, b) +} +func (m *ListTopicSnapshotsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSnapshotsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTopicSnapshotsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSnapshotsResponse.Merge(dst, src) +} +func (m *ListTopicSnapshotsResponse) XXX_Size() int { + return xxx_messageInfo_ListTopicSnapshotsResponse.Size(m) +} +func (m *ListTopicSnapshotsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSnapshotsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSnapshotsResponse proto.InternalMessageInfo + +func (m *ListTopicSnapshotsResponse) GetSnapshots() []string { + if m != nil { + return m.Snapshots + } + return nil +} + +func (m *ListTopicSnapshotsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `DeleteTopic` method. +type DeleteTopicRequest struct { + // Name of the topic to delete. + // Format is `projects/{project}/topics/{topic}`. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTopicRequest) Reset() { *m = DeleteTopicRequest{} } +func (m *DeleteTopicRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTopicRequest) ProtoMessage() {} +func (*DeleteTopicRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{13} +} +func (m *DeleteTopicRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTopicRequest.Unmarshal(m, b) +} +func (m *DeleteTopicRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTopicRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTopicRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTopicRequest.Merge(dst, src) +} +func (m *DeleteTopicRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTopicRequest.Size(m) +} +func (m *DeleteTopicRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTopicRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTopicRequest proto.InternalMessageInfo + +func (m *DeleteTopicRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +// A subscription resource. +type Subscription struct { + // The name of the subscription. It must have the format + // `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must + // start with a letter, and contain only letters (`[A-Za-z]`), numbers + // (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), + // plus (`+`) or percent signs (`%`). 
It must be between 3 and 255 characters + // in length, and it must not start with `"goog"`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the topic from which this subscription is receiving messages. + // Format is `projects/{project}/topics/{topic}`. + // The value of this field will be `_deleted-topic_` if the topic has been + // deleted. + Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` + // If push delivery is used with this subscription, this field is + // used to configure it. An empty `pushConfig` signifies that the subscriber + // will pull and ack messages using API methods. + PushConfig *PushConfig `protobuf:"bytes,4,opt,name=push_config,json=pushConfig,proto3" json:"push_config,omitempty"` + // The approximate amount of time (on a best-effort basis) Pub/Sub waits for + // the subscriber to acknowledge receipt before resending the message. In the + // interval after the message is delivered and before it is acknowledged, it + // is considered to be outstanding. During that time period, the + // message will not be redelivered (on a best-effort basis). + // + // For pull subscriptions, this value is used as the initial value for the ack + // deadline. To override this value for a given message, call + // `ModifyAckDeadline` with the corresponding `ack_id` if using + // non-streaming pull or send the `ack_id` in a + // `StreamingModifyAckDeadlineRequest` if using streaming pull. + // The minimum custom deadline you can specify is 10 seconds. + // The maximum custom deadline you can specify is 600 seconds (10 minutes). + // If this parameter is 0, a default value of 10 seconds is used. + // + // For push delivery, this value is also used to set the request timeout for + // the call to the push endpoint. + // + // If the subscriber never acknowledges the message, the Pub/Sub + // system will eventually redeliver the message. + AckDeadlineSeconds int32 `protobuf:"varint,5,opt,name=ack_deadline_seconds,json=ackDeadlineSeconds,proto3" json:"ack_deadline_seconds,omitempty"` + // Indicates whether to retain acknowledged messages. If true, then + // messages are not expunged from the subscription's backlog, even if they are + // acknowledged, until they fall out of the `message_retention_duration` + // window. This must be true if you would like to + // + // Seek to a timestamp. + RetainAckedMessages bool `protobuf:"varint,7,opt,name=retain_acked_messages,json=retainAckedMessages,proto3" json:"retain_acked_messages,omitempty"` + // How long to retain unacknowledged messages in the subscription's backlog, + // from the moment a message is published. + // If `retain_acked_messages` is true, then this also configures the retention + // of acknowledged messages, and thus configures how far back in time a `Seek` + // can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + // minutes. + MessageRetentionDuration *duration.Duration `protobuf:"bytes,8,opt,name=message_retention_duration,json=messageRetentionDuration,proto3" json:"message_retention_duration,omitempty"` + // See Creating and + // managing labels. + Labels map[string]string `protobuf:"bytes,9,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // If true, messages published with the same `ordering_key` in `PubsubMessage` + // will be delivered to the subscribers in the order in which they + // are received by the Pub/Sub system. 
Otherwise, they may be delivered in + // any order. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + EnableMessageOrdering bool `protobuf:"varint,10,opt,name=enable_message_ordering,json=enableMessageOrdering,proto3" json:"enable_message_ordering,omitempty"` + // A policy that specifies the conditions for this subscription's expiration. + // A subscription is considered active as long as any connected subscriber is + // successfully consuming messages from the subscription or is issuing + // operations on the subscription. If `expiration_policy` is not set, a + // *default policy* with `ttl` of 31 days will be used. The minimum allowed + // value for `expiration_policy.ttl` is 1 day. + ExpirationPolicy *ExpirationPolicy `protobuf:"bytes,11,opt,name=expiration_policy,json=expirationPolicy,proto3" json:"expiration_policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Subscription) Reset() { *m = Subscription{} } +func (m *Subscription) String() string { return proto.CompactTextString(m) } +func (*Subscription) ProtoMessage() {} +func (*Subscription) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{14} +} +func (m *Subscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Subscription.Unmarshal(m, b) +} +func (m *Subscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Subscription.Marshal(b, m, deterministic) +} +func (dst *Subscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_Subscription.Merge(dst, src) +} +func (m *Subscription) XXX_Size() int { + return xxx_messageInfo_Subscription.Size(m) +} +func (m *Subscription) XXX_DiscardUnknown() { + xxx_messageInfo_Subscription.DiscardUnknown(m) +} + +var xxx_messageInfo_Subscription proto.InternalMessageInfo + +func (m *Subscription) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Subscription) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *Subscription) GetPushConfig() *PushConfig { + if m != nil { + return m.PushConfig + } + return nil +} + +func (m *Subscription) GetAckDeadlineSeconds() int32 { + if m != nil { + return m.AckDeadlineSeconds + } + return 0 +} + +func (m *Subscription) GetRetainAckedMessages() bool { + if m != nil { + return m.RetainAckedMessages + } + return false +} + +func (m *Subscription) GetMessageRetentionDuration() *duration.Duration { + if m != nil { + return m.MessageRetentionDuration + } + return nil +} + +func (m *Subscription) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Subscription) GetEnableMessageOrdering() bool { + if m != nil { + return m.EnableMessageOrdering + } + return false +} + +func (m *Subscription) GetExpirationPolicy() *ExpirationPolicy { + if m != nil { + return m.ExpirationPolicy + } + return nil +} + +// A policy that specifies the conditions for resource expiration (i.e., +// automatic resource deletion). +type ExpirationPolicy struct { + // Specifies the "time-to-live" duration for an associated resource. The + // resource expires if it is not active for a period of `ttl`. The definition + // of "activity" depends on the type of the associated resource. 
The minimum + // and maximum allowed values for `ttl` depend on the type of the associated + // resource, as well. If `ttl` is not set, the associated resource never + // expires. + Ttl *duration.Duration `protobuf:"bytes,1,opt,name=ttl,proto3" json:"ttl,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExpirationPolicy) Reset() { *m = ExpirationPolicy{} } +func (m *ExpirationPolicy) String() string { return proto.CompactTextString(m) } +func (*ExpirationPolicy) ProtoMessage() {} +func (*ExpirationPolicy) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{15} +} +func (m *ExpirationPolicy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExpirationPolicy.Unmarshal(m, b) +} +func (m *ExpirationPolicy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExpirationPolicy.Marshal(b, m, deterministic) +} +func (dst *ExpirationPolicy) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExpirationPolicy.Merge(dst, src) +} +func (m *ExpirationPolicy) XXX_Size() int { + return xxx_messageInfo_ExpirationPolicy.Size(m) +} +func (m *ExpirationPolicy) XXX_DiscardUnknown() { + xxx_messageInfo_ExpirationPolicy.DiscardUnknown(m) +} + +var xxx_messageInfo_ExpirationPolicy proto.InternalMessageInfo + +func (m *ExpirationPolicy) GetTtl() *duration.Duration { + if m != nil { + return m.Ttl + } + return nil +} + +// Configuration for a push delivery endpoint. +type PushConfig struct { + // A URL locating the endpoint to which messages should be pushed. + // For example, a Webhook endpoint might use "https://example.com/push". + PushEndpoint string `protobuf:"bytes,1,opt,name=push_endpoint,json=pushEndpoint,proto3" json:"push_endpoint,omitempty"` + // Endpoint configuration attributes. + // + // Every endpoint has a set of API supported attributes that can be used to + // control different aspects of the message delivery. + // + // The currently supported attribute is `x-goog-version`, which you can + // use to change the format of the pushed message. This attribute + // indicates the version of the data expected by the endpoint. This + // controls the shape of the pushed message (i.e., its fields and metadata). + // The endpoint version is based on the version of the Pub/Sub API. + // + // If not present during the `CreateSubscription` call, it will default to + // the version of the API used to make such call. If not present during a + // `ModifyPushConfig` call, its value will not be changed. `GetSubscription` + // calls will always return a valid version, even if the subscription was + // created without this attribute. + // + // The possible values for this attribute are: + // + // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. + // * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub API. + Attributes map[string]string `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // An authentication method used by push endpoints to verify the source of + // push requests. This can be used with push endpoints that are private by + // default to allow requests only from the Cloud Pub/Sub system, for example. + // This field is optional and should be set only by users interested in + // authenticated push. 
+ // + // Types that are valid to be assigned to AuthenticationMethod: + // *PushConfig_OidcToken_ + AuthenticationMethod isPushConfig_AuthenticationMethod `protobuf_oneof:"authentication_method"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PushConfig) Reset() { *m = PushConfig{} } +func (m *PushConfig) String() string { return proto.CompactTextString(m) } +func (*PushConfig) ProtoMessage() {} +func (*PushConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{16} +} +func (m *PushConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PushConfig.Unmarshal(m, b) +} +func (m *PushConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PushConfig.Marshal(b, m, deterministic) +} +func (dst *PushConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PushConfig.Merge(dst, src) +} +func (m *PushConfig) XXX_Size() int { + return xxx_messageInfo_PushConfig.Size(m) +} +func (m *PushConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PushConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PushConfig proto.InternalMessageInfo + +func (m *PushConfig) GetPushEndpoint() string { + if m != nil { + return m.PushEndpoint + } + return "" +} + +func (m *PushConfig) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +type isPushConfig_AuthenticationMethod interface { + isPushConfig_AuthenticationMethod() +} + +type PushConfig_OidcToken_ struct { + OidcToken *PushConfig_OidcToken `protobuf:"bytes,3,opt,name=oidc_token,json=oidcToken,proto3,oneof"` +} + +func (*PushConfig_OidcToken_) isPushConfig_AuthenticationMethod() {} + +func (m *PushConfig) GetAuthenticationMethod() isPushConfig_AuthenticationMethod { + if m != nil { + return m.AuthenticationMethod + } + return nil +} + +func (m *PushConfig) GetOidcToken() *PushConfig_OidcToken { + if x, ok := m.GetAuthenticationMethod().(*PushConfig_OidcToken_); ok { + return x.OidcToken + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*PushConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PushConfig_OneofMarshaler, _PushConfig_OneofUnmarshaler, _PushConfig_OneofSizer, []interface{}{ + (*PushConfig_OidcToken_)(nil), + } +} + +func _PushConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PushConfig) + // authentication_method + switch x := m.AuthenticationMethod.(type) { + case *PushConfig_OidcToken_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OidcToken); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PushConfig.AuthenticationMethod has unexpected type %T", x) + } + return nil +} + +func _PushConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PushConfig) + switch tag { + case 3: // authentication_method.oidc_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PushConfig_OidcToken) + err := b.DecodeMessage(msg) + m.AuthenticationMethod = &PushConfig_OidcToken_{msg} + return true, err + default: + return false, nil + } +} + +func _PushConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PushConfig) + // authentication_method + switch x := m.AuthenticationMethod.(type) { + case *PushConfig_OidcToken_: + s := proto.Size(x.OidcToken) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). +type PushConfig_OidcToken struct { + // [Service account + // email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating the OIDC token. The caller (for + // CreateSubscription, UpdateSubscription, and ModifyPushConfig RPCs) must + // have the iam.serviceAccounts.actAs permission for the service account. + ServiceAccountEmail string `protobuf:"bytes,1,opt,name=service_account_email,json=serviceAccountEmail,proto3" json:"service_account_email,omitempty"` + // Audience to be used when generating OIDC token. The audience claim + // identifies the recipients that the JWT is intended for. The audience + // value is a single case-sensitive string. Having multiple values (array) + // for the audience field is not supported. More info about the OIDC JWT + // token audience here: https://tools.ietf.org/html/rfc7519#section-4.1.3 + // Note: if not specified, the Push endpoint URL will be used. 
+ Audience string `protobuf:"bytes,2,opt,name=audience,proto3" json:"audience,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PushConfig_OidcToken) Reset() { *m = PushConfig_OidcToken{} } +func (m *PushConfig_OidcToken) String() string { return proto.CompactTextString(m) } +func (*PushConfig_OidcToken) ProtoMessage() {} +func (*PushConfig_OidcToken) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{16, 0} +} +func (m *PushConfig_OidcToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PushConfig_OidcToken.Unmarshal(m, b) +} +func (m *PushConfig_OidcToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PushConfig_OidcToken.Marshal(b, m, deterministic) +} +func (dst *PushConfig_OidcToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_PushConfig_OidcToken.Merge(dst, src) +} +func (m *PushConfig_OidcToken) XXX_Size() int { + return xxx_messageInfo_PushConfig_OidcToken.Size(m) +} +func (m *PushConfig_OidcToken) XXX_DiscardUnknown() { + xxx_messageInfo_PushConfig_OidcToken.DiscardUnknown(m) +} + +var xxx_messageInfo_PushConfig_OidcToken proto.InternalMessageInfo + +func (m *PushConfig_OidcToken) GetServiceAccountEmail() string { + if m != nil { + return m.ServiceAccountEmail + } + return "" +} + +func (m *PushConfig_OidcToken) GetAudience() string { + if m != nil { + return m.Audience + } + return "" +} + +// A message and its corresponding acknowledgment ID. +type ReceivedMessage struct { + // This ID can be used to acknowledge the received message. + AckId string `protobuf:"bytes,1,opt,name=ack_id,json=ackId,proto3" json:"ack_id,omitempty"` + // The message. + Message *PubsubMessage `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReceivedMessage) Reset() { *m = ReceivedMessage{} } +func (m *ReceivedMessage) String() string { return proto.CompactTextString(m) } +func (*ReceivedMessage) ProtoMessage() {} +func (*ReceivedMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{17} +} +func (m *ReceivedMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReceivedMessage.Unmarshal(m, b) +} +func (m *ReceivedMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReceivedMessage.Marshal(b, m, deterministic) +} +func (dst *ReceivedMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReceivedMessage.Merge(dst, src) +} +func (m *ReceivedMessage) XXX_Size() int { + return xxx_messageInfo_ReceivedMessage.Size(m) +} +func (m *ReceivedMessage) XXX_DiscardUnknown() { + xxx_messageInfo_ReceivedMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_ReceivedMessage proto.InternalMessageInfo + +func (m *ReceivedMessage) GetAckId() string { + if m != nil { + return m.AckId + } + return "" +} + +func (m *ReceivedMessage) GetMessage() *PubsubMessage { + if m != nil { + return m.Message + } + return nil +} + +// Request for the GetSubscription method. +type GetSubscriptionRequest struct { + // The name of the subscription to get. + // Format is `projects/{project}/subscriptions/{sub}`. 
+ Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSubscriptionRequest) Reset() { *m = GetSubscriptionRequest{} } +func (m *GetSubscriptionRequest) String() string { return proto.CompactTextString(m) } +func (*GetSubscriptionRequest) ProtoMessage() {} +func (*GetSubscriptionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{18} +} +func (m *GetSubscriptionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSubscriptionRequest.Unmarshal(m, b) +} +func (m *GetSubscriptionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSubscriptionRequest.Marshal(b, m, deterministic) +} +func (dst *GetSubscriptionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSubscriptionRequest.Merge(dst, src) +} +func (m *GetSubscriptionRequest) XXX_Size() int { + return xxx_messageInfo_GetSubscriptionRequest.Size(m) +} +func (m *GetSubscriptionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSubscriptionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSubscriptionRequest proto.InternalMessageInfo + +func (m *GetSubscriptionRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +// Request for the UpdateSubscription method. +type UpdateSubscriptionRequest struct { + // The updated subscription object. + Subscription *Subscription `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // Indicates which fields in the provided subscription to update. + // Must be specified and non-empty. + UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSubscriptionRequest) Reset() { *m = UpdateSubscriptionRequest{} } +func (m *UpdateSubscriptionRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSubscriptionRequest) ProtoMessage() {} +func (*UpdateSubscriptionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{19} +} +func (m *UpdateSubscriptionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSubscriptionRequest.Unmarshal(m, b) +} +func (m *UpdateSubscriptionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSubscriptionRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSubscriptionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSubscriptionRequest.Merge(dst, src) +} +func (m *UpdateSubscriptionRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSubscriptionRequest.Size(m) +} +func (m *UpdateSubscriptionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSubscriptionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSubscriptionRequest proto.InternalMessageInfo + +func (m *UpdateSubscriptionRequest) GetSubscription() *Subscription { + if m != nil { + return m.Subscription + } + return nil +} + +func (m *UpdateSubscriptionRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request for the `ListSubscriptions` method. +type ListSubscriptionsRequest struct { + // The name of the project in which to list subscriptions. 
+ // Format is `projects/{project-id}`. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Maximum number of subscriptions to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListSubscriptionsResponse`; indicates that + // this is a continuation of a prior `ListSubscriptions` call, and that the + // system should return the next page of data. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSubscriptionsRequest) Reset() { *m = ListSubscriptionsRequest{} } +func (m *ListSubscriptionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSubscriptionsRequest) ProtoMessage() {} +func (*ListSubscriptionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{20} +} +func (m *ListSubscriptionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSubscriptionsRequest.Unmarshal(m, b) +} +func (m *ListSubscriptionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSubscriptionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSubscriptionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSubscriptionsRequest.Merge(dst, src) +} +func (m *ListSubscriptionsRequest) XXX_Size() int { + return xxx_messageInfo_ListSubscriptionsRequest.Size(m) +} +func (m *ListSubscriptionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSubscriptionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSubscriptionsRequest proto.InternalMessageInfo + +func (m *ListSubscriptionsRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListSubscriptionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSubscriptionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListSubscriptions` method. +type ListSubscriptionsResponse struct { + // The subscriptions that match the request. + Subscriptions []*Subscription `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new + // `ListSubscriptionsRequest` to get more subscriptions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSubscriptionsResponse) Reset() { *m = ListSubscriptionsResponse{} } +func (m *ListSubscriptionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListSubscriptionsResponse) ProtoMessage() {} +func (*ListSubscriptionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{21} +} +func (m *ListSubscriptionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSubscriptionsResponse.Unmarshal(m, b) +} +func (m *ListSubscriptionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSubscriptionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListSubscriptionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSubscriptionsResponse.Merge(dst, src) +} +func (m *ListSubscriptionsResponse) XXX_Size() int { + return xxx_messageInfo_ListSubscriptionsResponse.Size(m) +} +func (m *ListSubscriptionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSubscriptionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSubscriptionsResponse proto.InternalMessageInfo + +func (m *ListSubscriptionsResponse) GetSubscriptions() []*Subscription { + if m != nil { + return m.Subscriptions + } + return nil +} + +func (m *ListSubscriptionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the DeleteSubscription method. +type DeleteSubscriptionRequest struct { + // The subscription to delete. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSubscriptionRequest) Reset() { *m = DeleteSubscriptionRequest{} } +func (m *DeleteSubscriptionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSubscriptionRequest) ProtoMessage() {} +func (*DeleteSubscriptionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{22} +} +func (m *DeleteSubscriptionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSubscriptionRequest.Unmarshal(m, b) +} +func (m *DeleteSubscriptionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSubscriptionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSubscriptionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSubscriptionRequest.Merge(dst, src) +} +func (m *DeleteSubscriptionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSubscriptionRequest.Size(m) +} +func (m *DeleteSubscriptionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSubscriptionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSubscriptionRequest proto.InternalMessageInfo + +func (m *DeleteSubscriptionRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +// Request for the ModifyPushConfig method. +type ModifyPushConfigRequest struct { + // The name of the subscription. + // Format is `projects/{project}/subscriptions/{sub}`. 
+ Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // The push configuration for future deliveries. + // + // An empty `pushConfig` indicates that the Pub/Sub system should + // stop pushing messages from the given subscription and allow + // messages to be pulled and acknowledged - effectively pausing + // the subscription if `Pull` or `StreamingPull` is not called. + PushConfig *PushConfig `protobuf:"bytes,2,opt,name=push_config,json=pushConfig,proto3" json:"push_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyPushConfigRequest) Reset() { *m = ModifyPushConfigRequest{} } +func (m *ModifyPushConfigRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyPushConfigRequest) ProtoMessage() {} +func (*ModifyPushConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{23} +} +func (m *ModifyPushConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyPushConfigRequest.Unmarshal(m, b) +} +func (m *ModifyPushConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyPushConfigRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyPushConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyPushConfigRequest.Merge(dst, src) +} +func (m *ModifyPushConfigRequest) XXX_Size() int { + return xxx_messageInfo_ModifyPushConfigRequest.Size(m) +} +func (m *ModifyPushConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyPushConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyPushConfigRequest proto.InternalMessageInfo + +func (m *ModifyPushConfigRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *ModifyPushConfigRequest) GetPushConfig() *PushConfig { + if m != nil { + return m.PushConfig + } + return nil +} + +// Request for the `Pull` method. +type PullRequest struct { + // The subscription from which messages should be pulled. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // If this field set to true, the system will respond immediately even if + // it there are no messages available to return in the `Pull` response. + // Otherwise, the system may wait (for a bounded amount of time) until at + // least one message is available, rather than returning no messages. + ReturnImmediately bool `protobuf:"varint,2,opt,name=return_immediately,json=returnImmediately,proto3" json:"return_immediately,omitempty"` + // The maximum number of messages returned for this request. The Pub/Sub + // system may return fewer than the number specified. 
+ MaxMessages int32 `protobuf:"varint,3,opt,name=max_messages,json=maxMessages,proto3" json:"max_messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullRequest) Reset() { *m = PullRequest{} } +func (m *PullRequest) String() string { return proto.CompactTextString(m) } +func (*PullRequest) ProtoMessage() {} +func (*PullRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{24} +} +func (m *PullRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullRequest.Unmarshal(m, b) +} +func (m *PullRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullRequest.Marshal(b, m, deterministic) +} +func (dst *PullRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullRequest.Merge(dst, src) +} +func (m *PullRequest) XXX_Size() int { + return xxx_messageInfo_PullRequest.Size(m) +} +func (m *PullRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PullRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PullRequest proto.InternalMessageInfo + +func (m *PullRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *PullRequest) GetReturnImmediately() bool { + if m != nil { + return m.ReturnImmediately + } + return false +} + +func (m *PullRequest) GetMaxMessages() int32 { + if m != nil { + return m.MaxMessages + } + return 0 +} + +// Response for the `Pull` method. +type PullResponse struct { + // Received Pub/Sub messages. The list will be empty if there are no more + // messages available in the backlog. For JSON, the response can be entirely + // empty. The Pub/Sub system may return fewer than the `maxMessages` requested + // even if there are more messages available in the backlog. + ReceivedMessages []*ReceivedMessage `protobuf:"bytes,1,rep,name=received_messages,json=receivedMessages,proto3" json:"received_messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullResponse) Reset() { *m = PullResponse{} } +func (m *PullResponse) String() string { return proto.CompactTextString(m) } +func (*PullResponse) ProtoMessage() {} +func (*PullResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{25} +} +func (m *PullResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullResponse.Unmarshal(m, b) +} +func (m *PullResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullResponse.Marshal(b, m, deterministic) +} +func (dst *PullResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullResponse.Merge(dst, src) +} +func (m *PullResponse) XXX_Size() int { + return xxx_messageInfo_PullResponse.Size(m) +} +func (m *PullResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PullResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PullResponse proto.InternalMessageInfo + +func (m *PullResponse) GetReceivedMessages() []*ReceivedMessage { + if m != nil { + return m.ReceivedMessages + } + return nil +} + +// Request for the ModifyAckDeadline method. +type ModifyAckDeadlineRequest struct { + // The name of the subscription. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // List of acknowledgment IDs. 
+ AckIds []string `protobuf:"bytes,4,rep,name=ack_ids,json=ackIds,proto3" json:"ack_ids,omitempty"` + // The new ack deadline with respect to the time this request was sent to + // the Pub/Sub system. For example, if the value is 10, the new + // ack deadline will expire 10 seconds after the `ModifyAckDeadline` call + // was made. Specifying zero might immediately make the message available for + // delivery to another subscriber client. This typically results in an + // increase in the rate of message redeliveries (that is, duplicates). + // The minimum deadline you can specify is 0 seconds. + // The maximum deadline you can specify is 600 seconds (10 minutes). + AckDeadlineSeconds int32 `protobuf:"varint,3,opt,name=ack_deadline_seconds,json=ackDeadlineSeconds,proto3" json:"ack_deadline_seconds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyAckDeadlineRequest) Reset() { *m = ModifyAckDeadlineRequest{} } +func (m *ModifyAckDeadlineRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyAckDeadlineRequest) ProtoMessage() {} +func (*ModifyAckDeadlineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{26} +} +func (m *ModifyAckDeadlineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyAckDeadlineRequest.Unmarshal(m, b) +} +func (m *ModifyAckDeadlineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyAckDeadlineRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyAckDeadlineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyAckDeadlineRequest.Merge(dst, src) +} +func (m *ModifyAckDeadlineRequest) XXX_Size() int { + return xxx_messageInfo_ModifyAckDeadlineRequest.Size(m) +} +func (m *ModifyAckDeadlineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyAckDeadlineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyAckDeadlineRequest proto.InternalMessageInfo + +func (m *ModifyAckDeadlineRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *ModifyAckDeadlineRequest) GetAckIds() []string { + if m != nil { + return m.AckIds + } + return nil +} + +func (m *ModifyAckDeadlineRequest) GetAckDeadlineSeconds() int32 { + if m != nil { + return m.AckDeadlineSeconds + } + return 0 +} + +// Request for the Acknowledge method. +type AcknowledgeRequest struct { + // The subscription whose message is being acknowledged. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // The acknowledgment ID for the messages being acknowledged that was returned + // by the Pub/Sub system in the `Pull` response. Must not be empty. 
+ AckIds []string `protobuf:"bytes,2,rep,name=ack_ids,json=ackIds,proto3" json:"ack_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcknowledgeRequest) Reset() { *m = AcknowledgeRequest{} } +func (m *AcknowledgeRequest) String() string { return proto.CompactTextString(m) } +func (*AcknowledgeRequest) ProtoMessage() {} +func (*AcknowledgeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{27} +} +func (m *AcknowledgeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcknowledgeRequest.Unmarshal(m, b) +} +func (m *AcknowledgeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcknowledgeRequest.Marshal(b, m, deterministic) +} +func (dst *AcknowledgeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcknowledgeRequest.Merge(dst, src) +} +func (m *AcknowledgeRequest) XXX_Size() int { + return xxx_messageInfo_AcknowledgeRequest.Size(m) +} +func (m *AcknowledgeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AcknowledgeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AcknowledgeRequest proto.InternalMessageInfo + +func (m *AcknowledgeRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *AcknowledgeRequest) GetAckIds() []string { + if m != nil { + return m.AckIds + } + return nil +} + +// Request for the `StreamingPull` streaming RPC method. This request is used to +// establish the initial stream as well as to stream acknowledgements and ack +// deadline modifications from the client to the server. +type StreamingPullRequest struct { + // The subscription for which to initialize the new stream. This must be + // provided in the first request on the stream, and must not be set in + // subsequent requests from client to server. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // List of acknowledgement IDs for acknowledging previously received messages + // (received on this stream or a different stream). If an ack ID has expired, + // the corresponding message may be redelivered later. Acknowledging a message + // more than once will not result in an error. If the acknowledgement ID is + // malformed, the stream will be aborted with status `INVALID_ARGUMENT`. + AckIds []string `protobuf:"bytes,2,rep,name=ack_ids,json=ackIds,proto3" json:"ack_ids,omitempty"` + // The list of new ack deadlines for the IDs listed in + // `modify_deadline_ack_ids`. The size of this list must be the same as the + // size of `modify_deadline_ack_ids`. If it differs the stream will be aborted + // with `INVALID_ARGUMENT`. Each element in this list is applied to the + // element in the same position in `modify_deadline_ack_ids`. The new ack + // deadline is with respect to the time this request was sent to the Pub/Sub + // system. Must be >= 0. For example, if the value is 10, the new ack deadline + // will expire 10 seconds after this request is received. If the value is 0, + // the message is immediately made available for another streaming or + // non-streaming pull request. If the value is < 0 (an error), the stream will + // be aborted with status `INVALID_ARGUMENT`. 
+ ModifyDeadlineSeconds []int32 `protobuf:"varint,3,rep,packed,name=modify_deadline_seconds,json=modifyDeadlineSeconds,proto3" json:"modify_deadline_seconds,omitempty"` + // List of acknowledgement IDs whose deadline will be modified based on the + // corresponding element in `modify_deadline_seconds`. This field can be used + // to indicate that more time is needed to process a message by the + // subscriber, or to make the message available for redelivery if the + // processing was interrupted. + ModifyDeadlineAckIds []string `protobuf:"bytes,4,rep,name=modify_deadline_ack_ids,json=modifyDeadlineAckIds,proto3" json:"modify_deadline_ack_ids,omitempty"` + // The ack deadline to use for the stream. This must be provided in the + // first request on the stream, but it can also be updated on subsequent + // requests from client to server. The minimum deadline you can specify is 10 + // seconds. The maximum deadline you can specify is 600 seconds (10 minutes). + StreamAckDeadlineSeconds int32 `protobuf:"varint,5,opt,name=stream_ack_deadline_seconds,json=streamAckDeadlineSeconds,proto3" json:"stream_ack_deadline_seconds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingPullRequest) Reset() { *m = StreamingPullRequest{} } +func (m *StreamingPullRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingPullRequest) ProtoMessage() {} +func (*StreamingPullRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{28} +} +func (m *StreamingPullRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingPullRequest.Unmarshal(m, b) +} +func (m *StreamingPullRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingPullRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingPullRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingPullRequest.Merge(dst, src) +} +func (m *StreamingPullRequest) XXX_Size() int { + return xxx_messageInfo_StreamingPullRequest.Size(m) +} +func (m *StreamingPullRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingPullRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingPullRequest proto.InternalMessageInfo + +func (m *StreamingPullRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *StreamingPullRequest) GetAckIds() []string { + if m != nil { + return m.AckIds + } + return nil +} + +func (m *StreamingPullRequest) GetModifyDeadlineSeconds() []int32 { + if m != nil { + return m.ModifyDeadlineSeconds + } + return nil +} + +func (m *StreamingPullRequest) GetModifyDeadlineAckIds() []string { + if m != nil { + return m.ModifyDeadlineAckIds + } + return nil +} + +func (m *StreamingPullRequest) GetStreamAckDeadlineSeconds() int32 { + if m != nil { + return m.StreamAckDeadlineSeconds + } + return 0 +} + +// Response for the `StreamingPull` method. This response is used to stream +// messages from the server to the client. +type StreamingPullResponse struct { + // Received Pub/Sub messages. This will not be empty. 
+ ReceivedMessages []*ReceivedMessage `protobuf:"bytes,1,rep,name=received_messages,json=receivedMessages,proto3" json:"received_messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingPullResponse) Reset() { *m = StreamingPullResponse{} } +func (m *StreamingPullResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingPullResponse) ProtoMessage() {} +func (*StreamingPullResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{29} +} +func (m *StreamingPullResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingPullResponse.Unmarshal(m, b) +} +func (m *StreamingPullResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingPullResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingPullResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingPullResponse.Merge(dst, src) +} +func (m *StreamingPullResponse) XXX_Size() int { + return xxx_messageInfo_StreamingPullResponse.Size(m) +} +func (m *StreamingPullResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingPullResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingPullResponse proto.InternalMessageInfo + +func (m *StreamingPullResponse) GetReceivedMessages() []*ReceivedMessage { + if m != nil { + return m.ReceivedMessages + } + return nil +} + +// Request for the `CreateSnapshot` method. +type CreateSnapshotRequest struct { + // Optional user-provided name for this snapshot. + // If the name is not provided in the request, the server will assign a random + // name for this snapshot on the same project as the subscription. + // Note that for REST API requests, you must specify a name. See the + // + // resource name rules. + // Format is `projects/{project}/snapshots/{snap}`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The subscription whose backlog the snapshot retains. + // Specifically, the created snapshot is guaranteed to retain: + // (a) The existing backlog on the subscription. More precisely, this is + // defined as the messages in the subscription's backlog that are + // unacknowledged upon the successful completion of the + // `CreateSnapshot` request; as well as: + // (b) Any messages published to the subscription's topic following the + // successful completion of the CreateSnapshot request. + // Format is `projects/{project}/subscriptions/{sub}`. + Subscription string `protobuf:"bytes,2,opt,name=subscription,proto3" json:"subscription,omitempty"` + // See Creating and + // managing labels. 
+ Labels map[string]string `protobuf:"bytes,3,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSnapshotRequest) Reset() { *m = CreateSnapshotRequest{} } +func (m *CreateSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSnapshotRequest) ProtoMessage() {} +func (*CreateSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{30} +} +func (m *CreateSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSnapshotRequest.Unmarshal(m, b) +} +func (m *CreateSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSnapshotRequest.Merge(dst, src) +} +func (m *CreateSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_CreateSnapshotRequest.Size(m) +} +func (m *CreateSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSnapshotRequest proto.InternalMessageInfo + +func (m *CreateSnapshotRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *CreateSnapshotRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *CreateSnapshotRequest) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// Request for the UpdateSnapshot method. +type UpdateSnapshotRequest struct { + // The updated snapshot object. + Snapshot *Snapshot `protobuf:"bytes,1,opt,name=snapshot,proto3" json:"snapshot,omitempty"` + // Indicates which fields in the provided snapshot to update. + // Must be specified and non-empty. 
+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateSnapshotRequest) Reset() { *m = UpdateSnapshotRequest{} } +func (m *UpdateSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateSnapshotRequest) ProtoMessage() {} +func (*UpdateSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{31} +} +func (m *UpdateSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateSnapshotRequest.Unmarshal(m, b) +} +func (m *UpdateSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateSnapshotRequest.Merge(dst, src) +} +func (m *UpdateSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_UpdateSnapshotRequest.Size(m) +} +func (m *UpdateSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateSnapshotRequest proto.InternalMessageInfo + +func (m *UpdateSnapshotRequest) GetSnapshot() *Snapshot { + if m != nil { + return m.Snapshot + } + return nil +} + +func (m *UpdateSnapshotRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// A snapshot resource. Snapshots are used in +// Seek +// operations, which allow +// you to manage message acknowledgments in bulk. That is, you can set the +// acknowledgment state of messages in an existing subscription to the state +// captured by a snapshot. +type Snapshot struct { + // The name of the snapshot. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the topic from which this snapshot is retaining messages. + Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` + // The snapshot is guaranteed to exist up until this time. + // A newly-created snapshot expires no later than 7 days from the time of its + // creation. Its exact lifetime is determined at creation by the existing + // backlog in the source subscription. Specifically, the lifetime of the + // snapshot is `7 days - (age of oldest unacked message in the subscription)`. + // For example, consider a subscription whose oldest unacked message is 3 days + // old. If a snapshot is created from this subscription, the snapshot -- which + // will always capture this 3-day-old backlog as long as the snapshot + // exists -- will expire in 4 days. The service will refuse to create a + // snapshot that would expire in less than 1 hour after creation. + ExpireTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=expire_time,json=expireTime,proto3" json:"expire_time,omitempty"` + // See Creating and + // managing labels. 
+ Labels map[string]string `protobuf:"bytes,4,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Snapshot) Reset() { *m = Snapshot{} } +func (m *Snapshot) String() string { return proto.CompactTextString(m) } +func (*Snapshot) ProtoMessage() {} +func (*Snapshot) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{32} +} +func (m *Snapshot) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Snapshot.Unmarshal(m, b) +} +func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic) +} +func (dst *Snapshot) XXX_Merge(src proto.Message) { + xxx_messageInfo_Snapshot.Merge(dst, src) +} +func (m *Snapshot) XXX_Size() int { + return xxx_messageInfo_Snapshot.Size(m) +} +func (m *Snapshot) XXX_DiscardUnknown() { + xxx_messageInfo_Snapshot.DiscardUnknown(m) +} + +var xxx_messageInfo_Snapshot proto.InternalMessageInfo + +func (m *Snapshot) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Snapshot) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *Snapshot) GetExpireTime() *timestamp.Timestamp { + if m != nil { + return m.ExpireTime + } + return nil +} + +func (m *Snapshot) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// Request for the GetSnapshot method. +type GetSnapshotRequest struct { + // The name of the snapshot to get. + // Format is `projects/{project}/snapshots/{snap}`. + Snapshot string `protobuf:"bytes,1,opt,name=snapshot,proto3" json:"snapshot,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSnapshotRequest) Reset() { *m = GetSnapshotRequest{} } +func (m *GetSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*GetSnapshotRequest) ProtoMessage() {} +func (*GetSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{33} +} +func (m *GetSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSnapshotRequest.Unmarshal(m, b) +} +func (m *GetSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *GetSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSnapshotRequest.Merge(dst, src) +} +func (m *GetSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_GetSnapshotRequest.Size(m) +} +func (m *GetSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSnapshotRequest proto.InternalMessageInfo + +func (m *GetSnapshotRequest) GetSnapshot() string { + if m != nil { + return m.Snapshot + } + return "" +} + +// Request for the `ListSnapshots` method. +type ListSnapshotsRequest struct { + // The name of the project in which to list snapshots. + // Format is `projects/{project-id}`. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Maximum number of snapshots to return. 
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last `ListSnapshotsResponse`; indicates that this + // is a continuation of a prior `ListSnapshots` call, and that the system + // should return the next page of data. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSnapshotsRequest) Reset() { *m = ListSnapshotsRequest{} } +func (m *ListSnapshotsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSnapshotsRequest) ProtoMessage() {} +func (*ListSnapshotsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{34} +} +func (m *ListSnapshotsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSnapshotsRequest.Unmarshal(m, b) +} +func (m *ListSnapshotsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSnapshotsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSnapshotsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSnapshotsRequest.Merge(dst, src) +} +func (m *ListSnapshotsRequest) XXX_Size() int { + return xxx_messageInfo_ListSnapshotsRequest.Size(m) +} +func (m *ListSnapshotsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSnapshotsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSnapshotsRequest proto.InternalMessageInfo + +func (m *ListSnapshotsRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListSnapshotsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSnapshotsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the `ListSnapshots` method. +type ListSnapshotsResponse struct { + // The resulting snapshots. + Snapshots []*Snapshot `protobuf:"bytes,1,rep,name=snapshots,proto3" json:"snapshots,omitempty"` + // If not empty, indicates that there may be more snapshot that match the + // request; this value should be passed in a new `ListSnapshotsRequest`. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSnapshotsResponse) Reset() { *m = ListSnapshotsResponse{} } +func (m *ListSnapshotsResponse) String() string { return proto.CompactTextString(m) } +func (*ListSnapshotsResponse) ProtoMessage() {} +func (*ListSnapshotsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{35} +} +func (m *ListSnapshotsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSnapshotsResponse.Unmarshal(m, b) +} +func (m *ListSnapshotsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSnapshotsResponse.Marshal(b, m, deterministic) +} +func (dst *ListSnapshotsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSnapshotsResponse.Merge(dst, src) +} +func (m *ListSnapshotsResponse) XXX_Size() int { + return xxx_messageInfo_ListSnapshotsResponse.Size(m) +} +func (m *ListSnapshotsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSnapshotsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSnapshotsResponse proto.InternalMessageInfo + +func (m *ListSnapshotsResponse) GetSnapshots() []*Snapshot { + if m != nil { + return m.Snapshots + } + return nil +} + +func (m *ListSnapshotsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the `DeleteSnapshot` method. +type DeleteSnapshotRequest struct { + // The name of the snapshot to delete. + // Format is `projects/{project}/snapshots/{snap}`. + Snapshot string `protobuf:"bytes,1,opt,name=snapshot,proto3" json:"snapshot,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSnapshotRequest) Reset() { *m = DeleteSnapshotRequest{} } +func (m *DeleteSnapshotRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSnapshotRequest) ProtoMessage() {} +func (*DeleteSnapshotRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{36} +} +func (m *DeleteSnapshotRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSnapshotRequest.Unmarshal(m, b) +} +func (m *DeleteSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSnapshotRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSnapshotRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSnapshotRequest.Merge(dst, src) +} +func (m *DeleteSnapshotRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSnapshotRequest.Size(m) +} +func (m *DeleteSnapshotRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSnapshotRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSnapshotRequest proto.InternalMessageInfo + +func (m *DeleteSnapshotRequest) GetSnapshot() string { + if m != nil { + return m.Snapshot + } + return "" +} + +// Request for the `Seek` method. +type SeekRequest struct { + // The subscription to affect. 
+ Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // Types that are valid to be assigned to Target: + // *SeekRequest_Time + // *SeekRequest_Snapshot + Target isSeekRequest_Target `protobuf_oneof:"target"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SeekRequest) Reset() { *m = SeekRequest{} } +func (m *SeekRequest) String() string { return proto.CompactTextString(m) } +func (*SeekRequest) ProtoMessage() {} +func (*SeekRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{37} +} +func (m *SeekRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SeekRequest.Unmarshal(m, b) +} +func (m *SeekRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SeekRequest.Marshal(b, m, deterministic) +} +func (dst *SeekRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SeekRequest.Merge(dst, src) +} +func (m *SeekRequest) XXX_Size() int { + return xxx_messageInfo_SeekRequest.Size(m) +} +func (m *SeekRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SeekRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SeekRequest proto.InternalMessageInfo + +func (m *SeekRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +type isSeekRequest_Target interface { + isSeekRequest_Target() +} + +type SeekRequest_Time struct { + Time *timestamp.Timestamp `protobuf:"bytes,2,opt,name=time,proto3,oneof"` +} + +type SeekRequest_Snapshot struct { + Snapshot string `protobuf:"bytes,3,opt,name=snapshot,proto3,oneof"` +} + +func (*SeekRequest_Time) isSeekRequest_Target() {} + +func (*SeekRequest_Snapshot) isSeekRequest_Target() {} + +func (m *SeekRequest) GetTarget() isSeekRequest_Target { + if m != nil { + return m.Target + } + return nil +} + +func (m *SeekRequest) GetTime() *timestamp.Timestamp { + if x, ok := m.GetTarget().(*SeekRequest_Time); ok { + return x.Time + } + return nil +} + +func (m *SeekRequest) GetSnapshot() string { + if x, ok := m.GetTarget().(*SeekRequest_Snapshot); ok { + return x.Snapshot + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*SeekRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SeekRequest_OneofMarshaler, _SeekRequest_OneofUnmarshaler, _SeekRequest_OneofSizer, []interface{}{ + (*SeekRequest_Time)(nil), + (*SeekRequest_Snapshot)(nil), + } +} + +func _SeekRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SeekRequest) + // target + switch x := m.Target.(type) { + case *SeekRequest_Time: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Time); err != nil { + return err + } + case *SeekRequest_Snapshot: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Snapshot) + case nil: + default: + return fmt.Errorf("SeekRequest.Target has unexpected type %T", x) + } + return nil +} + +func _SeekRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SeekRequest) + switch tag { + case 2: // target.time + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.Target = &SeekRequest_Time{msg} + return true, err + case 3: // target.snapshot + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Target = &SeekRequest_Snapshot{x} + return true, err + default: + return false, nil + } +} + +func _SeekRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SeekRequest) + // target + switch x := m.Target.(type) { + case *SeekRequest_Time: + s := proto.Size(x.Time) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *SeekRequest_Snapshot: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Snapshot))) + n += len(x.Snapshot) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Response for the `Seek` method (this response is empty). 
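For reference, a minimal sketch of how the SeekRequest oneof handled by the helpers above is populated and read back through the generated accessors; the import alias pubsubpb, the vendored import path, and the subscription name are assumptions, not part of this patch.

package main

import (
	"fmt"
	"time"

	"github.com/golang/protobuf/ptypes"
	pubsubpb "google.golang.org/genproto/googleapis/pubsub/v1"
)

func main() {
	// Sketch only: seek a (hypothetical) subscription one hour back in time.
	ts, err := ptypes.TimestampProto(time.Now().Add(-time.Hour))
	if err != nil {
		panic(err)
	}
	req := &pubsubpb.SeekRequest{
		Subscription: "projects/my-project/subscriptions/my-sub", // hypothetical name
		Target:       &pubsubpb.SeekRequest_Time{Time: ts},
	}
	// GetTime returns the timestamp because the Time variant of the oneof is
	// set; GetSnapshot falls through to "" because the Snapshot variant is not.
	fmt.Println(req.GetTime(), req.GetSnapshot())
}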
+type SeekResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SeekResponse) Reset() { *m = SeekResponse{} } +func (m *SeekResponse) String() string { return proto.CompactTextString(m) } +func (*SeekResponse) ProtoMessage() {} +func (*SeekResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_790424aa8109440f, []int{38} +} +func (m *SeekResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SeekResponse.Unmarshal(m, b) +} +func (m *SeekResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SeekResponse.Marshal(b, m, deterministic) +} +func (dst *SeekResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SeekResponse.Merge(dst, src) +} +func (m *SeekResponse) XXX_Size() int { + return xxx_messageInfo_SeekResponse.Size(m) +} +func (m *SeekResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SeekResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SeekResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*MessageStoragePolicy)(nil), "google.pubsub.v1.MessageStoragePolicy") + proto.RegisterType((*Topic)(nil), "google.pubsub.v1.Topic") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.Topic.LabelsEntry") + proto.RegisterType((*PubsubMessage)(nil), "google.pubsub.v1.PubsubMessage") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.PubsubMessage.AttributesEntry") + proto.RegisterType((*GetTopicRequest)(nil), "google.pubsub.v1.GetTopicRequest") + proto.RegisterType((*UpdateTopicRequest)(nil), "google.pubsub.v1.UpdateTopicRequest") + proto.RegisterType((*PublishRequest)(nil), "google.pubsub.v1.PublishRequest") + proto.RegisterType((*PublishResponse)(nil), "google.pubsub.v1.PublishResponse") + proto.RegisterType((*ListTopicsRequest)(nil), "google.pubsub.v1.ListTopicsRequest") + proto.RegisterType((*ListTopicsResponse)(nil), "google.pubsub.v1.ListTopicsResponse") + proto.RegisterType((*ListTopicSubscriptionsRequest)(nil), "google.pubsub.v1.ListTopicSubscriptionsRequest") + proto.RegisterType((*ListTopicSubscriptionsResponse)(nil), "google.pubsub.v1.ListTopicSubscriptionsResponse") + proto.RegisterType((*ListTopicSnapshotsRequest)(nil), "google.pubsub.v1.ListTopicSnapshotsRequest") + proto.RegisterType((*ListTopicSnapshotsResponse)(nil), "google.pubsub.v1.ListTopicSnapshotsResponse") + proto.RegisterType((*DeleteTopicRequest)(nil), "google.pubsub.v1.DeleteTopicRequest") + proto.RegisterType((*Subscription)(nil), "google.pubsub.v1.Subscription") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.Subscription.LabelsEntry") + proto.RegisterType((*ExpirationPolicy)(nil), "google.pubsub.v1.ExpirationPolicy") + proto.RegisterType((*PushConfig)(nil), "google.pubsub.v1.PushConfig") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.PushConfig.AttributesEntry") + proto.RegisterType((*PushConfig_OidcToken)(nil), "google.pubsub.v1.PushConfig.OidcToken") + proto.RegisterType((*ReceivedMessage)(nil), "google.pubsub.v1.ReceivedMessage") + proto.RegisterType((*GetSubscriptionRequest)(nil), "google.pubsub.v1.GetSubscriptionRequest") + proto.RegisterType((*UpdateSubscriptionRequest)(nil), "google.pubsub.v1.UpdateSubscriptionRequest") + proto.RegisterType((*ListSubscriptionsRequest)(nil), "google.pubsub.v1.ListSubscriptionsRequest") + proto.RegisterType((*ListSubscriptionsResponse)(nil), "google.pubsub.v1.ListSubscriptionsResponse") + 
proto.RegisterType((*DeleteSubscriptionRequest)(nil), "google.pubsub.v1.DeleteSubscriptionRequest") + proto.RegisterType((*ModifyPushConfigRequest)(nil), "google.pubsub.v1.ModifyPushConfigRequest") + proto.RegisterType((*PullRequest)(nil), "google.pubsub.v1.PullRequest") + proto.RegisterType((*PullResponse)(nil), "google.pubsub.v1.PullResponse") + proto.RegisterType((*ModifyAckDeadlineRequest)(nil), "google.pubsub.v1.ModifyAckDeadlineRequest") + proto.RegisterType((*AcknowledgeRequest)(nil), "google.pubsub.v1.AcknowledgeRequest") + proto.RegisterType((*StreamingPullRequest)(nil), "google.pubsub.v1.StreamingPullRequest") + proto.RegisterType((*StreamingPullResponse)(nil), "google.pubsub.v1.StreamingPullResponse") + proto.RegisterType((*CreateSnapshotRequest)(nil), "google.pubsub.v1.CreateSnapshotRequest") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.CreateSnapshotRequest.LabelsEntry") + proto.RegisterType((*UpdateSnapshotRequest)(nil), "google.pubsub.v1.UpdateSnapshotRequest") + proto.RegisterType((*Snapshot)(nil), "google.pubsub.v1.Snapshot") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1.Snapshot.LabelsEntry") + proto.RegisterType((*GetSnapshotRequest)(nil), "google.pubsub.v1.GetSnapshotRequest") + proto.RegisterType((*ListSnapshotsRequest)(nil), "google.pubsub.v1.ListSnapshotsRequest") + proto.RegisterType((*ListSnapshotsResponse)(nil), "google.pubsub.v1.ListSnapshotsResponse") + proto.RegisterType((*DeleteSnapshotRequest)(nil), "google.pubsub.v1.DeleteSnapshotRequest") + proto.RegisterType((*SeekRequest)(nil), "google.pubsub.v1.SeekRequest") + proto.RegisterType((*SeekResponse)(nil), "google.pubsub.v1.SeekResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// PublisherClient is the client API for Publisher service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PublisherClient interface { + // Creates the given topic with the given name. See the + // + // resource name rules. + CreateTopic(ctx context.Context, in *Topic, opts ...grpc.CallOption) (*Topic, error) + // Updates an existing topic. Note that certain properties of a + // topic are not modifiable. + UpdateTopic(ctx context.Context, in *UpdateTopicRequest, opts ...grpc.CallOption) (*Topic, error) + // Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + // does not exist. + Publish(ctx context.Context, in *PublishRequest, opts ...grpc.CallOption) (*PublishResponse, error) + // Gets the configuration of a topic. + GetTopic(ctx context.Context, in *GetTopicRequest, opts ...grpc.CallOption) (*Topic, error) + // Lists matching topics. + ListTopics(ctx context.Context, in *ListTopicsRequest, opts ...grpc.CallOption) (*ListTopicsResponse, error) + // Lists the names of the subscriptions on this topic. + ListTopicSubscriptions(ctx context.Context, in *ListTopicSubscriptionsRequest, opts ...grpc.CallOption) (*ListTopicSubscriptionsResponse, error) + // Lists the names of the snapshots on this topic. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. 
That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + ListTopicSnapshots(ctx context.Context, in *ListTopicSnapshotsRequest, opts ...grpc.CallOption) (*ListTopicSnapshotsResponse, error) + // Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + // does not exist. After a topic is deleted, a new topic may be created with + // the same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted, but their `topic` field is set to `_deleted-topic_`. + DeleteTopic(ctx context.Context, in *DeleteTopicRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type publisherClient struct { + cc *grpc.ClientConn +} + +func NewPublisherClient(cc *grpc.ClientConn) PublisherClient { + return &publisherClient{cc} +} + +func (c *publisherClient) CreateTopic(ctx context.Context, in *Topic, opts ...grpc.CallOption) (*Topic, error) { + out := new(Topic) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/CreateTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) UpdateTopic(ctx context.Context, in *UpdateTopicRequest, opts ...grpc.CallOption) (*Topic, error) { + out := new(Topic) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/UpdateTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) Publish(ctx context.Context, in *PublishRequest, opts ...grpc.CallOption) (*PublishResponse, error) { + out := new(PublishResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/Publish", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) GetTopic(ctx context.Context, in *GetTopicRequest, opts ...grpc.CallOption) (*Topic, error) { + out := new(Topic) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/GetTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) ListTopics(ctx context.Context, in *ListTopicsRequest, opts ...grpc.CallOption) (*ListTopicsResponse, error) { + out := new(ListTopicsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/ListTopics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) ListTopicSubscriptions(ctx context.Context, in *ListTopicSubscriptionsRequest, opts ...grpc.CallOption) (*ListTopicSubscriptionsResponse, error) { + out := new(ListTopicSubscriptionsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/ListTopicSubscriptions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) ListTopicSnapshots(ctx context.Context, in *ListTopicSnapshotsRequest, opts ...grpc.CallOption) (*ListTopicSnapshotsResponse, error) { + out := new(ListTopicSnapshotsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/ListTopicSnapshots", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) DeleteTopic(ctx context.Context, in *DeleteTopicRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Publisher/DeleteTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PublisherServer is the server API for Publisher service. 
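For reference, a minimal sketch of driving the generated Publisher client above over a raw gRPC connection; the endpoint (a locally running Pub/Sub emulator), the import alias pubsubpb, and the resource names are assumptions, and real deployments would supply transport and per-RPC credentials instead of an insecure dial.

package main

import (
	"context"
	"log"

	pubsubpb "google.golang.org/genproto/googleapis/pubsub/v1"
	"google.golang.org/grpc"
)

func main() {
	// Sketch only: assumes the Pub/Sub emulator is listening on localhost:8085;
	// production endpoints require TLS and authentication.
	conn, err := grpc.Dial("localhost:8085", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := pubsubpb.NewPublisherClient(conn)
	resp, err := client.Publish(context.Background(), &pubsubpb.PublishRequest{
		Topic:    "projects/my-project/topics/my-topic", // hypothetical name
		Messages: []*pubsubpb.PubsubMessage{{Data: []byte("hello")}},
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Println("published message IDs:", resp.MessageIds)
}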
+type PublisherServer interface { + // Creates the given topic with the given name. See the + // + // resource name rules. + CreateTopic(context.Context, *Topic) (*Topic, error) + // Updates an existing topic. Note that certain properties of a + // topic are not modifiable. + UpdateTopic(context.Context, *UpdateTopicRequest) (*Topic, error) + // Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + // does not exist. + Publish(context.Context, *PublishRequest) (*PublishResponse, error) + // Gets the configuration of a topic. + GetTopic(context.Context, *GetTopicRequest) (*Topic, error) + // Lists matching topics. + ListTopics(context.Context, *ListTopicsRequest) (*ListTopicsResponse, error) + // Lists the names of the subscriptions on this topic. + ListTopicSubscriptions(context.Context, *ListTopicSubscriptionsRequest) (*ListTopicSubscriptionsResponse, error) + // Lists the names of the snapshots on this topic. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + ListTopicSnapshots(context.Context, *ListTopicSnapshotsRequest) (*ListTopicSnapshotsResponse, error) + // Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + // does not exist. After a topic is deleted, a new topic may be created with + // the same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted, but their `topic` field is set to `_deleted-topic_`. + DeleteTopic(context.Context, *DeleteTopicRequest) (*empty.Empty, error) +} + +func RegisterPublisherServer(s *grpc.Server, srv PublisherServer) { + s.RegisterService(&_Publisher_serviceDesc, srv) +} + +func _Publisher_CreateTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Topic) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).CreateTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/CreateTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).CreateTopic(ctx, req.(*Topic)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_UpdateTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTopicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).UpdateTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/UpdateTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).UpdateTopic(ctx, req.(*UpdateTopicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_Publish_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PublishRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).Publish(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.pubsub.v1.Publisher/Publish", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).Publish(ctx, req.(*PublishRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_GetTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTopicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).GetTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/GetTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).GetTopic(ctx, req.(*GetTopicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_ListTopics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTopicsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).ListTopics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/ListTopics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).ListTopics(ctx, req.(*ListTopicsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_ListTopicSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTopicSubscriptionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).ListTopicSubscriptions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).ListTopicSubscriptions(ctx, req.(*ListTopicSubscriptionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_ListTopicSnapshots_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTopicSnapshotsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).ListTopicSnapshots(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/ListTopicSnapshots", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).ListTopicSnapshots(ctx, req.(*ListTopicSnapshotsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_DeleteTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTopicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).DeleteTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Publisher/DeleteTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).DeleteTopic(ctx, req.(*DeleteTopicRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +var _Publisher_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.pubsub.v1.Publisher", + HandlerType: (*PublisherServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTopic", + Handler: _Publisher_CreateTopic_Handler, + }, + { + MethodName: "UpdateTopic", + Handler: _Publisher_UpdateTopic_Handler, + }, + { + MethodName: "Publish", + Handler: _Publisher_Publish_Handler, + }, + { + MethodName: "GetTopic", + Handler: _Publisher_GetTopic_Handler, + }, + { + MethodName: "ListTopics", + Handler: _Publisher_ListTopics_Handler, + }, + { + MethodName: "ListTopicSubscriptions", + Handler: _Publisher_ListTopicSubscriptions_Handler, + }, + { + MethodName: "ListTopicSnapshots", + Handler: _Publisher_ListTopicSnapshots_Handler, + }, + { + MethodName: "DeleteTopic", + Handler: _Publisher_DeleteTopic_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/pubsub/v1/pubsub.proto", +} + +// SubscriberClient is the client API for Subscriber service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SubscriberClient interface { + // Creates a subscription to a given topic. See the + // + // resource name rules. + // If the subscription already exists, returns `ALREADY_EXISTS`. + // If the corresponding topic doesn't exist, returns `NOT_FOUND`. + // + // If the name is not provided in the request, the server will assign a random + // name for this subscription on the same project as the topic, conforming + // to the + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Subscription object. Note that + // for REST API requests, you must specify a name in the request. + CreateSubscription(ctx context.Context, in *Subscription, opts ...grpc.CallOption) (*Subscription, error) + // Gets the configuration details of a subscription. + GetSubscription(ctx context.Context, in *GetSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) + // Updates an existing subscription. Note that certain properties of a + // subscription, such as its topic, are not modifiable. + UpdateSubscription(ctx context.Context, in *UpdateSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) + // Lists matching subscriptions. + ListSubscriptions(ctx context.Context, in *ListSubscriptionsRequest, opts ...grpc.CallOption) (*ListSubscriptionsResponse, error) + // Deletes an existing subscription. All messages retained in the subscription + // are immediately dropped. Calls to `Pull` after deletion will return + // `NOT_FOUND`. After a subscription is deleted, a new one may be created with + // the same name, but the new one has no association with the old + // subscription or its topic unless the same topic is specified. + DeleteSubscription(ctx context.Context, in *DeleteSubscriptionRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Modifies the ack deadline for a specific message. This method is useful + // to indicate that more time is needed to process a message by the + // subscriber, or to make the message available for redelivery if the + // processing was interrupted. Note that this does not modify the + // subscription-level `ackDeadlineSeconds` used for subsequent messages. 
+ ModifyAckDeadline(ctx context.Context, in *ModifyAckDeadlineRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Acknowledges the messages associated with the `ack_ids` in the + // `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + // from the subscription. + // + // Acknowledging a message whose ack deadline has expired may succeed, + // but such a message may be redelivered later. Acknowledging a message more + // than once will not result in an error. + Acknowledge(ctx context.Context, in *AcknowledgeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Pulls messages from the server. The server may return `UNAVAILABLE` if + // there are too many concurrent pull requests pending for the given + // subscription. + Pull(ctx context.Context, in *PullRequest, opts ...grpc.CallOption) (*PullResponse, error) + // Establishes a stream with the server, which sends messages down to the + // client. The client streams acknowledgements and ack deadline modifications + // back to the server. The server will close the stream and return the status + // on any error. The server may close the stream with status `UNAVAILABLE` to + // reassign server-side resources, in which case, the client should + // re-establish the stream. Flow control can be achieved by configuring the + // underlying RPC channel. + StreamingPull(ctx context.Context, opts ...grpc.CallOption) (Subscriber_StreamingPullClient, error) + // Modifies the `PushConfig` for a specified subscription. + // + // This may be used to change a push subscription to a pull one (signified by + // an empty `PushConfig`) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. Messages will accumulate for delivery + // continuously through the call regardless of changes to the `PushConfig`. + ModifyPushConfig(ctx context.Context, in *ModifyPushConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Gets the configuration details of a snapshot. Snapshots are used in + // Seek + // operations, which allow you to manage message acknowledgments in bulk. That + // is, you can set the acknowledgment state of messages in an existing + // subscription to the state captured by a snapshot. + GetSnapshot(ctx context.Context, in *GetSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) + // Lists the existing snapshots. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + ListSnapshots(ctx context.Context, in *ListSnapshotsRequest, opts ...grpc.CallOption) (*ListSnapshotsResponse, error) + // Creates a snapshot from the requested subscription. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + //
If the snapshot already exists, returns `ALREADY_EXISTS`. + // If the requested subscription doesn't exist, returns `NOT_FOUND`. + // If the backlog in the subscription is too old -- and the resulting snapshot + // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. + // See also the `Snapshot.expire_time` field. If the name is not provided in + // the request, the server will assign a random + // name for this snapshot on the same project as the subscription, conforming + // to the + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Snapshot object. Note that for + // REST API requests, you must specify a name in the request. + CreateSnapshot(ctx context.Context, in *CreateSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) + // Updates an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + UpdateSnapshot(ctx context.Context, in *UpdateSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) + // Removes an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.
+ // When the snapshot is deleted, all messages retained in the snapshot + // are immediately dropped. After a snapshot is deleted, a new one may be + // created with the same name, but the new one has no association with the old + // snapshot or its subscription, unless the same subscription is specified. + DeleteSnapshot(ctx context.Context, in *DeleteSnapshotRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Seeks an existing subscription to a point in time or to a given snapshot, + // whichever is provided in the request. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. Note that both the subscription and the snapshot + // must be on the same topic. + Seek(ctx context.Context, in *SeekRequest, opts ...grpc.CallOption) (*SeekResponse, error) +} + +type subscriberClient struct { + cc *grpc.ClientConn +} + +func NewSubscriberClient(cc *grpc.ClientConn) SubscriberClient { + return &subscriberClient{cc} +} + +func (c *subscriberClient) CreateSubscription(ctx context.Context, in *Subscription, opts ...grpc.CallOption) (*Subscription, error) { + out := new(Subscription) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/CreateSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) GetSubscription(ctx context.Context, in *GetSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) { + out := new(Subscription) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/GetSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) UpdateSubscription(ctx context.Context, in *UpdateSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) { + out := new(Subscription) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/UpdateSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ListSubscriptions(ctx context.Context, in *ListSubscriptionsRequest, opts ...grpc.CallOption) (*ListSubscriptionsResponse, error) { + out := new(ListSubscriptionsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/ListSubscriptions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) DeleteSubscription(ctx context.Context, in *DeleteSubscriptionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/DeleteSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ModifyAckDeadline(ctx context.Context, in *ModifyAckDeadlineRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/ModifyAckDeadline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) Acknowledge(ctx context.Context, in *AcknowledgeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/Acknowledge", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) Pull(ctx context.Context, in *PullRequest, opts ...grpc.CallOption) (*PullResponse, error) { + out := new(PullResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/Pull", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) StreamingPull(ctx context.Context, opts ...grpc.CallOption) (Subscriber_StreamingPullClient, error) { + stream, err := c.cc.NewStream(ctx, &_Subscriber_serviceDesc.Streams[0], "/google.pubsub.v1.Subscriber/StreamingPull", opts...) + if err != nil { + return nil, err + } + x := &subscriberStreamingPullClient{stream} + return x, nil +} + +type Subscriber_StreamingPullClient interface { + Send(*StreamingPullRequest) error + Recv() (*StreamingPullResponse, error) + grpc.ClientStream +} + +type subscriberStreamingPullClient struct { + grpc.ClientStream +} + +func (x *subscriberStreamingPullClient) Send(m *StreamingPullRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *subscriberStreamingPullClient) Recv() (*StreamingPullResponse, error) { + m := new(StreamingPullResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *subscriberClient) ModifyPushConfig(ctx context.Context, in *ModifyPushConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/ModifyPushConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) GetSnapshot(ctx context.Context, in *GetSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) { + out := new(Snapshot) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/GetSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ListSnapshots(ctx context.Context, in *ListSnapshotsRequest, opts ...grpc.CallOption) (*ListSnapshotsResponse, error) { + out := new(ListSnapshotsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/ListSnapshots", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) CreateSnapshot(ctx context.Context, in *CreateSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) { + out := new(Snapshot) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/CreateSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) UpdateSnapshot(ctx context.Context, in *UpdateSnapshotRequest, opts ...grpc.CallOption) (*Snapshot, error) { + out := new(Snapshot) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/UpdateSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) DeleteSnapshot(ctx context.Context, in *DeleteSnapshotRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/DeleteSnapshot", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) Seek(ctx context.Context, in *SeekRequest, opts ...grpc.CallOption) (*SeekResponse, error) { + out := new(SeekResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1.Subscriber/Seek", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SubscriberServer is the server API for Subscriber service. 
+type SubscriberServer interface { + // Creates a subscription to a given topic. See the + // + // resource name rules. + // If the subscription already exists, returns `ALREADY_EXISTS`. + // If the corresponding topic doesn't exist, returns `NOT_FOUND`. + // + // If the name is not provided in the request, the server will assign a random + // name for this subscription on the same project as the topic, conforming + // to the + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Subscription object. Note that + // for REST API requests, you must specify a name in the request. + CreateSubscription(context.Context, *Subscription) (*Subscription, error) + // Gets the configuration details of a subscription. + GetSubscription(context.Context, *GetSubscriptionRequest) (*Subscription, error) + // Updates an existing subscription. Note that certain properties of a + // subscription, such as its topic, are not modifiable. + UpdateSubscription(context.Context, *UpdateSubscriptionRequest) (*Subscription, error) + // Lists matching subscriptions. + ListSubscriptions(context.Context, *ListSubscriptionsRequest) (*ListSubscriptionsResponse, error) + // Deletes an existing subscription. All messages retained in the subscription + // are immediately dropped. Calls to `Pull` after deletion will return + // `NOT_FOUND`. After a subscription is deleted, a new one may be created with + // the same name, but the new one has no association with the old + // subscription or its topic unless the same topic is specified. + DeleteSubscription(context.Context, *DeleteSubscriptionRequest) (*empty.Empty, error) + // Modifies the ack deadline for a specific message. This method is useful + // to indicate that more time is needed to process a message by the + // subscriber, or to make the message available for redelivery if the + // processing was interrupted. Note that this does not modify the + // subscription-level `ackDeadlineSeconds` used for subsequent messages. + ModifyAckDeadline(context.Context, *ModifyAckDeadlineRequest) (*empty.Empty, error) + // Acknowledges the messages associated with the `ack_ids` in the + // `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + // from the subscription. + // + // Acknowledging a message whose ack deadline has expired may succeed, + // but such a message may be redelivered later. Acknowledging a message more + // than once will not result in an error. + Acknowledge(context.Context, *AcknowledgeRequest) (*empty.Empty, error) + // Pulls messages from the server. The server may return `UNAVAILABLE` if + // there are too many concurrent pull requests pending for the given + // subscription. + Pull(context.Context, *PullRequest) (*PullResponse, error) + // Establishes a stream with the server, which sends messages down to the + // client. The client streams acknowledgements and ack deadline modifications + // back to the server. The server will close the stream and return the status + // on any error. The server may close the stream with status `UNAVAILABLE` to + // reassign server-side resources, in which case, the client should + // re-establish the stream. Flow control can be achieved by configuring the + // underlying RPC channel. + StreamingPull(Subscriber_StreamingPullServer) error + // Modifies the `PushConfig` for a specified subscription. 
+ // + // This may be used to change a push subscription to a pull one (signified by + // an empty `PushConfig`) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. Messages will accumulate for delivery + // continuously through the call regardless of changes to the `PushConfig`. + ModifyPushConfig(context.Context, *ModifyPushConfigRequest) (*empty.Empty, error) + // Gets the configuration details of a snapshot. Snapshots are used in + // Seek + // operations, which allow you to manage message acknowledgments in bulk. That + // is, you can set the acknowledgment state of messages in an existing + // subscription to the state captured by a snapshot. + GetSnapshot(context.Context, *GetSnapshotRequest) (*Snapshot, error) + // Lists the existing snapshots. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + ListSnapshots(context.Context, *ListSnapshotsRequest) (*ListSnapshotsResponse, error) + // Creates a snapshot from the requested subscription. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + //
If the snapshot already exists, returns `ALREADY_EXISTS`. + // If the requested subscription doesn't exist, returns `NOT_FOUND`. + // If the backlog in the subscription is too old -- and the resulting snapshot + // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. + // See also the `Snapshot.expire_time` field. If the name is not provided in + // the request, the server will assign a random + // name for this snapshot on the same project as the subscription, conforming + // to the + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Snapshot object. Note that for + // REST API requests, you must specify a name in the request. + CreateSnapshot(context.Context, *CreateSnapshotRequest) (*Snapshot, error) + // Updates an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + UpdateSnapshot(context.Context, *UpdateSnapshotRequest) (*Snapshot, error) + // Removes an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.
+ // When the snapshot is deleted, all messages retained in the snapshot + // are immediately dropped. After a snapshot is deleted, a new one may be + // created with the same name, but the new one has no association with the old + // snapshot or its subscription, unless the same subscription is specified. + DeleteSnapshot(context.Context, *DeleteSnapshotRequest) (*empty.Empty, error) + // Seeks an existing subscription to a point in time or to a given snapshot, + // whichever is provided in the request. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. Note that both the subscription and the snapshot + // must be on the same topic. + Seek(context.Context, *SeekRequest) (*SeekResponse, error) +} + +func RegisterSubscriberServer(s *grpc.Server, srv SubscriberServer) { + s.RegisterService(&_Subscriber_serviceDesc, srv) +} + +func _Subscriber_CreateSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Subscription) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).CreateSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/CreateSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).CreateSubscription(ctx, req.(*Subscription)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_GetSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSubscriptionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).GetSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/GetSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).GetSubscription(ctx, req.(*GetSubscriptionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_UpdateSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSubscriptionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).UpdateSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/UpdateSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).UpdateSubscription(ctx, req.(*UpdateSubscriptionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ListSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSubscriptionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ListSubscriptions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.pubsub.v1.Subscriber/ListSubscriptions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ListSubscriptions(ctx, req.(*ListSubscriptionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_DeleteSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSubscriptionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).DeleteSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/DeleteSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).DeleteSubscription(ctx, req.(*DeleteSubscriptionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ModifyAckDeadline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyAckDeadlineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ModifyAckDeadline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ModifyAckDeadline(ctx, req.(*ModifyAckDeadlineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_Acknowledge_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AcknowledgeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).Acknowledge(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/Acknowledge", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).Acknowledge(ctx, req.(*AcknowledgeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_Pull_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PullRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).Pull(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/Pull", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).Pull(ctx, req.(*PullRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_StreamingPull_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SubscriberServer).StreamingPull(&subscriberStreamingPullServer{stream}) +} + +type Subscriber_StreamingPullServer interface { + Send(*StreamingPullResponse) error + Recv() (*StreamingPullRequest, error) + grpc.ServerStream +} + +type subscriberStreamingPullServer struct { + grpc.ServerStream +} + +func (x *subscriberStreamingPullServer) Send(m *StreamingPullResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *subscriberStreamingPullServer) Recv() (*StreamingPullRequest, error) { + m 
:= new(StreamingPullRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Subscriber_ModifyPushConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyPushConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ModifyPushConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/ModifyPushConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ModifyPushConfig(ctx, req.(*ModifyPushConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_GetSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).GetSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/GetSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).GetSnapshot(ctx, req.(*GetSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ListSnapshots_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSnapshotsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ListSnapshots(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/ListSnapshots", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ListSnapshots(ctx, req.(*ListSnapshotsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_CreateSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).CreateSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/CreateSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).CreateSnapshot(ctx, req.(*CreateSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_UpdateSnapshot_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).UpdateSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/UpdateSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).UpdateSnapshot(ctx, req.(*UpdateSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_DeleteSnapshot_Handler(srv interface{}, 
ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSnapshotRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).DeleteSnapshot(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/DeleteSnapshot", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).DeleteSnapshot(ctx, req.(*DeleteSnapshotRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_Seek_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SeekRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).Seek(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1.Subscriber/Seek", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).Seek(ctx, req.(*SeekRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Subscriber_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.pubsub.v1.Subscriber", + HandlerType: (*SubscriberServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateSubscription", + Handler: _Subscriber_CreateSubscription_Handler, + }, + { + MethodName: "GetSubscription", + Handler: _Subscriber_GetSubscription_Handler, + }, + { + MethodName: "UpdateSubscription", + Handler: _Subscriber_UpdateSubscription_Handler, + }, + { + MethodName: "ListSubscriptions", + Handler: _Subscriber_ListSubscriptions_Handler, + }, + { + MethodName: "DeleteSubscription", + Handler: _Subscriber_DeleteSubscription_Handler, + }, + { + MethodName: "ModifyAckDeadline", + Handler: _Subscriber_ModifyAckDeadline_Handler, + }, + { + MethodName: "Acknowledge", + Handler: _Subscriber_Acknowledge_Handler, + }, + { + MethodName: "Pull", + Handler: _Subscriber_Pull_Handler, + }, + { + MethodName: "ModifyPushConfig", + Handler: _Subscriber_ModifyPushConfig_Handler, + }, + { + MethodName: "GetSnapshot", + Handler: _Subscriber_GetSnapshot_Handler, + }, + { + MethodName: "ListSnapshots", + Handler: _Subscriber_ListSnapshots_Handler, + }, + { + MethodName: "CreateSnapshot", + Handler: _Subscriber_CreateSnapshot_Handler, + }, + { + MethodName: "UpdateSnapshot", + Handler: _Subscriber_UpdateSnapshot_Handler, + }, + { + MethodName: "DeleteSnapshot", + Handler: _Subscriber_DeleteSnapshot_Handler, + }, + { + MethodName: "Seek", + Handler: _Subscriber_Seek_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingPull", + Handler: _Subscriber_StreamingPull_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "google/pubsub/v1/pubsub.proto", +} + +func init() { + proto.RegisterFile("google/pubsub/v1/pubsub.proto", fileDescriptor_pubsub_790424aa8109440f) +} + +var fileDescriptor_pubsub_790424aa8109440f = []byte{ + // 2428 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x5a, 0x4b, 0x73, 0x1b, 0x59, + 0x15, 0x4e, 0xcb, 0x8f, 0x48, 0x47, 0x76, 0x62, 0xdf, 0xb1, 0x13, 0xb9, 0xf3, 0x72, 0x7a, 0x42, + 0xec, 0x28, 0x89, 0x64, 0x2b, 0x35, 0x61, 0xe2, 0xe0, 0xa4, 0xec, 0x38, 0x24, 0x21, 0xc9, 0xc4, + 0xb4, 0x43, 0xa8, 0x82, 0x14, 0xaa, 0x56, 0xf7, 0xb5, 0xdc, 0xa3, 0x7e, 0x4d, 0x77, 0xcb, 0x13, + 0x0d, 0x84, 
0x0a, 0x33, 0x14, 0x55, 0x14, 0x59, 0x00, 0xb3, 0x9d, 0x05, 0x05, 0x3b, 0x16, 0x2c, + 0x28, 0xd6, 0xb0, 0x67, 0xcb, 0x82, 0x3f, 0xc0, 0x8a, 0xe2, 0x07, 0xc0, 0x82, 0x2a, 0xea, 0x3e, + 0xba, 0xd5, 0xdd, 0xba, 0x92, 0x2c, 0x9b, 0xec, 0xac, 0x7b, 0xce, 0xed, 0xf3, 0x9d, 0xf7, 0x3d, + 0xa7, 0x0c, 0xe7, 0x9a, 0xae, 0xdb, 0xb4, 0x70, 0xd5, 0x6b, 0x37, 0x82, 0x76, 0xa3, 0xba, 0xbf, + 0xca, 0xff, 0xaa, 0x78, 0xbe, 0x1b, 0xba, 0x68, 0x86, 0x91, 0x2b, 0xfc, 0x70, 0x7f, 0x55, 0x3e, + 0xcb, 0x2f, 0x68, 0x9e, 0x59, 0xd5, 0x1c, 0xc7, 0x0d, 0xb5, 0xd0, 0x74, 0x9d, 0x80, 0xf1, 0xcb, + 0x0b, 0x09, 0xaa, 0x8f, 0x03, 0xb7, 0xed, 0xeb, 0x98, 0x93, 0xce, 0x47, 0x92, 0xc8, 0xaf, 0x46, + 0x7b, 0xb7, 0x6a, 0xb4, 0x7d, 0x7a, 0x97, 0xd3, 0xcf, 0x64, 0xe9, 0xd8, 0xf6, 0xc2, 0x0e, 0x27, + 0x2e, 0x66, 0x89, 0xbb, 0x26, 0xb6, 0x8c, 0xba, 0xad, 0x05, 0x2d, 0xce, 0x71, 0x21, 0xcb, 0x11, + 0x9a, 0x36, 0x0e, 0x42, 0xcd, 0xf6, 0x18, 0x83, 0xf2, 0x02, 0xe6, 0x9e, 0xe2, 0x20, 0xd0, 0x9a, + 0x78, 0x27, 0x74, 0x7d, 0xad, 0x89, 0xb7, 0x5d, 0xcb, 0xd4, 0x3b, 0xe8, 0x0e, 0x9c, 0xd1, 0x2c, + 0xcb, 0xfd, 0x14, 0x1b, 0x75, 0x0f, 0xfb, 0x81, 0x19, 0x84, 0xd8, 0xd1, 0x71, 0xdd, 0xc7, 0x4d, + 0xa2, 0x57, 0x49, 0x5a, 0x1c, 0x5b, 0x2e, 0xa8, 0x0b, 0x9c, 0x65, 0xbb, 0xcb, 0xa1, 0x32, 0x06, + 0xe5, 0xcb, 0x1c, 0x4c, 0x3c, 0x77, 0x3d, 0x53, 0x47, 0x08, 0xc6, 0x1d, 0xcd, 0xc6, 0x25, 0x69, + 0x51, 0x5a, 0x2e, 0xa8, 0xf4, 0x6f, 0x74, 0x1b, 0x26, 0x2d, 0xad, 0x81, 0xad, 0xa0, 0x94, 0x5b, + 0x1c, 0x5b, 0x2e, 0xd6, 0xde, 0xaf, 0x64, 0x2d, 0x5a, 0xa1, 0x97, 0x2b, 0x4f, 0x28, 0xd7, 0x7d, + 0x27, 0xf4, 0x3b, 0x2a, 0xbf, 0x82, 0x5e, 0xc2, 0x29, 0x9b, 0x41, 0xae, 0x07, 0x0c, 0x73, 0xdd, + 0xa3, 0xa0, 0x4b, 0x63, 0x8b, 0xd2, 0x72, 0xb1, 0x76, 0xb9, 0xf7, 0x63, 0x22, 0x15, 0xd5, 0x39, + 0x5b, 0xa4, 0xf8, 0x22, 0x4c, 0xb5, 0xec, 0xa0, 0xde, 0xc2, 0x9d, 0x3a, 0x85, 0x3d, 0x41, 0x61, + 0x43, 0xcb, 0x0e, 0x1e, 0xe3, 0xce, 0x47, 0x9a, 0x8d, 0xe5, 0x5b, 0x50, 0x4c, 0xc0, 0x42, 0x33, + 0x30, 0xd6, 0xc2, 0x1d, 0xae, 0x1e, 0xf9, 0x13, 0xcd, 0xc1, 0xc4, 0xbe, 0x66, 0xb5, 0x71, 0x29, + 0x47, 0xcf, 0xd8, 0x8f, 0xb5, 0xdc, 0x87, 0x92, 0xf2, 0xa7, 0x1c, 0x4c, 0x6f, 0x53, 0x54, 0x1c, + 0x11, 0xb1, 0x8e, 0xa1, 0x85, 0x1a, 0xbd, 0x3e, 0xa5, 0xd2, 0xbf, 0xd1, 0x33, 0x00, 0x2d, 0x0c, + 0x7d, 0xb3, 0xd1, 0x0e, 0x71, 0x64, 0xa1, 0x6a, 0xaf, 0x52, 0xa9, 0x0f, 0x55, 0x36, 0xe2, 0x1b, + 0xcc, 0x5a, 0x89, 0x4f, 0xa0, 0x73, 0x00, 0x91, 0xc5, 0x4c, 0x83, 0x5a, 0xa9, 0xa0, 0x16, 0xf8, + 0xc9, 0x23, 0x03, 0xad, 0xc3, 0x94, 0xd7, 0x6e, 0x58, 0x66, 0xb0, 0x57, 0x27, 0xe1, 0x51, 0x1a, + 0xa7, 0x66, 0x94, 0x63, 0x89, 0x3c, 0x76, 0x2a, 0xcf, 0xa3, 0xd8, 0x51, 0x8b, 0x9c, 0x9f, 0x9c, + 0xa0, 0x8b, 0x30, 0xe5, 0xfa, 0x06, 0xf6, 0x4d, 0xa7, 0x49, 0xcc, 0xc6, 0x2d, 0x56, 0x8c, 0xce, + 0x1e, 0xe3, 0x8e, 0xbc, 0x0e, 0x27, 0x33, 0xf8, 0x46, 0x32, 0xdb, 0x12, 0x9c, 0x7c, 0x80, 0x43, + 0x1a, 0x11, 0x2a, 0xfe, 0xa4, 0x8d, 0x83, 0x90, 0x30, 0x87, 0xe4, 0x37, 0xff, 0x00, 0xfb, 0xa1, + 0xbc, 0x91, 0x00, 0x7d, 0xc7, 0x33, 0xb4, 0x10, 0xa7, 0x98, 0xaf, 0x27, 0x99, 0x8b, 0xb5, 0xd3, + 0x7d, 0xa2, 0x8d, 0x7f, 0x05, 0xdd, 0x86, 0x62, 0x9b, 0x7e, 0x84, 0x66, 0x12, 0x85, 0x23, 0x32, + 0xc7, 0x37, 0x49, 0xb2, 0x3d, 0xd5, 0x82, 0x96, 0x0a, 0x8c, 0x9d, 0xfc, 0xad, 0xe8, 0x70, 0x62, + 0x9b, 0x19, 0x67, 0x20, 0x54, 0x74, 0x1b, 0xf2, 0xdc, 0x03, 0x91, 0x8b, 0x2f, 0x0c, 0x71, 0xb1, + 0x1a, 0x5f, 0x50, 0x6a, 0x70, 0x32, 0x16, 0x12, 0x78, 0xae, 0x13, 0x60, 0x74, 0x01, 0x8a, 0x5d, + 0x1f, 0x47, 0x09, 0x0a, 0xb1, 0x93, 0x03, 0xc5, 0x84, 0xd9, 0x27, 0x66, 0xc0, 0xac, 0x18, 0x44, + 0xd8, 0x4a, 0x70, 0xdc, 0xf3, 0xdd, 
0x8f, 0xb1, 0x1e, 0x72, 0x74, 0xd1, 0x4f, 0x74, 0x06, 0x0a, + 0x1e, 0x4d, 0x31, 0xf3, 0x33, 0xe6, 0x91, 0x09, 0x35, 0x4f, 0x0e, 0x76, 0xcc, 0xcf, 0x30, 0x09, + 0x28, 0x4a, 0x0c, 0xdd, 0x16, 0x76, 0xa2, 0x80, 0x22, 0x27, 0xcf, 0xc9, 0x81, 0x62, 0x03, 0x4a, + 0x8a, 0xe2, 0x08, 0xab, 0x30, 0x49, 0x55, 0x67, 0xe0, 0x06, 0xb8, 0x81, 0xb3, 0xa1, 0xcb, 0x70, + 0xd2, 0xc1, 0xaf, 0xc2, 0x7a, 0x42, 0x14, 0x0b, 0x8d, 0x69, 0x72, 0xbc, 0x1d, 0x8b, 0xfb, 0x04, + 0xce, 0xc5, 0xe2, 0x76, 0xda, 0x8d, 0x40, 0xf7, 0x4d, 0x8f, 0x96, 0xdf, 0xc1, 0x1e, 0x38, 0x8a, + 0x86, 0x0e, 0x9c, 0xef, 0x27, 0x92, 0x6b, 0x7b, 0x09, 0xa6, 0x83, 0x24, 0x81, 0x7b, 0x24, 0x7d, + 0x78, 0x60, 0x15, 0x6d, 0x58, 0xe8, 0xca, 0x73, 0x34, 0x2f, 0xd8, 0x73, 0xc3, 0x77, 0xa8, 0x5e, + 0x03, 0x64, 0x91, 0x38, 0xae, 0xda, 0x59, 0x28, 0x04, 0xd1, 0x21, 0x57, 0xab, 0x7b, 0x70, 0x60, + 0x95, 0xca, 0x80, 0xb6, 0xb0, 0x85, 0x33, 0xa9, 0x2a, 0xce, 0xeb, 0x3f, 0x8f, 0xc3, 0x54, 0xd2, + 0xcc, 0xc2, 0xa6, 0x12, 0x5f, 0xcd, 0x25, 0xcd, 0xb0, 0x0e, 0x45, 0xaf, 0x1d, 0xec, 0xd5, 0x75, + 0xd7, 0xd9, 0x35, 0x9b, 0xbc, 0xb6, 0x9d, 0x15, 0xa5, 0x5a, 0xb0, 0x77, 0x8f, 0xf2, 0xa8, 0xe0, + 0xc5, 0x7f, 0xa3, 0x15, 0x98, 0xd3, 0xf4, 0x56, 0xdd, 0xc0, 0x9a, 0x61, 0x99, 0x0e, 0xae, 0x07, + 0x58, 0x77, 0x1d, 0x23, 0xa0, 0x45, 0x6e, 0x42, 0x45, 0x9a, 0xde, 0xda, 0xe2, 0xa4, 0x1d, 0x46, + 0x41, 0x35, 0x98, 0xf7, 0x71, 0xa8, 0x99, 0x4e, 0x5d, 0xd3, 0x5b, 0xd8, 0xa8, 0xc7, 0x59, 0x7e, + 0x7c, 0x51, 0x5a, 0xce, 0xab, 0xef, 0x31, 0xe2, 0x06, 0xa1, 0xf1, 0xc4, 0x0e, 0xd0, 0x77, 0x41, + 0x8e, 0x92, 0xd7, 0xc7, 0x21, 0x76, 0x88, 0x8e, 0xf5, 0xe8, 0x25, 0x50, 0xca, 0x53, 0xcc, 0x0b, + 0x3d, 0x05, 0x68, 0x8b, 0x33, 0xa8, 0x25, 0x7e, 0x59, 0x8d, 0xee, 0x46, 0x14, 0xb4, 0x19, 0x37, + 0xda, 0x02, 0xcd, 0xb9, 0x72, 0xaf, 0xe2, 0x49, 0xbb, 0x0a, 0xfb, 0xed, 0x4d, 0x38, 0x8d, 0x1d, + 0xad, 0x61, 0xe1, 0x48, 0x95, 0x7a, 0x54, 0xda, 0x4b, 0x40, 0x55, 0x9a, 0x67, 0x64, 0xae, 0xcd, + 0x33, 0x4e, 0x44, 0xcf, 0x60, 0x16, 0xbf, 0xf2, 0x4c, 0x86, 0x24, 0x6a, 0xd1, 0x45, 0xaa, 0x8b, + 0xd2, 0x0b, 0xe3, 0x7e, 0xcc, 0xca, 0xdb, 0xf3, 0x0c, 0xce, 0x9c, 0x1c, 0xa5, 0xf1, 0xde, 0x85, + 0x99, 0xac, 0x00, 0x74, 0x15, 0xc6, 0xc2, 0xd0, 0xe2, 0x3d, 0x61, 0x80, 0x75, 0x09, 0x97, 0xf2, + 0xdf, 0x1c, 0x40, 0x37, 0x44, 0xd0, 0xfb, 0x30, 0x4d, 0xa3, 0x0a, 0x3b, 0x86, 0xe7, 0x9a, 0x4e, + 0x54, 0x3d, 0xa7, 0xc8, 0xe1, 0x7d, 0x7e, 0x86, 0x9e, 0x08, 0xfa, 0xf8, 0xb5, 0x41, 0x91, 0x37, + 0xb0, 0x89, 0x3f, 0x00, 0x70, 0x4d, 0x43, 0x4f, 0xa4, 0xac, 0xf0, 0xa9, 0x93, 0xf8, 0xda, 0x33, + 0xd3, 0xd0, 0x69, 0xae, 0x3d, 0x3c, 0xa6, 0x16, 0xdc, 0xe8, 0x87, 0xfc, 0x7d, 0x28, 0xc4, 0x14, + 0x12, 0xad, 0x01, 0xf6, 0xf7, 0x4d, 0x1d, 0xd7, 0x35, 0x5d, 0x77, 0xdb, 0x4e, 0x58, 0xc7, 0xb6, + 0x66, 0x5a, 0x5c, 0xa1, 0xf7, 0x38, 0x71, 0x83, 0xd1, 0xee, 0x13, 0x12, 0x92, 0x21, 0xaf, 0xb5, + 0x0d, 0x93, 0x3c, 0xf7, 0xb8, 0xa5, 0xe3, 0xdf, 0x47, 0xec, 0xf4, 0x9b, 0xa7, 0x61, 0x5e, 0x6b, + 0x87, 0x7b, 0x24, 0x8a, 0x75, 0x16, 0x37, 0x36, 0x0e, 0xf7, 0x5c, 0x43, 0xd1, 0xe1, 0xa4, 0x8a, + 0x75, 0x6c, 0xee, 0xc7, 0x59, 0x83, 0xe6, 0x61, 0x92, 0xa4, 0xa6, 0x69, 0x44, 0xb5, 0x42, 0xd3, + 0x5b, 0x8f, 0x0c, 0x74, 0x0b, 0x8e, 0xf3, 0x38, 0xe5, 0x9d, 0x7b, 0x68, 0x5f, 0x8d, 0xf8, 0x95, + 0x6f, 0xc0, 0xa9, 0x07, 0x38, 0x4c, 0x26, 0x44, 0x54, 0x96, 0x14, 0x98, 0x4a, 0x16, 0xee, 0xc8, + 0xdd, 0xc9, 0x33, 0xe5, 0x2b, 0x09, 0x16, 0xd8, 0xe3, 0x43, 0xf4, 0x85, 0x4d, 0xc1, 0x17, 0x8a, + 0xb5, 0xf3, 0x83, 0xf3, 0x31, 0x2d, 0xe1, 0x68, 0x0f, 0x13, 0x0f, 0x4a, 0xa4, 0xa6, 0x0b, 0x1b, + 0xe4, 0xbb, 0x79, 0x06, 0xfc, 0x5c, 0x62, 0x5d, 0x4b, 0xdc, 
0x20, 0xb7, 0x44, 0x0d, 0x72, 0xb8, + 0x45, 0x0e, 0xd9, 0x40, 0xef, 0xc2, 0x02, 0xeb, 0x36, 0x87, 0xf5, 0xee, 0x8f, 0xe0, 0xf4, 0x53, + 0xd7, 0x30, 0x77, 0x3b, 0x89, 0x46, 0x71, 0xf0, 0xeb, 0xd9, 0x36, 0x94, 0x1b, 0xad, 0x0d, 0x29, + 0x5f, 0x48, 0x50, 0xdc, 0x6e, 0x5b, 0xd6, 0x28, 0x22, 0xaf, 0x03, 0xf2, 0x71, 0xd8, 0xf6, 0x9d, + 0xba, 0x69, 0xdb, 0xd8, 0x30, 0xb5, 0x10, 0x5b, 0x1d, 0x2a, 0x39, 0xaf, 0xce, 0x32, 0xca, 0xa3, + 0x2e, 0x81, 0x3c, 0xe3, 0x6d, 0xed, 0x55, 0xb7, 0x5d, 0x8d, 0x51, 0x67, 0x17, 0x6d, 0xed, 0x55, + 0xd4, 0xa6, 0x94, 0x1f, 0xc0, 0x14, 0x03, 0xc1, 0x5d, 0xf8, 0x11, 0xcc, 0xfa, 0x3c, 0x29, 0xbb, + 0xf7, 0x98, 0x1b, 0x2f, 0xf6, 0xaa, 0x96, 0xc9, 0x5f, 0x75, 0xc6, 0x4f, 0x1f, 0x04, 0x24, 0x60, + 0x4a, 0xcc, 0xc8, 0x1b, 0xdd, 0xbe, 0x3a, 0x8a, 0xca, 0xa7, 0xe1, 0x38, 0x2b, 0x09, 0x41, 0x69, + 0x9c, 0xbe, 0x4b, 0x26, 0x69, 0x4d, 0x08, 0xfa, 0xb6, 0xf1, 0xb1, 0x7e, 0x6d, 0x5c, 0xf9, 0x36, + 0xa0, 0x0d, 0xbd, 0xe5, 0xb8, 0x9f, 0x5a, 0xd8, 0x68, 0x1e, 0x16, 0x44, 0x2e, 0x09, 0x42, 0xf9, + 0x49, 0x0e, 0xe6, 0x76, 0x42, 0x1f, 0x6b, 0xb6, 0xe9, 0x34, 0x47, 0xf5, 0x66, 0xbf, 0xaf, 0x92, + 0xf6, 0x6c, 0x53, 0x9b, 0x89, 0xb4, 0x1b, 0x5b, 0x9e, 0x50, 0xe7, 0x19, 0x39, 0xfb, 0x4e, 0xf9, + 0xa0, 0xf7, 0x5e, 0xda, 0x76, 0x73, 0xe9, 0x7b, 0x1b, 0x4c, 0xdc, 0x3a, 0x9c, 0x09, 0xa8, 0x0e, + 0xf5, 0x01, 0xef, 0xa2, 0x12, 0x63, 0xd9, 0xe8, 0x35, 0x6b, 0x13, 0xe6, 0x33, 0x26, 0x78, 0x47, + 0xb1, 0xf4, 0x77, 0x09, 0xe6, 0xef, 0xf9, 0x98, 0x54, 0x63, 0xfe, 0x34, 0x8d, 0xac, 0x2d, 0x7a, + 0x3b, 0x66, 0x3d, 0x90, 0x13, 0x78, 0xe0, 0x71, 0xfc, 0x96, 0x1a, 0xa3, 0xb0, 0x6e, 0xf4, 0xc2, + 0x12, 0x0a, 0x14, 0x3d, 0xaa, 0x8e, 0xf2, 0x96, 0x79, 0x2b, 0xc1, 0x3c, 0xef, 0x33, 0x19, 0xcd, + 0x6e, 0x42, 0x3e, 0x7a, 0x87, 0xf3, 0xfe, 0x22, 0x0b, 0xaa, 0x69, 0x74, 0x29, 0xe6, 0x3d, 0x5a, + 0x5f, 0xf9, 0x97, 0x04, 0xf9, 0xe8, 0x9b, 0x23, 0xbc, 0xcb, 0x6f, 0x43, 0x91, 0x3e, 0xf0, 0x30, + 0xdb, 0x39, 0x8c, 0x0d, 0xdd, 0x39, 0x00, 0x63, 0xa7, 0x2b, 0x87, 0x3b, 0xb1, 0x2b, 0xc6, 0xa9, + 0x2b, 0x2e, 0xf7, 0x57, 0xf3, 0xff, 0x6d, 0xfd, 0x15, 0x40, 0xe4, 0x8d, 0x90, 0xb1, 0xbc, 0x9c, + 0xb1, 0x7c, 0xa1, 0x6b, 0x5d, 0xc5, 0x82, 0x39, 0xda, 0x05, 0xb3, 0x63, 0xdb, 0xbb, 0x69, 0xba, + 0x1d, 0x98, 0xcf, 0x48, 0xe3, 0x09, 0xf6, 0x61, 0x76, 0x6a, 0x1b, 0x1c, 0x1d, 0x87, 0x98, 0xe8, + 0x6e, 0xc0, 0x3c, 0xef, 0xb1, 0x23, 0x58, 0xe7, 0x17, 0x12, 0x14, 0x77, 0x30, 0x6e, 0x8d, 0x52, + 0x0b, 0x57, 0x60, 0x9c, 0x06, 0x4d, 0x6e, 0x58, 0xd0, 0x3c, 0x3c, 0xa6, 0x52, 0x4e, 0x74, 0x36, + 0x81, 0x80, 0x9a, 0xec, 0xe1, 0xb1, 0x2e, 0x86, 0xcd, 0x3c, 0x4c, 0x86, 0x9a, 0xdf, 0xc4, 0xa1, + 0x72, 0x02, 0xa6, 0x18, 0x18, 0x66, 0xb4, 0xda, 0x5f, 0xf2, 0x50, 0xe0, 0x9b, 0x16, 0xec, 0xa3, + 0x8f, 0xa1, 0xc8, 0x32, 0x9c, 0x6d, 0x36, 0xfb, 0x2d, 0x30, 0xe4, 0x7e, 0x04, 0xe5, 0xca, 0xe7, + 0x7f, 0xfb, 0xc7, 0x97, 0xb9, 0xf7, 0xe5, 0xf3, 0xd5, 0xfd, 0xd5, 0xea, 0x0f, 0x49, 0x76, 0xac, + 0x73, 0x9f, 0x07, 0xd5, 0x72, 0x95, 0xad, 0x3d, 0xaa, 0xe5, 0xd7, 0x6b, 0x52, 0x19, 0xbd, 0x86, + 0x62, 0x62, 0x93, 0x85, 0x2e, 0xf5, 0x7e, 0xb2, 0x77, 0xd1, 0xd5, 0x5f, 0x70, 0x95, 0x0a, 0xbe, + 0x52, 0xbb, 0x44, 0x05, 0x53, 0x41, 0x95, 0x81, 0xe2, 0x3f, 0x97, 0xe0, 0x38, 0x57, 0x1c, 0x2d, + 0x0a, 0x1f, 0xd0, 0x89, 0x15, 0x97, 0x7c, 0x71, 0x00, 0x07, 0xb3, 0xa4, 0x52, 0xa3, 0x08, 0xae, + 0x29, 0x4b, 0x5d, 0x04, 0x62, 0xe1, 0x7c, 0xb5, 0x48, 0x40, 0xb8, 0x90, 0x8f, 0xf6, 0x7e, 0x48, + 0x20, 0x22, 0xb3, 0x13, 0xec, 0xaf, 0xfd, 0x12, 0x95, 0x7d, 0x11, 0x5d, 0x18, 0x22, 0x1b, 0xbd, + 0x91, 0x00, 0xba, 0x9b, 0x2b, 0x24, 0x58, 0x4b, 0xf7, 0xac, 0xd0, 0xe4, 0x4b, 0x83, 
0x99, 0xb8, + 0xfa, 0x69, 0x08, 0x5c, 0x78, 0x02, 0xc4, 0x6b, 0x8e, 0x02, 0xfd, 0x51, 0x82, 0x53, 0xe2, 0xd5, + 0x12, 0xaa, 0x0e, 0x90, 0x24, 0x7a, 0xd6, 0xcb, 0x2b, 0x07, 0xbf, 0xc0, 0x61, 0x7e, 0x40, 0x61, + 0x56, 0xd1, 0xf5, 0x21, 0x96, 0xaa, 0xa6, 0x5f, 0xe1, 0xbf, 0x95, 0x12, 0x1b, 0xbf, 0xb8, 0xf4, + 0xa0, 0xab, 0x83, 0xe4, 0x67, 0xca, 0xa1, 0x7c, 0xed, 0x60, 0xcc, 0x1c, 0xe8, 0x2a, 0x05, 0x7a, + 0x15, 0x5d, 0x19, 0x0a, 0x34, 0x46, 0x13, 0x42, 0x31, 0xb1, 0x70, 0x12, 0x65, 0x54, 0xef, 0x3e, + 0x4a, 0x3e, 0xd5, 0x53, 0x5c, 0xee, 0xdb, 0x5e, 0xd8, 0x89, 0xfc, 0x59, 0x1e, 0x16, 0x52, 0xb5, + 0xff, 0xcc, 0x02, 0x70, 0x5b, 0x37, 0xb0, 0x8f, 0x7e, 0x26, 0x01, 0xe2, 0xaf, 0x84, 0x64, 0x45, + 0x1b, 0x32, 0xf5, 0xc8, 0x43, 0xe8, 0xca, 0x0a, 0x85, 0x53, 0x96, 0xbf, 0x26, 0x2c, 0x2c, 0x29, + 0x67, 0xf1, 0x04, 0xff, 0xb5, 0x44, 0x97, 0xea, 0x29, 0x14, 0xcb, 0xc2, 0x1c, 0x13, 0x4c, 0x4c, + 0x43, 0xf1, 0xa4, 0xe3, 0x28, 0x29, 0x7f, 0x10, 0x2e, 0xf4, 0x9b, 0x78, 0x7f, 0x9f, 0xc2, 0x75, + 0xb5, 0x5f, 0xf1, 0x3b, 0x0c, 0xb4, 0x75, 0x0a, 0xed, 0xeb, 0xb5, 0x5a, 0x0f, 0xb4, 0xca, 0x41, + 0xec, 0xf6, 0x95, 0xc4, 0xf6, 0xe8, 0xe9, 0xd4, 0x2c, 0x8b, 0x83, 0x57, 0x98, 0x95, 0x57, 0x0f, + 0xc4, 0xcb, 0xe3, 0xbc, 0x42, 0xd1, 0x2e, 0xa3, 0xcb, 0x7d, 0xeb, 0x46, 0x3a, 0x13, 0x7f, 0x29, + 0x45, 0x6b, 0xd5, 0x61, 0x16, 0xec, 0x3b, 0x0e, 0xf7, 0x8d, 0x79, 0xee, 0xd4, 0xf2, 0x88, 0x4e, + 0xfd, 0x9d, 0x04, 0xb3, 0x3d, 0x53, 0x9d, 0xc8, 0x62, 0xfd, 0x46, 0xbf, 0xbe, 0x80, 0xbe, 0x45, + 0x01, 0x6d, 0x29, 0x77, 0x47, 0x02, 0xb4, 0x66, 0x67, 0xe5, 0x10, 0xbf, 0xfe, 0x4a, 0x82, 0x62, + 0x62, 0xe0, 0x13, 0x95, 0x87, 0xde, 0x79, 0xb0, 0x2f, 0xb2, 0x2d, 0x8a, 0xec, 0x8e, 0x72, 0x6b, + 0x34, 0x64, 0x5a, 0x57, 0x02, 0xc1, 0xf4, 0x53, 0x09, 0xc6, 0xc9, 0x90, 0x84, 0xce, 0x89, 0xfa, + 0x6b, 0x3c, 0x3f, 0x8a, 0x42, 0x3e, 0x39, 0x5b, 0x45, 0x21, 0xaf, 0xd4, 0x46, 0x43, 0xe3, 0xb5, + 0x2d, 0x8b, 0xc0, 0xd8, 0x85, 0xe9, 0xd4, 0xcc, 0x86, 0x44, 0xcf, 0x6d, 0xc1, 0x5c, 0x2b, 0x2f, + 0x0d, 0xe5, 0xe3, 0x00, 0x8f, 0x2d, 0x4b, 0x2b, 0x12, 0xc9, 0xfe, 0x99, 0xec, 0x8e, 0x05, 0x5d, + 0xe9, 0x17, 0x27, 0x3d, 0x7b, 0x98, 0xbe, 0xce, 0x78, 0x44, 0xd5, 0xbf, 0xa7, 0xdc, 0x39, 0x4c, + 0x98, 0x74, 0xc5, 0x10, 0x53, 0xfc, 0x18, 0x8a, 0x89, 0xd7, 0xbf, 0x28, 0x48, 0x7a, 0x87, 0x03, + 0x79, 0xc0, 0x33, 0x5b, 0xb9, 0x4e, 0xb1, 0x2d, 0x21, 0x56, 0xb8, 0xa3, 0x66, 0x95, 0xc2, 0x15, + 0x35, 0x30, 0x92, 0x4b, 0x6f, 0x25, 0x98, 0x4e, 0x3d, 0xef, 0x45, 0xbe, 0x10, 0x4d, 0x1b, 0x22, + 0x5f, 0x08, 0xe7, 0x04, 0xa5, 0x4c, 0x11, 0x5d, 0x42, 0x4a, 0xff, 0x8a, 0x13, 0x0b, 0xff, 0x42, + 0x82, 0x13, 0xe9, 0x99, 0x17, 0x2d, 0x1d, 0x70, 0x2a, 0x1e, 0x68, 0x95, 0x6b, 0x14, 0xc3, 0x65, + 0xf9, 0xa2, 0xb8, 0x9d, 0x25, 0x2c, 0x42, 0x9c, 0xf2, 0x56, 0x82, 0x13, 0xe9, 0x81, 0x58, 0x84, + 0x42, 0x38, 0x32, 0x0f, 0x44, 0xc1, 0xeb, 0x5d, 0xad, 0x9c, 0xf2, 0x4d, 0x65, 0x18, 0x9c, 0x37, + 0x12, 0x9c, 0x48, 0xcf, 0x41, 0x22, 0x38, 0xc2, 0x49, 0xa9, 0x6f, 0x08, 0xf3, 0x30, 0x29, 0x1f, + 0x30, 0x4c, 0x48, 0xe1, 0x20, 0x73, 0x8c, 0xa8, 0x70, 0x24, 0x86, 0x2d, 0x61, 0xaf, 0x4c, 0x8c, + 0x3f, 0x87, 0x2d, 0x1c, 0x01, 0xc6, 0xad, 0x35, 0xa9, 0xbc, 0xf9, 0x07, 0x09, 0xe6, 0x74, 0xd7, + 0xee, 0x11, 0xb2, 0x59, 0x64, 0x0b, 0xf8, 0x6d, 0xa2, 0xe4, 0xb6, 0xf4, 0xbd, 0x9b, 0x9c, 0xa1, + 0xe9, 0x5a, 0x9a, 0xd3, 0xac, 0xb8, 0x7e, 0xb3, 0xda, 0xc4, 0x0e, 0x35, 0x41, 0x95, 0x91, 0x34, + 0xcf, 0x0c, 0xba, 0xff, 0x8d, 0x73, 0x9b, 0xfd, 0xf5, 0x6f, 0x49, 0xfa, 0x7d, 0xee, 0xd4, 0x03, + 0x76, 0xf7, 0x9e, 0xe5, 0xb6, 0x0d, 0x32, 0x75, 0xec, 0xb4, 0x1b, 0x95, 0x17, 0xab, 0x7f, 0x8d, + 0x08, 0x2f, 
0x29, 0xe1, 0x25, 0x23, 0xbc, 0x7c, 0xb1, 0xfa, 0xcf, 0xdc, 0x02, 0x23, 0xac, 0xad, + 0x51, 0xca, 0xda, 0x1a, 0x23, 0xad, 0xad, 0xbd, 0x58, 0x6d, 0x4c, 0x52, 0x99, 0x37, 0xfe, 0x17, + 0x00, 0x00, 0xff, 0xff, 0x03, 0x5c, 0x45, 0xef, 0x00, 0x24, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/pubsub/v1beta2/pubsub.pb.go b/vendor/google.golang.org/genproto/googleapis/pubsub/v1beta2/pubsub.pb.go new file mode 100644 index 0000000..f0dd7af --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/pubsub/v1beta2/pubsub.pb.go @@ -0,0 +1,1917 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/pubsub/v1beta2/pubsub.proto + +package pubsub // import "google.golang.org/genproto/googleapis/pubsub/v1beta2" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A topic resource. +type Topic struct { + // Name of the topic. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Topic) Reset() { *m = Topic{} } +func (m *Topic) String() string { return proto.CompactTextString(m) } +func (*Topic) ProtoMessage() {} +func (*Topic) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{0} +} +func (m *Topic) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Topic.Unmarshal(m, b) +} +func (m *Topic) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Topic.Marshal(b, m, deterministic) +} +func (dst *Topic) XXX_Merge(src proto.Message) { + xxx_messageInfo_Topic.Merge(dst, src) +} +func (m *Topic) XXX_Size() int { + return xxx_messageInfo_Topic.Size(m) +} +func (m *Topic) XXX_DiscardUnknown() { + xxx_messageInfo_Topic.DiscardUnknown(m) +} + +var xxx_messageInfo_Topic proto.InternalMessageInfo + +func (m *Topic) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// A message data and its attributes. +type PubsubMessage struct { + // The message payload. For JSON requests, the value of this field must be + // base64-encoded. + Data []byte `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"` + // Optional attributes for this message. + Attributes map[string]string `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // ID of this message assigned by the server at publication time. Guaranteed + // to be unique within the topic. This value may be read by a subscriber + // that receives a PubsubMessage via a Pull call or a push delivery. It must + // not be populated by a publisher in a Publish call. 
+ MessageId string `protobuf:"bytes,3,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PubsubMessage) Reset() { *m = PubsubMessage{} } +func (m *PubsubMessage) String() string { return proto.CompactTextString(m) } +func (*PubsubMessage) ProtoMessage() {} +func (*PubsubMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{1} +} +func (m *PubsubMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PubsubMessage.Unmarshal(m, b) +} +func (m *PubsubMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PubsubMessage.Marshal(b, m, deterministic) +} +func (dst *PubsubMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_PubsubMessage.Merge(dst, src) +} +func (m *PubsubMessage) XXX_Size() int { + return xxx_messageInfo_PubsubMessage.Size(m) +} +func (m *PubsubMessage) XXX_DiscardUnknown() { + xxx_messageInfo_PubsubMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_PubsubMessage proto.InternalMessageInfo + +func (m *PubsubMessage) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +func (m *PubsubMessage) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +func (m *PubsubMessage) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +// Request for the GetTopic method. +type GetTopicRequest struct { + // The name of the topic to get. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopicRequest) Reset() { *m = GetTopicRequest{} } +func (m *GetTopicRequest) String() string { return proto.CompactTextString(m) } +func (*GetTopicRequest) ProtoMessage() {} +func (*GetTopicRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{2} +} +func (m *GetTopicRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopicRequest.Unmarshal(m, b) +} +func (m *GetTopicRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopicRequest.Marshal(b, m, deterministic) +} +func (dst *GetTopicRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopicRequest.Merge(dst, src) +} +func (m *GetTopicRequest) XXX_Size() int { + return xxx_messageInfo_GetTopicRequest.Size(m) +} +func (m *GetTopicRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopicRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopicRequest proto.InternalMessageInfo + +func (m *GetTopicRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +// Request for the Publish method. +type PublishRequest struct { + // The messages in the request will be published on this topic. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // The messages to publish. 
+ Messages []*PubsubMessage `protobuf:"bytes,2,rep,name=messages,proto3" json:"messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishRequest) Reset() { *m = PublishRequest{} } +func (m *PublishRequest) String() string { return proto.CompactTextString(m) } +func (*PublishRequest) ProtoMessage() {} +func (*PublishRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{3} +} +func (m *PublishRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishRequest.Unmarshal(m, b) +} +func (m *PublishRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishRequest.Marshal(b, m, deterministic) +} +func (dst *PublishRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishRequest.Merge(dst, src) +} +func (m *PublishRequest) XXX_Size() int { + return xxx_messageInfo_PublishRequest.Size(m) +} +func (m *PublishRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PublishRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishRequest proto.InternalMessageInfo + +func (m *PublishRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *PublishRequest) GetMessages() []*PubsubMessage { + if m != nil { + return m.Messages + } + return nil +} + +// Response for the Publish method. +type PublishResponse struct { + // The server-assigned ID of each published message, in the same order as + // the messages in the request. IDs are guaranteed to be unique within + // the topic. + MessageIds []string `protobuf:"bytes,1,rep,name=message_ids,json=messageIds,proto3" json:"message_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PublishResponse) Reset() { *m = PublishResponse{} } +func (m *PublishResponse) String() string { return proto.CompactTextString(m) } +func (*PublishResponse) ProtoMessage() {} +func (*PublishResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{4} +} +func (m *PublishResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PublishResponse.Unmarshal(m, b) +} +func (m *PublishResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PublishResponse.Marshal(b, m, deterministic) +} +func (dst *PublishResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PublishResponse.Merge(dst, src) +} +func (m *PublishResponse) XXX_Size() int { + return xxx_messageInfo_PublishResponse.Size(m) +} +func (m *PublishResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PublishResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PublishResponse proto.InternalMessageInfo + +func (m *PublishResponse) GetMessageIds() []string { + if m != nil { + return m.MessageIds + } + return nil +} + +// Request for the ListTopics method. +type ListTopicsRequest struct { + // The name of the cloud project that topics belong to. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Maximum number of topics to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last ListTopicsResponse; indicates that this is + // a continuation of a prior ListTopics call, and that the system should + // return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicsRequest) Reset() { *m = ListTopicsRequest{} } +func (m *ListTopicsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTopicsRequest) ProtoMessage() {} +func (*ListTopicsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{5} +} +func (m *ListTopicsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicsRequest.Unmarshal(m, b) +} +func (m *ListTopicsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTopicsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicsRequest.Merge(dst, src) +} +func (m *ListTopicsRequest) XXX_Size() int { + return xxx_messageInfo_ListTopicsRequest.Size(m) +} +func (m *ListTopicsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicsRequest proto.InternalMessageInfo + +func (m *ListTopicsRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListTopicsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTopicsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the ListTopics method. +type ListTopicsResponse struct { + // The resulting topics. + Topics []*Topic `protobuf:"bytes,1,rep,name=topics,proto3" json:"topics,omitempty"` + // If not empty, indicates that there may be more topics that match the + // request; this value should be passed in a new ListTopicsRequest. + NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicsResponse) Reset() { *m = ListTopicsResponse{} } +func (m *ListTopicsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTopicsResponse) ProtoMessage() {} +func (*ListTopicsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{6} +} +func (m *ListTopicsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicsResponse.Unmarshal(m, b) +} +func (m *ListTopicsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTopicsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicsResponse.Merge(dst, src) +} +func (m *ListTopicsResponse) XXX_Size() int { + return xxx_messageInfo_ListTopicsResponse.Size(m) +} +func (m *ListTopicsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicsResponse proto.InternalMessageInfo + +func (m *ListTopicsResponse) GetTopics() []*Topic { + if m != nil { + return m.Topics + } + return nil +} + +func (m *ListTopicsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the ListTopicSubscriptions method. +type ListTopicSubscriptionsRequest struct { + // The name of the topic that subscriptions are attached to. 
+ Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + // Maximum number of subscription names to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last ListTopicSubscriptionsResponse; indicates + // that this is a continuation of a prior ListTopicSubscriptions call, and + // that the system should return the next page of data. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSubscriptionsRequest) Reset() { *m = ListTopicSubscriptionsRequest{} } +func (m *ListTopicSubscriptionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTopicSubscriptionsRequest) ProtoMessage() {} +func (*ListTopicSubscriptionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{7} +} +func (m *ListTopicSubscriptionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Unmarshal(m, b) +} +func (m *ListTopicSubscriptionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTopicSubscriptionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSubscriptionsRequest.Merge(dst, src) +} +func (m *ListTopicSubscriptionsRequest) XXX_Size() int { + return xxx_messageInfo_ListTopicSubscriptionsRequest.Size(m) +} +func (m *ListTopicSubscriptionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSubscriptionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSubscriptionsRequest proto.InternalMessageInfo + +func (m *ListTopicSubscriptionsRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *ListTopicSubscriptionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTopicSubscriptionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the ListTopicSubscriptions method. +type ListTopicSubscriptionsResponse struct { + // The names of the subscriptions that match the request. + Subscriptions []string `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new + // ListTopicSubscriptionsRequest to get more subscriptions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTopicSubscriptionsResponse) Reset() { *m = ListTopicSubscriptionsResponse{} } +func (m *ListTopicSubscriptionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTopicSubscriptionsResponse) ProtoMessage() {} +func (*ListTopicSubscriptionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{8} +} +func (m *ListTopicSubscriptionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Unmarshal(m, b) +} +func (m *ListTopicSubscriptionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTopicSubscriptionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTopicSubscriptionsResponse.Merge(dst, src) +} +func (m *ListTopicSubscriptionsResponse) XXX_Size() int { + return xxx_messageInfo_ListTopicSubscriptionsResponse.Size(m) +} +func (m *ListTopicSubscriptionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTopicSubscriptionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTopicSubscriptionsResponse proto.InternalMessageInfo + +func (m *ListTopicSubscriptionsResponse) GetSubscriptions() []string { + if m != nil { + return m.Subscriptions + } + return nil +} + +func (m *ListTopicSubscriptionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the DeleteTopic method. +type DeleteTopicRequest struct { + // Name of the topic to delete. + Topic string `protobuf:"bytes,1,opt,name=topic,proto3" json:"topic,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTopicRequest) Reset() { *m = DeleteTopicRequest{} } +func (m *DeleteTopicRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTopicRequest) ProtoMessage() {} +func (*DeleteTopicRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{9} +} +func (m *DeleteTopicRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTopicRequest.Unmarshal(m, b) +} +func (m *DeleteTopicRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTopicRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTopicRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTopicRequest.Merge(dst, src) +} +func (m *DeleteTopicRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTopicRequest.Size(m) +} +func (m *DeleteTopicRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTopicRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTopicRequest proto.InternalMessageInfo + +func (m *DeleteTopicRequest) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +// A subscription resource. +type Subscription struct { + // Name of the subscription. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of the topic from which this subscription is receiving messages. + // This will be present if and only if the subscription has not been detached + // from its topic. 
+ Topic string `protobuf:"bytes,2,opt,name=topic,proto3" json:"topic,omitempty"` + // If push delivery is used with this subscription, this field is + // used to configure it. An empty pushConfig signifies that the subscriber + // will pull and ack messages using API methods. + PushConfig *PushConfig `protobuf:"bytes,4,opt,name=push_config,json=pushConfig,proto3" json:"push_config,omitempty"` + // This value is the maximum time after a subscriber receives a message + // before the subscriber should acknowledge the message. After message + // delivery but before the ack deadline expires and before the message is + // acknowledged, it is an outstanding message and will not be delivered + // again during that time (on a best-effort basis). + // + // For pull delivery this value + // is used as the initial value for the ack deadline. It may be overridden + // for a specific message by calling ModifyAckDeadline. + // + // For push delivery, this value is also used to set the request timeout for + // the call to the push endpoint. + // + // If the subscriber never acknowledges the message, the Pub/Sub + // system will eventually redeliver the message. + AckDeadlineSeconds int32 `protobuf:"varint,5,opt,name=ack_deadline_seconds,json=ackDeadlineSeconds,proto3" json:"ack_deadline_seconds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Subscription) Reset() { *m = Subscription{} } +func (m *Subscription) String() string { return proto.CompactTextString(m) } +func (*Subscription) ProtoMessage() {} +func (*Subscription) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{10} +} +func (m *Subscription) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Subscription.Unmarshal(m, b) +} +func (m *Subscription) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Subscription.Marshal(b, m, deterministic) +} +func (dst *Subscription) XXX_Merge(src proto.Message) { + xxx_messageInfo_Subscription.Merge(dst, src) +} +func (m *Subscription) XXX_Size() int { + return xxx_messageInfo_Subscription.Size(m) +} +func (m *Subscription) XXX_DiscardUnknown() { + xxx_messageInfo_Subscription.DiscardUnknown(m) +} + +var xxx_messageInfo_Subscription proto.InternalMessageInfo + +func (m *Subscription) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Subscription) GetTopic() string { + if m != nil { + return m.Topic + } + return "" +} + +func (m *Subscription) GetPushConfig() *PushConfig { + if m != nil { + return m.PushConfig + } + return nil +} + +func (m *Subscription) GetAckDeadlineSeconds() int32 { + if m != nil { + return m.AckDeadlineSeconds + } + return 0 +} + +// Configuration for a push delivery endpoint. +type PushConfig struct { + // A URL locating the endpoint to which messages should be pushed. + // For example, a Webhook endpoint might use "https://example.com/push". + PushEndpoint string `protobuf:"bytes,1,opt,name=push_endpoint,json=pushEndpoint,proto3" json:"push_endpoint,omitempty"` + // Endpoint configuration attributes. + // + // Every endpoint has a set of API supported attributes that can be used to + // control different aspects of the message delivery. + // + // The currently supported attribute is `x-goog-version`, which you can + // use to change the format of the push message. This attribute + // indicates the version of the data expected by the endpoint. 
This + // controls the shape of the envelope (i.e. its fields and metadata). + // The endpoint version is based on the version of the Pub/Sub + // API. + // + // If not present during the CreateSubscription call, it will default to + // the version of the API used to make such call. If not present during a + // ModifyPushConfig call, its value will not be changed. GetSubscription + // calls will always return a valid version, even if the subscription was + // created without this attribute. + // + // The possible values for this attribute are: + // + // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. + // * `v1beta2`: uses the push format defined in the v1beta2 Pub/Sub API. + // + Attributes map[string]string `protobuf:"bytes,2,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PushConfig) Reset() { *m = PushConfig{} } +func (m *PushConfig) String() string { return proto.CompactTextString(m) } +func (*PushConfig) ProtoMessage() {} +func (*PushConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{11} +} +func (m *PushConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PushConfig.Unmarshal(m, b) +} +func (m *PushConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PushConfig.Marshal(b, m, deterministic) +} +func (dst *PushConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PushConfig.Merge(dst, src) +} +func (m *PushConfig) XXX_Size() int { + return xxx_messageInfo_PushConfig.Size(m) +} +func (m *PushConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PushConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PushConfig proto.InternalMessageInfo + +func (m *PushConfig) GetPushEndpoint() string { + if m != nil { + return m.PushEndpoint + } + return "" +} + +func (m *PushConfig) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +// A message and its corresponding acknowledgment ID. +type ReceivedMessage struct { + // This ID can be used to acknowledge the received message. + AckId string `protobuf:"bytes,1,opt,name=ack_id,json=ackId,proto3" json:"ack_id,omitempty"` + // The message. 
+ Message *PubsubMessage `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReceivedMessage) Reset() { *m = ReceivedMessage{} } +func (m *ReceivedMessage) String() string { return proto.CompactTextString(m) } +func (*ReceivedMessage) ProtoMessage() {} +func (*ReceivedMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{12} +} +func (m *ReceivedMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReceivedMessage.Unmarshal(m, b) +} +func (m *ReceivedMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReceivedMessage.Marshal(b, m, deterministic) +} +func (dst *ReceivedMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReceivedMessage.Merge(dst, src) +} +func (m *ReceivedMessage) XXX_Size() int { + return xxx_messageInfo_ReceivedMessage.Size(m) +} +func (m *ReceivedMessage) XXX_DiscardUnknown() { + xxx_messageInfo_ReceivedMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_ReceivedMessage proto.InternalMessageInfo + +func (m *ReceivedMessage) GetAckId() string { + if m != nil { + return m.AckId + } + return "" +} + +func (m *ReceivedMessage) GetMessage() *PubsubMessage { + if m != nil { + return m.Message + } + return nil +} + +// Request for the GetSubscription method. +type GetSubscriptionRequest struct { + // The name of the subscription to get. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSubscriptionRequest) Reset() { *m = GetSubscriptionRequest{} } +func (m *GetSubscriptionRequest) String() string { return proto.CompactTextString(m) } +func (*GetSubscriptionRequest) ProtoMessage() {} +func (*GetSubscriptionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{13} +} +func (m *GetSubscriptionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSubscriptionRequest.Unmarshal(m, b) +} +func (m *GetSubscriptionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSubscriptionRequest.Marshal(b, m, deterministic) +} +func (dst *GetSubscriptionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSubscriptionRequest.Merge(dst, src) +} +func (m *GetSubscriptionRequest) XXX_Size() int { + return xxx_messageInfo_GetSubscriptionRequest.Size(m) +} +func (m *GetSubscriptionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSubscriptionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSubscriptionRequest proto.InternalMessageInfo + +func (m *GetSubscriptionRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +// Request for the ListSubscriptions method. +type ListSubscriptionsRequest struct { + // The name of the cloud project that subscriptions belong to. + Project string `protobuf:"bytes,1,opt,name=project,proto3" json:"project,omitempty"` + // Maximum number of subscriptions to return. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The value returned by the last ListSubscriptionsResponse; indicates that + // this is a continuation of a prior ListSubscriptions call, and that the + // system should return the next page of data. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSubscriptionsRequest) Reset() { *m = ListSubscriptionsRequest{} } +func (m *ListSubscriptionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSubscriptionsRequest) ProtoMessage() {} +func (*ListSubscriptionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{14} +} +func (m *ListSubscriptionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSubscriptionsRequest.Unmarshal(m, b) +} +func (m *ListSubscriptionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSubscriptionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSubscriptionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSubscriptionsRequest.Merge(dst, src) +} +func (m *ListSubscriptionsRequest) XXX_Size() int { + return xxx_messageInfo_ListSubscriptionsRequest.Size(m) +} +func (m *ListSubscriptionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSubscriptionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSubscriptionsRequest proto.InternalMessageInfo + +func (m *ListSubscriptionsRequest) GetProject() string { + if m != nil { + return m.Project + } + return "" +} + +func (m *ListSubscriptionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSubscriptionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response for the ListSubscriptions method. +type ListSubscriptionsResponse struct { + // The subscriptions that match the request. + Subscriptions []*Subscription `protobuf:"bytes,1,rep,name=subscriptions,proto3" json:"subscriptions,omitempty"` + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new ListSubscriptionsRequest + // to get more subscriptions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSubscriptionsResponse) Reset() { *m = ListSubscriptionsResponse{} } +func (m *ListSubscriptionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListSubscriptionsResponse) ProtoMessage() {} +func (*ListSubscriptionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{15} +} +func (m *ListSubscriptionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSubscriptionsResponse.Unmarshal(m, b) +} +func (m *ListSubscriptionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSubscriptionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListSubscriptionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSubscriptionsResponse.Merge(dst, src) +} +func (m *ListSubscriptionsResponse) XXX_Size() int { + return xxx_messageInfo_ListSubscriptionsResponse.Size(m) +} +func (m *ListSubscriptionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSubscriptionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSubscriptionsResponse proto.InternalMessageInfo + +func (m *ListSubscriptionsResponse) GetSubscriptions() []*Subscription { + if m != nil { + return m.Subscriptions + } + return nil +} + +func (m *ListSubscriptionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request for the DeleteSubscription method. +type DeleteSubscriptionRequest struct { + // The subscription to delete. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSubscriptionRequest) Reset() { *m = DeleteSubscriptionRequest{} } +func (m *DeleteSubscriptionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSubscriptionRequest) ProtoMessage() {} +func (*DeleteSubscriptionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{16} +} +func (m *DeleteSubscriptionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSubscriptionRequest.Unmarshal(m, b) +} +func (m *DeleteSubscriptionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSubscriptionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSubscriptionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSubscriptionRequest.Merge(dst, src) +} +func (m *DeleteSubscriptionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSubscriptionRequest.Size(m) +} +func (m *DeleteSubscriptionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSubscriptionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSubscriptionRequest proto.InternalMessageInfo + +func (m *DeleteSubscriptionRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +// Request for the ModifyPushConfig method. +type ModifyPushConfigRequest struct { + // The name of the subscription. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // The push configuration for future deliveries. 
+ // + // An empty pushConfig indicates that the Pub/Sub system should + // stop pushing messages from the given subscription and allow + // messages to be pulled and acknowledged - effectively pausing + // the subscription if Pull is not called. + PushConfig *PushConfig `protobuf:"bytes,2,opt,name=push_config,json=pushConfig,proto3" json:"push_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyPushConfigRequest) Reset() { *m = ModifyPushConfigRequest{} } +func (m *ModifyPushConfigRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyPushConfigRequest) ProtoMessage() {} +func (*ModifyPushConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{17} +} +func (m *ModifyPushConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyPushConfigRequest.Unmarshal(m, b) +} +func (m *ModifyPushConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyPushConfigRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyPushConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyPushConfigRequest.Merge(dst, src) +} +func (m *ModifyPushConfigRequest) XXX_Size() int { + return xxx_messageInfo_ModifyPushConfigRequest.Size(m) +} +func (m *ModifyPushConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyPushConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyPushConfigRequest proto.InternalMessageInfo + +func (m *ModifyPushConfigRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *ModifyPushConfigRequest) GetPushConfig() *PushConfig { + if m != nil { + return m.PushConfig + } + return nil +} + +// Request for the Pull method. +type PullRequest struct { + // The subscription from which messages should be pulled. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // If this is specified as true the system will respond immediately even if + // it is not able to return a message in the Pull response. Otherwise the + // system is allowed to wait until at least one message is available rather + // than returning no messages. The client may cancel the request if it does + // not wish to wait any longer for the response. + ReturnImmediately bool `protobuf:"varint,2,opt,name=return_immediately,json=returnImmediately,proto3" json:"return_immediately,omitempty"` + // The maximum number of messages returned for this request. The Pub/Sub + // system may return fewer than the number specified. 
+ MaxMessages int32 `protobuf:"varint,3,opt,name=max_messages,json=maxMessages,proto3" json:"max_messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullRequest) Reset() { *m = PullRequest{} } +func (m *PullRequest) String() string { return proto.CompactTextString(m) } +func (*PullRequest) ProtoMessage() {} +func (*PullRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{18} +} +func (m *PullRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullRequest.Unmarshal(m, b) +} +func (m *PullRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullRequest.Marshal(b, m, deterministic) +} +func (dst *PullRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullRequest.Merge(dst, src) +} +func (m *PullRequest) XXX_Size() int { + return xxx_messageInfo_PullRequest.Size(m) +} +func (m *PullRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PullRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PullRequest proto.InternalMessageInfo + +func (m *PullRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *PullRequest) GetReturnImmediately() bool { + if m != nil { + return m.ReturnImmediately + } + return false +} + +func (m *PullRequest) GetMaxMessages() int32 { + if m != nil { + return m.MaxMessages + } + return 0 +} + +// Response for the Pull method. +type PullResponse struct { + // Received Pub/Sub messages. The Pub/Sub system will return zero messages if + // there are no more available in the backlog. The Pub/Sub system may return + // fewer than the maxMessages requested even if there are more messages + // available in the backlog. + ReceivedMessages []*ReceivedMessage `protobuf:"bytes,1,rep,name=received_messages,json=receivedMessages,proto3" json:"received_messages,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PullResponse) Reset() { *m = PullResponse{} } +func (m *PullResponse) String() string { return proto.CompactTextString(m) } +func (*PullResponse) ProtoMessage() {} +func (*PullResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{19} +} +func (m *PullResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PullResponse.Unmarshal(m, b) +} +func (m *PullResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PullResponse.Marshal(b, m, deterministic) +} +func (dst *PullResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PullResponse.Merge(dst, src) +} +func (m *PullResponse) XXX_Size() int { + return xxx_messageInfo_PullResponse.Size(m) +} +func (m *PullResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PullResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PullResponse proto.InternalMessageInfo + +func (m *PullResponse) GetReceivedMessages() []*ReceivedMessage { + if m != nil { + return m.ReceivedMessages + } + return nil +} + +// Request for the ModifyAckDeadline method. +type ModifyAckDeadlineRequest struct { + // The name of the subscription. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // The acknowledgment ID. 
+ AckId string `protobuf:"bytes,2,opt,name=ack_id,json=ackId,proto3" json:"ack_id,omitempty"` + // The new ack deadline with respect to the time this request was sent to the + // Pub/Sub system. Must be >= 0. For example, if the value is 10, the new ack + // deadline will expire 10 seconds after the ModifyAckDeadline call was made. + // Specifying zero may immediately make the message available for another pull + // request. + AckDeadlineSeconds int32 `protobuf:"varint,3,opt,name=ack_deadline_seconds,json=ackDeadlineSeconds,proto3" json:"ack_deadline_seconds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ModifyAckDeadlineRequest) Reset() { *m = ModifyAckDeadlineRequest{} } +func (m *ModifyAckDeadlineRequest) String() string { return proto.CompactTextString(m) } +func (*ModifyAckDeadlineRequest) ProtoMessage() {} +func (*ModifyAckDeadlineRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{20} +} +func (m *ModifyAckDeadlineRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ModifyAckDeadlineRequest.Unmarshal(m, b) +} +func (m *ModifyAckDeadlineRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ModifyAckDeadlineRequest.Marshal(b, m, deterministic) +} +func (dst *ModifyAckDeadlineRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ModifyAckDeadlineRequest.Merge(dst, src) +} +func (m *ModifyAckDeadlineRequest) XXX_Size() int { + return xxx_messageInfo_ModifyAckDeadlineRequest.Size(m) +} +func (m *ModifyAckDeadlineRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ModifyAckDeadlineRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ModifyAckDeadlineRequest proto.InternalMessageInfo + +func (m *ModifyAckDeadlineRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *ModifyAckDeadlineRequest) GetAckId() string { + if m != nil { + return m.AckId + } + return "" +} + +func (m *ModifyAckDeadlineRequest) GetAckDeadlineSeconds() int32 { + if m != nil { + return m.AckDeadlineSeconds + } + return 0 +} + +// Request for the Acknowledge method. +type AcknowledgeRequest struct { + // The subscription whose message is being acknowledged. + Subscription string `protobuf:"bytes,1,opt,name=subscription,proto3" json:"subscription,omitempty"` + // The acknowledgment ID for the messages being acknowledged that was returned + // by the Pub/Sub system in the Pull response. Must not be empty. 
+ AckIds []string `protobuf:"bytes,2,rep,name=ack_ids,json=ackIds,proto3" json:"ack_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AcknowledgeRequest) Reset() { *m = AcknowledgeRequest{} } +func (m *AcknowledgeRequest) String() string { return proto.CompactTextString(m) } +func (*AcknowledgeRequest) ProtoMessage() {} +func (*AcknowledgeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_pubsub_6c7fd79cadae8b09, []int{21} +} +func (m *AcknowledgeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AcknowledgeRequest.Unmarshal(m, b) +} +func (m *AcknowledgeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AcknowledgeRequest.Marshal(b, m, deterministic) +} +func (dst *AcknowledgeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AcknowledgeRequest.Merge(dst, src) +} +func (m *AcknowledgeRequest) XXX_Size() int { + return xxx_messageInfo_AcknowledgeRequest.Size(m) +} +func (m *AcknowledgeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AcknowledgeRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AcknowledgeRequest proto.InternalMessageInfo + +func (m *AcknowledgeRequest) GetSubscription() string { + if m != nil { + return m.Subscription + } + return "" +} + +func (m *AcknowledgeRequest) GetAckIds() []string { + if m != nil { + return m.AckIds + } + return nil +} + +func init() { + proto.RegisterType((*Topic)(nil), "google.pubsub.v1beta2.Topic") + proto.RegisterType((*PubsubMessage)(nil), "google.pubsub.v1beta2.PubsubMessage") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1beta2.PubsubMessage.AttributesEntry") + proto.RegisterType((*GetTopicRequest)(nil), "google.pubsub.v1beta2.GetTopicRequest") + proto.RegisterType((*PublishRequest)(nil), "google.pubsub.v1beta2.PublishRequest") + proto.RegisterType((*PublishResponse)(nil), "google.pubsub.v1beta2.PublishResponse") + proto.RegisterType((*ListTopicsRequest)(nil), "google.pubsub.v1beta2.ListTopicsRequest") + proto.RegisterType((*ListTopicsResponse)(nil), "google.pubsub.v1beta2.ListTopicsResponse") + proto.RegisterType((*ListTopicSubscriptionsRequest)(nil), "google.pubsub.v1beta2.ListTopicSubscriptionsRequest") + proto.RegisterType((*ListTopicSubscriptionsResponse)(nil), "google.pubsub.v1beta2.ListTopicSubscriptionsResponse") + proto.RegisterType((*DeleteTopicRequest)(nil), "google.pubsub.v1beta2.DeleteTopicRequest") + proto.RegisterType((*Subscription)(nil), "google.pubsub.v1beta2.Subscription") + proto.RegisterType((*PushConfig)(nil), "google.pubsub.v1beta2.PushConfig") + proto.RegisterMapType((map[string]string)(nil), "google.pubsub.v1beta2.PushConfig.AttributesEntry") + proto.RegisterType((*ReceivedMessage)(nil), "google.pubsub.v1beta2.ReceivedMessage") + proto.RegisterType((*GetSubscriptionRequest)(nil), "google.pubsub.v1beta2.GetSubscriptionRequest") + proto.RegisterType((*ListSubscriptionsRequest)(nil), "google.pubsub.v1beta2.ListSubscriptionsRequest") + proto.RegisterType((*ListSubscriptionsResponse)(nil), "google.pubsub.v1beta2.ListSubscriptionsResponse") + proto.RegisterType((*DeleteSubscriptionRequest)(nil), "google.pubsub.v1beta2.DeleteSubscriptionRequest") + proto.RegisterType((*ModifyPushConfigRequest)(nil), "google.pubsub.v1beta2.ModifyPushConfigRequest") + proto.RegisterType((*PullRequest)(nil), "google.pubsub.v1beta2.PullRequest") + proto.RegisterType((*PullResponse)(nil), "google.pubsub.v1beta2.PullResponse") + 
proto.RegisterType((*ModifyAckDeadlineRequest)(nil), "google.pubsub.v1beta2.ModifyAckDeadlineRequest") + proto.RegisterType((*AcknowledgeRequest)(nil), "google.pubsub.v1beta2.AcknowledgeRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SubscriberClient is the client API for Subscriber service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SubscriberClient interface { + // Creates a subscription to a given topic for a given subscriber. + // If the subscription already exists, returns ALREADY_EXISTS. + // If the corresponding topic doesn't exist, returns NOT_FOUND. + // + // If the name is not provided in the request, the server will assign a random + // name for this subscription on the same project as the topic. + CreateSubscription(ctx context.Context, in *Subscription, opts ...grpc.CallOption) (*Subscription, error) + // Gets the configuration details of a subscription. + GetSubscription(ctx context.Context, in *GetSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) + // Lists matching subscriptions. + ListSubscriptions(ctx context.Context, in *ListSubscriptionsRequest, opts ...grpc.CallOption) (*ListSubscriptionsResponse, error) + // Deletes an existing subscription. All pending messages in the subscription + // are immediately dropped. Calls to Pull after deletion will return + // NOT_FOUND. After a subscription is deleted, a new one may be created with + // the same name, but the new one has no association with the old + // subscription, or its topic unless the same topic is specified. + DeleteSubscription(ctx context.Context, in *DeleteSubscriptionRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Modifies the ack deadline for a specific message. This method is useful to + // indicate that more time is needed to process a message by the subscriber, + // or to make the message available for redelivery if the processing was + // interrupted. + ModifyAckDeadline(ctx context.Context, in *ModifyAckDeadlineRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Acknowledges the messages associated with the ack tokens in the + // AcknowledgeRequest. The Pub/Sub system can remove the relevant messages + // from the subscription. + // + // Acknowledging a message whose ack deadline has expired may succeed, + // but such a message may be redelivered later. Acknowledging a message more + // than once will not result in an error. + Acknowledge(ctx context.Context, in *AcknowledgeRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Pulls messages from the server. Returns an empty list if there are no + // messages available in the backlog. The server may return UNAVAILABLE if + // there are too many concurrent pull requests pending for the given + // subscription. + Pull(ctx context.Context, in *PullRequest, opts ...grpc.CallOption) (*PullResponse, error) + // Modifies the PushConfig for a specified subscription. + // + // This may be used to change a push subscription to a pull one (signified + // by an empty PushConfig) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. 
Messages will accumulate for + // delivery continuously through the call regardless of changes to the + // PushConfig. + ModifyPushConfig(ctx context.Context, in *ModifyPushConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type subscriberClient struct { + cc *grpc.ClientConn +} + +func NewSubscriberClient(cc *grpc.ClientConn) SubscriberClient { + return &subscriberClient{cc} +} + +func (c *subscriberClient) CreateSubscription(ctx context.Context, in *Subscription, opts ...grpc.CallOption) (*Subscription, error) { + out := new(Subscription) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/CreateSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) GetSubscription(ctx context.Context, in *GetSubscriptionRequest, opts ...grpc.CallOption) (*Subscription, error) { + out := new(Subscription) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/GetSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ListSubscriptions(ctx context.Context, in *ListSubscriptionsRequest, opts ...grpc.CallOption) (*ListSubscriptionsResponse, error) { + out := new(ListSubscriptionsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/ListSubscriptions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) DeleteSubscription(ctx context.Context, in *DeleteSubscriptionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/DeleteSubscription", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ModifyAckDeadline(ctx context.Context, in *ModifyAckDeadlineRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/ModifyAckDeadline", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) Acknowledge(ctx context.Context, in *AcknowledgeRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/Acknowledge", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) Pull(ctx context.Context, in *PullRequest, opts ...grpc.CallOption) (*PullResponse, error) { + out := new(PullResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/Pull", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *subscriberClient) ModifyPushConfig(ctx context.Context, in *ModifyPushConfigRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Subscriber/ModifyPushConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SubscriberServer is the server API for Subscriber service. +type SubscriberServer interface { + // Creates a subscription to a given topic for a given subscriber. + // If the subscription already exists, returns ALREADY_EXISTS. + // If the corresponding topic doesn't exist, returns NOT_FOUND. + // + // If the name is not provided in the request, the server will assign a random + // name for this subscription on the same project as the topic. 
+ CreateSubscription(context.Context, *Subscription) (*Subscription, error) + // Gets the configuration details of a subscription. + GetSubscription(context.Context, *GetSubscriptionRequest) (*Subscription, error) + // Lists matching subscriptions. + ListSubscriptions(context.Context, *ListSubscriptionsRequest) (*ListSubscriptionsResponse, error) + // Deletes an existing subscription. All pending messages in the subscription + // are immediately dropped. Calls to Pull after deletion will return + // NOT_FOUND. After a subscription is deleted, a new one may be created with + // the same name, but the new one has no association with the old + // subscription, or its topic unless the same topic is specified. + DeleteSubscription(context.Context, *DeleteSubscriptionRequest) (*empty.Empty, error) + // Modifies the ack deadline for a specific message. This method is useful to + // indicate that more time is needed to process a message by the subscriber, + // or to make the message available for redelivery if the processing was + // interrupted. + ModifyAckDeadline(context.Context, *ModifyAckDeadlineRequest) (*empty.Empty, error) + // Acknowledges the messages associated with the ack tokens in the + // AcknowledgeRequest. The Pub/Sub system can remove the relevant messages + // from the subscription. + // + // Acknowledging a message whose ack deadline has expired may succeed, + // but such a message may be redelivered later. Acknowledging a message more + // than once will not result in an error. + Acknowledge(context.Context, *AcknowledgeRequest) (*empty.Empty, error) + // Pulls messages from the server. Returns an empty list if there are no + // messages available in the backlog. The server may return UNAVAILABLE if + // there are too many concurrent pull requests pending for the given + // subscription. + Pull(context.Context, *PullRequest) (*PullResponse, error) + // Modifies the PushConfig for a specified subscription. + // + // This may be used to change a push subscription to a pull one (signified + // by an empty PushConfig) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. Messages will accumulate for + // delivery continuously through the call regardless of changes to the + // PushConfig. 
+ ModifyPushConfig(context.Context, *ModifyPushConfigRequest) (*empty.Empty, error) +} + +func RegisterSubscriberServer(s *grpc.Server, srv SubscriberServer) { + s.RegisterService(&_Subscriber_serviceDesc, srv) +} + +func _Subscriber_CreateSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Subscription) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).CreateSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/CreateSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).CreateSubscription(ctx, req.(*Subscription)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_GetSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSubscriptionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).GetSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/GetSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).GetSubscription(ctx, req.(*GetSubscriptionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ListSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSubscriptionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ListSubscriptions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/ListSubscriptions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ListSubscriptions(ctx, req.(*ListSubscriptionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_DeleteSubscription_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSubscriptionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).DeleteSubscription(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/DeleteSubscription", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).DeleteSubscription(ctx, req.(*DeleteSubscriptionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ModifyAckDeadline_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyAckDeadlineRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ModifyAckDeadline(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/ModifyAckDeadline", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + 
return srv.(SubscriberServer).ModifyAckDeadline(ctx, req.(*ModifyAckDeadlineRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_Acknowledge_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AcknowledgeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).Acknowledge(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/Acknowledge", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).Acknowledge(ctx, req.(*AcknowledgeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_Pull_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PullRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).Pull(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/Pull", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).Pull(ctx, req.(*PullRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Subscriber_ModifyPushConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ModifyPushConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SubscriberServer).ModifyPushConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Subscriber/ModifyPushConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SubscriberServer).ModifyPushConfig(ctx, req.(*ModifyPushConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Subscriber_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.pubsub.v1beta2.Subscriber", + HandlerType: (*SubscriberServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateSubscription", + Handler: _Subscriber_CreateSubscription_Handler, + }, + { + MethodName: "GetSubscription", + Handler: _Subscriber_GetSubscription_Handler, + }, + { + MethodName: "ListSubscriptions", + Handler: _Subscriber_ListSubscriptions_Handler, + }, + { + MethodName: "DeleteSubscription", + Handler: _Subscriber_DeleteSubscription_Handler, + }, + { + MethodName: "ModifyAckDeadline", + Handler: _Subscriber_ModifyAckDeadline_Handler, + }, + { + MethodName: "Acknowledge", + Handler: _Subscriber_Acknowledge_Handler, + }, + { + MethodName: "Pull", + Handler: _Subscriber_Pull_Handler, + }, + { + MethodName: "ModifyPushConfig", + Handler: _Subscriber_ModifyPushConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/pubsub/v1beta2/pubsub.proto", +} + +// PublisherClient is the client API for Publisher service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type PublisherClient interface { + // Creates the given topic with the given name. + CreateTopic(ctx context.Context, in *Topic, opts ...grpc.CallOption) (*Topic, error) + // Adds one or more messages to the topic. 
Returns NOT_FOUND if the topic does + // not exist. + Publish(ctx context.Context, in *PublishRequest, opts ...grpc.CallOption) (*PublishResponse, error) + // Gets the configuration of a topic. + GetTopic(ctx context.Context, in *GetTopicRequest, opts ...grpc.CallOption) (*Topic, error) + // Lists matching topics. + ListTopics(ctx context.Context, in *ListTopicsRequest, opts ...grpc.CallOption) (*ListTopicsResponse, error) + // Lists the name of the subscriptions for this topic. + ListTopicSubscriptions(ctx context.Context, in *ListTopicSubscriptionsRequest, opts ...grpc.CallOption) (*ListTopicSubscriptionsResponse, error) + // Deletes the topic with the given name. Returns NOT_FOUND if the topic does + // not exist. After a topic is deleted, a new topic may be created with the + // same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted. + DeleteTopic(ctx context.Context, in *DeleteTopicRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type publisherClient struct { + cc *grpc.ClientConn +} + +func NewPublisherClient(cc *grpc.ClientConn) PublisherClient { + return &publisherClient{cc} +} + +func (c *publisherClient) CreateTopic(ctx context.Context, in *Topic, opts ...grpc.CallOption) (*Topic, error) { + out := new(Topic) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/CreateTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) Publish(ctx context.Context, in *PublishRequest, opts ...grpc.CallOption) (*PublishResponse, error) { + out := new(PublishResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/Publish", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) GetTopic(ctx context.Context, in *GetTopicRequest, opts ...grpc.CallOption) (*Topic, error) { + out := new(Topic) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/GetTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) ListTopics(ctx context.Context, in *ListTopicsRequest, opts ...grpc.CallOption) (*ListTopicsResponse, error) { + out := new(ListTopicsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/ListTopics", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) ListTopicSubscriptions(ctx context.Context, in *ListTopicSubscriptionsRequest, opts ...grpc.CallOption) (*ListTopicSubscriptionsResponse, error) { + out := new(ListTopicSubscriptionsResponse) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/ListTopicSubscriptions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *publisherClient) DeleteTopic(ctx context.Context, in *DeleteTopicRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.pubsub.v1beta2.Publisher/DeleteTopic", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// PublisherServer is the server API for Publisher service. +type PublisherServer interface { + // Creates the given topic with the given name. + CreateTopic(context.Context, *Topic) (*Topic, error) + // Adds one or more messages to the topic. Returns NOT_FOUND if the topic does + // not exist. + Publish(context.Context, *PublishRequest) (*PublishResponse, error) + // Gets the configuration of a topic. 
+ GetTopic(context.Context, *GetTopicRequest) (*Topic, error) + // Lists matching topics. + ListTopics(context.Context, *ListTopicsRequest) (*ListTopicsResponse, error) + // Lists the name of the subscriptions for this topic. + ListTopicSubscriptions(context.Context, *ListTopicSubscriptionsRequest) (*ListTopicSubscriptionsResponse, error) + // Deletes the topic with the given name. Returns NOT_FOUND if the topic does + // not exist. After a topic is deleted, a new topic may be created with the + // same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted. + DeleteTopic(context.Context, *DeleteTopicRequest) (*empty.Empty, error) +} + +func RegisterPublisherServer(s *grpc.Server, srv PublisherServer) { + s.RegisterService(&_Publisher_serviceDesc, srv) +} + +func _Publisher_CreateTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Topic) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).CreateTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/CreateTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).CreateTopic(ctx, req.(*Topic)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_Publish_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PublishRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).Publish(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/Publish", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).Publish(ctx, req.(*PublishRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_GetTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTopicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).GetTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/GetTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).GetTopic(ctx, req.(*GetTopicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_ListTopics_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTopicsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).ListTopics(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/ListTopics", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).ListTopics(ctx, req.(*ListTopicsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_ListTopicSubscriptions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, 
interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTopicSubscriptionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).ListTopicSubscriptions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/ListTopicSubscriptions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).ListTopicSubscriptions(ctx, req.(*ListTopicSubscriptionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Publisher_DeleteTopic_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTopicRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(PublisherServer).DeleteTopic(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.pubsub.v1beta2.Publisher/DeleteTopic", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(PublisherServer).DeleteTopic(ctx, req.(*DeleteTopicRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Publisher_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.pubsub.v1beta2.Publisher", + HandlerType: (*PublisherServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateTopic", + Handler: _Publisher_CreateTopic_Handler, + }, + { + MethodName: "Publish", + Handler: _Publisher_Publish_Handler, + }, + { + MethodName: "GetTopic", + Handler: _Publisher_GetTopic_Handler, + }, + { + MethodName: "ListTopics", + Handler: _Publisher_ListTopics_Handler, + }, + { + MethodName: "ListTopicSubscriptions", + Handler: _Publisher_ListTopicSubscriptions_Handler, + }, + { + MethodName: "DeleteTopic", + Handler: _Publisher_DeleteTopic_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/pubsub/v1beta2/pubsub.proto", +} + +func init() { + proto.RegisterFile("google/pubsub/v1beta2/pubsub.proto", fileDescriptor_pubsub_6c7fd79cadae8b09) +} + +var fileDescriptor_pubsub_6c7fd79cadae8b09 = []byte{ + // 1107 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0x5f, 0x6f, 0xdb, 0x54, + 0x14, 0x97, 0x93, 0xfe, 0xcb, 0x71, 0x4a, 0xda, 0xab, 0xad, 0x73, 0x53, 0x06, 0x9d, 0x37, 0x4a, + 0x86, 0xb4, 0x64, 0x0b, 0x45, 0x42, 0x8c, 0x7f, 0xed, 0x56, 0x4d, 0x91, 0xa8, 0xc8, 0xdc, 0x3e, + 0xa0, 0x09, 0x2d, 0x72, 0xec, 0x5b, 0xc7, 0xc4, 0xb1, 0x3d, 0x5f, 0xbb, 0x34, 0x7b, 0x43, 0x08, + 0xc1, 0x13, 0x1f, 0x85, 0xaf, 0xc0, 0x03, 0x1f, 0x82, 0xaf, 0x83, 0x7c, 0xef, 0xb5, 0x63, 0xa7, + 0xbe, 0xa9, 0xdb, 0x89, 0x37, 0xdf, 0x73, 0xcf, 0xff, 0x73, 0x7e, 0xe7, 0x1e, 0x83, 0x6a, 0x79, + 0x9e, 0xe5, 0xe0, 0x8e, 0x1f, 0x0d, 0x49, 0x34, 0xec, 0x9c, 0x3f, 0x19, 0xe2, 0x50, 0xef, 0xf2, + 0x63, 0xdb, 0x0f, 0xbc, 0xd0, 0x43, 0xb7, 0x19, 0x4f, 0x9b, 0x13, 0x39, 0x4f, 0x73, 0x27, 0x11, + 0x8d, 0x99, 0x86, 0xd1, 0x59, 0x07, 0x4f, 0xfc, 0x70, 0xca, 0x64, 0xd4, 0x1d, 0x58, 0x3e, 0xf5, + 0x7c, 0xdb, 0x40, 0x08, 0x96, 0x5c, 0x7d, 0x82, 0x15, 0x69, 0x57, 0x6a, 0xd5, 0x34, 0xfa, 0xad, + 0xfe, 0x2b, 0xc1, 0x7a, 0x9f, 0x2a, 0x3b, 0xc6, 0x84, 0xe8, 0x16, 0x8e, 0xb9, 0x4c, 0x3d, 0xd4, + 0x29, 0x57, 0x5d, 0xa3, 0xdf, 0xe8, 0x14, 0x40, 0x0f, 0xc3, 0xc0, 0x1e, 0x46, 0x21, 0x26, 0x4a, + 0x65, 0xb7, 0xda, 0x92, 0xbb, 0xfb, 0xed, 0x42, 0x5f, 0xda, 0x39, 0x6d, 0xed, 0x83, 0x54, 0xec, + 0xc8, 0x0d, 0x83, 0xa9, 0x96, 
0xd1, 0x83, 0xee, 0x02, 0x4c, 0x18, 0xdb, 0xc0, 0x36, 0x95, 0x2a, + 0xf5, 0xaa, 0xc6, 0x29, 0x3d, 0xb3, 0xf9, 0x15, 0x34, 0xe6, 0xa4, 0xd1, 0x06, 0x54, 0xc7, 0x78, + 0xca, 0x03, 0x88, 0x3f, 0xd1, 0x2d, 0x58, 0x3e, 0xd7, 0x9d, 0x08, 0x2b, 0x15, 0x4a, 0x63, 0x87, + 0x2f, 0x2a, 0x9f, 0x4b, 0xea, 0xc7, 0xd0, 0x78, 0x81, 0x43, 0x1a, 0xb9, 0x86, 0xdf, 0x44, 0x98, + 0x84, 0x31, 0x73, 0x18, 0x9f, 0xb9, 0x02, 0x76, 0x50, 0x47, 0xf0, 0x5e, 0x3f, 0x1a, 0x3a, 0x36, + 0x19, 0x2d, 0xe4, 0x43, 0xdf, 0xc2, 0x1a, 0x77, 0x2e, 0x49, 0xc1, 0x83, 0x32, 0x29, 0xd0, 0x52, + 0x29, 0xb5, 0x0b, 0x8d, 0xd4, 0x12, 0xf1, 0x3d, 0x97, 0x60, 0xf4, 0x21, 0xc8, 0xb3, 0x1c, 0x10, + 0x45, 0xda, 0xad, 0xb6, 0x6a, 0x1a, 0xa4, 0x49, 0x20, 0xaa, 0x0d, 0x9b, 0xdf, 0xd9, 0x84, 0xc5, + 0x41, 0x12, 0x07, 0x15, 0x58, 0xf5, 0x03, 0xef, 0x27, 0x6c, 0x84, 0xdc, 0xc5, 0xe4, 0x88, 0x76, + 0xa0, 0xe6, 0xc7, 0xca, 0x88, 0xfd, 0x96, 0xe5, 0x64, 0x59, 0x5b, 0x8b, 0x09, 0x27, 0xf6, 0x5b, + 0x1c, 0x27, 0x9c, 0x5e, 0x86, 0xde, 0x18, 0xbb, 0x49, 0xc2, 0x63, 0xca, 0x69, 0x4c, 0x50, 0x03, + 0x40, 0x59, 0x53, 0xdc, 0xc3, 0x7d, 0x58, 0xa1, 0xf1, 0x33, 0xe7, 0xe4, 0xee, 0xfb, 0x82, 0xa0, + 0x59, 0xa6, 0x39, 0x2f, 0xda, 0x83, 0x86, 0x8b, 0x2f, 0xc2, 0x41, 0xc6, 0x1e, 0xab, 0xd0, 0x7a, + 0x4c, 0xee, 0xa7, 0x36, 0xdf, 0xc0, 0xdd, 0xd4, 0xe6, 0x49, 0x34, 0x24, 0x46, 0x60, 0xfb, 0xa1, + 0xed, 0xb9, 0x64, 0x71, 0x2d, 0xde, 0x25, 0x4c, 0x17, 0x3e, 0x10, 0x99, 0xe4, 0x21, 0x3f, 0x80, + 0x75, 0x92, 0xbd, 0xe0, 0x65, 0xc9, 0x13, 0x4b, 0x87, 0xf8, 0x09, 0xa0, 0xe7, 0xd8, 0xc1, 0x21, + 0x2e, 0xd1, 0x8b, 0x7f, 0x49, 0x50, 0xcf, 0xfa, 0x54, 0x84, 0xd9, 0x99, 0x68, 0x25, 0x9b, 0x92, + 0x43, 0x90, 0xfd, 0x88, 0x8c, 0x06, 0x86, 0xe7, 0x9e, 0xd9, 0x96, 0xb2, 0xb4, 0x2b, 0xb5, 0xe4, + 0xee, 0x3d, 0x61, 0x87, 0x92, 0xd1, 0x33, 0xca, 0xa8, 0x81, 0x9f, 0x7e, 0xa3, 0xc7, 0x70, 0x4b, + 0x37, 0xc6, 0x03, 0x13, 0xeb, 0xa6, 0x63, 0xbb, 0x78, 0x40, 0xb0, 0xe1, 0xb9, 0x26, 0x51, 0x96, + 0x69, 0x86, 0x91, 0x6e, 0x8c, 0x9f, 0xf3, 0xab, 0x13, 0x76, 0xa3, 0xfe, 0x23, 0x01, 0xcc, 0x94, + 0xa1, 0xfb, 0xb0, 0x4e, 0x9d, 0xc0, 0xae, 0xe9, 0x7b, 0xb6, 0x9b, 0xb4, 0x67, 0x3d, 0x26, 0x1e, + 0x71, 0x1a, 0x7a, 0x59, 0x30, 0x4d, 0x9e, 0x5c, 0xe9, 0xe8, 0xa2, 0x51, 0xf2, 0xae, 0xb3, 0x62, + 0x04, 0x0d, 0x0d, 0x1b, 0xd8, 0x3e, 0xc7, 0x66, 0x32, 0x06, 0x6f, 0xc3, 0x4a, 0x9c, 0x0a, 0xdb, + 0x4c, 0x0a, 0xa4, 0x1b, 0xe3, 0x9e, 0x89, 0xbe, 0x86, 0x55, 0x0e, 0x4e, 0xaa, 0xa5, 0xec, 0x0c, + 0x48, 0x84, 0xd4, 0x2f, 0x61, 0xeb, 0x05, 0x0e, 0xb3, 0x25, 0x4e, 0x1a, 0x42, 0x85, 0x7a, 0xb6, + 0xbf, 0x92, 0xcc, 0x65, 0x69, 0xaa, 0x0f, 0x4a, 0xdc, 0xba, 0x85, 0x40, 0xf9, 0x7f, 0x66, 0xc2, + 0x9f, 0x12, 0x6c, 0x17, 0x98, 0xe4, 0x40, 0xe9, 0x15, 0x01, 0x45, 0xee, 0xde, 0x17, 0xe4, 0x24, + 0x17, 0xf6, 0x0d, 0xd1, 0xf4, 0x0d, 0x6c, 0x33, 0x34, 0xdd, 0x34, 0x87, 0xbf, 0x48, 0x70, 0xe7, + 0xd8, 0x33, 0xed, 0xb3, 0x69, 0x06, 0x04, 0xe5, 0xe5, 0xe7, 0x71, 0x56, 0xb9, 0x01, 0xce, 0xd4, + 0x5f, 0x25, 0x90, 0xfb, 0x91, 0xe3, 0x5c, 0xc7, 0xee, 0x23, 0x40, 0x01, 0x0e, 0xa3, 0xc0, 0x1d, + 0xd8, 0x93, 0x09, 0x36, 0x6d, 0x3d, 0xc4, 0xce, 0x94, 0x9a, 0x5f, 0xd3, 0x36, 0xd9, 0x4d, 0x6f, + 0x76, 0x81, 0xee, 0x41, 0x7d, 0xa2, 0x5f, 0x0c, 0xd2, 0x17, 0xab, 0x4a, 0xeb, 0x2e, 0x4f, 0xf4, + 0x8b, 0xe3, 0xe4, 0x39, 0x32, 0xa0, 0xce, 0x9c, 0xe0, 0xd5, 0x3c, 0x81, 0xcd, 0x80, 0xa3, 0x60, + 0x26, 0xc7, 0x2a, 0xba, 0x27, 0x88, 0x6f, 0x0e, 0x35, 0xda, 0x46, 0x90, 0x27, 0x10, 0xf5, 0x77, + 0x09, 0x14, 0x96, 0xee, 0x83, 0xd9, 0xf4, 0xb8, 0x4e, 0xdc, 0x33, 0x20, 0x56, 0xb2, 0x40, 0x14, + 0x8d, 0xaa, 0xaa, 0x70, 0x54, 0xbd, 0x04, 0x74, 0x60, 
0x8c, 0x5d, 0xef, 0x67, 0x07, 0x9b, 0xd6, + 0xb5, 0x5c, 0xb8, 0x03, 0xab, 0xcc, 0x05, 0x36, 0xad, 0x6a, 0xda, 0x0a, 0xf5, 0x81, 0x74, 0xff, + 0x58, 0x01, 0xe0, 0x7d, 0x38, 0xc4, 0x01, 0x7a, 0x0d, 0xe8, 0x59, 0x80, 0xf5, 0x7c, 0x6f, 0xa2, + 0x32, 0x68, 0x68, 0x96, 0x61, 0x42, 0x98, 0xae, 0x34, 0x39, 0xd2, 0x23, 0x81, 0x5c, 0xf1, 0x90, + 0x29, 0x67, 0xe6, 0x9c, 0xad, 0x1c, 0x39, 0xc8, 0xa3, 0x8e, 0x40, 0x52, 0x34, 0x8f, 0x9a, 0x8f, + 0xcb, 0x0b, 0xf0, 0xfe, 0x7b, 0x9d, 0x3c, 0x94, 0x39, 0x6f, 0x44, 0x7a, 0x84, 0x53, 0xa0, 0xb9, + 0x95, 0x4a, 0xf0, 0x75, 0xb8, 0x7d, 0x14, 0xaf, 0xc3, 0xe8, 0x47, 0xd8, 0xbc, 0xd4, 0x89, 0xc2, + 0xb8, 0x44, 0x3d, 0x2b, 0xd4, 0xde, 0x07, 0x39, 0xd3, 0x5e, 0xe8, 0xa1, 0x40, 0xef, 0xe5, 0x16, + 0x14, 0x6a, 0xfc, 0x1e, 0x96, 0x62, 0x7c, 0x22, 0x55, 0x38, 0x5c, 0xd2, 0x09, 0x22, 0x2c, 0x6c, + 0x0e, 0xe0, 0xaf, 0x60, 0x63, 0x7e, 0xf2, 0xa1, 0xf6, 0xc2, 0xf8, 0x2f, 0x8d, 0x48, 0x91, 0xb3, + 0xdd, 0xbf, 0x97, 0xa0, 0xc6, 0x97, 0x5b, 0x1c, 0xa0, 0x1e, 0xc8, 0x0c, 0x09, 0xec, 0xcf, 0x63, + 0xe1, 0xce, 0xd8, 0x5c, 0x78, 0x8b, 0x7e, 0x80, 0x55, 0xae, 0x17, 0x7d, 0x24, 0x7e, 0x6b, 0x33, + 0xeb, 0x7b, 0x73, 0xef, 0x2a, 0x36, 0x9e, 0x8e, 0x3e, 0xac, 0x25, 0x7f, 0x08, 0x68, 0x4f, 0x8c, + 0xa3, 0xec, 0xda, 0x76, 0x85, 0xaf, 0x3a, 0xc0, 0x6c, 0x83, 0x46, 0xad, 0x05, 0x08, 0xc8, 0xed, + 0xf3, 0xcd, 0x87, 0x25, 0x38, 0xb9, 0xd3, 0xbf, 0x49, 0xb0, 0x55, 0xbc, 0xbe, 0xa2, 0xfd, 0xab, + 0xb4, 0x14, 0xe2, 0xf4, 0xb3, 0x6b, 0x4a, 0xa5, 0xc9, 0x93, 0x33, 0x5b, 0xad, 0xb0, 0xdd, 0x2f, + 0x6f, 0xbe, 0xa2, 0x0e, 0x3a, 0xb4, 0x60, 0xdb, 0xf0, 0x26, 0xc5, 0x7a, 0x0e, 0x65, 0xb6, 0x4f, + 0xf5, 0x63, 0x91, 0xbe, 0xf4, 0xea, 0x29, 0xe7, 0xb2, 0x3c, 0x47, 0x77, 0xad, 0xb6, 0x17, 0x58, + 0x1d, 0x0b, 0xbb, 0x54, 0x61, 0x87, 0x5d, 0xe9, 0xbe, 0x4d, 0xe6, 0x7e, 0xa5, 0x9f, 0xb2, 0xe3, + 0x70, 0x85, 0xf2, 0x7d, 0xfa, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe2, 0x2d, 0xb0, 0x67, 0x71, + 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/rpc/code/code.pb.go b/vendor/google.golang.org/genproto/googleapis/rpc/code/code.pb.go new file mode 100644 index 0000000..d438885 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/rpc/code/code.pb.go @@ -0,0 +1,246 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/rpc/code.proto + +package code // import "google.golang.org/genproto/googleapis/rpc/code" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The canonical error codes for Google APIs. +// +// +// Sometimes multiple error codes may apply. Services should return +// the most specific error code that applies. For example, prefer +// `OUT_OF_RANGE` over `FAILED_PRECONDITION` if both codes apply. +// Similarly prefer `NOT_FOUND` or `ALREADY_EXISTS` over `FAILED_PRECONDITION`. +type Code int32 + +const ( + // Not an error; returned on success + // + // HTTP Mapping: 200 OK + Code_OK Code = 0 + // The operation was cancelled, typically by the caller. + // + // HTTP Mapping: 499 Client Closed Request + Code_CANCELLED Code = 1 + // Unknown error. 
For example, this error may be returned when + // a `Status` value received from another address space belongs to + // an error space that is not known in this address space. Also + // errors raised by APIs that do not return enough error information + // may be converted to this error. + // + // HTTP Mapping: 500 Internal Server Error + Code_UNKNOWN Code = 2 + // The client specified an invalid argument. Note that this differs + // from `FAILED_PRECONDITION`. `INVALID_ARGUMENT` indicates arguments + // that are problematic regardless of the state of the system + // (e.g., a malformed file name). + // + // HTTP Mapping: 400 Bad Request + Code_INVALID_ARGUMENT Code = 3 + // The deadline expired before the operation could complete. For operations + // that change the state of the system, this error may be returned + // even if the operation has completed successfully. For example, a + // successful response from a server could have been delayed long + // enough for the deadline to expire. + // + // HTTP Mapping: 504 Gateway Timeout + Code_DEADLINE_EXCEEDED Code = 4 + // Some requested entity (e.g., file or directory) was not found. + // + // Note to server developers: if a request is denied for an entire class + // of users, such as gradual feature rollout or undocumented whitelist, + // `NOT_FOUND` may be used. If a request is denied for some users within + // a class of users, such as user-based access control, `PERMISSION_DENIED` + // must be used. + // + // HTTP Mapping: 404 Not Found + Code_NOT_FOUND Code = 5 + // The entity that a client attempted to create (e.g., file or directory) + // already exists. + // + // HTTP Mapping: 409 Conflict + Code_ALREADY_EXISTS Code = 6 + // The caller does not have permission to execute the specified + // operation. `PERMISSION_DENIED` must not be used for rejections + // caused by exhausting some resource (use `RESOURCE_EXHAUSTED` + // instead for those errors). `PERMISSION_DENIED` must not be + // used if the caller can not be identified (use `UNAUTHENTICATED` + // instead for those errors). This error code does not imply the + // request is valid or the requested entity exists or satisfies + // other pre-conditions. + // + // HTTP Mapping: 403 Forbidden + Code_PERMISSION_DENIED Code = 7 + // The request does not have valid authentication credentials for the + // operation. + // + // HTTP Mapping: 401 Unauthorized + Code_UNAUTHENTICATED Code = 16 + // Some resource has been exhausted, perhaps a per-user quota, or + // perhaps the entire file system is out of space. + // + // HTTP Mapping: 429 Too Many Requests + Code_RESOURCE_EXHAUSTED Code = 8 + // The operation was rejected because the system is not in a state + // required for the operation's execution. For example, the directory + // to be deleted is non-empty, an rmdir operation is applied to + // a non-directory, etc. + // + // Service implementors can use the following guidelines to decide + // between `FAILED_PRECONDITION`, `ABORTED`, and `UNAVAILABLE`: + // (a) Use `UNAVAILABLE` if the client can retry just the failing call. + // (b) Use `ABORTED` if the client should retry at a higher level + // (e.g., when a client-specified test-and-set fails, indicating the + // client should restart a read-modify-write sequence). + // (c) Use `FAILED_PRECONDITION` if the client should not retry until + // the system state has been explicitly fixed. 
E.g., if an "rmdir" + // fails because the directory is non-empty, `FAILED_PRECONDITION` + // should be returned since the client should not retry unless + // the files are deleted from the directory. + // + // HTTP Mapping: 400 Bad Request + Code_FAILED_PRECONDITION Code = 9 + // The operation was aborted, typically due to a concurrency issue such as + // a sequencer check failure or transaction abort. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 409 Conflict + Code_ABORTED Code = 10 + // The operation was attempted past the valid range. E.g., seeking or + // reading past end-of-file. + // + // Unlike `INVALID_ARGUMENT`, this error indicates a problem that may + // be fixed if the system state changes. For example, a 32-bit file + // system will generate `INVALID_ARGUMENT` if asked to read at an + // offset that is not in the range [0,2^32-1], but it will generate + // `OUT_OF_RANGE` if asked to read from an offset past the current + // file size. + // + // There is a fair bit of overlap between `FAILED_PRECONDITION` and + // `OUT_OF_RANGE`. We recommend using `OUT_OF_RANGE` (the more specific + // error) when it applies so that callers who are iterating through + // a space can easily look for an `OUT_OF_RANGE` error to detect when + // they are done. + // + // HTTP Mapping: 400 Bad Request + Code_OUT_OF_RANGE Code = 11 + // The operation is not implemented or is not supported/enabled in this + // service. + // + // HTTP Mapping: 501 Not Implemented + Code_UNIMPLEMENTED Code = 12 + // Internal errors. This means that some invariants expected by the + // underlying system have been broken. This error code is reserved + // for serious errors. + // + // HTTP Mapping: 500 Internal Server Error + Code_INTERNAL Code = 13 + // The service is currently unavailable. This is most likely a + // transient condition, which can be corrected by retrying with + // a backoff. + // + // See the guidelines above for deciding between `FAILED_PRECONDITION`, + // `ABORTED`, and `UNAVAILABLE`. + // + // HTTP Mapping: 503 Service Unavailable + Code_UNAVAILABLE Code = 14 + // Unrecoverable data loss or corruption. 
+ // + // HTTP Mapping: 500 Internal Server Error + Code_DATA_LOSS Code = 15 +) + +var Code_name = map[int32]string{ + 0: "OK", + 1: "CANCELLED", + 2: "UNKNOWN", + 3: "INVALID_ARGUMENT", + 4: "DEADLINE_EXCEEDED", + 5: "NOT_FOUND", + 6: "ALREADY_EXISTS", + 7: "PERMISSION_DENIED", + 16: "UNAUTHENTICATED", + 8: "RESOURCE_EXHAUSTED", + 9: "FAILED_PRECONDITION", + 10: "ABORTED", + 11: "OUT_OF_RANGE", + 12: "UNIMPLEMENTED", + 13: "INTERNAL", + 14: "UNAVAILABLE", + 15: "DATA_LOSS", +} +var Code_value = map[string]int32{ + "OK": 0, + "CANCELLED": 1, + "UNKNOWN": 2, + "INVALID_ARGUMENT": 3, + "DEADLINE_EXCEEDED": 4, + "NOT_FOUND": 5, + "ALREADY_EXISTS": 6, + "PERMISSION_DENIED": 7, + "UNAUTHENTICATED": 16, + "RESOURCE_EXHAUSTED": 8, + "FAILED_PRECONDITION": 9, + "ABORTED": 10, + "OUT_OF_RANGE": 11, + "UNIMPLEMENTED": 12, + "INTERNAL": 13, + "UNAVAILABLE": 14, + "DATA_LOSS": 15, +} + +func (x Code) String() string { + return proto.EnumName(Code_name, int32(x)) +} +func (Code) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_code_95b35ec52f4ac337, []int{0} +} + +func init() { + proto.RegisterEnum("google.rpc.Code", Code_name, Code_value) +} + +func init() { proto.RegisterFile("google/rpc/code.proto", fileDescriptor_code_95b35ec52f4ac337) } + +var fileDescriptor_code_95b35ec52f4ac337 = []byte{ + // 362 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x44, 0x51, 0xcd, 0x6e, 0x93, 0x31, + 0x10, 0xa4, 0x69, 0x49, 0x9b, 0xcd, 0xdf, 0xd6, 0xa5, 0xf0, 0x0e, 0x1c, 0x92, 0x43, 0x8f, 0x9c, + 0x36, 0x9f, 0x37, 0xad, 0x55, 0x67, 0xfd, 0xc9, 0x3f, 0x25, 0x70, 0xb1, 0x4a, 0x1a, 0x7d, 0x42, + 0x2a, 0x75, 0xf4, 0xc1, 0x13, 0xf1, 0x12, 0xbc, 0x1e, 0x72, 0x8b, 0xe8, 0xc5, 0x87, 0x99, 0xf1, + 0xee, 0xce, 0x0c, 0x5c, 0x76, 0xa5, 0x74, 0x8f, 0xfb, 0x65, 0x7f, 0xd8, 0x2d, 0x77, 0xe5, 0x61, + 0xbf, 0x38, 0xf4, 0xe5, 0x57, 0x51, 0xf0, 0x02, 0x2f, 0xfa, 0xc3, 0xee, 0xe3, 0x9f, 0x01, 0x9c, + 0x34, 0xe5, 0x61, 0xaf, 0x86, 0x30, 0x70, 0xb7, 0xf8, 0x46, 0x4d, 0x61, 0xd4, 0x90, 0x34, 0x6c, + 0x2d, 0x6b, 0x3c, 0x52, 0x63, 0x38, 0x4d, 0x72, 0x2b, 0xee, 0xb3, 0xe0, 0x40, 0xbd, 0x03, 0x34, + 0x72, 0x47, 0xd6, 0xe8, 0x4c, 0xfe, 0x3a, 0x6d, 0x58, 0x22, 0x1e, 0xab, 0x4b, 0x38, 0xd7, 0x4c, + 0xda, 0x1a, 0xe1, 0xcc, 0xdb, 0x86, 0x59, 0xb3, 0xc6, 0x93, 0x3a, 0x48, 0x5c, 0xcc, 0x6b, 0x97, + 0x44, 0xe3, 0x5b, 0xa5, 0x60, 0x46, 0xd6, 0x33, 0xe9, 0x2f, 0x99, 0xb7, 0x26, 0xc4, 0x80, 0xc3, + 0xfa, 0xb3, 0x65, 0xbf, 0x31, 0x21, 0x18, 0x27, 0x59, 0xb3, 0x18, 0xd6, 0x78, 0xaa, 0x2e, 0x60, + 0x9e, 0x84, 0x52, 0xbc, 0x61, 0x89, 0xa6, 0xa1, 0xc8, 0x1a, 0x51, 0xbd, 0x07, 0xe5, 0x39, 0xb8, + 0xe4, 0x9b, 0xba, 0xe5, 0x86, 0x52, 0xa8, 0xf8, 0x99, 0xfa, 0x00, 0x17, 0x6b, 0x32, 0x96, 0x75, + 0x6e, 0x3d, 0x37, 0x4e, 0xb4, 0x89, 0xc6, 0x09, 0x8e, 0xea, 0xe5, 0xb4, 0x72, 0xbe, 0xaa, 0x40, + 0x21, 0x4c, 0x5c, 0x8a, 0xd9, 0xad, 0xb3, 0x27, 0xb9, 0x66, 0x1c, 0xab, 0x73, 0x98, 0x26, 0x31, + 0x9b, 0xd6, 0x72, 0xb5, 0xc1, 0x1a, 0x27, 0x6a, 0x02, 0x67, 0x46, 0x22, 0x7b, 0x21, 0x8b, 0x53, + 0x35, 0x87, 0x71, 0x12, 0xba, 0x23, 0x63, 0x69, 0x65, 0x19, 0x67, 0xd5, 0x90, 0xa6, 0x48, 0xd9, + 0xba, 0x10, 0x70, 0xbe, 0xda, 0xc2, 0x6c, 0x57, 0x7e, 0x2c, 0x5e, 0xb3, 0x5c, 0x8d, 0x6a, 0x90, + 0x6d, 0x8d, 0xb8, 0x3d, 0xfa, 0x7a, 0xf5, 0x8f, 0xe8, 0xca, 0xe3, 0xfd, 0x53, 0xb7, 0x28, 0x7d, + 0xb7, 0xec, 0xf6, 0x4f, 0xcf, 0x05, 0x2c, 0x5f, 0xa8, 0xfb, 0xc3, 0xf7, 0x9f, 0xff, 0xab, 0xf9, + 0x54, 0x9f, 0xdf, 0x83, 0x63, 0xdf, 0x36, 0xdf, 0x86, 0xcf, 0xaa, 0xab, 0xbf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x8e, 0x97, 0x77, 0xc2, 
0xbf, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/rpc/errdetails/error_details.pb.go b/vendor/google.golang.org/genproto/googleapis/rpc/errdetails/error_details.pb.go new file mode 100644 index 0000000..bbdcbac --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/rpc/errdetails/error_details.pb.go @@ -0,0 +1,765 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/rpc/error_details.proto + +package errdetails // import "google.golang.org/genproto/googleapis/rpc/errdetails" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Describes when the clients can retry a failed request. Clients could ignore +// the recommendation here or retry when this information is missing from error +// responses. +// +// It's always recommended that clients should use exponential backoff when +// retrying. +// +// Clients should wait until `retry_delay` amount of time has passed since +// receiving the error response before retrying. If retrying requests also +// fail, clients should use an exponential backoff scheme to gradually increase +// the delay between retries based on `retry_delay`, until either a maximum +// number of retires have been reached or a maximum retry delay cap has been +// reached. +type RetryInfo struct { + // Clients should wait at least this long between retrying the same request. + RetryDelay *duration.Duration `protobuf:"bytes,1,opt,name=retry_delay,json=retryDelay,proto3" json:"retry_delay,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryInfo) Reset() { *m = RetryInfo{} } +func (m *RetryInfo) String() string { return proto.CompactTextString(m) } +func (*RetryInfo) ProtoMessage() {} +func (*RetryInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{0} +} +func (m *RetryInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryInfo.Unmarshal(m, b) +} +func (m *RetryInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryInfo.Marshal(b, m, deterministic) +} +func (dst *RetryInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryInfo.Merge(dst, src) +} +func (m *RetryInfo) XXX_Size() int { + return xxx_messageInfo_RetryInfo.Size(m) +} +func (m *RetryInfo) XXX_DiscardUnknown() { + xxx_messageInfo_RetryInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryInfo proto.InternalMessageInfo + +func (m *RetryInfo) GetRetryDelay() *duration.Duration { + if m != nil { + return m.RetryDelay + } + return nil +} + +// Describes additional debugging info. +type DebugInfo struct { + // The stack trace entries indicating where the error occurred. + StackEntries []string `protobuf:"bytes,1,rep,name=stack_entries,json=stackEntries,proto3" json:"stack_entries,omitempty"` + // Additional debugging information provided by the server. 
+ Detail string `protobuf:"bytes,2,opt,name=detail,proto3" json:"detail,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DebugInfo) Reset() { *m = DebugInfo{} } +func (m *DebugInfo) String() string { return proto.CompactTextString(m) } +func (*DebugInfo) ProtoMessage() {} +func (*DebugInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{1} +} +func (m *DebugInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DebugInfo.Unmarshal(m, b) +} +func (m *DebugInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DebugInfo.Marshal(b, m, deterministic) +} +func (dst *DebugInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_DebugInfo.Merge(dst, src) +} +func (m *DebugInfo) XXX_Size() int { + return xxx_messageInfo_DebugInfo.Size(m) +} +func (m *DebugInfo) XXX_DiscardUnknown() { + xxx_messageInfo_DebugInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_DebugInfo proto.InternalMessageInfo + +func (m *DebugInfo) GetStackEntries() []string { + if m != nil { + return m.StackEntries + } + return nil +} + +func (m *DebugInfo) GetDetail() string { + if m != nil { + return m.Detail + } + return "" +} + +// Describes how a quota check failed. +// +// For example if a daily limit was exceeded for the calling project, +// a service could respond with a QuotaFailure detail containing the project +// id and the description of the quota limit that was exceeded. If the +// calling project hasn't enabled the service in the developer console, then +// a service could respond with the project id and set `service_disabled` +// to true. +// +// Also see RetryDetail and Help types for other details about handling a +// quota failure. +type QuotaFailure struct { + // Describes all quota violations. + Violations []*QuotaFailure_Violation `protobuf:"bytes,1,rep,name=violations,proto3" json:"violations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaFailure) Reset() { *m = QuotaFailure{} } +func (m *QuotaFailure) String() string { return proto.CompactTextString(m) } +func (*QuotaFailure) ProtoMessage() {} +func (*QuotaFailure) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{2} +} +func (m *QuotaFailure) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaFailure.Unmarshal(m, b) +} +func (m *QuotaFailure) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaFailure.Marshal(b, m, deterministic) +} +func (dst *QuotaFailure) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaFailure.Merge(dst, src) +} +func (m *QuotaFailure) XXX_Size() int { + return xxx_messageInfo_QuotaFailure.Size(m) +} +func (m *QuotaFailure) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaFailure.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaFailure proto.InternalMessageInfo + +func (m *QuotaFailure) GetViolations() []*QuotaFailure_Violation { + if m != nil { + return m.Violations + } + return nil +} + +// A message type used to describe a single quota violation. For example, a +// daily quota or a custom quota that was exceeded. +type QuotaFailure_Violation struct { + // The subject on which the quota check failed. + // For example, "clientip:<ip address of client>" or "project:<Google developer project id>". 
+ Subject string `protobuf:"bytes,1,opt,name=subject,proto3" json:"subject,omitempty"` + // A description of how the quota check failed. Clients can use this + // description to find more about the quota configuration in the service's + // public documentation, or find the relevant quota limit to adjust through + // developer console. + // + // For example: "Service disabled" or "Daily Limit for read operations + // exceeded". + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QuotaFailure_Violation) Reset() { *m = QuotaFailure_Violation{} } +func (m *QuotaFailure_Violation) String() string { return proto.CompactTextString(m) } +func (*QuotaFailure_Violation) ProtoMessage() {} +func (*QuotaFailure_Violation) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{2, 0} +} +func (m *QuotaFailure_Violation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QuotaFailure_Violation.Unmarshal(m, b) +} +func (m *QuotaFailure_Violation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QuotaFailure_Violation.Marshal(b, m, deterministic) +} +func (dst *QuotaFailure_Violation) XXX_Merge(src proto.Message) { + xxx_messageInfo_QuotaFailure_Violation.Merge(dst, src) +} +func (m *QuotaFailure_Violation) XXX_Size() int { + return xxx_messageInfo_QuotaFailure_Violation.Size(m) +} +func (m *QuotaFailure_Violation) XXX_DiscardUnknown() { + xxx_messageInfo_QuotaFailure_Violation.DiscardUnknown(m) +} + +var xxx_messageInfo_QuotaFailure_Violation proto.InternalMessageInfo + +func (m *QuotaFailure_Violation) GetSubject() string { + if m != nil { + return m.Subject + } + return "" +} + +func (m *QuotaFailure_Violation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Describes what preconditions have failed. +// +// For example, if an RPC failed because it required the Terms of Service to be +// acknowledged, it could list the terms of service violation in the +// PreconditionFailure message. +type PreconditionFailure struct { + // Describes all precondition violations. 
+ Violations []*PreconditionFailure_Violation `protobuf:"bytes,1,rep,name=violations,proto3" json:"violations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PreconditionFailure) Reset() { *m = PreconditionFailure{} } +func (m *PreconditionFailure) String() string { return proto.CompactTextString(m) } +func (*PreconditionFailure) ProtoMessage() {} +func (*PreconditionFailure) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{3} +} +func (m *PreconditionFailure) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PreconditionFailure.Unmarshal(m, b) +} +func (m *PreconditionFailure) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PreconditionFailure.Marshal(b, m, deterministic) +} +func (dst *PreconditionFailure) XXX_Merge(src proto.Message) { + xxx_messageInfo_PreconditionFailure.Merge(dst, src) +} +func (m *PreconditionFailure) XXX_Size() int { + return xxx_messageInfo_PreconditionFailure.Size(m) +} +func (m *PreconditionFailure) XXX_DiscardUnknown() { + xxx_messageInfo_PreconditionFailure.DiscardUnknown(m) +} + +var xxx_messageInfo_PreconditionFailure proto.InternalMessageInfo + +func (m *PreconditionFailure) GetViolations() []*PreconditionFailure_Violation { + if m != nil { + return m.Violations + } + return nil +} + +// A message type used to describe a single precondition failure. +type PreconditionFailure_Violation struct { + // The type of PreconditionFailure. We recommend using a service-specific + // enum type to define the supported precondition violation types. For + // example, "TOS" for "Terms of Service violation". + Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` + // The subject, relative to the type, that failed. + // For example, "google.com/cloud" relative to the "TOS" type would + // indicate which terms of service is being referenced. + Subject string `protobuf:"bytes,2,opt,name=subject,proto3" json:"subject,omitempty"` + // A description of how the precondition failed. Developers can use this + // description to understand how to fix the failure. + // + // For example: "Terms of service not accepted". 
+ Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PreconditionFailure_Violation) Reset() { *m = PreconditionFailure_Violation{} } +func (m *PreconditionFailure_Violation) String() string { return proto.CompactTextString(m) } +func (*PreconditionFailure_Violation) ProtoMessage() {} +func (*PreconditionFailure_Violation) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{3, 0} +} +func (m *PreconditionFailure_Violation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PreconditionFailure_Violation.Unmarshal(m, b) +} +func (m *PreconditionFailure_Violation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PreconditionFailure_Violation.Marshal(b, m, deterministic) +} +func (dst *PreconditionFailure_Violation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PreconditionFailure_Violation.Merge(dst, src) +} +func (m *PreconditionFailure_Violation) XXX_Size() int { + return xxx_messageInfo_PreconditionFailure_Violation.Size(m) +} +func (m *PreconditionFailure_Violation) XXX_DiscardUnknown() { + xxx_messageInfo_PreconditionFailure_Violation.DiscardUnknown(m) +} + +var xxx_messageInfo_PreconditionFailure_Violation proto.InternalMessageInfo + +func (m *PreconditionFailure_Violation) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *PreconditionFailure_Violation) GetSubject() string { + if m != nil { + return m.Subject + } + return "" +} + +func (m *PreconditionFailure_Violation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Describes violations in a client request. This error type focuses on the +// syntactic aspects of the request. +type BadRequest struct { + // Describes all violations in a client request. + FieldViolations []*BadRequest_FieldViolation `protobuf:"bytes,1,rep,name=field_violations,json=fieldViolations,proto3" json:"field_violations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BadRequest) Reset() { *m = BadRequest{} } +func (m *BadRequest) String() string { return proto.CompactTextString(m) } +func (*BadRequest) ProtoMessage() {} +func (*BadRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{4} +} +func (m *BadRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BadRequest.Unmarshal(m, b) +} +func (m *BadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BadRequest.Marshal(b, m, deterministic) +} +func (dst *BadRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BadRequest.Merge(dst, src) +} +func (m *BadRequest) XXX_Size() int { + return xxx_messageInfo_BadRequest.Size(m) +} +func (m *BadRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BadRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BadRequest proto.InternalMessageInfo + +func (m *BadRequest) GetFieldViolations() []*BadRequest_FieldViolation { + if m != nil { + return m.FieldViolations + } + return nil +} + +// A message type used to describe a single bad request field. +type BadRequest_FieldViolation struct { + // A path leading to a field in the request body. The value will be a + // sequence of dot-separated identifiers that identify a protocol buffer + // field. 
E.g., "field_violations.field" would identify this field. + Field string `protobuf:"bytes,1,opt,name=field,proto3" json:"field,omitempty"` + // A description of why the request element is bad. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BadRequest_FieldViolation) Reset() { *m = BadRequest_FieldViolation{} } +func (m *BadRequest_FieldViolation) String() string { return proto.CompactTextString(m) } +func (*BadRequest_FieldViolation) ProtoMessage() {} +func (*BadRequest_FieldViolation) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{4, 0} +} +func (m *BadRequest_FieldViolation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BadRequest_FieldViolation.Unmarshal(m, b) +} +func (m *BadRequest_FieldViolation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BadRequest_FieldViolation.Marshal(b, m, deterministic) +} +func (dst *BadRequest_FieldViolation) XXX_Merge(src proto.Message) { + xxx_messageInfo_BadRequest_FieldViolation.Merge(dst, src) +} +func (m *BadRequest_FieldViolation) XXX_Size() int { + return xxx_messageInfo_BadRequest_FieldViolation.Size(m) +} +func (m *BadRequest_FieldViolation) XXX_DiscardUnknown() { + xxx_messageInfo_BadRequest_FieldViolation.DiscardUnknown(m) +} + +var xxx_messageInfo_BadRequest_FieldViolation proto.InternalMessageInfo + +func (m *BadRequest_FieldViolation) GetField() string { + if m != nil { + return m.Field + } + return "" +} + +func (m *BadRequest_FieldViolation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Contains metadata about the request that clients can attach when filing a bug +// or providing other forms of feedback. +type RequestInfo struct { + // An opaque string that should only be interpreted by the service generating + // it. For example, it can be used to identify requests in the service's logs. + RequestId string `protobuf:"bytes,1,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"` + // Any data that was used to serve this request. For example, an encrypted + // stack trace that can be sent back to the service provider for debugging. 
+ ServingData string `protobuf:"bytes,2,opt,name=serving_data,json=servingData,proto3" json:"serving_data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RequestInfo) Reset() { *m = RequestInfo{} } +func (m *RequestInfo) String() string { return proto.CompactTextString(m) } +func (*RequestInfo) ProtoMessage() {} +func (*RequestInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{5} +} +func (m *RequestInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RequestInfo.Unmarshal(m, b) +} +func (m *RequestInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RequestInfo.Marshal(b, m, deterministic) +} +func (dst *RequestInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_RequestInfo.Merge(dst, src) +} +func (m *RequestInfo) XXX_Size() int { + return xxx_messageInfo_RequestInfo.Size(m) +} +func (m *RequestInfo) XXX_DiscardUnknown() { + xxx_messageInfo_RequestInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_RequestInfo proto.InternalMessageInfo + +func (m *RequestInfo) GetRequestId() string { + if m != nil { + return m.RequestId + } + return "" +} + +func (m *RequestInfo) GetServingData() string { + if m != nil { + return m.ServingData + } + return "" +} + +// Describes the resource that is being accessed. +type ResourceInfo struct { + // A name for the type of resource being accessed, e.g. "sql table", + // "cloud storage bucket", "file", "Google calendar"; or the type URL + // of the resource: e.g. "type.googleapis.com/google.pubsub.v1.Topic". + ResourceType string `protobuf:"bytes,1,opt,name=resource_type,json=resourceType,proto3" json:"resource_type,omitempty"` + // The name of the resource being accessed. For example, a shared calendar + // name: "example.com_4fghdhgsrgh@group.calendar.google.com", if the current + // error is + // [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED]. + ResourceName string `protobuf:"bytes,2,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` + // The owner of the resource (optional). + // For example, "user:<owner email>" or "project:<Google developer project id>". + Owner string `protobuf:"bytes,3,opt,name=owner,proto3" json:"owner,omitempty"` + // Describes what error is encountered when accessing this resource. + // For example, updating a cloud project may require the `writer` permission + // on the developer console project. 
+ Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceInfo) Reset() { *m = ResourceInfo{} } +func (m *ResourceInfo) String() string { return proto.CompactTextString(m) } +func (*ResourceInfo) ProtoMessage() {} +func (*ResourceInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{6} +} +func (m *ResourceInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceInfo.Unmarshal(m, b) +} +func (m *ResourceInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceInfo.Marshal(b, m, deterministic) +} +func (dst *ResourceInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceInfo.Merge(dst, src) +} +func (m *ResourceInfo) XXX_Size() int { + return xxx_messageInfo_ResourceInfo.Size(m) +} +func (m *ResourceInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceInfo proto.InternalMessageInfo + +func (m *ResourceInfo) GetResourceType() string { + if m != nil { + return m.ResourceType + } + return "" +} + +func (m *ResourceInfo) GetResourceName() string { + if m != nil { + return m.ResourceName + } + return "" +} + +func (m *ResourceInfo) GetOwner() string { + if m != nil { + return m.Owner + } + return "" +} + +func (m *ResourceInfo) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +// Provides links to documentation or for performing an out of band action. +// +// For example, if a quota check failed with an error indicating the calling +// project hasn't enabled the accessed service, this can contain a URL pointing +// directly to the right place in the developer console to flip the bit. +type Help struct { + // URL(s) pointing to additional information on handling the current error. + Links []*Help_Link `protobuf:"bytes,1,rep,name=links,proto3" json:"links,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Help) Reset() { *m = Help{} } +func (m *Help) String() string { return proto.CompactTextString(m) } +func (*Help) ProtoMessage() {} +func (*Help) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{7} +} +func (m *Help) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Help.Unmarshal(m, b) +} +func (m *Help) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Help.Marshal(b, m, deterministic) +} +func (dst *Help) XXX_Merge(src proto.Message) { + xxx_messageInfo_Help.Merge(dst, src) +} +func (m *Help) XXX_Size() int { + return xxx_messageInfo_Help.Size(m) +} +func (m *Help) XXX_DiscardUnknown() { + xxx_messageInfo_Help.DiscardUnknown(m) +} + +var xxx_messageInfo_Help proto.InternalMessageInfo + +func (m *Help) GetLinks() []*Help_Link { + if m != nil { + return m.Links + } + return nil +} + +// Describes a URL link. +type Help_Link struct { + // Describes what the link offers. + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // The URL of the link. 
+ Url string `protobuf:"bytes,2,opt,name=url,proto3" json:"url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Help_Link) Reset() { *m = Help_Link{} } +func (m *Help_Link) String() string { return proto.CompactTextString(m) } +func (*Help_Link) ProtoMessage() {} +func (*Help_Link) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{7, 0} +} +func (m *Help_Link) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Help_Link.Unmarshal(m, b) +} +func (m *Help_Link) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Help_Link.Marshal(b, m, deterministic) +} +func (dst *Help_Link) XXX_Merge(src proto.Message) { + xxx_messageInfo_Help_Link.Merge(dst, src) +} +func (m *Help_Link) XXX_Size() int { + return xxx_messageInfo_Help_Link.Size(m) +} +func (m *Help_Link) XXX_DiscardUnknown() { + xxx_messageInfo_Help_Link.DiscardUnknown(m) +} + +var xxx_messageInfo_Help_Link proto.InternalMessageInfo + +func (m *Help_Link) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Help_Link) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +// Provides a localized error message that is safe to return to the user +// which can be attached to an RPC error. +type LocalizedMessage struct { + // The locale used following the specification defined at + // http://www.rfc-editor.org/rfc/bcp/bcp47.txt. + // Examples are: "en-US", "fr-CH", "es-MX" + Locale string `protobuf:"bytes,1,opt,name=locale,proto3" json:"locale,omitempty"` + // The localized error message in the above locale. + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalizedMessage) Reset() { *m = LocalizedMessage{} } +func (m *LocalizedMessage) String() string { return proto.CompactTextString(m) } +func (*LocalizedMessage) ProtoMessage() {} +func (*LocalizedMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_error_details_c8b155055b3404ad, []int{8} +} +func (m *LocalizedMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalizedMessage.Unmarshal(m, b) +} +func (m *LocalizedMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalizedMessage.Marshal(b, m, deterministic) +} +func (dst *LocalizedMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalizedMessage.Merge(dst, src) +} +func (m *LocalizedMessage) XXX_Size() int { + return xxx_messageInfo_LocalizedMessage.Size(m) +} +func (m *LocalizedMessage) XXX_DiscardUnknown() { + xxx_messageInfo_LocalizedMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalizedMessage proto.InternalMessageInfo + +func (m *LocalizedMessage) GetLocale() string { + if m != nil { + return m.Locale + } + return "" +} + +func (m *LocalizedMessage) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func init() { + proto.RegisterType((*RetryInfo)(nil), "google.rpc.RetryInfo") + proto.RegisterType((*DebugInfo)(nil), "google.rpc.DebugInfo") + proto.RegisterType((*QuotaFailure)(nil), "google.rpc.QuotaFailure") + proto.RegisterType((*QuotaFailure_Violation)(nil), "google.rpc.QuotaFailure.Violation") + proto.RegisterType((*PreconditionFailure)(nil), "google.rpc.PreconditionFailure") + 
proto.RegisterType((*PreconditionFailure_Violation)(nil), "google.rpc.PreconditionFailure.Violation") + proto.RegisterType((*BadRequest)(nil), "google.rpc.BadRequest") + proto.RegisterType((*BadRequest_FieldViolation)(nil), "google.rpc.BadRequest.FieldViolation") + proto.RegisterType((*RequestInfo)(nil), "google.rpc.RequestInfo") + proto.RegisterType((*ResourceInfo)(nil), "google.rpc.ResourceInfo") + proto.RegisterType((*Help)(nil), "google.rpc.Help") + proto.RegisterType((*Help_Link)(nil), "google.rpc.Help.Link") + proto.RegisterType((*LocalizedMessage)(nil), "google.rpc.LocalizedMessage") +} + +func init() { + proto.RegisterFile("google/rpc/error_details.proto", fileDescriptor_error_details_c8b155055b3404ad) +} + +var fileDescriptor_error_details_c8b155055b3404ad = []byte{ + // 595 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0xcd, 0x6e, 0xd3, 0x4c, + 0x14, 0x95, 0x9b, 0xb4, 0x9f, 0x7c, 0x93, 0xaf, 0x14, 0xf3, 0xa3, 0x10, 0x09, 0x14, 0x8c, 0x90, + 0x8a, 0x90, 0x1c, 0xa9, 0xec, 0xca, 0x02, 0x29, 0xb8, 0x7f, 0x52, 0x81, 0x60, 0x21, 0x16, 0xb0, + 0xb0, 0x26, 0xf6, 0x8d, 0x35, 0x74, 0xe2, 0x31, 0x33, 0xe3, 0xa2, 0xf0, 0x14, 0xec, 0xd9, 0xb1, + 0xe2, 0x25, 0x78, 0x37, 0x34, 0x9e, 0x99, 0xc6, 0x6d, 0x0a, 0x62, 0x37, 0xe7, 0xcc, 0x99, 0xe3, + 0x73, 0xaf, 0xae, 0x2f, 0x3c, 0x28, 0x38, 0x2f, 0x18, 0x8e, 0x45, 0x95, 0x8d, 0x51, 0x08, 0x2e, + 0xd2, 0x1c, 0x15, 0xa1, 0x4c, 0x46, 0x95, 0xe0, 0x8a, 0x07, 0x60, 0xee, 0x23, 0x51, 0x65, 0x43, + 0xa7, 0x6d, 0x6e, 0x66, 0xf5, 0x7c, 0x9c, 0xd7, 0x82, 0x28, 0xca, 0x4b, 0xa3, 0x0d, 0x8f, 0xc0, + 0x4f, 0x50, 0x89, 0xe5, 0x49, 0x39, 0xe7, 0xc1, 0x3e, 0xf4, 0x84, 0x06, 0x69, 0x8e, 0x8c, 0x2c, + 0x07, 0xde, 0xc8, 0xdb, 0xed, 0xed, 0xdd, 0x8b, 0xac, 0x9d, 0xb3, 0x88, 0x62, 0x6b, 0x91, 0x40, + 0xa3, 0x8e, 0xb5, 0x38, 0x3c, 0x06, 0x3f, 0xc6, 0x59, 0x5d, 0x34, 0x46, 0x8f, 0xe0, 0x7f, 0xa9, + 0x48, 0x76, 0x96, 0x62, 0xa9, 0x04, 0x45, 0x39, 0xf0, 0x46, 0x9d, 0x5d, 0x3f, 0xe9, 0x37, 0xe4, + 0x81, 0xe1, 0x82, 0xbb, 0xb0, 0x65, 0x72, 0x0f, 0x36, 0x46, 0xde, 0xae, 0x9f, 0x58, 0x14, 0x7e, + 0xf7, 0xa0, 0xff, 0xb6, 0xe6, 0x8a, 0x1c, 0x12, 0xca, 0x6a, 0x81, 0xc1, 0x04, 0xe0, 0x9c, 0x72, + 0xd6, 0x7c, 0xd3, 0x58, 0xf5, 0xf6, 0xc2, 0x68, 0x55, 0x64, 0xd4, 0x56, 0x47, 0xef, 0x9d, 0x34, + 0x69, 0xbd, 0x1a, 0x1e, 0x81, 0x7f, 0x71, 0x11, 0x0c, 0xe0, 0x3f, 0x59, 0xcf, 0x3e, 0x61, 0xa6, + 0x9a, 0x1a, 0xfd, 0xc4, 0xc1, 0x60, 0x04, 0xbd, 0x1c, 0x65, 0x26, 0x68, 0xa5, 0x85, 0x36, 0x58, + 0x9b, 0x0a, 0x7f, 0x79, 0x70, 0x6b, 0x2a, 0x30, 0xe3, 0x65, 0x4e, 0x35, 0xe1, 0x42, 0x9e, 0x5c, + 0x13, 0xf2, 0x49, 0x3b, 0xe4, 0x35, 0x8f, 0xfe, 0x90, 0xf5, 0x63, 0x3b, 0x6b, 0x00, 0x5d, 0xb5, + 0xac, 0xd0, 0x06, 0x6d, 0xce, 0xed, 0xfc, 0x1b, 0x7f, 0xcd, 0xdf, 0x59, 0xcf, 0xff, 0xd3, 0x03, + 0x98, 0x90, 0x3c, 0xc1, 0xcf, 0x35, 0x4a, 0x15, 0x4c, 0x61, 0x67, 0x4e, 0x91, 0xe5, 0xe9, 0x5a, + 0xf8, 0xc7, 0xed, 0xf0, 0xab, 0x17, 0xd1, 0xa1, 0x96, 0xaf, 0x82, 0xdf, 0x98, 0x5f, 0xc2, 0x72, + 0x78, 0x0c, 0xdb, 0x97, 0x25, 0xc1, 0x6d, 0xd8, 0x6c, 0x44, 0xb6, 0x06, 0x03, 0xfe, 0xa1, 0xd5, + 0x6f, 0xa0, 0x67, 0x3f, 0xda, 0x0c, 0xd5, 0x7d, 0x00, 0x61, 0x60, 0x4a, 0x9d, 0x97, 0x6f, 0x99, + 0x93, 0x3c, 0x78, 0x08, 0x7d, 0x89, 0xe2, 0x9c, 0x96, 0x45, 0x9a, 0x13, 0x45, 0x9c, 0xa1, 0xe5, + 0x62, 0xa2, 0x48, 0xf8, 0xcd, 0x83, 0x7e, 0x82, 0x92, 0xd7, 0x22, 0x43, 0x37, 0xa7, 0xc2, 0xe2, + 0xb4, 0xd5, 0xe5, 0xbe, 0x23, 0xdf, 0xe9, 0x6e, 0xb7, 0x45, 0x25, 0x59, 0xa0, 0x75, 0xbe, 0x10, + 0xbd, 0x26, 0x0b, 0xd4, 0x35, 0xf2, 0x2f, 0x25, 0x0a, 0xdb, 0x72, 0x03, 
0xae, 0xd6, 0xd8, 0x5d, + 0xaf, 0x91, 0x43, 0xf7, 0x18, 0x59, 0x15, 0x3c, 0x85, 0x4d, 0x46, 0xcb, 0x33, 0xd7, 0xfc, 0x3b, + 0xed, 0xe6, 0x6b, 0x41, 0x74, 0x4a, 0xcb, 0xb3, 0xc4, 0x68, 0x86, 0xfb, 0xd0, 0xd5, 0xf0, 0xaa, + 0xbd, 0xb7, 0x66, 0x1f, 0xec, 0x40, 0xa7, 0x16, 0xee, 0x07, 0xd3, 0xc7, 0x30, 0x86, 0x9d, 0x53, + 0x9e, 0x11, 0x46, 0xbf, 0x62, 0xfe, 0x0a, 0xa5, 0x24, 0x05, 0xea, 0x3f, 0x91, 0x69, 0xce, 0xd5, + 0x6f, 0x91, 0x9e, 0xb3, 0x85, 0x91, 0xb8, 0x39, 0xb3, 0x70, 0xc2, 0x60, 0x3b, 0xe3, 0x8b, 0x56, + 0xc8, 0xc9, 0xcd, 0x03, 0xbd, 0x89, 0x62, 0xb3, 0x88, 0xa6, 0x7a, 0x55, 0x4c, 0xbd, 0x0f, 0x2f, + 0xac, 0xa0, 0xe0, 0x8c, 0x94, 0x45, 0xc4, 0x45, 0x31, 0x2e, 0xb0, 0x6c, 0x16, 0xc9, 0xd8, 0x5c, + 0x91, 0x8a, 0x4a, 0xb7, 0xc8, 0xec, 0x16, 0x7b, 0xbe, 0x3a, 0xfe, 0xd8, 0xe8, 0x24, 0xd3, 0x97, + 0xb3, 0xad, 0xe6, 0xc5, 0xb3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x90, 0x15, 0x46, 0x2d, 0xf9, + 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/rpc/status/status.pb.go b/vendor/google.golang.org/genproto/googleapis/rpc/status/status.pb.go new file mode 100644 index 0000000..a203bd4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/rpc/status/status.pb.go @@ -0,0 +1,159 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/rpc/status.proto + +package status // import "google.golang.org/genproto/googleapis/rpc/status" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). The error model is designed to be: +// +// - Simple to use and understand for most users +// - Flexible enough to meet unexpected needs +// +// # Overview +// +// The `Status` message contains three pieces of data: error code, error +// message, and error details. The error code should be an enum value of +// [google.rpc.Code][google.rpc.Code], but it may accept additional error codes +// if needed. The error message should be a developer-facing English message +// that helps developers *understand* and *resolve* the error. If a localized +// user-facing error message is needed, put the localized message in the error +// details or localize it in the client. The optional error details may contain +// arbitrary information about the error. There is a predefined set of error +// detail types in the package `google.rpc` that can be used for common error +// conditions. +// +// # Language mapping +// +// The `Status` message is the logical representation of the error model, but it +// is not necessarily the actual wire format. When the `Status` message is +// exposed in different client libraries and different wire protocols, it can be +// mapped differently. For example, it will likely be mapped to some exceptions +// in Java, but more likely mapped to some error codes in C. 
+// +// # Other uses +// +// The error model and the `Status` message can be used in a variety of +// environments, either with or without APIs, to provide a +// consistent developer experience across different environments. +// +// Example uses of this error model include: +// +// - Partial errors. If a service needs to return partial errors to the client, +// it may embed the `Status` in the normal response to indicate the partial +// errors. +// +// - Workflow errors. A typical workflow has multiple steps. Each step may +// have a `Status` message for error reporting. +// +// - Batch operations. If a client uses batch request and batch response, the +// `Status` message should be used directly inside batch response, one for +// each error sub-response. +// +// - Asynchronous operations. If an API call embeds asynchronous operation +// results in its response, the status of those operations should be +// represented directly using the `Status` message. +// +// - Logging. If some API errors are stored in logs, the message `Status` could +// be used directly after any stripping needed for security/privacy reasons. +type Status struct { + // The status code, which should be an enum value of + // [google.rpc.Code][google.rpc.Code]. + Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized + // by the client. + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + Details []*any.Any `protobuf:"bytes,3,rep,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Status) Reset() { *m = Status{} } +func (m *Status) String() string { return proto.CompactTextString(m) } +func (*Status) ProtoMessage() {} +func (*Status) Descriptor() ([]byte, []int) { + return fileDescriptor_status_8a460da8fb0891c1, []int{0} +} +func (m *Status) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Status.Unmarshal(m, b) +} +func (m *Status) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Status.Marshal(b, m, deterministic) +} +func (dst *Status) XXX_Merge(src proto.Message) { + xxx_messageInfo_Status.Merge(dst, src) +} +func (m *Status) XXX_Size() int { + return xxx_messageInfo_Status.Size(m) +} +func (m *Status) XXX_DiscardUnknown() { + xxx_messageInfo_Status.DiscardUnknown(m) +} + +var xxx_messageInfo_Status proto.InternalMessageInfo + +func (m *Status) GetCode() int32 { + if m != nil { + return m.Code + } + return 0 +} + +func (m *Status) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func (m *Status) GetDetails() []*any.Any { + if m != nil { + return m.Details + } + return nil +} + +func init() { + proto.RegisterType((*Status)(nil), "google.rpc.Status") +} + +func init() { proto.RegisterFile("google/rpc/status.proto", fileDescriptor_status_8a460da8fb0891c1) } + +var fileDescriptor_status_8a460da8fb0891c1 = []byte{ + // 209 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x2a, 0x48, 0xd6, 0x2f, 0x2e, 0x49, 0x2c, 0x29, 0x2d, 0xd6, 
0x2b, 0x28, + 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x82, 0x48, 0xe8, 0x15, 0x15, 0x24, 0x4b, 0x49, 0x42, 0x15, 0x81, + 0x65, 0x92, 0x4a, 0xd3, 0xf4, 0x13, 0xf3, 0x2a, 0x21, 0xca, 0x94, 0xd2, 0xb8, 0xd8, 0x82, 0xc1, + 0xda, 0x84, 0x84, 0xb8, 0x58, 0x92, 0xf3, 0x53, 0x52, 0x25, 0x18, 0x15, 0x18, 0x35, 0x58, 0x83, + 0xc0, 0x6c, 0x21, 0x09, 0x2e, 0xf6, 0xdc, 0xd4, 0xe2, 0xe2, 0xc4, 0xf4, 0x54, 0x09, 0x26, 0x05, + 0x46, 0x0d, 0xce, 0x20, 0x18, 0x57, 0x48, 0x8f, 0x8b, 0x3d, 0x25, 0xb5, 0x24, 0x31, 0x33, 0xa7, + 0x58, 0x82, 0x59, 0x81, 0x59, 0x83, 0xdb, 0x48, 0x44, 0x0f, 0x6a, 0x21, 0xcc, 0x12, 0x3d, 0xc7, + 0xbc, 0xca, 0x20, 0x98, 0x22, 0xa7, 0x38, 0x2e, 0xbe, 0xe4, 0xfc, 0x5c, 0x3d, 0x84, 0xa3, 0x9c, + 0xb8, 0x21, 0xf6, 0x06, 0x80, 0x94, 0x07, 0x30, 0x46, 0x99, 0x43, 0xa5, 0xd2, 0xf3, 0x73, 0x12, + 0xf3, 0xd2, 0xf5, 0xf2, 0x8b, 0xd2, 0xf5, 0xd3, 0x53, 0xf3, 0xc0, 0x86, 0xe9, 0x43, 0xa4, 0x12, + 0x0b, 0x32, 0x8b, 0x91, 0xfc, 0x69, 0x0d, 0xa1, 0x16, 0x31, 0x31, 0x07, 0x05, 0x38, 0x27, 0xb1, + 0x81, 0x55, 0x1a, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa4, 0x53, 0xf0, 0x7c, 0x10, 0x01, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/admin/database/v1/spanner_database_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/admin/database/v1/spanner_database_admin.pb.go new file mode 100644 index 0000000..c3f4be4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/admin/database/v1/spanner_database_admin.pb.go @@ -0,0 +1,1158 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/admin/database/v1/spanner_database_admin.proto + +package database // import "google.golang.org/genproto/googleapis/spanner/admin/database/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates the current state of the database. +type Database_State int32 + +const ( + // Not specified. + Database_STATE_UNSPECIFIED Database_State = 0 + // The database is still being created. Operations on the database may fail + // with `FAILED_PRECONDITION` in this state. + Database_CREATING Database_State = 1 + // The database is fully created and ready for use. 
+ Database_READY Database_State = 2 +) + +var Database_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", +} +var Database_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, +} + +func (x Database_State) String() string { + return proto.EnumName(Database_State_name, int32(x)) +} +func (Database_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{0, 0} +} + +// A Cloud Spanner database. +type Database struct { + // Required. The name of the database. Values are of the form + // `projects//instances//databases/`, + // where `` is as specified in the `CREATE DATABASE` + // statement. This name can be passed to other API methods to + // identify the database. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Output only. The current database state. + State Database_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.spanner.admin.database.v1.Database_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Database) Reset() { *m = Database{} } +func (m *Database) String() string { return proto.CompactTextString(m) } +func (*Database) ProtoMessage() {} +func (*Database) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{0} +} +func (m *Database) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Database.Unmarshal(m, b) +} +func (m *Database) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Database.Marshal(b, m, deterministic) +} +func (dst *Database) XXX_Merge(src proto.Message) { + xxx_messageInfo_Database.Merge(dst, src) +} +func (m *Database) XXX_Size() int { + return xxx_messageInfo_Database.Size(m) +} +func (m *Database) XXX_DiscardUnknown() { + xxx_messageInfo_Database.DiscardUnknown(m) +} + +var xxx_messageInfo_Database proto.InternalMessageInfo + +func (m *Database) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Database) GetState() Database_State { + if m != nil { + return m.State + } + return Database_STATE_UNSPECIFIED +} + +// The request for +// [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. +type ListDatabasesRequest struct { + // Required. The instance whose databases should be listed. + // Values are of the form `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Number of databases to be returned in the response. If 0 or less, + // defaults to the server's maximum allowed page size. + PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` should contain a + // [next_page_token][google.spanner.admin.database.v1.ListDatabasesResponse.next_page_token] + // from a previous + // [ListDatabasesResponse][google.spanner.admin.database.v1.ListDatabasesResponse]. 
+ PageToken string `protobuf:"bytes,4,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatabasesRequest) Reset() { *m = ListDatabasesRequest{} } +func (m *ListDatabasesRequest) String() string { return proto.CompactTextString(m) } +func (*ListDatabasesRequest) ProtoMessage() {} +func (*ListDatabasesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{1} +} +func (m *ListDatabasesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatabasesRequest.Unmarshal(m, b) +} +func (m *ListDatabasesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatabasesRequest.Marshal(b, m, deterministic) +} +func (dst *ListDatabasesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatabasesRequest.Merge(dst, src) +} +func (m *ListDatabasesRequest) XXX_Size() int { + return xxx_messageInfo_ListDatabasesRequest.Size(m) +} +func (m *ListDatabasesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatabasesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatabasesRequest proto.InternalMessageInfo + +func (m *ListDatabasesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListDatabasesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListDatabasesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for +// [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases]. +type ListDatabasesResponse struct { + // Databases that matched the request. + Databases []*Database `protobuf:"bytes,1,rep,name=databases,proto3" json:"databases,omitempty"` + // `next_page_token` can be sent in a subsequent + // [ListDatabases][google.spanner.admin.database.v1.DatabaseAdmin.ListDatabases] + // call to fetch more of the matching databases. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListDatabasesResponse) Reset() { *m = ListDatabasesResponse{} } +func (m *ListDatabasesResponse) String() string { return proto.CompactTextString(m) } +func (*ListDatabasesResponse) ProtoMessage() {} +func (*ListDatabasesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{2} +} +func (m *ListDatabasesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListDatabasesResponse.Unmarshal(m, b) +} +func (m *ListDatabasesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListDatabasesResponse.Marshal(b, m, deterministic) +} +func (dst *ListDatabasesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListDatabasesResponse.Merge(dst, src) +} +func (m *ListDatabasesResponse) XXX_Size() int { + return xxx_messageInfo_ListDatabasesResponse.Size(m) +} +func (m *ListDatabasesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListDatabasesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListDatabasesResponse proto.InternalMessageInfo + +func (m *ListDatabasesResponse) GetDatabases() []*Database { + if m != nil { + return m.Databases + } + return nil +} + +func (m *ListDatabasesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for +// [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. +type CreateDatabaseRequest struct { + // Required. The name of the instance that will serve the new database. + // Values are of the form `projects//instances/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. A `CREATE DATABASE` statement, which specifies the ID of the + // new database. The database ID must conform to the regular expression + // `[a-z][a-z0-9_\-]*[a-z0-9]` and be between 2 and 30 characters in length. + // If the database ID is a reserved word or if it contains a hyphen, the + // database ID must be enclosed in backticks (`` ` ``). + CreateStatement string `protobuf:"bytes,2,opt,name=create_statement,json=createStatement,proto3" json:"create_statement,omitempty"` + // An optional list of DDL statements to run inside the newly created + // database. Statements can create tables, indexes, etc. These + // statements execute atomically with the creation of the database: + // if there is an error in any statement, the database is not created. 
+ ExtraStatements []string `protobuf:"bytes,3,rep,name=extra_statements,json=extraStatements,proto3" json:"extra_statements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDatabaseRequest) Reset() { *m = CreateDatabaseRequest{} } +func (m *CreateDatabaseRequest) String() string { return proto.CompactTextString(m) } +func (*CreateDatabaseRequest) ProtoMessage() {} +func (*CreateDatabaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{3} +} +func (m *CreateDatabaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDatabaseRequest.Unmarshal(m, b) +} +func (m *CreateDatabaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDatabaseRequest.Marshal(b, m, deterministic) +} +func (dst *CreateDatabaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDatabaseRequest.Merge(dst, src) +} +func (m *CreateDatabaseRequest) XXX_Size() int { + return xxx_messageInfo_CreateDatabaseRequest.Size(m) +} +func (m *CreateDatabaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDatabaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDatabaseRequest proto.InternalMessageInfo + +func (m *CreateDatabaseRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateDatabaseRequest) GetCreateStatement() string { + if m != nil { + return m.CreateStatement + } + return "" +} + +func (m *CreateDatabaseRequest) GetExtraStatements() []string { + if m != nil { + return m.ExtraStatements + } + return nil +} + +// Metadata type for the operation returned by +// [CreateDatabase][google.spanner.admin.database.v1.DatabaseAdmin.CreateDatabase]. +type CreateDatabaseMetadata struct { + // The database being created. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateDatabaseMetadata) Reset() { *m = CreateDatabaseMetadata{} } +func (m *CreateDatabaseMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateDatabaseMetadata) ProtoMessage() {} +func (*CreateDatabaseMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{4} +} +func (m *CreateDatabaseMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateDatabaseMetadata.Unmarshal(m, b) +} +func (m *CreateDatabaseMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateDatabaseMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateDatabaseMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateDatabaseMetadata.Merge(dst, src) +} +func (m *CreateDatabaseMetadata) XXX_Size() int { + return xxx_messageInfo_CreateDatabaseMetadata.Size(m) +} +func (m *CreateDatabaseMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateDatabaseMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateDatabaseMetadata proto.InternalMessageInfo + +func (m *CreateDatabaseMetadata) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +// The request for +// [GetDatabase][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabase]. +type GetDatabaseRequest struct { + // Required. The name of the requested database. 
Values are of the form + // `projects//instances//databases/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatabaseRequest) Reset() { *m = GetDatabaseRequest{} } +func (m *GetDatabaseRequest) String() string { return proto.CompactTextString(m) } +func (*GetDatabaseRequest) ProtoMessage() {} +func (*GetDatabaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{5} +} +func (m *GetDatabaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatabaseRequest.Unmarshal(m, b) +} +func (m *GetDatabaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatabaseRequest.Marshal(b, m, deterministic) +} +func (dst *GetDatabaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatabaseRequest.Merge(dst, src) +} +func (m *GetDatabaseRequest) XXX_Size() int { + return xxx_messageInfo_GetDatabaseRequest.Size(m) +} +func (m *GetDatabaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatabaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatabaseRequest proto.InternalMessageInfo + +func (m *GetDatabaseRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Enqueues the given DDL statements to be applied, in order but not +// necessarily all at once, to the database schema at some point (or +// points) in the future. The server checks that the statements +// are executable (syntactically valid, name tables that exist, etc.) +// before enqueueing them, but they may still fail upon +// later execution (e.g., if a statement from another batch of +// statements is applied first and it conflicts in some way, or if +// there is some data-related problem like a `NULL` value in a column to +// which `NOT NULL` would be added). If a statement fails, all +// subsequent statements in the batch are automatically cancelled. +// +// Each batch of statements is assigned a name which can be used with +// the [Operations][google.longrunning.Operations] API to monitor +// progress. See the +// [operation_id][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.operation_id] +// field for more details. +type UpdateDatabaseDdlRequest struct { + // Required. The database to update. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // DDL statements to be applied to the database. + Statements []string `protobuf:"bytes,2,rep,name=statements,proto3" json:"statements,omitempty"` + // If empty, the new update request is assigned an + // automatically-generated operation ID. Otherwise, `operation_id` + // is used to construct the name of the resulting + // [Operation][google.longrunning.Operation]. + // + // Specifying an explicit operation ID simplifies determining + // whether the statements were executed in the event that the + // [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + // call is replayed, or the return value is otherwise lost: the + // [database][google.spanner.admin.database.v1.UpdateDatabaseDdlRequest.database] + // and `operation_id` fields can be combined to form the + // [name][google.longrunning.Operation.name] of the resulting + // [longrunning.Operation][google.longrunning.Operation]: + // `/operations/`. 
+ // + // `operation_id` should be unique within the database, and must be + // a valid identifier: `[a-z][a-z0-9_]*`. Note that + // automatically-generated operation IDs always begin with an + // underscore. If the named operation already exists, + // [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl] + // returns `ALREADY_EXISTS`. + OperationId string `protobuf:"bytes,3,opt,name=operation_id,json=operationId,proto3" json:"operation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDatabaseDdlRequest) Reset() { *m = UpdateDatabaseDdlRequest{} } +func (m *UpdateDatabaseDdlRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateDatabaseDdlRequest) ProtoMessage() {} +func (*UpdateDatabaseDdlRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{6} +} +func (m *UpdateDatabaseDdlRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDatabaseDdlRequest.Unmarshal(m, b) +} +func (m *UpdateDatabaseDdlRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDatabaseDdlRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateDatabaseDdlRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDatabaseDdlRequest.Merge(dst, src) +} +func (m *UpdateDatabaseDdlRequest) XXX_Size() int { + return xxx_messageInfo_UpdateDatabaseDdlRequest.Size(m) +} +func (m *UpdateDatabaseDdlRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDatabaseDdlRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDatabaseDdlRequest proto.InternalMessageInfo + +func (m *UpdateDatabaseDdlRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *UpdateDatabaseDdlRequest) GetStatements() []string { + if m != nil { + return m.Statements + } + return nil +} + +func (m *UpdateDatabaseDdlRequest) GetOperationId() string { + if m != nil { + return m.OperationId + } + return "" +} + +// Metadata type for the operation returned by +// [UpdateDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabaseDdl]. +type UpdateDatabaseDdlMetadata struct { + // The database being modified. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // For an update this list contains all the statements. For an + // individual statement, this list contains only that statement. + Statements []string `protobuf:"bytes,2,rep,name=statements,proto3" json:"statements,omitempty"` + // Reports the commit timestamps of all statements that have + // succeeded so far, where `commit_timestamps[i]` is the commit + // timestamp for the statement `statements[i]`. 
+ CommitTimestamps []*timestamp.Timestamp `protobuf:"bytes,3,rep,name=commit_timestamps,json=commitTimestamps,proto3" json:"commit_timestamps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateDatabaseDdlMetadata) Reset() { *m = UpdateDatabaseDdlMetadata{} } +func (m *UpdateDatabaseDdlMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateDatabaseDdlMetadata) ProtoMessage() {} +func (*UpdateDatabaseDdlMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{7} +} +func (m *UpdateDatabaseDdlMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateDatabaseDdlMetadata.Unmarshal(m, b) +} +func (m *UpdateDatabaseDdlMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateDatabaseDdlMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateDatabaseDdlMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateDatabaseDdlMetadata.Merge(dst, src) +} +func (m *UpdateDatabaseDdlMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateDatabaseDdlMetadata.Size(m) +} +func (m *UpdateDatabaseDdlMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateDatabaseDdlMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateDatabaseDdlMetadata proto.InternalMessageInfo + +func (m *UpdateDatabaseDdlMetadata) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *UpdateDatabaseDdlMetadata) GetStatements() []string { + if m != nil { + return m.Statements + } + return nil +} + +func (m *UpdateDatabaseDdlMetadata) GetCommitTimestamps() []*timestamp.Timestamp { + if m != nil { + return m.CommitTimestamps + } + return nil +} + +// The request for +// [DropDatabase][google.spanner.admin.database.v1.DatabaseAdmin.DropDatabase]. +type DropDatabaseRequest struct { + // Required. The database to be dropped. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DropDatabaseRequest) Reset() { *m = DropDatabaseRequest{} } +func (m *DropDatabaseRequest) String() string { return proto.CompactTextString(m) } +func (*DropDatabaseRequest) ProtoMessage() {} +func (*DropDatabaseRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{8} +} +func (m *DropDatabaseRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DropDatabaseRequest.Unmarshal(m, b) +} +func (m *DropDatabaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DropDatabaseRequest.Marshal(b, m, deterministic) +} +func (dst *DropDatabaseRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DropDatabaseRequest.Merge(dst, src) +} +func (m *DropDatabaseRequest) XXX_Size() int { + return xxx_messageInfo_DropDatabaseRequest.Size(m) +} +func (m *DropDatabaseRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DropDatabaseRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DropDatabaseRequest proto.InternalMessageInfo + +func (m *DropDatabaseRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +// The request for +// [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. +type GetDatabaseDdlRequest struct { + // Required. The database whose schema we wish to get. 
+ Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatabaseDdlRequest) Reset() { *m = GetDatabaseDdlRequest{} } +func (m *GetDatabaseDdlRequest) String() string { return proto.CompactTextString(m) } +func (*GetDatabaseDdlRequest) ProtoMessage() {} +func (*GetDatabaseDdlRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{9} +} +func (m *GetDatabaseDdlRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatabaseDdlRequest.Unmarshal(m, b) +} +func (m *GetDatabaseDdlRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatabaseDdlRequest.Marshal(b, m, deterministic) +} +func (dst *GetDatabaseDdlRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatabaseDdlRequest.Merge(dst, src) +} +func (m *GetDatabaseDdlRequest) XXX_Size() int { + return xxx_messageInfo_GetDatabaseDdlRequest.Size(m) +} +func (m *GetDatabaseDdlRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatabaseDdlRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatabaseDdlRequest proto.InternalMessageInfo + +func (m *GetDatabaseDdlRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +// The response for +// [GetDatabaseDdl][google.spanner.admin.database.v1.DatabaseAdmin.GetDatabaseDdl]. +type GetDatabaseDdlResponse struct { + // A list of formatted DDL statements defining the schema of the database + // specified in the request. + Statements []string `protobuf:"bytes,1,rep,name=statements,proto3" json:"statements,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetDatabaseDdlResponse) Reset() { *m = GetDatabaseDdlResponse{} } +func (m *GetDatabaseDdlResponse) String() string { return proto.CompactTextString(m) } +func (*GetDatabaseDdlResponse) ProtoMessage() {} +func (*GetDatabaseDdlResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_database_admin_742316449a28c935, []int{10} +} +func (m *GetDatabaseDdlResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetDatabaseDdlResponse.Unmarshal(m, b) +} +func (m *GetDatabaseDdlResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetDatabaseDdlResponse.Marshal(b, m, deterministic) +} +func (dst *GetDatabaseDdlResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetDatabaseDdlResponse.Merge(dst, src) +} +func (m *GetDatabaseDdlResponse) XXX_Size() int { + return xxx_messageInfo_GetDatabaseDdlResponse.Size(m) +} +func (m *GetDatabaseDdlResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetDatabaseDdlResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetDatabaseDdlResponse proto.InternalMessageInfo + +func (m *GetDatabaseDdlResponse) GetStatements() []string { + if m != nil { + return m.Statements + } + return nil +} + +func init() { + proto.RegisterType((*Database)(nil), "google.spanner.admin.database.v1.Database") + proto.RegisterType((*ListDatabasesRequest)(nil), "google.spanner.admin.database.v1.ListDatabasesRequest") + proto.RegisterType((*ListDatabasesResponse)(nil), "google.spanner.admin.database.v1.ListDatabasesResponse") + proto.RegisterType((*CreateDatabaseRequest)(nil), "google.spanner.admin.database.v1.CreateDatabaseRequest") + 
proto.RegisterType((*CreateDatabaseMetadata)(nil), "google.spanner.admin.database.v1.CreateDatabaseMetadata") + proto.RegisterType((*GetDatabaseRequest)(nil), "google.spanner.admin.database.v1.GetDatabaseRequest") + proto.RegisterType((*UpdateDatabaseDdlRequest)(nil), "google.spanner.admin.database.v1.UpdateDatabaseDdlRequest") + proto.RegisterType((*UpdateDatabaseDdlMetadata)(nil), "google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata") + proto.RegisterType((*DropDatabaseRequest)(nil), "google.spanner.admin.database.v1.DropDatabaseRequest") + proto.RegisterType((*GetDatabaseDdlRequest)(nil), "google.spanner.admin.database.v1.GetDatabaseDdlRequest") + proto.RegisterType((*GetDatabaseDdlResponse)(nil), "google.spanner.admin.database.v1.GetDatabaseDdlResponse") + proto.RegisterEnum("google.spanner.admin.database.v1.Database_State", Database_State_name, Database_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// DatabaseAdminClient is the client API for DatabaseAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type DatabaseAdminClient interface { + // Lists Cloud Spanner databases. + ListDatabases(ctx context.Context, in *ListDatabasesRequest, opts ...grpc.CallOption) (*ListDatabasesResponse, error) + // Creates a new Cloud Spanner database and starts to prepare it for serving. + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track preparation of the database. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Database][google.spanner.admin.database.v1.Database], if successful. + CreateDatabase(ctx context.Context, in *CreateDatabaseRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Gets the state of a Cloud Spanner database. + GetDatabase(ctx context.Context, in *GetDatabaseRequest, opts ...grpc.CallOption) (*Database, error) + // Updates the schema of a Cloud Spanner database by + // creating/altering/dropping tables, columns, indexes, etc. The returned + // [long-running operation][google.longrunning.Operation] will have a name of + // the format `/operations/` and can be used to + // track execution of the schema change(s). The + // [metadata][google.longrunning.Operation.metadata] field type is + // [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + // The operation has no response. + UpdateDatabaseDdl(ctx context.Context, in *UpdateDatabaseDdlRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Drops (aka deletes) a Cloud Spanner database. + DropDatabase(ctx context.Context, in *DropDatabaseRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Returns the schema of a Cloud Spanner database as a list of formatted + // DDL statements. This method does not show pending schema updates, those may + // be queried using the [Operations][google.longrunning.Operations] API. 
+ GetDatabaseDdl(ctx context.Context, in *GetDatabaseDdlRequest, opts ...grpc.CallOption) (*GetDatabaseDdlResponse, error) + // Sets the access control policy on a database resource. Replaces any + // existing policy. + // + // Authorization requires `spanner.databases.setIamPolicy` permission on + // [resource][google.iam.v1.SetIamPolicyRequest.resource]. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for a database resource. Returns an empty + // policy if a database exists but does not have a policy set. + // + // Authorization requires `spanner.databases.getIamPolicy` permission on + // [resource][google.iam.v1.GetIamPolicyRequest.resource]. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that the caller has on the specified database resource. + // + // Attempting this RPC on a non-existent Cloud Spanner database will result in + // a NOT_FOUND error if the user has `spanner.databases.list` permission on + // the containing Cloud Spanner instance. Otherwise returns an empty set of + // permissions. + TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type databaseAdminClient struct { + cc *grpc.ClientConn +} + +func NewDatabaseAdminClient(cc *grpc.ClientConn) DatabaseAdminClient { + return &databaseAdminClient{cc} +} + +func (c *databaseAdminClient) ListDatabases(ctx context.Context, in *ListDatabasesRequest, opts ...grpc.CallOption) (*ListDatabasesResponse, error) { + out := new(ListDatabasesResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) CreateDatabase(ctx context.Context, in *CreateDatabaseRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) GetDatabase(ctx context.Context, in *GetDatabaseRequest, opts ...grpc.CallOption) (*Database, error) { + out := new(Database) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) UpdateDatabaseDdl(ctx context.Context, in *UpdateDatabaseDdlRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) DropDatabase(ctx context.Context, in *DropDatabaseRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) GetDatabaseDdl(ctx context.Context, in *GetDatabaseDdlRequest, opts ...grpc.CallOption) (*GetDatabaseDdlResponse, error) { + out := new(GetDatabaseDdlResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *databaseAdminClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// DatabaseAdminServer is the server API for DatabaseAdmin service. +type DatabaseAdminServer interface { + // Lists Cloud Spanner databases. + ListDatabases(context.Context, *ListDatabasesRequest) (*ListDatabasesResponse, error) + // Creates a new Cloud Spanner database and starts to prepare it for serving. + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track preparation of the database. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [CreateDatabaseMetadata][google.spanner.admin.database.v1.CreateDatabaseMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Database][google.spanner.admin.database.v1.Database], if successful. + CreateDatabase(context.Context, *CreateDatabaseRequest) (*longrunning.Operation, error) + // Gets the state of a Cloud Spanner database. + GetDatabase(context.Context, *GetDatabaseRequest) (*Database, error) + // Updates the schema of a Cloud Spanner database by + // creating/altering/dropping tables, columns, indexes, etc. The returned + // [long-running operation][google.longrunning.Operation] will have a name of + // the format `/operations/` and can be used to + // track execution of the schema change(s). The + // [metadata][google.longrunning.Operation.metadata] field type is + // [UpdateDatabaseDdlMetadata][google.spanner.admin.database.v1.UpdateDatabaseDdlMetadata]. + // The operation has no response. + UpdateDatabaseDdl(context.Context, *UpdateDatabaseDdlRequest) (*longrunning.Operation, error) + // Drops (aka deletes) a Cloud Spanner database. + DropDatabase(context.Context, *DropDatabaseRequest) (*empty.Empty, error) + // Returns the schema of a Cloud Spanner database as a list of formatted + // DDL statements. This method does not show pending schema updates, those may + // be queried using the [Operations][google.longrunning.Operations] API. 
+ GetDatabaseDdl(context.Context, *GetDatabaseDdlRequest) (*GetDatabaseDdlResponse, error) + // Sets the access control policy on a database resource. Replaces any + // existing policy. + // + // Authorization requires `spanner.databases.setIamPolicy` permission on + // [resource][google.iam.v1.SetIamPolicyRequest.resource]. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for a database resource. Returns an empty + // policy if a database exists but does not have a policy set. + // + // Authorization requires `spanner.databases.getIamPolicy` permission on + // [resource][google.iam.v1.GetIamPolicyRequest.resource]. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that the caller has on the specified database resource. + // + // Attempting this RPC on a non-existent Cloud Spanner database will result in + // a NOT_FOUND error if the user has `spanner.databases.list` permission on + // the containing Cloud Spanner instance. Otherwise returns an empty set of + // permissions. + TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterDatabaseAdminServer(s *grpc.Server, srv DatabaseAdminServer) { + s.RegisterService(&_DatabaseAdmin_serviceDesc, srv) +} + +func _DatabaseAdmin_ListDatabases_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListDatabasesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).ListDatabases(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/ListDatabases", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).ListDatabases(ctx, req.(*ListDatabasesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_CreateDatabase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateDatabaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).CreateDatabase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/CreateDatabase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).CreateDatabase(ctx, req.(*CreateDatabaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_GetDatabase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDatabaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).GetDatabase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).GetDatabase(ctx, req.(*GetDatabaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_UpdateDatabaseDdl_Handler(srv interface{}, ctx context.Context, dec func(interface{}) 
error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateDatabaseDdlRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).UpdateDatabaseDdl(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/UpdateDatabaseDdl", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).UpdateDatabaseDdl(ctx, req.(*UpdateDatabaseDdlRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_DropDatabase_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DropDatabaseRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).DropDatabase(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/DropDatabase", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).DropDatabase(ctx, req.(*DropDatabaseRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_GetDatabaseDdl_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetDatabaseDdlRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).GetDatabaseDdl(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/GetDatabaseDdl", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).GetDatabaseDdl(ctx, req.(*GetDatabaseDdlRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _DatabaseAdmin_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(DatabaseAdminServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.database.v1.DatabaseAdmin/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(DatabaseAdminServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _DatabaseAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.spanner.admin.database.v1.DatabaseAdmin", + HandlerType: (*DatabaseAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListDatabases", + Handler: _DatabaseAdmin_ListDatabases_Handler, + }, + { + MethodName: "CreateDatabase", + Handler: _DatabaseAdmin_CreateDatabase_Handler, + }, + { + MethodName: "GetDatabase", + Handler: _DatabaseAdmin_GetDatabase_Handler, + }, + { + MethodName: "UpdateDatabaseDdl", + Handler: _DatabaseAdmin_UpdateDatabaseDdl_Handler, + }, + { + MethodName: "DropDatabase", + Handler: _DatabaseAdmin_DropDatabase_Handler, + }, + { + MethodName: "GetDatabaseDdl", + Handler: _DatabaseAdmin_GetDatabaseDdl_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _DatabaseAdmin_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _DatabaseAdmin_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _DatabaseAdmin_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/spanner/admin/database/v1/spanner_database_admin.proto", +} + +func init() { + proto.RegisterFile("google/spanner/admin/database/v1/spanner_database_admin.proto", fileDescriptor_spanner_database_admin_742316449a28c935) +} + +var fileDescriptor_spanner_database_admin_742316449a28c935 = []byte{ + // 1033 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x96, 0xcf, 0x6f, 0x1b, 0x45, + 0x14, 0xc7, 0x19, 0xa7, 0xa9, 0x92, 0x17, 0x27, 0x75, 0x06, 0x1c, 0xb9, 0x5b, 0x5a, 0xcc, 0x82, + 0x2a, 0xd7, 0x12, 0xbb, 0xd8, 0x69, 0x48, 0x30, 0x0a, 0x22, 0xb5, 0x5d, 0xd7, 0x12, 0xb4, 0x96, + 0xed, 0x56, 0x02, 0x59, 0xb2, 0x26, 0xf6, 0xb0, 0xda, 0xe2, 0xfd, 0xc1, 0xce, 0xb8, 0x6a, 0x8b, + 0x7a, 0x41, 0xe2, 0xc0, 0x19, 0x90, 0xb8, 0x81, 0x38, 0x70, 0xe0, 0xc4, 0x0d, 0x89, 0x23, 0x47, + 0xae, 0xfc, 0x01, 0x5c, 0xf8, 0x43, 0xd0, 0xcc, 0xee, 0xd8, 0xeb, 0x75, 0x12, 0xdb, 0x1c, 0xb8, + 0x79, 0xdf, 0xfb, 0xbe, 0x79, 0x9f, 0x79, 0x3b, 0xdf, 0xf1, 0xc2, 0xb1, 0xe5, 0x79, 0xd6, 0x88, + 0x9a, 0xcc, 0x27, 0xae, 0x4b, 0x03, 0x93, 0x0c, 0x1d, 0xdb, 0x35, 0x87, 0x84, 0x93, 0x53, 0xc2, + 0xa8, 0xf9, 0xa4, 0xa4, 0x32, 0x7d, 0x15, 0xeb, 0x4b, 0x89, 0xe1, 0x07, 0x1e, 0xf7, 0x70, 0x3e, + 0x2c, 0x37, 0x22, 0x91, 0x11, 0xe6, 0x94, 0xd4, 0x78, 0x52, 0xd2, 0x5e, 0x8d, 0x1a, 0x10, 0xdf, + 0x36, 0x89, 0xeb, 0x7a, 0x9c, 0x70, 0xdb, 0x73, 0x59, 0x58, 0xaf, 0xdd, 0x88, 0xb2, 0x36, 0x71, + 0x44, 0x2f, 0x9b, 0x38, 0x7d, 0xdf, 0x1b, 0xd9, 0x83, 0x67, 0x51, 0x5e, 0x9b, 0xcd, 0xcf, 0xe4, + 0xde, 0x88, 0x72, 0x23, 0xcf, 0xb5, 0x82, 0xb1, 0xeb, 0xda, 0xae, 0x65, 0x7a, 0x3e, 0x0d, 0x66, + 0x1a, 0x5c, 0x8b, 0x44, 0xf2, 0xe9, 0x74, 0xfc, 0xa9, 0x49, 0x1d, 0x9f, 0xab, 0x15, 0x5e, 0x4b, + 0x26, 0xb9, 0xed, 0x50, 0xc6, 0x89, 0xe3, 0x87, 0x02, 0xfd, 0x07, 0x04, 0x1b, 0xb5, 0x68, 0x33, + 0x18, 0xc3, 0x25, 0x97, 0x38, 0x34, 0x87, 0xf2, 0xa8, 0xb0, 0xd9, 
0x96, 0xbf, 0xf1, 0x5d, 0x58, + 0x67, 0x9c, 0x70, 0x9a, 0x4b, 0xe5, 0x51, 0x61, 0xa7, 0xfc, 0xb6, 0xb1, 0x68, 0x1e, 0x86, 0x5a, + 0xce, 0xe8, 0x88, 0xba, 0x76, 0x58, 0xae, 0x1f, 0xc2, 0xba, 0x7c, 0xc6, 0x59, 0xd8, 0xed, 0x74, + 0x4f, 0xba, 0xf5, 0xfe, 0xc3, 0xfb, 0x9d, 0x56, 0xbd, 0xda, 0xbc, 0xdb, 0xac, 0xd7, 0x32, 0x2f, + 0xe1, 0x34, 0x6c, 0x54, 0xdb, 0xf5, 0x93, 0x6e, 0xf3, 0x7e, 0x23, 0x83, 0xf0, 0x26, 0xac, 0xb7, + 0xeb, 0x27, 0xb5, 0x8f, 0x33, 0x29, 0xfd, 0x31, 0xbc, 0xf2, 0xa1, 0xcd, 0xb8, 0x5a, 0x95, 0xb5, + 0xe9, 0xe7, 0x63, 0xca, 0x38, 0xde, 0x83, 0xcb, 0x3e, 0x09, 0xa8, 0xcb, 0x23, 0xdc, 0xe8, 0x09, + 0x5f, 0x83, 0x4d, 0x9f, 0x58, 0xb4, 0xcf, 0xec, 0xe7, 0x34, 0xb7, 0x96, 0x47, 0x85, 0xf5, 0xf6, + 0x86, 0x08, 0x74, 0xec, 0xe7, 0x14, 0x5f, 0x07, 0x90, 0x49, 0xee, 0x7d, 0x46, 0xdd, 0xdc, 0x25, + 0x59, 0x28, 0xe5, 0x5d, 0x11, 0xd0, 0xbf, 0x46, 0x90, 0x4d, 0x34, 0x63, 0xbe, 0xe7, 0x32, 0x8a, + 0xef, 0xc1, 0xa6, 0xda, 0x23, 0xcb, 0xa1, 0xfc, 0x5a, 0x61, 0xab, 0x5c, 0x5c, 0x7e, 0x14, 0xed, + 0x69, 0x31, 0xbe, 0x09, 0x57, 0x5c, 0xfa, 0x94, 0xf7, 0x63, 0x1c, 0x29, 0xc9, 0xb1, 0x2d, 0xc2, + 0xad, 0x09, 0xcb, 0x57, 0x08, 0xb2, 0xd5, 0x80, 0x12, 0x4e, 0x27, 0xab, 0x2c, 0xd8, 0xf9, 0x2d, + 0xc8, 0x0c, 0x64, 0x41, 0x5f, 0x8e, 0xdc, 0x11, 0x8a, 0x70, 0xe9, 0x2b, 0x61, 0xbc, 0xa3, 0xc2, + 0x42, 0x4a, 0x9f, 0xf2, 0x80, 0x4c, 0x95, 0x2c, 0xb7, 0x96, 0x5f, 0x13, 0x52, 0x19, 0x9f, 0x28, + 0x99, 0x7e, 0x1b, 0xf6, 0x66, 0x31, 0x3e, 0xa2, 0x9c, 0x88, 0xed, 0x60, 0x0d, 0x36, 0xd4, 0xb6, + 0x22, 0x92, 0xc9, 0xb3, 0x5e, 0x00, 0xdc, 0xa0, 0x3c, 0x49, 0x7e, 0xc6, 0x01, 0xd3, 0x9f, 0x41, + 0xee, 0xa1, 0x3f, 0x8c, 0xad, 0x5f, 0x1b, 0x8e, 0x94, 0xfe, 0x82, 0x0e, 0xf8, 0x06, 0x40, 0x0c, + 0x3e, 0x25, 0xe1, 0x63, 0x11, 0xfc, 0x3a, 0xa4, 0x27, 0x5e, 0xe9, 0xdb, 0x43, 0x79, 0x14, 0x36, + 0xdb, 0x5b, 0x93, 0x58, 0x73, 0xa8, 0xff, 0x88, 0xe0, 0xea, 0x5c, 0xef, 0x65, 0xb6, 0xb7, 0xb0, + 0x79, 0x03, 0x76, 0x07, 0x9e, 0xe3, 0xd8, 0xbc, 0x3f, 0x31, 0x5c, 0x38, 0xe0, 0xad, 0xb2, 0xa6, + 0x8e, 0x8d, 0xf2, 0xa4, 0xd1, 0x55, 0x92, 0x76, 0x26, 0x2c, 0x9a, 0x04, 0x98, 0x5e, 0x82, 0x97, + 0x6b, 0x81, 0xe7, 0x27, 0x07, 0x79, 0xd1, 0xe8, 0xf7, 0x21, 0x1b, 0x1b, 0xfd, 0x72, 0xd3, 0xd4, + 0x8f, 0x60, 0x2f, 0x59, 0x14, 0x9d, 0xfc, 0xd9, 0xad, 0xa2, 0xe4, 0x56, 0xcb, 0xdf, 0xa5, 0x61, + 0x5b, 0xd5, 0x9d, 0x08, 0x07, 0xe0, 0xdf, 0x10, 0x6c, 0xcf, 0xb8, 0x08, 0xbf, 0xb3, 0xd8, 0x2a, + 0x67, 0x79, 0x5c, 0x3b, 0x5c, 0xb9, 0x2e, 0x84, 0xd6, 0x0f, 0xbe, 0xfc, 0xeb, 0x9f, 0x6f, 0x52, + 0x26, 0x7e, 0x4b, 0xdc, 0xa9, 0x5f, 0x84, 0xfe, 0x38, 0xf6, 0x03, 0xef, 0x31, 0x1d, 0x70, 0x66, + 0x16, 0x4d, 0xdb, 0x65, 0x9c, 0xb8, 0x03, 0xca, 0xcc, 0xe2, 0x0b, 0x73, 0xea, 0xcd, 0x9f, 0x10, + 0xec, 0xcc, 0x1e, 0x76, 0xbc, 0x04, 0xc2, 0x99, 0x2e, 0xd5, 0xae, 0xab, 0xc2, 0xd8, 0xed, 0x6d, + 0x3c, 0x50, 0xa7, 0x4f, 0x3f, 0x92, 0x84, 0x65, 0x7d, 0x35, 0xc2, 0x0a, 0x2a, 0xe2, 0x9f, 0x11, + 0x6c, 0xc5, 0xde, 0x15, 0xbe, 0xbd, 0x98, 0x70, 0xde, 0x8a, 0xda, 0x0a, 0xb7, 0x57, 0x62, 0x9a, + 0xc2, 0xb5, 0xe7, 0x90, 0x4e, 0x41, 0xcd, 0xe2, 0x0b, 0xfc, 0x2b, 0x82, 0xdd, 0x39, 0x7b, 0xe1, + 0xca, 0xe2, 0xc6, 0xe7, 0xdd, 0x07, 0x8b, 0x66, 0xfa, 0x81, 0xe4, 0xac, 0x94, 0x0f, 0x24, 0xa7, + 0x5a, 0x71, 0x19, 0x56, 0x73, 0x38, 0x1c, 0x89, 0xd9, 0x7e, 0x8f, 0x20, 0x1d, 0xf7, 0x1b, 0x3e, + 0x58, 0x62, 0x4c, 0xf3, 0xfe, 0xd4, 0xf6, 0xe6, 0x4c, 0x5e, 0x17, 0xff, 0xca, 0xfa, 0xbb, 0x92, + 0x70, 0xbf, 0x58, 0x5a, 0x99, 0x10, 0xff, 0x81, 0x60, 0x67, 0xd6, 0xa2, 0xcb, 0x9c, 0xcd, 0x33, + 0x6f, 0x02, 0xed, 0x68, 0xf5, 0xc2, 0xc8, 0x58, 0xc7, 0x72, 0x03, 0x87, 0xf8, 0xbf, 0x8d, 
0x18, + 0x7f, 0x8b, 0x20, 0xdd, 0xa1, 0xbc, 0x49, 0x9c, 0x96, 0xfc, 0xd0, 0xc1, 0xba, 0x22, 0xb1, 0x89, + 0x23, 0xda, 0xc6, 0x93, 0x8a, 0x36, 0x9b, 0xd0, 0x84, 0x59, 0xbd, 0x29, 0x51, 0xaa, 0xfa, 0xfb, + 0x12, 0x25, 0xa0, 0xcc, 0x1b, 0x07, 0x83, 0xa5, 0x50, 0x2a, 0x2c, 0xd6, 0x45, 0xbc, 0x76, 0x81, + 0xd5, 0xb8, 0x08, 0xab, 0xf1, 0xbf, 0x60, 0x59, 0x09, 0xac, 0xdf, 0x11, 0xe0, 0x2e, 0x65, 0x32, + 0x48, 0x03, 0xc7, 0x66, 0x4c, 0x7c, 0xf7, 0xe1, 0x42, 0xa2, 0xf1, 0xbc, 0x44, 0x21, 0xde, 0x5a, + 0x42, 0x19, 0xbd, 0xd8, 0x07, 0x12, 0xbb, 0xa9, 0xd7, 0x56, 0xc7, 0xe6, 0x73, 0xab, 0x56, 0x50, + 0xf1, 0xce, 0xdf, 0x08, 0xde, 0x1c, 0x78, 0xce, 0xc2, 0x93, 0x76, 0xe7, 0x6a, 0x27, 0x4c, 0xcd, + 0xfc, 0x89, 0xb4, 0x84, 0x6f, 0x5a, 0xe8, 0x93, 0x7b, 0x51, 0xb9, 0xe5, 0x8d, 0x88, 0x6b, 0x19, + 0x5e, 0x60, 0x99, 0x16, 0x75, 0xa5, 0xab, 0xcc, 0x30, 0x45, 0x7c, 0x9b, 0x9d, 0xff, 0x71, 0xff, + 0x9e, 0xfa, 0xfd, 0x4b, 0xea, 0x66, 0x23, 0x5c, 0xaa, 0x3a, 0xf2, 0xc6, 0x43, 0x23, 0x6a, 0x6a, + 0xc8, 0x6e, 0xd3, 0x6f, 0xd6, 0x47, 0xa5, 0x3f, 0x95, 0xb0, 0x27, 0x85, 0xbd, 0x48, 0xd8, 0x93, + 0xc2, 0x9e, 0x12, 0xf6, 0x1e, 0x95, 0x4e, 0x2f, 0x4b, 0x8c, 0xfd, 0x7f, 0x03, 0x00, 0x00, 0xff, + 0xff, 0x1d, 0xbc, 0x89, 0x54, 0x62, 0x0c, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/admin/instance/v1/spanner_instance_admin.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/admin/instance/v1/spanner_instance_admin.pb.go new file mode 100644 index 0000000..e31a6e6 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/admin/instance/v1/spanner_instance_admin.pb.go @@ -0,0 +1,1568 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/admin/instance/v1/spanner_instance_admin.proto + +package instance // import "google.golang.org/genproto/googleapis/spanner/admin/instance/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import v1 "google.golang.org/genproto/googleapis/iam/v1" +import longrunning "google.golang.org/genproto/googleapis/longrunning" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Indicates the current state of the instance. +type Instance_State int32 + +const ( + // Not specified. + Instance_STATE_UNSPECIFIED Instance_State = 0 + // The instance is still being created. Resources may not be + // available yet, and operations such as database creation may not + // work. + Instance_CREATING Instance_State = 1 + // The instance is fully created and ready to do work such as + // creating databases. 
+ Instance_READY Instance_State = 2 +) + +var Instance_State_name = map[int32]string{ + 0: "STATE_UNSPECIFIED", + 1: "CREATING", + 2: "READY", +} +var Instance_State_value = map[string]int32{ + "STATE_UNSPECIFIED": 0, + "CREATING": 1, + "READY": 2, +} + +func (x Instance_State) String() string { + return proto.EnumName(Instance_State_name, int32(x)) +} +func (Instance_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{1, 0} +} + +// A possible configuration for a Cloud Spanner instance. Configurations +// define the geographic placement of nodes and their replication. +type InstanceConfig struct { + // A unique identifier for the instance configuration. Values + // are of the form + // `projects//instanceConfigs/[a-z][-a-z0-9]*` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The name of this instance configuration as it appears in UIs. + DisplayName string `protobuf:"bytes,2,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InstanceConfig) Reset() { *m = InstanceConfig{} } +func (m *InstanceConfig) String() string { return proto.CompactTextString(m) } +func (*InstanceConfig) ProtoMessage() {} +func (*InstanceConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{0} +} +func (m *InstanceConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InstanceConfig.Unmarshal(m, b) +} +func (m *InstanceConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InstanceConfig.Marshal(b, m, deterministic) +} +func (dst *InstanceConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_InstanceConfig.Merge(dst, src) +} +func (m *InstanceConfig) XXX_Size() int { + return xxx_messageInfo_InstanceConfig.Size(m) +} +func (m *InstanceConfig) XXX_DiscardUnknown() { + xxx_messageInfo_InstanceConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_InstanceConfig proto.InternalMessageInfo + +func (m *InstanceConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *InstanceConfig) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +// An isolated set of Cloud Spanner resources on which databases can be hosted. +type Instance struct { + // Required. A unique identifier for the instance, which cannot be changed + // after the instance is created. Values are of the form + // `projects//instances/[a-z][-a-z0-9]*[a-z0-9]`. The final + // segment of the name must be between 6 and 30 characters in length. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Required. The name of the instance's configuration. Values are of the form + // `projects//instanceConfigs/`. See + // also [InstanceConfig][google.spanner.admin.instance.v1.InstanceConfig] and + // [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. + Config string `protobuf:"bytes,2,opt,name=config,proto3" json:"config,omitempty"` + // Required. The descriptive name for this instance as it appears in UIs. + // Must be unique per project and between 4 and 30 characters in length. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // Required. The number of nodes allocated to this instance. 
This may be zero + // in API responses for instances that are not yet in state `READY`. + // + // See [the + // documentation](https://cloud.google.com/spanner/docs/instances#node_count) + // for more information about nodes. + NodeCount int32 `protobuf:"varint,5,opt,name=node_count,json=nodeCount,proto3" json:"node_count,omitempty"` + // Output only. The current instance state. For + // [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance], + // the state must be either omitted or set to `CREATING`. For + // [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance], + // the state must be either omitted or set to `READY`. + State Instance_State `protobuf:"varint,6,opt,name=state,proto3,enum=google.spanner.admin.instance.v1.Instance_State" json:"state,omitempty"` + // Cloud Labels are a flexible and lightweight mechanism for organizing cloud + // resources into groups that reflect a customer's organizational needs and + // deployment strategies. Cloud Labels can be used to filter collections of + // resources. They can be used to control how resource metrics are aggregated. + // And they can be used as arguments to policy management rules (e.g. route, + // firewall, load balancing, etc.). + // + // * Label keys must be between 1 and 63 characters long and must conform to + // the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. + // * Label values must be between 0 and 63 characters long and must conform + // to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. + // * No more than 64 labels can be associated with a given resource. + // + // See https://goo.gl/xmQnxf for more information on and examples of labels. + // + // If you plan to use labels in your own code, please note that additional + // characters may be allowed in the future. And so you are advised to use an + // internal label representation, such as JSON, which doesn't rely upon + // specific characters being disallowed. For example, representing labels + // as the string: name + "_" + value would prove problematic if we were to + // allow "_" in a future release. 
+ Labels map[string]string `protobuf:"bytes,7,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Instance) Reset() { *m = Instance{} } +func (m *Instance) String() string { return proto.CompactTextString(m) } +func (*Instance) ProtoMessage() {} +func (*Instance) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{1} +} +func (m *Instance) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Instance.Unmarshal(m, b) +} +func (m *Instance) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Instance.Marshal(b, m, deterministic) +} +func (dst *Instance) XXX_Merge(src proto.Message) { + xxx_messageInfo_Instance.Merge(dst, src) +} +func (m *Instance) XXX_Size() int { + return xxx_messageInfo_Instance.Size(m) +} +func (m *Instance) XXX_DiscardUnknown() { + xxx_messageInfo_Instance.DiscardUnknown(m) +} + +var xxx_messageInfo_Instance proto.InternalMessageInfo + +func (m *Instance) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Instance) GetConfig() string { + if m != nil { + return m.Config + } + return "" +} + +func (m *Instance) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *Instance) GetNodeCount() int32 { + if m != nil { + return m.NodeCount + } + return 0 +} + +func (m *Instance) GetState() Instance_State { + if m != nil { + return m.State + } + return Instance_STATE_UNSPECIFIED +} + +func (m *Instance) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +// The request for +// [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. +type ListInstanceConfigsRequest struct { + // Required. The name of the project for which a list of supported instance + // configurations is requested. Values are of the form + // `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Number of instance configurations to be returned in the response. If 0 or + // less, defaults to the server's maximum allowed page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` should contain a + // [next_page_token][google.spanner.admin.instance.v1.ListInstanceConfigsResponse.next_page_token] + // from a previous + // [ListInstanceConfigsResponse][google.spanner.admin.instance.v1.ListInstanceConfigsResponse]. 
+ PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstanceConfigsRequest) Reset() { *m = ListInstanceConfigsRequest{} } +func (m *ListInstanceConfigsRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstanceConfigsRequest) ProtoMessage() {} +func (*ListInstanceConfigsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{2} +} +func (m *ListInstanceConfigsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstanceConfigsRequest.Unmarshal(m, b) +} +func (m *ListInstanceConfigsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstanceConfigsRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstanceConfigsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstanceConfigsRequest.Merge(dst, src) +} +func (m *ListInstanceConfigsRequest) XXX_Size() int { + return xxx_messageInfo_ListInstanceConfigsRequest.Size(m) +} +func (m *ListInstanceConfigsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstanceConfigsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstanceConfigsRequest proto.InternalMessageInfo + +func (m *ListInstanceConfigsRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstanceConfigsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstanceConfigsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// The response for +// [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs]. +type ListInstanceConfigsResponse struct { + // The list of requested instance configurations. + InstanceConfigs []*InstanceConfig `protobuf:"bytes,1,rep,name=instance_configs,json=instanceConfigs,proto3" json:"instance_configs,omitempty"` + // `next_page_token` can be sent in a subsequent + // [ListInstanceConfigs][google.spanner.admin.instance.v1.InstanceAdmin.ListInstanceConfigs] + // call to fetch more of the matching instance configurations. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstanceConfigsResponse) Reset() { *m = ListInstanceConfigsResponse{} } +func (m *ListInstanceConfigsResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstanceConfigsResponse) ProtoMessage() {} +func (*ListInstanceConfigsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{3} +} +func (m *ListInstanceConfigsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstanceConfigsResponse.Unmarshal(m, b) +} +func (m *ListInstanceConfigsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstanceConfigsResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstanceConfigsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstanceConfigsResponse.Merge(dst, src) +} +func (m *ListInstanceConfigsResponse) XXX_Size() int { + return xxx_messageInfo_ListInstanceConfigsResponse.Size(m) +} +func (m *ListInstanceConfigsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstanceConfigsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstanceConfigsResponse proto.InternalMessageInfo + +func (m *ListInstanceConfigsResponse) GetInstanceConfigs() []*InstanceConfig { + if m != nil { + return m.InstanceConfigs + } + return nil +} + +func (m *ListInstanceConfigsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for +// [GetInstanceConfigRequest][google.spanner.admin.instance.v1.InstanceAdmin.GetInstanceConfig]. +type GetInstanceConfigRequest struct { + // Required. The name of the requested instance configuration. Values are of + // the form `projects//instanceConfigs/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceConfigRequest) Reset() { *m = GetInstanceConfigRequest{} } +func (m *GetInstanceConfigRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceConfigRequest) ProtoMessage() {} +func (*GetInstanceConfigRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{4} +} +func (m *GetInstanceConfigRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceConfigRequest.Unmarshal(m, b) +} +func (m *GetInstanceConfigRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceConfigRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceConfigRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceConfigRequest.Merge(dst, src) +} +func (m *GetInstanceConfigRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceConfigRequest.Size(m) +} +func (m *GetInstanceConfigRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceConfigRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceConfigRequest proto.InternalMessageInfo + +func (m *GetInstanceConfigRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for +// [GetInstance][google.spanner.admin.instance.v1.InstanceAdmin.GetInstance]. +type GetInstanceRequest struct { + // Required. 
The name of the requested instance. Values are of the form + // `projects//instances/`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetInstanceRequest) Reset() { *m = GetInstanceRequest{} } +func (m *GetInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*GetInstanceRequest) ProtoMessage() {} +func (*GetInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{5} +} +func (m *GetInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetInstanceRequest.Unmarshal(m, b) +} +func (m *GetInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *GetInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetInstanceRequest.Merge(dst, src) +} +func (m *GetInstanceRequest) XXX_Size() int { + return xxx_messageInfo_GetInstanceRequest.Size(m) +} +func (m *GetInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetInstanceRequest proto.InternalMessageInfo + +func (m *GetInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for +// [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. +type CreateInstanceRequest struct { + // Required. The name of the project in which to create the instance. Values + // are of the form `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Required. The ID of the instance to create. Valid identifiers are of the + // form `[a-z][-a-z0-9]*[a-z0-9]` and must be between 6 and 30 characters in + // length. + InstanceId string `protobuf:"bytes,2,opt,name=instance_id,json=instanceId,proto3" json:"instance_id,omitempty"` + // Required. The instance to create. The name may be omitted, but if + // specified must be `/instances/`. 
+ Instance *Instance `protobuf:"bytes,3,opt,name=instance,proto3" json:"instance,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceRequest) Reset() { *m = CreateInstanceRequest{} } +func (m *CreateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceRequest) ProtoMessage() {} +func (*CreateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{6} +} +func (m *CreateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceRequest.Unmarshal(m, b) +} +func (m *CreateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceRequest.Merge(dst, src) +} +func (m *CreateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_CreateInstanceRequest.Size(m) +} +func (m *CreateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceRequest proto.InternalMessageInfo + +func (m *CreateInstanceRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *CreateInstanceRequest) GetInstanceId() string { + if m != nil { + return m.InstanceId + } + return "" +} + +func (m *CreateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +// The request for +// [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. +type ListInstancesRequest struct { + // Required. The name of the project for which a list of instances is + // requested. Values are of the form `projects/`. + Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"` + // Number of instances to be returned in the response. If 0 or less, defaults + // to the server's maximum allowed page size. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` should contain a + // [next_page_token][google.spanner.admin.instance.v1.ListInstancesResponse.next_page_token] + // from a previous + // [ListInstancesResponse][google.spanner.admin.instance.v1.ListInstancesResponse]. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // An expression for filtering the results of the request. Filter rules are + // case insensitive. The fields eligible for filtering are: + // + // * `name` + // * `display_name` + // * `labels.key` where key is the name of a label + // + // Some examples of using filters are: + // + // * `name:*` --> The instance has a name. + // * `name:Howl` --> The instance's name contains the string "howl". + // * `name:HOWL` --> Equivalent to above. + // * `NAME:howl` --> Equivalent to above. + // * `labels.env:*` --> The instance has the label "env". + // * `labels.env:dev` --> The instance has the label "env" and the value of + // the label contains the string "dev". + // * `name:howl labels.env:dev` --> The instance's name contains "howl" and + // it has the label "env" with its value + // containing "dev". 
+ Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesRequest) Reset() { *m = ListInstancesRequest{} } +func (m *ListInstancesRequest) String() string { return proto.CompactTextString(m) } +func (*ListInstancesRequest) ProtoMessage() {} +func (*ListInstancesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{7} +} +func (m *ListInstancesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesRequest.Unmarshal(m, b) +} +func (m *ListInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesRequest.Marshal(b, m, deterministic) +} +func (dst *ListInstancesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesRequest.Merge(dst, src) +} +func (m *ListInstancesRequest) XXX_Size() int { + return xxx_messageInfo_ListInstancesRequest.Size(m) +} +func (m *ListInstancesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesRequest proto.InternalMessageInfo + +func (m *ListInstancesRequest) GetParent() string { + if m != nil { + return m.Parent + } + return "" +} + +func (m *ListInstancesRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListInstancesRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListInstancesRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// The response for +// [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances]. +type ListInstancesResponse struct { + // The list of requested instances. + Instances []*Instance `protobuf:"bytes,1,rep,name=instances,proto3" json:"instances,omitempty"` + // `next_page_token` can be sent in a subsequent + // [ListInstances][google.spanner.admin.instance.v1.InstanceAdmin.ListInstances] + // call to fetch more of the matching instances. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListInstancesResponse) Reset() { *m = ListInstancesResponse{} } +func (m *ListInstancesResponse) String() string { return proto.CompactTextString(m) } +func (*ListInstancesResponse) ProtoMessage() {} +func (*ListInstancesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{8} +} +func (m *ListInstancesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListInstancesResponse.Unmarshal(m, b) +} +func (m *ListInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListInstancesResponse.Marshal(b, m, deterministic) +} +func (dst *ListInstancesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListInstancesResponse.Merge(dst, src) +} +func (m *ListInstancesResponse) XXX_Size() int { + return xxx_messageInfo_ListInstancesResponse.Size(m) +} +func (m *ListInstancesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListInstancesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListInstancesResponse proto.InternalMessageInfo + +func (m *ListInstancesResponse) GetInstances() []*Instance { + if m != nil { + return m.Instances + } + return nil +} + +func (m *ListInstancesResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for +// [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. +type UpdateInstanceRequest struct { + // Required. The instance to update, which must always include the instance + // name. Otherwise, only fields mentioned in + // [][google.spanner.admin.instance.v1.UpdateInstanceRequest.field_mask] need + // be included. + Instance *Instance `protobuf:"bytes,1,opt,name=instance,proto3" json:"instance,omitempty"` + // Required. A mask specifying which fields in + // [][google.spanner.admin.instance.v1.UpdateInstanceRequest.instance] should + // be updated. The field mask must always be specified; this prevents any + // future fields in + // [][google.spanner.admin.instance.v1.Instance] from being erased + // accidentally by clients that do not know about them. 
+ FieldMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInstanceRequest) Reset() { *m = UpdateInstanceRequest{} } +func (m *UpdateInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateInstanceRequest) ProtoMessage() {} +func (*UpdateInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{9} +} +func (m *UpdateInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInstanceRequest.Unmarshal(m, b) +} +func (m *UpdateInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInstanceRequest.Merge(dst, src) +} +func (m *UpdateInstanceRequest) XXX_Size() int { + return xxx_messageInfo_UpdateInstanceRequest.Size(m) +} +func (m *UpdateInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInstanceRequest proto.InternalMessageInfo + +func (m *UpdateInstanceRequest) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +func (m *UpdateInstanceRequest) GetFieldMask() *field_mask.FieldMask { + if m != nil { + return m.FieldMask + } + return nil +} + +// The request for +// [DeleteInstance][google.spanner.admin.instance.v1.InstanceAdmin.DeleteInstance]. +type DeleteInstanceRequest struct { + // Required. The name of the instance to be deleted. Values are of the form + // `projects//instances/` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteInstanceRequest) Reset() { *m = DeleteInstanceRequest{} } +func (m *DeleteInstanceRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteInstanceRequest) ProtoMessage() {} +func (*DeleteInstanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{10} +} +func (m *DeleteInstanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteInstanceRequest.Unmarshal(m, b) +} +func (m *DeleteInstanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteInstanceRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteInstanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteInstanceRequest.Merge(dst, src) +} +func (m *DeleteInstanceRequest) XXX_Size() int { + return xxx_messageInfo_DeleteInstanceRequest.Size(m) +} +func (m *DeleteInstanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteInstanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteInstanceRequest proto.InternalMessageInfo + +func (m *DeleteInstanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Metadata type for the operation returned by +// [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance]. +type CreateInstanceMetadata struct { + // The instance being created. 
+ Instance *Instance `protobuf:"bytes,1,opt,name=instance,proto3" json:"instance,omitempty"` + // The time at which the + // [CreateInstance][google.spanner.admin.instance.v1.InstanceAdmin.CreateInstance] + // request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time at which this operation was cancelled. If set, this operation is + // in the process of undoing itself (which is guaranteed to succeed) and + // cannot be cancelled again. + CancelTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=cancel_time,json=cancelTime,proto3" json:"cancel_time,omitempty"` + // The time at which this operation failed or was completed successfully. + EndTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateInstanceMetadata) Reset() { *m = CreateInstanceMetadata{} } +func (m *CreateInstanceMetadata) String() string { return proto.CompactTextString(m) } +func (*CreateInstanceMetadata) ProtoMessage() {} +func (*CreateInstanceMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{11} +} +func (m *CreateInstanceMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateInstanceMetadata.Unmarshal(m, b) +} +func (m *CreateInstanceMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateInstanceMetadata.Marshal(b, m, deterministic) +} +func (dst *CreateInstanceMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateInstanceMetadata.Merge(dst, src) +} +func (m *CreateInstanceMetadata) XXX_Size() int { + return xxx_messageInfo_CreateInstanceMetadata.Size(m) +} +func (m *CreateInstanceMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_CreateInstanceMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateInstanceMetadata proto.InternalMessageInfo + +func (m *CreateInstanceMetadata) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +func (m *CreateInstanceMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *CreateInstanceMetadata) GetCancelTime() *timestamp.Timestamp { + if m != nil { + return m.CancelTime + } + return nil +} + +func (m *CreateInstanceMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +// Metadata type for the operation returned by +// [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance]. +type UpdateInstanceMetadata struct { + // The desired end state of the update. + Instance *Instance `protobuf:"bytes,1,opt,name=instance,proto3" json:"instance,omitempty"` + // The time at which + // [UpdateInstance][google.spanner.admin.instance.v1.InstanceAdmin.UpdateInstance] + // request was received. + StartTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // The time at which this operation was cancelled. If set, this operation is + // in the process of undoing itself (which is guaranteed to succeed) and + // cannot be cancelled again. + CancelTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=cancel_time,json=cancelTime,proto3" json:"cancel_time,omitempty"` + // The time at which this operation failed or was completed successfully. 
+ EndTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateInstanceMetadata) Reset() { *m = UpdateInstanceMetadata{} } +func (m *UpdateInstanceMetadata) String() string { return proto.CompactTextString(m) } +func (*UpdateInstanceMetadata) ProtoMessage() {} +func (*UpdateInstanceMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_instance_admin_d8dd1634480afab7, []int{12} +} +func (m *UpdateInstanceMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateInstanceMetadata.Unmarshal(m, b) +} +func (m *UpdateInstanceMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateInstanceMetadata.Marshal(b, m, deterministic) +} +func (dst *UpdateInstanceMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateInstanceMetadata.Merge(dst, src) +} +func (m *UpdateInstanceMetadata) XXX_Size() int { + return xxx_messageInfo_UpdateInstanceMetadata.Size(m) +} +func (m *UpdateInstanceMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateInstanceMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateInstanceMetadata proto.InternalMessageInfo + +func (m *UpdateInstanceMetadata) GetInstance() *Instance { + if m != nil { + return m.Instance + } + return nil +} + +func (m *UpdateInstanceMetadata) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *UpdateInstanceMetadata) GetCancelTime() *timestamp.Timestamp { + if m != nil { + return m.CancelTime + } + return nil +} + +func (m *UpdateInstanceMetadata) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func init() { + proto.RegisterType((*InstanceConfig)(nil), "google.spanner.admin.instance.v1.InstanceConfig") + proto.RegisterType((*Instance)(nil), "google.spanner.admin.instance.v1.Instance") + proto.RegisterMapType((map[string]string)(nil), "google.spanner.admin.instance.v1.Instance.LabelsEntry") + proto.RegisterType((*ListInstanceConfigsRequest)(nil), "google.spanner.admin.instance.v1.ListInstanceConfigsRequest") + proto.RegisterType((*ListInstanceConfigsResponse)(nil), "google.spanner.admin.instance.v1.ListInstanceConfigsResponse") + proto.RegisterType((*GetInstanceConfigRequest)(nil), "google.spanner.admin.instance.v1.GetInstanceConfigRequest") + proto.RegisterType((*GetInstanceRequest)(nil), "google.spanner.admin.instance.v1.GetInstanceRequest") + proto.RegisterType((*CreateInstanceRequest)(nil), "google.spanner.admin.instance.v1.CreateInstanceRequest") + proto.RegisterType((*ListInstancesRequest)(nil), "google.spanner.admin.instance.v1.ListInstancesRequest") + proto.RegisterType((*ListInstancesResponse)(nil), "google.spanner.admin.instance.v1.ListInstancesResponse") + proto.RegisterType((*UpdateInstanceRequest)(nil), "google.spanner.admin.instance.v1.UpdateInstanceRequest") + proto.RegisterType((*DeleteInstanceRequest)(nil), "google.spanner.admin.instance.v1.DeleteInstanceRequest") + proto.RegisterType((*CreateInstanceMetadata)(nil), "google.spanner.admin.instance.v1.CreateInstanceMetadata") + proto.RegisterType((*UpdateInstanceMetadata)(nil), "google.spanner.admin.instance.v1.UpdateInstanceMetadata") + proto.RegisterEnum("google.spanner.admin.instance.v1.Instance_State", Instance_State_name, Instance_State_value) +} + +// Reference imports to suppress errors if they are not 
otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// InstanceAdminClient is the client API for InstanceAdmin service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type InstanceAdminClient interface { + // Lists the supported instance configurations for a given project. + ListInstanceConfigs(ctx context.Context, in *ListInstanceConfigsRequest, opts ...grpc.CallOption) (*ListInstanceConfigsResponse, error) + // Gets information about a particular instance configuration. + GetInstanceConfig(ctx context.Context, in *GetInstanceConfigRequest, opts ...grpc.CallOption) (*InstanceConfig, error) + // Lists all instances in the given project. + ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) + // Gets information about a particular instance. + GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) + // Creates an instance and begins preparing it to begin serving. The + // returned [long-running operation][google.longrunning.Operation] + // can be used to track the progress of preparing the new + // instance. The instance name is assigned by the caller. If the + // named instance already exists, `CreateInstance` returns + // `ALREADY_EXISTS`. + // + // Immediately upon completion of this request: + // + // * The instance is readable via the API, with all requested attributes + // but no allocated resources. Its state is `CREATING`. + // + // Until completion of the returned operation: + // + // * Cancelling the operation renders the instance immediately unreadable + // via the API. + // * The instance can be deleted. + // * All other attempts to modify the instance are rejected. + // + // Upon completion of the returned operation: + // + // * Billing for all successfully-allocated resources begins (some types + // may have lower than the requested levels). + // * Databases can be created in the instance. + // * The instance's allocated resource levels are readable via the API. + // * The instance's state becomes `READY`. + // + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track creation of the instance. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Instance][google.spanner.admin.instance.v1.Instance], if successful. + CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Updates an instance, and begins allocating or releasing resources + // as requested. The returned [long-running + // operation][google.longrunning.Operation] can be used to track the + // progress of updating the instance. If the named instance does not + // exist, returns `NOT_FOUND`. + // + // Immediately upon completion of this request: + // + // * For resource types for which a decrease in the instance's allocation + // has been requested, billing is based on the newly-requested level. 
+ // + // Until completion of the returned operation: + // + // * Cancelling the operation sets its metadata's + // [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], + // and begins restoring resources to their pre-request values. The + // operation is guaranteed to succeed at undoing all resource changes, + // after which point it terminates with a `CANCELLED` status. + // * All other attempts to modify the instance are rejected. + // * Reading the instance via the API continues to give the pre-request + // resource levels. + // + // Upon completion of the returned operation: + // + // * Billing begins for all successfully-allocated resources (some types + // may have lower than the requested levels). + // * All newly-reserved resources are available for serving the instance's + // tables. + // * The instance's new resource levels are readable via the API. + // + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track the instance modification. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Instance][google.spanner.admin.instance.v1.Instance], if successful. + // + // Authorization requires `spanner.instances.update` permission on + // resource [name][google.spanner.admin.instance.v1.Instance.name]. + UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) + // Deletes an instance. + // + // Immediately upon completion of the request: + // + // * Billing ceases for all of the instance's reserved resources. + // + // Soon afterward: + // + // * The instance and *all of its databases* immediately and + // irrevocably disappear from the API. All data in the databases + // is permanently deleted. + DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Sets the access control policy on an instance resource. Replaces any + // existing policy. + // + // Authorization requires `spanner.instances.setIamPolicy` on + // [resource][google.iam.v1.SetIamPolicyRequest.resource]. + SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Gets the access control policy for an instance resource. Returns an empty + // policy if an instance exists but does not have a policy set. + // + // Authorization requires `spanner.instances.getIamPolicy` on + // [resource][google.iam.v1.GetIamPolicyRequest.resource]. + GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) + // Returns permissions that the caller has on the specified instance resource. + // + // Attempting this RPC on a non-existent Cloud Spanner instance resource will + // result in a NOT_FOUND error if the user has `spanner.instances.list` + // permission on the containing Google Cloud Project. Otherwise returns an + // empty set of permissions. 
+ TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) +} + +type instanceAdminClient struct { + cc *grpc.ClientConn +} + +func NewInstanceAdminClient(cc *grpc.ClientConn) InstanceAdminClient { + return &instanceAdminClient{cc} +} + +func (c *instanceAdminClient) ListInstanceConfigs(ctx context.Context, in *ListInstanceConfigsRequest, opts ...grpc.CallOption) (*ListInstanceConfigsResponse, error) { + out := new(ListInstanceConfigsResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) GetInstanceConfig(ctx context.Context, in *GetInstanceConfigRequest, opts ...grpc.CallOption) (*InstanceConfig, error) { + out := new(InstanceConfig) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) ListInstances(ctx context.Context, in *ListInstancesRequest, opts ...grpc.CallOption) (*ListInstancesResponse, error) { + out := new(ListInstancesResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) GetInstance(ctx context.Context, in *GetInstanceRequest, opts ...grpc.CallOption) (*Instance, error) { + out := new(Instance) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) CreateInstance(ctx context.Context, in *CreateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) UpdateInstance(ctx context.Context, in *UpdateInstanceRequest, opts ...grpc.CallOption) (*longrunning.Operation, error) { + out := new(longrunning.Operation) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) DeleteInstance(ctx context.Context, in *DeleteInstanceRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) SetIamPolicy(ctx context.Context, in *v1.SetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) GetIamPolicy(ctx context.Context, in *v1.GetIamPolicyRequest, opts ...grpc.CallOption) (*v1.Policy, error) { + out := new(v1.Policy) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *instanceAdminClient) TestIamPermissions(ctx context.Context, in *v1.TestIamPermissionsRequest, opts ...grpc.CallOption) (*v1.TestIamPermissionsResponse, error) { + out := new(v1.TestIamPermissionsResponse) + err := c.cc.Invoke(ctx, "/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// InstanceAdminServer is the server API for InstanceAdmin service. +type InstanceAdminServer interface { + // Lists the supported instance configurations for a given project. + ListInstanceConfigs(context.Context, *ListInstanceConfigsRequest) (*ListInstanceConfigsResponse, error) + // Gets information about a particular instance configuration. + GetInstanceConfig(context.Context, *GetInstanceConfigRequest) (*InstanceConfig, error) + // Lists all instances in the given project. + ListInstances(context.Context, *ListInstancesRequest) (*ListInstancesResponse, error) + // Gets information about a particular instance. + GetInstance(context.Context, *GetInstanceRequest) (*Instance, error) + // Creates an instance and begins preparing it to begin serving. The + // returned [long-running operation][google.longrunning.Operation] + // can be used to track the progress of preparing the new + // instance. The instance name is assigned by the caller. If the + // named instance already exists, `CreateInstance` returns + // `ALREADY_EXISTS`. + // + // Immediately upon completion of this request: + // + // * The instance is readable via the API, with all requested attributes + // but no allocated resources. Its state is `CREATING`. + // + // Until completion of the returned operation: + // + // * Cancelling the operation renders the instance immediately unreadable + // via the API. + // * The instance can be deleted. + // * All other attempts to modify the instance are rejected. + // + // Upon completion of the returned operation: + // + // * Billing for all successfully-allocated resources begins (some types + // may have lower than the requested levels). + // * Databases can be created in the instance. + // * The instance's allocated resource levels are readable via the API. + // * The instance's state becomes `READY`. + // + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track creation of the instance. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [CreateInstanceMetadata][google.spanner.admin.instance.v1.CreateInstanceMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Instance][google.spanner.admin.instance.v1.Instance], if successful. + CreateInstance(context.Context, *CreateInstanceRequest) (*longrunning.Operation, error) + // Updates an instance, and begins allocating or releasing resources + // as requested. The returned [long-running + // operation][google.longrunning.Operation] can be used to track the + // progress of updating the instance. If the named instance does not + // exist, returns `NOT_FOUND`. + // + // Immediately upon completion of this request: + // + // * For resource types for which a decrease in the instance's allocation + // has been requested, billing is based on the newly-requested level. 
+ // + // Until completion of the returned operation: + // + // * Cancelling the operation sets its metadata's + // [cancel_time][google.spanner.admin.instance.v1.UpdateInstanceMetadata.cancel_time], + // and begins restoring resources to their pre-request values. The + // operation is guaranteed to succeed at undoing all resource changes, + // after which point it terminates with a `CANCELLED` status. + // * All other attempts to modify the instance are rejected. + // * Reading the instance via the API continues to give the pre-request + // resource levels. + // + // Upon completion of the returned operation: + // + // * Billing begins for all successfully-allocated resources (some types + // may have lower than the requested levels). + // * All newly-reserved resources are available for serving the instance's + // tables. + // * The instance's new resource levels are readable via the API. + // + // The returned [long-running operation][google.longrunning.Operation] will + // have a name of the format `/operations/` and + // can be used to track the instance modification. The + // [metadata][google.longrunning.Operation.metadata] field type is + // [UpdateInstanceMetadata][google.spanner.admin.instance.v1.UpdateInstanceMetadata]. + // The [response][google.longrunning.Operation.response] field type is + // [Instance][google.spanner.admin.instance.v1.Instance], if successful. + // + // Authorization requires `spanner.instances.update` permission on + // resource [name][google.spanner.admin.instance.v1.Instance.name]. + UpdateInstance(context.Context, *UpdateInstanceRequest) (*longrunning.Operation, error) + // Deletes an instance. + // + // Immediately upon completion of the request: + // + // * Billing ceases for all of the instance's reserved resources. + // + // Soon afterward: + // + // * The instance and *all of its databases* immediately and + // irrevocably disappear from the API. All data in the databases + // is permanently deleted. + DeleteInstance(context.Context, *DeleteInstanceRequest) (*empty.Empty, error) + // Sets the access control policy on an instance resource. Replaces any + // existing policy. + // + // Authorization requires `spanner.instances.setIamPolicy` on + // [resource][google.iam.v1.SetIamPolicyRequest.resource]. + SetIamPolicy(context.Context, *v1.SetIamPolicyRequest) (*v1.Policy, error) + // Gets the access control policy for an instance resource. Returns an empty + // policy if an instance exists but does not have a policy set. + // + // Authorization requires `spanner.instances.getIamPolicy` on + // [resource][google.iam.v1.GetIamPolicyRequest.resource]. + GetIamPolicy(context.Context, *v1.GetIamPolicyRequest) (*v1.Policy, error) + // Returns permissions that the caller has on the specified instance resource. + // + // Attempting this RPC on a non-existent Cloud Spanner instance resource will + // result in a NOT_FOUND error if the user has `spanner.instances.list` + // permission on the containing Google Cloud Project. Otherwise returns an + // empty set of permissions. 
+ TestIamPermissions(context.Context, *v1.TestIamPermissionsRequest) (*v1.TestIamPermissionsResponse, error) +} + +func RegisterInstanceAdminServer(s *grpc.Server, srv InstanceAdminServer) { + s.RegisterService(&_InstanceAdmin_serviceDesc, srv) +} + +func _InstanceAdmin_ListInstanceConfigs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstanceConfigsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).ListInstanceConfigs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/ListInstanceConfigs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).ListInstanceConfigs(ctx, req.(*ListInstanceConfigsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_GetInstanceConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceConfigRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).GetInstanceConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/GetInstanceConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).GetInstanceConfig(ctx, req.(*GetInstanceConfigRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_ListInstances_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListInstancesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).ListInstances(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/ListInstances", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).ListInstances(ctx, req.(*ListInstancesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_GetInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).GetInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/GetInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).GetInstance(ctx, req.(*GetInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_CreateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).CreateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.spanner.admin.instance.v1.InstanceAdmin/CreateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).CreateInstance(ctx, req.(*CreateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_UpdateInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).UpdateInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/UpdateInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).UpdateInstance(ctx, req.(*UpdateInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_DeleteInstance_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteInstanceRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).DeleteInstance(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/DeleteInstance", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).DeleteInstance(ctx, req.(*DeleteInstanceRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_SetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.SetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).SetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/SetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).SetIamPolicy(ctx, req.(*v1.SetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_GetIamPolicy_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.GetIamPolicyRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).GetIamPolicy(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.admin.instance.v1.InstanceAdmin/GetIamPolicy", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).GetIamPolicy(ctx, req.(*v1.GetIamPolicyRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _InstanceAdmin_TestIamPermissions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(v1.TestIamPermissionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(InstanceAdminServer).TestIamPermissions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/google.spanner.admin.instance.v1.InstanceAdmin/TestIamPermissions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(InstanceAdminServer).TestIamPermissions(ctx, req.(*v1.TestIamPermissionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _InstanceAdmin_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.spanner.admin.instance.v1.InstanceAdmin", + HandlerType: (*InstanceAdminServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "ListInstanceConfigs", + Handler: _InstanceAdmin_ListInstanceConfigs_Handler, + }, + { + MethodName: "GetInstanceConfig", + Handler: _InstanceAdmin_GetInstanceConfig_Handler, + }, + { + MethodName: "ListInstances", + Handler: _InstanceAdmin_ListInstances_Handler, + }, + { + MethodName: "GetInstance", + Handler: _InstanceAdmin_GetInstance_Handler, + }, + { + MethodName: "CreateInstance", + Handler: _InstanceAdmin_CreateInstance_Handler, + }, + { + MethodName: "UpdateInstance", + Handler: _InstanceAdmin_UpdateInstance_Handler, + }, + { + MethodName: "DeleteInstance", + Handler: _InstanceAdmin_DeleteInstance_Handler, + }, + { + MethodName: "SetIamPolicy", + Handler: _InstanceAdmin_SetIamPolicy_Handler, + }, + { + MethodName: "GetIamPolicy", + Handler: _InstanceAdmin_GetIamPolicy_Handler, + }, + { + MethodName: "TestIamPermissions", + Handler: _InstanceAdmin_TestIamPermissions_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/spanner/admin/instance/v1/spanner_instance_admin.proto", +} + +func init() { + proto.RegisterFile("google/spanner/admin/instance/v1/spanner_instance_admin.proto", fileDescriptor_spanner_instance_admin_d8dd1634480afab7) +} + +var fileDescriptor_spanner_instance_admin_d8dd1634480afab7 = []byte{ + // 1210 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x57, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0x67, 0x92, 0x3a, 0x8d, 0x9f, 0xd3, 0x34, 0x1d, 0x9a, 0xca, 0xb8, 0x94, 0xa6, 0x5b, 0x54, + 0x5c, 0x83, 0x76, 0x89, 0xa1, 0xff, 0x52, 0x72, 0x48, 0x5d, 0xc7, 0xb5, 0xd4, 0x86, 0x68, 0xed, + 0x56, 0x02, 0x22, 0x59, 0x53, 0x7b, 0x62, 0x2d, 0xd9, 0x9d, 0x5d, 0x76, 0xc6, 0x15, 0x29, 0xea, + 0xa5, 0xe2, 0x00, 0x12, 0x12, 0x07, 0x24, 0x84, 0x7a, 0x41, 0xe2, 0x08, 0x12, 0x07, 0xbe, 0x02, + 0x37, 0xae, 0x7c, 0x00, 0x2e, 0x88, 0xcf, 0x81, 0x66, 0x76, 0xc7, 0xf5, 0xae, 0xed, 0xd8, 0xae, + 0xe8, 0x89, 0xdb, 0xce, 0xbc, 0xdf, 0x7b, 0xef, 0x37, 0xbf, 0x37, 0xfb, 0xde, 0x2e, 0x6c, 0x76, + 0x7d, 0xbf, 0xeb, 0x52, 0x8b, 0x07, 0x84, 0x31, 0x1a, 0x5a, 0xa4, 0xe3, 0x39, 0xcc, 0x72, 0x18, + 0x17, 0x84, 0xb5, 0xa9, 0xf5, 0x68, 0x5d, 0x5b, 0x5a, 0x7a, 0xaf, 0xa5, 0x20, 0x66, 0x10, 0xfa, + 0xc2, 0xc7, 0x6b, 0x91, 0xbb, 0x19, 0x83, 0xcc, 0xc8, 0xa6, 0xa1, 0xe6, 0xa3, 0xf5, 0xc2, 0xeb, + 0x71, 0x02, 0x12, 0x38, 0x16, 0x61, 0xcc, 0x17, 0x44, 0x38, 0x3e, 0xe3, 0x91, 0x7f, 0xe1, 0x8d, + 0xd8, 0xea, 0x10, 0x4f, 0xe6, 0x72, 0x88, 0xd7, 0x0a, 0x7c, 0xd7, 0x69, 0x1f, 0xc6, 0xf6, 0x42, + 0xd2, 0x9e, 0xb0, 0x5d, 0x8c, 0x6d, 0xae, 0xcf, 0xba, 0x61, 0x8f, 0x31, 0x87, 0x75, 0x2d, 0x3f, + 0xa0, 0x61, 0x22, 0xc1, 0xd9, 0x18, 0xa4, 0x56, 0x0f, 0x7b, 0xfb, 0x16, 0xf5, 0x02, 0xa1, 0x23, + 0xac, 0xa5, 0x8d, 0xfb, 0x0e, 0x75, 0x3b, 0x2d, 0x8f, 0xf0, 0x83, 0x18, 0x71, 0x3e, 0x8d, 0x10, + 0x8e, 0x47, 0xb9, 0x20, 0x5e, 0x10, 0x01, 0x8c, 0x1a, 0x2c, 0xd7, 0xe3, 0xd3, 0x56, 0x7c, 0xb6, + 0xef, 0x74, 0x31, 0x86, 0x63, 0x8c, 0x78, 0x34, 0x8f, 0xd6, 0x50, 0x31, 0x6b, 0xab, 0x67, 0x7c, + 0x01, 0x96, 0x3a, 0x0e, 0x0f, 0x5c, 0x72, 0xd8, 0x52, 0xb6, 0x39, 0x65, 
0xcb, 0xc5, 0x7b, 0x3b, + 0xc4, 0xa3, 0xc6, 0x97, 0xf3, 0xb0, 0xa8, 0x23, 0x8d, 0x8c, 0x71, 0x06, 0x16, 0xda, 0x2a, 0x43, + 0xec, 0x1d, 0xaf, 0x86, 0x62, 0xcf, 0x0f, 0xc5, 0xc6, 0xe7, 0x00, 0x98, 0xdf, 0xa1, 0xad, 0xb6, + 0xdf, 0x63, 0x22, 0x9f, 0x59, 0x43, 0xc5, 0x8c, 0x9d, 0x95, 0x3b, 0x15, 0xb9, 0x81, 0xb7, 0x21, + 0xc3, 0x05, 0x11, 0x34, 0xbf, 0xb0, 0x86, 0x8a, 0xcb, 0xe5, 0x77, 0xcd, 0x49, 0x45, 0x35, 0x35, + 0x51, 0xb3, 0x21, 0xfd, 0xec, 0xc8, 0x1d, 0xef, 0xc0, 0x82, 0x4b, 0x1e, 0x52, 0x97, 0xe7, 0x8f, + 0xaf, 0xcd, 0x17, 0x73, 0xe5, 0xab, 0x33, 0x04, 0xba, 0xab, 0x1c, 0xab, 0x4c, 0x84, 0x87, 0x76, + 0x1c, 0xa5, 0x70, 0x03, 0x72, 0x03, 0xdb, 0x78, 0x05, 0xe6, 0x0f, 0xe8, 0x61, 0xac, 0x89, 0x7c, + 0xc4, 0xa7, 0x21, 0xf3, 0x88, 0xb8, 0x3d, 0xad, 0x67, 0xb4, 0xd8, 0x98, 0xbb, 0x8e, 0x8c, 0x6b, + 0x90, 0x51, 0xd4, 0xf0, 0x2a, 0x9c, 0x6a, 0x34, 0xb7, 0x9a, 0xd5, 0xd6, 0xfd, 0x9d, 0xc6, 0x6e, + 0xb5, 0x52, 0xdf, 0xae, 0x57, 0x6f, 0xaf, 0xbc, 0x82, 0x97, 0x60, 0xb1, 0x62, 0x57, 0xb7, 0x9a, + 0xf5, 0x9d, 0xda, 0x0a, 0xc2, 0x59, 0xc8, 0xd8, 0xd5, 0xad, 0xdb, 0x1f, 0xad, 0xcc, 0x19, 0x01, + 0x14, 0xee, 0x3a, 0x5c, 0x24, 0x6b, 0xca, 0x6d, 0xfa, 0x59, 0x8f, 0x72, 0x21, 0x6b, 0x10, 0x90, + 0x90, 0x32, 0x11, 0xb3, 0x88, 0x57, 0xf8, 0x2c, 0x64, 0x03, 0xd2, 0xa5, 0x2d, 0xee, 0x3c, 0x8e, + 0xc8, 0x64, 0xec, 0x45, 0xb9, 0xd1, 0x70, 0x1e, 0x2b, 0xf5, 0x95, 0x51, 0xf8, 0x07, 0x94, 0xc5, + 0xe5, 0x51, 0xf0, 0xa6, 0xdc, 0x30, 0x7e, 0x42, 0x70, 0x76, 0x64, 0x4a, 0x1e, 0xf8, 0x8c, 0x53, + 0xfc, 0x09, 0xac, 0xf4, 0x5f, 0xbd, 0xa8, 0xe4, 0x3c, 0x8f, 0x94, 0xbe, 0x33, 0x14, 0x2a, 0x0a, + 0x6a, 0x9f, 0x74, 0x92, 0x49, 0xf0, 0x25, 0x38, 0xc9, 0xe8, 0xe7, 0xa2, 0x35, 0x40, 0x30, 0xd2, + 0xf2, 0x84, 0xdc, 0xde, 0xed, 0x93, 0x34, 0x21, 0x5f, 0xa3, 0x29, 0x8a, 0x5a, 0x94, 0x11, 0x97, + 0xd5, 0x28, 0x02, 0x1e, 0xc0, 0x1f, 0x85, 0xfc, 0x01, 0xc1, 0x6a, 0x25, 0xa4, 0x44, 0xd0, 0x34, + 0x7a, 0x9c, 0xd8, 0xe7, 0x21, 0xd7, 0x17, 0xc4, 0xe9, 0xc4, 0x7c, 0x41, 0x6f, 0xd5, 0x3b, 0x78, + 0x1b, 0x16, 0xf5, 0x4a, 0xc9, 0x9d, 0x2b, 0x97, 0xa6, 0x57, 0xca, 0xee, 0xfb, 0x1a, 0x4f, 0x11, + 0x9c, 0x1e, 0xac, 0xcc, 0xcb, 0xbc, 0x06, 0x32, 0xe6, 0xbe, 0xe3, 0x0a, 0x1a, 0xe6, 0x8f, 0x45, + 0x31, 0xa3, 0x95, 0xf1, 0x35, 0x82, 0xd5, 0x14, 0x89, 0xf8, 0x62, 0xdc, 0x81, 0xac, 0xa6, 0xaa, + 0x6f, 0xc4, 0x2c, 0xe7, 0x7c, 0xee, 0x3c, 0xf5, 0x2d, 0x78, 0x86, 0x60, 0xf5, 0x7e, 0xd0, 0x19, + 0x51, 0xab, 0x41, 0xc9, 0xd1, 0x8b, 0x4b, 0x8e, 0x6f, 0x00, 0x3c, 0xef, 0xc1, 0x8a, 0x44, 0xae, + 0x5c, 0xd0, 0x91, 0x74, 0x13, 0x36, 0xb7, 0x25, 0xe4, 0x1e, 0xe1, 0x07, 0x76, 0x76, 0x5f, 0x3f, + 0x1a, 0x6f, 0xc3, 0xea, 0x6d, 0xea, 0xd2, 0x61, 0x6e, 0xa3, 0x6e, 0xdd, 0xb7, 0x73, 0x70, 0x26, + 0x79, 0xeb, 0xee, 0x51, 0x41, 0x3a, 0x44, 0x90, 0xff, 0xf2, 0x28, 0x5c, 0x90, 0x50, 0xb4, 0xe4, + 0xc8, 0x18, 0x7b, 0x94, 0xa6, 0x9e, 0x27, 0x76, 0x56, 0xa1, 0xe5, 0x1a, 0xdf, 0x84, 0x5c, 0x5b, + 0xc6, 0x70, 0x23, 0xdf, 0xf9, 0x89, 0xbe, 0x10, 0xc1, 0x95, 0xf3, 0x15, 0x58, 0xa4, 0xac, 0x13, + 0x79, 0x1e, 0x9b, 0xe8, 0x79, 0x9c, 0xb2, 0x8e, 0x5c, 0x29, 0x45, 0x92, 0xb5, 0xfd, 0x9f, 0x2b, + 0x52, 0xfe, 0x67, 0x09, 0x4e, 0xe8, 0x53, 0x6c, 0xc9, 0xf3, 0xe1, 0xdf, 0x11, 0xbc, 0x3a, 0xa2, + 0x55, 0xe3, 0x0f, 0x26, 0xcb, 0x31, 0x7e, 0xa8, 0x14, 0x36, 0x5f, 0xd0, 0x3b, 0x6a, 0x03, 0x86, + 0xf5, 0xf4, 0xcf, 0xbf, 0xbf, 0x9b, 0xbb, 0x8c, 0xdf, 0x92, 0x1f, 0x48, 0x5f, 0x44, 0x9d, 0x68, + 0x33, 0x08, 0xfd, 0x4f, 0x69, 0x5b, 0x70, 0xab, 0xf4, 0xc4, 0x4a, 0xf7, 0xfc, 0xdf, 0x10, 0x9c, + 0x1a, 0x6a, 0xe6, 0x78, 0x63, 0x32, 0x8b, 0x71, 0x13, 0xa0, 0x30, 0xf3, 0x20, 0x4a, 0x91, 0x96, + 
0xaf, 0xe4, 0x00, 0xe5, 0x34, 0x63, 0xab, 0xf4, 0x04, 0xff, 0x82, 0xe0, 0x44, 0xa2, 0x0d, 0xe2, + 0xab, 0xb3, 0xc9, 0xd6, 0x97, 0xfb, 0xda, 0xcc, 0x7e, 0xb1, 0xd0, 0x97, 0x15, 0xe7, 0x8b, 0xf8, + 0xc2, 0x24, 0xa1, 0x39, 0x7e, 0x86, 0x20, 0x37, 0xa0, 0x16, 0x7e, 0x7f, 0x26, 0x71, 0x35, 0xd3, + 0x19, 0xde, 0xb2, 0x14, 0xb9, 0x71, 0x82, 0x2a, 0x29, 0xbf, 0x47, 0xb0, 0x9c, 0xec, 0x7d, 0x78, + 0x0a, 0x4d, 0x46, 0xce, 0xe8, 0xc2, 0x39, 0xed, 0x38, 0xf0, 0x11, 0x6e, 0x7e, 0xa8, 0x3f, 0xc2, + 0x8d, 0x77, 0x14, 0xab, 0x4b, 0xc6, 0x64, 0xc9, 0x36, 0x50, 0x09, 0xff, 0x88, 0x60, 0x39, 0xd9, + 0x82, 0xa6, 0x21, 0x36, 0x72, 0x20, 0x4d, 0x22, 0x76, 0x45, 0x11, 0xb3, 0xca, 0x25, 0x45, 0xac, + 0x1f, 0xee, 0x28, 0xdd, 0x24, 0xc3, 0x6f, 0x10, 0x2c, 0x27, 0x87, 0xcc, 0x34, 0x0c, 0x47, 0x8e, + 0xa5, 0xc2, 0x99, 0xa1, 0x1e, 0x54, 0x95, 0xbf, 0x26, 0xba, 0x92, 0xa5, 0x29, 0x2a, 0xf9, 0x15, + 0x82, 0xa5, 0x06, 0x15, 0x75, 0xe2, 0xed, 0xaa, 0x1f, 0x23, 0x6c, 0xe8, 0x98, 0x0e, 0xf1, 0x64, + 0xe6, 0x41, 0xa3, 0xce, 0xbb, 0x9a, 0xc2, 0x44, 0x56, 0x63, 0x53, 0xa5, 0xbd, 0x66, 0x94, 0x55, + 0xda, 0x90, 0x72, 0xbf, 0x17, 0xb6, 0xc7, 0x8b, 0xc1, 0x07, 0x22, 0x4b, 0x65, 0x24, 0x95, 0xda, + 0x51, 0x54, 0x6a, 0x2f, 0x8d, 0x4a, 0x37, 0x45, 0xe5, 0x57, 0x04, 0xb8, 0x49, 0xb9, 0xda, 0xa4, + 0xa1, 0xe7, 0x70, 0x2e, 0xff, 0x07, 0x71, 0x31, 0x95, 0x6c, 0x18, 0xa2, 0x69, 0x5d, 0x9e, 0x02, + 0x19, 0xf7, 0x84, 0x8a, 0xa2, 0xba, 0x69, 0x5c, 0x9f, 0x8e, 0xaa, 0x18, 0x8a, 0xb4, 0x81, 0x4a, + 0xb7, 0xfe, 0x42, 0xf0, 0x66, 0xdb, 0xf7, 0x26, 0x5e, 0xa4, 0x5b, 0xaf, 0x35, 0x22, 0x53, 0x62, + 0x2a, 0xed, 0xca, 0xeb, 0xb3, 0x8b, 0x3e, 0xbe, 0x13, 0xbb, 0x77, 0x7d, 0x97, 0xb0, 0xae, 0xe9, + 0x87, 0x5d, 0xab, 0x4b, 0x99, 0xba, 0x5c, 0x56, 0x64, 0x22, 0x81, 0xc3, 0xc7, 0xff, 0xe8, 0xdf, + 0xd4, 0xcf, 0x3f, 0xcf, 0x5d, 0xaa, 0x45, 0xa1, 0x2a, 0xae, 0xdf, 0xeb, 0x98, 0x71, 0x52, 0x53, + 0x65, 0x7b, 0xfe, 0xc7, 0xf6, 0x60, 0xfd, 0x0f, 0x0d, 0xdc, 0x53, 0xc0, 0xbd, 0x18, 0xb8, 0xa7, + 0x80, 0x7b, 0x1a, 0xb8, 0xf7, 0x60, 0xfd, 0xe1, 0x82, 0xa2, 0xf1, 0xde, 0xbf, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x23, 0xcc, 0x85, 0xa9, 0x6e, 0x10, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/keys.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/keys.pb.go new file mode 100644 index 0000000..ccb2407 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/keys.pb.go @@ -0,0 +1,454 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/keys.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// KeyRange represents a range of rows in a table or index. +// +// A range has a start key and an end key. These keys can be open or +// closed, indicating if the range includes rows with that key. 
+// +// Keys are represented by lists, where the ith value in the list +// corresponds to the ith component of the table or index primary key. +// Individual values are encoded as described +// [here][google.spanner.v1.TypeCode]. +// +// For example, consider the following table definition: +// +// CREATE TABLE UserEvents ( +// UserName STRING(MAX), +// EventDate STRING(10) +// ) PRIMARY KEY(UserName, EventDate); +// +// The following keys name rows in this table: +// +// ["Bob", "2014-09-23"] +// ["Alfred", "2015-06-12"] +// +// Since the `UserEvents` table's `PRIMARY KEY` clause names two +// columns, each `UserEvents` key has two elements; the first is the +// `UserName`, and the second is the `EventDate`. +// +// Key ranges with multiple components are interpreted +// lexicographically by component using the table or index key's declared +// sort order. For example, the following range returns all events for +// user `"Bob"` that occurred in the year 2015: +// +// "start_closed": ["Bob", "2015-01-01"] +// "end_closed": ["Bob", "2015-12-31"] +// +// Start and end keys can omit trailing key components. This affects the +// inclusion and exclusion of rows that exactly match the provided key +// components: if the key is closed, then rows that exactly match the +// provided components are included; if the key is open, then rows +// that exactly match are not included. +// +// For example, the following range includes all events for `"Bob"` that +// occurred during and after the year 2000: +// +// "start_closed": ["Bob", "2000-01-01"] +// "end_closed": ["Bob"] +// +// The next example retrieves all events for `"Bob"`: +// +// "start_closed": ["Bob"] +// "end_closed": ["Bob"] +// +// To retrieve events before the year 2000: +// +// "start_closed": ["Bob"] +// "end_open": ["Bob", "2000-01-01"] +// +// The following range includes all rows in the table: +// +// "start_closed": [] +// "end_closed": [] +// +// This range returns all users whose `UserName` begins with any +// character from A to C: +// +// "start_closed": ["A"] +// "end_open": ["D"] +// +// This range returns all users whose `UserName` begins with B: +// +// "start_closed": ["B"] +// "end_open": ["C"] +// +// Key ranges honor column sort order. For example, suppose a table is +// defined as follows: +// +// CREATE TABLE DescendingSortedTable { +// Key INT64, +// ... +// ) PRIMARY KEY(Key DESC); +// +// The following range retrieves all rows with key values between 1 +// and 100 inclusive: +// +// "start_closed": ["100"] +// "end_closed": ["1"] +// +// Note that 100 is passed as the start, and 1 is passed as the end, +// because `Key` is a descending column in the schema. +type KeyRange struct { + // The start key must be provided. It can be either closed or open. + // + // Types that are valid to be assigned to StartKeyType: + // *KeyRange_StartClosed + // *KeyRange_StartOpen + StartKeyType isKeyRange_StartKeyType `protobuf_oneof:"start_key_type"` + // The end key must be provided. It can be either closed or open. 
+ // + // Types that are valid to be assigned to EndKeyType: + // *KeyRange_EndClosed + // *KeyRange_EndOpen + EndKeyType isKeyRange_EndKeyType `protobuf_oneof:"end_key_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeyRange) Reset() { *m = KeyRange{} } +func (m *KeyRange) String() string { return proto.CompactTextString(m) } +func (*KeyRange) ProtoMessage() {} +func (*KeyRange) Descriptor() ([]byte, []int) { + return fileDescriptor_keys_04f9296dfa9b90cb, []int{0} +} +func (m *KeyRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeyRange.Unmarshal(m, b) +} +func (m *KeyRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeyRange.Marshal(b, m, deterministic) +} +func (dst *KeyRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeyRange.Merge(dst, src) +} +func (m *KeyRange) XXX_Size() int { + return xxx_messageInfo_KeyRange.Size(m) +} +func (m *KeyRange) XXX_DiscardUnknown() { + xxx_messageInfo_KeyRange.DiscardUnknown(m) +} + +var xxx_messageInfo_KeyRange proto.InternalMessageInfo + +type isKeyRange_StartKeyType interface { + isKeyRange_StartKeyType() +} + +type KeyRange_StartClosed struct { + StartClosed *_struct.ListValue `protobuf:"bytes,1,opt,name=start_closed,json=startClosed,proto3,oneof"` +} + +type KeyRange_StartOpen struct { + StartOpen *_struct.ListValue `protobuf:"bytes,2,opt,name=start_open,json=startOpen,proto3,oneof"` +} + +func (*KeyRange_StartClosed) isKeyRange_StartKeyType() {} + +func (*KeyRange_StartOpen) isKeyRange_StartKeyType() {} + +func (m *KeyRange) GetStartKeyType() isKeyRange_StartKeyType { + if m != nil { + return m.StartKeyType + } + return nil +} + +func (m *KeyRange) GetStartClosed() *_struct.ListValue { + if x, ok := m.GetStartKeyType().(*KeyRange_StartClosed); ok { + return x.StartClosed + } + return nil +} + +func (m *KeyRange) GetStartOpen() *_struct.ListValue { + if x, ok := m.GetStartKeyType().(*KeyRange_StartOpen); ok { + return x.StartOpen + } + return nil +} + +type isKeyRange_EndKeyType interface { + isKeyRange_EndKeyType() +} + +type KeyRange_EndClosed struct { + EndClosed *_struct.ListValue `protobuf:"bytes,3,opt,name=end_closed,json=endClosed,proto3,oneof"` +} + +type KeyRange_EndOpen struct { + EndOpen *_struct.ListValue `protobuf:"bytes,4,opt,name=end_open,json=endOpen,proto3,oneof"` +} + +func (*KeyRange_EndClosed) isKeyRange_EndKeyType() {} + +func (*KeyRange_EndOpen) isKeyRange_EndKeyType() {} + +func (m *KeyRange) GetEndKeyType() isKeyRange_EndKeyType { + if m != nil { + return m.EndKeyType + } + return nil +} + +func (m *KeyRange) GetEndClosed() *_struct.ListValue { + if x, ok := m.GetEndKeyType().(*KeyRange_EndClosed); ok { + return x.EndClosed + } + return nil +} + +func (m *KeyRange) GetEndOpen() *_struct.ListValue { + if x, ok := m.GetEndKeyType().(*KeyRange_EndOpen); ok { + return x.EndOpen + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*KeyRange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _KeyRange_OneofMarshaler, _KeyRange_OneofUnmarshaler, _KeyRange_OneofSizer, []interface{}{ + (*KeyRange_StartClosed)(nil), + (*KeyRange_StartOpen)(nil), + (*KeyRange_EndClosed)(nil), + (*KeyRange_EndOpen)(nil), + } +} + +func _KeyRange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*KeyRange) + // start_key_type + switch x := m.StartKeyType.(type) { + case *KeyRange_StartClosed: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StartClosed); err != nil { + return err + } + case *KeyRange_StartOpen: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.StartOpen); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("KeyRange.StartKeyType has unexpected type %T", x) + } + // end_key_type + switch x := m.EndKeyType.(type) { + case *KeyRange_EndClosed: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EndClosed); err != nil { + return err + } + case *KeyRange_EndOpen: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EndOpen); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("KeyRange.EndKeyType has unexpected type %T", x) + } + return nil +} + +func _KeyRange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*KeyRange) + switch tag { + case 1: // start_key_type.start_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.ListValue) + err := b.DecodeMessage(msg) + m.StartKeyType = &KeyRange_StartClosed{msg} + return true, err + case 2: // start_key_type.start_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.ListValue) + err := b.DecodeMessage(msg) + m.StartKeyType = &KeyRange_StartOpen{msg} + return true, err + case 3: // end_key_type.end_closed + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.ListValue) + err := b.DecodeMessage(msg) + m.EndKeyType = &KeyRange_EndClosed{msg} + return true, err + case 4: // end_key_type.end_open + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.ListValue) + err := b.DecodeMessage(msg) + m.EndKeyType = &KeyRange_EndOpen{msg} + return true, err + default: + return false, nil + } +} + +func _KeyRange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*KeyRange) + // start_key_type + switch x := m.StartKeyType.(type) { + case *KeyRange_StartClosed: + s := proto.Size(x.StartClosed) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeyRange_StartOpen: + s := proto.Size(x.StartOpen) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // end_key_type + switch x := m.EndKeyType.(type) { + case *KeyRange_EndClosed: + s := proto.Size(x.EndClosed) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *KeyRange_EndOpen: + s := proto.Size(x.EndOpen) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// `KeySet` defines a collection of Cloud Spanner keys and/or key ranges. 
All +// the keys are expected to be in the same table or index. The keys need +// not be sorted in any particular way. +// +// If the same key is specified multiple times in the set (for example +// if two ranges, two keys, or a key and a range overlap), Cloud Spanner +// behaves as if the key were only specified once. +type KeySet struct { + // A list of specific keys. Entries in `keys` should have exactly as + // many elements as there are columns in the primary or index key + // with which this `KeySet` is used. Individual key values are + // encoded as described [here][google.spanner.v1.TypeCode]. + Keys []*_struct.ListValue `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + // A list of key ranges. See [KeyRange][google.spanner.v1.KeyRange] for more + // information about key range specifications. + Ranges []*KeyRange `protobuf:"bytes,2,rep,name=ranges,proto3" json:"ranges,omitempty"` + // For convenience `all` can be set to `true` to indicate that this + // `KeySet` matches all keys in the table or index. Note that any keys + // specified in `keys` or `ranges` are only yielded once. + All bool `protobuf:"varint,3,opt,name=all,proto3" json:"all,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *KeySet) Reset() { *m = KeySet{} } +func (m *KeySet) String() string { return proto.CompactTextString(m) } +func (*KeySet) ProtoMessage() {} +func (*KeySet) Descriptor() ([]byte, []int) { + return fileDescriptor_keys_04f9296dfa9b90cb, []int{1} +} +func (m *KeySet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_KeySet.Unmarshal(m, b) +} +func (m *KeySet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_KeySet.Marshal(b, m, deterministic) +} +func (dst *KeySet) XXX_Merge(src proto.Message) { + xxx_messageInfo_KeySet.Merge(dst, src) +} +func (m *KeySet) XXX_Size() int { + return xxx_messageInfo_KeySet.Size(m) +} +func (m *KeySet) XXX_DiscardUnknown() { + xxx_messageInfo_KeySet.DiscardUnknown(m) +} + +var xxx_messageInfo_KeySet proto.InternalMessageInfo + +func (m *KeySet) GetKeys() []*_struct.ListValue { + if m != nil { + return m.Keys + } + return nil +} + +func (m *KeySet) GetRanges() []*KeyRange { + if m != nil { + return m.Ranges + } + return nil +} + +func (m *KeySet) GetAll() bool { + if m != nil { + return m.All + } + return false +} + +func init() { + proto.RegisterType((*KeyRange)(nil), "google.spanner.v1.KeyRange") + proto.RegisterType((*KeySet)(nil), "google.spanner.v1.KeySet") +} + +func init() { proto.RegisterFile("google/spanner/v1/keys.proto", fileDescriptor_keys_04f9296dfa9b90cb) } + +var fileDescriptor_keys_04f9296dfa9b90cb = []byte{ + // 371 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6b, 0xea, 0x30, + 0x1c, 0xc7, 0x5f, 0xab, 0xf8, 0x34, 0x8a, 0xf8, 0x0a, 0x8f, 0x57, 0x7c, 0x3b, 0x88, 0xa7, 0x9d, + 0x52, 0x3a, 0x0f, 0x1b, 0x78, 0x18, 0xd4, 0xc3, 0x06, 0x0e, 0x26, 0x15, 0x3c, 0x0c, 0x41, 0xa2, + 0xfd, 0xad, 0x14, 0xb3, 0x24, 0x34, 0xa9, 0xd0, 0xd3, 0xfe, 0x87, 0xfd, 0x05, 0x3b, 0xef, 0x4f, + 0xd9, 0x5f, 0x35, 0x92, 0xa6, 0x63, 0x20, 0x6c, 0xde, 0x12, 0x3e, 0xbf, 0xcf, 0xf7, 0x9b, 0x26, + 0x45, 0x67, 0x29, 0xe7, 0x29, 0x85, 0x40, 0x0a, 0xc2, 0x18, 0xe4, 0xc1, 0x21, 0x0c, 0xf6, 0x50, + 0x4a, 0x2c, 0x72, 0xae, 0xb8, 0xf7, 0xa7, 0xa2, 0xd8, 0x52, 0x7c, 0x08, 0x87, 0xb5, 0x40, 0x44, + 0x16, 0x10, 0xc6, 0xb8, 0x22, 0x2a, 0xe3, 0xcc, 0x0a, 0x9f, 0xd4, 0xec, 0xb6, 
0xc5, 0x63, 0x20, + 0x55, 0x5e, 0xec, 0x54, 0x45, 0xc7, 0xaf, 0x2e, 0x6a, 0xcf, 0xa1, 0x8c, 0x09, 0x4b, 0xc1, 0xbb, + 0x46, 0x3d, 0xa9, 0x48, 0xae, 0x36, 0x3b, 0xca, 0x25, 0x24, 0xbe, 0x33, 0x72, 0xce, 0xbb, 0x17, + 0x43, 0x6c, 0x2b, 0xeb, 0x04, 0x7c, 0x97, 0x49, 0xb5, 0x22, 0xb4, 0x80, 0xdb, 0x5f, 0x71, 0xd7, + 0x18, 0x33, 0x23, 0x78, 0x53, 0x84, 0xaa, 0x00, 0x2e, 0x80, 0xf9, 0xee, 0x09, 0x7a, 0xc7, 0xcc, + 0xdf, 0x0b, 0x60, 0x5a, 0x06, 0x96, 0xd4, 0xdd, 0x8d, 0x1f, 0x65, 0x27, 0xee, 0x00, 0x4b, 0x6c, + 0xf3, 0x25, 0x6a, 0x6b, 0xd9, 0xf4, 0x36, 0x4f, 0x50, 0x7f, 0x03, 0x4b, 0x74, 0x6b, 0x34, 0x40, + 0xfd, 0xea, 0xc8, 0x7b, 0x28, 0x37, 0xaa, 0x14, 0x10, 0xf5, 0x51, 0x4f, 0x47, 0xd5, 0xfb, 0xf1, + 0x33, 0x6a, 0xcd, 0xa1, 0x5c, 0x82, 0xf2, 0x30, 0x6a, 0xea, 0x97, 0xf0, 0x9d, 0x51, 0xe3, 0xfb, + 0x82, 0xd8, 0xcc, 0x79, 0x13, 0xd4, 0xca, 0xf5, 0xc5, 0x4a, 0xdf, 0x35, 0xc6, 0x7f, 0x7c, 0xf4, + 0x78, 0xb8, 0xbe, 0xfc, 0xd8, 0x8e, 0x7a, 0x03, 0xd4, 0x20, 0x94, 0x9a, 0xef, 0x6f, 0xc7, 0x7a, + 0x19, 0xbd, 0x38, 0xe8, 0xef, 0x8e, 0x3f, 0x1d, 0xcb, 0x51, 0x67, 0x0e, 0xa5, 0x5c, 0xe8, 0xfa, + 0x85, 0xf3, 0x70, 0x65, 0x79, 0xca, 0x29, 0x61, 0x29, 0xe6, 0x79, 0x1a, 0xa4, 0xc0, 0xcc, 0xe1, + 0x82, 0x0a, 0x11, 0x91, 0xc9, 0x2f, 0xbf, 0xd5, 0xd4, 0x2e, 0xdf, 0xdc, 0x7f, 0x37, 0x95, 0x3a, + 0xa3, 0xbc, 0x48, 0xf0, 0xd2, 0x16, 0xac, 0xc2, 0xf7, 0x9a, 0xac, 0x0d, 0x59, 0x5b, 0xb2, 0x5e, + 0x85, 0xdb, 0x96, 0x09, 0x9e, 0x7c, 0x04, 0x00, 0x00, 0xff, 0xff, 0x27, 0x88, 0xea, 0x11, 0xae, + 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/mutation.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/mutation.pb.go new file mode 100644 index 0000000..7887ab3 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/mutation.pb.go @@ -0,0 +1,437 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/mutation.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A modification to one or more Cloud Spanner rows. Mutations can be +// applied to a Cloud Spanner database by sending them in a +// [Commit][google.spanner.v1.Spanner.Commit] call. +type Mutation struct { + // Required. The operation to perform. 
+ // + // Types that are valid to be assigned to Operation: + // *Mutation_Insert + // *Mutation_Update + // *Mutation_InsertOrUpdate + // *Mutation_Replace + // *Mutation_Delete_ + Operation isMutation_Operation `protobuf_oneof:"operation"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation) Reset() { *m = Mutation{} } +func (m *Mutation) String() string { return proto.CompactTextString(m) } +func (*Mutation) ProtoMessage() {} +func (*Mutation) Descriptor() ([]byte, []int) { + return fileDescriptor_mutation_469b217da29c28c2, []int{0} +} +func (m *Mutation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation.Unmarshal(m, b) +} +func (m *Mutation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation.Marshal(b, m, deterministic) +} +func (dst *Mutation) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation.Merge(dst, src) +} +func (m *Mutation) XXX_Size() int { + return xxx_messageInfo_Mutation.Size(m) +} +func (m *Mutation) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation proto.InternalMessageInfo + +type isMutation_Operation interface { + isMutation_Operation() +} + +type Mutation_Insert struct { + Insert *Mutation_Write `protobuf:"bytes,1,opt,name=insert,proto3,oneof"` +} + +type Mutation_Update struct { + Update *Mutation_Write `protobuf:"bytes,2,opt,name=update,proto3,oneof"` +} + +type Mutation_InsertOrUpdate struct { + InsertOrUpdate *Mutation_Write `protobuf:"bytes,3,opt,name=insert_or_update,json=insertOrUpdate,proto3,oneof"` +} + +type Mutation_Replace struct { + Replace *Mutation_Write `protobuf:"bytes,4,opt,name=replace,proto3,oneof"` +} + +type Mutation_Delete_ struct { + Delete *Mutation_Delete `protobuf:"bytes,5,opt,name=delete,proto3,oneof"` +} + +func (*Mutation_Insert) isMutation_Operation() {} + +func (*Mutation_Update) isMutation_Operation() {} + +func (*Mutation_InsertOrUpdate) isMutation_Operation() {} + +func (*Mutation_Replace) isMutation_Operation() {} + +func (*Mutation_Delete_) isMutation_Operation() {} + +func (m *Mutation) GetOperation() isMutation_Operation { + if m != nil { + return m.Operation + } + return nil +} + +func (m *Mutation) GetInsert() *Mutation_Write { + if x, ok := m.GetOperation().(*Mutation_Insert); ok { + return x.Insert + } + return nil +} + +func (m *Mutation) GetUpdate() *Mutation_Write { + if x, ok := m.GetOperation().(*Mutation_Update); ok { + return x.Update + } + return nil +} + +func (m *Mutation) GetInsertOrUpdate() *Mutation_Write { + if x, ok := m.GetOperation().(*Mutation_InsertOrUpdate); ok { + return x.InsertOrUpdate + } + return nil +} + +func (m *Mutation) GetReplace() *Mutation_Write { + if x, ok := m.GetOperation().(*Mutation_Replace); ok { + return x.Replace + } + return nil +} + +func (m *Mutation) GetDelete() *Mutation_Delete { + if x, ok := m.GetOperation().(*Mutation_Delete_); ok { + return x.Delete + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Mutation) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Mutation_OneofMarshaler, _Mutation_OneofUnmarshaler, _Mutation_OneofSizer, []interface{}{ + (*Mutation_Insert)(nil), + (*Mutation_Update)(nil), + (*Mutation_InsertOrUpdate)(nil), + (*Mutation_Replace)(nil), + (*Mutation_Delete_)(nil), + } +} + +func _Mutation_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Insert); err != nil { + return err + } + case *Mutation_Update: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Update); err != nil { + return err + } + case *Mutation_InsertOrUpdate: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InsertOrUpdate); err != nil { + return err + } + case *Mutation_Replace: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Replace); err != nil { + return err + } + case *Mutation_Delete_: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Delete); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Mutation.Operation has unexpected type %T", x) + } + return nil +} + +func _Mutation_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Mutation) + switch tag { + case 1: // operation.insert + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_Write) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Insert{msg} + return true, err + case 2: // operation.update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_Write) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Update{msg} + return true, err + case 3: // operation.insert_or_update + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_Write) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_InsertOrUpdate{msg} + return true, err + case 4: // operation.replace + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_Write) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Replace{msg} + return true, err + case 5: // operation.delete + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mutation_Delete) + err := b.DecodeMessage(msg) + m.Operation = &Mutation_Delete_{msg} + return true, err + default: + return false, nil + } +} + +func _Mutation_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Mutation) + // operation + switch x := m.Operation.(type) { + case *Mutation_Insert: + s := proto.Size(x.Insert) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Update: + s := proto.Size(x.Update) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_InsertOrUpdate: + s := proto.Size(x.InsertOrUpdate) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Replace: + s := proto.Size(x.Replace) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Mutation_Delete_: + s := proto.Size(x.Delete) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: 
unexpected type %T in oneof", x)) + } + return n +} + +// Arguments to [insert][google.spanner.v1.Mutation.insert], +// [update][google.spanner.v1.Mutation.update], +// [insert_or_update][google.spanner.v1.Mutation.insert_or_update], and +// [replace][google.spanner.v1.Mutation.replace] operations. +type Mutation_Write struct { + // Required. The table whose rows will be written. + Table string `protobuf:"bytes,1,opt,name=table,proto3" json:"table,omitempty"` + // The names of the columns in + // [table][google.spanner.v1.Mutation.Write.table] to be written. + // + // The list of columns must contain enough columns to allow + // Cloud Spanner to derive values for all primary key columns in the + // row(s) to be modified. + Columns []string `protobuf:"bytes,2,rep,name=columns,proto3" json:"columns,omitempty"` + // The values to be written. `values` can contain more than one + // list of values. If it does, then multiple rows are written, one + // for each entry in `values`. Each list in `values` must have + // exactly as many entries as there are entries in + // [columns][google.spanner.v1.Mutation.Write.columns] above. Sending + // multiple lists is equivalent to sending multiple `Mutation`s, each + // containing one `values` entry and repeating + // [table][google.spanner.v1.Mutation.Write.table] and + // [columns][google.spanner.v1.Mutation.Write.columns]. Individual values in + // each list are encoded as described [here][google.spanner.v1.TypeCode]. + Values []*_struct.ListValue `protobuf:"bytes,3,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_Write) Reset() { *m = Mutation_Write{} } +func (m *Mutation_Write) String() string { return proto.CompactTextString(m) } +func (*Mutation_Write) ProtoMessage() {} +func (*Mutation_Write) Descriptor() ([]byte, []int) { + return fileDescriptor_mutation_469b217da29c28c2, []int{0, 0} +} +func (m *Mutation_Write) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_Write.Unmarshal(m, b) +} +func (m *Mutation_Write) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_Write.Marshal(b, m, deterministic) +} +func (dst *Mutation_Write) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_Write.Merge(dst, src) +} +func (m *Mutation_Write) XXX_Size() int { + return xxx_messageInfo_Mutation_Write.Size(m) +} +func (m *Mutation_Write) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_Write.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_Write proto.InternalMessageInfo + +func (m *Mutation_Write) GetTable() string { + if m != nil { + return m.Table + } + return "" +} + +func (m *Mutation_Write) GetColumns() []string { + if m != nil { + return m.Columns + } + return nil +} + +func (m *Mutation_Write) GetValues() []*_struct.ListValue { + if m != nil { + return m.Values + } + return nil +} + +// Arguments to [delete][google.spanner.v1.Mutation.delete] operations. +type Mutation_Delete struct { + // Required. The table whose rows will be deleted. + Table string `protobuf:"bytes,1,opt,name=table,proto3" json:"table,omitempty"` + // Required. The primary keys of the rows within + // [table][google.spanner.v1.Mutation.Delete.table] to delete. Delete is + // idempotent. The transaction will succeed even if some or all rows do not + // exist. 
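For illustration only, a minimal sketch of building a delete Mutation out of the types in this file and keys.pb.go; the `Users` table name and its single STRING primary-key column are assumptions, and `structpb` stands for the same "github.com/golang/protobuf/ptypes/struct" package that this file imports as `_struct`:

    func deleteUsersByName(names ...string) *Mutation {
        ks := &KeySet{}
        for _, n := range names {
            // Each key is a ListValue with one element per primary-key column.
            ks.Keys = append(ks.Keys, &structpb.ListValue{
                Values: []*structpb.Value{{Kind: &structpb.Value_StringValue{StringValue: n}}},
            })
        }
        return &Mutation{Operation: &Mutation_Delete_{Delete: &Mutation_Delete{
            Table:  "Users", // assumed table with a single STRING primary-key column
            KeySet: ks,
        }}}
    }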
+ KeySet *KeySet `protobuf:"bytes,2,opt,name=key_set,json=keySet,proto3" json:"key_set,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mutation_Delete) Reset() { *m = Mutation_Delete{} } +func (m *Mutation_Delete) String() string { return proto.CompactTextString(m) } +func (*Mutation_Delete) ProtoMessage() {} +func (*Mutation_Delete) Descriptor() ([]byte, []int) { + return fileDescriptor_mutation_469b217da29c28c2, []int{0, 1} +} +func (m *Mutation_Delete) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mutation_Delete.Unmarshal(m, b) +} +func (m *Mutation_Delete) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mutation_Delete.Marshal(b, m, deterministic) +} +func (dst *Mutation_Delete) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mutation_Delete.Merge(dst, src) +} +func (m *Mutation_Delete) XXX_Size() int { + return xxx_messageInfo_Mutation_Delete.Size(m) +} +func (m *Mutation_Delete) XXX_DiscardUnknown() { + xxx_messageInfo_Mutation_Delete.DiscardUnknown(m) +} + +var xxx_messageInfo_Mutation_Delete proto.InternalMessageInfo + +func (m *Mutation_Delete) GetTable() string { + if m != nil { + return m.Table + } + return "" +} + +func (m *Mutation_Delete) GetKeySet() *KeySet { + if m != nil { + return m.KeySet + } + return nil +} + +func init() { + proto.RegisterType((*Mutation)(nil), "google.spanner.v1.Mutation") + proto.RegisterType((*Mutation_Write)(nil), "google.spanner.v1.Mutation.Write") + proto.RegisterType((*Mutation_Delete)(nil), "google.spanner.v1.Mutation.Delete") +} + +func init() { + proto.RegisterFile("google/spanner/v1/mutation.proto", fileDescriptor_mutation_469b217da29c28c2) +} + +var fileDescriptor_mutation_469b217da29c28c2 = []byte{ + // 413 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0xd1, 0xea, 0xd3, 0x30, + 0x14, 0xc6, 0xed, 0xba, 0x75, 0x2e, 0x43, 0xd1, 0xa2, 0x58, 0x8b, 0x17, 0x75, 0x57, 0xbb, 0x4a, + 0x69, 0xbd, 0x11, 0xa6, 0x37, 0x53, 0x50, 0xd0, 0xe1, 0xe8, 0x70, 0x82, 0x0c, 0x46, 0xd6, 0x1d, + 0x4b, 0x69, 0x96, 0x94, 0x24, 0x1d, 0xec, 0x45, 0xbc, 0xf4, 0x01, 0x7c, 0x14, 0x9f, 0x4a, 0x9a, + 0xa4, 0x32, 0x9c, 0xfe, 0xd9, 0xff, 0xaa, 0x3d, 0x7c, 0xdf, 0xef, 0x3b, 0xe7, 0x24, 0x41, 0x51, + 0xc1, 0x79, 0x41, 0x21, 0x96, 0x35, 0x61, 0x0c, 0x44, 0x7c, 0x4c, 0xe2, 0x43, 0xa3, 0x88, 0x2a, + 0x39, 0xc3, 0xb5, 0xe0, 0x8a, 0xfb, 0x0f, 0x8d, 0x03, 0x5b, 0x07, 0x3e, 0x26, 0xe1, 0x33, 0x0b, + 0x91, 0xba, 0x8c, 0x09, 0x63, 0xdc, 0xf8, 0xa5, 0x01, 0xfe, 0xa8, 0xba, 0xda, 0x35, 0xdf, 0x62, + 0xa9, 0x44, 0x93, 0xab, 0xbf, 0xd4, 0xb3, 0x86, 0x15, 0x9c, 0x2c, 0x3b, 0xf9, 0xd1, 0x47, 0x77, + 0x17, 0xb6, 0xbf, 0x3f, 0x43, 0x5e, 0xc9, 0x24, 0x08, 0x15, 0x38, 0x91, 0x33, 0x1d, 0xa7, 0xcf, + 0xf1, 0xc5, 0x28, 0xb8, 0x33, 0xe3, 0x2f, 0xa2, 0x54, 0xf0, 0xfe, 0x4e, 0x66, 0x91, 0x16, 0x6e, + 0xea, 0x3d, 0x51, 0x10, 0xf4, 0x6e, 0x01, 0x1b, 0xc4, 0x5f, 0xa0, 0x07, 0x26, 0x66, 0xcb, 0xc5, + 0xd6, 0xc6, 0xb8, 0xd7, 0xc7, 0xdc, 0x37, 0xf0, 0x27, 0xf1, 0xd9, 0xc4, 0xbd, 0x46, 0x43, 0x01, + 0x35, 0x25, 0x39, 0x04, 0xfd, 0xeb, 0x53, 0x3a, 0xc6, 0x7f, 0x85, 0xbc, 0x3d, 0x50, 0x50, 0x10, + 0x0c, 0x34, 0x3d, 0xb9, 0x89, 0x7e, 0xab, 0x9d, 0xed, 0x2e, 0x86, 0x09, 0x2b, 0x34, 0xd0, 0x89, + 0xfe, 0x23, 0x34, 0x50, 0x64, 0x47, 0x41, 0x9f, 0xe6, 0x28, 0x33, 0x85, 0x1f, 0xa0, 0x61, 0xce, + 0x69, 0x73, 0x60, 0x32, 0xe8, 0x45, 0xee, 0x74, 0x94, 0x75, 0xa5, 0x9f, 0x22, 0xef, 0x48, 0x68, + 0x03, 0x32, 0x70, 
0x23, 0x77, 0x3a, 0x4e, 0xc3, 0xae, 0x6d, 0x77, 0xb1, 0xf8, 0x63, 0x29, 0xd5, + 0xba, 0xb5, 0x64, 0xd6, 0x19, 0x66, 0xc8, 0x33, 0x03, 0xfc, 0xa7, 0x5b, 0x8a, 0x86, 0x15, 0x9c, + 0xb6, 0x12, 0x94, 0xbd, 0x96, 0xa7, 0xff, 0xd8, 0xe5, 0x03, 0x9c, 0x56, 0xa0, 0x32, 0xaf, 0xd2, + 0xdf, 0xf9, 0x18, 0x8d, 0x78, 0x0d, 0x42, 0xaf, 0x37, 0xff, 0xee, 0xa0, 0xc7, 0x39, 0x3f, 0x5c, + 0x52, 0xf3, 0x7b, 0xdd, 0x11, 0x2c, 0xdb, 0xf1, 0x96, 0xce, 0xd7, 0x97, 0xd6, 0x53, 0x70, 0x4a, + 0x58, 0x81, 0xb9, 0x28, 0xe2, 0x02, 0x98, 0x1e, 0x3e, 0x36, 0x12, 0xa9, 0x4b, 0x79, 0xf6, 0x10, + 0x67, 0xf6, 0xf7, 0x67, 0xef, 0xc9, 0x3b, 0x83, 0xbe, 0xa1, 0xbc, 0xd9, 0xe3, 0x95, 0x6d, 0xb2, + 0x4e, 0x7e, 0x75, 0xca, 0x46, 0x2b, 0x1b, 0xab, 0x6c, 0xd6, 0xc9, 0xce, 0xd3, 0xc1, 0x2f, 0x7e, + 0x07, 0x00, 0x00, 0xff, 0xff, 0x6b, 0x69, 0x1c, 0xbc, 0x51, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/query_plan.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/query_plan.pb.go new file mode 100644 index 0000000..588ca58 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/query_plan.pb.go @@ -0,0 +1,388 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/query_plan.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The kind of [PlanNode][google.spanner.v1.PlanNode]. Distinguishes between +// the two different kinds of nodes that can appear in a query plan. +type PlanNode_Kind int32 + +const ( + // Not specified. + PlanNode_KIND_UNSPECIFIED PlanNode_Kind = 0 + // Denotes a Relational operator node in the expression tree. Relational + // operators represent iterative processing of rows during query execution. + // For example, a `TableScan` operation that reads rows from a table. + PlanNode_RELATIONAL PlanNode_Kind = 1 + // Denotes a Scalar node in the expression tree. Scalar nodes represent + // non-iterable entities in the query plan. For example, constants or + // arithmetic operators appearing inside predicate expressions or references + // to column names. + PlanNode_SCALAR PlanNode_Kind = 2 +) + +var PlanNode_Kind_name = map[int32]string{ + 0: "KIND_UNSPECIFIED", + 1: "RELATIONAL", + 2: "SCALAR", +} +var PlanNode_Kind_value = map[string]int32{ + "KIND_UNSPECIFIED": 0, + "RELATIONAL": 1, + "SCALAR": 2, +} + +func (x PlanNode_Kind) String() string { + return proto.EnumName(PlanNode_Kind_name, int32(x)) +} +func (PlanNode_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_query_plan_b863df8d6eeb1d82, []int{0, 0} +} + +// Node information for nodes appearing in a +// [QueryPlan.plan_nodes][google.spanner.v1.QueryPlan.plan_nodes]. +type PlanNode struct { + // The `PlanNode`'s index in [node + // list][google.spanner.v1.QueryPlan.plan_nodes]. 
+ Index int32 `protobuf:"varint,1,opt,name=index,proto3" json:"index,omitempty"` + // Used to determine the type of node. May be needed for visualizing + // different kinds of nodes differently. For example, If the node is a + // [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] node, it will have a + // condensed representation which can be used to directly embed a description + // of the node in its parent. + Kind PlanNode_Kind `protobuf:"varint,2,opt,name=kind,proto3,enum=google.spanner.v1.PlanNode_Kind" json:"kind,omitempty"` + // The display name for the node. + DisplayName string `protobuf:"bytes,3,opt,name=display_name,json=displayName,proto3" json:"display_name,omitempty"` + // List of child node `index`es and their relationship to this parent. + ChildLinks []*PlanNode_ChildLink `protobuf:"bytes,4,rep,name=child_links,json=childLinks,proto3" json:"child_links,omitempty"` + // Condensed representation for + // [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] nodes. + ShortRepresentation *PlanNode_ShortRepresentation `protobuf:"bytes,5,opt,name=short_representation,json=shortRepresentation,proto3" json:"short_representation,omitempty"` + // Attributes relevant to the node contained in a group of key-value pairs. + // For example, a Parameter Reference node could have the following + // information in its metadata: + // + // { + // "parameter_reference": "param1", + // "parameter_type": "array" + // } + Metadata *_struct.Struct `protobuf:"bytes,6,opt,name=metadata,proto3" json:"metadata,omitempty"` + // The execution statistics associated with the node, contained in a group of + // key-value pairs. Only present if the plan was returned as a result of a + // profile query. For example, number of executions, number of rows/time per + // execution etc. 
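Since both `metadata` and `execution_stats` are plain structpb Structs, reading a statistic is only a map lookup. A sketch, assuming `node` is a *PlanNode from a profiled query, `fmt` is imported, and the "execution_count" key is hypothetical (the actual keys depend on what the backend populates):

    if stats := node.GetExecutionStats(); stats != nil {
        if v, ok := stats.GetFields()["execution_count"]; ok { // hypothetical key name
            fmt.Println("executions:", v.GetStringValue())
        }
    }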
+ ExecutionStats *_struct.Struct `protobuf:"bytes,7,opt,name=execution_stats,json=executionStats,proto3" json:"execution_stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlanNode) Reset() { *m = PlanNode{} } +func (m *PlanNode) String() string { return proto.CompactTextString(m) } +func (*PlanNode) ProtoMessage() {} +func (*PlanNode) Descriptor() ([]byte, []int) { + return fileDescriptor_query_plan_b863df8d6eeb1d82, []int{0} +} +func (m *PlanNode) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlanNode.Unmarshal(m, b) +} +func (m *PlanNode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlanNode.Marshal(b, m, deterministic) +} +func (dst *PlanNode) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlanNode.Merge(dst, src) +} +func (m *PlanNode) XXX_Size() int { + return xxx_messageInfo_PlanNode.Size(m) +} +func (m *PlanNode) XXX_DiscardUnknown() { + xxx_messageInfo_PlanNode.DiscardUnknown(m) +} + +var xxx_messageInfo_PlanNode proto.InternalMessageInfo + +func (m *PlanNode) GetIndex() int32 { + if m != nil { + return m.Index + } + return 0 +} + +func (m *PlanNode) GetKind() PlanNode_Kind { + if m != nil { + return m.Kind + } + return PlanNode_KIND_UNSPECIFIED +} + +func (m *PlanNode) GetDisplayName() string { + if m != nil { + return m.DisplayName + } + return "" +} + +func (m *PlanNode) GetChildLinks() []*PlanNode_ChildLink { + if m != nil { + return m.ChildLinks + } + return nil +} + +func (m *PlanNode) GetShortRepresentation() *PlanNode_ShortRepresentation { + if m != nil { + return m.ShortRepresentation + } + return nil +} + +func (m *PlanNode) GetMetadata() *_struct.Struct { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *PlanNode) GetExecutionStats() *_struct.Struct { + if m != nil { + return m.ExecutionStats + } + return nil +} + +// Metadata associated with a parent-child relationship appearing in a +// [PlanNode][google.spanner.v1.PlanNode]. +type PlanNode_ChildLink struct { + // The node to which the link points. + ChildIndex int32 `protobuf:"varint,1,opt,name=child_index,json=childIndex,proto3" json:"child_index,omitempty"` + // The type of the link. For example, in Hash Joins this could be used to + // distinguish between the build child and the probe child, or in the case + // of the child being an output variable, to represent the tag associated + // with the output variable. + Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + // Only present if the child node is + // [SCALAR][google.spanner.v1.PlanNode.Kind.SCALAR] and corresponds to an + // output variable of the parent node. The field carries the name of the + // output variable. For example, a `TableScan` operator that reads rows from + // a table will have child links to the `SCALAR` nodes representing the + // output variables created for each column that is read by the operator. + // The corresponding `variable` fields will be set to the variable names + // assigned to the columns. 
+ Variable string `protobuf:"bytes,3,opt,name=variable,proto3" json:"variable,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlanNode_ChildLink) Reset() { *m = PlanNode_ChildLink{} } +func (m *PlanNode_ChildLink) String() string { return proto.CompactTextString(m) } +func (*PlanNode_ChildLink) ProtoMessage() {} +func (*PlanNode_ChildLink) Descriptor() ([]byte, []int) { + return fileDescriptor_query_plan_b863df8d6eeb1d82, []int{0, 0} +} +func (m *PlanNode_ChildLink) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlanNode_ChildLink.Unmarshal(m, b) +} +func (m *PlanNode_ChildLink) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlanNode_ChildLink.Marshal(b, m, deterministic) +} +func (dst *PlanNode_ChildLink) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlanNode_ChildLink.Merge(dst, src) +} +func (m *PlanNode_ChildLink) XXX_Size() int { + return xxx_messageInfo_PlanNode_ChildLink.Size(m) +} +func (m *PlanNode_ChildLink) XXX_DiscardUnknown() { + xxx_messageInfo_PlanNode_ChildLink.DiscardUnknown(m) +} + +var xxx_messageInfo_PlanNode_ChildLink proto.InternalMessageInfo + +func (m *PlanNode_ChildLink) GetChildIndex() int32 { + if m != nil { + return m.ChildIndex + } + return 0 +} + +func (m *PlanNode_ChildLink) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *PlanNode_ChildLink) GetVariable() string { + if m != nil { + return m.Variable + } + return "" +} + +// Condensed representation of a node and its subtree. Only present for +// `SCALAR` [PlanNode(s)][google.spanner.v1.PlanNode]. +type PlanNode_ShortRepresentation struct { + // A string representation of the expression subtree rooted at this node. + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // A mapping of (subquery variable name) -> (subquery node id) for cases + // where the `description` string of this node references a `SCALAR` + // subquery contained in the expression subtree rooted at this node. The + // referenced `SCALAR` subquery may not necessarily be a direct child of + // this node. 
+ Subqueries map[string]int32 `protobuf:"bytes,2,rep,name=subqueries,proto3" json:"subqueries,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PlanNode_ShortRepresentation) Reset() { *m = PlanNode_ShortRepresentation{} } +func (m *PlanNode_ShortRepresentation) String() string { return proto.CompactTextString(m) } +func (*PlanNode_ShortRepresentation) ProtoMessage() {} +func (*PlanNode_ShortRepresentation) Descriptor() ([]byte, []int) { + return fileDescriptor_query_plan_b863df8d6eeb1d82, []int{0, 1} +} +func (m *PlanNode_ShortRepresentation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PlanNode_ShortRepresentation.Unmarshal(m, b) +} +func (m *PlanNode_ShortRepresentation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PlanNode_ShortRepresentation.Marshal(b, m, deterministic) +} +func (dst *PlanNode_ShortRepresentation) XXX_Merge(src proto.Message) { + xxx_messageInfo_PlanNode_ShortRepresentation.Merge(dst, src) +} +func (m *PlanNode_ShortRepresentation) XXX_Size() int { + return xxx_messageInfo_PlanNode_ShortRepresentation.Size(m) +} +func (m *PlanNode_ShortRepresentation) XXX_DiscardUnknown() { + xxx_messageInfo_PlanNode_ShortRepresentation.DiscardUnknown(m) +} + +var xxx_messageInfo_PlanNode_ShortRepresentation proto.InternalMessageInfo + +func (m *PlanNode_ShortRepresentation) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *PlanNode_ShortRepresentation) GetSubqueries() map[string]int32 { + if m != nil { + return m.Subqueries + } + return nil +} + +// Contains an ordered list of nodes appearing in the query plan. +type QueryPlan struct { + // The nodes in the query plan. Plan nodes are returned in pre-order starting + // with the plan root. Each [PlanNode][google.spanner.v1.PlanNode]'s `id` + // corresponds to its index in `plan_nodes`. 
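Because each node's `index` doubles as its position in `plan_nodes`, the tree can be walked with nothing more than the child indices. A rough sketch, assuming `fmt` is imported and `plan` is a *QueryPlan:

    func printPlanNode(plan *QueryPlan, index int32, depth int) {
        nodes := plan.GetPlanNodes()
        if index < 0 || int(index) >= len(nodes) {
            return
        }
        n := nodes[index]
        fmt.Printf("%*s%s (%v)\n", 2*depth, "", n.GetDisplayName(), n.GetKind())
        for _, link := range n.GetChildLinks() {
            printPlanNode(plan, link.GetChildIndex(), depth+1)
        }
    }
    // printPlanNode(plan, 0, 0) starts at the root, which is returned first (pre-order).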
+ PlanNodes []*PlanNode `protobuf:"bytes,1,rep,name=plan_nodes,json=planNodes,proto3" json:"plan_nodes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueryPlan) Reset() { *m = QueryPlan{} } +func (m *QueryPlan) String() string { return proto.CompactTextString(m) } +func (*QueryPlan) ProtoMessage() {} +func (*QueryPlan) Descriptor() ([]byte, []int) { + return fileDescriptor_query_plan_b863df8d6eeb1d82, []int{1} +} +func (m *QueryPlan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueryPlan.Unmarshal(m, b) +} +func (m *QueryPlan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueryPlan.Marshal(b, m, deterministic) +} +func (dst *QueryPlan) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryPlan.Merge(dst, src) +} +func (m *QueryPlan) XXX_Size() int { + return xxx_messageInfo_QueryPlan.Size(m) +} +func (m *QueryPlan) XXX_DiscardUnknown() { + xxx_messageInfo_QueryPlan.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryPlan proto.InternalMessageInfo + +func (m *QueryPlan) GetPlanNodes() []*PlanNode { + if m != nil { + return m.PlanNodes + } + return nil +} + +func init() { + proto.RegisterType((*PlanNode)(nil), "google.spanner.v1.PlanNode") + proto.RegisterType((*PlanNode_ChildLink)(nil), "google.spanner.v1.PlanNode.ChildLink") + proto.RegisterType((*PlanNode_ShortRepresentation)(nil), "google.spanner.v1.PlanNode.ShortRepresentation") + proto.RegisterMapType((map[string]int32)(nil), "google.spanner.v1.PlanNode.ShortRepresentation.SubqueriesEntry") + proto.RegisterType((*QueryPlan)(nil), "google.spanner.v1.QueryPlan") + proto.RegisterEnum("google.spanner.v1.PlanNode_Kind", PlanNode_Kind_name, PlanNode_Kind_value) +} + +func init() { + proto.RegisterFile("google/spanner/v1/query_plan.proto", fileDescriptor_query_plan_b863df8d6eeb1d82) +} + +var fileDescriptor_query_plan_b863df8d6eeb1d82 = []byte{ + // 604 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6e, 0xd3, 0x4c, + 0x10, 0xfd, 0x9c, 0x26, 0xf9, 0x9a, 0x09, 0x4a, 0xc3, 0xb6, 0xa8, 0x56, 0x40, 0xc2, 0x44, 0x42, + 0xca, 0x95, 0xad, 0xb4, 0x5c, 0x54, 0x45, 0x08, 0xd2, 0x34, 0xad, 0xa2, 0x46, 0x21, 0xac, 0xa1, + 0x17, 0x28, 0x92, 0xb5, 0x89, 0x97, 0x74, 0x15, 0x67, 0xd7, 0x78, 0xed, 0xa8, 0x79, 0x09, 0x6e, + 0x79, 0x07, 0x1e, 0x85, 0x17, 0xe0, 0x75, 0xd0, 0xae, 0x7f, 0x28, 0x14, 0x45, 0xe2, 0x6e, 0x66, + 0xe7, 0xcc, 0xf1, 0xce, 0x39, 0xb3, 0x86, 0xf6, 0x42, 0x88, 0x45, 0x40, 0x1d, 0x19, 0x12, 0xce, + 0x69, 0xe4, 0xac, 0xbb, 0xce, 0xe7, 0x84, 0x46, 0x1b, 0x2f, 0x0c, 0x08, 0xb7, 0xc3, 0x48, 0xc4, + 0x02, 0x3d, 0x4c, 0x31, 0x76, 0x86, 0xb1, 0xd7, 0xdd, 0xd6, 0x93, 0xac, 0x8d, 0x84, 0xcc, 0x21, + 0x9c, 0x8b, 0x98, 0xc4, 0x4c, 0x70, 0x99, 0x36, 0x14, 0x55, 0x9d, 0xcd, 0x92, 0x4f, 0x8e, 0x8c, + 0xa3, 0x64, 0x1e, 0xa7, 0xd5, 0xf6, 0x97, 0x2a, 0xec, 0x4e, 0x02, 0xc2, 0xc7, 0xc2, 0xa7, 0xe8, + 0x00, 0x2a, 0x8c, 0xfb, 0xf4, 0xd6, 0x34, 0x2c, 0xa3, 0x53, 0xc1, 0x69, 0x82, 0x5e, 0x40, 0x79, + 0xc9, 0xb8, 0x6f, 0x96, 0x2c, 0xa3, 0xd3, 0x38, 0xb2, 0xec, 0x7b, 0x17, 0xb0, 0x73, 0x02, 0xfb, + 0x8a, 0x71, 0x1f, 0x6b, 0x34, 0x7a, 0x06, 0x0f, 0x7c, 0x26, 0xc3, 0x80, 0x6c, 0x3c, 0x4e, 0x56, + 0xd4, 0xdc, 0xb1, 0x8c, 0x4e, 0x0d, 0xd7, 0xb3, 0xb3, 0x31, 0x59, 0x51, 0x74, 0x01, 0xf5, 0xf9, + 0x0d, 0x0b, 0x7c, 0x2f, 0x60, 0x7c, 0x29, 0xcd, 0xb2, 0xb5, 0xd3, 0xa9, 0x1f, 0x3d, 0xdf, 0xc6, + 0xdf, 0x57, 0xf0, 0x11, 0xe3, 0x4b, 0x0c, 0xf3, 0x3c, 0x94, 0x68, 
0x06, 0x07, 0xf2, 0x46, 0x44, + 0xb1, 0x17, 0xd1, 0x30, 0xa2, 0x92, 0xf2, 0x54, 0x00, 0xb3, 0x62, 0x19, 0x9d, 0xfa, 0x91, 0xb3, + 0x8d, 0xd0, 0x55, 0x7d, 0xf8, 0xb7, 0x36, 0xbc, 0x2f, 0xef, 0x1f, 0xa2, 0x63, 0xd8, 0x5d, 0xd1, + 0x98, 0xf8, 0x24, 0x26, 0x66, 0x55, 0xf3, 0x1e, 0xe6, 0xbc, 0xb9, 0xb0, 0xb6, 0xab, 0x85, 0xc5, + 0x05, 0x10, 0xbd, 0x81, 0x3d, 0x7a, 0x4b, 0xe7, 0x89, 0x62, 0xf0, 0x64, 0x4c, 0x62, 0x69, 0xfe, + 0xbf, 0xbd, 0xb7, 0x51, 0xe0, 0x5d, 0x05, 0x6f, 0x4d, 0xa1, 0x56, 0xcc, 0x8c, 0x9e, 0xe6, 0x7a, + 0xdd, 0x35, 0x29, 0x15, 0x62, 0xa8, 0x9d, 0x42, 0x50, 0x8e, 0x37, 0x21, 0xd5, 0x4e, 0xd5, 0xb0, + 0x8e, 0x51, 0x0b, 0x76, 0xd7, 0x24, 0x62, 0x64, 0x16, 0xe4, 0x1e, 0x14, 0x79, 0xeb, 0x87, 0x01, + 0xfb, 0x7f, 0x51, 0x00, 0x59, 0x50, 0xf7, 0xa9, 0x9c, 0x47, 0x2c, 0xd4, 0x3a, 0x1a, 0x99, 0x75, + 0xbf, 0x8e, 0x90, 0x07, 0x20, 0x93, 0x99, 0x5a, 0x4e, 0x46, 0xa5, 0x59, 0xd2, 0xce, 0xbd, 0xfe, + 0x47, 0xa1, 0x6d, 0xb7, 0x60, 0x18, 0xf0, 0x38, 0xda, 0xe0, 0x3b, 0x94, 0xad, 0x57, 0xb0, 0xf7, + 0x47, 0x19, 0x35, 0x61, 0x67, 0x49, 0x37, 0xd9, 0x6d, 0x54, 0xa8, 0xf6, 0x75, 0x4d, 0x82, 0x24, + 0x1d, 0xb8, 0x82, 0xd3, 0xe4, 0xb4, 0x74, 0x62, 0xb4, 0x4f, 0xa0, 0xac, 0x76, 0x11, 0x1d, 0x40, + 0xf3, 0x6a, 0x38, 0x3e, 0xf7, 0x3e, 0x8c, 0xdd, 0xc9, 0xa0, 0x3f, 0xbc, 0x18, 0x0e, 0xce, 0x9b, + 0xff, 0xa1, 0x06, 0x00, 0x1e, 0x8c, 0x7a, 0xef, 0x87, 0x6f, 0xc7, 0xbd, 0x51, 0xd3, 0x40, 0x00, + 0x55, 0xb7, 0xdf, 0x1b, 0xf5, 0x70, 0xb3, 0xd4, 0xbe, 0x84, 0xda, 0x3b, 0xf5, 0xe6, 0xd4, 0xcd, + 0xd1, 0x29, 0x80, 0x7a, 0x7a, 0x1e, 0x17, 0x3e, 0x95, 0xa6, 0xa1, 0xc7, 0x7c, 0xbc, 0x65, 0x4c, + 0x5c, 0x0b, 0xb3, 0x48, 0x9e, 0x7d, 0x35, 0xe0, 0xd1, 0x5c, 0xac, 0xee, 0xa3, 0xcf, 0x1a, 0xc5, + 0x07, 0x26, 0xca, 0xfe, 0x89, 0xf1, 0xf1, 0x24, 0x03, 0x2d, 0x44, 0x40, 0xf8, 0xc2, 0x16, 0xd1, + 0xc2, 0x59, 0x50, 0xae, 0x97, 0xc3, 0x49, 0x4b, 0x24, 0x64, 0xf2, 0xce, 0x7f, 0xe1, 0x65, 0x16, + 0x7e, 0x2b, 0x1d, 0x5e, 0xa6, 0xad, 0xfd, 0x40, 0x24, 0xbe, 0xed, 0x66, 0x5f, 0xb9, 0xee, 0x7e, + 0xcf, 0x2b, 0x53, 0x5d, 0x99, 0x66, 0x95, 0xe9, 0x75, 0x77, 0x56, 0xd5, 0xc4, 0xc7, 0x3f, 0x03, + 0x00, 0x00, 0xff, 0xff, 0x53, 0xdb, 0x51, 0xa6, 0x6f, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/result_set.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/result_set.pb.go new file mode 100644 index 0000000..7172f9d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/result_set.pb.go @@ -0,0 +1,529 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/result_set.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Results from [Read][google.spanner.v1.Spanner.Read] or +// [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. +type ResultSet struct { + // Metadata about the result set, such as row type information. 
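As a sketch of how a result set is consumed once received (assuming `rs` is a *ResultSet and `fmt` is imported): each row is a plain structpb ListValue whose positions line up with the fields of `metadata.row_type`:

    for r, row := range rs.GetRows() {
        for c, v := range row.GetValues() {
            // The name and type of column c are described by rs.GetMetadata().RowType.
            fmt.Printf("row %d, col %d: %v\n", r, c, v)
        }
    }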
+ Metadata *ResultSetMetadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + // Each element in `rows` is a row whose format is defined by + // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. The ith + // element in each row matches the ith field in + // [metadata.row_type][google.spanner.v1.ResultSetMetadata.row_type]. Elements + // are encoded based on type as described [here][google.spanner.v1.TypeCode]. + Rows []*_struct.ListValue `protobuf:"bytes,2,rep,name=rows,proto3" json:"rows,omitempty"` + // Query plan and execution statistics for the SQL statement that + // produced this result set. These can be requested by setting + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + // DML statements always produce stats containing the number of rows + // modified, unless executed using the + // [ExecuteSqlRequest.QueryMode.PLAN][google.spanner.v1.ExecuteSqlRequest.QueryMode.PLAN] + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + // Other fields may or may not be populated, based on the + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode]. + Stats *ResultSetStats `protobuf:"bytes,3,opt,name=stats,proto3" json:"stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResultSet) Reset() { *m = ResultSet{} } +func (m *ResultSet) String() string { return proto.CompactTextString(m) } +func (*ResultSet) ProtoMessage() {} +func (*ResultSet) Descriptor() ([]byte, []int) { + return fileDescriptor_result_set_643b079f04352bdc, []int{0} +} +func (m *ResultSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResultSet.Unmarshal(m, b) +} +func (m *ResultSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResultSet.Marshal(b, m, deterministic) +} +func (dst *ResultSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResultSet.Merge(dst, src) +} +func (m *ResultSet) XXX_Size() int { + return xxx_messageInfo_ResultSet.Size(m) +} +func (m *ResultSet) XXX_DiscardUnknown() { + xxx_messageInfo_ResultSet.DiscardUnknown(m) +} + +var xxx_messageInfo_ResultSet proto.InternalMessageInfo + +func (m *ResultSet) GetMetadata() *ResultSetMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *ResultSet) GetRows() []*_struct.ListValue { + if m != nil { + return m.Rows + } + return nil +} + +func (m *ResultSet) GetStats() *ResultSetStats { + if m != nil { + return m.Stats + } + return nil +} + +// Partial results from a streaming read or SQL query. Streaming reads and +// SQL queries better tolerate large result sets, large rows, and large +// values, but are a little trickier to consume. +type PartialResultSet struct { + // Metadata about the result set, such as row type information. + // Only present in the first response. + Metadata *ResultSetMetadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + // A streamed result set consists of a stream of values, which might + // be split into many `PartialResultSet` messages to accommodate + // large rows and/or large values. Every N complete values defines a + // row, where N is equal to the number of entries in + // [metadata.row_type.fields][google.spanner.v1.StructType.fields]. + // + // Most values are encoded based on type as described + // [here][google.spanner.v1.TypeCode]. 
+ // + // It is possible that the last value in values is "chunked", + // meaning that the rest of the value is sent in subsequent + // `PartialResultSet`(s). This is denoted by the + // [chunked_value][google.spanner.v1.PartialResultSet.chunked_value] field. + // Two or more chunked values can be merged to form a complete value as + // follows: + // + // * `bool/number/null`: cannot be chunked + // * `string`: concatenate the strings + // * `list`: concatenate the lists. If the last element in a list is a + // `string`, `list`, or `object`, merge it with the first element in + // the next list by applying these rules recursively. + // * `object`: concatenate the (field name, field value) pairs. If a + // field name is duplicated, then apply these rules recursively + // to merge the field values. + // + // Some examples of merging: + // + // # Strings are concatenated. + // "foo", "bar" => "foobar" + // + // # Lists of non-strings are concatenated. + // [2, 3], [4] => [2, 3, 4] + // + // # Lists are concatenated, but the last and first elements are merged + // # because they are strings. + // ["a", "b"], ["c", "d"] => ["a", "bc", "d"] + // + // # Lists are concatenated, but the last and first elements are merged + // # because they are lists. Recursively, the last and first elements + // # of the inner lists are merged because they are strings. + // ["a", ["b", "c"]], [["d"], "e"] => ["a", ["b", "cd"], "e"] + // + // # Non-overlapping object fields are combined. + // {"a": "1"}, {"b": "2"} => {"a": "1", "b": 2"} + // + // # Overlapping object fields are merged. + // {"a": "1"}, {"a": "2"} => {"a": "12"} + // + // # Examples of merging objects containing lists of strings. + // {"a": ["1"]}, {"a": ["2"]} => {"a": ["12"]} + // + // For a more complete example, suppose a streaming SQL query is + // yielding a result set whose rows contain a single string + // field. The following `PartialResultSet`s might be yielded: + // + // { + // "metadata": { ... } + // "values": ["Hello", "W"] + // "chunked_value": true + // "resume_token": "Af65..." + // } + // { + // "values": ["orl"] + // "chunked_value": true + // "resume_token": "Bqp2..." + // } + // { + // "values": ["d"] + // "resume_token": "Zx1B..." + // } + // + // This sequence of `PartialResultSet`s encodes two rows, one + // containing the field value `"Hello"`, and a second containing the + // field value `"World" = "W" + "orl" + "d"`. + Values []*_struct.Value `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` + // If true, then the final value in + // [values][google.spanner.v1.PartialResultSet.values] is chunked, and must be + // combined with more values from subsequent `PartialResultSet`s to obtain a + // complete field value. + ChunkedValue bool `protobuf:"varint,3,opt,name=chunked_value,json=chunkedValue,proto3" json:"chunked_value,omitempty"` + // Streaming calls might be interrupted for a variety of reasons, such + // as TCP connection loss. If this occurs, the stream of results can + // be resumed by re-sending the original request and including + // `resume_token`. Note that executing any other transaction in the + // same session invalidates the token. + ResumeToken []byte `protobuf:"bytes,4,opt,name=resume_token,json=resumeToken,proto3" json:"resume_token,omitempty"` + // Query plan and execution statistics for the statement that produced this + // streaming result set. 
These can be requested by setting + // [ExecuteSqlRequest.query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] + // and are sent only once with the last response in the stream. This field + // will also be present in the last response for DML statements. + Stats *ResultSetStats `protobuf:"bytes,5,opt,name=stats,proto3" json:"stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartialResultSet) Reset() { *m = PartialResultSet{} } +func (m *PartialResultSet) String() string { return proto.CompactTextString(m) } +func (*PartialResultSet) ProtoMessage() {} +func (*PartialResultSet) Descriptor() ([]byte, []int) { + return fileDescriptor_result_set_643b079f04352bdc, []int{1} +} +func (m *PartialResultSet) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartialResultSet.Unmarshal(m, b) +} +func (m *PartialResultSet) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartialResultSet.Marshal(b, m, deterministic) +} +func (dst *PartialResultSet) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartialResultSet.Merge(dst, src) +} +func (m *PartialResultSet) XXX_Size() int { + return xxx_messageInfo_PartialResultSet.Size(m) +} +func (m *PartialResultSet) XXX_DiscardUnknown() { + xxx_messageInfo_PartialResultSet.DiscardUnknown(m) +} + +var xxx_messageInfo_PartialResultSet proto.InternalMessageInfo + +func (m *PartialResultSet) GetMetadata() *ResultSetMetadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *PartialResultSet) GetValues() []*_struct.Value { + if m != nil { + return m.Values + } + return nil +} + +func (m *PartialResultSet) GetChunkedValue() bool { + if m != nil { + return m.ChunkedValue + } + return false +} + +func (m *PartialResultSet) GetResumeToken() []byte { + if m != nil { + return m.ResumeToken + } + return nil +} + +func (m *PartialResultSet) GetStats() *ResultSetStats { + if m != nil { + return m.Stats + } + return nil +} + +// Metadata about a [ResultSet][google.spanner.v1.ResultSet] or +// [PartialResultSet][google.spanner.v1.PartialResultSet]. +type ResultSetMetadata struct { + // Indicates the field names and types for the rows in the result + // set. For example, a SQL query like `"SELECT UserId, UserName FROM + // Users"` could return a `row_type` value like: + // + // "fields": [ + // { "name": "UserId", "type": { "code": "INT64" } }, + // { "name": "UserName", "type": { "code": "STRING" } }, + // ] + RowType *StructType `protobuf:"bytes,1,opt,name=row_type,json=rowType,proto3" json:"row_type,omitempty"` + // If the read or SQL query began a transaction as a side-effect, the + // information about the new transaction is yielded here. 
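Editorial aside, not part of the generated file: the chunked-value merging rules documented on PartialResultSet above are easiest to see in code. The sketch below merges a chunked string or list value with its continuation using the ptypes/struct package this file already imports; object merging would follow the same pattern and is omitted, and the function name is invented for illustration.

package example // illustration only; not part of the vendored package

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"
)

// mergeChunk merges the last value of one PartialResultSet with the first
// value of the next one, following the rules documented above:
//   - strings are concatenated,
//   - lists are concatenated, recursively merging the boundary elements when
//     the last element of the first list is itself a string or list.
// bool/number/null values cannot be chunked.
func mergeChunk(prev, next *structpb.Value) (*structpb.Value, error) {
	switch p := prev.Kind.(type) {
	case *structpb.Value_StringValue:
		n, ok := next.Kind.(*structpb.Value_StringValue)
		if !ok {
			return nil, fmt.Errorf("cannot merge string with %T", next.Kind)
		}
		return &structpb.Value{Kind: &structpb.Value_StringValue{StringValue: p.StringValue + n.StringValue}}, nil
	case *structpb.Value_ListValue:
		n, ok := next.Kind.(*structpb.Value_ListValue)
		if !ok {
			return nil, fmt.Errorf("cannot merge list with %T", next.Kind)
		}
		out := append([]*structpb.Value{}, p.ListValue.GetValues()...)
		rest := n.ListValue.GetValues()
		if len(out) > 0 && len(rest) > 0 {
			switch out[len(out)-1].Kind.(type) {
			case *structpb.Value_StringValue, *structpb.Value_ListValue:
				// Merge the boundary elements by applying the rules recursively,
				// e.g. ["a","b"], ["c","d"] => ["a","bc","d"].
				merged, err := mergeChunk(out[len(out)-1], rest[0])
				if err != nil {
					return nil, err
				}
				out[len(out)-1] = merged
				rest = rest[1:]
			}
		}
		return &structpb.Value{Kind: &structpb.Value_ListValue{
			ListValue: &structpb.ListValue{Values: append(out, rest...)},
		}}, nil
	default:
		return nil, fmt.Errorf("value of kind %T cannot be chunked", prev.Kind)
	}
}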
+ Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResultSetMetadata) Reset() { *m = ResultSetMetadata{} } +func (m *ResultSetMetadata) String() string { return proto.CompactTextString(m) } +func (*ResultSetMetadata) ProtoMessage() {} +func (*ResultSetMetadata) Descriptor() ([]byte, []int) { + return fileDescriptor_result_set_643b079f04352bdc, []int{2} +} +func (m *ResultSetMetadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResultSetMetadata.Unmarshal(m, b) +} +func (m *ResultSetMetadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResultSetMetadata.Marshal(b, m, deterministic) +} +func (dst *ResultSetMetadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResultSetMetadata.Merge(dst, src) +} +func (m *ResultSetMetadata) XXX_Size() int { + return xxx_messageInfo_ResultSetMetadata.Size(m) +} +func (m *ResultSetMetadata) XXX_DiscardUnknown() { + xxx_messageInfo_ResultSetMetadata.DiscardUnknown(m) +} + +var xxx_messageInfo_ResultSetMetadata proto.InternalMessageInfo + +func (m *ResultSetMetadata) GetRowType() *StructType { + if m != nil { + return m.RowType + } + return nil +} + +func (m *ResultSetMetadata) GetTransaction() *Transaction { + if m != nil { + return m.Transaction + } + return nil +} + +// Additional statistics about a [ResultSet][google.spanner.v1.ResultSet] or +// [PartialResultSet][google.spanner.v1.PartialResultSet]. +type ResultSetStats struct { + // [QueryPlan][google.spanner.v1.QueryPlan] for the query associated with this + // result. + QueryPlan *QueryPlan `protobuf:"bytes,1,opt,name=query_plan,json=queryPlan,proto3" json:"query_plan,omitempty"` + // Aggregated statistics from the execution of the query. Only present when + // the query is profiled. For example, a query could return the statistics as + // follows: + // + // { + // "rows_returned": "3", + // "elapsed_time": "1.22 secs", + // "cpu_time": "1.19 secs" + // } + QueryStats *_struct.Struct `protobuf:"bytes,2,opt,name=query_stats,json=queryStats,proto3" json:"query_stats,omitempty"` + // The number of rows modified by the DML statement. 
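Editorial aside, not part of the generated file: since ResultSet.rows carries untyped ListValues while the column names live in metadata.row_type, decoding usually starts by zipping the two together. A minimal sketch, assuming the StructType/StructType_Field messages generated elsewhere in this package; the helper name is invented for illustration.

package example // illustration only; not part of the vendored package

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"
	spanner "google.golang.org/genproto/googleapis/spanner/v1"
)

// rowsAsMaps pairs each row of a ResultSet with the field names declared in
// metadata.row_type, as documented above for ResultSetMetadata.
func rowsAsMaps(rs *spanner.ResultSet) ([]map[string]*structpb.Value, error) {
	fields := rs.GetMetadata().GetRowType().GetFields()
	var out []map[string]*structpb.Value
	for _, row := range rs.GetRows() {
		vals := row.GetValues()
		if len(vals) != len(fields) {
			return nil, fmt.Errorf("row has %d values, row_type declares %d fields", len(vals), len(fields))
		}
		m := make(map[string]*structpb.Value, len(fields))
		for i, f := range fields {
			// The ith value in each row matches the ith field in row_type.
			m[f.GetName()] = vals[i]
		}
		out = append(out, m)
	}
	return out, nil
}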
+ // + // Types that are valid to be assigned to RowCount: + // *ResultSetStats_RowCountExact + // *ResultSetStats_RowCountLowerBound + RowCount isResultSetStats_RowCount `protobuf_oneof:"row_count"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResultSetStats) Reset() { *m = ResultSetStats{} } +func (m *ResultSetStats) String() string { return proto.CompactTextString(m) } +func (*ResultSetStats) ProtoMessage() {} +func (*ResultSetStats) Descriptor() ([]byte, []int) { + return fileDescriptor_result_set_643b079f04352bdc, []int{3} +} +func (m *ResultSetStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResultSetStats.Unmarshal(m, b) +} +func (m *ResultSetStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResultSetStats.Marshal(b, m, deterministic) +} +func (dst *ResultSetStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResultSetStats.Merge(dst, src) +} +func (m *ResultSetStats) XXX_Size() int { + return xxx_messageInfo_ResultSetStats.Size(m) +} +func (m *ResultSetStats) XXX_DiscardUnknown() { + xxx_messageInfo_ResultSetStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ResultSetStats proto.InternalMessageInfo + +func (m *ResultSetStats) GetQueryPlan() *QueryPlan { + if m != nil { + return m.QueryPlan + } + return nil +} + +func (m *ResultSetStats) GetQueryStats() *_struct.Struct { + if m != nil { + return m.QueryStats + } + return nil +} + +type isResultSetStats_RowCount interface { + isResultSetStats_RowCount() +} + +type ResultSetStats_RowCountExact struct { + RowCountExact int64 `protobuf:"varint,3,opt,name=row_count_exact,json=rowCountExact,proto3,oneof"` +} + +type ResultSetStats_RowCountLowerBound struct { + RowCountLowerBound int64 `protobuf:"varint,4,opt,name=row_count_lower_bound,json=rowCountLowerBound,proto3,oneof"` +} + +func (*ResultSetStats_RowCountExact) isResultSetStats_RowCount() {} + +func (*ResultSetStats_RowCountLowerBound) isResultSetStats_RowCount() {} + +func (m *ResultSetStats) GetRowCount() isResultSetStats_RowCount { + if m != nil { + return m.RowCount + } + return nil +} + +func (m *ResultSetStats) GetRowCountExact() int64 { + if x, ok := m.GetRowCount().(*ResultSetStats_RowCountExact); ok { + return x.RowCountExact + } + return 0 +} + +func (m *ResultSetStats) GetRowCountLowerBound() int64 { + if x, ok := m.GetRowCount().(*ResultSetStats_RowCountLowerBound); ok { + return x.RowCountLowerBound + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
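Editorial aside, not part of the generated file: reading the row_count oneof above takes a type switch over GetRowCount (or the typed getters). A small sketch; the helper name is invented for illustration.

package example // illustration only; not part of the vendored package

import spanner "google.golang.org/genproto/googleapis/spanner/v1"

// rowsModified extracts the DML row count from ResultSetStats. Exactly one of
// the oneof cases is set for DML results; plain queries leave RowCount nil.
func rowsModified(stats *spanner.ResultSetStats) (count int64, lowerBound bool, ok bool) {
	switch c := stats.GetRowCount().(type) {
	case *spanner.ResultSetStats_RowCountExact:
		return c.RowCountExact, false, true
	case *spanner.ResultSetStats_RowCountLowerBound:
		// Partitioned DML reports a lower bound rather than an exact count.
		return c.RowCountLowerBound, true, true
	default:
		return 0, false, false
	}
}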
+func (*ResultSetStats) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ResultSetStats_OneofMarshaler, _ResultSetStats_OneofUnmarshaler, _ResultSetStats_OneofSizer, []interface{}{ + (*ResultSetStats_RowCountExact)(nil), + (*ResultSetStats_RowCountLowerBound)(nil), + } +} + +func _ResultSetStats_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ResultSetStats) + // row_count + switch x := m.RowCount.(type) { + case *ResultSetStats_RowCountExact: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RowCountExact)) + case *ResultSetStats_RowCountLowerBound: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.RowCountLowerBound)) + case nil: + default: + return fmt.Errorf("ResultSetStats.RowCount has unexpected type %T", x) + } + return nil +} + +func _ResultSetStats_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ResultSetStats) + switch tag { + case 3: // row_count.row_count_exact + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.RowCount = &ResultSetStats_RowCountExact{int64(x)} + return true, err + case 4: // row_count.row_count_lower_bound + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.RowCount = &ResultSetStats_RowCountLowerBound{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ResultSetStats_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ResultSetStats) + // row_count + switch x := m.RowCount.(type) { + case *ResultSetStats_RowCountExact: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RowCountExact)) + case *ResultSetStats_RowCountLowerBound: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.RowCountLowerBound)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ResultSet)(nil), "google.spanner.v1.ResultSet") + proto.RegisterType((*PartialResultSet)(nil), "google.spanner.v1.PartialResultSet") + proto.RegisterType((*ResultSetMetadata)(nil), "google.spanner.v1.ResultSetMetadata") + proto.RegisterType((*ResultSetStats)(nil), "google.spanner.v1.ResultSetStats") +} + +func init() { + proto.RegisterFile("google/spanner/v1/result_set.proto", fileDescriptor_result_set_643b079f04352bdc) +} + +var fileDescriptor_result_set_643b079f04352bdc = []byte{ + // 560 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x93, 0xcf, 0x6e, 0x13, 0x3f, + 0x10, 0xc7, 0x7f, 0x4e, 0xda, 0xfe, 0x12, 0x6f, 0x0a, 0xd4, 0x52, 0x69, 0x14, 0x15, 0x94, 0xa6, + 0x1c, 0x72, 0xda, 0x55, 0xda, 0x03, 0x91, 0x7a, 0xa9, 0x52, 0x21, 0x38, 0x14, 0x29, 0x38, 0x51, + 0x0e, 0x28, 0xd2, 0xca, 0xd9, 0x98, 0x25, 0xea, 0xc6, 0xde, 0xda, 0xde, 0x84, 0x3c, 0x00, 0x67, + 0xee, 0x3c, 0x02, 0x0f, 0xc0, 0x43, 0xf0, 0x3a, 0x5c, 0x38, 0x22, 0xff, 0xd9, 0x24, 0xb0, 0x11, + 0x12, 0x12, 0x37, 0xef, 0xcc, 0xe7, 0xeb, 0x99, 0xef, 0x78, 0x16, 0xb6, 0x62, 0xce, 0xe3, 0x84, + 0x06, 0x32, 0x25, 0x8c, 0x51, 0x11, 0x2c, 0x3a, 0x81, 0xa0, 0x32, 0x4b, 0x54, 0x28, 0xa9, 0xf2, + 0x53, 0xc1, 0x15, 0x47, 0x47, 0x96, 0xf1, 0x1d, 0xe3, 0x2f, 0x3a, 0x8d, 0x53, 0x27, 0x23, 0xe9, + 0x2c, 0x20, 0x8c, 0x71, 0x45, 0xd4, 0x8c, 0x33, 0x69, 0x05, 0xeb, 0xac, 0xf9, 0x9a, 0x64, 0xef, + 0x02, 0xa9, 0x44, 
0x16, 0xb9, 0xeb, 0x1a, 0x3b, 0x4a, 0xde, 0x67, 0x54, 0xac, 0xc2, 0x34, 0x21, + 0xcc, 0x31, 0xe7, 0x45, 0x46, 0x09, 0xc2, 0x24, 0x89, 0x74, 0x9d, 0xdf, 0xca, 0x6c, 0x43, 0xab, + 0x94, 0xda, 0x6c, 0xeb, 0x2b, 0x80, 0x55, 0x6c, 0xac, 0x0c, 0xa8, 0x42, 0xd7, 0xb0, 0x32, 0xa7, + 0x8a, 0x4c, 0x89, 0x22, 0x75, 0xd0, 0x04, 0x6d, 0xef, 0xe2, 0x99, 0x5f, 0xb0, 0xe5, 0xaf, 0xf9, + 0xd7, 0x8e, 0xc5, 0x6b, 0x15, 0xf2, 0xe1, 0x9e, 0xe0, 0x4b, 0x59, 0x2f, 0x35, 0xcb, 0x6d, 0xef, + 0xa2, 0x91, 0xab, 0x73, 0x8f, 0xfe, 0xed, 0x4c, 0xaa, 0x11, 0x49, 0x32, 0x8a, 0x0d, 0x87, 0x9e, + 0xc3, 0x7d, 0xa9, 0x88, 0x92, 0xf5, 0xb2, 0x29, 0x77, 0xf6, 0xa7, 0x72, 0x03, 0x0d, 0x62, 0xcb, + 0xb7, 0x3e, 0x96, 0xe0, 0xa3, 0x3e, 0x11, 0x6a, 0x46, 0x92, 0x7f, 0xdb, 0xff, 0xc1, 0x42, 0xb7, + 0x97, 0x3b, 0x78, 0x5c, 0x70, 0x60, 0xbb, 0x77, 0x14, 0x3a, 0x87, 0x87, 0xd1, 0xfb, 0x8c, 0xdd, + 0xd1, 0x69, 0x68, 0x22, 0xc6, 0x47, 0x05, 0xd7, 0x5c, 0xd0, 0xc0, 0xe8, 0x0c, 0xd6, 0xf4, 0xba, + 0xcc, 0x69, 0xa8, 0xf8, 0x1d, 0x65, 0xf5, 0xbd, 0x26, 0x68, 0xd7, 0xb0, 0x67, 0x63, 0x43, 0x1d, + 0xda, 0xcc, 0x61, 0xff, 0x2f, 0xe7, 0xf0, 0x09, 0xc0, 0xa3, 0x82, 0x21, 0xd4, 0x85, 0x15, 0xc1, + 0x97, 0xa1, 0x7e, 0x68, 0x37, 0x88, 0x27, 0x3b, 0x6e, 0x1c, 0x98, 0x85, 0x1b, 0xae, 0x52, 0x8a, + 0xff, 0x17, 0x7c, 0xa9, 0x0f, 0xe8, 0x1a, 0x7a, 0x5b, 0x3b, 0x54, 0x2f, 0x19, 0xf1, 0xd3, 0x1d, + 0xe2, 0xe1, 0x86, 0xc2, 0xdb, 0x92, 0xd6, 0x77, 0x00, 0x1f, 0xfc, 0xda, 0x2b, 0xba, 0x82, 0x70, + 0xb3, 0xbc, 0xae, 0xa1, 0xd3, 0x1d, 0x77, 0xbe, 0xd1, 0x50, 0x3f, 0x21, 0x0c, 0x57, 0xef, 0xf3, + 0x23, 0xea, 0x42, 0xcf, 0x8a, 0xed, 0x80, 0x6c, 0x47, 0x27, 0x85, 0x77, 0xb1, 0x66, 0xb0, 0x2d, + 0x64, 0xcb, 0xb6, 0xe1, 0x43, 0x3d, 0x85, 0x88, 0x67, 0x4c, 0x85, 0xf4, 0x03, 0x89, 0x94, 0x79, + 0x9e, 0xf2, 0xab, 0xff, 0xf0, 0xa1, 0xe0, 0xcb, 0x1b, 0x1d, 0x7f, 0xa1, 0xc3, 0xe8, 0x12, 0x1e, + 0x6f, 0xc8, 0x84, 0x2f, 0xa9, 0x08, 0x27, 0x3c, 0x63, 0x53, 0xf3, 0x54, 0x9a, 0x47, 0x39, 0x7f, + 0xab, 0x93, 0x3d, 0x9d, 0xeb, 0x79, 0xb0, 0xba, 0x16, 0xf5, 0x3e, 0x03, 0x78, 0x1c, 0xf1, 0x79, + 0xd1, 0x54, 0x6f, 0x33, 0x8c, 0xbe, 0xee, 0xb5, 0x0f, 0xde, 0x76, 0x1d, 0x14, 0xf3, 0x84, 0xb0, + 0xd8, 0xe7, 0x22, 0x0e, 0x62, 0xca, 0x8c, 0x93, 0xc0, 0xa6, 0x48, 0x3a, 0x93, 0x5b, 0x7f, 0xec, + 0x95, 0x3b, 0xfe, 0x00, 0xe0, 0x4b, 0xe9, 0xe4, 0xa5, 0x55, 0xdf, 0x24, 0x3c, 0x9b, 0xfa, 0x03, + 0x57, 0x68, 0xd4, 0xf9, 0x96, 0x67, 0xc6, 0x26, 0x33, 0x76, 0x99, 0xf1, 0xa8, 0x33, 0x39, 0x30, + 0x77, 0x5f, 0xfe, 0x0c, 0x00, 0x00, 0xff, 0xff, 0x73, 0xdc, 0x50, 0xf9, 0xc8, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/spanner.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/spanner.pb.go new file mode 100644 index 0000000..72705f5 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/spanner.pb.go @@ -0,0 +1,2592 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/spanner/v1/spanner.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import status "google.golang.org/genproto/googleapis/rpc/status" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Mode in which the statement must be processed. +type ExecuteSqlRequest_QueryMode int32 + +const ( + // The default mode. Only the statement results are returned. + ExecuteSqlRequest_NORMAL ExecuteSqlRequest_QueryMode = 0 + // This mode returns only the query plan, without any results or + // execution statistics information. + ExecuteSqlRequest_PLAN ExecuteSqlRequest_QueryMode = 1 + // This mode returns both the query plan and the execution statistics along + // with the results. + ExecuteSqlRequest_PROFILE ExecuteSqlRequest_QueryMode = 2 +) + +var ExecuteSqlRequest_QueryMode_name = map[int32]string{ + 0: "NORMAL", + 1: "PLAN", + 2: "PROFILE", +} +var ExecuteSqlRequest_QueryMode_value = map[string]int32{ + "NORMAL": 0, + "PLAN": 1, + "PROFILE": 2, +} + +func (x ExecuteSqlRequest_QueryMode) String() string { + return proto.EnumName(ExecuteSqlRequest_QueryMode_name, int32(x)) +} +func (ExecuteSqlRequest_QueryMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{6, 0} +} + +// The request for [CreateSession][google.spanner.v1.Spanner.CreateSession]. +type CreateSessionRequest struct { + // Required. The database in which the new session is created. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // The session to create. 
+ Session *Session `protobuf:"bytes,2,opt,name=session,proto3" json:"session,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateSessionRequest) Reset() { *m = CreateSessionRequest{} } +func (m *CreateSessionRequest) String() string { return proto.CompactTextString(m) } +func (*CreateSessionRequest) ProtoMessage() {} +func (*CreateSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{0} +} +func (m *CreateSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateSessionRequest.Unmarshal(m, b) +} +func (m *CreateSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateSessionRequest.Marshal(b, m, deterministic) +} +func (dst *CreateSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateSessionRequest.Merge(dst, src) +} +func (m *CreateSessionRequest) XXX_Size() int { + return xxx_messageInfo_CreateSessionRequest.Size(m) +} +func (m *CreateSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateSessionRequest proto.InternalMessageInfo + +func (m *CreateSessionRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *CreateSessionRequest) GetSession() *Session { + if m != nil { + return m.Session + } + return nil +} + +// A session in the Cloud Spanner API. +type Session struct { + // The name of the session. This is always system-assigned; values provided + // when creating a session are ignored. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The labels for the session. + // + // * Label keys must be between 1 and 63 characters long and must conform to + // the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. + // * Label values must be between 0 and 63 characters long and must conform + // to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. + // * No more than 64 labels can be associated with a given session. + // + // See https://goo.gl/xmQnxf for more information on and examples of labels. + Labels map[string]string `protobuf:"bytes,2,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Output only. The timestamp when the session is created. + CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"` + // Output only. The approximate timestamp when the session is last used. It is + // typically earlier than the actual last use time. 
+ ApproximateLastUseTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=approximate_last_use_time,json=approximateLastUseTime,proto3" json:"approximate_last_use_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Session) Reset() { *m = Session{} } +func (m *Session) String() string { return proto.CompactTextString(m) } +func (*Session) ProtoMessage() {} +func (*Session) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{1} +} +func (m *Session) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Session.Unmarshal(m, b) +} +func (m *Session) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Session.Marshal(b, m, deterministic) +} +func (dst *Session) XXX_Merge(src proto.Message) { + xxx_messageInfo_Session.Merge(dst, src) +} +func (m *Session) XXX_Size() int { + return xxx_messageInfo_Session.Size(m) +} +func (m *Session) XXX_DiscardUnknown() { + xxx_messageInfo_Session.DiscardUnknown(m) +} + +var xxx_messageInfo_Session proto.InternalMessageInfo + +func (m *Session) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Session) GetLabels() map[string]string { + if m != nil { + return m.Labels + } + return nil +} + +func (m *Session) GetCreateTime() *timestamp.Timestamp { + if m != nil { + return m.CreateTime + } + return nil +} + +func (m *Session) GetApproximateLastUseTime() *timestamp.Timestamp { + if m != nil { + return m.ApproximateLastUseTime + } + return nil +} + +// The request for [GetSession][google.spanner.v1.Spanner.GetSession]. +type GetSessionRequest struct { + // Required. The name of the session to retrieve. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSessionRequest) Reset() { *m = GetSessionRequest{} } +func (m *GetSessionRequest) String() string { return proto.CompactTextString(m) } +func (*GetSessionRequest) ProtoMessage() {} +func (*GetSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{2} +} +func (m *GetSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSessionRequest.Unmarshal(m, b) +} +func (m *GetSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSessionRequest.Marshal(b, m, deterministic) +} +func (dst *GetSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSessionRequest.Merge(dst, src) +} +func (m *GetSessionRequest) XXX_Size() int { + return xxx_messageInfo_GetSessionRequest.Size(m) +} +func (m *GetSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSessionRequest proto.InternalMessageInfo + +func (m *GetSessionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [ListSessions][google.spanner.v1.Spanner.ListSessions]. +type ListSessionsRequest struct { + // Required. The database in which to list sessions. + Database string `protobuf:"bytes,1,opt,name=database,proto3" json:"database,omitempty"` + // Number of sessions to be returned in the response. If 0 or less, defaults + // to the server's maximum allowed page size. 
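Editorial aside, not part of the generated file: as a concrete illustration of the session fields documented above, a CreateSessionRequest could be populated as follows. The database path and label values are placeholders.

package example // illustration only; not part of the vendored package

import spanner "google.golang.org/genproto/googleapis/spanner/v1"

// newCreateSessionRequest builds a CreateSessionRequest whose labels obey the
// constraints documented above (lower-case keys matching
// [a-z]([-a-z0-9]*[a-z0-9])?, at most 64 labels per session).
func newCreateSessionRequest(database string) *spanner.CreateSessionRequest {
	return &spanner.CreateSessionRequest{
		// e.g. "projects/<p>/instances/<i>/databases/<d>" (placeholder).
		Database: database,
		Session: &spanner.Session{
			// Name is left empty: it is always system-assigned on creation.
			Labels: map[string]string{"env": "dev", "team": "workflow"},
		},
	}
}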
+ PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // If non-empty, `page_token` should contain a + // [next_page_token][google.spanner.v1.ListSessionsResponse.next_page_token] + // from a previous + // [ListSessionsResponse][google.spanner.v1.ListSessionsResponse]. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // An expression for filtering the results of the request. Filter rules are + // case insensitive. The fields eligible for filtering are: + // + // * `labels.key` where key is the name of a label + // + // Some examples of using filters are: + // + // * `labels.env:*` --> The session has the label "env". + // * `labels.env:dev` --> The session has the label "env" and the value of + // the label contains the string "dev". + Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionsRequest) Reset() { *m = ListSessionsRequest{} } +func (m *ListSessionsRequest) String() string { return proto.CompactTextString(m) } +func (*ListSessionsRequest) ProtoMessage() {} +func (*ListSessionsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{3} +} +func (m *ListSessionsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionsRequest.Unmarshal(m, b) +} +func (m *ListSessionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionsRequest.Marshal(b, m, deterministic) +} +func (dst *ListSessionsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionsRequest.Merge(dst, src) +} +func (m *ListSessionsRequest) XXX_Size() int { + return xxx_messageInfo_ListSessionsRequest.Size(m) +} +func (m *ListSessionsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionsRequest proto.InternalMessageInfo + +func (m *ListSessionsRequest) GetDatabase() string { + if m != nil { + return m.Database + } + return "" +} + +func (m *ListSessionsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListSessionsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListSessionsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +// The response for [ListSessions][google.spanner.v1.Spanner.ListSessions]. +type ListSessionsResponse struct { + // The list of requested sessions. + Sessions []*Session `protobuf:"bytes,1,rep,name=sessions,proto3" json:"sessions,omitempty"` + // `next_page_token` can be sent in a subsequent + // [ListSessions][google.spanner.v1.Spanner.ListSessions] call to fetch more + // of the matching sessions. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListSessionsResponse) Reset() { *m = ListSessionsResponse{} } +func (m *ListSessionsResponse) String() string { return proto.CompactTextString(m) } +func (*ListSessionsResponse) ProtoMessage() {} +func (*ListSessionsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{4} +} +func (m *ListSessionsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListSessionsResponse.Unmarshal(m, b) +} +func (m *ListSessionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListSessionsResponse.Marshal(b, m, deterministic) +} +func (dst *ListSessionsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListSessionsResponse.Merge(dst, src) +} +func (m *ListSessionsResponse) XXX_Size() int { + return xxx_messageInfo_ListSessionsResponse.Size(m) +} +func (m *ListSessionsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListSessionsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListSessionsResponse proto.InternalMessageInfo + +func (m *ListSessionsResponse) GetSessions() []*Session { + if m != nil { + return m.Sessions + } + return nil +} + +func (m *ListSessionsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// The request for [DeleteSession][google.spanner.v1.Spanner.DeleteSession]. +type DeleteSessionRequest struct { + // Required. The name of the session to delete. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteSessionRequest) Reset() { *m = DeleteSessionRequest{} } +func (m *DeleteSessionRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteSessionRequest) ProtoMessage() {} +func (*DeleteSessionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{5} +} +func (m *DeleteSessionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteSessionRequest.Unmarshal(m, b) +} +func (m *DeleteSessionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteSessionRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteSessionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteSessionRequest.Merge(dst, src) +} +func (m *DeleteSessionRequest) XXX_Size() int { + return xxx_messageInfo_DeleteSessionRequest.Size(m) +} +func (m *DeleteSessionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteSessionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteSessionRequest proto.InternalMessageInfo + +func (m *DeleteSessionRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The request for [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and +// [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql]. +type ExecuteSqlRequest struct { + // Required. The session in which the SQL query should be performed. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // The transaction to use. If none is provided, the default is a + // temporary read-only transaction with strong concurrency. + // + // The transaction to use. 
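Editorial aside, not part of the generated file: the page_token/next_page_token pair documented above drives the usual list-paging loop. A minimal sketch, assuming the SpannerClient interface that protoc-gen-go emits further down in this file; the filter string mirrors the example in the comments above.

package example // illustration only; not part of the vendored package

import (
	"context"

	spanner "google.golang.org/genproto/googleapis/spanner/v1"
)

// listAllSessions pages through ListSessions until next_page_token comes back
// empty, as described in the ListSessionsResponse comments above.
func listAllSessions(ctx context.Context, c spanner.SpannerClient, database string) ([]*spanner.Session, error) {
	var all []*spanner.Session
	req := &spanner.ListSessionsRequest{
		Database: database,
		PageSize: 100,              // 0 or less falls back to the server default
		Filter:   "labels.env:dev", // optional label filter, see above
	}
	for {
		resp, err := c.ListSessions(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetSessions()...)
		if resp.GetNextPageToken() == "" {
			return all, nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}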
+ // + // For queries, if none is provided, the default is a temporary read-only + // transaction with strong concurrency. + // + // Standard DML statements require a ReadWrite transaction. Single-use + // transactions are not supported (to avoid replay). The caller must + // either supply an existing transaction ID or begin a new transaction. + // + // Partitioned DML requires an existing PartitionedDml transaction ID. + Transaction *TransactionSelector `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // Required. The SQL string. + Sql string `protobuf:"bytes,3,opt,name=sql,proto3" json:"sql,omitempty"` + // The SQL string can contain parameter placeholders. A parameter + // placeholder consists of `'@'` followed by the parameter + // name. Parameter names consist of any combination of letters, + // numbers, and underscores. + // + // Parameters can appear anywhere that a literal value is expected. The same + // parameter name can be used more than once, for example: + // `"WHERE id > @msg_id AND id < @msg_id + 100"` + // + // It is an error to execute an SQL statement with unbound parameters. + // + // Parameter values are specified using `params`, which is a JSON + // object whose keys are parameter names, and whose values are the + // corresponding parameter values. + Params *_struct.Struct `protobuf:"bytes,4,opt,name=params,proto3" json:"params,omitempty"` + // It is not always possible for Cloud Spanner to infer the right SQL type + // from a JSON value. For example, values of type `BYTES` and values + // of type `STRING` both appear in + // [params][google.spanner.v1.ExecuteSqlRequest.params] as JSON strings. + // + // In these cases, `param_types` can be used to specify the exact + // SQL type for some or all of the SQL statement parameters. See the + // definition of [Type][google.spanner.v1.Type] for more information + // about SQL types. + ParamTypes map[string]*Type `protobuf:"bytes,5,rep,name=param_types,json=paramTypes,proto3" json:"param_types,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // If this request is resuming a previously interrupted SQL statement + // execution, `resume_token` should be copied from the last + // [PartialResultSet][google.spanner.v1.PartialResultSet] yielded before the + // interruption. Doing this enables the new SQL statement execution to resume + // where the last one left off. The rest of the request parameters must + // exactly match the request that yielded this token. + ResumeToken []byte `protobuf:"bytes,6,opt,name=resume_token,json=resumeToken,proto3" json:"resume_token,omitempty"` + // Used to control the amount of debugging information returned in + // [ResultSetStats][google.spanner.v1.ResultSetStats]. If + // [partition_token][google.spanner.v1.ExecuteSqlRequest.partition_token] is + // set, [query_mode][google.spanner.v1.ExecuteSqlRequest.query_mode] can only + // be set to + // [QueryMode.NORMAL][google.spanner.v1.ExecuteSqlRequest.QueryMode.NORMAL]. + QueryMode ExecuteSqlRequest_QueryMode `protobuf:"varint,7,opt,name=query_mode,json=queryMode,proto3,enum=google.spanner.v1.ExecuteSqlRequest_QueryMode" json:"query_mode,omitempty"` + // If present, results will be restricted to the specified partition + // previously created using PartitionQuery(). There must be an exact + // match for the values of fields common to this message and the + // PartitionQueryRequest message used to create this partition_token. 
+ PartitionToken []byte `protobuf:"bytes,8,opt,name=partition_token,json=partitionToken,proto3" json:"partition_token,omitempty"` + // A per-transaction sequence number used to identify this request. This + // makes each request idempotent such that if the request is received multiple + // times, at most one will succeed. + // + // The sequence number must be monotonically increasing within the + // transaction. If a request arrives for the first time with an out-of-order + // sequence number, the transaction may be aborted. Replays of previously + // handled requests will yield the same response as the first execution. + // + // Required for DML statements. Ignored for queries. + Seqno int64 `protobuf:"varint,9,opt,name=seqno,proto3" json:"seqno,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteSqlRequest) Reset() { *m = ExecuteSqlRequest{} } +func (m *ExecuteSqlRequest) String() string { return proto.CompactTextString(m) } +func (*ExecuteSqlRequest) ProtoMessage() {} +func (*ExecuteSqlRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{6} +} +func (m *ExecuteSqlRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteSqlRequest.Unmarshal(m, b) +} +func (m *ExecuteSqlRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteSqlRequest.Marshal(b, m, deterministic) +} +func (dst *ExecuteSqlRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteSqlRequest.Merge(dst, src) +} +func (m *ExecuteSqlRequest) XXX_Size() int { + return xxx_messageInfo_ExecuteSqlRequest.Size(m) +} +func (m *ExecuteSqlRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteSqlRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteSqlRequest proto.InternalMessageInfo + +func (m *ExecuteSqlRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *ExecuteSqlRequest) GetTransaction() *TransactionSelector { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *ExecuteSqlRequest) GetSql() string { + if m != nil { + return m.Sql + } + return "" +} + +func (m *ExecuteSqlRequest) GetParams() *_struct.Struct { + if m != nil { + return m.Params + } + return nil +} + +func (m *ExecuteSqlRequest) GetParamTypes() map[string]*Type { + if m != nil { + return m.ParamTypes + } + return nil +} + +func (m *ExecuteSqlRequest) GetResumeToken() []byte { + if m != nil { + return m.ResumeToken + } + return nil +} + +func (m *ExecuteSqlRequest) GetQueryMode() ExecuteSqlRequest_QueryMode { + if m != nil { + return m.QueryMode + } + return ExecuteSqlRequest_NORMAL +} + +func (m *ExecuteSqlRequest) GetPartitionToken() []byte { + if m != nil { + return m.PartitionToken + } + return nil +} + +func (m *ExecuteSqlRequest) GetSeqno() int64 { + if m != nil { + return m.Seqno + } + return 0 +} + +// The request for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml] +type ExecuteBatchDmlRequest struct { + // Required. The session in which the DML statements should be performed. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // The transaction to use. A ReadWrite transaction is required. Single-use + // transactions are not supported (to avoid replay). The caller must either + // supply an existing transaction ID or begin a new transaction. 
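Editorial aside, not part of the generated file: tying the ExecuteSqlRequest fields together, the sketch below binds the @msg_id placeholder described above through params/param_types and asks for profiling via query_mode. Table and column names are invented, and the Type/TypeCode messages are assumed from type.pb.go in this same package.

package example // illustration only; not part of the vendored package

import (
	"strconv"

	structpb "github.com/golang/protobuf/ptypes/struct"
	spanner "google.golang.org/genproto/googleapis/spanner/v1"
)

// newProfiledQuery builds an ExecuteSqlRequest using the documented parameter
// placeholder syntax. Keys in `params` omit the leading '@'; INT64 values
// travel as decimal strings (per the TypeCode documentation), which is why
// `param_types` pins the SQL type.
func newProfiledQuery(session string, msgID int64) *spanner.ExecuteSqlRequest {
	return &spanner.ExecuteSqlRequest{
		Session: session,
		Sql:     "SELECT id, body FROM Messages WHERE id > @msg_id AND id < @msg_id + 100",
		Params: &structpb.Struct{
			Fields: map[string]*structpb.Value{
				"msg_id": {Kind: &structpb.Value_StringValue{StringValue: strconv.FormatInt(msgID, 10)}},
			},
		},
		ParamTypes: map[string]*spanner.Type{
			"msg_id": {Code: spanner.TypeCode_INT64},
		},
		// PROFILE returns the query plan and execution statistics along with
		// the results; Seqno is omitted because it is ignored for queries.
		QueryMode: spanner.ExecuteSqlRequest_PROFILE,
	}
}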
+ Transaction *TransactionSelector `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // The list of statements to execute in this batch. Statements are executed + // serially, such that the effects of statement i are visible to statement + // i+1. Each statement must be a DML statement. Execution will stop at the + // first failed statement; the remaining statements will not run. + // + // REQUIRES: statements_size() > 0. + Statements []*ExecuteBatchDmlRequest_Statement `protobuf:"bytes,3,rep,name=statements,proto3" json:"statements,omitempty"` + // A per-transaction sequence number used to identify this request. This is + // used in the same space as the seqno in + // [ExecuteSqlRequest][Spanner.ExecuteSqlRequest]. See more details + // in [ExecuteSqlRequest][Spanner.ExecuteSqlRequest]. + Seqno int64 `protobuf:"varint,4,opt,name=seqno,proto3" json:"seqno,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteBatchDmlRequest) Reset() { *m = ExecuteBatchDmlRequest{} } +func (m *ExecuteBatchDmlRequest) String() string { return proto.CompactTextString(m) } +func (*ExecuteBatchDmlRequest) ProtoMessage() {} +func (*ExecuteBatchDmlRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{7} +} +func (m *ExecuteBatchDmlRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteBatchDmlRequest.Unmarshal(m, b) +} +func (m *ExecuteBatchDmlRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteBatchDmlRequest.Marshal(b, m, deterministic) +} +func (dst *ExecuteBatchDmlRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteBatchDmlRequest.Merge(dst, src) +} +func (m *ExecuteBatchDmlRequest) XXX_Size() int { + return xxx_messageInfo_ExecuteBatchDmlRequest.Size(m) +} +func (m *ExecuteBatchDmlRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteBatchDmlRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteBatchDmlRequest proto.InternalMessageInfo + +func (m *ExecuteBatchDmlRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *ExecuteBatchDmlRequest) GetTransaction() *TransactionSelector { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *ExecuteBatchDmlRequest) GetStatements() []*ExecuteBatchDmlRequest_Statement { + if m != nil { + return m.Statements + } + return nil +} + +func (m *ExecuteBatchDmlRequest) GetSeqno() int64 { + if m != nil { + return m.Seqno + } + return 0 +} + +// A single DML statement. +type ExecuteBatchDmlRequest_Statement struct { + // Required. The DML string. + Sql string `protobuf:"bytes,1,opt,name=sql,proto3" json:"sql,omitempty"` + // The DML string can contain parameter placeholders. A parameter + // placeholder consists of `'@'` followed by the parameter + // name. Parameter names consist of any combination of letters, + // numbers, and underscores. + // + // Parameters can appear anywhere that a literal value is expected. The + // same parameter name can be used more than once, for example: + // `"WHERE id > @msg_id AND id < @msg_id + 100"` + // + // It is an error to execute an SQL statement with unbound parameters. + // + // Parameter values are specified using `params`, which is a JSON + // object whose keys are parameter names, and whose values are the + // corresponding parameter values. 
+ Params *_struct.Struct `protobuf:"bytes,2,opt,name=params,proto3" json:"params,omitempty"` + // It is not always possible for Cloud Spanner to infer the right SQL type + // from a JSON value. For example, values of type `BYTES` and values + // of type `STRING` both appear in [params][google.spanner.v1.ExecuteBatchDmlRequest.Statement.params] as JSON strings. + // + // In these cases, `param_types` can be used to specify the exact + // SQL type for some or all of the SQL statement parameters. See the + // definition of [Type][google.spanner.v1.Type] for more information + // about SQL types. + ParamTypes map[string]*Type `protobuf:"bytes,3,rep,name=param_types,json=paramTypes,proto3" json:"param_types,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteBatchDmlRequest_Statement) Reset() { *m = ExecuteBatchDmlRequest_Statement{} } +func (m *ExecuteBatchDmlRequest_Statement) String() string { return proto.CompactTextString(m) } +func (*ExecuteBatchDmlRequest_Statement) ProtoMessage() {} +func (*ExecuteBatchDmlRequest_Statement) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{7, 0} +} +func (m *ExecuteBatchDmlRequest_Statement) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteBatchDmlRequest_Statement.Unmarshal(m, b) +} +func (m *ExecuteBatchDmlRequest_Statement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteBatchDmlRequest_Statement.Marshal(b, m, deterministic) +} +func (dst *ExecuteBatchDmlRequest_Statement) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteBatchDmlRequest_Statement.Merge(dst, src) +} +func (m *ExecuteBatchDmlRequest_Statement) XXX_Size() int { + return xxx_messageInfo_ExecuteBatchDmlRequest_Statement.Size(m) +} +func (m *ExecuteBatchDmlRequest_Statement) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteBatchDmlRequest_Statement.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteBatchDmlRequest_Statement proto.InternalMessageInfo + +func (m *ExecuteBatchDmlRequest_Statement) GetSql() string { + if m != nil { + return m.Sql + } + return "" +} + +func (m *ExecuteBatchDmlRequest_Statement) GetParams() *_struct.Struct { + if m != nil { + return m.Params + } + return nil +} + +func (m *ExecuteBatchDmlRequest_Statement) GetParamTypes() map[string]*Type { + if m != nil { + return m.ParamTypes + } + return nil +} + +// The response for [ExecuteBatchDml][google.spanner.v1.Spanner.ExecuteBatchDml]. Contains a list +// of [ResultSet][google.spanner.v1.ResultSet], one for each DML statement that has successfully executed. +// If a statement fails, the error is returned as part of the response payload. +// Clients can determine whether all DML statements have run successfully, or if +// a statement failed, using one of the following approaches: +// +// 1. Check if 'status' field is OkStatus. +// 2. Check if result_sets_size() equals the number of statements in +// [ExecuteBatchDmlRequest][Spanner.ExecuteBatchDmlRequest]. +// +// Example 1: A request with 5 DML statements, all executed successfully. +// Result: A response with 5 ResultSets, one for each statement in the same +// order, and an OK status. +// +// Example 2: A request with 5 DML statements. The 3rd statement has a syntax +// error. 
+// Result: A response with 2 ResultSets, for the first 2 statements that +// run successfully, and a syntax error (INVALID_ARGUMENT) status. From +// result_set_size() client can determine that the 3rd statement has failed. +type ExecuteBatchDmlResponse struct { + // ResultSets, one for each statement in the request that ran successfully, in + // the same order as the statements in the request. Each [ResultSet][google.spanner.v1.ResultSet] will + // not contain any rows. The [ResultSetStats][google.spanner.v1.ResultSetStats] in each [ResultSet][google.spanner.v1.ResultSet] will + // contain the number of rows modified by the statement. + // + // Only the first ResultSet in the response contains a valid + // [ResultSetMetadata][google.spanner.v1.ResultSetMetadata]. + ResultSets []*ResultSet `protobuf:"bytes,1,rep,name=result_sets,json=resultSets,proto3" json:"result_sets,omitempty"` + // If all DML statements are executed successfully, status will be OK. + // Otherwise, the error status of the first failed statement. + Status *status.Status `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExecuteBatchDmlResponse) Reset() { *m = ExecuteBatchDmlResponse{} } +func (m *ExecuteBatchDmlResponse) String() string { return proto.CompactTextString(m) } +func (*ExecuteBatchDmlResponse) ProtoMessage() {} +func (*ExecuteBatchDmlResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{8} +} +func (m *ExecuteBatchDmlResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExecuteBatchDmlResponse.Unmarshal(m, b) +} +func (m *ExecuteBatchDmlResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExecuteBatchDmlResponse.Marshal(b, m, deterministic) +} +func (dst *ExecuteBatchDmlResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExecuteBatchDmlResponse.Merge(dst, src) +} +func (m *ExecuteBatchDmlResponse) XXX_Size() int { + return xxx_messageInfo_ExecuteBatchDmlResponse.Size(m) +} +func (m *ExecuteBatchDmlResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExecuteBatchDmlResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExecuteBatchDmlResponse proto.InternalMessageInfo + +func (m *ExecuteBatchDmlResponse) GetResultSets() []*ResultSet { + if m != nil { + return m.ResultSets + } + return nil +} + +func (m *ExecuteBatchDmlResponse) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +// Options for a PartitionQueryRequest and +// PartitionReadRequest. +type PartitionOptions struct { + // **Note:** This hint is currently ignored by PartitionQuery and + // PartitionRead requests. + // + // The desired data size for each partition generated. The default for this + // option is currently 1 GiB. This is only a hint. The actual size of each + // partition may be smaller or larger than this size request. + PartitionSizeBytes int64 `protobuf:"varint,1,opt,name=partition_size_bytes,json=partitionSizeBytes,proto3" json:"partition_size_bytes,omitempty"` + // **Note:** This hint is currently ignored by PartitionQuery and + // PartitionRead requests. + // + // The desired maximum number of partitions to return. For example, this may + // be set to the number of workers available. The default for this option + // is currently 10,000. The maximum value is currently 200,000. This is only + // a hint. 
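Editorial aside, not part of the generated file: the two ways of detecting a partially-failed batch described above can be checked directly against the response message. A sketch with an invented helper name; it assumes the status code enum from google.golang.org/grpc/codes, where OK is zero.

package example // illustration only; not part of the vendored package

import (
	"fmt"

	spanner "google.golang.org/genproto/googleapis/spanner/v1"
	"google.golang.org/grpc/codes"
)

// checkBatchDml applies both approaches documented above: verify the status
// is OK, or verify there is one result set per submitted statement.
func checkBatchDml(resp *spanner.ExecuteBatchDmlResponse, statements int) error {
	if c := codes.Code(resp.GetStatus().GetCode()); c != codes.OK {
		// Statements before the failing one still produced result sets, so the
		// 1-based index of the failing statement is len(result_sets)+1.
		return fmt.Errorf("statement %d failed: %s (%s)",
			len(resp.GetResultSets())+1, resp.GetStatus().GetMessage(), c)
	}
	if got := len(resp.GetResultSets()); got != statements {
		return fmt.Errorf("expected %d result sets, got %d", statements, got)
	}
	return nil
}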
The actual number of partitions returned may be smaller or larger + // than this maximum count request. + MaxPartitions int64 `protobuf:"varint,2,opt,name=max_partitions,json=maxPartitions,proto3" json:"max_partitions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionOptions) Reset() { *m = PartitionOptions{} } +func (m *PartitionOptions) String() string { return proto.CompactTextString(m) } +func (*PartitionOptions) ProtoMessage() {} +func (*PartitionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{9} +} +func (m *PartitionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionOptions.Unmarshal(m, b) +} +func (m *PartitionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionOptions.Marshal(b, m, deterministic) +} +func (dst *PartitionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionOptions.Merge(dst, src) +} +func (m *PartitionOptions) XXX_Size() int { + return xxx_messageInfo_PartitionOptions.Size(m) +} +func (m *PartitionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionOptions proto.InternalMessageInfo + +func (m *PartitionOptions) GetPartitionSizeBytes() int64 { + if m != nil { + return m.PartitionSizeBytes + } + return 0 +} + +func (m *PartitionOptions) GetMaxPartitions() int64 { + if m != nil { + return m.MaxPartitions + } + return 0 +} + +// The request for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] +type PartitionQueryRequest struct { + // Required. The session used to create the partitions. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Read only snapshot transactions are supported, read/write and single use + // transactions are not. + Transaction *TransactionSelector `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // The query request to generate partitions for. The request will fail if + // the query is not root partitionable. The query plan of a root + // partitionable query has a single distributed union operator. A distributed + // union operator conceptually divides one or more tables into multiple + // splits, remotely evaluates a subquery independently on each split, and + // then unions all results. + // + // This must not contain DML commands, such as INSERT, UPDATE, or + // DELETE. Use + // [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] with a + // PartitionedDml transaction for large, partition-friendly DML operations. + Sql string `protobuf:"bytes,3,opt,name=sql,proto3" json:"sql,omitempty"` + // The SQL query string can contain parameter placeholders. A parameter + // placeholder consists of `'@'` followed by the parameter + // name. Parameter names consist of any combination of letters, + // numbers, and underscores. + // + // Parameters can appear anywhere that a literal value is expected. The same + // parameter name can be used more than once, for example: + // `"WHERE id > @msg_id AND id < @msg_id + 100"` + // + // It is an error to execute an SQL query with unbound parameters. + // + // Parameter values are specified using `params`, which is a JSON + // object whose keys are parameter names, and whose values are the + // corresponding parameter values. 
+ Params *_struct.Struct `protobuf:"bytes,4,opt,name=params,proto3" json:"params,omitempty"` + // It is not always possible for Cloud Spanner to infer the right SQL type + // from a JSON value. For example, values of type `BYTES` and values + // of type `STRING` both appear in + // [params][google.spanner.v1.PartitionQueryRequest.params] as JSON strings. + // + // In these cases, `param_types` can be used to specify the exact + // SQL type for some or all of the SQL query parameters. See the + // definition of [Type][google.spanner.v1.Type] for more information + // about SQL types. + ParamTypes map[string]*Type `protobuf:"bytes,5,rep,name=param_types,json=paramTypes,proto3" json:"param_types,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Additional options that affect how many partitions are created. + PartitionOptions *PartitionOptions `protobuf:"bytes,6,opt,name=partition_options,json=partitionOptions,proto3" json:"partition_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionQueryRequest) Reset() { *m = PartitionQueryRequest{} } +func (m *PartitionQueryRequest) String() string { return proto.CompactTextString(m) } +func (*PartitionQueryRequest) ProtoMessage() {} +func (*PartitionQueryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{10} +} +func (m *PartitionQueryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionQueryRequest.Unmarshal(m, b) +} +func (m *PartitionQueryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionQueryRequest.Marshal(b, m, deterministic) +} +func (dst *PartitionQueryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionQueryRequest.Merge(dst, src) +} +func (m *PartitionQueryRequest) XXX_Size() int { + return xxx_messageInfo_PartitionQueryRequest.Size(m) +} +func (m *PartitionQueryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionQueryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionQueryRequest proto.InternalMessageInfo + +func (m *PartitionQueryRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *PartitionQueryRequest) GetTransaction() *TransactionSelector { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *PartitionQueryRequest) GetSql() string { + if m != nil { + return m.Sql + } + return "" +} + +func (m *PartitionQueryRequest) GetParams() *_struct.Struct { + if m != nil { + return m.Params + } + return nil +} + +func (m *PartitionQueryRequest) GetParamTypes() map[string]*Type { + if m != nil { + return m.ParamTypes + } + return nil +} + +func (m *PartitionQueryRequest) GetPartitionOptions() *PartitionOptions { + if m != nil { + return m.PartitionOptions + } + return nil +} + +// The request for [PartitionRead][google.spanner.v1.Spanner.PartitionRead] +type PartitionReadRequest struct { + // Required. The session used to create the partitions. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Read only snapshot transactions are supported, read/write and single use + // transactions are not. + Transaction *TransactionSelector `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // Required. The name of the table in the database to be read. 
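Editorial aside, not part of the generated file: to make the partition_token round trip concrete, the sketch below builds a PartitionQueryRequest with the partitioning hints documented above and then turns each Partition from the response (the Partition and PartitionResponse messages are defined just below) into an ExecuteSqlRequest whose shared fields match the original request, as the partition_token comments earlier in this file require. Constructing the read-only snapshot TransactionSelector is left to the caller.

package example // illustration only; not part of the vendored package

import spanner "google.golang.org/genproto/googleapis/spanner/v1"

// newPartitionQueryRequest builds the request for a root-partitionable query.
// params/param_types are omitted for brevity; txn must select a read-only
// snapshot transaction, per the field comment above.
func newPartitionQueryRequest(session, sql string, txn *spanner.TransactionSelector) *spanner.PartitionQueryRequest {
	return &spanner.PartitionQueryRequest{
		Session:     session,
		Transaction: txn,
		Sql:         sql, // must not contain DML; see the comment above
		PartitionOptions: &spanner.PartitionOptions{
			PartitionSizeBytes: 1 << 30, // hints only; currently ignored,
			MaxPartitions:      100,     // as noted in the comments above
		},
	}
}

// perPartitionRequests fans a PartitionResponse out into one ExecuteSqlRequest
// per partition. The fields shared with the original PartitionQueryRequest
// must match it exactly for the partition token to be accepted.
func perPartitionRequests(req *spanner.PartitionQueryRequest, resp *spanner.PartitionResponse) []*spanner.ExecuteSqlRequest {
	var out []*spanner.ExecuteSqlRequest
	for _, p := range resp.GetPartitions() {
		out = append(out, &spanner.ExecuteSqlRequest{
			Session:        req.GetSession(),
			Transaction:    req.GetTransaction(),
			Sql:            req.GetSql(),
			Params:         req.GetParams(),
			ParamTypes:     req.GetParamTypes(),
			PartitionToken: p.GetPartitionToken(),
			// query_mode can only be NORMAL when partition_token is set.
			QueryMode: spanner.ExecuteSqlRequest_NORMAL,
		})
	}
	return out
}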
+ Table string `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // If non-empty, the name of an index on + // [table][google.spanner.v1.PartitionReadRequest.table]. This index is used + // instead of the table primary key when interpreting + // [key_set][google.spanner.v1.PartitionReadRequest.key_set] and sorting + // result rows. See [key_set][google.spanner.v1.PartitionReadRequest.key_set] + // for further information. + Index string `protobuf:"bytes,4,opt,name=index,proto3" json:"index,omitempty"` + // The columns of [table][google.spanner.v1.PartitionReadRequest.table] to be + // returned for each row matching this request. + Columns []string `protobuf:"bytes,5,rep,name=columns,proto3" json:"columns,omitempty"` + // Required. `key_set` identifies the rows to be yielded. `key_set` names the + // primary keys of the rows in + // [table][google.spanner.v1.PartitionReadRequest.table] to be yielded, unless + // [index][google.spanner.v1.PartitionReadRequest.index] is present. If + // [index][google.spanner.v1.PartitionReadRequest.index] is present, then + // [key_set][google.spanner.v1.PartitionReadRequest.key_set] instead names + // index keys in [index][google.spanner.v1.PartitionReadRequest.index]. + // + // It is not an error for the `key_set` to name rows that do not + // exist in the database. Read yields nothing for nonexistent rows. + KeySet *KeySet `protobuf:"bytes,6,opt,name=key_set,json=keySet,proto3" json:"key_set,omitempty"` + // Additional options that affect how many partitions are created. + PartitionOptions *PartitionOptions `protobuf:"bytes,9,opt,name=partition_options,json=partitionOptions,proto3" json:"partition_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionReadRequest) Reset() { *m = PartitionReadRequest{} } +func (m *PartitionReadRequest) String() string { return proto.CompactTextString(m) } +func (*PartitionReadRequest) ProtoMessage() {} +func (*PartitionReadRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{11} +} +func (m *PartitionReadRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionReadRequest.Unmarshal(m, b) +} +func (m *PartitionReadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionReadRequest.Marshal(b, m, deterministic) +} +func (dst *PartitionReadRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionReadRequest.Merge(dst, src) +} +func (m *PartitionReadRequest) XXX_Size() int { + return xxx_messageInfo_PartitionReadRequest.Size(m) +} +func (m *PartitionReadRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionReadRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionReadRequest proto.InternalMessageInfo + +func (m *PartitionReadRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *PartitionReadRequest) GetTransaction() *TransactionSelector { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *PartitionReadRequest) GetTable() string { + if m != nil { + return m.Table + } + return "" +} + +func (m *PartitionReadRequest) GetIndex() string { + if m != nil { + return m.Index + } + return "" +} + +func (m *PartitionReadRequest) GetColumns() []string { + if m != nil { + return m.Columns + } + return nil +} + +func (m *PartitionReadRequest) GetKeySet() *KeySet { + if m != nil { + return m.KeySet + } + return nil +} + +func (m 
*PartitionReadRequest) GetPartitionOptions() *PartitionOptions { + if m != nil { + return m.PartitionOptions + } + return nil +} + +// Information returned for each partition returned in a +// PartitionResponse. +type Partition struct { + // This token can be passed to Read, StreamingRead, ExecuteSql, or + // ExecuteStreamingSql requests to restrict the results to those identified by + // this partition token. + PartitionToken []byte `protobuf:"bytes,1,opt,name=partition_token,json=partitionToken,proto3" json:"partition_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Partition) Reset() { *m = Partition{} } +func (m *Partition) String() string { return proto.CompactTextString(m) } +func (*Partition) ProtoMessage() {} +func (*Partition) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{12} +} +func (m *Partition) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Partition.Unmarshal(m, b) +} +func (m *Partition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Partition.Marshal(b, m, deterministic) +} +func (dst *Partition) XXX_Merge(src proto.Message) { + xxx_messageInfo_Partition.Merge(dst, src) +} +func (m *Partition) XXX_Size() int { + return xxx_messageInfo_Partition.Size(m) +} +func (m *Partition) XXX_DiscardUnknown() { + xxx_messageInfo_Partition.DiscardUnknown(m) +} + +var xxx_messageInfo_Partition proto.InternalMessageInfo + +func (m *Partition) GetPartitionToken() []byte { + if m != nil { + return m.PartitionToken + } + return nil +} + +// The response for [PartitionQuery][google.spanner.v1.Spanner.PartitionQuery] +// or [PartitionRead][google.spanner.v1.Spanner.PartitionRead] +type PartitionResponse struct { + // Partitions created by this request. + Partitions []*Partition `protobuf:"bytes,1,rep,name=partitions,proto3" json:"partitions,omitempty"` + // Transaction created by this request. 
+ Transaction *Transaction `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PartitionResponse) Reset() { *m = PartitionResponse{} } +func (m *PartitionResponse) String() string { return proto.CompactTextString(m) } +func (*PartitionResponse) ProtoMessage() {} +func (*PartitionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{13} +} +func (m *PartitionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PartitionResponse.Unmarshal(m, b) +} +func (m *PartitionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PartitionResponse.Marshal(b, m, deterministic) +} +func (dst *PartitionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PartitionResponse.Merge(dst, src) +} +func (m *PartitionResponse) XXX_Size() int { + return xxx_messageInfo_PartitionResponse.Size(m) +} +func (m *PartitionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PartitionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PartitionResponse proto.InternalMessageInfo + +func (m *PartitionResponse) GetPartitions() []*Partition { + if m != nil { + return m.Partitions + } + return nil +} + +func (m *PartitionResponse) GetTransaction() *Transaction { + if m != nil { + return m.Transaction + } + return nil +} + +// The request for [Read][google.spanner.v1.Spanner.Read] and +// [StreamingRead][google.spanner.v1.Spanner.StreamingRead]. +type ReadRequest struct { + // Required. The session in which the read should be performed. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // The transaction to use. If none is provided, the default is a + // temporary read-only transaction with strong concurrency. + Transaction *TransactionSelector `protobuf:"bytes,2,opt,name=transaction,proto3" json:"transaction,omitempty"` + // Required. The name of the table in the database to be read. + Table string `protobuf:"bytes,3,opt,name=table,proto3" json:"table,omitempty"` + // If non-empty, the name of an index on + // [table][google.spanner.v1.ReadRequest.table]. This index is used instead of + // the table primary key when interpreting + // [key_set][google.spanner.v1.ReadRequest.key_set] and sorting result rows. + // See [key_set][google.spanner.v1.ReadRequest.key_set] for further + // information. + Index string `protobuf:"bytes,4,opt,name=index,proto3" json:"index,omitempty"` + // The columns of [table][google.spanner.v1.ReadRequest.table] to be returned + // for each row matching this request. + Columns []string `protobuf:"bytes,5,rep,name=columns,proto3" json:"columns,omitempty"` + // Required. `key_set` identifies the rows to be yielded. `key_set` names the + // primary keys of the rows in [table][google.spanner.v1.ReadRequest.table] to + // be yielded, unless [index][google.spanner.v1.ReadRequest.index] is present. + // If [index][google.spanner.v1.ReadRequest.index] is present, then + // [key_set][google.spanner.v1.ReadRequest.key_set] instead names index keys + // in [index][google.spanner.v1.ReadRequest.index]. + // + // If the [partition_token][google.spanner.v1.ReadRequest.partition_token] + // field is empty, rows are yielded in table primary key order (if + // [index][google.spanner.v1.ReadRequest.index] is empty) or index key order + // (if [index][google.spanner.v1.ReadRequest.index] is non-empty). 
If the + // [partition_token][google.spanner.v1.ReadRequest.partition_token] field is + // not empty, rows will be yielded in an unspecified order. + // + // It is not an error for the `key_set` to name rows that do not + // exist in the database. Read yields nothing for nonexistent rows. + KeySet *KeySet `protobuf:"bytes,6,opt,name=key_set,json=keySet,proto3" json:"key_set,omitempty"` + // If greater than zero, only the first `limit` rows are yielded. If `limit` + // is zero, the default is no limit. A limit cannot be specified if + // `partition_token` is set. + Limit int64 `protobuf:"varint,8,opt,name=limit,proto3" json:"limit,omitempty"` + // If this request is resuming a previously interrupted read, + // `resume_token` should be copied from the last + // [PartialResultSet][google.spanner.v1.PartialResultSet] yielded before the + // interruption. Doing this enables the new read to resume where the last read + // left off. The rest of the request parameters must exactly match the request + // that yielded this token. + ResumeToken []byte `protobuf:"bytes,9,opt,name=resume_token,json=resumeToken,proto3" json:"resume_token,omitempty"` + // If present, results will be restricted to the specified partition + // previously created using PartitionRead(). There must be an exact + // match for the values of fields common to this message and the + // PartitionReadRequest message used to create this partition_token. + PartitionToken []byte `protobuf:"bytes,10,opt,name=partition_token,json=partitionToken,proto3" json:"partition_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReadRequest) Reset() { *m = ReadRequest{} } +func (m *ReadRequest) String() string { return proto.CompactTextString(m) } +func (*ReadRequest) ProtoMessage() {} +func (*ReadRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{14} +} +func (m *ReadRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReadRequest.Unmarshal(m, b) +} +func (m *ReadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReadRequest.Marshal(b, m, deterministic) +} +func (dst *ReadRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReadRequest.Merge(dst, src) +} +func (m *ReadRequest) XXX_Size() int { + return xxx_messageInfo_ReadRequest.Size(m) +} +func (m *ReadRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ReadRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ReadRequest proto.InternalMessageInfo + +func (m *ReadRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *ReadRequest) GetTransaction() *TransactionSelector { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *ReadRequest) GetTable() string { + if m != nil { + return m.Table + } + return "" +} + +func (m *ReadRequest) GetIndex() string { + if m != nil { + return m.Index + } + return "" +} + +func (m *ReadRequest) GetColumns() []string { + if m != nil { + return m.Columns + } + return nil +} + +func (m *ReadRequest) GetKeySet() *KeySet { + if m != nil { + return m.KeySet + } + return nil +} + +func (m *ReadRequest) GetLimit() int64 { + if m != nil { + return m.Limit + } + return 0 +} + +func (m *ReadRequest) GetResumeToken() []byte { + if m != nil { + return m.ResumeToken + } + return nil +} + +func (m *ReadRequest) GetPartitionToken() []byte { + if m != nil { + return m.PartitionToken + } + return nil +} + +// The request for +// 
[BeginTransaction][google.spanner.v1.Spanner.BeginTransaction]. +type BeginTransactionRequest struct { + // Required. The session in which the transaction runs. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Required. Options for the new transaction. + Options *TransactionOptions `protobuf:"bytes,2,opt,name=options,proto3" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BeginTransactionRequest) Reset() { *m = BeginTransactionRequest{} } +func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) } +func (*BeginTransactionRequest) ProtoMessage() {} +func (*BeginTransactionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{15} +} +func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b) +} +func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic) +} +func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BeginTransactionRequest.Merge(dst, src) +} +func (m *BeginTransactionRequest) XXX_Size() int { + return xxx_messageInfo_BeginTransactionRequest.Size(m) +} +func (m *BeginTransactionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo + +func (m *BeginTransactionRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *BeginTransactionRequest) GetOptions() *TransactionOptions { + if m != nil { + return m.Options + } + return nil +} + +// The request for [Commit][google.spanner.v1.Spanner.Commit]. +type CommitRequest struct { + // Required. The session in which the transaction to be committed is running. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Required. The transaction in which to commit. + // + // Types that are valid to be assigned to Transaction: + // *CommitRequest_TransactionId + // *CommitRequest_SingleUseTransaction + Transaction isCommitRequest_Transaction `protobuf_oneof:"transaction"` + // The mutations to be executed when this transaction commits. All + // mutations are applied atomically, in the order they appear in + // this list. 
+ Mutations []*Mutation `protobuf:"bytes,4,rep,name=mutations,proto3" json:"mutations,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitRequest) Reset() { *m = CommitRequest{} } +func (m *CommitRequest) String() string { return proto.CompactTextString(m) } +func (*CommitRequest) ProtoMessage() {} +func (*CommitRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{16} +} +func (m *CommitRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitRequest.Unmarshal(m, b) +} +func (m *CommitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitRequest.Marshal(b, m, deterministic) +} +func (dst *CommitRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitRequest.Merge(dst, src) +} +func (m *CommitRequest) XXX_Size() int { + return xxx_messageInfo_CommitRequest.Size(m) +} +func (m *CommitRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CommitRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitRequest proto.InternalMessageInfo + +func (m *CommitRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +type isCommitRequest_Transaction interface { + isCommitRequest_Transaction() +} + +type CommitRequest_TransactionId struct { + TransactionId []byte `protobuf:"bytes,2,opt,name=transaction_id,json=transactionId,proto3,oneof"` +} + +type CommitRequest_SingleUseTransaction struct { + SingleUseTransaction *TransactionOptions `protobuf:"bytes,3,opt,name=single_use_transaction,json=singleUseTransaction,proto3,oneof"` +} + +func (*CommitRequest_TransactionId) isCommitRequest_Transaction() {} + +func (*CommitRequest_SingleUseTransaction) isCommitRequest_Transaction() {} + +func (m *CommitRequest) GetTransaction() isCommitRequest_Transaction { + if m != nil { + return m.Transaction + } + return nil +} + +func (m *CommitRequest) GetTransactionId() []byte { + if x, ok := m.GetTransaction().(*CommitRequest_TransactionId); ok { + return x.TransactionId + } + return nil +} + +func (m *CommitRequest) GetSingleUseTransaction() *TransactionOptions { + if x, ok := m.GetTransaction().(*CommitRequest_SingleUseTransaction); ok { + return x.SingleUseTransaction + } + return nil +} + +func (m *CommitRequest) GetMutations() []*Mutation { + if m != nil { + return m.Mutations + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CommitRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommitRequest_OneofMarshaler, _CommitRequest_OneofUnmarshaler, _CommitRequest_OneofSizer, []interface{}{ + (*CommitRequest_TransactionId)(nil), + (*CommitRequest_SingleUseTransaction)(nil), + } +} + +func _CommitRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommitRequest) + // transaction + switch x := m.Transaction.(type) { + case *CommitRequest_TransactionId: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.TransactionId) + case *CommitRequest_SingleUseTransaction: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SingleUseTransaction); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CommitRequest.Transaction has unexpected type %T", x) + } + return nil +} + +func _CommitRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommitRequest) + switch tag { + case 2: // transaction.transaction_id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Transaction = &CommitRequest_TransactionId{x} + return true, err + case 3: // transaction.single_use_transaction + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.Transaction = &CommitRequest_SingleUseTransaction{msg} + return true, err + default: + return false, nil + } +} + +func _CommitRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommitRequest) + // transaction + switch x := m.Transaction.(type) { + case *CommitRequest_TransactionId: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.TransactionId))) + n += len(x.TransactionId) + case *CommitRequest_SingleUseTransaction: + s := proto.Size(x.SingleUseTransaction) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The response for [Commit][google.spanner.v1.Spanner.Commit]. +type CommitResponse struct { + // The Cloud Spanner timestamp at which the transaction committed. 
+ CommitTimestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=commit_timestamp,json=commitTimestamp,proto3" json:"commit_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommitResponse) Reset() { *m = CommitResponse{} } +func (m *CommitResponse) String() string { return proto.CompactTextString(m) } +func (*CommitResponse) ProtoMessage() {} +func (*CommitResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{17} +} +func (m *CommitResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommitResponse.Unmarshal(m, b) +} +func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic) +} +func (dst *CommitResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommitResponse.Merge(dst, src) +} +func (m *CommitResponse) XXX_Size() int { + return xxx_messageInfo_CommitResponse.Size(m) +} +func (m *CommitResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CommitResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CommitResponse proto.InternalMessageInfo + +func (m *CommitResponse) GetCommitTimestamp() *timestamp.Timestamp { + if m != nil { + return m.CommitTimestamp + } + return nil +} + +// The request for [Rollback][google.spanner.v1.Spanner.Rollback]. +type RollbackRequest struct { + // Required. The session in which the transaction to roll back is running. + Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"` + // Required. The transaction to roll back. + TransactionId []byte `protobuf:"bytes,2,opt,name=transaction_id,json=transactionId,proto3" json:"transaction_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RollbackRequest) Reset() { *m = RollbackRequest{} } +func (m *RollbackRequest) String() string { return proto.CompactTextString(m) } +func (*RollbackRequest) ProtoMessage() {} +func (*RollbackRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_spanner_9927dc9412546f3a, []int{18} +} +func (m *RollbackRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RollbackRequest.Unmarshal(m, b) +} +func (m *RollbackRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RollbackRequest.Marshal(b, m, deterministic) +} +func (dst *RollbackRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RollbackRequest.Merge(dst, src) +} +func (m *RollbackRequest) XXX_Size() int { + return xxx_messageInfo_RollbackRequest.Size(m) +} +func (m *RollbackRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RollbackRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RollbackRequest proto.InternalMessageInfo + +func (m *RollbackRequest) GetSession() string { + if m != nil { + return m.Session + } + return "" +} + +func (m *RollbackRequest) GetTransactionId() []byte { + if m != nil { + return m.TransactionId + } + return nil +} + +func init() { + proto.RegisterType((*CreateSessionRequest)(nil), "google.spanner.v1.CreateSessionRequest") + proto.RegisterType((*Session)(nil), "google.spanner.v1.Session") + proto.RegisterMapType((map[string]string)(nil), "google.spanner.v1.Session.LabelsEntry") + proto.RegisterType((*GetSessionRequest)(nil), "google.spanner.v1.GetSessionRequest") + proto.RegisterType((*ListSessionsRequest)(nil), "google.spanner.v1.ListSessionsRequest") + 
proto.RegisterType((*ListSessionsResponse)(nil), "google.spanner.v1.ListSessionsResponse") + proto.RegisterType((*DeleteSessionRequest)(nil), "google.spanner.v1.DeleteSessionRequest") + proto.RegisterType((*ExecuteSqlRequest)(nil), "google.spanner.v1.ExecuteSqlRequest") + proto.RegisterMapType((map[string]*Type)(nil), "google.spanner.v1.ExecuteSqlRequest.ParamTypesEntry") + proto.RegisterType((*ExecuteBatchDmlRequest)(nil), "google.spanner.v1.ExecuteBatchDmlRequest") + proto.RegisterType((*ExecuteBatchDmlRequest_Statement)(nil), "google.spanner.v1.ExecuteBatchDmlRequest.Statement") + proto.RegisterMapType((map[string]*Type)(nil), "google.spanner.v1.ExecuteBatchDmlRequest.Statement.ParamTypesEntry") + proto.RegisterType((*ExecuteBatchDmlResponse)(nil), "google.spanner.v1.ExecuteBatchDmlResponse") + proto.RegisterType((*PartitionOptions)(nil), "google.spanner.v1.PartitionOptions") + proto.RegisterType((*PartitionQueryRequest)(nil), "google.spanner.v1.PartitionQueryRequest") + proto.RegisterMapType((map[string]*Type)(nil), "google.spanner.v1.PartitionQueryRequest.ParamTypesEntry") + proto.RegisterType((*PartitionReadRequest)(nil), "google.spanner.v1.PartitionReadRequest") + proto.RegisterType((*Partition)(nil), "google.spanner.v1.Partition") + proto.RegisterType((*PartitionResponse)(nil), "google.spanner.v1.PartitionResponse") + proto.RegisterType((*ReadRequest)(nil), "google.spanner.v1.ReadRequest") + proto.RegisterType((*BeginTransactionRequest)(nil), "google.spanner.v1.BeginTransactionRequest") + proto.RegisterType((*CommitRequest)(nil), "google.spanner.v1.CommitRequest") + proto.RegisterType((*CommitResponse)(nil), "google.spanner.v1.CommitResponse") + proto.RegisterType((*RollbackRequest)(nil), "google.spanner.v1.RollbackRequest") + proto.RegisterEnum("google.spanner.v1.ExecuteSqlRequest_QueryMode", ExecuteSqlRequest_QueryMode_name, ExecuteSqlRequest_QueryMode_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SpannerClient is the client API for Spanner service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type SpannerClient interface { + // Creates a new session. A session can be used to perform + // transactions that read and/or modify data in a Cloud Spanner database. + // Sessions are meant to be reused for many consecutive + // transactions. + // + // Sessions can only execute one transaction at a time. To execute + // multiple concurrent read-write/write-only transactions, create + // multiple sessions. Note that standalone reads and queries use a + // transaction internally, and count toward the one transaction + // limit. + // + // Cloud Spanner limits the number of sessions that can exist at any given + // time; thus, it is a good idea to delete idle and/or unneeded sessions. + // Aside from explicit deletes, Cloud Spanner can delete sessions for which no + // operations are sent for more than an hour. If a session is deleted, + // requests to it return `NOT_FOUND`. + // + // Idle sessions can be kept alive by sending a trivial SQL query + // periodically, e.g., `"SELECT 1"`. + CreateSession(ctx context.Context, in *CreateSessionRequest, opts ...grpc.CallOption) (*Session, error) + // Gets a session. 
Returns `NOT_FOUND` if the session does not exist. + // This is mainly useful for determining whether a session is still + // alive. + GetSession(ctx context.Context, in *GetSessionRequest, opts ...grpc.CallOption) (*Session, error) + // Lists all sessions in a given database. + ListSessions(ctx context.Context, in *ListSessionsRequest, opts ...grpc.CallOption) (*ListSessionsResponse, error) + // Ends a session, releasing server resources associated with it. This will + // asynchronously trigger cancellation of any operations that are running with + // this session. + DeleteSession(ctx context.Context, in *DeleteSessionRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Executes an SQL statement, returning all results in a single reply. This + // method cannot be used to return a result set larger than 10 MiB; + // if the query yields more data than that, the query fails with + // a `FAILED_PRECONDITION` error. + // + // Operations inside read-write transactions might return `ABORTED`. If + // this occurs, the application should restart the transaction from + // the beginning. See [Transaction][google.spanner.v1.Transaction] for more + // details. + // + // Larger result sets can be fetched in streaming fashion by calling + // [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + // instead. + ExecuteSql(ctx context.Context, in *ExecuteSqlRequest, opts ...grpc.CallOption) (*ResultSet, error) + // Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except returns the + // result set as a stream. Unlike + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no limit on + // the size of the returned result set. However, no individual row in the + // result set can exceed 100 MiB, and no column value can exceed 10 MiB. + ExecuteStreamingSql(ctx context.Context, in *ExecuteSqlRequest, opts ...grpc.CallOption) (Spanner_ExecuteStreamingSqlClient, error) + // Executes a batch of SQL DML statements. This method allows many statements + // to be run with lower latency than submitting them sequentially with + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + // + // Statements are executed in order, sequentially. + // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse] will contain a + // [ResultSet][google.spanner.v1.ResultSet] for each DML statement that has successfully executed. If a + // statement fails, its error status will be returned as part of the + // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse]. Execution will + // stop at the first failed statement; the remaining statements will not run. + // + // ExecuteBatchDml is expected to return an OK status with a response even if + // there was an error while processing one of the DML statements. Clients must + // inspect response.status to determine if there were any errors while + // processing the request. + // + // See more details in + // [ExecuteBatchDmlRequest][Spanner.ExecuteBatchDmlRequest] and + // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse]. + ExecuteBatchDml(ctx context.Context, in *ExecuteBatchDmlRequest, opts ...grpc.CallOption) (*ExecuteBatchDmlResponse, error) + // Reads rows from the database using key lookups and scans, as a + // simple key/value style alternative to + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method cannot be + // used to return a result set larger than 10 MiB; if the read matches more + // data than that, the read fails with a `FAILED_PRECONDITION` + // error. 
+ // + // Reads inside read-write transactions might return `ABORTED`. If + // this occurs, the application should restart the transaction from + // the beginning. See [Transaction][google.spanner.v1.Transaction] for more + // details. + // + // Larger result sets can be yielded in streaming fashion by calling + // [StreamingRead][google.spanner.v1.Spanner.StreamingRead] instead. + Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ResultSet, error) + // Like [Read][google.spanner.v1.Spanner.Read], except returns the result set + // as a stream. Unlike [Read][google.spanner.v1.Spanner.Read], there is no + // limit on the size of the returned result set. However, no individual row in + // the result set can exceed 100 MiB, and no column value can exceed + // 10 MiB. + StreamingRead(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (Spanner_StreamingReadClient, error) + // Begins a new transaction. This step can often be skipped: + // [Read][google.spanner.v1.Spanner.Read], + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + // [Commit][google.spanner.v1.Spanner.Commit] can begin a new transaction as a + // side-effect. + BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*Transaction, error) + // Commits a transaction. The request includes the mutations to be + // applied to rows in the database. + // + // `Commit` might return an `ABORTED` error. This can occur at any time; + // commonly, the cause is conflicts with concurrent + // transactions. However, it can also happen for a variety of other + // reasons. If `Commit` returns `ABORTED`, the caller should re-attempt + // the transaction from the beginning, re-using the same session. + Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) + // Rolls back a transaction, releasing any locks it holds. It is a good + // idea to call this for any transaction that includes one or more + // [Read][google.spanner.v1.Spanner.Read] or + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and ultimately + // decides not to commit. + // + // `Rollback` returns `OK` if it successfully aborts the transaction, the + // transaction was already aborted, or the transaction is not + // found. `Rollback` never returns `ABORTED`. + Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Creates a set of partition tokens that can be used to execute a query + // operation in parallel. Each of the returned partition tokens can be used + // by [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] to + // specify a subset of the query result to read. The same session and + // read-only transaction must be used by the PartitionQueryRequest used to + // create the partition tokens and the ExecuteSqlRequests that use the + // partition tokens. + // + // Partition tokens become invalid when the session used to create them + // is deleted, is idle for too long, begins a new transaction, or becomes too + // old. When any of these happen, it is not possible to resume the query, and + // the whole operation must be restarted from the beginning. + PartitionQuery(ctx context.Context, in *PartitionQueryRequest, opts ...grpc.CallOption) (*PartitionResponse, error) + // Creates a set of partition tokens that can be used to execute a read + // operation in parallel. 
Each of the returned partition tokens can be used + // by [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to specify a + // subset of the read result to read. The same session and read-only + // transaction must be used by the PartitionReadRequest used to create the + // partition tokens and the ReadRequests that use the partition tokens. There + // are no ordering guarantees on rows returned among the returned partition + // tokens, or even within each individual StreamingRead call issued with a + // partition_token. + // + // Partition tokens become invalid when the session used to create them + // is deleted, is idle for too long, begins a new transaction, or becomes too + // old. When any of these happen, it is not possible to resume the read, and + // the whole operation must be restarted from the beginning. + PartitionRead(ctx context.Context, in *PartitionReadRequest, opts ...grpc.CallOption) (*PartitionResponse, error) +} + +type spannerClient struct { + cc *grpc.ClientConn +} + +func NewSpannerClient(cc *grpc.ClientConn) SpannerClient { + return &spannerClient{cc} +} + +func (c *spannerClient) CreateSession(ctx context.Context, in *CreateSessionRequest, opts ...grpc.CallOption) (*Session, error) { + out := new(Session) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/CreateSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) GetSession(ctx context.Context, in *GetSessionRequest, opts ...grpc.CallOption) (*Session, error) { + out := new(Session) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/GetSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) ListSessions(ctx context.Context, in *ListSessionsRequest, opts ...grpc.CallOption) (*ListSessionsResponse, error) { + out := new(ListSessionsResponse) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/ListSessions", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) DeleteSession(ctx context.Context, in *DeleteSessionRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/DeleteSession", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) ExecuteSql(ctx context.Context, in *ExecuteSqlRequest, opts ...grpc.CallOption) (*ResultSet, error) { + out := new(ResultSet) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/ExecuteSql", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) ExecuteStreamingSql(ctx context.Context, in *ExecuteSqlRequest, opts ...grpc.CallOption) (Spanner_ExecuteStreamingSqlClient, error) { + stream, err := c.cc.NewStream(ctx, &_Spanner_serviceDesc.Streams[0], "/google.spanner.v1.Spanner/ExecuteStreamingSql", opts...) 
+ if err != nil { + return nil, err + } + x := &spannerExecuteStreamingSqlClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Spanner_ExecuteStreamingSqlClient interface { + Recv() (*PartialResultSet, error) + grpc.ClientStream +} + +type spannerExecuteStreamingSqlClient struct { + grpc.ClientStream +} + +func (x *spannerExecuteStreamingSqlClient) Recv() (*PartialResultSet, error) { + m := new(PartialResultSet) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *spannerClient) ExecuteBatchDml(ctx context.Context, in *ExecuteBatchDmlRequest, opts ...grpc.CallOption) (*ExecuteBatchDmlResponse, error) { + out := new(ExecuteBatchDmlResponse) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/ExecuteBatchDml", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ResultSet, error) { + out := new(ResultSet) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/Read", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) StreamingRead(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (Spanner_StreamingReadClient, error) { + stream, err := c.cc.NewStream(ctx, &_Spanner_serviceDesc.Streams[1], "/google.spanner.v1.Spanner/StreamingRead", opts...) + if err != nil { + return nil, err + } + x := &spannerStreamingReadClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Spanner_StreamingReadClient interface { + Recv() (*PartialResultSet, error) + grpc.ClientStream +} + +type spannerStreamingReadClient struct { + grpc.ClientStream +} + +func (x *spannerStreamingReadClient) Recv() (*PartialResultSet, error) { + m := new(PartialResultSet) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *spannerClient) BeginTransaction(ctx context.Context, in *BeginTransactionRequest, opts ...grpc.CallOption) (*Transaction, error) { + out := new(Transaction) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/BeginTransaction", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) Commit(ctx context.Context, in *CommitRequest, opts ...grpc.CallOption) (*CommitResponse, error) { + out := new(CommitResponse) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/Commit", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) Rollback(ctx context.Context, in *RollbackRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/Rollback", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) PartitionQuery(ctx context.Context, in *PartitionQueryRequest, opts ...grpc.CallOption) (*PartitionResponse, error) { + out := new(PartitionResponse) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/PartitionQuery", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *spannerClient) PartitionRead(ctx context.Context, in *PartitionReadRequest, opts ...grpc.CallOption) (*PartitionResponse, error) { + out := new(PartitionResponse) + err := c.cc.Invoke(ctx, "/google.spanner.v1.Spanner/PartitionRead", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// SpannerServer is the server API for Spanner service. +type SpannerServer interface { + // Creates a new session. A session can be used to perform + // transactions that read and/or modify data in a Cloud Spanner database. + // Sessions are meant to be reused for many consecutive + // transactions. + // + // Sessions can only execute one transaction at a time. To execute + // multiple concurrent read-write/write-only transactions, create + // multiple sessions. Note that standalone reads and queries use a + // transaction internally, and count toward the one transaction + // limit. + // + // Cloud Spanner limits the number of sessions that can exist at any given + // time; thus, it is a good idea to delete idle and/or unneeded sessions. + // Aside from explicit deletes, Cloud Spanner can delete sessions for which no + // operations are sent for more than an hour. If a session is deleted, + // requests to it return `NOT_FOUND`. + // + // Idle sessions can be kept alive by sending a trivial SQL query + // periodically, e.g., `"SELECT 1"`. + CreateSession(context.Context, *CreateSessionRequest) (*Session, error) + // Gets a session. Returns `NOT_FOUND` if the session does not exist. + // This is mainly useful for determining whether a session is still + // alive. + GetSession(context.Context, *GetSessionRequest) (*Session, error) + // Lists all sessions in a given database. + ListSessions(context.Context, *ListSessionsRequest) (*ListSessionsResponse, error) + // Ends a session, releasing server resources associated with it. This will + // asynchronously trigger cancellation of any operations that are running with + // this session. + DeleteSession(context.Context, *DeleteSessionRequest) (*empty.Empty, error) + // Executes an SQL statement, returning all results in a single reply. This + // method cannot be used to return a result set larger than 10 MiB; + // if the query yields more data than that, the query fails with + // a `FAILED_PRECONDITION` error. + // + // Operations inside read-write transactions might return `ABORTED`. If + // this occurs, the application should restart the transaction from + // the beginning. See [Transaction][google.spanner.v1.Transaction] for more + // details. + // + // Larger result sets can be fetched in streaming fashion by calling + // [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] + // instead. + ExecuteSql(context.Context, *ExecuteSqlRequest) (*ResultSet, error) + // Like [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], except returns the + // result set as a stream. Unlike + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], there is no limit on + // the size of the returned result set. However, no individual row in the + // result set can exceed 100 MiB, and no column value can exceed 10 MiB. + ExecuteStreamingSql(*ExecuteSqlRequest, Spanner_ExecuteStreamingSqlServer) error + // Executes a batch of SQL DML statements. This method allows many statements + // to be run with lower latency than submitting them sequentially with + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. + // + // Statements are executed in order, sequentially. 
+ // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse] will contain a + // [ResultSet][google.spanner.v1.ResultSet] for each DML statement that has successfully executed. If a + // statement fails, its error status will be returned as part of the + // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse]. Execution will + // stop at the first failed statement; the remaining statements will not run. + // + // ExecuteBatchDml is expected to return an OK status with a response even if + // there was an error while processing one of the DML statements. Clients must + // inspect response.status to determine if there were any errors while + // processing the request. + // + // See more details in + // [ExecuteBatchDmlRequest][Spanner.ExecuteBatchDmlRequest] and + // [ExecuteBatchDmlResponse][Spanner.ExecuteBatchDmlResponse]. + ExecuteBatchDml(context.Context, *ExecuteBatchDmlRequest) (*ExecuteBatchDmlResponse, error) + // Reads rows from the database using key lookups and scans, as a + // simple key/value style alternative to + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql]. This method cannot be + // used to return a result set larger than 10 MiB; if the read matches more + // data than that, the read fails with a `FAILED_PRECONDITION` + // error. + // + // Reads inside read-write transactions might return `ABORTED`. If + // this occurs, the application should restart the transaction from + // the beginning. See [Transaction][google.spanner.v1.Transaction] for more + // details. + // + // Larger result sets can be yielded in streaming fashion by calling + // [StreamingRead][google.spanner.v1.Spanner.StreamingRead] instead. + Read(context.Context, *ReadRequest) (*ResultSet, error) + // Like [Read][google.spanner.v1.Spanner.Read], except returns the result set + // as a stream. Unlike [Read][google.spanner.v1.Spanner.Read], there is no + // limit on the size of the returned result set. However, no individual row in + // the result set can exceed 100 MiB, and no column value can exceed + // 10 MiB. + StreamingRead(*ReadRequest, Spanner_StreamingReadServer) error + // Begins a new transaction. This step can often be skipped: + // [Read][google.spanner.v1.Spanner.Read], + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] and + // [Commit][google.spanner.v1.Spanner.Commit] can begin a new transaction as a + // side-effect. + BeginTransaction(context.Context, *BeginTransactionRequest) (*Transaction, error) + // Commits a transaction. The request includes the mutations to be + // applied to rows in the database. + // + // `Commit` might return an `ABORTED` error. This can occur at any time; + // commonly, the cause is conflicts with concurrent + // transactions. However, it can also happen for a variety of other + // reasons. If `Commit` returns `ABORTED`, the caller should re-attempt + // the transaction from the beginning, re-using the same session. + Commit(context.Context, *CommitRequest) (*CommitResponse, error) + // Rolls back a transaction, releasing any locks it holds. It is a good + // idea to call this for any transaction that includes one or more + // [Read][google.spanner.v1.Spanner.Read] or + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] requests and ultimately + // decides not to commit. + // + // `Rollback` returns `OK` if it successfully aborts the transaction, the + // transaction was already aborted, or the transaction is not + // found. `Rollback` never returns `ABORTED`. 
+ Rollback(context.Context, *RollbackRequest) (*empty.Empty, error) + // Creates a set of partition tokens that can be used to execute a query + // operation in parallel. Each of the returned partition tokens can be used + // by [ExecuteStreamingSql][google.spanner.v1.Spanner.ExecuteStreamingSql] to + // specify a subset of the query result to read. The same session and + // read-only transaction must be used by the PartitionQueryRequest used to + // create the partition tokens and the ExecuteSqlRequests that use the + // partition tokens. + // + // Partition tokens become invalid when the session used to create them + // is deleted, is idle for too long, begins a new transaction, or becomes too + // old. When any of these happen, it is not possible to resume the query, and + // the whole operation must be restarted from the beginning. + PartitionQuery(context.Context, *PartitionQueryRequest) (*PartitionResponse, error) + // Creates a set of partition tokens that can be used to execute a read + // operation in parallel. Each of the returned partition tokens can be used + // by [StreamingRead][google.spanner.v1.Spanner.StreamingRead] to specify a + // subset of the read result to read. The same session and read-only + // transaction must be used by the PartitionReadRequest used to create the + // partition tokens and the ReadRequests that use the partition tokens. There + // are no ordering guarantees on rows returned among the returned partition + // tokens, or even within each individual StreamingRead call issued with a + // partition_token. + // + // Partition tokens become invalid when the session used to create them + // is deleted, is idle for too long, begins a new transaction, or becomes too + // old. When any of these happen, it is not possible to resume the read, and + // the whole operation must be restarted from the beginning. 
+ PartitionRead(context.Context, *PartitionReadRequest) (*PartitionResponse, error) +} + +func RegisterSpannerServer(s *grpc.Server, srv SpannerServer) { + s.RegisterService(&_Spanner_serviceDesc, srv) +} + +func _Spanner_CreateSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).CreateSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/CreateSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).CreateSession(ctx, req.(*CreateSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_GetSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).GetSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/GetSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).GetSession(ctx, req.(*GetSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_ListSessions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListSessionsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).ListSessions(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/ListSessions", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).ListSessions(ctx, req.(*ListSessionsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_DeleteSession_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteSessionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).DeleteSession(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/DeleteSession", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).DeleteSession(ctx, req.(*DeleteSessionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_ExecuteSql_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteSqlRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).ExecuteSql(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/ExecuteSql", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).ExecuteSql(ctx, req.(*ExecuteSqlRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_ExecuteStreamingSql_Handler(srv interface{}, stream grpc.ServerStream) 
error { + m := new(ExecuteSqlRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(SpannerServer).ExecuteStreamingSql(m, &spannerExecuteStreamingSqlServer{stream}) +} + +type Spanner_ExecuteStreamingSqlServer interface { + Send(*PartialResultSet) error + grpc.ServerStream +} + +type spannerExecuteStreamingSqlServer struct { + grpc.ServerStream +} + +func (x *spannerExecuteStreamingSqlServer) Send(m *PartialResultSet) error { + return x.ServerStream.SendMsg(m) +} + +func _Spanner_ExecuteBatchDml_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ExecuteBatchDmlRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).ExecuteBatchDml(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/ExecuteBatchDml", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).ExecuteBatchDml(ctx, req.(*ExecuteBatchDmlRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_Read_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ReadRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).Read(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/Read", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).Read(ctx, req.(*ReadRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_StreamingRead_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(ReadRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(SpannerServer).StreamingRead(m, &spannerStreamingReadServer{stream}) +} + +type Spanner_StreamingReadServer interface { + Send(*PartialResultSet) error + grpc.ServerStream +} + +type spannerStreamingReadServer struct { + grpc.ServerStream +} + +func (x *spannerStreamingReadServer) Send(m *PartialResultSet) error { + return x.ServerStream.SendMsg(m) +} + +func _Spanner_BeginTransaction_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BeginTransactionRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).BeginTransaction(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/BeginTransaction", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).BeginTransaction(ctx, req.(*BeginTransactionRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_Commit_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CommitRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).Commit(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/Commit", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).Commit(ctx, 
req.(*CommitRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_Rollback_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RollbackRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).Rollback(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/Rollback", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).Rollback(ctx, req.(*RollbackRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_PartitionQuery_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PartitionQueryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).PartitionQuery(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/PartitionQuery", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).PartitionQuery(ctx, req.(*PartitionQueryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Spanner_PartitionRead_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PartitionReadRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SpannerServer).PartitionRead(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.spanner.v1.Spanner/PartitionRead", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SpannerServer).PartitionRead(ctx, req.(*PartitionReadRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Spanner_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.spanner.v1.Spanner", + HandlerType: (*SpannerServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateSession", + Handler: _Spanner_CreateSession_Handler, + }, + { + MethodName: "GetSession", + Handler: _Spanner_GetSession_Handler, + }, + { + MethodName: "ListSessions", + Handler: _Spanner_ListSessions_Handler, + }, + { + MethodName: "DeleteSession", + Handler: _Spanner_DeleteSession_Handler, + }, + { + MethodName: "ExecuteSql", + Handler: _Spanner_ExecuteSql_Handler, + }, + { + MethodName: "ExecuteBatchDml", + Handler: _Spanner_ExecuteBatchDml_Handler, + }, + { + MethodName: "Read", + Handler: _Spanner_Read_Handler, + }, + { + MethodName: "BeginTransaction", + Handler: _Spanner_BeginTransaction_Handler, + }, + { + MethodName: "Commit", + Handler: _Spanner_Commit_Handler, + }, + { + MethodName: "Rollback", + Handler: _Spanner_Rollback_Handler, + }, + { + MethodName: "PartitionQuery", + Handler: _Spanner_PartitionQuery_Handler, + }, + { + MethodName: "PartitionRead", + Handler: _Spanner_PartitionRead_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ExecuteStreamingSql", + Handler: _Spanner_ExecuteStreamingSql_Handler, + ServerStreams: true, + }, + { + StreamName: "StreamingRead", + Handler: _Spanner_StreamingRead_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/spanner/v1/spanner.proto", +} + +func init() { + proto.RegisterFile("google/spanner/v1/spanner.proto", 
fileDescriptor_spanner_9927dc9412546f3a) +} + +var fileDescriptor_spanner_9927dc9412546f3a = []byte{ + // 1832 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x59, 0xcd, 0x6f, 0x1b, 0xc7, + 0x15, 0xf7, 0x92, 0x12, 0x25, 0x3e, 0xea, 0x73, 0xc2, 0x48, 0x0c, 0xed, 0x26, 0xca, 0x26, 0x8e, + 0x54, 0x02, 0x25, 0x63, 0xc5, 0x28, 0x1c, 0x25, 0x69, 0x1c, 0xd9, 0x8a, 0xed, 0x5a, 0xb2, 0xe8, + 0xa5, 0xec, 0xa0, 0x81, 0x0b, 0x62, 0x44, 0x4e, 0x98, 0xad, 0xf6, 0x4b, 0x3b, 0x43, 0x43, 0x4c, + 0x91, 0x4b, 0xd1, 0xde, 0x7a, 0x68, 0x1b, 0x14, 0x3d, 0xb4, 0xb7, 0xde, 0x8a, 0x1c, 0x0b, 0xe4, + 0x56, 0x14, 0x28, 0x90, 0x43, 0x80, 0x9e, 0xfa, 0x2f, 0xf4, 0x6f, 0xe8, 0xa5, 0x97, 0x62, 0xbe, + 0x96, 0x4b, 0x72, 0x44, 0x31, 0xa5, 0x13, 0xa0, 0xe8, 0x89, 0x3b, 0xf3, 0xde, 0xbc, 0xf9, 0xcd, + 0xfb, 0xfd, 0xe6, 0xe3, 0x81, 0xf0, 0x52, 0x27, 0x0c, 0x3b, 0x1e, 0xa9, 0xd1, 0x08, 0x07, 0x01, + 0x89, 0x6b, 0x4f, 0xaf, 0xe9, 0xcf, 0x6a, 0x14, 0x87, 0x2c, 0x44, 0xab, 0xd2, 0xa1, 0xaa, 0x7b, + 0x9f, 0x5e, 0x2b, 0x5f, 0x51, 0x63, 0x70, 0xe4, 0xd6, 0x70, 0x10, 0x84, 0x0c, 0x33, 0x37, 0x0c, + 0xa8, 0x1c, 0x50, 0xbe, 0xac, 0xac, 0xa2, 0x75, 0xdc, 0xfd, 0xa8, 0x46, 0xfc, 0x88, 0xf5, 0x94, + 0xf1, 0xca, 0xb0, 0x91, 0xb2, 0xb8, 0xdb, 0x62, 0xca, 0xfa, 0xd2, 0xb0, 0x95, 0xb9, 0x3e, 0xa1, + 0x0c, 0xfb, 0x91, 0x72, 0x58, 0x57, 0x0e, 0x71, 0xd4, 0xaa, 0x51, 0x86, 0x59, 0x97, 0x0e, 0xc5, + 0x4d, 0x2d, 0xe3, 0x84, 0xf4, 0xb4, 0x75, 0x63, 0xd4, 0xea, 0x77, 0x25, 0x6a, 0xe5, 0x61, 0x8f, + 0x7a, 0xc4, 0x84, 0x76, 0x3d, 0xd6, 0xa4, 0x44, 0xa3, 0x7b, 0x65, 0xd4, 0x87, 0xc5, 0x38, 0xa0, + 0xb8, 0x95, 0x0a, 0x64, 0x00, 0xc2, 0x7a, 0x11, 0x91, 0x56, 0xfb, 0x63, 0x28, 0xde, 0x8a, 0x09, + 0x66, 0xa4, 0x41, 0x28, 0x75, 0xc3, 0xc0, 0x21, 0xa7, 0x5d, 0x42, 0x19, 0x2a, 0xc3, 0x7c, 0x1b, + 0x33, 0x7c, 0x8c, 0x29, 0x29, 0x59, 0x1b, 0xd6, 0x56, 0xde, 0x49, 0xda, 0xe8, 0x3a, 0xcc, 0x51, + 0xe9, 0x5d, 0xca, 0x6c, 0x58, 0x5b, 0x85, 0xed, 0x72, 0x75, 0x84, 0x92, 0xaa, 0x8e, 0xa7, 0x5d, + 0xed, 0xcf, 0x33, 0x30, 0xa7, 0x3a, 0x11, 0x82, 0x99, 0x00, 0xfb, 0x3a, 0xb2, 0xf8, 0x46, 0x3f, + 0x80, 0x9c, 0x87, 0x8f, 0x89, 0x47, 0x4b, 0x99, 0x8d, 0xec, 0x56, 0x61, 0xfb, 0xb5, 0xf3, 0x83, + 0x56, 0xf7, 0x85, 0xe3, 0x5e, 0xc0, 0xe2, 0x9e, 0xa3, 0x46, 0xa1, 0xb7, 0xa0, 0xd0, 0x12, 0x2b, + 0x69, 0x72, 0x8e, 0x4a, 0xd9, 0x41, 0x64, 0x9a, 0xc0, 0xea, 0x91, 0x26, 0xd0, 0x01, 0xe9, 0xce, + 0x3b, 0xd0, 0x23, 0x78, 0x01, 0x47, 0x51, 0x1c, 0x9e, 0xb9, 0x3e, 0x8f, 0xe0, 0x61, 0xca, 0x9a, + 0x5d, 0xaa, 0x42, 0xcd, 0x5c, 0x18, 0x6a, 0x2d, 0x35, 0x78, 0x1f, 0x53, 0xf6, 0x88, 0x8a, 0xb0, + 0xe5, 0x37, 0xa1, 0x90, 0x82, 0x8a, 0x56, 0x20, 0x7b, 0x42, 0x7a, 0x6a, 0xd5, 0xfc, 0x13, 0x15, + 0x61, 0xf6, 0x29, 0xf6, 0xba, 0x44, 0x24, 0x32, 0xef, 0xc8, 0xc6, 0x4e, 0xe6, 0x86, 0x65, 0x6f, + 0xc2, 0xea, 0x1d, 0xc2, 0x86, 0x58, 0x31, 0xe4, 0xcd, 0xfe, 0x85, 0x05, 0xcf, 0xed, 0xbb, 0x54, + 0xbb, 0xd2, 0x49, 0x18, 0xbc, 0x0c, 0xf9, 0x08, 0x77, 0x48, 0x93, 0xba, 0x9f, 0xc8, 0xa9, 0x67, + 0x9d, 0x79, 0xde, 0xd1, 0x70, 0x3f, 0x21, 0xe8, 0x3b, 0x00, 0xc2, 0xc8, 0xc2, 0x13, 0x12, 0x88, + 0x3c, 0xe6, 0x1d, 0xe1, 0x7e, 0xc4, 0x3b, 0xd0, 0x1a, 0xe4, 0x3e, 0x72, 0x3d, 0x46, 0x62, 0x91, + 0x97, 0xbc, 0xa3, 0x5a, 0xf6, 0x53, 0x28, 0x0e, 0xc2, 0xa0, 0x51, 0x18, 0x50, 0x82, 0xbe, 0x0f, + 0xf3, 0x4a, 0x02, 0xb4, 0x64, 0x09, 0x66, 0xc7, 0xc9, 0x25, 0xf1, 0x45, 0xaf, 0xc1, 0x72, 0x40, + 0xce, 0x58, 0x33, 0x85, 0x45, 0x26, 0x69, 0x91, 0x77, 0xd7, 0x35, 0x1e, 0xbb, 0x02, 0xc5, 0xdb, + 0xc4, 0x23, 0x23, 0x0a, 0x36, 0xe5, 0xea, 0xab, 0x19, 0x58, 0xdd, 
0x3b, 0x23, 0xad, 0x2e, 0x23, + 0x8d, 0x53, 0x4f, 0x7b, 0x96, 0xfa, 0x7a, 0x96, 0xce, 0xba, 0x89, 0xee, 0x42, 0x21, 0xb5, 0xa1, + 0x94, 0xda, 0x4d, 0xc2, 0x3c, 0xea, 0x7b, 0x35, 0x88, 0x47, 0x5a, 0x2c, 0x8c, 0x9d, 0xf4, 0x50, + 0x4e, 0x3d, 0x3d, 0xf5, 0x54, 0x36, 0xf9, 0x27, 0xaa, 0x41, 0x2e, 0xc2, 0x31, 0xf6, 0xa9, 0xd2, + 0xd7, 0xfa, 0x88, 0xbe, 0x1a, 0xe2, 0x24, 0x72, 0x94, 0x1b, 0x7a, 0x04, 0x05, 0xf1, 0xd5, 0xe4, + 0xdb, 0x97, 0x96, 0x66, 0x45, 0x2e, 0xaf, 0x1b, 0xc0, 0x8c, 0xac, 0xb0, 0x5a, 0xe7, 0xe3, 0x8e, + 0xf8, 0x30, 0xb9, 0x67, 0x20, 0x4a, 0x3a, 0xd0, 0xcb, 0xb0, 0xc0, 0x0f, 0x16, 0x5f, 0x27, 0x39, + 0xb7, 0x61, 0x6d, 0x2d, 0x38, 0x05, 0xd9, 0x27, 0x29, 0x3f, 0x00, 0x38, 0xed, 0x92, 0xb8, 0xd7, + 0xf4, 0xc3, 0x36, 0x29, 0xcd, 0x6d, 0x58, 0x5b, 0x4b, 0xdb, 0xd5, 0x89, 0x26, 0x7e, 0xc8, 0x87, + 0x1d, 0x84, 0x6d, 0xe2, 0xe4, 0x4f, 0xf5, 0x27, 0xda, 0x84, 0xe5, 0x08, 0xc7, 0xcc, 0xe5, 0x89, + 0x51, 0x93, 0xce, 0x8b, 0x49, 0x97, 0x92, 0x6e, 0x39, 0x6f, 0x11, 0x66, 0x29, 0x39, 0x0d, 0xc2, + 0x52, 0x7e, 0xc3, 0xda, 0xca, 0x3a, 0xb2, 0x51, 0x7e, 0x0c, 0xcb, 0x43, 0xeb, 0x31, 0x6c, 0xac, + 0xef, 0xa5, 0x37, 0x56, 0x2a, 0xb9, 0x69, 0xce, 0x7a, 0x11, 0x49, 0xef, 0xb8, 0x2a, 0xe4, 0x13, + 0xb8, 0x08, 0x20, 0xf7, 0xe0, 0xd0, 0x39, 0x78, 0x6f, 0x7f, 0xe5, 0x12, 0x9a, 0x87, 0x99, 0xfa, + 0xfe, 0x7b, 0x0f, 0x56, 0x2c, 0x54, 0x80, 0xb9, 0xba, 0x73, 0xf8, 0xfe, 0xbd, 0xfd, 0xbd, 0x95, + 0x8c, 0xfd, 0xaf, 0x2c, 0xac, 0xa9, 0x15, 0xef, 0x62, 0xd6, 0xfa, 0xf8, 0xb6, 0xff, 0xad, 0x2a, + 0xaa, 0x01, 0xc0, 0x2f, 0x1c, 0xe2, 0x93, 0x80, 0xd1, 0x52, 0x56, 0xa8, 0xe1, 0x8d, 0xf3, 0x49, + 0x19, 0x82, 0x58, 0x6d, 0xe8, 0xb1, 0x4e, 0x2a, 0x4c, 0x3f, 0xe3, 0x33, 0xe9, 0x8c, 0xff, 0x32, + 0x03, 0xf9, 0xc4, 0x5f, 0x4b, 0xd9, 0x32, 0x49, 0x39, 0x33, 0x99, 0x94, 0xdb, 0x83, 0x52, 0x96, + 0xe0, 0x6f, 0xfd, 0x17, 0xe0, 0xc7, 0x29, 0xfb, 0x1b, 0x13, 0xca, 0xcf, 0x2d, 0x58, 0x1f, 0x01, + 0xa6, 0x4e, 0xbb, 0x77, 0xa0, 0xd0, 0xbf, 0xa6, 0xf5, 0x81, 0x77, 0xc5, 0x10, 0xd4, 0x11, 0x5e, + 0x0d, 0xc2, 0x1c, 0x88, 0xf5, 0x27, 0x45, 0x15, 0xc8, 0xc9, 0x57, 0x84, 0x82, 0x83, 0xf4, 0xc8, + 0x38, 0x6a, 0x89, 0x55, 0x77, 0xa9, 0xa3, 0x3c, 0xec, 0x13, 0x58, 0xa9, 0xeb, 0xfd, 0x72, 0x18, + 0x89, 0x07, 0x0f, 0x7a, 0x1d, 0x8a, 0xfd, 0xad, 0xc5, 0x4f, 0xf7, 0xe6, 0x71, 0x8f, 0x11, 0x2a, + 0x16, 0x9c, 0x75, 0x50, 0x62, 0xe3, 0x07, 0xfd, 0x2e, 0xb7, 0xa0, 0xab, 0xb0, 0xe4, 0xe3, 0xb3, + 0x66, 0x62, 0x91, 0x33, 0x67, 0x9d, 0x45, 0x1f, 0x9f, 0x25, 0xe1, 0xa9, 0xfd, 0xb7, 0x2c, 0x3c, + 0x9f, 0x34, 0xc5, 0x36, 0xf9, 0x1f, 0x3b, 0x3d, 0x7f, 0x64, 0x3a, 0x3d, 0x6f, 0x18, 0xc0, 0x18, + 0x57, 0x39, 0xf6, 0x04, 0xad, 0xc3, 0x6a, 0x3f, 0xe9, 0xa1, 0x64, 0x42, 0x1c, 0xa3, 0x85, 0xed, + 0x57, 0xc6, 0x4d, 0xa0, 0x48, 0x73, 0x56, 0xa2, 0xa1, 0x9e, 0x6f, 0x4c, 0xb9, 0x5f, 0x66, 0xa0, + 0x98, 0x4c, 0xef, 0x10, 0xdc, 0xfe, 0x36, 0x49, 0x2c, 0xc2, 0x2c, 0xc3, 0xc7, 0x1e, 0x51, 0x34, + 0xca, 0x06, 0xef, 0x75, 0x83, 0x36, 0x39, 0x53, 0xaf, 0x09, 0xd9, 0xe0, 0x78, 0x5a, 0xa1, 0xd7, + 0xf5, 0x03, 0xc9, 0x54, 0xde, 0xd1, 0x4d, 0xb4, 0x0d, 0x73, 0x27, 0xa4, 0xc7, 0x77, 0x97, 0x4a, + 0xf1, 0x0b, 0x06, 0x2c, 0xf7, 0x49, 0x8f, 0xef, 0xac, 0xdc, 0x89, 0xf8, 0x35, 0x13, 0x94, 0x9f, + 0x82, 0x20, 0xfb, 0x3a, 0xe4, 0x13, 0x2f, 0xd3, 0x7d, 0x66, 0x99, 0xee, 0x33, 0xfb, 0x33, 0x0b, + 0x56, 0x53, 0xe9, 0x57, 0x47, 0xc6, 0xdb, 0xfc, 0xbd, 0x95, 0xec, 0xbe, 0xf3, 0x4f, 0x8c, 0xfe, + 0xc8, 0x94, 0x3f, 0xba, 0x69, 0xe2, 0xe7, 0xc5, 0xf1, 0xfc, 0x0c, 0xf0, 0x62, 0xff, 0x3d, 0x03, + 0x85, 0xff, 0x1f, 0x2d, 0x14, 0x61, 0xd6, 0x73, 0x7d, 0x97, 0x89, 0x27, 0x47, 0xd6, 0x91, 
0x8d, + 0x91, 0x47, 0x50, 0x7e, 0xf4, 0x11, 0x64, 0x60, 0x19, 0x8c, 0x2c, 0x33, 0x58, 0xdf, 0x25, 0x1d, + 0x37, 0x48, 0x27, 0xfc, 0xc2, 0xd4, 0xbe, 0x0b, 0x73, 0x5a, 0x98, 0x32, 0xad, 0x57, 0xc7, 0xa7, + 0x55, 0x4b, 0x53, 0x8f, 0xb2, 0xff, 0x6d, 0xc1, 0xe2, 0xad, 0xd0, 0xf7, 0x5d, 0x76, 0xf1, 0x64, + 0x9b, 0xb0, 0x94, 0x22, 0xa3, 0xe9, 0xb6, 0xc5, 0x9c, 0x0b, 0x77, 0x2f, 0x39, 0x8b, 0xa9, 0xfe, + 0x7b, 0x6d, 0xf4, 0x63, 0x58, 0xa3, 0x6e, 0xd0, 0xf1, 0x88, 0x2c, 0x86, 0x52, 0xdc, 0x67, 0xbf, + 0x06, 0xc8, 0xbb, 0x97, 0x9c, 0xa2, 0x0c, 0xc3, 0xeb, 0xa2, 0x94, 0x0a, 0xde, 0x84, 0xbc, 0xae, + 0x7a, 0xf9, 0x39, 0xce, 0x85, 0x7f, 0xd9, 0x10, 0xf1, 0x40, 0xf9, 0x38, 0x7d, 0xef, 0xdd, 0xc5, + 0x01, 0x29, 0xda, 0x1f, 0xc0, 0x92, 0x5e, 0xbc, 0xda, 0x55, 0x7b, 0xb0, 0xd2, 0x12, 0x3d, 0xcd, + 0xa4, 0x64, 0x17, 0x69, 0x18, 0x5f, 0xc8, 0x2d, 0xcb, 0x31, 0x49, 0x87, 0xed, 0xc0, 0xb2, 0x13, + 0x7a, 0xde, 0x31, 0x6e, 0x9d, 0x5c, 0x9c, 0xd7, 0xab, 0xe6, 0xbc, 0x0e, 0x65, 0x75, 0xfb, 0x4b, + 0x04, 0x73, 0x0d, 0xb9, 0x3c, 0xf4, 0x7b, 0x4e, 0x5b, 0xba, 0x00, 0x47, 0x9b, 0x86, 0x0c, 0x98, + 0x4a, 0xf4, 0xf2, 0x98, 0x32, 0xca, 0xde, 0xfb, 0xd9, 0x3f, 0xfe, 0xf9, 0x59, 0xe6, 0x5d, 0x7b, + 0x87, 0x97, 0xfb, 0x3f, 0xd5, 0x75, 0xdf, 0x3b, 0x51, 0x1c, 0xfe, 0x84, 0xb4, 0x18, 0xad, 0x55, + 0x6a, 0x6e, 0x40, 0x19, 0x0e, 0x5a, 0x84, 0x7f, 0x6b, 0x3b, 0xad, 0x55, 0x3e, 0xad, 0xe9, 0x02, + 0x6c, 0xc7, 0xaa, 0xa0, 0x5f, 0x59, 0x00, 0xfd, 0x2a, 0x14, 0xbd, 0x6a, 0x98, 0x71, 0xa4, 0x48, + 0x1d, 0x8b, 0xeb, 0xa6, 0xc0, 0xb5, 0x83, 0x6e, 0x08, 0x5c, 0xbc, 0x26, 0x9b, 0x00, 0x53, 0x02, + 0xa9, 0x56, 0xf9, 0x14, 0xfd, 0xd1, 0x82, 0x85, 0x74, 0x9d, 0x89, 0x4c, 0xe7, 0x8f, 0xa1, 0x1e, + 0x2e, 0x6f, 0x5e, 0xe8, 0x27, 0x95, 0x63, 0xef, 0x0a, 0x8c, 0x6f, 0xa3, 0x29, 0x72, 0x87, 0x7e, + 0x63, 0xc1, 0xe2, 0x40, 0x55, 0x6a, 0xa4, 0xd5, 0x54, 0xb7, 0x96, 0xd7, 0x46, 0xe4, 0xb9, 0xe7, + 0x47, 0xac, 0xa7, 0x53, 0x57, 0x99, 0x2a, 0x75, 0xd0, 0x2f, 0xd1, 0x8c, 0x6c, 0x8e, 0x54, 0x70, + 0xe5, 0xb1, 0x6f, 0x57, 0xfb, 0xa1, 0x00, 0x75, 0xdf, 0x7e, 0x5f, 0x80, 0x52, 0x93, 0x7d, 0x4d, + 0x5c, 0x3b, 0x24, 0x99, 0x94, 0x6b, 0xee, 0x2f, 0x16, 0x3c, 0xa7, 0x61, 0xb0, 0x98, 0x60, 0xdf, + 0x0d, 0x3a, 0x93, 0xc3, 0x3d, 0xf7, 0x3e, 0xc7, 0x5e, 0x1f, 0xf5, 0x87, 0x02, 0xf5, 0x91, 0x7d, + 0xf8, 0x2c, 0x50, 0xa7, 0x30, 0xee, 0x58, 0x95, 0xd7, 0x2d, 0xf4, 0x57, 0x0b, 0x96, 0x87, 0xca, + 0x03, 0xf4, 0xdd, 0x89, 0x6b, 0x9b, 0x72, 0x65, 0x12, 0x57, 0x25, 0xd5, 0xc7, 0x62, 0x21, 0x75, + 0xfb, 0xfe, 0x33, 0x58, 0x88, 0x0e, 0xce, 0x39, 0xf8, 0xb5, 0x05, 0x33, 0xfc, 0x49, 0x80, 0x5e, + 0x34, 0xb2, 0x9f, 0xbc, 0x15, 0x2e, 0x50, 0xc7, 0x7d, 0x01, 0x6f, 0xcf, 0xbe, 0x39, 0x0d, 0xbc, + 0x98, 0xe0, 0x36, 0xc7, 0xf4, 0xb9, 0x05, 0x8b, 0x49, 0xb2, 0x27, 0x02, 0x37, 0x91, 0x16, 0x8e, + 0x04, 0xc6, 0x07, 0xf6, 0xbd, 0x69, 0x30, 0xd2, 0x34, 0x2e, 0xa9, 0x82, 0x2f, 0x2c, 0x58, 0x19, + 0x7e, 0x06, 0x20, 0x13, 0xb7, 0xe7, 0xbc, 0x15, 0xca, 0x17, 0xbc, 0xe1, 0xec, 0x0f, 0x04, 0xf0, + 0x87, 0xf6, 0xfe, 0x34, 0xc0, 0x8f, 0x87, 0x26, 0xe7, 0x89, 0xfe, 0x83, 0x05, 0x39, 0x79, 0x99, + 0xa2, 0x0d, 0xd3, 0x5d, 0x94, 0x7e, 0x64, 0x94, 0x5f, 0x1e, 0xe3, 0xa1, 0x44, 0x7a, 0x20, 0x80, + 0xde, 0xb1, 0x77, 0xa7, 0x01, 0x2a, 0xef, 0x65, 0x0e, 0xef, 0x77, 0x16, 0xcc, 0xeb, 0x2b, 0x19, + 0xd9, 0x26, 0x09, 0x0c, 0xde, 0xd7, 0xe7, 0x1e, 0xa8, 0x87, 0x02, 0xd7, 0x3d, 0xfb, 0xf6, 0x54, + 0xea, 0x54, 0x93, 0x71, 0x64, 0x5f, 0x58, 0xb0, 0x34, 0x58, 0x3d, 0xa2, 0xad, 0x49, 0x0b, 0xcc, + 0xf2, 0xab, 0x63, 0x5f, 0xfc, 0x3a, 0x97, 0x8f, 0x04, 0xe6, 0x43, 0xfb, 0x87, 0xd3, 0x60, 0x8e, + 0x06, 0x00, 0x70, 
0xe4, 0x7f, 0xb6, 0x60, 0x71, 0xa0, 0x2e, 0x34, 0x5e, 0x57, 0xa6, 0xca, 0x71, + 0x42, 0xdc, 0xcf, 0x64, 0x97, 0x45, 0xe9, 0xf9, 0x77, 0xac, 0xca, 0xee, 0x6f, 0x2d, 0x78, 0xbe, + 0x15, 0xfa, 0xa3, 0x08, 0x76, 0x17, 0xd4, 0xfb, 0xaa, 0xce, 0x29, 0xaf, 0x5b, 0x1f, 0xde, 0x50, + 0x2e, 0x9d, 0xd0, 0xc3, 0x41, 0xa7, 0x1a, 0xc6, 0x9d, 0x5a, 0x87, 0x04, 0x42, 0x10, 0x35, 0x69, + 0xc2, 0x91, 0x4b, 0x53, 0xff, 0x91, 0xbc, 0xa5, 0x3e, 0xff, 0x94, 0x59, 0xbf, 0x23, 0x87, 0xde, + 0xf2, 0xc2, 0x6e, 0xbb, 0xaa, 0xe2, 0x56, 0x1f, 0x5f, 0xfb, 0x4a, 0x5b, 0x9e, 0x08, 0xcb, 0x13, + 0x65, 0x79, 0xf2, 0xf8, 0xda, 0x71, 0x4e, 0x04, 0x7e, 0xe3, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x33, 0x44, 0xce, 0x51, 0xcb, 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/spanner/v1/transaction.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/transaction.pb.go new file mode 100644 index 0000000..fcf3bec --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/transaction.pb.go @@ -0,0 +1,1101 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/transaction.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// # Transactions +// +// +// Each session can have at most one active transaction at a time. After the +// active transaction is completed, the session can immediately be +// re-used for the next transaction. It is not necessary to create a +// new session for each transaction. +// +// # Transaction Modes +// +// Cloud Spanner supports three transaction modes: +// +// 1. Locking read-write. This type of transaction is the only way +// to write data into Cloud Spanner. These transactions rely on +// pessimistic locking and, if necessary, two-phase commit. +// Locking read-write transactions may abort, requiring the +// application to retry. +// +// 2. Snapshot read-only. This transaction type provides guaranteed +// consistency across several reads, but does not allow +// writes. Snapshot read-only transactions can be configured to +// read at timestamps in the past. Snapshot read-only +// transactions do not need to be committed. +// +// 3. Partitioned DML. This type of transaction is used to execute +// a single Partitioned DML statement. Partitioned DML partitions +// the key space and runs the DML statement over each partition +// in parallel using separate, internal transactions that commit +// independently. Partitioned DML transactions do not need to be +// committed. +// +// For transactions that only read, snapshot read-only transactions +// provide simpler semantics and are almost always faster. In +// particular, read-only transactions do not take locks, so they do +// not conflict with read-write transactions. 
As a consequence of not +// taking locks, they also do not abort, so retry loops are not needed. +// +// Transactions may only read/write data in a single database. They +// may, however, read/write data in different tables within that +// database. +// +// ## Locking Read-Write Transactions +// +// Locking transactions may be used to atomically read-modify-write +// data anywhere in a database. This type of transaction is externally +// consistent. +// +// Clients should attempt to minimize the amount of time a transaction +// is active. Faster transactions commit with higher probability +// and cause less contention. Cloud Spanner attempts to keep read locks +// active as long as the transaction continues to do reads, and the +// transaction has not been terminated by +// [Commit][google.spanner.v1.Spanner.Commit] or +// [Rollback][google.spanner.v1.Spanner.Rollback]. Long periods of +// inactivity at the client may cause Cloud Spanner to release a +// transaction's locks and abort it. +// +// Conceptually, a read-write transaction consists of zero or more +// reads or SQL statements followed by +// [Commit][google.spanner.v1.Spanner.Commit]. At any time before +// [Commit][google.spanner.v1.Spanner.Commit], the client can send a +// [Rollback][google.spanner.v1.Spanner.Rollback] request to abort the +// transaction. +// +// ### Semantics +// +// Cloud Spanner can commit the transaction if all read locks it acquired +// are still valid at commit time, and it is able to acquire write +// locks for all writes. Cloud Spanner can abort the transaction for any +// reason. If a commit attempt returns `ABORTED`, Cloud Spanner guarantees +// that the transaction has not modified any user data in Cloud Spanner. +// +// Unless the transaction commits, Cloud Spanner makes no guarantees about +// how long the transaction's locks were held for. It is an error to +// use Cloud Spanner locks for any sort of mutual exclusion other than +// between Cloud Spanner transactions themselves. +// +// ### Retrying Aborted Transactions +// +// When a transaction aborts, the application can choose to retry the +// whole transaction again. To maximize the chances of successfully +// committing the retry, the client should execute the retry in the +// same session as the original attempt. The original session's lock +// priority increases with each consecutive abort, meaning that each +// attempt has a slightly better chance of success than the previous. +// +// Under some circumstances (e.g., many transactions attempting to +// modify the same row(s)), a transaction can abort many times in a +// short period before successfully committing. Thus, it is not a good +// idea to cap the number of retries a transaction can attempt; +// instead, it is better to limit the total amount of wall time spent +// retrying. +// +// ### Idle Transactions +// +// A transaction is considered idle if it has no outstanding reads or +// SQL queries and has not started a read or SQL query within the last 10 +// seconds. Idle transactions can be aborted by Cloud Spanner so that they +// don't hold on to locks indefinitely. In that case, the commit will +// fail with error `ABORTED`. +// +// If this behavior is undesirable, periodically executing a simple +// SQL query in the transaction (e.g., `SELECT 1`) prevents the +// transaction from becoming idle. 
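A minimal sketch of how the locking read-write mode described above maps onto the generated types defined later in this file. The wrapping function and package are hypothetical; only TransactionOptions, TransactionSelector, and their oneof wrapper types come from this generated package.

// Hypothetical consumer package; not part of the vendored file.
package spannerexample

import (
	spannerpb "google.golang.org/genproto/googleapis/spanner/v1"
)

// beginReadWriteSelector builds a TransactionSelector that asks Cloud
// Spanner to begin a new locking read-write transaction. ReadWrite
// currently carries no options, so an empty message is enough.
func beginReadWriteSelector() *spannerpb.TransactionSelector {
	return &spannerpb.TransactionSelector{
		Selector: &spannerpb.TransactionSelector_Begin{
			Begin: &spannerpb.TransactionOptions{
				Mode: &spannerpb.TransactionOptions_ReadWrite_{
					ReadWrite: &spannerpb.TransactionOptions_ReadWrite{},
				},
			},
		},
	}
}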
+// +// ## Snapshot Read-Only Transactions +// +// Snapshot read-only transactions provides a simpler method than +// locking read-write transactions for doing several consistent +// reads. However, this type of transaction does not support writes. +// +// Snapshot transactions do not take locks. Instead, they work by +// choosing a Cloud Spanner timestamp, then executing all reads at that +// timestamp. Since they do not acquire locks, they do not block +// concurrent read-write transactions. +// +// Unlike locking read-write transactions, snapshot read-only +// transactions never abort. They can fail if the chosen read +// timestamp is garbage collected; however, the default garbage +// collection policy is generous enough that most applications do not +// need to worry about this in practice. +// +// Snapshot read-only transactions do not need to call +// [Commit][google.spanner.v1.Spanner.Commit] or +// [Rollback][google.spanner.v1.Spanner.Rollback] (and in fact are not +// permitted to do so). +// +// To execute a snapshot transaction, the client specifies a timestamp +// bound, which tells Cloud Spanner how to choose a read timestamp. +// +// The types of timestamp bound are: +// +// - Strong (the default). +// - Bounded staleness. +// - Exact staleness. +// +// If the Cloud Spanner database to be read is geographically distributed, +// stale read-only transactions can execute more quickly than strong +// or read-write transaction, because they are able to execute far +// from the leader replica. +// +// Each type of timestamp bound is discussed in detail below. +// +// ### Strong +// +// Strong reads are guaranteed to see the effects of all transactions +// that have committed before the start of the read. Furthermore, all +// rows yielded by a single read are consistent with each other -- if +// any part of the read observes a transaction, all parts of the read +// see the transaction. +// +// Strong reads are not repeatable: two consecutive strong read-only +// transactions might return inconsistent results if there are +// concurrent writes. If consistency across reads is required, the +// reads should be executed within a transaction or at an exact read +// timestamp. +// +// See +// [TransactionOptions.ReadOnly.strong][google.spanner.v1.TransactionOptions.ReadOnly.strong]. +// +// ### Exact Staleness +// +// These timestamp bounds execute reads at a user-specified +// timestamp. Reads at a timestamp are guaranteed to see a consistent +// prefix of the global transaction history: they observe +// modifications done by all transactions with a commit timestamp <= +// the read timestamp, and observe none of the modifications done by +// transactions with a larger commit timestamp. They will block until +// all conflicting transactions that may be assigned commit timestamps +// <= the read timestamp have finished. +// +// The timestamp can either be expressed as an absolute Cloud Spanner commit +// timestamp or a staleness relative to the current time. +// +// These modes do not require a "negotiation phase" to pick a +// timestamp. As a result, they execute slightly faster than the +// equivalent boundedly stale concurrency modes. On the other hand, +// boundedly stale reads usually return fresher results. +// +// See +// [TransactionOptions.ReadOnly.read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.read_timestamp] +// and +// [TransactionOptions.ReadOnly.exact_staleness][google.spanner.v1.TransactionOptions.ReadOnly.exact_staleness]. 
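Strong and exact-staleness bounds correspond to the TimestampBound oneof on TransactionOptions_ReadOnly, defined later in this file. A minimal sketch under the same assumptions as the previous example (hypothetical consumer package; the 15-second staleness is an arbitrary illustrative value):

// Hypothetical consumer package; not part of the vendored file.
package spannerexample

import (
	duration "github.com/golang/protobuf/ptypes/duration"
	spannerpb "google.golang.org/genproto/googleapis/spanner/v1"
)

// singleUseExactStaleness builds a single-use read-only selector that
// reads at a timestamp exactly 15 seconds in the past and asks Cloud
// Spanner to report the read timestamp it chose.
func singleUseExactStaleness() *spannerpb.TransactionSelector {
	return &spannerpb.TransactionSelector{
		Selector: &spannerpb.TransactionSelector_SingleUse{
			SingleUse: &spannerpb.TransactionOptions{
				Mode: &spannerpb.TransactionOptions_ReadOnly_{
					ReadOnly: &spannerpb.TransactionOptions_ReadOnly{
						TimestampBound: &spannerpb.TransactionOptions_ReadOnly_ExactStaleness{
							ExactStaleness: &duration.Duration{Seconds: 15},
						},
						ReturnReadTimestamp: true,
					},
				},
			},
		},
	}
}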
+// +// ### Bounded Staleness +// +// Bounded staleness modes allow Cloud Spanner to pick the read timestamp, +// subject to a user-provided staleness bound. Cloud Spanner chooses the +// newest timestamp within the staleness bound that allows execution +// of the reads at the closest available replica without blocking. +// +// All rows yielded are consistent with each other -- if any part of +// the read observes a transaction, all parts of the read see the +// transaction. Boundedly stale reads are not repeatable: two stale +// reads, even if they use the same staleness bound, can execute at +// different timestamps and thus return inconsistent results. +// +// Boundedly stale reads execute in two phases: the first phase +// negotiates a timestamp among all replicas needed to serve the +// read. In the second phase, reads are executed at the negotiated +// timestamp. +// +// As a result of the two phase execution, bounded staleness reads are +// usually a little slower than comparable exact staleness +// reads. However, they are typically able to return fresher +// results, and are more likely to execute at the closest replica. +// +// Because the timestamp negotiation requires up-front knowledge of +// which rows will be read, it can only be used with single-use +// read-only transactions. +// +// See +// [TransactionOptions.ReadOnly.max_staleness][google.spanner.v1.TransactionOptions.ReadOnly.max_staleness] +// and +// [TransactionOptions.ReadOnly.min_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.min_read_timestamp]. +// +// ### Old Read Timestamps and Garbage Collection +// +// Cloud Spanner continuously garbage collects deleted and overwritten data +// in the background to reclaim storage space. This process is known +// as "version GC". By default, version GC reclaims versions after they +// are one hour old. Because of this, Cloud Spanner cannot perform reads +// at read timestamps more than one hour in the past. This +// restriction also applies to in-progress reads and/or SQL queries whose +// timestamp become too old while executing. Reads and SQL queries with +// too-old read timestamps fail with the error `FAILED_PRECONDITION`. +// +// ## Partitioned DML Transactions +// +// Partitioned DML transactions are used to execute DML statements with a +// different execution strategy that provides different, and often better, +// scalability properties for large, table-wide operations than DML in a +// ReadWrite transaction. Smaller scoped statements, such as an OLTP workload, +// should prefer using ReadWrite transactions. +// +// Partitioned DML partitions the keyspace and runs the DML statement on each +// partition in separate, internal transactions. These transactions commit +// automatically when complete, and run independently from one another. +// +// To reduce lock contention, this execution strategy only acquires read locks +// on rows that match the WHERE clause of the statement. Additionally, the +// smaller per-partition transactions hold locks for less time. +// +// That said, Partitioned DML is not a drop-in replacement for standard DML used +// in ReadWrite transactions. +// +// - The DML statement must be fully-partitionable. Specifically, the statement +// must be expressible as the union of many statements which each access only +// a single row of the table. +// +// - The statement is not applied atomically to all rows of the table. Rather, +// the statement is applied atomically to partitions of the table, in +// independent transactions. 
Secondary index rows are updated atomically +// with the base table rows. +// +// - Partitioned DML does not guarantee exactly-once execution semantics +// against a partition. The statement will be applied at least once to each +// partition. It is strongly recommended that the DML statement should be +// idempotent to avoid unexpected results. For instance, it is potentially +// dangerous to run a statement such as +// `UPDATE table SET column = column + 1` as it could be run multiple times +// against some rows. +// +// - The partitions are committed automatically - there is no support for +// Commit or Rollback. If the call returns an error, or if the client issuing +// the ExecuteSql call dies, it is possible that some rows had the statement +// executed on them successfully. It is also possible that statement was +// never executed against other rows. +// +// - Partitioned DML transactions may only contain the execution of a single +// DML statement via ExecuteSql or ExecuteStreamingSql. +// +// - If any error is encountered during the execution of the partitioned DML +// operation (for instance, a UNIQUE INDEX violation, division by zero, or a +// value that cannot be stored due to schema constraints), then the +// operation is stopped at that point and an error is returned. It is +// possible that at this point, some partitions have been committed (or even +// committed multiple times), and other partitions have not been run at all. +// +// Given the above, Partitioned DML is good fit for large, database-wide, +// operations that are idempotent, such as deleting old rows from a very large +// table. +type TransactionOptions struct { + // Required. The type of transaction. + // + // Types that are valid to be assigned to Mode: + // *TransactionOptions_ReadWrite_ + // *TransactionOptions_PartitionedDml_ + // *TransactionOptions_ReadOnly_ + Mode isTransactionOptions_Mode `protobuf_oneof:"mode"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions) Reset() { *m = TransactionOptions{} } +func (m *TransactionOptions) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions) ProtoMessage() {} +func (*TransactionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{0} +} +func (m *TransactionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions.Unmarshal(m, b) +} +func (m *TransactionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions.Merge(dst, src) +} +func (m *TransactionOptions) XXX_Size() int { + return xxx_messageInfo_TransactionOptions.Size(m) +} +func (m *TransactionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions proto.InternalMessageInfo + +type isTransactionOptions_Mode interface { + isTransactionOptions_Mode() +} + +type TransactionOptions_ReadWrite_ struct { + ReadWrite *TransactionOptions_ReadWrite `protobuf:"bytes,1,opt,name=read_write,json=readWrite,proto3,oneof"` +} + +type TransactionOptions_PartitionedDml_ struct { + PartitionedDml *TransactionOptions_PartitionedDml `protobuf:"bytes,3,opt,name=partitioned_dml,json=partitionedDml,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_ struct { + ReadOnly 
*TransactionOptions_ReadOnly `protobuf:"bytes,2,opt,name=read_only,json=readOnly,proto3,oneof"` +} + +func (*TransactionOptions_ReadWrite_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_PartitionedDml_) isTransactionOptions_Mode() {} + +func (*TransactionOptions_ReadOnly_) isTransactionOptions_Mode() {} + +func (m *TransactionOptions) GetMode() isTransactionOptions_Mode { + if m != nil { + return m.Mode + } + return nil +} + +func (m *TransactionOptions) GetReadWrite() *TransactionOptions_ReadWrite { + if x, ok := m.GetMode().(*TransactionOptions_ReadWrite_); ok { + return x.ReadWrite + } + return nil +} + +func (m *TransactionOptions) GetPartitionedDml() *TransactionOptions_PartitionedDml { + if x, ok := m.GetMode().(*TransactionOptions_PartitionedDml_); ok { + return x.PartitionedDml + } + return nil +} + +func (m *TransactionOptions) GetReadOnly() *TransactionOptions_ReadOnly { + if x, ok := m.GetMode().(*TransactionOptions_ReadOnly_); ok { + return x.ReadOnly + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*TransactionOptions) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_OneofMarshaler, _TransactionOptions_OneofUnmarshaler, _TransactionOptions_OneofSizer, []interface{}{ + (*TransactionOptions_ReadWrite_)(nil), + (*TransactionOptions_PartitionedDml_)(nil), + (*TransactionOptions_ReadOnly_)(nil), + } +} + +func _TransactionOptions_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadWrite); err != nil { + return err + } + case *TransactionOptions_PartitionedDml_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.PartitionedDml); err != nil { + return err + } + case *TransactionOptions_ReadOnly_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadOnly); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions.Mode has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions) + switch tag { + case 1: // mode.read_write + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadWrite) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadWrite_{msg} + return true, err + case 3: // mode.partitioned_dml + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_PartitionedDml) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_PartitionedDml_{msg} + return true, err + case 2: // mode.read_only + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions_ReadOnly) + err := b.DecodeMessage(msg) + m.Mode = &TransactionOptions_ReadOnly_{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions) + // mode + switch x := m.Mode.(type) { + case *TransactionOptions_ReadWrite_: + s := proto.Size(x.ReadWrite) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case 
*TransactionOptions_PartitionedDml_: + s := proto.Size(x.PartitionedDml) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_: + s := proto.Size(x.ReadOnly) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Message type to initiate a read-write transaction. Currently this +// transaction type has no options. +type TransactionOptions_ReadWrite struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadWrite) Reset() { *m = TransactionOptions_ReadWrite{} } +func (m *TransactionOptions_ReadWrite) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadWrite) ProtoMessage() {} +func (*TransactionOptions_ReadWrite) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{0, 0} +} +func (m *TransactionOptions_ReadWrite) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadWrite.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadWrite) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadWrite.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadWrite) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadWrite.Merge(dst, src) +} +func (m *TransactionOptions_ReadWrite) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadWrite.Size(m) +} +func (m *TransactionOptions_ReadWrite) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadWrite.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadWrite proto.InternalMessageInfo + +// Message type to initiate a Partitioned DML transaction. +type TransactionOptions_PartitionedDml struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_PartitionedDml) Reset() { *m = TransactionOptions_PartitionedDml{} } +func (m *TransactionOptions_PartitionedDml) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_PartitionedDml) ProtoMessage() {} +func (*TransactionOptions_PartitionedDml) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{0, 1} +} +func (m *TransactionOptions_PartitionedDml) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_PartitionedDml.Unmarshal(m, b) +} +func (m *TransactionOptions_PartitionedDml) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_PartitionedDml.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_PartitionedDml) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_PartitionedDml.Merge(dst, src) +} +func (m *TransactionOptions_PartitionedDml) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_PartitionedDml.Size(m) +} +func (m *TransactionOptions_PartitionedDml) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_PartitionedDml.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_PartitionedDml proto.InternalMessageInfo + +// Message type to initiate a read-only transaction. +type TransactionOptions_ReadOnly struct { + // How to choose the timestamp for the read-only transaction. 
+ // + // Types that are valid to be assigned to TimestampBound: + // *TransactionOptions_ReadOnly_Strong + // *TransactionOptions_ReadOnly_MinReadTimestamp + // *TransactionOptions_ReadOnly_MaxStaleness + // *TransactionOptions_ReadOnly_ReadTimestamp + // *TransactionOptions_ReadOnly_ExactStaleness + TimestampBound isTransactionOptions_ReadOnly_TimestampBound `protobuf_oneof:"timestamp_bound"` + // If true, the Cloud Spanner-selected read timestamp is included in + // the [Transaction][google.spanner.v1.Transaction] message that describes + // the transaction. + ReturnReadTimestamp bool `protobuf:"varint,6,opt,name=return_read_timestamp,json=returnReadTimestamp,proto3" json:"return_read_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionOptions_ReadOnly) Reset() { *m = TransactionOptions_ReadOnly{} } +func (m *TransactionOptions_ReadOnly) String() string { return proto.CompactTextString(m) } +func (*TransactionOptions_ReadOnly) ProtoMessage() {} +func (*TransactionOptions_ReadOnly) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{0, 2} +} +func (m *TransactionOptions_ReadOnly) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionOptions_ReadOnly.Unmarshal(m, b) +} +func (m *TransactionOptions_ReadOnly) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionOptions_ReadOnly.Marshal(b, m, deterministic) +} +func (dst *TransactionOptions_ReadOnly) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionOptions_ReadOnly.Merge(dst, src) +} +func (m *TransactionOptions_ReadOnly) XXX_Size() int { + return xxx_messageInfo_TransactionOptions_ReadOnly.Size(m) +} +func (m *TransactionOptions_ReadOnly) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionOptions_ReadOnly.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionOptions_ReadOnly proto.InternalMessageInfo + +type isTransactionOptions_ReadOnly_TimestampBound interface { + isTransactionOptions_ReadOnly_TimestampBound() +} + +type TransactionOptions_ReadOnly_Strong struct { + Strong bool `protobuf:"varint,1,opt,name=strong,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_MinReadTimestamp struct { + MinReadTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=min_read_timestamp,json=minReadTimestamp,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_MaxStaleness struct { + MaxStaleness *duration.Duration `protobuf:"bytes,3,opt,name=max_staleness,json=maxStaleness,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_ReadTimestamp struct { + ReadTimestamp *timestamp.Timestamp `protobuf:"bytes,4,opt,name=read_timestamp,json=readTimestamp,proto3,oneof"` +} + +type TransactionOptions_ReadOnly_ExactStaleness struct { + ExactStaleness *duration.Duration `protobuf:"bytes,5,opt,name=exact_staleness,json=exactStaleness,proto3,oneof"` +} + +func (*TransactionOptions_ReadOnly_Strong) isTransactionOptions_ReadOnly_TimestampBound() {} + +func (*TransactionOptions_ReadOnly_MinReadTimestamp) isTransactionOptions_ReadOnly_TimestampBound() {} + +func (*TransactionOptions_ReadOnly_MaxStaleness) isTransactionOptions_ReadOnly_TimestampBound() {} + +func (*TransactionOptions_ReadOnly_ReadTimestamp) isTransactionOptions_ReadOnly_TimestampBound() {} + +func (*TransactionOptions_ReadOnly_ExactStaleness) isTransactionOptions_ReadOnly_TimestampBound() {} + +func (m *TransactionOptions_ReadOnly) GetTimestampBound() 
isTransactionOptions_ReadOnly_TimestampBound { + if m != nil { + return m.TimestampBound + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetStrong() bool { + if x, ok := m.GetTimestampBound().(*TransactionOptions_ReadOnly_Strong); ok { + return x.Strong + } + return false +} + +func (m *TransactionOptions_ReadOnly) GetMinReadTimestamp() *timestamp.Timestamp { + if x, ok := m.GetTimestampBound().(*TransactionOptions_ReadOnly_MinReadTimestamp); ok { + return x.MinReadTimestamp + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetMaxStaleness() *duration.Duration { + if x, ok := m.GetTimestampBound().(*TransactionOptions_ReadOnly_MaxStaleness); ok { + return x.MaxStaleness + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetReadTimestamp() *timestamp.Timestamp { + if x, ok := m.GetTimestampBound().(*TransactionOptions_ReadOnly_ReadTimestamp); ok { + return x.ReadTimestamp + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetExactStaleness() *duration.Duration { + if x, ok := m.GetTimestampBound().(*TransactionOptions_ReadOnly_ExactStaleness); ok { + return x.ExactStaleness + } + return nil +} + +func (m *TransactionOptions_ReadOnly) GetReturnReadTimestamp() bool { + if m != nil { + return m.ReturnReadTimestamp + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*TransactionOptions_ReadOnly) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionOptions_ReadOnly_OneofMarshaler, _TransactionOptions_ReadOnly_OneofUnmarshaler, _TransactionOptions_ReadOnly_OneofSizer, []interface{}{ + (*TransactionOptions_ReadOnly_Strong)(nil), + (*TransactionOptions_ReadOnly_MinReadTimestamp)(nil), + (*TransactionOptions_ReadOnly_MaxStaleness)(nil), + (*TransactionOptions_ReadOnly_ReadTimestamp)(nil), + (*TransactionOptions_ReadOnly_ExactStaleness)(nil), + } +} + +func _TransactionOptions_ReadOnly_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionOptions_ReadOnly) + // timestamp_bound + switch x := m.TimestampBound.(type) { + case *TransactionOptions_ReadOnly_Strong: + t := uint64(0) + if x.Strong { + t = 1 + } + b.EncodeVarint(1<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *TransactionOptions_ReadOnly_MinReadTimestamp: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MinReadTimestamp); err != nil { + return err + } + case *TransactionOptions_ReadOnly_MaxStaleness: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.MaxStaleness); err != nil { + return err + } + case *TransactionOptions_ReadOnly_ReadTimestamp: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ReadTimestamp); err != nil { + return err + } + case *TransactionOptions_ReadOnly_ExactStaleness: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ExactStaleness); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionOptions_ReadOnly.TimestampBound has unexpected type %T", x) + } + return nil +} + +func _TransactionOptions_ReadOnly_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionOptions_ReadOnly) + switch tag { + case 1: // timestamp_bound.strong + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.TimestampBound = 
&TransactionOptions_ReadOnly_Strong{x != 0} + return true, err + case 2: // timestamp_bound.min_read_timestamp + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.TimestampBound = &TransactionOptions_ReadOnly_MinReadTimestamp{msg} + return true, err + case 3: // timestamp_bound.max_staleness + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.TimestampBound = &TransactionOptions_ReadOnly_MaxStaleness{msg} + return true, err + case 4: // timestamp_bound.read_timestamp + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(timestamp.Timestamp) + err := b.DecodeMessage(msg) + m.TimestampBound = &TransactionOptions_ReadOnly_ReadTimestamp{msg} + return true, err + case 5: // timestamp_bound.exact_staleness + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(duration.Duration) + err := b.DecodeMessage(msg) + m.TimestampBound = &TransactionOptions_ReadOnly_ExactStaleness{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionOptions_ReadOnly_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionOptions_ReadOnly) + // timestamp_bound + switch x := m.TimestampBound.(type) { + case *TransactionOptions_ReadOnly_Strong: + n += 1 // tag and wire + n += 1 + case *TransactionOptions_ReadOnly_MinReadTimestamp: + s := proto.Size(x.MinReadTimestamp) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_MaxStaleness: + s := proto.Size(x.MaxStaleness) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_ReadTimestamp: + s := proto.Size(x.ReadTimestamp) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionOptions_ReadOnly_ExactStaleness: + s := proto.Size(x.ExactStaleness) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// A transaction. +type Transaction struct { + // `id` may be used to identify the transaction in subsequent + // [Read][google.spanner.v1.Spanner.Read], + // [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql], + // [Commit][google.spanner.v1.Spanner.Commit], or + // [Rollback][google.spanner.v1.Spanner.Rollback] calls. + // + // Single-use read-only transactions do not have IDs, because + // single-use transactions do not support multiple requests. + Id []byte `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // For snapshot read-only transactions, the read timestamp chosen + // for the transaction. Not returned by default: see + // [TransactionOptions.ReadOnly.return_read_timestamp][google.spanner.v1.TransactionOptions.ReadOnly.return_read_timestamp]. + // + // A timestamp in RFC3339 UTC \"Zulu\" format, accurate to nanoseconds. + // Example: `"2014-10-02T15:01:23.045123456Z"`. 
+ ReadTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=read_timestamp,json=readTimestamp,proto3" json:"read_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Transaction) Reset() { *m = Transaction{} } +func (m *Transaction) String() string { return proto.CompactTextString(m) } +func (*Transaction) ProtoMessage() {} +func (*Transaction) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{1} +} +func (m *Transaction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Transaction.Unmarshal(m, b) +} +func (m *Transaction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Transaction.Marshal(b, m, deterministic) +} +func (dst *Transaction) XXX_Merge(src proto.Message) { + xxx_messageInfo_Transaction.Merge(dst, src) +} +func (m *Transaction) XXX_Size() int { + return xxx_messageInfo_Transaction.Size(m) +} +func (m *Transaction) XXX_DiscardUnknown() { + xxx_messageInfo_Transaction.DiscardUnknown(m) +} + +var xxx_messageInfo_Transaction proto.InternalMessageInfo + +func (m *Transaction) GetId() []byte { + if m != nil { + return m.Id + } + return nil +} + +func (m *Transaction) GetReadTimestamp() *timestamp.Timestamp { + if m != nil { + return m.ReadTimestamp + } + return nil +} + +// This message is used to select the transaction in which a +// [Read][google.spanner.v1.Spanner.Read] or +// [ExecuteSql][google.spanner.v1.Spanner.ExecuteSql] call runs. +// +// See [TransactionOptions][google.spanner.v1.TransactionOptions] for more +// information about transactions. +type TransactionSelector struct { + // If no fields are set, the default is a single use transaction + // with strong concurrency. 
+ // + // Types that are valid to be assigned to Selector: + // *TransactionSelector_SingleUse + // *TransactionSelector_Id + // *TransactionSelector_Begin + Selector isTransactionSelector_Selector `protobuf_oneof:"selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransactionSelector) Reset() { *m = TransactionSelector{} } +func (m *TransactionSelector) String() string { return proto.CompactTextString(m) } +func (*TransactionSelector) ProtoMessage() {} +func (*TransactionSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_transaction_949bbf6c9797af8e, []int{2} +} +func (m *TransactionSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransactionSelector.Unmarshal(m, b) +} +func (m *TransactionSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransactionSelector.Marshal(b, m, deterministic) +} +func (dst *TransactionSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransactionSelector.Merge(dst, src) +} +func (m *TransactionSelector) XXX_Size() int { + return xxx_messageInfo_TransactionSelector.Size(m) +} +func (m *TransactionSelector) XXX_DiscardUnknown() { + xxx_messageInfo_TransactionSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_TransactionSelector proto.InternalMessageInfo + +type isTransactionSelector_Selector interface { + isTransactionSelector_Selector() +} + +type TransactionSelector_SingleUse struct { + SingleUse *TransactionOptions `protobuf:"bytes,1,opt,name=single_use,json=singleUse,proto3,oneof"` +} + +type TransactionSelector_Id struct { + Id []byte `protobuf:"bytes,2,opt,name=id,proto3,oneof"` +} + +type TransactionSelector_Begin struct { + Begin *TransactionOptions `protobuf:"bytes,3,opt,name=begin,proto3,oneof"` +} + +func (*TransactionSelector_SingleUse) isTransactionSelector_Selector() {} + +func (*TransactionSelector_Id) isTransactionSelector_Selector() {} + +func (*TransactionSelector_Begin) isTransactionSelector_Selector() {} + +func (m *TransactionSelector) GetSelector() isTransactionSelector_Selector { + if m != nil { + return m.Selector + } + return nil +} + +func (m *TransactionSelector) GetSingleUse() *TransactionOptions { + if x, ok := m.GetSelector().(*TransactionSelector_SingleUse); ok { + return x.SingleUse + } + return nil +} + +func (m *TransactionSelector) GetId() []byte { + if x, ok := m.GetSelector().(*TransactionSelector_Id); ok { + return x.Id + } + return nil +} + +func (m *TransactionSelector) GetBegin() *TransactionOptions { + if x, ok := m.GetSelector().(*TransactionSelector_Begin); ok { + return x.Begin + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransactionSelector) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransactionSelector_OneofMarshaler, _TransactionSelector_OneofUnmarshaler, _TransactionSelector_OneofSizer, []interface{}{ + (*TransactionSelector_SingleUse)(nil), + (*TransactionSelector_Id)(nil), + (*TransactionSelector_Begin)(nil), + } +} + +func _TransactionSelector_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransactionSelector) + // selector + switch x := m.Selector.(type) { + case *TransactionSelector_SingleUse: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SingleUse); err != nil { + return err + } + case *TransactionSelector_Id: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Id) + case *TransactionSelector_Begin: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Begin); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransactionSelector.Selector has unexpected type %T", x) + } + return nil +} + +func _TransactionSelector_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransactionSelector) + switch tag { + case 1: // selector.single_use + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.Selector = &TransactionSelector_SingleUse{msg} + return true, err + case 2: // selector.id + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Selector = &TransactionSelector_Id{x} + return true, err + case 3: // selector.begin + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TransactionOptions) + err := b.DecodeMessage(msg) + m.Selector = &TransactionSelector_Begin{msg} + return true, err + default: + return false, nil + } +} + +func _TransactionSelector_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransactionSelector) + // selector + switch x := m.Selector.(type) { + case *TransactionSelector_SingleUse: + s := proto.Size(x.SingleUse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransactionSelector_Id: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Id))) + n += len(x.Id) + case *TransactionSelector_Begin: + s := proto.Size(x.Begin) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*TransactionOptions)(nil), "google.spanner.v1.TransactionOptions") + proto.RegisterType((*TransactionOptions_ReadWrite)(nil), "google.spanner.v1.TransactionOptions.ReadWrite") + proto.RegisterType((*TransactionOptions_PartitionedDml)(nil), "google.spanner.v1.TransactionOptions.PartitionedDml") + proto.RegisterType((*TransactionOptions_ReadOnly)(nil), "google.spanner.v1.TransactionOptions.ReadOnly") + proto.RegisterType((*Transaction)(nil), "google.spanner.v1.Transaction") + proto.RegisterType((*TransactionSelector)(nil), "google.spanner.v1.TransactionSelector") +} + +func init() { + proto.RegisterFile("google/spanner/v1/transaction.proto", fileDescriptor_transaction_949bbf6c9797af8e) +} + +var fileDescriptor_transaction_949bbf6c9797af8e = []byte{ + // 573 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x94, 0xdf, 0x6e, 0xd3, 0x3e, + 0x14, 0xc7, 0xd3, 0x6e, 0xab, 0xba, 0xd3, 0xae, 0xed, 0x3c, 0x4d, 0xbf, 0xfe, 0x22, 0x04, 0xa8, + 0x08, 0x89, 0xab, 0x44, 0x1d, 0x5c, 0x20, 0x21, 0x24, 0xe8, 0x2a, 0x88, 0x90, 0xd0, 0xaa, 0x74, + 0x0c, 0x09, 0x55, 0x0a, 0x6e, 0x63, 0x22, 0x4b, 0x89, 0x1d, 0xd9, 0xce, 0xe8, 0xee, 0x79, 0x09, + 0x5e, 0x81, 0x47, 0xe0, 0x9a, 0x2b, 0x9e, 0x0a, 0xc5, 0x71, 0xfa, 0x2f, 0x17, 0xeb, 0x5d, 0xec, + 0x73, 0xbe, 0xdf, 0xf3, 0xf1, 0x39, 0x76, 0xe0, 0x49, 0xc4, 0x79, 0x14, 0x13, 0x57, 0xa6, 0x98, + 0x31, 0x22, 0xdc, 0xdb, 0xa1, 0xab, 0x04, 0x66, 0x12, 0x2f, 0x14, 0xe5, 0xcc, 0x49, 0x05, 0x57, + 0x1c, 0x9d, 0x16, 0x49, 0x8e, 0x49, 0x72, 0x6e, 0x87, 0xf6, 0x03, 0xa3, 0xc3, 0x29, 0x75, 0x31, + 0x63, 0x5c, 0xe1, 0x3c, 0x5f, 0x16, 0x02, 0xfb, 0xa1, 0x89, 0xea, 0xd5, 0x3c, 0xfb, 0xe6, 0x86, + 0x99, 0xc0, 0x6b, 0x43, 0xfb, 0xd1, 0x6e, 0x5c, 0xd1, 0x84, 0x48, 0x85, 0x93, 0xb4, 0x48, 0x18, + 0xfc, 0x39, 0x02, 0x74, 0xbd, 0xe6, 0xb8, 0x4a, 0xb5, 0x3b, 0x9a, 0x00, 0x08, 0x82, 0xc3, 0xe0, + 0xbb, 0xa0, 0x8a, 0xf4, 0x6b, 0x8f, 0x6b, 0xcf, 0x5a, 0x17, 0xae, 0x53, 0xa1, 0x73, 0xaa, 0x52, + 0xc7, 0x27, 0x38, 0xfc, 0x9c, 0xcb, 0x3c, 0xcb, 0x3f, 0x16, 0xe5, 0x02, 0x05, 0xd0, 0x4d, 0xb1, + 0x50, 0x34, 0x4f, 0x22, 0x61, 0x10, 0x26, 0x71, 0xff, 0x40, 0xdb, 0xbe, 0xd8, 0xcf, 0x76, 0xb2, + 0x16, 0x8f, 0x93, 0xd8, 0xb3, 0xfc, 0x4e, 0xba, 0xb5, 0x83, 0x3e, 0x82, 0xae, 0x16, 0x70, 0x16, + 0xdf, 0xf5, 0xeb, 0xda, 0xda, 0xd9, 0x9f, 0xf8, 0x8a, 0xc5, 0x77, 0x9e, 0xe5, 0x37, 0x85, 0xf9, + 0xb6, 0x5b, 0x70, 0xbc, 0x3a, 0x89, 0xdd, 0x83, 0xce, 0x76, 0x7d, 0xfb, 0xc7, 0x01, 0x34, 0x4b, + 0x1d, 0xea, 0x43, 0x43, 0x2a, 0xc1, 0x59, 0xa4, 0x3b, 0xd5, 0xf4, 0x2c, 0xdf, 0xac, 0xd1, 0x07, + 0x40, 0x09, 0x65, 0x81, 0x06, 0x5b, 0xb5, 0xde, 0xd0, 0xd9, 0x25, 0x5d, 0x39, 0x1c, 0xe7, 0xba, + 0xcc, 0xf0, 0x2c, 0xbf, 0x97, 0x50, 0x96, 0x17, 0x58, 0xed, 0xa1, 0x37, 0x70, 0x92, 0xe0, 0x65, + 0x20, 0x15, 0x8e, 0x09, 0x23, 0x52, 0x9a, 0xfe, 0xfd, 0x5f, 0xb1, 0x19, 0x9b, 0x3b, 0xe0, 0x59, + 0x7e, 0x3b, 0xc1, 0xcb, 0x69, 0x29, 0x40, 0x97, 0xd0, 0xd9, 0x21, 0x39, 0xdc, 0x83, 0xe4, 0x44, + 0x6c, 0x61, 0x8c, 0xa1, 0x4b, 0x96, 0x78, 0xa1, 0x36, 0x40, 0x8e, 0xee, 0x07, 0xe9, 0x68, 0xcd, + 0x1a, 0xe5, 0x02, 0xce, 0x05, 0x51, 0x99, 0xa8, 0xf4, 0xa6, 0x91, 0x77, 0xd0, 0x3f, 0x2b, 0x82, + 0x5b, 0x0d, 0x18, 0x9d, 0x42, 0x77, 0x95, 0x17, 0xcc, 0x79, 0xc6, 0xc2, 0x51, 0x03, 0x0e, 0x13, + 0x1e, 0x92, 0xc1, 0x57, 0x68, 0x6d, 0x0c, 0x16, 0x75, 0xa0, 0x4e, 0x43, 0x3d, 0x8c, 0xb6, 0x5f, + 0xa7, 0x21, 0x7a, 0x5b, 0x39, 0xf8, 0xbd, 0x23, 0xd8, 0x39, 0xf6, 0xe0, 0x77, 0x0d, 0xce, 0x36, + 0x4a, 0x4c, 0x49, 0x4c, 0x16, 0x8a, 0x0b, 0xf4, 0x0e, 0x40, 0x52, 0x16, 0xc5, 0x24, 0xc8, 0x64, + 0xf9, 0x52, 0x9e, 0xee, 0x75, 0xef, 0xf2, 0xf7, 0x51, 0x48, 0x3f, 0x49, 0x82, 0x7a, 0x1a, 0x39, + 0xc7, 0x6a, 0x7b, 0x96, 0x86, 0x7e, 0x0d, 0x47, 0x73, 0x12, 0x51, 0x66, 0xe6, 0xbc, 0xb7, 0x69, + 0xa1, 0x1a, 0x01, 0x34, 0xa5, 0x81, 0x1c, 0xfd, 0xac, 0xc1, 0xf9, 0x82, 0x27, 0x55, 0x87, 0x51, + 0x6f, 0xc3, 0x62, 0x92, 0x37, 0x61, 0x52, 0xfb, 0xf2, 0xd2, 0xa4, 0x45, 0x3c, 0xc6, 0x2c, 0x72, + 0xb8, 0x88, 0xdc, 0x88, 0x30, 0xdd, 0x22, 0xb7, 0x08, 0xe1, 0x94, 0xca, 0x8d, 0x3f, 0xd9, 0x2b, + 0xf3, 0xf9, 0xab, 0xfe, 0xdf, 0xfb, 0x42, 0x7a, 0x19, 0xf3, 0x2c, 0x74, 0xa6, 0xa6, 0xce, 0xcd, + 0xf0, 0x6f, 0x19, 0x99, 0xe9, 0xc8, 0xcc, 0x44, 0x66, 0x37, 0xc3, 0x79, 0x43, 0x1b, 0x3f, 0xff, + 0x17, 0x00, 0x00, 0xff, 0xff, 0x81, 0xd7, 0x1c, 0x8e, 0x21, 0x05, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/genproto/googleapis/spanner/v1/type.pb.go b/vendor/google.golang.org/genproto/googleapis/spanner/v1/type.pb.go new file mode 100644 index 0000000..674cfa9 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/spanner/v1/type.pb.go @@ -0,0 +1,301 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/spanner/v1/type.proto + +package spanner // import "google.golang.org/genproto/googleapis/spanner/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `TypeCode` is used as part of [Type][google.spanner.v1.Type] to +// indicate the type of a Cloud Spanner value. +// +// Each legal value of a type can be encoded to or decoded from a JSON +// value, using the encodings described below. All Cloud Spanner values can +// be `null`, regardless of type; `null`s are always encoded as a JSON +// `null`. +type TypeCode int32 + +const ( + // Not specified. + TypeCode_TYPE_CODE_UNSPECIFIED TypeCode = 0 + // Encoded as JSON `true` or `false`. + TypeCode_BOOL TypeCode = 1 + // Encoded as `string`, in decimal format. + TypeCode_INT64 TypeCode = 2 + // Encoded as `number`, or the strings `"NaN"`, `"Infinity"`, or + // `"-Infinity"`. + TypeCode_FLOAT64 TypeCode = 3 + // Encoded as `string` in RFC 3339 timestamp format. The time zone + // must be present, and must be `"Z"`. + // + // If the schema has the column option + // `allow_commit_timestamp=true`, the placeholder string + // `"spanner.commit_timestamp()"` can be used to instruct the system + // to insert the commit timestamp associated with the transaction + // commit. + TypeCode_TIMESTAMP TypeCode = 4 + // Encoded as `string` in RFC 3339 date format. + TypeCode_DATE TypeCode = 5 + // Encoded as `string`. + TypeCode_STRING TypeCode = 6 + // Encoded as a base64-encoded `string`, as described in RFC 4648, + // section 4. + TypeCode_BYTES TypeCode = 7 + // Encoded as `list`, where the list elements are represented + // according to + // [array_element_type][google.spanner.v1.Type.array_element_type]. + TypeCode_ARRAY TypeCode = 8 + // Encoded as `list`, where list element `i` is represented according + // to [struct_type.fields[i]][google.spanner.v1.StructType.fields]. + TypeCode_STRUCT TypeCode = 9 +) + +var TypeCode_name = map[int32]string{ + 0: "TYPE_CODE_UNSPECIFIED", + 1: "BOOL", + 2: "INT64", + 3: "FLOAT64", + 4: "TIMESTAMP", + 5: "DATE", + 6: "STRING", + 7: "BYTES", + 8: "ARRAY", + 9: "STRUCT", +} +var TypeCode_value = map[string]int32{ + "TYPE_CODE_UNSPECIFIED": 0, + "BOOL": 1, + "INT64": 2, + "FLOAT64": 3, + "TIMESTAMP": 4, + "DATE": 5, + "STRING": 6, + "BYTES": 7, + "ARRAY": 8, + "STRUCT": 9, +} + +func (x TypeCode) String() string { + return proto.EnumName(TypeCode_name, int32(x)) +} +func (TypeCode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_type_aaff9ec10660d5c1, []int{0} +} + +// `Type` indicates the type of a Cloud Spanner value, as might be stored in a +// table cell or returned from an SQL query. 
+type Type struct { + // Required. The [TypeCode][google.spanner.v1.TypeCode] for this type. + Code TypeCode `protobuf:"varint,1,opt,name=code,proto3,enum=google.spanner.v1.TypeCode" json:"code,omitempty"` + // If [code][google.spanner.v1.Type.code] == + // [ARRAY][google.spanner.v1.TypeCode.ARRAY], then `array_element_type` is the + // type of the array elements. + ArrayElementType *Type `protobuf:"bytes,2,opt,name=array_element_type,json=arrayElementType,proto3" json:"array_element_type,omitempty"` + // If [code][google.spanner.v1.Type.code] == + // [STRUCT][google.spanner.v1.TypeCode.STRUCT], then `struct_type` provides + // type information for the struct's fields. + StructType *StructType `protobuf:"bytes,3,opt,name=struct_type,json=structType,proto3" json:"struct_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type) Reset() { *m = Type{} } +func (m *Type) String() string { return proto.CompactTextString(m) } +func (*Type) ProtoMessage() {} +func (*Type) Descriptor() ([]byte, []int) { + return fileDescriptor_type_aaff9ec10660d5c1, []int{0} +} +func (m *Type) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type.Unmarshal(m, b) +} +func (m *Type) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type.Marshal(b, m, deterministic) +} +func (dst *Type) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type.Merge(dst, src) +} +func (m *Type) XXX_Size() int { + return xxx_messageInfo_Type.Size(m) +} +func (m *Type) XXX_DiscardUnknown() { + xxx_messageInfo_Type.DiscardUnknown(m) +} + +var xxx_messageInfo_Type proto.InternalMessageInfo + +func (m *Type) GetCode() TypeCode { + if m != nil { + return m.Code + } + return TypeCode_TYPE_CODE_UNSPECIFIED +} + +func (m *Type) GetArrayElementType() *Type { + if m != nil { + return m.ArrayElementType + } + return nil +} + +func (m *Type) GetStructType() *StructType { + if m != nil { + return m.StructType + } + return nil +} + +// `StructType` defines the fields of a +// [STRUCT][google.spanner.v1.TypeCode.STRUCT] type. +type StructType struct { + // The list of fields that make up this struct. Order is + // significant, because values of this struct type are represented as + // lists, where the order of field values matches the order of + // fields in the [StructType][google.spanner.v1.StructType]. In turn, the + // order of fields matches the order of columns in a read request, or the + // order of fields in the `SELECT` clause of a query. 
+ Fields []*StructType_Field `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructType) Reset() { *m = StructType{} } +func (m *StructType) String() string { return proto.CompactTextString(m) } +func (*StructType) ProtoMessage() {} +func (*StructType) Descriptor() ([]byte, []int) { + return fileDescriptor_type_aaff9ec10660d5c1, []int{1} +} +func (m *StructType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructType.Unmarshal(m, b) +} +func (m *StructType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructType.Marshal(b, m, deterministic) +} +func (dst *StructType) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructType.Merge(dst, src) +} +func (m *StructType) XXX_Size() int { + return xxx_messageInfo_StructType.Size(m) +} +func (m *StructType) XXX_DiscardUnknown() { + xxx_messageInfo_StructType.DiscardUnknown(m) +} + +var xxx_messageInfo_StructType proto.InternalMessageInfo + +func (m *StructType) GetFields() []*StructType_Field { + if m != nil { + return m.Fields + } + return nil +} + +// Message representing a single field of a struct. +type StructType_Field struct { + // The name of the field. For reads, this is the column name. For + // SQL queries, it is the column alias (e.g., `"Word"` in the + // query `"SELECT 'hello' AS Word"`), or the column name (e.g., + // `"ColName"` in the query `"SELECT ColName FROM Table"`). Some + // columns might have an empty name (e.g., !"SELECT + // UPPER(ColName)"`). Note that a query result can contain + // multiple fields with the same name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The type of the field. 
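+// A minimal sketch of the ordering rule above (identifiers and literals are
+// made up, not from the .proto): the row type of `SELECT 'hi' AS Word, 1 AS N`
+// is a StructType whose Fields mirror the SELECT columns in order:
+//
+//	rowType := &StructType{Fields: []*StructType_Field{
+//		{Name: "Word", Type: &Type{Code: TypeCode_STRING}},
+//		{Name: "N", Type: &Type{Code: TypeCode_INT64}},
+//	}}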
+ Type *Type `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StructType_Field) Reset() { *m = StructType_Field{} } +func (m *StructType_Field) String() string { return proto.CompactTextString(m) } +func (*StructType_Field) ProtoMessage() {} +func (*StructType_Field) Descriptor() ([]byte, []int) { + return fileDescriptor_type_aaff9ec10660d5c1, []int{1, 0} +} +func (m *StructType_Field) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StructType_Field.Unmarshal(m, b) +} +func (m *StructType_Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StructType_Field.Marshal(b, m, deterministic) +} +func (dst *StructType_Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_StructType_Field.Merge(dst, src) +} +func (m *StructType_Field) XXX_Size() int { + return xxx_messageInfo_StructType_Field.Size(m) +} +func (m *StructType_Field) XXX_DiscardUnknown() { + xxx_messageInfo_StructType_Field.DiscardUnknown(m) +} + +var xxx_messageInfo_StructType_Field proto.InternalMessageInfo + +func (m *StructType_Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StructType_Field) GetType() *Type { + if m != nil { + return m.Type + } + return nil +} + +func init() { + proto.RegisterType((*Type)(nil), "google.spanner.v1.Type") + proto.RegisterType((*StructType)(nil), "google.spanner.v1.StructType") + proto.RegisterType((*StructType_Field)(nil), "google.spanner.v1.StructType.Field") + proto.RegisterEnum("google.spanner.v1.TypeCode", TypeCode_name, TypeCode_value) +} + +func init() { proto.RegisterFile("google/spanner/v1/type.proto", fileDescriptor_type_aaff9ec10660d5c1) } + +var fileDescriptor_type_aaff9ec10660d5c1 = []byte{ + // 444 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xd1, 0x8a, 0xd3, 0x40, + 0x14, 0x86, 0x9d, 0x6d, 0xda, 0x6d, 0x4e, 0x51, 0xc6, 0x81, 0x65, 0xeb, 0xaa, 0x50, 0xd6, 0x9b, + 0xa2, 0x90, 0xd0, 0x2a, 0x22, 0x2c, 0x08, 0x69, 0x3a, 0x5d, 0x03, 0xbb, 0x6d, 0x48, 0x66, 0x17, + 0x2a, 0x85, 0x32, 0xb6, 0x63, 0x28, 0xa4, 0x33, 0x21, 0xc9, 0x2e, 0xf4, 0x25, 0xbc, 0xd0, 0xb7, + 0xf0, 0x21, 0x7c, 0x00, 0x9f, 0x4a, 0x66, 0x92, 0xaa, 0xb0, 0x2a, 0xde, 0x9d, 0xe4, 0xfb, 0xbf, + 0x33, 0x67, 0x86, 0x03, 0x4f, 0x12, 0xa5, 0x92, 0x54, 0xb8, 0x45, 0xc6, 0xa5, 0x14, 0xb9, 0x7b, + 0x3b, 0x70, 0xcb, 0x5d, 0x26, 0x9c, 0x2c, 0x57, 0xa5, 0x22, 0x0f, 0x2b, 0xea, 0xd4, 0xd4, 0xb9, + 0x1d, 0x9c, 0xec, 0x05, 0x9e, 0x6d, 0x5c, 0x2e, 0xa5, 0x2a, 0x79, 0xb9, 0x51, 0xb2, 0xa8, 0x84, + 0xd3, 0x6f, 0x08, 0x2c, 0xb6, 0xcb, 0x04, 0x71, 0xc1, 0x5a, 0xa9, 0xb5, 0xe8, 0xa2, 0x1e, 0xea, + 0x3f, 0x18, 0x3e, 0x76, 0xee, 0x34, 0x72, 0x74, 0xcc, 0x57, 0x6b, 0x11, 0x99, 0x20, 0xa1, 0x40, + 0x78, 0x9e, 0xf3, 0xdd, 0x52, 0xa4, 0x62, 0x2b, 0x64, 0xb9, 0xd4, 0x63, 0x74, 0x0f, 0x7a, 0xa8, + 0xdf, 0x19, 0x1e, 0xff, 0x45, 0x8f, 0xb0, 0x51, 0x68, 0x65, 0x98, 0x73, 0xdf, 0x42, 0xa7, 0x28, + 0xf3, 0x9b, 0x55, 0xed, 0x37, 0x8c, 0xff, 0xf4, 0x0f, 0x7e, 0x6c, 0x52, 0xa6, 0x0b, 0x14, 0x3f, + 0xeb, 0xd3, 0x2f, 0x08, 0xe0, 0x17, 0x22, 0x67, 0xd0, 0xfa, 0xb8, 0x11, 0xe9, 0xba, 0xe8, 0xa2, + 0x5e, 0xa3, 0xdf, 0x19, 0x3e, 0xfb, 0x67, 0x27, 0x67, 0xa2, 0xb3, 0x51, 0xad, 0x9c, 0xbc, 0x83, + 0xa6, 0xf9, 0x41, 0x08, 0x58, 0x92, 0x6f, 0xab, 0xc7, 0xb0, 0x23, 0x53, 0x93, 0x17, 0x60, 0xfd, + 0xcf, 0x0d, 0x4d, 0xe8, 0xf9, 0x27, 0x04, 0xed, 0xfd, 0x7b, 0x91, 0x47, 0x70, 
0xc4, 0xe6, 0x21, + 0x5d, 0xfa, 0xb3, 0x31, 0x5d, 0x5e, 0x4d, 0xe3, 0x90, 0xfa, 0xc1, 0x24, 0xa0, 0x63, 0x7c, 0x8f, + 0xb4, 0xc1, 0x1a, 0xcd, 0x66, 0x17, 0x18, 0x11, 0x1b, 0x9a, 0xc1, 0x94, 0xbd, 0x7e, 0x85, 0x0f, + 0x48, 0x07, 0x0e, 0x27, 0x17, 0x33, 0x4f, 0x7f, 0x34, 0xc8, 0x7d, 0xb0, 0x59, 0x70, 0x49, 0x63, + 0xe6, 0x5d, 0x86, 0xd8, 0xd2, 0xc2, 0xd8, 0x63, 0x14, 0x37, 0x09, 0x40, 0x2b, 0x66, 0x51, 0x30, + 0x3d, 0xc7, 0x2d, 0x2d, 0x8f, 0xe6, 0x8c, 0xc6, 0xf8, 0x50, 0x97, 0x5e, 0x14, 0x79, 0x73, 0xdc, + 0xae, 0x13, 0x57, 0x3e, 0xc3, 0xf6, 0xe8, 0x33, 0x82, 0xa3, 0x95, 0xda, 0xde, 0x9d, 0x7a, 0x64, + 0xeb, 0x39, 0x43, 0xbd, 0x0c, 0x21, 0x7a, 0xff, 0xa6, 0xe6, 0x89, 0x4a, 0xb9, 0x4c, 0x1c, 0x95, + 0x27, 0x6e, 0x22, 0xa4, 0x59, 0x15, 0xb7, 0x42, 0x3c, 0xdb, 0x14, 0xbf, 0x2d, 0xdf, 0x59, 0x5d, + 0x7e, 0x3d, 0x38, 0x3e, 0xaf, 0x54, 0x3f, 0x55, 0x37, 0x6b, 0x27, 0xae, 0x0f, 0xb8, 0x1e, 0x7c, + 0xdf, 0x93, 0x85, 0x21, 0x8b, 0x9a, 0x2c, 0xae, 0x07, 0x1f, 0x5a, 0xa6, 0xf1, 0xcb, 0x1f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x55, 0xc4, 0x6e, 0xd4, 0xd4, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer.pb.go b/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer.pb.go new file mode 100644 index 0000000..8d96348 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer.pb.go @@ -0,0 +1,807 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/storagetransfer/v1/transfer.proto + +package storagetransfer // import "google.golang.org/genproto/googleapis/storagetransfer/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Request passed to GetGoogleServiceAccount. +type GetGoogleServiceAccountRequest struct { + // The ID of the Google Cloud Platform Console project that the Google service + // account is associated with. + // Required. 
+ ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetGoogleServiceAccountRequest) Reset() { *m = GetGoogleServiceAccountRequest{} } +func (m *GetGoogleServiceAccountRequest) String() string { return proto.CompactTextString(m) } +func (*GetGoogleServiceAccountRequest) ProtoMessage() {} +func (*GetGoogleServiceAccountRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{0} +} +func (m *GetGoogleServiceAccountRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetGoogleServiceAccountRequest.Unmarshal(m, b) +} +func (m *GetGoogleServiceAccountRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetGoogleServiceAccountRequest.Marshal(b, m, deterministic) +} +func (dst *GetGoogleServiceAccountRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetGoogleServiceAccountRequest.Merge(dst, src) +} +func (m *GetGoogleServiceAccountRequest) XXX_Size() int { + return xxx_messageInfo_GetGoogleServiceAccountRequest.Size(m) +} +func (m *GetGoogleServiceAccountRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetGoogleServiceAccountRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetGoogleServiceAccountRequest proto.InternalMessageInfo + +func (m *GetGoogleServiceAccountRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// Request passed to CreateTransferJob. +type CreateTransferJobRequest struct { + // The job to create. + // Required. + TransferJob *TransferJob `protobuf:"bytes,1,opt,name=transfer_job,json=transferJob,proto3" json:"transfer_job,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTransferJobRequest) Reset() { *m = CreateTransferJobRequest{} } +func (m *CreateTransferJobRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTransferJobRequest) ProtoMessage() {} +func (*CreateTransferJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{1} +} +func (m *CreateTransferJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTransferJobRequest.Unmarshal(m, b) +} +func (m *CreateTransferJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTransferJobRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTransferJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTransferJobRequest.Merge(dst, src) +} +func (m *CreateTransferJobRequest) XXX_Size() int { + return xxx_messageInfo_CreateTransferJobRequest.Size(m) +} +func (m *CreateTransferJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTransferJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTransferJobRequest proto.InternalMessageInfo + +func (m *CreateTransferJobRequest) GetTransferJob() *TransferJob { + if m != nil { + return m.TransferJob + } + return nil +} + +// Request passed to UpdateTransferJob. +type UpdateTransferJobRequest struct { + // The name of job to update. + // Required. + JobName string `protobuf:"bytes,1,opt,name=job_name,json=jobName,proto3" json:"job_name,omitempty"` + // The ID of the Google Cloud Platform Console project that owns the job. + // Required. 
+ ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // The job to update. `transferJob` is expected to specify only three fields: + // `description`, `transferSpec`, and `status`. An UpdateTransferJobRequest + // that specifies other fields will be rejected with an error + // `INVALID_ARGUMENT`. + // Required. + TransferJob *TransferJob `protobuf:"bytes,3,opt,name=transfer_job,json=transferJob,proto3" json:"transfer_job,omitempty"` + // The field mask of the fields in `transferJob` that are to be updated in + // this request. Fields in `transferJob` that can be updated are: + // `description`, `transferSpec`, and `status`. To update the `transferSpec` + // of the job, a complete transfer specification has to be provided. An + // incomplete specification which misses any required fields will be rejected + // with the error `INVALID_ARGUMENT`. + UpdateTransferJobFieldMask *field_mask.FieldMask `protobuf:"bytes,4,opt,name=update_transfer_job_field_mask,json=updateTransferJobFieldMask,proto3" json:"update_transfer_job_field_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTransferJobRequest) Reset() { *m = UpdateTransferJobRequest{} } +func (m *UpdateTransferJobRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTransferJobRequest) ProtoMessage() {} +func (*UpdateTransferJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{2} +} +func (m *UpdateTransferJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTransferJobRequest.Unmarshal(m, b) +} +func (m *UpdateTransferJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTransferJobRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTransferJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTransferJobRequest.Merge(dst, src) +} +func (m *UpdateTransferJobRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTransferJobRequest.Size(m) +} +func (m *UpdateTransferJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTransferJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTransferJobRequest proto.InternalMessageInfo + +func (m *UpdateTransferJobRequest) GetJobName() string { + if m != nil { + return m.JobName + } + return "" +} + +func (m *UpdateTransferJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *UpdateTransferJobRequest) GetTransferJob() *TransferJob { + if m != nil { + return m.TransferJob + } + return nil +} + +func (m *UpdateTransferJobRequest) GetUpdateTransferJobFieldMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateTransferJobFieldMask + } + return nil +} + +// Request passed to GetTransferJob. +type GetTransferJobRequest struct { + // The job to get. + // Required. + JobName string `protobuf:"bytes,1,opt,name=job_name,json=jobName,proto3" json:"job_name,omitempty"` + // The ID of the Google Cloud Platform Console project that owns the job. + // Required. 
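+// A sketch of the update request described above (the job and project names
+// are placeholders, the mask path is an assumption, and `job` stands for an
+// already-built *TransferJob carrying only the fields to change):
+//
+//	req := &UpdateTransferJobRequest{
+//		JobName:                    "transferJobs/example-job",
+//		ProjectId:                  "my-project",
+//		TransferJob:                job,
+//		UpdateTransferJobFieldMask: &field_mask.FieldMask{Paths: []string{"status"}},
+//	}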
+ ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTransferJobRequest) Reset() { *m = GetTransferJobRequest{} } +func (m *GetTransferJobRequest) String() string { return proto.CompactTextString(m) } +func (*GetTransferJobRequest) ProtoMessage() {} +func (*GetTransferJobRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{3} +} +func (m *GetTransferJobRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTransferJobRequest.Unmarshal(m, b) +} +func (m *GetTransferJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTransferJobRequest.Marshal(b, m, deterministic) +} +func (dst *GetTransferJobRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTransferJobRequest.Merge(dst, src) +} +func (m *GetTransferJobRequest) XXX_Size() int { + return xxx_messageInfo_GetTransferJobRequest.Size(m) +} +func (m *GetTransferJobRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTransferJobRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTransferJobRequest proto.InternalMessageInfo + +func (m *GetTransferJobRequest) GetJobName() string { + if m != nil { + return m.JobName + } + return "" +} + +func (m *GetTransferJobRequest) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +// `project_id`, `job_names`, and `job_statuses` are query parameters that can +// be specified when listing transfer jobs. +type ListTransferJobsRequest struct { + // A list of query parameters specified as JSON text in the form of + // {"project_id":"my_project_id", + // "job_names":["jobid1","jobid2",...], + // "job_statuses":["status1","status2",...]}. + // Since `job_names` and `job_statuses` support multiple values, their values + // must be specified with array notation. `project_id` is required. + // `job_names` and `job_statuses` are optional. The valid values for + // `job_statuses` are case-insensitive: `ENABLED`, `DISABLED`, and `DELETED`. + Filter string `protobuf:"bytes,1,opt,name=filter,proto3" json:"filter,omitempty"` + // The list page size. The max allowed value is 256. + PageSize int32 `protobuf:"varint,4,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The list page token. 
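+// A sketch of the filter format described above (the project id is a
+// placeholder):
+//
+//	req := &ListTransferJobsRequest{
+//		Filter:   `{"project_id":"my-project","job_statuses":["ENABLED"]}`,
+//		PageSize: 100,
+//	}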
+ PageToken string `protobuf:"bytes,5,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferJobsRequest) Reset() { *m = ListTransferJobsRequest{} } +func (m *ListTransferJobsRequest) String() string { return proto.CompactTextString(m) } +func (*ListTransferJobsRequest) ProtoMessage() {} +func (*ListTransferJobsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{4} +} +func (m *ListTransferJobsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferJobsRequest.Unmarshal(m, b) +} +func (m *ListTransferJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferJobsRequest.Marshal(b, m, deterministic) +} +func (dst *ListTransferJobsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferJobsRequest.Merge(dst, src) +} +func (m *ListTransferJobsRequest) XXX_Size() int { + return xxx_messageInfo_ListTransferJobsRequest.Size(m) +} +func (m *ListTransferJobsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferJobsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferJobsRequest proto.InternalMessageInfo + +func (m *ListTransferJobsRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListTransferJobsRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListTransferJobsRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +// Response from ListTransferJobs. +type ListTransferJobsResponse struct { + // A list of transfer jobs. + TransferJobs []*TransferJob `protobuf:"bytes,1,rep,name=transfer_jobs,json=transferJobs,proto3" json:"transfer_jobs,omitempty"` + // The list next page token. 
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListTransferJobsResponse) Reset() { *m = ListTransferJobsResponse{} } +func (m *ListTransferJobsResponse) String() string { return proto.CompactTextString(m) } +func (*ListTransferJobsResponse) ProtoMessage() {} +func (*ListTransferJobsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{5} +} +func (m *ListTransferJobsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListTransferJobsResponse.Unmarshal(m, b) +} +func (m *ListTransferJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListTransferJobsResponse.Marshal(b, m, deterministic) +} +func (dst *ListTransferJobsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListTransferJobsResponse.Merge(dst, src) +} +func (m *ListTransferJobsResponse) XXX_Size() int { + return xxx_messageInfo_ListTransferJobsResponse.Size(m) +} +func (m *ListTransferJobsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListTransferJobsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListTransferJobsResponse proto.InternalMessageInfo + +func (m *ListTransferJobsResponse) GetTransferJobs() []*TransferJob { + if m != nil { + return m.TransferJobs + } + return nil +} + +func (m *ListTransferJobsResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request passed to PauseTransferOperation. +type PauseTransferOperationRequest struct { + // The name of the transfer operation. + // Required. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseTransferOperationRequest) Reset() { *m = PauseTransferOperationRequest{} } +func (m *PauseTransferOperationRequest) String() string { return proto.CompactTextString(m) } +func (*PauseTransferOperationRequest) ProtoMessage() {} +func (*PauseTransferOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{6} +} +func (m *PauseTransferOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseTransferOperationRequest.Unmarshal(m, b) +} +func (m *PauseTransferOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseTransferOperationRequest.Marshal(b, m, deterministic) +} +func (dst *PauseTransferOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseTransferOperationRequest.Merge(dst, src) +} +func (m *PauseTransferOperationRequest) XXX_Size() int { + return xxx_messageInfo_PauseTransferOperationRequest.Size(m) +} +func (m *PauseTransferOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseTransferOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseTransferOperationRequest proto.InternalMessageInfo + +func (m *PauseTransferOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Request passed to ResumeTransferOperation. +type ResumeTransferOperationRequest struct { + // The name of the transfer operation. + // Required. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeTransferOperationRequest) Reset() { *m = ResumeTransferOperationRequest{} } +func (m *ResumeTransferOperationRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeTransferOperationRequest) ProtoMessage() {} +func (*ResumeTransferOperationRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_2bfd38eb67d3a440, []int{7} +} +func (m *ResumeTransferOperationRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeTransferOperationRequest.Unmarshal(m, b) +} +func (m *ResumeTransferOperationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeTransferOperationRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeTransferOperationRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeTransferOperationRequest.Merge(dst, src) +} +func (m *ResumeTransferOperationRequest) XXX_Size() int { + return xxx_messageInfo_ResumeTransferOperationRequest.Size(m) +} +func (m *ResumeTransferOperationRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeTransferOperationRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeTransferOperationRequest proto.InternalMessageInfo + +func (m *ResumeTransferOperationRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func init() { + proto.RegisterType((*GetGoogleServiceAccountRequest)(nil), "google.storagetransfer.v1.GetGoogleServiceAccountRequest") + proto.RegisterType((*CreateTransferJobRequest)(nil), "google.storagetransfer.v1.CreateTransferJobRequest") + proto.RegisterType((*UpdateTransferJobRequest)(nil), "google.storagetransfer.v1.UpdateTransferJobRequest") + proto.RegisterType((*GetTransferJobRequest)(nil), "google.storagetransfer.v1.GetTransferJobRequest") + proto.RegisterType((*ListTransferJobsRequest)(nil), "google.storagetransfer.v1.ListTransferJobsRequest") + proto.RegisterType((*ListTransferJobsResponse)(nil), "google.storagetransfer.v1.ListTransferJobsResponse") + proto.RegisterType((*PauseTransferOperationRequest)(nil), "google.storagetransfer.v1.PauseTransferOperationRequest") + proto.RegisterType((*ResumeTransferOperationRequest)(nil), "google.storagetransfer.v1.ResumeTransferOperationRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// StorageTransferServiceClient is the client API for StorageTransferService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type StorageTransferServiceClient interface { + // Returns the Google service account that is used by Storage Transfer + // Service to access buckets in the project where transfers + // run or in other projects. Each Google service account is associated + // with one Google Cloud Platform Console project. Users + // should add this service account to the Google Cloud Storage bucket + // ACLs to grant access to Storage Transfer Service. This service + // account is created and owned by Storage Transfer Service and can + // only be used by Storage Transfer Service. 
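+// A sketch of fetching the service account email described above (assumes a
+// ctx and an already-constructed StorageTransferServiceClient named client;
+// the project id is a placeholder):
+//
+//	sa, err := client.GetGoogleServiceAccount(ctx,
+//		&GetGoogleServiceAccountRequest{ProjectId: "my-project"})
+//	if err != nil {
+//		// handle the RPC error
+//	}
+//	_ = sa.GetAccountEmail() // grant this account access on the relevant buckets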
+ GetGoogleServiceAccount(ctx context.Context, in *GetGoogleServiceAccountRequest, opts ...grpc.CallOption) (*GoogleServiceAccount, error) + // Creates a transfer job that runs periodically. + CreateTransferJob(ctx context.Context, in *CreateTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) + // Updates a transfer job. Updating a job's transfer spec does not affect + // transfer operations that are running already. Updating the scheduling + // of a job is not allowed. + UpdateTransferJob(ctx context.Context, in *UpdateTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) + // Gets a transfer job. + GetTransferJob(ctx context.Context, in *GetTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) + // Lists transfer jobs. + ListTransferJobs(ctx context.Context, in *ListTransferJobsRequest, opts ...grpc.CallOption) (*ListTransferJobsResponse, error) + // Pauses a transfer operation. + PauseTransferOperation(ctx context.Context, in *PauseTransferOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Resumes a transfer operation that is paused. + ResumeTransferOperation(ctx context.Context, in *ResumeTransferOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type storageTransferServiceClient struct { + cc *grpc.ClientConn +} + +func NewStorageTransferServiceClient(cc *grpc.ClientConn) StorageTransferServiceClient { + return &storageTransferServiceClient{cc} +} + +func (c *storageTransferServiceClient) GetGoogleServiceAccount(ctx context.Context, in *GetGoogleServiceAccountRequest, opts ...grpc.CallOption) (*GoogleServiceAccount, error) { + out := new(GoogleServiceAccount) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/GetGoogleServiceAccount", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) CreateTransferJob(ctx context.Context, in *CreateTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) { + out := new(TransferJob) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/CreateTransferJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) UpdateTransferJob(ctx context.Context, in *UpdateTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) { + out := new(TransferJob) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/UpdateTransferJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) GetTransferJob(ctx context.Context, in *GetTransferJobRequest, opts ...grpc.CallOption) (*TransferJob, error) { + out := new(TransferJob) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/GetTransferJob", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) ListTransferJobs(ctx context.Context, in *ListTransferJobsRequest, opts ...grpc.CallOption) (*ListTransferJobsResponse, error) { + out := new(ListTransferJobsResponse) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/ListTransferJobs", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) PauseTransferOperation(ctx context.Context, in *PauseTransferOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/PauseTransferOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *storageTransferServiceClient) ResumeTransferOperation(ctx context.Context, in *ResumeTransferOperationRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.storagetransfer.v1.StorageTransferService/ResumeTransferOperation", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// StorageTransferServiceServer is the server API for StorageTransferService service. +type StorageTransferServiceServer interface { + // Returns the Google service account that is used by Storage Transfer + // Service to access buckets in the project where transfers + // run or in other projects. Each Google service account is associated + // with one Google Cloud Platform Console project. Users + // should add this service account to the Google Cloud Storage bucket + // ACLs to grant access to Storage Transfer Service. This service + // account is created and owned by Storage Transfer Service and can + // only be used by Storage Transfer Service. + GetGoogleServiceAccount(context.Context, *GetGoogleServiceAccountRequest) (*GoogleServiceAccount, error) + // Creates a transfer job that runs periodically. + CreateTransferJob(context.Context, *CreateTransferJobRequest) (*TransferJob, error) + // Updates a transfer job. Updating a job's transfer spec does not affect + // transfer operations that are running already. Updating the scheduling + // of a job is not allowed. + UpdateTransferJob(context.Context, *UpdateTransferJobRequest) (*TransferJob, error) + // Gets a transfer job. + GetTransferJob(context.Context, *GetTransferJobRequest) (*TransferJob, error) + // Lists transfer jobs. + ListTransferJobs(context.Context, *ListTransferJobsRequest) (*ListTransferJobsResponse, error) + // Pauses a transfer operation. + PauseTransferOperation(context.Context, *PauseTransferOperationRequest) (*empty.Empty, error) + // Resumes a transfer operation that is paused. 
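+// A sketch of calling the client above and paging with next_page_token
+// (assumes a ctx and an authenticated *grpc.ClientConn named conn; the filter
+// value is a placeholder):
+//
+//	client := NewStorageTransferServiceClient(conn)
+//	req := &ListTransferJobsRequest{Filter: `{"project_id":"my-project"}`}
+//	for {
+//		resp, err := client.ListTransferJobs(ctx, req)
+//		if err != nil {
+//			break // handle the error
+//		}
+//		for _, job := range resp.GetTransferJobs() {
+//			_ = job // inspect each *TransferJob
+//		}
+//		if resp.GetNextPageToken() == "" {
+//			break
+//		}
+//		req.PageToken = resp.GetNextPageToken()
+//	}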
+ ResumeTransferOperation(context.Context, *ResumeTransferOperationRequest) (*empty.Empty, error) +} + +func RegisterStorageTransferServiceServer(s *grpc.Server, srv StorageTransferServiceServer) { + s.RegisterService(&_StorageTransferService_serviceDesc, srv) +} + +func _StorageTransferService_GetGoogleServiceAccount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetGoogleServiceAccountRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).GetGoogleServiceAccount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/GetGoogleServiceAccount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).GetGoogleServiceAccount(ctx, req.(*GetGoogleServiceAccountRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_CreateTransferJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTransferJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).CreateTransferJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/CreateTransferJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).CreateTransferJob(ctx, req.(*CreateTransferJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_UpdateTransferJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTransferJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).UpdateTransferJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/UpdateTransferJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).UpdateTransferJob(ctx, req.(*UpdateTransferJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_GetTransferJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTransferJobRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).GetTransferJob(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/GetTransferJob", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).GetTransferJob(ctx, req.(*GetTransferJobRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_ListTransferJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListTransferJobsRequest) + if err := dec(in); err != nil { + 
return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).ListTransferJobs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/ListTransferJobs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).ListTransferJobs(ctx, req.(*ListTransferJobsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_PauseTransferOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseTransferOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).PauseTransferOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/PauseTransferOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).PauseTransferOperation(ctx, req.(*PauseTransferOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StorageTransferService_ResumeTransferOperation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeTransferOperationRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StorageTransferServiceServer).ResumeTransferOperation(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.storagetransfer.v1.StorageTransferService/ResumeTransferOperation", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StorageTransferServiceServer).ResumeTransferOperation(ctx, req.(*ResumeTransferOperationRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _StorageTransferService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.storagetransfer.v1.StorageTransferService", + HandlerType: (*StorageTransferServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGoogleServiceAccount", + Handler: _StorageTransferService_GetGoogleServiceAccount_Handler, + }, + { + MethodName: "CreateTransferJob", + Handler: _StorageTransferService_CreateTransferJob_Handler, + }, + { + MethodName: "UpdateTransferJob", + Handler: _StorageTransferService_UpdateTransferJob_Handler, + }, + { + MethodName: "GetTransferJob", + Handler: _StorageTransferService_GetTransferJob_Handler, + }, + { + MethodName: "ListTransferJobs", + Handler: _StorageTransferService_ListTransferJobs_Handler, + }, + { + MethodName: "PauseTransferOperation", + Handler: _StorageTransferService_PauseTransferOperation_Handler, + }, + { + MethodName: "ResumeTransferOperation", + Handler: _StorageTransferService_ResumeTransferOperation_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/storagetransfer/v1/transfer.proto", +} + +func init() { + proto.RegisterFile("google/storagetransfer/v1/transfer.proto", fileDescriptor_transfer_2bfd38eb67d3a440) +} + +var fileDescriptor_transfer_2bfd38eb67d3a440 = []byte{ + // 786 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xdf, 0x4e, 0x13, 0x4f, + 0x14, 0xce, 0xf0, 0xef, 0x07, 0x03, 0xfc, 0x84, 0x49, 0x2c, 0x4b, 0x91, 0xda, 0x2c, 0x49, 
0xc5, + 0x6a, 0x76, 0x6d, 0xeb, 0x85, 0x62, 0x8c, 0x11, 0xa2, 0x88, 0x7f, 0xb1, 0x20, 0x17, 0x86, 0xb8, + 0x99, 0xb6, 0xd3, 0xcd, 0x96, 0x76, 0x67, 0xdd, 0x99, 0x25, 0x02, 0xe1, 0xc6, 0x17, 0x30, 0xd1, + 0x98, 0x98, 0x18, 0x13, 0xaf, 0xbd, 0xf7, 0x11, 0xbc, 0xf1, 0xd6, 0x57, 0xf0, 0x21, 0xf4, 0x4a, + 0xb3, 0xb3, 0xb3, 0x65, 0xd9, 0xb6, 0x0b, 0xa8, 0x77, 0x3b, 0x33, 0xe7, 0x9c, 0xef, 0x3b, 0xe7, + 0xf0, 0x7d, 0x14, 0xce, 0x9b, 0x94, 0x9a, 0x4d, 0xa2, 0x33, 0x4e, 0x5d, 0x6c, 0x12, 0xee, 0x62, + 0x9b, 0xd5, 0x89, 0xab, 0x6f, 0x17, 0xf4, 0xf0, 0x5b, 0x73, 0x5c, 0xca, 0x29, 0x9a, 0x0e, 0x22, + 0xb5, 0x58, 0xa4, 0xb6, 0x5d, 0x48, 0x9f, 0x91, 0x45, 0xb0, 0x63, 0xe9, 0xd8, 0xb6, 0x29, 0xc7, + 0xdc, 0xa2, 0x36, 0x0b, 0x12, 0xd3, 0x33, 0xf2, 0x55, 0x9c, 0x2a, 0x5e, 0x5d, 0x27, 0x2d, 0x87, + 0xef, 0xc8, 0xc7, 0x6c, 0xfc, 0xb1, 0x6e, 0x91, 0x66, 0xcd, 0x68, 0x61, 0xb6, 0x25, 0x23, 0xb4, + 0xa3, 0x19, 0x1a, 0x7c, 0xc7, 0x21, 0x12, 0x4e, 0xbd, 0x01, 0x33, 0xcb, 0x84, 0x2f, 0x8b, 0xa4, + 0x35, 0xe2, 0x6e, 0x5b, 0x55, 0x72, 0xb3, 0x5a, 0xa5, 0x9e, 0xcd, 0xcb, 0xe4, 0xb9, 0x47, 0x18, + 0x47, 0xb3, 0x10, 0x3a, 0x2e, 0x6d, 0x90, 0x2a, 0x37, 0xac, 0x9a, 0x02, 0xb2, 0x60, 0x7e, 0xa4, + 0x3c, 0x22, 0x6f, 0x56, 0x6a, 0x2a, 0x81, 0xca, 0x92, 0x4b, 0x30, 0x27, 0xeb, 0xb2, 0xfc, 0x5d, + 0x5a, 0x09, 0x53, 0x57, 0xe0, 0x58, 0x1b, 0xb4, 0x41, 0x2b, 0x22, 0x79, 0xb4, 0x98, 0xd3, 0x7a, + 0xce, 0x46, 0x8b, 0x16, 0x19, 0xe5, 0x07, 0x07, 0xf5, 0x17, 0x80, 0xca, 0x13, 0xa7, 0xd6, 0x1d, + 0x67, 0x1a, 0x0e, 0x37, 0x68, 0xc5, 0xb0, 0x71, 0x8b, 0x48, 0x82, 0xff, 0x35, 0x68, 0xe5, 0x21, + 0x6e, 0x91, 0x18, 0xfb, 0xbe, 0x18, 0xfb, 0x0e, 0x86, 0xfd, 0x7f, 0xcc, 0x10, 0x3d, 0x83, 0x19, + 0x4f, 0x10, 0x34, 0xa2, 0x15, 0x8d, 0x83, 0x0d, 0x29, 0x03, 0xa2, 0x78, 0xb8, 0x22, 0x2d, 0x5c, + 0xa2, 0x76, 0xdb, 0x0f, 0x79, 0x80, 0xd9, 0x56, 0x39, 0xed, 0xc5, 0x5b, 0x6c, 0xbf, 0xa9, 0x8f, + 0xe1, 0xe9, 0x65, 0xc2, 0xff, 0x65, 0xf7, 0x6a, 0x0b, 0x4e, 0xdd, 0xb7, 0x58, 0xb4, 0x26, 0x0b, + 0x8b, 0xa6, 0xe0, 0x50, 0xdd, 0x6a, 0x72, 0xe2, 0xca, 0x92, 0xf2, 0x84, 0x66, 0xe0, 0x88, 0x83, + 0x4d, 0x62, 0x30, 0x6b, 0x97, 0x88, 0x86, 0x06, 0xcb, 0xc3, 0xfe, 0xc5, 0x9a, 0xb5, 0x1b, 0xc0, + 0xf9, 0x8f, 0x9c, 0x6e, 0x11, 0x5b, 0x19, 0x94, 0x70, 0xd8, 0x24, 0xeb, 0xfe, 0x85, 0xfa, 0x0a, + 0x40, 0xa5, 0x13, 0x8f, 0x39, 0xd4, 0x66, 0x04, 0xdd, 0x83, 0xe3, 0xd1, 0xb9, 0x31, 0x05, 0x64, + 0xfb, 0x4f, 0xb0, 0x8a, 0xb1, 0xc8, 0x2a, 0x18, 0xca, 0xc1, 0x53, 0x36, 0x79, 0xc1, 0x8d, 0x08, + 0x9b, 0xa0, 0xf9, 0x71, 0xff, 0x7a, 0xb5, 0xcd, 0xa8, 0x04, 0x67, 0x57, 0xb1, 0xc7, 0xda, 0x03, + 0x7f, 0xe4, 0x10, 0x57, 0xa8, 0x31, 0x1c, 0x03, 0x82, 0x03, 0x91, 0xb9, 0x8a, 0x6f, 0xf5, 0x32, + 0xcc, 0x94, 0x09, 0xf3, 0x5a, 0x27, 0xca, 0x2a, 0xfe, 0x1c, 0x86, 0xa9, 0xb5, 0xa0, 0x87, 0x30, + 0x4f, 0xea, 0x0d, 0x7d, 0x06, 0x70, 0xaa, 0x87, 0x08, 0xd1, 0xd5, 0x84, 0xfe, 0x93, 0x85, 0x9b, + 0xd6, 0x93, 0x52, 0xbb, 0xe4, 0xa9, 0xda, 0xcb, 0x6f, 0xdf, 0xdf, 0xf4, 0xcd, 0xa3, 0x9c, 0xef, + 0x16, 0x66, 0x97, 0x08, 0xa6, 0xef, 0x1d, 0xfc, 0x39, 0xed, 0xa3, 0x77, 0x00, 0x4e, 0x76, 0x68, + 0x1f, 0x95, 0x12, 0x60, 0x7b, 0x39, 0x45, 0xfa, 0x98, 0x6b, 0x56, 0x73, 0x82, 0x62, 0x56, 0x9d, + 0x88, 0x1a, 0x9a, 0xbf, 0xf2, 0x85, 0x43, 0x3a, 0x46, 0xef, 0x01, 0x9c, 0xec, 0xb0, 0x8b, 0x44, + 0x6a, 0xbd, 0xcc, 0xe5, 0xd8, 0xd4, 0xce, 0x0b, 0x6a, 0x73, 0xc5, 0x8c, 0x4f, 0x6d, 0x2f, 0x54, + 0xe4, 0xf5, 0x28, 0x49, 0x3d, 0x9f, 0xdf, 0x5f, 0x00, 0x79, 0xf4, 0x1a, 0xc0, 0xff, 0x0f, 0x6b, + 0x19, 0x5d, 0x4a, 0xde, 0xf3, 0xdf, 0x8f, 0x0c, 0x1d, 0xc1, 0x0b, 0xbd, 0x05, 0x70, 0x22, 0xae, + 0x4e, 0x54, 0x4c, 
0x00, 0xe9, 0x61, 0x1d, 0xe9, 0xd2, 0x89, 0x72, 0x02, 0xf9, 0xab, 0x8a, 0x60, + 0x89, 0x50, 0xc7, 0x62, 0xd1, 0x07, 0x00, 0x53, 0xdd, 0x45, 0x8a, 0xae, 0x24, 0x20, 0x25, 0xea, + 0x3a, 0x9d, 0xea, 0x30, 0xe1, 0x5b, 0xfe, 0xbf, 0x59, 0xb5, 0x20, 0x68, 0x5c, 0x50, 0x85, 0x04, + 0xf6, 0x0e, 0x0d, 0xaa, 0x5d, 0x23, 0x58, 0xa3, 0xe3, 0xd7, 0xf7, 0x97, 0xf9, 0x11, 0xc0, 0xa9, + 0x1e, 0x7e, 0x90, 0xa8, 0xde, 0x64, 0x0f, 0xe9, 0xc9, 0xb0, 0x28, 0x18, 0x5e, 0x54, 0xcf, 0x1d, + 0xc9, 0xd0, 0x15, 0x00, 0x0b, 0x20, 0xbf, 0xf8, 0x05, 0xc0, 0xb9, 0x2a, 0x6d, 0x25, 0x90, 0x11, + 0x20, 0x8b, 0xe3, 0x21, 0x9b, 0x55, 0xff, 0xf8, 0xf4, 0x8e, 0x8c, 0x37, 0x69, 0x13, 0xdb, 0xa6, + 0x46, 0x5d, 0x53, 0x37, 0x89, 0x2d, 0x42, 0xa5, 0x3d, 0x60, 0xc7, 0x62, 0x5d, 0x7e, 0x6a, 0x5c, + 0x8b, 0x5d, 0xfd, 0x00, 0xe0, 0x53, 0xdf, 0xd9, 0xc0, 0x73, 0xb4, 0xa5, 0x26, 0xf5, 0x6a, 0x5a, + 0xcc, 0x0a, 0xb5, 0x8d, 0xc2, 0xd7, 0x30, 0x62, 0x53, 0x44, 0x6c, 0xc6, 0x22, 0x36, 0x37, 0x0a, + 0x95, 0x21, 0x81, 0x5d, 0xfa, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x9e, 0x71, 0x04, 0x73, 0x87, 0x09, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer_types.pb.go b/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer_types.pb.go new file mode 100644 index 0000000..a91e047 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/storagetransfer/v1/transfer_types.pb.go @@ -0,0 +1,1553 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/storagetransfer/v1/transfer_types.proto + +package storagetransfer // import "google.golang.org/genproto/googleapis/storagetransfer/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import code "google.golang.org/genproto/googleapis/rpc/code" +import date "google.golang.org/genproto/googleapis/type/date" +import timeofday "google.golang.org/genproto/googleapis/type/timeofday" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The status of the transfer job. +type TransferJob_Status int32 + +const ( + // Zero is an illegal value. + TransferJob_STATUS_UNSPECIFIED TransferJob_Status = 0 + // New transfers will be performed based on the schedule. + TransferJob_ENABLED TransferJob_Status = 1 + // New transfers will not be scheduled. + TransferJob_DISABLED TransferJob_Status = 2 + // This is a soft delete state. After a transfer job is set to this + // state, the job and all the transfer executions are subject to + // garbage collection. 
+ TransferJob_DELETED TransferJob_Status = 3 +) + +var TransferJob_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "ENABLED", + 2: "DISABLED", + 3: "DELETED", +} +var TransferJob_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "ENABLED": 1, + "DISABLED": 2, + "DELETED": 3, +} + +func (x TransferJob_Status) String() string { + return proto.EnumName(TransferJob_Status_name, int32(x)) +} +func (TransferJob_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{9, 0} +} + +// The status of a TransferOperation. +type TransferOperation_Status int32 + +const ( + // Zero is an illegal value. + TransferOperation_STATUS_UNSPECIFIED TransferOperation_Status = 0 + // In progress. + TransferOperation_IN_PROGRESS TransferOperation_Status = 1 + // Paused. + TransferOperation_PAUSED TransferOperation_Status = 2 + // Completed successfully. + TransferOperation_SUCCESS TransferOperation_Status = 3 + // Terminated due to an unrecoverable failure. + TransferOperation_FAILED TransferOperation_Status = 4 + // Aborted by the user. + TransferOperation_ABORTED TransferOperation_Status = 5 +) + +var TransferOperation_Status_name = map[int32]string{ + 0: "STATUS_UNSPECIFIED", + 1: "IN_PROGRESS", + 2: "PAUSED", + 3: "SUCCESS", + 4: "FAILED", + 5: "ABORTED", +} +var TransferOperation_Status_value = map[string]int32{ + "STATUS_UNSPECIFIED": 0, + "IN_PROGRESS": 1, + "PAUSED": 2, + "SUCCESS": 3, + "FAILED": 4, + "ABORTED": 5, +} + +func (x TransferOperation_Status) String() string { + return proto.EnumName(TransferOperation_Status_name, int32(x)) +} +func (TransferOperation_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{13, 0} +} + +// Google service account +type GoogleServiceAccount struct { + // Required. + AccountEmail string `protobuf:"bytes,1,opt,name=account_email,json=accountEmail,proto3" json:"account_email,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GoogleServiceAccount) Reset() { *m = GoogleServiceAccount{} } +func (m *GoogleServiceAccount) String() string { return proto.CompactTextString(m) } +func (*GoogleServiceAccount) ProtoMessage() {} +func (*GoogleServiceAccount) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{0} +} +func (m *GoogleServiceAccount) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GoogleServiceAccount.Unmarshal(m, b) +} +func (m *GoogleServiceAccount) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GoogleServiceAccount.Marshal(b, m, deterministic) +} +func (dst *GoogleServiceAccount) XXX_Merge(src proto.Message) { + xxx_messageInfo_GoogleServiceAccount.Merge(dst, src) +} +func (m *GoogleServiceAccount) XXX_Size() int { + return xxx_messageInfo_GoogleServiceAccount.Size(m) +} +func (m *GoogleServiceAccount) XXX_DiscardUnknown() { + xxx_messageInfo_GoogleServiceAccount.DiscardUnknown(m) +} + +var xxx_messageInfo_GoogleServiceAccount proto.InternalMessageInfo + +func (m *GoogleServiceAccount) GetAccountEmail() string { + if m != nil { + return m.AccountEmail + } + return "" +} + +// AWS access key (see +// [AWS Security +// Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)). +type AwsAccessKey struct { + // AWS access key ID. + // Required. 
+ AccessKeyId string `protobuf:"bytes,1,opt,name=access_key_id,json=accessKeyId,proto3" json:"access_key_id,omitempty"` + // AWS secret access key. This field is not returned in RPC responses. + // Required. + SecretAccessKey string `protobuf:"bytes,2,opt,name=secret_access_key,json=secretAccessKey,proto3" json:"secret_access_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AwsAccessKey) Reset() { *m = AwsAccessKey{} } +func (m *AwsAccessKey) String() string { return proto.CompactTextString(m) } +func (*AwsAccessKey) ProtoMessage() {} +func (*AwsAccessKey) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{1} +} +func (m *AwsAccessKey) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AwsAccessKey.Unmarshal(m, b) +} +func (m *AwsAccessKey) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AwsAccessKey.Marshal(b, m, deterministic) +} +func (dst *AwsAccessKey) XXX_Merge(src proto.Message) { + xxx_messageInfo_AwsAccessKey.Merge(dst, src) +} +func (m *AwsAccessKey) XXX_Size() int { + return xxx_messageInfo_AwsAccessKey.Size(m) +} +func (m *AwsAccessKey) XXX_DiscardUnknown() { + xxx_messageInfo_AwsAccessKey.DiscardUnknown(m) +} + +var xxx_messageInfo_AwsAccessKey proto.InternalMessageInfo + +func (m *AwsAccessKey) GetAccessKeyId() string { + if m != nil { + return m.AccessKeyId + } + return "" +} + +func (m *AwsAccessKey) GetSecretAccessKey() string { + if m != nil { + return m.SecretAccessKey + } + return "" +} + +// Conditions that determine which objects will be transferred. +type ObjectConditions struct { + // If unspecified, `minTimeElapsedSinceLastModification` takes a zero value + // and `maxTimeElapsedSinceLastModification` takes the maximum possible + // value of Duration. Objects that satisfy the object conditions + // must either have a `lastModificationTime` greater or equal to + // `NOW` - `maxTimeElapsedSinceLastModification` and less than + // `NOW` - `minTimeElapsedSinceLastModification`, or not have a + // `lastModificationTime`. + MinTimeElapsedSinceLastModification *duration.Duration `protobuf:"bytes,1,opt,name=min_time_elapsed_since_last_modification,json=minTimeElapsedSinceLastModification,proto3" json:"min_time_elapsed_since_last_modification,omitempty"` + // `maxTimeElapsedSinceLastModification` is the complement to + // `minTimeElapsedSinceLastModification`. + MaxTimeElapsedSinceLastModification *duration.Duration `protobuf:"bytes,2,opt,name=max_time_elapsed_since_last_modification,json=maxTimeElapsedSinceLastModification,proto3" json:"max_time_elapsed_since_last_modification,omitempty"` + // If `includePrefixes` is specified, objects that satisfy the object + // conditions must have names that start with one of the `includePrefixes` + // and that do not start with any of the `excludePrefixes`. If + // `includePrefixes` is not specified, all objects except those that have + // names starting with one of the `excludePrefixes` must satisfy the object + // conditions. + // + // Requirements: + // + // * Each include-prefix and exclude-prefix can contain any sequence of + // Unicode characters, of max length 1024 bytes when UTF8-encoded, and + // must not contain Carriage Return or Line Feed characters. Wildcard + // matching and regular expression matching are not supported. + // + // * Each include-prefix and exclude-prefix must omit the leading slash. 
+ // For example, to include the `requests.gz` object in a transfer from + // `s3://my-aws-bucket/logs/y=2015/requests.gz`, specify the include + // prefix as `logs/y=2015/requests.gz`. + // + // * None of the include-prefix or the exclude-prefix values can be empty, + // if specified. + // + // * Each include-prefix must include a distinct portion of the object + // namespace, i.e., no include-prefix may be a prefix of another + // include-prefix. + // + // * Each exclude-prefix must exclude a distinct portion of the object + // namespace, i.e., no exclude-prefix may be a prefix of another + // exclude-prefix. + // + // * If `includePrefixes` is specified, then each exclude-prefix must start + // with the value of a path explicitly included by `includePrefixes`. + // + // The max size of `includePrefixes` is 1000. + IncludePrefixes []string `protobuf:"bytes,3,rep,name=include_prefixes,json=includePrefixes,proto3" json:"include_prefixes,omitempty"` + // `excludePrefixes` must follow the requirements described for + // `includePrefixes`. + // + // The max size of `excludePrefixes` is 1000. + ExcludePrefixes []string `protobuf:"bytes,4,rep,name=exclude_prefixes,json=excludePrefixes,proto3" json:"exclude_prefixes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ObjectConditions) Reset() { *m = ObjectConditions{} } +func (m *ObjectConditions) String() string { return proto.CompactTextString(m) } +func (*ObjectConditions) ProtoMessage() {} +func (*ObjectConditions) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{2} +} +func (m *ObjectConditions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ObjectConditions.Unmarshal(m, b) +} +func (m *ObjectConditions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ObjectConditions.Marshal(b, m, deterministic) +} +func (dst *ObjectConditions) XXX_Merge(src proto.Message) { + xxx_messageInfo_ObjectConditions.Merge(dst, src) +} +func (m *ObjectConditions) XXX_Size() int { + return xxx_messageInfo_ObjectConditions.Size(m) +} +func (m *ObjectConditions) XXX_DiscardUnknown() { + xxx_messageInfo_ObjectConditions.DiscardUnknown(m) +} + +var xxx_messageInfo_ObjectConditions proto.InternalMessageInfo + +func (m *ObjectConditions) GetMinTimeElapsedSinceLastModification() *duration.Duration { + if m != nil { + return m.MinTimeElapsedSinceLastModification + } + return nil +} + +func (m *ObjectConditions) GetMaxTimeElapsedSinceLastModification() *duration.Duration { + if m != nil { + return m.MaxTimeElapsedSinceLastModification + } + return nil +} + +func (m *ObjectConditions) GetIncludePrefixes() []string { + if m != nil { + return m.IncludePrefixes + } + return nil +} + +func (m *ObjectConditions) GetExcludePrefixes() []string { + if m != nil { + return m.ExcludePrefixes + } + return nil +} + +// In a GcsData, an object's name is the Google Cloud Storage object's name and +// its `lastModificationTime` refers to the object's updated time, which changes +// when the content or the metadata of the object is updated. +type GcsData struct { + // Google Cloud Storage bucket name (see + // [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + // Required. 
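+// A sketch of the ObjectConditions rules above (the prefix and the one-hour
+// cutoff are made-up values):
+//
+//	conds := &ObjectConditions{
+//		MinTimeElapsedSinceLastModification: &duration.Duration{Seconds: 3600},
+//		IncludePrefixes:                     []string{"logs/y=2015/"},
+//	}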
+ BucketName string `protobuf:"bytes,1,opt,name=bucket_name,json=bucketName,proto3" json:"bucket_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GcsData) Reset() { *m = GcsData{} } +func (m *GcsData) String() string { return proto.CompactTextString(m) } +func (*GcsData) ProtoMessage() {} +func (*GcsData) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{3} +} +func (m *GcsData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GcsData.Unmarshal(m, b) +} +func (m *GcsData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GcsData.Marshal(b, m, deterministic) +} +func (dst *GcsData) XXX_Merge(src proto.Message) { + xxx_messageInfo_GcsData.Merge(dst, src) +} +func (m *GcsData) XXX_Size() int { + return xxx_messageInfo_GcsData.Size(m) +} +func (m *GcsData) XXX_DiscardUnknown() { + xxx_messageInfo_GcsData.DiscardUnknown(m) +} + +var xxx_messageInfo_GcsData proto.InternalMessageInfo + +func (m *GcsData) GetBucketName() string { + if m != nil { + return m.BucketName + } + return "" +} + +// An AwsS3Data can be a data source, but not a data sink. +// In an AwsS3Data, an object's name is the S3 object's key name. +type AwsS3Data struct { + // S3 Bucket name (see + // [Creating a + // bucket](http://docs.aws.amazon.com/AmazonS3/latest/dev/create-bucket-get-location-example.html)). + // Required. + BucketName string `protobuf:"bytes,1,opt,name=bucket_name,json=bucketName,proto3" json:"bucket_name,omitempty"` + // AWS access key used to sign the API requests to the AWS S3 bucket. + // Permissions on the bucket must be granted to the access ID of the + // AWS access key. + // Required. + AwsAccessKey *AwsAccessKey `protobuf:"bytes,2,opt,name=aws_access_key,json=awsAccessKey,proto3" json:"aws_access_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AwsS3Data) Reset() { *m = AwsS3Data{} } +func (m *AwsS3Data) String() string { return proto.CompactTextString(m) } +func (*AwsS3Data) ProtoMessage() {} +func (*AwsS3Data) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{4} +} +func (m *AwsS3Data) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AwsS3Data.Unmarshal(m, b) +} +func (m *AwsS3Data) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AwsS3Data.Marshal(b, m, deterministic) +} +func (dst *AwsS3Data) XXX_Merge(src proto.Message) { + xxx_messageInfo_AwsS3Data.Merge(dst, src) +} +func (m *AwsS3Data) XXX_Size() int { + return xxx_messageInfo_AwsS3Data.Size(m) +} +func (m *AwsS3Data) XXX_DiscardUnknown() { + xxx_messageInfo_AwsS3Data.DiscardUnknown(m) +} + +var xxx_messageInfo_AwsS3Data proto.InternalMessageInfo + +func (m *AwsS3Data) GetBucketName() string { + if m != nil { + return m.BucketName + } + return "" +} + +func (m *AwsS3Data) GetAwsAccessKey() *AwsAccessKey { + if m != nil { + return m.AwsAccessKey + } + return nil +} + +// An HttpData specifies a list of objects on the web to be transferred over +// HTTP. The information of the objects to be transferred is contained in a +// file referenced by a URL. The first line in the file must be +// "TsvHttpData-1.0", which specifies the format of the file. Subsequent lines +// specify the information of the list of objects, one object per list entry. 
+// Each entry has the following tab-delimited fields:
+//
+// * HTTP URL - The location of the object.
+//
+// * Length - The size of the object in bytes.
+//
+// * MD5 - The base64-encoded MD5 hash of the object.
+//
+// For an example of a valid TSV file, see
+// [Transferring data from
+// URLs](https://cloud.google.com/storage/transfer/create-url-list).
+//
+// When transferring data based on a URL list, keep the following in mind:
+//
+// * When an object located at `http(s)://hostname:port/<URL-path>` is
+// transferred to a data sink, the name of the object at the data sink is
+// `<hostname>/<URL-path>`.
+//
+// * If the specified size of an object does not match the actual size of the
+// object fetched, the object will not be transferred.
+//
+// * If the specified MD5 does not match the MD5 computed from the transferred
+// bytes, the object transfer will fail. For more information, see
+// [Generating MD5 hashes](https://cloud.google.com/storage/transfer/#md5)
+//
+// * Ensure that each URL you specify is publicly accessible. For
+// example, in Google Cloud Storage you can
+// [share an object publicly]
+// (https://cloud.google.com/storage/docs/cloud-console#_sharingdata) and get
+// a link to it.
+//
+// * Storage Transfer Service obeys `robots.txt` rules and requires the source
+// HTTP server to support `Range` requests and to return a `Content-Length`
+// header in each response.
+//
+// * [ObjectConditions](#ObjectConditions) have no effect when filtering objects
+// to transfer.
+type HttpData struct {
+ // The URL that points to the file that stores the object list entries.
+ // This file must allow public access. Currently, only URLs with HTTP and
+ // HTTPS schemes are supported.
+ // Required.
+ ListUrl string `protobuf:"bytes,1,opt,name=list_url,json=listUrl,proto3" json:"list_url,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *HttpData) Reset() { *m = HttpData{} }
+func (m *HttpData) String() string { return proto.CompactTextString(m) }
+func (*HttpData) ProtoMessage() {}
+func (*HttpData) Descriptor() ([]byte, []int) {
+ return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{5}
+}
+func (m *HttpData) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_HttpData.Unmarshal(m, b)
+}
+func (m *HttpData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_HttpData.Marshal(b, m, deterministic)
+}
+func (dst *HttpData) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_HttpData.Merge(dst, src)
+}
+func (m *HttpData) XXX_Size() int {
+ return xxx_messageInfo_HttpData.Size(m)
+}
+func (m *HttpData) XXX_DiscardUnknown() {
+ xxx_messageInfo_HttpData.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_HttpData proto.InternalMessageInfo
+
+func (m *HttpData) GetListUrl() string {
+ if m != nil {
+ return m.ListUrl
+ }
+ return ""
+}
+
+// TransferOptions uses three boolean parameters to define the actions
+// to be performed on objects in a transfer.
+type TransferOptions struct {
+ // Whether overwriting objects that already exist in the sink is allowed.
+ OverwriteObjectsAlreadyExistingInSink bool `protobuf:"varint,1,opt,name=overwrite_objects_already_existing_in_sink,json=overwriteObjectsAlreadyExistingInSink,proto3" json:"overwrite_objects_already_existing_in_sink,omitempty"`
+ // Whether objects that exist only in the sink should be deleted. Note that
+ // this option and `deleteObjectsFromSourceAfterTransfer` are mutually
+ // exclusive.
+ DeleteObjectsUniqueInSink bool `protobuf:"varint,2,opt,name=delete_objects_unique_in_sink,json=deleteObjectsUniqueInSink,proto3" json:"delete_objects_unique_in_sink,omitempty"` + // Whether objects should be deleted from the source after they are + // transferred to the sink. Note that this option and + // `deleteObjectsUniqueInSink` are mutually exclusive. + DeleteObjectsFromSourceAfterTransfer bool `protobuf:"varint,3,opt,name=delete_objects_from_source_after_transfer,json=deleteObjectsFromSourceAfterTransfer,proto3" json:"delete_objects_from_source_after_transfer,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferOptions) Reset() { *m = TransferOptions{} } +func (m *TransferOptions) String() string { return proto.CompactTextString(m) } +func (*TransferOptions) ProtoMessage() {} +func (*TransferOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{6} +} +func (m *TransferOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferOptions.Unmarshal(m, b) +} +func (m *TransferOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferOptions.Marshal(b, m, deterministic) +} +func (dst *TransferOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferOptions.Merge(dst, src) +} +func (m *TransferOptions) XXX_Size() int { + return xxx_messageInfo_TransferOptions.Size(m) +} +func (m *TransferOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TransferOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferOptions proto.InternalMessageInfo + +func (m *TransferOptions) GetOverwriteObjectsAlreadyExistingInSink() bool { + if m != nil { + return m.OverwriteObjectsAlreadyExistingInSink + } + return false +} + +func (m *TransferOptions) GetDeleteObjectsUniqueInSink() bool { + if m != nil { + return m.DeleteObjectsUniqueInSink + } + return false +} + +func (m *TransferOptions) GetDeleteObjectsFromSourceAfterTransfer() bool { + if m != nil { + return m.DeleteObjectsFromSourceAfterTransfer + } + return false +} + +// Configuration for running a transfer. +type TransferSpec struct { + // The read source of the data. + // + // Types that are valid to be assigned to DataSource: + // *TransferSpec_GcsDataSource + // *TransferSpec_AwsS3DataSource + // *TransferSpec_HttpDataSource + DataSource isTransferSpec_DataSource `protobuf_oneof:"data_source"` + // The write sink for the data. + // + // Types that are valid to be assigned to DataSink: + // *TransferSpec_GcsDataSink + DataSink isTransferSpec_DataSink `protobuf_oneof:"data_sink"` + // Only objects that satisfy these object conditions are included in the set + // of data source and data sink objects. Object conditions based on + // objects' `lastModificationTime` do not exclude objects in a data sink. + ObjectConditions *ObjectConditions `protobuf:"bytes,5,opt,name=object_conditions,json=objectConditions,proto3" json:"object_conditions,omitempty"` + // If the option `deleteObjectsUniqueInSink` is `true`, object conditions + // based on objects' `lastModificationTime` are ignored and do not exclude + // objects in a data source or a data sink. 
+ TransferOptions *TransferOptions `protobuf:"bytes,6,opt,name=transfer_options,json=transferOptions,proto3" json:"transfer_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferSpec) Reset() { *m = TransferSpec{} } +func (m *TransferSpec) String() string { return proto.CompactTextString(m) } +func (*TransferSpec) ProtoMessage() {} +func (*TransferSpec) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{7} +} +func (m *TransferSpec) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferSpec.Unmarshal(m, b) +} +func (m *TransferSpec) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferSpec.Marshal(b, m, deterministic) +} +func (dst *TransferSpec) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferSpec.Merge(dst, src) +} +func (m *TransferSpec) XXX_Size() int { + return xxx_messageInfo_TransferSpec.Size(m) +} +func (m *TransferSpec) XXX_DiscardUnknown() { + xxx_messageInfo_TransferSpec.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferSpec proto.InternalMessageInfo + +type isTransferSpec_DataSource interface { + isTransferSpec_DataSource() +} + +type TransferSpec_GcsDataSource struct { + GcsDataSource *GcsData `protobuf:"bytes,1,opt,name=gcs_data_source,json=gcsDataSource,proto3,oneof"` +} + +type TransferSpec_AwsS3DataSource struct { + AwsS3DataSource *AwsS3Data `protobuf:"bytes,2,opt,name=aws_s3_data_source,json=awsS3DataSource,proto3,oneof"` +} + +type TransferSpec_HttpDataSource struct { + HttpDataSource *HttpData `protobuf:"bytes,3,opt,name=http_data_source,json=httpDataSource,proto3,oneof"` +} + +func (*TransferSpec_GcsDataSource) isTransferSpec_DataSource() {} + +func (*TransferSpec_AwsS3DataSource) isTransferSpec_DataSource() {} + +func (*TransferSpec_HttpDataSource) isTransferSpec_DataSource() {} + +func (m *TransferSpec) GetDataSource() isTransferSpec_DataSource { + if m != nil { + return m.DataSource + } + return nil +} + +func (m *TransferSpec) GetGcsDataSource() *GcsData { + if x, ok := m.GetDataSource().(*TransferSpec_GcsDataSource); ok { + return x.GcsDataSource + } + return nil +} + +func (m *TransferSpec) GetAwsS3DataSource() *AwsS3Data { + if x, ok := m.GetDataSource().(*TransferSpec_AwsS3DataSource); ok { + return x.AwsS3DataSource + } + return nil +} + +func (m *TransferSpec) GetHttpDataSource() *HttpData { + if x, ok := m.GetDataSource().(*TransferSpec_HttpDataSource); ok { + return x.HttpDataSource + } + return nil +} + +type isTransferSpec_DataSink interface { + isTransferSpec_DataSink() +} + +type TransferSpec_GcsDataSink struct { + GcsDataSink *GcsData `protobuf:"bytes,4,opt,name=gcs_data_sink,json=gcsDataSink,proto3,oneof"` +} + +func (*TransferSpec_GcsDataSink) isTransferSpec_DataSink() {} + +func (m *TransferSpec) GetDataSink() isTransferSpec_DataSink { + if m != nil { + return m.DataSink + } + return nil +} + +func (m *TransferSpec) GetGcsDataSink() *GcsData { + if x, ok := m.GetDataSink().(*TransferSpec_GcsDataSink); ok { + return x.GcsDataSink + } + return nil +} + +func (m *TransferSpec) GetObjectConditions() *ObjectConditions { + if m != nil { + return m.ObjectConditions + } + return nil +} + +func (m *TransferSpec) GetTransferOptions() *TransferOptions { + if m != nil { + return m.TransferOptions + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransferSpec) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransferSpec_OneofMarshaler, _TransferSpec_OneofUnmarshaler, _TransferSpec_OneofSizer, []interface{}{ + (*TransferSpec_GcsDataSource)(nil), + (*TransferSpec_AwsS3DataSource)(nil), + (*TransferSpec_HttpDataSource)(nil), + (*TransferSpec_GcsDataSink)(nil), + } +} + +func _TransferSpec_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransferSpec) + // data_source + switch x := m.DataSource.(type) { + case *TransferSpec_GcsDataSource: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDataSource); err != nil { + return err + } + case *TransferSpec_AwsS3DataSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AwsS3DataSource); err != nil { + return err + } + case *TransferSpec_HttpDataSource: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpDataSource); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransferSpec.DataSource has unexpected type %T", x) + } + // data_sink + switch x := m.DataSink.(type) { + case *TransferSpec_GcsDataSink: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GcsDataSink); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransferSpec.DataSink has unexpected type %T", x) + } + return nil +} + +func _TransferSpec_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransferSpec) + switch tag { + case 1: // data_source.gcs_data_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsData) + err := b.DecodeMessage(msg) + m.DataSource = &TransferSpec_GcsDataSource{msg} + return true, err + case 2: // data_source.aws_s3_data_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AwsS3Data) + err := b.DecodeMessage(msg) + m.DataSource = &TransferSpec_AwsS3DataSource{msg} + return true, err + case 3: // data_source.http_data_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HttpData) + err := b.DecodeMessage(msg) + m.DataSource = &TransferSpec_HttpDataSource{msg} + return true, err + case 4: // data_sink.gcs_data_sink + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GcsData) + err := b.DecodeMessage(msg) + m.DataSink = &TransferSpec_GcsDataSink{msg} + return true, err + default: + return false, nil + } +} + +func _TransferSpec_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransferSpec) + // data_source + switch x := m.DataSource.(type) { + case *TransferSpec_GcsDataSource: + s := proto.Size(x.GcsDataSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransferSpec_AwsS3DataSource: + s := proto.Size(x.AwsS3DataSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransferSpec_HttpDataSource: + s := proto.Size(x.HttpDataSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // data_sink + switch x := m.DataSink.(type) { + case *TransferSpec_GcsDataSink: + s := proto.Size(x.GcsDataSink) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Transfers can be scheduled to recur or to run just once. +type Schedule struct { + // The first day the recurring transfer is scheduled to run. If + // `scheduleStartDate` is in the past, the transfer will run for the first + // time on the following day. + // Required. + ScheduleStartDate *date.Date `protobuf:"bytes,1,opt,name=schedule_start_date,json=scheduleStartDate,proto3" json:"schedule_start_date,omitempty"` + // The last day the recurring transfer will be run. If `scheduleEndDate` + // is the same as `scheduleStartDate`, the transfer will be executed only + // once. + ScheduleEndDate *date.Date `protobuf:"bytes,2,opt,name=schedule_end_date,json=scheduleEndDate,proto3" json:"schedule_end_date,omitempty"` + // The time in UTC at which the transfer will be scheduled to start in a day. + // Transfers may start later than this time. If not specified, recurring and + // one-time transfers that are scheduled to run today will run immediately; + // recurring transfers that are scheduled to run on a future date will start + // at approximately midnight UTC on that date. Note that when configuring a + // transfer with the Cloud Platform Console, the transfer's start time in a + // day is specified in your local timezone. + StartTimeOfDay *timeofday.TimeOfDay `protobuf:"bytes,3,opt,name=start_time_of_day,json=startTimeOfDay,proto3" json:"start_time_of_day,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Schedule) Reset() { *m = Schedule{} } +func (m *Schedule) String() string { return proto.CompactTextString(m) } +func (*Schedule) ProtoMessage() {} +func (*Schedule) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{8} +} +func (m *Schedule) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Schedule.Unmarshal(m, b) +} +func (m *Schedule) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Schedule.Marshal(b, m, deterministic) +} +func (dst *Schedule) XXX_Merge(src proto.Message) { + xxx_messageInfo_Schedule.Merge(dst, src) +} +func (m *Schedule) XXX_Size() int { + return xxx_messageInfo_Schedule.Size(m) +} +func (m *Schedule) XXX_DiscardUnknown() { + xxx_messageInfo_Schedule.DiscardUnknown(m) +} + +var xxx_messageInfo_Schedule proto.InternalMessageInfo + +func (m *Schedule) GetScheduleStartDate() *date.Date { + if m != nil { + return m.ScheduleStartDate + } + return nil +} + +func (m *Schedule) GetScheduleEndDate() *date.Date { + if m != nil { + return m.ScheduleEndDate + } + return nil +} + +func (m *Schedule) GetStartTimeOfDay() *timeofday.TimeOfDay { + if m != nil { + return m.StartTimeOfDay + } + return nil +} + +// This resource represents the configuration of a transfer job that runs +// periodically. +type TransferJob struct { + // A globally unique name assigned by Storage Transfer Service when the + // job is created. This field should be left empty in requests to create a new + // transfer job; otherwise, the requests result in an `INVALID_ARGUMENT` + // error. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A description provided by the user for the job. Its max length is 1024 + // bytes when Unicode-encoded. + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + // The ID of the Google Cloud Platform Console project that owns the job. 
+ ProjectId string `protobuf:"bytes,3,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Transfer specification. + TransferSpec *TransferSpec `protobuf:"bytes,4,opt,name=transfer_spec,json=transferSpec,proto3" json:"transfer_spec,omitempty"` + // Schedule specification. + Schedule *Schedule `protobuf:"bytes,5,opt,name=schedule,proto3" json:"schedule,omitempty"` + // Status of the job. This value MUST be specified for + // `CreateTransferJobRequests`. + // + // NOTE: The effect of the new job status takes place during a subsequent job + // run. For example, if you change the job status from `ENABLED` to + // `DISABLED`, and an operation spawned by the transfer is running, the status + // change would not affect the current operation. + Status TransferJob_Status `protobuf:"varint,6,opt,name=status,proto3,enum=google.storagetransfer.v1.TransferJob_Status" json:"status,omitempty"` + // This field cannot be changed by user requests. + CreationTime *timestamp.Timestamp `protobuf:"bytes,7,opt,name=creation_time,json=creationTime,proto3" json:"creation_time,omitempty"` + // This field cannot be changed by user requests. + LastModificationTime *timestamp.Timestamp `protobuf:"bytes,8,opt,name=last_modification_time,json=lastModificationTime,proto3" json:"last_modification_time,omitempty"` + // This field cannot be changed by user requests. + DeletionTime *timestamp.Timestamp `protobuf:"bytes,9,opt,name=deletion_time,json=deletionTime,proto3" json:"deletion_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferJob) Reset() { *m = TransferJob{} } +func (m *TransferJob) String() string { return proto.CompactTextString(m) } +func (*TransferJob) ProtoMessage() {} +func (*TransferJob) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{9} +} +func (m *TransferJob) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferJob.Unmarshal(m, b) +} +func (m *TransferJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferJob.Marshal(b, m, deterministic) +} +func (dst *TransferJob) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferJob.Merge(dst, src) +} +func (m *TransferJob) XXX_Size() int { + return xxx_messageInfo_TransferJob.Size(m) +} +func (m *TransferJob) XXX_DiscardUnknown() { + xxx_messageInfo_TransferJob.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferJob proto.InternalMessageInfo + +func (m *TransferJob) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TransferJob) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TransferJob) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *TransferJob) GetTransferSpec() *TransferSpec { + if m != nil { + return m.TransferSpec + } + return nil +} + +func (m *TransferJob) GetSchedule() *Schedule { + if m != nil { + return m.Schedule + } + return nil +} + +func (m *TransferJob) GetStatus() TransferJob_Status { + if m != nil { + return m.Status + } + return TransferJob_STATUS_UNSPECIFIED +} + +func (m *TransferJob) GetCreationTime() *timestamp.Timestamp { + if m != nil { + return m.CreationTime + } + return nil +} + +func (m *TransferJob) GetLastModificationTime() *timestamp.Timestamp { + if m != nil { + return m.LastModificationTime + } + return nil +} + +func (m *TransferJob) GetDeletionTime() *timestamp.Timestamp { + if m != 
nil { + return m.DeletionTime + } + return nil +} + +// An entry describing an error that has occurred. +type ErrorLogEntry struct { + // A URL that refers to the target (a data source, a data sink, + // or an object) with which the error is associated. + // Required. + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + // A list of messages that carry the error details. + ErrorDetails []string `protobuf:"bytes,3,rep,name=error_details,json=errorDetails,proto3" json:"error_details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorLogEntry) Reset() { *m = ErrorLogEntry{} } +func (m *ErrorLogEntry) String() string { return proto.CompactTextString(m) } +func (*ErrorLogEntry) ProtoMessage() {} +func (*ErrorLogEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{10} +} +func (m *ErrorLogEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorLogEntry.Unmarshal(m, b) +} +func (m *ErrorLogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorLogEntry.Marshal(b, m, deterministic) +} +func (dst *ErrorLogEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorLogEntry.Merge(dst, src) +} +func (m *ErrorLogEntry) XXX_Size() int { + return xxx_messageInfo_ErrorLogEntry.Size(m) +} +func (m *ErrorLogEntry) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorLogEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorLogEntry proto.InternalMessageInfo + +func (m *ErrorLogEntry) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *ErrorLogEntry) GetErrorDetails() []string { + if m != nil { + return m.ErrorDetails + } + return nil +} + +// A summary of errors by error code, plus a count and sample error log +// entries. +type ErrorSummary struct { + // Required. + ErrorCode code.Code `protobuf:"varint,1,opt,name=error_code,json=errorCode,proto3,enum=google.rpc.Code" json:"error_code,omitempty"` + // Count of this type of error. + // Required. + ErrorCount int64 `protobuf:"varint,2,opt,name=error_count,json=errorCount,proto3" json:"error_count,omitempty"` + // Error samples. 
+ ErrorLogEntries []*ErrorLogEntry `protobuf:"bytes,3,rep,name=error_log_entries,json=errorLogEntries,proto3" json:"error_log_entries,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorSummary) Reset() { *m = ErrorSummary{} } +func (m *ErrorSummary) String() string { return proto.CompactTextString(m) } +func (*ErrorSummary) ProtoMessage() {} +func (*ErrorSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{11} +} +func (m *ErrorSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorSummary.Unmarshal(m, b) +} +func (m *ErrorSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorSummary.Marshal(b, m, deterministic) +} +func (dst *ErrorSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorSummary.Merge(dst, src) +} +func (m *ErrorSummary) XXX_Size() int { + return xxx_messageInfo_ErrorSummary.Size(m) +} +func (m *ErrorSummary) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorSummary proto.InternalMessageInfo + +func (m *ErrorSummary) GetErrorCode() code.Code { + if m != nil { + return m.ErrorCode + } + return code.Code_OK +} + +func (m *ErrorSummary) GetErrorCount() int64 { + if m != nil { + return m.ErrorCount + } + return 0 +} + +func (m *ErrorSummary) GetErrorLogEntries() []*ErrorLogEntry { + if m != nil { + return m.ErrorLogEntries + } + return nil +} + +// A collection of counters that report the progress of a transfer operation. +type TransferCounters struct { + // Objects found in the data source that are scheduled to be transferred, + // excluding any that are filtered based on object conditions or skipped due + // to sync. + ObjectsFoundFromSource int64 `protobuf:"varint,1,opt,name=objects_found_from_source,json=objectsFoundFromSource,proto3" json:"objects_found_from_source,omitempty"` + // Bytes found in the data source that are scheduled to be transferred, + // excluding any that are filtered based on object conditions or skipped due + // to sync. + BytesFoundFromSource int64 `protobuf:"varint,2,opt,name=bytes_found_from_source,json=bytesFoundFromSource,proto3" json:"bytes_found_from_source,omitempty"` + // Objects found only in the data sink that are scheduled to be deleted. + ObjectsFoundOnlyFromSink int64 `protobuf:"varint,3,opt,name=objects_found_only_from_sink,json=objectsFoundOnlyFromSink,proto3" json:"objects_found_only_from_sink,omitempty"` + // Bytes found only in the data sink that are scheduled to be deleted. + BytesFoundOnlyFromSink int64 `protobuf:"varint,4,opt,name=bytes_found_only_from_sink,json=bytesFoundOnlyFromSink,proto3" json:"bytes_found_only_from_sink,omitempty"` + // Objects in the data source that are not transferred because they already + // exist in the data sink. + ObjectsFromSourceSkippedBySync int64 `protobuf:"varint,5,opt,name=objects_from_source_skipped_by_sync,json=objectsFromSourceSkippedBySync,proto3" json:"objects_from_source_skipped_by_sync,omitempty"` + // Bytes in the data source that are not transferred because they already + // exist in the data sink. + BytesFromSourceSkippedBySync int64 `protobuf:"varint,6,opt,name=bytes_from_source_skipped_by_sync,json=bytesFromSourceSkippedBySync,proto3" json:"bytes_from_source_skipped_by_sync,omitempty"` + // Objects that are copied to the data sink. 
+ ObjectsCopiedToSink int64 `protobuf:"varint,7,opt,name=objects_copied_to_sink,json=objectsCopiedToSink,proto3" json:"objects_copied_to_sink,omitempty"` + // Bytes that are copied to the data sink. + BytesCopiedToSink int64 `protobuf:"varint,8,opt,name=bytes_copied_to_sink,json=bytesCopiedToSink,proto3" json:"bytes_copied_to_sink,omitempty"` + // Objects that are deleted from the data source. + ObjectsDeletedFromSource int64 `protobuf:"varint,9,opt,name=objects_deleted_from_source,json=objectsDeletedFromSource,proto3" json:"objects_deleted_from_source,omitempty"` + // Bytes that are deleted from the data source. + BytesDeletedFromSource int64 `protobuf:"varint,10,opt,name=bytes_deleted_from_source,json=bytesDeletedFromSource,proto3" json:"bytes_deleted_from_source,omitempty"` + // Objects that are deleted from the data sink. + ObjectsDeletedFromSink int64 `protobuf:"varint,11,opt,name=objects_deleted_from_sink,json=objectsDeletedFromSink,proto3" json:"objects_deleted_from_sink,omitempty"` + // Bytes that are deleted from the data sink. + BytesDeletedFromSink int64 `protobuf:"varint,12,opt,name=bytes_deleted_from_sink,json=bytesDeletedFromSink,proto3" json:"bytes_deleted_from_sink,omitempty"` + // Objects in the data source that failed during the transfer. + ObjectsFromSourceFailed int64 `protobuf:"varint,13,opt,name=objects_from_source_failed,json=objectsFromSourceFailed,proto3" json:"objects_from_source_failed,omitempty"` + // Bytes in the data source that failed during the transfer. + BytesFromSourceFailed int64 `protobuf:"varint,14,opt,name=bytes_from_source_failed,json=bytesFromSourceFailed,proto3" json:"bytes_from_source_failed,omitempty"` + // Objects that failed to be deleted from the data sink. + ObjectsFailedToDeleteFromSink int64 `protobuf:"varint,15,opt,name=objects_failed_to_delete_from_sink,json=objectsFailedToDeleteFromSink,proto3" json:"objects_failed_to_delete_from_sink,omitempty"` + // Bytes that failed to be deleted from the data sink. 
+ BytesFailedToDeleteFromSink int64 `protobuf:"varint,16,opt,name=bytes_failed_to_delete_from_sink,json=bytesFailedToDeleteFromSink,proto3" json:"bytes_failed_to_delete_from_sink,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferCounters) Reset() { *m = TransferCounters{} } +func (m *TransferCounters) String() string { return proto.CompactTextString(m) } +func (*TransferCounters) ProtoMessage() {} +func (*TransferCounters) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{12} +} +func (m *TransferCounters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferCounters.Unmarshal(m, b) +} +func (m *TransferCounters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferCounters.Marshal(b, m, deterministic) +} +func (dst *TransferCounters) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferCounters.Merge(dst, src) +} +func (m *TransferCounters) XXX_Size() int { + return xxx_messageInfo_TransferCounters.Size(m) +} +func (m *TransferCounters) XXX_DiscardUnknown() { + xxx_messageInfo_TransferCounters.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferCounters proto.InternalMessageInfo + +func (m *TransferCounters) GetObjectsFoundFromSource() int64 { + if m != nil { + return m.ObjectsFoundFromSource + } + return 0 +} + +func (m *TransferCounters) GetBytesFoundFromSource() int64 { + if m != nil { + return m.BytesFoundFromSource + } + return 0 +} + +func (m *TransferCounters) GetObjectsFoundOnlyFromSink() int64 { + if m != nil { + return m.ObjectsFoundOnlyFromSink + } + return 0 +} + +func (m *TransferCounters) GetBytesFoundOnlyFromSink() int64 { + if m != nil { + return m.BytesFoundOnlyFromSink + } + return 0 +} + +func (m *TransferCounters) GetObjectsFromSourceSkippedBySync() int64 { + if m != nil { + return m.ObjectsFromSourceSkippedBySync + } + return 0 +} + +func (m *TransferCounters) GetBytesFromSourceSkippedBySync() int64 { + if m != nil { + return m.BytesFromSourceSkippedBySync + } + return 0 +} + +func (m *TransferCounters) GetObjectsCopiedToSink() int64 { + if m != nil { + return m.ObjectsCopiedToSink + } + return 0 +} + +func (m *TransferCounters) GetBytesCopiedToSink() int64 { + if m != nil { + return m.BytesCopiedToSink + } + return 0 +} + +func (m *TransferCounters) GetObjectsDeletedFromSource() int64 { + if m != nil { + return m.ObjectsDeletedFromSource + } + return 0 +} + +func (m *TransferCounters) GetBytesDeletedFromSource() int64 { + if m != nil { + return m.BytesDeletedFromSource + } + return 0 +} + +func (m *TransferCounters) GetObjectsDeletedFromSink() int64 { + if m != nil { + return m.ObjectsDeletedFromSink + } + return 0 +} + +func (m *TransferCounters) GetBytesDeletedFromSink() int64 { + if m != nil { + return m.BytesDeletedFromSink + } + return 0 +} + +func (m *TransferCounters) GetObjectsFromSourceFailed() int64 { + if m != nil { + return m.ObjectsFromSourceFailed + } + return 0 +} + +func (m *TransferCounters) GetBytesFromSourceFailed() int64 { + if m != nil { + return m.BytesFromSourceFailed + } + return 0 +} + +func (m *TransferCounters) GetObjectsFailedToDeleteFromSink() int64 { + if m != nil { + return m.ObjectsFailedToDeleteFromSink + } + return 0 +} + +func (m *TransferCounters) GetBytesFailedToDeleteFromSink() int64 { + if m != nil { + return m.BytesFailedToDeleteFromSink + } + return 0 +} + +// A description of the execution of a transfer. 
+type TransferOperation struct { + // A globally unique ID assigned by the system. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The ID of the Google Cloud Platform Console project that owns the + // operation. Required. + ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"` + // Transfer specification. + // Required. + TransferSpec *TransferSpec `protobuf:"bytes,3,opt,name=transfer_spec,json=transferSpec,proto3" json:"transfer_spec,omitempty"` + // Start time of this transfer execution. + StartTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + // End time of this transfer execution. + EndTime *timestamp.Timestamp `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + // Status of the transfer operation. + Status TransferOperation_Status `protobuf:"varint,6,opt,name=status,proto3,enum=google.storagetransfer.v1.TransferOperation_Status" json:"status,omitempty"` + // Information about the progress of the transfer operation. + Counters *TransferCounters `protobuf:"bytes,7,opt,name=counters,proto3" json:"counters,omitempty"` + // Summarizes errors encountered with sample error log entries. + ErrorBreakdowns []*ErrorSummary `protobuf:"bytes,8,rep,name=error_breakdowns,json=errorBreakdowns,proto3" json:"error_breakdowns,omitempty"` + // The name of the transfer job that triggers this transfer operation. + TransferJobName string `protobuf:"bytes,9,opt,name=transfer_job_name,json=transferJobName,proto3" json:"transfer_job_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransferOperation) Reset() { *m = TransferOperation{} } +func (m *TransferOperation) String() string { return proto.CompactTextString(m) } +func (*TransferOperation) ProtoMessage() {} +func (*TransferOperation) Descriptor() ([]byte, []int) { + return fileDescriptor_transfer_types_1b0f782d53ee7fbd, []int{13} +} +func (m *TransferOperation) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransferOperation.Unmarshal(m, b) +} +func (m *TransferOperation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransferOperation.Marshal(b, m, deterministic) +} +func (dst *TransferOperation) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransferOperation.Merge(dst, src) +} +func (m *TransferOperation) XXX_Size() int { + return xxx_messageInfo_TransferOperation.Size(m) +} +func (m *TransferOperation) XXX_DiscardUnknown() { + xxx_messageInfo_TransferOperation.DiscardUnknown(m) +} + +var xxx_messageInfo_TransferOperation proto.InternalMessageInfo + +func (m *TransferOperation) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TransferOperation) GetProjectId() string { + if m != nil { + return m.ProjectId + } + return "" +} + +func (m *TransferOperation) GetTransferSpec() *TransferSpec { + if m != nil { + return m.TransferSpec + } + return nil +} + +func (m *TransferOperation) GetStartTime() *timestamp.Timestamp { + if m != nil { + return m.StartTime + } + return nil +} + +func (m *TransferOperation) GetEndTime() *timestamp.Timestamp { + if m != nil { + return m.EndTime + } + return nil +} + +func (m *TransferOperation) GetStatus() TransferOperation_Status { + if m != nil { + return m.Status + } + return TransferOperation_STATUS_UNSPECIFIED +} + +func (m *TransferOperation) GetCounters() 
*TransferCounters { + if m != nil { + return m.Counters + } + return nil +} + +func (m *TransferOperation) GetErrorBreakdowns() []*ErrorSummary { + if m != nil { + return m.ErrorBreakdowns + } + return nil +} + +func (m *TransferOperation) GetTransferJobName() string { + if m != nil { + return m.TransferJobName + } + return "" +} + +func init() { + proto.RegisterType((*GoogleServiceAccount)(nil), "google.storagetransfer.v1.GoogleServiceAccount") + proto.RegisterType((*AwsAccessKey)(nil), "google.storagetransfer.v1.AwsAccessKey") + proto.RegisterType((*ObjectConditions)(nil), "google.storagetransfer.v1.ObjectConditions") + proto.RegisterType((*GcsData)(nil), "google.storagetransfer.v1.GcsData") + proto.RegisterType((*AwsS3Data)(nil), "google.storagetransfer.v1.AwsS3Data") + proto.RegisterType((*HttpData)(nil), "google.storagetransfer.v1.HttpData") + proto.RegisterType((*TransferOptions)(nil), "google.storagetransfer.v1.TransferOptions") + proto.RegisterType((*TransferSpec)(nil), "google.storagetransfer.v1.TransferSpec") + proto.RegisterType((*Schedule)(nil), "google.storagetransfer.v1.Schedule") + proto.RegisterType((*TransferJob)(nil), "google.storagetransfer.v1.TransferJob") + proto.RegisterType((*ErrorLogEntry)(nil), "google.storagetransfer.v1.ErrorLogEntry") + proto.RegisterType((*ErrorSummary)(nil), "google.storagetransfer.v1.ErrorSummary") + proto.RegisterType((*TransferCounters)(nil), "google.storagetransfer.v1.TransferCounters") + proto.RegisterType((*TransferOperation)(nil), "google.storagetransfer.v1.TransferOperation") + proto.RegisterEnum("google.storagetransfer.v1.TransferJob_Status", TransferJob_Status_name, TransferJob_Status_value) + proto.RegisterEnum("google.storagetransfer.v1.TransferOperation_Status", TransferOperation_Status_name, TransferOperation_Status_value) +} + +func init() { + proto.RegisterFile("google/storagetransfer/v1/transfer_types.proto", fileDescriptor_transfer_types_1b0f782d53ee7fbd) +} + +var fileDescriptor_transfer_types_1b0f782d53ee7fbd = []byte{ + // 1767 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x58, 0xdd, 0x6e, 0xdb, 0xc8, + 0x15, 0x8e, 0x24, 0xc7, 0x96, 0x8e, 0x24, 0x4b, 0x9e, 0xcd, 0x7a, 0x65, 0x27, 0xd9, 0xa4, 0xf4, + 0x2e, 0x92, 0xcd, 0xa2, 0x12, 0x62, 0x63, 0x51, 0xa4, 0xc1, 0x36, 0x95, 0x2d, 0xd9, 0xd6, 0xc6, + 0x1b, 0x1b, 0xa4, 0xbc, 0xfd, 0x41, 0x50, 0x62, 0x44, 0x8e, 0x14, 0xae, 0x29, 0x0e, 0xcb, 0x19, + 0xc5, 0x16, 0x7a, 0xd5, 0xab, 0x3e, 0x43, 0x5f, 0xa1, 0x0f, 0xd0, 0x37, 0xe8, 0x4d, 0x51, 0xa0, + 0x77, 0x7d, 0x8e, 0x5e, 0xf6, 0xb2, 0x98, 0x1f, 0x52, 0x14, 0x2d, 0xcb, 0x06, 0xf6, 0x8e, 0x3c, + 0xe7, 0xfb, 0xbe, 0x33, 0x33, 0xe7, 0xe8, 0x9c, 0xa1, 0xa0, 0x39, 0xa2, 0x74, 0xe4, 0x93, 0x16, + 0xe3, 0x34, 0xc2, 0x23, 0xc2, 0x23, 0x1c, 0xb0, 0x21, 0x89, 0x5a, 0x1f, 0x5f, 0xb6, 0xe2, 0x67, + 0x9b, 0x4f, 0x43, 0xc2, 0x9a, 0x61, 0x44, 0x39, 0x45, 0x5b, 0x0a, 0xdf, 0xcc, 0xe0, 0x9b, 0x1f, + 0x5f, 0x6e, 0x3f, 0xd2, 0x52, 0x38, 0xf4, 0x5a, 0x38, 0x08, 0x28, 0xc7, 0xdc, 0xa3, 0x81, 0x26, + 0x6e, 0x7f, 0xae, 0xbd, 0xf2, 0x6d, 0x30, 0x19, 0xb6, 0xdc, 0x49, 0x24, 0x01, 0xda, 0xff, 0x24, + 0xeb, 0xe7, 0xde, 0x98, 0x30, 0x8e, 0xc7, 0xa1, 0x06, 0x7c, 0xaa, 0x01, 0x51, 0xe8, 0xb4, 0x1c, + 0xea, 0x12, 0x6d, 0xde, 0xd4, 0x66, 0xb1, 0xc8, 0x96, 0x8b, 0x79, 0x6c, 0x7f, 0x98, 0xb6, 0x0b, + 0x2d, 0x3a, 0x74, 0xf1, 0x54, 0x39, 0x8d, 0xd7, 0xf0, 0xe0, 0x48, 0xba, 0x2d, 0x12, 0x7d, 0xf4, + 0x1c, 0xd2, 0x76, 0x1c, 0x3a, 0x09, 0x38, 0xda, 0x81, 0x2a, 0x56, 0x8f, 0x36, 0x19, 0x63, 0xcf, + 0x6f, 0xe4, 
0x9e, 0xe6, 0x9e, 0x97, 0xcc, 0x8a, 0x36, 0x76, 0x85, 0xcd, 0xf8, 0x03, 0x54, 0xda, + 0x97, 0xac, 0xed, 0x38, 0x84, 0xb1, 0xb7, 0x64, 0x8a, 0x0c, 0x49, 0x22, 0x8c, 0xd9, 0x17, 0x64, + 0x6a, 0x7b, 0xae, 0x26, 0x95, 0x71, 0x8c, 0xe8, 0xb9, 0xe8, 0x05, 0x6c, 0x30, 0xe2, 0x44, 0x84, + 0xdb, 0x33, 0x68, 0x23, 0x2f, 0x71, 0x35, 0xe5, 0x48, 0xf4, 0x8c, 0x7f, 0xe5, 0xa1, 0x7e, 0x3a, + 0xf8, 0x91, 0x38, 0xfc, 0x80, 0x06, 0xae, 0x27, 0x0f, 0x11, 0x85, 0xf0, 0x7c, 0xec, 0x05, 0xb6, + 0xd8, 0x88, 0x4d, 0x7c, 0x1c, 0x32, 0xe2, 0xda, 0xcc, 0x0b, 0x1c, 0x62, 0xfb, 0x98, 0x71, 0x7b, + 0x4c, 0x5d, 0x6f, 0xe8, 0x39, 0xf2, 0x40, 0x65, 0xfc, 0xf2, 0xee, 0x96, 0x4e, 0x6d, 0x33, 0x3e, + 0xd1, 0x66, 0x47, 0x9f, 0xb8, 0xb9, 0x33, 0xf6, 0x82, 0xbe, 0x37, 0x26, 0x5d, 0x25, 0x64, 0x09, + 0x9d, 0x13, 0xcc, 0xf8, 0xf7, 0x29, 0x15, 0x19, 0x11, 0x5f, 0xdd, 0x2d, 0x62, 0xfe, 0xf6, 0x88, + 0xf8, 0xea, 0xd6, 0x88, 0x5f, 0x41, 0xdd, 0x0b, 0x1c, 0x7f, 0xe2, 0x12, 0x3b, 0x8c, 0xc8, 0xd0, + 0xbb, 0x22, 0xac, 0x51, 0x78, 0x5a, 0x10, 0x67, 0xa4, 0xed, 0x67, 0xda, 0x2c, 0xa0, 0xe4, 0x2a, + 0x03, 0x5d, 0x51, 0x50, 0x6d, 0x8f, 0xa1, 0xc6, 0x0b, 0x58, 0x3b, 0x72, 0x58, 0x07, 0x73, 0x8c, + 0x9e, 0x40, 0x79, 0x30, 0x71, 0x2e, 0x08, 0xb7, 0x03, 0x3c, 0x26, 0x3a, 0x4f, 0xa0, 0x4c, 0xef, + 0xf0, 0x98, 0x18, 0x7f, 0x82, 0x52, 0xfb, 0x92, 0x59, 0x7b, 0x77, 0x42, 0xa3, 0xef, 0x61, 0x1d, + 0x5f, 0xb2, 0x6c, 0x46, 0xcb, 0xbb, 0xcf, 0x9a, 0x37, 0xfe, 0x48, 0x9a, 0xe9, 0xca, 0x31, 0x2b, + 0x38, 0xf5, 0x66, 0x7c, 0x09, 0xc5, 0x63, 0xce, 0x43, 0x19, 0x7b, 0x0b, 0x8a, 0xbe, 0xc7, 0xb8, + 0x3d, 0x89, 0xe2, 0x1a, 0x5c, 0x13, 0xef, 0xe7, 0x91, 0x6f, 0xfc, 0x25, 0x0f, 0xb5, 0xbe, 0x56, + 0x3c, 0x0d, 0x55, 0x75, 0xfc, 0x0e, 0x5e, 0xd0, 0x8f, 0x24, 0xba, 0x8c, 0x3c, 0x4e, 0x6c, 0x2a, + 0x6b, 0x87, 0xd9, 0xd8, 0x8f, 0x08, 0x76, 0xa7, 0x36, 0xb9, 0xf2, 0x18, 0xf7, 0x82, 0x91, 0xed, + 0x05, 0x22, 0x81, 0x17, 0x52, 0xb0, 0x68, 0x7e, 0x99, 0x30, 0x54, 0xb1, 0xb1, 0xb6, 0xc2, 0x77, + 0x35, 0xbc, 0x17, 0x58, 0x5e, 0x70, 0x81, 0x7e, 0x0d, 0x8f, 0x5d, 0xe2, 0x93, 0x94, 0xee, 0x24, + 0xf0, 0xfe, 0x38, 0x21, 0x89, 0x5a, 0x5e, 0xaa, 0x6d, 0x29, 0x90, 0x96, 0x3a, 0x97, 0x10, 0xad, + 0xf0, 0x1b, 0xf8, 0x2a, 0xa3, 0x30, 0x8c, 0xe8, 0xd8, 0x66, 0x74, 0x12, 0x39, 0xc4, 0xc6, 0x43, + 0x2e, 0x5a, 0x8c, 0xde, 0x50, 0xa3, 0x20, 0xd5, 0xbe, 0x98, 0x53, 0x3b, 0x8c, 0xe8, 0xd8, 0x92, + 0xe8, 0xb6, 0x00, 0xc7, 0x9b, 0x37, 0xfe, 0xba, 0x02, 0x95, 0xf8, 0xc5, 0x0a, 0x89, 0x83, 0x4e, + 0xa0, 0x36, 0x72, 0x98, 0xed, 0x62, 0x8e, 0xb5, 0xbc, 0xfe, 0x2d, 0x18, 0x4b, 0x32, 0xa2, 0x8b, + 0xe3, 0xf8, 0x9e, 0x59, 0x1d, 0xa9, 0x47, 0x15, 0x0b, 0x59, 0x80, 0x44, 0x7a, 0xd9, 0xde, 0x9c, + 0xa0, 0x4a, 0xf1, 0x17, 0xcb, 0x53, 0xac, 0x2a, 0xe8, 0xf8, 0x9e, 0x59, 0xc3, 0xf1, 0x8b, 0x16, + 0x3d, 0x85, 0xfa, 0x07, 0xce, 0xc3, 0x39, 0xc9, 0x82, 0x94, 0xdc, 0x59, 0x22, 0x19, 0xd7, 0xc5, + 0xf1, 0x3d, 0x73, 0xfd, 0x83, 0x7e, 0xd6, 0x82, 0xc7, 0x50, 0x9d, 0xed, 0x59, 0xe4, 0x63, 0xe5, + 0xce, 0x3b, 0xce, 0x99, 0xe5, 0x78, 0xc7, 0x22, 0x4f, 0xbf, 0x85, 0x0d, 0x95, 0x20, 0xdb, 0x49, + 0xfa, 0x4e, 0xe3, 0xbe, 0x54, 0xfb, 0x7a, 0x89, 0x5a, 0xb6, 0x55, 0x99, 0x75, 0x9a, 0x6d, 0x5e, + 0xe7, 0x50, 0x4f, 0x86, 0x09, 0x55, 0x25, 0xdb, 0x58, 0x95, 0xc2, 0x2f, 0x96, 0x08, 0x67, 0x8a, + 0xdc, 0xac, 0xf1, 0x79, 0xc3, 0x7e, 0x15, 0xca, 0xa9, 0x63, 0xdc, 0x2f, 0x43, 0x29, 0x39, 0x05, + 0xe3, 0xdf, 0x39, 0x28, 0x5a, 0xce, 0x07, 0xe2, 0x4e, 0x7c, 0x82, 0xda, 0xf0, 0x09, 0xd3, 0xcf, + 0x36, 0xe3, 0x38, 0xe2, 0xe2, 0xb8, 0xe2, 0xda, 0xd8, 0x88, 0x97, 0x20, 0x26, 0x45, 0xb3, 0x83, + 0x39, 0x31, 0x37, 0x62, 0xb4, 0x25, 
0xc0, 0xc2, 0x84, 0xbe, 0x85, 0xc4, 0x68, 0x93, 0xc0, 0x55, + 0x02, 0xf9, 0x9b, 0x04, 0x6a, 0x31, 0xb6, 0x1b, 0xb8, 0x92, 0xde, 0x86, 0x0d, 0x15, 0x58, 0xb6, + 0x53, 0x3a, 0xb4, 0x5d, 0x3c, 0xd5, 0x79, 0xdf, 0x9c, 0xa3, 0x8b, 0x26, 0x79, 0x3a, 0xec, 0xe0, + 0xa9, 0xb9, 0x2e, 0x09, 0xc9, 0xbb, 0xf1, 0x9f, 0x15, 0x28, 0xc7, 0x47, 0xf2, 0x1d, 0x1d, 0x20, + 0x04, 0x2b, 0xa9, 0xbe, 0x24, 0x9f, 0xd1, 0x53, 0x28, 0xbb, 0x84, 0x39, 0x91, 0x17, 0x26, 0x6d, + 0xb9, 0x64, 0xa6, 0x4d, 0xe8, 0x31, 0x40, 0x18, 0x51, 0x99, 0x65, 0xcf, 0x95, 0x2b, 0x28, 0x99, + 0x25, 0x6d, 0xe9, 0xb9, 0xe8, 0x04, 0xaa, 0x49, 0xa6, 0x58, 0x48, 0x1c, 0x5d, 0x4d, 0xcf, 0xee, + 0x90, 0x26, 0xf1, 0x0b, 0x34, 0x2b, 0x3c, 0xfd, 0x7b, 0x7c, 0x03, 0xc5, 0xf8, 0x20, 0x74, 0x21, + 0x2d, 0x2b, 0xf2, 0x38, 0x5d, 0x66, 0x42, 0x42, 0x5d, 0x58, 0x65, 0x1c, 0xf3, 0x89, 0x2a, 0x97, + 0xf5, 0xdd, 0x9f, 0xdf, 0x61, 0x1d, 0xdf, 0xd1, 0x41, 0xd3, 0x92, 0x24, 0x53, 0x93, 0xd1, 0x1b, + 0xa8, 0x3a, 0x11, 0x91, 0x43, 0x46, 0x26, 0xa0, 0xb1, 0x26, 0x17, 0xb3, 0x7d, 0x6d, 0x5e, 0xf5, + 0xe3, 0x3b, 0x87, 0x59, 0x89, 0x09, 0xc2, 0x84, 0xce, 0x60, 0xf3, 0xda, 0xd0, 0x53, 0x4a, 0xc5, + 0x5b, 0x95, 0x1e, 0xf8, 0x99, 0x39, 0x27, 0x15, 0xdf, 0x40, 0x55, 0xf6, 0xb8, 0x44, 0xa8, 0x74, + 0xfb, 0x92, 0x62, 0x82, 0x30, 0x19, 0xc7, 0xb0, 0xaa, 0x76, 0x89, 0x36, 0x01, 0x59, 0xfd, 0x76, + 0xff, 0xdc, 0xb2, 0xcf, 0xdf, 0x59, 0x67, 0xdd, 0x83, 0xde, 0x61, 0xaf, 0xdb, 0xa9, 0xdf, 0x43, + 0x65, 0x58, 0xeb, 0xbe, 0x6b, 0xef, 0x9f, 0x74, 0x3b, 0xf5, 0x1c, 0xaa, 0x40, 0xb1, 0xd3, 0xb3, + 0xd4, 0x5b, 0x5e, 0xb8, 0x3a, 0xdd, 0x93, 0x6e, 0xbf, 0xdb, 0xa9, 0x17, 0x8c, 0x43, 0xa8, 0x76, + 0xa3, 0x88, 0x46, 0x27, 0x74, 0xd4, 0x0d, 0x78, 0x34, 0x45, 0x75, 0x28, 0xcc, 0xe6, 0x8e, 0x78, + 0x14, 0xf7, 0x22, 0x22, 0x20, 0xb6, 0x4b, 0x38, 0xf6, 0xfc, 0x78, 0x2c, 0x57, 0xa4, 0xb1, 0xa3, + 0x6c, 0xc6, 0xdf, 0x73, 0x50, 0x91, 0x42, 0xd6, 0x64, 0x3c, 0xc6, 0xd1, 0x14, 0xb5, 0x00, 0x14, + 0x4b, 0x5c, 0xd7, 0xa4, 0xdc, 0xfa, 0x6e, 0x3d, 0xde, 0x60, 0x14, 0x3a, 0xcd, 0x03, 0xea, 0x12, + 0xb3, 0x24, 0x31, 0xe2, 0x51, 0x4c, 0xdc, 0x98, 0x30, 0x09, 0xb8, 0x2c, 0xdf, 0x82, 0x09, 0xda, + 0x2f, 0xee, 0x67, 0x7d, 0xd8, 0x50, 0x00, 0x9f, 0x8e, 0x6c, 0x12, 0xf0, 0xc8, 0xd3, 0x57, 0x84, + 0xf2, 0xee, 0xf3, 0x25, 0xa5, 0x31, 0xb7, 0x3d, 0xb3, 0x46, 0x52, 0xaf, 0x1e, 0x61, 0xc6, 0x7f, + 0xd7, 0xa0, 0x1e, 0x57, 0x8f, 0x8c, 0x43, 0x22, 0x86, 0x5e, 0xc1, 0x56, 0x32, 0xae, 0xe8, 0x24, + 0x70, 0xd3, 0x43, 0x4b, 0xee, 0xa5, 0x60, 0x6e, 0x6a, 0xc0, 0xa1, 0xf0, 0xcf, 0x86, 0x14, 0xfa, + 0x06, 0x3e, 0x1b, 0x4c, 0x39, 0x59, 0x44, 0x54, 0x5b, 0x7a, 0x20, 0xdd, 0x59, 0xda, 0xaf, 0xe0, + 0xd1, 0x7c, 0x44, 0x1a, 0xf8, 0x53, 0xcd, 0x16, 0x8d, 0xbd, 0x20, 0xb9, 0x8d, 0x74, 0xd0, 0xd3, + 0xc0, 0x9f, 0x4a, 0x05, 0xd1, 0xbf, 0x7f, 0x09, 0xdb, 0xe9, 0xb0, 0x19, 0xf6, 0x8a, 0x5a, 0xf2, + 0x2c, 0xf2, 0x1c, 0xf7, 0x2d, 0xec, 0x2c, 0x1a, 0xce, 0xec, 0xc2, 0x0b, 0x43, 0xe2, 0xda, 0x83, + 0xa9, 0xcd, 0xa6, 0x81, 0x23, 0x7f, 0xc4, 0x05, 0xf3, 0x73, 0x9a, 0x9d, 0xcb, 0x96, 0xc2, 0xed, + 0x4f, 0xad, 0x69, 0xe0, 0xa0, 0x23, 0xf8, 0x99, 0x5e, 0xc8, 0x12, 0xa9, 0x55, 0x29, 0xf5, 0x48, + 0xad, 0xe7, 0x06, 0xa1, 0x3d, 0x88, 0x8f, 0xd8, 0x76, 0x68, 0xe8, 0x11, 0xd7, 0xe6, 0x54, 0xed, + 0x66, 0x4d, 0xb2, 0x3f, 0xd1, 0xde, 0x03, 0xe9, 0xec, 0x53, 0xb9, 0x95, 0x16, 0xa8, 0xe3, 0xcd, + 0x52, 0x8a, 0x92, 0xb2, 0x21, 0x7d, 0x73, 0x84, 0x6f, 0xe1, 0x61, 0x1c, 0x45, 0x5d, 0x3b, 0xe6, + 0x53, 0x56, 0x9a, 0x3b, 0xf6, 0x8e, 0x42, 0xa4, 0xd2, 0xf6, 0x0a, 0xb6, 0x54, 0xbc, 0x45, 0x64, + 0x48, 0x9d, 0xfa, 0x42, 0xea, 0xe2, 0xc8, 0x62, 0xbd, 0xe5, 
0xb9, 0x1a, 0x4b, 0x93, 0xc5, 0xa2, + 0x93, 0x1a, 0xbb, 0x4e, 0xac, 0xa4, 0x6a, 0x2c, 0x4b, 0x7b, 0x0d, 0xdb, 0x8b, 0xf2, 0x3c, 0xc4, + 0x9e, 0x4f, 0xdc, 0x46, 0x55, 0x32, 0x3f, 0xbb, 0x96, 0xde, 0x43, 0xe9, 0x46, 0xbf, 0x80, 0xc6, + 0xf5, 0xbc, 0x6a, 0xea, 0xba, 0xa4, 0x7e, 0x9a, 0x49, 0xa7, 0x26, 0xf6, 0xc0, 0x48, 0xa2, 0x4a, + 0x8b, 0x48, 0x8a, 0xbe, 0x13, 0xce, 0xd6, 0x5d, 0x93, 0x12, 0x8f, 0xe3, 0xe8, 0x12, 0xd8, 0xa7, + 0x6a, 0x07, 0xc9, 0x06, 0xba, 0xf0, 0x54, 0xaf, 0xe1, 0x66, 0xa1, 0xba, 0x14, 0x7a, 0xa8, 0xd6, + 0xb2, 0x50, 0xc6, 0xf8, 0xf3, 0x7d, 0xd8, 0x98, 0xdd, 0x2f, 0x88, 0xfa, 0x4a, 0x59, 0x38, 0x52, + 0xe7, 0x07, 0x66, 0xfe, 0xd6, 0x81, 0x59, 0xf8, 0x29, 0x03, 0xf3, 0x15, 0xc0, 0xec, 0x9a, 0xa0, + 0x67, 0xef, 0xb2, 0x91, 0x50, 0x4a, 0xee, 0x08, 0xe8, 0x1b, 0x28, 0x8a, 0x7b, 0x89, 0x24, 0xde, + 0xbf, 0x95, 0xb8, 0x46, 0x02, 0x57, 0xd2, 0xde, 0x66, 0x26, 0xec, 0xde, 0x9d, 0x2e, 0x64, 0xfa, + 0xc0, 0xb2, 0x73, 0xf6, 0x08, 0x8a, 0x8e, 0xee, 0x9f, 0x7a, 0xc4, 0x7e, 0x7d, 0x07, 0xb9, 0xb8, + 0xe5, 0x9a, 0x09, 0x19, 0x99, 0x50, 0x57, 0x7d, 0x7e, 0x10, 0x11, 0x7c, 0xe1, 0xd2, 0xcb, 0x80, + 0x35, 0x8a, 0xb2, 0xcd, 0x3f, 0xbb, 0xad, 0xcd, 0xeb, 0xe1, 0xa3, 0xbb, 0xfc, 0x7e, 0xc2, 0x17, + 0x9f, 0xe0, 0x49, 0xa6, 0x7e, 0xa4, 0x03, 0xf5, 0x51, 0x57, 0x52, 0x9f, 0xe0, 0x7c, 0x76, 0x77, + 0x90, 0xdf, 0x81, 0xce, 0xad, 0xc3, 0xb5, 0x06, 0xe5, 0xde, 0x3b, 0xfb, 0xcc, 0x3c, 0x3d, 0x32, + 0xbb, 0x96, 0x55, 0xcf, 0x21, 0x80, 0xd5, 0xb3, 0xf6, 0xb9, 0x15, 0x8f, 0x57, 0xeb, 0xfc, 0xe0, + 0x40, 0x38, 0x0a, 0xc2, 0x71, 0xd8, 0xee, 0x89, 0xb9, 0xbb, 0x22, 0x1c, 0xed, 0xfd, 0x53, 0x53, + 0xcc, 0xdd, 0xfb, 0xfb, 0xff, 0xc8, 0xc1, 0x8e, 0x43, 0xc7, 0x4b, 0x36, 0x24, 0x13, 0xb7, 0x5f, + 0x8d, 0x0f, 0xaa, 0x3f, 0x0d, 0x09, 0xfb, 0xfd, 0xb1, 0xc6, 0x8f, 0xa8, 0x8f, 0x83, 0x51, 0x93, + 0x46, 0xa3, 0xd6, 0x88, 0x04, 0x12, 0xda, 0x52, 0x2e, 0x1c, 0x7a, 0x6c, 0xc1, 0x1f, 0x3a, 0xaf, + 0x33, 0xa6, 0xff, 0xe5, 0x72, 0x7f, 0xcb, 0x3f, 0x51, 0xff, 0x83, 0x34, 0x0f, 0x7c, 0x3a, 0x71, + 0x9b, 0x96, 0x42, 0xc4, 0x01, 0x9b, 0x3f, 0xbc, 0xfc, 0x67, 0x8c, 0x78, 0x2f, 0x11, 0xef, 0x33, + 0x88, 0xf7, 0x3f, 0xbc, 0x1c, 0xac, 0xca, 0xd8, 0x7b, 0xff, 0x0f, 0x00, 0x00, 0xff, 0xff, 0x0c, + 0xec, 0x5b, 0x90, 0x4b, 0x12, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/resources.pb.go b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/resources.pb.go new file mode 100644 index 0000000..7c813ce --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/resources.pb.go @@ -0,0 +1,681 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/streetview/publish/v1/resources.proto + +package publish // import "google.golang.org/genproto/googleapis/streetview/publish/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import latlng "google.golang.org/genproto/googleapis/type/latlng" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Status of rights transfer. +type Photo_TransferStatus int32 + +const ( + // The status of this transfer is unspecified. + Photo_TRANSFER_STATUS_UNKNOWN Photo_TransferStatus = 0 + // This photo has never been in a transfer. + Photo_NEVER_TRANSFERRED Photo_TransferStatus = 1 + // This photo transfer has been initiated, but the receiver has not yet + // responded. + Photo_PENDING Photo_TransferStatus = 2 + // The photo transfer has been completed, and this photo has been + // transferred to the recipient. + Photo_COMPLETED Photo_TransferStatus = 3 + // The recipient rejected this photo transfer. + Photo_REJECTED Photo_TransferStatus = 4 + // The photo transfer expired before the recipient took any action. + Photo_EXPIRED Photo_TransferStatus = 5 + // The sender cancelled this photo transfer. + Photo_CANCELLED Photo_TransferStatus = 6 + // The recipient owns this photo due to a rights transfer. + Photo_RECEIVED_VIA_TRANSFER Photo_TransferStatus = 7 +) + +var Photo_TransferStatus_name = map[int32]string{ + 0: "TRANSFER_STATUS_UNKNOWN", + 1: "NEVER_TRANSFERRED", + 2: "PENDING", + 3: "COMPLETED", + 4: "REJECTED", + 5: "EXPIRED", + 6: "CANCELLED", + 7: "RECEIVED_VIA_TRANSFER", +} +var Photo_TransferStatus_value = map[string]int32{ + "TRANSFER_STATUS_UNKNOWN": 0, + "NEVER_TRANSFERRED": 1, + "PENDING": 2, + "COMPLETED": 3, + "REJECTED": 4, + "EXPIRED": 5, + "CANCELLED": 6, + "RECEIVED_VIA_TRANSFER": 7, +} + +func (x Photo_TransferStatus) String() string { + return proto.EnumName(Photo_TransferStatus_name, int32(x)) +} +func (Photo_TransferStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{6, 0} +} + +// Publication status of the photo in Google Maps. +type Photo_MapsPublishStatus int32 + +const ( + // The status of the photo is unknown. + Photo_UNSPECIFIED_MAPS_PUBLISH_STATUS Photo_MapsPublishStatus = 0 + // The photo is published to the public through Google Maps. + Photo_PUBLISHED Photo_MapsPublishStatus = 1 + // The photo has been rejected for an unknown reason. + Photo_REJECTED_UNKNOWN Photo_MapsPublishStatus = 2 +) + +var Photo_MapsPublishStatus_name = map[int32]string{ + 0: "UNSPECIFIED_MAPS_PUBLISH_STATUS", + 1: "PUBLISHED", + 2: "REJECTED_UNKNOWN", +} +var Photo_MapsPublishStatus_value = map[string]int32{ + "UNSPECIFIED_MAPS_PUBLISH_STATUS": 0, + "PUBLISHED": 1, + "REJECTED_UNKNOWN": 2, +} + +func (x Photo_MapsPublishStatus) String() string { + return proto.EnumName(Photo_MapsPublishStatus_name, int32(x)) +} +func (Photo_MapsPublishStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{6, 1} +} + +// Upload reference for media files. +type UploadRef struct { + // Required. An upload reference should be unique for each user. 
It follows + // the form: + // "https://streetviewpublish.googleapis.com/media/user/{account_id}/photo/{upload_reference}" + UploadUrl string `protobuf:"bytes,1,opt,name=upload_url,json=uploadUrl,proto3" json:"upload_url,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UploadRef) Reset() { *m = UploadRef{} } +func (m *UploadRef) String() string { return proto.CompactTextString(m) } +func (*UploadRef) ProtoMessage() {} +func (*UploadRef) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{0} +} +func (m *UploadRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UploadRef.Unmarshal(m, b) +} +func (m *UploadRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UploadRef.Marshal(b, m, deterministic) +} +func (dst *UploadRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_UploadRef.Merge(dst, src) +} +func (m *UploadRef) XXX_Size() int { + return xxx_messageInfo_UploadRef.Size(m) +} +func (m *UploadRef) XXX_DiscardUnknown() { + xxx_messageInfo_UploadRef.DiscardUnknown(m) +} + +var xxx_messageInfo_UploadRef proto.InternalMessageInfo + +func (m *UploadRef) GetUploadUrl() string { + if m != nil { + return m.UploadUrl + } + return "" +} + +// Identifier for a [Photo][google.streetview.publish.v1.Photo]. +type PhotoId struct { + // Required. A unique identifier for a photo. + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PhotoId) Reset() { *m = PhotoId{} } +func (m *PhotoId) String() string { return proto.CompactTextString(m) } +func (*PhotoId) ProtoMessage() {} +func (*PhotoId) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{1} +} +func (m *PhotoId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PhotoId.Unmarshal(m, b) +} +func (m *PhotoId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PhotoId.Marshal(b, m, deterministic) +} +func (dst *PhotoId) XXX_Merge(src proto.Message) { + xxx_messageInfo_PhotoId.Merge(dst, src) +} +func (m *PhotoId) XXX_Size() int { + return xxx_messageInfo_PhotoId.Size(m) +} +func (m *PhotoId) XXX_DiscardUnknown() { + xxx_messageInfo_PhotoId.DiscardUnknown(m) +} + +var xxx_messageInfo_PhotoId proto.InternalMessageInfo + +func (m *PhotoId) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +// Level information containing level number and its corresponding name. +type Level struct { + // Floor number, used for ordering. 0 indicates the ground level, 1 indicates + // the first level above ground level, -1 indicates the first level under + // ground level. Non-integer values are OK. + Number float64 `protobuf:"fixed64,1,opt,name=number,proto3" json:"number,omitempty"` + // Required. A name assigned to this Level, restricted to 3 characters. + // Consider how the elevator buttons would be labeled for this level if there + // was an elevator. 
+ Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Level) Reset() { *m = Level{} } +func (m *Level) String() string { return proto.CompactTextString(m) } +func (*Level) ProtoMessage() {} +func (*Level) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{2} +} +func (m *Level) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Level.Unmarshal(m, b) +} +func (m *Level) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Level.Marshal(b, m, deterministic) +} +func (dst *Level) XXX_Merge(src proto.Message) { + xxx_messageInfo_Level.Merge(dst, src) +} +func (m *Level) XXX_Size() int { + return xxx_messageInfo_Level.Size(m) +} +func (m *Level) XXX_DiscardUnknown() { + xxx_messageInfo_Level.DiscardUnknown(m) +} + +var xxx_messageInfo_Level proto.InternalMessageInfo + +func (m *Level) GetNumber() float64 { + if m != nil { + return m.Number + } + return 0 +} + +func (m *Level) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Raw pose measurement for an entity. +type Pose struct { + // Latitude and longitude pair of the pose, as explained here: + // https://cloud.google.com/datastore/docs/reference/rest/Shared.Types/LatLng + // When creating a [Photo][google.streetview.publish.v1.Photo], if the + // latitude and longitude pair are not provided, the geolocation from the + // exif header is used. A latitude and longitude pair not provided in the + // photo or exif header causes the create photo process to fail. + LatLngPair *latlng.LatLng `protobuf:"bytes,1,opt,name=lat_lng_pair,json=latLngPair,proto3" json:"lat_lng_pair,omitempty"` + // Altitude of the pose in meters above WGS84 ellipsoid. + // NaN indicates an unmeasured quantity. + Altitude float64 `protobuf:"fixed64,2,opt,name=altitude,proto3" json:"altitude,omitempty"` + // Compass heading, measured at the center of the photo in degrees clockwise + // from North. Value must be >=0 and <360. + // NaN indicates an unmeasured quantity. + Heading float64 `protobuf:"fixed64,3,opt,name=heading,proto3" json:"heading,omitempty"` + // Pitch, measured at the center of the photo in degrees. Value must be >=-90 + // and <= 90. A value of -90 means looking directly down, and a value of 90 + // means looking directly up. + // NaN indicates an unmeasured quantity. + Pitch float64 `protobuf:"fixed64,4,opt,name=pitch,proto3" json:"pitch,omitempty"` + // Roll, measured in degrees. Value must be >= 0 and <360. A value of 0 + // means level with the horizon. + // NaN indicates an unmeasured quantity. + Roll float64 `protobuf:"fixed64,5,opt,name=roll,proto3" json:"roll,omitempty"` + // Level (the floor in a building) used to configure vertical navigation. + Level *Level `protobuf:"bytes,7,opt,name=level,proto3" json:"level,omitempty"` + // The estimated horizontal accuracy of this pose in meters with 68% + // confidence (one standard deviation). For example, on Android, this value is + // available from this method: + // https://developer.android.com/reference/android/location/Location#getAccuracy(). + // Other platforms have different methods of obtaining similar accuracy + // estimations. 
+ AccuracyMeters float32 `protobuf:"fixed32,9,opt,name=accuracy_meters,json=accuracyMeters,proto3" json:"accuracy_meters,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Pose) Reset() { *m = Pose{} } +func (m *Pose) String() string { return proto.CompactTextString(m) } +func (*Pose) ProtoMessage() {} +func (*Pose) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{3} +} +func (m *Pose) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Pose.Unmarshal(m, b) +} +func (m *Pose) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Pose.Marshal(b, m, deterministic) +} +func (dst *Pose) XXX_Merge(src proto.Message) { + xxx_messageInfo_Pose.Merge(dst, src) +} +func (m *Pose) XXX_Size() int { + return xxx_messageInfo_Pose.Size(m) +} +func (m *Pose) XXX_DiscardUnknown() { + xxx_messageInfo_Pose.DiscardUnknown(m) +} + +var xxx_messageInfo_Pose proto.InternalMessageInfo + +func (m *Pose) GetLatLngPair() *latlng.LatLng { + if m != nil { + return m.LatLngPair + } + return nil +} + +func (m *Pose) GetAltitude() float64 { + if m != nil { + return m.Altitude + } + return 0 +} + +func (m *Pose) GetHeading() float64 { + if m != nil { + return m.Heading + } + return 0 +} + +func (m *Pose) GetPitch() float64 { + if m != nil { + return m.Pitch + } + return 0 +} + +func (m *Pose) GetRoll() float64 { + if m != nil { + return m.Roll + } + return 0 +} + +func (m *Pose) GetLevel() *Level { + if m != nil { + return m.Level + } + return nil +} + +func (m *Pose) GetAccuracyMeters() float32 { + if m != nil { + return m.AccuracyMeters + } + return 0 +} + +// Place metadata for an entity. +type Place struct { + // Place identifier, as described in + // https://developers.google.com/places/place-id. + PlaceId string `protobuf:"bytes,1,opt,name=place_id,json=placeId,proto3" json:"place_id,omitempty"` + // Output-only. The name of the place, localized to the language_code. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // Output-only. The language_code that the name is localized with. This should + // be the language_code specified in the request, but may be a fallback. 
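The Pose fields documented above carry conventions that are easy to get wrong: heading must lie in [0, 360), pitch in [-90, 90], roll in [0, 360), and NaN marks an unmeasured quantity. A minimal sketch of populating a Pose accordingly, illustrative only and not part of the generated file; the latlng import path and the helper name are assumptions:

package streetviewexample

import (
	"math"

	publish "google.golang.org/genproto/googleapis/streetview/publish/v1"
	"google.golang.org/genproto/googleapis/type/latlng"
)

// newPose builds a Pose for a photo taken at the given coordinates.
// heading is expected to already be in [0, 360); unmeasured quantities
// are set to NaN, as the field comments on Pose describe.
func newPose(lat, lng, heading float64) *publish.Pose {
	return &publish.Pose{
		// Required at create time unless the photo carries EXIF geolocation.
		LatLngPair: &latlng.LatLng{Latitude: lat, Longitude: lng},
		Heading:    heading,
		Pitch:      0,          // looking straight at the horizon
		Roll:       0,          // level with the horizon
		Altitude:   math.NaN(), // not measured
	}
}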
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Place) Reset() { *m = Place{} } +func (m *Place) String() string { return proto.CompactTextString(m) } +func (*Place) ProtoMessage() {} +func (*Place) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{4} +} +func (m *Place) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Place.Unmarshal(m, b) +} +func (m *Place) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Place.Marshal(b, m, deterministic) +} +func (dst *Place) XXX_Merge(src proto.Message) { + xxx_messageInfo_Place.Merge(dst, src) +} +func (m *Place) XXX_Size() int { + return xxx_messageInfo_Place.Size(m) +} +func (m *Place) XXX_DiscardUnknown() { + xxx_messageInfo_Place.DiscardUnknown(m) +} + +var xxx_messageInfo_Place proto.InternalMessageInfo + +func (m *Place) GetPlaceId() string { + if m != nil { + return m.PlaceId + } + return "" +} + +func (m *Place) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Place) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// A connection is the link from a source photo to a destination photo. +type Connection struct { + // Required. The destination of the connection from the containing photo to + // another photo. + Target *PhotoId `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Connection) Reset() { *m = Connection{} } +func (m *Connection) String() string { return proto.CompactTextString(m) } +func (*Connection) ProtoMessage() {} +func (*Connection) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{5} +} +func (m *Connection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Connection.Unmarshal(m, b) +} +func (m *Connection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Connection.Marshal(b, m, deterministic) +} +func (dst *Connection) XXX_Merge(src proto.Message) { + xxx_messageInfo_Connection.Merge(dst, src) +} +func (m *Connection) XXX_Size() int { + return xxx_messageInfo_Connection.Size(m) +} +func (m *Connection) XXX_DiscardUnknown() { + xxx_messageInfo_Connection.DiscardUnknown(m) +} + +var xxx_messageInfo_Connection proto.InternalMessageInfo + +func (m *Connection) GetTarget() *PhotoId { + if m != nil { + return m.Target + } + return nil +} + +// Photo is used to store 360 photos along with photo metadata. +type Photo struct { + // Required when updating a photo. Output only when creating a photo. + // Identifier for the photo, which is unique among all photos in + // Google. + PhotoId *PhotoId `protobuf:"bytes,1,opt,name=photo_id,json=photoId,proto3" json:"photo_id,omitempty"` + // Required when creating a photo. Input only. The resource URL where the + // photo bytes are uploaded to. + UploadReference *UploadRef `protobuf:"bytes,2,opt,name=upload_reference,json=uploadReference,proto3" json:"upload_reference,omitempty"` + // Output only. The download URL for the photo bytes. 
This field is set only + // when + // [GetPhotoRequest.view][google.streetview.publish.v1.GetPhotoRequest.view] + // is set to + // [PhotoView.INCLUDE_DOWNLOAD_URL][google.streetview.publish.v1.PhotoView.INCLUDE_DOWNLOAD_URL]. + DownloadUrl string `protobuf:"bytes,3,opt,name=download_url,json=downloadUrl,proto3" json:"download_url,omitempty"` + // Output only. The thumbnail URL for showing a preview of the given photo. + ThumbnailUrl string `protobuf:"bytes,9,opt,name=thumbnail_url,json=thumbnailUrl,proto3" json:"thumbnail_url,omitempty"` + // Output only. The share link for the photo. + ShareLink string `protobuf:"bytes,11,opt,name=share_link,json=shareLink,proto3" json:"share_link,omitempty"` + // Pose of the photo. + Pose *Pose `protobuf:"bytes,4,opt,name=pose,proto3" json:"pose,omitempty"` + // Connections to other photos. A connection represents the link from this + // photo to another photo. + Connections []*Connection `protobuf:"bytes,5,rep,name=connections,proto3" json:"connections,omitempty"` + // Absolute time when the photo was captured. + // When the photo has no exif timestamp, this is used to set a timestamp in + // the photo metadata. + CaptureTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=capture_time,json=captureTime,proto3" json:"capture_time,omitempty"` + // Places where this photo belongs. + Places []*Place `protobuf:"bytes,7,rep,name=places,proto3" json:"places,omitempty"` + // Output only. View count of the photo. + ViewCount int64 `protobuf:"varint,10,opt,name=view_count,json=viewCount,proto3" json:"view_count,omitempty"` + // Output only. Status of rights transfer on this photo. + TransferStatus Photo_TransferStatus `protobuf:"varint,12,opt,name=transfer_status,json=transferStatus,proto3,enum=google.streetview.publish.v1.Photo_TransferStatus" json:"transfer_status,omitempty"` + // Output only. Status in Google Maps, whether this photo was published or + // rejected. 
+ MapsPublishStatus Photo_MapsPublishStatus `protobuf:"varint,13,opt,name=maps_publish_status,json=mapsPublishStatus,proto3,enum=google.streetview.publish.v1.Photo_MapsPublishStatus" json:"maps_publish_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Photo) Reset() { *m = Photo{} } +func (m *Photo) String() string { return proto.CompactTextString(m) } +func (*Photo) ProtoMessage() {} +func (*Photo) Descriptor() ([]byte, []int) { + return fileDescriptor_resources_4fb97700fa63963a, []int{6} +} +func (m *Photo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Photo.Unmarshal(m, b) +} +func (m *Photo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Photo.Marshal(b, m, deterministic) +} +func (dst *Photo) XXX_Merge(src proto.Message) { + xxx_messageInfo_Photo.Merge(dst, src) +} +func (m *Photo) XXX_Size() int { + return xxx_messageInfo_Photo.Size(m) +} +func (m *Photo) XXX_DiscardUnknown() { + xxx_messageInfo_Photo.DiscardUnknown(m) +} + +var xxx_messageInfo_Photo proto.InternalMessageInfo + +func (m *Photo) GetPhotoId() *PhotoId { + if m != nil { + return m.PhotoId + } + return nil +} + +func (m *Photo) GetUploadReference() *UploadRef { + if m != nil { + return m.UploadReference + } + return nil +} + +func (m *Photo) GetDownloadUrl() string { + if m != nil { + return m.DownloadUrl + } + return "" +} + +func (m *Photo) GetThumbnailUrl() string { + if m != nil { + return m.ThumbnailUrl + } + return "" +} + +func (m *Photo) GetShareLink() string { + if m != nil { + return m.ShareLink + } + return "" +} + +func (m *Photo) GetPose() *Pose { + if m != nil { + return m.Pose + } + return nil +} + +func (m *Photo) GetConnections() []*Connection { + if m != nil { + return m.Connections + } + return nil +} + +func (m *Photo) GetCaptureTime() *timestamp.Timestamp { + if m != nil { + return m.CaptureTime + } + return nil +} + +func (m *Photo) GetPlaces() []*Place { + if m != nil { + return m.Places + } + return nil +} + +func (m *Photo) GetViewCount() int64 { + if m != nil { + return m.ViewCount + } + return 0 +} + +func (m *Photo) GetTransferStatus() Photo_TransferStatus { + if m != nil { + return m.TransferStatus + } + return Photo_TRANSFER_STATUS_UNKNOWN +} + +func (m *Photo) GetMapsPublishStatus() Photo_MapsPublishStatus { + if m != nil { + return m.MapsPublishStatus + } + return Photo_UNSPECIFIED_MAPS_PUBLISH_STATUS +} + +func init() { + proto.RegisterType((*UploadRef)(nil), "google.streetview.publish.v1.UploadRef") + proto.RegisterType((*PhotoId)(nil), "google.streetview.publish.v1.PhotoId") + proto.RegisterType((*Level)(nil), "google.streetview.publish.v1.Level") + proto.RegisterType((*Pose)(nil), "google.streetview.publish.v1.Pose") + proto.RegisterType((*Place)(nil), "google.streetview.publish.v1.Place") + proto.RegisterType((*Connection)(nil), "google.streetview.publish.v1.Connection") + proto.RegisterType((*Photo)(nil), "google.streetview.publish.v1.Photo") + proto.RegisterEnum("google.streetview.publish.v1.Photo_TransferStatus", Photo_TransferStatus_name, Photo_TransferStatus_value) + proto.RegisterEnum("google.streetview.publish.v1.Photo_MapsPublishStatus", Photo_MapsPublishStatus_name, Photo_MapsPublishStatus_value) +} + +func init() { + proto.RegisterFile("google/streetview/publish/v1/resources.proto", fileDescriptor_resources_4fb97700fa63963a) +} + +var fileDescriptor_resources_4fb97700fa63963a = []byte{ + // 940 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x6f, 0x6f, 0xdb, 0xb6, + 0x13, 0xae, 0x9d, 0xd8, 0x8e, 0xcf, 0x4e, 0xe2, 0xb0, 0xed, 0xef, 0xa7, 0x64, 0x2b, 0x9a, 0x29, + 0x18, 0x6a, 0x0c, 0x83, 0x84, 0xba, 0xe8, 0x80, 0xa1, 0x28, 0xb0, 0xc4, 0x56, 0x37, 0xb7, 0x8e, + 0x2b, 0xd0, 0x76, 0x36, 0xac, 0x18, 0x04, 0x5a, 0x62, 0x64, 0xa1, 0x34, 0x29, 0x50, 0x54, 0x82, + 0xbe, 0xdc, 0xeb, 0x7d, 0x8e, 0x7d, 0xbd, 0x7d, 0x86, 0x41, 0x14, 0xe5, 0xae, 0x5b, 0x90, 0xe6, + 0x95, 0xef, 0x9e, 0x7b, 0x1e, 0xde, 0x1f, 0x9e, 0x29, 0xf8, 0x36, 0x16, 0x22, 0x66, 0xd4, 0xcd, + 0x94, 0xa4, 0x54, 0x5d, 0x25, 0xf4, 0xda, 0x4d, 0xf3, 0x25, 0x4b, 0xb2, 0x95, 0x7b, 0xf5, 0xd4, + 0x95, 0x34, 0x13, 0xb9, 0x0c, 0x69, 0xe6, 0xa4, 0x52, 0x28, 0x81, 0xbe, 0x2c, 0xd9, 0xce, 0x47, + 0xb6, 0x63, 0xd8, 0xce, 0xd5, 0xd3, 0x23, 0x13, 0x75, 0x49, 0x9a, 0xb8, 0x84, 0x73, 0xa1, 0x88, + 0x4a, 0x04, 0x37, 0xda, 0xa3, 0xc7, 0x26, 0xaa, 0xbd, 0x65, 0x7e, 0xe9, 0xaa, 0x64, 0x4d, 0x33, + 0x45, 0xd6, 0xa9, 0x21, 0x58, 0x86, 0xa0, 0x3e, 0xa4, 0xd4, 0x65, 0x44, 0x31, 0x1e, 0x97, 0x11, + 0xfb, 0x1b, 0x68, 0x2f, 0x52, 0x26, 0x48, 0x84, 0xe9, 0x25, 0x7a, 0x04, 0x90, 0x6b, 0x27, 0xc8, + 0x25, 0xb3, 0x6a, 0xc7, 0xb5, 0x7e, 0x1b, 0xb7, 0x4b, 0x64, 0x21, 0x99, 0x7d, 0x08, 0x2d, 0x7f, + 0x25, 0x94, 0x18, 0x47, 0x68, 0x0f, 0xea, 0x49, 0x64, 0x18, 0xf5, 0x24, 0xb2, 0x9f, 0x41, 0x63, + 0x42, 0xaf, 0x28, 0x43, 0xff, 0x83, 0x26, 0xcf, 0xd7, 0x4b, 0x2a, 0x75, 0xb0, 0x86, 0x8d, 0x87, + 0x10, 0x6c, 0x73, 0xb2, 0xa6, 0x56, 0x5d, 0x4b, 0xb4, 0x6d, 0xff, 0x5e, 0x87, 0x6d, 0x5f, 0x64, + 0x14, 0x3d, 0x87, 0x2e, 0x23, 0x2a, 0x60, 0x3c, 0x0e, 0x52, 0x92, 0x94, 0xd2, 0xce, 0xe0, 0xbe, + 0x63, 0x46, 0x52, 0x54, 0xed, 0x4c, 0x88, 0x9a, 0xf0, 0x18, 0x03, 0xd3, 0xbf, 0x3e, 0x49, 0x24, + 0x3a, 0x82, 0x1d, 0xc2, 0x54, 0xa2, 0xf2, 0xa8, 0x3c, 0xb7, 0x86, 0x37, 0x3e, 0xb2, 0xa0, 0xb5, + 0xa2, 0x24, 0x4a, 0x78, 0x6c, 0x6d, 0xe9, 0x50, 0xe5, 0xa2, 0x07, 0xd0, 0x48, 0x13, 0x15, 0xae, + 0xac, 0x6d, 0x8d, 0x97, 0x4e, 0x51, 0x9f, 0x14, 0x8c, 0x59, 0x0d, 0x0d, 0x6a, 0x1b, 0x7d, 0x0f, + 0x0d, 0x56, 0x34, 0x65, 0xb5, 0x74, 0x3d, 0x27, 0xce, 0x6d, 0x57, 0xe4, 0xe8, 0xfe, 0x71, 0xa9, + 0x40, 0x4f, 0x60, 0x9f, 0x84, 0x61, 0x2e, 0x49, 0xf8, 0x21, 0x58, 0x53, 0x45, 0x65, 0x66, 0xb5, + 0x8f, 0x6b, 0xfd, 0x3a, 0xde, 0xab, 0xe0, 0x73, 0x8d, 0xda, 0xef, 0xa0, 0xe1, 0x33, 0x12, 0x52, + 0x74, 0x08, 0x3b, 0x69, 0x61, 0x04, 0x9b, 0xb9, 0xb6, 0xb4, 0x3f, 0x8e, 0x6e, 0x9a, 0x1d, 0x3a, + 0x81, 0x5d, 0x46, 0x78, 0x9c, 0x93, 0x98, 0x06, 0xa1, 0x88, 0xa8, 0xee, 0xb2, 0x8d, 0xbb, 0x15, + 0x38, 0x14, 0x11, 0xb5, 0xdf, 0x00, 0x0c, 0x05, 0xe7, 0x34, 0x2c, 0x96, 0x05, 0xbd, 0x84, 0xa6, + 0x22, 0x32, 0xa6, 0xca, 0xcc, 0xf7, 0xeb, 0xdb, 0xfb, 0x31, 0x57, 0x8d, 0x8d, 0xc8, 0xfe, 0xab, + 0x05, 0x0d, 0x8d, 0xa1, 0x1f, 0x60, 0x27, 0x2d, 0x8c, 0xaa, 0xd4, 0x3b, 0x1f, 0xd5, 0x4a, 0xcd, + 0xfa, 0x60, 0xe8, 0x99, 0x45, 0x93, 0xf4, 0x92, 0x4a, 0xca, 0xc3, 0xb2, 0xbb, 0xce, 0xe0, 0xc9, + 0xed, 0x27, 0x6d, 0x76, 0x15, 0xef, 0xe7, 0x95, 0x59, 0xea, 0xd1, 0x57, 0xd0, 0x8d, 0xc4, 0x35, + 0xdf, 0xac, 0x6f, 0x39, 0x90, 0x4e, 0x85, 0x2d, 0x24, 0x2b, 0x86, 0xa6, 0x56, 0xf9, 0x7a, 0xc9, + 0x49, 0xc2, 0x34, 0xa7, 0x5d, 0x0e, 0x6d, 0x03, 0x16, 0xa4, 0x47, 0x00, 0xd9, 0x8a, 0x48, 0x1a, + 0xb0, 0x84, 0xbf, 0xb7, 0x3a, 0xe5, 0x9f, 0x40, 0x23, 0x93, 0x84, 0xbf, 0x47, 0xdf, 0xc1, 0x76, + 0x2a, 0x32, 0xaa, 0xb7, 0xa7, 0x33, 0xb0, 0x3f, 0xd3, 0xb8, 0xc8, 0x28, 0xd6, 0x7c, 0xf4, 0x1a, + 0x3a, 0xe1, 0xe6, 0x2e, 0x32, 0xab, 0x71, 0xbc, 0xd5, 0xef, 0x0c, 0xfa, 0xb7, 0xcb, 0x3f, 0x5e, + 
0x1e, 0xfe, 0xa7, 0x18, 0xbd, 0x84, 0x6e, 0x48, 0x52, 0x95, 0x4b, 0x1a, 0x14, 0xff, 0x74, 0xab, + 0xa9, 0x6b, 0x39, 0xaa, 0x0e, 0xab, 0x9e, 0x01, 0x67, 0x5e, 0x3d, 0x03, 0xb8, 0x63, 0xf8, 0x05, + 0x82, 0x5e, 0x40, 0x53, 0xaf, 0x56, 0x66, 0xb5, 0x74, 0x15, 0x9f, 0x59, 0x6c, 0xbd, 0x9f, 0xd8, + 0x48, 0x8a, 0xf1, 0x14, 0x84, 0x20, 0x14, 0x39, 0x57, 0x16, 0x1c, 0xd7, 0xfa, 0x5b, 0xb8, 0x5d, + 0x20, 0xc3, 0x02, 0x40, 0xef, 0x60, 0x5f, 0x49, 0xc2, 0xb3, 0x4b, 0x2a, 0x83, 0x4c, 0x11, 0x95, + 0x67, 0x56, 0xf7, 0xb8, 0xd6, 0xdf, 0x1b, 0x0c, 0xee, 0xb0, 0x22, 0xce, 0xdc, 0x48, 0x67, 0x5a, + 0x89, 0xf7, 0xd4, 0x27, 0x3e, 0xa2, 0x70, 0x7f, 0x4d, 0xd2, 0x2c, 0x30, 0xba, 0x2a, 0xc1, 0xae, + 0x4e, 0xf0, 0xfc, 0x2e, 0x09, 0xce, 0x49, 0x9a, 0xf9, 0x25, 0x68, 0x72, 0x1c, 0xac, 0xff, 0x0d, + 0xd9, 0x7f, 0xd6, 0x60, 0xef, 0xd3, 0x4a, 0xd0, 0x17, 0xf0, 0xff, 0x39, 0x3e, 0x9d, 0xce, 0x5e, + 0x79, 0x38, 0x98, 0xcd, 0x4f, 0xe7, 0x8b, 0x59, 0xb0, 0x98, 0xbe, 0x99, 0xbe, 0xfd, 0x79, 0xda, + 0xbb, 0x87, 0x1e, 0xc2, 0xc1, 0xd4, 0xbb, 0xf0, 0x70, 0x50, 0x51, 0xb0, 0x37, 0xea, 0xd5, 0x50, + 0x07, 0x5a, 0xbe, 0x37, 0x1d, 0x8d, 0xa7, 0x3f, 0xf6, 0xea, 0x68, 0x17, 0xda, 0xc3, 0xb7, 0xe7, + 0xfe, 0xc4, 0x9b, 0x7b, 0xa3, 0xde, 0x16, 0xea, 0xc2, 0x0e, 0xf6, 0x5e, 0x7b, 0xc3, 0xc2, 0xdb, + 0x2e, 0x98, 0xde, 0x2f, 0xfe, 0xb8, 0x90, 0x35, 0x34, 0xf3, 0x74, 0x3a, 0xf4, 0x26, 0x13, 0x6f, + 0xd4, 0x6b, 0xa2, 0x43, 0x78, 0x88, 0xbd, 0xa1, 0x37, 0xbe, 0xf0, 0x46, 0xc1, 0xc5, 0xf8, 0x74, + 0x93, 0xa3, 0xd7, 0xb2, 0x7f, 0x83, 0x83, 0xff, 0xf4, 0x83, 0x4e, 0xe0, 0xf1, 0x62, 0x3a, 0xf3, + 0xbd, 0xe1, 0xf8, 0xd5, 0xd8, 0x1b, 0x05, 0xe7, 0xa7, 0xfe, 0x2c, 0xf0, 0x17, 0x67, 0x93, 0xf1, + 0xec, 0x27, 0x53, 0x79, 0xef, 0x5e, 0x91, 0xc3, 0x60, 0xba, 0xd2, 0x07, 0xd0, 0xab, 0xaa, 0xd9, + 0xb4, 0x55, 0x3f, 0xfb, 0xa3, 0x06, 0xfd, 0x50, 0xac, 0xab, 0xb1, 0xc6, 0x54, 0x38, 0x79, 0x1c, + 0xde, 0x3c, 0xde, 0xb3, 0xa3, 0x99, 0x86, 0x2f, 0x12, 0x7a, 0x6d, 0xea, 0xc1, 0xd5, 0x07, 0xee, + 0xd7, 0x61, 0x75, 0x82, 0x28, 0x5e, 0x27, 0x47, 0xc8, 0xd8, 0x8d, 0x29, 0xd7, 0x5b, 0xea, 0x96, + 0x21, 0x92, 0x26, 0xd9, 0xcd, 0xdf, 0xc9, 0x17, 0xc6, 0x5c, 0x36, 0x35, 0xff, 0xd9, 0xdf, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x6d, 0x23, 0x7d, 0x07, 0x56, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/rpcmessages.pb.go b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/rpcmessages.pb.go new file mode 100644 index 0000000..87c06cf --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/rpcmessages.pb.go @@ -0,0 +1,841 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/streetview/publish/v1/rpcmessages.proto + +package publish // import "google.golang.org/genproto/googleapis/streetview/publish/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import status "google.golang.org/genproto/googleapis/rpc/status" +import field_mask "google.golang.org/genproto/protobuf/field_mask" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Specifies which view of the [Photo][google.streetview.publish.v1.Photo] +// to include in the response. +type PhotoView int32 + +const ( + // Server reponses do not include the download URL for the photo bytes. + // The default value. + PhotoView_BASIC PhotoView = 0 + // Server responses include the download URL for the photo bytes. + PhotoView_INCLUDE_DOWNLOAD_URL PhotoView = 1 +) + +var PhotoView_name = map[int32]string{ + 0: "BASIC", + 1: "INCLUDE_DOWNLOAD_URL", +} +var PhotoView_value = map[string]int32{ + "BASIC": 0, + "INCLUDE_DOWNLOAD_URL": 1, +} + +func (x PhotoView) String() string { + return proto.EnumName(PhotoView_name, int32(x)) +} +func (PhotoView) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{0} +} + +// Request to create a [Photo][google.streetview.publish.v1.Photo]. +type CreatePhotoRequest struct { + // Required. Photo to create. + Photo *Photo `protobuf:"bytes,1,opt,name=photo,proto3" json:"photo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreatePhotoRequest) Reset() { *m = CreatePhotoRequest{} } +func (m *CreatePhotoRequest) String() string { return proto.CompactTextString(m) } +func (*CreatePhotoRequest) ProtoMessage() {} +func (*CreatePhotoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{0} +} +func (m *CreatePhotoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreatePhotoRequest.Unmarshal(m, b) +} +func (m *CreatePhotoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreatePhotoRequest.Marshal(b, m, deterministic) +} +func (dst *CreatePhotoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreatePhotoRequest.Merge(dst, src) +} +func (m *CreatePhotoRequest) XXX_Size() int { + return xxx_messageInfo_CreatePhotoRequest.Size(m) +} +func (m *CreatePhotoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreatePhotoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreatePhotoRequest proto.InternalMessageInfo + +func (m *CreatePhotoRequest) GetPhoto() *Photo { + if m != nil { + return m.Photo + } + return nil +} + +// Request to get a [Photo][google.streetview.publish.v1.Photo]. +// +// By default +// +// * does not return the download URL for the photo bytes. +// +// Parameters: +// +// * `view` controls if the download URL for the photo bytes is returned. +type GetPhotoRequest struct { + // Required. ID of the [Photo][google.streetview.publish.v1.Photo]. + PhotoId string `protobuf:"bytes,1,opt,name=photo_id,json=photoId,proto3" json:"photo_id,omitempty"` + // Specifies if a download URL for the photo bytes should be returned in the + // [Photo][google.streetview.publish.v1.Photo] response. + View PhotoView `protobuf:"varint,2,opt,name=view,proto3,enum=google.streetview.publish.v1.PhotoView" json:"view,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + // If language_code is unspecified, the user's language preference for Google + // services is used. 
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetPhotoRequest) Reset() { *m = GetPhotoRequest{} } +func (m *GetPhotoRequest) String() string { return proto.CompactTextString(m) } +func (*GetPhotoRequest) ProtoMessage() {} +func (*GetPhotoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{1} +} +func (m *GetPhotoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetPhotoRequest.Unmarshal(m, b) +} +func (m *GetPhotoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetPhotoRequest.Marshal(b, m, deterministic) +} +func (dst *GetPhotoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetPhotoRequest.Merge(dst, src) +} +func (m *GetPhotoRequest) XXX_Size() int { + return xxx_messageInfo_GetPhotoRequest.Size(m) +} +func (m *GetPhotoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetPhotoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetPhotoRequest proto.InternalMessageInfo + +func (m *GetPhotoRequest) GetPhotoId() string { + if m != nil { + return m.PhotoId + } + return "" +} + +func (m *GetPhotoRequest) GetView() PhotoView { + if m != nil { + return m.View + } + return PhotoView_BASIC +} + +func (m *GetPhotoRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Request to get one or more [Photos][google.streetview.publish.v1.Photo]. +// By default +// +// * does not return the download URL for the photo bytes. +// +// Parameters: +// +// * `view` controls if the download URL for the photo bytes is returned. +type BatchGetPhotosRequest struct { + // Required. IDs of the [Photos][google.streetview.publish.v1.Photo]. HTTP GET + // requests require the following syntax for the URL query parameter: + // `photoIds=&photoIds=&...`. + PhotoIds []string `protobuf:"bytes,1,rep,name=photo_ids,json=photoIds,proto3" json:"photo_ids,omitempty"` + // Specifies if a download URL for the photo bytes should be returned in the + // Photo response. + View PhotoView `protobuf:"varint,2,opt,name=view,proto3,enum=google.streetview.publish.v1.PhotoView" json:"view,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + // If language_code is unspecified, the user's language preference for Google + // services is used. 
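Since the view parameter is what decides whether a download URL comes back on the Photo, a short sketch of a GetPhoto call that opts in; illustrative only, assuming a connected StreetViewPublishServiceClient (the gRPC client interface generated in streetview_publish.pb.go below):

package streetviewexample

import (
	"context"

	publish "google.golang.org/genproto/googleapis/streetview/publish/v1"
)

// fetchDownloadURL requests a single photo and asks for its download URL.
// With the default PhotoView_BASIC the DownloadUrl field stays empty.
func fetchDownloadURL(ctx context.Context, client publish.StreetViewPublishServiceClient, id string) (string, error) {
	photo, err := client.GetPhoto(ctx, &publish.GetPhotoRequest{
		PhotoId: id,
		View:    publish.PhotoView_INCLUDE_DOWNLOAD_URL,
	})
	if err != nil {
		return "", err
	}
	return photo.GetDownloadUrl(), nil
}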
+ LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetPhotosRequest) Reset() { *m = BatchGetPhotosRequest{} } +func (m *BatchGetPhotosRequest) String() string { return proto.CompactTextString(m) } +func (*BatchGetPhotosRequest) ProtoMessage() {} +func (*BatchGetPhotosRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{2} +} +func (m *BatchGetPhotosRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetPhotosRequest.Unmarshal(m, b) +} +func (m *BatchGetPhotosRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetPhotosRequest.Marshal(b, m, deterministic) +} +func (dst *BatchGetPhotosRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetPhotosRequest.Merge(dst, src) +} +func (m *BatchGetPhotosRequest) XXX_Size() int { + return xxx_messageInfo_BatchGetPhotosRequest.Size(m) +} +func (m *BatchGetPhotosRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetPhotosRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetPhotosRequest proto.InternalMessageInfo + +func (m *BatchGetPhotosRequest) GetPhotoIds() []string { + if m != nil { + return m.PhotoIds + } + return nil +} + +func (m *BatchGetPhotosRequest) GetView() PhotoView { + if m != nil { + return m.View + } + return PhotoView_BASIC +} + +func (m *BatchGetPhotosRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Response to batch get of [Photos][google.streetview.publish.v1.Photo]. +type BatchGetPhotosResponse struct { + // List of results for each individual + // [Photo][google.streetview.publish.v1.Photo] requested, in the same order as + // the requests in + // [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos]. 
+ Results []*PhotoResponse `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchGetPhotosResponse) Reset() { *m = BatchGetPhotosResponse{} } +func (m *BatchGetPhotosResponse) String() string { return proto.CompactTextString(m) } +func (*BatchGetPhotosResponse) ProtoMessage() {} +func (*BatchGetPhotosResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{3} +} +func (m *BatchGetPhotosResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchGetPhotosResponse.Unmarshal(m, b) +} +func (m *BatchGetPhotosResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchGetPhotosResponse.Marshal(b, m, deterministic) +} +func (dst *BatchGetPhotosResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchGetPhotosResponse.Merge(dst, src) +} +func (m *BatchGetPhotosResponse) XXX_Size() int { + return xxx_messageInfo_BatchGetPhotosResponse.Size(m) +} +func (m *BatchGetPhotosResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchGetPhotosResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchGetPhotosResponse proto.InternalMessageInfo + +func (m *BatchGetPhotosResponse) GetResults() []*PhotoResponse { + if m != nil { + return m.Results + } + return nil +} + +// Response payload for a single +// [Photo][google.streetview.publish.v1.Photo] +// in batch operations including +// [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos] +// and +// [BatchUpdatePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchUpdatePhotos]. +type PhotoResponse struct { + // The status for the operation to get or update a single photo in the batch + // request. + Status *status.Status `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"` + // The [Photo][google.streetview.publish.v1.Photo] resource, if the request + // was successful. + Photo *Photo `protobuf:"bytes,2,opt,name=photo,proto3" json:"photo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PhotoResponse) Reset() { *m = PhotoResponse{} } +func (m *PhotoResponse) String() string { return proto.CompactTextString(m) } +func (*PhotoResponse) ProtoMessage() {} +func (*PhotoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{4} +} +func (m *PhotoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PhotoResponse.Unmarshal(m, b) +} +func (m *PhotoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PhotoResponse.Marshal(b, m, deterministic) +} +func (dst *PhotoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PhotoResponse.Merge(dst, src) +} +func (m *PhotoResponse) XXX_Size() int { + return xxx_messageInfo_PhotoResponse.Size(m) +} +func (m *PhotoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PhotoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PhotoResponse proto.InternalMessageInfo + +func (m *PhotoResponse) GetStatus() *status.Status { + if m != nil { + return m.Status + } + return nil +} + +func (m *PhotoResponse) GetPhoto() *Photo { + if m != nil { + return m.Photo + } + return nil +} + +// Request to list all photos that belong to the user sending the request. +// +// By default +// +// * does not return the download URL for the photo bytes. 
+// +// Parameters: +// +// * `view` controls if the download URL for the photo bytes is returned. +// * `pageSize` determines the maximum number of photos to return. +// * `pageToken` is the next page token value returned from a previous +// [ListPhotos][google.streetview.publish.v1.StreetViewPublishService.ListPhotos] +// request, if any. +// * `filter` allows filtering by a given parameter. 'placeId' is the only +// parameter supported at the moment. +type ListPhotosRequest struct { + // Specifies if a download URL for the photos bytes should be returned in the + // Photos response. + View PhotoView `protobuf:"varint,1,opt,name=view,proto3,enum=google.streetview.publish.v1.PhotoView" json:"view,omitempty"` + // The maximum number of photos to return. + // `pageSize` must be non-negative. If `pageSize` is zero or is not provided, + // the default page size of 100 is used. + // The number of photos returned in the response may be less than `pageSize` + // if the number of photos that belong to the user is less than `pageSize`. + PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"` + // The + // [nextPageToken][google.streetview.publish.v1.ListPhotosResponse.next_page_token] + // value returned from a previous + // [ListPhotos][google.streetview.publish.v1.StreetViewPublishService.ListPhotos] + // request, if any. + PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"` + // The filter expression. For example: `placeId=ChIJj61dQgK6j4AR4GeTYWZsKWw`. + // + // The only filter supported at the moment is `placeId`. + Filter string `protobuf:"bytes,4,opt,name=filter,proto3" json:"filter,omitempty"` + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + // If language_code is unspecified, the user's language preference for Google + // services is used. 
+ LanguageCode string `protobuf:"bytes,5,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPhotosRequest) Reset() { *m = ListPhotosRequest{} } +func (m *ListPhotosRequest) String() string { return proto.CompactTextString(m) } +func (*ListPhotosRequest) ProtoMessage() {} +func (*ListPhotosRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{5} +} +func (m *ListPhotosRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPhotosRequest.Unmarshal(m, b) +} +func (m *ListPhotosRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPhotosRequest.Marshal(b, m, deterministic) +} +func (dst *ListPhotosRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPhotosRequest.Merge(dst, src) +} +func (m *ListPhotosRequest) XXX_Size() int { + return xxx_messageInfo_ListPhotosRequest.Size(m) +} +func (m *ListPhotosRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ListPhotosRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPhotosRequest proto.InternalMessageInfo + +func (m *ListPhotosRequest) GetView() PhotoView { + if m != nil { + return m.View + } + return PhotoView_BASIC +} + +func (m *ListPhotosRequest) GetPageSize() int32 { + if m != nil { + return m.PageSize + } + return 0 +} + +func (m *ListPhotosRequest) GetPageToken() string { + if m != nil { + return m.PageToken + } + return "" +} + +func (m *ListPhotosRequest) GetFilter() string { + if m != nil { + return m.Filter + } + return "" +} + +func (m *ListPhotosRequest) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +// Response to list all photos that belong to a user. +type ListPhotosResponse struct { + // List of photos. The + // [pageSize][google.streetview.publish.v1.ListPhotosRequest.page_size] field + // in the request determines the number of items returned. + Photos []*Photo `protobuf:"bytes,1,rep,name=photos,proto3" json:"photos,omitempty"` + // Token to retrieve the next page of results, or empty if there are no more + // results in the list. 
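The pageSize, pageToken and filter fields above define the usual list-and-page loop. A sketch of draining all pages, illustrative only; the filter value is the placeId example from the field comment, and the helper name is an assumption:

package streetviewexample

import (
	"context"

	publish "google.golang.org/genproto/googleapis/streetview/publish/v1"
)

// listAllPhotos pages through ListPhotos until next_page_token comes back
// empty, feeding each page's token into the next request.
func listAllPhotos(ctx context.Context, client publish.StreetViewPublishServiceClient) ([]*publish.Photo, error) {
	var all []*publish.Photo
	req := &publish.ListPhotosRequest{
		View:     publish.PhotoView_BASIC,
		PageSize: 50, // zero falls back to the default page size of 100
		Filter:   "placeId=ChIJj61dQgK6j4AR4GeTYWZsKWw",
	}
	for {
		resp, err := client.ListPhotos(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetPhotos()...)
		if resp.GetNextPageToken() == "" {
			return all, nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}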
+ NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListPhotosResponse) Reset() { *m = ListPhotosResponse{} } +func (m *ListPhotosResponse) String() string { return proto.CompactTextString(m) } +func (*ListPhotosResponse) ProtoMessage() {} +func (*ListPhotosResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{6} +} +func (m *ListPhotosResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListPhotosResponse.Unmarshal(m, b) +} +func (m *ListPhotosResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListPhotosResponse.Marshal(b, m, deterministic) +} +func (dst *ListPhotosResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListPhotosResponse.Merge(dst, src) +} +func (m *ListPhotosResponse) XXX_Size() int { + return xxx_messageInfo_ListPhotosResponse.Size(m) +} +func (m *ListPhotosResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListPhotosResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListPhotosResponse proto.InternalMessageInfo + +func (m *ListPhotosResponse) GetPhotos() []*Photo { + if m != nil { + return m.Photos + } + return nil +} + +func (m *ListPhotosResponse) GetNextPageToken() string { + if m != nil { + return m.NextPageToken + } + return "" +} + +// Request to update the metadata of a +// [Photo][google.streetview.publish.v1.Photo]. Updating the pixels of a photo +// is not supported. +type UpdatePhotoRequest struct { + // Required. [Photo][google.streetview.publish.v1.Photo] object containing the + // new metadata. + Photo *Photo `protobuf:"bytes,1,opt,name=photo,proto3" json:"photo,omitempty"` + // Mask that identifies fields on the photo metadata to update. + // If not present, the old [Photo][google.streetview.publish.v1.Photo] + // metadata is entirely replaced with the + // new [Photo][google.streetview.publish.v1.Photo] metadata in this request. + // The update fails if invalid fields are specified. Multiple fields can be + // specified in a comma-delimited list. + // + // The following fields are valid: + // + // * `pose.heading` + // * `pose.latLngPair` + // * `pose.pitch` + // * `pose.roll` + // * `pose.level` + // * `pose.altitude` + // * `connections` + // * `places` + // + // + //

+ UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdatePhotoRequest) Reset() { *m = UpdatePhotoRequest{} } +func (m *UpdatePhotoRequest) String() string { return proto.CompactTextString(m) } +func (*UpdatePhotoRequest) ProtoMessage() {} +func (*UpdatePhotoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{7} +} +func (m *UpdatePhotoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdatePhotoRequest.Unmarshal(m, b) +} +func (m *UpdatePhotoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdatePhotoRequest.Marshal(b, m, deterministic) +} +func (dst *UpdatePhotoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdatePhotoRequest.Merge(dst, src) +} +func (m *UpdatePhotoRequest) XXX_Size() int { + return xxx_messageInfo_UpdatePhotoRequest.Size(m) +} +func (m *UpdatePhotoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdatePhotoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdatePhotoRequest proto.InternalMessageInfo + +func (m *UpdatePhotoRequest) GetPhoto() *Photo { + if m != nil { + return m.Photo + } + return nil +} + +func (m *UpdatePhotoRequest) GetUpdateMask() *field_mask.FieldMask { + if m != nil { + return m.UpdateMask + } + return nil +} + +// Request to update the metadata of photos. +// Updating the pixels of photos is not supported. +type BatchUpdatePhotosRequest struct { + // Required. List of + // [UpdatePhotoRequests][google.streetview.publish.v1.UpdatePhotoRequest]. + UpdatePhotoRequests []*UpdatePhotoRequest `protobuf:"bytes,1,rep,name=update_photo_requests,json=updatePhotoRequests,proto3" json:"update_photo_requests,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdatePhotosRequest) Reset() { *m = BatchUpdatePhotosRequest{} } +func (m *BatchUpdatePhotosRequest) String() string { return proto.CompactTextString(m) } +func (*BatchUpdatePhotosRequest) ProtoMessage() {} +func (*BatchUpdatePhotosRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{8} +} +func (m *BatchUpdatePhotosRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdatePhotosRequest.Unmarshal(m, b) +} +func (m *BatchUpdatePhotosRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdatePhotosRequest.Marshal(b, m, deterministic) +} +func (dst *BatchUpdatePhotosRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdatePhotosRequest.Merge(dst, src) +} +func (m *BatchUpdatePhotosRequest) XXX_Size() int { + return xxx_messageInfo_BatchUpdatePhotosRequest.Size(m) +} +func (m *BatchUpdatePhotosRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdatePhotosRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdatePhotosRequest proto.InternalMessageInfo + +func (m *BatchUpdatePhotosRequest) GetUpdatePhotoRequests() []*UpdatePhotoRequest { + if m != nil { + return m.UpdatePhotoRequests + } + return nil +} + +// Response to batch update of metadata of one or more +// [Photos][google.streetview.publish.v1.Photo]. 
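Because an absent update_mask replaces the photo metadata wholesale, the mask is the part worth illustrating. A sketch of a single-field update using one of the valid paths listed above; illustrative only, with the helper name as an assumption:

package streetviewexample

import (
	"context"

	publish "google.golang.org/genproto/googleapis/streetview/publish/v1"
	field_mask "google.golang.org/genproto/protobuf/field_mask"
)

// correctHeading rewrites only pose.heading on an existing photo. Omitting
// UpdateMask would replace the stored metadata with this sparse Photo.
func correctHeading(ctx context.Context, client publish.StreetViewPublishServiceClient, id string, heading float64) (*publish.Photo, error) {
	return client.UpdatePhoto(ctx, &publish.UpdatePhotoRequest{
		Photo: &publish.Photo{
			PhotoId: &publish.PhotoId{Id: id},
			Pose:    &publish.Pose{Heading: heading},
		},
		UpdateMask: &field_mask.FieldMask{Paths: []string{"pose.heading"}},
	})
}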
+type BatchUpdatePhotosResponse struct { + // List of results for each individual + // [Photo][google.streetview.publish.v1.Photo] updated, in the same order as + // the request. + Results []*PhotoResponse `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchUpdatePhotosResponse) Reset() { *m = BatchUpdatePhotosResponse{} } +func (m *BatchUpdatePhotosResponse) String() string { return proto.CompactTextString(m) } +func (*BatchUpdatePhotosResponse) ProtoMessage() {} +func (*BatchUpdatePhotosResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{9} +} +func (m *BatchUpdatePhotosResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchUpdatePhotosResponse.Unmarshal(m, b) +} +func (m *BatchUpdatePhotosResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchUpdatePhotosResponse.Marshal(b, m, deterministic) +} +func (dst *BatchUpdatePhotosResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchUpdatePhotosResponse.Merge(dst, src) +} +func (m *BatchUpdatePhotosResponse) XXX_Size() int { + return xxx_messageInfo_BatchUpdatePhotosResponse.Size(m) +} +func (m *BatchUpdatePhotosResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchUpdatePhotosResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchUpdatePhotosResponse proto.InternalMessageInfo + +func (m *BatchUpdatePhotosResponse) GetResults() []*PhotoResponse { + if m != nil { + return m.Results + } + return nil +} + +// Request to delete a [Photo][google.streetview.publish.v1.Photo]. +type DeletePhotoRequest struct { + // Required. ID of the [Photo][google.streetview.publish.v1.Photo]. + PhotoId string `protobuf:"bytes,1,opt,name=photo_id,json=photoId,proto3" json:"photo_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeletePhotoRequest) Reset() { *m = DeletePhotoRequest{} } +func (m *DeletePhotoRequest) String() string { return proto.CompactTextString(m) } +func (*DeletePhotoRequest) ProtoMessage() {} +func (*DeletePhotoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{10} +} +func (m *DeletePhotoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeletePhotoRequest.Unmarshal(m, b) +} +func (m *DeletePhotoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeletePhotoRequest.Marshal(b, m, deterministic) +} +func (dst *DeletePhotoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeletePhotoRequest.Merge(dst, src) +} +func (m *DeletePhotoRequest) XXX_Size() int { + return xxx_messageInfo_DeletePhotoRequest.Size(m) +} +func (m *DeletePhotoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeletePhotoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeletePhotoRequest proto.InternalMessageInfo + +func (m *DeletePhotoRequest) GetPhotoId() string { + if m != nil { + return m.PhotoId + } + return "" +} + +// Request to delete multiple [Photos][google.streetview.publish.v1.Photo]. +type BatchDeletePhotosRequest struct { + // Required. IDs of the [Photos][google.streetview.publish.v1.Photo]. HTTP + // GET requests require the following syntax for the URL query parameter: + // `photoIds=&photoIds=&...`. 
+ PhotoIds []string `protobuf:"bytes,1,rep,name=photo_ids,json=photoIds,proto3" json:"photo_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeletePhotosRequest) Reset() { *m = BatchDeletePhotosRequest{} } +func (m *BatchDeletePhotosRequest) String() string { return proto.CompactTextString(m) } +func (*BatchDeletePhotosRequest) ProtoMessage() {} +func (*BatchDeletePhotosRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{11} +} +func (m *BatchDeletePhotosRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeletePhotosRequest.Unmarshal(m, b) +} +func (m *BatchDeletePhotosRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeletePhotosRequest.Marshal(b, m, deterministic) +} +func (dst *BatchDeletePhotosRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeletePhotosRequest.Merge(dst, src) +} +func (m *BatchDeletePhotosRequest) XXX_Size() int { + return xxx_messageInfo_BatchDeletePhotosRequest.Size(m) +} +func (m *BatchDeletePhotosRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeletePhotosRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeletePhotosRequest proto.InternalMessageInfo + +func (m *BatchDeletePhotosRequest) GetPhotoIds() []string { + if m != nil { + return m.PhotoIds + } + return nil +} + +// Response to batch delete of one or more +// [Photos][google.streetview.publish.v1.Photo]. +type BatchDeletePhotosResponse struct { + // The status for the operation to delete a single + // [Photo][google.streetview.publish.v1.Photo] in the batch request. + Status []*status.Status `protobuf:"bytes,1,rep,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchDeletePhotosResponse) Reset() { *m = BatchDeletePhotosResponse{} } +func (m *BatchDeletePhotosResponse) String() string { return proto.CompactTextString(m) } +func (*BatchDeletePhotosResponse) ProtoMessage() {} +func (*BatchDeletePhotosResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_rpcmessages_3d6515828940d466, []int{12} +} +func (m *BatchDeletePhotosResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchDeletePhotosResponse.Unmarshal(m, b) +} +func (m *BatchDeletePhotosResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchDeletePhotosResponse.Marshal(b, m, deterministic) +} +func (dst *BatchDeletePhotosResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchDeletePhotosResponse.Merge(dst, src) +} +func (m *BatchDeletePhotosResponse) XXX_Size() int { + return xxx_messageInfo_BatchDeletePhotosResponse.Size(m) +} +func (m *BatchDeletePhotosResponse) XXX_DiscardUnknown() { + xxx_messageInfo_BatchDeletePhotosResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchDeletePhotosResponse proto.InternalMessageInfo + +func (m *BatchDeletePhotosResponse) GetStatus() []*status.Status { + if m != nil { + return m.Status + } + return nil +} + +func init() { + proto.RegisterType((*CreatePhotoRequest)(nil), "google.streetview.publish.v1.CreatePhotoRequest") + proto.RegisterType((*GetPhotoRequest)(nil), "google.streetview.publish.v1.GetPhotoRequest") + proto.RegisterType((*BatchGetPhotosRequest)(nil), "google.streetview.publish.v1.BatchGetPhotosRequest") + proto.RegisterType((*BatchGetPhotosResponse)(nil), 
"google.streetview.publish.v1.BatchGetPhotosResponse") + proto.RegisterType((*PhotoResponse)(nil), "google.streetview.publish.v1.PhotoResponse") + proto.RegisterType((*ListPhotosRequest)(nil), "google.streetview.publish.v1.ListPhotosRequest") + proto.RegisterType((*ListPhotosResponse)(nil), "google.streetview.publish.v1.ListPhotosResponse") + proto.RegisterType((*UpdatePhotoRequest)(nil), "google.streetview.publish.v1.UpdatePhotoRequest") + proto.RegisterType((*BatchUpdatePhotosRequest)(nil), "google.streetview.publish.v1.BatchUpdatePhotosRequest") + proto.RegisterType((*BatchUpdatePhotosResponse)(nil), "google.streetview.publish.v1.BatchUpdatePhotosResponse") + proto.RegisterType((*DeletePhotoRequest)(nil), "google.streetview.publish.v1.DeletePhotoRequest") + proto.RegisterType((*BatchDeletePhotosRequest)(nil), "google.streetview.publish.v1.BatchDeletePhotosRequest") + proto.RegisterType((*BatchDeletePhotosResponse)(nil), "google.streetview.publish.v1.BatchDeletePhotosResponse") + proto.RegisterEnum("google.streetview.publish.v1.PhotoView", PhotoView_name, PhotoView_value) +} + +func init() { + proto.RegisterFile("google/streetview/publish/v1/rpcmessages.proto", fileDescriptor_rpcmessages_3d6515828940d466) +} + +var fileDescriptor_rpcmessages_3d6515828940d466 = []byte{ + // 669 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0x5d, 0x4f, 0xd3, 0x5e, + 0x18, 0xff, 0x17, 0xd8, 0x60, 0x0f, 0x7f, 0x04, 0x8f, 0x82, 0x65, 0x62, 0xb2, 0x94, 0x44, 0x17, + 0x34, 0x2d, 0xe0, 0x85, 0x31, 0xbb, 0x62, 0x1b, 0x12, 0x92, 0xf1, 0x92, 0x4e, 0x34, 0xf1, 0xa6, + 0xe9, 0xda, 0x87, 0xd2, 0x50, 0x76, 0x6a, 0xcf, 0xe9, 0x50, 0xae, 0xfc, 0x00, 0xea, 0xa5, 0xdf, + 0xc9, 0x6f, 0x65, 0x7a, 0x7a, 0x0e, 0x8c, 0x6d, 0x2e, 0xd3, 0x10, 0xef, 0x7a, 0x9e, 0x97, 0x5f, + 0x7f, 0xe7, 0xf7, 0xbc, 0x1c, 0x30, 0x03, 0x4a, 0x83, 0x08, 0x2d, 0xc6, 0x13, 0x44, 0xde, 0x0b, + 0xf1, 0xd2, 0x8a, 0xd3, 0x4e, 0x14, 0xb2, 0x33, 0xab, 0xb7, 0x65, 0x25, 0xb1, 0x77, 0x81, 0x8c, + 0xb9, 0x01, 0x32, 0x33, 0x4e, 0x28, 0xa7, 0x64, 0x2d, 0x8f, 0x37, 0x6f, 0xe2, 0x4d, 0x19, 0x6f, + 0xf6, 0xb6, 0xca, 0x15, 0x89, 0x26, 0x62, 0x3b, 0xe9, 0xa9, 0x75, 0x1a, 0x62, 0xe4, 0x3b, 0x17, + 0x2e, 0x3b, 0xcf, 0xf3, 0xcb, 0x8f, 0x64, 0x44, 0x12, 0x7b, 0x16, 0xe3, 0x2e, 0x4f, 0x25, 0x70, + 0xf9, 0xc5, 0x78, 0x22, 0xc8, 0x68, 0x9a, 0x78, 0x8a, 0x86, 0x71, 0x04, 0xa4, 0x91, 0xa0, 0xcb, + 0xf1, 0xf8, 0x8c, 0x72, 0x6a, 0xe3, 0xc7, 0x14, 0x19, 0x27, 0xaf, 0xa1, 0x10, 0x67, 0x67, 0x5d, + 0xab, 0x68, 0xd5, 0xf9, 0xed, 0x75, 0x73, 0x1c, 0x59, 0x33, 0x4f, 0xcd, 0x33, 0x8c, 0xef, 0x1a, + 0x2c, 0xee, 0x21, 0xbf, 0x05, 0xb7, 0x0a, 0x73, 0xc2, 0xe9, 0x84, 0xbe, 0x40, 0x2c, 0xd9, 0xb3, + 0xe2, 0xbc, 0xef, 0x93, 0x1a, 0xcc, 0x64, 0x70, 0xfa, 0x54, 0x45, 0xab, 0xde, 0xdb, 0x7e, 0x36, + 0xc1, 0x8f, 0xde, 0x85, 0x78, 0x69, 0x8b, 0x24, 0xb2, 0x0e, 0x0b, 0x91, 0xdb, 0x0d, 0x52, 0x37, + 0x40, 0xc7, 0xa3, 0x3e, 0xea, 0xd3, 0x02, 0xfc, 0x7f, 0x65, 0x6c, 0x50, 0x1f, 0x8d, 0x1f, 0x1a, + 0x2c, 0xd7, 0x5d, 0xee, 0x9d, 0x29, 0x56, 0x4c, 0xd1, 0x7a, 0x0c, 0x25, 0x45, 0x8b, 0xe9, 0x5a, + 0x65, 0xba, 0x5a, 0xb2, 0xe7, 0x24, 0x2f, 0xf6, 0x0f, 0x88, 0x39, 0xb0, 0x32, 0xc8, 0x8b, 0xc5, + 0xb4, 0xcb, 0x90, 0xec, 0xc2, 0x6c, 0x82, 0x2c, 0x8d, 0x78, 0x4e, 0x6b, 0x7e, 0xfb, 0xf9, 0x24, + 0x05, 0x90, 0xd9, 0xb6, 0xca, 0x35, 0x7a, 0xb0, 0x70, 0xcb, 0x43, 0x36, 0xa0, 0x98, 0xb7, 0x8a, + 0xac, 0x2b, 0x51, 0xb0, 0x49, 0xec, 0x99, 0x6d, 0xe1, 0xb1, 0x65, 0xc4, 0x4d, 0x0b, 0x4c, 0xfd, + 0x71, 0x0b, 0xfc, 0xd4, 0xe0, 0x7e, 0x2b, 0x64, 0x03, 
0x6a, 0x2b, 0x41, 0xb5, 0xbf, 0x11, 0x34, + 0x2b, 0x55, 0x26, 0x26, 0x0b, 0xaf, 0x50, 0x30, 0x2a, 0xd8, 0x73, 0x99, 0xa1, 0x1d, 0x5e, 0x21, + 0x79, 0x02, 0x20, 0x9c, 0x9c, 0x9e, 0x63, 0x57, 0x4a, 0x2d, 0xc2, 0xdf, 0x66, 0x06, 0xb2, 0x02, + 0xc5, 0xd3, 0x30, 0xe2, 0x98, 0xe8, 0x33, 0xc2, 0x25, 0x4f, 0xc3, 0x45, 0x2a, 0x8c, 0x28, 0xd2, + 0x67, 0x20, 0xfd, 0x57, 0x91, 0x42, 0xd6, 0xa0, 0x28, 0xae, 0xaa, 0xea, 0x33, 0x91, 0x3a, 0x32, + 0x85, 0x3c, 0x85, 0xc5, 0x2e, 0x7e, 0xe2, 0x4e, 0x1f, 0xe7, 0x29, 0xf1, 0xe7, 0x85, 0xcc, 0x7c, + 0xac, 0x78, 0x1b, 0x5f, 0x35, 0x20, 0x27, 0xb1, 0x7f, 0x77, 0xb3, 0x49, 0x6a, 0x30, 0x9f, 0x0a, + 0x40, 0xb1, 0x48, 0x64, 0x65, 0xcb, 0x0a, 0x40, 0xed, 0x1a, 0xf3, 0x4d, 0xb6, 0x6b, 0x0e, 0x5c, + 0x76, 0x6e, 0x43, 0x1e, 0x9e, 0x7d, 0x1b, 0x5f, 0x34, 0xd0, 0x45, 0xbf, 0xf6, 0x71, 0xba, 0x2e, + 0xae, 0x0f, 0xcb, 0x12, 0x39, 0x9f, 0xa8, 0x24, 0xb7, 0x2b, 0x7d, 0x36, 0xc7, 0x93, 0x1c, 0xbe, + 0xa5, 0xfd, 0x20, 0x1d, 0xb2, 0x31, 0xa3, 0x03, 0xab, 0x23, 0x18, 0xdc, 0xed, 0xd0, 0x58, 0x40, + 0x9a, 0x18, 0xe1, 0x80, 0xe8, 0xbf, 0xdf, 0x60, 0xc6, 0x2b, 0x29, 0x4b, 0x5f, 0xd6, 0x44, 0x1b, + 0xc6, 0xd8, 0x93, 0xb7, 0xb9, 0x9d, 0x38, 0x62, 0x54, 0xa7, 0xc7, 0x8f, 0xea, 0xc6, 0x26, 0x94, + 0xae, 0xe7, 0x85, 0x94, 0xa0, 0x50, 0xdf, 0x69, 0xef, 0x37, 0x96, 0xfe, 0x23, 0x3a, 0x3c, 0xdc, + 0x3f, 0x6c, 0xb4, 0x4e, 0x9a, 0xbb, 0x4e, 0xf3, 0xe8, 0xfd, 0x61, 0xeb, 0x68, 0xa7, 0xe9, 0x9c, + 0xd8, 0xad, 0x25, 0xad, 0xfe, 0x4d, 0x83, 0xaa, 0x47, 0x2f, 0x14, 0x66, 0x80, 0xd4, 0x4c, 0x03, + 0x6f, 0xb4, 0x50, 0xf5, 0xb5, 0xb6, 0x30, 0x67, 0xe8, 0xc7, 0xb9, 0xd5, 0x8e, 0xbd, 0x03, 0xf9, + 0x9a, 0x7d, 0x68, 0x28, 0x0c, 0x9a, 0xcd, 0x8d, 0x49, 0x93, 0xc0, 0x0a, 0xb0, 0x2b, 0x7a, 0xc9, + 0xca, 0x5d, 0x6e, 0x1c, 0xb2, 0xd1, 0xaf, 0x51, 0x4d, 0x7e, 0x76, 0x8a, 0x22, 0xfe, 0xe5, 0xaf, + 0x00, 0x00, 0x00, 0xff, 0xff, 0xab, 0x7d, 0x68, 0xfd, 0x45, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/streetview_publish.pb.go b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/streetview_publish.pb.go new file mode 100644 index 0000000..86c33ff --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/streetview/publish/v1/streetview_publish.pb.go @@ -0,0 +1,706 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/streetview/publish/v1/streetview_publish.proto + +package publish // import "google.golang.org/genproto/googleapis/streetview/publish/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// StreetViewPublishServiceClient is the client API for StreetViewPublishService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type StreetViewPublishServiceClient interface { + // Creates an upload session to start uploading photo bytes. The method uses + // the upload URL of the returned + // [UploadRef][google.streetview.publish.v1.UploadRef] to upload the bytes for + // the [Photo][google.streetview.publish.v1.Photo]. + // + // In addition to the photo requirements shown in + // https://support.google.com/maps/answer/7012050?hl=en&ref_topic=6275604, + // the photo must meet the following requirements: + // + // * Photo Sphere XMP metadata must be included in the photo medadata. See + // https://developers.google.com/streetview/spherical-metadata for the + // required fields. + // * The pixel size of the photo must meet the size requirements listed in + // https://support.google.com/maps/answer/7012050?hl=en&ref_topic=6275604, and + // the photo must be a full 360 horizontally. + // + // After the upload completes, the method uses + // [UploadRef][google.streetview.publish.v1.UploadRef] with + // [CreatePhoto][google.streetview.publish.v1.StreetViewPublishService.CreatePhoto] + // to create the [Photo][google.streetview.publish.v1.Photo] object entry. + StartUpload(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UploadRef, error) + // After the client finishes uploading the photo with the returned + // [UploadRef][google.streetview.publish.v1.UploadRef], + // [CreatePhoto][google.streetview.publish.v1.StreetViewPublishService.CreatePhoto] + // publishes the uploaded [Photo][google.streetview.publish.v1.Photo] to + // Street View on Google Maps. + // + // Currently, the only way to set heading, pitch, and roll in CreatePhoto is + // through the [Photo Sphere XMP + // metadata](https://developers.google.com/streetview/spherical-metadata) in + // the photo bytes. CreatePhoto ignores the `pose.heading`, `pose.pitch`, + // `pose.roll`, `pose.altitude`, and `pose.level` fields in Pose. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] if + // the request is malformed or if the uploaded photo is not a 360 photo. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the upload + // reference does not exist. + // * [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // if the account has reached the storage limit. + CreatePhoto(ctx context.Context, in *CreatePhotoRequest, opts ...grpc.CallOption) (*Photo, error) + // Gets the metadata of the specified + // [Photo][google.streetview.publish.v1.Photo]. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested + // [Photo][google.streetview.publish.v1.Photo]. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the requested + // [Photo][google.streetview.publish.v1.Photo] does not exist. + // * [google.rpc.Code.UNAVAILABLE][google.rpc.Code.UNAVAILABLE] if the + // requested [Photo][google.streetview.publish.v1.Photo] is still being + // indexed. 
+ GetPhoto(ctx context.Context, in *GetPhotoRequest, opts ...grpc.CallOption) (*Photo, error) + // Gets the metadata of the specified + // [Photo][google.streetview.publish.v1.Photo] batch. + // + // Note that if + // [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos] + // fails, either critical fields are missing or there is an authentication + // error. Even if + // [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchGetPhotosResponse.results][google.streetview.publish.v1.BatchGetPhotosResponse.results]. + // See + // [GetPhoto][google.streetview.publish.v1.StreetViewPublishService.GetPhoto] + // for specific failures that can occur per photo. + BatchGetPhotos(ctx context.Context, in *BatchGetPhotosRequest, opts ...grpc.CallOption) (*BatchGetPhotosResponse, error) + // Lists all the [Photos][google.streetview.publish.v1.Photo] that belong to + // the user. + // + // + ListPhotos(ctx context.Context, in *ListPhotosRequest, opts ...grpc.CallOption) (*ListPhotosResponse, error) + // Updates the metadata of a [Photo][google.streetview.publish.v1.Photo], such + // as pose, place association, connections, etc. Changing the pixels of a + // photo is not supported. + // + // Only the fields specified in the + // [updateMask][google.streetview.publish.v1.UpdatePhotoRequest.update_mask] + // field are used. If `updateMask` is not present, the update applies to all + // fields. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested photo. + // * [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] if + // the request is malformed. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the requested + // photo does not exist. + // * [google.rpc.Code.UNAVAILABLE][google.rpc.Code.UNAVAILABLE] if the + // requested [Photo][google.streetview.publish.v1.Photo] is still being + // indexed. + UpdatePhoto(ctx context.Context, in *UpdatePhotoRequest, opts ...grpc.CallOption) (*Photo, error) + // Updates the metadata of [Photos][google.streetview.publish.v1.Photo], such + // as pose, place association, connections, etc. Changing the pixels of photos + // is not supported. + // + // Note that if + // [BatchUpdatePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchUpdatePhotos] + // fails, either critical fields are missing or there is an authentication + // error. Even if + // [BatchUpdatePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchUpdatePhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchUpdatePhotosResponse.results][google.streetview.publish.v1.BatchUpdatePhotosResponse.results]. + // See + // [UpdatePhoto][google.streetview.publish.v1.StreetViewPublishService.UpdatePhoto] + // for specific failures that can occur per photo. + // + // Only the fields specified in + // [updateMask][google.streetview.publish.v1.UpdatePhotoRequest.update_mask] + // field are used. If `updateMask` is not present, the update applies to all + // fields. 
+ // + // The number of + // [UpdatePhotoRequest][google.streetview.publish.v1.UpdatePhotoRequest] + // messages in a + // [BatchUpdatePhotosRequest][google.streetview.publish.v1.BatchUpdatePhotosRequest] + // must not exceed 20. + // + // + BatchUpdatePhotos(ctx context.Context, in *BatchUpdatePhotosRequest, opts ...grpc.CallOption) (*BatchUpdatePhotosResponse, error) + // Deletes a [Photo][google.streetview.publish.v1.Photo] and its metadata. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested photo. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the photo ID + // does not exist. + DeletePhoto(ctx context.Context, in *DeletePhotoRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // Deletes a list of [Photos][google.streetview.publish.v1.Photo] and their + // metadata. + // + // Note that if + // [BatchDeletePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchDeletePhotos] + // fails, either critical fields are missing or there was an authentication + // error. Even if + // [BatchDeletePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchDeletePhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchDeletePhotosResponse.results][google.streetview.publish.v1.BatchDeletePhotosResponse.status]. + // See + // [DeletePhoto][google.streetview.publish.v1.StreetViewPublishService.DeletePhoto] + // for specific failures that can occur per photo. + BatchDeletePhotos(ctx context.Context, in *BatchDeletePhotosRequest, opts ...grpc.CallOption) (*BatchDeletePhotosResponse, error) +} + +type streetViewPublishServiceClient struct { + cc *grpc.ClientConn +} + +func NewStreetViewPublishServiceClient(cc *grpc.ClientConn) StreetViewPublishServiceClient { + return &streetViewPublishServiceClient{cc} +} + +func (c *streetViewPublishServiceClient) StartUpload(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (*UploadRef, error) { + out := new(UploadRef) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/StartUpload", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) CreatePhoto(ctx context.Context, in *CreatePhotoRequest, opts ...grpc.CallOption) (*Photo, error) { + out := new(Photo) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/CreatePhoto", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) GetPhoto(ctx context.Context, in *GetPhotoRequest, opts ...grpc.CallOption) (*Photo, error) { + out := new(Photo) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/GetPhoto", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) BatchGetPhotos(ctx context.Context, in *BatchGetPhotosRequest, opts ...grpc.CallOption) (*BatchGetPhotosResponse, error) { + out := new(BatchGetPhotosResponse) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/BatchGetPhotos", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) ListPhotos(ctx context.Context, in *ListPhotosRequest, opts ...grpc.CallOption) (*ListPhotosResponse, error) { + out := new(ListPhotosResponse) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/ListPhotos", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) UpdatePhoto(ctx context.Context, in *UpdatePhotoRequest, opts ...grpc.CallOption) (*Photo, error) { + out := new(Photo) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/UpdatePhoto", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) BatchUpdatePhotos(ctx context.Context, in *BatchUpdatePhotosRequest, opts ...grpc.CallOption) (*BatchUpdatePhotosResponse, error) { + out := new(BatchUpdatePhotosResponse) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/BatchUpdatePhotos", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) DeletePhoto(ctx context.Context, in *DeletePhotoRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/DeletePhoto", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *streetViewPublishServiceClient) BatchDeletePhotos(ctx context.Context, in *BatchDeletePhotosRequest, opts ...grpc.CallOption) (*BatchDeletePhotosResponse, error) { + out := new(BatchDeletePhotosResponse) + err := c.cc.Invoke(ctx, "/google.streetview.publish.v1.StreetViewPublishService/BatchDeletePhotos", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// StreetViewPublishServiceServer is the server API for StreetViewPublishService service. +type StreetViewPublishServiceServer interface { + // Creates an upload session to start uploading photo bytes. The method uses + // the upload URL of the returned + // [UploadRef][google.streetview.publish.v1.UploadRef] to upload the bytes for + // the [Photo][google.streetview.publish.v1.Photo]. + // + // In addition to the photo requirements shown in + // https://support.google.com/maps/answer/7012050?hl=en&ref_topic=6275604, + // the photo must meet the following requirements: + // + // * Photo Sphere XMP metadata must be included in the photo medadata. See + // https://developers.google.com/streetview/spherical-metadata for the + // required fields. + // * The pixel size of the photo must meet the size requirements listed in + // https://support.google.com/maps/answer/7012050?hl=en&ref_topic=6275604, and + // the photo must be a full 360 horizontally. + // + // After the upload completes, the method uses + // [UploadRef][google.streetview.publish.v1.UploadRef] with + // [CreatePhoto][google.streetview.publish.v1.StreetViewPublishService.CreatePhoto] + // to create the [Photo][google.streetview.publish.v1.Photo] object entry. + StartUpload(context.Context, *empty.Empty) (*UploadRef, error) + // After the client finishes uploading the photo with the returned + // [UploadRef][google.streetview.publish.v1.UploadRef], + // [CreatePhoto][google.streetview.publish.v1.StreetViewPublishService.CreatePhoto] + // publishes the uploaded [Photo][google.streetview.publish.v1.Photo] to + // Street View on Google Maps. 
+ // + // Currently, the only way to set heading, pitch, and roll in CreatePhoto is + // through the [Photo Sphere XMP + // metadata](https://developers.google.com/streetview/spherical-metadata) in + // the photo bytes. CreatePhoto ignores the `pose.heading`, `pose.pitch`, + // `pose.roll`, `pose.altitude`, and `pose.level` fields in Pose. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] if + // the request is malformed or if the uploaded photo is not a 360 photo. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the upload + // reference does not exist. + // * [google.rpc.Code.RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + // if the account has reached the storage limit. + CreatePhoto(context.Context, *CreatePhotoRequest) (*Photo, error) + // Gets the metadata of the specified + // [Photo][google.streetview.publish.v1.Photo]. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested + // [Photo][google.streetview.publish.v1.Photo]. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the requested + // [Photo][google.streetview.publish.v1.Photo] does not exist. + // * [google.rpc.Code.UNAVAILABLE][google.rpc.Code.UNAVAILABLE] if the + // requested [Photo][google.streetview.publish.v1.Photo] is still being + // indexed. + GetPhoto(context.Context, *GetPhotoRequest) (*Photo, error) + // Gets the metadata of the specified + // [Photo][google.streetview.publish.v1.Photo] batch. + // + // Note that if + // [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos] + // fails, either critical fields are missing or there is an authentication + // error. Even if + // [BatchGetPhotos][google.streetview.publish.v1.StreetViewPublishService.BatchGetPhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchGetPhotosResponse.results][google.streetview.publish.v1.BatchGetPhotosResponse.results]. + // See + // [GetPhoto][google.streetview.publish.v1.StreetViewPublishService.GetPhoto] + // for specific failures that can occur per photo. + BatchGetPhotos(context.Context, *BatchGetPhotosRequest) (*BatchGetPhotosResponse, error) + // Lists all the [Photos][google.streetview.publish.v1.Photo] that belong to + // the user. + // + // + ListPhotos(context.Context, *ListPhotosRequest) (*ListPhotosResponse, error) + // Updates the metadata of a [Photo][google.streetview.publish.v1.Photo], such + // as pose, place association, connections, etc. Changing the pixels of a + // photo is not supported. + // + // Only the fields specified in the + // [updateMask][google.streetview.publish.v1.UpdatePhotoRequest.update_mask] + // field are used. If `updateMask` is not present, the update applies to all + // fields. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested photo. + // * [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] if + // the request is malformed. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the requested + // photo does not exist. 
+ // * [google.rpc.Code.UNAVAILABLE][google.rpc.Code.UNAVAILABLE] if the + // requested [Photo][google.streetview.publish.v1.Photo] is still being + // indexed. + UpdatePhoto(context.Context, *UpdatePhotoRequest) (*Photo, error) + // Updates the metadata of [Photos][google.streetview.publish.v1.Photo], such + // as pose, place association, connections, etc. Changing the pixels of photos + // is not supported. + // + // Note that if + // [BatchUpdatePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchUpdatePhotos] + // fails, either critical fields are missing or there is an authentication + // error. Even if + // [BatchUpdatePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchUpdatePhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchUpdatePhotosResponse.results][google.streetview.publish.v1.BatchUpdatePhotosResponse.results]. + // See + // [UpdatePhoto][google.streetview.publish.v1.StreetViewPublishService.UpdatePhoto] + // for specific failures that can occur per photo. + // + // Only the fields specified in + // [updateMask][google.streetview.publish.v1.UpdatePhotoRequest.update_mask] + // field are used. If `updateMask` is not present, the update applies to all + // fields. + // + // The number of + // [UpdatePhotoRequest][google.streetview.publish.v1.UpdatePhotoRequest] + // messages in a + // [BatchUpdatePhotosRequest][google.streetview.publish.v1.BatchUpdatePhotosRequest] + // must not exceed 20. + // + // + BatchUpdatePhotos(context.Context, *BatchUpdatePhotosRequest) (*BatchUpdatePhotosResponse, error) + // Deletes a [Photo][google.streetview.publish.v1.Photo] and its metadata. + // + // This method returns the following error codes: + // + // * [google.rpc.Code.PERMISSION_DENIED][google.rpc.Code.PERMISSION_DENIED] if + // the requesting user did not create the requested photo. + // * [google.rpc.Code.NOT_FOUND][google.rpc.Code.NOT_FOUND] if the photo ID + // does not exist. + DeletePhoto(context.Context, *DeletePhotoRequest) (*empty.Empty, error) + // Deletes a list of [Photos][google.streetview.publish.v1.Photo] and their + // metadata. + // + // Note that if + // [BatchDeletePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchDeletePhotos] + // fails, either critical fields are missing or there was an authentication + // error. Even if + // [BatchDeletePhotos][google.streetview.publish.v1.StreetViewPublishService.BatchDeletePhotos] + // succeeds, individual photos in the batch may have failures. + // These failures are specified in each + // [PhotoResponse.status][google.streetview.publish.v1.PhotoResponse.status] + // in + // [BatchDeletePhotosResponse.results][google.streetview.publish.v1.BatchDeletePhotosResponse.status]. + // See + // [DeletePhoto][google.streetview.publish.v1.StreetViewPublishService.DeletePhoto] + // for specific failures that can occur per photo. 
+ BatchDeletePhotos(context.Context, *BatchDeletePhotosRequest) (*BatchDeletePhotosResponse, error) +} + +func RegisterStreetViewPublishServiceServer(s *grpc.Server, srv StreetViewPublishServiceServer) { + s.RegisterService(&_StreetViewPublishService_serviceDesc, srv) +} + +func _StreetViewPublishService_StartUpload_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(empty.Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).StartUpload(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/StartUpload", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).StartUpload(ctx, req.(*empty.Empty)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_CreatePhoto_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreatePhotoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).CreatePhoto(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/CreatePhoto", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).CreatePhoto(ctx, req.(*CreatePhotoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_GetPhoto_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetPhotoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).GetPhoto(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/GetPhoto", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).GetPhoto(ctx, req.(*GetPhotoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_BatchGetPhotos_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchGetPhotosRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).BatchGetPhotos(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/BatchGetPhotos", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).BatchGetPhotos(ctx, req.(*BatchGetPhotosRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_ListPhotos_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListPhotosRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).ListPhotos(ctx, in) + } + info := 
&grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/ListPhotos", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).ListPhotos(ctx, req.(*ListPhotosRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_UpdatePhoto_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdatePhotoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).UpdatePhoto(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/UpdatePhoto", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).UpdatePhoto(ctx, req.(*UpdatePhotoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_BatchUpdatePhotos_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchUpdatePhotosRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).BatchUpdatePhotos(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/BatchUpdatePhotos", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).BatchUpdatePhotos(ctx, req.(*BatchUpdatePhotosRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_DeletePhoto_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeletePhotoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).DeletePhoto(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/DeletePhoto", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).DeletePhoto(ctx, req.(*DeletePhotoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _StreetViewPublishService_BatchDeletePhotos_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(BatchDeletePhotosRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(StreetViewPublishServiceServer).BatchDeletePhotos(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/google.streetview.publish.v1.StreetViewPublishService/BatchDeletePhotos", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(StreetViewPublishServiceServer).BatchDeletePhotos(ctx, req.(*BatchDeletePhotosRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _StreetViewPublishService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.streetview.publish.v1.StreetViewPublishService", + HandlerType: (*StreetViewPublishServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + 
MethodName: "StartUpload", + Handler: _StreetViewPublishService_StartUpload_Handler, + }, + { + MethodName: "CreatePhoto", + Handler: _StreetViewPublishService_CreatePhoto_Handler, + }, + { + MethodName: "GetPhoto", + Handler: _StreetViewPublishService_GetPhoto_Handler, + }, + { + MethodName: "BatchGetPhotos", + Handler: _StreetViewPublishService_BatchGetPhotos_Handler, + }, + { + MethodName: "ListPhotos", + Handler: _StreetViewPublishService_ListPhotos_Handler, + }, + { + MethodName: "UpdatePhoto", + Handler: _StreetViewPublishService_UpdatePhoto_Handler, + }, + { + MethodName: "BatchUpdatePhotos", + Handler: _StreetViewPublishService_BatchUpdatePhotos_Handler, + }, + { + MethodName: "DeletePhoto", + Handler: _StreetViewPublishService_DeletePhoto_Handler, + }, + { + MethodName: "BatchDeletePhotos", + Handler: _StreetViewPublishService_BatchDeletePhotos_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "google/streetview/publish/v1/streetview_publish.proto", +} + +func init() { + proto.RegisterFile("google/streetview/publish/v1/streetview_publish.proto", fileDescriptor_streetview_publish_0ab84337183ba29d) +} + +var fileDescriptor_streetview_publish_0ab84337183ba29d = []byte{ + // 533 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0x4f, 0x6f, 0xd3, 0x30, + 0x18, 0xc6, 0x15, 0x24, 0x10, 0xb8, 0x08, 0x69, 0x86, 0x55, 0x53, 0x3a, 0x24, 0x08, 0x12, 0xa0, + 0x6a, 0xd8, 0x1b, 0xe3, 0x8f, 0x54, 0x6e, 0x1d, 0x88, 0x0b, 0x87, 0x69, 0xd5, 0x38, 0x70, 0x99, + 0xdc, 0xf4, 0x5d, 0x6a, 0x29, 0x8d, 0x4d, 0xec, 0x74, 0x42, 0x30, 0x0e, 0xe3, 0xc8, 0x0d, 0x2e, + 0x7c, 0x03, 0x3e, 0x10, 0x5f, 0x81, 0x0f, 0x82, 0xea, 0xd8, 0x4d, 0x36, 0x8a, 0x49, 0x4e, 0x69, + 0xf3, 0x3e, 0xcf, 0xfb, 0xfc, 0xfa, 0xbe, 0xae, 0xd1, 0xd3, 0x44, 0x88, 0x24, 0x05, 0xaa, 0x74, + 0x0e, 0xa0, 0xe7, 0x1c, 0x4e, 0xa8, 0x2c, 0xc6, 0x29, 0x57, 0x53, 0x3a, 0xdf, 0xa9, 0xbd, 0x3d, + 0xb2, 0x6f, 0x89, 0xcc, 0x85, 0x16, 0x78, 0xb3, 0xb4, 0x91, 0x4a, 0x40, 0x9c, 0x60, 0xbe, 0x13, + 0xda, 0x2a, 0x65, 0x92, 0x53, 0x96, 0x65, 0x42, 0x33, 0xcd, 0x45, 0xa6, 0x4a, 0x6f, 0xd8, 0xb3, + 0x55, 0xf3, 0x6d, 0x5c, 0x1c, 0x53, 0x98, 0x49, 0xfd, 0xc1, 0x16, 0xb7, 0xbc, 0x3c, 0x39, 0x28, + 0x51, 0xe4, 0x31, 0xb8, 0x56, 0xc4, 0xaf, 0x96, 0xf1, 0x0c, 0x94, 0x62, 0x89, 0xd3, 0x3f, 0xfe, + 0x8a, 0xd0, 0xc6, 0xc8, 0x68, 0xdf, 0x72, 0x38, 0xd9, 0x2f, 0xa5, 0x23, 0xc8, 0xe7, 0x3c, 0x06, + 0x2c, 0x51, 0x67, 0xa4, 0x59, 0xae, 0x0f, 0x65, 0x2a, 0xd8, 0x04, 0x77, 0x6d, 0x73, 0xe2, 0x38, + 0xc9, 0xab, 0x05, 0x67, 0xf8, 0x80, 0xf8, 0x7e, 0x3b, 0x29, 0xdd, 0x07, 0x70, 0x1c, 0xdd, 0x39, + 0xfb, 0xf5, 0xfb, 0xfb, 0xa5, 0x30, 0x5a, 0x5f, 0xb0, 0xc8, 0xa9, 0xd0, 0x62, 0xa0, 0xaa, 0xfe, + 0x83, 0xa0, 0x8f, 0x3f, 0xa3, 0xce, 0x5e, 0x0e, 0x4c, 0xc3, 0xfe, 0xa2, 0x8a, 0xb7, 0xfd, 0x9d, + 0x6b, 0xd2, 0x03, 0x78, 0x5f, 0x80, 0xd2, 0xe1, 0x3d, 0xbf, 0xc3, 0x68, 0xa3, 0x0d, 0xc3, 0x81, + 0xa3, 0x6b, 0x15, 0xc7, 0x65, 0xf3, 0xc0, 0x9f, 0xd0, 0xd5, 0xd7, 0xa0, 0xcb, 0xf0, 0x47, 0xfe, + 0x56, 0x4e, 0xd7, 0x2a, 0x79, 0xd3, 0x24, 0x77, 0xf1, 0xad, 0x65, 0x32, 0xfd, 0x68, 0x1e, 0x47, + 0x7c, 0x72, 0x8a, 0x7f, 0x04, 0xe8, 0xc6, 0x90, 0xe9, 0x78, 0xea, 0x7a, 0x2b, 0xbc, 0xeb, 0xef, + 0x7a, 0x5e, 0xed, 0x50, 0x9e, 0xb4, 0x33, 0x29, 0x29, 0x32, 0x05, 0x51, 0xcf, 0xb0, 0xad, 0xe3, + 0x9b, 0x4b, 0x36, 0x35, 0x18, 0x5b, 0x29, 0xfe, 0x12, 0x20, 0xf4, 0x86, 0x2b, 0x87, 0x45, 0xfd, + 0x09, 0x95, 0xd2, 0x21, 0x6d, 0x37, 0x37, 0x58, 0x1c, 0x6c, 0x70, 0xae, 0x63, 0x54, 0xe1, 0xe0, + 0x6f, 0x01, 0xea, 0x1c, 0xca, 0x49, 0xd3, 
0xf3, 0x51, 0x93, 0xb6, 0xda, 0xd2, 0x96, 0x89, 0xbe, + 0x1f, 0xde, 0xbe, 0xb8, 0x25, 0xe2, 0x76, 0x45, 0xf8, 0xe4, 0xd4, 0x9d, 0x99, 0x9f, 0x01, 0x5a, + 0x33, 0x23, 0xad, 0xc5, 0x29, 0xfc, 0xac, 0xc1, 0x0e, 0xea, 0x06, 0x07, 0xf8, 0xbc, 0xb5, 0xcf, + 0xce, 0xeb, 0xae, 0x81, 0xee, 0x45, 0xdd, 0x8b, 0xeb, 0x2b, 0xd5, 0x8b, 0x7f, 0x57, 0x81, 0x3a, + 0x2f, 0x21, 0x85, 0x86, 0xd3, 0xab, 0x49, 0x1d, 0xdc, 0x3f, 0x6e, 0x00, 0x77, 0xac, 0xfb, 0xab, + 0x8f, 0xf5, 0x72, 0x40, 0xb5, 0x8e, 0xcd, 0x06, 0x54, 0x37, 0xb4, 0x19, 0xd0, 0x79, 0xdf, 0xff, + 0x06, 0x54, 0xaa, 0x07, 0x41, 0x7f, 0x78, 0x16, 0xa0, 0x87, 0xb1, 0x98, 0xb9, 0x84, 0x04, 0x04, + 0x29, 0x92, 0x78, 0x75, 0xd2, 0x70, 0xed, 0xaf, 0x7b, 0xf3, 0xdd, 0x9e, 0x33, 0x8a, 0x94, 0x65, + 0x09, 0x11, 0x79, 0x42, 0x13, 0xc8, 0xcc, 0xb0, 0x68, 0x59, 0x62, 0x92, 0xab, 0xd5, 0x97, 0xf3, + 0x0b, 0xfb, 0x71, 0x7c, 0xc5, 0xe8, 0x77, 0xff, 0x04, 0x00, 0x00, 0xff, 0xff, 0x7d, 0x9d, 0xfe, + 0x1c, 0x89, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/calendarperiod/calendar_period.pb.go b/vendor/google.golang.org/genproto/googleapis/type/calendarperiod/calendar_period.pb.go new file mode 100644 index 0000000..b2fe47c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/calendarperiod/calendar_period.pb.go @@ -0,0 +1,103 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/calendar_period.proto + +package calendarperiod // import "google.golang.org/genproto/googleapis/type/calendarperiod" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A `CalendarPeriod` represents the abstract concept of a time period that has +// a canonical start. Grammatically, "the start of the current +// `CalendarPeriod`." All calendar times begin at midnight UTC. +type CalendarPeriod int32 + +const ( + // Undefined period, raises an error. + CalendarPeriod_CALENDAR_PERIOD_UNSPECIFIED CalendarPeriod = 0 + // A day. + CalendarPeriod_DAY CalendarPeriod = 1 + // A week. Weeks begin on Monday, following + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + CalendarPeriod_WEEK CalendarPeriod = 2 + // A fortnight. The first calendar fortnight of the year begins at the start + // of week 1 according to + // [ISO 8601](https://en.wikipedia.org/wiki/ISO_week_date). + CalendarPeriod_FORTNIGHT CalendarPeriod = 3 + // A month. + CalendarPeriod_MONTH CalendarPeriod = 4 + // A quarter. Quarters start on dates 1-Jan, 1-Apr, 1-Jul, and 1-Oct of each + // year. + CalendarPeriod_QUARTER CalendarPeriod = 5 + // A half-year. Half-years start on dates 1-Jan and 1-Jul. + CalendarPeriod_HALF CalendarPeriod = 6 + // A year. 
+ CalendarPeriod_YEAR CalendarPeriod = 7 +) + +var CalendarPeriod_name = map[int32]string{ + 0: "CALENDAR_PERIOD_UNSPECIFIED", + 1: "DAY", + 2: "WEEK", + 3: "FORTNIGHT", + 4: "MONTH", + 5: "QUARTER", + 6: "HALF", + 7: "YEAR", +} +var CalendarPeriod_value = map[string]int32{ + "CALENDAR_PERIOD_UNSPECIFIED": 0, + "DAY": 1, + "WEEK": 2, + "FORTNIGHT": 3, + "MONTH": 4, + "QUARTER": 5, + "HALF": 6, + "YEAR": 7, +} + +func (x CalendarPeriod) String() string { + return proto.EnumName(CalendarPeriod_name, int32(x)) +} +func (CalendarPeriod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_calendar_period_167ed11c4514c3c9, []int{0} +} + +func init() { + proto.RegisterEnum("google.type.CalendarPeriod", CalendarPeriod_name, CalendarPeriod_value) +} + +func init() { + proto.RegisterFile("google/type/calendar_period.proto", fileDescriptor_calendar_period_167ed11c4514c3c9) +} + +var fileDescriptor_calendar_period_167ed11c4514c3c9 = []byte{ + // 248 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0xb1, 0x4f, 0x83, 0x40, + 0x14, 0x87, 0x6d, 0x69, 0x8b, 0x7d, 0x8d, 0x7a, 0x39, 0x47, 0x07, 0xe3, 0xea, 0x00, 0x83, 0xa3, + 0xd3, 0x15, 0x8e, 0x42, 0xac, 0x70, 0x9e, 0xd7, 0x98, 0xba, 0x10, 0x6c, 0x2f, 0x97, 0x26, 0xc8, + 0x23, 0xd8, 0x41, 0x27, 0xff, 0x17, 0xff, 0x52, 0x73, 0xc0, 0x50, 0xb6, 0xbb, 0xbc, 0xef, 0x97, + 0x7c, 0x1f, 0xdc, 0x19, 0x44, 0x53, 0x6a, 0xff, 0xf8, 0x53, 0x6b, 0x7f, 0x57, 0x94, 0xba, 0xda, + 0x17, 0x4d, 0x5e, 0xeb, 0xe6, 0x80, 0x7b, 0xaf, 0x6e, 0xf0, 0x88, 0x74, 0xd1, 0x21, 0x9e, 0x45, + 0xee, 0x7f, 0xe1, 0x32, 0xe8, 0x29, 0xd1, 0x42, 0xf4, 0x16, 0x6e, 0x02, 0xb6, 0xe6, 0x69, 0xc8, + 0x64, 0x2e, 0xb8, 0x4c, 0xb2, 0x30, 0xdf, 0xa4, 0xaf, 0x82, 0x07, 0x49, 0x94, 0xf0, 0x90, 0x9c, + 0x51, 0x17, 0x9c, 0x90, 0x6d, 0xc9, 0x88, 0x9e, 0xc3, 0xe4, 0x8d, 0xf3, 0x27, 0x32, 0xa6, 0x17, + 0x30, 0x8f, 0x32, 0xa9, 0xd2, 0x64, 0x15, 0x2b, 0xe2, 0xd0, 0x39, 0x4c, 0x9f, 0xb3, 0x54, 0xc5, + 0x64, 0x42, 0x17, 0xe0, 0xbe, 0x6c, 0x98, 0x54, 0x5c, 0x92, 0xa9, 0x1d, 0xc4, 0x6c, 0x1d, 0x91, + 0x99, 0x7d, 0x6d, 0x39, 0x93, 0xc4, 0x5d, 0x7e, 0xc3, 0xd5, 0x0e, 0x3f, 0xbd, 0x13, 0xa7, 0xe5, + 0xf5, 0xd0, 0x48, 0x58, 0x6b, 0x31, 0x7a, 0x8f, 0x7b, 0xc6, 0x60, 0x59, 0x54, 0xc6, 0xc3, 0xc6, + 0xf8, 0x46, 0x57, 0x6d, 0x93, 0xdf, 0x9d, 0x8a, 0xfa, 0xf0, 0x35, 0x2c, 0xef, 0xc2, 0x1f, 0x87, + 0xdf, 0xbf, 0xb1, 0xb3, 0x52, 0xe2, 0x63, 0xd6, 0x4e, 0x1f, 0xfe, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x91, 0x18, 0xaa, 0x3f, 0x33, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/color/color.pb.go b/vendor/google.golang.org/genproto/googleapis/type/color/color.pb.go new file mode 100644 index 0000000..d2dac1c --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/color/color.pb.go @@ -0,0 +1,240 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/color.proto + +package color // import "google.golang.org/genproto/googleapis/type/color" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a color in the RGBA color space. This representation is designed +// for simplicity of conversion to/from color representations in various +// languages over compactness; for example, the fields of this representation +// can be trivially provided to the constructor of "java.awt.Color" in Java; it +// can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha" +// method in iOS; and, with just a little work, it can be easily formatted into +// a CSS "rgba()" string in JavaScript, as well. +// +// Note: this proto does not carry information about the absolute color space +// that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB, +// DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color +// space. +// +// Example (Java): +// +// import com.google.type.Color; +// +// // ... +// public static java.awt.Color fromProto(Color protocolor) { +// float alpha = protocolor.hasAlpha() +// ? protocolor.getAlpha().getValue() +// : 1.0; +// +// return new java.awt.Color( +// protocolor.getRed(), +// protocolor.getGreen(), +// protocolor.getBlue(), +// alpha); +// } +// +// public static Color toProto(java.awt.Color color) { +// float red = (float) color.getRed(); +// float green = (float) color.getGreen(); +// float blue = (float) color.getBlue(); +// float denominator = 255.0; +// Color.Builder resultBuilder = +// Color +// .newBuilder() +// .setRed(red / denominator) +// .setGreen(green / denominator) +// .setBlue(blue / denominator); +// int alpha = color.getAlpha(); +// if (alpha != 255) { +// result.setAlpha( +// FloatValue +// .newBuilder() +// .setValue(((float) alpha) / denominator) +// .build()); +// } +// return resultBuilder.build(); +// } +// // ... +// +// Example (iOS / Obj-C): +// +// // ... +// static UIColor* fromProto(Color* protocolor) { +// float red = [protocolor red]; +// float green = [protocolor green]; +// float blue = [protocolor blue]; +// FloatValue* alpha_wrapper = [protocolor alpha]; +// float alpha = 1.0; +// if (alpha_wrapper != nil) { +// alpha = [alpha_wrapper value]; +// } +// return [UIColor colorWithRed:red green:green blue:blue alpha:alpha]; +// } +// +// static Color* toProto(UIColor* color) { +// CGFloat red, green, blue, alpha; +// if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) { +// return nil; +// } +// Color* result = [[Color alloc] init]; +// [result setRed:red]; +// [result setGreen:green]; +// [result setBlue:blue]; +// if (alpha <= 0.9999) { +// [result setAlpha:floatWrapperWithValue(alpha)]; +// } +// [result autorelease]; +// return result; +// } +// // ... +// +// Example (JavaScript): +// +// // ... 
+// +// var protoToCssColor = function(rgb_color) { +// var redFrac = rgb_color.red || 0.0; +// var greenFrac = rgb_color.green || 0.0; +// var blueFrac = rgb_color.blue || 0.0; +// var red = Math.floor(redFrac * 255); +// var green = Math.floor(greenFrac * 255); +// var blue = Math.floor(blueFrac * 255); +// +// if (!('alpha' in rgb_color)) { +// return rgbToCssColor_(red, green, blue); +// } +// +// var alphaFrac = rgb_color.alpha.value || 0.0; +// var rgbParams = [red, green, blue].join(','); +// return ['rgba(', rgbParams, ',', alphaFrac, ')'].join(''); +// }; +// +// var rgbToCssColor_ = function(red, green, blue) { +// var rgbNumber = new Number((red << 16) | (green << 8) | blue); +// var hexString = rgbNumber.toString(16); +// var missingZeros = 6 - hexString.length; +// var resultBuilder = ['#']; +// for (var i = 0; i < missingZeros; i++) { +// resultBuilder.push('0'); +// } +// resultBuilder.push(hexString); +// return resultBuilder.join(''); +// }; +// +// // ... +type Color struct { + // The amount of red in the color as a value in the interval [0, 1]. + Red float32 `protobuf:"fixed32,1,opt,name=red,proto3" json:"red,omitempty"` + // The amount of green in the color as a value in the interval [0, 1]. + Green float32 `protobuf:"fixed32,2,opt,name=green,proto3" json:"green,omitempty"` + // The amount of blue in the color as a value in the interval [0, 1]. + Blue float32 `protobuf:"fixed32,3,opt,name=blue,proto3" json:"blue,omitempty"` + // The fraction of this color that should be applied to the pixel. That is, + // the final pixel color is defined by the equation: + // + // pixel color = alpha * (this color) + (1.0 - alpha) * (background color) + // + // This means that a value of 1.0 corresponds to a solid color, whereas + // a value of 0.0 corresponds to a completely transparent color. This + // uses a wrapper message rather than a simple float scalar so that it is + // possible to distinguish between a default value and the value being unset. + // If omitted, this color object is to be rendered as a solid color + // (as if the alpha value had been explicitly given with a value of 1.0). 
+ Alpha *wrappers.FloatValue `protobuf:"bytes,4,opt,name=alpha,proto3" json:"alpha,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Color) Reset() { *m = Color{} } +func (m *Color) String() string { return proto.CompactTextString(m) } +func (*Color) ProtoMessage() {} +func (*Color) Descriptor() ([]byte, []int) { + return fileDescriptor_color_f98155ed8cbb2ba9, []int{0} +} +func (m *Color) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Color.Unmarshal(m, b) +} +func (m *Color) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Color.Marshal(b, m, deterministic) +} +func (dst *Color) XXX_Merge(src proto.Message) { + xxx_messageInfo_Color.Merge(dst, src) +} +func (m *Color) XXX_Size() int { + return xxx_messageInfo_Color.Size(m) +} +func (m *Color) XXX_DiscardUnknown() { + xxx_messageInfo_Color.DiscardUnknown(m) +} + +var xxx_messageInfo_Color proto.InternalMessageInfo + +func (m *Color) GetRed() float32 { + if m != nil { + return m.Red + } + return 0 +} + +func (m *Color) GetGreen() float32 { + if m != nil { + return m.Green + } + return 0 +} + +func (m *Color) GetBlue() float32 { + if m != nil { + return m.Blue + } + return 0 +} + +func (m *Color) GetAlpha() *wrappers.FloatValue { + if m != nil { + return m.Alpha + } + return nil +} + +func init() { + proto.RegisterType((*Color)(nil), "google.type.Color") +} + +func init() { proto.RegisterFile("google/type/color.proto", fileDescriptor_color_f98155ed8cbb2ba9) } + +var fileDescriptor_color_f98155ed8cbb2ba9 = []byte{ + // 228 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0x31, 0x4b, 0xc5, 0x30, + 0x10, 0xc7, 0x49, 0xfb, 0xea, 0x70, 0x6f, 0x50, 0x82, 0x60, 0x50, 0x90, 0x87, 0xd3, 0x9b, 0x12, + 0x54, 0x70, 0x71, 0x7b, 0x82, 0xae, 0xa5, 0x88, 0x83, 0x93, 0x69, 0x3d, 0xa3, 0x10, 0x7b, 0x21, + 0x6d, 0x15, 0xbf, 0x8e, 0x9f, 0xd2, 0x51, 0x72, 0xe9, 0x83, 0x2e, 0xe1, 0x72, 0xbf, 0xdf, 0x25, + 0xff, 0x83, 0x13, 0x47, 0xe4, 0x3c, 0x9a, 0xf1, 0x27, 0xa0, 0xe9, 0xc8, 0x53, 0xd4, 0x21, 0xd2, + 0x48, 0x72, 0x9d, 0x81, 0x4e, 0xe0, 0xf4, 0x7c, 0xb6, 0x18, 0xb5, 0xd3, 0x9b, 0xf9, 0x8e, 0x36, + 0x04, 0x8c, 0x43, 0x96, 0x2f, 0xbe, 0xa0, 0xba, 0x4b, 0xb3, 0xf2, 0x08, 0xca, 0x88, 0xaf, 0x4a, + 0x6c, 0xc4, 0xb6, 0x68, 0x52, 0x29, 0x8f, 0xa1, 0x72, 0x11, 0xb1, 0x57, 0x05, 0xf7, 0xf2, 0x45, + 0x4a, 0x58, 0xb5, 0x7e, 0x42, 0x55, 0x72, 0x93, 0x6b, 0x79, 0x09, 0x95, 0xf5, 0xe1, 0xdd, 0xaa, + 0xd5, 0x46, 0x6c, 0xd7, 0x57, 0x67, 0x7a, 0x4e, 0xb0, 0xff, 0x54, 0xdf, 0x7b, 0xb2, 0xe3, 0x93, + 0xf5, 0x13, 0x36, 0xd9, 0xdc, 0xbd, 0xc0, 0x61, 0x47, 0x9f, 0x7a, 0x11, 0x75, 0x07, 0x1c, 0xa4, + 0x4e, 0x33, 0xb5, 0x78, 0xbe, 0x99, 0x91, 0x23, 0x6f, 0x7b, 0xa7, 0x29, 0x3a, 0xe3, 0xb0, 0xe7, + 0x17, 0x4d, 0x46, 0x36, 0x7c, 0x0c, 0x8b, 0xed, 0x6f, 0xf9, 0xfc, 0x13, 0xe2, 0xb7, 0x28, 0x1f, + 0x1e, 0xeb, 0xf6, 0x80, 0xdd, 0xeb, 0xff, 0x00, 0x00, 0x00, 0xff, 0xff, 0xdc, 0x38, 0x5a, 0x5f, + 0x28, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/date/date.pb.go b/vendor/google.golang.org/genproto/googleapis/type/date/date.pb.go new file mode 100644 index 0000000..5e0a59d --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/date/date.pb.go @@ -0,0 +1,111 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/type/date.proto + +package date // import "google.golang.org/genproto/googleapis/type/date" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a whole or partial calendar date, e.g. a birthday. The time of day +// and time zone are either specified elsewhere or are not significant. The date +// is relative to the Proleptic Gregorian Calendar. This can represent: +// +// * A full date, with non-zero year, month and day values +// * A month and day value, with a zero year, e.g. an anniversary +// * A year on its own, with zero month and day values +// * A year and month value, with a zero day, e.g. a credit card expiration date +// +// Related types are [google.type.TimeOfDay][google.type.TimeOfDay] and `google.protobuf.Timestamp`. +type Date struct { + // Year of date. Must be from 1 to 9999, or 0 if specifying a date without + // a year. + Year int32 `protobuf:"varint,1,opt,name=year,proto3" json:"year,omitempty"` + // Month of year. Must be from 1 to 12, or 0 if specifying a year without a + // month and day. + Month int32 `protobuf:"varint,2,opt,name=month,proto3" json:"month,omitempty"` + // Day of month. Must be from 1 to 31 and valid for the year and month, or 0 + // if specifying a year by itself or a year and month where the day is not + // significant. 
+ Day int32 `protobuf:"varint,3,opt,name=day,proto3" json:"day,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Date) Reset() { *m = Date{} } +func (m *Date) String() string { return proto.CompactTextString(m) } +func (*Date) ProtoMessage() {} +func (*Date) Descriptor() ([]byte, []int) { + return fileDescriptor_date_c55f47ce38fcfb50, []int{0} +} +func (m *Date) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Date.Unmarshal(m, b) +} +func (m *Date) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Date.Marshal(b, m, deterministic) +} +func (dst *Date) XXX_Merge(src proto.Message) { + xxx_messageInfo_Date.Merge(dst, src) +} +func (m *Date) XXX_Size() int { + return xxx_messageInfo_Date.Size(m) +} +func (m *Date) XXX_DiscardUnknown() { + xxx_messageInfo_Date.DiscardUnknown(m) +} + +var xxx_messageInfo_Date proto.InternalMessageInfo + +func (m *Date) GetYear() int32 { + if m != nil { + return m.Year + } + return 0 +} + +func (m *Date) GetMonth() int32 { + if m != nil { + return m.Month + } + return 0 +} + +func (m *Date) GetDay() int32 { + if m != nil { + return m.Day + } + return 0 +} + +func init() { + proto.RegisterType((*Date)(nil), "google.type.Date") +} + +func init() { proto.RegisterFile("google/type/date.proto", fileDescriptor_date_c55f47ce38fcfb50) } + +var fileDescriptor_date_c55f47ce38fcfb50 = []byte{ + // 172 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x4f, 0x49, 0x2c, 0x49, 0xd5, 0x2b, 0x28, 0xca, + 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0x88, 0xeb, 0x81, 0xc4, 0x95, 0x9c, 0xb8, 0x58, 0x5c, 0x12, 0x4b, + 0x52, 0x85, 0x84, 0xb8, 0x58, 0x2a, 0x53, 0x13, 0x8b, 0x24, 0x18, 0x15, 0x18, 0x35, 0x58, 0x83, + 0xc0, 0x6c, 0x21, 0x11, 0x2e, 0xd6, 0xdc, 0xfc, 0xbc, 0x92, 0x0c, 0x09, 0x26, 0xb0, 0x20, 0x84, + 0x23, 0x24, 0xc0, 0xc5, 0x9c, 0x92, 0x58, 0x29, 0xc1, 0x0c, 0x16, 0x03, 0x31, 0x9d, 0x62, 0xb9, + 0xf8, 0x93, 0xf3, 0x73, 0xf5, 0x90, 0x8c, 0x75, 0xe2, 0x04, 0x19, 0x1a, 0x00, 0xb2, 0x2e, 0x80, + 0x31, 0xca, 0x04, 0x2a, 0x93, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, 0x9f, + 0x9e, 0x9a, 0x07, 0x76, 0x8c, 0x3e, 0x44, 0x2a, 0xb1, 0x20, 0xb3, 0x18, 0xe1, 0x4e, 0x6b, 0x10, + 0xf1, 0x83, 0x91, 0x71, 0x11, 0x13, 0xb3, 0x7b, 0x48, 0x40, 0x12, 0x1b, 0x58, 0xa5, 0x31, 0x20, + 0x00, 0x00, 0xff, 0xff, 0x84, 0x95, 0xf3, 0x4c, 0xd0, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/dayofweek/dayofweek.pb.go b/vendor/google.golang.org/genproto/googleapis/type/dayofweek/dayofweek.pb.go new file mode 100644 index 0000000..6ce58d7 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/dayofweek/dayofweek.pb.go @@ -0,0 +1,96 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/dayofweek.proto + +package dayofweek // import "google.golang.org/genproto/googleapis/type/dayofweek" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a day of week. +type DayOfWeek int32 + +const ( + // The unspecified day-of-week. + DayOfWeek_DAY_OF_WEEK_UNSPECIFIED DayOfWeek = 0 + // The day-of-week of Monday. + DayOfWeek_MONDAY DayOfWeek = 1 + // The day-of-week of Tuesday. + DayOfWeek_TUESDAY DayOfWeek = 2 + // The day-of-week of Wednesday. + DayOfWeek_WEDNESDAY DayOfWeek = 3 + // The day-of-week of Thursday. + DayOfWeek_THURSDAY DayOfWeek = 4 + // The day-of-week of Friday. + DayOfWeek_FRIDAY DayOfWeek = 5 + // The day-of-week of Saturday. + DayOfWeek_SATURDAY DayOfWeek = 6 + // The day-of-week of Sunday. + DayOfWeek_SUNDAY DayOfWeek = 7 +) + +var DayOfWeek_name = map[int32]string{ + 0: "DAY_OF_WEEK_UNSPECIFIED", + 1: "MONDAY", + 2: "TUESDAY", + 3: "WEDNESDAY", + 4: "THURSDAY", + 5: "FRIDAY", + 6: "SATURDAY", + 7: "SUNDAY", +} +var DayOfWeek_value = map[string]int32{ + "DAY_OF_WEEK_UNSPECIFIED": 0, + "MONDAY": 1, + "TUESDAY": 2, + "WEDNESDAY": 3, + "THURSDAY": 4, + "FRIDAY": 5, + "SATURDAY": 6, + "SUNDAY": 7, +} + +func (x DayOfWeek) String() string { + return proto.EnumName(DayOfWeek_name, int32(x)) +} +func (DayOfWeek) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_dayofweek_b79b7fd6c2a3d9e8, []int{0} +} + +func init() { + proto.RegisterEnum("google.type.DayOfWeek", DayOfWeek_name, DayOfWeek_value) +} + +func init() { + proto.RegisterFile("google/type/dayofweek.proto", fileDescriptor_dayofweek_b79b7fd6c2a3d9e8) +} + +var fileDescriptor_dayofweek_b79b7fd6c2a3d9e8 = []byte{ + // 235 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x4f, 0x49, 0xac, 0xcc, 0x4f, 0x2b, 0x4f, 0x4d, + 0xcd, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0x48, 0xea, 0x81, 0x24, 0xb5, 0x5a, + 0x18, 0xb9, 0x38, 0x5d, 0x12, 0x2b, 0xfd, 0xd3, 0xc2, 0x53, 0x53, 0xb3, 0x85, 0xa4, 0xb9, 0xc4, + 0x5d, 0x1c, 0x23, 0xe3, 0xfd, 0xdd, 0xe2, 0xc3, 0x5d, 0x5d, 0xbd, 0xe3, 0x43, 0xfd, 0x82, 0x03, + 0x5c, 0x9d, 0x3d, 0xdd, 0x3c, 0x5d, 0x5d, 0x04, 0x18, 0x84, 0xb8, 0xb8, 0xd8, 0x7c, 0xfd, 0xfd, + 0x5c, 0x1c, 0x23, 0x05, 0x18, 0x85, 0xb8, 0xb9, 0xd8, 0x43, 0x42, 0x5d, 0x83, 0x41, 0x1c, 0x26, + 0x21, 0x5e, 0x2e, 0xce, 0x70, 0x57, 0x17, 0x3f, 0x08, 0x97, 0x59, 0x88, 0x87, 0x8b, 0x23, 0xc4, + 0x23, 0x34, 0x08, 0xcc, 0x63, 0x01, 0xe9, 0x72, 0x0b, 0xf2, 0x04, 0xb1, 0x59, 0x41, 0x32, 0xc1, + 0x8e, 0x21, 0xa1, 0x41, 0x20, 0x1e, 0x1b, 0x48, 0x26, 0x38, 0x14, 0x6c, 0x1e, 0xbb, 0x53, 0x26, + 0x17, 0x7f, 0x72, 0x7e, 0xae, 0x1e, 0x92, 0xcb, 0x9c, 0xf8, 0xe0, 0xce, 0x0a, 0x00, 0x39, 0x3b, + 0x80, 0x31, 0xca, 0x0e, 0x2a, 0x9d, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, + 0x9f, 0x9e, 0x9a, 0x07, 0xf6, 0x94, 0x3e, 0x44, 0x2a, 0xb1, 0x20, 0xb3, 0x18, 0xcd, 0xd3, 0xd6, + 0x70, 0xd6, 0x22, 0x26, 0x66, 0xf7, 0x90, 0x80, 0x24, 0x36, 0xb0, 0x06, 0x63, 0x40, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x6e, 0x23, 0xb2, 0xb3, 0x24, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/expr/expr.pb.go b/vendor/google.golang.org/genproto/googleapis/type/expr/expr.pb.go new file mode 100644 index 0000000..ed0566e --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/expr/expr.pb.go @@ -0,0 +1,121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: google/type/expr.proto + +package expr // import "google.golang.org/genproto/googleapis/type/expr" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents an expression text. Example: +// +// title: "User account presence" +// description: "Determines whether the request has a user account" +// expression: "size(request.user) > 0" +type Expr struct { + // Textual representation of an expression in + // Common Expression Language syntax. + // + // The application context of the containing message determines which + // well-known feature set of CEL is supported. + Expression string `protobuf:"bytes,1,opt,name=expression,proto3" json:"expression,omitempty"` + // An optional title for the expression, i.e. a short string describing + // its purpose. This can be used e.g. in UIs which allow to enter the + // expression. + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + // An optional description of the expression. This is a longer text which + // describes the expression, e.g. when hovered over it in a UI. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // An optional string indicating the location of the expression for error + // reporting, e.g. a file name and a position in the file. 
+ Location string `protobuf:"bytes,4,opt,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Expr) Reset() { *m = Expr{} } +func (m *Expr) String() string { return proto.CompactTextString(m) } +func (*Expr) ProtoMessage() {} +func (*Expr) Descriptor() ([]byte, []int) { + return fileDescriptor_expr_e508ac4329fc65cc, []int{0} +} +func (m *Expr) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Expr.Unmarshal(m, b) +} +func (m *Expr) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Expr.Marshal(b, m, deterministic) +} +func (dst *Expr) XXX_Merge(src proto.Message) { + xxx_messageInfo_Expr.Merge(dst, src) +} +func (m *Expr) XXX_Size() int { + return xxx_messageInfo_Expr.Size(m) +} +func (m *Expr) XXX_DiscardUnknown() { + xxx_messageInfo_Expr.DiscardUnknown(m) +} + +var xxx_messageInfo_Expr proto.InternalMessageInfo + +func (m *Expr) GetExpression() string { + if m != nil { + return m.Expression + } + return "" +} + +func (m *Expr) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *Expr) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *Expr) GetLocation() string { + if m != nil { + return m.Location + } + return "" +} + +func init() { + proto.RegisterType((*Expr)(nil), "google.type.Expr") +} + +func init() { proto.RegisterFile("google/type/expr.proto", fileDescriptor_expr_e508ac4329fc65cc) } + +var fileDescriptor_expr_e508ac4329fc65cc = []byte{ + // 195 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4b, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x4f, 0xad, 0x28, 0x28, 0xd2, 0x2b, 0x28, 0xca, + 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0x88, 0xeb, 0x81, 0xc4, 0x95, 0xaa, 0xb8, 0x58, 0x5c, 0x2b, 0x0a, + 0x8a, 0x84, 0xe4, 0xb8, 0xb8, 0x40, 0x4a, 0x52, 0x8b, 0x8b, 0x33, 0xf3, 0xf3, 0x24, 0x18, 0x15, + 0x18, 0x35, 0x38, 0x83, 0x90, 0x44, 0x84, 0x44, 0xb8, 0x58, 0x4b, 0x32, 0x4b, 0x72, 0x52, 0x25, + 0x98, 0xc0, 0x52, 0x10, 0x8e, 0x90, 0x02, 0x17, 0x77, 0x4a, 0x6a, 0x71, 0x72, 0x51, 0x66, 0x41, + 0x09, 0x48, 0x1b, 0x33, 0x58, 0x0e, 0x59, 0x48, 0x48, 0x8a, 0x8b, 0x23, 0x27, 0x3f, 0x39, 0x11, + 0x2c, 0xcd, 0x02, 0x96, 0x86, 0xf3, 0x9d, 0xa2, 0xb8, 0xf8, 0x93, 0xf3, 0x73, 0xf5, 0x90, 0x9c, + 0xe3, 0xc4, 0x09, 0x72, 0x4c, 0x00, 0xc8, 0x99, 0x01, 0x8c, 0x51, 0x26, 0x50, 0x99, 0xf4, 0xfc, + 0x9c, 0xc4, 0xbc, 0x74, 0xbd, 0xfc, 0xa2, 0x74, 0xfd, 0xf4, 0xd4, 0x3c, 0xb0, 0x27, 0xf4, 0x21, + 0x52, 0x89, 0x05, 0x99, 0xc5, 0x08, 0xff, 0x59, 0x83, 0x88, 0x45, 0x4c, 0xcc, 0xee, 0x21, 0x01, + 0x49, 0x6c, 0x60, 0x65, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0xe7, 0x67, 0x9e, 0xf5, 0x05, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/fraction/fraction.pb.go b/vendor/google.golang.org/genproto/googleapis/type/fraction/fraction.pb.go new file mode 100644 index 0000000..80560de --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/fraction/fraction.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/fraction.proto + +package fraction // import "google.golang.org/genproto/googleapis/type/fraction" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a fraction in terms of a numerator divided by a denominator. +type Fraction struct { + // The portion of the denominator in the faction, e.g. 2 in 2/3. + Numerator int64 `protobuf:"varint,1,opt,name=numerator,proto3" json:"numerator,omitempty"` + // The value by which the numerator is divided, e.g. 3 in 2/3. Must be + // positive. + Denominator int64 `protobuf:"varint,2,opt,name=denominator,proto3" json:"denominator,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Fraction) Reset() { *m = Fraction{} } +func (m *Fraction) String() string { return proto.CompactTextString(m) } +func (*Fraction) ProtoMessage() {} +func (*Fraction) Descriptor() ([]byte, []int) { + return fileDescriptor_fraction_4cfdd333c72eef64, []int{0} +} +func (m *Fraction) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Fraction.Unmarshal(m, b) +} +func (m *Fraction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Fraction.Marshal(b, m, deterministic) +} +func (dst *Fraction) XXX_Merge(src proto.Message) { + xxx_messageInfo_Fraction.Merge(dst, src) +} +func (m *Fraction) XXX_Size() int { + return xxx_messageInfo_Fraction.Size(m) +} +func (m *Fraction) XXX_DiscardUnknown() { + xxx_messageInfo_Fraction.DiscardUnknown(m) +} + +var xxx_messageInfo_Fraction proto.InternalMessageInfo + +func (m *Fraction) GetNumerator() int64 { + if m != nil { + return m.Numerator + } + return 0 +} + +func (m *Fraction) GetDenominator() int64 { + if m != nil { + return m.Denominator + } + return 0 +} + +func init() { + proto.RegisterType((*Fraction)(nil), "google.type.Fraction") +} + +func init() { + proto.RegisterFile("google/type/fraction.proto", fileDescriptor_fraction_4cfdd333c72eef64) +} + +var fileDescriptor_fraction_4cfdd333c72eef64 = []byte{ + // 168 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4a, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x4f, 0x2b, 0x4a, 0x4c, 0x2e, 0xc9, 0xcc, 0xcf, + 0xd3, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0xc8, 0xe9, 0x81, 0xe4, 0x94, 0xbc, 0xb8, + 0x38, 0xdc, 0xa0, 0xd2, 0x42, 0x32, 0x5c, 0x9c, 0x79, 0xa5, 0xb9, 0xa9, 0x45, 0x89, 0x25, 0xf9, + 0x45, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x08, 0x01, 0x21, 0x05, 0x2e, 0xee, 0x94, 0xd4, + 0xbc, 0xfc, 0xdc, 0xcc, 0x3c, 0xb0, 0x3c, 0x13, 0x58, 0x1e, 0x59, 0xc8, 0x29, 0x8d, 0x8b, 0x3f, + 0x39, 0x3f, 0x57, 0x0f, 0xc9, 0x78, 0x27, 0x5e, 0x98, 0xe1, 0x01, 0x20, 0xab, 0x03, 0x18, 0xa3, + 0x6c, 0xa0, 0xb2, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa, 0xe9, 0xa9, + 0x79, 0x60, 0x87, 0xe9, 0x43, 0xa4, 0x12, 0x0b, 0x32, 0x8b, 0x51, 0xdd, 0x6d, 0x0d, 0x63, 0x2c, + 0x62, 0x62, 0x76, 0x0f, 0x09, 0x48, 0x62, 0x03, 0x2b, 0x37, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, + 0xd9, 0xdd, 0xa8, 0x56, 0xe5, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/latlng/latlng.pb.go b/vendor/google.golang.org/genproto/googleapis/type/latlng/latlng.pb.go new file mode 100644 index 0000000..2ba98d6 --- 
/dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/latlng/latlng.pb.go @@ -0,0 +1,93 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/latlng.proto + +package latlng // import "google.golang.org/genproto/googleapis/type/latlng" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// An object representing a latitude/longitude pair. This is expressed as a pair +// of doubles representing degrees latitude and degrees longitude. Unless +// specified otherwise, this must conform to the +// WGS84 +// standard. Values must be within normalized ranges. +type LatLng struct { + // The latitude in degrees. It must be in the range [-90.0, +90.0]. + Latitude float64 `protobuf:"fixed64,1,opt,name=latitude,proto3" json:"latitude,omitempty"` + // The longitude in degrees. It must be in the range [-180.0, +180.0]. + Longitude float64 `protobuf:"fixed64,2,opt,name=longitude,proto3" json:"longitude,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LatLng) Reset() { *m = LatLng{} } +func (m *LatLng) String() string { return proto.CompactTextString(m) } +func (*LatLng) ProtoMessage() {} +func (*LatLng) Descriptor() ([]byte, []int) { + return fileDescriptor_latlng_9542c128e5dfd41d, []int{0} +} +func (m *LatLng) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LatLng.Unmarshal(m, b) +} +func (m *LatLng) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LatLng.Marshal(b, m, deterministic) +} +func (dst *LatLng) XXX_Merge(src proto.Message) { + xxx_messageInfo_LatLng.Merge(dst, src) +} +func (m *LatLng) XXX_Size() int { + return xxx_messageInfo_LatLng.Size(m) +} +func (m *LatLng) XXX_DiscardUnknown() { + xxx_messageInfo_LatLng.DiscardUnknown(m) +} + +var xxx_messageInfo_LatLng proto.InternalMessageInfo + +func (m *LatLng) GetLatitude() float64 { + if m != nil { + return m.Latitude + } + return 0 +} + +func (m *LatLng) GetLongitude() float64 { + if m != nil { + return m.Longitude + } + return 0 +} + +func init() { + proto.RegisterType((*LatLng)(nil), "google.type.LatLng") +} + +func init() { proto.RegisterFile("google/type/latlng.proto", fileDescriptor_latlng_9542c128e5dfd41d) } + +var fileDescriptor_latlng_9542c128e5dfd41d = []byte{ + // 168 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0xcf, 0x49, 0x2c, 0xc9, 0xc9, 0x4b, 0xd7, 0x2b, + 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0xc8, 0xe8, 0x81, 0x64, 0x94, 0x9c, 0xb8, 0xd8, 0x7c, + 0x12, 0x4b, 0x7c, 0xf2, 0xd2, 0x85, 0xa4, 0xb8, 0x38, 0x72, 0x12, 0x4b, 0x32, 0x4b, 0x4a, 0x53, + 0x52, 0x25, 0x18, 0x15, 0x18, 0x35, 0x18, 0x83, 0xe0, 0x7c, 0x21, 0x19, 0x2e, 0xce, 0x9c, 0xfc, + 0xbc, 0x74, 0x88, 0x24, 0x13, 0x58, 0x12, 0x21, 0xe0, 0x94, 0xcc, 0xc5, 0x9f, 0x9c, 0x9f, 0xab, + 0x87, 0x64, 0xac, 0x13, 0x37, 0xc4, 0xd0, 0x00, 0x90, 0x85, 0x01, 0x8c, 0x51, 0x16, 0x50, 
0xb9, + 0xf4, 0xfc, 0x9c, 0xc4, 0xbc, 0x74, 0xbd, 0xfc, 0xa2, 0x74, 0xfd, 0xf4, 0xd4, 0x3c, 0xb0, 0x73, + 0xf4, 0x21, 0x52, 0x89, 0x05, 0x99, 0xc5, 0xc8, 0x6e, 0xb5, 0x86, 0x50, 0x3f, 0x18, 0x19, 0x17, + 0x31, 0x31, 0xbb, 0x87, 0x04, 0x24, 0xb1, 0x81, 0x55, 0x1b, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, + 0xc0, 0x7b, 0xd0, 0x8b, 0xd8, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/money/money.pb.go b/vendor/google.golang.org/genproto/googleapis/type/money/money.pb.go new file mode 100644 index 0000000..bb6f7bd --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/money/money.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/money.proto + +package money // import "google.golang.org/genproto/googleapis/type/money" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents an amount of money with its currency type. +type Money struct { + // The 3-letter currency code defined in ISO 4217. + CurrencyCode string `protobuf:"bytes,1,opt,name=currency_code,json=currencyCode,proto3" json:"currency_code,omitempty"` + // The whole units of the amount. + // For example if `currencyCode` is `"USD"`, then 1 unit is one US dollar. + Units int64 `protobuf:"varint,2,opt,name=units,proto3" json:"units,omitempty"` + // Number of nano (10^-9) units of the amount. + // The value must be between -999,999,999 and +999,999,999 inclusive. + // If `units` is positive, `nanos` must be positive or zero. + // If `units` is zero, `nanos` can be positive, zero, or negative. + // If `units` is negative, `nanos` must be negative or zero. + // For example $-1.75 is represented as `units`=-1 and `nanos`=-750,000,000. 
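+	// As a sketch of the example above, $-1.75 (assuming a US dollar
+	// amount, hence the hypothetical "USD" code) would be constructed as:
+	//
+	//	m := &Money{CurrencyCode: "USD", Units: -1, Nanos: -750000000}
+	//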
+ Nanos int32 `protobuf:"varint,3,opt,name=nanos,proto3" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Money) Reset() { *m = Money{} } +func (m *Money) String() string { return proto.CompactTextString(m) } +func (*Money) ProtoMessage() {} +func (*Money) Descriptor() ([]byte, []int) { + return fileDescriptor_money_a475a5747334583b, []int{0} +} +func (m *Money) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Money.Unmarshal(m, b) +} +func (m *Money) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Money.Marshal(b, m, deterministic) +} +func (dst *Money) XXX_Merge(src proto.Message) { + xxx_messageInfo_Money.Merge(dst, src) +} +func (m *Money) XXX_Size() int { + return xxx_messageInfo_Money.Size(m) +} +func (m *Money) XXX_DiscardUnknown() { + xxx_messageInfo_Money.DiscardUnknown(m) +} + +var xxx_messageInfo_Money proto.InternalMessageInfo + +func (m *Money) GetCurrencyCode() string { + if m != nil { + return m.CurrencyCode + } + return "" +} + +func (m *Money) GetUnits() int64 { + if m != nil { + return m.Units + } + return 0 +} + +func (m *Money) GetNanos() int32 { + if m != nil { + return m.Nanos + } + return 0 +} + +func init() { + proto.RegisterType((*Money)(nil), "google.type.Money") +} + +func init() { proto.RegisterFile("google/type/money.proto", fileDescriptor_money_a475a5747334583b) } + +var fileDescriptor_money_a475a5747334583b = []byte{ + // 193 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0xcf, 0xcd, 0xcf, 0x4b, 0xad, 0xd4, 0x2b, 0x28, + 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0x48, 0xe8, 0x81, 0x24, 0x94, 0x22, 0xb8, 0x58, 0x7d, 0x41, + 0x72, 0x42, 0xca, 0x5c, 0xbc, 0xc9, 0xa5, 0x45, 0x45, 0xa9, 0x79, 0xc9, 0x95, 0xf1, 0xc9, 0xf9, + 0x29, 0xa9, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x3c, 0x30, 0x41, 0xe7, 0xfc, 0x94, 0x54, + 0x21, 0x11, 0x2e, 0xd6, 0xd2, 0xbc, 0xcc, 0x92, 0x62, 0x09, 0x26, 0x05, 0x46, 0x0d, 0xe6, 0x20, + 0x08, 0x07, 0x24, 0x9a, 0x97, 0x98, 0x97, 0x5f, 0x2c, 0xc1, 0xac, 0xc0, 0xa8, 0xc1, 0x1a, 0x04, + 0xe1, 0x38, 0x25, 0x70, 0xf1, 0x27, 0xe7, 0xe7, 0xea, 0x21, 0x59, 0xe6, 0xc4, 0x05, 0xb6, 0x2a, + 0x00, 0xe4, 0x8a, 0x00, 0xc6, 0x28, 0x33, 0xa8, 0x54, 0x7a, 0x7e, 0x4e, 0x62, 0x5e, 0xba, 0x5e, + 0x7e, 0x51, 0xba, 0x7e, 0x7a, 0x6a, 0x1e, 0xd8, 0x8d, 0xfa, 0x10, 0xa9, 0xc4, 0x82, 0xcc, 0x62, + 0x24, 0xf7, 0x5b, 0x83, 0xc9, 0x1f, 0x8c, 0x8c, 0x8b, 0x98, 0x98, 0xdd, 0x43, 0x02, 0x92, 0xd8, + 0xc0, 0x6a, 0x8d, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x2d, 0x5b, 0x21, 0x82, 0xea, 0x00, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/postaladdress/postal_address.pb.go b/vendor/google.golang.org/genproto/googleapis/type/postaladdress/postal_address.pb.go new file mode 100644 index 0000000..e009129 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/postaladdress/postal_address.pb.go @@ -0,0 +1,255 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/postal_address.proto + +package postaladdress // import "google.golang.org/genproto/googleapis/type/postaladdress" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a postal address, e.g. for postal delivery or payments addresses. +// Given a postal address, a postal service can deliver items to a premise, P.O. +// Box or similar. +// It is not intended to model geographical locations (roads, towns, +// mountains). +// +// In typical usage an address would be created via user input or from importing +// existing data, depending on the type of process. +// +// Advice on address input / editing: +// - Use an i18n-ready address widget such as +// https://github.com/googlei18n/libaddressinput) +// - Users should not be presented with UI elements for input or editing of +// fields outside countries where that field is used. +// +// For more guidance on how to use this schema, please see: +// https://support.google.com/business/answer/6397478 +type PostalAddress struct { + // The schema revision of the `PostalAddress`. + // All new revisions **must** be backward compatible with old revisions. + Revision int32 `protobuf:"varint,1,opt,name=revision,proto3" json:"revision,omitempty"` + // Required. CLDR region code of the country/region of the address. This + // is never inferred and it is up to the user to ensure the value is + // correct. See http://cldr.unicode.org/ and + // http://www.unicode.org/cldr/charts/30/supplemental/territory_information.html + // for details. Example: "CH" for Switzerland. + RegionCode string `protobuf:"bytes,2,opt,name=region_code,json=regionCode,proto3" json:"region_code,omitempty"` + // Optional. BCP-47 language code of the contents of this address (if + // known). This is often the UI language of the input form or is expected + // to match one of the languages used in the address' country/region, or their + // transliterated equivalents. + // This can affect formatting in certain countries, but is not critical + // to the correctness of the data and will never affect any validation or + // other non-formatting related operations. + // + // If this value is not known, it should be omitted (rather than specifying a + // possibly incorrect default). + // + // Examples: "zh-Hant", "ja", "ja-Latn", "en". + LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"` + // Optional. Postal code of the address. Not all countries use or require + // postal codes to be present, but where they are used, they may trigger + // additional validation with other parts of the address (e.g. state/zip + // validation in the U.S.A.). + PostalCode string `protobuf:"bytes,4,opt,name=postal_code,json=postalCode,proto3" json:"postal_code,omitempty"` + // Optional. Additional, country-specific, sorting code. This is not used + // in most regions. Where it is used, the value is either a string like + // "CEDEX", optionally followed by a number (e.g. "CEDEX 7"), or just a number + // alone, representing the "sector code" (Jamaica), "delivery area indicator" + // (Malawi) or "post office indicator" (e.g. Côte d'Ivoire). + SortingCode string `protobuf:"bytes,5,opt,name=sorting_code,json=sortingCode,proto3" json:"sorting_code,omitempty"` + // Optional. 
Highest administrative subdivision which is used for postal + // addresses of a country or region. + // For example, this can be a state, a province, an oblast, or a prefecture. + // Specifically, for Spain this is the province and not the autonomous + // community (e.g. "Barcelona" and not "Catalonia"). + // Many countries don't use an administrative area in postal addresses. E.g. + // in Switzerland this should be left unpopulated. + AdministrativeArea string `protobuf:"bytes,6,opt,name=administrative_area,json=administrativeArea,proto3" json:"administrative_area,omitempty"` + // Optional. Generally refers to the city/town portion of the address. + // Examples: US city, IT comune, UK post town. + // In regions of the world where localities are not well defined or do not fit + // into this structure well, leave locality empty and use address_lines. + Locality string `protobuf:"bytes,7,opt,name=locality,proto3" json:"locality,omitempty"` + // Optional. Sublocality of the address. + // For example, this can be neighborhoods, boroughs, districts. + Sublocality string `protobuf:"bytes,8,opt,name=sublocality,proto3" json:"sublocality,omitempty"` + // Unstructured address lines describing the lower levels of an address. + // + // Because values in address_lines do not have type information and may + // sometimes contain multiple values in a single field (e.g. + // "Austin, TX"), it is important that the line order is clear. The order of + // address lines should be "envelope order" for the country/region of the + // address. In places where this can vary (e.g. Japan), address_language is + // used to make it explicit (e.g. "ja" for large-to-small ordering and + // "ja-Latn" or "en" for small-to-large). This way, the most specific line of + // an address can be selected based on the language. + // + // The minimum permitted structural representation of an address consists + // of a region_code with all remaining information placed in the + // address_lines. It would be possible to format such an address very + // approximately without geocoding, but no semantic reasoning could be + // made about any of the address components until it was at least + // partially resolved. + // + // Creating an address only containing a region_code and address_lines, and + // then geocoding is the recommended way to handle completely unstructured + // addresses (as opposed to guessing which parts of the address should be + // localities or administrative areas). + AddressLines []string `protobuf:"bytes,9,rep,name=address_lines,json=addressLines,proto3" json:"address_lines,omitempty"` + // Optional. The recipient at the address. + // This field may, under certain circumstances, contain multiline information. + // For example, it might contain "care of" information. + Recipients []string `protobuf:"bytes,10,rep,name=recipients,proto3" json:"recipients,omitempty"` + // Optional. The name of the organization at the address. 
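+	// Purely illustrative (the street and city values are hypothetical), the
+	// minimal geocodable form described for address_lines above, containing
+	// only a region code and unstructured lines, could be built as:
+	//
+	//	a := &PostalAddress{
+	//		RegionCode:   "CH",
+	//		AddressLines: []string{"Seestrasse 1", "8002 Zürich"},
+	//	}
+	//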
+ Organization string `protobuf:"bytes,11,opt,name=organization,proto3" json:"organization,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PostalAddress) Reset() { *m = PostalAddress{} } +func (m *PostalAddress) String() string { return proto.CompactTextString(m) } +func (*PostalAddress) ProtoMessage() {} +func (*PostalAddress) Descriptor() ([]byte, []int) { + return fileDescriptor_postal_address_f1ce9eb95f7d1aa5, []int{0} +} +func (m *PostalAddress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PostalAddress.Unmarshal(m, b) +} +func (m *PostalAddress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PostalAddress.Marshal(b, m, deterministic) +} +func (dst *PostalAddress) XXX_Merge(src proto.Message) { + xxx_messageInfo_PostalAddress.Merge(dst, src) +} +func (m *PostalAddress) XXX_Size() int { + return xxx_messageInfo_PostalAddress.Size(m) +} +func (m *PostalAddress) XXX_DiscardUnknown() { + xxx_messageInfo_PostalAddress.DiscardUnknown(m) +} + +var xxx_messageInfo_PostalAddress proto.InternalMessageInfo + +func (m *PostalAddress) GetRevision() int32 { + if m != nil { + return m.Revision + } + return 0 +} + +func (m *PostalAddress) GetRegionCode() string { + if m != nil { + return m.RegionCode + } + return "" +} + +func (m *PostalAddress) GetLanguageCode() string { + if m != nil { + return m.LanguageCode + } + return "" +} + +func (m *PostalAddress) GetPostalCode() string { + if m != nil { + return m.PostalCode + } + return "" +} + +func (m *PostalAddress) GetSortingCode() string { + if m != nil { + return m.SortingCode + } + return "" +} + +func (m *PostalAddress) GetAdministrativeArea() string { + if m != nil { + return m.AdministrativeArea + } + return "" +} + +func (m *PostalAddress) GetLocality() string { + if m != nil { + return m.Locality + } + return "" +} + +func (m *PostalAddress) GetSublocality() string { + if m != nil { + return m.Sublocality + } + return "" +} + +func (m *PostalAddress) GetAddressLines() []string { + if m != nil { + return m.AddressLines + } + return nil +} + +func (m *PostalAddress) GetRecipients() []string { + if m != nil { + return m.Recipients + } + return nil +} + +func (m *PostalAddress) GetOrganization() string { + if m != nil { + return m.Organization + } + return "" +} + +func init() { + proto.RegisterType((*PostalAddress)(nil), "google.type.PostalAddress") +} + +func init() { + proto.RegisterFile("google/type/postal_address.proto", fileDescriptor_postal_address_f1ce9eb95f7d1aa5) +} + +var fileDescriptor_postal_address_f1ce9eb95f7d1aa5 = []byte{ + // 338 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x92, 0x31, 0x6f, 0xea, 0x30, + 0x10, 0xc7, 0x15, 0xf2, 0xe0, 0xc1, 0x05, 0xf4, 0x24, 0xbf, 0x25, 0xea, 0x50, 0x52, 0xba, 0x30, + 0x25, 0x43, 0xc7, 0x4e, 0x50, 0xa9, 0x5d, 0x3a, 0x44, 0xa8, 0x53, 0x97, 0xc8, 0x24, 0x27, 0xcb, + 0x52, 0xf0, 0x45, 0xb6, 0x41, 0xa2, 0xdf, 0xa1, 0x5f, 0xa2, 0x9f, 0xb4, 0xb2, 0x9d, 0xd2, 0x30, + 0xde, 0xef, 0x7e, 0x49, 0xee, 0xee, 0x1f, 0xc8, 0x04, 0x91, 0x68, 0xb1, 0xb0, 0xe7, 0x0e, 0x8b, + 0x8e, 0x8c, 0xe5, 0x6d, 0xc5, 0x9b, 0x46, 0xa3, 0x31, 0x79, 0xa7, 0xc9, 0x12, 0x4b, 0x82, 0x91, + 0x3b, 0x63, 0xf5, 0x19, 0xc3, 0xa2, 0xf4, 0xd6, 0x26, 0x48, 0xec, 0x06, 0xa6, 0x1a, 0x4f, 0xd2, + 0x48, 0x52, 0x69, 0x94, 0x45, 0xeb, 0xf1, 0xee, 0x52, 0xb3, 0x25, 0x24, 0x1a, 0x85, 0x24, 0x55, + 0xd5, 0xd4, 0x60, 0x3a, 0xca, 0xa2, 0xf5, 0x6c, 0x07, 
0x01, 0x3d, 0x51, 0x83, 0xec, 0x1e, 0x16, + 0x2d, 0x57, 0xe2, 0xc8, 0x05, 0x06, 0x25, 0xf6, 0xca, 0xfc, 0x07, 0x7a, 0x69, 0x09, 0x49, 0x3f, + 0x98, 0x57, 0xfe, 0x84, 0xb7, 0x04, 0xe4, 0x85, 0x3b, 0x98, 0x1b, 0xd2, 0x56, 0x2a, 0x11, 0x8c, + 0xb1, 0x37, 0x92, 0x9e, 0x79, 0xa5, 0x80, 0xff, 0xbc, 0x39, 0x48, 0x25, 0x8d, 0xd5, 0xdc, 0xca, + 0x13, 0x56, 0x5c, 0x23, 0x4f, 0x27, 0xde, 0x64, 0xd7, 0xad, 0x8d, 0x46, 0xee, 0xd6, 0x6a, 0xa9, + 0xe6, 0xad, 0xb4, 0xe7, 0xf4, 0xaf, 0xb7, 0x2e, 0x35, 0xcb, 0x20, 0x31, 0xc7, 0xfd, 0xa5, 0x3d, + 0xed, 0x3f, 0xf7, 0x8b, 0xdc, 0x5e, 0xfd, 0x11, 0xab, 0x56, 0x2a, 0x34, 0xe9, 0x2c, 0x8b, 0xdd, + 0x5e, 0x3d, 0x7c, 0x75, 0x8c, 0xdd, 0x02, 0x68, 0xac, 0x65, 0x27, 0x51, 0x59, 0x93, 0x82, 0x37, + 0x06, 0x84, 0xad, 0x60, 0x4e, 0x5a, 0x70, 0x25, 0x3f, 0xb8, 0x75, 0xd7, 0x4d, 0xc2, 0x6d, 0x86, + 0x6c, 0x7b, 0x84, 0x7f, 0x35, 0x1d, 0xf2, 0x41, 0x44, 0x5b, 0x76, 0x95, 0x4f, 0xe9, 0x32, 0x2c, + 0xa3, 0xf7, 0xe7, 0x5e, 0x11, 0xe4, 0x6e, 0x9b, 0x93, 0x16, 0x85, 0x40, 0xe5, 0x13, 0x2e, 0x42, + 0x8b, 0x77, 0xd2, 0x0c, 0x7f, 0x83, 0x7e, 0xd6, 0xc7, 0xab, 0xea, 0x6b, 0x14, 0xbf, 0xbc, 0x95, + 0xfb, 0x89, 0x7f, 0xf0, 0xe1, 0x3b, 0x00, 0x00, 0xff, 0xff, 0xda, 0x86, 0xd3, 0x22, 0x3e, 0x02, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/quaternion/quaternion.pb.go b/vendor/google.golang.org/genproto/googleapis/type/quaternion/quaternion.pb.go new file mode 100644 index 0000000..b2289f4 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/quaternion/quaternion.pb.go @@ -0,0 +1,165 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/quaternion.proto + +package quaternion // import "google.golang.org/genproto/googleapis/type/quaternion" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A quaternion is defined as the quotient of two directed lines in a +// three-dimensional space or equivalently as the quotient of two Euclidean +// vectors (https://en.wikipedia.org/wiki/Quaternion). +// +// Quaternions are often used in calculations involving three-dimensional +// rotations (https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation), +// as they provide greater mathematical robustness by avoiding the gimbal lock +// problems that can be encountered when using Euler angles +// (https://en.wikipedia.org/wiki/Gimbal_lock). +// +// Quaternions are generally represented in this form: +// +// w + xi + yj + zk +// +// where x, y, z, and w are real numbers, and i, j, and k are three imaginary +// numbers. +// +// Our naming choice (x, y, z, w) comes from the desire to avoid confusion for +// those interested in the geometric properties of the quaternion in the 3D +// Cartesian space. Other texts often use alternative names or subscripts, such +// as (a, b, c, d), (1, i, j, k), or (0, 1, 2, 3), which are perhaps better +// suited for mathematical interpretations. 
+// +// To avoid any confusion, as well as to maintain compatibility with a large +// number of software libraries, the quaternions represented using the protocol +// buffer below *must* follow the Hamilton convention, which defines ij = k +// (i.e. a right-handed algebra), and therefore: +// +// i^2 = j^2 = k^2 = ijk = −1 +// ij = −ji = k +// jk = −kj = i +// ki = −ik = j +// +// Please DO NOT use this to represent quaternions that follow the JPL +// convention, or any of the other quaternion flavors out there. +// +// Definitions: +// +// - Quaternion norm (or magnitude): sqrt(x^2 + y^2 + z^2 + w^2). +// - Unit (or normalized) quaternion: a quaternion whose norm is 1. +// - Pure quaternion: a quaternion whose scalar component (w) is 0. +// - Rotation quaternion: a unit quaternion used to represent rotation. +// - Orientation quaternion: a unit quaternion used to represent orientation. +// +// A quaternion can be normalized by dividing it by its norm. The resulting +// quaternion maintains the same direction, but has a norm of 1, i.e. it moves +// on the unit sphere. This is generally necessary for rotation and orientation +// quaternions, to avoid rounding errors: +// https://en.wikipedia.org/wiki/Rotation_formalisms_in_three_dimensions +// +// Note that (x, y, z, w) and (-x, -y, -z, -w) represent the same rotation, but +// normalization would be even more useful, e.g. for comparison purposes, if it +// would produce a unique representation. It is thus recommended that w be kept +// positive, which can be achieved by changing all the signs when w is negative. +// +// +// Next available tag: 5 +type Quaternion struct { + // The x component. + X float64 `protobuf:"fixed64,1,opt,name=x,proto3" json:"x,omitempty"` + // The y component. + Y float64 `protobuf:"fixed64,2,opt,name=y,proto3" json:"y,omitempty"` + // The z component. + Z float64 `protobuf:"fixed64,3,opt,name=z,proto3" json:"z,omitempty"` + // The scalar component. 
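+	// As an illustrative sketch of the normalization described above
+	// (assuming q is a *Quaternion and the math package is imported),
+	// dividing each component by the norm sqrt(x^2 + y^2 + z^2 + w^2)
+	// yields a unit quaternion:
+	//
+	//	n := math.Sqrt(q.X*q.X + q.Y*q.Y + q.Z*q.Z + q.W*q.W)
+	//	q.X, q.Y, q.Z, q.W = q.X/n, q.Y/n, q.Z/n, q.W/n
+	//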
+ W float64 `protobuf:"fixed64,4,opt,name=w,proto3" json:"w,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Quaternion) Reset() { *m = Quaternion{} } +func (m *Quaternion) String() string { return proto.CompactTextString(m) } +func (*Quaternion) ProtoMessage() {} +func (*Quaternion) Descriptor() ([]byte, []int) { + return fileDescriptor_quaternion_a8bd5c2f09ef2c54, []int{0} +} +func (m *Quaternion) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Quaternion.Unmarshal(m, b) +} +func (m *Quaternion) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Quaternion.Marshal(b, m, deterministic) +} +func (dst *Quaternion) XXX_Merge(src proto.Message) { + xxx_messageInfo_Quaternion.Merge(dst, src) +} +func (m *Quaternion) XXX_Size() int { + return xxx_messageInfo_Quaternion.Size(m) +} +func (m *Quaternion) XXX_DiscardUnknown() { + xxx_messageInfo_Quaternion.DiscardUnknown(m) +} + +var xxx_messageInfo_Quaternion proto.InternalMessageInfo + +func (m *Quaternion) GetX() float64 { + if m != nil { + return m.X + } + return 0 +} + +func (m *Quaternion) GetY() float64 { + if m != nil { + return m.Y + } + return 0 +} + +func (m *Quaternion) GetZ() float64 { + if m != nil { + return m.Z + } + return 0 +} + +func (m *Quaternion) GetW() float64 { + if m != nil { + return m.W + } + return 0 +} + +func init() { + proto.RegisterType((*Quaternion)(nil), "google.type.Quaternion") +} + +func init() { + proto.RegisterFile("google/type/quaternion.proto", fileDescriptor_quaternion_a8bd5c2f09ef2c54) +} + +var fileDescriptor_quaternion_a8bd5c2f09ef2c54 = []byte{ + // 170 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x49, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x2f, 0x2c, 0x4d, 0x2c, 0x49, 0x2d, 0xca, 0xcb, + 0xcc, 0xcf, 0xd3, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0xc8, 0xea, 0x81, 0x64, 0x95, + 0x5c, 0xb8, 0xb8, 0x02, 0xe1, 0x0a, 0x84, 0x78, 0xb8, 0x18, 0x2b, 0x24, 0x18, 0x15, 0x18, 0x35, + 0x18, 0x83, 0x18, 0x2b, 0x40, 0xbc, 0x4a, 0x09, 0x26, 0x08, 0xaf, 0x12, 0xc4, 0xab, 0x92, 0x60, + 0x86, 0xf0, 0xaa, 0x40, 0xbc, 0x72, 0x09, 0x16, 0x08, 0xaf, 0xdc, 0x29, 0x9f, 0x8b, 0x3f, 0x39, + 0x3f, 0x57, 0x0f, 0xc9, 0x60, 0x27, 0x7e, 0x84, 0xb1, 0x01, 0x20, 0x6b, 0x03, 0x18, 0xa3, 0x1c, + 0xa0, 0xf2, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa, 0xe9, 0xa9, 0x79, + 0x60, 0x47, 0xe9, 0x43, 0xa4, 0x12, 0x0b, 0x32, 0x8b, 0xd1, 0x5d, 0x6d, 0x8d, 0x60, 0xfe, 0x60, + 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0x12, 0x90, 0xc4, 0x06, 0xd6, 0x65, 0x0c, 0x08, 0x00, 0x00, + 0xff, 0xff, 0xc0, 0x0f, 0xce, 0x99, 0xea, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/type/timeofday/timeofday.pb.go b/vendor/google.golang.org/genproto/googleapis/type/timeofday/timeofday.pb.go new file mode 100644 index 0000000..b115777 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/type/timeofday/timeofday.pb.go @@ -0,0 +1,115 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/type/timeofday.proto + +package timeofday // import "google.golang.org/genproto/googleapis/type/timeofday" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Represents a time of day. The date and time zone are either not significant +// or are specified elsewhere. An API may choose to allow leap seconds. Related +// types are [google.type.Date][google.type.Date] and `google.protobuf.Timestamp`. +type TimeOfDay struct { + // Hours of day in 24 hour format. Should be from 0 to 23. An API may choose + // to allow the value "24:00:00" for scenarios like business closing time. + Hours int32 `protobuf:"varint,1,opt,name=hours,proto3" json:"hours,omitempty"` + // Minutes of hour of day. Must be from 0 to 59. + Minutes int32 `protobuf:"varint,2,opt,name=minutes,proto3" json:"minutes,omitempty"` + // Seconds of minutes of the time. Must normally be from 0 to 59. An API may + // allow the value 60 if it allows leap-seconds. + Seconds int32 `protobuf:"varint,3,opt,name=seconds,proto3" json:"seconds,omitempty"` + // Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999. + Nanos int32 `protobuf:"varint,4,opt,name=nanos,proto3" json:"nanos,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimeOfDay) Reset() { *m = TimeOfDay{} } +func (m *TimeOfDay) String() string { return proto.CompactTextString(m) } +func (*TimeOfDay) ProtoMessage() {} +func (*TimeOfDay) Descriptor() ([]byte, []int) { + return fileDescriptor_timeofday_a44a6b433ddd93bc, []int{0} +} +func (m *TimeOfDay) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimeOfDay.Unmarshal(m, b) +} +func (m *TimeOfDay) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimeOfDay.Marshal(b, m, deterministic) +} +func (dst *TimeOfDay) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimeOfDay.Merge(dst, src) +} +func (m *TimeOfDay) XXX_Size() int { + return xxx_messageInfo_TimeOfDay.Size(m) +} +func (m *TimeOfDay) XXX_DiscardUnknown() { + xxx_messageInfo_TimeOfDay.DiscardUnknown(m) +} + +var xxx_messageInfo_TimeOfDay proto.InternalMessageInfo + +func (m *TimeOfDay) GetHours() int32 { + if m != nil { + return m.Hours + } + return 0 +} + +func (m *TimeOfDay) GetMinutes() int32 { + if m != nil { + return m.Minutes + } + return 0 +} + +func (m *TimeOfDay) GetSeconds() int32 { + if m != nil { + return m.Seconds + } + return 0 +} + +func (m *TimeOfDay) GetNanos() int32 { + if m != nil { + return m.Nanos + } + return 0 +} + +func init() { + proto.RegisterType((*TimeOfDay)(nil), "google.type.TimeOfDay") +} + +func init() { + proto.RegisterFile("google/type/timeofday.proto", fileDescriptor_timeofday_a44a6b433ddd93bc) +} + +var fileDescriptor_timeofday_a44a6b433ddd93bc = []byte{ + // 201 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x2f, 0xc9, 0xcc, 0x4d, 0xcd, 0x4f, 0x4b, 0x49, + 0xac, 0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x86, 0x48, 0xea, 0x81, 0x24, 0x95, 0xb2, + 0xb9, 0x38, 0x43, 0x32, 0x73, 0x53, 0xfd, 0xd3, 0x5c, 0x12, 0x2b, 0x85, 0x44, 0xb8, 0x58, 0x33, + 0xf2, 0x4b, 0x8b, 0x8a, 0x25, 0x18, 0x15, 0x18, 0x35, 0x58, 0x83, 0x20, 0x1c, 
0x21, 0x09, 0x2e, + 0xf6, 0xdc, 0xcc, 0xbc, 0xd2, 0x92, 0xd4, 0x62, 0x09, 0x26, 0xb0, 0x38, 0x8c, 0x0b, 0x92, 0x29, + 0x4e, 0x4d, 0xce, 0xcf, 0x4b, 0x29, 0x96, 0x60, 0x86, 0xc8, 0x40, 0xb9, 0x20, 0x93, 0xf2, 0x12, + 0xf3, 0xf2, 0x8b, 0x25, 0x58, 0x20, 0x26, 0x81, 0x39, 0x4e, 0x39, 0x5c, 0xfc, 0xc9, 0xf9, 0xb9, + 0x7a, 0x48, 0xf6, 0x3b, 0xf1, 0xc1, 0x6d, 0x0f, 0x00, 0x39, 0x2e, 0x80, 0x31, 0xca, 0x0e, 0x2a, + 0x9d, 0x9e, 0x9f, 0x93, 0x98, 0x97, 0xae, 0x97, 0x5f, 0x94, 0xae, 0x9f, 0x9e, 0x9a, 0x07, 0x76, + 0xba, 0x3e, 0x44, 0x2a, 0xb1, 0x20, 0xb3, 0x18, 0xcd, 0x6b, 0xd6, 0x70, 0xd6, 0x0f, 0x46, 0xc6, + 0x45, 0x4c, 0xcc, 0xee, 0x21, 0x01, 0x49, 0x6c, 0x60, 0x3d, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, + 0xff, 0x21, 0xc8, 0xc5, 0x17, 0x0d, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/googleapis/watcher/v1/watch.pb.go b/vendor/google.golang.org/genproto/googleapis/watcher/v1/watch.pb.go new file mode 100644 index 0000000..cfba658 --- /dev/null +++ b/vendor/google.golang.org/genproto/googleapis/watcher/v1/watch.pb.go @@ -0,0 +1,435 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/watcher/v1/watch.proto + +package watcher // import "google.golang.org/genproto/googleapis/watcher/v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _ "github.com/golang/protobuf/ptypes/empty" +import _ "google.golang.org/genproto/googleapis/api/annotations" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// A reported value can be in one of the following states: +type Change_State int32 + +const ( + // The element exists and its full value is included in data. + Change_EXISTS Change_State = 0 + // The element does not exist. + Change_DOES_NOT_EXIST Change_State = 1 + // Element may or may not exist. Used only for initial state delivery when + // the client is not interested in fetching the initial state. See the + // "Initial State" section above. + Change_INITIAL_STATE_SKIPPED Change_State = 2 + // The element may exist, but some error has occurred. More information is + // available in the data field - the value is a serialized Status + // proto (from [google.rpc.Status][]) + Change_ERROR Change_State = 3 +) + +var Change_State_name = map[int32]string{ + 0: "EXISTS", + 1: "DOES_NOT_EXIST", + 2: "INITIAL_STATE_SKIPPED", + 3: "ERROR", +} +var Change_State_value = map[string]int32{ + "EXISTS": 0, + "DOES_NOT_EXIST": 1, + "INITIAL_STATE_SKIPPED": 2, + "ERROR": 3, +} + +func (x Change_State) String() string { + return proto.EnumName(Change_State_name, int32(x)) +} +func (Change_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_watch_fea2f00fe0718109, []int{2, 0} +} + +// The message used by the client to register interest in an entity. +type Request struct { + // The `target` value **must** be a valid URL path pointing to an entity + // to watch. 
Note that the service name **must** be + // removed from the target field (e.g., the target field must say + // "/foo/bar", not "myservice.googleapis.com/foo/bar"). A client is + // also allowed to pass system-specific parameters in the URL that + // are only obeyed by some implementations. Some parameters will be + // implementation-specific. However, some have predefined meaning + // and are listed here: + // + // * recursive = true|false [default=false] + // If set to true, indicates that the client wants to watch all elements + // of entities in the subtree rooted at the entity's name in `target`. For + // descendants that are not the immediate children of the target, the + // `Change.element` will contain slashes. + // + // Note that some namespaces and entities will not support recursive + // watching. When watching such an entity, a client must not set recursive + // to true. Otherwise, it will receive an `UNIMPLEMENTED` error. + // + // Normal URL encoding must be used inside `target`. For example, if a query + // parameter name or value, or the non-query parameter portion of `target` + // contains a special character, it must be %-encoded. We recommend that + // clients and servers use their runtime's URL library to produce and consume + // target values. + Target string `protobuf:"bytes,1,opt,name=target,proto3" json:"target,omitempty"` + // The `resume_marker` specifies how much of the existing underlying state is + // delivered to the client when the watch request is received by the + // system. The client can set this marker in one of the following ways to get + // different semantics: + // + // * Parameter is not specified or has the value "". + // Semantics: Fetch initial state. + // The client wants the entity's initial state to be delivered. See the + // description in "Initial State". + // + // * Parameter is set to the string "now" (UTF-8 encoding). + // Semantics: Fetch new changes only. + // The client just wants to get the changes received by the system after + // the watch point. The system may deliver changes from before the watch + // point as well. + // + // * Parameter is set to a value received in an earlier + // `Change.resume_marker` field while watching the same entity. + // Semantics: Resume from a specific point. + // The client wants to receive the changes from a specific point; this + // value must correspond to a value received in the `Change.resume_marker` + // field. The system may deliver changes from before the `resume_marker` + // as well. If the system cannot resume the stream from this point (e.g., + // if it is too far behind in the stream), it can raise the + // `FAILED_PRECONDITION` error. + // + // An implementation MUST support an unspecified parameter and the + // empty string "" marker (initial state fetching) and the "now" marker. + // It need not support resuming from a specific point. 
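+	// For instance (an illustrative sketch; the target path is taken from
+	// the example above), a request that fetches only new changes would set
+	// the "now" marker:
+	//
+	//	req := &Request{Target: "/foo/bar", ResumeMarker: []byte("now")}
+	//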
+ ResumeMarker []byte `protobuf:"bytes,2,opt,name=resume_marker,json=resumeMarker,proto3" json:"resume_marker,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_watch_fea2f00fe0718109, []int{0} +} +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (dst *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(dst, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo + +func (m *Request) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *Request) GetResumeMarker() []byte { + if m != nil { + return m.ResumeMarker + } + return nil +} + +// A batch of Change messages. +type ChangeBatch struct { + // A list of Change messages. + Changes []*Change `protobuf:"bytes,1,rep,name=changes,proto3" json:"changes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChangeBatch) Reset() { *m = ChangeBatch{} } +func (m *ChangeBatch) String() string { return proto.CompactTextString(m) } +func (*ChangeBatch) ProtoMessage() {} +func (*ChangeBatch) Descriptor() ([]byte, []int) { + return fileDescriptor_watch_fea2f00fe0718109, []int{1} +} +func (m *ChangeBatch) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChangeBatch.Unmarshal(m, b) +} +func (m *ChangeBatch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChangeBatch.Marshal(b, m, deterministic) +} +func (dst *ChangeBatch) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChangeBatch.Merge(dst, src) +} +func (m *ChangeBatch) XXX_Size() int { + return xxx_messageInfo_ChangeBatch.Size(m) +} +func (m *ChangeBatch) XXX_DiscardUnknown() { + xxx_messageInfo_ChangeBatch.DiscardUnknown(m) +} + +var xxx_messageInfo_ChangeBatch proto.InternalMessageInfo + +func (m *ChangeBatch) GetChanges() []*Change { + if m != nil { + return m.Changes + } + return nil +} + +// A Change indicates the most recent state of an element. +type Change struct { + // Name of the element, interpreted relative to the entity's actual + // name. "" refers to the entity itself. The element name is a valid + // UTF-8 string. + Element string `protobuf:"bytes,1,opt,name=element,proto3" json:"element,omitempty"` + // The state of the `element`. + State Change_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.watcher.v1.Change_State" json:"state,omitempty"` + // The actual change data. This field is present only when `state() == EXISTS` + // or `state() == ERROR`. Please see + // [google.protobuf.Any][google.protobuf.Any] about how to use the Any type. 
+ Data *any.Any `protobuf:"bytes,6,opt,name=data,proto3" json:"data,omitempty"` + // If present, provides a compact representation of all the messages that have + // been received by the caller for the given entity, e.g., it could be a + // sequence number or a multi-part timestamp/version vector. This marker can + // be provided in the Request message, allowing the caller to resume the + // stream watching at a specific point without fetching the initial state. + ResumeMarker []byte `protobuf:"bytes,4,opt,name=resume_marker,json=resumeMarker,proto3" json:"resume_marker,omitempty"` + // If true, this Change is followed by more Changes that are in the same group + // as this Change. + Continued bool `protobuf:"varint,5,opt,name=continued,proto3" json:"continued,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Change) Reset() { *m = Change{} } +func (m *Change) String() string { return proto.CompactTextString(m) } +func (*Change) ProtoMessage() {} +func (*Change) Descriptor() ([]byte, []int) { + return fileDescriptor_watch_fea2f00fe0718109, []int{2} +} +func (m *Change) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Change.Unmarshal(m, b) +} +func (m *Change) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Change.Marshal(b, m, deterministic) +} +func (dst *Change) XXX_Merge(src proto.Message) { + xxx_messageInfo_Change.Merge(dst, src) +} +func (m *Change) XXX_Size() int { + return xxx_messageInfo_Change.Size(m) +} +func (m *Change) XXX_DiscardUnknown() { + xxx_messageInfo_Change.DiscardUnknown(m) +} + +var xxx_messageInfo_Change proto.InternalMessageInfo + +func (m *Change) GetElement() string { + if m != nil { + return m.Element + } + return "" +} + +func (m *Change) GetState() Change_State { + if m != nil { + return m.State + } + return Change_EXISTS +} + +func (m *Change) GetData() *any.Any { + if m != nil { + return m.Data + } + return nil +} + +func (m *Change) GetResumeMarker() []byte { + if m != nil { + return m.ResumeMarker + } + return nil +} + +func (m *Change) GetContinued() bool { + if m != nil { + return m.Continued + } + return false +} + +func init() { + proto.RegisterType((*Request)(nil), "google.watcher.v1.Request") + proto.RegisterType((*ChangeBatch)(nil), "google.watcher.v1.ChangeBatch") + proto.RegisterType((*Change)(nil), "google.watcher.v1.Change") + proto.RegisterEnum("google.watcher.v1.Change_State", Change_State_name, Change_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WatcherClient is the client API for Watcher service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WatcherClient interface { + // Start a streaming RPC to get watch information from the server. 
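+	// A hypothetical usage sketch (conn and ctx are assumed to be an
+	// established *grpc.ClientConn and a context.Context; the target path
+	// is illustrative):
+	//
+	//	stream, err := NewWatcherClient(conn).Watch(ctx, &Request{Target: "/foo/bar"})
+	//	if err != nil {
+	//		return err
+	//	}
+	//	for {
+	//		batch, err := stream.Recv()
+	//		if err != nil {
+	//			break // io.EOF or a stream error
+	//		}
+	//		for _, c := range batch.Changes {
+	//			_ = c.Element // inspect c.State, c.Data, c.ResumeMarker as needed
+	//		}
+	//	}
+	//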
+ Watch(ctx context.Context, in *Request, opts ...grpc.CallOption) (Watcher_WatchClient, error) +} + +type watcherClient struct { + cc *grpc.ClientConn +} + +func NewWatcherClient(cc *grpc.ClientConn) WatcherClient { + return &watcherClient{cc} +} + +func (c *watcherClient) Watch(ctx context.Context, in *Request, opts ...grpc.CallOption) (Watcher_WatchClient, error) { + stream, err := c.cc.NewStream(ctx, &_Watcher_serviceDesc.Streams[0], "/google.watcher.v1.Watcher/Watch", opts...) + if err != nil { + return nil, err + } + x := &watcherWatchClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Watcher_WatchClient interface { + Recv() (*ChangeBatch, error) + grpc.ClientStream +} + +type watcherWatchClient struct { + grpc.ClientStream +} + +func (x *watcherWatchClient) Recv() (*ChangeBatch, error) { + m := new(ChangeBatch) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// WatcherServer is the server API for Watcher service. +type WatcherServer interface { + // Start a streaming RPC to get watch information from the server. + Watch(*Request, Watcher_WatchServer) error +} + +func RegisterWatcherServer(s *grpc.Server, srv WatcherServer) { + s.RegisterService(&_Watcher_serviceDesc, srv) +} + +func _Watcher_Watch_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(Request) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(WatcherServer).Watch(m, &watcherWatchServer{stream}) +} + +type Watcher_WatchServer interface { + Send(*ChangeBatch) error + grpc.ServerStream +} + +type watcherWatchServer struct { + grpc.ServerStream +} + +func (x *watcherWatchServer) Send(m *ChangeBatch) error { + return x.ServerStream.SendMsg(m) +} + +var _Watcher_serviceDesc = grpc.ServiceDesc{ + ServiceName: "google.watcher.v1.Watcher", + HandlerType: (*WatcherServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "Watch", + Handler: _Watcher_Watch_Handler, + ServerStreams: true, + }, + }, + Metadata: "google/watcher/v1/watch.proto", +} + +func init() { + proto.RegisterFile("google/watcher/v1/watch.proto", fileDescriptor_watch_fea2f00fe0718109) +} + +var fileDescriptor_watch_fea2f00fe0718109 = []byte{ + // 449 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x52, 0xdd, 0x6e, 0xd3, 0x30, + 0x14, 0xc6, 0xdd, 0x92, 0xd2, 0xd3, 0x31, 0x75, 0x16, 0x43, 0x69, 0x19, 0x10, 0x85, 0x9b, 0x5c, + 0x25, 0xac, 0x13, 0x12, 0x12, 0x57, 0x2d, 0x0b, 0x52, 0x04, 0x5b, 0x2b, 0x27, 0x12, 0x13, 0x37, + 0x91, 0x97, 0x99, 0xac, 0xa2, 0xb1, 0x4b, 0xe2, 0x0e, 0xed, 0x96, 0x57, 0x40, 0x3c, 0x19, 0xaf, + 0xc0, 0x83, 0xa0, 0xda, 0x0e, 0x20, 0xb2, 0xde, 0x9d, 0xf3, 0xfd, 0xd8, 0xe7, 0x3b, 0x36, 0x3c, + 0x29, 0x84, 0x28, 0x96, 0x2c, 0xfc, 0x4a, 0x65, 0x7e, 0xcd, 0xaa, 0xf0, 0xe6, 0x58, 0x97, 0xc1, + 0xaa, 0x12, 0x52, 0xe0, 0x03, 0x4d, 0x07, 0x86, 0x0e, 0x6e, 0x8e, 0x47, 0x47, 0xc6, 0x41, 0x57, + 0x8b, 0x90, 0x72, 0x2e, 0x24, 0x95, 0x0b, 0xc1, 0x6b, 0x6d, 0x18, 0x0d, 0x0d, 0xab, 0xba, 0xcb, + 0xf5, 0xa7, 0x90, 0xf2, 0x5b, 0x43, 0x3d, 0xfe, 0x9f, 0x62, 0xe5, 0x4a, 0x1a, 0xd2, 0x7b, 0x0b, + 0x5d, 0xc2, 0xbe, 0xac, 0x59, 0x2d, 0xf1, 0x23, 0xb0, 0x25, 0xad, 0x0a, 0x26, 0x1d, 0xe4, 0x22, + 0xbf, 0x47, 0x4c, 0x87, 0x9f, 0xc3, 0x83, 0x8a, 0xd5, 0xeb, 0x92, 0x65, 0x25, 0xad, 0x3e, 0xb3, + 0xca, 0xe9, 0xb8, 0xc8, 0xdf, 0x23, 0x7b, 0x1a, 
0x3c, 0x53, 0x98, 0x37, 0x85, 0xfe, 0x9b, 0x6b, + 0xca, 0x0b, 0x36, 0xdd, 0x4c, 0x8c, 0x4f, 0xa0, 0x9b, 0xab, 0xb6, 0x76, 0x90, 0xbb, 0xe3, 0xf7, + 0xc7, 0xc3, 0xa0, 0x95, 0x28, 0xd0, 0x06, 0xd2, 0x28, 0xbd, 0x1f, 0x1d, 0xb0, 0x35, 0x86, 0x1d, + 0xe8, 0xb2, 0x25, 0x2b, 0x19, 0x6f, 0x86, 0x69, 0x5a, 0xfc, 0x12, 0xac, 0x5a, 0x52, 0xc9, 0xd4, + 0x14, 0xfb, 0xe3, 0x67, 0x5b, 0xcf, 0x0d, 0x92, 0x8d, 0x8c, 0x68, 0x35, 0xf6, 0x61, 0xf7, 0x8a, + 0x4a, 0xea, 0xd8, 0x2e, 0xf2, 0xfb, 0xe3, 0x87, 0x8d, 0xab, 0xd9, 0x49, 0x30, 0xe1, 0xb7, 0x44, + 0x29, 0xda, 0x71, 0x77, 0xdb, 0x71, 0xf1, 0x11, 0xf4, 0x72, 0xc1, 0xe5, 0x82, 0xaf, 0xd9, 0x95, + 0x63, 0xb9, 0xc8, 0xbf, 0x4f, 0xfe, 0x02, 0xde, 0x19, 0x58, 0xea, 0x72, 0x0c, 0x60, 0x47, 0x17, + 0x71, 0x92, 0x26, 0x83, 0x7b, 0x18, 0xc3, 0xfe, 0xe9, 0x2c, 0x4a, 0xb2, 0xf3, 0x59, 0x9a, 0x29, + 0x70, 0x80, 0xf0, 0x10, 0x0e, 0xe3, 0xf3, 0x38, 0x8d, 0x27, 0xef, 0xb3, 0x24, 0x9d, 0xa4, 0x51, + 0x96, 0xbc, 0x8b, 0xe7, 0xf3, 0xe8, 0x74, 0xd0, 0xc1, 0x3d, 0xb0, 0x22, 0x42, 0x66, 0x64, 0xb0, + 0x33, 0xce, 0xa1, 0xfb, 0x41, 0xa7, 0xc3, 0x17, 0x60, 0xa9, 0x12, 0x8f, 0xee, 0xc8, 0x6d, 0x1e, + 0x72, 0xf4, 0x74, 0xeb, 0x4e, 0xd4, 0xe3, 0x78, 0x07, 0xdf, 0x7e, 0xfe, 0xfa, 0xde, 0xe9, 0xe3, + 0xde, 0x9f, 0x5f, 0xf7, 0x02, 0x4d, 0x33, 0x38, 0xcc, 0x45, 0xd9, 0x76, 0x4e, 0x41, 0x5d, 0x38, + 0xdf, 0x2c, 0x6a, 0x8e, 0x3e, 0xbe, 0x32, 0x82, 0x42, 0x2c, 0x29, 0x2f, 0x02, 0x51, 0x15, 0x61, + 0xc1, 0xb8, 0x5a, 0x63, 0xa8, 0x29, 0xba, 0x5a, 0xd4, 0xff, 0x7c, 0xeb, 0xd7, 0xa6, 0xbc, 0xb4, + 0x95, 0xe8, 0xe4, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x48, 0x0a, 0xba, 0x6c, 0xfa, 0x02, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/genproto/protobuf/api/api.pb.go b/vendor/google.golang.org/genproto/protobuf/api/api.pb.go new file mode 100644 index 0000000..34a3f30 --- /dev/null +++ b/vendor/google.golang.org/genproto/protobuf/api/api.pb.go @@ -0,0 +1,405 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/api.proto + +package api // import "google.golang.org/genproto/protobuf/api" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import ptype "google.golang.org/genproto/protobuf/ptype" +import source_context "google.golang.org/genproto/protobuf/source_context" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Api is a light-weight descriptor for an API Interface. +// +// Interfaces are also described as "protocol buffer services" in some contexts, +// such as by the "service" keyword in a .proto file, but they are different +// from API Services, which represent a concrete implementation of an interface +// as opposed to simply a description of methods and bindings. They are also +// sometimes simply referred to as "APIs" in other contexts, such as the name of +// this message itself. See https://cloud.google.com/apis/design/glossary for +// detailed terminology. +type Api struct { + // The fully qualified name of this interface, including package name + // followed by the interface's simple name. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The methods of this interface, in unspecified order. + Methods []*Method `protobuf:"bytes,2,rep,name=methods,proto3" json:"methods,omitempty"` + // Any metadata attached to the interface. + Options []*ptype.Option `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty"` + // A version string for this interface. If specified, must have the form + // `major-version.minor-version`, as in `1.10`. If the minor version is + // omitted, it defaults to zero. If the entire version field is empty, the + // major version is derived from the package name, as outlined below. If the + // field is not empty, the version in the package name will be verified to be + // consistent with what is provided here. + // + // The versioning schema uses [semantic + // versioning](http://semver.org) where the major version number + // indicates a breaking change and the minor version an additive, + // non-breaking change. Both version numbers are signals to users + // what to expect from different versions, and should be carefully + // chosen based on the product plan. + // + // The major version is also reflected in the package name of the + // interface, which must end in `v`, as in + // `google.feature.v1`. For major versions 0 and 1, the suffix can + // be omitted. Zero major versions must only be used for + // experimental, non-GA interfaces. + // + // + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + // Source context for the protocol buffer service represented by this + // message. + SourceContext *source_context.SourceContext `protobuf:"bytes,5,opt,name=source_context,json=sourceContext,proto3" json:"source_context,omitempty"` + // Included interfaces. See [Mixin][]. + Mixins []*Mixin `protobuf:"bytes,6,rep,name=mixins,proto3" json:"mixins,omitempty"` + // The source syntax of the service. 
+ Syntax ptype.Syntax `protobuf:"varint,7,opt,name=syntax,proto3,enum=google.protobuf.Syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Api) Reset() { *m = Api{} } +func (m *Api) String() string { return proto.CompactTextString(m) } +func (*Api) ProtoMessage() {} +func (*Api) Descriptor() ([]byte, []int) { + return fileDescriptor_api_4ac70dbc2fa3744b, []int{0} +} +func (m *Api) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Api.Unmarshal(m, b) +} +func (m *Api) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Api.Marshal(b, m, deterministic) +} +func (dst *Api) XXX_Merge(src proto.Message) { + xxx_messageInfo_Api.Merge(dst, src) +} +func (m *Api) XXX_Size() int { + return xxx_messageInfo_Api.Size(m) +} +func (m *Api) XXX_DiscardUnknown() { + xxx_messageInfo_Api.DiscardUnknown(m) +} + +var xxx_messageInfo_Api proto.InternalMessageInfo + +func (m *Api) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Api) GetMethods() []*Method { + if m != nil { + return m.Methods + } + return nil +} + +func (m *Api) GetOptions() []*ptype.Option { + if m != nil { + return m.Options + } + return nil +} + +func (m *Api) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Api) GetSourceContext() *source_context.SourceContext { + if m != nil { + return m.SourceContext + } + return nil +} + +func (m *Api) GetMixins() []*Mixin { + if m != nil { + return m.Mixins + } + return nil +} + +func (m *Api) GetSyntax() ptype.Syntax { + if m != nil { + return m.Syntax + } + return ptype.Syntax_SYNTAX_PROTO2 +} + +// Method represents a method of an API interface. +type Method struct { + // The simple name of this method. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // A URL of the input message type. + RequestTypeUrl string `protobuf:"bytes,2,opt,name=request_type_url,json=requestTypeUrl,proto3" json:"request_type_url,omitempty"` + // If true, the request is streamed. + RequestStreaming bool `protobuf:"varint,3,opt,name=request_streaming,json=requestStreaming,proto3" json:"request_streaming,omitempty"` + // The URL of the output message type. + ResponseTypeUrl string `protobuf:"bytes,4,opt,name=response_type_url,json=responseTypeUrl,proto3" json:"response_type_url,omitempty"` + // If true, the response is streamed. + ResponseStreaming bool `protobuf:"varint,5,opt,name=response_streaming,json=responseStreaming,proto3" json:"response_streaming,omitempty"` + // Any metadata attached to the method. + Options []*ptype.Option `protobuf:"bytes,6,rep,name=options,proto3" json:"options,omitempty"` + // The source syntax of this method. 
+ Syntax ptype.Syntax `protobuf:"varint,7,opt,name=syntax,proto3,enum=google.protobuf.Syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Method) Reset() { *m = Method{} } +func (m *Method) String() string { return proto.CompactTextString(m) } +func (*Method) ProtoMessage() {} +func (*Method) Descriptor() ([]byte, []int) { + return fileDescriptor_api_4ac70dbc2fa3744b, []int{1} +} +func (m *Method) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Method.Unmarshal(m, b) +} +func (m *Method) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Method.Marshal(b, m, deterministic) +} +func (dst *Method) XXX_Merge(src proto.Message) { + xxx_messageInfo_Method.Merge(dst, src) +} +func (m *Method) XXX_Size() int { + return xxx_messageInfo_Method.Size(m) +} +func (m *Method) XXX_DiscardUnknown() { + xxx_messageInfo_Method.DiscardUnknown(m) +} + +var xxx_messageInfo_Method proto.InternalMessageInfo + +func (m *Method) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Method) GetRequestTypeUrl() string { + if m != nil { + return m.RequestTypeUrl + } + return "" +} + +func (m *Method) GetRequestStreaming() bool { + if m != nil { + return m.RequestStreaming + } + return false +} + +func (m *Method) GetResponseTypeUrl() string { + if m != nil { + return m.ResponseTypeUrl + } + return "" +} + +func (m *Method) GetResponseStreaming() bool { + if m != nil { + return m.ResponseStreaming + } + return false +} + +func (m *Method) GetOptions() []*ptype.Option { + if m != nil { + return m.Options + } + return nil +} + +func (m *Method) GetSyntax() ptype.Syntax { + if m != nil { + return m.Syntax + } + return ptype.Syntax_SYNTAX_PROTO2 +} + +// Declares an API Interface to be included in this interface. The including +// interface must redeclare all the methods from the included interface, but +// documentation and options are inherited as follows: +// +// - If after comment and whitespace stripping, the documentation +// string of the redeclared method is empty, it will be inherited +// from the original method. +// +// - Each annotation belonging to the service config (http, +// visibility) which is not set in the redeclared method will be +// inherited. +// +// - If an http annotation is inherited, the path pattern will be +// modified as follows. Any version prefix will be replaced by the +// version of the including interface plus the [root][] path if +// specified. +// +// Example of a simple mixin: +// +// package google.acl.v1; +// service AccessControl { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v1/{resource=**}:getAcl"; +// } +// } +// +// package google.storage.v2; +// service Storage { +// rpc GetAcl(GetAclRequest) returns (Acl); +// +// // Get a data record. +// rpc GetData(GetDataRequest) returns (Data) { +// option (google.api.http).get = "/v2/{resource=**}"; +// } +// } +// +// Example of a mixin configuration: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// +// The mixin construct implies that all methods in `AccessControl` are +// also declared with same name and request/response types in +// `Storage`. 
A documentation generator or annotation processor will +// see the effective `Storage.GetAcl` method after inherting +// documentation and annotations as follows: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/{resource=**}:getAcl"; +// } +// ... +// } +// +// Note how the version in the path pattern changed from `v1` to `v2`. +// +// If the `root` field in the mixin is specified, it should be a +// relative path under which inherited HTTP paths are placed. Example: +// +// apis: +// - name: google.storage.v2.Storage +// mixins: +// - name: google.acl.v1.AccessControl +// root: acls +// +// This implies the following inherited HTTP annotation: +// +// service Storage { +// // Get the underlying ACL object. +// rpc GetAcl(GetAclRequest) returns (Acl) { +// option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; +// } +// ... +// } +type Mixin struct { + // The fully qualified name of the interface which is included. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // If non-empty specifies a path under which inherited HTTP paths + // are rooted. + Root string `protobuf:"bytes,2,opt,name=root,proto3" json:"root,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mixin) Reset() { *m = Mixin{} } +func (m *Mixin) String() string { return proto.CompactTextString(m) } +func (*Mixin) ProtoMessage() {} +func (*Mixin) Descriptor() ([]byte, []int) { + return fileDescriptor_api_4ac70dbc2fa3744b, []int{2} +} +func (m *Mixin) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mixin.Unmarshal(m, b) +} +func (m *Mixin) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mixin.Marshal(b, m, deterministic) +} +func (dst *Mixin) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mixin.Merge(dst, src) +} +func (m *Mixin) XXX_Size() int { + return xxx_messageInfo_Mixin.Size(m) +} +func (m *Mixin) XXX_DiscardUnknown() { + xxx_messageInfo_Mixin.DiscardUnknown(m) +} + +var xxx_messageInfo_Mixin proto.InternalMessageInfo + +func (m *Mixin) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Mixin) GetRoot() string { + if m != nil { + return m.Root + } + return "" +} + +func init() { + proto.RegisterType((*Api)(nil), "google.protobuf.Api") + proto.RegisterType((*Method)(nil), "google.protobuf.Method") + proto.RegisterType((*Mixin)(nil), "google.protobuf.Mixin") +} + +func init() { proto.RegisterFile("google/protobuf/api.proto", fileDescriptor_api_4ac70dbc2fa3744b) } + +var fileDescriptor_api_4ac70dbc2fa3744b = []byte{ + // 432 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x93, 0xcf, 0x8e, 0xd3, 0x30, + 0x10, 0xc6, 0x95, 0xa4, 0x4d, 0x17, 0xaf, 0xe8, 0x82, 0x91, 0xc0, 0xf4, 0xb0, 0x8a, 0x56, 0x1c, + 0x22, 0x2a, 0x12, 0x51, 0x8e, 0x9c, 0x5a, 0x84, 0x7a, 0x40, 0x88, 0x28, 0x05, 0x21, 0x71, 0xa9, + 0xd2, 0x62, 0x82, 0xa5, 0xc4, 0x63, 0x6c, 0x07, 0xda, 0xd7, 0xe1, 0xc8, 0x91, 0x37, 0xe0, 0xcd, + 0x50, 0x9c, 0xb8, 0x7f, 0xd2, 0x22, 0xb1, 0x37, 0x8f, 0xbf, 0xdf, 0x7c, 0x99, 0xf9, 0xac, 0xa0, + 0xc7, 0x39, 0x40, 0x5e, 0xd0, 0x58, 0x48, 0xd0, 0xb0, 0xaa, 0xbe, 0xc4, 0x99, 0x60, 0x91, 0x29, + 0xf0, 0x55, 0x23, 0x45, 0x56, 0x1a, 0x3d, 0xe9, 0xb2, 0x0a, 0x2a, 0xb9, 0xa6, 0xcb, 0x35, 0x70, + 0x4d, 0x37, 0xba, 0x01, 0x47, 0xa3, 0x2e, 0xa5, 0xb7, 0xa2, 0x35, 0xb9, 0xf9, 0xe3, 
0x22, 0x6f, + 0x2a, 0x18, 0xc6, 0xa8, 0xc7, 0xb3, 0x92, 0x12, 0x27, 0x70, 0xc2, 0x3b, 0xa9, 0x39, 0xe3, 0xe7, + 0x68, 0x50, 0x52, 0xfd, 0x15, 0x3e, 0x2b, 0xe2, 0x06, 0x5e, 0x78, 0x39, 0x79, 0x14, 0x75, 0x06, + 0x88, 0xde, 0x1a, 0x3d, 0xb5, 0x5c, 0xdd, 0x02, 0x42, 0x33, 0xe0, 0x8a, 0x78, 0xff, 0x68, 0x79, + 0x67, 0xf4, 0xd4, 0x72, 0x98, 0xa0, 0xc1, 0x77, 0x2a, 0x15, 0x03, 0x4e, 0x7a, 0xe6, 0xe3, 0xb6, + 0xc4, 0xaf, 0xd1, 0xf0, 0x78, 0x1f, 0xd2, 0x0f, 0x9c, 0xf0, 0x72, 0x72, 0x7d, 0xe2, 0xb9, 0x30, + 0xd8, 0xab, 0x86, 0x4a, 0xef, 0xaa, 0xc3, 0x12, 0x47, 0xc8, 0x2f, 0xd9, 0x86, 0x71, 0x45, 0x7c, + 0x33, 0xd2, 0xc3, 0xd3, 0x2d, 0x6a, 0x39, 0x6d, 0x29, 0x1c, 0x23, 0x5f, 0x6d, 0xb9, 0xce, 0x36, + 0x64, 0x10, 0x38, 0xe1, 0xf0, 0xcc, 0x0a, 0x0b, 0x23, 0xa7, 0x2d, 0x76, 0xf3, 0xdb, 0x45, 0x7e, + 0x13, 0xc4, 0xd9, 0x18, 0x43, 0x74, 0x4f, 0xd2, 0x6f, 0x15, 0x55, 0x7a, 0x59, 0x07, 0xbf, 0xac, + 0x64, 0x41, 0x5c, 0xa3, 0x0f, 0xdb, 0xfb, 0xf7, 0x5b, 0x41, 0x3f, 0xc8, 0x02, 0x8f, 0xd1, 0x7d, + 0x4b, 0x2a, 0x2d, 0x69, 0x56, 0x32, 0x9e, 0x13, 0x2f, 0x70, 0xc2, 0x8b, 0xd4, 0x5a, 0x2c, 0xec, + 0x3d, 0x7e, 0x5a, 0xc3, 0x4a, 0x00, 0x57, 0x74, 0xef, 0xdb, 0x24, 0x78, 0x65, 0x05, 0x6b, 0xfc, + 0x0c, 0xe1, 0x1d, 0xbb, 0x77, 0xee, 0x1b, 0xe7, 0x9d, 0xcb, 0xde, 0xfa, 0xe0, 0x15, 0xfd, 0xff, + 0x7c, 0xc5, 0x5b, 0x87, 0x16, 0xa3, 0xbe, 0x89, 0xfd, 0x6c, 0x64, 0x18, 0xf5, 0x24, 0x80, 0x6e, + 0x63, 0x32, 0xe7, 0x59, 0x85, 0x1e, 0xac, 0xa1, 0xec, 0xda, 0xce, 0x2e, 0xa6, 0x82, 0x25, 0x75, + 0x91, 0x38, 0x9f, 0xc6, 0xad, 0x98, 0x43, 0x91, 0xf1, 0x3c, 0x02, 0x99, 0xc7, 0x39, 0xe5, 0x06, + 0x3d, 0xfa, 0x9d, 0x5e, 0x66, 0x82, 0xfd, 0x74, 0xbd, 0x79, 0x32, 0xfb, 0xe5, 0x5e, 0xcf, 0x9b, + 0x9e, 0xc4, 0xce, 0xf9, 0x91, 0x16, 0xc5, 0x1b, 0x0e, 0x3f, 0x78, 0x1d, 0x9e, 0x5a, 0xf9, 0xa6, + 0xf1, 0xc5, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfe, 0x07, 0x73, 0x11, 0x97, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go b/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go new file mode 100644 index 0000000..60241b8 --- /dev/null +++ b/vendor/google.golang.org/genproto/protobuf/field_mask/field_mask.pb.go @@ -0,0 +1,280 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/field_mask.proto + +package field_mask // import "google.golang.org/genproto/protobuf/field_mask" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `FieldMask` represents a set of symbolic field paths, for example: +// +// paths: "f.a" +// paths: "f.b.d" +// +// Here `f` represents a field in some root message, `a` and `b` +// fields in the message found in `f`, and `d` a field found in the +// message in `f.b`. +// +// Field masks are used to specify a subset of fields that should be +// returned by a get operation or modified by an update operation. +// Field masks also have a custom JSON encoding (see below). 
+// +// # Field Masks in Projections +// +// When used in the context of a projection, a response message or +// sub-message is filtered by the API to only contain those fields as +// specified in the mask. For example, if the mask in the previous +// example is applied to a response message as follows: +// +// f { +// a : 22 +// b { +// d : 1 +// x : 2 +// } +// y : 13 +// } +// z: 8 +// +// The result will not contain specific values for fields x,y and z +// (their value will be set to the default, and omitted in proto text +// output): +// +// +// f { +// a : 22 +// b { +// d : 1 +// } +// } +// +// A repeated field is not allowed except at the last position of a +// paths string. +// +// If a FieldMask object is not present in a get operation, the +// operation applies to all fields (as if a FieldMask of all fields +// had been specified). +// +// Note that a field mask does not necessarily apply to the +// top-level response message. In case of a REST get operation, the +// field mask applies directly to the response, but in case of a REST +// list operation, the mask instead applies to each individual message +// in the returned resource list. In case of a REST custom method, +// other definitions may be used. Where the mask applies will be +// clearly documented together with its declaration in the API. In +// any case, the effect on the returned resource/resources is required +// behavior for APIs. +// +// # Field Masks in Update Operations +// +// A field mask in update operations specifies which fields of the +// targeted resource are going to be updated. The API is required +// to only change the values of the fields as specified in the mask +// and leave the others untouched. If a resource is passed in to +// describe the updated values, the API ignores the values of all +// fields not covered by the mask. +// +// If a repeated field is specified for an update operation, new values will +// be appended to the existing repeated field in the target resource. Note that +// a repeated field is only allowed in the last position of a `paths` string. +// +// If a sub-message is specified in the last position of the field mask for an +// update operation, then new value will be merged into the existing sub-message +// in the target resource. +// +// For example, given the target message: +// +// f { +// b { +// d: 1 +// x: 2 +// } +// c: [1] +// } +// +// And an update message: +// +// f { +// b { +// d: 10 +// } +// c: [2] +// } +// +// then if the field mask is: +// +// paths: ["f.b", "f.c"] +// +// then the result will be: +// +// f { +// b { +// d: 10 +// x: 2 +// } +// c: [1, 2] +// } +// +// An implementation may provide options to override this default behavior for +// repeated and message fields. +// +// In order to reset a field's value to the default, the field must +// be in the mask and set to the default value in the provided resource. +// Hence, in order to reset all fields of a resource, provide a default +// instance of the resource and set all fields in the mask, or do +// not provide a mask as described below. +// +// If a field mask is not present on update, the operation applies to +// all fields (as if a field mask of all fields has been specified). +// Note that in the presence of schema evolution, this may mean that +// fields the client does not know and has therefore not filled into +// the request will be reset to their default. 
If this is unwanted +// behavior, a specific service may require a client to always specify +// a field mask, producing an error if not. +// +// As with get operations, the location of the resource which +// describes the updated values in the request message depends on the +// operation kind. In any case, the effect of the field mask is +// required to be honored by the API. +// +// ## Considerations for HTTP REST +// +// The HTTP kind of an update operation which uses a field mask must +// be set to PATCH instead of PUT in order to satisfy HTTP semantics +// (PUT must only be used for full updates). +// +// # JSON Encoding of Field Masks +// +// In JSON, a field mask is encoded as a single string where paths are +// separated by a comma. Fields name in each path are converted +// to/from lower-camel naming conventions. +// +// As an example, consider the following message declarations: +// +// message Profile { +// User user = 1; +// Photo photo = 2; +// } +// message User { +// string display_name = 1; +// string address = 2; +// } +// +// In proto a field mask for `Profile` may look as such: +// +// mask { +// paths: "user.display_name" +// paths: "photo" +// } +// +// In JSON, the same mask is represented as below: +// +// { +// mask: "user.displayName,photo" +// } +// +// # Field Masks and Oneof Fields +// +// Field masks treat fields in oneofs just as regular fields. Consider the +// following message: +// +// message SampleMessage { +// oneof test_oneof { +// string name = 4; +// SubMessage sub_message = 9; +// } +// } +// +// The field mask can be: +// +// mask { +// paths: "name" +// } +// +// Or: +// +// mask { +// paths: "sub_message" +// } +// +// Note that oneof type names ("test_oneof" in this case) cannot be used in +// paths. +// +// ## Field Mask Verification +// +// The implementation of any API method which has a FieldMask type field in the +// request should verify the included field paths, and return an +// `INVALID_ARGUMENT` error if any path is duplicated or unmappable. +type FieldMask struct { + // The set of field mask paths. 
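+ // For example (illustrative only, mirroring the `Profile` mask shown above), a
+ // populated message would carry Paths: []string{"user.display_name", "photo"}.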
+ Paths []string `protobuf:"bytes,1,rep,name=paths,proto3" json:"paths,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldMask) Reset() { *m = FieldMask{} } +func (m *FieldMask) String() string { return proto.CompactTextString(m) } +func (*FieldMask) ProtoMessage() {} +func (*FieldMask) Descriptor() ([]byte, []int) { + return fileDescriptor_field_mask_8b509cd473f0dfb7, []int{0} +} +func (m *FieldMask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldMask.Unmarshal(m, b) +} +func (m *FieldMask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldMask.Marshal(b, m, deterministic) +} +func (dst *FieldMask) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldMask.Merge(dst, src) +} +func (m *FieldMask) XXX_Size() int { + return xxx_messageInfo_FieldMask.Size(m) +} +func (m *FieldMask) XXX_DiscardUnknown() { + xxx_messageInfo_FieldMask.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldMask proto.InternalMessageInfo + +func (m *FieldMask) GetPaths() []string { + if m != nil { + return m.Paths + } + return nil +} + +func init() { + proto.RegisterType((*FieldMask)(nil), "google.protobuf.FieldMask") +} + +func init() { + proto.RegisterFile("google/protobuf/field_mask.proto", fileDescriptor_field_mask_8b509cd473f0dfb7) +} + +var fileDescriptor_field_mask_8b509cd473f0dfb7 = []byte{ + // 175 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcb, 0x4c, 0xcd, + 0x49, 0x89, 0xcf, 0x4d, 0x2c, 0xce, 0xd6, 0x03, 0x8b, 0x09, 0xf1, 0x43, 0x54, 0xe8, 0xc1, 0x54, + 0x28, 0x29, 0x72, 0x71, 0xba, 0x81, 0x14, 0xf9, 0x26, 0x16, 0x67, 0x0b, 0x89, 0x70, 0xb1, 0x16, + 0x24, 0x96, 0x64, 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x06, 0x41, 0x38, 0x4e, 0x3d, 0x8c, + 0x5c, 0xc2, 0xc9, 0xf9, 0xb9, 0x7a, 0x68, 0x5a, 0x9d, 0xf8, 0xe0, 0x1a, 0x03, 0x40, 0x42, 0x01, + 0x8c, 0x51, 0x96, 0x50, 0x25, 0xe9, 0xf9, 0x39, 0x89, 0x79, 0xe9, 0x7a, 0xf9, 0x45, 0xe9, 0xfa, + 0xe9, 0xa9, 0x79, 0x60, 0x0d, 0xd8, 0xdc, 0x64, 0x8d, 0x60, 0xfe, 0x60, 0x64, 0x5c, 0xc4, 0xc4, + 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0x62, 0x48, 0x00, 0x54, 0x83, 0x5e, 0x78, 0x6a, + 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x48, 0x65, 0x41, 0x6a, 0x71, 0x12, 0x1b, 0xd8, 0x24, + 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfd, 0xda, 0xb7, 0xa8, 0xed, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/protobuf/ptype/type.pb.go b/vendor/google.golang.org/genproto/protobuf/ptype/type.pb.go new file mode 100644 index 0000000..c78f1aa --- /dev/null +++ b/vendor/google.golang.org/genproto/protobuf/ptype/type.pb.go @@ -0,0 +1,641 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/type.proto + +package ptype // import "google.golang.org/genproto/protobuf/ptype" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import source_context "google.golang.org/genproto/protobuf/source_context" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The syntax in which a protocol buffer element is defined. +type Syntax int32 + +const ( + // Syntax `proto2`. + Syntax_SYNTAX_PROTO2 Syntax = 0 + // Syntax `proto3`. + Syntax_SYNTAX_PROTO3 Syntax = 1 +) + +var Syntax_name = map[int32]string{ + 0: "SYNTAX_PROTO2", + 1: "SYNTAX_PROTO3", +} +var Syntax_value = map[string]int32{ + "SYNTAX_PROTO2": 0, + "SYNTAX_PROTO3": 1, +} + +func (x Syntax) String() string { + return proto.EnumName(Syntax_name, int32(x)) +} +func (Syntax) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{0} +} + +// Basic field types. +type Field_Kind int32 + +const ( + // Field type unknown. + Field_TYPE_UNKNOWN Field_Kind = 0 + // Field type double. + Field_TYPE_DOUBLE Field_Kind = 1 + // Field type float. + Field_TYPE_FLOAT Field_Kind = 2 + // Field type int64. + Field_TYPE_INT64 Field_Kind = 3 + // Field type uint64. + Field_TYPE_UINT64 Field_Kind = 4 + // Field type int32. + Field_TYPE_INT32 Field_Kind = 5 + // Field type fixed64. + Field_TYPE_FIXED64 Field_Kind = 6 + // Field type fixed32. + Field_TYPE_FIXED32 Field_Kind = 7 + // Field type bool. + Field_TYPE_BOOL Field_Kind = 8 + // Field type string. + Field_TYPE_STRING Field_Kind = 9 + // Field type group. Proto2 syntax only, and deprecated. + Field_TYPE_GROUP Field_Kind = 10 + // Field type message. + Field_TYPE_MESSAGE Field_Kind = 11 + // Field type bytes. + Field_TYPE_BYTES Field_Kind = 12 + // Field type uint32. + Field_TYPE_UINT32 Field_Kind = 13 + // Field type enum. + Field_TYPE_ENUM Field_Kind = 14 + // Field type sfixed32. + Field_TYPE_SFIXED32 Field_Kind = 15 + // Field type sfixed64. + Field_TYPE_SFIXED64 Field_Kind = 16 + // Field type sint32. + Field_TYPE_SINT32 Field_Kind = 17 + // Field type sint64. + Field_TYPE_SINT64 Field_Kind = 18 +) + +var Field_Kind_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "TYPE_DOUBLE", + 2: "TYPE_FLOAT", + 3: "TYPE_INT64", + 4: "TYPE_UINT64", + 5: "TYPE_INT32", + 6: "TYPE_FIXED64", + 7: "TYPE_FIXED32", + 8: "TYPE_BOOL", + 9: "TYPE_STRING", + 10: "TYPE_GROUP", + 11: "TYPE_MESSAGE", + 12: "TYPE_BYTES", + 13: "TYPE_UINT32", + 14: "TYPE_ENUM", + 15: "TYPE_SFIXED32", + 16: "TYPE_SFIXED64", + 17: "TYPE_SINT32", + 18: "TYPE_SINT64", +} +var Field_Kind_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "TYPE_DOUBLE": 1, + "TYPE_FLOAT": 2, + "TYPE_INT64": 3, + "TYPE_UINT64": 4, + "TYPE_INT32": 5, + "TYPE_FIXED64": 6, + "TYPE_FIXED32": 7, + "TYPE_BOOL": 8, + "TYPE_STRING": 9, + "TYPE_GROUP": 10, + "TYPE_MESSAGE": 11, + "TYPE_BYTES": 12, + "TYPE_UINT32": 13, + "TYPE_ENUM": 14, + "TYPE_SFIXED32": 15, + "TYPE_SFIXED64": 16, + "TYPE_SINT32": 17, + "TYPE_SINT64": 18, +} + +func (x Field_Kind) String() string { + return proto.EnumName(Field_Kind_name, int32(x)) +} +func (Field_Kind) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{1, 0} +} + +// Whether a field is optional, required, or repeated. +type Field_Cardinality int32 + +const ( + // For fields with unknown cardinality. + Field_CARDINALITY_UNKNOWN Field_Cardinality = 0 + // For optional fields. + Field_CARDINALITY_OPTIONAL Field_Cardinality = 1 + // For required fields. Proto2 syntax only. + Field_CARDINALITY_REQUIRED Field_Cardinality = 2 + // For repeated fields. 
+ Field_CARDINALITY_REPEATED Field_Cardinality = 3 +) + +var Field_Cardinality_name = map[int32]string{ + 0: "CARDINALITY_UNKNOWN", + 1: "CARDINALITY_OPTIONAL", + 2: "CARDINALITY_REQUIRED", + 3: "CARDINALITY_REPEATED", +} +var Field_Cardinality_value = map[string]int32{ + "CARDINALITY_UNKNOWN": 0, + "CARDINALITY_OPTIONAL": 1, + "CARDINALITY_REQUIRED": 2, + "CARDINALITY_REPEATED": 3, +} + +func (x Field_Cardinality) String() string { + return proto.EnumName(Field_Cardinality_name, int32(x)) +} +func (Field_Cardinality) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{1, 1} +} + +// A protocol buffer message type. +type Type struct { + // The fully qualified message name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The list of fields. + Fields []*Field `protobuf:"bytes,2,rep,name=fields,proto3" json:"fields,omitempty"` + // The list of types appearing in `oneof` definitions in this type. + Oneofs []string `protobuf:"bytes,3,rep,name=oneofs,proto3" json:"oneofs,omitempty"` + // The protocol buffer options. + Options []*Option `protobuf:"bytes,4,rep,name=options,proto3" json:"options,omitempty"` + // The source context. + SourceContext *source_context.SourceContext `protobuf:"bytes,5,opt,name=source_context,json=sourceContext,proto3" json:"source_context,omitempty"` + // The source syntax. + Syntax Syntax `protobuf:"varint,6,opt,name=syntax,proto3,enum=google.protobuf.Syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Type) Reset() { *m = Type{} } +func (m *Type) String() string { return proto.CompactTextString(m) } +func (*Type) ProtoMessage() {} +func (*Type) Descriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{0} +} +func (m *Type) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Type.Unmarshal(m, b) +} +func (m *Type) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Type.Marshal(b, m, deterministic) +} +func (dst *Type) XXX_Merge(src proto.Message) { + xxx_messageInfo_Type.Merge(dst, src) +} +func (m *Type) XXX_Size() int { + return xxx_messageInfo_Type.Size(m) +} +func (m *Type) XXX_DiscardUnknown() { + xxx_messageInfo_Type.DiscardUnknown(m) +} + +var xxx_messageInfo_Type proto.InternalMessageInfo + +func (m *Type) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Type) GetFields() []*Field { + if m != nil { + return m.Fields + } + return nil +} + +func (m *Type) GetOneofs() []string { + if m != nil { + return m.Oneofs + } + return nil +} + +func (m *Type) GetOptions() []*Option { + if m != nil { + return m.Options + } + return nil +} + +func (m *Type) GetSourceContext() *source_context.SourceContext { + if m != nil { + return m.SourceContext + } + return nil +} + +func (m *Type) GetSyntax() Syntax { + if m != nil { + return m.Syntax + } + return Syntax_SYNTAX_PROTO2 +} + +// A single field of a message type. +type Field struct { + // The field type. + Kind Field_Kind `protobuf:"varint,1,opt,name=kind,proto3,enum=google.protobuf.Field_Kind" json:"kind,omitempty"` + // The field cardinality. + Cardinality Field_Cardinality `protobuf:"varint,2,opt,name=cardinality,proto3,enum=google.protobuf.Field_Cardinality" json:"cardinality,omitempty"` + // The field number. + Number int32 `protobuf:"varint,3,opt,name=number,proto3" json:"number,omitempty"` + // The field name. 
+ Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // The field type URL, without the scheme, for message or enumeration + // types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + TypeUrl string `protobuf:"bytes,6,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + // The index of the field type in `Type.oneofs`, for message or enumeration + // types. The first type has index 1; zero means the type is not in the list. + OneofIndex int32 `protobuf:"varint,7,opt,name=oneof_index,json=oneofIndex,proto3" json:"oneof_index,omitempty"` + // Whether to use alternative packed wire representation. + Packed bool `protobuf:"varint,8,opt,name=packed,proto3" json:"packed,omitempty"` + // The protocol buffer options. + Options []*Option `protobuf:"bytes,9,rep,name=options,proto3" json:"options,omitempty"` + // The field JSON name. + JsonName string `protobuf:"bytes,10,opt,name=json_name,json=jsonName,proto3" json:"json_name,omitempty"` + // The string value of the default value of this field. Proto2 syntax only. + DefaultValue string `protobuf:"bytes,11,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Field) Reset() { *m = Field{} } +func (m *Field) String() string { return proto.CompactTextString(m) } +func (*Field) ProtoMessage() {} +func (*Field) Descriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{1} +} +func (m *Field) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Field.Unmarshal(m, b) +} +func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Field.Marshal(b, m, deterministic) +} +func (dst *Field) XXX_Merge(src proto.Message) { + xxx_messageInfo_Field.Merge(dst, src) +} +func (m *Field) XXX_Size() int { + return xxx_messageInfo_Field.Size(m) +} +func (m *Field) XXX_DiscardUnknown() { + xxx_messageInfo_Field.DiscardUnknown(m) +} + +var xxx_messageInfo_Field proto.InternalMessageInfo + +func (m *Field) GetKind() Field_Kind { + if m != nil { + return m.Kind + } + return Field_TYPE_UNKNOWN +} + +func (m *Field) GetCardinality() Field_Cardinality { + if m != nil { + return m.Cardinality + } + return Field_CARDINALITY_UNKNOWN +} + +func (m *Field) GetNumber() int32 { + if m != nil { + return m.Number + } + return 0 +} + +func (m *Field) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Field) GetTypeUrl() string { + if m != nil { + return m.TypeUrl + } + return "" +} + +func (m *Field) GetOneofIndex() int32 { + if m != nil { + return m.OneofIndex + } + return 0 +} + +func (m *Field) GetPacked() bool { + if m != nil { + return m.Packed + } + return false +} + +func (m *Field) GetOptions() []*Option { + if m != nil { + return m.Options + } + return nil +} + +func (m *Field) GetJsonName() string { + if m != nil { + return m.JsonName + } + return "" +} + +func (m *Field) GetDefaultValue() string { + if m != nil { + return m.DefaultValue + } + return "" +} + +// Enum type definition. +type Enum struct { + // Enum type name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Enum value definitions. + Enumvalue []*EnumValue `protobuf:"bytes,2,rep,name=enumvalue,proto3" json:"enumvalue,omitempty"` + // Protocol buffer options. + Options []*Option `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty"` + // The source context. 
+ SourceContext *source_context.SourceContext `protobuf:"bytes,4,opt,name=source_context,json=sourceContext,proto3" json:"source_context,omitempty"` + // The source syntax. + Syntax Syntax `protobuf:"varint,5,opt,name=syntax,proto3,enum=google.protobuf.Syntax" json:"syntax,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Enum) Reset() { *m = Enum{} } +func (m *Enum) String() string { return proto.CompactTextString(m) } +func (*Enum) ProtoMessage() {} +func (*Enum) Descriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{2} +} +func (m *Enum) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Enum.Unmarshal(m, b) +} +func (m *Enum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Enum.Marshal(b, m, deterministic) +} +func (dst *Enum) XXX_Merge(src proto.Message) { + xxx_messageInfo_Enum.Merge(dst, src) +} +func (m *Enum) XXX_Size() int { + return xxx_messageInfo_Enum.Size(m) +} +func (m *Enum) XXX_DiscardUnknown() { + xxx_messageInfo_Enum.DiscardUnknown(m) +} + +var xxx_messageInfo_Enum proto.InternalMessageInfo + +func (m *Enum) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Enum) GetEnumvalue() []*EnumValue { + if m != nil { + return m.Enumvalue + } + return nil +} + +func (m *Enum) GetOptions() []*Option { + if m != nil { + return m.Options + } + return nil +} + +func (m *Enum) GetSourceContext() *source_context.SourceContext { + if m != nil { + return m.SourceContext + } + return nil +} + +func (m *Enum) GetSyntax() Syntax { + if m != nil { + return m.Syntax + } + return Syntax_SYNTAX_PROTO2 +} + +// Enum value definition. +type EnumValue struct { + // Enum value name. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Enum value number. + Number int32 `protobuf:"varint,2,opt,name=number,proto3" json:"number,omitempty"` + // Protocol buffer options. + Options []*Option `protobuf:"bytes,3,rep,name=options,proto3" json:"options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumValue) Reset() { *m = EnumValue{} } +func (m *EnumValue) String() string { return proto.CompactTextString(m) } +func (*EnumValue) ProtoMessage() {} +func (*EnumValue) Descriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{3} +} +func (m *EnumValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumValue.Unmarshal(m, b) +} +func (m *EnumValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumValue.Marshal(b, m, deterministic) +} +func (dst *EnumValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumValue.Merge(dst, src) +} +func (m *EnumValue) XXX_Size() int { + return xxx_messageInfo_EnumValue.Size(m) +} +func (m *EnumValue) XXX_DiscardUnknown() { + xxx_messageInfo_EnumValue.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumValue proto.InternalMessageInfo + +func (m *EnumValue) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EnumValue) GetNumber() int32 { + if m != nil { + return m.Number + } + return 0 +} + +func (m *EnumValue) GetOptions() []*Option { + if m != nil { + return m.Options + } + return nil +} + +// A protocol buffer option, which can be attached to a message, field, +// enumeration, etc. +type Option struct { + // The option's name. 
For protobuf built-in options (options defined in + // descriptor.proto), this is the short name. For example, `"map_entry"`. + // For custom options, it should be the fully-qualified name. For example, + // `"google.api.http"`. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The option's value packed in an Any message. If the value is a primitive, + // the corresponding wrapper type defined in google/protobuf/wrappers.proto + // should be used. If the value is an enum, it should be stored as an int32 + // value using the google.protobuf.Int32Value type. + Value *any.Any `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Option) Reset() { *m = Option{} } +func (m *Option) String() string { return proto.CompactTextString(m) } +func (*Option) ProtoMessage() {} +func (*Option) Descriptor() ([]byte, []int) { + return fileDescriptor_type_9c5a06c7d02c042f, []int{4} +} +func (m *Option) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Option.Unmarshal(m, b) +} +func (m *Option) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Option.Marshal(b, m, deterministic) +} +func (dst *Option) XXX_Merge(src proto.Message) { + xxx_messageInfo_Option.Merge(dst, src) +} +func (m *Option) XXX_Size() int { + return xxx_messageInfo_Option.Size(m) +} +func (m *Option) XXX_DiscardUnknown() { + xxx_messageInfo_Option.DiscardUnknown(m) +} + +var xxx_messageInfo_Option proto.InternalMessageInfo + +func (m *Option) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Option) GetValue() *any.Any { + if m != nil { + return m.Value + } + return nil +} + +func init() { + proto.RegisterType((*Type)(nil), "google.protobuf.Type") + proto.RegisterType((*Field)(nil), "google.protobuf.Field") + proto.RegisterType((*Enum)(nil), "google.protobuf.Enum") + proto.RegisterType((*EnumValue)(nil), "google.protobuf.EnumValue") + proto.RegisterType((*Option)(nil), "google.protobuf.Option") + proto.RegisterEnum("google.protobuf.Syntax", Syntax_name, Syntax_value) + proto.RegisterEnum("google.protobuf.Field_Kind", Field_Kind_name, Field_Kind_value) + proto.RegisterEnum("google.protobuf.Field_Cardinality", Field_Cardinality_name, Field_Cardinality_value) +} + +func init() { proto.RegisterFile("google/protobuf/type.proto", fileDescriptor_type_9c5a06c7d02c042f) } + +var fileDescriptor_type_9c5a06c7d02c042f = []byte{ + // 810 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcd, 0x8e, 0xda, 0x56, + 0x14, 0x8e, 0x8d, 0xf1, 0xe0, 0xc3, 0xc0, 0xdc, 0xdc, 0x44, 0x89, 0x33, 0x91, 0x52, 0x44, 0xbb, + 0x40, 0x59, 0x80, 0x0a, 0xa3, 0x51, 0xa5, 0xae, 0x60, 0xf0, 0x50, 0x6b, 0x88, 0xed, 0x5e, 0x4c, + 0x93, 0xe9, 0x06, 0x79, 0xe0, 0x0e, 0x22, 0x31, 0xd7, 0x08, 0xdb, 0xed, 0xb0, 0xe8, 0x23, 0xf4, + 0x25, 0xba, 0xec, 0xba, 0x0f, 0xd1, 0x47, 0xea, 0xae, 0xd5, 0xbd, 0x06, 0x63, 0x7e, 0x2a, 0x4d, + 0x9b, 0xcd, 0x68, 0xce, 0xf7, 0x7d, 0xe7, 0xf7, 0x1e, 0x8e, 0xe1, 0x7c, 0x1a, 0x04, 0x53, 0x9f, + 0x36, 0x16, 0xcb, 0x20, 0x0a, 0xee, 0xe2, 0xfb, 0x46, 0xb4, 0x5a, 0xd0, 0xba, 0xb0, 0xf0, 0x59, + 0xc2, 0xd5, 0x37, 0xdc, 0xf9, 0xab, 0x7d, 0xb1, 0xc7, 0x56, 0x09, 0x7b, 0xfe, 0xd5, 0x3e, 0x15, + 0x06, 0xf1, 0x72, 0x4c, 0x47, 0xe3, 0x80, 0x45, 0xf4, 0x21, 0x4a, 0x54, 0xd5, 0x5f, 0x65, 0x50, + 0xdc, 0xd5, 0x82, 0x62, 0x0c, 0x0a, 0xf3, 0xe6, 0x54, 0x97, 0x2a, 
0x52, 0x4d, 0x23, 0xe2, 0x7f, + 0x5c, 0x07, 0xf5, 0x7e, 0x46, 0xfd, 0x49, 0xa8, 0xcb, 0x95, 0x5c, 0xad, 0xd8, 0x7c, 0x51, 0xdf, + 0xcb, 0x5f, 0xbf, 0xe6, 0x34, 0x59, 0xab, 0xf0, 0x0b, 0x50, 0x03, 0x46, 0x83, 0xfb, 0x50, 0xcf, + 0x55, 0x72, 0x35, 0x8d, 0xac, 0x2d, 0xfc, 0x35, 0x9c, 0x04, 0x8b, 0x68, 0x16, 0xb0, 0x50, 0x57, + 0x44, 0xa0, 0x97, 0x07, 0x81, 0x6c, 0xc1, 0x93, 0x8d, 0x0e, 0x1b, 0x50, 0xde, 0xad, 0x57, 0xcf, + 0x57, 0xa4, 0x5a, 0xb1, 0xf9, 0xe6, 0xc0, 0x73, 0x20, 0x64, 0x57, 0x89, 0x8a, 0x94, 0xc2, 0xac, + 0x89, 0x1b, 0xa0, 0x86, 0x2b, 0x16, 0x79, 0x0f, 0xba, 0x5a, 0x91, 0x6a, 0xe5, 0x23, 0x89, 0x07, + 0x82, 0x26, 0x6b, 0x59, 0xf5, 0x0f, 0x15, 0xf2, 0xa2, 0x29, 0xdc, 0x00, 0xe5, 0xd3, 0x8c, 0x4d, + 0xc4, 0x40, 0xca, 0xcd, 0xd7, 0xc7, 0x5b, 0xaf, 0xdf, 0xcc, 0xd8, 0x84, 0x08, 0x21, 0xee, 0x42, + 0x71, 0xec, 0x2d, 0x27, 0x33, 0xe6, 0xf9, 0xb3, 0x68, 0xa5, 0xcb, 0xc2, 0xaf, 0xfa, 0x2f, 0x7e, + 0x57, 0x5b, 0x25, 0xc9, 0xba, 0xf1, 0x19, 0xb2, 0x78, 0x7e, 0x47, 0x97, 0x7a, 0xae, 0x22, 0xd5, + 0xf2, 0x64, 0x6d, 0xa5, 0xef, 0xa3, 0x64, 0xde, 0xe7, 0x15, 0x14, 0xf8, 0x72, 0x8c, 0xe2, 0xa5, + 0x2f, 0xfa, 0xd3, 0xc8, 0x09, 0xb7, 0x87, 0x4b, 0x1f, 0x7f, 0x01, 0x45, 0x31, 0xfc, 0xd1, 0x8c, + 0x4d, 0xe8, 0x83, 0x7e, 0x22, 0x62, 0x81, 0x80, 0x4c, 0x8e, 0xf0, 0x3c, 0x0b, 0x6f, 0xfc, 0x89, + 0x4e, 0xf4, 0x42, 0x45, 0xaa, 0x15, 0xc8, 0xda, 0xca, 0xbe, 0x95, 0xf6, 0xc8, 0xb7, 0x7a, 0x0d, + 0xda, 0xc7, 0x30, 0x60, 0x23, 0x51, 0x1f, 0x88, 0x3a, 0x0a, 0x1c, 0xb0, 0x78, 0x8d, 0x5f, 0x42, + 0x69, 0x42, 0xef, 0xbd, 0xd8, 0x8f, 0x46, 0x3f, 0x79, 0x7e, 0x4c, 0xf5, 0xa2, 0x10, 0x9c, 0xae, + 0xc1, 0x1f, 0x38, 0x56, 0xfd, 0x53, 0x06, 0x85, 0x4f, 0x12, 0x23, 0x38, 0x75, 0x6f, 0x1d, 0x63, + 0x34, 0xb4, 0x6e, 0x2c, 0xfb, 0xbd, 0x85, 0x9e, 0xe0, 0x33, 0x28, 0x0a, 0xa4, 0x6b, 0x0f, 0x3b, + 0x7d, 0x03, 0x49, 0xb8, 0x0c, 0x20, 0x80, 0xeb, 0xbe, 0xdd, 0x76, 0x91, 0x9c, 0xda, 0xa6, 0xe5, + 0x5e, 0x5e, 0xa0, 0x5c, 0xea, 0x30, 0x4c, 0x00, 0x25, 0x2b, 0x68, 0x35, 0x51, 0x3e, 0xcd, 0x71, + 0x6d, 0x7e, 0x30, 0xba, 0x97, 0x17, 0x48, 0xdd, 0x45, 0x5a, 0x4d, 0x74, 0x82, 0x4b, 0xa0, 0x09, + 0xa4, 0x63, 0xdb, 0x7d, 0x54, 0x48, 0x63, 0x0e, 0x5c, 0x62, 0x5a, 0x3d, 0xa4, 0xa5, 0x31, 0x7b, + 0xc4, 0x1e, 0x3a, 0x08, 0xd2, 0x08, 0xef, 0x8c, 0xc1, 0xa0, 0xdd, 0x33, 0x50, 0x31, 0x55, 0x74, + 0x6e, 0x5d, 0x63, 0x80, 0x4e, 0x77, 0xca, 0x6a, 0x35, 0x51, 0x29, 0x4d, 0x61, 0x58, 0xc3, 0x77, + 0xa8, 0x8c, 0x9f, 0x42, 0x29, 0x49, 0xb1, 0x29, 0xe2, 0x6c, 0x0f, 0xba, 0xbc, 0x40, 0x68, 0x5b, + 0x48, 0x12, 0xe5, 0xe9, 0x0e, 0x70, 0x79, 0x81, 0x70, 0x35, 0x82, 0x62, 0x66, 0xb7, 0xf0, 0x4b, + 0x78, 0x76, 0xd5, 0x26, 0x5d, 0xd3, 0x6a, 0xf7, 0x4d, 0xf7, 0x36, 0x33, 0x57, 0x1d, 0x9e, 0x67, + 0x09, 0xdb, 0x71, 0x4d, 0xdb, 0x6a, 0xf7, 0x91, 0xb4, 0xcf, 0x10, 0xe3, 0xfb, 0xa1, 0x49, 0x8c, + 0x2e, 0x92, 0x0f, 0x19, 0xc7, 0x68, 0xbb, 0x46, 0x17, 0xe5, 0xaa, 0x7f, 0x4b, 0xa0, 0x18, 0x2c, + 0x9e, 0x1f, 0x3d, 0x23, 0xdf, 0x80, 0x46, 0x59, 0x3c, 0x4f, 0x9e, 0x3f, 0xb9, 0x24, 0xe7, 0x07, + 0x4b, 0xc5, 0xbd, 0xc5, 0x32, 0x90, 0xad, 0x38, 0xbb, 0x8c, 0xb9, 0xff, 0x7d, 0x38, 0x94, 0xcf, + 0x3b, 0x1c, 0xf9, 0xc7, 0x1d, 0x8e, 0x8f, 0xa0, 0xa5, 0x2d, 0x1c, 0x9d, 0xc2, 0xf6, 0x87, 0x2d, + 0xef, 0xfc, 0xb0, 0xff, 0x7b, 0x8f, 0xd5, 0xef, 0x40, 0x4d, 0xa0, 0xa3, 0x89, 0xde, 0x42, 0x7e, + 0x33, 0x6a, 0xde, 0xf8, 0xf3, 0x83, 0x70, 0x6d, 0xb6, 0x22, 0x89, 0xe4, 0x6d, 0x1d, 0xd4, 0xa4, + 0x0f, 0xbe, 0x6c, 0x83, 0x5b, 0xcb, 0x6d, 0x7f, 0x18, 0x39, 0xc4, 0x76, 0xed, 0x26, 0x7a, 0xb2, + 0x0f, 0xb5, 0x90, 0xd4, 0xf9, 0x05, 0x9e, 0x8d, 0x83, 0xf9, 0x7e, 0xc4, 0x8e, 0xc6, 0x3f, 
0x21, + 0x0e, 0xb7, 0x1c, 0xe9, 0xc7, 0xc6, 0x9a, 0x9d, 0x06, 0xbe, 0xc7, 0xa6, 0xf5, 0x60, 0x39, 0x6d, + 0x4c, 0x29, 0x13, 0xda, 0xed, 0xc7, 0x68, 0xc1, 0x0f, 0xd5, 0xb7, 0xe2, 0xef, 0x5f, 0x92, 0xf4, + 0x9b, 0x9c, 0xeb, 0x39, 0x9d, 0xdf, 0xe5, 0x37, 0xbd, 0xc4, 0xd5, 0xd9, 0x94, 0xfa, 0x9e, 0xfa, + 0xfe, 0x0d, 0x0b, 0x7e, 0x66, 0x3c, 0x41, 0x78, 0xa7, 0x0a, 0xff, 0xd6, 0x3f, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x6d, 0x2b, 0xc0, 0xd8, 0x24, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/protobuf/source_context/source_context.pb.go b/vendor/google.golang.org/genproto/protobuf/source_context/source_context.pb.go new file mode 100644 index 0000000..2e99dc3 --- /dev/null +++ b/vendor/google.golang.org/genproto/protobuf/source_context/source_context.pb.go @@ -0,0 +1,85 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: google/protobuf/source_context.proto + +package source_context // import "google.golang.org/genproto/protobuf/source_context" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// `SourceContext` represents information about the source of a +// protobuf element, like the file in which it is defined. +type SourceContext struct { + // The path-qualified name of the .proto file that contained the associated + // protobuf element. For example: `"google/protobuf/source_context.proto"`. 
+ FileName string `protobuf:"bytes,1,opt,name=file_name,json=fileName,proto3" json:"file_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SourceContext) Reset() { *m = SourceContext{} } +func (m *SourceContext) String() string { return proto.CompactTextString(m) } +func (*SourceContext) ProtoMessage() {} +func (*SourceContext) Descriptor() ([]byte, []int) { + return fileDescriptor_source_context_145ed41f088ecfc0, []int{0} +} +func (m *SourceContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SourceContext.Unmarshal(m, b) +} +func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SourceContext.Marshal(b, m, deterministic) +} +func (dst *SourceContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_SourceContext.Merge(dst, src) +} +func (m *SourceContext) XXX_Size() int { + return xxx_messageInfo_SourceContext.Size(m) +} +func (m *SourceContext) XXX_DiscardUnknown() { + xxx_messageInfo_SourceContext.DiscardUnknown(m) +} + +var xxx_messageInfo_SourceContext proto.InternalMessageInfo + +func (m *SourceContext) GetFileName() string { + if m != nil { + return m.FileName + } + return "" +} + +func init() { + proto.RegisterType((*SourceContext)(nil), "google.protobuf.SourceContext") +} + +func init() { + proto.RegisterFile("google/protobuf/source_context.proto", fileDescriptor_source_context_145ed41f088ecfc0) +} + +var fileDescriptor_source_context_145ed41f088ecfc0 = []byte{ + // 184 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x49, 0xcf, 0xcf, 0x4f, + 0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xce, 0x2f, 0x2d, + 0x4a, 0x4e, 0x8d, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xad, 0x28, 0xd1, 0x03, 0x8b, 0x0b, 0xf1, 0x43, + 0x54, 0xe9, 0xc1, 0x54, 0x29, 0xe9, 0x70, 0xf1, 0x06, 0x83, 0x15, 0x3a, 0x43, 0xd4, 0x09, 0x49, + 0x73, 0x71, 0xa6, 0x65, 0xe6, 0xa4, 0xc6, 0xe7, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a, + 0x70, 0x06, 0x71, 0x80, 0x04, 0xfc, 0x12, 0x73, 0x53, 0x9d, 0xa6, 0x32, 0x72, 0x09, 0x27, 0xe7, + 0xe7, 0xea, 0xa1, 0x99, 0xe2, 0x24, 0x84, 0x62, 0x46, 0x00, 0x48, 0x38, 0x80, 0x31, 0xca, 0x11, + 0xaa, 0x2c, 0x3d, 0x3f, 0x27, 0x31, 0x2f, 0x5d, 0x2f, 0xbf, 0x28, 0x5d, 0x3f, 0x3d, 0x35, 0x0f, + 0xac, 0x09, 0x97, 0x33, 0xad, 0x51, 0xb9, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, + 0xb9, 0x43, 0x4c, 0x0a, 0x80, 0xea, 0xd2, 0x0b, 0x4f, 0xcd, 0xc9, 0xf1, 0xce, 0xcb, 0x2f, 0xcf, + 0x0b, 0xa9, 0x2c, 0x48, 0x2d, 0x4e, 0x62, 0x03, 0x1b, 0x67, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, + 0x5c, 0xbd, 0xa4, 0x22, 0x05, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/genproto/regen.go b/vendor/google.golang.org/genproto/regen.go new file mode 100644 index 0000000..53779b9 --- /dev/null +++ b/vendor/google.golang.org/genproto/regen.go @@ -0,0 +1,137 @@ +// Copyright 2016 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// +build ignore + +// Regen.go regenerates the genproto repository. +// +// Regen.go recursively walks through each directory named by given arguments, +// looking for all .proto files. (Symlinks are not followed.) +// If the pkg_prefix flag is not an empty string, +// any proto file without `go_package` option +// or whose option does not begin with the prefix is ignored. +// If multiple roots contain files with the same name, +// eg "root1/path/to/file" and "root2/path/to/file", +// only the first file is processed; the rest are ignored. +// Protoc is executed on remaining files, +// one invocation per set of files declaring the same Go package. +package main + +import ( + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "strconv" + "strings" +) + +var goPkgOptRe = regexp.MustCompile(`(?m)^option go_package = (.*);`) + +func usage() { + fmt.Fprintln(os.Stderr, `usage: go run regen.go -go_out=path/to/output [-pkg_prefix=pkg/prefix] roots... + +Most users will not need to run this file directly. +To regenerate this repository, run regen.sh instead.`) + flag.PrintDefaults() +} + +func main() { + goOutDir := flag.String("go_out", "", "go_out argument to pass to protoc-gen-go") + pkgPrefix := flag.String("pkg_prefix", "", "only include proto files with go_package starting with this prefix") + flag.Usage = usage + flag.Parse() + + if *goOutDir == "" { + log.Fatal("need go_out flag") + } + + seenFiles := make(map[string]bool) + pkgFiles := make(map[string][]string) + for _, root := range flag.Args() { + walkFn := func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.Mode().IsRegular() || !strings.HasSuffix(path, ".proto") { + return nil + } + + switch rel, err := filepath.Rel(root, path); { + case err != nil: + return err + case seenFiles[rel]: + return nil + default: + seenFiles[rel] = true + } + + pkg, err := goPkg(path) + if err != nil { + return err + } + pkgFiles[pkg] = append(pkgFiles[pkg], path) + return nil + } + if err := filepath.Walk(root, walkFn); err != nil { + log.Fatal(err) + } + } + for pkg, fnames := range pkgFiles { + if !strings.HasPrefix(pkg, *pkgPrefix) { + continue + } + if out, err := protoc(*goOutDir, flag.Args(), fnames); err != nil { + log.Fatalf("error executing protoc: %s\n%s", err, out) + } + } +} + +// goPkg reports the import path declared in the given file's +// `go_package` option. If the option is missing, goPkg returns empty string. +func goPkg(fname string) (string, error) { + content, err := ioutil.ReadFile(fname) + if err != nil { + return "", err + } + + var pkgName string + if match := goPkgOptRe.FindSubmatch(content); len(match) > 0 { + pn, err := strconv.Unquote(string(match[1])) + if err != nil { + return "", err + } + pkgName = pn + } + if p := strings.IndexRune(pkgName, ';'); p > 0 { + pkgName = pkgName[:p] + } + return pkgName, nil +} + +// protoc executes the "protoc" command on files named in fnames, +// passing go_out and include flags specified in goOut and includes respectively. +// protoc returns combined output from stdout and stderr. +func protoc(goOut string, includes, fnames []string) ([]byte, error) { + args := []string{"--go_out=plugins=grpc:" + goOut} + for _, inc := range includes { + args = append(args, "-I", inc) + } + args = append(args, fnames...) 
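+ // The assembled invocation is therefore roughly:
+ //   protoc --go_out=plugins=grpc:<goOut> -I <include> ... <file.proto> ...
+ // where the placeholders stand in for the flag values and walked files above.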
+ return exec.Command("protoc", args...).CombinedOutput() +} diff --git a/vendor/google.golang.org/genproto/tools.go b/vendor/google.golang.org/genproto/tools.go new file mode 100644 index 0000000..375b314 --- /dev/null +++ b/vendor/google.golang.org/genproto/tools.go @@ -0,0 +1,32 @@ +// +build tools + +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This package exists to cause `go mod` and `go get` to believe these tools +// are dependencies, even though they are not runtime dependencies of any +// package (these are tools used by our CI builds). This means they will appear +// in our `go.mod` file, but will not be a part of the build. Also, since the +// build target is something non-existent, these should not be included in any +// binaries. + +package genproto + +import ( + _ "github.com/golang/protobuf/protoc-gen-go" + _ "golang.org/x/exp/cmd/apidiff" + _ "golang.org/x/lint/golint" + _ "golang.org/x/tools/cmd/goimports" + _ "honnef.co/go/tools/cmd/staticcheck" +) diff --git a/vendor/google.golang.org/grpc/AUTHORS b/vendor/google.golang.org/grpc/AUTHORS new file mode 100644 index 0000000..e491a9e --- /dev/null +++ b/vendor/google.golang.org/grpc/AUTHORS @@ -0,0 +1 @@ +Google Inc. diff --git a/vendor/google.golang.org/grpc/LICENSE b/vendor/google.golang.org/grpc/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/vendor/google.golang.org/grpc/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/google.golang.org/grpc/backoff.go b/vendor/google.golang.org/grpc/backoff.go new file mode 100644 index 0000000..97c6e25 --- /dev/null +++ b/vendor/google.golang.org/grpc/backoff.go @@ -0,0 +1,38 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// See internal/backoff package for the backoff implementation. This file is +// kept for the exported types and API backward compatibility. 
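// Illustrative sketch (not part of the vendored file): a minimal client that
// overrides the MaxDelay of the BackoffConfig type defined below in this file.
// The target address "example.local:50051" and the 30-second cap are
// hypothetical, chosen only to show how the option is wired up.
package main

import (
	"log"
	"time"

	"google.golang.org/grpc"
)

func main() {
	// WithBackoffConfig installs the custom reconnect backoff in place of
	// DefaultBackoffConfig; WithInsecure keeps the example short by skipping
	// transport security.
	conn, err := grpc.Dial("example.local:50051",
		grpc.WithInsecure(),
		grpc.WithBackoffConfig(grpc.BackoffConfig{MaxDelay: 30 * time.Second}),
	)
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()
}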
+ +package grpc + +import ( + "time" +) + +// DefaultBackoffConfig uses values specified for backoff in +// https://github.com/grpc/grpc/blob/master/doc/connection-backoff.md. +var DefaultBackoffConfig = BackoffConfig{ + MaxDelay: 120 * time.Second, +} + +// BackoffConfig defines the parameters for the default gRPC backoff strategy. +type BackoffConfig struct { + // MaxDelay is the upper bound of backoff delay. + MaxDelay time.Duration +} diff --git a/vendor/google.golang.org/grpc/balancer.go b/vendor/google.golang.org/grpc/balancer.go new file mode 100644 index 0000000..a78e702 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer.go @@ -0,0 +1,391 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "net" + "sync" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/naming" + "google.golang.org/grpc/status" +) + +// Address represents a server the client connects to. +// +// Deprecated: please use package balancer. +type Address struct { + // Addr is the server address on which a connection will be established. + Addr string + // Metadata is the information associated with Addr, which may be used + // to make load balancing decision. + Metadata interface{} +} + +// BalancerConfig specifies the configurations for Balancer. +// +// Deprecated: please use package balancer. +type BalancerConfig struct { + // DialCreds is the transport credential the Balancer implementation can + // use to dial to a remote load balancer server. The Balancer implementations + // can ignore this if it does not need to talk to another party securely. + DialCreds credentials.TransportCredentials + // Dialer is the custom dialer the Balancer implementation can use to dial + // to a remote load balancer server. The Balancer implementations + // can ignore this if it doesn't need to talk to remote balancer. + Dialer func(context.Context, string) (net.Conn, error) +} + +// BalancerGetOptions configures a Get call. +// +// Deprecated: please use package balancer. +type BalancerGetOptions struct { + // BlockingWait specifies whether Get should block when there is no + // connected address. + BlockingWait bool +} + +// Balancer chooses network addresses for RPCs. +// +// Deprecated: please use package balancer. +type Balancer interface { + // Start does the initialization work to bootstrap a Balancer. For example, + // this function may start the name resolution and watch the updates. It will + // be called when dialing. + Start(target string, config BalancerConfig) error + // Up informs the Balancer that gRPC has a connection to the server at + // addr. It returns down which is called once the connection to addr gets + // lost or closed. + // TODO: It is not clear how to construct and take advantage of the meaningful error + // parameter for down. Need realistic demands to guide. 
+ Up(addr Address) (down func(error)) + // Get gets the address of a server for the RPC corresponding to ctx. + // i) If it returns a connected address, gRPC internals issues the RPC on the + // connection to this address; + // ii) If it returns an address on which the connection is under construction + // (initiated by Notify(...)) but not connected, gRPC internals + // * fails RPC if the RPC is fail-fast and connection is in the TransientFailure or + // Shutdown state; + // or + // * issues RPC on the connection otherwise. + // iii) If it returns an address on which the connection does not exist, gRPC + // internals treats it as an error and will fail the corresponding RPC. + // + // Therefore, the following is the recommended rule when writing a custom Balancer. + // If opts.BlockingWait is true, it should return a connected address or + // block if there is no connected address. It should respect the timeout or + // cancellation of ctx when blocking. If opts.BlockingWait is false (for fail-fast + // RPCs), it should return an address it has notified via Notify(...) immediately + // instead of blocking. + // + // The function returns put which is called once the rpc has completed or failed. + // put can collect and report RPC stats to a remote load balancer. + // + // This function should only return the errors Balancer cannot recover by itself. + // gRPC internals will fail the RPC if an error is returned. + Get(ctx context.Context, opts BalancerGetOptions) (addr Address, put func(), err error) + // Notify returns a channel that is used by gRPC internals to watch the addresses + // gRPC needs to connect. The addresses might be from a name resolver or remote + // load balancer. gRPC internals will compare it with the existing connected + // addresses. If the address Balancer notified is not in the existing connected + // addresses, gRPC starts to connect the address. If an address in the existing + // connected addresses is not in the notification list, the corresponding connection + // is shutdown gracefully. Otherwise, there are no operations to take. Note that + // the Address slice must be the full list of the Addresses which should be connected. + // It is NOT delta. + Notify() <-chan []Address + // Close shuts down the balancer. + Close() error +} + +// RoundRobin returns a Balancer that selects addresses round-robin. It uses r to watch +// the name resolution updates and updates the addresses available correspondingly. +// +// Deprecated: please use package balancer/roundrobin. +func RoundRobin(r naming.Resolver) Balancer { + return &roundRobin{r: r} +} + +type addrInfo struct { + addr Address + connected bool +} + +type roundRobin struct { + r naming.Resolver + w naming.Watcher + addrs []*addrInfo // all the addresses the client should potentially connect + mu sync.Mutex + addrCh chan []Address // the channel to notify gRPC internals the list of addresses the client should connect to. + next int // index of the next address to return for Get() + waitCh chan struct{} // the channel to block when there is no connected address available + done bool // The Balancer is closed. 
+} + +func (rr *roundRobin) watchAddrUpdates() error { + updates, err := rr.w.Next() + if err != nil { + grpclog.Warningf("grpc: the naming watcher stops working due to %v.", err) + return err + } + rr.mu.Lock() + defer rr.mu.Unlock() + for _, update := range updates { + addr := Address{ + Addr: update.Addr, + Metadata: update.Metadata, + } + switch update.Op { + case naming.Add: + var exist bool + for _, v := range rr.addrs { + if addr == v.addr { + exist = true + grpclog.Infoln("grpc: The name resolver wanted to add an existing address: ", addr) + break + } + } + if exist { + continue + } + rr.addrs = append(rr.addrs, &addrInfo{addr: addr}) + case naming.Delete: + for i, v := range rr.addrs { + if addr == v.addr { + copy(rr.addrs[i:], rr.addrs[i+1:]) + rr.addrs = rr.addrs[:len(rr.addrs)-1] + break + } + } + default: + grpclog.Errorln("Unknown update.Op ", update.Op) + } + } + // Make a copy of rr.addrs and write it onto rr.addrCh so that gRPC internals gets notified. + open := make([]Address, len(rr.addrs)) + for i, v := range rr.addrs { + open[i] = v.addr + } + if rr.done { + return ErrClientConnClosing + } + select { + case <-rr.addrCh: + default: + } + rr.addrCh <- open + return nil +} + +func (rr *roundRobin) Start(target string, config BalancerConfig) error { + rr.mu.Lock() + defer rr.mu.Unlock() + if rr.done { + return ErrClientConnClosing + } + if rr.r == nil { + // If there is no name resolver installed, it is not needed to + // do name resolution. In this case, target is added into rr.addrs + // as the only address available and rr.addrCh stays nil. + rr.addrs = append(rr.addrs, &addrInfo{addr: Address{Addr: target}}) + return nil + } + w, err := rr.r.Resolve(target) + if err != nil { + return err + } + rr.w = w + rr.addrCh = make(chan []Address, 1) + go func() { + for { + if err := rr.watchAddrUpdates(); err != nil { + return + } + } + }() + return nil +} + +// Up sets the connected state of addr and sends notification if there are pending +// Get() calls. +func (rr *roundRobin) Up(addr Address) func(error) { + rr.mu.Lock() + defer rr.mu.Unlock() + var cnt int + for _, a := range rr.addrs { + if a.addr == addr { + if a.connected { + return nil + } + a.connected = true + } + if a.connected { + cnt++ + } + } + // addr is only one which is connected. Notify the Get() callers who are blocking. + if cnt == 1 && rr.waitCh != nil { + close(rr.waitCh) + rr.waitCh = nil + } + return func(err error) { + rr.down(addr, err) + } +} + +// down unsets the connected state of addr. +func (rr *roundRobin) down(addr Address, err error) { + rr.mu.Lock() + defer rr.mu.Unlock() + for _, a := range rr.addrs { + if addr == a.addr { + a.connected = false + break + } + } +} + +// Get returns the next addr in the rotation. +func (rr *roundRobin) Get(ctx context.Context, opts BalancerGetOptions) (addr Address, put func(), err error) { + var ch chan struct{} + rr.mu.Lock() + if rr.done { + rr.mu.Unlock() + err = ErrClientConnClosing + return + } + + if len(rr.addrs) > 0 { + if rr.next >= len(rr.addrs) { + rr.next = 0 + } + next := rr.next + for { + a := rr.addrs[next] + next = (next + 1) % len(rr.addrs) + if a.connected { + addr = a.addr + rr.next = next + rr.mu.Unlock() + return + } + if next == rr.next { + // Has iterated all the possible address but none is connected. + break + } + } + } + if !opts.BlockingWait { + if len(rr.addrs) == 0 { + rr.mu.Unlock() + err = status.Errorf(codes.Unavailable, "there is no address available") + return + } + // Returns the next addr on rr.addrs for failfast RPCs. 
+ addr = rr.addrs[rr.next].addr + rr.next++ + rr.mu.Unlock() + return + } + // Wait on rr.waitCh for non-failfast RPCs. + if rr.waitCh == nil { + ch = make(chan struct{}) + rr.waitCh = ch + } else { + ch = rr.waitCh + } + rr.mu.Unlock() + for { + select { + case <-ctx.Done(): + err = ctx.Err() + return + case <-ch: + rr.mu.Lock() + if rr.done { + rr.mu.Unlock() + err = ErrClientConnClosing + return + } + + if len(rr.addrs) > 0 { + if rr.next >= len(rr.addrs) { + rr.next = 0 + } + next := rr.next + for { + a := rr.addrs[next] + next = (next + 1) % len(rr.addrs) + if a.connected { + addr = a.addr + rr.next = next + rr.mu.Unlock() + return + } + if next == rr.next { + // Has iterated all the possible address but none is connected. + break + } + } + } + // The newly added addr got removed by Down() again. + if rr.waitCh == nil { + ch = make(chan struct{}) + rr.waitCh = ch + } else { + ch = rr.waitCh + } + rr.mu.Unlock() + } + } +} + +func (rr *roundRobin) Notify() <-chan []Address { + return rr.addrCh +} + +func (rr *roundRobin) Close() error { + rr.mu.Lock() + defer rr.mu.Unlock() + if rr.done { + return errBalancerClosed + } + rr.done = true + if rr.w != nil { + rr.w.Close() + } + if rr.waitCh != nil { + close(rr.waitCh) + rr.waitCh = nil + } + if rr.addrCh != nil { + close(rr.addrCh) + } + return nil +} + +// pickFirst is used to test multi-addresses in one addrConn in which all addresses share the same addrConn. +// It is a wrapper around roundRobin balancer. The logic of all methods works fine because balancer.Get() +// returns the only address Up by resetTransport(). +type pickFirst struct { + *roundRobin +} diff --git a/vendor/google.golang.org/grpc/balancer/balancer.go b/vendor/google.golang.org/grpc/balancer/balancer.go new file mode 100644 index 0000000..fafede2 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/balancer.go @@ -0,0 +1,336 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package balancer defines APIs for load balancing in gRPC. +// All APIs in this package are experimental. +package balancer + +import ( + "context" + "errors" + "net" + "strings" + + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/internal" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/resolver" +) + +var ( + // m is a map from name to balancer builder. + m = make(map[string]Builder) +) + +// Register registers the balancer builder to the balancer map. b.Name +// (lowercased) will be used as the name registered with this builder. +// +// NOTE: this function must only be called during initialization time (i.e. in +// an init() function), and is not thread-safe. If multiple Balancers are +// registered with the same name, the one registered last will take effect. +func Register(b Builder) { + m[strings.ToLower(b.Name())] = b +} + +// unregisterForTesting deletes the balancer with the given name from the +// balancer map. +// +// This function is not thread-safe. 
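// Illustrative sketch (not part of the vendored file): how the deprecated v1
// RoundRobin balancer from balancer.go above is attached to a ClientConn. With
// a nil naming.Resolver the dial target itself becomes the only address, as
// described in roundRobin.Start above. The address is hypothetical, and both
// WithBalancer and RoundRobin are deprecated in favor of the
// balancer/roundrobin package.
package main

import (
	"log"

	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("example.local:50051",
		grpc.WithInsecure(),
		grpc.WithBalancer(grpc.RoundRobin(nil)), // nil Resolver: dial target is the only address
	)
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()
}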
+func unregisterForTesting(name string) { + delete(m, name) +} + +func init() { + internal.BalancerUnregister = unregisterForTesting +} + +// Get returns the resolver builder registered with the given name. +// Note that the compare is done in a case-insensitive fashion. +// If no builder is register with the name, nil will be returned. +func Get(name string) Builder { + if b, ok := m[strings.ToLower(name)]; ok { + return b + } + return nil +} + +// SubConn represents a gRPC sub connection. +// Each sub connection contains a list of addresses. gRPC will +// try to connect to them (in sequence), and stop trying the +// remainder once one connection is successful. +// +// The reconnect backoff will be applied on the list, not a single address. +// For example, try_on_all_addresses -> backoff -> try_on_all_addresses. +// +// All SubConns start in IDLE, and will not try to connect. To trigger +// the connecting, Balancers must call Connect. +// When the connection encounters an error, it will reconnect immediately. +// When the connection becomes IDLE, it will not reconnect unless Connect is +// called. +// +// This interface is to be implemented by gRPC. Users should not need a +// brand new implementation of this interface. For the situations like +// testing, the new implementation should embed this interface. This allows +// gRPC to add new methods to this interface. +type SubConn interface { + // UpdateAddresses updates the addresses used in this SubConn. + // gRPC checks if currently-connected address is still in the new list. + // If it's in the list, the connection will be kept. + // If it's not in the list, the connection will gracefully closed, and + // a new connection will be created. + // + // This will trigger a state transition for the SubConn. + UpdateAddresses([]resolver.Address) + // Connect starts the connecting for this SubConn. + Connect() +} + +// NewSubConnOptions contains options to create new SubConn. +type NewSubConnOptions struct { + // CredsBundle is the credentials bundle that will be used in the created + // SubConn. If it's nil, the original creds from grpc DialOptions will be + // used. + CredsBundle credentials.Bundle + // HealthCheckEnabled indicates whether health check service should be + // enabled on this SubConn + HealthCheckEnabled bool +} + +// ClientConn represents a gRPC ClientConn. +// +// This interface is to be implemented by gRPC. Users should not need a +// brand new implementation of this interface. For the situations like +// testing, the new implementation should embed this interface. This allows +// gRPC to add new methods to this interface. +type ClientConn interface { + // NewSubConn is called by balancer to create a new SubConn. + // It doesn't block and wait for the connections to be established. + // Behaviors of the SubConn can be controlled by options. + NewSubConn([]resolver.Address, NewSubConnOptions) (SubConn, error) + // RemoveSubConn removes the SubConn from ClientConn. + // The SubConn will be shutdown. + RemoveSubConn(SubConn) + + // UpdateBalancerState is called by balancer to notify gRPC that some internal + // state in balancer has changed. + // + // gRPC will update the connectivity state of the ClientConn, and will call pick + // on the new picker to pick new SubConn. + UpdateBalancerState(s connectivity.State, p Picker) + + // ResolveNow is called by balancer to notify gRPC to do a name resolving. + ResolveNow(resolver.ResolveNowOption) + + // Target returns the dial target for this ClientConn. 
+ Target() string +} + +// BuildOptions contains additional information for Build. +type BuildOptions struct { + // DialCreds is the transport credential the Balancer implementation can + // use to dial to a remote load balancer server. The Balancer implementations + // can ignore this if it does not need to talk to another party securely. + DialCreds credentials.TransportCredentials + // CredsBundle is the credentials bundle that the Balancer can use. + CredsBundle credentials.Bundle + // Dialer is the custom dialer the Balancer implementation can use to dial + // to a remote load balancer server. The Balancer implementations + // can ignore this if it doesn't need to talk to remote balancer. + Dialer func(context.Context, string) (net.Conn, error) + // ChannelzParentID is the entity parent's channelz unique identification number. + ChannelzParentID int64 +} + +// Builder creates a balancer. +type Builder interface { + // Build creates a new balancer with the ClientConn. + Build(cc ClientConn, opts BuildOptions) Balancer + // Name returns the name of balancers built by this builder. + // It will be used to pick balancers (for example in service config). + Name() string +} + +// PickOptions contains addition information for the Pick operation. +type PickOptions struct { + // FullMethodName is the method name that NewClientStream() is called + // with. The canonical format is /service/Method. + FullMethodName string +} + +// DoneInfo contains additional information for done. +type DoneInfo struct { + // Err is the rpc error the RPC finished with. It could be nil. + Err error + // Trailer contains the metadata from the RPC's trailer, if present. + Trailer metadata.MD + // BytesSent indicates if any bytes have been sent to the server. + BytesSent bool + // BytesReceived indicates if any byte has been received from the server. + BytesReceived bool + // ServerLoad is the load received from server. It's usually sent as part of + // trailing metadata. + // + // The only supported type now is *orca_v1.LoadReport. + ServerLoad interface{} +} + +var ( + // ErrNoSubConnAvailable indicates no SubConn is available for pick(). + // gRPC will block the RPC until a new picker is available via UpdateBalancerState(). + ErrNoSubConnAvailable = errors.New("no SubConn is available") + // ErrTransientFailure indicates all SubConns are in TransientFailure. + // WaitForReady RPCs will block, non-WaitForReady RPCs will fail. + ErrTransientFailure = errors.New("all SubConns are in TransientFailure") +) + +// Picker is used by gRPC to pick a SubConn to send an RPC. +// Balancer is expected to generate a new picker from its snapshot every time its +// internal state has changed. +// +// The pickers used by gRPC can be updated by ClientConn.UpdateBalancerState(). +type Picker interface { + // Pick returns the SubConn to be used to send the RPC. + // The returned SubConn must be one returned by NewSubConn(). + // + // This functions is expected to return: + // - a SubConn that is known to be READY; + // - ErrNoSubConnAvailable if no SubConn is available, but progress is being + // made (for example, some SubConn is in CONNECTING mode); + // - other errors if no active connecting is happening (for example, all SubConn + // are in TRANSIENT_FAILURE mode). + // + // If a SubConn is returned: + // - If it is READY, gRPC will send the RPC on it; + // - If it is not ready, or becomes not ready after it's returned, gRPC will + // block until UpdateBalancerState() is called and will call pick on the + // new picker. 
The done function returned from Pick(), if not nil, will be + // called with nil error, no bytes sent and no bytes received. + // + // If the returned error is not nil: + // - If the error is ErrNoSubConnAvailable, gRPC will block until UpdateBalancerState() + // - If the error is ErrTransientFailure: + // - If the RPC is wait-for-ready, gRPC will block until UpdateBalancerState() + // is called to pick again; + // - Otherwise, RPC will fail with unavailable error. + // - Else (error is other non-nil error): + // - The RPC will fail with unavailable error. + // + // The returned done() function will be called once the rpc has finished, + // with the final status of that RPC. If the SubConn returned is not a + // valid SubConn type, done may not be called. done may be nil if balancer + // doesn't care about the RPC status. + Pick(ctx context.Context, opts PickOptions) (conn SubConn, done func(DoneInfo), err error) +} + +// Balancer takes input from gRPC, manages SubConns, and collects and aggregates +// the connectivity states. +// +// It also generates and updates the Picker used by gRPC to pick SubConns for RPCs. +// +// HandleSubConnectionStateChange, HandleResolvedAddrs and Close are guaranteed +// to be called synchronously from the same goroutine. +// There's no guarantee on picker.Pick, it may be called anytime. +type Balancer interface { + // HandleSubConnStateChange is called by gRPC when the connectivity state + // of sc has changed. + // Balancer is expected to aggregate all the state of SubConn and report + // that back to gRPC. + // Balancer should also generate and update Pickers when its internal state has + // been changed by the new state. + // + // Deprecated: if V2Balancer is implemented by the Balancer, + // UpdateSubConnState will be called instead. + HandleSubConnStateChange(sc SubConn, state connectivity.State) + // HandleResolvedAddrs is called by gRPC to send updated resolved addresses to + // balancers. + // Balancer can create new SubConn or remove SubConn with the addresses. + // An empty address slice and a non-nil error will be passed if the resolver returns + // non-nil error to gRPC. + // + // Deprecated: if V2Balancer is implemented by the Balancer, + // UpdateResolverState will be called instead. + HandleResolvedAddrs([]resolver.Address, error) + // Close closes the balancer. The balancer is not required to call + // ClientConn.RemoveSubConn for its existing SubConns. + Close() +} + +// SubConnState describes the state of a SubConn. +type SubConnState struct { + ConnectivityState connectivity.State + // TODO: add last connection error +} + +// V2Balancer is defined for documentation purposes. If a Balancer also +// implements V2Balancer, its UpdateResolverState method will be called instead +// of HandleResolvedAddrs and its UpdateSubConnState will be called instead of +// HandleSubConnStateChange. +type V2Balancer interface { + // UpdateResolverState is called by gRPC when the state of the resolver + // changes. + UpdateResolverState(resolver.State) + // UpdateSubConnState is called by gRPC when the state of a SubConn + // changes. + UpdateSubConnState(SubConn, SubConnState) + // Close closes the balancer. The balancer is not required to call + // ClientConn.RemoveSubConn for its existing SubConns. + Close() +} + +// ConnectivityStateEvaluator takes the connectivity states of multiple SubConns +// and returns one aggregated connectivity state. +// +// It's not thread safe. 
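// Illustrative sketch (not part of the vendored file): registering a custom
// balancer with Register from an init function, as the Register comment above
// prescribes. It leans on the base package helpers (PickerBuilder,
// NewBalancerBuilder, NewErrPicker) that appear later in this patch; the
// "first_ready" name and the picking policy are hypothetical.
package firstready

import (
	"context"

	"google.golang.org/grpc/balancer"
	"google.golang.org/grpc/balancer/base"
	"google.golang.org/grpc/resolver"
)

// firstReadyPickerBuilder builds a Picker that always hands back the first
// READY SubConn it was given.
type firstReadyPickerBuilder struct{}

func (firstReadyPickerBuilder) Build(readySCs map[resolver.Address]balancer.SubConn) balancer.Picker {
	if len(readySCs) == 0 {
		return base.NewErrPicker(balancer.ErrNoSubConnAvailable)
	}
	var sc balancer.SubConn
	for _, s := range readySCs {
		sc = s
		break
	}
	return &firstReadyPicker{sc: sc}
}

type firstReadyPicker struct{ sc balancer.SubConn }

// Pick returns the single READY SubConn chosen at Build time; a nil done
// callback signals that this picker does not collect per-RPC stats.
func (p *firstReadyPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) {
	return p.sc, nil, nil
}

func init() {
	// Register must run at init time; "first_ready" is the name a service
	// config (or a balancer-name dial option) would use to select it.
	balancer.Register(base.NewBalancerBuilder("first_ready", firstReadyPickerBuilder{}))
}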
+type ConnectivityStateEvaluator struct { + numReady uint64 // Number of addrConns in ready state. + numConnecting uint64 // Number of addrConns in connecting state. + numTransientFailure uint64 // Number of addrConns in transientFailure. +} + +// RecordTransition records state change happening in subConn and based on that +// it evaluates what aggregated state should be. +// +// - If at least one SubConn in Ready, the aggregated state is Ready; +// - Else if at least one SubConn in Connecting, the aggregated state is Connecting; +// - Else the aggregated state is TransientFailure. +// +// Idle and Shutdown are not considered. +func (cse *ConnectivityStateEvaluator) RecordTransition(oldState, newState connectivity.State) connectivity.State { + // Update counters. + for idx, state := range []connectivity.State{oldState, newState} { + updateVal := 2*uint64(idx) - 1 // -1 for oldState and +1 for new. + switch state { + case connectivity.Ready: + cse.numReady += updateVal + case connectivity.Connecting: + cse.numConnecting += updateVal + case connectivity.TransientFailure: + cse.numTransientFailure += updateVal + } + } + + // Evaluate. + if cse.numReady > 0 { + return connectivity.Ready + } + if cse.numConnecting > 0 { + return connectivity.Connecting + } + return connectivity.TransientFailure +} diff --git a/vendor/google.golang.org/grpc/balancer/base/balancer.go b/vendor/google.golang.org/grpc/balancer/base/balancer.go new file mode 100644 index 0000000..c5a51bd --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/base/balancer.go @@ -0,0 +1,178 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package base + +import ( + "context" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +type baseBuilder struct { + name string + pickerBuilder PickerBuilder + config Config +} + +func (bb *baseBuilder) Build(cc balancer.ClientConn, opt balancer.BuildOptions) balancer.Balancer { + return &baseBalancer{ + cc: cc, + pickerBuilder: bb.pickerBuilder, + + subConns: make(map[resolver.Address]balancer.SubConn), + scStates: make(map[balancer.SubConn]connectivity.State), + csEvltr: &balancer.ConnectivityStateEvaluator{}, + // Initialize picker to a picker that always return + // ErrNoSubConnAvailable, because when state of a SubConn changes, we + // may call UpdateBalancerState with this picker. 
+ picker: NewErrPicker(balancer.ErrNoSubConnAvailable), + config: bb.config, + } +} + +func (bb *baseBuilder) Name() string { + return bb.name +} + +type baseBalancer struct { + cc balancer.ClientConn + pickerBuilder PickerBuilder + + csEvltr *balancer.ConnectivityStateEvaluator + state connectivity.State + + subConns map[resolver.Address]balancer.SubConn + scStates map[balancer.SubConn]connectivity.State + picker balancer.Picker + config Config +} + +func (b *baseBalancer) HandleResolvedAddrs(addrs []resolver.Address, err error) { + panic("not implemented") +} + +func (b *baseBalancer) UpdateResolverState(s resolver.State) { + // TODO: handle s.Err (log if not nil) once implemented. + // TODO: handle s.ServiceConfig? + grpclog.Infoln("base.baseBalancer: got new resolver state: ", s) + // addrsSet is the set converted from addrs, it's used for quick lookup of an address. + addrsSet := make(map[resolver.Address]struct{}) + for _, a := range s.Addresses { + addrsSet[a] = struct{}{} + if _, ok := b.subConns[a]; !ok { + // a is a new address (not existing in b.subConns). + sc, err := b.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{HealthCheckEnabled: b.config.HealthCheck}) + if err != nil { + grpclog.Warningf("base.baseBalancer: failed to create new SubConn: %v", err) + continue + } + b.subConns[a] = sc + b.scStates[sc] = connectivity.Idle + sc.Connect() + } + } + for a, sc := range b.subConns { + // a was removed by resolver. + if _, ok := addrsSet[a]; !ok { + b.cc.RemoveSubConn(sc) + delete(b.subConns, a) + // Keep the state of this sc in b.scStates until sc's state becomes Shutdown. + // The entry will be deleted in HandleSubConnStateChange. + } + } +} + +// regeneratePicker takes a snapshot of the balancer, and generates a picker +// from it. The picker is +// - errPicker with ErrTransientFailure if the balancer is in TransientFailure, +// - built by the pickerBuilder with all READY SubConns otherwise. +func (b *baseBalancer) regeneratePicker() { + if b.state == connectivity.TransientFailure { + b.picker = NewErrPicker(balancer.ErrTransientFailure) + return + } + readySCs := make(map[resolver.Address]balancer.SubConn) + + // Filter out all ready SCs from full subConn map. + for addr, sc := range b.subConns { + if st, ok := b.scStates[sc]; ok && st == connectivity.Ready { + readySCs[addr] = sc + } + } + b.picker = b.pickerBuilder.Build(readySCs) +} + +func (b *baseBalancer) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + panic("not implemented") +} + +func (b *baseBalancer) UpdateSubConnState(sc balancer.SubConn, state balancer.SubConnState) { + s := state.ConnectivityState + grpclog.Infof("base.baseBalancer: handle SubConn state change: %p, %v", sc, s) + oldS, ok := b.scStates[sc] + if !ok { + grpclog.Infof("base.baseBalancer: got state changes for an unknown SubConn: %p, %v", sc, s) + return + } + b.scStates[sc] = s + switch s { + case connectivity.Idle: + sc.Connect() + case connectivity.Shutdown: + // When an address was removed by resolver, b called RemoveSubConn but + // kept the sc's state in scStates. Remove state for this sc here. 
+ delete(b.scStates, sc) + } + + oldAggrState := b.state + b.state = b.csEvltr.RecordTransition(oldS, s) + + // Regenerate picker when one of the following happens: + // - this sc became ready from not-ready + // - this sc became not-ready from ready + // - the aggregated state of balancer became TransientFailure from non-TransientFailure + // - the aggregated state of balancer became non-TransientFailure from TransientFailure + if (s == connectivity.Ready) != (oldS == connectivity.Ready) || + (b.state == connectivity.TransientFailure) != (oldAggrState == connectivity.TransientFailure) { + b.regeneratePicker() + } + + b.cc.UpdateBalancerState(b.state, b.picker) +} + +// Close is a nop because base balancer doesn't have internal state to clean up, +// and it doesn't need to call RemoveSubConn for the SubConns. +func (b *baseBalancer) Close() { +} + +// NewErrPicker returns a picker that always returns err on Pick(). +func NewErrPicker(err error) balancer.Picker { + return &errPicker{err: err} +} + +type errPicker struct { + err error // Pick() always returns this err. +} + +func (p *errPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + return nil, nil, p.err +} diff --git a/vendor/google.golang.org/grpc/balancer/base/base.go b/vendor/google.golang.org/grpc/balancer/base/base.go new file mode 100644 index 0000000..34b1f29 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/base/base.go @@ -0,0 +1,64 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package base defines a balancer base that can be used to build balancers with +// different picking algorithms. +// +// The base balancer creates a new SubConn for each resolved address. The +// provided picker will only be notified about READY SubConns. +// +// This package is the base of round_robin balancer, its purpose is to be used +// to build round_robin like balancers with complex picking algorithms. +// Balancers with more complicated logic should try to implement a balancer +// builder from scratch. +// +// All APIs in this package are experimental. +package base + +import ( + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/resolver" +) + +// PickerBuilder creates balancer.Picker. +type PickerBuilder interface { + // Build takes a slice of ready SubConns, and returns a picker that will be + // used by gRPC to pick a SubConn. + Build(readySCs map[resolver.Address]balancer.SubConn) balancer.Picker +} + +// NewBalancerBuilder returns a balancer builder. The balancers +// built by this builder will use the picker builder to build pickers. +func NewBalancerBuilder(name string, pb PickerBuilder) balancer.Builder { + return NewBalancerBuilderWithConfig(name, pb, Config{}) +} + +// Config contains the config info about the base balancer builder. +type Config struct { + // HealthCheck indicates whether health checking should be enabled for this specific balancer. 
+ HealthCheck bool +} + +// NewBalancerBuilderWithConfig returns a base balancer builder configured by the provided config. +func NewBalancerBuilderWithConfig(name string, pb PickerBuilder, config Config) balancer.Builder { + return &baseBuilder{ + name: name, + pickerBuilder: pb, + config: config, + } +} diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpc_lb_v1/load_balancer.pb.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpc_lb_v1/load_balancer.pb.go new file mode 100644 index 0000000..78b1c53 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpc_lb_v1/load_balancer.pb.go @@ -0,0 +1,839 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/lb/v1/load_balancer.proto + +package grpc_lb_v1 // import "google.golang.org/grpc/balancer/grpclb/grpc_lb_v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type LoadBalanceRequest struct { + // Types that are valid to be assigned to LoadBalanceRequestType: + // *LoadBalanceRequest_InitialRequest + // *LoadBalanceRequest_ClientStats + LoadBalanceRequestType isLoadBalanceRequest_LoadBalanceRequestType `protobuf_oneof:"load_balance_request_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoadBalanceRequest) Reset() { *m = LoadBalanceRequest{} } +func (m *LoadBalanceRequest) String() string { return proto.CompactTextString(m) } +func (*LoadBalanceRequest) ProtoMessage() {} +func (*LoadBalanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{0} +} +func (m *LoadBalanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoadBalanceRequest.Unmarshal(m, b) +} +func (m *LoadBalanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoadBalanceRequest.Marshal(b, m, deterministic) +} +func (dst *LoadBalanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoadBalanceRequest.Merge(dst, src) +} +func (m *LoadBalanceRequest) XXX_Size() int { + return xxx_messageInfo_LoadBalanceRequest.Size(m) +} +func (m *LoadBalanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LoadBalanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LoadBalanceRequest proto.InternalMessageInfo + +type isLoadBalanceRequest_LoadBalanceRequestType interface { + isLoadBalanceRequest_LoadBalanceRequestType() +} + +type LoadBalanceRequest_InitialRequest struct { + InitialRequest *InitialLoadBalanceRequest `protobuf:"bytes,1,opt,name=initial_request,json=initialRequest,proto3,oneof"` +} + +type LoadBalanceRequest_ClientStats struct { + ClientStats *ClientStats `protobuf:"bytes,2,opt,name=client_stats,json=clientStats,proto3,oneof"` +} + +func (*LoadBalanceRequest_InitialRequest) isLoadBalanceRequest_LoadBalanceRequestType() {} + +func 
(*LoadBalanceRequest_ClientStats) isLoadBalanceRequest_LoadBalanceRequestType() {} + +func (m *LoadBalanceRequest) GetLoadBalanceRequestType() isLoadBalanceRequest_LoadBalanceRequestType { + if m != nil { + return m.LoadBalanceRequestType + } + return nil +} + +func (m *LoadBalanceRequest) GetInitialRequest() *InitialLoadBalanceRequest { + if x, ok := m.GetLoadBalanceRequestType().(*LoadBalanceRequest_InitialRequest); ok { + return x.InitialRequest + } + return nil +} + +func (m *LoadBalanceRequest) GetClientStats() *ClientStats { + if x, ok := m.GetLoadBalanceRequestType().(*LoadBalanceRequest_ClientStats); ok { + return x.ClientStats + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LoadBalanceRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LoadBalanceRequest_OneofMarshaler, _LoadBalanceRequest_OneofUnmarshaler, _LoadBalanceRequest_OneofSizer, []interface{}{ + (*LoadBalanceRequest_InitialRequest)(nil), + (*LoadBalanceRequest_ClientStats)(nil), + } +} + +func _LoadBalanceRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LoadBalanceRequest) + // load_balance_request_type + switch x := m.LoadBalanceRequestType.(type) { + case *LoadBalanceRequest_InitialRequest: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InitialRequest); err != nil { + return err + } + case *LoadBalanceRequest_ClientStats: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClientStats); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LoadBalanceRequest.LoadBalanceRequestType has unexpected type %T", x) + } + return nil +} + +func _LoadBalanceRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LoadBalanceRequest) + switch tag { + case 1: // load_balance_request_type.initial_request + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InitialLoadBalanceRequest) + err := b.DecodeMessage(msg) + m.LoadBalanceRequestType = &LoadBalanceRequest_InitialRequest{msg} + return true, err + case 2: // load_balance_request_type.client_stats + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClientStats) + err := b.DecodeMessage(msg) + m.LoadBalanceRequestType = &LoadBalanceRequest_ClientStats{msg} + return true, err + default: + return false, nil + } +} + +func _LoadBalanceRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LoadBalanceRequest) + // load_balance_request_type + switch x := m.LoadBalanceRequestType.(type) { + case *LoadBalanceRequest_InitialRequest: + s := proto.Size(x.InitialRequest) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadBalanceRequest_ClientStats: + s := proto.Size(x.ClientStats) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type InitialLoadBalanceRequest struct { + // The name of the load balanced service (e.g., service.googleapis.com). Its + // length should be less than 256 bytes. + // The name might include a port number. How to handle the port number is up + // to the balancer. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InitialLoadBalanceRequest) Reset() { *m = InitialLoadBalanceRequest{} } +func (m *InitialLoadBalanceRequest) String() string { return proto.CompactTextString(m) } +func (*InitialLoadBalanceRequest) ProtoMessage() {} +func (*InitialLoadBalanceRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{1} +} +func (m *InitialLoadBalanceRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InitialLoadBalanceRequest.Unmarshal(m, b) +} +func (m *InitialLoadBalanceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InitialLoadBalanceRequest.Marshal(b, m, deterministic) +} +func (dst *InitialLoadBalanceRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_InitialLoadBalanceRequest.Merge(dst, src) +} +func (m *InitialLoadBalanceRequest) XXX_Size() int { + return xxx_messageInfo_InitialLoadBalanceRequest.Size(m) +} +func (m *InitialLoadBalanceRequest) XXX_DiscardUnknown() { + xxx_messageInfo_InitialLoadBalanceRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_InitialLoadBalanceRequest proto.InternalMessageInfo + +func (m *InitialLoadBalanceRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Contains the number of calls finished for a particular load balance token. +type ClientStatsPerToken struct { + // See Server.load_balance_token. + LoadBalanceToken string `protobuf:"bytes,1,opt,name=load_balance_token,json=loadBalanceToken,proto3" json:"load_balance_token,omitempty"` + // The total number of RPCs that finished associated with the token. + NumCalls int64 `protobuf:"varint,2,opt,name=num_calls,json=numCalls,proto3" json:"num_calls,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientStatsPerToken) Reset() { *m = ClientStatsPerToken{} } +func (m *ClientStatsPerToken) String() string { return proto.CompactTextString(m) } +func (*ClientStatsPerToken) ProtoMessage() {} +func (*ClientStatsPerToken) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{2} +} +func (m *ClientStatsPerToken) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientStatsPerToken.Unmarshal(m, b) +} +func (m *ClientStatsPerToken) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientStatsPerToken.Marshal(b, m, deterministic) +} +func (dst *ClientStatsPerToken) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientStatsPerToken.Merge(dst, src) +} +func (m *ClientStatsPerToken) XXX_Size() int { + return xxx_messageInfo_ClientStatsPerToken.Size(m) +} +func (m *ClientStatsPerToken) XXX_DiscardUnknown() { + xxx_messageInfo_ClientStatsPerToken.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientStatsPerToken proto.InternalMessageInfo + +func (m *ClientStatsPerToken) GetLoadBalanceToken() string { + if m != nil { + return m.LoadBalanceToken + } + return "" +} + +func (m *ClientStatsPerToken) GetNumCalls() int64 { + if m != nil { + return m.NumCalls + } + return 0 +} + +// Contains client level statistics that are useful to load balancing. Each +// count except the timestamp should be reset to zero after reporting the stats. +type ClientStats struct { + // The timestamp of generating the report. 
+ Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // The total number of RPCs that started. + NumCallsStarted int64 `protobuf:"varint,2,opt,name=num_calls_started,json=numCallsStarted,proto3" json:"num_calls_started,omitempty"` + // The total number of RPCs that finished. + NumCallsFinished int64 `protobuf:"varint,3,opt,name=num_calls_finished,json=numCallsFinished,proto3" json:"num_calls_finished,omitempty"` + // The total number of RPCs that failed to reach a server except dropped RPCs. + NumCallsFinishedWithClientFailedToSend int64 `protobuf:"varint,6,opt,name=num_calls_finished_with_client_failed_to_send,json=numCallsFinishedWithClientFailedToSend,proto3" json:"num_calls_finished_with_client_failed_to_send,omitempty"` + // The total number of RPCs that finished and are known to have been received + // by a server. + NumCallsFinishedKnownReceived int64 `protobuf:"varint,7,opt,name=num_calls_finished_known_received,json=numCallsFinishedKnownReceived,proto3" json:"num_calls_finished_known_received,omitempty"` + // The list of dropped calls. + CallsFinishedWithDrop []*ClientStatsPerToken `protobuf:"bytes,8,rep,name=calls_finished_with_drop,json=callsFinishedWithDrop,proto3" json:"calls_finished_with_drop,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientStats) Reset() { *m = ClientStats{} } +func (m *ClientStats) String() string { return proto.CompactTextString(m) } +func (*ClientStats) ProtoMessage() {} +func (*ClientStats) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{3} +} +func (m *ClientStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientStats.Unmarshal(m, b) +} +func (m *ClientStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientStats.Marshal(b, m, deterministic) +} +func (dst *ClientStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientStats.Merge(dst, src) +} +func (m *ClientStats) XXX_Size() int { + return xxx_messageInfo_ClientStats.Size(m) +} +func (m *ClientStats) XXX_DiscardUnknown() { + xxx_messageInfo_ClientStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientStats proto.InternalMessageInfo + +func (m *ClientStats) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +func (m *ClientStats) GetNumCallsStarted() int64 { + if m != nil { + return m.NumCallsStarted + } + return 0 +} + +func (m *ClientStats) GetNumCallsFinished() int64 { + if m != nil { + return m.NumCallsFinished + } + return 0 +} + +func (m *ClientStats) GetNumCallsFinishedWithClientFailedToSend() int64 { + if m != nil { + return m.NumCallsFinishedWithClientFailedToSend + } + return 0 +} + +func (m *ClientStats) GetNumCallsFinishedKnownReceived() int64 { + if m != nil { + return m.NumCallsFinishedKnownReceived + } + return 0 +} + +func (m *ClientStats) GetCallsFinishedWithDrop() []*ClientStatsPerToken { + if m != nil { + return m.CallsFinishedWithDrop + } + return nil +} + +type LoadBalanceResponse struct { + // Types that are valid to be assigned to LoadBalanceResponseType: + // *LoadBalanceResponse_InitialResponse + // *LoadBalanceResponse_ServerList + LoadBalanceResponseType isLoadBalanceResponse_LoadBalanceResponseType `protobuf_oneof:"load_balance_response_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m 
*LoadBalanceResponse) Reset() { *m = LoadBalanceResponse{} } +func (m *LoadBalanceResponse) String() string { return proto.CompactTextString(m) } +func (*LoadBalanceResponse) ProtoMessage() {} +func (*LoadBalanceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{4} +} +func (m *LoadBalanceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoadBalanceResponse.Unmarshal(m, b) +} +func (m *LoadBalanceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoadBalanceResponse.Marshal(b, m, deterministic) +} +func (dst *LoadBalanceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoadBalanceResponse.Merge(dst, src) +} +func (m *LoadBalanceResponse) XXX_Size() int { + return xxx_messageInfo_LoadBalanceResponse.Size(m) +} +func (m *LoadBalanceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_LoadBalanceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_LoadBalanceResponse proto.InternalMessageInfo + +type isLoadBalanceResponse_LoadBalanceResponseType interface { + isLoadBalanceResponse_LoadBalanceResponseType() +} + +type LoadBalanceResponse_InitialResponse struct { + InitialResponse *InitialLoadBalanceResponse `protobuf:"bytes,1,opt,name=initial_response,json=initialResponse,proto3,oneof"` +} + +type LoadBalanceResponse_ServerList struct { + ServerList *ServerList `protobuf:"bytes,2,opt,name=server_list,json=serverList,proto3,oneof"` +} + +func (*LoadBalanceResponse_InitialResponse) isLoadBalanceResponse_LoadBalanceResponseType() {} + +func (*LoadBalanceResponse_ServerList) isLoadBalanceResponse_LoadBalanceResponseType() {} + +func (m *LoadBalanceResponse) GetLoadBalanceResponseType() isLoadBalanceResponse_LoadBalanceResponseType { + if m != nil { + return m.LoadBalanceResponseType + } + return nil +} + +func (m *LoadBalanceResponse) GetInitialResponse() *InitialLoadBalanceResponse { + if x, ok := m.GetLoadBalanceResponseType().(*LoadBalanceResponse_InitialResponse); ok { + return x.InitialResponse + } + return nil +} + +func (m *LoadBalanceResponse) GetServerList() *ServerList { + if x, ok := m.GetLoadBalanceResponseType().(*LoadBalanceResponse_ServerList); ok { + return x.ServerList + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*LoadBalanceResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LoadBalanceResponse_OneofMarshaler, _LoadBalanceResponse_OneofUnmarshaler, _LoadBalanceResponse_OneofSizer, []interface{}{ + (*LoadBalanceResponse_InitialResponse)(nil), + (*LoadBalanceResponse_ServerList)(nil), + } +} + +func _LoadBalanceResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LoadBalanceResponse) + // load_balance_response_type + switch x := m.LoadBalanceResponseType.(type) { + case *LoadBalanceResponse_InitialResponse: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.InitialResponse); err != nil { + return err + } + case *LoadBalanceResponse_ServerList: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ServerList); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LoadBalanceResponse.LoadBalanceResponseType has unexpected type %T", x) + } + return nil +} + +func _LoadBalanceResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LoadBalanceResponse) + switch tag { + case 1: // load_balance_response_type.initial_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(InitialLoadBalanceResponse) + err := b.DecodeMessage(msg) + m.LoadBalanceResponseType = &LoadBalanceResponse_InitialResponse{msg} + return true, err + case 2: // load_balance_response_type.server_list + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ServerList) + err := b.DecodeMessage(msg) + m.LoadBalanceResponseType = &LoadBalanceResponse_ServerList{msg} + return true, err + default: + return false, nil + } +} + +func _LoadBalanceResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LoadBalanceResponse) + // load_balance_response_type + switch x := m.LoadBalanceResponseType.(type) { + case *LoadBalanceResponse_InitialResponse: + s := proto.Size(x.InitialResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadBalanceResponse_ServerList: + s := proto.Size(x.ServerList) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type InitialLoadBalanceResponse struct { + // This is an application layer redirect that indicates the client should use + // the specified server for load balancing. When this field is non-empty in + // the response, the client should open a separate connection to the + // load_balancer_delegate and call the BalanceLoad method. Its length should + // be less than 64 bytes. + LoadBalancerDelegate string `protobuf:"bytes,1,opt,name=load_balancer_delegate,json=loadBalancerDelegate,proto3" json:"load_balancer_delegate,omitempty"` + // This interval defines how often the client should send the client stats + // to the load balancer. Stats should only be reported when the duration is + // positive. 
+ ClientStatsReportInterval *duration.Duration `protobuf:"bytes,2,opt,name=client_stats_report_interval,json=clientStatsReportInterval,proto3" json:"client_stats_report_interval,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *InitialLoadBalanceResponse) Reset() { *m = InitialLoadBalanceResponse{} } +func (m *InitialLoadBalanceResponse) String() string { return proto.CompactTextString(m) } +func (*InitialLoadBalanceResponse) ProtoMessage() {} +func (*InitialLoadBalanceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{5} +} +func (m *InitialLoadBalanceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_InitialLoadBalanceResponse.Unmarshal(m, b) +} +func (m *InitialLoadBalanceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_InitialLoadBalanceResponse.Marshal(b, m, deterministic) +} +func (dst *InitialLoadBalanceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_InitialLoadBalanceResponse.Merge(dst, src) +} +func (m *InitialLoadBalanceResponse) XXX_Size() int { + return xxx_messageInfo_InitialLoadBalanceResponse.Size(m) +} +func (m *InitialLoadBalanceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_InitialLoadBalanceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_InitialLoadBalanceResponse proto.InternalMessageInfo + +func (m *InitialLoadBalanceResponse) GetLoadBalancerDelegate() string { + if m != nil { + return m.LoadBalancerDelegate + } + return "" +} + +func (m *InitialLoadBalanceResponse) GetClientStatsReportInterval() *duration.Duration { + if m != nil { + return m.ClientStatsReportInterval + } + return nil +} + +type ServerList struct { + // Contains a list of servers selected by the load balancer. The list will + // be updated when server resolutions change or as needed to balance load + // across more servers. The client should consume the server list in order + // unless instructed otherwise via the client_config. + Servers []*Server `protobuf:"bytes,1,rep,name=servers,proto3" json:"servers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerList) Reset() { *m = ServerList{} } +func (m *ServerList) String() string { return proto.CompactTextString(m) } +func (*ServerList) ProtoMessage() {} +func (*ServerList) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{6} +} +func (m *ServerList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerList.Unmarshal(m, b) +} +func (m *ServerList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerList.Marshal(b, m, deterministic) +} +func (dst *ServerList) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerList.Merge(dst, src) +} +func (m *ServerList) XXX_Size() int { + return xxx_messageInfo_ServerList.Size(m) +} +func (m *ServerList) XXX_DiscardUnknown() { + xxx_messageInfo_ServerList.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerList proto.InternalMessageInfo + +func (m *ServerList) GetServers() []*Server { + if m != nil { + return m.Servers + } + return nil +} + +// Contains server information. When the drop field is not true, use the other +// fields. +type Server struct { + // A resolved address for the server, serialized in network-byte-order. It may + // either be an IPv4 or IPv6 address. 
+ IpAddress []byte `protobuf:"bytes,1,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` + // A resolved port number for the server. + Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + // An opaque but printable token for load reporting. The client must include + // the token of the picked server into the initial metadata when it starts a + // call to that server. The token is used by the server to verify the request + // and to allow the server to report load to the gRPC LB system. The token is + // also used in client stats for reporting dropped calls. + // + // Its length can be variable but must be less than 50 bytes. + LoadBalanceToken string `protobuf:"bytes,3,opt,name=load_balance_token,json=loadBalanceToken,proto3" json:"load_balance_token,omitempty"` + // Indicates whether this particular request should be dropped by the client. + // If the request is dropped, there will be a corresponding entry in + // ClientStats.calls_finished_with_drop. + Drop bool `protobuf:"varint,4,opt,name=drop,proto3" json:"drop,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Server) Reset() { *m = Server{} } +func (m *Server) String() string { return proto.CompactTextString(m) } +func (*Server) ProtoMessage() {} +func (*Server) Descriptor() ([]byte, []int) { + return fileDescriptor_load_balancer_12026aec3f0251ba, []int{7} +} +func (m *Server) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Server.Unmarshal(m, b) +} +func (m *Server) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Server.Marshal(b, m, deterministic) +} +func (dst *Server) XXX_Merge(src proto.Message) { + xxx_messageInfo_Server.Merge(dst, src) +} +func (m *Server) XXX_Size() int { + return xxx_messageInfo_Server.Size(m) +} +func (m *Server) XXX_DiscardUnknown() { + xxx_messageInfo_Server.DiscardUnknown(m) +} + +var xxx_messageInfo_Server proto.InternalMessageInfo + +func (m *Server) GetIpAddress() []byte { + if m != nil { + return m.IpAddress + } + return nil +} + +func (m *Server) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *Server) GetLoadBalanceToken() string { + if m != nil { + return m.LoadBalanceToken + } + return "" +} + +func (m *Server) GetDrop() bool { + if m != nil { + return m.Drop + } + return false +} + +func init() { + proto.RegisterType((*LoadBalanceRequest)(nil), "grpc.lb.v1.LoadBalanceRequest") + proto.RegisterType((*InitialLoadBalanceRequest)(nil), "grpc.lb.v1.InitialLoadBalanceRequest") + proto.RegisterType((*ClientStatsPerToken)(nil), "grpc.lb.v1.ClientStatsPerToken") + proto.RegisterType((*ClientStats)(nil), "grpc.lb.v1.ClientStats") + proto.RegisterType((*LoadBalanceResponse)(nil), "grpc.lb.v1.LoadBalanceResponse") + proto.RegisterType((*InitialLoadBalanceResponse)(nil), "grpc.lb.v1.InitialLoadBalanceResponse") + proto.RegisterType((*ServerList)(nil), "grpc.lb.v1.ServerList") + proto.RegisterType((*Server)(nil), "grpc.lb.v1.Server") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// LoadBalancerClient is the client API for LoadBalancer service. 
+// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type LoadBalancerClient interface { + // Bidirectional rpc to get a list of servers. + BalanceLoad(ctx context.Context, opts ...grpc.CallOption) (LoadBalancer_BalanceLoadClient, error) +} + +type loadBalancerClient struct { + cc *grpc.ClientConn +} + +func NewLoadBalancerClient(cc *grpc.ClientConn) LoadBalancerClient { + return &loadBalancerClient{cc} +} + +func (c *loadBalancerClient) BalanceLoad(ctx context.Context, opts ...grpc.CallOption) (LoadBalancer_BalanceLoadClient, error) { + stream, err := c.cc.NewStream(ctx, &_LoadBalancer_serviceDesc.Streams[0], "/grpc.lb.v1.LoadBalancer/BalanceLoad", opts...) + if err != nil { + return nil, err + } + x := &loadBalancerBalanceLoadClient{stream} + return x, nil +} + +type LoadBalancer_BalanceLoadClient interface { + Send(*LoadBalanceRequest) error + Recv() (*LoadBalanceResponse, error) + grpc.ClientStream +} + +type loadBalancerBalanceLoadClient struct { + grpc.ClientStream +} + +func (x *loadBalancerBalanceLoadClient) Send(m *LoadBalanceRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *loadBalancerBalanceLoadClient) Recv() (*LoadBalanceResponse, error) { + m := new(LoadBalanceResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// LoadBalancerServer is the server API for LoadBalancer service. +type LoadBalancerServer interface { + // Bidirectional rpc to get a list of servers. + BalanceLoad(LoadBalancer_BalanceLoadServer) error +} + +func RegisterLoadBalancerServer(s *grpc.Server, srv LoadBalancerServer) { + s.RegisterService(&_LoadBalancer_serviceDesc, srv) +} + +func _LoadBalancer_BalanceLoad_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(LoadBalancerServer).BalanceLoad(&loadBalancerBalanceLoadServer{stream}) +} + +type LoadBalancer_BalanceLoadServer interface { + Send(*LoadBalanceResponse) error + Recv() (*LoadBalanceRequest, error) + grpc.ServerStream +} + +type loadBalancerBalanceLoadServer struct { + grpc.ServerStream +} + +func (x *loadBalancerBalanceLoadServer) Send(m *LoadBalanceResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *loadBalancerBalanceLoadServer) Recv() (*LoadBalanceRequest, error) { + m := new(LoadBalanceRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _LoadBalancer_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.lb.v1.LoadBalancer", + HandlerType: (*LoadBalancerServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "BalanceLoad", + Handler: _LoadBalancer_BalanceLoad_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc/lb/v1/load_balancer.proto", +} + +func init() { + proto.RegisterFile("grpc/lb/v1/load_balancer.proto", fileDescriptor_load_balancer_12026aec3f0251ba) +} + +var fileDescriptor_load_balancer_12026aec3f0251ba = []byte{ + // 752 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x55, 0xdd, 0x6e, 0x23, 0x35, + 0x14, 0xee, 0x90, 0x69, 0x36, 0x39, 0x29, 0x34, 0xeb, 0x85, 0x65, 0x92, 0xdd, 0x6d, 0x4b, 0x24, + 0x56, 0x11, 0x2a, 0x13, 0x52, 0xb8, 0x00, 0x89, 0x0b, 0x48, 0xab, 0x2a, 0x2d, 0xbd, 0x88, 0x9c, + 0x4a, 0x45, 0x95, 0x90, 0x99, 0xc9, 0xb8, 0xa9, 0x55, 0xc7, 0x1e, 0x3c, 0x4e, 0x2a, 0xae, 0x79, + 0x1f, 0xc4, 0x2b, 0x20, 0x5e, 0x0c, 0x8d, 0xed, 0x49, 0xa6, 0x49, 
0xa3, 0xbd, 0xca, 0xf8, 0x9c, + 0xcf, 0xdf, 0xf9, 0xfd, 0x1c, 0x38, 0x98, 0xaa, 0x74, 0xd2, 0xe3, 0x71, 0x6f, 0xd1, 0xef, 0x71, + 0x19, 0x25, 0x24, 0x8e, 0x78, 0x24, 0x26, 0x54, 0x85, 0xa9, 0x92, 0x5a, 0x22, 0xc8, 0xfd, 0x21, + 0x8f, 0xc3, 0x45, 0xbf, 0x7d, 0x30, 0x95, 0x72, 0xca, 0x69, 0xcf, 0x78, 0xe2, 0xf9, 0x5d, 0x2f, + 0x99, 0xab, 0x48, 0x33, 0x29, 0x2c, 0xb6, 0x7d, 0xb8, 0xee, 0xd7, 0x6c, 0x46, 0x33, 0x1d, 0xcd, + 0x52, 0x0b, 0xe8, 0xfc, 0xeb, 0x01, 0xba, 0x92, 0x51, 0x32, 0xb0, 0x31, 0x30, 0xfd, 0x63, 0x4e, + 0x33, 0x8d, 0x46, 0xb0, 0xcf, 0x04, 0xd3, 0x2c, 0xe2, 0x44, 0x59, 0x53, 0xe0, 0x1d, 0x79, 0xdd, + 0xc6, 0xc9, 0x97, 0xe1, 0x2a, 0x7a, 0x78, 0x61, 0x21, 0x9b, 0xf7, 0x87, 0x3b, 0xf8, 0x13, 0x77, + 0xbf, 0x60, 0xfc, 0x11, 0xf6, 0x26, 0x9c, 0x51, 0xa1, 0x49, 0xa6, 0x23, 0x9d, 0x05, 0x1f, 0x19, + 0xba, 0xcf, 0xcb, 0x74, 0xa7, 0xc6, 0x3f, 0xce, 0xdd, 0xc3, 0x1d, 0xdc, 0x98, 0xac, 0x8e, 0x83, + 0x37, 0xd0, 0x2a, 0xb7, 0xa2, 0x48, 0x8a, 0xe8, 0x3f, 0x53, 0xda, 0xe9, 0x41, 0x6b, 0x6b, 0x26, + 0x08, 0x81, 0x2f, 0xa2, 0x19, 0x35, 0xe9, 0xd7, 0xb1, 0xf9, 0xee, 0xfc, 0x0e, 0xaf, 0x4a, 0xb1, + 0x46, 0x54, 0x5d, 0xcb, 0x07, 0x2a, 0xd0, 0x31, 0xa0, 0x27, 0x41, 0x74, 0x6e, 0x75, 0x17, 0x9b, + 0x7c, 0x45, 0x6d, 0xd1, 0x6f, 0xa0, 0x2e, 0xe6, 0x33, 0x32, 0x89, 0x38, 0xb7, 0xd5, 0x54, 0x70, + 0x4d, 0xcc, 0x67, 0xa7, 0xf9, 0xb9, 0xf3, 0x4f, 0x05, 0x1a, 0xa5, 0x10, 0xe8, 0x7b, 0xa8, 0x2f, + 0x3b, 0xef, 0x3a, 0xd9, 0x0e, 0xed, 0x6c, 0xc2, 0x62, 0x36, 0xe1, 0x75, 0x81, 0xc0, 0x2b, 0x30, + 0xfa, 0x0a, 0x5e, 0x2e, 0xc3, 0xe4, 0xad, 0x53, 0x9a, 0x26, 0x2e, 0xdc, 0x7e, 0x11, 0x6e, 0x6c, + 0xcd, 0x79, 0x01, 0x2b, 0xec, 0x1d, 0x13, 0x2c, 0xbb, 0xa7, 0x49, 0x50, 0x31, 0xe0, 0x66, 0x01, + 0x3e, 0x77, 0x76, 0xf4, 0x1b, 0x7c, 0xbd, 0x89, 0x26, 0x8f, 0x4c, 0xdf, 0x13, 0x37, 0xa9, 0xbb, + 0x88, 0x71, 0x9a, 0x10, 0x2d, 0x49, 0x46, 0x45, 0x12, 0x54, 0x0d, 0xd1, 0xfb, 0x75, 0xa2, 0x1b, + 0xa6, 0xef, 0x6d, 0xad, 0xe7, 0x06, 0x7f, 0x2d, 0xc7, 0x54, 0x24, 0x68, 0x08, 0x5f, 0x3c, 0x43, + 0xff, 0x20, 0xe4, 0xa3, 0x20, 0x8a, 0x4e, 0x28, 0x5b, 0xd0, 0x24, 0x78, 0x61, 0x28, 0xdf, 0xad, + 0x53, 0xfe, 0x92, 0xa3, 0xb0, 0x03, 0xa1, 0x5f, 0x21, 0x78, 0x2e, 0xc9, 0x44, 0xc9, 0x34, 0xa8, + 0x1d, 0x55, 0xba, 0x8d, 0x93, 0xc3, 0x2d, 0x6b, 0x54, 0x8c, 0x16, 0x7f, 0x36, 0x59, 0xcf, 0xf8, + 0x4c, 0xc9, 0xf4, 0xd2, 0xaf, 0xf9, 0xcd, 0xdd, 0x4b, 0xbf, 0xb6, 0xdb, 0xac, 0x76, 0xfe, 0xf3, + 0xe0, 0xd5, 0x93, 0xfd, 0xc9, 0x52, 0x29, 0x32, 0x8a, 0xc6, 0xd0, 0x5c, 0x49, 0xc1, 0xda, 0xdc, + 0x04, 0xdf, 0x7f, 0x48, 0x0b, 0x16, 0x3d, 0xdc, 0xc1, 0xfb, 0x4b, 0x31, 0x38, 0xd2, 0x1f, 0xa0, + 0x91, 0x51, 0xb5, 0xa0, 0x8a, 0x70, 0x96, 0x69, 0x27, 0x86, 0xd7, 0x65, 0xbe, 0xb1, 0x71, 0x5f, + 0x31, 0x23, 0x26, 0xc8, 0x96, 0xa7, 0xc1, 0x5b, 0x68, 0xaf, 0x49, 0xc1, 0x72, 0x5a, 0x2d, 0xfc, + 0xed, 0x41, 0x7b, 0x7b, 0x2a, 0xe8, 0x3b, 0x78, 0xfd, 0xe4, 0x49, 0x21, 0x09, 0xe5, 0x74, 0x1a, + 0xe9, 0x42, 0x1f, 0x9f, 0x96, 0xd6, 0x5c, 0x9d, 0x39, 0x1f, 0xba, 0x85, 0xb7, 0x65, 0xed, 0x12, + 0x45, 0x53, 0xa9, 0x34, 0x61, 0x42, 0x53, 0xb5, 0x88, 0xb8, 0x4b, 0xbf, 0xb5, 0xb1, 0xd0, 0x67, + 0xee, 0x31, 0xc2, 0xad, 0x92, 0x96, 0xb1, 0xb9, 0x7c, 0xe1, 0xee, 0x76, 0x7e, 0x02, 0x58, 0x95, + 0x8a, 0x8e, 0xe1, 0x85, 0x2d, 0x35, 0x0b, 0x3c, 0x33, 0x59, 0xb4, 0xd9, 0x13, 0x5c, 0x40, 0x2e, + 0xfd, 0x5a, 0xa5, 0xe9, 0x77, 0xfe, 0xf2, 0xa0, 0x6a, 0x3d, 0xe8, 0x1d, 0x00, 0x4b, 0x49, 0x94, + 0x24, 0x8a, 0x66, 0x99, 0x29, 0x69, 0x0f, 0xd7, 0x59, 0xfa, 0xb3, 0x35, 0xe4, 0x6f, 0x41, 0x1e, + 0xdb, 0xe4, 0xbb, 0x8b, 0xcd, 0xf7, 0x16, 0xd1, 0x57, 0xb6, 0x88, 0x1e, 0x81, 0x6f, 0xd6, 
0xce, + 0x3f, 0xf2, 0xba, 0x35, 0x6c, 0xbe, 0xed, 0xfa, 0x9c, 0xc4, 0xb0, 0x57, 0x6a, 0xb8, 0x42, 0x18, + 0x1a, 0xee, 0x3b, 0x37, 0xa3, 0x83, 0x72, 0x1d, 0x9b, 0xcf, 0x54, 0xfb, 0x70, 0xab, 0xdf, 0x4e, + 0xae, 0xeb, 0x7d, 0xe3, 0x0d, 0x6e, 0xe0, 0x63, 0x26, 0x4b, 0xc0, 0xc1, 0xcb, 0x72, 0xc8, 0x51, + 0xde, 0xf6, 0x91, 0x77, 0xdb, 0x77, 0x63, 0x98, 0x4a, 0x1e, 0x89, 0x69, 0x28, 0xd5, 0xb4, 0x67, + 0xfe, 0x51, 0x8a, 0x99, 0x9b, 0x13, 0x8f, 0xcd, 0x0f, 0xe1, 0x31, 0x59, 0xf4, 0xe3, 0xaa, 0x19, + 0xd9, 0xb7, 0xff, 0x07, 0x00, 0x00, 0xff, 0xff, 0x81, 0x14, 0xee, 0xd1, 0x7b, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go new file mode 100644 index 0000000..a1123ce --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go @@ -0,0 +1,476 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate ./regenerate.sh + +// Package grpclb defines a grpclb balancer. +// +// To install grpclb balancer, import this package as: +// import _ "google.golang.org/grpc/balancer/grpclb" +package grpclb + +import ( + "context" + "errors" + "strconv" + "strings" + "sync" + "time" + + durationpb "github.com/golang/protobuf/ptypes/duration" + "google.golang.org/grpc" + "google.golang.org/grpc/balancer" + lbpb "google.golang.org/grpc/balancer/grpclb/grpc_lb_v1" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/resolver" +) + +const ( + lbTokeyKey = "lb-token" + defaultFallbackTimeout = 10 * time.Second + grpclbName = "grpclb" +) + +var ( + // defaultBackoffConfig configures the backoff strategy that's used when the + // init handshake in the RPC is unsuccessful. It's not for the clientconn + // reconnect backoff. + // + // It has the same value as the default grpc.DefaultBackoffConfig. + // + // TODO: make backoff configurable. + defaultBackoffConfig = backoff.Exponential{ + MaxDelay: 120 * time.Second, + } + errServerTerminatedConnection = errors.New("grpclb: failed to recv server list: server terminated connection") +) + +func convertDuration(d *durationpb.Duration) time.Duration { + if d == nil { + return 0 + } + return time.Duration(d.Seconds)*time.Second + time.Duration(d.Nanos)*time.Nanosecond +} + +// Client API for LoadBalancer service. +// Mostly copied from generated pb.go file. +// To avoid circular dependency. +type loadBalancerClient struct { + cc *grpc.ClientConn +} + +func (c *loadBalancerClient) BalanceLoad(ctx context.Context, opts ...grpc.CallOption) (*balanceLoadClientStream, error) { + desc := &grpc.StreamDesc{ + StreamName: "BalanceLoad", + ServerStreams: true, + ClientStreams: true, + } + stream, err := c.cc.NewStream(ctx, desc, "/grpc.lb.v1.LoadBalancer/BalanceLoad", opts...) 
+ if err != nil { + return nil, err + } + x := &balanceLoadClientStream{stream} + return x, nil +} + +type balanceLoadClientStream struct { + grpc.ClientStream +} + +func (x *balanceLoadClientStream) Send(m *lbpb.LoadBalanceRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *balanceLoadClientStream) Recv() (*lbpb.LoadBalanceResponse, error) { + m := new(lbpb.LoadBalanceResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func init() { + balancer.Register(newLBBuilder()) +} + +// newLBBuilder creates a builder for grpclb. +func newLBBuilder() balancer.Builder { + return newLBBuilderWithFallbackTimeout(defaultFallbackTimeout) +} + +// newLBBuilderWithFallbackTimeout creates a grpclb builder with the given +// fallbackTimeout. If no response is received from the remote balancer within +// fallbackTimeout, the backend addresses from the resolved address list will be +// used. +// +// Only call this function when a non-default fallback timeout is needed. +func newLBBuilderWithFallbackTimeout(fallbackTimeout time.Duration) balancer.Builder { + return &lbBuilder{ + fallbackTimeout: fallbackTimeout, + } +} + +// newLBBuilderWithPickFirst creates a grpclb builder with pick-first. +func newLBBuilderWithPickFirst() balancer.Builder { + return &lbBuilder{ + usePickFirst: true, + } +} + +type lbBuilder struct { + fallbackTimeout time.Duration + + // TODO: delete this when balancer can handle service config. This should be + // updated by service config. + usePickFirst bool // Use roundrobin or pickfirst for backends. +} + +func (b *lbBuilder) Name() string { + return grpclbName +} + +func (b *lbBuilder) Build(cc balancer.ClientConn, opt balancer.BuildOptions) balancer.Balancer { + // This generates a manual resolver builder with a random scheme. This + // scheme will be used to dial to remote LB, so we can send filtered address + // updates to remote LB ClientConn using this manual resolver. + scheme := "grpclb_internal_" + strconv.FormatInt(time.Now().UnixNano(), 36) + r := &lbManualResolver{scheme: scheme, ccb: cc} + + var target string + targetSplitted := strings.Split(cc.Target(), ":///") + if len(targetSplitted) < 2 { + target = cc.Target() + } else { + target = targetSplitted[1] + } + + lb := &lbBalancer{ + cc: newLBCacheClientConn(cc), + target: target, + opt: opt, + usePickFirst: b.usePickFirst, + fallbackTimeout: b.fallbackTimeout, + doneCh: make(chan struct{}), + + manualResolver: r, + subConns: make(map[resolver.Address]balancer.SubConn), + scStates: make(map[balancer.SubConn]connectivity.State), + picker: &errPicker{err: balancer.ErrNoSubConnAvailable}, + clientStats: newRPCStats(), + backoff: defaultBackoffConfig, // TODO: make backoff configurable. + } + + var err error + if opt.CredsBundle != nil { + lb.grpclbClientConnCreds, err = opt.CredsBundle.NewWithMode(internal.CredsBundleModeBalancer) + if err != nil { + grpclog.Warningf("lbBalancer: client connection creds NewWithMode failed: %v", err) + } + lb.grpclbBackendCreds, err = opt.CredsBundle.NewWithMode(internal.CredsBundleModeBackendFromBalancer) + if err != nil { + grpclog.Warningf("lbBalancer: backend creds NewWithMode failed: %v", err) + } + } + + return lb +} + +type lbBalancer struct { + cc *lbCacheClientConn + target string + opt balancer.BuildOptions + + usePickFirst bool + + // grpclbClientConnCreds is the creds bundle to be used to connect to grpclb + // servers. If it's nil, use the TransportCredentials from BuildOptions + // instead. 
+ grpclbClientConnCreds credentials.Bundle + // grpclbBackendCreds is the creds bundle to be used for addresses that are + // returned by grpclb server. If it's nil, don't set anything when creating + // SubConns. + grpclbBackendCreds credentials.Bundle + + fallbackTimeout time.Duration + doneCh chan struct{} + + // manualResolver is used in the remote LB ClientConn inside grpclb. When + // resolved address updates are received by grpclb, filtered updates will be + // send to remote LB ClientConn through this resolver. + manualResolver *lbManualResolver + // The ClientConn to talk to the remote balancer. + ccRemoteLB *grpc.ClientConn + // backoff for calling remote balancer. + backoff backoff.Strategy + + // Support client side load reporting. Each picker gets a reference to this, + // and will update its content. + clientStats *rpcStats + + mu sync.Mutex // guards everything following. + // The full server list including drops, used to check if the newly received + // serverList contains anything new. Each generate picker will also have + // reference to this list to do the first layer pick. + fullServerList []*lbpb.Server + // All backends addresses, with metadata set to nil. This list contains all + // backend addresses in the same order and with the same duplicates as in + // serverlist. When generating picker, a SubConn slice with the same order + // but with only READY SCs will be gerenated. + backendAddrs []resolver.Address + // Roundrobin functionalities. + state connectivity.State + subConns map[resolver.Address]balancer.SubConn // Used to new/remove SubConn. + scStates map[balancer.SubConn]connectivity.State // Used to filter READY SubConns. + picker balancer.Picker + // Support fallback to resolved backend addresses if there's no response + // from remote balancer within fallbackTimeout. + remoteBalancerConnected bool + serverListReceived bool + inFallback bool + // resolvedBackendAddrs is resolvedAddrs minus remote balancers. It's set + // when resolved address updates are received, and read in the goroutine + // handling fallback. + resolvedBackendAddrs []resolver.Address +} + +// regeneratePicker takes a snapshot of the balancer, and generates a picker from +// it. The picker +// - always returns ErrTransientFailure if the balancer is in TransientFailure, +// - does two layer roundrobin pick otherwise. +// Caller must hold lb.mu. +func (lb *lbBalancer) regeneratePicker(resetDrop bool) { + if lb.state == connectivity.TransientFailure { + lb.picker = &errPicker{err: balancer.ErrTransientFailure} + return + } + + if lb.state == connectivity.Connecting { + lb.picker = &errPicker{err: balancer.ErrNoSubConnAvailable} + return + } + + var readySCs []balancer.SubConn + if lb.usePickFirst { + for _, sc := range lb.subConns { + readySCs = append(readySCs, sc) + break + } + } else { + for _, a := range lb.backendAddrs { + if sc, ok := lb.subConns[a]; ok { + if st, ok := lb.scStates[sc]; ok && st == connectivity.Ready { + readySCs = append(readySCs, sc) + } + } + } + } + + if len(readySCs) <= 0 { + // If there's no ready SubConns, always re-pick. This is to avoid drops + // unless at least one SubConn is ready. Otherwise we may drop more + // often than want because of drops + re-picks(which become re-drops). + // + // This doesn't seem to be necessary after the connecting check above. + // Kept for safety. 
+ lb.picker = &errPicker{err: balancer.ErrNoSubConnAvailable} + return + } + if lb.inFallback { + lb.picker = newRRPicker(readySCs) + return + } + if resetDrop { + lb.picker = newLBPicker(lb.fullServerList, readySCs, lb.clientStats) + return + } + prevLBPicker, ok := lb.picker.(*lbPicker) + if !ok { + lb.picker = newLBPicker(lb.fullServerList, readySCs, lb.clientStats) + return + } + prevLBPicker.updateReadySCs(readySCs) +} + +// aggregateSubConnStats calculate the aggregated state of SubConns in +// lb.SubConns. These SubConns are subconns in use (when switching between +// fallback and grpclb). lb.scState contains states for all SubConns, including +// those in cache (SubConns are cached for 10 seconds after remove). +// +// The aggregated state is: +// - If at least one SubConn in Ready, the aggregated state is Ready; +// - Else if at least one SubConn in Connecting, the aggregated state is Connecting; +// - Else the aggregated state is TransientFailure. +func (lb *lbBalancer) aggregateSubConnStates() connectivity.State { + var numConnecting uint64 + + for _, sc := range lb.subConns { + if state, ok := lb.scStates[sc]; ok { + switch state { + case connectivity.Ready: + return connectivity.Ready + case connectivity.Connecting: + numConnecting++ + } + } + } + if numConnecting > 0 { + return connectivity.Connecting + } + return connectivity.TransientFailure +} + +func (lb *lbBalancer) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + if grpclog.V(2) { + grpclog.Infof("lbBalancer: handle SubConn state change: %p, %v", sc, s) + } + lb.mu.Lock() + defer lb.mu.Unlock() + + oldS, ok := lb.scStates[sc] + if !ok { + if grpclog.V(2) { + grpclog.Infof("lbBalancer: got state changes for an unknown SubConn: %p, %v", sc, s) + } + return + } + lb.scStates[sc] = s + switch s { + case connectivity.Idle: + sc.Connect() + case connectivity.Shutdown: + // When an address was removed by resolver, b called RemoveSubConn but + // kept the sc's state in scStates. Remove state for this sc here. + delete(lb.scStates, sc) + } + // Force regenerate picker if + // - this sc became ready from not-ready + // - this sc became not-ready from ready + lb.updateStateAndPicker((oldS == connectivity.Ready) != (s == connectivity.Ready), false) + + // Enter fallback when the aggregated state is not Ready and the connection + // to remote balancer is lost. + if lb.state != connectivity.Ready { + if !lb.inFallback && !lb.remoteBalancerConnected { + // Enter fallback. + lb.refreshSubConns(lb.resolvedBackendAddrs, false) + } + } +} + +// updateStateAndPicker re-calculate the aggregated state, and regenerate picker +// if overall state is changed. +// +// If forceRegeneratePicker is true, picker will be regenerated. +func (lb *lbBalancer) updateStateAndPicker(forceRegeneratePicker bool, resetDrop bool) { + oldAggrState := lb.state + lb.state = lb.aggregateSubConnStates() + // Regenerate picker when one of the following happens: + // - caller wants to regenerate + // - the aggregated state changed + if forceRegeneratePicker || (lb.state != oldAggrState) { + lb.regeneratePicker(resetDrop) + } + + lb.cc.UpdateBalancerState(lb.state, lb.picker) +} + +// fallbackToBackendsAfter blocks for fallbackTimeout and falls back to use +// resolved backends (backends received from resolver, not from remote balancer) +// if no connection to remote balancers was successful. 
+func (lb *lbBalancer) fallbackToBackendsAfter(fallbackTimeout time.Duration) { + timer := time.NewTimer(fallbackTimeout) + defer timer.Stop() + select { + case <-timer.C: + case <-lb.doneCh: + return + } + lb.mu.Lock() + if lb.inFallback || lb.serverListReceived { + lb.mu.Unlock() + return + } + // Enter fallback. + lb.refreshSubConns(lb.resolvedBackendAddrs, false) + lb.mu.Unlock() +} + +// HandleResolvedAddrs sends the updated remoteLB addresses to remoteLB +// clientConn. The remoteLB clientConn will handle creating/removing remoteLB +// connections. +func (lb *lbBalancer) HandleResolvedAddrs(addrs []resolver.Address, err error) { + if grpclog.V(2) { + grpclog.Infof("lbBalancer: handleResolvedResult: %+v", addrs) + } + if len(addrs) <= 0 { + return + } + + var remoteBalancerAddrs, backendAddrs []resolver.Address + for _, a := range addrs { + if a.Type == resolver.GRPCLB { + a.Type = resolver.Backend + remoteBalancerAddrs = append(remoteBalancerAddrs, a) + } else { + backendAddrs = append(backendAddrs, a) + } + } + + if lb.ccRemoteLB == nil { + if len(remoteBalancerAddrs) <= 0 { + grpclog.Errorf("grpclb: no remote balancer address is available, should never happen") + return + } + // First time receiving resolved addresses, create a cc to remote + // balancers. + lb.dialRemoteLB(remoteBalancerAddrs[0].ServerName) + // Start the fallback goroutine. + go lb.fallbackToBackendsAfter(lb.fallbackTimeout) + } + + // cc to remote balancers uses lb.manualResolver. Send the updated remote + // balancer addresses to it through manualResolver. + lb.manualResolver.UpdateState(resolver.State{Addresses: remoteBalancerAddrs}) + + lb.mu.Lock() + lb.resolvedBackendAddrs = backendAddrs + if lb.inFallback { + // This means we received a new list of resolved backends, and we are + // still in fallback mode. Need to update the list of backends we are + // using to the new list of backends. + lb.refreshSubConns(lb.resolvedBackendAddrs, false) + } + lb.mu.Unlock() +} + +func (lb *lbBalancer) Close() { + select { + case <-lb.doneCh: + return + default: + } + close(lb.doneCh) + if lb.ccRemoteLB != nil { + lb.ccRemoteLB.Close() + } + lb.cc.close() +} diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_picker.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_picker.go new file mode 100644 index 0000000..6f023bc --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_picker.go @@ -0,0 +1,195 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpclb + +import ( + "context" + "sync" + "sync/atomic" + + "google.golang.org/grpc/balancer" + lbpb "google.golang.org/grpc/balancer/grpclb/grpc_lb_v1" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/status" +) + +// rpcStats is same as lbmpb.ClientStats, except that numCallsDropped is a map +// instead of a slice. +type rpcStats struct { + // Only access the following fields atomically. 
+ numCallsStarted int64 + numCallsFinished int64 + numCallsFinishedWithClientFailedToSend int64 + numCallsFinishedKnownReceived int64 + + mu sync.Mutex + // map load_balance_token -> num_calls_dropped + numCallsDropped map[string]int64 +} + +func newRPCStats() *rpcStats { + return &rpcStats{ + numCallsDropped: make(map[string]int64), + } +} + +// toClientStats converts rpcStats to lbpb.ClientStats, and clears rpcStats. +func (s *rpcStats) toClientStats() *lbpb.ClientStats { + stats := &lbpb.ClientStats{ + NumCallsStarted: atomic.SwapInt64(&s.numCallsStarted, 0), + NumCallsFinished: atomic.SwapInt64(&s.numCallsFinished, 0), + NumCallsFinishedWithClientFailedToSend: atomic.SwapInt64(&s.numCallsFinishedWithClientFailedToSend, 0), + NumCallsFinishedKnownReceived: atomic.SwapInt64(&s.numCallsFinishedKnownReceived, 0), + } + s.mu.Lock() + dropped := s.numCallsDropped + s.numCallsDropped = make(map[string]int64) + s.mu.Unlock() + for token, count := range dropped { + stats.CallsFinishedWithDrop = append(stats.CallsFinishedWithDrop, &lbpb.ClientStatsPerToken{ + LoadBalanceToken: token, + NumCalls: count, + }) + } + return stats +} + +func (s *rpcStats) drop(token string) { + atomic.AddInt64(&s.numCallsStarted, 1) + s.mu.Lock() + s.numCallsDropped[token]++ + s.mu.Unlock() + atomic.AddInt64(&s.numCallsFinished, 1) +} + +func (s *rpcStats) failedToSend() { + atomic.AddInt64(&s.numCallsStarted, 1) + atomic.AddInt64(&s.numCallsFinishedWithClientFailedToSend, 1) + atomic.AddInt64(&s.numCallsFinished, 1) +} + +func (s *rpcStats) knownReceived() { + atomic.AddInt64(&s.numCallsStarted, 1) + atomic.AddInt64(&s.numCallsFinishedKnownReceived, 1) + atomic.AddInt64(&s.numCallsFinished, 1) +} + +type errPicker struct { + // Pick always returns this err. + err error +} + +func (p *errPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + return nil, nil, p.err +} + +// rrPicker does roundrobin on subConns. It's typically used when there's no +// response from remote balancer, and grpclb falls back to the resolved +// backends. +// +// It guaranteed that len(subConns) > 0. +type rrPicker struct { + mu sync.Mutex + subConns []balancer.SubConn // The subConns that were READY when taking the snapshot. + subConnsNext int +} + +func newRRPicker(readySCs []balancer.SubConn) *rrPicker { + return &rrPicker{ + subConns: readySCs, + subConnsNext: grpcrand.Intn(len(readySCs)), + } +} + +func (p *rrPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + p.mu.Lock() + defer p.mu.Unlock() + sc := p.subConns[p.subConnsNext] + p.subConnsNext = (p.subConnsNext + 1) % len(p.subConns) + return sc, nil, nil +} + +// lbPicker does two layers of picks: +// +// First layer: roundrobin on all servers in serverList, including drops and backends. +// - If it picks a drop, the RPC will fail as being dropped. +// - If it picks a backend, do a second layer pick to pick the real backend. +// +// Second layer: roundrobin on all READY backends. +// +// It's guaranteed that len(serverList) > 0. +type lbPicker struct { + mu sync.Mutex + serverList []*lbpb.Server + serverListNext int + subConns []balancer.SubConn // The subConns that were READY when taking the snapshot. 
+ subConnsNext int + + stats *rpcStats +} + +func newLBPicker(serverList []*lbpb.Server, readySCs []balancer.SubConn, stats *rpcStats) *lbPicker { + return &lbPicker{ + serverList: serverList, + subConns: readySCs, + subConnsNext: grpcrand.Intn(len(readySCs)), + stats: stats, + } +} + +func (p *lbPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + p.mu.Lock() + defer p.mu.Unlock() + + // Layer one roundrobin on serverList. + s := p.serverList[p.serverListNext] + p.serverListNext = (p.serverListNext + 1) % len(p.serverList) + + // If it's a drop, return an error and fail the RPC. + if s.Drop { + p.stats.drop(s.LoadBalanceToken) + return nil, nil, status.Errorf(codes.Unavailable, "request dropped by grpclb") + } + + // If not a drop but there's no ready subConns. + if len(p.subConns) <= 0 { + return nil, nil, balancer.ErrNoSubConnAvailable + } + + // Return the next ready subConn in the list, also collect rpc stats. + sc := p.subConns[p.subConnsNext] + p.subConnsNext = (p.subConnsNext + 1) % len(p.subConns) + done := func(info balancer.DoneInfo) { + if !info.BytesSent { + p.stats.failedToSend() + } else if info.BytesReceived { + p.stats.knownReceived() + } + } + return sc, done, nil +} + +func (p *lbPicker) updateReadySCs(readySCs []balancer.SubConn) { + p.mu.Lock() + defer p.mu.Unlock() + + p.subConns = readySCs + p.subConnsNext = p.subConnsNext % len(readySCs) +} diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_remote_balancer.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_remote_balancer.go new file mode 100644 index 0000000..7ed886f --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_remote_balancer.go @@ -0,0 +1,341 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpclb + +import ( + "context" + "fmt" + "io" + "net" + "reflect" + "time" + + timestamppb "github.com/golang/protobuf/ptypes/timestamp" + "google.golang.org/grpc" + "google.golang.org/grpc/balancer" + lbpb "google.golang.org/grpc/balancer/grpclb/grpc_lb_v1" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/resolver" +) + +// processServerList updates balaner's internal state, create/remove SubConns +// and regenerates picker using the received serverList. +func (lb *lbBalancer) processServerList(l *lbpb.ServerList) { + if grpclog.V(2) { + grpclog.Infof("lbBalancer: processing server list: %+v", l) + } + lb.mu.Lock() + defer lb.mu.Unlock() + + // Set serverListReceived to true so fallback will not take effect if it has + // not hit timeout. + lb.serverListReceived = true + + // If the new server list == old server list, do nothing. 
+ if reflect.DeepEqual(lb.fullServerList, l.Servers) { + if grpclog.V(2) { + grpclog.Infof("lbBalancer: new serverlist same as the previous one, ignoring") + } + return + } + lb.fullServerList = l.Servers + + var backendAddrs []resolver.Address + for i, s := range l.Servers { + if s.Drop { + continue + } + + md := metadata.Pairs(lbTokeyKey, s.LoadBalanceToken) + ip := net.IP(s.IpAddress) + ipStr := ip.String() + if ip.To4() == nil { + // Add square brackets to ipv6 addresses, otherwise net.Dial() and + // net.SplitHostPort() will return too many colons error. + ipStr = fmt.Sprintf("[%s]", ipStr) + } + addr := resolver.Address{ + Addr: fmt.Sprintf("%s:%d", ipStr, s.Port), + Metadata: &md, + } + if grpclog.V(2) { + grpclog.Infof("lbBalancer: server list entry[%d]: ipStr:|%s|, port:|%d|, load balancer token:|%v|", + i, ipStr, s.Port, s.LoadBalanceToken) + } + backendAddrs = append(backendAddrs, addr) + } + + // Call refreshSubConns to create/remove SubConns. If we are in fallback, + // this is also exiting fallback. + lb.refreshSubConns(backendAddrs, true) +} + +// refreshSubConns creates/removes SubConns with backendAddrs, and refreshes +// balancer state and picker. +// +// Caller must hold lb.mu. +func (lb *lbBalancer) refreshSubConns(backendAddrs []resolver.Address, fromGRPCLBServer bool) { + defer func() { + // Regenerate and update picker after refreshing subconns because with + // cache, even if SubConn was newed/removed, there might be no state + // changes (the subconn will be kept in cache, not actually + // newed/removed). + lb.updateStateAndPicker(true, true) + }() + + lb.inFallback = !fromGRPCLBServer + + opts := balancer.NewSubConnOptions{} + if fromGRPCLBServer { + opts.CredsBundle = lb.grpclbBackendCreds + } + + lb.backendAddrs = nil + + if lb.usePickFirst { + var sc balancer.SubConn + for _, sc = range lb.subConns { + break + } + if sc != nil { + sc.UpdateAddresses(backendAddrs) + sc.Connect() + return + } + // This bypasses the cc wrapper with SubConn cache. + sc, err := lb.cc.cc.NewSubConn(backendAddrs, opts) + if err != nil { + grpclog.Warningf("grpclb: failed to create new SubConn: %v", err) + return + } + sc.Connect() + lb.subConns[backendAddrs[0]] = sc + lb.scStates[sc] = connectivity.Idle + return + } + + // addrsSet is the set converted from backendAddrs, it's used to quick + // lookup for an address. + addrsSet := make(map[resolver.Address]struct{}) + // Create new SubConns. + for _, addr := range backendAddrs { + addrWithoutMD := addr + addrWithoutMD.Metadata = nil + addrsSet[addrWithoutMD] = struct{}{} + lb.backendAddrs = append(lb.backendAddrs, addrWithoutMD) + + if _, ok := lb.subConns[addrWithoutMD]; !ok { + // Use addrWithMD to create the SubConn. + sc, err := lb.cc.NewSubConn([]resolver.Address{addr}, opts) + if err != nil { + grpclog.Warningf("grpclb: failed to create new SubConn: %v", err) + continue + } + lb.subConns[addrWithoutMD] = sc // Use the addr without MD as key for the map. + if _, ok := lb.scStates[sc]; !ok { + // Only set state of new sc to IDLE. The state could already be + // READY for cached SubConns. + lb.scStates[sc] = connectivity.Idle + } + sc.Connect() + } + } + + for a, sc := range lb.subConns { + // a was removed by resolver. + if _, ok := addrsSet[a]; !ok { + lb.cc.RemoveSubConn(sc) + delete(lb.subConns, a) + // Keep the state of this sc in b.scStates until sc's state becomes Shutdown. + // The entry will be deleted in HandleSubConnStateChange. 
+ } + } +} + +func (lb *lbBalancer) readServerList(s *balanceLoadClientStream) error { + for { + reply, err := s.Recv() + if err != nil { + if err == io.EOF { + return errServerTerminatedConnection + } + return fmt.Errorf("grpclb: failed to recv server list: %v", err) + } + if serverList := reply.GetServerList(); serverList != nil { + lb.processServerList(serverList) + } + } +} + +func (lb *lbBalancer) sendLoadReport(s *balanceLoadClientStream, interval time.Duration) { + ticker := time.NewTicker(interval) + defer ticker.Stop() + for { + select { + case <-ticker.C: + case <-s.Context().Done(): + return + } + stats := lb.clientStats.toClientStats() + t := time.Now() + stats.Timestamp = ×tamppb.Timestamp{ + Seconds: t.Unix(), + Nanos: int32(t.Nanosecond()), + } + if err := s.Send(&lbpb.LoadBalanceRequest{ + LoadBalanceRequestType: &lbpb.LoadBalanceRequest_ClientStats{ + ClientStats: stats, + }, + }); err != nil { + return + } + } +} + +func (lb *lbBalancer) callRemoteBalancer() (backoff bool, _ error) { + lbClient := &loadBalancerClient{cc: lb.ccRemoteLB} + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + stream, err := lbClient.BalanceLoad(ctx, grpc.WaitForReady(true)) + if err != nil { + return true, fmt.Errorf("grpclb: failed to perform RPC to the remote balancer %v", err) + } + lb.mu.Lock() + lb.remoteBalancerConnected = true + lb.mu.Unlock() + + // grpclb handshake on the stream. + initReq := &lbpb.LoadBalanceRequest{ + LoadBalanceRequestType: &lbpb.LoadBalanceRequest_InitialRequest{ + InitialRequest: &lbpb.InitialLoadBalanceRequest{ + Name: lb.target, + }, + }, + } + if err := stream.Send(initReq); err != nil { + return true, fmt.Errorf("grpclb: failed to send init request: %v", err) + } + reply, err := stream.Recv() + if err != nil { + return true, fmt.Errorf("grpclb: failed to recv init response: %v", err) + } + initResp := reply.GetInitialResponse() + if initResp == nil { + return true, fmt.Errorf("grpclb: reply from remote balancer did not include initial response") + } + if initResp.LoadBalancerDelegate != "" { + return true, fmt.Errorf("grpclb: Delegation is not supported") + } + + go func() { + if d := convertDuration(initResp.ClientStatsReportInterval); d > 0 { + lb.sendLoadReport(stream, d) + } + }() + // No backoff if init req/resp handshake was successful. + return false, lb.readServerList(stream) +} + +func (lb *lbBalancer) watchRemoteBalancer() { + var retryCount int + for { + doBackoff, err := lb.callRemoteBalancer() + select { + case <-lb.doneCh: + return + default: + if err != nil { + if err == errServerTerminatedConnection { + grpclog.Info(err) + } else { + grpclog.Warning(err) + } + } + } + // Trigger a re-resolve when the stream errors. + lb.cc.cc.ResolveNow(resolver.ResolveNowOption{}) + + lb.mu.Lock() + lb.remoteBalancerConnected = false + lb.fullServerList = nil + // Enter fallback when connection to remote balancer is lost, and the + // aggregated state is not Ready. + if !lb.inFallback && lb.state != connectivity.Ready { + // Entering fallback. 
+ lb.refreshSubConns(lb.resolvedBackendAddrs, false) + } + lb.mu.Unlock() + + if !doBackoff { + retryCount = 0 + continue + } + + timer := time.NewTimer(lb.backoff.Backoff(retryCount)) + select { + case <-timer.C: + case <-lb.doneCh: + timer.Stop() + return + } + retryCount++ + } +} + +func (lb *lbBalancer) dialRemoteLB(remoteLBName string) { + var dopts []grpc.DialOption + if creds := lb.opt.DialCreds; creds != nil { + if err := creds.OverrideServerName(remoteLBName); err == nil { + dopts = append(dopts, grpc.WithTransportCredentials(creds)) + } else { + grpclog.Warningf("grpclb: failed to override the server name in the credentials: %v, using Insecure", err) + dopts = append(dopts, grpc.WithInsecure()) + } + } else if bundle := lb.grpclbClientConnCreds; bundle != nil { + dopts = append(dopts, grpc.WithCredentialsBundle(bundle)) + } else { + dopts = append(dopts, grpc.WithInsecure()) + } + if lb.opt.Dialer != nil { + dopts = append(dopts, grpc.WithContextDialer(lb.opt.Dialer)) + } + // Explicitly set pickfirst as the balancer. + dopts = append(dopts, grpc.WithBalancerName(grpc.PickFirstBalancerName)) + wrb := internal.WithResolverBuilder.(func(resolver.Builder) grpc.DialOption) + dopts = append(dopts, wrb(lb.manualResolver)) + if channelz.IsOn() { + dopts = append(dopts, grpc.WithChannelzParentID(lb.opt.ChannelzParentID)) + } + + // DialContext using manualResolver.Scheme, which is a random scheme + // generated when init grpclb. The target scheme here is not important. + // + // The grpc dial target will be used by the creds (ALTS) as the authority, + // so it has to be set to remoteLBName that comes from resolver. + cc, err := grpc.DialContext(context.Background(), remoteLBName, dopts...) + if err != nil { + grpclog.Fatalf("failed to dial: %v", err) + } + lb.ccRemoteLB = cc + go lb.watchRemoteBalancer() +} diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_util.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_util.go new file mode 100644 index 0000000..2663c37 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb_util.go @@ -0,0 +1,209 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpclb + +import ( + "fmt" + "sync" + "time" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/resolver" +) + +// The parent ClientConn should re-resolve when grpclb loses connection to the +// remote balancer. When the ClientConn inside grpclb gets a TransientFailure, +// it calls lbManualResolver.ResolveNow(), which calls parent ClientConn's +// ResolveNow, and eventually results in re-resolve happening in parent +// ClientConn's resolver (DNS for example). 
+// +// parent +// ClientConn +// +-----------------------------------------------------------------+ +// | parent +---------------------------------+ | +// | DNS ClientConn | grpclb | | +// | resolver balancerWrapper | | | +// | + + | grpclb grpclb | | +// | | | | ManualResolver ClientConn | | +// | | | | + + | | +// | | | | | | Transient | | +// | | | | | | Failure | | +// | | | | | <--------- | | | +// | | | <--------------- | ResolveNow | | | +// | | <--------- | ResolveNow | | | | | +// | | ResolveNow | | | | | | +// | | | | | | | | +// | + + | + + | | +// | +---------------------------------+ | +// +-----------------------------------------------------------------+ + +// lbManualResolver is used by the ClientConn inside grpclb. It's a manual +// resolver with a special ResolveNow() function. +// +// When ResolveNow() is called, it calls ResolveNow() on the parent ClientConn, +// so when grpclb client lose contact with remote balancers, the parent +// ClientConn's resolver will re-resolve. +type lbManualResolver struct { + scheme string + ccr resolver.ClientConn + + ccb balancer.ClientConn +} + +func (r *lbManualResolver) Build(_ resolver.Target, cc resolver.ClientConn, _ resolver.BuildOption) (resolver.Resolver, error) { + r.ccr = cc + return r, nil +} + +func (r *lbManualResolver) Scheme() string { + return r.scheme +} + +// ResolveNow calls resolveNow on the parent ClientConn. +func (r *lbManualResolver) ResolveNow(o resolver.ResolveNowOption) { + r.ccb.ResolveNow(o) +} + +// Close is a noop for Resolver. +func (*lbManualResolver) Close() {} + +// UpdateState calls cc.UpdateState. +func (r *lbManualResolver) UpdateState(s resolver.State) { + r.ccr.UpdateState(s) +} + +const subConnCacheTime = time.Second * 10 + +// lbCacheClientConn is a wrapper balancer.ClientConn with a SubConn cache. +// SubConns will be kept in cache for subConnCacheTime before being removed. +// +// Its new and remove methods are updated to do cache first. +type lbCacheClientConn struct { + cc balancer.ClientConn + timeout time.Duration + + mu sync.Mutex + // subConnCache only keeps subConns that are being deleted. + subConnCache map[resolver.Address]*subConnCacheEntry + subConnToAddr map[balancer.SubConn]resolver.Address +} + +type subConnCacheEntry struct { + sc balancer.SubConn + + cancel func() + abortDeleting bool +} + +func newLBCacheClientConn(cc balancer.ClientConn) *lbCacheClientConn { + return &lbCacheClientConn{ + cc: cc, + timeout: subConnCacheTime, + subConnCache: make(map[resolver.Address]*subConnCacheEntry), + subConnToAddr: make(map[balancer.SubConn]resolver.Address), + } +} + +func (ccc *lbCacheClientConn) NewSubConn(addrs []resolver.Address, opts balancer.NewSubConnOptions) (balancer.SubConn, error) { + if len(addrs) != 1 { + return nil, fmt.Errorf("grpclb calling NewSubConn with addrs of length %v", len(addrs)) + } + addrWithoutMD := addrs[0] + addrWithoutMD.Metadata = nil + + ccc.mu.Lock() + defer ccc.mu.Unlock() + if entry, ok := ccc.subConnCache[addrWithoutMD]; ok { + // If entry is in subConnCache, the SubConn was being deleted. + // cancel function will never be nil. 
+ entry.cancel() + delete(ccc.subConnCache, addrWithoutMD) + return entry.sc, nil + } + + scNew, err := ccc.cc.NewSubConn(addrs, opts) + if err != nil { + return nil, err + } + + ccc.subConnToAddr[scNew] = addrWithoutMD + return scNew, nil +} + +func (ccc *lbCacheClientConn) RemoveSubConn(sc balancer.SubConn) { + ccc.mu.Lock() + defer ccc.mu.Unlock() + addr, ok := ccc.subConnToAddr[sc] + if !ok { + return + } + + if entry, ok := ccc.subConnCache[addr]; ok { + if entry.sc != sc { + // This could happen if NewSubConn was called multiple times for the + // same address, and those SubConns are all removed. We remove sc + // immediately here. + delete(ccc.subConnToAddr, sc) + ccc.cc.RemoveSubConn(sc) + } + return + } + + entry := &subConnCacheEntry{ + sc: sc, + } + ccc.subConnCache[addr] = entry + + timer := time.AfterFunc(ccc.timeout, func() { + ccc.mu.Lock() + if entry.abortDeleting { + return + } + ccc.cc.RemoveSubConn(sc) + delete(ccc.subConnToAddr, sc) + delete(ccc.subConnCache, addr) + ccc.mu.Unlock() + }) + entry.cancel = func() { + if !timer.Stop() { + // If stop was not successful, the timer has fired (this can only + // happen in a race). But the deleting function is blocked on ccc.mu + // because the mutex was held by the caller of this function. + // + // Set abortDeleting to true to abort the deleting function. When + // the lock is released, the deleting function will acquire the + // lock, check the value of abortDeleting and return. + entry.abortDeleting = true + } + } +} + +func (ccc *lbCacheClientConn) UpdateBalancerState(s connectivity.State, p balancer.Picker) { + ccc.cc.UpdateBalancerState(s, p) +} + +func (ccc *lbCacheClientConn) close() { + ccc.mu.Lock() + // Only cancel all existing timers. There's no need to remove SubConns. + for _, entry := range ccc.subConnCache { + entry.cancel() + } + ccc.mu.Unlock() +} diff --git a/vendor/google.golang.org/grpc/balancer/roundrobin/roundrobin.go b/vendor/google.golang.org/grpc/balancer/roundrobin/roundrobin.go new file mode 100644 index 0000000..29f7a4d --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/roundrobin/roundrobin.go @@ -0,0 +1,83 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package roundrobin defines a roundrobin balancer. Roundrobin balancer is +// installed as one of the default balancers in gRPC, users don't need to +// explicitly install this balancer. +package roundrobin + +import ( + "context" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/base" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/resolver" +) + +// Name is the name of round_robin balancer. +const Name = "round_robin" + +// newBuilder creates a new roundrobin balancer builder. 
+func newBuilder() balancer.Builder { + return base.NewBalancerBuilderWithConfig(Name, &rrPickerBuilder{}, base.Config{HealthCheck: true}) +} + +func init() { + balancer.Register(newBuilder()) +} + +type rrPickerBuilder struct{} + +func (*rrPickerBuilder) Build(readySCs map[resolver.Address]balancer.SubConn) balancer.Picker { + grpclog.Infof("roundrobinPicker: newPicker called with readySCs: %v", readySCs) + if len(readySCs) == 0 { + return base.NewErrPicker(balancer.ErrNoSubConnAvailable) + } + var scs []balancer.SubConn + for _, sc := range readySCs { + scs = append(scs, sc) + } + return &rrPicker{ + subConns: scs, + // Start at a random index, as the same RR balancer rebuilds a new + // picker when SubConn states change, and we don't want to apply excess + // load to the first server in the list. + next: grpcrand.Intn(len(scs)), + } +} + +type rrPicker struct { + // subConns is the snapshot of the roundrobin balancer when this picker was + // created. The slice is immutable. Each Get() will do a round robin + // selection from it and return the selected SubConn. + subConns []balancer.SubConn + + mu sync.Mutex + next int +} + +func (p *rrPicker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + p.mu.Lock() + sc := p.subConns[p.next] + p.next = (p.next + 1) % len(p.subConns) + p.mu.Unlock() + return sc, nil, nil +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/balancergroup.go b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/balancergroup.go new file mode 100644 index 0000000..77b185c --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/balancergroup.go @@ -0,0 +1,348 @@ +// +build go1.12 + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package edsbalancer + +import ( + "context" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/base" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +type pickerState struct { + weight uint32 + picker balancer.Picker + state connectivity.State +} + +// balancerGroup takes a list of balancers, and make then into one balancer. +// +// Note that this struct doesn't implement balancer.Balancer, because it's not +// intended to be used directly as a balancer. It's expected to be used as a +// sub-balancer manager by a high level balancer. +// +// Updates from ClientConn are forwarded to sub-balancers +// - service config update +// - Not implemented +// - address update +// - subConn state change +// - find the corresponding balancer and forward +// +// Actions from sub-balances are forwarded to parent ClientConn +// - new/remove SubConn +// - picker update and health states change +// - sub-pickers are grouped into a group-picker +// - aggregated connectivity state is the overall state of all pickers. 
+// - resolveNow +type balancerGroup struct { + cc balancer.ClientConn + + mu sync.Mutex + idToBalancer map[string]balancer.Balancer + scToID map[balancer.SubConn]string + + pickerMu sync.Mutex + // All balancer IDs exist as keys in this map. If an ID is not in map, it's + // either removed or never added. + idToPickerState map[string]*pickerState +} + +func newBalancerGroup(cc balancer.ClientConn) *balancerGroup { + return &balancerGroup{ + cc: cc, + + scToID: make(map[balancer.SubConn]string), + idToBalancer: make(map[string]balancer.Balancer), + idToPickerState: make(map[string]*pickerState), + } +} + +// add adds a balancer built by builder to the group, with given id and weight. +func (bg *balancerGroup) add(id string, weight uint32, builder balancer.Builder) { + bg.mu.Lock() + if _, ok := bg.idToBalancer[id]; ok { + bg.mu.Unlock() + grpclog.Warningf("balancer group: adding a balancer with existing ID: %s", id) + return + } + bg.mu.Unlock() + bgcc := &balancerGroupCC{ + id: id, + group: bg, + } + b := builder.Build(bgcc, balancer.BuildOptions{}) + bg.mu.Lock() + bg.idToBalancer[id] = b + bg.mu.Unlock() + + bg.pickerMu.Lock() + bg.idToPickerState[id] = &pickerState{ + weight: weight, + // Start everything in IDLE. It's doesn't affect the overall state + // because we don't count IDLE when aggregating (as opposite to e.g. + // READY, 1 READY results in overall READY). + state: connectivity.Idle, + } + bg.pickerMu.Unlock() +} + +// remove removes the balancer with id from the group, and closes the balancer. +// +// It also removes the picker generated from this balancer from the picker +// group. It always results in a picker update. +func (bg *balancerGroup) remove(id string) { + bg.mu.Lock() + // Close balancer. + if b, ok := bg.idToBalancer[id]; ok { + b.Close() + delete(bg.idToBalancer, id) + } + // Remove SubConns. + for sc, bid := range bg.scToID { + if bid == id { + bg.cc.RemoveSubConn(sc) + delete(bg.scToID, sc) + } + } + bg.mu.Unlock() + + bg.pickerMu.Lock() + // Remove id and picker from picker map. This also results in future updates + // for this ID to be ignored. + delete(bg.idToPickerState, id) + // Update state and picker to reflect the changes. + bg.cc.UpdateBalancerState(buildPickerAndState(bg.idToPickerState)) + bg.pickerMu.Unlock() +} + +// changeWeight changes the weight of the balancer. +// +// NOTE: It always results in a picker update now. This probably isn't +// necessary. But it seems better to do the update because it's a change in the +// picker (which is balancer's snapshot). +func (bg *balancerGroup) changeWeight(id string, newWeight uint32) { + bg.pickerMu.Lock() + defer bg.pickerMu.Unlock() + pState, ok := bg.idToPickerState[id] + if !ok { + return + } + if pState.weight == newWeight { + return + } + pState.weight = newWeight + // Update state and picker to reflect the changes. + bg.cc.UpdateBalancerState(buildPickerAndState(bg.idToPickerState)) +} + +// Following are actions from the parent grpc.ClientConn, forward to sub-balancers. + +// SubConn state change: find the corresponding balancer and then forward. +func (bg *balancerGroup) handleSubConnStateChange(sc balancer.SubConn, state connectivity.State) { + grpclog.Infof("balancer group: handle subconn state change: %p, %v", sc, state) + bg.mu.Lock() + var b balancer.Balancer + if id, ok := bg.scToID[sc]; ok { + if state == connectivity.Shutdown { + // Only delete sc from the map when state changed to Shutdown. 
+ delete(bg.scToID, sc) + } + b = bg.idToBalancer[id] + } + bg.mu.Unlock() + if b == nil { + grpclog.Infof("balancer group: balancer not found for sc state change") + return + } + if ub, ok := b.(balancer.V2Balancer); ok { + ub.UpdateSubConnState(sc, balancer.SubConnState{ConnectivityState: state}) + } else { + b.HandleSubConnStateChange(sc, state) + } +} + +// Address change: forward to balancer. +func (bg *balancerGroup) handleResolvedAddrs(id string, addrs []resolver.Address) { + bg.mu.Lock() + b, ok := bg.idToBalancer[id] + bg.mu.Unlock() + if !ok { + grpclog.Infof("balancer group: balancer with id %q not found", id) + return + } + if ub, ok := b.(balancer.V2Balancer); ok { + ub.UpdateResolverState(resolver.State{Addresses: addrs}) + } else { + b.HandleResolvedAddrs(addrs, nil) + } +} + +// TODO: handleServiceConfig() +// +// For BNS address for slicer, comes from endpoint.Metadata. It will be sent +// from parent to sub-balancers as service config. + +// Following are actions from sub-balancers, forward to ClientConn. + +// newSubConn: forward to ClientConn, and also create a map from sc to balancer, +// so state update will find the right balancer. +// +// One note about removing SubConn: only forward to ClientConn, but not delete +// from map. Delete sc from the map only when state changes to Shutdown. Since +// it's just forwarding the action, there's no need for a removeSubConn() +// wrapper function. +func (bg *balancerGroup) newSubConn(id string, addrs []resolver.Address, opts balancer.NewSubConnOptions) (balancer.SubConn, error) { + sc, err := bg.cc.NewSubConn(addrs, opts) + if err != nil { + return nil, err + } + bg.mu.Lock() + bg.scToID[sc] = id + bg.mu.Unlock() + return sc, nil +} + +// updateBalancerState: create an aggregated picker and an aggregated +// connectivity state, then forward to ClientConn. +func (bg *balancerGroup) updateBalancerState(id string, state connectivity.State, picker balancer.Picker) { + grpclog.Infof("balancer group: update balancer state: %v, %v, %p", id, state, picker) + bg.pickerMu.Lock() + defer bg.pickerMu.Unlock() + pickerSt, ok := bg.idToPickerState[id] + if !ok { + // All state starts in IDLE. If ID is not in map, it's either removed, + // or never existed. + grpclog.Infof("balancer group: pickerState not found when update picker/state") + return + } + pickerSt.picker = picker + pickerSt.state = state + bg.cc.UpdateBalancerState(buildPickerAndState(bg.idToPickerState)) +} + +func (bg *balancerGroup) close() { + bg.mu.Lock() + for _, b := range bg.idToBalancer { + b.Close() + } + // Also remove all SubConns. 
+ for sc := range bg.scToID { + bg.cc.RemoveSubConn(sc) + } + bg.mu.Unlock() +} + +func buildPickerAndState(m map[string]*pickerState) (connectivity.State, balancer.Picker) { + var readyN, connectingN int + readyPickerWithWeights := make([]pickerState, 0, len(m)) + for _, ps := range m { + switch ps.state { + case connectivity.Ready: + readyN++ + readyPickerWithWeights = append(readyPickerWithWeights, *ps) + case connectivity.Connecting: + connectingN++ + } + } + var aggregatedState connectivity.State + switch { + case readyN > 0: + aggregatedState = connectivity.Ready + case connectingN > 0: + aggregatedState = connectivity.Connecting + default: + aggregatedState = connectivity.TransientFailure + } + if aggregatedState == connectivity.TransientFailure { + return aggregatedState, base.NewErrPicker(balancer.ErrTransientFailure) + } + return aggregatedState, newPickerGroup(readyPickerWithWeights) +} + +type pickerGroup struct { + readyPickerWithWeights []pickerState + length int + + mu sync.Mutex + idx int // The index of the picker that will be picked + count uint32 // The number of times the current picker has been picked. +} + +// newPickerGroup takes pickers with weights, and group them into one picker. +// +// Note it only takes ready pickers. The map shouldn't contain non-ready +// pickers. +// +// TODO: (bg) confirm this is the expected behavior: non-ready balancers should +// be ignored when picking. Only ready balancers are picked. +func newPickerGroup(readyPickerWithWeights []pickerState) *pickerGroup { + return &pickerGroup{ + readyPickerWithWeights: readyPickerWithWeights, + length: len(readyPickerWithWeights), + } +} + +func (pg *pickerGroup) Pick(ctx context.Context, opts balancer.PickOptions) (conn balancer.SubConn, done func(balancer.DoneInfo), err error) { + if pg.length <= 0 { + return nil, nil, balancer.ErrNoSubConnAvailable + } + // TODO: the WRR algorithm needs a design. + // MAYBE: move WRR implmentation to util.go as a separate struct. + pg.mu.Lock() + pickerSt := pg.readyPickerWithWeights[pg.idx] + p := pickerSt.picker + pg.count++ + if pg.count >= pickerSt.weight { + pg.idx = (pg.idx + 1) % pg.length + pg.count = 0 + } + pg.mu.Unlock() + return p.Pick(ctx, opts) +} + +// balancerGroupCC implements the balancer.ClientConn API and get passed to each +// sub-balancer. It contains the sub-balancer ID, so the parent balancer can +// keep track of SubConn/pickers and the sub-balancers they belong to. +// +// Some of the actions are forwarded to the parent ClientConn with no change. +// Some are forward to balancer group with the sub-balancer ID. 
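To make the forwarding described above concrete, here is a minimal, hypothetical driving sequence for balancerGroup, written as it would appear inside this package (parentCC stands for whatever balancer.ClientConn the enclosing balancer was built with; the IDs, weights, and addresses are placeholders):

    // One sub-balancer per locality, identified by an opaque ID and a weight.
    bg := newBalancerGroup(parentCC)
    bg.add("locality-a", 2, balancer.Get(roundrobin.Name))
    bg.add("locality-b", 1, balancer.Get(roundrobin.Name))

    // Feed each sub-balancer its addresses; SubConns created by the
    // sub-balancers are recorded in scToID so state changes can be routed back.
    bg.handleResolvedAddrs("locality-a", []resolver.Address{{Addr: "10.0.0.1:80"}, {Addr: "10.0.0.2:80"}})
    bg.handleResolvedAddrs("locality-b", []resolver.Address{{Addr: "10.0.1.1:80"}})

    // Later updates: reweight a locality or remove it entirely; both end in a
    // rebuilt aggregate picker via buildPickerAndState.
    bg.changeWeight("locality-a", 3)
    bg.remove("locality-b")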
+type balancerGroupCC struct { + id string + group *balancerGroup +} + +func (bgcc *balancerGroupCC) NewSubConn(addrs []resolver.Address, opts balancer.NewSubConnOptions) (balancer.SubConn, error) { + return bgcc.group.newSubConn(bgcc.id, addrs, opts) +} +func (bgcc *balancerGroupCC) RemoveSubConn(sc balancer.SubConn) { + bgcc.group.cc.RemoveSubConn(sc) +} +func (bgcc *balancerGroupCC) UpdateBalancerState(state connectivity.State, picker balancer.Picker) { + bgcc.group.updateBalancerState(bgcc.id, state, picker) +} +func (bgcc *balancerGroupCC) ResolveNow(opt resolver.ResolveNowOption) { + bgcc.group.cc.ResolveNow(opt) +} +func (bgcc *balancerGroupCC) Target() string { + return bgcc.group.cc.Target() +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/edsbalancer.go b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/edsbalancer.go new file mode 100644 index 0000000..67e3926 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/edsbalancer.go @@ -0,0 +1,309 @@ +// +build go1.12 + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Package edsbalancer implements a balancer to handle EDS responses. +package edsbalancer + +import ( + "context" + "encoding/json" + "fmt" + "net" + "reflect" + "strconv" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/roundrobin" + edspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds" + percentpb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" + "google.golang.org/grpc/status" +) + +type localityConfig struct { + weight uint32 + addrs []resolver.Address +} + +// EDSBalancer does load balancing based on the EDS responses. Note that it +// doesn't implement the balancer interface. It's intended to be used by a high +// level balancer implementation. +// +// The localities are picked as weighted round robin. A configurable child +// policy is used to manage endpoints in each locality. +type EDSBalancer struct { + balancer.ClientConn + + bg *balancerGroup + subBalancerBuilder balancer.Builder + lidToConfig map[string]*localityConfig + + pickerMu sync.Mutex + drops []*dropper + innerPicker balancer.Picker // The picker without drop support. + innerState connectivity.State // The state of the picker. +} + +// NewXDSBalancer create a new EDSBalancer. +func NewXDSBalancer(cc balancer.ClientConn) *EDSBalancer { + xdsB := &EDSBalancer{ + ClientConn: cc, + subBalancerBuilder: balancer.Get(roundrobin.Name), + + lidToConfig: make(map[string]*localityConfig), + } + // Don't start balancer group here. Start it when handling the first EDS + // response. Otherwise the balancer group will be started with round-robin, + // and if users specify a different sub-balancer, all balancers in balancer + // group will be closed and recreated when sub-balancer update happens. 
+ return xdsB +} + +// HandleChildPolicy updates the child balancers handling endpoints. Child +// policy is roundrobin by default. If the specified balancer is not installed, +// the old child balancer will be used. +// +// HandleChildPolicy and HandleEDSResponse must be called by the same goroutine. +func (xdsB *EDSBalancer) HandleChildPolicy(name string, config json.RawMessage) { + // name could come from cdsResp.GetLbPolicy().String(). LbPolicy.String() + // are all UPPER_CASE with underscore. + // + // No conversion is needed here because balancer package converts all names + // into lower_case before registering/looking up. + xdsB.updateSubBalancerName(name) + // TODO: (eds) send balancer config to the new child balancers. +} + +func (xdsB *EDSBalancer) updateSubBalancerName(subBalancerName string) { + if xdsB.subBalancerBuilder.Name() == subBalancerName { + return + } + newSubBalancerBuilder := balancer.Get(subBalancerName) + if newSubBalancerBuilder == nil { + grpclog.Infof("EDSBalancer: failed to find balancer with name %q, keep using %q", subBalancerName, xdsB.subBalancerBuilder.Name()) + return + } + xdsB.subBalancerBuilder = newSubBalancerBuilder + if xdsB.bg != nil { + // xdsB.bg == nil until the first EDS response is handled. There's no + // need to update balancer group before that. + for id, config := range xdsB.lidToConfig { + // TODO: (eds) add support to balancer group to support smoothly + // switching sub-balancers (keep old balancer around until new + // balancer becomes ready). + xdsB.bg.remove(id) + xdsB.bg.add(id, config.weight, xdsB.subBalancerBuilder) + xdsB.bg.handleResolvedAddrs(id, config.addrs) + } + } +} + +// updateDrops compares new drop policies with the old. If they are different, +// it updates the drop policies and send ClientConn an updated picker. +func (xdsB *EDSBalancer) updateDrops(dropPolicies []*edspb.ClusterLoadAssignment_Policy_DropOverload) { + var ( + newDrops []*dropper + dropsChanged bool + ) + for i, dropPolicy := range dropPolicies { + percentage := dropPolicy.GetDropPercentage() + var ( + numerator = percentage.GetNumerator() + denominator uint32 + ) + switch percentage.GetDenominator() { + case percentpb.FractionalPercent_HUNDRED: + denominator = 100 + case percentpb.FractionalPercent_TEN_THOUSAND: + denominator = 10000 + case percentpb.FractionalPercent_MILLION: + denominator = 1000000 + } + newDrops = append(newDrops, newDropper(numerator, denominator)) + + // The following reading xdsB.drops doesn't need mutex because it can only + // be updated by the code following. + if dropsChanged { + continue + } + if i >= len(xdsB.drops) { + dropsChanged = true + continue + } + if oldDrop := xdsB.drops[i]; numerator != oldDrop.numerator || denominator != oldDrop.denominator { + dropsChanged = true + } + } + if dropsChanged { + xdsB.pickerMu.Lock() + xdsB.drops = newDrops + if xdsB.innerPicker != nil { + // Update picker with old inner picker, new drops. + xdsB.ClientConn.UpdateBalancerState(xdsB.innerState, newDropPicker(xdsB.innerPicker, newDrops)) + } + xdsB.pickerMu.Unlock() + } +} + +// HandleEDSResponse handles the EDS response and creates/deletes localities and +// SubConns. It also handles drops. +// +// HandleCDSResponse and HandleEDSResponse must be called by the same goroutine. +func (xdsB *EDSBalancer) HandleEDSResponse(edsResp *edspb.ClusterLoadAssignment) { + // Create balancer group if it's never created (this is the first EDS + // response). 
+ if xdsB.bg == nil { + xdsB.bg = newBalancerGroup(xdsB) + } + + // TODO: Unhandled fields from EDS response: + // - edsResp.GetPolicy().GetOverprovisioningFactor() + // - locality.GetPriority() + // - lbEndpoint.GetMetadata(): contains BNS name, send to sub-balancers + // - as service config or as resolved address + // - if socketAddress is not ip:port + // - socketAddress.GetNamedPort(), socketAddress.GetResolverName() + // - resolve endpoint's name with another resolver + + xdsB.updateDrops(edsResp.GetPolicy().GetDropOverloads()) + + // newLocalitiesSet contains all names of localitis in the new EDS response. + // It's used to delete localities that are removed in the new EDS response. + newLocalitiesSet := make(map[string]struct{}) + for _, locality := range edsResp.Endpoints { + // One balancer for each locality. + + l := locality.GetLocality() + if l == nil { + grpclog.Warningf("xds: received LocalityLbEndpoints with Locality") + continue + } + lid := fmt.Sprintf("%s-%s-%s", l.Region, l.Zone, l.SubZone) + newLocalitiesSet[lid] = struct{}{} + + newWeight := locality.GetLoadBalancingWeight().GetValue() + if newWeight == 0 { + // Weight can never be 0. + newWeight = 1 + } + + var newAddrs []resolver.Address + for _, lbEndpoint := range locality.GetLbEndpoints() { + socketAddress := lbEndpoint.GetEndpoint().GetAddress().GetSocketAddress() + newAddrs = append(newAddrs, resolver.Address{ + Addr: net.JoinHostPort(socketAddress.GetAddress(), strconv.Itoa(int(socketAddress.GetPortValue()))), + }) + } + var weightChanged, addrsChanged bool + config, ok := xdsB.lidToConfig[lid] + if !ok { + // A new balancer, add it to balancer group and balancer map. + xdsB.bg.add(lid, newWeight, xdsB.subBalancerBuilder) + config = &localityConfig{ + weight: newWeight, + } + xdsB.lidToConfig[lid] = config + + // weightChanged is false for new locality, because there's no need to + // update weight in bg. + addrsChanged = true + } else { + // Compare weight and addrs. + if config.weight != newWeight { + weightChanged = true + } + if !reflect.DeepEqual(config.addrs, newAddrs) { + addrsChanged = true + } + } + + if weightChanged { + config.weight = newWeight + xdsB.bg.changeWeight(lid, newWeight) + } + + if addrsChanged { + config.addrs = newAddrs + xdsB.bg.handleResolvedAddrs(lid, newAddrs) + } + } + + // Delete localities that are removed in the latest response. + for lid := range xdsB.lidToConfig { + if _, ok := newLocalitiesSet[lid]; !ok { + xdsB.bg.remove(lid) + delete(xdsB.lidToConfig, lid) + } + } +} + +// HandleSubConnStateChange handles the state change and update pickers accordingly. +func (xdsB *EDSBalancer) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + xdsB.bg.handleSubConnStateChange(sc, s) +} + +// UpdateBalancerState overrides balancer.ClientConn to wrap the picker in a +// dropPicker. +func (xdsB *EDSBalancer) UpdateBalancerState(s connectivity.State, p balancer.Picker) { + xdsB.pickerMu.Lock() + defer xdsB.pickerMu.Unlock() + xdsB.innerPicker = p + xdsB.innerState = s + // Don't reset drops when it's a state change. + xdsB.ClientConn.UpdateBalancerState(s, newDropPicker(p, xdsB.drops)) +} + +// Close closes the balancer. 
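The droppers installed by updateDrops above follow the deterministic counter implemented in util.go further below. As a reference (not part of the vendored files; a sketch as it would run inside this package, with fmt assumed imported), a 3/5 drop rate rejects the first three calls in each period of five:

    d := newDropper(3, 5)
    for i := 0; i < 10; i++ {
        // Prints drop=true for calls 0-2 and 5-7, drop=false for calls 3-4 and 8-9.
        fmt.Printf("call %d: drop=%v\n", i, d.drop())
    }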
+func (xdsB *EDSBalancer) Close() { + xdsB.bg.close() +} + +type dropPicker struct { + drops []*dropper + p balancer.Picker +} + +func newDropPicker(p balancer.Picker, drops []*dropper) *dropPicker { + return &dropPicker{ + drops: drops, + p: p, + } +} + +func (d *dropPicker) Pick(ctx context.Context, opts balancer.PickOptions) (conn balancer.SubConn, done func(balancer.DoneInfo), err error) { + var drop bool + for _, dp := range d.drops { + // It's necessary to call drop on all droppers if the droppers are + // stateful. For example, if the second drop only drops 1/2, and only + // drops even number picks, we need to call it's drop() even if the + // first dropper already returned true. + // + // It won't be necessary if droppers are stateless, like toss a coin. + drop = drop || dp.drop() + } + if drop { + return nil, nil, status.Errorf(codes.Unavailable, "RPC is dropped") + } + // TODO: (eds) don't drop unless the inner picker is READY. Similar to + // https://github.com/grpc/grpc-go/issues/2622. + return d.p.Pick(ctx, opts) +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/util.go b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/util.go new file mode 100644 index 0000000..0b1a397 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/edsbalancer/util.go @@ -0,0 +1,58 @@ +// +build go1.12 + +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package edsbalancer + +import ( + "sync" +) + +type dropper struct { + // Drop rate will be numerator/denominator. + numerator uint32 + denominator uint32 + + mu sync.Mutex + i uint32 +} + +func newDropper(numerator, denominator uint32) *dropper { + return &dropper{ + numerator: numerator, + denominator: denominator, + } +} + +func (d *dropper) drop() (ret bool) { + d.mu.Lock() + defer d.mu.Unlock() + + // TODO: the drop algorithm needs a design. + // Currently, for drop rate 3/5: + // 0 1 2 3 4 + // d d d n n + if d.i < d.numerator { + ret = true + } + d.i++ + if d.i >= d.denominator { + d.i = 0 + } + + return +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/auth/cert/cert.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/auth/cert/cert.pb.go new file mode 100755 index 0000000..8c77424 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/auth/cert/cert.pb.go @@ -0,0 +1,1144 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: envoy/api/v2/auth/cert.proto + +package auth + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import config_source "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TlsParameters_TlsProtocol int32 + +const ( + TlsParameters_TLS_AUTO TlsParameters_TlsProtocol = 0 + TlsParameters_TLSv1_0 TlsParameters_TlsProtocol = 1 + TlsParameters_TLSv1_1 TlsParameters_TlsProtocol = 2 + TlsParameters_TLSv1_2 TlsParameters_TlsProtocol = 3 + TlsParameters_TLSv1_3 TlsParameters_TlsProtocol = 4 +) + +var TlsParameters_TlsProtocol_name = map[int32]string{ + 0: "TLS_AUTO", + 1: "TLSv1_0", + 2: "TLSv1_1", + 3: "TLSv1_2", + 4: "TLSv1_3", +} +var TlsParameters_TlsProtocol_value = map[string]int32{ + "TLS_AUTO": 0, + "TLSv1_0": 1, + "TLSv1_1": 2, + "TLSv1_2": 3, + "TLSv1_3": 4, +} + +func (x TlsParameters_TlsProtocol) String() string { + return proto.EnumName(TlsParameters_TlsProtocol_name, int32(x)) +} +func (TlsParameters_TlsProtocol) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{0, 0} +} + +type TlsParameters struct { + TlsMinimumProtocolVersion TlsParameters_TlsProtocol `protobuf:"varint,1,opt,name=tls_minimum_protocol_version,json=tlsMinimumProtocolVersion,proto3,enum=envoy.api.v2.auth.TlsParameters_TlsProtocol" json:"tls_minimum_protocol_version,omitempty"` + TlsMaximumProtocolVersion TlsParameters_TlsProtocol `protobuf:"varint,2,opt,name=tls_maximum_protocol_version,json=tlsMaximumProtocolVersion,proto3,enum=envoy.api.v2.auth.TlsParameters_TlsProtocol" json:"tls_maximum_protocol_version,omitempty"` + CipherSuites []string `protobuf:"bytes,3,rep,name=cipher_suites,json=cipherSuites,proto3" json:"cipher_suites,omitempty"` + EcdhCurves []string `protobuf:"bytes,4,rep,name=ecdh_curves,json=ecdhCurves,proto3" json:"ecdh_curves,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TlsParameters) Reset() { *m = TlsParameters{} } +func (m *TlsParameters) String() string { return proto.CompactTextString(m) } +func (*TlsParameters) ProtoMessage() {} +func (*TlsParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{0} +} +func (m *TlsParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TlsParameters.Unmarshal(m, b) +} +func (m *TlsParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TlsParameters.Marshal(b, m, deterministic) +} +func (dst *TlsParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_TlsParameters.Merge(dst, src) +} +func (m *TlsParameters) XXX_Size() int { + return xxx_messageInfo_TlsParameters.Size(m) +} +func (m *TlsParameters) XXX_DiscardUnknown() { + xxx_messageInfo_TlsParameters.DiscardUnknown(m) +} + 
+var xxx_messageInfo_TlsParameters proto.InternalMessageInfo + +func (m *TlsParameters) GetTlsMinimumProtocolVersion() TlsParameters_TlsProtocol { + if m != nil { + return m.TlsMinimumProtocolVersion + } + return TlsParameters_TLS_AUTO +} + +func (m *TlsParameters) GetTlsMaximumProtocolVersion() TlsParameters_TlsProtocol { + if m != nil { + return m.TlsMaximumProtocolVersion + } + return TlsParameters_TLS_AUTO +} + +func (m *TlsParameters) GetCipherSuites() []string { + if m != nil { + return m.CipherSuites + } + return nil +} + +func (m *TlsParameters) GetEcdhCurves() []string { + if m != nil { + return m.EcdhCurves + } + return nil +} + +type TlsCertificate struct { + CertificateChain *base.DataSource `protobuf:"bytes,1,opt,name=certificate_chain,json=certificateChain,proto3" json:"certificate_chain,omitempty"` + PrivateKey *base.DataSource `protobuf:"bytes,2,opt,name=private_key,json=privateKey,proto3" json:"private_key,omitempty"` + Password *base.DataSource `protobuf:"bytes,3,opt,name=password,proto3" json:"password,omitempty"` + OcspStaple *base.DataSource `protobuf:"bytes,4,opt,name=ocsp_staple,json=ocspStaple,proto3" json:"ocsp_staple,omitempty"` + SignedCertificateTimestamp []*base.DataSource `protobuf:"bytes,5,rep,name=signed_certificate_timestamp,json=signedCertificateTimestamp,proto3" json:"signed_certificate_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TlsCertificate) Reset() { *m = TlsCertificate{} } +func (m *TlsCertificate) String() string { return proto.CompactTextString(m) } +func (*TlsCertificate) ProtoMessage() {} +func (*TlsCertificate) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{1} +} +func (m *TlsCertificate) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TlsCertificate.Unmarshal(m, b) +} +func (m *TlsCertificate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TlsCertificate.Marshal(b, m, deterministic) +} +func (dst *TlsCertificate) XXX_Merge(src proto.Message) { + xxx_messageInfo_TlsCertificate.Merge(dst, src) +} +func (m *TlsCertificate) XXX_Size() int { + return xxx_messageInfo_TlsCertificate.Size(m) +} +func (m *TlsCertificate) XXX_DiscardUnknown() { + xxx_messageInfo_TlsCertificate.DiscardUnknown(m) +} + +var xxx_messageInfo_TlsCertificate proto.InternalMessageInfo + +func (m *TlsCertificate) GetCertificateChain() *base.DataSource { + if m != nil { + return m.CertificateChain + } + return nil +} + +func (m *TlsCertificate) GetPrivateKey() *base.DataSource { + if m != nil { + return m.PrivateKey + } + return nil +} + +func (m *TlsCertificate) GetPassword() *base.DataSource { + if m != nil { + return m.Password + } + return nil +} + +func (m *TlsCertificate) GetOcspStaple() *base.DataSource { + if m != nil { + return m.OcspStaple + } + return nil +} + +func (m *TlsCertificate) GetSignedCertificateTimestamp() []*base.DataSource { + if m != nil { + return m.SignedCertificateTimestamp + } + return nil +} + +type TlsSessionTicketKeys struct { + Keys []*base.DataSource `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TlsSessionTicketKeys) Reset() { *m = TlsSessionTicketKeys{} } +func (m *TlsSessionTicketKeys) String() string { return proto.CompactTextString(m) } +func (*TlsSessionTicketKeys) ProtoMessage() {} +func (*TlsSessionTicketKeys) 
Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{2} +} +func (m *TlsSessionTicketKeys) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TlsSessionTicketKeys.Unmarshal(m, b) +} +func (m *TlsSessionTicketKeys) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TlsSessionTicketKeys.Marshal(b, m, deterministic) +} +func (dst *TlsSessionTicketKeys) XXX_Merge(src proto.Message) { + xxx_messageInfo_TlsSessionTicketKeys.Merge(dst, src) +} +func (m *TlsSessionTicketKeys) XXX_Size() int { + return xxx_messageInfo_TlsSessionTicketKeys.Size(m) +} +func (m *TlsSessionTicketKeys) XXX_DiscardUnknown() { + xxx_messageInfo_TlsSessionTicketKeys.DiscardUnknown(m) +} + +var xxx_messageInfo_TlsSessionTicketKeys proto.InternalMessageInfo + +func (m *TlsSessionTicketKeys) GetKeys() []*base.DataSource { + if m != nil { + return m.Keys + } + return nil +} + +type CertificateValidationContext struct { + TrustedCa *base.DataSource `protobuf:"bytes,1,opt,name=trusted_ca,json=trustedCa,proto3" json:"trusted_ca,omitempty"` + VerifyCertificateSpki []string `protobuf:"bytes,3,rep,name=verify_certificate_spki,json=verifyCertificateSpki,proto3" json:"verify_certificate_spki,omitempty"` + VerifyCertificateHash []string `protobuf:"bytes,2,rep,name=verify_certificate_hash,json=verifyCertificateHash,proto3" json:"verify_certificate_hash,omitempty"` + VerifySubjectAltName []string `protobuf:"bytes,4,rep,name=verify_subject_alt_name,json=verifySubjectAltName,proto3" json:"verify_subject_alt_name,omitempty"` + RequireOcspStaple *wrappers.BoolValue `protobuf:"bytes,5,opt,name=require_ocsp_staple,json=requireOcspStaple,proto3" json:"require_ocsp_staple,omitempty"` + RequireSignedCertificateTimestamp *wrappers.BoolValue `protobuf:"bytes,6,opt,name=require_signed_certificate_timestamp,json=requireSignedCertificateTimestamp,proto3" json:"require_signed_certificate_timestamp,omitempty"` + Crl *base.DataSource `protobuf:"bytes,7,opt,name=crl,proto3" json:"crl,omitempty"` + AllowExpiredCertificate bool `protobuf:"varint,8,opt,name=allow_expired_certificate,json=allowExpiredCertificate,proto3" json:"allow_expired_certificate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CertificateValidationContext) Reset() { *m = CertificateValidationContext{} } +func (m *CertificateValidationContext) String() string { return proto.CompactTextString(m) } +func (*CertificateValidationContext) ProtoMessage() {} +func (*CertificateValidationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{3} +} +func (m *CertificateValidationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CertificateValidationContext.Unmarshal(m, b) +} +func (m *CertificateValidationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CertificateValidationContext.Marshal(b, m, deterministic) +} +func (dst *CertificateValidationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CertificateValidationContext.Merge(dst, src) +} +func (m *CertificateValidationContext) XXX_Size() int { + return xxx_messageInfo_CertificateValidationContext.Size(m) +} +func (m *CertificateValidationContext) XXX_DiscardUnknown() { + xxx_messageInfo_CertificateValidationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CertificateValidationContext proto.InternalMessageInfo + +func (m *CertificateValidationContext) GetTrustedCa() 
*base.DataSource { + if m != nil { + return m.TrustedCa + } + return nil +} + +func (m *CertificateValidationContext) GetVerifyCertificateSpki() []string { + if m != nil { + return m.VerifyCertificateSpki + } + return nil +} + +func (m *CertificateValidationContext) GetVerifyCertificateHash() []string { + if m != nil { + return m.VerifyCertificateHash + } + return nil +} + +func (m *CertificateValidationContext) GetVerifySubjectAltName() []string { + if m != nil { + return m.VerifySubjectAltName + } + return nil +} + +func (m *CertificateValidationContext) GetRequireOcspStaple() *wrappers.BoolValue { + if m != nil { + return m.RequireOcspStaple + } + return nil +} + +func (m *CertificateValidationContext) GetRequireSignedCertificateTimestamp() *wrappers.BoolValue { + if m != nil { + return m.RequireSignedCertificateTimestamp + } + return nil +} + +func (m *CertificateValidationContext) GetCrl() *base.DataSource { + if m != nil { + return m.Crl + } + return nil +} + +func (m *CertificateValidationContext) GetAllowExpiredCertificate() bool { + if m != nil { + return m.AllowExpiredCertificate + } + return false +} + +type CommonTlsContext struct { + TlsParams *TlsParameters `protobuf:"bytes,1,opt,name=tls_params,json=tlsParams,proto3" json:"tls_params,omitempty"` + TlsCertificates []*TlsCertificate `protobuf:"bytes,2,rep,name=tls_certificates,json=tlsCertificates,proto3" json:"tls_certificates,omitempty"` + TlsCertificateSdsSecretConfigs []*SdsSecretConfig `protobuf:"bytes,6,rep,name=tls_certificate_sds_secret_configs,json=tlsCertificateSdsSecretConfigs,proto3" json:"tls_certificate_sds_secret_configs,omitempty"` + // Types that are valid to be assigned to ValidationContextType: + // *CommonTlsContext_ValidationContext + // *CommonTlsContext_ValidationContextSdsSecretConfig + // *CommonTlsContext_CombinedValidationContext + ValidationContextType isCommonTlsContext_ValidationContextType `protobuf_oneof:"validation_context_type"` + AlpnProtocols []string `protobuf:"bytes,4,rep,name=alpn_protocols,json=alpnProtocols,proto3" json:"alpn_protocols,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommonTlsContext) Reset() { *m = CommonTlsContext{} } +func (m *CommonTlsContext) String() string { return proto.CompactTextString(m) } +func (*CommonTlsContext) ProtoMessage() {} +func (*CommonTlsContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{4} +} +func (m *CommonTlsContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommonTlsContext.Unmarshal(m, b) +} +func (m *CommonTlsContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommonTlsContext.Marshal(b, m, deterministic) +} +func (dst *CommonTlsContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommonTlsContext.Merge(dst, src) +} +func (m *CommonTlsContext) XXX_Size() int { + return xxx_messageInfo_CommonTlsContext.Size(m) +} +func (m *CommonTlsContext) XXX_DiscardUnknown() { + xxx_messageInfo_CommonTlsContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CommonTlsContext proto.InternalMessageInfo + +func (m *CommonTlsContext) GetTlsParams() *TlsParameters { + if m != nil { + return m.TlsParams + } + return nil +} + +func (m *CommonTlsContext) GetTlsCertificates() []*TlsCertificate { + if m != nil { + return m.TlsCertificates + } + return nil +} + +func (m *CommonTlsContext) GetTlsCertificateSdsSecretConfigs() []*SdsSecretConfig { + if m != nil { + return 
m.TlsCertificateSdsSecretConfigs + } + return nil +} + +type isCommonTlsContext_ValidationContextType interface { + isCommonTlsContext_ValidationContextType() +} + +type CommonTlsContext_ValidationContext struct { + ValidationContext *CertificateValidationContext `protobuf:"bytes,3,opt,name=validation_context,json=validationContext,proto3,oneof"` +} + +type CommonTlsContext_ValidationContextSdsSecretConfig struct { + ValidationContextSdsSecretConfig *SdsSecretConfig `protobuf:"bytes,7,opt,name=validation_context_sds_secret_config,json=validationContextSdsSecretConfig,proto3,oneof"` +} + +type CommonTlsContext_CombinedValidationContext struct { + CombinedValidationContext *CommonTlsContext_CombinedCertificateValidationContext `protobuf:"bytes,8,opt,name=combined_validation_context,json=combinedValidationContext,proto3,oneof"` +} + +func (*CommonTlsContext_ValidationContext) isCommonTlsContext_ValidationContextType() {} + +func (*CommonTlsContext_ValidationContextSdsSecretConfig) isCommonTlsContext_ValidationContextType() {} + +func (*CommonTlsContext_CombinedValidationContext) isCommonTlsContext_ValidationContextType() {} + +func (m *CommonTlsContext) GetValidationContextType() isCommonTlsContext_ValidationContextType { + if m != nil { + return m.ValidationContextType + } + return nil +} + +func (m *CommonTlsContext) GetValidationContext() *CertificateValidationContext { + if x, ok := m.GetValidationContextType().(*CommonTlsContext_ValidationContext); ok { + return x.ValidationContext + } + return nil +} + +func (m *CommonTlsContext) GetValidationContextSdsSecretConfig() *SdsSecretConfig { + if x, ok := m.GetValidationContextType().(*CommonTlsContext_ValidationContextSdsSecretConfig); ok { + return x.ValidationContextSdsSecretConfig + } + return nil +} + +func (m *CommonTlsContext) GetCombinedValidationContext() *CommonTlsContext_CombinedCertificateValidationContext { + if x, ok := m.GetValidationContextType().(*CommonTlsContext_CombinedValidationContext); ok { + return x.CombinedValidationContext + } + return nil +} + +func (m *CommonTlsContext) GetAlpnProtocols() []string { + if m != nil { + return m.AlpnProtocols + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*CommonTlsContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _CommonTlsContext_OneofMarshaler, _CommonTlsContext_OneofUnmarshaler, _CommonTlsContext_OneofSizer, []interface{}{ + (*CommonTlsContext_ValidationContext)(nil), + (*CommonTlsContext_ValidationContextSdsSecretConfig)(nil), + (*CommonTlsContext_CombinedValidationContext)(nil), + } +} + +func _CommonTlsContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*CommonTlsContext) + // validation_context_type + switch x := m.ValidationContextType.(type) { + case *CommonTlsContext_ValidationContext: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ValidationContext); err != nil { + return err + } + case *CommonTlsContext_ValidationContextSdsSecretConfig: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ValidationContextSdsSecretConfig); err != nil { + return err + } + case *CommonTlsContext_CombinedValidationContext: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CombinedValidationContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("CommonTlsContext.ValidationContextType has unexpected type %T", x) + } + return nil +} + +func _CommonTlsContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*CommonTlsContext) + switch tag { + case 3: // validation_context_type.validation_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CertificateValidationContext) + err := b.DecodeMessage(msg) + m.ValidationContextType = &CommonTlsContext_ValidationContext{msg} + return true, err + case 7: // validation_context_type.validation_context_sds_secret_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SdsSecretConfig) + err := b.DecodeMessage(msg) + m.ValidationContextType = &CommonTlsContext_ValidationContextSdsSecretConfig{msg} + return true, err + case 8: // validation_context_type.combined_validation_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CommonTlsContext_CombinedCertificateValidationContext) + err := b.DecodeMessage(msg) + m.ValidationContextType = &CommonTlsContext_CombinedValidationContext{msg} + return true, err + default: + return false, nil + } +} + +func _CommonTlsContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*CommonTlsContext) + // validation_context_type + switch x := m.ValidationContextType.(type) { + case *CommonTlsContext_ValidationContext: + s := proto.Size(x.ValidationContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CommonTlsContext_ValidationContextSdsSecretConfig: + s := proto.Size(x.ValidationContextSdsSecretConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *CommonTlsContext_CombinedValidationContext: + s := proto.Size(x.CombinedValidationContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type CommonTlsContext_CombinedCertificateValidationContext struct { + DefaultValidationContext *CertificateValidationContext `protobuf:"bytes,1,opt,name=default_validation_context,json=defaultValidationContext,proto3" 
json:"default_validation_context,omitempty"` + ValidationContextSdsSecretConfig *SdsSecretConfig `protobuf:"bytes,2,opt,name=validation_context_sds_secret_config,json=validationContextSdsSecretConfig,proto3" json:"validation_context_sds_secret_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CommonTlsContext_CombinedCertificateValidationContext) Reset() { + *m = CommonTlsContext_CombinedCertificateValidationContext{} +} +func (m *CommonTlsContext_CombinedCertificateValidationContext) String() string { + return proto.CompactTextString(m) +} +func (*CommonTlsContext_CombinedCertificateValidationContext) ProtoMessage() {} +func (*CommonTlsContext_CombinedCertificateValidationContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{4, 0} +} +func (m *CommonTlsContext_CombinedCertificateValidationContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext.Unmarshal(m, b) +} +func (m *CommonTlsContext_CombinedCertificateValidationContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext.Marshal(b, m, deterministic) +} +func (dst *CommonTlsContext_CombinedCertificateValidationContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext.Merge(dst, src) +} +func (m *CommonTlsContext_CombinedCertificateValidationContext) XXX_Size() int { + return xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext.Size(m) +} +func (m *CommonTlsContext_CombinedCertificateValidationContext) XXX_DiscardUnknown() { + xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext.DiscardUnknown(m) +} + +var xxx_messageInfo_CommonTlsContext_CombinedCertificateValidationContext proto.InternalMessageInfo + +func (m *CommonTlsContext_CombinedCertificateValidationContext) GetDefaultValidationContext() *CertificateValidationContext { + if m != nil { + return m.DefaultValidationContext + } + return nil +} + +func (m *CommonTlsContext_CombinedCertificateValidationContext) GetValidationContextSdsSecretConfig() *SdsSecretConfig { + if m != nil { + return m.ValidationContextSdsSecretConfig + } + return nil +} + +type UpstreamTlsContext struct { + CommonTlsContext *CommonTlsContext `protobuf:"bytes,1,opt,name=common_tls_context,json=commonTlsContext,proto3" json:"common_tls_context,omitempty"` + Sni string `protobuf:"bytes,2,opt,name=sni,proto3" json:"sni,omitempty"` + AllowRenegotiation bool `protobuf:"varint,3,opt,name=allow_renegotiation,json=allowRenegotiation,proto3" json:"allow_renegotiation,omitempty"` + MaxSessionKeys *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=max_session_keys,json=maxSessionKeys,proto3" json:"max_session_keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpstreamTlsContext) Reset() { *m = UpstreamTlsContext{} } +func (m *UpstreamTlsContext) String() string { return proto.CompactTextString(m) } +func (*UpstreamTlsContext) ProtoMessage() {} +func (*UpstreamTlsContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{5} +} +func (m *UpstreamTlsContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpstreamTlsContext.Unmarshal(m, b) +} +func (m *UpstreamTlsContext) XXX_Marshal(b []byte, deterministic bool) 
([]byte, error) { + return xxx_messageInfo_UpstreamTlsContext.Marshal(b, m, deterministic) +} +func (dst *UpstreamTlsContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpstreamTlsContext.Merge(dst, src) +} +func (m *UpstreamTlsContext) XXX_Size() int { + return xxx_messageInfo_UpstreamTlsContext.Size(m) +} +func (m *UpstreamTlsContext) XXX_DiscardUnknown() { + xxx_messageInfo_UpstreamTlsContext.DiscardUnknown(m) +} + +var xxx_messageInfo_UpstreamTlsContext proto.InternalMessageInfo + +func (m *UpstreamTlsContext) GetCommonTlsContext() *CommonTlsContext { + if m != nil { + return m.CommonTlsContext + } + return nil +} + +func (m *UpstreamTlsContext) GetSni() string { + if m != nil { + return m.Sni + } + return "" +} + +func (m *UpstreamTlsContext) GetAllowRenegotiation() bool { + if m != nil { + return m.AllowRenegotiation + } + return false +} + +func (m *UpstreamTlsContext) GetMaxSessionKeys() *wrappers.UInt32Value { + if m != nil { + return m.MaxSessionKeys + } + return nil +} + +type DownstreamTlsContext struct { + CommonTlsContext *CommonTlsContext `protobuf:"bytes,1,opt,name=common_tls_context,json=commonTlsContext,proto3" json:"common_tls_context,omitempty"` + RequireClientCertificate *wrappers.BoolValue `protobuf:"bytes,2,opt,name=require_client_certificate,json=requireClientCertificate,proto3" json:"require_client_certificate,omitempty"` + RequireSni *wrappers.BoolValue `protobuf:"bytes,3,opt,name=require_sni,json=requireSni,proto3" json:"require_sni,omitempty"` + // Types that are valid to be assigned to SessionTicketKeysType: + // *DownstreamTlsContext_SessionTicketKeys + // *DownstreamTlsContext_SessionTicketKeysSdsSecretConfig + SessionTicketKeysType isDownstreamTlsContext_SessionTicketKeysType `protobuf_oneof:"session_ticket_keys_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DownstreamTlsContext) Reset() { *m = DownstreamTlsContext{} } +func (m *DownstreamTlsContext) String() string { return proto.CompactTextString(m) } +func (*DownstreamTlsContext) ProtoMessage() {} +func (*DownstreamTlsContext) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{6} +} +func (m *DownstreamTlsContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DownstreamTlsContext.Unmarshal(m, b) +} +func (m *DownstreamTlsContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DownstreamTlsContext.Marshal(b, m, deterministic) +} +func (dst *DownstreamTlsContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_DownstreamTlsContext.Merge(dst, src) +} +func (m *DownstreamTlsContext) XXX_Size() int { + return xxx_messageInfo_DownstreamTlsContext.Size(m) +} +func (m *DownstreamTlsContext) XXX_DiscardUnknown() { + xxx_messageInfo_DownstreamTlsContext.DiscardUnknown(m) +} + +var xxx_messageInfo_DownstreamTlsContext proto.InternalMessageInfo + +func (m *DownstreamTlsContext) GetCommonTlsContext() *CommonTlsContext { + if m != nil { + return m.CommonTlsContext + } + return nil +} + +func (m *DownstreamTlsContext) GetRequireClientCertificate() *wrappers.BoolValue { + if m != nil { + return m.RequireClientCertificate + } + return nil +} + +func (m *DownstreamTlsContext) GetRequireSni() *wrappers.BoolValue { + if m != nil { + return m.RequireSni + } + return nil +} + +type isDownstreamTlsContext_SessionTicketKeysType interface { + isDownstreamTlsContext_SessionTicketKeysType() +} + +type DownstreamTlsContext_SessionTicketKeys struct { + 
SessionTicketKeys *TlsSessionTicketKeys `protobuf:"bytes,4,opt,name=session_ticket_keys,json=sessionTicketKeys,proto3,oneof"` +} + +type DownstreamTlsContext_SessionTicketKeysSdsSecretConfig struct { + SessionTicketKeysSdsSecretConfig *SdsSecretConfig `protobuf:"bytes,5,opt,name=session_ticket_keys_sds_secret_config,json=sessionTicketKeysSdsSecretConfig,proto3,oneof"` +} + +func (*DownstreamTlsContext_SessionTicketKeys) isDownstreamTlsContext_SessionTicketKeysType() {} + +func (*DownstreamTlsContext_SessionTicketKeysSdsSecretConfig) isDownstreamTlsContext_SessionTicketKeysType() { +} + +func (m *DownstreamTlsContext) GetSessionTicketKeysType() isDownstreamTlsContext_SessionTicketKeysType { + if m != nil { + return m.SessionTicketKeysType + } + return nil +} + +func (m *DownstreamTlsContext) GetSessionTicketKeys() *TlsSessionTicketKeys { + if x, ok := m.GetSessionTicketKeysType().(*DownstreamTlsContext_SessionTicketKeys); ok { + return x.SessionTicketKeys + } + return nil +} + +func (m *DownstreamTlsContext) GetSessionTicketKeysSdsSecretConfig() *SdsSecretConfig { + if x, ok := m.GetSessionTicketKeysType().(*DownstreamTlsContext_SessionTicketKeysSdsSecretConfig); ok { + return x.SessionTicketKeysSdsSecretConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*DownstreamTlsContext) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DownstreamTlsContext_OneofMarshaler, _DownstreamTlsContext_OneofUnmarshaler, _DownstreamTlsContext_OneofSizer, []interface{}{ + (*DownstreamTlsContext_SessionTicketKeys)(nil), + (*DownstreamTlsContext_SessionTicketKeysSdsSecretConfig)(nil), + } +} + +func _DownstreamTlsContext_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DownstreamTlsContext) + // session_ticket_keys_type + switch x := m.SessionTicketKeysType.(type) { + case *DownstreamTlsContext_SessionTicketKeys: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SessionTicketKeys); err != nil { + return err + } + case *DownstreamTlsContext_SessionTicketKeysSdsSecretConfig: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SessionTicketKeysSdsSecretConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("DownstreamTlsContext.SessionTicketKeysType has unexpected type %T", x) + } + return nil +} + +func _DownstreamTlsContext_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DownstreamTlsContext) + switch tag { + case 4: // session_ticket_keys_type.session_ticket_keys + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TlsSessionTicketKeys) + err := b.DecodeMessage(msg) + m.SessionTicketKeysType = &DownstreamTlsContext_SessionTicketKeys{msg} + return true, err + case 5: // session_ticket_keys_type.session_ticket_keys_sds_secret_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SdsSecretConfig) + err := b.DecodeMessage(msg) + m.SessionTicketKeysType = &DownstreamTlsContext_SessionTicketKeysSdsSecretConfig{msg} + return true, err + default: + return false, nil + } +} + +func _DownstreamTlsContext_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DownstreamTlsContext) + // session_ticket_keys_type + switch x := m.SessionTicketKeysType.(type) { + case *DownstreamTlsContext_SessionTicketKeys: + s := 
proto.Size(x.SessionTicketKeys) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *DownstreamTlsContext_SessionTicketKeysSdsSecretConfig: + s := proto.Size(x.SessionTicketKeysSdsSecretConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SdsSecretConfig struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + SdsConfig *config_source.ConfigSource `protobuf:"bytes,2,opt,name=sds_config,json=sdsConfig,proto3" json:"sds_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SdsSecretConfig) Reset() { *m = SdsSecretConfig{} } +func (m *SdsSecretConfig) String() string { return proto.CompactTextString(m) } +func (*SdsSecretConfig) ProtoMessage() {} +func (*SdsSecretConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{7} +} +func (m *SdsSecretConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SdsSecretConfig.Unmarshal(m, b) +} +func (m *SdsSecretConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SdsSecretConfig.Marshal(b, m, deterministic) +} +func (dst *SdsSecretConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_SdsSecretConfig.Merge(dst, src) +} +func (m *SdsSecretConfig) XXX_Size() int { + return xxx_messageInfo_SdsSecretConfig.Size(m) +} +func (m *SdsSecretConfig) XXX_DiscardUnknown() { + xxx_messageInfo_SdsSecretConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_SdsSecretConfig proto.InternalMessageInfo + +func (m *SdsSecretConfig) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SdsSecretConfig) GetSdsConfig() *config_source.ConfigSource { + if m != nil { + return m.SdsConfig + } + return nil +} + +type Secret struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to Type: + // *Secret_TlsCertificate + // *Secret_SessionTicketKeys + // *Secret_ValidationContext + Type isSecret_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Secret) Reset() { *m = Secret{} } +func (m *Secret) String() string { return proto.CompactTextString(m) } +func (*Secret) ProtoMessage() {} +func (*Secret) Descriptor() ([]byte, []int) { + return fileDescriptor_cert_f82beca1d890b9d7, []int{8} +} +func (m *Secret) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Secret.Unmarshal(m, b) +} +func (m *Secret) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Secret.Marshal(b, m, deterministic) +} +func (dst *Secret) XXX_Merge(src proto.Message) { + xxx_messageInfo_Secret.Merge(dst, src) +} +func (m *Secret) XXX_Size() int { + return xxx_messageInfo_Secret.Size(m) +} +func (m *Secret) XXX_DiscardUnknown() { + xxx_messageInfo_Secret.DiscardUnknown(m) +} + +var xxx_messageInfo_Secret proto.InternalMessageInfo + +func (m *Secret) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isSecret_Type interface { + isSecret_Type() +} + +type Secret_TlsCertificate struct { + TlsCertificate *TlsCertificate `protobuf:"bytes,2,opt,name=tls_certificate,json=tlsCertificate,proto3,oneof"` +} + +type Secret_SessionTicketKeys struct { + SessionTicketKeys *TlsSessionTicketKeys 
`protobuf:"bytes,3,opt,name=session_ticket_keys,json=sessionTicketKeys,proto3,oneof"` +} + +type Secret_ValidationContext struct { + ValidationContext *CertificateValidationContext `protobuf:"bytes,4,opt,name=validation_context,json=validationContext,proto3,oneof"` +} + +func (*Secret_TlsCertificate) isSecret_Type() {} + +func (*Secret_SessionTicketKeys) isSecret_Type() {} + +func (*Secret_ValidationContext) isSecret_Type() {} + +func (m *Secret) GetType() isSecret_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *Secret) GetTlsCertificate() *TlsCertificate { + if x, ok := m.GetType().(*Secret_TlsCertificate); ok { + return x.TlsCertificate + } + return nil +} + +func (m *Secret) GetSessionTicketKeys() *TlsSessionTicketKeys { + if x, ok := m.GetType().(*Secret_SessionTicketKeys); ok { + return x.SessionTicketKeys + } + return nil +} + +func (m *Secret) GetValidationContext() *CertificateValidationContext { + if x, ok := m.GetType().(*Secret_ValidationContext); ok { + return x.ValidationContext + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Secret) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Secret_OneofMarshaler, _Secret_OneofUnmarshaler, _Secret_OneofSizer, []interface{}{ + (*Secret_TlsCertificate)(nil), + (*Secret_SessionTicketKeys)(nil), + (*Secret_ValidationContext)(nil), + } +} + +func _Secret_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Secret) + // type + switch x := m.Type.(type) { + case *Secret_TlsCertificate: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TlsCertificate); err != nil { + return err + } + case *Secret_SessionTicketKeys: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SessionTicketKeys); err != nil { + return err + } + case *Secret_ValidationContext: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ValidationContext); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Secret.Type has unexpected type %T", x) + } + return nil +} + +func _Secret_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Secret) + switch tag { + case 2: // type.tls_certificate + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TlsCertificate) + err := b.DecodeMessage(msg) + m.Type = &Secret_TlsCertificate{msg} + return true, err + case 3: // type.session_ticket_keys + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TlsSessionTicketKeys) + err := b.DecodeMessage(msg) + m.Type = &Secret_SessionTicketKeys{msg} + return true, err + case 4: // type.validation_context + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(CertificateValidationContext) + err := b.DecodeMessage(msg) + m.Type = &Secret_ValidationContext{msg} + return true, err + default: + return false, nil + } +} + +func _Secret_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Secret) + // type + switch x := m.Type.(type) { + case *Secret_TlsCertificate: + s := proto.Size(x.TlsCertificate) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Secret_SessionTicketKeys: + s := proto.Size(x.SessionTicketKeys) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Secret_ValidationContext: + s 
:= proto.Size(x.ValidationContext) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*TlsParameters)(nil), "envoy.api.v2.auth.TlsParameters") + proto.RegisterType((*TlsCertificate)(nil), "envoy.api.v2.auth.TlsCertificate") + proto.RegisterType((*TlsSessionTicketKeys)(nil), "envoy.api.v2.auth.TlsSessionTicketKeys") + proto.RegisterType((*CertificateValidationContext)(nil), "envoy.api.v2.auth.CertificateValidationContext") + proto.RegisterType((*CommonTlsContext)(nil), "envoy.api.v2.auth.CommonTlsContext") + proto.RegisterType((*CommonTlsContext_CombinedCertificateValidationContext)(nil), "envoy.api.v2.auth.CommonTlsContext.CombinedCertificateValidationContext") + proto.RegisterType((*UpstreamTlsContext)(nil), "envoy.api.v2.auth.UpstreamTlsContext") + proto.RegisterType((*DownstreamTlsContext)(nil), "envoy.api.v2.auth.DownstreamTlsContext") + proto.RegisterType((*SdsSecretConfig)(nil), "envoy.api.v2.auth.SdsSecretConfig") + proto.RegisterType((*Secret)(nil), "envoy.api.v2.auth.Secret") + proto.RegisterEnum("envoy.api.v2.auth.TlsParameters_TlsProtocol", TlsParameters_TlsProtocol_name, TlsParameters_TlsProtocol_value) +} + +func init() { proto.RegisterFile("envoy/api/v2/auth/cert.proto", fileDescriptor_cert_f82beca1d890b9d7) } + +var fileDescriptor_cert_f82beca1d890b9d7 = []byte{ + // 1295 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x4d, 0x77, 0xdb, 0x44, + 0x17, 0x8e, 0x6c, 0xc7, 0x75, 0xae, 0xdb, 0x54, 0x99, 0xf6, 0x3d, 0x51, 0xfc, 0x86, 0xd6, 0x55, + 0xdb, 0x43, 0x16, 0x3d, 0x36, 0x75, 0x61, 0xc1, 0x57, 0xa1, 0x76, 0xe1, 0x84, 0x36, 0xd0, 0x22, + 0x3b, 0x3d, 0xc0, 0x46, 0x4c, 0xe4, 0x49, 0x3c, 0x44, 0x5f, 0xcc, 0x8c, 0x9d, 0x98, 0x05, 0x0b, + 0x96, 0x5d, 0x76, 0xcd, 0x86, 0x5f, 0xc0, 0x9e, 0x15, 0x7f, 0x80, 0x1f, 0xc2, 0xa6, 0x6b, 0x56, + 0xc0, 0x99, 0x91, 0x14, 0x4b, 0x96, 0x5a, 0x1b, 0x4e, 0x0f, 0x3b, 0xcd, 0xdc, 0xb9, 0xcf, 0x33, + 0x73, 0xef, 0x73, 0xef, 0x15, 0x6c, 0x13, 0x7f, 0x12, 0x4c, 0xdb, 0x38, 0xa4, 0xed, 0x49, 0xa7, + 0x8d, 0xc7, 0x62, 0xd4, 0x76, 0x08, 0x13, 0xad, 0x90, 0x05, 0x22, 0x40, 0x1b, 0xca, 0xda, 0xc2, + 0x21, 0x6d, 0x4d, 0x3a, 0x2d, 0x69, 0x6d, 0x64, 0x1d, 0x9c, 0x80, 0x91, 0xf6, 0x01, 0xe6, 0x24, + 0x72, 0x68, 0xdc, 0xcc, 0x5b, 0x9d, 0xc0, 0x3f, 0xa4, 0x47, 0x36, 0x0f, 0xc6, 0xcc, 0x49, 0x8e, + 0x5d, 0x39, 0x0a, 0x82, 0x23, 0x97, 0xb4, 0xd5, 0xea, 0x60, 0x7c, 0xd8, 0x3e, 0x61, 0x38, 0x0c, + 0x09, 0xe3, 0xb1, 0x7d, 0x73, 0x82, 0x5d, 0x3a, 0xc4, 0x82, 0xb4, 0x93, 0x8f, 0xc8, 0x60, 0xfe, + 0x58, 0x86, 0x0b, 0x03, 0x97, 0x3f, 0xc6, 0x0c, 0x7b, 0x44, 0x10, 0xc6, 0xd1, 0x14, 0xb6, 0x85, + 0xcb, 0x6d, 0x8f, 0xfa, 0xd4, 0x1b, 0x7b, 0xb6, 0x3a, 0xe6, 0x04, 0xae, 0x3d, 0x21, 0x8c, 0xd3, + 0xc0, 0x37, 0xb4, 0xa6, 0xb6, 0xb3, 0xde, 0xb9, 0xd5, 0xca, 0xbd, 0xa4, 0x95, 0xc1, 0x51, 0xab, + 0xd8, 0xb7, 0x0b, 0xbf, 0xfc, 0xfe, 0x6b, 0x79, 0xf5, 0x07, 0xad, 0xa4, 0x6b, 0xd6, 0x96, 0x70, + 0xf9, 0xa7, 0x11, 0x78, 0x62, 0x7f, 0x12, 0x41, 0x9f, 0x51, 0xe3, 0xd3, 0x62, 0xea, 0xd2, 0xab, + 0xa0, 0x8e, 0xc0, 0xe7, 0xa9, 0xaf, 0xc3, 0x05, 0x87, 0x86, 0x23, 0xc2, 0x6c, 0x3e, 0xa6, 0x82, + 0x70, 0xa3, 0xdc, 0x2c, 0xef, 0xac, 0x59, 0xe7, 0xa3, 0xcd, 0xbe, 0xda, 0x43, 0x57, 0xa1, 0x4e, + 0x9c, 0xe1, 0xc8, 0x76, 0xc6, 0x6c, 0x42, 0xb8, 0x51, 0x51, 0x47, 0x40, 0x6e, 0xf5, 0xd4, 0x8e, + 0xf9, 0x08, 0xea, 0x29, 0x6e, 0x74, 0x1e, 0x6a, 0x83, 0xbd, 0xbe, 0x7d, 0x6f, 0x7f, 
0xf0, 0x48, + 0x5f, 0x41, 0x75, 0x38, 0x37, 0xd8, 0xeb, 0x4f, 0x6e, 0xdb, 0x6f, 0xe8, 0xda, 0x6c, 0x71, 0x5b, + 0x2f, 0xcd, 0x16, 0x1d, 0xbd, 0x3c, 0x5b, 0xdc, 0xd1, 0x2b, 0xe6, 0x1f, 0x25, 0x58, 0x1f, 0xb8, + 0xbc, 0x47, 0x98, 0xa0, 0x87, 0xd4, 0xc1, 0x82, 0xa0, 0x07, 0xb0, 0xe1, 0xcc, 0x96, 0xb6, 0x33, + 0xc2, 0x34, 0x4a, 0x4a, 0xbd, 0xf3, 0x5a, 0x36, 0x32, 0x52, 0x2d, 0xad, 0xfb, 0x58, 0xe0, 0xbe, + 0x92, 0x8a, 0xa5, 0xa7, 0xfc, 0x7a, 0xd2, 0x0d, 0xdd, 0x85, 0x7a, 0xc8, 0xe8, 0x44, 0xe2, 0x1c, + 0x93, 0xa9, 0x8a, 0xef, 0x42, 0x14, 0x88, 0x3d, 0x1e, 0x92, 0x29, 0x7a, 0x1b, 0x6a, 0x21, 0xe6, + 0xfc, 0x24, 0x60, 0x43, 0xa3, 0xbc, 0x8c, 0xf3, 0xd9, 0x71, 0x49, 0x1d, 0x38, 0x3c, 0xb4, 0xb9, + 0xc0, 0xa1, 0x4b, 0x8c, 0xca, 0x52, 0xd4, 0xd2, 0xa3, 0xaf, 0x1c, 0x90, 0x0d, 0xdb, 0x9c, 0x1e, + 0xf9, 0x64, 0x68, 0xa7, 0xa3, 0x21, 0xa8, 0x47, 0xb8, 0xc0, 0x5e, 0x68, 0xac, 0x36, 0xcb, 0x8b, + 0x01, 0x1b, 0x11, 0x44, 0x2a, 0xbc, 0x83, 0x04, 0xc0, 0xdc, 0x87, 0xcb, 0x03, 0x97, 0xf7, 0x09, + 0x97, 0xfa, 0x18, 0x50, 0xe7, 0x98, 0x88, 0x87, 0x64, 0xca, 0xd1, 0xfb, 0x50, 0x39, 0x26, 0x53, + 0x6e, 0x68, 0x4b, 0x10, 0xc4, 0xea, 0x7b, 0xa6, 0x95, 0x6a, 0x9a, 0xa5, 0xdc, 0xcc, 0xdf, 0x2a, + 0xb0, 0x9d, 0xe2, 0x7b, 0x12, 0x95, 0x23, 0x0d, 0xfc, 0x5e, 0xe0, 0x0b, 0x72, 0x2a, 0xd0, 0x7b, + 0x00, 0x82, 0x8d, 0xb9, 0x90, 0x2f, 0xc3, 0xcb, 0x25, 0x76, 0x2d, 0x76, 0xe8, 0x61, 0xb4, 0x0b, + 0x9b, 0x13, 0xc2, 0xe8, 0xe1, 0x34, 0x13, 0x16, 0x1e, 0x1e, 0xd3, 0x48, 0xd1, 0x5d, 0x5d, 0xde, + 0xa8, 0xfe, 0x4c, 0xab, 0x99, 0x55, 0x56, 0x69, 0xde, 0xda, 0xb9, 0x65, 0xfd, 0x2f, 0x72, 0x48, + 0x5d, 0xaa, 0x1f, 0x1e, 0xd3, 0x17, 0x20, 0x8d, 0x30, 0x1f, 0x19, 0xa5, 0x02, 0xa4, 0x0f, 0x77, + 0xec, 0x02, 0xa4, 0x5d, 0xcc, 0x47, 0xe8, 0xad, 0x33, 0x24, 0x3e, 0x3e, 0xf8, 0x86, 0x38, 0xc2, + 0xc6, 0xae, 0xb0, 0x7d, 0xec, 0x91, 0xb8, 0x84, 0x2e, 0x47, 0xe6, 0x7e, 0x64, 0xbd, 0xe7, 0x8a, + 0xcf, 0xb0, 0x27, 0x85, 0x7e, 0x89, 0x91, 0x6f, 0xc7, 0x94, 0x11, 0x3b, 0xad, 0x94, 0x55, 0x15, + 0x91, 0x46, 0x2b, 0xea, 0x78, 0xad, 0xa4, 0xe3, 0xb5, 0xba, 0x41, 0xe0, 0x3e, 0xc1, 0xee, 0x98, + 0x58, 0x1b, 0xb1, 0xdb, 0xa3, 0x99, 0x5a, 0x8e, 0xe1, 0x46, 0x82, 0xf5, 0x52, 0xd5, 0x54, 0x17, + 0x82, 0x5f, 0x8b, 0x71, 0xfa, 0x2f, 0x54, 0x0e, 0x6a, 0x43, 0xd9, 0x61, 0xae, 0x71, 0x6e, 0x99, + 0xd4, 0xc9, 0x93, 0xe8, 0x1d, 0xd8, 0xc2, 0xae, 0x1b, 0x9c, 0xd8, 0xe4, 0x34, 0xa4, 0x2c, 0x7b, + 0x39, 0xa3, 0xd6, 0xd4, 0x76, 0x6a, 0xd6, 0xa6, 0x3a, 0xf0, 0x51, 0x64, 0x4f, 0xb1, 0x9a, 0xcf, + 0xcf, 0x81, 0xde, 0x0b, 0x3c, 0x2f, 0xf0, 0x65, 0x9f, 0x88, 0x35, 0xf4, 0x01, 0x80, 0x6c, 0xa4, + 0xa1, 0x6c, 0x89, 0x3c, 0xd6, 0x50, 0x73, 0x51, 0xdb, 0xb4, 0xd6, 0x44, 0xbc, 0xe4, 0x68, 0x0f, + 0x74, 0x09, 0x90, 0xba, 0x07, 0x57, 0x59, 0xaf, 0x77, 0xae, 0x15, 0xc3, 0xa4, 0xae, 0x64, 0x5d, + 0x14, 0x99, 0x35, 0x47, 0xdf, 0x81, 0x39, 0x87, 0x66, 0xf3, 0x21, 0xb7, 0x39, 0x71, 0x18, 0x11, + 0x76, 0x34, 0xcf, 0xb8, 0x51, 0x55, 0xf8, 0x66, 0x01, 0x7e, 0x7f, 0xc8, 0xfb, 0xea, 0x6c, 0x4f, + 0x1d, 0x9d, 0x55, 0x95, 0xae, 0x59, 0x57, 0xb2, 0x64, 0x73, 0x47, 0x39, 0xfa, 0x1a, 0xd0, 0xe4, + 0xac, 0xc6, 0x24, 0x97, 0x0c, 0x50, 0xdc, 0xac, 0xda, 0x05, 0x5c, 0x2f, 0xab, 0xcd, 0xdd, 0x15, + 0x6b, 0x63, 0x92, 0x2b, 0x58, 0x01, 0x37, 0xf2, 0x0c, 0xf9, 0x07, 0xc6, 0x7a, 0x58, 0xe2, 0x7d, + 0xbb, 0x2b, 0x56, 0x33, 0x47, 0x33, 0x77, 0x06, 0x3d, 0xd5, 0xe0, 0xff, 0x4e, 0xe0, 0x1d, 0x50, + 0x29, 0xe6, 0x82, 0x17, 0xd6, 0x14, 0xdb, 0x6e, 0xd1, 0x0b, 0xe7, 0xd4, 0x22, 0x37, 0x14, 0xcc, + 0x82, 0xa7, 0x6f, 0x25, 0x74, 0xf9, 0x9e, 0x75, 0x13, 0xd6, 0xb1, 0x1b, 0xfa, 0x67, 0x13, 0x3b, + 0x99, 0x8d, 
0x17, 0xe4, 0x6e, 0x32, 0x0e, 0x79, 0xe3, 0xa7, 0x12, 0xdc, 0x58, 0x86, 0x0c, 0x4d, + 0xa1, 0x31, 0x24, 0x87, 0x78, 0xec, 0x8a, 0xa2, 0xa7, 0x69, 0xff, 0x2a, 0x79, 0xb1, 0x6a, 0x9e, + 0x2a, 0xd5, 0x18, 0x31, 0x7c, 0x9e, 0xfa, 0xfb, 0x25, 0xb3, 0x59, 0x5a, 0x36, 0x9b, 0x19, 0xde, + 0x85, 0x79, 0xed, 0x6e, 0xc1, 0x66, 0x01, 0xbf, 0x98, 0x86, 0xe4, 0x41, 0xa5, 0xb6, 0xaa, 0x57, + 0xcd, 0x3f, 0x35, 0x40, 0xfb, 0x21, 0x17, 0x8c, 0x60, 0x2f, 0x55, 0xf2, 0x9f, 0x03, 0x72, 0x54, + 0x62, 0x6d, 0x55, 0x6a, 0x99, 0x50, 0x5d, 0x5f, 0x42, 0x05, 0x96, 0xee, 0xcc, 0x77, 0x91, 0x6d, + 0x28, 0x73, 0x9f, 0xaa, 0x97, 0xae, 0xc5, 0xaf, 0x60, 0xe5, 0x9d, 0xbf, 0x34, 0x4b, 0x6e, 0xa3, + 0x36, 0x5c, 0x8a, 0x9a, 0x16, 0x23, 0x3e, 0x39, 0x0a, 0x04, 0x55, 0x37, 0x56, 0x95, 0x55, 0xb3, + 0x90, 0x32, 0x59, 0x69, 0x0b, 0xfa, 0x18, 0x74, 0x0f, 0x9f, 0xda, 0x3c, 0x9a, 0xa8, 0xb6, 0x1a, + 0xa2, 0xd1, 0xd8, 0xdf, 0xce, 0xf5, 0xdb, 0xfd, 0x4f, 0x7c, 0x71, 0xa7, 0x13, 0x75, 0xdc, 0x75, + 0x0f, 0x9f, 0xc6, 0x63, 0x58, 0x0e, 0x60, 0xf3, 0x79, 0x19, 0x2e, 0xdf, 0x0f, 0x4e, 0xfc, 0xff, + 0x22, 0x04, 0x5f, 0x40, 0x23, 0x99, 0x1b, 0x8e, 0x4b, 0x89, 0x2f, 0x32, 0xad, 0xb9, 0xb4, 0x70, + 0x5a, 0x18, 0xb1, 0x77, 0x4f, 0x39, 0xa7, 0x7f, 0xe3, 0xde, 0x85, 0xfa, 0xd9, 0x44, 0xf2, 0x69, + 0xdc, 0x90, 0x5e, 0x06, 0x05, 0xc9, 0xe0, 0xf1, 0x29, 0xfa, 0x12, 0x2e, 0x25, 0x61, 0x14, 0xea, + 0xcf, 0x24, 0x1d, 0xcd, 0xd7, 0x8b, 0x3b, 0x74, 0xee, 0x4f, 0x46, 0x76, 0x33, 0x9e, 0xfb, 0xbd, + 0x19, 0xc3, 0xcd, 0x02, 0xe8, 0x82, 0x02, 0x58, 0xfd, 0x27, 0xed, 0x2c, 0xc7, 0x33, 0x2f, 0xfb, + 0x06, 0x18, 0x45, 0xb4, 0x52, 0xf7, 0x26, 0x81, 0x8b, 0xf3, 0xdd, 0x0f, 0x41, 0x45, 0xfd, 0x3f, + 0xc8, 0xe4, 0xae, 0x59, 0xea, 0x1b, 0xdd, 0x05, 0x90, 0xb7, 0xcc, 0xd4, 0xe7, 0xd5, 0x82, 0xe9, + 0x1b, 0x41, 0x24, 0xbf, 0x4e, 0x7c, 0xc8, 0xa3, 0x0d, 0xf3, 0xe7, 0x12, 0x54, 0x23, 0x92, 0x42, + 0xf8, 0x3d, 0xb8, 0x38, 0x37, 0xc4, 0x62, 0x8e, 0xc5, 0x13, 0x71, 0x77, 0xc5, 0x5a, 0xcf, 0x8e, + 0xa9, 0x17, 0x65, 0xb0, 0xfc, 0x0a, 0x32, 0x58, 0x3c, 0xf1, 0x2a, 0xaf, 0x6e, 0xe2, 0x75, 0xab, + 0x50, 0x91, 0x89, 0xe9, 0xbe, 0x09, 0x57, 0x69, 0x10, 0x21, 0x86, 0x2c, 0x38, 0x9d, 0xe6, 0xc1, + 0xbb, 0x6b, 0x12, 0x5d, 0x4d, 0x80, 0xc7, 0xda, 0x57, 0x15, 0xb9, 0x75, 0x50, 0x55, 0xe2, 0xbe, + 0xf3, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3c, 0x5e, 0x86, 0x05, 0x2a, 0x0f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds/cds.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds/cds.pb.go new file mode 100755 index 0000000..f3ec0c4 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds/cds.pb.go @@ -0,0 +1,1771 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: envoy/api/v2/cds.proto + +package envoy_api_v2 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import cert "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/auth/cert" +import circuit_breaker "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/circuit_breaker" +import outlier_detection "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/outlier_detection" +import address "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import config_source "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source" +import health_check "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check" +import protocol "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/protocol" +import discovery "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery" +import eds "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds" +import percent "google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Cluster_DiscoveryType int32 + +const ( + Cluster_STATIC Cluster_DiscoveryType = 0 + Cluster_STRICT_DNS Cluster_DiscoveryType = 1 + Cluster_LOGICAL_DNS Cluster_DiscoveryType = 2 + Cluster_EDS Cluster_DiscoveryType = 3 + Cluster_ORIGINAL_DST Cluster_DiscoveryType = 4 +) + +var Cluster_DiscoveryType_name = map[int32]string{ + 0: "STATIC", + 1: "STRICT_DNS", + 2: "LOGICAL_DNS", + 3: "EDS", + 4: "ORIGINAL_DST", +} +var Cluster_DiscoveryType_value = map[string]int32{ + "STATIC": 0, + "STRICT_DNS": 1, + "LOGICAL_DNS": 2, + "EDS": 3, + "ORIGINAL_DST": 4, +} + +func (x Cluster_DiscoveryType) String() string { + return proto.EnumName(Cluster_DiscoveryType_name, int32(x)) +} +func (Cluster_DiscoveryType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 0} +} + +type Cluster_LbPolicy int32 + +const ( + Cluster_ROUND_ROBIN Cluster_LbPolicy = 0 + Cluster_LEAST_REQUEST Cluster_LbPolicy = 1 + Cluster_RING_HASH Cluster_LbPolicy = 2 + Cluster_RANDOM Cluster_LbPolicy = 3 + Cluster_ORIGINAL_DST_LB Cluster_LbPolicy = 4 + Cluster_MAGLEV Cluster_LbPolicy = 5 +) + +var Cluster_LbPolicy_name = map[int32]string{ + 0: "ROUND_ROBIN", + 1: "LEAST_REQUEST", + 2: "RING_HASH", + 3: "RANDOM", + 4: "ORIGINAL_DST_LB", + 5: "MAGLEV", +} +var Cluster_LbPolicy_value = map[string]int32{ + "ROUND_ROBIN": 0, + "LEAST_REQUEST": 1, + "RING_HASH": 2, + "RANDOM": 3, + "ORIGINAL_DST_LB": 4, + "MAGLEV": 5, +} + +func (x Cluster_LbPolicy) String() string { + return proto.EnumName(Cluster_LbPolicy_name, int32(x)) +} +func (Cluster_LbPolicy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 1} +} + +type Cluster_DnsLookupFamily int32 + +const ( + Cluster_AUTO Cluster_DnsLookupFamily = 0 + Cluster_V4_ONLY Cluster_DnsLookupFamily = 1 + Cluster_V6_ONLY Cluster_DnsLookupFamily = 2 +) + +var Cluster_DnsLookupFamily_name = map[int32]string{ + 0: "AUTO", + 1: "V4_ONLY", + 2: "V6_ONLY", +} +var Cluster_DnsLookupFamily_value = map[string]int32{ + "AUTO": 0, + "V4_ONLY": 1, + "V6_ONLY": 2, +} + +func (x Cluster_DnsLookupFamily) String() string { + return proto.EnumName(Cluster_DnsLookupFamily_name, int32(x)) +} +func (Cluster_DnsLookupFamily) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 2} +} + +type Cluster_ClusterProtocolSelection int32 + +const ( + Cluster_USE_CONFIGURED_PROTOCOL Cluster_ClusterProtocolSelection = 0 + Cluster_USE_DOWNSTREAM_PROTOCOL Cluster_ClusterProtocolSelection = 1 +) + +var Cluster_ClusterProtocolSelection_name = map[int32]string{ + 0: "USE_CONFIGURED_PROTOCOL", + 1: "USE_DOWNSTREAM_PROTOCOL", +} +var Cluster_ClusterProtocolSelection_value = map[string]int32{ + "USE_CONFIGURED_PROTOCOL": 0, + "USE_DOWNSTREAM_PROTOCOL": 1, +} + +func (x Cluster_ClusterProtocolSelection) String() string { + return proto.EnumName(Cluster_ClusterProtocolSelection_name, int32(x)) +} +func (Cluster_ClusterProtocolSelection) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 3} +} + +type Cluster_LbSubsetConfig_LbSubsetFallbackPolicy int32 + +const ( + Cluster_LbSubsetConfig_NO_FALLBACK Cluster_LbSubsetConfig_LbSubsetFallbackPolicy = 0 + Cluster_LbSubsetConfig_ANY_ENDPOINT Cluster_LbSubsetConfig_LbSubsetFallbackPolicy = 1 + Cluster_LbSubsetConfig_DEFAULT_SUBSET Cluster_LbSubsetConfig_LbSubsetFallbackPolicy = 2 +) + +var Cluster_LbSubsetConfig_LbSubsetFallbackPolicy_name = map[int32]string{ + 0: 
"NO_FALLBACK", + 1: "ANY_ENDPOINT", + 2: "DEFAULT_SUBSET", +} +var Cluster_LbSubsetConfig_LbSubsetFallbackPolicy_value = map[string]int32{ + "NO_FALLBACK": 0, + "ANY_ENDPOINT": 1, + "DEFAULT_SUBSET": 2, +} + +func (x Cluster_LbSubsetConfig_LbSubsetFallbackPolicy) String() string { + return proto.EnumName(Cluster_LbSubsetConfig_LbSubsetFallbackPolicy_name, int32(x)) +} +func (Cluster_LbSubsetConfig_LbSubsetFallbackPolicy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 4, 0} +} + +type Cluster_RingHashLbConfig_HashFunction int32 + +const ( + Cluster_RingHashLbConfig_XX_HASH Cluster_RingHashLbConfig_HashFunction = 0 + Cluster_RingHashLbConfig_MURMUR_HASH_2 Cluster_RingHashLbConfig_HashFunction = 1 +) + +var Cluster_RingHashLbConfig_HashFunction_name = map[int32]string{ + 0: "XX_HASH", + 1: "MURMUR_HASH_2", +} +var Cluster_RingHashLbConfig_HashFunction_value = map[string]int32{ + "XX_HASH": 0, + "MURMUR_HASH_2": 1, +} + +func (x Cluster_RingHashLbConfig_HashFunction) String() string { + return proto.EnumName(Cluster_RingHashLbConfig_HashFunction_name, int32(x)) +} +func (Cluster_RingHashLbConfig_HashFunction) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 6, 0} +} + +type Cluster struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + AltStatName string `protobuf:"bytes,28,opt,name=alt_stat_name,json=altStatName,proto3" json:"alt_stat_name,omitempty"` + // Types that are valid to be assigned to ClusterDiscoveryType: + // *Cluster_Type + // *Cluster_ClusterType + ClusterDiscoveryType isCluster_ClusterDiscoveryType `protobuf_oneof:"cluster_discovery_type"` + EdsClusterConfig *Cluster_EdsClusterConfig `protobuf:"bytes,3,opt,name=eds_cluster_config,json=edsClusterConfig,proto3" json:"eds_cluster_config,omitempty"` + ConnectTimeout *duration.Duration `protobuf:"bytes,4,opt,name=connect_timeout,json=connectTimeout,proto3" json:"connect_timeout,omitempty"` + PerConnectionBufferLimitBytes *wrappers.UInt32Value `protobuf:"bytes,5,opt,name=per_connection_buffer_limit_bytes,json=perConnectionBufferLimitBytes,proto3" json:"per_connection_buffer_limit_bytes,omitempty"` + LbPolicy Cluster_LbPolicy `protobuf:"varint,6,opt,name=lb_policy,json=lbPolicy,proto3,enum=envoy.api.v2.Cluster_LbPolicy" json:"lb_policy,omitempty"` + Hosts []*address.Address `protobuf:"bytes,7,rep,name=hosts,proto3" json:"hosts,omitempty"` // Deprecated: Do not use. 
+ LoadAssignment *eds.ClusterLoadAssignment `protobuf:"bytes,33,opt,name=load_assignment,json=loadAssignment,proto3" json:"load_assignment,omitempty"` + HealthChecks []*health_check.HealthCheck `protobuf:"bytes,8,rep,name=health_checks,json=healthChecks,proto3" json:"health_checks,omitempty"` + MaxRequestsPerConnection *wrappers.UInt32Value `protobuf:"bytes,9,opt,name=max_requests_per_connection,json=maxRequestsPerConnection,proto3" json:"max_requests_per_connection,omitempty"` + CircuitBreakers *circuit_breaker.CircuitBreakers `protobuf:"bytes,10,opt,name=circuit_breakers,json=circuitBreakers,proto3" json:"circuit_breakers,omitempty"` + TlsContext *cert.UpstreamTlsContext `protobuf:"bytes,11,opt,name=tls_context,json=tlsContext,proto3" json:"tls_context,omitempty"` + CommonHttpProtocolOptions *protocol.HttpProtocolOptions `protobuf:"bytes,29,opt,name=common_http_protocol_options,json=commonHttpProtocolOptions,proto3" json:"common_http_protocol_options,omitempty"` + HttpProtocolOptions *protocol.Http1ProtocolOptions `protobuf:"bytes,13,opt,name=http_protocol_options,json=httpProtocolOptions,proto3" json:"http_protocol_options,omitempty"` + Http2ProtocolOptions *protocol.Http2ProtocolOptions `protobuf:"bytes,14,opt,name=http2_protocol_options,json=http2ProtocolOptions,proto3" json:"http2_protocol_options,omitempty"` + ExtensionProtocolOptions map[string]*_struct.Struct `protobuf:"bytes,35,rep,name=extension_protocol_options,json=extensionProtocolOptions,proto3" json:"extension_protocol_options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TypedExtensionProtocolOptions map[string]*any.Any `protobuf:"bytes,36,rep,name=typed_extension_protocol_options,json=typedExtensionProtocolOptions,proto3" json:"typed_extension_protocol_options,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + DnsRefreshRate *duration.Duration `protobuf:"bytes,16,opt,name=dns_refresh_rate,json=dnsRefreshRate,proto3" json:"dns_refresh_rate,omitempty"` + DnsLookupFamily Cluster_DnsLookupFamily `protobuf:"varint,17,opt,name=dns_lookup_family,json=dnsLookupFamily,proto3,enum=envoy.api.v2.Cluster_DnsLookupFamily" json:"dns_lookup_family,omitempty"` + DnsResolvers []*address.Address `protobuf:"bytes,18,rep,name=dns_resolvers,json=dnsResolvers,proto3" json:"dns_resolvers,omitempty"` + OutlierDetection *outlier_detection.OutlierDetection `protobuf:"bytes,19,opt,name=outlier_detection,json=outlierDetection,proto3" json:"outlier_detection,omitempty"` + CleanupInterval *duration.Duration `protobuf:"bytes,20,opt,name=cleanup_interval,json=cleanupInterval,proto3" json:"cleanup_interval,omitempty"` + UpstreamBindConfig *address.BindConfig `protobuf:"bytes,21,opt,name=upstream_bind_config,json=upstreamBindConfig,proto3" json:"upstream_bind_config,omitempty"` + LbSubsetConfig *Cluster_LbSubsetConfig `protobuf:"bytes,22,opt,name=lb_subset_config,json=lbSubsetConfig,proto3" json:"lb_subset_config,omitempty"` + // Types that are valid to be assigned to LbConfig: + // *Cluster_RingHashLbConfig_ + // *Cluster_OriginalDstLbConfig_ + // *Cluster_LeastRequestLbConfig_ + LbConfig isCluster_LbConfig `protobuf_oneof:"lb_config"` + CommonLbConfig *Cluster_CommonLbConfig `protobuf:"bytes,27,opt,name=common_lb_config,json=commonLbConfig,proto3" json:"common_lb_config,omitempty"` + TransportSocket *base.TransportSocket `protobuf:"bytes,24,opt,name=transport_socket,json=transportSocket,proto3" json:"transport_socket,omitempty"` + Metadata 
*base.Metadata `protobuf:"bytes,25,opt,name=metadata,proto3" json:"metadata,omitempty"` + ProtocolSelection Cluster_ClusterProtocolSelection `protobuf:"varint,26,opt,name=protocol_selection,json=protocolSelection,proto3,enum=envoy.api.v2.Cluster_ClusterProtocolSelection" json:"protocol_selection,omitempty"` + UpstreamConnectionOptions *UpstreamConnectionOptions `protobuf:"bytes,30,opt,name=upstream_connection_options,json=upstreamConnectionOptions,proto3" json:"upstream_connection_options,omitempty"` + CloseConnectionsOnHostHealthFailure bool `protobuf:"varint,31,opt,name=close_connections_on_host_health_failure,json=closeConnectionsOnHostHealthFailure,proto3" json:"close_connections_on_host_health_failure,omitempty"` + DrainConnectionsOnHostRemoval bool `protobuf:"varint,32,opt,name=drain_connections_on_host_removal,json=drainConnectionsOnHostRemoval,proto3" json:"drain_connections_on_host_removal,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster) Reset() { *m = Cluster{} } +func (m *Cluster) String() string { return proto.CompactTextString(m) } +func (*Cluster) ProtoMessage() {} +func (*Cluster) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0} +} +func (m *Cluster) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster.Unmarshal(m, b) +} +func (m *Cluster) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster.Marshal(b, m, deterministic) +} +func (dst *Cluster) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster.Merge(dst, src) +} +func (m *Cluster) XXX_Size() int { + return xxx_messageInfo_Cluster.Size(m) +} +func (m *Cluster) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster proto.InternalMessageInfo + +func (m *Cluster) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster) GetAltStatName() string { + if m != nil { + return m.AltStatName + } + return "" +} + +type isCluster_ClusterDiscoveryType interface { + isCluster_ClusterDiscoveryType() +} + +type Cluster_Type struct { + Type Cluster_DiscoveryType `protobuf:"varint,2,opt,name=type,proto3,enum=envoy.api.v2.Cluster_DiscoveryType,oneof"` +} + +type Cluster_ClusterType struct { + ClusterType *Cluster_CustomClusterType `protobuf:"bytes,38,opt,name=cluster_type,json=clusterType,proto3,oneof"` +} + +func (*Cluster_Type) isCluster_ClusterDiscoveryType() {} + +func (*Cluster_ClusterType) isCluster_ClusterDiscoveryType() {} + +func (m *Cluster) GetClusterDiscoveryType() isCluster_ClusterDiscoveryType { + if m != nil { + return m.ClusterDiscoveryType + } + return nil +} + +func (m *Cluster) GetType() Cluster_DiscoveryType { + if x, ok := m.GetClusterDiscoveryType().(*Cluster_Type); ok { + return x.Type + } + return Cluster_STATIC +} + +func (m *Cluster) GetClusterType() *Cluster_CustomClusterType { + if x, ok := m.GetClusterDiscoveryType().(*Cluster_ClusterType); ok { + return x.ClusterType + } + return nil +} + +func (m *Cluster) GetEdsClusterConfig() *Cluster_EdsClusterConfig { + if m != nil { + return m.EdsClusterConfig + } + return nil +} + +func (m *Cluster) GetConnectTimeout() *duration.Duration { + if m != nil { + return m.ConnectTimeout + } + return nil +} + +func (m *Cluster) GetPerConnectionBufferLimitBytes() *wrappers.UInt32Value { + if m != nil { + return m.PerConnectionBufferLimitBytes + } + return nil +} + +func (m *Cluster) GetLbPolicy() Cluster_LbPolicy { 
+ if m != nil { + return m.LbPolicy + } + return Cluster_ROUND_ROBIN +} + +// Deprecated: Do not use. +func (m *Cluster) GetHosts() []*address.Address { + if m != nil { + return m.Hosts + } + return nil +} + +func (m *Cluster) GetLoadAssignment() *eds.ClusterLoadAssignment { + if m != nil { + return m.LoadAssignment + } + return nil +} + +func (m *Cluster) GetHealthChecks() []*health_check.HealthCheck { + if m != nil { + return m.HealthChecks + } + return nil +} + +func (m *Cluster) GetMaxRequestsPerConnection() *wrappers.UInt32Value { + if m != nil { + return m.MaxRequestsPerConnection + } + return nil +} + +func (m *Cluster) GetCircuitBreakers() *circuit_breaker.CircuitBreakers { + if m != nil { + return m.CircuitBreakers + } + return nil +} + +func (m *Cluster) GetTlsContext() *cert.UpstreamTlsContext { + if m != nil { + return m.TlsContext + } + return nil +} + +func (m *Cluster) GetCommonHttpProtocolOptions() *protocol.HttpProtocolOptions { + if m != nil { + return m.CommonHttpProtocolOptions + } + return nil +} + +func (m *Cluster) GetHttpProtocolOptions() *protocol.Http1ProtocolOptions { + if m != nil { + return m.HttpProtocolOptions + } + return nil +} + +func (m *Cluster) GetHttp2ProtocolOptions() *protocol.Http2ProtocolOptions { + if m != nil { + return m.Http2ProtocolOptions + } + return nil +} + +func (m *Cluster) GetExtensionProtocolOptions() map[string]*_struct.Struct { + if m != nil { + return m.ExtensionProtocolOptions + } + return nil +} + +func (m *Cluster) GetTypedExtensionProtocolOptions() map[string]*any.Any { + if m != nil { + return m.TypedExtensionProtocolOptions + } + return nil +} + +func (m *Cluster) GetDnsRefreshRate() *duration.Duration { + if m != nil { + return m.DnsRefreshRate + } + return nil +} + +func (m *Cluster) GetDnsLookupFamily() Cluster_DnsLookupFamily { + if m != nil { + return m.DnsLookupFamily + } + return Cluster_AUTO +} + +func (m *Cluster) GetDnsResolvers() []*address.Address { + if m != nil { + return m.DnsResolvers + } + return nil +} + +func (m *Cluster) GetOutlierDetection() *outlier_detection.OutlierDetection { + if m != nil { + return m.OutlierDetection + } + return nil +} + +func (m *Cluster) GetCleanupInterval() *duration.Duration { + if m != nil { + return m.CleanupInterval + } + return nil +} + +func (m *Cluster) GetUpstreamBindConfig() *address.BindConfig { + if m != nil { + return m.UpstreamBindConfig + } + return nil +} + +func (m *Cluster) GetLbSubsetConfig() *Cluster_LbSubsetConfig { + if m != nil { + return m.LbSubsetConfig + } + return nil +} + +type isCluster_LbConfig interface { + isCluster_LbConfig() +} + +type Cluster_RingHashLbConfig_ struct { + RingHashLbConfig *Cluster_RingHashLbConfig `protobuf:"bytes,23,opt,name=ring_hash_lb_config,json=ringHashLbConfig,proto3,oneof"` +} + +type Cluster_OriginalDstLbConfig_ struct { + OriginalDstLbConfig *Cluster_OriginalDstLbConfig `protobuf:"bytes,34,opt,name=original_dst_lb_config,json=originalDstLbConfig,proto3,oneof"` +} + +type Cluster_LeastRequestLbConfig_ struct { + LeastRequestLbConfig *Cluster_LeastRequestLbConfig `protobuf:"bytes,37,opt,name=least_request_lb_config,json=leastRequestLbConfig,proto3,oneof"` +} + +func (*Cluster_RingHashLbConfig_) isCluster_LbConfig() {} + +func (*Cluster_OriginalDstLbConfig_) isCluster_LbConfig() {} + +func (*Cluster_LeastRequestLbConfig_) isCluster_LbConfig() {} + +func (m *Cluster) GetLbConfig() isCluster_LbConfig { + if m != nil { + return m.LbConfig + } + return nil +} + +func (m *Cluster) GetRingHashLbConfig() *Cluster_RingHashLbConfig { 
+ if x, ok := m.GetLbConfig().(*Cluster_RingHashLbConfig_); ok { + return x.RingHashLbConfig + } + return nil +} + +func (m *Cluster) GetOriginalDstLbConfig() *Cluster_OriginalDstLbConfig { + if x, ok := m.GetLbConfig().(*Cluster_OriginalDstLbConfig_); ok { + return x.OriginalDstLbConfig + } + return nil +} + +func (m *Cluster) GetLeastRequestLbConfig() *Cluster_LeastRequestLbConfig { + if x, ok := m.GetLbConfig().(*Cluster_LeastRequestLbConfig_); ok { + return x.LeastRequestLbConfig + } + return nil +} + +func (m *Cluster) GetCommonLbConfig() *Cluster_CommonLbConfig { + if m != nil { + return m.CommonLbConfig + } + return nil +} + +func (m *Cluster) GetTransportSocket() *base.TransportSocket { + if m != nil { + return m.TransportSocket + } + return nil +} + +func (m *Cluster) GetMetadata() *base.Metadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Cluster) GetProtocolSelection() Cluster_ClusterProtocolSelection { + if m != nil { + return m.ProtocolSelection + } + return Cluster_USE_CONFIGURED_PROTOCOL +} + +func (m *Cluster) GetUpstreamConnectionOptions() *UpstreamConnectionOptions { + if m != nil { + return m.UpstreamConnectionOptions + } + return nil +} + +func (m *Cluster) GetCloseConnectionsOnHostHealthFailure() bool { + if m != nil { + return m.CloseConnectionsOnHostHealthFailure + } + return false +} + +func (m *Cluster) GetDrainConnectionsOnHostRemoval() bool { + if m != nil { + return m.DrainConnectionsOnHostRemoval + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Cluster) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Cluster_OneofMarshaler, _Cluster_OneofUnmarshaler, _Cluster_OneofSizer, []interface{}{ + (*Cluster_Type)(nil), + (*Cluster_ClusterType)(nil), + (*Cluster_RingHashLbConfig_)(nil), + (*Cluster_OriginalDstLbConfig_)(nil), + (*Cluster_LeastRequestLbConfig_)(nil), + } +} + +func _Cluster_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Cluster) + // cluster_discovery_type + switch x := m.ClusterDiscoveryType.(type) { + case *Cluster_Type: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Type)) + case *Cluster_ClusterType: + b.EncodeVarint(38<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClusterType); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Cluster.ClusterDiscoveryType has unexpected type %T", x) + } + // lb_config + switch x := m.LbConfig.(type) { + case *Cluster_RingHashLbConfig_: + b.EncodeVarint(23<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.RingHashLbConfig); err != nil { + return err + } + case *Cluster_OriginalDstLbConfig_: + b.EncodeVarint(34<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OriginalDstLbConfig); err != nil { + return err + } + case *Cluster_LeastRequestLbConfig_: + b.EncodeVarint(37<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LeastRequestLbConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Cluster.LbConfig has unexpected type %T", x) + } + return nil +} + +func _Cluster_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Cluster) + switch tag { + case 2: // cluster_discovery_type.type + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.ClusterDiscoveryType = 
&Cluster_Type{Cluster_DiscoveryType(x)} + return true, err + case 38: // cluster_discovery_type.cluster_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_CustomClusterType) + err := b.DecodeMessage(msg) + m.ClusterDiscoveryType = &Cluster_ClusterType{msg} + return true, err + case 23: // lb_config.ring_hash_lb_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_RingHashLbConfig) + err := b.DecodeMessage(msg) + m.LbConfig = &Cluster_RingHashLbConfig_{msg} + return true, err + case 34: // lb_config.original_dst_lb_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_OriginalDstLbConfig) + err := b.DecodeMessage(msg) + m.LbConfig = &Cluster_OriginalDstLbConfig_{msg} + return true, err + case 37: // lb_config.least_request_lb_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_LeastRequestLbConfig) + err := b.DecodeMessage(msg) + m.LbConfig = &Cluster_LeastRequestLbConfig_{msg} + return true, err + default: + return false, nil + } +} + +func _Cluster_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Cluster) + // cluster_discovery_type + switch x := m.ClusterDiscoveryType.(type) { + case *Cluster_Type: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Type)) + case *Cluster_ClusterType: + s := proto.Size(x.ClusterType) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + // lb_config + switch x := m.LbConfig.(type) { + case *Cluster_RingHashLbConfig_: + s := proto.Size(x.RingHashLbConfig) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Cluster_OriginalDstLbConfig_: + s := proto.Size(x.OriginalDstLbConfig) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Cluster_LeastRequestLbConfig_: + s := proto.Size(x.LeastRequestLbConfig) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Cluster_CustomClusterType struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + TypedConfig *any.Any `protobuf:"bytes,2,opt,name=typed_config,json=typedConfig,proto3" json:"typed_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_CustomClusterType) Reset() { *m = Cluster_CustomClusterType{} } +func (m *Cluster_CustomClusterType) String() string { return proto.CompactTextString(m) } +func (*Cluster_CustomClusterType) ProtoMessage() {} +func (*Cluster_CustomClusterType) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 0} +} +func (m *Cluster_CustomClusterType) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_CustomClusterType.Unmarshal(m, b) +} +func (m *Cluster_CustomClusterType) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_CustomClusterType.Marshal(b, m, deterministic) +} +func (dst *Cluster_CustomClusterType) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_CustomClusterType.Merge(dst, src) +} +func (m *Cluster_CustomClusterType) XXX_Size() int { + return xxx_messageInfo_Cluster_CustomClusterType.Size(m) +} +func (m *Cluster_CustomClusterType) XXX_DiscardUnknown() { 
+ xxx_messageInfo_Cluster_CustomClusterType.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_CustomClusterType proto.InternalMessageInfo + +func (m *Cluster_CustomClusterType) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Cluster_CustomClusterType) GetTypedConfig() *any.Any { + if m != nil { + return m.TypedConfig + } + return nil +} + +type Cluster_EdsClusterConfig struct { + EdsConfig *config_source.ConfigSource `protobuf:"bytes,1,opt,name=eds_config,json=edsConfig,proto3" json:"eds_config,omitempty"` + ServiceName string `protobuf:"bytes,2,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_EdsClusterConfig) Reset() { *m = Cluster_EdsClusterConfig{} } +func (m *Cluster_EdsClusterConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_EdsClusterConfig) ProtoMessage() {} +func (*Cluster_EdsClusterConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 1} +} +func (m *Cluster_EdsClusterConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_EdsClusterConfig.Unmarshal(m, b) +} +func (m *Cluster_EdsClusterConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_EdsClusterConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_EdsClusterConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_EdsClusterConfig.Merge(dst, src) +} +func (m *Cluster_EdsClusterConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_EdsClusterConfig.Size(m) +} +func (m *Cluster_EdsClusterConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_EdsClusterConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_EdsClusterConfig proto.InternalMessageInfo + +func (m *Cluster_EdsClusterConfig) GetEdsConfig() *config_source.ConfigSource { + if m != nil { + return m.EdsConfig + } + return nil +} + +func (m *Cluster_EdsClusterConfig) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +type Cluster_LbSubsetConfig struct { + FallbackPolicy Cluster_LbSubsetConfig_LbSubsetFallbackPolicy `protobuf:"varint,1,opt,name=fallback_policy,json=fallbackPolicy,proto3,enum=envoy.api.v2.Cluster_LbSubsetConfig_LbSubsetFallbackPolicy" json:"fallback_policy,omitempty"` + DefaultSubset *_struct.Struct `protobuf:"bytes,2,opt,name=default_subset,json=defaultSubset,proto3" json:"default_subset,omitempty"` + SubsetSelectors []*Cluster_LbSubsetConfig_LbSubsetSelector `protobuf:"bytes,3,rep,name=subset_selectors,json=subsetSelectors,proto3" json:"subset_selectors,omitempty"` + LocalityWeightAware bool `protobuf:"varint,4,opt,name=locality_weight_aware,json=localityWeightAware,proto3" json:"locality_weight_aware,omitempty"` + ScaleLocalityWeight bool `protobuf:"varint,5,opt,name=scale_locality_weight,json=scaleLocalityWeight,proto3" json:"scale_locality_weight,omitempty"` + PanicModeAny bool `protobuf:"varint,6,opt,name=panic_mode_any,json=panicModeAny,proto3" json:"panic_mode_any,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_LbSubsetConfig) Reset() { *m = Cluster_LbSubsetConfig{} } +func (m *Cluster_LbSubsetConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_LbSubsetConfig) ProtoMessage() {} +func (*Cluster_LbSubsetConfig) Descriptor() ([]byte, []int) { + 
return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 4} +} +func (m *Cluster_LbSubsetConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_LbSubsetConfig.Unmarshal(m, b) +} +func (m *Cluster_LbSubsetConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_LbSubsetConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_LbSubsetConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_LbSubsetConfig.Merge(dst, src) +} +func (m *Cluster_LbSubsetConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_LbSubsetConfig.Size(m) +} +func (m *Cluster_LbSubsetConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_LbSubsetConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_LbSubsetConfig proto.InternalMessageInfo + +func (m *Cluster_LbSubsetConfig) GetFallbackPolicy() Cluster_LbSubsetConfig_LbSubsetFallbackPolicy { + if m != nil { + return m.FallbackPolicy + } + return Cluster_LbSubsetConfig_NO_FALLBACK +} + +func (m *Cluster_LbSubsetConfig) GetDefaultSubset() *_struct.Struct { + if m != nil { + return m.DefaultSubset + } + return nil +} + +func (m *Cluster_LbSubsetConfig) GetSubsetSelectors() []*Cluster_LbSubsetConfig_LbSubsetSelector { + if m != nil { + return m.SubsetSelectors + } + return nil +} + +func (m *Cluster_LbSubsetConfig) GetLocalityWeightAware() bool { + if m != nil { + return m.LocalityWeightAware + } + return false +} + +func (m *Cluster_LbSubsetConfig) GetScaleLocalityWeight() bool { + if m != nil { + return m.ScaleLocalityWeight + } + return false +} + +func (m *Cluster_LbSubsetConfig) GetPanicModeAny() bool { + if m != nil { + return m.PanicModeAny + } + return false +} + +type Cluster_LbSubsetConfig_LbSubsetSelector struct { + Keys []string `protobuf:"bytes,1,rep,name=keys,proto3" json:"keys,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) Reset() { + *m = Cluster_LbSubsetConfig_LbSubsetSelector{} +} +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) String() string { return proto.CompactTextString(m) } +func (*Cluster_LbSubsetConfig_LbSubsetSelector) ProtoMessage() {} +func (*Cluster_LbSubsetConfig_LbSubsetSelector) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 4, 0} +} +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector.Unmarshal(m, b) +} +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector.Marshal(b, m, deterministic) +} +func (dst *Cluster_LbSubsetConfig_LbSubsetSelector) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector.Merge(dst, src) +} +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) XXX_Size() int { + return xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector.Size(m) +} +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_LbSubsetConfig_LbSubsetSelector proto.InternalMessageInfo + +func (m *Cluster_LbSubsetConfig_LbSubsetSelector) GetKeys() []string { + if m != nil { + return m.Keys + } + return nil +} + +type Cluster_LeastRequestLbConfig struct { + ChoiceCount *wrappers.UInt32Value 
`protobuf:"bytes,1,opt,name=choice_count,json=choiceCount,proto3" json:"choice_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_LeastRequestLbConfig) Reset() { *m = Cluster_LeastRequestLbConfig{} } +func (m *Cluster_LeastRequestLbConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_LeastRequestLbConfig) ProtoMessage() {} +func (*Cluster_LeastRequestLbConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 5} +} +func (m *Cluster_LeastRequestLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_LeastRequestLbConfig.Unmarshal(m, b) +} +func (m *Cluster_LeastRequestLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_LeastRequestLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_LeastRequestLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_LeastRequestLbConfig.Merge(dst, src) +} +func (m *Cluster_LeastRequestLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_LeastRequestLbConfig.Size(m) +} +func (m *Cluster_LeastRequestLbConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_LeastRequestLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_LeastRequestLbConfig proto.InternalMessageInfo + +func (m *Cluster_LeastRequestLbConfig) GetChoiceCount() *wrappers.UInt32Value { + if m != nil { + return m.ChoiceCount + } + return nil +} + +type Cluster_RingHashLbConfig struct { + MinimumRingSize *wrappers.UInt64Value `protobuf:"bytes,1,opt,name=minimum_ring_size,json=minimumRingSize,proto3" json:"minimum_ring_size,omitempty"` + HashFunction Cluster_RingHashLbConfig_HashFunction `protobuf:"varint,3,opt,name=hash_function,json=hashFunction,proto3,enum=envoy.api.v2.Cluster_RingHashLbConfig_HashFunction" json:"hash_function,omitempty"` + MaximumRingSize *wrappers.UInt64Value `protobuf:"bytes,4,opt,name=maximum_ring_size,json=maximumRingSize,proto3" json:"maximum_ring_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_RingHashLbConfig) Reset() { *m = Cluster_RingHashLbConfig{} } +func (m *Cluster_RingHashLbConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_RingHashLbConfig) ProtoMessage() {} +func (*Cluster_RingHashLbConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 6} +} +func (m *Cluster_RingHashLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_RingHashLbConfig.Unmarshal(m, b) +} +func (m *Cluster_RingHashLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_RingHashLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_RingHashLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_RingHashLbConfig.Merge(dst, src) +} +func (m *Cluster_RingHashLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_RingHashLbConfig.Size(m) +} +func (m *Cluster_RingHashLbConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_RingHashLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_RingHashLbConfig proto.InternalMessageInfo + +func (m *Cluster_RingHashLbConfig) GetMinimumRingSize() *wrappers.UInt64Value { + if m != nil { + return m.MinimumRingSize + } + return nil +} + +func (m *Cluster_RingHashLbConfig) GetHashFunction() Cluster_RingHashLbConfig_HashFunction { + if m != 
nil { + return m.HashFunction + } + return Cluster_RingHashLbConfig_XX_HASH +} + +func (m *Cluster_RingHashLbConfig) GetMaximumRingSize() *wrappers.UInt64Value { + if m != nil { + return m.MaximumRingSize + } + return nil +} + +type Cluster_OriginalDstLbConfig struct { + UseHttpHeader bool `protobuf:"varint,1,opt,name=use_http_header,json=useHttpHeader,proto3" json:"use_http_header,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_OriginalDstLbConfig) Reset() { *m = Cluster_OriginalDstLbConfig{} } +func (m *Cluster_OriginalDstLbConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_OriginalDstLbConfig) ProtoMessage() {} +func (*Cluster_OriginalDstLbConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 7} +} +func (m *Cluster_OriginalDstLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_OriginalDstLbConfig.Unmarshal(m, b) +} +func (m *Cluster_OriginalDstLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_OriginalDstLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_OriginalDstLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_OriginalDstLbConfig.Merge(dst, src) +} +func (m *Cluster_OriginalDstLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_OriginalDstLbConfig.Size(m) +} +func (m *Cluster_OriginalDstLbConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_OriginalDstLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_OriginalDstLbConfig proto.InternalMessageInfo + +func (m *Cluster_OriginalDstLbConfig) GetUseHttpHeader() bool { + if m != nil { + return m.UseHttpHeader + } + return false +} + +type Cluster_CommonLbConfig struct { + HealthyPanicThreshold *percent.Percent `protobuf:"bytes,1,opt,name=healthy_panic_threshold,json=healthyPanicThreshold,proto3" json:"healthy_panic_threshold,omitempty"` + // Types that are valid to be assigned to LocalityConfigSpecifier: + // *Cluster_CommonLbConfig_ZoneAwareLbConfig_ + // *Cluster_CommonLbConfig_LocalityWeightedLbConfig_ + LocalityConfigSpecifier isCluster_CommonLbConfig_LocalityConfigSpecifier `protobuf_oneof:"locality_config_specifier"` + UpdateMergeWindow *duration.Duration `protobuf:"bytes,4,opt,name=update_merge_window,json=updateMergeWindow,proto3" json:"update_merge_window,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_CommonLbConfig) Reset() { *m = Cluster_CommonLbConfig{} } +func (m *Cluster_CommonLbConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_CommonLbConfig) ProtoMessage() {} +func (*Cluster_CommonLbConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 8} +} +func (m *Cluster_CommonLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_CommonLbConfig.Unmarshal(m, b) +} +func (m *Cluster_CommonLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_CommonLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_CommonLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_CommonLbConfig.Merge(dst, src) +} +func (m *Cluster_CommonLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_CommonLbConfig.Size(m) +} +func (m *Cluster_CommonLbConfig) XXX_DiscardUnknown() { + 
xxx_messageInfo_Cluster_CommonLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_CommonLbConfig proto.InternalMessageInfo + +func (m *Cluster_CommonLbConfig) GetHealthyPanicThreshold() *percent.Percent { + if m != nil { + return m.HealthyPanicThreshold + } + return nil +} + +type isCluster_CommonLbConfig_LocalityConfigSpecifier interface { + isCluster_CommonLbConfig_LocalityConfigSpecifier() +} + +type Cluster_CommonLbConfig_ZoneAwareLbConfig_ struct { + ZoneAwareLbConfig *Cluster_CommonLbConfig_ZoneAwareLbConfig `protobuf:"bytes,2,opt,name=zone_aware_lb_config,json=zoneAwareLbConfig,proto3,oneof"` +} + +type Cluster_CommonLbConfig_LocalityWeightedLbConfig_ struct { + LocalityWeightedLbConfig *Cluster_CommonLbConfig_LocalityWeightedLbConfig `protobuf:"bytes,3,opt,name=locality_weighted_lb_config,json=localityWeightedLbConfig,proto3,oneof"` +} + +func (*Cluster_CommonLbConfig_ZoneAwareLbConfig_) isCluster_CommonLbConfig_LocalityConfigSpecifier() {} + +func (*Cluster_CommonLbConfig_LocalityWeightedLbConfig_) isCluster_CommonLbConfig_LocalityConfigSpecifier() { +} + +func (m *Cluster_CommonLbConfig) GetLocalityConfigSpecifier() isCluster_CommonLbConfig_LocalityConfigSpecifier { + if m != nil { + return m.LocalityConfigSpecifier + } + return nil +} + +func (m *Cluster_CommonLbConfig) GetZoneAwareLbConfig() *Cluster_CommonLbConfig_ZoneAwareLbConfig { + if x, ok := m.GetLocalityConfigSpecifier().(*Cluster_CommonLbConfig_ZoneAwareLbConfig_); ok { + return x.ZoneAwareLbConfig + } + return nil +} + +func (m *Cluster_CommonLbConfig) GetLocalityWeightedLbConfig() *Cluster_CommonLbConfig_LocalityWeightedLbConfig { + if x, ok := m.GetLocalityConfigSpecifier().(*Cluster_CommonLbConfig_LocalityWeightedLbConfig_); ok { + return x.LocalityWeightedLbConfig + } + return nil +} + +func (m *Cluster_CommonLbConfig) GetUpdateMergeWindow() *duration.Duration { + if m != nil { + return m.UpdateMergeWindow + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Cluster_CommonLbConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Cluster_CommonLbConfig_OneofMarshaler, _Cluster_CommonLbConfig_OneofUnmarshaler, _Cluster_CommonLbConfig_OneofSizer, []interface{}{ + (*Cluster_CommonLbConfig_ZoneAwareLbConfig_)(nil), + (*Cluster_CommonLbConfig_LocalityWeightedLbConfig_)(nil), + } +} + +func _Cluster_CommonLbConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Cluster_CommonLbConfig) + // locality_config_specifier + switch x := m.LocalityConfigSpecifier.(type) { + case *Cluster_CommonLbConfig_ZoneAwareLbConfig_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ZoneAwareLbConfig); err != nil { + return err + } + case *Cluster_CommonLbConfig_LocalityWeightedLbConfig_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LocalityWeightedLbConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Cluster_CommonLbConfig.LocalityConfigSpecifier has unexpected type %T", x) + } + return nil +} + +func _Cluster_CommonLbConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Cluster_CommonLbConfig) + switch tag { + case 2: // locality_config_specifier.zone_aware_lb_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_CommonLbConfig_ZoneAwareLbConfig) + err := b.DecodeMessage(msg) + m.LocalityConfigSpecifier = &Cluster_CommonLbConfig_ZoneAwareLbConfig_{msg} + return true, err + case 3: // locality_config_specifier.locality_weighted_lb_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Cluster_CommonLbConfig_LocalityWeightedLbConfig) + err := b.DecodeMessage(msg) + m.LocalityConfigSpecifier = &Cluster_CommonLbConfig_LocalityWeightedLbConfig_{msg} + return true, err + default: + return false, nil + } +} + +func _Cluster_CommonLbConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Cluster_CommonLbConfig) + // locality_config_specifier + switch x := m.LocalityConfigSpecifier.(type) { + case *Cluster_CommonLbConfig_ZoneAwareLbConfig_: + s := proto.Size(x.ZoneAwareLbConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Cluster_CommonLbConfig_LocalityWeightedLbConfig_: + s := proto.Size(x.LocalityWeightedLbConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Cluster_CommonLbConfig_ZoneAwareLbConfig struct { + RoutingEnabled *percent.Percent `protobuf:"bytes,1,opt,name=routing_enabled,json=routingEnabled,proto3" json:"routing_enabled,omitempty"` + MinClusterSize *wrappers.UInt64Value `protobuf:"bytes,2,opt,name=min_cluster_size,json=minClusterSize,proto3" json:"min_cluster_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) Reset() { + *m = Cluster_CommonLbConfig_ZoneAwareLbConfig{} +} +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) String() string { return proto.CompactTextString(m) } +func (*Cluster_CommonLbConfig_ZoneAwareLbConfig) ProtoMessage() {} +func (*Cluster_CommonLbConfig_ZoneAwareLbConfig) Descriptor() ([]byte, []int) { + return 
fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 8, 0} +} +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig.Unmarshal(m, b) +} +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_CommonLbConfig_ZoneAwareLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig.Merge(dst, src) +} +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig.Size(m) +} +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_CommonLbConfig_ZoneAwareLbConfig proto.InternalMessageInfo + +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) GetRoutingEnabled() *percent.Percent { + if m != nil { + return m.RoutingEnabled + } + return nil +} + +func (m *Cluster_CommonLbConfig_ZoneAwareLbConfig) GetMinClusterSize() *wrappers.UInt64Value { + if m != nil { + return m.MinClusterSize + } + return nil +} + +type Cluster_CommonLbConfig_LocalityWeightedLbConfig struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) Reset() { + *m = Cluster_CommonLbConfig_LocalityWeightedLbConfig{} +} +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) String() string { + return proto.CompactTextString(m) +} +func (*Cluster_CommonLbConfig_LocalityWeightedLbConfig) ProtoMessage() {} +func (*Cluster_CommonLbConfig_LocalityWeightedLbConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{0, 8, 1} +} +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig.Unmarshal(m, b) +} +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig.Marshal(b, m, deterministic) +} +func (dst *Cluster_CommonLbConfig_LocalityWeightedLbConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig.Merge(dst, src) +} +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) XXX_Size() int { + return xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig.Size(m) +} +func (m *Cluster_CommonLbConfig_LocalityWeightedLbConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Cluster_CommonLbConfig_LocalityWeightedLbConfig proto.InternalMessageInfo + +type UpstreamBindConfig struct { + SourceAddress *address.Address `protobuf:"bytes,1,opt,name=source_address,json=sourceAddress,proto3" json:"source_address,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpstreamBindConfig) Reset() { *m = UpstreamBindConfig{} } +func (m *UpstreamBindConfig) String() string { return proto.CompactTextString(m) } +func (*UpstreamBindConfig) ProtoMessage() {} +func (*UpstreamBindConfig) Descriptor() ([]byte, []int) { + return 
fileDescriptor_cds_1dff7e464f9f9a10, []int{1} +} +func (m *UpstreamBindConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpstreamBindConfig.Unmarshal(m, b) +} +func (m *UpstreamBindConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpstreamBindConfig.Marshal(b, m, deterministic) +} +func (dst *UpstreamBindConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpstreamBindConfig.Merge(dst, src) +} +func (m *UpstreamBindConfig) XXX_Size() int { + return xxx_messageInfo_UpstreamBindConfig.Size(m) +} +func (m *UpstreamBindConfig) XXX_DiscardUnknown() { + xxx_messageInfo_UpstreamBindConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_UpstreamBindConfig proto.InternalMessageInfo + +func (m *UpstreamBindConfig) GetSourceAddress() *address.Address { + if m != nil { + return m.SourceAddress + } + return nil +} + +type UpstreamConnectionOptions struct { + TcpKeepalive *address.TcpKeepalive `protobuf:"bytes,1,opt,name=tcp_keepalive,json=tcpKeepalive,proto3" json:"tcp_keepalive,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpstreamConnectionOptions) Reset() { *m = UpstreamConnectionOptions{} } +func (m *UpstreamConnectionOptions) String() string { return proto.CompactTextString(m) } +func (*UpstreamConnectionOptions) ProtoMessage() {} +func (*UpstreamConnectionOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_cds_1dff7e464f9f9a10, []int{2} +} +func (m *UpstreamConnectionOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpstreamConnectionOptions.Unmarshal(m, b) +} +func (m *UpstreamConnectionOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpstreamConnectionOptions.Marshal(b, m, deterministic) +} +func (dst *UpstreamConnectionOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpstreamConnectionOptions.Merge(dst, src) +} +func (m *UpstreamConnectionOptions) XXX_Size() int { + return xxx_messageInfo_UpstreamConnectionOptions.Size(m) +} +func (m *UpstreamConnectionOptions) XXX_DiscardUnknown() { + xxx_messageInfo_UpstreamConnectionOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_UpstreamConnectionOptions proto.InternalMessageInfo + +func (m *UpstreamConnectionOptions) GetTcpKeepalive() *address.TcpKeepalive { + if m != nil { + return m.TcpKeepalive + } + return nil +} + +func init() { + proto.RegisterType((*Cluster)(nil), "envoy.api.v2.Cluster") + proto.RegisterMapType((map[string]*_struct.Struct)(nil), "envoy.api.v2.Cluster.ExtensionProtocolOptionsEntry") + proto.RegisterMapType((map[string]*any.Any)(nil), "envoy.api.v2.Cluster.TypedExtensionProtocolOptionsEntry") + proto.RegisterType((*Cluster_CustomClusterType)(nil), "envoy.api.v2.Cluster.CustomClusterType") + proto.RegisterType((*Cluster_EdsClusterConfig)(nil), "envoy.api.v2.Cluster.EdsClusterConfig") + proto.RegisterType((*Cluster_LbSubsetConfig)(nil), "envoy.api.v2.Cluster.LbSubsetConfig") + proto.RegisterType((*Cluster_LbSubsetConfig_LbSubsetSelector)(nil), "envoy.api.v2.Cluster.LbSubsetConfig.LbSubsetSelector") + proto.RegisterType((*Cluster_LeastRequestLbConfig)(nil), "envoy.api.v2.Cluster.LeastRequestLbConfig") + proto.RegisterType((*Cluster_RingHashLbConfig)(nil), "envoy.api.v2.Cluster.RingHashLbConfig") + proto.RegisterType((*Cluster_OriginalDstLbConfig)(nil), "envoy.api.v2.Cluster.OriginalDstLbConfig") + proto.RegisterType((*Cluster_CommonLbConfig)(nil), "envoy.api.v2.Cluster.CommonLbConfig") + 
proto.RegisterType((*Cluster_CommonLbConfig_ZoneAwareLbConfig)(nil), "envoy.api.v2.Cluster.CommonLbConfig.ZoneAwareLbConfig") + proto.RegisterType((*Cluster_CommonLbConfig_LocalityWeightedLbConfig)(nil), "envoy.api.v2.Cluster.CommonLbConfig.LocalityWeightedLbConfig") + proto.RegisterType((*UpstreamBindConfig)(nil), "envoy.api.v2.UpstreamBindConfig") + proto.RegisterType((*UpstreamConnectionOptions)(nil), "envoy.api.v2.UpstreamConnectionOptions") + proto.RegisterEnum("envoy.api.v2.Cluster_DiscoveryType", Cluster_DiscoveryType_name, Cluster_DiscoveryType_value) + proto.RegisterEnum("envoy.api.v2.Cluster_LbPolicy", Cluster_LbPolicy_name, Cluster_LbPolicy_value) + proto.RegisterEnum("envoy.api.v2.Cluster_DnsLookupFamily", Cluster_DnsLookupFamily_name, Cluster_DnsLookupFamily_value) + proto.RegisterEnum("envoy.api.v2.Cluster_ClusterProtocolSelection", Cluster_ClusterProtocolSelection_name, Cluster_ClusterProtocolSelection_value) + proto.RegisterEnum("envoy.api.v2.Cluster_LbSubsetConfig_LbSubsetFallbackPolicy", Cluster_LbSubsetConfig_LbSubsetFallbackPolicy_name, Cluster_LbSubsetConfig_LbSubsetFallbackPolicy_value) + proto.RegisterEnum("envoy.api.v2.Cluster_RingHashLbConfig_HashFunction", Cluster_RingHashLbConfig_HashFunction_name, Cluster_RingHashLbConfig_HashFunction_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ClusterDiscoveryServiceClient is the client API for ClusterDiscoveryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ClusterDiscoveryServiceClient interface { + StreamClusters(ctx context.Context, opts ...grpc.CallOption) (ClusterDiscoveryService_StreamClustersClient, error) + DeltaClusters(ctx context.Context, opts ...grpc.CallOption) (ClusterDiscoveryService_DeltaClustersClient, error) + FetchClusters(ctx context.Context, in *discovery.DiscoveryRequest, opts ...grpc.CallOption) (*discovery.DiscoveryResponse, error) +} + +type clusterDiscoveryServiceClient struct { + cc *grpc.ClientConn +} + +func NewClusterDiscoveryServiceClient(cc *grpc.ClientConn) ClusterDiscoveryServiceClient { + return &clusterDiscoveryServiceClient{cc} +} + +func (c *clusterDiscoveryServiceClient) StreamClusters(ctx context.Context, opts ...grpc.CallOption) (ClusterDiscoveryService_StreamClustersClient, error) { + stream, err := c.cc.NewStream(ctx, &_ClusterDiscoveryService_serviceDesc.Streams[0], "/envoy.api.v2.ClusterDiscoveryService/StreamClusters", opts...) 
+ if err != nil { + return nil, err + } + x := &clusterDiscoveryServiceStreamClustersClient{stream} + return x, nil +} + +type ClusterDiscoveryService_StreamClustersClient interface { + Send(*discovery.DiscoveryRequest) error + Recv() (*discovery.DiscoveryResponse, error) + grpc.ClientStream +} + +type clusterDiscoveryServiceStreamClustersClient struct { + grpc.ClientStream +} + +func (x *clusterDiscoveryServiceStreamClustersClient) Send(m *discovery.DiscoveryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *clusterDiscoveryServiceStreamClustersClient) Recv() (*discovery.DiscoveryResponse, error) { + m := new(discovery.DiscoveryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *clusterDiscoveryServiceClient) DeltaClusters(ctx context.Context, opts ...grpc.CallOption) (ClusterDiscoveryService_DeltaClustersClient, error) { + stream, err := c.cc.NewStream(ctx, &_ClusterDiscoveryService_serviceDesc.Streams[1], "/envoy.api.v2.ClusterDiscoveryService/DeltaClusters", opts...) + if err != nil { + return nil, err + } + x := &clusterDiscoveryServiceDeltaClustersClient{stream} + return x, nil +} + +type ClusterDiscoveryService_DeltaClustersClient interface { + Send(*discovery.DeltaDiscoveryRequest) error + Recv() (*discovery.DeltaDiscoveryResponse, error) + grpc.ClientStream +} + +type clusterDiscoveryServiceDeltaClustersClient struct { + grpc.ClientStream +} + +func (x *clusterDiscoveryServiceDeltaClustersClient) Send(m *discovery.DeltaDiscoveryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *clusterDiscoveryServiceDeltaClustersClient) Recv() (*discovery.DeltaDiscoveryResponse, error) { + m := new(discovery.DeltaDiscoveryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *clusterDiscoveryServiceClient) FetchClusters(ctx context.Context, in *discovery.DiscoveryRequest, opts ...grpc.CallOption) (*discovery.DiscoveryResponse, error) { + out := new(discovery.DiscoveryResponse) + err := c.cc.Invoke(ctx, "/envoy.api.v2.ClusterDiscoveryService/FetchClusters", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ClusterDiscoveryServiceServer is the server API for ClusterDiscoveryService service. 
+type ClusterDiscoveryServiceServer interface { + StreamClusters(ClusterDiscoveryService_StreamClustersServer) error + DeltaClusters(ClusterDiscoveryService_DeltaClustersServer) error + FetchClusters(context.Context, *discovery.DiscoveryRequest) (*discovery.DiscoveryResponse, error) +} + +func RegisterClusterDiscoveryServiceServer(s *grpc.Server, srv ClusterDiscoveryServiceServer) { + s.RegisterService(&_ClusterDiscoveryService_serviceDesc, srv) +} + +func _ClusterDiscoveryService_StreamClusters_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(ClusterDiscoveryServiceServer).StreamClusters(&clusterDiscoveryServiceStreamClustersServer{stream}) +} + +type ClusterDiscoveryService_StreamClustersServer interface { + Send(*discovery.DiscoveryResponse) error + Recv() (*discovery.DiscoveryRequest, error) + grpc.ServerStream +} + +type clusterDiscoveryServiceStreamClustersServer struct { + grpc.ServerStream +} + +func (x *clusterDiscoveryServiceStreamClustersServer) Send(m *discovery.DiscoveryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *clusterDiscoveryServiceStreamClustersServer) Recv() (*discovery.DiscoveryRequest, error) { + m := new(discovery.DiscoveryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _ClusterDiscoveryService_DeltaClusters_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(ClusterDiscoveryServiceServer).DeltaClusters(&clusterDiscoveryServiceDeltaClustersServer{stream}) +} + +type ClusterDiscoveryService_DeltaClustersServer interface { + Send(*discovery.DeltaDiscoveryResponse) error + Recv() (*discovery.DeltaDiscoveryRequest, error) + grpc.ServerStream +} + +type clusterDiscoveryServiceDeltaClustersServer struct { + grpc.ServerStream +} + +func (x *clusterDiscoveryServiceDeltaClustersServer) Send(m *discovery.DeltaDiscoveryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *clusterDiscoveryServiceDeltaClustersServer) Recv() (*discovery.DeltaDiscoveryRequest, error) { + m := new(discovery.DeltaDiscoveryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _ClusterDiscoveryService_FetchClusters_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(discovery.DiscoveryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ClusterDiscoveryServiceServer).FetchClusters(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/envoy.api.v2.ClusterDiscoveryService/FetchClusters", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ClusterDiscoveryServiceServer).FetchClusters(ctx, req.(*discovery.DiscoveryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _ClusterDiscoveryService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "envoy.api.v2.ClusterDiscoveryService", + HandlerType: (*ClusterDiscoveryServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "FetchClusters", + Handler: _ClusterDiscoveryService_FetchClusters_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamClusters", + Handler: _ClusterDiscoveryService_StreamClusters_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "DeltaClusters", + Handler: _ClusterDiscoveryService_DeltaClusters_Handler, + ServerStreams: true, + ClientStreams: true, + }, + 
}, + Metadata: "envoy/api/v2/cds.proto", +} + +func init() { proto.RegisterFile("envoy/api/v2/cds.proto", fileDescriptor_cds_1dff7e464f9f9a10) } + +var fileDescriptor_cds_1dff7e464f9f9a10 = []byte{ + // 2531 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x59, 0x4b, 0x77, 0x1b, 0xb7, + 0x15, 0xd6, 0x50, 0x74, 0x4c, 0x43, 0x7c, 0x8c, 0x20, 0x59, 0x1a, 0x53, 0x96, 0x2d, 0x33, 0xb6, + 0xab, 0x3a, 0x2d, 0xd5, 0xca, 0x79, 0x9d, 0xb4, 0x49, 0x0f, 0x5f, 0xb2, 0xe4, 0x50, 0xa4, 0x0a, + 0x52, 0x56, 0xd2, 0x9c, 0x1c, 0x04, 0x9c, 0x01, 0xc5, 0xa9, 0x86, 0x33, 0x93, 0x01, 0x46, 0x36, + 0xbd, 0xe8, 0x49, 0xb3, 0xea, 0xbe, 0xab, 0xfe, 0x85, 0xf6, 0x1f, 0x74, 0xd5, 0x6d, 0xd7, 0xdd, + 0x77, 0xd5, 0x45, 0xfb, 0x2f, 0x7a, 0x06, 0xc0, 0x50, 0x7c, 0x8c, 0x68, 0xa7, 0xa7, 0x2b, 0x11, + 0xb8, 0xdf, 0xfd, 0x2e, 0xe6, 0xe2, 0xe2, 0xde, 0x0b, 0x08, 0x6c, 0x50, 0xf7, 0xd2, 0x1b, 0xed, + 0x11, 0xdf, 0xde, 0xbb, 0xdc, 0xdf, 0x33, 0x2d, 0x56, 0xf6, 0x03, 0x8f, 0x7b, 0x30, 0x2b, 0xe6, + 0xcb, 0xc4, 0xb7, 0xcb, 0x97, 0xfb, 0xc5, 0xfb, 0xd3, 0x28, 0x2f, 0xa0, 0x7b, 0xc4, 0xb2, 0x02, + 0xca, 0x14, 0xbc, 0x78, 0x77, 0x0a, 0x40, 0x42, 0x3e, 0xd8, 0x33, 0x69, 0xc0, 0x13, 0xa5, 0x42, + 0xbd, 0x47, 0x18, 0x55, 0xd2, 0x47, 0xf3, 0x52, 0xd3, 0x73, 0xfb, 0xf6, 0x39, 0x66, 0x5e, 0x18, + 0x98, 0x34, 0x91, 0xc4, 0xb2, 0x99, 0xe9, 0x5d, 0xd2, 0x60, 0xa4, 0xa4, 0x0f, 0xe7, 0x49, 0x06, + 0x94, 0x38, 0x7c, 0x80, 0xcd, 0x01, 0x35, 0x2f, 0x14, 0x6a, 0x67, 0x1e, 0x25, 0x04, 0xa6, 0xe7, + 0x28, 0xc4, 0x93, 0x69, 0x84, 0x13, 0x32, 0x4e, 0x83, 0x3d, 0xd3, 0x0e, 0xcc, 0xd0, 0xe6, 0xb8, + 0x17, 0x50, 0x72, 0x41, 0x03, 0x85, 0xfd, 0x49, 0x22, 0xd6, 0x0b, 0xb9, 0x63, 0xd3, 0x00, 0x5b, + 0x94, 0x53, 0x93, 0xdb, 0x9e, 0xab, 0xd0, 0xd3, 0x9e, 0xa6, 0xb1, 0xa7, 0x8b, 0x86, 0x9c, 0xe7, + 0x23, 0x9f, 0xee, 0xf9, 0x34, 0x30, 0xa9, 0x3b, 0x76, 0xdb, 0xb9, 0xe7, 0x9d, 0x3b, 0x54, 0xa8, + 0x10, 0xd7, 0xf5, 0x38, 0x89, 0xe8, 0x62, 0xbd, 0x3b, 0x4a, 0x2a, 0x46, 0xbd, 0xb0, 0xbf, 0x47, + 0xdc, 0xd8, 0x19, 0xf7, 0x66, 0x45, 0x56, 0x18, 0x90, 0x89, 0xa5, 0xdc, 0x9d, 0x95, 0x33, 0x1e, + 0x84, 0x26, 0xbf, 0x4e, 0xfb, 0x65, 0x40, 0x7c, 0x9f, 0x06, 0xb1, 0xe1, 0xcd, 0x4b, 0xe2, 0xd8, + 0x16, 0xe1, 0x74, 0x2f, 0xfe, 0x21, 0x05, 0xa5, 0xef, 0xdf, 0x03, 0x37, 0x6b, 0xd2, 0x0b, 0x70, + 0x1b, 0xa4, 0x5d, 0x32, 0xa4, 0x86, 0xb6, 0xa3, 0xed, 0xde, 0xaa, 0xde, 0xfa, 0xeb, 0x7f, 0xfe, + 0xb6, 0x9c, 0x0e, 0x52, 0x3b, 0x1a, 0x12, 0xd3, 0xb0, 0x04, 0x72, 0xc4, 0xe1, 0x98, 0x71, 0xc2, + 0xb1, 0xc0, 0xdd, 0x8d, 0x70, 0x68, 0x85, 0x38, 0xbc, 0xc3, 0x09, 0x6f, 0x45, 0x98, 0x06, 0x48, + 0x47, 0x4e, 0x31, 0x52, 0x3b, 0xda, 0x6e, 0x7e, 0xff, 0xdd, 0xf2, 0x64, 0x44, 0x96, 0x95, 0x9d, + 0x72, 0x3d, 0x8e, 0x83, 0xee, 0xc8, 0xa7, 0x55, 0x10, 0xd9, 0xb9, 0xf1, 0xbd, 0x96, 0xd2, 0xb5, + 0xc3, 0x25, 0x24, 0xd4, 0x61, 0x13, 0x64, 0xd5, 0xd6, 0x60, 0x41, 0xf7, 0x78, 0x47, 0xdb, 0x5d, + 0xd9, 0xff, 0x51, 0x32, 0x5d, 0x2d, 0x64, 0xdc, 0x1b, 0xaa, 0x51, 0x44, 0x79, 0xb8, 0x84, 0x56, + 0xcc, 0xab, 0x21, 0xec, 0x02, 0x48, 0x2d, 0x86, 0x63, 0x46, 0x19, 0xa8, 0xc6, 0xb2, 0xe0, 0x7c, + 0x9c, 0xcc, 0xd9, 0xb0, 0x98, 0xfa, 0x59, 0x13, 0x68, 0xa4, 0xd3, 0x99, 0x19, 0xd8, 0x02, 0x05, + 0xd3, 0x73, 0x5d, 0x6a, 0x72, 0xcc, 0xed, 0x21, 0xf5, 0x42, 0x6e, 0xa4, 0x05, 0xe5, 0x9d, 0xb2, + 0xdc, 0x8c, 0x72, 0xbc, 0x19, 0xe5, 0xba, 0xda, 0x4a, 0xf5, 0xad, 0x7f, 0xd6, 0x52, 0x4f, 0x96, + 0x50, 0x5e, 0x69, 0x77, 0xa5, 0x32, 0xec, 0x83, 0x07, 0xbe, 0x5c, 0x9d, 0x2b, 0x63, 0x10, 0xf7, + 0xc2, 0x7e, 0x9f, 0x06, 0xd8, 0xb1, 0x87, 0x51, 0x18, 0x8f, 0x38, 0x65, 
0xc6, 0x0d, 0x61, 0xe1, + 0xee, 0x9c, 0x85, 0xd3, 0x23, 0x97, 0x3f, 0xdd, 0x7f, 0x41, 0x9c, 0x90, 0xa2, 0x6d, 0x5f, 0xac, + 0x51, 0xb1, 0x54, 0x05, 0x49, 0x33, 0xe2, 0xa8, 0x46, 0x14, 0xf0, 0x19, 0xb8, 0xe5, 0xf4, 0xb0, + 0xef, 0x39, 0xb6, 0x39, 0x32, 0xde, 0x11, 0xfb, 0x74, 0x2f, 0xd9, 0x09, 0xcd, 0xde, 0x89, 0x40, + 0x4d, 0x6e, 0x11, 0xca, 0x38, 0x6a, 0x16, 0xbe, 0x0f, 0x6e, 0x0c, 0x3c, 0xc6, 0x99, 0x71, 0x73, + 0x67, 0x79, 0x77, 0x65, 0xbf, 0x38, 0x4d, 0x12, 0x1d, 0xd4, 0x72, 0x45, 0x26, 0x9c, 0x6a, 0xca, + 0xd0, 0x90, 0x04, 0xc3, 0x26, 0x28, 0x38, 0x1e, 0xb1, 0x30, 0x61, 0xcc, 0x3e, 0x77, 0x87, 0xd4, + 0xe5, 0xc6, 0x03, 0xf1, 0x51, 0xc9, 0xc1, 0xd2, 0xf4, 0x88, 0x55, 0x19, 0x43, 0x51, 0xde, 0x99, + 0x1a, 0xc3, 0x1a, 0xc8, 0x4d, 0xa6, 0x0c, 0x66, 0x64, 0xc4, 0x5a, 0xee, 0x25, 0xac, 0xe5, 0x50, + 0xe0, 0x6a, 0x11, 0x0c, 0x65, 0x07, 0x57, 0x03, 0x06, 0xbf, 0x02, 0x5b, 0x43, 0xf2, 0x0a, 0x07, + 0xf4, 0xdb, 0x90, 0x32, 0xce, 0xf0, 0xf4, 0x36, 0x18, 0xb7, 0xde, 0xc2, 0xe7, 0xc6, 0x90, 0xbc, + 0x42, 0x4a, 0xff, 0x64, 0xd2, 0xfd, 0xf0, 0x04, 0xe8, 0x33, 0x99, 0x88, 0x19, 0x40, 0x30, 0x3e, + 0x9a, 0x59, 0x64, 0x1c, 0xce, 0x12, 0x5d, 0x55, 0x60, 0x54, 0x30, 0xa7, 0x27, 0xe0, 0x01, 0x58, + 0xe1, 0x0e, 0x8b, 0x56, 0xc8, 0xe9, 0x2b, 0x6e, 0xac, 0x24, 0x91, 0x45, 0xd9, 0xbc, 0x7c, 0xea, + 0x33, 0x1e, 0x50, 0x32, 0xec, 0x3a, 0xac, 0x26, 0xc1, 0x08, 0xf0, 0xf1, 0x6f, 0x78, 0x0e, 0xee, + 0x9a, 0xde, 0x70, 0xe8, 0xb9, 0x78, 0xc0, 0xb9, 0x8f, 0xe3, 0xa4, 0x8a, 0x3d, 0x5f, 0xa4, 0x2c, + 0x63, 0x3b, 0xe9, 0x80, 0x48, 0x57, 0x72, 0xee, 0x9f, 0x28, 0x78, 0x5b, 0xa2, 0xd1, 0x1d, 0xc9, + 0x95, 0x20, 0x82, 0x5f, 0x81, 0xdb, 0xc9, 0x16, 0x72, 0x49, 0xc7, 0x7a, 0x6c, 0xe1, 0xe7, 0xb3, + 0x26, 0xd6, 0x06, 0x09, 0xe4, 0x5f, 0x83, 0x8d, 0x68, 0x7a, 0x7f, 0x9e, 0x3d, 0xbf, 0x90, 0x7d, + 0x7f, 0x96, 0x7d, 0x7d, 0x90, 0x30, 0x0b, 0xbf, 0x05, 0x45, 0xfa, 0x8a, 0x53, 0x97, 0x45, 0x07, + 0x72, 0xce, 0xc4, 0xbb, 0x22, 0xda, 0x9e, 0x5e, 0x93, 0x43, 0x62, 0xbd, 0x19, 0xce, 0x86, 0xcb, + 0x83, 0x11, 0x32, 0xe8, 0x35, 0x62, 0xf8, 0x7b, 0x0d, 0xec, 0x44, 0x59, 0xcf, 0xc2, 0x0b, 0x2c, + 0x3f, 0x14, 0x96, 0x3f, 0x4e, 0xb6, 0x1c, 0x65, 0x3d, 0x6b, 0xb1, 0xf9, 0x6d, 0xbe, 0x08, 0x03, + 0xdb, 0x40, 0xb7, 0x5c, 0x86, 0x03, 0xda, 0x0f, 0x28, 0x1b, 0xe0, 0x80, 0x70, 0x6a, 0xe8, 0x3f, + 0x28, 0xbb, 0x59, 0x2e, 0x43, 0x52, 0x1b, 0x11, 0x4e, 0xe1, 0xd7, 0x60, 0x35, 0x22, 0x74, 0x3c, + 0xef, 0x22, 0xf4, 0x71, 0x9f, 0x0c, 0x6d, 0x67, 0x64, 0xac, 0x8a, 0xec, 0xf3, 0xe8, 0x9a, 0x2a, + 0xe1, 0xb2, 0xa6, 0x40, 0x1f, 0x08, 0xf0, 0x54, 0x12, 0x2a, 0x58, 0xd3, 0x42, 0xf8, 0x2b, 0x90, + 0x93, 0xeb, 0x65, 0x9e, 0x73, 0x19, 0x1d, 0x31, 0xf8, 0xa6, 0x9c, 0x84, 0xb2, 0x62, 0x85, 0x0a, + 0x0f, 0x3b, 0x60, 0x75, 0xae, 0x09, 0x30, 0xd6, 0x12, 0x4f, 0x80, 0x5a, 0x5f, 0x5b, 0xc2, 0xeb, + 0x31, 0x1a, 0xe9, 0xde, 0xcc, 0x8c, 0x38, 0xfb, 0x0e, 0x25, 0x6e, 0xe8, 0x63, 0xdb, 0xe5, 0x34, + 0xb8, 0x24, 0x8e, 0xb1, 0xfe, 0x43, 0xbc, 0x58, 0x50, 0xea, 0x47, 0x4a, 0x1b, 0xb6, 0xc1, 0x7a, + 0xa8, 0x4e, 0x35, 0xee, 0xd9, 0xae, 0x15, 0x17, 0xb3, 0xdb, 0x82, 0x75, 0x3b, 0xe1, 0x73, 0xab, + 0xb6, 0x6b, 0xa9, 0x1a, 0x06, 0x63, 0xd5, 0xab, 0x39, 0xd8, 0x02, 0xba, 0xd3, 0xc3, 0x2c, 0xec, + 0x31, 0xca, 0x63, 0xb2, 0x0d, 0x41, 0xf6, 0xf0, 0xba, 0xa2, 0xd0, 0x11, 0x60, 0xc5, 0x99, 0x77, + 0xa6, 0xc6, 0xf0, 0x0c, 0xac, 0x05, 0xb6, 0x7b, 0x8e, 0x07, 0x84, 0x0d, 0xb0, 0xd3, 0x8b, 0x29, + 0x37, 0x17, 0x15, 0x5b, 0x64, 0xbb, 0xe7, 0x87, 0x84, 0x0d, 0x9a, 0x3d, 0x49, 0x72, 0xa8, 0x21, + 0x3d, 0x98, 0x99, 0x83, 0xdf, 0x80, 0x0d, 0x2f, 0xb0, 0xcf, 0x6d, 0x97, 0x38, 0xd8, 0x62, 0x7c, + 
0x82, 0xbb, 0x24, 0xb8, 0x7f, 0x9c, 0xcc, 0xdd, 0x56, 0x3a, 0x75, 0xc6, 0x27, 0xe8, 0xd7, 0xbc, + 0xf9, 0x69, 0x68, 0x82, 0x4d, 0x87, 0x12, 0xc6, 0xe3, 0x42, 0x30, 0x61, 0xe2, 0x91, 0x30, 0xf1, + 0xe4, 0x1a, 0x8f, 0x44, 0x4a, 0x2a, 0xf9, 0x4f, 0xd8, 0x58, 0x77, 0x12, 0xe6, 0x23, 0x7f, 0xab, + 0xa4, 0x7b, 0xc5, 0xbe, 0xb5, 0xc8, 0xdf, 0x35, 0x81, 0x8e, 0xf5, 0xa3, 0xae, 0x61, 0x72, 0x0c, + 0x8f, 0x81, 0xce, 0x03, 0xe2, 0x32, 0xdf, 0x0b, 0x38, 0x66, 0x9e, 0x79, 0x41, 0xb9, 0x61, 0x08, + 0xbe, 0x52, 0x42, 0x30, 0x74, 0x63, 0x68, 0x47, 0x20, 0x51, 0x81, 0x4f, 0x4f, 0xc0, 0x8f, 0x40, + 0x66, 0x48, 0x39, 0xb1, 0x08, 0x27, 0xc6, 0x1d, 0x41, 0xb3, 0x95, 0x40, 0x73, 0xac, 0x20, 0x68, + 0x0c, 0x86, 0x5f, 0x03, 0x38, 0xce, 0x51, 0x8c, 0x3a, 0xea, 0x00, 0x15, 0xc5, 0x01, 0x2f, 0x5f, + 0xf3, 0x65, 0xf2, 0x6f, 0x9c, 0x7a, 0x3a, 0xb1, 0x16, 0x5a, 0xf5, 0x67, 0xa7, 0xe0, 0x39, 0xd8, + 0x1a, 0xc7, 0xfd, 0x44, 0x87, 0x14, 0x67, 0xc3, 0x7b, 0x49, 0xa9, 0x3e, 0x2e, 0x7f, 0x57, 0xc5, + 0x78, 0x5c, 0xab, 0xc2, 0xeb, 0x44, 0xf0, 0x14, 0xec, 0x9a, 0x8e, 0xc7, 0xe8, 0x84, 0x15, 0x86, + 0xa3, 0x12, 0xe9, 0x31, 0x8e, 0x55, 0xab, 0xd1, 0x27, 0xb6, 0x13, 0x06, 0xd4, 0xb8, 0xbf, 0xa3, + 0xed, 0x66, 0xd0, 0xbb, 0x02, 0x7f, 0xc5, 0xc4, 0xda, 0xee, 0xa1, 0xc7, 0xb8, 0x6c, 0x37, 0x0e, + 0x24, 0x14, 0x1e, 0x82, 0x07, 0x56, 0x40, 0x6c, 0x37, 0x91, 0x36, 0xa0, 0x43, 0x2f, 0x4a, 0x0d, + 0x3b, 0x82, 0x6f, 0x5b, 0x00, 0xe7, 0xf8, 0x90, 0x04, 0x15, 0x2f, 0xc0, 0xea, 0x5c, 0xc3, 0xfb, + 0xa6, 0xce, 0xfd, 0x23, 0x90, 0x95, 0x05, 0x45, 0x05, 0x5c, 0x4a, 0xb8, 0x6b, 0x7d, 0x2e, 0x07, + 0x55, 0xdc, 0x11, 0x5a, 0x11, 0x48, 0x19, 0x5d, 0xc5, 0x10, 0xe8, 0xb3, 0x9d, 0x30, 0xfc, 0x0c, + 0x00, 0xd1, 0x4d, 0x4b, 0x2a, 0x4d, 0x50, 0xdd, 0x4f, 0x08, 0x12, 0x09, 0xef, 0x88, 0xeb, 0x20, + 0xba, 0x15, 0xb5, 0xcf, 0x52, 0xff, 0x01, 0xc8, 0x32, 0x1a, 0x5c, 0xda, 0x26, 0x95, 0xb7, 0x88, + 0x94, 0xbc, 0x45, 0xa8, 0xb9, 0xe8, 0x16, 0x51, 0xb4, 0xc0, 0xf6, 0xc2, 0xea, 0x05, 0x75, 0xb0, + 0x7c, 0x41, 0x47, 0xf2, 0x73, 0x51, 0xf4, 0x13, 0xfe, 0x14, 0xdc, 0xb8, 0x8c, 0x3a, 0x31, 0xf5, + 0x6d, 0x9b, 0x73, 0xdf, 0xd6, 0x11, 0xd7, 0x25, 0x24, 0x51, 0x9f, 0xa4, 0x3e, 0xd6, 0x8a, 0x7d, + 0x50, 0x7a, 0x73, 0xa1, 0x4c, 0x30, 0xf5, 0x64, 0xda, 0x54, 0xb2, 0x1b, 0x27, 0xec, 0xfc, 0x25, + 0x0d, 0xf2, 0xd3, 0x59, 0x13, 0xfa, 0xa0, 0xd0, 0x27, 0x8e, 0xd3, 0x23, 0xe6, 0x45, 0xdc, 0x89, + 0x6b, 0xe2, 0xa8, 0xfc, 0xe2, 0x6d, 0x92, 0xee, 0x78, 0x78, 0xa0, 0x38, 0x12, 0xda, 0xf4, 0x7c, + 0x7f, 0x4a, 0x06, 0x3f, 0x03, 0x79, 0x8b, 0xf6, 0x49, 0x18, 0x5d, 0xe0, 0x84, 0xee, 0x9b, 0x1c, + 0x95, 0x53, 0x70, 0x69, 0x09, 0x7e, 0x03, 0x74, 0x55, 0x24, 0xe4, 0xe9, 0xf6, 0x02, 0x66, 0x2c, + 0x8b, 0x1a, 0xfb, 0xc1, 0x0f, 0x5a, 0x72, 0x47, 0x69, 0xa3, 0x02, 0x9b, 0x1a, 0x33, 0xb8, 0x0f, + 0x6e, 0x3b, 0x9e, 0x49, 0x1c, 0x9b, 0x8f, 0xf0, 0x4b, 0x6a, 0x9f, 0x0f, 0x38, 0x26, 0x2f, 0x49, + 0x40, 0xc5, 0xad, 0x2a, 0x83, 0xd6, 0x62, 0xe1, 0x99, 0x90, 0x55, 0x22, 0x51, 0xa4, 0xc3, 0x4c, + 0xe2, 0x50, 0x3c, 0xa3, 0x29, 0xee, 0x49, 0x19, 0xb4, 0x26, 0x84, 0xcd, 0x29, 0x45, 0xf8, 0x10, + 0xe4, 0x7d, 0xe2, 0xda, 0x26, 0x1e, 0x7a, 0x16, 0xc5, 0xc4, 0x95, 0x97, 0xa0, 0x0c, 0xca, 0x8a, + 0xd9, 0x63, 0xcf, 0xa2, 0x15, 0x77, 0x54, 0x7c, 0x0c, 0xf4, 0xd9, 0x25, 0x43, 0x08, 0xd2, 0x17, + 0x74, 0xc4, 0x0c, 0x6d, 0x67, 0x79, 0xf7, 0x16, 0x12, 0xbf, 0x4b, 0x6d, 0xb0, 0x91, 0xbc, 0x1b, + 0xb0, 0x00, 0x56, 0x5a, 0x6d, 0x7c, 0x50, 0x69, 0x36, 0xab, 0x95, 0xda, 0xe7, 0xfa, 0x12, 0xd4, + 0x41, 0xb6, 0xd2, 0xfa, 0x12, 0x37, 0x5a, 0xf5, 0x93, 0xf6, 0x51, 0xab, 0xab, 0x6b, 0x10, 0x82, + 0x7c, 0xbd, 0x71, 0x50, 
0x39, 0x6d, 0x76, 0x71, 0xe7, 0xb4, 0xda, 0x69, 0x74, 0xf5, 0x54, 0xb1, + 0x07, 0xd6, 0x93, 0x0a, 0x0a, 0x7c, 0x0e, 0xb2, 0xe6, 0xc0, 0x8b, 0x4e, 0x8d, 0xe9, 0x85, 0x2e, + 0x57, 0x07, 0x6f, 0xe1, 0xad, 0x44, 0x25, 0x82, 0x27, 0xa9, 0xdd, 0x14, 0x5a, 0x91, 0xca, 0xb5, + 0x48, 0xb7, 0xf8, 0xcf, 0x14, 0xd0, 0x67, 0x8b, 0x2e, 0x7c, 0x01, 0x56, 0x87, 0xb6, 0x6b, 0x0f, + 0xc3, 0x21, 0x16, 0x15, 0x9c, 0xd9, 0xaf, 0xe9, 0x42, 0x2b, 0x1f, 0xbe, 0x2f, 0xad, 0x64, 0x23, + 0x2b, 0x37, 0xf7, 0x6f, 0x18, 0xdf, 0x7d, 0xf7, 0x5d, 0x1a, 0x15, 0x14, 0x49, 0xc4, 0xdf, 0xb1, + 0x5f, 0x53, 0x68, 0x81, 0x9c, 0x68, 0x06, 0xfa, 0xa1, 0x2b, 0x8b, 0xc2, 0xb2, 0x88, 0xf4, 0xa7, + 0x6f, 0xd7, 0x0b, 0x94, 0xa3, 0xc1, 0x81, 0x52, 0x9d, 0x8a, 0xf0, 0xec, 0x60, 0x42, 0x22, 0x56, + 0x4f, 0x5e, 0xcd, 0xac, 0x3e, 0xfd, 0x3f, 0xac, 0x5e, 0x92, 0xc4, 0xab, 0x2f, 0x95, 0x41, 0x76, + 0x72, 0x05, 0x70, 0x05, 0xdc, 0xfc, 0xe2, 0x0b, 0x7c, 0x58, 0xe9, 0x1c, 0xea, 0x4b, 0x70, 0x15, + 0xe4, 0x8e, 0x4f, 0xd1, 0xf1, 0x29, 0x12, 0x13, 0x78, 0x5f, 0xd7, 0x9e, 0xa7, 0x33, 0x29, 0x7d, + 0xb9, 0xf8, 0x29, 0x58, 0x4b, 0x68, 0x3c, 0xe0, 0x63, 0x50, 0x08, 0x19, 0x95, 0xd7, 0xad, 0x01, + 0x25, 0x16, 0x0d, 0x84, 0x83, 0x33, 0x28, 0x17, 0x32, 0x1a, 0x5d, 0x4a, 0x0e, 0xc5, 0x64, 0xf1, + 0xdf, 0x69, 0x90, 0x9f, 0xae, 0xfb, 0xf0, 0x73, 0xb0, 0x29, 0xab, 0xcf, 0x08, 0xcb, 0xe8, 0xe5, + 0x83, 0xa8, 0xb7, 0xf6, 0x1c, 0x4b, 0xed, 0xd1, 0x9a, 0xf2, 0x67, 0x94, 0xbe, 0xcb, 0x27, 0xf2, + 0x4d, 0x0a, 0xdd, 0x56, 0x3a, 0x27, 0x91, 0x4a, 0x37, 0xd6, 0x80, 0x36, 0x58, 0x7f, 0xed, 0xb9, + 0x54, 0x9e, 0xaf, 0x89, 0x46, 0x44, 0xa6, 0x84, 0x0f, 0xdf, 0xa6, 0x11, 0x29, 0xff, 0xc6, 0x73, + 0xa9, 0x38, 0x84, 0xe3, 0x96, 0x67, 0x09, 0xad, 0xbe, 0x9e, 0x9d, 0x84, 0xbf, 0x03, 0x5b, 0x33, + 0x67, 0x93, 0x5a, 0x13, 0x16, 0xe5, 0x23, 0xcc, 0xa7, 0x6f, 0x65, 0x71, 0xfa, 0x1c, 0x53, 0x6b, + 0xc2, 0xb0, 0xe1, 0x5c, 0x23, 0x83, 0x47, 0x60, 0x2d, 0xf4, 0x2d, 0xc2, 0x29, 0x1e, 0xd2, 0xe0, + 0x9c, 0xe2, 0x97, 0xb6, 0x6b, 0x79, 0x2f, 0xdf, 0xf8, 0x52, 0x83, 0x56, 0xa5, 0xd6, 0x71, 0xa4, + 0x74, 0x26, 0x74, 0x8a, 0x7f, 0xd2, 0xc0, 0xea, 0xdc, 0x57, 0xc3, 0x5f, 0x82, 0x42, 0xe0, 0x85, + 0x3c, 0x8a, 0x39, 0xea, 0x92, 0x9e, 0x43, 0x17, 0x6e, 0x48, 0x5e, 0x61, 0x1b, 0x12, 0x0a, 0x0f, + 0x80, 0x3e, 0x8c, 0xba, 0x02, 0xf5, 0x34, 0x25, 0xa2, 0x36, 0xf5, 0xe6, 0xa8, 0x45, 0xf9, 0xa1, + 0xed, 0x2a, 0x37, 0x45, 0x61, 0x5a, 0x2c, 0x02, 0xe3, 0x3a, 0xf7, 0x54, 0xb7, 0xc0, 0x9d, 0xf1, + 0x16, 0xc4, 0x8f, 0xb4, 0x3e, 0x35, 0xed, 0xbe, 0x4d, 0x83, 0xd2, 0x19, 0xc8, 0x4d, 0x3d, 0xc7, + 0x41, 0x00, 0xde, 0xe9, 0x74, 0x2b, 0xdd, 0xa3, 0x9a, 0xbe, 0x04, 0xf3, 0x00, 0x74, 0xba, 0xe8, + 0xa8, 0xd6, 0xc5, 0xf5, 0x56, 0x47, 0xd7, 0xa2, 0x94, 0xd6, 0x6c, 0x3f, 0x3b, 0xaa, 0x55, 0x9a, + 0x62, 0x22, 0x05, 0x6f, 0x82, 0xe5, 0x46, 0xbd, 0xa3, 0x2f, 0x47, 0xb9, 0xad, 0x8d, 0x8e, 0x9e, + 0x1d, 0xb5, 0x22, 0x51, 0xa7, 0xab, 0xa7, 0x4b, 0xbf, 0x05, 0x99, 0xf8, 0xfd, 0x28, 0xd2, 0x43, + 0xed, 0xd3, 0x56, 0x1d, 0xa3, 0x76, 0xf5, 0xa8, 0x25, 0x0f, 0x4e, 0xb3, 0x51, 0xe9, 0x74, 0x31, + 0x6a, 0xfc, 0xfa, 0xb4, 0xd1, 0x89, 0x72, 0x61, 0x0e, 0xdc, 0x42, 0x47, 0xad, 0x67, 0xf2, 0x68, + 0xa5, 0xa2, 0x65, 0xa0, 0x4a, 0xab, 0xde, 0x3e, 0xd6, 0x97, 0xe1, 0x1a, 0x28, 0x4c, 0x92, 0xe3, + 0x66, 0x55, 0x4f, 0x47, 0x80, 0xe3, 0xca, 0xb3, 0x66, 0xe3, 0x85, 0x7e, 0xa3, 0xf4, 0x01, 0x28, + 0xcc, 0xdc, 0x16, 0x61, 0x06, 0xa4, 0x2b, 0xa7, 0xdd, 0xb6, 0xbe, 0x14, 0x9d, 0xd8, 0x17, 0xef, + 0xe3, 0x76, 0xab, 0xf9, 0xa5, 0xae, 0x89, 0xc1, 0x87, 0x72, 0x90, 0x2a, 0x75, 0x81, 0x71, 0x5d, + 0x0f, 0x0a, 0xb7, 0xc0, 0xe6, 0x69, 0xa7, 0x81, 
0x6b, 0xed, 0xd6, 0xc1, 0xd1, 0xb3, 0x53, 0xd4, + 0xa8, 0xe3, 0x13, 0xd4, 0xee, 0xb6, 0x6b, 0xed, 0xa6, 0xbe, 0x14, 0x0b, 0xeb, 0xed, 0xb3, 0x56, + 0xa7, 0x8b, 0x1a, 0x95, 0xe3, 0x2b, 0xa1, 0x56, 0x35, 0xc0, 0x46, 0xbc, 0x9d, 0xe3, 0x07, 0x6f, + 0xf1, 0x8a, 0x59, 0x5d, 0x11, 0x2f, 0x6f, 0x72, 0x0b, 0x9e, 0xa7, 0x33, 0x59, 0x3d, 0xf7, 0x3c, + 0x9d, 0x29, 0xe8, 0x7a, 0xe9, 0x0c, 0xc0, 0xd3, 0xf9, 0xab, 0x59, 0x05, 0xe4, 0xe5, 0x63, 0x3a, + 0x56, 0xef, 0xf6, 0x2a, 0xb0, 0x16, 0x5d, 0x6a, 0x73, 0x52, 0x43, 0x0d, 0x4b, 0x04, 0xdc, 0xb9, + 0xb6, 0x0b, 0x86, 0x75, 0x90, 0xe3, 0xa6, 0x8f, 0x2f, 0x28, 0xf5, 0x89, 0x63, 0x5f, 0xd2, 0x05, + 0xbd, 0x5c, 0xd7, 0xf4, 0x3f, 0x8f, 0x61, 0x28, 0xcb, 0x27, 0x46, 0xfb, 0x7f, 0x4f, 0x81, 0x4d, + 0xe5, 0xc5, 0x71, 0x20, 0x75, 0x64, 0x2f, 0x07, 0xcf, 0x40, 0xbe, 0x23, 0x8d, 0x4b, 0x00, 0x83, + 0x33, 0x0f, 0x73, 0x63, 0x0d, 0x55, 0xed, 0x8a, 0xf7, 0xaf, 0x95, 0x33, 0xdf, 0x73, 0x19, 0x2d, + 0x2d, 0xed, 0x6a, 0x3f, 0xd3, 0xe0, 0x37, 0x20, 0x57, 0xa7, 0x0e, 0x27, 0x63, 0xde, 0x99, 0xc7, + 0x43, 0x21, 0x9c, 0x23, 0x7f, 0xb8, 0x18, 0x34, 0x65, 0x81, 0x81, 0xdc, 0x01, 0xe5, 0xe6, 0xe0, + 0xff, 0xb7, 0xf2, 0x07, 0xdf, 0xff, 0xe3, 0x5f, 0x7f, 0x4c, 0x6d, 0x95, 0x36, 0xa6, 0xfe, 0x15, + 0xf2, 0x89, 0x8a, 0x15, 0xf6, 0x89, 0xf6, 0xa4, 0xfa, 0x1e, 0x28, 0xda, 0x9e, 0xe4, 0xf1, 0x03, + 0xef, 0xd5, 0x68, 0x8a, 0xb2, 0x9a, 0xa9, 0x59, 0x4c, 0x04, 0xea, 0x89, 0xf6, 0x07, 0x4d, 0xeb, + 0xbd, 0x23, 0x12, 0xc3, 0xd3, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xe0, 0x16, 0x40, 0xfc, 0x0e, + 0x1a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/circuit_breaker/circuit_breaker.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/circuit_breaker/circuit_breaker.pb.go new file mode 100755 index 0000000..da52dc0 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/circuit_breaker/circuit_breaker.pb.go @@ -0,0 +1,185 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/cluster/circuit_breaker.proto + +package cluster + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type CircuitBreakers struct { + Thresholds []*CircuitBreakers_Thresholds `protobuf:"bytes,1,rep,name=thresholds,proto3" json:"thresholds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CircuitBreakers) Reset() { *m = CircuitBreakers{} } +func (m *CircuitBreakers) String() string { return proto.CompactTextString(m) } +func (*CircuitBreakers) ProtoMessage() {} +func (*CircuitBreakers) Descriptor() ([]byte, []int) { + return fileDescriptor_circuit_breaker_dc7392708e718eb5, []int{0} +} +func (m *CircuitBreakers) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CircuitBreakers.Unmarshal(m, b) +} +func (m *CircuitBreakers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CircuitBreakers.Marshal(b, m, deterministic) +} +func (dst *CircuitBreakers) XXX_Merge(src proto.Message) { + xxx_messageInfo_CircuitBreakers.Merge(dst, src) +} +func (m *CircuitBreakers) XXX_Size() int { + return xxx_messageInfo_CircuitBreakers.Size(m) +} +func (m *CircuitBreakers) XXX_DiscardUnknown() { + xxx_messageInfo_CircuitBreakers.DiscardUnknown(m) +} + +var xxx_messageInfo_CircuitBreakers proto.InternalMessageInfo + +func (m *CircuitBreakers) GetThresholds() []*CircuitBreakers_Thresholds { + if m != nil { + return m.Thresholds + } + return nil +} + +type CircuitBreakers_Thresholds struct { + Priority base.RoutingPriority `protobuf:"varint,1,opt,name=priority,proto3,enum=envoy.api.v2.core.RoutingPriority" json:"priority,omitempty"` + MaxConnections *wrappers.UInt32Value `protobuf:"bytes,2,opt,name=max_connections,json=maxConnections,proto3" json:"max_connections,omitempty"` + MaxPendingRequests *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=max_pending_requests,json=maxPendingRequests,proto3" json:"max_pending_requests,omitempty"` + MaxRequests *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=max_requests,json=maxRequests,proto3" json:"max_requests,omitempty"` + MaxRetries *wrappers.UInt32Value `protobuf:"bytes,5,opt,name=max_retries,json=maxRetries,proto3" json:"max_retries,omitempty"` + TrackRemaining bool `protobuf:"varint,6,opt,name=track_remaining,json=trackRemaining,proto3" json:"track_remaining,omitempty"` + MaxConnectionPools *wrappers.UInt32Value `protobuf:"bytes,7,opt,name=max_connection_pools,json=maxConnectionPools,proto3" json:"max_connection_pools,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CircuitBreakers_Thresholds) Reset() { *m = CircuitBreakers_Thresholds{} } +func (m *CircuitBreakers_Thresholds) String() string { return proto.CompactTextString(m) } +func (*CircuitBreakers_Thresholds) ProtoMessage() {} +func (*CircuitBreakers_Thresholds) Descriptor() ([]byte, []int) { + return fileDescriptor_circuit_breaker_dc7392708e718eb5, []int{0, 0} +} +func (m *CircuitBreakers_Thresholds) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CircuitBreakers_Thresholds.Unmarshal(m, b) +} +func (m *CircuitBreakers_Thresholds) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CircuitBreakers_Thresholds.Marshal(b, m, deterministic) +} +func (dst *CircuitBreakers_Thresholds) XXX_Merge(src proto.Message) { + xxx_messageInfo_CircuitBreakers_Thresholds.Merge(dst, src) +} +func (m *CircuitBreakers_Thresholds) XXX_Size() int { + return 
xxx_messageInfo_CircuitBreakers_Thresholds.Size(m) +} +func (m *CircuitBreakers_Thresholds) XXX_DiscardUnknown() { + xxx_messageInfo_CircuitBreakers_Thresholds.DiscardUnknown(m) +} + +var xxx_messageInfo_CircuitBreakers_Thresholds proto.InternalMessageInfo + +func (m *CircuitBreakers_Thresholds) GetPriority() base.RoutingPriority { + if m != nil { + return m.Priority + } + return base.RoutingPriority_DEFAULT +} + +func (m *CircuitBreakers_Thresholds) GetMaxConnections() *wrappers.UInt32Value { + if m != nil { + return m.MaxConnections + } + return nil +} + +func (m *CircuitBreakers_Thresholds) GetMaxPendingRequests() *wrappers.UInt32Value { + if m != nil { + return m.MaxPendingRequests + } + return nil +} + +func (m *CircuitBreakers_Thresholds) GetMaxRequests() *wrappers.UInt32Value { + if m != nil { + return m.MaxRequests + } + return nil +} + +func (m *CircuitBreakers_Thresholds) GetMaxRetries() *wrappers.UInt32Value { + if m != nil { + return m.MaxRetries + } + return nil +} + +func (m *CircuitBreakers_Thresholds) GetTrackRemaining() bool { + if m != nil { + return m.TrackRemaining + } + return false +} + +func (m *CircuitBreakers_Thresholds) GetMaxConnectionPools() *wrappers.UInt32Value { + if m != nil { + return m.MaxConnectionPools + } + return nil +} + +func init() { + proto.RegisterType((*CircuitBreakers)(nil), "envoy.api.v2.cluster.CircuitBreakers") + proto.RegisterType((*CircuitBreakers_Thresholds)(nil), "envoy.api.v2.cluster.CircuitBreakers.Thresholds") +} + +func init() { + proto.RegisterFile("envoy/api/v2/cluster/circuit_breaker.proto", fileDescriptor_circuit_breaker_dc7392708e718eb5) +} + +var fileDescriptor_circuit_breaker_dc7392708e718eb5 = []byte{ + // 417 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6e, 0xd3, 0x40, + 0x10, 0x86, 0xe5, 0xa6, 0xb4, 0xd5, 0x06, 0x25, 0xd2, 0x52, 0x21, 0x2b, 0xaa, 0x50, 0x94, 0x0b, + 0x11, 0x87, 0x35, 0x72, 0xcf, 0x80, 0x48, 0xd4, 0x03, 0x97, 0xca, 0x32, 0xd0, 0x03, 0x12, 0xb2, + 0x36, 0xee, 0xe0, 0xae, 0x6a, 0xef, 0x2c, 0xb3, 0xeb, 0xe0, 0xbc, 0x12, 0x8f, 0xc1, 0x73, 0xf0, + 0x30, 0x28, 0xde, 0x24, 0xa6, 0x55, 0x0f, 0x3e, 0x7a, 0x66, 0xbe, 0x6f, 0xfc, 0xef, 0x2e, 0x7b, + 0x03, 0x7a, 0x8d, 0x9b, 0x48, 0x1a, 0x15, 0xad, 0xe3, 0x28, 0x2f, 0x6b, 0xeb, 0x80, 0xa2, 0x5c, + 0x51, 0x5e, 0x2b, 0x97, 0xad, 0x08, 0xe4, 0x3d, 0x90, 0x30, 0x84, 0x0e, 0xf9, 0x79, 0x3b, 0x2b, + 0xa4, 0x51, 0x62, 0x1d, 0x8b, 0xdd, 0xec, 0xe4, 0xe2, 0xa1, 0x01, 0x09, 0xa2, 0x95, 0xb4, 0xe0, + 0x99, 0xc9, 0xab, 0x02, 0xb1, 0x28, 0x21, 0x6a, 0xbf, 0x56, 0xf5, 0x8f, 0xe8, 0x17, 0x49, 0x63, + 0x80, 0xac, 0xef, 0xcf, 0xfe, 0x1c, 0xb3, 0xf1, 0xd2, 0x6f, 0x5b, 0xf8, 0x65, 0x96, 0x27, 0x8c, + 0xb9, 0x3b, 0x02, 0x7b, 0x87, 0xe5, 0xad, 0x0d, 0x83, 0xe9, 0x60, 0x3e, 0x8c, 0xdf, 0x8a, 0xa7, + 0x96, 0x8b, 0x47, 0xa8, 0xf8, 0x72, 0xe0, 0xd2, 0xff, 0x1c, 0x93, 0xbf, 0x03, 0xc6, 0xba, 0x16, + 0x7f, 0xcf, 0xce, 0x0c, 0x29, 0x24, 0xe5, 0x36, 0x61, 0x30, 0x0d, 0xe6, 0xa3, 0x78, 0xf6, 0x48, + 0x8f, 0x04, 0x22, 0xc5, 0xda, 0x29, 0x5d, 0x24, 0xbb, 0xc9, 0xf4, 0xc0, 0xf0, 0x2b, 0x36, 0xae, + 0x64, 0x93, 0xe5, 0xa8, 0x35, 0xe4, 0x4e, 0xa1, 0xb6, 0xe1, 0xd1, 0x34, 0x98, 0x0f, 0xe3, 0x0b, + 0xe1, 0xe3, 0x8a, 0x7d, 0x5c, 0xf1, 0xf5, 0x93, 0x76, 0x97, 0xf1, 0x8d, 0x2c, 0x6b, 0x48, 0x47, + 0x95, 0x6c, 0x96, 0x1d, 0xc3, 0xaf, 0xd9, 0xf9, 0x56, 0x63, 0x40, 0xdf, 0x2a, 0x5d, 0x64, 0x04, + 0x3f, 0x6b, 0xb0, 0xce, 0x86, 0x83, 0x1e, 0x2e, 0x5e, 0xc9, 0x26, 0xf1, 0x60, 0xba, 0xe3, 0xf8, + 0x07, 0xf6, 0x7c, 0xeb, 0x3b, 0x78, 0x8e, 0x7b, 
0x78, 0x86, 0x95, 0x6c, 0x0e, 0x82, 0x77, 0x6c, + 0xe8, 0x05, 0x8e, 0x14, 0xd8, 0xf0, 0x59, 0x0f, 0x9e, 0xb5, 0x7c, 0x3b, 0xcf, 0x5f, 0xb3, 0xb1, + 0x23, 0x99, 0xdf, 0x67, 0x04, 0x95, 0x54, 0x5a, 0xe9, 0x22, 0x3c, 0x99, 0x06, 0xf3, 0xb3, 0x74, + 0xd4, 0x96, 0xd3, 0x7d, 0x75, 0x1f, 0xbc, 0x3b, 0xbf, 0xcc, 0x20, 0x96, 0x36, 0x3c, 0xed, 0x19, + 0xbc, 0x3b, 0xc4, 0x64, 0xcb, 0x2d, 0xbe, 0xb3, 0x99, 0x42, 0x7f, 0x83, 0x86, 0xb0, 0xd9, 0x3c, + 0xf9, 0x56, 0x16, 0x2f, 0x1e, 0x3e, 0x96, 0x64, 0x6b, 0x4f, 0x82, 0x6f, 0xa7, 0xbb, 0xfe, 0xef, + 0xa3, 0x97, 0x57, 0x2d, 0xf6, 0xd1, 0x28, 0x71, 0x13, 0x8b, 0xa5, 0x2f, 0x5f, 0x7f, 0x5e, 0x9d, + 0xb4, 0x3f, 0x72, 0xf9, 0x2f, 0x00, 0x00, 0xff, 0xff, 0xae, 0xe0, 0x40, 0x7b, 0x2c, 0x03, 0x00, + 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/outlier_detection/outlier_detection.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/outlier_detection/outlier_detection.pb.go new file mode 100755 index 0000000..5ce5b6d --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cluster/outlier_detection/outlier_detection.pb.go @@ -0,0 +1,185 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/cluster/outlier_detection.proto + +package envoy_api_v2_cluster + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type OutlierDetection struct { + Consecutive_5Xx *wrappers.UInt32Value `protobuf:"bytes,1,opt,name=consecutive_5xx,json=consecutive5xx,proto3" json:"consecutive_5xx,omitempty"` + Interval *duration.Duration `protobuf:"bytes,2,opt,name=interval,proto3" json:"interval,omitempty"` + BaseEjectionTime *duration.Duration `protobuf:"bytes,3,opt,name=base_ejection_time,json=baseEjectionTime,proto3" json:"base_ejection_time,omitempty"` + MaxEjectionPercent *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=max_ejection_percent,json=maxEjectionPercent,proto3" json:"max_ejection_percent,omitempty"` + EnforcingConsecutive_5Xx *wrappers.UInt32Value `protobuf:"bytes,5,opt,name=enforcing_consecutive_5xx,json=enforcingConsecutive5xx,proto3" json:"enforcing_consecutive_5xx,omitempty"` + EnforcingSuccessRate *wrappers.UInt32Value `protobuf:"bytes,6,opt,name=enforcing_success_rate,json=enforcingSuccessRate,proto3" json:"enforcing_success_rate,omitempty"` + SuccessRateMinimumHosts *wrappers.UInt32Value `protobuf:"bytes,7,opt,name=success_rate_minimum_hosts,json=successRateMinimumHosts,proto3" json:"success_rate_minimum_hosts,omitempty"` + SuccessRateRequestVolume *wrappers.UInt32Value `protobuf:"bytes,8,opt,name=success_rate_request_volume,json=successRateRequestVolume,proto3" json:"success_rate_request_volume,omitempty"` + SuccessRateStdevFactor *wrappers.UInt32Value `protobuf:"bytes,9,opt,name=success_rate_stdev_factor,json=successRateStdevFactor,proto3" json:"success_rate_stdev_factor,omitempty"` + ConsecutiveGatewayFailure *wrappers.UInt32Value `protobuf:"bytes,10,opt,name=consecutive_gateway_failure,json=consecutiveGatewayFailure,proto3" json:"consecutive_gateway_failure,omitempty"` + EnforcingConsecutiveGatewayFailure *wrappers.UInt32Value `protobuf:"bytes,11,opt,name=enforcing_consecutive_gateway_failure,json=enforcingConsecutiveGatewayFailure,proto3" json:"enforcing_consecutive_gateway_failure,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutlierDetection) Reset() { *m = OutlierDetection{} } +func (m *OutlierDetection) String() string { return proto.CompactTextString(m) } +func (*OutlierDetection) ProtoMessage() {} +func (*OutlierDetection) Descriptor() ([]byte, []int) { + return fileDescriptor_outlier_detection_c374e0b25113dd85, []int{0} +} +func (m *OutlierDetection) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutlierDetection.Unmarshal(m, b) +} +func (m *OutlierDetection) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutlierDetection.Marshal(b, m, deterministic) +} +func (dst *OutlierDetection) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutlierDetection.Merge(dst, src) +} +func (m *OutlierDetection) XXX_Size() int { + return xxx_messageInfo_OutlierDetection.Size(m) +} +func (m *OutlierDetection) XXX_DiscardUnknown() { + xxx_messageInfo_OutlierDetection.DiscardUnknown(m) +} + +var xxx_messageInfo_OutlierDetection proto.InternalMessageInfo + +func (m *OutlierDetection) GetConsecutive_5Xx() *wrappers.UInt32Value { + if m != nil { + return m.Consecutive_5Xx + } + return nil +} + +func (m *OutlierDetection) GetInterval() *duration.Duration { + if m != nil { + return m.Interval + } + return nil +} + +func (m *OutlierDetection) GetBaseEjectionTime() *duration.Duration { + if m != nil { + return m.BaseEjectionTime + } + return nil +} + +func (m *OutlierDetection) 
GetMaxEjectionPercent() *wrappers.UInt32Value { + if m != nil { + return m.MaxEjectionPercent + } + return nil +} + +func (m *OutlierDetection) GetEnforcingConsecutive_5Xx() *wrappers.UInt32Value { + if m != nil { + return m.EnforcingConsecutive_5Xx + } + return nil +} + +func (m *OutlierDetection) GetEnforcingSuccessRate() *wrappers.UInt32Value { + if m != nil { + return m.EnforcingSuccessRate + } + return nil +} + +func (m *OutlierDetection) GetSuccessRateMinimumHosts() *wrappers.UInt32Value { + if m != nil { + return m.SuccessRateMinimumHosts + } + return nil +} + +func (m *OutlierDetection) GetSuccessRateRequestVolume() *wrappers.UInt32Value { + if m != nil { + return m.SuccessRateRequestVolume + } + return nil +} + +func (m *OutlierDetection) GetSuccessRateStdevFactor() *wrappers.UInt32Value { + if m != nil { + return m.SuccessRateStdevFactor + } + return nil +} + +func (m *OutlierDetection) GetConsecutiveGatewayFailure() *wrappers.UInt32Value { + if m != nil { + return m.ConsecutiveGatewayFailure + } + return nil +} + +func (m *OutlierDetection) GetEnforcingConsecutiveGatewayFailure() *wrappers.UInt32Value { + if m != nil { + return m.EnforcingConsecutiveGatewayFailure + } + return nil +} + +func init() { + proto.RegisterType((*OutlierDetection)(nil), "envoy.api.v2.cluster.OutlierDetection") +} + +func init() { + proto.RegisterFile("envoy/api/v2/cluster/outlier_detection.proto", fileDescriptor_outlier_detection_c374e0b25113dd85) +} + +var fileDescriptor_outlier_detection_c374e0b25113dd85 = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xed, 0x6a, 0x13, 0x4d, + 0x14, 0xc7, 0x9f, 0xa4, 0xef, 0x53, 0x78, 0x2c, 0x43, 0x4d, 0x26, 0xad, 0x88, 0x04, 0x04, 0x29, + 0x32, 0x0b, 0x29, 0xbd, 0x80, 0xa6, 0x4d, 0xd5, 0x0f, 0x6a, 0x48, 0x34, 0x22, 0x2a, 0xc3, 0x64, + 0x73, 0x12, 0x47, 0x76, 0x77, 0xd6, 0x79, 0xd9, 0x6e, 0xbc, 0xa4, 0x5e, 0x82, 0x9f, 0xbc, 0x1d, + 0xaf, 0xc1, 0x2f, 0xb2, 0x3b, 0x79, 0xd9, 0xa4, 0x01, 0x93, 0x6f, 0x0b, 0x73, 0x7e, 0xbf, 0xff, + 0xd9, 0x99, 0xc3, 0x41, 0xcf, 0x21, 0x4a, 0xe4, 0xd8, 0xe3, 0xb1, 0xf0, 0x92, 0x86, 0xe7, 0x07, + 0x56, 0x1b, 0x50, 0x9e, 0xb4, 0x26, 0x10, 0xa0, 0xd8, 0x00, 0x0c, 0xf8, 0x46, 0xc8, 0x88, 0xc6, + 0x4a, 0x1a, 0x89, 0x8f, 0xf3, 0x6a, 0xca, 0x63, 0x41, 0x93, 0x06, 0x9d, 0x54, 0x9f, 0x3c, 0x1e, + 0x49, 0x39, 0x0a, 0xc0, 0xcb, 0x6b, 0xfa, 0x76, 0xe8, 0x0d, 0xac, 0xe2, 0x73, 0xea, 0xfe, 0xf9, + 0xad, 0xe2, 0x71, 0x0c, 0x4a, 0x4f, 0xce, 0xab, 0x09, 0x0f, 0xc4, 0x80, 0x1b, 0xf0, 0xa6, 0x1f, + 0xee, 0xa0, 0xfe, 0x67, 0x0f, 0x1d, 0xbd, 0x75, 0xad, 0x5c, 0x4f, 0x3b, 0xc1, 0x2d, 0xf4, 0xc0, + 0x97, 0x91, 0x06, 0xdf, 0x1a, 0x91, 0x00, 0xbb, 0x48, 0x53, 0x52, 0x7a, 0x52, 0x7a, 0x76, 0xd8, + 0x78, 0x44, 0x5d, 0x0e, 0x9d, 0xe6, 0xd0, 0xf7, 0xaf, 0x22, 0x73, 0xde, 0xe8, 0xf1, 0xc0, 0x42, + 0xe7, 0xff, 0x02, 0x74, 0x91, 0xa6, 0xf8, 0x12, 0xed, 0x8b, 0xc8, 0x80, 0x4a, 0x78, 0x40, 0xca, + 0x39, 0x5f, 0xbb, 0xc7, 0x5f, 0x4f, 0xfe, 0xa3, 0x89, 0x7e, 0xfe, 0xfe, 0xb5, 0xb5, 0x73, 0x57, + 0x2a, 0x9f, 0xfd, 0xd7, 0x99, 0x61, 0xb8, 0x8b, 0x70, 0x9f, 0x6b, 0x60, 0xf0, 0xcd, 0xb5, 0xc6, + 0x8c, 0x08, 0x81, 0x6c, 0x6d, 0x22, 0x3b, 0xca, 0x04, 0xad, 0x09, 0xff, 0x4e, 0x84, 0x80, 0x3f, + 0xa2, 0xe3, 0x90, 0xa7, 0x73, 0x67, 0x0c, 0xca, 0x87, 0xc8, 0x90, 0xed, 0x7f, 0xff, 0x63, 0xf3, + 0x20, 0x33, 0x6f, 0x9f, 0x95, 0xc9, 0xa0, 0x83, 0x43, 0x9e, 0x4e, 0xbd, 0x6d, 0xa7, 0xc0, 0x3e, + 0xaa, 0x41, 0x34, 0x94, 0xca, 0x17, 0xd1, 0x88, 0x2d, 0xdf, 0xe1, 0xce, 0x66, 0xfe, 0xea, 0xcc, + 0x74, 0xb5, 
0x78, 0xaf, 0x5f, 0x50, 0x65, 0x1e, 0xa2, 0xad, 0xef, 0x83, 0xd6, 0x4c, 0x71, 0x03, + 0x64, 0x77, 0xb3, 0x84, 0xe3, 0x99, 0xa6, 0xeb, 0x2c, 0x1d, 0x6e, 0xb2, 0xeb, 0x39, 0x29, 0x4a, + 0x59, 0x28, 0x22, 0x11, 0xda, 0x90, 0x7d, 0x95, 0xda, 0x68, 0xb2, 0xb7, 0xc6, 0x20, 0x54, 0xf5, + 0x5c, 0xf7, 0xda, 0xd1, 0x2f, 0x33, 0x18, 0x7f, 0x42, 0xa7, 0x0b, 0x6a, 0x05, 0xdf, 0x2d, 0x68, + 0xc3, 0x12, 0x19, 0xd8, 0x10, 0xc8, 0xfe, 0x1a, 0x6e, 0x52, 0x70, 0x77, 0x1c, 0xde, 0xcb, 0x69, + 0xfc, 0x01, 0xd5, 0x16, 0xe4, 0xda, 0x0c, 0x20, 0x61, 0x43, 0xee, 0x1b, 0xa9, 0xc8, 0xc1, 0x1a, + 0xea, 0x4a, 0x41, 0xdd, 0xcd, 0xe0, 0x9b, 0x9c, 0xc5, 0x9f, 0xd1, 0x69, 0xf1, 0x29, 0x47, 0xdc, + 0xc0, 0x2d, 0x1f, 0xb3, 0x21, 0x17, 0x81, 0x55, 0x40, 0xd0, 0x1a, 0xea, 0x5a, 0x41, 0xf0, 0xc2, + 0xf1, 0x37, 0x0e, 0xc7, 0x3f, 0xd0, 0xd3, 0xd5, 0x23, 0xb3, 0x9c, 0x73, 0xb8, 0xd9, 0xe3, 0xd6, + 0x57, 0x8d, 0xcf, 0x62, 0x76, 0xb3, 0x87, 0xea, 0x42, 0xd2, 0x7c, 0xe3, 0xc4, 0x4a, 0xa6, 0x63, + 0xba, 0x6a, 0xf9, 0x34, 0x1f, 0x2e, 0x2f, 0x88, 0x76, 0x16, 0xdd, 0x2e, 0xdd, 0x95, 0x2b, 0xad, + 0xbc, 0xfe, 0x32, 0x16, 0xb4, 0xd7, 0xa0, 0x57, 0xae, 0xfe, 0x4d, 0xb7, 0xbf, 0x9b, 0x37, 0x77, + 0xfe, 0x37, 0x00, 0x00, 0xff, 0xff, 0xfa, 0xbc, 0x0f, 0x9b, 0xfb, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address/address.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address/address.pb.go new file mode 100755 index 0000000..85e7fa0 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address/address.pb.go @@ -0,0 +1,610 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/address.proto + +package envoy_api_v2_core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SocketAddress_Protocol int32 + +const ( + SocketAddress_TCP SocketAddress_Protocol = 0 + SocketAddress_UDP SocketAddress_Protocol = 1 +) + +var SocketAddress_Protocol_name = map[int32]string{ + 0: "TCP", + 1: "UDP", +} +var SocketAddress_Protocol_value = map[string]int32{ + "TCP": 0, + "UDP": 1, +} + +func (x SocketAddress_Protocol) String() string { + return proto.EnumName(SocketAddress_Protocol_name, int32(x)) +} +func (SocketAddress_Protocol) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{1, 0} +} + +type Pipe struct { + Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Pipe) Reset() { *m = Pipe{} } +func (m *Pipe) String() string { return proto.CompactTextString(m) } +func (*Pipe) ProtoMessage() {} +func (*Pipe) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{0} +} +func (m *Pipe) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Pipe.Unmarshal(m, b) +} +func (m *Pipe) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Pipe.Marshal(b, m, deterministic) +} +func (dst *Pipe) XXX_Merge(src proto.Message) { + xxx_messageInfo_Pipe.Merge(dst, src) +} +func (m *Pipe) XXX_Size() int { + return xxx_messageInfo_Pipe.Size(m) +} +func (m *Pipe) XXX_DiscardUnknown() { + xxx_messageInfo_Pipe.DiscardUnknown(m) +} + +var xxx_messageInfo_Pipe proto.InternalMessageInfo + +func (m *Pipe) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +type SocketAddress struct { + Protocol SocketAddress_Protocol `protobuf:"varint,1,opt,name=protocol,proto3,enum=envoy.api.v2.core.SocketAddress_Protocol" json:"protocol,omitempty"` + Address string `protobuf:"bytes,2,opt,name=address,proto3" json:"address,omitempty"` + // Types that are valid to be assigned to PortSpecifier: + // *SocketAddress_PortValue + // *SocketAddress_NamedPort + PortSpecifier isSocketAddress_PortSpecifier `protobuf_oneof:"port_specifier"` + ResolverName string `protobuf:"bytes,5,opt,name=resolver_name,json=resolverName,proto3" json:"resolver_name,omitempty"` + Ipv4Compat bool `protobuf:"varint,6,opt,name=ipv4_compat,json=ipv4Compat,proto3" json:"ipv4_compat,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketAddress) Reset() { *m = SocketAddress{} } +func (m *SocketAddress) String() string { return proto.CompactTextString(m) } +func (*SocketAddress) ProtoMessage() {} +func (*SocketAddress) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{1} +} +func (m *SocketAddress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketAddress.Unmarshal(m, b) +} +func (m *SocketAddress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketAddress.Marshal(b, m, deterministic) +} +func (dst *SocketAddress) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketAddress.Merge(dst, src) +} +func (m *SocketAddress) XXX_Size() int { + return xxx_messageInfo_SocketAddress.Size(m) +} +func (m *SocketAddress) XXX_DiscardUnknown() { + xxx_messageInfo_SocketAddress.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketAddress proto.InternalMessageInfo + +func (m *SocketAddress) GetProtocol() SocketAddress_Protocol { + if m != 
nil { + return m.Protocol + } + return SocketAddress_TCP +} + +func (m *SocketAddress) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +type isSocketAddress_PortSpecifier interface { + isSocketAddress_PortSpecifier() +} + +type SocketAddress_PortValue struct { + PortValue uint32 `protobuf:"varint,3,opt,name=port_value,json=portValue,proto3,oneof"` +} + +type SocketAddress_NamedPort struct { + NamedPort string `protobuf:"bytes,4,opt,name=named_port,json=namedPort,proto3,oneof"` +} + +func (*SocketAddress_PortValue) isSocketAddress_PortSpecifier() {} + +func (*SocketAddress_NamedPort) isSocketAddress_PortSpecifier() {} + +func (m *SocketAddress) GetPortSpecifier() isSocketAddress_PortSpecifier { + if m != nil { + return m.PortSpecifier + } + return nil +} + +func (m *SocketAddress) GetPortValue() uint32 { + if x, ok := m.GetPortSpecifier().(*SocketAddress_PortValue); ok { + return x.PortValue + } + return 0 +} + +func (m *SocketAddress) GetNamedPort() string { + if x, ok := m.GetPortSpecifier().(*SocketAddress_NamedPort); ok { + return x.NamedPort + } + return "" +} + +func (m *SocketAddress) GetResolverName() string { + if m != nil { + return m.ResolverName + } + return "" +} + +func (m *SocketAddress) GetIpv4Compat() bool { + if m != nil { + return m.Ipv4Compat + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SocketAddress) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SocketAddress_OneofMarshaler, _SocketAddress_OneofUnmarshaler, _SocketAddress_OneofSizer, []interface{}{ + (*SocketAddress_PortValue)(nil), + (*SocketAddress_NamedPort)(nil), + } +} + +func _SocketAddress_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SocketAddress) + // port_specifier + switch x := m.PortSpecifier.(type) { + case *SocketAddress_PortValue: + b.EncodeVarint(3<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.PortValue)) + case *SocketAddress_NamedPort: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.NamedPort) + case nil: + default: + return fmt.Errorf("SocketAddress.PortSpecifier has unexpected type %T", x) + } + return nil +} + +func _SocketAddress_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SocketAddress) + switch tag { + case 3: // port_specifier.port_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.PortSpecifier = &SocketAddress_PortValue{uint32(x)} + return true, err + case 4: // port_specifier.named_port + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.PortSpecifier = &SocketAddress_NamedPort{x} + return true, err + default: + return false, nil + } +} + +func _SocketAddress_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SocketAddress) + // port_specifier + switch x := m.PortSpecifier.(type) { + case *SocketAddress_PortValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.PortValue)) + case *SocketAddress_NamedPort: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.NamedPort))) + n += len(x.NamedPort) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type TcpKeepalive struct { + KeepaliveProbes *wrappers.UInt32Value 
`protobuf:"bytes,1,opt,name=keepalive_probes,json=keepaliveProbes,proto3" json:"keepalive_probes,omitempty"` + KeepaliveTime *wrappers.UInt32Value `protobuf:"bytes,2,opt,name=keepalive_time,json=keepaliveTime,proto3" json:"keepalive_time,omitempty"` + KeepaliveInterval *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=keepalive_interval,json=keepaliveInterval,proto3" json:"keepalive_interval,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TcpKeepalive) Reset() { *m = TcpKeepalive{} } +func (m *TcpKeepalive) String() string { return proto.CompactTextString(m) } +func (*TcpKeepalive) ProtoMessage() {} +func (*TcpKeepalive) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{2} +} +func (m *TcpKeepalive) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TcpKeepalive.Unmarshal(m, b) +} +func (m *TcpKeepalive) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TcpKeepalive.Marshal(b, m, deterministic) +} +func (dst *TcpKeepalive) XXX_Merge(src proto.Message) { + xxx_messageInfo_TcpKeepalive.Merge(dst, src) +} +func (m *TcpKeepalive) XXX_Size() int { + return xxx_messageInfo_TcpKeepalive.Size(m) +} +func (m *TcpKeepalive) XXX_DiscardUnknown() { + xxx_messageInfo_TcpKeepalive.DiscardUnknown(m) +} + +var xxx_messageInfo_TcpKeepalive proto.InternalMessageInfo + +func (m *TcpKeepalive) GetKeepaliveProbes() *wrappers.UInt32Value { + if m != nil { + return m.KeepaliveProbes + } + return nil +} + +func (m *TcpKeepalive) GetKeepaliveTime() *wrappers.UInt32Value { + if m != nil { + return m.KeepaliveTime + } + return nil +} + +func (m *TcpKeepalive) GetKeepaliveInterval() *wrappers.UInt32Value { + if m != nil { + return m.KeepaliveInterval + } + return nil +} + +type BindConfig struct { + SourceAddress *SocketAddress `protobuf:"bytes,1,opt,name=source_address,json=sourceAddress,proto3" json:"source_address,omitempty"` + Freebind *wrappers.BoolValue `protobuf:"bytes,2,opt,name=freebind,proto3" json:"freebind,omitempty"` + SocketOptions []*base.SocketOption `protobuf:"bytes,3,rep,name=socket_options,json=socketOptions,proto3" json:"socket_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BindConfig) Reset() { *m = BindConfig{} } +func (m *BindConfig) String() string { return proto.CompactTextString(m) } +func (*BindConfig) ProtoMessage() {} +func (*BindConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{3} +} +func (m *BindConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BindConfig.Unmarshal(m, b) +} +func (m *BindConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BindConfig.Marshal(b, m, deterministic) +} +func (dst *BindConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_BindConfig.Merge(dst, src) +} +func (m *BindConfig) XXX_Size() int { + return xxx_messageInfo_BindConfig.Size(m) +} +func (m *BindConfig) XXX_DiscardUnknown() { + xxx_messageInfo_BindConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_BindConfig proto.InternalMessageInfo + +func (m *BindConfig) GetSourceAddress() *SocketAddress { + if m != nil { + return m.SourceAddress + } + return nil +} + +func (m *BindConfig) GetFreebind() *wrappers.BoolValue { + if m != nil { + return m.Freebind + } + return nil +} + +func (m *BindConfig) GetSocketOptions() []*base.SocketOption { + if 
m != nil { + return m.SocketOptions + } + return nil +} + +type Address struct { + // Types that are valid to be assigned to Address: + // *Address_SocketAddress + // *Address_Pipe + Address isAddress_Address `protobuf_oneof:"address"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address) Reset() { *m = Address{} } +func (m *Address) String() string { return proto.CompactTextString(m) } +func (*Address) ProtoMessage() {} +func (*Address) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{4} +} +func (m *Address) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address.Unmarshal(m, b) +} +func (m *Address) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address.Marshal(b, m, deterministic) +} +func (dst *Address) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address.Merge(dst, src) +} +func (m *Address) XXX_Size() int { + return xxx_messageInfo_Address.Size(m) +} +func (m *Address) XXX_DiscardUnknown() { + xxx_messageInfo_Address.DiscardUnknown(m) +} + +var xxx_messageInfo_Address proto.InternalMessageInfo + +type isAddress_Address interface { + isAddress_Address() +} + +type Address_SocketAddress struct { + SocketAddress *SocketAddress `protobuf:"bytes,1,opt,name=socket_address,json=socketAddress,proto3,oneof"` +} + +type Address_Pipe struct { + Pipe *Pipe `protobuf:"bytes,2,opt,name=pipe,proto3,oneof"` +} + +func (*Address_SocketAddress) isAddress_Address() {} + +func (*Address_Pipe) isAddress_Address() {} + +func (m *Address) GetAddress() isAddress_Address { + if m != nil { + return m.Address + } + return nil +} + +func (m *Address) GetSocketAddress() *SocketAddress { + if x, ok := m.GetAddress().(*Address_SocketAddress); ok { + return x.SocketAddress + } + return nil +} + +func (m *Address) GetPipe() *Pipe { + if x, ok := m.GetAddress().(*Address_Pipe); ok { + return x.Pipe + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Address) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Address_OneofMarshaler, _Address_OneofUnmarshaler, _Address_OneofSizer, []interface{}{ + (*Address_SocketAddress)(nil), + (*Address_Pipe)(nil), + } +} + +func _Address_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_SocketAddress: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SocketAddress); err != nil { + return err + } + case *Address_Pipe: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Pipe); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Address.Address has unexpected type %T", x) + } + return nil +} + +func _Address_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Address) + switch tag { + case 1: // address.socket_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SocketAddress) + err := b.DecodeMessage(msg) + m.Address = &Address_SocketAddress{msg} + return true, err + case 2: // address.pipe + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Pipe) + err := b.DecodeMessage(msg) + m.Address = &Address_Pipe{msg} + return true, err + default: + return false, nil + } +} + +func _Address_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_SocketAddress: + s := proto.Size(x.SocketAddress) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Address_Pipe: + s := proto.Size(x.Pipe) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type CidrRange struct { + AddressPrefix string `protobuf:"bytes,1,opt,name=address_prefix,json=addressPrefix,proto3" json:"address_prefix,omitempty"` + PrefixLen *wrappers.UInt32Value `protobuf:"bytes,2,opt,name=prefix_len,json=prefixLen,proto3" json:"prefix_len,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CidrRange) Reset() { *m = CidrRange{} } +func (m *CidrRange) String() string { return proto.CompactTextString(m) } +func (*CidrRange) ProtoMessage() {} +func (*CidrRange) Descriptor() ([]byte, []int) { + return fileDescriptor_address_b91d58d2da3489da, []int{5} +} +func (m *CidrRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CidrRange.Unmarshal(m, b) +} +func (m *CidrRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CidrRange.Marshal(b, m, deterministic) +} +func (dst *CidrRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_CidrRange.Merge(dst, src) +} +func (m *CidrRange) XXX_Size() int { + return xxx_messageInfo_CidrRange.Size(m) +} +func (m *CidrRange) XXX_DiscardUnknown() { + xxx_messageInfo_CidrRange.DiscardUnknown(m) +} + +var xxx_messageInfo_CidrRange proto.InternalMessageInfo + +func (m *CidrRange) GetAddressPrefix() string { + if m != nil { + return m.AddressPrefix + } + return "" +} + +func (m *CidrRange) GetPrefixLen() *wrappers.UInt32Value { + if m != nil { + return m.PrefixLen + } + return nil +} + +func init() { + proto.RegisterType((*Pipe)(nil), 
"envoy.api.v2.core.Pipe") + proto.RegisterType((*SocketAddress)(nil), "envoy.api.v2.core.SocketAddress") + proto.RegisterType((*TcpKeepalive)(nil), "envoy.api.v2.core.TcpKeepalive") + proto.RegisterType((*BindConfig)(nil), "envoy.api.v2.core.BindConfig") + proto.RegisterType((*Address)(nil), "envoy.api.v2.core.Address") + proto.RegisterType((*CidrRange)(nil), "envoy.api.v2.core.CidrRange") + proto.RegisterEnum("envoy.api.v2.core.SocketAddress_Protocol", SocketAddress_Protocol_name, SocketAddress_Protocol_value) +} + +func init() { + proto.RegisterFile("envoy/api/v2/core/address.proto", fileDescriptor_address_b91d58d2da3489da) +} + +var fileDescriptor_address_b91d58d2da3489da = []byte{ + // 667 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x53, 0x4f, 0x4f, 0xdb, 0x48, + 0x14, 0xcf, 0xc4, 0x01, 0x92, 0x17, 0x92, 0x0d, 0x73, 0xc1, 0x8a, 0xd8, 0x4d, 0x14, 0xb4, 0x52, + 0x16, 0xed, 0x3a, 0xbb, 0x61, 0xb5, 0x77, 0x9c, 0x55, 0x01, 0x51, 0xb5, 0xae, 0x81, 0x5e, 0xad, + 0x49, 0xf2, 0x92, 0x8e, 0x70, 0x3c, 0xa3, 0xb1, 0x71, 0xe1, 0x56, 0xf5, 0xd0, 0x43, 0xef, 0xfd, + 0x2e, 0x55, 0x4f, 0x7c, 0x87, 0x7e, 0x82, 0x1e, 0xf9, 0x14, 0x54, 0x33, 0xb6, 0x83, 0xda, 0xb4, + 0xa2, 0xbd, 0xcd, 0xbc, 0xf7, 0xfb, 0xfd, 0xe6, 0xf7, 0xfe, 0x0c, 0x74, 0x30, 0x4a, 0xc5, 0xf5, + 0x80, 0x49, 0x3e, 0x48, 0x87, 0x83, 0x89, 0x50, 0x38, 0x60, 0xd3, 0xa9, 0xc2, 0x38, 0x76, 0xa4, + 0x12, 0x89, 0xa0, 0x5b, 0x06, 0xe0, 0x30, 0xc9, 0x9d, 0x74, 0xe8, 0x68, 0x40, 0x7b, 0x67, 0x95, + 0x33, 0x66, 0x31, 0x66, 0x84, 0xf6, 0x6f, 0x73, 0x21, 0xe6, 0x21, 0x0e, 0xcc, 0x6d, 0x7c, 0x39, + 0x1b, 0xbc, 0x54, 0x4c, 0x4a, 0x54, 0xb9, 0x60, 0x7b, 0x3b, 0x65, 0x21, 0x9f, 0xb2, 0x04, 0x07, + 0xc5, 0x21, 0x4b, 0xf4, 0x7e, 0x87, 0x8a, 0xc7, 0x25, 0xd2, 0x5f, 0xa1, 0x22, 0x59, 0xf2, 0xc2, + 0x26, 0x5d, 0xd2, 0xaf, 0xb9, 0xb5, 0x0f, 0xb7, 0x37, 0x56, 0x45, 0x95, 0xbb, 0xc4, 0x37, 0xe1, + 0xde, 0xc7, 0x32, 0x34, 0x4e, 0xc5, 0xe4, 0x02, 0x93, 0x83, 0xcc, 0x28, 0x7d, 0x06, 0x55, 0xa3, + 0x30, 0x11, 0xa1, 0x21, 0x35, 0x87, 0x7f, 0x38, 0x2b, 0xae, 0x9d, 0x2f, 0x38, 0x8e, 0x97, 0x13, + 0x5c, 0xd0, 0xfa, 0x6b, 0xaf, 0x49, 0xb9, 0x45, 0xfc, 0xa5, 0x0c, 0xdd, 0x85, 0x8d, 0xbc, 0x0d, + 0x76, 0xf9, 0x6b, 0x1b, 0x45, 0x86, 0xfe, 0x09, 0x20, 0x85, 0x4a, 0x82, 0x94, 0x85, 0x97, 0x68, + 0x5b, 0x5d, 0xd2, 0x6f, 0xb8, 0x75, 0x8d, 0x5b, 0xdf, 0xab, 0xd8, 0x77, 0x77, 0xd6, 0x51, 0xc9, + 0xaf, 0x69, 0xc0, 0x73, 0x9d, 0xa7, 0x1d, 0x80, 0x88, 0x2d, 0x70, 0x1a, 0xe8, 0x90, 0x5d, 0xd1, + 0xaa, 0x1a, 0x60, 0x62, 0x9e, 0x50, 0x09, 0xdd, 0x85, 0x86, 0xc2, 0x58, 0x84, 0x29, 0xaa, 0x40, + 0x47, 0xed, 0x35, 0x8d, 0xf1, 0x37, 0x8b, 0xe0, 0x13, 0xb6, 0xd0, 0x2a, 0x75, 0x2e, 0xd3, 0x7f, + 0x83, 0x89, 0x58, 0x48, 0x96, 0xd8, 0xeb, 0x5d, 0xd2, 0xaf, 0xfa, 0xa0, 0x43, 0x23, 0x13, 0xe9, + 0xed, 0x40, 0xb5, 0xa8, 0x8d, 0x6e, 0x80, 0x75, 0x36, 0xf2, 0x5a, 0x25, 0x7d, 0x38, 0xff, 0xdf, + 0x6b, 0x11, 0x77, 0x1b, 0x9a, 0xc6, 0x72, 0x2c, 0x71, 0xc2, 0x67, 0x1c, 0x15, 0x5d, 0x7b, 0x7f, + 0x7b, 0x63, 0x91, 0xde, 0x2d, 0x81, 0xcd, 0xb3, 0x89, 0x3c, 0x41, 0x94, 0x2c, 0xe4, 0x29, 0xd2, + 0x43, 0x68, 0x5d, 0x14, 0x97, 0x40, 0x2a, 0x31, 0xc6, 0xd8, 0x34, 0xb7, 0x3e, 0xdc, 0x71, 0xb2, + 0x09, 0x3b, 0xc5, 0x84, 0x9d, 0xf3, 0xe3, 0x28, 0xd9, 0x1f, 0x9a, 0x32, 0xfd, 0x5f, 0x96, 0x2c, + 0xcf, 0x90, 0xe8, 0x08, 0x9a, 0xf7, 0x42, 0x09, 0x5f, 0xa0, 0xe9, 0xe8, 0x43, 0x32, 0x8d, 0x25, + 0xe7, 0x8c, 0x2f, 0x90, 0x9e, 0x00, 0xbd, 0x17, 0xe1, 0x51, 0x82, 0x2a, 0x65, 0xa1, 0x69, 0xf9, + 0x43, 0x42, 0x5b, 0x4b, 0xde, 0x71, 0x4e, 0xeb, 0x7d, 0x22, 0x00, 
0x2e, 0x8f, 0xa6, 0x23, 0x11, + 0xcd, 0xf8, 0x9c, 0x9e, 0x42, 0x33, 0x16, 0x97, 0x6a, 0x82, 0x41, 0x31, 0xf2, 0xac, 0xce, 0xee, + 0x43, 0x4b, 0x94, 0xef, 0xce, 0x5b, 0xb3, 0x3b, 0x8d, 0x4c, 0xa3, 0xd8, 0xc9, 0xff, 0xa0, 0x3a, + 0x53, 0x88, 0x63, 0x1e, 0x4d, 0xf3, 0x7a, 0xdb, 0x2b, 0x36, 0x5d, 0x21, 0xc2, 0xcc, 0xe4, 0x12, + 0x4b, 0x1f, 0x69, 0x33, 0xfa, 0x8d, 0x40, 0xc8, 0x84, 0x8b, 0x28, 0xb6, 0xad, 0xae, 0xd5, 0xaf, + 0x0f, 0x3b, 0xdf, 0x35, 0xf3, 0xd4, 0xe0, 0xf4, 0xfb, 0xf7, 0xb7, 0xb8, 0xf7, 0x8e, 0xc0, 0x46, + 0xe1, 0xe5, 0x78, 0xa9, 0xf9, 0x93, 0x05, 0x1e, 0x95, 0x0a, 0xd9, 0x42, 0xea, 0x2f, 0xa8, 0x48, + 0x2e, 0x8b, 0x11, 0x6e, 0x7f, 0x43, 0x40, 0x7f, 0xe1, 0xa3, 0x92, 0x6f, 0x60, 0x6e, 0x6b, 0xf9, + 0x8d, 0x8a, 0x3d, 0x7b, 0x43, 0xa0, 0x36, 0xe2, 0x53, 0xe5, 0xb3, 0x68, 0x8e, 0xf4, 0x6f, 0x68, + 0xe6, 0xf9, 0x40, 0x2a, 0x9c, 0xf1, 0xab, 0xd5, 0x4f, 0xdf, 0xc8, 0x01, 0x9e, 0xc9, 0xd3, 0x43, + 0x80, 0x0c, 0x19, 0x84, 0x18, 0xfd, 0xc8, 0x26, 0xe5, 0x43, 0xda, 0xb3, 0xec, 0x57, 0xc4, 0xaf, + 0x65, 0xdc, 0xc7, 0x18, 0xb9, 0xff, 0x40, 0x87, 0x8b, 0xcc, 0xbf, 0x54, 0xe2, 0xea, 0x7a, 0xb5, + 0x14, 0x77, 0xf3, 0xa0, 0x78, 0x5a, 0x24, 0xc2, 0x23, 0xe3, 0x75, 0xa3, 0xbf, 0xff, 0x39, 0x00, + 0x00, 0xff, 0xff, 0x03, 0x02, 0x9c, 0x89, 0x34, 0x05, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base/base.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base/base.pb.go new file mode 100755 index 0000000..3345eb0 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base/base.pb.go @@ -0,0 +1,1121 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/base.proto + +package core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import percent "google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type RoutingPriority int32 + +const ( + RoutingPriority_DEFAULT RoutingPriority = 0 + RoutingPriority_HIGH RoutingPriority = 1 +) + +var RoutingPriority_name = map[int32]string{ + 0: "DEFAULT", + 1: "HIGH", +} +var RoutingPriority_value = map[string]int32{ + "DEFAULT": 0, + "HIGH": 1, +} + +func (x RoutingPriority) String() string { + return proto.EnumName(RoutingPriority_name, int32(x)) +} +func (RoutingPriority) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{0} +} + +type RequestMethod int32 + +const ( + RequestMethod_METHOD_UNSPECIFIED RequestMethod = 0 + RequestMethod_GET RequestMethod = 1 + RequestMethod_HEAD RequestMethod = 2 + RequestMethod_POST RequestMethod = 3 + RequestMethod_PUT RequestMethod = 4 + RequestMethod_DELETE RequestMethod = 5 + RequestMethod_CONNECT RequestMethod = 6 + RequestMethod_OPTIONS RequestMethod = 7 + RequestMethod_TRACE RequestMethod = 8 +) + +var RequestMethod_name = map[int32]string{ + 0: "METHOD_UNSPECIFIED", + 1: "GET", + 2: "HEAD", + 3: "POST", + 4: "PUT", + 5: "DELETE", + 6: "CONNECT", + 7: "OPTIONS", + 8: "TRACE", +} +var RequestMethod_value = map[string]int32{ + "METHOD_UNSPECIFIED": 0, + "GET": 1, + "HEAD": 2, + "POST": 3, + "PUT": 4, + "DELETE": 5, + "CONNECT": 6, + "OPTIONS": 7, + "TRACE": 8, +} + +func (x RequestMethod) String() string { + return proto.EnumName(RequestMethod_name, int32(x)) +} +func (RequestMethod) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{1} +} + +type SocketOption_SocketState int32 + +const ( + SocketOption_STATE_PREBIND SocketOption_SocketState = 0 + SocketOption_STATE_BOUND SocketOption_SocketState = 1 + SocketOption_STATE_LISTENING SocketOption_SocketState = 2 +) + +var SocketOption_SocketState_name = map[int32]string{ + 0: "STATE_PREBIND", + 1: "STATE_BOUND", + 2: "STATE_LISTENING", +} +var SocketOption_SocketState_value = map[string]int32{ + "STATE_PREBIND": 0, + "STATE_BOUND": 1, + "STATE_LISTENING": 2, +} + +func (x SocketOption_SocketState) String() string { + return proto.EnumName(SocketOption_SocketState_name, int32(x)) +} +func (SocketOption_SocketState) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{9, 0} +} + +type Locality struct { + Region string `protobuf:"bytes,1,opt,name=region,proto3" json:"region,omitempty"` + Zone string `protobuf:"bytes,2,opt,name=zone,proto3" json:"zone,omitempty"` + SubZone string `protobuf:"bytes,3,opt,name=sub_zone,json=subZone,proto3" json:"sub_zone,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Locality) Reset() { *m = Locality{} } +func (m *Locality) String() string { return proto.CompactTextString(m) } +func (*Locality) ProtoMessage() {} +func (*Locality) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{0} +} +func (m *Locality) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Locality.Unmarshal(m, b) +} +func (m *Locality) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Locality.Marshal(b, m, deterministic) +} +func (dst *Locality) XXX_Merge(src proto.Message) { + xxx_messageInfo_Locality.Merge(dst, src) +} +func (m *Locality) XXX_Size() int { + return xxx_messageInfo_Locality.Size(m) +} +func (m *Locality) XXX_DiscardUnknown() { + xxx_messageInfo_Locality.DiscardUnknown(m) +} + +var xxx_messageInfo_Locality 
proto.InternalMessageInfo + +func (m *Locality) GetRegion() string { + if m != nil { + return m.Region + } + return "" +} + +func (m *Locality) GetZone() string { + if m != nil { + return m.Zone + } + return "" +} + +func (m *Locality) GetSubZone() string { + if m != nil { + return m.SubZone + } + return "" +} + +type Node struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + Cluster string `protobuf:"bytes,2,opt,name=cluster,proto3" json:"cluster,omitempty"` + Metadata *_struct.Struct `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + Locality *Locality `protobuf:"bytes,4,opt,name=locality,proto3" json:"locality,omitempty"` + BuildVersion string `protobuf:"bytes,5,opt,name=build_version,json=buildVersion,proto3" json:"build_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Node) Reset() { *m = Node{} } +func (m *Node) String() string { return proto.CompactTextString(m) } +func (*Node) ProtoMessage() {} +func (*Node) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{1} +} +func (m *Node) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Node.Unmarshal(m, b) +} +func (m *Node) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Node.Marshal(b, m, deterministic) +} +func (dst *Node) XXX_Merge(src proto.Message) { + xxx_messageInfo_Node.Merge(dst, src) +} +func (m *Node) XXX_Size() int { + return xxx_messageInfo_Node.Size(m) +} +func (m *Node) XXX_DiscardUnknown() { + xxx_messageInfo_Node.DiscardUnknown(m) +} + +var xxx_messageInfo_Node proto.InternalMessageInfo + +func (m *Node) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *Node) GetCluster() string { + if m != nil { + return m.Cluster + } + return "" +} + +func (m *Node) GetMetadata() *_struct.Struct { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Node) GetLocality() *Locality { + if m != nil { + return m.Locality + } + return nil +} + +func (m *Node) GetBuildVersion() string { + if m != nil { + return m.BuildVersion + } + return "" +} + +type Metadata struct { + FilterMetadata map[string]*_struct.Struct `protobuf:"bytes,1,rep,name=filter_metadata,json=filterMetadata,proto3" json:"filter_metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Metadata) Reset() { *m = Metadata{} } +func (m *Metadata) String() string { return proto.CompactTextString(m) } +func (*Metadata) ProtoMessage() {} +func (*Metadata) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{2} +} +func (m *Metadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Metadata.Unmarshal(m, b) +} +func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Metadata.Marshal(b, m, deterministic) +} +func (dst *Metadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_Metadata.Merge(dst, src) +} +func (m *Metadata) XXX_Size() int { + return xxx_messageInfo_Metadata.Size(m) +} +func (m *Metadata) XXX_DiscardUnknown() { + xxx_messageInfo_Metadata.DiscardUnknown(m) +} + +var xxx_messageInfo_Metadata proto.InternalMessageInfo + +func (m *Metadata) GetFilterMetadata() map[string]*_struct.Struct { + if m != nil { + return m.FilterMetadata + } + return nil 
+} + +type RuntimeUInt32 struct { + DefaultValue uint32 `protobuf:"varint,2,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + RuntimeKey string `protobuf:"bytes,3,opt,name=runtime_key,json=runtimeKey,proto3" json:"runtime_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RuntimeUInt32) Reset() { *m = RuntimeUInt32{} } +func (m *RuntimeUInt32) String() string { return proto.CompactTextString(m) } +func (*RuntimeUInt32) ProtoMessage() {} +func (*RuntimeUInt32) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{3} +} +func (m *RuntimeUInt32) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RuntimeUInt32.Unmarshal(m, b) +} +func (m *RuntimeUInt32) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RuntimeUInt32.Marshal(b, m, deterministic) +} +func (dst *RuntimeUInt32) XXX_Merge(src proto.Message) { + xxx_messageInfo_RuntimeUInt32.Merge(dst, src) +} +func (m *RuntimeUInt32) XXX_Size() int { + return xxx_messageInfo_RuntimeUInt32.Size(m) +} +func (m *RuntimeUInt32) XXX_DiscardUnknown() { + xxx_messageInfo_RuntimeUInt32.DiscardUnknown(m) +} + +var xxx_messageInfo_RuntimeUInt32 proto.InternalMessageInfo + +func (m *RuntimeUInt32) GetDefaultValue() uint32 { + if m != nil { + return m.DefaultValue + } + return 0 +} + +func (m *RuntimeUInt32) GetRuntimeKey() string { + if m != nil { + return m.RuntimeKey + } + return "" +} + +type HeaderValue struct { + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HeaderValue) Reset() { *m = HeaderValue{} } +func (m *HeaderValue) String() string { return proto.CompactTextString(m) } +func (*HeaderValue) ProtoMessage() {} +func (*HeaderValue) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{4} +} +func (m *HeaderValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HeaderValue.Unmarshal(m, b) +} +func (m *HeaderValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HeaderValue.Marshal(b, m, deterministic) +} +func (dst *HeaderValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_HeaderValue.Merge(dst, src) +} +func (m *HeaderValue) XXX_Size() int { + return xxx_messageInfo_HeaderValue.Size(m) +} +func (m *HeaderValue) XXX_DiscardUnknown() { + xxx_messageInfo_HeaderValue.DiscardUnknown(m) +} + +var xxx_messageInfo_HeaderValue proto.InternalMessageInfo + +func (m *HeaderValue) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *HeaderValue) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +type HeaderValueOption struct { + Header *HeaderValue `protobuf:"bytes,1,opt,name=header,proto3" json:"header,omitempty"` + Append *wrappers.BoolValue `protobuf:"bytes,2,opt,name=append,proto3" json:"append,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HeaderValueOption) Reset() { *m = HeaderValueOption{} } +func (m *HeaderValueOption) String() string { return proto.CompactTextString(m) } +func (*HeaderValueOption) ProtoMessage() {} +func (*HeaderValueOption) Descriptor() ([]byte, []int) { + return 
fileDescriptor_base_33c58439b08f821d, []int{5} +} +func (m *HeaderValueOption) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HeaderValueOption.Unmarshal(m, b) +} +func (m *HeaderValueOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HeaderValueOption.Marshal(b, m, deterministic) +} +func (dst *HeaderValueOption) XXX_Merge(src proto.Message) { + xxx_messageInfo_HeaderValueOption.Merge(dst, src) +} +func (m *HeaderValueOption) XXX_Size() int { + return xxx_messageInfo_HeaderValueOption.Size(m) +} +func (m *HeaderValueOption) XXX_DiscardUnknown() { + xxx_messageInfo_HeaderValueOption.DiscardUnknown(m) +} + +var xxx_messageInfo_HeaderValueOption proto.InternalMessageInfo + +func (m *HeaderValueOption) GetHeader() *HeaderValue { + if m != nil { + return m.Header + } + return nil +} + +func (m *HeaderValueOption) GetAppend() *wrappers.BoolValue { + if m != nil { + return m.Append + } + return nil +} + +type HeaderMap struct { + Headers []*HeaderValue `protobuf:"bytes,1,rep,name=headers,proto3" json:"headers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HeaderMap) Reset() { *m = HeaderMap{} } +func (m *HeaderMap) String() string { return proto.CompactTextString(m) } +func (*HeaderMap) ProtoMessage() {} +func (*HeaderMap) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{6} +} +func (m *HeaderMap) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HeaderMap.Unmarshal(m, b) +} +func (m *HeaderMap) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HeaderMap.Marshal(b, m, deterministic) +} +func (dst *HeaderMap) XXX_Merge(src proto.Message) { + xxx_messageInfo_HeaderMap.Merge(dst, src) +} +func (m *HeaderMap) XXX_Size() int { + return xxx_messageInfo_HeaderMap.Size(m) +} +func (m *HeaderMap) XXX_DiscardUnknown() { + xxx_messageInfo_HeaderMap.DiscardUnknown(m) +} + +var xxx_messageInfo_HeaderMap proto.InternalMessageInfo + +func (m *HeaderMap) GetHeaders() []*HeaderValue { + if m != nil { + return m.Headers + } + return nil +} + +type DataSource struct { + // Types that are valid to be assigned to Specifier: + // *DataSource_Filename + // *DataSource_InlineBytes + // *DataSource_InlineString + Specifier isDataSource_Specifier `protobuf_oneof:"specifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DataSource) Reset() { *m = DataSource{} } +func (m *DataSource) String() string { return proto.CompactTextString(m) } +func (*DataSource) ProtoMessage() {} +func (*DataSource) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{7} +} +func (m *DataSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DataSource.Unmarshal(m, b) +} +func (m *DataSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DataSource.Marshal(b, m, deterministic) +} +func (dst *DataSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_DataSource.Merge(dst, src) +} +func (m *DataSource) XXX_Size() int { + return xxx_messageInfo_DataSource.Size(m) +} +func (m *DataSource) XXX_DiscardUnknown() { + xxx_messageInfo_DataSource.DiscardUnknown(m) +} + +var xxx_messageInfo_DataSource proto.InternalMessageInfo + +type isDataSource_Specifier interface { + isDataSource_Specifier() +} + +type DataSource_Filename struct { + Filename string 
`protobuf:"bytes,1,opt,name=filename,proto3,oneof"` +} + +type DataSource_InlineBytes struct { + InlineBytes []byte `protobuf:"bytes,2,opt,name=inline_bytes,json=inlineBytes,proto3,oneof"` +} + +type DataSource_InlineString struct { + InlineString string `protobuf:"bytes,3,opt,name=inline_string,json=inlineString,proto3,oneof"` +} + +func (*DataSource_Filename) isDataSource_Specifier() {} + +func (*DataSource_InlineBytes) isDataSource_Specifier() {} + +func (*DataSource_InlineString) isDataSource_Specifier() {} + +func (m *DataSource) GetSpecifier() isDataSource_Specifier { + if m != nil { + return m.Specifier + } + return nil +} + +func (m *DataSource) GetFilename() string { + if x, ok := m.GetSpecifier().(*DataSource_Filename); ok { + return x.Filename + } + return "" +} + +func (m *DataSource) GetInlineBytes() []byte { + if x, ok := m.GetSpecifier().(*DataSource_InlineBytes); ok { + return x.InlineBytes + } + return nil +} + +func (m *DataSource) GetInlineString() string { + if x, ok := m.GetSpecifier().(*DataSource_InlineString); ok { + return x.InlineString + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*DataSource) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _DataSource_OneofMarshaler, _DataSource_OneofUnmarshaler, _DataSource_OneofSizer, []interface{}{ + (*DataSource_Filename)(nil), + (*DataSource_InlineBytes)(nil), + (*DataSource_InlineString)(nil), + } +} + +func _DataSource_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*DataSource) + // specifier + switch x := m.Specifier.(type) { + case *DataSource_Filename: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Filename) + case *DataSource_InlineBytes: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.InlineBytes) + case *DataSource_InlineString: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.InlineString) + case nil: + default: + return fmt.Errorf("DataSource.Specifier has unexpected type %T", x) + } + return nil +} + +func _DataSource_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*DataSource) + switch tag { + case 1: // specifier.filename + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Specifier = &DataSource_Filename{x} + return true, err + case 2: // specifier.inline_bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Specifier = &DataSource_InlineBytes{x} + return true, err + case 3: // specifier.inline_string + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Specifier = &DataSource_InlineString{x} + return true, err + default: + return false, nil + } +} + +func _DataSource_OneofSizer(msg proto.Message) (n int) { + m := msg.(*DataSource) + // specifier + switch x := m.Specifier.(type) { + case *DataSource_Filename: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Filename))) + n += len(x.Filename) + case *DataSource_InlineBytes: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.InlineBytes))) + n += len(x.InlineBytes) + case *DataSource_InlineString: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.InlineString))) + n += len(x.InlineString) + case nil: + default: + 
panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type TransportSocket struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to ConfigType: + // *TransportSocket_Config + // *TransportSocket_TypedConfig + ConfigType isTransportSocket_ConfigType `protobuf_oneof:"config_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TransportSocket) Reset() { *m = TransportSocket{} } +func (m *TransportSocket) String() string { return proto.CompactTextString(m) } +func (*TransportSocket) ProtoMessage() {} +func (*TransportSocket) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{8} +} +func (m *TransportSocket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TransportSocket.Unmarshal(m, b) +} +func (m *TransportSocket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TransportSocket.Marshal(b, m, deterministic) +} +func (dst *TransportSocket) XXX_Merge(src proto.Message) { + xxx_messageInfo_TransportSocket.Merge(dst, src) +} +func (m *TransportSocket) XXX_Size() int { + return xxx_messageInfo_TransportSocket.Size(m) +} +func (m *TransportSocket) XXX_DiscardUnknown() { + xxx_messageInfo_TransportSocket.DiscardUnknown(m) +} + +var xxx_messageInfo_TransportSocket proto.InternalMessageInfo + +func (m *TransportSocket) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isTransportSocket_ConfigType interface { + isTransportSocket_ConfigType() +} + +type TransportSocket_Config struct { + Config *_struct.Struct `protobuf:"bytes,2,opt,name=config,proto3,oneof"` +} + +type TransportSocket_TypedConfig struct { + TypedConfig *any.Any `protobuf:"bytes,3,opt,name=typed_config,json=typedConfig,proto3,oneof"` +} + +func (*TransportSocket_Config) isTransportSocket_ConfigType() {} + +func (*TransportSocket_TypedConfig) isTransportSocket_ConfigType() {} + +func (m *TransportSocket) GetConfigType() isTransportSocket_ConfigType { + if m != nil { + return m.ConfigType + } + return nil +} + +func (m *TransportSocket) GetConfig() *_struct.Struct { + if x, ok := m.GetConfigType().(*TransportSocket_Config); ok { + return x.Config + } + return nil +} + +func (m *TransportSocket) GetTypedConfig() *any.Any { + if x, ok := m.GetConfigType().(*TransportSocket_TypedConfig); ok { + return x.TypedConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*TransportSocket) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _TransportSocket_OneofMarshaler, _TransportSocket_OneofUnmarshaler, _TransportSocket_OneofSizer, []interface{}{ + (*TransportSocket_Config)(nil), + (*TransportSocket_TypedConfig)(nil), + } +} + +func _TransportSocket_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*TransportSocket) + // config_type + switch x := m.ConfigType.(type) { + case *TransportSocket_Config: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Config); err != nil { + return err + } + case *TransportSocket_TypedConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TypedConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("TransportSocket.ConfigType has unexpected type %T", x) + } + return nil +} + +func _TransportSocket_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*TransportSocket) + switch tag { + case 2: // config_type.config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.ConfigType = &TransportSocket_Config{msg} + return true, err + case 3: // config_type.typed_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.ConfigType = &TransportSocket_TypedConfig{msg} + return true, err + default: + return false, nil + } +} + +func _TransportSocket_OneofSizer(msg proto.Message) (n int) { + m := msg.(*TransportSocket) + // config_type + switch x := m.ConfigType.(type) { + case *TransportSocket_Config: + s := proto.Size(x.Config) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *TransportSocket_TypedConfig: + s := proto.Size(x.TypedConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SocketOption struct { + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + Level int64 `protobuf:"varint,2,opt,name=level,proto3" json:"level,omitempty"` + Name int64 `protobuf:"varint,3,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to Value: + // *SocketOption_IntValue + // *SocketOption_BufValue + Value isSocketOption_Value `protobuf_oneof:"value"` + State SocketOption_SocketState `protobuf:"varint,6,opt,name=state,proto3,enum=envoy.api.v2.core.SocketOption_SocketState" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketOption) Reset() { *m = SocketOption{} } +func (m *SocketOption) String() string { return proto.CompactTextString(m) } +func (*SocketOption) ProtoMessage() {} +func (*SocketOption) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{9} +} +func (m *SocketOption) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketOption.Unmarshal(m, b) +} +func (m *SocketOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketOption.Marshal(b, m, deterministic) +} +func (dst *SocketOption) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketOption.Merge(dst, src) +} 
+func (m *SocketOption) XXX_Size() int { + return xxx_messageInfo_SocketOption.Size(m) +} +func (m *SocketOption) XXX_DiscardUnknown() { + xxx_messageInfo_SocketOption.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketOption proto.InternalMessageInfo + +func (m *SocketOption) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *SocketOption) GetLevel() int64 { + if m != nil { + return m.Level + } + return 0 +} + +func (m *SocketOption) GetName() int64 { + if m != nil { + return m.Name + } + return 0 +} + +type isSocketOption_Value interface { + isSocketOption_Value() +} + +type SocketOption_IntValue struct { + IntValue int64 `protobuf:"varint,4,opt,name=int_value,json=intValue,proto3,oneof"` +} + +type SocketOption_BufValue struct { + BufValue []byte `protobuf:"bytes,5,opt,name=buf_value,json=bufValue,proto3,oneof"` +} + +func (*SocketOption_IntValue) isSocketOption_Value() {} + +func (*SocketOption_BufValue) isSocketOption_Value() {} + +func (m *SocketOption) GetValue() isSocketOption_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *SocketOption) GetIntValue() int64 { + if x, ok := m.GetValue().(*SocketOption_IntValue); ok { + return x.IntValue + } + return 0 +} + +func (m *SocketOption) GetBufValue() []byte { + if x, ok := m.GetValue().(*SocketOption_BufValue); ok { + return x.BufValue + } + return nil +} + +func (m *SocketOption) GetState() SocketOption_SocketState { + if m != nil { + return m.State + } + return SocketOption_STATE_PREBIND +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SocketOption) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SocketOption_OneofMarshaler, _SocketOption_OneofUnmarshaler, _SocketOption_OneofSizer, []interface{}{ + (*SocketOption_IntValue)(nil), + (*SocketOption_BufValue)(nil), + } +} + +func _SocketOption_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SocketOption) + // value + switch x := m.Value.(type) { + case *SocketOption_IntValue: + b.EncodeVarint(4<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.IntValue)) + case *SocketOption_BufValue: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeRawBytes(x.BufValue) + case nil: + default: + return fmt.Errorf("SocketOption.Value has unexpected type %T", x) + } + return nil +} + +func _SocketOption_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SocketOption) + switch tag { + case 4: // value.int_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &SocketOption_IntValue{int64(x)} + return true, err + case 5: // value.buf_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Value = &SocketOption_BufValue{x} + return true, err + default: + return false, nil + } +} + +func _SocketOption_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SocketOption) + // value + switch x := m.Value.(type) { + case *SocketOption_IntValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.IntValue)) + case *SocketOption_BufValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.BufValue))) + n += len(x.BufValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type RuntimeFractionalPercent 
struct { + DefaultValue *percent.FractionalPercent `protobuf:"bytes,1,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` + RuntimeKey string `protobuf:"bytes,2,opt,name=runtime_key,json=runtimeKey,proto3" json:"runtime_key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RuntimeFractionalPercent) Reset() { *m = RuntimeFractionalPercent{} } +func (m *RuntimeFractionalPercent) String() string { return proto.CompactTextString(m) } +func (*RuntimeFractionalPercent) ProtoMessage() {} +func (*RuntimeFractionalPercent) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{10} +} +func (m *RuntimeFractionalPercent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RuntimeFractionalPercent.Unmarshal(m, b) +} +func (m *RuntimeFractionalPercent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RuntimeFractionalPercent.Marshal(b, m, deterministic) +} +func (dst *RuntimeFractionalPercent) XXX_Merge(src proto.Message) { + xxx_messageInfo_RuntimeFractionalPercent.Merge(dst, src) +} +func (m *RuntimeFractionalPercent) XXX_Size() int { + return xxx_messageInfo_RuntimeFractionalPercent.Size(m) +} +func (m *RuntimeFractionalPercent) XXX_DiscardUnknown() { + xxx_messageInfo_RuntimeFractionalPercent.DiscardUnknown(m) +} + +var xxx_messageInfo_RuntimeFractionalPercent proto.InternalMessageInfo + +func (m *RuntimeFractionalPercent) GetDefaultValue() *percent.FractionalPercent { + if m != nil { + return m.DefaultValue + } + return nil +} + +func (m *RuntimeFractionalPercent) GetRuntimeKey() string { + if m != nil { + return m.RuntimeKey + } + return "" +} + +type ControlPlane struct { + Identifier string `protobuf:"bytes,1,opt,name=identifier,proto3" json:"identifier,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ControlPlane) Reset() { *m = ControlPlane{} } +func (m *ControlPlane) String() string { return proto.CompactTextString(m) } +func (*ControlPlane) ProtoMessage() {} +func (*ControlPlane) Descriptor() ([]byte, []int) { + return fileDescriptor_base_33c58439b08f821d, []int{11} +} +func (m *ControlPlane) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ControlPlane.Unmarshal(m, b) +} +func (m *ControlPlane) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ControlPlane.Marshal(b, m, deterministic) +} +func (dst *ControlPlane) XXX_Merge(src proto.Message) { + xxx_messageInfo_ControlPlane.Merge(dst, src) +} +func (m *ControlPlane) XXX_Size() int { + return xxx_messageInfo_ControlPlane.Size(m) +} +func (m *ControlPlane) XXX_DiscardUnknown() { + xxx_messageInfo_ControlPlane.DiscardUnknown(m) +} + +var xxx_messageInfo_ControlPlane proto.InternalMessageInfo + +func (m *ControlPlane) GetIdentifier() string { + if m != nil { + return m.Identifier + } + return "" +} + +func init() { + proto.RegisterType((*Locality)(nil), "envoy.api.v2.core.Locality") + proto.RegisterType((*Node)(nil), "envoy.api.v2.core.Node") + proto.RegisterType((*Metadata)(nil), "envoy.api.v2.core.Metadata") + proto.RegisterMapType((map[string]*_struct.Struct)(nil), "envoy.api.v2.core.Metadata.FilterMetadataEntry") + proto.RegisterType((*RuntimeUInt32)(nil), "envoy.api.v2.core.RuntimeUInt32") + proto.RegisterType((*HeaderValue)(nil), "envoy.api.v2.core.HeaderValue") + proto.RegisterType((*HeaderValueOption)(nil), 
"envoy.api.v2.core.HeaderValueOption") + proto.RegisterType((*HeaderMap)(nil), "envoy.api.v2.core.HeaderMap") + proto.RegisterType((*DataSource)(nil), "envoy.api.v2.core.DataSource") + proto.RegisterType((*TransportSocket)(nil), "envoy.api.v2.core.TransportSocket") + proto.RegisterType((*SocketOption)(nil), "envoy.api.v2.core.SocketOption") + proto.RegisterType((*RuntimeFractionalPercent)(nil), "envoy.api.v2.core.RuntimeFractionalPercent") + proto.RegisterType((*ControlPlane)(nil), "envoy.api.v2.core.ControlPlane") + proto.RegisterEnum("envoy.api.v2.core.RoutingPriority", RoutingPriority_name, RoutingPriority_value) + proto.RegisterEnum("envoy.api.v2.core.RequestMethod", RequestMethod_name, RequestMethod_value) + proto.RegisterEnum("envoy.api.v2.core.SocketOption_SocketState", SocketOption_SocketState_name, SocketOption_SocketState_value) +} + +func init() { proto.RegisterFile("envoy/api/v2/core/base.proto", fileDescriptor_base_33c58439b08f821d) } + +var fileDescriptor_base_33c58439b08f821d = []byte{ + // 1117 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x55, 0xcd, 0x6e, 0xdb, 0x46, + 0x10, 0x36, 0xf5, 0x67, 0x69, 0x28, 0xd9, 0xcc, 0x26, 0x48, 0x14, 0x37, 0x4e, 0x5c, 0xf5, 0x50, + 0x23, 0x45, 0xa9, 0x56, 0x29, 0xd0, 0xb4, 0x37, 0xd3, 0xa2, 0x23, 0xa1, 0xb6, 0xa4, 0x50, 0x74, + 0x5a, 0xe4, 0xa2, 0xae, 0xc4, 0x95, 0xb3, 0x08, 0xc3, 0x65, 0x97, 0x4b, 0xb5, 0xca, 0xa1, 0x08, + 0x7a, 0x6c, 0x1e, 0xa5, 0xe8, 0xbd, 0xe8, 0x29, 0x40, 0x4f, 0x7d, 0x94, 0xbc, 0x45, 0xb1, 0x3f, + 0x72, 0xe4, 0xd8, 0x68, 0x6e, 0xbb, 0xdf, 0x7c, 0xdf, 0x70, 0x66, 0x76, 0x66, 0x08, 0x77, 0x48, + 0xb2, 0x60, 0xcb, 0x36, 0x4e, 0x69, 0x7b, 0xd1, 0x69, 0xcf, 0x18, 0x27, 0xed, 0x29, 0xce, 0x88, + 0x9b, 0x72, 0x26, 0x18, 0xba, 0xa6, 0xac, 0x2e, 0x4e, 0xa9, 0xbb, 0xe8, 0xb8, 0xd2, 0xba, 0x73, + 0xfb, 0x8c, 0xb1, 0xb3, 0x98, 0xb4, 0x15, 0x61, 0x9a, 0xcf, 0xdb, 0x38, 0x59, 0x6a, 0xf6, 0xce, + 0x9d, 0xf7, 0x4d, 0x99, 0xe0, 0xf9, 0x4c, 0x18, 0xeb, 0xdd, 0xf7, 0xad, 0x3f, 0x73, 0x9c, 0xa6, + 0x84, 0x67, 0xc6, 0x7e, 0x6b, 0x81, 0x63, 0x1a, 0x61, 0x41, 0xda, 0xab, 0x83, 0x31, 0x34, 0x75, + 0x88, 0x62, 0x99, 0x92, 0x76, 0x4a, 0xf8, 0x8c, 0x24, 0xc6, 0x65, 0xeb, 0x31, 0x54, 0x8f, 0xd9, + 0x0c, 0xc7, 0x54, 0x2c, 0xd1, 0x4d, 0xa8, 0x70, 0x72, 0x46, 0x59, 0xd2, 0xb4, 0xf6, 0xac, 0xfd, + 0x5a, 0x60, 0x6e, 0x08, 0x41, 0xe9, 0x25, 0x4b, 0x48, 0xb3, 0xa0, 0x50, 0x75, 0x46, 0xb7, 0xa1, + 0x9a, 0xe5, 0xd3, 0x89, 0xc2, 0x8b, 0x0a, 0xdf, 0xcc, 0xf2, 0xe9, 0x53, 0x96, 0x90, 0xd6, 0x3f, + 0x16, 0x94, 0x06, 0x2c, 0x22, 0x68, 0x0b, 0x0a, 0x34, 0x32, 0xbe, 0x0a, 0x34, 0x42, 0x4d, 0xd8, + 0x9c, 0xc5, 0x79, 0x26, 0x08, 0x37, 0xae, 0x56, 0x57, 0xf4, 0x00, 0xaa, 0x2f, 0x88, 0xc0, 0x11, + 0x16, 0x58, 0x79, 0xb3, 0x3b, 0xb7, 0x5c, 0x9d, 0xab, 0xbb, 0xca, 0xd5, 0x1d, 0xab, 0x4a, 0x04, + 0xe7, 0x44, 0xf4, 0x35, 0x54, 0x63, 0x13, 0x7a, 0xb3, 0xa4, 0x44, 0x1f, 0xb9, 0x97, 0x8a, 0xed, + 0xae, 0xb2, 0x0b, 0xce, 0xc9, 0xe8, 0x13, 0x68, 0x4c, 0x73, 0x1a, 0x47, 0x93, 0x05, 0xe1, 0x99, + 0x4c, 0xb7, 0xac, 0xa2, 0xa9, 0x2b, 0xf0, 0x89, 0xc6, 0x5a, 0x6f, 0x2c, 0xa8, 0x9e, 0xac, 0x3e, + 0xf5, 0x03, 0x6c, 0xcf, 0x69, 0x2c, 0x08, 0x9f, 0x9c, 0x87, 0x69, 0xed, 0x15, 0xf7, 0xed, 0x4e, + 0xfb, 0x8a, 0x2f, 0xae, 0x54, 0xee, 0x91, 0x92, 0xac, 0xae, 0x7e, 0x22, 0xf8, 0x32, 0xd8, 0x9a, + 0x5f, 0x00, 0x77, 0x9e, 0xc2, 0xf5, 0x2b, 0x68, 0xc8, 0x81, 0xe2, 0x73, 0xb2, 0x34, 0xb5, 0x93, + 0x47, 0xf4, 0x39, 0x94, 0x17, 0x38, 0xce, 0xf5, 0x2b, 0xfc, 0x4f, 0x7d, 0x34, 0xeb, 0xdb, 0xc2, + 0x43, 0xab, 0xf5, 0x23, 0x34, 0x82, 
0x3c, 0x11, 0xf4, 0x05, 0x39, 0xed, 0x27, 0xe2, 0x41, 0x47, + 0x26, 0x1e, 0x91, 0x39, 0xce, 0x63, 0x31, 0x79, 0xe7, 0xab, 0x11, 0xd4, 0x0d, 0xf8, 0x44, 0x62, + 0xe8, 0x3e, 0xd8, 0x5c, 0xab, 0x26, 0x32, 0x04, 0xf5, 0xb8, 0x5e, 0xed, 0xef, 0xb7, 0x6f, 0x8a, + 0x25, 0x5e, 0xd8, 0xb3, 0x02, 0x30, 0xd6, 0xef, 0xc8, 0xb2, 0xf5, 0x18, 0xec, 0x1e, 0xc1, 0x11, + 0xe1, 0x5a, 0x7a, 0x6f, 0x2d, 0x6a, 0xaf, 0x21, 0x25, 0x55, 0x5e, 0xd9, 0xb3, 0xf6, 0x5f, 0xbd, + 0xb2, 0x74, 0x12, 0x1f, 0xaf, 0x27, 0x51, 0xf3, 0x6c, 0x49, 0xa9, 0xf0, 0x92, 0x22, 0x68, 0x4b, + 0xeb, 0xb5, 0x05, 0xd7, 0xd6, 0x7c, 0x0e, 0x53, 0x21, 0x5b, 0xd0, 0x83, 0xca, 0x33, 0x05, 0x2a, + 0xe7, 0x76, 0xe7, 0xee, 0x15, 0x75, 0x5f, 0x53, 0x79, 0x20, 0x3d, 0x97, 0x7f, 0xb7, 0x0a, 0x8e, + 0x15, 0x18, 0x25, 0xea, 0x40, 0x45, 0x4e, 0x4b, 0x12, 0x99, 0x12, 0xee, 0x5c, 0x2a, 0xa1, 0xc7, + 0x58, 0xac, 0xf4, 0x81, 0x61, 0xb6, 0x7c, 0xa8, 0x69, 0xb7, 0x27, 0x38, 0x45, 0x0f, 0x61, 0x53, + 0xbb, 0xca, 0xcc, 0xeb, 0x7f, 0x20, 0x8a, 0x60, 0x45, 0x6f, 0xfd, 0x61, 0x01, 0x74, 0xb1, 0xc0, + 0x63, 0x96, 0xf3, 0x19, 0x41, 0x9f, 0x42, 0x75, 0x4e, 0x63, 0x92, 0xe0, 0x17, 0xc4, 0x14, 0xeb, + 0x5d, 0x7d, 0x7b, 0x1b, 0xc1, 0xb9, 0x11, 0xb9, 0x50, 0xa7, 0x49, 0x4c, 0x13, 0x32, 0x99, 0x2e, + 0x05, 0xc9, 0x54, 0xe0, 0x75, 0x43, 0x7e, 0x59, 0x70, 0x24, 0xd9, 0xd6, 0x04, 0x4f, 0xda, 0xd1, + 0x17, 0xd0, 0x30, 0xfc, 0x4c, 0x70, 0x9a, 0x9c, 0x5d, 0x7a, 0xbd, 0xde, 0x46, 0x60, 0x3c, 0x8e, + 0x15, 0xc1, 0x43, 0x50, 0xcb, 0x52, 0x32, 0xa3, 0x73, 0x4a, 0x38, 0x2a, 0xff, 0xf5, 0xf6, 0x4d, + 0xd1, 0x6a, 0xfd, 0x69, 0xc1, 0x76, 0xc8, 0x71, 0x92, 0xa5, 0x8c, 0x8b, 0x31, 0x9b, 0x3d, 0x27, + 0x02, 0xed, 0x42, 0xe9, 0xca, 0x70, 0x03, 0x05, 0xa3, 0x2f, 0xa1, 0x32, 0x63, 0xc9, 0x9c, 0x9e, + 0x7d, 0xa0, 0x3d, 0x7b, 0x1b, 0x81, 0x21, 0xa2, 0x6f, 0xa0, 0x2e, 0xf7, 0x51, 0x34, 0x31, 0x42, + 0x3d, 0xf7, 0x37, 0x2e, 0x09, 0x0f, 0x92, 0xa5, 0x4c, 0x53, 0x71, 0x0f, 0x15, 0xd5, 0x6b, 0x80, + 0xad, 0x45, 0x13, 0x89, 0xb6, 0xfe, 0x2d, 0x40, 0x5d, 0x87, 0x69, 0xba, 0x65, 0x0f, 0xec, 0x88, + 0x64, 0x33, 0x4e, 0xd5, 0xd5, 0x4c, 0xd1, 0x3a, 0x84, 0x6e, 0x40, 0x39, 0x26, 0x0b, 0x12, 0xab, + 0x70, 0x8b, 0x81, 0xbe, 0xc8, 0x45, 0xa7, 0x92, 0x2c, 0x2a, 0x50, 0x67, 0xb6, 0x0b, 0x35, 0x9a, + 0xac, 0xe6, 0x45, 0xae, 0x99, 0xa2, 0x7c, 0x21, 0x9a, 0x98, 0x69, 0xd9, 0x85, 0xda, 0x34, 0x9f, + 0x1b, 0xb3, 0xdc, 0x23, 0x75, 0x69, 0x9e, 0xe6, 0x73, 0x6d, 0xfe, 0x1e, 0xca, 0x99, 0xc0, 0x82, + 0x34, 0xe5, 0x18, 0x6c, 0x75, 0x3e, 0xbb, 0xa2, 0x61, 0xd6, 0x23, 0x37, 0x97, 0xb1, 0x94, 0x78, + 0x37, 0xde, 0xf5, 0xb0, 0x3a, 0xfd, 0xa6, 0xba, 0x59, 0xfb, 0x6b, 0x1d, 0x81, 0xbd, 0xc6, 0x45, + 0xd7, 0xa0, 0x31, 0x0e, 0x0f, 0x42, 0x7f, 0x32, 0x0a, 0x7c, 0xaf, 0x3f, 0xe8, 0x3a, 0x1b, 0x68, + 0x1b, 0x6c, 0x0d, 0x79, 0xc3, 0xd3, 0x41, 0xd7, 0xb1, 0xd0, 0x75, 0xd8, 0xd6, 0xc0, 0x71, 0x7f, + 0x1c, 0xfa, 0x83, 0xfe, 0xe0, 0x91, 0x53, 0xf0, 0xb6, 0xcc, 0x44, 0xae, 0xde, 0xfe, 0xb5, 0x05, + 0x4d, 0xb3, 0x34, 0x8e, 0x38, 0x9e, 0xc9, 0xa0, 0x70, 0x3c, 0xd2, 0xbf, 0x0c, 0x34, 0x78, 0x7f, + 0x7f, 0xe8, 0x61, 0xdc, 0x35, 0x59, 0xc9, 0xc7, 0x70, 0x2f, 0xa9, 0x2e, 0xcc, 0xe2, 0xc5, 0x55, + 0x73, 0xef, 0xe2, 0xaa, 0xd1, 0x3f, 0x85, 0xf5, 0xfd, 0xe2, 0x42, 0xfd, 0x90, 0x25, 0x82, 0xb3, + 0x78, 0x14, 0xe3, 0x84, 0xa0, 0xbb, 0x00, 0x34, 0x22, 0x89, 0x50, 0xed, 0x6a, 0xde, 0x75, 0x0d, + 0xb9, 0xbf, 0x0f, 0xdb, 0x01, 0xcb, 0x05, 0x4d, 0xce, 0x46, 0x9c, 0x32, 0x2e, 0x97, 0xbd, 0x0d, + 0x9b, 0x5d, 0xff, 0xe8, 0xe0, 0xf4, 0x38, 0x74, 0x36, 0x50, 0x15, 0x4a, 0xbd, 0xfe, 0xa3, 0x9e, + 0x63, 0xdd, 0xff, 0x15, 0x1a, 0x01, 0xf9, 0x29, 0x27, 0x99, 
0x38, 0x21, 0xe2, 0x19, 0x8b, 0xd0, + 0x4d, 0x40, 0x27, 0x7e, 0xd8, 0x1b, 0x76, 0x27, 0xa7, 0x83, 0xf1, 0xc8, 0x3f, 0xec, 0x1f, 0xf5, + 0x7d, 0x59, 0xc6, 0x4d, 0x28, 0x3e, 0xf2, 0x43, 0xc7, 0x52, 0x5a, 0xff, 0xa0, 0xeb, 0x14, 0xe4, + 0x69, 0x34, 0x1c, 0x87, 0x4e, 0x51, 0x1a, 0x47, 0xa7, 0xa1, 0x53, 0x42, 0x00, 0x95, 0xae, 0x7f, + 0xec, 0x87, 0xbe, 0x53, 0x96, 0x5f, 0x3c, 0x1c, 0x0e, 0x06, 0xfe, 0x61, 0xe8, 0x54, 0xe4, 0x65, + 0x38, 0x0a, 0xfb, 0xc3, 0xc1, 0xd8, 0xd9, 0x44, 0x35, 0x28, 0x87, 0xc1, 0xc1, 0xa1, 0xef, 0x54, + 0xbd, 0xaf, 0xe0, 0x1e, 0x65, 0xba, 0x6e, 0x29, 0x67, 0xbf, 0x2c, 0x2f, 0x37, 0x86, 0x57, 0xf3, + 0x70, 0x46, 0x46, 0x72, 0x0c, 0x46, 0xd6, 0xd3, 0x92, 0x84, 0xa6, 0x15, 0x35, 0x15, 0x0f, 0xfe, + 0x0b, 0x00, 0x00, 0xff, 0xff, 0x42, 0xb6, 0x68, 0xfe, 0x73, 0x08, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source/config_source.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source/config_source.pb.go new file mode 100755 index 0000000..d8b71b6 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/config_source/config_source.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/config_source.proto + +package envoy_api_v2_core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import grpc_service "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/grpc_service" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ApiConfigSource_ApiType int32 + +const ( + ApiConfigSource_UNSUPPORTED_REST_LEGACY ApiConfigSource_ApiType = 0 // Deprecated: Do not use. 
+ ApiConfigSource_REST ApiConfigSource_ApiType = 1 + ApiConfigSource_GRPC ApiConfigSource_ApiType = 2 + ApiConfigSource_DELTA_GRPC ApiConfigSource_ApiType = 3 +) + +var ApiConfigSource_ApiType_name = map[int32]string{ + 0: "UNSUPPORTED_REST_LEGACY", + 1: "REST", + 2: "GRPC", + 3: "DELTA_GRPC", +} +var ApiConfigSource_ApiType_value = map[string]int32{ + "UNSUPPORTED_REST_LEGACY": 0, + "REST": 1, + "GRPC": 2, + "DELTA_GRPC": 3, +} + +func (x ApiConfigSource_ApiType) String() string { + return proto.EnumName(ApiConfigSource_ApiType_name, int32(x)) +} +func (ApiConfigSource_ApiType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_config_source_d368846786cd1f7d, []int{0, 0} +} + +type ApiConfigSource struct { + ApiType ApiConfigSource_ApiType `protobuf:"varint,1,opt,name=api_type,json=apiType,proto3,enum=envoy.api.v2.core.ApiConfigSource_ApiType" json:"api_type,omitempty"` + ClusterNames []string `protobuf:"bytes,2,rep,name=cluster_names,json=clusterNames,proto3" json:"cluster_names,omitempty"` + GrpcServices []*grpc_service.GrpcService `protobuf:"bytes,4,rep,name=grpc_services,json=grpcServices,proto3" json:"grpc_services,omitempty"` + RefreshDelay *duration.Duration `protobuf:"bytes,3,opt,name=refresh_delay,json=refreshDelay,proto3" json:"refresh_delay,omitempty"` + RequestTimeout *duration.Duration `protobuf:"bytes,5,opt,name=request_timeout,json=requestTimeout,proto3" json:"request_timeout,omitempty"` + RateLimitSettings *RateLimitSettings `protobuf:"bytes,6,opt,name=rate_limit_settings,json=rateLimitSettings,proto3" json:"rate_limit_settings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ApiConfigSource) Reset() { *m = ApiConfigSource{} } +func (m *ApiConfigSource) String() string { return proto.CompactTextString(m) } +func (*ApiConfigSource) ProtoMessage() {} +func (*ApiConfigSource) Descriptor() ([]byte, []int) { + return fileDescriptor_config_source_d368846786cd1f7d, []int{0} +} +func (m *ApiConfigSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ApiConfigSource.Unmarshal(m, b) +} +func (m *ApiConfigSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ApiConfigSource.Marshal(b, m, deterministic) +} +func (dst *ApiConfigSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ApiConfigSource.Merge(dst, src) +} +func (m *ApiConfigSource) XXX_Size() int { + return xxx_messageInfo_ApiConfigSource.Size(m) +} +func (m *ApiConfigSource) XXX_DiscardUnknown() { + xxx_messageInfo_ApiConfigSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ApiConfigSource proto.InternalMessageInfo + +func (m *ApiConfigSource) GetApiType() ApiConfigSource_ApiType { + if m != nil { + return m.ApiType + } + return ApiConfigSource_UNSUPPORTED_REST_LEGACY +} + +func (m *ApiConfigSource) GetClusterNames() []string { + if m != nil { + return m.ClusterNames + } + return nil +} + +func (m *ApiConfigSource) GetGrpcServices() []*grpc_service.GrpcService { + if m != nil { + return m.GrpcServices + } + return nil +} + +func (m *ApiConfigSource) GetRefreshDelay() *duration.Duration { + if m != nil { + return m.RefreshDelay + } + return nil +} + +func (m *ApiConfigSource) GetRequestTimeout() *duration.Duration { + if m != nil { + return m.RequestTimeout + } + return nil +} + +func (m *ApiConfigSource) GetRateLimitSettings() *RateLimitSettings { + if m != nil { + return m.RateLimitSettings + } + return nil +} + +type AggregatedConfigSource struct { + XXX_NoUnkeyedLiteral 
struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AggregatedConfigSource) Reset() { *m = AggregatedConfigSource{} } +func (m *AggregatedConfigSource) String() string { return proto.CompactTextString(m) } +func (*AggregatedConfigSource) ProtoMessage() {} +func (*AggregatedConfigSource) Descriptor() ([]byte, []int) { + return fileDescriptor_config_source_d368846786cd1f7d, []int{1} +} +func (m *AggregatedConfigSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AggregatedConfigSource.Unmarshal(m, b) +} +func (m *AggregatedConfigSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AggregatedConfigSource.Marshal(b, m, deterministic) +} +func (dst *AggregatedConfigSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_AggregatedConfigSource.Merge(dst, src) +} +func (m *AggregatedConfigSource) XXX_Size() int { + return xxx_messageInfo_AggregatedConfigSource.Size(m) +} +func (m *AggregatedConfigSource) XXX_DiscardUnknown() { + xxx_messageInfo_AggregatedConfigSource.DiscardUnknown(m) +} + +var xxx_messageInfo_AggregatedConfigSource proto.InternalMessageInfo + +type RateLimitSettings struct { + MaxTokens *wrappers.UInt32Value `protobuf:"bytes,1,opt,name=max_tokens,json=maxTokens,proto3" json:"max_tokens,omitempty"` + FillRate *wrappers.DoubleValue `protobuf:"bytes,2,opt,name=fill_rate,json=fillRate,proto3" json:"fill_rate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RateLimitSettings) Reset() { *m = RateLimitSettings{} } +func (m *RateLimitSettings) String() string { return proto.CompactTextString(m) } +func (*RateLimitSettings) ProtoMessage() {} +func (*RateLimitSettings) Descriptor() ([]byte, []int) { + return fileDescriptor_config_source_d368846786cd1f7d, []int{2} +} +func (m *RateLimitSettings) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RateLimitSettings.Unmarshal(m, b) +} +func (m *RateLimitSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RateLimitSettings.Marshal(b, m, deterministic) +} +func (dst *RateLimitSettings) XXX_Merge(src proto.Message) { + xxx_messageInfo_RateLimitSettings.Merge(dst, src) +} +func (m *RateLimitSettings) XXX_Size() int { + return xxx_messageInfo_RateLimitSettings.Size(m) +} +func (m *RateLimitSettings) XXX_DiscardUnknown() { + xxx_messageInfo_RateLimitSettings.DiscardUnknown(m) +} + +var xxx_messageInfo_RateLimitSettings proto.InternalMessageInfo + +func (m *RateLimitSettings) GetMaxTokens() *wrappers.UInt32Value { + if m != nil { + return m.MaxTokens + } + return nil +} + +func (m *RateLimitSettings) GetFillRate() *wrappers.DoubleValue { + if m != nil { + return m.FillRate + } + return nil +} + +type ConfigSource struct { + // Types that are valid to be assigned to ConfigSourceSpecifier: + // *ConfigSource_Path + // *ConfigSource_ApiConfigSource + // *ConfigSource_Ads + ConfigSourceSpecifier isConfigSource_ConfigSourceSpecifier `protobuf_oneof:"config_source_specifier"` + InitialFetchTimeout *duration.Duration `protobuf:"bytes,4,opt,name=initial_fetch_timeout,json=initialFetchTimeout,proto3" json:"initial_fetch_timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ConfigSource) Reset() { *m = ConfigSource{} } +func (m *ConfigSource) String() string { return proto.CompactTextString(m) } +func (*ConfigSource) 
ProtoMessage() {} +func (*ConfigSource) Descriptor() ([]byte, []int) { + return fileDescriptor_config_source_d368846786cd1f7d, []int{3} +} +func (m *ConfigSource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ConfigSource.Unmarshal(m, b) +} +func (m *ConfigSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ConfigSource.Marshal(b, m, deterministic) +} +func (dst *ConfigSource) XXX_Merge(src proto.Message) { + xxx_messageInfo_ConfigSource.Merge(dst, src) +} +func (m *ConfigSource) XXX_Size() int { + return xxx_messageInfo_ConfigSource.Size(m) +} +func (m *ConfigSource) XXX_DiscardUnknown() { + xxx_messageInfo_ConfigSource.DiscardUnknown(m) +} + +var xxx_messageInfo_ConfigSource proto.InternalMessageInfo + +type isConfigSource_ConfigSourceSpecifier interface { + isConfigSource_ConfigSourceSpecifier() +} + +type ConfigSource_Path struct { + Path string `protobuf:"bytes,1,opt,name=path,proto3,oneof"` +} + +type ConfigSource_ApiConfigSource struct { + ApiConfigSource *ApiConfigSource `protobuf:"bytes,2,opt,name=api_config_source,json=apiConfigSource,proto3,oneof"` +} + +type ConfigSource_Ads struct { + Ads *AggregatedConfigSource `protobuf:"bytes,3,opt,name=ads,proto3,oneof"` +} + +func (*ConfigSource_Path) isConfigSource_ConfigSourceSpecifier() {} + +func (*ConfigSource_ApiConfigSource) isConfigSource_ConfigSourceSpecifier() {} + +func (*ConfigSource_Ads) isConfigSource_ConfigSourceSpecifier() {} + +func (m *ConfigSource) GetConfigSourceSpecifier() isConfigSource_ConfigSourceSpecifier { + if m != nil { + return m.ConfigSourceSpecifier + } + return nil +} + +func (m *ConfigSource) GetPath() string { + if x, ok := m.GetConfigSourceSpecifier().(*ConfigSource_Path); ok { + return x.Path + } + return "" +} + +func (m *ConfigSource) GetApiConfigSource() *ApiConfigSource { + if x, ok := m.GetConfigSourceSpecifier().(*ConfigSource_ApiConfigSource); ok { + return x.ApiConfigSource + } + return nil +} + +func (m *ConfigSource) GetAds() *AggregatedConfigSource { + if x, ok := m.GetConfigSourceSpecifier().(*ConfigSource_Ads); ok { + return x.Ads + } + return nil +} + +func (m *ConfigSource) GetInitialFetchTimeout() *duration.Duration { + if m != nil { + return m.InitialFetchTimeout + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ConfigSource) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ConfigSource_OneofMarshaler, _ConfigSource_OneofUnmarshaler, _ConfigSource_OneofSizer, []interface{}{ + (*ConfigSource_Path)(nil), + (*ConfigSource_ApiConfigSource)(nil), + (*ConfigSource_Ads)(nil), + } +} + +func _ConfigSource_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ConfigSource) + // config_source_specifier + switch x := m.ConfigSourceSpecifier.(type) { + case *ConfigSource_Path: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Path) + case *ConfigSource_ApiConfigSource: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ApiConfigSource); err != nil { + return err + } + case *ConfigSource_Ads: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Ads); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ConfigSource.ConfigSourceSpecifier has unexpected type %T", x) + } + return nil +} + +func _ConfigSource_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ConfigSource) + switch tag { + case 1: // config_source_specifier.path + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.ConfigSourceSpecifier = &ConfigSource_Path{x} + return true, err + case 2: // config_source_specifier.api_config_source + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ApiConfigSource) + err := b.DecodeMessage(msg) + m.ConfigSourceSpecifier = &ConfigSource_ApiConfigSource{msg} + return true, err + case 3: // config_source_specifier.ads + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AggregatedConfigSource) + err := b.DecodeMessage(msg) + m.ConfigSourceSpecifier = &ConfigSource_Ads{msg} + return true, err + default: + return false, nil + } +} + +func _ConfigSource_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ConfigSource) + // config_source_specifier + switch x := m.ConfigSourceSpecifier.(type) { + case *ConfigSource_Path: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Path))) + n += len(x.Path) + case *ConfigSource_ApiConfigSource: + s := proto.Size(x.ApiConfigSource) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ConfigSource_Ads: + s := proto.Size(x.Ads) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ApiConfigSource)(nil), "envoy.api.v2.core.ApiConfigSource") + proto.RegisterType((*AggregatedConfigSource)(nil), "envoy.api.v2.core.AggregatedConfigSource") + proto.RegisterType((*RateLimitSettings)(nil), "envoy.api.v2.core.RateLimitSettings") + proto.RegisterType((*ConfigSource)(nil), "envoy.api.v2.core.ConfigSource") + proto.RegisterEnum("envoy.api.v2.core.ApiConfigSource_ApiType", ApiConfigSource_ApiType_name, ApiConfigSource_ApiType_value) +} + +func init() { + proto.RegisterFile("envoy/api/v2/core/config_source.proto", fileDescriptor_config_source_d368846786cd1f7d) +} + +var fileDescriptor_config_source_d368846786cd1f7d = []byte{ + // 667 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xcd, 0x6e, 0x13, 0x3d, + 0x14, 
0xcd, 0x24, 0x69, 0x9b, 0x38, 0x69, 0x9b, 0xb8, 0xdf, 0xf7, 0x75, 0xbe, 0x0a, 0xb5, 0x21, + 0x14, 0x29, 0x74, 0x31, 0x91, 0x52, 0x89, 0x0d, 0x02, 0x29, 0x7f, 0xb4, 0x88, 0x52, 0x82, 0x93, + 0x22, 0xb1, 0xb2, 0xdc, 0xc9, 0xcd, 0xd4, 0x62, 0x32, 0x63, 0x6c, 0x4f, 0x68, 0xb6, 0x2c, 0x78, + 0x0b, 0xb6, 0x2c, 0x78, 0x02, 0xc4, 0xaa, 0xaf, 0xd3, 0xb7, 0x40, 0x9e, 0x4c, 0xa1, 0x6d, 0x52, + 0x35, 0xab, 0xb9, 0xd7, 0xe7, 0x5c, 0x9f, 0x7b, 0x72, 0x8c, 0x1e, 0x43, 0x30, 0x09, 0xa7, 0x75, + 0x26, 0x78, 0x7d, 0xd2, 0xa8, 0xbb, 0xa1, 0x84, 0xba, 0x1b, 0x06, 0x23, 0xee, 0x51, 0x15, 0x46, + 0xd2, 0x05, 0x47, 0xc8, 0x50, 0x87, 0xb8, 0x1c, 0xc3, 0x1c, 0x26, 0xb8, 0x33, 0x69, 0x38, 0x06, + 0xb6, 0xb5, 0x3b, 0xcf, 0xf4, 0xa4, 0x70, 0xa9, 0x02, 0x39, 0xe1, 0x57, 0xc4, 0xad, 0x6d, 0x2f, + 0x0c, 0x3d, 0x1f, 0xea, 0x71, 0x75, 0x1a, 0x8d, 0xea, 0xc3, 0x48, 0x32, 0xcd, 0xc3, 0xe0, 0xae, + 0xf3, 0xcf, 0x92, 0x09, 0x01, 0x52, 0x25, 0xe7, 0x9b, 0x13, 0xe6, 0xf3, 0x21, 0xd3, 0x50, 0xbf, + 0xfa, 0x98, 0x1d, 0x54, 0xbf, 0x66, 0xd1, 0x7a, 0x53, 0xf0, 0x76, 0x2c, 0xb6, 0x1f, 0x6b, 0xc5, + 0xef, 0x50, 0x8e, 0x09, 0x4e, 0xf5, 0x54, 0x80, 0x6d, 0x55, 0xac, 0xda, 0x5a, 0x63, 0xcf, 0x99, + 0x13, 0xee, 0xdc, 0x62, 0x99, 0x7a, 0x30, 0x15, 0xd0, 0x42, 0xbf, 0x2e, 0x2f, 0x32, 0x4b, 0x5f, + 0xac, 0x74, 0xc9, 0x22, 0x2b, 0x6c, 0xd6, 0xc4, 0x8f, 0xd0, 0xaa, 0xeb, 0x47, 0x4a, 0x83, 0xa4, + 0x01, 0x1b, 0x83, 0xb2, 0xd3, 0x95, 0x4c, 0x2d, 0x4f, 0x8a, 0x49, 0xf3, 0xd8, 0xf4, 0x70, 0x1b, + 0xad, 0x5e, 0x5f, 0x5d, 0xd9, 0xd9, 0x4a, 0xa6, 0x56, 0x68, 0x6c, 0x2f, 0xb8, 0xfc, 0x40, 0x0a, + 0xb7, 0x3f, 0x83, 0x91, 0xa2, 0xf7, 0xb7, 0x50, 0xf8, 0x05, 0x5a, 0x95, 0x30, 0x92, 0xa0, 0xce, + 0xe8, 0x10, 0x7c, 0x36, 0xb5, 0x33, 0x15, 0xab, 0x56, 0x68, 0xfc, 0xef, 0xcc, 0x1c, 0x72, 0xae, + 0x1c, 0x72, 0x3a, 0x89, 0x83, 0xa4, 0x98, 0xe0, 0x3b, 0x06, 0x8e, 0x7b, 0x68, 0x5d, 0xc2, 0xa7, + 0x08, 0x94, 0xa6, 0x9a, 0x8f, 0x21, 0x8c, 0xb4, 0xbd, 0x74, 0xcf, 0x84, 0x56, 0xd1, 0xac, 0xbc, + 0xf2, 0xc3, 0xca, 0xee, 0xa5, 0x73, 0x29, 0xb2, 0x96, 0xf0, 0x07, 0x33, 0x3a, 0x1e, 0xa0, 0x0d, + 0xc9, 0x34, 0x50, 0x9f, 0x8f, 0xb9, 0xa6, 0x0a, 0xb4, 0xe6, 0x81, 0xa7, 0xec, 0xe5, 0x78, 0xea, + 0xee, 0x82, 0xe5, 0x08, 0xd3, 0x70, 0x64, 0xc0, 0xfd, 0x04, 0x4b, 0xca, 0xf2, 0x76, 0xab, 0x7a, + 0x8c, 0x56, 0x12, 0xc7, 0xf1, 0x0e, 0xda, 0x3c, 0x39, 0xee, 0x9f, 0xf4, 0x7a, 0x6f, 0xc9, 0xa0, + 0xdb, 0xa1, 0xa4, 0xdb, 0x1f, 0xd0, 0xa3, 0xee, 0x41, 0xb3, 0xfd, 0xa1, 0x94, 0xda, 0x4a, 0xe7, + 0x2c, 0x9c, 0x43, 0x59, 0xd3, 0x2c, 0xc5, 0x5f, 0x07, 0xa4, 0xd7, 0x2e, 0xa5, 0xf1, 0x1a, 0x42, + 0x9d, 0xee, 0xd1, 0xa0, 0x49, 0xe3, 0x3a, 0x53, 0xb5, 0xd1, 0x7f, 0x4d, 0xcf, 0x93, 0xe0, 0x31, + 0x0d, 0xc3, 0xeb, 0x7f, 0x6c, 0xf5, 0x9b, 0x85, 0xca, 0x73, 0x92, 0xf0, 0x33, 0x84, 0xc6, 0xec, + 0x9c, 0xea, 0xf0, 0x23, 0x04, 0x2a, 0x8e, 0x49, 0xa1, 0xf1, 0x60, 0xce, 0xa2, 0x93, 0x57, 0x81, + 0xde, 0x6f, 0xbc, 0x67, 0x7e, 0x04, 0x24, 0x3f, 0x66, 0xe7, 0x83, 0x18, 0x8e, 0x5f, 0xa3, 0xfc, + 0x88, 0xfb, 0x3e, 0x35, 0x6b, 0xd9, 0xe9, 0x3b, 0xb8, 0x9d, 0x30, 0x3a, 0xf5, 0x21, 0xe6, 0xb6, + 0x4a, 0xc6, 0xe1, 0x02, 0xce, 0x3f, 0x4c, 0x25, 0x3f, 0x92, 0x33, 0x03, 0x8c, 0xac, 0xea, 0xf7, + 0x34, 0x2a, 0xde, 0xc8, 0xef, 0x3f, 0x28, 0x2b, 0x98, 0x3e, 0x8b, 0x45, 0xe5, 0x0f, 0x53, 0x24, + 0xae, 0x70, 0x0f, 0x95, 0x4d, 0xaa, 0x6f, 0x3c, 0xcb, 0xe4, 0xee, 0xea, 0xfd, 0xf1, 0x3e, 0x4c, + 0x91, 0x75, 0x76, 0xeb, 0x9d, 0x3c, 0x47, 0x19, 0x36, 0x54, 0x49, 0xc0, 0x9e, 0x2c, 0x9a, 0xb1, + 0xd0, 0xd0, 0xc3, 0x14, 0x31, 0x3c, 0xfc, 0x06, 0xfd, 0xcb, 0x03, 0xae, 0x39, 0xf3, 0xe9, 0x08, + 0xb4, 0x7b, 0xf6, 0x27, 0x6f, 
0xd9, 0xfb, 0x12, 0xbb, 0x91, 0xf0, 0x5e, 0x1a, 0x5a, 0x12, 0xb3, + 0x56, 0x05, 0x6d, 0xde, 0xd8, 0x8d, 0x2a, 0x01, 0x2e, 0x1f, 0x71, 0x90, 0x78, 0xe9, 0xe7, 0xe5, + 0x45, 0xc6, 0x6a, 0x3d, 0x45, 0x3b, 0x3c, 0x9c, 0xc9, 0x14, 0x32, 0x3c, 0x9f, 0xce, 0x2b, 0x6e, + 0x95, 0xaf, 0x0b, 0xed, 0x99, 0x8b, 0x7b, 0xd6, 0xe9, 0x72, 0xac, 0x60, 0xff, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x4c, 0x68, 0x6a, 0x4f, 0xe5, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/grpc_service/grpc_service.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/grpc_service/grpc_service.pb.go new file mode 100755 index 0000000..832c2ca --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/grpc_service/grpc_service.pb.go @@ -0,0 +1,1196 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/grpc_service.proto + +package envoy_api_v2_core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import empty "github.com/golang/protobuf/ptypes/empty" +import _struct "github.com/golang/protobuf/ptypes/struct" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type GrpcService struct { + // Types that are valid to be assigned to TargetSpecifier: + // *GrpcService_EnvoyGrpc_ + // *GrpcService_GoogleGrpc_ + TargetSpecifier isGrpcService_TargetSpecifier `protobuf_oneof:"target_specifier"` + Timeout *duration.Duration `protobuf:"bytes,3,opt,name=timeout,proto3" json:"timeout,omitempty"` + InitialMetadata []*base.HeaderValue `protobuf:"bytes,5,rep,name=initial_metadata,json=initialMetadata,proto3" json:"initial_metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService) Reset() { *m = GrpcService{} } +func (m *GrpcService) String() string { return proto.CompactTextString(m) } +func (*GrpcService) ProtoMessage() {} +func (*GrpcService) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0} +} +func (m *GrpcService) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService.Unmarshal(m, b) +} +func (m *GrpcService) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService.Marshal(b, m, deterministic) +} +func (dst *GrpcService) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService.Merge(dst, src) +} +func (m *GrpcService) XXX_Size() int { + return xxx_messageInfo_GrpcService.Size(m) +} +func (m *GrpcService) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService proto.InternalMessageInfo + +type isGrpcService_TargetSpecifier interface { + isGrpcService_TargetSpecifier() +} + +type GrpcService_EnvoyGrpc_ struct { + EnvoyGrpc *GrpcService_EnvoyGrpc `protobuf:"bytes,1,opt,name=envoy_grpc,json=envoyGrpc,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_ struct { + GoogleGrpc *GrpcService_GoogleGrpc `protobuf:"bytes,2,opt,name=google_grpc,json=googleGrpc,proto3,oneof"` +} + +func (*GrpcService_EnvoyGrpc_) isGrpcService_TargetSpecifier() {} + +func (*GrpcService_GoogleGrpc_) isGrpcService_TargetSpecifier() {} + +func (m *GrpcService) GetTargetSpecifier() isGrpcService_TargetSpecifier { + if m != nil { + return m.TargetSpecifier + } + return nil +} + +func (m *GrpcService) GetEnvoyGrpc() *GrpcService_EnvoyGrpc { + if x, ok := m.GetTargetSpecifier().(*GrpcService_EnvoyGrpc_); ok { + return x.EnvoyGrpc + } + return nil +} + +func (m *GrpcService) GetGoogleGrpc() *GrpcService_GoogleGrpc { + if x, ok := m.GetTargetSpecifier().(*GrpcService_GoogleGrpc_); ok { + return x.GoogleGrpc + } + return nil +} + +func (m *GrpcService) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *GrpcService) GetInitialMetadata() []*base.HeaderValue { + if m != nil { + return m.InitialMetadata + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GrpcService) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GrpcService_OneofMarshaler, _GrpcService_OneofUnmarshaler, _GrpcService_OneofSizer, []interface{}{ + (*GrpcService_EnvoyGrpc_)(nil), + (*GrpcService_GoogleGrpc_)(nil), + } +} + +func _GrpcService_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GrpcService) + // target_specifier + switch x := m.TargetSpecifier.(type) { + case *GrpcService_EnvoyGrpc_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.EnvoyGrpc); err != nil { + return err + } + case *GrpcService_GoogleGrpc_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleGrpc); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GrpcService.TargetSpecifier has unexpected type %T", x) + } + return nil +} + +func _GrpcService_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GrpcService) + switch tag { + case 1: // target_specifier.envoy_grpc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_EnvoyGrpc) + err := b.DecodeMessage(msg) + m.TargetSpecifier = &GrpcService_EnvoyGrpc_{msg} + return true, err + case 2: // target_specifier.google_grpc + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc) + err := b.DecodeMessage(msg) + m.TargetSpecifier = &GrpcService_GoogleGrpc_{msg} + return true, err + default: + return false, nil + } +} + +func _GrpcService_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GrpcService) + // target_specifier + switch x := m.TargetSpecifier.(type) { + case *GrpcService_EnvoyGrpc_: + s := proto.Size(x.EnvoyGrpc) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_: + s := proto.Size(x.GoogleGrpc) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type GrpcService_EnvoyGrpc struct { + ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_EnvoyGrpc) Reset() { *m = GrpcService_EnvoyGrpc{} } +func (m *GrpcService_EnvoyGrpc) String() string { return proto.CompactTextString(m) } +func (*GrpcService_EnvoyGrpc) ProtoMessage() {} +func (*GrpcService_EnvoyGrpc) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 0} +} +func (m *GrpcService_EnvoyGrpc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_EnvoyGrpc.Unmarshal(m, b) +} +func (m *GrpcService_EnvoyGrpc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_EnvoyGrpc.Marshal(b, m, deterministic) +} +func (dst *GrpcService_EnvoyGrpc) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_EnvoyGrpc.Merge(dst, src) +} +func (m *GrpcService_EnvoyGrpc) XXX_Size() int { + return xxx_messageInfo_GrpcService_EnvoyGrpc.Size(m) +} +func (m *GrpcService_EnvoyGrpc) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_EnvoyGrpc.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_EnvoyGrpc proto.InternalMessageInfo + +func (m 
*GrpcService_EnvoyGrpc) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +type GrpcService_GoogleGrpc struct { + TargetUri string `protobuf:"bytes,1,opt,name=target_uri,json=targetUri,proto3" json:"target_uri,omitempty"` + ChannelCredentials *GrpcService_GoogleGrpc_ChannelCredentials `protobuf:"bytes,2,opt,name=channel_credentials,json=channelCredentials,proto3" json:"channel_credentials,omitempty"` + CallCredentials []*GrpcService_GoogleGrpc_CallCredentials `protobuf:"bytes,3,rep,name=call_credentials,json=callCredentials,proto3" json:"call_credentials,omitempty"` + StatPrefix string `protobuf:"bytes,4,opt,name=stat_prefix,json=statPrefix,proto3" json:"stat_prefix,omitempty"` + CredentialsFactoryName string `protobuf:"bytes,5,opt,name=credentials_factory_name,json=credentialsFactoryName,proto3" json:"credentials_factory_name,omitempty"` + Config *_struct.Struct `protobuf:"bytes,6,opt,name=config,proto3" json:"config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc) Reset() { *m = GrpcService_GoogleGrpc{} } +func (m *GrpcService_GoogleGrpc) String() string { return proto.CompactTextString(m) } +func (*GrpcService_GoogleGrpc) ProtoMessage() {} +func (*GrpcService_GoogleGrpc) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1} +} +func (m *GrpcService_GoogleGrpc) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc.Size(m) +} +func (m *GrpcService_GoogleGrpc) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc proto.InternalMessageInfo + +func (m *GrpcService_GoogleGrpc) GetTargetUri() string { + if m != nil { + return m.TargetUri + } + return "" +} + +func (m *GrpcService_GoogleGrpc) GetChannelCredentials() *GrpcService_GoogleGrpc_ChannelCredentials { + if m != nil { + return m.ChannelCredentials + } + return nil +} + +func (m *GrpcService_GoogleGrpc) GetCallCredentials() []*GrpcService_GoogleGrpc_CallCredentials { + if m != nil { + return m.CallCredentials + } + return nil +} + +func (m *GrpcService_GoogleGrpc) GetStatPrefix() string { + if m != nil { + return m.StatPrefix + } + return "" +} + +func (m *GrpcService_GoogleGrpc) GetCredentialsFactoryName() string { + if m != nil { + return m.CredentialsFactoryName + } + return "" +} + +func (m *GrpcService_GoogleGrpc) GetConfig() *_struct.Struct { + if m != nil { + return m.Config + } + return nil +} + +type GrpcService_GoogleGrpc_SslCredentials struct { + RootCerts *base.DataSource `protobuf:"bytes,1,opt,name=root_certs,json=rootCerts,proto3" json:"root_certs,omitempty"` + PrivateKey *base.DataSource `protobuf:"bytes,2,opt,name=private_key,json=privateKey,proto3" json:"private_key,omitempty"` + CertChain *base.DataSource `protobuf:"bytes,3,opt,name=cert_chain,json=certChain,proto3" json:"cert_chain,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func 
(m *GrpcService_GoogleGrpc_SslCredentials) Reset() { *m = GrpcService_GoogleGrpc_SslCredentials{} } +func (m *GrpcService_GoogleGrpc_SslCredentials) String() string { return proto.CompactTextString(m) } +func (*GrpcService_GoogleGrpc_SslCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_SslCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 0} +} +func (m *GrpcService_GoogleGrpc_SslCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_SslCredentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_SslCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_SslCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_SslCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_SslCredentials proto.InternalMessageInfo + +func (m *GrpcService_GoogleGrpc_SslCredentials) GetRootCerts() *base.DataSource { + if m != nil { + return m.RootCerts + } + return nil +} + +func (m *GrpcService_GoogleGrpc_SslCredentials) GetPrivateKey() *base.DataSource { + if m != nil { + return m.PrivateKey + } + return nil +} + +func (m *GrpcService_GoogleGrpc_SslCredentials) GetCertChain() *base.DataSource { + if m != nil { + return m.CertChain + } + return nil +} + +type GrpcService_GoogleGrpc_GoogleLocalCredentials struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) Reset() { + *m = GrpcService_GoogleGrpc_GoogleLocalCredentials{} +} +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) String() string { + return proto.CompactTextString(m) +} +func (*GrpcService_GoogleGrpc_GoogleLocalCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_GoogleLocalCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 1} +} +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_GoogleLocalCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_GoogleLocalCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_GoogleLocalCredentials proto.InternalMessageInfo + +type GrpcService_GoogleGrpc_ChannelCredentials struct { + // Types that are valid to be assigned to CredentialSpecifier: + // 
*GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials + // *GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault + // *GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials + CredentialSpecifier isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier `protobuf_oneof:"credential_specifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_ChannelCredentials) Reset() { + *m = GrpcService_GoogleGrpc_ChannelCredentials{} +} +func (m *GrpcService_GoogleGrpc_ChannelCredentials) String() string { return proto.CompactTextString(m) } +func (*GrpcService_GoogleGrpc_ChannelCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_ChannelCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 2} +} +func (m *GrpcService_GoogleGrpc_ChannelCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_ChannelCredentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_ChannelCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_ChannelCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_ChannelCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_ChannelCredentials proto.InternalMessageInfo + +type isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier interface { + isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier() +} + +type GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials struct { + SslCredentials *GrpcService_GoogleGrpc_SslCredentials `protobuf:"bytes,1,opt,name=ssl_credentials,json=sslCredentials,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault struct { + GoogleDefault *empty.Empty `protobuf:"bytes,2,opt,name=google_default,json=googleDefault,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials struct { + LocalCredentials *GrpcService_GoogleGrpc_GoogleLocalCredentials `protobuf:"bytes,3,opt,name=local_credentials,json=localCredentials,proto3,oneof"` +} + +func (*GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials) isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault) isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials) isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier() { +} + +func (m *GrpcService_GoogleGrpc_ChannelCredentials) GetCredentialSpecifier() isGrpcService_GoogleGrpc_ChannelCredentials_CredentialSpecifier { + if m != nil { + return m.CredentialSpecifier + } + return nil +} + +func (m *GrpcService_GoogleGrpc_ChannelCredentials) GetSslCredentials() *GrpcService_GoogleGrpc_SslCredentials { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials); ok { + return x.SslCredentials + } + return nil +} + +func (m *GrpcService_GoogleGrpc_ChannelCredentials) 
GetGoogleDefault() *empty.Empty { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault); ok { + return x.GoogleDefault + } + return nil +} + +func (m *GrpcService_GoogleGrpc_ChannelCredentials) GetLocalCredentials() *GrpcService_GoogleGrpc_GoogleLocalCredentials { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials); ok { + return x.LocalCredentials + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GrpcService_GoogleGrpc_ChannelCredentials) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GrpcService_GoogleGrpc_ChannelCredentials_OneofMarshaler, _GrpcService_GoogleGrpc_ChannelCredentials_OneofUnmarshaler, _GrpcService_GoogleGrpc_ChannelCredentials_OneofSizer, []interface{}{ + (*GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials)(nil), + (*GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault)(nil), + (*GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials)(nil), + } +} + +func _GrpcService_GoogleGrpc_ChannelCredentials_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GrpcService_GoogleGrpc_ChannelCredentials) + // credential_specifier + switch x := m.CredentialSpecifier.(type) { + case *GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SslCredentials); err != nil { + return err + } + case *GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleDefault); err != nil { + return err + } + case *GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.LocalCredentials); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GrpcService_GoogleGrpc_ChannelCredentials.CredentialSpecifier has unexpected type %T", x) + } + return nil +} + +func _GrpcService_GoogleGrpc_ChannelCredentials_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GrpcService_GoogleGrpc_ChannelCredentials) + switch tag { + case 1: // credential_specifier.ssl_credentials + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc_SslCredentials) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials{msg} + return true, err + case 2: // credential_specifier.google_default + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault{msg} + return true, err + case 3: // credential_specifier.local_credentials + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc_GoogleLocalCredentials) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials{msg} + return true, err + default: + return false, nil + } +} + +func _GrpcService_GoogleGrpc_ChannelCredentials_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GrpcService_GoogleGrpc_ChannelCredentials) + // credential_specifier + switch x := m.CredentialSpecifier.(type) { + case 
*GrpcService_GoogleGrpc_ChannelCredentials_SslCredentials: + s := proto.Size(x.SslCredentials) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_ChannelCredentials_GoogleDefault: + s := proto.Size(x.GoogleDefault) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_ChannelCredentials_LocalCredentials: + s := proto.Size(x.LocalCredentials) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type GrpcService_GoogleGrpc_CallCredentials struct { + // Types that are valid to be assigned to CredentialSpecifier: + // *GrpcService_GoogleGrpc_CallCredentials_AccessToken + // *GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine + // *GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken + // *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess + // *GrpcService_GoogleGrpc_CallCredentials_GoogleIam + // *GrpcService_GoogleGrpc_CallCredentials_FromPlugin + CredentialSpecifier isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier `protobuf_oneof:"credential_specifier"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) Reset() { + *m = GrpcService_GoogleGrpc_CallCredentials{} +} +func (m *GrpcService_GoogleGrpc_CallCredentials) String() string { return proto.CompactTextString(m) } +func (*GrpcService_GoogleGrpc_CallCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_CallCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 3} +} +func (m *GrpcService_GoogleGrpc_CallCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_CallCredentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_CallCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_CallCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_CallCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials proto.InternalMessageInfo + +type isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier interface { + isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() +} + +type GrpcService_GoogleGrpc_CallCredentials_AccessToken struct { + AccessToken string `protobuf:"bytes,1,opt,name=access_token,json=accessToken,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine struct { + GoogleComputeEngine *empty.Empty `protobuf:"bytes,2,opt,name=google_compute_engine,json=googleComputeEngine,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken struct { + GoogleRefreshToken string `protobuf:"bytes,3,opt,name=google_refresh_token,json=googleRefreshToken,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess struct { + ServiceAccountJwtAccess 
*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials `protobuf:"bytes,4,opt,name=service_account_jwt_access,json=serviceAccountJwtAccess,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_GoogleIam struct { + GoogleIam *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials `protobuf:"bytes,5,opt,name=google_iam,json=googleIam,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_FromPlugin struct { + FromPlugin *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin `protobuf:"bytes,6,opt,name=from_plugin,json=fromPlugin,proto3,oneof"` +} + +func (*GrpcService_GoogleGrpc_CallCredentials_AccessToken) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_GoogleIam) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_FromPlugin) isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier() { +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetCredentialSpecifier() isGrpcService_GoogleGrpc_CallCredentials_CredentialSpecifier { + if m != nil { + return m.CredentialSpecifier + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetAccessToken() string { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_AccessToken); ok { + return x.AccessToken + } + return "" +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetGoogleComputeEngine() *empty.Empty { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine); ok { + return x.GoogleComputeEngine + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetGoogleRefreshToken() string { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken); ok { + return x.GoogleRefreshToken + } + return "" +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetServiceAccountJwtAccess() *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess); ok { + return x.ServiceAccountJwtAccess + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetGoogleIam() *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_GoogleIam); ok { + return x.GoogleIam + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials) GetFromPlugin() *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin { + if x, ok := m.GetCredentialSpecifier().(*GrpcService_GoogleGrpc_CallCredentials_FromPlugin); ok { + return x.FromPlugin + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GrpcService_GoogleGrpc_CallCredentials) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GrpcService_GoogleGrpc_CallCredentials_OneofMarshaler, _GrpcService_GoogleGrpc_CallCredentials_OneofUnmarshaler, _GrpcService_GoogleGrpc_CallCredentials_OneofSizer, []interface{}{ + (*GrpcService_GoogleGrpc_CallCredentials_AccessToken)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_GoogleIam)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_FromPlugin)(nil), + } +} + +func _GrpcService_GoogleGrpc_CallCredentials_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials) + // credential_specifier + switch x := m.CredentialSpecifier.(type) { + case *GrpcService_GoogleGrpc_CallCredentials_AccessToken: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AccessToken) + case *GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleComputeEngine); err != nil { + return err + } + case *GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.GoogleRefreshToken) + case *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ServiceAccountJwtAccess); err != nil { + return err + } + case *GrpcService_GoogleGrpc_CallCredentials_GoogleIam: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GoogleIam); err != nil { + return err + } + case *GrpcService_GoogleGrpc_CallCredentials_FromPlugin: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FromPlugin); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GrpcService_GoogleGrpc_CallCredentials.CredentialSpecifier has unexpected type %T", x) + } + return nil +} + +func _GrpcService_GoogleGrpc_CallCredentials_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials) + switch tag { + case 1: // credential_specifier.access_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.CredentialSpecifier = &GrpcService_GoogleGrpc_CallCredentials_AccessToken{x} + return true, err + case 2: // credential_specifier.google_compute_engine + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(empty.Empty) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine{msg} + return true, err + case 3: // credential_specifier.google_refresh_token + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.CredentialSpecifier = &GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken{x} + return true, err + case 4: // credential_specifier.service_account_jwt_access + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = 
&GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess{msg} + return true, err + case 5: // credential_specifier.google_iam + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_CallCredentials_GoogleIam{msg} + return true, err + case 6: // credential_specifier.from_plugin + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) + err := b.DecodeMessage(msg) + m.CredentialSpecifier = &GrpcService_GoogleGrpc_CallCredentials_FromPlugin{msg} + return true, err + default: + return false, nil + } +} + +func _GrpcService_GoogleGrpc_CallCredentials_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials) + // credential_specifier + switch x := m.CredentialSpecifier.(type) { + case *GrpcService_GoogleGrpc_CallCredentials_AccessToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AccessToken))) + n += len(x.AccessToken) + case *GrpcService_GoogleGrpc_CallCredentials_GoogleComputeEngine: + s := proto.Size(x.GoogleComputeEngine) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_CallCredentials_GoogleRefreshToken: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.GoogleRefreshToken))) + n += len(x.GoogleRefreshToken) + case *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJwtAccess: + s := proto.Size(x.ServiceAccountJwtAccess) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_CallCredentials_GoogleIam: + s := proto.Size(x.GoogleIam) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_CallCredentials_FromPlugin: + s := proto.Size(x.FromPlugin) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials struct { + JsonKey string `protobuf:"bytes,1,opt,name=json_key,json=jsonKey,proto3" json:"json_key,omitempty"` + TokenLifetimeSeconds uint64 `protobuf:"varint,2,opt,name=token_lifetime_seconds,json=tokenLifetimeSeconds,proto3" json:"token_lifetime_seconds,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) Reset() { + *m = GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials{} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) String() string { + return proto.CompactTextString(m) +} +func (*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 3, 0} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) XXX_Marshal(b 
[]byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials proto.InternalMessageInfo + +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) GetJsonKey() string { + if m != nil { + return m.JsonKey + } + return "" +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials) GetTokenLifetimeSeconds() uint64 { + if m != nil { + return m.TokenLifetimeSeconds + } + return 0 +} + +type GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials struct { + AuthorizationToken string `protobuf:"bytes,1,opt,name=authorization_token,json=authorizationToken,proto3" json:"authorization_token,omitempty"` + AuthoritySelector string `protobuf:"bytes,2,opt,name=authority_selector,json=authoritySelector,proto3" json:"authority_selector,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) Reset() { + *m = GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials{} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) String() string { + return proto.CompactTextString(m) +} +func (*GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 3, 1} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials.Size(m) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials proto.InternalMessageInfo + +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) 
GetAuthorizationToken() string { + if m != nil { + return m.AuthorizationToken + } + return "" +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials) GetAuthoritySelector() string { + if m != nil { + return m.AuthoritySelector + } + return "" +} + +type GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to ConfigType: + // *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config + // *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig + ConfigType isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType `protobuf_oneof:"config_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) Reset() { + *m = GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin{} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) String() string { + return proto.CompactTextString(m) +} +func (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) ProtoMessage() {} +func (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) Descriptor() ([]byte, []int) { + return fileDescriptor_grpc_service_b85549433708d753, []int{0, 1, 3, 2} +} +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.Unmarshal(m, b) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.Marshal(b, m, deterministic) +} +func (dst *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.Merge(dst, src) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_Size() int { + return xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.Size(m) +} +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin proto.InternalMessageInfo + +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType interface { + isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType() +} + +type GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config struct { + Config *_struct.Struct `protobuf:"bytes,2,opt,name=config,proto3,oneof"` +} + +type GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig struct { + TypedConfig *any.Any `protobuf:"bytes,3,opt,name=typed_config,json=typedConfig,proto3,oneof"` +} + +func (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config) 
isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType() { +} + +func (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig) isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType() { +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) GetConfigType() isGrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_ConfigType { + if m != nil { + return m.ConfigType + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) GetConfig() *_struct.Struct { + if x, ok := m.GetConfigType().(*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config); ok { + return x.Config + } + return nil +} + +func (m *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) GetTypedConfig() *any.Any { + if x, ok := m.GetConfigType().(*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig); ok { + return x.TypedConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofMarshaler, _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofUnmarshaler, _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofSizer, []interface{}{ + (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config)(nil), + (*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig)(nil), + } +} + +func _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) + // config_type + switch x := m.ConfigType.(type) { + case *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Config); err != nil { + return err + } + case *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TypedConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin.ConfigType has unexpected type %T", x) + } + return nil +} + +func _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) + switch tag { + case 2: // config_type.config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.ConfigType = &GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config{msg} + return true, err + case 3: // config_type.typed_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.ConfigType = &GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig{msg} + return true, err + default: + return 
false, nil + } +} + +func _GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin) + // config_type + switch x := m.ConfigType.(type) { + case *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_Config: + s := proto.Size(x.Config) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin_TypedConfig: + s := proto.Size(x.TypedConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*GrpcService)(nil), "envoy.api.v2.core.GrpcService") + proto.RegisterType((*GrpcService_EnvoyGrpc)(nil), "envoy.api.v2.core.GrpcService.EnvoyGrpc") + proto.RegisterType((*GrpcService_GoogleGrpc)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc") + proto.RegisterType((*GrpcService_GoogleGrpc_SslCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.SslCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_GoogleLocalCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.GoogleLocalCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_ChannelCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.ChannelCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_CallCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.CallCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_CallCredentials_ServiceAccountJWTAccessCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.CallCredentials.ServiceAccountJWTAccessCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_CallCredentials_GoogleIAMCredentials)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.CallCredentials.GoogleIAMCredentials") + proto.RegisterType((*GrpcService_GoogleGrpc_CallCredentials_MetadataCredentialsFromPlugin)(nil), "envoy.api.v2.core.GrpcService.GoogleGrpc.CallCredentials.MetadataCredentialsFromPlugin") +} + +func init() { + proto.RegisterFile("envoy/api/v2/core/grpc_service.proto", fileDescriptor_grpc_service_b85549433708d753) +} + +var fileDescriptor_grpc_service_b85549433708d753 = []byte{ + // 1052 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x55, 0x4f, 0x6f, 0x1b, 0x45, + 0x14, 0xb7, 0x63, 0xa7, 0xad, 0xdf, 0xa6, 0x89, 0x33, 0x09, 0x89, 0xb3, 0x34, 0x25, 0x2a, 0x1c, + 0x02, 0x82, 0xb5, 0x70, 0x41, 0x6a, 0xa4, 0x0a, 0x88, 0x9d, 0xb4, 0x4e, 0x49, 0xab, 0x68, 0x5d, + 0xe8, 0x05, 0x69, 0x34, 0x19, 0x8f, 0x9d, 0x69, 0xd7, 0x3b, 0xab, 0xd9, 0x59, 0xb7, 0xee, 0x99, + 0x6f, 0xc1, 0x97, 0xe0, 0x88, 0x38, 0xf5, 0x8c, 0xf8, 0x0e, 0x5c, 0xb8, 0xf4, 0x5b, 0xa0, 0xf9, + 0xe3, 0xc4, 0x6b, 0x47, 0x75, 0xc8, 0x6d, 0xf7, 0xfd, 0xde, 0xef, 0xfd, 0xf9, 0xcd, 0x9b, 0x37, + 0xf0, 0x19, 0x8b, 0x87, 0x62, 0x54, 0x27, 0x09, 0xaf, 0x0f, 0x1b, 0x75, 0x2a, 0x24, 0xab, 0xf7, + 0x65, 0x42, 0x71, 0xca, 0xe4, 0x90, 0x53, 0x16, 0x24, 0x52, 0x28, 0x81, 0x56, 0x8d, 0x57, 0x40, + 0x12, 0x1e, 0x0c, 0x1b, 0x81, 0xf6, 0xf2, 0xef, 0xcc, 0x12, 0x4f, 0x49, 0xea, 0x08, 0xfe, 0x56, + 0x5f, 0x88, 0x7e, 0xc4, 0xea, 0xe6, 0xef, 0x34, 0xeb, 0xd5, 0x49, 0x3c, 0x72, 0xd0, 0xdd, 0x69, + 0xa8, 0x9b, 0x49, 0xa2, 0xb8, 0x88, 0x1d, 0x7e, 0x67, 0x1a, 0x4f, 0x95, 0xcc, 0xa8, 0x72, 0xe8, + 0xc7, 0xd3, 0x28, 0x1b, 0x24, 0x6a, 0x1c, 0x7a, 0x73, 0x48, 0x22, 0xde, 0x25, 0x8a, 0xd5, 0xc7, + 0x1f, 
0x16, 0xb8, 0xf7, 0x2f, 0x02, 0xef, 0xb1, 0x4c, 0x68, 0xc7, 0x76, 0x85, 0x8e, 0x00, 0x4c, + 0xf9, 0x58, 0xf7, 0x5a, 0x2b, 0xee, 0x14, 0x77, 0xbd, 0xc6, 0x6e, 0x30, 0xd3, 0x64, 0x30, 0xc1, + 0x09, 0x0e, 0x35, 0xaa, 0x0d, 0xed, 0x42, 0x58, 0x61, 0xe3, 0x1f, 0x74, 0x0c, 0x9e, 0x2d, 0xc9, + 0xc6, 0x5a, 0x30, 0xb1, 0x3e, 0x9f, 0x13, 0xeb, 0xb1, 0x61, 0xb8, 0x60, 0xd0, 0x3f, 0xff, 0x43, + 0xf7, 0xe1, 0xa6, 0xe2, 0x03, 0x26, 0x32, 0x55, 0x2b, 0x99, 0x48, 0x5b, 0x81, 0x45, 0x83, 0x71, + 0xc3, 0xc1, 0x81, 0x93, 0x2b, 0x1c, 0x7b, 0xa2, 0x23, 0xa8, 0xf2, 0x98, 0x2b, 0x4e, 0x22, 0x3c, + 0x60, 0x8a, 0x74, 0x89, 0x22, 0xb5, 0xc5, 0x9d, 0xd2, 0xae, 0xd7, 0xb8, 0x7b, 0x49, 0x1d, 0x6d, + 0x46, 0xba, 0x4c, 0xfe, 0x4c, 0xa2, 0x8c, 0x85, 0x2b, 0x8e, 0xf7, 0xd4, 0xd1, 0xfc, 0x3d, 0xa8, + 0x9c, 0xf7, 0x89, 0xbe, 0x84, 0x25, 0x1a, 0x65, 0xa9, 0x62, 0x12, 0xc7, 0x64, 0xc0, 0x8c, 0x4e, + 0x95, 0x66, 0xe5, 0xcf, 0xf7, 0xef, 0x4a, 0x65, 0xb9, 0xb0, 0x53, 0x0c, 0x3d, 0x07, 0x3f, 0x23, + 0x03, 0xe6, 0xff, 0xb3, 0x02, 0x70, 0xd1, 0x17, 0xda, 0x05, 0x50, 0x44, 0xf6, 0x99, 0xc2, 0x99, + 0xe4, 0xb3, 0xd4, 0x8a, 0x05, 0x7f, 0x92, 0x1c, 0x0d, 0x60, 0x8d, 0x9e, 0x91, 0x38, 0x66, 0x11, + 0xa6, 0x92, 0x75, 0x59, 0xac, 0x2b, 0x4a, 0x9d, 0x92, 0x0f, 0xaf, 0xac, 0x64, 0xd0, 0xb2, 0x41, + 0x5a, 0x17, 0x31, 0x42, 0x44, 0x67, 0x6c, 0xa8, 0x0b, 0x55, 0x4a, 0xa2, 0x7c, 0xae, 0x92, 0x51, + 0x6b, 0xef, 0x7f, 0xe4, 0x22, 0x51, 0x2e, 0xd1, 0x0a, 0xcd, 0x1b, 0xd0, 0x17, 0xe0, 0xa5, 0x8a, + 0x28, 0x9c, 0x48, 0xd6, 0xe3, 0x6f, 0x6a, 0xe5, 0xe9, 0xfe, 0x41, 0xa3, 0x27, 0x06, 0x44, 0x0f, + 0xa0, 0x36, 0x51, 0x0c, 0xee, 0x11, 0xaa, 0x84, 0x1c, 0x59, 0xcd, 0x17, 0x35, 0x31, 0xdc, 0x98, + 0xc0, 0x1f, 0x59, 0x58, 0x6b, 0x8e, 0xea, 0x70, 0x83, 0x8a, 0xb8, 0xc7, 0xfb, 0xb5, 0x1b, 0x46, + 0xad, 0xcd, 0x99, 0x69, 0xe9, 0x98, 0xcb, 0x13, 0x3a, 0x37, 0xff, 0xef, 0x22, 0x2c, 0x77, 0xd2, + 0x5c, 0xa5, 0x0f, 0x01, 0xa4, 0x10, 0x0a, 0x53, 0x26, 0x55, 0xea, 0xee, 0xc2, 0xf6, 0x25, 0x4a, + 0x1c, 0x10, 0x45, 0x3a, 0x22, 0x93, 0x94, 0x85, 0x15, 0x4d, 0x68, 0x69, 0x7f, 0xf4, 0x1d, 0x78, + 0x89, 0xe4, 0x43, 0xa2, 0x18, 0x7e, 0xc5, 0x46, 0xee, 0xd0, 0xe6, 0xd0, 0xc1, 0x31, 0x7e, 0x64, + 0x23, 0x9d, 0x5d, 0x27, 0xc6, 0xf4, 0x8c, 0xf0, 0xd8, 0xcd, 0xfc, 0xbc, 0xec, 0x9a, 0xd0, 0xd2, + 0xfe, 0x7e, 0x0d, 0x36, 0xec, 0xa1, 0x1c, 0x0b, 0x4a, 0x26, 0xbb, 0xf2, 0xff, 0x5a, 0x00, 0x34, + 0x3b, 0x10, 0x88, 0xc2, 0x4a, 0x9a, 0xe6, 0xcf, 0xde, 0x76, 0xfc, 0xe0, 0xea, 0x67, 0x9f, 0xd7, + 0xaf, 0x5d, 0x08, 0x97, 0xd3, 0xbc, 0xa2, 0xdf, 0xc3, 0xb2, 0x5b, 0x09, 0x5d, 0xd6, 0x23, 0x59, + 0xa4, 0x9c, 0x2c, 0x1b, 0x33, 0xa7, 0x73, 0xa8, 0x97, 0x57, 0xbb, 0x10, 0xde, 0xb6, 0xc0, 0x81, + 0x75, 0x47, 0x02, 0x56, 0x23, 0xdd, 0xd0, 0xd4, 0x8c, 0xea, 0x18, 0x3f, 0x5c, 0xbd, 0xce, 0xcb, + 0x95, 0x69, 0x17, 0xc2, 0x6a, 0x34, 0x65, 0x6b, 0x6e, 0xc3, 0xfa, 0x45, 0x2a, 0x9c, 0x26, 0x8c, + 0xf2, 0x1e, 0x67, 0x12, 0x2d, 0xfe, 0xf1, 0xfe, 0x5d, 0xa9, 0xe8, 0xff, 0x7a, 0x0b, 0x56, 0xa6, + 0x26, 0x1e, 0x7d, 0x0a, 0x4b, 0x84, 0x52, 0x96, 0xa6, 0x58, 0x89, 0x57, 0x2c, 0xb6, 0x37, 0xbc, + 0x5d, 0x08, 0x3d, 0x6b, 0x7d, 0xae, 0x8d, 0xe8, 0x18, 0x3e, 0x72, 0x4a, 0x50, 0x31, 0x48, 0x32, + 0xc5, 0x30, 0x8b, 0xfb, 0x3c, 0x66, 0x73, 0x05, 0x59, 0xb3, 0x40, 0xcb, 0xb2, 0x0e, 0x0d, 0x09, + 0x35, 0x60, 0xdd, 0x45, 0x93, 0xac, 0x27, 0x59, 0x7a, 0xe6, 0x52, 0x97, 0x5c, 0x6a, 0x64, 0xd1, + 0xd0, 0x82, 0xb6, 0x82, 0xdf, 0x8a, 0xe0, 0xbb, 0xb7, 0x0c, 0x13, 0x4a, 0x45, 0x16, 0x2b, 0xfc, + 0xf2, 0xb5, 0xc2, 0xb6, 0x4a, 0x73, 0x2f, 0xbd, 0xc6, 0x2f, 0xd7, 0xbe, 0xf8, 0x81, 0x73, 0xd9, + 0xb7, 0xa1, 0x9f, 0xbc, 0x78, 
0xbe, 0x6f, 0x02, 0xe7, 0x05, 0xdf, 0x4c, 0xf3, 0x5e, 0xaf, 0x95, + 0xf5, 0x42, 0x02, 0xdc, 0xf2, 0xc7, 0x9c, 0x0c, 0xcc, 0x5d, 0xf7, 0x1a, 0xcf, 0xae, 0x5f, 0x8c, + 0x85, 0x8e, 0xf6, 0x9f, 0xe6, 0xd3, 0x57, 0x6c, 0x8e, 0x23, 0x32, 0x40, 0x6f, 0xc1, 0xeb, 0x49, + 0x31, 0xc0, 0x49, 0x94, 0xf5, 0x79, 0xec, 0xb6, 0xc6, 0x8b, 0xeb, 0x67, 0x1c, 0x3f, 0x1c, 0x13, + 0xb6, 0x47, 0x52, 0x0c, 0x4e, 0x4c, 0x78, 0xfd, 0xb6, 0xf5, 0xce, 0xff, 0xfc, 0x0c, 0xee, 0xcd, + 0x57, 0x0b, 0x6d, 0xc1, 0xad, 0x97, 0xa9, 0x88, 0xcd, 0x36, 0x31, 0x33, 0x15, 0xde, 0xd4, 0xff, + 0x7a, 0x57, 0x7c, 0x03, 0x1b, 0xe6, 0xc0, 0x71, 0xc4, 0x7b, 0x4c, 0x3f, 0x7e, 0x38, 0x65, 0x54, + 0xc4, 0x5d, 0xfb, 0x56, 0x94, 0xc3, 0x75, 0x83, 0x1e, 0x3b, 0xb0, 0x63, 0x31, 0x7f, 0x08, 0xeb, + 0x97, 0xe9, 0x82, 0xea, 0xb0, 0x46, 0x32, 0x75, 0x26, 0x24, 0x7f, 0x6b, 0xde, 0xd3, 0xc9, 0x39, + 0x0e, 0x51, 0x0e, 0xb2, 0xa3, 0xf4, 0x15, 0x8c, 0xad, 0x6a, 0x84, 0x53, 0x16, 0x31, 0xbd, 0x87, + 0x4d, 0xea, 0x4a, 0xb8, 0x7a, 0x8e, 0x74, 0x1c, 0xe0, 0xff, 0x5e, 0x84, 0xed, 0x0f, 0xca, 0x83, + 0x10, 0x94, 0x2f, 0xde, 0xd5, 0xd0, 0x7c, 0xa3, 0xaf, 0xcf, 0x37, 0xfa, 0xc2, 0x07, 0x37, 0x7a, + 0xbb, 0x30, 0xde, 0xe9, 0x68, 0x0f, 0x96, 0xd4, 0x28, 0x61, 0x5d, 0xec, 0x88, 0x76, 0x51, 0xac, + 0xcf, 0x10, 0xf7, 0x63, 0x7d, 0xb3, 0x3c, 0xe3, 0xdb, 0x32, 0xae, 0xcd, 0xdb, 0xe0, 0x59, 0x12, + 0xd6, 0xd6, 0x39, 0x6b, 0xa0, 0xb9, 0x05, 0x55, 0xf7, 0xa4, 0x4f, 0x43, 0x4f, 0xca, 0xb7, 0xca, + 0xd5, 0xc5, 0xe6, 0xb7, 0xf0, 0x09, 0x17, 0x76, 0x98, 0x12, 0x29, 0xde, 0x8c, 0x66, 0xe7, 0xaa, + 0x59, 0x9d, 0x18, 0xac, 0x13, 0x5d, 0xd9, 0x49, 0xf1, 0xf4, 0x86, 0x29, 0xf1, 0xfe, 0x7f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x5c, 0x77, 0x13, 0x9a, 0x8c, 0x0a, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check/health_check.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check/health_check.pb.go new file mode 100755 index 0000000..e7df297 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check/health_check.pb.go @@ -0,0 +1,998 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/health_check.proto + +package envoy_api_v2_core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import _struct "github.com/golang/protobuf/ptypes/struct" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import _range "google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/range" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type HealthStatus int32 + +const ( + HealthStatus_UNKNOWN HealthStatus = 0 + HealthStatus_HEALTHY HealthStatus = 1 + HealthStatus_UNHEALTHY HealthStatus = 2 + HealthStatus_DRAINING HealthStatus = 3 + HealthStatus_TIMEOUT HealthStatus = 4 + HealthStatus_DEGRADED HealthStatus = 5 +) + +var HealthStatus_name = map[int32]string{ + 0: "UNKNOWN", + 1: "HEALTHY", + 2: "UNHEALTHY", + 3: "DRAINING", + 4: "TIMEOUT", + 5: "DEGRADED", +} +var HealthStatus_value = map[string]int32{ + "UNKNOWN": 0, + "HEALTHY": 1, + "UNHEALTHY": 2, + "DRAINING": 3, + "TIMEOUT": 4, + "DEGRADED": 5, +} + +func (x HealthStatus) String() string { + return proto.EnumName(HealthStatus_name, int32(x)) +} +func (HealthStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0} +} + +type HealthCheck struct { + Timeout *duration.Duration `protobuf:"bytes,1,opt,name=timeout,proto3" json:"timeout,omitempty"` + Interval *duration.Duration `protobuf:"bytes,2,opt,name=interval,proto3" json:"interval,omitempty"` + IntervalJitter *duration.Duration `protobuf:"bytes,3,opt,name=interval_jitter,json=intervalJitter,proto3" json:"interval_jitter,omitempty"` + IntervalJitterPercent uint32 `protobuf:"varint,18,opt,name=interval_jitter_percent,json=intervalJitterPercent,proto3" json:"interval_jitter_percent,omitempty"` + UnhealthyThreshold *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=unhealthy_threshold,json=unhealthyThreshold,proto3" json:"unhealthy_threshold,omitempty"` + HealthyThreshold *wrappers.UInt32Value `protobuf:"bytes,5,opt,name=healthy_threshold,json=healthyThreshold,proto3" json:"healthy_threshold,omitempty"` + AltPort *wrappers.UInt32Value `protobuf:"bytes,6,opt,name=alt_port,json=altPort,proto3" json:"alt_port,omitempty"` + ReuseConnection *wrappers.BoolValue `protobuf:"bytes,7,opt,name=reuse_connection,json=reuseConnection,proto3" json:"reuse_connection,omitempty"` + // Types that are valid to be assigned to HealthChecker: + // *HealthCheck_HttpHealthCheck_ + // *HealthCheck_TcpHealthCheck_ + // *HealthCheck_GrpcHealthCheck_ + // *HealthCheck_CustomHealthCheck_ + HealthChecker isHealthCheck_HealthChecker `protobuf_oneof:"health_checker"` + NoTrafficInterval *duration.Duration `protobuf:"bytes,12,opt,name=no_traffic_interval,json=noTrafficInterval,proto3" json:"no_traffic_interval,omitempty"` + UnhealthyInterval *duration.Duration `protobuf:"bytes,14,opt,name=unhealthy_interval,json=unhealthyInterval,proto3" json:"unhealthy_interval,omitempty"` + UnhealthyEdgeInterval *duration.Duration `protobuf:"bytes,15,opt,name=unhealthy_edge_interval,json=unhealthyEdgeInterval,proto3" json:"unhealthy_edge_interval,omitempty"` + HealthyEdgeInterval *duration.Duration `protobuf:"bytes,16,opt,name=healthy_edge_interval,json=healthyEdgeInterval,proto3" json:"healthy_edge_interval,omitempty"` + EventLogPath string `protobuf:"bytes,17,opt,name=event_log_path,json=eventLogPath,proto3" json:"event_log_path,omitempty"` + AlwaysLogHealthCheckFailures bool `protobuf:"varint,19,opt,name=always_log_health_check_failures,json=alwaysLogHealthCheckFailures,proto3" json:"always_log_health_check_failures,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck) Reset() { *m = HealthCheck{} } +func (m *HealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck) ProtoMessage() {} +func (*HealthCheck) 
Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0} +} +func (m *HealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck.Merge(dst, src) +} +func (m *HealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck.Size(m) +} +func (m *HealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck proto.InternalMessageInfo + +func (m *HealthCheck) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +func (m *HealthCheck) GetInterval() *duration.Duration { + if m != nil { + return m.Interval + } + return nil +} + +func (m *HealthCheck) GetIntervalJitter() *duration.Duration { + if m != nil { + return m.IntervalJitter + } + return nil +} + +func (m *HealthCheck) GetIntervalJitterPercent() uint32 { + if m != nil { + return m.IntervalJitterPercent + } + return 0 +} + +func (m *HealthCheck) GetUnhealthyThreshold() *wrappers.UInt32Value { + if m != nil { + return m.UnhealthyThreshold + } + return nil +} + +func (m *HealthCheck) GetHealthyThreshold() *wrappers.UInt32Value { + if m != nil { + return m.HealthyThreshold + } + return nil +} + +func (m *HealthCheck) GetAltPort() *wrappers.UInt32Value { + if m != nil { + return m.AltPort + } + return nil +} + +func (m *HealthCheck) GetReuseConnection() *wrappers.BoolValue { + if m != nil { + return m.ReuseConnection + } + return nil +} + +type isHealthCheck_HealthChecker interface { + isHealthCheck_HealthChecker() +} + +type HealthCheck_HttpHealthCheck_ struct { + HttpHealthCheck *HealthCheck_HttpHealthCheck `protobuf:"bytes,8,opt,name=http_health_check,json=httpHealthCheck,proto3,oneof"` +} + +type HealthCheck_TcpHealthCheck_ struct { + TcpHealthCheck *HealthCheck_TcpHealthCheck `protobuf:"bytes,9,opt,name=tcp_health_check,json=tcpHealthCheck,proto3,oneof"` +} + +type HealthCheck_GrpcHealthCheck_ struct { + GrpcHealthCheck *HealthCheck_GrpcHealthCheck `protobuf:"bytes,11,opt,name=grpc_health_check,json=grpcHealthCheck,proto3,oneof"` +} + +type HealthCheck_CustomHealthCheck_ struct { + CustomHealthCheck *HealthCheck_CustomHealthCheck `protobuf:"bytes,13,opt,name=custom_health_check,json=customHealthCheck,proto3,oneof"` +} + +func (*HealthCheck_HttpHealthCheck_) isHealthCheck_HealthChecker() {} + +func (*HealthCheck_TcpHealthCheck_) isHealthCheck_HealthChecker() {} + +func (*HealthCheck_GrpcHealthCheck_) isHealthCheck_HealthChecker() {} + +func (*HealthCheck_CustomHealthCheck_) isHealthCheck_HealthChecker() {} + +func (m *HealthCheck) GetHealthChecker() isHealthCheck_HealthChecker { + if m != nil { + return m.HealthChecker + } + return nil +} + +func (m *HealthCheck) GetHttpHealthCheck() *HealthCheck_HttpHealthCheck { + if x, ok := m.GetHealthChecker().(*HealthCheck_HttpHealthCheck_); ok { + return x.HttpHealthCheck + } + return nil +} + +func (m *HealthCheck) GetTcpHealthCheck() *HealthCheck_TcpHealthCheck { + if x, ok := m.GetHealthChecker().(*HealthCheck_TcpHealthCheck_); ok { + return x.TcpHealthCheck + } + return nil +} + +func (m *HealthCheck) GetGrpcHealthCheck() *HealthCheck_GrpcHealthCheck { + if x, ok := m.GetHealthChecker().(*HealthCheck_GrpcHealthCheck_); ok { + return x.GrpcHealthCheck + } + return nil +} + +func (m 
*HealthCheck) GetCustomHealthCheck() *HealthCheck_CustomHealthCheck { + if x, ok := m.GetHealthChecker().(*HealthCheck_CustomHealthCheck_); ok { + return x.CustomHealthCheck + } + return nil +} + +func (m *HealthCheck) GetNoTrafficInterval() *duration.Duration { + if m != nil { + return m.NoTrafficInterval + } + return nil +} + +func (m *HealthCheck) GetUnhealthyInterval() *duration.Duration { + if m != nil { + return m.UnhealthyInterval + } + return nil +} + +func (m *HealthCheck) GetUnhealthyEdgeInterval() *duration.Duration { + if m != nil { + return m.UnhealthyEdgeInterval + } + return nil +} + +func (m *HealthCheck) GetHealthyEdgeInterval() *duration.Duration { + if m != nil { + return m.HealthyEdgeInterval + } + return nil +} + +func (m *HealthCheck) GetEventLogPath() string { + if m != nil { + return m.EventLogPath + } + return "" +} + +func (m *HealthCheck) GetAlwaysLogHealthCheckFailures() bool { + if m != nil { + return m.AlwaysLogHealthCheckFailures + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*HealthCheck) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HealthCheck_OneofMarshaler, _HealthCheck_OneofUnmarshaler, _HealthCheck_OneofSizer, []interface{}{ + (*HealthCheck_HttpHealthCheck_)(nil), + (*HealthCheck_TcpHealthCheck_)(nil), + (*HealthCheck_GrpcHealthCheck_)(nil), + (*HealthCheck_CustomHealthCheck_)(nil), + } +} + +func _HealthCheck_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HealthCheck) + // health_checker + switch x := m.HealthChecker.(type) { + case *HealthCheck_HttpHealthCheck_: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.HttpHealthCheck); err != nil { + return err + } + case *HealthCheck_TcpHealthCheck_: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TcpHealthCheck); err != nil { + return err + } + case *HealthCheck_GrpcHealthCheck_: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.GrpcHealthCheck); err != nil { + return err + } + case *HealthCheck_CustomHealthCheck_: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.CustomHealthCheck); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HealthCheck.HealthChecker has unexpected type %T", x) + } + return nil +} + +func _HealthCheck_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HealthCheck) + switch tag { + case 8: // health_checker.http_health_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HealthCheck_HttpHealthCheck) + err := b.DecodeMessage(msg) + m.HealthChecker = &HealthCheck_HttpHealthCheck_{msg} + return true, err + case 9: // health_checker.tcp_health_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HealthCheck_TcpHealthCheck) + err := b.DecodeMessage(msg) + m.HealthChecker = &HealthCheck_TcpHealthCheck_{msg} + return true, err + case 11: // health_checker.grpc_health_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(HealthCheck_GrpcHealthCheck) + err := b.DecodeMessage(msg) + m.HealthChecker = &HealthCheck_GrpcHealthCheck_{msg} + return true, err + case 13: // health_checker.custom_health_check + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg 
:= new(HealthCheck_CustomHealthCheck) + err := b.DecodeMessage(msg) + m.HealthChecker = &HealthCheck_CustomHealthCheck_{msg} + return true, err + default: + return false, nil + } +} + +func _HealthCheck_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HealthCheck) + // health_checker + switch x := m.HealthChecker.(type) { + case *HealthCheck_HttpHealthCheck_: + s := proto.Size(x.HttpHealthCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HealthCheck_TcpHealthCheck_: + s := proto.Size(x.TcpHealthCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HealthCheck_GrpcHealthCheck_: + s := proto.Size(x.GrpcHealthCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HealthCheck_CustomHealthCheck_: + s := proto.Size(x.CustomHealthCheck) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type HealthCheck_Payload struct { + // Types that are valid to be assigned to Payload: + // *HealthCheck_Payload_Text + // *HealthCheck_Payload_Binary + Payload isHealthCheck_Payload_Payload `protobuf_oneof:"payload"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_Payload) Reset() { *m = HealthCheck_Payload{} } +func (m *HealthCheck_Payload) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_Payload) ProtoMessage() {} +func (*HealthCheck_Payload) Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 0} +} +func (m *HealthCheck_Payload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_Payload.Unmarshal(m, b) +} +func (m *HealthCheck_Payload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_Payload.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_Payload) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_Payload.Merge(dst, src) +} +func (m *HealthCheck_Payload) XXX_Size() int { + return xxx_messageInfo_HealthCheck_Payload.Size(m) +} +func (m *HealthCheck_Payload) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_Payload.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_Payload proto.InternalMessageInfo + +type isHealthCheck_Payload_Payload interface { + isHealthCheck_Payload_Payload() +} + +type HealthCheck_Payload_Text struct { + Text string `protobuf:"bytes,1,opt,name=text,proto3,oneof"` +} + +type HealthCheck_Payload_Binary struct { + Binary []byte `protobuf:"bytes,2,opt,name=binary,proto3,oneof"` +} + +func (*HealthCheck_Payload_Text) isHealthCheck_Payload_Payload() {} + +func (*HealthCheck_Payload_Binary) isHealthCheck_Payload_Payload() {} + +func (m *HealthCheck_Payload) GetPayload() isHealthCheck_Payload_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *HealthCheck_Payload) GetText() string { + if x, ok := m.GetPayload().(*HealthCheck_Payload_Text); ok { + return x.Text + } + return "" +} + +func (m *HealthCheck_Payload) GetBinary() []byte { + if x, ok := m.GetPayload().(*HealthCheck_Payload_Binary); ok { + return x.Binary + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*HealthCheck_Payload) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HealthCheck_Payload_OneofMarshaler, _HealthCheck_Payload_OneofUnmarshaler, _HealthCheck_Payload_OneofSizer, []interface{}{ + (*HealthCheck_Payload_Text)(nil), + (*HealthCheck_Payload_Binary)(nil), + } +} + +func _HealthCheck_Payload_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HealthCheck_Payload) + // payload + switch x := m.Payload.(type) { + case *HealthCheck_Payload_Text: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Text) + case *HealthCheck_Payload_Binary: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeRawBytes(x.Binary) + case nil: + default: + return fmt.Errorf("HealthCheck_Payload.Payload has unexpected type %T", x) + } + return nil +} + +func _HealthCheck_Payload_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HealthCheck_Payload) + switch tag { + case 1: // payload.text + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Payload = &HealthCheck_Payload_Text{x} + return true, err + case 2: // payload.binary + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeRawBytes(true) + m.Payload = &HealthCheck_Payload_Binary{x} + return true, err + default: + return false, nil + } +} + +func _HealthCheck_Payload_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HealthCheck_Payload) + // payload + switch x := m.Payload.(type) { + case *HealthCheck_Payload_Text: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Text))) + n += len(x.Text) + case *HealthCheck_Payload_Binary: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Binary))) + n += len(x.Binary) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type HealthCheck_HttpHealthCheck struct { + Host string `protobuf:"bytes,1,opt,name=host,proto3" json:"host,omitempty"` + Path string `protobuf:"bytes,2,opt,name=path,proto3" json:"path,omitempty"` + Send *HealthCheck_Payload `protobuf:"bytes,3,opt,name=send,proto3" json:"send,omitempty"` + Receive *HealthCheck_Payload `protobuf:"bytes,4,opt,name=receive,proto3" json:"receive,omitempty"` + ServiceName string `protobuf:"bytes,5,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + RequestHeadersToAdd []*base.HeaderValueOption `protobuf:"bytes,6,rep,name=request_headers_to_add,json=requestHeadersToAdd,proto3" json:"request_headers_to_add,omitempty"` + RequestHeadersToRemove []string `protobuf:"bytes,8,rep,name=request_headers_to_remove,json=requestHeadersToRemove,proto3" json:"request_headers_to_remove,omitempty"` + UseHttp2 bool `protobuf:"varint,7,opt,name=use_http2,json=useHttp2,proto3" json:"use_http2,omitempty"` + ExpectedStatuses []*_range.Int64Range `protobuf:"bytes,9,rep,name=expected_statuses,json=expectedStatuses,proto3" json:"expected_statuses,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_HttpHealthCheck) Reset() { *m = HealthCheck_HttpHealthCheck{} } +func (m *HealthCheck_HttpHealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_HttpHealthCheck) ProtoMessage() {} +func (*HealthCheck_HttpHealthCheck) Descriptor() 
([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 1} +} +func (m *HealthCheck_HttpHealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_HttpHealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck_HttpHealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_HttpHealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_HttpHealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_HttpHealthCheck.Merge(dst, src) +} +func (m *HealthCheck_HttpHealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck_HttpHealthCheck.Size(m) +} +func (m *HealthCheck_HttpHealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_HttpHealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_HttpHealthCheck proto.InternalMessageInfo + +func (m *HealthCheck_HttpHealthCheck) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +func (m *HealthCheck_HttpHealthCheck) GetPath() string { + if m != nil { + return m.Path + } + return "" +} + +func (m *HealthCheck_HttpHealthCheck) GetSend() *HealthCheck_Payload { + if m != nil { + return m.Send + } + return nil +} + +func (m *HealthCheck_HttpHealthCheck) GetReceive() *HealthCheck_Payload { + if m != nil { + return m.Receive + } + return nil +} + +func (m *HealthCheck_HttpHealthCheck) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *HealthCheck_HttpHealthCheck) GetRequestHeadersToAdd() []*base.HeaderValueOption { + if m != nil { + return m.RequestHeadersToAdd + } + return nil +} + +func (m *HealthCheck_HttpHealthCheck) GetRequestHeadersToRemove() []string { + if m != nil { + return m.RequestHeadersToRemove + } + return nil +} + +func (m *HealthCheck_HttpHealthCheck) GetUseHttp2() bool { + if m != nil { + return m.UseHttp2 + } + return false +} + +func (m *HealthCheck_HttpHealthCheck) GetExpectedStatuses() []*_range.Int64Range { + if m != nil { + return m.ExpectedStatuses + } + return nil +} + +type HealthCheck_TcpHealthCheck struct { + Send *HealthCheck_Payload `protobuf:"bytes,1,opt,name=send,proto3" json:"send,omitempty"` + Receive []*HealthCheck_Payload `protobuf:"bytes,2,rep,name=receive,proto3" json:"receive,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_TcpHealthCheck) Reset() { *m = HealthCheck_TcpHealthCheck{} } +func (m *HealthCheck_TcpHealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_TcpHealthCheck) ProtoMessage() {} +func (*HealthCheck_TcpHealthCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 2} +} +func (m *HealthCheck_TcpHealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_TcpHealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck_TcpHealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_TcpHealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_TcpHealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_TcpHealthCheck.Merge(dst, src) +} +func (m *HealthCheck_TcpHealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck_TcpHealthCheck.Size(m) +} +func (m *HealthCheck_TcpHealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_TcpHealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_TcpHealthCheck 
proto.InternalMessageInfo + +func (m *HealthCheck_TcpHealthCheck) GetSend() *HealthCheck_Payload { + if m != nil { + return m.Send + } + return nil +} + +func (m *HealthCheck_TcpHealthCheck) GetReceive() []*HealthCheck_Payload { + if m != nil { + return m.Receive + } + return nil +} + +type HealthCheck_RedisHealthCheck struct { + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_RedisHealthCheck) Reset() { *m = HealthCheck_RedisHealthCheck{} } +func (m *HealthCheck_RedisHealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_RedisHealthCheck) ProtoMessage() {} +func (*HealthCheck_RedisHealthCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 3} +} +func (m *HealthCheck_RedisHealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_RedisHealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck_RedisHealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_RedisHealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_RedisHealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_RedisHealthCheck.Merge(dst, src) +} +func (m *HealthCheck_RedisHealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck_RedisHealthCheck.Size(m) +} +func (m *HealthCheck_RedisHealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_RedisHealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_RedisHealthCheck proto.InternalMessageInfo + +func (m *HealthCheck_RedisHealthCheck) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +type HealthCheck_GrpcHealthCheck struct { + ServiceName string `protobuf:"bytes,1,opt,name=service_name,json=serviceName,proto3" json:"service_name,omitempty"` + Authority string `protobuf:"bytes,2,opt,name=authority,proto3" json:"authority,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_GrpcHealthCheck) Reset() { *m = HealthCheck_GrpcHealthCheck{} } +func (m *HealthCheck_GrpcHealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_GrpcHealthCheck) ProtoMessage() {} +func (*HealthCheck_GrpcHealthCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 4} +} +func (m *HealthCheck_GrpcHealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_GrpcHealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck_GrpcHealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_GrpcHealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_GrpcHealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_GrpcHealthCheck.Merge(dst, src) +} +func (m *HealthCheck_GrpcHealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck_GrpcHealthCheck.Size(m) +} +func (m *HealthCheck_GrpcHealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_GrpcHealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_GrpcHealthCheck proto.InternalMessageInfo + +func (m *HealthCheck_GrpcHealthCheck) GetServiceName() string { + if m != nil { + return m.ServiceName + } + return "" +} + +func (m *HealthCheck_GrpcHealthCheck) GetAuthority() string { + if m != nil { + 
return m.Authority + } + return "" +} + +type HealthCheck_CustomHealthCheck struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to ConfigType: + // *HealthCheck_CustomHealthCheck_Config + // *HealthCheck_CustomHealthCheck_TypedConfig + ConfigType isHealthCheck_CustomHealthCheck_ConfigType `protobuf_oneof:"config_type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheck_CustomHealthCheck) Reset() { *m = HealthCheck_CustomHealthCheck{} } +func (m *HealthCheck_CustomHealthCheck) String() string { return proto.CompactTextString(m) } +func (*HealthCheck_CustomHealthCheck) ProtoMessage() {} +func (*HealthCheck_CustomHealthCheck) Descriptor() ([]byte, []int) { + return fileDescriptor_health_check_96ed99a3bbe98749, []int{0, 5} +} +func (m *HealthCheck_CustomHealthCheck) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheck_CustomHealthCheck.Unmarshal(m, b) +} +func (m *HealthCheck_CustomHealthCheck) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheck_CustomHealthCheck.Marshal(b, m, deterministic) +} +func (dst *HealthCheck_CustomHealthCheck) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheck_CustomHealthCheck.Merge(dst, src) +} +func (m *HealthCheck_CustomHealthCheck) XXX_Size() int { + return xxx_messageInfo_HealthCheck_CustomHealthCheck.Size(m) +} +func (m *HealthCheck_CustomHealthCheck) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheck_CustomHealthCheck.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheck_CustomHealthCheck proto.InternalMessageInfo + +func (m *HealthCheck_CustomHealthCheck) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isHealthCheck_CustomHealthCheck_ConfigType interface { + isHealthCheck_CustomHealthCheck_ConfigType() +} + +type HealthCheck_CustomHealthCheck_Config struct { + Config *_struct.Struct `protobuf:"bytes,2,opt,name=config,proto3,oneof"` +} + +type HealthCheck_CustomHealthCheck_TypedConfig struct { + TypedConfig *any.Any `protobuf:"bytes,3,opt,name=typed_config,json=typedConfig,proto3,oneof"` +} + +func (*HealthCheck_CustomHealthCheck_Config) isHealthCheck_CustomHealthCheck_ConfigType() {} + +func (*HealthCheck_CustomHealthCheck_TypedConfig) isHealthCheck_CustomHealthCheck_ConfigType() {} + +func (m *HealthCheck_CustomHealthCheck) GetConfigType() isHealthCheck_CustomHealthCheck_ConfigType { + if m != nil { + return m.ConfigType + } + return nil +} + +func (m *HealthCheck_CustomHealthCheck) GetConfig() *_struct.Struct { + if x, ok := m.GetConfigType().(*HealthCheck_CustomHealthCheck_Config); ok { + return x.Config + } + return nil +} + +func (m *HealthCheck_CustomHealthCheck) GetTypedConfig() *any.Any { + if x, ok := m.GetConfigType().(*HealthCheck_CustomHealthCheck_TypedConfig); ok { + return x.TypedConfig + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*HealthCheck_CustomHealthCheck) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HealthCheck_CustomHealthCheck_OneofMarshaler, _HealthCheck_CustomHealthCheck_OneofUnmarshaler, _HealthCheck_CustomHealthCheck_OneofSizer, []interface{}{ + (*HealthCheck_CustomHealthCheck_Config)(nil), + (*HealthCheck_CustomHealthCheck_TypedConfig)(nil), + } +} + +func _HealthCheck_CustomHealthCheck_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HealthCheck_CustomHealthCheck) + // config_type + switch x := m.ConfigType.(type) { + case *HealthCheck_CustomHealthCheck_Config: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Config); err != nil { + return err + } + case *HealthCheck_CustomHealthCheck_TypedConfig: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TypedConfig); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HealthCheck_CustomHealthCheck.ConfigType has unexpected type %T", x) + } + return nil +} + +func _HealthCheck_CustomHealthCheck_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HealthCheck_CustomHealthCheck) + switch tag { + case 2: // config_type.config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(_struct.Struct) + err := b.DecodeMessage(msg) + m.ConfigType = &HealthCheck_CustomHealthCheck_Config{msg} + return true, err + case 3: // config_type.typed_config + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(any.Any) + err := b.DecodeMessage(msg) + m.ConfigType = &HealthCheck_CustomHealthCheck_TypedConfig{msg} + return true, err + default: + return false, nil + } +} + +func _HealthCheck_CustomHealthCheck_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HealthCheck_CustomHealthCheck) + // config_type + switch x := m.ConfigType.(type) { + case *HealthCheck_CustomHealthCheck_Config: + s := proto.Size(x.Config) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HealthCheck_CustomHealthCheck_TypedConfig: + s := proto.Size(x.TypedConfig) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*HealthCheck)(nil), "envoy.api.v2.core.HealthCheck") + proto.RegisterType((*HealthCheck_Payload)(nil), "envoy.api.v2.core.HealthCheck.Payload") + proto.RegisterType((*HealthCheck_HttpHealthCheck)(nil), "envoy.api.v2.core.HealthCheck.HttpHealthCheck") + proto.RegisterType((*HealthCheck_TcpHealthCheck)(nil), "envoy.api.v2.core.HealthCheck.TcpHealthCheck") + proto.RegisterType((*HealthCheck_RedisHealthCheck)(nil), "envoy.api.v2.core.HealthCheck.RedisHealthCheck") + proto.RegisterType((*HealthCheck_GrpcHealthCheck)(nil), "envoy.api.v2.core.HealthCheck.GrpcHealthCheck") + proto.RegisterType((*HealthCheck_CustomHealthCheck)(nil), "envoy.api.v2.core.HealthCheck.CustomHealthCheck") + proto.RegisterEnum("envoy.api.v2.core.HealthStatus", HealthStatus_name, HealthStatus_value) +} + +func init() { + proto.RegisterFile("envoy/api/v2/core/health_check.proto", fileDescriptor_health_check_96ed99a3bbe98749) +} + +var fileDescriptor_health_check_96ed99a3bbe98749 = []byte{ + // 1166 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0xa4, 0x96, 0xdf, 0x72, 0xdb, 0xc4, + 0x17, 0xc7, 0xad, 0xd8, 0x89, 0xed, 0x63, 0x27, 0x91, 0xd7, 0xbf, 0x26, 0xaa, 0x7f, 0x81, 0x1a, + 0x26, 0xc3, 0x64, 0x3a, 0x83, 0x0c, 0x2e, 0x94, 0x29, 0x57, 0xc4, 0x49, 0x5a, 0xbb, 0xb4, 0x6e, + 0x66, 0xeb, 0x94, 0xe9, 0x0c, 0x8c, 0xd8, 0x48, 0x27, 0xb6, 0xa8, 0xa2, 0x15, 0xab, 0x95, 0x5b, + 0xbf, 0x04, 0x17, 0x3c, 0x46, 0x6f, 0xe0, 0x92, 0xe1, 0xaa, 0x8f, 0x43, 0x79, 0x0a, 0x46, 0x2b, + 0xd9, 0xb1, 0xad, 0x30, 0x49, 0x86, 0x3b, 0xe9, 0x9c, 0xf3, 0xfd, 0xec, 0x59, 0x9d, 0x3f, 0x23, + 0xd8, 0x45, 0x7f, 0xcc, 0x27, 0x2d, 0x16, 0xb8, 0xad, 0x71, 0xbb, 0x65, 0x73, 0x81, 0xad, 0x11, + 0x32, 0x4f, 0x8e, 0x2c, 0x7b, 0x84, 0xf6, 0x2b, 0x33, 0x10, 0x5c, 0x72, 0x52, 0x53, 0x51, 0x26, + 0x0b, 0x5c, 0x73, 0xdc, 0x36, 0xe3, 0xa8, 0xc6, 0x4e, 0x56, 0x78, 0xca, 0x42, 0x4c, 0x04, 0x8d, + 0xad, 0xc4, 0x2b, 0x27, 0x01, 0xb6, 0x04, 0xf3, 0x87, 0x53, 0xfb, 0xed, 0x21, 0xe7, 0x43, 0x0f, + 0x5b, 0xea, 0xed, 0x34, 0x3a, 0x6b, 0x31, 0x7f, 0x92, 0xba, 0x3e, 0x5c, 0x76, 0x39, 0x91, 0x60, + 0xd2, 0xe5, 0x7e, 0xea, 0xdf, 0x59, 0xf6, 0x87, 0x52, 0x44, 0xb6, 0xfc, 0x37, 0xf5, 0x6b, 0xc1, + 0x82, 0x00, 0x45, 0x98, 0xfa, 0xb7, 0xc7, 0xcc, 0x73, 0x1d, 0x26, 0xb1, 0x35, 0x7d, 0x48, 0x1c, + 0x1f, 0xff, 0x5e, 0x87, 0x4a, 0x57, 0xdd, 0xf8, 0x20, 0xbe, 0x30, 0xd9, 0x87, 0xa2, 0x74, 0xcf, + 0x91, 0x47, 0xd2, 0xd0, 0x9a, 0xda, 0x5e, 0xa5, 0x7d, 0xdb, 0x4c, 0xd0, 0xe6, 0x14, 0x6d, 0x1e, + 0xa6, 0x89, 0x75, 0xaa, 0x7f, 0xbe, 0x7f, 0x97, 0x2f, 0xbe, 0xd5, 0x0a, 0x25, 0xed, 0x6e, 0x8e, + 0x4e, 0x75, 0xe4, 0x00, 0x4a, 0xae, 0x2f, 0x51, 0x8c, 0x99, 0x67, 0xac, 0xdc, 0x8c, 0x31, 0x13, + 0x92, 0x0e, 0x6c, 0x4e, 0x9f, 0xad, 0x9f, 0x5c, 0x29, 0x51, 0x18, 0xf9, 0x2b, 0x58, 0x74, 0x63, + 0xaa, 0x78, 0xac, 0x04, 0xe4, 0x3e, 0x6c, 0x2f, 0x31, 0xac, 0x00, 0x85, 0x8d, 0xbe, 0x34, 0x48, + 0x53, 0xdb, 0x5b, 0xa7, 0xb7, 0x16, 0x05, 0xc7, 0x89, 0x93, 0x3c, 0x85, 0x7a, 0xe4, 0x27, 0x6d, + 0x30, 0xb1, 0xe4, 0x48, 0x60, 0x38, 0xe2, 0x9e, 0x63, 0x14, 0xd4, 0xf9, 0x3b, 0x99, 0xf3, 0x4f, + 0x7a, 0xbe, 0xbc, 0xd7, 0x7e, 0xc1, 0xbc, 0x08, 0x29, 0x99, 0x09, 0x07, 0x53, 0x1d, 0xe9, 0x41, + 0x2d, 0x0b, 0x5b, 0xbd, 0x06, 0x4c, 0xcf, 0xa0, 0xbe, 0x82, 0x12, 0xf3, 0xa4, 0x15, 0x70, 0x21, + 0x8d, 0xb5, 0x6b, 0x10, 0x8a, 0xcc, 0x93, 0xc7, 0x5c, 0x48, 0x72, 0x04, 0xba, 0xc0, 0x28, 0x44, + 0xcb, 0xe6, 0xbe, 0x8f, 0x76, 0xfc, 0xb9, 0x8c, 0xa2, 0x02, 0x34, 0x32, 0x80, 0x0e, 0xe7, 0x5e, + 0x22, 0xdf, 0x54, 0x9a, 0x83, 0x99, 0x84, 0x7c, 0x0f, 0xb5, 0x91, 0x94, 0x81, 0x35, 0x3f, 0x23, + 0x46, 0x49, 0x71, 0x4c, 0x33, 0x33, 0x24, 0xe6, 0x5c, 0x63, 0x99, 0x5d, 0x29, 0x83, 0xb9, 0xf7, + 0x6e, 0x8e, 0x6e, 0x8e, 0x16, 0x4d, 0xe4, 0x25, 0xe8, 0xd2, 0x5e, 0x82, 0x97, 0x15, 0xfc, 0xd3, + 0x2b, 0xe0, 0x03, 0x7b, 0x89, 0xbd, 0x21, 0x17, 0x2c, 0x71, 0xe2, 0x43, 0x11, 0xd8, 0x8b, 0xec, + 0xca, 0xb5, 0x12, 0x7f, 0x24, 0x02, 0x7b, 0x29, 0xf1, 0xe1, 0xa2, 0x89, 0x9c, 0x42, 0xdd, 0x8e, + 0x42, 0xc9, 0xcf, 0x17, 0xf9, 0xeb, 0x8a, 0xff, 0xd9, 0x15, 0xfc, 0x03, 0xa5, 0x5c, 0x3c, 0xa1, + 0x66, 0x2f, 0x1b, 0xc9, 0x09, 0xd4, 0x7d, 0x6e, 0x49, 0xc1, 0xce, 0xce, 0x5c, 0xdb, 0x9a, 0x0d, + 0x58, 0xf5, 0xaa, 0x01, 0x83, 0x78, 0xc0, 0x56, 0xdf, 0x6a, 0x2b, 0x77, 0x73, 0xb4, 0xe6, 0xf3, + 0x41, 0x02, 0xe8, 0x4d, 0xe7, 0x6c, 0x00, 0x17, 0x2d, 0x7b, 0x41, 0xdd, 0xb8, 0x11, 0x75, 0x06, + 0x98, 0x51, 0x7f, 0x80, 0xed, 0x0b, 0x2a, 0x3a, 0x43, 0xbc, 0x40, 0x6f, 0xde, 0x04, 0x7d, 0x6b, + 0x46, 0x39, 0x72, 0x86, 0x38, 0xc3, 0xbf, 0x84, 0x5b, 0x97, 0xc3, 0xf5, 0x9b, 0xc0, 0xeb, 0x97, + 0xa1, 0x77, 0x61, 0x03, 0xc7, 0xe8, 0x4b, 0xcb, 0xe3, 0x43, 0x2b, 0x60, 
0x72, 0x64, 0xd4, 0x9a, + 0xda, 0x5e, 0x99, 0x56, 0x95, 0xf5, 0x09, 0x1f, 0x1e, 0x33, 0x39, 0x22, 0x0f, 0xa1, 0xc9, 0xbc, + 0xd7, 0x6c, 0x12, 0xaa, 0xb0, 0xf9, 0xa2, 0x5b, 0x67, 0xcc, 0xf5, 0x22, 0x81, 0xa1, 0x51, 0x6f, + 0x6a, 0x7b, 0x25, 0xba, 0x93, 0xc4, 0x3d, 0xe1, 0xc3, 0xb9, 0x62, 0x3e, 0x4c, 0x63, 0x1a, 0x2f, + 0xa0, 0x78, 0xcc, 0x26, 0x1e, 0x67, 0x0e, 0xb9, 0x03, 0x05, 0x89, 0x6f, 0x92, 0xad, 0x5b, 0xee, + 0x94, 0xe3, 0x3c, 0x0b, 0x62, 0xa5, 0xa9, 0x75, 0x73, 0x54, 0x39, 0x88, 0x01, 0x6b, 0xa7, 0xae, + 0xcf, 0xc4, 0x44, 0x2d, 0xd5, 0x6a, 0x37, 0x47, 0xd3, 0xf7, 0x8e, 0x0e, 0xc5, 0x20, 0xa5, 0xac, + 0xfe, 0xf1, 0xfe, 0x5d, 0x5e, 0x6b, 0xfc, 0x9d, 0x87, 0xcd, 0xa5, 0x81, 0x23, 0x04, 0x0a, 0x23, + 0x1e, 0xa6, 0x07, 0x50, 0xf5, 0x4c, 0x3e, 0x80, 0x82, 0xba, 0xe3, 0xca, 0xd2, 0xa1, 0x54, 0x99, + 0xc9, 0xd7, 0x50, 0x08, 0xd1, 0x77, 0xd2, 0xcd, 0xfb, 0xc9, 0x15, 0x8d, 0x9c, 0xde, 0x84, 0x2a, + 0x0d, 0xf9, 0x06, 0x8a, 0x02, 0x6d, 0x74, 0xc7, 0x98, 0x2e, 0xce, 0xeb, 0xca, 0xa7, 0x32, 0xf2, + 0x11, 0x54, 0x43, 0x14, 0x63, 0xd7, 0x46, 0xcb, 0x67, 0xe7, 0xa8, 0x56, 0x66, 0x99, 0x56, 0x52, + 0x5b, 0x9f, 0x9d, 0x23, 0x39, 0x83, 0x2d, 0x81, 0x3f, 0x47, 0x18, 0xca, 0xb8, 0x08, 0x0e, 0x8a, + 0xd0, 0x92, 0xdc, 0x62, 0x8e, 0x63, 0xac, 0x35, 0xf3, 0x7b, 0x95, 0xf6, 0xee, 0xe5, 0x67, 0x3a, + 0x28, 0xd4, 0x82, 0x7b, 0x16, 0xa8, 0xa6, 0xa8, 0xc4, 0xf7, 0x5e, 0xfb, 0x55, 0xcb, 0xeb, 0x7f, + 0x15, 0x69, 0x3d, 0x05, 0x26, 0x61, 0xe1, 0x80, 0xef, 0x3b, 0x0e, 0x79, 0x00, 0xb7, 0x2f, 0x39, + 0x47, 0xe0, 0x39, 0x1f, 0xa3, 0x51, 0x6a, 0xe6, 0xf7, 0xca, 0x74, 0x6b, 0x59, 0x47, 0x95, 0x97, + 0xfc, 0x1f, 0xca, 0xf1, 0xde, 0x8d, 0x77, 0x5d, 0x5b, 0xad, 0xdc, 0x12, 0x2d, 0x45, 0x21, 0xc6, + 0xd5, 0x69, 0x93, 0x03, 0xa8, 0xe1, 0x9b, 0x00, 0x6d, 0x89, 0x8e, 0x15, 0x4a, 0x26, 0xa3, 0x10, + 0x43, 0xa3, 0xac, 0x52, 0xdf, 0x4a, 0x53, 0x8f, 0xff, 0x21, 0xcc, 0x9e, 0x2f, 0xef, 0x7f, 0x41, + 0xe3, 0x1f, 0x09, 0xaa, 0x4f, 0x05, 0xcf, 0xd3, 0xf8, 0xc6, 0x2f, 0x1a, 0x6c, 0x2c, 0x2e, 0xc0, + 0x59, 0xe1, 0xb4, 0xff, 0x56, 0xb8, 0x15, 0x95, 0xc9, 0x4d, 0x0b, 0xd7, 0xd8, 0x05, 0x9d, 0xa2, + 0xe3, 0x86, 0xf3, 0x19, 0xe9, 0x90, 0x7f, 0x85, 0x93, 0xb4, 0xf9, 0xe2, 0xc7, 0x06, 0x85, 0xcd, + 0xa5, 0xd5, 0x9a, 0xa9, 0xb8, 0x96, 0xad, 0xf8, 0x0e, 0x94, 0x59, 0x24, 0x47, 0x5c, 0xb8, 0x32, + 0x19, 0x84, 0x32, 0xbd, 0x30, 0x34, 0x7e, 0xd3, 0xa0, 0x96, 0xd9, 0xa7, 0x71, 0x97, 0x5f, 0xe0, + 0x16, 0xba, 0x3c, 0x36, 0x93, 0xcf, 0x61, 0xcd, 0xe6, 0xfe, 0x99, 0x3b, 0x4c, 0xff, 0x56, 0xb6, + 0x33, 0xeb, 0xe3, 0xb9, 0xfa, 0xd5, 0x8a, 0x27, 0x2e, 0x09, 0x24, 0x0f, 0xa0, 0x1a, 0xd7, 0xc5, + 0xb1, 0x52, 0x61, 0x32, 0x20, 0xff, 0xcb, 0x08, 0xf7, 0xfd, 0x49, 0x37, 0x47, 0x2b, 0x2a, 0xf6, + 0x40, 0x85, 0x76, 0xd6, 0xa1, 0x92, 0x88, 0xac, 0xd8, 0xda, 0xd9, 0x86, 0x8d, 0xf9, 0xf5, 0x81, + 0x22, 0x1d, 0xe1, 0xc7, 0x85, 0x12, 0xe8, 0x15, 0x4a, 0x44, 0xfc, 0x21, 0x17, 0x36, 0xcc, 0xdd, + 0x1f, 0xa1, 0x9a, 0xdc, 0x2e, 0xe9, 0x00, 0x52, 0x81, 0xe2, 0x49, 0xff, 0xdb, 0xfe, 0xb3, 0xef, + 0xfa, 0x7a, 0x2e, 0x7e, 0xe9, 0x1e, 0xed, 0x3f, 0x19, 0x74, 0x5f, 0xea, 0x1a, 0x59, 0x87, 0xf2, + 0x49, 0x7f, 0xfa, 0xba, 0x42, 0xaa, 0x50, 0x3a, 0xa4, 0xfb, 0xbd, 0x7e, 0xaf, 0xff, 0x48, 0xcf, + 0xc7, 0x91, 0x83, 0xde, 0xd3, 0xa3, 0x67, 0x27, 0x03, 0xbd, 0xa0, 0x5c, 0x47, 0x8f, 0xe8, 0xfe, + 0xe1, 0xd1, 0xa1, 0xbe, 0xda, 0xf9, 0x12, 0xee, 0xb8, 0x3c, 0xa9, 0x7a, 0x20, 0xf8, 0x9b, 0x49, + 0xb6, 0x01, 0x3a, 0xfa, 0xdc, 0x07, 0x3e, 0x8e, 0xaf, 0x7b, 0xac, 0x9d, 0xae, 0xa9, 0x7b, 0xdf, + 0xfb, 0x27, 0x00, 0x00, 0xff, 0xff, 0x31, 0x86, 0x71, 0x0b, 0x55, 0x0b, 0x00, 0x00, +} diff --git 
a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/protocol/protocol.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/protocol/protocol.pb.go new file mode 100755 index 0000000..1c07ca5 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/protocol/protocol.pb.go @@ -0,0 +1,311 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/core/protocol.proto + +package envoy_api_v2_core + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TcpProtocolOptions struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TcpProtocolOptions) Reset() { *m = TcpProtocolOptions{} } +func (m *TcpProtocolOptions) String() string { return proto.CompactTextString(m) } +func (*TcpProtocolOptions) ProtoMessage() {} +func (*TcpProtocolOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_protocol_2e969372c85b867d, []int{0} +} +func (m *TcpProtocolOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TcpProtocolOptions.Unmarshal(m, b) +} +func (m *TcpProtocolOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TcpProtocolOptions.Marshal(b, m, deterministic) +} +func (dst *TcpProtocolOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_TcpProtocolOptions.Merge(dst, src) +} +func (m *TcpProtocolOptions) XXX_Size() int { + return xxx_messageInfo_TcpProtocolOptions.Size(m) +} +func (m *TcpProtocolOptions) XXX_DiscardUnknown() { + xxx_messageInfo_TcpProtocolOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_TcpProtocolOptions proto.InternalMessageInfo + +type HttpProtocolOptions struct { + IdleTimeout *duration.Duration `protobuf:"bytes,1,opt,name=idle_timeout,json=idleTimeout,proto3" json:"idle_timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HttpProtocolOptions) Reset() { *m = HttpProtocolOptions{} } +func (m *HttpProtocolOptions) String() string { return proto.CompactTextString(m) } +func (*HttpProtocolOptions) ProtoMessage() {} +func (*HttpProtocolOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_protocol_2e969372c85b867d, []int{1} +} +func (m *HttpProtocolOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HttpProtocolOptions.Unmarshal(m, b) +} +func (m *HttpProtocolOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HttpProtocolOptions.Marshal(b, m, deterministic) +} +func (dst *HttpProtocolOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_HttpProtocolOptions.Merge(dst, src) +} +func (m *HttpProtocolOptions) XXX_Size() int { + return xxx_messageInfo_HttpProtocolOptions.Size(m) +} 
+func (m *HttpProtocolOptions) XXX_DiscardUnknown() { + xxx_messageInfo_HttpProtocolOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_HttpProtocolOptions proto.InternalMessageInfo + +func (m *HttpProtocolOptions) GetIdleTimeout() *duration.Duration { + if m != nil { + return m.IdleTimeout + } + return nil +} + +type Http1ProtocolOptions struct { + AllowAbsoluteUrl *wrappers.BoolValue `protobuf:"bytes,1,opt,name=allow_absolute_url,json=allowAbsoluteUrl,proto3" json:"allow_absolute_url,omitempty"` + AcceptHttp_10 bool `protobuf:"varint,2,opt,name=accept_http_10,json=acceptHttp10,proto3" json:"accept_http_10,omitempty"` + DefaultHostForHttp_10 string `protobuf:"bytes,3,opt,name=default_host_for_http_10,json=defaultHostForHttp10,proto3" json:"default_host_for_http_10,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Http1ProtocolOptions) Reset() { *m = Http1ProtocolOptions{} } +func (m *Http1ProtocolOptions) String() string { return proto.CompactTextString(m) } +func (*Http1ProtocolOptions) ProtoMessage() {} +func (*Http1ProtocolOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_protocol_2e969372c85b867d, []int{2} +} +func (m *Http1ProtocolOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Http1ProtocolOptions.Unmarshal(m, b) +} +func (m *Http1ProtocolOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Http1ProtocolOptions.Marshal(b, m, deterministic) +} +func (dst *Http1ProtocolOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Http1ProtocolOptions.Merge(dst, src) +} +func (m *Http1ProtocolOptions) XXX_Size() int { + return xxx_messageInfo_Http1ProtocolOptions.Size(m) +} +func (m *Http1ProtocolOptions) XXX_DiscardUnknown() { + xxx_messageInfo_Http1ProtocolOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_Http1ProtocolOptions proto.InternalMessageInfo + +func (m *Http1ProtocolOptions) GetAllowAbsoluteUrl() *wrappers.BoolValue { + if m != nil { + return m.AllowAbsoluteUrl + } + return nil +} + +func (m *Http1ProtocolOptions) GetAcceptHttp_10() bool { + if m != nil { + return m.AcceptHttp_10 + } + return false +} + +func (m *Http1ProtocolOptions) GetDefaultHostForHttp_10() string { + if m != nil { + return m.DefaultHostForHttp_10 + } + return "" +} + +type Http2ProtocolOptions struct { + HpackTableSize *wrappers.UInt32Value `protobuf:"bytes,1,opt,name=hpack_table_size,json=hpackTableSize,proto3" json:"hpack_table_size,omitempty"` + MaxConcurrentStreams *wrappers.UInt32Value `protobuf:"bytes,2,opt,name=max_concurrent_streams,json=maxConcurrentStreams,proto3" json:"max_concurrent_streams,omitempty"` + InitialStreamWindowSize *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=initial_stream_window_size,json=initialStreamWindowSize,proto3" json:"initial_stream_window_size,omitempty"` + InitialConnectionWindowSize *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=initial_connection_window_size,json=initialConnectionWindowSize,proto3" json:"initial_connection_window_size,omitempty"` + AllowConnect bool `protobuf:"varint,5,opt,name=allow_connect,json=allowConnect,proto3" json:"allow_connect,omitempty"` + AllowMetadata bool `protobuf:"varint,6,opt,name=allow_metadata,json=allowMetadata,proto3" json:"allow_metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Http2ProtocolOptions) Reset() { *m = Http2ProtocolOptions{} } +func (m 
*Http2ProtocolOptions) String() string { return proto.CompactTextString(m) } +func (*Http2ProtocolOptions) ProtoMessage() {} +func (*Http2ProtocolOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_protocol_2e969372c85b867d, []int{3} +} +func (m *Http2ProtocolOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Http2ProtocolOptions.Unmarshal(m, b) +} +func (m *Http2ProtocolOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Http2ProtocolOptions.Marshal(b, m, deterministic) +} +func (dst *Http2ProtocolOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_Http2ProtocolOptions.Merge(dst, src) +} +func (m *Http2ProtocolOptions) XXX_Size() int { + return xxx_messageInfo_Http2ProtocolOptions.Size(m) +} +func (m *Http2ProtocolOptions) XXX_DiscardUnknown() { + xxx_messageInfo_Http2ProtocolOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_Http2ProtocolOptions proto.InternalMessageInfo + +func (m *Http2ProtocolOptions) GetHpackTableSize() *wrappers.UInt32Value { + if m != nil { + return m.HpackTableSize + } + return nil +} + +func (m *Http2ProtocolOptions) GetMaxConcurrentStreams() *wrappers.UInt32Value { + if m != nil { + return m.MaxConcurrentStreams + } + return nil +} + +func (m *Http2ProtocolOptions) GetInitialStreamWindowSize() *wrappers.UInt32Value { + if m != nil { + return m.InitialStreamWindowSize + } + return nil +} + +func (m *Http2ProtocolOptions) GetInitialConnectionWindowSize() *wrappers.UInt32Value { + if m != nil { + return m.InitialConnectionWindowSize + } + return nil +} + +func (m *Http2ProtocolOptions) GetAllowConnect() bool { + if m != nil { + return m.AllowConnect + } + return false +} + +func (m *Http2ProtocolOptions) GetAllowMetadata() bool { + if m != nil { + return m.AllowMetadata + } + return false +} + +type GrpcProtocolOptions struct { + Http2ProtocolOptions *Http2ProtocolOptions `protobuf:"bytes,1,opt,name=http2_protocol_options,json=http2ProtocolOptions,proto3" json:"http2_protocol_options,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcProtocolOptions) Reset() { *m = GrpcProtocolOptions{} } +func (m *GrpcProtocolOptions) String() string { return proto.CompactTextString(m) } +func (*GrpcProtocolOptions) ProtoMessage() {} +func (*GrpcProtocolOptions) Descriptor() ([]byte, []int) { + return fileDescriptor_protocol_2e969372c85b867d, []int{4} +} +func (m *GrpcProtocolOptions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcProtocolOptions.Unmarshal(m, b) +} +func (m *GrpcProtocolOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcProtocolOptions.Marshal(b, m, deterministic) +} +func (dst *GrpcProtocolOptions) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcProtocolOptions.Merge(dst, src) +} +func (m *GrpcProtocolOptions) XXX_Size() int { + return xxx_messageInfo_GrpcProtocolOptions.Size(m) +} +func (m *GrpcProtocolOptions) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcProtocolOptions.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcProtocolOptions proto.InternalMessageInfo + +func (m *GrpcProtocolOptions) GetHttp2ProtocolOptions() *Http2ProtocolOptions { + if m != nil { + return m.Http2ProtocolOptions + } + return nil +} + +func init() { + proto.RegisterType((*TcpProtocolOptions)(nil), "envoy.api.v2.core.TcpProtocolOptions") + proto.RegisterType((*HttpProtocolOptions)(nil), "envoy.api.v2.core.HttpProtocolOptions") + 
proto.RegisterType((*Http1ProtocolOptions)(nil), "envoy.api.v2.core.Http1ProtocolOptions") + proto.RegisterType((*Http2ProtocolOptions)(nil), "envoy.api.v2.core.Http2ProtocolOptions") + proto.RegisterType((*GrpcProtocolOptions)(nil), "envoy.api.v2.core.GrpcProtocolOptions") +} + +func init() { + proto.RegisterFile("envoy/api/v2/core/protocol.proto", fileDescriptor_protocol_2e969372c85b867d) +} + +var fileDescriptor_protocol_2e969372c85b867d = []byte{ + // 556 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x93, 0x4f, 0x6f, 0xd3, 0x4c, + 0x10, 0xc6, 0xe5, 0x37, 0x2f, 0xa5, 0x6c, 0xff, 0xd0, 0xba, 0x51, 0x6b, 0x0a, 0x2a, 0x51, 0x00, + 0x11, 0xf5, 0x60, 0xb7, 0xae, 0xc4, 0x89, 0x0b, 0x29, 0x2a, 0xe5, 0x80, 0xa8, 0xdc, 0x16, 0x4e, + 0x68, 0xb5, 0x59, 0x6f, 0x9a, 0x15, 0x1b, 0xcf, 0x6a, 0x3d, 0x4e, 0xd2, 0x7e, 0x34, 0x0e, 0x88, + 0xaf, 0xc3, 0x99, 0x0f, 0x60, 0x64, 0xef, 0x26, 0x82, 0xa4, 0x12, 0x88, 0x53, 0xac, 0x99, 0xe7, + 0xf9, 0x3d, 0x33, 0xf1, 0x98, 0xb4, 0x44, 0x36, 0x82, 0xeb, 0x88, 0x69, 0x19, 0x8d, 0xe2, 0x88, + 0x83, 0x11, 0x91, 0x36, 0x80, 0xc0, 0x41, 0x85, 0xf5, 0x83, 0xbf, 0x59, 0x2b, 0x42, 0xa6, 0x65, + 0x38, 0x8a, 0xc3, 0x4a, 0xb1, 0xbb, 0x77, 0x05, 0x70, 0xa5, 0x9c, 0xb2, 0x57, 0xf4, 0xa3, 0xb4, + 0x30, 0x0c, 0x25, 0x64, 0xd6, 0xb2, 0xd8, 0x1f, 0x1b, 0xa6, 0xb5, 0x30, 0xb9, 0xeb, 0xef, 0x8c, + 0x98, 0x92, 0x29, 0x43, 0x11, 0x4d, 0x1f, 0x6c, 0xa3, 0xdd, 0x24, 0xfe, 0x05, 0xd7, 0x67, 0x6e, + 0x80, 0xf7, 0xba, 0x62, 0xe6, 0xed, 0x73, 0xb2, 0x75, 0x8a, 0x38, 0x5f, 0xf6, 0x5f, 0x92, 0x55, + 0x99, 0x2a, 0x41, 0x51, 0x0e, 0x05, 0x14, 0x18, 0x78, 0x2d, 0xaf, 0xb3, 0x12, 0x3f, 0x08, 0x6d, + 0x78, 0x38, 0x0d, 0x0f, 0x5f, 0xbb, 0xe1, 0x92, 0x95, 0x4a, 0x7e, 0x61, 0xd5, 0xed, 0xaf, 0x1e, + 0x69, 0x56, 0xd4, 0xc3, 0x79, 0xec, 0x29, 0xf1, 0x99, 0x52, 0x30, 0xa6, 0xac, 0x97, 0x83, 0x2a, + 0x50, 0xd0, 0xc2, 0x28, 0x07, 0xdf, 0x5d, 0x80, 0x77, 0x01, 0xd4, 0x07, 0xa6, 0x0a, 0x91, 0x6c, + 0xd4, 0xae, 0x57, 0xce, 0x74, 0x69, 0x94, 0xff, 0x94, 0xac, 0x33, 0xce, 0x85, 0x46, 0x3a, 0x40, + 0xd4, 0xf4, 0xf0, 0x20, 0xf8, 0xaf, 0xe5, 0x75, 0x96, 0x93, 0x55, 0x5b, 0xad, 0xd3, 0x0f, 0xfc, + 0x17, 0x24, 0x48, 0x45, 0x9f, 0x15, 0x0a, 0xe9, 0x00, 0x72, 0xa4, 0x7d, 0x30, 0x33, 0x7d, 0xa3, + 0xe5, 0x75, 0xee, 0x25, 0x4d, 0xd7, 0x3f, 0x85, 0x1c, 0x4f, 0xc0, 0x58, 0x5f, 0xfb, 0x47, 0xc3, + 0x2e, 0x10, 0xcf, 0x2f, 0x70, 0x42, 0x36, 0x06, 0x9a, 0xf1, 0xcf, 0x14, 0x59, 0x4f, 0x09, 0x9a, + 0xcb, 0x1b, 0xe1, 0xc6, 0x7f, 0xb4, 0x30, 0xfe, 0xe5, 0xdb, 0x0c, 0x8f, 0x62, 0xbb, 0xc0, 0x7a, + 0xed, 0xba, 0xa8, 0x4c, 0xe7, 0xf2, 0x46, 0xf8, 0x9c, 0x6c, 0x0f, 0xd9, 0x84, 0x72, 0xc8, 0x78, + 0x61, 0x8c, 0xc8, 0x90, 0xe6, 0x68, 0x04, 0x1b, 0xe6, 0xf5, 0x1a, 0x7f, 0xa0, 0x75, 0xef, 0x7f, + 0xf9, 0xfe, 0xad, 0x41, 0xf6, 0x97, 0x83, 0xb2, 0x2c, 0xcb, 0xbb, 0x1d, 0x2f, 0x69, 0x0e, 0xd9, + 0xe4, 0x78, 0xc6, 0x3a, 0xb7, 0x28, 0x5f, 0x91, 0x5d, 0x99, 0x49, 0x94, 0x4c, 0x39, 0x3a, 0x1d, + 0xcb, 0x2c, 0x85, 0xb1, 0x1d, 0xbb, 0xf1, 0x17, 0x41, 0x9b, 0x55, 0xd0, 0xea, 0x3e, 0x71, 0x41, + 0x65, 0xd9, 0x48, 0x76, 0x1c, 0xd2, 0x86, 0x7c, 0xac, 0x81, 0xf5, 0x4a, 0x48, 0xf6, 0xa6, 0x69, + 0x1c, 0xb2, 0x4c, 0xf0, 0xea, 0x1f, 0xfb, 0x2d, 0xf1, 0xff, 0x7f, 0x4b, 0x7c, 0xe8, 0xb0, 0xc7, + 0x33, 0xea, 0x2f, 0xa9, 0x4f, 0xc8, 0x9a, 0xbd, 0x28, 0x97, 0x19, 0xdc, 0x71, 0x67, 0x50, 0x15, + 0x9d, 0xc3, 0x7f, 0x46, 0xd6, 0xad, 0x68, 0x28, 0x90, 0xa5, 0x0c, 0x59, 0xb0, 0x54, 0xab, 0xac, + 0xf5, 0x9d, 0x2b, 0xb6, 0x91, 0x6c, 0xbd, 0x31, 0x9a, 0xcf, 0xbf, 0xf3, 0x4f, 0x64, 0xbb, 0xba, + 0x99, 0x98, 
0x4e, 0x3f, 0x5e, 0x0a, 0xb6, 0xe3, 0xde, 0xfc, 0xf3, 0x70, 0xe1, 0x2b, 0x0e, 0x6f, + 0x3b, 0x9e, 0xa4, 0x39, 0xb8, 0xa5, 0xda, 0x8d, 0xc9, 0x63, 0x09, 0x16, 0xa1, 0x0d, 0x4c, 0xae, + 0x17, 0x69, 0xdd, 0xb5, 0xa9, 0xa7, 0xfe, 0x3d, 0xf3, 0x7a, 0x4b, 0xf5, 0x28, 0x47, 0x3f, 0x03, + 0x00, 0x00, 0xff, 0xff, 0x8f, 0x72, 0x53, 0x8f, 0x62, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery/discovery.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery/discovery.pb.go new file mode 100755 index 0000000..629c770 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery/discovery.pb.go @@ -0,0 +1,446 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/discovery.proto + +package v2 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import status "google.golang.org/genproto/googleapis/rpc/status" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DiscoveryRequest struct { + VersionInfo string `protobuf:"bytes,1,opt,name=version_info,json=versionInfo,proto3" json:"version_info,omitempty"` + Node *base.Node `protobuf:"bytes,2,opt,name=node,proto3" json:"node,omitempty"` + ResourceNames []string `protobuf:"bytes,3,rep,name=resource_names,json=resourceNames,proto3" json:"resource_names,omitempty"` + TypeUrl string `protobuf:"bytes,4,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + ResponseNonce string `protobuf:"bytes,5,opt,name=response_nonce,json=responseNonce,proto3" json:"response_nonce,omitempty"` + ErrorDetail *status.Status `protobuf:"bytes,6,opt,name=error_detail,json=errorDetail,proto3" json:"error_detail,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiscoveryRequest) Reset() { *m = DiscoveryRequest{} } +func (m *DiscoveryRequest) String() string { return proto.CompactTextString(m) } +func (*DiscoveryRequest) ProtoMessage() {} +func (*DiscoveryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_a1ffda4a09a0e500, []int{0} +} +func (m *DiscoveryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiscoveryRequest.Unmarshal(m, b) +} +func (m *DiscoveryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiscoveryRequest.Marshal(b, m, deterministic) +} +func (dst *DiscoveryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiscoveryRequest.Merge(dst, src) +} +func (m *DiscoveryRequest) XXX_Size() int { + return xxx_messageInfo_DiscoveryRequest.Size(m) +} +func (m *DiscoveryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DiscoveryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DiscoveryRequest proto.InternalMessageInfo + +func (m *DiscoveryRequest) GetVersionInfo() string { + if m != nil { + return m.VersionInfo + } + return "" +} + +func (m 
*DiscoveryRequest) GetNode() *base.Node { + if m != nil { + return m.Node + } + return nil +} + +func (m *DiscoveryRequest) GetResourceNames() []string { + if m != nil { + return m.ResourceNames + } + return nil +} + +func (m *DiscoveryRequest) GetTypeUrl() string { + if m != nil { + return m.TypeUrl + } + return "" +} + +func (m *DiscoveryRequest) GetResponseNonce() string { + if m != nil { + return m.ResponseNonce + } + return "" +} + +func (m *DiscoveryRequest) GetErrorDetail() *status.Status { + if m != nil { + return m.ErrorDetail + } + return nil +} + +type DiscoveryResponse struct { + VersionInfo string `protobuf:"bytes,1,opt,name=version_info,json=versionInfo,proto3" json:"version_info,omitempty"` + Resources []*any.Any `protobuf:"bytes,2,rep,name=resources,proto3" json:"resources,omitempty"` + Canary bool `protobuf:"varint,3,opt,name=canary,proto3" json:"canary,omitempty"` + TypeUrl string `protobuf:"bytes,4,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + Nonce string `protobuf:"bytes,5,opt,name=nonce,proto3" json:"nonce,omitempty"` + ControlPlane *base.ControlPlane `protobuf:"bytes,6,opt,name=control_plane,json=controlPlane,proto3" json:"control_plane,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DiscoveryResponse) Reset() { *m = DiscoveryResponse{} } +func (m *DiscoveryResponse) String() string { return proto.CompactTextString(m) } +func (*DiscoveryResponse) ProtoMessage() {} +func (*DiscoveryResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_a1ffda4a09a0e500, []int{1} +} +func (m *DiscoveryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DiscoveryResponse.Unmarshal(m, b) +} +func (m *DiscoveryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DiscoveryResponse.Marshal(b, m, deterministic) +} +func (dst *DiscoveryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DiscoveryResponse.Merge(dst, src) +} +func (m *DiscoveryResponse) XXX_Size() int { + return xxx_messageInfo_DiscoveryResponse.Size(m) +} +func (m *DiscoveryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DiscoveryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DiscoveryResponse proto.InternalMessageInfo + +func (m *DiscoveryResponse) GetVersionInfo() string { + if m != nil { + return m.VersionInfo + } + return "" +} + +func (m *DiscoveryResponse) GetResources() []*any.Any { + if m != nil { + return m.Resources + } + return nil +} + +func (m *DiscoveryResponse) GetCanary() bool { + if m != nil { + return m.Canary + } + return false +} + +func (m *DiscoveryResponse) GetTypeUrl() string { + if m != nil { + return m.TypeUrl + } + return "" +} + +func (m *DiscoveryResponse) GetNonce() string { + if m != nil { + return m.Nonce + } + return "" +} + +func (m *DiscoveryResponse) GetControlPlane() *base.ControlPlane { + if m != nil { + return m.ControlPlane + } + return nil +} + +type DeltaDiscoveryRequest struct { + Node *base.Node `protobuf:"bytes,1,opt,name=node,proto3" json:"node,omitempty"` + TypeUrl string `protobuf:"bytes,2,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` + ResourceNamesSubscribe []string `protobuf:"bytes,3,rep,name=resource_names_subscribe,json=resourceNamesSubscribe,proto3" json:"resource_names_subscribe,omitempty"` + ResourceNamesUnsubscribe []string `protobuf:"bytes,4,rep,name=resource_names_unsubscribe,json=resourceNamesUnsubscribe,proto3" 
json:"resource_names_unsubscribe,omitempty"` + InitialResourceVersions map[string]string `protobuf:"bytes,5,rep,name=initial_resource_versions,json=initialResourceVersions,proto3" json:"initial_resource_versions,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ResponseNonce string `protobuf:"bytes,6,opt,name=response_nonce,json=responseNonce,proto3" json:"response_nonce,omitempty"` + ErrorDetail *status.Status `protobuf:"bytes,7,opt,name=error_detail,json=errorDetail,proto3" json:"error_detail,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeltaDiscoveryRequest) Reset() { *m = DeltaDiscoveryRequest{} } +func (m *DeltaDiscoveryRequest) String() string { return proto.CompactTextString(m) } +func (*DeltaDiscoveryRequest) ProtoMessage() {} +func (*DeltaDiscoveryRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_a1ffda4a09a0e500, []int{2} +} +func (m *DeltaDiscoveryRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeltaDiscoveryRequest.Unmarshal(m, b) +} +func (m *DeltaDiscoveryRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeltaDiscoveryRequest.Marshal(b, m, deterministic) +} +func (dst *DeltaDiscoveryRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeltaDiscoveryRequest.Merge(dst, src) +} +func (m *DeltaDiscoveryRequest) XXX_Size() int { + return xxx_messageInfo_DeltaDiscoveryRequest.Size(m) +} +func (m *DeltaDiscoveryRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeltaDiscoveryRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeltaDiscoveryRequest proto.InternalMessageInfo + +func (m *DeltaDiscoveryRequest) GetNode() *base.Node { + if m != nil { + return m.Node + } + return nil +} + +func (m *DeltaDiscoveryRequest) GetTypeUrl() string { + if m != nil { + return m.TypeUrl + } + return "" +} + +func (m *DeltaDiscoveryRequest) GetResourceNamesSubscribe() []string { + if m != nil { + return m.ResourceNamesSubscribe + } + return nil +} + +func (m *DeltaDiscoveryRequest) GetResourceNamesUnsubscribe() []string { + if m != nil { + return m.ResourceNamesUnsubscribe + } + return nil +} + +func (m *DeltaDiscoveryRequest) GetInitialResourceVersions() map[string]string { + if m != nil { + return m.InitialResourceVersions + } + return nil +} + +func (m *DeltaDiscoveryRequest) GetResponseNonce() string { + if m != nil { + return m.ResponseNonce + } + return "" +} + +func (m *DeltaDiscoveryRequest) GetErrorDetail() *status.Status { + if m != nil { + return m.ErrorDetail + } + return nil +} + +type DeltaDiscoveryResponse struct { + SystemVersionInfo string `protobuf:"bytes,1,opt,name=system_version_info,json=systemVersionInfo,proto3" json:"system_version_info,omitempty"` + Resources []*Resource `protobuf:"bytes,2,rep,name=resources,proto3" json:"resources,omitempty"` + RemovedResources []string `protobuf:"bytes,6,rep,name=removed_resources,json=removedResources,proto3" json:"removed_resources,omitempty"` + Nonce string `protobuf:"bytes,5,opt,name=nonce,proto3" json:"nonce,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeltaDiscoveryResponse) Reset() { *m = DeltaDiscoveryResponse{} } +func (m *DeltaDiscoveryResponse) String() string { return proto.CompactTextString(m) } +func (*DeltaDiscoveryResponse) ProtoMessage() {} +func (*DeltaDiscoveryResponse) Descriptor() ([]byte, 
[]int) { + return fileDescriptor_discovery_a1ffda4a09a0e500, []int{3} +} +func (m *DeltaDiscoveryResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeltaDiscoveryResponse.Unmarshal(m, b) +} +func (m *DeltaDiscoveryResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeltaDiscoveryResponse.Marshal(b, m, deterministic) +} +func (dst *DeltaDiscoveryResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeltaDiscoveryResponse.Merge(dst, src) +} +func (m *DeltaDiscoveryResponse) XXX_Size() int { + return xxx_messageInfo_DeltaDiscoveryResponse.Size(m) +} +func (m *DeltaDiscoveryResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeltaDiscoveryResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeltaDiscoveryResponse proto.InternalMessageInfo + +func (m *DeltaDiscoveryResponse) GetSystemVersionInfo() string { + if m != nil { + return m.SystemVersionInfo + } + return "" +} + +func (m *DeltaDiscoveryResponse) GetResources() []*Resource { + if m != nil { + return m.Resources + } + return nil +} + +func (m *DeltaDiscoveryResponse) GetRemovedResources() []string { + if m != nil { + return m.RemovedResources + } + return nil +} + +func (m *DeltaDiscoveryResponse) GetNonce() string { + if m != nil { + return m.Nonce + } + return "" +} + +type Resource struct { + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Aliases []string `protobuf:"bytes,4,rep,name=aliases,proto3" json:"aliases,omitempty"` + Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"` + Resource *any.Any `protobuf:"bytes,2,opt,name=resource,proto3" json:"resource,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Resource) Reset() { *m = Resource{} } +func (m *Resource) String() string { return proto.CompactTextString(m) } +func (*Resource) ProtoMessage() {} +func (*Resource) Descriptor() ([]byte, []int) { + return fileDescriptor_discovery_a1ffda4a09a0e500, []int{4} +} +func (m *Resource) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Resource.Unmarshal(m, b) +} +func (m *Resource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Resource.Marshal(b, m, deterministic) +} +func (dst *Resource) XXX_Merge(src proto.Message) { + xxx_messageInfo_Resource.Merge(dst, src) +} +func (m *Resource) XXX_Size() int { + return xxx_messageInfo_Resource.Size(m) +} +func (m *Resource) XXX_DiscardUnknown() { + xxx_messageInfo_Resource.DiscardUnknown(m) +} + +var xxx_messageInfo_Resource proto.InternalMessageInfo + +func (m *Resource) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Resource) GetAliases() []string { + if m != nil { + return m.Aliases + } + return nil +} + +func (m *Resource) GetVersion() string { + if m != nil { + return m.Version + } + return "" +} + +func (m *Resource) GetResource() *any.Any { + if m != nil { + return m.Resource + } + return nil +} + +func init() { + proto.RegisterType((*DiscoveryRequest)(nil), "envoy.api.v2.DiscoveryRequest") + proto.RegisterType((*DiscoveryResponse)(nil), "envoy.api.v2.DiscoveryResponse") + proto.RegisterType((*DeltaDiscoveryRequest)(nil), "envoy.api.v2.DeltaDiscoveryRequest") + proto.RegisterMapType((map[string]string)(nil), "envoy.api.v2.DeltaDiscoveryRequest.InitialResourceVersionsEntry") + proto.RegisterType((*DeltaDiscoveryResponse)(nil), "envoy.api.v2.DeltaDiscoveryResponse") + 
proto.RegisterType((*Resource)(nil), "envoy.api.v2.Resource") +} + +func init() { + proto.RegisterFile("envoy/api/v2/discovery.proto", fileDescriptor_discovery_a1ffda4a09a0e500) +} + +var fileDescriptor_discovery_a1ffda4a09a0e500 = []byte{ + // 656 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x54, 0x41, 0x6b, 0xdb, 0x4c, + 0x10, 0x45, 0xb6, 0xe3, 0xd8, 0x63, 0x27, 0x24, 0xfb, 0xe5, 0x73, 0x14, 0x13, 0xa8, 0x6b, 0x28, + 0x18, 0x02, 0x52, 0x51, 0x5b, 0x08, 0xa5, 0x87, 0x36, 0x75, 0x0f, 0xe9, 0x21, 0x04, 0x85, 0xe4, + 0xd0, 0x8b, 0x58, 0xcb, 0x93, 0x20, 0xaa, 0xec, 0xaa, 0xbb, 0x92, 0xa8, 0xa0, 0xa7, 0xd2, 0x3f, + 0xd6, 0x9f, 0xd5, 0x53, 0x8b, 0x56, 0x2b, 0x5b, 0x4a, 0x44, 0xf0, 0x4d, 0x33, 0xf3, 0x66, 0x76, + 0xde, 0xcc, 0x1b, 0xc1, 0x31, 0xb2, 0x94, 0x67, 0x36, 0x8d, 0x02, 0x3b, 0x75, 0xec, 0x65, 0x20, + 0x7d, 0x9e, 0xa2, 0xc8, 0xac, 0x48, 0xf0, 0x98, 0x93, 0xa1, 0x8a, 0x5a, 0x34, 0x0a, 0xac, 0xd4, + 0x19, 0xd7, 0xb1, 0x3e, 0x17, 0x68, 0x2f, 0xa8, 0xc4, 0x02, 0x3b, 0x3e, 0xba, 0xe3, 0xfc, 0x2e, + 0x44, 0x5b, 0x59, 0x8b, 0xe4, 0xd6, 0xa6, 0x4c, 0x97, 0x19, 0x1f, 0xea, 0x90, 0x88, 0x7c, 0x5b, + 0xc6, 0x34, 0x4e, 0x64, 0x11, 0x98, 0xfe, 0x6c, 0xc1, 0xde, 0xbc, 0x7c, 0xd3, 0xc5, 0x6f, 0x09, + 0xca, 0x98, 0x3c, 0x87, 0x61, 0x8a, 0x42, 0x06, 0x9c, 0x79, 0x01, 0xbb, 0xe5, 0xa6, 0x31, 0x31, + 0x66, 0x7d, 0x77, 0xa0, 0x7d, 0xe7, 0xec, 0x96, 0x93, 0x13, 0xe8, 0x30, 0xbe, 0x44, 0xb3, 0x35, + 0x31, 0x66, 0x03, 0xe7, 0xd0, 0xaa, 0xb6, 0x69, 0xe5, 0x8d, 0x59, 0x17, 0x7c, 0x89, 0xae, 0x02, + 0x91, 0x17, 0xb0, 0x2b, 0x50, 0xf2, 0x44, 0xf8, 0xe8, 0x31, 0x7a, 0x8f, 0xd2, 0x6c, 0x4f, 0xda, + 0xb3, 0xbe, 0xbb, 0x53, 0x7a, 0x2f, 0x72, 0x27, 0x39, 0x82, 0x5e, 0x9c, 0x45, 0xe8, 0x25, 0x22, + 0x34, 0x3b, 0xea, 0xc9, 0xed, 0xdc, 0xbe, 0x16, 0xa1, 0xae, 0x10, 0x71, 0x26, 0xd1, 0x63, 0x9c, + 0xf9, 0x68, 0x6e, 0x29, 0xc0, 0x4e, 0xe9, 0xbd, 0xc8, 0x9d, 0xe4, 0x0d, 0x0c, 0x51, 0x08, 0x2e, + 0xbc, 0x25, 0xc6, 0x34, 0x08, 0xcd, 0xae, 0xea, 0x8e, 0x58, 0x05, 0x7b, 0x4b, 0x44, 0xbe, 0x75, + 0xa5, 0xd8, 0xbb, 0x03, 0x85, 0x9b, 0x2b, 0xd8, 0xf4, 0x8f, 0x01, 0xfb, 0x95, 0x21, 0x14, 0x15, + 0x37, 0x99, 0x82, 0x03, 0xfd, 0x92, 0x82, 0x34, 0x5b, 0x93, 0xf6, 0x6c, 0xe0, 0x1c, 0x94, 0x8f, + 0x95, 0x5b, 0xb0, 0x3e, 0xb0, 0xcc, 0x5d, 0xc3, 0xc8, 0x08, 0xba, 0x3e, 0x65, 0x54, 0x64, 0x66, + 0x7b, 0x62, 0xcc, 0x7a, 0xae, 0xb6, 0x9e, 0x62, 0x7f, 0x00, 0x5b, 0x55, 0xd2, 0x85, 0x41, 0xe6, + 0xb0, 0xe3, 0x73, 0x16, 0x0b, 0x1e, 0x7a, 0x51, 0x48, 0x19, 0x6a, 0xb6, 0xcf, 0x1a, 0x76, 0xf1, + 0xb1, 0xc0, 0x5d, 0xe6, 0x30, 0x77, 0xe8, 0x57, 0xac, 0xe9, 0xdf, 0x36, 0xfc, 0x3f, 0xc7, 0x30, + 0xa6, 0x8f, 0x54, 0x50, 0xae, 0xd8, 0xd8, 0x64, 0xc5, 0xd5, 0xee, 0x5b, 0xf5, 0xee, 0x4f, 0xc1, + 0xac, 0x6f, 0xdf, 0x93, 0xc9, 0x42, 0xfa, 0x22, 0x58, 0xa0, 0xd6, 0xc1, 0xa8, 0xa6, 0x83, 0xab, + 0x32, 0x4a, 0xde, 0xc1, 0xf8, 0x41, 0x66, 0xc2, 0xd6, 0xb9, 0x1d, 0x95, 0x6b, 0xd6, 0x72, 0xaf, + 0xd7, 0x71, 0xf2, 0x03, 0x8e, 0x02, 0x16, 0xc4, 0x01, 0x0d, 0xbd, 0x55, 0x15, 0xbd, 0x3c, 0x69, + 0x6e, 0xa9, 0x65, 0xbd, 0xaf, 0x93, 0x6a, 0x9c, 0x83, 0x75, 0x5e, 0x14, 0x71, 0x75, 0x8d, 0x1b, + 0x5d, 0xe2, 0x13, 0x8b, 0x45, 0xe6, 0x1e, 0x06, 0xcd, 0xd1, 0x06, 0xc5, 0x76, 0x37, 0x51, 0xec, + 0xf6, 0x46, 0x8a, 0x1d, 0x7f, 0x86, 0xe3, 0xa7, 0xda, 0x22, 0x7b, 0xd0, 0xfe, 0x8a, 0x99, 0x96, + 0x6c, 0xfe, 0x99, 0x6b, 0x28, 0xa5, 0x61, 0x82, 0x7a, 0x3b, 0x85, 0xf1, 0xb6, 0x75, 0x6a, 0x4c, + 0x7f, 0x1b, 0x30, 0x7a, 0xc8, 0x5c, 0x9f, 0x80, 0x05, 0xff, 0xc9, 0x4c, 0xc6, 0x78, 0xef, 0x35, + 0x5c, 0xc2, 0x7e, 0x11, 0xba, 
0xa9, 0xdc, 0xc3, 0xeb, 0xc7, 0xf7, 0x30, 0xaa, 0x8f, 0xb8, 0x6c, + 0xb7, 0x7a, 0x11, 0x27, 0xb0, 0x2f, 0xf0, 0x9e, 0xa7, 0xb8, 0xf4, 0xd6, 0xd9, 0x5d, 0xb5, 0xdd, + 0x3d, 0x1d, 0x70, 0x57, 0xe0, 0xc6, 0x5b, 0x98, 0xfe, 0x32, 0xa0, 0x57, 0x62, 0x08, 0x81, 0x4e, + 0xae, 0x16, 0x75, 0x5f, 0x7d, 0x57, 0x7d, 0x13, 0x13, 0xb6, 0x69, 0x18, 0x50, 0x89, 0x52, 0xeb, + 0xa6, 0x34, 0xf3, 0x88, 0x26, 0xa7, 0x79, 0x95, 0x26, 0x79, 0x09, 0xbd, 0xb2, 0x1f, 0xfd, 0x9f, + 0x6b, 0x3e, 0xee, 0x15, 0xea, 0xcc, 0x81, 0x71, 0xc0, 0x0b, 0xc2, 0x91, 0xe0, 0xdf, 0xb3, 0x1a, + 0xf7, 0xb3, 0xdd, 0xd5, 0x80, 0x2f, 0xf3, 0xf4, 0x4b, 0xe3, 0x4b, 0x2b, 0x75, 0x16, 0x5d, 0x55, + 0xeb, 0xd5, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x98, 0x91, 0xb2, 0x9f, 0x08, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds/eds.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds/eds.pb.go new file mode 100755 index 0000000..aa66cc7 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds/eds.pb.go @@ -0,0 +1,376 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/api/v2/eds.proto + +package envoy_api_v2 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import _ "google.golang.org/genproto/googleapis/api/annotations" +import discovery "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery" +import endpoint "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/endpoint/endpoint" +import percent "google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ClusterLoadAssignment struct { + ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"` + Endpoints []*endpoint.LocalityLbEndpoints `protobuf:"bytes,2,rep,name=endpoints,proto3" json:"endpoints,omitempty"` + NamedEndpoints map[string]*endpoint.Endpoint `protobuf:"bytes,5,rep,name=named_endpoints,json=namedEndpoints,proto3" json:"named_endpoints,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Policy *ClusterLoadAssignment_Policy `protobuf:"bytes,4,opt,name=policy,proto3" json:"policy,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterLoadAssignment) Reset() { *m = ClusterLoadAssignment{} } +func (m *ClusterLoadAssignment) String() string { return proto.CompactTextString(m) } +func (*ClusterLoadAssignment) ProtoMessage() {} +func (*ClusterLoadAssignment) Descriptor() ([]byte, []int) { + return fileDescriptor_eds_fb0a999149ff4153, []int{0} +} +func (m *ClusterLoadAssignment) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterLoadAssignment.Unmarshal(m, b) +} +func (m *ClusterLoadAssignment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterLoadAssignment.Marshal(b, m, deterministic) +} +func (dst *ClusterLoadAssignment) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterLoadAssignment.Merge(dst, src) +} +func (m *ClusterLoadAssignment) XXX_Size() int { + return xxx_messageInfo_ClusterLoadAssignment.Size(m) +} +func (m *ClusterLoadAssignment) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterLoadAssignment.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterLoadAssignment proto.InternalMessageInfo + +func (m *ClusterLoadAssignment) GetClusterName() string { + if m != nil { + return m.ClusterName + } + return "" +} + +func (m *ClusterLoadAssignment) GetEndpoints() []*endpoint.LocalityLbEndpoints { + if m != nil { + return m.Endpoints + } + return nil +} + +func (m *ClusterLoadAssignment) GetNamedEndpoints() map[string]*endpoint.Endpoint { + if m != nil { + return m.NamedEndpoints + } + return nil +} + +func (m *ClusterLoadAssignment) GetPolicy() *ClusterLoadAssignment_Policy { + if m != nil { + return m.Policy + } + return nil +} + +type ClusterLoadAssignment_Policy struct { + DropOverloads []*ClusterLoadAssignment_Policy_DropOverload `protobuf:"bytes,2,rep,name=drop_overloads,json=dropOverloads,proto3" json:"drop_overloads,omitempty"` + OverprovisioningFactor *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=overprovisioning_factor,json=overprovisioningFactor,proto3" json:"overprovisioning_factor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterLoadAssignment_Policy) Reset() { *m = ClusterLoadAssignment_Policy{} } +func (m *ClusterLoadAssignment_Policy) String() string { return proto.CompactTextString(m) } +func (*ClusterLoadAssignment_Policy) ProtoMessage() {} +func (*ClusterLoadAssignment_Policy) Descriptor() ([]byte, []int) { + return fileDescriptor_eds_fb0a999149ff4153, []int{0, 1} +} +func (m *ClusterLoadAssignment_Policy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterLoadAssignment_Policy.Unmarshal(m, b) +} +func (m *ClusterLoadAssignment_Policy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_ClusterLoadAssignment_Policy.Marshal(b, m, deterministic) +} +func (dst *ClusterLoadAssignment_Policy) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterLoadAssignment_Policy.Merge(dst, src) +} +func (m *ClusterLoadAssignment_Policy) XXX_Size() int { + return xxx_messageInfo_ClusterLoadAssignment_Policy.Size(m) +} +func (m *ClusterLoadAssignment_Policy) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterLoadAssignment_Policy.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterLoadAssignment_Policy proto.InternalMessageInfo + +func (m *ClusterLoadAssignment_Policy) GetDropOverloads() []*ClusterLoadAssignment_Policy_DropOverload { + if m != nil { + return m.DropOverloads + } + return nil +} + +func (m *ClusterLoadAssignment_Policy) GetOverprovisioningFactor() *wrappers.UInt32Value { + if m != nil { + return m.OverprovisioningFactor + } + return nil +} + +type ClusterLoadAssignment_Policy_DropOverload struct { + Category string `protobuf:"bytes,1,opt,name=category,proto3" json:"category,omitempty"` + DropPercentage *percent.FractionalPercent `protobuf:"bytes,2,opt,name=drop_percentage,json=dropPercentage,proto3" json:"drop_percentage,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClusterLoadAssignment_Policy_DropOverload) Reset() { + *m = ClusterLoadAssignment_Policy_DropOverload{} +} +func (m *ClusterLoadAssignment_Policy_DropOverload) String() string { return proto.CompactTextString(m) } +func (*ClusterLoadAssignment_Policy_DropOverload) ProtoMessage() {} +func (*ClusterLoadAssignment_Policy_DropOverload) Descriptor() ([]byte, []int) { + return fileDescriptor_eds_fb0a999149ff4153, []int{0, 1, 0} +} +func (m *ClusterLoadAssignment_Policy_DropOverload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload.Unmarshal(m, b) +} +func (m *ClusterLoadAssignment_Policy_DropOverload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload.Marshal(b, m, deterministic) +} +func (dst *ClusterLoadAssignment_Policy_DropOverload) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload.Merge(dst, src) +} +func (m *ClusterLoadAssignment_Policy_DropOverload) XXX_Size() int { + return xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload.Size(m) +} +func (m *ClusterLoadAssignment_Policy_DropOverload) XXX_DiscardUnknown() { + xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload.DiscardUnknown(m) +} + +var xxx_messageInfo_ClusterLoadAssignment_Policy_DropOverload proto.InternalMessageInfo + +func (m *ClusterLoadAssignment_Policy_DropOverload) GetCategory() string { + if m != nil { + return m.Category + } + return "" +} + +func (m *ClusterLoadAssignment_Policy_DropOverload) GetDropPercentage() *percent.FractionalPercent { + if m != nil { + return m.DropPercentage + } + return nil +} + +func init() { + proto.RegisterType((*ClusterLoadAssignment)(nil), "envoy.api.v2.ClusterLoadAssignment") + proto.RegisterMapType((map[string]*endpoint.Endpoint)(nil), "envoy.api.v2.ClusterLoadAssignment.NamedEndpointsEntry") + proto.RegisterType((*ClusterLoadAssignment_Policy)(nil), "envoy.api.v2.ClusterLoadAssignment.Policy") + proto.RegisterType((*ClusterLoadAssignment_Policy_DropOverload)(nil), "envoy.api.v2.ClusterLoadAssignment.Policy.DropOverload") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EndpointDiscoveryServiceClient is the client API for EndpointDiscoveryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EndpointDiscoveryServiceClient interface { + StreamEndpoints(ctx context.Context, opts ...grpc.CallOption) (EndpointDiscoveryService_StreamEndpointsClient, error) + FetchEndpoints(ctx context.Context, in *discovery.DiscoveryRequest, opts ...grpc.CallOption) (*discovery.DiscoveryResponse, error) +} + +type endpointDiscoveryServiceClient struct { + cc *grpc.ClientConn +} + +func NewEndpointDiscoveryServiceClient(cc *grpc.ClientConn) EndpointDiscoveryServiceClient { + return &endpointDiscoveryServiceClient{cc} +} + +func (c *endpointDiscoveryServiceClient) StreamEndpoints(ctx context.Context, opts ...grpc.CallOption) (EndpointDiscoveryService_StreamEndpointsClient, error) { + stream, err := c.cc.NewStream(ctx, &_EndpointDiscoveryService_serviceDesc.Streams[0], "/envoy.api.v2.EndpointDiscoveryService/StreamEndpoints", opts...) + if err != nil { + return nil, err + } + x := &endpointDiscoveryServiceStreamEndpointsClient{stream} + return x, nil +} + +type EndpointDiscoveryService_StreamEndpointsClient interface { + Send(*discovery.DiscoveryRequest) error + Recv() (*discovery.DiscoveryResponse, error) + grpc.ClientStream +} + +type endpointDiscoveryServiceStreamEndpointsClient struct { + grpc.ClientStream +} + +func (x *endpointDiscoveryServiceStreamEndpointsClient) Send(m *discovery.DiscoveryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *endpointDiscoveryServiceStreamEndpointsClient) Recv() (*discovery.DiscoveryResponse, error) { + m := new(discovery.DiscoveryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *endpointDiscoveryServiceClient) FetchEndpoints(ctx context.Context, in *discovery.DiscoveryRequest, opts ...grpc.CallOption) (*discovery.DiscoveryResponse, error) { + out := new(discovery.DiscoveryResponse) + err := c.cc.Invoke(ctx, "/envoy.api.v2.EndpointDiscoveryService/FetchEndpoints", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EndpointDiscoveryServiceServer is the server API for EndpointDiscoveryService service. 
+type EndpointDiscoveryServiceServer interface { + StreamEndpoints(EndpointDiscoveryService_StreamEndpointsServer) error + FetchEndpoints(context.Context, *discovery.DiscoveryRequest) (*discovery.DiscoveryResponse, error) +} + +func RegisterEndpointDiscoveryServiceServer(s *grpc.Server, srv EndpointDiscoveryServiceServer) { + s.RegisterService(&_EndpointDiscoveryService_serviceDesc, srv) +} + +func _EndpointDiscoveryService_StreamEndpoints_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(EndpointDiscoveryServiceServer).StreamEndpoints(&endpointDiscoveryServiceStreamEndpointsServer{stream}) +} + +type EndpointDiscoveryService_StreamEndpointsServer interface { + Send(*discovery.DiscoveryResponse) error + Recv() (*discovery.DiscoveryRequest, error) + grpc.ServerStream +} + +type endpointDiscoveryServiceStreamEndpointsServer struct { + grpc.ServerStream +} + +func (x *endpointDiscoveryServiceStreamEndpointsServer) Send(m *discovery.DiscoveryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *endpointDiscoveryServiceStreamEndpointsServer) Recv() (*discovery.DiscoveryRequest, error) { + m := new(discovery.DiscoveryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _EndpointDiscoveryService_FetchEndpoints_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(discovery.DiscoveryRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EndpointDiscoveryServiceServer).FetchEndpoints(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/envoy.api.v2.EndpointDiscoveryService/FetchEndpoints", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EndpointDiscoveryServiceServer).FetchEndpoints(ctx, req.(*discovery.DiscoveryRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EndpointDiscoveryService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "envoy.api.v2.EndpointDiscoveryService", + HandlerType: (*EndpointDiscoveryServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "FetchEndpoints", + Handler: _EndpointDiscoveryService_FetchEndpoints_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamEndpoints", + Handler: _EndpointDiscoveryService_StreamEndpoints_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "envoy/api/v2/eds.proto", +} + +func init() { proto.RegisterFile("envoy/api/v2/eds.proto", fileDescriptor_eds_fb0a999149ff4153) } + +var fileDescriptor_eds_fb0a999149ff4153 = []byte{ + // 612 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xee, 0x3a, 0x6d, 0xd5, 0x6e, 0x43, 0x52, 0x2d, 0xa2, 0xb1, 0xac, 0xd0, 0x46, 0x11, 0x48, + 0x51, 0x40, 0x36, 0x4a, 0x85, 0x8a, 0x72, 0x23, 0x34, 0x11, 0xa0, 0x08, 0x22, 0x57, 0x20, 0x4e, + 0x84, 0x8d, 0xbd, 0x35, 0x2b, 0x9c, 0xdd, 0x65, 0xbd, 0x31, 0xf8, 0xc0, 0x85, 0x13, 0x77, 0xde, + 0x86, 0x13, 0x6f, 0xc0, 0x81, 0x57, 0xe0, 0x82, 0xfa, 0x12, 0xc8, 0xbf, 0x89, 0x69, 0x2a, 0x71, + 0xe0, 0xb6, 0xde, 0x99, 0xef, 0x9b, 0x6f, 0xbe, 0x9d, 0x31, 0x3c, 0x20, 0x2c, 0xe4, 0x91, 0x85, + 0x05, 0xb5, 0xc2, 0x9e, 0x45, 0xdc, 0xc0, 0x14, 0x92, 0x2b, 0x8e, 0xaa, 0xc9, 0xbd, 0x89, 0x05, + 0x35, 0xc3, 0x9e, 0xd1, 0x2c, 0x65, 0xb9, 0x34, 0x70, 0x78, 0x48, 0x64, 0x94, 0xe6, 0x1a, 0xb7, + 0xca, 0x1c, 0xcc, 
0x15, 0x9c, 0x32, 0x55, 0x1c, 0xb2, 0x2c, 0x3d, 0xcd, 0x52, 0x91, 0x20, 0x96, + 0x20, 0xd2, 0x21, 0x45, 0xa4, 0xe9, 0x71, 0xee, 0xf9, 0x24, 0x21, 0xc0, 0x8c, 0x71, 0x85, 0x15, + 0xe5, 0x2c, 0x53, 0x62, 0x34, 0x42, 0xec, 0x53, 0x17, 0x2b, 0x62, 0xe5, 0x87, 0x2c, 0x70, 0x98, + 0xc1, 0x92, 0xaf, 0xd9, 0xe2, 0xdc, 0xfa, 0x20, 0xb1, 0x10, 0x44, 0x66, 0xc0, 0xf6, 0xc5, 0x16, + 0xbc, 0xf1, 0xc8, 0x5f, 0x04, 0x8a, 0xc8, 0x31, 0xc7, 0xee, 0xc3, 0x20, 0xa0, 0x1e, 0x9b, 0x13, + 0xa6, 0xd0, 0x5d, 0x58, 0x75, 0xd2, 0xc0, 0x94, 0xe1, 0x39, 0xd1, 0x41, 0x0b, 0x74, 0x76, 0x07, + 0xbb, 0xdf, 0x7e, 0x7f, 0xaf, 0x6c, 0x4a, 0xad, 0x05, 0xec, 0xbd, 0x2c, 0xfc, 0x0c, 0xcf, 0x09, + 0x7a, 0x0c, 0x77, 0xf3, 0x56, 0x02, 0x5d, 0x6b, 0x55, 0x3a, 0x7b, 0xbd, 0xae, 0xb9, 0x6a, 0x8f, + 0x59, 0x74, 0x3a, 0xe6, 0x0e, 0xf6, 0xa9, 0x8a, 0xc6, 0xb3, 0x61, 0x8e, 0xb0, 0x97, 0x60, 0xf4, + 0x06, 0xd6, 0xe3, 0x7a, 0xee, 0x74, 0xc9, 0xb7, 0x95, 0xf0, 0x9d, 0x94, 0xf9, 0xd6, 0xaa, 0x36, + 0x63, 0x31, 0x6e, 0xc1, 0x3b, 0x64, 0x4a, 0x46, 0x76, 0x8d, 0x95, 0x2e, 0xd1, 0x00, 0x6e, 0x0b, + 0xee, 0x53, 0x27, 0xd2, 0x37, 0x5b, 0xe0, 0xb2, 0xd0, 0xf5, 0xc4, 0x93, 0x04, 0x61, 0x67, 0x48, + 0x63, 0x06, 0xaf, 0xaf, 0x29, 0x85, 0xf6, 0x61, 0xe5, 0x1d, 0x89, 0x52, 0xaf, 0xec, 0xf8, 0x88, + 0xee, 0xc3, 0xad, 0x10, 0xfb, 0x0b, 0xa2, 0x6b, 0x49, 0xad, 0xa3, 0x2b, 0x4c, 0xc9, 0x79, 0xec, + 0x34, 0xbb, 0xaf, 0x3d, 0x00, 0xc6, 0x0f, 0x0d, 0x6e, 0xa7, 0x65, 0xd1, 0x6b, 0x58, 0x73, 0x25, + 0x17, 0xd3, 0x78, 0xa2, 0x7c, 0x8e, 0xdd, 0xdc, 0xe3, 0x93, 0x7f, 0x97, 0x6e, 0x9e, 0x4a, 0x2e, + 0x9e, 0x67, 0x78, 0xfb, 0x9a, 0xbb, 0xf2, 0x15, 0x9b, 0xde, 0x88, 0xa9, 0x85, 0xe4, 0x21, 0x0d, + 0x28, 0x67, 0x94, 0x79, 0xd3, 0x73, 0xec, 0x28, 0x2e, 0xf5, 0x4a, 0xa2, 0xbb, 0x69, 0xa6, 0x83, + 0x64, 0xe6, 0x83, 0x64, 0xbe, 0x78, 0xc2, 0xd4, 0x71, 0xef, 0x65, 0xac, 0x36, 0x9b, 0x8a, 0xae, + 0xd6, 0xda, 0xb0, 0x0f, 0xfe, 0xe6, 0x19, 0x25, 0x34, 0xc6, 0x27, 0x58, 0x5d, 0x15, 0x80, 0x6e, + 0xc3, 0x1d, 0x07, 0x2b, 0xe2, 0x71, 0x19, 0x5d, 0x1e, 0xad, 0x22, 0x84, 0x46, 0xb0, 0x9e, 0x34, + 0x9e, 0x2d, 0x03, 0xf6, 0x72, 0x23, 0x6f, 0x66, 0x9d, 0xc7, 0xab, 0x62, 0x8e, 0x24, 0x76, 0xe2, + 0x75, 0xc0, 0xfe, 0x24, 0xcd, 0xb3, 0x13, 0xbb, 0x26, 0x05, 0xe8, 0xe9, 0xe6, 0x0e, 0xd8, 0xd7, + 0x7a, 0x17, 0x00, 0xea, 0xb9, 0xd3, 0xa7, 0xf9, 0x82, 0x9e, 0x11, 0x19, 0x52, 0x87, 0xa0, 0x57, + 0xb0, 0x7e, 0xa6, 0x24, 0xc1, 0xf3, 0xe5, 0xa4, 0x1c, 0x96, 0xed, 0x2d, 0x20, 0x36, 0x79, 0xbf, + 0x20, 0x81, 0x32, 0x8e, 0xae, 0x8c, 0x07, 0x82, 0xb3, 0x80, 0xb4, 0x37, 0x3a, 0xe0, 0x1e, 0x40, + 0x0b, 0x58, 0x1b, 0x11, 0xe5, 0xbc, 0xfd, 0x8f, 0xc4, 0xed, 0xcf, 0x3f, 0x7f, 0x7d, 0xd5, 0x9a, + 0xed, 0x46, 0xe9, 0x5f, 0xd3, 0x2f, 0x76, 0xa6, 0x0f, 0xba, 0x83, 0x3b, 0xd0, 0xa0, 0x3c, 0x25, + 0x12, 0x92, 0x7f, 0x8c, 0x4a, 0x9c, 0x83, 0x9d, 0xa1, 0x1b, 0x4c, 0xe2, 0xc7, 0x9c, 0x80, 0x2f, + 0x00, 0xcc, 0xb6, 0x93, 0x87, 0x3d, 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0xe7, 0x56, 0x21, 0x69, + 0xec, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/endpoint/endpoint/endpoint.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/endpoint/endpoint/endpoint.pb.go new file mode 100755 index 0000000..2ae03d6 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/endpoint/endpoint/endpoint.pb.go @@ -0,0 +1,385 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: envoy/api/v2/endpoint/endpoint.proto + +package endpoint + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" +import address "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/address" +import base "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" +import health_check "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/health_check" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Endpoint struct { + Address *address.Address `protobuf:"bytes,1,opt,name=address,proto3" json:"address,omitempty"` + HealthCheckConfig *Endpoint_HealthCheckConfig `protobuf:"bytes,2,opt,name=health_check_config,json=healthCheckConfig,proto3" json:"health_check_config,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Endpoint) Reset() { *m = Endpoint{} } +func (m *Endpoint) String() string { return proto.CompactTextString(m) } +func (*Endpoint) ProtoMessage() {} +func (*Endpoint) Descriptor() ([]byte, []int) { + return fileDescriptor_endpoint_2d1a533d75f3064c, []int{0} +} +func (m *Endpoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Endpoint.Unmarshal(m, b) +} +func (m *Endpoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Endpoint.Marshal(b, m, deterministic) +} +func (dst *Endpoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Endpoint.Merge(dst, src) +} +func (m *Endpoint) XXX_Size() int { + return xxx_messageInfo_Endpoint.Size(m) +} +func (m *Endpoint) XXX_DiscardUnknown() { + xxx_messageInfo_Endpoint.DiscardUnknown(m) +} + +var xxx_messageInfo_Endpoint proto.InternalMessageInfo + +func (m *Endpoint) GetAddress() *address.Address { + if m != nil { + return m.Address + } + return nil +} + +func (m *Endpoint) GetHealthCheckConfig() *Endpoint_HealthCheckConfig { + if m != nil { + return m.HealthCheckConfig + } + return nil +} + +type Endpoint_HealthCheckConfig struct { + PortValue uint32 `protobuf:"varint,1,opt,name=port_value,json=portValue,proto3" json:"port_value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Endpoint_HealthCheckConfig) Reset() { *m = Endpoint_HealthCheckConfig{} } +func (m *Endpoint_HealthCheckConfig) String() string { return proto.CompactTextString(m) } +func (*Endpoint_HealthCheckConfig) ProtoMessage() {} +func (*Endpoint_HealthCheckConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_endpoint_2d1a533d75f3064c, []int{0, 0} +} +func (m *Endpoint_HealthCheckConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Endpoint_HealthCheckConfig.Unmarshal(m, b) +} +func (m *Endpoint_HealthCheckConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Endpoint_HealthCheckConfig.Marshal(b, m, deterministic) +} +func (dst *Endpoint_HealthCheckConfig) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_Endpoint_HealthCheckConfig.Merge(dst, src) +} +func (m *Endpoint_HealthCheckConfig) XXX_Size() int { + return xxx_messageInfo_Endpoint_HealthCheckConfig.Size(m) +} +func (m *Endpoint_HealthCheckConfig) XXX_DiscardUnknown() { + xxx_messageInfo_Endpoint_HealthCheckConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_Endpoint_HealthCheckConfig proto.InternalMessageInfo + +func (m *Endpoint_HealthCheckConfig) GetPortValue() uint32 { + if m != nil { + return m.PortValue + } + return 0 +} + +type LbEndpoint struct { + // Types that are valid to be assigned to HostIdentifier: + // *LbEndpoint_Endpoint + // *LbEndpoint_EndpointName + HostIdentifier isLbEndpoint_HostIdentifier `protobuf_oneof:"host_identifier"` + HealthStatus health_check.HealthStatus `protobuf:"varint,2,opt,name=health_status,json=healthStatus,proto3,enum=envoy.api.v2.core.HealthStatus" json:"health_status,omitempty"` + Metadata *base.Metadata `protobuf:"bytes,3,opt,name=metadata,proto3" json:"metadata,omitempty"` + LoadBalancingWeight *wrappers.UInt32Value `protobuf:"bytes,4,opt,name=load_balancing_weight,json=loadBalancingWeight,proto3" json:"load_balancing_weight,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LbEndpoint) Reset() { *m = LbEndpoint{} } +func (m *LbEndpoint) String() string { return proto.CompactTextString(m) } +func (*LbEndpoint) ProtoMessage() {} +func (*LbEndpoint) Descriptor() ([]byte, []int) { + return fileDescriptor_endpoint_2d1a533d75f3064c, []int{1} +} +func (m *LbEndpoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LbEndpoint.Unmarshal(m, b) +} +func (m *LbEndpoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LbEndpoint.Marshal(b, m, deterministic) +} +func (dst *LbEndpoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_LbEndpoint.Merge(dst, src) +} +func (m *LbEndpoint) XXX_Size() int { + return xxx_messageInfo_LbEndpoint.Size(m) +} +func (m *LbEndpoint) XXX_DiscardUnknown() { + xxx_messageInfo_LbEndpoint.DiscardUnknown(m) +} + +var xxx_messageInfo_LbEndpoint proto.InternalMessageInfo + +type isLbEndpoint_HostIdentifier interface { + isLbEndpoint_HostIdentifier() +} + +type LbEndpoint_Endpoint struct { + Endpoint *Endpoint `protobuf:"bytes,1,opt,name=endpoint,proto3,oneof"` +} + +type LbEndpoint_EndpointName struct { + EndpointName string `protobuf:"bytes,5,opt,name=endpoint_name,json=endpointName,proto3,oneof"` +} + +func (*LbEndpoint_Endpoint) isLbEndpoint_HostIdentifier() {} + +func (*LbEndpoint_EndpointName) isLbEndpoint_HostIdentifier() {} + +func (m *LbEndpoint) GetHostIdentifier() isLbEndpoint_HostIdentifier { + if m != nil { + return m.HostIdentifier + } + return nil +} + +func (m *LbEndpoint) GetEndpoint() *Endpoint { + if x, ok := m.GetHostIdentifier().(*LbEndpoint_Endpoint); ok { + return x.Endpoint + } + return nil +} + +func (m *LbEndpoint) GetEndpointName() string { + if x, ok := m.GetHostIdentifier().(*LbEndpoint_EndpointName); ok { + return x.EndpointName + } + return "" +} + +func (m *LbEndpoint) GetHealthStatus() health_check.HealthStatus { + if m != nil { + return m.HealthStatus + } + return health_check.HealthStatus_UNKNOWN +} + +func (m *LbEndpoint) GetMetadata() *base.Metadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *LbEndpoint) GetLoadBalancingWeight() *wrappers.UInt32Value { + if m != nil { + return m.LoadBalancingWeight + } + return nil +} + +// 
XXX_OneofFuncs is for the internal use of the proto package. +func (*LbEndpoint) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LbEndpoint_OneofMarshaler, _LbEndpoint_OneofUnmarshaler, _LbEndpoint_OneofSizer, []interface{}{ + (*LbEndpoint_Endpoint)(nil), + (*LbEndpoint_EndpointName)(nil), + } +} + +func _LbEndpoint_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LbEndpoint) + // host_identifier + switch x := m.HostIdentifier.(type) { + case *LbEndpoint_Endpoint: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Endpoint); err != nil { + return err + } + case *LbEndpoint_EndpointName: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.EndpointName) + case nil: + default: + return fmt.Errorf("LbEndpoint.HostIdentifier has unexpected type %T", x) + } + return nil +} + +func _LbEndpoint_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LbEndpoint) + switch tag { + case 1: // host_identifier.endpoint + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Endpoint) + err := b.DecodeMessage(msg) + m.HostIdentifier = &LbEndpoint_Endpoint{msg} + return true, err + case 5: // host_identifier.endpoint_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.HostIdentifier = &LbEndpoint_EndpointName{x} + return true, err + default: + return false, nil + } +} + +func _LbEndpoint_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LbEndpoint) + // host_identifier + switch x := m.HostIdentifier.(type) { + case *LbEndpoint_Endpoint: + s := proto.Size(x.Endpoint) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LbEndpoint_EndpointName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.EndpointName))) + n += len(x.EndpointName) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type LocalityLbEndpoints struct { + Locality *base.Locality `protobuf:"bytes,1,opt,name=locality,proto3" json:"locality,omitempty"` + LbEndpoints []*LbEndpoint `protobuf:"bytes,2,rep,name=lb_endpoints,json=lbEndpoints,proto3" json:"lb_endpoints,omitempty"` + LoadBalancingWeight *wrappers.UInt32Value `protobuf:"bytes,3,opt,name=load_balancing_weight,json=loadBalancingWeight,proto3" json:"load_balancing_weight,omitempty"` + Priority uint32 `protobuf:"varint,5,opt,name=priority,proto3" json:"priority,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LocalityLbEndpoints) Reset() { *m = LocalityLbEndpoints{} } +func (m *LocalityLbEndpoints) String() string { return proto.CompactTextString(m) } +func (*LocalityLbEndpoints) ProtoMessage() {} +func (*LocalityLbEndpoints) Descriptor() ([]byte, []int) { + return fileDescriptor_endpoint_2d1a533d75f3064c, []int{2} +} +func (m *LocalityLbEndpoints) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LocalityLbEndpoints.Unmarshal(m, b) +} +func (m *LocalityLbEndpoints) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LocalityLbEndpoints.Marshal(b, m, deterministic) +} +func (dst *LocalityLbEndpoints) XXX_Merge(src proto.Message) { + xxx_messageInfo_LocalityLbEndpoints.Merge(dst, src) +} +func (m *LocalityLbEndpoints) 
XXX_Size() int { + return xxx_messageInfo_LocalityLbEndpoints.Size(m) +} +func (m *LocalityLbEndpoints) XXX_DiscardUnknown() { + xxx_messageInfo_LocalityLbEndpoints.DiscardUnknown(m) +} + +var xxx_messageInfo_LocalityLbEndpoints proto.InternalMessageInfo + +func (m *LocalityLbEndpoints) GetLocality() *base.Locality { + if m != nil { + return m.Locality + } + return nil +} + +func (m *LocalityLbEndpoints) GetLbEndpoints() []*LbEndpoint { + if m != nil { + return m.LbEndpoints + } + return nil +} + +func (m *LocalityLbEndpoints) GetLoadBalancingWeight() *wrappers.UInt32Value { + if m != nil { + return m.LoadBalancingWeight + } + return nil +} + +func (m *LocalityLbEndpoints) GetPriority() uint32 { + if m != nil { + return m.Priority + } + return 0 +} + +func init() { + proto.RegisterType((*Endpoint)(nil), "envoy.api.v2.endpoint.Endpoint") + proto.RegisterType((*Endpoint_HealthCheckConfig)(nil), "envoy.api.v2.endpoint.Endpoint.HealthCheckConfig") + proto.RegisterType((*LbEndpoint)(nil), "envoy.api.v2.endpoint.LbEndpoint") + proto.RegisterType((*LocalityLbEndpoints)(nil), "envoy.api.v2.endpoint.LocalityLbEndpoints") +} + +func init() { + proto.RegisterFile("envoy/api/v2/endpoint/endpoint.proto", fileDescriptor_endpoint_2d1a533d75f3064c) +} + +var fileDescriptor_endpoint_2d1a533d75f3064c = []byte{ + // 556 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x52, 0xc1, 0x6e, 0xd3, 0x4c, + 0x18, 0xac, 0xe3, 0xa6, 0x4d, 0x37, 0xc9, 0xff, 0x2b, 0x8e, 0x2a, 0xac, 0x50, 0xd1, 0x12, 0x0a, + 0x8a, 0x72, 0x58, 0x8b, 0x14, 0x89, 0x13, 0x02, 0xdc, 0x22, 0x05, 0xa9, 0xa0, 0x6a, 0x11, 0x20, + 0x71, 0xc0, 0xfa, 0x6c, 0x6f, 0xe2, 0x15, 0x8e, 0xd7, 0xb2, 0x37, 0x2e, 0xb9, 0xf5, 0x41, 0x78, + 0x12, 0x4e, 0x3c, 0x4c, 0x2f, 0x3c, 0x45, 0x91, 0xd7, 0x5e, 0x37, 0x6d, 0x52, 0x71, 0xe1, 0xb6, + 0xde, 0x99, 0xf9, 0xbe, 0x99, 0xf1, 0xa2, 0x43, 0x1a, 0x65, 0x7c, 0x61, 0x41, 0xcc, 0xac, 0x6c, + 0x64, 0xd1, 0xc8, 0x8f, 0x39, 0x8b, 0x44, 0x75, 0xc0, 0x71, 0xc2, 0x05, 0x37, 0x76, 0x25, 0x0b, + 0x43, 0xcc, 0x70, 0x36, 0xc2, 0x0a, 0xec, 0xed, 0xdf, 0x10, 0x7b, 0x3c, 0xa1, 0x16, 0xf8, 0x7e, + 0x42, 0xd3, 0xb4, 0xd0, 0xf5, 0xf6, 0x56, 0x09, 0x2e, 0xa4, 0xb4, 0x44, 0x0f, 0x57, 0xd1, 0x80, + 0x42, 0x28, 0x02, 0xc7, 0x0b, 0xa8, 0xf7, 0xad, 0x64, 0x3d, 0x98, 0x72, 0x3e, 0x0d, 0xa9, 0x25, + 0xbf, 0xdc, 0xf9, 0xc4, 0x3a, 0x4f, 0x20, 0x8e, 0x69, 0xa2, 0x76, 0xdc, 0xcb, 0x20, 0x64, 0x3e, + 0x08, 0x6a, 0xa9, 0x43, 0x01, 0xf4, 0x2f, 0x35, 0xd4, 0x78, 0x53, 0x5a, 0x35, 0x9e, 0xa1, 0xed, + 0xd2, 0x9a, 0xa9, 0x1d, 0x68, 0x83, 0xe6, 0xa8, 0x87, 0x6f, 0x64, 0xca, 0xb7, 0xe3, 0xd7, 0x05, + 0x83, 0x28, 0xaa, 0x01, 0xa8, 0xbb, 0xec, 0xc8, 0xf1, 0x78, 0x34, 0x61, 0x53, 0xb3, 0x26, 0x27, + 0x3c, 0xc5, 0x6b, 0x5b, 0xc1, 0x6a, 0x27, 0x1e, 0x4b, 0xe9, 0x71, 0xae, 0x3c, 0x96, 0x42, 0xd2, + 0x09, 0x6e, 0x5f, 0xf5, 0x5e, 0xa2, 0xce, 0x0a, 0xcf, 0x18, 0x22, 0x14, 0xf3, 0x44, 0x38, 0x19, + 0x84, 0x73, 0x2a, 0x0d, 0xb7, 0xed, 0xe6, 0xcf, 0xdf, 0xbf, 0xf4, 0xad, 0xe1, 0xa6, 0x79, 0x75, + 0xa5, 0x93, 0x9d, 0x1c, 0xfe, 0x94, 0xa3, 0xfd, 0xcb, 0x1a, 0x42, 0xa7, 0x6e, 0x15, 0xf4, 0x05, + 0x6a, 0x28, 0x27, 0x65, 0xd2, 0xfd, 0xbf, 0xf8, 0x1c, 0x6f, 0x90, 0x4a, 0x62, 0x3c, 0x46, 0x6d, + 0x75, 0x76, 0x22, 0x98, 0x51, 0xb3, 0x7e, 0xa0, 0x0d, 0x76, 0xc6, 0x1b, 0xa4, 0xa5, 0xae, 0xdf, + 0xc3, 0x8c, 0x1a, 0x27, 0xa8, 0x5d, 0x16, 0x93, 0x0a, 0x10, 0xf3, 0x54, 0x56, 0xf2, 0xdf, 0xed, + 0x55, 0xb2, 0xd4, 0x22, 0xdd, 0x07, 0x49, 0x23, 0xad, 0x60, 0xe9, 0xcb, 0x78, 0x8e, 0x1a, 0x33, + 0x2a, 0xc0, 0x07, 0x01, 0xa6, 
0x2e, 0xbd, 0xde, 0x5f, 0x33, 0xe0, 0x5d, 0x49, 0x21, 0x15, 0xd9, + 0xf8, 0x8a, 0x76, 0x43, 0x0e, 0xbe, 0xe3, 0x42, 0x08, 0x91, 0xc7, 0xa2, 0xa9, 0x73, 0x4e, 0xd9, + 0x34, 0x10, 0xe6, 0xa6, 0x9c, 0xb2, 0x87, 0x8b, 0x37, 0x83, 0xd5, 0x9b, 0xc1, 0x1f, 0xdf, 0x46, + 0xe2, 0x68, 0x24, 0x0b, 0xb3, 0x5b, 0x79, 0x91, 0xdb, 0xc3, 0xba, 0x79, 0xa1, 0x0d, 0x34, 0xd2, + 0xcd, 0x07, 0xd9, 0x6a, 0xce, 0x67, 0x39, 0xc6, 0xee, 0xa0, 0xff, 0x03, 0x9e, 0x0a, 0x87, 0xf9, + 0x34, 0x12, 0x6c, 0xc2, 0x68, 0xd2, 0xff, 0x51, 0x43, 0xdd, 0x53, 0xee, 0x41, 0xc8, 0xc4, 0xe2, + 0xba, 0x6e, 0x99, 0x21, 0x2c, 0xaf, 0xcb, 0xbe, 0xd7, 0x65, 0x50, 0x4a, 0x52, 0x91, 0x8d, 0x13, + 0xd4, 0x0a, 0x5d, 0x47, 0xb5, 0x9a, 0x37, 0xa8, 0x0f, 0x9a, 0xa3, 0x87, 0x77, 0xfc, 0xac, 0xeb, + 0x95, 0xa4, 0x19, 0x2e, 0xad, 0xbf, 0xb3, 0x09, 0xfd, 0x9f, 0x34, 0x61, 0x3c, 0x41, 0x8d, 0x38, + 0x61, 0x3c, 0xc9, 0xe3, 0xd5, 0xe5, 0x3b, 0x44, 0xb9, 0xa8, 0x3e, 0xd4, 0xcd, 0x0b, 0x8d, 0x54, + 0x98, 0xfd, 0x0a, 0x3d, 0x62, 0xbc, 0xf0, 0x1e, 0x27, 0xfc, 0xfb, 0x62, 0x7d, 0x0c, 0xbb, 0xad, + 0x9c, 0x9f, 0xe5, 0x7e, 0xce, 0xb4, 0x2f, 0xd5, 0xcb, 0x73, 0xb7, 0xa4, 0xc5, 0xa3, 0x3f, 0x01, + 0x00, 0x00, 0xff, 0xff, 0x6b, 0xdf, 0xf4, 0xe1, 0x92, 0x04, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/service/discovery/v2/ads/ads.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/service/discovery/v2/ads/ads.pb.go new file mode 100755 index 0000000..2efdb00 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/service/discovery/v2/ads/ads.pb.go @@ -0,0 +1,251 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/service/discovery/v2/ads.proto + +package v2 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import discovery "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AdsDummy struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AdsDummy) Reset() { *m = AdsDummy{} } +func (m *AdsDummy) String() string { return proto.CompactTextString(m) } +func (*AdsDummy) ProtoMessage() {} +func (*AdsDummy) Descriptor() ([]byte, []int) { + return fileDescriptor_ads_e4cd1a296681dd94, []int{0} +} +func (m *AdsDummy) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AdsDummy.Unmarshal(m, b) +} +func (m *AdsDummy) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AdsDummy.Marshal(b, m, deterministic) +} +func (dst *AdsDummy) XXX_Merge(src proto.Message) { + xxx_messageInfo_AdsDummy.Merge(dst, src) +} +func (m *AdsDummy) XXX_Size() int { + return xxx_messageInfo_AdsDummy.Size(m) +} +func (m *AdsDummy) XXX_DiscardUnknown() { + xxx_messageInfo_AdsDummy.DiscardUnknown(m) +} + +var xxx_messageInfo_AdsDummy proto.InternalMessageInfo + +func init() { + proto.RegisterType((*AdsDummy)(nil), "envoy.service.discovery.v2.AdsDummy") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// AggregatedDiscoveryServiceClient is the client API for AggregatedDiscoveryService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type AggregatedDiscoveryServiceClient interface { + StreamAggregatedResources(ctx context.Context, opts ...grpc.CallOption) (AggregatedDiscoveryService_StreamAggregatedResourcesClient, error) + DeltaAggregatedResources(ctx context.Context, opts ...grpc.CallOption) (AggregatedDiscoveryService_DeltaAggregatedResourcesClient, error) +} + +type aggregatedDiscoveryServiceClient struct { + cc *grpc.ClientConn +} + +func NewAggregatedDiscoveryServiceClient(cc *grpc.ClientConn) AggregatedDiscoveryServiceClient { + return &aggregatedDiscoveryServiceClient{cc} +} + +func (c *aggregatedDiscoveryServiceClient) StreamAggregatedResources(ctx context.Context, opts ...grpc.CallOption) (AggregatedDiscoveryService_StreamAggregatedResourcesClient, error) { + stream, err := c.cc.NewStream(ctx, &_AggregatedDiscoveryService_serviceDesc.Streams[0], "/envoy.service.discovery.v2.AggregatedDiscoveryService/StreamAggregatedResources", opts...) 
+ if err != nil { + return nil, err + } + x := &aggregatedDiscoveryServiceStreamAggregatedResourcesClient{stream} + return x, nil +} + +type AggregatedDiscoveryService_StreamAggregatedResourcesClient interface { + Send(*discovery.DiscoveryRequest) error + Recv() (*discovery.DiscoveryResponse, error) + grpc.ClientStream +} + +type aggregatedDiscoveryServiceStreamAggregatedResourcesClient struct { + grpc.ClientStream +} + +func (x *aggregatedDiscoveryServiceStreamAggregatedResourcesClient) Send(m *discovery.DiscoveryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *aggregatedDiscoveryServiceStreamAggregatedResourcesClient) Recv() (*discovery.DiscoveryResponse, error) { + m := new(discovery.DiscoveryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *aggregatedDiscoveryServiceClient) DeltaAggregatedResources(ctx context.Context, opts ...grpc.CallOption) (AggregatedDiscoveryService_DeltaAggregatedResourcesClient, error) { + stream, err := c.cc.NewStream(ctx, &_AggregatedDiscoveryService_serviceDesc.Streams[1], "/envoy.service.discovery.v2.AggregatedDiscoveryService/DeltaAggregatedResources", opts...) + if err != nil { + return nil, err + } + x := &aggregatedDiscoveryServiceDeltaAggregatedResourcesClient{stream} + return x, nil +} + +type AggregatedDiscoveryService_DeltaAggregatedResourcesClient interface { + Send(*discovery.DeltaDiscoveryRequest) error + Recv() (*discovery.DeltaDiscoveryResponse, error) + grpc.ClientStream +} + +type aggregatedDiscoveryServiceDeltaAggregatedResourcesClient struct { + grpc.ClientStream +} + +func (x *aggregatedDiscoveryServiceDeltaAggregatedResourcesClient) Send(m *discovery.DeltaDiscoveryRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *aggregatedDiscoveryServiceDeltaAggregatedResourcesClient) Recv() (*discovery.DeltaDiscoveryResponse, error) { + m := new(discovery.DeltaDiscoveryResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// AggregatedDiscoveryServiceServer is the server API for AggregatedDiscoveryService service. 
+type AggregatedDiscoveryServiceServer interface { + StreamAggregatedResources(AggregatedDiscoveryService_StreamAggregatedResourcesServer) error + DeltaAggregatedResources(AggregatedDiscoveryService_DeltaAggregatedResourcesServer) error +} + +func RegisterAggregatedDiscoveryServiceServer(s *grpc.Server, srv AggregatedDiscoveryServiceServer) { + s.RegisterService(&_AggregatedDiscoveryService_serviceDesc, srv) +} + +func _AggregatedDiscoveryService_StreamAggregatedResources_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(AggregatedDiscoveryServiceServer).StreamAggregatedResources(&aggregatedDiscoveryServiceStreamAggregatedResourcesServer{stream}) +} + +type AggregatedDiscoveryService_StreamAggregatedResourcesServer interface { + Send(*discovery.DiscoveryResponse) error + Recv() (*discovery.DiscoveryRequest, error) + grpc.ServerStream +} + +type aggregatedDiscoveryServiceStreamAggregatedResourcesServer struct { + grpc.ServerStream +} + +func (x *aggregatedDiscoveryServiceStreamAggregatedResourcesServer) Send(m *discovery.DiscoveryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *aggregatedDiscoveryServiceStreamAggregatedResourcesServer) Recv() (*discovery.DiscoveryRequest, error) { + m := new(discovery.DiscoveryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _AggregatedDiscoveryService_DeltaAggregatedResources_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(AggregatedDiscoveryServiceServer).DeltaAggregatedResources(&aggregatedDiscoveryServiceDeltaAggregatedResourcesServer{stream}) +} + +type AggregatedDiscoveryService_DeltaAggregatedResourcesServer interface { + Send(*discovery.DeltaDiscoveryResponse) error + Recv() (*discovery.DeltaDiscoveryRequest, error) + grpc.ServerStream +} + +type aggregatedDiscoveryServiceDeltaAggregatedResourcesServer struct { + grpc.ServerStream +} + +func (x *aggregatedDiscoveryServiceDeltaAggregatedResourcesServer) Send(m *discovery.DeltaDiscoveryResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *aggregatedDiscoveryServiceDeltaAggregatedResourcesServer) Recv() (*discovery.DeltaDiscoveryRequest, error) { + m := new(discovery.DeltaDiscoveryRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _AggregatedDiscoveryService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "envoy.service.discovery.v2.AggregatedDiscoveryService", + HandlerType: (*AggregatedDiscoveryServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamAggregatedResources", + Handler: _AggregatedDiscoveryService_StreamAggregatedResources_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "DeltaAggregatedResources", + Handler: _AggregatedDiscoveryService_DeltaAggregatedResources_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "envoy/service/discovery/v2/ads.proto", +} + +func init() { + proto.RegisterFile("envoy/service/discovery/v2/ads.proto", fileDescriptor_ads_e4cd1a296681dd94) +} + +var fileDescriptor_ads_e4cd1a296681dd94 = []byte{ + // 236 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x90, 0xb1, 0x4e, 0xc3, 0x30, + 0x10, 0x86, 0x31, 0x03, 0x42, 0x1e, 0x33, 0x41, 0x84, 0x40, 0x2a, 0x1d, 0x3a, 0x5d, 0x90, 0x99, + 0x19, 0x5a, 0xe5, 0x01, 0xaa, 0x76, 0x63, 0x73, 0x93, 0x53, 0x64, 0x41, 0x7a, 0xc6, 0xe7, 0x58, + 0xf8, 0x0d, 0x78, 0x59, 0xde, 
0x01, 0x39, 0x86, 0x16, 0x01, 0x61, 0xbe, 0xef, 0xff, 0xff, 0xd3, + 0x27, 0xe7, 0xb8, 0x0f, 0x14, 0x2b, 0x46, 0x17, 0x4c, 0x83, 0x55, 0x6b, 0xb8, 0xa1, 0x80, 0x2e, + 0x56, 0x41, 0x55, 0xba, 0x65, 0xb0, 0x8e, 0x3c, 0x15, 0xe5, 0x48, 0xc1, 0x27, 0x05, 0x07, 0x0a, + 0x82, 0x2a, 0xaf, 0x72, 0x83, 0xb6, 0x26, 0x65, 0x8e, 0xa7, 0x31, 0x39, 0x93, 0xf2, 0x7c, 0xd9, + 0x72, 0x3d, 0xf4, 0x7d, 0x54, 0xef, 0x42, 0x96, 0xcb, 0xae, 0x73, 0xd8, 0x69, 0x8f, 0x6d, 0xfd, + 0x45, 0x6e, 0x73, 0x6b, 0xb1, 0x93, 0x97, 0x5b, 0xef, 0x50, 0xf7, 0x47, 0x66, 0x83, 0x4c, 0x83, + 0x6b, 0x90, 0x8b, 0x6b, 0xc8, 0x2f, 0x68, 0x6b, 0x20, 0x28, 0x38, 0x84, 0x37, 0xf8, 0x32, 0x20, + 0xfb, 0xf2, 0x66, 0xf2, 0xce, 0x96, 0xf6, 0x8c, 0xb3, 0x93, 0x85, 0xb8, 0x13, 0xc5, 0x93, 0xbc, + 0xa8, 0xf1, 0xd9, 0xeb, 0xbf, 0x26, 0x6e, 0x7f, 0x54, 0x24, 0xee, 0xd7, 0xce, 0xfc, 0x7f, 0xe8, + 0xfb, 0xd8, 0xea, 0x41, 0x2e, 0x0c, 0x65, 0xde, 0x3a, 0x7a, 0x8d, 0x30, 0x6d, 0x71, 0x95, 0x2c, + 0xad, 0x93, 0xb1, 0xb5, 0x78, 0x3c, 0x0d, 0xea, 0x4d, 0x88, 0xdd, 0xd9, 0x68, 0xf0, 0xfe, 0x23, + 0x00, 0x00, 0xff, 0xff, 0x7e, 0x66, 0x01, 0x47, 0xa3, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent/percent.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent/percent.pb.go new file mode 100755 index 0000000..423dbdc --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/percent/percent.pb.go @@ -0,0 +1,160 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/type/percent.proto + +package envoy_type + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _ "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FractionalPercent_DenominatorType int32 + +const ( + FractionalPercent_HUNDRED FractionalPercent_DenominatorType = 0 + FractionalPercent_TEN_THOUSAND FractionalPercent_DenominatorType = 1 + FractionalPercent_MILLION FractionalPercent_DenominatorType = 2 +) + +var FractionalPercent_DenominatorType_name = map[int32]string{ + 0: "HUNDRED", + 1: "TEN_THOUSAND", + 2: "MILLION", +} +var FractionalPercent_DenominatorType_value = map[string]int32{ + "HUNDRED": 0, + "TEN_THOUSAND": 1, + "MILLION": 2, +} + +func (x FractionalPercent_DenominatorType) String() string { + return proto.EnumName(FractionalPercent_DenominatorType_name, int32(x)) +} +func (FractionalPercent_DenominatorType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_percent_cd85ccaca181f641, []int{1, 0} +} + +type Percent struct { + Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Percent) Reset() { *m = Percent{} } +func (m *Percent) String() string { return proto.CompactTextString(m) } +func (*Percent) ProtoMessage() {} +func (*Percent) Descriptor() ([]byte, []int) { + return fileDescriptor_percent_cd85ccaca181f641, []int{0} +} +func (m *Percent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Percent.Unmarshal(m, b) +} +func (m *Percent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Percent.Marshal(b, m, deterministic) +} +func (dst *Percent) XXX_Merge(src proto.Message) { + xxx_messageInfo_Percent.Merge(dst, src) +} +func (m *Percent) XXX_Size() int { + return xxx_messageInfo_Percent.Size(m) +} +func (m *Percent) XXX_DiscardUnknown() { + xxx_messageInfo_Percent.DiscardUnknown(m) +} + +var xxx_messageInfo_Percent proto.InternalMessageInfo + +func (m *Percent) GetValue() float64 { + if m != nil { + return m.Value + } + return 0 +} + +type FractionalPercent struct { + Numerator uint32 `protobuf:"varint,1,opt,name=numerator,proto3" json:"numerator,omitempty"` + Denominator FractionalPercent_DenominatorType `protobuf:"varint,2,opt,name=denominator,proto3,enum=envoy.type.FractionalPercent_DenominatorType" json:"denominator,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FractionalPercent) Reset() { *m = FractionalPercent{} } +func (m *FractionalPercent) String() string { return proto.CompactTextString(m) } +func (*FractionalPercent) ProtoMessage() {} +func (*FractionalPercent) Descriptor() ([]byte, []int) { + return fileDescriptor_percent_cd85ccaca181f641, []int{1} +} +func (m *FractionalPercent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FractionalPercent.Unmarshal(m, b) +} +func (m *FractionalPercent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FractionalPercent.Marshal(b, m, deterministic) +} +func (dst *FractionalPercent) XXX_Merge(src proto.Message) { + xxx_messageInfo_FractionalPercent.Merge(dst, src) +} +func (m *FractionalPercent) XXX_Size() int { + return xxx_messageInfo_FractionalPercent.Size(m) +} +func (m *FractionalPercent) XXX_DiscardUnknown() { + xxx_messageInfo_FractionalPercent.DiscardUnknown(m) +} + +var xxx_messageInfo_FractionalPercent proto.InternalMessageInfo + +func (m *FractionalPercent) GetNumerator() uint32 { + if m != nil { + return m.Numerator + } + return 0 +} + 
+func (m *FractionalPercent) GetDenominator() FractionalPercent_DenominatorType { + if m != nil { + return m.Denominator + } + return FractionalPercent_HUNDRED +} + +func init() { + proto.RegisterType((*Percent)(nil), "envoy.type.Percent") + proto.RegisterType((*FractionalPercent)(nil), "envoy.type.FractionalPercent") + proto.RegisterEnum("envoy.type.FractionalPercent_DenominatorType", FractionalPercent_DenominatorType_name, FractionalPercent_DenominatorType_value) +} + +func init() { proto.RegisterFile("envoy/type/percent.proto", fileDescriptor_percent_cd85ccaca181f641) } + +var fileDescriptor_percent_cd85ccaca181f641 = []byte{ + // 277 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x48, 0xcd, 0x2b, 0xcb, + 0xaf, 0xd4, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x2f, 0x48, 0x2d, 0x4a, 0x4e, 0xcd, 0x2b, 0xd1, 0x2b, + 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x02, 0xcb, 0xe8, 0x81, 0x64, 0xa4, 0xc4, 0xcb, 0x12, 0x73, + 0x32, 0x53, 0x12, 0x4b, 0x52, 0xf5, 0x61, 0x0c, 0x88, 0x22, 0x25, 0x2b, 0x2e, 0xf6, 0x00, 0x88, + 0x2e, 0x21, 0x7d, 0x2e, 0xd6, 0xb2, 0xc4, 0x9c, 0xd2, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0x46, + 0x27, 0xc9, 0x5d, 0x2f, 0x0f, 0x30, 0x8b, 0x08, 0x09, 0x49, 0x32, 0x80, 0x41, 0xa4, 0x83, 0x26, + 0x03, 0x14, 0x04, 0x41, 0xd4, 0x29, 0x9d, 0x65, 0xe4, 0x12, 0x74, 0x2b, 0x4a, 0x4c, 0x2e, 0xc9, + 0xcc, 0xcf, 0x4b, 0xcc, 0x81, 0x19, 0x23, 0xc3, 0xc5, 0x99, 0x57, 0x9a, 0x9b, 0x5a, 0x94, 0x58, + 0x92, 0x5f, 0x04, 0x36, 0x8a, 0x37, 0x08, 0x21, 0x20, 0x14, 0xcd, 0xc5, 0x9d, 0x92, 0x9a, 0x97, + 0x9f, 0x9b, 0x99, 0x07, 0x96, 0x67, 0x52, 0x60, 0xd4, 0xe0, 0x33, 0xd2, 0xd5, 0x43, 0x38, 0x55, + 0x0f, 0xc3, 0x44, 0x3d, 0x17, 0x84, 0x86, 0x90, 0xca, 0x82, 0x54, 0x27, 0x2e, 0x90, 0xcb, 0x58, + 0x9b, 0x18, 0x99, 0x04, 0x18, 0x83, 0x90, 0x4d, 0x53, 0xb2, 0xe5, 0xe2, 0x47, 0x53, 0x2b, 0xc4, + 0xcd, 0xc5, 0xee, 0x11, 0xea, 0xe7, 0x12, 0xe4, 0xea, 0x22, 0xc0, 0x20, 0x24, 0xc0, 0xc5, 0x13, + 0xe2, 0xea, 0x17, 0x1f, 0xe2, 0xe1, 0x1f, 0x1a, 0xec, 0xe8, 0xe7, 0x22, 0xc0, 0x08, 0x92, 0xf6, + 0xf5, 0xf4, 0xf1, 0xf1, 0xf4, 0xf7, 0x13, 0x60, 0x72, 0xd2, 0xe2, 0x92, 0xc8, 0xcc, 0x87, 0x38, + 0xa5, 0xa0, 0x28, 0xbf, 0xa2, 0x12, 0xc9, 0x55, 0x4e, 0x3c, 0x50, 0xc7, 0x04, 0x80, 0x42, 0x2d, + 0x80, 0x31, 0x89, 0x0d, 0x1c, 0x7c, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x3d, 0x79, 0xd6, + 0xbb, 0x7f, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/range/range.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/range/range.pb.go new file mode 100755 index 0000000..feee6ac --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/envoy/type/range/range.pb.go @@ -0,0 +1,132 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: envoy/type/range.proto + +package envoy_type + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Int64Range struct { + Start int64 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + End int64 `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int64Range) Reset() { *m = Int64Range{} } +func (m *Int64Range) String() string { return proto.CompactTextString(m) } +func (*Int64Range) ProtoMessage() {} +func (*Int64Range) Descriptor() ([]byte, []int) { + return fileDescriptor_range_b0dd53fd27ccc9b2, []int{0} +} +func (m *Int64Range) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int64Range.Unmarshal(m, b) +} +func (m *Int64Range) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int64Range.Marshal(b, m, deterministic) +} +func (dst *Int64Range) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64Range.Merge(dst, src) +} +func (m *Int64Range) XXX_Size() int { + return xxx_messageInfo_Int64Range.Size(m) +} +func (m *Int64Range) XXX_DiscardUnknown() { + xxx_messageInfo_Int64Range.DiscardUnknown(m) +} + +var xxx_messageInfo_Int64Range proto.InternalMessageInfo + +func (m *Int64Range) GetStart() int64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Int64Range) GetEnd() int64 { + if m != nil { + return m.End + } + return 0 +} + +type DoubleRange struct { + Start float64 `protobuf:"fixed64,1,opt,name=start,proto3" json:"start,omitempty"` + End float64 `protobuf:"fixed64,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DoubleRange) Reset() { *m = DoubleRange{} } +func (m *DoubleRange) String() string { return proto.CompactTextString(m) } +func (*DoubleRange) ProtoMessage() {} +func (*DoubleRange) Descriptor() ([]byte, []int) { + return fileDescriptor_range_b0dd53fd27ccc9b2, []int{1} +} +func (m *DoubleRange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleRange.Unmarshal(m, b) +} +func (m *DoubleRange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleRange.Marshal(b, m, deterministic) +} +func (dst *DoubleRange) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleRange.Merge(dst, src) +} +func (m *DoubleRange) XXX_Size() int { + return xxx_messageInfo_DoubleRange.Size(m) +} +func (m *DoubleRange) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleRange.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleRange proto.InternalMessageInfo + +func (m *DoubleRange) GetStart() float64 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *DoubleRange) GetEnd() float64 { + if m != nil { + return m.End + } + return 0 +} + +func init() { + proto.RegisterType((*Int64Range)(nil), "envoy.type.Int64Range") + proto.RegisterType((*DoubleRange)(nil), "envoy.type.DoubleRange") +} + +func init() { proto.RegisterFile("envoy/type/range.proto", fileDescriptor_range_b0dd53fd27ccc9b2) } + +var fileDescriptor_range_b0dd53fd27ccc9b2 = []byte{ + // 154 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4b, 0xcd, 0x2b, 0xcb, + 0xaf, 0xd4, 0x2f, 0xa9, 0x2c, 0x48, 0xd5, 0x2f, 0x4a, 0xcc, 0x4b, 0x4f, 0xd5, 0x2b, 0x28, 0xca, + 0x2f, 0xc9, 0x17, 0xe2, 0x02, 0x8b, 0xeb, 0x81, 0xc4, 0x95, 0x4c, 0xb8, 0xb8, 0x3c, 0xf3, 0x4a, + 0xcc, 0x4c, 0x82, 0x40, 0xf2, 0x42, 0x22, 0x5c, 0xac, 0xc5, 
0x25, 0x89, 0x45, 0x25, 0x12, 0x8c, + 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x10, 0x8e, 0x90, 0x00, 0x17, 0x73, 0x6a, 0x5e, 0x8a, 0x04, 0x13, + 0x58, 0x0c, 0xc4, 0x54, 0x32, 0xe5, 0xe2, 0x76, 0xc9, 0x2f, 0x4d, 0xca, 0x49, 0xc5, 0xa2, 0x8d, + 0x11, 0x8b, 0x36, 0x46, 0xb0, 0x36, 0x27, 0x13, 0x2e, 0x89, 0xcc, 0x7c, 0x3d, 0xb0, 0xed, 0x05, + 0x45, 0xf9, 0x15, 0x95, 0x7a, 0x08, 0x87, 0x38, 0x71, 0x81, 0x8d, 0x0a, 0x00, 0x39, 0x30, 0x80, + 0x31, 0x0a, 0xe2, 0xc4, 0x78, 0x90, 0x4c, 0x12, 0x1b, 0xd8, 0xd5, 0xc6, 0x80, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x7f, 0xad, 0x02, 0xe6, 0xcf, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/internal/proto/validate/validate.pb.go b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/validate/validate.pb.go new file mode 100755 index 0000000..1495d52 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/internal/proto/validate/validate.pb.go @@ -0,0 +1,3214 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: validate/validate.proto + +package validate // import "google.golang.org/grpc/balancer/xds/internal/proto/validate" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type FieldRules struct { + // Types that are valid to be assigned to Type: + // *FieldRules_Float + // *FieldRules_Double + // *FieldRules_Int32 + // *FieldRules_Int64 + // *FieldRules_Uint32 + // *FieldRules_Uint64 + // *FieldRules_Sint32 + // *FieldRules_Sint64 + // *FieldRules_Fixed32 + // *FieldRules_Fixed64 + // *FieldRules_Sfixed32 + // *FieldRules_Sfixed64 + // *FieldRules_Bool + // *FieldRules_String_ + // *FieldRules_Bytes + // *FieldRules_Enum + // *FieldRules_Message + // *FieldRules_Repeated + // *FieldRules_Map + // *FieldRules_Any + // *FieldRules_Duration + // *FieldRules_Timestamp + Type isFieldRules_Type `protobuf_oneof:"type"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FieldRules) Reset() { *m = FieldRules{} } +func (m *FieldRules) String() string { return proto.CompactTextString(m) } +func (*FieldRules) ProtoMessage() {} +func (*FieldRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{0} +} +func (m *FieldRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FieldRules.Unmarshal(m, b) +} +func (m *FieldRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FieldRules.Marshal(b, m, deterministic) +} +func (dst *FieldRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_FieldRules.Merge(dst, src) +} +func (m *FieldRules) XXX_Size() int { + return xxx_messageInfo_FieldRules.Size(m) +} +func (m *FieldRules) XXX_DiscardUnknown() { + xxx_messageInfo_FieldRules.DiscardUnknown(m) +} + +var xxx_messageInfo_FieldRules proto.InternalMessageInfo + +type isFieldRules_Type interface { + isFieldRules_Type() +} + +type FieldRules_Float struct { + Float *FloatRules `protobuf:"bytes,1,opt,name=float,oneof"` +} + +type FieldRules_Double struct { + Double *DoubleRules `protobuf:"bytes,2,opt,name=double,oneof"` +} + +type FieldRules_Int32 struct { + Int32 *Int32Rules `protobuf:"bytes,3,opt,name=int32,oneof"` +} + +type FieldRules_Int64 struct { + Int64 *Int64Rules `protobuf:"bytes,4,opt,name=int64,oneof"` +} + +type FieldRules_Uint32 struct { + Uint32 *UInt32Rules `protobuf:"bytes,5,opt,name=uint32,oneof"` +} + +type FieldRules_Uint64 struct { + Uint64 *UInt64Rules `protobuf:"bytes,6,opt,name=uint64,oneof"` +} + +type FieldRules_Sint32 struct { + Sint32 *SInt32Rules `protobuf:"bytes,7,opt,name=sint32,oneof"` +} + +type FieldRules_Sint64 struct { + Sint64 *SInt64Rules `protobuf:"bytes,8,opt,name=sint64,oneof"` +} + +type FieldRules_Fixed32 struct { + Fixed32 *Fixed32Rules `protobuf:"bytes,9,opt,name=fixed32,oneof"` +} + +type FieldRules_Fixed64 struct { + Fixed64 *Fixed64Rules `protobuf:"bytes,10,opt,name=fixed64,oneof"` +} + +type FieldRules_Sfixed32 struct { + Sfixed32 *SFixed32Rules `protobuf:"bytes,11,opt,name=sfixed32,oneof"` +} + +type FieldRules_Sfixed64 struct { + Sfixed64 *SFixed64Rules `protobuf:"bytes,12,opt,name=sfixed64,oneof"` +} + +type FieldRules_Bool struct { + Bool *BoolRules `protobuf:"bytes,13,opt,name=bool,oneof"` +} + +type FieldRules_String_ struct { + String_ *StringRules `protobuf:"bytes,14,opt,name=string,oneof"` +} + +type FieldRules_Bytes struct { + Bytes *BytesRules `protobuf:"bytes,15,opt,name=bytes,oneof"` +} + +type FieldRules_Enum struct { + Enum *EnumRules `protobuf:"bytes,16,opt,name=enum,oneof"` +} + +type FieldRules_Message struct { + Message *MessageRules `protobuf:"bytes,17,opt,name=message,oneof"` +} + 
+type FieldRules_Repeated struct { + Repeated *RepeatedRules `protobuf:"bytes,18,opt,name=repeated,oneof"` +} + +type FieldRules_Map struct { + Map *MapRules `protobuf:"bytes,19,opt,name=map,oneof"` +} + +type FieldRules_Any struct { + Any *AnyRules `protobuf:"bytes,20,opt,name=any,oneof"` +} + +type FieldRules_Duration struct { + Duration *DurationRules `protobuf:"bytes,21,opt,name=duration,oneof"` +} + +type FieldRules_Timestamp struct { + Timestamp *TimestampRules `protobuf:"bytes,22,opt,name=timestamp,oneof"` +} + +func (*FieldRules_Float) isFieldRules_Type() {} + +func (*FieldRules_Double) isFieldRules_Type() {} + +func (*FieldRules_Int32) isFieldRules_Type() {} + +func (*FieldRules_Int64) isFieldRules_Type() {} + +func (*FieldRules_Uint32) isFieldRules_Type() {} + +func (*FieldRules_Uint64) isFieldRules_Type() {} + +func (*FieldRules_Sint32) isFieldRules_Type() {} + +func (*FieldRules_Sint64) isFieldRules_Type() {} + +func (*FieldRules_Fixed32) isFieldRules_Type() {} + +func (*FieldRules_Fixed64) isFieldRules_Type() {} + +func (*FieldRules_Sfixed32) isFieldRules_Type() {} + +func (*FieldRules_Sfixed64) isFieldRules_Type() {} + +func (*FieldRules_Bool) isFieldRules_Type() {} + +func (*FieldRules_String_) isFieldRules_Type() {} + +func (*FieldRules_Bytes) isFieldRules_Type() {} + +func (*FieldRules_Enum) isFieldRules_Type() {} + +func (*FieldRules_Message) isFieldRules_Type() {} + +func (*FieldRules_Repeated) isFieldRules_Type() {} + +func (*FieldRules_Map) isFieldRules_Type() {} + +func (*FieldRules_Any) isFieldRules_Type() {} + +func (*FieldRules_Duration) isFieldRules_Type() {} + +func (*FieldRules_Timestamp) isFieldRules_Type() {} + +func (m *FieldRules) GetType() isFieldRules_Type { + if m != nil { + return m.Type + } + return nil +} + +func (m *FieldRules) GetFloat() *FloatRules { + if x, ok := m.GetType().(*FieldRules_Float); ok { + return x.Float + } + return nil +} + +func (m *FieldRules) GetDouble() *DoubleRules { + if x, ok := m.GetType().(*FieldRules_Double); ok { + return x.Double + } + return nil +} + +func (m *FieldRules) GetInt32() *Int32Rules { + if x, ok := m.GetType().(*FieldRules_Int32); ok { + return x.Int32 + } + return nil +} + +func (m *FieldRules) GetInt64() *Int64Rules { + if x, ok := m.GetType().(*FieldRules_Int64); ok { + return x.Int64 + } + return nil +} + +func (m *FieldRules) GetUint32() *UInt32Rules { + if x, ok := m.GetType().(*FieldRules_Uint32); ok { + return x.Uint32 + } + return nil +} + +func (m *FieldRules) GetUint64() *UInt64Rules { + if x, ok := m.GetType().(*FieldRules_Uint64); ok { + return x.Uint64 + } + return nil +} + +func (m *FieldRules) GetSint32() *SInt32Rules { + if x, ok := m.GetType().(*FieldRules_Sint32); ok { + return x.Sint32 + } + return nil +} + +func (m *FieldRules) GetSint64() *SInt64Rules { + if x, ok := m.GetType().(*FieldRules_Sint64); ok { + return x.Sint64 + } + return nil +} + +func (m *FieldRules) GetFixed32() *Fixed32Rules { + if x, ok := m.GetType().(*FieldRules_Fixed32); ok { + return x.Fixed32 + } + return nil +} + +func (m *FieldRules) GetFixed64() *Fixed64Rules { + if x, ok := m.GetType().(*FieldRules_Fixed64); ok { + return x.Fixed64 + } + return nil +} + +func (m *FieldRules) GetSfixed32() *SFixed32Rules { + if x, ok := m.GetType().(*FieldRules_Sfixed32); ok { + return x.Sfixed32 + } + return nil +} + +func (m *FieldRules) GetSfixed64() *SFixed64Rules { + if x, ok := m.GetType().(*FieldRules_Sfixed64); ok { + return x.Sfixed64 + } + return nil +} + +func (m *FieldRules) GetBool() *BoolRules { + if x, ok := 
m.GetType().(*FieldRules_Bool); ok { + return x.Bool + } + return nil +} + +func (m *FieldRules) GetString_() *StringRules { + if x, ok := m.GetType().(*FieldRules_String_); ok { + return x.String_ + } + return nil +} + +func (m *FieldRules) GetBytes() *BytesRules { + if x, ok := m.GetType().(*FieldRules_Bytes); ok { + return x.Bytes + } + return nil +} + +func (m *FieldRules) GetEnum() *EnumRules { + if x, ok := m.GetType().(*FieldRules_Enum); ok { + return x.Enum + } + return nil +} + +func (m *FieldRules) GetMessage() *MessageRules { + if x, ok := m.GetType().(*FieldRules_Message); ok { + return x.Message + } + return nil +} + +func (m *FieldRules) GetRepeated() *RepeatedRules { + if x, ok := m.GetType().(*FieldRules_Repeated); ok { + return x.Repeated + } + return nil +} + +func (m *FieldRules) GetMap() *MapRules { + if x, ok := m.GetType().(*FieldRules_Map); ok { + return x.Map + } + return nil +} + +func (m *FieldRules) GetAny() *AnyRules { + if x, ok := m.GetType().(*FieldRules_Any); ok { + return x.Any + } + return nil +} + +func (m *FieldRules) GetDuration() *DurationRules { + if x, ok := m.GetType().(*FieldRules_Duration); ok { + return x.Duration + } + return nil +} + +func (m *FieldRules) GetTimestamp() *TimestampRules { + if x, ok := m.GetType().(*FieldRules_Timestamp); ok { + return x.Timestamp + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*FieldRules) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _FieldRules_OneofMarshaler, _FieldRules_OneofUnmarshaler, _FieldRules_OneofSizer, []interface{}{ + (*FieldRules_Float)(nil), + (*FieldRules_Double)(nil), + (*FieldRules_Int32)(nil), + (*FieldRules_Int64)(nil), + (*FieldRules_Uint32)(nil), + (*FieldRules_Uint64)(nil), + (*FieldRules_Sint32)(nil), + (*FieldRules_Sint64)(nil), + (*FieldRules_Fixed32)(nil), + (*FieldRules_Fixed64)(nil), + (*FieldRules_Sfixed32)(nil), + (*FieldRules_Sfixed64)(nil), + (*FieldRules_Bool)(nil), + (*FieldRules_String_)(nil), + (*FieldRules_Bytes)(nil), + (*FieldRules_Enum)(nil), + (*FieldRules_Message)(nil), + (*FieldRules_Repeated)(nil), + (*FieldRules_Map)(nil), + (*FieldRules_Any)(nil), + (*FieldRules_Duration)(nil), + (*FieldRules_Timestamp)(nil), + } +} + +func _FieldRules_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*FieldRules) + // type + switch x := m.Type.(type) { + case *FieldRules_Float: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Float); err != nil { + return err + } + case *FieldRules_Double: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Double); err != nil { + return err + } + case *FieldRules_Int32: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Int32); err != nil { + return err + } + case *FieldRules_Int64: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Int64); err != nil { + return err + } + case *FieldRules_Uint32: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Uint32); err != nil { + return err + } + case *FieldRules_Uint64: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Uint64); err != nil { + return err + } + case *FieldRules_Sint32: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Sint32); err != nil { + return err + } + case *FieldRules_Sint64: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := 
b.EncodeMessage(x.Sint64); err != nil { + return err + } + case *FieldRules_Fixed32: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Fixed32); err != nil { + return err + } + case *FieldRules_Fixed64: + b.EncodeVarint(10<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Fixed64); err != nil { + return err + } + case *FieldRules_Sfixed32: + b.EncodeVarint(11<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Sfixed32); err != nil { + return err + } + case *FieldRules_Sfixed64: + b.EncodeVarint(12<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Sfixed64); err != nil { + return err + } + case *FieldRules_Bool: + b.EncodeVarint(13<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Bool); err != nil { + return err + } + case *FieldRules_String_: + b.EncodeVarint(14<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.String_); err != nil { + return err + } + case *FieldRules_Bytes: + b.EncodeVarint(15<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Bytes); err != nil { + return err + } + case *FieldRules_Enum: + b.EncodeVarint(16<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Enum); err != nil { + return err + } + case *FieldRules_Message: + b.EncodeVarint(17<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Message); err != nil { + return err + } + case *FieldRules_Repeated: + b.EncodeVarint(18<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Repeated); err != nil { + return err + } + case *FieldRules_Map: + b.EncodeVarint(19<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Map); err != nil { + return err + } + case *FieldRules_Any: + b.EncodeVarint(20<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Any); err != nil { + return err + } + case *FieldRules_Duration: + b.EncodeVarint(21<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Duration); err != nil { + return err + } + case *FieldRules_Timestamp: + b.EncodeVarint(22<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Timestamp); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("FieldRules.Type has unexpected type %T", x) + } + return nil +} + +func _FieldRules_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*FieldRules) + switch tag { + case 1: // type.float + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FloatRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Float{msg} + return true, err + case 2: // type.double + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DoubleRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Double{msg} + return true, err + case 3: // type.int32 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Int32Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Int32{msg} + return true, err + case 4: // type.int64 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Int64Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Int64{msg} + return true, err + case 5: // type.uint32 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UInt32Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Uint32{msg} + return true, err + case 6: // type.uint64 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UInt64Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Uint64{msg} + return true, err + case 7: // type.sint32 + if 
wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SInt32Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Sint32{msg} + return true, err + case 8: // type.sint64 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SInt64Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Sint64{msg} + return true, err + case 9: // type.fixed32 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Fixed32Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Fixed32{msg} + return true, err + case 10: // type.fixed64 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Fixed64Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Fixed64{msg} + return true, err + case 11: // type.sfixed32 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SFixed32Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Sfixed32{msg} + return true, err + case 12: // type.sfixed64 + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SFixed64Rules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Sfixed64{msg} + return true, err + case 13: // type.bool + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BoolRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Bool{msg} + return true, err + case 14: // type.string + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StringRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_String_{msg} + return true, err + case 15: // type.bytes + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(BytesRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Bytes{msg} + return true, err + case 16: // type.enum + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(EnumRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Enum{msg} + return true, err + case 17: // type.message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MessageRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Message{msg} + return true, err + case 18: // type.repeated + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(RepeatedRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Repeated{msg} + return true, err + case 19: // type.map + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(MapRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Map{msg} + return true, err + case 20: // type.any + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(AnyRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Any{msg} + return true, err + case 21: // type.duration + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DurationRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Duration{msg} + return true, err + case 22: // type.timestamp + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(TimestampRules) + err := b.DecodeMessage(msg) + m.Type = &FieldRules_Timestamp{msg} + return true, err + default: + return false, nil + } +} + +func _FieldRules_OneofSizer(msg proto.Message) (n int) { + m := msg.(*FieldRules) + // type + 
switch x := m.Type.(type) { + case *FieldRules_Float: + s := proto.Size(x.Float) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Double: + s := proto.Size(x.Double) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Int32: + s := proto.Size(x.Int32) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Int64: + s := proto.Size(x.Int64) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Uint32: + s := proto.Size(x.Uint32) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Uint64: + s := proto.Size(x.Uint64) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Sint32: + s := proto.Size(x.Sint32) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Sint64: + s := proto.Size(x.Sint64) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Fixed32: + s := proto.Size(x.Fixed32) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Fixed64: + s := proto.Size(x.Fixed64) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Sfixed32: + s := proto.Size(x.Sfixed32) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Sfixed64: + s := proto.Size(x.Sfixed64) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Bool: + s := proto.Size(x.Bool) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_String_: + s := proto.Size(x.String_) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Bytes: + s := proto.Size(x.Bytes) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Enum: + s := proto.Size(x.Enum) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Message: + s := proto.Size(x.Message) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Repeated: + s := proto.Size(x.Repeated) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Map: + s := proto.Size(x.Map) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Any: + s := proto.Size(x.Any) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Duration: + s := proto.Size(x.Duration) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *FieldRules_Timestamp: + s := proto.Size(x.Timestamp) + n += 2 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type FloatRules struct { + Const *float32 `protobuf:"fixed32,1,opt,name=const" json:"const,omitempty"` + Lt *float32 `protobuf:"fixed32,2,opt,name=lt" json:"lt,omitempty"` + Lte *float32 `protobuf:"fixed32,3,opt,name=lte" json:"lte,omitempty"` + Gt *float32 `protobuf:"fixed32,4,opt,name=gt" json:"gt,omitempty"` + Gte *float32 `protobuf:"fixed32,5,opt,name=gte" json:"gte,omitempty"` + In []float32 `protobuf:"fixed32,6,rep,name=in" json:"in,omitempty"` + NotIn []float32 `protobuf:"fixed32,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FloatRules) 
Reset() { *m = FloatRules{} } +func (m *FloatRules) String() string { return proto.CompactTextString(m) } +func (*FloatRules) ProtoMessage() {} +func (*FloatRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{1} +} +func (m *FloatRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FloatRules.Unmarshal(m, b) +} +func (m *FloatRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FloatRules.Marshal(b, m, deterministic) +} +func (dst *FloatRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_FloatRules.Merge(dst, src) +} +func (m *FloatRules) XXX_Size() int { + return xxx_messageInfo_FloatRules.Size(m) +} +func (m *FloatRules) XXX_DiscardUnknown() { + xxx_messageInfo_FloatRules.DiscardUnknown(m) +} + +var xxx_messageInfo_FloatRules proto.InternalMessageInfo + +func (m *FloatRules) GetConst() float32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *FloatRules) GetLt() float32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *FloatRules) GetLte() float32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *FloatRules) GetGt() float32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *FloatRules) GetGte() float32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *FloatRules) GetIn() []float32 { + if m != nil { + return m.In + } + return nil +} + +func (m *FloatRules) GetNotIn() []float32 { + if m != nil { + return m.NotIn + } + return nil +} + +type DoubleRules struct { + Const *float64 `protobuf:"fixed64,1,opt,name=const" json:"const,omitempty"` + Lt *float64 `protobuf:"fixed64,2,opt,name=lt" json:"lt,omitempty"` + Lte *float64 `protobuf:"fixed64,3,opt,name=lte" json:"lte,omitempty"` + Gt *float64 `protobuf:"fixed64,4,opt,name=gt" json:"gt,omitempty"` + Gte *float64 `protobuf:"fixed64,5,opt,name=gte" json:"gte,omitempty"` + In []float64 `protobuf:"fixed64,6,rep,name=in" json:"in,omitempty"` + NotIn []float64 `protobuf:"fixed64,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DoubleRules) Reset() { *m = DoubleRules{} } +func (m *DoubleRules) String() string { return proto.CompactTextString(m) } +func (*DoubleRules) ProtoMessage() {} +func (*DoubleRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{2} +} +func (m *DoubleRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DoubleRules.Unmarshal(m, b) +} +func (m *DoubleRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DoubleRules.Marshal(b, m, deterministic) +} +func (dst *DoubleRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_DoubleRules.Merge(dst, src) +} +func (m *DoubleRules) XXX_Size() int { + return xxx_messageInfo_DoubleRules.Size(m) +} +func (m *DoubleRules) XXX_DiscardUnknown() { + xxx_messageInfo_DoubleRules.DiscardUnknown(m) +} + +var xxx_messageInfo_DoubleRules proto.InternalMessageInfo + +func (m *DoubleRules) GetConst() float64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *DoubleRules) GetLt() float64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *DoubleRules) GetLte() float64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *DoubleRules) GetGt() float64 { + if m != 
nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *DoubleRules) GetGte() float64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *DoubleRules) GetIn() []float64 { + if m != nil { + return m.In + } + return nil +} + +func (m *DoubleRules) GetNotIn() []float64 { + if m != nil { + return m.NotIn + } + return nil +} + +type Int32Rules struct { + Const *int32 `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + Lt *int32 `protobuf:"varint,2,opt,name=lt" json:"lt,omitempty"` + Lte *int32 `protobuf:"varint,3,opt,name=lte" json:"lte,omitempty"` + Gt *int32 `protobuf:"varint,4,opt,name=gt" json:"gt,omitempty"` + Gte *int32 `protobuf:"varint,5,opt,name=gte" json:"gte,omitempty"` + In []int32 `protobuf:"varint,6,rep,name=in" json:"in,omitempty"` + NotIn []int32 `protobuf:"varint,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int32Rules) Reset() { *m = Int32Rules{} } +func (m *Int32Rules) String() string { return proto.CompactTextString(m) } +func (*Int32Rules) ProtoMessage() {} +func (*Int32Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{3} +} +func (m *Int32Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int32Rules.Unmarshal(m, b) +} +func (m *Int32Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int32Rules.Marshal(b, m, deterministic) +} +func (dst *Int32Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int32Rules.Merge(dst, src) +} +func (m *Int32Rules) XXX_Size() int { + return xxx_messageInfo_Int32Rules.Size(m) +} +func (m *Int32Rules) XXX_DiscardUnknown() { + xxx_messageInfo_Int32Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_Int32Rules proto.InternalMessageInfo + +func (m *Int32Rules) GetConst() int32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *Int32Rules) GetLt() int32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *Int32Rules) GetLte() int32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *Int32Rules) GetGt() int32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *Int32Rules) GetGte() int32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *Int32Rules) GetIn() []int32 { + if m != nil { + return m.In + } + return nil +} + +func (m *Int32Rules) GetNotIn() []int32 { + if m != nil { + return m.NotIn + } + return nil +} + +type Int64Rules struct { + Const *int64 `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + Lt *int64 `protobuf:"varint,2,opt,name=lt" json:"lt,omitempty"` + Lte *int64 `protobuf:"varint,3,opt,name=lte" json:"lte,omitempty"` + Gt *int64 `protobuf:"varint,4,opt,name=gt" json:"gt,omitempty"` + Gte *int64 `protobuf:"varint,5,opt,name=gte" json:"gte,omitempty"` + In []int64 `protobuf:"varint,6,rep,name=in" json:"in,omitempty"` + NotIn []int64 `protobuf:"varint,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Int64Rules) Reset() { *m = Int64Rules{} } +func (m *Int64Rules) String() string { return proto.CompactTextString(m) } +func (*Int64Rules) ProtoMessage() {} +func (*Int64Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{4} +} +func (m 
*Int64Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Int64Rules.Unmarshal(m, b) +} +func (m *Int64Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Int64Rules.Marshal(b, m, deterministic) +} +func (dst *Int64Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_Int64Rules.Merge(dst, src) +} +func (m *Int64Rules) XXX_Size() int { + return xxx_messageInfo_Int64Rules.Size(m) +} +func (m *Int64Rules) XXX_DiscardUnknown() { + xxx_messageInfo_Int64Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_Int64Rules proto.InternalMessageInfo + +func (m *Int64Rules) GetConst() int64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *Int64Rules) GetLt() int64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *Int64Rules) GetLte() int64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *Int64Rules) GetGt() int64 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *Int64Rules) GetGte() int64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *Int64Rules) GetIn() []int64 { + if m != nil { + return m.In + } + return nil +} + +func (m *Int64Rules) GetNotIn() []int64 { + if m != nil { + return m.NotIn + } + return nil +} + +type UInt32Rules struct { + Const *uint32 `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + Lt *uint32 `protobuf:"varint,2,opt,name=lt" json:"lt,omitempty"` + Lte *uint32 `protobuf:"varint,3,opt,name=lte" json:"lte,omitempty"` + Gt *uint32 `protobuf:"varint,4,opt,name=gt" json:"gt,omitempty"` + Gte *uint32 `protobuf:"varint,5,opt,name=gte" json:"gte,omitempty"` + In []uint32 `protobuf:"varint,6,rep,name=in" json:"in,omitempty"` + NotIn []uint32 `protobuf:"varint,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UInt32Rules) Reset() { *m = UInt32Rules{} } +func (m *UInt32Rules) String() string { return proto.CompactTextString(m) } +func (*UInt32Rules) ProtoMessage() {} +func (*UInt32Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{5} +} +func (m *UInt32Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt32Rules.Unmarshal(m, b) +} +func (m *UInt32Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt32Rules.Marshal(b, m, deterministic) +} +func (dst *UInt32Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt32Rules.Merge(dst, src) +} +func (m *UInt32Rules) XXX_Size() int { + return xxx_messageInfo_UInt32Rules.Size(m) +} +func (m *UInt32Rules) XXX_DiscardUnknown() { + xxx_messageInfo_UInt32Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt32Rules proto.InternalMessageInfo + +func (m *UInt32Rules) GetConst() uint32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *UInt32Rules) GetLt() uint32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *UInt32Rules) GetLte() uint32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *UInt32Rules) GetGt() uint32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *UInt32Rules) GetGte() uint32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *UInt32Rules) GetIn() []uint32 { + if m != nil { + return m.In + } + return nil +} + +func (m *UInt32Rules) GetNotIn() 
[]uint32 { + if m != nil { + return m.NotIn + } + return nil +} + +type UInt64Rules struct { + Const *uint64 `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + Lt *uint64 `protobuf:"varint,2,opt,name=lt" json:"lt,omitempty"` + Lte *uint64 `protobuf:"varint,3,opt,name=lte" json:"lte,omitempty"` + Gt *uint64 `protobuf:"varint,4,opt,name=gt" json:"gt,omitempty"` + Gte *uint64 `protobuf:"varint,5,opt,name=gte" json:"gte,omitempty"` + In []uint64 `protobuf:"varint,6,rep,name=in" json:"in,omitempty"` + NotIn []uint64 `protobuf:"varint,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UInt64Rules) Reset() { *m = UInt64Rules{} } +func (m *UInt64Rules) String() string { return proto.CompactTextString(m) } +func (*UInt64Rules) ProtoMessage() {} +func (*UInt64Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{6} +} +func (m *UInt64Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UInt64Rules.Unmarshal(m, b) +} +func (m *UInt64Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UInt64Rules.Marshal(b, m, deterministic) +} +func (dst *UInt64Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_UInt64Rules.Merge(dst, src) +} +func (m *UInt64Rules) XXX_Size() int { + return xxx_messageInfo_UInt64Rules.Size(m) +} +func (m *UInt64Rules) XXX_DiscardUnknown() { + xxx_messageInfo_UInt64Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_UInt64Rules proto.InternalMessageInfo + +func (m *UInt64Rules) GetConst() uint64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *UInt64Rules) GetLt() uint64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *UInt64Rules) GetLte() uint64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *UInt64Rules) GetGt() uint64 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *UInt64Rules) GetGte() uint64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *UInt64Rules) GetIn() []uint64 { + if m != nil { + return m.In + } + return nil +} + +func (m *UInt64Rules) GetNotIn() []uint64 { + if m != nil { + return m.NotIn + } + return nil +} + +type SInt32Rules struct { + Const *int32 `protobuf:"zigzag32,1,opt,name=const" json:"const,omitempty"` + Lt *int32 `protobuf:"zigzag32,2,opt,name=lt" json:"lt,omitempty"` + Lte *int32 `protobuf:"zigzag32,3,opt,name=lte" json:"lte,omitempty"` + Gt *int32 `protobuf:"zigzag32,4,opt,name=gt" json:"gt,omitempty"` + Gte *int32 `protobuf:"zigzag32,5,opt,name=gte" json:"gte,omitempty"` + In []int32 `protobuf:"zigzag32,6,rep,name=in" json:"in,omitempty"` + NotIn []int32 `protobuf:"zigzag32,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SInt32Rules) Reset() { *m = SInt32Rules{} } +func (m *SInt32Rules) String() string { return proto.CompactTextString(m) } +func (*SInt32Rules) ProtoMessage() {} +func (*SInt32Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{7} +} +func (m *SInt32Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SInt32Rules.Unmarshal(m, b) +} +func (m *SInt32Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SInt32Rules.Marshal(b, m, 
deterministic) +} +func (dst *SInt32Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_SInt32Rules.Merge(dst, src) +} +func (m *SInt32Rules) XXX_Size() int { + return xxx_messageInfo_SInt32Rules.Size(m) +} +func (m *SInt32Rules) XXX_DiscardUnknown() { + xxx_messageInfo_SInt32Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_SInt32Rules proto.InternalMessageInfo + +func (m *SInt32Rules) GetConst() int32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *SInt32Rules) GetLt() int32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *SInt32Rules) GetLte() int32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *SInt32Rules) GetGt() int32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *SInt32Rules) GetGte() int32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *SInt32Rules) GetIn() []int32 { + if m != nil { + return m.In + } + return nil +} + +func (m *SInt32Rules) GetNotIn() []int32 { + if m != nil { + return m.NotIn + } + return nil +} + +type SInt64Rules struct { + Const *int64 `protobuf:"zigzag64,1,opt,name=const" json:"const,omitempty"` + Lt *int64 `protobuf:"zigzag64,2,opt,name=lt" json:"lt,omitempty"` + Lte *int64 `protobuf:"zigzag64,3,opt,name=lte" json:"lte,omitempty"` + Gt *int64 `protobuf:"zigzag64,4,opt,name=gt" json:"gt,omitempty"` + Gte *int64 `protobuf:"zigzag64,5,opt,name=gte" json:"gte,omitempty"` + In []int64 `protobuf:"zigzag64,6,rep,name=in" json:"in,omitempty"` + NotIn []int64 `protobuf:"zigzag64,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SInt64Rules) Reset() { *m = SInt64Rules{} } +func (m *SInt64Rules) String() string { return proto.CompactTextString(m) } +func (*SInt64Rules) ProtoMessage() {} +func (*SInt64Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{8} +} +func (m *SInt64Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SInt64Rules.Unmarshal(m, b) +} +func (m *SInt64Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SInt64Rules.Marshal(b, m, deterministic) +} +func (dst *SInt64Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_SInt64Rules.Merge(dst, src) +} +func (m *SInt64Rules) XXX_Size() int { + return xxx_messageInfo_SInt64Rules.Size(m) +} +func (m *SInt64Rules) XXX_DiscardUnknown() { + xxx_messageInfo_SInt64Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_SInt64Rules proto.InternalMessageInfo + +func (m *SInt64Rules) GetConst() int64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *SInt64Rules) GetLt() int64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *SInt64Rules) GetLte() int64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *SInt64Rules) GetGt() int64 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *SInt64Rules) GetGte() int64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *SInt64Rules) GetIn() []int64 { + if m != nil { + return m.In + } + return nil +} + +func (m *SInt64Rules) GetNotIn() []int64 { + if m != nil { + return m.NotIn + } + return nil +} + +type Fixed32Rules struct { + Const *uint32 `protobuf:"fixed32,1,opt,name=const" json:"const,omitempty"` + Lt *uint32 `protobuf:"fixed32,2,opt,name=lt" 
json:"lt,omitempty"` + Lte *uint32 `protobuf:"fixed32,3,opt,name=lte" json:"lte,omitempty"` + Gt *uint32 `protobuf:"fixed32,4,opt,name=gt" json:"gt,omitempty"` + Gte *uint32 `protobuf:"fixed32,5,opt,name=gte" json:"gte,omitempty"` + In []uint32 `protobuf:"fixed32,6,rep,name=in" json:"in,omitempty"` + NotIn []uint32 `protobuf:"fixed32,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Fixed32Rules) Reset() { *m = Fixed32Rules{} } +func (m *Fixed32Rules) String() string { return proto.CompactTextString(m) } +func (*Fixed32Rules) ProtoMessage() {} +func (*Fixed32Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{9} +} +func (m *Fixed32Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Fixed32Rules.Unmarshal(m, b) +} +func (m *Fixed32Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Fixed32Rules.Marshal(b, m, deterministic) +} +func (dst *Fixed32Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_Fixed32Rules.Merge(dst, src) +} +func (m *Fixed32Rules) XXX_Size() int { + return xxx_messageInfo_Fixed32Rules.Size(m) +} +func (m *Fixed32Rules) XXX_DiscardUnknown() { + xxx_messageInfo_Fixed32Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_Fixed32Rules proto.InternalMessageInfo + +func (m *Fixed32Rules) GetConst() uint32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *Fixed32Rules) GetLt() uint32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *Fixed32Rules) GetLte() uint32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *Fixed32Rules) GetGt() uint32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *Fixed32Rules) GetGte() uint32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *Fixed32Rules) GetIn() []uint32 { + if m != nil { + return m.In + } + return nil +} + +func (m *Fixed32Rules) GetNotIn() []uint32 { + if m != nil { + return m.NotIn + } + return nil +} + +type Fixed64Rules struct { + Const *uint64 `protobuf:"fixed64,1,opt,name=const" json:"const,omitempty"` + Lt *uint64 `protobuf:"fixed64,2,opt,name=lt" json:"lt,omitempty"` + Lte *uint64 `protobuf:"fixed64,3,opt,name=lte" json:"lte,omitempty"` + Gt *uint64 `protobuf:"fixed64,4,opt,name=gt" json:"gt,omitempty"` + Gte *uint64 `protobuf:"fixed64,5,opt,name=gte" json:"gte,omitempty"` + In []uint64 `protobuf:"fixed64,6,rep,name=in" json:"in,omitempty"` + NotIn []uint64 `protobuf:"fixed64,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Fixed64Rules) Reset() { *m = Fixed64Rules{} } +func (m *Fixed64Rules) String() string { return proto.CompactTextString(m) } +func (*Fixed64Rules) ProtoMessage() {} +func (*Fixed64Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{10} +} +func (m *Fixed64Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Fixed64Rules.Unmarshal(m, b) +} +func (m *Fixed64Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Fixed64Rules.Marshal(b, m, deterministic) +} +func (dst *Fixed64Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_Fixed64Rules.Merge(dst, src) +} +func (m *Fixed64Rules) XXX_Size() int { + return 
xxx_messageInfo_Fixed64Rules.Size(m) +} +func (m *Fixed64Rules) XXX_DiscardUnknown() { + xxx_messageInfo_Fixed64Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_Fixed64Rules proto.InternalMessageInfo + +func (m *Fixed64Rules) GetConst() uint64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *Fixed64Rules) GetLt() uint64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *Fixed64Rules) GetLte() uint64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *Fixed64Rules) GetGt() uint64 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *Fixed64Rules) GetGte() uint64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *Fixed64Rules) GetIn() []uint64 { + if m != nil { + return m.In + } + return nil +} + +func (m *Fixed64Rules) GetNotIn() []uint64 { + if m != nil { + return m.NotIn + } + return nil +} + +type SFixed32Rules struct { + Const *int32 `protobuf:"fixed32,1,opt,name=const" json:"const,omitempty"` + Lt *int32 `protobuf:"fixed32,2,opt,name=lt" json:"lt,omitempty"` + Lte *int32 `protobuf:"fixed32,3,opt,name=lte" json:"lte,omitempty"` + Gt *int32 `protobuf:"fixed32,4,opt,name=gt" json:"gt,omitempty"` + Gte *int32 `protobuf:"fixed32,5,opt,name=gte" json:"gte,omitempty"` + In []int32 `protobuf:"fixed32,6,rep,name=in" json:"in,omitempty"` + NotIn []int32 `protobuf:"fixed32,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SFixed32Rules) Reset() { *m = SFixed32Rules{} } +func (m *SFixed32Rules) String() string { return proto.CompactTextString(m) } +func (*SFixed32Rules) ProtoMessage() {} +func (*SFixed32Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{11} +} +func (m *SFixed32Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SFixed32Rules.Unmarshal(m, b) +} +func (m *SFixed32Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SFixed32Rules.Marshal(b, m, deterministic) +} +func (dst *SFixed32Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_SFixed32Rules.Merge(dst, src) +} +func (m *SFixed32Rules) XXX_Size() int { + return xxx_messageInfo_SFixed32Rules.Size(m) +} +func (m *SFixed32Rules) XXX_DiscardUnknown() { + xxx_messageInfo_SFixed32Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_SFixed32Rules proto.InternalMessageInfo + +func (m *SFixed32Rules) GetConst() int32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *SFixed32Rules) GetLt() int32 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *SFixed32Rules) GetLte() int32 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *SFixed32Rules) GetGt() int32 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *SFixed32Rules) GetGte() int32 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *SFixed32Rules) GetIn() []int32 { + if m != nil { + return m.In + } + return nil +} + +func (m *SFixed32Rules) GetNotIn() []int32 { + if m != nil { + return m.NotIn + } + return nil +} + +type SFixed64Rules struct { + Const *int64 `protobuf:"fixed64,1,opt,name=const" json:"const,omitempty"` + Lt *int64 `protobuf:"fixed64,2,opt,name=lt" json:"lt,omitempty"` + Lte *int64 `protobuf:"fixed64,3,opt,name=lte" json:"lte,omitempty"` + Gt *int64 
`protobuf:"fixed64,4,opt,name=gt" json:"gt,omitempty"` + Gte *int64 `protobuf:"fixed64,5,opt,name=gte" json:"gte,omitempty"` + In []int64 `protobuf:"fixed64,6,rep,name=in" json:"in,omitempty"` + NotIn []int64 `protobuf:"fixed64,7,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SFixed64Rules) Reset() { *m = SFixed64Rules{} } +func (m *SFixed64Rules) String() string { return proto.CompactTextString(m) } +func (*SFixed64Rules) ProtoMessage() {} +func (*SFixed64Rules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{12} +} +func (m *SFixed64Rules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SFixed64Rules.Unmarshal(m, b) +} +func (m *SFixed64Rules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SFixed64Rules.Marshal(b, m, deterministic) +} +func (dst *SFixed64Rules) XXX_Merge(src proto.Message) { + xxx_messageInfo_SFixed64Rules.Merge(dst, src) +} +func (m *SFixed64Rules) XXX_Size() int { + return xxx_messageInfo_SFixed64Rules.Size(m) +} +func (m *SFixed64Rules) XXX_DiscardUnknown() { + xxx_messageInfo_SFixed64Rules.DiscardUnknown(m) +} + +var xxx_messageInfo_SFixed64Rules proto.InternalMessageInfo + +func (m *SFixed64Rules) GetConst() int64 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *SFixed64Rules) GetLt() int64 { + if m != nil && m.Lt != nil { + return *m.Lt + } + return 0 +} + +func (m *SFixed64Rules) GetLte() int64 { + if m != nil && m.Lte != nil { + return *m.Lte + } + return 0 +} + +func (m *SFixed64Rules) GetGt() int64 { + if m != nil && m.Gt != nil { + return *m.Gt + } + return 0 +} + +func (m *SFixed64Rules) GetGte() int64 { + if m != nil && m.Gte != nil { + return *m.Gte + } + return 0 +} + +func (m *SFixed64Rules) GetIn() []int64 { + if m != nil { + return m.In + } + return nil +} + +func (m *SFixed64Rules) GetNotIn() []int64 { + if m != nil { + return m.NotIn + } + return nil +} + +type BoolRules struct { + Const *bool `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BoolRules) Reset() { *m = BoolRules{} } +func (m *BoolRules) String() string { return proto.CompactTextString(m) } +func (*BoolRules) ProtoMessage() {} +func (*BoolRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{13} +} +func (m *BoolRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BoolRules.Unmarshal(m, b) +} +func (m *BoolRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BoolRules.Marshal(b, m, deterministic) +} +func (dst *BoolRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_BoolRules.Merge(dst, src) +} +func (m *BoolRules) XXX_Size() int { + return xxx_messageInfo_BoolRules.Size(m) +} +func (m *BoolRules) XXX_DiscardUnknown() { + xxx_messageInfo_BoolRules.DiscardUnknown(m) +} + +var xxx_messageInfo_BoolRules proto.InternalMessageInfo + +func (m *BoolRules) GetConst() bool { + if m != nil && m.Const != nil { + return *m.Const + } + return false +} + +type StringRules struct { + Const *string `protobuf:"bytes,1,opt,name=const" json:"const,omitempty"` + Len *uint64 `protobuf:"varint,19,opt,name=len" json:"len,omitempty"` + MinLen *uint64 `protobuf:"varint,2,opt,name=min_len,json=minLen" json:"min_len,omitempty"` + 
MaxLen *uint64 `protobuf:"varint,3,opt,name=max_len,json=maxLen" json:"max_len,omitempty"` + LenBytes *uint64 `protobuf:"varint,20,opt,name=len_bytes,json=lenBytes" json:"len_bytes,omitempty"` + MinBytes *uint64 `protobuf:"varint,4,opt,name=min_bytes,json=minBytes" json:"min_bytes,omitempty"` + MaxBytes *uint64 `protobuf:"varint,5,opt,name=max_bytes,json=maxBytes" json:"max_bytes,omitempty"` + Pattern *string `protobuf:"bytes,6,opt,name=pattern" json:"pattern,omitempty"` + Prefix *string `protobuf:"bytes,7,opt,name=prefix" json:"prefix,omitempty"` + Suffix *string `protobuf:"bytes,8,opt,name=suffix" json:"suffix,omitempty"` + Contains *string `protobuf:"bytes,9,opt,name=contains" json:"contains,omitempty"` + In []string `protobuf:"bytes,10,rep,name=in" json:"in,omitempty"` + NotIn []string `protobuf:"bytes,11,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + // Types that are valid to be assigned to WellKnown: + // *StringRules_Email + // *StringRules_Hostname + // *StringRules_Ip + // *StringRules_Ipv4 + // *StringRules_Ipv6 + // *StringRules_Uri + // *StringRules_UriRef + WellKnown isStringRules_WellKnown `protobuf_oneof:"well_known"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StringRules) Reset() { *m = StringRules{} } +func (m *StringRules) String() string { return proto.CompactTextString(m) } +func (*StringRules) ProtoMessage() {} +func (*StringRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{14} +} +func (m *StringRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StringRules.Unmarshal(m, b) +} +func (m *StringRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StringRules.Marshal(b, m, deterministic) +} +func (dst *StringRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_StringRules.Merge(dst, src) +} +func (m *StringRules) XXX_Size() int { + return xxx_messageInfo_StringRules.Size(m) +} +func (m *StringRules) XXX_DiscardUnknown() { + xxx_messageInfo_StringRules.DiscardUnknown(m) +} + +var xxx_messageInfo_StringRules proto.InternalMessageInfo + +func (m *StringRules) GetConst() string { + if m != nil && m.Const != nil { + return *m.Const + } + return "" +} + +func (m *StringRules) GetLen() uint64 { + if m != nil && m.Len != nil { + return *m.Len + } + return 0 +} + +func (m *StringRules) GetMinLen() uint64 { + if m != nil && m.MinLen != nil { + return *m.MinLen + } + return 0 +} + +func (m *StringRules) GetMaxLen() uint64 { + if m != nil && m.MaxLen != nil { + return *m.MaxLen + } + return 0 +} + +func (m *StringRules) GetLenBytes() uint64 { + if m != nil && m.LenBytes != nil { + return *m.LenBytes + } + return 0 +} + +func (m *StringRules) GetMinBytes() uint64 { + if m != nil && m.MinBytes != nil { + return *m.MinBytes + } + return 0 +} + +func (m *StringRules) GetMaxBytes() uint64 { + if m != nil && m.MaxBytes != nil { + return *m.MaxBytes + } + return 0 +} + +func (m *StringRules) GetPattern() string { + if m != nil && m.Pattern != nil { + return *m.Pattern + } + return "" +} + +func (m *StringRules) GetPrefix() string { + if m != nil && m.Prefix != nil { + return *m.Prefix + } + return "" +} + +func (m *StringRules) GetSuffix() string { + if m != nil && m.Suffix != nil { + return *m.Suffix + } + return "" +} + +func (m *StringRules) GetContains() string { + if m != nil && m.Contains != nil { + return *m.Contains + } + return "" +} + +func (m *StringRules) GetIn() []string { + if m != nil 
{ + return m.In + } + return nil +} + +func (m *StringRules) GetNotIn() []string { + if m != nil { + return m.NotIn + } + return nil +} + +type isStringRules_WellKnown interface { + isStringRules_WellKnown() +} + +type StringRules_Email struct { + Email bool `protobuf:"varint,12,opt,name=email,oneof"` +} + +type StringRules_Hostname struct { + Hostname bool `protobuf:"varint,13,opt,name=hostname,oneof"` +} + +type StringRules_Ip struct { + Ip bool `protobuf:"varint,14,opt,name=ip,oneof"` +} + +type StringRules_Ipv4 struct { + Ipv4 bool `protobuf:"varint,15,opt,name=ipv4,oneof"` +} + +type StringRules_Ipv6 struct { + Ipv6 bool `protobuf:"varint,16,opt,name=ipv6,oneof"` +} + +type StringRules_Uri struct { + Uri bool `protobuf:"varint,17,opt,name=uri,oneof"` +} + +type StringRules_UriRef struct { + UriRef bool `protobuf:"varint,18,opt,name=uri_ref,json=uriRef,oneof"` +} + +func (*StringRules_Email) isStringRules_WellKnown() {} + +func (*StringRules_Hostname) isStringRules_WellKnown() {} + +func (*StringRules_Ip) isStringRules_WellKnown() {} + +func (*StringRules_Ipv4) isStringRules_WellKnown() {} + +func (*StringRules_Ipv6) isStringRules_WellKnown() {} + +func (*StringRules_Uri) isStringRules_WellKnown() {} + +func (*StringRules_UriRef) isStringRules_WellKnown() {} + +func (m *StringRules) GetWellKnown() isStringRules_WellKnown { + if m != nil { + return m.WellKnown + } + return nil +} + +func (m *StringRules) GetEmail() bool { + if x, ok := m.GetWellKnown().(*StringRules_Email); ok { + return x.Email + } + return false +} + +func (m *StringRules) GetHostname() bool { + if x, ok := m.GetWellKnown().(*StringRules_Hostname); ok { + return x.Hostname + } + return false +} + +func (m *StringRules) GetIp() bool { + if x, ok := m.GetWellKnown().(*StringRules_Ip); ok { + return x.Ip + } + return false +} + +func (m *StringRules) GetIpv4() bool { + if x, ok := m.GetWellKnown().(*StringRules_Ipv4); ok { + return x.Ipv4 + } + return false +} + +func (m *StringRules) GetIpv6() bool { + if x, ok := m.GetWellKnown().(*StringRules_Ipv6); ok { + return x.Ipv6 + } + return false +} + +func (m *StringRules) GetUri() bool { + if x, ok := m.GetWellKnown().(*StringRules_Uri); ok { + return x.Uri + } + return false +} + +func (m *StringRules) GetUriRef() bool { + if x, ok := m.GetWellKnown().(*StringRules_UriRef); ok { + return x.UriRef + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*StringRules) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _StringRules_OneofMarshaler, _StringRules_OneofUnmarshaler, _StringRules_OneofSizer, []interface{}{ + (*StringRules_Email)(nil), + (*StringRules_Hostname)(nil), + (*StringRules_Ip)(nil), + (*StringRules_Ipv4)(nil), + (*StringRules_Ipv6)(nil), + (*StringRules_Uri)(nil), + (*StringRules_UriRef)(nil), + } +} + +func _StringRules_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*StringRules) + // well_known + switch x := m.WellKnown.(type) { + case *StringRules_Email: + t := uint64(0) + if x.Email { + t = 1 + } + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_Hostname: + t := uint64(0) + if x.Hostname { + t = 1 + } + b.EncodeVarint(13<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_Ip: + t := uint64(0) + if x.Ip { + t = 1 + } + b.EncodeVarint(14<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_Ipv4: + t := uint64(0) + if x.Ipv4 { + t = 1 + } + b.EncodeVarint(15<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_Ipv6: + t := uint64(0) + if x.Ipv6 { + t = 1 + } + b.EncodeVarint(16<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_Uri: + t := uint64(0) + if x.Uri { + t = 1 + } + b.EncodeVarint(17<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *StringRules_UriRef: + t := uint64(0) + if x.UriRef { + t = 1 + } + b.EncodeVarint(18<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("StringRules.WellKnown has unexpected type %T", x) + } + return nil +} + +func _StringRules_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*StringRules) + switch tag { + case 12: // well_known.email + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Email{x != 0} + return true, err + case 13: // well_known.hostname + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Hostname{x != 0} + return true, err + case 14: // well_known.ip + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Ip{x != 0} + return true, err + case 15: // well_known.ipv4 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Ipv4{x != 0} + return true, err + case 16: // well_known.ipv6 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Ipv6{x != 0} + return true, err + case 17: // well_known.uri + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_Uri{x != 0} + return true, err + case 18: // well_known.uri_ref + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &StringRules_UriRef{x != 0} + return true, err + default: + return false, nil + } +} + +func _StringRules_OneofSizer(msg proto.Message) (n int) { + m := msg.(*StringRules) + // well_known + switch x := m.WellKnown.(type) { + case *StringRules_Email: + n += 1 // tag and wire + n += 1 + case *StringRules_Hostname: + n 
+= 1 // tag and wire + n += 1 + case *StringRules_Ip: + n += 1 // tag and wire + n += 1 + case *StringRules_Ipv4: + n += 1 // tag and wire + n += 1 + case *StringRules_Ipv6: + n += 2 // tag and wire + n += 1 + case *StringRules_Uri: + n += 2 // tag and wire + n += 1 + case *StringRules_UriRef: + n += 2 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type BytesRules struct { + Const []byte `protobuf:"bytes,1,opt,name=const" json:"const,omitempty"` + Len *uint64 `protobuf:"varint,13,opt,name=len" json:"len,omitempty"` + MinLen *uint64 `protobuf:"varint,2,opt,name=min_len,json=minLen" json:"min_len,omitempty"` + MaxLen *uint64 `protobuf:"varint,3,opt,name=max_len,json=maxLen" json:"max_len,omitempty"` + Pattern *string `protobuf:"bytes,4,opt,name=pattern" json:"pattern,omitempty"` + Prefix []byte `protobuf:"bytes,5,opt,name=prefix" json:"prefix,omitempty"` + Suffix []byte `protobuf:"bytes,6,opt,name=suffix" json:"suffix,omitempty"` + Contains []byte `protobuf:"bytes,7,opt,name=contains" json:"contains,omitempty"` + In [][]byte `protobuf:"bytes,8,rep,name=in" json:"in,omitempty"` + NotIn [][]byte `protobuf:"bytes,9,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + // Types that are valid to be assigned to WellKnown: + // *BytesRules_Ip + // *BytesRules_Ipv4 + // *BytesRules_Ipv6 + WellKnown isBytesRules_WellKnown `protobuf_oneof:"well_known"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BytesRules) Reset() { *m = BytesRules{} } +func (m *BytesRules) String() string { return proto.CompactTextString(m) } +func (*BytesRules) ProtoMessage() {} +func (*BytesRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{15} +} +func (m *BytesRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BytesRules.Unmarshal(m, b) +} +func (m *BytesRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BytesRules.Marshal(b, m, deterministic) +} +func (dst *BytesRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_BytesRules.Merge(dst, src) +} +func (m *BytesRules) XXX_Size() int { + return xxx_messageInfo_BytesRules.Size(m) +} +func (m *BytesRules) XXX_DiscardUnknown() { + xxx_messageInfo_BytesRules.DiscardUnknown(m) +} + +var xxx_messageInfo_BytesRules proto.InternalMessageInfo + +func (m *BytesRules) GetConst() []byte { + if m != nil { + return m.Const + } + return nil +} + +func (m *BytesRules) GetLen() uint64 { + if m != nil && m.Len != nil { + return *m.Len + } + return 0 +} + +func (m *BytesRules) GetMinLen() uint64 { + if m != nil && m.MinLen != nil { + return *m.MinLen + } + return 0 +} + +func (m *BytesRules) GetMaxLen() uint64 { + if m != nil && m.MaxLen != nil { + return *m.MaxLen + } + return 0 +} + +func (m *BytesRules) GetPattern() string { + if m != nil && m.Pattern != nil { + return *m.Pattern + } + return "" +} + +func (m *BytesRules) GetPrefix() []byte { + if m != nil { + return m.Prefix + } + return nil +} + +func (m *BytesRules) GetSuffix() []byte { + if m != nil { + return m.Suffix + } + return nil +} + +func (m *BytesRules) GetContains() []byte { + if m != nil { + return m.Contains + } + return nil +} + +func (m *BytesRules) GetIn() [][]byte { + if m != nil { + return m.In + } + return nil +} + +func (m *BytesRules) GetNotIn() [][]byte { + if m != nil { + return m.NotIn + } + return nil +} + +type isBytesRules_WellKnown interface { + 
isBytesRules_WellKnown() +} + +type BytesRules_Ip struct { + Ip bool `protobuf:"varint,10,opt,name=ip,oneof"` +} + +type BytesRules_Ipv4 struct { + Ipv4 bool `protobuf:"varint,11,opt,name=ipv4,oneof"` +} + +type BytesRules_Ipv6 struct { + Ipv6 bool `protobuf:"varint,12,opt,name=ipv6,oneof"` +} + +func (*BytesRules_Ip) isBytesRules_WellKnown() {} + +func (*BytesRules_Ipv4) isBytesRules_WellKnown() {} + +func (*BytesRules_Ipv6) isBytesRules_WellKnown() {} + +func (m *BytesRules) GetWellKnown() isBytesRules_WellKnown { + if m != nil { + return m.WellKnown + } + return nil +} + +func (m *BytesRules) GetIp() bool { + if x, ok := m.GetWellKnown().(*BytesRules_Ip); ok { + return x.Ip + } + return false +} + +func (m *BytesRules) GetIpv4() bool { + if x, ok := m.GetWellKnown().(*BytesRules_Ipv4); ok { + return x.Ipv4 + } + return false +} + +func (m *BytesRules) GetIpv6() bool { + if x, ok := m.GetWellKnown().(*BytesRules_Ipv6); ok { + return x.Ipv6 + } + return false +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*BytesRules) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _BytesRules_OneofMarshaler, _BytesRules_OneofUnmarshaler, _BytesRules_OneofSizer, []interface{}{ + (*BytesRules_Ip)(nil), + (*BytesRules_Ipv4)(nil), + (*BytesRules_Ipv6)(nil), + } +} + +func _BytesRules_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*BytesRules) + // well_known + switch x := m.WellKnown.(type) { + case *BytesRules_Ip: + t := uint64(0) + if x.Ip { + t = 1 + } + b.EncodeVarint(10<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *BytesRules_Ipv4: + t := uint64(0) + if x.Ipv4 { + t = 1 + } + b.EncodeVarint(11<<3 | proto.WireVarint) + b.EncodeVarint(t) + case *BytesRules_Ipv6: + t := uint64(0) + if x.Ipv6 { + t = 1 + } + b.EncodeVarint(12<<3 | proto.WireVarint) + b.EncodeVarint(t) + case nil: + default: + return fmt.Errorf("BytesRules.WellKnown has unexpected type %T", x) + } + return nil +} + +func _BytesRules_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*BytesRules) + switch tag { + case 10: // well_known.ip + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &BytesRules_Ip{x != 0} + return true, err + case 11: // well_known.ipv4 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &BytesRules_Ipv4{x != 0} + return true, err + case 12: // well_known.ipv6 + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.WellKnown = &BytesRules_Ipv6{x != 0} + return true, err + default: + return false, nil + } +} + +func _BytesRules_OneofSizer(msg proto.Message) (n int) { + m := msg.(*BytesRules) + // well_known + switch x := m.WellKnown.(type) { + case *BytesRules_Ip: + n += 1 // tag and wire + n += 1 + case *BytesRules_Ipv4: + n += 1 // tag and wire + n += 1 + case *BytesRules_Ipv6: + n += 1 // tag and wire + n += 1 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type EnumRules struct { + Const *int32 `protobuf:"varint,1,opt,name=const" json:"const,omitempty"` + DefinedOnly *bool `protobuf:"varint,2,opt,name=defined_only,json=definedOnly" json:"defined_only,omitempty"` + In []int32 `protobuf:"varint,3,rep,name=in" 
json:"in,omitempty"` + NotIn []int32 `protobuf:"varint,4,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EnumRules) Reset() { *m = EnumRules{} } +func (m *EnumRules) String() string { return proto.CompactTextString(m) } +func (*EnumRules) ProtoMessage() {} +func (*EnumRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{16} +} +func (m *EnumRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EnumRules.Unmarshal(m, b) +} +func (m *EnumRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EnumRules.Marshal(b, m, deterministic) +} +func (dst *EnumRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_EnumRules.Merge(dst, src) +} +func (m *EnumRules) XXX_Size() int { + return xxx_messageInfo_EnumRules.Size(m) +} +func (m *EnumRules) XXX_DiscardUnknown() { + xxx_messageInfo_EnumRules.DiscardUnknown(m) +} + +var xxx_messageInfo_EnumRules proto.InternalMessageInfo + +func (m *EnumRules) GetConst() int32 { + if m != nil && m.Const != nil { + return *m.Const + } + return 0 +} + +func (m *EnumRules) GetDefinedOnly() bool { + if m != nil && m.DefinedOnly != nil { + return *m.DefinedOnly + } + return false +} + +func (m *EnumRules) GetIn() []int32 { + if m != nil { + return m.In + } + return nil +} + +func (m *EnumRules) GetNotIn() []int32 { + if m != nil { + return m.NotIn + } + return nil +} + +type MessageRules struct { + Skip *bool `protobuf:"varint,1,opt,name=skip" json:"skip,omitempty"` + Required *bool `protobuf:"varint,2,opt,name=required" json:"required,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MessageRules) Reset() { *m = MessageRules{} } +func (m *MessageRules) String() string { return proto.CompactTextString(m) } +func (*MessageRules) ProtoMessage() {} +func (*MessageRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{17} +} +func (m *MessageRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MessageRules.Unmarshal(m, b) +} +func (m *MessageRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MessageRules.Marshal(b, m, deterministic) +} +func (dst *MessageRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_MessageRules.Merge(dst, src) +} +func (m *MessageRules) XXX_Size() int { + return xxx_messageInfo_MessageRules.Size(m) +} +func (m *MessageRules) XXX_DiscardUnknown() { + xxx_messageInfo_MessageRules.DiscardUnknown(m) +} + +var xxx_messageInfo_MessageRules proto.InternalMessageInfo + +func (m *MessageRules) GetSkip() bool { + if m != nil && m.Skip != nil { + return *m.Skip + } + return false +} + +func (m *MessageRules) GetRequired() bool { + if m != nil && m.Required != nil { + return *m.Required + } + return false +} + +type RepeatedRules struct { + MinItems *uint64 `protobuf:"varint,1,opt,name=min_items,json=minItems" json:"min_items,omitempty"` + MaxItems *uint64 `protobuf:"varint,2,opt,name=max_items,json=maxItems" json:"max_items,omitempty"` + Unique *bool `protobuf:"varint,3,opt,name=unique" json:"unique,omitempty"` + Items *FieldRules `protobuf:"bytes,4,opt,name=items" json:"items,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RepeatedRules) Reset() { *m = RepeatedRules{} } 
+func (m *RepeatedRules) String() string { return proto.CompactTextString(m) } +func (*RepeatedRules) ProtoMessage() {} +func (*RepeatedRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{18} +} +func (m *RepeatedRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RepeatedRules.Unmarshal(m, b) +} +func (m *RepeatedRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RepeatedRules.Marshal(b, m, deterministic) +} +func (dst *RepeatedRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_RepeatedRules.Merge(dst, src) +} +func (m *RepeatedRules) XXX_Size() int { + return xxx_messageInfo_RepeatedRules.Size(m) +} +func (m *RepeatedRules) XXX_DiscardUnknown() { + xxx_messageInfo_RepeatedRules.DiscardUnknown(m) +} + +var xxx_messageInfo_RepeatedRules proto.InternalMessageInfo + +func (m *RepeatedRules) GetMinItems() uint64 { + if m != nil && m.MinItems != nil { + return *m.MinItems + } + return 0 +} + +func (m *RepeatedRules) GetMaxItems() uint64 { + if m != nil && m.MaxItems != nil { + return *m.MaxItems + } + return 0 +} + +func (m *RepeatedRules) GetUnique() bool { + if m != nil && m.Unique != nil { + return *m.Unique + } + return false +} + +func (m *RepeatedRules) GetItems() *FieldRules { + if m != nil { + return m.Items + } + return nil +} + +type MapRules struct { + MinPairs *uint64 `protobuf:"varint,1,opt,name=min_pairs,json=minPairs" json:"min_pairs,omitempty"` + MaxPairs *uint64 `protobuf:"varint,2,opt,name=max_pairs,json=maxPairs" json:"max_pairs,omitempty"` + NoSparse *bool `protobuf:"varint,3,opt,name=no_sparse,json=noSparse" json:"no_sparse,omitempty"` + Keys *FieldRules `protobuf:"bytes,4,opt,name=keys" json:"keys,omitempty"` + Values *FieldRules `protobuf:"bytes,5,opt,name=values" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MapRules) Reset() { *m = MapRules{} } +func (m *MapRules) String() string { return proto.CompactTextString(m) } +func (*MapRules) ProtoMessage() {} +func (*MapRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{19} +} +func (m *MapRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MapRules.Unmarshal(m, b) +} +func (m *MapRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MapRules.Marshal(b, m, deterministic) +} +func (dst *MapRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_MapRules.Merge(dst, src) +} +func (m *MapRules) XXX_Size() int { + return xxx_messageInfo_MapRules.Size(m) +} +func (m *MapRules) XXX_DiscardUnknown() { + xxx_messageInfo_MapRules.DiscardUnknown(m) +} + +var xxx_messageInfo_MapRules proto.InternalMessageInfo + +func (m *MapRules) GetMinPairs() uint64 { + if m != nil && m.MinPairs != nil { + return *m.MinPairs + } + return 0 +} + +func (m *MapRules) GetMaxPairs() uint64 { + if m != nil && m.MaxPairs != nil { + return *m.MaxPairs + } + return 0 +} + +func (m *MapRules) GetNoSparse() bool { + if m != nil && m.NoSparse != nil { + return *m.NoSparse + } + return false +} + +func (m *MapRules) GetKeys() *FieldRules { + if m != nil { + return m.Keys + } + return nil +} + +func (m *MapRules) GetValues() *FieldRules { + if m != nil { + return m.Values + } + return nil +} + +type AnyRules struct { + Required *bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + In []string `protobuf:"bytes,2,rep,name=in" json:"in,omitempty"` + 
NotIn []string `protobuf:"bytes,3,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnyRules) Reset() { *m = AnyRules{} } +func (m *AnyRules) String() string { return proto.CompactTextString(m) } +func (*AnyRules) ProtoMessage() {} +func (*AnyRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{20} +} +func (m *AnyRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnyRules.Unmarshal(m, b) +} +func (m *AnyRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnyRules.Marshal(b, m, deterministic) +} +func (dst *AnyRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnyRules.Merge(dst, src) +} +func (m *AnyRules) XXX_Size() int { + return xxx_messageInfo_AnyRules.Size(m) +} +func (m *AnyRules) XXX_DiscardUnknown() { + xxx_messageInfo_AnyRules.DiscardUnknown(m) +} + +var xxx_messageInfo_AnyRules proto.InternalMessageInfo + +func (m *AnyRules) GetRequired() bool { + if m != nil && m.Required != nil { + return *m.Required + } + return false +} + +func (m *AnyRules) GetIn() []string { + if m != nil { + return m.In + } + return nil +} + +func (m *AnyRules) GetNotIn() []string { + if m != nil { + return m.NotIn + } + return nil +} + +type DurationRules struct { + Required *bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + Const *duration.Duration `protobuf:"bytes,2,opt,name=const" json:"const,omitempty"` + Lt *duration.Duration `protobuf:"bytes,3,opt,name=lt" json:"lt,omitempty"` + Lte *duration.Duration `protobuf:"bytes,4,opt,name=lte" json:"lte,omitempty"` + Gt *duration.Duration `protobuf:"bytes,5,opt,name=gt" json:"gt,omitempty"` + Gte *duration.Duration `protobuf:"bytes,6,opt,name=gte" json:"gte,omitempty"` + In []*duration.Duration `protobuf:"bytes,7,rep,name=in" json:"in,omitempty"` + NotIn []*duration.Duration `protobuf:"bytes,8,rep,name=not_in,json=notIn" json:"not_in,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DurationRules) Reset() { *m = DurationRules{} } +func (m *DurationRules) String() string { return proto.CompactTextString(m) } +func (*DurationRules) ProtoMessage() {} +func (*DurationRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{21} +} +func (m *DurationRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DurationRules.Unmarshal(m, b) +} +func (m *DurationRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DurationRules.Marshal(b, m, deterministic) +} +func (dst *DurationRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_DurationRules.Merge(dst, src) +} +func (m *DurationRules) XXX_Size() int { + return xxx_messageInfo_DurationRules.Size(m) +} +func (m *DurationRules) XXX_DiscardUnknown() { + xxx_messageInfo_DurationRules.DiscardUnknown(m) +} + +var xxx_messageInfo_DurationRules proto.InternalMessageInfo + +func (m *DurationRules) GetRequired() bool { + if m != nil && m.Required != nil { + return *m.Required + } + return false +} + +func (m *DurationRules) GetConst() *duration.Duration { + if m != nil { + return m.Const + } + return nil +} + +func (m *DurationRules) GetLt() *duration.Duration { + if m != nil { + return m.Lt + } + return nil +} + +func (m *DurationRules) GetLte() *duration.Duration { + if m != nil { + return m.Lte + } + 
return nil +} + +func (m *DurationRules) GetGt() *duration.Duration { + if m != nil { + return m.Gt + } + return nil +} + +func (m *DurationRules) GetGte() *duration.Duration { + if m != nil { + return m.Gte + } + return nil +} + +func (m *DurationRules) GetIn() []*duration.Duration { + if m != nil { + return m.In + } + return nil +} + +func (m *DurationRules) GetNotIn() []*duration.Duration { + if m != nil { + return m.NotIn + } + return nil +} + +type TimestampRules struct { + Required *bool `protobuf:"varint,1,opt,name=required" json:"required,omitempty"` + Const *timestamp.Timestamp `protobuf:"bytes,2,opt,name=const" json:"const,omitempty"` + Lt *timestamp.Timestamp `protobuf:"bytes,3,opt,name=lt" json:"lt,omitempty"` + Lte *timestamp.Timestamp `protobuf:"bytes,4,opt,name=lte" json:"lte,omitempty"` + Gt *timestamp.Timestamp `protobuf:"bytes,5,opt,name=gt" json:"gt,omitempty"` + Gte *timestamp.Timestamp `protobuf:"bytes,6,opt,name=gte" json:"gte,omitempty"` + LtNow *bool `protobuf:"varint,7,opt,name=lt_now,json=ltNow" json:"lt_now,omitempty"` + GtNow *bool `protobuf:"varint,8,opt,name=gt_now,json=gtNow" json:"gt_now,omitempty"` + Within *duration.Duration `protobuf:"bytes,9,opt,name=within" json:"within,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TimestampRules) Reset() { *m = TimestampRules{} } +func (m *TimestampRules) String() string { return proto.CompactTextString(m) } +func (*TimestampRules) ProtoMessage() {} +func (*TimestampRules) Descriptor() ([]byte, []int) { + return fileDescriptor_validate_4e427f48c21fab34, []int{22} +} +func (m *TimestampRules) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TimestampRules.Unmarshal(m, b) +} +func (m *TimestampRules) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TimestampRules.Marshal(b, m, deterministic) +} +func (dst *TimestampRules) XXX_Merge(src proto.Message) { + xxx_messageInfo_TimestampRules.Merge(dst, src) +} +func (m *TimestampRules) XXX_Size() int { + return xxx_messageInfo_TimestampRules.Size(m) +} +func (m *TimestampRules) XXX_DiscardUnknown() { + xxx_messageInfo_TimestampRules.DiscardUnknown(m) +} + +var xxx_messageInfo_TimestampRules proto.InternalMessageInfo + +func (m *TimestampRules) GetRequired() bool { + if m != nil && m.Required != nil { + return *m.Required + } + return false +} + +func (m *TimestampRules) GetConst() *timestamp.Timestamp { + if m != nil { + return m.Const + } + return nil +} + +func (m *TimestampRules) GetLt() *timestamp.Timestamp { + if m != nil { + return m.Lt + } + return nil +} + +func (m *TimestampRules) GetLte() *timestamp.Timestamp { + if m != nil { + return m.Lte + } + return nil +} + +func (m *TimestampRules) GetGt() *timestamp.Timestamp { + if m != nil { + return m.Gt + } + return nil +} + +func (m *TimestampRules) GetGte() *timestamp.Timestamp { + if m != nil { + return m.Gte + } + return nil +} + +func (m *TimestampRules) GetLtNow() bool { + if m != nil && m.LtNow != nil { + return *m.LtNow + } + return false +} + +func (m *TimestampRules) GetGtNow() bool { + if m != nil && m.GtNow != nil { + return *m.GtNow + } + return false +} + +func (m *TimestampRules) GetWithin() *duration.Duration { + if m != nil { + return m.Within + } + return nil +} + +var E_Disabled = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.MessageOptions)(nil), + ExtensionType: (*bool)(nil), + Field: 919191, + Name: "validate.disabled", + Tag: "varint,919191,opt,name=disabled", 
+ Filename: "validate/validate.proto", +} + +var E_Required = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.OneofOptions)(nil), + ExtensionType: (*bool)(nil), + Field: 919191, + Name: "validate.required", + Tag: "varint,919191,opt,name=required", + Filename: "validate/validate.proto", +} + +var E_Rules = &proto.ExtensionDesc{ + ExtendedType: (*descriptor.FieldOptions)(nil), + ExtensionType: (*FieldRules)(nil), + Field: 919191, + Name: "validate.rules", + Tag: "bytes,919191,opt,name=rules", + Filename: "validate/validate.proto", +} + +func init() { + proto.RegisterType((*FieldRules)(nil), "validate.FieldRules") + proto.RegisterType((*FloatRules)(nil), "validate.FloatRules") + proto.RegisterType((*DoubleRules)(nil), "validate.DoubleRules") + proto.RegisterType((*Int32Rules)(nil), "validate.Int32Rules") + proto.RegisterType((*Int64Rules)(nil), "validate.Int64Rules") + proto.RegisterType((*UInt32Rules)(nil), "validate.UInt32Rules") + proto.RegisterType((*UInt64Rules)(nil), "validate.UInt64Rules") + proto.RegisterType((*SInt32Rules)(nil), "validate.SInt32Rules") + proto.RegisterType((*SInt64Rules)(nil), "validate.SInt64Rules") + proto.RegisterType((*Fixed32Rules)(nil), "validate.Fixed32Rules") + proto.RegisterType((*Fixed64Rules)(nil), "validate.Fixed64Rules") + proto.RegisterType((*SFixed32Rules)(nil), "validate.SFixed32Rules") + proto.RegisterType((*SFixed64Rules)(nil), "validate.SFixed64Rules") + proto.RegisterType((*BoolRules)(nil), "validate.BoolRules") + proto.RegisterType((*StringRules)(nil), "validate.StringRules") + proto.RegisterType((*BytesRules)(nil), "validate.BytesRules") + proto.RegisterType((*EnumRules)(nil), "validate.EnumRules") + proto.RegisterType((*MessageRules)(nil), "validate.MessageRules") + proto.RegisterType((*RepeatedRules)(nil), "validate.RepeatedRules") + proto.RegisterType((*MapRules)(nil), "validate.MapRules") + proto.RegisterType((*AnyRules)(nil), "validate.AnyRules") + proto.RegisterType((*DurationRules)(nil), "validate.DurationRules") + proto.RegisterType((*TimestampRules)(nil), "validate.TimestampRules") + proto.RegisterExtension(E_Disabled) + proto.RegisterExtension(E_Required) + proto.RegisterExtension(E_Rules) +} + +func init() { proto.RegisterFile("validate/validate.proto", fileDescriptor_validate_4e427f48c21fab34) } + +var fileDescriptor_validate_4e427f48c21fab34 = []byte{ + // 1634 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x98, 0xcb, 0x6e, 0xdb, 0xce, + 0x15, 0xc6, 0x2b, 0xde, 0x44, 0x8d, 0xa5, 0x48, 0x9a, 0xd8, 0x0e, 0xe3, 0x5e, 0xe2, 0x68, 0x51, + 0x38, 0x69, 0x60, 0xa5, 0x8e, 0x2b, 0x04, 0x09, 0x5a, 0xa0, 0x46, 0x1a, 0x34, 0x68, 0xd3, 0x14, + 0x74, 0xb2, 0xe9, 0x46, 0xa0, 0xad, 0x11, 0x33, 0x30, 0x35, 0x64, 0x48, 0xca, 0xb6, 0x1e, 0x22, + 0x6d, 0x77, 0x7d, 0x96, 0xae, 0xba, 0xef, 0x9b, 0x74, 0xdd, 0x6d, 0x17, 0xc5, 0xdc, 0x78, 0x39, + 0xa4, 0xe5, 0xc5, 0x7f, 0xa7, 0x39, 0xe7, 0x3b, 0x33, 0x3f, 0x7c, 0x23, 0xce, 0x1c, 0x12, 0x3d, + 0xba, 0x0e, 0x22, 0xba, 0x08, 0x72, 0x32, 0xd5, 0x3f, 0x8e, 0x93, 0x34, 0xce, 0x63, 0xec, 0xea, + 0xf1, 0xc1, 0x61, 0x18, 0xc7, 0x61, 0x44, 0xa6, 0x22, 0x7e, 0xb1, 0x5e, 0x4e, 0x17, 0x24, 0xbb, + 0x4c, 0x69, 0x92, 0xc7, 0xa9, 0xd4, 0x1e, 0xfc, 0xac, 0xa1, 0x58, 0xa7, 0x41, 0x4e, 0x63, 0xa6, + 0xf2, 0x4f, 0x60, 0x3e, 0xa7, 0x2b, 0x92, 0xe5, 0xc1, 0x2a, 0x91, 0x82, 0xc9, 0xbf, 0x5d, 0x84, + 0xde, 0x53, 0x12, 0x2d, 0xfc, 0x75, 0x44, 0x32, 0xfc, 0x02, 0xd9, 0xcb, 0x28, 0x0e, 0x72, 0xaf, + 0x73, 0xd8, 0x39, 0xda, 0x39, 0xd9, 0x3d, 0x2e, 0xd8, 0xde, 0xf3, 0xb0, 
0x10, 0xfd, 0xfe, 0x47, + 0xbe, 0x14, 0xe1, 0x29, 0x72, 0x16, 0xf1, 0xfa, 0x22, 0x22, 0x9e, 0x21, 0xe4, 0x7b, 0xa5, 0xfc, + 0x9d, 0x88, 0x6b, 0xbd, 0x92, 0xf1, 0xe9, 0x29, 0xcb, 0x5f, 0x9d, 0x78, 0x26, 0x9c, 0xfe, 0x03, + 0x0f, 0x17, 0xd3, 0x0b, 0x91, 0x52, 0xcf, 0x4e, 0x3d, 0xab, 0x45, 0x3d, 0x3b, 0xad, 0xaa, 0x67, + 0xa7, 0x1c, 0x66, 0x2d, 0x27, 0xb7, 0x21, 0xcc, 0x97, 0xda, 0xec, 0x4a, 0xa6, 0x0b, 0x66, 0xa7, + 0x9e, 0xd3, 0x56, 0x50, 0x2e, 0xa0, 0x64, 0xbc, 0x20, 0x93, 0x2b, 0x74, 0x61, 0xc1, 0x79, 0x7d, + 0x85, 0xac, 0x58, 0x21, 0x93, 0x2b, 0xb8, 0x6d, 0x05, 0x95, 0x15, 0xa4, 0x0c, 0x9f, 0xa0, 0xee, + 0x92, 0xde, 0x92, 0xc5, 0xab, 0x13, 0xaf, 0x27, 0x2a, 0xf6, 0x2b, 0x1b, 0x20, 0x13, 0xba, 0x44, + 0x0b, 0x8b, 0x9a, 0xd9, 0xa9, 0x87, 0x5a, 0x6b, 0xca, 0x65, 0xb4, 0x10, 0xff, 0x0a, 0xb9, 0x99, + 0x5e, 0x68, 0x47, 0x14, 0x3d, 0xaa, 0xa0, 0x81, 0x95, 0x0a, 0x69, 0x59, 0x36, 0x3b, 0xf5, 0xfa, + 0xed, 0x65, 0xe5, 0x62, 0x85, 0x14, 0x3f, 0x43, 0xd6, 0x45, 0x1c, 0x47, 0xde, 0x40, 0x94, 0x3c, + 0x2c, 0x4b, 0xce, 0xe2, 0x38, 0xd2, 0x72, 0x21, 0x11, 0x8e, 0xe5, 0x29, 0x65, 0xa1, 0xf7, 0xa0, + 0xe1, 0x98, 0x88, 0x97, 0x8e, 0x89, 0x21, 0xff, 0x8f, 0x5c, 0x6c, 0x72, 0x92, 0x79, 0x43, 0xf8, + 0x1f, 0x39, 0xe3, 0xe1, 0xe2, 0x3f, 0x22, 0x44, 0x9c, 0x84, 0xb0, 0xf5, 0xca, 0x1b, 0x41, 0x92, + 0xdf, 0xb1, 0xf5, 0xaa, 0x20, 0xe1, 0x12, 0x6e, 0xeb, 0x8a, 0x64, 0x59, 0x10, 0x12, 0x6f, 0x0c, + 0x6d, 0xfd, 0x28, 0x13, 0x85, 0xad, 0x4a, 0xc8, 0xfd, 0x49, 0x49, 0x42, 0x82, 0x9c, 0x2c, 0x3c, + 0x0c, 0xfd, 0xf1, 0x55, 0xa6, 0xf0, 0x47, 0x4b, 0xf1, 0xcf, 0x91, 0xb9, 0x0a, 0x12, 0xef, 0xa1, + 0xa8, 0xc0, 0x95, 0x65, 0x82, 0x44, 0x8b, 0xb9, 0x80, 0xeb, 0x02, 0xb6, 0xf1, 0x76, 0xa1, 0xee, + 0xb7, 0x6c, 0x53, 0xe8, 0x02, 0xb6, 0xe1, 0x18, 0xfa, 0x18, 0xf0, 0xf6, 0x20, 0xc6, 0x3b, 0x95, + 0x29, 0x30, 0xb4, 0x14, 0xbf, 0x46, 0xbd, 0xe2, 0x74, 0xf0, 0xf6, 0x45, 0x9d, 0x57, 0xd6, 0x7d, + 0xd6, 0x29, 0x5d, 0x58, 0x8a, 0xcf, 0x1c, 0x64, 0xe5, 0x9b, 0x84, 0x4c, 0xbe, 0x77, 0x10, 0x2a, + 0xcf, 0x09, 0xbc, 0x8b, 0xec, 0xcb, 0x98, 0x65, 0xf2, 0x30, 0x31, 0x7c, 0x39, 0xc0, 0x0f, 0x90, + 0x11, 0xe5, 0xe2, 0xc0, 0x30, 0x7c, 0x23, 0xca, 0xf1, 0x08, 0x99, 0x51, 0x4e, 0xc4, 0x89, 0x60, + 0xf8, 0xfc, 0x27, 0x57, 0x84, 0xb9, 0x78, 0xe8, 0x0d, 0xdf, 0x08, 0x85, 0x22, 0xcc, 0x89, 0x78, + 0xac, 0x0d, 0x9f, 0xff, 0xe4, 0x0a, 0xca, 0x3c, 0xe7, 0xd0, 0xe4, 0x0a, 0xca, 0xf0, 0x1e, 0x72, + 0x58, 0x9c, 0xcf, 0x29, 0xf3, 0xba, 0x22, 0x66, 0xb3, 0x38, 0xff, 0xc0, 0x26, 0x7f, 0xed, 0xa0, + 0x9d, 0xca, 0x41, 0x54, 0x07, 0xea, 0x34, 0x81, 0x3a, 0x10, 0xa8, 0x03, 0x81, 0x3a, 0x10, 0xa8, + 0x03, 0x81, 0x3a, 0x2d, 0x40, 0x1d, 0x0d, 0xc4, 0x0d, 0x2a, 0x4f, 0x8a, 0x3a, 0x8f, 0xdd, 0xe4, + 0xb1, 0x21, 0x8f, 0x0d, 0x79, 0x6c, 0xc8, 0x63, 0x43, 0x1e, 0xbb, 0x85, 0xc7, 0x06, 0x3c, 0xea, + 0xa1, 0xad, 0xf3, 0x98, 0x4d, 0x1e, 0x13, 0xf2, 0x98, 0x90, 0xc7, 0x84, 0x3c, 0x26, 0xe4, 0x31, + 0x5b, 0x78, 0xcc, 0xea, 0x86, 0x7d, 0xb9, 0xcb, 0xa0, 0x41, 0x13, 0x68, 0x00, 0x81, 0x06, 0x10, + 0x68, 0x00, 0x81, 0x06, 0x10, 0x68, 0xd0, 0x02, 0x34, 0x80, 0x40, 0xad, 0x0e, 0x59, 0x4d, 0x20, + 0x0b, 0x02, 0x59, 0x10, 0xc8, 0x82, 0x40, 0x16, 0x04, 0xb2, 0x5a, 0x80, 0xac, 0x2a, 0xd0, 0xf9, + 0x5d, 0x0e, 0x8d, 0x9b, 0x40, 0x63, 0x08, 0x34, 0x86, 0x40, 0x63, 0x08, 0x34, 0x86, 0x40, 0xe3, + 0x16, 0xa0, 0x31, 0x04, 0x6a, 0x75, 0x08, 0x37, 0x81, 0x30, 0x04, 0xc2, 0x10, 0x08, 0x43, 0x20, + 0x0c, 0x81, 0x70, 0x0b, 0x10, 0xd6, 0x40, 0x7f, 0xeb, 0xa0, 0x7e, 0xf5, 0x06, 0xab, 0x13, 0x75, + 0x9b, 0x44, 0x5d, 0x48, 0xd4, 0x85, 0x44, 0x5d, 0x48, 0xd4, 0x85, 0x44, 0xdd, 0x16, 0xa2, 0x6e, + 
0x83, 0xa8, 0xd5, 0x23, 0xa7, 0x49, 0xe4, 0x40, 0x22, 0x07, 0x12, 0x39, 0x90, 0xc8, 0x81, 0x44, + 0x4e, 0x0b, 0x91, 0xa3, 0x89, 0xfe, 0xde, 0x41, 0x83, 0xf3, 0xbb, 0x4d, 0x1a, 0x36, 0x91, 0x86, + 0x10, 0x69, 0x08, 0x91, 0x86, 0x10, 0x69, 0x08, 0x91, 0x86, 0x2d, 0x48, 0xc3, 0x26, 0x52, 0xab, + 0x4b, 0xa3, 0x26, 0xd2, 0x08, 0x22, 0x8d, 0x20, 0xd2, 0x08, 0x22, 0x8d, 0x20, 0xd2, 0xa8, 0x05, + 0x69, 0xa4, 0x91, 0x9e, 0xa2, 0x5e, 0xd1, 0xa1, 0xd4, 0x69, 0x5c, 0x45, 0x33, 0xf9, 0x9f, 0x89, + 0x76, 0x2a, 0x8d, 0x49, 0x5d, 0xd5, 0xd3, 0xcc, 0x9c, 0x91, 0x30, 0x71, 0xc1, 0xf3, 0xf3, 0x80, + 0x30, 0xfc, 0x08, 0x75, 0x57, 0x94, 0xcd, 0x79, 0x54, 0x1e, 0x1b, 0xce, 0x8a, 0xb2, 0x3f, 0xaa, + 0x44, 0x70, 0x2b, 0x12, 0xa6, 0x4a, 0x04, 0xb7, 0x3c, 0xf1, 0x63, 0xd4, 0x8b, 0x08, 0x9b, 0xcb, + 0x66, 0x67, 0x57, 0xa4, 0xdc, 0x88, 0x30, 0xd1, 0xe5, 0xf0, 0x24, 0x9f, 0x4e, 0x26, 0xe5, 0x29, + 0xe3, 0xae, 0x68, 0x25, 0x19, 0xdc, 0xaa, 0xa4, 0xad, 0x92, 0xc1, 0xad, 0x4c, 0x7a, 0xa8, 0x9b, + 0x04, 0x79, 0x4e, 0x52, 0x26, 0xba, 0xe0, 0x9e, 0xaf, 0x87, 0x78, 0x1f, 0x39, 0x49, 0x4a, 0x96, + 0xf4, 0x56, 0x74, 0xbb, 0x3d, 0x5f, 0x8d, 0x78, 0x3c, 0x5b, 0x2f, 0x79, 0xdc, 0x95, 0x71, 0x39, + 0xc2, 0x07, 0xc8, 0xbd, 0x8c, 0x59, 0x1e, 0x50, 0x96, 0x89, 0xe6, 0xb5, 0xe7, 0x17, 0x63, 0x65, + 0x38, 0x3a, 0x34, 0x8f, 0x7a, 0xc0, 0xf0, 0x1d, 0x11, 0x93, 0x86, 0xe3, 0x7d, 0x64, 0x93, 0x55, + 0x40, 0x23, 0xd1, 0x5c, 0xba, 0xbc, 0x6d, 0x13, 0x43, 0xfc, 0x13, 0xe4, 0x7e, 0x8d, 0xb3, 0x9c, + 0x05, 0x2b, 0x22, 0x9a, 0x48, 0x9e, 0x2a, 0x22, 0x78, 0x84, 0x0c, 0x9a, 0x88, 0x7e, 0x91, 0xc7, + 0x0d, 0x9a, 0xe0, 0x5d, 0x64, 0xd1, 0xe4, 0xfa, 0x54, 0xf4, 0x84, 0x3c, 0x26, 0x46, 0x2a, 0x3a, + 0x13, 0xcd, 0x9f, 0x8e, 0xce, 0x30, 0x46, 0xe6, 0x3a, 0xa5, 0xa2, 0xc7, 0xe3, 0x41, 0x3e, 0xc0, + 0x8f, 0x51, 0x77, 0x9d, 0xd2, 0x79, 0x4a, 0x96, 0xa2, 0x8d, 0x73, 0xc5, 0x3b, 0x40, 0x4a, 0x7d, + 0xb2, 0x3c, 0xeb, 0x23, 0x74, 0x43, 0xa2, 0x68, 0x7e, 0xc5, 0xe2, 0x1b, 0x36, 0xf9, 0x97, 0x81, + 0x50, 0xd9, 0x67, 0xd6, 0x77, 0xbf, 0x0f, 0x76, 0x7f, 0xf0, 0x43, 0x76, 0xbf, 0xb2, 0x4d, 0xd6, + 0x5d, 0xdb, 0x64, 0x8b, 0x45, 0x9b, 0xdb, 0xe4, 0xc8, 0x78, 0xcb, 0x36, 0x75, 0x45, 0x06, 0x6e, + 0x93, 0x7b, 0x68, 0x1e, 0xf5, 0xc1, 0x36, 0xf5, 0x44, 0x4c, 0x6d, 0x93, 0x34, 0x1c, 0xb5, 0x18, + 0xbe, 0xd3, 0x6a, 0x78, 0xbf, 0x6a, 0x38, 0x70, 0xf0, 0x0a, 0xf5, 0x8a, 0xde, 0xfb, 0x8e, 0x7e, + 0xe8, 0x29, 0xea, 0x2f, 0xc8, 0x92, 0x32, 0xb2, 0x98, 0xc7, 0x2c, 0xda, 0x08, 0xcb, 0x5c, 0x7f, + 0x47, 0xc5, 0x3e, 0xb1, 0x68, 0xa3, 0xc0, 0xcd, 0x96, 0x76, 0xc7, 0xaa, 0xb6, 0x3b, 0xbf, 0x41, + 0xfd, 0x6a, 0xeb, 0x8e, 0x31, 0xb2, 0xb2, 0x2b, 0x9a, 0xa8, 0x47, 0x5a, 0xfc, 0xe6, 0xfe, 0xa4, + 0xe4, 0xdb, 0x9a, 0xa6, 0x64, 0xa1, 0x56, 0x2a, 0xc6, 0xbc, 0x5d, 0x1a, 0xd4, 0xda, 0x78, 0xfd, + 0xe0, 0xd1, 0x9c, 0xac, 0x32, 0xd5, 0x13, 0xf0, 0x07, 0xef, 0x03, 0x1f, 0xeb, 0x07, 0x4f, 0x26, + 0x8d, 0xe2, 0xc1, 0x93, 0xc9, 0x7d, 0xe4, 0xac, 0x19, 0xfd, 0xb6, 0x96, 0x47, 0x97, 0xeb, 0xab, + 0x11, 0x7e, 0x8e, 0x6c, 0x59, 0xd0, 0x78, 0xe9, 0x2d, 0x5f, 0xd3, 0x7d, 0x29, 0x99, 0xfc, 0xb3, + 0x83, 0x5c, 0xfd, 0x92, 0xa0, 0x51, 0x92, 0x80, 0xa6, 0x55, 0x94, 0x3f, 0xf3, 0xb1, 0x46, 0x91, + 0xc9, 0x12, 0xa5, 0x48, 0xb2, 0x78, 0x9e, 0x25, 0x41, 0x9a, 0x69, 0x1a, 0x97, 0xc5, 0xe7, 0x62, + 0x8c, 0x8f, 0x90, 0x75, 0x45, 0x36, 0xdb, 0x71, 0x84, 0x02, 0xbf, 0x40, 0xce, 0x75, 0x10, 0xad, + 0xd5, 0x21, 0x73, 0x97, 0x56, 0x69, 0x26, 0x1f, 0x91, 0xab, 0xdf, 0x5b, 0x6a, 0x9e, 0x77, 0xea, + 0x9e, 0xab, 0xad, 0x35, 0x5a, 0x8e, 0x0e, 0xb3, 0x72, 0x74, 0x4c, 0xfe, 0x63, 0xa0, 0x41, 0xed, + 0xd5, 0x66, 0xeb, 0xa4, 
0x53, 0xfd, 0x47, 0x93, 0xdf, 0x2d, 0x1e, 0x1f, 0xcb, 0xcf, 0x24, 0xc7, + 0xfa, 0x33, 0x49, 0xf9, 0x96, 0xa4, 0xfe, 0x83, 0xcf, 0xc4, 0xad, 0x63, 0xde, 0xa7, 0xe6, 0x17, + 0xd2, 0x2f, 0xe4, 0x85, 0x64, 0xdd, 0xa7, 0x15, 0x77, 0xd5, 0x33, 0x71, 0x57, 0xd9, 0xf7, 0xce, + 0x1b, 0x8a, 0x79, 0xf9, 0x35, 0xe6, 0xdc, 0x3b, 0x6f, 0x28, 0xe7, 0x55, 0xb7, 0xd9, 0xf6, 0x79, + 0x29, 0xc3, 0x2f, 0x0b, 0x43, 0xdd, 0xfb, 0xe4, 0xca, 0xeb, 0xff, 0x1a, 0xe8, 0x41, 0xfd, 0x75, + 0x70, 0xab, 0xd9, 0x2f, 0xeb, 0x66, 0x1f, 0x34, 0xe6, 0x2f, 0xe7, 0x52, 0x6e, 0x3f, 0xaf, 0xb8, + 0xbd, 0x4d, 0xce, 0xed, 0x7e, 0x51, 0xb5, 0x7b, 0x9b, 0x58, 0xf8, 0xfd, 0xbc, 0xe2, 0xf7, 0xd6, + 0x99, 0x43, 0x31, 0x73, 0x69, 0xf8, 0xd6, 0x99, 0xb9, 0xe3, 0x7b, 0xc8, 0x89, 0xf2, 0x39, 0x8b, + 0x6f, 0xc4, 0xa9, 0xea, 0xfa, 0x76, 0x94, 0xff, 0x29, 0xbe, 0xe1, 0xe1, 0x50, 0x86, 0x5d, 0x19, + 0x0e, 0x45, 0xf8, 0x97, 0xc8, 0xb9, 0xa1, 0xf9, 0x57, 0x71, 0xb2, 0xde, 0xb3, 0x9f, 0x4a, 0xf8, + 0xe6, 0xd7, 0xc8, 0x5d, 0xd0, 0x2c, 0xb8, 0x88, 0xc8, 0x02, 0x3f, 0x69, 0xc8, 0xd5, 0xb9, 0xf6, + 0x29, 0xe1, 0x35, 0x99, 0xf7, 0x8f, 0xef, 0xaf, 0xe5, 0x2e, 0xe8, 0x92, 0x37, 0x6f, 0xcb, 0x1d, + 0xc2, 0x3f, 0x6d, 0x94, 0x7f, 0x62, 0x24, 0x5e, 0x36, 0x8a, 0x75, 0xc1, 0x9b, 0x3f, 0x20, 0x3b, + 0x15, 0xfb, 0xdc, 0xac, 0x14, 0x8f, 0x76, 0xbd, 0xf2, 0xce, 0x53, 0x4b, 0xcc, 0x71, 0xf6, 0x19, + 0xed, 0x5d, 0xc6, 0xab, 0xe3, 0x68, 0xb3, 0xcc, 0x8f, 0x93, 0xf0, 0xba, 0x90, 0xfe, 0xe5, 0xad, + 0x9a, 0x3b, 0x8c, 0xa3, 0x80, 0x85, 0xc7, 0x71, 0x1a, 0x4e, 0xc3, 0x34, 0xb9, 0x9c, 0x5e, 0x04, + 0x51, 0xc0, 0x2e, 0x49, 0x3a, 0xbd, 0x5d, 0x64, 0x53, 0xca, 0xf8, 0xb5, 0x17, 0x44, 0xf2, 0x93, + 0x66, 0xf1, 0xed, 0xf4, 0xff, 0x01, 0x00, 0x00, 0xff, 0xff, 0xdc, 0xca, 0x84, 0x71, 0x4f, 0x15, + 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/xds.go b/vendor/google.golang.org/grpc/balancer/xds/xds.go new file mode 100644 index 0000000..7795724 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/xds.go @@ -0,0 +1,612 @@ +// +build go1.12 + +/* + * + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package xds implements a balancer that communicates with a remote balancer using the Envoy xDS +// protocol. +package xds + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "reflect" + "sync" + "time" + + "github.com/golang/protobuf/proto" + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/xds/edsbalancer" + cdspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds" + edspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds" + + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +const ( + defaultTimeout = 10 * time.Second + xdsName = "xds" +) + +var ( + // This field is for testing purpose. + // TODO: if later we make startupTimeout configurable through BuildOptions(maybe?), then we can remove + // this field and configure through BuildOptions instead. 
+ startupTimeout = defaultTimeout + newEDSBalancer = func(cc balancer.ClientConn) edsBalancerInterface { + return edsbalancer.NewXDSBalancer(cc) + } +) + +func init() { + balancer.Register(newXDSBalancerBuilder()) +} + +type xdsBalancerBuilder struct{} + +func newXDSBalancerBuilder() balancer.Builder { + return &xdsBalancerBuilder{} +} + +func (b *xdsBalancerBuilder) Build(cc balancer.ClientConn, opts balancer.BuildOptions) balancer.Balancer { + ctx, cancel := context.WithCancel(context.Background()) + x := &xdsBalancer{ + ctx: ctx, + cancel: cancel, + buildOpts: opts, + startupTimeout: startupTimeout, + connStateMgr: &connStateMgr{}, + startup: true, + grpcUpdate: make(chan interface{}), + xdsClientUpdate: make(chan interface{}), + timer: createDrainedTimer(), // initialized a timer that won't fire without reset + } + x.cc = &xdsClientConn{ + updateState: x.connStateMgr.updateState, + ClientConn: cc, + } + go x.run() + return x +} + +func (b *xdsBalancerBuilder) Name() string { + return xdsName +} + +// edsBalancerInterface defines the interface that edsBalancer must implement to +// communicate with xdsBalancer. +// +// It's implemented by the real eds balancer and a fake testing eds balancer. +type edsBalancerInterface interface { + // HandleEDSResponse passes the received EDS message from traffic director to eds balancer. + HandleEDSResponse(edsResp *edspb.ClusterLoadAssignment) + // HandleChildPolicy updates the eds balancer the intra-cluster load balancing policy to use. + HandleChildPolicy(name string, config json.RawMessage) + // HandleSubConnStateChange handles state change for SubConn. + HandleSubConnStateChange(sc balancer.SubConn, state connectivity.State) + // Close closes the eds balancer. + Close() +} + +// xdsBalancer manages xdsClient and the actual balancer that does load balancing (either edsBalancer, +// or fallback LB). +type xdsBalancer struct { + cc balancer.ClientConn // *xdsClientConn + buildOpts balancer.BuildOptions + startupTimeout time.Duration + xdsStaleTimeout *time.Duration + connStateMgr *connStateMgr + ctx context.Context + cancel context.CancelFunc + startup bool // startup indicates whether this xdsBalancer is in startup stage. + inFallbackMonitor bool + + // xdsBalancer continuously monitor the channels below, and will handle events from them in sync. + grpcUpdate chan interface{} + xdsClientUpdate chan interface{} + timer *time.Timer + noSubConnAlert <-chan struct{} + + client *client // may change when passed a different service config + config *xdsConfig // may change when passed a different service config + xdsLB edsBalancerInterface + fallbackLB balancer.Balancer + fallbackInitData *addressUpdate // may change when HandleResolved address is called +} + +func (x *xdsBalancer) startNewXDSClient(u *xdsConfig) { + // If the xdsBalancer is in startup stage, then we need to apply the startup timeout for the first + // xdsClient to get a response from the traffic director. + if x.startup { + x.startFallbackMonitoring() + } + + // Whenever service config gives a new traffic director name, we need to create an xds client to + // connect to it. However, previous xds client should not be closed until the new one successfully + // connects to the traffic director (i.e. get an ADS response from the traffic director). Therefore, + // we let each new client to be responsible to close its immediate predecessor. In this way, + // xdsBalancer does not to implement complex synchronization to achieve the same purpose. 
+ prevClient := x.client + // haveGotADS is true means, this xdsClient has got ADS response from director in the past, which + // means it can close previous client if it hasn't and it now can send lose contact signal for + // fallback monitoring. + var haveGotADS bool + + // set up callbacks for the xds client. + newADS := func(ctx context.Context, resp proto.Message) error { + if !haveGotADS { + if prevClient != nil { + prevClient.close() + } + haveGotADS = true + } + return x.newADSResponse(ctx, resp) + } + loseContact := func(ctx context.Context) { + // loseContact signal is only useful when the current xds client has received ADS response before, + // and has not been closed by later xds client. + if haveGotADS { + select { + case <-ctx.Done(): + return + default: + } + x.loseContact(ctx) + } + } + exitCleanup := func() { + // Each xds client is responsible to close its predecessor if there's one. There are two paths + // for a xds client to close its predecessor: + // 1. Once it receives its first ADS response. + // 2. It hasn't received its first ADS response yet, but its own successor has received ADS + // response (which triggers the exit of it). Therefore, it needs to close its predecessor if + // it has one. + // Here the exitCleanup is for the 2nd path. + if !haveGotADS && prevClient != nil { + prevClient.close() + } + } + x.client = newXDSClient(u.BalancerName, x.cc.Target(), u.ChildPolicy == nil, x.buildOpts, newADS, loseContact, exitCleanup) + go x.client.run() +} + +// run gets executed in a goroutine once xdsBalancer is created. It monitors updates from grpc, +// xdsClient and load balancer. It synchronizes the operations that happen inside xdsBalancer. It +// exits when xdsBalancer is closed. +func (x *xdsBalancer) run() { + for { + select { + case update := <-x.grpcUpdate: + x.handleGRPCUpdate(update) + case update := <-x.xdsClientUpdate: + x.handleXDSClientUpdate(update) + case <-x.timer.C: // x.timer.C will block if we are not in fallback monitoring stage. + x.switchFallback() + case <-x.noSubConnAlert: // x.noSubConnAlert will block if we are not in fallback monitoring stage. + x.switchFallback() + case <-x.ctx.Done(): + if x.client != nil { + x.client.close() + } + if x.xdsLB != nil { + x.xdsLB.Close() + } + if x.fallbackLB != nil { + x.fallbackLB.Close() + } + return + } + } +} + +func (x *xdsBalancer) handleGRPCUpdate(update interface{}) { + switch u := update.(type) { + case *addressUpdate: + if x.fallbackLB != nil { + x.fallbackLB.HandleResolvedAddrs(u.addrs, u.err) + } + x.fallbackInitData = u + case *subConnStateUpdate: + if x.xdsLB != nil { + x.xdsLB.HandleSubConnStateChange(u.sc, u.state) + } + if x.fallbackLB != nil { + x.fallbackLB.HandleSubConnStateChange(u.sc, u.state) + } + case *xdsConfig: + if x.config == nil { + // The first time we get config, we just need to start the xdsClient. + x.startNewXDSClient(u) + x.config = u + return + } + // With a different BalancerName, we need to create a new xdsClient. + // If current or previous ChildPolicy is nil, then we also need to recreate a new xdsClient. + // This is because with nil ChildPolicy xdsClient will do CDS request, while non-nil won't. + if u.BalancerName != x.config.BalancerName || (u.ChildPolicy == nil) != (x.config.ChildPolicy == nil) { + x.startNewXDSClient(u) + } + // We will update the xdsLB with the new child policy, if we got a different one and it's not nil. + // The nil case will be handled when the CDS response gets processed, we will update xdsLB at that time. 
+ if !reflect.DeepEqual(u.ChildPolicy, x.config.ChildPolicy) && u.ChildPolicy != nil && x.xdsLB != nil { + x.xdsLB.HandleChildPolicy(u.ChildPolicy.Name, u.ChildPolicy.Config) + } + if !reflect.DeepEqual(u.FallBackPolicy, x.config.FallBackPolicy) && x.fallbackLB != nil { + x.fallbackLB.Close() + x.startFallBackBalancer(u) + } + x.config = u + default: + // unreachable path + panic("wrong update type") + } +} + +func (x *xdsBalancer) handleXDSClientUpdate(update interface{}) { + switch u := update.(type) { + case *cdsResp: + select { + case <-u.ctx.Done(): + return + default: + } + x.cancelFallbackAndSwitchEDSBalancerIfNecessary() + // TODO: Get the optional xds record stale timeout from OutlierDetection message. If not exist, + // reset to 0. + // x.xdsStaleTimeout = u.OutlierDetection.TO_BE_DEFINED_AND_ADDED + x.xdsLB.HandleChildPolicy(u.resp.LbPolicy.String(), nil) + case *edsResp: + select { + case <-u.ctx.Done(): + return + default: + } + x.cancelFallbackAndSwitchEDSBalancerIfNecessary() + x.xdsLB.HandleEDSResponse(u.resp) + case *loseContact: + select { + case <-u.ctx.Done(): + return + default: + } + // if we are already doing fallback monitoring, then we ignore new loseContact signal. + if x.inFallbackMonitor { + return + } + x.inFallbackMonitor = true + x.startFallbackMonitoring() + default: + panic("unexpected xds client update type") + } +} + +type connStateMgr struct { + mu sync.Mutex + curState connectivity.State + notify chan struct{} +} + +func (c *connStateMgr) updateState(s connectivity.State) { + c.mu.Lock() + defer c.mu.Unlock() + c.curState = s + if s != connectivity.Ready && c.notify != nil { + close(c.notify) + c.notify = nil + } +} + +func (c *connStateMgr) notifyWhenNotReady() <-chan struct{} { + c.mu.Lock() + defer c.mu.Unlock() + if c.curState != connectivity.Ready { + ch := make(chan struct{}) + close(ch) + return ch + } + c.notify = make(chan struct{}) + return c.notify +} + +// xdsClientConn wraps around the balancer.ClientConn passed in from grpc. The wrapping is to add +// functionality to get notification when no subconn is in READY state. +// TODO: once we have the change that keeps both edsbalancer and fallback balancer alive at the same +// time, we need to make sure to synchronize updates from both entities on the ClientConn. +type xdsClientConn struct { + updateState func(s connectivity.State) + balancer.ClientConn +} + +func (w *xdsClientConn) UpdateBalancerState(s connectivity.State, p balancer.Picker) { + w.updateState(s) + w.ClientConn.UpdateBalancerState(s, p) +} + +type addressUpdate struct { + addrs []resolver.Address + err error +} + +type subConnStateUpdate struct { + sc balancer.SubConn + state connectivity.State +} + +func (x *xdsBalancer) HandleSubConnStateChange(sc balancer.SubConn, state connectivity.State) { + update := &subConnStateUpdate{ + sc: sc, + state: state, + } + select { + case x.grpcUpdate <- update: + case <-x.ctx.Done(): + } +} + +func (x *xdsBalancer) HandleResolvedAddrs(addrs []resolver.Address, err error) { + update := &addressUpdate{ + addrs: addrs, + err: err, + } + select { + case x.grpcUpdate <- update: + case <-x.ctx.Done(): + } +} + +// TODO: once the API is merged, check whether we need to change the function name/signature here. 
+func (x *xdsBalancer) HandleBalancerConfig(config json.RawMessage) error { + var cfg xdsConfig + if err := json.Unmarshal(config, &cfg); err != nil { + return errors.New("unable to unmarshal balancer config into xds config") + } + + select { + case x.grpcUpdate <- &cfg: + case <-x.ctx.Done(): + } + return nil +} + +type cdsResp struct { + ctx context.Context + resp *cdspb.Cluster +} + +type edsResp struct { + ctx context.Context + resp *edspb.ClusterLoadAssignment +} + +func (x *xdsBalancer) newADSResponse(ctx context.Context, resp proto.Message) error { + var update interface{} + switch u := resp.(type) { + case *cdspb.Cluster: + if u.GetName() != x.cc.Target() { + return fmt.Errorf("unmatched service name, got %s, want %s", u.GetName(), x.cc.Target()) + } + if u.GetType() != cdspb.Cluster_EDS { + return fmt.Errorf("unexpected service discovery type, got %v, want %v", u.GetType(), cdspb.Cluster_EDS) + } + update = &cdsResp{ctx: ctx, resp: u} + case *edspb.ClusterLoadAssignment: + // nothing to check + update = &edsResp{ctx: ctx, resp: u} + default: + grpclog.Warningf("xdsBalancer: got a response that's neither CDS nor EDS, type = %T", u) + } + + select { + case x.xdsClientUpdate <- update: + case <-x.ctx.Done(): + case <-ctx.Done(): + } + + return nil +} + +type loseContact struct { + ctx context.Context +} + +func (x *xdsBalancer) loseContact(ctx context.Context) { + select { + case x.xdsClientUpdate <- &loseContact{ctx: ctx}: + case <-x.ctx.Done(): + case <-ctx.Done(): + } +} + +func (x *xdsBalancer) switchFallback() { + if x.xdsLB != nil { + x.xdsLB.Close() + x.xdsLB = nil + } + x.startFallBackBalancer(x.config) + x.cancelFallbackMonitoring() +} + +// x.cancelFallbackAndSwitchEDSBalancerIfNecessary() will be no-op if we have a working xds client. +// It will cancel fallback monitoring if we are in fallback monitoring stage. +// If there's no running edsBalancer currently, it will create one and initialize it. Also, it will +// shutdown the fallback balancer if there's one running. +func (x *xdsBalancer) cancelFallbackAndSwitchEDSBalancerIfNecessary() { + // xDS update will cancel fallback monitoring if we are in fallback monitoring stage. + x.cancelFallbackMonitoring() + + // xDS update will switch balancer back to edsBalancer if we are in fallback. + if x.xdsLB == nil { + if x.fallbackLB != nil { + x.fallbackLB.Close() + x.fallbackLB = nil + } + x.xdsLB = newEDSBalancer(x.cc) + if x.config.ChildPolicy != nil { + x.xdsLB.HandleChildPolicy(x.config.ChildPolicy.Name, x.config.ChildPolicy.Config) + } + } +} + +func (x *xdsBalancer) startFallBackBalancer(c *xdsConfig) { + if c.FallBackPolicy == nil { + x.startFallBackBalancer(&xdsConfig{ + FallBackPolicy: &loadBalancingConfig{ + Name: "round_robin", + }, + }) + return + } + // builder will always be non-nil, since when parse JSON into xdsConfig, we check whether the specified + // balancer is registered or not. + builder := balancer.Get(c.FallBackPolicy.Name) + x.fallbackLB = builder.Build(x.cc, x.buildOpts) + if x.fallbackInitData != nil { + // TODO: uncomment when HandleBalancerConfig API is merged. + //x.fallbackLB.HandleBalancerConfig(c.FallBackPolicy.Config) + x.fallbackLB.HandleResolvedAddrs(x.fallbackInitData.addrs, x.fallbackInitData.err) + } +} + +// There are three ways that could lead to fallback: +// 1. During startup (i.e. the first xds client is just created and attempts to contact the traffic +// director), fallback if it has not received any response from the director within the configured +// timeout. +// 2. 
After xds client loses contact with the remote, fallback if all connections to the backends are +// lost (i.e. not in state READY). +// 3. After xds client loses contact with the remote, fallback if the stale eds timeout has been +// configured through CDS and is timed out. +func (x *xdsBalancer) startFallbackMonitoring() { + if x.startup { + x.startup = false + x.timer.Reset(x.startupTimeout) + return + } + + x.noSubConnAlert = x.connStateMgr.notifyWhenNotReady() + if x.xdsStaleTimeout != nil { + if !x.timer.Stop() { + <-x.timer.C + } + x.timer.Reset(*x.xdsStaleTimeout) + } +} + +// There are two cases where fallback monitoring should be canceled: +// 1. xDS client returns a new ADS message. +// 2. fallback has been triggered. +func (x *xdsBalancer) cancelFallbackMonitoring() { + if !x.timer.Stop() { + select { + case <-x.timer.C: + // For cases where some fallback condition happens along with the timeout, but timeout loses + // the race, so we need to drain the x.timer.C. thus we don't trigger fallback again. + default: + // if the timer timeout leads us here, then there's no thing to drain from x.timer.C. + } + } + x.noSubConnAlert = nil + x.inFallbackMonitor = false +} + +func (x *xdsBalancer) Close() { + x.cancel() +} + +func createDrainedTimer() *time.Timer { + timer := time.NewTimer(0 * time.Millisecond) + // make sure initially the timer channel is blocking until reset. + if !timer.Stop() { + <-timer.C + } + return timer +} + +type xdsConfig struct { + BalancerName string + ChildPolicy *loadBalancingConfig + FallBackPolicy *loadBalancingConfig +} + +// When unmarshalling json to xdsConfig, we iterate through the childPolicy/fallbackPolicy lists +// and select the first LB policy which has been registered to be stored in the returned xdsConfig. +func (p *xdsConfig) UnmarshalJSON(data []byte) error { + var val map[string]json.RawMessage + if err := json.Unmarshal(data, &val); err != nil { + return err + } + for k, v := range val { + switch k { + case "balancerName": + if err := json.Unmarshal(v, &p.BalancerName); err != nil { + return err + } + case "childPolicy": + var lbcfgs []*loadBalancingConfig + if err := json.Unmarshal(v, &lbcfgs); err != nil { + return err + } + for _, lbcfg := range lbcfgs { + if balancer.Get(lbcfg.Name) != nil { + p.ChildPolicy = lbcfg + break + } + } + case "fallbackPolicy": + var lbcfgs []*loadBalancingConfig + if err := json.Unmarshal(v, &lbcfgs); err != nil { + return err + } + for _, lbcfg := range lbcfgs { + if balancer.Get(lbcfg.Name) != nil { + p.FallBackPolicy = lbcfg + break + } + } + } + } + return nil +} + +func (p *xdsConfig) MarshalJSON() ([]byte, error) { + return nil, nil +} + +type loadBalancingConfig struct { + Name string + Config json.RawMessage +} + +func (l *loadBalancingConfig) MarshalJSON() ([]byte, error) { + return nil, nil +} + +func (l *loadBalancingConfig) UnmarshalJSON(data []byte) error { + var cfg map[string]json.RawMessage + if err := json.Unmarshal(data, &cfg); err != nil { + return err + } + for name, config := range cfg { + l.Name = name + l.Config = config + } + return nil +} diff --git a/vendor/google.golang.org/grpc/balancer/xds/xds_client.go b/vendor/google.golang.org/grpc/balancer/xds/xds_client.go new file mode 100644 index 0000000..0cc8d4c --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/xds/xds_client.go @@ -0,0 +1,269 @@ +// +build go1.12 + +/* + * + * Copyright 2019 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package xds + +import ( + "context" + "sync" + "time" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + structpb "github.com/golang/protobuf/ptypes/struct" + "google.golang.org/grpc" + "google.golang.org/grpc/balancer" + cdspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/cds" + basepb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/core/base" + discoverypb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/discovery" + edspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/api/v2/eds" + adspb "google.golang.org/grpc/balancer/xds/internal/proto/envoy/service/discovery/v2/ads" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/internal/channelz" +) + +const ( + grpcHostname = "com.googleapis.trafficdirector.grpc_hostname" + cdsType = "type.googleapis.com/envoy.api.v2.Cluster" + edsType = "type.googleapis.com/envoy.api.v2.ClusterLoadAssignment" + endpointRequired = "endpoints_required" +) + +var ( + defaultBackoffConfig = backoff.Exponential{ + MaxDelay: 120 * time.Second, + } +) + +// client is responsible for connecting to the specified traffic director, passing the received +// ADS response from the traffic director, and sending notification when communication with the +// traffic director is lost. +type client struct { + ctx context.Context + cancel context.CancelFunc + cli adspb.AggregatedDiscoveryServiceClient + opts balancer.BuildOptions + balancerName string // the traffic director name + serviceName string // the user dial target name + enableCDS bool + newADS func(ctx context.Context, resp proto.Message) error + loseContact func(ctx context.Context) + cleanup func() + backoff backoff.Strategy + + mu sync.Mutex + cc *grpc.ClientConn +} + +func (c *client) run() { + c.dial() + c.makeADSCall() +} + +func (c *client) close() { + c.cancel() + c.mu.Lock() + if c.cc != nil { + c.cc.Close() + } + c.mu.Unlock() + c.cleanup() +} + +func (c *client) dial() { + var dopts []grpc.DialOption + if creds := c.opts.DialCreds; creds != nil { + if err := creds.OverrideServerName(c.balancerName); err == nil { + dopts = append(dopts, grpc.WithTransportCredentials(creds)) + } else { + grpclog.Warningf("xds: failed to override the server name in the credentials: %v, using Insecure", err) + dopts = append(dopts, grpc.WithInsecure()) + } + } else { + dopts = append(dopts, grpc.WithInsecure()) + } + if c.opts.Dialer != nil { + dopts = append(dopts, grpc.WithContextDialer(c.opts.Dialer)) + } + // Explicitly set pickfirst as the balancer. + dopts = append(dopts, grpc.WithBalancerName(grpc.PickFirstBalancerName)) + if channelz.IsOn() { + dopts = append(dopts, grpc.WithChannelzParentID(c.opts.ChannelzParentID)) + } + + cc, err := grpc.DialContext(c.ctx, c.balancerName, dopts...) + // Since this is a non-blocking dial, so if it fails, it due to some serious error (not network + // related) error. 
+ if err != nil { + grpclog.Fatalf("xds: failed to dial: %v", err) + } + c.mu.Lock() + select { + case <-c.ctx.Done(): + cc.Close() + default: + // only assign c.cc when xds client has not been closed, to prevent ClientConn leak. + c.cc = cc + } + c.mu.Unlock() +} + +func (c *client) newCDSRequest() *discoverypb.DiscoveryRequest { + cdsReq := &discoverypb.DiscoveryRequest{ + Node: &basepb.Node{ + Metadata: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + grpcHostname: { + Kind: &structpb.Value_StringValue{StringValue: c.serviceName}, + }, + }, + }, + }, + TypeUrl: cdsType, + } + return cdsReq +} + +func (c *client) newEDSRequest() *discoverypb.DiscoveryRequest { + edsReq := &discoverypb.DiscoveryRequest{ + Node: &basepb.Node{ + Metadata: &structpb.Struct{ + Fields: map[string]*structpb.Value{ + endpointRequired: { + Kind: &structpb.Value_BoolValue{BoolValue: c.enableCDS}, + }, + }, + }, + }, + ResourceNames: []string{c.serviceName}, + TypeUrl: edsType, + } + return edsReq +} + +func (c *client) makeADSCall() { + c.cli = adspb.NewAggregatedDiscoveryServiceClient(c.cc) + retryCount := 0 + var doRetry bool + + for { + select { + case <-c.ctx.Done(): + return + default: + } + + if doRetry { + backoffTimer := time.NewTimer(c.backoff.Backoff(retryCount)) + select { + case <-backoffTimer.C: + case <-c.ctx.Done(): + backoffTimer.Stop() + return + } + retryCount++ + } + + firstRespReceived := c.adsCallAttempt() + if firstRespReceived { + retryCount = 0 + doRetry = false + } else { + doRetry = true + } + c.loseContact(c.ctx) + } +} + +func (c *client) adsCallAttempt() (firstRespReceived bool) { + firstRespReceived = false + ctx, cancel := context.WithCancel(c.ctx) + defer cancel() + st, err := c.cli.StreamAggregatedResources(ctx, grpc.WaitForReady(true)) + if err != nil { + grpclog.Infof("xds: failed to initial ADS streaming RPC due to %v", err) + return + } + if c.enableCDS { + if err := st.Send(c.newCDSRequest()); err != nil { + // current stream is broken, start a new one. + return + } + } + if err := st.Send(c.newEDSRequest()); err != nil { + // current stream is broken, start a new one. + return + } + expectCDS := c.enableCDS + for { + resp, err := st.Recv() + if err != nil { + // current stream is broken, start a new one. + return + } + firstRespReceived = true + resources := resp.GetResources() + if len(resources) < 1 { + grpclog.Warning("xds: ADS response contains 0 resource info.") + // start a new call as server misbehaves by sending a ADS response with 0 resource info. + return + } + if resp.GetTypeUrl() == cdsType && !c.enableCDS { + grpclog.Warning("xds: received CDS response in custom plugin mode.") + // start a new call as we receive CDS response when in EDS-only mode. 
+ return + } + var adsResp ptypes.DynamicAny + if err := ptypes.UnmarshalAny(resources[0], &adsResp); err != nil { + grpclog.Warningf("xds: failed to unmarshal resources due to %v.", err) + return + } + switch adsResp.Message.(type) { + case *cdspb.Cluster: + expectCDS = false + case *edspb.ClusterLoadAssignment: + if expectCDS { + grpclog.Warningf("xds: expecting CDS response, got EDS response instead.") + return + } + } + if err := c.newADS(c.ctx, adsResp.Message); err != nil { + grpclog.Warningf("xds: processing new ADS message failed due to %v.", err) + return + } + } +} +func newXDSClient(balancerName string, serviceName string, enableCDS bool, opts balancer.BuildOptions, newADS func(context.Context, proto.Message) error, loseContact func(ctx context.Context), exitCleanup func()) *client { + c := &client{ + balancerName: balancerName, + serviceName: serviceName, + enableCDS: enableCDS, + opts: opts, + newADS: newADS, + loseContact: loseContact, + cleanup: exitCleanup, + backoff: defaultBackoffConfig, + } + + c.ctx, c.cancel = context.WithCancel(context.Background()) + + return c +} diff --git a/vendor/google.golang.org/grpc/balancer_conn_wrappers.go b/vendor/google.golang.org/grpc/balancer_conn_wrappers.go new file mode 100644 index 0000000..bc965f0 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer_conn_wrappers.go @@ -0,0 +1,315 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "fmt" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +// scStateUpdate contains the subConn and the new state it changed to. +type scStateUpdate struct { + sc balancer.SubConn + state connectivity.State +} + +// scStateUpdateBuffer is an unbounded channel for scStateChangeTuple. +// TODO make a general purpose buffer that uses interface{}. +type scStateUpdateBuffer struct { + c chan *scStateUpdate + mu sync.Mutex + backlog []*scStateUpdate +} + +func newSCStateUpdateBuffer() *scStateUpdateBuffer { + return &scStateUpdateBuffer{ + c: make(chan *scStateUpdate, 1), + } +} + +func (b *scStateUpdateBuffer) put(t *scStateUpdate) { + b.mu.Lock() + defer b.mu.Unlock() + if len(b.backlog) == 0 { + select { + case b.c <- t: + return + default: + } + } + b.backlog = append(b.backlog, t) +} + +func (b *scStateUpdateBuffer) load() { + b.mu.Lock() + defer b.mu.Unlock() + if len(b.backlog) > 0 { + select { + case b.c <- b.backlog[0]: + b.backlog[0] = nil + b.backlog = b.backlog[1:] + default: + } + } +} + +// get returns the channel that the scStateUpdate will be sent to. +// +// Upon receiving, the caller should call load to send another +// scStateChangeTuple onto the channel if there is any. +func (b *scStateUpdateBuffer) get() <-chan *scStateUpdate { + return b.c +} + +// ccBalancerWrapper is a wrapper on top of cc for balancers. +// It implements balancer.ClientConn interface. 
+type ccBalancerWrapper struct { + cc *ClientConn + balancer balancer.Balancer + stateChangeQueue *scStateUpdateBuffer + resolverUpdateCh chan *resolver.State + done chan struct{} + + mu sync.Mutex + subConns map[*acBalancerWrapper]struct{} +} + +func newCCBalancerWrapper(cc *ClientConn, b balancer.Builder, bopts balancer.BuildOptions) *ccBalancerWrapper { + ccb := &ccBalancerWrapper{ + cc: cc, + stateChangeQueue: newSCStateUpdateBuffer(), + resolverUpdateCh: make(chan *resolver.State, 1), + done: make(chan struct{}), + subConns: make(map[*acBalancerWrapper]struct{}), + } + go ccb.watcher() + ccb.balancer = b.Build(ccb, bopts) + return ccb +} + +// watcher balancer functions sequentially, so the balancer can be implemented +// lock-free. +func (ccb *ccBalancerWrapper) watcher() { + for { + select { + case t := <-ccb.stateChangeQueue.get(): + ccb.stateChangeQueue.load() + select { + case <-ccb.done: + ccb.balancer.Close() + return + default: + } + if ub, ok := ccb.balancer.(balancer.V2Balancer); ok { + ub.UpdateSubConnState(t.sc, balancer.SubConnState{ConnectivityState: t.state}) + } else { + ccb.balancer.HandleSubConnStateChange(t.sc, t.state) + } + case s := <-ccb.resolverUpdateCh: + select { + case <-ccb.done: + ccb.balancer.Close() + return + default: + } + if ub, ok := ccb.balancer.(balancer.V2Balancer); ok { + ub.UpdateResolverState(*s) + } else { + ccb.balancer.HandleResolvedAddrs(s.Addresses, nil) + } + case <-ccb.done: + } + + select { + case <-ccb.done: + ccb.balancer.Close() + ccb.mu.Lock() + scs := ccb.subConns + ccb.subConns = nil + ccb.mu.Unlock() + for acbw := range scs { + ccb.cc.removeAddrConn(acbw.getAddrConn(), errConnDrain) + } + return + default: + } + } +} + +func (ccb *ccBalancerWrapper) close() { + close(ccb.done) +} + +func (ccb *ccBalancerWrapper) handleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + // When updating addresses for a SubConn, if the address in use is not in + // the new addresses, the old ac will be tearDown() and a new ac will be + // created. tearDown() generates a state change with Shutdown state, we + // don't want the balancer to receive this state change. So before + // tearDown() on the old ac, ac.acbw (acWrapper) will be set to nil, and + // this function will be called with (nil, Shutdown). We don't need to call + // balancer method in this case. + if sc == nil { + return + } + ccb.stateChangeQueue.put(&scStateUpdate{ + sc: sc, + state: s, + }) +} + +func (ccb *ccBalancerWrapper) updateResolverState(s resolver.State) { + if ccb.cc.curBalancerName != grpclbName { + // Filter any grpclb addresses since we don't have the grpclb balancer. 
+ for i := 0; i < len(s.Addresses); { + if s.Addresses[i].Type == resolver.GRPCLB { + copy(s.Addresses[i:], s.Addresses[i+1:]) + s.Addresses = s.Addresses[:len(s.Addresses)-1] + continue + } + i++ + } + } + select { + case <-ccb.resolverUpdateCh: + default: + } + ccb.resolverUpdateCh <- &s +} + +func (ccb *ccBalancerWrapper) NewSubConn(addrs []resolver.Address, opts balancer.NewSubConnOptions) (balancer.SubConn, error) { + if len(addrs) <= 0 { + return nil, fmt.Errorf("grpc: cannot create SubConn with empty address list") + } + ccb.mu.Lock() + defer ccb.mu.Unlock() + if ccb.subConns == nil { + return nil, fmt.Errorf("grpc: ClientConn balancer wrapper was closed") + } + ac, err := ccb.cc.newAddrConn(addrs, opts) + if err != nil { + return nil, err + } + acbw := &acBalancerWrapper{ac: ac} + acbw.ac.mu.Lock() + ac.acbw = acbw + acbw.ac.mu.Unlock() + ccb.subConns[acbw] = struct{}{} + return acbw, nil +} + +func (ccb *ccBalancerWrapper) RemoveSubConn(sc balancer.SubConn) { + acbw, ok := sc.(*acBalancerWrapper) + if !ok { + return + } + ccb.mu.Lock() + defer ccb.mu.Unlock() + if ccb.subConns == nil { + return + } + delete(ccb.subConns, acbw) + ccb.cc.removeAddrConn(acbw.getAddrConn(), errConnDrain) +} + +func (ccb *ccBalancerWrapper) UpdateBalancerState(s connectivity.State, p balancer.Picker) { + ccb.mu.Lock() + defer ccb.mu.Unlock() + if ccb.subConns == nil { + return + } + // Update picker before updating state. Even though the ordering here does + // not matter, it can lead to multiple calls of Pick in the common start-up + // case where we wait for ready and then perform an RPC. If the picker is + // updated later, we could call the "connecting" picker when the state is + // updated, and then call the "ready" picker after the picker gets updated. + ccb.cc.blockingpicker.updatePicker(p) + ccb.cc.csMgr.updateState(s) +} + +func (ccb *ccBalancerWrapper) ResolveNow(o resolver.ResolveNowOption) { + ccb.cc.resolveNow(o) +} + +func (ccb *ccBalancerWrapper) Target() string { + return ccb.cc.target +} + +// acBalancerWrapper is a wrapper on top of ac for balancers. +// It implements balancer.SubConn interface. +type acBalancerWrapper struct { + mu sync.Mutex + ac *addrConn +} + +func (acbw *acBalancerWrapper) UpdateAddresses(addrs []resolver.Address) { + acbw.mu.Lock() + defer acbw.mu.Unlock() + if len(addrs) <= 0 { + acbw.ac.tearDown(errConnDrain) + return + } + if !acbw.ac.tryUpdateAddrs(addrs) { + cc := acbw.ac.cc + opts := acbw.ac.scopts + acbw.ac.mu.Lock() + // Set old ac.acbw to nil so the Shutdown state update will be ignored + // by balancer. + // + // TODO(bar) the state transition could be wrong when tearDown() old ac + // and creating new ac, fix the transition. 
+ acbw.ac.acbw = nil + acbw.ac.mu.Unlock() + acState := acbw.ac.getState() + acbw.ac.tearDown(errConnDrain) + + if acState == connectivity.Shutdown { + return + } + + ac, err := cc.newAddrConn(addrs, opts) + if err != nil { + grpclog.Warningf("acBalancerWrapper: UpdateAddresses: failed to newAddrConn: %v", err) + return + } + acbw.ac = ac + ac.mu.Lock() + ac.acbw = acbw + ac.mu.Unlock() + if acState != connectivity.Idle { + ac.connect() + } + } +} + +func (acbw *acBalancerWrapper) Connect() { + acbw.mu.Lock() + defer acbw.mu.Unlock() + acbw.ac.connect() +} + +func (acbw *acBalancerWrapper) getAddrConn() *addrConn { + acbw.mu.Lock() + defer acbw.mu.Unlock() + return acbw.ac +} diff --git a/vendor/google.golang.org/grpc/balancer_v1_wrapper.go b/vendor/google.golang.org/grpc/balancer_v1_wrapper.go new file mode 100644 index 0000000..29bda63 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer_v1_wrapper.go @@ -0,0 +1,341 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "strings" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +type balancerWrapperBuilder struct { + b Balancer // The v1 balancer. +} + +func (bwb *balancerWrapperBuilder) Build(cc balancer.ClientConn, opts balancer.BuildOptions) balancer.Balancer { + targetAddr := cc.Target() + targetSplitted := strings.Split(targetAddr, ":///") + if len(targetSplitted) >= 2 { + targetAddr = targetSplitted[1] + } + + bwb.b.Start(targetAddr, BalancerConfig{ + DialCreds: opts.DialCreds, + Dialer: opts.Dialer, + }) + _, pickfirst := bwb.b.(*pickFirst) + bw := &balancerWrapper{ + balancer: bwb.b, + pickfirst: pickfirst, + cc: cc, + targetAddr: targetAddr, + startCh: make(chan struct{}), + conns: make(map[resolver.Address]balancer.SubConn), + connSt: make(map[balancer.SubConn]*scState), + csEvltr: &balancer.ConnectivityStateEvaluator{}, + state: connectivity.Idle, + } + cc.UpdateBalancerState(connectivity.Idle, bw) + go bw.lbWatcher() + return bw +} + +func (bwb *balancerWrapperBuilder) Name() string { + return "wrapper" +} + +type scState struct { + addr Address // The v1 address type. + s connectivity.State + down func(error) +} + +type balancerWrapper struct { + balancer Balancer // The v1 balancer. + pickfirst bool + + cc balancer.ClientConn + targetAddr string // Target without the scheme. + + mu sync.Mutex + conns map[resolver.Address]balancer.SubConn + connSt map[balancer.SubConn]*scState + // This channel is closed when handling the first resolver result. + // lbWatcher blocks until this is closed, to avoid race between + // - NewSubConn is created, cc wants to notify balancer of state changes; + // - Build hasn't return, cc doesn't have access to balancer. + startCh chan struct{} + + // To aggregate the connectivity state. 
+ csEvltr *balancer.ConnectivityStateEvaluator + state connectivity.State +} + +// lbWatcher watches the Notify channel of the balancer and manages +// connections accordingly. +func (bw *balancerWrapper) lbWatcher() { + <-bw.startCh + notifyCh := bw.balancer.Notify() + if notifyCh == nil { + // There's no resolver in the balancer. Connect directly. + a := resolver.Address{ + Addr: bw.targetAddr, + Type: resolver.Backend, + } + sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{}) + if err != nil { + grpclog.Warningf("Error creating connection to %v. Err: %v", a, err) + } else { + bw.mu.Lock() + bw.conns[a] = sc + bw.connSt[sc] = &scState{ + addr: Address{Addr: bw.targetAddr}, + s: connectivity.Idle, + } + bw.mu.Unlock() + sc.Connect() + } + return + } + + for addrs := range notifyCh { + grpclog.Infof("balancerWrapper: got update addr from Notify: %v\n", addrs) + if bw.pickfirst { + var ( + oldA resolver.Address + oldSC balancer.SubConn + ) + bw.mu.Lock() + for oldA, oldSC = range bw.conns { + break + } + bw.mu.Unlock() + if len(addrs) <= 0 { + if oldSC != nil { + // Teardown old sc. + bw.mu.Lock() + delete(bw.conns, oldA) + delete(bw.connSt, oldSC) + bw.mu.Unlock() + bw.cc.RemoveSubConn(oldSC) + } + continue + } + + var newAddrs []resolver.Address + for _, a := range addrs { + newAddr := resolver.Address{ + Addr: a.Addr, + Type: resolver.Backend, // All addresses from balancer are all backends. + ServerName: "", + Metadata: a.Metadata, + } + newAddrs = append(newAddrs, newAddr) + } + if oldSC == nil { + // Create new sc. + sc, err := bw.cc.NewSubConn(newAddrs, balancer.NewSubConnOptions{}) + if err != nil { + grpclog.Warningf("Error creating connection to %v. Err: %v", newAddrs, err) + } else { + bw.mu.Lock() + // For pickfirst, there should be only one SubConn, so the + // address doesn't matter. All states updating (up and down) + // and picking should all happen on that only SubConn. + bw.conns[resolver.Address{}] = sc + bw.connSt[sc] = &scState{ + addr: addrs[0], // Use the first address. + s: connectivity.Idle, + } + bw.mu.Unlock() + sc.Connect() + } + } else { + bw.mu.Lock() + bw.connSt[oldSC].addr = addrs[0] + bw.mu.Unlock() + oldSC.UpdateAddresses(newAddrs) + } + } else { + var ( + add []resolver.Address // Addresses need to setup connections. + del []balancer.SubConn // Connections need to tear down. + ) + resAddrs := make(map[resolver.Address]Address) + for _, a := range addrs { + resAddrs[resolver.Address{ + Addr: a.Addr, + Type: resolver.Backend, // All addresses from balancer are all backends. + ServerName: "", + Metadata: a.Metadata, + }] = a + } + bw.mu.Lock() + for a := range resAddrs { + if _, ok := bw.conns[a]; !ok { + add = append(add, a) + } + } + for a, c := range bw.conns { + if _, ok := resAddrs[a]; !ok { + del = append(del, c) + delete(bw.conns, a) + // Keep the state of this sc in bw.connSt until its state becomes Shutdown. + } + } + bw.mu.Unlock() + for _, a := range add { + sc, err := bw.cc.NewSubConn([]resolver.Address{a}, balancer.NewSubConnOptions{}) + if err != nil { + grpclog.Warningf("Error creating connection to %v. 
Err: %v", a, err) + } else { + bw.mu.Lock() + bw.conns[a] = sc + bw.connSt[sc] = &scState{ + addr: resAddrs[a], + s: connectivity.Idle, + } + bw.mu.Unlock() + sc.Connect() + } + } + for _, c := range del { + bw.cc.RemoveSubConn(c) + } + } + } +} + +func (bw *balancerWrapper) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + bw.mu.Lock() + defer bw.mu.Unlock() + scSt, ok := bw.connSt[sc] + if !ok { + return + } + if s == connectivity.Idle { + sc.Connect() + } + oldS := scSt.s + scSt.s = s + if oldS != connectivity.Ready && s == connectivity.Ready { + scSt.down = bw.balancer.Up(scSt.addr) + } else if oldS == connectivity.Ready && s != connectivity.Ready { + if scSt.down != nil { + scSt.down(errConnClosing) + } + } + sa := bw.csEvltr.RecordTransition(oldS, s) + if bw.state != sa { + bw.state = sa + } + bw.cc.UpdateBalancerState(bw.state, bw) + if s == connectivity.Shutdown { + // Remove state for this sc. + delete(bw.connSt, sc) + } +} + +func (bw *balancerWrapper) HandleResolvedAddrs([]resolver.Address, error) { + bw.mu.Lock() + defer bw.mu.Unlock() + select { + case <-bw.startCh: + default: + close(bw.startCh) + } + // There should be a resolver inside the balancer. + // All updates here, if any, are ignored. +} + +func (bw *balancerWrapper) Close() { + bw.mu.Lock() + defer bw.mu.Unlock() + select { + case <-bw.startCh: + default: + close(bw.startCh) + } + bw.balancer.Close() +} + +// The picker is the balancerWrapper itself. +// It either blocks or returns error, consistent with v1 balancer Get(). +func (bw *balancerWrapper) Pick(ctx context.Context, opts balancer.PickOptions) (sc balancer.SubConn, done func(balancer.DoneInfo), err error) { + failfast := true // Default failfast is true. + if ss, ok := rpcInfoFromContext(ctx); ok { + failfast = ss.failfast + } + a, p, err := bw.balancer.Get(ctx, BalancerGetOptions{BlockingWait: !failfast}) + if err != nil { + return nil, nil, err + } + if p != nil { + done = func(balancer.DoneInfo) { p() } + defer func() { + if err != nil { + p() + } + }() + } + + bw.mu.Lock() + defer bw.mu.Unlock() + if bw.pickfirst { + // Get the first sc in conns. + for _, sc := range bw.conns { + return sc, done, nil + } + return nil, nil, balancer.ErrNoSubConnAvailable + } + sc, ok1 := bw.conns[resolver.Address{ + Addr: a.Addr, + Type: resolver.Backend, + ServerName: "", + Metadata: a.Metadata, + }] + s, ok2 := bw.connSt[sc] + if !ok1 || !ok2 { + // This can only happen due to a race where Get() returned an address + // that was subsequently removed by Notify. In this case we should + // retry always. + return nil, nil, balancer.ErrNoSubConnAvailable + } + switch s.s { + case connectivity.Ready, connectivity.Idle: + return sc, done, nil + case connectivity.Shutdown, connectivity.TransientFailure: + // If the returned sc has been shut down or is in transient failure, + // return error, and this RPC will fail or wait for another picker (if + // non-failfast). + return nil, nil, balancer.ErrTransientFailure + default: + // For other states (connecting or unknown), the v1 balancer would + // traditionally wait until ready and then issue the RPC. Returning + // ErrNoSubConnAvailable will be a slight improvement in that it will + // allow the balancer to choose another address in case others are + // connected. 
+ return nil, nil, balancer.ErrNoSubConnAvailable + } +} diff --git a/vendor/google.golang.org/grpc/benchmark/benchmain/main.go b/vendor/google.golang.org/grpc/benchmark/benchmain/main.go new file mode 100644 index 0000000..36570a4 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/benchmain/main.go @@ -0,0 +1,606 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* +Package main provides benchmark with setting flags. + +An example to run some benchmarks with profiling enabled: + +go run benchmark/benchmain/main.go -benchtime=10s -workloads=all \ + -compression=on -maxConcurrentCalls=1 -trace=off \ + -reqSizeBytes=1,1048576 -respSizeBytes=1,1048576 -networkMode=Local \ + -cpuProfile=cpuProf -memProfile=memProf -memProfileRate=10000 -resultFile=result + +As a suggestion, when creating a branch, you can run this benchmark and save the result +file "-resultFile=basePerf", and later when you at the middle of the work or finish the +work, you can get the benchmark result and compare it with the base anytime. + +Assume there are two result files names as "basePerf" and "curPerf" created by adding +-resultFile=basePerf and -resultFile=curPerf. + To format the curPerf, run: + go run benchmark/benchresult/main.go curPerf + To observe how the performance changes based on a base result, run: + go run benchmark/benchresult/main.go basePerf curPerf +*/ +package main + +import ( + "context" + "encoding/gob" + "errors" + "flag" + "fmt" + "io" + "io/ioutil" + "log" + "net" + "os" + "reflect" + "runtime" + "runtime/pprof" + "strconv" + "strings" + "sync" + "sync/atomic" + "testing" + "time" + + "google.golang.org/grpc" + bm "google.golang.org/grpc/benchmark" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/benchmark/latency" + "google.golang.org/grpc/benchmark/stats" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/test/bufconn" +) + +const ( + modeOn = "on" + modeOff = "off" + modeBoth = "both" +) + +var allCompressionModes = []string{modeOn, modeOff, modeBoth} +var allTraceModes = []string{modeOn, modeOff, modeBoth} + +const ( + workloadsUnary = "unary" + workloadsStreaming = "streaming" + workloadsUnconstrained = "unconstrained" + workloadsAll = "all" +) + +var allWorkloads = []string{workloadsUnary, workloadsStreaming, workloadsUnconstrained, workloadsAll} + +var ( + runMode = []bool{true, true, true} // {runUnary, runStream, runUnconstrained} + // When set the latency to 0 (no delay), the result is slower than the real result with no delay + // because latency simulation section has extra operations + ltc = []time.Duration{0, 40 * time.Millisecond} // if non-positive, no delay. 
+ kbps = []int{0, 10240} // if non-positive, infinite + mtu = []int{0} // if non-positive, infinite + maxConcurrentCalls = []int{1, 8, 64, 512} + reqSizeBytes = []int{1, 1024, 1024 * 1024} + respSizeBytes = []int{1, 1024, 1024 * 1024} + enableTrace []bool + benchtime time.Duration + memProfile, cpuProfile string + memProfileRate int + enableCompressor []bool + enableChannelz []bool + networkMode string + benchmarkResultFile string + networks = map[string]latency.Network{ + "Local": latency.Local, + "LAN": latency.LAN, + "WAN": latency.WAN, + "Longhaul": latency.Longhaul, + } +) + +func unaryBenchmark(startTimer func(), stopTimer func(uint64), benchFeatures stats.Features, benchtime time.Duration, s *stats.Stats) uint64 { + caller, cleanup := makeFuncUnary(benchFeatures) + defer cleanup() + return runBenchmark(caller, startTimer, stopTimer, benchFeatures, benchtime, s) +} + +func streamBenchmark(startTimer func(), stopTimer func(uint64), benchFeatures stats.Features, benchtime time.Duration, s *stats.Stats) uint64 { + caller, cleanup := makeFuncStream(benchFeatures) + defer cleanup() + return runBenchmark(caller, startTimer, stopTimer, benchFeatures, benchtime, s) +} + +func unconstrainedStreamBenchmark(benchFeatures stats.Features, warmuptime, benchtime time.Duration) (uint64, uint64) { + sender, recver, cleanup := makeFuncUnconstrainedStream(benchFeatures) + defer cleanup() + + var ( + wg sync.WaitGroup + requestCount uint64 + responseCount uint64 + ) + wg.Add(2 * benchFeatures.MaxConcurrentCalls) + + // Resets the counters once warmed up + go func() { + <-time.NewTimer(warmuptime).C + atomic.StoreUint64(&requestCount, 0) + atomic.StoreUint64(&responseCount, 0) + }() + + bmEnd := time.Now().Add(benchtime + warmuptime) + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + go func(pos int) { + for { + t := time.Now() + if t.After(bmEnd) { + break + } + sender(pos) + atomic.AddUint64(&requestCount, 1) + } + wg.Done() + }(i) + go func(pos int) { + for { + t := time.Now() + if t.After(bmEnd) { + break + } + recver(pos) + atomic.AddUint64(&responseCount, 1) + } + wg.Done() + }(i) + } + wg.Wait() + return requestCount, responseCount +} + +func makeClient(benchFeatures stats.Features) (testpb.BenchmarkServiceClient, func()) { + nw := &latency.Network{Kbps: benchFeatures.Kbps, Latency: benchFeatures.Latency, MTU: benchFeatures.Mtu} + opts := []grpc.DialOption{} + sopts := []grpc.ServerOption{} + if benchFeatures.EnableCompressor { + sopts = append(sopts, + grpc.RPCCompressor(nopCompressor{}), + grpc.RPCDecompressor(nopDecompressor{}), + ) + opts = append(opts, + grpc.WithCompressor(nopCompressor{}), + grpc.WithDecompressor(nopDecompressor{}), + ) + } + sopts = append(sopts, grpc.MaxConcurrentStreams(uint32(benchFeatures.MaxConcurrentCalls+1))) + opts = append(opts, grpc.WithInsecure()) + + var lis net.Listener + if *useBufconn { + bcLis := bufconn.Listen(256 * 1024) + lis = bcLis + opts = append(opts, grpc.WithContextDialer(func(ctx context.Context, address string) (net.Conn, error) { + return nw.ContextDialer(func(context.Context, string, string) (net.Conn, error) { + return bcLis.Dial() + })(ctx, "", "") + })) + } else { + var err error + lis, err = net.Listen("tcp", "localhost:0") + if err != nil { + grpclog.Fatalf("Failed to listen: %v", err) + } + opts = append(opts, grpc.WithContextDialer(func(ctx context.Context, address string) (net.Conn, error) { + return nw.ContextDialer((&net.Dialer{}).DialContext)(ctx, "tcp", lis.Addr().String()) + })) + } + lis = nw.Listener(lis) + stopper := 
bm.StartServer(bm.ServerInfo{Type: "protobuf", Listener: lis}, sopts...) + conn := bm.NewClientConn("" /* target not used */, opts...) + return testpb.NewBenchmarkServiceClient(conn), func() { + conn.Close() + stopper() + } +} + +func makeFuncUnary(benchFeatures stats.Features) (func(int), func()) { + tc, cleanup := makeClient(benchFeatures) + return func(int) { + unaryCaller(tc, benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + }, cleanup +} + +func makeFuncStream(benchFeatures stats.Features) (func(int), func()) { + tc, cleanup := makeClient(benchFeatures) + + streams := make([]testpb.BenchmarkService_StreamingCallClient, benchFeatures.MaxConcurrentCalls) + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + stream, err := tc.StreamingCall(context.Background()) + if err != nil { + grpclog.Fatalf("%v.StreamingCall(_) = _, %v", tc, err) + } + streams[i] = stream + } + + return func(pos int) { + streamCaller(streams[pos], benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + }, cleanup +} + +func makeFuncUnconstrainedStream(benchFeatures stats.Features) (func(int), func(int), func()) { + tc, cleanup := makeClient(benchFeatures) + + streams := make([]testpb.BenchmarkService_StreamingCallClient, benchFeatures.MaxConcurrentCalls) + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + stream, err := tc.UnconstrainedStreamingCall(context.Background()) + if err != nil { + grpclog.Fatalf("%v.UnconstrainedStreamingCall(_) = _, %v", tc, err) + } + streams[i] = stream + } + + pl := bm.NewPayload(testpb.PayloadType_COMPRESSABLE, benchFeatures.ReqSizeBytes) + req := &testpb.SimpleRequest{ + ResponseType: pl.Type, + ResponseSize: int32(benchFeatures.RespSizeBytes), + Payload: pl, + } + + return func(pos int) { + streams[pos].Send(req) + }, func(pos int) { + streams[pos].Recv() + }, cleanup +} + +func unaryCaller(client testpb.BenchmarkServiceClient, reqSize, respSize int) { + if err := bm.DoUnaryCall(client, reqSize, respSize); err != nil { + grpclog.Fatalf("DoUnaryCall failed: %v", err) + } +} + +func streamCaller(stream testpb.BenchmarkService_StreamingCallClient, reqSize, respSize int) { + if err := bm.DoStreamingRoundTrip(stream, reqSize, respSize); err != nil { + grpclog.Fatalf("DoStreamingRoundTrip failed: %v", err) + } +} + +func runBenchmark(caller func(int), startTimer func(), stopTimer func(uint64), benchFeatures stats.Features, benchtime time.Duration, s *stats.Stats) uint64 { + // Warm up connection. + for i := 0; i < 10; i++ { + caller(0) + } + // Run benchmark. + startTimer() + var ( + mu sync.Mutex + wg sync.WaitGroup + ) + wg.Add(benchFeatures.MaxConcurrentCalls) + bmEnd := time.Now().Add(benchtime) + var count uint64 + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + go func(pos int) { + for { + t := time.Now() + if t.After(bmEnd) { + break + } + start := time.Now() + caller(pos) + elapse := time.Since(start) + atomic.AddUint64(&count, 1) + mu.Lock() + s.Add(elapse) + mu.Unlock() + } + wg.Done() + }(i) + } + wg.Wait() + stopTimer(count) + return count +} + +var useBufconn = flag.Bool("bufconn", false, "Use in-memory connection instead of system network I/O") + +// Initiate main function to get settings of features. 
+func init() { + var ( + workloads, traceMode, compressorMode, readLatency, channelzOn string + readKbps, readMtu, readMaxConcurrentCalls intSliceType + readReqSizeBytes, readRespSizeBytes intSliceType + ) + flag.StringVar(&workloads, "workloads", workloadsAll, + fmt.Sprintf("Workloads to execute - One of: %v", strings.Join(allWorkloads, ", "))) + flag.StringVar(&traceMode, "trace", modeOff, + fmt.Sprintf("Trace mode - One of: %v", strings.Join(allTraceModes, ", "))) + flag.StringVar(&readLatency, "latency", "", "Simulated one-way network latency - may be a comma-separated list") + flag.StringVar(&channelzOn, "channelz", modeOff, "whether channelz should be turned on") + flag.DurationVar(&benchtime, "benchtime", time.Second, "Configures the amount of time to run each benchmark") + flag.Var(&readKbps, "kbps", "Simulated network throughput (in kbps) - may be a comma-separated list") + flag.Var(&readMtu, "mtu", "Simulated network MTU (Maximum Transmission Unit) - may be a comma-separated list") + flag.Var(&readMaxConcurrentCalls, "maxConcurrentCalls", "Number of concurrent RPCs during benchmarks") + flag.Var(&readReqSizeBytes, "reqSizeBytes", "Request size in bytes - may be a comma-separated list") + flag.Var(&readRespSizeBytes, "respSizeBytes", "Response size in bytes - may be a comma-separated list") + flag.StringVar(&memProfile, "memProfile", "", "Enables memory profiling output to the filename provided.") + flag.IntVar(&memProfileRate, "memProfileRate", 512*1024, "Configures the memory profiling rate. \n"+ + "memProfile should be set before setting profile rate. To include every allocated block in the profile, "+ + "set MemProfileRate to 1. To turn off profiling entirely, set MemProfileRate to 0. 512 * 1024 by default.") + flag.StringVar(&cpuProfile, "cpuProfile", "", "Enables CPU profiling output to the filename provided") + flag.StringVar(&compressorMode, "compression", modeOff, + fmt.Sprintf("Compression mode - One of: %v", strings.Join(allCompressionModes, ", "))) + flag.StringVar(&benchmarkResultFile, "resultFile", "", "Save the benchmark result into a binary file") + flag.StringVar(&networkMode, "networkMode", "", "Network mode includes LAN, WAN, Local and Longhaul") + flag.Parse() + if flag.NArg() != 0 { + log.Fatal("Error: unparsed arguments: ", flag.Args()) + } + switch workloads { + case workloadsUnary: + runMode[0] = true + runMode[1] = false + runMode[2] = false + case workloadsStreaming: + runMode[0] = false + runMode[1] = true + runMode[2] = false + case workloadsUnconstrained: + runMode[0] = false + runMode[1] = false + runMode[2] = true + case workloadsAll: + runMode[0] = true + runMode[1] = true + runMode[2] = true + default: + log.Fatalf("Unknown workloads setting: %v (want one of: %v)", + workloads, strings.Join(allWorkloads, ", ")) + } + enableCompressor = setMode(compressorMode) + enableTrace = setMode(traceMode) + enableChannelz = setMode(channelzOn) + // Time input formats as (time + unit). + readTimeFromInput(<c, readLatency) + readIntFromIntSlice(&kbps, readKbps) + readIntFromIntSlice(&mtu, readMtu) + readIntFromIntSlice(&maxConcurrentCalls, readMaxConcurrentCalls) + readIntFromIntSlice(&reqSizeBytes, readReqSizeBytes) + readIntFromIntSlice(&respSizeBytes, readRespSizeBytes) + // Re-write latency, kpbs and mtu if network mode is set. 
+ if network, ok := networks[networkMode]; ok { + ltc = []time.Duration{network.Latency} + kbps = []int{network.Kbps} + mtu = []int{network.MTU} + } +} + +func setMode(name string) []bool { + switch name { + case modeOn: + return []bool{true} + case modeOff: + return []bool{false} + case modeBoth: + return []bool{false, true} + default: + log.Fatalf("Unknown %s setting: %v (want one of: %v)", + name, name, strings.Join(allCompressionModes, ", ")) + return []bool{} + } +} + +type intSliceType []int + +func (intSlice *intSliceType) String() string { + return fmt.Sprintf("%v", *intSlice) +} + +func (intSlice *intSliceType) Set(value string) error { + if len(*intSlice) > 0 { + return errors.New("interval flag already set") + } + for _, num := range strings.Split(value, ",") { + next, err := strconv.Atoi(num) + if err != nil { + return err + } + *intSlice = append(*intSlice, next) + } + return nil +} + +func readIntFromIntSlice(values *[]int, replace intSliceType) { + // If not set replace in the flag, just return to run the default settings. + if len(replace) == 0 { + return + } + *values = replace +} + +func readTimeFromInput(values *[]time.Duration, replace string) { + if strings.Compare(replace, "") != 0 { + *values = []time.Duration{} + for _, ltc := range strings.Split(replace, ",") { + duration, err := time.ParseDuration(ltc) + if err != nil { + log.Fatal(err.Error()) + } + *values = append(*values, duration) + } + } +} + +func printThroughput(requestCount uint64, requestSize int, responseCount uint64, responseSize int) { + requestThroughput := float64(requestCount) * float64(requestSize) * 8 / benchtime.Seconds() + responseThroughput := float64(responseCount) * float64(responseSize) * 8 / benchtime.Seconds() + fmt.Printf("Number of requests: %v\tRequest throughput: %v bit/s\n", requestCount, requestThroughput) + fmt.Printf("Number of responses: %v\tResponse throughput: %v bit/s\n", responseCount, responseThroughput) + fmt.Println() +} + +func main() { + before() + featuresPos := make([]int, 9) + // 0:enableTracing 1:ltc 2:kbps 3:mtu 4:maxC 5:reqSize 6:respSize + featuresNum := []int{len(enableTrace), len(ltc), len(kbps), len(mtu), + len(maxConcurrentCalls), len(reqSizeBytes), len(respSizeBytes), len(enableCompressor), len(enableChannelz)} + initalPos := make([]int, len(featuresPos)) + s := stats.NewStats(10) + s.SortLatency() + var memStats runtime.MemStats + var results testing.BenchmarkResult + var startAllocs, startBytes uint64 + var startTime time.Time + start := true + var startTimer = func() { + runtime.ReadMemStats(&memStats) + startAllocs = memStats.Mallocs + startBytes = memStats.TotalAlloc + startTime = time.Now() + } + var stopTimer = func(count uint64) { + runtime.ReadMemStats(&memStats) + results = testing.BenchmarkResult{N: int(count), T: time.Since(startTime), + Bytes: 0, MemAllocs: memStats.Mallocs - startAllocs, MemBytes: memStats.TotalAlloc - startBytes} + } + sharedPos := make([]bool, len(featuresPos)) + for i := 0; i < len(featuresPos); i++ { + if featuresNum[i] <= 1 { + sharedPos[i] = true + } + } + + // Run benchmarks + resultSlice := []stats.BenchResults{} + for !reflect.DeepEqual(featuresPos, initalPos) || start { + start = false + benchFeature := stats.Features{ + NetworkMode: networkMode, + EnableTrace: enableTrace[featuresPos[0]], + Latency: ltc[featuresPos[1]], + Kbps: kbps[featuresPos[2]], + Mtu: mtu[featuresPos[3]], + MaxConcurrentCalls: maxConcurrentCalls[featuresPos[4]], + ReqSizeBytes: reqSizeBytes[featuresPos[5]], + RespSizeBytes: 
respSizeBytes[featuresPos[6]], + EnableCompressor: enableCompressor[featuresPos[7]], + EnableChannelz: enableChannelz[featuresPos[8]], + } + + grpc.EnableTracing = enableTrace[featuresPos[0]] + if enableChannelz[featuresPos[8]] { + channelz.TurnOn() + } + if runMode[0] { + count := unaryBenchmark(startTimer, stopTimer, benchFeature, benchtime, s) + s.SetBenchmarkResult("Unary", benchFeature, results.N, + results.AllocedBytesPerOp(), results.AllocsPerOp(), sharedPos) + fmt.Println(s.BenchString()) + fmt.Println(s.String()) + printThroughput(count, benchFeature.ReqSizeBytes, count, benchFeature.RespSizeBytes) + resultSlice = append(resultSlice, s.GetBenchmarkResults()) + s.Clear() + } + if runMode[1] { + count := streamBenchmark(startTimer, stopTimer, benchFeature, benchtime, s) + s.SetBenchmarkResult("Stream", benchFeature, results.N, + results.AllocedBytesPerOp(), results.AllocsPerOp(), sharedPos) + fmt.Println(s.BenchString()) + fmt.Println(s.String()) + printThroughput(count, benchFeature.ReqSizeBytes, count, benchFeature.RespSizeBytes) + resultSlice = append(resultSlice, s.GetBenchmarkResults()) + s.Clear() + } + if runMode[2] { + requestCount, responseCount := unconstrainedStreamBenchmark(benchFeature, time.Second, benchtime) + fmt.Printf("Unconstrained Stream-%v\n", benchFeature) + printThroughput(requestCount, benchFeature.ReqSizeBytes, responseCount, benchFeature.RespSizeBytes) + } + bm.AddOne(featuresPos, featuresNum) + } + after(resultSlice) +} + +func before() { + if memProfile != "" { + runtime.MemProfileRate = memProfileRate + } + if cpuProfile != "" { + f, err := os.Create(cpuProfile) + if err != nil { + fmt.Fprintf(os.Stderr, "testing: %s\n", err) + return + } + if err := pprof.StartCPUProfile(f); err != nil { + fmt.Fprintf(os.Stderr, "testing: can't start cpu profile: %s\n", err) + f.Close() + return + } + } +} + +func after(data []stats.BenchResults) { + if cpuProfile != "" { + pprof.StopCPUProfile() // flushes profile to disk + } + if memProfile != "" { + f, err := os.Create(memProfile) + if err != nil { + fmt.Fprintf(os.Stderr, "testing: %s\n", err) + os.Exit(2) + } + runtime.GC() // materialize all statistics + if err = pprof.WriteHeapProfile(f); err != nil { + fmt.Fprintf(os.Stderr, "testing: can't write heap profile %s: %s\n", memProfile, err) + os.Exit(2) + } + f.Close() + } + if benchmarkResultFile != "" { + f, err := os.Create(benchmarkResultFile) + if err != nil { + log.Fatalf("testing: can't write benchmark result %s: %s\n", benchmarkResultFile, err) + } + dataEncoder := gob.NewEncoder(f) + dataEncoder.Encode(data) + f.Close() + } +} + +// nopCompressor is a compressor that just copies data. +type nopCompressor struct{} + +func (nopCompressor) Do(w io.Writer, p []byte) error { + n, err := w.Write(p) + if err != nil { + return err + } + if n != len(p) { + return fmt.Errorf("nopCompressor.Write: wrote %v bytes; want %v", n, len(p)) + } + return nil +} + +func (nopCompressor) Type() string { return "nop" } + +// nopDecompressor is a decompressor that just copies data. +type nopDecompressor struct{} + +func (nopDecompressor) Do(r io.Reader) ([]byte, error) { return ioutil.ReadAll(r) } +func (nopDecompressor) Type() string { return "nop" } diff --git a/vendor/google.golang.org/grpc/benchmark/benchmark.go b/vendor/google.golang.org/grpc/benchmark/benchmark.go new file mode 100644 index 0000000..511fc4f --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/benchmark.go @@ -0,0 +1,436 @@ +/* + * + * Copyright 2014 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc -I grpc_testing --go_out=plugins=grpc:grpc_testing grpc_testing/control.proto grpc_testing/messages.proto grpc_testing/payloads.proto grpc_testing/services.proto grpc_testing/stats.proto + +/* +Package benchmark implements the building blocks to setup end-to-end gRPC benchmarks. +*/ +package benchmark + +import ( + "context" + "fmt" + "io" + "log" + "net" + "sync" + "testing" + "time" + + "google.golang.org/grpc" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/benchmark/latency" + "google.golang.org/grpc/benchmark/stats" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +// AddOne add 1 to the features slice +func AddOne(features []int, featuresMaxPosition []int) { + for i := len(features) - 1; i >= 0; i-- { + features[i] = (features[i] + 1) + if features[i]/featuresMaxPosition[i] == 0 { + break + } + features[i] = features[i] % featuresMaxPosition[i] + } +} + +// Allows reuse of the same testpb.Payload object. +func setPayload(p *testpb.Payload, t testpb.PayloadType, size int) { + if size < 0 { + grpclog.Fatalf("Requested a response with invalid length %d", size) + } + body := make([]byte, size) + switch t { + case testpb.PayloadType_COMPRESSABLE: + case testpb.PayloadType_UNCOMPRESSABLE: + grpclog.Fatalf("PayloadType UNCOMPRESSABLE is not supported") + default: + grpclog.Fatalf("Unsupported payload type: %d", t) + } + p.Type = t + p.Body = body +} + +// NewPayload creates a payload with the given type and size. +func NewPayload(t testpb.PayloadType, size int) *testpb.Payload { + p := new(testpb.Payload) + setPayload(p, t, size) + return p +} + +type testServer struct { +} + +func (s *testServer) UnaryCall(ctx context.Context, in *testpb.SimpleRequest) (*testpb.SimpleResponse, error) { + return &testpb.SimpleResponse{ + Payload: NewPayload(in.ResponseType, int(in.ResponseSize)), + }, nil +} + +func (s *testServer) StreamingCall(stream testpb.BenchmarkService_StreamingCallServer) error { + response := &testpb.SimpleResponse{ + Payload: new(testpb.Payload), + } + in := new(testpb.SimpleRequest) + for { + // use ServerStream directly to reuse the same testpb.SimpleRequest object + err := stream.(grpc.ServerStream).RecvMsg(in) + if err == io.EOF { + // read done. + return nil + } + if err != nil { + return err + } + setPayload(response.Payload, in.ResponseType, int(in.ResponseSize)) + if err := stream.Send(response); err != nil { + return err + } + } +} + +func (s *testServer) UnconstrainedStreamingCall(stream testpb.BenchmarkService_UnconstrainedStreamingCallServer) error { + in := new(testpb.SimpleRequest) + // Receive a message to learn response type and size. + err := stream.RecvMsg(in) + if err == io.EOF { + // read done. 
+ return nil + } + if err != nil { + return err + } + + response := &testpb.SimpleResponse{ + Payload: new(testpb.Payload), + } + setPayload(response.Payload, in.ResponseType, int(in.ResponseSize)) + + go func() { + for { + // Using RecvMsg rather than Recv to prevent reallocation of SimpleRequest. + err := stream.RecvMsg(in) + switch status.Code(err) { + case codes.Canceled: + case codes.OK: + default: + log.Fatalf("server recv error: %v", err) + } + } + }() + + go func() { + for { + err := stream.Send(response) + switch status.Code(err) { + case codes.Unavailable: + case codes.OK: + default: + log.Fatalf("server send error: %v", err) + } + } + }() + + <-stream.Context().Done() + return stream.Context().Err() +} + +// byteBufServer is a gRPC server that sends and receives byte buffer. +// The purpose is to benchmark the gRPC performance without protobuf serialization/deserialization overhead. +type byteBufServer struct { + respSize int32 +} + +// UnaryCall is an empty function and is not used for benchmark. +// If bytebuf UnaryCall benchmark is needed later, the function body needs to be updated. +func (s *byteBufServer) UnaryCall(ctx context.Context, in *testpb.SimpleRequest) (*testpb.SimpleResponse, error) { + return &testpb.SimpleResponse{}, nil +} + +func (s *byteBufServer) StreamingCall(stream testpb.BenchmarkService_StreamingCallServer) error { + for { + var in []byte + err := stream.(grpc.ServerStream).RecvMsg(&in) + if err == io.EOF { + return nil + } + if err != nil { + return err + } + out := make([]byte, s.respSize) + if err := stream.(grpc.ServerStream).SendMsg(&out); err != nil { + return err + } + } +} + +func (s *byteBufServer) UnconstrainedStreamingCall(stream testpb.BenchmarkService_UnconstrainedStreamingCallServer) error { + for { + var in []byte + err := stream.(grpc.ServerStream).RecvMsg(&in) + if err == io.EOF { + return nil + } + if err != nil { + return err + } + out := make([]byte, s.respSize) + if err := stream.(grpc.ServerStream).SendMsg(&out); err != nil { + return err + } + } +} + +// ServerInfo contains the information to create a gRPC benchmark server. +type ServerInfo struct { + // Type is the type of the server. + // It should be "protobuf" or "bytebuf". + Type string + + // Metadata is an optional configuration. + // For "protobuf", it's ignored. + // For "bytebuf", it should be an int representing response size. + Metadata interface{} + + // Listener is the network listener for the server to use + Listener net.Listener +} + +// StartServer starts a gRPC server serving a benchmark service according to info. +// It returns a function to stop the server. +func StartServer(info ServerInfo, opts ...grpc.ServerOption) func() { + opts = append(opts, grpc.WriteBufferSize(128*1024)) + opts = append(opts, grpc.ReadBufferSize(128*1024)) + s := grpc.NewServer(opts...) + switch info.Type { + case "protobuf": + testpb.RegisterBenchmarkServiceServer(s, &testServer{}) + case "bytebuf": + respSize, ok := info.Metadata.(int32) + if !ok { + grpclog.Fatalf("failed to StartServer, invalid metadata: %v, for Type: %v", info.Metadata, info.Type) + } + testpb.RegisterBenchmarkServiceServer(s, &byteBufServer{respSize: respSize}) + default: + grpclog.Fatalf("failed to StartServer, unknown Type: %v", info.Type) + } + go s.Serve(info.Listener) + return func() { + s.Stop() + } +} + +// DoUnaryCall performs an unary RPC with given stub and request and response sizes. 
+func DoUnaryCall(tc testpb.BenchmarkServiceClient, reqSize, respSize int) error { + pl := NewPayload(testpb.PayloadType_COMPRESSABLE, reqSize) + req := &testpb.SimpleRequest{ + ResponseType: pl.Type, + ResponseSize: int32(respSize), + Payload: pl, + } + if _, err := tc.UnaryCall(context.Background(), req); err != nil { + return fmt.Errorf("/BenchmarkService/UnaryCall(_, _) = _, %v, want _, ", err) + } + return nil +} + +// DoStreamingRoundTrip performs a round trip for a single streaming rpc. +func DoStreamingRoundTrip(stream testpb.BenchmarkService_StreamingCallClient, reqSize, respSize int) error { + pl := NewPayload(testpb.PayloadType_COMPRESSABLE, reqSize) + req := &testpb.SimpleRequest{ + ResponseType: pl.Type, + ResponseSize: int32(respSize), + Payload: pl, + } + if err := stream.Send(req); err != nil { + return fmt.Errorf("/BenchmarkService/StreamingCall.Send(_) = %v, want ", err) + } + if _, err := stream.Recv(); err != nil { + // EOF is a valid error here. + if err == io.EOF { + return nil + } + return fmt.Errorf("/BenchmarkService/StreamingCall.Recv(_) = %v, want ", err) + } + return nil +} + +// DoByteBufStreamingRoundTrip performs a round trip for a single streaming rpc, using a custom codec for byte buffer. +func DoByteBufStreamingRoundTrip(stream testpb.BenchmarkService_StreamingCallClient, reqSize, respSize int) error { + out := make([]byte, reqSize) + if err := stream.(grpc.ClientStream).SendMsg(&out); err != nil { + return fmt.Errorf("/BenchmarkService/StreamingCall.(ClientStream).SendMsg(_) = %v, want ", err) + } + var in []byte + if err := stream.(grpc.ClientStream).RecvMsg(&in); err != nil { + // EOF is a valid error here. + if err == io.EOF { + return nil + } + return fmt.Errorf("/BenchmarkService/StreamingCall.(ClientStream).RecvMsg(_) = %v, want ", err) + } + return nil +} + +// NewClientConn creates a gRPC client connection to addr. +func NewClientConn(addr string, opts ...grpc.DialOption) *grpc.ClientConn { + return NewClientConnWithContext(context.Background(), addr, opts...) +} + +// NewClientConnWithContext creates a gRPC client connection to addr using ctx. +func NewClientConnWithContext(ctx context.Context, addr string, opts ...grpc.DialOption) *grpc.ClientConn { + opts = append(opts, grpc.WithWriteBufferSize(128*1024)) + opts = append(opts, grpc.WithReadBufferSize(128*1024)) + conn, err := grpc.DialContext(ctx, addr, opts...) + if err != nil { + grpclog.Fatalf("NewClientConn(%q) failed to create a ClientConn %v", addr, err) + } + return conn +} + +func runUnary(b *testing.B, benchFeatures stats.Features) { + s := stats.AddStats(b, 38) + nw := &latency.Network{Kbps: benchFeatures.Kbps, Latency: benchFeatures.Latency, MTU: benchFeatures.Mtu} + lis, err := net.Listen("tcp", "localhost:0") + if err != nil { + grpclog.Fatalf("Failed to listen: %v", err) + } + target := lis.Addr().String() + lis = nw.Listener(lis) + stopper := StartServer(ServerInfo{Type: "protobuf", Listener: lis}, grpc.MaxConcurrentStreams(uint32(benchFeatures.MaxConcurrentCalls+1))) + defer stopper() + conn := NewClientConn( + target, grpc.WithInsecure(), + grpc.WithContextDialer(func(ctx context.Context, address string) (net.Conn, error) { + return nw.ContextDialer((&net.Dialer{}).DialContext)(ctx, "tcp", address) + }), + ) + tc := testpb.NewBenchmarkServiceClient(conn) + + // Warm up connection. 
+ for i := 0; i < 10; i++ { + unaryCaller(tc, benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + } + ch := make(chan int, benchFeatures.MaxConcurrentCalls*4) + var ( + mu sync.Mutex + wg sync.WaitGroup + ) + wg.Add(benchFeatures.MaxConcurrentCalls) + + // Distribute the b.N calls over maxConcurrentCalls workers. + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + go func() { + for range ch { + start := time.Now() + unaryCaller(tc, benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + elapse := time.Since(start) + mu.Lock() + s.Add(elapse) + mu.Unlock() + } + wg.Done() + }() + } + b.ResetTimer() + for i := 0; i < b.N; i++ { + ch <- i + } + close(ch) + wg.Wait() + b.StopTimer() + conn.Close() +} + +func runStream(b *testing.B, benchFeatures stats.Features) { + s := stats.AddStats(b, 38) + nw := &latency.Network{Kbps: benchFeatures.Kbps, Latency: benchFeatures.Latency, MTU: benchFeatures.Mtu} + lis, err := net.Listen("tcp", "localhost:0") + if err != nil { + grpclog.Fatalf("Failed to listen: %v", err) + } + target := lis.Addr().String() + lis = nw.Listener(lis) + stopper := StartServer(ServerInfo{Type: "protobuf", Listener: lis}, grpc.MaxConcurrentStreams(uint32(benchFeatures.MaxConcurrentCalls+1))) + defer stopper() + conn := NewClientConn( + target, grpc.WithInsecure(), + grpc.WithContextDialer(func(ctx context.Context, address string) (net.Conn, error) { + return nw.ContextDialer((&net.Dialer{}).DialContext)(ctx, "tcp", address) + }), + ) + tc := testpb.NewBenchmarkServiceClient(conn) + + // Warm up connection. + stream, err := tc.StreamingCall(context.Background()) + if err != nil { + b.Fatalf("%v.StreamingCall(_) = _, %v", tc, err) + } + for i := 0; i < 10; i++ { + streamCaller(stream, benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + } + + ch := make(chan struct{}, benchFeatures.MaxConcurrentCalls*4) + var ( + mu sync.Mutex + wg sync.WaitGroup + ) + wg.Add(benchFeatures.MaxConcurrentCalls) + + // Distribute the b.N calls over maxConcurrentCalls workers. + for i := 0; i < benchFeatures.MaxConcurrentCalls; i++ { + stream, err := tc.StreamingCall(context.Background()) + if err != nil { + b.Fatalf("%v.StreamingCall(_) = _, %v", tc, err) + } + go func() { + for range ch { + start := time.Now() + streamCaller(stream, benchFeatures.ReqSizeBytes, benchFeatures.RespSizeBytes) + elapse := time.Since(start) + mu.Lock() + s.Add(elapse) + mu.Unlock() + } + wg.Done() + }() + } + b.ResetTimer() + for i := 0; i < b.N; i++ { + ch <- struct{}{} + } + close(ch) + wg.Wait() + b.StopTimer() + conn.Close() +} +func unaryCaller(client testpb.BenchmarkServiceClient, reqSize, respSize int) { + if err := DoUnaryCall(client, reqSize, respSize); err != nil { + grpclog.Fatalf("DoUnaryCall failed: %v", err) + } +} + +func streamCaller(stream testpb.BenchmarkService_StreamingCallClient, reqSize, respSize int) { + if err := DoStreamingRoundTrip(stream, reqSize, respSize); err != nil { + grpclog.Fatalf("DoStreamingRoundTrip failed: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/benchmark/benchresult/main.go b/vendor/google.golang.org/grpc/benchmark/benchresult/main.go new file mode 100644 index 0000000..40226cf --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/benchresult/main.go @@ -0,0 +1,133 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* +To format the benchmark result: + go run benchmark/benchresult/main.go resultfile + +To see the performance change based on a old result: + go run benchmark/benchresult/main.go resultfile_old resultfile +It will print the comparison result of intersection benchmarks between two files. + +*/ +package main + +import ( + "encoding/gob" + "fmt" + "log" + "os" + "strconv" + "strings" + "time" + + "google.golang.org/grpc/benchmark/stats" +) + +func createMap(fileName string, m map[string]stats.BenchResults) { + f, err := os.Open(fileName) + if err != nil { + log.Fatalf("Read file %s error: %s\n", fileName, err) + } + defer f.Close() + var data []stats.BenchResults + decoder := gob.NewDecoder(f) + if err = decoder.Decode(&data); err != nil { + log.Fatalf("Decode file %s error: %s\n", fileName, err) + } + for _, d := range data { + m[d.RunMode+"-"+d.Features.String()] = d + } +} + +func intChange(title string, val1, val2 int64) string { + return fmt.Sprintf("%10s %12s %12s %8.2f%%\n", title, strconv.FormatInt(val1, 10), + strconv.FormatInt(val2, 10), float64(val2-val1)*100/float64(val1)) +} + +func timeChange(title int, val1, val2 time.Duration) string { + return fmt.Sprintf("%10s %12s %12s %8.2f%%\n", strconv.Itoa(title)+" latency", val1.String(), + val2.String(), float64(val2-val1)*100/float64(val1)) +} + +func compareTwoMap(m1, m2 map[string]stats.BenchResults) { + for k2, v2 := range m2 { + if v1, ok := m1[k2]; ok { + changes := k2 + "\n" + changes += fmt.Sprintf("%10s %12s %12s %8s\n", "Title", "Before", "After", "Percentage") + changes += intChange("Bytes/op", v1.AllocedBytesPerOp, v2.AllocedBytesPerOp) + changes += intChange("Allocs/op", v1.AllocsPerOp, v2.AllocsPerOp) + changes += timeChange(v1.Latency[1].Percent, v1.Latency[1].Value, v2.Latency[1].Value) + changes += timeChange(v1.Latency[2].Percent, v1.Latency[2].Value, v2.Latency[2].Value) + fmt.Printf("%s\n", changes) + } + } +} + +func compareBenchmark(file1, file2 string) { + var BenchValueFile1 map[string]stats.BenchResults + var BenchValueFile2 map[string]stats.BenchResults + BenchValueFile1 = make(map[string]stats.BenchResults) + BenchValueFile2 = make(map[string]stats.BenchResults) + + createMap(file1, BenchValueFile1) + createMap(file2, BenchValueFile2) + + compareTwoMap(BenchValueFile1, BenchValueFile2) +} + +func printline(benchName, ltc50, ltc90, allocByte, allocsOp interface{}) { + fmt.Printf("%-80v%12v%12v%12v%12v\n", benchName, ltc50, ltc90, allocByte, allocsOp) +} + +func formatBenchmark(fileName string) { + f, err := os.Open(fileName) + if err != nil { + log.Fatalf("Read file %s error: %s\n", fileName, err) + } + defer f.Close() + var data []stats.BenchResults + decoder := gob.NewDecoder(f) + if err = decoder.Decode(&data); err != nil { + log.Fatalf("Decode file %s error: %s\n", fileName, err) + } + if len(data) == 0 { + log.Fatalf("No data in file %s\n", fileName) + } + printPos := data[0].SharedPosion + fmt.Println("\nShared features:\n" + strings.Repeat("-", 20)) + fmt.Print(stats.PartialPrintString(printPos, data[0].Features, true)) + fmt.Println(strings.Repeat("-", 35)) + for i := 0; 
i < len(data[0].SharedPosion); i++ { + printPos[i] = !printPos[i] + } + printline("Name", "latency-50", "latency-90", "Alloc (B)", "Alloc (#)") + for _, d := range data { + name := d.RunMode + stats.PartialPrintString(printPos, d.Features, false) + printline(name, d.Latency[1].Value.String(), d.Latency[2].Value.String(), + d.AllocedBytesPerOp, d.AllocsPerOp) + } +} + +func main() { + if len(os.Args) == 2 { + formatBenchmark(os.Args[1]) + } else { + compareBenchmark(os.Args[1], os.Args[2]) + } +} diff --git a/vendor/google.golang.org/grpc/benchmark/client/main.go b/vendor/google.golang.org/grpc/benchmark/client/main.go new file mode 100644 index 0000000..7620243 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/client/main.go @@ -0,0 +1,207 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* +Package main provides a client used for benchmarking. Before running the +client, the user would need to launch the grpc server. + +To start the server before running the client, you can run look for the command +under the following file: + + benchmark/server/main.go + +After starting the server, the client can be run. An example of how to run this +command is: + +go run benchmark/client/main.go -test_name=grpc_test + +If the server is running on a different port than 50051, then use the port flag +for the client to hit the server on the correct port. +An example for how to run this command on a different port can be found here: + +go run benchmark/client/main.go -test_name=grpc_test -port=8080 +*/ +package main + +import ( + "context" + "flag" + "fmt" + "os" + "runtime" + "runtime/pprof" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/benchmark" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/benchmark/stats" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/syscall" +) + +var ( + port = flag.String("port", "50051", "Localhost port to connect to.") + numRPC = flag.Int("r", 1, "The number of concurrent RPCs on each connection.") + numConn = flag.Int("c", 1, "The number of parallel connections.") + warmupDur = flag.Int("w", 10, "Warm-up duration in seconds") + duration = flag.Int("d", 60, "Benchmark duration in seconds") + rqSize = flag.Int("req", 1, "Request message size in bytes.") + rspSize = flag.Int("resp", 1, "Response message size in bytes.") + rpcType = flag.String("rpc_type", "unary", + `Configure different client rpc type. 
Valid options are: + unary; + streaming.`) + testName = flag.String("test_name", "", "Name of the test used for creating profiles.") + wg sync.WaitGroup + hopts = stats.HistogramOptions{ + NumBuckets: 2495, + GrowthFactor: .01, + } + mu sync.Mutex + hists []*stats.Histogram +) + +func main() { + flag.Parse() + if *testName == "" { + grpclog.Fatalf("test_name not set") + } + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(*rspSize), + Payload: &testpb.Payload{ + Type: testpb.PayloadType_COMPRESSABLE, + Body: make([]byte, *rqSize), + }, + } + connectCtx, connectCancel := context.WithDeadline(context.Background(), time.Now().Add(5*time.Second)) + defer connectCancel() + ccs := buildConnections(connectCtx) + warmDeadline := time.Now().Add(time.Duration(*warmupDur) * time.Second) + endDeadline := warmDeadline.Add(time.Duration(*duration) * time.Second) + cf, err := os.Create("/tmp/" + *testName + ".cpu") + if err != nil { + grpclog.Fatalf("Error creating file: %v", err) + } + defer cf.Close() + pprof.StartCPUProfile(cf) + cpuBeg := syscall.GetCPUTime() + for _, cc := range ccs { + runWithConn(cc, req, warmDeadline, endDeadline) + } + wg.Wait() + cpu := time.Duration(syscall.GetCPUTime() - cpuBeg) + pprof.StopCPUProfile() + mf, err := os.Create("/tmp/" + *testName + ".mem") + if err != nil { + grpclog.Fatalf("Error creating file: %v", err) + } + defer mf.Close() + runtime.GC() // materialize all statistics + if err := pprof.WriteHeapProfile(mf); err != nil { + grpclog.Fatalf("Error writing memory profile: %v", err) + } + hist := stats.NewHistogram(hopts) + for _, h := range hists { + hist.Merge(h) + } + parseHist(hist) + fmt.Println("Client CPU utilization:", cpu) + fmt.Println("Client CPU profile:", cf.Name()) + fmt.Println("Client Mem Profile:", mf.Name()) +} + +func buildConnections(ctx context.Context) []*grpc.ClientConn { + ccs := make([]*grpc.ClientConn, *numConn) + for i := range ccs { + ccs[i] = benchmark.NewClientConnWithContext(ctx, "localhost:"+*port, grpc.WithInsecure(), grpc.WithBlock()) + } + return ccs +} + +func runWithConn(cc *grpc.ClientConn, req *testpb.SimpleRequest, warmDeadline, endDeadline time.Time) { + for i := 0; i < *numRPC; i++ { + wg.Add(1) + go func() { + defer wg.Done() + caller := makeCaller(cc, req) + hist := stats.NewHistogram(hopts) + for { + start := time.Now() + if start.After(endDeadline) { + mu.Lock() + hists = append(hists, hist) + mu.Unlock() + return + } + caller() + elapsed := time.Since(start) + if start.After(warmDeadline) { + hist.Add(elapsed.Nanoseconds()) + } + } + }() + } +} + +func makeCaller(cc *grpc.ClientConn, req *testpb.SimpleRequest) func() { + client := testpb.NewBenchmarkServiceClient(cc) + if *rpcType == "unary" { + return func() { + if _, err := client.UnaryCall(context.Background(), req); err != nil { + grpclog.Fatalf("RPC failed: %v", err) + } + } + } + stream, err := client.StreamingCall(context.Background()) + if err != nil { + grpclog.Fatalf("RPC failed: %v", err) + } + return func() { + if err := stream.Send(req); err != nil { + grpclog.Fatalf("Streaming RPC failed to send: %v", err) + } + if _, err := stream.Recv(); err != nil { + grpclog.Fatalf("Streaming RPC failed to read: %v", err) + } + } +} + +func parseHist(hist *stats.Histogram) { + fmt.Println("qps:", float64(hist.Count)/float64(*duration)) + fmt.Printf("Latency: (50/90/99 %%ile): %v/%v/%v\n", + time.Duration(median(.5, hist)), + time.Duration(median(.9, hist)), + time.Duration(median(.99, hist))) +} + +func 
median(percentile float64, h *stats.Histogram) int64 { + need := int64(float64(h.Count) * percentile) + have := int64(0) + for _, bucket := range h.Buckets { + count := bucket.Count + if have+count >= need { + percent := float64(need-have) / float64(count) + return int64((1.0-percent)*bucket.LowBound + percent*bucket.LowBound*(1.0+hopts.GrowthFactor)) + } + have += bucket.Count + } + panic("should have found a bound") +} diff --git a/vendor/google.golang.org/grpc/benchmark/grpc_testing/control.pb.go b/vendor/google.golang.org/grpc/benchmark/grpc_testing/control.pb.go new file mode 100644 index 0000000..3d125b3 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/grpc_testing/control.pb.go @@ -0,0 +1,1580 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: control.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ClientType int32 + +const ( + ClientType_SYNC_CLIENT ClientType = 0 + ClientType_ASYNC_CLIENT ClientType = 1 +) + +var ClientType_name = map[int32]string{ + 0: "SYNC_CLIENT", + 1: "ASYNC_CLIENT", +} +var ClientType_value = map[string]int32{ + "SYNC_CLIENT": 0, + "ASYNC_CLIENT": 1, +} + +func (x ClientType) String() string { + return proto.EnumName(ClientType_name, int32(x)) +} +func (ClientType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{0} +} + +type ServerType int32 + +const ( + ServerType_SYNC_SERVER ServerType = 0 + ServerType_ASYNC_SERVER ServerType = 1 + ServerType_ASYNC_GENERIC_SERVER ServerType = 2 +) + +var ServerType_name = map[int32]string{ + 0: "SYNC_SERVER", + 1: "ASYNC_SERVER", + 2: "ASYNC_GENERIC_SERVER", +} +var ServerType_value = map[string]int32{ + "SYNC_SERVER": 0, + "ASYNC_SERVER": 1, + "ASYNC_GENERIC_SERVER": 2, +} + +func (x ServerType) String() string { + return proto.EnumName(ServerType_name, int32(x)) +} +func (ServerType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{1} +} + +type RpcType int32 + +const ( + RpcType_UNARY RpcType = 0 + RpcType_STREAMING RpcType = 1 +) + +var RpcType_name = map[int32]string{ + 0: "UNARY", + 1: "STREAMING", +} +var RpcType_value = map[string]int32{ + "UNARY": 0, + "STREAMING": 1, +} + +func (x RpcType) String() string { + return proto.EnumName(RpcType_name, int32(x)) +} +func (RpcType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{2} +} + +// Parameters of poisson process distribution, which is a good representation +// of activity coming in from independent identical stationary sources. +type PoissonParams struct { + // The rate of arrivals (a.k.a. lambda parameter of the exp distribution). 
+ OfferedLoad float64 `protobuf:"fixed64,1,opt,name=offered_load,json=offeredLoad,proto3" json:"offered_load,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PoissonParams) Reset() { *m = PoissonParams{} } +func (m *PoissonParams) String() string { return proto.CompactTextString(m) } +func (*PoissonParams) ProtoMessage() {} +func (*PoissonParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{0} +} +func (m *PoissonParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PoissonParams.Unmarshal(m, b) +} +func (m *PoissonParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PoissonParams.Marshal(b, m, deterministic) +} +func (dst *PoissonParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_PoissonParams.Merge(dst, src) +} +func (m *PoissonParams) XXX_Size() int { + return xxx_messageInfo_PoissonParams.Size(m) +} +func (m *PoissonParams) XXX_DiscardUnknown() { + xxx_messageInfo_PoissonParams.DiscardUnknown(m) +} + +var xxx_messageInfo_PoissonParams proto.InternalMessageInfo + +func (m *PoissonParams) GetOfferedLoad() float64 { + if m != nil { + return m.OfferedLoad + } + return 0 +} + +type UniformParams struct { + InterarrivalLo float64 `protobuf:"fixed64,1,opt,name=interarrival_lo,json=interarrivalLo,proto3" json:"interarrival_lo,omitempty"` + InterarrivalHi float64 `protobuf:"fixed64,2,opt,name=interarrival_hi,json=interarrivalHi,proto3" json:"interarrival_hi,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UniformParams) Reset() { *m = UniformParams{} } +func (m *UniformParams) String() string { return proto.CompactTextString(m) } +func (*UniformParams) ProtoMessage() {} +func (*UniformParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{1} +} +func (m *UniformParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UniformParams.Unmarshal(m, b) +} +func (m *UniformParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UniformParams.Marshal(b, m, deterministic) +} +func (dst *UniformParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_UniformParams.Merge(dst, src) +} +func (m *UniformParams) XXX_Size() int { + return xxx_messageInfo_UniformParams.Size(m) +} +func (m *UniformParams) XXX_DiscardUnknown() { + xxx_messageInfo_UniformParams.DiscardUnknown(m) +} + +var xxx_messageInfo_UniformParams proto.InternalMessageInfo + +func (m *UniformParams) GetInterarrivalLo() float64 { + if m != nil { + return m.InterarrivalLo + } + return 0 +} + +func (m *UniformParams) GetInterarrivalHi() float64 { + if m != nil { + return m.InterarrivalHi + } + return 0 +} + +type DeterministicParams struct { + OfferedLoad float64 `protobuf:"fixed64,1,opt,name=offered_load,json=offeredLoad,proto3" json:"offered_load,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeterministicParams) Reset() { *m = DeterministicParams{} } +func (m *DeterministicParams) String() string { return proto.CompactTextString(m) } +func (*DeterministicParams) ProtoMessage() {} +func (*DeterministicParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{2} +} +func (m *DeterministicParams) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_DeterministicParams.Unmarshal(m, b) +} +func (m *DeterministicParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeterministicParams.Marshal(b, m, deterministic) +} +func (dst *DeterministicParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeterministicParams.Merge(dst, src) +} +func (m *DeterministicParams) XXX_Size() int { + return xxx_messageInfo_DeterministicParams.Size(m) +} +func (m *DeterministicParams) XXX_DiscardUnknown() { + xxx_messageInfo_DeterministicParams.DiscardUnknown(m) +} + +var xxx_messageInfo_DeterministicParams proto.InternalMessageInfo + +func (m *DeterministicParams) GetOfferedLoad() float64 { + if m != nil { + return m.OfferedLoad + } + return 0 +} + +type ParetoParams struct { + InterarrivalBase float64 `protobuf:"fixed64,1,opt,name=interarrival_base,json=interarrivalBase,proto3" json:"interarrival_base,omitempty"` + Alpha float64 `protobuf:"fixed64,2,opt,name=alpha,proto3" json:"alpha,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ParetoParams) Reset() { *m = ParetoParams{} } +func (m *ParetoParams) String() string { return proto.CompactTextString(m) } +func (*ParetoParams) ProtoMessage() {} +func (*ParetoParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{3} +} +func (m *ParetoParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ParetoParams.Unmarshal(m, b) +} +func (m *ParetoParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ParetoParams.Marshal(b, m, deterministic) +} +func (dst *ParetoParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ParetoParams.Merge(dst, src) +} +func (m *ParetoParams) XXX_Size() int { + return xxx_messageInfo_ParetoParams.Size(m) +} +func (m *ParetoParams) XXX_DiscardUnknown() { + xxx_messageInfo_ParetoParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ParetoParams proto.InternalMessageInfo + +func (m *ParetoParams) GetInterarrivalBase() float64 { + if m != nil { + return m.InterarrivalBase + } + return 0 +} + +func (m *ParetoParams) GetAlpha() float64 { + if m != nil { + return m.Alpha + } + return 0 +} + +// Once an RPC finishes, immediately start a new one. +// No configuration parameters needed. 
+type ClosedLoopParams struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClosedLoopParams) Reset() { *m = ClosedLoopParams{} } +func (m *ClosedLoopParams) String() string { return proto.CompactTextString(m) } +func (*ClosedLoopParams) ProtoMessage() {} +func (*ClosedLoopParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{4} +} +func (m *ClosedLoopParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClosedLoopParams.Unmarshal(m, b) +} +func (m *ClosedLoopParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClosedLoopParams.Marshal(b, m, deterministic) +} +func (dst *ClosedLoopParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClosedLoopParams.Merge(dst, src) +} +func (m *ClosedLoopParams) XXX_Size() int { + return xxx_messageInfo_ClosedLoopParams.Size(m) +} +func (m *ClosedLoopParams) XXX_DiscardUnknown() { + xxx_messageInfo_ClosedLoopParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ClosedLoopParams proto.InternalMessageInfo + +type LoadParams struct { + // Types that are valid to be assigned to Load: + // *LoadParams_ClosedLoop + // *LoadParams_Poisson + // *LoadParams_Uniform + // *LoadParams_Determ + // *LoadParams_Pareto + Load isLoadParams_Load `protobuf_oneof:"load"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoadParams) Reset() { *m = LoadParams{} } +func (m *LoadParams) String() string { return proto.CompactTextString(m) } +func (*LoadParams) ProtoMessage() {} +func (*LoadParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{5} +} +func (m *LoadParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoadParams.Unmarshal(m, b) +} +func (m *LoadParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoadParams.Marshal(b, m, deterministic) +} +func (dst *LoadParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoadParams.Merge(dst, src) +} +func (m *LoadParams) XXX_Size() int { + return xxx_messageInfo_LoadParams.Size(m) +} +func (m *LoadParams) XXX_DiscardUnknown() { + xxx_messageInfo_LoadParams.DiscardUnknown(m) +} + +var xxx_messageInfo_LoadParams proto.InternalMessageInfo + +type isLoadParams_Load interface { + isLoadParams_Load() +} + +type LoadParams_ClosedLoop struct { + ClosedLoop *ClosedLoopParams `protobuf:"bytes,1,opt,name=closed_loop,json=closedLoop,proto3,oneof"` +} + +type LoadParams_Poisson struct { + Poisson *PoissonParams `protobuf:"bytes,2,opt,name=poisson,proto3,oneof"` +} + +type LoadParams_Uniform struct { + Uniform *UniformParams `protobuf:"bytes,3,opt,name=uniform,proto3,oneof"` +} + +type LoadParams_Determ struct { + Determ *DeterministicParams `protobuf:"bytes,4,opt,name=determ,proto3,oneof"` +} + +type LoadParams_Pareto struct { + Pareto *ParetoParams `protobuf:"bytes,5,opt,name=pareto,proto3,oneof"` +} + +func (*LoadParams_ClosedLoop) isLoadParams_Load() {} + +func (*LoadParams_Poisson) isLoadParams_Load() {} + +func (*LoadParams_Uniform) isLoadParams_Load() {} + +func (*LoadParams_Determ) isLoadParams_Load() {} + +func (*LoadParams_Pareto) isLoadParams_Load() {} + +func (m *LoadParams) GetLoad() isLoadParams_Load { + if m != nil { + return m.Load + } + return nil +} + +func (m *LoadParams) GetClosedLoop() *ClosedLoopParams { + if x, ok := m.GetLoad().(*LoadParams_ClosedLoop); ok { + return 
x.ClosedLoop + } + return nil +} + +func (m *LoadParams) GetPoisson() *PoissonParams { + if x, ok := m.GetLoad().(*LoadParams_Poisson); ok { + return x.Poisson + } + return nil +} + +func (m *LoadParams) GetUniform() *UniformParams { + if x, ok := m.GetLoad().(*LoadParams_Uniform); ok { + return x.Uniform + } + return nil +} + +func (m *LoadParams) GetDeterm() *DeterministicParams { + if x, ok := m.GetLoad().(*LoadParams_Determ); ok { + return x.Determ + } + return nil +} + +func (m *LoadParams) GetPareto() *ParetoParams { + if x, ok := m.GetLoad().(*LoadParams_Pareto); ok { + return x.Pareto + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*LoadParams) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _LoadParams_OneofMarshaler, _LoadParams_OneofUnmarshaler, _LoadParams_OneofSizer, []interface{}{ + (*LoadParams_ClosedLoop)(nil), + (*LoadParams_Poisson)(nil), + (*LoadParams_Uniform)(nil), + (*LoadParams_Determ)(nil), + (*LoadParams_Pareto)(nil), + } +} + +func _LoadParams_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*LoadParams) + // load + switch x := m.Load.(type) { + case *LoadParams_ClosedLoop: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClosedLoop); err != nil { + return err + } + case *LoadParams_Poisson: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Poisson); err != nil { + return err + } + case *LoadParams_Uniform: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Uniform); err != nil { + return err + } + case *LoadParams_Determ: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Determ); err != nil { + return err + } + case *LoadParams_Pareto: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Pareto); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("LoadParams.Load has unexpected type %T", x) + } + return nil +} + +func _LoadParams_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*LoadParams) + switch tag { + case 1: // load.closed_loop + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClosedLoopParams) + err := b.DecodeMessage(msg) + m.Load = &LoadParams_ClosedLoop{msg} + return true, err + case 2: // load.poisson + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(PoissonParams) + err := b.DecodeMessage(msg) + m.Load = &LoadParams_Poisson{msg} + return true, err + case 3: // load.uniform + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(UniformParams) + err := b.DecodeMessage(msg) + m.Load = &LoadParams_Uniform{msg} + return true, err + case 4: // load.determ + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DeterministicParams) + err := b.DecodeMessage(msg) + m.Load = &LoadParams_Determ{msg} + return true, err + case 5: // load.pareto + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ParetoParams) + err := b.DecodeMessage(msg) + m.Load = &LoadParams_Pareto{msg} + return true, err + default: + return false, nil + } +} + +func _LoadParams_OneofSizer(msg proto.Message) (n int) { + m := msg.(*LoadParams) + // load + switch x := m.Load.(type) { + case *LoadParams_ClosedLoop: + s := 
proto.Size(x.ClosedLoop) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadParams_Poisson: + s := proto.Size(x.Poisson) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadParams_Uniform: + s := proto.Size(x.Uniform) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadParams_Determ: + s := proto.Size(x.Determ) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *LoadParams_Pareto: + s := proto.Size(x.Pareto) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// presence of SecurityParams implies use of TLS +type SecurityParams struct { + UseTestCa bool `protobuf:"varint,1,opt,name=use_test_ca,json=useTestCa,proto3" json:"use_test_ca,omitempty"` + ServerHostOverride string `protobuf:"bytes,2,opt,name=server_host_override,json=serverHostOverride,proto3" json:"server_host_override,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SecurityParams) Reset() { *m = SecurityParams{} } +func (m *SecurityParams) String() string { return proto.CompactTextString(m) } +func (*SecurityParams) ProtoMessage() {} +func (*SecurityParams) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{6} +} +func (m *SecurityParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SecurityParams.Unmarshal(m, b) +} +func (m *SecurityParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SecurityParams.Marshal(b, m, deterministic) +} +func (dst *SecurityParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_SecurityParams.Merge(dst, src) +} +func (m *SecurityParams) XXX_Size() int { + return xxx_messageInfo_SecurityParams.Size(m) +} +func (m *SecurityParams) XXX_DiscardUnknown() { + xxx_messageInfo_SecurityParams.DiscardUnknown(m) +} + +var xxx_messageInfo_SecurityParams proto.InternalMessageInfo + +func (m *SecurityParams) GetUseTestCa() bool { + if m != nil { + return m.UseTestCa + } + return false +} + +func (m *SecurityParams) GetServerHostOverride() string { + if m != nil { + return m.ServerHostOverride + } + return "" +} + +type ClientConfig struct { + // List of targets to connect to. At least one target needs to be specified. + ServerTargets []string `protobuf:"bytes,1,rep,name=server_targets,json=serverTargets,proto3" json:"server_targets,omitempty"` + ClientType ClientType `protobuf:"varint,2,opt,name=client_type,json=clientType,proto3,enum=grpc.testing.ClientType" json:"client_type,omitempty"` + SecurityParams *SecurityParams `protobuf:"bytes,3,opt,name=security_params,json=securityParams,proto3" json:"security_params,omitempty"` + // How many concurrent RPCs to start for each channel. + // For synchronous client, use a separate thread for each outstanding RPC. + OutstandingRpcsPerChannel int32 `protobuf:"varint,4,opt,name=outstanding_rpcs_per_channel,json=outstandingRpcsPerChannel,proto3" json:"outstanding_rpcs_per_channel,omitempty"` + // Number of independent client channels to create. + // i-th channel will connect to server_target[i % server_targets.size()] + ClientChannels int32 `protobuf:"varint,5,opt,name=client_channels,json=clientChannels,proto3" json:"client_channels,omitempty"` + // Only for async client. Number of threads to use to start/manage RPCs. 
+ AsyncClientThreads int32 `protobuf:"varint,7,opt,name=async_client_threads,json=asyncClientThreads,proto3" json:"async_client_threads,omitempty"` + RpcType RpcType `protobuf:"varint,8,opt,name=rpc_type,json=rpcType,proto3,enum=grpc.testing.RpcType" json:"rpc_type,omitempty"` + // The requested load for the entire client (aggregated over all the threads). + LoadParams *LoadParams `protobuf:"bytes,10,opt,name=load_params,json=loadParams,proto3" json:"load_params,omitempty"` + PayloadConfig *PayloadConfig `protobuf:"bytes,11,opt,name=payload_config,json=payloadConfig,proto3" json:"payload_config,omitempty"` + HistogramParams *HistogramParams `protobuf:"bytes,12,opt,name=histogram_params,json=histogramParams,proto3" json:"histogram_params,omitempty"` + // Specify the cores we should run the client on, if desired + CoreList []int32 `protobuf:"varint,13,rep,packed,name=core_list,json=coreList,proto3" json:"core_list,omitempty"` + CoreLimit int32 `protobuf:"varint,14,opt,name=core_limit,json=coreLimit,proto3" json:"core_limit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientConfig) Reset() { *m = ClientConfig{} } +func (m *ClientConfig) String() string { return proto.CompactTextString(m) } +func (*ClientConfig) ProtoMessage() {} +func (*ClientConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{7} +} +func (m *ClientConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientConfig.Unmarshal(m, b) +} +func (m *ClientConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientConfig.Marshal(b, m, deterministic) +} +func (dst *ClientConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientConfig.Merge(dst, src) +} +func (m *ClientConfig) XXX_Size() int { + return xxx_messageInfo_ClientConfig.Size(m) +} +func (m *ClientConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ClientConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientConfig proto.InternalMessageInfo + +func (m *ClientConfig) GetServerTargets() []string { + if m != nil { + return m.ServerTargets + } + return nil +} + +func (m *ClientConfig) GetClientType() ClientType { + if m != nil { + return m.ClientType + } + return ClientType_SYNC_CLIENT +} + +func (m *ClientConfig) GetSecurityParams() *SecurityParams { + if m != nil { + return m.SecurityParams + } + return nil +} + +func (m *ClientConfig) GetOutstandingRpcsPerChannel() int32 { + if m != nil { + return m.OutstandingRpcsPerChannel + } + return 0 +} + +func (m *ClientConfig) GetClientChannels() int32 { + if m != nil { + return m.ClientChannels + } + return 0 +} + +func (m *ClientConfig) GetAsyncClientThreads() int32 { + if m != nil { + return m.AsyncClientThreads + } + return 0 +} + +func (m *ClientConfig) GetRpcType() RpcType { + if m != nil { + return m.RpcType + } + return RpcType_UNARY +} + +func (m *ClientConfig) GetLoadParams() *LoadParams { + if m != nil { + return m.LoadParams + } + return nil +} + +func (m *ClientConfig) GetPayloadConfig() *PayloadConfig { + if m != nil { + return m.PayloadConfig + } + return nil +} + +func (m *ClientConfig) GetHistogramParams() *HistogramParams { + if m != nil { + return m.HistogramParams + } + return nil +} + +func (m *ClientConfig) GetCoreList() []int32 { + if m != nil { + return m.CoreList + } + return nil +} + +func (m *ClientConfig) GetCoreLimit() int32 { + if m != nil { + return m.CoreLimit + } + return 0 +} + +type ClientStatus struct { + 
Stats *ClientStats `protobuf:"bytes,1,opt,name=stats,proto3" json:"stats,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientStatus) Reset() { *m = ClientStatus{} } +func (m *ClientStatus) String() string { return proto.CompactTextString(m) } +func (*ClientStatus) ProtoMessage() {} +func (*ClientStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{8} +} +func (m *ClientStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientStatus.Unmarshal(m, b) +} +func (m *ClientStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientStatus.Marshal(b, m, deterministic) +} +func (dst *ClientStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientStatus.Merge(dst, src) +} +func (m *ClientStatus) XXX_Size() int { + return xxx_messageInfo_ClientStatus.Size(m) +} +func (m *ClientStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ClientStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientStatus proto.InternalMessageInfo + +func (m *ClientStatus) GetStats() *ClientStats { + if m != nil { + return m.Stats + } + return nil +} + +// Request current stats +type Mark struct { + // if true, the stats will be reset after taking their snapshot. + Reset_ bool `protobuf:"varint,1,opt,name=reset,proto3" json:"reset,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Mark) Reset() { *m = Mark{} } +func (m *Mark) String() string { return proto.CompactTextString(m) } +func (*Mark) ProtoMessage() {} +func (*Mark) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{9} +} +func (m *Mark) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Mark.Unmarshal(m, b) +} +func (m *Mark) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Mark.Marshal(b, m, deterministic) +} +func (dst *Mark) XXX_Merge(src proto.Message) { + xxx_messageInfo_Mark.Merge(dst, src) +} +func (m *Mark) XXX_Size() int { + return xxx_messageInfo_Mark.Size(m) +} +func (m *Mark) XXX_DiscardUnknown() { + xxx_messageInfo_Mark.DiscardUnknown(m) +} + +var xxx_messageInfo_Mark proto.InternalMessageInfo + +func (m *Mark) GetReset_() bool { + if m != nil { + return m.Reset_ + } + return false +} + +type ClientArgs struct { + // Types that are valid to be assigned to Argtype: + // *ClientArgs_Setup + // *ClientArgs_Mark + Argtype isClientArgs_Argtype `protobuf_oneof:"argtype"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientArgs) Reset() { *m = ClientArgs{} } +func (m *ClientArgs) String() string { return proto.CompactTextString(m) } +func (*ClientArgs) ProtoMessage() {} +func (*ClientArgs) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{10} +} +func (m *ClientArgs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientArgs.Unmarshal(m, b) +} +func (m *ClientArgs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientArgs.Marshal(b, m, deterministic) +} +func (dst *ClientArgs) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientArgs.Merge(dst, src) +} +func (m *ClientArgs) XXX_Size() int { + return xxx_messageInfo_ClientArgs.Size(m) +} +func (m *ClientArgs) XXX_DiscardUnknown() { + xxx_messageInfo_ClientArgs.DiscardUnknown(m) +} + +var 
xxx_messageInfo_ClientArgs proto.InternalMessageInfo + +type isClientArgs_Argtype interface { + isClientArgs_Argtype() +} + +type ClientArgs_Setup struct { + Setup *ClientConfig `protobuf:"bytes,1,opt,name=setup,proto3,oneof"` +} + +type ClientArgs_Mark struct { + Mark *Mark `protobuf:"bytes,2,opt,name=mark,proto3,oneof"` +} + +func (*ClientArgs_Setup) isClientArgs_Argtype() {} + +func (*ClientArgs_Mark) isClientArgs_Argtype() {} + +func (m *ClientArgs) GetArgtype() isClientArgs_Argtype { + if m != nil { + return m.Argtype + } + return nil +} + +func (m *ClientArgs) GetSetup() *ClientConfig { + if x, ok := m.GetArgtype().(*ClientArgs_Setup); ok { + return x.Setup + } + return nil +} + +func (m *ClientArgs) GetMark() *Mark { + if x, ok := m.GetArgtype().(*ClientArgs_Mark); ok { + return x.Mark + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ClientArgs) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ClientArgs_OneofMarshaler, _ClientArgs_OneofUnmarshaler, _ClientArgs_OneofSizer, []interface{}{ + (*ClientArgs_Setup)(nil), + (*ClientArgs_Mark)(nil), + } +} + +func _ClientArgs_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ClientArgs) + // argtype + switch x := m.Argtype.(type) { + case *ClientArgs_Setup: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Setup); err != nil { + return err + } + case *ClientArgs_Mark: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Mark); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ClientArgs.Argtype has unexpected type %T", x) + } + return nil +} + +func _ClientArgs_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ClientArgs) + switch tag { + case 1: // argtype.setup + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClientConfig) + err := b.DecodeMessage(msg) + m.Argtype = &ClientArgs_Setup{msg} + return true, err + case 2: // argtype.mark + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mark) + err := b.DecodeMessage(msg) + m.Argtype = &ClientArgs_Mark{msg} + return true, err + default: + return false, nil + } +} + +func _ClientArgs_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ClientArgs) + // argtype + switch x := m.Argtype.(type) { + case *ClientArgs_Setup: + s := proto.Size(x.Setup) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ClientArgs_Mark: + s := proto.Size(x.Mark) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type ServerConfig struct { + ServerType ServerType `protobuf:"varint,1,opt,name=server_type,json=serverType,proto3,enum=grpc.testing.ServerType" json:"server_type,omitempty"` + SecurityParams *SecurityParams `protobuf:"bytes,2,opt,name=security_params,json=securityParams,proto3" json:"security_params,omitempty"` + // Port on which to listen. Zero means pick unused port. + Port int32 `protobuf:"varint,4,opt,name=port,proto3" json:"port,omitempty"` + // Only for async server. Number of threads used to serve the requests. 
+ AsyncServerThreads int32 `protobuf:"varint,7,opt,name=async_server_threads,json=asyncServerThreads,proto3" json:"async_server_threads,omitempty"` + // Specify the number of cores to limit server to, if desired + CoreLimit int32 `protobuf:"varint,8,opt,name=core_limit,json=coreLimit,proto3" json:"core_limit,omitempty"` + // payload config, used in generic server + PayloadConfig *PayloadConfig `protobuf:"bytes,9,opt,name=payload_config,json=payloadConfig,proto3" json:"payload_config,omitempty"` + // Specify the cores we should run the server on, if desired + CoreList []int32 `protobuf:"varint,10,rep,packed,name=core_list,json=coreList,proto3" json:"core_list,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerConfig) Reset() { *m = ServerConfig{} } +func (m *ServerConfig) String() string { return proto.CompactTextString(m) } +func (*ServerConfig) ProtoMessage() {} +func (*ServerConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{11} +} +func (m *ServerConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerConfig.Unmarshal(m, b) +} +func (m *ServerConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerConfig.Marshal(b, m, deterministic) +} +func (dst *ServerConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerConfig.Merge(dst, src) +} +func (m *ServerConfig) XXX_Size() int { + return xxx_messageInfo_ServerConfig.Size(m) +} +func (m *ServerConfig) XXX_DiscardUnknown() { + xxx_messageInfo_ServerConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerConfig proto.InternalMessageInfo + +func (m *ServerConfig) GetServerType() ServerType { + if m != nil { + return m.ServerType + } + return ServerType_SYNC_SERVER +} + +func (m *ServerConfig) GetSecurityParams() *SecurityParams { + if m != nil { + return m.SecurityParams + } + return nil +} + +func (m *ServerConfig) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *ServerConfig) GetAsyncServerThreads() int32 { + if m != nil { + return m.AsyncServerThreads + } + return 0 +} + +func (m *ServerConfig) GetCoreLimit() int32 { + if m != nil { + return m.CoreLimit + } + return 0 +} + +func (m *ServerConfig) GetPayloadConfig() *PayloadConfig { + if m != nil { + return m.PayloadConfig + } + return nil +} + +func (m *ServerConfig) GetCoreList() []int32 { + if m != nil { + return m.CoreList + } + return nil +} + +type ServerArgs struct { + // Types that are valid to be assigned to Argtype: + // *ServerArgs_Setup + // *ServerArgs_Mark + Argtype isServerArgs_Argtype `protobuf_oneof:"argtype"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerArgs) Reset() { *m = ServerArgs{} } +func (m *ServerArgs) String() string { return proto.CompactTextString(m) } +func (*ServerArgs) ProtoMessage() {} +func (*ServerArgs) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{12} +} +func (m *ServerArgs) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerArgs.Unmarshal(m, b) +} +func (m *ServerArgs) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerArgs.Marshal(b, m, deterministic) +} +func (dst *ServerArgs) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerArgs.Merge(dst, src) +} +func (m *ServerArgs) XXX_Size() int { + return xxx_messageInfo_ServerArgs.Size(m) +} +func 
(m *ServerArgs) XXX_DiscardUnknown() { + xxx_messageInfo_ServerArgs.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerArgs proto.InternalMessageInfo + +type isServerArgs_Argtype interface { + isServerArgs_Argtype() +} + +type ServerArgs_Setup struct { + Setup *ServerConfig `protobuf:"bytes,1,opt,name=setup,proto3,oneof"` +} + +type ServerArgs_Mark struct { + Mark *Mark `protobuf:"bytes,2,opt,name=mark,proto3,oneof"` +} + +func (*ServerArgs_Setup) isServerArgs_Argtype() {} + +func (*ServerArgs_Mark) isServerArgs_Argtype() {} + +func (m *ServerArgs) GetArgtype() isServerArgs_Argtype { + if m != nil { + return m.Argtype + } + return nil +} + +func (m *ServerArgs) GetSetup() *ServerConfig { + if x, ok := m.GetArgtype().(*ServerArgs_Setup); ok { + return x.Setup + } + return nil +} + +func (m *ServerArgs) GetMark() *Mark { + if x, ok := m.GetArgtype().(*ServerArgs_Mark); ok { + return x.Mark + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ServerArgs) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ServerArgs_OneofMarshaler, _ServerArgs_OneofUnmarshaler, _ServerArgs_OneofSizer, []interface{}{ + (*ServerArgs_Setup)(nil), + (*ServerArgs_Mark)(nil), + } +} + +func _ServerArgs_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ServerArgs) + // argtype + switch x := m.Argtype.(type) { + case *ServerArgs_Setup: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Setup); err != nil { + return err + } + case *ServerArgs_Mark: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Mark); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ServerArgs.Argtype has unexpected type %T", x) + } + return nil +} + +func _ServerArgs_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ServerArgs) + switch tag { + case 1: // argtype.setup + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ServerConfig) + err := b.DecodeMessage(msg) + m.Argtype = &ServerArgs_Setup{msg} + return true, err + case 2: // argtype.mark + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Mark) + err := b.DecodeMessage(msg) + m.Argtype = &ServerArgs_Mark{msg} + return true, err + default: + return false, nil + } +} + +func _ServerArgs_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ServerArgs) + // argtype + switch x := m.Argtype.(type) { + case *ServerArgs_Setup: + s := proto.Size(x.Setup) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ServerArgs_Mark: + s := proto.Size(x.Mark) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type ServerStatus struct { + Stats *ServerStats `protobuf:"bytes,1,opt,name=stats,proto3" json:"stats,omitempty"` + // the port bound by the server + Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + // Number of cores available to the server + Cores int32 `protobuf:"varint,3,opt,name=cores,proto3" json:"cores,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerStatus) Reset() { *m = ServerStatus{} } +func (m *ServerStatus) String() string { return 
proto.CompactTextString(m) } +func (*ServerStatus) ProtoMessage() {} +func (*ServerStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{13} +} +func (m *ServerStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerStatus.Unmarshal(m, b) +} +func (m *ServerStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerStatus.Marshal(b, m, deterministic) +} +func (dst *ServerStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerStatus.Merge(dst, src) +} +func (m *ServerStatus) XXX_Size() int { + return xxx_messageInfo_ServerStatus.Size(m) +} +func (m *ServerStatus) XXX_DiscardUnknown() { + xxx_messageInfo_ServerStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerStatus proto.InternalMessageInfo + +func (m *ServerStatus) GetStats() *ServerStats { + if m != nil { + return m.Stats + } + return nil +} + +func (m *ServerStatus) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *ServerStatus) GetCores() int32 { + if m != nil { + return m.Cores + } + return 0 +} + +type CoreRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CoreRequest) Reset() { *m = CoreRequest{} } +func (m *CoreRequest) String() string { return proto.CompactTextString(m) } +func (*CoreRequest) ProtoMessage() {} +func (*CoreRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{14} +} +func (m *CoreRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CoreRequest.Unmarshal(m, b) +} +func (m *CoreRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CoreRequest.Marshal(b, m, deterministic) +} +func (dst *CoreRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreRequest.Merge(dst, src) +} +func (m *CoreRequest) XXX_Size() int { + return xxx_messageInfo_CoreRequest.Size(m) +} +func (m *CoreRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CoreRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CoreRequest proto.InternalMessageInfo + +type CoreResponse struct { + // Number of cores available on the server + Cores int32 `protobuf:"varint,1,opt,name=cores,proto3" json:"cores,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CoreResponse) Reset() { *m = CoreResponse{} } +func (m *CoreResponse) String() string { return proto.CompactTextString(m) } +func (*CoreResponse) ProtoMessage() {} +func (*CoreResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{15} +} +func (m *CoreResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CoreResponse.Unmarshal(m, b) +} +func (m *CoreResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CoreResponse.Marshal(b, m, deterministic) +} +func (dst *CoreResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CoreResponse.Merge(dst, src) +} +func (m *CoreResponse) XXX_Size() int { + return xxx_messageInfo_CoreResponse.Size(m) +} +func (m *CoreResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CoreResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CoreResponse proto.InternalMessageInfo + +func (m *CoreResponse) GetCores() int32 { + if m != nil { + return m.Cores + } + return 0 +} + +type Void struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache 
int32 `json:"-"` +} + +func (m *Void) Reset() { *m = Void{} } +func (m *Void) String() string { return proto.CompactTextString(m) } +func (*Void) ProtoMessage() {} +func (*Void) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{16} +} +func (m *Void) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Void.Unmarshal(m, b) +} +func (m *Void) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Void.Marshal(b, m, deterministic) +} +func (dst *Void) XXX_Merge(src proto.Message) { + xxx_messageInfo_Void.Merge(dst, src) +} +func (m *Void) XXX_Size() int { + return xxx_messageInfo_Void.Size(m) +} +func (m *Void) XXX_DiscardUnknown() { + xxx_messageInfo_Void.DiscardUnknown(m) +} + +var xxx_messageInfo_Void proto.InternalMessageInfo + +// A single performance scenario: input to qps_json_driver +type Scenario struct { + // Human readable name for this scenario + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Client configuration + ClientConfig *ClientConfig `protobuf:"bytes,2,opt,name=client_config,json=clientConfig,proto3" json:"client_config,omitempty"` + // Number of clients to start for the test + NumClients int32 `protobuf:"varint,3,opt,name=num_clients,json=numClients,proto3" json:"num_clients,omitempty"` + // Server configuration + ServerConfig *ServerConfig `protobuf:"bytes,4,opt,name=server_config,json=serverConfig,proto3" json:"server_config,omitempty"` + // Number of servers to start for the test + NumServers int32 `protobuf:"varint,5,opt,name=num_servers,json=numServers,proto3" json:"num_servers,omitempty"` + // Warmup period, in seconds + WarmupSeconds int32 `protobuf:"varint,6,opt,name=warmup_seconds,json=warmupSeconds,proto3" json:"warmup_seconds,omitempty"` + // Benchmark time, in seconds + BenchmarkSeconds int32 `protobuf:"varint,7,opt,name=benchmark_seconds,json=benchmarkSeconds,proto3" json:"benchmark_seconds,omitempty"` + // Number of workers to spawn locally (usually zero) + SpawnLocalWorkerCount int32 `protobuf:"varint,8,opt,name=spawn_local_worker_count,json=spawnLocalWorkerCount,proto3" json:"spawn_local_worker_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Scenario) Reset() { *m = Scenario{} } +func (m *Scenario) String() string { return proto.CompactTextString(m) } +func (*Scenario) ProtoMessage() {} +func (*Scenario) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{17} +} +func (m *Scenario) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Scenario.Unmarshal(m, b) +} +func (m *Scenario) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Scenario.Marshal(b, m, deterministic) +} +func (dst *Scenario) XXX_Merge(src proto.Message) { + xxx_messageInfo_Scenario.Merge(dst, src) +} +func (m *Scenario) XXX_Size() int { + return xxx_messageInfo_Scenario.Size(m) +} +func (m *Scenario) XXX_DiscardUnknown() { + xxx_messageInfo_Scenario.DiscardUnknown(m) +} + +var xxx_messageInfo_Scenario proto.InternalMessageInfo + +func (m *Scenario) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Scenario) GetClientConfig() *ClientConfig { + if m != nil { + return m.ClientConfig + } + return nil +} + +func (m *Scenario) GetNumClients() int32 { + if m != nil { + return m.NumClients + } + return 0 +} + +func (m *Scenario) GetServerConfig() *ServerConfig { + if m != nil { + return 
m.ServerConfig + } + return nil +} + +func (m *Scenario) GetNumServers() int32 { + if m != nil { + return m.NumServers + } + return 0 +} + +func (m *Scenario) GetWarmupSeconds() int32 { + if m != nil { + return m.WarmupSeconds + } + return 0 +} + +func (m *Scenario) GetBenchmarkSeconds() int32 { + if m != nil { + return m.BenchmarkSeconds + } + return 0 +} + +func (m *Scenario) GetSpawnLocalWorkerCount() int32 { + if m != nil { + return m.SpawnLocalWorkerCount + } + return 0 +} + +// A set of scenarios to be run with qps_json_driver +type Scenarios struct { + Scenarios []*Scenario `protobuf:"bytes,1,rep,name=scenarios,proto3" json:"scenarios,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Scenarios) Reset() { *m = Scenarios{} } +func (m *Scenarios) String() string { return proto.CompactTextString(m) } +func (*Scenarios) ProtoMessage() {} +func (*Scenarios) Descriptor() ([]byte, []int) { + return fileDescriptor_control_63d6a60a9ad7e299, []int{18} +} +func (m *Scenarios) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Scenarios.Unmarshal(m, b) +} +func (m *Scenarios) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Scenarios.Marshal(b, m, deterministic) +} +func (dst *Scenarios) XXX_Merge(src proto.Message) { + xxx_messageInfo_Scenarios.Merge(dst, src) +} +func (m *Scenarios) XXX_Size() int { + return xxx_messageInfo_Scenarios.Size(m) +} +func (m *Scenarios) XXX_DiscardUnknown() { + xxx_messageInfo_Scenarios.DiscardUnknown(m) +} + +var xxx_messageInfo_Scenarios proto.InternalMessageInfo + +func (m *Scenarios) GetScenarios() []*Scenario { + if m != nil { + return m.Scenarios + } + return nil +} + +func init() { + proto.RegisterType((*PoissonParams)(nil), "grpc.testing.PoissonParams") + proto.RegisterType((*UniformParams)(nil), "grpc.testing.UniformParams") + proto.RegisterType((*DeterministicParams)(nil), "grpc.testing.DeterministicParams") + proto.RegisterType((*ParetoParams)(nil), "grpc.testing.ParetoParams") + proto.RegisterType((*ClosedLoopParams)(nil), "grpc.testing.ClosedLoopParams") + proto.RegisterType((*LoadParams)(nil), "grpc.testing.LoadParams") + proto.RegisterType((*SecurityParams)(nil), "grpc.testing.SecurityParams") + proto.RegisterType((*ClientConfig)(nil), "grpc.testing.ClientConfig") + proto.RegisterType((*ClientStatus)(nil), "grpc.testing.ClientStatus") + proto.RegisterType((*Mark)(nil), "grpc.testing.Mark") + proto.RegisterType((*ClientArgs)(nil), "grpc.testing.ClientArgs") + proto.RegisterType((*ServerConfig)(nil), "grpc.testing.ServerConfig") + proto.RegisterType((*ServerArgs)(nil), "grpc.testing.ServerArgs") + proto.RegisterType((*ServerStatus)(nil), "grpc.testing.ServerStatus") + proto.RegisterType((*CoreRequest)(nil), "grpc.testing.CoreRequest") + proto.RegisterType((*CoreResponse)(nil), "grpc.testing.CoreResponse") + proto.RegisterType((*Void)(nil), "grpc.testing.Void") + proto.RegisterType((*Scenario)(nil), "grpc.testing.Scenario") + proto.RegisterType((*Scenarios)(nil), "grpc.testing.Scenarios") + proto.RegisterEnum("grpc.testing.ClientType", ClientType_name, ClientType_value) + proto.RegisterEnum("grpc.testing.ServerType", ServerType_name, ServerType_value) + proto.RegisterEnum("grpc.testing.RpcType", RpcType_name, RpcType_value) +} + +func init() { proto.RegisterFile("control.proto", fileDescriptor_control_63d6a60a9ad7e299) } + +var fileDescriptor_control_63d6a60a9ad7e299 = []byte{ + // 1179 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x6f, 0x6f, 0xdb, 0xb6, + 0x13, 0xb6, 0x1d, 0xdb, 0xb1, 0x4e, 0xb6, 0xe3, 0x1f, 0x7f, 0xe9, 0xa0, 0xa6, 0x69, 0x97, 0x6a, + 0x1b, 0x16, 0x64, 0x40, 0x5a, 0x78, 0x05, 0xba, 0x62, 0x2f, 0x02, 0xc7, 0x33, 0xea, 0x00, 0x69, + 0x96, 0xd1, 0x69, 0x87, 0xbe, 0x12, 0x18, 0x99, 0xb1, 0x85, 0xc8, 0xa2, 0x46, 0x52, 0x09, 0xf2, + 0x15, 0xf6, 0x99, 0xf6, 0x39, 0xf6, 0x35, 0xf6, 0x15, 0x06, 0xfe, 0x91, 0x23, 0xb9, 0x06, 0x9a, + 0x6d, 0xef, 0xc4, 0xbb, 0xe7, 0xe1, 0x91, 0xf7, 0xdc, 0x1d, 0x05, 0x9d, 0x90, 0x25, 0x92, 0xb3, + 0xf8, 0x30, 0xe5, 0x4c, 0x32, 0xd4, 0x9e, 0xf1, 0x34, 0x3c, 0x94, 0x54, 0xc8, 0x28, 0x99, 0xed, + 0x74, 0x53, 0x72, 0x17, 0x33, 0x32, 0x15, 0xc6, 0xbb, 0xe3, 0x0a, 0x49, 0xa4, 0x5d, 0xf8, 0x7d, + 0xe8, 0x9c, 0xb3, 0x48, 0x08, 0x96, 0x9c, 0x13, 0x4e, 0x16, 0x02, 0x3d, 0x87, 0x36, 0xbb, 0xba, + 0xa2, 0x9c, 0x4e, 0x03, 0x45, 0xf2, 0xaa, 0x7b, 0xd5, 0xfd, 0x2a, 0x76, 0xad, 0xed, 0x94, 0x91, + 0xa9, 0x4f, 0xa0, 0xf3, 0x3e, 0x89, 0xae, 0x18, 0x5f, 0x58, 0xce, 0xb7, 0xb0, 0x15, 0x25, 0x92, + 0x72, 0xc2, 0x79, 0x74, 0x43, 0xe2, 0x20, 0x66, 0x96, 0xd6, 0x2d, 0x9a, 0x4f, 0xd9, 0x27, 0xc0, + 0x79, 0xe4, 0xd5, 0x3e, 0x05, 0x8e, 0x23, 0xff, 0x07, 0xf8, 0xff, 0x4f, 0x54, 0x52, 0xbe, 0x88, + 0x92, 0x48, 0xc8, 0x28, 0x7c, 0xf8, 0xe1, 0x7e, 0x81, 0xf6, 0x39, 0xe1, 0x54, 0x32, 0x4b, 0xf9, + 0x0e, 0xfe, 0x57, 0x0a, 0x79, 0x49, 0x04, 0xb5, 0xbc, 0x5e, 0xd1, 0x71, 0x4c, 0x04, 0x45, 0xdb, + 0xd0, 0x20, 0x71, 0x3a, 0x27, 0xf6, 0x54, 0x66, 0xe1, 0x23, 0xe8, 0x0d, 0x63, 0x26, 0x54, 0x00, + 0x96, 0x9a, 0x6d, 0xfd, 0x3f, 0x6a, 0x00, 0x2a, 0x9e, 0x8d, 0x32, 0x00, 0x37, 0xd4, 0x90, 0x20, + 0x66, 0x2c, 0xd5, 0xfb, 0xbb, 0xfd, 0x67, 0x87, 0x45, 0x1d, 0x0e, 0x57, 0xf7, 0x18, 0x57, 0x30, + 0x84, 0x4b, 0x1b, 0x7a, 0x0d, 0x9b, 0xa9, 0x51, 0x42, 0x47, 0x77, 0xfb, 0x4f, 0xca, 0xf4, 0x92, + 0x4c, 0xe3, 0x0a, 0xce, 0xd1, 0x8a, 0x98, 0x19, 0x39, 0xbc, 0x8d, 0x75, 0xc4, 0x92, 0x56, 0x8a, + 0x68, 0xd1, 0xe8, 0x47, 0x68, 0x4e, 0x75, 0x92, 0xbd, 0xba, 0xe6, 0x3d, 0x2f, 0xf3, 0xd6, 0x08, + 0x30, 0xae, 0x60, 0x4b, 0x41, 0xaf, 0xa0, 0x99, 0xea, 0x3c, 0x7b, 0x0d, 0x4d, 0xde, 0x59, 0x39, + 0x6d, 0x41, 0x03, 0xc5, 0x32, 0xd8, 0xe3, 0x26, 0xd4, 0x95, 0x70, 0xfe, 0x25, 0x74, 0x27, 0x34, + 0xcc, 0x78, 0x24, 0xef, 0x6c, 0x06, 0x9f, 0x81, 0x9b, 0x09, 0x1a, 0x28, 0x7e, 0x10, 0x12, 0x9d, + 0xc1, 0x16, 0x76, 0x32, 0x41, 0x2f, 0xa8, 0x90, 0x43, 0x82, 0x5e, 0xc2, 0xb6, 0xa0, 0xfc, 0x86, + 0xf2, 0x60, 0xce, 0x84, 0x0c, 0xd8, 0x0d, 0xe5, 0x3c, 0x9a, 0x52, 0x9d, 0x2b, 0x07, 0x23, 0xe3, + 0x1b, 0x33, 0x21, 0x7f, 0xb6, 0x1e, 0xff, 0xf7, 0x06, 0xb4, 0x87, 0x71, 0x44, 0x13, 0x39, 0x64, + 0xc9, 0x55, 0x34, 0x43, 0xdf, 0x40, 0xd7, 0x6e, 0x21, 0x09, 0x9f, 0x51, 0x29, 0xbc, 0xea, 0xde, + 0xc6, 0xbe, 0x83, 0x3b, 0xc6, 0x7a, 0x61, 0x8c, 0xe8, 0x8d, 0xd2, 0x52, 0xd1, 0x02, 0x79, 0x97, + 0x9a, 0x00, 0xdd, 0xbe, 0xb7, 0xaa, 0xa5, 0x02, 0x5c, 0xdc, 0xa5, 0x54, 0x69, 0x98, 0x7f, 0xa3, + 0x11, 0x6c, 0x09, 0x7b, 0xad, 0x20, 0xd5, 0xf7, 0xb2, 0x92, 0xec, 0x96, 0xe9, 0xe5, 0xbb, 0xe3, + 0xae, 0x28, 0xe7, 0xe2, 0x08, 0x76, 0x59, 0x26, 0x85, 0x24, 0xc9, 0x34, 0x4a, 0x66, 0x01, 0x4f, + 0x43, 0x11, 0xa4, 0x94, 0x07, 0xe1, 0x9c, 0x24, 0x09, 0x8d, 0xb5, 0x5c, 0x0d, 0xfc, 0xb8, 0x80, + 0xc1, 0x69, 0x28, 0xce, 0x29, 0x1f, 0x1a, 0x80, 0xea, 0x33, 0x7b, 0x05, 0x4b, 0x11, 0x5a, 0xa5, + 0x06, 0xee, 0x1a, 0xb3, 0xc5, 0x09, 0x95, 0x55, 0x22, 0xee, 0x92, 0x30, 0xc8, 0x6f, 0x3c, 0xe7, + 0x94, 0x4c, 0x85, 0xb7, 0xa9, 0xd1, 0x48, 0xfb, 0xec, 0x5d, 0x8d, 0x07, 0xbd, 0x84, 0x16, 0x4f, + 
0x43, 0x93, 0x9a, 0x96, 0x4e, 0xcd, 0xa3, 0xf2, 0xdd, 0x70, 0x1a, 0xea, 0xbc, 0x6c, 0x72, 0xf3, + 0xa1, 0xf2, 0xa9, 0x34, 0xcf, 0x13, 0x02, 0x3a, 0x21, 0x2b, 0xf9, 0xbc, 0x6f, 0x25, 0x0c, 0xf1, + 0x7d, 0x5b, 0x1d, 0x43, 0x3e, 0xbc, 0x82, 0x50, 0x6b, 0xe8, 0xb9, 0x6b, 0x5b, 0xc3, 0x60, 0x8c, + 0xcc, 0xb8, 0x93, 0x16, 0x97, 0x68, 0x0c, 0xbd, 0x79, 0x24, 0x24, 0x9b, 0x71, 0xb2, 0xc8, 0xcf, + 0xd0, 0xd6, 0xbb, 0x3c, 0x2d, 0xef, 0x32, 0xce, 0x51, 0xf6, 0x20, 0x5b, 0xf3, 0xb2, 0x01, 0x3d, + 0x01, 0x27, 0x64, 0x9c, 0x06, 0x71, 0x24, 0xa4, 0xd7, 0xd9, 0xdb, 0xd8, 0x6f, 0xe0, 0x96, 0x32, + 0x9c, 0x46, 0x42, 0xa2, 0xa7, 0x00, 0xd6, 0xb9, 0x88, 0xa4, 0xd7, 0xd5, 0xf9, 0x73, 0x8c, 0x77, + 0x11, 0x49, 0xff, 0x28, 0xaf, 0xc5, 0x89, 0x24, 0x32, 0x13, 0xe8, 0x05, 0x34, 0xf4, 0x18, 0xb6, + 0xa3, 0xe2, 0xf1, 0xba, 0xf2, 0x52, 0x50, 0x81, 0x0d, 0xce, 0xdf, 0x85, 0xfa, 0x3b, 0xc2, 0xaf, + 0xd5, 0x88, 0xe2, 0x54, 0x50, 0x69, 0x3b, 0xc4, 0x2c, 0xfc, 0x0c, 0xc0, 0x70, 0x06, 0x7c, 0x26, + 0x50, 0x1f, 0x1a, 0x82, 0xca, 0x2c, 0x9f, 0x43, 0x3b, 0xeb, 0x36, 0x37, 0xd9, 0x19, 0x57, 0xb0, + 0x81, 0xa2, 0x7d, 0xa8, 0x2f, 0x08, 0xbf, 0xb6, 0xb3, 0x07, 0x95, 0x29, 0x2a, 0xf2, 0xb8, 0x82, + 0x35, 0xe2, 0xd8, 0x81, 0x4d, 0xc2, 0x67, 0xaa, 0x00, 0xfc, 0x3f, 0x6b, 0xd0, 0x9e, 0xe8, 0xe6, + 0xb1, 0xc9, 0x7e, 0x03, 0x6e, 0xde, 0x62, 0xaa, 0x40, 0xaa, 0xeb, 0x7a, 0xc7, 0x10, 0x4c, 0xef, + 0x88, 0xe5, 0xf7, 0xba, 0xde, 0xa9, 0xfd, 0x8b, 0xde, 0x41, 0x50, 0x4f, 0x19, 0x97, 0xb6, 0x47, + 0xf4, 0xf7, 0x7d, 0x95, 0xe7, 0x67, 0x5b, 0x53, 0xe5, 0xf6, 0x54, 0xb6, 0xca, 0xcb, 0x6a, 0xb6, + 0x56, 0xd4, 0x5c, 0x53, 0x97, 0xce, 0x3f, 0xae, 0xcb, 0x52, 0x35, 0x41, 0xb9, 0x9a, 0x94, 0x9e, + 0xe6, 0x40, 0x0f, 0xd0, 0xb3, 0x28, 0xc0, 0x7f, 0xd4, 0x33, 0xca, 0xe5, 0x7c, 0x50, 0x95, 0xde, + 0x43, 0xf3, 0x2a, 0x5d, 0x66, 0xbf, 0x56, 0xc8, 0xfe, 0x36, 0x34, 0xd4, 0xbd, 0xcc, 0x28, 0x6c, + 0x60, 0xb3, 0xf0, 0x3b, 0xe0, 0x0e, 0x19, 0xa7, 0x98, 0xfe, 0x96, 0x51, 0x21, 0xfd, 0xaf, 0xa1, + 0x6d, 0x96, 0x22, 0x65, 0x89, 0x79, 0x89, 0x0d, 0xa9, 0x5a, 0x24, 0x35, 0xa1, 0xfe, 0x81, 0x45, + 0x53, 0xff, 0xaf, 0x1a, 0xb4, 0x26, 0x21, 0x4d, 0x08, 0x8f, 0x98, 0x8a, 0x99, 0x90, 0x85, 0x29, + 0x36, 0x07, 0xeb, 0x6f, 0x74, 0x04, 0x9d, 0x7c, 0x00, 0x1a, 0x7d, 0x6a, 0x9f, 0xeb, 0x04, 0xdc, + 0x0e, 0x8b, 0x6f, 0xc5, 0x97, 0xe0, 0x26, 0xd9, 0xc2, 0x8e, 0xc5, 0xfc, 0xe8, 0x90, 0x64, 0x0b, + 0xc3, 0x51, 0x33, 0xda, 0x3e, 0x1b, 0x79, 0x84, 0xfa, 0xe7, 0xb4, 0xc1, 0x6d, 0x51, 0x6c, 0x15, + 0x1b, 0xc1, 0xd8, 0xf2, 0xf9, 0xac, 0x22, 0x18, 0x8e, 0x50, 0xcf, 0xd5, 0x2d, 0xe1, 0x8b, 0x2c, + 0x0d, 0x04, 0x0d, 0x59, 0x32, 0x15, 0x5e, 0x53, 0x63, 0x3a, 0xc6, 0x3a, 0x31, 0x46, 0xf5, 0x83, + 0x73, 0x49, 0x93, 0x70, 0xae, 0xb4, 0x5c, 0x22, 0x4d, 0x65, 0xf7, 0x96, 0x8e, 0x1c, 0xfc, 0x1a, + 0x3c, 0x91, 0x92, 0xdb, 0x24, 0x88, 0x59, 0x48, 0xe2, 0xe0, 0x96, 0xf1, 0x6b, 0x7d, 0x83, 0x2c, + 0xc9, 0xab, 0xfc, 0x91, 0xf6, 0x9f, 0x2a, 0xf7, 0xaf, 0xda, 0x3b, 0x54, 0x4e, 0x7f, 0x00, 0x4e, + 0x9e, 0x70, 0x81, 0x5e, 0x81, 0x23, 0xf2, 0x85, 0x7e, 0x43, 0xdd, 0xfe, 0x17, 0x2b, 0xf7, 0xb6, + 0x6e, 0x7c, 0x0f, 0x3c, 0x78, 0x91, 0xcf, 0x28, 0xdd, 0xee, 0x5b, 0xe0, 0x4e, 0x3e, 0x9e, 0x0d, + 0x83, 0xe1, 0xe9, 0xc9, 0xe8, 0xec, 0xa2, 0x57, 0x41, 0x3d, 0x68, 0x0f, 0x8a, 0x96, 0xea, 0xc1, + 0x49, 0xde, 0x04, 0x25, 0xc2, 0x64, 0x84, 0x3f, 0x8c, 0x70, 0x91, 0x60, 0x2d, 0x55, 0xe4, 0xc1, + 0xb6, 0xb1, 0xbc, 0x1d, 0x9d, 0x8d, 0xf0, 0xc9, 0xd2, 0x53, 0x3b, 0xf8, 0x0a, 0x36, 0xed, 0xbb, + 0x84, 0x1c, 0x68, 0xbc, 0x3f, 0x1b, 0xe0, 0x8f, 0xbd, 0x0a, 0xea, 0x80, 0x33, 0xb9, 0xc0, 0xa3, + 0xc1, 0xbb, 0x93, 0xb3, 
0xb7, 0xbd, 0xea, 0x65, 0x53, 0xff, 0x12, 0x7f, 0xff, 0x77, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x75, 0x59, 0xf4, 0x03, 0x4e, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/benchmark/grpc_testing/messages.pb.go b/vendor/google.golang.org/grpc/benchmark/grpc_testing/messages.pb.go new file mode 100644 index 0000000..9354eea --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/grpc_testing/messages.pb.go @@ -0,0 +1,731 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: messages.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of payload that should be returned. +type PayloadType int32 + +const ( + // Compressable text format. + PayloadType_COMPRESSABLE PayloadType = 0 + // Uncompressable binary format. + PayloadType_UNCOMPRESSABLE PayloadType = 1 + // Randomly chosen from all other formats defined in this enum. + PayloadType_RANDOM PayloadType = 2 +) + +var PayloadType_name = map[int32]string{ + 0: "COMPRESSABLE", + 1: "UNCOMPRESSABLE", + 2: "RANDOM", +} +var PayloadType_value = map[string]int32{ + "COMPRESSABLE": 0, + "UNCOMPRESSABLE": 1, + "RANDOM": 2, +} + +func (x PayloadType) String() string { + return proto.EnumName(PayloadType_name, int32(x)) +} +func (PayloadType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{0} +} + +// Compression algorithms +type CompressionType int32 + +const ( + // No compression + CompressionType_NONE CompressionType = 0 + CompressionType_GZIP CompressionType = 1 + CompressionType_DEFLATE CompressionType = 2 +) + +var CompressionType_name = map[int32]string{ + 0: "NONE", + 1: "GZIP", + 2: "DEFLATE", +} +var CompressionType_value = map[string]int32{ + "NONE": 0, + "GZIP": 1, + "DEFLATE": 2, +} + +func (x CompressionType) String() string { + return proto.EnumName(CompressionType_name, int32(x)) +} +func (CompressionType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{1} +} + +// A block of data, to simply increase gRPC message size. +type Payload struct { + // The type of data in body. + Type PayloadType `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.testing.PayloadType" json:"type,omitempty"` + // Primary contents of payload. 
+ Body []byte `protobuf:"bytes,2,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Payload) Reset() { *m = Payload{} } +func (m *Payload) String() string { return proto.CompactTextString(m) } +func (*Payload) ProtoMessage() {} +func (*Payload) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{0} +} +func (m *Payload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Payload.Unmarshal(m, b) +} +func (m *Payload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Payload.Marshal(b, m, deterministic) +} +func (dst *Payload) XXX_Merge(src proto.Message) { + xxx_messageInfo_Payload.Merge(dst, src) +} +func (m *Payload) XXX_Size() int { + return xxx_messageInfo_Payload.Size(m) +} +func (m *Payload) XXX_DiscardUnknown() { + xxx_messageInfo_Payload.DiscardUnknown(m) +} + +var xxx_messageInfo_Payload proto.InternalMessageInfo + +func (m *Payload) GetType() PayloadType { + if m != nil { + return m.Type + } + return PayloadType_COMPRESSABLE +} + +func (m *Payload) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// A protobuf representation for grpc status. This is used by test +// clients to specify a status that the server should attempt to return. +type EchoStatus struct { + Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EchoStatus) Reset() { *m = EchoStatus{} } +func (m *EchoStatus) String() string { return proto.CompactTextString(m) } +func (*EchoStatus) ProtoMessage() {} +func (*EchoStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{1} +} +func (m *EchoStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EchoStatus.Unmarshal(m, b) +} +func (m *EchoStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EchoStatus.Marshal(b, m, deterministic) +} +func (dst *EchoStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_EchoStatus.Merge(dst, src) +} +func (m *EchoStatus) XXX_Size() int { + return xxx_messageInfo_EchoStatus.Size(m) +} +func (m *EchoStatus) XXX_DiscardUnknown() { + xxx_messageInfo_EchoStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_EchoStatus proto.InternalMessageInfo + +func (m *EchoStatus) GetCode() int32 { + if m != nil { + return m.Code + } + return 0 +} + +func (m *EchoStatus) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// Unary request. +type SimpleRequest struct { + // Desired payload type in the response from the server. + // If response_type is RANDOM, server randomly chooses one from other formats. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Desired payload size in the response from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + ResponseSize int32 `protobuf:"varint,2,opt,name=response_size,json=responseSize,proto3" json:"response_size,omitempty"` + // Optional input payload sent along with the request. 
+ Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + // Whether SimpleResponse should include username. + FillUsername bool `protobuf:"varint,4,opt,name=fill_username,json=fillUsername,proto3" json:"fill_username,omitempty"` + // Whether SimpleResponse should include OAuth scope. + FillOauthScope bool `protobuf:"varint,5,opt,name=fill_oauth_scope,json=fillOauthScope,proto3" json:"fill_oauth_scope,omitempty"` + // Compression algorithm to be used by the server for the response (stream) + ResponseCompression CompressionType `protobuf:"varint,6,opt,name=response_compression,json=responseCompression,proto3,enum=grpc.testing.CompressionType" json:"response_compression,omitempty"` + // Whether server should return a given status + ResponseStatus *EchoStatus `protobuf:"bytes,7,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{2} +} +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (dst *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(dst, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +func (m *SimpleRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *SimpleRequest) GetResponseSize() int32 { + if m != nil { + return m.ResponseSize + } + return 0 +} + +func (m *SimpleRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleRequest) GetFillUsername() bool { + if m != nil { + return m.FillUsername + } + return false +} + +func (m *SimpleRequest) GetFillOauthScope() bool { + if m != nil { + return m.FillOauthScope + } + return false +} + +func (m *SimpleRequest) GetResponseCompression() CompressionType { + if m != nil { + return m.ResponseCompression + } + return CompressionType_NONE +} + +func (m *SimpleRequest) GetResponseStatus() *EchoStatus { + if m != nil { + return m.ResponseStatus + } + return nil +} + +// Unary response, as configured by the request. +type SimpleResponse struct { + // Payload to increase message size. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + // The user the request came from, for verifying authentication was + // successful when the client expected it. + Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"` + // OAuth scope. 
+ OauthScope string `protobuf:"bytes,3,opt,name=oauth_scope,json=oauthScope,proto3" json:"oauth_scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{3} +} +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(dst, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +func (m *SimpleResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleResponse) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *SimpleResponse) GetOauthScope() string { + if m != nil { + return m.OauthScope + } + return "" +} + +// Client-streaming request. +type StreamingInputCallRequest struct { + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallRequest) Reset() { *m = StreamingInputCallRequest{} } +func (m *StreamingInputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallRequest) ProtoMessage() {} +func (*StreamingInputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{4} +} +func (m *StreamingInputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallRequest.Unmarshal(m, b) +} +func (m *StreamingInputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallRequest.Merge(dst, src) +} +func (m *StreamingInputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallRequest.Size(m) +} +func (m *StreamingInputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallRequest proto.InternalMessageInfo + +func (m *StreamingInputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +// Client-streaming response. +type StreamingInputCallResponse struct { + // Aggregated size of payloads received from the client. 
+ AggregatedPayloadSize int32 `protobuf:"varint,1,opt,name=aggregated_payload_size,json=aggregatedPayloadSize,proto3" json:"aggregated_payload_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallResponse) Reset() { *m = StreamingInputCallResponse{} } +func (m *StreamingInputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallResponse) ProtoMessage() {} +func (*StreamingInputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{5} +} +func (m *StreamingInputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallResponse.Unmarshal(m, b) +} +func (m *StreamingInputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallResponse.Merge(dst, src) +} +func (m *StreamingInputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallResponse.Size(m) +} +func (m *StreamingInputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallResponse proto.InternalMessageInfo + +func (m *StreamingInputCallResponse) GetAggregatedPayloadSize() int32 { + if m != nil { + return m.AggregatedPayloadSize + } + return 0 +} + +// Configuration for a particular response. +type ResponseParameters struct { + // Desired payload sizes in responses from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + Size int32 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` + // Desired interval between consecutive responses in the response stream in + // microseconds. + IntervalUs int32 `protobuf:"varint,2,opt,name=interval_us,json=intervalUs,proto3" json:"interval_us,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponseParameters) Reset() { *m = ResponseParameters{} } +func (m *ResponseParameters) String() string { return proto.CompactTextString(m) } +func (*ResponseParameters) ProtoMessage() {} +func (*ResponseParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{6} +} +func (m *ResponseParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponseParameters.Unmarshal(m, b) +} +func (m *ResponseParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponseParameters.Marshal(b, m, deterministic) +} +func (dst *ResponseParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponseParameters.Merge(dst, src) +} +func (m *ResponseParameters) XXX_Size() int { + return xxx_messageInfo_ResponseParameters.Size(m) +} +func (m *ResponseParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ResponseParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponseParameters proto.InternalMessageInfo + +func (m *ResponseParameters) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *ResponseParameters) GetIntervalUs() int32 { + if m != nil { + return m.IntervalUs + } + return 0 +} + +// Server-streaming request. +type StreamingOutputCallRequest struct { + // Desired payload type in the response from the server. 
+ // If response_type is RANDOM, the payload from each response in the stream + // might be of different types. This is to simulate a mixed type of payload + // stream. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Configuration for each expected response message. + ResponseParameters []*ResponseParameters `protobuf:"bytes,2,rep,name=response_parameters,json=responseParameters,proto3" json:"response_parameters,omitempty"` + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + // Compression algorithm to be used by the server for the response (stream) + ResponseCompression CompressionType `protobuf:"varint,6,opt,name=response_compression,json=responseCompression,proto3,enum=grpc.testing.CompressionType" json:"response_compression,omitempty"` + // Whether server should return a given status + ResponseStatus *EchoStatus `protobuf:"bytes,7,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallRequest) Reset() { *m = StreamingOutputCallRequest{} } +func (m *StreamingOutputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallRequest) ProtoMessage() {} +func (*StreamingOutputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{7} +} +func (m *StreamingOutputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallRequest.Unmarshal(m, b) +} +func (m *StreamingOutputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallRequest.Merge(dst, src) +} +func (m *StreamingOutputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallRequest.Size(m) +} +func (m *StreamingOutputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallRequest proto.InternalMessageInfo + +func (m *StreamingOutputCallRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *StreamingOutputCallRequest) GetResponseParameters() []*ResponseParameters { + if m != nil { + return m.ResponseParameters + } + return nil +} + +func (m *StreamingOutputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *StreamingOutputCallRequest) GetResponseCompression() CompressionType { + if m != nil { + return m.ResponseCompression + } + return CompressionType_NONE +} + +func (m *StreamingOutputCallRequest) GetResponseStatus() *EchoStatus { + if m != nil { + return m.ResponseStatus + } + return nil +} + +// Server-streaming response, as configured by the request and parameters. +type StreamingOutputCallResponse struct { + // Payload to increase response size. 
+ Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallResponse) Reset() { *m = StreamingOutputCallResponse{} } +func (m *StreamingOutputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallResponse) ProtoMessage() {} +func (*StreamingOutputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{8} +} +func (m *StreamingOutputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallResponse.Unmarshal(m, b) +} +func (m *StreamingOutputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallResponse.Merge(dst, src) +} +func (m *StreamingOutputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallResponse.Size(m) +} +func (m *StreamingOutputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallResponse proto.InternalMessageInfo + +func (m *StreamingOutputCallResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +// For reconnect interop test only. +// Client tells server what reconnection parameters it used. +type ReconnectParams struct { + MaxReconnectBackoffMs int32 `protobuf:"varint,1,opt,name=max_reconnect_backoff_ms,json=maxReconnectBackoffMs,proto3" json:"max_reconnect_backoff_ms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReconnectParams) Reset() { *m = ReconnectParams{} } +func (m *ReconnectParams) String() string { return proto.CompactTextString(m) } +func (*ReconnectParams) ProtoMessage() {} +func (*ReconnectParams) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{9} +} +func (m *ReconnectParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReconnectParams.Unmarshal(m, b) +} +func (m *ReconnectParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReconnectParams.Marshal(b, m, deterministic) +} +func (dst *ReconnectParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReconnectParams.Merge(dst, src) +} +func (m *ReconnectParams) XXX_Size() int { + return xxx_messageInfo_ReconnectParams.Size(m) +} +func (m *ReconnectParams) XXX_DiscardUnknown() { + xxx_messageInfo_ReconnectParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ReconnectParams proto.InternalMessageInfo + +func (m *ReconnectParams) GetMaxReconnectBackoffMs() int32 { + if m != nil { + return m.MaxReconnectBackoffMs + } + return 0 +} + +// For reconnect interop test only. +// Server tells client whether its reconnects are following the spec and the +// reconnect backoffs it saw. 
+type ReconnectInfo struct { + Passed bool `protobuf:"varint,1,opt,name=passed,proto3" json:"passed,omitempty"` + BackoffMs []int32 `protobuf:"varint,2,rep,packed,name=backoff_ms,json=backoffMs,proto3" json:"backoff_ms,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ReconnectInfo) Reset() { *m = ReconnectInfo{} } +func (m *ReconnectInfo) String() string { return proto.CompactTextString(m) } +func (*ReconnectInfo) ProtoMessage() {} +func (*ReconnectInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_messages_5c70222ad96bf232, []int{10} +} +func (m *ReconnectInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ReconnectInfo.Unmarshal(m, b) +} +func (m *ReconnectInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ReconnectInfo.Marshal(b, m, deterministic) +} +func (dst *ReconnectInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_ReconnectInfo.Merge(dst, src) +} +func (m *ReconnectInfo) XXX_Size() int { + return xxx_messageInfo_ReconnectInfo.Size(m) +} +func (m *ReconnectInfo) XXX_DiscardUnknown() { + xxx_messageInfo_ReconnectInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_ReconnectInfo proto.InternalMessageInfo + +func (m *ReconnectInfo) GetPassed() bool { + if m != nil { + return m.Passed + } + return false +} + +func (m *ReconnectInfo) GetBackoffMs() []int32 { + if m != nil { + return m.BackoffMs + } + return nil +} + +func init() { + proto.RegisterType((*Payload)(nil), "grpc.testing.Payload") + proto.RegisterType((*EchoStatus)(nil), "grpc.testing.EchoStatus") + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") + proto.RegisterType((*StreamingInputCallRequest)(nil), "grpc.testing.StreamingInputCallRequest") + proto.RegisterType((*StreamingInputCallResponse)(nil), "grpc.testing.StreamingInputCallResponse") + proto.RegisterType((*ResponseParameters)(nil), "grpc.testing.ResponseParameters") + proto.RegisterType((*StreamingOutputCallRequest)(nil), "grpc.testing.StreamingOutputCallRequest") + proto.RegisterType((*StreamingOutputCallResponse)(nil), "grpc.testing.StreamingOutputCallResponse") + proto.RegisterType((*ReconnectParams)(nil), "grpc.testing.ReconnectParams") + proto.RegisterType((*ReconnectInfo)(nil), "grpc.testing.ReconnectInfo") + proto.RegisterEnum("grpc.testing.PayloadType", PayloadType_name, PayloadType_value) + proto.RegisterEnum("grpc.testing.CompressionType", CompressionType_name, CompressionType_value) +} + +func init() { proto.RegisterFile("messages.proto", fileDescriptor_messages_5c70222ad96bf232) } + +var fileDescriptor_messages_5c70222ad96bf232 = []byte{ + // 652 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x55, 0x4d, 0x6f, 0xd3, 0x40, + 0x10, 0xc5, 0xf9, 0xee, 0x24, 0x4d, 0xa3, 0x85, 0x82, 0x5b, 0x54, 0x11, 0x99, 0x4b, 0x54, 0x89, + 0x20, 0x05, 0x09, 0x24, 0x0e, 0xa0, 0xb4, 0x4d, 0x51, 0x50, 0x9a, 0x84, 0x75, 0x7b, 0xe1, 0x62, + 0x6d, 0x9c, 0x8d, 0x6b, 0x11, 0x7b, 0x8d, 0x77, 0x8d, 0x9a, 0x1e, 0xb8, 0xf3, 0x83, 0xb9, 0xa3, + 0x5d, 0x7f, 0xc4, 0x69, 0x7b, 0x68, 0xe1, 0xc2, 0x6d, 0xf7, 0xed, 0x9b, 0x97, 0x79, 0x33, 0xcf, + 0x0a, 0x34, 0x3d, 0xca, 0x39, 0x71, 0x28, 0xef, 0x06, 0x21, 0x13, 0x0c, 0x35, 0x9c, 0x30, 0xb0, + 0xbb, 0x82, 0x72, 0xe1, 0xfa, 0x8e, 0x31, 0x82, 0xea, 0x94, 0xac, 0x96, 0x8c, 0xcc, 0xd1, 0x2b, + 0x28, 0x89, 0x55, 0x40, 0x75, 0xad, 0xad, 0x75, 
0x9a, 0xbd, 0xbd, 0x6e, 0x9e, 0xd7, 0x4d, 0x48, + 0xe7, 0xab, 0x80, 0x62, 0x45, 0x43, 0x08, 0x4a, 0x33, 0x36, 0x5f, 0xe9, 0x85, 0xb6, 0xd6, 0x69, + 0x60, 0x75, 0x36, 0xde, 0x03, 0x0c, 0xec, 0x4b, 0x66, 0x0a, 0x22, 0x22, 0x2e, 0x19, 0x36, 0x9b, + 0xc7, 0x82, 0x65, 0xac, 0xce, 0x48, 0x87, 0x6a, 0xd2, 0x8f, 0x2a, 0xdc, 0xc2, 0xe9, 0xd5, 0xf8, + 0x55, 0x84, 0x6d, 0xd3, 0xf5, 0x82, 0x25, 0xc5, 0xf4, 0x7b, 0x44, 0xb9, 0x40, 0x1f, 0x60, 0x3b, + 0xa4, 0x3c, 0x60, 0x3e, 0xa7, 0xd6, 0xfd, 0x3a, 0x6b, 0xa4, 0x7c, 0x79, 0x43, 0x2f, 0x73, 0xf5, + 0xdc, 0xbd, 0x8e, 0x7f, 0xb1, 0xbc, 0x26, 0x99, 0xee, 0x35, 0x45, 0xaf, 0xa1, 0x1a, 0xc4, 0x0a, + 0x7a, 0xb1, 0xad, 0x75, 0xea, 0xbd, 0xdd, 0x3b, 0xe5, 0x71, 0xca, 0x92, 0xaa, 0x0b, 0x77, 0xb9, + 0xb4, 0x22, 0x4e, 0x43, 0x9f, 0x78, 0x54, 0x2f, 0xb5, 0xb5, 0x4e, 0x0d, 0x37, 0x24, 0x78, 0x91, + 0x60, 0xa8, 0x03, 0x2d, 0x45, 0x62, 0x24, 0x12, 0x97, 0x16, 0xb7, 0x59, 0x40, 0xf5, 0xb2, 0xe2, + 0x35, 0x25, 0x3e, 0x91, 0xb0, 0x29, 0x51, 0x34, 0x85, 0x27, 0x59, 0x93, 0x36, 0xf3, 0x82, 0x90, + 0x72, 0xee, 0x32, 0x5f, 0xaf, 0x28, 0xaf, 0x07, 0x9b, 0xcd, 0x1c, 0xaf, 0x09, 0xca, 0xef, 0xe3, + 0xb4, 0x34, 0xf7, 0x80, 0xfa, 0xb0, 0xb3, 0xb6, 0xad, 0x36, 0xa1, 0x57, 0x95, 0x33, 0x7d, 0x53, + 0x6c, 0xbd, 0x29, 0xdc, 0xcc, 0x46, 0xa2, 0xee, 0xc6, 0x4f, 0x68, 0xa6, 0xab, 0x88, 0xf1, 0xfc, + 0x98, 0xb4, 0x7b, 0x8d, 0x69, 0x1f, 0x6a, 0xd9, 0x84, 0xe2, 0x4d, 0x67, 0x77, 0xf4, 0x02, 0xea, + 0xf9, 0xc1, 0x14, 0xd5, 0x33, 0xb0, 0x6c, 0x28, 0xc6, 0x08, 0xf6, 0x4c, 0x11, 0x52, 0xe2, 0xb9, + 0xbe, 0x33, 0xf4, 0x83, 0x48, 0x1c, 0x93, 0xe5, 0x32, 0x8d, 0xc5, 0x43, 0x5b, 0x31, 0xce, 0x61, + 0xff, 0x2e, 0xb5, 0xc4, 0xd9, 0x5b, 0x78, 0x46, 0x1c, 0x27, 0xa4, 0x0e, 0x11, 0x74, 0x6e, 0x25, + 0x35, 0x71, 0x5e, 0xe2, 0xe0, 0xee, 0xae, 0x9f, 0x13, 0x69, 0x19, 0x1c, 0x63, 0x08, 0x28, 0xd5, + 0x98, 0x92, 0x90, 0x78, 0x54, 0xd0, 0x50, 0x65, 0x3e, 0x57, 0xaa, 0xce, 0xd2, 0xae, 0xeb, 0x0b, + 0x1a, 0xfe, 0x20, 0x32, 0x35, 0x49, 0x0a, 0x21, 0x85, 0x2e, 0xb8, 0xf1, 0xbb, 0x90, 0xeb, 0x70, + 0x12, 0x89, 0x1b, 0x86, 0xff, 0xf5, 0x3b, 0xf8, 0x02, 0x59, 0x4e, 0xac, 0x20, 0x6b, 0x55, 0x2f, + 0xb4, 0x8b, 0x9d, 0x7a, 0xaf, 0xbd, 0xa9, 0x72, 0xdb, 0x12, 0x46, 0xe1, 0x6d, 0x9b, 0x0f, 0xfe, + 0x6a, 0xfe, 0xcb, 0x98, 0x8f, 0xe1, 0xf9, 0x9d, 0x63, 0xff, 0xcb, 0xcc, 0x1b, 0x9f, 0x61, 0x07, + 0x53, 0x9b, 0xf9, 0x3e, 0xb5, 0x85, 0x1a, 0x16, 0x47, 0xef, 0x40, 0xf7, 0xc8, 0x95, 0x15, 0xa6, + 0xb0, 0x35, 0x23, 0xf6, 0x37, 0xb6, 0x58, 0x58, 0x1e, 0x4f, 0xe3, 0xe5, 0x91, 0xab, 0xac, 0xea, + 0x28, 0x7e, 0x3d, 0xe3, 0xc6, 0x29, 0x6c, 0x67, 0xe8, 0xd0, 0x5f, 0x30, 0xf4, 0x14, 0x2a, 0x01, + 0xe1, 0x9c, 0xc6, 0xcd, 0xd4, 0x70, 0x72, 0x43, 0x07, 0x00, 0x39, 0x4d, 0xb9, 0xd4, 0x32, 0xde, + 0x9a, 0xa5, 0x3a, 0x87, 0x1f, 0xa1, 0x9e, 0x4b, 0x06, 0x6a, 0x41, 0xe3, 0x78, 0x72, 0x36, 0xc5, + 0x03, 0xd3, 0xec, 0x1f, 0x8d, 0x06, 0xad, 0x47, 0x08, 0x41, 0xf3, 0x62, 0xbc, 0x81, 0x69, 0x08, + 0xa0, 0x82, 0xfb, 0xe3, 0x93, 0xc9, 0x59, 0xab, 0x70, 0xd8, 0x83, 0x9d, 0x1b, 0xfb, 0x40, 0x35, + 0x28, 0x8d, 0x27, 0x63, 0x59, 0x5c, 0x83, 0xd2, 0xa7, 0xaf, 0xc3, 0x69, 0x4b, 0x43, 0x75, 0xa8, + 0x9e, 0x0c, 0x4e, 0x47, 0xfd, 0xf3, 0x41, 0xab, 0x30, 0xab, 0xa8, 0xbf, 0x9a, 0x37, 0x7f, 0x02, + 0x00, 0x00, 0xff, 0xff, 0xc2, 0x6a, 0xce, 0x1e, 0x7c, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/benchmark/grpc_testing/payloads.pb.go b/vendor/google.golang.org/grpc/benchmark/grpc_testing/payloads.pb.go new file mode 100644 index 0000000..f7f9cf8 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/grpc_testing/payloads.pb.go @@ -0,0 
+1,348 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: payloads.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ByteBufferParams struct { + ReqSize int32 `protobuf:"varint,1,opt,name=req_size,json=reqSize,proto3" json:"req_size,omitempty"` + RespSize int32 `protobuf:"varint,2,opt,name=resp_size,json=respSize,proto3" json:"resp_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ByteBufferParams) Reset() { *m = ByteBufferParams{} } +func (m *ByteBufferParams) String() string { return proto.CompactTextString(m) } +func (*ByteBufferParams) ProtoMessage() {} +func (*ByteBufferParams) Descriptor() ([]byte, []int) { + return fileDescriptor_payloads_3abc71de35f06c83, []int{0} +} +func (m *ByteBufferParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ByteBufferParams.Unmarshal(m, b) +} +func (m *ByteBufferParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ByteBufferParams.Marshal(b, m, deterministic) +} +func (dst *ByteBufferParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ByteBufferParams.Merge(dst, src) +} +func (m *ByteBufferParams) XXX_Size() int { + return xxx_messageInfo_ByteBufferParams.Size(m) +} +func (m *ByteBufferParams) XXX_DiscardUnknown() { + xxx_messageInfo_ByteBufferParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ByteBufferParams proto.InternalMessageInfo + +func (m *ByteBufferParams) GetReqSize() int32 { + if m != nil { + return m.ReqSize + } + return 0 +} + +func (m *ByteBufferParams) GetRespSize() int32 { + if m != nil { + return m.RespSize + } + return 0 +} + +type SimpleProtoParams struct { + ReqSize int32 `protobuf:"varint,1,opt,name=req_size,json=reqSize,proto3" json:"req_size,omitempty"` + RespSize int32 `protobuf:"varint,2,opt,name=resp_size,json=respSize,proto3" json:"resp_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleProtoParams) Reset() { *m = SimpleProtoParams{} } +func (m *SimpleProtoParams) String() string { return proto.CompactTextString(m) } +func (*SimpleProtoParams) ProtoMessage() {} +func (*SimpleProtoParams) Descriptor() ([]byte, []int) { + return fileDescriptor_payloads_3abc71de35f06c83, []int{1} +} +func (m *SimpleProtoParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleProtoParams.Unmarshal(m, b) +} +func (m *SimpleProtoParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleProtoParams.Marshal(b, m, deterministic) +} +func (dst *SimpleProtoParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleProtoParams.Merge(dst, src) +} +func (m *SimpleProtoParams) XXX_Size() int { + return xxx_messageInfo_SimpleProtoParams.Size(m) +} +func (m *SimpleProtoParams) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleProtoParams.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleProtoParams 
proto.InternalMessageInfo + +func (m *SimpleProtoParams) GetReqSize() int32 { + if m != nil { + return m.ReqSize + } + return 0 +} + +func (m *SimpleProtoParams) GetRespSize() int32 { + if m != nil { + return m.RespSize + } + return 0 +} + +type ComplexProtoParams struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ComplexProtoParams) Reset() { *m = ComplexProtoParams{} } +func (m *ComplexProtoParams) String() string { return proto.CompactTextString(m) } +func (*ComplexProtoParams) ProtoMessage() {} +func (*ComplexProtoParams) Descriptor() ([]byte, []int) { + return fileDescriptor_payloads_3abc71de35f06c83, []int{2} +} +func (m *ComplexProtoParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ComplexProtoParams.Unmarshal(m, b) +} +func (m *ComplexProtoParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ComplexProtoParams.Marshal(b, m, deterministic) +} +func (dst *ComplexProtoParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_ComplexProtoParams.Merge(dst, src) +} +func (m *ComplexProtoParams) XXX_Size() int { + return xxx_messageInfo_ComplexProtoParams.Size(m) +} +func (m *ComplexProtoParams) XXX_DiscardUnknown() { + xxx_messageInfo_ComplexProtoParams.DiscardUnknown(m) +} + +var xxx_messageInfo_ComplexProtoParams proto.InternalMessageInfo + +type PayloadConfig struct { + // Types that are valid to be assigned to Payload: + // *PayloadConfig_BytebufParams + // *PayloadConfig_SimpleParams + // *PayloadConfig_ComplexParams + Payload isPayloadConfig_Payload `protobuf_oneof:"payload"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PayloadConfig) Reset() { *m = PayloadConfig{} } +func (m *PayloadConfig) String() string { return proto.CompactTextString(m) } +func (*PayloadConfig) ProtoMessage() {} +func (*PayloadConfig) Descriptor() ([]byte, []int) { + return fileDescriptor_payloads_3abc71de35f06c83, []int{3} +} +func (m *PayloadConfig) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PayloadConfig.Unmarshal(m, b) +} +func (m *PayloadConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PayloadConfig.Marshal(b, m, deterministic) +} +func (dst *PayloadConfig) XXX_Merge(src proto.Message) { + xxx_messageInfo_PayloadConfig.Merge(dst, src) +} +func (m *PayloadConfig) XXX_Size() int { + return xxx_messageInfo_PayloadConfig.Size(m) +} +func (m *PayloadConfig) XXX_DiscardUnknown() { + xxx_messageInfo_PayloadConfig.DiscardUnknown(m) +} + +var xxx_messageInfo_PayloadConfig proto.InternalMessageInfo + +type isPayloadConfig_Payload interface { + isPayloadConfig_Payload() +} + +type PayloadConfig_BytebufParams struct { + BytebufParams *ByteBufferParams `protobuf:"bytes,1,opt,name=bytebuf_params,json=bytebufParams,proto3,oneof"` +} + +type PayloadConfig_SimpleParams struct { + SimpleParams *SimpleProtoParams `protobuf:"bytes,2,opt,name=simple_params,json=simpleParams,proto3,oneof"` +} + +type PayloadConfig_ComplexParams struct { + ComplexParams *ComplexProtoParams `protobuf:"bytes,3,opt,name=complex_params,json=complexParams,proto3,oneof"` +} + +func (*PayloadConfig_BytebufParams) isPayloadConfig_Payload() {} + +func (*PayloadConfig_SimpleParams) isPayloadConfig_Payload() {} + +func (*PayloadConfig_ComplexParams) isPayloadConfig_Payload() {} + +func (m *PayloadConfig) GetPayload() isPayloadConfig_Payload { + if m != nil { + return m.Payload + 
} + return nil +} + +func (m *PayloadConfig) GetBytebufParams() *ByteBufferParams { + if x, ok := m.GetPayload().(*PayloadConfig_BytebufParams); ok { + return x.BytebufParams + } + return nil +} + +func (m *PayloadConfig) GetSimpleParams() *SimpleProtoParams { + if x, ok := m.GetPayload().(*PayloadConfig_SimpleParams); ok { + return x.SimpleParams + } + return nil +} + +func (m *PayloadConfig) GetComplexParams() *ComplexProtoParams { + if x, ok := m.GetPayload().(*PayloadConfig_ComplexParams); ok { + return x.ComplexParams + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*PayloadConfig) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _PayloadConfig_OneofMarshaler, _PayloadConfig_OneofUnmarshaler, _PayloadConfig_OneofSizer, []interface{}{ + (*PayloadConfig_BytebufParams)(nil), + (*PayloadConfig_SimpleParams)(nil), + (*PayloadConfig_ComplexParams)(nil), + } +} + +func _PayloadConfig_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*PayloadConfig) + // payload + switch x := m.Payload.(type) { + case *PayloadConfig_BytebufParams: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.BytebufParams); err != nil { + return err + } + case *PayloadConfig_SimpleParams: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SimpleParams); err != nil { + return err + } + case *PayloadConfig_ComplexParams: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ComplexParams); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("PayloadConfig.Payload has unexpected type %T", x) + } + return nil +} + +func _PayloadConfig_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*PayloadConfig) + switch tag { + case 1: // payload.bytebuf_params + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ByteBufferParams) + err := b.DecodeMessage(msg) + m.Payload = &PayloadConfig_BytebufParams{msg} + return true, err + case 2: // payload.simple_params + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SimpleProtoParams) + err := b.DecodeMessage(msg) + m.Payload = &PayloadConfig_SimpleParams{msg} + return true, err + case 3: // payload.complex_params + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ComplexProtoParams) + err := b.DecodeMessage(msg) + m.Payload = &PayloadConfig_ComplexParams{msg} + return true, err + default: + return false, nil + } +} + +func _PayloadConfig_OneofSizer(msg proto.Message) (n int) { + m := msg.(*PayloadConfig) + // payload + switch x := m.Payload.(type) { + case *PayloadConfig_BytebufParams: + s := proto.Size(x.BytebufParams) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PayloadConfig_SimpleParams: + s := proto.Size(x.SimpleParams) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *PayloadConfig_ComplexParams: + s := proto.Size(x.ComplexParams) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*ByteBufferParams)(nil), "grpc.testing.ByteBufferParams") + proto.RegisterType((*SimpleProtoParams)(nil), "grpc.testing.SimpleProtoParams") + 
proto.RegisterType((*ComplexProtoParams)(nil), "grpc.testing.ComplexProtoParams") + proto.RegisterType((*PayloadConfig)(nil), "grpc.testing.PayloadConfig") +} + +func init() { proto.RegisterFile("payloads.proto", fileDescriptor_payloads_3abc71de35f06c83) } + +var fileDescriptor_payloads_3abc71de35f06c83 = []byte{ + // 254 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x2b, 0x48, 0xac, 0xcc, + 0xc9, 0x4f, 0x4c, 0x29, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x49, 0x2f, 0x2a, 0x48, + 0xd6, 0x2b, 0x49, 0x2d, 0x2e, 0xc9, 0xcc, 0x4b, 0x57, 0xf2, 0xe2, 0x12, 0x70, 0xaa, 0x2c, 0x49, + 0x75, 0x2a, 0x4d, 0x4b, 0x4b, 0x2d, 0x0a, 0x48, 0x2c, 0x4a, 0xcc, 0x2d, 0x16, 0x92, 0xe4, 0xe2, + 0x28, 0x4a, 0x2d, 0x8c, 0x2f, 0xce, 0xac, 0x4a, 0x95, 0x60, 0x54, 0x60, 0xd4, 0x60, 0x0d, 0x62, + 0x2f, 0x4a, 0x2d, 0x0c, 0xce, 0xac, 0x4a, 0x15, 0x92, 0xe6, 0xe2, 0x2c, 0x4a, 0x2d, 0x2e, 0x80, + 0xc8, 0x31, 0x81, 0xe5, 0x38, 0x40, 0x02, 0x20, 0x49, 0x25, 0x6f, 0x2e, 0xc1, 0xe0, 0xcc, 0xdc, + 0x82, 0x9c, 0xd4, 0x00, 0x90, 0x45, 0x14, 0x1a, 0x26, 0xc2, 0x25, 0xe4, 0x9c, 0x0f, 0x32, 0xac, + 0x02, 0xc9, 0x34, 0xa5, 0x6f, 0x8c, 0x5c, 0xbc, 0x01, 0x10, 0xff, 0x38, 0xe7, 0xe7, 0xa5, 0x65, + 0xa6, 0x0b, 0xb9, 0x73, 0xf1, 0x25, 0x55, 0x96, 0xa4, 0x26, 0x95, 0xa6, 0xc5, 0x17, 0x80, 0xd5, + 0x80, 0x6d, 0xe1, 0x36, 0x92, 0xd3, 0x43, 0xf6, 0xa7, 0x1e, 0xba, 0x27, 0x3d, 0x18, 0x82, 0x78, + 0xa1, 0xfa, 0xa0, 0x0e, 0x75, 0xe3, 0xe2, 0x2d, 0x06, 0xbb, 0x1e, 0x66, 0x0e, 0x13, 0xd8, 0x1c, + 0x79, 0x54, 0x73, 0x30, 0x3c, 0xe8, 0xc1, 0x10, 0xc4, 0x03, 0xd1, 0x07, 0x35, 0xc7, 0x93, 0x8b, + 0x2f, 0x19, 0xe2, 0x70, 0x98, 0x41, 0xcc, 0x60, 0x83, 0x14, 0x50, 0x0d, 0xc2, 0xf4, 0x1c, 0xc8, + 0x49, 0x50, 0x9d, 0x10, 0x01, 0x27, 0x4e, 0x2e, 0x76, 0x68, 0xe4, 0x25, 0xb1, 0x81, 0x23, 0xcf, + 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0xb0, 0x8c, 0x18, 0x4e, 0xce, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/benchmark/grpc_testing/services.pb.go b/vendor/google.golang.org/grpc/benchmark/grpc_testing/services.pb.go new file mode 100644 index 0000000..30f8fb6 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/grpc_testing/services.pb.go @@ -0,0 +1,518 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: services.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// BenchmarkServiceClient is the client API for BenchmarkService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type BenchmarkServiceClient interface {
+	// One request followed by one response.
+	// The server returns the client payload as-is.
+	UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error)
+	// One request followed by one response.
+	// The server returns the client payload as-is.
+	StreamingCall(ctx context.Context, opts ...grpc.CallOption) (BenchmarkService_StreamingCallClient, error)
+	// Unconstrained streaming.
+	// Both server and client keep sending & receiving simultaneously.
+	UnconstrainedStreamingCall(ctx context.Context, opts ...grpc.CallOption) (BenchmarkService_UnconstrainedStreamingCallClient, error)
+}
+
+type benchmarkServiceClient struct {
+	cc *grpc.ClientConn
+}
+
+func NewBenchmarkServiceClient(cc *grpc.ClientConn) BenchmarkServiceClient {
+	return &benchmarkServiceClient{cc}
+}
+
+func (c *benchmarkServiceClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) {
+	out := new(SimpleResponse)
+	err := c.cc.Invoke(ctx, "/grpc.testing.BenchmarkService/UnaryCall", in, out, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *benchmarkServiceClient) StreamingCall(ctx context.Context, opts ...grpc.CallOption) (BenchmarkService_StreamingCallClient, error) {
+	stream, err := c.cc.NewStream(ctx, &_BenchmarkService_serviceDesc.Streams[0], "/grpc.testing.BenchmarkService/StreamingCall", opts...)
+	if err != nil {
+		return nil, err
+	}
+	x := &benchmarkServiceStreamingCallClient{stream}
+	return x, nil
+}
+
+type BenchmarkService_StreamingCallClient interface {
+	Send(*SimpleRequest) error
+	Recv() (*SimpleResponse, error)
+	grpc.ClientStream
+}
+
+type benchmarkServiceStreamingCallClient struct {
+	grpc.ClientStream
+}
+
+func (x *benchmarkServiceStreamingCallClient) Send(m *SimpleRequest) error {
+	return x.ClientStream.SendMsg(m)
+}
+
+func (x *benchmarkServiceStreamingCallClient) Recv() (*SimpleResponse, error) {
+	m := new(SimpleResponse)
+	if err := x.ClientStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+func (c *benchmarkServiceClient) UnconstrainedStreamingCall(ctx context.Context, opts ...grpc.CallOption) (BenchmarkService_UnconstrainedStreamingCallClient, error) {
+	stream, err := c.cc.NewStream(ctx, &_BenchmarkService_serviceDesc.Streams[1], "/grpc.testing.BenchmarkService/UnconstrainedStreamingCall", opts...)
+	if err != nil {
+		return nil, err
+	}
+	x := &benchmarkServiceUnconstrainedStreamingCallClient{stream}
+	return x, nil
+}
+
+type BenchmarkService_UnconstrainedStreamingCallClient interface {
+	Send(*SimpleRequest) error
+	Recv() (*SimpleResponse, error)
+	grpc.ClientStream
+}
+
+type benchmarkServiceUnconstrainedStreamingCallClient struct {
+	grpc.ClientStream
+}
+
+func (x *benchmarkServiceUnconstrainedStreamingCallClient) Send(m *SimpleRequest) error {
+	return x.ClientStream.SendMsg(m)
+}
+
+func (x *benchmarkServiceUnconstrainedStreamingCallClient) Recv() (*SimpleResponse, error) {
+	m := new(SimpleResponse)
+	if err := x.ClientStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+// BenchmarkServiceServer is the server API for BenchmarkService service.
+type BenchmarkServiceServer interface {
+	// One request followed by one response.
+	// The server returns the client payload as-is.
+	UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error)
+	// One request followed by one response.
+	// The server returns the client payload as-is.
+	StreamingCall(BenchmarkService_StreamingCallServer) error
+	// Unconstrained streaming.
+	// Both server and client keep sending & receiving simultaneously.
+	UnconstrainedStreamingCall(BenchmarkService_UnconstrainedStreamingCallServer) error
+}
+
+func RegisterBenchmarkServiceServer(s *grpc.Server, srv BenchmarkServiceServer) {
+	s.RegisterService(&_BenchmarkService_serviceDesc, srv)
+}
+
+func _BenchmarkService_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(SimpleRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(BenchmarkServiceServer).UnaryCall(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/grpc.testing.BenchmarkService/UnaryCall",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(BenchmarkServiceServer).UnaryCall(ctx, req.(*SimpleRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _BenchmarkService_StreamingCall_Handler(srv interface{}, stream grpc.ServerStream) error {
+	return srv.(BenchmarkServiceServer).StreamingCall(&benchmarkServiceStreamingCallServer{stream})
+}
+
+type BenchmarkService_StreamingCallServer interface {
+	Send(*SimpleResponse) error
+	Recv() (*SimpleRequest, error)
+	grpc.ServerStream
+}
+
+type benchmarkServiceStreamingCallServer struct {
+	grpc.ServerStream
+}
+
+func (x *benchmarkServiceStreamingCallServer) Send(m *SimpleResponse) error {
+	return x.ServerStream.SendMsg(m)
+}
+
+func (x *benchmarkServiceStreamingCallServer) Recv() (*SimpleRequest, error) {
+	m := new(SimpleRequest)
+	if err := x.ServerStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+func _BenchmarkService_UnconstrainedStreamingCall_Handler(srv interface{}, stream grpc.ServerStream) error {
+	return srv.(BenchmarkServiceServer).UnconstrainedStreamingCall(&benchmarkServiceUnconstrainedStreamingCallServer{stream})
+}
+
+type BenchmarkService_UnconstrainedStreamingCallServer interface {
+	Send(*SimpleResponse) error
+	Recv() (*SimpleRequest, error)
+	grpc.ServerStream
+}
+
+type benchmarkServiceUnconstrainedStreamingCallServer struct {
+	grpc.ServerStream
+}
+
+func (x *benchmarkServiceUnconstrainedStreamingCallServer) Send(m *SimpleResponse) error {
+	return x.ServerStream.SendMsg(m)
+}
+
+func (x *benchmarkServiceUnconstrainedStreamingCallServer) Recv() (*SimpleRequest, error) {
+	m := new(SimpleRequest)
+	if err := x.ServerStream.RecvMsg(m); err != nil {
+		return nil, err
+	}
+	return m, nil
+}
+
+var _BenchmarkService_serviceDesc = grpc.ServiceDesc{
+	ServiceName: "grpc.testing.BenchmarkService",
+	HandlerType: (*BenchmarkServiceServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "UnaryCall",
+			Handler:    _BenchmarkService_UnaryCall_Handler,
+		},
+	},
+	Streams: []grpc.StreamDesc{
+		{
+			StreamName:    "StreamingCall",
+			Handler:       _BenchmarkService_StreamingCall_Handler,
+			ServerStreams: true,
+			ClientStreams: true,
+		},
+		{
+			StreamName:    "UnconstrainedStreamingCall",
+			Handler:       _BenchmarkService_UnconstrainedStreamingCall_Handler,
+			ServerStreams: true,
+			ClientStreams: true,
+		},
+	},
+	Metadata: "services.proto",
+}
+
+// WorkerServiceClient is the client API for WorkerService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type WorkerServiceClient interface { + // Start server with specified workload. + // First request sent specifies the ServerConfig followed by ServerStatus + // response. After that, a "Mark" can be sent anytime to request the latest + // stats. Closing the stream will initiate shutdown of the test server + // and once the shutdown has finished, the OK status is sent to terminate + // this RPC. + RunServer(ctx context.Context, opts ...grpc.CallOption) (WorkerService_RunServerClient, error) + // Start client with specified workload. + // First request sent specifies the ClientConfig followed by ClientStatus + // response. After that, a "Mark" can be sent anytime to request the latest + // stats. Closing the stream will initiate shutdown of the test client + // and once the shutdown has finished, the OK status is sent to terminate + // this RPC. + RunClient(ctx context.Context, opts ...grpc.CallOption) (WorkerService_RunClientClient, error) + // Just return the core count - unary call + CoreCount(ctx context.Context, in *CoreRequest, opts ...grpc.CallOption) (*CoreResponse, error) + // Quit this worker + QuitWorker(ctx context.Context, in *Void, opts ...grpc.CallOption) (*Void, error) +} + +type workerServiceClient struct { + cc *grpc.ClientConn +} + +func NewWorkerServiceClient(cc *grpc.ClientConn) WorkerServiceClient { + return &workerServiceClient{cc} +} + +func (c *workerServiceClient) RunServer(ctx context.Context, opts ...grpc.CallOption) (WorkerService_RunServerClient, error) { + stream, err := c.cc.NewStream(ctx, &_WorkerService_serviceDesc.Streams[0], "/grpc.testing.WorkerService/RunServer", opts...) + if err != nil { + return nil, err + } + x := &workerServiceRunServerClient{stream} + return x, nil +} + +type WorkerService_RunServerClient interface { + Send(*ServerArgs) error + Recv() (*ServerStatus, error) + grpc.ClientStream +} + +type workerServiceRunServerClient struct { + grpc.ClientStream +} + +func (x *workerServiceRunServerClient) Send(m *ServerArgs) error { + return x.ClientStream.SendMsg(m) +} + +func (x *workerServiceRunServerClient) Recv() (*ServerStatus, error) { + m := new(ServerStatus) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *workerServiceClient) RunClient(ctx context.Context, opts ...grpc.CallOption) (WorkerService_RunClientClient, error) { + stream, err := c.cc.NewStream(ctx, &_WorkerService_serviceDesc.Streams[1], "/grpc.testing.WorkerService/RunClient", opts...) + if err != nil { + return nil, err + } + x := &workerServiceRunClientClient{stream} + return x, nil +} + +type WorkerService_RunClientClient interface { + Send(*ClientArgs) error + Recv() (*ClientStatus, error) + grpc.ClientStream +} + +type workerServiceRunClientClient struct { + grpc.ClientStream +} + +func (x *workerServiceRunClientClient) Send(m *ClientArgs) error { + return x.ClientStream.SendMsg(m) +} + +func (x *workerServiceRunClientClient) Recv() (*ClientStatus, error) { + m := new(ClientStatus) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *workerServiceClient) CoreCount(ctx context.Context, in *CoreRequest, opts ...grpc.CallOption) (*CoreResponse, error) { + out := new(CoreResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.WorkerService/CoreCount", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workerServiceClient) QuitWorker(ctx context.Context, in *Void, opts ...grpc.CallOption) (*Void, error) { + out := new(Void) + err := c.cc.Invoke(ctx, "/grpc.testing.WorkerService/QuitWorker", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkerServiceServer is the server API for WorkerService service. +type WorkerServiceServer interface { + // Start server with specified workload. + // First request sent specifies the ServerConfig followed by ServerStatus + // response. After that, a "Mark" can be sent anytime to request the latest + // stats. Closing the stream will initiate shutdown of the test server + // and once the shutdown has finished, the OK status is sent to terminate + // this RPC. + RunServer(WorkerService_RunServerServer) error + // Start client with specified workload. + // First request sent specifies the ClientConfig followed by ClientStatus + // response. After that, a "Mark" can be sent anytime to request the latest + // stats. Closing the stream will initiate shutdown of the test client + // and once the shutdown has finished, the OK status is sent to terminate + // this RPC. + RunClient(WorkerService_RunClientServer) error + // Just return the core count - unary call + CoreCount(context.Context, *CoreRequest) (*CoreResponse, error) + // Quit this worker + QuitWorker(context.Context, *Void) (*Void, error) +} + +func RegisterWorkerServiceServer(s *grpc.Server, srv WorkerServiceServer) { + s.RegisterService(&_WorkerService_serviceDesc, srv) +} + +func _WorkerService_RunServer_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(WorkerServiceServer).RunServer(&workerServiceRunServerServer{stream}) +} + +type WorkerService_RunServerServer interface { + Send(*ServerStatus) error + Recv() (*ServerArgs, error) + grpc.ServerStream +} + +type workerServiceRunServerServer struct { + grpc.ServerStream +} + +func (x *workerServiceRunServerServer) Send(m *ServerStatus) error { + return x.ServerStream.SendMsg(m) +} + +func (x *workerServiceRunServerServer) Recv() (*ServerArgs, error) { + m := new(ServerArgs) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _WorkerService_RunClient_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(WorkerServiceServer).RunClient(&workerServiceRunClientServer{stream}) +} + +type WorkerService_RunClientServer interface { + Send(*ClientStatus) error + Recv() (*ClientArgs, error) + grpc.ServerStream +} + +type workerServiceRunClientServer struct { + grpc.ServerStream +} + +func (x *workerServiceRunClientServer) Send(m *ClientStatus) error { + return x.ServerStream.SendMsg(m) +} + +func (x *workerServiceRunClientServer) Recv() (*ClientArgs, error) { + m := new(ClientArgs) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _WorkerService_CoreCount_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CoreRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkerServiceServer).CoreCount(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.WorkerService/CoreCount", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkerServiceServer).CoreCount(ctx, req.(*CoreRequest)) + } + 
return interceptor(ctx, in, info, handler) +} + +func _WorkerService_QuitWorker_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Void) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkerServiceServer).QuitWorker(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.WorkerService/QuitWorker", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkerServiceServer).QuitWorker(ctx, req.(*Void)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkerService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.WorkerService", + HandlerType: (*WorkerServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CoreCount", + Handler: _WorkerService_CoreCount_Handler, + }, + { + MethodName: "QuitWorker", + Handler: _WorkerService_QuitWorker_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "RunServer", + Handler: _WorkerService_RunServer_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "RunClient", + Handler: _WorkerService_RunClient_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "services.proto", +} + +func init() { proto.RegisterFile("services.proto", fileDescriptor_services_e4655369b5d7f4d0) } + +var fileDescriptor_services_e4655369b5d7f4d0 = []byte{ + // 271 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xc1, 0x4a, 0xc3, 0x40, + 0x10, 0x86, 0x69, 0x0f, 0x42, 0x16, 0x53, 0x64, 0x4f, 0xba, 0xfa, 0x00, 0x9e, 0x82, 0x54, 0x5f, + 0xc0, 0x06, 0x3d, 0x0a, 0x36, 0x54, 0x0f, 0x9e, 0xd6, 0x74, 0x88, 0x4b, 0x93, 0x99, 0x38, 0x33, + 0x11, 0x7c, 0x02, 0x1f, 0xc1, 0xd7, 0x15, 0xb3, 0x56, 0x6a, 0xc8, 0xcd, 0x1e, 0xe7, 0xff, 0x86, + 0x8f, 0xfd, 0x77, 0xd7, 0xcc, 0x04, 0xf8, 0x2d, 0x94, 0x20, 0x59, 0xcb, 0xa4, 0x64, 0x0f, 0x2b, + 0x6e, 0xcb, 0x4c, 0x41, 0x34, 0x60, 0xe5, 0x66, 0x0d, 0x88, 0xf8, 0x6a, 0x4b, 0x5d, 0x5a, 0x12, + 0x2a, 0x53, 0x1d, 0xc7, 0xf9, 0xc7, 0xd4, 0x1c, 0x2d, 0x00, 0xcb, 0x97, 0xc6, 0xf3, 0xa6, 0x88, + 0x22, 0x7b, 0x6b, 0x92, 0x15, 0x7a, 0x7e, 0xcf, 0x7d, 0x5d, 0xdb, 0xd3, 0x6c, 0xd7, 0x97, 0x15, + 0xa1, 0x69, 0x6b, 0x58, 0xc2, 0x6b, 0x07, 0xa2, 0xee, 0x6c, 0x1c, 0x4a, 0x4b, 0x28, 0x60, 0xef, + 0x4c, 0x5a, 0x28, 0x83, 0x6f, 0x02, 0x56, 0xff, 0x74, 0x9d, 0x4f, 0x2e, 0x26, 0xf6, 0xc9, 0xb8, + 0x15, 0x96, 0x84, 0xa2, 0xec, 0x03, 0xc2, 0x7a, 0x9f, 0xf2, 0xf9, 0xe7, 0xd4, 0xa4, 0x8f, 0xc4, + 0x1b, 0xe0, 0xed, 0x35, 0xdc, 0x98, 0x64, 0xd9, 0xe1, 0xf7, 0x04, 0x6c, 0x8f, 0x07, 0x82, 0x3e, + 0xbd, 0xe6, 0x4a, 0x9c, 0x1b, 0x23, 0x85, 0x7a, 0xed, 0xa4, 0x3f, 0x75, 0xd4, 0xe4, 0x75, 0x00, + 0xd4, 0xa1, 0x26, 0xa6, 0x63, 0x9a, 0x48, 0x76, 0x34, 0x0b, 0x93, 0xe4, 0xc4, 0x90, 0x53, 0x87, + 0x6a, 0x4f, 0x06, 0xcb, 0xc4, 0xbf, 0x4d, 0xdd, 0x18, 0xfa, 0x79, 0x90, 0x2b, 0x63, 0xee, 0xbb, + 0xa0, 0xb1, 0xa6, 0xb5, 0x7f, 0x37, 0x1f, 0x28, 0xac, 0xdd, 0x48, 0xf6, 0x7c, 0xd0, 0x7f, 0x95, + 0xcb, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x9a, 0xb4, 0x19, 0x36, 0x69, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/benchmark/grpc_testing/stats.pb.go b/vendor/google.golang.org/grpc/benchmark/grpc_testing/stats.pb.go new file mode 100644 index 0000000..abc2aea --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/grpc_testing/stats.pb.go @@ -0,0 +1,302 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: stats.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ServerStats struct { + // wall clock time change in seconds since last reset + TimeElapsed float64 `protobuf:"fixed64,1,opt,name=time_elapsed,json=timeElapsed,proto3" json:"time_elapsed,omitempty"` + // change in user time (in seconds) used by the server since last reset + TimeUser float64 `protobuf:"fixed64,2,opt,name=time_user,json=timeUser,proto3" json:"time_user,omitempty"` + // change in server time (in seconds) used by the server process and all + // threads since last reset + TimeSystem float64 `protobuf:"fixed64,3,opt,name=time_system,json=timeSystem,proto3" json:"time_system,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerStats) Reset() { *m = ServerStats{} } +func (m *ServerStats) String() string { return proto.CompactTextString(m) } +func (*ServerStats) ProtoMessage() {} +func (*ServerStats) Descriptor() ([]byte, []int) { + return fileDescriptor_stats_8ba831c0cb3c3440, []int{0} +} +func (m *ServerStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerStats.Unmarshal(m, b) +} +func (m *ServerStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerStats.Marshal(b, m, deterministic) +} +func (dst *ServerStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerStats.Merge(dst, src) +} +func (m *ServerStats) XXX_Size() int { + return xxx_messageInfo_ServerStats.Size(m) +} +func (m *ServerStats) XXX_DiscardUnknown() { + xxx_messageInfo_ServerStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerStats proto.InternalMessageInfo + +func (m *ServerStats) GetTimeElapsed() float64 { + if m != nil { + return m.TimeElapsed + } + return 0 +} + +func (m *ServerStats) GetTimeUser() float64 { + if m != nil { + return m.TimeUser + } + return 0 +} + +func (m *ServerStats) GetTimeSystem() float64 { + if m != nil { + return m.TimeSystem + } + return 0 +} + +// Histogram params based on grpc/support/histogram.c +type HistogramParams struct { + Resolution float64 `protobuf:"fixed64,1,opt,name=resolution,proto3" json:"resolution,omitempty"` + MaxPossible float64 `protobuf:"fixed64,2,opt,name=max_possible,json=maxPossible,proto3" json:"max_possible,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HistogramParams) Reset() { *m = HistogramParams{} } +func (m *HistogramParams) String() string { return proto.CompactTextString(m) } +func (*HistogramParams) ProtoMessage() {} +func (*HistogramParams) Descriptor() ([]byte, []int) { + return fileDescriptor_stats_8ba831c0cb3c3440, []int{1} +} +func (m *HistogramParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HistogramParams.Unmarshal(m, b) +} +func (m *HistogramParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HistogramParams.Marshal(b, m, deterministic) +} 
+func (dst *HistogramParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_HistogramParams.Merge(dst, src) +} +func (m *HistogramParams) XXX_Size() int { + return xxx_messageInfo_HistogramParams.Size(m) +} +func (m *HistogramParams) XXX_DiscardUnknown() { + xxx_messageInfo_HistogramParams.DiscardUnknown(m) +} + +var xxx_messageInfo_HistogramParams proto.InternalMessageInfo + +func (m *HistogramParams) GetResolution() float64 { + if m != nil { + return m.Resolution + } + return 0 +} + +func (m *HistogramParams) GetMaxPossible() float64 { + if m != nil { + return m.MaxPossible + } + return 0 +} + +// Histogram data based on grpc/support/histogram.c +type HistogramData struct { + Bucket []uint32 `protobuf:"varint,1,rep,packed,name=bucket,proto3" json:"bucket,omitempty"` + MinSeen float64 `protobuf:"fixed64,2,opt,name=min_seen,json=minSeen,proto3" json:"min_seen,omitempty"` + MaxSeen float64 `protobuf:"fixed64,3,opt,name=max_seen,json=maxSeen,proto3" json:"max_seen,omitempty"` + Sum float64 `protobuf:"fixed64,4,opt,name=sum,proto3" json:"sum,omitempty"` + SumOfSquares float64 `protobuf:"fixed64,5,opt,name=sum_of_squares,json=sumOfSquares,proto3" json:"sum_of_squares,omitempty"` + Count float64 `protobuf:"fixed64,6,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HistogramData) Reset() { *m = HistogramData{} } +func (m *HistogramData) String() string { return proto.CompactTextString(m) } +func (*HistogramData) ProtoMessage() {} +func (*HistogramData) Descriptor() ([]byte, []int) { + return fileDescriptor_stats_8ba831c0cb3c3440, []int{2} +} +func (m *HistogramData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HistogramData.Unmarshal(m, b) +} +func (m *HistogramData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HistogramData.Marshal(b, m, deterministic) +} +func (dst *HistogramData) XXX_Merge(src proto.Message) { + xxx_messageInfo_HistogramData.Merge(dst, src) +} +func (m *HistogramData) XXX_Size() int { + return xxx_messageInfo_HistogramData.Size(m) +} +func (m *HistogramData) XXX_DiscardUnknown() { + xxx_messageInfo_HistogramData.DiscardUnknown(m) +} + +var xxx_messageInfo_HistogramData proto.InternalMessageInfo + +func (m *HistogramData) GetBucket() []uint32 { + if m != nil { + return m.Bucket + } + return nil +} + +func (m *HistogramData) GetMinSeen() float64 { + if m != nil { + return m.MinSeen + } + return 0 +} + +func (m *HistogramData) GetMaxSeen() float64 { + if m != nil { + return m.MaxSeen + } + return 0 +} + +func (m *HistogramData) GetSum() float64 { + if m != nil { + return m.Sum + } + return 0 +} + +func (m *HistogramData) GetSumOfSquares() float64 { + if m != nil { + return m.SumOfSquares + } + return 0 +} + +func (m *HistogramData) GetCount() float64 { + if m != nil { + return m.Count + } + return 0 +} + +type ClientStats struct { + // Latency histogram. Data points are in nanoseconds. + Latencies *HistogramData `protobuf:"bytes,1,opt,name=latencies,proto3" json:"latencies,omitempty"` + // See ServerStats for details. 
+ TimeElapsed float64 `protobuf:"fixed64,2,opt,name=time_elapsed,json=timeElapsed,proto3" json:"time_elapsed,omitempty"` + TimeUser float64 `protobuf:"fixed64,3,opt,name=time_user,json=timeUser,proto3" json:"time_user,omitempty"` + TimeSystem float64 `protobuf:"fixed64,4,opt,name=time_system,json=timeSystem,proto3" json:"time_system,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientStats) Reset() { *m = ClientStats{} } +func (m *ClientStats) String() string { return proto.CompactTextString(m) } +func (*ClientStats) ProtoMessage() {} +func (*ClientStats) Descriptor() ([]byte, []int) { + return fileDescriptor_stats_8ba831c0cb3c3440, []int{3} +} +func (m *ClientStats) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientStats.Unmarshal(m, b) +} +func (m *ClientStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientStats.Marshal(b, m, deterministic) +} +func (dst *ClientStats) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientStats.Merge(dst, src) +} +func (m *ClientStats) XXX_Size() int { + return xxx_messageInfo_ClientStats.Size(m) +} +func (m *ClientStats) XXX_DiscardUnknown() { + xxx_messageInfo_ClientStats.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientStats proto.InternalMessageInfo + +func (m *ClientStats) GetLatencies() *HistogramData { + if m != nil { + return m.Latencies + } + return nil +} + +func (m *ClientStats) GetTimeElapsed() float64 { + if m != nil { + return m.TimeElapsed + } + return 0 +} + +func (m *ClientStats) GetTimeUser() float64 { + if m != nil { + return m.TimeUser + } + return 0 +} + +func (m *ClientStats) GetTimeSystem() float64 { + if m != nil { + return m.TimeSystem + } + return 0 +} + +func init() { + proto.RegisterType((*ServerStats)(nil), "grpc.testing.ServerStats") + proto.RegisterType((*HistogramParams)(nil), "grpc.testing.HistogramParams") + proto.RegisterType((*HistogramData)(nil), "grpc.testing.HistogramData") + proto.RegisterType((*ClientStats)(nil), "grpc.testing.ClientStats") +} + +func init() { proto.RegisterFile("stats.proto", fileDescriptor_stats_8ba831c0cb3c3440) } + +var fileDescriptor_stats_8ba831c0cb3c3440 = []byte{ + // 341 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x4a, 0xeb, 0x40, + 0x14, 0x86, 0x49, 0xd3, 0xf6, 0xb6, 0x27, 0xed, 0xbd, 0x97, 0x41, 0x24, 0x52, 0xd0, 0x1a, 0x5c, + 0x74, 0x95, 0x85, 0xae, 0x5c, 0xab, 0xe0, 0xce, 0xd2, 0xe8, 0x3a, 0x4c, 0xe3, 0x69, 0x19, 0xcc, + 0xcc, 0xc4, 0x39, 0x33, 0x12, 0x1f, 0x49, 0x7c, 0x49, 0xc9, 0x24, 0x68, 0x55, 0xd0, 0x5d, 0xe6, + 0xfb, 0x7e, 0xe6, 0xe4, 0xe4, 0x0f, 0x44, 0x64, 0xb9, 0xa5, 0xb4, 0x32, 0xda, 0x6a, 0x36, 0xd9, + 0x9a, 0xaa, 0x48, 0x2d, 0x92, 0x15, 0x6a, 0x9b, 0x28, 0x88, 0x32, 0x34, 0x4f, 0x68, 0xb2, 0x26, + 0xc2, 0x8e, 0x61, 0x62, 0x85, 0xc4, 0x1c, 0x4b, 0x5e, 0x11, 0xde, 0xc7, 0xc1, 0x3c, 0x58, 0x04, + 0xab, 0xa8, 0x61, 0x57, 0x2d, 0x62, 0x33, 0x18, 0xfb, 0x88, 0x23, 0x34, 0x71, 0xcf, 0xfb, 0x51, + 0x03, 0xee, 0x08, 0x0d, 0x3b, 0x02, 0x9f, 0xcd, 0xe9, 0x99, 0x2c, 0xca, 0x38, 0xf4, 0x1a, 0x1a, + 0x94, 0x79, 0x92, 0xdc, 0xc2, 0xbf, 0x6b, 0x41, 0x56, 0x6f, 0x0d, 0x97, 0x4b, 0x6e, 0xb8, 0x24, + 0x76, 0x08, 0x60, 0x90, 0x74, 0xe9, 0xac, 0xd0, 0xaa, 0x9b, 0xb8, 0x43, 0x9a, 0x77, 0x92, 0xbc, + 0xce, 0x2b, 0x4d, 0x24, 0xd6, 0x25, 0x76, 0x33, 0x23, 0xc9, 0xeb, 0x65, 0x87, 0x92, 0xd7, 0x00, + 0xa6, 0xef, 0xd7, 0x5e, 0x72, 0xcb, 0xd9, 0x3e, 0x0c, 0xd7, 0xae, 0x78, 
0x40, 0x1b, 0x07, 0xf3, + 0x70, 0x31, 0x5d, 0x75, 0x27, 0x76, 0x00, 0x23, 0x29, 0x54, 0x4e, 0x88, 0xaa, 0xbb, 0xe8, 0x8f, + 0x14, 0x2a, 0x43, 0x54, 0x5e, 0xf1, 0xba, 0x55, 0x61, 0xa7, 0x78, 0xed, 0xd5, 0x7f, 0x08, 0xc9, + 0xc9, 0xb8, 0xef, 0x69, 0xf3, 0xc8, 0x4e, 0xe0, 0x2f, 0x39, 0x99, 0xeb, 0x4d, 0x4e, 0x8f, 0x8e, + 0x1b, 0xa4, 0x78, 0xe0, 0xe5, 0x84, 0x9c, 0xbc, 0xd9, 0x64, 0x2d, 0x63, 0x7b, 0x30, 0x28, 0xb4, + 0x53, 0x36, 0x1e, 0x7a, 0xd9, 0x1e, 0x92, 0x97, 0x00, 0xa2, 0x8b, 0x52, 0xa0, 0xb2, 0xed, 0x47, + 0x3f, 0x87, 0x71, 0xc9, 0x2d, 0xaa, 0x42, 0x20, 0xf9, 0xfd, 0xa3, 0xd3, 0x59, 0xba, 0xdb, 0x52, + 0xfa, 0x69, 0xb7, 0xd5, 0x47, 0xfa, 0x5b, 0x5f, 0xbd, 0x5f, 0xfa, 0x0a, 0x7f, 0xee, 0xab, 0xff, + 0xb5, 0xaf, 0xf5, 0xd0, 0xff, 0x34, 0x67, 0x6f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xea, 0x75, 0x34, + 0x90, 0x43, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/benchmark/latency/latency.go b/vendor/google.golang.org/grpc/benchmark/latency/latency.go new file mode 100644 index 0000000..d5cc44f --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/latency/latency.go @@ -0,0 +1,315 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package latency provides wrappers for net.Conn, net.Listener, and +// net.Dialers, designed to interoperate to inject real-world latency into +// network connections. +package latency + +import ( + "bytes" + "context" + "encoding/binary" + "fmt" + "io" + "net" + "time" +) + +// Dialer is a function matching the signature of net.Dial. +type Dialer func(network, address string) (net.Conn, error) + +// TimeoutDialer is a function matching the signature of net.DialTimeout. +type TimeoutDialer func(network, address string, timeout time.Duration) (net.Conn, error) + +// ContextDialer is a function matching the signature of +// net.Dialer.DialContext. +type ContextDialer func(ctx context.Context, network, address string) (net.Conn, error) + +// Network represents a network with the given bandwidth, latency, and MTU +// (Maximum Transmission Unit) configuration, and can produce wrappers of +// net.Listeners, net.Conn, and various forms of dialing functions. The +// Listeners and Dialers/Conns on both sides of connections must come from this +// package, but need not be created from the same Network. Latency is computed +// when sending (in Write), and is injected when receiving (in Read). This +// allows senders' Write calls to be non-blocking, as in real-world +// applications. +// +// Note: Latency is injected by the sender specifying the absolute time data +// should be available, and the reader delaying until that time arrives to +// provide the data. This package attempts to counter-act the effects of clock +// drift and existing network latency by measuring the delay between the +// sender's transmission time and the receiver's reception time during startup. +// No attempt is made to measure the existing bandwidth of the connection. 
+type Network struct {
+	Kbps    int           // Kilobits per second; if non-positive, infinite
+	Latency time.Duration // One-way latency (sending); if non-positive, no delay
+	MTU     int           // Bytes per packet; if non-positive, infinite
+}
+
+var (
+	// Local simulates a local network.
+	Local = Network{0, 0, 0}
+	// LAN simulates a local area network.
+	LAN = Network{100 * 1024, 2 * time.Millisecond, 1500}
+	// WAN simulates a wide area network.
+	WAN = Network{20 * 1024, 30 * time.Millisecond, 1500}
+	// Longhaul simulates a bad network.
+	Longhaul = Network{1000 * 1024, 200 * time.Millisecond, 9000}
+)
+
+// Conn returns a net.Conn that wraps c and injects n's latency into that
+// connection. This function also imposes latency for connection creation.
+// If n's Latency is lower than the measured latency in c, an error is
+// returned.
+func (n *Network) Conn(c net.Conn) (net.Conn, error) {
+	start := now()
+	nc := &conn{Conn: c, network: n, readBuf: new(bytes.Buffer)}
+	if err := nc.sync(); err != nil {
+		return nil, err
+	}
+	sleep(start.Add(nc.delay).Sub(now()))
+	return nc, nil
+}
+
+type conn struct {
+	net.Conn
+	network *Network
+
+	readBuf     *bytes.Buffer // one packet worth of data received
+	lastSendEnd time.Time     // time the previous Write should be fully on the wire
+	delay       time.Duration // desired latency - measured latency
+}
+
+// header is sent before all data transmitted by the application.
+type header struct {
+	ReadTime int64 // Time the reader is allowed to read this packet (UnixNano)
+	Sz       int32 // Size of the data in the packet
+}
+
+func (c *conn) Write(p []byte) (n int, err error) {
+	tNow := now()
+	if c.lastSendEnd.Before(tNow) {
+		c.lastSendEnd = tNow
+	}
+	for len(p) > 0 {
+		pkt := p
+		if c.network.MTU > 0 && len(pkt) > c.network.MTU {
+			pkt = pkt[:c.network.MTU]
+			p = p[c.network.MTU:]
+		} else {
+			p = nil
+		}
+		if c.network.Kbps > 0 {
+			if congestion := c.lastSendEnd.Sub(tNow) - c.delay; congestion > 0 {
+				// The network is full; sleep until this packet can be sent.
+				sleep(congestion)
+				tNow = tNow.Add(congestion)
+			}
+		}
+		c.lastSendEnd = c.lastSendEnd.Add(c.network.pktTime(len(pkt)))
+		hdr := header{ReadTime: c.lastSendEnd.Add(c.delay).UnixNano(), Sz: int32(len(pkt))}
+		if err := binary.Write(c.Conn, binary.BigEndian, hdr); err != nil {
+			return n, err
+		}
+		x, err := c.Conn.Write(pkt)
+		n += x
+		if err != nil {
+			return n, err
+		}
+	}
+	return n, nil
+}
+
+func (c *conn) Read(p []byte) (n int, err error) {
+	if c.readBuf.Len() == 0 {
+		var hdr header
+		if err := binary.Read(c.Conn, binary.BigEndian, &hdr); err != nil {
+			return 0, err
+		}
+		defer func() { sleep(time.Unix(0, hdr.ReadTime).Sub(now())) }()
+
+		if _, err := io.CopyN(c.readBuf, c.Conn, int64(hdr.Sz)); err != nil {
+			return 0, err
+		}
+	}
+	// Read from readBuf.
+	return c.readBuf.Read(p)
+}
+
+// sync does a handshake and then measures the latency on the network in
+// coordination with the other side.
+func (c *conn) sync() error {
+	const (
+		pingMsg  = "syncPing"
+		warmup   = 10               // minimum number of iterations to measure latency
+		giveUp   = 50               // maximum number of iterations to measure latency
+		accuracy = time.Millisecond // req'd accuracy to stop early
+		goodRun  = 3                // stop early if latency within accuracy this many times
+	)
+
+	type syncMsg struct {
+		SendT int64 // Time sent. If zero, stop.
+		RecvT int64 // Time received. If zero, fill in and respond.
+ } + + // A trivial handshake + if err := binary.Write(c.Conn, binary.BigEndian, []byte(pingMsg)); err != nil { + return err + } + var ping [8]byte + if err := binary.Read(c.Conn, binary.BigEndian, &ping); err != nil { + return err + } else if string(ping[:]) != pingMsg { + return fmt.Errorf("malformed handshake message: %v (want %q)", ping, pingMsg) + } + + // Both sides are alive and syncing. Calculate network delay / clock skew. + att := 0 + good := 0 + var latency time.Duration + localDone, remoteDone := false, false + send := true + for !localDone || !remoteDone { + if send { + if err := binary.Write(c.Conn, binary.BigEndian, syncMsg{SendT: now().UnixNano()}); err != nil { + return err + } + att++ + send = false + } + + // Block until we get a syncMsg + m := syncMsg{} + if err := binary.Read(c.Conn, binary.BigEndian, &m); err != nil { + return err + } + + if m.RecvT == 0 { + // Message initiated from other side. + if m.SendT == 0 { + remoteDone = true + continue + } + // Send response. + m.RecvT = now().UnixNano() + if err := binary.Write(c.Conn, binary.BigEndian, m); err != nil { + return err + } + continue + } + + lag := time.Duration(m.RecvT - m.SendT) + latency += lag + avgLatency := latency / time.Duration(att) + if e := lag - avgLatency; e > -accuracy && e < accuracy { + good++ + } else { + good = 0 + } + if att < giveUp && (att < warmup || good < goodRun) { + send = true + continue + } + localDone = true + latency = avgLatency + // Tell the other side we're done. + if err := binary.Write(c.Conn, binary.BigEndian, syncMsg{}); err != nil { + return err + } + } + if c.network.Latency <= 0 { + return nil + } + c.delay = c.network.Latency - latency + if c.delay < 0 { + return fmt.Errorf("measured network latency (%v) higher than desired latency (%v)", latency, c.network.Latency) + } + return nil +} + +// Listener returns a net.Listener that wraps l and injects n's latency in its +// connections. +func (n *Network) Listener(l net.Listener) net.Listener { + return &listener{Listener: l, network: n} +} + +type listener struct { + net.Listener + network *Network +} + +func (l *listener) Accept() (net.Conn, error) { + c, err := l.Listener.Accept() + if err != nil { + return nil, err + } + return l.network.Conn(c) +} + +// Dialer returns a Dialer that wraps d and injects n's latency in its +// connections. n's Latency is also injected to the connection's creation. +func (n *Network) Dialer(d Dialer) Dialer { + return func(network, address string) (net.Conn, error) { + conn, err := d(network, address) + if err != nil { + return nil, err + } + return n.Conn(conn) + } +} + +// TimeoutDialer returns a TimeoutDialer that wraps d and injects n's latency +// in its connections. n's Latency is also injected to the connection's +// creation. +func (n *Network) TimeoutDialer(d TimeoutDialer) TimeoutDialer { + return func(network, address string, timeout time.Duration) (net.Conn, error) { + conn, err := d(network, address, timeout) + if err != nil { + return nil, err + } + return n.Conn(conn) + } +} + +// ContextDialer returns a ContextDialer that wraps d and injects n's latency +// in its connections. n's Latency is also injected to the connection's +// creation. 
+func (n *Network) ContextDialer(d ContextDialer) ContextDialer { + return func(ctx context.Context, network, address string) (net.Conn, error) { + conn, err := d(ctx, network, address) + if err != nil { + return nil, err + } + return n.Conn(conn) + } +} + +// pktTime returns the time it takes to transmit one packet of data of size b +// in bytes. +func (n *Network) pktTime(b int) time.Duration { + if n.Kbps <= 0 { + return time.Duration(0) + } + return time.Duration(b) * time.Second / time.Duration(n.Kbps*(1024/8)) +} + +// Wrappers for testing + +var now = time.Now +var sleep = time.Sleep diff --git a/vendor/google.golang.org/grpc/benchmark/server/main.go b/vendor/google.golang.org/grpc/benchmark/server/main.go new file mode 100644 index 0000000..5998736 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/server/main.go @@ -0,0 +1,90 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* +Package main provides a server used for benchmarking. It launches a server +which is listening on port 50051. An example to start the server can be found +at: + go run benchmark/server/main.go -test_name=grpc_test + +After starting the server, the client can be run separately and used to test +qps and latency. +*/ +package main + +import ( + "flag" + "fmt" + "net" + _ "net/http/pprof" + "os" + "os/signal" + "runtime" + "runtime/pprof" + "time" + + "google.golang.org/grpc/benchmark" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/syscall" +) + +var ( + port = flag.String("port", "50051", "Localhost port to listen on.") + testName = flag.String("test_name", "", "Name of the test used for creating profiles.") +) + +func main() { + flag.Parse() + if *testName == "" { + grpclog.Fatalf("test name not set") + } + lis, err := net.Listen("tcp", ":"+*port) + if err != nil { + grpclog.Fatalf("Failed to listen: %v", err) + } + defer lis.Close() + + cf, err := os.Create("/tmp/" + *testName + ".cpu") + if err != nil { + grpclog.Fatalf("Failed to create file: %v", err) + } + defer cf.Close() + pprof.StartCPUProfile(cf) + cpuBeg := syscall.GetCPUTime() + // Launch server in a separate goroutine. + stop := benchmark.StartServer(benchmark.ServerInfo{Type: "protobuf", Listener: lis}) + // Wait on OS terminate signal. 
+ ch := make(chan os.Signal, 1) + signal.Notify(ch, os.Interrupt) + <-ch + cpu := time.Duration(syscall.GetCPUTime() - cpuBeg) + stop() + pprof.StopCPUProfile() + mf, err := os.Create("/tmp/" + *testName + ".mem") + if err != nil { + grpclog.Fatalf("Failed to create file: %v", err) + } + defer mf.Close() + runtime.GC() // materialize all statistics + if err := pprof.WriteHeapProfile(mf); err != nil { + grpclog.Fatalf("Failed to write memory profile: %v", err) + } + fmt.Println("Server CPU utilization:", cpu) + fmt.Println("Server CPU profile:", cf.Name()) + fmt.Println("Server Mem Profile:", mf.Name()) +} diff --git a/vendor/google.golang.org/grpc/benchmark/stats/histogram.go b/vendor/google.golang.org/grpc/benchmark/stats/histogram.go new file mode 100644 index 0000000..f038d26 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/stats/histogram.go @@ -0,0 +1,222 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package stats + +import ( + "bytes" + "fmt" + "io" + "log" + "math" + "strconv" + "strings" +) + +// Histogram accumulates values in the form of a histogram with +// exponentially increased bucket sizes. +type Histogram struct { + // Count is the total number of values added to the histogram. + Count int64 + // Sum is the sum of all the values added to the histogram. + Sum int64 + // SumOfSquares is the sum of squares of all values. + SumOfSquares int64 + // Min is the minimum of all the values added to the histogram. + Min int64 + // Max is the maximum of all the values added to the histogram. + Max int64 + // Buckets contains all the buckets of the histogram. + Buckets []HistogramBucket + + opts HistogramOptions + logBaseBucketSize float64 + oneOverLogOnePlusGrowthFactor float64 +} + +// HistogramOptions contains the parameters that define the histogram's buckets. +// The first bucket of the created histogram (with index 0) contains [min, min+n) +// where n = BaseBucketSize, min = MinValue. +// Bucket i (i>=1) contains [min + n * m^(i-1), min + n * m^i), where m = 1+GrowthFactor. +// The type of the values is int64. +type HistogramOptions struct { + // NumBuckets is the number of buckets. + NumBuckets int + // GrowthFactor is the growth factor of the buckets. A value of 0.1 + // indicates that bucket N+1 will be 10% larger than bucket N. + GrowthFactor float64 + // BaseBucketSize is the size of the first bucket. + BaseBucketSize float64 + // MinValue is the lower bound of the first bucket. + MinValue int64 +} + +// HistogramBucket represents one histogram bucket. +type HistogramBucket struct { + // LowBound is the lower bound of the bucket. + LowBound float64 + // Count is the number of values in the bucket. + Count int64 +} + +// NewHistogram returns a pointer to a new Histogram object that was created +// with the provided options. 
+func NewHistogram(opts HistogramOptions) *Histogram { + if opts.NumBuckets == 0 { + opts.NumBuckets = 32 + } + if opts.BaseBucketSize == 0.0 { + opts.BaseBucketSize = 1.0 + } + h := Histogram{ + Buckets: make([]HistogramBucket, opts.NumBuckets), + Min: math.MaxInt64, + Max: math.MinInt64, + + opts: opts, + logBaseBucketSize: math.Log(opts.BaseBucketSize), + oneOverLogOnePlusGrowthFactor: 1 / math.Log(1+opts.GrowthFactor), + } + m := 1.0 + opts.GrowthFactor + delta := opts.BaseBucketSize + h.Buckets[0].LowBound = float64(opts.MinValue) + for i := 1; i < opts.NumBuckets; i++ { + h.Buckets[i].LowBound = float64(opts.MinValue) + delta + delta = delta * m + } + return &h +} + +// Print writes textual output of the histogram values. +func (h *Histogram) Print(w io.Writer) { + h.PrintWithUnit(w, 1) +} + +// PrintWithUnit writes textual output of the histogram values . +// Data in histogram is divided by a Unit before print. +func (h *Histogram) PrintWithUnit(w io.Writer, unit float64) { + avg := float64(h.Sum) / float64(h.Count) + fmt.Fprintf(w, "Count: %d Min: %5.1f Max: %5.1f Avg: %.2f\n", h.Count, float64(h.Min)/unit, float64(h.Max)/unit, avg/unit) + fmt.Fprintf(w, "%s\n", strings.Repeat("-", 60)) + if h.Count <= 0 { + return + } + + maxBucketDigitLen := len(strconv.FormatFloat(h.Buckets[len(h.Buckets)-1].LowBound, 'f', 6, 64)) + if maxBucketDigitLen < 3 { + // For "inf". + maxBucketDigitLen = 3 + } + maxCountDigitLen := len(strconv.FormatInt(h.Count, 10)) + percentMulti := 100 / float64(h.Count) + + accCount := int64(0) + for i, b := range h.Buckets { + fmt.Fprintf(w, "[%*f, ", maxBucketDigitLen, b.LowBound/unit) + if i+1 < len(h.Buckets) { + fmt.Fprintf(w, "%*f)", maxBucketDigitLen, h.Buckets[i+1].LowBound/unit) + } else { + fmt.Fprintf(w, "%*s)", maxBucketDigitLen, "inf") + } + + accCount += b.Count + fmt.Fprintf(w, " %*d %5.1f%% %5.1f%%", maxCountDigitLen, b.Count, float64(b.Count)*percentMulti, float64(accCount)*percentMulti) + + const barScale = 0.1 + barLength := int(float64(b.Count)*percentMulti*barScale + 0.5) + fmt.Fprintf(w, " %s\n", strings.Repeat("#", barLength)) + } +} + +// String returns the textual output of the histogram values as string. +func (h *Histogram) String() string { + var b bytes.Buffer + h.Print(&b) + return b.String() +} + +// Clear resets all the content of histogram. +func (h *Histogram) Clear() { + h.Count = 0 + h.Sum = 0 + h.SumOfSquares = 0 + h.Min = math.MaxInt64 + h.Max = math.MinInt64 + for i := range h.Buckets { + h.Buckets[i].Count = 0 + } +} + +// Opts returns a copy of the options used to create the Histogram. +func (h *Histogram) Opts() HistogramOptions { + return h.opts +} + +// Add adds a value to the histogram. 
+func (h *Histogram) Add(value int64) error { + bucket, err := h.findBucket(value) + if err != nil { + return err + } + h.Buckets[bucket].Count++ + h.Count++ + h.Sum += value + h.SumOfSquares += value * value + if value < h.Min { + h.Min = value + } + if value > h.Max { + h.Max = value + } + return nil +} + +func (h *Histogram) findBucket(value int64) (int, error) { + delta := float64(value - h.opts.MinValue) + var b int + if delta >= h.opts.BaseBucketSize { + // b = log_{1+growthFactor} (delta / baseBucketSize) + 1 + // = log(delta / baseBucketSize) / log(1+growthFactor) + 1 + // = (log(delta) - log(baseBucketSize)) * (1 / log(1+growthFactor)) + 1 + b = int((math.Log(delta)-h.logBaseBucketSize)*h.oneOverLogOnePlusGrowthFactor + 1) + } + if b >= len(h.Buckets) { + return 0, fmt.Errorf("no bucket for value: %d", value) + } + return b, nil +} + +// Merge takes another histogram h2, and merges its content into h. +// The two histograms must be created by equivalent HistogramOptions. +func (h *Histogram) Merge(h2 *Histogram) { + if h.opts != h2.opts { + log.Fatalf("failed to merge histograms, created by inequivalent options") + } + h.Count += h2.Count + h.Sum += h2.Sum + h.SumOfSquares += h2.SumOfSquares + if h2.Min < h.Min { + h.Min = h2.Min + } + if h2.Max > h.Max { + h.Max = h2.Max + } + for i, b := range h2.Buckets { + h.Buckets[i].Count += b.Count + } +} diff --git a/vendor/google.golang.org/grpc/benchmark/stats/stats.go b/vendor/google.golang.org/grpc/benchmark/stats/stats.go new file mode 100644 index 0000000..20c96b8 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/stats/stats.go @@ -0,0 +1,303 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package stats registers stats used for creating benchmarks +package stats + +import ( + "bytes" + "fmt" + "io" + "math" + "sort" + "strconv" + "time" +) + +// Features contains most fields for a benchmark +type Features struct { + NetworkMode string + EnableTrace bool + Latency time.Duration + Kbps int + Mtu int + MaxConcurrentCalls int + ReqSizeBytes int + RespSizeBytes int + EnableCompressor bool + EnableChannelz bool +} + +// String returns the textual output of the Features as string. +func (f Features) String() string { + return fmt.Sprintf("traceMode_%t-latency_%s-kbps_%#v-MTU_%#v-maxConcurrentCalls_"+ + "%#v-reqSize_%#vB-respSize_%#vB-Compressor_%t", f.EnableTrace, + f.Latency.String(), f.Kbps, f.Mtu, f.MaxConcurrentCalls, f.ReqSizeBytes, f.RespSizeBytes, f.EnableCompressor) +} + +// ConciseString returns the concise textual output of the Features as string, skipping +// setting with default value. +func (f Features) ConciseString() string { + noneEmptyPos := []bool{f.EnableTrace, f.Latency != 0, f.Kbps != 0, f.Mtu != 0, true, true, true, f.EnableCompressor, f.EnableChannelz} + return PartialPrintString(noneEmptyPos, f, false) +} + +// PartialPrintString can print certain features with different format. 
+func PartialPrintString(noneEmptyPos []bool, f Features, shared bool) string { + s := "" + var ( + prefix, suffix, linker string + isNetwork bool + ) + if shared { + suffix = "\n" + linker = ": " + } else { + prefix = "-" + linker = "_" + } + if noneEmptyPos[0] { + s += fmt.Sprintf("%sTrace%s%t%s", prefix, linker, f.EnableTrace, suffix) + } + if shared && f.NetworkMode != "" { + s += fmt.Sprintf("Network: %s \n", f.NetworkMode) + isNetwork = true + } + if !isNetwork { + if noneEmptyPos[1] { + s += fmt.Sprintf("%slatency%s%s%s", prefix, linker, f.Latency.String(), suffix) + } + if noneEmptyPos[2] { + s += fmt.Sprintf("%skbps%s%#v%s", prefix, linker, f.Kbps, suffix) + } + if noneEmptyPos[3] { + s += fmt.Sprintf("%sMTU%s%#v%s", prefix, linker, f.Mtu, suffix) + } + } + if noneEmptyPos[4] { + s += fmt.Sprintf("%sCallers%s%#v%s", prefix, linker, f.MaxConcurrentCalls, suffix) + } + if noneEmptyPos[5] { + s += fmt.Sprintf("%sreqSize%s%#vB%s", prefix, linker, f.ReqSizeBytes, suffix) + } + if noneEmptyPos[6] { + s += fmt.Sprintf("%srespSize%s%#vB%s", prefix, linker, f.RespSizeBytes, suffix) + } + if noneEmptyPos[7] { + s += fmt.Sprintf("%sCompressor%s%t%s", prefix, linker, f.EnableCompressor, suffix) + } + if noneEmptyPos[8] { + s += fmt.Sprintf("%sChannelz%s%t%s", prefix, linker, f.EnableChannelz, suffix) + } + return s +} + +type percentLatency struct { + Percent int + Value time.Duration +} + +// BenchResults records features and result of a benchmark. +type BenchResults struct { + RunMode string + Features Features + Latency []percentLatency + Operations int + NsPerOp int64 + AllocedBytesPerOp int64 + AllocsPerOp int64 + SharedPosion []bool +} + +// SetBenchmarkResult sets features of benchmark and basic results. +func (stats *Stats) SetBenchmarkResult(mode string, features Features, o int, allocdBytes, allocs int64, sharedPos []bool) { + stats.result.RunMode = mode + stats.result.Features = features + stats.result.Operations = o + stats.result.AllocedBytesPerOp = allocdBytes + stats.result.AllocsPerOp = allocs + stats.result.SharedPosion = sharedPos +} + +// GetBenchmarkResults returns the result of the benchmark including features and result. +func (stats *Stats) GetBenchmarkResults() BenchResults { + return stats.result +} + +// BenchString output latency stats as the format as time + unit. +func (stats *Stats) BenchString() string { + stats.maybeUpdate() + s := stats.result + res := s.RunMode + "-" + s.Features.String() + ": \n" + if len(s.Latency) != 0 { + var statsUnit = s.Latency[0].Value + var timeUnit = fmt.Sprintf("%v", statsUnit)[1:] + for i := 1; i < len(s.Latency)-1; i++ { + res += fmt.Sprintf("%d_Latency: %s %s \t", s.Latency[i].Percent, + strconv.FormatFloat(float64(s.Latency[i].Value)/float64(statsUnit), 'f', 4, 64), timeUnit) + } + res += fmt.Sprintf("Avg latency: %s %s \t", + strconv.FormatFloat(float64(s.Latency[len(s.Latency)-1].Value)/float64(statsUnit), 'f', 4, 64), timeUnit) + } + res += fmt.Sprintf("Count: %v \t", s.Operations) + res += fmt.Sprintf("%v Bytes/op\t", s.AllocedBytesPerOp) + res += fmt.Sprintf("%v Allocs/op\t", s.AllocsPerOp) + + return res +} + +// Stats is a simple helper for gathering additional statistics like histogram +// during benchmarks. This is not thread safe. +type Stats struct { + numBuckets int + unit time.Duration + min, max int64 + histogram *Histogram + + durations durationSlice + dirty bool + + sortLatency bool + result BenchResults +} + +type durationSlice []time.Duration + +// NewStats creates a new Stats instance. 
If numBuckets is not positive, +// the default value (16) will be used. +func NewStats(numBuckets int) *Stats { + if numBuckets <= 0 { + numBuckets = 16 + } + return &Stats{ + // Use one more bucket for the last unbounded bucket. + numBuckets: numBuckets + 1, + durations: make(durationSlice, 0, 100000), + } +} + +// Add adds an elapsed time per operation to the stats. +func (stats *Stats) Add(d time.Duration) { + stats.durations = append(stats.durations, d) + stats.dirty = true +} + +// Clear resets the stats, removing all values. +func (stats *Stats) Clear() { + stats.durations = stats.durations[:0] + stats.histogram = nil + stats.dirty = false + stats.result = BenchResults{} +} + +//Sort method for durations +func (a durationSlice) Len() int { return len(a) } +func (a durationSlice) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a durationSlice) Less(i, j int) bool { return a[i] < a[j] } +func max(a, b int64) int64 { + if a > b { + return a + } + return b +} + +// maybeUpdate updates internal stat data if there was any newly added +// stats since this was updated. +func (stats *Stats) maybeUpdate() { + if !stats.dirty { + return + } + + if stats.sortLatency { + sort.Sort(stats.durations) + stats.min = int64(stats.durations[0]) + stats.max = int64(stats.durations[len(stats.durations)-1]) + } + + stats.min = math.MaxInt64 + stats.max = 0 + for _, d := range stats.durations { + if stats.min > int64(d) { + stats.min = int64(d) + } + if stats.max < int64(d) { + stats.max = int64(d) + } + } + + // Use the largest unit that can represent the minimum time duration. + stats.unit = time.Nanosecond + for _, u := range []time.Duration{time.Microsecond, time.Millisecond, time.Second} { + if stats.min <= int64(u) { + break + } + stats.unit = u + } + + numBuckets := stats.numBuckets + if n := int(stats.max - stats.min + 1); n < numBuckets { + numBuckets = n + } + stats.histogram = NewHistogram(HistogramOptions{ + NumBuckets: numBuckets, + // max-min(lower bound of last bucket) = (1 + growthFactor)^(numBuckets-2) * baseBucketSize. + GrowthFactor: math.Pow(float64(stats.max-stats.min), 1/float64(numBuckets-2)) - 1, + BaseBucketSize: 1.0, + MinValue: stats.min}) + + for _, d := range stats.durations { + stats.histogram.Add(int64(d)) + } + + stats.dirty = false + + if stats.durations.Len() != 0 { + var percentToObserve = []int{50, 90, 99} + // First data record min unit from the latency result. + stats.result.Latency = append(stats.result.Latency, percentLatency{Percent: -1, Value: stats.unit}) + for _, position := range percentToObserve { + stats.result.Latency = append(stats.result.Latency, percentLatency{Percent: position, Value: stats.durations[max(stats.histogram.Count*int64(position)/100-1, 0)]}) + } + // Last data record the average latency. + avg := float64(stats.histogram.Sum) / float64(stats.histogram.Count) + stats.result.Latency = append(stats.result.Latency, percentLatency{Percent: -1, Value: time.Duration(avg)}) + } +} + +// SortLatency blocks the output +func (stats *Stats) SortLatency() { + stats.sortLatency = true +} + +// Print writes textual output of the Stats. +func (stats *Stats) Print(w io.Writer) { + stats.maybeUpdate() + if stats.histogram == nil { + fmt.Fprint(w, "Histogram (empty)\n") + } else { + fmt.Fprintf(w, "Histogram (unit: %s)\n", fmt.Sprintf("%v", stats.unit)[1:]) + stats.histogram.PrintWithUnit(w, float64(stats.unit)) + } +} + +// String returns the textual output of the Stats as string. 
+func (stats *Stats) String() string { + var b bytes.Buffer + stats.Print(&b) + return b.String() +} diff --git a/vendor/google.golang.org/grpc/benchmark/stats/util.go b/vendor/google.golang.org/grpc/benchmark/stats/util.go new file mode 100644 index 0000000..f3bb3a3 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/stats/util.go @@ -0,0 +1,208 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package stats + +import ( + "bufio" + "bytes" + "fmt" + "os" + "runtime" + "sort" + "strings" + "sync" + "testing" +) + +var ( + curB *testing.B + curBenchName string + curStats map[string]*Stats + + orgStdout *os.File + nextOutPos int + + injectCond *sync.Cond + injectDone chan struct{} +) + +// AddStats adds a new unnamed Stats instance to the current benchmark. You need +// to run benchmarks by calling RunTestMain() to inject the stats to the +// benchmark results. If numBuckets is not positive, the default value (16) will +// be used. Please note that this calls b.ResetTimer() since it may be blocked +// until the previous benchmark stats is printed out. So AddStats() should +// typically be called at the very beginning of each benchmark function. +func AddStats(b *testing.B, numBuckets int) *Stats { + return AddStatsWithName(b, "", numBuckets) +} + +// AddStatsWithName adds a new named Stats instance to the current benchmark. +// With this, you can add multiple stats in a single benchmark. You need +// to run benchmarks by calling RunTestMain() to inject the stats to the +// benchmark results. If numBuckets is not positive, the default value (16) will +// be used. Please note that this calls b.ResetTimer() since it may be blocked +// until the previous benchmark stats is printed out. So AddStatsWithName() +// should typically be called at the very beginning of each benchmark function. +func AddStatsWithName(b *testing.B, name string, numBuckets int) *Stats { + var benchName string + for i := 1; ; i++ { + pc, _, _, ok := runtime.Caller(i) + if !ok { + panic("benchmark function not found") + } + p := strings.Split(runtime.FuncForPC(pc).Name(), ".") + benchName = p[len(p)-1] + if strings.HasPrefix(benchName, "run") { + break + } + } + procs := runtime.GOMAXPROCS(-1) + if procs != 1 { + benchName = fmt.Sprintf("%s-%d", benchName, procs) + } + + stats := NewStats(numBuckets) + + if injectCond != nil { + // We need to wait until the previous benchmark stats is printed out. + injectCond.L.Lock() + for curB != nil && curBenchName != benchName { + injectCond.Wait() + } + + curB = b + curBenchName = benchName + curStats[name] = stats + + injectCond.L.Unlock() + } + + b.ResetTimer() + return stats +} + +// RunTestMain runs the tests with enabling injection of benchmark stats. It +// returns an exit code to pass to os.Exit. +func RunTestMain(m *testing.M) int { + startStatsInjector() + defer stopStatsInjector() + return m.Run() +} + +// startStatsInjector starts stats injection to benchmark results. 
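
// A minimal sketch of how AddStats and RunTestMain above are meant to be wired
// into a Go benchmark (this would live in a _test.go file; the benchmark body,
// the runBenchmarkSleep/BenchmarkSleep names, and the sleep workload are
// illustrative assumptions, not part of this patch). AddStatsWithName walks the
// call stack for a caller whose name starts with "run", hence the wrapper.
package stats_test

import (
	"os"
	"testing"
	"time"

	"google.golang.org/grpc/benchmark/stats"
)

func runBenchmarkSleep(b *testing.B) {
	// AddStats may block until the previous benchmark's stats are printed,
	// and it calls b.ResetTimer(), so it goes first.
	s := stats.AddStats(b, 0) // 0 selects the default bucket count (16)
	for i := 0; i < b.N; i++ {
		start := time.Now()
		time.Sleep(10 * time.Microsecond) // stand-in for the operation under test
		s.Add(time.Since(start))
	}
}

func BenchmarkSleep(b *testing.B) { runBenchmarkSleep(b) }

func TestMain(m *testing.M) {
	// RunTestMain installs the stdout interceptor that prints each recorded
	// histogram after its "Benchmark..." result line.
	os.Exit(stats.RunTestMain(m))
}
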
+func startStatsInjector() { + orgStdout = os.Stdout + r, w, _ := os.Pipe() + os.Stdout = w + nextOutPos = 0 + + resetCurBenchStats() + + injectCond = sync.NewCond(&sync.Mutex{}) + injectDone = make(chan struct{}) + go func() { + defer close(injectDone) + + scanner := bufio.NewScanner(r) + scanner.Split(splitLines) + for scanner.Scan() { + injectStatsIfFinished(scanner.Text()) + } + if err := scanner.Err(); err != nil { + panic(err) + } + }() +} + +// stopStatsInjector stops stats injection and restores os.Stdout. +func stopStatsInjector() { + os.Stdout.Close() + <-injectDone + injectCond = nil + os.Stdout = orgStdout +} + +// splitLines is a split function for a bufio.Scanner that returns each line +// of text, teeing texts to the original stdout even before each line ends. +func splitLines(data []byte, eof bool) (advance int, token []byte, err error) { + if eof && len(data) == 0 { + return 0, nil, nil + } + + if i := bytes.IndexByte(data, '\n'); i >= 0 { + orgStdout.Write(data[nextOutPos : i+1]) + nextOutPos = 0 + return i + 1, data[0:i], nil + } + + orgStdout.Write(data[nextOutPos:]) + nextOutPos = len(data) + + if eof { + // This is a final, non-terminated line. Return it. + return len(data), data, nil + } + + return 0, nil, nil +} + +// injectStatsIfFinished prints out the stats if the current benchmark finishes. +func injectStatsIfFinished(line string) { + injectCond.L.Lock() + defer injectCond.L.Unlock() + // We assume that the benchmark results start with "Benchmark". + if curB == nil || !strings.HasPrefix(line, "Benchmark") { + return + } + + if !curB.Failed() { + // Output all stats in alphabetical order. + names := make([]string, 0, len(curStats)) + for name := range curStats { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + stats := curStats[name] + // The output of stats starts with a header like "Histogram (unit: ms)" + // followed by statistical properties and the buckets. Add the stats name + // if it is a named stats and indent them as Go testing outputs. + lines := strings.Split(stats.String(), "\n") + if n := len(lines); n > 0 { + if name != "" { + name = ": " + name + } + fmt.Fprintf(orgStdout, "--- %s%s\n", lines[0], name) + for _, line := range lines[1 : n-1] { + fmt.Fprintf(orgStdout, "\t%s\n", line) + } + } + } + } + + resetCurBenchStats() + injectCond.Signal() +} + +// resetCurBenchStats resets the current benchmark stats. +func resetCurBenchStats() { + curB = nil + curBenchName = "" + curStats = make(map[string]*Stats) +} diff --git a/vendor/google.golang.org/grpc/benchmark/worker/benchmark_client.go b/vendor/google.golang.org/grpc/benchmark/worker/benchmark_client.go new file mode 100644 index 0000000..abb5bc9 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/worker/benchmark_client.go @@ -0,0 +1,386 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package main + +import ( + "context" + "flag" + "math" + "runtime" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/benchmark" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/benchmark/stats" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/syscall" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var caFile = flag.String("ca_file", "", "The file containing the CA root cert file") + +type lockingHistogram struct { + mu sync.Mutex + histogram *stats.Histogram +} + +func (h *lockingHistogram) add(value int64) { + h.mu.Lock() + defer h.mu.Unlock() + h.histogram.Add(value) +} + +// swap sets h.histogram to o and returns its old value. +func (h *lockingHistogram) swap(o *stats.Histogram) *stats.Histogram { + h.mu.Lock() + defer h.mu.Unlock() + old := h.histogram + h.histogram = o + return old +} + +func (h *lockingHistogram) mergeInto(merged *stats.Histogram) { + h.mu.Lock() + defer h.mu.Unlock() + merged.Merge(h.histogram) +} + +type benchmarkClient struct { + closeConns func() + stop chan bool + lastResetTime time.Time + histogramOptions stats.HistogramOptions + lockingHistograms []lockingHistogram + rusageLastReset *syscall.Rusage +} + +func printClientConfig(config *testpb.ClientConfig) { + // Some config options are ignored: + // - client type: + // will always create sync client + // - async client threads. + // - core list + grpclog.Infof(" * client type: %v (ignored, always creates sync client)", config.ClientType) + grpclog.Infof(" * async client threads: %v (ignored)", config.AsyncClientThreads) + // TODO: use cores specified by CoreList when setting list of cores is supported in go. + grpclog.Infof(" * core list: %v (ignored)", config.CoreList) + + grpclog.Infof(" - security params: %v", config.SecurityParams) + grpclog.Infof(" - core limit: %v", config.CoreLimit) + grpclog.Infof(" - payload config: %v", config.PayloadConfig) + grpclog.Infof(" - rpcs per chann: %v", config.OutstandingRpcsPerChannel) + grpclog.Infof(" - channel number: %v", config.ClientChannels) + grpclog.Infof(" - load params: %v", config.LoadParams) + grpclog.Infof(" - rpc type: %v", config.RpcType) + grpclog.Infof(" - histogram params: %v", config.HistogramParams) + grpclog.Infof(" - server targets: %v", config.ServerTargets) +} + +func setupClientEnv(config *testpb.ClientConfig) { + // Use all cpu cores available on machine by default. + // TODO: Revisit this for the optimal default setup. + if config.CoreLimit > 0 { + runtime.GOMAXPROCS(int(config.CoreLimit)) + } else { + runtime.GOMAXPROCS(runtime.NumCPU()) + } +} + +// createConns creates connections according to given config. +// It returns the connections and corresponding function to close them. +// It returns non-nil error if there is anything wrong. +func createConns(config *testpb.ClientConfig) ([]*grpc.ClientConn, func(), error) { + var opts []grpc.DialOption + + // Sanity check for client type. + switch config.ClientType { + case testpb.ClientType_SYNC_CLIENT: + case testpb.ClientType_ASYNC_CLIENT: + default: + return nil, nil, status.Errorf(codes.InvalidArgument, "unknown client type: %v", config.ClientType) + } + + // Check and set security options. 
+ if config.SecurityParams != nil { + if *caFile == "" { + *caFile = testdata.Path("ca.pem") + } + creds, err := credentials.NewClientTLSFromFile(*caFile, config.SecurityParams.ServerHostOverride) + if err != nil { + return nil, nil, status.Errorf(codes.InvalidArgument, "failed to create TLS credentials %v", err) + } + opts = append(opts, grpc.WithTransportCredentials(creds)) + } else { + opts = append(opts, grpc.WithInsecure()) + } + + // Use byteBufCodec if it is required. + if config.PayloadConfig != nil { + switch config.PayloadConfig.Payload.(type) { + case *testpb.PayloadConfig_BytebufParams: + opts = append(opts, grpc.WithDefaultCallOptions(grpc.CallCustomCodec(byteBufCodec{}))) + case *testpb.PayloadConfig_SimpleParams: + default: + return nil, nil, status.Errorf(codes.InvalidArgument, "unknown payload config: %v", config.PayloadConfig) + } + } + + // Create connections. + connCount := int(config.ClientChannels) + conns := make([]*grpc.ClientConn, connCount) + for connIndex := 0; connIndex < connCount; connIndex++ { + conns[connIndex] = benchmark.NewClientConn(config.ServerTargets[connIndex%len(config.ServerTargets)], opts...) + } + + return conns, func() { + for _, conn := range conns { + conn.Close() + } + }, nil +} + +func performRPCs(config *testpb.ClientConfig, conns []*grpc.ClientConn, bc *benchmarkClient) error { + // Read payload size and type from config. + var ( + payloadReqSize, payloadRespSize int + payloadType string + ) + if config.PayloadConfig != nil { + switch c := config.PayloadConfig.Payload.(type) { + case *testpb.PayloadConfig_BytebufParams: + payloadReqSize = int(c.BytebufParams.ReqSize) + payloadRespSize = int(c.BytebufParams.RespSize) + payloadType = "bytebuf" + case *testpb.PayloadConfig_SimpleParams: + payloadReqSize = int(c.SimpleParams.ReqSize) + payloadRespSize = int(c.SimpleParams.RespSize) + payloadType = "protobuf" + default: + return status.Errorf(codes.InvalidArgument, "unknown payload config: %v", config.PayloadConfig) + } + } + + // TODO add open loop distribution. + switch config.LoadParams.Load.(type) { + case *testpb.LoadParams_ClosedLoop: + case *testpb.LoadParams_Poisson: + return status.Errorf(codes.Unimplemented, "unsupported load params: %v", config.LoadParams) + default: + return status.Errorf(codes.InvalidArgument, "unknown load params: %v", config.LoadParams) + } + + rpcCountPerConn := int(config.OutstandingRpcsPerChannel) + + switch config.RpcType { + case testpb.RpcType_UNARY: + bc.doCloseLoopUnary(conns, rpcCountPerConn, payloadReqSize, payloadRespSize) + // TODO open loop. + case testpb.RpcType_STREAMING: + bc.doCloseLoopStreaming(conns, rpcCountPerConn, payloadReqSize, payloadRespSize, payloadType) + // TODO open loop. + default: + return status.Errorf(codes.InvalidArgument, "unknown rpc type: %v", config.RpcType) + } + + return nil +} + +func startBenchmarkClient(config *testpb.ClientConfig) (*benchmarkClient, error) { + printClientConfig(config) + + // Set running environment like how many cores to use. 
+ setupClientEnv(config) + + conns, closeConns, err := createConns(config) + if err != nil { + return nil, err + } + + rpcCountPerConn := int(config.OutstandingRpcsPerChannel) + bc := &benchmarkClient{ + histogramOptions: stats.HistogramOptions{ + NumBuckets: int(math.Log(config.HistogramParams.MaxPossible)/math.Log(1+config.HistogramParams.Resolution)) + 1, + GrowthFactor: config.HistogramParams.Resolution, + BaseBucketSize: (1 + config.HistogramParams.Resolution), + MinValue: 0, + }, + lockingHistograms: make([]lockingHistogram, rpcCountPerConn*len(conns)), + + stop: make(chan bool), + lastResetTime: time.Now(), + closeConns: closeConns, + rusageLastReset: syscall.GetRusage(), + } + + if err = performRPCs(config, conns, bc); err != nil { + // Close all connections if performRPCs failed. + closeConns() + return nil, err + } + + return bc, nil +} + +func (bc *benchmarkClient) doCloseLoopUnary(conns []*grpc.ClientConn, rpcCountPerConn int, reqSize int, respSize int) { + for ic, conn := range conns { + client := testpb.NewBenchmarkServiceClient(conn) + // For each connection, create rpcCountPerConn goroutines to do rpc. + for j := 0; j < rpcCountPerConn; j++ { + // Create histogram for each goroutine. + idx := ic*rpcCountPerConn + j + bc.lockingHistograms[idx].histogram = stats.NewHistogram(bc.histogramOptions) + // Start goroutine on the created mutex and histogram. + go func(idx int) { + // TODO: do warm up if necessary. + // Now relying on worker client to reserve time to do warm up. + // The worker client needs to wait for some time after client is created, + // before starting benchmark. + done := make(chan bool) + for { + go func() { + start := time.Now() + if err := benchmark.DoUnaryCall(client, reqSize, respSize); err != nil { + select { + case <-bc.stop: + case done <- false: + } + return + } + elapse := time.Since(start) + bc.lockingHistograms[idx].add(int64(elapse)) + select { + case <-bc.stop: + case done <- true: + } + }() + select { + case <-bc.stop: + return + case <-done: + } + } + }(idx) + } + } +} + +func (bc *benchmarkClient) doCloseLoopStreaming(conns []*grpc.ClientConn, rpcCountPerConn int, reqSize int, respSize int, payloadType string) { + var doRPC func(testpb.BenchmarkService_StreamingCallClient, int, int) error + if payloadType == "bytebuf" { + doRPC = benchmark.DoByteBufStreamingRoundTrip + } else { + doRPC = benchmark.DoStreamingRoundTrip + } + for ic, conn := range conns { + // For each connection, create rpcCountPerConn goroutines to do rpc. + for j := 0; j < rpcCountPerConn; j++ { + c := testpb.NewBenchmarkServiceClient(conn) + stream, err := c.StreamingCall(context.Background()) + if err != nil { + grpclog.Fatalf("%v.StreamingCall(_) = _, %v", c, err) + } + // Create histogram for each goroutine. + idx := ic*rpcCountPerConn + j + bc.lockingHistograms[idx].histogram = stats.NewHistogram(bc.histogramOptions) + // Start goroutine on the created mutex and histogram. + go func(idx int) { + // TODO: do warm up if necessary. + // Now relying on worker client to reserve time to do warm up. + // The worker client needs to wait for some time after client is created, + // before starting benchmark. + for { + start := time.Now() + if err := doRPC(stream, reqSize, respSize); err != nil { + return + } + elapse := time.Since(start) + bc.lockingHistograms[idx].add(int64(elapse)) + select { + case <-bc.stop: + return + default: + } + } + }(idx) + } + } +} + +// getStats returns the stats for benchmark client. 
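
// A small worked example of the bucket-count derivation used in
// startBenchmarkClient above: the driver supplies HistogramParams.Resolution
// (the per-bucket growth factor) and HistogramParams.MaxPossible, and
// NumBuckets = log(MaxPossible)/log(1+Resolution) + 1 makes the exponential
// buckets reach MaxPossible. The concrete numbers below are illustrative
// assumptions, not values taken from this patch.
package main

import (
	"fmt"
	"math"
)

func main() {
	resolution := 0.01  // each bucket is 1% wider than the previous one
	maxPossible := 60e9 // 60 seconds expressed in nanoseconds
	numBuckets := int(math.Log(maxPossible)/math.Log(1+resolution)) + 1
	fmt.Println(numBuckets) // 2495 buckets for this configuration
}
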
+// It resets lastResetTime and all histograms if argument reset is true. +func (bc *benchmarkClient) getStats(reset bool) *testpb.ClientStats { + var wallTimeElapsed, uTimeElapsed, sTimeElapsed float64 + mergedHistogram := stats.NewHistogram(bc.histogramOptions) + + if reset { + // Merging histogram may take some time. + // Put all histograms aside and merge later. + toMerge := make([]*stats.Histogram, len(bc.lockingHistograms)) + for i := range bc.lockingHistograms { + toMerge[i] = bc.lockingHistograms[i].swap(stats.NewHistogram(bc.histogramOptions)) + } + + for i := 0; i < len(toMerge); i++ { + mergedHistogram.Merge(toMerge[i]) + } + + wallTimeElapsed = time.Since(bc.lastResetTime).Seconds() + latestRusage := syscall.GetRusage() + uTimeElapsed, sTimeElapsed = syscall.CPUTimeDiff(bc.rusageLastReset, latestRusage) + + bc.rusageLastReset = latestRusage + bc.lastResetTime = time.Now() + } else { + // Merge only, not reset. + for i := range bc.lockingHistograms { + bc.lockingHistograms[i].mergeInto(mergedHistogram) + } + + wallTimeElapsed = time.Since(bc.lastResetTime).Seconds() + uTimeElapsed, sTimeElapsed = syscall.CPUTimeDiff(bc.rusageLastReset, syscall.GetRusage()) + } + + b := make([]uint32, len(mergedHistogram.Buckets)) + for i, v := range mergedHistogram.Buckets { + b[i] = uint32(v.Count) + } + return &testpb.ClientStats{ + Latencies: &testpb.HistogramData{ + Bucket: b, + MinSeen: float64(mergedHistogram.Min), + MaxSeen: float64(mergedHistogram.Max), + Sum: float64(mergedHistogram.Sum), + SumOfSquares: float64(mergedHistogram.SumOfSquares), + Count: float64(mergedHistogram.Count), + }, + TimeElapsed: wallTimeElapsed, + TimeUser: uTimeElapsed, + TimeSystem: sTimeElapsed, + } +} + +func (bc *benchmarkClient) shutdown() { + close(bc.stop) + bc.closeConns() +} diff --git a/vendor/google.golang.org/grpc/benchmark/worker/benchmark_server.go b/vendor/google.golang.org/grpc/benchmark/worker/benchmark_server.go new file mode 100644 index 0000000..56f7e6e --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/worker/benchmark_server.go @@ -0,0 +1,184 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package main + +import ( + "flag" + "fmt" + "net" + "runtime" + "strconv" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/benchmark" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/syscall" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var ( + certFile = flag.String("tls_cert_file", "", "The TLS cert file") + keyFile = flag.String("tls_key_file", "", "The TLS key file") +) + +type benchmarkServer struct { + port int + cores int + closeFunc func() + mu sync.RWMutex + lastResetTime time.Time + rusageLastReset *syscall.Rusage +} + +func printServerConfig(config *testpb.ServerConfig) { + // Some config options are ignored: + // - server type: + // will always start sync server + // - async server threads + // - core list + grpclog.Infof(" * server type: %v (ignored, always starts sync server)", config.ServerType) + grpclog.Infof(" * async server threads: %v (ignored)", config.AsyncServerThreads) + // TODO: use cores specified by CoreList when setting list of cores is supported in go. + grpclog.Infof(" * core list: %v (ignored)", config.CoreList) + + grpclog.Infof(" - security params: %v", config.SecurityParams) + grpclog.Infof(" - core limit: %v", config.CoreLimit) + grpclog.Infof(" - port: %v", config.Port) + grpclog.Infof(" - payload config: %v", config.PayloadConfig) +} + +func startBenchmarkServer(config *testpb.ServerConfig, serverPort int) (*benchmarkServer, error) { + printServerConfig(config) + + // Use all cpu cores available on machine by default. + // TODO: Revisit this for the optimal default setup. + numOfCores := runtime.NumCPU() + if config.CoreLimit > 0 { + numOfCores = int(config.CoreLimit) + } + runtime.GOMAXPROCS(numOfCores) + + var opts []grpc.ServerOption + + // Sanity check for server type. + switch config.ServerType { + case testpb.ServerType_SYNC_SERVER: + case testpb.ServerType_ASYNC_SERVER: + case testpb.ServerType_ASYNC_GENERIC_SERVER: + default: + return nil, status.Errorf(codes.InvalidArgument, "unknown server type: %v", config.ServerType) + } + + // Set security options. + if config.SecurityParams != nil { + if *certFile == "" { + *certFile = testdata.Path("server1.pem") + } + if *keyFile == "" { + *keyFile = testdata.Path("server1.key") + } + creds, err := credentials.NewServerTLSFromFile(*certFile, *keyFile) + if err != nil { + grpclog.Fatalf("failed to generate credentials %v", err) + } + opts = append(opts, grpc.Creds(creds)) + } + + // Priority: config.Port > serverPort > default (0). + port := int(config.Port) + if port == 0 { + port = serverPort + } + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", port)) + if err != nil { + grpclog.Fatalf("Failed to listen: %v", err) + } + addr := lis.Addr().String() + + // Create different benchmark server according to config. + var closeFunc func() + if config.PayloadConfig != nil { + switch payload := config.PayloadConfig.Payload.(type) { + case *testpb.PayloadConfig_BytebufParams: + opts = append(opts, grpc.CustomCodec(byteBufCodec{})) + closeFunc = benchmark.StartServer(benchmark.ServerInfo{ + Type: "bytebuf", + Metadata: payload.BytebufParams.RespSize, + Listener: lis, + }, opts...) + case *testpb.PayloadConfig_SimpleParams: + closeFunc = benchmark.StartServer(benchmark.ServerInfo{ + Type: "protobuf", + Listener: lis, + }, opts...) 
+ case *testpb.PayloadConfig_ComplexParams: + return nil, status.Errorf(codes.Unimplemented, "unsupported payload config: %v", config.PayloadConfig) + default: + return nil, status.Errorf(codes.InvalidArgument, "unknown payload config: %v", config.PayloadConfig) + } + } else { + // Start protobuf server if payload config is nil. + closeFunc = benchmark.StartServer(benchmark.ServerInfo{ + Type: "protobuf", + Listener: lis, + }, opts...) + } + + grpclog.Infof("benchmark server listening at %v", addr) + addrSplitted := strings.Split(addr, ":") + p, err := strconv.Atoi(addrSplitted[len(addrSplitted)-1]) + if err != nil { + grpclog.Fatalf("failed to get port number from server address: %v", err) + } + + return &benchmarkServer{ + port: p, + cores: numOfCores, + closeFunc: closeFunc, + lastResetTime: time.Now(), + rusageLastReset: syscall.GetRusage(), + }, nil +} + +// getStats returns the stats for benchmark server. +// It resets lastResetTime if argument reset is true. +func (bs *benchmarkServer) getStats(reset bool) *testpb.ServerStats { + bs.mu.RLock() + defer bs.mu.RUnlock() + wallTimeElapsed := time.Since(bs.lastResetTime).Seconds() + rusageLatest := syscall.GetRusage() + uTimeElapsed, sTimeElapsed := syscall.CPUTimeDiff(bs.rusageLastReset, rusageLatest) + + if reset { + bs.lastResetTime = time.Now() + bs.rusageLastReset = rusageLatest + } + return &testpb.ServerStats{ + TimeElapsed: wallTimeElapsed, + TimeUser: uTimeElapsed, + TimeSystem: sTimeElapsed, + } +} diff --git a/vendor/google.golang.org/grpc/benchmark/worker/main.go b/vendor/google.golang.org/grpc/benchmark/worker/main.go new file mode 100644 index 0000000..5933bd3 --- /dev/null +++ b/vendor/google.golang.org/grpc/benchmark/worker/main.go @@ -0,0 +1,230 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package main + +import ( + "context" + "flag" + "fmt" + "io" + "net" + "net/http" + _ "net/http/pprof" + "runtime" + "strconv" + "time" + + "google.golang.org/grpc" + testpb "google.golang.org/grpc/benchmark/grpc_testing" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" +) + +var ( + driverPort = flag.Int("driver_port", 10000, "port for communication with driver") + serverPort = flag.Int("server_port", 0, "port for benchmark server if not specified by server config message") + pprofPort = flag.Int("pprof_port", -1, "Port for pprof debug server to listen on. 
Pprof server doesn't start if unset") + blockProfRate = flag.Int("block_prof_rate", 0, "fraction of goroutine blocking events to report in blocking profile") +) + +type byteBufCodec struct { +} + +func (byteBufCodec) Marshal(v interface{}) ([]byte, error) { + b, ok := v.(*[]byte) + if !ok { + return nil, fmt.Errorf("failed to marshal: %v is not type of *[]byte", v) + } + return *b, nil +} + +func (byteBufCodec) Unmarshal(data []byte, v interface{}) error { + b, ok := v.(*[]byte) + if !ok { + return fmt.Errorf("failed to marshal: %v is not type of *[]byte", v) + } + *b = data + return nil +} + +func (byteBufCodec) String() string { + return "bytebuffer" +} + +// workerServer implements WorkerService rpc handlers. +// It can create benchmarkServer or benchmarkClient on demand. +type workerServer struct { + stop chan<- bool + serverPort int +} + +func (s *workerServer) RunServer(stream testpb.WorkerService_RunServerServer) error { + var bs *benchmarkServer + defer func() { + // Close benchmark server when stream ends. + grpclog.Infof("closing benchmark server") + if bs != nil { + bs.closeFunc() + } + }() + for { + in, err := stream.Recv() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + + var out *testpb.ServerStatus + switch argtype := in.Argtype.(type) { + case *testpb.ServerArgs_Setup: + grpclog.Infof("server setup received:") + if bs != nil { + grpclog.Infof("server setup received when server already exists, closing the existing server") + bs.closeFunc() + } + bs, err = startBenchmarkServer(argtype.Setup, s.serverPort) + if err != nil { + return err + } + out = &testpb.ServerStatus{ + Stats: bs.getStats(false), + Port: int32(bs.port), + Cores: int32(bs.cores), + } + + case *testpb.ServerArgs_Mark: + grpclog.Infof("server mark received:") + grpclog.Infof(" - %v", argtype) + if bs == nil { + return status.Error(codes.InvalidArgument, "server does not exist when mark received") + } + out = &testpb.ServerStatus{ + Stats: bs.getStats(argtype.Mark.Reset_), + Port: int32(bs.port), + Cores: int32(bs.cores), + } + } + + if err := stream.Send(out); err != nil { + return err + } + } +} + +func (s *workerServer) RunClient(stream testpb.WorkerService_RunClientServer) error { + var bc *benchmarkClient + defer func() { + // Shut down benchmark client when stream ends. 
+ grpclog.Infof("shuting down benchmark client") + if bc != nil { + bc.shutdown() + } + }() + for { + in, err := stream.Recv() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + + var out *testpb.ClientStatus + switch t := in.Argtype.(type) { + case *testpb.ClientArgs_Setup: + grpclog.Infof("client setup received:") + if bc != nil { + grpclog.Infof("client setup received when client already exists, shuting down the existing client") + bc.shutdown() + } + bc, err = startBenchmarkClient(t.Setup) + if err != nil { + return err + } + out = &testpb.ClientStatus{ + Stats: bc.getStats(false), + } + + case *testpb.ClientArgs_Mark: + grpclog.Infof("client mark received:") + grpclog.Infof(" - %v", t) + if bc == nil { + return status.Error(codes.InvalidArgument, "client does not exist when mark received") + } + out = &testpb.ClientStatus{ + Stats: bc.getStats(t.Mark.Reset_), + } + } + + if err := stream.Send(out); err != nil { + return err + } + } +} + +func (s *workerServer) CoreCount(ctx context.Context, in *testpb.CoreRequest) (*testpb.CoreResponse, error) { + grpclog.Infof("core count: %v", runtime.NumCPU()) + return &testpb.CoreResponse{Cores: int32(runtime.NumCPU())}, nil +} + +func (s *workerServer) QuitWorker(ctx context.Context, in *testpb.Void) (*testpb.Void, error) { + grpclog.Infof("quitting worker") + s.stop <- true + return &testpb.Void{}, nil +} + +func main() { + grpc.EnableTracing = false + + flag.Parse() + lis, err := net.Listen("tcp", ":"+strconv.Itoa(*driverPort)) + if err != nil { + grpclog.Fatalf("failed to listen: %v", err) + } + grpclog.Infof("worker listening at port %v", *driverPort) + + s := grpc.NewServer() + stop := make(chan bool) + testpb.RegisterWorkerServiceServer(s, &workerServer{ + stop: stop, + serverPort: *serverPort, + }) + + go func() { + <-stop + // Wait for 1 second before stopping the server to make sure the return value of QuitWorker is sent to client. + // TODO revise this once server graceful stop is supported in gRPC. + time.Sleep(time.Second) + s.Stop() + }() + + runtime.SetBlockProfileRate(*blockProfRate) + + if *pprofPort >= 0 { + go func() { + grpclog.Infoln("Starting pprof server on port " + strconv.Itoa(*pprofPort)) + grpclog.Infoln(http.ListenAndServe("localhost:"+strconv.Itoa(*pprofPort), nil)) + }() + } + + s.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go new file mode 100644 index 0000000..f393bb6 --- /dev/null +++ b/vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go @@ -0,0 +1,900 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/binarylog/grpc_binarylog_v1/binarylog.proto + +package grpc_binarylog_v1 // import "google.golang.org/grpc/binarylog/grpc_binarylog_v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Enumerates the type of event +// Note the terminology is different from the RPC semantics +// definition, but the same meaning is expressed here. +type GrpcLogEntry_EventType int32 + +const ( + GrpcLogEntry_EVENT_TYPE_UNKNOWN GrpcLogEntry_EventType = 0 + // Header sent from client to server + GrpcLogEntry_EVENT_TYPE_CLIENT_HEADER GrpcLogEntry_EventType = 1 + // Header sent from server to client + GrpcLogEntry_EVENT_TYPE_SERVER_HEADER GrpcLogEntry_EventType = 2 + // Message sent from client to server + GrpcLogEntry_EVENT_TYPE_CLIENT_MESSAGE GrpcLogEntry_EventType = 3 + // Message sent from server to client + GrpcLogEntry_EVENT_TYPE_SERVER_MESSAGE GrpcLogEntry_EventType = 4 + // A signal that client is done sending + GrpcLogEntry_EVENT_TYPE_CLIENT_HALF_CLOSE GrpcLogEntry_EventType = 5 + // Trailer indicates the end of the RPC. + // On client side, this event means a trailer was either received + // from the network or the gRPC library locally generated a status + // to inform the application about a failure. + // On server side, this event means the server application requested + // to send a trailer. Note: EVENT_TYPE_CANCEL may still arrive after + // this due to races on server side. + GrpcLogEntry_EVENT_TYPE_SERVER_TRAILER GrpcLogEntry_EventType = 6 + // A signal that the RPC is cancelled. On client side, this + // indicates the client application requests a cancellation. + // On server side, this indicates that cancellation was detected. + // Note: This marks the end of the RPC. Events may arrive after + // this due to races. For example, on client side a trailer + // may arrive even though the application requested to cancel the RPC. + GrpcLogEntry_EVENT_TYPE_CANCEL GrpcLogEntry_EventType = 7 +) + +var GrpcLogEntry_EventType_name = map[int32]string{ + 0: "EVENT_TYPE_UNKNOWN", + 1: "EVENT_TYPE_CLIENT_HEADER", + 2: "EVENT_TYPE_SERVER_HEADER", + 3: "EVENT_TYPE_CLIENT_MESSAGE", + 4: "EVENT_TYPE_SERVER_MESSAGE", + 5: "EVENT_TYPE_CLIENT_HALF_CLOSE", + 6: "EVENT_TYPE_SERVER_TRAILER", + 7: "EVENT_TYPE_CANCEL", +} +var GrpcLogEntry_EventType_value = map[string]int32{ + "EVENT_TYPE_UNKNOWN": 0, + "EVENT_TYPE_CLIENT_HEADER": 1, + "EVENT_TYPE_SERVER_HEADER": 2, + "EVENT_TYPE_CLIENT_MESSAGE": 3, + "EVENT_TYPE_SERVER_MESSAGE": 4, + "EVENT_TYPE_CLIENT_HALF_CLOSE": 5, + "EVENT_TYPE_SERVER_TRAILER": 6, + "EVENT_TYPE_CANCEL": 7, +} + +func (x GrpcLogEntry_EventType) String() string { + return proto.EnumName(GrpcLogEntry_EventType_name, int32(x)) +} +func (GrpcLogEntry_EventType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{0, 0} +} + +// Enumerates the entity that generates the log entry +type GrpcLogEntry_Logger int32 + +const ( + GrpcLogEntry_LOGGER_UNKNOWN GrpcLogEntry_Logger = 0 + GrpcLogEntry_LOGGER_CLIENT GrpcLogEntry_Logger = 1 + GrpcLogEntry_LOGGER_SERVER GrpcLogEntry_Logger = 2 +) + +var GrpcLogEntry_Logger_name = map[int32]string{ + 0: "LOGGER_UNKNOWN", + 1: "LOGGER_CLIENT", + 2: "LOGGER_SERVER", +} +var GrpcLogEntry_Logger_value = map[string]int32{ + "LOGGER_UNKNOWN": 0, + "LOGGER_CLIENT": 1, + "LOGGER_SERVER": 2, +} + +func (x GrpcLogEntry_Logger) String() string { + return proto.EnumName(GrpcLogEntry_Logger_name, int32(x)) +} +func (GrpcLogEntry_Logger) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{0, 1} +} + +type Address_Type int32 + +const ( + Address_TYPE_UNKNOWN Address_Type = 0 + // address is in 1.2.3.4 form + 
Address_TYPE_IPV4 Address_Type = 1 + // address is in IPv6 canonical form (RFC5952 section 4) + // The scope is NOT included in the address string. + Address_TYPE_IPV6 Address_Type = 2 + // address is UDS string + Address_TYPE_UNIX Address_Type = 3 +) + +var Address_Type_name = map[int32]string{ + 0: "TYPE_UNKNOWN", + 1: "TYPE_IPV4", + 2: "TYPE_IPV6", + 3: "TYPE_UNIX", +} +var Address_Type_value = map[string]int32{ + "TYPE_UNKNOWN": 0, + "TYPE_IPV4": 1, + "TYPE_IPV6": 2, + "TYPE_UNIX": 3, +} + +func (x Address_Type) String() string { + return proto.EnumName(Address_Type_name, int32(x)) +} +func (Address_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{7, 0} +} + +// Log entry we store in binary logs +type GrpcLogEntry struct { + // The timestamp of the binary log message + Timestamp *timestamp.Timestamp `protobuf:"bytes,1,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // Uniquely identifies a call. The value must not be 0 in order to disambiguate + // from an unset value. + // Each call may have several log entries, they will all have the same call_id. + // Nothing is guaranteed about their value other than they are unique across + // different RPCs in the same gRPC process. + CallId uint64 `protobuf:"varint,2,opt,name=call_id,json=callId,proto3" json:"call_id,omitempty"` + // The entry sequence id for this call. The first GrpcLogEntry has a + // value of 1, to disambiguate from an unset value. The purpose of + // this field is to detect missing entries in environments where + // durability or ordering is not guaranteed. + SequenceIdWithinCall uint64 `protobuf:"varint,3,opt,name=sequence_id_within_call,json=sequenceIdWithinCall,proto3" json:"sequence_id_within_call,omitempty"` + Type GrpcLogEntry_EventType `protobuf:"varint,4,opt,name=type,proto3,enum=grpc.binarylog.v1.GrpcLogEntry_EventType" json:"type,omitempty"` + Logger GrpcLogEntry_Logger `protobuf:"varint,5,opt,name=logger,proto3,enum=grpc.binarylog.v1.GrpcLogEntry_Logger" json:"logger,omitempty"` + // The logger uses one of the following fields to record the payload, + // according to the type of the log entry. + // + // Types that are valid to be assigned to Payload: + // *GrpcLogEntry_ClientHeader + // *GrpcLogEntry_ServerHeader + // *GrpcLogEntry_Message + // *GrpcLogEntry_Trailer + Payload isGrpcLogEntry_Payload `protobuf_oneof:"payload"` + // true if payload does not represent the full message or metadata. + PayloadTruncated bool `protobuf:"varint,10,opt,name=payload_truncated,json=payloadTruncated,proto3" json:"payload_truncated,omitempty"` + // Peer address information, will only be recorded on the first + // incoming event. On client side, peer is logged on + // EVENT_TYPE_SERVER_HEADER normally or EVENT_TYPE_SERVER_TRAILER in + // the case of trailers-only. On server side, peer is always + // logged on EVENT_TYPE_CLIENT_HEADER. 
+ Peer *Address `protobuf:"bytes,11,opt,name=peer,proto3" json:"peer,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GrpcLogEntry) Reset() { *m = GrpcLogEntry{} } +func (m *GrpcLogEntry) String() string { return proto.CompactTextString(m) } +func (*GrpcLogEntry) ProtoMessage() {} +func (*GrpcLogEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{0} +} +func (m *GrpcLogEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GrpcLogEntry.Unmarshal(m, b) +} +func (m *GrpcLogEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GrpcLogEntry.Marshal(b, m, deterministic) +} +func (dst *GrpcLogEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_GrpcLogEntry.Merge(dst, src) +} +func (m *GrpcLogEntry) XXX_Size() int { + return xxx_messageInfo_GrpcLogEntry.Size(m) +} +func (m *GrpcLogEntry) XXX_DiscardUnknown() { + xxx_messageInfo_GrpcLogEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_GrpcLogEntry proto.InternalMessageInfo + +func (m *GrpcLogEntry) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +func (m *GrpcLogEntry) GetCallId() uint64 { + if m != nil { + return m.CallId + } + return 0 +} + +func (m *GrpcLogEntry) GetSequenceIdWithinCall() uint64 { + if m != nil { + return m.SequenceIdWithinCall + } + return 0 +} + +func (m *GrpcLogEntry) GetType() GrpcLogEntry_EventType { + if m != nil { + return m.Type + } + return GrpcLogEntry_EVENT_TYPE_UNKNOWN +} + +func (m *GrpcLogEntry) GetLogger() GrpcLogEntry_Logger { + if m != nil { + return m.Logger + } + return GrpcLogEntry_LOGGER_UNKNOWN +} + +type isGrpcLogEntry_Payload interface { + isGrpcLogEntry_Payload() +} + +type GrpcLogEntry_ClientHeader struct { + ClientHeader *ClientHeader `protobuf:"bytes,6,opt,name=client_header,json=clientHeader,proto3,oneof"` +} + +type GrpcLogEntry_ServerHeader struct { + ServerHeader *ServerHeader `protobuf:"bytes,7,opt,name=server_header,json=serverHeader,proto3,oneof"` +} + +type GrpcLogEntry_Message struct { + Message *Message `protobuf:"bytes,8,opt,name=message,proto3,oneof"` +} + +type GrpcLogEntry_Trailer struct { + Trailer *Trailer `protobuf:"bytes,9,opt,name=trailer,proto3,oneof"` +} + +func (*GrpcLogEntry_ClientHeader) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_ServerHeader) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_Message) isGrpcLogEntry_Payload() {} + +func (*GrpcLogEntry_Trailer) isGrpcLogEntry_Payload() {} + +func (m *GrpcLogEntry) GetPayload() isGrpcLogEntry_Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *GrpcLogEntry) GetClientHeader() *ClientHeader { + if x, ok := m.GetPayload().(*GrpcLogEntry_ClientHeader); ok { + return x.ClientHeader + } + return nil +} + +func (m *GrpcLogEntry) GetServerHeader() *ServerHeader { + if x, ok := m.GetPayload().(*GrpcLogEntry_ServerHeader); ok { + return x.ServerHeader + } + return nil +} + +func (m *GrpcLogEntry) GetMessage() *Message { + if x, ok := m.GetPayload().(*GrpcLogEntry_Message); ok { + return x.Message + } + return nil +} + +func (m *GrpcLogEntry) GetTrailer() *Trailer { + if x, ok := m.GetPayload().(*GrpcLogEntry_Trailer); ok { + return x.Trailer + } + return nil +} + +func (m *GrpcLogEntry) GetPayloadTruncated() bool { + if m != nil { + return m.PayloadTruncated + } + return false +} + +func (m *GrpcLogEntry) GetPeer() *Address { + if m != nil { + return m.Peer + } + return nil 
+} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*GrpcLogEntry) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GrpcLogEntry_OneofMarshaler, _GrpcLogEntry_OneofUnmarshaler, _GrpcLogEntry_OneofSizer, []interface{}{ + (*GrpcLogEntry_ClientHeader)(nil), + (*GrpcLogEntry_ServerHeader)(nil), + (*GrpcLogEntry_Message)(nil), + (*GrpcLogEntry_Trailer)(nil), + } +} + +func _GrpcLogEntry_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GrpcLogEntry) + // payload + switch x := m.Payload.(type) { + case *GrpcLogEntry_ClientHeader: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClientHeader); err != nil { + return err + } + case *GrpcLogEntry_ServerHeader: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ServerHeader); err != nil { + return err + } + case *GrpcLogEntry_Message: + b.EncodeVarint(8<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Message); err != nil { + return err + } + case *GrpcLogEntry_Trailer: + b.EncodeVarint(9<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Trailer); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("GrpcLogEntry.Payload has unexpected type %T", x) + } + return nil +} + +func _GrpcLogEntry_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GrpcLogEntry) + switch tag { + case 6: // payload.client_header + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ClientHeader) + err := b.DecodeMessage(msg) + m.Payload = &GrpcLogEntry_ClientHeader{msg} + return true, err + case 7: // payload.server_header + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ServerHeader) + err := b.DecodeMessage(msg) + m.Payload = &GrpcLogEntry_ServerHeader{msg} + return true, err + case 8: // payload.message + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Message) + err := b.DecodeMessage(msg) + m.Payload = &GrpcLogEntry_Message{msg} + return true, err + case 9: // payload.trailer + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Trailer) + err := b.DecodeMessage(msg) + m.Payload = &GrpcLogEntry_Trailer{msg} + return true, err + default: + return false, nil + } +} + +func _GrpcLogEntry_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GrpcLogEntry) + // payload + switch x := m.Payload.(type) { + case *GrpcLogEntry_ClientHeader: + s := proto.Size(x.ClientHeader) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcLogEntry_ServerHeader: + s := proto.Size(x.ServerHeader) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcLogEntry_Message: + s := proto.Size(x.Message) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *GrpcLogEntry_Trailer: + s := proto.Size(x.Trailer) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type ClientHeader struct { + // This contains only the metadata from the application. + Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + // The name of the RPC method, which looks something like: + // // + // Note the leading "/" character. 
+ MethodName string `protobuf:"bytes,2,opt,name=method_name,json=methodName,proto3" json:"method_name,omitempty"` + // A single process may be used to run multiple virtual + // servers with different identities. + // The authority is the name of such a server identity. + // It is typically a portion of the URI in the form of + // <host> or <host>:<port>. + Authority string `protobuf:"bytes,3,opt,name=authority,proto3" json:"authority,omitempty"` + // the RPC timeout + Timeout *duration.Duration `protobuf:"bytes,4,opt,name=timeout,proto3" json:"timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ClientHeader) Reset() { *m = ClientHeader{} } +func (m *ClientHeader) String() string { return proto.CompactTextString(m) } +func (*ClientHeader) ProtoMessage() {} +func (*ClientHeader) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{1} +} +func (m *ClientHeader) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ClientHeader.Unmarshal(m, b) +} +func (m *ClientHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ClientHeader.Marshal(b, m, deterministic) +} +func (dst *ClientHeader) XXX_Merge(src proto.Message) { + xxx_messageInfo_ClientHeader.Merge(dst, src) +} +func (m *ClientHeader) XXX_Size() int { + return xxx_messageInfo_ClientHeader.Size(m) +} +func (m *ClientHeader) XXX_DiscardUnknown() { + xxx_messageInfo_ClientHeader.DiscardUnknown(m) +} + +var xxx_messageInfo_ClientHeader proto.InternalMessageInfo + +func (m *ClientHeader) GetMetadata() *Metadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *ClientHeader) GetMethodName() string { + if m != nil { + return m.MethodName + } + return "" +} + +func (m *ClientHeader) GetAuthority() string { + if m != nil { + return m.Authority + } + return "" +} + +func (m *ClientHeader) GetTimeout() *duration.Duration { + if m != nil { + return m.Timeout + } + return nil +} + +type ServerHeader struct { + // This contains only the metadata from the application. + Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerHeader) Reset() { *m = ServerHeader{} } +func (m *ServerHeader) String() string { return proto.CompactTextString(m) } +func (*ServerHeader) ProtoMessage() {} +func (*ServerHeader) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{2} +} +func (m *ServerHeader) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerHeader.Unmarshal(m, b) +} +func (m *ServerHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerHeader.Marshal(b, m, deterministic) +} +func (dst *ServerHeader) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerHeader.Merge(dst, src) +} +func (m *ServerHeader) XXX_Size() int { + return xxx_messageInfo_ServerHeader.Size(m) +} +func (m *ServerHeader) XXX_DiscardUnknown() { + xxx_messageInfo_ServerHeader.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerHeader proto.InternalMessageInfo + +func (m *ServerHeader) GetMetadata() *Metadata { + if m != nil { + return m.Metadata + } + return nil +} + +type Trailer struct { + // This contains only the metadata from the application. + Metadata *Metadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + // The gRPC status code.
+ StatusCode uint32 `protobuf:"varint,2,opt,name=status_code,json=statusCode,proto3" json:"status_code,omitempty"` + // An original status message before any transport specific + // encoding. + StatusMessage string `protobuf:"bytes,3,opt,name=status_message,json=statusMessage,proto3" json:"status_message,omitempty"` + // The value of the 'grpc-status-details-bin' metadata key. If + // present, this is always an encoded 'google.rpc.Status' message. + StatusDetails []byte `protobuf:"bytes,4,opt,name=status_details,json=statusDetails,proto3" json:"status_details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Trailer) Reset() { *m = Trailer{} } +func (m *Trailer) String() string { return proto.CompactTextString(m) } +func (*Trailer) ProtoMessage() {} +func (*Trailer) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{3} +} +func (m *Trailer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Trailer.Unmarshal(m, b) +} +func (m *Trailer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Trailer.Marshal(b, m, deterministic) +} +func (dst *Trailer) XXX_Merge(src proto.Message) { + xxx_messageInfo_Trailer.Merge(dst, src) +} +func (m *Trailer) XXX_Size() int { + return xxx_messageInfo_Trailer.Size(m) +} +func (m *Trailer) XXX_DiscardUnknown() { + xxx_messageInfo_Trailer.DiscardUnknown(m) +} + +var xxx_messageInfo_Trailer proto.InternalMessageInfo + +func (m *Trailer) GetMetadata() *Metadata { + if m != nil { + return m.Metadata + } + return nil +} + +func (m *Trailer) GetStatusCode() uint32 { + if m != nil { + return m.StatusCode + } + return 0 +} + +func (m *Trailer) GetStatusMessage() string { + if m != nil { + return m.StatusMessage + } + return "" +} + +func (m *Trailer) GetStatusDetails() []byte { + if m != nil { + return m.StatusDetails + } + return nil +} + +// Message payload, used by CLIENT_MESSAGE and SERVER_MESSAGE +type Message struct { + // Length of the message. It may not be the same as the length of the + // data field, as the logging payload can be truncated or omitted. + Length uint32 `protobuf:"varint,1,opt,name=length,proto3" json:"length,omitempty"` + // May be truncated or omitted. 
+ Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Message) Reset() { *m = Message{} } +func (m *Message) String() string { return proto.CompactTextString(m) } +func (*Message) ProtoMessage() {} +func (*Message) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{4} +} +func (m *Message) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Message.Unmarshal(m, b) +} +func (m *Message) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Message.Marshal(b, m, deterministic) +} +func (dst *Message) XXX_Merge(src proto.Message) { + xxx_messageInfo_Message.Merge(dst, src) +} +func (m *Message) XXX_Size() int { + return xxx_messageInfo_Message.Size(m) +} +func (m *Message) XXX_DiscardUnknown() { + xxx_messageInfo_Message.DiscardUnknown(m) +} + +var xxx_messageInfo_Message proto.InternalMessageInfo + +func (m *Message) GetLength() uint32 { + if m != nil { + return m.Length + } + return 0 +} + +func (m *Message) GetData() []byte { + if m != nil { + return m.Data + } + return nil +} + +// A list of metadata pairs, used in the payload of client header, +// server header, and server trailer. +// Implementations may omit some entries to honor the header limits +// of GRPC_BINARY_LOG_CONFIG. +// +// Header keys added by gRPC are omitted. To be more specific, +// implementations will not log the following entries, and this is +// not to be treated as a truncation: +// - entries handled by grpc that are not user visible, such as those +// that begin with 'grpc-' (with exception of grpc-trace-bin) +// or keys like 'lb-token' +// - transport specific entries, including but not limited to: +// ':path', ':authority', 'content-encoding', 'user-agent', 'te', etc +// - entries added for call credentials +// +// Implementations must always log grpc-trace-bin if it is present. +// Practically speaking it will only be visible on server side because +// grpc-trace-bin is managed by low level client side mechanisms +// inaccessible from the application level. On server side, the +// header is just a normal metadata key. +// The pair will not count towards the size limit. 
+type Metadata struct { + Entry []*MetadataEntry `protobuf:"bytes,1,rep,name=entry,proto3" json:"entry,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Metadata) Reset() { *m = Metadata{} } +func (m *Metadata) String() string { return proto.CompactTextString(m) } +func (*Metadata) ProtoMessage() {} +func (*Metadata) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{5} +} +func (m *Metadata) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Metadata.Unmarshal(m, b) +} +func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Metadata.Marshal(b, m, deterministic) +} +func (dst *Metadata) XXX_Merge(src proto.Message) { + xxx_messageInfo_Metadata.Merge(dst, src) +} +func (m *Metadata) XXX_Size() int { + return xxx_messageInfo_Metadata.Size(m) +} +func (m *Metadata) XXX_DiscardUnknown() { + xxx_messageInfo_Metadata.DiscardUnknown(m) +} + +var xxx_messageInfo_Metadata proto.InternalMessageInfo + +func (m *Metadata) GetEntry() []*MetadataEntry { + if m != nil { + return m.Entry + } + return nil +} + +// A metadata key value pair +type MetadataEntry struct { + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MetadataEntry) Reset() { *m = MetadataEntry{} } +func (m *MetadataEntry) String() string { return proto.CompactTextString(m) } +func (*MetadataEntry) ProtoMessage() {} +func (*MetadataEntry) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{6} +} +func (m *MetadataEntry) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_MetadataEntry.Unmarshal(m, b) +} +func (m *MetadataEntry) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_MetadataEntry.Marshal(b, m, deterministic) +} +func (dst *MetadataEntry) XXX_Merge(src proto.Message) { + xxx_messageInfo_MetadataEntry.Merge(dst, src) +} +func (m *MetadataEntry) XXX_Size() int { + return xxx_messageInfo_MetadataEntry.Size(m) +} +func (m *MetadataEntry) XXX_DiscardUnknown() { + xxx_messageInfo_MetadataEntry.DiscardUnknown(m) +} + +var xxx_messageInfo_MetadataEntry proto.InternalMessageInfo + +func (m *MetadataEntry) GetKey() string { + if m != nil { + return m.Key + } + return "" +} + +func (m *MetadataEntry) GetValue() []byte { + if m != nil { + return m.Value + } + return nil +} + +// Address information +type Address struct { + Type Address_Type `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.binarylog.v1.Address_Type" json:"type,omitempty"` + Address string `protobuf:"bytes,2,opt,name=address,proto3" json:"address,omitempty"` + // only for TYPE_IPV4 and TYPE_IPV6 + IpPort uint32 `protobuf:"varint,3,opt,name=ip_port,json=ipPort,proto3" json:"ip_port,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address) Reset() { *m = Address{} } +func (m *Address) String() string { return proto.CompactTextString(m) } +func (*Address) ProtoMessage() {} +func (*Address) Descriptor() ([]byte, []int) { + return fileDescriptor_binarylog_264c8c9c551ce911, []int{7} +} +func (m *Address) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address.Unmarshal(m, b) +} +func (m *Address) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address.Marshal(b, m, deterministic) +} +func (dst *Address) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address.Merge(dst, src) +} +func (m *Address) XXX_Size() int { + return xxx_messageInfo_Address.Size(m) +} +func (m *Address) XXX_DiscardUnknown() { + xxx_messageInfo_Address.DiscardUnknown(m) +} + +var xxx_messageInfo_Address proto.InternalMessageInfo + +func (m *Address) GetType() Address_Type { + if m != nil { + return m.Type + } + return Address_TYPE_UNKNOWN +} + +func (m *Address) GetAddress() string { + if m != nil { + return m.Address + } + return "" +} + +func (m *Address) GetIpPort() uint32 { + if m != nil { + return m.IpPort + } + return 0 +} + +func init() { + proto.RegisterType((*GrpcLogEntry)(nil), "grpc.binarylog.v1.GrpcLogEntry") + proto.RegisterType((*ClientHeader)(nil), "grpc.binarylog.v1.ClientHeader") + proto.RegisterType((*ServerHeader)(nil), "grpc.binarylog.v1.ServerHeader") + proto.RegisterType((*Trailer)(nil), "grpc.binarylog.v1.Trailer") + proto.RegisterType((*Message)(nil), "grpc.binarylog.v1.Message") + proto.RegisterType((*Metadata)(nil), "grpc.binarylog.v1.Metadata") + proto.RegisterType((*MetadataEntry)(nil), "grpc.binarylog.v1.MetadataEntry") + proto.RegisterType((*Address)(nil), "grpc.binarylog.v1.Address") + proto.RegisterEnum("grpc.binarylog.v1.GrpcLogEntry_EventType", GrpcLogEntry_EventType_name, GrpcLogEntry_EventType_value) + proto.RegisterEnum("grpc.binarylog.v1.GrpcLogEntry_Logger", GrpcLogEntry_Logger_name, GrpcLogEntry_Logger_value) + proto.RegisterEnum("grpc.binarylog.v1.Address_Type", Address_Type_name, Address_Type_value) +} + +func init() { + proto.RegisterFile("grpc/binarylog/grpc_binarylog_v1/binarylog.proto", fileDescriptor_binarylog_264c8c9c551ce911) +} + +var fileDescriptor_binarylog_264c8c9c551ce911 = []byte{ + // 900 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0x51, 0x6f, 0xe3, 0x44, + 0x10, 0x3e, 0x37, 0x69, 0xdc, 0x4c, 0x92, 0xca, 0x5d, 0x95, 0x3b, 0x5f, 0x29, 0x34, 0xb2, 0x04, + 0x0a, 0x42, 0x72, 0xb9, 0x94, 0xeb, 0xf1, 0x02, 0x52, 0x92, 0xfa, 0xd2, 0x88, 0x5c, 0x1a, 0x6d, + 0x72, 0x3d, 0x40, 0x48, 0xd6, 0x36, 0x5e, 0x1c, 0x0b, 0xc7, 0x6b, 0xd6, 0x9b, 0xa0, 0xfc, 0x2c, + 0xde, 0x90, 0xee, 0x77, 0xf1, 0x8e, 0xbc, 0x6b, 0x27, 0xa6, 0x69, 0x0f, 0x09, 0xde, 0x3c, 0xdf, + 0x7c, 0xf3, 0xcd, 0xee, 0x78, 0x66, 0x16, 0xbe, 0xf2, 0x79, 0x3c, 0x3b, 0xbf, 0x0b, 0x22, 0xc2, + 0xd7, 0x21, 0xf3, 0xcf, 0x53, 0xd3, 0xdd, 0x98, 0xee, 0xea, 0xc5, 0xd6, 0x67, 0xc7, 0x9c, 0x09, + 0x86, 0x8e, 0x52, 0x8a, 0xbd, 0x45, 0x57, 0x2f, 0x4e, 0x3e, 0xf5, 0x19, 0xf3, 0x43, 0x7a, 0x2e, + 0x09, 0x77, 0xcb, 0x5f, 0xce, 0xbd, 0x25, 0x27, 0x22, 0x60, 0x91, 0x0a, 0x39, 0x39, 0xbb, 0xef, + 0x17, 0xc1, 0x82, 0x26, 0x82, 0x2c, 0x62, 0x45, 0xb0, 0xde, 0xeb, 0x50, 0xef, 0xf3, 0x78, 0x36, + 0x64, 0xbe, 0x13, 0x09, 0xbe, 0x46, 0xdf, 0x40, 0x75, 0xc3, 0x31, 0xb5, 0xa6, 0xd6, 0xaa, 0xb5, + 0x4f, 0x6c, 0xa5, 0x62, 0xe7, 0x2a, 0xf6, 0x34, 0x67, 0xe0, 0x2d, 0x19, 0x3d, 0x03, 0x7d, 0x46, + 0xc2, 0xd0, 0x0d, 0x3c, 0x73, 0xaf, 0xa9, 0xb5, 0xca, 0xb8, 0x92, 0x9a, 0x03, 0x0f, 0xbd, 0x84, + 0x67, 0x09, 0xfd, 0x6d, 0x49, 0xa3, 0x19, 0x75, 0x03, 0xcf, 0xfd, 0x3d, 0x10, 0xf3, 0x20, 0x72, + 0x53, 0xa7, 0x59, 0x92, 0xc4, 0xe3, 0xdc, 0x3d, 0xf0, 0xde, 0x49, 0x67, 0x8f, 0x84, 0x21, 0xfa, + 0x16, 0xca, 0x62, 0x1d, 0x53, 0xb3, 0xdc, 0xd4, 0x5a, 0x87, 0xed, 0x2f, 0xec, 0x9d, 0xdb, 0xdb, + 0xc5, 0x83, 0xdb, 0xce, 0x8a, 0x46, 0x62, 0xba, 0x8e, 
0x29, 0x96, 0x61, 0xe8, 0x3b, 0xa8, 0x84, + 0xcc, 0xf7, 0x29, 0x37, 0xf7, 0xa5, 0xc0, 0xe7, 0xff, 0x26, 0x30, 0x94, 0x6c, 0x9c, 0x45, 0xa1, + 0xd7, 0xd0, 0x98, 0x85, 0x01, 0x8d, 0x84, 0x3b, 0xa7, 0xc4, 0xa3, 0xdc, 0xac, 0xc8, 0x62, 0x9c, + 0x3d, 0x20, 0xd3, 0x93, 0xbc, 0x6b, 0x49, 0xbb, 0x7e, 0x82, 0xeb, 0xb3, 0x82, 0x9d, 0xea, 0x24, + 0x94, 0xaf, 0x28, 0xcf, 0x75, 0xf4, 0x47, 0x75, 0x26, 0x92, 0xb7, 0xd5, 0x49, 0x0a, 0x36, 0xba, + 0x04, 0x7d, 0x41, 0x93, 0x84, 0xf8, 0xd4, 0x3c, 0xc8, 0x7f, 0xcb, 0x8e, 0xc2, 0x1b, 0xc5, 0xb8, + 0x7e, 0x82, 0x73, 0x72, 0x1a, 0x27, 0x38, 0x09, 0x42, 0xca, 0xcd, 0xea, 0xa3, 0x71, 0x53, 0xc5, + 0x48, 0xe3, 0x32, 0x32, 0xfa, 0x12, 0x8e, 0x62, 0xb2, 0x0e, 0x19, 0xf1, 0x5c, 0xc1, 0x97, 0xd1, + 0x8c, 0x08, 0xea, 0x99, 0xd0, 0xd4, 0x5a, 0x07, 0xd8, 0xc8, 0x1c, 0xd3, 0x1c, 0x47, 0x36, 0x94, + 0x63, 0x4a, 0xb9, 0x59, 0x7b, 0x34, 0x43, 0xc7, 0xf3, 0x38, 0x4d, 0x12, 0x2c, 0x79, 0xd6, 0x5f, + 0x1a, 0x54, 0x37, 0x3f, 0x0c, 0x3d, 0x05, 0xe4, 0xdc, 0x3a, 0xa3, 0xa9, 0x3b, 0xfd, 0x71, 0xec, + 0xb8, 0x6f, 0x47, 0xdf, 0x8f, 0x6e, 0xde, 0x8d, 0x8c, 0x27, 0xe8, 0x14, 0xcc, 0x02, 0xde, 0x1b, + 0x0e, 0xd2, 0xef, 0x6b, 0xa7, 0x73, 0xe5, 0x60, 0x43, 0xbb, 0xe7, 0x9d, 0x38, 0xf8, 0xd6, 0xc1, + 0xb9, 0x77, 0x0f, 0x7d, 0x02, 0xcf, 0x77, 0x63, 0xdf, 0x38, 0x93, 0x49, 0xa7, 0xef, 0x18, 0xa5, + 0x7b, 0xee, 0x2c, 0x38, 0x77, 0x97, 0x51, 0x13, 0x4e, 0x1f, 0xc8, 0xdc, 0x19, 0xbe, 0x76, 0x7b, + 0xc3, 0x9b, 0x89, 0x63, 0xec, 0x3f, 0x2c, 0x30, 0xc5, 0x9d, 0xc1, 0xd0, 0xc1, 0x46, 0x05, 0x7d, + 0x04, 0x47, 0x45, 0x81, 0xce, 0xa8, 0xe7, 0x0c, 0x0d, 0xdd, 0xea, 0x42, 0x45, 0xb5, 0x19, 0x42, + 0x70, 0x38, 0xbc, 0xe9, 0xf7, 0x1d, 0x5c, 0xb8, 0xef, 0x11, 0x34, 0x32, 0x4c, 0x65, 0x34, 0xb4, + 0x02, 0xa4, 0x52, 0x18, 0x7b, 0xdd, 0x2a, 0xe8, 0x59, 0xfd, 0xad, 0xf7, 0x1a, 0xd4, 0x8b, 0xcd, + 0x87, 0x5e, 0xc1, 0xc1, 0x82, 0x0a, 0xe2, 0x11, 0x41, 0xb2, 0xe1, 0xfd, 0xf8, 0xc1, 0x2e, 0x51, + 0x14, 0xbc, 0x21, 0xa3, 0x33, 0xa8, 0x2d, 0xa8, 0x98, 0x33, 0xcf, 0x8d, 0xc8, 0x82, 0xca, 0x01, + 0xae, 0x62, 0x50, 0xd0, 0x88, 0x2c, 0x28, 0x3a, 0x85, 0x2a, 0x59, 0x8a, 0x39, 0xe3, 0x81, 0x58, + 0xcb, 0xb1, 0xad, 0xe2, 0x2d, 0x80, 0x2e, 0x40, 0x4f, 0x17, 0x01, 0x5b, 0x0a, 0x39, 0xae, 0xb5, + 0xf6, 0xf3, 0x9d, 0x9d, 0x71, 0x95, 0x6d, 0x26, 0x9c, 0x33, 0xad, 0x3e, 0xd4, 0x8b, 0x1d, 0xff, + 0x9f, 0x0f, 0x6f, 0xfd, 0xa1, 0x81, 0x9e, 0x75, 0xf0, 0xff, 0xaa, 0x40, 0x22, 0x88, 0x58, 0x26, + 0xee, 0x8c, 0x79, 0xaa, 0x02, 0x0d, 0x0c, 0x0a, 0xea, 0x31, 0x8f, 0xa2, 0xcf, 0xe0, 0x30, 0x23, + 0xe4, 0x73, 0xa8, 0xca, 0xd0, 0x50, 0x68, 0x36, 0x7a, 0x05, 0x9a, 0x47, 0x05, 0x09, 0xc2, 0x44, + 0x56, 0xa4, 0x9e, 0xd3, 0xae, 0x14, 0x68, 0xbd, 0x04, 0x3d, 0x8f, 0x78, 0x0a, 0x95, 0x90, 0x46, + 0xbe, 0x98, 0xcb, 0x03, 0x37, 0x70, 0x66, 0x21, 0x04, 0x65, 0x79, 0x8d, 0x3d, 0x19, 0x2f, 0xbf, + 0xad, 0x2e, 0x1c, 0xe4, 0x67, 0x47, 0x97, 0xb0, 0x4f, 0xd3, 0xcd, 0x65, 0x6a, 0xcd, 0x52, 0xab, + 0xd6, 0x6e, 0x7e, 0xe0, 0x9e, 0x72, 0xc3, 0x61, 0x45, 0xb7, 0x5e, 0x41, 0xe3, 0x1f, 0x38, 0x32, + 0xa0, 0xf4, 0x2b, 0x5d, 0xcb, 0xec, 0x55, 0x9c, 0x7e, 0xa2, 0x63, 0xd8, 0x5f, 0x91, 0x70, 0x49, + 0xb3, 0xdc, 0xca, 0xb0, 0xfe, 0xd4, 0x40, 0xcf, 0xe6, 0x18, 0x5d, 0x64, 0xdb, 0x59, 0x93, 0xcb, + 0xf5, 0xec, 0xf1, 0x89, 0xb7, 0x0b, 0x3b, 0xd9, 0x04, 0x9d, 0x28, 0x34, 0xeb, 0xb0, 0xdc, 0x4c, + 0x1f, 0x8f, 0x20, 0x76, 0x63, 0xc6, 0x85, 0xac, 0x6a, 0x03, 0x57, 0x82, 0x78, 0xcc, 0xb8, 0xb0, + 0x1c, 0x28, 0xcb, 0x1d, 0x61, 0x40, 0xfd, 0xde, 0x76, 0x68, 0x40, 0x55, 0x22, 0x83, 0xf1, 0xed, + 0xd7, 0x86, 0x56, 0x34, 0x2f, 0x8d, 0xbd, 0x8d, 0xf9, 0x76, 0x34, 0xf8, 0xc1, 
0x28, 0x75, 0x7f, + 0x86, 0xe3, 0x80, 0xed, 0x1e, 0xb2, 0x7b, 0xd8, 0x95, 0xd6, 0x90, 0xf9, 0xe3, 0xb4, 0x51, 0xc7, + 0xda, 0x4f, 0xed, 0xac, 0x71, 0x7d, 0x16, 0x92, 0xc8, 0xb7, 0x19, 0x57, 0x4f, 0xf3, 0x87, 0x5e, + 0xea, 0xbb, 0x8a, 0xec, 0xf2, 0x8b, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0xe7, 0xf6, 0x4b, 0x50, + 0xd4, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/call.go b/vendor/google.golang.org/grpc/call.go new file mode 100644 index 0000000..9e20e4d --- /dev/null +++ b/vendor/google.golang.org/grpc/call.go @@ -0,0 +1,74 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" +) + +// Invoke sends the RPC request on the wire and returns after response is +// received. This is typically called by generated code. +// +// All errors returned by Invoke are compatible with the status package. +func (cc *ClientConn) Invoke(ctx context.Context, method string, args, reply interface{}, opts ...CallOption) error { + // allow interceptor to see all applicable call options, which means those + // configured as defaults from dial option as well as per-call options + opts = combine(cc.dopts.callOptions, opts) + + if cc.dopts.unaryInt != nil { + return cc.dopts.unaryInt(ctx, method, args, reply, cc, invoke, opts...) + } + return invoke(ctx, method, args, reply, cc, opts...) +} + +func combine(o1 []CallOption, o2 []CallOption) []CallOption { + // we don't use append because o1 could have extra capacity whose + // elements would be overwritten, which could cause inadvertent + // sharing (and race conditions) between concurrent calls + if len(o1) == 0 { + return o2 + } else if len(o2) == 0 { + return o1 + } + ret := make([]CallOption, len(o1)+len(o2)) + copy(ret, o1) + copy(ret[len(o1):], o2) + return ret +} + +// Invoke sends the RPC request on the wire and returns after response is +// received. This is typically called by generated code. +// +// DEPRECATED: Use ClientConn.Invoke instead. +func Invoke(ctx context.Context, method string, args, reply interface{}, cc *ClientConn, opts ...CallOption) error { + return cc.Invoke(ctx, method, args, reply, opts...) +} + +var unaryStreamDesc = &StreamDesc{ServerStreams: false, ClientStreams: false} + +func invoke(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error { + cs, err := newClientStream(ctx, unaryStreamDesc, cc, method, opts...) + if err != nil { + return err + } + if err := cs.SendMsg(req); err != nil { + return err + } + return cs.RecvMsg(reply) +} diff --git a/vendor/google.golang.org/grpc/channelz/grpc_channelz_v1/channelz.pb.go b/vendor/google.golang.org/grpc/channelz/grpc_channelz_v1/channelz.pb.go new file mode 100644 index 0000000..fab354a --- /dev/null +++ b/vendor/google.golang.org/grpc/channelz/grpc_channelz_v1/channelz.pb.go @@ -0,0 +1,3445 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: grpc/channelz/v1/channelz.proto + +package grpc_channelz_v1 // import "google.golang.org/grpc/channelz/grpc_channelz_v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import duration "github.com/golang/protobuf/ptypes/duration" +import timestamp "github.com/golang/protobuf/ptypes/timestamp" +import wrappers "github.com/golang/protobuf/ptypes/wrappers" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ChannelConnectivityState_State int32 + +const ( + ChannelConnectivityState_UNKNOWN ChannelConnectivityState_State = 0 + ChannelConnectivityState_IDLE ChannelConnectivityState_State = 1 + ChannelConnectivityState_CONNECTING ChannelConnectivityState_State = 2 + ChannelConnectivityState_READY ChannelConnectivityState_State = 3 + ChannelConnectivityState_TRANSIENT_FAILURE ChannelConnectivityState_State = 4 + ChannelConnectivityState_SHUTDOWN ChannelConnectivityState_State = 5 +) + +var ChannelConnectivityState_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "IDLE", + 2: "CONNECTING", + 3: "READY", + 4: "TRANSIENT_FAILURE", + 5: "SHUTDOWN", +} +var ChannelConnectivityState_State_value = map[string]int32{ + "UNKNOWN": 0, + "IDLE": 1, + "CONNECTING": 2, + "READY": 3, + "TRANSIENT_FAILURE": 4, + "SHUTDOWN": 5, +} + +func (x ChannelConnectivityState_State) String() string { + return proto.EnumName(ChannelConnectivityState_State_name, int32(x)) +} +func (ChannelConnectivityState_State) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{2, 0} +} + +// The supported severity levels of trace events. +type ChannelTraceEvent_Severity int32 + +const ( + ChannelTraceEvent_CT_UNKNOWN ChannelTraceEvent_Severity = 0 + ChannelTraceEvent_CT_INFO ChannelTraceEvent_Severity = 1 + ChannelTraceEvent_CT_WARNING ChannelTraceEvent_Severity = 2 + ChannelTraceEvent_CT_ERROR ChannelTraceEvent_Severity = 3 +) + +var ChannelTraceEvent_Severity_name = map[int32]string{ + 0: "CT_UNKNOWN", + 1: "CT_INFO", + 2: "CT_WARNING", + 3: "CT_ERROR", +} +var ChannelTraceEvent_Severity_value = map[string]int32{ + "CT_UNKNOWN": 0, + "CT_INFO": 1, + "CT_WARNING": 2, + "CT_ERROR": 3, +} + +func (x ChannelTraceEvent_Severity) String() string { + return proto.EnumName(ChannelTraceEvent_Severity_name, int32(x)) +} +func (ChannelTraceEvent_Severity) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{4, 0} +} + +// Channel is a logical grouping of channels, subchannels, and sockets. +type Channel struct { + // The identifier for this channel. This should bet set. + Ref *ChannelRef `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"` + // Data specific to this channel. + Data *ChannelData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + // There are no ordering guarantees on the order of channel refs. + // There may not be cycles in the ref graph. + // A channel ref may be present in more than one channel or subchannel. 
+ ChannelRef []*ChannelRef `protobuf:"bytes,3,rep,name=channel_ref,json=channelRef,proto3" json:"channel_ref,omitempty"` + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. + // There are no ordering guarantees on the order of subchannel refs. + // There may not be cycles in the ref graph. + // A sub channel ref may be present in more than one channel or subchannel. + SubchannelRef []*SubchannelRef `protobuf:"bytes,4,rep,name=subchannel_ref,json=subchannelRef,proto3" json:"subchannel_ref,omitempty"` + // There are no ordering guarantees on the order of sockets. + SocketRef []*SocketRef `protobuf:"bytes,5,rep,name=socket_ref,json=socketRef,proto3" json:"socket_ref,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Channel) Reset() { *m = Channel{} } +func (m *Channel) String() string { return proto.CompactTextString(m) } +func (*Channel) ProtoMessage() {} +func (*Channel) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{0} +} +func (m *Channel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Channel.Unmarshal(m, b) +} +func (m *Channel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Channel.Marshal(b, m, deterministic) +} +func (dst *Channel) XXX_Merge(src proto.Message) { + xxx_messageInfo_Channel.Merge(dst, src) +} +func (m *Channel) XXX_Size() int { + return xxx_messageInfo_Channel.Size(m) +} +func (m *Channel) XXX_DiscardUnknown() { + xxx_messageInfo_Channel.DiscardUnknown(m) +} + +var xxx_messageInfo_Channel proto.InternalMessageInfo + +func (m *Channel) GetRef() *ChannelRef { + if m != nil { + return m.Ref + } + return nil +} + +func (m *Channel) GetData() *ChannelData { + if m != nil { + return m.Data + } + return nil +} + +func (m *Channel) GetChannelRef() []*ChannelRef { + if m != nil { + return m.ChannelRef + } + return nil +} + +func (m *Channel) GetSubchannelRef() []*SubchannelRef { + if m != nil { + return m.SubchannelRef + } + return nil +} + +func (m *Channel) GetSocketRef() []*SocketRef { + if m != nil { + return m.SocketRef + } + return nil +} + +// Subchannel is a logical grouping of channels, subchannels, and sockets. +// A subchannel is load balanced over by it's ancestor +type Subchannel struct { + // The identifier for this channel. + Ref *SubchannelRef `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"` + // Data specific to this channel. + Data *ChannelData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + // There are no ordering guarantees on the order of channel refs. + // There may not be cycles in the ref graph. + // A channel ref may be present in more than one channel or subchannel. + ChannelRef []*ChannelRef `protobuf:"bytes,3,rep,name=channel_ref,json=channelRef,proto3" json:"channel_ref,omitempty"` + // At most one of 'channel_ref+subchannel_ref' and 'socket' is set. + // There are no ordering guarantees on the order of subchannel refs. + // There may not be cycles in the ref graph. + // A sub channel ref may be present in more than one channel or subchannel. + SubchannelRef []*SubchannelRef `protobuf:"bytes,4,rep,name=subchannel_ref,json=subchannelRef,proto3" json:"subchannel_ref,omitempty"` + // There are no ordering guarantees on the order of sockets. 
+ SocketRef []*SocketRef `protobuf:"bytes,5,rep,name=socket_ref,json=socketRef,proto3" json:"socket_ref,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Subchannel) Reset() { *m = Subchannel{} } +func (m *Subchannel) String() string { return proto.CompactTextString(m) } +func (*Subchannel) ProtoMessage() {} +func (*Subchannel) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{1} +} +func (m *Subchannel) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Subchannel.Unmarshal(m, b) +} +func (m *Subchannel) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Subchannel.Marshal(b, m, deterministic) +} +func (dst *Subchannel) XXX_Merge(src proto.Message) { + xxx_messageInfo_Subchannel.Merge(dst, src) +} +func (m *Subchannel) XXX_Size() int { + return xxx_messageInfo_Subchannel.Size(m) +} +func (m *Subchannel) XXX_DiscardUnknown() { + xxx_messageInfo_Subchannel.DiscardUnknown(m) +} + +var xxx_messageInfo_Subchannel proto.InternalMessageInfo + +func (m *Subchannel) GetRef() *SubchannelRef { + if m != nil { + return m.Ref + } + return nil +} + +func (m *Subchannel) GetData() *ChannelData { + if m != nil { + return m.Data + } + return nil +} + +func (m *Subchannel) GetChannelRef() []*ChannelRef { + if m != nil { + return m.ChannelRef + } + return nil +} + +func (m *Subchannel) GetSubchannelRef() []*SubchannelRef { + if m != nil { + return m.SubchannelRef + } + return nil +} + +func (m *Subchannel) GetSocketRef() []*SocketRef { + if m != nil { + return m.SocketRef + } + return nil +} + +// These come from the specified states in this document: +// https://github.com/grpc/grpc/blob/master/doc/connectivity-semantics-and-api.md +type ChannelConnectivityState struct { + State ChannelConnectivityState_State `protobuf:"varint,1,opt,name=state,proto3,enum=grpc.channelz.v1.ChannelConnectivityState_State" json:"state,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChannelConnectivityState) Reset() { *m = ChannelConnectivityState{} } +func (m *ChannelConnectivityState) String() string { return proto.CompactTextString(m) } +func (*ChannelConnectivityState) ProtoMessage() {} +func (*ChannelConnectivityState) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{2} +} +func (m *ChannelConnectivityState) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChannelConnectivityState.Unmarshal(m, b) +} +func (m *ChannelConnectivityState) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChannelConnectivityState.Marshal(b, m, deterministic) +} +func (dst *ChannelConnectivityState) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChannelConnectivityState.Merge(dst, src) +} +func (m *ChannelConnectivityState) XXX_Size() int { + return xxx_messageInfo_ChannelConnectivityState.Size(m) +} +func (m *ChannelConnectivityState) XXX_DiscardUnknown() { + xxx_messageInfo_ChannelConnectivityState.DiscardUnknown(m) +} + +var xxx_messageInfo_ChannelConnectivityState proto.InternalMessageInfo + +func (m *ChannelConnectivityState) GetState() ChannelConnectivityState_State { + if m != nil { + return m.State + } + return ChannelConnectivityState_UNKNOWN +} + +// Channel data is data related to a specific Channel or Subchannel. 
+type ChannelData struct { + // The connectivity state of the channel or subchannel. Implementations + // should always set this. + State *ChannelConnectivityState `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` + // The target this channel originally tried to connect to. May be absent + Target string `protobuf:"bytes,2,opt,name=target,proto3" json:"target,omitempty"` + // A trace of recent events on the channel. May be absent. + Trace *ChannelTrace `protobuf:"bytes,3,opt,name=trace,proto3" json:"trace,omitempty"` + // The number of calls started on the channel + CallsStarted int64 `protobuf:"varint,4,opt,name=calls_started,json=callsStarted,proto3" json:"calls_started,omitempty"` + // The number of calls that have completed with an OK status + CallsSucceeded int64 `protobuf:"varint,5,opt,name=calls_succeeded,json=callsSucceeded,proto3" json:"calls_succeeded,omitempty"` + // The number of calls that have completed with a non-OK status + CallsFailed int64 `protobuf:"varint,6,opt,name=calls_failed,json=callsFailed,proto3" json:"calls_failed,omitempty"` + // The last time a call was started on the channel. + LastCallStartedTimestamp *timestamp.Timestamp `protobuf:"bytes,7,opt,name=last_call_started_timestamp,json=lastCallStartedTimestamp,proto3" json:"last_call_started_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChannelData) Reset() { *m = ChannelData{} } +func (m *ChannelData) String() string { return proto.CompactTextString(m) } +func (*ChannelData) ProtoMessage() {} +func (*ChannelData) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{3} +} +func (m *ChannelData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChannelData.Unmarshal(m, b) +} +func (m *ChannelData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChannelData.Marshal(b, m, deterministic) +} +func (dst *ChannelData) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChannelData.Merge(dst, src) +} +func (m *ChannelData) XXX_Size() int { + return xxx_messageInfo_ChannelData.Size(m) +} +func (m *ChannelData) XXX_DiscardUnknown() { + xxx_messageInfo_ChannelData.DiscardUnknown(m) +} + +var xxx_messageInfo_ChannelData proto.InternalMessageInfo + +func (m *ChannelData) GetState() *ChannelConnectivityState { + if m != nil { + return m.State + } + return nil +} + +func (m *ChannelData) GetTarget() string { + if m != nil { + return m.Target + } + return "" +} + +func (m *ChannelData) GetTrace() *ChannelTrace { + if m != nil { + return m.Trace + } + return nil +} + +func (m *ChannelData) GetCallsStarted() int64 { + if m != nil { + return m.CallsStarted + } + return 0 +} + +func (m *ChannelData) GetCallsSucceeded() int64 { + if m != nil { + return m.CallsSucceeded + } + return 0 +} + +func (m *ChannelData) GetCallsFailed() int64 { + if m != nil { + return m.CallsFailed + } + return 0 +} + +func (m *ChannelData) GetLastCallStartedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastCallStartedTimestamp + } + return nil +} + +// A trace event is an interesting thing that happened to a channel or +// subchannel, such as creation, address resolution, subchannel creation, etc. +type ChannelTraceEvent struct { + // High level description of the event. 
+ Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + // the severity of the trace event + Severity ChannelTraceEvent_Severity `protobuf:"varint,2,opt,name=severity,proto3,enum=grpc.channelz.v1.ChannelTraceEvent_Severity" json:"severity,omitempty"` + // When this event occurred. + Timestamp *timestamp.Timestamp `protobuf:"bytes,3,opt,name=timestamp,proto3" json:"timestamp,omitempty"` + // ref of referenced channel or subchannel. + // Optional, only present if this event refers to a child object. For example, + // this field would be filled if this trace event was for a subchannel being + // created. + // + // Types that are valid to be assigned to ChildRef: + // *ChannelTraceEvent_ChannelRef + // *ChannelTraceEvent_SubchannelRef + ChildRef isChannelTraceEvent_ChildRef `protobuf_oneof:"child_ref"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChannelTraceEvent) Reset() { *m = ChannelTraceEvent{} } +func (m *ChannelTraceEvent) String() string { return proto.CompactTextString(m) } +func (*ChannelTraceEvent) ProtoMessage() {} +func (*ChannelTraceEvent) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{4} +} +func (m *ChannelTraceEvent) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChannelTraceEvent.Unmarshal(m, b) +} +func (m *ChannelTraceEvent) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChannelTraceEvent.Marshal(b, m, deterministic) +} +func (dst *ChannelTraceEvent) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChannelTraceEvent.Merge(dst, src) +} +func (m *ChannelTraceEvent) XXX_Size() int { + return xxx_messageInfo_ChannelTraceEvent.Size(m) +} +func (m *ChannelTraceEvent) XXX_DiscardUnknown() { + xxx_messageInfo_ChannelTraceEvent.DiscardUnknown(m) +} + +var xxx_messageInfo_ChannelTraceEvent proto.InternalMessageInfo + +func (m *ChannelTraceEvent) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *ChannelTraceEvent) GetSeverity() ChannelTraceEvent_Severity { + if m != nil { + return m.Severity + } + return ChannelTraceEvent_CT_UNKNOWN +} + +func (m *ChannelTraceEvent) GetTimestamp() *timestamp.Timestamp { + if m != nil { + return m.Timestamp + } + return nil +} + +type isChannelTraceEvent_ChildRef interface { + isChannelTraceEvent_ChildRef() +} + +type ChannelTraceEvent_ChannelRef struct { + ChannelRef *ChannelRef `protobuf:"bytes,4,opt,name=channel_ref,json=channelRef,proto3,oneof"` +} + +type ChannelTraceEvent_SubchannelRef struct { + SubchannelRef *SubchannelRef `protobuf:"bytes,5,opt,name=subchannel_ref,json=subchannelRef,proto3,oneof"` +} + +func (*ChannelTraceEvent_ChannelRef) isChannelTraceEvent_ChildRef() {} + +func (*ChannelTraceEvent_SubchannelRef) isChannelTraceEvent_ChildRef() {} + +func (m *ChannelTraceEvent) GetChildRef() isChannelTraceEvent_ChildRef { + if m != nil { + return m.ChildRef + } + return nil +} + +func (m *ChannelTraceEvent) GetChannelRef() *ChannelRef { + if x, ok := m.GetChildRef().(*ChannelTraceEvent_ChannelRef); ok { + return x.ChannelRef + } + return nil +} + +func (m *ChannelTraceEvent) GetSubchannelRef() *SubchannelRef { + if x, ok := m.GetChildRef().(*ChannelTraceEvent_SubchannelRef); ok { + return x.SubchannelRef + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*ChannelTraceEvent) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ChannelTraceEvent_OneofMarshaler, _ChannelTraceEvent_OneofUnmarshaler, _ChannelTraceEvent_OneofSizer, []interface{}{ + (*ChannelTraceEvent_ChannelRef)(nil), + (*ChannelTraceEvent_SubchannelRef)(nil), + } +} + +func _ChannelTraceEvent_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ChannelTraceEvent) + // child_ref + switch x := m.ChildRef.(type) { + case *ChannelTraceEvent_ChannelRef: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ChannelRef); err != nil { + return err + } + case *ChannelTraceEvent_SubchannelRef: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.SubchannelRef); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ChannelTraceEvent.ChildRef has unexpected type %T", x) + } + return nil +} + +func _ChannelTraceEvent_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ChannelTraceEvent) + switch tag { + case 4: // child_ref.channel_ref + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ChannelRef) + err := b.DecodeMessage(msg) + m.ChildRef = &ChannelTraceEvent_ChannelRef{msg} + return true, err + case 5: // child_ref.subchannel_ref + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(SubchannelRef) + err := b.DecodeMessage(msg) + m.ChildRef = &ChannelTraceEvent_SubchannelRef{msg} + return true, err + default: + return false, nil + } +} + +func _ChannelTraceEvent_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ChannelTraceEvent) + // child_ref + switch x := m.ChildRef.(type) { + case *ChannelTraceEvent_ChannelRef: + s := proto.Size(x.ChannelRef) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ChannelTraceEvent_SubchannelRef: + s := proto.Size(x.SubchannelRef) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// ChannelTrace represents the recent events that have occurred on the channel. +type ChannelTrace struct { + // Number of events ever logged in this tracing object. This can differ from + // events.size() because events can be overwritten or garbage collected by + // implementations. + NumEventsLogged int64 `protobuf:"varint,1,opt,name=num_events_logged,json=numEventsLogged,proto3" json:"num_events_logged,omitempty"` + // Time that this channel was created. + CreationTimestamp *timestamp.Timestamp `protobuf:"bytes,2,opt,name=creation_timestamp,json=creationTimestamp,proto3" json:"creation_timestamp,omitempty"` + // List of events that have occurred on this channel. 
+ Events []*ChannelTraceEvent `protobuf:"bytes,3,rep,name=events,proto3" json:"events,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChannelTrace) Reset() { *m = ChannelTrace{} } +func (m *ChannelTrace) String() string { return proto.CompactTextString(m) } +func (*ChannelTrace) ProtoMessage() {} +func (*ChannelTrace) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{5} +} +func (m *ChannelTrace) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChannelTrace.Unmarshal(m, b) +} +func (m *ChannelTrace) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChannelTrace.Marshal(b, m, deterministic) +} +func (dst *ChannelTrace) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChannelTrace.Merge(dst, src) +} +func (m *ChannelTrace) XXX_Size() int { + return xxx_messageInfo_ChannelTrace.Size(m) +} +func (m *ChannelTrace) XXX_DiscardUnknown() { + xxx_messageInfo_ChannelTrace.DiscardUnknown(m) +} + +var xxx_messageInfo_ChannelTrace proto.InternalMessageInfo + +func (m *ChannelTrace) GetNumEventsLogged() int64 { + if m != nil { + return m.NumEventsLogged + } + return 0 +} + +func (m *ChannelTrace) GetCreationTimestamp() *timestamp.Timestamp { + if m != nil { + return m.CreationTimestamp + } + return nil +} + +func (m *ChannelTrace) GetEvents() []*ChannelTraceEvent { + if m != nil { + return m.Events + } + return nil +} + +// ChannelRef is a reference to a Channel. +type ChannelRef struct { + // The globally unique id for this channel. Must be a positive number. + ChannelId int64 `protobuf:"varint,1,opt,name=channel_id,json=channelId,proto3" json:"channel_id,omitempty"` + // An optional name associated with the channel. + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ChannelRef) Reset() { *m = ChannelRef{} } +func (m *ChannelRef) String() string { return proto.CompactTextString(m) } +func (*ChannelRef) ProtoMessage() {} +func (*ChannelRef) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{6} +} +func (m *ChannelRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ChannelRef.Unmarshal(m, b) +} +func (m *ChannelRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ChannelRef.Marshal(b, m, deterministic) +} +func (dst *ChannelRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_ChannelRef.Merge(dst, src) +} +func (m *ChannelRef) XXX_Size() int { + return xxx_messageInfo_ChannelRef.Size(m) +} +func (m *ChannelRef) XXX_DiscardUnknown() { + xxx_messageInfo_ChannelRef.DiscardUnknown(m) +} + +var xxx_messageInfo_ChannelRef proto.InternalMessageInfo + +func (m *ChannelRef) GetChannelId() int64 { + if m != nil { + return m.ChannelId + } + return 0 +} + +func (m *ChannelRef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ChannelRef is a reference to a Subchannel. +type SubchannelRef struct { + // The globally unique id for this subchannel. Must be a positive number. + SubchannelId int64 `protobuf:"varint,7,opt,name=subchannel_id,json=subchannelId,proto3" json:"subchannel_id,omitempty"` + // An optional name associated with the subchannel. 
+ Name string `protobuf:"bytes,8,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubchannelRef) Reset() { *m = SubchannelRef{} } +func (m *SubchannelRef) String() string { return proto.CompactTextString(m) } +func (*SubchannelRef) ProtoMessage() {} +func (*SubchannelRef) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{7} +} +func (m *SubchannelRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubchannelRef.Unmarshal(m, b) +} +func (m *SubchannelRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubchannelRef.Marshal(b, m, deterministic) +} +func (dst *SubchannelRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubchannelRef.Merge(dst, src) +} +func (m *SubchannelRef) XXX_Size() int { + return xxx_messageInfo_SubchannelRef.Size(m) +} +func (m *SubchannelRef) XXX_DiscardUnknown() { + xxx_messageInfo_SubchannelRef.DiscardUnknown(m) +} + +var xxx_messageInfo_SubchannelRef proto.InternalMessageInfo + +func (m *SubchannelRef) GetSubchannelId() int64 { + if m != nil { + return m.SubchannelId + } + return 0 +} + +func (m *SubchannelRef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// SocketRef is a reference to a Socket. +type SocketRef struct { + // The globally unique id for this socket. Must be a positive number. + SocketId int64 `protobuf:"varint,3,opt,name=socket_id,json=socketId,proto3" json:"socket_id,omitempty"` + // An optional name associated with the socket. + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketRef) Reset() { *m = SocketRef{} } +func (m *SocketRef) String() string { return proto.CompactTextString(m) } +func (*SocketRef) ProtoMessage() {} +func (*SocketRef) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{8} +} +func (m *SocketRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketRef.Unmarshal(m, b) +} +func (m *SocketRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketRef.Marshal(b, m, deterministic) +} +func (dst *SocketRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketRef.Merge(dst, src) +} +func (m *SocketRef) XXX_Size() int { + return xxx_messageInfo_SocketRef.Size(m) +} +func (m *SocketRef) XXX_DiscardUnknown() { + xxx_messageInfo_SocketRef.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketRef proto.InternalMessageInfo + +func (m *SocketRef) GetSocketId() int64 { + if m != nil { + return m.SocketId + } + return 0 +} + +func (m *SocketRef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// ServerRef is a reference to a Server. +type ServerRef struct { + // A globally unique identifier for this server. Must be a positive number. + ServerId int64 `protobuf:"varint,5,opt,name=server_id,json=serverId,proto3" json:"server_id,omitempty"` + // An optional name associated with the server. 
+ Name string `protobuf:"bytes,6,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerRef) Reset() { *m = ServerRef{} } +func (m *ServerRef) String() string { return proto.CompactTextString(m) } +func (*ServerRef) ProtoMessage() {} +func (*ServerRef) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{9} +} +func (m *ServerRef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerRef.Unmarshal(m, b) +} +func (m *ServerRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerRef.Marshal(b, m, deterministic) +} +func (dst *ServerRef) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerRef.Merge(dst, src) +} +func (m *ServerRef) XXX_Size() int { + return xxx_messageInfo_ServerRef.Size(m) +} +func (m *ServerRef) XXX_DiscardUnknown() { + xxx_messageInfo_ServerRef.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerRef proto.InternalMessageInfo + +func (m *ServerRef) GetServerId() int64 { + if m != nil { + return m.ServerId + } + return 0 +} + +func (m *ServerRef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// Server represents a single server. There may be multiple servers in a single +// program. +type Server struct { + // The identifier for a Server. This should be set. + Ref *ServerRef `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"` + // The associated data of the Server. + Data *ServerData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + // The sockets that the server is listening on. There are no ordering + // guarantees. This may be absent. + ListenSocket []*SocketRef `protobuf:"bytes,3,rep,name=listen_socket,json=listenSocket,proto3" json:"listen_socket,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Server) Reset() { *m = Server{} } +func (m *Server) String() string { return proto.CompactTextString(m) } +func (*Server) ProtoMessage() {} +func (*Server) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{10} +} +func (m *Server) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Server.Unmarshal(m, b) +} +func (m *Server) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Server.Marshal(b, m, deterministic) +} +func (dst *Server) XXX_Merge(src proto.Message) { + xxx_messageInfo_Server.Merge(dst, src) +} +func (m *Server) XXX_Size() int { + return xxx_messageInfo_Server.Size(m) +} +func (m *Server) XXX_DiscardUnknown() { + xxx_messageInfo_Server.DiscardUnknown(m) +} + +var xxx_messageInfo_Server proto.InternalMessageInfo + +func (m *Server) GetRef() *ServerRef { + if m != nil { + return m.Ref + } + return nil +} + +func (m *Server) GetData() *ServerData { + if m != nil { + return m.Data + } + return nil +} + +func (m *Server) GetListenSocket() []*SocketRef { + if m != nil { + return m.ListenSocket + } + return nil +} + +// ServerData is data for a specific Server. +type ServerData struct { + // A trace of recent events on the server. May be absent. 
+ Trace *ChannelTrace `protobuf:"bytes,1,opt,name=trace,proto3" json:"trace,omitempty"` + // The number of incoming calls started on the server + CallsStarted int64 `protobuf:"varint,2,opt,name=calls_started,json=callsStarted,proto3" json:"calls_started,omitempty"` + // The number of incoming calls that have completed with an OK status + CallsSucceeded int64 `protobuf:"varint,3,opt,name=calls_succeeded,json=callsSucceeded,proto3" json:"calls_succeeded,omitempty"` + // The number of incoming calls that have a completed with a non-OK status + CallsFailed int64 `protobuf:"varint,4,opt,name=calls_failed,json=callsFailed,proto3" json:"calls_failed,omitempty"` + // The last time a call was started on the server. + LastCallStartedTimestamp *timestamp.Timestamp `protobuf:"bytes,5,opt,name=last_call_started_timestamp,json=lastCallStartedTimestamp,proto3" json:"last_call_started_timestamp,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerData) Reset() { *m = ServerData{} } +func (m *ServerData) String() string { return proto.CompactTextString(m) } +func (*ServerData) ProtoMessage() {} +func (*ServerData) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{11} +} +func (m *ServerData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerData.Unmarshal(m, b) +} +func (m *ServerData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerData.Marshal(b, m, deterministic) +} +func (dst *ServerData) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerData.Merge(dst, src) +} +func (m *ServerData) XXX_Size() int { + return xxx_messageInfo_ServerData.Size(m) +} +func (m *ServerData) XXX_DiscardUnknown() { + xxx_messageInfo_ServerData.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerData proto.InternalMessageInfo + +func (m *ServerData) GetTrace() *ChannelTrace { + if m != nil { + return m.Trace + } + return nil +} + +func (m *ServerData) GetCallsStarted() int64 { + if m != nil { + return m.CallsStarted + } + return 0 +} + +func (m *ServerData) GetCallsSucceeded() int64 { + if m != nil { + return m.CallsSucceeded + } + return 0 +} + +func (m *ServerData) GetCallsFailed() int64 { + if m != nil { + return m.CallsFailed + } + return 0 +} + +func (m *ServerData) GetLastCallStartedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastCallStartedTimestamp + } + return nil +} + +// Information about an actual connection. Pronounced "sock-ay". +type Socket struct { + // The identifier for the Socket. + Ref *SocketRef `protobuf:"bytes,1,opt,name=ref,proto3" json:"ref,omitempty"` + // Data specific to this Socket. + Data *SocketData `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` + // The locally bound address. + Local *Address `protobuf:"bytes,3,opt,name=local,proto3" json:"local,omitempty"` + // The remote bound address. May be absent. + Remote *Address `protobuf:"bytes,4,opt,name=remote,proto3" json:"remote,omitempty"` + // Security details for this socket. May be absent if not available, or + // there is no security on the socket. + Security *Security `protobuf:"bytes,5,opt,name=security,proto3" json:"security,omitempty"` + // Optional, represents the name of the remote endpoint, if different than + // the original target name. 
+ RemoteName string `protobuf:"bytes,6,opt,name=remote_name,json=remoteName,proto3" json:"remote_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Socket) Reset() { *m = Socket{} } +func (m *Socket) String() string { return proto.CompactTextString(m) } +func (*Socket) ProtoMessage() {} +func (*Socket) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{12} +} +func (m *Socket) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Socket.Unmarshal(m, b) +} +func (m *Socket) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Socket.Marshal(b, m, deterministic) +} +func (dst *Socket) XXX_Merge(src proto.Message) { + xxx_messageInfo_Socket.Merge(dst, src) +} +func (m *Socket) XXX_Size() int { + return xxx_messageInfo_Socket.Size(m) +} +func (m *Socket) XXX_DiscardUnknown() { + xxx_messageInfo_Socket.DiscardUnknown(m) +} + +var xxx_messageInfo_Socket proto.InternalMessageInfo + +func (m *Socket) GetRef() *SocketRef { + if m != nil { + return m.Ref + } + return nil +} + +func (m *Socket) GetData() *SocketData { + if m != nil { + return m.Data + } + return nil +} + +func (m *Socket) GetLocal() *Address { + if m != nil { + return m.Local + } + return nil +} + +func (m *Socket) GetRemote() *Address { + if m != nil { + return m.Remote + } + return nil +} + +func (m *Socket) GetSecurity() *Security { + if m != nil { + return m.Security + } + return nil +} + +func (m *Socket) GetRemoteName() string { + if m != nil { + return m.RemoteName + } + return "" +} + +// SocketData is data associated for a specific Socket. The fields present +// are specific to the implementation, so there may be minor differences in +// the semantics. (e.g. flow control windows) +type SocketData struct { + // The number of streams that have been started. + StreamsStarted int64 `protobuf:"varint,1,opt,name=streams_started,json=streamsStarted,proto3" json:"streams_started,omitempty"` + // The number of streams that have ended successfully: + // On client side, received frame with eos bit set; + // On server side, sent frame with eos bit set. + StreamsSucceeded int64 `protobuf:"varint,2,opt,name=streams_succeeded,json=streamsSucceeded,proto3" json:"streams_succeeded,omitempty"` + // The number of streams that have ended unsuccessfully: + // On client side, ended without receiving frame with eos bit set; + // On server side, ended without sending frame with eos bit set. + StreamsFailed int64 `protobuf:"varint,3,opt,name=streams_failed,json=streamsFailed,proto3" json:"streams_failed,omitempty"` + // The number of grpc messages successfully sent on this socket. + MessagesSent int64 `protobuf:"varint,4,opt,name=messages_sent,json=messagesSent,proto3" json:"messages_sent,omitempty"` + // The number of grpc messages received on this socket. + MessagesReceived int64 `protobuf:"varint,5,opt,name=messages_received,json=messagesReceived,proto3" json:"messages_received,omitempty"` + // The number of keep alives sent. This is typically implemented with HTTP/2 + // ping messages. + KeepAlivesSent int64 `protobuf:"varint,6,opt,name=keep_alives_sent,json=keepAlivesSent,proto3" json:"keep_alives_sent,omitempty"` + // The last time a stream was created by this endpoint. Usually unset for + // servers. 
+ LastLocalStreamCreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,7,opt,name=last_local_stream_created_timestamp,json=lastLocalStreamCreatedTimestamp,proto3" json:"last_local_stream_created_timestamp,omitempty"` + // The last time a stream was created by the remote endpoint. Usually unset + // for clients. + LastRemoteStreamCreatedTimestamp *timestamp.Timestamp `protobuf:"bytes,8,opt,name=last_remote_stream_created_timestamp,json=lastRemoteStreamCreatedTimestamp,proto3" json:"last_remote_stream_created_timestamp,omitempty"` + // The last time a message was sent by this endpoint. + LastMessageSentTimestamp *timestamp.Timestamp `protobuf:"bytes,9,opt,name=last_message_sent_timestamp,json=lastMessageSentTimestamp,proto3" json:"last_message_sent_timestamp,omitempty"` + // The last time a message was received by this endpoint. + LastMessageReceivedTimestamp *timestamp.Timestamp `protobuf:"bytes,10,opt,name=last_message_received_timestamp,json=lastMessageReceivedTimestamp,proto3" json:"last_message_received_timestamp,omitempty"` + // The amount of window, granted to the local endpoint by the remote endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. + LocalFlowControlWindow *wrappers.Int64Value `protobuf:"bytes,11,opt,name=local_flow_control_window,json=localFlowControlWindow,proto3" json:"local_flow_control_window,omitempty"` + // The amount of window, granted to the remote endpoint by the local endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. + RemoteFlowControlWindow *wrappers.Int64Value `protobuf:"bytes,12,opt,name=remote_flow_control_window,json=remoteFlowControlWindow,proto3" json:"remote_flow_control_window,omitempty"` + // Socket options set on this socket. May be absent if 'summary' is set + // on GetSocketRequest. 
+ Option []*SocketOption `protobuf:"bytes,13,rep,name=option,proto3" json:"option,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketData) Reset() { *m = SocketData{} } +func (m *SocketData) String() string { return proto.CompactTextString(m) } +func (*SocketData) ProtoMessage() {} +func (*SocketData) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{13} +} +func (m *SocketData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketData.Unmarshal(m, b) +} +func (m *SocketData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketData.Marshal(b, m, deterministic) +} +func (dst *SocketData) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketData.Merge(dst, src) +} +func (m *SocketData) XXX_Size() int { + return xxx_messageInfo_SocketData.Size(m) +} +func (m *SocketData) XXX_DiscardUnknown() { + xxx_messageInfo_SocketData.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketData proto.InternalMessageInfo + +func (m *SocketData) GetStreamsStarted() int64 { + if m != nil { + return m.StreamsStarted + } + return 0 +} + +func (m *SocketData) GetStreamsSucceeded() int64 { + if m != nil { + return m.StreamsSucceeded + } + return 0 +} + +func (m *SocketData) GetStreamsFailed() int64 { + if m != nil { + return m.StreamsFailed + } + return 0 +} + +func (m *SocketData) GetMessagesSent() int64 { + if m != nil { + return m.MessagesSent + } + return 0 +} + +func (m *SocketData) GetMessagesReceived() int64 { + if m != nil { + return m.MessagesReceived + } + return 0 +} + +func (m *SocketData) GetKeepAlivesSent() int64 { + if m != nil { + return m.KeepAlivesSent + } + return 0 +} + +func (m *SocketData) GetLastLocalStreamCreatedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastLocalStreamCreatedTimestamp + } + return nil +} + +func (m *SocketData) GetLastRemoteStreamCreatedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastRemoteStreamCreatedTimestamp + } + return nil +} + +func (m *SocketData) GetLastMessageSentTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastMessageSentTimestamp + } + return nil +} + +func (m *SocketData) GetLastMessageReceivedTimestamp() *timestamp.Timestamp { + if m != nil { + return m.LastMessageReceivedTimestamp + } + return nil +} + +func (m *SocketData) GetLocalFlowControlWindow() *wrappers.Int64Value { + if m != nil { + return m.LocalFlowControlWindow + } + return nil +} + +func (m *SocketData) GetRemoteFlowControlWindow() *wrappers.Int64Value { + if m != nil { + return m.RemoteFlowControlWindow + } + return nil +} + +func (m *SocketData) GetOption() []*SocketOption { + if m != nil { + return m.Option + } + return nil +} + +// Address represents the address used to create the socket. 
+type Address struct { + // Types that are valid to be assigned to Address: + // *Address_TcpipAddress + // *Address_UdsAddress_ + // *Address_OtherAddress_ + Address isAddress_Address `protobuf_oneof:"address"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address) Reset() { *m = Address{} } +func (m *Address) String() string { return proto.CompactTextString(m) } +func (*Address) ProtoMessage() {} +func (*Address) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{14} +} +func (m *Address) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address.Unmarshal(m, b) +} +func (m *Address) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address.Marshal(b, m, deterministic) +} +func (dst *Address) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address.Merge(dst, src) +} +func (m *Address) XXX_Size() int { + return xxx_messageInfo_Address.Size(m) +} +func (m *Address) XXX_DiscardUnknown() { + xxx_messageInfo_Address.DiscardUnknown(m) +} + +var xxx_messageInfo_Address proto.InternalMessageInfo + +type isAddress_Address interface { + isAddress_Address() +} + +type Address_TcpipAddress struct { + TcpipAddress *Address_TcpIpAddress `protobuf:"bytes,1,opt,name=tcpip_address,json=tcpipAddress,proto3,oneof"` +} + +type Address_UdsAddress_ struct { + UdsAddress *Address_UdsAddress `protobuf:"bytes,2,opt,name=uds_address,json=udsAddress,proto3,oneof"` +} + +type Address_OtherAddress_ struct { + OtherAddress *Address_OtherAddress `protobuf:"bytes,3,opt,name=other_address,json=otherAddress,proto3,oneof"` +} + +func (*Address_TcpipAddress) isAddress_Address() {} + +func (*Address_UdsAddress_) isAddress_Address() {} + +func (*Address_OtherAddress_) isAddress_Address() {} + +func (m *Address) GetAddress() isAddress_Address { + if m != nil { + return m.Address + } + return nil +} + +func (m *Address) GetTcpipAddress() *Address_TcpIpAddress { + if x, ok := m.GetAddress().(*Address_TcpipAddress); ok { + return x.TcpipAddress + } + return nil +} + +func (m *Address) GetUdsAddress() *Address_UdsAddress { + if x, ok := m.GetAddress().(*Address_UdsAddress_); ok { + return x.UdsAddress + } + return nil +} + +func (m *Address) GetOtherAddress() *Address_OtherAddress { + if x, ok := m.GetAddress().(*Address_OtherAddress_); ok { + return x.OtherAddress + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
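+// Editor's note (editorial aid, not generated output): in the marshaler below,
+// an expression such as 1<<3|proto.WireBytes builds the protobuf field tag by
+// shifting the field number left three bits and OR-ing in the wire type;
+// proto.WireBytes (value 2) marks a length-delimited payload such as an
+// embedded message.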
+func (*Address) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Address_OneofMarshaler, _Address_OneofUnmarshaler, _Address_OneofSizer, []interface{}{ + (*Address_TcpipAddress)(nil), + (*Address_UdsAddress_)(nil), + (*Address_OtherAddress_)(nil), + } +} + +func _Address_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_TcpipAddress: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.TcpipAddress); err != nil { + return err + } + case *Address_UdsAddress_: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.UdsAddress); err != nil { + return err + } + case *Address_OtherAddress_: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.OtherAddress); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Address.Address has unexpected type %T", x) + } + return nil +} + +func _Address_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Address) + switch tag { + case 1: // address.tcpip_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Address_TcpIpAddress) + err := b.DecodeMessage(msg) + m.Address = &Address_TcpipAddress{msg} + return true, err + case 2: // address.uds_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Address_UdsAddress) + err := b.DecodeMessage(msg) + m.Address = &Address_UdsAddress_{msg} + return true, err + case 3: // address.other_address + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Address_OtherAddress) + err := b.DecodeMessage(msg) + m.Address = &Address_OtherAddress_{msg} + return true, err + default: + return false, nil + } +} + +func _Address_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Address) + // address + switch x := m.Address.(type) { + case *Address_TcpipAddress: + s := proto.Size(x.TcpipAddress) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Address_UdsAddress_: + s := proto.Size(x.UdsAddress) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Address_OtherAddress_: + s := proto.Size(x.OtherAddress) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Address_TcpIpAddress struct { + // Either the IPv4 or IPv6 address in bytes. Will be either 4 bytes or 16 + // bytes in length. + IpAddress []byte `protobuf:"bytes,1,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` + // 0-64k, or -1 if not appropriate. 
+ Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address_TcpIpAddress) Reset() { *m = Address_TcpIpAddress{} } +func (m *Address_TcpIpAddress) String() string { return proto.CompactTextString(m) } +func (*Address_TcpIpAddress) ProtoMessage() {} +func (*Address_TcpIpAddress) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{14, 0} +} +func (m *Address_TcpIpAddress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address_TcpIpAddress.Unmarshal(m, b) +} +func (m *Address_TcpIpAddress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address_TcpIpAddress.Marshal(b, m, deterministic) +} +func (dst *Address_TcpIpAddress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address_TcpIpAddress.Merge(dst, src) +} +func (m *Address_TcpIpAddress) XXX_Size() int { + return xxx_messageInfo_Address_TcpIpAddress.Size(m) +} +func (m *Address_TcpIpAddress) XXX_DiscardUnknown() { + xxx_messageInfo_Address_TcpIpAddress.DiscardUnknown(m) +} + +var xxx_messageInfo_Address_TcpIpAddress proto.InternalMessageInfo + +func (m *Address_TcpIpAddress) GetIpAddress() []byte { + if m != nil { + return m.IpAddress + } + return nil +} + +func (m *Address_TcpIpAddress) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +// A Unix Domain Socket address. +type Address_UdsAddress struct { + Filename string `protobuf:"bytes,1,opt,name=filename,proto3" json:"filename,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address_UdsAddress) Reset() { *m = Address_UdsAddress{} } +func (m *Address_UdsAddress) String() string { return proto.CompactTextString(m) } +func (*Address_UdsAddress) ProtoMessage() {} +func (*Address_UdsAddress) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{14, 1} +} +func (m *Address_UdsAddress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address_UdsAddress.Unmarshal(m, b) +} +func (m *Address_UdsAddress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address_UdsAddress.Marshal(b, m, deterministic) +} +func (dst *Address_UdsAddress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address_UdsAddress.Merge(dst, src) +} +func (m *Address_UdsAddress) XXX_Size() int { + return xxx_messageInfo_Address_UdsAddress.Size(m) +} +func (m *Address_UdsAddress) XXX_DiscardUnknown() { + xxx_messageInfo_Address_UdsAddress.DiscardUnknown(m) +} + +var xxx_messageInfo_Address_UdsAddress proto.InternalMessageInfo + +func (m *Address_UdsAddress) GetFilename() string { + if m != nil { + return m.Filename + } + return "" +} + +// An address type not included above. +type Address_OtherAddress struct { + // The human readable version of the value. This value should be set. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The actual address message. 
+ Value *any.Any `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Address_OtherAddress) Reset() { *m = Address_OtherAddress{} } +func (m *Address_OtherAddress) String() string { return proto.CompactTextString(m) } +func (*Address_OtherAddress) ProtoMessage() {} +func (*Address_OtherAddress) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{14, 2} +} +func (m *Address_OtherAddress) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Address_OtherAddress.Unmarshal(m, b) +} +func (m *Address_OtherAddress) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Address_OtherAddress.Marshal(b, m, deterministic) +} +func (dst *Address_OtherAddress) XXX_Merge(src proto.Message) { + xxx_messageInfo_Address_OtherAddress.Merge(dst, src) +} +func (m *Address_OtherAddress) XXX_Size() int { + return xxx_messageInfo_Address_OtherAddress.Size(m) +} +func (m *Address_OtherAddress) XXX_DiscardUnknown() { + xxx_messageInfo_Address_OtherAddress.DiscardUnknown(m) +} + +var xxx_messageInfo_Address_OtherAddress proto.InternalMessageInfo + +func (m *Address_OtherAddress) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Address_OtherAddress) GetValue() *any.Any { + if m != nil { + return m.Value + } + return nil +} + +// Security represents details about how secure the socket is. +type Security struct { + // Types that are valid to be assigned to Model: + // *Security_Tls_ + // *Security_Other + Model isSecurity_Model `protobuf_oneof:"model"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Security) Reset() { *m = Security{} } +func (m *Security) String() string { return proto.CompactTextString(m) } +func (*Security) ProtoMessage() {} +func (*Security) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{15} +} +func (m *Security) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Security.Unmarshal(m, b) +} +func (m *Security) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Security.Marshal(b, m, deterministic) +} +func (dst *Security) XXX_Merge(src proto.Message) { + xxx_messageInfo_Security.Merge(dst, src) +} +func (m *Security) XXX_Size() int { + return xxx_messageInfo_Security.Size(m) +} +func (m *Security) XXX_DiscardUnknown() { + xxx_messageInfo_Security.DiscardUnknown(m) +} + +var xxx_messageInfo_Security proto.InternalMessageInfo + +type isSecurity_Model interface { + isSecurity_Model() +} + +type Security_Tls_ struct { + Tls *Security_Tls `protobuf:"bytes,1,opt,name=tls,proto3,oneof"` +} + +type Security_Other struct { + Other *Security_OtherSecurity `protobuf:"bytes,2,opt,name=other,proto3,oneof"` +} + +func (*Security_Tls_) isSecurity_Model() {} + +func (*Security_Other) isSecurity_Model() {} + +func (m *Security) GetModel() isSecurity_Model { + if m != nil { + return m.Model + } + return nil +} + +func (m *Security) GetTls() *Security_Tls { + if x, ok := m.GetModel().(*Security_Tls_); ok { + return x.Tls + } + return nil +} + +func (m *Security) GetOther() *Security_OtherSecurity { + if x, ok := m.GetModel().(*Security_Other); ok { + return x.Other + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Security) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Security_OneofMarshaler, _Security_OneofUnmarshaler, _Security_OneofSizer, []interface{}{ + (*Security_Tls_)(nil), + (*Security_Other)(nil), + } +} + +func _Security_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Security) + // model + switch x := m.Model.(type) { + case *Security_Tls_: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Tls); err != nil { + return err + } + case *Security_Other: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Other); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Security.Model has unexpected type %T", x) + } + return nil +} + +func _Security_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Security) + switch tag { + case 1: // model.tls + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Security_Tls) + err := b.DecodeMessage(msg) + m.Model = &Security_Tls_{msg} + return true, err + case 2: // model.other + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(Security_OtherSecurity) + err := b.DecodeMessage(msg) + m.Model = &Security_Other{msg} + return true, err + default: + return false, nil + } +} + +func _Security_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Security) + // model + switch x := m.Model.(type) { + case *Security_Tls_: + s := proto.Size(x.Tls) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *Security_Other: + s := proto.Size(x.Other) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Security_Tls struct { + // Types that are valid to be assigned to CipherSuite: + // *Security_Tls_StandardName + // *Security_Tls_OtherName + CipherSuite isSecurity_Tls_CipherSuite `protobuf_oneof:"cipher_suite"` + // the certificate used by this endpoint. + LocalCertificate []byte `protobuf:"bytes,3,opt,name=local_certificate,json=localCertificate,proto3" json:"local_certificate,omitempty"` + // the certificate used by the remote endpoint. 
+ RemoteCertificate []byte `protobuf:"bytes,4,opt,name=remote_certificate,json=remoteCertificate,proto3" json:"remote_certificate,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Security_Tls) Reset() { *m = Security_Tls{} } +func (m *Security_Tls) String() string { return proto.CompactTextString(m) } +func (*Security_Tls) ProtoMessage() {} +func (*Security_Tls) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{15, 0} +} +func (m *Security_Tls) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Security_Tls.Unmarshal(m, b) +} +func (m *Security_Tls) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Security_Tls.Marshal(b, m, deterministic) +} +func (dst *Security_Tls) XXX_Merge(src proto.Message) { + xxx_messageInfo_Security_Tls.Merge(dst, src) +} +func (m *Security_Tls) XXX_Size() int { + return xxx_messageInfo_Security_Tls.Size(m) +} +func (m *Security_Tls) XXX_DiscardUnknown() { + xxx_messageInfo_Security_Tls.DiscardUnknown(m) +} + +var xxx_messageInfo_Security_Tls proto.InternalMessageInfo + +type isSecurity_Tls_CipherSuite interface { + isSecurity_Tls_CipherSuite() +} + +type Security_Tls_StandardName struct { + StandardName string `protobuf:"bytes,1,opt,name=standard_name,json=standardName,proto3,oneof"` +} + +type Security_Tls_OtherName struct { + OtherName string `protobuf:"bytes,2,opt,name=other_name,json=otherName,proto3,oneof"` +} + +func (*Security_Tls_StandardName) isSecurity_Tls_CipherSuite() {} + +func (*Security_Tls_OtherName) isSecurity_Tls_CipherSuite() {} + +func (m *Security_Tls) GetCipherSuite() isSecurity_Tls_CipherSuite { + if m != nil { + return m.CipherSuite + } + return nil +} + +func (m *Security_Tls) GetStandardName() string { + if x, ok := m.GetCipherSuite().(*Security_Tls_StandardName); ok { + return x.StandardName + } + return "" +} + +func (m *Security_Tls) GetOtherName() string { + if x, ok := m.GetCipherSuite().(*Security_Tls_OtherName); ok { + return x.OtherName + } + return "" +} + +func (m *Security_Tls) GetLocalCertificate() []byte { + if m != nil { + return m.LocalCertificate + } + return nil +} + +func (m *Security_Tls) GetRemoteCertificate() []byte { + if m != nil { + return m.RemoteCertificate + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*Security_Tls) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Security_Tls_OneofMarshaler, _Security_Tls_OneofUnmarshaler, _Security_Tls_OneofSizer, []interface{}{ + (*Security_Tls_StandardName)(nil), + (*Security_Tls_OtherName)(nil), + } +} + +func _Security_Tls_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Security_Tls) + // cipher_suite + switch x := m.CipherSuite.(type) { + case *Security_Tls_StandardName: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StandardName) + case *Security_Tls_OtherName: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.OtherName) + case nil: + default: + return fmt.Errorf("Security_Tls.CipherSuite has unexpected type %T", x) + } + return nil +} + +func _Security_Tls_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Security_Tls) + switch tag { + case 1: // cipher_suite.standard_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.CipherSuite = &Security_Tls_StandardName{x} + return true, err + case 2: // cipher_suite.other_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.CipherSuite = &Security_Tls_OtherName{x} + return true, err + default: + return false, nil + } +} + +func _Security_Tls_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Security_Tls) + // cipher_suite + switch x := m.CipherSuite.(type) { + case *Security_Tls_StandardName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StandardName))) + n += len(x.StandardName) + case *Security_Tls_OtherName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.OtherName))) + n += len(x.OtherName) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type Security_OtherSecurity struct { + // The human readable version of the value. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The actual security details message. 
+ Value *any.Any `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Security_OtherSecurity) Reset() { *m = Security_OtherSecurity{} } +func (m *Security_OtherSecurity) String() string { return proto.CompactTextString(m) } +func (*Security_OtherSecurity) ProtoMessage() {} +func (*Security_OtherSecurity) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{15, 1} +} +func (m *Security_OtherSecurity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Security_OtherSecurity.Unmarshal(m, b) +} +func (m *Security_OtherSecurity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Security_OtherSecurity.Marshal(b, m, deterministic) +} +func (dst *Security_OtherSecurity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Security_OtherSecurity.Merge(dst, src) +} +func (m *Security_OtherSecurity) XXX_Size() int { + return xxx_messageInfo_Security_OtherSecurity.Size(m) +} +func (m *Security_OtherSecurity) XXX_DiscardUnknown() { + xxx_messageInfo_Security_OtherSecurity.DiscardUnknown(m) +} + +var xxx_messageInfo_Security_OtherSecurity proto.InternalMessageInfo + +func (m *Security_OtherSecurity) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Security_OtherSecurity) GetValue() *any.Any { + if m != nil { + return m.Value + } + return nil +} + +// SocketOption represents socket options for a socket. Specifically, these +// are the options returned by getsockopt(). +type SocketOption struct { + // The full name of the socket option. Typically this will be the upper case + // name, such as "SO_REUSEPORT". + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The human readable value of this socket option. At least one of value or + // additional will be set. + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + // Additional data associated with the socket option. At least one of value + // or additional will be set. 
+ Additional *any.Any `protobuf:"bytes,3,opt,name=additional,proto3" json:"additional,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketOption) Reset() { *m = SocketOption{} } +func (m *SocketOption) String() string { return proto.CompactTextString(m) } +func (*SocketOption) ProtoMessage() {} +func (*SocketOption) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{16} +} +func (m *SocketOption) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketOption.Unmarshal(m, b) +} +func (m *SocketOption) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketOption.Marshal(b, m, deterministic) +} +func (dst *SocketOption) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketOption.Merge(dst, src) +} +func (m *SocketOption) XXX_Size() int { + return xxx_messageInfo_SocketOption.Size(m) +} +func (m *SocketOption) XXX_DiscardUnknown() { + xxx_messageInfo_SocketOption.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketOption proto.InternalMessageInfo + +func (m *SocketOption) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SocketOption) GetValue() string { + if m != nil { + return m.Value + } + return "" +} + +func (m *SocketOption) GetAdditional() *any.Any { + if m != nil { + return m.Additional + } + return nil +} + +// For use with SocketOption's additional field. This is primarily used for +// SO_RCVTIMEO and SO_SNDTIMEO +type SocketOptionTimeout struct { + Duration *duration.Duration `protobuf:"bytes,1,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketOptionTimeout) Reset() { *m = SocketOptionTimeout{} } +func (m *SocketOptionTimeout) String() string { return proto.CompactTextString(m) } +func (*SocketOptionTimeout) ProtoMessage() {} +func (*SocketOptionTimeout) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{17} +} +func (m *SocketOptionTimeout) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketOptionTimeout.Unmarshal(m, b) +} +func (m *SocketOptionTimeout) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketOptionTimeout.Marshal(b, m, deterministic) +} +func (dst *SocketOptionTimeout) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketOptionTimeout.Merge(dst, src) +} +func (m *SocketOptionTimeout) XXX_Size() int { + return xxx_messageInfo_SocketOptionTimeout.Size(m) +} +func (m *SocketOptionTimeout) XXX_DiscardUnknown() { + xxx_messageInfo_SocketOptionTimeout.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketOptionTimeout proto.InternalMessageInfo + +func (m *SocketOptionTimeout) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +// For use with SocketOption's additional field. This is primarily used for +// SO_LINGER. 
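+// Editor's note: the sketch below is an editorial illustration of packing the
+// SocketOptionLinger message into SocketOption.Additional; it is not part of
+// the generated API. The ptypes helpers are assumed to come from
+// github.com/golang/protobuf/ptypes plus the standard time package, and the
+// five second linger value is made up for the example.
+//
+//    linger := &SocketOptionLinger{
+//        Active:   true,
+//        Duration: ptypes.DurationProto(5 * time.Second),
+//    }
+//    if additional, err := ptypes.MarshalAny(linger); err == nil {
+//        opt := &SocketOption{Name: "SO_LINGER", Additional: additional}
+//        _ = opt // attach to SocketData.Option as needed
+//    }
+//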
+type SocketOptionLinger struct { + // active maps to `struct linger.l_onoff` + Active bool `protobuf:"varint,1,opt,name=active,proto3" json:"active,omitempty"` + // duration maps to `struct linger.l_linger` + Duration *duration.Duration `protobuf:"bytes,2,opt,name=duration,proto3" json:"duration,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketOptionLinger) Reset() { *m = SocketOptionLinger{} } +func (m *SocketOptionLinger) String() string { return proto.CompactTextString(m) } +func (*SocketOptionLinger) ProtoMessage() {} +func (*SocketOptionLinger) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{18} +} +func (m *SocketOptionLinger) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketOptionLinger.Unmarshal(m, b) +} +func (m *SocketOptionLinger) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketOptionLinger.Marshal(b, m, deterministic) +} +func (dst *SocketOptionLinger) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketOptionLinger.Merge(dst, src) +} +func (m *SocketOptionLinger) XXX_Size() int { + return xxx_messageInfo_SocketOptionLinger.Size(m) +} +func (m *SocketOptionLinger) XXX_DiscardUnknown() { + xxx_messageInfo_SocketOptionLinger.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketOptionLinger proto.InternalMessageInfo + +func (m *SocketOptionLinger) GetActive() bool { + if m != nil { + return m.Active + } + return false +} + +func (m *SocketOptionLinger) GetDuration() *duration.Duration { + if m != nil { + return m.Duration + } + return nil +} + +// For use with SocketOption's additional field. Tcp info for +// SOL_TCP and TCP_INFO. +type SocketOptionTcpInfo struct { + TcpiState uint32 `protobuf:"varint,1,opt,name=tcpi_state,json=tcpiState,proto3" json:"tcpi_state,omitempty"` + TcpiCaState uint32 `protobuf:"varint,2,opt,name=tcpi_ca_state,json=tcpiCaState,proto3" json:"tcpi_ca_state,omitempty"` + TcpiRetransmits uint32 `protobuf:"varint,3,opt,name=tcpi_retransmits,json=tcpiRetransmits,proto3" json:"tcpi_retransmits,omitempty"` + TcpiProbes uint32 `protobuf:"varint,4,opt,name=tcpi_probes,json=tcpiProbes,proto3" json:"tcpi_probes,omitempty"` + TcpiBackoff uint32 `protobuf:"varint,5,opt,name=tcpi_backoff,json=tcpiBackoff,proto3" json:"tcpi_backoff,omitempty"` + TcpiOptions uint32 `protobuf:"varint,6,opt,name=tcpi_options,json=tcpiOptions,proto3" json:"tcpi_options,omitempty"` + TcpiSndWscale uint32 `protobuf:"varint,7,opt,name=tcpi_snd_wscale,json=tcpiSndWscale,proto3" json:"tcpi_snd_wscale,omitempty"` + TcpiRcvWscale uint32 `protobuf:"varint,8,opt,name=tcpi_rcv_wscale,json=tcpiRcvWscale,proto3" json:"tcpi_rcv_wscale,omitempty"` + TcpiRto uint32 `protobuf:"varint,9,opt,name=tcpi_rto,json=tcpiRto,proto3" json:"tcpi_rto,omitempty"` + TcpiAto uint32 `protobuf:"varint,10,opt,name=tcpi_ato,json=tcpiAto,proto3" json:"tcpi_ato,omitempty"` + TcpiSndMss uint32 `protobuf:"varint,11,opt,name=tcpi_snd_mss,json=tcpiSndMss,proto3" json:"tcpi_snd_mss,omitempty"` + TcpiRcvMss uint32 `protobuf:"varint,12,opt,name=tcpi_rcv_mss,json=tcpiRcvMss,proto3" json:"tcpi_rcv_mss,omitempty"` + TcpiUnacked uint32 `protobuf:"varint,13,opt,name=tcpi_unacked,json=tcpiUnacked,proto3" json:"tcpi_unacked,omitempty"` + TcpiSacked uint32 `protobuf:"varint,14,opt,name=tcpi_sacked,json=tcpiSacked,proto3" json:"tcpi_sacked,omitempty"` + TcpiLost uint32 `protobuf:"varint,15,opt,name=tcpi_lost,json=tcpiLost,proto3" 
json:"tcpi_lost,omitempty"` + TcpiRetrans uint32 `protobuf:"varint,16,opt,name=tcpi_retrans,json=tcpiRetrans,proto3" json:"tcpi_retrans,omitempty"` + TcpiFackets uint32 `protobuf:"varint,17,opt,name=tcpi_fackets,json=tcpiFackets,proto3" json:"tcpi_fackets,omitempty"` + TcpiLastDataSent uint32 `protobuf:"varint,18,opt,name=tcpi_last_data_sent,json=tcpiLastDataSent,proto3" json:"tcpi_last_data_sent,omitempty"` + TcpiLastAckSent uint32 `protobuf:"varint,19,opt,name=tcpi_last_ack_sent,json=tcpiLastAckSent,proto3" json:"tcpi_last_ack_sent,omitempty"` + TcpiLastDataRecv uint32 `protobuf:"varint,20,opt,name=tcpi_last_data_recv,json=tcpiLastDataRecv,proto3" json:"tcpi_last_data_recv,omitempty"` + TcpiLastAckRecv uint32 `protobuf:"varint,21,opt,name=tcpi_last_ack_recv,json=tcpiLastAckRecv,proto3" json:"tcpi_last_ack_recv,omitempty"` + TcpiPmtu uint32 `protobuf:"varint,22,opt,name=tcpi_pmtu,json=tcpiPmtu,proto3" json:"tcpi_pmtu,omitempty"` + TcpiRcvSsthresh uint32 `protobuf:"varint,23,opt,name=tcpi_rcv_ssthresh,json=tcpiRcvSsthresh,proto3" json:"tcpi_rcv_ssthresh,omitempty"` + TcpiRtt uint32 `protobuf:"varint,24,opt,name=tcpi_rtt,json=tcpiRtt,proto3" json:"tcpi_rtt,omitempty"` + TcpiRttvar uint32 `protobuf:"varint,25,opt,name=tcpi_rttvar,json=tcpiRttvar,proto3" json:"tcpi_rttvar,omitempty"` + TcpiSndSsthresh uint32 `protobuf:"varint,26,opt,name=tcpi_snd_ssthresh,json=tcpiSndSsthresh,proto3" json:"tcpi_snd_ssthresh,omitempty"` + TcpiSndCwnd uint32 `protobuf:"varint,27,opt,name=tcpi_snd_cwnd,json=tcpiSndCwnd,proto3" json:"tcpi_snd_cwnd,omitempty"` + TcpiAdvmss uint32 `protobuf:"varint,28,opt,name=tcpi_advmss,json=tcpiAdvmss,proto3" json:"tcpi_advmss,omitempty"` + TcpiReordering uint32 `protobuf:"varint,29,opt,name=tcpi_reordering,json=tcpiReordering,proto3" json:"tcpi_reordering,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SocketOptionTcpInfo) Reset() { *m = SocketOptionTcpInfo{} } +func (m *SocketOptionTcpInfo) String() string { return proto.CompactTextString(m) } +func (*SocketOptionTcpInfo) ProtoMessage() {} +func (*SocketOptionTcpInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{19} +} +func (m *SocketOptionTcpInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SocketOptionTcpInfo.Unmarshal(m, b) +} +func (m *SocketOptionTcpInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SocketOptionTcpInfo.Marshal(b, m, deterministic) +} +func (dst *SocketOptionTcpInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_SocketOptionTcpInfo.Merge(dst, src) +} +func (m *SocketOptionTcpInfo) XXX_Size() int { + return xxx_messageInfo_SocketOptionTcpInfo.Size(m) +} +func (m *SocketOptionTcpInfo) XXX_DiscardUnknown() { + xxx_messageInfo_SocketOptionTcpInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_SocketOptionTcpInfo proto.InternalMessageInfo + +func (m *SocketOptionTcpInfo) GetTcpiState() uint32 { + if m != nil { + return m.TcpiState + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiCaState() uint32 { + if m != nil { + return m.TcpiCaState + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRetransmits() uint32 { + if m != nil { + return m.TcpiRetransmits + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiProbes() uint32 { + if m != nil { + return m.TcpiProbes + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiBackoff() uint32 { + if m != nil { + return m.TcpiBackoff + } + return 0 +} + +func (m 
*SocketOptionTcpInfo) GetTcpiOptions() uint32 { + if m != nil { + return m.TcpiOptions + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiSndWscale() uint32 { + if m != nil { + return m.TcpiSndWscale + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRcvWscale() uint32 { + if m != nil { + return m.TcpiRcvWscale + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRto() uint32 { + if m != nil { + return m.TcpiRto + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiAto() uint32 { + if m != nil { + return m.TcpiAto + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiSndMss() uint32 { + if m != nil { + return m.TcpiSndMss + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRcvMss() uint32 { + if m != nil { + return m.TcpiRcvMss + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiUnacked() uint32 { + if m != nil { + return m.TcpiUnacked + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiSacked() uint32 { + if m != nil { + return m.TcpiSacked + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiLost() uint32 { + if m != nil { + return m.TcpiLost + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRetrans() uint32 { + if m != nil { + return m.TcpiRetrans + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiFackets() uint32 { + if m != nil { + return m.TcpiFackets + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiLastDataSent() uint32 { + if m != nil { + return m.TcpiLastDataSent + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiLastAckSent() uint32 { + if m != nil { + return m.TcpiLastAckSent + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiLastDataRecv() uint32 { + if m != nil { + return m.TcpiLastDataRecv + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiLastAckRecv() uint32 { + if m != nil { + return m.TcpiLastAckRecv + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiPmtu() uint32 { + if m != nil { + return m.TcpiPmtu + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRcvSsthresh() uint32 { + if m != nil { + return m.TcpiRcvSsthresh + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRtt() uint32 { + if m != nil { + return m.TcpiRtt + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiRttvar() uint32 { + if m != nil { + return m.TcpiRttvar + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiSndSsthresh() uint32 { + if m != nil { + return m.TcpiSndSsthresh + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiSndCwnd() uint32 { + if m != nil { + return m.TcpiSndCwnd + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiAdvmss() uint32 { + if m != nil { + return m.TcpiAdvmss + } + return 0 +} + +func (m *SocketOptionTcpInfo) GetTcpiReordering() uint32 { + if m != nil { + return m.TcpiReordering + } + return 0 +} + +type GetTopChannelsRequest struct { + // start_channel_id indicates that only channels at or above this id should be + // included in the results. + // To request the first page, this should be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + StartChannelId int64 `protobuf:"varint,1,opt,name=start_channel_id,json=startChannelId,proto3" json:"start_channel_id,omitempty"` + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. 
+ MaxResults int64 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopChannelsRequest) Reset() { *m = GetTopChannelsRequest{} } +func (m *GetTopChannelsRequest) String() string { return proto.CompactTextString(m) } +func (*GetTopChannelsRequest) ProtoMessage() {} +func (*GetTopChannelsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{20} +} +func (m *GetTopChannelsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopChannelsRequest.Unmarshal(m, b) +} +func (m *GetTopChannelsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopChannelsRequest.Marshal(b, m, deterministic) +} +func (dst *GetTopChannelsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopChannelsRequest.Merge(dst, src) +} +func (m *GetTopChannelsRequest) XXX_Size() int { + return xxx_messageInfo_GetTopChannelsRequest.Size(m) +} +func (m *GetTopChannelsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopChannelsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopChannelsRequest proto.InternalMessageInfo + +func (m *GetTopChannelsRequest) GetStartChannelId() int64 { + if m != nil { + return m.StartChannelId + } + return 0 +} + +func (m *GetTopChannelsRequest) GetMaxResults() int64 { + if m != nil { + return m.MaxResults + } + return 0 +} + +type GetTopChannelsResponse struct { + // list of channels that the connection detail service knows about. Sorted in + // ascending channel_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + Channel []*Channel `protobuf:"bytes,1,rep,name=channel,proto3" json:"channel,omitempty"` + // If set, indicates that the list of channels is the final list. Requesting + // more channels can only return more if they are created after this RPC + // completes. 
+ End bool `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTopChannelsResponse) Reset() { *m = GetTopChannelsResponse{} } +func (m *GetTopChannelsResponse) String() string { return proto.CompactTextString(m) } +func (*GetTopChannelsResponse) ProtoMessage() {} +func (*GetTopChannelsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{21} +} +func (m *GetTopChannelsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTopChannelsResponse.Unmarshal(m, b) +} +func (m *GetTopChannelsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTopChannelsResponse.Marshal(b, m, deterministic) +} +func (dst *GetTopChannelsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTopChannelsResponse.Merge(dst, src) +} +func (m *GetTopChannelsResponse) XXX_Size() int { + return xxx_messageInfo_GetTopChannelsResponse.Size(m) +} +func (m *GetTopChannelsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTopChannelsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTopChannelsResponse proto.InternalMessageInfo + +func (m *GetTopChannelsResponse) GetChannel() []*Channel { + if m != nil { + return m.Channel + } + return nil +} + +func (m *GetTopChannelsResponse) GetEnd() bool { + if m != nil { + return m.End + } + return false +} + +type GetServersRequest struct { + // start_server_id indicates that only servers at or above this id should be + // included in the results. + // To request the first page, this must be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + StartServerId int64 `protobuf:"varint,1,opt,name=start_server_id,json=startServerId,proto3" json:"start_server_id,omitempty"` + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. 
+ MaxResults int64 `protobuf:"varint,2,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServersRequest) Reset() { *m = GetServersRequest{} } +func (m *GetServersRequest) String() string { return proto.CompactTextString(m) } +func (*GetServersRequest) ProtoMessage() {} +func (*GetServersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{22} +} +func (m *GetServersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServersRequest.Unmarshal(m, b) +} +func (m *GetServersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServersRequest.Marshal(b, m, deterministic) +} +func (dst *GetServersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServersRequest.Merge(dst, src) +} +func (m *GetServersRequest) XXX_Size() int { + return xxx_messageInfo_GetServersRequest.Size(m) +} +func (m *GetServersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServersRequest proto.InternalMessageInfo + +func (m *GetServersRequest) GetStartServerId() int64 { + if m != nil { + return m.StartServerId + } + return 0 +} + +func (m *GetServersRequest) GetMaxResults() int64 { + if m != nil { + return m.MaxResults + } + return 0 +} + +type GetServersResponse struct { + // list of servers that the connection detail service knows about. Sorted in + // ascending server_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + Server []*Server `protobuf:"bytes,1,rep,name=server,proto3" json:"server,omitempty"` + // If set, indicates that the list of servers is the final list. Requesting + // more servers will only return more if they are created after this RPC + // completes. + End bool `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServersResponse) Reset() { *m = GetServersResponse{} } +func (m *GetServersResponse) String() string { return proto.CompactTextString(m) } +func (*GetServersResponse) ProtoMessage() {} +func (*GetServersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{23} +} +func (m *GetServersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServersResponse.Unmarshal(m, b) +} +func (m *GetServersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServersResponse.Marshal(b, m, deterministic) +} +func (dst *GetServersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServersResponse.Merge(dst, src) +} +func (m *GetServersResponse) XXX_Size() int { + return xxx_messageInfo_GetServersResponse.Size(m) +} +func (m *GetServersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetServersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServersResponse proto.InternalMessageInfo + +func (m *GetServersResponse) GetServer() []*Server { + if m != nil { + return m.Server + } + return nil +} + +func (m *GetServersResponse) GetEnd() bool { + if m != nil { + return m.End + } + return false +} + +type GetServerRequest struct { + // server_id is the identifier of the specific server to get. 
+ ServerId int64 `protobuf:"varint,1,opt,name=server_id,json=serverId,proto3" json:"server_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerRequest) Reset() { *m = GetServerRequest{} } +func (m *GetServerRequest) String() string { return proto.CompactTextString(m) } +func (*GetServerRequest) ProtoMessage() {} +func (*GetServerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{24} +} +func (m *GetServerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerRequest.Unmarshal(m, b) +} +func (m *GetServerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerRequest.Marshal(b, m, deterministic) +} +func (dst *GetServerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerRequest.Merge(dst, src) +} +func (m *GetServerRequest) XXX_Size() int { + return xxx_messageInfo_GetServerRequest.Size(m) +} +func (m *GetServerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerRequest proto.InternalMessageInfo + +func (m *GetServerRequest) GetServerId() int64 { + if m != nil { + return m.ServerId + } + return 0 +} + +type GetServerResponse struct { + // The Server that corresponds to the requested server_id. This field + // should be set. + Server *Server `protobuf:"bytes,1,opt,name=server,proto3" json:"server,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerResponse) Reset() { *m = GetServerResponse{} } +func (m *GetServerResponse) String() string { return proto.CompactTextString(m) } +func (*GetServerResponse) ProtoMessage() {} +func (*GetServerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{25} +} +func (m *GetServerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerResponse.Unmarshal(m, b) +} +func (m *GetServerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerResponse.Marshal(b, m, deterministic) +} +func (dst *GetServerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerResponse.Merge(dst, src) +} +func (m *GetServerResponse) XXX_Size() int { + return xxx_messageInfo_GetServerResponse.Size(m) +} +func (m *GetServerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerResponse proto.InternalMessageInfo + +func (m *GetServerResponse) GetServer() *Server { + if m != nil { + return m.Server + } + return nil +} + +type GetServerSocketsRequest struct { + ServerId int64 `protobuf:"varint,1,opt,name=server_id,json=serverId,proto3" json:"server_id,omitempty"` + // start_socket_id indicates that only sockets at or above this id should be + // included in the results. + // To request the first page, this must be set to 0. To request + // subsequent pages, the client generates this value by adding 1 to + // the highest seen result ID. + StartSocketId int64 `protobuf:"varint,2,opt,name=start_socket_id,json=startSocketId,proto3" json:"start_socket_id,omitempty"` + // If non-zero, the server will return a page of results containing + // at most this many items. If zero, the server will choose a + // reasonable page size. Must never be negative. 
+ MaxResults int64 `protobuf:"varint,3,opt,name=max_results,json=maxResults,proto3" json:"max_results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerSocketsRequest) Reset() { *m = GetServerSocketsRequest{} } +func (m *GetServerSocketsRequest) String() string { return proto.CompactTextString(m) } +func (*GetServerSocketsRequest) ProtoMessage() {} +func (*GetServerSocketsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{26} +} +func (m *GetServerSocketsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerSocketsRequest.Unmarshal(m, b) +} +func (m *GetServerSocketsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerSocketsRequest.Marshal(b, m, deterministic) +} +func (dst *GetServerSocketsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerSocketsRequest.Merge(dst, src) +} +func (m *GetServerSocketsRequest) XXX_Size() int { + return xxx_messageInfo_GetServerSocketsRequest.Size(m) +} +func (m *GetServerSocketsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerSocketsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerSocketsRequest proto.InternalMessageInfo + +func (m *GetServerSocketsRequest) GetServerId() int64 { + if m != nil { + return m.ServerId + } + return 0 +} + +func (m *GetServerSocketsRequest) GetStartSocketId() int64 { + if m != nil { + return m.StartSocketId + } + return 0 +} + +func (m *GetServerSocketsRequest) GetMaxResults() int64 { + if m != nil { + return m.MaxResults + } + return 0 +} + +type GetServerSocketsResponse struct { + // list of socket refs that the connection detail service knows about. Sorted in + // ascending socket_id order. + // Must contain at least 1 result, otherwise 'end' must be true. + SocketRef []*SocketRef `protobuf:"bytes,1,rep,name=socket_ref,json=socketRef,proto3" json:"socket_ref,omitempty"` + // If set, indicates that the list of sockets is the final list. Requesting + // more sockets will only return more if they are created after this RPC + // completes. 
+ End bool `protobuf:"varint,2,opt,name=end,proto3" json:"end,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetServerSocketsResponse) Reset() { *m = GetServerSocketsResponse{} } +func (m *GetServerSocketsResponse) String() string { return proto.CompactTextString(m) } +func (*GetServerSocketsResponse) ProtoMessage() {} +func (*GetServerSocketsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{27} +} +func (m *GetServerSocketsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetServerSocketsResponse.Unmarshal(m, b) +} +func (m *GetServerSocketsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetServerSocketsResponse.Marshal(b, m, deterministic) +} +func (dst *GetServerSocketsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetServerSocketsResponse.Merge(dst, src) +} +func (m *GetServerSocketsResponse) XXX_Size() int { + return xxx_messageInfo_GetServerSocketsResponse.Size(m) +} +func (m *GetServerSocketsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetServerSocketsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetServerSocketsResponse proto.InternalMessageInfo + +func (m *GetServerSocketsResponse) GetSocketRef() []*SocketRef { + if m != nil { + return m.SocketRef + } + return nil +} + +func (m *GetServerSocketsResponse) GetEnd() bool { + if m != nil { + return m.End + } + return false +} + +type GetChannelRequest struct { + // channel_id is the identifier of the specific channel to get. + ChannelId int64 `protobuf:"varint,1,opt,name=channel_id,json=channelId,proto3" json:"channel_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetChannelRequest) Reset() { *m = GetChannelRequest{} } +func (m *GetChannelRequest) String() string { return proto.CompactTextString(m) } +func (*GetChannelRequest) ProtoMessage() {} +func (*GetChannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{28} +} +func (m *GetChannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetChannelRequest.Unmarshal(m, b) +} +func (m *GetChannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetChannelRequest.Marshal(b, m, deterministic) +} +func (dst *GetChannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetChannelRequest.Merge(dst, src) +} +func (m *GetChannelRequest) XXX_Size() int { + return xxx_messageInfo_GetChannelRequest.Size(m) +} +func (m *GetChannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetChannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetChannelRequest proto.InternalMessageInfo + +func (m *GetChannelRequest) GetChannelId() int64 { + if m != nil { + return m.ChannelId + } + return 0 +} + +type GetChannelResponse struct { + // The Channel that corresponds to the requested channel_id. This field + // should be set. 
+ Channel *Channel `protobuf:"bytes,1,opt,name=channel,proto3" json:"channel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetChannelResponse) Reset() { *m = GetChannelResponse{} } +func (m *GetChannelResponse) String() string { return proto.CompactTextString(m) } +func (*GetChannelResponse) ProtoMessage() {} +func (*GetChannelResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{29} +} +func (m *GetChannelResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetChannelResponse.Unmarshal(m, b) +} +func (m *GetChannelResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetChannelResponse.Marshal(b, m, deterministic) +} +func (dst *GetChannelResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetChannelResponse.Merge(dst, src) +} +func (m *GetChannelResponse) XXX_Size() int { + return xxx_messageInfo_GetChannelResponse.Size(m) +} +func (m *GetChannelResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetChannelResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetChannelResponse proto.InternalMessageInfo + +func (m *GetChannelResponse) GetChannel() *Channel { + if m != nil { + return m.Channel + } + return nil +} + +type GetSubchannelRequest struct { + // subchannel_id is the identifier of the specific subchannel to get. + SubchannelId int64 `protobuf:"varint,1,opt,name=subchannel_id,json=subchannelId,proto3" json:"subchannel_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSubchannelRequest) Reset() { *m = GetSubchannelRequest{} } +func (m *GetSubchannelRequest) String() string { return proto.CompactTextString(m) } +func (*GetSubchannelRequest) ProtoMessage() {} +func (*GetSubchannelRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{30} +} +func (m *GetSubchannelRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSubchannelRequest.Unmarshal(m, b) +} +func (m *GetSubchannelRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSubchannelRequest.Marshal(b, m, deterministic) +} +func (dst *GetSubchannelRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSubchannelRequest.Merge(dst, src) +} +func (m *GetSubchannelRequest) XXX_Size() int { + return xxx_messageInfo_GetSubchannelRequest.Size(m) +} +func (m *GetSubchannelRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSubchannelRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSubchannelRequest proto.InternalMessageInfo + +func (m *GetSubchannelRequest) GetSubchannelId() int64 { + if m != nil { + return m.SubchannelId + } + return 0 +} + +type GetSubchannelResponse struct { + // The Subchannel that corresponds to the requested subchannel_id. This + // field should be set. 
+ Subchannel *Subchannel `protobuf:"bytes,1,opt,name=subchannel,proto3" json:"subchannel,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSubchannelResponse) Reset() { *m = GetSubchannelResponse{} } +func (m *GetSubchannelResponse) String() string { return proto.CompactTextString(m) } +func (*GetSubchannelResponse) ProtoMessage() {} +func (*GetSubchannelResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{31} +} +func (m *GetSubchannelResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSubchannelResponse.Unmarshal(m, b) +} +func (m *GetSubchannelResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSubchannelResponse.Marshal(b, m, deterministic) +} +func (dst *GetSubchannelResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSubchannelResponse.Merge(dst, src) +} +func (m *GetSubchannelResponse) XXX_Size() int { + return xxx_messageInfo_GetSubchannelResponse.Size(m) +} +func (m *GetSubchannelResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetSubchannelResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSubchannelResponse proto.InternalMessageInfo + +func (m *GetSubchannelResponse) GetSubchannel() *Subchannel { + if m != nil { + return m.Subchannel + } + return nil +} + +type GetSocketRequest struct { + // socket_id is the identifier of the specific socket to get. + SocketId int64 `protobuf:"varint,1,opt,name=socket_id,json=socketId,proto3" json:"socket_id,omitempty"` + // If true, the response will contain only high level information + // that is inexpensive to obtain. Fields thay may be omitted are + // documented. + Summary bool `protobuf:"varint,2,opt,name=summary,proto3" json:"summary,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSocketRequest) Reset() { *m = GetSocketRequest{} } +func (m *GetSocketRequest) String() string { return proto.CompactTextString(m) } +func (*GetSocketRequest) ProtoMessage() {} +func (*GetSocketRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{32} +} +func (m *GetSocketRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSocketRequest.Unmarshal(m, b) +} +func (m *GetSocketRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSocketRequest.Marshal(b, m, deterministic) +} +func (dst *GetSocketRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSocketRequest.Merge(dst, src) +} +func (m *GetSocketRequest) XXX_Size() int { + return xxx_messageInfo_GetSocketRequest.Size(m) +} +func (m *GetSocketRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetSocketRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSocketRequest proto.InternalMessageInfo + +func (m *GetSocketRequest) GetSocketId() int64 { + if m != nil { + return m.SocketId + } + return 0 +} + +func (m *GetSocketRequest) GetSummary() bool { + if m != nil { + return m.Summary + } + return false +} + +type GetSocketResponse struct { + // The Socket that corresponds to the requested socket_id. This field + // should be set. 
+ Socket *Socket `protobuf:"bytes,1,opt,name=socket,proto3" json:"socket,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetSocketResponse) Reset() { *m = GetSocketResponse{} } +func (m *GetSocketResponse) String() string { return proto.CompactTextString(m) } +func (*GetSocketResponse) ProtoMessage() {} +func (*GetSocketResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_channelz_449295370a82a4c0, []int{33} +} +func (m *GetSocketResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetSocketResponse.Unmarshal(m, b) +} +func (m *GetSocketResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetSocketResponse.Marshal(b, m, deterministic) +} +func (dst *GetSocketResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetSocketResponse.Merge(dst, src) +} +func (m *GetSocketResponse) XXX_Size() int { + return xxx_messageInfo_GetSocketResponse.Size(m) +} +func (m *GetSocketResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetSocketResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetSocketResponse proto.InternalMessageInfo + +func (m *GetSocketResponse) GetSocket() *Socket { + if m != nil { + return m.Socket + } + return nil +} + +func init() { + proto.RegisterType((*Channel)(nil), "grpc.channelz.v1.Channel") + proto.RegisterType((*Subchannel)(nil), "grpc.channelz.v1.Subchannel") + proto.RegisterType((*ChannelConnectivityState)(nil), "grpc.channelz.v1.ChannelConnectivityState") + proto.RegisterType((*ChannelData)(nil), "grpc.channelz.v1.ChannelData") + proto.RegisterType((*ChannelTraceEvent)(nil), "grpc.channelz.v1.ChannelTraceEvent") + proto.RegisterType((*ChannelTrace)(nil), "grpc.channelz.v1.ChannelTrace") + proto.RegisterType((*ChannelRef)(nil), "grpc.channelz.v1.ChannelRef") + proto.RegisterType((*SubchannelRef)(nil), "grpc.channelz.v1.SubchannelRef") + proto.RegisterType((*SocketRef)(nil), "grpc.channelz.v1.SocketRef") + proto.RegisterType((*ServerRef)(nil), "grpc.channelz.v1.ServerRef") + proto.RegisterType((*Server)(nil), "grpc.channelz.v1.Server") + proto.RegisterType((*ServerData)(nil), "grpc.channelz.v1.ServerData") + proto.RegisterType((*Socket)(nil), "grpc.channelz.v1.Socket") + proto.RegisterType((*SocketData)(nil), "grpc.channelz.v1.SocketData") + proto.RegisterType((*Address)(nil), "grpc.channelz.v1.Address") + proto.RegisterType((*Address_TcpIpAddress)(nil), "grpc.channelz.v1.Address.TcpIpAddress") + proto.RegisterType((*Address_UdsAddress)(nil), "grpc.channelz.v1.Address.UdsAddress") + proto.RegisterType((*Address_OtherAddress)(nil), "grpc.channelz.v1.Address.OtherAddress") + proto.RegisterType((*Security)(nil), "grpc.channelz.v1.Security") + proto.RegisterType((*Security_Tls)(nil), "grpc.channelz.v1.Security.Tls") + proto.RegisterType((*Security_OtherSecurity)(nil), "grpc.channelz.v1.Security.OtherSecurity") + proto.RegisterType((*SocketOption)(nil), "grpc.channelz.v1.SocketOption") + proto.RegisterType((*SocketOptionTimeout)(nil), "grpc.channelz.v1.SocketOptionTimeout") + proto.RegisterType((*SocketOptionLinger)(nil), "grpc.channelz.v1.SocketOptionLinger") + proto.RegisterType((*SocketOptionTcpInfo)(nil), "grpc.channelz.v1.SocketOptionTcpInfo") + proto.RegisterType((*GetTopChannelsRequest)(nil), "grpc.channelz.v1.GetTopChannelsRequest") + proto.RegisterType((*GetTopChannelsResponse)(nil), "grpc.channelz.v1.GetTopChannelsResponse") + proto.RegisterType((*GetServersRequest)(nil), 
"grpc.channelz.v1.GetServersRequest") + proto.RegisterType((*GetServersResponse)(nil), "grpc.channelz.v1.GetServersResponse") + proto.RegisterType((*GetServerRequest)(nil), "grpc.channelz.v1.GetServerRequest") + proto.RegisterType((*GetServerResponse)(nil), "grpc.channelz.v1.GetServerResponse") + proto.RegisterType((*GetServerSocketsRequest)(nil), "grpc.channelz.v1.GetServerSocketsRequest") + proto.RegisterType((*GetServerSocketsResponse)(nil), "grpc.channelz.v1.GetServerSocketsResponse") + proto.RegisterType((*GetChannelRequest)(nil), "grpc.channelz.v1.GetChannelRequest") + proto.RegisterType((*GetChannelResponse)(nil), "grpc.channelz.v1.GetChannelResponse") + proto.RegisterType((*GetSubchannelRequest)(nil), "grpc.channelz.v1.GetSubchannelRequest") + proto.RegisterType((*GetSubchannelResponse)(nil), "grpc.channelz.v1.GetSubchannelResponse") + proto.RegisterType((*GetSocketRequest)(nil), "grpc.channelz.v1.GetSocketRequest") + proto.RegisterType((*GetSocketResponse)(nil), "grpc.channelz.v1.GetSocketResponse") + proto.RegisterEnum("grpc.channelz.v1.ChannelConnectivityState_State", ChannelConnectivityState_State_name, ChannelConnectivityState_State_value) + proto.RegisterEnum("grpc.channelz.v1.ChannelTraceEvent_Severity", ChannelTraceEvent_Severity_name, ChannelTraceEvent_Severity_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ChannelzClient is the client API for Channelz service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ChannelzClient interface { + // Gets all root channels (i.e. channels the application has directly + // created). This does not include subchannels nor non-top level channels. + GetTopChannels(ctx context.Context, in *GetTopChannelsRequest, opts ...grpc.CallOption) (*GetTopChannelsResponse, error) + // Gets all servers that exist in the process. + GetServers(ctx context.Context, in *GetServersRequest, opts ...grpc.CallOption) (*GetServersResponse, error) + // Returns a single Server, or else a NOT_FOUND code. + GetServer(ctx context.Context, in *GetServerRequest, opts ...grpc.CallOption) (*GetServerResponse, error) + // Gets all server sockets that exist in the process. + GetServerSockets(ctx context.Context, in *GetServerSocketsRequest, opts ...grpc.CallOption) (*GetServerSocketsResponse, error) + // Returns a single Channel, or else a NOT_FOUND code. + GetChannel(ctx context.Context, in *GetChannelRequest, opts ...grpc.CallOption) (*GetChannelResponse, error) + // Returns a single Subchannel, or else a NOT_FOUND code. + GetSubchannel(ctx context.Context, in *GetSubchannelRequest, opts ...grpc.CallOption) (*GetSubchannelResponse, error) + // Returns a single Socket or else a NOT_FOUND code. 
+ GetSocket(ctx context.Context, in *GetSocketRequest, opts ...grpc.CallOption) (*GetSocketResponse, error) +} + +type channelzClient struct { + cc *grpc.ClientConn +} + +func NewChannelzClient(cc *grpc.ClientConn) ChannelzClient { + return &channelzClient{cc} +} + +func (c *channelzClient) GetTopChannels(ctx context.Context, in *GetTopChannelsRequest, opts ...grpc.CallOption) (*GetTopChannelsResponse, error) { + out := new(GetTopChannelsResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetTopChannels", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetServers(ctx context.Context, in *GetServersRequest, opts ...grpc.CallOption) (*GetServersResponse, error) { + out := new(GetServersResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetServers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetServer(ctx context.Context, in *GetServerRequest, opts ...grpc.CallOption) (*GetServerResponse, error) { + out := new(GetServerResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetServer", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetServerSockets(ctx context.Context, in *GetServerSocketsRequest, opts ...grpc.CallOption) (*GetServerSocketsResponse, error) { + out := new(GetServerSocketsResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetServerSockets", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetChannel(ctx context.Context, in *GetChannelRequest, opts ...grpc.CallOption) (*GetChannelResponse, error) { + out := new(GetChannelResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetChannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetSubchannel(ctx context.Context, in *GetSubchannelRequest, opts ...grpc.CallOption) (*GetSubchannelResponse, error) { + out := new(GetSubchannelResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetSubchannel", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *channelzClient) GetSocket(ctx context.Context, in *GetSocketRequest, opts ...grpc.CallOption) (*GetSocketResponse, error) { + out := new(GetSocketResponse) + err := c.cc.Invoke(ctx, "/grpc.channelz.v1.Channelz/GetSocket", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// ChannelzServer is the server API for Channelz service. +type ChannelzServer interface { + // Gets all root channels (i.e. channels the application has directly + // created). This does not include subchannels nor non-top level channels. + GetTopChannels(context.Context, *GetTopChannelsRequest) (*GetTopChannelsResponse, error) + // Gets all servers that exist in the process. + GetServers(context.Context, *GetServersRequest) (*GetServersResponse, error) + // Returns a single Server, or else a NOT_FOUND code. + GetServer(context.Context, *GetServerRequest) (*GetServerResponse, error) + // Gets all server sockets that exist in the process. + GetServerSockets(context.Context, *GetServerSocketsRequest) (*GetServerSocketsResponse, error) + // Returns a single Channel, or else a NOT_FOUND code. + GetChannel(context.Context, *GetChannelRequest) (*GetChannelResponse, error) + // Returns a single Subchannel, or else a NOT_FOUND code. 
+ GetSubchannel(context.Context, *GetSubchannelRequest) (*GetSubchannelResponse, error) + // Returns a single Socket or else a NOT_FOUND code. + GetSocket(context.Context, *GetSocketRequest) (*GetSocketResponse, error) +} + +func RegisterChannelzServer(s *grpc.Server, srv ChannelzServer) { + s.RegisterService(&_Channelz_serviceDesc, srv) +} + +func _Channelz_GetTopChannels_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTopChannelsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetTopChannels(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetTopChannels", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetTopChannels(ctx, req.(*GetTopChannelsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetServers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetServers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetServers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetServers(ctx, req.(*GetServersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetServer_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetServer(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetServer", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetServer(ctx, req.(*GetServerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetServerSockets_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetServerSocketsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetServerSockets(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetServerSockets", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetServerSockets(ctx, req.(*GetServerSocketsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetChannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetChannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetChannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetChannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetChannel(ctx, 
req.(*GetChannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetSubchannel_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSubchannelRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetSubchannel(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetSubchannel", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetSubchannel(ctx, req.(*GetSubchannelRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Channelz_GetSocket_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetSocketRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ChannelzServer).GetSocket(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.channelz.v1.Channelz/GetSocket", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ChannelzServer).GetSocket(ctx, req.(*GetSocketRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Channelz_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.channelz.v1.Channelz", + HandlerType: (*ChannelzServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetTopChannels", + Handler: _Channelz_GetTopChannels_Handler, + }, + { + MethodName: "GetServers", + Handler: _Channelz_GetServers_Handler, + }, + { + MethodName: "GetServer", + Handler: _Channelz_GetServer_Handler, + }, + { + MethodName: "GetServerSockets", + Handler: _Channelz_GetServerSockets_Handler, + }, + { + MethodName: "GetChannel", + Handler: _Channelz_GetChannel_Handler, + }, + { + MethodName: "GetSubchannel", + Handler: _Channelz_GetSubchannel_Handler, + }, + { + MethodName: "GetSocket", + Handler: _Channelz_GetSocket_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/channelz/v1/channelz.proto", +} + +func init() { + proto.RegisterFile("grpc/channelz/v1/channelz.proto", fileDescriptor_channelz_449295370a82a4c0) +} + +var fileDescriptor_channelz_449295370a82a4c0 = []byte{ + // 2584 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x59, 0x4b, 0x6f, 0xdb, 0xd8, + 0xf5, 0xb7, 0xde, 0xd4, 0xd1, 0x23, 0xf2, 0x4d, 0x26, 0x43, 0x2b, 0x99, 0xb1, 0xff, 0xf4, 0x4c, + 0xc6, 0x93, 0xfc, 0x23, 0xc7, 0x9e, 0x34, 0x28, 0x3a, 0x2d, 0x3a, 0xb6, 0x62, 0xc7, 0x72, 0x1d, + 0x39, 0xa0, 0xe4, 0x49, 0xa6, 0x28, 0xca, 0xa1, 0xc9, 0x6b, 0x99, 0x35, 0x45, 0xaa, 0xbc, 0x57, + 0xf2, 0x24, 0x9b, 0x2e, 0xba, 0xef, 0xb2, 0x28, 0xfa, 0x01, 0xba, 0xe9, 0xa2, 0x40, 0x81, 0x02, + 0xed, 0xb6, 0xdf, 0xa6, 0xdf, 0xa2, 0xb8, 0x0f, 0x3e, 0xf4, 0xb2, 0x14, 0x64, 0xd9, 0x8d, 0x21, + 0x1e, 0xfe, 0xce, 0xef, 0x9c, 0x7b, 0x5e, 0xf7, 0xf2, 0x1a, 0xd6, 0x7b, 0xc1, 0xc0, 0xda, 0xb6, + 0x2e, 0x4d, 0xcf, 0xc3, 0xee, 0xbb, 0xed, 0xd1, 0x4e, 0xf4, 0xbb, 0x31, 0x08, 0x7c, 0xea, 0xa3, + 0x1a, 0x03, 0x34, 0x22, 0xe1, 0x68, 0xa7, 0xbe, 0xd6, 0xf3, 0xfd, 0x9e, 0x8b, 0xb7, 0xf9, 0xfb, + 0xf3, 0xe1, 0xc5, 0xb6, 0xe9, 0xbd, 0x15, 0xe0, 0xfa, 0xa7, 0x93, 0xaf, 0xec, 0x61, 0x60, 0x52, + 0xc7, 0xf7, 0xe4, 0xfb, 0xf5, 0xc9, 0xf7, 0xd4, 0xe9, 0x63, 0x42, 0xcd, 0xfe, 0x60, 0x1e, 0xc1, + 0x75, 0x60, 0x0e, 0x06, 0x38, 0x20, 0xe2, 0xbd, 
0xf6, 0xb7, 0x34, 0x14, 0x9a, 0xc2, 0x17, 0xd4, + 0x80, 0x4c, 0x80, 0x2f, 0xd4, 0xd4, 0x46, 0x6a, 0xab, 0xb4, 0x7b, 0xbf, 0x31, 0xe9, 0x67, 0x43, + 0xe2, 0x74, 0x7c, 0xa1, 0x33, 0x20, 0xda, 0x81, 0xac, 0x6d, 0x52, 0x53, 0x4d, 0x73, 0x85, 0x4f, + 0xe6, 0x2a, 0x3c, 0x37, 0xa9, 0xa9, 0x73, 0x28, 0xfa, 0x19, 0x94, 0x24, 0xc0, 0x60, 0xa6, 0x32, + 0x1b, 0x99, 0x85, 0xa6, 0xc0, 0x8a, 0x7e, 0xa3, 0x43, 0xa8, 0x92, 0xe1, 0x79, 0x92, 0x21, 0xcb, + 0x19, 0xd6, 0xa7, 0x19, 0x3a, 0x11, 0x8e, 0x91, 0x54, 0x48, 0xf2, 0x11, 0xfd, 0x04, 0x80, 0xf8, + 0xd6, 0x15, 0xa6, 0x9c, 0x23, 0xc7, 0x39, 0xee, 0xcd, 0xe0, 0xe0, 0x18, 0xa6, 0x5f, 0x24, 0xe1, + 0x4f, 0xed, 0x1f, 0x69, 0x80, 0x98, 0x1c, 0xed, 0x24, 0x83, 0xb6, 0xd0, 0x8f, 0xff, 0xe1, 0xb8, + 0xfd, 0x3b, 0x05, 0xaa, 0x74, 0xaf, 0xe9, 0x7b, 0x1e, 0xb6, 0xa8, 0x33, 0x72, 0xe8, 0xdb, 0x0e, + 0x35, 0x29, 0x46, 0x87, 0x90, 0x23, 0xec, 0x07, 0x8f, 0x63, 0x75, 0xf7, 0xc9, 0xdc, 0x95, 0x4d, + 0xa9, 0x36, 0xf8, 0x5f, 0x5d, 0xa8, 0x6b, 0xbf, 0x86, 0x9c, 0x20, 0x2c, 0x41, 0xe1, 0xac, 0xfd, + 0x8b, 0xf6, 0xe9, 0xeb, 0x76, 0x6d, 0x05, 0x29, 0x90, 0x6d, 0x3d, 0x3f, 0x39, 0xa8, 0xa5, 0x50, + 0x15, 0xa0, 0x79, 0xda, 0x6e, 0x1f, 0x34, 0xbb, 0xad, 0xf6, 0x8b, 0x5a, 0x1a, 0x15, 0x21, 0xa7, + 0x1f, 0xec, 0x3d, 0xff, 0xae, 0x96, 0x41, 0x1f, 0xc1, 0x6a, 0x57, 0xdf, 0x6b, 0x77, 0x5a, 0x07, + 0xed, 0xae, 0x71, 0xb8, 0xd7, 0x3a, 0x39, 0xd3, 0x0f, 0x6a, 0x59, 0x54, 0x06, 0xa5, 0x73, 0x74, + 0xd6, 0x7d, 0xce, 0x98, 0x72, 0xda, 0x7f, 0xd2, 0x50, 0x4a, 0x64, 0x07, 0x7d, 0x93, 0xf4, 0xbb, + 0xb4, 0xfb, 0x70, 0x79, 0xbf, 0xa5, 0xc7, 0xe8, 0x2e, 0xe4, 0xa9, 0x19, 0xf4, 0x30, 0xe5, 0xe5, + 0x50, 0xd4, 0xe5, 0x13, 0x7a, 0x0a, 0x39, 0x1a, 0x98, 0x16, 0x56, 0x33, 0x9c, 0xf9, 0xd3, 0xb9, + 0xcc, 0x5d, 0x86, 0xd2, 0x05, 0x18, 0x6d, 0x42, 0xc5, 0x32, 0x5d, 0x97, 0x18, 0x84, 0x9a, 0x01, + 0xc5, 0xb6, 0x9a, 0xdd, 0x48, 0x6d, 0x65, 0xf4, 0x32, 0x17, 0x76, 0x84, 0x0c, 0x7d, 0x01, 0xb7, + 0x24, 0x68, 0x68, 0x59, 0x18, 0xdb, 0xd8, 0x56, 0x73, 0x1c, 0x56, 0x15, 0xb0, 0x50, 0x8a, 0xfe, + 0x0f, 0x84, 0xa2, 0x71, 0x61, 0x3a, 0x2e, 0xb6, 0xd5, 0x3c, 0x47, 0x95, 0xb8, 0xec, 0x90, 0x8b, + 0xd0, 0x77, 0x70, 0xcf, 0x35, 0x09, 0x35, 0x98, 0x2c, 0x34, 0x6a, 0x44, 0x43, 0x48, 0x2d, 0x70, + 0xe7, 0xeb, 0x0d, 0x31, 0x85, 0x1a, 0xe1, 0x14, 0x6a, 0x74, 0x43, 0x84, 0xae, 0x32, 0xf5, 0xa6, + 0xe9, 0xba, 0xd2, 0xbb, 0xe8, 0x8d, 0xf6, 0xa7, 0x0c, 0xac, 0x26, 0xd7, 0x78, 0x30, 0xc2, 0x1e, + 0x45, 0x1b, 0x50, 0xb2, 0x31, 0xb1, 0x02, 0x67, 0xc0, 0xc6, 0x20, 0x8f, 0x7b, 0x51, 0x4f, 0x8a, + 0xd0, 0x11, 0x28, 0x04, 0x8f, 0x70, 0xe0, 0xd0, 0xb7, 0x3c, 0xa6, 0xd5, 0xdd, 0xff, 0xbf, 0x39, + 0x78, 0x9c, 0xb8, 0xd1, 0x91, 0x3a, 0x7a, 0xa4, 0x8d, 0x7e, 0x0c, 0xc5, 0x78, 0x29, 0x99, 0x85, + 0x4b, 0x89, 0xc1, 0xe8, 0xe7, 0xe3, 0xfd, 0x9a, 0x5d, 0x3c, 0x52, 0x8f, 0x56, 0xc6, 0x3a, 0xf6, + 0x68, 0xaa, 0x63, 0x73, 0x4b, 0x4d, 0x98, 0xa3, 0x95, 0x89, 0x9e, 0xd5, 0x0e, 0x40, 0x09, 0x97, + 0xc6, 0xcb, 0xbf, 0x6b, 0xc4, 0x8d, 0x51, 0x82, 0x42, 0xb3, 0x6b, 0xb4, 0xda, 0x87, 0xa7, 0xb2, + 0x37, 0xba, 0xc6, 0xeb, 0x3d, 0xbd, 0x2d, 0x7a, 0xa3, 0x0c, 0x4a, 0xb3, 0x6b, 0x1c, 0xe8, 0xfa, + 0xa9, 0x5e, 0xcb, 0xec, 0x97, 0xa0, 0x68, 0x5d, 0x3a, 0xae, 0xcd, 0x7c, 0x61, 0xbd, 0x5c, 0x4e, + 0x46, 0x10, 0x3d, 0x84, 0x55, 0x6f, 0xd8, 0x37, 0x30, 0x8b, 0x24, 0x31, 0x5c, 0xbf, 0xd7, 0xc3, + 0x36, 0xcf, 0x4d, 0x46, 0xbf, 0xe5, 0x0d, 0xfb, 0x3c, 0xc2, 0xe4, 0x84, 0x8b, 0x51, 0x0b, 0x90, + 0x15, 0x60, 0xbe, 0x8b, 0x25, 0x2a, 0x25, 0xbd, 0x30, 0xbc, 0xab, 0xa1, 0x56, 0x24, 0x42, 0x5f, + 0x43, 0x5e, 0x98, 0x94, 0x13, 0x71, 0x73, 0x89, 0x44, 0xeb, 0x52, 0x45, 
0xb3, 0x00, 0xe2, 0xf0, + 0xa3, 0x4f, 0x20, 0x0c, 0xbf, 0xe1, 0x84, 0xae, 0x17, 0xa5, 0xa4, 0x65, 0x23, 0x04, 0x59, 0xcf, + 0xec, 0x63, 0xd9, 0xa4, 0xfc, 0xf7, 0x71, 0x56, 0xc9, 0xd4, 0xb2, 0xc7, 0x59, 0x25, 0x5b, 0xcb, + 0x1d, 0x67, 0x95, 0x5c, 0x2d, 0x7f, 0x9c, 0x55, 0xf2, 0xb5, 0xc2, 0x71, 0x56, 0x29, 0xd4, 0x94, + 0xe3, 0xac, 0xa2, 0xd4, 0x8a, 0x9a, 0x0b, 0x95, 0xb1, 0xfc, 0xb0, 0x0e, 0x4d, 0x24, 0xd6, 0xb1, + 0x79, 0x8b, 0x64, 0xf4, 0x72, 0x2c, 0x4c, 0x58, 0x53, 0xc6, 0xac, 0xa5, 0x6a, 0xe9, 0xe3, 0xac, + 0x92, 0xae, 0x65, 0xe6, 0x59, 0xd6, 0xbe, 0x87, 0x62, 0x34, 0x7b, 0xd1, 0x3d, 0x90, 0xd3, 0x97, + 0x59, 0xc9, 0x70, 0x2b, 0x8a, 0x10, 0x24, 0x2c, 0x64, 0xe7, 0x5a, 0x98, 0xbd, 0x1e, 0x66, 0x01, + 0x07, 0x23, 0x1c, 0x84, 0x16, 0xf8, 0x03, 0xb3, 0x90, 0x93, 0x16, 0xb8, 0x20, 0x61, 0x21, 0xbf, + 0xd4, 0x1a, 0x62, 0x0b, 0x7f, 0x4d, 0x41, 0x5e, 0x98, 0x40, 0x8f, 0x93, 0x7b, 0xeb, 0xac, 0x7d, + 0x26, 0xf4, 0x44, 0xec, 0xab, 0x4f, 0xc6, 0xf6, 0xd5, 0xfb, 0xf3, 0xf0, 0x89, 0x6d, 0xf5, 0x1b, + 0xa8, 0xb8, 0x0e, 0xa1, 0xd8, 0x33, 0x44, 0x60, 0x64, 0x19, 0xdd, 0xb8, 0xa5, 0x95, 0x85, 0x86, + 0x10, 0x68, 0x7f, 0x60, 0xa7, 0x81, 0x88, 0x36, 0x9e, 0xda, 0xa9, 0x0f, 0x9a, 0xda, 0xe9, 0xe5, + 0xa6, 0x76, 0x66, 0xa9, 0xa9, 0x9d, 0x7d, 0xef, 0xa9, 0x9d, 0xfb, 0x80, 0xa9, 0xfd, 0x97, 0x34, + 0xe4, 0x45, 0x6c, 0x16, 0xa7, 0x2f, 0x8a, 0xe9, 0x92, 0xe9, 0xe3, 0xf8, 0x44, 0xfa, 0xb6, 0x21, + 0xe7, 0xfa, 0x96, 0xe9, 0xca, 0xd9, 0xbc, 0x36, 0xad, 0xb2, 0x67, 0xdb, 0x01, 0x26, 0x44, 0x17, + 0x38, 0xb4, 0x03, 0xf9, 0x00, 0xf7, 0x7d, 0x8a, 0xe5, 0x44, 0xbe, 0x41, 0x43, 0x02, 0xd1, 0x33, + 0xb6, 0x9b, 0x58, 0x43, 0xbe, 0x9b, 0x44, 0x71, 0x99, 0x2e, 0x2c, 0x81, 0xd0, 0x23, 0x2c, 0x5a, + 0x87, 0x92, 0x60, 0x30, 0x12, 0x5d, 0x00, 0x42, 0xd4, 0x36, 0xfb, 0x58, 0xfb, 0x7d, 0x01, 0x20, + 0x5e, 0x11, 0x4b, 0x2f, 0xa1, 0x01, 0x36, 0xfb, 0x71, 0x15, 0x88, 0x21, 0x54, 0x95, 0xe2, 0xb0, + 0x0e, 0x1e, 0xc1, 0x6a, 0x04, 0x8c, 0x2a, 0x41, 0x14, 0x4c, 0x2d, 0x84, 0x46, 0xb5, 0xf0, 0x39, + 0x84, 0xea, 0x61, 0x35, 0x88, 0x9a, 0xa9, 0x48, 0xa9, 0xac, 0x87, 0x4d, 0xa8, 0xf4, 0x31, 0x21, + 0x66, 0x0f, 0x13, 0x83, 0x60, 0x8f, 0x86, 0xc7, 0x86, 0x50, 0xd8, 0x61, 0x3b, 0xef, 0x23, 0x58, + 0x8d, 0x40, 0x01, 0xb6, 0xb0, 0x33, 0x8a, 0x0e, 0x0e, 0xb5, 0xf0, 0x85, 0x2e, 0xe5, 0x68, 0x0b, + 0x6a, 0x57, 0x18, 0x0f, 0x0c, 0xd3, 0x75, 0x46, 0x21, 0xa9, 0x38, 0x3e, 0x54, 0x99, 0x7c, 0x8f, + 0x8b, 0x39, 0xed, 0x25, 0x6c, 0xf2, 0x5a, 0xe4, 0x19, 0x32, 0x84, 0x5f, 0x06, 0x1f, 0xf5, 0xef, + 0x79, 0x92, 0x58, 0x67, 0x34, 0x27, 0x8c, 0xa5, 0xc3, 0x49, 0x9a, 0x82, 0x23, 0xde, 0x2d, 0x7e, + 0x03, 0x9f, 0x71, 0x4b, 0x32, 0x2f, 0x73, 0x4d, 0x29, 0x0b, 0x4d, 0x6d, 0x30, 0x1e, 0x9d, 0xd3, + 0xcc, 0xb1, 0x15, 0x76, 0x98, 0x0c, 0x0c, 0x0f, 0x40, 0xc2, 0x44, 0x71, 0xb9, 0x0e, 0x7b, 0x29, + 0xb4, 0x59, 0x9c, 0x62, 0x6a, 0x13, 0xd6, 0xc7, 0xa8, 0xc3, 0x5c, 0x24, 0xe8, 0x61, 0x21, 0xfd, + 0xfd, 0x04, 0x7d, 0x98, 0xb4, 0xd8, 0xc4, 0xb7, 0xb0, 0x26, 0xd2, 0x71, 0xe1, 0xfa, 0xd7, 0x86, + 0xe5, 0x7b, 0x34, 0xf0, 0x5d, 0xe3, 0xda, 0xf1, 0x6c, 0xff, 0x5a, 0x2d, 0x85, 0xfd, 0x3c, 0x41, + 0xde, 0xf2, 0xe8, 0xb3, 0xa7, 0xdf, 0x9a, 0xee, 0x10, 0xeb, 0x77, 0xb9, 0xf6, 0xa1, 0xeb, 0x5f, + 0x37, 0x85, 0xee, 0x6b, 0xae, 0x8a, 0xde, 0x40, 0x5d, 0x06, 0x7f, 0x16, 0x71, 0x79, 0x31, 0xf1, + 0xc7, 0x42, 0x7d, 0x9a, 0xf9, 0x19, 0xe4, 0x7d, 0x71, 0x22, 0xac, 0xf0, 0x11, 0xfe, 0xe9, 0xbc, + 0xf1, 0x71, 0xca, 0x51, 0xba, 0x44, 0x6b, 0xff, 0xcc, 0x40, 0x41, 0xb6, 0x3c, 0x7a, 0x09, 0x15, + 0x6a, 0x0d, 0x9c, 0x81, 0x61, 0x0a, 0x81, 0x9c, 0x5c, 0x0f, 0xe6, 0x0e, 0x89, 0x46, 0xd7, 0x1a, + 
0xb4, 0x06, 0xf2, 0xe1, 0x68, 0x45, 0x2f, 0x73, 0xf5, 0x90, 0xee, 0x05, 0x94, 0x86, 0x36, 0x89, + 0xc8, 0xc4, 0x58, 0xfb, 0x6c, 0x3e, 0xd9, 0x99, 0x4d, 0x62, 0x2a, 0x18, 0x46, 0x4f, 0xcc, 0x2f, + 0x9f, 0x5e, 0xe2, 0x20, 0xa2, 0xca, 0x2c, 0xf2, 0xeb, 0x94, 0xc1, 0x13, 0x7e, 0xf9, 0x89, 0xe7, + 0xfa, 0x1e, 0x94, 0x93, 0x7e, 0xb3, 0x93, 0xcf, 0xc4, 0x9a, 0xcb, 0x7a, 0x31, 0x5e, 0x06, 0x82, + 0xec, 0xc0, 0x0f, 0xc4, 0xe7, 0x49, 0x4e, 0xe7, 0xbf, 0xeb, 0x5b, 0x00, 0xb1, 0xb7, 0xa8, 0x0e, + 0xca, 0x85, 0xe3, 0x62, 0x3e, 0xe7, 0xc4, 0x79, 0x3c, 0x7a, 0xae, 0xb7, 0xa1, 0x9c, 0x74, 0x26, + 0x3a, 0x15, 0xa4, 0xe2, 0x53, 0x01, 0x7a, 0x08, 0xb9, 0x11, 0xcb, 0xae, 0x0c, 0xd1, 0x9d, 0xa9, + 0x02, 0xd8, 0xf3, 0xde, 0xea, 0x02, 0xb2, 0x5f, 0x84, 0x82, 0xf4, 0x54, 0xfb, 0x63, 0x86, 0x9d, + 0x6c, 0xe5, 0xb8, 0xdd, 0x85, 0x0c, 0x75, 0xc9, 0xfc, 0x6d, 0x37, 0x04, 0x36, 0xba, 0x2e, 0x8b, + 0x08, 0x03, 0xb3, 0x8f, 0x37, 0x1e, 0x18, 0x69, 0x77, 0xeb, 0x06, 0x2d, 0xbe, 0x86, 0xf0, 0xe9, + 0x68, 0x45, 0x17, 0x8a, 0xf5, 0x7f, 0xa5, 0x20, 0xd3, 0x75, 0x09, 0xfa, 0x1c, 0x2a, 0x84, 0x9a, + 0x9e, 0x6d, 0x06, 0xb6, 0x11, 0x2f, 0x8f, 0x45, 0x3e, 0x14, 0xb3, 0x91, 0x8f, 0xd6, 0x01, 0x44, + 0x22, 0xe3, 0xa3, 0xe4, 0xd1, 0x8a, 0x5e, 0xe4, 0x32, 0x0e, 0x78, 0x04, 0xab, 0xa2, 0xef, 0x2c, + 0x1c, 0x50, 0xe7, 0xc2, 0xb1, 0xd8, 0xa7, 0x65, 0x86, 0x67, 0xa4, 0xc6, 0x5f, 0x34, 0x63, 0x39, + 0x7a, 0x0c, 0x48, 0x36, 0x53, 0x12, 0x9d, 0xe5, 0xe8, 0x55, 0xf1, 0x26, 0x01, 0xdf, 0xaf, 0x42, + 0xd9, 0x72, 0x06, 0xcc, 0x3a, 0x19, 0x3a, 0x14, 0xd7, 0x4f, 0xa1, 0x32, 0xb6, 0xaa, 0x0f, 0x4e, + 0x4d, 0x01, 0x72, 0x7d, 0xdf, 0xc6, 0xae, 0xe6, 0x41, 0x39, 0xd9, 0x6b, 0x33, 0x89, 0xef, 0x24, + 0x89, 0x8b, 0x92, 0x02, 0x3d, 0x05, 0x30, 0x6d, 0xdb, 0x61, 0x5a, 0xd1, 0xae, 0x3e, 0xdb, 0x66, + 0x02, 0xa7, 0x9d, 0xc0, 0xed, 0xa4, 0x3d, 0x36, 0xc6, 0xfc, 0x21, 0x45, 0x3f, 0x02, 0x25, 0xbc, + 0x2d, 0x93, 0x75, 0xb1, 0x36, 0x45, 0xf5, 0x5c, 0x02, 0xf4, 0x08, 0xaa, 0x59, 0x80, 0x92, 0x6c, + 0x27, 0x8e, 0xd7, 0xc3, 0x01, 0xfb, 0x4c, 0x37, 0xd9, 0xe7, 0xbb, 0x58, 0x85, 0xa2, 0xcb, 0xa7, + 0x31, 0x23, 0xe9, 0xe5, 0x8d, 0xfc, 0x5d, 0x99, 0xf0, 0xd9, 0x1a, 0xb4, 0xbc, 0x0b, 0x9f, 0xf5, + 0x22, 0x9b, 0x21, 0x46, 0x7c, 0xa9, 0x50, 0xd1, 0x8b, 0x4c, 0x22, 0x6e, 0x35, 0x34, 0x31, 0xa1, + 0x0c, 0xcb, 0x94, 0x88, 0x34, 0x47, 0x94, 0x98, 0xb0, 0x69, 0x0a, 0xcc, 0x97, 0x50, 0xe3, 0x98, + 0x00, 0xd3, 0xc0, 0xf4, 0x48, 0xdf, 0xa1, 0x62, 0x60, 0x54, 0xf4, 0x5b, 0x4c, 0xae, 0xc7, 0x62, + 0x76, 0x46, 0xe1, 0xd0, 0x41, 0xe0, 0x9f, 0x63, 0xc2, 0x4b, 0xa7, 0xa2, 0x73, 0x07, 0x5e, 0x71, + 0x09, 0x3b, 0x4a, 0x72, 0xc0, 0xb9, 0x69, 0x5d, 0xf9, 0x17, 0xe2, 0x1b, 0x54, 0x9a, 0xdb, 0x17, + 0xa2, 0x08, 0x22, 0xe6, 0x29, 0xe1, 0x9b, 0xbc, 0x84, 0x88, 0xa5, 0x11, 0xf4, 0x00, 0x6e, 0x89, + 0x45, 0x79, 0xb6, 0x71, 0x4d, 0x2c, 0xd3, 0xc5, 0x7c, 0x37, 0xaf, 0xe8, 0x7c, 0x31, 0x1d, 0xcf, + 0x7e, 0xcd, 0x85, 0x11, 0x2e, 0xb0, 0x46, 0x21, 0x4e, 0x89, 0x71, 0xba, 0x35, 0x92, 0xb8, 0x35, + 0x50, 0x04, 0x8e, 0xfa, 0x7c, 0x23, 0xad, 0xe8, 0x05, 0x0e, 0xa0, 0x7e, 0xf4, 0xca, 0xa4, 0x3e, + 0xdf, 0x04, 0xe5, 0xab, 0x3d, 0xea, 0xa3, 0x0d, 0xe9, 0x28, 0xf3, 0xa2, 0x4f, 0x08, 0xdf, 0xc6, + 0xe4, 0x6a, 0x3b, 0x9e, 0xfd, 0x92, 0x90, 0x08, 0xc1, 0xec, 0x33, 0x44, 0x39, 0x46, 0xe8, 0xd6, + 0x88, 0x21, 0xc2, 0xc5, 0x0e, 0x3d, 0xd3, 0xba, 0xc2, 0xb6, 0x5a, 0x89, 0x17, 0x7b, 0x26, 0x44, + 0x51, 0x4c, 0x89, 0x40, 0x54, 0x13, 0x56, 0x04, 0xe0, 0x1e, 0xf0, 0x84, 0x1a, 0xae, 0x4f, 0xa8, + 0x7a, 0x8b, 0xbf, 0xe6, 0x3e, 0x9f, 0xf8, 0x84, 0x46, 0x06, 0x64, 0xf2, 0xd4, 0x5a, 0x6c, 0x40, + 0x26, 0x2e, 0x82, 0x5c, 
0x30, 0x3a, 0x4a, 0xd4, 0xd5, 0x18, 0x72, 0x28, 0x44, 0xe8, 0x31, 0xdc, + 0x16, 0x26, 0xd8, 0x31, 0x81, 0x9d, 0x94, 0xc5, 0xf9, 0x0b, 0x71, 0x24, 0xaf, 0x8e, 0x13, 0x93, + 0xf0, 0x63, 0xa7, 0x3c, 0xd8, 0xa1, 0x18, 0x6e, 0x5a, 0x57, 0x02, 0x7d, 0x3b, 0xae, 0x19, 0x86, + 0xde, 0xb3, 0xae, 0x38, 0x78, 0x9a, 0x3b, 0xc0, 0xd6, 0x48, 0xbd, 0x33, 0xcd, 0xad, 0x63, 0x6b, + 0x34, 0xcd, 0xcd, 0xd1, 0x1f, 0x4d, 0x71, 0x73, 0x70, 0x18, 0x9a, 0x41, 0x9f, 0x0e, 0xd5, 0xbb, + 0x71, 0x68, 0x5e, 0xf5, 0xe9, 0x10, 0x3d, 0x84, 0xd5, 0x28, 0x3b, 0x84, 0xd0, 0xcb, 0x00, 0x93, + 0x4b, 0xf5, 0xe3, 0x44, 0x61, 0x5b, 0xa3, 0x8e, 0x14, 0x27, 0x2a, 0x84, 0xaa, 0x6a, 0xb2, 0x42, + 0x68, 0x94, 0x9f, 0x80, 0xd2, 0x91, 0x19, 0xa8, 0x6b, 0x89, 0x1c, 0x73, 0x49, 0x64, 0x87, 0xd5, + 0x49, 0x64, 0xa7, 0x1e, 0xdb, 0xe9, 0x78, 0x76, 0x64, 0x27, 0xec, 0x47, 0x86, 0xb5, 0xae, 0x3d, + 0x5b, 0xbd, 0x17, 0x27, 0xa3, 0xe3, 0xd9, 0xcd, 0x6b, 0x2f, 0x2e, 0x08, 0xd3, 0x1e, 0xb1, 0xa2, + 0xba, 0x1f, 0x1b, 0xdc, 0xe3, 0x12, 0x76, 0xf2, 0x97, 0x39, 0xf7, 0x03, 0x1b, 0x07, 0x8e, 0xd7, + 0x53, 0x3f, 0xe1, 0xa0, 0xaa, 0x48, 0x7b, 0x28, 0xd5, 0xce, 0xe1, 0xa3, 0x17, 0x98, 0x76, 0xfd, + 0x81, 0xfc, 0x86, 0x24, 0x3a, 0xfe, 0xed, 0x10, 0x13, 0xca, 0x0e, 0xdb, 0xfc, 0x9b, 0xc1, 0x98, + 0xba, 0xc1, 0xa8, 0x72, 0x79, 0x33, 0xba, 0x58, 0x58, 0x87, 0x52, 0xdf, 0xfc, 0xc1, 0x08, 0x30, + 0x19, 0xba, 0x94, 0xc8, 0xcf, 0x06, 0xe8, 0x9b, 0x3f, 0xe8, 0x42, 0xa2, 0x19, 0x70, 0x77, 0xd2, + 0x06, 0x19, 0xf8, 0x1e, 0xc1, 0xe8, 0x2b, 0x28, 0x48, 0x7a, 0x35, 0xc5, 0x8f, 0x58, 0x6b, 0xf3, + 0xaf, 0xb3, 0x42, 0x24, 0xaa, 0x41, 0x06, 0x7b, 0xe2, 0xf3, 0x44, 0xd1, 0xd9, 0x4f, 0xed, 0x57, + 0xb0, 0xfa, 0x02, 0x53, 0xf1, 0xc9, 0x1c, 0x2d, 0xe0, 0x01, 0xfb, 0xf8, 0x61, 0x0b, 0x88, 0xaf, + 0x13, 0x52, 0xe1, 0x77, 0x8a, 0x19, 0x48, 0xf4, 0x32, 0xee, 0xbf, 0x01, 0x94, 0x64, 0x97, 0xae, + 0x3f, 0x81, 0xbc, 0x20, 0x96, 0x9e, 0xab, 0x73, 0xaf, 0x12, 0x24, 0x6e, 0x86, 0xdf, 0xdb, 0x50, + 0x8b, 0x98, 0x43, 0xb7, 0xc7, 0xee, 0x3f, 0x52, 0xe3, 0xf7, 0x1f, 0xda, 0x41, 0x62, 0xa1, 0x33, + 0x3d, 0x49, 0x2d, 0xe3, 0x89, 0xf6, 0x3b, 0xf8, 0x38, 0xa2, 0x11, 0x3b, 0x06, 0x59, 0xc6, 0x7c, + 0x22, 0xa4, 0xd1, 0x1d, 0x50, 0x3a, 0x19, 0xd2, 0xf0, 0x22, 0x68, 0x22, 0xa4, 0x99, 0xa9, 0x90, + 0x5e, 0x82, 0x3a, 0xed, 0x80, 0x5c, 0xce, 0xf8, 0xff, 0x03, 0x52, 0xef, 0xf3, 0xff, 0x80, 0x19, + 0x21, 0xde, 0xe5, 0x11, 0x8b, 0xee, 0xe4, 0xc4, 0x22, 0x6f, 0xbe, 0x97, 0xd3, 0x5a, 0x3c, 0xe1, + 0x91, 0xce, 0xac, 0x5a, 0x4d, 0x2d, 0x57, 0xab, 0xda, 0xd7, 0x70, 0x87, 0x2d, 0x34, 0x71, 0x5b, + 0x27, 0x3c, 0x98, 0xba, 0xb1, 0x4b, 0x4d, 0xdf, 0xd8, 0x69, 0x67, 0xbc, 0x37, 0x93, 0xca, 0xd2, + 0x95, 0x9f, 0x02, 0xc4, 0xc0, 0xf9, 0xff, 0x5b, 0x4b, 0x68, 0x26, 0xf0, 0x5a, 0x4b, 0x54, 0x9d, + 0x0c, 0x5a, 0x9c, 0xf6, 0x28, 0xa7, 0xa9, 0x89, 0x7b, 0x3d, 0x15, 0x0a, 0x64, 0xd8, 0xef, 0x9b, + 0xc1, 0x5b, 0x19, 0xd9, 0xf0, 0x31, 0xac, 0x47, 0x49, 0x95, 0xa8, 0x47, 0x71, 0xf3, 0x35, 0xbf, + 0x1e, 0x85, 0x86, 0xc4, 0xed, 0xfe, 0x39, 0x07, 0x8a, 0x0c, 0xdd, 0x3b, 0x64, 0x41, 0x75, 0x7c, + 0x5a, 0xa0, 0x2f, 0xa6, 0x09, 0x66, 0xce, 0xac, 0xfa, 0xd6, 0x62, 0xa0, 0xf4, 0xf1, 0x35, 0x40, + 0xdc, 0xd3, 0x68, 0x73, 0xa6, 0xde, 0xf8, 0x3c, 0xa9, 0x7f, 0x76, 0x33, 0x48, 0x12, 0x77, 0xa1, + 0x18, 0x49, 0x91, 0x76, 0x83, 0x4a, 0x48, 0xbb, 0x79, 0x23, 0x46, 0xb2, 0x3a, 0x89, 0x41, 0x21, + 0xfb, 0x05, 0x7d, 0x79, 0x83, 0xe2, 0x78, 0x53, 0xd7, 0x1f, 0x2e, 0x03, 0x1d, 0x8b, 0x4c, 0xf8, + 0xef, 0xdb, 0xd9, 0xde, 0x8d, 0xb7, 0xd3, 0x9c, 0xc8, 0x4c, 0xf6, 0xcf, 0xf7, 0x50, 0x19, 0xab, + 0x66, 0xf4, 0x60, 0xb6, 0x57, 0x93, 0xbd, 0x52, 
0xff, 0x62, 0x21, 0x6e, 0x3c, 0xf6, 0xe2, 0xa2, + 0x70, 0x4e, 0xec, 0x93, 0x55, 0x3f, 0x2f, 0xf6, 0x63, 0xe5, 0xbc, 0xff, 0x06, 0x6e, 0x3b, 0xfe, + 0x14, 0x70, 0xbf, 0x12, 0x16, 0xec, 0x2b, 0x76, 0x24, 0x7f, 0x95, 0xfa, 0xe5, 0x13, 0x79, 0x44, + 0xef, 0xf9, 0xae, 0xe9, 0xf5, 0x1a, 0x7e, 0xd0, 0xdb, 0x1e, 0xff, 0xb7, 0x3d, 0x7b, 0x0a, 0x77, + 0xd3, 0x77, 0xc6, 0x68, 0xe7, 0x3c, 0xcf, 0x4f, 0xf3, 0x5f, 0xfd, 0x37, 0x00, 0x00, 0xff, 0xff, + 0x54, 0xae, 0x0b, 0x93, 0xdf, 0x1f, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/channelz/service/func_linux.go b/vendor/google.golang.org/grpc/channelz/service/func_linux.go new file mode 100644 index 0000000..192c3a1 --- /dev/null +++ b/vendor/google.golang.org/grpc/channelz/service/func_linux.go @@ -0,0 +1,105 @@ +// +build !appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package service + +import ( + "github.com/golang/protobuf/ptypes" + channelzpb "google.golang.org/grpc/channelz/grpc_channelz_v1" + "google.golang.org/grpc/internal/channelz" +) + +func sockoptToProto(skopts *channelz.SocketOptionData) []*channelzpb.SocketOption { + var opts []*channelzpb.SocketOption + if skopts.Linger != nil { + additional, err := ptypes.MarshalAny(&channelzpb.SocketOptionLinger{ + Active: skopts.Linger.Onoff != 0, + Duration: convertToPtypesDuration(int64(skopts.Linger.Linger), 0), + }) + if err == nil { + opts = append(opts, &channelzpb.SocketOption{ + Name: "SO_LINGER", + Additional: additional, + }) + } + } + if skopts.RecvTimeout != nil { + additional, err := ptypes.MarshalAny(&channelzpb.SocketOptionTimeout{ + Duration: convertToPtypesDuration(int64(skopts.RecvTimeout.Sec), int64(skopts.RecvTimeout.Usec)), + }) + if err == nil { + opts = append(opts, &channelzpb.SocketOption{ + Name: "SO_RCVTIMEO", + Additional: additional, + }) + } + } + if skopts.SendTimeout != nil { + additional, err := ptypes.MarshalAny(&channelzpb.SocketOptionTimeout{ + Duration: convertToPtypesDuration(int64(skopts.SendTimeout.Sec), int64(skopts.SendTimeout.Usec)), + }) + if err == nil { + opts = append(opts, &channelzpb.SocketOption{ + Name: "SO_SNDTIMEO", + Additional: additional, + }) + } + } + if skopts.TCPInfo != nil { + additional, err := ptypes.MarshalAny(&channelzpb.SocketOptionTcpInfo{ + TcpiState: uint32(skopts.TCPInfo.State), + TcpiCaState: uint32(skopts.TCPInfo.Ca_state), + TcpiRetransmits: uint32(skopts.TCPInfo.Retransmits), + TcpiProbes: uint32(skopts.TCPInfo.Probes), + TcpiBackoff: uint32(skopts.TCPInfo.Backoff), + TcpiOptions: uint32(skopts.TCPInfo.Options), + // https://golang.org/pkg/syscall/#TCPInfo + // TCPInfo struct does not contain info about TcpiSndWscale and TcpiRcvWscale. 
+ TcpiRto: skopts.TCPInfo.Rto, + TcpiAto: skopts.TCPInfo.Ato, + TcpiSndMss: skopts.TCPInfo.Snd_mss, + TcpiRcvMss: skopts.TCPInfo.Rcv_mss, + TcpiUnacked: skopts.TCPInfo.Unacked, + TcpiSacked: skopts.TCPInfo.Sacked, + TcpiLost: skopts.TCPInfo.Lost, + TcpiRetrans: skopts.TCPInfo.Retrans, + TcpiFackets: skopts.TCPInfo.Fackets, + TcpiLastDataSent: skopts.TCPInfo.Last_data_sent, + TcpiLastAckSent: skopts.TCPInfo.Last_ack_sent, + TcpiLastDataRecv: skopts.TCPInfo.Last_data_recv, + TcpiLastAckRecv: skopts.TCPInfo.Last_ack_recv, + TcpiPmtu: skopts.TCPInfo.Pmtu, + TcpiRcvSsthresh: skopts.TCPInfo.Rcv_ssthresh, + TcpiRtt: skopts.TCPInfo.Rtt, + TcpiRttvar: skopts.TCPInfo.Rttvar, + TcpiSndSsthresh: skopts.TCPInfo.Snd_ssthresh, + TcpiSndCwnd: skopts.TCPInfo.Snd_cwnd, + TcpiAdvmss: skopts.TCPInfo.Advmss, + TcpiReordering: skopts.TCPInfo.Reordering, + }) + if err == nil { + opts = append(opts, &channelzpb.SocketOption{ + Name: "TCP_INFO", + Additional: additional, + }) + } + } + return opts +} diff --git a/vendor/google.golang.org/grpc/channelz/service/func_nonlinux.go b/vendor/google.golang.org/grpc/channelz/service/func_nonlinux.go new file mode 100644 index 0000000..eb53334 --- /dev/null +++ b/vendor/google.golang.org/grpc/channelz/service/func_nonlinux.go @@ -0,0 +1,30 @@ +// +build !linux appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package service + +import ( + channelzpb "google.golang.org/grpc/channelz/grpc_channelz_v1" + "google.golang.org/grpc/internal/channelz" +) + +func sockoptToProto(skopts *channelz.SocketOptionData) []*channelzpb.SocketOption { + return nil +} diff --git a/vendor/google.golang.org/grpc/channelz/service/service.go b/vendor/google.golang.org/grpc/channelz/service/service.go new file mode 100644 index 0000000..0fad08f --- /dev/null +++ b/vendor/google.golang.org/grpc/channelz/service/service.go @@ -0,0 +1,346 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate ./regenerate.sh + +// Package service provides an implementation for channelz service server. 
+package service + +import ( + "context" + "net" + "time" + + "github.com/golang/protobuf/ptypes" + durpb "github.com/golang/protobuf/ptypes/duration" + wrpb "github.com/golang/protobuf/ptypes/wrappers" + "google.golang.org/grpc" + channelzgrpc "google.golang.org/grpc/channelz/grpc_channelz_v1" + channelzpb "google.golang.org/grpc/channelz/grpc_channelz_v1" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/status" +) + +func init() { + channelz.TurnOn() +} + +func convertToPtypesDuration(sec int64, usec int64) *durpb.Duration { + return ptypes.DurationProto(time.Duration(sec*1e9 + usec*1e3)) +} + +// RegisterChannelzServiceToServer registers the channelz service to the given server. +func RegisterChannelzServiceToServer(s *grpc.Server) { + channelzgrpc.RegisterChannelzServer(s, newCZServer()) +} + +func newCZServer() channelzgrpc.ChannelzServer { + return &serverImpl{} +} + +type serverImpl struct{} + +func connectivityStateToProto(s connectivity.State) *channelzpb.ChannelConnectivityState { + switch s { + case connectivity.Idle: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_IDLE} + case connectivity.Connecting: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_CONNECTING} + case connectivity.Ready: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_READY} + case connectivity.TransientFailure: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_TRANSIENT_FAILURE} + case connectivity.Shutdown: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_SHUTDOWN} + default: + return &channelzpb.ChannelConnectivityState{State: channelzpb.ChannelConnectivityState_UNKNOWN} + } +} + +func channelTraceToProto(ct *channelz.ChannelTrace) *channelzpb.ChannelTrace { + pbt := &channelzpb.ChannelTrace{} + pbt.NumEventsLogged = ct.EventNum + if ts, err := ptypes.TimestampProto(ct.CreationTime); err == nil { + pbt.CreationTimestamp = ts + } + var events []*channelzpb.ChannelTraceEvent + for _, e := range ct.Events { + cte := &channelzpb.ChannelTraceEvent{ + Description: e.Desc, + Severity: channelzpb.ChannelTraceEvent_Severity(e.Severity), + } + if ts, err := ptypes.TimestampProto(e.Timestamp); err == nil { + cte.Timestamp = ts + } + if e.RefID != 0 { + switch e.RefType { + case channelz.RefChannel: + cte.ChildRef = &channelzpb.ChannelTraceEvent_ChannelRef{ChannelRef: &channelzpb.ChannelRef{ChannelId: e.RefID, Name: e.RefName}} + case channelz.RefSubChannel: + cte.ChildRef = &channelzpb.ChannelTraceEvent_SubchannelRef{SubchannelRef: &channelzpb.SubchannelRef{SubchannelId: e.RefID, Name: e.RefName}} + } + } + events = append(events, cte) + } + pbt.Events = events + return pbt +} + +func channelMetricToProto(cm *channelz.ChannelMetric) *channelzpb.Channel { + c := &channelzpb.Channel{} + c.Ref = &channelzpb.ChannelRef{ChannelId: cm.ID, Name: cm.RefName} + + c.Data = &channelzpb.ChannelData{ + State: connectivityStateToProto(cm.ChannelData.State), + Target: cm.ChannelData.Target, + CallsStarted: cm.ChannelData.CallsStarted, + CallsSucceeded: cm.ChannelData.CallsSucceeded, + CallsFailed: cm.ChannelData.CallsFailed, + } + if ts, err := ptypes.TimestampProto(cm.ChannelData.LastCallStartedTimestamp); err == nil { + c.Data.LastCallStartedTimestamp = ts + } + nestedChans := 
make([]*channelzpb.ChannelRef, 0, len(cm.NestedChans)) + for id, ref := range cm.NestedChans { + nestedChans = append(nestedChans, &channelzpb.ChannelRef{ChannelId: id, Name: ref}) + } + c.ChannelRef = nestedChans + + subChans := make([]*channelzpb.SubchannelRef, 0, len(cm.SubChans)) + for id, ref := range cm.SubChans { + subChans = append(subChans, &channelzpb.SubchannelRef{SubchannelId: id, Name: ref}) + } + c.SubchannelRef = subChans + + sockets := make([]*channelzpb.SocketRef, 0, len(cm.Sockets)) + for id, ref := range cm.Sockets { + sockets = append(sockets, &channelzpb.SocketRef{SocketId: id, Name: ref}) + } + c.SocketRef = sockets + c.Data.Trace = channelTraceToProto(cm.Trace) + return c +} + +func subChannelMetricToProto(cm *channelz.SubChannelMetric) *channelzpb.Subchannel { + sc := &channelzpb.Subchannel{} + sc.Ref = &channelzpb.SubchannelRef{SubchannelId: cm.ID, Name: cm.RefName} + + sc.Data = &channelzpb.ChannelData{ + State: connectivityStateToProto(cm.ChannelData.State), + Target: cm.ChannelData.Target, + CallsStarted: cm.ChannelData.CallsStarted, + CallsSucceeded: cm.ChannelData.CallsSucceeded, + CallsFailed: cm.ChannelData.CallsFailed, + } + if ts, err := ptypes.TimestampProto(cm.ChannelData.LastCallStartedTimestamp); err == nil { + sc.Data.LastCallStartedTimestamp = ts + } + nestedChans := make([]*channelzpb.ChannelRef, 0, len(cm.NestedChans)) + for id, ref := range cm.NestedChans { + nestedChans = append(nestedChans, &channelzpb.ChannelRef{ChannelId: id, Name: ref}) + } + sc.ChannelRef = nestedChans + + subChans := make([]*channelzpb.SubchannelRef, 0, len(cm.SubChans)) + for id, ref := range cm.SubChans { + subChans = append(subChans, &channelzpb.SubchannelRef{SubchannelId: id, Name: ref}) + } + sc.SubchannelRef = subChans + + sockets := make([]*channelzpb.SocketRef, 0, len(cm.Sockets)) + for id, ref := range cm.Sockets { + sockets = append(sockets, &channelzpb.SocketRef{SocketId: id, Name: ref}) + } + sc.SocketRef = sockets + sc.Data.Trace = channelTraceToProto(cm.Trace) + return sc +} + +func securityToProto(se credentials.ChannelzSecurityValue) *channelzpb.Security { + switch v := se.(type) { + case *credentials.TLSChannelzSecurityValue: + return &channelzpb.Security{Model: &channelzpb.Security_Tls_{Tls: &channelzpb.Security_Tls{ + CipherSuite: &channelzpb.Security_Tls_StandardName{StandardName: v.StandardName}, + LocalCertificate: v.LocalCertificate, + RemoteCertificate: v.RemoteCertificate, + }}} + case *credentials.OtherChannelzSecurityValue: + otherSecurity := &channelzpb.Security_OtherSecurity{ + Name: v.Name, + } + if anyval, err := ptypes.MarshalAny(v.Value); err == nil { + otherSecurity.Value = anyval + } + return &channelzpb.Security{Model: &channelzpb.Security_Other{Other: otherSecurity}} + } + return nil +} + +func addrToProto(a net.Addr) *channelzpb.Address { + switch a.Network() { + case "udp": + // TODO: Address_OtherAddress{}. Need proto def for Value. + case "ip": + // Note zone info is discarded through the conversion. + return &channelzpb.Address{Address: &channelzpb.Address_TcpipAddress{TcpipAddress: &channelzpb.Address_TcpIpAddress{IpAddress: a.(*net.IPAddr).IP}}} + case "ip+net": + // Note mask info is discarded through the conversion. + return &channelzpb.Address{Address: &channelzpb.Address_TcpipAddress{TcpipAddress: &channelzpb.Address_TcpIpAddress{IpAddress: a.(*net.IPNet).IP}}} + case "tcp": + // Note zone info is discarded through the conversion. 
+ return &channelzpb.Address{Address: &channelzpb.Address_TcpipAddress{TcpipAddress: &channelzpb.Address_TcpIpAddress{IpAddress: a.(*net.TCPAddr).IP, Port: int32(a.(*net.TCPAddr).Port)}}} + case "unix", "unixgram", "unixpacket": + return &channelzpb.Address{Address: &channelzpb.Address_UdsAddress_{UdsAddress: &channelzpb.Address_UdsAddress{Filename: a.String()}}} + default: + } + return &channelzpb.Address{} +} + +func socketMetricToProto(sm *channelz.SocketMetric) *channelzpb.Socket { + s := &channelzpb.Socket{} + s.Ref = &channelzpb.SocketRef{SocketId: sm.ID, Name: sm.RefName} + + s.Data = &channelzpb.SocketData{ + StreamsStarted: sm.SocketData.StreamsStarted, + StreamsSucceeded: sm.SocketData.StreamsSucceeded, + StreamsFailed: sm.SocketData.StreamsFailed, + MessagesSent: sm.SocketData.MessagesSent, + MessagesReceived: sm.SocketData.MessagesReceived, + KeepAlivesSent: sm.SocketData.KeepAlivesSent, + } + if ts, err := ptypes.TimestampProto(sm.SocketData.LastLocalStreamCreatedTimestamp); err == nil { + s.Data.LastLocalStreamCreatedTimestamp = ts + } + if ts, err := ptypes.TimestampProto(sm.SocketData.LastRemoteStreamCreatedTimestamp); err == nil { + s.Data.LastRemoteStreamCreatedTimestamp = ts + } + if ts, err := ptypes.TimestampProto(sm.SocketData.LastMessageSentTimestamp); err == nil { + s.Data.LastMessageSentTimestamp = ts + } + if ts, err := ptypes.TimestampProto(sm.SocketData.LastMessageReceivedTimestamp); err == nil { + s.Data.LastMessageReceivedTimestamp = ts + } + s.Data.LocalFlowControlWindow = &wrpb.Int64Value{Value: sm.SocketData.LocalFlowControlWindow} + s.Data.RemoteFlowControlWindow = &wrpb.Int64Value{Value: sm.SocketData.RemoteFlowControlWindow} + + if sm.SocketData.SocketOptions != nil { + s.Data.Option = sockoptToProto(sm.SocketData.SocketOptions) + } + if sm.SocketData.Security != nil { + s.Security = securityToProto(sm.SocketData.Security) + } + + if sm.SocketData.LocalAddr != nil { + s.Local = addrToProto(sm.SocketData.LocalAddr) + } + if sm.SocketData.RemoteAddr != nil { + s.Remote = addrToProto(sm.SocketData.RemoteAddr) + } + s.RemoteName = sm.SocketData.RemoteName + return s +} + +func (s *serverImpl) GetTopChannels(ctx context.Context, req *channelzpb.GetTopChannelsRequest) (*channelzpb.GetTopChannelsResponse, error) { + metrics, end := channelz.GetTopChannels(req.GetStartChannelId(), req.GetMaxResults()) + resp := &channelzpb.GetTopChannelsResponse{} + for _, m := range metrics { + resp.Channel = append(resp.Channel, channelMetricToProto(m)) + } + resp.End = end + return resp, nil +} + +func serverMetricToProto(sm *channelz.ServerMetric) *channelzpb.Server { + s := &channelzpb.Server{} + s.Ref = &channelzpb.ServerRef{ServerId: sm.ID, Name: sm.RefName} + + s.Data = &channelzpb.ServerData{ + CallsStarted: sm.ServerData.CallsStarted, + CallsSucceeded: sm.ServerData.CallsSucceeded, + CallsFailed: sm.ServerData.CallsFailed, + } + + if ts, err := ptypes.TimestampProto(sm.ServerData.LastCallStartedTimestamp); err == nil { + s.Data.LastCallStartedTimestamp = ts + } + sockets := make([]*channelzpb.SocketRef, 0, len(sm.ListenSockets)) + for id, ref := range sm.ListenSockets { + sockets = append(sockets, &channelzpb.SocketRef{SocketId: id, Name: ref}) + } + s.ListenSocket = sockets + return s +} + +func (s *serverImpl) GetServers(ctx context.Context, req *channelzpb.GetServersRequest) (*channelzpb.GetServersResponse, error) { + metrics, end := channelz.GetServers(req.GetStartServerId(), req.GetMaxResults()) + resp := &channelzpb.GetServersResponse{} + for _, m := range 
metrics { + resp.Server = append(resp.Server, serverMetricToProto(m)) + } + resp.End = end + return resp, nil +} + +func (s *serverImpl) GetServerSockets(ctx context.Context, req *channelzpb.GetServerSocketsRequest) (*channelzpb.GetServerSocketsResponse, error) { + metrics, end := channelz.GetServerSockets(req.GetServerId(), req.GetStartSocketId(), req.GetMaxResults()) + resp := &channelzpb.GetServerSocketsResponse{} + for _, m := range metrics { + resp.SocketRef = append(resp.SocketRef, &channelzpb.SocketRef{SocketId: m.ID, Name: m.RefName}) + } + resp.End = end + return resp, nil +} + +func (s *serverImpl) GetChannel(ctx context.Context, req *channelzpb.GetChannelRequest) (*channelzpb.GetChannelResponse, error) { + var metric *channelz.ChannelMetric + if metric = channelz.GetChannel(req.GetChannelId()); metric == nil { + return nil, status.Errorf(codes.NotFound, "requested channel %d not found", req.GetChannelId()) + } + resp := &channelzpb.GetChannelResponse{Channel: channelMetricToProto(metric)} + return resp, nil +} + +func (s *serverImpl) GetSubchannel(ctx context.Context, req *channelzpb.GetSubchannelRequest) (*channelzpb.GetSubchannelResponse, error) { + var metric *channelz.SubChannelMetric + if metric = channelz.GetSubChannel(req.GetSubchannelId()); metric == nil { + return nil, status.Errorf(codes.NotFound, "requested sub channel %d not found", req.GetSubchannelId()) + } + resp := &channelzpb.GetSubchannelResponse{Subchannel: subChannelMetricToProto(metric)} + return resp, nil +} + +func (s *serverImpl) GetSocket(ctx context.Context, req *channelzpb.GetSocketRequest) (*channelzpb.GetSocketResponse, error) { + var metric *channelz.SocketMetric + if metric = channelz.GetSocket(req.GetSocketId()); metric == nil { + return nil, status.Errorf(codes.NotFound, "requested socket %d not found", req.GetSocketId()) + } + resp := &channelzpb.GetSocketResponse{Socket: socketMetricToProto(metric)} + return resp, nil +} + +func (s *serverImpl) GetServer(ctx context.Context, req *channelzpb.GetServerRequest) (*channelzpb.GetServerResponse, error) { + var metric *channelz.ServerMetric + if metric = channelz.GetServer(req.GetServerId()); metric == nil { + return nil, status.Errorf(codes.NotFound, "requested server %d not found", req.GetServerId()) + } + resp := &channelzpb.GetServerResponse{Server: serverMetricToProto(metric)} + return resp, nil +} diff --git a/vendor/google.golang.org/grpc/clientconn.go b/vendor/google.golang.org/grpc/clientconn.go new file mode 100644 index 0000000..bd2d2b3 --- /dev/null +++ b/vendor/google.golang.org/grpc/clientconn.go @@ -0,0 +1,1356 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "errors" + "fmt" + "math" + "net" + "reflect" + "strings" + "sync" + "sync/atomic" + "time" + + "google.golang.org/grpc/balancer" + _ "google.golang.org/grpc/balancer/roundrobin" // To register roundrobin. 
+ "google.golang.org/grpc/codes" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/envconfig" + "google.golang.org/grpc/internal/grpcsync" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/resolver" + _ "google.golang.org/grpc/resolver/dns" // To register dns resolver. + _ "google.golang.org/grpc/resolver/passthrough" // To register passthrough resolver. + "google.golang.org/grpc/status" +) + +const ( + // minimum time to give a connection to complete + minConnectTimeout = 20 * time.Second + // must match grpclbName in grpclb/grpclb.go + grpclbName = "grpclb" +) + +var ( + // ErrClientConnClosing indicates that the operation is illegal because + // the ClientConn is closing. + // + // Deprecated: this error should not be relied upon by users; use the status + // code of Canceled instead. + ErrClientConnClosing = status.Error(codes.Canceled, "grpc: the client connection is closing") + // errConnDrain indicates that the connection starts to be drained and does not accept any new RPCs. + errConnDrain = errors.New("grpc: the connection is drained") + // errConnClosing indicates that the connection is closing. + errConnClosing = errors.New("grpc: the connection is closing") + // errBalancerClosed indicates that the balancer is closed. + errBalancerClosed = errors.New("grpc: balancer is closed") + // invalidDefaultServiceConfigErrPrefix is used to prefix the json parsing error for the default + // service config. + invalidDefaultServiceConfigErrPrefix = "grpc: the provided default service config is invalid" +) + +// The following errors are returned from Dial and DialContext +var ( + // errNoTransportSecurity indicates that there is no transport security + // being set for ClientConn. Users should either set one or explicitly + // call WithInsecure DialOption to disable security. + errNoTransportSecurity = errors.New("grpc: no transport security set (use grpc.WithInsecure() explicitly or set credentials)") + // errTransportCredsAndBundle indicates that creds bundle is used together + // with other individual Transport Credentials. + errTransportCredsAndBundle = errors.New("grpc: credentials.Bundle may not be used with individual TransportCredentials") + // errTransportCredentialsMissing indicates that users want to transmit security + // information (e.g., OAuth2 token) which requires secure connection on an insecure + // connection. + errTransportCredentialsMissing = errors.New("grpc: the credentials require transport level security (use grpc.WithTransportCredentials() to set)") + // errCredentialsConflict indicates that grpc.WithTransportCredentials() + // and grpc.WithInsecure() are both called for a connection. + errCredentialsConflict = errors.New("grpc: transport credentials are set for an insecure connection (grpc.WithTransportCredentials() and grpc.WithInsecure() are both called)") +) + +const ( + defaultClientMaxReceiveMessageSize = 1024 * 1024 * 4 + defaultClientMaxSendMessageSize = math.MaxInt32 + // http2IOBufSize specifies the buffer size for sending frames. + defaultWriteBufSize = 32 * 1024 + defaultReadBufSize = 32 * 1024 +) + +// Dial creates a client connection to the given target. +func Dial(target string, opts ...DialOption) (*ClientConn, error) { + return DialContext(context.Background(), target, opts...) 
+} + +// DialContext creates a client connection to the given target. By default, it's +// a non-blocking dial (the function won't wait for connections to be +// established, and connecting happens in the background). To make it a blocking +// dial, use WithBlock() dial option. +// +// In the non-blocking case, the ctx does not act against the connection. It +// only controls the setup steps. +// +// In the blocking case, ctx can be used to cancel or expire the pending +// connection. Once this function returns, the cancellation and expiration of +// ctx will be noop. Users should call ClientConn.Close to terminate all the +// pending operations after this function returns. +// +// The target name syntax is defined in +// https://github.com/grpc/grpc/blob/master/doc/naming.md. +// e.g. to use dns resolver, a "dns:///" prefix should be applied to the target. +func DialContext(ctx context.Context, target string, opts ...DialOption) (conn *ClientConn, err error) { + cc := &ClientConn{ + target: target, + csMgr: &connectivityStateManager{}, + conns: make(map[*addrConn]struct{}), + dopts: defaultDialOptions(), + blockingpicker: newPickerWrapper(), + czData: new(channelzData), + firstResolveEvent: grpcsync.NewEvent(), + } + cc.retryThrottler.Store((*retryThrottler)(nil)) + cc.ctx, cc.cancel = context.WithCancel(context.Background()) + + for _, opt := range opts { + opt.apply(&cc.dopts) + } + + defer func() { + if err != nil { + cc.Close() + } + }() + + if channelz.IsOn() { + if cc.dopts.channelzParentID != 0 { + cc.channelzID = channelz.RegisterChannel(&channelzChannel{cc}, cc.dopts.channelzParentID, target) + channelz.AddTraceEvent(cc.channelzID, &channelz.TraceEventDesc{ + Desc: "Channel Created", + Severity: channelz.CtINFO, + Parent: &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Nested Channel(id:%d) created", cc.channelzID), + Severity: channelz.CtINFO, + }, + }) + } else { + cc.channelzID = channelz.RegisterChannel(&channelzChannel{cc}, 0, target) + channelz.AddTraceEvent(cc.channelzID, &channelz.TraceEventDesc{ + Desc: "Channel Created", + Severity: channelz.CtINFO, + }) + } + cc.csMgr.channelzID = cc.channelzID + } + + if !cc.dopts.insecure { + if cc.dopts.copts.TransportCredentials == nil && cc.dopts.copts.CredsBundle == nil { + return nil, errNoTransportSecurity + } + if cc.dopts.copts.TransportCredentials != nil && cc.dopts.copts.CredsBundle != nil { + return nil, errTransportCredsAndBundle + } + } else { + if cc.dopts.copts.TransportCredentials != nil || cc.dopts.copts.CredsBundle != nil { + return nil, errCredentialsConflict + } + for _, cd := range cc.dopts.copts.PerRPCCredentials { + if cd.RequireTransportSecurity() { + return nil, errTransportCredentialsMissing + } + } + } + + if cc.dopts.defaultServiceConfigRawJSON != nil { + sc, err := parseServiceConfig(*cc.dopts.defaultServiceConfigRawJSON) + if err != nil { + return nil, fmt.Errorf("%s: %v", invalidDefaultServiceConfigErrPrefix, err) + } + cc.dopts.defaultServiceConfig = sc + } + cc.mkp = cc.dopts.copts.KeepaliveParams + + if cc.dopts.copts.Dialer == nil { + cc.dopts.copts.Dialer = newProxyDialer( + func(ctx context.Context, addr string) (net.Conn, error) { + network, addr := parseDialTarget(addr) + return (&net.Dialer{}).DialContext(ctx, network, addr) + }, + ) + } + + if cc.dopts.copts.UserAgent != "" { + cc.dopts.copts.UserAgent += " " + grpcUA + } else { + cc.dopts.copts.UserAgent = grpcUA + } + + if cc.dopts.timeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, cc.dopts.timeout) + defer 
cancel() + } + defer func() { + select { + case <-ctx.Done(): + conn, err = nil, ctx.Err() + default: + } + }() + + scSet := false + if cc.dopts.scChan != nil { + // Try to get an initial service config. + select { + case sc, ok := <-cc.dopts.scChan: + if ok { + cc.sc = &sc + scSet = true + } + default: + } + } + if cc.dopts.bs == nil { + cc.dopts.bs = backoff.Exponential{ + MaxDelay: DefaultBackoffConfig.MaxDelay, + } + } + if cc.dopts.resolverBuilder == nil { + // Only try to parse target when resolver builder is not already set. + cc.parsedTarget = parseTarget(cc.target) + grpclog.Infof("parsed scheme: %q", cc.parsedTarget.Scheme) + cc.dopts.resolverBuilder = resolver.Get(cc.parsedTarget.Scheme) + if cc.dopts.resolverBuilder == nil { + // If resolver builder is still nil, the parsed target's scheme is + // not registered. Fallback to default resolver and set Endpoint to + // the original target. + grpclog.Infof("scheme %q not registered, fallback to default scheme", cc.parsedTarget.Scheme) + cc.parsedTarget = resolver.Target{ + Scheme: resolver.GetDefaultScheme(), + Endpoint: target, + } + cc.dopts.resolverBuilder = resolver.Get(cc.parsedTarget.Scheme) + } + } else { + cc.parsedTarget = resolver.Target{Endpoint: target} + } + creds := cc.dopts.copts.TransportCredentials + if creds != nil && creds.Info().ServerName != "" { + cc.authority = creds.Info().ServerName + } else if cc.dopts.insecure && cc.dopts.authority != "" { + cc.authority = cc.dopts.authority + } else { + // Use endpoint from "scheme://authority/endpoint" as the default + // authority for ClientConn. + cc.authority = cc.parsedTarget.Endpoint + } + + if cc.dopts.scChan != nil && !scSet { + // Blocking wait for the initial service config. + select { + case sc, ok := <-cc.dopts.scChan: + if ok { + cc.sc = &sc + } + case <-ctx.Done(): + return nil, ctx.Err() + } + } + if cc.dopts.scChan != nil { + go cc.scWatcher() + } + + var credsClone credentials.TransportCredentials + if creds := cc.dopts.copts.TransportCredentials; creds != nil { + credsClone = creds.Clone() + } + cc.balancerBuildOpts = balancer.BuildOptions{ + DialCreds: credsClone, + CredsBundle: cc.dopts.copts.CredsBundle, + Dialer: cc.dopts.copts.Dialer, + ChannelzParentID: cc.channelzID, + } + + // Build the resolver. + rWrapper, err := newCCResolverWrapper(cc) + if err != nil { + return nil, fmt.Errorf("failed to build resolver: %v", err) + } + + cc.mu.Lock() + cc.resolverWrapper = rWrapper + cc.mu.Unlock() + // A blocking dial blocks until the clientConn is ready. + if cc.dopts.block { + for { + s := cc.GetState() + if s == connectivity.Ready { + break + } else if cc.dopts.copts.FailOnNonTempDialError && s == connectivity.TransientFailure { + if err = cc.blockingpicker.connectionError(); err != nil { + terr, ok := err.(interface { + Temporary() bool + }) + if ok && !terr.Temporary() { + return nil, err + } + } + } + if !cc.WaitForStateChange(ctx, s) { + // ctx got timeout or canceled. + return nil, ctx.Err() + } + } + } + + return cc, nil +} + +// connectivityStateManager keeps the connectivity.State of ClientConn. +// This struct will eventually be exported so the balancers can access it. +type connectivityStateManager struct { + mu sync.Mutex + state connectivity.State + notifyChan chan struct{} + channelzID int64 +} + +// updateState updates the connectivity.State of ClientConn. +// If there's a change it notifies goroutines waiting on state change to +// happen. 
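For orientation while reviewing the vendored DialContext above, a minimal caller-side sketch (illustrative only; the target is a placeholder). With WithBlock the context deadline bounds connection establishment; without it the call returns immediately and connecting proceeds in the background:

package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
)

func main() {
	// Deadline for the blocking dial; without WithBlock the context only
	// governs the setup steps and the connection is established in the background.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	conn, err := grpc.DialContext(ctx, "dns:///greeter.example.com:50051",
		grpc.WithInsecure(), // plaintext; otherwise transport credentials are required (see errNoTransportSecurity above)
		grpc.WithBlock(),    // block until the ClientConn is READY or ctx expires
	)
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()
}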
+func (csm *connectivityStateManager) updateState(state connectivity.State) { + csm.mu.Lock() + defer csm.mu.Unlock() + if csm.state == connectivity.Shutdown { + return + } + if csm.state == state { + return + } + csm.state = state + if channelz.IsOn() { + channelz.AddTraceEvent(csm.channelzID, &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Channel Connectivity change to %v", state), + Severity: channelz.CtINFO, + }) + } + if csm.notifyChan != nil { + // There are other goroutines waiting on this channel. + close(csm.notifyChan) + csm.notifyChan = nil + } +} + +func (csm *connectivityStateManager) getState() connectivity.State { + csm.mu.Lock() + defer csm.mu.Unlock() + return csm.state +} + +func (csm *connectivityStateManager) getNotifyChan() <-chan struct{} { + csm.mu.Lock() + defer csm.mu.Unlock() + if csm.notifyChan == nil { + csm.notifyChan = make(chan struct{}) + } + return csm.notifyChan +} + +// ClientConn represents a client connection to an RPC server. +type ClientConn struct { + ctx context.Context + cancel context.CancelFunc + + target string + parsedTarget resolver.Target + authority string + dopts dialOptions + csMgr *connectivityStateManager + + balancerBuildOpts balancer.BuildOptions + blockingpicker *pickerWrapper + + mu sync.RWMutex + resolverWrapper *ccResolverWrapper + sc *ServiceConfig + conns map[*addrConn]struct{} + // Keepalive parameter can be updated if a GoAway is received. + mkp keepalive.ClientParameters + curBalancerName string + balancerWrapper *ccBalancerWrapper + retryThrottler atomic.Value + + firstResolveEvent *grpcsync.Event + + channelzID int64 // channelz unique identification number + czData *channelzData +} + +// WaitForStateChange waits until the connectivity.State of ClientConn changes from sourceState or +// ctx expires. A true value is returned in former case and false in latter. +// This is an EXPERIMENTAL API. +func (cc *ClientConn) WaitForStateChange(ctx context.Context, sourceState connectivity.State) bool { + ch := cc.csMgr.getNotifyChan() + if cc.csMgr.getState() != sourceState { + return true + } + select { + case <-ctx.Done(): + return false + case <-ch: + return true + } +} + +// GetState returns the connectivity.State of ClientConn. +// This is an EXPERIMENTAL API. +func (cc *ClientConn) GetState() connectivity.State { + return cc.csMgr.getState() +} + +func (cc *ClientConn) scWatcher() { + for { + select { + case sc, ok := <-cc.dopts.scChan: + if !ok { + return + } + cc.mu.Lock() + // TODO: load balance policy runtime change is ignored. + // We may revisit this decision in the future. + cc.sc = &sc + cc.mu.Unlock() + case <-cc.ctx.Done(): + return + } + } +} + +// waitForResolvedAddrs blocks until the resolver has provided addresses or the +// context expires. Returns nil unless the context expires first; otherwise +// returns a status error based on the context. +func (cc *ClientConn) waitForResolvedAddrs(ctx context.Context) error { + // This is on the RPC path, so we use a fast path to avoid the + // more-expensive "select" below after the resolver has returned once. + if cc.firstResolveEvent.HasFired() { + return nil + } + select { + case <-cc.firstResolveEvent.Done(): + return nil + case <-ctx.Done(): + return status.FromContextError(ctx.Err()).Err() + case <-cc.ctx.Done(): + return ErrClientConnClosing + } +} + +// gRPC should resort to default service config when: +// * resolver service config is disabled +// * or, resolver does not return a service config or returns an invalid one. 
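The experimental GetState/WaitForStateChange pair above can be combined into a simple state watcher; a hedged caller-side sketch with a placeholder target:

package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
)

// logStateChanges prints every connectivity transition until ctx expires.
func logStateChanges(ctx context.Context, cc *grpc.ClientConn) {
	for {
		s := cc.GetState()
		log.Printf("connectivity state: %v", s)
		// Blocks until the state leaves s; returns false once ctx is done.
		if !cc.WaitForStateChange(ctx, s) {
			return
		}
	}
}

func main() {
	conn, err := grpc.Dial("dns:///greeter.example.com:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	logStateChanges(ctx, conn)
}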
+func (cc *ClientConn) fallbackToDefaultServiceConfig(sc string) bool { + if cc.dopts.disableServiceConfig { + return true + } + // The logic below is temporary, will be removed once we change the resolver.State ServiceConfig field type. + // Right now, we assume that empty service config string means resolver does not return a config. + if sc == "" { + return true + } + // TODO: the logic below is temporary. Once we finish the logic to validate service config + // in resolver, we will replace the logic below. + _, err := parseServiceConfig(sc) + return err != nil +} + +func (cc *ClientConn) updateResolverState(s resolver.State) error { + cc.mu.Lock() + defer cc.mu.Unlock() + // Check if the ClientConn is already closed. Some fields (e.g. + // balancerWrapper) are set to nil when closing the ClientConn, and could + // cause nil pointer panic if we don't have this check. + if cc.conns == nil { + return nil + } + + if cc.fallbackToDefaultServiceConfig(s.ServiceConfig) { + if cc.dopts.defaultServiceConfig != nil && cc.sc == nil { + cc.applyServiceConfig(cc.dopts.defaultServiceConfig) + } + } else { + // TODO: the parsing logic below will be moved inside resolver. + sc, err := parseServiceConfig(s.ServiceConfig) + if err != nil { + return err + } + if cc.sc == nil || cc.sc.rawJSONString != s.ServiceConfig { + cc.applyServiceConfig(sc) + } + } + + // update the service config that will be sent to balancer. + if cc.sc != nil { + s.ServiceConfig = cc.sc.rawJSONString + } + + if cc.dopts.balancerBuilder == nil { + // Only look at balancer types and switch balancer if balancer dial + // option is not set. + var isGRPCLB bool + for _, a := range s.Addresses { + if a.Type == resolver.GRPCLB { + isGRPCLB = true + break + } + } + var newBalancerName string + // TODO: use new loadBalancerConfig field with appropriate priority. + if isGRPCLB { + newBalancerName = grpclbName + } else if cc.sc != nil && cc.sc.LB != nil { + newBalancerName = *cc.sc.LB + } else { + newBalancerName = PickFirstBalancerName + } + cc.switchBalancer(newBalancerName) + } else if cc.balancerWrapper == nil { + // Balancer dial option was set, and this is the first time handling + // resolved addresses. Build a balancer with dopts.balancerBuilder. + cc.balancerWrapper = newCCBalancerWrapper(cc, cc.dopts.balancerBuilder, cc.balancerBuildOpts) + } + + cc.balancerWrapper.updateResolverState(s) + cc.firstResolveEvent.Fire() + return nil +} + +// switchBalancer starts the switching from current balancer to the balancer +// with the given name. +// +// It will NOT send the current address list to the new balancer. If needed, +// caller of this function should send address list to the new balancer after +// this function returns. +// +// Caller must hold cc.mu. 
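The fallback path above uses dopts.defaultServiceConfig when the resolver returns no service config or an invalid one. A caller would normally supply that default via the WithDefaultServiceConfig dial option from the same package version; a sketch, with a placeholder target:

package main

import (
	"log"

	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("dns:///greeter.example.com:50051",
		grpc.WithInsecure(),
		// Parsed at dial time (defaultServiceConfigRawJSON above) and applied
		// only when the resolver supplies no usable service config.
		grpc.WithDefaultServiceConfig(`{"loadBalancingPolicy": "round_robin"}`),
	)
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()
}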
+func (cc *ClientConn) switchBalancer(name string) { + if strings.ToLower(cc.curBalancerName) == strings.ToLower(name) { + return + } + + grpclog.Infof("ClientConn switching balancer to %q", name) + if cc.dopts.balancerBuilder != nil { + grpclog.Infoln("ignoring balancer switching: Balancer DialOption used instead") + return + } + if cc.balancerWrapper != nil { + cc.balancerWrapper.close() + } + + builder := balancer.Get(name) + if channelz.IsOn() { + if builder == nil { + channelz.AddTraceEvent(cc.channelzID, &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Channel switches to new LB policy %q due to fallback from invalid balancer name", PickFirstBalancerName), + Severity: channelz.CtWarning, + }) + } else { + channelz.AddTraceEvent(cc.channelzID, &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Channel switches to new LB policy %q", name), + Severity: channelz.CtINFO, + }) + } + } + if builder == nil { + grpclog.Infof("failed to get balancer builder for: %v, using pick_first instead", name) + builder = newPickfirstBuilder() + } + + cc.curBalancerName = builder.Name() + cc.balancerWrapper = newCCBalancerWrapper(cc, builder, cc.balancerBuildOpts) +} + +func (cc *ClientConn) handleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + cc.mu.Lock() + if cc.conns == nil { + cc.mu.Unlock() + return + } + // TODO(bar switching) send updates to all balancer wrappers when balancer + // gracefully switching is supported. + cc.balancerWrapper.handleSubConnStateChange(sc, s) + cc.mu.Unlock() +} + +// newAddrConn creates an addrConn for addrs and adds it to cc.conns. +// +// Caller needs to make sure len(addrs) > 0. +func (cc *ClientConn) newAddrConn(addrs []resolver.Address, opts balancer.NewSubConnOptions) (*addrConn, error) { + ac := &addrConn{ + cc: cc, + addrs: addrs, + scopts: opts, + dopts: cc.dopts, + czData: new(channelzData), + resetBackoff: make(chan struct{}), + } + ac.ctx, ac.cancel = context.WithCancel(cc.ctx) + // Track ac in cc. This needs to be done before any getTransport(...) is called. + cc.mu.Lock() + if cc.conns == nil { + cc.mu.Unlock() + return nil, ErrClientConnClosing + } + if channelz.IsOn() { + ac.channelzID = channelz.RegisterSubChannel(ac, cc.channelzID, "") + channelz.AddTraceEvent(ac.channelzID, &channelz.TraceEventDesc{ + Desc: "Subchannel Created", + Severity: channelz.CtINFO, + Parent: &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Subchannel(id:%d) created", ac.channelzID), + Severity: channelz.CtINFO, + }, + }) + } + cc.conns[ac] = struct{}{} + cc.mu.Unlock() + return ac, nil +} + +// removeAddrConn removes the addrConn in the subConn from clientConn. +// It also tears down the ac with the given error. +func (cc *ClientConn) removeAddrConn(ac *addrConn, err error) { + cc.mu.Lock() + if cc.conns == nil { + cc.mu.Unlock() + return + } + delete(cc.conns, ac) + cc.mu.Unlock() + ac.tearDown(err) +} + +func (cc *ClientConn) channelzMetric() *channelz.ChannelInternalMetric { + return &channelz.ChannelInternalMetric{ + State: cc.GetState(), + Target: cc.target, + CallsStarted: atomic.LoadInt64(&cc.czData.callsStarted), + CallsSucceeded: atomic.LoadInt64(&cc.czData.callsSucceeded), + CallsFailed: atomic.LoadInt64(&cc.czData.callsFailed), + LastCallStartedTimestamp: time.Unix(0, atomic.LoadInt64(&cc.czData.lastCallStartedTime)), + } +} + +// Target returns the target string of the ClientConn. +// This is an EXPERIMENTAL API. 
+func (cc *ClientConn) Target() string { + return cc.target +} + +func (cc *ClientConn) incrCallsStarted() { + atomic.AddInt64(&cc.czData.callsStarted, 1) + atomic.StoreInt64(&cc.czData.lastCallStartedTime, time.Now().UnixNano()) +} + +func (cc *ClientConn) incrCallsSucceeded() { + atomic.AddInt64(&cc.czData.callsSucceeded, 1) +} + +func (cc *ClientConn) incrCallsFailed() { + atomic.AddInt64(&cc.czData.callsFailed, 1) +} + +// connect starts creating a transport. +// It does nothing if the ac is not IDLE. +// TODO(bar) Move this to the addrConn section. +func (ac *addrConn) connect() error { + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return errConnClosing + } + if ac.state != connectivity.Idle { + ac.mu.Unlock() + return nil + } + ac.updateConnectivityState(connectivity.Connecting) + ac.mu.Unlock() + + // Start a goroutine connecting to the server asynchronously. + go ac.resetTransport() + return nil +} + +// tryUpdateAddrs tries to update ac.addrs with the new addresses list. +// +// It checks whether current connected address of ac is in the new addrs list. +// - If true, it updates ac.addrs and returns true. The ac will keep using +// the existing connection. +// - If false, it does nothing and returns false. +func (ac *addrConn) tryUpdateAddrs(addrs []resolver.Address) bool { + ac.mu.Lock() + defer ac.mu.Unlock() + grpclog.Infof("addrConn: tryUpdateAddrs curAddr: %v, addrs: %v", ac.curAddr, addrs) + if ac.state == connectivity.Shutdown { + ac.addrs = addrs + return true + } + + // Unless we're busy reconnecting already, let's reconnect from the top of + // the list. + if ac.state != connectivity.Ready { + return false + } + + var curAddrFound bool + for _, a := range addrs { + if reflect.DeepEqual(ac.curAddr, a) { + curAddrFound = true + break + } + } + grpclog.Infof("addrConn: tryUpdateAddrs curAddrFound: %v", curAddrFound) + if curAddrFound { + ac.addrs = addrs + } + + return curAddrFound +} + +// GetMethodConfig gets the method config of the input method. +// If there's an exact match for input method (i.e. /service/method), we return +// the corresponding MethodConfig. +// If there isn't an exact match for the input method, we look for the default config +// under the service (i.e /service/). If there is a default MethodConfig for +// the service, we return it. +// Otherwise, we return an empty MethodConfig. +func (cc *ClientConn) GetMethodConfig(method string) MethodConfig { + // TODO: Avoid the locking here. + cc.mu.RLock() + defer cc.mu.RUnlock() + if cc.sc == nil { + return MethodConfig{} + } + m, ok := cc.sc.Methods[method] + if !ok { + i := strings.LastIndex(method, "/") + m = cc.sc.Methods[method[:i+1]] + } + return m +} + +func (cc *ClientConn) healthCheckConfig() *healthCheckConfig { + cc.mu.RLock() + defer cc.mu.RUnlock() + if cc.sc == nil { + return nil + } + return cc.sc.healthCheckConfig +} + +func (cc *ClientConn) getTransport(ctx context.Context, failfast bool, method string) (transport.ClientTransport, func(balancer.DoneInfo), error) { + t, done, err := cc.blockingpicker.pick(ctx, failfast, balancer.PickOptions{ + FullMethodName: method, + }) + if err != nil { + return nil, nil, toRPCErr(err) + } + return t, done, nil +} + +func (cc *ClientConn) applyServiceConfig(sc *ServiceConfig) error { + if sc == nil { + // should never reach here. 
+ return fmt.Errorf("got nil pointer for service config") + } + cc.sc = sc + + if cc.sc.retryThrottling != nil { + newThrottler := &retryThrottler{ + tokens: cc.sc.retryThrottling.MaxTokens, + max: cc.sc.retryThrottling.MaxTokens, + thresh: cc.sc.retryThrottling.MaxTokens / 2, + ratio: cc.sc.retryThrottling.TokenRatio, + } + cc.retryThrottler.Store(newThrottler) + } else { + cc.retryThrottler.Store((*retryThrottler)(nil)) + } + + return nil +} + +func (cc *ClientConn) resolveNow(o resolver.ResolveNowOption) { + cc.mu.RLock() + r := cc.resolverWrapper + cc.mu.RUnlock() + if r == nil { + return + } + go r.resolveNow(o) +} + +// ResetConnectBackoff wakes up all subchannels in transient failure and causes +// them to attempt another connection immediately. It also resets the backoff +// times used for subsequent attempts regardless of the current state. +// +// In general, this function should not be used. Typical service or network +// outages result in a reasonable client reconnection strategy by default. +// However, if a previously unavailable network becomes available, this may be +// used to trigger an immediate reconnect. +// +// This API is EXPERIMENTAL. +func (cc *ClientConn) ResetConnectBackoff() { + cc.mu.Lock() + defer cc.mu.Unlock() + for ac := range cc.conns { + ac.resetConnectBackoff() + } +} + +// Close tears down the ClientConn and all underlying connections. +func (cc *ClientConn) Close() error { + defer cc.cancel() + + cc.mu.Lock() + if cc.conns == nil { + cc.mu.Unlock() + return ErrClientConnClosing + } + conns := cc.conns + cc.conns = nil + cc.csMgr.updateState(connectivity.Shutdown) + + rWrapper := cc.resolverWrapper + cc.resolverWrapper = nil + bWrapper := cc.balancerWrapper + cc.balancerWrapper = nil + cc.mu.Unlock() + + cc.blockingpicker.close() + + if rWrapper != nil { + rWrapper.close() + } + if bWrapper != nil { + bWrapper.close() + } + + for ac := range conns { + ac.tearDown(ErrClientConnClosing) + } + if channelz.IsOn() { + ted := &channelz.TraceEventDesc{ + Desc: "Channel Deleted", + Severity: channelz.CtINFO, + } + if cc.dopts.channelzParentID != 0 { + ted.Parent = &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Nested channel(id:%d) deleted", cc.channelzID), + Severity: channelz.CtINFO, + } + } + channelz.AddTraceEvent(cc.channelzID, ted) + // TraceEvent needs to be called before RemoveEntry, as TraceEvent may add trace reference to + // the entity being deleted, and thus prevent it from being deleted right away. + channelz.RemoveEntry(cc.channelzID) + } + return nil +} + +// addrConn is a network connection to a given address. +type addrConn struct { + ctx context.Context + cancel context.CancelFunc + + cc *ClientConn + dopts dialOptions + acbw balancer.SubConn + scopts balancer.NewSubConnOptions + + // transport is set when there's a viable transport (note: ac state may not be READY as LB channel + // health checking may require server to report healthy to set ac to READY), and is reset + // to nil when the current transport should no longer be used to create a stream (e.g. after GoAway + // is received, transport is closed, ac has been torn down). + transport transport.ClientTransport // The current transport. + + mu sync.Mutex + curAddr resolver.Address // The current address. + addrs []resolver.Address // All addresses that the resolver resolved to. + + // Use updateConnectivityState for updating addrConn's connectivity state. + state connectivity.State + + backoffIdx int // Needs to be stateful for resetConnectBackoff. 
+ resetBackoff chan struct{} + + channelzID int64 // channelz unique identification number. + czData *channelzData +} + +// Note: this requires a lock on ac.mu. +func (ac *addrConn) updateConnectivityState(s connectivity.State) { + if ac.state == s { + return + } + + updateMsg := fmt.Sprintf("Subchannel Connectivity change to %v", s) + ac.state = s + if channelz.IsOn() { + channelz.AddTraceEvent(ac.channelzID, &channelz.TraceEventDesc{ + Desc: updateMsg, + Severity: channelz.CtINFO, + }) + } + ac.cc.handleSubConnStateChange(ac.acbw, s) +} + +// adjustParams updates parameters used to create transports upon +// receiving a GoAway. +func (ac *addrConn) adjustParams(r transport.GoAwayReason) { + switch r { + case transport.GoAwayTooManyPings: + v := 2 * ac.dopts.copts.KeepaliveParams.Time + ac.cc.mu.Lock() + if v > ac.cc.mkp.Time { + ac.cc.mkp.Time = v + } + ac.cc.mu.Unlock() + } +} + +func (ac *addrConn) resetTransport() { + for i := 0; ; i++ { + if i > 0 { + ac.cc.resolveNow(resolver.ResolveNowOption{}) + } + + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return + } + + addrs := ac.addrs + backoffFor := ac.dopts.bs.Backoff(ac.backoffIdx) + // This will be the duration that dial gets to finish. + dialDuration := minConnectTimeout + if ac.dopts.minConnectTimeout != nil { + dialDuration = ac.dopts.minConnectTimeout() + } + + if dialDuration < backoffFor { + // Give dial more time as we keep failing to connect. + dialDuration = backoffFor + } + // We can potentially spend all the time trying the first address, and + // if the server accepts the connection and then hangs, the following + // addresses will never be tried. + // + // The spec doesn't mention what should be done for multiple addresses. + // https://github.com/grpc/grpc/blob/master/doc/connection-backoff.md#proposed-backoff-algorithm + connectDeadline := time.Now().Add(dialDuration) + ac.mu.Unlock() + + newTr, addr, reconnect, err := ac.tryAllAddrs(addrs, connectDeadline) + if err != nil { + // After exhausting all addresses, the addrConn enters + // TRANSIENT_FAILURE. + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return + } + ac.updateConnectivityState(connectivity.TransientFailure) + + // Backoff. + b := ac.resetBackoff + ac.mu.Unlock() + + timer := time.NewTimer(backoffFor) + select { + case <-timer.C: + ac.mu.Lock() + ac.backoffIdx++ + ac.mu.Unlock() + case <-b: + timer.Stop() + case <-ac.ctx.Done(): + timer.Stop() + return + } + continue + } + + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + newTr.Close() + ac.mu.Unlock() + return + } + ac.curAddr = addr + ac.transport = newTr + ac.backoffIdx = 0 + + healthCheckConfig := ac.cc.healthCheckConfig() + // LB channel health checking is only enabled when all the four requirements below are met: + // 1. it is not disabled by the user with the WithDisableHealthCheck DialOption, + // 2. the internal.HealthCheckFunc is set by importing the grpc/healthcheck package, + // 3. a service config with non-empty healthCheckConfig field is provided, + // 4. the current load balancer allows it. + hctx, hcancel := context.WithCancel(ac.ctx) + healthcheckManagingState := false + if !ac.cc.dopts.disableHealthCheck && healthCheckConfig != nil && ac.scopts.HealthCheckEnabled { + if ac.cc.dopts.healthCheckFunc == nil { + // TODO: add a link to the health check doc in the error message. 
+ grpclog.Error("the client side LB channel health check function has not been set.") + } else { + // TODO(deklerk) refactor to just return transport + go ac.startHealthCheck(hctx, newTr, addr, healthCheckConfig.ServiceName) + healthcheckManagingState = true + } + } + if !healthcheckManagingState { + ac.updateConnectivityState(connectivity.Ready) + } + ac.mu.Unlock() + + // Block until the created transport is down. And when this happens, + // we restart from the top of the addr list. + <-reconnect.Done() + hcancel() + + // Need to reconnect after a READY, the addrConn enters + // TRANSIENT_FAILURE. + // + // This will set addrConn to TRANSIENT_FAILURE for a very short period + // of time, and turns CONNECTING. It seems reasonable to skip this, but + // READY-CONNECTING is not a valid transition. + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return + } + ac.updateConnectivityState(connectivity.TransientFailure) + ac.mu.Unlock() + } +} + +// tryAllAddrs tries to creates a connection to the addresses, and stop when at the +// first successful one. It returns the transport, the address and a Event in +// the successful case. The Event fires when the returned transport disconnects. +func (ac *addrConn) tryAllAddrs(addrs []resolver.Address, connectDeadline time.Time) (transport.ClientTransport, resolver.Address, *grpcsync.Event, error) { + for _, addr := range addrs { + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return nil, resolver.Address{}, nil, errConnClosing + } + ac.updateConnectivityState(connectivity.Connecting) + ac.transport = nil + + ac.cc.mu.RLock() + ac.dopts.copts.KeepaliveParams = ac.cc.mkp + ac.cc.mu.RUnlock() + + copts := ac.dopts.copts + if ac.scopts.CredsBundle != nil { + copts.CredsBundle = ac.scopts.CredsBundle + } + ac.mu.Unlock() + + if channelz.IsOn() { + channelz.AddTraceEvent(ac.channelzID, &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Subchannel picks a new address %q to connect", addr.Addr), + Severity: channelz.CtINFO, + }) + } + + newTr, reconnect, err := ac.createTransport(addr, copts, connectDeadline) + if err == nil { + return newTr, addr, reconnect, nil + } + ac.cc.blockingpicker.updateConnectionError(err) + } + + // Couldn't connect to any address. + return nil, resolver.Address{}, nil, fmt.Errorf("couldn't connect to any address") +} + +// createTransport creates a connection to addr. It returns the transport and a +// Event in the successful case. The Event fires when the returned transport +// disconnects. +func (ac *addrConn) createTransport(addr resolver.Address, copts transport.ConnectOptions, connectDeadline time.Time) (transport.ClientTransport, *grpcsync.Event, error) { + prefaceReceived := make(chan struct{}) + onCloseCalled := make(chan struct{}) + reconnect := grpcsync.NewEvent() + + target := transport.TargetInfo{ + Addr: addr.Addr, + Metadata: addr.Metadata, + Authority: ac.cc.authority, + } + + onGoAway := func(r transport.GoAwayReason) { + ac.mu.Lock() + ac.adjustParams(r) + ac.mu.Unlock() + reconnect.Fire() + } + + onClose := func() { + close(onCloseCalled) + reconnect.Fire() + } + + onPrefaceReceipt := func() { + close(prefaceReceived) + } + + connectCtx, cancel := context.WithDeadline(ac.ctx, connectDeadline) + defer cancel() + if channelz.IsOn() { + copts.ChannelzParentID = ac.channelzID + } + + newTr, err := transport.NewClientTransport(connectCtx, ac.cc.ctx, target, copts, onPrefaceReceipt, onGoAway, onClose) + if err != nil { + // newTr is either nil, or closed. 
+ grpclog.Warningf("grpc: addrConn.createTransport failed to connect to %v. Err :%v. Reconnecting...", addr, err) + return nil, nil, err + } + + if ac.dopts.reqHandshake == envconfig.RequireHandshakeOn { + select { + case <-time.After(connectDeadline.Sub(time.Now())): + // We didn't get the preface in time. + newTr.Close() + grpclog.Warningf("grpc: addrConn.createTransport failed to connect to %v: didn't receive server preface in time. Reconnecting...", addr) + return nil, nil, errors.New("timed out waiting for server handshake") + case <-prefaceReceived: + // We got the preface - huzzah! things are good. + case <-onCloseCalled: + // The transport has already closed - noop. + return nil, nil, errors.New("connection closed") + // TODO(deklerk) this should bail on ac.ctx.Done(). Add a test and fix. + } + } + return newTr, reconnect, nil +} + +func (ac *addrConn) startHealthCheck(ctx context.Context, newTr transport.ClientTransport, addr resolver.Address, serviceName string) { + // Set up the health check helper functions + newStream := func() (interface{}, error) { + return ac.newClientStream(ctx, &StreamDesc{ServerStreams: true}, "/grpc.health.v1.Health/Watch", newTr) + } + firstReady := true + reportHealth := func(ok bool) { + ac.mu.Lock() + defer ac.mu.Unlock() + if ac.transport != newTr { + return + } + if ok { + if firstReady { + firstReady = false + ac.curAddr = addr + } + ac.updateConnectivityState(connectivity.Ready) + } else { + ac.updateConnectivityState(connectivity.TransientFailure) + } + } + err := ac.cc.dopts.healthCheckFunc(ctx, newStream, reportHealth, serviceName) + if err != nil { + if status.Code(err) == codes.Unimplemented { + if channelz.IsOn() { + channelz.AddTraceEvent(ac.channelzID, &channelz.TraceEventDesc{ + Desc: "Subchannel health check is unimplemented at server side, thus health check is disabled", + Severity: channelz.CtError, + }) + } + grpclog.Error("Subchannel health check is unimplemented at server side, thus health check is disabled") + } else { + grpclog.Errorf("HealthCheckFunc exits with unexpected error %v", err) + } + } +} + +func (ac *addrConn) resetConnectBackoff() { + ac.mu.Lock() + close(ac.resetBackoff) + ac.backoffIdx = 0 + ac.resetBackoff = make(chan struct{}) + ac.mu.Unlock() +} + +// getReadyTransport returns the transport if ac's state is READY. +// Otherwise it returns nil, false. +// If ac's state is IDLE, it will trigger ac to connect. +func (ac *addrConn) getReadyTransport() (transport.ClientTransport, bool) { + ac.mu.Lock() + if ac.state == connectivity.Ready && ac.transport != nil { + t := ac.transport + ac.mu.Unlock() + return t, true + } + var idle bool + if ac.state == connectivity.Idle { + idle = true + } + ac.mu.Unlock() + // Trigger idle ac to connect. + if idle { + ac.connect() + } + return nil, false +} + +// tearDown starts to tear down the addrConn. +// TODO(zhaoq): Make this synchronous to avoid unbounded memory consumption in +// some edge cases (e.g., the caller opens and closes many addrConn's in a +// tight loop. +// tearDown doesn't remove ac from ac.cc.conns. +func (ac *addrConn) tearDown(err error) { + ac.mu.Lock() + if ac.state == connectivity.Shutdown { + ac.mu.Unlock() + return + } + curTr := ac.transport + ac.transport = nil + // We have to set the state to Shutdown before anything else to prevent races + // between setting the state and logic that waits on context cancelation / etc. 
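The health-checking requirements listed above map to caller-side setup roughly as follows. This is a sketch that assumes the google.golang.org/grpc/health package of this release registers the client-side health-check function, and uses placeholder target and service names:

package main

import (
	"log"

	"google.golang.org/grpc"
	_ "google.golang.org/grpc/health" // assumed to register the client-side health-check function (requirement 2)
)

func main() {
	// Requirement 3: a service config whose healthCheckConfig field is set;
	// requirement 1 is simply not passing grpc.WithDisableHealthCheck();
	// requirement 4 depends on the balancer in use (round_robin enables it).
	conn, err := grpc.Dial("dns:///greeter.example.com:50051",
		grpc.WithInsecure(),
		grpc.WithDefaultServiceConfig(`{"healthCheckConfig": {"serviceName": ""}, "loadBalancingPolicy": "round_robin"}`),
	)
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()
}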
+ ac.updateConnectivityState(connectivity.Shutdown) + ac.cancel() + ac.curAddr = resolver.Address{} + if err == errConnDrain && curTr != nil { + // GracefulClose(...) may be executed multiple times when + // i) receiving multiple GoAway frames from the server; or + // ii) there are concurrent name resolver/Balancer triggered + // address removal and GoAway. + // We have to unlock and re-lock here because GracefulClose => Close => onClose, which requires locking ac.mu. + ac.mu.Unlock() + curTr.GracefulClose() + ac.mu.Lock() + } + if channelz.IsOn() { + channelz.AddTraceEvent(ac.channelzID, &channelz.TraceEventDesc{ + Desc: "Subchannel Deleted", + Severity: channelz.CtINFO, + Parent: &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Subchannel(id:%d) deleted", ac.channelzID), + Severity: channelz.CtINFO, + }, + }) + // TraceEvent needs to be called before RemoveEntry, as TraceEvent may add trace reference to + // the entity being deleted, and thus prevent it from being deleted right away. + channelz.RemoveEntry(ac.channelzID) + } + ac.mu.Unlock() +} + +func (ac *addrConn) getState() connectivity.State { + ac.mu.Lock() + defer ac.mu.Unlock() + return ac.state +} + +func (ac *addrConn) ChannelzMetric() *channelz.ChannelInternalMetric { + ac.mu.Lock() + addr := ac.curAddr.Addr + ac.mu.Unlock() + return &channelz.ChannelInternalMetric{ + State: ac.getState(), + Target: addr, + CallsStarted: atomic.LoadInt64(&ac.czData.callsStarted), + CallsSucceeded: atomic.LoadInt64(&ac.czData.callsSucceeded), + CallsFailed: atomic.LoadInt64(&ac.czData.callsFailed), + LastCallStartedTimestamp: time.Unix(0, atomic.LoadInt64(&ac.czData.lastCallStartedTime)), + } +} + +func (ac *addrConn) incrCallsStarted() { + atomic.AddInt64(&ac.czData.callsStarted, 1) + atomic.StoreInt64(&ac.czData.lastCallStartedTime, time.Now().UnixNano()) +} + +func (ac *addrConn) incrCallsSucceeded() { + atomic.AddInt64(&ac.czData.callsSucceeded, 1) +} + +func (ac *addrConn) incrCallsFailed() { + atomic.AddInt64(&ac.czData.callsFailed, 1) +} + +type retryThrottler struct { + max float64 + thresh float64 + ratio float64 + + mu sync.Mutex + tokens float64 // TODO(dfawley): replace with atomic and remove lock. +} + +// throttle subtracts a retry token from the pool and returns whether a retry +// should be throttled (disallowed) based upon the retry throttling policy in +// the service config. +func (rt *retryThrottler) throttle() bool { + if rt == nil { + return false + } + rt.mu.Lock() + defer rt.mu.Unlock() + rt.tokens-- + if rt.tokens < 0 { + rt.tokens = 0 + } + return rt.tokens <= rt.thresh +} + +func (rt *retryThrottler) successfulRPC() { + if rt == nil { + return + } + rt.mu.Lock() + defer rt.mu.Unlock() + rt.tokens += rt.ratio + if rt.tokens > rt.max { + rt.tokens = rt.max + } +} + +type channelzChannel struct { + cc *ClientConn +} + +func (c *channelzChannel) ChannelzMetric() *channelz.ChannelInternalMetric { + return c.cc.channelzMetric() +} + +// ErrClientConnTimeout indicates that the ClientConn cannot establish the +// underlying connections within the specified timeout. +// +// Deprecated: This error is never returned by grpc and should not be +// referenced by users. +var ErrClientConnTimeout = errors.New("grpc: timed out when dialing") diff --git a/vendor/google.golang.org/grpc/codec.go b/vendor/google.golang.org/grpc/codec.go new file mode 100644 index 0000000..1297765 --- /dev/null +++ b/vendor/google.golang.org/grpc/codec.go @@ -0,0 +1,50 @@ +/* + * + * Copyright 2014 gRPC authors.
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "google.golang.org/grpc/encoding" + _ "google.golang.org/grpc/encoding/proto" // to register the Codec for "proto" +) + +// baseCodec contains the functionality of both Codec and encoding.Codec, but +// omits the name/string, which vary between the two and are not needed for +// anything besides the registry in the encoding package. +type baseCodec interface { + Marshal(v interface{}) ([]byte, error) + Unmarshal(data []byte, v interface{}) error +} + +var _ baseCodec = Codec(nil) +var _ baseCodec = encoding.Codec(nil) + +// Codec defines the interface gRPC uses to encode and decode messages. +// Note that implementations of this interface must be thread safe; +// a Codec's methods can be called from concurrent goroutines. +// +// Deprecated: use encoding.Codec instead. +type Codec interface { + // Marshal returns the wire format of v. + Marshal(v interface{}) ([]byte, error) + // Unmarshal parses the wire format into v. + Unmarshal(data []byte, v interface{}) error + // String returns the name of the Codec implementation. This is unused by + // gRPC. + String() string +} diff --git a/vendor/google.golang.org/grpc/codes/code_string.go b/vendor/google.golang.org/grpc/codes/code_string.go new file mode 100644 index 0000000..0b206a5 --- /dev/null +++ b/vendor/google.golang.org/grpc/codes/code_string.go @@ -0,0 +1,62 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package codes + +import "strconv" + +func (c Code) String() string { + switch c { + case OK: + return "OK" + case Canceled: + return "Canceled" + case Unknown: + return "Unknown" + case InvalidArgument: + return "InvalidArgument" + case DeadlineExceeded: + return "DeadlineExceeded" + case NotFound: + return "NotFound" + case AlreadyExists: + return "AlreadyExists" + case PermissionDenied: + return "PermissionDenied" + case ResourceExhausted: + return "ResourceExhausted" + case FailedPrecondition: + return "FailedPrecondition" + case Aborted: + return "Aborted" + case OutOfRange: + return "OutOfRange" + case Unimplemented: + return "Unimplemented" + case Internal: + return "Internal" + case Unavailable: + return "Unavailable" + case DataLoss: + return "DataLoss" + case Unauthenticated: + return "Unauthenticated" + default: + return "Code(" + strconv.FormatInt(int64(c), 10) + ")" + } +} diff --git a/vendor/google.golang.org/grpc/codes/codes.go b/vendor/google.golang.org/grpc/codes/codes.go new file mode 100644 index 0000000..d9b9d57 --- /dev/null +++ b/vendor/google.golang.org/grpc/codes/codes.go @@ -0,0 +1,197 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package codes defines the canonical error codes used by gRPC. It is +// consistent across various languages. +package codes // import "google.golang.org/grpc/codes" + +import ( + "fmt" + "strconv" +) + +// A Code is an unsigned 32-bit error code as defined in the gRPC spec. +type Code uint32 + +const ( + // OK is returned on success. + OK Code = 0 + + // Canceled indicates the operation was canceled (typically by the caller). + Canceled Code = 1 + + // Unknown error. An example of where this error may be returned is + // if a Status value received from another address space belongs to + // an error-space that is not known in this address space. Also + // errors raised by APIs that do not return enough error information + // may be converted to this error. + Unknown Code = 2 + + // InvalidArgument indicates client specified an invalid argument. + // Note that this differs from FailedPrecondition. It indicates arguments + // that are problematic regardless of the state of the system + // (e.g., a malformed file name). + InvalidArgument Code = 3 + + // DeadlineExceeded means operation expired before completion. + // For operations that change the state of the system, this error may be + // returned even if the operation has completed successfully. For + // example, a successful response from a server could have been delayed + // long enough for the deadline to expire. + DeadlineExceeded Code = 4 + + // NotFound means some requested entity (e.g., file or directory) was + // not found. + NotFound Code = 5 + + // AlreadyExists means an attempt to create an entity failed because one + // already exists. + AlreadyExists Code = 6 + + // PermissionDenied indicates the caller does not have permission to + // execute the specified operation. 
It must not be used for rejections + // caused by exhausting some resource (use ResourceExhausted + // instead for those errors). It must not be + // used if the caller cannot be identified (use Unauthenticated + // instead for those errors). + PermissionDenied Code = 7 + + // ResourceExhausted indicates some resource has been exhausted, perhaps + // a per-user quota, or perhaps the entire file system is out of space. + ResourceExhausted Code = 8 + + // FailedPrecondition indicates operation was rejected because the + // system is not in a state required for the operation's execution. + // For example, directory to be deleted may be non-empty, an rmdir + // operation is applied to a non-directory, etc. + // + // A litmus test that may help a service implementor in deciding + // between FailedPrecondition, Aborted, and Unavailable: + // (a) Use Unavailable if the client can retry just the failing call. + // (b) Use Aborted if the client should retry at a higher-level + // (e.g., restarting a read-modify-write sequence). + // (c) Use FailedPrecondition if the client should not retry until + // the system state has been explicitly fixed. E.g., if an "rmdir" + // fails because the directory is non-empty, FailedPrecondition + // should be returned since the client should not retry unless + // they have first fixed up the directory by deleting files from it. + // (d) Use FailedPrecondition if the client performs conditional + // REST Get/Update/Delete on a resource and the resource on the + // server does not match the condition. E.g., conflicting + // read-modify-write on the same resource. + FailedPrecondition Code = 9 + + // Aborted indicates the operation was aborted, typically due to a + // concurrency issue like sequencer check failures, transaction aborts, + // etc. + // + // See litmus test above for deciding between FailedPrecondition, + // Aborted, and Unavailable. + Aborted Code = 10 + + // OutOfRange means operation was attempted past the valid range. + // E.g., seeking or reading past end of file. + // + // Unlike InvalidArgument, this error indicates a problem that may + // be fixed if the system state changes. For example, a 32-bit file + // system will generate InvalidArgument if asked to read at an + // offset that is not in the range [0,2^32-1], but it will generate + // OutOfRange if asked to read from an offset past the current + // file size. + // + // There is a fair bit of overlap between FailedPrecondition and + // OutOfRange. We recommend using OutOfRange (the more specific + // error) when it applies so that callers who are iterating through + // a space can easily look for an OutOfRange error to detect when + // they are done. + OutOfRange Code = 11 + + // Unimplemented indicates operation is not implemented or not + // supported/enabled in this service. + Unimplemented Code = 12 + + // Internal errors. Means some invariants expected by underlying + // system has been broken. If you see one of these errors, + // something is very broken. + Internal Code = 13 + + // Unavailable indicates the service is currently unavailable. + // This is a most likely a transient condition and may be corrected + // by retrying with a backoff. + // + // See litmus test above for deciding between FailedPrecondition, + // Aborted, and Unavailable. + Unavailable Code = 14 + + // DataLoss indicates unrecoverable data loss or corruption. + DataLoss Code = 15 + + // Unauthenticated indicates the request does not have valid + // authentication credentials for the operation. 
+ Unauthenticated Code = 16 + + _maxCode = 17 +) + +var strToCode = map[string]Code{ + `"OK"`: OK, + `"CANCELLED"`:/* [sic] */ Canceled, + `"UNKNOWN"`: Unknown, + `"INVALID_ARGUMENT"`: InvalidArgument, + `"DEADLINE_EXCEEDED"`: DeadlineExceeded, + `"NOT_FOUND"`: NotFound, + `"ALREADY_EXISTS"`: AlreadyExists, + `"PERMISSION_DENIED"`: PermissionDenied, + `"RESOURCE_EXHAUSTED"`: ResourceExhausted, + `"FAILED_PRECONDITION"`: FailedPrecondition, + `"ABORTED"`: Aborted, + `"OUT_OF_RANGE"`: OutOfRange, + `"UNIMPLEMENTED"`: Unimplemented, + `"INTERNAL"`: Internal, + `"UNAVAILABLE"`: Unavailable, + `"DATA_LOSS"`: DataLoss, + `"UNAUTHENTICATED"`: Unauthenticated, +} + +// UnmarshalJSON unmarshals b into the Code. +func (c *Code) UnmarshalJSON(b []byte) error { + // From json.Unmarshaler: By convention, to approximate the behavior of + // Unmarshal itself, Unmarshalers implement UnmarshalJSON([]byte("null")) as + // a no-op. + if string(b) == "null" { + return nil + } + if c == nil { + return fmt.Errorf("nil receiver passed to UnmarshalJSON") + } + + if ci, err := strconv.ParseUint(string(b), 10, 32); err == nil { + if ci >= _maxCode { + return fmt.Errorf("invalid code: %q", ci) + } + + *c = Code(ci) + return nil + } + + if jc, ok := strToCode[string(b)]; ok { + *c = jc + return nil + } + return fmt.Errorf("invalid code: %q", string(b)) +} diff --git a/vendor/google.golang.org/grpc/connectivity/connectivity.go b/vendor/google.golang.org/grpc/connectivity/connectivity.go new file mode 100644 index 0000000..34ec36f --- /dev/null +++ b/vendor/google.golang.org/grpc/connectivity/connectivity.go @@ -0,0 +1,73 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package connectivity defines connectivity semantics. +// For details, see https://github.com/grpc/grpc/blob/master/doc/connectivity-semantics-and-api.md. +// All APIs in this package are experimental. +package connectivity + +import ( + "context" + + "google.golang.org/grpc/grpclog" +) + +// State indicates the state of connectivity. +// It can be the state of a ClientConn or SubConn. +type State int + +func (s State) String() string { + switch s { + case Idle: + return "IDLE" + case Connecting: + return "CONNECTING" + case Ready: + return "READY" + case TransientFailure: + return "TRANSIENT_FAILURE" + case Shutdown: + return "SHUTDOWN" + default: + grpclog.Errorf("unknown connectivity state: %d", s) + return "Invalid-State" + } +} + +const ( + // Idle indicates the ClientConn is idle. + Idle State = iota + // Connecting indicates the ClientConn is connecting. + Connecting + // Ready indicates the ClientConn is ready for work. + Ready + // TransientFailure indicates the ClientConn has seen a failure but expects to recover. + TransientFailure + // Shutdown indicates the ClientConn has started shutting down. + Shutdown +) + +// Reporter reports the connectivity states. +type Reporter interface { + // CurrentState returns the current state of the reporter. 
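UnmarshalJSON above accepts either the numeric value or the quoted canonical name of a code; a brief illustrative caller-side sketch:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
)

func main() {
	var c codes.Code
	// The quoted canonical name form...
	if err := c.UnmarshalJSON([]byte(`"NOT_FOUND"`)); err == nil {
		fmt.Println(c) // NotFound
	}
	// ...and the numeric form are both accepted.
	if err := c.UnmarshalJSON([]byte("14")); err == nil {
		fmt.Println(c) // Unavailable
	}
	// Unknown names and values of 17 or above yield an "invalid code" error.
}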
+ CurrentState() State + // WaitForStateChange blocks until the reporter's state is different from the given state, + // and returns true. + // It returns false if <-ctx.Done() can proceed (ctx got timeout or got canceled). + WaitForStateChange(context.Context, State) bool +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/alts.go b/vendor/google.golang.org/grpc/credentials/alts/alts.go new file mode 100644 index 0000000..4cb93f1 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/alts.go @@ -0,0 +1,330 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package alts implements the ALTS credential support by gRPC library, which +// encapsulates all the state needed by a client to authenticate with a server +// using ALTS and make various assertions, e.g., about the client's identity, +// role, or whether it is authorized to make a particular call. +// This package is experimental. +package alts + +import ( + "context" + "errors" + "fmt" + "net" + "sync" + "time" + + "google.golang.org/grpc/credentials" + core "google.golang.org/grpc/credentials/alts/internal" + "google.golang.org/grpc/credentials/alts/internal/handshaker" + "google.golang.org/grpc/credentials/alts/internal/handshaker/service" + altspb "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" + "google.golang.org/grpc/grpclog" +) + +const ( + // hypervisorHandshakerServiceAddress represents the default ALTS gRPC + // handshaker service address in the hypervisor. + hypervisorHandshakerServiceAddress = "metadata.google.internal:8080" + // defaultTimeout specifies the server handshake timeout. + defaultTimeout = 30.0 * time.Second + // The following constants specify the minimum and maximum acceptable + // protocol versions. + protocolVersionMaxMajor = 2 + protocolVersionMaxMinor = 1 + protocolVersionMinMajor = 2 + protocolVersionMinMinor = 1 +) + +var ( + once sync.Once + maxRPCVersion = &altspb.RpcProtocolVersions_Version{ + Major: protocolVersionMaxMajor, + Minor: protocolVersionMaxMinor, + } + minRPCVersion = &altspb.RpcProtocolVersions_Version{ + Major: protocolVersionMinMajor, + Minor: protocolVersionMinMinor, + } + // ErrUntrustedPlatform is returned from ClientHandshake and + // ServerHandshake is running on a platform where the trustworthiness of + // the handshaker service is not guaranteed. + ErrUntrustedPlatform = errors.New("ALTS: untrusted platform. ALTS is only supported on GCP") +) + +// AuthInfo exposes security information from the ALTS handshake to the +// application. This interface is to be implemented by ALTS. Users should not +// need a brand new implementation of this interface. For situations like +// testing, any new implementation should embed this interface. This allows +// ALTS to add new methods to this interface. +type AuthInfo interface { + // ApplicationProtocol returns application protocol negotiated for the + // ALTS connection. 
+ ApplicationProtocol() string + // RecordProtocol returns the record protocol negotiated for the ALTS + // connection. + RecordProtocol() string + // SecurityLevel returns the security level of the created ALTS secure + // channel. + SecurityLevel() altspb.SecurityLevel + // PeerServiceAccount returns the peer service account. + PeerServiceAccount() string + // LocalServiceAccount returns the local service account. + LocalServiceAccount() string + // PeerRPCVersions returns the RPC version supported by the peer. + PeerRPCVersions() *altspb.RpcProtocolVersions +} + +// ClientOptions contains the client-side options of an ALTS channel. These +// options will be passed to the underlying ALTS handshaker. +type ClientOptions struct { + // TargetServiceAccounts contains a list of expected target service + // accounts. + TargetServiceAccounts []string + // HandshakerServiceAddress represents the ALTS handshaker gRPC service + // address to connect to. + HandshakerServiceAddress string +} + +// DefaultClientOptions creates a new ClientOptions object with the default +// values. +func DefaultClientOptions() *ClientOptions { + return &ClientOptions{ + HandshakerServiceAddress: hypervisorHandshakerServiceAddress, + } +} + +// ServerOptions contains the server-side options of an ALTS channel. These +// options will be passed to the underlying ALTS handshaker. +type ServerOptions struct { + // HandshakerServiceAddress represents the ALTS handshaker gRPC service + // address to connect to. + HandshakerServiceAddress string +} + +// DefaultServerOptions creates a new ServerOptions object with the default +// values. +func DefaultServerOptions() *ServerOptions { + return &ServerOptions{ + HandshakerServiceAddress: hypervisorHandshakerServiceAddress, + } +} + +// altsTC is the credentials required for authenticating a connection using ALTS. +// It implements credentials.TransportCredentials interface. +type altsTC struct { + info *credentials.ProtocolInfo + side core.Side + accounts []string + hsAddress string +} + +// NewClientCreds constructs a client-side ALTS TransportCredentials object. +func NewClientCreds(opts *ClientOptions) credentials.TransportCredentials { + return newALTS(core.ClientSide, opts.TargetServiceAccounts, opts.HandshakerServiceAddress) +} + +// NewServerCreds constructs a server-side ALTS TransportCredentials object. +func NewServerCreds(opts *ServerOptions) credentials.TransportCredentials { + return newALTS(core.ServerSide, nil, opts.HandshakerServiceAddress) +} + +func newALTS(side core.Side, accounts []string, hsAddress string) credentials.TransportCredentials { + once.Do(func() { + vmOnGCP = isRunningOnGCP() + }) + + if hsAddress == "" { + hsAddress = hypervisorHandshakerServiceAddress + } + return &altsTC{ + info: &credentials.ProtocolInfo{ + SecurityProtocol: "alts", + SecurityVersion: "1.0", + }, + side: side, + accounts: accounts, + hsAddress: hsAddress, + } +} + +// ClientHandshake implements the client side handshake protocol. +func (g *altsTC) ClientHandshake(ctx context.Context, addr string, rawConn net.Conn) (_ net.Conn, _ credentials.AuthInfo, err error) { + if !vmOnGCP { + return nil, nil, ErrUntrustedPlatform + } + + // Connecting to ALTS handshaker service. + hsConn, err := service.Dial(g.hsAddress) + if err != nil { + return nil, nil, err + } + // Do not close hsConn since it is shared with other handshakes. + + // Possible context leak: + // The cancel function for the child context we create will only be + // called a non-nil error is returned. 
+ var cancel context.CancelFunc + ctx, cancel = context.WithCancel(ctx) + defer func() { + if err != nil { + cancel() + } + }() + + opts := handshaker.DefaultClientHandshakerOptions() + opts.TargetName = addr + opts.TargetServiceAccounts = g.accounts + opts.RPCVersions = &altspb.RpcProtocolVersions{ + MaxRpcVersion: maxRPCVersion, + MinRpcVersion: minRPCVersion, + } + chs, err := handshaker.NewClientHandshaker(ctx, hsConn, rawConn, opts) + defer func() { + if err != nil { + chs.Close() + } + }() + if err != nil { + return nil, nil, err + } + secConn, authInfo, err := chs.ClientHandshake(ctx) + if err != nil { + return nil, nil, err + } + altsAuthInfo, ok := authInfo.(AuthInfo) + if !ok { + return nil, nil, errors.New("client-side auth info is not of type alts.AuthInfo") + } + match, _ := checkRPCVersions(opts.RPCVersions, altsAuthInfo.PeerRPCVersions()) + if !match { + return nil, nil, fmt.Errorf("server-side RPC versions are not compatible with this client, local versions: %v, peer versions: %v", opts.RPCVersions, altsAuthInfo.PeerRPCVersions()) + } + return secConn, authInfo, nil +} + +// ServerHandshake implements the server side ALTS handshaker. +func (g *altsTC) ServerHandshake(rawConn net.Conn) (_ net.Conn, _ credentials.AuthInfo, err error) { + if !vmOnGCP { + return nil, nil, ErrUntrustedPlatform + } + // Connecting to ALTS handshaker service. + hsConn, err := service.Dial(g.hsAddress) + if err != nil { + return nil, nil, err + } + // Do not close hsConn since it's shared with other handshakes. + + ctx, cancel := context.WithTimeout(context.Background(), defaultTimeout) + defer cancel() + opts := handshaker.DefaultServerHandshakerOptions() + opts.RPCVersions = &altspb.RpcProtocolVersions{ + MaxRpcVersion: maxRPCVersion, + MinRpcVersion: minRPCVersion, + } + shs, err := handshaker.NewServerHandshaker(ctx, hsConn, rawConn, opts) + defer func() { + if err != nil { + shs.Close() + } + }() + if err != nil { + return nil, nil, err + } + secConn, authInfo, err := shs.ServerHandshake(ctx) + if err != nil { + return nil, nil, err + } + altsAuthInfo, ok := authInfo.(AuthInfo) + if !ok { + return nil, nil, errors.New("server-side auth info is not of type alts.AuthInfo") + } + match, _ := checkRPCVersions(opts.RPCVersions, altsAuthInfo.PeerRPCVersions()) + if !match { + return nil, nil, fmt.Errorf("client-side RPC versions is not compatible with this server, local versions: %v, peer versions: %v", opts.RPCVersions, altsAuthInfo.PeerRPCVersions()) + } + return secConn, authInfo, nil +} + +func (g *altsTC) Info() credentials.ProtocolInfo { + return *g.info +} + +func (g *altsTC) Clone() credentials.TransportCredentials { + info := *g.info + var accounts []string + if g.accounts != nil { + accounts = make([]string, len(g.accounts)) + copy(accounts, g.accounts) + } + return &altsTC{ + info: &info, + side: g.side, + hsAddress: g.hsAddress, + accounts: accounts, + } +} + +func (g *altsTC) OverrideServerName(serverNameOverride string) error { + g.info.ServerName = serverNameOverride + return nil +} + +// compareRPCVersion returns 0 if v1 == v2, 1 if v1 > v2 and -1 if v1 < v2. 
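The ClientOptions, NewClientCreds and AuthInfo pieces above are typically consumed as in the following sketch (assumed application code; the address is a placeholder, and as ErrUntrustedPlatform notes, ALTS is only supported on GCP):

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/alts"
	"google.golang.org/grpc/peer"
)

func main() {
	// Client side: ALTS transport credentials with the default handshaker address.
	creds := alts.NewClientCreds(alts.DefaultClientOptions())
	conn, err := grpc.Dial("greeter.internal.example:50051", grpc.WithTransportCredentials(creds))
	if err != nil {
		log.Fatalf("dial failed: %v", err)
	}
	defer conn.Close()
}

// logPeer shows how a server-side handler could recover the ALTS AuthInfo
// defined above from the RPC peer.
func logPeer(ctx context.Context) {
	if p, ok := peer.FromContext(ctx); ok {
		if ai, ok := p.AuthInfo.(alts.AuthInfo); ok {
			log.Printf("peer service account: %s", ai.PeerServiceAccount())
		}
	}
}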
+func compareRPCVersions(v1, v2 *altspb.RpcProtocolVersions_Version) int { + switch { + case v1.GetMajor() > v2.GetMajor(), + v1.GetMajor() == v2.GetMajor() && v1.GetMinor() > v2.GetMinor(): + return 1 + case v1.GetMajor() < v2.GetMajor(), + v1.GetMajor() == v2.GetMajor() && v1.GetMinor() < v2.GetMinor(): + return -1 + } + return 0 +} + +// checkRPCVersions performs a version check between local and peer rpc protocol +// versions. This function returns true if the check passes which means both +// parties agreed on a common rpc protocol to use, and false otherwise. The +// function also returns the highest common RPC protocol version both parties +// agreed on. +func checkRPCVersions(local, peer *altspb.RpcProtocolVersions) (bool, *altspb.RpcProtocolVersions_Version) { + if local == nil || peer == nil { + grpclog.Error("invalid checkRPCVersions argument, either local or peer is nil.") + return false, nil + } + + // maxCommonVersion is MIN(local.max, peer.max). + maxCommonVersion := local.GetMaxRpcVersion() + if compareRPCVersions(local.GetMaxRpcVersion(), peer.GetMaxRpcVersion()) > 0 { + maxCommonVersion = peer.GetMaxRpcVersion() + } + + // minCommonVersion is MAX(local.min, peer.min). + minCommonVersion := peer.GetMinRpcVersion() + if compareRPCVersions(local.GetMinRpcVersion(), peer.GetMinRpcVersion()) > 0 { + minCommonVersion = local.GetMinRpcVersion() + } + + if compareRPCVersions(maxCommonVersion, minCommonVersion) < 0 { + return false, nil + } + return true, maxCommonVersion +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/authinfo/authinfo.go b/vendor/google.golang.org/grpc/credentials/alts/internal/authinfo/authinfo.go new file mode 100644 index 0000000..ed628dc --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/authinfo/authinfo.go @@ -0,0 +1,87 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package authinfo provide authentication information returned by handshakers. +package authinfo + +import ( + "google.golang.org/grpc/credentials" + altspb "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" +) + +var _ credentials.AuthInfo = (*altsAuthInfo)(nil) + +// altsAuthInfo exposes security information from the ALTS handshake to the +// application. altsAuthInfo is immutable and implements credentials.AuthInfo. +type altsAuthInfo struct { + p *altspb.AltsContext +} + +// New returns a new altsAuthInfo object given handshaker results. +func New(result *altspb.HandshakerResult) credentials.AuthInfo { + return newAuthInfo(result) +} + +func newAuthInfo(result *altspb.HandshakerResult) *altsAuthInfo { + return &altsAuthInfo{ + p: &altspb.AltsContext{ + ApplicationProtocol: result.GetApplicationProtocol(), + RecordProtocol: result.GetRecordProtocol(), + // TODO: assign security level from result. 
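Stepping back to checkRPCVersions above: the negotiation is an interval intersection, maxCommon = MIN(local.max, peer.max) and minCommon = MAX(local.min, peer.min), with the handshake rejected when the interval is empty. The sketch below mirrors that logic with simplified local types (the real code uses the generated RpcProtocolVersions messages, whose Major/Minor fields are only inferred here from their getters):

package main

import "fmt"

// version stands in for RpcProtocolVersions_Version, purely for illustration.
type version struct{ major, minor uint32 }

func cmp(a, b version) int {
	switch {
	case a.major > b.major, a.major == b.major && a.minor > b.minor:
		return 1
	case a.major < b.major, a.major == b.major && a.minor < b.minor:
		return -1
	}
	return 0
}

func main() {
	localMax, localMin := version{2, 1}, version{2, 1} // local only speaks 2.1
	peerMax, peerMin := version{2, 0}, version{2, 0}   // peer only speaks 2.0

	maxCommon := localMax
	if cmp(localMax, peerMax) > 0 {
		maxCommon = peerMax // MIN(local.max, peer.max) = 2.0
	}
	minCommon := peerMin
	if cmp(localMin, peerMin) > 0 {
		minCommon = localMin // MAX(local.min, peer.min) = 2.1
	}
	fmt.Println(cmp(maxCommon, minCommon) >= 0) // false: no common version, so the handshake fails
}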
+ SecurityLevel: altspb.SecurityLevel_INTEGRITY_AND_PRIVACY, + PeerServiceAccount: result.GetPeerIdentity().GetServiceAccount(), + LocalServiceAccount: result.GetLocalIdentity().GetServiceAccount(), + PeerRpcVersions: result.GetPeerRpcVersions(), + }, + } +} + +// AuthType identifies the context as providing ALTS authentication information. +func (s *altsAuthInfo) AuthType() string { + return "alts" +} + +// ApplicationProtocol returns the context's application protocol. +func (s *altsAuthInfo) ApplicationProtocol() string { + return s.p.GetApplicationProtocol() +} + +// RecordProtocol returns the context's record protocol. +func (s *altsAuthInfo) RecordProtocol() string { + return s.p.GetRecordProtocol() +} + +// SecurityLevel returns the context's security level. +func (s *altsAuthInfo) SecurityLevel() altspb.SecurityLevel { + return s.p.GetSecurityLevel() +} + +// PeerServiceAccount returns the context's peer service account. +func (s *altsAuthInfo) PeerServiceAccount() string { + return s.p.GetPeerServiceAccount() +} + +// LocalServiceAccount returns the context's local service account. +func (s *altsAuthInfo) LocalServiceAccount() string { + return s.p.GetLocalServiceAccount() +} + +// PeerRPCVersions returns the context's peer RPC versions. +func (s *altsAuthInfo) PeerRPCVersions() *altspb.RpcProtocolVersions { + return s.p.GetPeerRpcVersions() +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/common.go b/vendor/google.golang.org/grpc/credentials/alts/internal/common.go new file mode 100644 index 0000000..33fba81 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/common.go @@ -0,0 +1,69 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate ./regenerate.sh + +// Package internal contains common core functionality for ALTS. +package internal + +import ( + "context" + "net" + + "google.golang.org/grpc/credentials" +) + +const ( + // ClientSide identifies the client in this communication. + ClientSide Side = iota + // ServerSide identifies the server in this communication. + ServerSide +) + +// PeerNotRespondingError is returned when a peer server is not responding +// after a channel has been established. It is treated as a temporary connection +// error and re-connection to the server should be attempted. +var PeerNotRespondingError = &peerNotRespondingError{} + +// Side identifies the party's role: client or server. +type Side int + +type peerNotRespondingError struct{} + +// Return an error message for the purpose of logging. +func (e *peerNotRespondingError) Error() string { + return "peer server is not responding and re-connection should be attempted." +} + +// Temporary indicates if this connection error is temporary or fatal. +func (e *peerNotRespondingError) Temporary() bool { + return true +} + +// Handshaker defines a ALTS handshaker interface. 
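PeerNotRespondingError above is documented as a temporary condition that callers may retry. A caller can detect it either by comparing against the exported sentinel or, more generally, by looking for a Temporary method, which is the property peerNotRespondingError exposes. A small sketch with a hypothetical error type standing in for the unexported one:

package main

import (
	"errors"
	"fmt"
)

// temporary reports whether an error advertises itself as transient, the way
// peerNotRespondingError does via its Temporary() bool method.
func temporary(err error) bool {
	var t interface{ Temporary() bool }
	return errors.As(err, &t) && t.Temporary()
}

// notResponding is an illustrative stand-in, not the real type.
type notResponding struct{}

func (notResponding) Error() string   { return "peer not responding" }
func (notResponding) Temporary() bool { return true }

func main() {
	fmt.Println(temporary(notResponding{}))     // true: reconnection is reasonable
	fmt.Println(temporary(errors.New("fatal"))) // false: no Temporary hint
}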
+type Handshaker interface { + // ClientHandshake starts and completes a client-side handshaking and + // returns a secure connection and corresponding auth information. + ClientHandshake(ctx context.Context) (net.Conn, credentials.AuthInfo, error) + // ServerHandshake starts and completes a server-side handshaking and + // returns a secure connection and corresponding auth information. + ServerHandshake(ctx context.Context) (net.Conn, credentials.AuthInfo, error) + // Close terminates the Handshaker. It should be called when the caller + // obtains the secure connection. + Close() +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aeadrekey.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aeadrekey.go new file mode 100644 index 0000000..43726e8 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aeadrekey.go @@ -0,0 +1,131 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package conn + +import ( + "bytes" + "crypto/aes" + "crypto/cipher" + "crypto/hmac" + "crypto/sha256" + "encoding/binary" + "fmt" + "strconv" +) + +// rekeyAEAD holds the necessary information for an AEAD based on +// AES-GCM that performs nonce-based key derivation and XORs the +// nonce with a random mask. +type rekeyAEAD struct { + kdfKey []byte + kdfCounter []byte + nonceMask []byte + nonceBuf []byte + gcmAEAD cipher.AEAD +} + +// KeySizeError signals that the given key does not have the correct size. +type KeySizeError int + +func (k KeySizeError) Error() string { + return "alts/conn: invalid key size " + strconv.Itoa(int(k)) +} + +// newRekeyAEAD creates a new instance of aes128gcm with rekeying. +// The key argument should be 44 bytes, the first 32 bytes are used as a key +// for HKDF-expand and the remainining 12 bytes are used as a random mask for +// the counter. +func newRekeyAEAD(key []byte) (*rekeyAEAD, error) { + k := len(key) + if k != kdfKeyLen+nonceLen { + return nil, KeySizeError(k) + } + return &rekeyAEAD{ + kdfKey: key[:kdfKeyLen], + kdfCounter: make([]byte, kdfCounterLen), + nonceMask: key[kdfKeyLen:], + nonceBuf: make([]byte, nonceLen), + gcmAEAD: nil, + }, nil +} + +// Seal rekeys if nonce[2:8] is different than in the last call, masks the nonce, +// and calls Seal for aes128gcm. +func (s *rekeyAEAD) Seal(dst, nonce, plaintext, additionalData []byte) []byte { + if err := s.rekeyIfRequired(nonce); err != nil { + panic(fmt.Sprintf("Rekeying failed with: %s", err.Error())) + } + maskNonce(s.nonceBuf, nonce, s.nonceMask) + return s.gcmAEAD.Seal(dst, s.nonceBuf, plaintext, additionalData) +} + +// Open rekeys if nonce[2:8] is different than in the last call, masks the nonce, +// and calls Open for aes128gcm. 
+func (s *rekeyAEAD) Open(dst, nonce, ciphertext, additionalData []byte) ([]byte, error) { + if err := s.rekeyIfRequired(nonce); err != nil { + return nil, err + } + maskNonce(s.nonceBuf, nonce, s.nonceMask) + return s.gcmAEAD.Open(dst, s.nonceBuf, ciphertext, additionalData) +} + +// rekeyIfRequired creates a new aes128gcm AEAD if the existing AEAD is nil +// or cannot be used with given nonce. +func (s *rekeyAEAD) rekeyIfRequired(nonce []byte) error { + newKdfCounter := nonce[kdfCounterOffset : kdfCounterOffset+kdfCounterLen] + if s.gcmAEAD != nil && bytes.Equal(newKdfCounter, s.kdfCounter) { + return nil + } + copy(s.kdfCounter, newKdfCounter) + a, err := aes.NewCipher(hkdfExpand(s.kdfKey, s.kdfCounter)) + if err != nil { + return err + } + s.gcmAEAD, err = cipher.NewGCM(a) + return err +} + +// maskNonce XORs the given nonce with the mask and stores the result in dst. +func maskNonce(dst, nonce, mask []byte) { + nonce1 := binary.LittleEndian.Uint64(nonce[:sizeUint64]) + nonce2 := binary.LittleEndian.Uint32(nonce[sizeUint64:]) + mask1 := binary.LittleEndian.Uint64(mask[:sizeUint64]) + mask2 := binary.LittleEndian.Uint32(mask[sizeUint64:]) + binary.LittleEndian.PutUint64(dst[:sizeUint64], nonce1^mask1) + binary.LittleEndian.PutUint32(dst[sizeUint64:], nonce2^mask2) +} + +// NonceSize returns the required nonce size. +func (s *rekeyAEAD) NonceSize() int { + return s.gcmAEAD.NonceSize() +} + +// Overhead returns the ciphertext overhead. +func (s *rekeyAEAD) Overhead() int { + return s.gcmAEAD.Overhead() +} + +// hkdfExpand computes the first 16 bytes of the HKDF-expand function +// defined in RFC5869. +func hkdfExpand(key, info []byte) []byte { + mac := hmac.New(sha256.New, key) + mac.Write(info) + mac.Write([]byte{0x01}[:]) + return mac.Sum(nil)[:aeadKeyLen] +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcm.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcm.go new file mode 100644 index 0000000..04e0adb --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcm.go @@ -0,0 +1,105 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package conn + +import ( + "crypto/aes" + "crypto/cipher" + + core "google.golang.org/grpc/credentials/alts/internal" +) + +const ( + // Overflow length n in bytes, never encrypt more than 2^(n*8) frames (in + // each direction). + overflowLenAES128GCM = 5 +) + +// aes128gcm is the struct that holds necessary information for ALTS record. +// The counter value is NOT included in the payload during the encryption and +// decryption operations. +type aes128gcm struct { + // inCounter is used in ALTS record to check that incoming counters are + // as expected, since ALTS record guarantees that messages are unwrapped + // in the same order that the peer wrapped them. + inCounter Counter + outCounter Counter + aead cipher.AEAD +} + +// NewAES128GCM creates an instance that uses aes128gcm for ALTS record. 
+func NewAES128GCM(side core.Side, key []byte) (ALTSRecordCrypto, error) { + c, err := aes.NewCipher(key) + if err != nil { + return nil, err + } + a, err := cipher.NewGCM(c) + if err != nil { + return nil, err + } + return &aes128gcm{ + inCounter: NewInCounter(side, overflowLenAES128GCM), + outCounter: NewOutCounter(side, overflowLenAES128GCM), + aead: a, + }, nil +} + +// Encrypt is the encryption function. dst can contain bytes at the beginning of +// the ciphertext that will not be encrypted but will be authenticated. If dst +// has enough capacity to hold these bytes, the ciphertext and the tag, no +// allocation and copy operations will be performed. dst and plaintext do not +// overlap. +func (s *aes128gcm) Encrypt(dst, plaintext []byte) ([]byte, error) { + // If we need to allocate an output buffer, we want to include space for + // GCM tag to avoid forcing ALTS record to reallocate as well. + dlen := len(dst) + dst, out := SliceForAppend(dst, len(plaintext)+GcmTagSize) + seq, err := s.outCounter.Value() + if err != nil { + return nil, err + } + data := out[:len(plaintext)] + copy(data, plaintext) // data may alias plaintext + + // Seal appends the ciphertext and the tag to its first argument and + // returns the updated slice. However, SliceForAppend above ensures that + // dst has enough capacity to avoid a reallocation and copy due to the + // append. + dst = s.aead.Seal(dst[:dlen], seq, data, nil) + s.outCounter.Inc() + return dst, nil +} + +func (s *aes128gcm) EncryptionOverhead() int { + return GcmTagSize +} + +func (s *aes128gcm) Decrypt(dst, ciphertext []byte) ([]byte, error) { + seq, err := s.inCounter.Value() + if err != nil { + return nil, err + } + // If dst is equal to ciphertext[:0], ciphertext storage is reused. + plaintext, err := s.aead.Open(dst, seq, ciphertext, nil) + if err != nil { + return nil, ErrAuth + } + s.inCounter.Inc() + return plaintext, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcmrekey.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcmrekey.go new file mode 100644 index 0000000..6a9035e --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/aes128gcmrekey.go @@ -0,0 +1,116 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package conn + +import ( + "crypto/cipher" + + core "google.golang.org/grpc/credentials/alts/internal" +) + +const ( + // Overflow length n in bytes, never encrypt more than 2^(n*8) frames (in + // each direction). + overflowLenAES128GCMRekey = 8 + nonceLen = 12 + aeadKeyLen = 16 + kdfKeyLen = 32 + kdfCounterOffset = 2 + kdfCounterLen = 6 + sizeUint64 = 8 +) + +// aes128gcmRekey is the struct that holds necessary information for ALTS record. +// The counter value is NOT included in the payload during the encryption and +// decryption operations. 
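The Encrypt/Decrypt contract shown above (counters advance in lockstep, GcmTagSize bytes of overhead, and in-place reuse of the ciphertext buffer via dst) can be exercised directly. Because these are internal packages, the sketch below only compiles from inside the grpc module and is shown purely to illustrate the contract; the all-zero 16-byte key is a placeholder:

package main

import (
	"fmt"

	core "google.golang.org/grpc/credentials/alts/internal"
	"google.golang.org/grpc/credentials/alts/internal/conn"
)

func main() {
	key := make([]byte, 16) // AES-128 key; all zeros only for illustration

	// The client's outgoing counter matches the server's incoming counter,
	// so frames sealed by one side open cleanly on the other.
	client, _ := conn.NewAES128GCM(core.ClientSide, key)
	server, _ := conn.NewAES128GCM(core.ServerSide, key)

	ct, _ := client.Encrypt(nil, []byte("hello"))
	fmt.Println(len(ct)) // 5 payload bytes + GcmTagSize (16) = 21

	pt, err := server.Decrypt(ct[:0], ct) // reuse ciphertext storage for the plaintext
	fmt.Println(string(pt), err)          // "hello" <nil>
}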
+type aes128gcmRekey struct { + // inCounter is used in ALTS record to check that incoming counters are + // as expected, since ALTS record guarantees that messages are unwrapped + // in the same order that the peer wrapped them. + inCounter Counter + outCounter Counter + inAEAD cipher.AEAD + outAEAD cipher.AEAD +} + +// NewAES128GCMRekey creates an instance that uses aes128gcm with rekeying +// for ALTS record. The key argument should be 44 bytes, the first 32 bytes +// are used as a key for HKDF-expand and the remainining 12 bytes are used +// as a random mask for the counter. +func NewAES128GCMRekey(side core.Side, key []byte) (ALTSRecordCrypto, error) { + inCounter := NewInCounter(side, overflowLenAES128GCMRekey) + outCounter := NewOutCounter(side, overflowLenAES128GCMRekey) + inAEAD, err := newRekeyAEAD(key) + if err != nil { + return nil, err + } + outAEAD, err := newRekeyAEAD(key) + if err != nil { + return nil, err + } + return &aes128gcmRekey{ + inCounter, + outCounter, + inAEAD, + outAEAD, + }, nil +} + +// Encrypt is the encryption function. dst can contain bytes at the beginning of +// the ciphertext that will not be encrypted but will be authenticated. If dst +// has enough capacity to hold these bytes, the ciphertext and the tag, no +// allocation and copy operations will be performed. dst and plaintext do not +// overlap. +func (s *aes128gcmRekey) Encrypt(dst, plaintext []byte) ([]byte, error) { + // If we need to allocate an output buffer, we want to include space for + // GCM tag to avoid forcing ALTS record to reallocate as well. + dlen := len(dst) + dst, out := SliceForAppend(dst, len(plaintext)+GcmTagSize) + seq, err := s.outCounter.Value() + if err != nil { + return nil, err + } + data := out[:len(plaintext)] + copy(data, plaintext) // data may alias plaintext + + // Seal appends the ciphertext and the tag to its first argument and + // returns the updated slice. However, SliceForAppend above ensures that + // dst has enough capacity to avoid a reallocation and copy due to the + // append. + dst = s.outAEAD.Seal(dst[:dlen], seq, data, nil) + s.outCounter.Inc() + return dst, nil +} + +func (s *aes128gcmRekey) EncryptionOverhead() int { + return GcmTagSize +} + +func (s *aes128gcmRekey) Decrypt(dst, ciphertext []byte) ([]byte, error) { + seq, err := s.inCounter.Value() + if err != nil { + return nil, err + } + plaintext, err := s.inAEAD.Open(dst, seq, ciphertext, nil) + if err != nil { + return nil, ErrAuth + } + s.inCounter.Inc() + return plaintext, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/common.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/common.go new file mode 100644 index 0000000..1795d0c --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/common.go @@ -0,0 +1,70 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package conn + +import ( + "encoding/binary" + "errors" + "fmt" +) + +const ( + // GcmTagSize is the GCM tag size is the difference in length between + // plaintext and ciphertext. From crypto/cipher/gcm.go in Go crypto + // library. + GcmTagSize = 16 +) + +// ErrAuth occurs on authentication failure. +var ErrAuth = errors.New("message authentication failed") + +// SliceForAppend takes a slice and a requested number of bytes. It returns a +// slice with the contents of the given slice followed by that many bytes and a +// second slice that aliases into it and contains only the extra bytes. If the +// original slice has sufficient capacity then no allocation is performed. +func SliceForAppend(in []byte, n int) (head, tail []byte) { + if total := len(in) + n; cap(in) >= total { + head = in[:total] + } else { + head = make([]byte, total) + copy(head, in) + } + tail = head[len(in):] + return head, tail +} + +// ParseFramedMsg parse the provided buffer and returns a frame of the format +// msgLength+msg and any remaining bytes in that buffer. +func ParseFramedMsg(b []byte, maxLen uint32) ([]byte, []byte, error) { + // If the size field is not complete, return the provided buffer as + // remaining buffer. + if len(b) < MsgLenFieldSize { + return nil, b, nil + } + msgLenField := b[:MsgLenFieldSize] + length := binary.LittleEndian.Uint32(msgLenField) + if length > maxLen { + return nil, nil, fmt.Errorf("received the frame length %d larger than the limit %d", length, maxLen) + } + if len(b) < int(length)+4 { // account for the first 4 msg length bytes. + // Frame is not complete yet. + return nil, b, nil + } + return b[:MsgLenFieldSize+length], b[MsgLenFieldSize+length:], nil +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/counter.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/counter.go new file mode 100644 index 0000000..9f00aca --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/counter.go @@ -0,0 +1,62 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package conn + +import ( + "errors" +) + +const counterLen = 12 + +var ( + errInvalidCounter = errors.New("invalid counter") +) + +// Counter is a 96-bit, little-endian counter. +type Counter struct { + value [counterLen]byte + invalid bool + overflowLen int +} + +// Value returns the current value of the counter as a byte slice. +func (c *Counter) Value() ([]byte, error) { + if c.invalid { + return nil, errInvalidCounter + } + return c.value[:], nil +} + +// Inc increments the counter and checks for overflow. +func (c *Counter) Inc() { + // If the counter is already invalid, there is no need to increase it. 
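Returning to ParseFramedMsg above: it splits a byte stream into length-prefixed frames, where the 4-byte little-endian length counts everything after the length field itself (the 4-byte message type plus the ciphertext). A worked example, again importable only from inside the grpc module since conn is internal:

package main

import (
	"encoding/binary"
	"fmt"

	"google.golang.org/grpc/credentials/alts/internal/conn"
)

func main() {
	// One complete frame followed by two leftover bytes of the next frame's
	// size field.
	buf := make([]byte, 0, 16)
	buf = append(buf, 5, 0, 0, 0) // msg_length = 5 (little endian)
	buf = append(buf, 6, 0, 0, 0) // message type 0x06 (altsRecordMsgType)
	buf = append(buf, 'x')        // 1 byte of "ciphertext"
	buf = append(buf, 9, 0)       // incomplete size field of the next frame

	frame, rest, err := conn.ParseFramedMsg(buf, 1024)
	fmt.Println(len(frame), len(rest), err)            // 9 2 <nil>
	fmt.Println(binary.LittleEndian.Uint32(frame[:4])) // 5
}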
+ if c.invalid { + return + } + i := 0 + for ; i < c.overflowLen; i++ { + c.value[i]++ + if c.value[i] != 0 { + break + } + } + if i == c.overflowLen { + c.invalid = true + } +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/record.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/record.go new file mode 100644 index 0000000..fd5a53d --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/record.go @@ -0,0 +1,271 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package conn contains an implementation of a secure channel created by gRPC +// handshakers. +package conn + +import ( + "encoding/binary" + "fmt" + "math" + "net" + + core "google.golang.org/grpc/credentials/alts/internal" +) + +// ALTSRecordCrypto is the interface for gRPC ALTS record protocol. +type ALTSRecordCrypto interface { + // Encrypt encrypts the plaintext and computes the tag (if any) of dst + // and plaintext, dst and plaintext do not overlap. + Encrypt(dst, plaintext []byte) ([]byte, error) + // EncryptionOverhead returns the tag size (if any) in bytes. + EncryptionOverhead() int + // Decrypt decrypts ciphertext and verify the tag (if any). dst and + // ciphertext may alias exactly or not at all. To reuse ciphertext's + // storage for the decrypted output, use ciphertext[:0] as dst. + Decrypt(dst, ciphertext []byte) ([]byte, error) +} + +// ALTSRecordFunc is a function type for factory functions that create +// ALTSRecordCrypto instances. +type ALTSRecordFunc func(s core.Side, keyData []byte) (ALTSRecordCrypto, error) + +const ( + // MsgLenFieldSize is the byte size of the frame length field of a + // framed message. + MsgLenFieldSize = 4 + // The byte size of the message type field of a framed message. + msgTypeFieldSize = 4 + // The bytes size limit for a ALTS record message. + altsRecordLengthLimit = 1024 * 1024 // 1 MiB + // The default bytes size of a ALTS record message. + altsRecordDefaultLength = 4 * 1024 // 4KiB + // Message type value included in ALTS record framing. + altsRecordMsgType = uint32(0x06) + // The initial write buffer size. + altsWriteBufferInitialSize = 32 * 1024 // 32KiB + // The maximum write buffer size. This *must* be multiple of + // altsRecordDefaultLength. + altsWriteBufferMaxSize = 512 * 1024 // 512KiB +) + +var ( + protocols = make(map[string]ALTSRecordFunc) +) + +// RegisterProtocol register a ALTS record encryption protocol. +func RegisterProtocol(protocol string, f ALTSRecordFunc) error { + if _, ok := protocols[protocol]; ok { + return fmt.Errorf("protocol %v is already registered", protocol) + } + protocols[protocol] = f + return nil +} + +// conn represents a secured connection. It implements the net.Conn interface. +type conn struct { + net.Conn + crypto ALTSRecordCrypto + // buf holds data that has been read from the connection and decrypted, + // but has not yet been returned by Read. 
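RegisterProtocol above is how record protocols are wired up, and NewAES128GCM already has the ALTSRecordFunc signature, so registration is a one-liner. The protocol name below is hypothetical; the vendored handshaker package registers "ALTSRP_GCM_AES128_REKEY" with NewAES128GCMRekey in exactly the same way (internal-only import, as before):

package main

import (
	"fmt"

	"google.golang.org/grpc/credentials/alts/internal/conn"
)

func main() {
	// Hypothetical protocol name, used only for this sketch.
	if err := conn.RegisterProtocol("EXAMPLE_GCM_AES128", conn.NewAES128GCM); err != nil {
		fmt.Println(err)
	}
	// Registering the same name twice is rejected.
	fmt.Println(conn.RegisterProtocol("EXAMPLE_GCM_AES128", conn.NewAES128GCM))
}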
+ buf []byte + payloadLengthLimit int + // protected holds data read from the network but have not yet been + // decrypted. This data might not compose a complete frame. + protected []byte + // writeBuf is a buffer used to contain encrypted frames before being + // written to the network. + writeBuf []byte + // nextFrame stores the next frame (in protected buffer) info. + nextFrame []byte + // overhead is the calculated overhead of each frame. + overhead int +} + +// NewConn creates a new secure channel instance given the other party role and +// handshaking result. +func NewConn(c net.Conn, side core.Side, recordProtocol string, key []byte, protected []byte) (net.Conn, error) { + newCrypto := protocols[recordProtocol] + if newCrypto == nil { + return nil, fmt.Errorf("negotiated unknown next_protocol %q", recordProtocol) + } + crypto, err := newCrypto(side, key) + if err != nil { + return nil, fmt.Errorf("protocol %q: %v", recordProtocol, err) + } + overhead := MsgLenFieldSize + msgTypeFieldSize + crypto.EncryptionOverhead() + payloadLengthLimit := altsRecordDefaultLength - overhead + if protected == nil { + // We pre-allocate protected to be of size + // 2*altsRecordDefaultLength-1 during initialization. We only + // read from the network into protected when protected does not + // contain a complete frame, which is at most + // altsRecordDefaultLength-1 (bytes). And we read at most + // altsRecordDefaultLength (bytes) data into protected at one + // time. Therefore, 2*altsRecordDefaultLength-1 is large enough + // to buffer data read from the network. + protected = make([]byte, 0, 2*altsRecordDefaultLength-1) + } + + altsConn := &conn{ + Conn: c, + crypto: crypto, + payloadLengthLimit: payloadLengthLimit, + protected: protected, + writeBuf: make([]byte, altsWriteBufferInitialSize), + nextFrame: protected, + overhead: overhead, + } + return altsConn, nil +} + +// Read reads and decrypts a frame from the underlying connection, and copies the +// decrypted payload into b. If the size of the payload is greater than len(b), +// Read retains the remaining bytes in an internal buffer, and subsequent calls +// to Read will read from this buffer until it is exhausted. +func (p *conn) Read(b []byte) (n int, err error) { + if len(p.buf) == 0 { + var framedMsg []byte + framedMsg, p.nextFrame, err = ParseFramedMsg(p.nextFrame, altsRecordLengthLimit) + if err != nil { + return n, err + } + // Check whether the next frame to be decrypted has been + // completely received yet. + if len(framedMsg) == 0 { + copy(p.protected, p.nextFrame) + p.protected = p.protected[:len(p.nextFrame)] + // Always copy next incomplete frame to the beginning of + // the protected buffer and reset nextFrame to it. + p.nextFrame = p.protected + } + // Check whether a complete frame has been received yet. + for len(framedMsg) == 0 { + if len(p.protected) == cap(p.protected) { + tmp := make([]byte, len(p.protected), cap(p.protected)+altsRecordDefaultLength) + copy(tmp, p.protected) + p.protected = tmp + } + n, err = p.Conn.Read(p.protected[len(p.protected):min(cap(p.protected), len(p.protected)+altsRecordDefaultLength)]) + if err != nil { + return 0, err + } + p.protected = p.protected[:len(p.protected)+n] + framedMsg, p.nextFrame, err = ParseFramedMsg(p.protected, altsRecordLengthLimit) + if err != nil { + return 0, err + } + } + // Now we have a complete frame, decrypted it. 
+ msg := framedMsg[MsgLenFieldSize:] + msgType := binary.LittleEndian.Uint32(msg[:msgTypeFieldSize]) + if msgType&0xff != altsRecordMsgType { + return 0, fmt.Errorf("received frame with incorrect message type %v, expected lower byte %v", + msgType, altsRecordMsgType) + } + ciphertext := msg[msgTypeFieldSize:] + + // Decrypt requires that if the dst and ciphertext alias, they + // must alias exactly. Code here used to use msg[:0], but msg + // starts MsgLenFieldSize+msgTypeFieldSize bytes earlier than + // ciphertext, so they alias inexactly. Using ciphertext[:0] + // arranges the appropriate aliasing without needing to copy + // ciphertext or use a separate destination buffer. For more info + // check: https://golang.org/pkg/crypto/cipher/#AEAD. + p.buf, err = p.crypto.Decrypt(ciphertext[:0], ciphertext) + if err != nil { + return 0, err + } + } + + n = copy(b, p.buf) + p.buf = p.buf[n:] + return n, nil +} + +// Write encrypts, frames, and writes bytes from b to the underlying connection. +func (p *conn) Write(b []byte) (n int, err error) { + n = len(b) + // Calculate the output buffer size with framing and encryption overhead. + numOfFrames := int(math.Ceil(float64(len(b)) / float64(p.payloadLengthLimit))) + size := len(b) + numOfFrames*p.overhead + // If writeBuf is too small, increase its size up to the maximum size. + partialBSize := len(b) + if size > altsWriteBufferMaxSize { + size = altsWriteBufferMaxSize + const numOfFramesInMaxWriteBuf = altsWriteBufferMaxSize / altsRecordDefaultLength + partialBSize = numOfFramesInMaxWriteBuf * p.payloadLengthLimit + } + if len(p.writeBuf) < size { + p.writeBuf = make([]byte, size) + } + + for partialBStart := 0; partialBStart < len(b); partialBStart += partialBSize { + partialBEnd := partialBStart + partialBSize + if partialBEnd > len(b) { + partialBEnd = len(b) + } + partialB := b[partialBStart:partialBEnd] + writeBufIndex := 0 + for len(partialB) > 0 { + payloadLen := len(partialB) + if payloadLen > p.payloadLengthLimit { + payloadLen = p.payloadLengthLimit + } + buf := partialB[:payloadLen] + partialB = partialB[payloadLen:] + + // Write buffer contains: length, type, payload, and tag + // if any. + + // 1. Fill in type field. + msg := p.writeBuf[writeBufIndex+MsgLenFieldSize:] + binary.LittleEndian.PutUint32(msg, altsRecordMsgType) + + // 2. Encrypt the payload and create a tag if any. + msg, err = p.crypto.Encrypt(msg[:msgTypeFieldSize], buf) + if err != nil { + return n, err + } + + // 3. Fill in the size field. + binary.LittleEndian.PutUint32(p.writeBuf[writeBufIndex:], uint32(len(msg))) + + // 4. Increase writeBufIndex. + writeBufIndex += len(buf) + p.overhead + } + nn, err := p.Conn.Write(p.writeBuf[:writeBufIndex]) + if err != nil { + // We need to calculate the actual data size that was + // written. This means we need to remove header, + // encryption overheads, and any partially-written + // frame data. + numOfWrittenFrames := int(math.Floor(float64(nn) / float64(altsRecordDefaultLength))) + return partialBStart + numOfWrittenFrames*p.payloadLengthLimit, err + } + } + return n, nil +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/conn/utils.go b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/utils.go new file mode 100644 index 0000000..84821fa --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/conn/utils.go @@ -0,0 +1,63 @@ +/* + * + * Copyright 2018 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package conn + +import core "google.golang.org/grpc/credentials/alts/internal" + +// NewOutCounter returns an outgoing counter initialized to the starting sequence +// number for the client/server side of a connection. +func NewOutCounter(s core.Side, overflowLen int) (c Counter) { + c.overflowLen = overflowLen + if s == core.ServerSide { + // Server counters in ALTS record have the little-endian high bit + // set. + c.value[counterLen-1] = 0x80 + } + return +} + +// NewInCounter returns an incoming counter initialized to the starting sequence +// number for the client/server side of a connection. This is used in ALTS record +// to check that incoming counters are as expected, since ALTS record guarantees +// that messages are unwrapped in the same order that the peer wrapped them. +func NewInCounter(s core.Side, overflowLen int) (c Counter) { + c.overflowLen = overflowLen + if s == core.ClientSide { + // Server counters in ALTS record have the little-endian high bit + // set. + c.value[counterLen-1] = 0x80 + } + return +} + +// CounterFromValue creates a new counter given an initial value. +func CounterFromValue(value []byte, overflowLen int) (c Counter) { + c.overflowLen = overflowLen + copy(c.value[:], value) + return +} + +// CounterSide returns the connection side (client/server) a sequence counter is +// associated with. +func CounterSide(c []byte) core.Side { + if c[counterLen-1]&0x80 == 0x80 { + return core.ServerSide + } + return core.ClientSide +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/handshaker.go b/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/handshaker.go new file mode 100644 index 0000000..49c22c1 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/handshaker.go @@ -0,0 +1,365 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package handshaker provides ALTS handshaking functionality for GCP. 
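The counter helpers above encode direction in the counter's most significant byte: counters for server-to-client traffic have the little-endian high bit set, which is what CounterSide inspects. A short sketch of that convention (internal imports, illustration only; Side prints as its integer value since it has no String method):

package main

import (
	"fmt"

	core "google.golang.org/grpc/credentials/alts/internal"
	"google.golang.org/grpc/credentials/alts/internal/conn"
)

func main() {
	out := conn.NewOutCounter(core.ServerSide, 5) // server -> client direction
	in := conn.NewInCounter(core.ClientSide, 5)   // what the client expects to receive

	ov, _ := out.Value()
	iv, _ := in.Value()
	fmt.Println(conn.CounterSide(ov)) // core.ServerSide: high bit is set
	fmt.Println(conn.CounterSide(iv)) // also core.ServerSide: both ends agree on direction
}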
+package handshaker + +import ( + "context" + "errors" + "fmt" + "io" + "net" + "sync" + + grpc "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + core "google.golang.org/grpc/credentials/alts/internal" + "google.golang.org/grpc/credentials/alts/internal/authinfo" + "google.golang.org/grpc/credentials/alts/internal/conn" + altsgrpc "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" + altspb "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" +) + +const ( + // The maximum byte size of receive frames. + frameLimit = 64 * 1024 // 64 KB + rekeyRecordProtocolName = "ALTSRP_GCM_AES128_REKEY" + // maxPendingHandshakes represents the maximum number of concurrent + // handshakes. + maxPendingHandshakes = 100 +) + +var ( + hsProtocol = altspb.HandshakeProtocol_ALTS + appProtocols = []string{"grpc"} + recordProtocols = []string{rekeyRecordProtocolName} + keyLength = map[string]int{ + rekeyRecordProtocolName: 44, + } + altsRecordFuncs = map[string]conn.ALTSRecordFunc{ + // ALTS handshaker protocols. + rekeyRecordProtocolName: func(s core.Side, keyData []byte) (conn.ALTSRecordCrypto, error) { + return conn.NewAES128GCMRekey(s, keyData) + }, + } + // control number of concurrent created (but not closed) handshakers. + mu sync.Mutex + concurrentHandshakes = int64(0) + // errDropped occurs when maxPendingHandshakes is reached. + errDropped = errors.New("maximum number of concurrent ALTS handshakes is reached") +) + +func init() { + for protocol, f := range altsRecordFuncs { + if err := conn.RegisterProtocol(protocol, f); err != nil { + panic(err) + } + } +} + +func acquire(n int64) bool { + mu.Lock() + success := maxPendingHandshakes-concurrentHandshakes >= n + if success { + concurrentHandshakes += n + } + mu.Unlock() + return success +} + +func release(n int64) { + mu.Lock() + concurrentHandshakes -= n + if concurrentHandshakes < 0 { + mu.Unlock() + panic("bad release") + } + mu.Unlock() +} + +// ClientHandshakerOptions contains the client handshaker options that can +// provided by the caller. +type ClientHandshakerOptions struct { + // ClientIdentity is the handshaker client local identity. + ClientIdentity *altspb.Identity + // TargetName is the server service account name for secure name + // checking. + TargetName string + // TargetServiceAccounts contains a list of expected target service + // accounts. One of these accounts should match one of the accounts in + // the handshaker results. Otherwise, the handshake fails. + TargetServiceAccounts []string + // RPCVersions specifies the gRPC versions accepted by the client. + RPCVersions *altspb.RpcProtocolVersions +} + +// ServerHandshakerOptions contains the server handshaker options that can +// provided by the caller. +type ServerHandshakerOptions struct { + // RPCVersions specifies the gRPC versions accepted by the server. + RPCVersions *altspb.RpcProtocolVersions +} + +// DefaultClientHandshakerOptions returns the default client handshaker options. +func DefaultClientHandshakerOptions() *ClientHandshakerOptions { + return &ClientHandshakerOptions{} +} + +// DefaultServerHandshakerOptions returns the default client handshaker options. +func DefaultServerHandshakerOptions() *ServerHandshakerOptions { + return &ServerHandshakerOptions{} +} + +// TODO: add support for future local and remote endpoint in both client options +// and server options (server options struct does not exist now. When +// caller can provide endpoints, it should be created. 
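The options and constructors above are consumed by the public alts package roughly as in the sketch below, which mirrors the ClientHandshake flow from alts.go earlier in this patch. The addresses and service account are placeholders, and the imports are internal to the grpc module, so this is an illustration of the flow rather than a supported way to use the API:

package main

import (
	"context"
	"net"

	"google.golang.org/grpc/credentials/alts/internal/handshaker"
	"google.golang.org/grpc/credentials/alts/internal/handshaker/service"
)

func main() {
	// Dial the ALTS handshaker service (the connection is shared and kept
	// open) and the peer we want to talk to. Both addresses are placeholders.
	hsConn, err := service.Dial("metadata.google.internal:8080")
	if err != nil {
		panic(err)
	}
	rawConn, err := net.Dial("tcp", "10.0.0.1:443")
	if err != nil {
		panic(err)
	}

	opts := handshaker.DefaultClientHandshakerOptions()
	opts.TargetServiceAccounts = []string{"expected-sa@example.iam.gserviceaccount.com"}

	hs, err := handshaker.NewClientHandshaker(context.Background(), hsConn, rawConn, opts)
	if err != nil {
		panic(err)
	}
	defer hs.Close() // per the Handshaker contract, close once the secure conn is obtained

	secConn, authInfo, err := hs.ClientHandshake(context.Background())
	if err != nil {
		panic(err)
	}
	_ = secConn  // a net.Conn speaking the negotiated ALTS record protocol
	_ = authInfo // a credentials.AuthInfo exposing the ALTS getters
}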
+ +// altsHandshaker is used to complete a ALTS handshaking between client and +// server. This handshaker talks to the ALTS handshaker service in the metadata +// server. +type altsHandshaker struct { + // RPC stream used to access the ALTS Handshaker service. + stream altsgrpc.HandshakerService_DoHandshakeClient + // the connection to the peer. + conn net.Conn + // client handshake options. + clientOpts *ClientHandshakerOptions + // server handshake options. + serverOpts *ServerHandshakerOptions + // defines the side doing the handshake, client or server. + side core.Side +} + +// NewClientHandshaker creates a ALTS handshaker for GCP which contains an RPC +// stub created using the passed conn and used to talk to the ALTS Handshaker +// service in the metadata server. +func NewClientHandshaker(ctx context.Context, conn *grpc.ClientConn, c net.Conn, opts *ClientHandshakerOptions) (core.Handshaker, error) { + stream, err := altsgrpc.NewHandshakerServiceClient(conn).DoHandshake(ctx, grpc.WaitForReady(true)) + if err != nil { + return nil, err + } + return &altsHandshaker{ + stream: stream, + conn: c, + clientOpts: opts, + side: core.ClientSide, + }, nil +} + +// NewServerHandshaker creates a ALTS handshaker for GCP which contains an RPC +// stub created using the passed conn and used to talk to the ALTS Handshaker +// service in the metadata server. +func NewServerHandshaker(ctx context.Context, conn *grpc.ClientConn, c net.Conn, opts *ServerHandshakerOptions) (core.Handshaker, error) { + stream, err := altsgrpc.NewHandshakerServiceClient(conn).DoHandshake(ctx, grpc.WaitForReady(true)) + if err != nil { + return nil, err + } + return &altsHandshaker{ + stream: stream, + conn: c, + serverOpts: opts, + side: core.ServerSide, + }, nil +} + +// ClientHandshake starts and completes a client ALTS handshaking for GCP. Once +// done, ClientHandshake returns a secure connection. +func (h *altsHandshaker) ClientHandshake(ctx context.Context) (net.Conn, credentials.AuthInfo, error) { + if !acquire(1) { + return nil, nil, errDropped + } + defer release(1) + + if h.side != core.ClientSide { + return nil, nil, errors.New("only handshakers created using NewClientHandshaker can perform a client handshaker") + } + + // Create target identities from service account list. + targetIdentities := make([]*altspb.Identity, 0, len(h.clientOpts.TargetServiceAccounts)) + for _, account := range h.clientOpts.TargetServiceAccounts { + targetIdentities = append(targetIdentities, &altspb.Identity{ + IdentityOneof: &altspb.Identity_ServiceAccount{ + ServiceAccount: account, + }, + }) + } + req := &altspb.HandshakerReq{ + ReqOneof: &altspb.HandshakerReq_ClientStart{ + ClientStart: &altspb.StartClientHandshakeReq{ + HandshakeSecurityProtocol: hsProtocol, + ApplicationProtocols: appProtocols, + RecordProtocols: recordProtocols, + TargetIdentities: targetIdentities, + LocalIdentity: h.clientOpts.ClientIdentity, + TargetName: h.clientOpts.TargetName, + RpcVersions: h.clientOpts.RPCVersions, + }, + }, + } + + conn, result, err := h.doHandshake(req) + if err != nil { + return nil, nil, err + } + authInfo := authinfo.New(result) + return conn, authInfo, nil +} + +// ServerHandshake starts and completes a server ALTS handshaking for GCP. Once +// done, ServerHandshake returns a secure connection. 
+func (h *altsHandshaker) ServerHandshake(ctx context.Context) (net.Conn, credentials.AuthInfo, error) { + if !acquire(1) { + return nil, nil, errDropped + } + defer release(1) + + if h.side != core.ServerSide { + return nil, nil, errors.New("only handshakers created using NewServerHandshaker can perform a server handshaker") + } + + p := make([]byte, frameLimit) + n, err := h.conn.Read(p) + if err != nil { + return nil, nil, err + } + + // Prepare server parameters. + // TODO: currently only ALTS parameters are provided. Might need to use + // more options in the future. + params := make(map[int32]*altspb.ServerHandshakeParameters) + params[int32(altspb.HandshakeProtocol_ALTS)] = &altspb.ServerHandshakeParameters{ + RecordProtocols: recordProtocols, + } + req := &altspb.HandshakerReq{ + ReqOneof: &altspb.HandshakerReq_ServerStart{ + ServerStart: &altspb.StartServerHandshakeReq{ + ApplicationProtocols: appProtocols, + HandshakeParameters: params, + InBytes: p[:n], + RpcVersions: h.serverOpts.RPCVersions, + }, + }, + } + + conn, result, err := h.doHandshake(req) + if err != nil { + return nil, nil, err + } + authInfo := authinfo.New(result) + return conn, authInfo, nil +} + +func (h *altsHandshaker) doHandshake(req *altspb.HandshakerReq) (net.Conn, *altspb.HandshakerResult, error) { + resp, err := h.accessHandshakerService(req) + if err != nil { + return nil, nil, err + } + // Check of the returned status is an error. + if resp.GetStatus() != nil { + if got, want := resp.GetStatus().Code, uint32(codes.OK); got != want { + return nil, nil, fmt.Errorf("%v", resp.GetStatus().Details) + } + } + + var extra []byte + if req.GetServerStart() != nil { + extra = req.GetServerStart().GetInBytes()[resp.GetBytesConsumed():] + } + result, extra, err := h.processUntilDone(resp, extra) + if err != nil { + return nil, nil, err + } + // The handshaker returns a 128 bytes key. It should be truncated based + // on the returned record protocol. + keyLen, ok := keyLength[result.RecordProtocol] + if !ok { + return nil, nil, fmt.Errorf("unknown resulted record protocol %v", result.RecordProtocol) + } + sc, err := conn.NewConn(h.conn, h.side, result.GetRecordProtocol(), result.KeyData[:keyLen], extra) + if err != nil { + return nil, nil, err + } + return sc, result, nil +} + +func (h *altsHandshaker) accessHandshakerService(req *altspb.HandshakerReq) (*altspb.HandshakerResp, error) { + if err := h.stream.Send(req); err != nil { + return nil, err + } + resp, err := h.stream.Recv() + if err != nil { + return nil, err + } + return resp, nil +} + +// processUntilDone processes the handshake until the handshaker service returns +// the results. Handshaker service takes care of frame parsing, so we read +// whatever received from the network and send it to the handshaker service. +func (h *altsHandshaker) processUntilDone(resp *altspb.HandshakerResp, extra []byte) (*altspb.HandshakerResult, []byte, error) { + for { + if len(resp.OutFrames) > 0 { + if _, err := h.conn.Write(resp.OutFrames); err != nil { + return nil, nil, err + } + } + if resp.Result != nil { + return resp.Result, extra, nil + } + buf := make([]byte, frameLimit) + n, err := h.conn.Read(buf) + if err != nil && err != io.EOF { + return nil, nil, err + } + // If there is nothing to send to the handshaker service, and + // nothing is received from the peer, then we are stuck. + // This covers the case when the peer is not responding. Note + // that handshaker service connection issues are caught in + // accessHandshakerService before we even get here. 
+ if len(resp.OutFrames) == 0 && n == 0 { + return nil, nil, core.PeerNotRespondingError + } + // Append extra bytes from the previous interaction with the + // handshaker service with the current buffer read from conn. + p := append(extra, buf[:n]...) + resp, err = h.accessHandshakerService(&altspb.HandshakerReq{ + ReqOneof: &altspb.HandshakerReq_Next{ + Next: &altspb.NextHandshakeMessageReq{ + InBytes: p, + }, + }, + }) + if err != nil { + return nil, nil, err + } + // Set extra based on handshaker service response. + if n == 0 { + extra = nil + } else { + extra = buf[resp.GetBytesConsumed():n] + } + } +} + +// Close terminates the Handshaker. It should be called when the caller obtains +// the secure connection. +func (h *altsHandshaker) Close() { + h.stream.CloseSend() +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/service/service.go b/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/service/service.go new file mode 100644 index 0000000..0c7b568 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/handshaker/service/service.go @@ -0,0 +1,54 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package service manages connections between the VM application and the ALTS +// handshaker service. +package service + +import ( + "sync" + + grpc "google.golang.org/grpc" +) + +var ( + // hsConn represents a connection to hypervisor handshaker service. + hsConn *grpc.ClientConn + mu sync.Mutex + // hsDialer will be reassigned in tests. + hsDialer = grpc.Dial +) + +// Dial dials the handshake service in the hypervisor. If a connection has +// already been established, this function returns it. Otherwise, a new +// connection is created. +func Dial(hsAddress string) (*grpc.ClientConn, error) { + mu.Lock() + defer mu.Unlock() + + if hsConn == nil { + // Create a new connection to the handshaker service. Note that + // this connection stays open until the application is closed. + var err error + hsConn, err = hsDialer(hsAddress, grpc.WithInsecure()) + if err != nil { + return nil, err + } + } + return hsConn, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/altscontext.pb.go b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/altscontext.pb.go new file mode 100644 index 0000000..d179307 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/altscontext.pb.go @@ -0,0 +1,151 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/gcp/altscontext.proto + +package grpc_gcp // import "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AltsContext struct { + // The application protocol negotiated for this connection. + ApplicationProtocol string `protobuf:"bytes,1,opt,name=application_protocol,json=applicationProtocol,proto3" json:"application_protocol,omitempty"` + // The record protocol negotiated for this connection. + RecordProtocol string `protobuf:"bytes,2,opt,name=record_protocol,json=recordProtocol,proto3" json:"record_protocol,omitempty"` + // The security level of the created secure channel. + SecurityLevel SecurityLevel `protobuf:"varint,3,opt,name=security_level,json=securityLevel,proto3,enum=grpc.gcp.SecurityLevel" json:"security_level,omitempty"` + // The peer service account. + PeerServiceAccount string `protobuf:"bytes,4,opt,name=peer_service_account,json=peerServiceAccount,proto3" json:"peer_service_account,omitempty"` + // The local service account. + LocalServiceAccount string `protobuf:"bytes,5,opt,name=local_service_account,json=localServiceAccount,proto3" json:"local_service_account,omitempty"` + // The RPC protocol versions supported by the peer. + PeerRpcVersions *RpcProtocolVersions `protobuf:"bytes,6,opt,name=peer_rpc_versions,json=peerRpcVersions,proto3" json:"peer_rpc_versions,omitempty"` + // Additional attributes of the peer. + PeerAttributes map[string]string `protobuf:"bytes,7,rep,name=peer_attributes,json=peerAttributes,proto3" json:"peer_attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AltsContext) Reset() { *m = AltsContext{} } +func (m *AltsContext) String() string { return proto.CompactTextString(m) } +func (*AltsContext) ProtoMessage() {} +func (*AltsContext) Descriptor() ([]byte, []int) { + return fileDescriptor_altscontext_f6b7868f9a30497f, []int{0} +} +func (m *AltsContext) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AltsContext.Unmarshal(m, b) +} +func (m *AltsContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AltsContext.Marshal(b, m, deterministic) +} +func (dst *AltsContext) XXX_Merge(src proto.Message) { + xxx_messageInfo_AltsContext.Merge(dst, src) +} +func (m *AltsContext) XXX_Size() int { + return xxx_messageInfo_AltsContext.Size(m) +} +func (m *AltsContext) XXX_DiscardUnknown() { + xxx_messageInfo_AltsContext.DiscardUnknown(m) +} + +var xxx_messageInfo_AltsContext proto.InternalMessageInfo + +func (m *AltsContext) GetApplicationProtocol() string { + if m != nil { + return m.ApplicationProtocol + } + return "" +} + +func (m *AltsContext) GetRecordProtocol() string { + if m != nil { + return m.RecordProtocol + } + return "" +} + +func (m *AltsContext) GetSecurityLevel() SecurityLevel { + if m != nil { + return m.SecurityLevel + } + return SecurityLevel_SECURITY_NONE +} + +func (m *AltsContext) GetPeerServiceAccount() string { + if m != nil { + return m.PeerServiceAccount + } + return "" +} + +func (m *AltsContext) GetLocalServiceAccount() string { + if m != nil { + return m.LocalServiceAccount + } + return "" +} + +func (m *AltsContext) 
GetPeerRpcVersions() *RpcProtocolVersions { + if m != nil { + return m.PeerRpcVersions + } + return nil +} + +func (m *AltsContext) GetPeerAttributes() map[string]string { + if m != nil { + return m.PeerAttributes + } + return nil +} + +func init() { + proto.RegisterType((*AltsContext)(nil), "grpc.gcp.AltsContext") + proto.RegisterMapType((map[string]string)(nil), "grpc.gcp.AltsContext.PeerAttributesEntry") +} + +func init() { + proto.RegisterFile("grpc/gcp/altscontext.proto", fileDescriptor_altscontext_f6b7868f9a30497f) +} + +var fileDescriptor_altscontext_f6b7868f9a30497f = []byte{ + // 411 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x92, 0x4d, 0x6f, 0x13, 0x31, + 0x10, 0x86, 0xb5, 0x0d, 0x2d, 0xe0, 0x88, 0xb4, 0xb8, 0xa9, 0x58, 0x45, 0x42, 0x8a, 0xb8, 0xb0, + 0x5c, 0x76, 0x21, 0x5c, 0x10, 0x07, 0x50, 0x8a, 0x38, 0x20, 0x71, 0x88, 0xb6, 0x12, 0x07, 0x2e, + 0x2b, 0x77, 0x3a, 0xb2, 0x2c, 0x5c, 0x8f, 0x35, 0x76, 0x22, 0xf2, 0xb3, 0xf9, 0x07, 0x68, 0xed, + 0xcd, 0x07, 0x1f, 0xb7, 0x9d, 0x79, 0x9f, 0x19, 0xbf, 0xb3, 0x33, 0x62, 0xa6, 0xd9, 0x43, 0xa3, + 0xc1, 0x37, 0xca, 0xc6, 0x00, 0xe4, 0x22, 0xfe, 0x8c, 0xb5, 0x67, 0x8a, 0x24, 0x1f, 0xf5, 0x5a, + 0xad, 0xc1, 0xcf, 0xaa, 0x3d, 0x15, 0x59, 0xb9, 0xe0, 0x89, 0x63, 0x17, 0x10, 0xd6, 0x6c, 0xe2, + 0xb6, 0x03, 0xba, 0xbf, 0x27, 0x97, 0x6b, 0x5e, 0xfc, 0x1a, 0x89, 0xf1, 0xd2, 0xc6, 0xf0, 0x29, + 0x77, 0x92, 0x6f, 0xc4, 0x54, 0x79, 0x6f, 0x0d, 0xa8, 0x68, 0xc8, 0x75, 0x09, 0x02, 0xb2, 0x65, + 0x31, 0x2f, 0xaa, 0xc7, 0xed, 0xe5, 0x91, 0xb6, 0x1a, 0x24, 0xf9, 0x52, 0x9c, 0x33, 0x02, 0xf1, + 0xdd, 0x81, 0x3e, 0x49, 0xf4, 0x24, 0xa7, 0xf7, 0xe0, 0x07, 0x31, 0xd9, 0x9b, 0xb0, 0xb8, 0x41, + 0x5b, 0x8e, 0xe6, 0x45, 0x35, 0x59, 0x3c, 0xab, 0x77, 0xc6, 0xeb, 0x9b, 0x41, 0xff, 0xda, 0xcb, + 0xed, 0x93, 0x70, 0x1c, 0xca, 0xd7, 0x62, 0xea, 0x11, 0xb9, 0x0b, 0xc8, 0x1b, 0x03, 0xd8, 0x29, + 0x00, 0x5a, 0xbb, 0x58, 0x3e, 0x48, 0xaf, 0xc9, 0x5e, 0xbb, 0xc9, 0xd2, 0x32, 0x2b, 0x72, 0x21, + 0xae, 0x2c, 0x81, 0xb2, 0xff, 0x94, 0x9c, 0xe6, 0x71, 0x92, 0xf8, 0x57, 0xcd, 0x17, 0xf1, 0x34, + 0xbd, 0xc2, 0x1e, 0xba, 0x0d, 0x72, 0x30, 0xe4, 0x42, 0x79, 0x36, 0x2f, 0xaa, 0xf1, 0xe2, 0xf9, + 0xc1, 0x68, 0xeb, 0x61, 0x37, 0xd7, 0xb7, 0x01, 0x6a, 0xcf, 0xfb, 0xba, 0xd6, 0xc3, 0x2e, 0x21, + 0x5b, 0x91, 0x52, 0x9d, 0x8a, 0x91, 0xcd, 0xed, 0x3a, 0x62, 0x28, 0x1f, 0xce, 0x47, 0xd5, 0x78, + 0xf1, 0xea, 0xd0, 0xe8, 0xe8, 0xe7, 0xd7, 0x2b, 0x44, 0x5e, 0xee, 0xd9, 0xcf, 0x2e, 0xf2, 0xb6, + 0x9d, 0xf8, 0x3f, 0x92, 0xb3, 0xa5, 0xb8, 0xfc, 0x0f, 0x26, 0x2f, 0xc4, 0xe8, 0x07, 0x6e, 0x87, + 0x35, 0xf5, 0x9f, 0x72, 0x2a, 0x4e, 0x37, 0xca, 0xae, 0x71, 0x58, 0x46, 0x0e, 0xde, 0x9f, 0xbc, + 0x2b, 0xae, 0xad, 0xb8, 0x32, 0x94, 0x1d, 0xf4, 0x47, 0x54, 0x1b, 0x17, 0x91, 0x9d, 0xb2, 0xd7, + 0x17, 0x47, 0x66, 0xd2, 0x74, 0xab, 0xe2, 0xfb, 0x47, 0x4d, 0xa4, 0x2d, 0xd6, 0x9a, 0xac, 0x72, + 0xba, 0x26, 0xd6, 0x4d, 0x3a, 0x2e, 0x60, 0xbc, 0x43, 0x17, 0x8d, 0xb2, 0x21, 0x9d, 0x62, 0xb3, + 0xeb, 0xd2, 0xa4, 0x2b, 0x48, 0x50, 0xa7, 0xc1, 0xdf, 0x9e, 0xa5, 0xf8, 0xed, 0xef, 0x00, 0x00, + 0x00, 0xff, 0xff, 0x9b, 0x8c, 0xe4, 0x6a, 0xba, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/handshaker.pb.go b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/handshaker.pb.go new file mode 100644 index 0000000..0c37ba2 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/handshaker.pb.go @@ -0,0 +1,1196 @@ +// Code generated by 
protoc-gen-go. DO NOT EDIT. +// source: grpc/gcp/handshaker.proto + +package grpc_gcp // import "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type HandshakeProtocol int32 + +const ( + // Default value. + HandshakeProtocol_HANDSHAKE_PROTOCOL_UNSPECIFIED HandshakeProtocol = 0 + // TLS handshake protocol. + HandshakeProtocol_TLS HandshakeProtocol = 1 + // Application Layer Transport Security handshake protocol. + HandshakeProtocol_ALTS HandshakeProtocol = 2 +) + +var HandshakeProtocol_name = map[int32]string{ + 0: "HANDSHAKE_PROTOCOL_UNSPECIFIED", + 1: "TLS", + 2: "ALTS", +} +var HandshakeProtocol_value = map[string]int32{ + "HANDSHAKE_PROTOCOL_UNSPECIFIED": 0, + "TLS": 1, + "ALTS": 2, +} + +func (x HandshakeProtocol) String() string { + return proto.EnumName(HandshakeProtocol_name, int32(x)) +} +func (HandshakeProtocol) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{0} +} + +type NetworkProtocol int32 + +const ( + NetworkProtocol_NETWORK_PROTOCOL_UNSPECIFIED NetworkProtocol = 0 + NetworkProtocol_TCP NetworkProtocol = 1 + NetworkProtocol_UDP NetworkProtocol = 2 +) + +var NetworkProtocol_name = map[int32]string{ + 0: "NETWORK_PROTOCOL_UNSPECIFIED", + 1: "TCP", + 2: "UDP", +} +var NetworkProtocol_value = map[string]int32{ + "NETWORK_PROTOCOL_UNSPECIFIED": 0, + "TCP": 1, + "UDP": 2, +} + +func (x NetworkProtocol) String() string { + return proto.EnumName(NetworkProtocol_name, int32(x)) +} +func (NetworkProtocol) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{1} +} + +type Endpoint struct { + // IP address. It should contain an IPv4 or IPv6 string literal, e.g. + // "192.168.0.1" or "2001:db8::1". + IpAddress string `protobuf:"bytes,1,opt,name=ip_address,json=ipAddress,proto3" json:"ip_address,omitempty"` + // Port number. + Port int32 `protobuf:"varint,2,opt,name=port,proto3" json:"port,omitempty"` + // Network protocol (e.g., TCP, UDP) associated with this endpoint. 
+ Protocol NetworkProtocol `protobuf:"varint,3,opt,name=protocol,proto3,enum=grpc.gcp.NetworkProtocol" json:"protocol,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Endpoint) Reset() { *m = Endpoint{} } +func (m *Endpoint) String() string { return proto.CompactTextString(m) } +func (*Endpoint) ProtoMessage() {} +func (*Endpoint) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{0} +} +func (m *Endpoint) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Endpoint.Unmarshal(m, b) +} +func (m *Endpoint) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Endpoint.Marshal(b, m, deterministic) +} +func (dst *Endpoint) XXX_Merge(src proto.Message) { + xxx_messageInfo_Endpoint.Merge(dst, src) +} +func (m *Endpoint) XXX_Size() int { + return xxx_messageInfo_Endpoint.Size(m) +} +func (m *Endpoint) XXX_DiscardUnknown() { + xxx_messageInfo_Endpoint.DiscardUnknown(m) +} + +var xxx_messageInfo_Endpoint proto.InternalMessageInfo + +func (m *Endpoint) GetIpAddress() string { + if m != nil { + return m.IpAddress + } + return "" +} + +func (m *Endpoint) GetPort() int32 { + if m != nil { + return m.Port + } + return 0 +} + +func (m *Endpoint) GetProtocol() NetworkProtocol { + if m != nil { + return m.Protocol + } + return NetworkProtocol_NETWORK_PROTOCOL_UNSPECIFIED +} + +type Identity struct { + // Types that are valid to be assigned to IdentityOneof: + // *Identity_ServiceAccount + // *Identity_Hostname + IdentityOneof isIdentity_IdentityOneof `protobuf_oneof:"identity_oneof"` + // Additional attributes of the identity. + Attributes map[string]string `protobuf:"bytes,3,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Identity) Reset() { *m = Identity{} } +func (m *Identity) String() string { return proto.CompactTextString(m) } +func (*Identity) ProtoMessage() {} +func (*Identity) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{1} +} +func (m *Identity) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Identity.Unmarshal(m, b) +} +func (m *Identity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Identity.Marshal(b, m, deterministic) +} +func (dst *Identity) XXX_Merge(src proto.Message) { + xxx_messageInfo_Identity.Merge(dst, src) +} +func (m *Identity) XXX_Size() int { + return xxx_messageInfo_Identity.Size(m) +} +func (m *Identity) XXX_DiscardUnknown() { + xxx_messageInfo_Identity.DiscardUnknown(m) +} + +var xxx_messageInfo_Identity proto.InternalMessageInfo + +type isIdentity_IdentityOneof interface { + isIdentity_IdentityOneof() +} + +type Identity_ServiceAccount struct { + ServiceAccount string `protobuf:"bytes,1,opt,name=service_account,json=serviceAccount,proto3,oneof"` +} + +type Identity_Hostname struct { + Hostname string `protobuf:"bytes,2,opt,name=hostname,proto3,oneof"` +} + +func (*Identity_ServiceAccount) isIdentity_IdentityOneof() {} + +func (*Identity_Hostname) isIdentity_IdentityOneof() {} + +func (m *Identity) GetIdentityOneof() isIdentity_IdentityOneof { + if m != nil { + return m.IdentityOneof + } + return nil +} + +func (m *Identity) GetServiceAccount() string { + if x, ok := 
m.GetIdentityOneof().(*Identity_ServiceAccount); ok { + return x.ServiceAccount + } + return "" +} + +func (m *Identity) GetHostname() string { + if x, ok := m.GetIdentityOneof().(*Identity_Hostname); ok { + return x.Hostname + } + return "" +} + +func (m *Identity) GetAttributes() map[string]string { + if m != nil { + return m.Attributes + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Identity) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Identity_OneofMarshaler, _Identity_OneofUnmarshaler, _Identity_OneofSizer, []interface{}{ + (*Identity_ServiceAccount)(nil), + (*Identity_Hostname)(nil), + } +} + +func _Identity_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Identity) + // identity_oneof + switch x := m.IdentityOneof.(type) { + case *Identity_ServiceAccount: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ServiceAccount) + case *Identity_Hostname: + b.EncodeVarint(2<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Hostname) + case nil: + default: + return fmt.Errorf("Identity.IdentityOneof has unexpected type %T", x) + } + return nil +} + +func _Identity_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Identity) + switch tag { + case 1: // identity_oneof.service_account + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IdentityOneof = &Identity_ServiceAccount{x} + return true, err + case 2: // identity_oneof.hostname + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.IdentityOneof = &Identity_Hostname{x} + return true, err + default: + return false, nil + } +} + +func _Identity_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Identity) + // identity_oneof + switch x := m.IdentityOneof.(type) { + case *Identity_ServiceAccount: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ServiceAccount))) + n += len(x.ServiceAccount) + case *Identity_Hostname: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Hostname))) + n += len(x.Hostname) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type StartClientHandshakeReq struct { + // Handshake security protocol requested by the client. + HandshakeSecurityProtocol HandshakeProtocol `protobuf:"varint,1,opt,name=handshake_security_protocol,json=handshakeSecurityProtocol,proto3,enum=grpc.gcp.HandshakeProtocol" json:"handshake_security_protocol,omitempty"` + // The application protocols supported by the client, e.g., "h2" (for http2), + // "grpc". + ApplicationProtocols []string `protobuf:"bytes,2,rep,name=application_protocols,json=applicationProtocols,proto3" json:"application_protocols,omitempty"` + // The record protocols supported by the client, e.g., + // "ALTSRP_GCM_AES128". + RecordProtocols []string `protobuf:"bytes,3,rep,name=record_protocols,json=recordProtocols,proto3" json:"record_protocols,omitempty"` + // (Optional) Describes which server identities are acceptable by the client. + // If target identities are provided and none of them matches the peer + // identity of the server, handshake will fail. 
+ TargetIdentities []*Identity `protobuf:"bytes,4,rep,name=target_identities,json=targetIdentities,proto3" json:"target_identities,omitempty"` + // (Optional) Application may specify a local identity. Otherwise, the + // handshaker chooses a default local identity. + LocalIdentity *Identity `protobuf:"bytes,5,opt,name=local_identity,json=localIdentity,proto3" json:"local_identity,omitempty"` + // (Optional) Local endpoint information of the connection to the server, + // such as local IP address, port number, and network protocol. + LocalEndpoint *Endpoint `protobuf:"bytes,6,opt,name=local_endpoint,json=localEndpoint,proto3" json:"local_endpoint,omitempty"` + // (Optional) Endpoint information of the remote server, such as IP address, + // port number, and network protocol. + RemoteEndpoint *Endpoint `protobuf:"bytes,7,opt,name=remote_endpoint,json=remoteEndpoint,proto3" json:"remote_endpoint,omitempty"` + // (Optional) If target name is provided, a secure naming check is performed + // to verify that the peer authenticated identity is indeed authorized to run + // the target name. + TargetName string `protobuf:"bytes,8,opt,name=target_name,json=targetName,proto3" json:"target_name,omitempty"` + // (Optional) RPC protocol versions supported by the client. + RpcVersions *RpcProtocolVersions `protobuf:"bytes,9,opt,name=rpc_versions,json=rpcVersions,proto3" json:"rpc_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartClientHandshakeReq) Reset() { *m = StartClientHandshakeReq{} } +func (m *StartClientHandshakeReq) String() string { return proto.CompactTextString(m) } +func (*StartClientHandshakeReq) ProtoMessage() {} +func (*StartClientHandshakeReq) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{2} +} +func (m *StartClientHandshakeReq) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartClientHandshakeReq.Unmarshal(m, b) +} +func (m *StartClientHandshakeReq) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartClientHandshakeReq.Marshal(b, m, deterministic) +} +func (dst *StartClientHandshakeReq) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartClientHandshakeReq.Merge(dst, src) +} +func (m *StartClientHandshakeReq) XXX_Size() int { + return xxx_messageInfo_StartClientHandshakeReq.Size(m) +} +func (m *StartClientHandshakeReq) XXX_DiscardUnknown() { + xxx_messageInfo_StartClientHandshakeReq.DiscardUnknown(m) +} + +var xxx_messageInfo_StartClientHandshakeReq proto.InternalMessageInfo + +func (m *StartClientHandshakeReq) GetHandshakeSecurityProtocol() HandshakeProtocol { + if m != nil { + return m.HandshakeSecurityProtocol + } + return HandshakeProtocol_HANDSHAKE_PROTOCOL_UNSPECIFIED +} + +func (m *StartClientHandshakeReq) GetApplicationProtocols() []string { + if m != nil { + return m.ApplicationProtocols + } + return nil +} + +func (m *StartClientHandshakeReq) GetRecordProtocols() []string { + if m != nil { + return m.RecordProtocols + } + return nil +} + +func (m *StartClientHandshakeReq) GetTargetIdentities() []*Identity { + if m != nil { + return m.TargetIdentities + } + return nil +} + +func (m *StartClientHandshakeReq) GetLocalIdentity() *Identity { + if m != nil { + return m.LocalIdentity + } + return nil +} + +func (m *StartClientHandshakeReq) GetLocalEndpoint() *Endpoint { + if m != nil { + return m.LocalEndpoint + } + return nil +} + +func (m *StartClientHandshakeReq) GetRemoteEndpoint() 
*Endpoint { + if m != nil { + return m.RemoteEndpoint + } + return nil +} + +func (m *StartClientHandshakeReq) GetTargetName() string { + if m != nil { + return m.TargetName + } + return "" +} + +func (m *StartClientHandshakeReq) GetRpcVersions() *RpcProtocolVersions { + if m != nil { + return m.RpcVersions + } + return nil +} + +type ServerHandshakeParameters struct { + // The record protocols supported by the server, e.g., + // "ALTSRP_GCM_AES128". + RecordProtocols []string `protobuf:"bytes,1,rep,name=record_protocols,json=recordProtocols,proto3" json:"record_protocols,omitempty"` + // (Optional) A list of local identities supported by the server, if + // specified. Otherwise, the handshaker chooses a default local identity. + LocalIdentities []*Identity `protobuf:"bytes,2,rep,name=local_identities,json=localIdentities,proto3" json:"local_identities,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerHandshakeParameters) Reset() { *m = ServerHandshakeParameters{} } +func (m *ServerHandshakeParameters) String() string { return proto.CompactTextString(m) } +func (*ServerHandshakeParameters) ProtoMessage() {} +func (*ServerHandshakeParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{3} +} +func (m *ServerHandshakeParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerHandshakeParameters.Unmarshal(m, b) +} +func (m *ServerHandshakeParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerHandshakeParameters.Marshal(b, m, deterministic) +} +func (dst *ServerHandshakeParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerHandshakeParameters.Merge(dst, src) +} +func (m *ServerHandshakeParameters) XXX_Size() int { + return xxx_messageInfo_ServerHandshakeParameters.Size(m) +} +func (m *ServerHandshakeParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ServerHandshakeParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerHandshakeParameters proto.InternalMessageInfo + +func (m *ServerHandshakeParameters) GetRecordProtocols() []string { + if m != nil { + return m.RecordProtocols + } + return nil +} + +func (m *ServerHandshakeParameters) GetLocalIdentities() []*Identity { + if m != nil { + return m.LocalIdentities + } + return nil +} + +type StartServerHandshakeReq struct { + // The application protocols supported by the server, e.g., "h2" (for http2), + // "grpc". + ApplicationProtocols []string `protobuf:"bytes,1,rep,name=application_protocols,json=applicationProtocols,proto3" json:"application_protocols,omitempty"` + // Handshake parameters (record protocols and local identities supported by + // the server) mapped by the handshake protocol. Each handshake security + // protocol (e.g., TLS or ALTS) has its own set of record protocols and local + // identities. Since protobuf does not support enum as key to the map, the key + // to handshake_parameters is the integer value of HandshakeProtocol enum. + HandshakeParameters map[int32]*ServerHandshakeParameters `protobuf:"bytes,2,rep,name=handshake_parameters,json=handshakeParameters,proto3" json:"handshake_parameters,omitempty" protobuf_key:"varint,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // Bytes in out_frames returned from the peer's HandshakerResp. It is possible + // that the peer's out_frames are split into multiple HandshakReq messages. 
+ InBytes []byte `protobuf:"bytes,3,opt,name=in_bytes,json=inBytes,proto3" json:"in_bytes,omitempty"` + // (Optional) Local endpoint information of the connection to the client, + // such as local IP address, port number, and network protocol. + LocalEndpoint *Endpoint `protobuf:"bytes,4,opt,name=local_endpoint,json=localEndpoint,proto3" json:"local_endpoint,omitempty"` + // (Optional) Endpoint information of the remote client, such as IP address, + // port number, and network protocol. + RemoteEndpoint *Endpoint `protobuf:"bytes,5,opt,name=remote_endpoint,json=remoteEndpoint,proto3" json:"remote_endpoint,omitempty"` + // (Optional) RPC protocol versions supported by the server. + RpcVersions *RpcProtocolVersions `protobuf:"bytes,6,opt,name=rpc_versions,json=rpcVersions,proto3" json:"rpc_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartServerHandshakeReq) Reset() { *m = StartServerHandshakeReq{} } +func (m *StartServerHandshakeReq) String() string { return proto.CompactTextString(m) } +func (*StartServerHandshakeReq) ProtoMessage() {} +func (*StartServerHandshakeReq) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{4} +} +func (m *StartServerHandshakeReq) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartServerHandshakeReq.Unmarshal(m, b) +} +func (m *StartServerHandshakeReq) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartServerHandshakeReq.Marshal(b, m, deterministic) +} +func (dst *StartServerHandshakeReq) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartServerHandshakeReq.Merge(dst, src) +} +func (m *StartServerHandshakeReq) XXX_Size() int { + return xxx_messageInfo_StartServerHandshakeReq.Size(m) +} +func (m *StartServerHandshakeReq) XXX_DiscardUnknown() { + xxx_messageInfo_StartServerHandshakeReq.DiscardUnknown(m) +} + +var xxx_messageInfo_StartServerHandshakeReq proto.InternalMessageInfo + +func (m *StartServerHandshakeReq) GetApplicationProtocols() []string { + if m != nil { + return m.ApplicationProtocols + } + return nil +} + +func (m *StartServerHandshakeReq) GetHandshakeParameters() map[int32]*ServerHandshakeParameters { + if m != nil { + return m.HandshakeParameters + } + return nil +} + +func (m *StartServerHandshakeReq) GetInBytes() []byte { + if m != nil { + return m.InBytes + } + return nil +} + +func (m *StartServerHandshakeReq) GetLocalEndpoint() *Endpoint { + if m != nil { + return m.LocalEndpoint + } + return nil +} + +func (m *StartServerHandshakeReq) GetRemoteEndpoint() *Endpoint { + if m != nil { + return m.RemoteEndpoint + } + return nil +} + +func (m *StartServerHandshakeReq) GetRpcVersions() *RpcProtocolVersions { + if m != nil { + return m.RpcVersions + } + return nil +} + +type NextHandshakeMessageReq struct { + // Bytes in out_frames returned from the peer's HandshakerResp. It is possible + // that the peer's out_frames are split into multiple NextHandshakerMessageReq + // messages. 
+ InBytes []byte `protobuf:"bytes,1,opt,name=in_bytes,json=inBytes,proto3" json:"in_bytes,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *NextHandshakeMessageReq) Reset() { *m = NextHandshakeMessageReq{} } +func (m *NextHandshakeMessageReq) String() string { return proto.CompactTextString(m) } +func (*NextHandshakeMessageReq) ProtoMessage() {} +func (*NextHandshakeMessageReq) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{5} +} +func (m *NextHandshakeMessageReq) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_NextHandshakeMessageReq.Unmarshal(m, b) +} +func (m *NextHandshakeMessageReq) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_NextHandshakeMessageReq.Marshal(b, m, deterministic) +} +func (dst *NextHandshakeMessageReq) XXX_Merge(src proto.Message) { + xxx_messageInfo_NextHandshakeMessageReq.Merge(dst, src) +} +func (m *NextHandshakeMessageReq) XXX_Size() int { + return xxx_messageInfo_NextHandshakeMessageReq.Size(m) +} +func (m *NextHandshakeMessageReq) XXX_DiscardUnknown() { + xxx_messageInfo_NextHandshakeMessageReq.DiscardUnknown(m) +} + +var xxx_messageInfo_NextHandshakeMessageReq proto.InternalMessageInfo + +func (m *NextHandshakeMessageReq) GetInBytes() []byte { + if m != nil { + return m.InBytes + } + return nil +} + +type HandshakerReq struct { + // Types that are valid to be assigned to ReqOneof: + // *HandshakerReq_ClientStart + // *HandshakerReq_ServerStart + // *HandshakerReq_Next + ReqOneof isHandshakerReq_ReqOneof `protobuf_oneof:"req_oneof"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HandshakerReq) Reset() { *m = HandshakerReq{} } +func (m *HandshakerReq) String() string { return proto.CompactTextString(m) } +func (*HandshakerReq) ProtoMessage() {} +func (*HandshakerReq) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{6} +} +func (m *HandshakerReq) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HandshakerReq.Unmarshal(m, b) +} +func (m *HandshakerReq) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HandshakerReq.Marshal(b, m, deterministic) +} +func (dst *HandshakerReq) XXX_Merge(src proto.Message) { + xxx_messageInfo_HandshakerReq.Merge(dst, src) +} +func (m *HandshakerReq) XXX_Size() int { + return xxx_messageInfo_HandshakerReq.Size(m) +} +func (m *HandshakerReq) XXX_DiscardUnknown() { + xxx_messageInfo_HandshakerReq.DiscardUnknown(m) +} + +var xxx_messageInfo_HandshakerReq proto.InternalMessageInfo + +type isHandshakerReq_ReqOneof interface { + isHandshakerReq_ReqOneof() +} + +type HandshakerReq_ClientStart struct { + ClientStart *StartClientHandshakeReq `protobuf:"bytes,1,opt,name=client_start,json=clientStart,proto3,oneof"` +} + +type HandshakerReq_ServerStart struct { + ServerStart *StartServerHandshakeReq `protobuf:"bytes,2,opt,name=server_start,json=serverStart,proto3,oneof"` +} + +type HandshakerReq_Next struct { + Next *NextHandshakeMessageReq `protobuf:"bytes,3,opt,name=next,proto3,oneof"` +} + +func (*HandshakerReq_ClientStart) isHandshakerReq_ReqOneof() {} + +func (*HandshakerReq_ServerStart) isHandshakerReq_ReqOneof() {} + +func (*HandshakerReq_Next) isHandshakerReq_ReqOneof() {} + +func (m *HandshakerReq) GetReqOneof() isHandshakerReq_ReqOneof { + if m != nil { + return m.ReqOneof + } + return nil +} + +func 
(m *HandshakerReq) GetClientStart() *StartClientHandshakeReq { + if x, ok := m.GetReqOneof().(*HandshakerReq_ClientStart); ok { + return x.ClientStart + } + return nil +} + +func (m *HandshakerReq) GetServerStart() *StartServerHandshakeReq { + if x, ok := m.GetReqOneof().(*HandshakerReq_ServerStart); ok { + return x.ServerStart + } + return nil +} + +func (m *HandshakerReq) GetNext() *NextHandshakeMessageReq { + if x, ok := m.GetReqOneof().(*HandshakerReq_Next); ok { + return x.Next + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*HandshakerReq) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _HandshakerReq_OneofMarshaler, _HandshakerReq_OneofUnmarshaler, _HandshakerReq_OneofSizer, []interface{}{ + (*HandshakerReq_ClientStart)(nil), + (*HandshakerReq_ServerStart)(nil), + (*HandshakerReq_Next)(nil), + } +} + +func _HandshakerReq_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*HandshakerReq) + // req_oneof + switch x := m.ReqOneof.(type) { + case *HandshakerReq_ClientStart: + b.EncodeVarint(1<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ClientStart); err != nil { + return err + } + case *HandshakerReq_ServerStart: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ServerStart); err != nil { + return err + } + case *HandshakerReq_Next: + b.EncodeVarint(3<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.Next); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("HandshakerReq.ReqOneof has unexpected type %T", x) + } + return nil +} + +func _HandshakerReq_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*HandshakerReq) + switch tag { + case 1: // req_oneof.client_start + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StartClientHandshakeReq) + err := b.DecodeMessage(msg) + m.ReqOneof = &HandshakerReq_ClientStart{msg} + return true, err + case 2: // req_oneof.server_start + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(StartServerHandshakeReq) + err := b.DecodeMessage(msg) + m.ReqOneof = &HandshakerReq_ServerStart{msg} + return true, err + case 3: // req_oneof.next + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(NextHandshakeMessageReq) + err := b.DecodeMessage(msg) + m.ReqOneof = &HandshakerReq_Next{msg} + return true, err + default: + return false, nil + } +} + +func _HandshakerReq_OneofSizer(msg proto.Message) (n int) { + m := msg.(*HandshakerReq) + // req_oneof + switch x := m.ReqOneof.(type) { + case *HandshakerReq_ClientStart: + s := proto.Size(x.ClientStart) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HandshakerReq_ServerStart: + s := proto.Size(x.ServerStart) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *HandshakerReq_Next: + s := proto.Size(x.Next) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type HandshakerResult struct { + // The application protocol negotiated for this connection. 
+ ApplicationProtocol string `protobuf:"bytes,1,opt,name=application_protocol,json=applicationProtocol,proto3" json:"application_protocol,omitempty"` + // The record protocol negotiated for this connection. + RecordProtocol string `protobuf:"bytes,2,opt,name=record_protocol,json=recordProtocol,proto3" json:"record_protocol,omitempty"` + // Cryptographic key data. The key data may be more than the key length + // required for the record protocol, thus the client of the handshaker + // service needs to truncate the key data into the right key length. + KeyData []byte `protobuf:"bytes,3,opt,name=key_data,json=keyData,proto3" json:"key_data,omitempty"` + // The authenticated identity of the peer. + PeerIdentity *Identity `protobuf:"bytes,4,opt,name=peer_identity,json=peerIdentity,proto3" json:"peer_identity,omitempty"` + // The local identity used in the handshake. + LocalIdentity *Identity `protobuf:"bytes,5,opt,name=local_identity,json=localIdentity,proto3" json:"local_identity,omitempty"` + // Indicate whether the handshaker service client should keep the channel + // between the handshaker service open, e.g., in order to handle + // post-handshake messages in the future. + KeepChannelOpen bool `protobuf:"varint,6,opt,name=keep_channel_open,json=keepChannelOpen,proto3" json:"keep_channel_open,omitempty"` + // The RPC protocol versions supported by the peer. + PeerRpcVersions *RpcProtocolVersions `protobuf:"bytes,7,opt,name=peer_rpc_versions,json=peerRpcVersions,proto3" json:"peer_rpc_versions,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HandshakerResult) Reset() { *m = HandshakerResult{} } +func (m *HandshakerResult) String() string { return proto.CompactTextString(m) } +func (*HandshakerResult) ProtoMessage() {} +func (*HandshakerResult) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{7} +} +func (m *HandshakerResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HandshakerResult.Unmarshal(m, b) +} +func (m *HandshakerResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HandshakerResult.Marshal(b, m, deterministic) +} +func (dst *HandshakerResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_HandshakerResult.Merge(dst, src) +} +func (m *HandshakerResult) XXX_Size() int { + return xxx_messageInfo_HandshakerResult.Size(m) +} +func (m *HandshakerResult) XXX_DiscardUnknown() { + xxx_messageInfo_HandshakerResult.DiscardUnknown(m) +} + +var xxx_messageInfo_HandshakerResult proto.InternalMessageInfo + +func (m *HandshakerResult) GetApplicationProtocol() string { + if m != nil { + return m.ApplicationProtocol + } + return "" +} + +func (m *HandshakerResult) GetRecordProtocol() string { + if m != nil { + return m.RecordProtocol + } + return "" +} + +func (m *HandshakerResult) GetKeyData() []byte { + if m != nil { + return m.KeyData + } + return nil +} + +func (m *HandshakerResult) GetPeerIdentity() *Identity { + if m != nil { + return m.PeerIdentity + } + return nil +} + +func (m *HandshakerResult) GetLocalIdentity() *Identity { + if m != nil { + return m.LocalIdentity + } + return nil +} + +func (m *HandshakerResult) GetKeepChannelOpen() bool { + if m != nil { + return m.KeepChannelOpen + } + return false +} + +func (m *HandshakerResult) GetPeerRpcVersions() *RpcProtocolVersions { + if m != nil { + return m.PeerRpcVersions + } + return nil +} + +type HandshakerStatus struct { + // The status code. 
This could be the gRPC status code. + Code uint32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` + // The status details. + Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HandshakerStatus) Reset() { *m = HandshakerStatus{} } +func (m *HandshakerStatus) String() string { return proto.CompactTextString(m) } +func (*HandshakerStatus) ProtoMessage() {} +func (*HandshakerStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{8} +} +func (m *HandshakerStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HandshakerStatus.Unmarshal(m, b) +} +func (m *HandshakerStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HandshakerStatus.Marshal(b, m, deterministic) +} +func (dst *HandshakerStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_HandshakerStatus.Merge(dst, src) +} +func (m *HandshakerStatus) XXX_Size() int { + return xxx_messageInfo_HandshakerStatus.Size(m) +} +func (m *HandshakerStatus) XXX_DiscardUnknown() { + xxx_messageInfo_HandshakerStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_HandshakerStatus proto.InternalMessageInfo + +func (m *HandshakerStatus) GetCode() uint32 { + if m != nil { + return m.Code + } + return 0 +} + +func (m *HandshakerStatus) GetDetails() string { + if m != nil { + return m.Details + } + return "" +} + +type HandshakerResp struct { + // Frames to be given to the peer for the NextHandshakeMessageReq. May be + // empty if no out_frames have to be sent to the peer or if in_bytes in the + // HandshakerReq are incomplete. All the non-empty out frames must be sent to + // the peer even if the handshaker status is not OK as these frames may + // contain the alert frames. + OutFrames []byte `protobuf:"bytes,1,opt,name=out_frames,json=outFrames,proto3" json:"out_frames,omitempty"` + // Number of bytes in the in_bytes consumed by the handshaker. It is possible + // that part of in_bytes in HandshakerReq was unrelated to the handshake + // process. + BytesConsumed uint32 `protobuf:"varint,2,opt,name=bytes_consumed,json=bytesConsumed,proto3" json:"bytes_consumed,omitempty"` + // This is set iff the handshake was successful. out_frames may still be set + // to frames that needs to be forwarded to the peer. + Result *HandshakerResult `protobuf:"bytes,3,opt,name=result,proto3" json:"result,omitempty"` + // Status of the handshaker. 
+ Status *HandshakerStatus `protobuf:"bytes,4,opt,name=status,proto3" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HandshakerResp) Reset() { *m = HandshakerResp{} } +func (m *HandshakerResp) String() string { return proto.CompactTextString(m) } +func (*HandshakerResp) ProtoMessage() {} +func (*HandshakerResp) Descriptor() ([]byte, []int) { + return fileDescriptor_handshaker_1dfe659b12ea825e, []int{9} +} +func (m *HandshakerResp) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HandshakerResp.Unmarshal(m, b) +} +func (m *HandshakerResp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HandshakerResp.Marshal(b, m, deterministic) +} +func (dst *HandshakerResp) XXX_Merge(src proto.Message) { + xxx_messageInfo_HandshakerResp.Merge(dst, src) +} +func (m *HandshakerResp) XXX_Size() int { + return xxx_messageInfo_HandshakerResp.Size(m) +} +func (m *HandshakerResp) XXX_DiscardUnknown() { + xxx_messageInfo_HandshakerResp.DiscardUnknown(m) +} + +var xxx_messageInfo_HandshakerResp proto.InternalMessageInfo + +func (m *HandshakerResp) GetOutFrames() []byte { + if m != nil { + return m.OutFrames + } + return nil +} + +func (m *HandshakerResp) GetBytesConsumed() uint32 { + if m != nil { + return m.BytesConsumed + } + return 0 +} + +func (m *HandshakerResp) GetResult() *HandshakerResult { + if m != nil { + return m.Result + } + return nil +} + +func (m *HandshakerResp) GetStatus() *HandshakerStatus { + if m != nil { + return m.Status + } + return nil +} + +func init() { + proto.RegisterType((*Endpoint)(nil), "grpc.gcp.Endpoint") + proto.RegisterType((*Identity)(nil), "grpc.gcp.Identity") + proto.RegisterMapType((map[string]string)(nil), "grpc.gcp.Identity.AttributesEntry") + proto.RegisterType((*StartClientHandshakeReq)(nil), "grpc.gcp.StartClientHandshakeReq") + proto.RegisterType((*ServerHandshakeParameters)(nil), "grpc.gcp.ServerHandshakeParameters") + proto.RegisterType((*StartServerHandshakeReq)(nil), "grpc.gcp.StartServerHandshakeReq") + proto.RegisterMapType((map[int32]*ServerHandshakeParameters)(nil), "grpc.gcp.StartServerHandshakeReq.HandshakeParametersEntry") + proto.RegisterType((*NextHandshakeMessageReq)(nil), "grpc.gcp.NextHandshakeMessageReq") + proto.RegisterType((*HandshakerReq)(nil), "grpc.gcp.HandshakerReq") + proto.RegisterType((*HandshakerResult)(nil), "grpc.gcp.HandshakerResult") + proto.RegisterType((*HandshakerStatus)(nil), "grpc.gcp.HandshakerStatus") + proto.RegisterType((*HandshakerResp)(nil), "grpc.gcp.HandshakerResp") + proto.RegisterEnum("grpc.gcp.HandshakeProtocol", HandshakeProtocol_name, HandshakeProtocol_value) + proto.RegisterEnum("grpc.gcp.NetworkProtocol", NetworkProtocol_name, NetworkProtocol_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// HandshakerServiceClient is the client API for HandshakerService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type HandshakerServiceClient interface { + // Handshaker service accepts a stream of handshaker request, returning a + // stream of handshaker response. 
Client is expected to send exactly one + // message with either client_start or server_start followed by one or more + // messages with next. Each time client sends a request, the handshaker + // service expects to respond. Client does not have to wait for service's + // response before sending next request. + DoHandshake(ctx context.Context, opts ...grpc.CallOption) (HandshakerService_DoHandshakeClient, error) +} + +type handshakerServiceClient struct { + cc *grpc.ClientConn +} + +func NewHandshakerServiceClient(cc *grpc.ClientConn) HandshakerServiceClient { + return &handshakerServiceClient{cc} +} + +func (c *handshakerServiceClient) DoHandshake(ctx context.Context, opts ...grpc.CallOption) (HandshakerService_DoHandshakeClient, error) { + stream, err := c.cc.NewStream(ctx, &_HandshakerService_serviceDesc.Streams[0], "/grpc.gcp.HandshakerService/DoHandshake", opts...) + if err != nil { + return nil, err + } + x := &handshakerServiceDoHandshakeClient{stream} + return x, nil +} + +type HandshakerService_DoHandshakeClient interface { + Send(*HandshakerReq) error + Recv() (*HandshakerResp, error) + grpc.ClientStream +} + +type handshakerServiceDoHandshakeClient struct { + grpc.ClientStream +} + +func (x *handshakerServiceDoHandshakeClient) Send(m *HandshakerReq) error { + return x.ClientStream.SendMsg(m) +} + +func (x *handshakerServiceDoHandshakeClient) Recv() (*HandshakerResp, error) { + m := new(HandshakerResp) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// HandshakerServiceServer is the server API for HandshakerService service. +type HandshakerServiceServer interface { + // Handshaker service accepts a stream of handshaker request, returning a + // stream of handshaker response. Client is expected to send exactly one + // message with either client_start or server_start followed by one or more + // messages with next. Each time client sends a request, the handshaker + // service expects to respond. Client does not have to wait for service's + // response before sending next request. 
+ DoHandshake(HandshakerService_DoHandshakeServer) error +} + +func RegisterHandshakerServiceServer(s *grpc.Server, srv HandshakerServiceServer) { + s.RegisterService(&_HandshakerService_serviceDesc, srv) +} + +func _HandshakerService_DoHandshake_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(HandshakerServiceServer).DoHandshake(&handshakerServiceDoHandshakeServer{stream}) +} + +type HandshakerService_DoHandshakeServer interface { + Send(*HandshakerResp) error + Recv() (*HandshakerReq, error) + grpc.ServerStream +} + +type handshakerServiceDoHandshakeServer struct { + grpc.ServerStream +} + +func (x *handshakerServiceDoHandshakeServer) Send(m *HandshakerResp) error { + return x.ServerStream.SendMsg(m) +} + +func (x *handshakerServiceDoHandshakeServer) Recv() (*HandshakerReq, error) { + m := new(HandshakerReq) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _HandshakerService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.gcp.HandshakerService", + HandlerType: (*HandshakerServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "DoHandshake", + Handler: _HandshakerService_DoHandshake_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc/gcp/handshaker.proto", +} + +func init() { + proto.RegisterFile("grpc/gcp/handshaker.proto", fileDescriptor_handshaker_1dfe659b12ea825e) +} + +var fileDescriptor_handshaker_1dfe659b12ea825e = []byte{ + // 1168 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0xdf, 0x6e, 0x1a, 0xc7, + 0x17, 0xf6, 0x02, 0xb6, 0xf1, 0xc1, 0xfc, 0xf1, 0xc4, 0x51, 0xd6, 0x4e, 0xf2, 0xfb, 0x51, 0xaa, + 0xaa, 0x24, 0x17, 0xd0, 0x92, 0x56, 0x69, 0x52, 0x45, 0x09, 0x60, 0x2c, 0xdc, 0xa4, 0x18, 0x2d, + 0x4e, 0x2b, 0x35, 0x17, 0xab, 0xc9, 0x32, 0xc1, 0x2b, 0x96, 0x99, 0xf5, 0xcc, 0xe0, 0x86, 0x07, + 0xe8, 0xe3, 0xf4, 0x15, 0xfa, 0x36, 0x95, 0xfa, 0x00, 0xbd, 0x6f, 0xb5, 0xb3, 0xb3, 0x7f, 0xc0, + 0x10, 0x25, 0xea, 0xdd, 0xee, 0x99, 0xef, 0x3b, 0x7b, 0xe6, 0x3b, 0xdf, 0x9c, 0x1d, 0x38, 0x9a, + 0x70, 0xdf, 0x69, 0x4e, 0x1c, 0xbf, 0x79, 0x89, 0xe9, 0x58, 0x5c, 0xe2, 0x29, 0xe1, 0x0d, 0x9f, + 0x33, 0xc9, 0x50, 0x3e, 0x58, 0x6a, 0x4c, 0x1c, 0xff, 0xb8, 0x1e, 0x83, 0x24, 0xc7, 0x54, 0xf8, + 0x8c, 0x4b, 0x5b, 0x10, 0x67, 0xce, 0x5d, 0xb9, 0xb0, 0x1d, 0x36, 0x9b, 0x31, 0x1a, 0x72, 0x6a, + 0x12, 0xf2, 0x3d, 0x3a, 0xf6, 0x99, 0x4b, 0x25, 0xba, 0x0f, 0xe0, 0xfa, 0x36, 0x1e, 0x8f, 0x39, + 0x11, 0xc2, 0x34, 0xaa, 0x46, 0x7d, 0xcf, 0xda, 0x73, 0xfd, 0x76, 0x18, 0x40, 0x08, 0x72, 0x41, + 0x22, 0x33, 0x53, 0x35, 0xea, 0xdb, 0x96, 0x7a, 0x46, 0xdf, 0x42, 0x5e, 0xe5, 0x71, 0x98, 0x67, + 0x66, 0xab, 0x46, 0xbd, 0xd4, 0x3a, 0x6a, 0x44, 0x55, 0x34, 0x06, 0x44, 0xfe, 0xca, 0xf8, 0x74, + 0xa8, 0x01, 0x56, 0x0c, 0xad, 0xfd, 0x65, 0x40, 0xfe, 0x6c, 0x4c, 0xa8, 0x74, 0xe5, 0x02, 0x3d, + 0x80, 0xb2, 0x20, 0xfc, 0xda, 0x75, 0x88, 0x8d, 0x1d, 0x87, 0xcd, 0xa9, 0x0c, 0xbf, 0xdd, 0xdf, + 0xb2, 0x4a, 0x7a, 0xa1, 0x1d, 0xc6, 0xd1, 0x3d, 0xc8, 0x5f, 0x32, 0x21, 0x29, 0x9e, 0x11, 0x55, + 0x46, 0x80, 0x89, 0x23, 0xa8, 0x03, 0x80, 0xa5, 0xe4, 0xee, 0xdb, 0xb9, 0x24, 0xc2, 0xcc, 0x56, + 0xb3, 0xf5, 0x42, 0xab, 0x96, 0x94, 0x13, 0x7d, 0xb0, 0xd1, 0x8e, 0x41, 0x3d, 0x2a, 0xf9, 0xc2, + 0x4a, 0xb1, 0x8e, 0x9f, 0x41, 0x79, 0x65, 0x19, 0x55, 0x20, 0x3b, 0x25, 0x0b, 0xad, 0x47, 0xf0, + 0x88, 0x0e, 0x61, 0xfb, 0x1a, 0x7b, 0x73, 0x5d, 0x83, 0x15, 0xbe, 0x3c, 0xcd, 0x7c, 0x67, 0x74, + 0x2a, 0x50, 0x72, 0xf5, 0x67, 
0x6c, 0x46, 0x09, 0x7b, 0x57, 0xfb, 0x3d, 0x07, 0x77, 0x46, 0x12, + 0x73, 0xd9, 0xf5, 0x5c, 0x42, 0x65, 0x3f, 0x6a, 0x9a, 0x45, 0xae, 0xd0, 0x1b, 0xb8, 0x1b, 0x37, + 0x31, 0xe9, 0x4f, 0x2c, 0xa8, 0xa1, 0x04, 0xbd, 0x9b, 0xec, 0x20, 0x26, 0xc7, 0x92, 0x1e, 0xc5, + 0xfc, 0x91, 0xa6, 0x47, 0x4b, 0xe8, 0x11, 0xdc, 0xc6, 0xbe, 0xef, 0xb9, 0x0e, 0x96, 0x2e, 0xa3, + 0x71, 0x56, 0x61, 0x66, 0xaa, 0xd9, 0xfa, 0x9e, 0x75, 0x98, 0x5a, 0x8c, 0x38, 0x02, 0x3d, 0x80, + 0x0a, 0x27, 0x0e, 0xe3, 0xe3, 0x14, 0x3e, 0xab, 0xf0, 0xe5, 0x30, 0x9e, 0x40, 0x9f, 0xc3, 0x81, + 0xc4, 0x7c, 0x42, 0xa4, 0xad, 0x77, 0xec, 0x12, 0x61, 0xe6, 0x94, 0xe8, 0xe8, 0xa6, 0xe8, 0x56, + 0x25, 0x04, 0x9f, 0xc5, 0x58, 0xf4, 0x04, 0x4a, 0x1e, 0x73, 0xb0, 0x17, 0xf1, 0x17, 0xe6, 0x76, + 0xd5, 0xd8, 0xc0, 0x2e, 0x2a, 0x64, 0x6c, 0x99, 0x98, 0x4a, 0xb4, 0x77, 0xcd, 0x9d, 0x55, 0x6a, + 0xe4, 0x6a, 0x4d, 0x8d, 0x4d, 0xfe, 0x3d, 0x94, 0x39, 0x99, 0x31, 0x49, 0x12, 0xee, 0xee, 0x46, + 0x6e, 0x29, 0x84, 0xc6, 0xe4, 0xff, 0x43, 0x41, 0xef, 0x59, 0x59, 0x30, 0xaf, 0xda, 0x0f, 0x61, + 0x68, 0x10, 0x58, 0xf0, 0x05, 0xec, 0x73, 0xdf, 0xb1, 0xaf, 0x09, 0x17, 0x2e, 0xa3, 0xc2, 0xdc, + 0x53, 0xa9, 0xef, 0x27, 0xa9, 0x2d, 0xdf, 0x89, 0x24, 0xfc, 0x49, 0x83, 0xac, 0x02, 0xf7, 0x9d, + 0xe8, 0xa5, 0xf6, 0x9b, 0x01, 0x47, 0x23, 0xc2, 0xaf, 0x09, 0x4f, 0xba, 0x8d, 0x39, 0x9e, 0x11, + 0x49, 0xf8, 0xfa, 0xfe, 0x18, 0xeb, 0xfb, 0xf3, 0x0c, 0x2a, 0x4b, 0xf2, 0x06, 0xed, 0xc9, 0x6c, + 0x6c, 0x4f, 0x39, 0x2d, 0xb0, 0x4b, 0x44, 0xed, 0x9f, 0xac, 0xf6, 0xed, 0x4a, 0x31, 0x81, 0x6f, + 0x37, 0x5a, 0xcb, 0xf8, 0x80, 0xb5, 0x66, 0x70, 0x98, 0x98, 0xdd, 0x8f, 0xb7, 0xa4, 0x6b, 0x7a, + 0x9a, 0xd4, 0xb4, 0xe1, 0xab, 0x8d, 0x35, 0x7a, 0x84, 0xe7, 0xf7, 0xd6, 0xe5, 0x1a, 0xa5, 0x8e, + 0x20, 0xef, 0x52, 0xfb, 0xed, 0x22, 0x1c, 0x05, 0x46, 0x7d, 0xdf, 0xda, 0x75, 0x69, 0x27, 0x78, + 0x5d, 0xe3, 0x9e, 0xdc, 0x7f, 0x70, 0xcf, 0xf6, 0x47, 0xbb, 0x67, 0xd5, 0x1c, 0x3b, 0x9f, 0x6a, + 0x8e, 0xe3, 0x29, 0x98, 0x9b, 0x54, 0x48, 0x8f, 0xa9, 0xed, 0x70, 0x4c, 0x3d, 0x49, 0x8f, 0xa9, + 0x42, 0xeb, 0xf3, 0x94, 0xc4, 0x9b, 0x0c, 0x96, 0x9a, 0x65, 0xb5, 0x6f, 0xe0, 0xce, 0x80, 0xbc, + 0x4f, 0x26, 0xd6, 0x8f, 0x44, 0x08, 0x3c, 0x51, 0x06, 0x48, 0x8b, 0x6b, 0x2c, 0x89, 0x5b, 0xfb, + 0xd3, 0x80, 0x62, 0x4c, 0xe1, 0x01, 0xf8, 0x14, 0xf6, 0x1d, 0x35, 0xfb, 0x6c, 0x11, 0x74, 0x56, + 0x11, 0x0a, 0xad, 0xcf, 0x56, 0x1a, 0x7e, 0x73, 0x3c, 0xf6, 0xb7, 0xac, 0x42, 0x48, 0x54, 0x80, + 0x20, 0x8f, 0x50, 0x75, 0xeb, 0x3c, 0x99, 0xb5, 0x79, 0x6e, 0x1a, 0x27, 0xc8, 0x13, 0x12, 0xc3, + 0x3c, 0x8f, 0x21, 0x47, 0xc9, 0x7b, 0xa9, 0x5c, 0xb1, 0xc4, 0xdf, 0xb0, 0xdb, 0xfe, 0x96, 0xa5, + 0x08, 0x9d, 0x02, 0xec, 0x71, 0x72, 0xa5, 0xe7, 0xfa, 0xdf, 0x19, 0xa8, 0xa4, 0xf7, 0x29, 0xe6, + 0x9e, 0x44, 0x5f, 0xc3, 0xe1, 0xba, 0x83, 0xa1, 0xff, 0x1d, 0xb7, 0xd6, 0x9c, 0x0b, 0xf4, 0x25, + 0x94, 0x57, 0x4e, 0xb4, 0xfe, 0xab, 0x94, 0x96, 0x0f, 0x74, 0xa0, 0xf9, 0x94, 0x2c, 0xec, 0x31, + 0x96, 0x38, 0x32, 0xf4, 0x94, 0x2c, 0x4e, 0xb0, 0xc4, 0xe8, 0x31, 0x14, 0x7d, 0x42, 0x78, 0x32, + 0x48, 0x73, 0x1b, 0x07, 0xe9, 0x7e, 0x00, 0xbc, 0x39, 0x47, 0x3f, 0x7d, 0x04, 0x3f, 0x84, 0x83, + 0x29, 0x21, 0xbe, 0xed, 0x5c, 0x62, 0x4a, 0x89, 0x67, 0x33, 0x9f, 0x50, 0xe5, 0xe8, 0xbc, 0x55, + 0x0e, 0x16, 0xba, 0x61, 0xfc, 0xdc, 0x27, 0x14, 0x9d, 0xc1, 0x81, 0xaa, 0x6f, 0xc9, 0xfd, 0xbb, + 0x1f, 0xe3, 0xfe, 0x72, 0xc0, 0xb3, 0x52, 0xe3, 0xf1, 0x45, 0x5a, 0xf5, 0x91, 0xc4, 0x72, 0xae, + 0x2e, 0x26, 0x0e, 0x1b, 0x13, 0xa5, 0x72, 0xd1, 0x52, 0xcf, 0xc8, 0x84, 0xdd, 0x31, 0x91, 0xd8, + 0x55, 0xff, 0xbb, 0x40, 0xce, 0xe8, 0xb5, 0xf6, 0x87, 
0x01, 0xa5, 0xa5, 0xc6, 0xf9, 0xc1, 0xc5, + 0x87, 0xcd, 0xa5, 0xfd, 0x2e, 0x38, 0x05, 0x91, 0xa1, 0xf7, 0xd8, 0x5c, 0x9e, 0xaa, 0x00, 0xfa, + 0x02, 0x4a, 0xca, 0xea, 0xb6, 0xc3, 0xa8, 0x98, 0xcf, 0xc8, 0x58, 0xa5, 0x2c, 0x5a, 0x45, 0x15, + 0xed, 0xea, 0x20, 0x6a, 0xc1, 0x0e, 0x57, 0x36, 0xd0, 0xce, 0x3a, 0x5e, 0xf3, 0xe3, 0xd6, 0x46, + 0xb1, 0x34, 0x32, 0xe0, 0x08, 0xb5, 0x09, 0xdd, 0xb2, 0xb5, 0x9c, 0x70, 0x9b, 0x96, 0x46, 0x3e, + 0xfc, 0x01, 0x0e, 0x6e, 0x5c, 0x04, 0x50, 0x0d, 0xfe, 0xd7, 0x6f, 0x0f, 0x4e, 0x46, 0xfd, 0xf6, + 0xcb, 0x9e, 0x3d, 0xb4, 0xce, 0x2f, 0xce, 0xbb, 0xe7, 0xaf, 0xec, 0xd7, 0x83, 0xd1, 0xb0, 0xd7, + 0x3d, 0x3b, 0x3d, 0xeb, 0x9d, 0x54, 0xb6, 0xd0, 0x2e, 0x64, 0x2f, 0x5e, 0x8d, 0x2a, 0x06, 0xca, + 0x43, 0xae, 0xfd, 0xea, 0x62, 0x54, 0xc9, 0x3c, 0xec, 0x41, 0x79, 0xe5, 0x96, 0x86, 0xaa, 0x70, + 0x6f, 0xd0, 0xbb, 0xf8, 0xf9, 0xdc, 0x7a, 0xf9, 0xa1, 0x3c, 0xdd, 0x61, 0xc5, 0x08, 0x1e, 0x5e, + 0x9f, 0x0c, 0x2b, 0x99, 0xd6, 0x9b, 0x54, 0x49, 0x7c, 0x14, 0xde, 0xd9, 0xd0, 0x29, 0x14, 0x4e, + 0x58, 0x1c, 0x46, 0x77, 0xd6, 0xcb, 0x71, 0x75, 0x6c, 0x6e, 0xd0, 0xc9, 0xaf, 0x6d, 0xd5, 0x8d, + 0xaf, 0x8c, 0xce, 0x14, 0x6e, 0xbb, 0x2c, 0xc4, 0x60, 0x4f, 0x8a, 0x86, 0x4b, 0x25, 0xe1, 0x14, + 0x7b, 0x9d, 0x72, 0x02, 0x57, 0xd5, 0x0f, 0x8d, 0x5f, 0x9e, 0x4f, 0x18, 0x9b, 0x78, 0xa4, 0x31, + 0x61, 0x1e, 0xa6, 0x93, 0x06, 0xe3, 0x93, 0xa6, 0xba, 0x0a, 0x3b, 0x9c, 0x28, 0xe3, 0x62, 0x4f, + 0x34, 0x83, 0x24, 0xcd, 0x28, 0x49, 0x53, 0x9d, 0x3a, 0x05, 0xb2, 0x27, 0x8e, 0xff, 0x76, 0x47, + 0xbd, 0x3f, 0xfa, 0x37, 0x00, 0x00, 0xff, 0xff, 0x6e, 0x37, 0x34, 0x9b, 0x67, 0x0b, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/transport_security_common.pb.go b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/transport_security_common.pb.go new file mode 100644 index 0000000..27510d4 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp/transport_security_common.pb.go @@ -0,0 +1,178 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/gcp/transport_security_common.proto + +package grpc_gcp // import "google.golang.org/grpc/credentials/alts/internal/proto/grpc_gcp" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The security level of the created channel. The list is sorted in increasing +// level of security. This order must always be maintained. 
+type SecurityLevel int32 + +const ( + SecurityLevel_SECURITY_NONE SecurityLevel = 0 + SecurityLevel_INTEGRITY_ONLY SecurityLevel = 1 + SecurityLevel_INTEGRITY_AND_PRIVACY SecurityLevel = 2 +) + +var SecurityLevel_name = map[int32]string{ + 0: "SECURITY_NONE", + 1: "INTEGRITY_ONLY", + 2: "INTEGRITY_AND_PRIVACY", +} +var SecurityLevel_value = map[string]int32{ + "SECURITY_NONE": 0, + "INTEGRITY_ONLY": 1, + "INTEGRITY_AND_PRIVACY": 2, +} + +func (x SecurityLevel) String() string { + return proto.EnumName(SecurityLevel_name, int32(x)) +} +func (SecurityLevel) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_transport_security_common_71945991f2c3b4a6, []int{0} +} + +// Max and min supported RPC protocol versions. +type RpcProtocolVersions struct { + // Maximum supported RPC version. + MaxRpcVersion *RpcProtocolVersions_Version `protobuf:"bytes,1,opt,name=max_rpc_version,json=maxRpcVersion,proto3" json:"max_rpc_version,omitempty"` + // Minimum supported RPC version. + MinRpcVersion *RpcProtocolVersions_Version `protobuf:"bytes,2,opt,name=min_rpc_version,json=minRpcVersion,proto3" json:"min_rpc_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RpcProtocolVersions) Reset() { *m = RpcProtocolVersions{} } +func (m *RpcProtocolVersions) String() string { return proto.CompactTextString(m) } +func (*RpcProtocolVersions) ProtoMessage() {} +func (*RpcProtocolVersions) Descriptor() ([]byte, []int) { + return fileDescriptor_transport_security_common_71945991f2c3b4a6, []int{0} +} +func (m *RpcProtocolVersions) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RpcProtocolVersions.Unmarshal(m, b) +} +func (m *RpcProtocolVersions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RpcProtocolVersions.Marshal(b, m, deterministic) +} +func (dst *RpcProtocolVersions) XXX_Merge(src proto.Message) { + xxx_messageInfo_RpcProtocolVersions.Merge(dst, src) +} +func (m *RpcProtocolVersions) XXX_Size() int { + return xxx_messageInfo_RpcProtocolVersions.Size(m) +} +func (m *RpcProtocolVersions) XXX_DiscardUnknown() { + xxx_messageInfo_RpcProtocolVersions.DiscardUnknown(m) +} + +var xxx_messageInfo_RpcProtocolVersions proto.InternalMessageInfo + +func (m *RpcProtocolVersions) GetMaxRpcVersion() *RpcProtocolVersions_Version { + if m != nil { + return m.MaxRpcVersion + } + return nil +} + +func (m *RpcProtocolVersions) GetMinRpcVersion() *RpcProtocolVersions_Version { + if m != nil { + return m.MinRpcVersion + } + return nil +} + +// RPC version contains a major version and a minor version. 
+type RpcProtocolVersions_Version struct { + Major uint32 `protobuf:"varint,1,opt,name=major,proto3" json:"major,omitempty"` + Minor uint32 `protobuf:"varint,2,opt,name=minor,proto3" json:"minor,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RpcProtocolVersions_Version) Reset() { *m = RpcProtocolVersions_Version{} } +func (m *RpcProtocolVersions_Version) String() string { return proto.CompactTextString(m) } +func (*RpcProtocolVersions_Version) ProtoMessage() {} +func (*RpcProtocolVersions_Version) Descriptor() ([]byte, []int) { + return fileDescriptor_transport_security_common_71945991f2c3b4a6, []int{0, 0} +} +func (m *RpcProtocolVersions_Version) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RpcProtocolVersions_Version.Unmarshal(m, b) +} +func (m *RpcProtocolVersions_Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RpcProtocolVersions_Version.Marshal(b, m, deterministic) +} +func (dst *RpcProtocolVersions_Version) XXX_Merge(src proto.Message) { + xxx_messageInfo_RpcProtocolVersions_Version.Merge(dst, src) +} +func (m *RpcProtocolVersions_Version) XXX_Size() int { + return xxx_messageInfo_RpcProtocolVersions_Version.Size(m) +} +func (m *RpcProtocolVersions_Version) XXX_DiscardUnknown() { + xxx_messageInfo_RpcProtocolVersions_Version.DiscardUnknown(m) +} + +var xxx_messageInfo_RpcProtocolVersions_Version proto.InternalMessageInfo + +func (m *RpcProtocolVersions_Version) GetMajor() uint32 { + if m != nil { + return m.Major + } + return 0 +} + +func (m *RpcProtocolVersions_Version) GetMinor() uint32 { + if m != nil { + return m.Minor + } + return 0 +} + +func init() { + proto.RegisterType((*RpcProtocolVersions)(nil), "grpc.gcp.RpcProtocolVersions") + proto.RegisterType((*RpcProtocolVersions_Version)(nil), "grpc.gcp.RpcProtocolVersions.Version") + proto.RegisterEnum("grpc.gcp.SecurityLevel", SecurityLevel_name, SecurityLevel_value) +} + +func init() { + proto.RegisterFile("grpc/gcp/transport_security_common.proto", fileDescriptor_transport_security_common_71945991f2c3b4a6) +} + +var fileDescriptor_transport_security_common_71945991f2c3b4a6 = []byte{ + // 323 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x41, 0x4b, 0x3b, 0x31, + 0x10, 0xc5, 0xff, 0x5b, 0xf8, 0xab, 0x44, 0x56, 0xeb, 0x6a, 0x41, 0xc5, 0x83, 0x08, 0x42, 0xf1, + 0x90, 0x05, 0xc5, 0xb3, 0xb4, 0xb5, 0x48, 0xa1, 0x6e, 0xeb, 0xb6, 0x16, 0xea, 0x25, 0xc4, 0x18, + 0x42, 0x24, 0x9b, 0x09, 0xb3, 0xb1, 0xd4, 0xaf, 0xec, 0xa7, 0x90, 0x4d, 0xbb, 0x14, 0xc1, 0x8b, + 0xb7, 0xbc, 0xc7, 0xcc, 0x6f, 0x32, 0xf3, 0x48, 0x5b, 0xa1, 0x13, 0xa9, 0x12, 0x2e, 0xf5, 0xc8, + 0x6d, 0xe9, 0x00, 0x3d, 0x2b, 0xa5, 0xf8, 0x40, 0xed, 0x3f, 0x99, 0x80, 0xa2, 0x00, 0x4b, 0x1d, + 0x82, 0x87, 0x64, 0xa7, 0xaa, 0xa4, 0x4a, 0xb8, 0x8b, 0xaf, 0x88, 0x1c, 0xe6, 0x4e, 0x8c, 0x2b, + 0x5b, 0x80, 0x99, 0x49, 0x2c, 0x35, 0xd8, 0x32, 0x79, 0x24, 0xfb, 0x05, 0x5f, 0x32, 0x74, 0x82, + 0x2d, 0x56, 0xde, 0x71, 0x74, 0x1e, 0xb5, 0x77, 0xaf, 0x2f, 0x69, 0xdd, 0x4b, 0x7f, 0xe9, 0xa3, + 0xeb, 0x47, 0x1e, 0x17, 0x7c, 0x99, 0x3b, 0xb1, 0x96, 0x01, 0xa7, 0xed, 0x0f, 0x5c, 0xe3, 0x6f, + 0x38, 0x6d, 0x37, 0xb8, 0xd3, 0x5b, 0xb2, 0x5d, 0x93, 0x8f, 0xc8, 0xff, 0x82, 0xbf, 0x03, 0x86, + 0xef, 0xc5, 0xf9, 0x4a, 0x04, 0x57, 0x5b, 0xc0, 0x30, 0xa5, 0x72, 0x2b, 0x71, 0xf5, 0x44, 0xe2, + 0xc9, 0xfa, 0x1e, 0x43, 0xb9, 0x90, 0x26, 0x39, 0x20, 0xf1, 0xa4, 0xdf, 0x7b, 0xce, 0x07, 0xd3, + 
0x39, 0xcb, 0x46, 0x59, 0xbf, 0xf9, 0x2f, 0x49, 0xc8, 0xde, 0x20, 0x9b, 0xf6, 0x1f, 0x82, 0x37, + 0xca, 0x86, 0xf3, 0x66, 0x94, 0x9c, 0x90, 0xd6, 0xc6, 0xeb, 0x64, 0xf7, 0x6c, 0x9c, 0x0f, 0x66, + 0x9d, 0xde, 0xbc, 0xd9, 0xe8, 0x2e, 0x49, 0x4b, 0xc3, 0x6a, 0x07, 0x6e, 0x7c, 0x49, 0xb5, 0xf5, + 0x12, 0x2d, 0x37, 0xdd, 0xb3, 0x69, 0x9d, 0x41, 0x3d, 0xb2, 0x17, 0x12, 0x08, 0x2b, 0x8e, 0xa3, + 0x97, 0x3b, 0x05, 0xa0, 0x8c, 0xa4, 0x0a, 0x0c, 0xb7, 0x8a, 0x02, 0xaa, 0x34, 0xc4, 0x27, 0x50, + 0xbe, 0x49, 0xeb, 0x35, 0x37, 0x65, 0x5a, 0x11, 0xd3, 0x9a, 0x98, 0x86, 0xe8, 0x42, 0x11, 0x53, + 0xc2, 0xbd, 0x6e, 0x05, 0x7d, 0xf3, 0x1d, 0x00, 0x00, 0xff, 0xff, 0x31, 0x14, 0xb4, 0x11, 0xf6, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/internal/testutil/testutil.go b/vendor/google.golang.org/grpc/credentials/alts/internal/testutil/testutil.go new file mode 100644 index 0000000..e114719 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/internal/testutil/testutil.go @@ -0,0 +1,125 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package testutil include useful test utilities for the handshaker. +package testutil + +import ( + "bytes" + "encoding/binary" + "io" + "net" + "sync" + + "google.golang.org/grpc/credentials/alts/internal/conn" +) + +// Stats is used to collect statistics about concurrent handshake calls. +type Stats struct { + mu sync.Mutex + calls int + MaxConcurrentCalls int +} + +// Update updates the statistics by adding one call. +func (s *Stats) Update() func() { + s.mu.Lock() + s.calls++ + if s.calls > s.MaxConcurrentCalls { + s.MaxConcurrentCalls = s.calls + } + s.mu.Unlock() + + return func() { + s.mu.Lock() + s.calls-- + s.mu.Unlock() + } +} + +// Reset resets the statistics. +func (s *Stats) Reset() { + s.mu.Lock() + defer s.mu.Unlock() + s.calls = 0 + s.MaxConcurrentCalls = 0 +} + +// testConn mimics a net.Conn to the peer. +type testConn struct { + net.Conn + in *bytes.Buffer + out *bytes.Buffer +} + +// NewTestConn creates a new instance of testConn object. +func NewTestConn(in *bytes.Buffer, out *bytes.Buffer) net.Conn { + return &testConn{ + in: in, + out: out, + } +} + +// Read reads from the in buffer. +func (c *testConn) Read(b []byte) (n int, err error) { + return c.in.Read(b) +} + +// Write writes to the out buffer. +func (c *testConn) Write(b []byte) (n int, err error) { + return c.out.Write(b) +} + +// Close closes the testConn object. +func (c *testConn) Close() error { + return nil +} + +// unresponsiveTestConn mimics a net.Conn for an unresponsive peer. It is used +// for testing the PeerNotResponding case. +type unresponsiveTestConn struct { + net.Conn +} + +// NewUnresponsiveTestConn creates a new instance of unresponsiveTestConn object. +func NewUnresponsiveTestConn() net.Conn { + return &unresponsiveTestConn{} +} + +// Read reads from the in buffer. 
+func (c *unresponsiveTestConn) Read(b []byte) (n int, err error) { + return 0, io.EOF +} + +// Write writes to the out buffer. +func (c *unresponsiveTestConn) Write(b []byte) (n int, err error) { + return 0, nil +} + +// Close closes the TestConn object. +func (c *unresponsiveTestConn) Close() error { + return nil +} + +// MakeFrame creates a handshake frame. +func MakeFrame(pl string) []byte { + f := make([]byte, len(pl)+conn.MsgLenFieldSize) + binary.LittleEndian.PutUint32(f, uint32(len(pl))) + copy(f[conn.MsgLenFieldSize:], []byte(pl)) + return f +} diff --git a/vendor/google.golang.org/grpc/credentials/alts/utils.go b/vendor/google.golang.org/grpc/credentials/alts/utils.go new file mode 100644 index 0000000..4ed27c6 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/alts/utils.go @@ -0,0 +1,141 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package alts + +import ( + "context" + "errors" + "fmt" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "regexp" + "runtime" + "strings" + + "google.golang.org/grpc/peer" +) + +const ( + linuxProductNameFile = "/sys/class/dmi/id/product_name" + windowsCheckCommand = "powershell.exe" + windowsCheckCommandArgs = "Get-WmiObject -Class Win32_BIOS" + powershellOutputFilter = "Manufacturer" + windowsManufacturerRegex = ":(.*)" +) + +type platformError string + +func (k platformError) Error() string { + return fmt.Sprintf("%s is not supported", string(k)) +} + +var ( + // The following two variables will be reassigned in tests. + runningOS = runtime.GOOS + manufacturerReader = func() (io.Reader, error) { + switch runningOS { + case "linux": + return os.Open(linuxProductNameFile) + case "windows": + cmd := exec.Command(windowsCheckCommand, windowsCheckCommandArgs) + out, err := cmd.Output() + if err != nil { + return nil, err + } + + for _, line := range strings.Split(strings.TrimSuffix(string(out), "\n"), "\n") { + if strings.HasPrefix(line, powershellOutputFilter) { + re := regexp.MustCompile(windowsManufacturerRegex) + name := re.FindString(line) + name = strings.TrimLeft(name, ":") + return strings.NewReader(name), nil + } + } + + return nil, errors.New("cannot determine the machine's manufacturer") + default: + return nil, platformError(runningOS) + } + } + vmOnGCP bool +) + +// isRunningOnGCP checks whether the local system, without doing a network request is +// running on GCP. 
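// Illustrative sketch, not part of the vendored upstream file: runningOS and
// manufacturerReader above are package variables precisely so tests can swap
// them out. A test would normally do this with *testing.T; the stub is shown
// here as a plain helper using only the "io" and "strings" imports already in
// this file.
func exampleStubManufacturer() bool {
	oldOS, oldReader := runningOS, manufacturerReader
	defer func() { runningOS, manufacturerReader = oldOS, oldReader }()

	runningOS = "linux"
	manufacturerReader = func() (io.Reader, error) {
		return strings.NewReader("Google Compute Engine\n"), nil
	}
	return isRunningOnGCP() // reports true with the stubbed reader
}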
+func isRunningOnGCP() bool { + manufacturer, err := readManufacturer() + if err != nil { + log.Fatalf("failure to read manufacturer information: %v", err) + } + name := string(manufacturer) + switch runningOS { + case "linux": + name = strings.TrimSpace(name) + return name == "Google" || name == "Google Compute Engine" + case "windows": + name = strings.Replace(name, " ", "", -1) + name = strings.Replace(name, "\n", "", -1) + name = strings.Replace(name, "\r", "", -1) + return name == "Google" + default: + log.Fatal(platformError(runningOS)) + } + return false +} + +func readManufacturer() ([]byte, error) { + reader, err := manufacturerReader() + if err != nil { + return nil, err + } + if reader == nil { + return nil, errors.New("got nil reader") + } + manufacturer, err := ioutil.ReadAll(reader) + if err != nil { + return nil, fmt.Errorf("failed reading %v: %v", linuxProductNameFile, err) + } + return manufacturer, nil +} + +// AuthInfoFromContext extracts the alts.AuthInfo object from the given context, +// if it exists. This API should be used by gRPC server RPC handlers to get +// information about the communicating peer. For client-side, use grpc.Peer() +// CallOption. +func AuthInfoFromContext(ctx context.Context) (AuthInfo, error) { + p, ok := peer.FromContext(ctx) + if !ok { + return nil, errors.New("no Peer found in Context") + } + return AuthInfoFromPeer(p) +} + +// AuthInfoFromPeer extracts the alts.AuthInfo object from the given peer, if it +// exists. This API should be used by gRPC clients after obtaining a peer object +// using the grpc.Peer() CallOption. +func AuthInfoFromPeer(p *peer.Peer) (AuthInfo, error) { + altsAuthInfo, ok := p.AuthInfo.(AuthInfo) + if !ok { + return nil, errors.New("no alts.AuthInfo found in Peer") + } + return altsAuthInfo, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/credentials.go b/vendor/google.golang.org/grpc/credentials/credentials.go new file mode 100644 index 0000000..88aff94 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/credentials.go @@ -0,0 +1,338 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package credentials implements various credentials supported by gRPC library, +// which encapsulate all the state needed by a client to authenticate with a +// server and make various assertions, e.g., about the client's identity, role, +// or whether it is authorized to make a particular call. +package credentials // import "google.golang.org/grpc/credentials" + +import ( + "context" + "crypto/tls" + "crypto/x509" + "errors" + "fmt" + "io/ioutil" + "net" + "strings" + + "github.com/golang/protobuf/proto" + "google.golang.org/grpc/credentials/internal" +) + +// PerRPCCredentials defines the common interface for the credentials which need to +// attach security information to every RPC (e.g., oauth2). +type PerRPCCredentials interface { + // GetRequestMetadata gets the current request metadata, refreshing + // tokens if required. 
This should be called by the transport layer on + // each request, and the data should be populated in headers or other + // context. If a status code is returned, it will be used as the status + // for the RPC. uri is the URI of the entry point for the request. + // When supported by the underlying implementation, ctx can be used for + // timeout and cancellation. + // TODO(zhaoq): Define the set of the qualified keys instead of leaving + // it as an arbitrary string. + GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) + // RequireTransportSecurity indicates whether the credentials requires + // transport security. + RequireTransportSecurity() bool +} + +// ProtocolInfo provides information regarding the gRPC wire protocol version, +// security protocol, security protocol version in use, server name, etc. +type ProtocolInfo struct { + // ProtocolVersion is the gRPC wire protocol version. + ProtocolVersion string + // SecurityProtocol is the security protocol in use. + SecurityProtocol string + // SecurityVersion is the security protocol version. + SecurityVersion string + // ServerName is the user-configured server name. + ServerName string +} + +// AuthInfo defines the common interface for the auth information the users are interested in. +type AuthInfo interface { + AuthType() string +} + +// ErrConnDispatched indicates that rawConn has been dispatched out of gRPC +// and the caller should not close rawConn. +var ErrConnDispatched = errors.New("credentials: rawConn is dispatched out of gRPC") + +// TransportCredentials defines the common interface for all the live gRPC wire +// protocols and supported transport security protocols (e.g., TLS, SSL). +type TransportCredentials interface { + // ClientHandshake does the authentication handshake specified by the corresponding + // authentication protocol on rawConn for clients. It returns the authenticated + // connection and the corresponding auth information about the connection. + // Implementations must use the provided context to implement timely cancellation. + // gRPC will try to reconnect if the error returned is a temporary error + // (io.EOF, context.DeadlineExceeded or err.Temporary() == true). + // If the returned error is a wrapper error, implementations should make sure that + // the error implements Temporary() to have the correct retry behaviors. + // + // If the returned net.Conn is closed, it MUST close the net.Conn provided. + ClientHandshake(context.Context, string, net.Conn) (net.Conn, AuthInfo, error) + // ServerHandshake does the authentication handshake for servers. It returns + // the authenticated connection and the corresponding auth information about + // the connection. + // + // If the returned net.Conn is closed, it MUST close the net.Conn provided. + ServerHandshake(net.Conn) (net.Conn, AuthInfo, error) + // Info provides the ProtocolInfo of this TransportCredentials. + Info() ProtocolInfo + // Clone makes a copy of this TransportCredentials. + Clone() TransportCredentials + // OverrideServerName overrides the server name used to verify the hostname on the returned certificates from the server. + // gRPC internals also use it to override the virtual hosting name if it is set. + // It must be called before dialing. Currently, this is only used by grpclb. + OverrideServerName(string) error +} + +// Bundle is a combination of TransportCredentials and PerRPCCredentials. 
+// +// It also contains a mode switching method, so it can be used as a combination +// of different credential policies. +// +// Bundle cannot be used together with individual TransportCredentials. +// PerRPCCredentials from Bundle will be appended to other PerRPCCredentials. +// +// This API is experimental. +type Bundle interface { + TransportCredentials() TransportCredentials + PerRPCCredentials() PerRPCCredentials + // NewWithMode should make a copy of Bundle, and switch mode. Modifying the + // existing Bundle may cause races. + // + // NewWithMode returns nil if the requested mode is not supported. + NewWithMode(mode string) (Bundle, error) +} + +// TLSInfo contains the auth information for a TLS authenticated connection. +// It implements the AuthInfo interface. +type TLSInfo struct { + State tls.ConnectionState +} + +// AuthType returns the type of TLSInfo as a string. +func (t TLSInfo) AuthType() string { + return "tls" +} + +// GetSecurityValue returns security info requested by channelz. +func (t TLSInfo) GetSecurityValue() ChannelzSecurityValue { + v := &TLSChannelzSecurityValue{ + StandardName: cipherSuiteLookup[t.State.CipherSuite], + } + // Currently there's no way to get LocalCertificate info from tls package. + if len(t.State.PeerCertificates) > 0 { + v.RemoteCertificate = t.State.PeerCertificates[0].Raw + } + return v +} + +// tlsCreds is the credentials required for authenticating a connection using TLS. +type tlsCreds struct { + // TLS configuration + config *tls.Config +} + +func (c tlsCreds) Info() ProtocolInfo { + return ProtocolInfo{ + SecurityProtocol: "tls", + SecurityVersion: "1.2", + ServerName: c.config.ServerName, + } +} + +func (c *tlsCreds) ClientHandshake(ctx context.Context, authority string, rawConn net.Conn) (_ net.Conn, _ AuthInfo, err error) { + // use local cfg to avoid clobbering ServerName if using multiple endpoints + cfg := cloneTLSConfig(c.config) + if cfg.ServerName == "" { + colonPos := strings.LastIndex(authority, ":") + if colonPos == -1 { + colonPos = len(authority) + } + cfg.ServerName = authority[:colonPos] + } + conn := tls.Client(rawConn, cfg) + errChannel := make(chan error, 1) + go func() { + errChannel <- conn.Handshake() + }() + select { + case err := <-errChannel: + if err != nil { + return nil, nil, err + } + case <-ctx.Done(): + return nil, nil, ctx.Err() + } + return internal.WrapSyscallConn(rawConn, conn), TLSInfo{conn.ConnectionState()}, nil +} + +func (c *tlsCreds) ServerHandshake(rawConn net.Conn) (net.Conn, AuthInfo, error) { + conn := tls.Server(rawConn, c.config) + if err := conn.Handshake(); err != nil { + return nil, nil, err + } + return internal.WrapSyscallConn(rawConn, conn), TLSInfo{conn.ConnectionState()}, nil +} + +func (c *tlsCreds) Clone() TransportCredentials { + return NewTLS(c.config) +} + +func (c *tlsCreds) OverrideServerName(serverNameOverride string) error { + c.config.ServerName = serverNameOverride + return nil +} + +const alpnProtoStrH2 = "h2" + +func appendH2ToNextProtos(ps []string) []string { + for _, p := range ps { + if p == alpnProtoStrH2 { + return ps + } + } + ret := make([]string, 0, len(ps)+1) + ret = append(ret, ps...) + return append(ret, alpnProtoStrH2) +} + +// NewTLS uses c to construct a TransportCredentials based on TLS. +func NewTLS(c *tls.Config) TransportCredentials { + tc := &tlsCreds{cloneTLSConfig(c)} + tc.config.NextProtos = appendH2ToNextProtos(tc.config.NextProtos) + return tc +} + +// NewClientTLSFromCert constructs TLS credentials from the input certificate for client. 
+// serverNameOverride is for testing only. If set to a non empty string, +// it will override the virtual host name of authority (e.g. :authority header field) in requests. +func NewClientTLSFromCert(cp *x509.CertPool, serverNameOverride string) TransportCredentials { + return NewTLS(&tls.Config{ServerName: serverNameOverride, RootCAs: cp}) +} + +// NewClientTLSFromFile constructs TLS credentials from the input certificate file for client. +// serverNameOverride is for testing only. If set to a non empty string, +// it will override the virtual host name of authority (e.g. :authority header field) in requests. +func NewClientTLSFromFile(certFile, serverNameOverride string) (TransportCredentials, error) { + b, err := ioutil.ReadFile(certFile) + if err != nil { + return nil, err + } + cp := x509.NewCertPool() + if !cp.AppendCertsFromPEM(b) { + return nil, fmt.Errorf("credentials: failed to append certificates") + } + return NewTLS(&tls.Config{ServerName: serverNameOverride, RootCAs: cp}), nil +} + +// NewServerTLSFromCert constructs TLS credentials from the input certificate for server. +func NewServerTLSFromCert(cert *tls.Certificate) TransportCredentials { + return NewTLS(&tls.Config{Certificates: []tls.Certificate{*cert}}) +} + +// NewServerTLSFromFile constructs TLS credentials from the input certificate file and key +// file for server. +func NewServerTLSFromFile(certFile, keyFile string) (TransportCredentials, error) { + cert, err := tls.LoadX509KeyPair(certFile, keyFile) + if err != nil { + return nil, err + } + return NewTLS(&tls.Config{Certificates: []tls.Certificate{cert}}), nil +} + +// ChannelzSecurityInfo defines the interface that security protocols should implement +// in order to provide security info to channelz. +type ChannelzSecurityInfo interface { + GetSecurityValue() ChannelzSecurityValue +} + +// ChannelzSecurityValue defines the interface that GetSecurityValue() return value +// should satisfy. This interface should only be satisfied by *TLSChannelzSecurityValue +// and *OtherChannelzSecurityValue. +type ChannelzSecurityValue interface { + isChannelzSecurityValue() +} + +// TLSChannelzSecurityValue defines the struct that TLS protocol should return +// from GetSecurityValue(), containing security info like cipher and certificate used. +type TLSChannelzSecurityValue struct { + StandardName string + LocalCertificate []byte + RemoteCertificate []byte +} + +func (*TLSChannelzSecurityValue) isChannelzSecurityValue() {} + +// OtherChannelzSecurityValue defines the struct that non-TLS protocol should return +// from GetSecurityValue(), which contains protocol specific security info. Note +// the Value field will be sent to users of channelz requesting channel info, and +// thus sensitive info should better be avoided. 
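// Illustrative sketch, not part of the vendored upstream file: the TLS
// constructors above (NewClientTLSFromFile, NewServerTLSFromFile, ...) are
// consumed from application code rather than from this package, so the usage
// is shown in comment form. File paths and the address are placeholders;
// grpc.Dial, grpc.WithTransportCredentials, grpc.NewServer and grpc.Creds come
// from the parent grpc package.
//
//	creds, err := credentials.NewClientTLSFromFile("ca.pem", "")
//	if err != nil { /* handle error */ }
//	conn, err := grpc.Dial("localhost:50051", grpc.WithTransportCredentials(creds))
//
//	serverCreds, err := credentials.NewServerTLSFromFile("server.pem", "server.key")
//	if err != nil { /* handle error */ }
//	srv := grpc.NewServer(grpc.Creds(serverCreds))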
+type OtherChannelzSecurityValue struct { + Name string + Value proto.Message +} + +func (*OtherChannelzSecurityValue) isChannelzSecurityValue() {} + +var cipherSuiteLookup = map[uint16]string{ + tls.TLS_RSA_WITH_RC4_128_SHA: "TLS_RSA_WITH_RC4_128_SHA", + tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA: "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + tls.TLS_RSA_WITH_AES_128_CBC_SHA: "TLS_RSA_WITH_AES_128_CBC_SHA", + tls.TLS_RSA_WITH_AES_256_CBC_SHA: "TLS_RSA_WITH_AES_256_CBC_SHA", + tls.TLS_RSA_WITH_AES_128_GCM_SHA256: "TLS_RSA_WITH_AES_128_GCM_SHA256", + tls.TLS_RSA_WITH_AES_256_GCM_SHA384: "TLS_RSA_WITH_AES_256_GCM_SHA384", + tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA: "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", + tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA: "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", + tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA: "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", + tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA: "TLS_ECDHE_RSA_WITH_RC4_128_SHA", + tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA: "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", + tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA: "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", + tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA: "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", + tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256: "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256: "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384: "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384: "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + tls.TLS_FALLBACK_SCSV: "TLS_FALLBACK_SCSV", + tls.TLS_RSA_WITH_AES_128_CBC_SHA256: "TLS_RSA_WITH_AES_128_CBC_SHA256", + tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256: "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", + tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256: "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", + tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305: "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305: "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", +} + +// cloneTLSConfig returns a shallow clone of the exported +// fields of cfg, ignoring the unexported sync.Once, which +// contains a mutex and must not be copied. +// +// If cfg is nil, a new zero tls.Config is returned. +// +// TODO: inline this function if possible. +func cloneTLSConfig(cfg *tls.Config) *tls.Config { + if cfg == nil { + return &tls.Config{} + } + + return cfg.Clone() +} diff --git a/vendor/google.golang.org/grpc/credentials/google/google.go b/vendor/google.golang.org/grpc/credentials/google/google.go new file mode 100644 index 0000000..04b349a --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/google/google.go @@ -0,0 +1,125 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package google defines credentials for google cloud services. 
+package google + +import ( + "context" + "fmt" + "time" + + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/credentials/oauth" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal" +) + +const tokenRequestTimeout = 30 * time.Second + +// NewDefaultCredentials returns a credentials bundle that is configured to work +// with google services. +// +// This API is experimental. +func NewDefaultCredentials() credentials.Bundle { + c := &creds{ + newPerRPCCreds: func() credentials.PerRPCCredentials { + ctx, cancel := context.WithTimeout(context.Background(), tokenRequestTimeout) + defer cancel() + perRPCCreds, err := oauth.NewApplicationDefault(ctx) + if err != nil { + grpclog.Warningf("google default creds: failed to create application oauth: %v", err) + } + return perRPCCreds + }, + } + bundle, err := c.NewWithMode(internal.CredsBundleModeFallback) + if err != nil { + grpclog.Warningf("google default creds: failed to create new creds: %v", err) + } + return bundle +} + +// NewComputeEngineCredentials returns a credentials bundle that is configured to work +// with google services. This API must only be used when running on GCE. Authentication configured +// by this API represents the GCE VM's default service account. +// +// This API is experimental. +func NewComputeEngineCredentials() credentials.Bundle { + c := &creds{ + newPerRPCCreds: func() credentials.PerRPCCredentials { + return oauth.NewComputeEngine() + }, + } + bundle, err := c.NewWithMode(internal.CredsBundleModeFallback) + if err != nil { + grpclog.Warningf("compute engine creds: failed to create new creds: %v", err) + } + return bundle +} + +// creds implements credentials.Bundle. +type creds struct { + // Supported modes are defined in internal/internal.go. + mode string + // The transport credentials associated with this bundle. + transportCreds credentials.TransportCredentials + // The per RPC credentials associated with this bundle. + perRPCCreds credentials.PerRPCCredentials + // Creates new per RPC credentials + newPerRPCCreds func() credentials.PerRPCCredentials +} + +func (c *creds) TransportCredentials() credentials.TransportCredentials { + return c.transportCreds +} + +func (c *creds) PerRPCCredentials() credentials.PerRPCCredentials { + if c == nil { + return nil + } + return c.perRPCCreds +} + +// NewWithMode should make a copy of Bundle, and switch mode. Modifying the +// existing Bundle may cause races. +func (c *creds) NewWithMode(mode string) (credentials.Bundle, error) { + newCreds := &creds{ + mode: mode, + newPerRPCCreds: c.newPerRPCCreds, + } + + // Create transport credentials. + switch mode { + case internal.CredsBundleModeFallback: + newCreds.transportCreds = credentials.NewTLS(nil) + case internal.CredsBundleModeBackendFromBalancer, internal.CredsBundleModeBalancer: + // Only the clients can use google default credentials, so we only need + // to create new ALTS client creds here. 
+ newCreds.transportCreds = alts.NewClientCreds(alts.DefaultClientOptions()) + default: + return nil, fmt.Errorf("unsupported mode: %v", mode) + } + + if mode == internal.CredsBundleModeFallback || mode == internal.CredsBundleModeBackendFromBalancer { + newCreds.perRPCCreds = newCreds.newPerRPCCreds() + } + + return newCreds, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/internal/syscallconn.go b/vendor/google.golang.org/grpc/credentials/internal/syscallconn.go new file mode 100644 index 0000000..2f4472b --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/internal/syscallconn.go @@ -0,0 +1,61 @@ +// +build !appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package internal contains credentials-internal code. +package internal + +import ( + "net" + "syscall" +) + +type sysConn = syscall.Conn + +// syscallConn keeps reference of rawConn to support syscall.Conn for channelz. +// SyscallConn() (the method in interface syscall.Conn) is explicitly +// implemented on this type, +// +// Interface syscall.Conn is implemented by most net.Conn implementations (e.g. +// TCPConn, UnixConn), but is not part of net.Conn interface. So wrapper conns +// that embed net.Conn don't implement syscall.Conn. (Side note: tls.Conn +// doesn't embed net.Conn, so even if syscall.Conn is part of net.Conn, it won't +// help here). +type syscallConn struct { + net.Conn + // sysConn is a type alias of syscall.Conn. It's necessary because the name + // `Conn` collides with `net.Conn`. + sysConn +} + +// WrapSyscallConn tries to wrap rawConn and newConn into a net.Conn that +// implements syscall.Conn. rawConn will be used to support syscall, and newConn +// will be used for read/write. +// +// This function returns newConn if rawConn doesn't implement syscall.Conn. +func WrapSyscallConn(rawConn, newConn net.Conn) net.Conn { + sysConn, ok := rawConn.(syscall.Conn) + if !ok { + return newConn + } + return &syscallConn{ + Conn: newConn, + sysConn: sysConn, + } +} diff --git a/vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go b/vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go new file mode 100644 index 0000000..d4346e9 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/internal/syscallconn_appengine.go @@ -0,0 +1,30 @@ +// +build appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package internal + +import ( + "net" +) + +// WrapSyscallConn returns newConn on appengine. +func WrapSyscallConn(rawConn, newConn net.Conn) net.Conn { + return newConn +} diff --git a/vendor/google.golang.org/grpc/credentials/oauth/oauth.go b/vendor/google.golang.org/grpc/credentials/oauth/oauth.go new file mode 100644 index 0000000..e0e74d8 --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/oauth/oauth.go @@ -0,0 +1,173 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package oauth implements gRPC credentials using OAuth. +package oauth + +import ( + "context" + "fmt" + "io/ioutil" + "sync" + + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + "golang.org/x/oauth2/jwt" + "google.golang.org/grpc/credentials" +) + +// TokenSource supplies PerRPCCredentials from an oauth2.TokenSource. +type TokenSource struct { + oauth2.TokenSource +} + +// GetRequestMetadata gets the request metadata as a map from a TokenSource. +func (ts TokenSource) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) { + token, err := ts.Token() + if err != nil { + return nil, err + } + return map[string]string{ + "authorization": token.Type() + " " + token.AccessToken, + }, nil +} + +// RequireTransportSecurity indicates whether the credentials requires transport security. +func (ts TokenSource) RequireTransportSecurity() bool { + return true +} + +type jwtAccess struct { + jsonKey []byte +} + +// NewJWTAccessFromFile creates PerRPCCredentials from the given keyFile. +func NewJWTAccessFromFile(keyFile string) (credentials.PerRPCCredentials, error) { + jsonKey, err := ioutil.ReadFile(keyFile) + if err != nil { + return nil, fmt.Errorf("credentials: failed to read the service account key file: %v", err) + } + return NewJWTAccessFromKey(jsonKey) +} + +// NewJWTAccessFromKey creates PerRPCCredentials from the given jsonKey. +func NewJWTAccessFromKey(jsonKey []byte) (credentials.PerRPCCredentials, error) { + return jwtAccess{jsonKey}, nil +} + +func (j jwtAccess) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) { + ts, err := google.JWTAccessTokenSourceFromJSON(j.jsonKey, uri[0]) + if err != nil { + return nil, err + } + token, err := ts.Token() + if err != nil { + return nil, err + } + return map[string]string{ + "authorization": token.Type() + " " + token.AccessToken, + }, nil +} + +func (j jwtAccess) RequireTransportSecurity() bool { + return true +} + +// oauthAccess supplies PerRPCCredentials from a given token. +type oauthAccess struct { + token oauth2.Token +} + +// NewOauthAccess constructs the PerRPCCredentials using a given token. 
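// Illustrative sketch, not part of the vendored upstream file: PerRPCCredentials
// built by this package are passed to grpc.Dial and must be paired with
// transport security. The key file path and OAuth scope below are placeholders.
//
//	perRPC, err := oauth.NewServiceAccountFromFile("service-account.json",
//		"https://www.googleapis.com/auth/cloud-platform")
//	if err != nil { /* handle error */ }
//	conn, err := grpc.Dial("greeter.example.com:443",
//		grpc.WithTransportCredentials(credentials.NewClientTLSFromCert(nil, "")),
//		grpc.WithPerRPCCredentials(perRPC),
//	)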
+func NewOauthAccess(token *oauth2.Token) credentials.PerRPCCredentials { + return oauthAccess{token: *token} +} + +func (oa oauthAccess) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) { + return map[string]string{ + "authorization": oa.token.Type() + " " + oa.token.AccessToken, + }, nil +} + +func (oa oauthAccess) RequireTransportSecurity() bool { + return true +} + +// NewComputeEngine constructs the PerRPCCredentials that fetches access tokens from +// Google Compute Engine (GCE)'s metadata server. It is only valid to use this +// if your program is running on a GCE instance. +// TODO(dsymonds): Deprecate and remove this. +func NewComputeEngine() credentials.PerRPCCredentials { + return TokenSource{google.ComputeTokenSource("")} +} + +// serviceAccount represents PerRPCCredentials via JWT signing key. +type serviceAccount struct { + mu sync.Mutex + config *jwt.Config + t *oauth2.Token +} + +func (s *serviceAccount) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) { + s.mu.Lock() + defer s.mu.Unlock() + if !s.t.Valid() { + var err error + s.t, err = s.config.TokenSource(ctx).Token() + if err != nil { + return nil, err + } + } + return map[string]string{ + "authorization": s.t.Type() + " " + s.t.AccessToken, + }, nil +} + +func (s *serviceAccount) RequireTransportSecurity() bool { + return true +} + +// NewServiceAccountFromKey constructs the PerRPCCredentials using the JSON key slice +// from a Google Developers service account. +func NewServiceAccountFromKey(jsonKey []byte, scope ...string) (credentials.PerRPCCredentials, error) { + config, err := google.JWTConfigFromJSON(jsonKey, scope...) + if err != nil { + return nil, err + } + return &serviceAccount{config: config}, nil +} + +// NewServiceAccountFromFile constructs the PerRPCCredentials using the JSON key file +// of a Google Developers service account. +func NewServiceAccountFromFile(keyFile string, scope ...string) (credentials.PerRPCCredentials, error) { + jsonKey, err := ioutil.ReadFile(keyFile) + if err != nil { + return nil, fmt.Errorf("credentials: failed to read the service account key file: %v", err) + } + return NewServiceAccountFromKey(jsonKey, scope...) +} + +// NewApplicationDefault returns "Application Default Credentials". For more +// detail, see https://developers.google.com/accounts/docs/application-default-credentials. +func NewApplicationDefault(ctx context.Context, scope ...string) (credentials.PerRPCCredentials, error) { + t, err := google.DefaultTokenSource(ctx, scope...) + if err != nil { + return nil, err + } + return TokenSource{t}, nil +} diff --git a/vendor/google.golang.org/grpc/credentials/tls13.go b/vendor/google.golang.org/grpc/credentials/tls13.go new file mode 100644 index 0000000..ccbf35b --- /dev/null +++ b/vendor/google.golang.org/grpc/credentials/tls13.go @@ -0,0 +1,30 @@ +// +build go1.12 + +/* + * + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package credentials + +import "crypto/tls" + +// This init function adds cipher suite constants only defined in Go 1.12. +func init() { + cipherSuiteLookup[tls.TLS_AES_128_GCM_SHA256] = "TLS_AES_128_GCM_SHA256" + cipherSuiteLookup[tls.TLS_AES_256_GCM_SHA384] = "TLS_AES_256_GCM_SHA384" + cipherSuiteLookup[tls.TLS_CHACHA20_POLY1305_SHA256] = "TLS_CHACHA20_POLY1305_SHA256" +} diff --git a/vendor/google.golang.org/grpc/dialoptions.go b/vendor/google.golang.org/grpc/dialoptions.go new file mode 100644 index 0000000..e114fec --- /dev/null +++ b/vendor/google.golang.org/grpc/dialoptions.go @@ -0,0 +1,532 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "fmt" + "net" + "time" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/internal/envconfig" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/resolver" + "google.golang.org/grpc/stats" +) + +// dialOptions configure a Dial call. dialOptions are set by the DialOption +// values passed to Dial. +type dialOptions struct { + unaryInt UnaryClientInterceptor + streamInt StreamClientInterceptor + cp Compressor + dc Decompressor + bs backoff.Strategy + block bool + insecure bool + timeout time.Duration + scChan <-chan ServiceConfig + authority string + copts transport.ConnectOptions + callOptions []CallOption + // This is used by v1 balancer dial option WithBalancer to support v1 + // balancer, and also by WithBalancerName dial option. + balancerBuilder balancer.Builder + // This is to support grpclb. + resolverBuilder resolver.Builder + reqHandshake envconfig.RequireHandshakeSetting + channelzParentID int64 + disableServiceConfig bool + disableRetry bool + disableHealthCheck bool + healthCheckFunc internal.HealthChecker + minConnectTimeout func() time.Duration + defaultServiceConfig *ServiceConfig // defaultServiceConfig is parsed from defaultServiceConfigRawJSON. + defaultServiceConfigRawJSON *string +} + +// DialOption configures how we set up the connection. +type DialOption interface { + apply(*dialOptions) +} + +// EmptyDialOption does not alter the dial configuration. It can be embedded in +// another structure to build custom dial options. +// +// This API is EXPERIMENTAL. +type EmptyDialOption struct{} + +func (EmptyDialOption) apply(*dialOptions) {} + +// funcDialOption wraps a function that modifies dialOptions into an +// implementation of the DialOption interface. 
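// Illustrative sketch, not part of the vendored upstream file: each With*
// function below only records a setting on dialOptions, and callers compose
// them when dialing. A typical call from application code (the address and
// message-size limit are placeholders) looks like:
//
//	conn, err := grpc.Dial("localhost:50051",
//		grpc.WithInsecure(),
//		grpc.WithBlock(),
//		grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(16*1024*1024)),
//	)
//	if err != nil { /* handle error */ }
//	defer conn.Close()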
+type funcDialOption struct { + f func(*dialOptions) +} + +func (fdo *funcDialOption) apply(do *dialOptions) { + fdo.f(do) +} + +func newFuncDialOption(f func(*dialOptions)) *funcDialOption { + return &funcDialOption{ + f: f, + } +} + +// WithWaitForHandshake blocks until the initial settings frame is received from +// the server before assigning RPCs to the connection. +// +// Deprecated: this is the default behavior, and this option will be removed +// after the 1.18 release. +func WithWaitForHandshake() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.reqHandshake = envconfig.RequireHandshakeOn + }) +} + +// WithWriteBufferSize determines how much data can be batched before doing a +// write on the wire. The corresponding memory allocation for this buffer will +// be twice the size to keep syscalls low. The default value for this buffer is +// 32KB. +// +// Zero will disable the write buffer such that each write will be on underlying +// connection. Note: A Send call may not directly translate to a write. +func WithWriteBufferSize(s int) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.WriteBufferSize = s + }) +} + +// WithReadBufferSize lets you set the size of read buffer, this determines how +// much data can be read at most for each read syscall. +// +// The default value for this buffer is 32KB. Zero will disable read buffer for +// a connection so data framer can access the underlying conn directly. +func WithReadBufferSize(s int) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.ReadBufferSize = s + }) +} + +// WithInitialWindowSize returns a DialOption which sets the value for initial +// window size on a stream. The lower bound for window size is 64K and any value +// smaller than that will be ignored. +func WithInitialWindowSize(s int32) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.InitialWindowSize = s + }) +} + +// WithInitialConnWindowSize returns a DialOption which sets the value for +// initial window size on a connection. The lower bound for window size is 64K +// and any value smaller than that will be ignored. +func WithInitialConnWindowSize(s int32) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.InitialConnWindowSize = s + }) +} + +// WithMaxMsgSize returns a DialOption which sets the maximum message size the +// client can receive. +// +// Deprecated: use WithDefaultCallOptions(MaxCallRecvMsgSize(s)) instead. +func WithMaxMsgSize(s int) DialOption { + return WithDefaultCallOptions(MaxCallRecvMsgSize(s)) +} + +// WithDefaultCallOptions returns a DialOption which sets the default +// CallOptions for calls over the connection. +func WithDefaultCallOptions(cos ...CallOption) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.callOptions = append(o.callOptions, cos...) + }) +} + +// WithCodec returns a DialOption which sets a codec for message marshaling and +// unmarshaling. +// +// Deprecated: use WithDefaultCallOptions(ForceCodec(_)) instead. +func WithCodec(c Codec) DialOption { + return WithDefaultCallOptions(CallCustomCodec(c)) +} + +// WithCompressor returns a DialOption which sets a Compressor to use for +// message compression. It has lower priority than the compressor set by the +// UseCompressor CallOption. +// +// Deprecated: use UseCompressor instead. 
+func WithCompressor(cp Compressor) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.cp = cp + }) +} + +// WithDecompressor returns a DialOption which sets a Decompressor to use for +// incoming message decompression. If incoming response messages are encoded +// using the decompressor's Type(), it will be used. Otherwise, the message +// encoding will be used to look up the compressor registered via +// encoding.RegisterCompressor, which will then be used to decompress the +// message. If no compressor is registered for the encoding, an Unimplemented +// status error will be returned. +// +// Deprecated: use encoding.RegisterCompressor instead. +func WithDecompressor(dc Decompressor) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.dc = dc + }) +} + +// WithBalancer returns a DialOption which sets a load balancer with the v1 API. +// Name resolver will be ignored if this DialOption is specified. +// +// Deprecated: use the new balancer APIs in balancer package and +// WithBalancerName. +func WithBalancer(b Balancer) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.balancerBuilder = &balancerWrapperBuilder{ + b: b, + } + }) +} + +// WithBalancerName sets the balancer that the ClientConn will be initialized +// with. Balancer registered with balancerName will be used. This function +// panics if no balancer was registered by balancerName. +// +// The balancer cannot be overridden by balancer option specified by service +// config. +// +// This is an EXPERIMENTAL API. +func WithBalancerName(balancerName string) DialOption { + builder := balancer.Get(balancerName) + if builder == nil { + panic(fmt.Sprintf("grpc.WithBalancerName: no balancer is registered for name %v", balancerName)) + } + return newFuncDialOption(func(o *dialOptions) { + o.balancerBuilder = builder + }) +} + +// withResolverBuilder is only for grpclb. +func withResolverBuilder(b resolver.Builder) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.resolverBuilder = b + }) +} + +// WithServiceConfig returns a DialOption which has a channel to read the +// service configuration. +// +// Deprecated: service config should be received through name resolver, as +// specified here. +// https://github.com/grpc/grpc/blob/master/doc/service_config.md +func WithServiceConfig(c <-chan ServiceConfig) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.scChan = c + }) +} + +// WithBackoffMaxDelay configures the dialer to use the provided maximum delay +// when backing off after failed connection attempts. +func WithBackoffMaxDelay(md time.Duration) DialOption { + return WithBackoffConfig(BackoffConfig{MaxDelay: md}) +} + +// WithBackoffConfig configures the dialer to use the provided backoff +// parameters after connection failures. +// +// Use WithBackoffMaxDelay until more parameters on BackoffConfig are opened up +// for use. +func WithBackoffConfig(b BackoffConfig) DialOption { + return withBackoff(backoff.Exponential{ + MaxDelay: b.MaxDelay, + }) +} + +// withBackoff sets the backoff strategy used for connectRetryNum after a failed +// connection attempt. +// +// This can be exported if arbitrary backoff strategies are allowed by gRPC. +func withBackoff(bs backoff.Strategy) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.bs = bs + }) +} + +// WithBlock returns a DialOption which makes caller of Dial blocks until the +// underlying connection is up. 
Without this, Dial returns immediately and +// connecting the server happens in background. +func WithBlock() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.block = true + }) +} + +// WithInsecure returns a DialOption which disables transport security for this +// ClientConn. Note that transport security is required unless WithInsecure is +// set. +func WithInsecure() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.insecure = true + }) +} + +// WithTransportCredentials returns a DialOption which configures a connection +// level security credentials (e.g., TLS/SSL). This should not be used together +// with WithCredentialsBundle. +func WithTransportCredentials(creds credentials.TransportCredentials) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.TransportCredentials = creds + }) +} + +// WithPerRPCCredentials returns a DialOption which sets credentials and places +// auth state on each outbound RPC. +func WithPerRPCCredentials(creds credentials.PerRPCCredentials) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.PerRPCCredentials = append(o.copts.PerRPCCredentials, creds) + }) +} + +// WithCredentialsBundle returns a DialOption to set a credentials bundle for +// the ClientConn.WithCreds. This should not be used together with +// WithTransportCredentials. +// +// This API is experimental. +func WithCredentialsBundle(b credentials.Bundle) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.CredsBundle = b + }) +} + +// WithTimeout returns a DialOption that configures a timeout for dialing a +// ClientConn initially. This is valid if and only if WithBlock() is present. +// +// Deprecated: use DialContext and context.WithTimeout instead. +func WithTimeout(d time.Duration) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.timeout = d + }) +} + +// WithContextDialer returns a DialOption that sets a dialer to create +// connections. If FailOnNonTempDialError() is set to true, and an error is +// returned by f, gRPC checks the error's Temporary() method to decide if it +// should try to reconnect to the network address. +func WithContextDialer(f func(context.Context, string) (net.Conn, error)) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.Dialer = f + }) +} + +func init() { + internal.WithResolverBuilder = withResolverBuilder + internal.WithHealthCheckFunc = withHealthCheckFunc +} + +// WithDialer returns a DialOption that specifies a function to use for dialing +// network addresses. If FailOnNonTempDialError() is set to true, and an error +// is returned by f, gRPC checks the error's Temporary() method to decide if it +// should try to reconnect to the network address. +// +// Deprecated: use WithContextDialer instead +func WithDialer(f func(string, time.Duration) (net.Conn, error)) DialOption { + return WithContextDialer( + func(ctx context.Context, addr string) (net.Conn, error) { + if deadline, ok := ctx.Deadline(); ok { + return f(addr, time.Until(deadline)) + } + return f(addr, 0) + }) +} + +// WithStatsHandler returns a DialOption that specifies the stats handler for +// all the RPCs and underlying network connections in this ClientConn. +func WithStatsHandler(h stats.Handler) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.StatsHandler = h + }) +} + +// FailOnNonTempDialError returns a DialOption that specifies if gRPC fails on +// non-temporary dial errors. 
If f is true, and dialer returns a non-temporary +// error, gRPC will fail the connection to the network address and won't try to +// reconnect. The default value of FailOnNonTempDialError is false. +// +// FailOnNonTempDialError only affects the initial dial, and does not do +// anything useful unless you are also using WithBlock(). +// +// This is an EXPERIMENTAL API. +func FailOnNonTempDialError(f bool) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.FailOnNonTempDialError = f + }) +} + +// WithUserAgent returns a DialOption that specifies a user agent string for all +// the RPCs. +func WithUserAgent(s string) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.UserAgent = s + }) +} + +// WithKeepaliveParams returns a DialOption that specifies keepalive parameters +// for the client transport. +func WithKeepaliveParams(kp keepalive.ClientParameters) DialOption { + if kp.Time < internal.KeepaliveMinPingTime { + grpclog.Warningf("Adjusting keepalive ping interval to minimum period of %v", internal.KeepaliveMinPingTime) + kp.Time = internal.KeepaliveMinPingTime + } + return newFuncDialOption(func(o *dialOptions) { + o.copts.KeepaliveParams = kp + }) +} + +// WithUnaryInterceptor returns a DialOption that specifies the interceptor for +// unary RPCs. +func WithUnaryInterceptor(f UnaryClientInterceptor) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.unaryInt = f + }) +} + +// WithStreamInterceptor returns a DialOption that specifies the interceptor for +// streaming RPCs. +func WithStreamInterceptor(f StreamClientInterceptor) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.streamInt = f + }) +} + +// WithAuthority returns a DialOption that specifies the value to be used as the +// :authority pseudo-header. This value only works with WithInsecure and has no +// effect if TransportCredentials are present. +func WithAuthority(a string) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.authority = a + }) +} + +// WithChannelzParentID returns a DialOption that specifies the channelz ID of +// current ClientConn's parent. This function is used in nested channel creation +// (e.g. grpclb dial). +func WithChannelzParentID(id int64) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.channelzParentID = id + }) +} + +// WithDisableServiceConfig returns a DialOption that causes grpc to ignore any +// service config provided by the resolver and provides a hint to the resolver +// to not fetch service configs. +// +// Note that, this dial option only disables service config from resolver. If +// default service config is provided, grpc will use the default service config. +func WithDisableServiceConfig() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.disableServiceConfig = true + }) +} + +// WithDefaultServiceConfig returns a DialOption that configures the default +// service config, which will be used in cases where: +// 1. WithDisableServiceConfig is called. +// 2. Resolver does not return service config or if the resolver gets and invalid config. +// +// This API is EXPERIMENTAL. +func WithDefaultServiceConfig(s string) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.defaultServiceConfigRawJSON = &s + }) +} + +// WithDisableRetry returns a DialOption that disables retries, even if the +// service config enables them. 
This does not impact transparent retries, which +// will happen automatically if no data is written to the wire or if the RPC is +// unprocessed by the remote server. +// +// Retry support is currently disabled by default, but will be enabled by +// default in the future. Until then, it may be enabled by setting the +// environment variable "GRPC_GO_RETRY" to "on". +// +// This API is EXPERIMENTAL. +func WithDisableRetry() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.disableRetry = true + }) +} + +// WithMaxHeaderListSize returns a DialOption that specifies the maximum +// (uncompressed) size of header list that the client is prepared to accept. +func WithMaxHeaderListSize(s uint32) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.copts.MaxHeaderListSize = &s + }) +} + +// WithDisableHealthCheck disables the LB channel health checking for all +// SubConns of this ClientConn. +// +// This API is EXPERIMENTAL. +func WithDisableHealthCheck() DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.disableHealthCheck = true + }) +} + +// withHealthCheckFunc replaces the default health check function with the +// provided one. It makes tests easier to change the health check function. +// +// For testing purpose only. +func withHealthCheckFunc(f internal.HealthChecker) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.healthCheckFunc = f + }) +} + +func defaultDialOptions() dialOptions { + return dialOptions{ + disableRetry: !envconfig.Retry, + reqHandshake: envconfig.RequireHandshake, + healthCheckFunc: internal.HealthCheckFunc, + copts: transport.ConnectOptions{ + WriteBufferSize: defaultWriteBufSize, + ReadBufferSize: defaultReadBufSize, + }, + } +} + +// withGetMinConnectDeadline specifies the function that clientconn uses to +// get minConnectDeadline. This can be used to make connection attempts happen +// faster/slower. +// +// For testing purpose only. +func withMinConnectDeadline(f func() time.Duration) DialOption { + return newFuncDialOption(func(o *dialOptions) { + o.minConnectTimeout = f + }) +} diff --git a/vendor/google.golang.org/grpc/doc.go b/vendor/google.golang.org/grpc/doc.go new file mode 100644 index 0000000..187adbb --- /dev/null +++ b/vendor/google.golang.org/grpc/doc.go @@ -0,0 +1,24 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* +Package grpc implements an RPC system called gRPC. + +See grpc.io for more information about gRPC. +*/ +package grpc // import "google.golang.org/grpc" diff --git a/vendor/google.golang.org/grpc/encoding/encoding.go b/vendor/google.golang.org/grpc/encoding/encoding.go new file mode 100644 index 0000000..30a75da --- /dev/null +++ b/vendor/google.golang.org/grpc/encoding/encoding.go @@ -0,0 +1,118 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package encoding defines the interface for the compressor and codec, and +// functions to register and retrieve compressors and codecs. +// +// This package is EXPERIMENTAL. +package encoding + +import ( + "io" + "strings" +) + +// Identity specifies the optional encoding for uncompressed streams. +// It is intended for grpc internal use only. +const Identity = "identity" + +// Compressor is used for compressing and decompressing when sending or +// receiving messages. +type Compressor interface { + // Compress writes the data written to wc to w after compressing it. If an + // error occurs while initializing the compressor, that error is returned + // instead. + Compress(w io.Writer) (io.WriteCloser, error) + // Decompress reads data from r, decompresses it, and provides the + // uncompressed data via the returned io.Reader. If an error occurs while + // initializing the decompressor, that error is returned instead. + Decompress(r io.Reader) (io.Reader, error) + // Name is the name of the compression codec and is used to set the content + // coding header. The result must be static; the result cannot change + // between calls. + Name() string +} + +var registeredCompressor = make(map[string]Compressor) + +// RegisterCompressor registers the compressor with gRPC by its name. It can +// be activated when sending an RPC via grpc.UseCompressor(). It will be +// automatically accessed when receiving a message based on the content coding +// header. Servers also use it to send a response with the same encoding as +// the request. +// +// NOTE: this function must only be called during initialization time (i.e. in +// an init() function), and is not thread-safe. If multiple Compressors are +// registered with the same name, the one registered last will take effect. +func RegisterCompressor(c Compressor) { + registeredCompressor[c.Name()] = c +} + +// GetCompressor returns Compressor for the given compressor name. +func GetCompressor(name string) Compressor { + return registeredCompressor[name] +} + +// Codec defines the interface gRPC uses to encode and decode messages. Note +// that implementations of this interface must be thread safe; a Codec's +// methods can be called from concurrent goroutines. +type Codec interface { + // Marshal returns the wire format of v. + Marshal(v interface{}) ([]byte, error) + // Unmarshal parses the wire format into v. + Unmarshal(data []byte, v interface{}) error + // Name returns the name of the Codec implementation. The returned string + // will be used as part of content type in transmission. The result must be + // static; the result cannot change between calls. + Name() string +} + +var registeredCodecs = make(map[string]Codec) + +// RegisterCodec registers the provided Codec for use with all gRPC clients and +// servers. +// +// The Codec will be stored and looked up by result of its Name() method, which +// should match the content-subtype of the encoding handled by the Codec. This +// is case-insensitive, and is stored and looked up as lowercase. 
If the +// result of calling Name() is an empty string, RegisterCodec will panic. See +// Content-Type on +// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for +// more details. +// +// NOTE: this function must only be called during initialization time (i.e. in +// an init() function), and is not thread-safe. If multiple Compressors are +// registered with the same name, the one registered last will take effect. +func RegisterCodec(codec Codec) { + if codec == nil { + panic("cannot register a nil Codec") + } + if codec.Name() == "" { + panic("cannot register Codec with empty string result for Name()") + } + contentSubtype := strings.ToLower(codec.Name()) + registeredCodecs[contentSubtype] = codec +} + +// GetCodec gets a registered Codec by content-subtype, or nil if no Codec is +// registered for the content-subtype. +// +// The content-subtype is expected to be lowercase. +func GetCodec(contentSubtype string) Codec { + return registeredCodecs[contentSubtype] +} diff --git a/vendor/google.golang.org/grpc/encoding/gzip/gzip.go b/vendor/google.golang.org/grpc/encoding/gzip/gzip.go new file mode 100644 index 0000000..09564db --- /dev/null +++ b/vendor/google.golang.org/grpc/encoding/gzip/gzip.go @@ -0,0 +1,117 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package gzip implements and registers the gzip compressor +// during the initialization. +// This package is EXPERIMENTAL. +package gzip + +import ( + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "sync" + + "google.golang.org/grpc/encoding" +) + +// Name is the name registered for the gzip compressor. +const Name = "gzip" + +func init() { + c := &compressor{} + c.poolCompressor.New = func() interface{} { + return &writer{Writer: gzip.NewWriter(ioutil.Discard), pool: &c.poolCompressor} + } + encoding.RegisterCompressor(c) +} + +type writer struct { + *gzip.Writer + pool *sync.Pool +} + +// SetLevel updates the registered gzip compressor to use the compression level specified (gzip.HuffmanOnly is not supported). +// NOTE: this function must only be called during initialization time (i.e. in an init() function), +// and is not thread-safe. +// +// The error returned will be nil if the specified level is valid. 
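// Illustrative sketch, not part of the vendored upstream file: to plug a custom
// codec into the encoding.RegisterCodec API shown in the previous file, an
// application package implements the three Codec methods and registers the
// codec from an init function. jsonCodec, the "json" name and the use of
// encoding/json are example choices, not anything defined by this patch.
//
//	type jsonCodec struct{}
//
//	func (jsonCodec) Marshal(v interface{}) ([]byte, error)      { return json.Marshal(v) }
//	func (jsonCodec) Unmarshal(data []byte, v interface{}) error { return json.Unmarshal(data, v) }
//	func (jsonCodec) Name() string                               { return "json" }
//
//	func init() { encoding.RegisterCodec(jsonCodec{}) }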
+func SetLevel(level int) error { + if level < gzip.DefaultCompression || level > gzip.BestCompression { + return fmt.Errorf("grpc: invalid gzip compression level: %d", level) + } + c := encoding.GetCompressor(Name).(*compressor) + c.poolCompressor.New = func() interface{} { + w, err := gzip.NewWriterLevel(ioutil.Discard, level) + if err != nil { + panic(err) + } + return &writer{Writer: w, pool: &c.poolCompressor} + } + return nil +} + +func (c *compressor) Compress(w io.Writer) (io.WriteCloser, error) { + z := c.poolCompressor.Get().(*writer) + z.Writer.Reset(w) + return z, nil +} + +func (z *writer) Close() error { + defer z.pool.Put(z) + return z.Writer.Close() +} + +type reader struct { + *gzip.Reader + pool *sync.Pool +} + +func (c *compressor) Decompress(r io.Reader) (io.Reader, error) { + z, inPool := c.poolDecompressor.Get().(*reader) + if !inPool { + newZ, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + return &reader{Reader: newZ, pool: &c.poolDecompressor}, nil + } + if err := z.Reset(r); err != nil { + c.poolDecompressor.Put(z) + return nil, err + } + return z, nil +} + +func (z *reader) Read(p []byte) (n int, err error) { + n, err = z.Reader.Read(p) + if err == io.EOF { + z.pool.Put(z) + } + return n, err +} + +func (c *compressor) Name() string { + return Name +} + +type compressor struct { + poolCompressor sync.Pool + poolDecompressor sync.Pool +} diff --git a/vendor/google.golang.org/grpc/encoding/proto/proto.go b/vendor/google.golang.org/grpc/encoding/proto/proto.go new file mode 100644 index 0000000..66b97a6 --- /dev/null +++ b/vendor/google.golang.org/grpc/encoding/proto/proto.go @@ -0,0 +1,110 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package proto defines the protobuf codec. Importing this package will +// register the codec. +package proto + +import ( + "math" + "sync" + + "github.com/golang/protobuf/proto" + "google.golang.org/grpc/encoding" +) + +// Name is the name registered for the proto compressor. +const Name = "proto" + +func init() { + encoding.RegisterCodec(codec{}) +} + +// codec is a Codec implementation with protobuf. It is the default codec for gRPC. 
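+// Other codecs registered via encoding.RegisterCodec can be selected per RPC
+// by their content-subtype; a sketch, assuming a codec named "json" has been
+// registered and grpc.CallContentSubtype is available in the caller's grpc
+// version:
+//
+//	resp, err := client.UnaryEcho(ctx, req, grpc.CallContentSubtype("json"))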
+type codec struct{} + +type cachedProtoBuffer struct { + lastMarshaledSize uint32 + proto.Buffer +} + +func capToMaxInt32(val int) uint32 { + if val > math.MaxInt32 { + return uint32(math.MaxInt32) + } + return uint32(val) +} + +func marshal(v interface{}, cb *cachedProtoBuffer) ([]byte, error) { + protoMsg := v.(proto.Message) + newSlice := make([]byte, 0, cb.lastMarshaledSize) + + cb.SetBuf(newSlice) + cb.Reset() + if err := cb.Marshal(protoMsg); err != nil { + return nil, err + } + out := cb.Bytes() + cb.lastMarshaledSize = capToMaxInt32(len(out)) + return out, nil +} + +func (codec) Marshal(v interface{}) ([]byte, error) { + if pm, ok := v.(proto.Marshaler); ok { + // object can marshal itself, no need for buffer + return pm.Marshal() + } + + cb := protoBufferPool.Get().(*cachedProtoBuffer) + out, err := marshal(v, cb) + + // put back buffer and lose the ref to the slice + cb.SetBuf(nil) + protoBufferPool.Put(cb) + return out, err +} + +func (codec) Unmarshal(data []byte, v interface{}) error { + protoMsg := v.(proto.Message) + protoMsg.Reset() + + if pu, ok := protoMsg.(proto.Unmarshaler); ok { + // object can unmarshal itself, no need for buffer + return pu.Unmarshal(data) + } + + cb := protoBufferPool.Get().(*cachedProtoBuffer) + cb.SetBuf(data) + err := cb.Unmarshal(protoMsg) + cb.SetBuf(nil) + protoBufferPool.Put(cb) + return err +} + +func (codec) Name() string { + return Name +} + +var protoBufferPool = &sync.Pool{ + New: func() interface{} { + return &cachedProtoBuffer{ + Buffer: proto.Buffer{}, + lastMarshaledSize: 16, + } + }, +} diff --git a/vendor/google.golang.org/grpc/examples/features/authentication/client/main.go b/vendor/google.golang.org/grpc/examples/features/authentication/client/main.go new file mode 100644 index 0000000..1097eaf --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/authentication/client/main.go @@ -0,0 +1,86 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// The client demonstrates how to supply an OAuth2 token for every RPC. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "golang.org/x/oauth2" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/oauth" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/testdata" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +func callUnaryEcho(client ecpb.EchoClient, message string) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + resp, err := client.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message}) + if err != nil { + log.Fatalf("client.UnaryEcho(_) = _, %v: ", err) + } + fmt.Println("UnaryEcho: ", resp.Message) +} + +func main() { + flag.Parse() + + // Set up the credentials for the connection. 
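+	// oauth.NewOauthAccess turns the token into per-RPC credentials that are
+	// attached to every request as metadata, roughly (sketch):
+	//
+	//	metadata.Pairs("authorization", "Bearer "+token.AccessToken)
+	//
+	// which is exactly what the server in this example validates.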
+ perRPC := oauth.NewOauthAccess(fetchToken()) + creds, err := credentials.NewClientTLSFromFile(testdata.Path("ca.pem"), "x.test.youtube.com") + if err != nil { + log.Fatalf("failed to load credentials: %v", err) + } + opts := []grpc.DialOption{ + // In addition to the following grpc.DialOption, callers may also use + // the grpc.CallOption grpc.PerRPCCredentials with the RPC invocation + // itself. + // See: https://godoc.org/google.golang.org/grpc#PerRPCCredentials + grpc.WithPerRPCCredentials(perRPC), + // oauth.NewOauthAccess requires the configuration of transport + // credentials. + grpc.WithTransportCredentials(creds), + } + + conn, err := grpc.Dial(*addr, opts...) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + rgc := ecpb.NewEchoClient(conn) + + callUnaryEcho(rgc, "hello world") +} + +// fetchToken simulates a token lookup and omits the details of proper token +// acquisition. For examples of how to acquire an OAuth2 token, see: +// https://godoc.org/golang.org/x/oauth2 +func fetchToken() *oauth2.Token { + return &oauth2.Token{ + AccessToken: "some-secret-token", + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/authentication/server/main.go b/vendor/google.golang.org/grpc/examples/features/authentication/server/main.go new file mode 100644 index 0000000..3ea94cd --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/authentication/server/main.go @@ -0,0 +1,118 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// The server demonstrates how to consume and validate OAuth2 tokens provided by +// clients for each RPC. +package main + +import ( + "context" + "crypto/tls" + "flag" + "fmt" + "log" + "net" + "strings" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var ( + errMissingMetadata = status.Errorf(codes.InvalidArgument, "missing metadata") + errInvalidToken = status.Errorf(codes.Unauthenticated, "invalid token") +) + +var port = flag.Int("port", 50051, "the port to serve on") + +func main() { + flag.Parse() + fmt.Printf("server starting on port %d...\n", *port) + + cert, err := tls.LoadX509KeyPair(testdata.Path("server1.pem"), testdata.Path("server1.key")) + if err != nil { + log.Fatalf("failed to load key pair: %s", err) + } + opts := []grpc.ServerOption{ + // The following grpc.ServerOption adds an interceptor for all unary + // RPCs. To configure an interceptor for streaming RPCs, see: + // https://godoc.org/google.golang.org/grpc#StreamInterceptor + grpc.UnaryInterceptor(ensureValidToken), + // Enable TLS for all incoming connections. + grpc.Creds(credentials.NewServerTLSFromCert(&cert)), + } + s := grpc.NewServer(opts...) 
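+	// Note that only unary RPCs pass through ensureValidToken here; guarding
+	// the streaming handlers as well would additionally require a
+	// grpc.StreamInterceptor server option (see the interceptor example for a
+	// streaming variant).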
+ ecpb.RegisterEchoServer(s, &ecServer{}) + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} + +type ecServer struct{} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: req.Message}, nil +} +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +// valid validates the authorization. +func valid(authorization []string) bool { + if len(authorization) < 1 { + return false + } + token := strings.TrimPrefix(authorization[0], "Bearer ") + // Perform the token validation here. For the sake of this example, the code + // here forgoes any of the usual OAuth2 token validation and instead checks + // for a token matching an arbitrary string. + return token == "some-secret-token" +} + +// ensureValidToken ensures a valid token exists within a request's metadata. If +// the token is missing or invalid, the interceptor blocks execution of the +// handler and returns an error. Otherwise, the interceptor invokes the unary +// handler. +func ensureValidToken(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + md, ok := metadata.FromIncomingContext(ctx) + if !ok { + return nil, errMissingMetadata + } + // The keys within metadata.MD are normalized to lowercase. + // See: https://godoc.org/google.golang.org/grpc/metadata#New + if !valid(md["authorization"]) { + return nil, errInvalidToken + } + // Continue execution of handler after ensuring a valid token. + return handler(ctx, req) +} diff --git a/vendor/google.golang.org/grpc/examples/features/cancellation/client/main.go b/vendor/google.golang.org/grpc/examples/features/cancellation/client/main.go new file mode 100644 index 0000000..58bd4b6 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/cancellation/client/main.go @@ -0,0 +1,94 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. 
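+//
+// It demonstrates client-side cancellation: after a few successful exchanges
+// the client calls cancel() on the stream's context, which is propagated to
+// the server and makes further stream.Recv calls fail with codes.Canceled.
+// The core pattern is (sketch):
+//
+//	ctx, cancel := context.WithCancel(context.Background())
+//	stream, err := client.BidirectionalStreamingEcho(ctx)
+//	// ... exchange messages ...
+//	cancel() // abort the RPC; pending and future Recv/Send calls fail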
+package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +func sendMessage(stream pb.Echo_BidirectionalStreamingEchoClient, msg string) error { + fmt.Printf("sending message %q\n", msg) + return stream.Send(&pb.EchoRequest{Message: msg}) +} + +func recvMessage(stream pb.Echo_BidirectionalStreamingEchoClient, wantErrCode codes.Code) { + res, err := stream.Recv() + if status.Code(err) != wantErrCode { + log.Fatalf("stream.Recv() = %v, %v; want _, status.Code(err)=%v", res, err, wantErrCode) + } + if err != nil { + fmt.Printf("stream.Recv() returned expected error %v\n", err) + return + } + fmt.Printf("received message %q\n", res.GetMessage()) +} + +func main() { + flag.Parse() + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + // Initiate the stream with a context that supports cancellation. + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + stream, err := c.BidirectionalStreamingEcho(ctx) + if err != nil { + log.Fatalf("error creating stream: %v", err) + } + + // Send some test messages. + if err := sendMessage(stream, "hello"); err != nil { + log.Fatalf("error sending on stream: %v", err) + } + if err := sendMessage(stream, "world"); err != nil { + log.Fatalf("error sending on stream: %v", err) + } + + // Ensure the RPC is working. + recvMessage(stream, codes.OK) + recvMessage(stream, codes.OK) + + fmt.Println("cancelling context") + cancel() + + // This Send may or may not return an error, depending on whether the + // monitored context detects cancellation before the call is made. + sendMessage(stream, "closed") + + // This Recv should never succeed. + recvMessage(stream, codes.Canceled) +} diff --git a/vendor/google.golang.org/grpc/examples/features/cancellation/server/main.go b/vendor/google.golang.org/grpc/examples/features/cancellation/server/main.go new file mode 100644 index 0000000..2d12bb9 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/cancellation/server/main.go @@ -0,0 +1,78 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. 
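+//
+// On the server side a client cancellation surfaces as an error from
+// stream.Recv, and the stream's context reports it as well (sketch):
+//
+//	if err := stream.Context().Err(); err == context.Canceled {
+//		// the client went away; clean up and return
+//	}
+//
+// so the echo loop below simply returns the error it gets from Recv.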
+package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +type server struct{} + +func (s *server) UnaryEcho(ctx context.Context, in *pb.EchoRequest) (*pb.EchoResponse, error) { + return nil, status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) ServerStreamingEcho(in *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error { + for { + in, err := stream.Recv() + if err != nil { + fmt.Printf("server: error receiving from stream: %v\n", err) + if err == io.EOF { + return nil + } + return err + } + fmt.Printf("echoing message %q\n", in.Message) + stream.Send(&pb.EchoResponse{Message: in.Message}) + } +} + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + fmt.Printf("server listening at port %v\n", lis.Addr()) + s := grpc.NewServer() + pb.RegisterEchoServer(s, &server{}) + s.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/examples/features/compression/client/main.go b/vendor/google.golang.org/grpc/examples/features/compression/client/main.go new file mode 100644 index 0000000..4375c5d --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/compression/client/main.go @@ -0,0 +1,60 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/encoding/gzip" // Install the gzip compressor + pb "google.golang.org/grpc/examples/features/proto/echo" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +func main() { + flag.Parse() + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + // Send the RPC compressed. 
If all RPCs on a client should be sent this + // way, use the DialOption: + // grpc.WithDefaultCallOptions(grpc.UseCompressor(gzip.Name)) + const msg = "compress" + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + res, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: msg}, grpc.UseCompressor(gzip.Name)) + fmt.Printf("UnaryEcho call returned %q, %v\n", res.GetMessage(), err) + if err != nil || res.GetMessage() != msg { + log.Fatalf("Message=%q, err=%v; want Message=%q, err=", res.GetMessage(), err, msg) + } + +} diff --git a/vendor/google.golang.org/grpc/examples/features/compression/server/main.go b/vendor/google.golang.org/grpc/examples/features/compression/server/main.go new file mode 100644 index 0000000..f2862c0 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/compression/server/main.go @@ -0,0 +1,70 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" + + _ "google.golang.org/grpc/encoding/gzip" // Install the gzip compressor +) + +var port = flag.Int("port", 50051, "the port to serve on") + +type server struct{} + +func (s *server) UnaryEcho(ctx context.Context, in *pb.EchoRequest) (*pb.EchoResponse, error) { + fmt.Printf("UnaryEcho called with message %q\n", in.GetMessage()) + return &pb.EchoResponse{Message: in.Message}, nil +} + +func (s *server) ServerStreamingEcho(in *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + fmt.Printf("server listening at %v\n", lis.Addr()) + + s := grpc.NewServer() + pb.RegisterEchoServer(s, &server{}) + s.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/examples/features/deadline/client/main.go b/vendor/google.golang.org/grpc/examples/features/deadline/client/main.go new file mode 100644 index 0000000..0e26261 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/deadline/client/main.go @@ -0,0 +1,95 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var addr = flag.String("addr", "localhost:50052", "the address to connect to") + +func unaryCall(c pb.EchoClient, requestID int, message string, want codes.Code) { + // Creates a context with a one second deadline for the RPC. + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + + req := &pb.EchoRequest{Message: message} + + _, err := c.UnaryEcho(ctx, req) + got := status.Code(err) + fmt.Printf("[%v] wanted = %v, got = %v\n", requestID, want, got) +} + +func streamingCall(c pb.EchoClient, requestID int, message string, want codes.Code) { + // Creates a context with a one second deadline for the RPC. + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + + stream, err := c.BidirectionalStreamingEcho(ctx) + if err != nil { + log.Printf("Stream err: %v", err) + return + } + + err = stream.Send(&pb.EchoRequest{Message: message}) + if err != nil { + log.Printf("Send error: %v", err) + return + } + + _, err = stream.Recv() + + got := status.Code(err) + fmt.Printf("[%v] wanted = %v, got = %v\n", requestID, want, got) +} + +func main() { + flag.Parse() + + conn, err := grpc.Dial(*addr, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + // A successful request + unaryCall(c, 1, "world", codes.OK) + // Exceeds deadline + unaryCall(c, 2, "delay", codes.DeadlineExceeded) + // A successful request with propagated deadline + unaryCall(c, 3, "[propagate me]world", codes.OK) + // Exceeds propagated deadline + unaryCall(c, 4, "[propagate me][propagate me]world", codes.DeadlineExceeded) + // Receives a response from the stream successfully. + streamingCall(c, 5, "[propagate me]world", codes.OK) + // Exceeds propagated deadline before receiving a response + streamingCall(c, 6, "[propagate me][propagate me]world", codes.DeadlineExceeded) +} diff --git a/vendor/google.golang.org/grpc/examples/features/deadline/server/main.go b/vendor/google.golang.org/grpc/examples/features/deadline/server/main.go new file mode 100644 index 0000000..63044b5 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/deadline/server/main.go @@ -0,0 +1,128 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "net" + "strings" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50052, "port number") + +// server is used to implement EchoServer. +type server struct { + client pb.EchoClient + cc *grpc.ClientConn +} + +func (s *server) UnaryEcho(ctx context.Context, req *pb.EchoRequest) (*pb.EchoResponse, error) { + message := req.Message + if strings.HasPrefix(message, "[propagate me]") { + time.Sleep(800 * time.Millisecond) + message = strings.TrimPrefix(message, "[propagate me]") + return s.client.UnaryEcho(ctx, &pb.EchoRequest{Message: message}) + } + + if message == "delay" { + time.Sleep(1500 * time.Millisecond) + } + + return &pb.EchoResponse{Message: req.Message}, nil +} + +func (s *server) ServerStreamingEcho(req *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "RPC unimplemented") +} + +func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "RPC unimplemented") +} + +func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error { + for { + req, err := stream.Recv() + if err == io.EOF { + return status.Error(codes.InvalidArgument, "request message not received") + } + if err != nil { + return err + } + + message := req.Message + if strings.HasPrefix(message, "[propagate me]") { + time.Sleep(800 * time.Millisecond) + message = strings.TrimPrefix(message, "[propagate me]") + res, err := s.client.UnaryEcho(stream.Context(), &pb.EchoRequest{Message: message}) + if err != nil { + return err + } + stream.Send(res) + } + + if message == "delay" { + time.Sleep(1500 * time.Millisecond) + } + stream.Send(&pb.EchoResponse{Message: message}) + } +} + +func (s *server) Close() { + s.cc.Close() +} + +func newEchoServer() *server { + target := fmt.Sprintf("localhost:%v", *port) + cc, err := grpc.Dial(target, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + return &server{client: pb.NewEchoClient(cc), cc: cc} +} + +func main() { + flag.Parse() + + address := fmt.Sprintf(":%v", *port) + lis, err := net.Listen("tcp", address) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + + echoServer := newEchoServer() + defer echoServer.Close() + + grpcServer := grpc.NewServer() + pb.RegisterEchoServer(grpcServer, echoServer) + + if err := grpcServer.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/debugging/client/main.go b/vendor/google.golang.org/grpc/examples/features/debugging/client/main.go new file mode 100644 index 0000000..5fc774d --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/debugging/client/main.go @@ -0,0 +1,89 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "log" + "net" + "os" + "time" + + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/channelz/service" + pb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/resolver" + "google.golang.org/grpc/resolver/manual" +) + +const ( + defaultName = "world" +) + +func main() { + /***** Set up the server serving channelz service. *****/ + lis, err := net.Listen("tcp", ":50052") + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + defer lis.Close() + s := grpc.NewServer() + service.RegisterChannelzServiceToServer(s) + go s.Serve(lis) + defer s.Stop() + + /***** Initialize manual resolver and Dial *****/ + r, rcleanup := manual.GenerateAndRegisterManualResolver() + defer rcleanup() + // Set up a connection to the server. + conn, err := grpc.Dial(r.Scheme()+":///test.server", grpc.WithInsecure(), grpc.WithBalancerName("round_robin")) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + // Manually provide resolved addresses for the target. + r.UpdateState(resolver.State{Addresses: []resolver.Address{{Addr: ":10001"}, {Addr: ":10002"}, {Addr: ":10003"}}}) + + c := pb.NewGreeterClient(conn) + + // Contact the server and print out its response. + name := defaultName + if len(os.Args) > 1 { + name = os.Args[1] + } + + /***** Make 100 SayHello RPCs *****/ + for i := 0; i < 100; i++ { + // Setting a 150ms timeout on the RPC. + ctx, cancel := context.WithTimeout(context.Background(), 150*time.Millisecond) + defer cancel() + r, err := c.SayHello(ctx, &pb.HelloRequest{Name: name}) + if err != nil { + log.Printf("could not greet: %v", err) + } else { + log.Printf("Greeting: %s", r.Message) + } + } + + /***** Wait for user exiting the program *****/ + // Unless you exit the program (e.g. CTRL+C), channelz data will be available for querying. + // Users can take time to examine and learn about the info provided by channelz. + select {} +} diff --git a/vendor/google.golang.org/grpc/examples/features/debugging/server/main.go b/vendor/google.golang.org/grpc/examples/features/debugging/server/main.go new file mode 100644 index 0000000..8dab454 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/debugging/server/main.go @@ -0,0 +1,86 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "log" + "net" + "time" + + "golang.org/x/net/context" + "google.golang.org/grpc" + "google.golang.org/grpc/channelz/service" + pb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/internal/grpcrand" +) + +var ( + ports = []string{":10001", ":10002", ":10003"} +) + +// server is used to implement helloworld.GreeterServer. 
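+// With the channelz service registered in main below, the data collected by
+// these servers can be queried over gRPC, e.g. with a channelz client
+// (sketch; the generated package path follows this vendored tree and may
+// differ between grpc-go versions):
+//
+//	import channelzpb "google.golang.org/grpc/channelz/grpc_channelz_v1"
+//
+//	cc, _ := grpc.Dial("localhost:50051", grpc.WithInsecure())
+//	res, _ := channelzpb.NewChannelzClient(cc).GetServers(ctx, &channelzpb.GetServersRequest{})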
+type server struct{} + +// SayHello implements helloworld.GreeterServer +func (s *server) SayHello(ctx context.Context, in *pb.HelloRequest) (*pb.HelloReply, error) { + return &pb.HelloReply{Message: "Hello " + in.Name}, nil +} + +// slow server is used to simulate a server that has a variable delay in its response. +type slowServer struct{} + +// SayHello implements helloworld.GreeterServer +func (s *slowServer) SayHello(ctx context.Context, in *pb.HelloRequest) (*pb.HelloReply, error) { + // Delay 100ms ~ 200ms before replying + time.Sleep(time.Duration(100+grpcrand.Intn(100)) * time.Millisecond) + return &pb.HelloReply{Message: "Hello " + in.Name}, nil +} + +func main() { + /***** Set up the server serving channelz service. *****/ + lis, err := net.Listen("tcp", ":50051") + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + defer lis.Close() + s := grpc.NewServer() + service.RegisterChannelzServiceToServer(s) + go s.Serve(lis) + defer s.Stop() + + /***** Start three GreeterServers(with one of them to be the slowServer). *****/ + for i := 0; i < 3; i++ { + lis, err := net.Listen("tcp", ports[i]) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + defer lis.Close() + s := grpc.NewServer() + if i == 2 { + pb.RegisterGreeterServer(s, &slowServer{}) + } else { + pb.RegisterGreeterServer(s, &server{}) + } + go s.Serve(lis) + } + + /***** Wait for user exiting the program *****/ + select {} +} diff --git a/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/client/main.go b/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/client/main.go new file mode 100644 index 0000000..aa09080 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/client/main.go @@ -0,0 +1,62 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/alts" + ecpb "google.golang.org/grpc/examples/features/proto/echo" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +func callUnaryEcho(client ecpb.EchoClient, message string) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + resp, err := client.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message}) + if err != nil { + log.Fatalf("client.UnaryEcho(_) = _, %v: ", err) + } + fmt.Println("UnaryEcho: ", resp.Message) +} + +func main() { + flag.Parse() + + // Create alts based credential. + altsTC := alts.NewClientCreds(alts.DefaultClientOptions()) + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithTransportCredentials(altsTC)) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + // Make a echo client and send an RPC. 
+ rgc := ecpb.NewEchoClient(conn) + callUnaryEcho(rgc, "hello world") +} diff --git a/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/server/main.go b/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/server/main.go new file mode 100644 index 0000000..f4b84d7 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/encryption/ALTS/server/main.go @@ -0,0 +1,74 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials/alts" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +type ecServer struct{} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: req.Message}, nil +} + +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + // Create alts based credential. + altsTC := alts.NewServerCreds(alts.DefaultServerOptions()) + + s := grpc.NewServer(grpc.Creds(altsTC)) + + // Register EchoServer on the server. + ecpb.RegisterEchoServer(s, &ecServer{}) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/encryption/TLS/client/main.go b/vendor/google.golang.org/grpc/examples/features/encryption/TLS/client/main.go new file mode 100644 index 0000000..3cac021 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/encryption/TLS/client/main.go @@ -0,0 +1,66 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. 
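+//
+// It creates TLS transport credentials from a CA certificate file plus a
+// server-name override matching the test certificate. An equivalent
+// construction from a *tls.Config would be (sketch; caPool is assumed to be a
+// *x509.CertPool containing ca.pem):
+//
+//	creds := credentials.NewTLS(&tls.Config{
+//		RootCAs:    caPool,
+//		ServerName: "x.test.youtube.com",
+//	})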
+package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/testdata" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +func callUnaryEcho(client ecpb.EchoClient, message string) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + resp, err := client.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message}) + if err != nil { + log.Fatalf("client.UnaryEcho(_) = _, %v: ", err) + } + fmt.Println("UnaryEcho: ", resp.Message) +} + +func main() { + flag.Parse() + + // Create tls based credential. + creds, err := credentials.NewClientTLSFromFile(testdata.Path("ca.pem"), "x.test.youtube.com") + if err != nil { + log.Fatalf("failed to load credentials: %v", err) + } + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithTransportCredentials(creds)) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + // Make a echo client and send an RPC. + rgc := ecpb.NewEchoClient(conn) + callUnaryEcho(rgc, "hello world") +} diff --git a/vendor/google.golang.org/grpc/examples/features/encryption/TLS/server/main.go b/vendor/google.golang.org/grpc/examples/features/encryption/TLS/server/main.go new file mode 100644 index 0000000..538a2b8 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/encryption/TLS/server/main.go @@ -0,0 +1,79 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +type ecServer struct{} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: req.Message}, nil +} + +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + + // Create tls based credential. 
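+	// credentials.NewServerTLSFromFile is a convenience wrapper; the same
+	// credentials can be built from a *tls.Config, e.g. to additionally
+	// require and verify client certificates (sketch; clientCAPool is an
+	// assumed *x509.CertPool):
+	//
+	//	cert, _ := tls.LoadX509KeyPair(testdata.Path("server1.pem"), testdata.Path("server1.key"))
+	//	creds := credentials.NewTLS(&tls.Config{
+	//		Certificates: []tls.Certificate{cert},
+	//		ClientAuth:   tls.RequireAndVerifyClientCert,
+	//		ClientCAs:    clientCAPool,
+	//	})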
+ creds, err := credentials.NewServerTLSFromFile(testdata.Path("server1.pem"), testdata.Path("server1.key")) + if err != nil { + log.Fatalf("failed to create credentials: %v", err) + } + + s := grpc.NewServer(grpc.Creds(creds)) + + // Register EchoServer on the server. + ecpb.RegisterEchoServer(s, &ecServer{}) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/errors/client/main.go b/vendor/google.golang.org/grpc/examples/features/errors/client/main.go new file mode 100644 index 0000000..4bacff5 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/errors/client/main.go @@ -0,0 +1,68 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "log" + "os" + "time" + + epb "google.golang.org/genproto/googleapis/rpc/errdetails" + "google.golang.org/grpc" + pb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/status" +) + +var addr = flag.String("addr", "localhost:50052", "the address to connect to") + +func main() { + flag.Parse() + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer func() { + if e := conn.Close(); e != nil { + log.Printf("failed to close connection: %s", e) + } + }() + c := pb.NewGreeterClient(conn) + + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + r, err := c.SayHello(ctx, &pb.HelloRequest{Name: "world"}) + if err != nil { + s := status.Convert(err) + for _, d := range s.Details() { + switch info := d.(type) { + case *epb.QuotaFailure: + log.Printf("Quota failure: %s", info) + default: + log.Printf("Unexpected type: %s", info) + } + } + os.Exit(1) + } + log.Printf("Greeting: %s", r.Message) +} diff --git a/vendor/google.golang.org/grpc/examples/features/errors/server/main.go b/vendor/google.golang.org/grpc/examples/features/errors/server/main.go new file mode 100644 index 0000000..d306c4e --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/errors/server/main.go @@ -0,0 +1,83 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. 
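+//
+// It rejects repeated greetings with a rich error: a google.rpc.QuotaFailure
+// detail is attached to the status via the Status.WithDetails method. On the
+// wire the details travel in the grpc-status-details-bin trailer, and the
+// matching client recovers them with status.Convert(err).Details().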
+package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + "sync" + + epb "google.golang.org/genproto/googleapis/rpc/errdetails" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50052, "port number") + +// server is used to implement helloworld.GreeterServer. +type server struct { + mu sync.Mutex + count map[string]int +} + +// SayHello implements helloworld.GreeterServer +func (s *server) SayHello(ctx context.Context, in *pb.HelloRequest) (*pb.HelloReply, error) { + s.mu.Lock() + defer s.mu.Unlock() + // Track the number of times the user has been greeted. + s.count[in.Name]++ + if s.count[in.Name] > 1 { + st := status.New(codes.ResourceExhausted, "Request limit exceeded.") + ds, err := st.WithDetails( + &epb.QuotaFailure{ + Violations: []*epb.QuotaFailure_Violation{{ + Subject: fmt.Sprintf("name:%s", in.Name), + Description: "Limit one greeting per person", + }}, + }, + ) + if err != nil { + return nil, st.Err() + } + return nil, ds.Err() + } + return &pb.HelloReply{Message: "Hello " + in.Name}, nil +} + +func main() { + flag.Parse() + + address := fmt.Sprintf(":%v", *port) + lis, err := net.Listen("tcp", address) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + + s := grpc.NewServer() + pb.RegisterGreeterServer(s, &server{count: make(map[string]int)}) + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/interceptor/client/main.go b/vendor/google.golang.org/grpc/examples/features/interceptor/client/main.go new file mode 100644 index 0000000..4f5ed23 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/interceptor/client/main.go @@ -0,0 +1,165 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "time" + + "golang.org/x/oauth2" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/oauth" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/testdata" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +const fallbackToken = "some-secret-token" + +// logger is to mock a sophisticated logging system. To simplify the example, we just print out the content. +func logger(format string, a ...interface{}) { + fmt.Printf("LOG:\t"+format+"\n", a...) +} + +// unaryInterceptor is an example unary interceptor. 
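+// It does two things: if no per-RPC credentials were supplied as a call
+// option it attaches a fallback OAuth2 token, and it logs the method, start
+// and end time, and error of every unary RPC. It is installed on the
+// connection below via grpc.WithUnaryInterceptor(unaryInterceptor).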
+func unaryInterceptor(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error { + var credsConfigured bool + for _, o := range opts { + _, ok := o.(grpc.PerRPCCredsCallOption) + if ok { + credsConfigured = true + break + } + } + if !credsConfigured { + opts = append(opts, grpc.PerRPCCredentials(oauth.NewOauthAccess(&oauth2.Token{ + AccessToken: fallbackToken, + }))) + } + start := time.Now() + err := invoker(ctx, method, req, reply, cc, opts...) + end := time.Now() + logger("RPC: %s, start time: %s, end time: %s, err: %v", method, start.Format("Basic"), end.Format(time.RFC3339), err) + return err +} + +// wrappedStream wraps around the embedded grpc.ClientStream, and intercepts the RecvMsg and +// SendMsg method call. +type wrappedStream struct { + grpc.ClientStream +} + +func (w *wrappedStream) RecvMsg(m interface{}) error { + logger("Receive a message (Type: %T) at %v", m, time.Now().Format(time.RFC3339)) + return w.ClientStream.RecvMsg(m) +} + +func (w *wrappedStream) SendMsg(m interface{}) error { + logger("Send a message (Type: %T) at %v", m, time.Now().Format(time.RFC3339)) + return w.ClientStream.SendMsg(m) +} + +func newWrappedStream(s grpc.ClientStream) grpc.ClientStream { + return &wrappedStream{s} +} + +// streamInterceptor is an example stream interceptor. +func streamInterceptor(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) { + var credsConfigured bool + for _, o := range opts { + _, ok := o.(*grpc.PerRPCCredsCallOption) + if ok { + credsConfigured = true + } + } + if !credsConfigured { + opts = append(opts, grpc.PerRPCCredentials(oauth.NewOauthAccess(&oauth2.Token{ + AccessToken: fallbackToken, + }))) + } + s, err := streamer(ctx, desc, cc, method, opts...) + if err != nil { + return nil, err + } + return newWrappedStream(s), nil +} + +func callUnaryEcho(client ecpb.EchoClient, message string) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + resp, err := client.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message}) + if err != nil { + log.Fatalf("client.UnaryEcho(_) = _, %v: ", err) + } + fmt.Println("UnaryEcho: ", resp.Message) +} + +func callBidiStreamingEcho(client ecpb.EchoClient) { + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + c, err := client.BidirectionalStreamingEcho(ctx) + if err != nil { + return + } + for i := 0; i < 5; i++ { + if err := c.Send(&ecpb.EchoRequest{Message: fmt.Sprintf("Request %d", i+1)}); err != nil { + log.Fatalf("failed to send request due to error: %v", err) + } + } + c.CloseSend() + for { + resp, err := c.Recv() + if err == io.EOF { + break + } + if err != nil { + log.Fatalf("failed to receive response due to error: %v", err) + } + fmt.Println("BidiStreaming Echo: ", resp.Message) + } +} + +func main() { + flag.Parse() + + // Create tls based credential. + creds, err := credentials.NewClientTLSFromFile(testdata.Path("ca.pem"), "x.test.youtube.com") + if err != nil { + log.Fatalf("failed to load credentials: %v", err) + } + + // Set up a connection to the server. + conn, err := grpc.Dial(*addr, grpc.WithTransportCredentials(creds), grpc.WithUnaryInterceptor(unaryInterceptor), grpc.WithStreamInterceptor(streamInterceptor)) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + // Make a echo client and send RPCs. 
+ rgc := ecpb.NewEchoClient(conn) + callUnaryEcho(rgc, "hello world") + callBidiStreamingEcho(rgc) +} diff --git a/vendor/google.golang.org/grpc/examples/features/interceptor/server/main.go b/vendor/google.golang.org/grpc/examples/features/interceptor/server/main.go new file mode 100644 index 0000000..54ae297 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/interceptor/server/main.go @@ -0,0 +1,170 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "net" + "strings" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var ( + port = flag.Int("port", 50051, "the port to serve on") + + errMissingMetadata = status.Errorf(codes.InvalidArgument, "missing metadata") + errInvalidToken = status.Errorf(codes.Unauthenticated, "invalid token") +) + +// logger is to mock a sophisticated logging system. To simplify the example, we just print out the content. +func logger(format string, a ...interface{}) { + fmt.Printf("LOG:\t"+format+"\n", a...) +} + +type server struct{} + +func (s *server) UnaryEcho(ctx context.Context, in *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + fmt.Printf("unary echoing message %q\n", in.Message) + return &ecpb.EchoResponse{Message: in.Message}, nil +} + +func (s *server) ServerStreamingEcho(in *ecpb.EchoRequest, stream ecpb.Echo_ServerStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) ClientStreamingEcho(stream ecpb.Echo_ClientStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "not implemented") +} + +func (s *server) BidirectionalStreamingEcho(stream ecpb.Echo_BidirectionalStreamingEchoServer) error { + for { + in, err := stream.Recv() + if err != nil { + if err == io.EOF { + return nil + } + fmt.Printf("server: error receiving from stream: %v\n", err) + return err + } + fmt.Printf("bidi echoing message %q\n", in.Message) + stream.Send(&ecpb.EchoResponse{Message: in.Message}) + } +} + +// valid validates the authorization. +func valid(authorization []string) bool { + if len(authorization) < 1 { + return false + } + token := strings.TrimPrefix(authorization[0], "Bearer ") + // Perform the token validation here. For the sake of this example, the code + // here forgoes any of the usual OAuth2 token validation and instead checks + // for a token matching an arbitrary string. 
+ return token == "some-secret-token" +} + +func unaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) { + // authentication (token verification) + md, ok := metadata.FromIncomingContext(ctx) + if !ok { + return nil, errMissingMetadata + } + if !valid(md["authorization"]) { + return nil, errInvalidToken + } + m, err := handler(ctx, req) + if err != nil { + logger("RPC failed with error %v", err) + } + return m, err +} + +// wrappedStream wraps around the embedded grpc.ServerStream, and intercepts the RecvMsg and +// SendMsg method call. +type wrappedStream struct { + grpc.ServerStream +} + +func (w *wrappedStream) RecvMsg(m interface{}) error { + logger("Receive a message (Type: %T) at %s", m, time.Now().Format(time.RFC3339)) + return w.ServerStream.RecvMsg(m) +} + +func (w *wrappedStream) SendMsg(m interface{}) error { + logger("Send a message (Type: %T) at %v", m, time.Now().Format(time.RFC3339)) + return w.ServerStream.SendMsg(m) +} + +func newWrappedStream(s grpc.ServerStream) grpc.ServerStream { + return &wrappedStream{s} +} + +func streamInterceptor(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { + // authentication (token verification) + md, ok := metadata.FromIncomingContext(ss.Context()) + if !ok { + return errMissingMetadata + } + if !valid(md["authorization"]) { + return errInvalidToken + } + + err := handler(srv, newWrappedStream(ss)) + if err != nil { + logger("RPC failed with error %v", err) + } + return err +} + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + + // Create tls based credential. + creds, err := credentials.NewServerTLSFromFile(testdata.Path("server1.pem"), testdata.Path("server1.key")) + if err != nil { + log.Fatalf("failed to create credentials: %v", err) + } + + s := grpc.NewServer(grpc.Creds(creds), grpc.UnaryInterceptor(unaryInterceptor), grpc.StreamInterceptor(streamInterceptor)) + + // Register EchoServer on the server. + ecpb.RegisterEchoServer(s, &server{}) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/keepalive/client/main.go b/vendor/google.golang.org/grpc/examples/features/keepalive/client/main.go new file mode 100644 index 0000000..a8cfbc5 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/keepalive/client/main.go @@ -0,0 +1,62 @@ +/* + * + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. 
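+//
+// The keepalive.ClientParameters below are meant to stay within the server's
+// keepalive.EnforcementPolicy from the matching server example: the client
+// pings at most every 10s while the server permits pings as often as every
+// 5s (with PermitWithoutStream enabled on both sides), so the server will not
+// close the connection with a GOAWAY for too-aggressive pinging.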
+package main + +import ( + "context" + "flag" + "fmt" + "log" + "time" + + "google.golang.org/grpc" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/keepalive" +) + +var addr = flag.String("addr", "localhost:50052", "the address to connect to") + +var kacp = keepalive.ClientParameters{ + Time: 10 * time.Second, // send pings every 10 seconds if there is no activity + Timeout: time.Second, // wait 1 second for ping ack before considering the connection dead + PermitWithoutStream: true, // send pings even without active streams +} + +func main() { + flag.Parse() + + conn, err := grpc.Dial(*addr, grpc.WithInsecure(), grpc.WithKeepaliveParams(kacp)) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute) + defer cancel() + fmt.Println("Performing unary request") + res, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: "keepalive demo"}) + if err != nil { + log.Fatalf("unexpected error from UnaryEcho: %v", err) + } + fmt.Println("RPC response:", res) + select {} // Block forever; run with GODEBUG=http2debug=2 to observe ping frames and GOAWAYs due to idleness. +} diff --git a/vendor/google.golang.org/grpc/examples/features/keepalive/server/main.go b/vendor/google.golang.org/grpc/examples/features/keepalive/server/main.go new file mode 100644 index 0000000..723a6b9 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/keepalive/server/main.go @@ -0,0 +1,86 @@ +/* + * + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50052, "port number") + +var kaep = keepalive.EnforcementPolicy{ + MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection + PermitWithoutStream: true, // Allow pings even when there are no active streams +} + +var kasp = keepalive.ServerParameters{ + MaxConnectionIdle: 15 * time.Second, // If a client is idle for 15 seconds, send a GOAWAY + MaxConnectionAge: 30 * time.Second, // If any connection is alive for more than 30 seconds, send a GOAWAY + MaxConnectionAgeGrace: 5 * time.Second, // Allow 5 seconds for pending RPCs to complete before forcibly closing connections + Time: 5 * time.Second, // Ping the client if it is idle for 5 seconds to ensure the connection is still active + Timeout: 1 * time.Second, // Wait 1 second for the ping ack before assuming the connection is dead +} + +// server implements EchoServer. 
+type server struct{}
+
+func (s *server) UnaryEcho(ctx context.Context, req *pb.EchoRequest) (*pb.EchoResponse, error) {
+	return &pb.EchoResponse{Message: req.Message}, nil
+}
+
+func (s *server) ServerStreamingEcho(req *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error {
+	return status.Error(codes.Unimplemented, "RPC unimplemented")
+}
+
+func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error {
+	return status.Error(codes.Unimplemented, "RPC unimplemented")
+}
+
+func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error {
+	return status.Error(codes.Unimplemented, "RPC unimplemented")
+}
+
+func main() {
+	flag.Parse()
+
+	address := fmt.Sprintf(":%v", *port)
+	lis, err := net.Listen("tcp", address)
+	if err != nil {
+		log.Fatalf("failed to listen: %v", err)
+	}
+
+	s := grpc.NewServer(grpc.KeepaliveEnforcementPolicy(kaep), grpc.KeepaliveParams(kasp))
+	pb.RegisterEchoServer(s, &server{})
+
+	if err := s.Serve(lis); err != nil {
+		log.Fatalf("failed to serve: %v", err)
+	}
+}
diff --git a/vendor/google.golang.org/grpc/examples/features/load_balancing/client/main.go b/vendor/google.golang.org/grpc/examples/features/load_balancing/client/main.go
new file mode 100644
index 0000000..27a1552a
--- /dev/null
+++ b/vendor/google.golang.org/grpc/examples/features/load_balancing/client/main.go
@@ -0,0 +1,125 @@
+/*
+ *
+ * Copyright 2018 gRPC authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// Binary client is an example client.
+package main
+
+import (
+	"context"
+	"fmt"
+	"log"
+	"time"
+
+	"google.golang.org/grpc"
+	ecpb "google.golang.org/grpc/examples/features/proto/echo"
+	"google.golang.org/grpc/resolver"
+)
+
+const (
+	exampleScheme = "example"
+	exampleServiceName = "lb.example.grpc.io"
+)
+
+var addrs = []string{"localhost:50051", "localhost:50052"}
+
+func callUnaryEcho(c ecpb.EchoClient, message string) {
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+	defer cancel()
+	r, err := c.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message})
+	if err != nil {
+		log.Fatalf("could not greet: %v", err)
+	}
+	fmt.Println(r.Message)
+}
+
+func makeRPCs(cc *grpc.ClientConn, n int) {
+	hwc := ecpb.NewEchoClient(cc)
+	for i := 0; i < n; i++ {
+		callUnaryEcho(hwc, "this is examples/load_balancing")
+	}
+}
+
+func main() {
+	pickfirstConn, err := grpc.Dial(
+		fmt.Sprintf("%s:///%s", exampleScheme, exampleServiceName),
+		// grpc.WithBalancerName("pick_first"), // "pick_first" is the default, so this DialOption is not necessary.
+		grpc.WithInsecure(),
+	)
+	if err != nil {
+		log.Fatalf("did not connect: %v", err)
+	}
+	defer pickfirstConn.Close()
+
+	fmt.Println("--- calling grpc.examples.echo.Echo/UnaryEcho with pick_first ---")
+	makeRPCs(pickfirstConn, 10)
+
+	fmt.Println()
+
+	// Make another ClientConn with round_robin policy.
+	roundrobinConn, err := grpc.Dial(
+		fmt.Sprintf("%s:///%s", exampleScheme, exampleServiceName),
+		grpc.WithBalancerName("round_robin"), // This sets the initial balancing policy.
+		grpc.WithInsecure(),
+	)
+	if err != nil {
+		log.Fatalf("did not connect: %v", err)
+	}
+	defer roundrobinConn.Close()
+
+	fmt.Println("--- calling grpc.examples.echo.Echo/UnaryEcho with round_robin ---")
+	makeRPCs(roundrobinConn, 10)
+}
+
+// Following is an example name resolver implementation. Read the name
+// resolution example to learn more about it.
+
+type exampleResolverBuilder struct{}
+
+func (*exampleResolverBuilder) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) {
+	r := &exampleResolver{
+		target: target,
+		cc: cc,
+		addrsStore: map[string][]string{
+			exampleServiceName: addrs,
+		},
+	}
+	r.start()
+	return r, nil
+}
+func (*exampleResolverBuilder) Scheme() string { return exampleScheme }
+
+type exampleResolver struct {
+	target resolver.Target
+	cc resolver.ClientConn
+	addrsStore map[string][]string
+}
+
+func (r *exampleResolver) start() {
+	addrStrs := r.addrsStore[r.target.Endpoint]
+	addrs := make([]resolver.Address, len(addrStrs))
+	for i, s := range addrStrs {
+		addrs[i] = resolver.Address{Addr: s}
+	}
+	r.cc.UpdateState(resolver.State{Addresses: addrs})
+}
+func (*exampleResolver) ResolveNow(o resolver.ResolveNowOption) {}
+func (*exampleResolver) Close() {}
+
+func init() {
+	resolver.Register(&exampleResolverBuilder{})
+}
diff --git a/vendor/google.golang.org/grpc/examples/features/load_balancing/server/main.go b/vendor/google.golang.org/grpc/examples/features/load_balancing/server/main.go
new file mode 100644
index 0000000..8a1dad5
--- /dev/null
+++ b/vendor/google.golang.org/grpc/examples/features/load_balancing/server/main.go
@@ -0,0 +1,79 @@
+/*
+ *
+ * Copyright 2018 gRPC authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// Binary server is an example server.
+package main + +import ( + "context" + "fmt" + "log" + "net" + "sync" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +var ( + addrs = []string{":50051", ":50052"} +) + +type ecServer struct { + addr string +} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: fmt.Sprintf("%s (from %s)", req.Message, s.addr)}, nil +} +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func startServer(addr string) { + lis, err := net.Listen("tcp", addr) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + s := grpc.NewServer() + ecpb.RegisterEchoServer(s, &ecServer{addr: addr}) + log.Printf("serving on %s\n", addr) + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} + +func main() { + var wg sync.WaitGroup + for _, addr := range addrs { + wg.Add(1) + go func(addr string) { + defer wg.Done() + startServer(addr) + }(addr) + } + wg.Wait() +} diff --git a/vendor/google.golang.org/grpc/examples/features/metadata/client/main.go b/vendor/google.golang.org/grpc/examples/features/metadata/client/main.go new file mode 100644 index 0000000..3aa3a59 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/metadata/client/main.go @@ -0,0 +1,307 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. +package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "time" + + "google.golang.org/grpc" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/metadata" +) + +var addr = flag.String("addr", "localhost:50051", "the address to connect to") + +const ( + timestampFormat = time.StampNano // "Jan _2 15:04:05.000" + streamingCount = 10 +) + +func unaryCallWithMetadata(c pb.EchoClient, message string) { + fmt.Printf("--- unary ---\n") + // Create metadata and context. + md := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + ctx := metadata.NewOutgoingContext(context.Background(), md) + + // Make RPC using the context with the metadata. + var header, trailer metadata.MD + r, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: message}, grpc.Header(&header), grpc.Trailer(&trailer)) + if err != nil { + log.Fatalf("failed to call UnaryEcho: %v", err) + } + + if t, ok := header["timestamp"]; ok { + fmt.Printf("timestamp from header:\n") + for i, e := range t { + fmt.Printf(" %d. 
%s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in header") + } + if l, ok := header["location"]; ok { + fmt.Printf("location from header:\n") + for i, e := range l { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("location expected but doesn't exist in header") + } + fmt.Printf("response:\n") + fmt.Printf(" - %s\n", r.Message) + + if t, ok := trailer["timestamp"]; ok { + fmt.Printf("timestamp from trailer:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in trailer") + } +} + +func serverStreamingWithMetadata(c pb.EchoClient, message string) { + fmt.Printf("--- server streaming ---\n") + // Create metadata and context. + md := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + ctx := metadata.NewOutgoingContext(context.Background(), md) + + // Make RPC using the context with the metadata. + stream, err := c.ServerStreamingEcho(ctx, &pb.EchoRequest{Message: message}) + if err != nil { + log.Fatalf("failed to call ServerStreamingEcho: %v", err) + } + + // Read the header when the header arrives. + header, err := stream.Header() + if err != nil { + log.Fatalf("failed to get header from stream: %v", err) + } + // Read metadata from server's header. + if t, ok := header["timestamp"]; ok { + fmt.Printf("timestamp from header:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in header") + } + if l, ok := header["location"]; ok { + fmt.Printf("location from header:\n") + for i, e := range l { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("location expected but doesn't exist in header") + } + + // Read all the responses. + var rpcStatus error + fmt.Printf("response:\n") + for { + r, err := stream.Recv() + if err != nil { + rpcStatus = err + break + } + fmt.Printf(" - %s\n", r.Message) + } + if rpcStatus != io.EOF { + log.Fatalf("failed to finish server streaming: %v", rpcStatus) + } + + // Read the trailer after the RPC is finished. + trailer := stream.Trailer() + // Read metadata from server's trailer. + if t, ok := trailer["timestamp"]; ok { + fmt.Printf("timestamp from trailer:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in trailer") + } +} + +func clientStreamWithMetadata(c pb.EchoClient, message string) { + fmt.Printf("--- client streaming ---\n") + // Create metadata and context. + md := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + ctx := metadata.NewOutgoingContext(context.Background(), md) + + // Make RPC using the context with the metadata. + stream, err := c.ClientStreamingEcho(ctx) + if err != nil { + log.Fatalf("failed to call ClientStreamingEcho: %v\n", err) + } + + // Read the header when the header arrives. + header, err := stream.Header() + if err != nil { + log.Fatalf("failed to get header from stream: %v", err) + } + // Read metadata from server's header. + if t, ok := header["timestamp"]; ok { + fmt.Printf("timestamp from header:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in header") + } + if l, ok := header["location"]; ok { + fmt.Printf("location from header:\n") + for i, e := range l { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("location expected but doesn't exist in header") + } + + // Send all requests to the server. 
+ for i := 0; i < streamingCount; i++ { + if err := stream.Send(&pb.EchoRequest{Message: message}); err != nil { + log.Fatalf("failed to send streaming: %v\n", err) + } + } + + // Read the response. + r, err := stream.CloseAndRecv() + if err != nil { + log.Fatalf("failed to CloseAndRecv: %v\n", err) + } + fmt.Printf("response:\n") + fmt.Printf(" - %s\n\n", r.Message) + + // Read the trailer after the RPC is finished. + trailer := stream.Trailer() + // Read metadata from server's trailer. + if t, ok := trailer["timestamp"]; ok { + fmt.Printf("timestamp from trailer:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in trailer") + } +} + +func bidirectionalWithMetadata(c pb.EchoClient, message string) { + fmt.Printf("--- bidirectional ---\n") + // Create metadata and context. + md := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + ctx := metadata.NewOutgoingContext(context.Background(), md) + + // Make RPC using the context with the metadata. + stream, err := c.BidirectionalStreamingEcho(ctx) + if err != nil { + log.Fatalf("failed to call BidirectionalStreamingEcho: %v\n", err) + } + + go func() { + // Read the header when the header arrives. + header, err := stream.Header() + if err != nil { + log.Fatalf("failed to get header from stream: %v", err) + } + // Read metadata from server's header. + if t, ok := header["timestamp"]; ok { + fmt.Printf("timestamp from header:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in header") + } + if l, ok := header["location"]; ok { + fmt.Printf("location from header:\n") + for i, e := range l { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("location expected but doesn't exist in header") + } + + // Send all requests to the server. + for i := 0; i < streamingCount; i++ { + if err := stream.Send(&pb.EchoRequest{Message: message}); err != nil { + log.Fatalf("failed to send streaming: %v\n", err) + } + } + stream.CloseSend() + }() + + // Read all the responses. + var rpcStatus error + fmt.Printf("response:\n") + for { + r, err := stream.Recv() + if err != nil { + rpcStatus = err + break + } + fmt.Printf(" - %s\n", r.Message) + } + if rpcStatus != io.EOF { + log.Fatalf("failed to finish server streaming: %v", rpcStatus) + } + + // Read the trailer after the RPC is finished. + trailer := stream.Trailer() + // Read metadata from server's trailer. + if t, ok := trailer["timestamp"]; ok { + fmt.Printf("timestamp from trailer:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } else { + log.Fatal("timestamp expected but doesn't exist in trailer") + } + +} + +const message = "this is examples/metadata" + +func main() { + flag.Parse() + // Set up a connection to the server. 
+ conn, err := grpc.Dial(*addr, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + unaryCallWithMetadata(c, message) + time.Sleep(1 * time.Second) + + serverStreamingWithMetadata(c, message) + time.Sleep(1 * time.Second) + + clientStreamWithMetadata(c, message) + time.Sleep(1 * time.Second) + + bidirectionalWithMetadata(c, message) +} diff --git a/vendor/google.golang.org/grpc/examples/features/metadata/server/main.go b/vendor/google.golang.org/grpc/examples/features/metadata/server/main.go new file mode 100644 index 0000000..a0dd3b8 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/metadata/server/main.go @@ -0,0 +1,207 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "io" + "log" + "math/rand" + "net" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +const ( + timestampFormat = time.StampNano + streamingCount = 10 +) + +type server struct{} + +func (s *server) UnaryEcho(ctx context.Context, in *pb.EchoRequest) (*pb.EchoResponse, error) { + fmt.Printf("--- UnaryEcho ---\n") + // Create trailer in defer to record function return time. + defer func() { + trailer := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + grpc.SetTrailer(ctx, trailer) + }() + + // Read metadata from client. + md, ok := metadata.FromIncomingContext(ctx) + if !ok { + return nil, status.Errorf(codes.DataLoss, "UnaryEcho: failed to get metadata") + } + if t, ok := md["timestamp"]; ok { + fmt.Printf("timestamp from metadata:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } + + // Create and send header. + header := metadata.New(map[string]string{"location": "MTV", "timestamp": time.Now().Format(timestampFormat)}) + grpc.SendHeader(ctx, header) + + fmt.Printf("request received: %v, sending echo\n", in) + + return &pb.EchoResponse{Message: in.Message}, nil +} + +func (s *server) ServerStreamingEcho(in *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error { + fmt.Printf("--- ServerStreamingEcho ---\n") + // Create trailer in defer to record function return time. + defer func() { + trailer := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + stream.SetTrailer(trailer) + }() + + // Read metadata from client. + md, ok := metadata.FromIncomingContext(stream.Context()) + if !ok { + return status.Errorf(codes.DataLoss, "ServerStreamingEcho: failed to get metadata") + } + if t, ok := md["timestamp"]; ok { + fmt.Printf("timestamp from metadata:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } + + // Create and send header. 
+ header := metadata.New(map[string]string{"location": "MTV", "timestamp": time.Now().Format(timestampFormat)}) + stream.SendHeader(header) + + fmt.Printf("request received: %v\n", in) + + // Read requests and send responses. + for i := 0; i < streamingCount; i++ { + fmt.Printf("echo message %v\n", in.Message) + err := stream.Send(&pb.EchoResponse{Message: in.Message}) + if err != nil { + return err + } + } + return nil +} + +func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error { + fmt.Printf("--- ClientStreamingEcho ---\n") + // Create trailer in defer to record function return time. + defer func() { + trailer := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + stream.SetTrailer(trailer) + }() + + // Read metadata from client. + md, ok := metadata.FromIncomingContext(stream.Context()) + if !ok { + return status.Errorf(codes.DataLoss, "ClientStreamingEcho: failed to get metadata") + } + if t, ok := md["timestamp"]; ok { + fmt.Printf("timestamp from metadata:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } + + // Create and send header. + header := metadata.New(map[string]string{"location": "MTV", "timestamp": time.Now().Format(timestampFormat)}) + stream.SendHeader(header) + + // Read requests and send responses. + var message string + for { + in, err := stream.Recv() + if err == io.EOF { + fmt.Printf("echo last received message\n") + return stream.SendAndClose(&pb.EchoResponse{Message: message}) + } + message = in.Message + fmt.Printf("request received: %v, building echo\n", in) + if err != nil { + return err + } + } +} + +func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error { + fmt.Printf("--- BidirectionalStreamingEcho ---\n") + // Create trailer in defer to record function return time. + defer func() { + trailer := metadata.Pairs("timestamp", time.Now().Format(timestampFormat)) + stream.SetTrailer(trailer) + }() + + // Read metadata from client. + md, ok := metadata.FromIncomingContext(stream.Context()) + if !ok { + return status.Errorf(codes.DataLoss, "BidirectionalStreamingEcho: failed to get metadata") + } + + if t, ok := md["timestamp"]; ok { + fmt.Printf("timestamp from metadata:\n") + for i, e := range t { + fmt.Printf(" %d. %s\n", i, e) + } + } + + // Create and send header. + header := metadata.New(map[string]string{"location": "MTV", "timestamp": time.Now().Format(timestampFormat)}) + stream.SendHeader(header) + + // Read requests and send responses. + for { + in, err := stream.Recv() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + fmt.Printf("request received %v, sending echo\n", in) + if err := stream.Send(&pb.EchoResponse{Message: in.Message}); err != nil { + return err + } + } +} + +func main() { + flag.Parse() + rand.Seed(time.Now().UnixNano()) + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + fmt.Printf("server listening at %v\n", lis.Addr()) + + s := grpc.NewServer() + pb.RegisterEchoServer(s, &server{}) + s.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/examples/features/multiplex/client/main.go b/vendor/google.golang.org/grpc/examples/features/multiplex/client/main.go new file mode 100644 index 0000000..e25bb7a --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/multiplex/client/main.go @@ -0,0 +1,77 @@ +/* + * + * Copyright 2018 gRPC authors. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// Binary client is an example client.
+package main
+
+import (
+	"context"
+	"flag"
+	"fmt"
+	"log"
+	"time"
+
+	"google.golang.org/grpc"
+	ecpb "google.golang.org/grpc/examples/features/proto/echo"
+	hwpb "google.golang.org/grpc/examples/helloworld/helloworld"
+)
+
+var addr = flag.String("addr", "localhost:50051", "the address to connect to")
+
+// callSayHello calls SayHello on c with the given name, and prints the
+// response.
+func callSayHello(c hwpb.GreeterClient, name string) {
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+	defer cancel()
+	r, err := c.SayHello(ctx, &hwpb.HelloRequest{Name: name})
+	if err != nil {
+		log.Fatalf("client.SayHello(_) = _, %v", err)
+	}
+	fmt.Println("Greeting: ", r.Message)
+}
+
+func callUnaryEcho(client ecpb.EchoClient, message string) {
+	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+	resp, err := client.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message})
+	if err != nil {
+		log.Fatalf("client.UnaryEcho(_) = _, %v: ", err)
+	}
+	fmt.Println("UnaryEcho: ", resp.Message)
+}
+
+func main() {
+	flag.Parse()
+	// Set up a connection to the server.
+	conn, err := grpc.Dial(*addr, grpc.WithInsecure())
+	if err != nil {
+		log.Fatalf("did not connect: %v", err)
+	}
+	defer conn.Close()
+
+	fmt.Println("--- calling helloworld.Greeter/SayHello ---")
+	// Make a greeter client and send an RPC.
+	hwc := hwpb.NewGreeterClient(conn)
+	callSayHello(hwc, "multiplex")
+
+	fmt.Println()
+	fmt.Println("--- calling grpc.examples.echo.Echo/UnaryEcho ---")
+	// Make an echo client with the same ClientConn.
+	rgc := ecpb.NewEchoClient(conn)
+	callUnaryEcho(rgc, "this is examples/multiplex")
+}
diff --git a/vendor/google.golang.org/grpc/examples/features/multiplex/server/main.go b/vendor/google.golang.org/grpc/examples/features/multiplex/server/main.go
new file mode 100644
index 0000000..956d36f
--- /dev/null
+++ b/vendor/google.golang.org/grpc/examples/features/multiplex/server/main.go
@@ -0,0 +1,83 @@
+/*
+ *
+ * Copyright 2018 gRPC authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// Binary server is an example server.
+package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + hwpb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +// hwServer is used to implement helloworld.GreeterServer. +type hwServer struct{} + +// SayHello implements helloworld.GreeterServer +func (s *hwServer) SayHello(ctx context.Context, in *hwpb.HelloRequest) (*hwpb.HelloReply, error) { + return &hwpb.HelloReply{Message: "Hello " + in.Name}, nil +} + +type ecServer struct{} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: req.Message}, nil +} + +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func main() { + flag.Parse() + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + fmt.Printf("server listening at %v\n", lis.Addr()) + + s := grpc.NewServer() + + // Register Greeter on the server. + hwpb.RegisterGreeterServer(s, &hwServer{}) + + // Register RouteGuide on the same server. + ecpb.RegisterEchoServer(s, &ecServer{}) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/name_resolving/client/main.go b/vendor/google.golang.org/grpc/examples/features/name_resolving/client/main.go new file mode 100644 index 0000000..626991b --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/name_resolving/client/main.go @@ -0,0 +1,135 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary client is an example client. 
+package main
+
+import (
+	"context"
+	"fmt"
+	"log"
+	"time"
+
+	"google.golang.org/grpc"
+	ecpb "google.golang.org/grpc/examples/features/proto/echo"
+	"google.golang.org/grpc/resolver"
+)
+
+const (
+	exampleScheme = "example"
+	exampleServiceName = "resolver.example.grpc.io"
+
+	backendAddr = "localhost:50051"
+)
+
+func callUnaryEcho(c ecpb.EchoClient, message string) {
+	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
+	defer cancel()
+	r, err := c.UnaryEcho(ctx, &ecpb.EchoRequest{Message: message})
+	if err != nil {
+		log.Fatalf("could not greet: %v", err)
+	}
+	fmt.Println(r.Message)
+}
+
+func makeRPCs(cc *grpc.ClientConn, n int) {
+	hwc := ecpb.NewEchoClient(cc)
+	for i := 0; i < n; i++ {
+		callUnaryEcho(hwc, "this is examples/name_resolving")
+	}
+}
+
+func main() {
+	passthroughConn, err := grpc.Dial(
+		fmt.Sprintf("passthrough:///%s", backendAddr), // Dial to "passthrough:///localhost:50051"
+		grpc.WithInsecure(),
+	)
+	if err != nil {
+		log.Fatalf("did not connect: %v", err)
+	}
+	defer passthroughConn.Close()
+
+	fmt.Printf("--- calling grpc.examples.echo.Echo/UnaryEcho to \"passthrough:///%s\"\n", backendAddr)
+	makeRPCs(passthroughConn, 10)
+
+	fmt.Println()
+
+	exampleConn, err := grpc.Dial(
+		fmt.Sprintf("%s:///%s", exampleScheme, exampleServiceName), // Dial to "example:///resolver.example.grpc.io"
+		grpc.WithInsecure(),
+	)
+	if err != nil {
+		log.Fatalf("did not connect: %v", err)
+	}
+	defer exampleConn.Close()
+
+	fmt.Printf("--- calling grpc.examples.echo.Echo/UnaryEcho to \"%s:///%s\"\n", exampleScheme, exampleServiceName)
+	makeRPCs(exampleConn, 10)
+}
+
+// Following is an example name resolver. It includes a
+// ResolverBuilder(https://godoc.org/google.golang.org/grpc/resolver#Builder)
+// and a Resolver(https://godoc.org/google.golang.org/grpc/resolver#Resolver).
+//
+// A ResolverBuilder is registered for a scheme (in this example, "example" is
+// the scheme). When a ClientConn is created for this scheme, the
+// ResolverBuilder will be picked to build a Resolver. Note that a new Resolver
+// is built for each ClientConn. The Resolver will watch the updates for the
+// target, and send updates to the ClientConn.
+
+// exampleResolverBuilder is a
+// ResolverBuilder(https://godoc.org/google.golang.org/grpc/resolver#Builder).
+type exampleResolverBuilder struct{}
+
+func (*exampleResolverBuilder) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) {
+	r := &exampleResolver{
+		target: target,
+		cc: cc,
+		addrsStore: map[string][]string{
+			exampleServiceName: {backendAddr},
+		},
+	}
+	r.start()
+	return r, nil
+}
+func (*exampleResolverBuilder) Scheme() string { return exampleScheme }
+
+// exampleResolver is a
+// Resolver(https://godoc.org/google.golang.org/grpc/resolver#Resolver).
+type exampleResolver struct {
+	target resolver.Target
+	cc resolver.ClientConn
+	addrsStore map[string][]string
+}
+
+func (r *exampleResolver) start() {
+	addrStrs := r.addrsStore[r.target.Endpoint]
+	addrs := make([]resolver.Address, len(addrStrs))
+	for i, s := range addrStrs {
+		addrs[i] = resolver.Address{Addr: s}
+	}
+	r.cc.UpdateState(resolver.State{Addresses: addrs})
+}
+func (*exampleResolver) ResolveNow(o resolver.ResolveNowOption) {}
+func (*exampleResolver) Close() {}
+
+func init() {
+	// Register the example ResolverBuilder. This is usually done in a package's
+	// init() function.
+ resolver.Register(&exampleResolverBuilder{}) +} diff --git a/vendor/google.golang.org/grpc/examples/features/name_resolving/server/main.go b/vendor/google.golang.org/grpc/examples/features/name_resolving/server/main.go new file mode 100644 index 0000000..d3963a8 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/name_resolving/server/main.go @@ -0,0 +1,64 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +const addr = "localhost:50051" + +type ecServer struct { + addr string +} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: fmt.Sprintf("%s (from %s)", req.Message, s.addr)}, nil +} +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func main() { + lis, err := net.Listen("tcp", addr) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + s := grpc.NewServer() + ecpb.RegisterEchoServer(s, &ecServer{addr: addr}) + log.Printf("serving on %s\n", addr) + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/proto/doc.go b/vendor/google.golang.org/grpc/examples/features/proto/doc.go new file mode 100644 index 0000000..1400e63 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/proto/doc.go @@ -0,0 +1,22 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc -I ./echo --go_out=plugins=grpc,paths=source_relative:./echo ./echo/echo.proto + +// Package proto is for go generate. 
+package proto diff --git a/vendor/google.golang.org/grpc/examples/features/proto/echo/echo.pb.go b/vendor/google.golang.org/grpc/examples/features/proto/echo/echo.pb.go new file mode 100644 index 0000000..8b1f6f2 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/proto/echo/echo.pb.go @@ -0,0 +1,401 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: echo.proto + +package echo // import "google.golang.org/grpc/examples/features/proto/echo" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// EchoRequest is the request for echo. +type EchoRequest struct { + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EchoRequest) Reset() { *m = EchoRequest{} } +func (m *EchoRequest) String() string { return proto.CompactTextString(m) } +func (*EchoRequest) ProtoMessage() {} +func (*EchoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_echo_9d6886b3223721ca, []int{0} +} +func (m *EchoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EchoRequest.Unmarshal(m, b) +} +func (m *EchoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EchoRequest.Marshal(b, m, deterministic) +} +func (dst *EchoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_EchoRequest.Merge(dst, src) +} +func (m *EchoRequest) XXX_Size() int { + return xxx_messageInfo_EchoRequest.Size(m) +} +func (m *EchoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_EchoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_EchoRequest proto.InternalMessageInfo + +func (m *EchoRequest) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// EchoResponse is the response for echo. 
+type EchoResponse struct { + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EchoResponse) Reset() { *m = EchoResponse{} } +func (m *EchoResponse) String() string { return proto.CompactTextString(m) } +func (*EchoResponse) ProtoMessage() {} +func (*EchoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_echo_9d6886b3223721ca, []int{1} +} +func (m *EchoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EchoResponse.Unmarshal(m, b) +} +func (m *EchoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EchoResponse.Marshal(b, m, deterministic) +} +func (dst *EchoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_EchoResponse.Merge(dst, src) +} +func (m *EchoResponse) XXX_Size() int { + return xxx_messageInfo_EchoResponse.Size(m) +} +func (m *EchoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_EchoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_EchoResponse proto.InternalMessageInfo + +func (m *EchoResponse) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func init() { + proto.RegisterType((*EchoRequest)(nil), "grpc.examples.echo.EchoRequest") + proto.RegisterType((*EchoResponse)(nil), "grpc.examples.echo.EchoResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EchoClient is the client API for Echo service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EchoClient interface { + // UnaryEcho is unary echo. + UnaryEcho(ctx context.Context, in *EchoRequest, opts ...grpc.CallOption) (*EchoResponse, error) + // ServerStreamingEcho is server side streaming. + ServerStreamingEcho(ctx context.Context, in *EchoRequest, opts ...grpc.CallOption) (Echo_ServerStreamingEchoClient, error) + // ClientStreamingEcho is client side streaming. + ClientStreamingEcho(ctx context.Context, opts ...grpc.CallOption) (Echo_ClientStreamingEchoClient, error) + // BidirectionalStreamingEcho is bidi streaming. + BidirectionalStreamingEcho(ctx context.Context, opts ...grpc.CallOption) (Echo_BidirectionalStreamingEchoClient, error) +} + +type echoClient struct { + cc *grpc.ClientConn +} + +func NewEchoClient(cc *grpc.ClientConn) EchoClient { + return &echoClient{cc} +} + +func (c *echoClient) UnaryEcho(ctx context.Context, in *EchoRequest, opts ...grpc.CallOption) (*EchoResponse, error) { + out := new(EchoResponse) + err := c.cc.Invoke(ctx, "/grpc.examples.echo.Echo/UnaryEcho", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *echoClient) ServerStreamingEcho(ctx context.Context, in *EchoRequest, opts ...grpc.CallOption) (Echo_ServerStreamingEchoClient, error) { + stream, err := c.cc.NewStream(ctx, &_Echo_serviceDesc.Streams[0], "/grpc.examples.echo.Echo/ServerStreamingEcho", opts...) 
+ if err != nil { + return nil, err + } + x := &echoServerStreamingEchoClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Echo_ServerStreamingEchoClient interface { + Recv() (*EchoResponse, error) + grpc.ClientStream +} + +type echoServerStreamingEchoClient struct { + grpc.ClientStream +} + +func (x *echoServerStreamingEchoClient) Recv() (*EchoResponse, error) { + m := new(EchoResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *echoClient) ClientStreamingEcho(ctx context.Context, opts ...grpc.CallOption) (Echo_ClientStreamingEchoClient, error) { + stream, err := c.cc.NewStream(ctx, &_Echo_serviceDesc.Streams[1], "/grpc.examples.echo.Echo/ClientStreamingEcho", opts...) + if err != nil { + return nil, err + } + x := &echoClientStreamingEchoClient{stream} + return x, nil +} + +type Echo_ClientStreamingEchoClient interface { + Send(*EchoRequest) error + CloseAndRecv() (*EchoResponse, error) + grpc.ClientStream +} + +type echoClientStreamingEchoClient struct { + grpc.ClientStream +} + +func (x *echoClientStreamingEchoClient) Send(m *EchoRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *echoClientStreamingEchoClient) CloseAndRecv() (*EchoResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(EchoResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *echoClient) BidirectionalStreamingEcho(ctx context.Context, opts ...grpc.CallOption) (Echo_BidirectionalStreamingEchoClient, error) { + stream, err := c.cc.NewStream(ctx, &_Echo_serviceDesc.Streams[2], "/grpc.examples.echo.Echo/BidirectionalStreamingEcho", opts...) + if err != nil { + return nil, err + } + x := &echoBidirectionalStreamingEchoClient{stream} + return x, nil +} + +type Echo_BidirectionalStreamingEchoClient interface { + Send(*EchoRequest) error + Recv() (*EchoResponse, error) + grpc.ClientStream +} + +type echoBidirectionalStreamingEchoClient struct { + grpc.ClientStream +} + +func (x *echoBidirectionalStreamingEchoClient) Send(m *EchoRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *echoBidirectionalStreamingEchoClient) Recv() (*EchoResponse, error) { + m := new(EchoResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// EchoServer is the server API for Echo service. +type EchoServer interface { + // UnaryEcho is unary echo. + UnaryEcho(context.Context, *EchoRequest) (*EchoResponse, error) + // ServerStreamingEcho is server side streaming. + ServerStreamingEcho(*EchoRequest, Echo_ServerStreamingEchoServer) error + // ClientStreamingEcho is client side streaming. + ClientStreamingEcho(Echo_ClientStreamingEchoServer) error + // BidirectionalStreamingEcho is bidi streaming. 
+ BidirectionalStreamingEcho(Echo_BidirectionalStreamingEchoServer) error +} + +func RegisterEchoServer(s *grpc.Server, srv EchoServer) { + s.RegisterService(&_Echo_serviceDesc, srv) +} + +func _Echo_UnaryEcho_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(EchoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EchoServer).UnaryEcho(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.examples.echo.Echo/UnaryEcho", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EchoServer).UnaryEcho(ctx, req.(*EchoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Echo_ServerStreamingEcho_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(EchoRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(EchoServer).ServerStreamingEcho(m, &echoServerStreamingEchoServer{stream}) +} + +type Echo_ServerStreamingEchoServer interface { + Send(*EchoResponse) error + grpc.ServerStream +} + +type echoServerStreamingEchoServer struct { + grpc.ServerStream +} + +func (x *echoServerStreamingEchoServer) Send(m *EchoResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _Echo_ClientStreamingEcho_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(EchoServer).ClientStreamingEcho(&echoClientStreamingEchoServer{stream}) +} + +type Echo_ClientStreamingEchoServer interface { + SendAndClose(*EchoResponse) error + Recv() (*EchoRequest, error) + grpc.ServerStream +} + +type echoClientStreamingEchoServer struct { + grpc.ServerStream +} + +func (x *echoClientStreamingEchoServer) SendAndClose(m *EchoResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *echoClientStreamingEchoServer) Recv() (*EchoRequest, error) { + m := new(EchoRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _Echo_BidirectionalStreamingEcho_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(EchoServer).BidirectionalStreamingEcho(&echoBidirectionalStreamingEchoServer{stream}) +} + +type Echo_BidirectionalStreamingEchoServer interface { + Send(*EchoResponse) error + Recv() (*EchoRequest, error) + grpc.ServerStream +} + +type echoBidirectionalStreamingEchoServer struct { + grpc.ServerStream +} + +func (x *echoBidirectionalStreamingEchoServer) Send(m *EchoResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *echoBidirectionalStreamingEchoServer) Recv() (*EchoRequest, error) { + m := new(EchoRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _Echo_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.examples.echo.Echo", + HandlerType: (*EchoServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UnaryEcho", + Handler: _Echo_UnaryEcho_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ServerStreamingEcho", + Handler: _Echo_ServerStreamingEcho_Handler, + ServerStreams: true, + }, + { + StreamName: "ClientStreamingEcho", + Handler: _Echo_ClientStreamingEcho_Handler, + ClientStreams: true, + }, + { + StreamName: "BidirectionalStreamingEcho", + Handler: _Echo_BidirectionalStreamingEcho_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "echo.proto", +} + +func init() { proto.RegisterFile("echo.proto", 
fileDescriptor_echo_9d6886b3223721ca) } + +var fileDescriptor_echo_9d6886b3223721ca = []byte{ + // 234 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x92, 0xb1, 0x4b, 0x03, 0x31, + 0x14, 0x87, 0x3d, 0x11, 0xa5, 0x4f, 0xa7, 0xb8, 0x94, 0x2e, 0x96, 0x5b, 0xbc, 0x29, 0x29, 0x16, + 0xff, 0x81, 0x8a, 0xbb, 0xb4, 0xb8, 0x88, 0x4b, 0x3c, 0x7f, 0xa6, 0x81, 0x5c, 0xde, 0xf9, 0x92, + 0x8a, 0xfe, 0xed, 0x2e, 0x92, 0x2b, 0x05, 0x41, 0xba, 0xd5, 0x2d, 0x8f, 0x7c, 0xef, 0xfb, 0x96, + 0x47, 0x84, 0x76, 0xcd, 0xba, 0x17, 0xce, 0xac, 0x94, 0x93, 0xbe, 0xd5, 0xf8, 0xb4, 0x5d, 0x1f, + 0x90, 0x74, 0xf9, 0xa9, 0xaf, 0xe9, 0xfc, 0xbe, 0x5d, 0xf3, 0x12, 0xef, 0x1b, 0xa4, 0xac, 0xc6, + 0x74, 0xd6, 0x21, 0x25, 0xeb, 0x30, 0xae, 0xa6, 0x55, 0x33, 0x5a, 0xee, 0xc6, 0xba, 0xa1, 0x8b, + 0x2d, 0x98, 0x7a, 0x8e, 0x09, 0xfb, 0xc9, 0x9b, 0xef, 0x63, 0x3a, 0x29, 0xa8, 0x7a, 0xa0, 0xd1, + 0x63, 0xb4, 0xf2, 0x35, 0x0c, 0x57, 0xfa, 0x6f, 0x5d, 0xff, 0x4a, 0x4f, 0xa6, 0xfb, 0x81, 0x6d, + 0xb2, 0x3e, 0x52, 0xcf, 0x74, 0xb9, 0x82, 0x7c, 0x40, 0x56, 0x59, 0x60, 0x3b, 0x1f, 0xdd, 0xc1, + 0xdc, 0xb3, 0xaa, 0xd8, 0xef, 0x82, 0x47, 0xcc, 0x87, 0xb7, 0x37, 0x95, 0x02, 0x4d, 0x16, 0xfe, + 0xd5, 0x0b, 0xda, 0xec, 0x39, 0xda, 0xf0, 0x1f, 0x91, 0x59, 0xb5, 0xb8, 0x7d, 0x9a, 0x3b, 0x66, + 0x17, 0xa0, 0x1d, 0x07, 0x1b, 0x9d, 0x66, 0x71, 0xa6, 0xac, 0x9a, 0xdd, 0xaa, 0x79, 0x83, 0xcd, + 0x1b, 0x41, 0x32, 0xc3, 0x59, 0x98, 0x62, 0x7a, 0x39, 0x1d, 0xde, 0xf3, 0x9f, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x23, 0x14, 0x26, 0x96, 0x30, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/examples/features/reflection/server/main.go b/vendor/google.golang.org/grpc/examples/features/reflection/server/main.go new file mode 100644 index 0000000..c563002 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/reflection/server/main.go @@ -0,0 +1,87 @@ +/* + * + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary server is an example server. +package main + +import ( + "context" + "flag" + "fmt" + "log" + "net" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + ecpb "google.golang.org/grpc/examples/features/proto/echo" + hwpb "google.golang.org/grpc/examples/helloworld/helloworld" + "google.golang.org/grpc/reflection" + "google.golang.org/grpc/status" +) + +var port = flag.Int("port", 50051, "the port to serve on") + +// hwServer is used to implement helloworld.GreeterServer. 
+type hwServer struct{} + +// SayHello implements helloworld.GreeterServer +func (s *hwServer) SayHello(ctx context.Context, in *hwpb.HelloRequest) (*hwpb.HelloReply, error) { + return &hwpb.HelloReply{Message: "Hello " + in.Name}, nil +} + +type ecServer struct{} + +func (s *ecServer) UnaryEcho(ctx context.Context, req *ecpb.EchoRequest) (*ecpb.EchoResponse, error) { + return &ecpb.EchoResponse{Message: req.Message}, nil +} + +func (s *ecServer) ServerStreamingEcho(*ecpb.EchoRequest, ecpb.Echo_ServerStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) ClientStreamingEcho(ecpb.Echo_ClientStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func (s *ecServer) BidirectionalStreamingEcho(ecpb.Echo_BidirectionalStreamingEchoServer) error { + return status.Errorf(codes.Unimplemented, "not implemented") +} + +func main() { + flag.Parse() + lis, err := net.Listen("tcp", fmt.Sprintf(":%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + fmt.Printf("server listening at %v\n", lis.Addr()) + + s := grpc.NewServer() + + // Register Greeter on the server. + hwpb.RegisterGreeterServer(s, &hwServer{}) + + // Register RouteGuide on the same server. + ecpb.RegisterEchoServer(s, &ecServer{}) + + // Register reflection service on gRPC server. + reflection.Register(s) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/features/wait_for_ready/main.go b/vendor/google.golang.org/grpc/examples/features/wait_for_ready/main.go new file mode 100644 index 0000000..b7df3bc --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/features/wait_for_ready/main.go @@ -0,0 +1,125 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Binary wait_for_ready is an example for "wait for ready". +package main + +import ( + "context" + "fmt" + "log" + "net" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + pb "google.golang.org/grpc/examples/features/proto/echo" + "google.golang.org/grpc/status" +) + +// server is used to implement EchoServer. +type server struct{} + +func (s *server) UnaryEcho(ctx context.Context, req *pb.EchoRequest) (*pb.EchoResponse, error) { + return &pb.EchoResponse{Message: req.Message}, nil +} + +func (s *server) ServerStreamingEcho(req *pb.EchoRequest, stream pb.Echo_ServerStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "RPC unimplemented") +} + +func (s *server) ClientStreamingEcho(stream pb.Echo_ClientStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "RPC unimplemented") +} + +func (s *server) BidirectionalStreamingEcho(stream pb.Echo_BidirectionalStreamingEchoServer) error { + return status.Error(codes.Unimplemented, "RPC unimplemented") +} + +// serve starts listening with a 2 seconds delay. 
+func serve() { + lis, err := net.Listen("tcp", ":50053") + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + s := grpc.NewServer() + pb.RegisterEchoServer(s, &server{}) + + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} + +func main() { + conn, err := grpc.Dial("localhost:50053", grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + + c := pb.NewEchoClient(conn) + + var wg sync.WaitGroup + wg.Add(3) + + // "Wait for ready" is not enabled, returns error with code "Unavailable". + go func() { + defer wg.Done() + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + _, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: "Hi!"}) + + got := status.Code(err) + fmt.Printf("[1] wanted = %v, got = %v\n", codes.Unavailable, got) + }() + + // "Wait for ready" is enabled, returns nil error. + go func() { + defer wg.Done() + + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + + _, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: "Hi!"}, grpc.WaitForReady(true)) + + got := status.Code(err) + fmt.Printf("[2] wanted = %v, got = %v\n", codes.OK, got) + }() + + // "Wait for ready" is enabled but exceeds the deadline before server starts listening, + // returns error with code "DeadlineExceeded". + go func() { + defer wg.Done() + + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + _, err := c.UnaryEcho(ctx, &pb.EchoRequest{Message: "Hi!"}, grpc.WaitForReady(true)) + + got := status.Code(err) + fmt.Printf("[3] wanted = %v, got = %v\n", codes.DeadlineExceeded, got) + }() + + time.Sleep(2 * time.Second) + go serve() + + wg.Wait() +} diff --git a/vendor/google.golang.org/grpc/examples/helloworld/greeter_client/main.go b/vendor/google.golang.org/grpc/examples/helloworld/greeter_client/main.go new file mode 100644 index 0000000..4330b9e --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/helloworld/greeter_client/main.go @@ -0,0 +1,58 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package main implements a client for Greeter service. +package main + +import ( + "context" + "log" + "os" + "time" + + "google.golang.org/grpc" + pb "google.golang.org/grpc/examples/helloworld/helloworld" +) + +const ( + address = "localhost:50051" + defaultName = "world" +) + +func main() { + // Set up a connection to the server. + conn, err := grpc.Dial(address, grpc.WithInsecure()) + if err != nil { + log.Fatalf("did not connect: %v", err) + } + defer conn.Close() + c := pb.NewGreeterClient(conn) + + // Contact the server and print out its response. 
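+	// The greeting name defaults to "world" and can be overridden by the first
+	// command-line argument; the SayHello call is bounded by a one-second timeout.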
+ name := defaultName + if len(os.Args) > 1 { + name = os.Args[1] + } + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + r, err := c.SayHello(ctx, &pb.HelloRequest{Name: name}) + if err != nil { + log.Fatalf("could not greet: %v", err) + } + log.Printf("Greeting: %s", r.Message) +} diff --git a/vendor/google.golang.org/grpc/examples/helloworld/greeter_server/main.go b/vendor/google.golang.org/grpc/examples/helloworld/greeter_server/main.go new file mode 100644 index 0000000..e99fb26 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/helloworld/greeter_server/main.go @@ -0,0 +1,56 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc -I ../helloworld --go_out=plugins=grpc:../helloworld ../helloworld/helloworld.proto + +// Package main implements a server for Greeter service. +package main + +import ( + "context" + "log" + "net" + + "google.golang.org/grpc" + pb "google.golang.org/grpc/examples/helloworld/helloworld" +) + +const ( + port = ":50051" +) + +// server is used to implement helloworld.GreeterServer. +type server struct{} + +// SayHello implements helloworld.GreeterServer +func (s *server) SayHello(ctx context.Context, in *pb.HelloRequest) (*pb.HelloReply, error) { + log.Printf("Received: %v", in.Name) + return &pb.HelloReply{Message: "Hello " + in.Name}, nil +} + +func main() { + lis, err := net.Listen("tcp", port) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + s := grpc.NewServer() + pb.RegisterGreeterServer(s, &server{}) + if err := s.Serve(lis); err != nil { + log.Fatalf("failed to serve: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/examples/helloworld/helloworld/helloworld.pb.go b/vendor/google.golang.org/grpc/examples/helloworld/helloworld/helloworld.pb.go new file mode 100644 index 0000000..11383d7 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/helloworld/helloworld/helloworld.pb.go @@ -0,0 +1,198 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: helloworld.proto + +package helloworld + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The request message containing the user's name. 
+type HelloRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HelloRequest) Reset() { *m = HelloRequest{} } +func (m *HelloRequest) String() string { return proto.CompactTextString(m) } +func (*HelloRequest) ProtoMessage() {} +func (*HelloRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_helloworld_71e208cbdc16936b, []int{0} +} +func (m *HelloRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HelloRequest.Unmarshal(m, b) +} +func (m *HelloRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HelloRequest.Marshal(b, m, deterministic) +} +func (dst *HelloRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_HelloRequest.Merge(dst, src) +} +func (m *HelloRequest) XXX_Size() int { + return xxx_messageInfo_HelloRequest.Size(m) +} +func (m *HelloRequest) XXX_DiscardUnknown() { + xxx_messageInfo_HelloRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_HelloRequest proto.InternalMessageInfo + +func (m *HelloRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The response message containing the greetings +type HelloReply struct { + Message string `protobuf:"bytes,1,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HelloReply) Reset() { *m = HelloReply{} } +func (m *HelloReply) String() string { return proto.CompactTextString(m) } +func (*HelloReply) ProtoMessage() {} +func (*HelloReply) Descriptor() ([]byte, []int) { + return fileDescriptor_helloworld_71e208cbdc16936b, []int{1} +} +func (m *HelloReply) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HelloReply.Unmarshal(m, b) +} +func (m *HelloReply) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HelloReply.Marshal(b, m, deterministic) +} +func (dst *HelloReply) XXX_Merge(src proto.Message) { + xxx_messageInfo_HelloReply.Merge(dst, src) +} +func (m *HelloReply) XXX_Size() int { + return xxx_messageInfo_HelloReply.Size(m) +} +func (m *HelloReply) XXX_DiscardUnknown() { + xxx_messageInfo_HelloReply.DiscardUnknown(m) +} + +var xxx_messageInfo_HelloReply proto.InternalMessageInfo + +func (m *HelloReply) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +func init() { + proto.RegisterType((*HelloRequest)(nil), "helloworld.HelloRequest") + proto.RegisterType((*HelloReply)(nil), "helloworld.HelloReply") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// GreeterClient is the client API for Greeter service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type GreeterClient interface { + // Sends a greeting + SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloReply, error) +} + +type greeterClient struct { + cc *grpc.ClientConn +} + +func NewGreeterClient(cc *grpc.ClientConn) GreeterClient { + return &greeterClient{cc} +} + +func (c *greeterClient) SayHello(ctx context.Context, in *HelloRequest, opts ...grpc.CallOption) (*HelloReply, error) { + out := new(HelloReply) + err := c.cc.Invoke(ctx, "/helloworld.Greeter/SayHello", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// GreeterServer is the server API for Greeter service. +type GreeterServer interface { + // Sends a greeting + SayHello(context.Context, *HelloRequest) (*HelloReply, error) +} + +func RegisterGreeterServer(s *grpc.Server, srv GreeterServer) { + s.RegisterService(&_Greeter_serviceDesc, srv) +} + +func _Greeter_SayHello_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(HelloRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(GreeterServer).SayHello(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/helloworld.Greeter/SayHello", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(GreeterServer).SayHello(ctx, req.(*HelloRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _Greeter_serviceDesc = grpc.ServiceDesc{ + ServiceName: "helloworld.Greeter", + HandlerType: (*GreeterServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "SayHello", + Handler: _Greeter_SayHello_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "helloworld.proto", +} + +func init() { proto.RegisterFile("helloworld.proto", fileDescriptor_helloworld_71e208cbdc16936b) } + +var fileDescriptor_helloworld_71e208cbdc16936b = []byte{ + // 175 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0xc8, 0x48, 0xcd, 0xc9, + 0xc9, 0x2f, 0xcf, 0x2f, 0xca, 0x49, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x42, 0x88, + 0x28, 0x29, 0x71, 0xf1, 0x78, 0x80, 0x78, 0x41, 0xa9, 0x85, 0xa5, 0xa9, 0xc5, 0x25, 0x42, 0x42, + 0x5c, 0x2c, 0x79, 0x89, 0xb9, 0xa9, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0x9c, 0x41, 0x60, 0xb6, 0x92, + 0x1a, 0x17, 0x17, 0x54, 0x4d, 0x41, 0x4e, 0xa5, 0x90, 0x04, 0x17, 0x7b, 0x6e, 0x6a, 0x71, 0x71, + 0x62, 0x3a, 0x4c, 0x11, 0x8c, 0x6b, 0xe4, 0xc9, 0xc5, 0xee, 0x5e, 0x94, 0x9a, 0x5a, 0x92, 0x5a, + 0x24, 0x64, 0xc7, 0xc5, 0x11, 0x9c, 0x58, 0x09, 0xd6, 0x25, 0x24, 0xa1, 0x87, 0xe4, 0x02, 0x64, + 0xcb, 0xa4, 0xc4, 0xb0, 0xc8, 0x14, 0xe4, 0x54, 0x2a, 0x31, 0x38, 0x19, 0x70, 0x49, 0x67, 0xe6, + 0xeb, 0xa5, 0x17, 0x15, 0x24, 0xeb, 0xa5, 0x56, 0x24, 0xe6, 0x16, 0xe4, 0xa4, 0x16, 0x23, 0xa9, + 0x75, 0xe2, 0x07, 0x2b, 0x0e, 0x07, 0xb1, 0x03, 0x40, 0x5e, 0x0a, 0x60, 0x4c, 0x62, 0x03, 0xfb, + 0xcd, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x0f, 0xb7, 0xcd, 0xf2, 0xef, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/examples/helloworld/mock_helloworld/hw_mock.go b/vendor/google.golang.org/grpc/examples/helloworld/mock_helloworld/hw_mock.go new file mode 100644 index 0000000..14957ed --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/helloworld/mock_helloworld/hw_mock.go @@ -0,0 +1,48 @@ +// Automatically generated by MockGen. DO NOT EDIT! 
+// Source: google.golang.org/grpc/examples/helloworld/helloworld (interfaces: GreeterClient) + +package mock_helloworld + +import ( + gomock "github.com/golang/mock/gomock" + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" + helloworld "google.golang.org/grpc/examples/helloworld/helloworld" +) + +// Mock of GreeterClient interface +type MockGreeterClient struct { + ctrl *gomock.Controller + recorder *_MockGreeterClientRecorder +} + +// Recorder for MockGreeterClient (not exported) +type _MockGreeterClientRecorder struct { + mock *MockGreeterClient +} + +func NewMockGreeterClient(ctrl *gomock.Controller) *MockGreeterClient { + mock := &MockGreeterClient{ctrl: ctrl} + mock.recorder = &_MockGreeterClientRecorder{mock} + return mock +} + +func (_m *MockGreeterClient) EXPECT() *_MockGreeterClientRecorder { + return _m.recorder +} + +func (_m *MockGreeterClient) SayHello(_param0 context.Context, _param1 *helloworld.HelloRequest, _param2 ...grpc.CallOption) (*helloworld.HelloReply, error) { + _s := []interface{}{_param0, _param1} + for _, _x := range _param2 { + _s = append(_s, _x) + } + ret := _m.ctrl.Call(_m, "SayHello", _s...) + ret0, _ := ret[0].(*helloworld.HelloReply) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockGreeterClientRecorder) SayHello(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + _s := append([]interface{}{arg0, arg1}, arg2...) + return _mr.mock.ctrl.RecordCall(_mr.mock, "SayHello", _s...) +} diff --git a/vendor/google.golang.org/grpc/examples/route_guide/client/client.go b/vendor/google.golang.org/grpc/examples/route_guide/client/client.go new file mode 100644 index 0000000..29fb737 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/route_guide/client/client.go @@ -0,0 +1,192 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package main implements a simple gRPC client that demonstrates how to use gRPC-Go libraries +// to perform unary, client streaming, server streaming and full duplex RPCs. +// +// It interacts with the route guide service whose definition can be found in routeguide/route_guide.proto. +package main + +import ( + "context" + "flag" + "io" + "log" + "math/rand" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + pb "google.golang.org/grpc/examples/route_guide/routeguide" + "google.golang.org/grpc/testdata" +) + +var ( + tls = flag.Bool("tls", false, "Connection uses TLS if true, else plain TCP") + caFile = flag.String("ca_file", "", "The file containning the CA root cert file") + serverAddr = flag.String("server_addr", "127.0.0.1:10000", "The server address in the format of host:port") + serverHostOverride = flag.String("server_host_override", "x.test.youtube.com", "The server name use to verify the hostname returned by TLS handshake") +) + +// printFeature gets the feature for the given point. 
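+// It issues a unary GetFeature RPC with a ten-second deadline and logs the
+// Feature returned by the server (an unnamed Feature if nothing is known at
+// that point).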
+func printFeature(client pb.RouteGuideClient, point *pb.Point) { + log.Printf("Getting feature for point (%d, %d)", point.Latitude, point.Longitude) + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + feature, err := client.GetFeature(ctx, point) + if err != nil { + log.Fatalf("%v.GetFeatures(_) = _, %v: ", client, err) + } + log.Println(feature) +} + +// printFeatures lists all the features within the given bounding Rectangle. +func printFeatures(client pb.RouteGuideClient, rect *pb.Rectangle) { + log.Printf("Looking for features within %v", rect) + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + stream, err := client.ListFeatures(ctx, rect) + if err != nil { + log.Fatalf("%v.ListFeatures(_) = _, %v", client, err) + } + for { + feature, err := stream.Recv() + if err == io.EOF { + break + } + if err != nil { + log.Fatalf("%v.ListFeatures(_) = _, %v", client, err) + } + log.Println(feature) + } +} + +// runRecordRoute sends a sequence of points to server and expects to get a RouteSummary from server. +func runRecordRoute(client pb.RouteGuideClient) { + // Create a random number of random points + r := rand.New(rand.NewSource(time.Now().UnixNano())) + pointCount := int(r.Int31n(100)) + 2 // Traverse at least two points + var points []*pb.Point + for i := 0; i < pointCount; i++ { + points = append(points, randomPoint(r)) + } + log.Printf("Traversing %d points.", len(points)) + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + stream, err := client.RecordRoute(ctx) + if err != nil { + log.Fatalf("%v.RecordRoute(_) = _, %v", client, err) + } + for _, point := range points { + if err := stream.Send(point); err != nil { + log.Fatalf("%v.Send(%v) = %v", stream, point, err) + } + } + reply, err := stream.CloseAndRecv() + if err != nil { + log.Fatalf("%v.CloseAndRecv() got error %v, want %v", stream, err, nil) + } + log.Printf("Route summary: %v", reply) +} + +// runRouteChat receives a sequence of route notes, while sending notes for various locations. +func runRouteChat(client pb.RouteGuideClient) { + notes := []*pb.RouteNote{ + {Location: &pb.Point{Latitude: 0, Longitude: 1}, Message: "First message"}, + {Location: &pb.Point{Latitude: 0, Longitude: 2}, Message: "Second message"}, + {Location: &pb.Point{Latitude: 0, Longitude: 3}, Message: "Third message"}, + {Location: &pb.Point{Latitude: 0, Longitude: 1}, Message: "Fourth message"}, + {Location: &pb.Point{Latitude: 0, Longitude: 2}, Message: "Fifth message"}, + {Location: &pb.Point{Latitude: 0, Longitude: 3}, Message: "Sixth message"}, + } + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + stream, err := client.RouteChat(ctx) + if err != nil { + log.Fatalf("%v.RouteChat(_) = _, %v", client, err) + } + waitc := make(chan struct{}) + go func() { + for { + in, err := stream.Recv() + if err == io.EOF { + // read done. 
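+				// Closing waitc tells runRouteChat that the receive loop has
+				// drained the stream, so the function can return once CloseSend
+				// has been issued.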
+ close(waitc) + return + } + if err != nil { + log.Fatalf("Failed to receive a note : %v", err) + } + log.Printf("Got message %s at point(%d, %d)", in.Message, in.Location.Latitude, in.Location.Longitude) + } + }() + for _, note := range notes { + if err := stream.Send(note); err != nil { + log.Fatalf("Failed to send a note: %v", err) + } + } + stream.CloseSend() + <-waitc +} + +func randomPoint(r *rand.Rand) *pb.Point { + lat := (r.Int31n(180) - 90) * 1e7 + long := (r.Int31n(360) - 180) * 1e7 + return &pb.Point{Latitude: lat, Longitude: long} +} + +func main() { + flag.Parse() + var opts []grpc.DialOption + if *tls { + if *caFile == "" { + *caFile = testdata.Path("ca.pem") + } + creds, err := credentials.NewClientTLSFromFile(*caFile, *serverHostOverride) + if err != nil { + log.Fatalf("Failed to create TLS credentials %v", err) + } + opts = append(opts, grpc.WithTransportCredentials(creds)) + } else { + opts = append(opts, grpc.WithInsecure()) + } + conn, err := grpc.Dial(*serverAddr, opts...) + if err != nil { + log.Fatalf("fail to dial: %v", err) + } + defer conn.Close() + client := pb.NewRouteGuideClient(conn) + + // Looking for a valid feature + printFeature(client, &pb.Point{Latitude: 409146138, Longitude: -746188906}) + + // Feature missing. + printFeature(client, &pb.Point{Latitude: 0, Longitude: 0}) + + // Looking for features between 40, -75 and 42, -73. + printFeatures(client, &pb.Rectangle{ + Lo: &pb.Point{Latitude: 400000000, Longitude: -750000000}, + Hi: &pb.Point{Latitude: 420000000, Longitude: -730000000}, + }) + + // RecordRoute + runRecordRoute(client) + + // RouteChat + runRouteChat(client) +} diff --git a/vendor/google.golang.org/grpc/examples/route_guide/mock_routeguide/rg_mock.go b/vendor/google.golang.org/grpc/examples/route_guide/mock_routeguide/rg_mock.go new file mode 100644 index 0000000..328c929 --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/route_guide/mock_routeguide/rg_mock.go @@ -0,0 +1,200 @@ +// Automatically generated by MockGen. DO NOT EDIT! +// Source: google.golang.org/grpc/examples/route_guide/routeguide (interfaces: RouteGuideClient,RouteGuide_RouteChatClient) + +package mock_routeguide + +import ( + gomock "github.com/golang/mock/gomock" + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" + routeguide "google.golang.org/grpc/examples/route_guide/routeguide" + metadata "google.golang.org/grpc/metadata" +) + +// Mock of RouteGuideClient interface +type MockRouteGuideClient struct { + ctrl *gomock.Controller + recorder *_MockRouteGuideClientRecorder +} + +// Recorder for MockRouteGuideClient (not exported) +type _MockRouteGuideClientRecorder struct { + mock *MockRouteGuideClient +} + +func NewMockRouteGuideClient(ctrl *gomock.Controller) *MockRouteGuideClient { + mock := &MockRouteGuideClient{ctrl: ctrl} + mock.recorder = &_MockRouteGuideClientRecorder{mock} + return mock +} + +func (_m *MockRouteGuideClient) EXPECT() *_MockRouteGuideClientRecorder { + return _m.recorder +} + +func (_m *MockRouteGuideClient) GetFeature(_param0 context.Context, _param1 *routeguide.Point, _param2 ...grpc.CallOption) (*routeguide.Feature, error) { + _s := []interface{}{_param0, _param1} + for _, _x := range _param2 { + _s = append(_s, _x) + } + ret := _m.ctrl.Call(_m, "GetFeature", _s...) 
+ ret0, _ := ret[0].(*routeguide.Feature) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuideClientRecorder) GetFeature(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + _s := append([]interface{}{arg0, arg1}, arg2...) + return _mr.mock.ctrl.RecordCall(_mr.mock, "GetFeature", _s...) +} + +func (_m *MockRouteGuideClient) ListFeatures(_param0 context.Context, _param1 *routeguide.Rectangle, _param2 ...grpc.CallOption) (routeguide.RouteGuide_ListFeaturesClient, error) { + _s := []interface{}{_param0, _param1} + for _, _x := range _param2 { + _s = append(_s, _x) + } + ret := _m.ctrl.Call(_m, "ListFeatures", _s...) + ret0, _ := ret[0].(routeguide.RouteGuide_ListFeaturesClient) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuideClientRecorder) ListFeatures(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { + _s := append([]interface{}{arg0, arg1}, arg2...) + return _mr.mock.ctrl.RecordCall(_mr.mock, "ListFeatures", _s...) +} + +func (_m *MockRouteGuideClient) RecordRoute(_param0 context.Context, _param1 ...grpc.CallOption) (routeguide.RouteGuide_RecordRouteClient, error) { + _s := []interface{}{_param0} + for _, _x := range _param1 { + _s = append(_s, _x) + } + ret := _m.ctrl.Call(_m, "RecordRoute", _s...) + ret0, _ := ret[0].(routeguide.RouteGuide_RecordRouteClient) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuideClientRecorder) RecordRoute(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + _s := append([]interface{}{arg0}, arg1...) + return _mr.mock.ctrl.RecordCall(_mr.mock, "RecordRoute", _s...) +} + +func (_m *MockRouteGuideClient) RouteChat(_param0 context.Context, _param1 ...grpc.CallOption) (routeguide.RouteGuide_RouteChatClient, error) { + _s := []interface{}{_param0} + for _, _x := range _param1 { + _s = append(_s, _x) + } + ret := _m.ctrl.Call(_m, "RouteChat", _s...) + ret0, _ := ret[0].(routeguide.RouteGuide_RouteChatClient) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuideClientRecorder) RouteChat(arg0 interface{}, arg1 ...interface{}) *gomock.Call { + _s := append([]interface{}{arg0}, arg1...) + return _mr.mock.ctrl.RecordCall(_mr.mock, "RouteChat", _s...) 
+} + +// Mock of RouteGuide_RouteChatClient interface +type MockRouteGuide_RouteChatClient struct { + ctrl *gomock.Controller + recorder *_MockRouteGuide_RouteChatClientRecorder +} + +// Recorder for MockRouteGuide_RouteChatClient (not exported) +type _MockRouteGuide_RouteChatClientRecorder struct { + mock *MockRouteGuide_RouteChatClient +} + +func NewMockRouteGuide_RouteChatClient(ctrl *gomock.Controller) *MockRouteGuide_RouteChatClient { + mock := &MockRouteGuide_RouteChatClient{ctrl: ctrl} + mock.recorder = &_MockRouteGuide_RouteChatClientRecorder{mock} + return mock +} + +func (_m *MockRouteGuide_RouteChatClient) EXPECT() *_MockRouteGuide_RouteChatClientRecorder { + return _m.recorder +} + +func (_m *MockRouteGuide_RouteChatClient) CloseSend() error { + ret := _m.ctrl.Call(_m, "CloseSend") + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) CloseSend() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "CloseSend") +} + +func (_m *MockRouteGuide_RouteChatClient) Context() context.Context { + ret := _m.ctrl.Call(_m, "Context") + ret0, _ := ret[0].(context.Context) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) Context() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Context") +} + +func (_m *MockRouteGuide_RouteChatClient) Header() (metadata.MD, error) { + ret := _m.ctrl.Call(_m, "Header") + ret0, _ := ret[0].(metadata.MD) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) Header() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Header") +} + +func (_m *MockRouteGuide_RouteChatClient) Recv() (*routeguide.RouteNote, error) { + ret := _m.ctrl.Call(_m, "Recv") + ret0, _ := ret[0].(*routeguide.RouteNote) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) Recv() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Recv") +} + +func (_m *MockRouteGuide_RouteChatClient) RecvMsg(_param0 interface{}) error { + ret := _m.ctrl.Call(_m, "RecvMsg", _param0) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) RecvMsg(arg0 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "RecvMsg", arg0) +} + +func (_m *MockRouteGuide_RouteChatClient) Send(_param0 *routeguide.RouteNote) error { + ret := _m.ctrl.Call(_m, "Send", _param0) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) Send(arg0 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Send", arg0) +} + +func (_m *MockRouteGuide_RouteChatClient) SendMsg(_param0 interface{}) error { + ret := _m.ctrl.Call(_m, "SendMsg", _param0) + ret0, _ := ret[0].(error) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) SendMsg(arg0 interface{}) *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "SendMsg", arg0) +} + +func (_m *MockRouteGuide_RouteChatClient) Trailer() metadata.MD { + ret := _m.ctrl.Call(_m, "Trailer") + ret0, _ := ret[0].(metadata.MD) + return ret0 +} + +func (_mr *_MockRouteGuide_RouteChatClientRecorder) Trailer() *gomock.Call { + return _mr.mock.ctrl.RecordCall(_mr.mock, "Trailer") +} diff --git a/vendor/google.golang.org/grpc/examples/route_guide/routeguide/route_guide.pb.go b/vendor/google.golang.org/grpc/examples/route_guide/routeguide/route_guide.pb.go new file mode 100644 index 0000000..199a2ee --- /dev/null +++ 
b/vendor/google.golang.org/grpc/examples/route_guide/routeguide/route_guide.pb.go @@ -0,0 +1,640 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: route_guide.proto + +package routeguide + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Points are represented as latitude-longitude pairs in the E7 representation +// (degrees multiplied by 10**7 and rounded to the nearest integer). +// Latitudes should be in the range +/- 90 degrees and longitude should be in +// the range +/- 180 degrees (inclusive). +type Point struct { + Latitude int32 `protobuf:"varint,1,opt,name=latitude,proto3" json:"latitude,omitempty"` + Longitude int32 `protobuf:"varint,2,opt,name=longitude,proto3" json:"longitude,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Point) Reset() { *m = Point{} } +func (m *Point) String() string { return proto.CompactTextString(m) } +func (*Point) ProtoMessage() {} +func (*Point) Descriptor() ([]byte, []int) { + return fileDescriptor_route_guide_dc79de2de4c66c19, []int{0} +} +func (m *Point) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Point.Unmarshal(m, b) +} +func (m *Point) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Point.Marshal(b, m, deterministic) +} +func (dst *Point) XXX_Merge(src proto.Message) { + xxx_messageInfo_Point.Merge(dst, src) +} +func (m *Point) XXX_Size() int { + return xxx_messageInfo_Point.Size(m) +} +func (m *Point) XXX_DiscardUnknown() { + xxx_messageInfo_Point.DiscardUnknown(m) +} + +var xxx_messageInfo_Point proto.InternalMessageInfo + +func (m *Point) GetLatitude() int32 { + if m != nil { + return m.Latitude + } + return 0 +} + +func (m *Point) GetLongitude() int32 { + if m != nil { + return m.Longitude + } + return 0 +} + +// A latitude-longitude rectangle, represented as two diagonally opposite +// points "lo" and "hi". +type Rectangle struct { + // One corner of the rectangle. + Lo *Point `protobuf:"bytes,1,opt,name=lo,proto3" json:"lo,omitempty"` + // The other corner of the rectangle. 
+ Hi *Point `protobuf:"bytes,2,opt,name=hi,proto3" json:"hi,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Rectangle) Reset() { *m = Rectangle{} } +func (m *Rectangle) String() string { return proto.CompactTextString(m) } +func (*Rectangle) ProtoMessage() {} +func (*Rectangle) Descriptor() ([]byte, []int) { + return fileDescriptor_route_guide_dc79de2de4c66c19, []int{1} +} +func (m *Rectangle) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Rectangle.Unmarshal(m, b) +} +func (m *Rectangle) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Rectangle.Marshal(b, m, deterministic) +} +func (dst *Rectangle) XXX_Merge(src proto.Message) { + xxx_messageInfo_Rectangle.Merge(dst, src) +} +func (m *Rectangle) XXX_Size() int { + return xxx_messageInfo_Rectangle.Size(m) +} +func (m *Rectangle) XXX_DiscardUnknown() { + xxx_messageInfo_Rectangle.DiscardUnknown(m) +} + +var xxx_messageInfo_Rectangle proto.InternalMessageInfo + +func (m *Rectangle) GetLo() *Point { + if m != nil { + return m.Lo + } + return nil +} + +func (m *Rectangle) GetHi() *Point { + if m != nil { + return m.Hi + } + return nil +} + +// A feature names something at a given point. +// +// If a feature could not be named, the name is empty. +type Feature struct { + // The name of the feature. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // The point where the feature is detected. + Location *Point `protobuf:"bytes,2,opt,name=location,proto3" json:"location,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Feature) Reset() { *m = Feature{} } +func (m *Feature) String() string { return proto.CompactTextString(m) } +func (*Feature) ProtoMessage() {} +func (*Feature) Descriptor() ([]byte, []int) { + return fileDescriptor_route_guide_dc79de2de4c66c19, []int{2} +} +func (m *Feature) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Feature.Unmarshal(m, b) +} +func (m *Feature) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Feature.Marshal(b, m, deterministic) +} +func (dst *Feature) XXX_Merge(src proto.Message) { + xxx_messageInfo_Feature.Merge(dst, src) +} +func (m *Feature) XXX_Size() int { + return xxx_messageInfo_Feature.Size(m) +} +func (m *Feature) XXX_DiscardUnknown() { + xxx_messageInfo_Feature.DiscardUnknown(m) +} + +var xxx_messageInfo_Feature proto.InternalMessageInfo + +func (m *Feature) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *Feature) GetLocation() *Point { + if m != nil { + return m.Location + } + return nil +} + +// A RouteNote is a message sent while at a given point. +type RouteNote struct { + // The location from which the message is sent. + Location *Point `protobuf:"bytes,1,opt,name=location,proto3" json:"location,omitempty"` + // The message to be sent. 
+ Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RouteNote) Reset() { *m = RouteNote{} } +func (m *RouteNote) String() string { return proto.CompactTextString(m) } +func (*RouteNote) ProtoMessage() {} +func (*RouteNote) Descriptor() ([]byte, []int) { + return fileDescriptor_route_guide_dc79de2de4c66c19, []int{3} +} +func (m *RouteNote) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RouteNote.Unmarshal(m, b) +} +func (m *RouteNote) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RouteNote.Marshal(b, m, deterministic) +} +func (dst *RouteNote) XXX_Merge(src proto.Message) { + xxx_messageInfo_RouteNote.Merge(dst, src) +} +func (m *RouteNote) XXX_Size() int { + return xxx_messageInfo_RouteNote.Size(m) +} +func (m *RouteNote) XXX_DiscardUnknown() { + xxx_messageInfo_RouteNote.DiscardUnknown(m) +} + +var xxx_messageInfo_RouteNote proto.InternalMessageInfo + +func (m *RouteNote) GetLocation() *Point { + if m != nil { + return m.Location + } + return nil +} + +func (m *RouteNote) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// A RouteSummary is received in response to a RecordRoute rpc. +// +// It contains the number of individual points received, the number of +// detected features, and the total distance covered as the cumulative sum of +// the distance between each point. +type RouteSummary struct { + // The number of points received. + PointCount int32 `protobuf:"varint,1,opt,name=point_count,json=pointCount,proto3" json:"point_count,omitempty"` + // The number of known features passed while traversing the route. + FeatureCount int32 `protobuf:"varint,2,opt,name=feature_count,json=featureCount,proto3" json:"feature_count,omitempty"` + // The distance covered in metres. + Distance int32 `protobuf:"varint,3,opt,name=distance,proto3" json:"distance,omitempty"` + // The duration of the traversal in seconds. 
+ ElapsedTime int32 `protobuf:"varint,4,opt,name=elapsed_time,json=elapsedTime,proto3" json:"elapsed_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RouteSummary) Reset() { *m = RouteSummary{} } +func (m *RouteSummary) String() string { return proto.CompactTextString(m) } +func (*RouteSummary) ProtoMessage() {} +func (*RouteSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_route_guide_dc79de2de4c66c19, []int{4} +} +func (m *RouteSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RouteSummary.Unmarshal(m, b) +} +func (m *RouteSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RouteSummary.Marshal(b, m, deterministic) +} +func (dst *RouteSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_RouteSummary.Merge(dst, src) +} +func (m *RouteSummary) XXX_Size() int { + return xxx_messageInfo_RouteSummary.Size(m) +} +func (m *RouteSummary) XXX_DiscardUnknown() { + xxx_messageInfo_RouteSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_RouteSummary proto.InternalMessageInfo + +func (m *RouteSummary) GetPointCount() int32 { + if m != nil { + return m.PointCount + } + return 0 +} + +func (m *RouteSummary) GetFeatureCount() int32 { + if m != nil { + return m.FeatureCount + } + return 0 +} + +func (m *RouteSummary) GetDistance() int32 { + if m != nil { + return m.Distance + } + return 0 +} + +func (m *RouteSummary) GetElapsedTime() int32 { + if m != nil { + return m.ElapsedTime + } + return 0 +} + +func init() { + proto.RegisterType((*Point)(nil), "routeguide.Point") + proto.RegisterType((*Rectangle)(nil), "routeguide.Rectangle") + proto.RegisterType((*Feature)(nil), "routeguide.Feature") + proto.RegisterType((*RouteNote)(nil), "routeguide.RouteNote") + proto.RegisterType((*RouteSummary)(nil), "routeguide.RouteSummary") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// RouteGuideClient is the client API for RouteGuide service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type RouteGuideClient interface { + // A simple RPC. + // + // Obtains the feature at a given position. + // + // A feature with an empty name is returned if there's no feature at the given + // position. + GetFeature(ctx context.Context, in *Point, opts ...grpc.CallOption) (*Feature, error) + // A server-to-client streaming RPC. + // + // Obtains the Features available within the given Rectangle. Results are + // streamed rather than returned at once (e.g. in a response message with a + // repeated field), as the rectangle may cover a large area and contain a + // huge number of features. + ListFeatures(ctx context.Context, in *Rectangle, opts ...grpc.CallOption) (RouteGuide_ListFeaturesClient, error) + // A client-to-server streaming RPC. + // + // Accepts a stream of Points on a route being traversed, returning a + // RouteSummary when traversal is completed. + RecordRoute(ctx context.Context, opts ...grpc.CallOption) (RouteGuide_RecordRouteClient, error) + // A Bidirectional streaming RPC. 
+ // + // Accepts a stream of RouteNotes sent while a route is being traversed, + // while receiving other RouteNotes (e.g. from other users). + RouteChat(ctx context.Context, opts ...grpc.CallOption) (RouteGuide_RouteChatClient, error) +} + +type routeGuideClient struct { + cc *grpc.ClientConn +} + +func NewRouteGuideClient(cc *grpc.ClientConn) RouteGuideClient { + return &routeGuideClient{cc} +} + +func (c *routeGuideClient) GetFeature(ctx context.Context, in *Point, opts ...grpc.CallOption) (*Feature, error) { + out := new(Feature) + err := c.cc.Invoke(ctx, "/routeguide.RouteGuide/GetFeature", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *routeGuideClient) ListFeatures(ctx context.Context, in *Rectangle, opts ...grpc.CallOption) (RouteGuide_ListFeaturesClient, error) { + stream, err := c.cc.NewStream(ctx, &_RouteGuide_serviceDesc.Streams[0], "/routeguide.RouteGuide/ListFeatures", opts...) + if err != nil { + return nil, err + } + x := &routeGuideListFeaturesClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type RouteGuide_ListFeaturesClient interface { + Recv() (*Feature, error) + grpc.ClientStream +} + +type routeGuideListFeaturesClient struct { + grpc.ClientStream +} + +func (x *routeGuideListFeaturesClient) Recv() (*Feature, error) { + m := new(Feature) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *routeGuideClient) RecordRoute(ctx context.Context, opts ...grpc.CallOption) (RouteGuide_RecordRouteClient, error) { + stream, err := c.cc.NewStream(ctx, &_RouteGuide_serviceDesc.Streams[1], "/routeguide.RouteGuide/RecordRoute", opts...) + if err != nil { + return nil, err + } + x := &routeGuideRecordRouteClient{stream} + return x, nil +} + +type RouteGuide_RecordRouteClient interface { + Send(*Point) error + CloseAndRecv() (*RouteSummary, error) + grpc.ClientStream +} + +type routeGuideRecordRouteClient struct { + grpc.ClientStream +} + +func (x *routeGuideRecordRouteClient) Send(m *Point) error { + return x.ClientStream.SendMsg(m) +} + +func (x *routeGuideRecordRouteClient) CloseAndRecv() (*RouteSummary, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(RouteSummary) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *routeGuideClient) RouteChat(ctx context.Context, opts ...grpc.CallOption) (RouteGuide_RouteChatClient, error) { + stream, err := c.cc.NewStream(ctx, &_RouteGuide_serviceDesc.Streams[2], "/routeguide.RouteGuide/RouteChat", opts...) + if err != nil { + return nil, err + } + x := &routeGuideRouteChatClient{stream} + return x, nil +} + +type RouteGuide_RouteChatClient interface { + Send(*RouteNote) error + Recv() (*RouteNote, error) + grpc.ClientStream +} + +type routeGuideRouteChatClient struct { + grpc.ClientStream +} + +func (x *routeGuideRouteChatClient) Send(m *RouteNote) error { + return x.ClientStream.SendMsg(m) +} + +func (x *routeGuideRouteChatClient) Recv() (*RouteNote, error) { + m := new(RouteNote) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// RouteGuideServer is the server API for RouteGuide service. +type RouteGuideServer interface { + // A simple RPC. + // + // Obtains the feature at a given position. 
+ // + // A feature with an empty name is returned if there's no feature at the given + // position. + GetFeature(context.Context, *Point) (*Feature, error) + // A server-to-client streaming RPC. + // + // Obtains the Features available within the given Rectangle. Results are + // streamed rather than returned at once (e.g. in a response message with a + // repeated field), as the rectangle may cover a large area and contain a + // huge number of features. + ListFeatures(*Rectangle, RouteGuide_ListFeaturesServer) error + // A client-to-server streaming RPC. + // + // Accepts a stream of Points on a route being traversed, returning a + // RouteSummary when traversal is completed. + RecordRoute(RouteGuide_RecordRouteServer) error + // A Bidirectional streaming RPC. + // + // Accepts a stream of RouteNotes sent while a route is being traversed, + // while receiving other RouteNotes (e.g. from other users). + RouteChat(RouteGuide_RouteChatServer) error +} + +func RegisterRouteGuideServer(s *grpc.Server, srv RouteGuideServer) { + s.RegisterService(&_RouteGuide_serviceDesc, srv) +} + +func _RouteGuide_GetFeature_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Point) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(RouteGuideServer).GetFeature(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/routeguide.RouteGuide/GetFeature", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(RouteGuideServer).GetFeature(ctx, req.(*Point)) + } + return interceptor(ctx, in, info, handler) +} + +func _RouteGuide_ListFeatures_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(Rectangle) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(RouteGuideServer).ListFeatures(m, &routeGuideListFeaturesServer{stream}) +} + +type RouteGuide_ListFeaturesServer interface { + Send(*Feature) error + grpc.ServerStream +} + +type routeGuideListFeaturesServer struct { + grpc.ServerStream +} + +func (x *routeGuideListFeaturesServer) Send(m *Feature) error { + return x.ServerStream.SendMsg(m) +} + +func _RouteGuide_RecordRoute_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(RouteGuideServer).RecordRoute(&routeGuideRecordRouteServer{stream}) +} + +type RouteGuide_RecordRouteServer interface { + SendAndClose(*RouteSummary) error + Recv() (*Point, error) + grpc.ServerStream +} + +type routeGuideRecordRouteServer struct { + grpc.ServerStream +} + +func (x *routeGuideRecordRouteServer) SendAndClose(m *RouteSummary) error { + return x.ServerStream.SendMsg(m) +} + +func (x *routeGuideRecordRouteServer) Recv() (*Point, error) { + m := new(Point) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _RouteGuide_RouteChat_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(RouteGuideServer).RouteChat(&routeGuideRouteChatServer{stream}) +} + +type RouteGuide_RouteChatServer interface { + Send(*RouteNote) error + Recv() (*RouteNote, error) + grpc.ServerStream +} + +type routeGuideRouteChatServer struct { + grpc.ServerStream +} + +func (x *routeGuideRouteChatServer) Send(m *RouteNote) error { + return x.ServerStream.SendMsg(m) +} + +func (x *routeGuideRouteChatServer) Recv() (*RouteNote, error) { + m := new(RouteNote) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + 
return m, nil +} + +var _RouteGuide_serviceDesc = grpc.ServiceDesc{ + ServiceName: "routeguide.RouteGuide", + HandlerType: (*RouteGuideServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetFeature", + Handler: _RouteGuide_GetFeature_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "ListFeatures", + Handler: _RouteGuide_ListFeatures_Handler, + ServerStreams: true, + }, + { + StreamName: "RecordRoute", + Handler: _RouteGuide_RecordRoute_Handler, + ClientStreams: true, + }, + { + StreamName: "RouteChat", + Handler: _RouteGuide_RouteChat_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "route_guide.proto", +} + +func init() { proto.RegisterFile("route_guide.proto", fileDescriptor_route_guide_dc79de2de4c66c19) } + +var fileDescriptor_route_guide_dc79de2de4c66c19 = []byte{ + // 404 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x53, 0xdd, 0xca, 0xd3, 0x40, + 0x10, 0xfd, 0x36, 0x7e, 0x9f, 0x6d, 0x26, 0x11, 0xe9, 0x88, 0x10, 0xa2, 0xa0, 0x8d, 0x37, 0xbd, + 0x31, 0x94, 0x0a, 0x5e, 0x56, 0x6c, 0xc1, 0xde, 0x14, 0xa9, 0xb1, 0xf7, 0x65, 0x4d, 0xc6, 0x74, + 0x61, 0x93, 0x0d, 0xc9, 0x06, 0xf4, 0x01, 0x7c, 0x02, 0x5f, 0x58, 0xb2, 0x49, 0xda, 0x54, 0x5b, + 0xbc, 0xdb, 0x39, 0x73, 0xce, 0xfc, 0x9c, 0x61, 0x61, 0x52, 0xaa, 0x5a, 0xd3, 0x21, 0xad, 0x45, + 0x42, 0x61, 0x51, 0x2a, 0xad, 0x10, 0x0c, 0x64, 0x90, 0xe0, 0x23, 0x3c, 0xec, 0x94, 0xc8, 0x35, + 0xfa, 0x30, 0x96, 0x5c, 0x0b, 0x5d, 0x27, 0xe4, 0xb1, 0xd7, 0x6c, 0xf6, 0x10, 0x9d, 0x62, 0x7c, + 0x09, 0xb6, 0x54, 0x79, 0xda, 0x26, 0x2d, 0x93, 0x3c, 0x03, 0xc1, 0x17, 0xb0, 0x23, 0x8a, 0x35, + 0xcf, 0x53, 0x49, 0x38, 0x05, 0x4b, 0x2a, 0x53, 0xc0, 0x59, 0x4c, 0xc2, 0x73, 0xa3, 0xd0, 0x74, + 0x89, 0x2c, 0xa9, 0x1a, 0xca, 0x51, 0x98, 0x32, 0xd7, 0x29, 0x47, 0x11, 0x6c, 0x61, 0xf4, 0x89, + 0xb8, 0xae, 0x4b, 0x42, 0x84, 0xfb, 0x9c, 0x67, 0xed, 0x4c, 0x76, 0x64, 0xde, 0xf8, 0x16, 0xc6, + 0x52, 0xc5, 0x5c, 0x0b, 0x95, 0xdf, 0xae, 0x73, 0xa2, 0x04, 0x7b, 0xb0, 0xa3, 0x26, 0xfb, 0x59, + 0xe9, 0x4b, 0x2d, 0xfb, 0xaf, 0x16, 0x3d, 0x18, 0x65, 0x54, 0x55, 0x3c, 0x6d, 0x17, 0xb7, 0xa3, + 0x3e, 0x0c, 0x7e, 0x33, 0x70, 0x4d, 0xd9, 0xaf, 0x75, 0x96, 0xf1, 0xf2, 0x27, 0xbe, 0x02, 0xa7, + 0x68, 0xd4, 0x87, 0x58, 0xd5, 0xb9, 0xee, 0x4c, 0x04, 0x03, 0xad, 0x1b, 0x04, 0xdf, 0xc0, 0x93, + 0xef, 0xed, 0x56, 0x1d, 0xa5, 0xb5, 0xd2, 0xed, 0xc0, 0x96, 0xe4, 0xc3, 0x38, 0x11, 0x95, 0xe6, + 0x79, 0x4c, 0xde, 0xa3, 0xf6, 0x0e, 0x7d, 0x8c, 0x53, 0x70, 0x49, 0xf2, 0xa2, 0xa2, 0xe4, 0xa0, + 0x45, 0x46, 0xde, 0xbd, 0xc9, 0x3b, 0x1d, 0xb6, 0x17, 0x19, 0x2d, 0x7e, 0x59, 0x00, 0x66, 0xaa, + 0x4d, 0xb3, 0x0e, 0xbe, 0x07, 0xd8, 0x90, 0xee, 0xbd, 0xfc, 0x77, 0x53, 0xff, 0xd9, 0x10, 0xea, + 0x78, 0xc1, 0x1d, 0x2e, 0xc1, 0xdd, 0x8a, 0xaa, 0x17, 0x56, 0xf8, 0x7c, 0x48, 0x3b, 0x5d, 0xfb, + 0x86, 0x7a, 0xce, 0x70, 0x09, 0x4e, 0x44, 0xb1, 0x2a, 0x13, 0x33, 0xcb, 0xb5, 0xc6, 0xde, 0x45, + 0xc5, 0x81, 0x8f, 0xc1, 0xdd, 0x8c, 0xe1, 0x87, 0xee, 0x64, 0xeb, 0x23, 0xd7, 0x7f, 0x35, 0xef, + 0x2f, 0xe9, 0x5f, 0x87, 0x1b, 0xf9, 0x9c, 0xad, 0xe6, 0xf0, 0x42, 0xa8, 0x30, 0x2d, 0x8b, 0x38, + 0xa4, 0x1f, 0x3c, 0x2b, 0x24, 0x55, 0x03, 0xfa, 0xea, 0xe9, 0xd9, 0xa3, 0x5d, 0xf3, 0x27, 0x76, + 0xec, 0xdb, 0x63, 0xf3, 0x39, 0xde, 0xfd, 0x09, 0x00, 0x00, 0xff, 0xff, 0xc8, 0xe4, 0xef, 0xe6, + 0x31, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/examples/route_guide/server/server.go b/vendor/google.golang.org/grpc/examples/route_guide/server/server.go new file mode 100644 index 
0000000..b7dc66c --- /dev/null +++ b/vendor/google.golang.org/grpc/examples/route_guide/server/server.go @@ -0,0 +1,848 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc -I ../routeguide --go_out=plugins=grpc:../routeguide ../routeguide/route_guide.proto + +// Package main implements a simple gRPC server that demonstrates how to use gRPC-Go libraries +// to perform unary, client streaming, server streaming and full duplex RPCs. +// +// It implements the route guide service whose definition can be found in routeguide/route_guide.proto. +package main + +import ( + "context" + "encoding/json" + "flag" + "fmt" + "io" + "io/ioutil" + "log" + "math" + "net" + "sync" + "time" + + "google.golang.org/grpc" + + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/testdata" + + "github.com/golang/protobuf/proto" + + pb "google.golang.org/grpc/examples/route_guide/routeguide" +) + +var ( + tls = flag.Bool("tls", false, "Connection uses TLS if true, else plain TCP") + certFile = flag.String("cert_file", "", "The TLS cert file") + keyFile = flag.String("key_file", "", "The TLS key file") + jsonDBFile = flag.String("json_db_file", "", "A json file containing a list of features") + port = flag.Int("port", 10000, "The server port") +) + +type routeGuideServer struct { + savedFeatures []*pb.Feature // read-only after initialized + + mu sync.Mutex // protects routeNotes + routeNotes map[string][]*pb.RouteNote +} + +// GetFeature returns the feature at the given point. +func (s *routeGuideServer) GetFeature(ctx context.Context, point *pb.Point) (*pb.Feature, error) { + for _, feature := range s.savedFeatures { + if proto.Equal(feature.Location, point) { + return feature, nil + } + } + // No feature was found, return an unnamed feature + return &pb.Feature{Location: point}, nil +} + +// ListFeatures lists all features contained within the given bounding Rectangle. +func (s *routeGuideServer) ListFeatures(rect *pb.Rectangle, stream pb.RouteGuide_ListFeaturesServer) error { + for _, feature := range s.savedFeatures { + if inRange(feature.Location, rect) { + if err := stream.Send(feature); err != nil { + return err + } + } + } + return nil +} + +// RecordRoute records a route composited of a sequence of points. +// +// It gets a stream of points, and responds with statistics about the "trip": +// number of points, number of known features visited, total distance traveled, and +// total time spent. 
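+// The implementation reads points until io.EOF, counts matches against
+// savedFeatures, and sums calcDistance between consecutive points before
+// replying with SendAndClose.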
+func (s *routeGuideServer) RecordRoute(stream pb.RouteGuide_RecordRouteServer) error { + var pointCount, featureCount, distance int32 + var lastPoint *pb.Point + startTime := time.Now() + for { + point, err := stream.Recv() + if err == io.EOF { + endTime := time.Now() + return stream.SendAndClose(&pb.RouteSummary{ + PointCount: pointCount, + FeatureCount: featureCount, + Distance: distance, + ElapsedTime: int32(endTime.Sub(startTime).Seconds()), + }) + } + if err != nil { + return err + } + pointCount++ + for _, feature := range s.savedFeatures { + if proto.Equal(feature.Location, point) { + featureCount++ + } + } + if lastPoint != nil { + distance += calcDistance(lastPoint, point) + } + lastPoint = point + } +} + +// RouteChat receives a stream of message/location pairs, and responds with a stream of all +// previous messages at each of those locations. +func (s *routeGuideServer) RouteChat(stream pb.RouteGuide_RouteChatServer) error { + for { + in, err := stream.Recv() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + key := serialize(in.Location) + + s.mu.Lock() + s.routeNotes[key] = append(s.routeNotes[key], in) + // Note: this copy prevents blocking other clients while serving this one. + // We don't need to do a deep copy, because elements in the slice are + // insert-only and never modified. + rn := make([]*pb.RouteNote, len(s.routeNotes[key])) + copy(rn, s.routeNotes[key]) + s.mu.Unlock() + + for _, note := range rn { + if err := stream.Send(note); err != nil { + return err + } + } + } +} + +// loadFeatures loads features from a JSON file. +func (s *routeGuideServer) loadFeatures(filePath string) { + var data []byte + if filePath != "" { + var err error + data, err = ioutil.ReadFile(filePath) + if err != nil { + log.Fatalf("Failed to load default features: %v", err) + } + } else { + data = exampleData + } + if err := json.Unmarshal(data, &s.savedFeatures); err != nil { + log.Fatalf("Failed to load default features: %v", err) + } +} + +func toRadians(num float64) float64 { + return num * math.Pi / float64(180) +} + +// calcDistance calculates the distance between two points using the "haversine" formula. +// The formula is based on http://mathforum.org/library/drmath/view/51879.html. 
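+// With both points converted from E7 integers to radians, it computes
+//   a = sin^2(dlat/2) + cos(lat1)*cos(lat2)*sin^2(dlng/2)
+//   c = 2*atan2(sqrt(a), sqrt(1-a))
+// and returns R*c, where R = 6371000 m is the mean earth radius used below.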
+func calcDistance(p1 *pb.Point, p2 *pb.Point) int32 { + const CordFactor float64 = 1e7 + const R = float64(6371000) // earth radius in metres + lat1 := toRadians(float64(p1.Latitude) / CordFactor) + lat2 := toRadians(float64(p2.Latitude) / CordFactor) + lng1 := toRadians(float64(p1.Longitude) / CordFactor) + lng2 := toRadians(float64(p2.Longitude) / CordFactor) + dlat := lat2 - lat1 + dlng := lng2 - lng1 + + a := math.Sin(dlat/2)*math.Sin(dlat/2) + + math.Cos(lat1)*math.Cos(lat2)* + math.Sin(dlng/2)*math.Sin(dlng/2) + c := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a)) + + distance := R * c + return int32(distance) +} + +func inRange(point *pb.Point, rect *pb.Rectangle) bool { + left := math.Min(float64(rect.Lo.Longitude), float64(rect.Hi.Longitude)) + right := math.Max(float64(rect.Lo.Longitude), float64(rect.Hi.Longitude)) + top := math.Max(float64(rect.Lo.Latitude), float64(rect.Hi.Latitude)) + bottom := math.Min(float64(rect.Lo.Latitude), float64(rect.Hi.Latitude)) + + if float64(point.Longitude) >= left && + float64(point.Longitude) <= right && + float64(point.Latitude) >= bottom && + float64(point.Latitude) <= top { + return true + } + return false +} + +func serialize(point *pb.Point) string { + return fmt.Sprintf("%d %d", point.Latitude, point.Longitude) +} + +func newServer() *routeGuideServer { + s := &routeGuideServer{routeNotes: make(map[string][]*pb.RouteNote)} + s.loadFeatures(*jsonDBFile) + return s +} + +func main() { + flag.Parse() + lis, err := net.Listen("tcp", fmt.Sprintf("localhost:%d", *port)) + if err != nil { + log.Fatalf("failed to listen: %v", err) + } + var opts []grpc.ServerOption + if *tls { + if *certFile == "" { + *certFile = testdata.Path("server1.pem") + } + if *keyFile == "" { + *keyFile = testdata.Path("server1.key") + } + creds, err := credentials.NewServerTLSFromFile(*certFile, *keyFile) + if err != nil { + log.Fatalf("Failed to generate credentials %v", err) + } + opts = []grpc.ServerOption{grpc.Creds(creds)} + } + grpcServer := grpc.NewServer(opts...) + pb.RegisterRouteGuideServer(grpcServer, newServer()) + grpcServer.Serve(lis) +} + +// exampleData is a copy of testdata/route_guide_db.json. It's to avoid +// specifying file path with `go run`. +var exampleData = []byte(`[{ + "location": { + "latitude": 407838351, + "longitude": -746143763 + }, + "name": "Patriots Path, Mendham, NJ 07945, USA" +}, { + "location": { + "latitude": 408122808, + "longitude": -743999179 + }, + "name": "101 New Jersey 10, Whippany, NJ 07981, USA" +}, { + "location": { + "latitude": 413628156, + "longitude": -749015468 + }, + "name": "U.S. 
6, Shohola, PA 18458, USA" +}, { + "location": { + "latitude": 419999544, + "longitude": -740371136 + }, + "name": "5 Conners Road, Kingston, NY 12401, USA" +}, { + "location": { + "latitude": 414008389, + "longitude": -743951297 + }, + "name": "Mid Hudson Psychiatric Center, New Hampton, NY 10958, USA" +}, { + "location": { + "latitude": 419611318, + "longitude": -746524769 + }, + "name": "287 Flugertown Road, Livingston Manor, NY 12758, USA" +}, { + "location": { + "latitude": 406109563, + "longitude": -742186778 + }, + "name": "4001 Tremley Point Road, Linden, NJ 07036, USA" +}, { + "location": { + "latitude": 416802456, + "longitude": -742370183 + }, + "name": "352 South Mountain Road, Wallkill, NY 12589, USA" +}, { + "location": { + "latitude": 412950425, + "longitude": -741077389 + }, + "name": "Bailey Turn Road, Harriman, NY 10926, USA" +}, { + "location": { + "latitude": 412144655, + "longitude": -743949739 + }, + "name": "193-199 Wawayanda Road, Hewitt, NJ 07421, USA" +}, { + "location": { + "latitude": 415736605, + "longitude": -742847522 + }, + "name": "406-496 Ward Avenue, Pine Bush, NY 12566, USA" +}, { + "location": { + "latitude": 413843930, + "longitude": -740501726 + }, + "name": "162 Merrill Road, Highland Mills, NY 10930, USA" +}, { + "location": { + "latitude": 410873075, + "longitude": -744459023 + }, + "name": "Clinton Road, West Milford, NJ 07480, USA" +}, { + "location": { + "latitude": 412346009, + "longitude": -744026814 + }, + "name": "16 Old Brook Lane, Warwick, NY 10990, USA" +}, { + "location": { + "latitude": 402948455, + "longitude": -747903913 + }, + "name": "3 Drake Lane, Pennington, NJ 08534, USA" +}, { + "location": { + "latitude": 406337092, + "longitude": -740122226 + }, + "name": "6324 8th Avenue, Brooklyn, NY 11220, USA" +}, { + "location": { + "latitude": 406421967, + "longitude": -747727624 + }, + "name": "1 Merck Access Road, Whitehouse Station, NJ 08889, USA" +}, { + "location": { + "latitude": 416318082, + "longitude": -749677716 + }, + "name": "78-98 Schalck Road, Narrowsburg, NY 12764, USA" +}, { + "location": { + "latitude": 415301720, + "longitude": -748416257 + }, + "name": "282 Lakeview Drive Road, Highland Lake, NY 12743, USA" +}, { + "location": { + "latitude": 402647019, + "longitude": -747071791 + }, + "name": "330 Evelyn Avenue, Hamilton Township, NJ 08619, USA" +}, { + "location": { + "latitude": 412567807, + "longitude": -741058078 + }, + "name": "New York State Reference Route 987E, Southfields, NY 10975, USA" +}, { + "location": { + "latitude": 416855156, + "longitude": -744420597 + }, + "name": "103-271 Tempaloni Road, Ellenville, NY 12428, USA" +}, { + "location": { + "latitude": 404663628, + "longitude": -744820157 + }, + "name": "1300 Airport Road, North Brunswick Township, NJ 08902, USA" +}, { + "location": { + "latitude": 407113723, + "longitude": -749746483 + }, + "name": "" +}, { + "location": { + "latitude": 402133926, + "longitude": -743613249 + }, + "name": "" +}, { + "location": { + "latitude": 400273442, + "longitude": -741220915 + }, + "name": "" +}, { + "location": { + "latitude": 411236786, + "longitude": -744070769 + }, + "name": "" +}, { + "location": { + "latitude": 411633782, + "longitude": -746784970 + }, + "name": "211-225 Plains Road, Augusta, NJ 07822, USA" +}, { + "location": { + "latitude": 415830701, + "longitude": -742952812 + }, + "name": "" +}, { + "location": { + "latitude": 413447164, + "longitude": -748712898 + }, + "name": "165 Pedersen Ridge Road, Milford, PA 18337, USA" +}, { + "location": { + 
"latitude": 405047245, + "longitude": -749800722 + }, + "name": "100-122 Locktown Road, Frenchtown, NJ 08825, USA" +}, { + "location": { + "latitude": 418858923, + "longitude": -746156790 + }, + "name": "" +}, { + "location": { + "latitude": 417951888, + "longitude": -748484944 + }, + "name": "650-652 Willi Hill Road, Swan Lake, NY 12783, USA" +}, { + "location": { + "latitude": 407033786, + "longitude": -743977337 + }, + "name": "26 East 3rd Street, New Providence, NJ 07974, USA" +}, { + "location": { + "latitude": 417548014, + "longitude": -740075041 + }, + "name": "" +}, { + "location": { + "latitude": 410395868, + "longitude": -744972325 + }, + "name": "" +}, { + "location": { + "latitude": 404615353, + "longitude": -745129803 + }, + "name": "" +}, { + "location": { + "latitude": 406589790, + "longitude": -743560121 + }, + "name": "611 Lawrence Avenue, Westfield, NJ 07090, USA" +}, { + "location": { + "latitude": 414653148, + "longitude": -740477477 + }, + "name": "18 Lannis Avenue, New Windsor, NY 12553, USA" +}, { + "location": { + "latitude": 405957808, + "longitude": -743255336 + }, + "name": "82-104 Amherst Avenue, Colonia, NJ 07067, USA" +}, { + "location": { + "latitude": 411733589, + "longitude": -741648093 + }, + "name": "170 Seven Lakes Drive, Sloatsburg, NY 10974, USA" +}, { + "location": { + "latitude": 412676291, + "longitude": -742606606 + }, + "name": "1270 Lakes Road, Monroe, NY 10950, USA" +}, { + "location": { + "latitude": 409224445, + "longitude": -748286738 + }, + "name": "509-535 Alphano Road, Great Meadows, NJ 07838, USA" +}, { + "location": { + "latitude": 406523420, + "longitude": -742135517 + }, + "name": "652 Garden Street, Elizabeth, NJ 07202, USA" +}, { + "location": { + "latitude": 401827388, + "longitude": -740294537 + }, + "name": "349 Sea Spray Court, Neptune City, NJ 07753, USA" +}, { + "location": { + "latitude": 410564152, + "longitude": -743685054 + }, + "name": "13-17 Stanley Street, West Milford, NJ 07480, USA" +}, { + "location": { + "latitude": 408472324, + "longitude": -740726046 + }, + "name": "47 Industrial Avenue, Teterboro, NJ 07608, USA" +}, { + "location": { + "latitude": 412452168, + "longitude": -740214052 + }, + "name": "5 White Oak Lane, Stony Point, NY 10980, USA" +}, { + "location": { + "latitude": 409146138, + "longitude": -746188906 + }, + "name": "Berkshire Valley Management Area Trail, Jefferson, NJ, USA" +}, { + "location": { + "latitude": 404701380, + "longitude": -744781745 + }, + "name": "1007 Jersey Avenue, New Brunswick, NJ 08901, USA" +}, { + "location": { + "latitude": 409642566, + "longitude": -746017679 + }, + "name": "6 East Emerald Isle Drive, Lake Hopatcong, NJ 07849, USA" +}, { + "location": { + "latitude": 408031728, + "longitude": -748645385 + }, + "name": "1358-1474 New Jersey 57, Port Murray, NJ 07865, USA" +}, { + "location": { + "latitude": 413700272, + "longitude": -742135189 + }, + "name": "367 Prospect Road, Chester, NY 10918, USA" +}, { + "location": { + "latitude": 404310607, + "longitude": -740282632 + }, + "name": "10 Simon Lake Drive, Atlantic Highlands, NJ 07716, USA" +}, { + "location": { + "latitude": 409319800, + "longitude": -746201391 + }, + "name": "11 Ward Street, Mount Arlington, NJ 07856, USA" +}, { + "location": { + "latitude": 406685311, + "longitude": -742108603 + }, + "name": "300-398 Jefferson Avenue, Elizabeth, NJ 07201, USA" +}, { + "location": { + "latitude": 419018117, + "longitude": -749142781 + }, + "name": "43 Dreher Road, Roscoe, NY 12776, USA" +}, { + "location": { + 
"latitude": 412856162, + "longitude": -745148837 + }, + "name": "Swan Street, Pine Island, NY 10969, USA" +}, { + "location": { + "latitude": 416560744, + "longitude": -746721964 + }, + "name": "66 Pleasantview Avenue, Monticello, NY 12701, USA" +}, { + "location": { + "latitude": 405314270, + "longitude": -749836354 + }, + "name": "" +}, { + "location": { + "latitude": 414219548, + "longitude": -743327440 + }, + "name": "" +}, { + "location": { + "latitude": 415534177, + "longitude": -742900616 + }, + "name": "565 Winding Hills Road, Montgomery, NY 12549, USA" +}, { + "location": { + "latitude": 406898530, + "longitude": -749127080 + }, + "name": "231 Rocky Run Road, Glen Gardner, NJ 08826, USA" +}, { + "location": { + "latitude": 407586880, + "longitude": -741670168 + }, + "name": "100 Mount Pleasant Avenue, Newark, NJ 07104, USA" +}, { + "location": { + "latitude": 400106455, + "longitude": -742870190 + }, + "name": "517-521 Huntington Drive, Manchester Township, NJ 08759, USA" +}, { + "location": { + "latitude": 400066188, + "longitude": -746793294 + }, + "name": "" +}, { + "location": { + "latitude": 418803880, + "longitude": -744102673 + }, + "name": "40 Mountain Road, Napanoch, NY 12458, USA" +}, { + "location": { + "latitude": 414204288, + "longitude": -747895140 + }, + "name": "" +}, { + "location": { + "latitude": 414777405, + "longitude": -740615601 + }, + "name": "" +}, { + "location": { + "latitude": 415464475, + "longitude": -747175374 + }, + "name": "48 North Road, Forestburgh, NY 12777, USA" +}, { + "location": { + "latitude": 404062378, + "longitude": -746376177 + }, + "name": "" +}, { + "location": { + "latitude": 405688272, + "longitude": -749285130 + }, + "name": "" +}, { + "location": { + "latitude": 400342070, + "longitude": -748788996 + }, + "name": "" +}, { + "location": { + "latitude": 401809022, + "longitude": -744157964 + }, + "name": "" +}, { + "location": { + "latitude": 404226644, + "longitude": -740517141 + }, + "name": "9 Thompson Avenue, Leonardo, NJ 07737, USA" +}, { + "location": { + "latitude": 410322033, + "longitude": -747871659 + }, + "name": "" +}, { + "location": { + "latitude": 407100674, + "longitude": -747742727 + }, + "name": "" +}, { + "location": { + "latitude": 418811433, + "longitude": -741718005 + }, + "name": "213 Bush Road, Stone Ridge, NY 12484, USA" +}, { + "location": { + "latitude": 415034302, + "longitude": -743850945 + }, + "name": "" +}, { + "location": { + "latitude": 411349992, + "longitude": -743694161 + }, + "name": "" +}, { + "location": { + "latitude": 404839914, + "longitude": -744759616 + }, + "name": "1-17 Bergen Court, New Brunswick, NJ 08901, USA" +}, { + "location": { + "latitude": 414638017, + "longitude": -745957854 + }, + "name": "35 Oakland Valley Road, Cuddebackville, NY 12729, USA" +}, { + "location": { + "latitude": 412127800, + "longitude": -740173578 + }, + "name": "" +}, { + "location": { + "latitude": 401263460, + "longitude": -747964303 + }, + "name": "" +}, { + "location": { + "latitude": 412843391, + "longitude": -749086026 + }, + "name": "" +}, { + "location": { + "latitude": 418512773, + "longitude": -743067823 + }, + "name": "" +}, { + "location": { + "latitude": 404318328, + "longitude": -740835638 + }, + "name": "42-102 Main Street, Belford, NJ 07718, USA" +}, { + "location": { + "latitude": 419020746, + "longitude": -741172328 + }, + "name": "" +}, { + "location": { + "latitude": 404080723, + "longitude": -746119569 + }, + "name": "" +}, { + "location": { + "latitude": 401012643, + "longitude": 
-744035134 + }, + "name": "" +}, { + "location": { + "latitude": 404306372, + "longitude": -741079661 + }, + "name": "" +}, { + "location": { + "latitude": 403966326, + "longitude": -748519297 + }, + "name": "" +}, { + "location": { + "latitude": 405002031, + "longitude": -748407866 + }, + "name": "" +}, { + "location": { + "latitude": 409532885, + "longitude": -742200683 + }, + "name": "" +}, { + "location": { + "latitude": 416851321, + "longitude": -742674555 + }, + "name": "" +}, { + "location": { + "latitude": 406411633, + "longitude": -741722051 + }, + "name": "3387 Richmond Terrace, Staten Island, NY 10303, USA" +}, { + "location": { + "latitude": 413069058, + "longitude": -744597778 + }, + "name": "261 Van Sickle Road, Goshen, NY 10924, USA" +}, { + "location": { + "latitude": 418465462, + "longitude": -746859398 + }, + "name": "" +}, { + "location": { + "latitude": 411733222, + "longitude": -744228360 + }, + "name": "" +}, { + "location": { + "latitude": 410248224, + "longitude": -747127767 + }, + "name": "3 Hasta Way, Newton, NJ 07860, USA" +}]`) diff --git a/vendor/google.golang.org/grpc/grpclog/glogger/glogger.go b/vendor/google.golang.org/grpc/grpclog/glogger/glogger.go new file mode 100644 index 0000000..e5498f8 --- /dev/null +++ b/vendor/google.golang.org/grpc/grpclog/glogger/glogger.go @@ -0,0 +1,86 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package glogger defines glog-based logging for grpc. +// Importing this package will install glog as the logger used by grpclog. +package glogger + +import ( + "fmt" + + "github.com/golang/glog" + "google.golang.org/grpc/grpclog" +) + +func init() { + grpclog.SetLoggerV2(&glogger{}) +} + +type glogger struct{} + +func (g *glogger) Info(args ...interface{}) { + glog.InfoDepth(2, args...) +} + +func (g *glogger) Infoln(args ...interface{}) { + glog.InfoDepth(2, fmt.Sprintln(args...)) +} + +func (g *glogger) Infof(format string, args ...interface{}) { + glog.InfoDepth(2, fmt.Sprintf(format, args...)) +} + +func (g *glogger) Warning(args ...interface{}) { + glog.WarningDepth(2, args...) +} + +func (g *glogger) Warningln(args ...interface{}) { + glog.WarningDepth(2, fmt.Sprintln(args...)) +} + +func (g *glogger) Warningf(format string, args ...interface{}) { + glog.WarningDepth(2, fmt.Sprintf(format, args...)) +} + +func (g *glogger) Error(args ...interface{}) { + glog.ErrorDepth(2, args...) +} + +func (g *glogger) Errorln(args ...interface{}) { + glog.ErrorDepth(2, fmt.Sprintln(args...)) +} + +func (g *glogger) Errorf(format string, args ...interface{}) { + glog.ErrorDepth(2, fmt.Sprintf(format, args...)) +} + +func (g *glogger) Fatal(args ...interface{}) { + glog.FatalDepth(2, args...) 
+} + +func (g *glogger) Fatalln(args ...interface{}) { + glog.FatalDepth(2, fmt.Sprintln(args...)) +} + +func (g *glogger) Fatalf(format string, args ...interface{}) { + glog.FatalDepth(2, fmt.Sprintf(format, args...)) +} + +func (g *glogger) V(l int) bool { + return bool(glog.V(glog.Level(l))) +} diff --git a/vendor/google.golang.org/grpc/grpclog/grpclog.go b/vendor/google.golang.org/grpc/grpclog/grpclog.go new file mode 100644 index 0000000..51bb945 --- /dev/null +++ b/vendor/google.golang.org/grpc/grpclog/grpclog.go @@ -0,0 +1,126 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package grpclog defines logging for grpc. +// +// All logs in transport and grpclb packages only go to verbose level 2. +// All logs in other packages in grpc are logged in spite of the verbosity level. +// +// In the default logger, +// severity level can be set by environment variable GRPC_GO_LOG_SEVERITY_LEVEL, +// verbosity level can be set by GRPC_GO_LOG_VERBOSITY_LEVEL. +package grpclog // import "google.golang.org/grpc/grpclog" + +import "os" + +var logger = newLoggerV2() + +// V reports whether verbosity level l is at least the requested verbose level. +func V(l int) bool { + return logger.V(l) +} + +// Info logs to the INFO log. +func Info(args ...interface{}) { + logger.Info(args...) +} + +// Infof logs to the INFO log. Arguments are handled in the manner of fmt.Printf. +func Infof(format string, args ...interface{}) { + logger.Infof(format, args...) +} + +// Infoln logs to the INFO log. Arguments are handled in the manner of fmt.Println. +func Infoln(args ...interface{}) { + logger.Infoln(args...) +} + +// Warning logs to the WARNING log. +func Warning(args ...interface{}) { + logger.Warning(args...) +} + +// Warningf logs to the WARNING log. Arguments are handled in the manner of fmt.Printf. +func Warningf(format string, args ...interface{}) { + logger.Warningf(format, args...) +} + +// Warningln logs to the WARNING log. Arguments are handled in the manner of fmt.Println. +func Warningln(args ...interface{}) { + logger.Warningln(args...) +} + +// Error logs to the ERROR log. +func Error(args ...interface{}) { + logger.Error(args...) +} + +// Errorf logs to the ERROR log. Arguments are handled in the manner of fmt.Printf. +func Errorf(format string, args ...interface{}) { + logger.Errorf(format, args...) +} + +// Errorln logs to the ERROR log. Arguments are handled in the manner of fmt.Println. +func Errorln(args ...interface{}) { + logger.Errorln(args...) +} + +// Fatal logs to the FATAL log. Arguments are handled in the manner of fmt.Print. +// It calls os.Exit() with exit code 1. +func Fatal(args ...interface{}) { + logger.Fatal(args...) + // Make sure fatal logs will exit. + os.Exit(1) +} + +// Fatalf logs to the FATAL log. Arguments are handled in the manner of fmt.Printf. +// It calles os.Exit() with exit code 1. +func Fatalf(format string, args ...interface{}) { + logger.Fatalf(format, args...) 
+ // Make sure fatal logs will exit. + os.Exit(1) +} + +// Fatalln logs to the FATAL log. Arguments are handled in the manner of fmt.Println. +// It calle os.Exit()) with exit code 1. +func Fatalln(args ...interface{}) { + logger.Fatalln(args...) + // Make sure fatal logs will exit. + os.Exit(1) +} + +// Print prints to the logger. Arguments are handled in the manner of fmt.Print. +// +// Deprecated: use Info. +func Print(args ...interface{}) { + logger.Info(args...) +} + +// Printf prints to the logger. Arguments are handled in the manner of fmt.Printf. +// +// Deprecated: use Infof. +func Printf(format string, args ...interface{}) { + logger.Infof(format, args...) +} + +// Println prints to the logger. Arguments are handled in the manner of fmt.Println. +// +// Deprecated: use Infoln. +func Println(args ...interface{}) { + logger.Infoln(args...) +} diff --git a/vendor/google.golang.org/grpc/grpclog/logger.go b/vendor/google.golang.org/grpc/grpclog/logger.go new file mode 100644 index 0000000..097494f --- /dev/null +++ b/vendor/google.golang.org/grpc/grpclog/logger.go @@ -0,0 +1,85 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpclog + +// Logger mimics golang's standard Logger as an interface. +// +// Deprecated: use LoggerV2. +type Logger interface { + Fatal(args ...interface{}) + Fatalf(format string, args ...interface{}) + Fatalln(args ...interface{}) + Print(args ...interface{}) + Printf(format string, args ...interface{}) + Println(args ...interface{}) +} + +// SetLogger sets the logger that is used in grpc. Call only from +// init() functions. +// +// Deprecated: use SetLoggerV2. +func SetLogger(l Logger) { + logger = &loggerWrapper{Logger: l} +} + +// loggerWrapper wraps Logger into a LoggerV2. +type loggerWrapper struct { + Logger +} + +func (g *loggerWrapper) Info(args ...interface{}) { + g.Logger.Print(args...) +} + +func (g *loggerWrapper) Infoln(args ...interface{}) { + g.Logger.Println(args...) +} + +func (g *loggerWrapper) Infof(format string, args ...interface{}) { + g.Logger.Printf(format, args...) +} + +func (g *loggerWrapper) Warning(args ...interface{}) { + g.Logger.Print(args...) +} + +func (g *loggerWrapper) Warningln(args ...interface{}) { + g.Logger.Println(args...) +} + +func (g *loggerWrapper) Warningf(format string, args ...interface{}) { + g.Logger.Printf(format, args...) +} + +func (g *loggerWrapper) Error(args ...interface{}) { + g.Logger.Print(args...) +} + +func (g *loggerWrapper) Errorln(args ...interface{}) { + g.Logger.Println(args...) +} + +func (g *loggerWrapper) Errorf(format string, args ...interface{}) { + g.Logger.Printf(format, args...) +} + +func (g *loggerWrapper) V(l int) bool { + // Returns true for all verbose level. 
+ return true +} diff --git a/vendor/google.golang.org/grpc/grpclog/loggerv2.go b/vendor/google.golang.org/grpc/grpclog/loggerv2.go new file mode 100644 index 0000000..d493257 --- /dev/null +++ b/vendor/google.golang.org/grpc/grpclog/loggerv2.go @@ -0,0 +1,195 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpclog + +import ( + "io" + "io/ioutil" + "log" + "os" + "strconv" +) + +// LoggerV2 does underlying logging work for grpclog. +type LoggerV2 interface { + // Info logs to INFO log. Arguments are handled in the manner of fmt.Print. + Info(args ...interface{}) + // Infoln logs to INFO log. Arguments are handled in the manner of fmt.Println. + Infoln(args ...interface{}) + // Infof logs to INFO log. Arguments are handled in the manner of fmt.Printf. + Infof(format string, args ...interface{}) + // Warning logs to WARNING log. Arguments are handled in the manner of fmt.Print. + Warning(args ...interface{}) + // Warningln logs to WARNING log. Arguments are handled in the manner of fmt.Println. + Warningln(args ...interface{}) + // Warningf logs to WARNING log. Arguments are handled in the manner of fmt.Printf. + Warningf(format string, args ...interface{}) + // Error logs to ERROR log. Arguments are handled in the manner of fmt.Print. + Error(args ...interface{}) + // Errorln logs to ERROR log. Arguments are handled in the manner of fmt.Println. + Errorln(args ...interface{}) + // Errorf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. + Errorf(format string, args ...interface{}) + // Fatal logs to ERROR log. Arguments are handled in the manner of fmt.Print. + // gRPC ensures that all Fatal logs will exit with os.Exit(1). + // Implementations may also call os.Exit() with a non-zero exit code. + Fatal(args ...interface{}) + // Fatalln logs to ERROR log. Arguments are handled in the manner of fmt.Println. + // gRPC ensures that all Fatal logs will exit with os.Exit(1). + // Implementations may also call os.Exit() with a non-zero exit code. + Fatalln(args ...interface{}) + // Fatalf logs to ERROR log. Arguments are handled in the manner of fmt.Printf. + // gRPC ensures that all Fatal logs will exit with os.Exit(1). + // Implementations may also call os.Exit() with a non-zero exit code. + Fatalf(format string, args ...interface{}) + // V reports whether verbosity level l is at least the requested verbose level. + V(l int) bool +} + +// SetLoggerV2 sets logger that is used in grpc to a V2 logger. +// Not mutex-protected, should be called before any gRPC functions. +func SetLoggerV2(l LoggerV2) { + logger = l +} + +const ( + // infoLog indicates Info severity. + infoLog int = iota + // warningLog indicates Warning severity. + warningLog + // errorLog indicates Error severity. + errorLog + // fatalLog indicates Fatal severity. + fatalLog +) + +// severityName contains the string representation of each severity. 
+var severityName = []string{ + infoLog: "INFO", + warningLog: "WARNING", + errorLog: "ERROR", + fatalLog: "FATAL", +} + +// loggerT is the default logger used by grpclog. +type loggerT struct { + m []*log.Logger + v int +} + +// NewLoggerV2 creates a loggerV2 with the provided writers. +// Fatal logs will be written to errorW, warningW, infoW, followed by exit(1). +// Error logs will be written to errorW, warningW and infoW. +// Warning logs will be written to warningW and infoW. +// Info logs will be written to infoW. +func NewLoggerV2(infoW, warningW, errorW io.Writer) LoggerV2 { + return NewLoggerV2WithVerbosity(infoW, warningW, errorW, 0) +} + +// NewLoggerV2WithVerbosity creates a loggerV2 with the provided writers and +// verbosity level. +func NewLoggerV2WithVerbosity(infoW, warningW, errorW io.Writer, v int) LoggerV2 { + var m []*log.Logger + m = append(m, log.New(infoW, severityName[infoLog]+": ", log.LstdFlags)) + m = append(m, log.New(io.MultiWriter(infoW, warningW), severityName[warningLog]+": ", log.LstdFlags)) + ew := io.MultiWriter(infoW, warningW, errorW) // ew will be used for error and fatal. + m = append(m, log.New(ew, severityName[errorLog]+": ", log.LstdFlags)) + m = append(m, log.New(ew, severityName[fatalLog]+": ", log.LstdFlags)) + return &loggerT{m: m, v: v} +} + +// newLoggerV2 creates a loggerV2 to be used as default logger. +// All logs are written to stderr. +func newLoggerV2() LoggerV2 { + errorW := ioutil.Discard + warningW := ioutil.Discard + infoW := ioutil.Discard + + logLevel := os.Getenv("GRPC_GO_LOG_SEVERITY_LEVEL") + switch logLevel { + case "", "ERROR", "error": // If env is unset, set level to ERROR. + errorW = os.Stderr + case "WARNING", "warning": + warningW = os.Stderr + case "INFO", "info": + infoW = os.Stderr + } + + var v int + vLevel := os.Getenv("GRPC_GO_LOG_VERBOSITY_LEVEL") + if vl, err := strconv.Atoi(vLevel); err == nil { + v = vl + } + return NewLoggerV2WithVerbosity(infoW, warningW, errorW, v) +} + +func (g *loggerT) Info(args ...interface{}) { + g.m[infoLog].Print(args...) +} + +func (g *loggerT) Infoln(args ...interface{}) { + g.m[infoLog].Println(args...) +} + +func (g *loggerT) Infof(format string, args ...interface{}) { + g.m[infoLog].Printf(format, args...) +} + +func (g *loggerT) Warning(args ...interface{}) { + g.m[warningLog].Print(args...) +} + +func (g *loggerT) Warningln(args ...interface{}) { + g.m[warningLog].Println(args...) +} + +func (g *loggerT) Warningf(format string, args ...interface{}) { + g.m[warningLog].Printf(format, args...) +} + +func (g *loggerT) Error(args ...interface{}) { + g.m[errorLog].Print(args...) +} + +func (g *loggerT) Errorln(args ...interface{}) { + g.m[errorLog].Println(args...) +} + +func (g *loggerT) Errorf(format string, args ...interface{}) { + g.m[errorLog].Printf(format, args...) +} + +func (g *loggerT) Fatal(args ...interface{}) { + g.m[fatalLog].Fatal(args...) + // No need to call os.Exit() again because log.Logger.Fatal() calls os.Exit(). +} + +func (g *loggerT) Fatalln(args ...interface{}) { + g.m[fatalLog].Fatalln(args...) + // No need to call os.Exit() again because log.Logger.Fatal() calls os.Exit(). +} + +func (g *loggerT) Fatalf(format string, args ...interface{}) { + g.m[fatalLog].Fatalf(format, args...) + // No need to call os.Exit() again because log.Logger.Fatal() calls os.Exit(). 
+} + +func (g *loggerT) V(l int) bool { + return l <= g.v +} diff --git a/vendor/google.golang.org/grpc/health/client.go b/vendor/google.golang.org/grpc/health/client.go new file mode 100644 index 0000000..e15f04c --- /dev/null +++ b/vendor/google.golang.org/grpc/health/client.go @@ -0,0 +1,107 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package health + +import ( + "context" + "fmt" + "io" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + healthpb "google.golang.org/grpc/health/grpc_health_v1" + "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/status" +) + +const maxDelay = 120 * time.Second + +var backoffStrategy = backoff.Exponential{MaxDelay: maxDelay} +var backoffFunc = func(ctx context.Context, retries int) bool { + d := backoffStrategy.Backoff(retries) + timer := time.NewTimer(d) + select { + case <-timer.C: + return true + case <-ctx.Done(): + timer.Stop() + return false + } +} + +func init() { + internal.HealthCheckFunc = clientHealthCheck +} + +func clientHealthCheck(ctx context.Context, newStream func() (interface{}, error), reportHealth func(bool), service string) error { + tryCnt := 0 + +retryConnection: + for { + // Backs off if the connection has failed in some way without receiving a message in the previous retry. + if tryCnt > 0 && !backoffFunc(ctx, tryCnt-1) { + return nil + } + tryCnt++ + + if ctx.Err() != nil { + return nil + } + rawS, err := newStream() + if err != nil { + continue retryConnection + } + + s, ok := rawS.(grpc.ClientStream) + // Ideally, this should never happen. But if it happens, the server is marked as healthy for LBing purposes. + if !ok { + reportHealth(true) + return fmt.Errorf("newStream returned %v (type %T); want grpc.ClientStream", rawS, rawS) + } + + if err = s.SendMsg(&healthpb.HealthCheckRequest{Service: service}); err != nil && err != io.EOF { + // Stream should have been closed, so we can safely continue to create a new stream. + continue retryConnection + } + s.CloseSend() + + resp := new(healthpb.HealthCheckResponse) + for { + err = s.RecvMsg(resp) + + // Reports healthy for the LBing purposes if health check is not implemented in the server. + if status.Code(err) == codes.Unimplemented { + reportHealth(true) + return err + } + + // Reports unhealthy if server's Watch method gives an error other than UNIMPLEMENTED. + if err != nil { + reportHealth(false) + continue retryConnection + } + + // As a message has been received, removes the need for backoff for the next retry by reseting the try count. 
+ tryCnt = 0 + reportHealth(resp.Status == healthpb.HealthCheckResponse_SERVING) + } + } +} diff --git a/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go b/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go new file mode 100644 index 0000000..c2f2c77 --- /dev/null +++ b/vendor/google.golang.org/grpc/health/grpc_health_v1/health.pb.go @@ -0,0 +1,327 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/health/v1/health.proto + +package grpc_health_v1 // import "google.golang.org/grpc/health/grpc_health_v1" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type HealthCheckResponse_ServingStatus int32 + +const ( + HealthCheckResponse_UNKNOWN HealthCheckResponse_ServingStatus = 0 + HealthCheckResponse_SERVING HealthCheckResponse_ServingStatus = 1 + HealthCheckResponse_NOT_SERVING HealthCheckResponse_ServingStatus = 2 + HealthCheckResponse_SERVICE_UNKNOWN HealthCheckResponse_ServingStatus = 3 +) + +var HealthCheckResponse_ServingStatus_name = map[int32]string{ + 0: "UNKNOWN", + 1: "SERVING", + 2: "NOT_SERVING", + 3: "SERVICE_UNKNOWN", +} +var HealthCheckResponse_ServingStatus_value = map[string]int32{ + "UNKNOWN": 0, + "SERVING": 1, + "NOT_SERVING": 2, + "SERVICE_UNKNOWN": 3, +} + +func (x HealthCheckResponse_ServingStatus) String() string { + return proto.EnumName(HealthCheckResponse_ServingStatus_name, int32(x)) +} +func (HealthCheckResponse_ServingStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_health_6b1a06aa67f91efd, []int{1, 0} +} + +type HealthCheckRequest struct { + Service string `protobuf:"bytes,1,opt,name=service,proto3" json:"service,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheckRequest) Reset() { *m = HealthCheckRequest{} } +func (m *HealthCheckRequest) String() string { return proto.CompactTextString(m) } +func (*HealthCheckRequest) ProtoMessage() {} +func (*HealthCheckRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_health_6b1a06aa67f91efd, []int{0} +} +func (m *HealthCheckRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheckRequest.Unmarshal(m, b) +} +func (m *HealthCheckRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheckRequest.Marshal(b, m, deterministic) +} +func (dst *HealthCheckRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheckRequest.Merge(dst, src) +} +func (m *HealthCheckRequest) XXX_Size() int { + return xxx_messageInfo_HealthCheckRequest.Size(m) +} +func (m *HealthCheckRequest) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheckRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheckRequest proto.InternalMessageInfo + +func (m *HealthCheckRequest) GetService() string { + if m != nil { + return m.Service + } + return "" +} + +type HealthCheckResponse struct { + Status HealthCheckResponse_ServingStatus 
`protobuf:"varint,1,opt,name=status,proto3,enum=grpc.health.v1.HealthCheckResponse_ServingStatus" json:"status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *HealthCheckResponse) Reset() { *m = HealthCheckResponse{} } +func (m *HealthCheckResponse) String() string { return proto.CompactTextString(m) } +func (*HealthCheckResponse) ProtoMessage() {} +func (*HealthCheckResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_health_6b1a06aa67f91efd, []int{1} +} +func (m *HealthCheckResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_HealthCheckResponse.Unmarshal(m, b) +} +func (m *HealthCheckResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_HealthCheckResponse.Marshal(b, m, deterministic) +} +func (dst *HealthCheckResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_HealthCheckResponse.Merge(dst, src) +} +func (m *HealthCheckResponse) XXX_Size() int { + return xxx_messageInfo_HealthCheckResponse.Size(m) +} +func (m *HealthCheckResponse) XXX_DiscardUnknown() { + xxx_messageInfo_HealthCheckResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_HealthCheckResponse proto.InternalMessageInfo + +func (m *HealthCheckResponse) GetStatus() HealthCheckResponse_ServingStatus { + if m != nil { + return m.Status + } + return HealthCheckResponse_UNKNOWN +} + +func init() { + proto.RegisterType((*HealthCheckRequest)(nil), "grpc.health.v1.HealthCheckRequest") + proto.RegisterType((*HealthCheckResponse)(nil), "grpc.health.v1.HealthCheckResponse") + proto.RegisterEnum("grpc.health.v1.HealthCheckResponse_ServingStatus", HealthCheckResponse_ServingStatus_name, HealthCheckResponse_ServingStatus_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// HealthClient is the client API for Health service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type HealthClient interface { + // If the requested service is unknown, the call will fail with status + // NOT_FOUND. + Check(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) + // Performs a watch for the serving status of the requested service. + // The server will immediately send back a message indicating the current + // serving status. It will then subsequently send a new message whenever + // the service's serving status changes. + // + // If the requested service is unknown when the call is received, the + // server will send a message setting the serving status to + // SERVICE_UNKNOWN but will *not* terminate the call. If at some + // future point, the serving status of the service becomes known, the + // server will send a new message with the service's serving status. + // + // If the call terminates with status UNIMPLEMENTED, then clients + // should assume this method is not supported and should not retry the + // call. If the call terminates with any other status (including OK), + // clients should retry the call with appropriate exponential backoff. 
+ Watch(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (Health_WatchClient, error) +} + +type healthClient struct { + cc *grpc.ClientConn +} + +func NewHealthClient(cc *grpc.ClientConn) HealthClient { + return &healthClient{cc} +} + +func (c *healthClient) Check(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (*HealthCheckResponse, error) { + out := new(HealthCheckResponse) + err := c.cc.Invoke(ctx, "/grpc.health.v1.Health/Check", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *healthClient) Watch(ctx context.Context, in *HealthCheckRequest, opts ...grpc.CallOption) (Health_WatchClient, error) { + stream, err := c.cc.NewStream(ctx, &_Health_serviceDesc.Streams[0], "/grpc.health.v1.Health/Watch", opts...) + if err != nil { + return nil, err + } + x := &healthWatchClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Health_WatchClient interface { + Recv() (*HealthCheckResponse, error) + grpc.ClientStream +} + +type healthWatchClient struct { + grpc.ClientStream +} + +func (x *healthWatchClient) Recv() (*HealthCheckResponse, error) { + m := new(HealthCheckResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// HealthServer is the server API for Health service. +type HealthServer interface { + // If the requested service is unknown, the call will fail with status + // NOT_FOUND. + Check(context.Context, *HealthCheckRequest) (*HealthCheckResponse, error) + // Performs a watch for the serving status of the requested service. + // The server will immediately send back a message indicating the current + // serving status. It will then subsequently send a new message whenever + // the service's serving status changes. + // + // If the requested service is unknown when the call is received, the + // server will send a message setting the serving status to + // SERVICE_UNKNOWN but will *not* terminate the call. If at some + // future point, the serving status of the service becomes known, the + // server will send a new message with the service's serving status. + // + // If the call terminates with status UNIMPLEMENTED, then clients + // should assume this method is not supported and should not retry the + // call. If the call terminates with any other status (including OK), + // clients should retry the call with appropriate exponential backoff. 
+ Watch(*HealthCheckRequest, Health_WatchServer) error +} + +func RegisterHealthServer(s *grpc.Server, srv HealthServer) { + s.RegisterService(&_Health_serviceDesc, srv) +} + +func _Health_Check_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(HealthCheckRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(HealthServer).Check(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.health.v1.Health/Check", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(HealthServer).Check(ctx, req.(*HealthCheckRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _Health_Watch_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(HealthCheckRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(HealthServer).Watch(m, &healthWatchServer{stream}) +} + +type Health_WatchServer interface { + Send(*HealthCheckResponse) error + grpc.ServerStream +} + +type healthWatchServer struct { + grpc.ServerStream +} + +func (x *healthWatchServer) Send(m *HealthCheckResponse) error { + return x.ServerStream.SendMsg(m) +} + +var _Health_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.health.v1.Health", + HandlerType: (*HealthServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Check", + Handler: _Health_Check_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "Watch", + Handler: _Health_Watch_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/health/v1/health.proto", +} + +func init() { proto.RegisterFile("grpc/health/v1/health.proto", fileDescriptor_health_6b1a06aa67f91efd) } + +var fileDescriptor_health_6b1a06aa67f91efd = []byte{ + // 297 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0x2f, 0x2a, 0x48, + 0xd6, 0xcf, 0x48, 0x4d, 0xcc, 0x29, 0xc9, 0xd0, 0x2f, 0x33, 0x84, 0xb2, 0xf4, 0x0a, 0x8a, 0xf2, + 0x4b, 0xf2, 0x85, 0xf8, 0x40, 0x92, 0x7a, 0x50, 0xa1, 0x32, 0x43, 0x25, 0x3d, 0x2e, 0x21, 0x0f, + 0x30, 0xc7, 0x39, 0x23, 0x35, 0x39, 0x3b, 0x28, 0xb5, 0xb0, 0x34, 0xb5, 0xb8, 0x44, 0x48, 0x82, + 0x8b, 0xbd, 0x38, 0xb5, 0xa8, 0x2c, 0x33, 0x39, 0x55, 0x82, 0x51, 0x81, 0x51, 0x83, 0x33, 0x08, + 0xc6, 0x55, 0xda, 0xc8, 0xc8, 0x25, 0x8c, 0xa2, 0xa1, 0xb8, 0x20, 0x3f, 0xaf, 0x38, 0x55, 0xc8, + 0x93, 0x8b, 0xad, 0xb8, 0x24, 0xb1, 0xa4, 0xb4, 0x18, 0xac, 0x81, 0xcf, 0xc8, 0x50, 0x0f, 0xd5, + 0x22, 0x3d, 0x2c, 0x9a, 0xf4, 0x82, 0x41, 0x86, 0xe6, 0xa5, 0x07, 0x83, 0x35, 0x06, 0x41, 0x0d, + 0x50, 0xf2, 0xe7, 0xe2, 0x45, 0x91, 0x10, 0xe2, 0xe6, 0x62, 0x0f, 0xf5, 0xf3, 0xf6, 0xf3, 0x0f, + 0xf7, 0x13, 0x60, 0x00, 0x71, 0x82, 0x5d, 0x83, 0xc2, 0x3c, 0xfd, 0xdc, 0x05, 0x18, 0x85, 0xf8, + 0xb9, 0xb8, 0xfd, 0xfc, 0x43, 0xe2, 0x61, 0x02, 0x4c, 0x42, 0xc2, 0x5c, 0xfc, 0x60, 0x8e, 0xb3, + 0x6b, 0x3c, 0x4c, 0x0b, 0xb3, 0xd1, 0x3a, 0x46, 0x2e, 0x36, 0x88, 0xf5, 0x42, 0x01, 0x5c, 0xac, + 0x60, 0x27, 0x08, 0x29, 0xe1, 0x75, 0x1f, 0x38, 0x14, 0xa4, 0x94, 0x89, 0xf0, 0x83, 0x50, 0x10, + 0x17, 0x6b, 0x78, 0x62, 0x49, 0x72, 0x06, 0xd5, 0x4c, 0x34, 0x60, 0x74, 0x4a, 0xe4, 0x12, 0xcc, + 0xcc, 0x47, 0x53, 0xea, 0xc4, 0x0d, 0x51, 0x1b, 0x00, 0x8a, 0xc6, 0x00, 0xc6, 0x28, 0x9d, 0xf4, + 0xfc, 0xfc, 0xf4, 0x9c, 0x54, 0xbd, 0xf4, 0xfc, 0x9c, 0xc4, 0xbc, 0x74, 0xbd, 0xfc, 0xa2, 0x74, + 0x7d, 0xe4, 0x78, 0x07, 0xb1, 0xe3, 0x21, 0xec, 0xf8, 0x32, 0xc3, 0x55, 0x4c, 0x7c, 
0xee, 0x20, + 0xd3, 0x20, 0x46, 0xe8, 0x85, 0x19, 0x26, 0xb1, 0x81, 0x93, 0x83, 0x31, 0x20, 0x00, 0x00, 0xff, + 0xff, 0x12, 0x7d, 0x96, 0xcb, 0x2d, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/health/server.go b/vendor/google.golang.org/grpc/health/server.go new file mode 100644 index 0000000..c79f9d2 --- /dev/null +++ b/vendor/google.golang.org/grpc/health/server.go @@ -0,0 +1,165 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate ./regenerate.sh + +// Package health provides a service that exposes server's health and it must be +// imported to enable support for client-side health checks. +package health + +import ( + "context" + "sync" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + healthgrpc "google.golang.org/grpc/health/grpc_health_v1" + healthpb "google.golang.org/grpc/health/grpc_health_v1" + "google.golang.org/grpc/status" +) + +// Server implements `service Health`. +type Server struct { + mu sync.Mutex + // If shutdown is true, it's expected all serving status is NOT_SERVING, and + // will stay in NOT_SERVING. + shutdown bool + // statusMap stores the serving status of the services this Server monitors. + statusMap map[string]healthpb.HealthCheckResponse_ServingStatus + updates map[string]map[healthgrpc.Health_WatchServer]chan healthpb.HealthCheckResponse_ServingStatus +} + +// NewServer returns a new Server. +func NewServer() *Server { + return &Server{ + statusMap: map[string]healthpb.HealthCheckResponse_ServingStatus{"": healthpb.HealthCheckResponse_SERVING}, + updates: make(map[string]map[healthgrpc.Health_WatchServer]chan healthpb.HealthCheckResponse_ServingStatus), + } +} + +// Check implements `service Health`. +func (s *Server) Check(ctx context.Context, in *healthpb.HealthCheckRequest) (*healthpb.HealthCheckResponse, error) { + s.mu.Lock() + defer s.mu.Unlock() + if servingStatus, ok := s.statusMap[in.Service]; ok { + return &healthpb.HealthCheckResponse{ + Status: servingStatus, + }, nil + } + return nil, status.Error(codes.NotFound, "unknown service") +} + +// Watch implements `service Health`. +func (s *Server) Watch(in *healthpb.HealthCheckRequest, stream healthgrpc.Health_WatchServer) error { + service := in.Service + // update channel is used for getting service status updates. + update := make(chan healthpb.HealthCheckResponse_ServingStatus, 1) + s.mu.Lock() + // Puts the initial status to the channel. + if servingStatus, ok := s.statusMap[service]; ok { + update <- servingStatus + } else { + update <- healthpb.HealthCheckResponse_SERVICE_UNKNOWN + } + + // Registers the update channel to the correct place in the updates map. 
+ if _, ok := s.updates[service]; !ok { + s.updates[service] = make(map[healthgrpc.Health_WatchServer]chan healthpb.HealthCheckResponse_ServingStatus) + } + s.updates[service][stream] = update + defer func() { + s.mu.Lock() + delete(s.updates[service], stream) + s.mu.Unlock() + }() + s.mu.Unlock() + + var lastSentStatus healthpb.HealthCheckResponse_ServingStatus = -1 + for { + select { + // Status updated. Sends the up-to-date status to the client. + case servingStatus := <-update: + if lastSentStatus == servingStatus { + continue + } + lastSentStatus = servingStatus + err := stream.Send(&healthpb.HealthCheckResponse{Status: servingStatus}) + if err != nil { + return status.Error(codes.Canceled, "Stream has ended.") + } + // Context done. Removes the update channel from the updates map. + case <-stream.Context().Done(): + return status.Error(codes.Canceled, "Stream has ended.") + } + } +} + +// SetServingStatus is called when need to reset the serving status of a service +// or insert a new service entry into the statusMap. +func (s *Server) SetServingStatus(service string, servingStatus healthpb.HealthCheckResponse_ServingStatus) { + s.mu.Lock() + defer s.mu.Unlock() + if s.shutdown { + grpclog.Infof("health: status changing for %s to %v is ignored because health service is shutdown", service, servingStatus) + return + } + + s.setServingStatusLocked(service, servingStatus) +} + +func (s *Server) setServingStatusLocked(service string, servingStatus healthpb.HealthCheckResponse_ServingStatus) { + s.statusMap[service] = servingStatus + for _, update := range s.updates[service] { + // Clears previous updates, that are not sent to the client, from the channel. + // This can happen if the client is not reading and the server gets flow control limited. + select { + case <-update: + default: + } + // Puts the most recent update to the channel. + update <- servingStatus + } +} + +// Shutdown sets all serving status to NOT_SERVING, and configures the server to +// ignore all future status changes. +// +// This changes serving status for all services. To set status for a perticular +// services, call SetServingStatus(). +func (s *Server) Shutdown() { + s.mu.Lock() + defer s.mu.Unlock() + s.shutdown = true + for service := range s.statusMap { + s.setServingStatusLocked(service, healthpb.HealthCheckResponse_NOT_SERVING) + } +} + +// Resume sets all serving status to SERVING, and configures the server to +// accept all future status changes. +// +// This changes serving status for all services. To set status for a perticular +// services, call SetServingStatus(). +func (s *Server) Resume() { + s.mu.Lock() + defer s.mu.Unlock() + s.shutdown = false + for service := range s.statusMap { + s.setServingStatusLocked(service, healthpb.HealthCheckResponse_SERVING) + } +} diff --git a/vendor/google.golang.org/grpc/interceptor.go b/vendor/google.golang.org/grpc/interceptor.go new file mode 100644 index 0000000..8b73500 --- /dev/null +++ b/vendor/google.golang.org/grpc/interceptor.go @@ -0,0 +1,77 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" +) + +// UnaryInvoker is called by UnaryClientInterceptor to complete RPCs. +type UnaryInvoker func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, opts ...CallOption) error + +// UnaryClientInterceptor intercepts the execution of a unary RPC on the client. invoker is the handler to complete the RPC +// and it is the responsibility of the interceptor to call it. +// This is an EXPERIMENTAL API. +type UnaryClientInterceptor func(ctx context.Context, method string, req, reply interface{}, cc *ClientConn, invoker UnaryInvoker, opts ...CallOption) error + +// Streamer is called by StreamClientInterceptor to create a ClientStream. +type Streamer func(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (ClientStream, error) + +// StreamClientInterceptor intercepts the creation of ClientStream. It may return a custom ClientStream to intercept all I/O +// operations. streamer is the handler to create a ClientStream and it is the responsibility of the interceptor to call it. +// This is an EXPERIMENTAL API. +type StreamClientInterceptor func(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, streamer Streamer, opts ...CallOption) (ClientStream, error) + +// UnaryServerInfo consists of various information about a unary RPC on +// server side. All per-rpc information may be mutated by the interceptor. +type UnaryServerInfo struct { + // Server is the service implementation the user provides. This is read-only. + Server interface{} + // FullMethod is the full RPC method string, i.e., /package.service/method. + FullMethod string +} + +// UnaryHandler defines the handler invoked by UnaryServerInterceptor to complete the normal +// execution of a unary RPC. If a UnaryHandler returns an error, it should be produced by the +// status package, or else gRPC will use codes.Unknown as the status code and err.Error() as +// the status message of the RPC. +type UnaryHandler func(ctx context.Context, req interface{}) (interface{}, error) + +// UnaryServerInterceptor provides a hook to intercept the execution of a unary RPC on the server. info +// contains all the information of this RPC the interceptor can operate on. And handler is the wrapper +// of the service method implementation. It is the responsibility of the interceptor to invoke handler +// to complete the RPC. +type UnaryServerInterceptor func(ctx context.Context, req interface{}, info *UnaryServerInfo, handler UnaryHandler) (resp interface{}, err error) + +// StreamServerInfo consists of various information about a streaming RPC on +// server side. All per-rpc information may be mutated by the interceptor. +type StreamServerInfo struct { + // FullMethod is the full RPC method string, i.e., /package.service/method. + FullMethod string + // IsClientStream indicates whether the RPC is a client streaming RPC. + IsClientStream bool + // IsServerStream indicates whether the RPC is a server streaming RPC. + IsServerStream bool +} + +// StreamServerInterceptor provides a hook to intercept the execution of a streaming RPC on the server. +// info contains all the information of this RPC the interceptor can operate on. And handler is the +// service method implementation. It is the responsibility of the interceptor to invoke handler to +// complete the RPC. 
+type StreamServerInterceptor func(srv interface{}, ss ServerStream, info *StreamServerInfo, handler StreamHandler) error diff --git a/vendor/google.golang.org/grpc/internal/backoff/backoff.go b/vendor/google.golang.org/grpc/internal/backoff/backoff.go new file mode 100644 index 0000000..1bd0cce --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/backoff/backoff.go @@ -0,0 +1,78 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package backoff implement the backoff strategy for gRPC. +// +// This is kept in internal until the gRPC project decides whether or not to +// allow alternative backoff strategies. +package backoff + +import ( + "time" + + "google.golang.org/grpc/internal/grpcrand" +) + +// Strategy defines the methodology for backing off after a grpc connection +// failure. +// +type Strategy interface { + // Backoff returns the amount of time to wait before the next retry given + // the number of consecutive failures. + Backoff(retries int) time.Duration +} + +const ( + // baseDelay is the amount of time to wait before retrying after the first + // failure. + baseDelay = 1.0 * time.Second + // factor is applied to the backoff after each retry. + factor = 1.6 + // jitter provides a range to randomize backoff delays. + jitter = 0.2 +) + +// Exponential implements exponential backoff algorithm as defined in +// https://github.com/grpc/grpc/blob/master/doc/connection-backoff.md. +type Exponential struct { + // MaxDelay is the upper bound of backoff delay. + MaxDelay time.Duration +} + +// Backoff returns the amount of time to wait before the next retry given the +// number of retries. +func (bc Exponential) Backoff(retries int) time.Duration { + if retries == 0 { + return baseDelay + } + backoff, max := float64(baseDelay), float64(bc.MaxDelay) + for backoff < max && retries > 0 { + backoff *= factor + retries-- + } + if backoff > max { + backoff = max + } + // Randomize backoff delays so that if a cluster of requests start at + // the same time, they won't operate in lockstep. + backoff *= 1 + jitter*(grpcrand.Float64()*2-1) + if backoff < 0 { + return 0 + } + return time.Duration(backoff) +} diff --git a/vendor/google.golang.org/grpc/internal/balancerload/load.go b/vendor/google.golang.org/grpc/internal/balancerload/load.go new file mode 100644 index 0000000..3a905d9 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/balancerload/load.go @@ -0,0 +1,46 @@ +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Package balancerload defines APIs to parse server loads in trailers. The +// parsed loads are sent to balancers in DoneInfo. +package balancerload + +import ( + "google.golang.org/grpc/metadata" +) + +// Parser converts loads from metadata into a concrete type. +type Parser interface { + // Parse parses loads from metadata. + Parse(md metadata.MD) interface{} +} + +var parser Parser + +// SetParser sets the load parser. +// +// Not mutex-protected, should be called before any gRPC functions. +func SetParser(lr Parser) { + parser = lr +} + +// Parse calls parser.Read(). +func Parse(md metadata.MD) interface{} { + if parser == nil { + return nil + } + return parser.Parse(md) +} diff --git a/vendor/google.golang.org/grpc/internal/balancerload/orca/orca.go b/vendor/google.golang.org/grpc/internal/balancerload/orca/orca.go new file mode 100644 index 0000000..8b8a1f1 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/balancerload/orca/orca.go @@ -0,0 +1,84 @@ +/* + * Copyright 2019 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//go:generate protoc -I ./orca_v1 --go_out=plugins=grpc:./orca_v1 ./orca_v1/orca.proto + +// Package orca implements Open Request Cost Aggregation. +package orca + +import ( + "github.com/golang/protobuf/proto" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/balancerload" + orcapb "google.golang.org/grpc/internal/balancerload/orca/orca_v1" + "google.golang.org/grpc/metadata" +) + +const mdKey = "X-Endpoint-Load-Metrics-Bin" + +// toBytes converts a orca load report into bytes. +func toBytes(r *orcapb.LoadReport) []byte { + if r == nil { + return nil + } + + b, err := proto.Marshal(r) + if err != nil { + grpclog.Warningf("orca: failed to marshal load report: %v", err) + return nil + } + return b +} + +// ToMetadata converts a orca load report into grpc metadata. +func ToMetadata(r *orcapb.LoadReport) metadata.MD { + b := toBytes(r) + if b == nil { + return nil + } + return metadata.Pairs(mdKey, string(b)) +} + +// fromBytes reads load report bytes and converts it to orca. +func fromBytes(b []byte) *orcapb.LoadReport { + ret := new(orcapb.LoadReport) + if err := proto.Unmarshal(b, ret); err != nil { + grpclog.Warningf("orca: failed to unmarshal load report: %v", err) + return nil + } + return ret +} + +// FromMetadata reads load report from metadata and converts it to orca. +// +// It returns nil if report is not found in metadata. 
+func FromMetadata(md metadata.MD) *orcapb.LoadReport { + vs := md.Get(mdKey) + if len(vs) == 0 { + return nil + } + return fromBytes([]byte(vs[0])) +} + +type loadParser struct{} + +func (*loadParser) Parse(md metadata.MD) interface{} { + return FromMetadata(md) +} + +func init() { + balancerload.SetParser(&loadParser{}) +} diff --git a/vendor/google.golang.org/grpc/internal/balancerload/orca/orca_v1/orca.pb.go b/vendor/google.golang.org/grpc/internal/balancerload/orca/orca_v1/orca.pb.go new file mode 100644 index 0000000..1c21ddf --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/balancerload/orca/orca_v1/orca.pb.go @@ -0,0 +1,293 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: orca.proto + +package orca_v1 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import duration "github.com/golang/protobuf/ptypes/duration" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type LoadReport struct { + // CPU utilization expressed as a fraction of available CPU resources. This + // should be derived from a sample or measurement taken during the request. + CpuUtilization float64 `protobuf:"fixed64,1,opt,name=cpu_utilization,json=cpuUtilization,proto3" json:"cpu_utilization,omitempty"` + // Memory utilization expressed as a fraction of available memory + // resources. This should be derived from a sample or measurement taken + // during the request. + MemUtilization float64 `protobuf:"fixed64,2,opt,name=mem_utilization,json=memUtilization,proto3" json:"mem_utilization,omitempty"` + // NIC inbound/outbound utilization expressed as a fraction of available NIC + // bandwidth. The request in/out bytes can be inferred by Envoy, but not the + // NIC availability at the endpoint, hence reporting + NicInUtilization float64 `protobuf:"fixed64,3,opt,name=nic_in_utilization,json=nicInUtilization,proto3" json:"nic_in_utilization,omitempty"` + NicOutUtilization float64 `protobuf:"fixed64,4,opt,name=nic_out_utilization,json=nicOutUtilization,proto3" json:"nic_out_utilization,omitempty"` + // Application specific requests costs. Values may be absolute costs (e.g. + // 3487 bytes of storage) associated with the cost or utilization, + // expressed as a fraction of total resources available. Utilization + // metrics should be derived from a sample or measurement taken + // during the request. 
+ RequestCostOrUtilization map[string]float64 `protobuf:"bytes,5,rep,name=request_cost_or_utilization,json=requestCostOrUtilization,proto3" json:"request_cost_or_utilization,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"fixed64,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoadReport) Reset() { *m = LoadReport{} } +func (m *LoadReport) String() string { return proto.CompactTextString(m) } +func (*LoadReport) ProtoMessage() {} +func (*LoadReport) Descriptor() ([]byte, []int) { + return fileDescriptor_orca_542539e3bf435293, []int{0} +} +func (m *LoadReport) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoadReport.Unmarshal(m, b) +} +func (m *LoadReport) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoadReport.Marshal(b, m, deterministic) +} +func (dst *LoadReport) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoadReport.Merge(dst, src) +} +func (m *LoadReport) XXX_Size() int { + return xxx_messageInfo_LoadReport.Size(m) +} +func (m *LoadReport) XXX_DiscardUnknown() { + xxx_messageInfo_LoadReport.DiscardUnknown(m) +} + +var xxx_messageInfo_LoadReport proto.InternalMessageInfo + +func (m *LoadReport) GetCpuUtilization() float64 { + if m != nil { + return m.CpuUtilization + } + return 0 +} + +func (m *LoadReport) GetMemUtilization() float64 { + if m != nil { + return m.MemUtilization + } + return 0 +} + +func (m *LoadReport) GetNicInUtilization() float64 { + if m != nil { + return m.NicInUtilization + } + return 0 +} + +func (m *LoadReport) GetNicOutUtilization() float64 { + if m != nil { + return m.NicOutUtilization + } + return 0 +} + +func (m *LoadReport) GetRequestCostOrUtilization() map[string]float64 { + if m != nil { + return m.RequestCostOrUtilization + } + return nil +} + +type LoadReportRequest struct { + // Interval for generating Open RCA core metric responses. + ReportInterval *duration.Duration `protobuf:"bytes,1,opt,name=report_interval,json=reportInterval,proto3" json:"report_interval,omitempty"` + // Request costs to collect. If this is empty, all known requests costs tracked by + // the load reporting agent will be returned. This provides an opportunity for + // the client to selectively obtain a subset of tracked costs. 
+ RequestCostNames []string `protobuf:"bytes,2,rep,name=request_cost_names,json=requestCostNames,proto3" json:"request_cost_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *LoadReportRequest) Reset() { *m = LoadReportRequest{} } +func (m *LoadReportRequest) String() string { return proto.CompactTextString(m) } +func (*LoadReportRequest) ProtoMessage() {} +func (*LoadReportRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_orca_542539e3bf435293, []int{1} +} +func (m *LoadReportRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_LoadReportRequest.Unmarshal(m, b) +} +func (m *LoadReportRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_LoadReportRequest.Marshal(b, m, deterministic) +} +func (dst *LoadReportRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_LoadReportRequest.Merge(dst, src) +} +func (m *LoadReportRequest) XXX_Size() int { + return xxx_messageInfo_LoadReportRequest.Size(m) +} +func (m *LoadReportRequest) XXX_DiscardUnknown() { + xxx_messageInfo_LoadReportRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_LoadReportRequest proto.InternalMessageInfo + +func (m *LoadReportRequest) GetReportInterval() *duration.Duration { + if m != nil { + return m.ReportInterval + } + return nil +} + +func (m *LoadReportRequest) GetRequestCostNames() []string { + if m != nil { + return m.RequestCostNames + } + return nil +} + +func init() { + proto.RegisterType((*LoadReport)(nil), "orca.v1.LoadReport") + proto.RegisterMapType((map[string]float64)(nil), "orca.v1.LoadReport.RequestCostOrUtilizationEntry") + proto.RegisterType((*LoadReportRequest)(nil), "orca.v1.LoadReportRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// OpenRCAServiceClient is the client API for OpenRCAService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type OpenRCAServiceClient interface { + StreamCoreMetrics(ctx context.Context, in *LoadReportRequest, opts ...grpc.CallOption) (OpenRCAService_StreamCoreMetricsClient, error) +} + +type openRCAServiceClient struct { + cc *grpc.ClientConn +} + +func NewOpenRCAServiceClient(cc *grpc.ClientConn) OpenRCAServiceClient { + return &openRCAServiceClient{cc} +} + +func (c *openRCAServiceClient) StreamCoreMetrics(ctx context.Context, in *LoadReportRequest, opts ...grpc.CallOption) (OpenRCAService_StreamCoreMetricsClient, error) { + stream, err := c.cc.NewStream(ctx, &_OpenRCAService_serviceDesc.Streams[0], "/orca.v1.OpenRCAService/StreamCoreMetrics", opts...) 
+ if err != nil { + return nil, err + } + x := &openRCAServiceStreamCoreMetricsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type OpenRCAService_StreamCoreMetricsClient interface { + Recv() (*LoadReport, error) + grpc.ClientStream +} + +type openRCAServiceStreamCoreMetricsClient struct { + grpc.ClientStream +} + +func (x *openRCAServiceStreamCoreMetricsClient) Recv() (*LoadReport, error) { + m := new(LoadReport) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// OpenRCAServiceServer is the server API for OpenRCAService service. +type OpenRCAServiceServer interface { + StreamCoreMetrics(*LoadReportRequest, OpenRCAService_StreamCoreMetricsServer) error +} + +func RegisterOpenRCAServiceServer(s *grpc.Server, srv OpenRCAServiceServer) { + s.RegisterService(&_OpenRCAService_serviceDesc, srv) +} + +func _OpenRCAService_StreamCoreMetrics_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(LoadReportRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(OpenRCAServiceServer).StreamCoreMetrics(m, &openRCAServiceStreamCoreMetricsServer{stream}) +} + +type OpenRCAService_StreamCoreMetricsServer interface { + Send(*LoadReport) error + grpc.ServerStream +} + +type openRCAServiceStreamCoreMetricsServer struct { + grpc.ServerStream +} + +func (x *openRCAServiceStreamCoreMetricsServer) Send(m *LoadReport) error { + return x.ServerStream.SendMsg(m) +} + +var _OpenRCAService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "orca.v1.OpenRCAService", + HandlerType: (*OpenRCAServiceServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamCoreMetrics", + Handler: _OpenRCAService_StreamCoreMetrics_Handler, + ServerStreams: true, + }, + }, + Metadata: "orca.proto", +} + +func init() { proto.RegisterFile("orca.proto", fileDescriptor_orca_542539e3bf435293) } + +var fileDescriptor_orca_542539e3bf435293 = []byte{ + // 373 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xcd, 0x6b, 0xe3, 0x30, + 0x10, 0xc5, 0xd7, 0xf6, 0x66, 0x97, 0x28, 0x90, 0x0f, 0x65, 0x0f, 0x59, 0x2f, 0xbb, 0x84, 0x5c, + 0x36, 0x87, 0x45, 0xd9, 0xa4, 0x97, 0xd2, 0x5b, 0x9b, 0x16, 0x1a, 0xfa, 0x11, 0x50, 0xe8, 0xa5, + 0x17, 0xe3, 0x28, 0xd3, 0x20, 0x6a, 0x4b, 0xae, 0x2c, 0x19, 0xd2, 0x7b, 0xff, 0xea, 0x5e, 0x8a, + 0x65, 0x97, 0xd8, 0x90, 0xf6, 0x26, 0xbd, 0xf9, 0xbd, 0x61, 0xe6, 0x0d, 0x42, 0x52, 0xb1, 0x90, + 0x24, 0x4a, 0x6a, 0x89, 0xbf, 0xdb, 0x77, 0x36, 0xf5, 0xff, 0x6c, 0xa5, 0xdc, 0x46, 0x30, 0xb1, + 0xf2, 0xda, 0x3c, 0x4c, 0x36, 0x46, 0x85, 0x9a, 0x4b, 0x51, 0x80, 0xa3, 0x57, 0x17, 0xa1, 0x6b, + 0x19, 0x6e, 0x28, 0x24, 0x52, 0x69, 0xfc, 0x17, 0x75, 0x58, 0x62, 0x02, 0xa3, 0x79, 0xc4, 0x9f, + 0x2d, 0x37, 0x70, 0x86, 0xce, 0xd8, 0xa1, 0x6d, 0x96, 0x98, 0xbb, 0xbd, 0x9a, 0x83, 0x31, 0xc4, + 0x35, 0xd0, 0x2d, 0xc0, 0x18, 0xe2, 0x2a, 0xf8, 0x0f, 0x61, 0xc1, 0x59, 0xc0, 0x45, 0x8d, 0xf5, + 0x2c, 0xdb, 0x15, 0x9c, 0x2d, 0x44, 0x95, 0x26, 0xa8, 0x9f, 0xd3, 0xd2, 0xe8, 0x1a, 0xfe, 0xd5, + 0xe2, 0x3d, 0xc1, 0xd9, 0xd2, 0xe8, 0x2a, 0x9f, 0xa0, 0x5f, 0x0a, 0x9e, 0x0c, 0xa4, 0x3a, 0x60, + 0x32, 0xd5, 0x81, 0x54, 0x35, 0x5f, 0x63, 0xe8, 0x8d, 0x5b, 0xb3, 0x29, 0x29, 0xd3, 0x20, 0xfb, + 0x4d, 0x09, 0x2d, 0x6c, 0x73, 0x99, 0xea, 0xa5, 0xaa, 0xb4, 0xbc, 0x10, 0x5a, 0xed, 0xe8, 0x40, + 0x7d, 0x50, 0xf6, 0xaf, 0xd0, 0xef, 
0x4f, 0xad, 0xb8, 0x8b, 0xbc, 0x47, 0xd8, 0xd9, 0xd8, 0x9a, + 0x34, 0x7f, 0xe2, 0x1f, 0xa8, 0x91, 0x85, 0x91, 0x81, 0x32, 0xa1, 0xe2, 0x73, 0xe2, 0x1e, 0x3b, + 0xa3, 0x17, 0x07, 0xf5, 0xf6, 0x33, 0x95, 0x7d, 0xf1, 0x19, 0xea, 0x28, 0x2b, 0x04, 0x5c, 0x68, + 0x50, 0x59, 0x18, 0xd9, 0x6e, 0xad, 0xd9, 0x4f, 0x52, 0x5c, 0x93, 0xbc, 0x5f, 0x93, 0x9c, 0x97, + 0xd7, 0xa4, 0xed, 0xc2, 0xb1, 0x28, 0x0d, 0x79, 0xec, 0xb5, 0x60, 0x44, 0x18, 0x43, 0x3a, 0x70, + 0x87, 0xde, 0xb8, 0x49, 0xbb, 0x95, 0xe5, 0x6e, 0x73, 0x7d, 0x76, 0x8f, 0xda, 0xcb, 0x04, 0x04, + 0x9d, 0x9f, 0xae, 0x40, 0x65, 0x9c, 0x01, 0xbe, 0x44, 0xbd, 0x95, 0x56, 0x10, 0xc6, 0x73, 0xa9, + 0xe0, 0x06, 0xb4, 0xe2, 0x2c, 0xc5, 0xfe, 0x81, 0x20, 0xcb, 0xa1, 0xfd, 0xfe, 0x81, 0xda, 0xe8, + 0xcb, 0x7f, 0x67, 0xfd, 0xcd, 0x0e, 0x7b, 0xf4, 0x16, 0x00, 0x00, 0xff, 0xff, 0xc0, 0xda, 0x2d, + 0xb7, 0x9f, 0x02, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/internal/binarylog/binarylog.go b/vendor/google.golang.org/grpc/internal/binarylog/binarylog.go new file mode 100644 index 0000000..fee6aec --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/binarylog.go @@ -0,0 +1,167 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package binarylog implementation binary logging as defined in +// https://github.com/grpc/proposal/blob/master/A16-binary-logging.md. +package binarylog + +import ( + "fmt" + "os" + + "google.golang.org/grpc/grpclog" +) + +// Logger is the global binary logger. It can be used to get binary logger for +// each method. +type Logger interface { + getMethodLogger(methodName string) *MethodLogger +} + +// binLogger is the global binary logger for the binary. One of this should be +// built at init time from the configuration (environment varialbe or flags). +// +// It is used to get a methodLogger for each individual method. +var binLogger Logger + +// SetLogger sets the binarg logger. +// +// Only call this at init time. +func SetLogger(l Logger) { + binLogger = l +} + +// GetMethodLogger returns the methodLogger for the given methodName. +// +// methodName should be in the format of "/service/method". +// +// Each methodLogger returned by this method is a new instance. This is to +// generate sequence id within the call. +func GetMethodLogger(methodName string) *MethodLogger { + if binLogger == nil { + return nil + } + return binLogger.getMethodLogger(methodName) +} + +func init() { + const envStr = "GRPC_BINARY_LOG_FILTER" + configStr := os.Getenv(envStr) + binLogger = NewLoggerFromConfigString(configStr) +} + +type methodLoggerConfig struct { + // Max length of header and message. + hdr, msg uint64 +} + +type logger struct { + all *methodLoggerConfig + services map[string]*methodLoggerConfig + methods map[string]*methodLoggerConfig + + blacklist map[string]struct{} +} + +// newEmptyLogger creates an empty logger. The map fields need to be filled in +// using the set* functions. 
+func newEmptyLogger() *logger { + return &logger{} +} + +// Set method logger for "*". +func (l *logger) setDefaultMethodLogger(ml *methodLoggerConfig) error { + if l.all != nil { + return fmt.Errorf("conflicting global rules found") + } + l.all = ml + return nil +} + +// Set method logger for "service/*". +// +// New methodLogger with same service overrides the old one. +func (l *logger) setServiceMethodLogger(service string, ml *methodLoggerConfig) error { + if _, ok := l.services[service]; ok { + return fmt.Errorf("conflicting rules for service %v found", service) + } + if l.services == nil { + l.services = make(map[string]*methodLoggerConfig) + } + l.services[service] = ml + return nil +} + +// Set method logger for "service/method". +// +// New methodLogger with same method overrides the old one. +func (l *logger) setMethodMethodLogger(method string, ml *methodLoggerConfig) error { + if _, ok := l.blacklist[method]; ok { + return fmt.Errorf("conflicting rules for method %v found", method) + } + if _, ok := l.methods[method]; ok { + return fmt.Errorf("conflicting rules for method %v found", method) + } + if l.methods == nil { + l.methods = make(map[string]*methodLoggerConfig) + } + l.methods[method] = ml + return nil +} + +// Set blacklist method for "-service/method". +func (l *logger) setBlacklist(method string) error { + if _, ok := l.blacklist[method]; ok { + return fmt.Errorf("conflicting rules for method %v found", method) + } + if _, ok := l.methods[method]; ok { + return fmt.Errorf("conflicting rules for method %v found", method) + } + if l.blacklist == nil { + l.blacklist = make(map[string]struct{}) + } + l.blacklist[method] = struct{}{} + return nil +} + +// getMethodLogger returns the methodLogger for the given methodName. +// +// methodName should be in the format of "/service/method". +// +// Each methodLogger returned by this method is a new instance. This is to +// generate sequence id within the call. +func (l *logger) getMethodLogger(methodName string) *MethodLogger { + s, m, err := parseMethodName(methodName) + if err != nil { + grpclog.Infof("binarylogging: failed to parse %q: %v", methodName, err) + return nil + } + if ml, ok := l.methods[s+"/"+m]; ok { + return newMethodLogger(ml.hdr, ml.msg) + } + if _, ok := l.blacklist[s+"/"+m]; ok { + return nil + } + if ml, ok := l.services[s]; ok { + return newMethodLogger(ml.hdr, ml.msg) + } + if l.all == nil { + return nil + } + return newMethodLogger(l.all.hdr, l.all.msg) +} diff --git a/vendor/google.golang.org/grpc/internal/binarylog/binarylog_testutil.go b/vendor/google.golang.org/grpc/internal/binarylog/binarylog_testutil.go new file mode 100644 index 0000000..1ee00a3 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/binarylog_testutil.go @@ -0,0 +1,42 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This file contains exported variables/functions that are exported for testing +// only. 
+// +// An ideal way for this would be to put those in a *_test.go but in binarylog +// package. But this doesn't work with staticcheck with go module. Error was: +// "MdToMetadataProto not declared by package binarylog". This could be caused +// by the way staticcheck looks for files for a certain package, which doesn't +// support *_test.go files. +// +// Move those to binary_test.go when staticcheck is fixed. + +package binarylog + +var ( + // AllLogger is a logger that logs all headers/messages for all RPCs. It's + // for testing only. + AllLogger = NewLoggerFromConfigString("*") + // MdToMetadataProto converts metadata to a binary logging proto message. + // It's for testing only. + MdToMetadataProto = mdToMetadataProto + // AddrToProto converts an address to a binary logging proto message. It's + // for testing only. + AddrToProto = addrToProto +) diff --git a/vendor/google.golang.org/grpc/internal/binarylog/env_config.go b/vendor/google.golang.org/grpc/internal/binarylog/env_config.go new file mode 100644 index 0000000..4cc2525 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/env_config.go @@ -0,0 +1,210 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package binarylog + +import ( + "errors" + "fmt" + "regexp" + "strconv" + "strings" + + "google.golang.org/grpc/grpclog" +) + +// NewLoggerFromConfigString reads the string and build a logger. It can be used +// to build a new logger and assign it to binarylog.Logger. +// +// Example filter config strings: +// - "" Nothing will be logged +// - "*" All headers and messages will be fully logged. +// - "*{h}" Only headers will be logged. +// - "*{m:256}" Only the first 256 bytes of each message will be logged. +// - "Foo/*" Logs every method in service Foo +// - "Foo/*,-Foo/Bar" Logs every method in service Foo except method /Foo/Bar +// - "Foo/*,Foo/Bar{m:256}" Logs the first 256 bytes of each message in method +// /Foo/Bar, logs all headers and messages in every other method in service +// Foo. +// +// If two configs exist for one certain method or service, the one specified +// later overrides the privous config. +func NewLoggerFromConfigString(s string) Logger { + if s == "" { + return nil + } + l := newEmptyLogger() + methods := strings.Split(s, ",") + for _, method := range methods { + if err := l.fillMethodLoggerWithConfigString(method); err != nil { + grpclog.Warningf("failed to parse binary log config: %v", err) + return nil + } + } + return l +} + +// fillMethodLoggerWithConfigString parses config, creates methodLogger and adds +// it to the right map in the logger. +func (l *logger) fillMethodLoggerWithConfigString(config string) error { + // "" is invalid. + if config == "" { + return errors.New("empty string is not a valid method binary logging config") + } + + // "-service/method", blacklist, no * or {} allowed. 
+ if config[0] == '-' { + s, m, suffix, err := parseMethodConfigAndSuffix(config[1:]) + if err != nil { + return fmt.Errorf("invalid config: %q, %v", config, err) + } + if m == "*" { + return fmt.Errorf("invalid config: %q, %v", config, "* not allowd in blacklist config") + } + if suffix != "" { + return fmt.Errorf("invalid config: %q, %v", config, "header/message limit not allowed in blacklist config") + } + if err := l.setBlacklist(s + "/" + m); err != nil { + return fmt.Errorf("invalid config: %v", err) + } + return nil + } + + // "*{h:256;m:256}" + if config[0] == '*' { + hdr, msg, err := parseHeaderMessageLengthConfig(config[1:]) + if err != nil { + return fmt.Errorf("invalid config: %q, %v", config, err) + } + if err := l.setDefaultMethodLogger(&methodLoggerConfig{hdr: hdr, msg: msg}); err != nil { + return fmt.Errorf("invalid config: %v", err) + } + return nil + } + + s, m, suffix, err := parseMethodConfigAndSuffix(config) + if err != nil { + return fmt.Errorf("invalid config: %q, %v", config, err) + } + hdr, msg, err := parseHeaderMessageLengthConfig(suffix) + if err != nil { + return fmt.Errorf("invalid header/message length config: %q, %v", suffix, err) + } + if m == "*" { + if err := l.setServiceMethodLogger(s, &methodLoggerConfig{hdr: hdr, msg: msg}); err != nil { + return fmt.Errorf("invalid config: %v", err) + } + } else { + if err := l.setMethodMethodLogger(s+"/"+m, &methodLoggerConfig{hdr: hdr, msg: msg}); err != nil { + return fmt.Errorf("invalid config: %v", err) + } + } + return nil +} + +const ( + // TODO: this const is only used by env_config now. But could be useful for + // other config. Move to binarylog.go if necessary. + maxUInt = ^uint64(0) + + // For "p.s/m" plus any suffix. Suffix will be parsed again. See test for + // expected output. + longMethodConfigRegexpStr = `^([\w./]+)/((?:\w+)|[*])(.+)?$` + + // For suffix from above, "{h:123,m:123}". See test for expected output. + optionalLengthRegexpStr = `(?::(\d+))?` // Optional ":123". + headerConfigRegexpStr = `^{h` + optionalLengthRegexpStr + `}$` + messageConfigRegexpStr = `^{m` + optionalLengthRegexpStr + `}$` + headerMessageConfigRegexpStr = `^{h` + optionalLengthRegexpStr + `;m` + optionalLengthRegexpStr + `}$` +) + +var ( + longMethodConfigRegexp = regexp.MustCompile(longMethodConfigRegexpStr) + headerConfigRegexp = regexp.MustCompile(headerConfigRegexpStr) + messageConfigRegexp = regexp.MustCompile(messageConfigRegexpStr) + headerMessageConfigRegexp = regexp.MustCompile(headerMessageConfigRegexpStr) +) + +// Turn "service/method{h;m}" into "service", "method", "{h;m}". +func parseMethodConfigAndSuffix(c string) (service, method, suffix string, _ error) { + // Regexp result: + // + // in: "p.s/m{h:123,m:123}", + // out: []string{"p.s/m{h:123,m:123}", "p.s", "m", "{h:123,m:123}"}, + match := longMethodConfigRegexp.FindStringSubmatch(c) + if match == nil { + return "", "", "", fmt.Errorf("%q contains invalid substring", c) + } + service = match[1] + method = match[2] + suffix = match[3] + return +} + +// Turn "{h:123;m:345}" into 123, 345. +// +// Return maxUInt if length is unspecified. +func parseHeaderMessageLengthConfig(c string) (hdrLenStr, msgLenStr uint64, err error) { + if c == "" { + return maxUInt, maxUInt, nil + } + // Header config only. 
+ if match := headerConfigRegexp.FindStringSubmatch(c); match != nil { + if s := match[1]; s != "" { + hdrLenStr, err = strconv.ParseUint(s, 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("failed to convert %q to uint", s) + } + return hdrLenStr, 0, nil + } + return maxUInt, 0, nil + } + + // Message config only. + if match := messageConfigRegexp.FindStringSubmatch(c); match != nil { + if s := match[1]; s != "" { + msgLenStr, err = strconv.ParseUint(s, 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("failed to convert %q to uint", s) + } + return 0, msgLenStr, nil + } + return 0, maxUInt, nil + } + + // Header and message config both. + if match := headerMessageConfigRegexp.FindStringSubmatch(c); match != nil { + // Both hdr and msg are specified, but one or two of them might be empty. + hdrLenStr = maxUInt + msgLenStr = maxUInt + if s := match[1]; s != "" { + hdrLenStr, err = strconv.ParseUint(s, 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("failed to convert %q to uint", s) + } + } + if s := match[2]; s != "" { + msgLenStr, err = strconv.ParseUint(s, 10, 64) + if err != nil { + return 0, 0, fmt.Errorf("failed to convert %q to uint", s) + } + } + return hdrLenStr, msgLenStr, nil + } + return 0, 0, fmt.Errorf("%q contains invalid substring", c) +} diff --git a/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go new file mode 100644 index 0000000..160f6e8 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/method_logger.go @@ -0,0 +1,423 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package binarylog + +import ( + "net" + "strings" + "sync/atomic" + "time" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + pb "google.golang.org/grpc/binarylog/grpc_binarylog_v1" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +type callIDGenerator struct { + id uint64 +} + +func (g *callIDGenerator) next() uint64 { + id := atomic.AddUint64(&g.id, 1) + return id +} + +// reset is for testing only, and doesn't need to be thread safe. +func (g *callIDGenerator) reset() { + g.id = 0 +} + +var idGen callIDGenerator + +// MethodLogger is the sub-logger for each method. +type MethodLogger struct { + headerMaxLen, messageMaxLen uint64 + + callID uint64 + idWithinCallGen *callIDGenerator + + sink Sink // TODO(blog): make this plugable. +} + +func newMethodLogger(h, m uint64) *MethodLogger { + return &MethodLogger{ + headerMaxLen: h, + messageMaxLen: m, + + callID: idGen.next(), + idWithinCallGen: &callIDGenerator{}, + + sink: defaultSink, // TODO(blog): make it plugable. + } +} + +// Log creates a proto binary log entry, and logs it to the sink. 
+func (ml *MethodLogger) Log(c LogEntryConfig) { + m := c.toProto() + timestamp, _ := ptypes.TimestampProto(time.Now()) + m.Timestamp = timestamp + m.CallId = ml.callID + m.SequenceIdWithinCall = ml.idWithinCallGen.next() + + switch pay := m.Payload.(type) { + case *pb.GrpcLogEntry_ClientHeader: + m.PayloadTruncated = ml.truncateMetadata(pay.ClientHeader.GetMetadata()) + case *pb.GrpcLogEntry_ServerHeader: + m.PayloadTruncated = ml.truncateMetadata(pay.ServerHeader.GetMetadata()) + case *pb.GrpcLogEntry_Message: + m.PayloadTruncated = ml.truncateMessage(pay.Message) + } + + ml.sink.Write(m) +} + +func (ml *MethodLogger) truncateMetadata(mdPb *pb.Metadata) (truncated bool) { + if ml.headerMaxLen == maxUInt { + return false + } + var ( + bytesLimit = ml.headerMaxLen + index int + ) + // At the end of the loop, index will be the first entry where the total + // size is greater than the limit: + // + // len(entry[:index]) <= ml.hdr && len(entry[:index+1]) > ml.hdr. + for ; index < len(mdPb.Entry); index++ { + entry := mdPb.Entry[index] + if entry.Key == "grpc-trace-bin" { + // "grpc-trace-bin" is a special key. It's kept in the log entry, + // but not counted towards the size limit. + continue + } + currentEntryLen := uint64(len(entry.Value)) + if currentEntryLen > bytesLimit { + break + } + bytesLimit -= currentEntryLen + } + truncated = index < len(mdPb.Entry) + mdPb.Entry = mdPb.Entry[:index] + return truncated +} + +func (ml *MethodLogger) truncateMessage(msgPb *pb.Message) (truncated bool) { + if ml.messageMaxLen == maxUInt { + return false + } + if ml.messageMaxLen >= uint64(len(msgPb.Data)) { + return false + } + msgPb.Data = msgPb.Data[:ml.messageMaxLen] + return true +} + +// LogEntryConfig represents the configuration for binary log entry. +type LogEntryConfig interface { + toProto() *pb.GrpcLogEntry +} + +// ClientHeader configs the binary log entry to be a ClientHeader entry. +type ClientHeader struct { + OnClientSide bool + Header metadata.MD + MethodName string + Authority string + Timeout time.Duration + // PeerAddr is required only when it's on server side. + PeerAddr net.Addr +} + +func (c *ClientHeader) toProto() *pb.GrpcLogEntry { + // This function doesn't need to set all the fields (e.g. seq ID). The Log + // function will set the fields when necessary. + clientHeader := &pb.ClientHeader{ + Metadata: mdToMetadataProto(c.Header), + MethodName: c.MethodName, + Authority: c.Authority, + } + if c.Timeout > 0 { + clientHeader.Timeout = ptypes.DurationProto(c.Timeout) + } + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_CLIENT_HEADER, + Payload: &pb.GrpcLogEntry_ClientHeader{ + ClientHeader: clientHeader, + }, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + if c.PeerAddr != nil { + ret.Peer = addrToProto(c.PeerAddr) + } + return ret +} + +// ServerHeader configs the binary log entry to be a ServerHeader entry. +type ServerHeader struct { + OnClientSide bool + Header metadata.MD + // PeerAddr is required only when it's on client side. 
+ PeerAddr net.Addr +} + +func (c *ServerHeader) toProto() *pb.GrpcLogEntry { + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_SERVER_HEADER, + Payload: &pb.GrpcLogEntry_ServerHeader{ + ServerHeader: &pb.ServerHeader{ + Metadata: mdToMetadataProto(c.Header), + }, + }, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + if c.PeerAddr != nil { + ret.Peer = addrToProto(c.PeerAddr) + } + return ret +} + +// ClientMessage configs the binary log entry to be a ClientMessage entry. +type ClientMessage struct { + OnClientSide bool + // Message can be a proto.Message or []byte. Other messages formats are not + // supported. + Message interface{} +} + +func (c *ClientMessage) toProto() *pb.GrpcLogEntry { + var ( + data []byte + err error + ) + if m, ok := c.Message.(proto.Message); ok { + data, err = proto.Marshal(m) + if err != nil { + grpclog.Infof("binarylogging: failed to marshal proto message: %v", err) + } + } else if b, ok := c.Message.([]byte); ok { + data = b + } else { + grpclog.Infof("binarylogging: message to log is neither proto.message nor []byte") + } + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_CLIENT_MESSAGE, + Payload: &pb.GrpcLogEntry_Message{ + Message: &pb.Message{ + Length: uint32(len(data)), + Data: data, + }, + }, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + return ret +} + +// ServerMessage configs the binary log entry to be a ServerMessage entry. +type ServerMessage struct { + OnClientSide bool + // Message can be a proto.Message or []byte. Other messages formats are not + // supported. + Message interface{} +} + +func (c *ServerMessage) toProto() *pb.GrpcLogEntry { + var ( + data []byte + err error + ) + if m, ok := c.Message.(proto.Message); ok { + data, err = proto.Marshal(m) + if err != nil { + grpclog.Infof("binarylogging: failed to marshal proto message: %v", err) + } + } else if b, ok := c.Message.([]byte); ok { + data = b + } else { + grpclog.Infof("binarylogging: message to log is neither proto.message nor []byte") + } + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_SERVER_MESSAGE, + Payload: &pb.GrpcLogEntry_Message{ + Message: &pb.Message{ + Length: uint32(len(data)), + Data: data, + }, + }, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + return ret +} + +// ClientHalfClose configs the binary log entry to be a ClientHalfClose entry. +type ClientHalfClose struct { + OnClientSide bool +} + +func (c *ClientHalfClose) toProto() *pb.GrpcLogEntry { + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_CLIENT_HALF_CLOSE, + Payload: nil, // No payload here. + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + return ret +} + +// ServerTrailer configs the binary log entry to be a ServerTrailer entry. +type ServerTrailer struct { + OnClientSide bool + Trailer metadata.MD + // Err is the status error. + Err error + // PeerAddr is required only when it's on client side and the RPC is trailer + // only. 
+ PeerAddr net.Addr +} + +func (c *ServerTrailer) toProto() *pb.GrpcLogEntry { + st, ok := status.FromError(c.Err) + if !ok { + grpclog.Info("binarylogging: error in trailer is not a status error") + } + var ( + detailsBytes []byte + err error + ) + stProto := st.Proto() + if stProto != nil && len(stProto.Details) != 0 { + detailsBytes, err = proto.Marshal(stProto) + if err != nil { + grpclog.Infof("binarylogging: failed to marshal status proto: %v", err) + } + } + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_SERVER_TRAILER, + Payload: &pb.GrpcLogEntry_Trailer{ + Trailer: &pb.Trailer{ + Metadata: mdToMetadataProto(c.Trailer), + StatusCode: uint32(st.Code()), + StatusMessage: st.Message(), + StatusDetails: detailsBytes, + }, + }, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + if c.PeerAddr != nil { + ret.Peer = addrToProto(c.PeerAddr) + } + return ret +} + +// Cancel configs the binary log entry to be a Cancel entry. +type Cancel struct { + OnClientSide bool +} + +func (c *Cancel) toProto() *pb.GrpcLogEntry { + ret := &pb.GrpcLogEntry{ + Type: pb.GrpcLogEntry_EVENT_TYPE_CANCEL, + Payload: nil, + } + if c.OnClientSide { + ret.Logger = pb.GrpcLogEntry_LOGGER_CLIENT + } else { + ret.Logger = pb.GrpcLogEntry_LOGGER_SERVER + } + return ret +} + +// metadataKeyOmit returns whether the metadata entry with this key should be +// omitted. +func metadataKeyOmit(key string) bool { + switch key { + case "lb-token", ":path", ":authority", "content-encoding", "content-type", "user-agent", "te": + return true + case "grpc-trace-bin": // grpc-trace-bin is special because it's visiable to users. + return false + } + return strings.HasPrefix(key, "grpc-") +} + +func mdToMetadataProto(md metadata.MD) *pb.Metadata { + ret := &pb.Metadata{} + for k, vv := range md { + if metadataKeyOmit(k) { + continue + } + for _, v := range vv { + ret.Entry = append(ret.Entry, + &pb.MetadataEntry{ + Key: k, + Value: []byte(v), + }, + ) + } + } + return ret +} + +func addrToProto(addr net.Addr) *pb.Address { + ret := &pb.Address{} + switch a := addr.(type) { + case *net.TCPAddr: + if a.IP.To4() != nil { + ret.Type = pb.Address_TYPE_IPV4 + } else if a.IP.To16() != nil { + ret.Type = pb.Address_TYPE_IPV6 + } else { + ret.Type = pb.Address_TYPE_UNKNOWN + // Do not set address and port fields. + break + } + ret.Address = a.IP.String() + ret.IpPort = uint32(a.Port) + case *net.UnixAddr: + ret.Type = pb.Address_TYPE_UNIX + ret.Address = a.String() + default: + ret.Type = pb.Address_TYPE_UNKNOWN + } + return ret +} diff --git a/vendor/google.golang.org/grpc/internal/binarylog/sink.go b/vendor/google.golang.org/grpc/internal/binarylog/sink.go new file mode 100644 index 0000000..20d044f --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/sink.go @@ -0,0 +1,162 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package binarylog + +import ( + "bufio" + "encoding/binary" + "fmt" + "io" + "io/ioutil" + "sync" + "time" + + "github.com/golang/protobuf/proto" + pb "google.golang.org/grpc/binarylog/grpc_binarylog_v1" + "google.golang.org/grpc/grpclog" +) + +var ( + defaultSink Sink = &noopSink{} // TODO(blog): change this default (file in /tmp). +) + +// SetDefaultSink sets the sink where binary logs will be written to. +// +// Not thread safe. Only set during initialization. +func SetDefaultSink(s Sink) { + if defaultSink != nil { + defaultSink.Close() + } + defaultSink = s +} + +// Sink writes log entry into the binary log sink. +type Sink interface { + // Write will be called to write the log entry into the sink. + // + // It should be thread-safe so it can be called in parallel. + Write(*pb.GrpcLogEntry) error + // Close will be called when the Sink is replaced by a new Sink. + Close() error +} + +type noopSink struct{} + +func (ns *noopSink) Write(*pb.GrpcLogEntry) error { return nil } +func (ns *noopSink) Close() error { return nil } + +// newWriterSink creates a binary log sink with the given writer. +// +// Write() marshalls the proto message and writes it to the given writer. Each +// message is prefixed with a 4 byte big endian unsigned integer as the length. +// +// No buffer is done, Close() doesn't try to close the writer. +func newWriterSink(w io.Writer) *writerSink { + return &writerSink{out: w} +} + +type writerSink struct { + out io.Writer +} + +func (ws *writerSink) Write(e *pb.GrpcLogEntry) error { + b, err := proto.Marshal(e) + if err != nil { + grpclog.Infof("binary logging: failed to marshal proto message: %v", err) + } + hdr := make([]byte, 4) + binary.BigEndian.PutUint32(hdr, uint32(len(b))) + if _, err := ws.out.Write(hdr); err != nil { + return err + } + if _, err := ws.out.Write(b); err != nil { + return err + } + return nil +} + +func (ws *writerSink) Close() error { return nil } + +type bufWriteCloserSink struct { + mu sync.Mutex + closer io.Closer + out *writerSink // out is built on buf. + buf *bufio.Writer // buf is kept for flush. + + writeStartOnce sync.Once + writeTicker *time.Ticker +} + +func (fs *bufWriteCloserSink) Write(e *pb.GrpcLogEntry) error { + // Start the write loop when Write is called. + fs.writeStartOnce.Do(fs.startFlushGoroutine) + fs.mu.Lock() + if err := fs.out.Write(e); err != nil { + fs.mu.Unlock() + return err + } + fs.mu.Unlock() + return nil +} + +const ( + bufFlushDuration = 60 * time.Second +) + +func (fs *bufWriteCloserSink) startFlushGoroutine() { + fs.writeTicker = time.NewTicker(bufFlushDuration) + go func() { + for range fs.writeTicker.C { + fs.mu.Lock() + fs.buf.Flush() + fs.mu.Unlock() + } + }() +} + +func (fs *bufWriteCloserSink) Close() error { + if fs.writeTicker != nil { + fs.writeTicker.Stop() + } + fs.mu.Lock() + fs.buf.Flush() + fs.closer.Close() + fs.out.Close() + fs.mu.Unlock() + return nil +} + +func newBufWriteCloserSink(o io.WriteCloser) Sink { + bufW := bufio.NewWriter(o) + return &bufWriteCloserSink{ + closer: o, + out: newWriterSink(bufW), + buf: bufW, + } +} + +// NewTempFileSink creates a temp file and returns a Sink that writes to this +// file. 
+func NewTempFileSink() (Sink, error) { + tempFile, err := ioutil.TempFile("/tmp", "grpcgo_binarylog_*.txt") + if err != nil { + return nil, fmt.Errorf("failed to create temp file: %v", err) + } + return newBufWriteCloserSink(tempFile), nil +} diff --git a/vendor/google.golang.org/grpc/internal/binarylog/util.go b/vendor/google.golang.org/grpc/internal/binarylog/util.go new file mode 100644 index 0000000..15dc780 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/binarylog/util.go @@ -0,0 +1,41 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package binarylog + +import ( + "errors" + "strings" +) + +// parseMethodName splits service and method from the input. It expects format +// "/service/method". +// +// TODO: move to internal/grpcutil. +func parseMethodName(methodName string) (service, method string, _ error) { + if !strings.HasPrefix(methodName, "/") { + return "", "", errors.New("invalid method name: should start with /") + } + methodName = methodName[1:] + + pos := strings.LastIndex(methodName, "/") + if pos < 0 { + return "", "", errors.New("invalid method name: suffix /method is missing") + } + return methodName[:pos], methodName[pos+1:], nil +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/funcs.go b/vendor/google.golang.org/grpc/internal/channelz/funcs.go new file mode 100644 index 0000000..041520d --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/funcs.go @@ -0,0 +1,699 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package channelz defines APIs for enabling channelz service, entry +// registration/deletion, and accessing channelz data. It also defines channelz +// metric struct formats. +// +// All APIs in this package are experimental. +package channelz + +import ( + "sort" + "sync" + "sync/atomic" + "time" + + "google.golang.org/grpc/grpclog" +) + +const ( + defaultMaxTraceEntry int32 = 30 +) + +var ( + db dbWrapper + idGen idGenerator + // EntryPerPage defines the number of channelz entries to be shown on a web page. + EntryPerPage = int64(50) + curState int32 + maxTraceEntry = defaultMaxTraceEntry +) + +// TurnOn turns on channelz data collection. +func TurnOn() { + if !IsOn() { + NewChannelzStorage() + atomic.StoreInt32(&curState, 1) + } +} + +// IsOn returns whether channelz data collection is on. 
+func IsOn() bool { + return atomic.CompareAndSwapInt32(&curState, 1, 1) +} + +// SetMaxTraceEntry sets maximum number of trace entry per entity (i.e. channel/subchannel). +// Setting it to 0 will disable channel tracing. +func SetMaxTraceEntry(i int32) { + atomic.StoreInt32(&maxTraceEntry, i) +} + +// ResetMaxTraceEntryToDefault resets the maximum number of trace entry per entity to default. +func ResetMaxTraceEntryToDefault() { + atomic.StoreInt32(&maxTraceEntry, defaultMaxTraceEntry) +} + +func getMaxTraceEntry() int { + i := atomic.LoadInt32(&maxTraceEntry) + return int(i) +} + +// dbWarpper wraps around a reference to internal channelz data storage, and +// provide synchronized functionality to set and get the reference. +type dbWrapper struct { + mu sync.RWMutex + DB *channelMap +} + +func (d *dbWrapper) set(db *channelMap) { + d.mu.Lock() + d.DB = db + d.mu.Unlock() +} + +func (d *dbWrapper) get() *channelMap { + d.mu.RLock() + defer d.mu.RUnlock() + return d.DB +} + +// NewChannelzStorage initializes channelz data storage and id generator. +// +// Note: This function is exported for testing purpose only. User should not call +// it in most cases. +func NewChannelzStorage() { + db.set(&channelMap{ + topLevelChannels: make(map[int64]struct{}), + channels: make(map[int64]*channel), + listenSockets: make(map[int64]*listenSocket), + normalSockets: make(map[int64]*normalSocket), + servers: make(map[int64]*server), + subChannels: make(map[int64]*subChannel), + }) + idGen.reset() +} + +// GetTopChannels returns a slice of top channel's ChannelMetric, along with a +// boolean indicating whether there's more top channels to be queried for. +// +// The arg id specifies that only top channel with id at or above it will be included +// in the result. The returned slice is up to a length of the arg maxResults or +// EntryPerPage if maxResults is zero, and is sorted in ascending id order. +func GetTopChannels(id int64, maxResults int64) ([]*ChannelMetric, bool) { + return db.get().GetTopChannels(id, maxResults) +} + +// GetServers returns a slice of server's ServerMetric, along with a +// boolean indicating whether there's more servers to be queried for. +// +// The arg id specifies that only server with id at or above it will be included +// in the result. The returned slice is up to a length of the arg maxResults or +// EntryPerPage if maxResults is zero, and is sorted in ascending id order. +func GetServers(id int64, maxResults int64) ([]*ServerMetric, bool) { + return db.get().GetServers(id, maxResults) +} + +// GetServerSockets returns a slice of server's (identified by id) normal socket's +// SocketMetric, along with a boolean indicating whether there's more sockets to +// be queried for. +// +// The arg startID specifies that only sockets with id at or above it will be +// included in the result. The returned slice is up to a length of the arg maxResults +// or EntryPerPage if maxResults is zero, and is sorted in ascending id order. +func GetServerSockets(id int64, startID int64, maxResults int64) ([]*SocketMetric, bool) { + return db.get().GetServerSockets(id, startID, maxResults) +} + +// GetChannel returns the ChannelMetric for the channel (identified by id). +func GetChannel(id int64) *ChannelMetric { + return db.get().GetChannel(id) +} + +// GetSubChannel returns the SubChannelMetric for the subchannel (identified by id). 
+func GetSubChannel(id int64) *SubChannelMetric { + return db.get().GetSubChannel(id) +} + +// GetSocket returns the SocketInternalMetric for the socket (identified by id). +func GetSocket(id int64) *SocketMetric { + return db.get().GetSocket(id) +} + +// GetServer returns the ServerMetric for the server (identified by id). +func GetServer(id int64) *ServerMetric { + return db.get().GetServer(id) +} + +// RegisterChannel registers the given channel c in channelz database with ref +// as its reference name, and add it to the child list of its parent (identified +// by pid). pid = 0 means no parent. It returns the unique channelz tracking id +// assigned to this channel. +func RegisterChannel(c Channel, pid int64, ref string) int64 { + id := idGen.genID() + cn := &channel{ + refName: ref, + c: c, + subChans: make(map[int64]string), + nestedChans: make(map[int64]string), + id: id, + pid: pid, + trace: &channelTrace{createdTime: time.Now(), events: make([]*TraceEvent, 0, getMaxTraceEntry())}, + } + if pid == 0 { + db.get().addChannel(id, cn, true, pid, ref) + } else { + db.get().addChannel(id, cn, false, pid, ref) + } + return id +} + +// RegisterSubChannel registers the given channel c in channelz database with ref +// as its reference name, and add it to the child list of its parent (identified +// by pid). It returns the unique channelz tracking id assigned to this subchannel. +func RegisterSubChannel(c Channel, pid int64, ref string) int64 { + if pid == 0 { + grpclog.Error("a SubChannel's parent id cannot be 0") + return 0 + } + id := idGen.genID() + sc := &subChannel{ + refName: ref, + c: c, + sockets: make(map[int64]string), + id: id, + pid: pid, + trace: &channelTrace{createdTime: time.Now(), events: make([]*TraceEvent, 0, getMaxTraceEntry())}, + } + db.get().addSubChannel(id, sc, pid, ref) + return id +} + +// RegisterServer registers the given server s in channelz database. It returns +// the unique channelz tracking id assigned to this server. +func RegisterServer(s Server, ref string) int64 { + id := idGen.genID() + svr := &server{ + refName: ref, + s: s, + sockets: make(map[int64]string), + listenSockets: make(map[int64]string), + id: id, + } + db.get().addServer(id, svr) + return id +} + +// RegisterListenSocket registers the given listen socket s in channelz database +// with ref as its reference name, and add it to the child list of its parent +// (identified by pid). It returns the unique channelz tracking id assigned to +// this listen socket. +func RegisterListenSocket(s Socket, pid int64, ref string) int64 { + if pid == 0 { + grpclog.Error("a ListenSocket's parent id cannot be 0") + return 0 + } + id := idGen.genID() + ls := &listenSocket{refName: ref, s: s, id: id, pid: pid} + db.get().addListenSocket(id, ls, pid, ref) + return id +} + +// RegisterNormalSocket registers the given normal socket s in channelz database +// with ref as its reference name, and add it to the child list of its parent +// (identified by pid). It returns the unique channelz tracking id assigned to +// this normal socket. +func RegisterNormalSocket(s Socket, pid int64, ref string) int64 { + if pid == 0 { + grpclog.Error("a NormalSocket's parent id cannot be 0") + return 0 + } + id := idGen.genID() + ns := &normalSocket{refName: ref, s: s, id: id, pid: pid} + db.get().addNormalSocket(id, ns, pid, ref) + return id +} + +// RemoveEntry removes an entry with unique channelz trakcing id to be id from +// channelz database. 
+func RemoveEntry(id int64) { + db.get().removeEntry(id) +} + +// TraceEventDesc is what the caller of AddTraceEvent should provide to describe the event to be added +// to the channel trace. +// The Parent field is optional. It is used for event that will be recorded in the entity's parent +// trace also. +type TraceEventDesc struct { + Desc string + Severity Severity + Parent *TraceEventDesc +} + +// AddTraceEvent adds trace related to the entity with specified id, using the provided TraceEventDesc. +func AddTraceEvent(id int64, desc *TraceEventDesc) { + if getMaxTraceEntry() == 0 { + return + } + db.get().traceEvent(id, desc) +} + +// channelMap is the storage data structure for channelz. +// Methods of channelMap can be divided in two two categories with respect to locking. +// 1. Methods acquire the global lock. +// 2. Methods that can only be called when global lock is held. +// A second type of method need always to be called inside a first type of method. +type channelMap struct { + mu sync.RWMutex + topLevelChannels map[int64]struct{} + servers map[int64]*server + channels map[int64]*channel + subChannels map[int64]*subChannel + listenSockets map[int64]*listenSocket + normalSockets map[int64]*normalSocket +} + +func (c *channelMap) addServer(id int64, s *server) { + c.mu.Lock() + s.cm = c + c.servers[id] = s + c.mu.Unlock() +} + +func (c *channelMap) addChannel(id int64, cn *channel, isTopChannel bool, pid int64, ref string) { + c.mu.Lock() + cn.cm = c + cn.trace.cm = c + c.channels[id] = cn + if isTopChannel { + c.topLevelChannels[id] = struct{}{} + } else { + c.findEntry(pid).addChild(id, cn) + } + c.mu.Unlock() +} + +func (c *channelMap) addSubChannel(id int64, sc *subChannel, pid int64, ref string) { + c.mu.Lock() + sc.cm = c + sc.trace.cm = c + c.subChannels[id] = sc + c.findEntry(pid).addChild(id, sc) + c.mu.Unlock() +} + +func (c *channelMap) addListenSocket(id int64, ls *listenSocket, pid int64, ref string) { + c.mu.Lock() + ls.cm = c + c.listenSockets[id] = ls + c.findEntry(pid).addChild(id, ls) + c.mu.Unlock() +} + +func (c *channelMap) addNormalSocket(id int64, ns *normalSocket, pid int64, ref string) { + c.mu.Lock() + ns.cm = c + c.normalSockets[id] = ns + c.findEntry(pid).addChild(id, ns) + c.mu.Unlock() +} + +// removeEntry triggers the removal of an entry, which may not indeed delete the entry, if it has to +// wait on the deletion of its children and until no other entity's channel trace references it. +// It may lead to a chain of entry deletion. For example, deleting the last socket of a gracefully +// shutting down server will lead to the server being also deleted. +func (c *channelMap) removeEntry(id int64) { + c.mu.Lock() + c.findEntry(id).triggerDelete() + c.mu.Unlock() +} + +// c.mu must be held by the caller +func (c *channelMap) decrTraceRefCount(id int64) { + e := c.findEntry(id) + if v, ok := e.(tracedChannel); ok { + v.decrTraceRefCount() + e.deleteSelfIfReady() + } +} + +// c.mu must be held by the caller. +func (c *channelMap) findEntry(id int64) entry { + var v entry + var ok bool + if v, ok = c.channels[id]; ok { + return v + } + if v, ok = c.subChannels[id]; ok { + return v + } + if v, ok = c.servers[id]; ok { + return v + } + if v, ok = c.listenSockets[id]; ok { + return v + } + if v, ok = c.normalSockets[id]; ok { + return v + } + return &dummyEntry{idNotFound: id} +} + +// c.mu must be held by the caller +// deleteEntry simply deletes an entry from the channelMap. 
Before calling this +// method, caller must check this entry is ready to be deleted, i.e removeEntry() +// has been called on it, and no children still exist. +// Conditionals are ordered by the expected frequency of deletion of each entity +// type, in order to optimize performance. +func (c *channelMap) deleteEntry(id int64) { + var ok bool + if _, ok = c.normalSockets[id]; ok { + delete(c.normalSockets, id) + return + } + if _, ok = c.subChannels[id]; ok { + delete(c.subChannels, id) + return + } + if _, ok = c.channels[id]; ok { + delete(c.channels, id) + delete(c.topLevelChannels, id) + return + } + if _, ok = c.listenSockets[id]; ok { + delete(c.listenSockets, id) + return + } + if _, ok = c.servers[id]; ok { + delete(c.servers, id) + return + } +} + +func (c *channelMap) traceEvent(id int64, desc *TraceEventDesc) { + c.mu.Lock() + child := c.findEntry(id) + childTC, ok := child.(tracedChannel) + if !ok { + c.mu.Unlock() + return + } + childTC.getChannelTrace().append(&TraceEvent{Desc: desc.Desc, Severity: desc.Severity, Timestamp: time.Now()}) + if desc.Parent != nil { + parent := c.findEntry(child.getParentID()) + var chanType RefChannelType + switch child.(type) { + case *channel: + chanType = RefChannel + case *subChannel: + chanType = RefSubChannel + } + if parentTC, ok := parent.(tracedChannel); ok { + parentTC.getChannelTrace().append(&TraceEvent{ + Desc: desc.Parent.Desc, + Severity: desc.Parent.Severity, + Timestamp: time.Now(), + RefID: id, + RefName: childTC.getRefName(), + RefType: chanType, + }) + childTC.incrTraceRefCount() + } + } + c.mu.Unlock() +} + +type int64Slice []int64 + +func (s int64Slice) Len() int { return len(s) } +func (s int64Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s int64Slice) Less(i, j int) bool { return s[i] < s[j] } + +func copyMap(m map[int64]string) map[int64]string { + n := make(map[int64]string) + for k, v := range m { + n[k] = v + } + return n +} + +func min(a, b int64) int64 { + if a < b { + return a + } + return b +} + +func (c *channelMap) GetTopChannels(id int64, maxResults int64) ([]*ChannelMetric, bool) { + if maxResults <= 0 { + maxResults = EntryPerPage + } + c.mu.RLock() + l := int64(len(c.topLevelChannels)) + ids := make([]int64, 0, l) + cns := make([]*channel, 0, min(l, maxResults)) + + for k := range c.topLevelChannels { + ids = append(ids, k) + } + sort.Sort(int64Slice(ids)) + idx := sort.Search(len(ids), func(i int) bool { return ids[i] >= id }) + count := int64(0) + var end bool + var t []*ChannelMetric + for i, v := range ids[idx:] { + if count == maxResults { + break + } + if cn, ok := c.channels[v]; ok { + cns = append(cns, cn) + t = append(t, &ChannelMetric{ + NestedChans: copyMap(cn.nestedChans), + SubChans: copyMap(cn.subChans), + }) + count++ + } + if i == len(ids[idx:])-1 { + end = true + break + } + } + c.mu.RUnlock() + if count == 0 { + end = true + } + + for i, cn := range cns { + t[i].ChannelData = cn.c.ChannelzMetric() + t[i].ID = cn.id + t[i].RefName = cn.refName + t[i].Trace = cn.trace.dumpData() + } + return t, end +} + +func (c *channelMap) GetServers(id, maxResults int64) ([]*ServerMetric, bool) { + if maxResults <= 0 { + maxResults = EntryPerPage + } + c.mu.RLock() + l := int64(len(c.servers)) + ids := make([]int64, 0, l) + ss := make([]*server, 0, min(l, maxResults)) + for k := range c.servers { + ids = append(ids, k) + } + sort.Sort(int64Slice(ids)) + idx := sort.Search(len(ids), func(i int) bool { return ids[i] >= id }) + count := int64(0) + var end bool + var s []*ServerMetric + for i, v := 
range ids[idx:] { + if count == maxResults { + break + } + if svr, ok := c.servers[v]; ok { + ss = append(ss, svr) + s = append(s, &ServerMetric{ + ListenSockets: copyMap(svr.listenSockets), + }) + count++ + } + if i == len(ids[idx:])-1 { + end = true + break + } + } + c.mu.RUnlock() + if count == 0 { + end = true + } + + for i, svr := range ss { + s[i].ServerData = svr.s.ChannelzMetric() + s[i].ID = svr.id + s[i].RefName = svr.refName + } + return s, end +} + +func (c *channelMap) GetServerSockets(id int64, startID int64, maxResults int64) ([]*SocketMetric, bool) { + if maxResults <= 0 { + maxResults = EntryPerPage + } + var svr *server + var ok bool + c.mu.RLock() + if svr, ok = c.servers[id]; !ok { + // server with id doesn't exist. + c.mu.RUnlock() + return nil, true + } + svrskts := svr.sockets + l := int64(len(svrskts)) + ids := make([]int64, 0, l) + sks := make([]*normalSocket, 0, min(l, maxResults)) + for k := range svrskts { + ids = append(ids, k) + } + sort.Sort(int64Slice(ids)) + idx := sort.Search(len(ids), func(i int) bool { return ids[i] >= startID }) + count := int64(0) + var end bool + for i, v := range ids[idx:] { + if count == maxResults { + break + } + if ns, ok := c.normalSockets[v]; ok { + sks = append(sks, ns) + count++ + } + if i == len(ids[idx:])-1 { + end = true + break + } + } + c.mu.RUnlock() + if count == 0 { + end = true + } + var s []*SocketMetric + for _, ns := range sks { + sm := &SocketMetric{} + sm.SocketData = ns.s.ChannelzMetric() + sm.ID = ns.id + sm.RefName = ns.refName + s = append(s, sm) + } + return s, end +} + +func (c *channelMap) GetChannel(id int64) *ChannelMetric { + cm := &ChannelMetric{} + var cn *channel + var ok bool + c.mu.RLock() + if cn, ok = c.channels[id]; !ok { + // channel with id doesn't exist. + c.mu.RUnlock() + return nil + } + cm.NestedChans = copyMap(cn.nestedChans) + cm.SubChans = copyMap(cn.subChans) + // cn.c can be set to &dummyChannel{} when deleteSelfFromMap is called. Save a copy of cn.c when + // holding the lock to prevent potential data race. + chanCopy := cn.c + c.mu.RUnlock() + cm.ChannelData = chanCopy.ChannelzMetric() + cm.ID = cn.id + cm.RefName = cn.refName + cm.Trace = cn.trace.dumpData() + return cm +} + +func (c *channelMap) GetSubChannel(id int64) *SubChannelMetric { + cm := &SubChannelMetric{} + var sc *subChannel + var ok bool + c.mu.RLock() + if sc, ok = c.subChannels[id]; !ok { + // subchannel with id doesn't exist. + c.mu.RUnlock() + return nil + } + cm.Sockets = copyMap(sc.sockets) + // sc.c can be set to &dummyChannel{} when deleteSelfFromMap is called. Save a copy of sc.c when + // holding the lock to prevent potential data race. 
+ chanCopy := sc.c + c.mu.RUnlock() + cm.ChannelData = chanCopy.ChannelzMetric() + cm.ID = sc.id + cm.RefName = sc.refName + cm.Trace = sc.trace.dumpData() + return cm +} + +func (c *channelMap) GetSocket(id int64) *SocketMetric { + sm := &SocketMetric{} + c.mu.RLock() + if ls, ok := c.listenSockets[id]; ok { + c.mu.RUnlock() + sm.SocketData = ls.s.ChannelzMetric() + sm.ID = ls.id + sm.RefName = ls.refName + return sm + } + if ns, ok := c.normalSockets[id]; ok { + c.mu.RUnlock() + sm.SocketData = ns.s.ChannelzMetric() + sm.ID = ns.id + sm.RefName = ns.refName + return sm + } + c.mu.RUnlock() + return nil +} + +func (c *channelMap) GetServer(id int64) *ServerMetric { + sm := &ServerMetric{} + var svr *server + var ok bool + c.mu.RLock() + if svr, ok = c.servers[id]; !ok { + c.mu.RUnlock() + return nil + } + sm.ListenSockets = copyMap(svr.listenSockets) + c.mu.RUnlock() + sm.ID = svr.id + sm.RefName = svr.refName + sm.ServerData = svr.s.ChannelzMetric() + return sm +} + +type idGenerator struct { + id int64 +} + +func (i *idGenerator) reset() { + atomic.StoreInt64(&i.id, 0) +} + +func (i *idGenerator) genID() int64 { + return atomic.AddInt64(&i.id, 1) +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/types.go b/vendor/google.golang.org/grpc/internal/channelz/types.go new file mode 100644 index 0000000..17c2274 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/types.go @@ -0,0 +1,702 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package channelz + +import ( + "net" + "sync" + "sync/atomic" + "time" + + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" +) + +// entry represents a node in the channelz database. +type entry interface { + // addChild adds a child e, whose channelz id is id to child list + addChild(id int64, e entry) + // deleteChild deletes a child with channelz id to be id from child list + deleteChild(id int64) + // triggerDelete tries to delete self from channelz database. However, if child + // list is not empty, then deletion from the database is on hold until the last + // child is deleted from database. + triggerDelete() + // deleteSelfIfReady check whether triggerDelete() has been called before, and whether child + // list is now empty. If both conditions are met, then delete self from database. + deleteSelfIfReady() + // getParentID returns parent ID of the entry. 0 value parent ID means no parent. + getParentID() int64 +} + +// dummyEntry is a fake entry to handle entry not found case. +type dummyEntry struct { + idNotFound int64 +} + +func (d *dummyEntry) addChild(id int64, e entry) { + // Note: It is possible for a normal program to reach here under race condition. + // For example, there could be a race between ClientConn.Close() info being propagated + // to addrConn and http2Client. ClientConn.Close() cancel the context and result + // in http2Client to error. 
The error info is then caught by transport monitor + // and before addrConn.tearDown() is called in side ClientConn.Close(). Therefore, + // the addrConn will create a new transport. And when registering the new transport in + // channelz, its parent addrConn could have already been torn down and deleted + // from channelz tracking, and thus reach the code here. + grpclog.Infof("attempt to add child of type %T with id %d to a parent (id=%d) that doesn't currently exist", e, id, d.idNotFound) +} + +func (d *dummyEntry) deleteChild(id int64) { + // It is possible for a normal program to reach here under race condition. + // Refer to the example described in addChild(). + grpclog.Infof("attempt to delete child with id %d from a parent (id=%d) that doesn't currently exist", id, d.idNotFound) +} + +func (d *dummyEntry) triggerDelete() { + grpclog.Warningf("attempt to delete an entry (id=%d) that doesn't currently exist", d.idNotFound) +} + +func (*dummyEntry) deleteSelfIfReady() { + // code should not reach here. deleteSelfIfReady is always called on an existing entry. +} + +func (*dummyEntry) getParentID() int64 { + return 0 +} + +// ChannelMetric defines the info channelz provides for a specific Channel, which +// includes ChannelInternalMetric and channelz-specific data, such as channelz id, +// child list, etc. +type ChannelMetric struct { + // ID is the channelz id of this channel. + ID int64 + // RefName is the human readable reference string of this channel. + RefName string + // ChannelData contains channel internal metric reported by the channel through + // ChannelzMetric(). + ChannelData *ChannelInternalMetric + // NestedChans tracks the nested channel type children of this channel in the format of + // a map from nested channel channelz id to corresponding reference string. + NestedChans map[int64]string + // SubChans tracks the subchannel type children of this channel in the format of a + // map from subchannel channelz id to corresponding reference string. + SubChans map[int64]string + // Sockets tracks the socket type children of this channel in the format of a map + // from socket channelz id to corresponding reference string. + // Note current grpc implementation doesn't allow channel having sockets directly, + // therefore, this is field is unused. + Sockets map[int64]string + // Trace contains the most recent traced events. + Trace *ChannelTrace +} + +// SubChannelMetric defines the info channelz provides for a specific SubChannel, +// which includes ChannelInternalMetric and channelz-specific data, such as +// channelz id, child list, etc. +type SubChannelMetric struct { + // ID is the channelz id of this subchannel. + ID int64 + // RefName is the human readable reference string of this subchannel. + RefName string + // ChannelData contains subchannel internal metric reported by the subchannel + // through ChannelzMetric(). + ChannelData *ChannelInternalMetric + // NestedChans tracks the nested channel type children of this subchannel in the format of + // a map from nested channel channelz id to corresponding reference string. + // Note current grpc implementation doesn't allow subchannel to have nested channels + // as children, therefore, this field is unused. + NestedChans map[int64]string + // SubChans tracks the subchannel type children of this subchannel in the format of a + // map from subchannel channelz id to corresponding reference string. 
+ // Note current grpc implementation doesn't allow subchannel to have subchannels + // as children, therefore, this field is unused. + SubChans map[int64]string + // Sockets tracks the socket type children of this subchannel in the format of a map + // from socket channelz id to corresponding reference string. + Sockets map[int64]string + // Trace contains the most recent traced events. + Trace *ChannelTrace +} + +// ChannelInternalMetric defines the struct that the implementor of Channel interface +// should return from ChannelzMetric(). +type ChannelInternalMetric struct { + // current connectivity state of the channel. + State connectivity.State + // The target this channel originally tried to connect to. May be absent + Target string + // The number of calls started on the channel. + CallsStarted int64 + // The number of calls that have completed with an OK status. + CallsSucceeded int64 + // The number of calls that have a completed with a non-OK status. + CallsFailed int64 + // The last time a call was started on the channel. + LastCallStartedTimestamp time.Time +} + +// ChannelTrace stores traced events on a channel/subchannel and related info. +type ChannelTrace struct { + // EventNum is the number of events that ever got traced (i.e. including those that have been deleted) + EventNum int64 + // CreationTime is the creation time of the trace. + CreationTime time.Time + // Events stores the most recent trace events (up to $maxTraceEntry, newer event will overwrite the + // oldest one) + Events []*TraceEvent +} + +// TraceEvent represent a single trace event +type TraceEvent struct { + // Desc is a simple description of the trace event. + Desc string + // Severity states the severity of this trace event. + Severity Severity + // Timestamp is the event time. + Timestamp time.Time + // RefID is the id of the entity that gets referenced in the event. RefID is 0 if no other entity is + // involved in this event. + // e.g. SubChannel (id: 4[]) Created. --> RefID = 4, RefName = "" (inside []) + RefID int64 + // RefName is the reference name for the entity that gets referenced in the event. + RefName string + // RefType indicates the referenced entity type, i.e Channel or SubChannel. + RefType RefChannelType +} + +// Channel is the interface that should be satisfied in order to be tracked by +// channelz as Channel or SubChannel. +type Channel interface { + ChannelzMetric() *ChannelInternalMetric +} + +type dummyChannel struct{} + +func (d *dummyChannel) ChannelzMetric() *ChannelInternalMetric { + return &ChannelInternalMetric{} +} + +type channel struct { + refName string + c Channel + closeCalled bool + nestedChans map[int64]string + subChans map[int64]string + id int64 + pid int64 + cm *channelMap + trace *channelTrace + // traceRefCount is the number of trace events that reference this channel. + // Non-zero traceRefCount means the trace of this channel cannot be deleted. 
+ traceRefCount int32 +} + +func (c *channel) addChild(id int64, e entry) { + switch v := e.(type) { + case *subChannel: + c.subChans[id] = v.refName + case *channel: + c.nestedChans[id] = v.refName + default: + grpclog.Errorf("cannot add a child (id = %d) of type %T to a channel", id, e) + } +} + +func (c *channel) deleteChild(id int64) { + delete(c.subChans, id) + delete(c.nestedChans, id) + c.deleteSelfIfReady() +} + +func (c *channel) triggerDelete() { + c.closeCalled = true + c.deleteSelfIfReady() +} + +func (c *channel) getParentID() int64 { + return c.pid +} + +// deleteSelfFromTree tries to delete the channel from the channelz entry relation tree, which means +// deleting the channel reference from its parent's child list. +// +// In order for a channel to be deleted from the tree, it must meet the criteria that, removal of the +// corresponding grpc object has been invoked, and the channel does not have any children left. +// +// The returned boolean value indicates whether the channel has been successfully deleted from tree. +func (c *channel) deleteSelfFromTree() (deleted bool) { + if !c.closeCalled || len(c.subChans)+len(c.nestedChans) != 0 { + return false + } + // not top channel + if c.pid != 0 { + c.cm.findEntry(c.pid).deleteChild(c.id) + } + return true +} + +// deleteSelfFromMap checks whether it is valid to delete the channel from the map, which means +// deleting the channel from channelz's tracking entirely. Users can no longer use id to query the +// channel, and its memory will be garbage collected. +// +// The trace reference count of the channel must be 0 in order to be deleted from the map. This is +// specified in the channel tracing gRFC that as long as some other trace has reference to an entity, +// the trace of the referenced entity must not be deleted. In order to release the resource allocated +// by grpc, the reference to the grpc object is reset to a dummy object. +// +// deleteSelfFromMap must be called after deleteSelfFromTree returns true. +// +// It returns a bool to indicate whether the channel can be safely deleted from map. +func (c *channel) deleteSelfFromMap() (delete bool) { + if c.getTraceRefCount() != 0 { + c.c = &dummyChannel{} + return false + } + return true +} + +// deleteSelfIfReady tries to delete the channel itself from the channelz database. +// The delete process includes two steps: +// 1. delete the channel from the entry relation tree, i.e. delete the channel reference from its +// parent's child list. +// 2. delete the channel from the map, i.e. delete the channel entirely from channelz. Lookup by id +// will return entry not found error. 
+func (c *channel) deleteSelfIfReady() { + if !c.deleteSelfFromTree() { + return + } + if !c.deleteSelfFromMap() { + return + } + c.cm.deleteEntry(c.id) + c.trace.clear() +} + +func (c *channel) getChannelTrace() *channelTrace { + return c.trace +} + +func (c *channel) incrTraceRefCount() { + atomic.AddInt32(&c.traceRefCount, 1) +} + +func (c *channel) decrTraceRefCount() { + atomic.AddInt32(&c.traceRefCount, -1) +} + +func (c *channel) getTraceRefCount() int { + i := atomic.LoadInt32(&c.traceRefCount) + return int(i) +} + +func (c *channel) getRefName() string { + return c.refName +} + +type subChannel struct { + refName string + c Channel + closeCalled bool + sockets map[int64]string + id int64 + pid int64 + cm *channelMap + trace *channelTrace + traceRefCount int32 +} + +func (sc *subChannel) addChild(id int64, e entry) { + if v, ok := e.(*normalSocket); ok { + sc.sockets[id] = v.refName + } else { + grpclog.Errorf("cannot add a child (id = %d) of type %T to a subChannel", id, e) + } +} + +func (sc *subChannel) deleteChild(id int64) { + delete(sc.sockets, id) + sc.deleteSelfIfReady() +} + +func (sc *subChannel) triggerDelete() { + sc.closeCalled = true + sc.deleteSelfIfReady() +} + +func (sc *subChannel) getParentID() int64 { + return sc.pid +} + +// deleteSelfFromTree tries to delete the subchannel from the channelz entry relation tree, which +// means deleting the subchannel reference from its parent's child list. +// +// In order for a subchannel to be deleted from the tree, it must meet the criteria that, removal of +// the corresponding grpc object has been invoked, and the subchannel does not have any children left. +// +// The returned boolean value indicates whether the channel has been successfully deleted from tree. +func (sc *subChannel) deleteSelfFromTree() (deleted bool) { + if !sc.closeCalled || len(sc.sockets) != 0 { + return false + } + sc.cm.findEntry(sc.pid).deleteChild(sc.id) + return true +} + +// deleteSelfFromMap checks whether it is valid to delete the subchannel from the map, which means +// deleting the subchannel from channelz's tracking entirely. Users can no longer use id to query +// the subchannel, and its memory will be garbage collected. +// +// The trace reference count of the subchannel must be 0 in order to be deleted from the map. This is +// specified in the channel tracing gRFC that as long as some other trace has reference to an entity, +// the trace of the referenced entity must not be deleted. In order to release the resource allocated +// by grpc, the reference to the grpc object is reset to a dummy object. +// +// deleteSelfFromMap must be called after deleteSelfFromTree returns true. +// +// It returns a bool to indicate whether the channel can be safely deleted from map. +func (sc *subChannel) deleteSelfFromMap() (delete bool) { + if sc.getTraceRefCount() != 0 { + // free the grpc struct (i.e. addrConn) + sc.c = &dummyChannel{} + return false + } + return true +} + +// deleteSelfIfReady tries to delete the subchannel itself from the channelz database. +// The delete process includes two steps: +// 1. delete the subchannel from the entry relation tree, i.e. delete the subchannel reference from +// its parent's child list. +// 2. delete the subchannel from the map, i.e. delete the subchannel entirely from channelz. Lookup +// by id will return entry not found error. 
+func (sc *subChannel) deleteSelfIfReady() { + if !sc.deleteSelfFromTree() { + return + } + if !sc.deleteSelfFromMap() { + return + } + sc.cm.deleteEntry(sc.id) + sc.trace.clear() +} + +func (sc *subChannel) getChannelTrace() *channelTrace { + return sc.trace +} + +func (sc *subChannel) incrTraceRefCount() { + atomic.AddInt32(&sc.traceRefCount, 1) +} + +func (sc *subChannel) decrTraceRefCount() { + atomic.AddInt32(&sc.traceRefCount, -1) +} + +func (sc *subChannel) getTraceRefCount() int { + i := atomic.LoadInt32(&sc.traceRefCount) + return int(i) +} + +func (sc *subChannel) getRefName() string { + return sc.refName +} + +// SocketMetric defines the info channelz provides for a specific Socket, which +// includes SocketInternalMetric and channelz-specific data, such as channelz id, etc. +type SocketMetric struct { + // ID is the channelz id of this socket. + ID int64 + // RefName is the human readable reference string of this socket. + RefName string + // SocketData contains socket internal metric reported by the socket through + // ChannelzMetric(). + SocketData *SocketInternalMetric +} + +// SocketInternalMetric defines the struct that the implementor of Socket interface +// should return from ChannelzMetric(). +type SocketInternalMetric struct { + // The number of streams that have been started. + StreamsStarted int64 + // The number of streams that have ended successfully: + // On client side, receiving frame with eos bit set. + // On server side, sending frame with eos bit set. + StreamsSucceeded int64 + // The number of streams that have ended unsuccessfully: + // On client side, termination without receiving frame with eos bit set. + // On server side, termination without sending frame with eos bit set. + StreamsFailed int64 + // The number of messages successfully sent on this socket. + MessagesSent int64 + MessagesReceived int64 + // The number of keep alives sent. This is typically implemented with HTTP/2 + // ping messages. + KeepAlivesSent int64 + // The last time a stream was created by this endpoint. Usually unset for + // servers. + LastLocalStreamCreatedTimestamp time.Time + // The last time a stream was created by the remote endpoint. Usually unset + // for clients. + LastRemoteStreamCreatedTimestamp time.Time + // The last time a message was sent by this endpoint. + LastMessageSentTimestamp time.Time + // The last time a message was received by this endpoint. + LastMessageReceivedTimestamp time.Time + // The amount of window, granted to the local endpoint by the remote endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. + LocalFlowControlWindow int64 + // The amount of window, granted to the remote endpoint by the local endpoint. + // This may be slightly out of date due to network latency. This does NOT + // include stream level or TCP level flow control info. + RemoteFlowControlWindow int64 + // The locally bound address. + LocalAddr net.Addr + // The remote bound address. May be absent. + RemoteAddr net.Addr + // Optional, represents the name of the remote endpoint, if different than + // the original target name. + RemoteName string + SocketOptions *SocketOptionData + Security credentials.ChannelzSecurityValue +} + +// Socket is the interface that should be satisfied in order to be tracked by +// channelz as Socket. 
+type Socket interface { + ChannelzMetric() *SocketInternalMetric +} + +type listenSocket struct { + refName string + s Socket + id int64 + pid int64 + cm *channelMap +} + +func (ls *listenSocket) addChild(id int64, e entry) { + grpclog.Errorf("cannot add a child (id = %d) of type %T to a listen socket", id, e) +} + +func (ls *listenSocket) deleteChild(id int64) { + grpclog.Errorf("cannot delete a child (id = %d) from a listen socket", id) +} + +func (ls *listenSocket) triggerDelete() { + ls.cm.deleteEntry(ls.id) + ls.cm.findEntry(ls.pid).deleteChild(ls.id) +} + +func (ls *listenSocket) deleteSelfIfReady() { + grpclog.Errorf("cannot call deleteSelfIfReady on a listen socket") +} + +func (ls *listenSocket) getParentID() int64 { + return ls.pid +} + +type normalSocket struct { + refName string + s Socket + id int64 + pid int64 + cm *channelMap +} + +func (ns *normalSocket) addChild(id int64, e entry) { + grpclog.Errorf("cannot add a child (id = %d) of type %T to a normal socket", id, e) +} + +func (ns *normalSocket) deleteChild(id int64) { + grpclog.Errorf("cannot delete a child (id = %d) from a normal socket", id) +} + +func (ns *normalSocket) triggerDelete() { + ns.cm.deleteEntry(ns.id) + ns.cm.findEntry(ns.pid).deleteChild(ns.id) +} + +func (ns *normalSocket) deleteSelfIfReady() { + grpclog.Errorf("cannot call deleteSelfIfReady on a normal socket") +} + +func (ns *normalSocket) getParentID() int64 { + return ns.pid +} + +// ServerMetric defines the info channelz provides for a specific Server, which +// includes ServerInternalMetric and channelz-specific data, such as channelz id, +// child list, etc. +type ServerMetric struct { + // ID is the channelz id of this server. + ID int64 + // RefName is the human readable reference string of this server. + RefName string + // ServerData contains server internal metric reported by the server through + // ChannelzMetric(). + ServerData *ServerInternalMetric + // ListenSockets tracks the listener socket type children of this server in the + // format of a map from socket channelz id to corresponding reference string. + ListenSockets map[int64]string +} + +// ServerInternalMetric defines the struct that the implementor of Server interface +// should return from ChannelzMetric(). +type ServerInternalMetric struct { + // The number of incoming calls started on the server. + CallsStarted int64 + // The number of incoming calls that have completed with an OK status. + CallsSucceeded int64 + // The number of incoming calls that have a completed with a non-OK status. + CallsFailed int64 + // The last time a call was started on the server. + LastCallStartedTimestamp time.Time +} + +// Server is the interface to be satisfied in order to be tracked by channelz as +// Server. 
+type Server interface { + ChannelzMetric() *ServerInternalMetric +} + +type server struct { + refName string + s Server + closeCalled bool + sockets map[int64]string + listenSockets map[int64]string + id int64 + cm *channelMap +} + +func (s *server) addChild(id int64, e entry) { + switch v := e.(type) { + case *normalSocket: + s.sockets[id] = v.refName + case *listenSocket: + s.listenSockets[id] = v.refName + default: + grpclog.Errorf("cannot add a child (id = %d) of type %T to a server", id, e) + } +} + +func (s *server) deleteChild(id int64) { + delete(s.sockets, id) + delete(s.listenSockets, id) + s.deleteSelfIfReady() +} + +func (s *server) triggerDelete() { + s.closeCalled = true + s.deleteSelfIfReady() +} + +func (s *server) deleteSelfIfReady() { + if !s.closeCalled || len(s.sockets)+len(s.listenSockets) != 0 { + return + } + s.cm.deleteEntry(s.id) +} + +func (s *server) getParentID() int64 { + return 0 +} + +type tracedChannel interface { + getChannelTrace() *channelTrace + incrTraceRefCount() + decrTraceRefCount() + getRefName() string +} + +type channelTrace struct { + cm *channelMap + createdTime time.Time + eventCount int64 + mu sync.Mutex + events []*TraceEvent +} + +func (c *channelTrace) append(e *TraceEvent) { + c.mu.Lock() + if len(c.events) == getMaxTraceEntry() { + del := c.events[0] + c.events = c.events[1:] + if del.RefID != 0 { + // start recursive cleanup in a goroutine to not block the call originated from grpc. + go func() { + // need to acquire c.cm.mu lock to call the unlocked attemptCleanup func. + c.cm.mu.Lock() + c.cm.decrTraceRefCount(del.RefID) + c.cm.mu.Unlock() + }() + } + } + e.Timestamp = time.Now() + c.events = append(c.events, e) + c.eventCount++ + c.mu.Unlock() +} + +func (c *channelTrace) clear() { + c.mu.Lock() + for _, e := range c.events { + if e.RefID != 0 { + // caller should have already held the c.cm.mu lock. + c.cm.decrTraceRefCount(e.RefID) + } + } + c.mu.Unlock() +} + +// Severity is the severity level of a trace event. +// The canonical enumeration of all valid values is here: +// https://github.com/grpc/grpc-proto/blob/9b13d199cc0d4703c7ea26c9c330ba695866eb23/grpc/channelz/v1/channelz.proto#L126. +type Severity int + +const ( + // CtUNKNOWN indicates unknown severity of a trace event. + CtUNKNOWN Severity = iota + // CtINFO indicates info level severity of a trace event. + CtINFO + // CtWarning indicates warning level severity of a trace event. + CtWarning + // CtError indicates error level severity of a trace event. + CtError +) + +// RefChannelType is the type of the entity being referenced in a trace event. +type RefChannelType int + +const ( + // RefChannel indicates the referenced entity is a Channel. + RefChannel RefChannelType = iota + // RefSubChannel indicates the referenced entity is a SubChannel. + RefSubChannel +) + +func (c *channelTrace) dumpData() *ChannelTrace { + c.mu.Lock() + ct := &ChannelTrace{EventNum: c.eventCount, CreationTime: c.createdTime} + ct.Events = c.events[:len(c.events)] + c.mu.Unlock() + return ct +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/types_linux.go b/vendor/google.golang.org/grpc/internal/channelz/types_linux.go new file mode 100644 index 0000000..692dd61 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/types_linux.go @@ -0,0 +1,53 @@ +// +build !appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package channelz + +import ( + "syscall" + + "golang.org/x/sys/unix" +) + +// SocketOptionData defines the struct to hold socket option data, and related +// getter function to obtain info from fd. +type SocketOptionData struct { + Linger *unix.Linger + RecvTimeout *unix.Timeval + SendTimeout *unix.Timeval + TCPInfo *unix.TCPInfo +} + +// Getsockopt defines the function to get socket options requested by channelz. +// It is to be passed to syscall.RawConn.Control(). +func (s *SocketOptionData) Getsockopt(fd uintptr) { + if v, err := unix.GetsockoptLinger(int(fd), syscall.SOL_SOCKET, syscall.SO_LINGER); err == nil { + s.Linger = v + } + if v, err := unix.GetsockoptTimeval(int(fd), syscall.SOL_SOCKET, syscall.SO_RCVTIMEO); err == nil { + s.RecvTimeout = v + } + if v, err := unix.GetsockoptTimeval(int(fd), syscall.SOL_SOCKET, syscall.SO_SNDTIMEO); err == nil { + s.SendTimeout = v + } + if v, err := unix.GetsockoptTCPInfo(int(fd), syscall.SOL_TCP, syscall.TCP_INFO); err == nil { + s.TCPInfo = v + } +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/types_nonlinux.go b/vendor/google.golang.org/grpc/internal/channelz/types_nonlinux.go new file mode 100644 index 0000000..79edbef --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/types_nonlinux.go @@ -0,0 +1,44 @@ +// +build !linux appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package channelz + +import ( + "sync" + + "google.golang.org/grpc/grpclog" +) + +var once sync.Once + +// SocketOptionData defines the struct to hold socket option data, and related +// getter function to obtain info from fd. +// Windows OS doesn't support Socket Option +type SocketOptionData struct { +} + +// Getsockopt defines the function to get socket options requested by channelz. +// It is to be passed to syscall.RawConn.Control(). +// Windows OS doesn't support Socket Option +func (s *SocketOptionData) Getsockopt(fd uintptr) { + once.Do(func() { + grpclog.Warningln("Channelz: socket options are not supported on non-linux os and appengine.") + }) +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/util_linux.go b/vendor/google.golang.org/grpc/internal/channelz/util_linux.go new file mode 100644 index 0000000..fdf409d --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/util_linux.go @@ -0,0 +1,39 @@ +// +build linux,!appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package channelz + +import ( + "syscall" +) + +// GetSocketOption gets the socket option info of the conn. +func GetSocketOption(socket interface{}) *SocketOptionData { + c, ok := socket.(syscall.Conn) + if !ok { + return nil + } + data := &SocketOptionData{} + if rawConn, err := c.SyscallConn(); err == nil { + rawConn.Control(data.Getsockopt) + return data + } + return nil +} diff --git a/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go b/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go new file mode 100644 index 0000000..8864a08 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/channelz/util_nonlinux.go @@ -0,0 +1,26 @@ +// +build !linux appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package channelz + +// GetSocketOption gets the socket option info of the conn. +func GetSocketOption(c interface{}) *SocketOptionData { + return nil +} diff --git a/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go new file mode 100644 index 0000000..11be7cd --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go @@ -0,0 +1,64 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package envconfig contains grpc settings configured by environment variables. +package envconfig + +import ( + "os" + "strings" +) + +const ( + prefix = "GRPC_GO_" + retryStr = prefix + "RETRY" + requireHandshakeStr = prefix + "REQUIRE_HANDSHAKE" +) + +// RequireHandshakeSetting describes the settings for handshaking. +type RequireHandshakeSetting int + +const ( + // RequireHandshakeOn indicates to wait for handshake before considering a + // connection ready/successful. + RequireHandshakeOn RequireHandshakeSetting = iota + // RequireHandshakeOff indicates to not wait for handshake before + // considering a connection ready/successful. 
+ RequireHandshakeOff +) + +var ( + // Retry is set if retry is explicitly enabled via "GRPC_GO_RETRY=on". + Retry = strings.EqualFold(os.Getenv(retryStr), "on") + // RequireHandshake is set based upon the GRPC_GO_REQUIRE_HANDSHAKE + // environment variable. + // + // Will be removed after the 1.18 release. + RequireHandshake = RequireHandshakeOn +) + +func init() { + switch strings.ToLower(os.Getenv(requireHandshakeStr)) { + case "on": + fallthrough + default: + RequireHandshake = RequireHandshakeOn + case "off": + RequireHandshake = RequireHandshakeOff + } +} diff --git a/vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go b/vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go new file mode 100644 index 0000000..200b115 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go @@ -0,0 +1,56 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package grpcrand implements math/rand functions in a concurrent-safe way +// with a global random source, independent of math/rand's global source. +package grpcrand + +import ( + "math/rand" + "sync" + "time" +) + +var ( + r = rand.New(rand.NewSource(time.Now().UnixNano())) + mu sync.Mutex +) + +// Int63n implements rand.Int63n on the grpcrand global source. +func Int63n(n int64) int64 { + mu.Lock() + res := r.Int63n(n) + mu.Unlock() + return res +} + +// Intn implements rand.Intn on the grpcrand global source. +func Intn(n int) int { + mu.Lock() + res := r.Intn(n) + mu.Unlock() + return res +} + +// Float64 implements rand.Float64 on the grpcrand global source. +func Float64() float64 { + mu.Lock() + res := r.Float64() + mu.Unlock() + return res +} diff --git a/vendor/google.golang.org/grpc/internal/grpcsync/event.go b/vendor/google.golang.org/grpc/internal/grpcsync/event.go new file mode 100644 index 0000000..fbe697c --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/grpcsync/event.go @@ -0,0 +1,61 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package grpcsync implements additional synchronization primitives built upon +// the sync package. +package grpcsync + +import ( + "sync" + "sync/atomic" +) + +// Event represents a one-time event that may occur in the future. +type Event struct { + fired int32 + c chan struct{} + o sync.Once +} + +// Fire causes e to complete. It is safe to call multiple times, and +// concurrently. 
It returns true iff this call to Fire caused the signaling +// channel returned by Done to close. +func (e *Event) Fire() bool { + ret := false + e.o.Do(func() { + atomic.StoreInt32(&e.fired, 1) + close(e.c) + ret = true + }) + return ret +} + +// Done returns a channel that will be closed when Fire is called. +func (e *Event) Done() <-chan struct{} { + return e.c +} + +// HasFired returns true if Fire has been called. +func (e *Event) HasFired() bool { + return atomic.LoadInt32(&e.fired) == 1 +} + +// NewEvent returns a new, ready-to-use Event. +func NewEvent() *Event { + return &Event{c: make(chan struct{})} +} diff --git a/vendor/google.golang.org/grpc/internal/grpctest/grpctest.go b/vendor/google.golang.org/grpc/internal/grpctest/grpctest.go new file mode 100644 index 0000000..8acca95 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/grpctest/grpctest.go @@ -0,0 +1,69 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package grpctest implements testing helpers. +package grpctest + +import ( + "reflect" + "strings" + "testing" +) + +func getTestFunc(t *testing.T, xv reflect.Value, name string) func(*testing.T) { + if m := xv.MethodByName(name); m.IsValid() { + if f, ok := m.Interface().(func(*testing.T)); ok { + return f + } + // Method exists but has the wrong type signature. + t.Fatalf("grpctest: function %v has unexpected signature (%T)", name, m.Interface()) + } + return func(*testing.T) {} +} + +// RunSubTests runs all "Test___" functions that are methods of x as subtests +// of the current test. If x contains methods "Setup(*testing.T)" or +// "Teardown(*testing.T)", those are run before or after each of the test +// functions, respectively. +// +// For example usage, see example_test.go. Run it using: +// $ go test -v -run TestExample . +// +// To run a specific test/subtest: +// $ go test -v -run 'TestExample/^Something$' . +func RunSubTests(t *testing.T, x interface{}) { + xt := reflect.TypeOf(x) + xv := reflect.ValueOf(x) + + setup := getTestFunc(t, xv, "Setup") + teardown := getTestFunc(t, xv, "Teardown") + + for i := 0; i < xt.NumMethod(); i++ { + methodName := xt.Method(i).Name + if !strings.HasPrefix(methodName, "Test") { + continue + } + tfunc := getTestFunc(t, xv, methodName) + t.Run(strings.TrimPrefix(methodName, "Test"), func(t *testing.T) { + setup(t) + // defer teardown to guarantee it is run even if tfunc uses t.Fatal() + defer teardown(t) + tfunc(t) + }) + } +} diff --git a/vendor/google.golang.org/grpc/internal/internal.go b/vendor/google.golang.org/grpc/internal/internal.go new file mode 100644 index 0000000..c1d2c69 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/internal.go @@ -0,0 +1,54 @@ +/* + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package internal contains gRPC-internal code, to avoid polluting +// the godoc of the top-level grpc package. It must not import any grpc +// symbols to avoid circular dependencies. +package internal + +import ( + "context" + "time" +) + +var ( + // WithResolverBuilder is exported by dialoptions.go + WithResolverBuilder interface{} // func (resolver.Builder) grpc.DialOption + // WithHealthCheckFunc is not exported by dialoptions.go + WithHealthCheckFunc interface{} // func (HealthChecker) DialOption + // HealthCheckFunc is used to provide client-side LB channel health checking + HealthCheckFunc HealthChecker + // BalancerUnregister is exported by package balancer to unregister a balancer. + BalancerUnregister func(name string) + // KeepaliveMinPingTime is the minimum ping interval. This must be 10s by + // default, but tests may wish to set it lower for convenience. + KeepaliveMinPingTime = 10 * time.Second +) + +// HealthChecker defines the signature of the client-side LB channel health checking function. +type HealthChecker func(ctx context.Context, newStream func() (interface{}, error), reportHealth func(bool), serviceName string) error + +const ( + // CredsBundleModeFallback switches GoogleDefaultCreds to fallback mode. + CredsBundleModeFallback = "fallback" + // CredsBundleModeBalancer switches GoogleDefaultCreds to grpclb balancer + // mode. + CredsBundleModeBalancer = "balancer" + // CredsBundleModeBackendFromBalancer switches GoogleDefaultCreds to mode + // that supports backend returned by grpclb balancer. + CredsBundleModeBackendFromBalancer = "backend-from-balancer" +) diff --git a/vendor/google.golang.org/grpc/internal/leakcheck/leakcheck.go b/vendor/google.golang.org/grpc/internal/leakcheck/leakcheck.go new file mode 100644 index 0000000..76f9fc5 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/leakcheck/leakcheck.go @@ -0,0 +1,118 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package leakcheck contains functions to check leaked goroutines. +// +// Call "defer leakcheck.Check(t)" at the beginning of tests. 
+package leakcheck + +import ( + "runtime" + "sort" + "strings" + "time" +) + +var goroutinesToIgnore = []string{ + "testing.Main(", + "testing.tRunner(", + "testing.(*M).", + "runtime.goexit", + "created by runtime.gc", + "created by runtime/trace.Start", + "interestingGoroutines", + "runtime.MHeap_Scavenger", + "signal.signal_recv", + "sigterm.handler", + "runtime_mcall", + "(*loggingT).flushDaemon", + "goroutine in C code", +} + +// RegisterIgnoreGoroutine appends s into the ignore goroutine list. The +// goroutines whose stack trace contains s will not be identified as leaked +// goroutines. Not thread-safe, only call this function in init(). +func RegisterIgnoreGoroutine(s string) { + goroutinesToIgnore = append(goroutinesToIgnore, s) +} + +func ignore(g string) bool { + sl := strings.SplitN(g, "\n", 2) + if len(sl) != 2 { + return true + } + stack := strings.TrimSpace(sl[1]) + if strings.HasPrefix(stack, "testing.RunTests") { + return true + } + + if stack == "" { + return true + } + + for _, s := range goroutinesToIgnore { + if strings.Contains(stack, s) { + return true + } + } + + return false +} + +// interestingGoroutines returns all goroutines we care about for the purpose of +// leak checking. It excludes testing or runtime ones. +func interestingGoroutines() (gs []string) { + buf := make([]byte, 2<<20) + buf = buf[:runtime.Stack(buf, true)] + for _, g := range strings.Split(string(buf), "\n\n") { + if !ignore(g) { + gs = append(gs, g) + } + } + sort.Strings(gs) + return +} + +// Errorfer is the interface that wraps the Errorf method. It's a subset of +// testing.TB to make it easy to use Check. +type Errorfer interface { + Errorf(format string, args ...interface{}) +} + +func check(efer Errorfer, timeout time.Duration) { + // Loop, waiting for goroutines to shut down. + // Wait up to timeout, but finish as quickly as possible. + deadline := time.Now().Add(timeout) + var leaked []string + for time.Now().Before(deadline) { + if leaked = interestingGoroutines(); len(leaked) == 0 { + return + } + time.Sleep(50 * time.Millisecond) + } + for _, g := range leaked { + efer.Errorf("Leaked goroutine: %v", g) + } +} + +// Check looks at the currently-running goroutines and checks if there are any +// interestring (created by gRPC) goroutines leaked. It waits up to 10 seconds +// in the error cases. +func Check(efer Errorfer) { + check(efer, 10*time.Second) +} diff --git a/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go b/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go new file mode 100644 index 0000000..43281a3 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go @@ -0,0 +1,114 @@ +// +build !appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package syscall provides functionalities that grpc uses to get low-level operating system +// stats/info. 
+package syscall + +import ( + "fmt" + "net" + "syscall" + "time" + + "golang.org/x/sys/unix" + "google.golang.org/grpc/grpclog" +) + +// GetCPUTime returns the how much CPU time has passed since the start of this process. +func GetCPUTime() int64 { + var ts unix.Timespec + if err := unix.ClockGettime(unix.CLOCK_PROCESS_CPUTIME_ID, &ts); err != nil { + grpclog.Fatal(err) + } + return ts.Nano() +} + +// Rusage is an alias for syscall.Rusage under linux non-appengine environment. +type Rusage syscall.Rusage + +// GetRusage returns the resource usage of current process. +func GetRusage() (rusage *Rusage) { + rusage = new(Rusage) + syscall.Getrusage(syscall.RUSAGE_SELF, (*syscall.Rusage)(rusage)) + return +} + +// CPUTimeDiff returns the differences of user CPU time and system CPU time used +// between two Rusage structs. +func CPUTimeDiff(first *Rusage, latest *Rusage) (float64, float64) { + f := (*syscall.Rusage)(first) + l := (*syscall.Rusage)(latest) + var ( + utimeDiffs = l.Utime.Sec - f.Utime.Sec + utimeDiffus = l.Utime.Usec - f.Utime.Usec + stimeDiffs = l.Stime.Sec - f.Stime.Sec + stimeDiffus = l.Stime.Usec - f.Stime.Usec + ) + + uTimeElapsed := float64(utimeDiffs) + float64(utimeDiffus)*1.0e-6 + sTimeElapsed := float64(stimeDiffs) + float64(stimeDiffus)*1.0e-6 + + return uTimeElapsed, sTimeElapsed +} + +// SetTCPUserTimeout sets the TCP user timeout on a connection's socket +func SetTCPUserTimeout(conn net.Conn, timeout time.Duration) error { + tcpconn, ok := conn.(*net.TCPConn) + if !ok { + // not a TCP connection. exit early + return nil + } + rawConn, err := tcpconn.SyscallConn() + if err != nil { + return fmt.Errorf("error getting raw connection: %v", err) + } + err = rawConn.Control(func(fd uintptr) { + err = syscall.SetsockoptInt(int(fd), syscall.IPPROTO_TCP, unix.TCP_USER_TIMEOUT, int(timeout/time.Millisecond)) + }) + if err != nil { + return fmt.Errorf("error setting option on socket: %v", err) + } + + return nil +} + +// GetTCPUserTimeout gets the TCP user timeout on a connection's socket +func GetTCPUserTimeout(conn net.Conn) (opt int, err error) { + tcpconn, ok := conn.(*net.TCPConn) + if !ok { + err = fmt.Errorf("conn is not *net.TCPConn. got %T", conn) + return + } + rawConn, err := tcpconn.SyscallConn() + if err != nil { + err = fmt.Errorf("error getting raw connection: %v", err) + return + } + err = rawConn.Control(func(fd uintptr) { + opt, err = syscall.GetsockoptInt(int(fd), syscall.IPPROTO_TCP, unix.TCP_USER_TIMEOUT) + }) + if err != nil { + err = fmt.Errorf("error getting option on socket: %v", err) + return + } + + return +} diff --git a/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go b/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go new file mode 100644 index 0000000..d3fd9da --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go @@ -0,0 +1,73 @@ +// +build !linux appengine + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package syscall + +import ( + "net" + "sync" + "time" + + "google.golang.org/grpc/grpclog" +) + +var once sync.Once + +func log() { + once.Do(func() { + grpclog.Info("CPU time info is unavailable on non-linux or appengine environment.") + }) +} + +// GetCPUTime returns the how much CPU time has passed since the start of this process. +// It always returns 0 under non-linux or appengine environment. +func GetCPUTime() int64 { + log() + return 0 +} + +// Rusage is an empty struct under non-linux or appengine environment. +type Rusage struct{} + +// GetRusage is a no-op function under non-linux or appengine environment. +func GetRusage() (rusage *Rusage) { + log() + return nil +} + +// CPUTimeDiff returns the differences of user CPU time and system CPU time used +// between two Rusage structs. It a no-op function for non-linux or appengine environment. +func CPUTimeDiff(first *Rusage, latest *Rusage) (float64, float64) { + log() + return 0, 0 +} + +// SetTCPUserTimeout is a no-op function under non-linux or appengine environments +func SetTCPUserTimeout(conn net.Conn, timeout time.Duration) error { + log() + return nil +} + +// GetTCPUserTimeout is a no-op function under non-linux or appengine environments +// a negative return value indicates the operation is not supported +func GetTCPUserTimeout(conn net.Conn) (int, error) { + log() + return -1, nil +} diff --git a/vendor/google.golang.org/grpc/internal/testutils/pipe_listener.go b/vendor/google.golang.org/grpc/internal/testutils/pipe_listener.go new file mode 100644 index 0000000..6bd3bc0 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/testutils/pipe_listener.go @@ -0,0 +1,96 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package testutils contains testing helpers. +package testutils + +import ( + "errors" + "net" + "time" +) + +var errClosed = errors.New("closed") + +type pipeAddr struct{} + +func (p pipeAddr) Network() string { return "pipe" } +func (p pipeAddr) String() string { return "pipe" } + +// PipeListener is a listener with an unbuffered pipe. Each write will complete only once the other side reads. It +// should only be created using NewPipeListener. +type PipeListener struct { + c chan chan<- net.Conn + done chan struct{} +} + +// NewPipeListener creates a new pipe listener. +func NewPipeListener() *PipeListener { + return &PipeListener{ + c: make(chan chan<- net.Conn), + done: make(chan struct{}), + } +} + +// Accept accepts a connection. +func (p *PipeListener) Accept() (net.Conn, error) { + var connChan chan<- net.Conn + select { + case <-p.done: + return nil, errClosed + case connChan = <-p.c: + select { + case <-p.done: + close(connChan) + return nil, errClosed + default: + } + } + c1, c2 := net.Pipe() + connChan <- c1 + close(connChan) + return c2, nil +} + +// Close closes the listener. +func (p *PipeListener) Close() error { + close(p.done) + return nil +} + +// Addr returns a pipe addr. 
+func (p *PipeListener) Addr() net.Addr { + return pipeAddr{} +} + +// Dialer dials a connection. +func (p *PipeListener) Dialer() func(string, time.Duration) (net.Conn, error) { + return func(string, time.Duration) (net.Conn, error) { + connChan := make(chan net.Conn) + select { + case p.c <- connChan: + case <-p.done: + return nil, errClosed + } + conn, ok := <-connChan + if !ok { + return nil, errClosed + } + return conn, nil + } +} diff --git a/vendor/google.golang.org/grpc/internal/transport/bdp_estimator.go b/vendor/google.golang.org/grpc/internal/transport/bdp_estimator.go new file mode 100644 index 0000000..070680e --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/bdp_estimator.go @@ -0,0 +1,141 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package transport + +import ( + "sync" + "time" +) + +const ( + // bdpLimit is the maximum value the flow control windows will be increased + // to. TCP typically limits this to 4MB, but some systems go up to 16MB. + // Since this is only a limit, it is safe to make it optimistic. + bdpLimit = (1 << 20) * 16 + // alpha is a constant factor used to keep a moving average + // of RTTs. + alpha = 0.9 + // If the current bdp sample is greater than or equal to + // our beta * our estimated bdp and the current bandwidth + // sample is the maximum bandwidth observed so far, we + // increase our bbp estimate by a factor of gamma. + beta = 0.66 + // To put our bdp to be smaller than or equal to twice the real BDP, + // we should multiply our current sample with 4/3, however to round things out + // we use 2 as the multiplication factor. + gamma = 2 +) + +// Adding arbitrary data to ping so that its ack can be identified. +// Easter-egg: what does the ping message say? +var bdpPing = &ping{data: [8]byte{2, 4, 16, 16, 9, 14, 7, 7}} + +type bdpEstimator struct { + // sentAt is the time when the ping was sent. + sentAt time.Time + + mu sync.Mutex + // bdp is the current bdp estimate. + bdp uint32 + // sample is the number of bytes received in one measurement cycle. + sample uint32 + // bwMax is the maximum bandwidth noted so far (bytes/sec). + bwMax float64 + // bool to keep track of the beginning of a new measurement cycle. + isSent bool + // Callback to update the window sizes. + updateFlowControl func(n uint32) + // sampleCount is the number of samples taken so far. + sampleCount uint64 + // round trip time (seconds) + rtt float64 +} + +// timesnap registers the time bdp ping was sent out so that +// network rtt can be calculated when its ack is received. +// It is called (by controller) when the bdpPing is +// being written on the wire. +func (b *bdpEstimator) timesnap(d [8]byte) { + if bdpPing.data != d { + return + } + b.sentAt = time.Now() +} + +// add adds bytes to the current sample for calculating bdp. +// It returns true only if a ping must be sent. This can be used +// by the caller (handleData) to make decision about batching +// a window update with it. 
+func (b *bdpEstimator) add(n uint32) bool { + b.mu.Lock() + defer b.mu.Unlock() + if b.bdp == bdpLimit { + return false + } + if !b.isSent { + b.isSent = true + b.sample = n + b.sentAt = time.Time{} + b.sampleCount++ + return true + } + b.sample += n + return false +} + +// calculate is called when an ack for a bdp ping is received. +// Here we calculate the current bdp and bandwidth sample and +// decide if the flow control windows should go up. +func (b *bdpEstimator) calculate(d [8]byte) { + // Check if the ping acked for was the bdp ping. + if bdpPing.data != d { + return + } + b.mu.Lock() + rttSample := time.Since(b.sentAt).Seconds() + if b.sampleCount < 10 { + // Bootstrap rtt with an average of first 10 rtt samples. + b.rtt += (rttSample - b.rtt) / float64(b.sampleCount) + } else { + // Heed to the recent past more. + b.rtt += (rttSample - b.rtt) * float64(alpha) + } + b.isSent = false + // The number of bytes accumulated so far in the sample is smaller + // than or equal to 1.5 times the real BDP on a saturated connection. + bwCurrent := float64(b.sample) / (b.rtt * float64(1.5)) + if bwCurrent > b.bwMax { + b.bwMax = bwCurrent + } + // If the current sample (which is smaller than or equal to the 1.5 times the real BDP) is + // greater than or equal to 2/3rd our perceived bdp AND this is the maximum bandwidth seen so far, we + // should update our perception of the network BDP. + if float64(b.sample) >= beta*float64(b.bdp) && bwCurrent == b.bwMax && b.bdp != bdpLimit { + sampleFloat := float64(b.sample) + b.bdp = uint32(gamma * sampleFloat) + if b.bdp > bdpLimit { + b.bdp = bdpLimit + } + bdp := b.bdp + b.mu.Unlock() + b.updateFlowControl(bdp) + return + } + b.mu.Unlock() +} diff --git a/vendor/google.golang.org/grpc/internal/transport/controlbuf.go b/vendor/google.golang.org/grpc/internal/transport/controlbuf.go new file mode 100644 index 0000000..204ba15 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/controlbuf.go @@ -0,0 +1,852 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package transport + +import ( + "bytes" + "fmt" + "runtime" + "sync" + + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" +) + +var updateHeaderTblSize = func(e *hpack.Encoder, v uint32) { + e.SetMaxDynamicTableSizeLimit(v) +} + +type itemNode struct { + it interface{} + next *itemNode +} + +type itemList struct { + head *itemNode + tail *itemNode +} + +func (il *itemList) enqueue(i interface{}) { + n := &itemNode{it: i} + if il.tail == nil { + il.head, il.tail = n, n + return + } + il.tail.next = n + il.tail = n +} + +// peek returns the first item in the list without removing it from the +// list. 
+func (il *itemList) peek() interface{} { + return il.head.it +} + +func (il *itemList) dequeue() interface{} { + if il.head == nil { + return nil + } + i := il.head.it + il.head = il.head.next + if il.head == nil { + il.tail = nil + } + return i +} + +func (il *itemList) dequeueAll() *itemNode { + h := il.head + il.head, il.tail = nil, nil + return h +} + +func (il *itemList) isEmpty() bool { + return il.head == nil +} + +// The following defines various control items which could flow through +// the control buffer of transport. They represent different aspects of +// control tasks, e.g., flow control, settings, streaming resetting, etc. + +// registerStream is used to register an incoming stream with loopy writer. +type registerStream struct { + streamID uint32 + wq *writeQuota +} + +// headerFrame is also used to register stream on the client-side. +type headerFrame struct { + streamID uint32 + hf []hpack.HeaderField + endStream bool // Valid on server side. + initStream func(uint32) (bool, error) // Used only on the client side. + onWrite func() + wq *writeQuota // write quota for the stream created. + cleanup *cleanupStream // Valid on the server side. + onOrphaned func(error) // Valid on client-side +} + +type cleanupStream struct { + streamID uint32 + rst bool + rstCode http2.ErrCode + onWrite func() +} + +type dataFrame struct { + streamID uint32 + endStream bool + h []byte + d []byte + // onEachWrite is called every time + // a part of d is written out. + onEachWrite func() +} + +type incomingWindowUpdate struct { + streamID uint32 + increment uint32 +} + +type outgoingWindowUpdate struct { + streamID uint32 + increment uint32 +} + +type incomingSettings struct { + ss []http2.Setting +} + +type outgoingSettings struct { + ss []http2.Setting +} + +type incomingGoAway struct { +} + +type goAway struct { + code http2.ErrCode + debugData []byte + headsUp bool + closeConn bool +} + +type ping struct { + ack bool + data [8]byte +} + +type outFlowControlSizeRequest struct { + resp chan uint32 +} + +type outStreamState int + +const ( + active outStreamState = iota + empty + waitingOnStreamQuota +) + +type outStream struct { + id uint32 + state outStreamState + itl *itemList + bytesOutStanding int + wq *writeQuota + + next *outStream + prev *outStream +} + +func (s *outStream) deleteSelf() { + if s.prev != nil { + s.prev.next = s.next + } + if s.next != nil { + s.next.prev = s.prev + } + s.next, s.prev = nil, nil +} + +type outStreamList struct { + // Following are sentinel objects that mark the + // beginning and end of the list. They do not + // contain any item lists. All valid objects are + // inserted in between them. + // This is needed so that an outStream object can + // deleteSelf() in O(1) time without knowing which + // list it belongs to. + head *outStream + tail *outStream +} + +func newOutStreamList() *outStreamList { + head, tail := new(outStream), new(outStream) + head.next = tail + tail.prev = head + return &outStreamList{ + head: head, + tail: tail, + } +} + +func (l *outStreamList) enqueue(s *outStream) { + e := l.tail.prev + e.next = s + s.prev = e + s.next = l.tail + l.tail.prev = s +} + +// remove from the beginning of the list. +func (l *outStreamList) dequeue() *outStream { + b := l.head.next + if b == l.tail { + return nil + } + b.deleteSelf() + return b +} + +// controlBuffer is a way to pass information to loopy. +// Information is passed as specific struct types called control frames. 
+// A control frame not only represents data, messages or headers to be sent out +// but can also be used to instruct loopy to update its internal state. +// It shouldn't be confused with an HTTP2 frame, although some of the control frames +// like dataFrame and headerFrame do go out on wire as HTTP2 frames. +type controlBuffer struct { + ch chan struct{} + done <-chan struct{} + mu sync.Mutex + consumerWaiting bool + list *itemList + err error +} + +func newControlBuffer(done <-chan struct{}) *controlBuffer { + return &controlBuffer{ + ch: make(chan struct{}, 1), + list: &itemList{}, + done: done, + } +} + +func (c *controlBuffer) put(it interface{}) error { + _, err := c.executeAndPut(nil, it) + return err +} + +func (c *controlBuffer) executeAndPut(f func(it interface{}) bool, it interface{}) (bool, error) { + var wakeUp bool + c.mu.Lock() + if c.err != nil { + c.mu.Unlock() + return false, c.err + } + if f != nil { + if !f(it) { // f wasn't successful + c.mu.Unlock() + return false, nil + } + } + if c.consumerWaiting { + wakeUp = true + c.consumerWaiting = false + } + c.list.enqueue(it) + c.mu.Unlock() + if wakeUp { + select { + case c.ch <- struct{}{}: + default: + } + } + return true, nil +} + +// Note argument f should never be nil. +func (c *controlBuffer) execute(f func(it interface{}) bool, it interface{}) (bool, error) { + c.mu.Lock() + if c.err != nil { + c.mu.Unlock() + return false, c.err + } + if !f(it) { // f wasn't successful + c.mu.Unlock() + return false, nil + } + c.mu.Unlock() + return true, nil +} + +func (c *controlBuffer) get(block bool) (interface{}, error) { + for { + c.mu.Lock() + if c.err != nil { + c.mu.Unlock() + return nil, c.err + } + if !c.list.isEmpty() { + h := c.list.dequeue() + c.mu.Unlock() + return h, nil + } + if !block { + c.mu.Unlock() + return nil, nil + } + c.consumerWaiting = true + c.mu.Unlock() + select { + case <-c.ch: + case <-c.done: + c.finish() + return nil, ErrConnClosing + } + } +} + +func (c *controlBuffer) finish() { + c.mu.Lock() + if c.err != nil { + c.mu.Unlock() + return + } + c.err = ErrConnClosing + // There may be headers for streams in the control buffer. + // These streams need to be cleaned out since the transport + // is still not aware of these yet. + for head := c.list.dequeueAll(); head != nil; head = head.next { + hdr, ok := head.it.(*headerFrame) + if !ok { + continue + } + if hdr.onOrphaned != nil { // It will be nil on the server-side. + hdr.onOrphaned(ErrConnClosing) + } + } + c.mu.Unlock() +} + +type side int + +const ( + clientSide side = iota + serverSide +) + +// Loopy receives frames from the control buffer. +// Each frame is handled individually; most of the work done by loopy goes +// into handling data frames. Loopy maintains a queue of active streams, and each +// stream maintains a queue of data frames; as loopy receives data frames +// it gets added to the queue of the relevant stream. +// Loopy goes over this list of active streams by processing one node every iteration, +// thereby closely resemebling to a round-robin scheduling over all streams. While +// processing a stream, loopy writes out data bytes from this stream capped by the min +// of http2MaxFrameLen, connection-level flow control and stream-level flow control. +type loopyWriter struct { + side side + cbuf *controlBuffer + sendQuota uint32 + oiws uint32 // outbound initial window size. + // estdStreams is map of all established streams that are not cleaned-up yet. + // On client-side, this is all streams whose headers were sent out. 
+ // On server-side, this is all streams whose headers were received. + estdStreams map[uint32]*outStream // Established streams. + // activeStreams is a linked-list of all streams that have data to send and some + // stream-level flow control quota. + // Each of these streams internally have a list of data items(and perhaps trailers + // on the server-side) to be sent out. + activeStreams *outStreamList + framer *framer + hBuf *bytes.Buffer // The buffer for HPACK encoding. + hEnc *hpack.Encoder // HPACK encoder. + bdpEst *bdpEstimator + draining bool + + // Side-specific handlers + ssGoAwayHandler func(*goAway) (bool, error) +} + +func newLoopyWriter(s side, fr *framer, cbuf *controlBuffer, bdpEst *bdpEstimator) *loopyWriter { + var buf bytes.Buffer + l := &loopyWriter{ + side: s, + cbuf: cbuf, + sendQuota: defaultWindowSize, + oiws: defaultWindowSize, + estdStreams: make(map[uint32]*outStream), + activeStreams: newOutStreamList(), + framer: fr, + hBuf: &buf, + hEnc: hpack.NewEncoder(&buf), + bdpEst: bdpEst, + } + return l +} + +const minBatchSize = 1000 + +// run should be run in a separate goroutine. +// It reads control frames from controlBuf and processes them by: +// 1. Updating loopy's internal state, or/and +// 2. Writing out HTTP2 frames on the wire. +// +// Loopy keeps all active streams with data to send in a linked-list. +// All streams in the activeStreams linked-list must have both: +// 1. Data to send, and +// 2. Stream level flow control quota available. +// +// In each iteration of run loop, other than processing the incoming control +// frame, loopy calls processData, which processes one node from the activeStreams linked-list. +// This results in writing of HTTP2 frames into an underlying write buffer. +// When there's no more control frames to read from controlBuf, loopy flushes the write buffer. +// As an optimization, to increase the batch size for each flush, loopy yields the processor, once +// if the batch size is too low to give stream goroutines a chance to fill it up. +func (l *loopyWriter) run() (err error) { + defer func() { + if err == ErrConnClosing { + // Don't log ErrConnClosing as error since it happens + // 1. When the connection is closed by some other known issue. + // 2. User closed the connection. + // 3. A graceful close of connection. + infof("transport: loopyWriter.run returning. %v", err) + err = nil + } + }() + for { + it, err := l.cbuf.get(true) + if err != nil { + return err + } + if err = l.handle(it); err != nil { + return err + } + if _, err = l.processData(); err != nil { + return err + } + gosched := true + hasdata: + for { + it, err := l.cbuf.get(false) + if err != nil { + return err + } + if it != nil { + if err = l.handle(it); err != nil { + return err + } + if _, err = l.processData(); err != nil { + return err + } + continue hasdata + } + isEmpty, err := l.processData() + if err != nil { + return err + } + if !isEmpty { + continue hasdata + } + if gosched { + gosched = false + if l.framer.writer.offset < minBatchSize { + runtime.Gosched() + continue hasdata + } + } + l.framer.writer.Flush() + break hasdata + + } + } +} + +func (l *loopyWriter) outgoingWindowUpdateHandler(w *outgoingWindowUpdate) error { + return l.framer.fr.WriteWindowUpdate(w.streamID, w.increment) +} + +func (l *loopyWriter) incomingWindowUpdateHandler(w *incomingWindowUpdate) error { + // Otherwise update the quota. + if w.streamID == 0 { + l.sendQuota += w.increment + return nil + } + // Find the stream and update it. 
+ if str, ok := l.estdStreams[w.streamID]; ok { + str.bytesOutStanding -= int(w.increment) + if strQuota := int(l.oiws) - str.bytesOutStanding; strQuota > 0 && str.state == waitingOnStreamQuota { + str.state = active + l.activeStreams.enqueue(str) + return nil + } + } + return nil +} + +func (l *loopyWriter) outgoingSettingsHandler(s *outgoingSettings) error { + return l.framer.fr.WriteSettings(s.ss...) +} + +func (l *loopyWriter) incomingSettingsHandler(s *incomingSettings) error { + if err := l.applySettings(s.ss); err != nil { + return err + } + return l.framer.fr.WriteSettingsAck() +} + +func (l *loopyWriter) registerStreamHandler(h *registerStream) error { + str := &outStream{ + id: h.streamID, + state: empty, + itl: &itemList{}, + wq: h.wq, + } + l.estdStreams[h.streamID] = str + return nil +} + +func (l *loopyWriter) headerHandler(h *headerFrame) error { + if l.side == serverSide { + str, ok := l.estdStreams[h.streamID] + if !ok { + warningf("transport: loopy doesn't recognize the stream: %d", h.streamID) + return nil + } + // Case 1.A: Server is responding back with headers. + if !h.endStream { + return l.writeHeader(h.streamID, h.endStream, h.hf, h.onWrite) + } + // else: Case 1.B: Server wants to close stream. + + if str.state != empty { // either active or waiting on stream quota. + // add it str's list of items. + str.itl.enqueue(h) + return nil + } + if err := l.writeHeader(h.streamID, h.endStream, h.hf, h.onWrite); err != nil { + return err + } + return l.cleanupStreamHandler(h.cleanup) + } + // Case 2: Client wants to originate stream. + str := &outStream{ + id: h.streamID, + state: empty, + itl: &itemList{}, + wq: h.wq, + } + str.itl.enqueue(h) + return l.originateStream(str) +} + +func (l *loopyWriter) originateStream(str *outStream) error { + hdr := str.itl.dequeue().(*headerFrame) + sendPing, err := hdr.initStream(str.id) + if err != nil { + if err == ErrConnClosing { + return err + } + // Other errors(errStreamDrain) need not close transport. + return nil + } + if err = l.writeHeader(str.id, hdr.endStream, hdr.hf, hdr.onWrite); err != nil { + return err + } + l.estdStreams[str.id] = str + if sendPing { + return l.pingHandler(&ping{data: [8]byte{}}) + } + return nil +} + +func (l *loopyWriter) writeHeader(streamID uint32, endStream bool, hf []hpack.HeaderField, onWrite func()) error { + if onWrite != nil { + onWrite() + } + l.hBuf.Reset() + for _, f := range hf { + if err := l.hEnc.WriteField(f); err != nil { + warningf("transport: loopyWriter.writeHeader encountered error while encoding headers:", err) + } + } + var ( + err error + endHeaders, first bool + ) + first = true + for !endHeaders { + size := l.hBuf.Len() + if size > http2MaxFrameLen { + size = http2MaxFrameLen + } else { + endHeaders = true + } + if first { + first = false + err = l.framer.fr.WriteHeaders(http2.HeadersFrameParam{ + StreamID: streamID, + BlockFragment: l.hBuf.Next(size), + EndStream: endStream, + EndHeaders: endHeaders, + }) + } else { + err = l.framer.fr.WriteContinuation( + streamID, + endHeaders, + l.hBuf.Next(size), + ) + } + if err != nil { + return err + } + } + return nil +} + +func (l *loopyWriter) preprocessData(df *dataFrame) error { + str, ok := l.estdStreams[df.streamID] + if !ok { + return nil + } + // If we got data for a stream it means that + // stream was originated and the headers were sent out. 
+ str.itl.enqueue(df) + if str.state == empty { + str.state = active + l.activeStreams.enqueue(str) + } + return nil +} + +func (l *loopyWriter) pingHandler(p *ping) error { + if !p.ack { + l.bdpEst.timesnap(p.data) + } + return l.framer.fr.WritePing(p.ack, p.data) + +} + +func (l *loopyWriter) outFlowControlSizeRequestHandler(o *outFlowControlSizeRequest) error { + o.resp <- l.sendQuota + return nil +} + +func (l *loopyWriter) cleanupStreamHandler(c *cleanupStream) error { + c.onWrite() + if str, ok := l.estdStreams[c.streamID]; ok { + // On the server side it could be a trailers-only response or + // a RST_STREAM before stream initialization thus the stream might + // not be established yet. + delete(l.estdStreams, c.streamID) + str.deleteSelf() + } + if c.rst { // If RST_STREAM needs to be sent. + if err := l.framer.fr.WriteRSTStream(c.streamID, c.rstCode); err != nil { + return err + } + } + if l.side == clientSide && l.draining && len(l.estdStreams) == 0 { + return ErrConnClosing + } + return nil +} + +func (l *loopyWriter) incomingGoAwayHandler(*incomingGoAway) error { + if l.side == clientSide { + l.draining = true + if len(l.estdStreams) == 0 { + return ErrConnClosing + } + } + return nil +} + +func (l *loopyWriter) goAwayHandler(g *goAway) error { + // Handling of outgoing GoAway is very specific to side. + if l.ssGoAwayHandler != nil { + draining, err := l.ssGoAwayHandler(g) + if err != nil { + return err + } + l.draining = draining + } + return nil +} + +func (l *loopyWriter) handle(i interface{}) error { + switch i := i.(type) { + case *incomingWindowUpdate: + return l.incomingWindowUpdateHandler(i) + case *outgoingWindowUpdate: + return l.outgoingWindowUpdateHandler(i) + case *incomingSettings: + return l.incomingSettingsHandler(i) + case *outgoingSettings: + return l.outgoingSettingsHandler(i) + case *headerFrame: + return l.headerHandler(i) + case *registerStream: + return l.registerStreamHandler(i) + case *cleanupStream: + return l.cleanupStreamHandler(i) + case *incomingGoAway: + return l.incomingGoAwayHandler(i) + case *dataFrame: + return l.preprocessData(i) + case *ping: + return l.pingHandler(i) + case *goAway: + return l.goAwayHandler(i) + case *outFlowControlSizeRequest: + return l.outFlowControlSizeRequestHandler(i) + default: + return fmt.Errorf("transport: unknown control message type %T", i) + } +} + +func (l *loopyWriter) applySettings(ss []http2.Setting) error { + for _, s := range ss { + switch s.ID { + case http2.SettingInitialWindowSize: + o := l.oiws + l.oiws = s.Val + if o < l.oiws { + // If the new limit is greater make all depleted streams active. + for _, stream := range l.estdStreams { + if stream.state == waitingOnStreamQuota { + stream.state = active + l.activeStreams.enqueue(stream) + } + } + } + case http2.SettingHeaderTableSize: + updateHeaderTblSize(l.hEnc, s.Val) + } + } + return nil +} + +// processData removes the first stream from active streams, writes out at most 16KB +// of its data and then puts it at the end of activeStreams if there's still more data +// to be sent and stream has some stream-level flow control. +func (l *loopyWriter) processData() (bool, error) { + if l.sendQuota == 0 { + return true, nil + } + str := l.activeStreams.dequeue() // Remove the first stream. + if str == nil { + return true, nil + } + dataItem := str.itl.peek().(*dataFrame) // Peek at the first data item this stream. + // A data item is represented by a dataFrame, since it later translates into + // multiple HTTP2 data frames. 
+ // Every dataFrame has two buffers; h that keeps grpc-message header and d that is acutal data. + // As an optimization to keep wire traffic low, data from d is copied to h to make as big as the + // maximum possilbe HTTP2 frame size. + + if len(dataItem.h) == 0 && len(dataItem.d) == 0 { // Empty data frame + // Client sends out empty data frame with endStream = true + if err := l.framer.fr.WriteData(dataItem.streamID, dataItem.endStream, nil); err != nil { + return false, err + } + str.itl.dequeue() // remove the empty data item from stream + if str.itl.isEmpty() { + str.state = empty + } else if trailer, ok := str.itl.peek().(*headerFrame); ok { // the next item is trailers. + if err := l.writeHeader(trailer.streamID, trailer.endStream, trailer.hf, trailer.onWrite); err != nil { + return false, err + } + if err := l.cleanupStreamHandler(trailer.cleanup); err != nil { + return false, nil + } + } else { + l.activeStreams.enqueue(str) + } + return false, nil + } + var ( + idx int + buf []byte + ) + if len(dataItem.h) != 0 { // data header has not been written out yet. + buf = dataItem.h + } else { + idx = 1 + buf = dataItem.d + } + size := http2MaxFrameLen + if len(buf) < size { + size = len(buf) + } + if strQuota := int(l.oiws) - str.bytesOutStanding; strQuota <= 0 { // stream-level flow control. + str.state = waitingOnStreamQuota + return false, nil + } else if strQuota < size { + size = strQuota + } + + if l.sendQuota < uint32(size) { // connection-level flow control. + size = int(l.sendQuota) + } + // Now that outgoing flow controls are checked we can replenish str's write quota + str.wq.replenish(size) + var endStream bool + // If this is the last data message on this stream and all of it can be written in this iteration. + if dataItem.endStream && size == len(buf) { + // buf contains either data or it contains header but data is empty. + if idx == 1 || len(dataItem.d) == 0 { + endStream = true + } + } + if dataItem.onEachWrite != nil { + dataItem.onEachWrite() + } + if err := l.framer.fr.WriteData(dataItem.streamID, endStream, buf[:size]); err != nil { + return false, err + } + buf = buf[size:] + str.bytesOutStanding += size + l.sendQuota -= uint32(size) + if idx == 0 { + dataItem.h = buf + } else { + dataItem.d = buf + } + + if len(dataItem.h) == 0 && len(dataItem.d) == 0 { // All the data from that message was written out. + str.itl.dequeue() + } + if str.itl.isEmpty() { + str.state = empty + } else if trailer, ok := str.itl.peek().(*headerFrame); ok { // The next item is trailers. + if err := l.writeHeader(trailer.streamID, trailer.endStream, trailer.hf, trailer.onWrite); err != nil { + return false, err + } + if err := l.cleanupStreamHandler(trailer.cleanup); err != nil { + return false, err + } + } else if int(l.oiws)-str.bytesOutStanding <= 0 { // Ran out of stream quota. + str.state = waitingOnStreamQuota + } else { // Otherwise add it back to the list of active streams. + l.activeStreams.enqueue(str) + } + return false, nil +} diff --git a/vendor/google.golang.org/grpc/internal/transport/defaults.go b/vendor/google.golang.org/grpc/internal/transport/defaults.go new file mode 100644 index 0000000..9fa306b --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/defaults.go @@ -0,0 +1,49 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package transport + +import ( + "math" + "time" +) + +const ( + // The default value of flow control window size in HTTP2 spec. + defaultWindowSize = 65535 + // The initial window size for flow control. + initialWindowSize = defaultWindowSize // for an RPC + infinity = time.Duration(math.MaxInt64) + defaultClientKeepaliveTime = infinity + defaultClientKeepaliveTimeout = 20 * time.Second + defaultMaxStreamsClient = 100 + defaultMaxConnectionIdle = infinity + defaultMaxConnectionAge = infinity + defaultMaxConnectionAgeGrace = infinity + defaultServerKeepaliveTime = 2 * time.Hour + defaultServerKeepaliveTimeout = 20 * time.Second + defaultKeepalivePolicyMinTime = 5 * time.Minute + // max window limit set by HTTP2 Specs. + maxWindowSize = math.MaxInt32 + // defaultWriteQuota is the default value for number of data + // bytes that each stream can schedule before some of it being + // flushed out. + defaultWriteQuota = 64 * 1024 + defaultClientMaxHeaderListSize = uint32(16 << 20) + defaultServerMaxHeaderListSize = uint32(16 << 20) +) diff --git a/vendor/google.golang.org/grpc/internal/transport/flowcontrol.go b/vendor/google.golang.org/grpc/internal/transport/flowcontrol.go new file mode 100644 index 0000000..5ea997a --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/flowcontrol.go @@ -0,0 +1,218 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package transport + +import ( + "fmt" + "math" + "sync" + "sync/atomic" +) + +// writeQuota is a soft limit on the amount of data a stream can +// schedule before some of it is written out. +type writeQuota struct { + quota int32 + // get waits on read from when quota goes less than or equal to zero. + // replenish writes on it when quota goes positive again. + ch chan struct{} + // done is triggered in error case. + done <-chan struct{} + // replenish is called by loopyWriter to give quota back to. + // It is implemented as a field so that it can be updated + // by tests. 
+ replenish func(n int) +} + +func newWriteQuota(sz int32, done <-chan struct{}) *writeQuota { + w := &writeQuota{ + quota: sz, + ch: make(chan struct{}, 1), + done: done, + } + w.replenish = w.realReplenish + return w +} + +func (w *writeQuota) get(sz int32) error { + for { + if atomic.LoadInt32(&w.quota) > 0 { + atomic.AddInt32(&w.quota, -sz) + return nil + } + select { + case <-w.ch: + continue + case <-w.done: + return errStreamDone + } + } +} + +func (w *writeQuota) realReplenish(n int) { + sz := int32(n) + a := atomic.AddInt32(&w.quota, sz) + b := a - sz + if b <= 0 && a > 0 { + select { + case w.ch <- struct{}{}: + default: + } + } +} + +type trInFlow struct { + limit uint32 + unacked uint32 + effectiveWindowSize uint32 +} + +func (f *trInFlow) newLimit(n uint32) uint32 { + d := n - f.limit + f.limit = n + f.updateEffectiveWindowSize() + return d +} + +func (f *trInFlow) onData(n uint32) uint32 { + f.unacked += n + if f.unacked >= f.limit/4 { + w := f.unacked + f.unacked = 0 + f.updateEffectiveWindowSize() + return w + } + f.updateEffectiveWindowSize() + return 0 +} + +func (f *trInFlow) reset() uint32 { + w := f.unacked + f.unacked = 0 + f.updateEffectiveWindowSize() + return w +} + +func (f *trInFlow) updateEffectiveWindowSize() { + atomic.StoreUint32(&f.effectiveWindowSize, f.limit-f.unacked) +} + +func (f *trInFlow) getSize() uint32 { + return atomic.LoadUint32(&f.effectiveWindowSize) +} + +// TODO(mmukhi): Simplify this code. +// inFlow deals with inbound flow control +type inFlow struct { + mu sync.Mutex + // The inbound flow control limit for pending data. + limit uint32 + // pendingData is the overall data which have been received but not been + // consumed by applications. + pendingData uint32 + // The amount of data the application has consumed but grpc has not sent + // window update for them. Used to reduce window update frequency. + pendingUpdate uint32 + // delta is the extra window update given by receiver when an application + // is reading data bigger in size than the inFlow limit. + delta uint32 +} + +// newLimit updates the inflow window to a new value n. +// It assumes that n is always greater than the old limit. +func (f *inFlow) newLimit(n uint32) uint32 { + f.mu.Lock() + d := n - f.limit + f.limit = n + f.mu.Unlock() + return d +} + +func (f *inFlow) maybeAdjust(n uint32) uint32 { + if n > uint32(math.MaxInt32) { + n = uint32(math.MaxInt32) + } + f.mu.Lock() + // estSenderQuota is the receiver's view of the maximum number of bytes the sender + // can send without a window update. + estSenderQuota := int32(f.limit - (f.pendingData + f.pendingUpdate)) + // estUntransmittedData is the maximum number of bytes the sends might not have put + // on the wire yet. A value of 0 or less means that we have already received all or + // more bytes than the application is requesting to read. + estUntransmittedData := int32(n - f.pendingData) // Casting into int32 since it could be negative. + // This implies that unless we send a window update, the sender won't be able to send all the bytes + // for this message. Therefore we must send an update over the limit since there's an active read + // request from the application. + if estUntransmittedData > estSenderQuota { + // Sender's window shouldn't go more than 2^31 - 1 as specified in the HTTP spec. + if f.limit+n > maxWindowSize { + f.delta = maxWindowSize - f.limit + } else { + // Send a window update for the whole message and not just the difference between + // estUntransmittedData and estSenderQuota. 
This will be helpful in case the message + // is padded; We will fallback on the current available window(at least a 1/4th of the limit). + f.delta = n + } + f.mu.Unlock() + return f.delta + } + f.mu.Unlock() + return 0 +} + +// onData is invoked when some data frame is received. It updates pendingData. +func (f *inFlow) onData(n uint32) error { + f.mu.Lock() + f.pendingData += n + if f.pendingData+f.pendingUpdate > f.limit+f.delta { + limit := f.limit + rcvd := f.pendingData + f.pendingUpdate + f.mu.Unlock() + return fmt.Errorf("received %d-bytes data exceeding the limit %d bytes", rcvd, limit) + } + f.mu.Unlock() + return nil +} + +// onRead is invoked when the application reads the data. It returns the window size +// to be sent to the peer. +func (f *inFlow) onRead(n uint32) uint32 { + f.mu.Lock() + if f.pendingData == 0 { + f.mu.Unlock() + return 0 + } + f.pendingData -= n + if n > f.delta { + n -= f.delta + f.delta = 0 + } else { + f.delta -= n + n = 0 + } + f.pendingUpdate += n + if f.pendingUpdate >= f.limit/4 { + wu := f.pendingUpdate + f.pendingUpdate = 0 + f.mu.Unlock() + return wu + } + f.mu.Unlock() + return 0 +} diff --git a/vendor/google.golang.org/grpc/internal/transport/handler_server.go b/vendor/google.golang.org/grpc/internal/transport/handler_server.go new file mode 100644 index 0000000..f2de84d --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/handler_server.go @@ -0,0 +1,430 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This file is the implementation of a gRPC server using HTTP/2 which +// uses the standard Go http2 Server implementation (via the +// http.Handler interface), rather than speaking low-level HTTP/2 +// frames itself. It is the implementation of *grpc.Server.ServeHTTP. + +package transport + +import ( + "context" + "errors" + "fmt" + "io" + "net" + "net/http" + "strings" + "sync" + "time" + + "github.com/golang/protobuf/proto" + "golang.org/x/net/http2" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" +) + +// NewServerHandlerTransport returns a ServerTransport handling gRPC +// from inside an http.Handler. It requires that the http Server +// supports HTTP/2. +func NewServerHandlerTransport(w http.ResponseWriter, r *http.Request, stats stats.Handler) (ServerTransport, error) { + if r.ProtoMajor != 2 { + return nil, errors.New("gRPC requires HTTP/2") + } + if r.Method != "POST" { + return nil, errors.New("invalid gRPC request method") + } + contentType := r.Header.Get("Content-Type") + // TODO: do we assume contentType is lowercase? 
we did before + contentSubtype, validContentType := contentSubtype(contentType) + if !validContentType { + return nil, errors.New("invalid gRPC request content-type") + } + if _, ok := w.(http.Flusher); !ok { + return nil, errors.New("gRPC requires a ResponseWriter supporting http.Flusher") + } + + st := &serverHandlerTransport{ + rw: w, + req: r, + closedCh: make(chan struct{}), + writes: make(chan func()), + contentType: contentType, + contentSubtype: contentSubtype, + stats: stats, + } + + if v := r.Header.Get("grpc-timeout"); v != "" { + to, err := decodeTimeout(v) + if err != nil { + return nil, status.Errorf(codes.Internal, "malformed time-out: %v", err) + } + st.timeoutSet = true + st.timeout = to + } + + metakv := []string{"content-type", contentType} + if r.Host != "" { + metakv = append(metakv, ":authority", r.Host) + } + for k, vv := range r.Header { + k = strings.ToLower(k) + if isReservedHeader(k) && !isWhitelistedHeader(k) { + continue + } + for _, v := range vv { + v, err := decodeMetadataHeader(k, v) + if err != nil { + return nil, status.Errorf(codes.Internal, "malformed binary metadata: %v", err) + } + metakv = append(metakv, k, v) + } + } + st.headerMD = metadata.Pairs(metakv...) + + return st, nil +} + +// serverHandlerTransport is an implementation of ServerTransport +// which replies to exactly one gRPC request (exactly one HTTP request), +// using the net/http.Handler interface. This http.Handler is guaranteed +// at this point to be speaking over HTTP/2, so it's able to speak valid +// gRPC. +type serverHandlerTransport struct { + rw http.ResponseWriter + req *http.Request + timeoutSet bool + timeout time.Duration + didCommonHeaders bool + + headerMD metadata.MD + + closeOnce sync.Once + closedCh chan struct{} // closed on Close + + // writes is a channel of code to run serialized in the + // ServeHTTP (HandleStreams) goroutine. The channel is closed + // when WriteStatus is called. + writes chan func() + + // block concurrent WriteStatus calls + // e.g. grpc/(*serverStream).SendMsg/RecvMsg + writeStatusMu sync.Mutex + + // we just mirror the request content-type + contentType string + // we store both contentType and contentSubtype so we don't keep recreating them + // TODO make sure this is consistent across handler_server and http2_server + contentSubtype string + + stats stats.Handler +} + +func (ht *serverHandlerTransport) Close() error { + ht.closeOnce.Do(ht.closeCloseChanOnce) + return nil +} + +func (ht *serverHandlerTransport) closeCloseChanOnce() { close(ht.closedCh) } + +func (ht *serverHandlerTransport) RemoteAddr() net.Addr { return strAddr(ht.req.RemoteAddr) } + +// strAddr is a net.Addr backed by either a TCP "ip:port" string, or +// the empty string if unknown. +type strAddr string + +func (a strAddr) Network() string { + if a != "" { + // Per the documentation on net/http.Request.RemoteAddr, if this is + // set, it's set to the IP:port of the peer (hence, TCP): + // https://golang.org/pkg/net/http/#Request + // + // If we want to support Unix sockets later, we can + // add our own grpc-specific convention within the + // grpc codebase to set RemoteAddr to a different + // format, or probably better: we can attach it to the + // context and use that from serverHandlerTransport.RemoteAddr. + return "tcp" + } + return "" +} + +func (a strAddr) String() string { return string(a) } + +// do runs fn in the ServeHTTP goroutine. 
+func (ht *serverHandlerTransport) do(fn func()) error { + select { + case <-ht.closedCh: + return ErrConnClosing + case ht.writes <- fn: + return nil + } +} + +func (ht *serverHandlerTransport) WriteStatus(s *Stream, st *status.Status) error { + ht.writeStatusMu.Lock() + defer ht.writeStatusMu.Unlock() + + err := ht.do(func() { + ht.writeCommonHeaders(s) + + // And flush, in case no header or body has been sent yet. + // This forces a separation of headers and trailers if this is the + // first call (for example, in end2end tests's TestNoService). + ht.rw.(http.Flusher).Flush() + + h := ht.rw.Header() + h.Set("Grpc-Status", fmt.Sprintf("%d", st.Code())) + if m := st.Message(); m != "" { + h.Set("Grpc-Message", encodeGrpcMessage(m)) + } + + if p := st.Proto(); p != nil && len(p.Details) > 0 { + stBytes, err := proto.Marshal(p) + if err != nil { + // TODO: return error instead, when callers are able to handle it. + panic(err) + } + + h.Set("Grpc-Status-Details-Bin", encodeBinHeader(stBytes)) + } + + if md := s.Trailer(); len(md) > 0 { + for k, vv := range md { + // Clients don't tolerate reading restricted headers after some non restricted ones were sent. + if isReservedHeader(k) { + continue + } + for _, v := range vv { + // http2 ResponseWriter mechanism to send undeclared Trailers after + // the headers have possibly been written. + h.Add(http2.TrailerPrefix+k, encodeMetadataHeader(k, v)) + } + } + } + }) + + if err == nil { // transport has not been closed + if ht.stats != nil { + ht.stats.HandleRPC(s.Context(), &stats.OutTrailer{}) + } + } + ht.Close() + return err +} + +// writeCommonHeaders sets common headers on the first write +// call (Write, WriteHeader, or WriteStatus). +func (ht *serverHandlerTransport) writeCommonHeaders(s *Stream) { + if ht.didCommonHeaders { + return + } + ht.didCommonHeaders = true + + h := ht.rw.Header() + h["Date"] = nil // suppress Date to make tests happy; TODO: restore + h.Set("Content-Type", ht.contentType) + + // Predeclare trailers we'll set later in WriteStatus (after the body). + // This is a SHOULD in the HTTP RFC, and the way you add (known) + // Trailers per the net/http.ResponseWriter contract. + // See https://golang.org/pkg/net/http/#ResponseWriter + // and https://golang.org/pkg/net/http/#example_ResponseWriter_trailers + h.Add("Trailer", "Grpc-Status") + h.Add("Trailer", "Grpc-Message") + h.Add("Trailer", "Grpc-Status-Details-Bin") + + if s.sendCompress != "" { + h.Set("Grpc-Encoding", s.sendCompress) + } +} + +func (ht *serverHandlerTransport) Write(s *Stream, hdr []byte, data []byte, opts *Options) error { + return ht.do(func() { + ht.writeCommonHeaders(s) + ht.rw.Write(hdr) + ht.rw.Write(data) + ht.rw.(http.Flusher).Flush() + }) +} + +func (ht *serverHandlerTransport) WriteHeader(s *Stream, md metadata.MD) error { + err := ht.do(func() { + ht.writeCommonHeaders(s) + h := ht.rw.Header() + for k, vv := range md { + // Clients don't tolerate reading restricted headers after some non restricted ones were sent. + if isReservedHeader(k) { + continue + } + for _, v := range vv { + v = encodeMetadataHeader(k, v) + h.Add(k, v) + } + } + ht.rw.WriteHeader(200) + ht.rw.(http.Flusher).Flush() + }) + + if err == nil { + if ht.stats != nil { + ht.stats.HandleRPC(s.Context(), &stats.OutHeader{}) + } + } + return err +} + +func (ht *serverHandlerTransport) HandleStreams(startStream func(*Stream), traceCtx func(context.Context, string) context.Context) { + // With this transport type there will be exactly 1 stream: this HTTP request. 
+ + ctx := ht.req.Context() + var cancel context.CancelFunc + if ht.timeoutSet { + ctx, cancel = context.WithTimeout(ctx, ht.timeout) + } else { + ctx, cancel = context.WithCancel(ctx) + } + + // requestOver is closed when the status has been written via WriteStatus. + requestOver := make(chan struct{}) + go func() { + select { + case <-requestOver: + case <-ht.closedCh: + case <-ht.req.Context().Done(): + } + cancel() + ht.Close() + }() + + req := ht.req + + s := &Stream{ + id: 0, // irrelevant + requestRead: func(int) {}, + cancel: cancel, + buf: newRecvBuffer(), + st: ht, + method: req.URL.Path, + recvCompress: req.Header.Get("grpc-encoding"), + contentSubtype: ht.contentSubtype, + } + pr := &peer.Peer{ + Addr: ht.RemoteAddr(), + } + if req.TLS != nil { + pr.AuthInfo = credentials.TLSInfo{State: *req.TLS} + } + ctx = metadata.NewIncomingContext(ctx, ht.headerMD) + s.ctx = peer.NewContext(ctx, pr) + if ht.stats != nil { + s.ctx = ht.stats.TagRPC(s.ctx, &stats.RPCTagInfo{FullMethodName: s.method}) + inHeader := &stats.InHeader{ + FullMethod: s.method, + RemoteAddr: ht.RemoteAddr(), + Compression: s.recvCompress, + } + ht.stats.HandleRPC(s.ctx, inHeader) + } + s.trReader = &transportReader{ + reader: &recvBufferReader{ctx: s.ctx, ctxDone: s.ctx.Done(), recv: s.buf}, + windowHandler: func(int) {}, + } + + // readerDone is closed when the Body.Read-ing goroutine exits. + readerDone := make(chan struct{}) + go func() { + defer close(readerDone) + + // TODO: minimize garbage, optimize recvBuffer code/ownership + const readSize = 8196 + for buf := make([]byte, readSize); ; { + n, err := req.Body.Read(buf) + if n > 0 { + s.buf.put(recvMsg{data: buf[:n:n]}) + buf = buf[n:] + } + if err != nil { + s.buf.put(recvMsg{err: mapRecvMsgError(err)}) + return + } + if len(buf) == 0 { + buf = make([]byte, readSize) + } + } + }() + + // startStream is provided by the *grpc.Server's serveStreams. + // It starts a goroutine serving s and exits immediately. + // The goroutine that is started is the one that then calls + // into ht, calling WriteHeader, Write, WriteStatus, Close, etc. + startStream(s) + + ht.runStream() + close(requestOver) + + // Wait for reading goroutine to finish. + req.Body.Close() + <-readerDone +} + +func (ht *serverHandlerTransport) runStream() { + for { + select { + case fn := <-ht.writes: + fn() + case <-ht.closedCh: + return + } + } +} + +func (ht *serverHandlerTransport) IncrMsgSent() {} + +func (ht *serverHandlerTransport) IncrMsgRecv() {} + +func (ht *serverHandlerTransport) Drain() { + panic("Drain() is not implemented") +} + +// mapRecvMsgError returns the non-nil err into the appropriate +// error value as expected by callers of *grpc.parser.recvMsg. 
+// In particular, in can only be: +// * io.EOF +// * io.ErrUnexpectedEOF +// * of type transport.ConnectionError +// * an error from the status package +func mapRecvMsgError(err error) error { + if err == io.EOF || err == io.ErrUnexpectedEOF { + return err + } + if se, ok := err.(http2.StreamError); ok { + if code, ok := http2ErrConvTab[se.Code]; ok { + return status.Error(code, se.Error()) + } + } + if strings.Contains(err.Error(), "body closed by handler") { + return status.Error(codes.Canceled, err.Error()) + } + return connectionErrorf(true, err, err.Error()) +} diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_client.go b/vendor/google.golang.org/grpc/internal/transport/http2_client.go new file mode 100644 index 0000000..9dee6db --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/http2_client.go @@ -0,0 +1,1397 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package transport + +import ( + "context" + "fmt" + "io" + "math" + "net" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/syscall" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" +) + +// http2Client implements the ClientTransport interface with HTTP2. +type http2Client struct { + ctx context.Context + cancel context.CancelFunc + ctxDone <-chan struct{} // Cache the ctx.Done() chan. + userAgent string + md interface{} + conn net.Conn // underlying communication channel + loopy *loopyWriter + remoteAddr net.Addr + localAddr net.Addr + authInfo credentials.AuthInfo // auth info about the connection + + readerDone chan struct{} // sync point to enable testing. + writerDone chan struct{} // sync point to enable testing. + // goAway is closed to notify the upper layer (i.e., addrConn.transportMonitor) + // that the server sent GoAway on this transport. + goAway chan struct{} + // awakenKeepalive is used to wake up keepalive when after it has gone dormant. + awakenKeepalive chan struct{} + + framer *framer + // controlBuf delivers all the control related tasks (e.g., window + // updates, reset streams, and various settings) to the controller. + controlBuf *controlBuffer + fc *trInFlow + // The scheme used: https if TLS is on, http otherwise. + scheme string + + isSecure bool + + perRPCCreds []credentials.PerRPCCredentials + + // Boolean to keep track of reading activity on transport. + // 1 is true and 0 is false. + activity uint32 // Accessed atomically. 
+ kp keepalive.ClientParameters + keepaliveEnabled bool + + statsHandler stats.Handler + + initialWindowSize int32 + + // configured by peer through SETTINGS_MAX_HEADER_LIST_SIZE + maxSendHeaderListSize *uint32 + + bdpEst *bdpEstimator + // onPrefaceReceipt is a callback that client transport calls upon + // receiving server preface to signal that a succefull HTTP2 + // connection was established. + onPrefaceReceipt func() + + maxConcurrentStreams uint32 + streamQuota int64 + streamsQuotaAvailable chan struct{} + waitingStreams uint32 + nextID uint32 + + mu sync.Mutex // guard the following variables + state transportState + activeStreams map[uint32]*Stream + // prevGoAway ID records the Last-Stream-ID in the previous GOAway frame. + prevGoAwayID uint32 + // goAwayReason records the http2.ErrCode and debug data received with the + // GoAway frame. + goAwayReason GoAwayReason + + // Fields below are for channelz metric collection. + channelzID int64 // channelz unique identification number + czData *channelzData + + onGoAway func(GoAwayReason) + onClose func() +} + +func dial(ctx context.Context, fn func(context.Context, string) (net.Conn, error), addr string) (net.Conn, error) { + if fn != nil { + return fn(ctx, addr) + } + return (&net.Dialer{}).DialContext(ctx, "tcp", addr) +} + +func isTemporary(err error) bool { + switch err := err.(type) { + case interface { + Temporary() bool + }: + return err.Temporary() + case interface { + Timeout() bool + }: + // Timeouts may be resolved upon retry, and are thus treated as + // temporary. + return err.Timeout() + } + return true +} + +// newHTTP2Client constructs a connected ClientTransport to addr based on HTTP2 +// and starts to receive messages on it. Non-nil error returns if construction +// fails. +func newHTTP2Client(connectCtx, ctx context.Context, addr TargetInfo, opts ConnectOptions, onPrefaceReceipt func(), onGoAway func(GoAwayReason), onClose func()) (_ *http2Client, err error) { + scheme := "http" + ctx, cancel := context.WithCancel(ctx) + defer func() { + if err != nil { + cancel() + } + }() + + conn, err := dial(connectCtx, opts.Dialer, addr.Addr) + if err != nil { + if opts.FailOnNonTempDialError { + return nil, connectionErrorf(isTemporary(err), err, "transport: error while dialing: %v", err) + } + return nil, connectionErrorf(true, err, "transport: Error while dialing %v", err) + } + // Any further errors will close the underlying connection + defer func(conn net.Conn) { + if err != nil { + conn.Close() + } + }(conn) + kp := opts.KeepaliveParams + // Validate keepalive parameters. 
+ if kp.Time == 0 { + kp.Time = defaultClientKeepaliveTime + } + if kp.Timeout == 0 { + kp.Timeout = defaultClientKeepaliveTimeout + } + keepaliveEnabled := false + if kp.Time != infinity { + if err = syscall.SetTCPUserTimeout(conn, kp.Timeout); err != nil { + return nil, connectionErrorf(false, err, "transport: failed to set TCP_USER_TIMEOUT: %v", err) + } + keepaliveEnabled = true + } + var ( + isSecure bool + authInfo credentials.AuthInfo + ) + transportCreds := opts.TransportCredentials + perRPCCreds := opts.PerRPCCredentials + + if b := opts.CredsBundle; b != nil { + if t := b.TransportCredentials(); t != nil { + transportCreds = t + } + if t := b.PerRPCCredentials(); t != nil { + perRPCCreds = append(perRPCCreds, t) + } + } + if transportCreds != nil { + scheme = "https" + conn, authInfo, err = transportCreds.ClientHandshake(connectCtx, addr.Authority, conn) + if err != nil { + return nil, connectionErrorf(isTemporary(err), err, "transport: authentication handshake failed: %v", err) + } + isSecure = true + } + dynamicWindow := true + icwz := int32(initialWindowSize) + if opts.InitialConnWindowSize >= defaultWindowSize { + icwz = opts.InitialConnWindowSize + dynamicWindow = false + } + writeBufSize := opts.WriteBufferSize + readBufSize := opts.ReadBufferSize + maxHeaderListSize := defaultClientMaxHeaderListSize + if opts.MaxHeaderListSize != nil { + maxHeaderListSize = *opts.MaxHeaderListSize + } + t := &http2Client{ + ctx: ctx, + ctxDone: ctx.Done(), // Cache Done chan. + cancel: cancel, + userAgent: opts.UserAgent, + md: addr.Metadata, + conn: conn, + remoteAddr: conn.RemoteAddr(), + localAddr: conn.LocalAddr(), + authInfo: authInfo, + readerDone: make(chan struct{}), + writerDone: make(chan struct{}), + goAway: make(chan struct{}), + awakenKeepalive: make(chan struct{}, 1), + framer: newFramer(conn, writeBufSize, readBufSize, maxHeaderListSize), + fc: &trInFlow{limit: uint32(icwz)}, + scheme: scheme, + activeStreams: make(map[uint32]*Stream), + isSecure: isSecure, + perRPCCreds: perRPCCreds, + kp: kp, + statsHandler: opts.StatsHandler, + initialWindowSize: initialWindowSize, + onPrefaceReceipt: onPrefaceReceipt, + nextID: 1, + maxConcurrentStreams: defaultMaxStreamsClient, + streamQuota: defaultMaxStreamsClient, + streamsQuotaAvailable: make(chan struct{}, 1), + czData: new(channelzData), + onGoAway: onGoAway, + onClose: onClose, + keepaliveEnabled: keepaliveEnabled, + } + t.controlBuf = newControlBuffer(t.ctxDone) + if opts.InitialWindowSize >= defaultWindowSize { + t.initialWindowSize = opts.InitialWindowSize + dynamicWindow = false + } + if dynamicWindow { + t.bdpEst = &bdpEstimator{ + bdp: initialWindowSize, + updateFlowControl: t.updateFlowControl, + } + } + // Make sure awakenKeepalive can't be written upon. + // keepalive routine will make it writable, if need be. + t.awakenKeepalive <- struct{}{} + if t.statsHandler != nil { + t.ctx = t.statsHandler.TagConn(t.ctx, &stats.ConnTagInfo{ + RemoteAddr: t.remoteAddr, + LocalAddr: t.localAddr, + }) + connBegin := &stats.ConnBegin{ + Client: true, + } + t.statsHandler.HandleConn(t.ctx, connBegin) + } + if channelz.IsOn() { + t.channelzID = channelz.RegisterNormalSocket(t, opts.ChannelzParentID, fmt.Sprintf("%s -> %s", t.localAddr, t.remoteAddr)) + } + if t.keepaliveEnabled { + go t.keepalive() + } + // Start the reader goroutine for incoming message. Each transport has + // a dedicated goroutine which reads HTTP2 frame from network. Then it + // dispatches the frame to the corresponding stream entity. 
+ go t.reader() + + // Send connection preface to server. + n, err := t.conn.Write(clientPreface) + if err != nil { + t.Close() + return nil, connectionErrorf(true, err, "transport: failed to write client preface: %v", err) + } + if n != len(clientPreface) { + t.Close() + return nil, connectionErrorf(true, err, "transport: preface mismatch, wrote %d bytes; want %d", n, len(clientPreface)) + } + var ss []http2.Setting + + if t.initialWindowSize != defaultWindowSize { + ss = append(ss, http2.Setting{ + ID: http2.SettingInitialWindowSize, + Val: uint32(t.initialWindowSize), + }) + } + if opts.MaxHeaderListSize != nil { + ss = append(ss, http2.Setting{ + ID: http2.SettingMaxHeaderListSize, + Val: *opts.MaxHeaderListSize, + }) + } + err = t.framer.fr.WriteSettings(ss...) + if err != nil { + t.Close() + return nil, connectionErrorf(true, err, "transport: failed to write initial settings frame: %v", err) + } + // Adjust the connection flow control window if needed. + if delta := uint32(icwz - defaultWindowSize); delta > 0 { + if err := t.framer.fr.WriteWindowUpdate(0, delta); err != nil { + t.Close() + return nil, connectionErrorf(true, err, "transport: failed to write window update: %v", err) + } + } + + if err := t.framer.writer.Flush(); err != nil { + return nil, err + } + go func() { + t.loopy = newLoopyWriter(clientSide, t.framer, t.controlBuf, t.bdpEst) + err := t.loopy.run() + if err != nil { + errorf("transport: loopyWriter.run returning. Err: %v", err) + } + // If it's a connection error, let reader goroutine handle it + // since there might be data in the buffers. + if _, ok := err.(net.Error); !ok { + t.conn.Close() + } + close(t.writerDone) + }() + return t, nil +} + +func (t *http2Client) newStream(ctx context.Context, callHdr *CallHdr) *Stream { + // TODO(zhaoq): Handle uint32 overflow of Stream.id. + s := &Stream{ + done: make(chan struct{}), + method: callHdr.Method, + sendCompress: callHdr.SendCompress, + buf: newRecvBuffer(), + headerChan: make(chan struct{}), + contentSubtype: callHdr.ContentSubtype, + } + s.wq = newWriteQuota(defaultWriteQuota, s.done) + s.requestRead = func(n int) { + t.adjustWindow(s, uint32(n)) + } + // The client side stream context should have exactly the same life cycle with the user provided context. + // That means, s.ctx should be read-only. And s.ctx is done iff ctx is done. + // So we use the original context here instead of creating a copy. + s.ctx = ctx + s.trReader = &transportReader{ + reader: &recvBufferReader{ + ctx: s.ctx, + ctxDone: s.ctx.Done(), + recv: s.buf, + closeStream: func(err error) { + t.CloseStream(s, err) + }, + }, + windowHandler: func(n int) { + t.updateWindow(s, uint32(n)) + }, + } + return s +} + +func (t *http2Client) getPeer() *peer.Peer { + pr := &peer.Peer{ + Addr: t.remoteAddr, + } + // Attach Auth info if there is any. + if t.authInfo != nil { + pr.AuthInfo = t.authInfo + } + return pr +} + +func (t *http2Client) createHeaderFields(ctx context.Context, callHdr *CallHdr) ([]hpack.HeaderField, error) { + aud := t.createAudience(callHdr) + authData, err := t.getTrAuthData(ctx, aud) + if err != nil { + return nil, err + } + callAuthData, err := t.getCallAuthData(ctx, aud, callHdr) + if err != nil { + return nil, err + } + // TODO(mmukhi): Benchmark if the performance gets better if count the metadata and other header fields + // first and create a slice of that exact size. + // Make the slice of certain predictable size to reduce allocations made by append. 
+ hfLen := 7 // :method, :scheme, :path, :authority, content-type, user-agent, te + hfLen += len(authData) + len(callAuthData) + headerFields := make([]hpack.HeaderField, 0, hfLen) + headerFields = append(headerFields, hpack.HeaderField{Name: ":method", Value: "POST"}) + headerFields = append(headerFields, hpack.HeaderField{Name: ":scheme", Value: t.scheme}) + headerFields = append(headerFields, hpack.HeaderField{Name: ":path", Value: callHdr.Method}) + headerFields = append(headerFields, hpack.HeaderField{Name: ":authority", Value: callHdr.Host}) + headerFields = append(headerFields, hpack.HeaderField{Name: "content-type", Value: contentType(callHdr.ContentSubtype)}) + headerFields = append(headerFields, hpack.HeaderField{Name: "user-agent", Value: t.userAgent}) + headerFields = append(headerFields, hpack.HeaderField{Name: "te", Value: "trailers"}) + if callHdr.PreviousAttempts > 0 { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-previous-rpc-attempts", Value: strconv.Itoa(callHdr.PreviousAttempts)}) + } + + if callHdr.SendCompress != "" { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-encoding", Value: callHdr.SendCompress}) + } + if dl, ok := ctx.Deadline(); ok { + // Send out timeout regardless its value. The server can detect timeout context by itself. + // TODO(mmukhi): Perhaps this field should be updated when actually writing out to the wire. + timeout := time.Until(dl) + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-timeout", Value: encodeTimeout(timeout)}) + } + for k, v := range authData { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) + } + for k, v := range callAuthData { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) + } + if b := stats.OutgoingTags(ctx); b != nil { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-tags-bin", Value: encodeBinHeader(b)}) + } + if b := stats.OutgoingTrace(ctx); b != nil { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-trace-bin", Value: encodeBinHeader(b)}) + } + + if md, added, ok := metadata.FromOutgoingContextRaw(ctx); ok { + var k string + for _, vv := range added { + for i, v := range vv { + if i%2 == 0 { + k = v + continue + } + // HTTP doesn't allow you to set pseudoheaders after non pseudoheaders were set. + if isReservedHeader(k) { + continue + } + headerFields = append(headerFields, hpack.HeaderField{Name: strings.ToLower(k), Value: encodeMetadataHeader(k, v)}) + } + } + for k, vv := range md { + // HTTP doesn't allow you to set pseudoheaders after non pseudoheaders were set. + if isReservedHeader(k) { + continue + } + for _, v := range vv { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) + } + } + } + if md, ok := t.md.(*metadata.MD); ok { + for k, vv := range *md { + if isReservedHeader(k) { + continue + } + for _, v := range vv { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) + } + } + } + return headerFields, nil +} + +func (t *http2Client) createAudience(callHdr *CallHdr) string { + // Create an audience string only if needed. + if len(t.perRPCCreds) == 0 && callHdr.Creds == nil { + return "" + } + // Construct URI required to get auth request metadata. + // Omit port if it is the default one. 
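+ // For illustration (example values): a callHdr with Host "example.com:443"
+ // and Method "/helloworld.Greeter/SayHello" yields the audience
+ // "https://example.com/helloworld.Greeter", i.e. the service URI with the
+ // default port stripped and the method name omitted.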
+ host := strings.TrimSuffix(callHdr.Host, ":443") + pos := strings.LastIndex(callHdr.Method, "/") + if pos == -1 { + pos = len(callHdr.Method) + } + return "https://" + host + callHdr.Method[:pos] +} + +func (t *http2Client) getTrAuthData(ctx context.Context, audience string) (map[string]string, error) { + authData := map[string]string{} + for _, c := range t.perRPCCreds { + data, err := c.GetRequestMetadata(ctx, audience) + if err != nil { + if _, ok := status.FromError(err); ok { + return nil, err + } + + return nil, status.Errorf(codes.Unauthenticated, "transport: %v", err) + } + for k, v := range data { + // Capital header names are illegal in HTTP/2. + k = strings.ToLower(k) + authData[k] = v + } + } + return authData, nil +} + +func (t *http2Client) getCallAuthData(ctx context.Context, audience string, callHdr *CallHdr) (map[string]string, error) { + callAuthData := map[string]string{} + // Check if credentials.PerRPCCredentials were provided via call options. + // Note: if these credentials are provided both via dial options and call + // options, then both sets of credentials will be applied. + if callCreds := callHdr.Creds; callCreds != nil { + if !t.isSecure && callCreds.RequireTransportSecurity() { + return nil, status.Error(codes.Unauthenticated, "transport: cannot send secure credentials on an insecure connection") + } + data, err := callCreds.GetRequestMetadata(ctx, audience) + if err != nil { + return nil, status.Errorf(codes.Internal, "transport: %v", err) + } + for k, v := range data { + // Capital header names are illegal in HTTP/2 + k = strings.ToLower(k) + callAuthData[k] = v + } + } + return callAuthData, nil +} + +// NewStream creates a stream and registers it into the transport as "active" +// streams. +func (t *http2Client) NewStream(ctx context.Context, callHdr *CallHdr) (_ *Stream, err error) { + ctx = peer.NewContext(ctx, t.getPeer()) + headerFields, err := t.createHeaderFields(ctx, callHdr) + if err != nil { + return nil, err + } + s := t.newStream(ctx, callHdr) + cleanup := func(err error) { + if s.swapState(streamDone) == streamDone { + // If it was already done, return. + return + } + // The stream was unprocessed by the server. + atomic.StoreUint32(&s.unprocessed, 1) + s.write(recvMsg{err: err}) + close(s.done) + // If headerChan isn't closed, then close it. + if atomic.SwapUint32(&s.headerDone, 1) == 0 { + close(s.headerChan) + } + + } + hdr := &headerFrame{ + hf: headerFields, + endStream: false, + initStream: func(id uint32) (bool, error) { + t.mu.Lock() + if state := t.state; state != reachable { + t.mu.Unlock() + // Do a quick cleanup. + err := error(errStreamDrain) + if state == closing { + err = ErrConnClosing + } + cleanup(err) + return false, err + } + t.activeStreams[id] = s + if channelz.IsOn() { + atomic.AddInt64(&t.czData.streamsStarted, 1) + atomic.StoreInt64(&t.czData.lastStreamCreatedTime, time.Now().UnixNano()) + } + var sendPing bool + // If the number of active streams change from 0 to 1, then check if keepalive + // has gone dormant. If so, wake it up. + if len(t.activeStreams) == 1 && t.keepaliveEnabled { + select { + case t.awakenKeepalive <- struct{}{}: + sendPing = true + // Fill the awakenKeepalive channel again as this channel must be + // kept non-writable except at the point that the keepalive() + // goroutine is waiting either to be awaken or shutdown. 
+ t.awakenKeepalive <- struct{}{} + default: + } + } + t.mu.Unlock() + return sendPing, nil + }, + onOrphaned: cleanup, + wq: s.wq, + } + firstTry := true + var ch chan struct{} + checkForStreamQuota := func(it interface{}) bool { + if t.streamQuota <= 0 { // Can go negative if server decreases it. + if firstTry { + t.waitingStreams++ + } + ch = t.streamsQuotaAvailable + return false + } + if !firstTry { + t.waitingStreams-- + } + t.streamQuota-- + h := it.(*headerFrame) + h.streamID = t.nextID + t.nextID += 2 + s.id = h.streamID + s.fc = &inFlow{limit: uint32(t.initialWindowSize)} + if t.streamQuota > 0 && t.waitingStreams > 0 { + select { + case t.streamsQuotaAvailable <- struct{}{}: + default: + } + } + return true + } + var hdrListSizeErr error + checkForHeaderListSize := func(it interface{}) bool { + if t.maxSendHeaderListSize == nil { + return true + } + hdrFrame := it.(*headerFrame) + var sz int64 + for _, f := range hdrFrame.hf { + if sz += int64(f.Size()); sz > int64(*t.maxSendHeaderListSize) { + hdrListSizeErr = status.Errorf(codes.Internal, "header list size to send violates the maximum size (%d bytes) set by server", *t.maxSendHeaderListSize) + return false + } + } + return true + } + for { + success, err := t.controlBuf.executeAndPut(func(it interface{}) bool { + if !checkForStreamQuota(it) { + return false + } + if !checkForHeaderListSize(it) { + return false + } + return true + }, hdr) + if err != nil { + return nil, err + } + if success { + break + } + if hdrListSizeErr != nil { + return nil, hdrListSizeErr + } + firstTry = false + select { + case <-ch: + case <-s.ctx.Done(): + return nil, ContextErr(s.ctx.Err()) + case <-t.goAway: + return nil, errStreamDrain + case <-t.ctx.Done(): + return nil, ErrConnClosing + } + } + if t.statsHandler != nil { + outHeader := &stats.OutHeader{ + Client: true, + FullMethod: callHdr.Method, + RemoteAddr: t.remoteAddr, + LocalAddr: t.localAddr, + Compression: callHdr.SendCompress, + } + t.statsHandler.HandleRPC(s.ctx, outHeader) + } + return s, nil +} + +// CloseStream clears the footprint of a stream when the stream is not needed any more. +// This must not be executed in reader's goroutine. +func (t *http2Client) CloseStream(s *Stream, err error) { + var ( + rst bool + rstCode http2.ErrCode + ) + if err != nil { + rst = true + rstCode = http2.ErrCodeCancel + } + t.closeStream(s, err, rst, rstCode, status.Convert(err), nil, false) +} + +func (t *http2Client) closeStream(s *Stream, err error, rst bool, rstCode http2.ErrCode, st *status.Status, mdata map[string][]string, eosReceived bool) { + // Set stream status to done. + if s.swapState(streamDone) == streamDone { + // If it was already done, return. If multiple closeStream calls + // happen simultaneously, wait for the first to finish. + <-s.done + return + } + // status and trailers can be updated here without any synchronization because the stream goroutine will + // only read it after it sees an io.EOF error from read or write and we'll write those errors + // only after updating this. + s.status = st + if len(mdata) > 0 { + s.trailer = mdata + } + if err != nil { + // This will unblock reads eventually. + s.write(recvMsg{err: err}) + } + // If headerChan isn't closed, then close it. 
+ if atomic.SwapUint32(&s.headerDone, 1) == 0 { + s.noHeaders = true + close(s.headerChan) + } + cleanup := &cleanupStream{ + streamID: s.id, + onWrite: func() { + t.mu.Lock() + if t.activeStreams != nil { + delete(t.activeStreams, s.id) + } + t.mu.Unlock() + if channelz.IsOn() { + if eosReceived { + atomic.AddInt64(&t.czData.streamsSucceeded, 1) + } else { + atomic.AddInt64(&t.czData.streamsFailed, 1) + } + } + }, + rst: rst, + rstCode: rstCode, + } + addBackStreamQuota := func(interface{}) bool { + t.streamQuota++ + if t.streamQuota > 0 && t.waitingStreams > 0 { + select { + case t.streamsQuotaAvailable <- struct{}{}: + default: + } + } + return true + } + t.controlBuf.executeAndPut(addBackStreamQuota, cleanup) + // This will unblock write. + close(s.done) +} + +// Close kicks off the shutdown process of the transport. This should be called +// only once on a transport. Once it is called, the transport should not be +// accessed any more. +// +// This method blocks until the addrConn that initiated this transport is +// re-connected. This happens because t.onClose() begins reconnect logic at the +// addrConn level and blocks until the addrConn is successfully connected. +func (t *http2Client) Close() error { + t.mu.Lock() + // Make sure we only Close once. + if t.state == closing { + t.mu.Unlock() + return nil + } + t.state = closing + streams := t.activeStreams + t.activeStreams = nil + t.mu.Unlock() + t.controlBuf.finish() + t.cancel() + err := t.conn.Close() + if channelz.IsOn() { + channelz.RemoveEntry(t.channelzID) + } + // Notify all active streams. + for _, s := range streams { + t.closeStream(s, ErrConnClosing, false, http2.ErrCodeNo, status.New(codes.Unavailable, ErrConnClosing.Desc), nil, false) + } + if t.statsHandler != nil { + connEnd := &stats.ConnEnd{ + Client: true, + } + t.statsHandler.HandleConn(t.ctx, connEnd) + } + t.onClose() + return err +} + +// GracefulClose sets the state to draining, which prevents new streams from +// being created and causes the transport to be closed when the last active +// stream is closed. If there are no active streams, the transport is closed +// immediately. This does nothing if the transport is already draining or +// closing. +func (t *http2Client) GracefulClose() error { + t.mu.Lock() + // Make sure we move to draining only from active. + if t.state == draining || t.state == closing { + t.mu.Unlock() + return nil + } + t.state = draining + active := len(t.activeStreams) + t.mu.Unlock() + if active == 0 { + return t.Close() + } + t.controlBuf.put(&incomingGoAway{}) + return nil +} + +// Write formats the data into HTTP2 data frame(s) and sends it out. The caller +// should proceed only if Write returns nil. +func (t *http2Client) Write(s *Stream, hdr []byte, data []byte, opts *Options) error { + if opts.Last { + // If it's the last message, update stream state. + if !s.compareAndSwapState(streamActive, streamWriteDone) { + return errStreamDone + } + } else if s.getState() != streamActive { + return errStreamDone + } + df := &dataFrame{ + streamID: s.id, + endStream: opts.Last, + } + if hdr != nil || data != nil { // If it's not an empty data frame. + // Add some data to grpc message header so that we can equally + // distribute bytes across frames. + emptyLen := http2MaxFrameLen - len(hdr) + if emptyLen > len(data) { + emptyLen = len(data) + } + hdr = append(hdr, data[:emptyLen]...) + data = data[emptyLen:] + df.h, df.d = hdr, data + // TODO(mmukhi): The above logic in this if can be moved to loopyWriter's data handler. 
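+ // For illustration: hdr is typically the 5-byte gRPC length prefix, so with
+ // the 16KB HTTP/2 max frame length used by this package roughly the first
+ // 16379 bytes of data are moved into hdr above, letting loopy cut the
+ // combined buffer into evenly filled DATA frames instead of sending a tiny
+ // frame for the prefix alone.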
+ if err := s.wq.get(int32(len(hdr) + len(data))); err != nil { + return err + } + } + return t.controlBuf.put(df) +} + +func (t *http2Client) getStream(f http2.Frame) (*Stream, bool) { + t.mu.Lock() + defer t.mu.Unlock() + s, ok := t.activeStreams[f.Header().StreamID] + return s, ok +} + +// adjustWindow sends out extra window update over the initial window size +// of stream if the application is requesting data larger in size than +// the window. +func (t *http2Client) adjustWindow(s *Stream, n uint32) { + if w := s.fc.maybeAdjust(n); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{streamID: s.id, increment: w}) + } +} + +// updateWindow adjusts the inbound quota for the stream. +// Window updates will be sent out when the cumulative quota +// exceeds the corresponding threshold. +func (t *http2Client) updateWindow(s *Stream, n uint32) { + if w := s.fc.onRead(n); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{streamID: s.id, increment: w}) + } +} + +// updateFlowControl updates the incoming flow control windows +// for the transport and the stream based on the current bdp +// estimation. +func (t *http2Client) updateFlowControl(n uint32) { + t.mu.Lock() + for _, s := range t.activeStreams { + s.fc.newLimit(n) + } + t.mu.Unlock() + updateIWS := func(interface{}) bool { + t.initialWindowSize = int32(n) + return true + } + t.controlBuf.executeAndPut(updateIWS, &outgoingWindowUpdate{streamID: 0, increment: t.fc.newLimit(n)}) + t.controlBuf.put(&outgoingSettings{ + ss: []http2.Setting{ + { + ID: http2.SettingInitialWindowSize, + Val: n, + }, + }, + }) +} + +func (t *http2Client) handleData(f *http2.DataFrame) { + size := f.Header().Length + var sendBDPPing bool + if t.bdpEst != nil { + sendBDPPing = t.bdpEst.add(size) + } + // Decouple connection's flow control from application's read. + // An update on connection's flow control should not depend on + // whether user application has read the data or not. Such a + // restriction is already imposed on the stream's flow control, + // and therefore the sender will be blocked anyways. + // Decoupling the connection flow control will prevent other + // active(fast) streams from starving in presence of slow or + // inactive streams. + // + if w := t.fc.onData(size); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{ + streamID: 0, + increment: w, + }) + } + if sendBDPPing { + // Avoid excessive ping detection (e.g. in an L7 proxy) + // by sending a window update prior to the BDP ping. + + if w := t.fc.reset(); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{ + streamID: 0, + increment: w, + }) + } + + t.controlBuf.put(bdpPing) + } + // Select the right stream to dispatch. + s, ok := t.getStream(f) + if !ok { + return + } + if size > 0 { + if err := s.fc.onData(size); err != nil { + t.closeStream(s, io.EOF, true, http2.ErrCodeFlowControl, status.New(codes.Internal, err.Error()), nil, false) + return + } + if f.Header().Flags.Has(http2.FlagDataPadded) { + if w := s.fc.onRead(size - uint32(len(f.Data()))); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{s.id, w}) + } + } + // TODO(bradfitz, zhaoq): A copy is required here because there is no + // guarantee f.Data() is consumed before the arrival of next frame. + // Can this copy be eliminated? + if len(f.Data()) > 0 { + data := make([]byte, len(f.Data())) + copy(data, f.Data()) + s.write(recvMsg{data: data}) + } + } + // The server has closed the stream without sending trailers. Record that + // the read direction is closed, and set the status appropriately. 
+ if f.FrameHeader.Flags.Has(http2.FlagDataEndStream) { + t.closeStream(s, io.EOF, false, http2.ErrCodeNo, status.New(codes.Internal, "server closed the stream without sending trailers"), nil, true) + } +} + +func (t *http2Client) handleRSTStream(f *http2.RSTStreamFrame) { + s, ok := t.getStream(f) + if !ok { + return + } + if f.ErrCode == http2.ErrCodeRefusedStream { + // The stream was unprocessed by the server. + atomic.StoreUint32(&s.unprocessed, 1) + } + statusCode, ok := http2ErrConvTab[f.ErrCode] + if !ok { + warningf("transport: http2Client.handleRSTStream found no mapped gRPC status for the received http2 error %v", f.ErrCode) + statusCode = codes.Unknown + } + if statusCode == codes.Canceled { + // Our deadline was already exceeded, and that was likely the cause of + // this cancelation. Alter the status code accordingly. + if d, ok := s.ctx.Deadline(); ok && d.After(time.Now()) { + statusCode = codes.DeadlineExceeded + } + } + t.closeStream(s, io.EOF, false, http2.ErrCodeNo, status.Newf(statusCode, "stream terminated by RST_STREAM with error code: %v", f.ErrCode), nil, false) +} + +func (t *http2Client) handleSettings(f *http2.SettingsFrame, isFirst bool) { + if f.IsAck() { + return + } + var maxStreams *uint32 + var ss []http2.Setting + var updateFuncs []func() + f.ForeachSetting(func(s http2.Setting) error { + switch s.ID { + case http2.SettingMaxConcurrentStreams: + maxStreams = new(uint32) + *maxStreams = s.Val + case http2.SettingMaxHeaderListSize: + updateFuncs = append(updateFuncs, func() { + t.maxSendHeaderListSize = new(uint32) + *t.maxSendHeaderListSize = s.Val + }) + default: + ss = append(ss, s) + } + return nil + }) + if isFirst && maxStreams == nil { + maxStreams = new(uint32) + *maxStreams = math.MaxUint32 + } + sf := &incomingSettings{ + ss: ss, + } + if maxStreams != nil { + updateStreamQuota := func() { + delta := int64(*maxStreams) - int64(t.maxConcurrentStreams) + t.maxConcurrentStreams = *maxStreams + t.streamQuota += delta + if delta > 0 && t.waitingStreams > 0 { + close(t.streamsQuotaAvailable) // wake all of them up. + t.streamsQuotaAvailable = make(chan struct{}, 1) + } + } + updateFuncs = append(updateFuncs, updateStreamQuota) + } + t.controlBuf.executeAndPut(func(interface{}) bool { + for _, f := range updateFuncs { + f() + } + return true + }, sf) +} + +func (t *http2Client) handlePing(f *http2.PingFrame) { + if f.IsAck() { + // Maybe it's a BDP ping. + if t.bdpEst != nil { + t.bdpEst.calculate(f.Data) + } + return + } + pingAck := &ping{ack: true} + copy(pingAck.data[:], f.Data[:]) + t.controlBuf.put(pingAck) +} + +func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { + t.mu.Lock() + if t.state == closing { + t.mu.Unlock() + return + } + if f.ErrCode == http2.ErrCodeEnhanceYourCalm { + infof("Client received GoAway with http2.ErrCodeEnhanceYourCalm.") + } + id := f.LastStreamID + if id > 0 && id%2 != 1 { + t.mu.Unlock() + t.Close() + return + } + // A client can receive multiple GoAways from the server (see + // https://github.com/grpc/grpc-go/issues/1387). The idea is that the first + // GoAway will be sent with an ID of MaxInt32 and the second GoAway will be + // sent after an RTT delay with the ID of the last stream the server will + // process. + // + // Therefore, when we get the first GoAway we don't necessarily close any + // streams. While in case of second GoAway we close all streams created after + // the GoAwayId. This way streams that were in-flight while the GoAway from + // server was being sent don't get killed. 
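+ // For illustration (example IDs): on a graceful shutdown the server first
+ // sends GOAWAY with LastStreamID MaxInt32 (2^31-1), which closes nothing
+ // here, and after an RTT a second GOAWAY with, say, LastStreamID 7; at that
+ // point client streams 9, 11, ... are marked unprocessed and closed below,
+ // while streams up to 7 are left to finish normally.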
+ select { + case <-t.goAway: // t.goAway has been closed (i.e.,multiple GoAways). + // If there are multiple GoAways the first one should always have an ID greater than the following ones. + if id > t.prevGoAwayID { + t.mu.Unlock() + t.Close() + return + } + default: + t.setGoAwayReason(f) + close(t.goAway) + t.state = draining + t.controlBuf.put(&incomingGoAway{}) + + // This has to be a new goroutine because we're still using the current goroutine to read in the transport. + t.onGoAway(t.goAwayReason) + } + // All streams with IDs greater than the GoAwayId + // and smaller than the previous GoAway ID should be killed. + upperLimit := t.prevGoAwayID + if upperLimit == 0 { // This is the first GoAway Frame. + upperLimit = math.MaxUint32 // Kill all streams after the GoAway ID. + } + for streamID, stream := range t.activeStreams { + if streamID > id && streamID <= upperLimit { + // The stream was unprocessed by the server. + atomic.StoreUint32(&stream.unprocessed, 1) + t.closeStream(stream, errStreamDrain, false, http2.ErrCodeNo, statusGoAway, nil, false) + } + } + t.prevGoAwayID = id + active := len(t.activeStreams) + t.mu.Unlock() + if active == 0 { + t.Close() + } +} + +// setGoAwayReason sets the value of t.goAwayReason based +// on the GoAway frame received. +// It expects a lock on transport's mutext to be held by +// the caller. +func (t *http2Client) setGoAwayReason(f *http2.GoAwayFrame) { + t.goAwayReason = GoAwayNoReason + switch f.ErrCode { + case http2.ErrCodeEnhanceYourCalm: + if string(f.DebugData()) == "too_many_pings" { + t.goAwayReason = GoAwayTooManyPings + } + } +} + +func (t *http2Client) GetGoAwayReason() GoAwayReason { + t.mu.Lock() + defer t.mu.Unlock() + return t.goAwayReason +} + +func (t *http2Client) handleWindowUpdate(f *http2.WindowUpdateFrame) { + t.controlBuf.put(&incomingWindowUpdate{ + streamID: f.Header().StreamID, + increment: f.Increment, + }) +} + +// operateHeaders takes action on the decoded headers. +func (t *http2Client) operateHeaders(frame *http2.MetaHeadersFrame) { + s, ok := t.getStream(frame) + if !ok { + return + } + endStream := frame.StreamEnded() + atomic.StoreUint32(&s.bytesReceived, 1) + initialHeader := atomic.SwapUint32(&s.headerDone, 1) == 0 + + if !initialHeader && !endStream { + // As specified by RFC 7540, a HEADERS frame (and associated CONTINUATION frames) can only appear + // at the start or end of a stream. Therefore, second HEADERS frame must have EOS bit set. + st := status.New(codes.Internal, "a HEADERS frame cannot appear in the middle of a stream") + t.closeStream(s, st.Err(), true, http2.ErrCodeProtocol, st, nil, false) + return + } + + state := &decodeState{} + // Initialize isGRPC value to be !initialHeader, since if a gRPC ResponseHeader has been received + // which indicates peer speaking gRPC, we are in gRPC mode. + state.data.isGRPC = !initialHeader + if err := state.decodeHeader(frame); err != nil { + t.closeStream(s, err, true, http2.ErrCodeProtocol, status.Convert(err), nil, endStream) + return + } + + var isHeader bool + defer func() { + if t.statsHandler != nil { + if isHeader { + inHeader := &stats.InHeader{ + Client: true, + WireLength: int(frame.Header().Length), + } + t.statsHandler.HandleRPC(s.ctx, inHeader) + } else { + inTrailer := &stats.InTrailer{ + Client: true, + WireLength: int(frame.Header().Length), + } + t.statsHandler.HandleRPC(s.ctx, inTrailer) + } + } + }() + + // If headers haven't been received yet. + if initialHeader { + if !endStream { + // Headers frame is ResponseHeader. 
+ isHeader = true + // These values can be set without any synchronization because + // stream goroutine will read it only after seeing a closed + // headerChan which we'll close after setting this. + s.recvCompress = state.data.encoding + if len(state.data.mdata) > 0 { + s.header = state.data.mdata + } + close(s.headerChan) + return + } + // Headers frame is Trailers-only. + s.noHeaders = true + close(s.headerChan) + } + + // if client received END_STREAM from server while stream was still active, send RST_STREAM + rst := s.getState() == streamActive + t.closeStream(s, io.EOF, rst, http2.ErrCodeNo, state.status(), state.data.mdata, true) +} + +// reader runs as a separate goroutine in charge of reading data from network +// connection. +// +// TODO(zhaoq): currently one reader per transport. Investigate whether this is +// optimal. +// TODO(zhaoq): Check the validity of the incoming frame sequence. +func (t *http2Client) reader() { + defer close(t.readerDone) + // Check the validity of server preface. + frame, err := t.framer.fr.ReadFrame() + if err != nil { + t.Close() // this kicks off resetTransport, so must be last before return + return + } + t.conn.SetReadDeadline(time.Time{}) // reset deadline once we get the settings frame (we didn't time out, yay!) + if t.keepaliveEnabled { + atomic.CompareAndSwapUint32(&t.activity, 0, 1) + } + sf, ok := frame.(*http2.SettingsFrame) + if !ok { + t.Close() // this kicks off resetTransport, so must be last before return + return + } + t.onPrefaceReceipt() + t.handleSettings(sf, true) + + // loop to keep reading incoming messages on this transport. + for { + frame, err := t.framer.fr.ReadFrame() + if t.keepaliveEnabled { + atomic.CompareAndSwapUint32(&t.activity, 0, 1) + } + if err != nil { + // Abort an active stream if the http2.Framer returns a + // http2.StreamError. This can happen only if the server's response + // is malformed http2. + if se, ok := err.(http2.StreamError); ok { + t.mu.Lock() + s := t.activeStreams[se.StreamID] + t.mu.Unlock() + if s != nil { + // use error detail to provide better err message + code := http2ErrConvTab[se.Code] + msg := t.framer.fr.ErrorDetail().Error() + t.closeStream(s, status.Error(code, msg), true, http2.ErrCodeProtocol, status.New(code, msg), nil, false) + } + continue + } else { + // Transport error. + t.Close() + return + } + } + switch frame := frame.(type) { + case *http2.MetaHeadersFrame: + t.operateHeaders(frame) + case *http2.DataFrame: + t.handleData(frame) + case *http2.RSTStreamFrame: + t.handleRSTStream(frame) + case *http2.SettingsFrame: + t.handleSettings(frame, false) + case *http2.PingFrame: + t.handlePing(frame) + case *http2.GoAwayFrame: + t.handleGoAway(frame) + case *http2.WindowUpdateFrame: + t.handleWindowUpdate(frame) + default: + errorf("transport: http2Client.reader got unhandled frame type %v.", frame) + } + } +} + +// keepalive running in a separate goroutune makes sure the connection is alive by sending pings. +func (t *http2Client) keepalive() { + p := &ping{data: [8]byte{}} + timer := time.NewTimer(t.kp.Time) + for { + select { + case <-timer.C: + if atomic.CompareAndSwapUint32(&t.activity, 1, 0) { + timer.Reset(t.kp.Time) + continue + } + // Check if keepalive should go dormant. + t.mu.Lock() + if len(t.activeStreams) < 1 && !t.kp.PermitWithoutStream { + // Make awakenKeepalive writable. + <-t.awakenKeepalive + t.mu.Unlock() + select { + case <-t.awakenKeepalive: + // If the control gets here a ping has been sent + // need to reset the timer with keepalive.Timeout. 
+ case <-t.ctx.Done(): + return + } + } else { + t.mu.Unlock() + if channelz.IsOn() { + atomic.AddInt64(&t.czData.kpCount, 1) + } + // Send ping. + t.controlBuf.put(p) + } + + // By the time control gets here a ping has been sent one way or the other. + timer.Reset(t.kp.Timeout) + select { + case <-timer.C: + if atomic.CompareAndSwapUint32(&t.activity, 1, 0) { + timer.Reset(t.kp.Time) + continue + } + t.Close() + return + case <-t.ctx.Done(): + if !timer.Stop() { + <-timer.C + } + return + } + case <-t.ctx.Done(): + if !timer.Stop() { + <-timer.C + } + return + } + } +} + +func (t *http2Client) Error() <-chan struct{} { + return t.ctx.Done() +} + +func (t *http2Client) GoAway() <-chan struct{} { + return t.goAway +} + +func (t *http2Client) ChannelzMetric() *channelz.SocketInternalMetric { + s := channelz.SocketInternalMetric{ + StreamsStarted: atomic.LoadInt64(&t.czData.streamsStarted), + StreamsSucceeded: atomic.LoadInt64(&t.czData.streamsSucceeded), + StreamsFailed: atomic.LoadInt64(&t.czData.streamsFailed), + MessagesSent: atomic.LoadInt64(&t.czData.msgSent), + MessagesReceived: atomic.LoadInt64(&t.czData.msgRecv), + KeepAlivesSent: atomic.LoadInt64(&t.czData.kpCount), + LastLocalStreamCreatedTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastStreamCreatedTime)), + LastMessageSentTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastMsgSentTime)), + LastMessageReceivedTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastMsgRecvTime)), + LocalFlowControlWindow: int64(t.fc.getSize()), + SocketOptions: channelz.GetSocketOption(t.conn), + LocalAddr: t.localAddr, + RemoteAddr: t.remoteAddr, + // RemoteName : + } + if au, ok := t.authInfo.(credentials.ChannelzSecurityInfo); ok { + s.Security = au.GetSecurityValue() + } + s.RemoteFlowControlWindow = t.getOutFlowWindow() + return &s +} + +func (t *http2Client) RemoteAddr() net.Addr { return t.remoteAddr } + +func (t *http2Client) IncrMsgSent() { + atomic.AddInt64(&t.czData.msgSent, 1) + atomic.StoreInt64(&t.czData.lastMsgSentTime, time.Now().UnixNano()) +} + +func (t *http2Client) IncrMsgRecv() { + atomic.AddInt64(&t.czData.msgRecv, 1) + atomic.StoreInt64(&t.czData.lastMsgRecvTime, time.Now().UnixNano()) +} + +func (t *http2Client) getOutFlowWindow() int64 { + resp := make(chan uint32, 1) + timer := time.NewTimer(time.Second) + defer timer.Stop() + t.controlBuf.put(&outFlowControlSizeRequest{resp}) + select { + case sz := <-resp: + return int64(sz) + case <-t.ctxDone: + return -1 + case <-timer.C: + return -2 + } +} diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_server.go b/vendor/google.golang.org/grpc/internal/transport/http2_server.go new file mode 100644 index 0000000..435092e --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/http2_server.go @@ -0,0 +1,1214 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package transport + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "math" + "net" + "strconv" + "sync" + "sync/atomic" + "time" + + "github.com/golang/protobuf/proto" + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" + "google.golang.org/grpc/tap" +) + +var ( + // ErrIllegalHeaderWrite indicates that setting header is illegal because of + // the stream's state. + ErrIllegalHeaderWrite = errors.New("transport: the stream is done or WriteHeader was already called") + // ErrHeaderListSizeLimitViolation indicates that the header list size is larger + // than the limit set by peer. + ErrHeaderListSizeLimitViolation = errors.New("transport: trying to send header list size larger than the limit set by peer") +) + +// http2Server implements the ServerTransport interface with HTTP2. +type http2Server struct { + ctx context.Context + ctxDone <-chan struct{} // Cache the context.Done() chan + cancel context.CancelFunc + conn net.Conn + loopy *loopyWriter + readerDone chan struct{} // sync point to enable testing. + writerDone chan struct{} // sync point to enable testing. + remoteAddr net.Addr + localAddr net.Addr + maxStreamID uint32 // max stream ID ever seen + authInfo credentials.AuthInfo // auth info about the connection + inTapHandle tap.ServerInHandle + framer *framer + // The max number of concurrent streams. + maxStreams uint32 + // controlBuf delivers all the control related tasks (e.g., window + // updates, reset streams, and various settings) to the controller. + controlBuf *controlBuffer + fc *trInFlow + stats stats.Handler + // Flag to keep track of reading activity on transport. + // 1 is true and 0 is false. + activity uint32 // Accessed atomically. + // Keepalive and max-age parameters for the server. + kp keepalive.ServerParameters + + // Keepalive enforcement policy. + kep keepalive.EnforcementPolicy + // The time instance last ping was received. + lastPingAt time.Time + // Number of times the client has violated keepalive ping policy so far. + pingStrikes uint8 + // Flag to signify that number of ping strikes should be reset to 0. + // This is set whenever data or header frames are sent. + // 1 means yes. + resetPingStrikes uint32 // Accessed atomically. + initialWindowSize int32 + bdpEst *bdpEstimator + maxSendHeaderListSize *uint32 + + mu sync.Mutex // guard the following + + // drainChan is initialized when drain(...) is called the first time. + // After which the server writes out the first GoAway(with ID 2^31-1) frame. + // Then an independent goroutine will be launched to later send the second GoAway. + // During this time we don't want to write another first GoAway(with ID 2^31 -1) frame. + // Thus call to drain(...) will be a no-op if drainChan is already initialized since draining is + // already underway. + drainChan chan struct{} + state transportState + activeStreams map[uint32]*Stream + // idle is the time instant when the connection went idle. + // This is either the beginning of the connection or when the number of + // RPCs go down to 0. + // When the connection is busy, this value is set to 0. + idle time.Time + + // Fields below are for channelz metric collection. 
+ channelzID int64 // channelz unique identification number + czData *channelzData +} + +// newHTTP2Server constructs a ServerTransport based on HTTP2. ConnectionError is +// returned if something goes wrong. +func newHTTP2Server(conn net.Conn, config *ServerConfig) (_ ServerTransport, err error) { + writeBufSize := config.WriteBufferSize + readBufSize := config.ReadBufferSize + maxHeaderListSize := defaultServerMaxHeaderListSize + if config.MaxHeaderListSize != nil { + maxHeaderListSize = *config.MaxHeaderListSize + } + framer := newFramer(conn, writeBufSize, readBufSize, maxHeaderListSize) + // Send initial settings as connection preface to client. + var isettings []http2.Setting + // TODO(zhaoq): Have a better way to signal "no limit" because 0 is + // permitted in the HTTP2 spec. + maxStreams := config.MaxStreams + if maxStreams == 0 { + maxStreams = math.MaxUint32 + } else { + isettings = append(isettings, http2.Setting{ + ID: http2.SettingMaxConcurrentStreams, + Val: maxStreams, + }) + } + dynamicWindow := true + iwz := int32(initialWindowSize) + if config.InitialWindowSize >= defaultWindowSize { + iwz = config.InitialWindowSize + dynamicWindow = false + } + icwz := int32(initialWindowSize) + if config.InitialConnWindowSize >= defaultWindowSize { + icwz = config.InitialConnWindowSize + dynamicWindow = false + } + if iwz != defaultWindowSize { + isettings = append(isettings, http2.Setting{ + ID: http2.SettingInitialWindowSize, + Val: uint32(iwz)}) + } + if config.MaxHeaderListSize != nil { + isettings = append(isettings, http2.Setting{ + ID: http2.SettingMaxHeaderListSize, + Val: *config.MaxHeaderListSize, + }) + } + if err := framer.fr.WriteSettings(isettings...); err != nil { + return nil, connectionErrorf(false, err, "transport: %v", err) + } + // Adjust the connection flow control window if needed. + if delta := uint32(icwz - defaultWindowSize); delta > 0 { + if err := framer.fr.WriteWindowUpdate(0, delta); err != nil { + return nil, connectionErrorf(false, err, "transport: %v", err) + } + } + kp := config.KeepaliveParams + if kp.MaxConnectionIdle == 0 { + kp.MaxConnectionIdle = defaultMaxConnectionIdle + } + if kp.MaxConnectionAge == 0 { + kp.MaxConnectionAge = defaultMaxConnectionAge + } + // Add a jitter to MaxConnectionAge. 
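+ // (a random jitter keeps connections accepted around the same time from all
+ // hitting their age limit at the same instant).
+ //
+ // For reference, these keepalive settings normally come from the public
+ // server options; a minimal sketch with illustrative values (not the
+ // defaults), assuming the usual google.golang.org/grpc and
+ // google.golang.org/grpc/keepalive imports:
+ //
+ //   srv := grpc.NewServer(
+ //       grpc.KeepaliveParams(keepalive.ServerParameters{
+ //           MaxConnectionIdle: 5 * time.Minute,
+ //           MaxConnectionAge:  30 * time.Minute,
+ //           Time:              2 * time.Hour,
+ //           Timeout:           20 * time.Second,
+ //       }),
+ //       grpc.KeepaliveEnforcementPolicy(keepalive.EnforcementPolicy{
+ //           MinTime:             time.Minute,
+ //           PermitWithoutStream: false,
+ //       }),
+ //   )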
+ kp.MaxConnectionAge += getJitter(kp.MaxConnectionAge) + if kp.MaxConnectionAgeGrace == 0 { + kp.MaxConnectionAgeGrace = defaultMaxConnectionAgeGrace + } + if kp.Time == 0 { + kp.Time = defaultServerKeepaliveTime + } + if kp.Timeout == 0 { + kp.Timeout = defaultServerKeepaliveTimeout + } + kep := config.KeepalivePolicy + if kep.MinTime == 0 { + kep.MinTime = defaultKeepalivePolicyMinTime + } + ctx, cancel := context.WithCancel(context.Background()) + t := &http2Server{ + ctx: ctx, + cancel: cancel, + ctxDone: ctx.Done(), + conn: conn, + remoteAddr: conn.RemoteAddr(), + localAddr: conn.LocalAddr(), + authInfo: config.AuthInfo, + framer: framer, + readerDone: make(chan struct{}), + writerDone: make(chan struct{}), + maxStreams: maxStreams, + inTapHandle: config.InTapHandle, + fc: &trInFlow{limit: uint32(icwz)}, + state: reachable, + activeStreams: make(map[uint32]*Stream), + stats: config.StatsHandler, + kp: kp, + idle: time.Now(), + kep: kep, + initialWindowSize: iwz, + czData: new(channelzData), + } + t.controlBuf = newControlBuffer(t.ctxDone) + if dynamicWindow { + t.bdpEst = &bdpEstimator{ + bdp: initialWindowSize, + updateFlowControl: t.updateFlowControl, + } + } + if t.stats != nil { + t.ctx = t.stats.TagConn(t.ctx, &stats.ConnTagInfo{ + RemoteAddr: t.remoteAddr, + LocalAddr: t.localAddr, + }) + connBegin := &stats.ConnBegin{} + t.stats.HandleConn(t.ctx, connBegin) + } + if channelz.IsOn() { + t.channelzID = channelz.RegisterNormalSocket(t, config.ChannelzParentID, fmt.Sprintf("%s -> %s", t.remoteAddr, t.localAddr)) + } + t.framer.writer.Flush() + + defer func() { + if err != nil { + t.Close() + } + }() + + // Check the validity of client preface. + preface := make([]byte, len(clientPreface)) + if _, err := io.ReadFull(t.conn, preface); err != nil { + return nil, connectionErrorf(false, err, "transport: http2Server.HandleStreams failed to receive the preface from client: %v", err) + } + if !bytes.Equal(preface, clientPreface) { + return nil, connectionErrorf(false, nil, "transport: http2Server.HandleStreams received bogus greeting from client: %q", preface) + } + + frame, err := t.framer.fr.ReadFrame() + if err == io.EOF || err == io.ErrUnexpectedEOF { + return nil, err + } + if err != nil { + return nil, connectionErrorf(false, err, "transport: http2Server.HandleStreams failed to read initial settings frame: %v", err) + } + atomic.StoreUint32(&t.activity, 1) + sf, ok := frame.(*http2.SettingsFrame) + if !ok { + return nil, connectionErrorf(false, nil, "transport: http2Server.HandleStreams saw invalid preface type %T from client", frame) + } + t.handleSettings(sf) + + go func() { + t.loopy = newLoopyWriter(serverSide, t.framer, t.controlBuf, t.bdpEst) + t.loopy.ssGoAwayHandler = t.outgoingGoAwayHandler + if err := t.loopy.run(); err != nil { + errorf("transport: loopyWriter.run returning. Err: %v", err) + } + t.conn.Close() + close(t.writerDone) + }() + go t.keepalive() + return t, nil +} + +// operateHeader takes action on the decoded headers. 
+func (t *http2Server) operateHeaders(frame *http2.MetaHeadersFrame, handle func(*Stream), traceCtx func(context.Context, string) context.Context) (fatal bool) { + streamID := frame.Header().StreamID + state := &decodeState{ + serverSide: true, + } + if err := state.decodeHeader(frame); err != nil { + if se, ok := status.FromError(err); ok { + t.controlBuf.put(&cleanupStream{ + streamID: streamID, + rst: true, + rstCode: statusCodeConvTab[se.Code()], + onWrite: func() {}, + }) + } + return false + } + + buf := newRecvBuffer() + s := &Stream{ + id: streamID, + st: t, + buf: buf, + fc: &inFlow{limit: uint32(t.initialWindowSize)}, + recvCompress: state.data.encoding, + method: state.data.method, + contentSubtype: state.data.contentSubtype, + } + if frame.StreamEnded() { + // s is just created by the caller. No lock needed. + s.state = streamReadDone + } + if state.data.timeoutSet { + s.ctx, s.cancel = context.WithTimeout(t.ctx, state.data.timeout) + } else { + s.ctx, s.cancel = context.WithCancel(t.ctx) + } + pr := &peer.Peer{ + Addr: t.remoteAddr, + } + // Attach Auth info if there is any. + if t.authInfo != nil { + pr.AuthInfo = t.authInfo + } + s.ctx = peer.NewContext(s.ctx, pr) + // Attach the received metadata to the context. + if len(state.data.mdata) > 0 { + s.ctx = metadata.NewIncomingContext(s.ctx, state.data.mdata) + } + if state.data.statsTags != nil { + s.ctx = stats.SetIncomingTags(s.ctx, state.data.statsTags) + } + if state.data.statsTrace != nil { + s.ctx = stats.SetIncomingTrace(s.ctx, state.data.statsTrace) + } + if t.inTapHandle != nil { + var err error + info := &tap.Info{ + FullMethodName: state.data.method, + } + s.ctx, err = t.inTapHandle(s.ctx, info) + if err != nil { + warningf("transport: http2Server.operateHeaders got an error from InTapHandle: %v", err) + t.controlBuf.put(&cleanupStream{ + streamID: s.id, + rst: true, + rstCode: http2.ErrCodeRefusedStream, + onWrite: func() {}, + }) + return false + } + } + t.mu.Lock() + if t.state != reachable { + t.mu.Unlock() + return false + } + if uint32(len(t.activeStreams)) >= t.maxStreams { + t.mu.Unlock() + t.controlBuf.put(&cleanupStream{ + streamID: streamID, + rst: true, + rstCode: http2.ErrCodeRefusedStream, + onWrite: func() {}, + }) + return false + } + if streamID%2 != 1 || streamID <= t.maxStreamID { + t.mu.Unlock() + // illegal gRPC stream id. + errorf("transport: http2Server.HandleStreams received an illegal stream id: %v", streamID) + return true + } + t.maxStreamID = streamID + t.activeStreams[streamID] = s + if len(t.activeStreams) == 1 { + t.idle = time.Time{} + } + t.mu.Unlock() + if channelz.IsOn() { + atomic.AddInt64(&t.czData.streamsStarted, 1) + atomic.StoreInt64(&t.czData.lastStreamCreatedTime, time.Now().UnixNano()) + } + s.requestRead = func(n int) { + t.adjustWindow(s, uint32(n)) + } + s.ctx = traceCtx(s.ctx, s.method) + if t.stats != nil { + s.ctx = t.stats.TagRPC(s.ctx, &stats.RPCTagInfo{FullMethodName: s.method}) + inHeader := &stats.InHeader{ + FullMethod: s.method, + RemoteAddr: t.remoteAddr, + LocalAddr: t.localAddr, + Compression: s.recvCompress, + WireLength: int(frame.Header().Length), + } + t.stats.HandleRPC(s.ctx, inHeader) + } + s.ctxDone = s.ctx.Done() + s.wq = newWriteQuota(defaultWriteQuota, s.ctxDone) + s.trReader = &transportReader{ + reader: &recvBufferReader{ + ctx: s.ctx, + ctxDone: s.ctxDone, + recv: s.buf, + }, + windowHandler: func(n int) { + t.updateWindow(s, uint32(n)) + }, + } + // Register the stream with loopy. 
+ t.controlBuf.put(®isterStream{ + streamID: s.id, + wq: s.wq, + }) + handle(s) + return false +} + +// HandleStreams receives incoming streams using the given handler. This is +// typically run in a separate goroutine. +// traceCtx attaches trace to ctx and returns the new context. +func (t *http2Server) HandleStreams(handle func(*Stream), traceCtx func(context.Context, string) context.Context) { + defer close(t.readerDone) + for { + frame, err := t.framer.fr.ReadFrame() + atomic.StoreUint32(&t.activity, 1) + if err != nil { + if se, ok := err.(http2.StreamError); ok { + warningf("transport: http2Server.HandleStreams encountered http2.StreamError: %v", se) + t.mu.Lock() + s := t.activeStreams[se.StreamID] + t.mu.Unlock() + if s != nil { + t.closeStream(s, true, se.Code, false) + } else { + t.controlBuf.put(&cleanupStream{ + streamID: se.StreamID, + rst: true, + rstCode: se.Code, + onWrite: func() {}, + }) + } + continue + } + if err == io.EOF || err == io.ErrUnexpectedEOF { + t.Close() + return + } + warningf("transport: http2Server.HandleStreams failed to read frame: %v", err) + t.Close() + return + } + switch frame := frame.(type) { + case *http2.MetaHeadersFrame: + if t.operateHeaders(frame, handle, traceCtx) { + t.Close() + break + } + case *http2.DataFrame: + t.handleData(frame) + case *http2.RSTStreamFrame: + t.handleRSTStream(frame) + case *http2.SettingsFrame: + t.handleSettings(frame) + case *http2.PingFrame: + t.handlePing(frame) + case *http2.WindowUpdateFrame: + t.handleWindowUpdate(frame) + case *http2.GoAwayFrame: + // TODO: Handle GoAway from the client appropriately. + default: + errorf("transport: http2Server.HandleStreams found unhandled frame type %v.", frame) + } + } +} + +func (t *http2Server) getStream(f http2.Frame) (*Stream, bool) { + t.mu.Lock() + defer t.mu.Unlock() + if t.activeStreams == nil { + // The transport is closing. + return nil, false + } + s, ok := t.activeStreams[f.Header().StreamID] + if !ok { + // The stream is already done. + return nil, false + } + return s, true +} + +// adjustWindow sends out extra window update over the initial window size +// of stream if the application is requesting data larger in size than +// the window. +func (t *http2Server) adjustWindow(s *Stream, n uint32) { + if w := s.fc.maybeAdjust(n); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{streamID: s.id, increment: w}) + } + +} + +// updateWindow adjusts the inbound quota for the stream and the transport. +// Window updates will deliver to the controller for sending when +// the cumulative quota exceeds the corresponding threshold. +func (t *http2Server) updateWindow(s *Stream, n uint32) { + if w := s.fc.onRead(n); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{streamID: s.id, + increment: w, + }) + } +} + +// updateFlowControl updates the incoming flow control windows +// for the transport and the stream based on the current bdp +// estimation. 
+func (t *http2Server) updateFlowControl(n uint32) { + t.mu.Lock() + for _, s := range t.activeStreams { + s.fc.newLimit(n) + } + t.initialWindowSize = int32(n) + t.mu.Unlock() + t.controlBuf.put(&outgoingWindowUpdate{ + streamID: 0, + increment: t.fc.newLimit(n), + }) + t.controlBuf.put(&outgoingSettings{ + ss: []http2.Setting{ + { + ID: http2.SettingInitialWindowSize, + Val: n, + }, + }, + }) + +} + +func (t *http2Server) handleData(f *http2.DataFrame) { + size := f.Header().Length + var sendBDPPing bool + if t.bdpEst != nil { + sendBDPPing = t.bdpEst.add(size) + } + // Decouple connection's flow control from application's read. + // An update on connection's flow control should not depend on + // whether user application has read the data or not. Such a + // restriction is already imposed on the stream's flow control, + // and therefore the sender will be blocked anyways. + // Decoupling the connection flow control will prevent other + // active(fast) streams from starving in presence of slow or + // inactive streams. + if w := t.fc.onData(size); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{ + streamID: 0, + increment: w, + }) + } + if sendBDPPing { + // Avoid excessive ping detection (e.g. in an L7 proxy) + // by sending a window update prior to the BDP ping. + if w := t.fc.reset(); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{ + streamID: 0, + increment: w, + }) + } + t.controlBuf.put(bdpPing) + } + // Select the right stream to dispatch. + s, ok := t.getStream(f) + if !ok { + return + } + if size > 0 { + if err := s.fc.onData(size); err != nil { + t.closeStream(s, true, http2.ErrCodeFlowControl, false) + return + } + if f.Header().Flags.Has(http2.FlagDataPadded) { + if w := s.fc.onRead(size - uint32(len(f.Data()))); w > 0 { + t.controlBuf.put(&outgoingWindowUpdate{s.id, w}) + } + } + // TODO(bradfitz, zhaoq): A copy is required here because there is no + // guarantee f.Data() is consumed before the arrival of next frame. + // Can this copy be eliminated? + if len(f.Data()) > 0 { + data := make([]byte, len(f.Data())) + copy(data, f.Data()) + s.write(recvMsg{data: data}) + } + } + if f.Header().Flags.Has(http2.FlagDataEndStream) { + // Received the end of stream from the client. + s.compareAndSwapState(streamActive, streamReadDone) + s.write(recvMsg{err: io.EOF}) + } +} + +func (t *http2Server) handleRSTStream(f *http2.RSTStreamFrame) { + // If the stream is not deleted from the transport's active streams map, then do a regular close stream. + if s, ok := t.getStream(f); ok { + t.closeStream(s, false, 0, false) + return + } + // If the stream is already deleted from the active streams map, then put a cleanupStream item into controlbuf to delete the stream from loopy writer's established streams map. 
+ t.controlBuf.put(&cleanupStream{ + streamID: f.Header().StreamID, + rst: false, + rstCode: 0, + onWrite: func() {}, + }) +} + +func (t *http2Server) handleSettings(f *http2.SettingsFrame) { + if f.IsAck() { + return + } + var ss []http2.Setting + var updateFuncs []func() + f.ForeachSetting(func(s http2.Setting) error { + switch s.ID { + case http2.SettingMaxHeaderListSize: + updateFuncs = append(updateFuncs, func() { + t.maxSendHeaderListSize = new(uint32) + *t.maxSendHeaderListSize = s.Val + }) + default: + ss = append(ss, s) + } + return nil + }) + t.controlBuf.executeAndPut(func(interface{}) bool { + for _, f := range updateFuncs { + f() + } + return true + }, &incomingSettings{ + ss: ss, + }) +} + +const ( + maxPingStrikes = 2 + defaultPingTimeout = 2 * time.Hour +) + +func (t *http2Server) handlePing(f *http2.PingFrame) { + if f.IsAck() { + if f.Data == goAwayPing.data && t.drainChan != nil { + close(t.drainChan) + return + } + // Maybe it's a BDP ping. + if t.bdpEst != nil { + t.bdpEst.calculate(f.Data) + } + return + } + pingAck := &ping{ack: true} + copy(pingAck.data[:], f.Data[:]) + t.controlBuf.put(pingAck) + + now := time.Now() + defer func() { + t.lastPingAt = now + }() + // A reset ping strikes means that we don't need to check for policy + // violation for this ping and the pingStrikes counter should be set + // to 0. + if atomic.CompareAndSwapUint32(&t.resetPingStrikes, 1, 0) { + t.pingStrikes = 0 + return + } + t.mu.Lock() + ns := len(t.activeStreams) + t.mu.Unlock() + if ns < 1 && !t.kep.PermitWithoutStream { + // Keepalive shouldn't be active thus, this new ping should + // have come after at least defaultPingTimeout. + if t.lastPingAt.Add(defaultPingTimeout).After(now) { + t.pingStrikes++ + } + } else { + // Check if keepalive policy is respected. + if t.lastPingAt.Add(t.kep.MinTime).After(now) { + t.pingStrikes++ + } + } + + if t.pingStrikes > maxPingStrikes { + // Send goaway and close the connection. + errorf("transport: Got too many pings from the client, closing the connection.") + t.controlBuf.put(&goAway{code: http2.ErrCodeEnhanceYourCalm, debugData: []byte("too_many_pings"), closeConn: true}) + } +} + +func (t *http2Server) handleWindowUpdate(f *http2.WindowUpdateFrame) { + t.controlBuf.put(&incomingWindowUpdate{ + streamID: f.Header().StreamID, + increment: f.Increment, + }) +} + +func appendHeaderFieldsFromMD(headerFields []hpack.HeaderField, md metadata.MD) []hpack.HeaderField { + for k, vv := range md { + if isReservedHeader(k) { + // Clients don't tolerate reading restricted headers after some non restricted ones were sent. + continue + } + for _, v := range vv { + headerFields = append(headerFields, hpack.HeaderField{Name: k, Value: encodeMetadataHeader(k, v)}) + } + } + return headerFields +} + +func (t *http2Server) checkForHeaderListSize(it interface{}) bool { + if t.maxSendHeaderListSize == nil { + return true + } + hdrFrame := it.(*headerFrame) + var sz int64 + for _, f := range hdrFrame.hf { + if sz += int64(f.Size()); sz > int64(*t.maxSendHeaderListSize) { + errorf("header list size to send violates the maximum size (%d bytes) set by client", *t.maxSendHeaderListSize) + return false + } + } + return true +} + +// WriteHeader sends the header metedata md back to the client. 
+func (t *http2Server) WriteHeader(s *Stream, md metadata.MD) error { + if s.updateHeaderSent() || s.getState() == streamDone { + return ErrIllegalHeaderWrite + } + s.hdrMu.Lock() + if md.Len() > 0 { + if s.header.Len() > 0 { + s.header = metadata.Join(s.header, md) + } else { + s.header = md + } + } + if err := t.writeHeaderLocked(s); err != nil { + s.hdrMu.Unlock() + return err + } + s.hdrMu.Unlock() + return nil +} + +func (t *http2Server) writeHeaderLocked(s *Stream) error { + // TODO(mmukhi): Benchmark if the performance gets better if count the metadata and other header fields + // first and create a slice of that exact size. + headerFields := make([]hpack.HeaderField, 0, 2) // at least :status, content-type will be there if none else. + headerFields = append(headerFields, hpack.HeaderField{Name: ":status", Value: "200"}) + headerFields = append(headerFields, hpack.HeaderField{Name: "content-type", Value: contentType(s.contentSubtype)}) + if s.sendCompress != "" { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-encoding", Value: s.sendCompress}) + } + headerFields = appendHeaderFieldsFromMD(headerFields, s.header) + success, err := t.controlBuf.executeAndPut(t.checkForHeaderListSize, &headerFrame{ + streamID: s.id, + hf: headerFields, + endStream: false, + onWrite: func() { + atomic.StoreUint32(&t.resetPingStrikes, 1) + }, + }) + if !success { + if err != nil { + return err + } + t.closeStream(s, true, http2.ErrCodeInternal, false) + return ErrHeaderListSizeLimitViolation + } + if t.stats != nil { + // Note: WireLength is not set in outHeader. + // TODO(mmukhi): Revisit this later, if needed. + outHeader := &stats.OutHeader{} + t.stats.HandleRPC(s.Context(), outHeader) + } + return nil +} + +// WriteStatus sends stream status to the client and terminates the stream. +// There is no further I/O operations being able to perform on this stream. +// TODO(zhaoq): Now it indicates the end of entire stream. Revisit if early +// OK is adopted. +func (t *http2Server) WriteStatus(s *Stream, st *status.Status) error { + if s.getState() == streamDone { + return nil + } + s.hdrMu.Lock() + // TODO(mmukhi): Benchmark if the performance gets better if count the metadata and other header fields + // first and create a slice of that exact size. + headerFields := make([]hpack.HeaderField, 0, 2) // grpc-status and grpc-message will be there if none else. + if !s.updateHeaderSent() { // No headers have been sent. + if len(s.header) > 0 { // Send a separate header frame. + if err := t.writeHeaderLocked(s); err != nil { + s.hdrMu.Unlock() + return err + } + } else { // Send a trailer only response. + headerFields = append(headerFields, hpack.HeaderField{Name: ":status", Value: "200"}) + headerFields = append(headerFields, hpack.HeaderField{Name: "content-type", Value: contentType(s.contentSubtype)}) + } + } + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-status", Value: strconv.Itoa(int(st.Code()))}) + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-message", Value: encodeGrpcMessage(st.Message())}) + + if p := st.Proto(); p != nil && len(p.Details) > 0 { + stBytes, err := proto.Marshal(p) + if err != nil { + // TODO: return error instead, when callers are able to handle it. + grpclog.Errorf("transport: failed to marshal rpc status: %v, error: %v", p, err) + } else { + headerFields = append(headerFields, hpack.HeaderField{Name: "grpc-status-details-bin", Value: encodeBinHeader(stBytes)}) + } + } + + // Attach the trailer metadata. 
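+ // For illustration (example values): an OK status yields grpc-status "0"
+ // with an empty grpc-message, while status.Error(codes.NotFound, "user not
+ // found") yields grpc-status "5" and grpc-message "user not found"; any
+ // trailer metadata set by the handler is appended right below.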
+ headerFields = appendHeaderFieldsFromMD(headerFields, s.trailer) + trailingHeader := &headerFrame{ + streamID: s.id, + hf: headerFields, + endStream: true, + onWrite: func() { + atomic.StoreUint32(&t.resetPingStrikes, 1) + }, + } + s.hdrMu.Unlock() + success, err := t.controlBuf.execute(t.checkForHeaderListSize, trailingHeader) + if !success { + if err != nil { + return err + } + t.closeStream(s, true, http2.ErrCodeInternal, false) + return ErrHeaderListSizeLimitViolation + } + // Send a RST_STREAM after the trailers if the client has not already half-closed. + rst := s.getState() == streamActive + t.finishStream(s, rst, http2.ErrCodeNo, trailingHeader, true) + if t.stats != nil { + t.stats.HandleRPC(s.Context(), &stats.OutTrailer{}) + } + return nil +} + +// Write converts the data into HTTP2 data frame and sends it out. Non-nil error +// is returns if it fails (e.g., framing error, transport error). +func (t *http2Server) Write(s *Stream, hdr []byte, data []byte, opts *Options) error { + if !s.isHeaderSent() { // Headers haven't been written yet. + if err := t.WriteHeader(s, nil); err != nil { + if _, ok := err.(ConnectionError); ok { + return err + } + // TODO(mmukhi, dfawley): Make sure this is the right code to return. + return status.Errorf(codes.Internal, "transport: %v", err) + } + } else { + // Writing headers checks for this condition. + if s.getState() == streamDone { + // TODO(mmukhi, dfawley): Should the server write also return io.EOF? + s.cancel() + select { + case <-t.ctx.Done(): + return ErrConnClosing + default: + } + return ContextErr(s.ctx.Err()) + } + } + // Add some data to header frame so that we can equally distribute bytes across frames. + emptyLen := http2MaxFrameLen - len(hdr) + if emptyLen > len(data) { + emptyLen = len(data) + } + hdr = append(hdr, data[:emptyLen]...) + data = data[emptyLen:] + df := &dataFrame{ + streamID: s.id, + h: hdr, + d: data, + onEachWrite: func() { + atomic.StoreUint32(&t.resetPingStrikes, 1) + }, + } + if err := s.wq.get(int32(len(hdr) + len(data))); err != nil { + select { + case <-t.ctx.Done(): + return ErrConnClosing + default: + } + return ContextErr(s.ctx.Err()) + } + return t.controlBuf.put(df) +} + +// keepalive running in a separate goroutine does the following: +// 1. Gracefully closes an idle connection after a duration of keepalive.MaxConnectionIdle. +// 2. Gracefully closes any connection after a duration of keepalive.MaxConnectionAge. +// 3. Forcibly closes a connection after an additive period of keepalive.MaxConnectionAgeGrace over keepalive.MaxConnectionAge. +// 4. Makes sure a connection is alive by sending pings with a frequency of keepalive.Time and closes a non-responsive connection +// after an additional duration of keepalive.Timeout. +func (t *http2Server) keepalive() { + p := &ping{} + var pingSent bool + maxIdle := time.NewTimer(t.kp.MaxConnectionIdle) + maxAge := time.NewTimer(t.kp.MaxConnectionAge) + keepalive := time.NewTimer(t.kp.Time) + // NOTE: All exit paths of this function should reset their + // respective timers. A failure to do so will cause the + // following clean-up to deadlock and eventually leak. + defer func() { + if !maxIdle.Stop() { + <-maxIdle.C + } + if !maxAge.Stop() { + <-maxAge.C + } + if !keepalive.Stop() { + <-keepalive.C + } + }() + for { + select { + case <-maxIdle.C: + t.mu.Lock() + idle := t.idle + if idle.IsZero() { // The connection is non-idle. 
+ t.mu.Unlock() + maxIdle.Reset(t.kp.MaxConnectionIdle) + continue + } + val := t.kp.MaxConnectionIdle - time.Since(idle) + t.mu.Unlock() + if val <= 0 { + // The connection has been idle for a duration of keepalive.MaxConnectionIdle or more. + // Gracefully close the connection. + t.drain(http2.ErrCodeNo, []byte{}) + // Resetting the timer so that the clean-up doesn't deadlock. + maxIdle.Reset(infinity) + return + } + maxIdle.Reset(val) + case <-maxAge.C: + t.drain(http2.ErrCodeNo, []byte{}) + maxAge.Reset(t.kp.MaxConnectionAgeGrace) + select { + case <-maxAge.C: + // Close the connection after grace period. + t.Close() + // Resetting the timer so that the clean-up doesn't deadlock. + maxAge.Reset(infinity) + case <-t.ctx.Done(): + } + return + case <-keepalive.C: + if atomic.CompareAndSwapUint32(&t.activity, 1, 0) { + pingSent = false + keepalive.Reset(t.kp.Time) + continue + } + if pingSent { + t.Close() + // Resetting the timer so that the clean-up doesn't deadlock. + keepalive.Reset(infinity) + return + } + pingSent = true + if channelz.IsOn() { + atomic.AddInt64(&t.czData.kpCount, 1) + } + t.controlBuf.put(p) + keepalive.Reset(t.kp.Timeout) + case <-t.ctx.Done(): + return + } + } +} + +// Close starts shutting down the http2Server transport. +// TODO(zhaoq): Now the destruction is not blocked on any pending streams. This +// could cause some resource issue. Revisit this later. +func (t *http2Server) Close() error { + t.mu.Lock() + if t.state == closing { + t.mu.Unlock() + return errors.New("transport: Close() was already called") + } + t.state = closing + streams := t.activeStreams + t.activeStreams = nil + t.mu.Unlock() + t.controlBuf.finish() + t.cancel() + err := t.conn.Close() + if channelz.IsOn() { + channelz.RemoveEntry(t.channelzID) + } + // Cancel all active streams. + for _, s := range streams { + s.cancel() + } + if t.stats != nil { + connEnd := &stats.ConnEnd{} + t.stats.HandleConn(t.ctx, connEnd) + } + return err +} + +// deleteStream deletes the stream s from transport's active streams. +func (t *http2Server) deleteStream(s *Stream, eosReceived bool) (oldState streamState) { + oldState = s.swapState(streamDone) + if oldState == streamDone { + // If the stream was already done, return. + return oldState + } + + // In case stream sending and receiving are invoked in separate + // goroutines (e.g., bi-directional streaming), cancel needs to be + // called to interrupt the potential blocking on other goroutines. + s.cancel() + + t.mu.Lock() + if _, ok := t.activeStreams[s.id]; ok { + delete(t.activeStreams, s.id) + if len(t.activeStreams) == 0 { + t.idle = time.Now() + } + } + t.mu.Unlock() + + if channelz.IsOn() { + if eosReceived { + atomic.AddInt64(&t.czData.streamsSucceeded, 1) + } else { + atomic.AddInt64(&t.czData.streamsFailed, 1) + } + } + + return oldState +} + +// finishStream closes the stream and puts the trailing headerFrame into controlbuf. +func (t *http2Server) finishStream(s *Stream, rst bool, rstCode http2.ErrCode, hdr *headerFrame, eosReceived bool) { + oldState := t.deleteStream(s, eosReceived) + // If the stream is already closed, then don't put trailing header to controlbuf. + if oldState == streamDone { + return + } + + hdr.cleanup = &cleanupStream{ + streamID: s.id, + rst: rst, + rstCode: rstCode, + onWrite: func() {}, + } + t.controlBuf.put(hdr) +} + +// closeStream clears the footprint of a stream when the stream is not needed any more. 
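+// Unlike finishStream, it always queues a cleanupStream frame, even when the
+// stream had already transitioned to streamDone.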
+func (t *http2Server) closeStream(s *Stream, rst bool, rstCode http2.ErrCode, eosReceived bool) { + t.deleteStream(s, eosReceived) + t.controlBuf.put(&cleanupStream{ + streamID: s.id, + rst: rst, + rstCode: rstCode, + onWrite: func() {}, + }) +} + +func (t *http2Server) RemoteAddr() net.Addr { + return t.remoteAddr +} + +func (t *http2Server) Drain() { + t.drain(http2.ErrCodeNo, []byte{}) +} + +func (t *http2Server) drain(code http2.ErrCode, debugData []byte) { + t.mu.Lock() + defer t.mu.Unlock() + if t.drainChan != nil { + return + } + t.drainChan = make(chan struct{}) + t.controlBuf.put(&goAway{code: code, debugData: debugData, headsUp: true}) +} + +var goAwayPing = &ping{data: [8]byte{1, 6, 1, 8, 0, 3, 3, 9}} + +// Handles outgoing GoAway and returns true if loopy needs to put itself +// in draining mode. +func (t *http2Server) outgoingGoAwayHandler(g *goAway) (bool, error) { + t.mu.Lock() + if t.state == closing { // TODO(mmukhi): This seems unnecessary. + t.mu.Unlock() + // The transport is closing. + return false, ErrConnClosing + } + sid := t.maxStreamID + if !g.headsUp { + // Stop accepting more streams now. + t.state = draining + if len(t.activeStreams) == 0 { + g.closeConn = true + } + t.mu.Unlock() + if err := t.framer.fr.WriteGoAway(sid, g.code, g.debugData); err != nil { + return false, err + } + if g.closeConn { + // Abruptly close the connection following the GoAway (via + // loopywriter). But flush out what's inside the buffer first. + t.framer.writer.Flush() + return false, fmt.Errorf("transport: Connection closing") + } + return true, nil + } + t.mu.Unlock() + // For a graceful close, send out a GoAway with stream ID of MaxUInt32, + // Follow that with a ping and wait for the ack to come back or a timer + // to expire. During this time accept new streams since they might have + // originated before the GoAway reaches the client. + // After getting the ack or timer expiration send out another GoAway this + // time with an ID of the max stream server intends to process. 
+ if err := t.framer.fr.WriteGoAway(math.MaxUint32, http2.ErrCodeNo, []byte{}); err != nil { + return false, err + } + if err := t.framer.fr.WritePing(false, goAwayPing.data); err != nil { + return false, err + } + go func() { + timer := time.NewTimer(time.Minute) + defer timer.Stop() + select { + case <-t.drainChan: + case <-timer.C: + case <-t.ctx.Done(): + return + } + t.controlBuf.put(&goAway{code: g.code, debugData: g.debugData}) + }() + return false, nil +} + +func (t *http2Server) ChannelzMetric() *channelz.SocketInternalMetric { + s := channelz.SocketInternalMetric{ + StreamsStarted: atomic.LoadInt64(&t.czData.streamsStarted), + StreamsSucceeded: atomic.LoadInt64(&t.czData.streamsSucceeded), + StreamsFailed: atomic.LoadInt64(&t.czData.streamsFailed), + MessagesSent: atomic.LoadInt64(&t.czData.msgSent), + MessagesReceived: atomic.LoadInt64(&t.czData.msgRecv), + KeepAlivesSent: atomic.LoadInt64(&t.czData.kpCount), + LastRemoteStreamCreatedTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastStreamCreatedTime)), + LastMessageSentTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastMsgSentTime)), + LastMessageReceivedTimestamp: time.Unix(0, atomic.LoadInt64(&t.czData.lastMsgRecvTime)), + LocalFlowControlWindow: int64(t.fc.getSize()), + SocketOptions: channelz.GetSocketOption(t.conn), + LocalAddr: t.localAddr, + RemoteAddr: t.remoteAddr, + // RemoteName : + } + if au, ok := t.authInfo.(credentials.ChannelzSecurityInfo); ok { + s.Security = au.GetSecurityValue() + } + s.RemoteFlowControlWindow = t.getOutFlowWindow() + return &s +} + +func (t *http2Server) IncrMsgSent() { + atomic.AddInt64(&t.czData.msgSent, 1) + atomic.StoreInt64(&t.czData.lastMsgSentTime, time.Now().UnixNano()) +} + +func (t *http2Server) IncrMsgRecv() { + atomic.AddInt64(&t.czData.msgRecv, 1) + atomic.StoreInt64(&t.czData.lastMsgRecvTime, time.Now().UnixNano()) +} + +func (t *http2Server) getOutFlowWindow() int64 { + resp := make(chan uint32, 1) + timer := time.NewTimer(time.Second) + defer timer.Stop() + t.controlBuf.put(&outFlowControlSizeRequest{resp}) + select { + case sz := <-resp: + return int64(sz) + case <-t.ctxDone: + return -1 + case <-timer.C: + return -2 + } +} + +func getJitter(v time.Duration) time.Duration { + if v == infinity { + return 0 + } + // Generate a jitter between +/- 10% of the value. + r := int64(v / 10) + j := grpcrand.Int63n(2*r) - r + return time.Duration(j) +} diff --git a/vendor/google.golang.org/grpc/internal/transport/http_util.go b/vendor/google.golang.org/grpc/internal/transport/http_util.go new file mode 100644 index 0000000..9d21286 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/http_util.go @@ -0,0 +1,676 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package transport + +import ( + "bufio" + "bytes" + "encoding/base64" + "fmt" + "io" + "math" + "net" + "net/http" + "strconv" + "strings" + "time" + "unicode/utf8" + + "github.com/golang/protobuf/proto" + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" + spb "google.golang.org/genproto/googleapis/rpc/status" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +const ( + // http2MaxFrameLen specifies the max length of a HTTP2 frame. + http2MaxFrameLen = 16384 // 16KB frame + // http://http2.github.io/http2-spec/#SettingValues + http2InitHeaderTableSize = 4096 + // baseContentType is the base content-type for gRPC. This is a valid + // content-type on it's own, but can also include a content-subtype such as + // "proto" as a suffix after "+" or ";". See + // https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests + // for more details. + baseContentType = "application/grpc" +) + +var ( + clientPreface = []byte(http2.ClientPreface) + http2ErrConvTab = map[http2.ErrCode]codes.Code{ + http2.ErrCodeNo: codes.Internal, + http2.ErrCodeProtocol: codes.Internal, + http2.ErrCodeInternal: codes.Internal, + http2.ErrCodeFlowControl: codes.ResourceExhausted, + http2.ErrCodeSettingsTimeout: codes.Internal, + http2.ErrCodeStreamClosed: codes.Internal, + http2.ErrCodeFrameSize: codes.Internal, + http2.ErrCodeRefusedStream: codes.Unavailable, + http2.ErrCodeCancel: codes.Canceled, + http2.ErrCodeCompression: codes.Internal, + http2.ErrCodeConnect: codes.Internal, + http2.ErrCodeEnhanceYourCalm: codes.ResourceExhausted, + http2.ErrCodeInadequateSecurity: codes.PermissionDenied, + http2.ErrCodeHTTP11Required: codes.Internal, + } + statusCodeConvTab = map[codes.Code]http2.ErrCode{ + codes.Internal: http2.ErrCodeInternal, + codes.Canceled: http2.ErrCodeCancel, + codes.Unavailable: http2.ErrCodeRefusedStream, + codes.ResourceExhausted: http2.ErrCodeEnhanceYourCalm, + codes.PermissionDenied: http2.ErrCodeInadequateSecurity, + } + // HTTPStatusConvTab is the HTTP status code to gRPC error code conversion table. + HTTPStatusConvTab = map[int]codes.Code{ + // 400 Bad Request - INTERNAL. + http.StatusBadRequest: codes.Internal, + // 401 Unauthorized - UNAUTHENTICATED. + http.StatusUnauthorized: codes.Unauthenticated, + // 403 Forbidden - PERMISSION_DENIED. + http.StatusForbidden: codes.PermissionDenied, + // 404 Not Found - UNIMPLEMENTED. + http.StatusNotFound: codes.Unimplemented, + // 429 Too Many Requests - UNAVAILABLE. + http.StatusTooManyRequests: codes.Unavailable, + // 502 Bad Gateway - UNAVAILABLE. + http.StatusBadGateway: codes.Unavailable, + // 503 Service Unavailable - UNAVAILABLE. + http.StatusServiceUnavailable: codes.Unavailable, + // 504 Gateway timeout - UNAVAILABLE. + http.StatusGatewayTimeout: codes.Unavailable, + } +) + +type parsedHeaderData struct { + encoding string + // statusGen caches the stream status received from the trailer the server + // sent. Client side only. Do not access directly. After all trailers are + // parsed, use the status method to retrieve the status. + statusGen *status.Status + // rawStatusCode and rawStatusMsg are set from the raw trailer fields and are not + // intended for direct access outside of parsing. + rawStatusCode *int + rawStatusMsg string + httpStatus *int + // Server side only fields. + timeoutSet bool + timeout time.Duration + method string + // key-value metadata map from the peer. 
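+ // Illustrative example: a request carrying custom metadata "x-id: 42" would
+ // surface here as {"x-id": ["42"]}, alongside whitelisted entries such as
+ // "user-agent" and "content-type".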
+ mdata map[string][]string + statsTags []byte + statsTrace []byte + contentSubtype string + + // isGRPC field indicates whether the peer is speaking gRPC (otherwise HTTP). + // + // We are in gRPC mode (peer speaking gRPC) if: + // * We are client side and have already received a HEADER frame that indicates gRPC peer. + // * The header contains valid a content-type, i.e. a string starts with "application/grpc" + // And we should handle error specific to gRPC. + // + // Otherwise (i.e. a content-type string starts without "application/grpc", or does not exist), we + // are in HTTP fallback mode, and should handle error specific to HTTP. + isGRPC bool + grpcErr error + httpErr error + contentTypeErr string +} + +// decodeState configures decoding criteria and records the decoded data. +type decodeState struct { + // whether decoding on server side or not + serverSide bool + + // Records the states during HPACK decoding. It will be filled with info parsed from HTTP HEADERS + // frame once decodeHeader function has been invoked and returned. + data parsedHeaderData +} + +// isReservedHeader checks whether hdr belongs to HTTP2 headers +// reserved by gRPC protocol. Any other headers are classified as the +// user-specified metadata. +func isReservedHeader(hdr string) bool { + if hdr != "" && hdr[0] == ':' { + return true + } + switch hdr { + case "content-type", + "user-agent", + "grpc-message-type", + "grpc-encoding", + "grpc-message", + "grpc-status", + "grpc-timeout", + "grpc-status-details-bin", + // Intentionally exclude grpc-previous-rpc-attempts and + // grpc-retry-pushback-ms, which are "reserved", but their API + // intentionally works via metadata. + "te": + return true + default: + return false + } +} + +// isWhitelistedHeader checks whether hdr should be propagated into metadata +// visible to users, even though it is classified as "reserved", above. +func isWhitelistedHeader(hdr string) bool { + switch hdr { + case ":authority", "user-agent": + return true + default: + return false + } +} + +// contentSubtype returns the content-subtype for the given content-type. The +// given content-type must be a valid content-type that starts with +// "application/grpc". A content-subtype will follow "application/grpc" after a +// "+" or ";". See +// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for +// more details. +// +// If contentType is not a valid content-type for gRPC, the boolean +// will be false, otherwise true. If content-type == "application/grpc", +// "application/grpc+", or "application/grpc;", the boolean will be true, +// but no content-subtype will be returned. +// +// contentType is assumed to be lowercase already. 
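+//
+// Illustrative examples: "application/grpc+proto" yields ("proto", true),
+// "application/grpc" yields ("", true), and "text/html" yields ("", false).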
+func contentSubtype(contentType string) (string, bool) { + if contentType == baseContentType { + return "", true + } + if !strings.HasPrefix(contentType, baseContentType) { + return "", false + } + // guaranteed since != baseContentType and has baseContentType prefix + switch contentType[len(baseContentType)] { + case '+', ';': + // this will return true for "application/grpc+" or "application/grpc;" + // which the previous validContentType function tested to be valid, so we + // just say that no content-subtype is specified in this case + return contentType[len(baseContentType)+1:], true + default: + return "", false + } +} + +// contentSubtype is assumed to be lowercase +func contentType(contentSubtype string) string { + if contentSubtype == "" { + return baseContentType + } + return baseContentType + "+" + contentSubtype +} + +func (d *decodeState) status() *status.Status { + if d.data.statusGen == nil { + // No status-details were provided; generate status using code/msg. + d.data.statusGen = status.New(codes.Code(int32(*(d.data.rawStatusCode))), d.data.rawStatusMsg) + } + return d.data.statusGen +} + +const binHdrSuffix = "-bin" + +func encodeBinHeader(v []byte) string { + return base64.RawStdEncoding.EncodeToString(v) +} + +func decodeBinHeader(v string) ([]byte, error) { + if len(v)%4 == 0 { + // Input was padded, or padding was not necessary. + return base64.StdEncoding.DecodeString(v) + } + return base64.RawStdEncoding.DecodeString(v) +} + +func encodeMetadataHeader(k, v string) string { + if strings.HasSuffix(k, binHdrSuffix) { + return encodeBinHeader(([]byte)(v)) + } + return v +} + +func decodeMetadataHeader(k, v string) (string, error) { + if strings.HasSuffix(k, binHdrSuffix) { + b, err := decodeBinHeader(v) + return string(b), err + } + return v, nil +} + +func (d *decodeState) decodeHeader(frame *http2.MetaHeadersFrame) error { + // frame.Truncated is set to true when framer detects that the current header + // list size hits MaxHeaderListSize limit. + if frame.Truncated { + return status.Error(codes.Internal, "peer header list size exceeded limit") + } + + for _, hf := range frame.Fields { + d.processHeaderField(hf) + } + + if d.data.isGRPC { + if d.data.grpcErr != nil { + return d.data.grpcErr + } + if d.serverSide { + return nil + } + if d.data.rawStatusCode == nil && d.data.statusGen == nil { + // gRPC status doesn't exist. + // Set rawStatusCode to be unknown and return nil error. + // So that, if the stream has ended this Unknown status + // will be propagated to the user. + // Otherwise, it will be ignored. In which case, status from + // a later trailer, that has StreamEnded flag set, is propagated. + code := int(codes.Unknown) + d.data.rawStatusCode = &code + } + return nil + } + + // HTTP fallback mode + if d.data.httpErr != nil { + return d.data.httpErr + } + + var ( + code = codes.Internal // when header does not include HTTP status, return INTERNAL + ok bool + ) + + if d.data.httpStatus != nil { + code, ok = HTTPStatusConvTab[*(d.data.httpStatus)] + if !ok { + code = codes.Unknown + } + } + + return status.Error(code, d.constructHTTPErrMsg()) +} + +// constructErrMsg constructs error message to be returned in HTTP fallback mode. +// Format: HTTP status code and its corresponding message + content-type error message. 
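+// For example (illustrative), a 404 response without a gRPC content-type
+// produces "Not Found: HTTP status code 404; transport: missing content-type field".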
+func (d *decodeState) constructHTTPErrMsg() string { + var errMsgs []string + + if d.data.httpStatus == nil { + errMsgs = append(errMsgs, "malformed header: missing HTTP status") + } else { + errMsgs = append(errMsgs, fmt.Sprintf("%s: HTTP status code %d", http.StatusText(*(d.data.httpStatus)), *d.data.httpStatus)) + } + + if d.data.contentTypeErr == "" { + errMsgs = append(errMsgs, "transport: missing content-type field") + } else { + errMsgs = append(errMsgs, d.data.contentTypeErr) + } + + return strings.Join(errMsgs, "; ") +} + +func (d *decodeState) addMetadata(k, v string) { + if d.data.mdata == nil { + d.data.mdata = make(map[string][]string) + } + d.data.mdata[k] = append(d.data.mdata[k], v) +} + +func (d *decodeState) processHeaderField(f hpack.HeaderField) { + switch f.Name { + case "content-type": + contentSubtype, validContentType := contentSubtype(f.Value) + if !validContentType { + d.data.contentTypeErr = fmt.Sprintf("transport: received the unexpected content-type %q", f.Value) + return + } + d.data.contentSubtype = contentSubtype + // TODO: do we want to propagate the whole content-type in the metadata, + // or come up with a way to just propagate the content-subtype if it was set? + // ie {"content-type": "application/grpc+proto"} or {"content-subtype": "proto"} + // in the metadata? + d.addMetadata(f.Name, f.Value) + d.data.isGRPC = true + case "grpc-encoding": + d.data.encoding = f.Value + case "grpc-status": + code, err := strconv.Atoi(f.Value) + if err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed grpc-status: %v", err) + return + } + d.data.rawStatusCode = &code + case "grpc-message": + d.data.rawStatusMsg = decodeGrpcMessage(f.Value) + case "grpc-status-details-bin": + v, err := decodeBinHeader(f.Value) + if err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed grpc-status-details-bin: %v", err) + return + } + s := &spb.Status{} + if err := proto.Unmarshal(v, s); err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed grpc-status-details-bin: %v", err) + return + } + d.data.statusGen = status.FromProto(s) + case "grpc-timeout": + d.data.timeoutSet = true + var err error + if d.data.timeout, err = decodeTimeout(f.Value); err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed time-out: %v", err) + } + case ":path": + d.data.method = f.Value + case ":status": + code, err := strconv.Atoi(f.Value) + if err != nil { + d.data.httpErr = status.Errorf(codes.Internal, "transport: malformed http-status: %v", err) + return + } + d.data.httpStatus = &code + case "grpc-tags-bin": + v, err := decodeBinHeader(f.Value) + if err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed grpc-tags-bin: %v", err) + return + } + d.data.statsTags = v + d.addMetadata(f.Name, string(v)) + case "grpc-trace-bin": + v, err := decodeBinHeader(f.Value) + if err != nil { + d.data.grpcErr = status.Errorf(codes.Internal, "transport: malformed grpc-trace-bin: %v", err) + return + } + d.data.statsTrace = v + d.addMetadata(f.Name, string(v)) + default: + if isReservedHeader(f.Name) && !isWhitelistedHeader(f.Name) { + break + } + v, err := decodeMetadataHeader(f.Name, f.Value) + if err != nil { + errorf("Failed to decode metadata header (%q, %q): %v", f.Name, f.Value, err) + return + } + d.addMetadata(f.Name, v) + } +} + +type timeoutUnit uint8 + +const ( + hour timeoutUnit = 'H' + minute timeoutUnit = 'M' + second timeoutUnit = 'S' + millisecond timeoutUnit = 
'm' + microsecond timeoutUnit = 'u' + nanosecond timeoutUnit = 'n' +) + +func timeoutUnitToDuration(u timeoutUnit) (d time.Duration, ok bool) { + switch u { + case hour: + return time.Hour, true + case minute: + return time.Minute, true + case second: + return time.Second, true + case millisecond: + return time.Millisecond, true + case microsecond: + return time.Microsecond, true + case nanosecond: + return time.Nanosecond, true + default: + } + return +} + +const maxTimeoutValue int64 = 100000000 - 1 + +// div does integer division and round-up the result. Note that this is +// equivalent to (d+r-1)/r but has less chance to overflow. +func div(d, r time.Duration) int64 { + if m := d % r; m > 0 { + return int64(d/r + 1) + } + return int64(d / r) +} + +// TODO(zhaoq): It is the simplistic and not bandwidth efficient. Improve it. +func encodeTimeout(t time.Duration) string { + if t <= 0 { + return "0n" + } + if d := div(t, time.Nanosecond); d <= maxTimeoutValue { + return strconv.FormatInt(d, 10) + "n" + } + if d := div(t, time.Microsecond); d <= maxTimeoutValue { + return strconv.FormatInt(d, 10) + "u" + } + if d := div(t, time.Millisecond); d <= maxTimeoutValue { + return strconv.FormatInt(d, 10) + "m" + } + if d := div(t, time.Second); d <= maxTimeoutValue { + return strconv.FormatInt(d, 10) + "S" + } + if d := div(t, time.Minute); d <= maxTimeoutValue { + return strconv.FormatInt(d, 10) + "M" + } + // Note that maxTimeoutValue * time.Hour > MaxInt64. + return strconv.FormatInt(div(t, time.Hour), 10) + "H" +} + +func decodeTimeout(s string) (time.Duration, error) { + size := len(s) + if size < 2 { + return 0, fmt.Errorf("transport: timeout string is too short: %q", s) + } + if size > 9 { + // Spec allows for 8 digits plus the unit. + return 0, fmt.Errorf("transport: timeout string is too long: %q", s) + } + unit := timeoutUnit(s[size-1]) + d, ok := timeoutUnitToDuration(unit) + if !ok { + return 0, fmt.Errorf("transport: timeout unit is not recognized: %q", s) + } + t, err := strconv.ParseInt(s[:size-1], 10, 64) + if err != nil { + return 0, err + } + const maxHours = math.MaxInt64 / int64(time.Hour) + if d == time.Hour && t > maxHours { + // This timeout would overflow math.MaxInt64; clamp it. + return time.Duration(math.MaxInt64), nil + } + return d * time.Duration(t), nil +} + +const ( + spaceByte = ' ' + tildeByte = '~' + percentByte = '%' +) + +// encodeGrpcMessage is used to encode status code in header field +// "grpc-message". It does percent encoding and also replaces invalid utf-8 +// characters with Unicode replacement character. +// +// It checks to see if each individual byte in msg is an allowable byte, and +// then either percent encoding or passing it through. When percent encoding, +// the byte is converted into hexadecimal notation with a '%' prepended. +func encodeGrpcMessage(msg string) string { + if msg == "" { + return "" + } + lenMsg := len(msg) + for i := 0; i < lenMsg; i++ { + c := msg[i] + if !(c >= spaceByte && c <= tildeByte && c != percentByte) { + return encodeGrpcMessageUnchecked(msg) + } + } + return msg +} + +func encodeGrpcMessageUnchecked(msg string) string { + var buf bytes.Buffer + for len(msg) > 0 { + r, size := utf8.DecodeRuneInString(msg) + for _, b := range []byte(string(r)) { + if size > 1 { + // If size > 1, r is not ascii. Always do percent encoding. + buf.WriteString(fmt.Sprintf("%%%02X", b)) + continue + } + + // The for loop is necessary even if size == 1. r could be + // utf8.RuneError. 
+ // + // fmt.Sprintf("%%%02X", utf8.RuneError) gives "%FFFD". + if b >= spaceByte && b <= tildeByte && b != percentByte { + buf.WriteByte(b) + } else { + buf.WriteString(fmt.Sprintf("%%%02X", b)) + } + } + msg = msg[size:] + } + return buf.String() +} + +// decodeGrpcMessage decodes the msg encoded by encodeGrpcMessage. +func decodeGrpcMessage(msg string) string { + if msg == "" { + return "" + } + lenMsg := len(msg) + for i := 0; i < lenMsg; i++ { + if msg[i] == percentByte && i+2 < lenMsg { + return decodeGrpcMessageUnchecked(msg) + } + } + return msg +} + +func decodeGrpcMessageUnchecked(msg string) string { + var buf bytes.Buffer + lenMsg := len(msg) + for i := 0; i < lenMsg; i++ { + c := msg[i] + if c == percentByte && i+2 < lenMsg { + parsed, err := strconv.ParseUint(msg[i+1:i+3], 16, 8) + if err != nil { + buf.WriteByte(c) + } else { + buf.WriteByte(byte(parsed)) + i += 2 + } + } else { + buf.WriteByte(c) + } + } + return buf.String() +} + +type bufWriter struct { + buf []byte + offset int + batchSize int + conn net.Conn + err error + + onFlush func() +} + +func newBufWriter(conn net.Conn, batchSize int) *bufWriter { + return &bufWriter{ + buf: make([]byte, batchSize*2), + batchSize: batchSize, + conn: conn, + } +} + +func (w *bufWriter) Write(b []byte) (n int, err error) { + if w.err != nil { + return 0, w.err + } + if w.batchSize == 0 { // Buffer has been disabled. + return w.conn.Write(b) + } + for len(b) > 0 { + nn := copy(w.buf[w.offset:], b) + b = b[nn:] + w.offset += nn + n += nn + if w.offset >= w.batchSize { + err = w.Flush() + } + } + return n, err +} + +func (w *bufWriter) Flush() error { + if w.err != nil { + return w.err + } + if w.offset == 0 { + return nil + } + if w.onFlush != nil { + w.onFlush() + } + _, w.err = w.conn.Write(w.buf[:w.offset]) + w.offset = 0 + return w.err +} + +type framer struct { + writer *bufWriter + fr *http2.Framer +} + +func newFramer(conn net.Conn, writeBufferSize, readBufferSize int, maxHeaderListSize uint32) *framer { + if writeBufferSize < 0 { + writeBufferSize = 0 + } + var r io.Reader = conn + if readBufferSize > 0 { + r = bufio.NewReaderSize(r, readBufferSize) + } + w := newBufWriter(conn, writeBufferSize) + f := &framer{ + writer: w, + fr: http2.NewFramer(w, r), + } + // Opt-in to Frame reuse API on framer to reduce garbage. + // Frames aren't safe to read from after a subsequent call to ReadFrame. + f.fr.SetReuseFrames() + f.fr.MaxHeaderListSize = maxHeaderListSize + f.fr.ReadMetaHeaders = hpack.NewDecoder(http2InitHeaderTableSize, nil) + return f +} diff --git a/vendor/google.golang.org/grpc/internal/transport/log.go b/vendor/google.golang.org/grpc/internal/transport/log.go new file mode 100644 index 0000000..879df80 --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/log.go @@ -0,0 +1,44 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This file contains wrappers for grpclog functions. 
+// The transport package only logs to verbose level 2 by default. + +package transport + +import "google.golang.org/grpc/grpclog" + +const logLevel = 2 + +func infof(format string, args ...interface{}) { + if grpclog.V(logLevel) { + grpclog.Infof(format, args...) + } +} + +func warningf(format string, args ...interface{}) { + if grpclog.V(logLevel) { + grpclog.Warningf(format, args...) + } +} + +func errorf(format string, args ...interface{}) { + if grpclog.V(logLevel) { + grpclog.Errorf(format, args...) + } +} diff --git a/vendor/google.golang.org/grpc/internal/transport/transport.go b/vendor/google.golang.org/grpc/internal/transport/transport.go new file mode 100644 index 0000000..7f82cbb --- /dev/null +++ b/vendor/google.golang.org/grpc/internal/transport/transport.go @@ -0,0 +1,760 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package transport defines and implements message oriented communication +// channel to complete various transactions (e.g., an RPC). It is meant for +// grpc-internal usage and is not intended to be imported directly by users. +package transport + +import ( + "context" + "errors" + "fmt" + "io" + "net" + "sync" + "sync/atomic" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" + "google.golang.org/grpc/tap" +) + +// recvMsg represents the received msg from the transport. All transport +// protocol specific info has been removed. +type recvMsg struct { + data []byte + // nil: received some data + // io.EOF: stream is completed. data is nil. + // other non-nil error: transport failure. data is nil. + err error +} + +// recvBuffer is an unbounded channel of recvMsg structs. +// Note recvBuffer differs from controlBuffer only in that recvBuffer +// holds a channel of only recvMsg structs instead of objects implementing "item" interface. +// recvBuffer is written to much more often than +// controlBuffer and using strict recvMsg structs helps avoid allocation in "recvBuffer.put" +type recvBuffer struct { + c chan recvMsg + mu sync.Mutex + backlog []recvMsg + err error +} + +func newRecvBuffer() *recvBuffer { + b := &recvBuffer{ + c: make(chan recvMsg, 1), + } + return b +} + +func (b *recvBuffer) put(r recvMsg) { + b.mu.Lock() + if b.err != nil { + b.mu.Unlock() + // An error had occurred earlier, don't accept more + // data or errors. + return + } + b.err = r.err + if len(b.backlog) == 0 { + select { + case b.c <- r: + b.mu.Unlock() + return + default: + } + } + b.backlog = append(b.backlog, r) + b.mu.Unlock() +} + +func (b *recvBuffer) load() { + b.mu.Lock() + if len(b.backlog) > 0 { + select { + case b.c <- b.backlog[0]: + b.backlog[0] = recvMsg{} + b.backlog = b.backlog[1:] + default: + } + } + b.mu.Unlock() +} + +// get returns the channel that receives a recvMsg in the buffer. 
+// +// Upon receipt of a recvMsg, the caller should call load to send another +// recvMsg onto the channel if there is any. +func (b *recvBuffer) get() <-chan recvMsg { + return b.c +} + +// recvBufferReader implements io.Reader interface to read the data from +// recvBuffer. +type recvBufferReader struct { + closeStream func(error) // Closes the client transport stream with the given error and nil trailer metadata. + ctx context.Context + ctxDone <-chan struct{} // cache of ctx.Done() (for performance). + recv *recvBuffer + last []byte // Stores the remaining data in the previous calls. + err error +} + +// Read reads the next len(p) bytes from last. If last is drained, it tries to +// read additional data from recv. It blocks if there no additional data available +// in recv. If Read returns any non-nil error, it will continue to return that error. +func (r *recvBufferReader) Read(p []byte) (n int, err error) { + if r.err != nil { + return 0, r.err + } + if r.last != nil && len(r.last) > 0 { + // Read remaining data left in last call. + copied := copy(p, r.last) + r.last = r.last[copied:] + return copied, nil + } + if r.closeStream != nil { + n, r.err = r.readClient(p) + } else { + n, r.err = r.read(p) + } + return n, r.err +} + +func (r *recvBufferReader) read(p []byte) (n int, err error) { + select { + case <-r.ctxDone: + return 0, ContextErr(r.ctx.Err()) + case m := <-r.recv.get(): + return r.readAdditional(m, p) + } +} + +func (r *recvBufferReader) readClient(p []byte) (n int, err error) { + // If the context is canceled, then closes the stream with nil metadata. + // closeStream writes its error parameter to r.recv as a recvMsg. + // r.readAdditional acts on that message and returns the necessary error. + select { + case <-r.ctxDone: + r.closeStream(ContextErr(r.ctx.Err())) + m := <-r.recv.get() + return r.readAdditional(m, p) + case m := <-r.recv.get(): + return r.readAdditional(m, p) + } +} + +func (r *recvBufferReader) readAdditional(m recvMsg, p []byte) (n int, err error) { + r.recv.load() + if m.err != nil { + return 0, m.err + } + copied := copy(p, m.data) + r.last = m.data[copied:] + return copied, nil +} + +type streamState uint32 + +const ( + streamActive streamState = iota + streamWriteDone // EndStream sent + streamReadDone // EndStream received + streamDone // the entire stream is finished. +) + +// Stream represents an RPC in the transport layer. +type Stream struct { + id uint32 + st ServerTransport // nil for client side Stream + ctx context.Context // the associated context of the stream + cancel context.CancelFunc // always nil for client side Stream + done chan struct{} // closed at the end of stream to unblock writers. On the client side. + ctxDone <-chan struct{} // same as done chan but for server side. Cache of ctx.Done() (for performance) + method string // the associated RPC method of the stream + recvCompress string + sendCompress string + buf *recvBuffer + trReader io.Reader + fc *inFlow + wq *writeQuota + + // Callback to state application's intentions to read data. This + // is used to adjust flow control, if needed. + requestRead func(int) + + headerChan chan struct{} // closed to indicate the end of header metadata. + headerDone uint32 // set when headerChan is closed. Used to avoid closing headerChan multiple times. + + // hdrMu protects header and trailer metadata on the server-side. + hdrMu sync.Mutex + // On client side, header keeps the received header metadata. + // + // On server side, header keeps the header set by SetHeader(). 
The complete + // header will merged into this after t.WriteHeader() is called. + header metadata.MD + trailer metadata.MD // the key-value map of trailer metadata. + + noHeaders bool // set if the client never received headers (set only after the stream is done). + + // On the server-side, headerSent is atomically set to 1 when the headers are sent out. + headerSent uint32 + + state streamState + + // On client-side it is the status error received from the server. + // On server-side it is unused. + status *status.Status + + bytesReceived uint32 // indicates whether any bytes have been received on this stream + unprocessed uint32 // set if the server sends a refused stream or GOAWAY including this stream + + // contentSubtype is the content-subtype for requests. + // this must be lowercase or the behavior is undefined. + contentSubtype string +} + +// isHeaderSent is only valid on the server-side. +func (s *Stream) isHeaderSent() bool { + return atomic.LoadUint32(&s.headerSent) == 1 +} + +// updateHeaderSent updates headerSent and returns true +// if it was alreay set. It is valid only on server-side. +func (s *Stream) updateHeaderSent() bool { + return atomic.SwapUint32(&s.headerSent, 1) == 1 +} + +func (s *Stream) swapState(st streamState) streamState { + return streamState(atomic.SwapUint32((*uint32)(&s.state), uint32(st))) +} + +func (s *Stream) compareAndSwapState(oldState, newState streamState) bool { + return atomic.CompareAndSwapUint32((*uint32)(&s.state), uint32(oldState), uint32(newState)) +} + +func (s *Stream) getState() streamState { + return streamState(atomic.LoadUint32((*uint32)(&s.state))) +} + +func (s *Stream) waitOnHeader() error { + if s.headerChan == nil { + // On the server headerChan is always nil since a stream originates + // only after having received headers. + return nil + } + select { + case <-s.ctx.Done(): + return ContextErr(s.ctx.Err()) + case <-s.headerChan: + return nil + } +} + +// RecvCompress returns the compression algorithm applied to the inbound +// message. It is empty string if there is no compression applied. +func (s *Stream) RecvCompress() string { + if err := s.waitOnHeader(); err != nil { + return "" + } + return s.recvCompress +} + +// SetSendCompress sets the compression algorithm to the stream. +func (s *Stream) SetSendCompress(str string) { + s.sendCompress = str +} + +// Done returns a channel which is closed when it receives the final status +// from the server. +func (s *Stream) Done() <-chan struct{} { + return s.done +} + +// Header returns the header metadata of the stream. +// +// On client side, it acquires the key-value pairs of header metadata once it is +// available. It blocks until i) the metadata is ready or ii) there is no header +// metadata or iii) the stream is canceled/expired. +// +// On server side, it returns the out header after t.WriteHeader is called. +func (s *Stream) Header() (metadata.MD, error) { + if s.headerChan == nil && s.header != nil { + // On server side, return the header in stream. It will be the out + // header after t.WriteHeader is called. + return s.header.Copy(), nil + } + err := s.waitOnHeader() + // Even if the stream is closed, header is returned if available. + select { + case <-s.headerChan: + if s.header == nil { + return nil, nil + } + return s.header.Copy(), nil + default: + } + return nil, err +} + +// TrailersOnly blocks until a header or trailers-only frame is received and +// then returns true if the stream was trailers-only. 
If the stream ends +// before headers are received, returns true, nil. If a context error happens +// first, returns it as a status error. Client-side only. +func (s *Stream) TrailersOnly() (bool, error) { + err := s.waitOnHeader() + if err != nil { + return false, err + } + return s.noHeaders, nil +} + +// Trailer returns the cached trailer metedata. Note that if it is not called +// after the entire stream is done, it could return an empty MD. Client +// side only. +// It can be safely read only after stream has ended that is either read +// or write have returned io.EOF. +func (s *Stream) Trailer() metadata.MD { + c := s.trailer.Copy() + return c +} + +// ContentSubtype returns the content-subtype for a request. For example, a +// content-subtype of "proto" will result in a content-type of +// "application/grpc+proto". This will always be lowercase. See +// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for +// more details. +func (s *Stream) ContentSubtype() string { + return s.contentSubtype +} + +// Context returns the context of the stream. +func (s *Stream) Context() context.Context { + return s.ctx +} + +// Method returns the method for the stream. +func (s *Stream) Method() string { + return s.method +} + +// Status returns the status received from the server. +// Status can be read safely only after the stream has ended, +// that is, after Done() is closed. +func (s *Stream) Status() *status.Status { + return s.status +} + +// SetHeader sets the header metadata. This can be called multiple times. +// Server side only. +// This should not be called in parallel to other data writes. +func (s *Stream) SetHeader(md metadata.MD) error { + if md.Len() == 0 { + return nil + } + if s.isHeaderSent() || s.getState() == streamDone { + return ErrIllegalHeaderWrite + } + s.hdrMu.Lock() + s.header = metadata.Join(s.header, md) + s.hdrMu.Unlock() + return nil +} + +// SendHeader sends the given header metadata. The given metadata is +// combined with any metadata set by previous calls to SetHeader and +// then written to the transport stream. +func (s *Stream) SendHeader(md metadata.MD) error { + return s.st.WriteHeader(s, md) +} + +// SetTrailer sets the trailer metadata which will be sent with the RPC status +// by the server. This can be called multiple times. Server side only. +// This should not be called parallel to other data writes. +func (s *Stream) SetTrailer(md metadata.MD) error { + if md.Len() == 0 { + return nil + } + if s.getState() == streamDone { + return ErrIllegalHeaderWrite + } + s.hdrMu.Lock() + s.trailer = metadata.Join(s.trailer, md) + s.hdrMu.Unlock() + return nil +} + +func (s *Stream) write(m recvMsg) { + s.buf.put(m) +} + +// Read reads all p bytes from the wire for this stream. +func (s *Stream) Read(p []byte) (n int, err error) { + // Don't request a read if there was an error earlier + if er := s.trReader.(*transportReader).er; er != nil { + return 0, er + } + s.requestRead(len(p)) + return io.ReadFull(s.trReader, p) +} + +// tranportReader reads all the data available for this Stream from the transport and +// passes them into the decoder, which converts them into a gRPC message stream. +// The error is io.EOF when the stream is done or another non-nil error if +// the stream broke. +type transportReader struct { + reader io.Reader + // The handler to control the window update procedure for both this + // particular stream and the associated transport. 
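+ // It is invoked with the number of bytes just consumed by a successful Read.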
+ windowHandler func(int) + er error +} + +func (t *transportReader) Read(p []byte) (n int, err error) { + n, err = t.reader.Read(p) + if err != nil { + t.er = err + return + } + t.windowHandler(n) + return +} + +// BytesReceived indicates whether any bytes have been received on this stream. +func (s *Stream) BytesReceived() bool { + return atomic.LoadUint32(&s.bytesReceived) == 1 +} + +// Unprocessed indicates whether the server did not process this stream -- +// i.e. it sent a refused stream or GOAWAY including this stream ID. +func (s *Stream) Unprocessed() bool { + return atomic.LoadUint32(&s.unprocessed) == 1 +} + +// GoString is implemented by Stream so context.String() won't +// race when printing %#v. +func (s *Stream) GoString() string { + return fmt.Sprintf("", s, s.method) +} + +// state of transport +type transportState int + +const ( + reachable transportState = iota + closing + draining +) + +// ServerConfig consists of all the configurations to establish a server transport. +type ServerConfig struct { + MaxStreams uint32 + AuthInfo credentials.AuthInfo + InTapHandle tap.ServerInHandle + StatsHandler stats.Handler + KeepaliveParams keepalive.ServerParameters + KeepalivePolicy keepalive.EnforcementPolicy + InitialWindowSize int32 + InitialConnWindowSize int32 + WriteBufferSize int + ReadBufferSize int + ChannelzParentID int64 + MaxHeaderListSize *uint32 +} + +// NewServerTransport creates a ServerTransport with conn or non-nil error +// if it fails. +func NewServerTransport(protocol string, conn net.Conn, config *ServerConfig) (ServerTransport, error) { + return newHTTP2Server(conn, config) +} + +// ConnectOptions covers all relevant options for communicating with the server. +type ConnectOptions struct { + // UserAgent is the application user agent. + UserAgent string + // Dialer specifies how to dial a network address. + Dialer func(context.Context, string) (net.Conn, error) + // FailOnNonTempDialError specifies if gRPC fails on non-temporary dial errors. + FailOnNonTempDialError bool + // PerRPCCredentials stores the PerRPCCredentials required to issue RPCs. + PerRPCCredentials []credentials.PerRPCCredentials + // TransportCredentials stores the Authenticator required to setup a client + // connection. Only one of TransportCredentials and CredsBundle is non-nil. + TransportCredentials credentials.TransportCredentials + // CredsBundle is the credentials bundle to be used. Only one of + // TransportCredentials and CredsBundle is non-nil. + CredsBundle credentials.Bundle + // KeepaliveParams stores the keepalive parameters. + KeepaliveParams keepalive.ClientParameters + // StatsHandler stores the handler for stats. + StatsHandler stats.Handler + // InitialWindowSize sets the initial window size for a stream. + InitialWindowSize int32 + // InitialConnWindowSize sets the initial window size for a connection. + InitialConnWindowSize int32 + // WriteBufferSize sets the size of write buffer which in turn determines how much data can be batched before it's written on the wire. + WriteBufferSize int + // ReadBufferSize sets the size of read buffer, which in turn determines how much data can be read at most for one read syscall. + ReadBufferSize int + // ChannelzParentID sets the addrConn id which initiate the creation of this client transport. + ChannelzParentID int64 + // MaxHeaderListSize sets the max (uncompressed) size of header list that is prepared to be received. 
+ MaxHeaderListSize *uint32 +} + +// TargetInfo contains the information of the target such as network address and metadata. +type TargetInfo struct { + Addr string + Metadata interface{} + Authority string +} + +// NewClientTransport establishes the transport with the required ConnectOptions +// and returns it to the caller. +func NewClientTransport(connectCtx, ctx context.Context, target TargetInfo, opts ConnectOptions, onPrefaceReceipt func(), onGoAway func(GoAwayReason), onClose func()) (ClientTransport, error) { + return newHTTP2Client(connectCtx, ctx, target, opts, onPrefaceReceipt, onGoAway, onClose) +} + +// Options provides additional hints and information for message +// transmission. +type Options struct { + // Last indicates whether this write is the last piece for + // this stream. + Last bool +} + +// CallHdr carries the information of a particular RPC. +type CallHdr struct { + // Host specifies the peer's host. + Host string + + // Method specifies the operation to perform. + Method string + + // SendCompress specifies the compression algorithm applied on + // outbound message. + SendCompress string + + // Creds specifies credentials.PerRPCCredentials for a call. + Creds credentials.PerRPCCredentials + + // ContentSubtype specifies the content-subtype for a request. For example, a + // content-subtype of "proto" will result in a content-type of + // "application/grpc+proto". The value of ContentSubtype must be all + // lowercase, otherwise the behavior is undefined. See + // https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests + // for more details. + ContentSubtype string + + PreviousAttempts int // value of grpc-previous-rpc-attempts header to set +} + +// ClientTransport is the common interface for all gRPC client-side transport +// implementations. +type ClientTransport interface { + // Close tears down this transport. Once it returns, the transport + // should not be accessed any more. The caller must make sure this + // is called only once. + Close() error + + // GracefulClose starts to tear down the transport. It stops accepting + // new RPCs and wait the completion of the pending RPCs. + GracefulClose() error + + // Write sends the data for the given stream. A nil stream indicates + // the write is to be performed on the transport as a whole. + Write(s *Stream, hdr []byte, data []byte, opts *Options) error + + // NewStream creates a Stream for an RPC. + NewStream(ctx context.Context, callHdr *CallHdr) (*Stream, error) + + // CloseStream clears the footprint of a stream when the stream is + // not needed any more. The err indicates the error incurred when + // CloseStream is called. Must be called when a stream is finished + // unless the associated transport is closing. + CloseStream(stream *Stream, err error) + + // Error returns a channel that is closed when some I/O error + // happens. Typically the caller should have a goroutine to monitor + // this in order to take action (e.g., close the current transport + // and create a new one) in error case. It should not return nil + // once the transport is initiated. + Error() <-chan struct{} + + // GoAway returns a channel that is closed when ClientTransport + // receives the draining signal from the server (e.g., GOAWAY frame in + // HTTP/2). + GoAway() <-chan struct{} + + // GetGoAwayReason returns the reason why GoAway frame was received. + GetGoAwayReason() GoAwayReason + + // RemoteAddr returns the remote network address. 
+ RemoteAddr() net.Addr + + // IncrMsgSent increments the number of message sent through this transport. + IncrMsgSent() + + // IncrMsgRecv increments the number of message received through this transport. + IncrMsgRecv() +} + +// ServerTransport is the common interface for all gRPC server-side transport +// implementations. +// +// Methods may be called concurrently from multiple goroutines, but +// Write methods for a given Stream will be called serially. +type ServerTransport interface { + // HandleStreams receives incoming streams using the given handler. + HandleStreams(func(*Stream), func(context.Context, string) context.Context) + + // WriteHeader sends the header metadata for the given stream. + // WriteHeader may not be called on all streams. + WriteHeader(s *Stream, md metadata.MD) error + + // Write sends the data for the given stream. + // Write may not be called on all streams. + Write(s *Stream, hdr []byte, data []byte, opts *Options) error + + // WriteStatus sends the status of a stream to the client. WriteStatus is + // the final call made on a stream and always occurs. + WriteStatus(s *Stream, st *status.Status) error + + // Close tears down the transport. Once it is called, the transport + // should not be accessed any more. All the pending streams and their + // handlers will be terminated asynchronously. + Close() error + + // RemoteAddr returns the remote network address. + RemoteAddr() net.Addr + + // Drain notifies the client this ServerTransport stops accepting new RPCs. + Drain() + + // IncrMsgSent increments the number of message sent through this transport. + IncrMsgSent() + + // IncrMsgRecv increments the number of message received through this transport. + IncrMsgRecv() +} + +// connectionErrorf creates an ConnectionError with the specified error description. +func connectionErrorf(temp bool, e error, format string, a ...interface{}) ConnectionError { + return ConnectionError{ + Desc: fmt.Sprintf(format, a...), + temp: temp, + err: e, + } +} + +// ConnectionError is an error that results in the termination of the +// entire connection and the retry of all the active streams. +type ConnectionError struct { + Desc string + temp bool + err error +} + +func (e ConnectionError) Error() string { + return fmt.Sprintf("connection error: desc = %q", e.Desc) +} + +// Temporary indicates if this connection error is temporary or fatal. +func (e ConnectionError) Temporary() bool { + return e.temp +} + +// Origin returns the original error of this connection error. +func (e ConnectionError) Origin() error { + // Never return nil error here. + // If the original error is nil, return itself. + if e.err == nil { + return e + } + return e.err +} + +var ( + // ErrConnClosing indicates that the transport is closing. + ErrConnClosing = connectionErrorf(true, nil, "transport is closing") + // errStreamDrain indicates that the stream is rejected because the + // connection is draining. This could be caused by goaway or balancer + // removing the address. + errStreamDrain = status.Error(codes.Unavailable, "the connection is draining") + // errStreamDone is returned from write at the client side to indiacte application + // layer of an error. + errStreamDone = errors.New("the stream is done") + // StatusGoAway indicates that the server sent a GOAWAY that included this + // stream's ID in unprocessed RPCs. 
+ statusGoAway = status.New(codes.Unavailable, "the stream is rejected because server is draining the connection") +) + +// GoAwayReason contains the reason for the GoAway frame received. +type GoAwayReason uint8 + +const ( + // GoAwayInvalid indicates that no GoAway frame is received. + GoAwayInvalid GoAwayReason = 0 + // GoAwayNoReason is the default value when GoAway frame is received. + GoAwayNoReason GoAwayReason = 1 + // GoAwayTooManyPings indicates that a GoAway frame with + // ErrCodeEnhanceYourCalm was received and that the debug data said + // "too_many_pings". + GoAwayTooManyPings GoAwayReason = 2 +) + +// channelzData is used to store channelz related data for http2Client and http2Server. +// These fields cannot be embedded in the original structs (e.g. http2Client), since to do atomic +// operation on int64 variable on 32-bit machine, user is responsible to enforce memory alignment. +// Here, by grouping those int64 fields inside a struct, we are enforcing the alignment. +type channelzData struct { + kpCount int64 + // The number of streams that have started, including already finished ones. + streamsStarted int64 + // Client side: The number of streams that have ended successfully by receiving + // EoS bit set frame from server. + // Server side: The number of streams that have ended successfully by sending + // frame with EoS bit set. + streamsSucceeded int64 + streamsFailed int64 + // lastStreamCreatedTime stores the timestamp that the last stream gets created. It is of int64 type + // instead of time.Time since it's more costly to atomically update time.Time variable than int64 + // variable. The same goes for lastMsgSentTime and lastMsgRecvTime. + lastStreamCreatedTime int64 + msgSent int64 + msgRecv int64 + lastMsgSentTime int64 + lastMsgRecvTime int64 +} + +// ContextErr converts the error from context package into a status error. +func ContextErr(err error) error { + switch err { + case context.DeadlineExceeded: + return status.Error(codes.DeadlineExceeded, err.Error()) + case context.Canceled: + return status.Error(codes.Canceled, err.Error()) + } + return status.Errorf(codes.Internal, "Unexpected error from context packet: %v", err) +} diff --git a/vendor/google.golang.org/grpc/interop/alts/client/client.go b/vendor/google.golang.org/grpc/interop/alts/client/client.go new file mode 100644 index 0000000..f3b1028 --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/alts/client/client.go @@ -0,0 +1,65 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This binary can only run on Google Cloud Platform (GCP). 
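+// It dials the interop test server with ALTS transport credentials and issues
+// a single EmptyCall RPC.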
+package main + +import ( + "context" + "flag" + "time" + + grpc "google.golang.org/grpc" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/grpclog" + testpb "google.golang.org/grpc/interop/grpc_testing" +) + +var ( + hsAddr = flag.String("alts_handshaker_service_address", "", "ALTS handshaker gRPC service address") + serverAddr = flag.String("server_address", ":8080", "The port on which the server is listening") +) + +func main() { + flag.Parse() + + opts := alts.DefaultClientOptions() + if *hsAddr != "" { + opts.HandshakerServiceAddress = *hsAddr + } + altsTC := alts.NewClientCreds(opts) + // Block until the server is ready. + conn, err := grpc.Dial(*serverAddr, grpc.WithTransportCredentials(altsTC), grpc.WithBlock()) + if err != nil { + grpclog.Fatalf("gRPC Client: failed to dial the server at %v: %v", *serverAddr, err) + } + defer conn.Close() + grpcClient := testpb.NewTestServiceClient(conn) + + // Call the EmptyCall API. + ctx := context.Background() + request := &testpb.Empty{} + if _, err := grpcClient.EmptyCall(ctx, request); err != nil { + grpclog.Fatalf("grpc Client: EmptyCall(_, %v) failed: %v", request, err) + } + grpclog.Info("grpc Client: empty call succeeded") + + // This sleep prevents the connection from being abruptly disconnected + // when running this binary (along with grpc_server) on GCP dev cluster. + time.Sleep(1 * time.Second) +} diff --git a/vendor/google.golang.org/grpc/interop/alts/server/server.go b/vendor/google.golang.org/grpc/interop/alts/server/server.go new file mode 100644 index 0000000..c70a20b --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/alts/server/server.go @@ -0,0 +1,53 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This binary can only run on Google Cloud Platform (GCP). 
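+// It serves the gRPC interop TestService behind ALTS server credentials.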
+package main + +import ( + "flag" + "net" + + grpc "google.golang.org/grpc" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/interop" + testpb "google.golang.org/grpc/interop/grpc_testing" +) + +var ( + hsAddr = flag.String("alts_handshaker_service_address", "", "ALTS handshaker gRPC service address") + serverAddr = flag.String("server_address", ":8080", "The port on which the server is listening") +) + +func main() { + flag.Parse() + + lis, err := net.Listen("tcp", *serverAddr) + if err != nil { + grpclog.Fatalf("gRPC Server: failed to start the server at %v: %v", *serverAddr, err) + } + opts := alts.DefaultServerOptions() + if *hsAddr != "" { + opts.HandshakerServiceAddress = *hsAddr + } + altsTC := alts.NewServerCreds(opts) + grpcServer := grpc.NewServer(grpc.Creds(altsTC)) + testpb.RegisterTestServiceServer(grpcServer, interop.NewTestServer()) + grpcServer.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/interop/client/client.go b/vendor/google.golang.org/grpc/interop/client/client.go new file mode 100644 index 0000000..ce14e56 --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/client/client.go @@ -0,0 +1,276 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package main + +import ( + "flag" + "net" + "strconv" + + "google.golang.org/grpc" + _ "google.golang.org/grpc/balancer/grpclb" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/credentials/google" + "google.golang.org/grpc/credentials/oauth" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/interop" + testpb "google.golang.org/grpc/interop/grpc_testing" + "google.golang.org/grpc/resolver" + "google.golang.org/grpc/testdata" +) + +const ( + googleDefaultCredsName = "google_default_credentials" + computeEngineCredsName = "compute_engine_channel_creds" +) + +var ( + caFile = flag.String("ca_file", "", "The file containning the CA root cert file") + useTLS = flag.Bool("use_tls", false, "Connection uses TLS if true") + useALTS = flag.Bool("use_alts", false, "Connection uses ALTS if true (this option can only be used on GCP)") + customCredentialsType = flag.String("custom_credentials_type", "", "Custom creds to use, excluding TLS or ALTS") + altsHSAddr = flag.String("alts_handshaker_service_address", "", "ALTS handshaker gRPC service address") + testCA = flag.Bool("use_test_ca", false, "Whether to replace platform root CAs with test CA as the CA root") + serviceAccountKeyFile = flag.String("service_account_key_file", "", "Path to service account json key file") + oauthScope = flag.String("oauth_scope", "", "The scope for OAuth2 tokens") + defaultServiceAccount = flag.String("default_service_account", "", "Email of GCE default service account") + serverHost = flag.String("server_host", "localhost", "The server host name") + serverPort = flag.Int("server_port", 10000, "The server port number") + tlsServerName = flag.String("server_host_override", "", "The server name use to verify the hostname returned by TLS handshake if it is not empty. Otherwise, --server_host is used.") + testCase = flag.String("test_case", "large_unary", + `Configure different test cases. 
Valid options are: + empty_unary : empty (zero bytes) request and response; + large_unary : single request and (large) response; + client_streaming : request streaming with single response; + server_streaming : single request with response streaming; + ping_pong : full-duplex streaming; + empty_stream : full-duplex streaming with zero message; + timeout_on_sleeping_server: fullduplex streaming on a sleeping server; + compute_engine_creds: large_unary with compute engine auth; + service_account_creds: large_unary with service account auth; + jwt_token_creds: large_unary with jwt token auth; + per_rpc_creds: large_unary with per rpc token; + oauth2_auth_token: large_unary with oauth2 token auth; + google_default_credentials: large_unary with google default credentials + compute_engine_channel_credentials: large_unary with compute engine creds + cancel_after_begin: cancellation after metadata has been sent but before payloads are sent; + cancel_after_first_response: cancellation after receiving 1st message from the server; + status_code_and_message: status code propagated back to client; + special_status_message: Unicode and whitespace is correctly processed in status message; + custom_metadata: server will echo custom metadata; + unimplemented_method: client attempts to call unimplemented method; + unimplemented_service: client attempts to call unimplemented service.`) +) + +type credsMode uint8 + +const ( + credsNone credsMode = iota + credsTLS + credsALTS + credsGoogleDefaultCreds + credsComputeEngineCreds +) + +func main() { + flag.Parse() + var useGDC bool // use google default creds + var useCEC bool // use compute engine creds + if *customCredentialsType != "" { + switch *customCredentialsType { + case googleDefaultCredsName: + useGDC = true + case computeEngineCredsName: + useCEC = true + default: + grpclog.Fatalf("If set, custom_credentials_type can only be set to one of %v or %v", + googleDefaultCredsName, computeEngineCredsName) + } + } + if (*useTLS && *useALTS) || (*useTLS && useGDC) || (*useALTS && useGDC) || (*useTLS && useCEC) || (*useALTS && useCEC) { + grpclog.Fatalf("only one of TLS, ALTS, google default creds, or compute engine creds can be used") + } + + var credsChosen credsMode + switch { + case *useTLS: + credsChosen = credsTLS + case *useALTS: + credsChosen = credsALTS + case useGDC: + credsChosen = credsGoogleDefaultCreds + case useCEC: + credsChosen = credsComputeEngineCreds + } + + resolver.SetDefaultScheme("dns") + serverAddr := net.JoinHostPort(*serverHost, strconv.Itoa(*serverPort)) + var opts []grpc.DialOption + switch credsChosen { + case credsTLS: + var sn string + if *tlsServerName != "" { + sn = *tlsServerName + } + var creds credentials.TransportCredentials + if *testCA { + var err error + if *caFile == "" { + *caFile = testdata.Path("ca.pem") + } + creds, err = credentials.NewClientTLSFromFile(*caFile, sn) + if err != nil { + grpclog.Fatalf("Failed to create TLS credentials %v", err) + } + } else { + creds = credentials.NewClientTLSFromCert(nil, sn) + } + opts = append(opts, grpc.WithTransportCredentials(creds)) + case credsALTS: + altsOpts := alts.DefaultClientOptions() + if *altsHSAddr != "" { + altsOpts.HandshakerServiceAddress = *altsHSAddr + } + altsTC := alts.NewClientCreds(altsOpts) + opts = append(opts, grpc.WithTransportCredentials(altsTC)) + case credsGoogleDefaultCreds: + opts = append(opts, grpc.WithCredentialsBundle(google.NewDefaultCredentials())) + case credsComputeEngineCreds: + opts = append(opts, 
grpc.WithCredentialsBundle(google.NewComputeEngineCredentials())) + case credsNone: + opts = append(opts, grpc.WithInsecure()) + default: + grpclog.Fatal("Invalid creds") + } + if credsChosen == credsTLS { + if *testCase == "compute_engine_creds" { + opts = append(opts, grpc.WithPerRPCCredentials(oauth.NewComputeEngine())) + } else if *testCase == "service_account_creds" { + jwtCreds, err := oauth.NewServiceAccountFromFile(*serviceAccountKeyFile, *oauthScope) + if err != nil { + grpclog.Fatalf("Failed to create JWT credentials: %v", err) + } + opts = append(opts, grpc.WithPerRPCCredentials(jwtCreds)) + } else if *testCase == "jwt_token_creds" { + jwtCreds, err := oauth.NewJWTAccessFromFile(*serviceAccountKeyFile) + if err != nil { + grpclog.Fatalf("Failed to create JWT credentials: %v", err) + } + opts = append(opts, grpc.WithPerRPCCredentials(jwtCreds)) + } else if *testCase == "oauth2_auth_token" { + opts = append(opts, grpc.WithPerRPCCredentials(oauth.NewOauthAccess(interop.GetToken(*serviceAccountKeyFile, *oauthScope)))) + } + } + opts = append(opts, grpc.WithBlock()) + conn, err := grpc.Dial(serverAddr, opts...) + if err != nil { + grpclog.Fatalf("Fail to dial: %v", err) + } + defer conn.Close() + tc := testpb.NewTestServiceClient(conn) + switch *testCase { + case "empty_unary": + interop.DoEmptyUnaryCall(tc) + grpclog.Infoln("EmptyUnaryCall done") + case "large_unary": + interop.DoLargeUnaryCall(tc) + grpclog.Infoln("LargeUnaryCall done") + case "client_streaming": + interop.DoClientStreaming(tc) + grpclog.Infoln("ClientStreaming done") + case "server_streaming": + interop.DoServerStreaming(tc) + grpclog.Infoln("ServerStreaming done") + case "ping_pong": + interop.DoPingPong(tc) + grpclog.Infoln("Pingpong done") + case "empty_stream": + interop.DoEmptyStream(tc) + grpclog.Infoln("Emptystream done") + case "timeout_on_sleeping_server": + interop.DoTimeoutOnSleepingServer(tc) + grpclog.Infoln("TimeoutOnSleepingServer done") + case "compute_engine_creds": + if credsChosen != credsTLS { + grpclog.Fatalf("TLS credentials need to be set for compute_engine_creds test case.") + } + interop.DoComputeEngineCreds(tc, *defaultServiceAccount, *oauthScope) + grpclog.Infoln("ComputeEngineCreds done") + case "service_account_creds": + if credsChosen != credsTLS { + grpclog.Fatalf("TLS credentials need to be set for service_account_creds test case.") + } + interop.DoServiceAccountCreds(tc, *serviceAccountKeyFile, *oauthScope) + grpclog.Infoln("ServiceAccountCreds done") + case "jwt_token_creds": + if credsChosen != credsTLS { + grpclog.Fatalf("TLS credentials need to be set for jwt_token_creds test case.") + } + interop.DoJWTTokenCreds(tc, *serviceAccountKeyFile) + grpclog.Infoln("JWTtokenCreds done") + case "per_rpc_creds": + if credsChosen != credsTLS { + grpclog.Fatalf("TLS credentials need to be set for per_rpc_creds test case.") + } + interop.DoPerRPCCreds(tc, *serviceAccountKeyFile, *oauthScope) + grpclog.Infoln("PerRPCCreds done") + case "oauth2_auth_token": + if credsChosen != credsTLS { + grpclog.Fatalf("TLS credentials need to be set for oauth2_auth_token test case.") + } + interop.DoOauth2TokenCreds(tc, *serviceAccountKeyFile, *oauthScope) + grpclog.Infoln("Oauth2TokenCreds done") + case "google_default_credentials": + if credsChosen != credsGoogleDefaultCreds { + grpclog.Fatalf("GoogleDefaultCredentials need to be set for google_default_credentials test case.") + } + interop.DoGoogleDefaultCredentials(tc, *defaultServiceAccount) + grpclog.Infoln("GoogleDefaultCredentials done") + case 
"compute_engine_channel_credentials": + if credsChosen != credsComputeEngineCreds { + grpclog.Fatalf("ComputeEngineCreds need to be set for compute_engine_channel_credentials test case.") + } + interop.DoComputeEngineChannelCredentials(tc, *defaultServiceAccount) + grpclog.Infoln("ComputeEngineChannelCredentials done") + case "cancel_after_begin": + interop.DoCancelAfterBegin(tc) + grpclog.Infoln("CancelAfterBegin done") + case "cancel_after_first_response": + interop.DoCancelAfterFirstResponse(tc) + grpclog.Infoln("CancelAfterFirstResponse done") + case "status_code_and_message": + interop.DoStatusCodeAndMessage(tc) + grpclog.Infoln("StatusCodeAndMessage done") + case "special_status_message": + interop.DoSpecialStatusMessage(tc) + grpclog.Infoln("SpecialStatusMessage done") + case "custom_metadata": + interop.DoCustomMetadata(tc) + grpclog.Infoln("CustomMetadata done") + case "unimplemented_method": + interop.DoUnimplementedMethod(conn) + grpclog.Infoln("UnimplementedMethod done") + case "unimplemented_service": + interop.DoUnimplementedService(testpb.NewUnimplementedServiceClient(conn)) + grpclog.Infoln("UnimplementedService done") + default: + grpclog.Fatal("Unsupported test case: ", *testCase) + } +} diff --git a/vendor/google.golang.org/grpc/interop/fake_grpclb/fake_grpclb.go b/vendor/google.golang.org/grpc/interop/fake_grpclb/fake_grpclb.go new file mode 100644 index 0000000..f6d5504 --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/fake_grpclb/fake_grpclb.go @@ -0,0 +1,169 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This file is for testing only. Runs a fake grpclb balancer server. +// The name of the service to load balance for and the addresses +// of that service are provided by command line flags. 
+package main + +import ( + "flag" + "net" + "strconv" + "strings" + "time" + + "google.golang.org/grpc" + lbpb "google.golang.org/grpc/balancer/grpclb/grpc_lb_v1" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/status" + "google.golang.org/grpc/testdata" +) + +var ( + port = flag.Int("port", 10000, "Port to listen on.") + backendAddrs = flag.String("backend_addrs", "", "Comma separated list of backend IP/port addresses.") + useALTS = flag.Bool("use_alts", false, "Listen on ALTS credentials.") + useTLS = flag.Bool("use_tls", false, "Listen on TLS credentials, using a test certificate.") + shortStream = flag.Bool("short_stream", false, "End the balancer stream immediately after sending the first server list.") + serviceName = flag.String("service_name", "UNSET", "Name of the service being load balanced for.") +) + +type loadBalancerServer struct { + serverListResponse *lbpb.LoadBalanceResponse +} + +func (l *loadBalancerServer) BalanceLoad(stream lbpb.LoadBalancer_BalanceLoadServer) error { + grpclog.Info("Begin handling new BalancerLoad request.") + var lbReq *lbpb.LoadBalanceRequest + var err error + if lbReq, err = stream.Recv(); err != nil { + grpclog.Errorf("Error receiving LoadBalanceRequest: %v", err) + return err + } + grpclog.Info("LoadBalancerRequest received.") + initialReq := lbReq.GetInitialRequest() + if initialReq == nil { + grpclog.Info("Expected first request to be an InitialRequest. Got: %v", lbReq) + return status.Error(codes.Unknown, "First request not an InitialRequest") + } + // gRPC clients targeting foo.bar.com:443 can sometimes include the ":443" suffix in + // their requested names; handle this case. TODO: make 443 configurable? + var cleanedName string + var requestedNamePortNumber string + if cleanedName, requestedNamePortNumber, err = net.SplitHostPort(initialReq.Name); err != nil { + cleanedName = initialReq.Name + } else { + if requestedNamePortNumber != "443" { + grpclog.Info("Bad requested service name port number: %v.", requestedNamePortNumber) + return status.Error(codes.Unknown, "Bad requested service name port number") + } + } + if cleanedName != *serviceName { + grpclog.Info("Expected requested service name: %v. 
Got: %v", *serviceName, initialReq.Name) + return status.Error(codes.NotFound, "Bad requested service name") + } + if err := stream.Send(&lbpb.LoadBalanceResponse{ + LoadBalanceResponseType: &lbpb.LoadBalanceResponse_InitialResponse{ + InitialResponse: &lbpb.InitialLoadBalanceResponse{}, + }, + }); err != nil { + grpclog.Errorf("Error sending initial LB response: %v", err) + return status.Error(codes.Unknown, "Error sending initial response") + } + grpclog.Info("Send LoadBalanceResponse: %v", l.serverListResponse) + if err := stream.Send(l.serverListResponse); err != nil { + grpclog.Errorf("Error sending LB response: %v", err) + return status.Error(codes.Unknown, "Error sending response") + } + if *shortStream { + return nil + } + for { + grpclog.Info("Send LoadBalanceResponse: %v", l.serverListResponse) + if err := stream.Send(l.serverListResponse); err != nil { + grpclog.Errorf("Error sending LB response: %v", err) + return status.Error(codes.Unknown, "Error sending response") + } + time.Sleep(10 * time.Second) + } +} + +func main() { + flag.Parse() + var opts []grpc.ServerOption + if *useTLS { + certFile := testdata.Path("server1.pem") + keyFile := testdata.Path("server1.key") + creds, err := credentials.NewServerTLSFromFile(certFile, keyFile) + if err != nil { + grpclog.Fatalf("Failed to generate credentials %v", err) + } + opts = append(opts, grpc.Creds(creds)) + } else if *useALTS { + altsOpts := alts.DefaultServerOptions() + altsTC := alts.NewServerCreds(altsOpts) + opts = append(opts, grpc.Creds(altsTC)) + } + var serverList []*lbpb.Server + if len(*backendAddrs) == 0 { + serverList = make([]*lbpb.Server, 0) + } else { + rawBackendAddrs := strings.Split(*backendAddrs, ",") + serverList = make([]*lbpb.Server, len(rawBackendAddrs)) + for i := range rawBackendAddrs { + rawIP, rawPort, err := net.SplitHostPort(rawBackendAddrs[i]) + if err != nil { + grpclog.Fatalf("Failed to parse --backend_addrs[%d]=%v, error: %v", i, rawBackendAddrs[i], err) + } + ip := net.ParseIP(rawIP) + if ip == nil { + grpclog.Fatalf("Failed to parse ip: %v", rawIP) + } + numericPort, err := strconv.Atoi(rawPort) + if err != nil { + grpclog.Fatalf("Failed to convert port %v to int", rawPort) + } + grpclog.Infof("Adding backend ip: %v, port: %d", ip.String(), numericPort) + serverList[i] = &lbpb.Server{ + IpAddress: ip, + Port: int32(numericPort), + } + } + } + serverListResponse := &lbpb.LoadBalanceResponse{ + LoadBalanceResponseType: &lbpb.LoadBalanceResponse_ServerList{ + ServerList: &lbpb.ServerList{ + Servers: serverList, + }, + }, + } + server := grpc.NewServer(opts...) + grpclog.Infof("Begin listening on %d.", *port) + lis, err := net.Listen("tcp", ":"+strconv.Itoa(*port)) + if err != nil { + grpclog.Fatalf("Failed to listen on port %v: %v", *port, err) + } + lbpb.RegisterLoadBalancerServer(server, &loadBalancerServer{ + serverListResponse: serverListResponse, + }) + server.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/interop/grpc_testing/test.pb.go b/vendor/google.golang.org/grpc/interop/grpc_testing/test.pb.go new file mode 100644 index 0000000..bb5e2ee --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/grpc_testing/test.pb.go @@ -0,0 +1,1087 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: grpc_testing/test.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of payload that should be returned. +type PayloadType int32 + +const ( + // Compressable text format. + PayloadType_COMPRESSABLE PayloadType = 0 + // Uncompressable binary format. + PayloadType_UNCOMPRESSABLE PayloadType = 1 + // Randomly chosen from all other formats defined in this enum. + PayloadType_RANDOM PayloadType = 2 +) + +var PayloadType_name = map[int32]string{ + 0: "COMPRESSABLE", + 1: "UNCOMPRESSABLE", + 2: "RANDOM", +} +var PayloadType_value = map[string]int32{ + "COMPRESSABLE": 0, + "UNCOMPRESSABLE": 1, + "RANDOM": 2, +} + +func (x PayloadType) String() string { + return proto.EnumName(PayloadType_name, int32(x)) +} +func (PayloadType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{0} +} + +type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{0} +} +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (dst *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(dst, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo + +// A block of data, to simply increase gRPC message size. +type Payload struct { + // The type of data in body. + Type PayloadType `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.testing.PayloadType" json:"type,omitempty"` + // Primary contents of payload. 
+ Body []byte `protobuf:"bytes,2,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Payload) Reset() { *m = Payload{} } +func (m *Payload) String() string { return proto.CompactTextString(m) } +func (*Payload) ProtoMessage() {} +func (*Payload) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{1} +} +func (m *Payload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Payload.Unmarshal(m, b) +} +func (m *Payload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Payload.Marshal(b, m, deterministic) +} +func (dst *Payload) XXX_Merge(src proto.Message) { + xxx_messageInfo_Payload.Merge(dst, src) +} +func (m *Payload) XXX_Size() int { + return xxx_messageInfo_Payload.Size(m) +} +func (m *Payload) XXX_DiscardUnknown() { + xxx_messageInfo_Payload.DiscardUnknown(m) +} + +var xxx_messageInfo_Payload proto.InternalMessageInfo + +func (m *Payload) GetType() PayloadType { + if m != nil { + return m.Type + } + return PayloadType_COMPRESSABLE +} + +func (m *Payload) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// A protobuf representation for grpc status. This is used by test +// clients to specify a status that the server should attempt to return. +type EchoStatus struct { + Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EchoStatus) Reset() { *m = EchoStatus{} } +func (m *EchoStatus) String() string { return proto.CompactTextString(m) } +func (*EchoStatus) ProtoMessage() {} +func (*EchoStatus) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{2} +} +func (m *EchoStatus) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EchoStatus.Unmarshal(m, b) +} +func (m *EchoStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EchoStatus.Marshal(b, m, deterministic) +} +func (dst *EchoStatus) XXX_Merge(src proto.Message) { + xxx_messageInfo_EchoStatus.Merge(dst, src) +} +func (m *EchoStatus) XXX_Size() int { + return xxx_messageInfo_EchoStatus.Size(m) +} +func (m *EchoStatus) XXX_DiscardUnknown() { + xxx_messageInfo_EchoStatus.DiscardUnknown(m) +} + +var xxx_messageInfo_EchoStatus proto.InternalMessageInfo + +func (m *EchoStatus) GetCode() int32 { + if m != nil { + return m.Code + } + return 0 +} + +func (m *EchoStatus) GetMessage() string { + if m != nil { + return m.Message + } + return "" +} + +// Unary request. +type SimpleRequest struct { + // Desired payload type in the response from the server. + // If response_type is RANDOM, server randomly chooses one from other formats. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Desired payload size in the response from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + ResponseSize int32 `protobuf:"varint,2,opt,name=response_size,json=responseSize,proto3" json:"response_size,omitempty"` + // Optional input payload sent along with the request. 
+ Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + // Whether SimpleResponse should include username. + FillUsername bool `protobuf:"varint,4,opt,name=fill_username,json=fillUsername,proto3" json:"fill_username,omitempty"` + // Whether SimpleResponse should include OAuth scope. + FillOauthScope bool `protobuf:"varint,5,opt,name=fill_oauth_scope,json=fillOauthScope,proto3" json:"fill_oauth_scope,omitempty"` + // Whether server should return a given status + ResponseStatus *EchoStatus `protobuf:"bytes,7,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{3} +} +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (dst *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(dst, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +func (m *SimpleRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *SimpleRequest) GetResponseSize() int32 { + if m != nil { + return m.ResponseSize + } + return 0 +} + +func (m *SimpleRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleRequest) GetFillUsername() bool { + if m != nil { + return m.FillUsername + } + return false +} + +func (m *SimpleRequest) GetFillOauthScope() bool { + if m != nil { + return m.FillOauthScope + } + return false +} + +func (m *SimpleRequest) GetResponseStatus() *EchoStatus { + if m != nil { + return m.ResponseStatus + } + return nil +} + +// Unary response, as configured by the request. +type SimpleResponse struct { + // Payload to increase message size. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + // The user the request came from, for verifying authentication was + // successful when the client expected it. + Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"` + // OAuth scope. 
+ OauthScope string `protobuf:"bytes,3,opt,name=oauth_scope,json=oauthScope,proto3" json:"oauth_scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{4} +} +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(dst, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +func (m *SimpleResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleResponse) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *SimpleResponse) GetOauthScope() string { + if m != nil { + return m.OauthScope + } + return "" +} + +// Client-streaming request. +type StreamingInputCallRequest struct { + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallRequest) Reset() { *m = StreamingInputCallRequest{} } +func (m *StreamingInputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallRequest) ProtoMessage() {} +func (*StreamingInputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{5} +} +func (m *StreamingInputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallRequest.Unmarshal(m, b) +} +func (m *StreamingInputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallRequest.Merge(dst, src) +} +func (m *StreamingInputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallRequest.Size(m) +} +func (m *StreamingInputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallRequest proto.InternalMessageInfo + +func (m *StreamingInputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +// Client-streaming response. +type StreamingInputCallResponse struct { + // Aggregated size of payloads received from the client. 
+ AggregatedPayloadSize int32 `protobuf:"varint,1,opt,name=aggregated_payload_size,json=aggregatedPayloadSize,proto3" json:"aggregated_payload_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallResponse) Reset() { *m = StreamingInputCallResponse{} } +func (m *StreamingInputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallResponse) ProtoMessage() {} +func (*StreamingInputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{6} +} +func (m *StreamingInputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallResponse.Unmarshal(m, b) +} +func (m *StreamingInputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallResponse.Merge(dst, src) +} +func (m *StreamingInputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallResponse.Size(m) +} +func (m *StreamingInputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallResponse proto.InternalMessageInfo + +func (m *StreamingInputCallResponse) GetAggregatedPayloadSize() int32 { + if m != nil { + return m.AggregatedPayloadSize + } + return 0 +} + +// Configuration for a particular response. +type ResponseParameters struct { + // Desired payload sizes in responses from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + Size int32 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` + // Desired interval between consecutive responses in the response stream in + // microseconds. + IntervalUs int32 `protobuf:"varint,2,opt,name=interval_us,json=intervalUs,proto3" json:"interval_us,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponseParameters) Reset() { *m = ResponseParameters{} } +func (m *ResponseParameters) String() string { return proto.CompactTextString(m) } +func (*ResponseParameters) ProtoMessage() {} +func (*ResponseParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{7} +} +func (m *ResponseParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponseParameters.Unmarshal(m, b) +} +func (m *ResponseParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponseParameters.Marshal(b, m, deterministic) +} +func (dst *ResponseParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponseParameters.Merge(dst, src) +} +func (m *ResponseParameters) XXX_Size() int { + return xxx_messageInfo_ResponseParameters.Size(m) +} +func (m *ResponseParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ResponseParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponseParameters proto.InternalMessageInfo + +func (m *ResponseParameters) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *ResponseParameters) GetIntervalUs() int32 { + if m != nil { + return m.IntervalUs + } + return 0 +} + +// Server-streaming request. +type StreamingOutputCallRequest struct { + // Desired payload type in the response from the server. 
+ // If response_type is RANDOM, the payload from each response in the stream + // might be of different types. This is to simulate a mixed type of payload + // stream. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Configuration for each expected response message. + ResponseParameters []*ResponseParameters `protobuf:"bytes,2,rep,name=response_parameters,json=responseParameters,proto3" json:"response_parameters,omitempty"` + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + // Whether server should return a given status + ResponseStatus *EchoStatus `protobuf:"bytes,7,opt,name=response_status,json=responseStatus,proto3" json:"response_status,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallRequest) Reset() { *m = StreamingOutputCallRequest{} } +func (m *StreamingOutputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallRequest) ProtoMessage() {} +func (*StreamingOutputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{8} +} +func (m *StreamingOutputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallRequest.Unmarshal(m, b) +} +func (m *StreamingOutputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallRequest.Merge(dst, src) +} +func (m *StreamingOutputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallRequest.Size(m) +} +func (m *StreamingOutputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallRequest proto.InternalMessageInfo + +func (m *StreamingOutputCallRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *StreamingOutputCallRequest) GetResponseParameters() []*ResponseParameters { + if m != nil { + return m.ResponseParameters + } + return nil +} + +func (m *StreamingOutputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *StreamingOutputCallRequest) GetResponseStatus() *EchoStatus { + if m != nil { + return m.ResponseStatus + } + return nil +} + +// Server-streaming response, as configured by the request and parameters. +type StreamingOutputCallResponse struct { + // Payload to increase response size. 
+ Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallResponse) Reset() { *m = StreamingOutputCallResponse{} } +func (m *StreamingOutputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallResponse) ProtoMessage() {} +func (*StreamingOutputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_4001f755b984bb27, []int{9} +} +func (m *StreamingOutputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallResponse.Unmarshal(m, b) +} +func (m *StreamingOutputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallResponse.Merge(dst, src) +} +func (m *StreamingOutputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallResponse.Size(m) +} +func (m *StreamingOutputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallResponse proto.InternalMessageInfo + +func (m *StreamingOutputCallResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func init() { + proto.RegisterType((*Empty)(nil), "grpc.testing.Empty") + proto.RegisterType((*Payload)(nil), "grpc.testing.Payload") + proto.RegisterType((*EchoStatus)(nil), "grpc.testing.EchoStatus") + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") + proto.RegisterType((*StreamingInputCallRequest)(nil), "grpc.testing.StreamingInputCallRequest") + proto.RegisterType((*StreamingInputCallResponse)(nil), "grpc.testing.StreamingInputCallResponse") + proto.RegisterType((*ResponseParameters)(nil), "grpc.testing.ResponseParameters") + proto.RegisterType((*StreamingOutputCallRequest)(nil), "grpc.testing.StreamingOutputCallRequest") + proto.RegisterType((*StreamingOutputCallResponse)(nil), "grpc.testing.StreamingOutputCallResponse") + proto.RegisterEnum("grpc.testing.PayloadType", PayloadType_name, PayloadType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TestServiceClient is the client API for TestService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TestServiceClient interface { + // One empty request followed by one empty response. + EmptyCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) + // One request followed by one response. + // The server returns the client payload as-is. + UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) + // One request followed by a sequence of responses (streamed download). + // The server returns the payload with client desired type and sizes. 
+ StreamingOutputCall(ctx context.Context, in *StreamingOutputCallRequest, opts ...grpc.CallOption) (TestService_StreamingOutputCallClient, error) + // A sequence of requests followed by one response (streamed upload). + // The server returns the aggregated size of client payload as the result. + StreamingInputCall(ctx context.Context, opts ...grpc.CallOption) (TestService_StreamingInputCallClient, error) + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) + // A sequence of requests followed by a sequence of responses. + // The server buffers all the client requests and then serves them in order. A + // stream of responses are returned to the client when the server starts with + // first request. + HalfDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_HalfDuplexCallClient, error) +} + +type testServiceClient struct { + cc *grpc.ClientConn +} + +func NewTestServiceClient(cc *grpc.ClientConn) TestServiceClient { + return &testServiceClient{cc} +} + +func (c *testServiceClient) EmptyCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/grpc.testing.TestService/EmptyCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testServiceClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) { + out := new(SimpleResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.TestService/UnaryCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testServiceClient) StreamingOutputCall(ctx context.Context, in *StreamingOutputCallRequest, opts ...grpc.CallOption) (TestService_StreamingOutputCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[0], "/grpc.testing.TestService/StreamingOutputCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceStreamingOutputCallClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type TestService_StreamingOutputCallClient interface { + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceStreamingOutputCallClient struct { + grpc.ClientStream +} + +func (x *testServiceStreamingOutputCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) StreamingInputCall(ctx context.Context, opts ...grpc.CallOption) (TestService_StreamingInputCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[1], "/grpc.testing.TestService/StreamingInputCall", opts...) 
+ if err != nil { + return nil, err + } + x := &testServiceStreamingInputCallClient{stream} + return x, nil +} + +type TestService_StreamingInputCallClient interface { + Send(*StreamingInputCallRequest) error + CloseAndRecv() (*StreamingInputCallResponse, error) + grpc.ClientStream +} + +type testServiceStreamingInputCallClient struct { + grpc.ClientStream +} + +func (x *testServiceStreamingInputCallClient) Send(m *StreamingInputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceStreamingInputCallClient) CloseAndRecv() (*StreamingInputCallResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(StreamingInputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[2], "/grpc.testing.TestService/FullDuplexCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceFullDuplexCallClient{stream} + return x, nil +} + +type TestService_FullDuplexCallClient interface { + Send(*StreamingOutputCallRequest) error + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceFullDuplexCallClient struct { + grpc.ClientStream +} + +func (x *testServiceFullDuplexCallClient) Send(m *StreamingOutputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) HalfDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_HalfDuplexCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[3], "/grpc.testing.TestService/HalfDuplexCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceHalfDuplexCallClient{stream} + return x, nil +} + +type TestService_HalfDuplexCallClient interface { + Send(*StreamingOutputCallRequest) error + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceHalfDuplexCallClient struct { + grpc.ClientStream +} + +func (x *testServiceHalfDuplexCallClient) Send(m *StreamingOutputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceHalfDuplexCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TestServiceServer is the server API for TestService service. +type TestServiceServer interface { + // One empty request followed by one empty response. + EmptyCall(context.Context, *Empty) (*Empty, error) + // One request followed by one response. + // The server returns the client payload as-is. + UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) + // One request followed by a sequence of responses (streamed download). + // The server returns the payload with client desired type and sizes. + StreamingOutputCall(*StreamingOutputCallRequest, TestService_StreamingOutputCallServer) error + // A sequence of requests followed by one response (streamed upload). + // The server returns the aggregated size of client payload as the result. 
+ StreamingInputCall(TestService_StreamingInputCallServer) error + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + FullDuplexCall(TestService_FullDuplexCallServer) error + // A sequence of requests followed by a sequence of responses. + // The server buffers all the client requests and then serves them in order. A + // stream of responses are returned to the client when the server starts with + // first request. + HalfDuplexCall(TestService_HalfDuplexCallServer) error +} + +func RegisterTestServiceServer(s *grpc.Server, srv TestServiceServer) { + s.RegisterService(&_TestService_serviceDesc, srv) +} + +func _TestService_EmptyCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServiceServer).EmptyCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.TestService/EmptyCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServiceServer).EmptyCall(ctx, req.(*Empty)) + } + return interceptor(ctx, in, info, handler) +} + +func _TestService_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServiceServer).UnaryCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.TestService/UnaryCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServiceServer).UnaryCall(ctx, req.(*SimpleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TestService_StreamingOutputCall_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(StreamingOutputCallRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TestServiceServer).StreamingOutputCall(m, &testServiceStreamingOutputCallServer{stream}) +} + +type TestService_StreamingOutputCallServer interface { + Send(*StreamingOutputCallResponse) error + grpc.ServerStream +} + +type testServiceStreamingOutputCallServer struct { + grpc.ServerStream +} + +func (x *testServiceStreamingOutputCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _TestService_StreamingInputCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).StreamingInputCall(&testServiceStreamingInputCallServer{stream}) +} + +type TestService_StreamingInputCallServer interface { + SendAndClose(*StreamingInputCallResponse) error + Recv() (*StreamingInputCallRequest, error) + grpc.ServerStream +} + +type testServiceStreamingInputCallServer struct { + grpc.ServerStream +} + +func (x *testServiceStreamingInputCallServer) SendAndClose(m *StreamingInputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceStreamingInputCallServer) Recv() (*StreamingInputCallRequest, error) { + m := new(StreamingInputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_FullDuplexCall_Handler(srv interface{}, stream 
grpc.ServerStream) error { + return srv.(TestServiceServer).FullDuplexCall(&testServiceFullDuplexCallServer{stream}) +} + +type TestService_FullDuplexCallServer interface { + Send(*StreamingOutputCallResponse) error + Recv() (*StreamingOutputCallRequest, error) + grpc.ServerStream +} + +type testServiceFullDuplexCallServer struct { + grpc.ServerStream +} + +func (x *testServiceFullDuplexCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallServer) Recv() (*StreamingOutputCallRequest, error) { + m := new(StreamingOutputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_HalfDuplexCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).HalfDuplexCall(&testServiceHalfDuplexCallServer{stream}) +} + +type TestService_HalfDuplexCallServer interface { + Send(*StreamingOutputCallResponse) error + Recv() (*StreamingOutputCallRequest, error) + grpc.ServerStream +} + +type testServiceHalfDuplexCallServer struct { + grpc.ServerStream +} + +func (x *testServiceHalfDuplexCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceHalfDuplexCallServer) Recv() (*StreamingOutputCallRequest, error) { + m := new(StreamingOutputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _TestService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.TestService", + HandlerType: (*TestServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "EmptyCall", + Handler: _TestService_EmptyCall_Handler, + }, + { + MethodName: "UnaryCall", + Handler: _TestService_UnaryCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingOutputCall", + Handler: _TestService_StreamingOutputCall_Handler, + ServerStreams: true, + }, + { + StreamName: "StreamingInputCall", + Handler: _TestService_StreamingInputCall_Handler, + ClientStreams: true, + }, + { + StreamName: "FullDuplexCall", + Handler: _TestService_FullDuplexCall_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "HalfDuplexCall", + Handler: _TestService_HalfDuplexCall_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc_testing/test.proto", +} + +// UnimplementedServiceClient is the client API for UnimplementedService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type UnimplementedServiceClient interface { + // A call that no server should implement + UnimplementedCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) +} + +type unimplementedServiceClient struct { + cc *grpc.ClientConn +} + +func NewUnimplementedServiceClient(cc *grpc.ClientConn) UnimplementedServiceClient { + return &unimplementedServiceClient{cc} +} + +func (c *unimplementedServiceClient) UnimplementedCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/grpc.testing.UnimplementedService/UnimplementedCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// UnimplementedServiceServer is the server API for UnimplementedService service. 
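The TestService definitions above leave the streaming pattern implicit; the sketch below is a hypothetical, partial FullDuplexCall implementation (not the interop package's actual test server) showing the Recv/Send loop the generated TestService_FullDuplexCallServer interface expects.

package main

import (
	"io"
	"log"
	"net"

	"google.golang.org/grpc"
	testpb "google.golang.org/grpc/interop/grpc_testing"
)

// echoTestServer embeds the generated interface so the struct satisfies
// TestServiceServer while only FullDuplexCall is actually implemented;
// the remaining methods would panic if called, which is fine for a sketch.
type echoTestServer struct {
	testpb.TestServiceServer
}

// FullDuplexCall sends, for every request received, one response per
// requested ResponseParameters entry, sized as the client asked.
func (s *echoTestServer) FullDuplexCall(stream testpb.TestService_FullDuplexCallServer) error {
	for {
		req, err := stream.Recv()
		if err == io.EOF {
			return nil // client closed its side of the stream
		}
		if err != nil {
			return err
		}
		for _, p := range req.GetResponseParameters() {
			resp := &testpb.StreamingOutputCallResponse{
				Payload: &testpb.Payload{
					Type: req.GetResponseType(),
					Body: make([]byte, p.GetSize()),
				},
			}
			if err := stream.Send(resp); err != nil {
				return err
			}
		}
	}
}

func main() {
	lis, err := net.Listen("tcp", "localhost:0") // hypothetical ephemeral port
	if err != nil {
		log.Fatal(err)
	}
	srv := grpc.NewServer()
	testpb.RegisterTestServiceServer(srv, &echoTestServer{})
	log.Printf("test service listening on %v", lis.Addr())
	log.Fatal(srv.Serve(lis))
}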
+type UnimplementedServiceServer interface { + // A call that no server should implement + UnimplementedCall(context.Context, *Empty) (*Empty, error) +} + +func RegisterUnimplementedServiceServer(s *grpc.Server, srv UnimplementedServiceServer) { + s.RegisterService(&_UnimplementedService_serviceDesc, srv) +} + +func _UnimplementedService_UnimplementedCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(UnimplementedServiceServer).UnimplementedCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.UnimplementedService/UnimplementedCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(UnimplementedServiceServer).UnimplementedCall(ctx, req.(*Empty)) + } + return interceptor(ctx, in, info, handler) +} + +var _UnimplementedService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.UnimplementedService", + HandlerType: (*UnimplementedServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UnimplementedCall", + Handler: _UnimplementedService_UnimplementedCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc_testing/test.proto", +} + +func init() { proto.RegisterFile("grpc_testing/test.proto", fileDescriptor_test_4001f755b984bb27) } + +var fileDescriptor_test_4001f755b984bb27 = []byte{ + // 664 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xdd, 0x6e, 0xd3, 0x4c, + 0x10, 0xfd, 0x9c, 0x26, 0x4d, 0x3b, 0x49, 0xfd, 0x85, 0x2d, 0x55, 0xdd, 0x14, 0x89, 0xc8, 0x5c, + 0x60, 0x90, 0x48, 0x51, 0x10, 0x5c, 0x20, 0x01, 0x2a, 0x6d, 0x2a, 0x2a, 0xb5, 0x4d, 0xb1, 0x9b, + 0xeb, 0x68, 0x9b, 0x4c, 0x5d, 0x4b, 0xfe, 0xc3, 0xbb, 0xae, 0x48, 0x2f, 0x78, 0x19, 0x1e, 0x82, + 0x0b, 0x5e, 0x0e, 0xed, 0xda, 0x4e, 0x9c, 0xd4, 0x15, 0x0d, 0x7f, 0x57, 0xf1, 0x9e, 0x39, 0x33, + 0x3b, 0x67, 0xe6, 0xd8, 0x81, 0x4d, 0x3b, 0x0a, 0x87, 0x03, 0x8e, 0x8c, 0x3b, 0xbe, 0xbd, 0x23, + 0x7e, 0xdb, 0x61, 0x14, 0xf0, 0x80, 0xd4, 0x45, 0xa0, 0x9d, 0x06, 0xf4, 0x2a, 0x54, 0xba, 0x5e, + 0xc8, 0xc7, 0xfa, 0x11, 0x54, 0x4f, 0xe9, 0xd8, 0x0d, 0xe8, 0x88, 0x3c, 0x83, 0x32, 0x1f, 0x87, + 0xa8, 0x29, 0x2d, 0xc5, 0x50, 0x3b, 0x5b, 0xed, 0x7c, 0x42, 0x3b, 0x25, 0x9d, 0x8d, 0x43, 0x34, + 0x25, 0x8d, 0x10, 0x28, 0x9f, 0x07, 0xa3, 0xb1, 0x56, 0x6a, 0x29, 0x46, 0xdd, 0x94, 0xcf, 0xfa, + 0x6b, 0x80, 0xee, 0xf0, 0x32, 0xb0, 0x38, 0xe5, 0x31, 0x13, 0x8c, 0x61, 0x30, 0x4a, 0x0a, 0x56, + 0x4c, 0xf9, 0x4c, 0x34, 0xa8, 0x7a, 0xc8, 0x18, 0xb5, 0x51, 0x26, 0xae, 0x9a, 0xd9, 0x51, 0xff, + 0x56, 0x82, 0x35, 0xcb, 0xf1, 0x42, 0x17, 0x4d, 0xfc, 0x14, 0x23, 0xe3, 0xe4, 0x2d, 0xac, 0x45, + 0xc8, 0xc2, 0xc0, 0x67, 0x38, 0xb8, 0x5b, 0x67, 0xf5, 0x8c, 0x2f, 0x4e, 0xe4, 0x51, 0x2e, 0x9f, + 0x39, 0xd7, 0xc9, 0x8d, 0x95, 0x29, 0xc9, 0x72, 0xae, 0x91, 0xec, 0x40, 0x35, 0x4c, 0x2a, 0x68, + 0x4b, 0x2d, 0xc5, 0xa8, 0x75, 0x36, 0x0a, 0xcb, 0x9b, 0x19, 0x4b, 0x54, 0xbd, 0x70, 0x5c, 0x77, + 0x10, 0x33, 0x8c, 0x7c, 0xea, 0xa1, 0x56, 0x6e, 0x29, 0xc6, 0x8a, 0x59, 0x17, 0x60, 0x3f, 0xc5, + 0x88, 0x01, 0x0d, 0x49, 0x0a, 0x68, 0xcc, 0x2f, 0x07, 0x6c, 0x18, 0x84, 0xa8, 0x55, 0x24, 0x4f, + 0x15, 0x78, 0x4f, 0xc0, 0x96, 0x40, 0xc9, 0x2e, 0xfc, 0x3f, 0x6d, 0x52, 0xce, 0x4d, 0xab, 0xca, + 0x3e, 0xb4, 0xd9, 0x3e, 0xa6, 0x73, 0x35, 0xd5, 0x89, 0x00, 0x79, 0xd6, 0xbf, 0x80, 0x9a, 0x0d, + 0x2e, 
0xc1, 0xf3, 0xa2, 0x94, 0x3b, 0x89, 0x6a, 0xc2, 0xca, 0x44, 0x4f, 0xb2, 0x97, 0xc9, 0x99, + 0x3c, 0x84, 0x5a, 0x5e, 0xc6, 0x92, 0x0c, 0x43, 0x30, 0x91, 0xa0, 0x1f, 0xc1, 0x96, 0xc5, 0x23, + 0xa4, 0x9e, 0xe3, 0xdb, 0x87, 0x7e, 0x18, 0xf3, 0x3d, 0xea, 0xba, 0xd9, 0x12, 0x17, 0x6d, 0x45, + 0x3f, 0x83, 0x66, 0x51, 0xb5, 0x54, 0xd9, 0x2b, 0xd8, 0xa4, 0xb6, 0x1d, 0xa1, 0x4d, 0x39, 0x8e, + 0x06, 0x69, 0x4e, 0xb2, 0xdd, 0xc4, 0x66, 0x1b, 0xd3, 0x70, 0x5a, 0x5a, 0xac, 0x59, 0x3f, 0x04, + 0x92, 0xd5, 0x38, 0xa5, 0x11, 0xf5, 0x90, 0x63, 0x24, 0x1d, 0x9a, 0x4b, 0x95, 0xcf, 0x42, 0xae, + 0xe3, 0x73, 0x8c, 0xae, 0xa8, 0xd8, 0x71, 0xea, 0x19, 0xc8, 0xa0, 0x3e, 0xd3, 0xbf, 0x96, 0x72, + 0x1d, 0xf6, 0x62, 0x3e, 0x27, 0xf8, 0x77, 0x5d, 0xfb, 0x11, 0xd6, 0x27, 0xf9, 0xe1, 0xa4, 0x55, + 0xad, 0xd4, 0x5a, 0x32, 0x6a, 0x9d, 0xd6, 0x6c, 0x95, 0x9b, 0x92, 0x4c, 0x12, 0xdd, 0x94, 0xb9, + 0xb0, 0xc7, 0xff, 0x80, 0x29, 0x4f, 0x60, 0xbb, 0x70, 0x48, 0xbf, 0xe8, 0xd0, 0xa7, 0xef, 0xa0, + 0x96, 0x9b, 0x19, 0x69, 0x40, 0x7d, 0xaf, 0x77, 0x7c, 0x6a, 0x76, 0x2d, 0x6b, 0xf7, 0xfd, 0x51, + 0xb7, 0xf1, 0x1f, 0x21, 0xa0, 0xf6, 0x4f, 0x66, 0x30, 0x85, 0x00, 0x2c, 0x9b, 0xbb, 0x27, 0xfb, + 0xbd, 0xe3, 0x46, 0xa9, 0xf3, 0xbd, 0x0c, 0xb5, 0x33, 0x64, 0xdc, 0xc2, 0xe8, 0xca, 0x19, 0x22, + 0x79, 0x09, 0xab, 0xf2, 0x13, 0x28, 0xda, 0x22, 0xeb, 0x73, 0xba, 0x44, 0xa0, 0x59, 0x04, 0x92, + 0x03, 0x58, 0xed, 0xfb, 0x34, 0x4a, 0xd2, 0xb6, 0x67, 0x19, 0x33, 0x9f, 0xaf, 0xe6, 0x83, 0xe2, + 0x60, 0x3a, 0x00, 0x17, 0xd6, 0x0b, 0xe6, 0x43, 0x8c, 0xb9, 0xa4, 0x5b, 0x7d, 0xd6, 0x7c, 0x72, + 0x07, 0x66, 0x72, 0xd7, 0x73, 0x85, 0x38, 0x40, 0x6e, 0xbe, 0x54, 0xe4, 0xf1, 0x2d, 0x25, 0xe6, + 0x5f, 0xe2, 0xa6, 0xf1, 0x73, 0x62, 0x72, 0x95, 0x21, 0xae, 0x52, 0x0f, 0x62, 0xd7, 0xdd, 0x8f, + 0x43, 0x17, 0x3f, 0xff, 0x35, 0x4d, 0x86, 0x22, 0x55, 0xa9, 0x1f, 0xa8, 0x7b, 0xf1, 0x0f, 0xae, + 0xea, 0xf4, 0xe1, 0x7e, 0xdf, 0x97, 0x1b, 0xf4, 0xd0, 0xe7, 0x38, 0xca, 0x5c, 0xf4, 0x06, 0xee, + 0xcd, 0xe0, 0x8b, 0xb9, 0xe9, 0x7c, 0x59, 0xfe, 0x39, 0xbf, 0xf8, 0x11, 0x00, 0x00, 0xff, 0xff, + 0x87, 0xd4, 0xf3, 0x98, 0xb7, 0x07, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/interop/http2/negative_http2_client.go b/vendor/google.golang.org/grpc/interop/http2/negative_http2_client.go new file mode 100644 index 0000000..2d7da13 --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/http2/negative_http2_client.go @@ -0,0 +1,159 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + * + * Client used to test http2 error edge cases like GOAWAYs and RST_STREAMs + * + * Documentation: + * https://github.com/grpc/grpc/blob/master/doc/negative-http2-interop-test-descriptions.md + */ + +package main + +import ( + "context" + "flag" + "net" + "strconv" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/interop" + testpb "google.golang.org/grpc/interop/grpc_testing" + "google.golang.org/grpc/status" +) + +var ( + serverHost = flag.String("server_host", "127.0.0.1", "The server host name") + serverPort = flag.Int("server_port", 8080, "The server port number") + testCase = flag.String("test_case", "goaway", + `Configure different test cases. Valid options are: + goaway : client sends two requests, the server will send a goaway in between; + rst_after_header : server will send rst_stream after it sends headers; + rst_during_data : server will send rst_stream while sending data; + rst_after_data : server will send rst_stream after sending data; + ping : server will send pings between each http2 frame; + max_streams : server will ensure that the max_concurrent_streams limit is upheld;`) + largeReqSize = 271828 + largeRespSize = 314159 +) + +func largeSimpleRequest() *testpb.SimpleRequest { + pl := interop.ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + return &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + } +} + +// sends two unary calls. The server asserts that the calls use different connections. +func goaway(tc testpb.TestServiceClient) { + interop.DoLargeUnaryCall(tc) + // sleep to ensure that the client has time to recv the GOAWAY. + // TODO(ncteisen): make this less hacky. + time.Sleep(1 * time.Second) + interop.DoLargeUnaryCall(tc) +} + +func rstAfterHeader(tc testpb.TestServiceClient) { + req := largeSimpleRequest() + reply, err := tc.UnaryCall(context.Background(), req) + if reply != nil { + grpclog.Fatalf("Client received reply despite server sending rst stream after header") + } + if status.Code(err) != codes.Internal { + grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Internal) + } +} + +func rstDuringData(tc testpb.TestServiceClient) { + req := largeSimpleRequest() + reply, err := tc.UnaryCall(context.Background(), req) + if reply != nil { + grpclog.Fatalf("Client received reply despite server sending rst stream during data") + } + if status.Code(err) != codes.Unknown { + grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Unknown) + } +} + +func rstAfterData(tc testpb.TestServiceClient) { + req := largeSimpleRequest() + reply, err := tc.UnaryCall(context.Background(), req) + if reply != nil { + grpclog.Fatalf("Client received reply despite server sending rst stream after data") + } + if status.Code(err) != codes.Internal { + grpclog.Fatalf("%v.UnaryCall() = _, %v, want _, %v", tc, status.Code(err), codes.Internal) + } +} + +func ping(tc testpb.TestServiceClient) { + // The server will assert that every ping it sends was ACK-ed by the client. 
+ interop.DoLargeUnaryCall(tc) +} + +func maxStreams(tc testpb.TestServiceClient) { + interop.DoLargeUnaryCall(tc) + var wg sync.WaitGroup + for i := 0; i < 15; i++ { + wg.Add(1) + go func() { + defer wg.Done() + interop.DoLargeUnaryCall(tc) + }() + } + wg.Wait() +} + +func main() { + flag.Parse() + serverAddr := net.JoinHostPort(*serverHost, strconv.Itoa(*serverPort)) + var opts []grpc.DialOption + opts = append(opts, grpc.WithInsecure()) + conn, err := grpc.Dial(serverAddr, opts...) + if err != nil { + grpclog.Fatalf("Fail to dial: %v", err) + } + defer conn.Close() + tc := testpb.NewTestServiceClient(conn) + switch *testCase { + case "goaway": + goaway(tc) + grpclog.Infoln("goaway done") + case "rst_after_header": + rstAfterHeader(tc) + grpclog.Infoln("rst_after_header done") + case "rst_during_data": + rstDuringData(tc) + grpclog.Infoln("rst_during_data done") + case "rst_after_data": + rstAfterData(tc) + grpclog.Infoln("rst_after_data done") + case "ping": + ping(tc) + grpclog.Infoln("ping done") + case "max_streams": + maxStreams(tc) + grpclog.Infoln("max_streams done") + default: + grpclog.Fatal("Unsupported test case: ", *testCase) + } +} diff --git a/vendor/google.golang.org/grpc/interop/server/server.go b/vendor/google.golang.org/grpc/interop/server/server.go new file mode 100644 index 0000000..dd0e897 --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/server/server.go @@ -0,0 +1,78 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package main + +import ( + "flag" + "net" + "strconv" + + "google.golang.org/grpc" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/alts" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/interop" + testpb "google.golang.org/grpc/interop/grpc_testing" + "google.golang.org/grpc/testdata" +) + +var ( + useTLS = flag.Bool("use_tls", false, "Connection uses TLS if true, else plain TCP") + useALTS = flag.Bool("use_alts", false, "Connection uses ALTS if true (this option can only be used on GCP)") + altsHSAddr = flag.String("alts_handshaker_service_address", "", "ALTS handshaker gRPC service address") + certFile = flag.String("tls_cert_file", "", "The TLS cert file") + keyFile = flag.String("tls_key_file", "", "The TLS key file") + port = flag.Int("port", 10000, "The server port") +) + +func main() { + flag.Parse() + if *useTLS && *useALTS { + grpclog.Fatalf("use_tls and use_alts cannot be both set to true") + } + p := strconv.Itoa(*port) + lis, err := net.Listen("tcp", ":"+p) + if err != nil { + grpclog.Fatalf("failed to listen: %v", err) + } + var opts []grpc.ServerOption + if *useTLS { + if *certFile == "" { + *certFile = testdata.Path("server1.pem") + } + if *keyFile == "" { + *keyFile = testdata.Path("server1.key") + } + creds, err := credentials.NewServerTLSFromFile(*certFile, *keyFile) + if err != nil { + grpclog.Fatalf("Failed to generate credentials %v", err) + } + opts = append(opts, grpc.Creds(creds)) + } else if *useALTS { + altsOpts := alts.DefaultServerOptions() + if *altsHSAddr != "" { + altsOpts.HandshakerServiceAddress = *altsHSAddr + } + altsTC := alts.NewServerCreds(altsOpts) + opts = append(opts, grpc.Creds(altsTC)) + } + server := grpc.NewServer(opts...) + testpb.RegisterTestServiceServer(server, interop.NewTestServer()) + server.Serve(lis) +} diff --git a/vendor/google.golang.org/grpc/interop/test_utils.go b/vendor/google.golang.org/grpc/interop/test_utils.go new file mode 100644 index 0000000..da57fbf --- /dev/null +++ b/vendor/google.golang.org/grpc/interop/test_utils.go @@ -0,0 +1,804 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc --go_out=plugins=grpc:. grpc_testing/test.proto + +package interop + +import ( + "context" + "fmt" + "io" + "io/ioutil" + "strings" + "time" + + "github.com/golang/protobuf/proto" + "golang.org/x/oauth2" + "golang.org/x/oauth2/google" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + testpb "google.golang.org/grpc/interop/grpc_testing" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" +) + +var ( + reqSizes = []int{27182, 8, 1828, 45904} + respSizes = []int{31415, 9, 2653, 58979} + largeReqSize = 271828 + largeRespSize = 314159 + initialMetadataKey = "x-grpc-test-echo-initial" + trailingMetadataKey = "x-grpc-test-echo-trailing-bin" +) + +// ClientNewPayload returns a payload of the given type and size. 
+func ClientNewPayload(t testpb.PayloadType, size int) *testpb.Payload { + if size < 0 { + grpclog.Fatalf("Requested a response with invalid length %d", size) + } + body := make([]byte, size) + switch t { + case testpb.PayloadType_COMPRESSABLE: + case testpb.PayloadType_UNCOMPRESSABLE: + grpclog.Fatalf("PayloadType UNCOMPRESSABLE is not supported") + default: + grpclog.Fatalf("Unsupported payload type: %d", t) + } + return &testpb.Payload{ + Type: t, + Body: body, + } +} + +// DoEmptyUnaryCall performs a unary RPC with empty request and response messages. +func DoEmptyUnaryCall(tc testpb.TestServiceClient, args ...grpc.CallOption) { + reply, err := tc.EmptyCall(context.Background(), &testpb.Empty{}, args...) + if err != nil { + grpclog.Fatal("/TestService/EmptyCall RPC failed: ", err) + } + if !proto.Equal(&testpb.Empty{}, reply) { + grpclog.Fatalf("/TestService/EmptyCall receives %v, want %v", reply, testpb.Empty{}) + } +} + +// DoLargeUnaryCall performs a unary RPC with large payload in the request and response. +func DoLargeUnaryCall(tc testpb.TestServiceClient, args ...grpc.CallOption) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + } + reply, err := tc.UnaryCall(context.Background(), req, args...) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + t := reply.GetPayload().GetType() + s := len(reply.GetPayload().GetBody()) + if t != testpb.PayloadType_COMPRESSABLE || s != largeRespSize { + grpclog.Fatalf("Got the reply with type %d len %d; want %d, %d", t, s, testpb.PayloadType_COMPRESSABLE, largeRespSize) + } +} + +// DoClientStreaming performs a client streaming RPC. +func DoClientStreaming(tc testpb.TestServiceClient, args ...grpc.CallOption) { + stream, err := tc.StreamingInputCall(context.Background(), args...) + if err != nil { + grpclog.Fatalf("%v.StreamingInputCall(_) = _, %v", tc, err) + } + var sum int + for _, s := range reqSizes { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, s) + req := &testpb.StreamingInputCallRequest{ + Payload: pl, + } + if err := stream.Send(req); err != nil { + grpclog.Fatalf("%v has error %v while sending %v", stream, err, req) + } + sum += s + } + reply, err := stream.CloseAndRecv() + if err != nil { + grpclog.Fatalf("%v.CloseAndRecv() got error %v, want %v", stream, err, nil) + } + if reply.GetAggregatedPayloadSize() != int32(sum) { + grpclog.Fatalf("%v.CloseAndRecv().GetAggregatePayloadSize() = %v; want %v", stream, reply.GetAggregatedPayloadSize(), sum) + } +} + +// DoServerStreaming performs a server streaming RPC. +func DoServerStreaming(tc testpb.TestServiceClient, args ...grpc.CallOption) { + respParam := make([]*testpb.ResponseParameters, len(respSizes)) + for i, s := range respSizes { + respParam[i] = &testpb.ResponseParameters{ + Size: int32(s), + } + } + req := &testpb.StreamingOutputCallRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseParameters: respParam, + } + stream, err := tc.StreamingOutputCall(context.Background(), req, args...) 
+ if err != nil { + grpclog.Fatalf("%v.StreamingOutputCall(_) = _, %v", tc, err) + } + var rpcStatus error + var respCnt int + var index int + for { + reply, err := stream.Recv() + if err != nil { + rpcStatus = err + break + } + t := reply.GetPayload().GetType() + if t != testpb.PayloadType_COMPRESSABLE { + grpclog.Fatalf("Got the reply of type %d, want %d", t, testpb.PayloadType_COMPRESSABLE) + } + size := len(reply.GetPayload().GetBody()) + if size != respSizes[index] { + grpclog.Fatalf("Got reply body of length %d, want %d", size, respSizes[index]) + } + index++ + respCnt++ + } + if rpcStatus != io.EOF { + grpclog.Fatalf("Failed to finish the server streaming rpc: %v", rpcStatus) + } + if respCnt != len(respSizes) { + grpclog.Fatalf("Got %d reply, want %d", len(respSizes), respCnt) + } +} + +// DoPingPong performs ping-pong style bi-directional streaming RPC. +func DoPingPong(tc testpb.TestServiceClient, args ...grpc.CallOption) { + stream, err := tc.FullDuplexCall(context.Background(), args...) + if err != nil { + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v", tc, err) + } + var index int + for index < len(reqSizes) { + respParam := []*testpb.ResponseParameters{ + { + Size: int32(respSizes[index]), + }, + } + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, reqSizes[index]) + req := &testpb.StreamingOutputCallRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseParameters: respParam, + Payload: pl, + } + if err := stream.Send(req); err != nil { + grpclog.Fatalf("%v has error %v while sending %v", stream, err, req) + } + reply, err := stream.Recv() + if err != nil { + grpclog.Fatalf("%v.Recv() = %v", stream, err) + } + t := reply.GetPayload().GetType() + if t != testpb.PayloadType_COMPRESSABLE { + grpclog.Fatalf("Got the reply of type %d, want %d", t, testpb.PayloadType_COMPRESSABLE) + } + size := len(reply.GetPayload().GetBody()) + if size != respSizes[index] { + grpclog.Fatalf("Got reply body of length %d, want %d", size, respSizes[index]) + } + index++ + } + if err := stream.CloseSend(); err != nil { + grpclog.Fatalf("%v.CloseSend() got %v, want %v", stream, err, nil) + } + if _, err := stream.Recv(); err != io.EOF { + grpclog.Fatalf("%v failed to complele the ping pong test: %v", stream, err) + } +} + +// DoEmptyStream sets up a bi-directional streaming with zero message. +func DoEmptyStream(tc testpb.TestServiceClient, args ...grpc.CallOption) { + stream, err := tc.FullDuplexCall(context.Background(), args...) + if err != nil { + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v", tc, err) + } + if err := stream.CloseSend(); err != nil { + grpclog.Fatalf("%v.CloseSend() got %v, want %v", stream, err, nil) + } + if _, err := stream.Recv(); err != io.EOF { + grpclog.Fatalf("%v failed to complete the empty stream test: %v", stream, err) + } +} + +// DoTimeoutOnSleepingServer performs an RPC on a sleep server which causes RPC timeout. +func DoTimeoutOnSleepingServer(tc testpb.TestServiceClient, args ...grpc.CallOption) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Millisecond) + defer cancel() + stream, err := tc.FullDuplexCall(ctx, args...) 
+ if err != nil { + if status.Code(err) == codes.DeadlineExceeded { + return + } + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v", tc, err) + } + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, 27182) + req := &testpb.StreamingOutputCallRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + Payload: pl, + } + if err := stream.Send(req); err != nil && err != io.EOF { + grpclog.Fatalf("%v.Send(_) = %v", stream, err) + } + if _, err := stream.Recv(); status.Code(err) != codes.DeadlineExceeded { + grpclog.Fatalf("%v.Recv() = _, %v, want error code %d", stream, err, codes.DeadlineExceeded) + } +} + +// DoComputeEngineCreds performs a unary RPC with compute engine auth. +func DoComputeEngineCreds(tc testpb.TestServiceClient, serviceAccount, oauthScope string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + user := reply.GetUsername() + scope := reply.GetOauthScope() + if user != serviceAccount { + grpclog.Fatalf("Got user name %q, want %q.", user, serviceAccount) + } + if !strings.Contains(oauthScope, scope) { + grpclog.Fatalf("Got OAuth scope %q which is NOT a substring of %q.", scope, oauthScope) + } +} + +func getServiceAccountJSONKey(keyFile string) []byte { + jsonKey, err := ioutil.ReadFile(keyFile) + if err != nil { + grpclog.Fatalf("Failed to read the service account key file: %v", err) + } + return jsonKey +} + +// DoServiceAccountCreds performs a unary RPC with service account auth. +func DoServiceAccountCreds(tc testpb.TestServiceClient, serviceAccountKeyFile, oauthScope string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + jsonKey := getServiceAccountJSONKey(serviceAccountKeyFile) + user := reply.GetUsername() + scope := reply.GetOauthScope() + if !strings.Contains(string(jsonKey), user) { + grpclog.Fatalf("Got user name %q which is NOT a substring of %q.", user, jsonKey) + } + if !strings.Contains(oauthScope, scope) { + grpclog.Fatalf("Got OAuth scope %q which is NOT a substring of %q.", scope, oauthScope) + } +} + +// DoJWTTokenCreds performs a unary RPC with JWT token auth. +func DoJWTTokenCreds(tc testpb.TestServiceClient, serviceAccountKeyFile string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + jsonKey := getServiceAccountJSONKey(serviceAccountKeyFile) + user := reply.GetUsername() + if !strings.Contains(string(jsonKey), user) { + grpclog.Fatalf("Got user name %q which is NOT a substring of %q.", user, jsonKey) + } +} + +// GetToken obtains an OAUTH token from the input. 
+func GetToken(serviceAccountKeyFile string, oauthScope string) *oauth2.Token { + jsonKey := getServiceAccountJSONKey(serviceAccountKeyFile) + config, err := google.JWTConfigFromJSON(jsonKey, oauthScope) + if err != nil { + grpclog.Fatalf("Failed to get the config: %v", err) + } + token, err := config.TokenSource(context.Background()).Token() + if err != nil { + grpclog.Fatalf("Failed to get the token: %v", err) + } + return token +} + +// DoOauth2TokenCreds performs a unary RPC with OAUTH2 token auth. +func DoOauth2TokenCreds(tc testpb.TestServiceClient, serviceAccountKeyFile, oauthScope string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + jsonKey := getServiceAccountJSONKey(serviceAccountKeyFile) + user := reply.GetUsername() + scope := reply.GetOauthScope() + if !strings.Contains(string(jsonKey), user) { + grpclog.Fatalf("Got user name %q which is NOT a substring of %q.", user, jsonKey) + } + if !strings.Contains(oauthScope, scope) { + grpclog.Fatalf("Got OAuth scope %q which is NOT a substring of %q.", scope, oauthScope) + } +} + +// DoPerRPCCreds performs a unary RPC with per RPC OAUTH2 token. +func DoPerRPCCreds(tc testpb.TestServiceClient, serviceAccountKeyFile, oauthScope string) { + jsonKey := getServiceAccountJSONKey(serviceAccountKeyFile) + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + token := GetToken(serviceAccountKeyFile, oauthScope) + kv := map[string]string{"authorization": token.Type() + " " + token.AccessToken} + ctx := metadata.NewOutgoingContext(context.Background(), metadata.MD{"authorization": []string{kv["authorization"]}}) + reply, err := tc.UnaryCall(ctx, req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + user := reply.GetUsername() + scope := reply.GetOauthScope() + if !strings.Contains(string(jsonKey), user) { + grpclog.Fatalf("Got user name %q which is NOT a substring of %q.", user, jsonKey) + } + if !strings.Contains(oauthScope, scope) { + grpclog.Fatalf("Got OAuth scope %q which is NOT a substring of %q.", scope, oauthScope) + } +} + +// DoGoogleDefaultCredentials performs an unary RPC with google default credentials +func DoGoogleDefaultCredentials(tc testpb.TestServiceClient, defaultServiceAccount string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + if reply.GetUsername() != defaultServiceAccount { + grpclog.Fatalf("Got user name %q; wanted %q. 
", reply.GetUsername(), defaultServiceAccount) + } +} + +// DoComputeEngineChannelCredentials performs an unary RPC with compute engine channel credentials +func DoComputeEngineChannelCredentials(tc testpb.TestServiceClient, defaultServiceAccount string) { + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, largeReqSize) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(largeRespSize), + Payload: pl, + FillUsername: true, + FillOauthScope: true, + } + reply, err := tc.UnaryCall(context.Background(), req) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + if reply.GetUsername() != defaultServiceAccount { + grpclog.Fatalf("Got user name %q; wanted %q. ", reply.GetUsername(), defaultServiceAccount) + } +} + +var testMetadata = metadata.MD{ + "key1": []string{"value1"}, + "key2": []string{"value2"}, +} + +// DoCancelAfterBegin cancels the RPC after metadata has been sent but before payloads are sent. +func DoCancelAfterBegin(tc testpb.TestServiceClient, args ...grpc.CallOption) { + ctx, cancel := context.WithCancel(metadata.NewOutgoingContext(context.Background(), testMetadata)) + stream, err := tc.StreamingInputCall(ctx, args...) + if err != nil { + grpclog.Fatalf("%v.StreamingInputCall(_) = _, %v", tc, err) + } + cancel() + _, err = stream.CloseAndRecv() + if status.Code(err) != codes.Canceled { + grpclog.Fatalf("%v.CloseAndRecv() got error code %d, want %d", stream, status.Code(err), codes.Canceled) + } +} + +// DoCancelAfterFirstResponse cancels the RPC after receiving the first message from the server. +func DoCancelAfterFirstResponse(tc testpb.TestServiceClient, args ...grpc.CallOption) { + ctx, cancel := context.WithCancel(context.Background()) + stream, err := tc.FullDuplexCall(ctx, args...) + if err != nil { + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v", tc, err) + } + respParam := []*testpb.ResponseParameters{ + { + Size: 31415, + }, + } + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, 27182) + req := &testpb.StreamingOutputCallRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseParameters: respParam, + Payload: pl, + } + if err := stream.Send(req); err != nil { + grpclog.Fatalf("%v has error %v while sending %v", stream, err, req) + } + if _, err := stream.Recv(); err != nil { + grpclog.Fatalf("%v.Recv() = %v", stream, err) + } + cancel() + if _, err := stream.Recv(); status.Code(err) != codes.Canceled { + grpclog.Fatalf("%v compleled with error code %d, want %d", stream, status.Code(err), codes.Canceled) + } +} + +var ( + initialMetadataValue = "test_initial_metadata_value" + trailingMetadataValue = "\x0a\x0b\x0a\x0b\x0a\x0b" + customMetadata = metadata.Pairs( + initialMetadataKey, initialMetadataValue, + trailingMetadataKey, trailingMetadataValue, + ) +) + +func validateMetadata(header, trailer metadata.MD) { + if len(header[initialMetadataKey]) != 1 { + grpclog.Fatalf("Expected exactly one header from server. Received %d", len(header[initialMetadataKey])) + } + if header[initialMetadataKey][0] != initialMetadataValue { + grpclog.Fatalf("Got header %s; want %s", header[initialMetadataKey][0], initialMetadataValue) + } + if len(trailer[trailingMetadataKey]) != 1 { + grpclog.Fatalf("Expected exactly one trailer from server. 
Received %d", len(trailer[trailingMetadataKey])) + } + if trailer[trailingMetadataKey][0] != trailingMetadataValue { + grpclog.Fatalf("Got trailer %s; want %s", trailer[trailingMetadataKey][0], trailingMetadataValue) + } +} + +// DoCustomMetadata checks that metadata is echoed back to the client. +func DoCustomMetadata(tc testpb.TestServiceClient, args ...grpc.CallOption) { + // Testing with UnaryCall. + pl := ClientNewPayload(testpb.PayloadType_COMPRESSABLE, 1) + req := &testpb.SimpleRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseSize: int32(1), + Payload: pl, + } + ctx := metadata.NewOutgoingContext(context.Background(), customMetadata) + var header, trailer metadata.MD + args = append(args, grpc.Header(&header), grpc.Trailer(&trailer)) + reply, err := tc.UnaryCall( + ctx, + req, + args..., + ) + if err != nil { + grpclog.Fatal("/TestService/UnaryCall RPC failed: ", err) + } + t := reply.GetPayload().GetType() + s := len(reply.GetPayload().GetBody()) + if t != testpb.PayloadType_COMPRESSABLE || s != 1 { + grpclog.Fatalf("Got the reply with type %d len %d; want %d, %d", t, s, testpb.PayloadType_COMPRESSABLE, 1) + } + validateMetadata(header, trailer) + + // Testing with FullDuplex. + stream, err := tc.FullDuplexCall(ctx, args...) + if err != nil { + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v, want ", tc, err) + } + respParam := []*testpb.ResponseParameters{ + { + Size: 1, + }, + } + streamReq := &testpb.StreamingOutputCallRequest{ + ResponseType: testpb.PayloadType_COMPRESSABLE, + ResponseParameters: respParam, + Payload: pl, + } + if err := stream.Send(streamReq); err != nil { + grpclog.Fatalf("%v has error %v while sending %v", stream, err, streamReq) + } + streamHeader, err := stream.Header() + if err != nil { + grpclog.Fatalf("%v.Header() = %v", stream, err) + } + if _, err := stream.Recv(); err != nil { + grpclog.Fatalf("%v.Recv() = %v", stream, err) + } + if err := stream.CloseSend(); err != nil { + grpclog.Fatalf("%v.CloseSend() = %v, want ", stream, err) + } + if _, err := stream.Recv(); err != io.EOF { + grpclog.Fatalf("%v failed to complete the custom metadata test: %v", stream, err) + } + streamTrailer := stream.Trailer() + validateMetadata(streamHeader, streamTrailer) +} + +// DoStatusCodeAndMessage checks that the status code is propagated back to the client. +func DoStatusCodeAndMessage(tc testpb.TestServiceClient, args ...grpc.CallOption) { + var code int32 = 2 + msg := "test status message" + expectedErr := status.Error(codes.Code(code), msg) + respStatus := &testpb.EchoStatus{ + Code: code, + Message: msg, + } + // Test UnaryCall. + req := &testpb.SimpleRequest{ + ResponseStatus: respStatus, + } + if _, err := tc.UnaryCall(context.Background(), req, args...); err == nil || err.Error() != expectedErr.Error() { + grpclog.Fatalf("%v.UnaryCall(_, %v) = _, %v, want _, %v", tc, req, err, expectedErr) + } + // Test FullDuplexCall. + stream, err := tc.FullDuplexCall(context.Background(), args...) 
+ if err != nil { + grpclog.Fatalf("%v.FullDuplexCall(_) = _, %v, want ", tc, err) + } + streamReq := &testpb.StreamingOutputCallRequest{ + ResponseStatus: respStatus, + } + if err := stream.Send(streamReq); err != nil { + grpclog.Fatalf("%v has error %v while sending %v, want ", stream, err, streamReq) + } + if err := stream.CloseSend(); err != nil { + grpclog.Fatalf("%v.CloseSend() = %v, want ", stream, err) + } + if _, err = stream.Recv(); err.Error() != expectedErr.Error() { + grpclog.Fatalf("%v.Recv() returned error %v, want %v", stream, err, expectedErr) + } +} + +// DoSpecialStatusMessage verifies Unicode and whitespace is correctly processed +// in status message. +func DoSpecialStatusMessage(tc testpb.TestServiceClient, args ...grpc.CallOption) { + const ( + code int32 = 2 + msg string = "\t\ntest with whitespace\r\nand Unicode BMP ☺ and non-BMP 😈\t\n" + ) + expectedErr := status.Error(codes.Code(code), msg) + req := &testpb.SimpleRequest{ + ResponseStatus: &testpb.EchoStatus{ + Code: code, + Message: msg, + }, + } + ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) + defer cancel() + if _, err := tc.UnaryCall(ctx, req, args...); err == nil || err.Error() != expectedErr.Error() { + grpclog.Fatalf("%v.UnaryCall(_, %v) = _, %v, want _, %v", tc, req, err, expectedErr) + } +} + +// DoUnimplementedService attempts to call a method from an unimplemented service. +func DoUnimplementedService(tc testpb.UnimplementedServiceClient) { + _, err := tc.UnimplementedCall(context.Background(), &testpb.Empty{}) + if status.Code(err) != codes.Unimplemented { + grpclog.Fatalf("%v.UnimplementedCall() = _, %v, want _, %v", tc, status.Code(err), codes.Unimplemented) + } +} + +// DoUnimplementedMethod attempts to call an unimplemented method. +func DoUnimplementedMethod(cc *grpc.ClientConn) { + var req, reply proto.Message + if err := cc.Invoke(context.Background(), "/grpc.testing.TestService/UnimplementedCall", req, reply); err == nil || status.Code(err) != codes.Unimplemented { + grpclog.Fatalf("ClientConn.Invoke(_, _, _, _, _) = %v, want error code %s", err, codes.Unimplemented) + } +} + +type testServer struct { +} + +// NewTestServer creates a test server for test service. 
+func NewTestServer() testpb.TestServiceServer { + return &testServer{} +} + +func (s *testServer) EmptyCall(ctx context.Context, in *testpb.Empty) (*testpb.Empty, error) { + return new(testpb.Empty), nil +} + +func serverNewPayload(t testpb.PayloadType, size int32) (*testpb.Payload, error) { + if size < 0 { + return nil, fmt.Errorf("requested a response with invalid length %d", size) + } + body := make([]byte, size) + switch t { + case testpb.PayloadType_COMPRESSABLE: + case testpb.PayloadType_UNCOMPRESSABLE: + return nil, fmt.Errorf("payloadType UNCOMPRESSABLE is not supported") + default: + return nil, fmt.Errorf("unsupported payload type: %d", t) + } + return &testpb.Payload{ + Type: t, + Body: body, + }, nil +} + +func (s *testServer) UnaryCall(ctx context.Context, in *testpb.SimpleRequest) (*testpb.SimpleResponse, error) { + st := in.GetResponseStatus() + if md, ok := metadata.FromIncomingContext(ctx); ok { + if initialMetadata, ok := md[initialMetadataKey]; ok { + header := metadata.Pairs(initialMetadataKey, initialMetadata[0]) + grpc.SendHeader(ctx, header) + } + if trailingMetadata, ok := md[trailingMetadataKey]; ok { + trailer := metadata.Pairs(trailingMetadataKey, trailingMetadata[0]) + grpc.SetTrailer(ctx, trailer) + } + } + if st != nil && st.Code != 0 { + return nil, status.Error(codes.Code(st.Code), st.Message) + } + pl, err := serverNewPayload(in.GetResponseType(), in.GetResponseSize()) + if err != nil { + return nil, err + } + return &testpb.SimpleResponse{ + Payload: pl, + }, nil +} + +func (s *testServer) StreamingOutputCall(args *testpb.StreamingOutputCallRequest, stream testpb.TestService_StreamingOutputCallServer) error { + cs := args.GetResponseParameters() + for _, c := range cs { + if us := c.GetIntervalUs(); us > 0 { + time.Sleep(time.Duration(us) * time.Microsecond) + } + pl, err := serverNewPayload(args.GetResponseType(), c.GetSize()) + if err != nil { + return err + } + if err := stream.Send(&testpb.StreamingOutputCallResponse{ + Payload: pl, + }); err != nil { + return err + } + } + return nil +} + +func (s *testServer) StreamingInputCall(stream testpb.TestService_StreamingInputCallServer) error { + var sum int + for { + in, err := stream.Recv() + if err == io.EOF { + return stream.SendAndClose(&testpb.StreamingInputCallResponse{ + AggregatedPayloadSize: int32(sum), + }) + } + if err != nil { + return err + } + p := in.GetPayload().GetBody() + sum += len(p) + } +} + +func (s *testServer) FullDuplexCall(stream testpb.TestService_FullDuplexCallServer) error { + if md, ok := metadata.FromIncomingContext(stream.Context()); ok { + if initialMetadata, ok := md[initialMetadataKey]; ok { + header := metadata.Pairs(initialMetadataKey, initialMetadata[0]) + stream.SendHeader(header) + } + if trailingMetadata, ok := md[trailingMetadataKey]; ok { + trailer := metadata.Pairs(trailingMetadataKey, trailingMetadata[0]) + stream.SetTrailer(trailer) + } + } + for { + in, err := stream.Recv() + if err == io.EOF { + // read done. 
+ return nil + } + if err != nil { + return err + } + st := in.GetResponseStatus() + if st != nil && st.Code != 0 { + return status.Error(codes.Code(st.Code), st.Message) + } + cs := in.GetResponseParameters() + for _, c := range cs { + if us := c.GetIntervalUs(); us > 0 { + time.Sleep(time.Duration(us) * time.Microsecond) + } + pl, err := serverNewPayload(in.GetResponseType(), c.GetSize()) + if err != nil { + return err + } + if err := stream.Send(&testpb.StreamingOutputCallResponse{ + Payload: pl, + }); err != nil { + return err + } + } + } +} + +func (s *testServer) HalfDuplexCall(stream testpb.TestService_HalfDuplexCallServer) error { + var msgBuf []*testpb.StreamingOutputCallRequest + for { + in, err := stream.Recv() + if err == io.EOF { + // read done. + break + } + if err != nil { + return err + } + msgBuf = append(msgBuf, in) + } + for _, m := range msgBuf { + cs := m.GetResponseParameters() + for _, c := range cs { + if us := c.GetIntervalUs(); us > 0 { + time.Sleep(time.Duration(us) * time.Microsecond) + } + pl, err := serverNewPayload(m.GetResponseType(), c.GetSize()) + if err != nil { + return err + } + if err := stream.Send(&testpb.StreamingOutputCallResponse{ + Payload: pl, + }); err != nil { + return err + } + } + } + return nil +} diff --git a/vendor/google.golang.org/grpc/keepalive/keepalive.go b/vendor/google.golang.org/grpc/keepalive/keepalive.go new file mode 100644 index 0000000..34d31b5 --- /dev/null +++ b/vendor/google.golang.org/grpc/keepalive/keepalive.go @@ -0,0 +1,85 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package keepalive defines configurable parameters for point-to-point +// healthcheck. +package keepalive + +import ( + "time" +) + +// ClientParameters is used to set keepalive parameters on the client-side. +// These configure how the client will actively probe to notice when a +// connection is broken and send pings so intermediaries will be aware of the +// liveness of the connection. Make sure these parameters are set in +// coordination with the keepalive policy on the server, as incompatible +// settings can result in closing of connection. +type ClientParameters struct { + // After a duration of this time if the client doesn't see any activity it + // pings the server to see if the transport is still alive. + // If set below 10s, a minimum value of 10s will be used instead. + Time time.Duration // The current default value is infinity. + // After having pinged for keepalive check, the client waits for a duration + // of Timeout and if no activity is seen even after that the connection is + // closed. + Timeout time.Duration // The current default value is 20 seconds. + // If true, client sends keepalive pings even with no active RPCs. If false, + // when there are no active RPCs, Time and Timeout will be ignored and no + // keepalive pings will be sent. + PermitWithoutStream bool // false by default. 
+} + +// ServerParameters is used to set keepalive and max-age parameters on the +// server-side. +type ServerParameters struct { + // MaxConnectionIdle is a duration for the amount of time after which an + // idle connection would be closed by sending a GoAway. Idleness duration is + // defined since the most recent time the number of outstanding RPCs became + // zero or the connection establishment. + MaxConnectionIdle time.Duration // The current default value is infinity. + // MaxConnectionAge is a duration for the maximum amount of time a + // connection may exist before it will be closed by sending a GoAway. A + // random jitter of +/-10% will be added to MaxConnectionAge to spread out + // connection storms. + MaxConnectionAge time.Duration // The current default value is infinity. + // MaxConnectionAgeGrace is an additive period after MaxConnectionAge after + // which the connection will be forcibly closed. + MaxConnectionAgeGrace time.Duration // The current default value is infinity. + // After a duration of this time if the server doesn't see any activity it + // pings the client to see if the transport is still alive. + // If set below 1s, a minimum value of 1s will be used instead. + Time time.Duration // The current default value is 2 hours. + // After having pinged for keepalive check, the server waits for a duration + // of Timeout and if no activity is seen even after that the connection is + // closed. + Timeout time.Duration // The current default value is 20 seconds. +} + +// EnforcementPolicy is used to set keepalive enforcement policy on the +// server-side. Server will close connection with a client that violates this +// policy. +type EnforcementPolicy struct { + // MinTime is the minimum amount of time a client should wait before sending + // a keepalive ping. + MinTime time.Duration // The current default value is 5 minutes. + // If true, server allows keepalive pings even when there are no active + // streams(RPCs). If false, and client sends ping when there are no active + // streams, server will send GOAWAY and close the connection. + PermitWithoutStream bool // false by default. +} diff --git a/vendor/google.golang.org/grpc/metadata/metadata.go b/vendor/google.golang.org/grpc/metadata/metadata.go new file mode 100644 index 0000000..cf6d1b9 --- /dev/null +++ b/vendor/google.golang.org/grpc/metadata/metadata.go @@ -0,0 +1,209 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package metadata define the structure of the metadata supported by gRPC library. +// Please refer to https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md +// for more information about custom-metadata. +package metadata // import "google.golang.org/grpc/metadata" + +import ( + "context" + "fmt" + "strings" +) + +// DecodeKeyValue returns k, v, nil. +// +// Deprecated: use k and v directly instead. 
+func DecodeKeyValue(k, v string) (string, string, error) { + return k, v, nil +} + +// MD is a mapping from metadata keys to values. Users should use the following +// two convenience functions New and Pairs to generate MD. +type MD map[string][]string + +// New creates an MD from a given key-value map. +// +// Only the following ASCII characters are allowed in keys: +// - digits: 0-9 +// - uppercase letters: A-Z (normalized to lower) +// - lowercase letters: a-z +// - special characters: -_. +// Uppercase letters are automatically converted to lowercase. +// +// Keys beginning with "grpc-" are reserved for grpc-internal use only and may +// result in errors if set in metadata. +func New(m map[string]string) MD { + md := MD{} + for k, val := range m { + key := strings.ToLower(k) + md[key] = append(md[key], val) + } + return md +} + +// Pairs returns an MD formed by the mapping of key, value ... +// Pairs panics if len(kv) is odd. +// +// Only the following ASCII characters are allowed in keys: +// - digits: 0-9 +// - uppercase letters: A-Z (normalized to lower) +// - lowercase letters: a-z +// - special characters: -_. +// Uppercase letters are automatically converted to lowercase. +// +// Keys beginning with "grpc-" are reserved for grpc-internal use only and may +// result in errors if set in metadata. +func Pairs(kv ...string) MD { + if len(kv)%2 == 1 { + panic(fmt.Sprintf("metadata: Pairs got the odd number of input pairs for metadata: %d", len(kv))) + } + md := MD{} + var key string + for i, s := range kv { + if i%2 == 0 { + key = strings.ToLower(s) + continue + } + md[key] = append(md[key], s) + } + return md +} + +// Len returns the number of items in md. +func (md MD) Len() int { + return len(md) +} + +// Copy returns a copy of md. +func (md MD) Copy() MD { + return Join(md) +} + +// Get obtains the values for a given key. +func (md MD) Get(k string) []string { + k = strings.ToLower(k) + return md[k] +} + +// Set sets the value of a given key with a slice of values. +func (md MD) Set(k string, vals ...string) { + if len(vals) == 0 { + return + } + k = strings.ToLower(k) + md[k] = vals +} + +// Append adds the values to key k, not overwriting what was already stored at that key. +func (md MD) Append(k string, vals ...string) { + if len(vals) == 0 { + return + } + k = strings.ToLower(k) + md[k] = append(md[k], vals...) +} + +// Join joins any number of mds into a single MD. +// The order of values for each key is determined by the order in which +// the mds containing those values are presented to Join. +func Join(mds ...MD) MD { + out := MD{} + for _, md := range mds { + for k, v := range md { + out[k] = append(out[k], v...) + } + } + return out +} + +type mdIncomingKey struct{} +type mdOutgoingKey struct{} + +// NewIncomingContext creates a new context with incoming md attached. +func NewIncomingContext(ctx context.Context, md MD) context.Context { + return context.WithValue(ctx, mdIncomingKey{}, md) +} + +// NewOutgoingContext creates a new context with outgoing md attached. If used +// in conjunction with AppendToOutgoingContext, NewOutgoingContext will +// overwrite any previously-appended metadata. +func NewOutgoingContext(ctx context.Context, md MD) context.Context { + return context.WithValue(ctx, mdOutgoingKey{}, rawMD{md: md}) +} + +// AppendToOutgoingContext returns a new context with the provided kv merged +// with any existing metadata in the context. Please refer to the +// documentation of Pairs for a description of kv. 
+func AppendToOutgoingContext(ctx context.Context, kv ...string) context.Context { + if len(kv)%2 == 1 { + panic(fmt.Sprintf("metadata: AppendToOutgoingContext got an odd number of input pairs for metadata: %d", len(kv))) + } + md, _ := ctx.Value(mdOutgoingKey{}).(rawMD) + added := make([][]string, len(md.added)+1) + copy(added, md.added) + added[len(added)-1] = make([]string, len(kv)) + copy(added[len(added)-1], kv) + return context.WithValue(ctx, mdOutgoingKey{}, rawMD{md: md.md, added: added}) +} + +// FromIncomingContext returns the incoming metadata in ctx if it exists. The +// returned MD should not be modified. Writing to it may cause races. +// Modification should be made to copies of the returned MD. +func FromIncomingContext(ctx context.Context) (md MD, ok bool) { + md, ok = ctx.Value(mdIncomingKey{}).(MD) + return +} + +// FromOutgoingContextRaw returns the un-merged, intermediary contents +// of rawMD. Remember to perform strings.ToLower on the keys. The returned +// MD should not be modified. Writing to it may cause races. Modification +// should be made to copies of the returned MD. +// +// This is intended for gRPC-internal use ONLY. +func FromOutgoingContextRaw(ctx context.Context) (MD, [][]string, bool) { + raw, ok := ctx.Value(mdOutgoingKey{}).(rawMD) + if !ok { + return nil, nil, false + } + + return raw.md, raw.added, true +} + +// FromOutgoingContext returns the outgoing metadata in ctx if it exists. The +// returned MD should not be modified. Writing to it may cause races. +// Modification should be made to copies of the returned MD. +func FromOutgoingContext(ctx context.Context) (MD, bool) { + raw, ok := ctx.Value(mdOutgoingKey{}).(rawMD) + if !ok { + return nil, false + } + + mds := make([]MD, 0, len(raw.added)+1) + mds = append(mds, raw.md) + for _, vv := range raw.added { + mds = append(mds, Pairs(vv...)) + } + return Join(mds...), ok +} + +type rawMD struct { + md MD + added [][]string +} diff --git a/vendor/google.golang.org/grpc/naming/dns_resolver.go b/vendor/google.golang.org/grpc/naming/dns_resolver.go new file mode 100644 index 0000000..c9f79dc --- /dev/null +++ b/vendor/google.golang.org/grpc/naming/dns_resolver.go @@ -0,0 +1,293 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package naming + +import ( + "context" + "errors" + "fmt" + "net" + "strconv" + "time" + + "google.golang.org/grpc/grpclog" +) + +const ( + defaultPort = "443" + defaultFreq = time.Minute * 30 +) + +var ( + errMissingAddr = errors.New("missing address") + errWatcherClose = errors.New("watcher has been closed") + + lookupHost = net.DefaultResolver.LookupHost + lookupSRV = net.DefaultResolver.LookupSRV +) + +// NewDNSResolverWithFreq creates a DNS Resolver that can resolve DNS names, and +// create watchers that poll the DNS server using the frequency set by freq. 
+func NewDNSResolverWithFreq(freq time.Duration) (Resolver, error) { + return &dnsResolver{freq: freq}, nil +} + +// NewDNSResolver creates a DNS Resolver that can resolve DNS names, and create +// watchers that poll the DNS server using the default frequency defined by defaultFreq. +func NewDNSResolver() (Resolver, error) { + return NewDNSResolverWithFreq(defaultFreq) +} + +// dnsResolver handles name resolution for names following the DNS scheme +type dnsResolver struct { + // frequency of polling the DNS server that the watchers created by this resolver will use. + freq time.Duration +} + +// formatIP returns ok = false if addr is not a valid textual representation of an IP address. +// If addr is an IPv4 address, return the addr and ok = true. +// If addr is an IPv6 address, return the addr enclosed in square brackets and ok = true. +func formatIP(addr string) (addrIP string, ok bool) { + ip := net.ParseIP(addr) + if ip == nil { + return "", false + } + if ip.To4() != nil { + return addr, true + } + return "[" + addr + "]", true +} + +// parseTarget takes the user input target string, returns formatted host and port info. +// If target doesn't specify a port, set the port to be the defaultPort. +// If target is in IPv6 format and host-name is enclosed in square brackets, brackets +// are stripped when setting the host. +// examples: +// target: "www.google.com" returns host: "www.google.com", port: "443" +// target: "ipv4-host:80" returns host: "ipv4-host", port: "80" +// target: "[ipv6-host]" returns host: "ipv6-host", port: "443" +// target: ":80" returns host: "localhost", port: "80" +// target: ":" returns host: "localhost", port: "443" +func parseTarget(target string) (host, port string, err error) { + if target == "" { + return "", "", errMissingAddr + } + + if ip := net.ParseIP(target); ip != nil { + // target is an IPv4 or IPv6(without brackets) address + return target, defaultPort, nil + } + if host, port, err := net.SplitHostPort(target); err == nil { + // target has port, i.e ipv4-host:port, [ipv6-host]:port, host-name:port + if host == "" { + // Keep consistent with net.Dial(): If the host is empty, as in ":80", the local system is assumed. + host = "localhost" + } + if port == "" { + // If the port field is empty(target ends with colon), e.g. "[::1]:", defaultPort is used. + port = defaultPort + } + return host, port, nil + } + if host, port, err := net.SplitHostPort(target + ":" + defaultPort); err == nil { + // target doesn't have port + return host, port, nil + } + return "", "", fmt.Errorf("invalid target address %v", target) +} + +// Resolve creates a watcher that watches the name resolution of the target. 
+func (r *dnsResolver) Resolve(target string) (Watcher, error) { + host, port, err := parseTarget(target) + if err != nil { + return nil, err + } + + if net.ParseIP(host) != nil { + ipWatcher := &ipWatcher{ + updateChan: make(chan *Update, 1), + } + host, _ = formatIP(host) + ipWatcher.updateChan <- &Update{Op: Add, Addr: host + ":" + port} + return ipWatcher, nil + } + + ctx, cancel := context.WithCancel(context.Background()) + return &dnsWatcher{ + r: r, + host: host, + port: port, + ctx: ctx, + cancel: cancel, + t: time.NewTimer(0), + }, nil +} + +// dnsWatcher watches for the name resolution update for a specific target +type dnsWatcher struct { + r *dnsResolver + host string + port string + // The latest resolved address set + curAddrs map[string]*Update + ctx context.Context + cancel context.CancelFunc + t *time.Timer +} + +// ipWatcher watches for the name resolution update for an IP address. +type ipWatcher struct { + updateChan chan *Update +} + +// Next returns the address resolution Update for the target. For IP address, +// the resolution is itself, thus polling name server is unnecessary. Therefore, +// Next() will return an Update the first time it is called, and will be blocked +// for all following calls as no Update exists until watcher is closed. +func (i *ipWatcher) Next() ([]*Update, error) { + u, ok := <-i.updateChan + if !ok { + return nil, errWatcherClose + } + return []*Update{u}, nil +} + +// Close closes the ipWatcher. +func (i *ipWatcher) Close() { + close(i.updateChan) +} + +// AddressType indicates the address type returned by name resolution. +type AddressType uint8 + +const ( + // Backend indicates the server is a backend server. + Backend AddressType = iota + // GRPCLB indicates the server is a grpclb load balancer. + GRPCLB +) + +// AddrMetadataGRPCLB contains the information the name resolver for grpclb should provide. The +// name resolver used by the grpclb balancer is required to provide this type of metadata in +// its address updates. +type AddrMetadataGRPCLB struct { + // AddrType is the type of server (grpc load balancer or backend). + AddrType AddressType + // ServerName is the name of the grpc load balancer. Used for authentication. 
+ ServerName string +} + +// compileUpdate compares the old resolved addresses and newly resolved addresses, +// and generates an update list +func (w *dnsWatcher) compileUpdate(newAddrs map[string]*Update) []*Update { + var res []*Update + for a, u := range w.curAddrs { + if _, ok := newAddrs[a]; !ok { + u.Op = Delete + res = append(res, u) + } + } + for a, u := range newAddrs { + if _, ok := w.curAddrs[a]; !ok { + res = append(res, u) + } + } + return res +} + +func (w *dnsWatcher) lookupSRV() map[string]*Update { + newAddrs := make(map[string]*Update) + _, srvs, err := lookupSRV(w.ctx, "grpclb", "tcp", w.host) + if err != nil { + grpclog.Infof("grpc: failed dns SRV record lookup due to %v.\n", err) + return nil + } + for _, s := range srvs { + lbAddrs, err := lookupHost(w.ctx, s.Target) + if err != nil { + grpclog.Warningf("grpc: failed load balancer address dns lookup due to %v.\n", err) + continue + } + for _, a := range lbAddrs { + a, ok := formatIP(a) + if !ok { + grpclog.Errorf("grpc: failed IP parsing due to %v.\n", err) + continue + } + addr := a + ":" + strconv.Itoa(int(s.Port)) + newAddrs[addr] = &Update{Addr: addr, + Metadata: AddrMetadataGRPCLB{AddrType: GRPCLB, ServerName: s.Target}} + } + } + return newAddrs +} + +func (w *dnsWatcher) lookupHost() map[string]*Update { + newAddrs := make(map[string]*Update) + addrs, err := lookupHost(w.ctx, w.host) + if err != nil { + grpclog.Warningf("grpc: failed dns A record lookup due to %v.\n", err) + return nil + } + for _, a := range addrs { + a, ok := formatIP(a) + if !ok { + grpclog.Errorf("grpc: failed IP parsing due to %v.\n", err) + continue + } + addr := a + ":" + w.port + newAddrs[addr] = &Update{Addr: addr} + } + return newAddrs +} + +func (w *dnsWatcher) lookup() []*Update { + newAddrs := w.lookupSRV() + if newAddrs == nil { + // If failed to get any balancer address (either no corresponding SRV for the + // target, or caused by failure during resolution/parsing of the balancer target), + // return any A record info available. + newAddrs = w.lookupHost() + } + result := w.compileUpdate(newAddrs) + w.curAddrs = newAddrs + return result +} + +// Next returns the resolved address update(delta) for the target. If there's no +// change, it will sleep for 30 mins and try to resolve again after that. +func (w *dnsWatcher) Next() ([]*Update, error) { + for { + select { + case <-w.ctx.Done(): + return nil, errWatcherClose + case <-w.t.C: + } + result := w.lookup() + // Next lookup should happen after an interval defined by w.r.freq. + w.t.Reset(w.r.freq) + if len(result) > 0 { + return result, nil + } + } +} + +func (w *dnsWatcher) Close() { + w.cancel() +} diff --git a/vendor/google.golang.org/grpc/naming/naming.go b/vendor/google.golang.org/grpc/naming/naming.go new file mode 100644 index 0000000..c99fdbe --- /dev/null +++ b/vendor/google.golang.org/grpc/naming/naming.go @@ -0,0 +1,69 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +// Package naming defines the naming API and related data structures for gRPC. +// The interface is EXPERIMENTAL and may be subject to change. +// +// Deprecated: please use package resolver. +package naming + +// Operation defines the corresponding operations for a name resolution change. +// +// Deprecated: please use package resolver. +type Operation uint8 + +const ( + // Add indicates a new address is added. + Add Operation = iota + // Delete indicates an existing address is deleted. + Delete +) + +// Update defines a name resolution update. Notice that it is not valid having both +// empty string Addr and nil Metadata in an Update. +// +// Deprecated: please use package resolver. +type Update struct { + // Op indicates the operation of the update. + Op Operation + // Addr is the updated address. It is empty string if there is no address update. + Addr string + // Metadata is the updated metadata. It is nil if there is no metadata update. + // Metadata is not required for a custom naming implementation. + Metadata interface{} +} + +// Resolver creates a Watcher for a target to track its resolution changes. +// +// Deprecated: please use package resolver. +type Resolver interface { + // Resolve creates a Watcher for target. + Resolve(target string) (Watcher, error) +} + +// Watcher watches for the updates on the specified target. +// +// Deprecated: please use package resolver. +type Watcher interface { + // Next blocks until an update or error happens. It may return one or more + // updates. The first call should get the full set of the results. It should + // return an error if and only if Watcher cannot recover. + Next() ([]*Update, error) + // Close closes the Watcher. + Close() +} diff --git a/vendor/google.golang.org/grpc/peer/peer.go b/vendor/google.golang.org/grpc/peer/peer.go new file mode 100644 index 0000000..e01d219 --- /dev/null +++ b/vendor/google.golang.org/grpc/peer/peer.go @@ -0,0 +1,51 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package peer defines various peer information associated with RPCs and +// corresponding utils. +package peer + +import ( + "context" + "net" + + "google.golang.org/grpc/credentials" +) + +// Peer contains the information of the peer for an RPC, such as the address +// and authentication information. +type Peer struct { + // Addr is the peer address. + Addr net.Addr + // AuthInfo is the authentication information of the transport. + // It is nil if there is no transport security being used. + AuthInfo credentials.AuthInfo +} + +type peerKey struct{} + +// NewContext creates a new context with peer information attached. +func NewContext(ctx context.Context, p *Peer) context.Context { + return context.WithValue(ctx, peerKey{}, p) +} + +// FromContext returns the peer information in ctx if it exists. 
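The Watcher contract defined here (full set on the first Next, then block until something changes or Close is called) is the same shape the ipWatcher above implements. A hedged sketch of a custom Resolver serving a fixed address list; the type names and error text are illustrative:

package example

import (
	"errors"

	"google.golang.org/grpc/naming"
)

// staticResolver hands out a fixed backend list once and then blocks.
type staticResolver struct{ addrs []string }

func (r staticResolver) Resolve(target string) (naming.Watcher, error) {
	updates := make([]*naming.Update, 0, len(r.addrs))
	for _, a := range r.addrs {
		updates = append(updates, &naming.Update{Op: naming.Add, Addr: a})
	}
	return &staticWatcher{updates: updates, done: make(chan struct{})}, nil
}

type staticWatcher struct {
	updates []*naming.Update
	sent    bool
	done    chan struct{}
}

func (w *staticWatcher) Next() ([]*naming.Update, error) {
	if !w.sent {
		w.sent = true
		return w.updates, nil // first call: the full set
	}
	<-w.done // nothing ever changes; block until Close
	return nil, errors.New("watcher closed")
}

func (w *staticWatcher) Close() { close(w.done) }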
+func FromContext(ctx context.Context) (p *Peer, ok bool) { + p, ok = ctx.Value(peerKey{}).(*Peer) + return +} diff --git a/vendor/google.golang.org/grpc/picker_wrapper.go b/vendor/google.golang.org/grpc/picker_wrapper.go new file mode 100644 index 0000000..f962549 --- /dev/null +++ b/vendor/google.golang.org/grpc/picker_wrapper.go @@ -0,0 +1,189 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "io" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/status" +) + +// pickerWrapper is a wrapper of balancer.Picker. It blocks on certain pick +// actions and unblock when there's a picker update. +type pickerWrapper struct { + mu sync.Mutex + done bool + blockingCh chan struct{} + picker balancer.Picker + + // The latest connection happened. + connErrMu sync.Mutex + connErr error +} + +func newPickerWrapper() *pickerWrapper { + bp := &pickerWrapper{blockingCh: make(chan struct{})} + return bp +} + +func (bp *pickerWrapper) updateConnectionError(err error) { + bp.connErrMu.Lock() + bp.connErr = err + bp.connErrMu.Unlock() +} + +func (bp *pickerWrapper) connectionError() error { + bp.connErrMu.Lock() + err := bp.connErr + bp.connErrMu.Unlock() + return err +} + +// updatePicker is called by UpdateBalancerState. It unblocks all blocked pick. +func (bp *pickerWrapper) updatePicker(p balancer.Picker) { + bp.mu.Lock() + if bp.done { + bp.mu.Unlock() + return + } + bp.picker = p + // bp.blockingCh should never be nil. + close(bp.blockingCh) + bp.blockingCh = make(chan struct{}) + bp.mu.Unlock() +} + +func doneChannelzWrapper(acw *acBalancerWrapper, done func(balancer.DoneInfo)) func(balancer.DoneInfo) { + acw.mu.Lock() + ac := acw.ac + acw.mu.Unlock() + ac.incrCallsStarted() + return func(b balancer.DoneInfo) { + if b.Err != nil && b.Err != io.EOF { + ac.incrCallsFailed() + } else { + ac.incrCallsSucceeded() + } + if done != nil { + done(b) + } + } +} + +// pick returns the transport that will be used for the RPC. +// It may block in the following cases: +// - there's no picker +// - the current picker returns ErrNoSubConnAvailable +// - the current picker returns other errors and failfast is false. +// - the subConn returned by the current picker is not READY +// When one of these situations happens, pick blocks until the picker gets updated. 
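The peer package added above is how server-side code reads the remote address and transport auth info of the current RPC. A small sketch of a unary interceptor that logs it; the interceptor name and log format are illustrative:

package example

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/peer"
)

// logPeer records the caller's address (and AuthInfo when transport security
// is in use) before delegating to the real handler.
func logPeer(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	if p, ok := peer.FromContext(ctx); ok {
		log.Printf("%s called from %v (auth: %v)", info.FullMethod, p.Addr, p.AuthInfo)
	}
	return handler(ctx, req)
}

Installed with grpc.NewServer(grpc.UnaryInterceptor(logPeer)).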
+func (bp *pickerWrapper) pick(ctx context.Context, failfast bool, opts balancer.PickOptions) (transport.ClientTransport, func(balancer.DoneInfo), error) { + var ch chan struct{} + + for { + bp.mu.Lock() + if bp.done { + bp.mu.Unlock() + return nil, nil, ErrClientConnClosing + } + + if bp.picker == nil { + ch = bp.blockingCh + } + if ch == bp.blockingCh { + // This could happen when either: + // - bp.picker is nil (the previous if condition), or + // - has called pick on the current picker. + bp.mu.Unlock() + select { + case <-ctx.Done(): + return nil, nil, ctx.Err() + case <-ch: + } + continue + } + + ch = bp.blockingCh + p := bp.picker + bp.mu.Unlock() + + subConn, done, err := p.Pick(ctx, opts) + + if err != nil { + switch err { + case balancer.ErrNoSubConnAvailable: + continue + case balancer.ErrTransientFailure: + if !failfast { + continue + } + return nil, nil, status.Errorf(codes.Unavailable, "%v, latest connection error: %v", err, bp.connectionError()) + case context.DeadlineExceeded: + return nil, nil, status.Error(codes.DeadlineExceeded, err.Error()) + case context.Canceled: + return nil, nil, status.Error(codes.Canceled, err.Error()) + default: + if _, ok := status.FromError(err); ok { + return nil, nil, err + } + // err is some other error. + return nil, nil, status.Error(codes.Unknown, err.Error()) + } + } + + acw, ok := subConn.(*acBalancerWrapper) + if !ok { + grpclog.Error("subconn returned from pick is not *acBalancerWrapper") + continue + } + if t, ok := acw.getAddrConn().getReadyTransport(); ok { + if channelz.IsOn() { + return t, doneChannelzWrapper(acw, done), nil + } + return t, done, nil + } + if done != nil { + // Calling done with nil error, no bytes sent and no bytes received. + // DoneInfo with default value works. + done(balancer.DoneInfo{}) + } + grpclog.Infof("blockingPicker: the picked transport is not ready, loop back to repick") + // If ok == false, ac.state is not READY. + // A valid picker always returns READY subConn. This means the state of ac + // just changed, and picker will be updated shortly. + // continue back to the beginning of the for loop to repick. + } +} + +func (bp *pickerWrapper) close() { + bp.mu.Lock() + defer bp.mu.Unlock() + if bp.done { + return + } + bp.done = true + close(bp.blockingCh) +} diff --git a/vendor/google.golang.org/grpc/pickfirst.go b/vendor/google.golang.org/grpc/pickfirst.go new file mode 100644 index 0000000..d1e38aa --- /dev/null +++ b/vendor/google.golang.org/grpc/pickfirst.go @@ -0,0 +1,110 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/resolver" +) + +// PickFirstBalancerName is the name of the pick_first balancer. 
+const PickFirstBalancerName = "pick_first" + +func newPickfirstBuilder() balancer.Builder { + return &pickfirstBuilder{} +} + +type pickfirstBuilder struct{} + +func (*pickfirstBuilder) Build(cc balancer.ClientConn, opt balancer.BuildOptions) balancer.Balancer { + return &pickfirstBalancer{cc: cc} +} + +func (*pickfirstBuilder) Name() string { + return PickFirstBalancerName +} + +type pickfirstBalancer struct { + cc balancer.ClientConn + sc balancer.SubConn +} + +func (b *pickfirstBalancer) HandleResolvedAddrs(addrs []resolver.Address, err error) { + if err != nil { + grpclog.Infof("pickfirstBalancer: HandleResolvedAddrs called with error %v", err) + return + } + if b.sc == nil { + b.sc, err = b.cc.NewSubConn(addrs, balancer.NewSubConnOptions{}) + if err != nil { + //TODO(yuxuanli): why not change the cc state to Idle? + grpclog.Errorf("pickfirstBalancer: failed to NewSubConn: %v", err) + return + } + b.cc.UpdateBalancerState(connectivity.Idle, &picker{sc: b.sc}) + b.sc.Connect() + } else { + b.sc.UpdateAddresses(addrs) + b.sc.Connect() + } +} + +func (b *pickfirstBalancer) HandleSubConnStateChange(sc balancer.SubConn, s connectivity.State) { + grpclog.Infof("pickfirstBalancer: HandleSubConnStateChange: %p, %v", sc, s) + if b.sc != sc { + grpclog.Infof("pickfirstBalancer: ignored state change because sc is not recognized") + return + } + if s == connectivity.Shutdown { + b.sc = nil + return + } + + switch s { + case connectivity.Ready, connectivity.Idle: + b.cc.UpdateBalancerState(s, &picker{sc: sc}) + case connectivity.Connecting: + b.cc.UpdateBalancerState(s, &picker{err: balancer.ErrNoSubConnAvailable}) + case connectivity.TransientFailure: + b.cc.UpdateBalancerState(s, &picker{err: balancer.ErrTransientFailure}) + } +} + +func (b *pickfirstBalancer) Close() { +} + +type picker struct { + err error + sc balancer.SubConn +} + +func (p *picker) Pick(ctx context.Context, opts balancer.PickOptions) (balancer.SubConn, func(balancer.DoneInfo), error) { + if p.err != nil { + return nil, nil, p.err + } + return p.sc, nil, nil +} + +func init() { + balancer.Register(newPickfirstBuilder()) +} diff --git a/vendor/google.golang.org/grpc/proxy.go b/vendor/google.golang.org/grpc/proxy.go new file mode 100644 index 0000000..f8f69bf --- /dev/null +++ b/vendor/google.golang.org/grpc/proxy.go @@ -0,0 +1,152 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "bufio" + "context" + "encoding/base64" + "errors" + "fmt" + "io" + "net" + "net/http" + "net/http/httputil" + "net/url" +) + +const proxyAuthHeaderKey = "Proxy-Authorization" + +var ( + // errDisabled indicates that proxy is disabled for the address. + errDisabled = errors.New("proxy is disabled for the address") + // The following variable will be overwritten in the tests. 
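pick_first, registered by the init function above, connects to the first reachable address and pins every RPC to that connection; it is also gRPC's default policy when nothing else is configured. Assuming the vendored release still exposes WithBalancerName, as releases of this vintage do, it can be requested explicitly; the target is a placeholder and the insecure credentials are for illustration only:

package example

import (
	"log"

	"google.golang.org/grpc"
)

func dialPickFirst() *grpc.ClientConn {
	conn, err := grpc.Dial(
		"my-service.example.com:8080", // placeholder target
		grpc.WithInsecure(),           // illustrative; use real transport credentials
		grpc.WithBalancerName(grpc.PickFirstBalancerName),
	)
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	return conn
}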
+ httpProxyFromEnvironment = http.ProxyFromEnvironment +) + +func mapAddress(ctx context.Context, address string) (*url.URL, error) { + req := &http.Request{ + URL: &url.URL{ + Scheme: "https", + Host: address, + }, + } + url, err := httpProxyFromEnvironment(req) + if err != nil { + return nil, err + } + if url == nil { + return nil, errDisabled + } + return url, nil +} + +// To read a response from a net.Conn, http.ReadResponse() takes a bufio.Reader. +// It's possible that this reader reads more than what's need for the response and stores +// those bytes in the buffer. +// bufConn wraps the original net.Conn and the bufio.Reader to make sure we don't lose the +// bytes in the buffer. +type bufConn struct { + net.Conn + r io.Reader +} + +func (c *bufConn) Read(b []byte) (int, error) { + return c.r.Read(b) +} + +func basicAuth(username, password string) string { + auth := username + ":" + password + return base64.StdEncoding.EncodeToString([]byte(auth)) +} + +func doHTTPConnectHandshake(ctx context.Context, conn net.Conn, backendAddr string, proxyURL *url.URL) (_ net.Conn, err error) { + defer func() { + if err != nil { + conn.Close() + } + }() + + req := &http.Request{ + Method: http.MethodConnect, + URL: &url.URL{Host: backendAddr}, + Header: map[string][]string{"User-Agent": {grpcUA}}, + } + if t := proxyURL.User; t != nil { + u := t.Username() + p, _ := t.Password() + req.Header.Add(proxyAuthHeaderKey, "Basic "+basicAuth(u, p)) + } + + if err := sendHTTPRequest(ctx, req, conn); err != nil { + return nil, fmt.Errorf("failed to write the HTTP request: %v", err) + } + + r := bufio.NewReader(conn) + resp, err := http.ReadResponse(r, req) + if err != nil { + return nil, fmt.Errorf("reading server HTTP response: %v", err) + } + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + dump, err := httputil.DumpResponse(resp, true) + if err != nil { + return nil, fmt.Errorf("failed to do connect handshake, status code: %s", resp.Status) + } + return nil, fmt.Errorf("failed to do connect handshake, response: %q", dump) + } + + return &bufConn{Conn: conn, r: r}, nil +} + +// newProxyDialer returns a dialer that connects to proxy first if necessary. +// The returned dialer checks if a proxy is necessary, dial to the proxy with the +// provided dialer, does HTTP CONNECT handshake and returns the connection. +func newProxyDialer(dialer func(context.Context, string) (net.Conn, error)) func(context.Context, string) (net.Conn, error) { + return func(ctx context.Context, addr string) (conn net.Conn, err error) { + var newAddr string + proxyURL, err := mapAddress(ctx, addr) + if err != nil { + if err != errDisabled { + return nil, err + } + newAddr = addr + } else { + newAddr = proxyURL.Host + } + + conn, err = dialer(ctx, newAddr) + if err != nil { + return + } + if proxyURL != nil { + // proxy is disabled if proxyURL is nil. 
+ conn, err = doHTTPConnectHandshake(ctx, conn, addr, proxyURL) + } + return + } +} + +func sendHTTPRequest(ctx context.Context, req *http.Request, conn net.Conn) error { + req = req.WithContext(ctx) + if err := req.Write(conn); err != nil { + return fmt.Errorf("failed to write the HTTP request: %v", err) + } + return nil +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_reflection_v1alpha/reflection.pb.go b/vendor/google.golang.org/grpc/reflection/grpc_reflection_v1alpha/reflection.pb.go new file mode 100644 index 0000000..ae5aa7d --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_reflection_v1alpha/reflection.pb.go @@ -0,0 +1,939 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc_reflection_v1alpha/reflection.proto + +package grpc_reflection_v1alpha + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The message sent by the client when calling ServerReflectionInfo method. +type ServerReflectionRequest struct { + Host string `protobuf:"bytes,1,opt,name=host,proto3" json:"host,omitempty"` + // To use reflection service, the client should set one of the following + // fields in message_request. The server distinguishes requests by their + // defined field and then handles them using corresponding methods. 
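The proxy support that ends above delegates proxy selection to the standard library's http.ProxyFromEnvironment, so the usual HTTPS_PROXY/HTTP_PROXY/NO_PROXY variables decide whether an HTTP CONNECT tunnel is set up before the gRPC handshake, and credentials embedded in the proxy URL end up in the Proxy-Authorization header via basicAuth. A sketch of the same lookup the dialer performs; the function name and host are illustrative:

package example

import (
	"fmt"
	"net/http"
	"net/url"
)

func proxyFor(backend string) {
	// Mirrors mapAddress above: ask the environment which proxy, if any,
	// applies to an https request for this backend.
	req := &http.Request{URL: &url.URL{Scheme: "https", Host: backend}}
	proxyURL, err := http.ProxyFromEnvironment(req)
	if err != nil || proxyURL == nil {
		fmt.Println("no proxy; dialing", backend, "directly")
		return
	}
	fmt.Println("CONNECT via", proxyURL.Host)
}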
+ // + // Types that are valid to be assigned to MessageRequest: + // *ServerReflectionRequest_FileByFilename + // *ServerReflectionRequest_FileContainingSymbol + // *ServerReflectionRequest_FileContainingExtension + // *ServerReflectionRequest_AllExtensionNumbersOfType + // *ServerReflectionRequest_ListServices + MessageRequest isServerReflectionRequest_MessageRequest `protobuf_oneof:"message_request"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerReflectionRequest) Reset() { *m = ServerReflectionRequest{} } +func (m *ServerReflectionRequest) String() string { return proto.CompactTextString(m) } +func (*ServerReflectionRequest) ProtoMessage() {} +func (*ServerReflectionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{0} +} +func (m *ServerReflectionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerReflectionRequest.Unmarshal(m, b) +} +func (m *ServerReflectionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerReflectionRequest.Marshal(b, m, deterministic) +} +func (dst *ServerReflectionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerReflectionRequest.Merge(dst, src) +} +func (m *ServerReflectionRequest) XXX_Size() int { + return xxx_messageInfo_ServerReflectionRequest.Size(m) +} +func (m *ServerReflectionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ServerReflectionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerReflectionRequest proto.InternalMessageInfo + +func (m *ServerReflectionRequest) GetHost() string { + if m != nil { + return m.Host + } + return "" +} + +type isServerReflectionRequest_MessageRequest interface { + isServerReflectionRequest_MessageRequest() +} + +type ServerReflectionRequest_FileByFilename struct { + FileByFilename string `protobuf:"bytes,3,opt,name=file_by_filename,json=fileByFilename,proto3,oneof"` +} + +type ServerReflectionRequest_FileContainingSymbol struct { + FileContainingSymbol string `protobuf:"bytes,4,opt,name=file_containing_symbol,json=fileContainingSymbol,proto3,oneof"` +} + +type ServerReflectionRequest_FileContainingExtension struct { + FileContainingExtension *ExtensionRequest `protobuf:"bytes,5,opt,name=file_containing_extension,json=fileContainingExtension,proto3,oneof"` +} + +type ServerReflectionRequest_AllExtensionNumbersOfType struct { + AllExtensionNumbersOfType string `protobuf:"bytes,6,opt,name=all_extension_numbers_of_type,json=allExtensionNumbersOfType,proto3,oneof"` +} + +type ServerReflectionRequest_ListServices struct { + ListServices string `protobuf:"bytes,7,opt,name=list_services,json=listServices,proto3,oneof"` +} + +func (*ServerReflectionRequest_FileByFilename) isServerReflectionRequest_MessageRequest() {} + +func (*ServerReflectionRequest_FileContainingSymbol) isServerReflectionRequest_MessageRequest() {} + +func (*ServerReflectionRequest_FileContainingExtension) isServerReflectionRequest_MessageRequest() {} + +func (*ServerReflectionRequest_AllExtensionNumbersOfType) isServerReflectionRequest_MessageRequest() {} + +func (*ServerReflectionRequest_ListServices) isServerReflectionRequest_MessageRequest() {} + +func (m *ServerReflectionRequest) GetMessageRequest() isServerReflectionRequest_MessageRequest { + if m != nil { + return m.MessageRequest + } + return nil +} + +func (m *ServerReflectionRequest) GetFileByFilename() string { + if x, ok := 
m.GetMessageRequest().(*ServerReflectionRequest_FileByFilename); ok { + return x.FileByFilename + } + return "" +} + +func (m *ServerReflectionRequest) GetFileContainingSymbol() string { + if x, ok := m.GetMessageRequest().(*ServerReflectionRequest_FileContainingSymbol); ok { + return x.FileContainingSymbol + } + return "" +} + +func (m *ServerReflectionRequest) GetFileContainingExtension() *ExtensionRequest { + if x, ok := m.GetMessageRequest().(*ServerReflectionRequest_FileContainingExtension); ok { + return x.FileContainingExtension + } + return nil +} + +func (m *ServerReflectionRequest) GetAllExtensionNumbersOfType() string { + if x, ok := m.GetMessageRequest().(*ServerReflectionRequest_AllExtensionNumbersOfType); ok { + return x.AllExtensionNumbersOfType + } + return "" +} + +func (m *ServerReflectionRequest) GetListServices() string { + if x, ok := m.GetMessageRequest().(*ServerReflectionRequest_ListServices); ok { + return x.ListServices + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ServerReflectionRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ServerReflectionRequest_OneofMarshaler, _ServerReflectionRequest_OneofUnmarshaler, _ServerReflectionRequest_OneofSizer, []interface{}{ + (*ServerReflectionRequest_FileByFilename)(nil), + (*ServerReflectionRequest_FileContainingSymbol)(nil), + (*ServerReflectionRequest_FileContainingExtension)(nil), + (*ServerReflectionRequest_AllExtensionNumbersOfType)(nil), + (*ServerReflectionRequest_ListServices)(nil), + } +} + +func _ServerReflectionRequest_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ServerReflectionRequest) + // message_request + switch x := m.MessageRequest.(type) { + case *ServerReflectionRequest_FileByFilename: + b.EncodeVarint(3<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FileByFilename) + case *ServerReflectionRequest_FileContainingSymbol: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.FileContainingSymbol) + case *ServerReflectionRequest_FileContainingExtension: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FileContainingExtension); err != nil { + return err + } + case *ServerReflectionRequest_AllExtensionNumbersOfType: + b.EncodeVarint(6<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AllExtensionNumbersOfType) + case *ServerReflectionRequest_ListServices: + b.EncodeVarint(7<<3 | proto.WireBytes) + b.EncodeStringBytes(x.ListServices) + case nil: + default: + return fmt.Errorf("ServerReflectionRequest.MessageRequest has unexpected type %T", x) + } + return nil +} + +func _ServerReflectionRequest_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ServerReflectionRequest) + switch tag { + case 3: // message_request.file_by_filename + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.MessageRequest = &ServerReflectionRequest_FileByFilename{x} + return true, err + case 4: // message_request.file_containing_symbol + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.MessageRequest = &ServerReflectionRequest_FileContainingSymbol{x} + return true, err + case 5: // message_request.file_containing_extension + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := 
new(ExtensionRequest) + err := b.DecodeMessage(msg) + m.MessageRequest = &ServerReflectionRequest_FileContainingExtension{msg} + return true, err + case 6: // message_request.all_extension_numbers_of_type + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.MessageRequest = &ServerReflectionRequest_AllExtensionNumbersOfType{x} + return true, err + case 7: // message_request.list_services + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.MessageRequest = &ServerReflectionRequest_ListServices{x} + return true, err + default: + return false, nil + } +} + +func _ServerReflectionRequest_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ServerReflectionRequest) + // message_request + switch x := m.MessageRequest.(type) { + case *ServerReflectionRequest_FileByFilename: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FileByFilename))) + n += len(x.FileByFilename) + case *ServerReflectionRequest_FileContainingSymbol: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.FileContainingSymbol))) + n += len(x.FileContainingSymbol) + case *ServerReflectionRequest_FileContainingExtension: + s := proto.Size(x.FileContainingExtension) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ServerReflectionRequest_AllExtensionNumbersOfType: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AllExtensionNumbersOfType))) + n += len(x.AllExtensionNumbersOfType) + case *ServerReflectionRequest_ListServices: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.ListServices))) + n += len(x.ListServices) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// The type name and extension number sent by the client when requesting +// file_containing_extension. +type ExtensionRequest struct { + // Fully-qualified type name. The format should be . 
+ ContainingType string `protobuf:"bytes,1,opt,name=containing_type,json=containingType,proto3" json:"containing_type,omitempty"` + ExtensionNumber int32 `protobuf:"varint,2,opt,name=extension_number,json=extensionNumber,proto3" json:"extension_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionRequest) Reset() { *m = ExtensionRequest{} } +func (m *ExtensionRequest) String() string { return proto.CompactTextString(m) } +func (*ExtensionRequest) ProtoMessage() {} +func (*ExtensionRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{1} +} +func (m *ExtensionRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionRequest.Unmarshal(m, b) +} +func (m *ExtensionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionRequest.Marshal(b, m, deterministic) +} +func (dst *ExtensionRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionRequest.Merge(dst, src) +} +func (m *ExtensionRequest) XXX_Size() int { + return xxx_messageInfo_ExtensionRequest.Size(m) +} +func (m *ExtensionRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionRequest proto.InternalMessageInfo + +func (m *ExtensionRequest) GetContainingType() string { + if m != nil { + return m.ContainingType + } + return "" +} + +func (m *ExtensionRequest) GetExtensionNumber() int32 { + if m != nil { + return m.ExtensionNumber + } + return 0 +} + +// The message sent by the server to answer ServerReflectionInfo method. +type ServerReflectionResponse struct { + ValidHost string `protobuf:"bytes,1,opt,name=valid_host,json=validHost,proto3" json:"valid_host,omitempty"` + OriginalRequest *ServerReflectionRequest `protobuf:"bytes,2,opt,name=original_request,json=originalRequest,proto3" json:"original_request,omitempty"` + // The server sets one of the following fields according to the + // message_request in the request. 
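Each request selects exactly one message_request variant by assigning the corresponding generated wrapper struct. For example, asking which file defines a given symbol; the symbol below is a placeholder, and rpb abbreviates the google.golang.org/grpc/reflection/grpc_reflection_v1alpha import:

req := &rpb.ServerReflectionRequest{
	Host: "", // typically left empty
	MessageRequest: &rpb.ServerReflectionRequest_FileContainingSymbol{
		FileContainingSymbol: "grpc.health.v1.Health", // placeholder symbol
	},
}
_ = req // sent over the ServerReflectionInfo stream shown further below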
+ // + // Types that are valid to be assigned to MessageResponse: + // *ServerReflectionResponse_FileDescriptorResponse + // *ServerReflectionResponse_AllExtensionNumbersResponse + // *ServerReflectionResponse_ListServicesResponse + // *ServerReflectionResponse_ErrorResponse + MessageResponse isServerReflectionResponse_MessageResponse `protobuf_oneof:"message_response"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServerReflectionResponse) Reset() { *m = ServerReflectionResponse{} } +func (m *ServerReflectionResponse) String() string { return proto.CompactTextString(m) } +func (*ServerReflectionResponse) ProtoMessage() {} +func (*ServerReflectionResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{2} +} +func (m *ServerReflectionResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServerReflectionResponse.Unmarshal(m, b) +} +func (m *ServerReflectionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServerReflectionResponse.Marshal(b, m, deterministic) +} +func (dst *ServerReflectionResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServerReflectionResponse.Merge(dst, src) +} +func (m *ServerReflectionResponse) XXX_Size() int { + return xxx_messageInfo_ServerReflectionResponse.Size(m) +} +func (m *ServerReflectionResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ServerReflectionResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ServerReflectionResponse proto.InternalMessageInfo + +func (m *ServerReflectionResponse) GetValidHost() string { + if m != nil { + return m.ValidHost + } + return "" +} + +func (m *ServerReflectionResponse) GetOriginalRequest() *ServerReflectionRequest { + if m != nil { + return m.OriginalRequest + } + return nil +} + +type isServerReflectionResponse_MessageResponse interface { + isServerReflectionResponse_MessageResponse() +} + +type ServerReflectionResponse_FileDescriptorResponse struct { + FileDescriptorResponse *FileDescriptorResponse `protobuf:"bytes,4,opt,name=file_descriptor_response,json=fileDescriptorResponse,proto3,oneof"` +} + +type ServerReflectionResponse_AllExtensionNumbersResponse struct { + AllExtensionNumbersResponse *ExtensionNumberResponse `protobuf:"bytes,5,opt,name=all_extension_numbers_response,json=allExtensionNumbersResponse,proto3,oneof"` +} + +type ServerReflectionResponse_ListServicesResponse struct { + ListServicesResponse *ListServiceResponse `protobuf:"bytes,6,opt,name=list_services_response,json=listServicesResponse,proto3,oneof"` +} + +type ServerReflectionResponse_ErrorResponse struct { + ErrorResponse *ErrorResponse `protobuf:"bytes,7,opt,name=error_response,json=errorResponse,proto3,oneof"` +} + +func (*ServerReflectionResponse_FileDescriptorResponse) isServerReflectionResponse_MessageResponse() {} + +func (*ServerReflectionResponse_AllExtensionNumbersResponse) isServerReflectionResponse_MessageResponse() { +} + +func (*ServerReflectionResponse_ListServicesResponse) isServerReflectionResponse_MessageResponse() {} + +func (*ServerReflectionResponse_ErrorResponse) isServerReflectionResponse_MessageResponse() {} + +func (m *ServerReflectionResponse) GetMessageResponse() isServerReflectionResponse_MessageResponse { + if m != nil { + return m.MessageResponse + } + return nil +} + +func (m *ServerReflectionResponse) GetFileDescriptorResponse() *FileDescriptorResponse { + if x, ok := 
m.GetMessageResponse().(*ServerReflectionResponse_FileDescriptorResponse); ok { + return x.FileDescriptorResponse + } + return nil +} + +func (m *ServerReflectionResponse) GetAllExtensionNumbersResponse() *ExtensionNumberResponse { + if x, ok := m.GetMessageResponse().(*ServerReflectionResponse_AllExtensionNumbersResponse); ok { + return x.AllExtensionNumbersResponse + } + return nil +} + +func (m *ServerReflectionResponse) GetListServicesResponse() *ListServiceResponse { + if x, ok := m.GetMessageResponse().(*ServerReflectionResponse_ListServicesResponse); ok { + return x.ListServicesResponse + } + return nil +} + +func (m *ServerReflectionResponse) GetErrorResponse() *ErrorResponse { + if x, ok := m.GetMessageResponse().(*ServerReflectionResponse_ErrorResponse); ok { + return x.ErrorResponse + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ServerReflectionResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ServerReflectionResponse_OneofMarshaler, _ServerReflectionResponse_OneofUnmarshaler, _ServerReflectionResponse_OneofSizer, []interface{}{ + (*ServerReflectionResponse_FileDescriptorResponse)(nil), + (*ServerReflectionResponse_AllExtensionNumbersResponse)(nil), + (*ServerReflectionResponse_ListServicesResponse)(nil), + (*ServerReflectionResponse_ErrorResponse)(nil), + } +} + +func _ServerReflectionResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ServerReflectionResponse) + // message_response + switch x := m.MessageResponse.(type) { + case *ServerReflectionResponse_FileDescriptorResponse: + b.EncodeVarint(4<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.FileDescriptorResponse); err != nil { + return err + } + case *ServerReflectionResponse_AllExtensionNumbersResponse: + b.EncodeVarint(5<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.AllExtensionNumbersResponse); err != nil { + return err + } + case *ServerReflectionResponse_ListServicesResponse: + b.EncodeVarint(6<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ListServicesResponse); err != nil { + return err + } + case *ServerReflectionResponse_ErrorResponse: + b.EncodeVarint(7<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ErrorResponse); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("ServerReflectionResponse.MessageResponse has unexpected type %T", x) + } + return nil +} + +func _ServerReflectionResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ServerReflectionResponse) + switch tag { + case 4: // message_response.file_descriptor_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(FileDescriptorResponse) + err := b.DecodeMessage(msg) + m.MessageResponse = &ServerReflectionResponse_FileDescriptorResponse{msg} + return true, err + case 5: // message_response.all_extension_numbers_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ExtensionNumberResponse) + err := b.DecodeMessage(msg) + m.MessageResponse = &ServerReflectionResponse_AllExtensionNumbersResponse{msg} + return true, err + case 6: // message_response.list_services_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ListServiceResponse) + err := b.DecodeMessage(msg) + m.MessageResponse = 
&ServerReflectionResponse_ListServicesResponse{msg} + return true, err + case 7: // message_response.error_response + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(ErrorResponse) + err := b.DecodeMessage(msg) + m.MessageResponse = &ServerReflectionResponse_ErrorResponse{msg} + return true, err + default: + return false, nil + } +} + +func _ServerReflectionResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ServerReflectionResponse) + // message_response + switch x := m.MessageResponse.(type) { + case *ServerReflectionResponse_FileDescriptorResponse: + s := proto.Size(x.FileDescriptorResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ServerReflectionResponse_AllExtensionNumbersResponse: + s := proto.Size(x.AllExtensionNumbersResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ServerReflectionResponse_ListServicesResponse: + s := proto.Size(x.ListServicesResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case *ServerReflectionResponse_ErrorResponse: + s := proto.Size(x.ErrorResponse) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Serialized FileDescriptorProto messages sent by the server answering +// a file_by_filename, file_containing_symbol, or file_containing_extension +// request. +type FileDescriptorResponse struct { + // Serialized FileDescriptorProto messages. We avoid taking a dependency on + // descriptor.proto, which uses proto2 only features, by making them opaque + // bytes instead. + FileDescriptorProto [][]byte `protobuf:"bytes,1,rep,name=file_descriptor_proto,json=fileDescriptorProto,proto3" json:"file_descriptor_proto,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *FileDescriptorResponse) Reset() { *m = FileDescriptorResponse{} } +func (m *FileDescriptorResponse) String() string { return proto.CompactTextString(m) } +func (*FileDescriptorResponse) ProtoMessage() {} +func (*FileDescriptorResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{3} +} +func (m *FileDescriptorResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_FileDescriptorResponse.Unmarshal(m, b) +} +func (m *FileDescriptorResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_FileDescriptorResponse.Marshal(b, m, deterministic) +} +func (dst *FileDescriptorResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_FileDescriptorResponse.Merge(dst, src) +} +func (m *FileDescriptorResponse) XXX_Size() int { + return xxx_messageInfo_FileDescriptorResponse.Size(m) +} +func (m *FileDescriptorResponse) XXX_DiscardUnknown() { + xxx_messageInfo_FileDescriptorResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_FileDescriptorResponse proto.InternalMessageInfo + +func (m *FileDescriptorResponse) GetFileDescriptorProto() [][]byte { + if m != nil { + return m.FileDescriptorProto + } + return nil +} + +// A list of extension numbers sent by the server answering +// all_extension_numbers_of_type request. +type ExtensionNumberResponse struct { + // Full name of the base type, including the package name. The format + // is . 
+ BaseTypeName string `protobuf:"bytes,1,opt,name=base_type_name,json=baseTypeName,proto3" json:"base_type_name,omitempty"` + ExtensionNumber []int32 `protobuf:"varint,2,rep,packed,name=extension_number,json=extensionNumber,proto3" json:"extension_number,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ExtensionNumberResponse) Reset() { *m = ExtensionNumberResponse{} } +func (m *ExtensionNumberResponse) String() string { return proto.CompactTextString(m) } +func (*ExtensionNumberResponse) ProtoMessage() {} +func (*ExtensionNumberResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{4} +} +func (m *ExtensionNumberResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ExtensionNumberResponse.Unmarshal(m, b) +} +func (m *ExtensionNumberResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ExtensionNumberResponse.Marshal(b, m, deterministic) +} +func (dst *ExtensionNumberResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ExtensionNumberResponse.Merge(dst, src) +} +func (m *ExtensionNumberResponse) XXX_Size() int { + return xxx_messageInfo_ExtensionNumberResponse.Size(m) +} +func (m *ExtensionNumberResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ExtensionNumberResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ExtensionNumberResponse proto.InternalMessageInfo + +func (m *ExtensionNumberResponse) GetBaseTypeName() string { + if m != nil { + return m.BaseTypeName + } + return "" +} + +func (m *ExtensionNumberResponse) GetExtensionNumber() []int32 { + if m != nil { + return m.ExtensionNumber + } + return nil +} + +// A list of ServiceResponse sent by the server answering list_services request. +type ListServiceResponse struct { + // The information of each service may be expanded in the future, so we use + // ServiceResponse message to encapsulate it. + Service []*ServiceResponse `protobuf:"bytes,1,rep,name=service,proto3" json:"service,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ListServiceResponse) Reset() { *m = ListServiceResponse{} } +func (m *ListServiceResponse) String() string { return proto.CompactTextString(m) } +func (*ListServiceResponse) ProtoMessage() {} +func (*ListServiceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{5} +} +func (m *ListServiceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ListServiceResponse.Unmarshal(m, b) +} +func (m *ListServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ListServiceResponse.Marshal(b, m, deterministic) +} +func (dst *ListServiceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ListServiceResponse.Merge(dst, src) +} +func (m *ListServiceResponse) XXX_Size() int { + return xxx_messageInfo_ListServiceResponse.Size(m) +} +func (m *ListServiceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ListServiceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ListServiceResponse proto.InternalMessageInfo + +func (m *ListServiceResponse) GetService() []*ServiceResponse { + if m != nil { + return m.Service + } + return nil +} + +// The information of a single service used by ListServiceResponse to answer +// list_services request. +type ServiceResponse struct { + // Full name of a registered service, including its package name. 
The format + // is . + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ServiceResponse) Reset() { *m = ServiceResponse{} } +func (m *ServiceResponse) String() string { return proto.CompactTextString(m) } +func (*ServiceResponse) ProtoMessage() {} +func (*ServiceResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{6} +} +func (m *ServiceResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ServiceResponse.Unmarshal(m, b) +} +func (m *ServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ServiceResponse.Marshal(b, m, deterministic) +} +func (dst *ServiceResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ServiceResponse.Merge(dst, src) +} +func (m *ServiceResponse) XXX_Size() int { + return xxx_messageInfo_ServiceResponse.Size(m) +} +func (m *ServiceResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ServiceResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ServiceResponse proto.InternalMessageInfo + +func (m *ServiceResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +// The error code and error message sent by the server when an error occurs. +type ErrorResponse struct { + // This field uses the error codes defined in grpc::StatusCode. + ErrorCode int32 `protobuf:"varint,1,opt,name=error_code,json=errorCode,proto3" json:"error_code,omitempty"` + ErrorMessage string `protobuf:"bytes,2,opt,name=error_message,json=errorMessage,proto3" json:"error_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ErrorResponse) Reset() { *m = ErrorResponse{} } +func (m *ErrorResponse) String() string { return proto.CompactTextString(m) } +func (*ErrorResponse) ProtoMessage() {} +func (*ErrorResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_reflection_178bd1e101bf8b63, []int{7} +} +func (m *ErrorResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ErrorResponse.Unmarshal(m, b) +} +func (m *ErrorResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ErrorResponse.Marshal(b, m, deterministic) +} +func (dst *ErrorResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ErrorResponse.Merge(dst, src) +} +func (m *ErrorResponse) XXX_Size() int { + return xxx_messageInfo_ErrorResponse.Size(m) +} +func (m *ErrorResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ErrorResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ErrorResponse proto.InternalMessageInfo + +func (m *ErrorResponse) GetErrorCode() int32 { + if m != nil { + return m.ErrorCode + } + return 0 +} + +func (m *ErrorResponse) GetErrorMessage() string { + if m != nil { + return m.ErrorMessage + } + return "" +} + +func init() { + proto.RegisterType((*ServerReflectionRequest)(nil), "grpc.reflection.v1alpha.ServerReflectionRequest") + proto.RegisterType((*ExtensionRequest)(nil), "grpc.reflection.v1alpha.ExtensionRequest") + proto.RegisterType((*ServerReflectionResponse)(nil), "grpc.reflection.v1alpha.ServerReflectionResponse") + proto.RegisterType((*FileDescriptorResponse)(nil), "grpc.reflection.v1alpha.FileDescriptorResponse") + proto.RegisterType((*ExtensionNumberResponse)(nil), "grpc.reflection.v1alpha.ExtensionNumberResponse") + proto.RegisterType((*ListServiceResponse)(nil), 
"grpc.reflection.v1alpha.ListServiceResponse") + proto.RegisterType((*ServiceResponse)(nil), "grpc.reflection.v1alpha.ServiceResponse") + proto.RegisterType((*ErrorResponse)(nil), "grpc.reflection.v1alpha.ErrorResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// ServerReflectionClient is the client API for ServerReflection service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type ServerReflectionClient interface { + // The reflection service is structured as a bidirectional stream, ensuring + // all related requests go to a single server. + ServerReflectionInfo(ctx context.Context, opts ...grpc.CallOption) (ServerReflection_ServerReflectionInfoClient, error) +} + +type serverReflectionClient struct { + cc *grpc.ClientConn +} + +func NewServerReflectionClient(cc *grpc.ClientConn) ServerReflectionClient { + return &serverReflectionClient{cc} +} + +func (c *serverReflectionClient) ServerReflectionInfo(ctx context.Context, opts ...grpc.CallOption) (ServerReflection_ServerReflectionInfoClient, error) { + stream, err := c.cc.NewStream(ctx, &_ServerReflection_serviceDesc.Streams[0], "/grpc.reflection.v1alpha.ServerReflection/ServerReflectionInfo", opts...) + if err != nil { + return nil, err + } + x := &serverReflectionServerReflectionInfoClient{stream} + return x, nil +} + +type ServerReflection_ServerReflectionInfoClient interface { + Send(*ServerReflectionRequest) error + Recv() (*ServerReflectionResponse, error) + grpc.ClientStream +} + +type serverReflectionServerReflectionInfoClient struct { + grpc.ClientStream +} + +func (x *serverReflectionServerReflectionInfoClient) Send(m *ServerReflectionRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *serverReflectionServerReflectionInfoClient) Recv() (*ServerReflectionResponse, error) { + m := new(ServerReflectionResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// ServerReflectionServer is the server API for ServerReflection service. +type ServerReflectionServer interface { + // The reflection service is structured as a bidirectional stream, ensuring + // all related requests go to a single server. 
+ ServerReflectionInfo(ServerReflection_ServerReflectionInfoServer) error +} + +func RegisterServerReflectionServer(s *grpc.Server, srv ServerReflectionServer) { + s.RegisterService(&_ServerReflection_serviceDesc, srv) +} + +func _ServerReflection_ServerReflectionInfo_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(ServerReflectionServer).ServerReflectionInfo(&serverReflectionServerReflectionInfoServer{stream}) +} + +type ServerReflection_ServerReflectionInfoServer interface { + Send(*ServerReflectionResponse) error + Recv() (*ServerReflectionRequest, error) + grpc.ServerStream +} + +type serverReflectionServerReflectionInfoServer struct { + grpc.ServerStream +} + +func (x *serverReflectionServerReflectionInfoServer) Send(m *ServerReflectionResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *serverReflectionServerReflectionInfoServer) Recv() (*ServerReflectionRequest, error) { + m := new(ServerReflectionRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _ServerReflection_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.reflection.v1alpha.ServerReflection", + HandlerType: (*ServerReflectionServer)(nil), + Methods: []grpc.MethodDesc{}, + Streams: []grpc.StreamDesc{ + { + StreamName: "ServerReflectionInfo", + Handler: _ServerReflection_ServerReflectionInfo_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc_reflection_v1alpha/reflection.proto", +} + +func init() { + proto.RegisterFile("grpc_reflection_v1alpha/reflection.proto", fileDescriptor_reflection_178bd1e101bf8b63) +} + +var fileDescriptor_reflection_178bd1e101bf8b63 = []byte{ + // 656 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x73, 0xd2, 0x40, + 0x10, 0x6e, 0x5a, 0x68, 0x87, 0x85, 0x02, 0x5e, 0x2b, 0xa4, 0x3a, 0x75, 0x98, 0x68, 0x35, 0x75, + 0x1c, 0xda, 0xe2, 0x8c, 0x3f, 0x80, 0xaa, 0x83, 0x33, 0xb5, 0x75, 0x0e, 0x5f, 0x1c, 0x1f, 0x6e, + 0x02, 0x2c, 0x34, 0x1a, 0x72, 0xf1, 0x2e, 0x45, 0x79, 0xf2, 0x47, 0xf8, 0xa3, 0xfc, 0x4b, 0x3e, + 0x3a, 0x77, 0x09, 0x21, 0xa4, 0x44, 0xa7, 0x4f, 0x30, 0xdf, 0xee, 0xde, 0xb7, 0xbb, 0xdf, 0xb7, + 0x01, 0x7b, 0x22, 0x82, 0x21, 0x13, 0x38, 0xf6, 0x70, 0x18, 0xba, 0xdc, 0x67, 0xb3, 0x33, 0xc7, + 0x0b, 0xae, 0x9d, 0x93, 0x25, 0xd4, 0x0e, 0x04, 0x0f, 0x39, 0x69, 0xaa, 0xcc, 0x76, 0x0a, 0x8e, + 0x33, 0xad, 0x3f, 0x9b, 0xd0, 0xec, 0xa3, 0x98, 0xa1, 0xa0, 0x49, 0x90, 0xe2, 0xb7, 0x1b, 0x94, + 0x21, 0x21, 0x50, 0xb8, 0xe6, 0x32, 0x34, 0x8d, 0x96, 0x61, 0x97, 0xa8, 0xfe, 0x4f, 0x9e, 0x43, + 0x7d, 0xec, 0x7a, 0xc8, 0x06, 0x73, 0xa6, 0x7e, 0x7d, 0x67, 0x8a, 0xe6, 0x96, 0x8a, 0xf7, 0x36, + 0x68, 0x55, 0x21, 0xdd, 0xf9, 0xdb, 0x18, 0x27, 0xaf, 0xa0, 0xa1, 0x73, 0x87, 0xdc, 0x0f, 0x1d, + 0xd7, 0x77, 0xfd, 0x09, 0x93, 0xf3, 0xe9, 0x80, 0x7b, 0x66, 0x21, 0xae, 0xd8, 0x57, 0xf1, 0xf3, + 0x24, 0xdc, 0xd7, 0x51, 0x32, 0x81, 0x83, 0x6c, 0x1d, 0xfe, 0x08, 0xd1, 0x97, 0x2e, 0xf7, 0xcd, + 0x62, 0xcb, 0xb0, 0xcb, 0x9d, 0xe3, 0x76, 0xce, 0x40, 0xed, 0x37, 0x8b, 0xcc, 0x78, 0x8a, 0xde, + 0x06, 0x6d, 0xae, 0xb2, 0x24, 0x19, 0xa4, 0x0b, 0x87, 0x8e, 0xe7, 0x2d, 0x1f, 0x67, 0xfe, 0xcd, + 0x74, 0x80, 0x42, 0x32, 0x3e, 0x66, 0xe1, 0x3c, 0x40, 0x73, 0x3b, 0xee, 0xf3, 0xc0, 0xf1, 0xbc, + 0xa4, 0xec, 0x32, 0x4a, 0xba, 0x1a, 0x7f, 0x9c, 0x07, 0x48, 0x8e, 0x60, 0xd7, 0x73, 0x65, 0xc8, + 0x24, 0x8a, 0x99, 0x3b, 0x44, 0x69, 0xee, 0xc4, 0x35, 0x15, 0x05, 0xf7, 0x63, 0xb4, 0x7b, 0x0f, + 0x6a, 0x53, 0x94, 0xd2, 0x99, 0x20, 0x13, 0x51, 
0x63, 0xd6, 0x18, 0xea, 0xd9, 0x66, 0xc9, 0x33, + 0xa8, 0xa5, 0xa6, 0xd6, 0x3d, 0x44, 0xdb, 0xaf, 0x2e, 0x61, 0x4d, 0x7b, 0x0c, 0xf5, 0x6c, 0xdb, + 0xe6, 0x66, 0xcb, 0xb0, 0x8b, 0xb4, 0x86, 0xab, 0x8d, 0x5a, 0xbf, 0x0b, 0x60, 0xde, 0x96, 0x58, + 0x06, 0xdc, 0x97, 0x48, 0x0e, 0x01, 0x66, 0x8e, 0xe7, 0x8e, 0x58, 0x4a, 0xe9, 0x92, 0x46, 0x7a, + 0x4a, 0xee, 0xcf, 0x50, 0xe7, 0xc2, 0x9d, 0xb8, 0xbe, 0xe3, 0x2d, 0xfa, 0xd6, 0x34, 0xe5, 0xce, + 0x69, 0xae, 0x02, 0x39, 0x76, 0xa2, 0xb5, 0xc5, 0x4b, 0x8b, 0x61, 0xbf, 0x82, 0xa9, 0x75, 0x1e, + 0xa1, 0x1c, 0x0a, 0x37, 0x08, 0xb9, 0x60, 0x22, 0xee, 0x4b, 0x3b, 0xa4, 0xdc, 0x39, 0xc9, 0x25, + 0x51, 0x26, 0x7b, 0x9d, 0xd4, 0x2d, 0xc6, 0xe9, 0x6d, 0x50, 0x6d, 0xb9, 0xdb, 0x11, 0xf2, 0x1d, + 0x1e, 0xad, 0xd7, 0x3a, 0xa1, 0x2c, 0xfe, 0x67, 0xae, 0x8c, 0x01, 0x52, 0x9c, 0x0f, 0xd7, 0xd8, + 0x23, 0x21, 0x1e, 0x41, 0x63, 0xc5, 0x20, 0x4b, 0xc2, 0x6d, 0x4d, 0xf8, 0x22, 0x97, 0xf0, 0x62, + 0x69, 0xa0, 0x14, 0xd9, 0x7e, 0xda, 0x57, 0x09, 0xcb, 0x15, 0x54, 0x51, 0x88, 0xf4, 0x06, 0x77, + 0xf4, 0xeb, 0x4f, 0xf3, 0xc7, 0x51, 0xe9, 0xa9, 0x77, 0x77, 0x31, 0x0d, 0x74, 0x09, 0xd4, 0x97, + 0x86, 0x8d, 0x30, 0xeb, 0x02, 0x1a, 0xeb, 0xf7, 0x4e, 0x3a, 0x70, 0x3f, 0x2b, 0xa5, 0xfe, 0xf0, + 0x98, 0x46, 0x6b, 0xcb, 0xae, 0xd0, 0xbd, 0x55, 0x51, 0x3e, 0xa8, 0x90, 0xf5, 0x05, 0x9a, 0x39, + 0x2b, 0x25, 0x4f, 0xa0, 0x3a, 0x70, 0x24, 0xea, 0x03, 0x60, 0xfa, 0x1b, 0x13, 0x39, 0xb3, 0xa2, + 0x50, 0xe5, 0xff, 0x4b, 0xf5, 0x7d, 0x59, 0x7f, 0x03, 0x5b, 0xeb, 0x6e, 0xe0, 0x13, 0xec, 0xad, + 0xd9, 0x26, 0xe9, 0xc2, 0x4e, 0x2c, 0x8b, 0x6e, 0xb4, 0xdc, 0xb1, 0xff, 0xe9, 0xea, 0x54, 0x29, + 0x5d, 0x14, 0x5a, 0x47, 0x50, 0xcb, 0x3e, 0x4b, 0xa0, 0x90, 0x6a, 0x5a, 0xff, 0xb7, 0xfa, 0xb0, + 0xbb, 0xb2, 0x71, 0x75, 0x79, 0x91, 0x62, 0x43, 0x3e, 0x8a, 0x52, 0x8b, 0xb4, 0xa4, 0x91, 0x73, + 0x3e, 0x42, 0xf2, 0x18, 0x22, 0x41, 0x58, 0xac, 0x82, 0x3e, 0xbb, 0x12, 0xad, 0x68, 0xf0, 0x7d, + 0x84, 0x75, 0x7e, 0x19, 0x50, 0xcf, 0x9e, 0x1b, 0xf9, 0x09, 0xfb, 0x59, 0xec, 0x9d, 0x3f, 0xe6, + 0xe4, 0xce, 0x17, 0xfb, 0xe0, 0xec, 0x0e, 0x15, 0xd1, 0x54, 0xb6, 0x71, 0x6a, 0x0c, 0xb6, 0xb5, + 0xf4, 0x2f, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x85, 0x02, 0x09, 0x9d, 0x9f, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2.pb.go b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2.pb.go new file mode 100644 index 0000000..4a95d9b --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2.pb.go @@ -0,0 +1,82 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: proto2.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
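On the server side, the ServerReflection service above is normally not registered by hand; the parent google.golang.org/grpc/reflection package (assumed to be vendored alongside these files) wraps RegisterServerReflectionServer in a one-line helper. A hedged sketch:

package example

import (
	"google.golang.org/grpc"
	"google.golang.org/grpc/reflection"
)

func newServer() *grpc.Server {
	s := grpc.NewServer()
	// ... register the real application services on s here ...
	reflection.Register(s) // exposes grpc.reflection.v1alpha.ServerReflection
	return s
}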
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type ToBeExtended struct { + Foo *int32 `protobuf:"varint,1,req,name=foo" json:"foo,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + proto.XXX_InternalExtensions `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ToBeExtended) Reset() { *m = ToBeExtended{} } +func (m *ToBeExtended) String() string { return proto.CompactTextString(m) } +func (*ToBeExtended) ProtoMessage() {} +func (*ToBeExtended) Descriptor() ([]byte, []int) { + return fileDescriptor_proto2_b16f7a513d0acdc0, []int{0} +} + +var extRange_ToBeExtended = []proto.ExtensionRange{ + {Start: 10, End: 30}, +} + +func (*ToBeExtended) ExtensionRangeArray() []proto.ExtensionRange { + return extRange_ToBeExtended +} +func (m *ToBeExtended) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ToBeExtended.Unmarshal(m, b) +} +func (m *ToBeExtended) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ToBeExtended.Marshal(b, m, deterministic) +} +func (dst *ToBeExtended) XXX_Merge(src proto.Message) { + xxx_messageInfo_ToBeExtended.Merge(dst, src) +} +func (m *ToBeExtended) XXX_Size() int { + return xxx_messageInfo_ToBeExtended.Size(m) +} +func (m *ToBeExtended) XXX_DiscardUnknown() { + xxx_messageInfo_ToBeExtended.DiscardUnknown(m) +} + +var xxx_messageInfo_ToBeExtended proto.InternalMessageInfo + +func (m *ToBeExtended) GetFoo() int32 { + if m != nil && m.Foo != nil { + return *m.Foo + } + return 0 +} + +func init() { + proto.RegisterType((*ToBeExtended)(nil), "grpc.testing.ToBeExtended") +} + +func init() { proto.RegisterFile("proto2.proto", fileDescriptor_proto2_b16f7a513d0acdc0) } + +var fileDescriptor_proto2_b16f7a513d0acdc0 = []byte{ + // 86 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0x28, 0xca, 0x2f, + 0xc9, 0x37, 0xd2, 0x03, 0x53, 0x42, 0x3c, 0xe9, 0x45, 0x05, 0xc9, 0x7a, 0x25, 0xa9, 0xc5, 0x25, + 0x99, 0x79, 0xe9, 0x4a, 0x6a, 0x5c, 0x3c, 0x21, 0xf9, 0x4e, 0xa9, 0xae, 0x15, 0x25, 0xa9, 0x79, + 0x29, 0xa9, 0x29, 0x42, 0x02, 0x5c, 0xcc, 0x69, 0xf9, 0xf9, 0x12, 0x8c, 0x0a, 0x4c, 0x1a, 0xac, + 0x41, 0x20, 0xa6, 0x16, 0x0b, 0x07, 0x97, 0x80, 0x3c, 0x20, 0x00, 0x00, 0xff, 0xff, 0x74, 0x86, + 0x9c, 0x08, 0x44, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext.pb.go b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext.pb.go new file mode 100644 index 0000000..25baa1a --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext.pb.go @@ -0,0 +1,109 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: proto2_ext.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
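ToBeExtended above reserves field numbers 10 through 30 for proto2 extensions; the extension descriptors that target that range (E_Foo, E_Bar, E_Baz below, plus E_Frob and E_Nitz in the next file) give the reflection tests something to enumerate. With the pre-APIv2 golang/protobuf runtime vendored here, such extensions are written and read through proto.SetExtension and proto.GetExtension; a sketch, with the function name and values being illustrative:

package example

import (
	"fmt"
	"log"

	proto "github.com/golang/protobuf/proto"
	pb "google.golang.org/grpc/reflection/grpc_testing"
)

func extensionDemo() {
	msg := &pb.ToBeExtended{Foo: proto.Int32(1)} // proto2: scalar fields are pointers
	if err := proto.SetExtension(msg, pb.E_Foo, proto.Int32(42)); err != nil {
		log.Fatal(err)
	}
	v, err := proto.GetExtension(msg, pb.E_Foo)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(*v.(*int32)) // 42
}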
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Extension struct { + Whatzit *int32 `protobuf:"varint,1,opt,name=whatzit" json:"whatzit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Extension) Reset() { *m = Extension{} } +func (m *Extension) String() string { return proto.CompactTextString(m) } +func (*Extension) ProtoMessage() {} +func (*Extension) Descriptor() ([]byte, []int) { + return fileDescriptor_proto2_ext_4437118420d604f2, []int{0} +} +func (m *Extension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Extension.Unmarshal(m, b) +} +func (m *Extension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Extension.Marshal(b, m, deterministic) +} +func (dst *Extension) XXX_Merge(src proto.Message) { + xxx_messageInfo_Extension.Merge(dst, src) +} +func (m *Extension) XXX_Size() int { + return xxx_messageInfo_Extension.Size(m) +} +func (m *Extension) XXX_DiscardUnknown() { + xxx_messageInfo_Extension.DiscardUnknown(m) +} + +var xxx_messageInfo_Extension proto.InternalMessageInfo + +func (m *Extension) GetWhatzit() int32 { + if m != nil && m.Whatzit != nil { + return *m.Whatzit + } + return 0 +} + +var E_Foo = &proto.ExtensionDesc{ + ExtendedType: (*ToBeExtended)(nil), + ExtensionType: (*int32)(nil), + Field: 13, + Name: "grpc.testing.foo", + Tag: "varint,13,opt,name=foo", + Filename: "proto2_ext.proto", +} + +var E_Bar = &proto.ExtensionDesc{ + ExtendedType: (*ToBeExtended)(nil), + ExtensionType: (*Extension)(nil), + Field: 17, + Name: "grpc.testing.bar", + Tag: "bytes,17,opt,name=bar", + Filename: "proto2_ext.proto", +} + +var E_Baz = &proto.ExtensionDesc{ + ExtendedType: (*ToBeExtended)(nil), + ExtensionType: (*SearchRequest)(nil), + Field: 19, + Name: "grpc.testing.baz", + Tag: "bytes,19,opt,name=baz", + Filename: "proto2_ext.proto", +} + +func init() { + proto.RegisterType((*Extension)(nil), "grpc.testing.Extension") + proto.RegisterExtension(E_Foo) + proto.RegisterExtension(E_Bar) + proto.RegisterExtension(E_Baz) +} + +func init() { proto.RegisterFile("proto2_ext.proto", fileDescriptor_proto2_ext_4437118420d604f2) } + +var fileDescriptor_proto2_ext_4437118420d604f2 = []byte{ + // 179 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x28, 0x28, 0xca, 0x2f, + 0xc9, 0x37, 0x8a, 0x4f, 0xad, 0x28, 0xd1, 0x03, 0x33, 0x85, 0x78, 0xd2, 0x8b, 0x0a, 0x92, 0xf5, + 0x4a, 0x52, 0x8b, 0x4b, 0x32, 0xf3, 0xd2, 0xa5, 0x78, 0x20, 0xf2, 0x10, 0x39, 0x29, 0x2e, 0x90, + 0x30, 0x84, 0xad, 0xa4, 0xca, 0xc5, 0xe9, 0x5a, 0x51, 0x92, 0x9a, 0x57, 0x9c, 0x99, 0x9f, 0x27, + 0x24, 0xc1, 0xc5, 0x5e, 0x9e, 0x91, 0x58, 0x52, 0x95, 0x59, 0x22, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, + 0x1a, 0x04, 0xe3, 0x5a, 0xe9, 0x70, 0x31, 0xa7, 0xe5, 0xe7, 0x0b, 0x49, 0xe9, 0x21, 0x1b, 0xab, + 0x17, 0x92, 0xef, 0x94, 0x0a, 0xd6, 0x9d, 0x92, 0x9a, 0x22, 0xc1, 0x0b, 0xd6, 0x01, 0x52, 0x66, + 0xe5, 0xca, 0xc5, 0x9c, 0x94, 0x58, 0x84, 0x57, 0xb5, 0xa0, 0x02, 0xa3, 0x06, 0xb7, 0x91, 0x38, + 0xaa, 0x0a, 0xb8, 0x4b, 0x82, 0x40, 0xfa, 0xad, 0x3c, 0x41, 0xc6, 0x54, 0xe1, 0x35, 0x46, 0x18, + 0x6c, 0x8c, 0x34, 0xaa, 0x8a, 0xe0, 0xd4, 0xc4, 0xa2, 0xe4, 0x8c, 0xa0, 0xd4, 0xc2, 0xd2, 0xd4, + 0xe2, 0x12, 0x90, 0x51, 0x55, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x71, 0x6b, 0x94, 0x9f, 0x21, + 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext2.pb.go 
b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext2.pb.go new file mode 100644 index 0000000..869a6ba --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_testing/proto2_ext2.pb.go @@ -0,0 +1,98 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: proto2_ext2.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AnotherExtension struct { + Whatchamacallit *int32 `protobuf:"varint,1,opt,name=whatchamacallit" json:"whatchamacallit,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AnotherExtension) Reset() { *m = AnotherExtension{} } +func (m *AnotherExtension) String() string { return proto.CompactTextString(m) } +func (*AnotherExtension) ProtoMessage() {} +func (*AnotherExtension) Descriptor() ([]byte, []int) { + return fileDescriptor_proto2_ext2_039d342873655470, []int{0} +} +func (m *AnotherExtension) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AnotherExtension.Unmarshal(m, b) +} +func (m *AnotherExtension) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AnotherExtension.Marshal(b, m, deterministic) +} +func (dst *AnotherExtension) XXX_Merge(src proto.Message) { + xxx_messageInfo_AnotherExtension.Merge(dst, src) +} +func (m *AnotherExtension) XXX_Size() int { + return xxx_messageInfo_AnotherExtension.Size(m) +} +func (m *AnotherExtension) XXX_DiscardUnknown() { + xxx_messageInfo_AnotherExtension.DiscardUnknown(m) +} + +var xxx_messageInfo_AnotherExtension proto.InternalMessageInfo + +func (m *AnotherExtension) GetWhatchamacallit() int32 { + if m != nil && m.Whatchamacallit != nil { + return *m.Whatchamacallit + } + return 0 +} + +var E_Frob = &proto.ExtensionDesc{ + ExtendedType: (*ToBeExtended)(nil), + ExtensionType: (*string)(nil), + Field: 23, + Name: "grpc.testing.frob", + Tag: "bytes,23,opt,name=frob", + Filename: "proto2_ext2.proto", +} + +var E_Nitz = &proto.ExtensionDesc{ + ExtendedType: (*ToBeExtended)(nil), + ExtensionType: (*AnotherExtension)(nil), + Field: 29, + Name: "grpc.testing.nitz", + Tag: "bytes,29,opt,name=nitz", + Filename: "proto2_ext2.proto", +} + +func init() { + proto.RegisterType((*AnotherExtension)(nil), "grpc.testing.AnotherExtension") + proto.RegisterExtension(E_Frob) + proto.RegisterExtension(E_Nitz) +} + +func init() { proto.RegisterFile("proto2_ext2.proto", fileDescriptor_proto2_ext2_039d342873655470) } + +var fileDescriptor_proto2_ext2_039d342873655470 = []byte{ + // 165 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x2c, 0x28, 0xca, 0x2f, + 0xc9, 0x37, 0x8a, 0x4f, 0xad, 0x28, 0x31, 0xd2, 0x03, 0xb3, 0x85, 0x78, 0xd2, 0x8b, 0x0a, 0x92, + 0xf5, 0x4a, 0x52, 0x8b, 0x4b, 0x32, 0xf3, 0xd2, 0xa5, 0x78, 0x20, 0x0a, 0x20, 0x72, 0x4a, 0x36, + 0x5c, 0x02, 0x8e, 0x79, 0xf9, 0x25, 0x19, 0xa9, 0x45, 0xae, 0x15, 0x25, 0xa9, 0x79, 0xc5, 0x99, + 0xf9, 0x79, 0x42, 0x1a, 0x5c, 0xfc, 0xe5, 0x19, 0x89, 
0x25, 0xc9, 0x19, 0x89, 0xb9, 0x89, 0xc9, + 0x89, 0x39, 0x39, 0x99, 0x25, 0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0xe8, 0xc2, 0x56, 0x7a, + 0x5c, 0x2c, 0x69, 0x45, 0xf9, 0x49, 0x42, 0x52, 0x7a, 0xc8, 0x56, 0xe8, 0x85, 0xe4, 0x3b, 0xa5, + 0x82, 0x8d, 0x4b, 0x49, 0x4d, 0x91, 0x10, 0x57, 0x60, 0xd4, 0xe0, 0x0c, 0x02, 0xab, 0xb3, 0xf2, + 0xe3, 0x62, 0xc9, 0xcb, 0x2c, 0xa9, 0xc2, 0xab, 0x5e, 0x56, 0x81, 0x51, 0x83, 0xdb, 0x48, 0x0e, + 0x55, 0x05, 0xba, 0x1b, 0x83, 0xc0, 0xe6, 0x00, 0x02, 0x00, 0x00, 0xff, 0xff, 0xf0, 0x7e, 0x0d, + 0x26, 0xed, 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_testing/test.pb.go b/vendor/google.golang.org/grpc/reflection/grpc_testing/test.pb.go new file mode 100644 index 0000000..2566a49 --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_testing/test.pb.go @@ -0,0 +1,319 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: test.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SearchResponse struct { + Results []*SearchResponse_Result `protobuf:"bytes,1,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchResponse) Reset() { *m = SearchResponse{} } +func (m *SearchResponse) String() string { return proto.CompactTextString(m) } +func (*SearchResponse) ProtoMessage() {} +func (*SearchResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_a0c753075da50dd4, []int{0} +} +func (m *SearchResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchResponse.Unmarshal(m, b) +} +func (m *SearchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchResponse.Marshal(b, m, deterministic) +} +func (dst *SearchResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchResponse.Merge(dst, src) +} +func (m *SearchResponse) XXX_Size() int { + return xxx_messageInfo_SearchResponse.Size(m) +} +func (m *SearchResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SearchResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchResponse proto.InternalMessageInfo + +func (m *SearchResponse) GetResults() []*SearchResponse_Result { + if m != nil { + return m.Results + } + return nil +} + +type SearchResponse_Result struct { + Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` + Title string `protobuf:"bytes,2,opt,name=title,proto3" json:"title,omitempty"` + Snippets []string `protobuf:"bytes,3,rep,name=snippets,proto3" json:"snippets,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchResponse_Result) Reset() { *m = SearchResponse_Result{} } +func (m *SearchResponse_Result) String() string { return proto.CompactTextString(m) } +func (*SearchResponse_Result) ProtoMessage() {} +func 
(*SearchResponse_Result) Descriptor() ([]byte, []int) { + return fileDescriptor_test_a0c753075da50dd4, []int{0, 0} +} +func (m *SearchResponse_Result) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchResponse_Result.Unmarshal(m, b) +} +func (m *SearchResponse_Result) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchResponse_Result.Marshal(b, m, deterministic) +} +func (dst *SearchResponse_Result) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchResponse_Result.Merge(dst, src) +} +func (m *SearchResponse_Result) XXX_Size() int { + return xxx_messageInfo_SearchResponse_Result.Size(m) +} +func (m *SearchResponse_Result) XXX_DiscardUnknown() { + xxx_messageInfo_SearchResponse_Result.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchResponse_Result proto.InternalMessageInfo + +func (m *SearchResponse_Result) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *SearchResponse_Result) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *SearchResponse_Result) GetSnippets() []string { + if m != nil { + return m.Snippets + } + return nil +} + +type SearchRequest struct { + Query string `protobuf:"bytes,1,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchRequest) Reset() { *m = SearchRequest{} } +func (m *SearchRequest) String() string { return proto.CompactTextString(m) } +func (*SearchRequest) ProtoMessage() {} +func (*SearchRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_a0c753075da50dd4, []int{1} +} +func (m *SearchRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchRequest.Unmarshal(m, b) +} +func (m *SearchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchRequest.Marshal(b, m, deterministic) +} +func (dst *SearchRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchRequest.Merge(dst, src) +} +func (m *SearchRequest) XXX_Size() int { + return xxx_messageInfo_SearchRequest.Size(m) +} +func (m *SearchRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchRequest proto.InternalMessageInfo + +func (m *SearchRequest) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func init() { + proto.RegisterType((*SearchResponse)(nil), "grpc.testing.SearchResponse") + proto.RegisterType((*SearchResponse_Result)(nil), "grpc.testing.SearchResponse.Result") + proto.RegisterType((*SearchRequest)(nil), "grpc.testing.SearchRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// SearchServiceClient is the client API for SearchService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
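// A minimal usage sketch for this generated client; the target address, the use
// of an insecure transport, the query string and the standard log import are
// illustrative assumptions:
//
//	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
//	if err != nil {
//		log.Fatalf("dial: %v", err)
//	}
//	defer conn.Close()
//	c := NewSearchServiceClient(conn)
//	resp, err := c.Search(context.Background(), &SearchRequest{Query: "lyra"})
//	if err != nil {
//		log.Fatalf("search: %v", err)
//	}
//	for _, r := range resp.GetResults() {
//		log.Println(r.GetUrl(), r.GetTitle())
//	}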
+type SearchServiceClient interface { + Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchResponse, error) + StreamingSearch(ctx context.Context, opts ...grpc.CallOption) (SearchService_StreamingSearchClient, error) +} + +type searchServiceClient struct { + cc *grpc.ClientConn +} + +func NewSearchServiceClient(cc *grpc.ClientConn) SearchServiceClient { + return &searchServiceClient{cc} +} + +func (c *searchServiceClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*SearchResponse, error) { + out := new(SearchResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.SearchService/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *searchServiceClient) StreamingSearch(ctx context.Context, opts ...grpc.CallOption) (SearchService_StreamingSearchClient, error) { + stream, err := c.cc.NewStream(ctx, &_SearchService_serviceDesc.Streams[0], "/grpc.testing.SearchService/StreamingSearch", opts...) + if err != nil { + return nil, err + } + x := &searchServiceStreamingSearchClient{stream} + return x, nil +} + +type SearchService_StreamingSearchClient interface { + Send(*SearchRequest) error + Recv() (*SearchResponse, error) + grpc.ClientStream +} + +type searchServiceStreamingSearchClient struct { + grpc.ClientStream +} + +func (x *searchServiceStreamingSearchClient) Send(m *SearchRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *searchServiceStreamingSearchClient) Recv() (*SearchResponse, error) { + m := new(SearchResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// SearchServiceServer is the server API for SearchService service. +type SearchServiceServer interface { + Search(context.Context, *SearchRequest) (*SearchResponse, error) + StreamingSearch(SearchService_StreamingSearchServer) error +} + +func RegisterSearchServiceServer(s *grpc.Server, srv SearchServiceServer) { + s.RegisterService(&_SearchService_serviceDesc, srv) +} + +func _SearchService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SearchServiceServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.SearchService/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SearchServiceServer).Search(ctx, req.(*SearchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _SearchService_StreamingSearch_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SearchServiceServer).StreamingSearch(&searchServiceStreamingSearchServer{stream}) +} + +type SearchService_StreamingSearchServer interface { + Send(*SearchResponse) error + Recv() (*SearchRequest, error) + grpc.ServerStream +} + +type searchServiceStreamingSearchServer struct { + grpc.ServerStream +} + +func (x *searchServiceStreamingSearchServer) Send(m *SearchResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *searchServiceStreamingSearchServer) Recv() (*SearchRequest, error) { + m := new(SearchRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _SearchService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.SearchService", + HandlerType: (*SearchServiceServer)(nil), + Methods: 
[]grpc.MethodDesc{ + { + MethodName: "Search", + Handler: _SearchService_Search_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingSearch", + Handler: _SearchService_StreamingSearch_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "test.proto", +} + +func init() { proto.RegisterFile("test.proto", fileDescriptor_test_a0c753075da50dd4) } + +var fileDescriptor_test_a0c753075da50dd4 = []byte{ + // 231 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x91, 0xbd, 0x4a, 0xc5, 0x40, + 0x10, 0x85, 0x59, 0x83, 0xd1, 0x3b, 0xfe, 0x32, 0x58, 0x84, 0x68, 0x11, 0xae, 0x08, 0xa9, 0x16, + 0xb9, 0xd6, 0x56, 0xb6, 0x16, 0xb2, 0x79, 0x82, 0x6b, 0x18, 0xe2, 0x42, 0x4c, 0x36, 0x33, 0x13, + 0xc1, 0x87, 0xb1, 0xf5, 0x39, 0x25, 0x59, 0x23, 0x0a, 0x62, 0x63, 0xb7, 0xe7, 0xe3, 0xcc, 0xb7, + 0xbb, 0x0c, 0x80, 0x92, 0xa8, 0x0d, 0xdc, 0x6b, 0x8f, 0x87, 0x0d, 0x87, 0xda, 0x4e, 0xc0, 0x77, + 0xcd, 0xfa, 0xcd, 0xc0, 0x71, 0x45, 0x5b, 0xae, 0x9f, 0x1c, 0x49, 0xe8, 0x3b, 0x21, 0xbc, 0x85, + 0x3d, 0x26, 0x19, 0x5b, 0x95, 0xcc, 0x14, 0x49, 0x79, 0xb0, 0xb9, 0xb4, 0xdf, 0x47, 0xec, 0xcf, + 0xba, 0x75, 0x73, 0xd7, 0x2d, 0x33, 0xf9, 0x3d, 0xa4, 0x11, 0xe1, 0x29, 0x24, 0x23, 0xb7, 0x99, + 0x29, 0x4c, 0xb9, 0x72, 0xd3, 0x11, 0xcf, 0x60, 0x57, 0xbd, 0xb6, 0x94, 0xed, 0xcc, 0x2c, 0x06, + 0xcc, 0x61, 0x5f, 0x3a, 0x1f, 0x02, 0xa9, 0x64, 0x49, 0x91, 0x94, 0x2b, 0xf7, 0x95, 0xd7, 0x57, + 0x70, 0xb4, 0xdc, 0x37, 0x8c, 0x24, 0x3a, 0x29, 0x86, 0x91, 0xf8, 0xf5, 0x53, 0x1b, 0xc3, 0xe6, + 0xdd, 0x2c, 0xbd, 0x8a, 0xf8, 0xc5, 0xd7, 0x84, 0x77, 0x90, 0x46, 0x80, 0xe7, 0xbf, 0x3f, 0x7f, + 0xd6, 0xe5, 0x17, 0x7f, 0xfd, 0x0d, 0x1f, 0xe0, 0xa4, 0x52, 0xa6, 0xed, 0xb3, 0xef, 0x9a, 0x7f, + 0xdb, 0x4a, 0x73, 0x6d, 0x1e, 0xd3, 0x79, 0x09, 0x37, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x20, + 0xd6, 0x09, 0xb8, 0x92, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/grpc_testingv3/testv3.pb.go b/vendor/google.golang.org/grpc/reflection/grpc_testingv3/testv3.pb.go new file mode 100644 index 0000000..767efdd --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/grpc_testingv3/testv3.pb.go @@ -0,0 +1,457 @@ +// Code generated by protoc-gen-go. +// source: testv3.proto +// DO NOT EDIT! + +/* +Package grpc_testingv3 is a generated protocol buffer package. + +It is generated from these files: + testv3.proto + +It has these top-level messages: + SearchResponseV3 + SearchRequestV3 +*/ +package grpc_testingv3 + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SearchResponseV3_State int32 + +const ( + SearchResponseV3_UNKNOWN SearchResponseV3_State = 0 + SearchResponseV3_FRESH SearchResponseV3_State = 1 + SearchResponseV3_STALE SearchResponseV3_State = 2 +) + +var SearchResponseV3_State_name = map[int32]string{ + 0: "UNKNOWN", + 1: "FRESH", + 2: "STALE", +} +var SearchResponseV3_State_value = map[string]int32{ + "UNKNOWN": 0, + "FRESH": 1, + "STALE": 2, +} + +func (x SearchResponseV3_State) String() string { + return proto.EnumName(SearchResponseV3_State_name, int32(x)) +} +func (SearchResponseV3_State) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +type SearchResponseV3 struct { + Results []*SearchResponseV3_Result `protobuf:"bytes,1,rep,name=results" json:"results,omitempty"` + State SearchResponseV3_State `protobuf:"varint,2,opt,name=state,enum=grpc.testingv3.SearchResponseV3_State" json:"state,omitempty"` +} + +func (m *SearchResponseV3) Reset() { *m = SearchResponseV3{} } +func (m *SearchResponseV3) String() string { return proto.CompactTextString(m) } +func (*SearchResponseV3) ProtoMessage() {} +func (*SearchResponseV3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} } + +func (m *SearchResponseV3) GetResults() []*SearchResponseV3_Result { + if m != nil { + return m.Results + } + return nil +} + +func (m *SearchResponseV3) GetState() SearchResponseV3_State { + if m != nil { + return m.State + } + return SearchResponseV3_UNKNOWN +} + +type SearchResponseV3_Result struct { + Url string `protobuf:"bytes,1,opt,name=url" json:"url,omitempty"` + Title string `protobuf:"bytes,2,opt,name=title" json:"title,omitempty"` + Snippets []string `protobuf:"bytes,3,rep,name=snippets" json:"snippets,omitempty"` + Metadata map[string]*SearchResponseV3_Result_Value `protobuf:"bytes,4,rep,name=metadata" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` +} + +func (m *SearchResponseV3_Result) Reset() { *m = SearchResponseV3_Result{} } +func (m *SearchResponseV3_Result) String() string { return proto.CompactTextString(m) } +func (*SearchResponseV3_Result) ProtoMessage() {} +func (*SearchResponseV3_Result) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} } + +func (m *SearchResponseV3_Result) GetUrl() string { + if m != nil { + return m.Url + } + return "" +} + +func (m *SearchResponseV3_Result) GetTitle() string { + if m != nil { + return m.Title + } + return "" +} + +func (m *SearchResponseV3_Result) GetSnippets() []string { + if m != nil { + return m.Snippets + } + return nil +} + +func (m *SearchResponseV3_Result) GetMetadata() map[string]*SearchResponseV3_Result_Value { + if m != nil { + return m.Metadata + } + return nil +} + +type SearchResponseV3_Result_Value struct { + // Types that are valid to be assigned to Val: + // *SearchResponseV3_Result_Value_Str + // *SearchResponseV3_Result_Value_Int + // *SearchResponseV3_Result_Value_Real + Val isSearchResponseV3_Result_Value_Val `protobuf_oneof:"val"` +} + +func (m *SearchResponseV3_Result_Value) Reset() { *m = SearchResponseV3_Result_Value{} } +func (m *SearchResponseV3_Result_Value) String() string { return proto.CompactTextString(m) } +func (*SearchResponseV3_Result_Value) ProtoMessage() {} +func (*SearchResponseV3_Result_Value) Descriptor() ([]byte, []int) { + return fileDescriptor0, []int{0, 0, 0} +} + +type isSearchResponseV3_Result_Value_Val interface { + isSearchResponseV3_Result_Value_Val() +} + +type 
SearchResponseV3_Result_Value_Str struct { + Str string `protobuf:"bytes,1,opt,name=str,oneof"` +} +type SearchResponseV3_Result_Value_Int struct { + Int int64 `protobuf:"varint,2,opt,name=int,oneof"` +} +type SearchResponseV3_Result_Value_Real struct { + Real float64 `protobuf:"fixed64,3,opt,name=real,oneof"` +} + +func (*SearchResponseV3_Result_Value_Str) isSearchResponseV3_Result_Value_Val() {} +func (*SearchResponseV3_Result_Value_Int) isSearchResponseV3_Result_Value_Val() {} +func (*SearchResponseV3_Result_Value_Real) isSearchResponseV3_Result_Value_Val() {} + +func (m *SearchResponseV3_Result_Value) GetVal() isSearchResponseV3_Result_Value_Val { + if m != nil { + return m.Val + } + return nil +} + +func (m *SearchResponseV3_Result_Value) GetStr() string { + if x, ok := m.GetVal().(*SearchResponseV3_Result_Value_Str); ok { + return x.Str + } + return "" +} + +func (m *SearchResponseV3_Result_Value) GetInt() int64 { + if x, ok := m.GetVal().(*SearchResponseV3_Result_Value_Int); ok { + return x.Int + } + return 0 +} + +func (m *SearchResponseV3_Result_Value) GetReal() float64 { + if x, ok := m.GetVal().(*SearchResponseV3_Result_Value_Real); ok { + return x.Real + } + return 0 +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*SearchResponseV3_Result_Value) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _SearchResponseV3_Result_Value_OneofMarshaler, _SearchResponseV3_Result_Value_OneofUnmarshaler, _SearchResponseV3_Result_Value_OneofSizer, []interface{}{ + (*SearchResponseV3_Result_Value_Str)(nil), + (*SearchResponseV3_Result_Value_Int)(nil), + (*SearchResponseV3_Result_Value_Real)(nil), + } +} + +func _SearchResponseV3_Result_Value_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*SearchResponseV3_Result_Value) + // val + switch x := m.Val.(type) { + case *SearchResponseV3_Result_Value_Str: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Str) + case *SearchResponseV3_Result_Value_Int: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int)) + case *SearchResponseV3_Result_Value_Real: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.Real)) + case nil: + default: + return fmt.Errorf("SearchResponseV3_Result_Value.Val has unexpected type %T", x) + } + return nil +} + +func _SearchResponseV3_Result_Value_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*SearchResponseV3_Result_Value) + switch tag { + case 1: // val.str + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Val = &SearchResponseV3_Result_Value_Str{x} + return true, err + case 2: // val.int + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Val = &SearchResponseV3_Result_Value_Int{int64(x)} + return true, err + case 3: // val.real + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Val = &SearchResponseV3_Result_Value_Real{math.Float64frombits(x)} + return true, err + default: + return false, nil + } +} + +func _SearchResponseV3_Result_Value_OneofSizer(msg proto.Message) (n int) { + m := msg.(*SearchResponseV3_Result_Value) + // val + switch x := m.Val.(type) { + case *SearchResponseV3_Result_Value_Str: + n += proto.SizeVarint(1<<3 | 
proto.WireBytes) + n += proto.SizeVarint(uint64(len(x.Str))) + n += len(x.Str) + case *SearchResponseV3_Result_Value_Int: + n += proto.SizeVarint(2<<3 | proto.WireVarint) + n += proto.SizeVarint(uint64(x.Int)) + case *SearchResponseV3_Result_Value_Real: + n += proto.SizeVarint(3<<3 | proto.WireFixed64) + n += 8 + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type SearchRequestV3 struct { + Query string `protobuf:"bytes,1,opt,name=query" json:"query,omitempty"` +} + +func (m *SearchRequestV3) Reset() { *m = SearchRequestV3{} } +func (m *SearchRequestV3) String() string { return proto.CompactTextString(m) } +func (*SearchRequestV3) ProtoMessage() {} +func (*SearchRequestV3) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} } + +func (m *SearchRequestV3) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func init() { + proto.RegisterType((*SearchResponseV3)(nil), "grpc.testingv3.SearchResponseV3") + proto.RegisterType((*SearchResponseV3_Result)(nil), "grpc.testingv3.SearchResponseV3.Result") + proto.RegisterType((*SearchResponseV3_Result_Value)(nil), "grpc.testingv3.SearchResponseV3.Result.Value") + proto.RegisterType((*SearchRequestV3)(nil), "grpc.testingv3.SearchRequestV3") + proto.RegisterEnum("grpc.testingv3.SearchResponseV3_State", SearchResponseV3_State_name, SearchResponseV3_State_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion3 + +// Client API for SearchServiceV3 service + +type SearchServiceV3Client interface { + Search(ctx context.Context, in *SearchRequestV3, opts ...grpc.CallOption) (*SearchResponseV3, error) + StreamingSearch(ctx context.Context, opts ...grpc.CallOption) (SearchServiceV3_StreamingSearchClient, error) +} + +type searchServiceV3Client struct { + cc *grpc.ClientConn +} + +func NewSearchServiceV3Client(cc *grpc.ClientConn) SearchServiceV3Client { + return &searchServiceV3Client{cc} +} + +func (c *searchServiceV3Client) Search(ctx context.Context, in *SearchRequestV3, opts ...grpc.CallOption) (*SearchResponseV3, error) { + out := new(SearchResponseV3) + err := grpc.Invoke(ctx, "/grpc.testingv3.SearchServiceV3/Search", in, out, c.cc, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *searchServiceV3Client) StreamingSearch(ctx context.Context, opts ...grpc.CallOption) (SearchServiceV3_StreamingSearchClient, error) { + stream, err := grpc.NewClientStream(ctx, &_SearchServiceV3_serviceDesc.Streams[0], c.cc, "/grpc.testingv3.SearchServiceV3/StreamingSearch", opts...) 
+ if err != nil { + return nil, err + } + x := &searchServiceV3StreamingSearchClient{stream} + return x, nil +} + +type SearchServiceV3_StreamingSearchClient interface { + Send(*SearchRequestV3) error + Recv() (*SearchResponseV3, error) + grpc.ClientStream +} + +type searchServiceV3StreamingSearchClient struct { + grpc.ClientStream +} + +func (x *searchServiceV3StreamingSearchClient) Send(m *SearchRequestV3) error { + return x.ClientStream.SendMsg(m) +} + +func (x *searchServiceV3StreamingSearchClient) Recv() (*SearchResponseV3, error) { + m := new(SearchResponseV3) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// Server API for SearchServiceV3 service + +type SearchServiceV3Server interface { + Search(context.Context, *SearchRequestV3) (*SearchResponseV3, error) + StreamingSearch(SearchServiceV3_StreamingSearchServer) error +} + +func RegisterSearchServiceV3Server(s *grpc.Server, srv SearchServiceV3Server) { + s.RegisterService(&_SearchServiceV3_serviceDesc, srv) +} + +func _SearchServiceV3_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequestV3) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(SearchServiceV3Server).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testingv3.SearchServiceV3/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(SearchServiceV3Server).Search(ctx, req.(*SearchRequestV3)) + } + return interceptor(ctx, in, info, handler) +} + +func _SearchServiceV3_StreamingSearch_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(SearchServiceV3Server).StreamingSearch(&searchServiceV3StreamingSearchServer{stream}) +} + +type SearchServiceV3_StreamingSearchServer interface { + Send(*SearchResponseV3) error + Recv() (*SearchRequestV3, error) + grpc.ServerStream +} + +type searchServiceV3StreamingSearchServer struct { + grpc.ServerStream +} + +func (x *searchServiceV3StreamingSearchServer) Send(m *SearchResponseV3) error { + return x.ServerStream.SendMsg(m) +} + +func (x *searchServiceV3StreamingSearchServer) Recv() (*SearchRequestV3, error) { + m := new(SearchRequestV3) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _SearchServiceV3_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testingv3.SearchServiceV3", + HandlerType: (*SearchServiceV3Server)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Search", + Handler: _SearchServiceV3_Search_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingSearch", + Handler: _SearchServiceV3_StreamingSearch_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: fileDescriptor0, +} + +func init() { proto.RegisterFile("testv3.proto", fileDescriptor0) } + +var fileDescriptor0 = []byte{ + // 416 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x93, 0xd1, 0x6a, 0xd4, 0x40, + 0x14, 0x86, 0x77, 0x36, 0x9b, 0x6d, 0xf7, 0xac, 0xb6, 0x61, 0xe8, 0x45, 0xc8, 0x8d, 0x61, 0x2f, + 0x6c, 0x10, 0x0c, 0x92, 0x20, 0x88, 0x78, 0x53, 0x65, 0x65, 0xa1, 0x75, 0xc5, 0x89, 0xae, 0xde, + 0x8e, 0xeb, 0x61, 0x8d, 0x4d, 0xb3, 0xe9, 0xcc, 0x49, 0x60, 0x9f, 0xc5, 0x17, 0xf1, 0x55, 0x7c, + 0x1b, 0x99, 0x99, 0xa6, 0x50, 0x41, 0xba, 0x17, 0xde, 0xcd, 0x7f, 0x38, 0xff, 0x37, 0xff, 0x3f, + 
0x24, 0xf0, 0x80, 0x50, 0x53, 0x97, 0xa7, 0x8d, 0xda, 0xd2, 0x96, 0x1f, 0x6d, 0x54, 0xb3, 0x4e, + 0xcd, 0xa8, 0xac, 0x37, 0x5d, 0x3e, 0xfb, 0x39, 0x82, 0xa0, 0x40, 0xa9, 0xd6, 0xdf, 0x05, 0xea, + 0x66, 0x5b, 0x6b, 0x5c, 0xe5, 0xfc, 0x0c, 0x0e, 0x14, 0xea, 0xb6, 0x22, 0x1d, 0xb2, 0xd8, 0x4b, + 0xa6, 0xd9, 0x69, 0x7a, 0xd7, 0x96, 0xfe, 0x6d, 0x49, 0x85, 0xdd, 0x17, 0xbd, 0x8f, 0xbf, 0x02, + 0x5f, 0x93, 0x24, 0x0c, 0x87, 0x31, 0x4b, 0x8e, 0xb2, 0xc7, 0xf7, 0x02, 0x0a, 0xb3, 0x2d, 0x9c, + 0x29, 0xfa, 0x3d, 0x84, 0xb1, 0x23, 0xf2, 0x00, 0xbc, 0x56, 0x55, 0x21, 0x8b, 0x59, 0x32, 0x11, + 0xe6, 0xc8, 0x4f, 0xc0, 0xa7, 0x92, 0x2a, 0x87, 0x9e, 0x08, 0x27, 0x78, 0x04, 0x87, 0xba, 0x2e, + 0x9b, 0x06, 0x49, 0x87, 0x5e, 0xec, 0x25, 0x13, 0x71, 0xab, 0xf9, 0x07, 0x38, 0xbc, 0x42, 0x92, + 0xdf, 0x24, 0xc9, 0x70, 0x64, 0x0b, 0x3d, 0xdf, 0xb3, 0x50, 0xfa, 0xee, 0xc6, 0x37, 0xaf, 0x49, + 0xed, 0xc4, 0x2d, 0x26, 0xba, 0x00, 0x7f, 0x25, 0xab, 0x16, 0x39, 0x07, 0x4f, 0x93, 0x72, 0xf9, + 0x16, 0x03, 0x61, 0x84, 0x99, 0x95, 0x35, 0xd9, 0x7c, 0x9e, 0x99, 0x95, 0x35, 0xf1, 0x13, 0x18, + 0x29, 0x94, 0x55, 0xe8, 0xc5, 0x2c, 0x61, 0x8b, 0x81, 0xb0, 0xea, 0xb5, 0x0f, 0x5e, 0x27, 0xab, + 0xe8, 0x07, 0x3c, 0xbc, 0x73, 0x91, 0x69, 0x7d, 0x89, 0xbb, 0xbe, 0xf5, 0x25, 0xee, 0xf8, 0x1b, + 0xf0, 0x3b, 0x73, 0xa1, 0xa5, 0x4e, 0xb3, 0xa7, 0xfb, 0x16, 0xb0, 0x29, 0x85, 0xf3, 0xbe, 0x1c, + 0xbe, 0x60, 0xb3, 0x27, 0xe0, 0xdb, 0xb7, 0xe6, 0x53, 0x38, 0xf8, 0xb4, 0x3c, 0x5f, 0xbe, 0xff, + 0xbc, 0x0c, 0x06, 0x7c, 0x02, 0xfe, 0x5b, 0x31, 0x2f, 0x16, 0x01, 0x33, 0xc7, 0xe2, 0xe3, 0xd9, + 0xc5, 0x3c, 0x18, 0xce, 0x4e, 0xe1, 0xb8, 0xe7, 0x5e, 0xb7, 0xa8, 0x69, 0x95, 0x9b, 0xd7, 0xbf, + 0x6e, 0x51, 0xf5, 0xd9, 0x9c, 0xc8, 0x7e, 0xb1, 0x7e, 0xb3, 0x40, 0xd5, 0x95, 0x6b, 0xf3, 0x15, + 0x9d, 0xc3, 0xd8, 0x8d, 0xf8, 0xa3, 0x7f, 0x85, 0xbd, 0x81, 0x46, 0xf1, 0x7d, 0x6d, 0xf8, 0x17, + 0x38, 0x2e, 0x48, 0xa1, 0xbc, 0x2a, 0xeb, 0xcd, 0x7f, 0xa3, 0x26, 0xec, 0x19, 0xfb, 0x3a, 0xb6, + 0x3f, 0x46, 0xfe, 0x27, 0x00, 0x00, 0xff, 0xff, 0xed, 0xa2, 0x8d, 0x75, 0x28, 0x03, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/reflection/serverreflection.go b/vendor/google.golang.org/grpc/reflection/serverreflection.go new file mode 100644 index 0000000..dd22a2d --- /dev/null +++ b/vendor/google.golang.org/grpc/reflection/serverreflection.go @@ -0,0 +1,454 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc --go_out=plugins=grpc:. grpc_reflection_v1alpha/reflection.proto + +/* +Package reflection implements server reflection service. + +The service implemented is defined in: +https://github.com/grpc/grpc/blob/master/src/proto/grpc/reflection/v1alpha/reflection.proto. + +To register server reflection on a gRPC server: + import "google.golang.org/grpc/reflection" + + s := grpc.NewServer() + pb.RegisterYourOwnServer(s, &server{}) + + // Register reflection service on gRPC server. 
+ reflection.Register(s) + + s.Serve(lis) + +*/ +package reflection // import "google.golang.org/grpc/reflection" + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "reflect" + "sort" + "sync" + + "github.com/golang/protobuf/proto" + dpb "github.com/golang/protobuf/protoc-gen-go/descriptor" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + rpb "google.golang.org/grpc/reflection/grpc_reflection_v1alpha" + "google.golang.org/grpc/status" +) + +type serverReflectionServer struct { + s *grpc.Server + + initSymbols sync.Once + serviceNames []string + symbols map[string]*dpb.FileDescriptorProto // map of fully-qualified names to files +} + +// Register registers the server reflection service on the given gRPC server. +func Register(s *grpc.Server) { + rpb.RegisterServerReflectionServer(s, &serverReflectionServer{ + s: s, + }) +} + +// protoMessage is used for type assertion on proto messages. +// Generated proto message implements function Descriptor(), but Descriptor() +// is not part of interface proto.Message. This interface is needed to +// call Descriptor(). +type protoMessage interface { + Descriptor() ([]byte, []int) +} + +func (s *serverReflectionServer) getSymbols() (svcNames []string, symbolIndex map[string]*dpb.FileDescriptorProto) { + s.initSymbols.Do(func() { + serviceInfo := s.s.GetServiceInfo() + + s.symbols = map[string]*dpb.FileDescriptorProto{} + s.serviceNames = make([]string, 0, len(serviceInfo)) + processed := map[string]struct{}{} + for svc, info := range serviceInfo { + s.serviceNames = append(s.serviceNames, svc) + fdenc, ok := parseMetadata(info.Metadata) + if !ok { + continue + } + fd, err := decodeFileDesc(fdenc) + if err != nil { + continue + } + s.processFile(fd, processed) + } + sort.Strings(s.serviceNames) + }) + + return s.serviceNames, s.symbols +} + +func (s *serverReflectionServer) processFile(fd *dpb.FileDescriptorProto, processed map[string]struct{}) { + filename := fd.GetName() + if _, ok := processed[filename]; ok { + return + } + processed[filename] = struct{}{} + + prefix := fd.GetPackage() + + for _, msg := range fd.MessageType { + s.processMessage(fd, prefix, msg) + } + for _, en := range fd.EnumType { + s.processEnum(fd, prefix, en) + } + for _, ext := range fd.Extension { + s.processField(fd, prefix, ext) + } + for _, svc := range fd.Service { + svcName := fqn(prefix, svc.GetName()) + s.symbols[svcName] = fd + for _, meth := range svc.Method { + name := fqn(svcName, meth.GetName()) + s.symbols[name] = fd + } + } + + for _, dep := range fd.Dependency { + fdenc := proto.FileDescriptor(dep) + fdDep, err := decodeFileDesc(fdenc) + if err != nil { + continue + } + s.processFile(fdDep, processed) + } +} + +func (s *serverReflectionServer) processMessage(fd *dpb.FileDescriptorProto, prefix string, msg *dpb.DescriptorProto) { + msgName := fqn(prefix, msg.GetName()) + s.symbols[msgName] = fd + + for _, nested := range msg.NestedType { + s.processMessage(fd, msgName, nested) + } + for _, en := range msg.EnumType { + s.processEnum(fd, msgName, en) + } + for _, ext := range msg.Extension { + s.processField(fd, msgName, ext) + } + for _, fld := range msg.Field { + s.processField(fd, msgName, fld) + } + for _, oneof := range msg.OneofDecl { + oneofName := fqn(msgName, oneof.GetName()) + s.symbols[oneofName] = fd + } +} + +func (s *serverReflectionServer) processEnum(fd *dpb.FileDescriptorProto, prefix string, en *dpb.EnumDescriptorProto) { + enName := fqn(prefix, en.GetName()) + s.symbols[enName] = fd + + for _, val := range en.Value 
{ + valName := fqn(enName, val.GetName()) + s.symbols[valName] = fd + } +} + +func (s *serverReflectionServer) processField(fd *dpb.FileDescriptorProto, prefix string, fld *dpb.FieldDescriptorProto) { + fldName := fqn(prefix, fld.GetName()) + s.symbols[fldName] = fd +} + +func fqn(prefix, name string) string { + if prefix == "" { + return name + } + return prefix + "." + name +} + +// fileDescForType gets the file descriptor for the given type. +// The given type should be a proto message. +func (s *serverReflectionServer) fileDescForType(st reflect.Type) (*dpb.FileDescriptorProto, error) { + m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(protoMessage) + if !ok { + return nil, fmt.Errorf("failed to create message from type: %v", st) + } + enc, _ := m.Descriptor() + + return decodeFileDesc(enc) +} + +// decodeFileDesc does decompression and unmarshalling on the given +// file descriptor byte slice. +func decodeFileDesc(enc []byte) (*dpb.FileDescriptorProto, error) { + raw, err := decompress(enc) + if err != nil { + return nil, fmt.Errorf("failed to decompress enc: %v", err) + } + + fd := new(dpb.FileDescriptorProto) + if err := proto.Unmarshal(raw, fd); err != nil { + return nil, fmt.Errorf("bad descriptor: %v", err) + } + return fd, nil +} + +// decompress does gzip decompression. +func decompress(b []byte) ([]byte, error) { + r, err := gzip.NewReader(bytes.NewReader(b)) + if err != nil { + return nil, fmt.Errorf("bad gzipped descriptor: %v", err) + } + out, err := ioutil.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("bad gzipped descriptor: %v", err) + } + return out, nil +} + +func typeForName(name string) (reflect.Type, error) { + pt := proto.MessageType(name) + if pt == nil { + return nil, fmt.Errorf("unknown type: %q", name) + } + st := pt.Elem() + + return st, nil +} + +func fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescriptorProto, error) { + m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message) + if !ok { + return nil, fmt.Errorf("failed to create message from type: %v", st) + } + + var extDesc *proto.ExtensionDesc + for id, desc := range proto.RegisteredExtensions(m) { + if id == ext { + extDesc = desc + break + } + } + + if extDesc == nil { + return nil, fmt.Errorf("failed to find registered extension for extension number %v", ext) + } + + return decodeFileDesc(proto.FileDescriptor(extDesc.Filename)) +} + +func (s *serverReflectionServer) allExtensionNumbersForType(st reflect.Type) ([]int32, error) { + m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message) + if !ok { + return nil, fmt.Errorf("failed to create message from type: %v", st) + } + + exts := proto.RegisteredExtensions(m) + out := make([]int32, 0, len(exts)) + for id := range exts { + out = append(out, id) + } + return out, nil +} + +// fileDescEncodingByFilename finds the file descriptor for given filename, +// does marshalling on it and returns the marshalled result. +func (s *serverReflectionServer) fileDescEncodingByFilename(name string) ([]byte, error) { + enc := proto.FileDescriptor(name) + if enc == nil { + return nil, fmt.Errorf("unknown file: %v", name) + } + fd, err := decodeFileDesc(enc) + if err != nil { + return nil, err + } + return proto.Marshal(fd) +} + +// parseMetadata finds the file descriptor bytes specified meta. +// For SupportPackageIsVersion4, m is the name of the proto file, we +// call proto.FileDescriptor to get the byte slice. +// For SupportPackageIsVersion3, m is a byte slice itself. 
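// Concretely: the generated test.pb.go above registers its service with
// Metadata: "test.proto" (a file name), while the older testv3.pb.go registers
// Metadata: fileDescriptor0 (the gzipped descriptor bytes themselves). An
// illustrative sketch of what each branch yields:
//
//	enc, ok := parseMetadata("test.proto")    // ok == true; enc is proto.FileDescriptor("test.proto")
//	enc, ok  = parseMetadata(fileDescriptor0) // ok == true; enc is the byte slice unchanged
//	enc, ok  = parseMetadata(42)              // ok == false; unsupported metadata type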
+func parseMetadata(meta interface{}) ([]byte, bool) { + // Check if meta is the file name. + if fileNameForMeta, ok := meta.(string); ok { + return proto.FileDescriptor(fileNameForMeta), true + } + + // Check if meta is the byte slice. + if enc, ok := meta.([]byte); ok { + return enc, true + } + + return nil, false +} + +// fileDescEncodingContainingSymbol finds the file descriptor containing the given symbol, +// does marshalling on it and returns the marshalled result. +// The given symbol can be a type, a service or a method. +func (s *serverReflectionServer) fileDescEncodingContainingSymbol(name string) ([]byte, error) { + _, symbols := s.getSymbols() + fd := symbols[name] + if fd == nil { + // Check if it's a type name that was not present in the + // transitive dependencies of the registered services. + if st, err := typeForName(name); err == nil { + fd, err = s.fileDescForType(st) + if err != nil { + return nil, err + } + } + } + + if fd == nil { + return nil, fmt.Errorf("unknown symbol: %v", name) + } + + return proto.Marshal(fd) +} + +// fileDescEncodingContainingExtension finds the file descriptor containing given extension, +// does marshalling on it and returns the marshalled result. +func (s *serverReflectionServer) fileDescEncodingContainingExtension(typeName string, extNum int32) ([]byte, error) { + st, err := typeForName(typeName) + if err != nil { + return nil, err + } + fd, err := fileDescContainingExtension(st, extNum) + if err != nil { + return nil, err + } + return proto.Marshal(fd) +} + +// allExtensionNumbersForTypeName returns all extension numbers for the given type. +func (s *serverReflectionServer) allExtensionNumbersForTypeName(name string) ([]int32, error) { + st, err := typeForName(name) + if err != nil { + return nil, err + } + extNums, err := s.allExtensionNumbersForType(st) + if err != nil { + return nil, err + } + return extNums, nil +} + +// ServerReflectionInfo is the reflection service handler. 
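// A hedged sketch of driving this handler from the client side with the
// grpc_reflection_v1alpha stubs in package rpb (conn is an assumed,
// already-dialled *grpc.ClientConn; error handling is omitted for brevity):
//
//	client := rpb.NewServerReflectionClient(conn)
//	stream, _ := client.ServerReflectionInfo(context.Background())
//	_ = stream.Send(&rpb.ServerReflectionRequest{
//		MessageRequest: &rpb.ServerReflectionRequest_ListServices{ListServices: ""},
//	})
//	resp, _ := stream.Recv()
//	for _, svc := range resp.GetListServicesResponse().GetService() {
//		fmt.Println(svc.GetName())
//	}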
+func (s *serverReflectionServer) ServerReflectionInfo(stream rpb.ServerReflection_ServerReflectionInfoServer) error { + for { + in, err := stream.Recv() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + + out := &rpb.ServerReflectionResponse{ + ValidHost: in.Host, + OriginalRequest: in, + } + switch req := in.MessageRequest.(type) { + case *rpb.ServerReflectionRequest_FileByFilename: + b, err := s.fileDescEncodingByFilename(req.FileByFilename) + if err != nil { + out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{ + ErrorResponse: &rpb.ErrorResponse{ + ErrorCode: int32(codes.NotFound), + ErrorMessage: err.Error(), + }, + } + } else { + out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{ + FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}}, + } + } + case *rpb.ServerReflectionRequest_FileContainingSymbol: + b, err := s.fileDescEncodingContainingSymbol(req.FileContainingSymbol) + if err != nil { + out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{ + ErrorResponse: &rpb.ErrorResponse{ + ErrorCode: int32(codes.NotFound), + ErrorMessage: err.Error(), + }, + } + } else { + out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{ + FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}}, + } + } + case *rpb.ServerReflectionRequest_FileContainingExtension: + typeName := req.FileContainingExtension.ContainingType + extNum := req.FileContainingExtension.ExtensionNumber + b, err := s.fileDescEncodingContainingExtension(typeName, extNum) + if err != nil { + out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{ + ErrorResponse: &rpb.ErrorResponse{ + ErrorCode: int32(codes.NotFound), + ErrorMessage: err.Error(), + }, + } + } else { + out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{ + FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}}, + } + } + case *rpb.ServerReflectionRequest_AllExtensionNumbersOfType: + extNums, err := s.allExtensionNumbersForTypeName(req.AllExtensionNumbersOfType) + if err != nil { + out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{ + ErrorResponse: &rpb.ErrorResponse{ + ErrorCode: int32(codes.NotFound), + ErrorMessage: err.Error(), + }, + } + } else { + out.MessageResponse = &rpb.ServerReflectionResponse_AllExtensionNumbersResponse{ + AllExtensionNumbersResponse: &rpb.ExtensionNumberResponse{ + BaseTypeName: req.AllExtensionNumbersOfType, + ExtensionNumber: extNums, + }, + } + } + case *rpb.ServerReflectionRequest_ListServices: + svcNames, _ := s.getSymbols() + serviceResponses := make([]*rpb.ServiceResponse, len(svcNames)) + for i, n := range svcNames { + serviceResponses[i] = &rpb.ServiceResponse{ + Name: n, + } + } + out.MessageResponse = &rpb.ServerReflectionResponse_ListServicesResponse{ + ListServicesResponse: &rpb.ListServiceResponse{ + Service: serviceResponses, + }, + } + default: + return status.Errorf(codes.InvalidArgument, "invalid MessageRequest: %v", in.MessageRequest) + } + + if err := stream.Send(out); err != nil { + return err + } + } +} diff --git a/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go b/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go new file mode 100644 index 0000000..5835599 --- /dev/null +++ b/vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go @@ -0,0 +1,438 @@ +/* + * + * Copyright 2018 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package dns implements a dns resolver to be installed as the default resolver +// in grpc. +package dns + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net" + "os" + "strconv" + "strings" + "sync" + "time" + + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/backoff" + "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/resolver" +) + +func init() { + resolver.Register(NewBuilder()) +} + +const ( + defaultPort = "443" + defaultFreq = time.Minute * 30 + defaultDNSSvrPort = "53" + golang = "GO" + // txtPrefix is the prefix string to be prepended to the host name for txt record lookup. + txtPrefix = "_grpc_config." + // In DNS, service config is encoded in a TXT record via the mechanism + // described in RFC-1464 using the attribute name grpc_config. + txtAttribute = "grpc_config=" +) + +var ( + errMissingAddr = errors.New("dns resolver: missing address") + + // Addresses ending with a colon that is supposed to be the separator + // between host and port is not allowed. E.g. "::" is a valid address as + // it is an IPv6 address (host only) and "[::]:" is invalid as it ends with + // a colon as the host and port separator + errEndsWithColon = errors.New("dns resolver: missing port after port-separator colon") +) + +var ( + defaultResolver netResolver = net.DefaultResolver +) + +var customAuthorityDialler = func(authority string) func(ctx context.Context, network, address string) (net.Conn, error) { + return func(ctx context.Context, network, address string) (net.Conn, error) { + var dialer net.Dialer + return dialer.DialContext(ctx, network, authority) + } +} + +var customAuthorityResolver = func(authority string) (netResolver, error) { + host, port, err := parseTarget(authority, defaultDNSSvrPort) + if err != nil { + return nil, err + } + + authorityWithPort := net.JoinHostPort(host, port) + + return &net.Resolver{ + PreferGo: true, + Dial: customAuthorityDialler(authorityWithPort), + }, nil +} + +// NewBuilder creates a dnsBuilder which is used to factory DNS resolvers. +func NewBuilder() resolver.Builder { + return &dnsBuilder{minFreq: defaultFreq} +} + +type dnsBuilder struct { + // minimum frequency of polling the DNS server. + minFreq time.Duration +} + +// Build creates and starts a DNS resolver that watches the name resolution of the target. +func (b *dnsBuilder) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) { + host, port, err := parseTarget(target.Endpoint, defaultPort) + if err != nil { + return nil, err + } + + // IP address. + if net.ParseIP(host) != nil { + host, _ = formatIP(host) + addr := []resolver.Address{{Addr: host + ":" + port}} + i := &ipResolver{ + cc: cc, + ip: addr, + rn: make(chan struct{}, 1), + q: make(chan struct{}), + } + cc.NewAddress(addr) + go i.watcher() + return i, nil + } + + // DNS address (non-IP). 
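// For a non-IP host the builder falls through to the code below: it creates a
// dnsResolver with a cancellable context and starts a background watcher
// goroutine that periodically re-resolves SRV (grpclb), A/AAAA and TXT
// (service config) records and pushes the results to the ClientConn.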
+ ctx, cancel := context.WithCancel(context.Background()) + d := &dnsResolver{ + freq: b.minFreq, + backoff: backoff.Exponential{MaxDelay: b.minFreq}, + host: host, + port: port, + ctx: ctx, + cancel: cancel, + cc: cc, + t: time.NewTimer(0), + rn: make(chan struct{}, 1), + disableServiceConfig: opts.DisableServiceConfig, + } + + if target.Authority == "" { + d.resolver = defaultResolver + } else { + d.resolver, err = customAuthorityResolver(target.Authority) + if err != nil { + return nil, err + } + } + + d.wg.Add(1) + go d.watcher() + return d, nil +} + +// Scheme returns the naming scheme of this resolver builder, which is "dns". +func (b *dnsBuilder) Scheme() string { + return "dns" +} + +type netResolver interface { + LookupHost(ctx context.Context, host string) (addrs []string, err error) + LookupSRV(ctx context.Context, service, proto, name string) (cname string, addrs []*net.SRV, err error) + LookupTXT(ctx context.Context, name string) (txts []string, err error) +} + +// ipResolver watches for the name resolution update for an IP address. +type ipResolver struct { + cc resolver.ClientConn + ip []resolver.Address + // rn channel is used by ResolveNow() to force an immediate resolution of the target. + rn chan struct{} + q chan struct{} +} + +// ResolveNow resend the address it stores, no resolution is needed. +func (i *ipResolver) ResolveNow(opt resolver.ResolveNowOption) { + select { + case i.rn <- struct{}{}: + default: + } +} + +// Close closes the ipResolver. +func (i *ipResolver) Close() { + close(i.q) +} + +func (i *ipResolver) watcher() { + for { + select { + case <-i.rn: + i.cc.NewAddress(i.ip) + case <-i.q: + return + } + } +} + +// dnsResolver watches for the name resolution update for a non-IP target. +type dnsResolver struct { + freq time.Duration + backoff backoff.Exponential + retryCount int + host string + port string + resolver netResolver + ctx context.Context + cancel context.CancelFunc + cc resolver.ClientConn + // rn channel is used by ResolveNow() to force an immediate resolution of the target. + rn chan struct{} + t *time.Timer + // wg is used to enforce Close() to return after the watcher() goroutine has finished. + // Otherwise, data race will be possible. [Race Example] in dns_resolver_test we + // replace the real lookup functions with mocked ones to facilitate testing. + // If Close() doesn't wait for watcher() goroutine finishes, race detector sometimes + // will warns lookup (READ the lookup function pointers) inside watcher() goroutine + // has data race with replaceNetFunc (WRITE the lookup function pointers). + wg sync.WaitGroup + disableServiceConfig bool +} + +// ResolveNow invoke an immediate resolution of the target that this dnsResolver watches. +func (d *dnsResolver) ResolveNow(opt resolver.ResolveNowOption) { + select { + case d.rn <- struct{}{}: + default: + } +} + +// Close closes the dnsResolver. +func (d *dnsResolver) Close() { + d.cancel() + d.wg.Wait() + d.t.Stop() +} + +func (d *dnsResolver) watcher() { + defer d.wg.Done() + for { + select { + case <-d.ctx.Done(): + return + case <-d.t.C: + case <-d.rn: + } + result, sc := d.lookup() + // Next lookup should happen within an interval defined by d.freq. It may be + // more often due to exponential retry on empty address list. 
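// In other words: an empty result grows the retry counter and re-arms the timer
// with the exponential backoff (capped at the builder's minFreq), while a
// non-empty result clears the counter and re-arms the timer with the regular
// polling frequency; either way the (possibly empty) address list and service
// config are then pushed to the ClientConn.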
+ if len(result) == 0 { + d.retryCount++ + d.t.Reset(d.backoff.Backoff(d.retryCount)) + } else { + d.retryCount = 0 + d.t.Reset(d.freq) + } + d.cc.NewServiceConfig(sc) + d.cc.NewAddress(result) + } +} + +func (d *dnsResolver) lookupSRV() []resolver.Address { + var newAddrs []resolver.Address + _, srvs, err := d.resolver.LookupSRV(d.ctx, "grpclb", "tcp", d.host) + if err != nil { + grpclog.Infof("grpc: failed dns SRV record lookup due to %v.\n", err) + return nil + } + for _, s := range srvs { + lbAddrs, err := d.resolver.LookupHost(d.ctx, s.Target) + if err != nil { + grpclog.Infof("grpc: failed load balancer address dns lookup due to %v.\n", err) + continue + } + for _, a := range lbAddrs { + a, ok := formatIP(a) + if !ok { + grpclog.Errorf("grpc: failed IP parsing due to %v.\n", err) + continue + } + addr := a + ":" + strconv.Itoa(int(s.Port)) + newAddrs = append(newAddrs, resolver.Address{Addr: addr, Type: resolver.GRPCLB, ServerName: s.Target}) + } + } + return newAddrs +} + +func (d *dnsResolver) lookupTXT() string { + ss, err := d.resolver.LookupTXT(d.ctx, txtPrefix+d.host) + if err != nil { + grpclog.Infof("grpc: failed dns TXT record lookup due to %v.\n", err) + return "" + } + var res string + for _, s := range ss { + res += s + } + + // TXT record must have "grpc_config=" attribute in order to be used as service config. + if !strings.HasPrefix(res, txtAttribute) { + grpclog.Warningf("grpc: TXT record %v missing %v attribute", res, txtAttribute) + return "" + } + return strings.TrimPrefix(res, txtAttribute) +} + +func (d *dnsResolver) lookupHost() []resolver.Address { + var newAddrs []resolver.Address + addrs, err := d.resolver.LookupHost(d.ctx, d.host) + if err != nil { + grpclog.Warningf("grpc: failed dns A record lookup due to %v.\n", err) + return nil + } + for _, a := range addrs { + a, ok := formatIP(a) + if !ok { + grpclog.Errorf("grpc: failed IP parsing due to %v.\n", err) + continue + } + addr := a + ":" + d.port + newAddrs = append(newAddrs, resolver.Address{Addr: addr}) + } + return newAddrs +} + +func (d *dnsResolver) lookup() ([]resolver.Address, string) { + newAddrs := d.lookupSRV() + // Support fallback to non-balancer address. + newAddrs = append(newAddrs, d.lookupHost()...) + if d.disableServiceConfig { + return newAddrs, "" + } + sc := d.lookupTXT() + return newAddrs, canaryingSC(sc) +} + +// formatIP returns ok = false if addr is not a valid textual representation of an IP address. +// If addr is an IPv4 address, return the addr and ok = true. +// If addr is an IPv6 address, return the addr enclosed in square brackets and ok = true. +func formatIP(addr string) (addrIP string, ok bool) { + ip := net.ParseIP(addr) + if ip == nil { + return "", false + } + if ip.To4() != nil { + return addr, true + } + return "[" + addr + "]", true +} + +// parseTarget takes the user input target string and default port, returns formatted host and port info. +// If target doesn't specify a port, set the port to be the defaultPort. +// If target is in IPv6 format and host-name is enclosed in square brackets, brackets +// are stripped when setting the host. 
+// examples: +// target: "www.google.com" defaultPort: "443" returns host: "www.google.com", port: "443" +// target: "ipv4-host:80" defaultPort: "443" returns host: "ipv4-host", port: "80" +// target: "[ipv6-host]" defaultPort: "443" returns host: "ipv6-host", port: "443" +// target: ":80" defaultPort: "443" returns host: "localhost", port: "80" +func parseTarget(target, defaultPort string) (host, port string, err error) { + if target == "" { + return "", "", errMissingAddr + } + if ip := net.ParseIP(target); ip != nil { + // target is an IPv4 or IPv6(without brackets) address + return target, defaultPort, nil + } + if host, port, err = net.SplitHostPort(target); err == nil { + if port == "" { + // If the port field is empty (target ends with colon), e.g. "[::1]:", this is an error. + return "", "", errEndsWithColon + } + // target has port, i.e ipv4-host:port, [ipv6-host]:port, host-name:port + if host == "" { + // Keep consistent with net.Dial(): If the host is empty, as in ":80", the local system is assumed. + host = "localhost" + } + return host, port, nil + } + if host, port, err = net.SplitHostPort(target + ":" + defaultPort); err == nil { + // target doesn't have port + return host, port, nil + } + return "", "", fmt.Errorf("invalid target address %v, error info: %v", target, err) +} + +type rawChoice struct { + ClientLanguage *[]string `json:"clientLanguage,omitempty"` + Percentage *int `json:"percentage,omitempty"` + ClientHostName *[]string `json:"clientHostName,omitempty"` + ServiceConfig *json.RawMessage `json:"serviceConfig,omitempty"` +} + +func containsString(a *[]string, b string) bool { + if a == nil { + return true + } + for _, c := range *a { + if c == b { + return true + } + } + return false +} + +func chosenByPercentage(a *int) bool { + if a == nil { + return true + } + return grpcrand.Intn(100)+1 <= *a +} + +func canaryingSC(js string) string { + if js == "" { + return "" + } + var rcs []rawChoice + err := json.Unmarshal([]byte(js), &rcs) + if err != nil { + grpclog.Warningf("grpc: failed to parse service config json string due to %v.\n", err) + return "" + } + cliHostname, err := os.Hostname() + if err != nil { + grpclog.Warningf("grpc: failed to get client hostname due to %v.\n", err) + return "" + } + var sc string + for _, c := range rcs { + if !containsString(c.ClientLanguage, golang) || + !chosenByPercentage(c.Percentage) || + !containsString(c.ClientHostName, cliHostname) || + c.ServiceConfig == nil { + continue + } + sc = string(*c.ServiceConfig) + break + } + return sc +} diff --git a/vendor/google.golang.org/grpc/resolver/manual/manual.go b/vendor/google.golang.org/grpc/resolver/manual/manual.go new file mode 100644 index 0000000..85a4abe --- /dev/null +++ b/vendor/google.golang.org/grpc/resolver/manual/manual.go @@ -0,0 +1,86 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package manual defines a resolver that can be used to manually send resolved +// addresses to ClientConn. 
+package manual + +import ( + "strconv" + "time" + + "google.golang.org/grpc/resolver" +) + +// NewBuilderWithScheme creates a new test resolver builder with the given scheme. +func NewBuilderWithScheme(scheme string) *Resolver { + return &Resolver{ + scheme: scheme, + } +} + +// Resolver is also a resolver builder. +// It's build() function always returns itself. +type Resolver struct { + scheme string + + // Fields actually belong to the resolver. + cc resolver.ClientConn + bootstrapState *resolver.State +} + +// InitialState adds initial state to the resolver so that UpdateState doesn't +// need to be explicitly called after Dial. +func (r *Resolver) InitialState(s resolver.State) { + r.bootstrapState = &s +} + +// Build returns itself for Resolver, because it's both a builder and a resolver. +func (r *Resolver) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) { + r.cc = cc + if r.bootstrapState != nil { + r.UpdateState(*r.bootstrapState) + } + return r, nil +} + +// Scheme returns the test scheme. +func (r *Resolver) Scheme() string { + return r.scheme +} + +// ResolveNow is a noop for Resolver. +func (*Resolver) ResolveNow(o resolver.ResolveNowOption) {} + +// Close is a noop for Resolver. +func (*Resolver) Close() {} + +// UpdateState calls cc.UpdateState. +func (r *Resolver) UpdateState(s resolver.State) { + r.cc.UpdateState(s) +} + +// GenerateAndRegisterManualResolver generates a random scheme and a Resolver +// with it. It also registers this Resolver. +// It returns the Resolver and a cleanup function to unregister it. +func GenerateAndRegisterManualResolver() (*Resolver, func()) { + scheme := strconv.FormatInt(time.Now().UnixNano(), 36) + r := NewBuilderWithScheme(scheme) + resolver.Register(r) + return r, func() { resolver.UnregisterForTesting(scheme) } +} diff --git a/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go b/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go new file mode 100644 index 0000000..893d5d1 --- /dev/null +++ b/vendor/google.golang.org/grpc/resolver/passthrough/passthrough.go @@ -0,0 +1,57 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package passthrough implements a pass-through resolver. It sends the target +// name without scheme back to gRPC as resolved address. 
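The manual resolver defined above is mainly a test helper: it is both a resolver builder and a resolver, and addresses are pushed into it from test code instead of being discovered. A minimal sketch of wiring it into a dial, assuming grpc.Dial and grpc.WithInsecure from this same vendored gRPC version and a placeholder backend address:

package main

import (
	"google.golang.org/grpc"
	"google.golang.org/grpc/resolver"
	"google.golang.org/grpc/resolver/manual"
)

func main() {
	// Register a manual resolver under a freshly generated scheme and seed
	// it with a fixed backend address before dialing.
	r, cleanup := manual.GenerateAndRegisterManualResolver()
	defer cleanup()
	r.InitialState(resolver.State{
		Addresses: []resolver.Address{{Addr: "127.0.0.1:50051"}},
	})

	// Dialing "<scheme>:///ignored" routes name resolution through r.
	cc, err := grpc.Dial(r.Scheme()+":///ignored", grpc.WithInsecure())
	if err != nil {
		panic(err)
	}
	defer cc.Close()

	// Later, push a new address list to simulate a resolver update.
	r.UpdateState(resolver.State{
		Addresses: []resolver.Address{{Addr: "127.0.0.1:50052"}},
	})
}

Because GenerateAndRegisterManualResolver derives the scheme from the current time, a single test process can register several independent manual resolvers without scheme collisions.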
+package passthrough + +import "google.golang.org/grpc/resolver" + +const scheme = "passthrough" + +type passthroughBuilder struct{} + +func (*passthroughBuilder) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) { + r := &passthroughResolver{ + target: target, + cc: cc, + } + r.start() + return r, nil +} + +func (*passthroughBuilder) Scheme() string { + return scheme +} + +type passthroughResolver struct { + target resolver.Target + cc resolver.ClientConn +} + +func (r *passthroughResolver) start() { + r.cc.UpdateState(resolver.State{Addresses: []resolver.Address{{Addr: r.target.Endpoint}}}) +} + +func (*passthroughResolver) ResolveNow(o resolver.ResolveNowOption) {} + +func (*passthroughResolver) Close() {} + +func init() { + resolver.Register(&passthroughBuilder{}) +} diff --git a/vendor/google.golang.org/grpc/resolver/resolver.go b/vendor/google.golang.org/grpc/resolver/resolver.go new file mode 100644 index 0000000..52ec603 --- /dev/null +++ b/vendor/google.golang.org/grpc/resolver/resolver.go @@ -0,0 +1,173 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package resolver defines APIs for name resolution in gRPC. +// All APIs in this package are experimental. +package resolver + +var ( + // m is a map from scheme to resolver builder. + m = make(map[string]Builder) + // defaultScheme is the default scheme to use. + defaultScheme = "passthrough" +) + +// TODO(bar) install dns resolver in init(){}. + +// Register registers the resolver builder to the resolver map. b.Scheme will be +// used as the scheme registered with this builder. +// +// NOTE: this function must only be called during initialization time (i.e. in +// an init() function), and is not thread-safe. If multiple Resolvers are +// registered with the same name, the one registered last will take effect. +func Register(b Builder) { + m[b.Scheme()] = b +} + +// Get returns the resolver builder registered with the given scheme. +// +// If no builder is register with the scheme, nil will be returned. +func Get(scheme string) Builder { + if b, ok := m[scheme]; ok { + return b + } + return nil +} + +// SetDefaultScheme sets the default scheme that will be used. The default +// default scheme is "passthrough". +// +// NOTE: this function must only be called during initialization time (i.e. in +// an init() function), and is not thread-safe. The scheme set last overrides +// previously set values. +func SetDefaultScheme(scheme string) { + defaultScheme = scheme +} + +// GetDefaultScheme gets the default scheme that will be used. +func GetDefaultScheme() string { + return defaultScheme +} + +// AddressType indicates the address type returned by name resolution. +type AddressType uint8 + +const ( + // Backend indicates the address is for a backend server. + Backend AddressType = iota + // GRPCLB indicates the address is for a grpclb load balancer. 
+ GRPCLB +) + +// Address represents a server the client connects to. +// This is the EXPERIMENTAL API and may be changed or extended in the future. +type Address struct { + // Addr is the server address on which a connection will be established. + Addr string + // Type is the type of this address. + Type AddressType + // ServerName is the name of this address. + // + // e.g. if Type is GRPCLB, ServerName should be the name of the remote load + // balancer, not the name of the backend. + ServerName string + // Metadata is the information associated with Addr, which may be used + // to make load balancing decision. + Metadata interface{} +} + +// BuildOption includes additional information for the builder to create +// the resolver. +type BuildOption struct { + // DisableServiceConfig indicates whether resolver should fetch service config data. + DisableServiceConfig bool +} + +// State contains the current Resolver state relevant to the ClientConn. +type State struct { + Addresses []Address // Resolved addresses for the target + ServiceConfig string // JSON representation of the service config + + // TODO: add Err error + // TODO: add ParsedServiceConfig interface{} +} + +// ClientConn contains the callbacks for resolver to notify any updates +// to the gRPC ClientConn. +// +// This interface is to be implemented by gRPC. Users should not need a +// brand new implementation of this interface. For the situations like +// testing, the new implementation should embed this interface. This allows +// gRPC to add new methods to this interface. +type ClientConn interface { + // UpdateState updates the state of the ClientConn appropriately. + UpdateState(State) + // NewAddress is called by resolver to notify ClientConn a new list + // of resolved addresses. + // The address list should be the complete list of resolved addresses. + // + // Deprecated: Use UpdateState instead. + NewAddress(addresses []Address) + // NewServiceConfig is called by resolver to notify ClientConn a new + // service config. The service config should be provided as a json string. + // + // Deprecated: Use UpdateState instead. + NewServiceConfig(serviceConfig string) +} + +// Target represents a target for gRPC, as specified in: +// https://github.com/grpc/grpc/blob/master/doc/naming.md. +type Target struct { + Scheme string + Authority string + Endpoint string +} + +// Builder creates a resolver that will be used to watch name resolution updates. +type Builder interface { + // Build creates a new resolver for the given target. + // + // gRPC dial calls Build synchronously, and fails if the returned error is + // not nil. + Build(target Target, cc ClientConn, opts BuildOption) (Resolver, error) + // Scheme returns the scheme supported by this resolver. + // Scheme is defined at https://github.com/grpc/grpc/blob/master/doc/naming.md. + Scheme() string +} + +// ResolveNowOption includes additional information for ResolveNow. +type ResolveNowOption struct{} + +// Resolver watches for the updates on the specified target. +// Updates include address updates and service config updates. +type Resolver interface { + // ResolveNow will be called by gRPC to try to resolve the target name + // again. It's just a hint, resolver can ignore this if it's not necessary. + // + // It could be called multiple times concurrently. + ResolveNow(ResolveNowOption) + // Close closes the resolver. + Close() +} + +// UnregisterForTesting removes the resolver builder with the given scheme from the +// resolver map. 
+// This function is for testing only. +func UnregisterForTesting(scheme string) { + delete(m, scheme) +} diff --git a/vendor/google.golang.org/grpc/resolver_conn_wrapper.go b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go new file mode 100644 index 0000000..e9cef3a --- /dev/null +++ b/vendor/google.golang.org/grpc/resolver_conn_wrapper.go @@ -0,0 +1,165 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "fmt" + "strings" + "sync/atomic" + + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/resolver" +) + +// ccResolverWrapper is a wrapper on top of cc for resolvers. +// It implements resolver.ClientConnection interface. +type ccResolverWrapper struct { + cc *ClientConn + resolver resolver.Resolver + addrCh chan []resolver.Address + scCh chan string + done uint32 // accessed atomically; set to 1 when closed. + curState resolver.State +} + +// split2 returns the values from strings.SplitN(s, sep, 2). +// If sep is not found, it returns ("", "", false) instead. +func split2(s, sep string) (string, string, bool) { + spl := strings.SplitN(s, sep, 2) + if len(spl) < 2 { + return "", "", false + } + return spl[0], spl[1], true +} + +// parseTarget splits target into a struct containing scheme, authority and +// endpoint. +// +// If target is not a valid scheme://authority/endpoint, it returns {Endpoint: +// target}. +func parseTarget(target string) (ret resolver.Target) { + var ok bool + ret.Scheme, ret.Endpoint, ok = split2(target, "://") + if !ok { + return resolver.Target{Endpoint: target} + } + ret.Authority, ret.Endpoint, ok = split2(ret.Endpoint, "/") + if !ok { + return resolver.Target{Endpoint: target} + } + return ret +} + +// newCCResolverWrapper parses cc.target for scheme and gets the resolver +// builder for this scheme and builds the resolver. The monitoring goroutine +// for it is not started yet and can be created by calling start(). +// +// If withResolverBuilder dial option is set, the specified resolver will be +// used instead. 
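The Builder, Resolver and ClientConn interfaces above, together with the passthrough resolver, define the full contract a custom name resolver has to satisfy. The following sketch registers a hypothetical "static" scheme that always reports a fixed address list; the scheme name and addresses are illustrative and not part of the vendored code:

package staticres

import "google.golang.org/grpc/resolver"

// staticBuilder resolves every target to a fixed address list.
type staticBuilder struct {
	addrs []string
}

func (b *staticBuilder) Build(target resolver.Target, cc resolver.ClientConn, opts resolver.BuildOption) (resolver.Resolver, error) {
	r := &staticResolver{cc: cc, addrs: b.addrs}
	r.push()
	return r, nil
}

func (b *staticBuilder) Scheme() string { return "static" }

type staticResolver struct {
	cc    resolver.ClientConn
	addrs []string
}

// push sends the fixed address list to the ClientConn.
func (r *staticResolver) push() {
	state := resolver.State{}
	for _, a := range r.addrs {
		state.Addresses = append(state.Addresses, resolver.Address{Addr: a})
	}
	r.cc.UpdateState(state)
}

// ResolveNow simply re-sends the fixed list; nothing needs to be looked up.
func (r *staticResolver) ResolveNow(resolver.ResolveNowOption) { r.push() }

func (r *staticResolver) Close() {}

func init() {
	// Register must run during package initialization; it is not thread-safe.
	resolver.Register(&staticBuilder{addrs: []string{"10.0.0.1:443", "10.0.0.2:443"}})
}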
+func newCCResolverWrapper(cc *ClientConn) (*ccResolverWrapper, error) { + rb := cc.dopts.resolverBuilder + if rb == nil { + return nil, fmt.Errorf("could not get resolver for scheme: %q", cc.parsedTarget.Scheme) + } + + ccr := &ccResolverWrapper{ + cc: cc, + addrCh: make(chan []resolver.Address, 1), + scCh: make(chan string, 1), + } + + var err error + ccr.resolver, err = rb.Build(cc.parsedTarget, ccr, resolver.BuildOption{DisableServiceConfig: cc.dopts.disableServiceConfig}) + if err != nil { + return nil, err + } + return ccr, nil +} + +func (ccr *ccResolverWrapper) resolveNow(o resolver.ResolveNowOption) { + ccr.resolver.ResolveNow(o) +} + +func (ccr *ccResolverWrapper) close() { + ccr.resolver.Close() + atomic.StoreUint32(&ccr.done, 1) +} + +func (ccr *ccResolverWrapper) isDone() bool { + return atomic.LoadUint32(&ccr.done) == 1 +} + +func (ccr *ccResolverWrapper) UpdateState(s resolver.State) { + if ccr.isDone() { + return + } + grpclog.Infof("ccResolverWrapper: sending update to cc: %v", s) + if channelz.IsOn() { + ccr.addChannelzTraceEvent(s) + } + ccr.cc.updateResolverState(s) + ccr.curState = s +} + +// NewAddress is called by the resolver implementation to send addresses to gRPC. +func (ccr *ccResolverWrapper) NewAddress(addrs []resolver.Address) { + if ccr.isDone() { + return + } + grpclog.Infof("ccResolverWrapper: sending new addresses to cc: %v", addrs) + if channelz.IsOn() { + ccr.addChannelzTraceEvent(resolver.State{Addresses: addrs, ServiceConfig: ccr.curState.ServiceConfig}) + } + ccr.curState.Addresses = addrs + ccr.cc.updateResolverState(ccr.curState) +} + +// NewServiceConfig is called by the resolver implementation to send service +// configs to gRPC. +func (ccr *ccResolverWrapper) NewServiceConfig(sc string) { + if ccr.isDone() { + return + } + grpclog.Infof("ccResolverWrapper: got new service config: %v", sc) + if channelz.IsOn() { + ccr.addChannelzTraceEvent(resolver.State{Addresses: ccr.curState.Addresses, ServiceConfig: sc}) + } + ccr.curState.ServiceConfig = sc + ccr.cc.updateResolverState(ccr.curState) +} + +func (ccr *ccResolverWrapper) addChannelzTraceEvent(s resolver.State) { + if s.ServiceConfig == ccr.curState.ServiceConfig && (len(ccr.curState.Addresses) == 0) == (len(s.Addresses) == 0) { + return + } + var updates []string + if s.ServiceConfig != ccr.curState.ServiceConfig { + updates = append(updates, "service config updated") + } + if len(ccr.curState.Addresses) > 0 && len(s.Addresses) == 0 { + updates = append(updates, "resolver returned an empty address list") + } else if len(ccr.curState.Addresses) == 0 && len(s.Addresses) > 0 { + updates = append(updates, "resolver returned new addresses") + } + channelz.AddTraceEvent(ccr.cc.channelzID, &channelz.TraceEventDesc{ + Desc: fmt.Sprintf("Resolver state updated: %+v (%v)", s, strings.Join(updates, "; ")), + Severity: channelz.CtINFO, + }) +} diff --git a/vendor/google.golang.org/grpc/rpc_util.go b/vendor/google.golang.org/grpc/rpc_util.go new file mode 100644 index 0000000..2a59562 --- /dev/null +++ b/vendor/google.golang.org/grpc/rpc_util.go @@ -0,0 +1,843 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "bytes" + "compress/gzip" + "context" + "encoding/binary" + "fmt" + "io" + "io/ioutil" + "math" + "net/url" + "strings" + "sync" + "time" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/encoding" + "google.golang.org/grpc/encoding/proto" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" +) + +// Compressor defines the interface gRPC uses to compress a message. +// +// Deprecated: use package encoding. +type Compressor interface { + // Do compresses p into w. + Do(w io.Writer, p []byte) error + // Type returns the compression algorithm the Compressor uses. + Type() string +} + +type gzipCompressor struct { + pool sync.Pool +} + +// NewGZIPCompressor creates a Compressor based on GZIP. +// +// Deprecated: use package encoding/gzip. +func NewGZIPCompressor() Compressor { + c, _ := NewGZIPCompressorWithLevel(gzip.DefaultCompression) + return c +} + +// NewGZIPCompressorWithLevel is like NewGZIPCompressor but specifies the gzip compression level instead +// of assuming DefaultCompression. +// +// The error returned will be nil if the level is valid. +// +// Deprecated: use package encoding/gzip. +func NewGZIPCompressorWithLevel(level int) (Compressor, error) { + if level < gzip.DefaultCompression || level > gzip.BestCompression { + return nil, fmt.Errorf("grpc: invalid compression level: %d", level) + } + return &gzipCompressor{ + pool: sync.Pool{ + New: func() interface{} { + w, err := gzip.NewWriterLevel(ioutil.Discard, level) + if err != nil { + panic(err) + } + return w + }, + }, + }, nil +} + +func (c *gzipCompressor) Do(w io.Writer, p []byte) error { + z := c.pool.Get().(*gzip.Writer) + defer c.pool.Put(z) + z.Reset(w) + if _, err := z.Write(p); err != nil { + return err + } + return z.Close() +} + +func (c *gzipCompressor) Type() string { + return "gzip" +} + +// Decompressor defines the interface gRPC uses to decompress a message. +// +// Deprecated: use package encoding. +type Decompressor interface { + // Do reads the data from r and uncompress them. + Do(r io.Reader) ([]byte, error) + // Type returns the compression algorithm the Decompressor uses. + Type() string +} + +type gzipDecompressor struct { + pool sync.Pool +} + +// NewGZIPDecompressor creates a Decompressor based on GZIP. +// +// Deprecated: use package encoding/gzip. 
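The deprecated Compressor interface above can still be used directly; the Deprecated notes point at the encoding/gzip package as the replacement. A small sketch of compressing a payload into an in-memory buffer with the gzip Compressor:

package main

import (
	"bytes"
	"fmt"

	"google.golang.org/grpc"
)

func main() {
	// Do writes the compressed form of the payload into the provided writer;
	// here the output just goes into an in-memory buffer.
	c := grpc.NewGZIPCompressor()
	var buf bytes.Buffer
	if err := c.Do(&buf, []byte("some payload")); err != nil {
		panic(err)
	}
	fmt.Println(c.Type(), buf.Len(), "compressed bytes")
}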
+func NewGZIPDecompressor() Decompressor { + return &gzipDecompressor{} +} + +func (d *gzipDecompressor) Do(r io.Reader) ([]byte, error) { + var z *gzip.Reader + switch maybeZ := d.pool.Get().(type) { + case nil: + newZ, err := gzip.NewReader(r) + if err != nil { + return nil, err + } + z = newZ + case *gzip.Reader: + z = maybeZ + if err := z.Reset(r); err != nil { + d.pool.Put(z) + return nil, err + } + } + + defer func() { + z.Close() + d.pool.Put(z) + }() + return ioutil.ReadAll(z) +} + +func (d *gzipDecompressor) Type() string { + return "gzip" +} + +// callInfo contains all related configuration and information about an RPC. +type callInfo struct { + compressorType string + failFast bool + stream ClientStream + maxReceiveMessageSize *int + maxSendMessageSize *int + creds credentials.PerRPCCredentials + contentSubtype string + codec baseCodec + maxRetryRPCBufferSize int +} + +func defaultCallInfo() *callInfo { + return &callInfo{ + failFast: true, + maxRetryRPCBufferSize: 256 * 1024, // 256KB + } +} + +// CallOption configures a Call before it starts or extracts information from +// a Call after it completes. +type CallOption interface { + // before is called before the call is sent to any server. If before + // returns a non-nil error, the RPC fails with that error. + before(*callInfo) error + + // after is called after the call has completed. after cannot return an + // error, so any failures should be reported via output parameters. + after(*callInfo) +} + +// EmptyCallOption does not alter the Call configuration. +// It can be embedded in another structure to carry satellite data for use +// by interceptors. +type EmptyCallOption struct{} + +func (EmptyCallOption) before(*callInfo) error { return nil } +func (EmptyCallOption) after(*callInfo) {} + +// Header returns a CallOptions that retrieves the header metadata +// for a unary RPC. +func Header(md *metadata.MD) CallOption { + return HeaderCallOption{HeaderAddr: md} +} + +// HeaderCallOption is a CallOption for collecting response header metadata. +// The metadata field will be populated *after* the RPC completes. +// This is an EXPERIMENTAL API. +type HeaderCallOption struct { + HeaderAddr *metadata.MD +} + +func (o HeaderCallOption) before(c *callInfo) error { return nil } +func (o HeaderCallOption) after(c *callInfo) { + if c.stream != nil { + *o.HeaderAddr, _ = c.stream.Header() + } +} + +// Trailer returns a CallOptions that retrieves the trailer metadata +// for a unary RPC. +func Trailer(md *metadata.MD) CallOption { + return TrailerCallOption{TrailerAddr: md} +} + +// TrailerCallOption is a CallOption for collecting response trailer metadata. +// The metadata field will be populated *after* the RPC completes. +// This is an EXPERIMENTAL API. +type TrailerCallOption struct { + TrailerAddr *metadata.MD +} + +func (o TrailerCallOption) before(c *callInfo) error { return nil } +func (o TrailerCallOption) after(c *callInfo) { + if c.stream != nil { + *o.TrailerAddr = c.stream.Trailer() + } +} + +// Peer returns a CallOption that retrieves peer information for a unary RPC. +// The peer field will be populated *after* the RPC completes. +func Peer(p *peer.Peer) CallOption { + return PeerCallOption{PeerAddr: p} +} + +// PeerCallOption is a CallOption for collecting the identity of the remote +// peer. The peer field will be populated *after* the RPC completes. +// This is an EXPERIMENTAL API. 
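The Header, Trailer and Peer call options above only populate their targets once the RPC has completed. A sketch of passing all three on a unary call; the method name and message types are placeholders, and cc is assumed to be an already-dialed ClientConn:

package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/metadata"
	"google.golang.org/grpc/peer"
)

// callWithMetadata invokes a unary RPC and collects header, trailer and peer
// information via call options.
func callWithMetadata(ctx context.Context, cc *grpc.ClientConn, req, reply interface{}) error {
	var header, trailer metadata.MD
	var p peer.Peer
	err := cc.Invoke(ctx, "/example.Service/Method", req, reply,
		grpc.Header(&header),   // response header metadata
		grpc.Trailer(&trailer), // response trailer metadata
		grpc.Peer(&p),          // identity of the remote peer
	)
	if err != nil {
		return err
	}
	// The targets are populated now that the RPC has completed.
	log.Printf("peer=%v header=%v trailer=%v", p.Addr, header, trailer)
	return nil
}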
+type PeerCallOption struct { + PeerAddr *peer.Peer +} + +func (o PeerCallOption) before(c *callInfo) error { return nil } +func (o PeerCallOption) after(c *callInfo) { + if c.stream != nil { + if x, ok := peer.FromContext(c.stream.Context()); ok { + *o.PeerAddr = *x + } + } +} + +// WaitForReady configures the action to take when an RPC is attempted on broken +// connections or unreachable servers. If waitForReady is false, the RPC will fail +// immediately. Otherwise, the RPC client will block the call until a +// connection is available (or the call is canceled or times out) and will +// retry the call if it fails due to a transient error. gRPC will not retry if +// data was written to the wire unless the server indicates it did not process +// the data. Please refer to +// https://github.com/grpc/grpc/blob/master/doc/wait-for-ready.md. +// +// By default, RPCs don't "wait for ready". +func WaitForReady(waitForReady bool) CallOption { + return FailFastCallOption{FailFast: !waitForReady} +} + +// FailFast is the opposite of WaitForReady. +// +// Deprecated: use WaitForReady. +func FailFast(failFast bool) CallOption { + return FailFastCallOption{FailFast: failFast} +} + +// FailFastCallOption is a CallOption for indicating whether an RPC should fail +// fast or not. +// This is an EXPERIMENTAL API. +type FailFastCallOption struct { + FailFast bool +} + +func (o FailFastCallOption) before(c *callInfo) error { + c.failFast = o.FailFast + return nil +} +func (o FailFastCallOption) after(c *callInfo) {} + +// MaxCallRecvMsgSize returns a CallOption which sets the maximum message size the client can receive. +func MaxCallRecvMsgSize(s int) CallOption { + return MaxRecvMsgSizeCallOption{MaxRecvMsgSize: s} +} + +// MaxRecvMsgSizeCallOption is a CallOption that indicates the maximum message +// size the client can receive. +// This is an EXPERIMENTAL API. +type MaxRecvMsgSizeCallOption struct { + MaxRecvMsgSize int +} + +func (o MaxRecvMsgSizeCallOption) before(c *callInfo) error { + c.maxReceiveMessageSize = &o.MaxRecvMsgSize + return nil +} +func (o MaxRecvMsgSizeCallOption) after(c *callInfo) {} + +// MaxCallSendMsgSize returns a CallOption which sets the maximum message size the client can send. +func MaxCallSendMsgSize(s int) CallOption { + return MaxSendMsgSizeCallOption{MaxSendMsgSize: s} +} + +// MaxSendMsgSizeCallOption is a CallOption that indicates the maximum message +// size the client can send. +// This is an EXPERIMENTAL API. +type MaxSendMsgSizeCallOption struct { + MaxSendMsgSize int +} + +func (o MaxSendMsgSizeCallOption) before(c *callInfo) error { + c.maxSendMessageSize = &o.MaxSendMsgSize + return nil +} +func (o MaxSendMsgSizeCallOption) after(c *callInfo) {} + +// PerRPCCredentials returns a CallOption that sets credentials.PerRPCCredentials +// for a call. +func PerRPCCredentials(creds credentials.PerRPCCredentials) CallOption { + return PerRPCCredsCallOption{Creds: creds} +} + +// PerRPCCredsCallOption is a CallOption that indicates the per-RPC +// credentials to use for the call. +// This is an EXPERIMENTAL API. +type PerRPCCredsCallOption struct { + Creds credentials.PerRPCCredentials +} + +func (o PerRPCCredsCallOption) before(c *callInfo) error { + c.creds = o.Creds + return nil +} +func (o PerRPCCredsCallOption) after(c *callInfo) {} + +// UseCompressor returns a CallOption which sets the compressor used when +// sending the request. If WithCompressor is also set, UseCompressor has +// higher priority. +// +// This API is EXPERIMENTAL. 
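WaitForReady and the message-size call options are often applied connection-wide rather than per call. A sketch using WithDefaultCallOptions, a dial option from this same package that is not shown in this hunk; the size limit and target are illustrative:

package main

import "google.golang.org/grpc"

func dial(target string) (*grpc.ClientConn, error) {
	// WithDefaultCallOptions applies these CallOptions to every RPC on the
	// connection unless they are overridden per call.
	return grpc.Dial(target,
		grpc.WithInsecure(),
		grpc.WithDefaultCallOptions(
			grpc.WaitForReady(true),
			grpc.MaxCallRecvMsgSize(16*1024*1024),
		),
	)
}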
+func UseCompressor(name string) CallOption { + return CompressorCallOption{CompressorType: name} +} + +// CompressorCallOption is a CallOption that indicates the compressor to use. +// This is an EXPERIMENTAL API. +type CompressorCallOption struct { + CompressorType string +} + +func (o CompressorCallOption) before(c *callInfo) error { + c.compressorType = o.CompressorType + return nil +} +func (o CompressorCallOption) after(c *callInfo) {} + +// CallContentSubtype returns a CallOption that will set the content-subtype +// for a call. For example, if content-subtype is "json", the Content-Type over +// the wire will be "application/grpc+json". The content-subtype is converted +// to lowercase before being included in Content-Type. See Content-Type on +// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for +// more details. +// +// If ForceCodec is not also used, the content-subtype will be used to look up +// the Codec to use in the registry controlled by RegisterCodec. See the +// documentation on RegisterCodec for details on registration. The lookup of +// content-subtype is case-insensitive. If no such Codec is found, the call +// will result in an error with code codes.Internal. +// +// If ForceCodec is also used, that Codec will be used for all request and +// response messages, with the content-subtype set to the given contentSubtype +// here for requests. +func CallContentSubtype(contentSubtype string) CallOption { + return ContentSubtypeCallOption{ContentSubtype: strings.ToLower(contentSubtype)} +} + +// ContentSubtypeCallOption is a CallOption that indicates the content-subtype +// used for marshaling messages. +// This is an EXPERIMENTAL API. +type ContentSubtypeCallOption struct { + ContentSubtype string +} + +func (o ContentSubtypeCallOption) before(c *callInfo) error { + c.contentSubtype = o.ContentSubtype + return nil +} +func (o ContentSubtypeCallOption) after(c *callInfo) {} + +// ForceCodec returns a CallOption that will set the given Codec to be +// used for all request and response messages for a call. The result of calling +// String() will be used as the content-subtype in a case-insensitive manner. +// +// See Content-Type on +// https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests for +// more details. Also see the documentation on RegisterCodec and +// CallContentSubtype for more details on the interaction between Codec and +// content-subtype. +// +// This function is provided for advanced users; prefer to use only +// CallContentSubtype to select a registered codec instead. +// +// This is an EXPERIMENTAL API. +func ForceCodec(codec encoding.Codec) CallOption { + return ForceCodecCallOption{Codec: codec} +} + +// ForceCodecCallOption is a CallOption that indicates the codec used for +// marshaling messages. +// +// This is an EXPERIMENTAL API. +type ForceCodecCallOption struct { + Codec encoding.Codec +} + +func (o ForceCodecCallOption) before(c *callInfo) error { + c.codec = o.Codec + return nil +} +func (o ForceCodecCallOption) after(c *callInfo) {} + +// CallCustomCodec behaves like ForceCodec, but accepts a grpc.Codec instead of +// an encoding.Codec. +// +// Deprecated: use ForceCodec instead. +func CallCustomCodec(codec Codec) CallOption { + return CustomCodecCallOption{Codec: codec} +} + +// CustomCodecCallOption is a CallOption that indicates the codec used for +// marshaling messages. +// +// This is an EXPERIMENTAL API. 
+type CustomCodecCallOption struct { + Codec Codec +} + +func (o CustomCodecCallOption) before(c *callInfo) error { + c.codec = o.Codec + return nil +} +func (o CustomCodecCallOption) after(c *callInfo) {} + +// MaxRetryRPCBufferSize returns a CallOption that limits the amount of memory +// used for buffering this RPC's requests for retry purposes. +// +// This API is EXPERIMENTAL. +func MaxRetryRPCBufferSize(bytes int) CallOption { + return MaxRetryRPCBufferSizeCallOption{bytes} +} + +// MaxRetryRPCBufferSizeCallOption is a CallOption indicating the amount of +// memory to be used for caching this RPC for retry purposes. +// This is an EXPERIMENTAL API. +type MaxRetryRPCBufferSizeCallOption struct { + MaxRetryRPCBufferSize int +} + +func (o MaxRetryRPCBufferSizeCallOption) before(c *callInfo) error { + c.maxRetryRPCBufferSize = o.MaxRetryRPCBufferSize + return nil +} +func (o MaxRetryRPCBufferSizeCallOption) after(c *callInfo) {} + +// The format of the payload: compressed or not? +type payloadFormat uint8 + +const ( + compressionNone payloadFormat = 0 // no compression + compressionMade payloadFormat = 1 // compressed +) + +// parser reads complete gRPC messages from the underlying reader. +type parser struct { + // r is the underlying reader. + // See the comment on recvMsg for the permissible + // error types. + r io.Reader + + // The header of a gRPC message. Find more detail at + // https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md + header [5]byte +} + +// recvMsg reads a complete gRPC message from the stream. +// +// It returns the message and its payload (compression/encoding) +// format. The caller owns the returned msg memory. +// +// If there is an error, possible values are: +// * io.EOF, when no messages remain +// * io.ErrUnexpectedEOF +// * of type transport.ConnectionError +// * an error from the status package +// No other error values or types must be returned, which also means +// that the underlying io.Reader must not return an incompatible +// error. +func (p *parser) recvMsg(maxReceiveMessageSize int) (pf payloadFormat, msg []byte, err error) { + if _, err := p.r.Read(p.header[:]); err != nil { + return 0, nil, err + } + + pf = payloadFormat(p.header[0]) + length := binary.BigEndian.Uint32(p.header[1:]) + + if length == 0 { + return pf, nil, nil + } + if int64(length) > int64(maxInt) { + return 0, nil, status.Errorf(codes.ResourceExhausted, "grpc: received message larger than max length allowed on current machine (%d vs. %d)", length, maxInt) + } + if int(length) > maxReceiveMessageSize { + return 0, nil, status.Errorf(codes.ResourceExhausted, "grpc: received message larger than max (%d vs. %d)", length, maxReceiveMessageSize) + } + // TODO(bradfitz,zhaoq): garbage. reuse buffer after proto decoding instead + // of making it for each message: + msg = make([]byte, int(length)) + if _, err := p.r.Read(msg); err != nil { + if err == io.EOF { + err = io.ErrUnexpectedEOF + } + return 0, nil, err + } + return pf, msg, nil +} + +// encode serializes msg and returns a buffer containing the message, or an +// error if it is too large to be transmitted by grpc. If msg is nil, it +// generates an empty message. 
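recvMsg above reads the standard gRPC wire framing: a one-byte payload-format flag followed by a big-endian uint32 length, then the payload itself. A standalone sketch, not taken from the gRPC sources, that builds such a frame by hand:

package main

import (
	"encoding/binary"
	"fmt"
)

// frame prepends the 5-byte gRPC message header that recvMsg expects: one
// byte for the payload format (0 = uncompressed, 1 = compressed) followed by
// the big-endian uint32 length of the payload.
func frame(payload []byte, compressed bool) []byte {
	hdr := make([]byte, 5, 5+len(payload))
	if compressed {
		hdr[0] = 1
	}
	binary.BigEndian.PutUint32(hdr[1:], uint32(len(payload)))
	return append(hdr, payload...)
}

func main() {
	f := frame([]byte("hello"), false)
	fmt.Printf("% x\n", f) // 00 00 00 00 05 68 65 6c 6c 6f
}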
+func encode(c baseCodec, msg interface{}) ([]byte, error) { + if msg == nil { // NOTE: typed nils will not be caught by this check + return nil, nil + } + b, err := c.Marshal(msg) + if err != nil { + return nil, status.Errorf(codes.Internal, "grpc: error while marshaling: %v", err.Error()) + } + if uint(len(b)) > math.MaxUint32 { + return nil, status.Errorf(codes.ResourceExhausted, "grpc: message too large (%d bytes)", len(b)) + } + return b, nil +} + +// compress returns the input bytes compressed by compressor or cp. If both +// compressors are nil, returns nil. +// +// TODO(dfawley): eliminate cp parameter by wrapping Compressor in an encoding.Compressor. +func compress(in []byte, cp Compressor, compressor encoding.Compressor) ([]byte, error) { + if compressor == nil && cp == nil { + return nil, nil + } + wrapErr := func(err error) error { + return status.Errorf(codes.Internal, "grpc: error while compressing: %v", err.Error()) + } + cbuf := &bytes.Buffer{} + if compressor != nil { + z, err := compressor.Compress(cbuf) + if err != nil { + return nil, wrapErr(err) + } + if _, err := z.Write(in); err != nil { + return nil, wrapErr(err) + } + if err := z.Close(); err != nil { + return nil, wrapErr(err) + } + } else { + if err := cp.Do(cbuf, in); err != nil { + return nil, wrapErr(err) + } + } + return cbuf.Bytes(), nil +} + +const ( + payloadLen = 1 + sizeLen = 4 + headerLen = payloadLen + sizeLen +) + +// msgHeader returns a 5-byte header for the message being transmitted and the +// payload, which is compData if non-nil or data otherwise. +func msgHeader(data, compData []byte) (hdr []byte, payload []byte) { + hdr = make([]byte, headerLen) + if compData != nil { + hdr[0] = byte(compressionMade) + data = compData + } else { + hdr[0] = byte(compressionNone) + } + + // Write length of payload into buf + binary.BigEndian.PutUint32(hdr[payloadLen:], uint32(len(data))) + return hdr, data +} + +func outPayload(client bool, msg interface{}, data, payload []byte, t time.Time) *stats.OutPayload { + return &stats.OutPayload{ + Client: client, + Payload: msg, + Data: data, + Length: len(data), + WireLength: len(payload) + headerLen, + SentTime: t, + } +} + +func checkRecvPayload(pf payloadFormat, recvCompress string, haveCompressor bool) *status.Status { + switch pf { + case compressionNone: + case compressionMade: + if recvCompress == "" || recvCompress == encoding.Identity { + return status.New(codes.Internal, "grpc: compressed flag set with identity or empty encoding") + } + if !haveCompressor { + return status.Newf(codes.Unimplemented, "grpc: Decompressor is not installed for grpc-encoding %q", recvCompress) + } + default: + return status.Newf(codes.Internal, "grpc: received unexpected payload format %d", pf) + } + return nil +} + +type payloadInfo struct { + wireLength int // The compressed length got from wire. + uncompressedBytes []byte +} + +func recvAndDecompress(p *parser, s *transport.Stream, dc Decompressor, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) ([]byte, error) { + pf, d, err := p.recvMsg(maxReceiveMessageSize) + if err != nil { + return nil, err + } + if payInfo != nil { + payInfo.wireLength = len(d) + } + + if st := checkRecvPayload(pf, s.RecvCompress(), compressor != nil || dc != nil); st != nil { + return nil, st.Err() + } + + if pf == compressionMade { + // To match legacy behavior, if the decompressor is set by WithDecompressor or RPCDecompressor, + // use this decompressor as the default. 
+ if dc != nil { + d, err = dc.Do(bytes.NewReader(d)) + if err != nil { + return nil, status.Errorf(codes.Internal, "grpc: failed to decompress the received message %v", err) + } + } else { + dcReader, err := compressor.Decompress(bytes.NewReader(d)) + if err != nil { + return nil, status.Errorf(codes.Internal, "grpc: failed to decompress the received message %v", err) + } + // Read from LimitReader with limit max+1. So if the underlying + // reader is over limit, the result will be bigger than max. + d, err = ioutil.ReadAll(io.LimitReader(dcReader, int64(maxReceiveMessageSize)+1)) + if err != nil { + return nil, status.Errorf(codes.Internal, "grpc: failed to decompress the received message %v", err) + } + } + } + if len(d) > maxReceiveMessageSize { + // TODO: Revisit the error code. Currently keep it consistent with java + // implementation. + return nil, status.Errorf(codes.ResourceExhausted, "grpc: received message larger than max (%d vs. %d)", len(d), maxReceiveMessageSize) + } + return d, nil +} + +// For the two compressor parameters, both should not be set, but if they are, +// dc takes precedence over compressor. +// TODO(dfawley): wrap the old compressor/decompressor using the new API? +func recv(p *parser, c baseCodec, s *transport.Stream, dc Decompressor, m interface{}, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) error { + d, err := recvAndDecompress(p, s, dc, maxReceiveMessageSize, payInfo, compressor) + if err != nil { + return err + } + if err := c.Unmarshal(d, m); err != nil { + return status.Errorf(codes.Internal, "grpc: failed to unmarshal the received message %v", err) + } + if payInfo != nil { + payInfo.uncompressedBytes = d + } + return nil +} + +type rpcInfo struct { + failfast bool +} + +type rpcInfoContextKey struct{} + +func newContextWithRPCInfo(ctx context.Context, failfast bool) context.Context { + return context.WithValue(ctx, rpcInfoContextKey{}, &rpcInfo{failfast: failfast}) +} + +func rpcInfoFromContext(ctx context.Context) (s *rpcInfo, ok bool) { + s, ok = ctx.Value(rpcInfoContextKey{}).(*rpcInfo) + return +} + +// Code returns the error code for err if it was produced by the rpc system. +// Otherwise, it returns codes.Unknown. +// +// Deprecated: use status.Code instead. +func Code(err error) codes.Code { + return status.Code(err) +} + +// ErrorDesc returns the error description of err if it was produced by the rpc system. +// Otherwise, it returns err.Error() or empty string when err is nil. +// +// Deprecated: use status.Convert and Message method instead. +func ErrorDesc(err error) string { + return status.Convert(err).Message() +} + +// Errorf returns an error containing an error code and a description; +// Errorf returns nil if c is OK. +// +// Deprecated: use status.Errorf instead. +func Errorf(c codes.Code, format string, a ...interface{}) error { + return status.Errorf(c, format, a...) +} + +// toRPCErr converts an error into an error from the status package. 
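Code, ErrorDesc and Errorf above are thin deprecated wrappers around the status package, and toRPCErr below normalizes transport and context errors into status errors. A sketch of the non-deprecated way to inspect an RPC error on the caller side:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// classify uses status.Code and status.Convert, which the deprecated
// grpc.Code and grpc.ErrorDesc helpers merely wrap.
func classify(err error) {
	switch status.Code(err) {
	case codes.OK:
		fmt.Println("success")
	case codes.DeadlineExceeded:
		fmt.Println("deadline exceeded:", status.Convert(err).Message())
	case codes.Unavailable:
		fmt.Println("transient failure:", status.Convert(err).Message())
	default:
		fmt.Println("rpc failed:", err)
	}
}

func main() {
	classify(nil) // status.Code(nil) is codes.OK
	classify(status.Error(codes.Unavailable, "connection reset"))
	classify(status.Error(codes.DeadlineExceeded, "request took too long"))
}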
+func toRPCErr(err error) error { + if err == nil || err == io.EOF { + return err + } + if err == io.ErrUnexpectedEOF { + return status.Error(codes.Internal, err.Error()) + } + if _, ok := status.FromError(err); ok { + return err + } + switch e := err.(type) { + case transport.ConnectionError: + return status.Error(codes.Unavailable, e.Desc) + default: + switch err { + case context.DeadlineExceeded: + return status.Error(codes.DeadlineExceeded, err.Error()) + case context.Canceled: + return status.Error(codes.Canceled, err.Error()) + } + } + return status.Error(codes.Unknown, err.Error()) +} + +// setCallInfoCodec should only be called after CallOptions have been applied. +func setCallInfoCodec(c *callInfo) error { + if c.codec != nil { + // codec was already set by a CallOption; use it. + return nil + } + + if c.contentSubtype == "" { + // No codec specified in CallOptions; use proto by default. + c.codec = encoding.GetCodec(proto.Name) + return nil + } + + // c.contentSubtype is already lowercased in CallContentSubtype + c.codec = encoding.GetCodec(c.contentSubtype) + if c.codec == nil { + return status.Errorf(codes.Internal, "no codec registered for content-subtype %s", c.contentSubtype) + } + return nil +} + +// parseDialTarget returns the network and address to pass to dialer +func parseDialTarget(target string) (net string, addr string) { + net = "tcp" + + m1 := strings.Index(target, ":") + m2 := strings.Index(target, ":/") + + // handle unix:addr which will fail with url.Parse + if m1 >= 0 && m2 < 0 { + if n := target[0:m1]; n == "unix" { + net = n + addr = target[m1+1:] + return net, addr + } + } + if m2 >= 0 { + t, err := url.Parse(target) + if err != nil { + return net, target + } + scheme := t.Scheme + addr = t.Path + if scheme == "unix" { + net = scheme + if addr == "" { + addr = t.Host + } + return net, addr + } + } + + return net, target +} + +// channelzData is used to store channelz related data for ClientConn, addrConn and Server. +// These fields cannot be embedded in the original structs (e.g. ClientConn), since to do atomic +// operation on int64 variable on 32-bit machine, user is responsible to enforce memory alignment. +// Here, by grouping those int64 fields inside a struct, we are enforcing the alignment. +type channelzData struct { + callsStarted int64 + callsFailed int64 + callsSucceeded int64 + // lastCallStartedTime stores the timestamp that last call starts. It is of int64 type instead of + // time.Time since it's more costly to atomically update time.Time variable than int64 variable. + lastCallStartedTime int64 +} + +// The SupportPackageIsVersion variables are referenced from generated protocol +// buffer files to ensure compatibility with the gRPC version used. The latest +// support package version is 5. +// +// Older versions are kept for compatibility. They may be removed if +// compatibility cannot be maintained. +// +// These constants should not be referenced from any other code. +const ( + SupportPackageIsVersion3 = true + SupportPackageIsVersion4 = true + SupportPackageIsVersion5 = true +) + +const grpcUA = "grpc-go/" + Version diff --git a/vendor/google.golang.org/grpc/server.go b/vendor/google.golang.org/grpc/server.go new file mode 100644 index 0000000..8115828 --- /dev/null +++ b/vendor/google.golang.org/grpc/server.go @@ -0,0 +1,1498 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "errors" + "fmt" + "io" + "math" + "net" + "net/http" + "reflect" + "runtime" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/net/trace" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/encoding" + "google.golang.org/grpc/encoding/proto" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/binarylog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" + "google.golang.org/grpc/tap" +) + +const ( + defaultServerMaxReceiveMessageSize = 1024 * 1024 * 4 + defaultServerMaxSendMessageSize = math.MaxInt32 +) + +type methodHandler func(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor UnaryServerInterceptor) (interface{}, error) + +// MethodDesc represents an RPC service's method specification. +type MethodDesc struct { + MethodName string + Handler methodHandler +} + +// ServiceDesc represents an RPC service's specification. +type ServiceDesc struct { + ServiceName string + // The pointer to the service interface. Used to check whether the user + // provided implementation satisfies the interface requirements. + HandlerType interface{} + Methods []MethodDesc + Streams []StreamDesc + Metadata interface{} +} + +// service consists of the information of the server serving this service and +// the methods in this service. +type service struct { + server interface{} // the server for service methods + md map[string]*MethodDesc + sd map[string]*StreamDesc + mdata interface{} +} + +// Server is a gRPC server to serve RPC requests. 
+type Server struct { + opts options + + mu sync.Mutex // guards following + lis map[net.Listener]bool + conns map[io.Closer]bool + serve bool + drain bool + cv *sync.Cond // signaled when connections close for GracefulStop + m map[string]*service // service name -> service info + events trace.EventLog + + quit chan struct{} + done chan struct{} + quitOnce sync.Once + doneOnce sync.Once + channelzRemoveOnce sync.Once + serveWG sync.WaitGroup // counts active Serve goroutines for GracefulStop + + channelzID int64 // channelz unique identification number + czData *channelzData +} + +type options struct { + creds credentials.TransportCredentials + codec baseCodec + cp Compressor + dc Decompressor + unaryInt UnaryServerInterceptor + streamInt StreamServerInterceptor + inTapHandle tap.ServerInHandle + statsHandler stats.Handler + maxConcurrentStreams uint32 + maxReceiveMessageSize int + maxSendMessageSize int + unknownStreamDesc *StreamDesc + keepaliveParams keepalive.ServerParameters + keepalivePolicy keepalive.EnforcementPolicy + initialWindowSize int32 + initialConnWindowSize int32 + writeBufferSize int + readBufferSize int + connectionTimeout time.Duration + maxHeaderListSize *uint32 +} + +var defaultServerOptions = options{ + maxReceiveMessageSize: defaultServerMaxReceiveMessageSize, + maxSendMessageSize: defaultServerMaxSendMessageSize, + connectionTimeout: 120 * time.Second, + writeBufferSize: defaultWriteBufSize, + readBufferSize: defaultReadBufSize, +} + +// A ServerOption sets options such as credentials, codec and keepalive parameters, etc. +type ServerOption func(*options) + +// WriteBufferSize determines how much data can be batched before doing a write on the wire. +// The corresponding memory allocation for this buffer will be twice the size to keep syscalls low. +// The default value for this buffer is 32KB. +// Zero will disable the write buffer such that each write will be on underlying connection. +// Note: A Send call may not directly translate to a write. +func WriteBufferSize(s int) ServerOption { + return func(o *options) { + o.writeBufferSize = s + } +} + +// ReadBufferSize lets you set the size of read buffer, this determines how much data can be read at most +// for one read syscall. +// The default value for this buffer is 32KB. +// Zero will disable read buffer for a connection so data framer can access the underlying +// conn directly. +func ReadBufferSize(s int) ServerOption { + return func(o *options) { + o.readBufferSize = s + } +} + +// InitialWindowSize returns a ServerOption that sets window size for stream. +// The lower bound for window size is 64K and any value smaller than that will be ignored. +func InitialWindowSize(s int32) ServerOption { + return func(o *options) { + o.initialWindowSize = s + } +} + +// InitialConnWindowSize returns a ServerOption that sets window size for a connection. +// The lower bound for window size is 64K and any value smaller than that will be ignored. +func InitialConnWindowSize(s int32) ServerOption { + return func(o *options) { + o.initialConnWindowSize = s + } +} + +// KeepaliveParams returns a ServerOption that sets keepalive and max-age parameters for the server. 
+func KeepaliveParams(kp keepalive.ServerParameters) ServerOption { + if kp.Time > 0 && kp.Time < time.Second { + grpclog.Warning("Adjusting keepalive ping interval to minimum period of 1s") + kp.Time = time.Second + } + + return func(o *options) { + o.keepaliveParams = kp + } +} + +// KeepaliveEnforcementPolicy returns a ServerOption that sets keepalive enforcement policy for the server. +func KeepaliveEnforcementPolicy(kep keepalive.EnforcementPolicy) ServerOption { + return func(o *options) { + o.keepalivePolicy = kep + } +} + +// CustomCodec returns a ServerOption that sets a codec for message marshaling and unmarshaling. +// +// This will override any lookups by content-subtype for Codecs registered with RegisterCodec. +func CustomCodec(codec Codec) ServerOption { + return func(o *options) { + o.codec = codec + } +} + +// RPCCompressor returns a ServerOption that sets a compressor for outbound +// messages. For backward compatibility, all outbound messages will be sent +// using this compressor, regardless of incoming message compression. By +// default, server messages will be sent using the same compressor with which +// request messages were sent. +// +// Deprecated: use encoding.RegisterCompressor instead. +func RPCCompressor(cp Compressor) ServerOption { + return func(o *options) { + o.cp = cp + } +} + +// RPCDecompressor returns a ServerOption that sets a decompressor for inbound +// messages. It has higher priority than decompressors registered via +// encoding.RegisterCompressor. +// +// Deprecated: use encoding.RegisterCompressor instead. +func RPCDecompressor(dc Decompressor) ServerOption { + return func(o *options) { + o.dc = dc + } +} + +// MaxMsgSize returns a ServerOption to set the max message size in bytes the server can receive. +// If this is not set, gRPC uses the default limit. +// +// Deprecated: use MaxRecvMsgSize instead. +func MaxMsgSize(m int) ServerOption { + return MaxRecvMsgSize(m) +} + +// MaxRecvMsgSize returns a ServerOption to set the max message size in bytes the server can receive. +// If this is not set, gRPC uses the default 4MB. +func MaxRecvMsgSize(m int) ServerOption { + return func(o *options) { + o.maxReceiveMessageSize = m + } +} + +// MaxSendMsgSize returns a ServerOption to set the max message size in bytes the server can send. +// If this is not set, gRPC uses the default `math.MaxInt32`. +func MaxSendMsgSize(m int) ServerOption { + return func(o *options) { + o.maxSendMessageSize = m + } +} + +// MaxConcurrentStreams returns a ServerOption that will apply a limit on the number +// of concurrent streams to each ServerTransport. +func MaxConcurrentStreams(n uint32) ServerOption { + return func(o *options) { + o.maxConcurrentStreams = n + } +} + +// Creds returns a ServerOption that sets credentials for server connections. +func Creds(c credentials.TransportCredentials) ServerOption { + return func(o *options) { + o.creds = c + } +} + +// UnaryInterceptor returns a ServerOption that sets the UnaryServerInterceptor for the +// server. Only one unary interceptor can be installed. The construction of multiple +// interceptors (e.g., chaining) can be implemented at the caller. +func UnaryInterceptor(i UnaryServerInterceptor) ServerOption { + return func(o *options) { + if o.unaryInt != nil { + panic("The unary server interceptor was already set and may not be reset.") + } + o.unaryInt = i + } +} + +// StreamInterceptor returns a ServerOption that sets the StreamServerInterceptor for the +// server. Only one stream interceptor can be installed. 
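The ServerOption values above are plain functional options collected by NewServer, which appears further down in this file. A sketch combining a few of them; the size limit, keepalive interval and the no-op interceptor are illustrative:

package main

import (
	"context"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/keepalive"
)

// loggingInterceptor is a trivial UnaryServerInterceptor that just calls the
// handler; a real one could time or log the call. Only one unary interceptor
// can be installed, so chaining has to be done by the caller.
func loggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	return handler(ctx, req)
}

func newServer() *grpc.Server {
	return grpc.NewServer(
		grpc.MaxRecvMsgSize(8*1024*1024),
		grpc.KeepaliveParams(keepalive.ServerParameters{Time: 2 * time.Minute}),
		grpc.UnaryInterceptor(loggingInterceptor),
	)
}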
+func StreamInterceptor(i StreamServerInterceptor) ServerOption { + return func(o *options) { + if o.streamInt != nil { + panic("The stream server interceptor was already set and may not be reset.") + } + o.streamInt = i + } +} + +// InTapHandle returns a ServerOption that sets the tap handle for all the server +// transport to be created. Only one can be installed. +func InTapHandle(h tap.ServerInHandle) ServerOption { + return func(o *options) { + if o.inTapHandle != nil { + panic("The tap handle was already set and may not be reset.") + } + o.inTapHandle = h + } +} + +// StatsHandler returns a ServerOption that sets the stats handler for the server. +func StatsHandler(h stats.Handler) ServerOption { + return func(o *options) { + o.statsHandler = h + } +} + +// UnknownServiceHandler returns a ServerOption that allows for adding a custom +// unknown service handler. The provided method is a bidi-streaming RPC service +// handler that will be invoked instead of returning the "unimplemented" gRPC +// error whenever a request is received for an unregistered service or method. +// The handling function has full access to the Context of the request and the +// stream, and the invocation bypasses interceptors. +func UnknownServiceHandler(streamHandler StreamHandler) ServerOption { + return func(o *options) { + o.unknownStreamDesc = &StreamDesc{ + StreamName: "unknown_service_handler", + Handler: streamHandler, + // We need to assume that the users of the streamHandler will want to use both. + ClientStreams: true, + ServerStreams: true, + } + } +} + +// ConnectionTimeout returns a ServerOption that sets the timeout for +// connection establishment (up to and including HTTP/2 handshaking) for all +// new connections. If this is not set, the default is 120 seconds. A zero or +// negative value will result in an immediate timeout. +// +// This API is EXPERIMENTAL. +func ConnectionTimeout(d time.Duration) ServerOption { + return func(o *options) { + o.connectionTimeout = d + } +} + +// MaxHeaderListSize returns a ServerOption that sets the max (uncompressed) size +// of header list that the server is prepared to accept. +func MaxHeaderListSize(s uint32) ServerOption { + return func(o *options) { + o.maxHeaderListSize = &s + } +} + +// NewServer creates a gRPC server which has no service registered and has not +// started to accept requests yet. +func NewServer(opt ...ServerOption) *Server { + opts := defaultServerOptions + for _, o := range opt { + o(&opts) + } + s := &Server{ + lis: make(map[net.Listener]bool), + opts: opts, + conns: make(map[io.Closer]bool), + m: make(map[string]*service), + quit: make(chan struct{}), + done: make(chan struct{}), + czData: new(channelzData), + } + s.cv = sync.NewCond(&s.mu) + if EnableTracing { + _, file, line, _ := runtime.Caller(1) + s.events = trace.NewEventLog("grpc.Server", fmt.Sprintf("%s:%d", file, line)) + } + + if channelz.IsOn() { + s.channelzID = channelz.RegisterServer(&channelzServer{s}, "") + } + return s +} + +// printf records an event in s's event log, unless s has been stopped. +// REQUIRES s.mu is held. +func (s *Server) printf(format string, a ...interface{}) { + if s.events != nil { + s.events.Printf(format, a...) + } +} + +// errorf records an error in s's event log, unless s has been stopped. +// REQUIRES s.mu is held. +func (s *Server) errorf(format string, a ...interface{}) { + if s.events != nil { + s.events.Errorf(format, a...) + } +} + +// RegisterService registers a service and its implementation to the gRPC +// server. 
It is called from the IDL generated code. This must be called before +// invoking Serve. +func (s *Server) RegisterService(sd *ServiceDesc, ss interface{}) { + ht := reflect.TypeOf(sd.HandlerType).Elem() + st := reflect.TypeOf(ss) + if !st.Implements(ht) { + grpclog.Fatalf("grpc: Server.RegisterService found the handler of type %v that does not satisfy %v", st, ht) + } + s.register(sd, ss) +} + +func (s *Server) register(sd *ServiceDesc, ss interface{}) { + s.mu.Lock() + defer s.mu.Unlock() + s.printf("RegisterService(%q)", sd.ServiceName) + if s.serve { + grpclog.Fatalf("grpc: Server.RegisterService after Server.Serve for %q", sd.ServiceName) + } + if _, ok := s.m[sd.ServiceName]; ok { + grpclog.Fatalf("grpc: Server.RegisterService found duplicate service registration for %q", sd.ServiceName) + } + srv := &service{ + server: ss, + md: make(map[string]*MethodDesc), + sd: make(map[string]*StreamDesc), + mdata: sd.Metadata, + } + for i := range sd.Methods { + d := &sd.Methods[i] + srv.md[d.MethodName] = d + } + for i := range sd.Streams { + d := &sd.Streams[i] + srv.sd[d.StreamName] = d + } + s.m[sd.ServiceName] = srv +} + +// MethodInfo contains the information of an RPC including its method name and type. +type MethodInfo struct { + // Name is the method name only, without the service name or package name. + Name string + // IsClientStream indicates whether the RPC is a client streaming RPC. + IsClientStream bool + // IsServerStream indicates whether the RPC is a server streaming RPC. + IsServerStream bool +} + +// ServiceInfo contains unary RPC method info, streaming RPC method info and metadata for a service. +type ServiceInfo struct { + Methods []MethodInfo + // Metadata is the metadata specified in ServiceDesc when registering service. + Metadata interface{} +} + +// GetServiceInfo returns a map from service names to ServiceInfo. +// Service names include the package names, in the form of .. +func (s *Server) GetServiceInfo() map[string]ServiceInfo { + ret := make(map[string]ServiceInfo) + for n, srv := range s.m { + methods := make([]MethodInfo, 0, len(srv.md)+len(srv.sd)) + for m := range srv.md { + methods = append(methods, MethodInfo{ + Name: m, + IsClientStream: false, + IsServerStream: false, + }) + } + for m, d := range srv.sd { + methods = append(methods, MethodInfo{ + Name: m, + IsClientStream: d.ClientStreams, + IsServerStream: d.ServerStreams, + }) + } + + ret[n] = ServiceInfo{ + Methods: methods, + Metadata: srv.mdata, + } + } + return ret +} + +// ErrServerStopped indicates that the operation is now illegal because of +// the server being stopped. +var ErrServerStopped = errors.New("grpc: the server has been stopped") + +func (s *Server) useTransportAuthenticator(rawConn net.Conn) (net.Conn, credentials.AuthInfo, error) { + if s.opts.creds == nil { + return rawConn, nil, nil + } + return s.opts.creds.ServerHandshake(rawConn) +} + +type listenSocket struct { + net.Listener + channelzID int64 +} + +func (l *listenSocket) ChannelzMetric() *channelz.SocketInternalMetric { + return &channelz.SocketInternalMetric{ + SocketOptions: channelz.GetSocketOption(l.Listener), + LocalAddr: l.Listener.Addr(), + } +} + +func (l *listenSocket) Close() error { + err := l.Listener.Close() + if channelz.IsOn() { + channelz.RemoveEntry(l.channelzID) + } + return err +} + +// Serve accepts incoming connections on the listener lis, creating a new +// ServerTransport and service goroutine for each. 
The service goroutines +// read gRPC requests and then call the registered handlers to reply to them. +// Serve returns when lis.Accept fails with fatal errors. lis will be closed when +// this method returns. +// Serve will return a non-nil error unless Stop or GracefulStop is called. +func (s *Server) Serve(lis net.Listener) error { + s.mu.Lock() + s.printf("serving") + s.serve = true + if s.lis == nil { + // Serve called after Stop or GracefulStop. + s.mu.Unlock() + lis.Close() + return ErrServerStopped + } + + s.serveWG.Add(1) + defer func() { + s.serveWG.Done() + select { + // Stop or GracefulStop called; block until done and return nil. + case <-s.quit: + <-s.done + default: + } + }() + + ls := &listenSocket{Listener: lis} + s.lis[ls] = true + + if channelz.IsOn() { + ls.channelzID = channelz.RegisterListenSocket(ls, s.channelzID, lis.Addr().String()) + } + s.mu.Unlock() + + defer func() { + s.mu.Lock() + if s.lis != nil && s.lis[ls] { + ls.Close() + delete(s.lis, ls) + } + s.mu.Unlock() + }() + + var tempDelay time.Duration // how long to sleep on accept failure + + for { + rawConn, err := lis.Accept() + if err != nil { + if ne, ok := err.(interface { + Temporary() bool + }); ok && ne.Temporary() { + if tempDelay == 0 { + tempDelay = 5 * time.Millisecond + } else { + tempDelay *= 2 + } + if max := 1 * time.Second; tempDelay > max { + tempDelay = max + } + s.mu.Lock() + s.printf("Accept error: %v; retrying in %v", err, tempDelay) + s.mu.Unlock() + timer := time.NewTimer(tempDelay) + select { + case <-timer.C: + case <-s.quit: + timer.Stop() + return nil + } + continue + } + s.mu.Lock() + s.printf("done serving; Accept = %v", err) + s.mu.Unlock() + + select { + case <-s.quit: + return nil + default: + } + return err + } + tempDelay = 0 + // Start a new goroutine to deal with rawConn so we don't stall this Accept + // loop goroutine. + // + // Make sure we account for the goroutine so GracefulStop doesn't nil out + // s.conns before this conn can be added. + s.serveWG.Add(1) + go func() { + s.handleRawConn(rawConn) + s.serveWG.Done() + }() + } +} + +// handleRawConn forks a goroutine to handle a just-accepted connection that +// has not had any I/O performed on it yet. +func (s *Server) handleRawConn(rawConn net.Conn) { + rawConn.SetDeadline(time.Now().Add(s.opts.connectionTimeout)) + conn, authInfo, err := s.useTransportAuthenticator(rawConn) + if err != nil { + // ErrConnDispatched means that the connection was dispatched away from + // gRPC; those connections should be left open. + if err != credentials.ErrConnDispatched { + s.mu.Lock() + s.errorf("ServerHandshake(%q) failed: %v", rawConn.RemoteAddr(), err) + s.mu.Unlock() + grpclog.Warningf("grpc: Server.Serve failed to complete security handshake from %q: %v", rawConn.RemoteAddr(), err) + rawConn.Close() + } + rawConn.SetDeadline(time.Time{}) + return + } + + s.mu.Lock() + if s.conns == nil { + s.mu.Unlock() + conn.Close() + return + } + s.mu.Unlock() + + // Finish handshaking (HTTP2) + st := s.newHTTP2Transport(conn, authInfo) + if st == nil { + return + } + + rawConn.SetDeadline(time.Time{}) + if !s.addConn(st) { + return + } + go func() { + s.serveStreams(st) + s.removeConn(st) + }() +} + +// newHTTP2Transport sets up a http/2 transport (using the +// gRPC http2 server transport in transport/http2_server.go). 
+func (s *Server) newHTTP2Transport(c net.Conn, authInfo credentials.AuthInfo) transport.ServerTransport { + config := &transport.ServerConfig{ + MaxStreams: s.opts.maxConcurrentStreams, + AuthInfo: authInfo, + InTapHandle: s.opts.inTapHandle, + StatsHandler: s.opts.statsHandler, + KeepaliveParams: s.opts.keepaliveParams, + KeepalivePolicy: s.opts.keepalivePolicy, + InitialWindowSize: s.opts.initialWindowSize, + InitialConnWindowSize: s.opts.initialConnWindowSize, + WriteBufferSize: s.opts.writeBufferSize, + ReadBufferSize: s.opts.readBufferSize, + ChannelzParentID: s.channelzID, + MaxHeaderListSize: s.opts.maxHeaderListSize, + } + st, err := transport.NewServerTransport("http2", c, config) + if err != nil { + s.mu.Lock() + s.errorf("NewServerTransport(%q) failed: %v", c.RemoteAddr(), err) + s.mu.Unlock() + c.Close() + grpclog.Warningln("grpc: Server.Serve failed to create ServerTransport: ", err) + return nil + } + + return st +} + +func (s *Server) serveStreams(st transport.ServerTransport) { + defer st.Close() + var wg sync.WaitGroup + st.HandleStreams(func(stream *transport.Stream) { + wg.Add(1) + go func() { + defer wg.Done() + s.handleStream(st, stream, s.traceInfo(st, stream)) + }() + }, func(ctx context.Context, method string) context.Context { + if !EnableTracing { + return ctx + } + tr := trace.New("grpc.Recv."+methodFamily(method), method) + return trace.NewContext(ctx, tr) + }) + wg.Wait() +} + +var _ http.Handler = (*Server)(nil) + +// ServeHTTP implements the Go standard library's http.Handler +// interface by responding to the gRPC request r, by looking up +// the requested gRPC method in the gRPC server s. +// +// The provided HTTP request must have arrived on an HTTP/2 +// connection. When using the Go standard library's server, +// practically this means that the Request must also have arrived +// over TLS. +// +// To share one port (such as 443 for https) between gRPC and an +// existing http.Handler, use a root http.Handler such as: +// +// if r.ProtoMajor == 2 && strings.HasPrefix( +// r.Header.Get("Content-Type"), "application/grpc") { +// grpcServer.ServeHTTP(w, r) +// } else { +// yourMux.ServeHTTP(w, r) +// } +// +// Note that ServeHTTP uses Go's HTTP/2 server implementation which is totally +// separate from grpc-go's HTTP/2 server. Performance and features may vary +// between the two paths. ServeHTTP does not support some gRPC features +// available through grpc-go's HTTP/2 server, and it is currently EXPERIMENTAL +// and subject to change. +func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { + st, err := transport.NewServerHandlerTransport(w, r, s.opts.statsHandler) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + if !s.addConn(st) { + return + } + defer s.removeConn(st) + s.serveStreams(st) +} + +// traceInfo returns a traceInfo and associates it with stream, if tracing is enabled. +// If tracing is not enabled, it returns nil. 
+func (s *Server) traceInfo(st transport.ServerTransport, stream *transport.Stream) (trInfo *traceInfo) { + tr, ok := trace.FromContext(stream.Context()) + if !ok { + return nil + } + + trInfo = &traceInfo{ + tr: tr, + firstLine: firstLine{ + client: false, + remoteAddr: st.RemoteAddr(), + }, + } + if dl, ok := stream.Context().Deadline(); ok { + trInfo.firstLine.deadline = time.Until(dl) + } + return trInfo +} + +func (s *Server) addConn(c io.Closer) bool { + s.mu.Lock() + defer s.mu.Unlock() + if s.conns == nil { + c.Close() + return false + } + if s.drain { + // Transport added after we drained our existing conns: drain it + // immediately. + c.(transport.ServerTransport).Drain() + } + s.conns[c] = true + return true +} + +func (s *Server) removeConn(c io.Closer) { + s.mu.Lock() + defer s.mu.Unlock() + if s.conns != nil { + delete(s.conns, c) + s.cv.Broadcast() + } +} + +func (s *Server) channelzMetric() *channelz.ServerInternalMetric { + return &channelz.ServerInternalMetric{ + CallsStarted: atomic.LoadInt64(&s.czData.callsStarted), + CallsSucceeded: atomic.LoadInt64(&s.czData.callsSucceeded), + CallsFailed: atomic.LoadInt64(&s.czData.callsFailed), + LastCallStartedTimestamp: time.Unix(0, atomic.LoadInt64(&s.czData.lastCallStartedTime)), + } +} + +func (s *Server) incrCallsStarted() { + atomic.AddInt64(&s.czData.callsStarted, 1) + atomic.StoreInt64(&s.czData.lastCallStartedTime, time.Now().UnixNano()) +} + +func (s *Server) incrCallsSucceeded() { + atomic.AddInt64(&s.czData.callsSucceeded, 1) +} + +func (s *Server) incrCallsFailed() { + atomic.AddInt64(&s.czData.callsFailed, 1) +} + +func (s *Server) sendResponse(t transport.ServerTransport, stream *transport.Stream, msg interface{}, cp Compressor, opts *transport.Options, comp encoding.Compressor) error { + data, err := encode(s.getCodec(stream.ContentSubtype()), msg) + if err != nil { + grpclog.Errorln("grpc: server failed to encode response: ", err) + return err + } + compData, err := compress(data, cp, comp) + if err != nil { + grpclog.Errorln("grpc: server failed to compress response: ", err) + return err + } + hdr, payload := msgHeader(data, compData) + // TODO(dfawley): should we be checking len(data) instead? + if len(payload) > s.opts.maxSendMessageSize { + return status.Errorf(codes.ResourceExhausted, "grpc: trying to send message larger than max (%d vs. 
%d)", len(payload), s.opts.maxSendMessageSize) + } + err = t.Write(stream, hdr, payload, opts) + if err == nil && s.opts.statsHandler != nil { + s.opts.statsHandler.HandleRPC(stream.Context(), outPayload(false, msg, data, payload, time.Now())) + } + return err +} + +func (s *Server) processUnaryRPC(t transport.ServerTransport, stream *transport.Stream, srv *service, md *MethodDesc, trInfo *traceInfo) (err error) { + if channelz.IsOn() { + s.incrCallsStarted() + defer func() { + if err != nil && err != io.EOF { + s.incrCallsFailed() + } else { + s.incrCallsSucceeded() + } + }() + } + sh := s.opts.statsHandler + if sh != nil { + beginTime := time.Now() + begin := &stats.Begin{ + BeginTime: beginTime, + } + sh.HandleRPC(stream.Context(), begin) + defer func() { + end := &stats.End{ + BeginTime: beginTime, + EndTime: time.Now(), + } + if err != nil && err != io.EOF { + end.Error = toRPCErr(err) + } + sh.HandleRPC(stream.Context(), end) + }() + } + if trInfo != nil { + defer trInfo.tr.Finish() + trInfo.tr.LazyLog(&trInfo.firstLine, false) + defer func() { + if err != nil && err != io.EOF { + trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.SetError() + } + }() + } + + binlog := binarylog.GetMethodLogger(stream.Method()) + if binlog != nil { + ctx := stream.Context() + md, _ := metadata.FromIncomingContext(ctx) + logEntry := &binarylog.ClientHeader{ + Header: md, + MethodName: stream.Method(), + PeerAddr: nil, + } + if deadline, ok := ctx.Deadline(); ok { + logEntry.Timeout = time.Until(deadline) + if logEntry.Timeout < 0 { + logEntry.Timeout = 0 + } + } + if a := md[":authority"]; len(a) > 0 { + logEntry.Authority = a[0] + } + if peer, ok := peer.FromContext(ctx); ok { + logEntry.PeerAddr = peer.Addr + } + binlog.Log(logEntry) + } + + // comp and cp are used for compression. decomp and dc are used for + // decompression. If comp and decomp are both set, they are the same; + // however they are kept separate to ensure that at most one of the + // compressor/decompressor variable pairs are set for use later. + var comp, decomp encoding.Compressor + var cp Compressor + var dc Decompressor + + // If dc is set and matches the stream's compression, use it. Otherwise, try + // to find a matching registered compressor for decomp. + if rc := stream.RecvCompress(); s.opts.dc != nil && s.opts.dc.Type() == rc { + dc = s.opts.dc + } else if rc != "" && rc != encoding.Identity { + decomp = encoding.GetCompressor(rc) + if decomp == nil { + st := status.Newf(codes.Unimplemented, "grpc: Decompressor is not installed for grpc-encoding %q", rc) + t.WriteStatus(stream, st) + return st.Err() + } + } + + // If cp is set, use it. Otherwise, attempt to compress the response using + // the incoming message compression method. + // + // NOTE: this needs to be ahead of all handling, https://github.com/grpc/grpc-go/issues/686. + if s.opts.cp != nil { + cp = s.opts.cp + stream.SetSendCompress(cp.Type()) + } else if rc := stream.RecvCompress(); rc != "" && rc != encoding.Identity { + // Legacy compressor not specified; attempt to respond with same encoding. 
+ comp = encoding.GetCompressor(rc) + if comp != nil { + stream.SetSendCompress(rc) + } + } + + var payInfo *payloadInfo + if sh != nil || binlog != nil { + payInfo = &payloadInfo{} + } + d, err := recvAndDecompress(&parser{r: stream}, stream, dc, s.opts.maxReceiveMessageSize, payInfo, decomp) + if err != nil { + if st, ok := status.FromError(err); ok { + if e := t.WriteStatus(stream, st); e != nil { + grpclog.Warningf("grpc: Server.processUnaryRPC failed to write status %v", e) + } + } + return err + } + if channelz.IsOn() { + t.IncrMsgRecv() + } + df := func(v interface{}) error { + if err := s.getCodec(stream.ContentSubtype()).Unmarshal(d, v); err != nil { + return status.Errorf(codes.Internal, "grpc: error unmarshalling request: %v", err) + } + if sh != nil { + sh.HandleRPC(stream.Context(), &stats.InPayload{ + RecvTime: time.Now(), + Payload: v, + Data: d, + Length: len(d), + }) + } + if binlog != nil { + binlog.Log(&binarylog.ClientMessage{ + Message: d, + }) + } + if trInfo != nil { + trInfo.tr.LazyLog(&payload{sent: false, msg: v}, true) + } + return nil + } + ctx := NewContextWithServerTransportStream(stream.Context(), stream) + reply, appErr := md.Handler(srv.server, ctx, df, s.opts.unaryInt) + if appErr != nil { + appStatus, ok := status.FromError(appErr) + if !ok { + // Convert appErr if it is not a grpc status error. + appErr = status.Error(codes.Unknown, appErr.Error()) + appStatus, _ = status.FromError(appErr) + } + if trInfo != nil { + trInfo.tr.LazyLog(stringer(appStatus.Message()), true) + trInfo.tr.SetError() + } + if e := t.WriteStatus(stream, appStatus); e != nil { + grpclog.Warningf("grpc: Server.processUnaryRPC failed to write status: %v", e) + } + if binlog != nil { + if h, _ := stream.Header(); h.Len() > 0 { + // Only log serverHeader if there was header. Otherwise it can + // be trailer only. + binlog.Log(&binarylog.ServerHeader{ + Header: h, + }) + } + binlog.Log(&binarylog.ServerTrailer{ + Trailer: stream.Trailer(), + Err: appErr, + }) + } + return appErr + } + if trInfo != nil { + trInfo.tr.LazyLog(stringer("OK"), false) + } + opts := &transport.Options{Last: true} + + if err := s.sendResponse(t, stream, reply, cp, opts, comp); err != nil { + if err == io.EOF { + // The entire stream is done (for unary RPC only). + return err + } + if s, ok := status.FromError(err); ok { + if e := t.WriteStatus(stream, s); e != nil { + grpclog.Warningf("grpc: Server.processUnaryRPC failed to write status: %v", e) + } + } else { + switch st := err.(type) { + case transport.ConnectionError: + // Nothing to do here. + default: + panic(fmt.Sprintf("grpc: Unexpected error (%T) from sendResponse: %v", st, st)) + } + } + if binlog != nil { + h, _ := stream.Header() + binlog.Log(&binarylog.ServerHeader{ + Header: h, + }) + binlog.Log(&binarylog.ServerTrailer{ + Trailer: stream.Trailer(), + Err: appErr, + }) + } + return err + } + if binlog != nil { + h, _ := stream.Header() + binlog.Log(&binarylog.ServerHeader{ + Header: h, + }) + binlog.Log(&binarylog.ServerMessage{ + Message: reply, + }) + } + if channelz.IsOn() { + t.IncrMsgSent() + } + if trInfo != nil { + trInfo.tr.LazyLog(&payload{sent: true, msg: reply}, true) + } + // TODO: Should we be logging if writing status failed here, like above? + // Should the logging be in WriteStatus? Should we ignore the WriteStatus + // error or allow the stats handler to see it? 
+ err = t.WriteStatus(stream, status.New(codes.OK, "")) + if binlog != nil { + binlog.Log(&binarylog.ServerTrailer{ + Trailer: stream.Trailer(), + Err: appErr, + }) + } + return err +} + +func (s *Server) processStreamingRPC(t transport.ServerTransport, stream *transport.Stream, srv *service, sd *StreamDesc, trInfo *traceInfo) (err error) { + if channelz.IsOn() { + s.incrCallsStarted() + defer func() { + if err != nil && err != io.EOF { + s.incrCallsFailed() + } else { + s.incrCallsSucceeded() + } + }() + } + sh := s.opts.statsHandler + if sh != nil { + beginTime := time.Now() + begin := &stats.Begin{ + BeginTime: beginTime, + } + sh.HandleRPC(stream.Context(), begin) + defer func() { + end := &stats.End{ + BeginTime: beginTime, + EndTime: time.Now(), + } + if err != nil && err != io.EOF { + end.Error = toRPCErr(err) + } + sh.HandleRPC(stream.Context(), end) + }() + } + ctx := NewContextWithServerTransportStream(stream.Context(), stream) + ss := &serverStream{ + ctx: ctx, + t: t, + s: stream, + p: &parser{r: stream}, + codec: s.getCodec(stream.ContentSubtype()), + maxReceiveMessageSize: s.opts.maxReceiveMessageSize, + maxSendMessageSize: s.opts.maxSendMessageSize, + trInfo: trInfo, + statsHandler: sh, + } + + ss.binlog = binarylog.GetMethodLogger(stream.Method()) + if ss.binlog != nil { + md, _ := metadata.FromIncomingContext(ctx) + logEntry := &binarylog.ClientHeader{ + Header: md, + MethodName: stream.Method(), + PeerAddr: nil, + } + if deadline, ok := ctx.Deadline(); ok { + logEntry.Timeout = time.Until(deadline) + if logEntry.Timeout < 0 { + logEntry.Timeout = 0 + } + } + if a := md[":authority"]; len(a) > 0 { + logEntry.Authority = a[0] + } + if peer, ok := peer.FromContext(ss.Context()); ok { + logEntry.PeerAddr = peer.Addr + } + ss.binlog.Log(logEntry) + } + + // If dc is set and matches the stream's compression, use it. Otherwise, try + // to find a matching registered compressor for decomp. + if rc := stream.RecvCompress(); s.opts.dc != nil && s.opts.dc.Type() == rc { + ss.dc = s.opts.dc + } else if rc != "" && rc != encoding.Identity { + ss.decomp = encoding.GetCompressor(rc) + if ss.decomp == nil { + st := status.Newf(codes.Unimplemented, "grpc: Decompressor is not installed for grpc-encoding %q", rc) + t.WriteStatus(ss.s, st) + return st.Err() + } + } + + // If cp is set, use it. Otherwise, attempt to compress the response using + // the incoming message compression method. + // + // NOTE: this needs to be ahead of all handling, https://github.com/grpc/grpc-go/issues/686. + if s.opts.cp != nil { + ss.cp = s.opts.cp + stream.SetSendCompress(s.opts.cp.Type()) + } else if rc := stream.RecvCompress(); rc != "" && rc != encoding.Identity { + // Legacy compressor not specified; attempt to respond with same encoding. 
+ ss.comp = encoding.GetCompressor(rc) + if ss.comp != nil { + stream.SetSendCompress(rc) + } + } + + if trInfo != nil { + trInfo.tr.LazyLog(&trInfo.firstLine, false) + defer func() { + ss.mu.Lock() + if err != nil && err != io.EOF { + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.SetError() + } + ss.trInfo.tr.Finish() + ss.trInfo.tr = nil + ss.mu.Unlock() + }() + } + var appErr error + var server interface{} + if srv != nil { + server = srv.server + } + if s.opts.streamInt == nil { + appErr = sd.Handler(server, ss) + } else { + info := &StreamServerInfo{ + FullMethod: stream.Method(), + IsClientStream: sd.ClientStreams, + IsServerStream: sd.ServerStreams, + } + appErr = s.opts.streamInt(server, ss, info, sd.Handler) + } + if appErr != nil { + appStatus, ok := status.FromError(appErr) + if !ok { + appStatus = status.New(codes.Unknown, appErr.Error()) + appErr = appStatus.Err() + } + if trInfo != nil { + ss.mu.Lock() + ss.trInfo.tr.LazyLog(stringer(appStatus.Message()), true) + ss.trInfo.tr.SetError() + ss.mu.Unlock() + } + t.WriteStatus(ss.s, appStatus) + if ss.binlog != nil { + ss.binlog.Log(&binarylog.ServerTrailer{ + Trailer: ss.s.Trailer(), + Err: appErr, + }) + } + // TODO: Should we log an error from WriteStatus here and below? + return appErr + } + if trInfo != nil { + ss.mu.Lock() + ss.trInfo.tr.LazyLog(stringer("OK"), false) + ss.mu.Unlock() + } + err = t.WriteStatus(ss.s, status.New(codes.OK, "")) + if ss.binlog != nil { + ss.binlog.Log(&binarylog.ServerTrailer{ + Trailer: ss.s.Trailer(), + Err: appErr, + }) + } + return err +} + +func (s *Server) handleStream(t transport.ServerTransport, stream *transport.Stream, trInfo *traceInfo) { + sm := stream.Method() + if sm != "" && sm[0] == '/' { + sm = sm[1:] + } + pos := strings.LastIndex(sm, "/") + if pos == -1 { + if trInfo != nil { + trInfo.tr.LazyLog(&fmtStringer{"Malformed method name %q", []interface{}{sm}}, true) + trInfo.tr.SetError() + } + errDesc := fmt.Sprintf("malformed method name: %q", stream.Method()) + if err := t.WriteStatus(stream, status.New(codes.ResourceExhausted, errDesc)); err != nil { + if trInfo != nil { + trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.SetError() + } + grpclog.Warningf("grpc: Server.handleStream failed to write status: %v", err) + } + if trInfo != nil { + trInfo.tr.Finish() + } + return + } + service := sm[:pos] + method := sm[pos+1:] + + srv, knownService := s.m[service] + if knownService { + if md, ok := srv.md[method]; ok { + s.processUnaryRPC(t, stream, srv, md, trInfo) + return + } + if sd, ok := srv.sd[method]; ok { + s.processStreamingRPC(t, stream, srv, sd, trInfo) + return + } + } + // Unknown service, or known server unknown method. 
+ if unknownDesc := s.opts.unknownStreamDesc; unknownDesc != nil { + s.processStreamingRPC(t, stream, nil, unknownDesc, trInfo) + return + } + var errDesc string + if !knownService { + errDesc = fmt.Sprintf("unknown service %v", service) + } else { + errDesc = fmt.Sprintf("unknown method %v for service %v", method, service) + } + if trInfo != nil { + trInfo.tr.LazyPrintf("%s", errDesc) + trInfo.tr.SetError() + } + if err := t.WriteStatus(stream, status.New(codes.Unimplemented, errDesc)); err != nil { + if trInfo != nil { + trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + trInfo.tr.SetError() + } + grpclog.Warningf("grpc: Server.handleStream failed to write status: %v", err) + } + if trInfo != nil { + trInfo.tr.Finish() + } +} + +// The key to save ServerTransportStream in the context. +type streamKey struct{} + +// NewContextWithServerTransportStream creates a new context from ctx and +// attaches stream to it. +// +// This API is EXPERIMENTAL. +func NewContextWithServerTransportStream(ctx context.Context, stream ServerTransportStream) context.Context { + return context.WithValue(ctx, streamKey{}, stream) +} + +// ServerTransportStream is a minimal interface that a transport stream must +// implement. This can be used to mock an actual transport stream for tests of +// handler code that use, for example, grpc.SetHeader (which requires some +// stream to be in context). +// +// See also NewContextWithServerTransportStream. +// +// This API is EXPERIMENTAL. +type ServerTransportStream interface { + Method() string + SetHeader(md metadata.MD) error + SendHeader(md metadata.MD) error + SetTrailer(md metadata.MD) error +} + +// ServerTransportStreamFromContext returns the ServerTransportStream saved in +// ctx. Returns nil if the given context has no stream associated with it +// (which implies it is not an RPC invocation context). +// +// This API is EXPERIMENTAL. +func ServerTransportStreamFromContext(ctx context.Context) ServerTransportStream { + s, _ := ctx.Value(streamKey{}).(ServerTransportStream) + return s +} + +// Stop stops the gRPC server. It immediately closes all open +// connections and listeners. +// It cancels all active RPCs on the server side and the corresponding +// pending RPCs on the client side will get notified by connection +// errors. +func (s *Server) Stop() { + s.quitOnce.Do(func() { + close(s.quit) + }) + + defer func() { + s.serveWG.Wait() + s.doneOnce.Do(func() { + close(s.done) + }) + }() + + s.channelzRemoveOnce.Do(func() { + if channelz.IsOn() { + channelz.RemoveEntry(s.channelzID) + } + }) + + s.mu.Lock() + listeners := s.lis + s.lis = nil + st := s.conns + s.conns = nil + // interrupt GracefulStop if Stop and GracefulStop are called concurrently. + s.cv.Broadcast() + s.mu.Unlock() + + for lis := range listeners { + lis.Close() + } + for c := range st { + c.Close() + } + + s.mu.Lock() + if s.events != nil { + s.events.Finish() + s.events = nil + } + s.mu.Unlock() +} + +// GracefulStop stops the gRPC server gracefully. It stops the server from +// accepting new connections and RPCs and blocks until all the pending RPCs are +// finished. 
+func (s *Server) GracefulStop() { + s.quitOnce.Do(func() { + close(s.quit) + }) + + defer func() { + s.doneOnce.Do(func() { + close(s.done) + }) + }() + + s.channelzRemoveOnce.Do(func() { + if channelz.IsOn() { + channelz.RemoveEntry(s.channelzID) + } + }) + s.mu.Lock() + if s.conns == nil { + s.mu.Unlock() + return + } + + for lis := range s.lis { + lis.Close() + } + s.lis = nil + if !s.drain { + for c := range s.conns { + c.(transport.ServerTransport).Drain() + } + s.drain = true + } + + // Wait for serving threads to be ready to exit. Only then can we be sure no + // new conns will be created. + s.mu.Unlock() + s.serveWG.Wait() + s.mu.Lock() + + for len(s.conns) != 0 { + s.cv.Wait() + } + s.conns = nil + if s.events != nil { + s.events.Finish() + s.events = nil + } + s.mu.Unlock() +} + +// contentSubtype must be lowercase +// cannot return nil +func (s *Server) getCodec(contentSubtype string) baseCodec { + if s.opts.codec != nil { + return s.opts.codec + } + if contentSubtype == "" { + return encoding.GetCodec(proto.Name) + } + codec := encoding.GetCodec(contentSubtype) + if codec == nil { + return encoding.GetCodec(proto.Name) + } + return codec +} + +// SetHeader sets the header metadata. +// When called multiple times, all the provided metadata will be merged. +// All the metadata will be sent out when one of the following happens: +// - grpc.SendHeader() is called; +// - The first response is sent out; +// - An RPC status is sent out (error or success). +func SetHeader(ctx context.Context, md metadata.MD) error { + if md.Len() == 0 { + return nil + } + stream := ServerTransportStreamFromContext(ctx) + if stream == nil { + return status.Errorf(codes.Internal, "grpc: failed to fetch the stream from the context %v", ctx) + } + return stream.SetHeader(md) +} + +// SendHeader sends header metadata. It may be called at most once. +// The provided md and headers set by SetHeader() will be sent. +func SendHeader(ctx context.Context, md metadata.MD) error { + stream := ServerTransportStreamFromContext(ctx) + if stream == nil { + return status.Errorf(codes.Internal, "grpc: failed to fetch the stream from the context %v", ctx) + } + if err := stream.SendHeader(md); err != nil { + return toRPCErr(err) + } + return nil +} + +// SetTrailer sets the trailer metadata that will be sent when an RPC returns. +// When called more than once, all the provided metadata will be merged. +func SetTrailer(ctx context.Context, md metadata.MD) error { + if md.Len() == 0 { + return nil + } + stream := ServerTransportStreamFromContext(ctx) + if stream == nil { + return status.Errorf(codes.Internal, "grpc: failed to fetch the stream from the context %v", ctx) + } + return stream.SetTrailer(md) +} + +// Method returns the method string for the server context. The returned +// string is in the format of "/service/method". +func Method(ctx context.Context) (string, bool) { + s := ServerTransportStreamFromContext(ctx) + if s == nil { + return "", false + } + return s.Method(), true +} + +type channelzServer struct { + s *Server +} + +func (c *channelzServer) ChannelzMetric() *channelz.ServerInternalMetric { + return c.s.channelzMetric() +} diff --git a/vendor/google.golang.org/grpc/service_config.go b/vendor/google.golang.org/grpc/service_config.go new file mode 100644 index 0000000..1c52274 --- /dev/null +++ b/vendor/google.golang.org/grpc/service_config.go @@ -0,0 +1,373 @@ +/* + * + * Copyright 2017 gRPC authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + "time" + + "google.golang.org/grpc/codes" + "google.golang.org/grpc/grpclog" +) + +const maxInt = int(^uint(0) >> 1) + +// MethodConfig defines the configuration recommended by the service providers for a +// particular method. +// +// Deprecated: Users should not use this struct. Service config should be received +// through name resolver, as specified here +// https://github.com/grpc/grpc/blob/master/doc/service_config.md +type MethodConfig struct { + // WaitForReady indicates whether RPCs sent to this method should wait until + // the connection is ready by default (!failfast). The value specified via the + // gRPC client API will override the value set here. + WaitForReady *bool + // Timeout is the default timeout for RPCs sent to this method. The actual + // deadline used will be the minimum of the value specified here and the value + // set by the application via the gRPC client API. If either one is not set, + // then the other will be used. If neither is set, then the RPC has no deadline. + Timeout *time.Duration + // MaxReqSize is the maximum allowed payload size for an individual request in a + // stream (client->server) in bytes. The size which is measured is the serialized + // payload after per-message compression (but before stream compression) in bytes. + // The actual value used is the minimum of the value specified here and the value set + // by the application via the gRPC client API. If either one is not set, then the other + // will be used. If neither is set, then the built-in default is used. + MaxReqSize *int + // MaxRespSize is the maximum allowed payload size for an individual response in a + // stream (server->client) in bytes. + MaxRespSize *int + // RetryPolicy configures retry options for the method. + retryPolicy *retryPolicy +} + +// ServiceConfig is provided by the service provider and contains parameters for how +// clients that connect to the service should behave. +// +// Deprecated: Users should not use this struct. Service config should be received +// through name resolver, as specified here +// https://github.com/grpc/grpc/blob/master/doc/service_config.md +type ServiceConfig struct { + // LB is the load balancer the service providers recommends. The balancer specified + // via grpc.WithBalancer will override this. + LB *string + + // Methods contains a map for the methods in this service. If there is an + // exact match for a method (i.e. /service/method) in the map, use the + // corresponding MethodConfig. If there's no exact match, look for the + // default config for the service (/service/) and use the corresponding + // MethodConfig if it exists. Otherwise, the method has no MethodConfig to + // use. 
+ Methods map[string]MethodConfig + + // If a retryThrottlingPolicy is provided, gRPC will automatically throttle + // retry attempts and hedged RPCs when the client’s ratio of failures to + // successes exceeds a threshold. + // + // For each server name, the gRPC client will maintain a token_count which is + // initially set to maxTokens, and can take values between 0 and maxTokens. + // + // Every outgoing RPC (regardless of service or method invoked) will change + // token_count as follows: + // + // - Every failed RPC will decrement the token_count by 1. + // - Every successful RPC will increment the token_count by tokenRatio. + // + // If token_count is less than or equal to maxTokens / 2, then RPCs will not + // be retried and hedged RPCs will not be sent. + retryThrottling *retryThrottlingPolicy + // healthCheckConfig must be set as one of the requirement to enable LB channel + // health check. + healthCheckConfig *healthCheckConfig + // rawJSONString stores service config json string that get parsed into + // this service config struct. + rawJSONString string +} + +// healthCheckConfig defines the go-native version of the LB channel health check config. +type healthCheckConfig struct { + // serviceName is the service name to use in the health-checking request. + ServiceName string +} + +// retryPolicy defines the go-native version of the retry policy defined by the +// service config here: +// https://github.com/grpc/proposal/blob/master/A6-client-retries.md#integration-with-service-config +type retryPolicy struct { + // MaxAttempts is the maximum number of attempts, including the original RPC. + // + // This field is required and must be two or greater. + maxAttempts int + + // Exponential backoff parameters. The initial retry attempt will occur at + // random(0, initialBackoffMS). In general, the nth attempt will occur at + // random(0, + // min(initialBackoffMS*backoffMultiplier**(n-1), maxBackoffMS)). + // + // These fields are required and must be greater than zero. + initialBackoff time.Duration + maxBackoff time.Duration + backoffMultiplier float64 + + // The set of status codes which may be retried. + // + // Status codes are specified as strings, e.g., "UNAVAILABLE". + // + // This field is required and must be non-empty. + // Note: a set is used to store this for easy lookup. + retryableStatusCodes map[codes.Code]bool +} + +type jsonRetryPolicy struct { + MaxAttempts int + InitialBackoff string + MaxBackoff string + BackoffMultiplier float64 + RetryableStatusCodes []codes.Code +} + +// retryThrottlingPolicy defines the go-native version of the retry throttling +// policy defined by the service config here: +// https://github.com/grpc/proposal/blob/master/A6-client-retries.md#integration-with-service-config +type retryThrottlingPolicy struct { + // The number of tokens starts at maxTokens. The token_count will always be + // between 0 and maxTokens. + // + // This field is required and must be greater than zero. + MaxTokens float64 + // The amount of tokens to add on each successful RPC. Typically this will + // be some number between 0 and 1, e.g., 0.1. + // + // This field is required and must be greater than zero. Up to 3 decimal + // places are supported. 
+ TokenRatio float64 +} + +func parseDuration(s *string) (*time.Duration, error) { + if s == nil { + return nil, nil + } + if !strings.HasSuffix(*s, "s") { + return nil, fmt.Errorf("malformed duration %q", *s) + } + ss := strings.SplitN((*s)[:len(*s)-1], ".", 3) + if len(ss) > 2 { + return nil, fmt.Errorf("malformed duration %q", *s) + } + // hasDigits is set if either the whole or fractional part of the number is + // present, since both are optional but one is required. + hasDigits := false + var d time.Duration + if len(ss[0]) > 0 { + i, err := strconv.ParseInt(ss[0], 10, 32) + if err != nil { + return nil, fmt.Errorf("malformed duration %q: %v", *s, err) + } + d = time.Duration(i) * time.Second + hasDigits = true + } + if len(ss) == 2 && len(ss[1]) > 0 { + if len(ss[1]) > 9 { + return nil, fmt.Errorf("malformed duration %q", *s) + } + f, err := strconv.ParseInt(ss[1], 10, 64) + if err != nil { + return nil, fmt.Errorf("malformed duration %q: %v", *s, err) + } + for i := 9; i > len(ss[1]); i-- { + f *= 10 + } + d += time.Duration(f) + hasDigits = true + } + if !hasDigits { + return nil, fmt.Errorf("malformed duration %q", *s) + } + + return &d, nil +} + +type jsonName struct { + Service *string + Method *string +} + +func (j jsonName) generatePath() (string, bool) { + if j.Service == nil { + return "", false + } + res := "/" + *j.Service + "/" + if j.Method != nil { + res += *j.Method + } + return res, true +} + +// TODO(lyuxuan): delete this struct after cleaning up old service config implementation. +type jsonMC struct { + Name *[]jsonName + WaitForReady *bool + Timeout *string + MaxRequestMessageBytes *int64 + MaxResponseMessageBytes *int64 + RetryPolicy *jsonRetryPolicy +} + +// TODO(lyuxuan): delete this struct after cleaning up old service config implementation. 
+type jsonSC struct { + LoadBalancingPolicy *string + MethodConfig *[]jsonMC + RetryThrottling *retryThrottlingPolicy + HealthCheckConfig *healthCheckConfig +} + +func parseServiceConfig(js string) (*ServiceConfig, error) { + var rsc jsonSC + err := json.Unmarshal([]byte(js), &rsc) + if err != nil { + grpclog.Warningf("grpc: parseServiceConfig error unmarshaling %s due to %v", js, err) + return nil, err + } + sc := ServiceConfig{ + LB: rsc.LoadBalancingPolicy, + Methods: make(map[string]MethodConfig), + retryThrottling: rsc.RetryThrottling, + healthCheckConfig: rsc.HealthCheckConfig, + rawJSONString: js, + } + if rsc.MethodConfig == nil { + return &sc, nil + } + + for _, m := range *rsc.MethodConfig { + if m.Name == nil { + continue + } + d, err := parseDuration(m.Timeout) + if err != nil { + grpclog.Warningf("grpc: parseServiceConfig error unmarshaling %s due to %v", js, err) + return nil, err + } + + mc := MethodConfig{ + WaitForReady: m.WaitForReady, + Timeout: d, + } + if mc.retryPolicy, err = convertRetryPolicy(m.RetryPolicy); err != nil { + grpclog.Warningf("grpc: parseServiceConfig error unmarshaling %s due to %v", js, err) + return nil, err + } + if m.MaxRequestMessageBytes != nil { + if *m.MaxRequestMessageBytes > int64(maxInt) { + mc.MaxReqSize = newInt(maxInt) + } else { + mc.MaxReqSize = newInt(int(*m.MaxRequestMessageBytes)) + } + } + if m.MaxResponseMessageBytes != nil { + if *m.MaxResponseMessageBytes > int64(maxInt) { + mc.MaxRespSize = newInt(maxInt) + } else { + mc.MaxRespSize = newInt(int(*m.MaxResponseMessageBytes)) + } + } + for _, n := range *m.Name { + if path, valid := n.generatePath(); valid { + sc.Methods[path] = mc + } + } + } + + if sc.retryThrottling != nil { + if sc.retryThrottling.MaxTokens <= 0 || + sc.retryThrottling.MaxTokens > 1000 || + sc.retryThrottling.TokenRatio <= 0 { + // Illegal throttling config; disable throttling. + sc.retryThrottling = nil + } + } + return &sc, nil +} + +func convertRetryPolicy(jrp *jsonRetryPolicy) (p *retryPolicy, err error) { + if jrp == nil { + return nil, nil + } + ib, err := parseDuration(&jrp.InitialBackoff) + if err != nil { + return nil, err + } + mb, err := parseDuration(&jrp.MaxBackoff) + if err != nil { + return nil, err + } + + if jrp.MaxAttempts <= 1 || + *ib <= 0 || + *mb <= 0 || + jrp.BackoffMultiplier <= 0 || + len(jrp.RetryableStatusCodes) == 0 { + grpclog.Warningf("grpc: ignoring retry policy %v due to illegal configuration", jrp) + return nil, nil + } + + rp := &retryPolicy{ + maxAttempts: jrp.MaxAttempts, + initialBackoff: *ib, + maxBackoff: *mb, + backoffMultiplier: jrp.BackoffMultiplier, + retryableStatusCodes: make(map[codes.Code]bool), + } + if rp.maxAttempts > 5 { + // TODO(retry): Make the max maxAttempts configurable. 
+ rp.maxAttempts = 5 + } + for _, code := range jrp.RetryableStatusCodes { + rp.retryableStatusCodes[code] = true + } + return rp, nil +} + +func min(a, b *int) *int { + if *a < *b { + return a + } + return b +} + +func getMaxSize(mcMax, doptMax *int, defaultVal int) *int { + if mcMax == nil && doptMax == nil { + return &defaultVal + } + if mcMax != nil && doptMax != nil { + return min(mcMax, doptMax) + } + if mcMax != nil { + return mcMax + } + return doptMax +} + +func newInt(b int) *int { + return &b +} diff --git a/vendor/google.golang.org/grpc/stats/grpc_testing/test.pb.go b/vendor/google.golang.org/grpc/stats/grpc_testing/test.pb.go new file mode 100644 index 0000000..9db10f8 --- /dev/null +++ b/vendor/google.golang.org/grpc/stats/grpc_testing/test.pb.go @@ -0,0 +1,403 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc_testing/test.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SimpleRequest struct { + Id int32 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_dd7ffeaa75513a0a, []int{0} +} +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (dst *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(dst, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +func (m *SimpleRequest) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +type SimpleResponse struct { + Id int32 `protobuf:"varint,3,opt,name=id,proto3" json:"id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_dd7ffeaa75513a0a, []int{1} +} +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, 
deterministic) +} +func (dst *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(dst, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +func (m *SimpleResponse) GetId() int32 { + if m != nil { + return m.Id + } + return 0 +} + +func init() { + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TestServiceClient is the client API for TestService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TestServiceClient interface { + // One request followed by one response. + // The server returns the client id as-is. + UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) + // Client stream + ClientStreamCall(ctx context.Context, opts ...grpc.CallOption) (TestService_ClientStreamCallClient, error) + // Server stream + ServerStreamCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (TestService_ServerStreamCallClient, error) +} + +type testServiceClient struct { + cc *grpc.ClientConn +} + +func NewTestServiceClient(cc *grpc.ClientConn) TestServiceClient { + return &testServiceClient{cc} +} + +func (c *testServiceClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) { + out := new(SimpleResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.TestService/UnaryCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testServiceClient) FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[0], "/grpc.testing.TestService/FullDuplexCall", opts...) 
+ if err != nil { + return nil, err + } + x := &testServiceFullDuplexCallClient{stream} + return x, nil +} + +type TestService_FullDuplexCallClient interface { + Send(*SimpleRequest) error + Recv() (*SimpleResponse, error) + grpc.ClientStream +} + +type testServiceFullDuplexCallClient struct { + grpc.ClientStream +} + +func (x *testServiceFullDuplexCallClient) Send(m *SimpleRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallClient) Recv() (*SimpleResponse, error) { + m := new(SimpleResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) ClientStreamCall(ctx context.Context, opts ...grpc.CallOption) (TestService_ClientStreamCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[1], "/grpc.testing.TestService/ClientStreamCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceClientStreamCallClient{stream} + return x, nil +} + +type TestService_ClientStreamCallClient interface { + Send(*SimpleRequest) error + CloseAndRecv() (*SimpleResponse, error) + grpc.ClientStream +} + +type testServiceClientStreamCallClient struct { + grpc.ClientStream +} + +func (x *testServiceClientStreamCallClient) Send(m *SimpleRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceClientStreamCallClient) CloseAndRecv() (*SimpleResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(SimpleResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) ServerStreamCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (TestService_ServerStreamCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[2], "/grpc.testing.TestService/ServerStreamCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceServerStreamCallClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type TestService_ServerStreamCallClient interface { + Recv() (*SimpleResponse, error) + grpc.ClientStream +} + +type testServiceServerStreamCallClient struct { + grpc.ClientStream +} + +func (x *testServiceServerStreamCallClient) Recv() (*SimpleResponse, error) { + m := new(SimpleResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TestServiceServer is the server API for TestService service. +type TestServiceServer interface { + // One request followed by one response. + // The server returns the client id as-is. + UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. 
+ FullDuplexCall(TestService_FullDuplexCallServer) error + // Client stream + ClientStreamCall(TestService_ClientStreamCallServer) error + // Server stream + ServerStreamCall(*SimpleRequest, TestService_ServerStreamCallServer) error +} + +func RegisterTestServiceServer(s *grpc.Server, srv TestServiceServer) { + s.RegisterService(&_TestService_serviceDesc, srv) +} + +func _TestService_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServiceServer).UnaryCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.TestService/UnaryCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServiceServer).UnaryCall(ctx, req.(*SimpleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TestService_FullDuplexCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).FullDuplexCall(&testServiceFullDuplexCallServer{stream}) +} + +type TestService_FullDuplexCallServer interface { + Send(*SimpleResponse) error + Recv() (*SimpleRequest, error) + grpc.ServerStream +} + +type testServiceFullDuplexCallServer struct { + grpc.ServerStream +} + +func (x *testServiceFullDuplexCallServer) Send(m *SimpleResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallServer) Recv() (*SimpleRequest, error) { + m := new(SimpleRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_ClientStreamCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).ClientStreamCall(&testServiceClientStreamCallServer{stream}) +} + +type TestService_ClientStreamCallServer interface { + SendAndClose(*SimpleResponse) error + Recv() (*SimpleRequest, error) + grpc.ServerStream +} + +type testServiceClientStreamCallServer struct { + grpc.ServerStream +} + +func (x *testServiceClientStreamCallServer) SendAndClose(m *SimpleResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceClientStreamCallServer) Recv() (*SimpleRequest, error) { + m := new(SimpleRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_ServerStreamCall_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(SimpleRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TestServiceServer).ServerStreamCall(m, &testServiceServerStreamCallServer{stream}) +} + +type TestService_ServerStreamCallServer interface { + Send(*SimpleResponse) error + grpc.ServerStream +} + +type testServiceServerStreamCallServer struct { + grpc.ServerStream +} + +func (x *testServiceServerStreamCallServer) Send(m *SimpleResponse) error { + return x.ServerStream.SendMsg(m) +} + +var _TestService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.TestService", + HandlerType: (*TestServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "UnaryCall", + Handler: _TestService_UnaryCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "FullDuplexCall", + Handler: _TestService_FullDuplexCall_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "ClientStreamCall", + Handler: _TestService_ClientStreamCall_Handler, + 
ClientStreams: true, + }, + { + StreamName: "ServerStreamCall", + Handler: _TestService_ServerStreamCall_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc_testing/test.proto", +} + +func init() { proto.RegisterFile("grpc_testing/test.proto", fileDescriptor_test_dd7ffeaa75513a0a) } + +var fileDescriptor_test_dd7ffeaa75513a0a = []byte{ + // 202 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4f, 0x2f, 0x2a, 0x48, + 0x8e, 0x2f, 0x49, 0x2d, 0x2e, 0xc9, 0xcc, 0x4b, 0xd7, 0x07, 0xd1, 0x7a, 0x05, 0x45, 0xf9, 0x25, + 0xf9, 0x42, 0x3c, 0x20, 0x09, 0x3d, 0xa8, 0x84, 0x92, 0x3c, 0x17, 0x6f, 0x70, 0x66, 0x6e, 0x41, + 0x4e, 0x6a, 0x50, 0x6a, 0x61, 0x69, 0x6a, 0x71, 0x89, 0x10, 0x1f, 0x17, 0x53, 0x66, 0x8a, 0x04, + 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x53, 0x66, 0x8a, 0x92, 0x02, 0x17, 0x1f, 0x4c, 0x41, 0x71, + 0x41, 0x7e, 0x5e, 0x71, 0x2a, 0x54, 0x05, 0x33, 0x4c, 0x85, 0xd1, 0x09, 0x26, 0x2e, 0xee, 0x90, + 0xd4, 0xe2, 0x92, 0xe0, 0xd4, 0xa2, 0xb2, 0xcc, 0xe4, 0x54, 0x21, 0x37, 0x2e, 0xce, 0xd0, 0xbc, + 0xc4, 0xa2, 0x4a, 0xe7, 0xc4, 0x9c, 0x1c, 0x21, 0x69, 0x3d, 0x64, 0xeb, 0xf4, 0x50, 0xec, 0x92, + 0x92, 0xc1, 0x2e, 0x09, 0xb5, 0xc7, 0x9f, 0x8b, 0xcf, 0xad, 0x34, 0x27, 0xc7, 0xa5, 0xb4, 0x20, + 0x27, 0xb5, 0x82, 0x42, 0xc3, 0x34, 0x18, 0x0d, 0x18, 0x85, 0xfc, 0xb9, 0x04, 0x9c, 0x73, 0x32, + 0x53, 0xf3, 0x4a, 0x82, 0x4b, 0x8a, 0x52, 0x13, 0x73, 0x29, 0x36, 0x12, 0x64, 0x20, 0xc8, 0xd3, + 0xa9, 0x45, 0x54, 0x31, 0xd0, 0x80, 0x31, 0x89, 0x0d, 0x1c, 0x45, 0xc6, 0x80, 0x00, 0x00, 0x00, + 0xff, 0xff, 0x4c, 0x43, 0x27, 0x67, 0xbd, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/stats/handlers.go b/vendor/google.golang.org/grpc/stats/handlers.go new file mode 100644 index 0000000..dc03731 --- /dev/null +++ b/vendor/google.golang.org/grpc/stats/handlers.go @@ -0,0 +1,63 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package stats + +import ( + "context" + "net" +) + +// ConnTagInfo defines the relevant information needed by connection context tagger. +type ConnTagInfo struct { + // RemoteAddr is the remote address of the corresponding connection. + RemoteAddr net.Addr + // LocalAddr is the local address of the corresponding connection. + LocalAddr net.Addr +} + +// RPCTagInfo defines the relevant information needed by RPC context tagger. +type RPCTagInfo struct { + // FullMethodName is the RPC method in the format of /package.service/method. + FullMethodName string + // FailFast indicates if this RPC is failfast. + // This field is only valid on client side, it's always false on server side. + FailFast bool +} + +// Handler defines the interface for the related stats handling (e.g., RPCs, connections). +type Handler interface { + // TagRPC can attach some information to the given context. + // The context used for the rest lifetime of the RPC will be derived from + // the returned context. 
+ TagRPC(context.Context, *RPCTagInfo) context.Context + // HandleRPC processes the RPC stats. + HandleRPC(context.Context, RPCStats) + + // TagConn can attach some information to the given context. + // The returned context will be used for stats handling. + // For conn stats handling, the context used in HandleConn for this + // connection will be derived from the context returned. + // For RPC stats handling, + // - On server side, the context used in HandleRPC for all RPCs on this + // connection will be derived from the context returned. + // - On client side, the context is not derived from the context returned. + TagConn(context.Context, *ConnTagInfo) context.Context + // HandleConn processes the Conn stats. + HandleConn(context.Context, ConnStats) +} diff --git a/vendor/google.golang.org/grpc/stats/stats.go b/vendor/google.golang.org/grpc/stats/stats.go new file mode 100644 index 0000000..f3f593c --- /dev/null +++ b/vendor/google.golang.org/grpc/stats/stats.go @@ -0,0 +1,300 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc --go_out=plugins=grpc:. grpc_testing/test.proto + +// Package stats is for collecting and reporting various network and RPC stats. +// This package is for monitoring purpose only. All fields are read-only. +// All APIs are experimental. +package stats // import "google.golang.org/grpc/stats" + +import ( + "context" + "net" + "time" + + "google.golang.org/grpc/metadata" +) + +// RPCStats contains stats information about RPCs. +type RPCStats interface { + isRPCStats() + // IsClient returns true if this RPCStats is from client side. + IsClient() bool +} + +// Begin contains stats when an RPC begins. +// FailFast is only valid if this Begin is from client side. +type Begin struct { + // Client is true if this Begin is from client side. + Client bool + // BeginTime is the time when the RPC begins. + BeginTime time.Time + // FailFast indicates if this RPC is failfast. + FailFast bool +} + +// IsClient indicates if the stats information is from client side. +func (s *Begin) IsClient() bool { return s.Client } + +func (s *Begin) isRPCStats() {} + +// InPayload contains the information for an incoming payload. +type InPayload struct { + // Client is true if this InPayload is from client side. + Client bool + // Payload is the payload with original type. + Payload interface{} + // Data is the serialized message payload. + Data []byte + // Length is the length of uncompressed data. + Length int + // WireLength is the length of data on wire (compressed, signed, encrypted). + WireLength int + // RecvTime is the time when the payload is received. + RecvTime time.Time +} + +// IsClient indicates if the stats information is from client side. +func (s *InPayload) IsClient() bool { return s.Client } + +func (s *InPayload) isRPCStats() {} + +// InHeader contains stats when a header is received. +type InHeader struct { + // Client is true if this InHeader is from client side. 
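// Illustrative sketch (not part of the vendored sources) of how the Handler
// interface above is typically consumed: a handler that records the RPC start
// time in TagRPC and logs the elapsed time when the End event arrives in
// HandleRPC. The latencyHandler and startTimeKey names and the localhost:50051
// target are assumptions made for this example only.
package main

import (
	"context"
	"log"
	"time"

	"google.golang.org/grpc"
	"google.golang.org/grpc/stats"
)

type latencyHandler struct{}

type startTimeKey struct{}

// TagRPC derives the per-RPC context that later HandleRPC calls will receive.
func (latencyHandler) TagRPC(ctx context.Context, _ *stats.RPCTagInfo) context.Context {
	return context.WithValue(ctx, startTimeKey{}, time.Now())
}

// HandleRPC is invoked for every RPCStats event; only End is interesting here.
func (latencyHandler) HandleRPC(ctx context.Context, s stats.RPCStats) {
	if end, ok := s.(*stats.End); ok {
		if start, ok := ctx.Value(startTimeKey{}).(time.Time); ok {
			log.Printf("rpc took %v, err=%v", time.Since(start), end.Error)
		}
	}
}

func (latencyHandler) TagConn(ctx context.Context, _ *stats.ConnTagInfo) context.Context {
	return ctx
}

func (latencyHandler) HandleConn(context.Context, stats.ConnStats) {}

func main() {
	// WithStatsHandler installs the handler for every RPC and connection event
	// on this ClientConn.
	conn, err := grpc.Dial("localhost:50051",
		grpc.WithInsecure(),
		grpc.WithStatsHandler(latencyHandler{}),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
}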
+ Client bool + // WireLength is the wire length of header. + WireLength int + + // The following fields are valid only if Client is false. + // FullMethod is the full RPC method string, i.e., /package.service/method. + FullMethod string + // RemoteAddr is the remote address of the corresponding connection. + RemoteAddr net.Addr + // LocalAddr is the local address of the corresponding connection. + LocalAddr net.Addr + // Compression is the compression algorithm used for the RPC. + Compression string +} + +// IsClient indicates if the stats information is from client side. +func (s *InHeader) IsClient() bool { return s.Client } + +func (s *InHeader) isRPCStats() {} + +// InTrailer contains stats when a trailer is received. +type InTrailer struct { + // Client is true if this InTrailer is from client side. + Client bool + // WireLength is the wire length of trailer. + WireLength int +} + +// IsClient indicates if the stats information is from client side. +func (s *InTrailer) IsClient() bool { return s.Client } + +func (s *InTrailer) isRPCStats() {} + +// OutPayload contains the information for an outgoing payload. +type OutPayload struct { + // Client is true if this OutPayload is from client side. + Client bool + // Payload is the payload with original type. + Payload interface{} + // Data is the serialized message payload. + Data []byte + // Length is the length of uncompressed data. + Length int + // WireLength is the length of data on wire (compressed, signed, encrypted). + WireLength int + // SentTime is the time when the payload is sent. + SentTime time.Time +} + +// IsClient indicates if this stats information is from client side. +func (s *OutPayload) IsClient() bool { return s.Client } + +func (s *OutPayload) isRPCStats() {} + +// OutHeader contains stats when a header is sent. +type OutHeader struct { + // Client is true if this OutHeader is from client side. + Client bool + + // The following fields are valid only if Client is true. + // FullMethod is the full RPC method string, i.e., /package.service/method. + FullMethod string + // RemoteAddr is the remote address of the corresponding connection. + RemoteAddr net.Addr + // LocalAddr is the local address of the corresponding connection. + LocalAddr net.Addr + // Compression is the compression algorithm used for the RPC. + Compression string +} + +// IsClient indicates if this stats information is from client side. +func (s *OutHeader) IsClient() bool { return s.Client } + +func (s *OutHeader) isRPCStats() {} + +// OutTrailer contains stats when a trailer is sent. +type OutTrailer struct { + // Client is true if this OutTrailer is from client side. + Client bool + // WireLength is the wire length of trailer. + WireLength int +} + +// IsClient indicates if this stats information is from client side. +func (s *OutTrailer) IsClient() bool { return s.Client } + +func (s *OutTrailer) isRPCStats() {} + +// End contains stats when an RPC ends. +type End struct { + // Client is true if this End is from client side. + Client bool + // BeginTime is the time when the RPC began. + BeginTime time.Time + // EndTime is the time when the RPC ends. + EndTime time.Time + // Trailer contains the trailer metadata received from the server. This + // field is only valid if this End is from the client side. + Trailer metadata.MD + // Error is the error the RPC ended with. It is an error generated from + // status.Status and can be converted back to status.Status using + // status.FromError if non-nil. 
+ Error error +} + +// IsClient indicates if this is from client side. +func (s *End) IsClient() bool { return s.Client } + +func (s *End) isRPCStats() {} + +// ConnStats contains stats information about connections. +type ConnStats interface { + isConnStats() + // IsClient returns true if this ConnStats is from client side. + IsClient() bool +} + +// ConnBegin contains the stats of a connection when it is established. +type ConnBegin struct { + // Client is true if this ConnBegin is from client side. + Client bool +} + +// IsClient indicates if this is from client side. +func (s *ConnBegin) IsClient() bool { return s.Client } + +func (s *ConnBegin) isConnStats() {} + +// ConnEnd contains the stats of a connection when it ends. +type ConnEnd struct { + // Client is true if this ConnEnd is from client side. + Client bool +} + +// IsClient indicates if this is from client side. +func (s *ConnEnd) IsClient() bool { return s.Client } + +func (s *ConnEnd) isConnStats() {} + +type incomingTagsKey struct{} +type outgoingTagsKey struct{} + +// SetTags attaches stats tagging data to the context, which will be sent in +// the outgoing RPC with the header grpc-tags-bin. Subsequent calls to +// SetTags will overwrite the values from earlier calls. +// +// NOTE: this is provided only for backward compatibility with existing clients +// and will likely be removed in an upcoming release. New uses should transmit +// this type of data using metadata with a different, non-reserved (i.e. does +// not begin with "grpc-") header name. +func SetTags(ctx context.Context, b []byte) context.Context { + return context.WithValue(ctx, outgoingTagsKey{}, b) +} + +// Tags returns the tags from the context for the inbound RPC. +// +// NOTE: this is provided only for backward compatibility with existing clients +// and will likely be removed in an upcoming release. New uses should transmit +// this type of data using metadata with a different, non-reserved (i.e. does +// not begin with "grpc-") header name. +func Tags(ctx context.Context) []byte { + b, _ := ctx.Value(incomingTagsKey{}).([]byte) + return b +} + +// SetIncomingTags attaches stats tagging data to the context, to be read by +// the application (not sent in outgoing RPCs). +// +// This is intended for gRPC-internal use ONLY. +func SetIncomingTags(ctx context.Context, b []byte) context.Context { + return context.WithValue(ctx, incomingTagsKey{}, b) +} + +// OutgoingTags returns the tags from the context for the outbound RPC. +// +// This is intended for gRPC-internal use ONLY. +func OutgoingTags(ctx context.Context) []byte { + b, _ := ctx.Value(outgoingTagsKey{}).([]byte) + return b +} + +type incomingTraceKey struct{} +type outgoingTraceKey struct{} + +// SetTrace attaches stats tagging data to the context, which will be sent in +// the outgoing RPC with the header grpc-trace-bin. Subsequent calls to +// SetTrace will overwrite the values from earlier calls. +// +// NOTE: this is provided only for backward compatibility with existing clients +// and will likely be removed in an upcoming release. New uses should transmit +// this type of data using metadata with a different, non-reserved (i.e. does +// not begin with "grpc-") header name. +func SetTrace(ctx context.Context, b []byte) context.Context { + return context.WithValue(ctx, outgoingTraceKey{}, b) +} + +// Trace returns the trace from the context for the inbound RPC. 
+// +// NOTE: this is provided only for backward compatibility with existing clients +// and will likely be removed in an upcoming release. New uses should transmit +// this type of data using metadata with a different, non-reserved (i.e. does +// not begin with "grpc-") header name. +func Trace(ctx context.Context) []byte { + b, _ := ctx.Value(incomingTraceKey{}).([]byte) + return b +} + +// SetIncomingTrace attaches stats tagging data to the context, to be read by +// the application (not sent in outgoing RPCs). It is intended for +// gRPC-internal use. +func SetIncomingTrace(ctx context.Context, b []byte) context.Context { + return context.WithValue(ctx, incomingTraceKey{}, b) +} + +// OutgoingTrace returns the trace from the context for the outbound RPC. It is +// intended for gRPC-internal use. +func OutgoingTrace(ctx context.Context) []byte { + b, _ := ctx.Value(outgoingTraceKey{}).([]byte) + return b +} diff --git a/vendor/google.golang.org/grpc/status/status.go b/vendor/google.golang.org/grpc/status/status.go new file mode 100644 index 0000000..ed36681 --- /dev/null +++ b/vendor/google.golang.org/grpc/status/status.go @@ -0,0 +1,210 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package status implements errors returned by gRPC. These errors are +// serialized and transmitted on the wire between server and client, and allow +// for additional data to be transmitted via the Details field in the status +// proto. gRPC service handlers should return an error created by this +// package, and gRPC clients should expect a corresponding error to be +// returned from the RPC call. +// +// This package upholds the invariants that a non-nil error may not +// contain an OK code, and an OK code must result in a nil error. +package status + +import ( + "context" + "errors" + "fmt" + + "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/ptypes" + spb "google.golang.org/genproto/googleapis/rpc/status" + "google.golang.org/grpc/codes" +) + +// statusError is an alias of a status proto. It implements error and Status, +// and a nil statusError should never be returned by this package. +type statusError spb.Status + +func (se *statusError) Error() string { + p := (*spb.Status)(se) + return fmt.Sprintf("rpc error: code = %s desc = %s", codes.Code(p.GetCode()), p.GetMessage()) +} + +func (se *statusError) GRPCStatus() *Status { + return &Status{s: (*spb.Status)(se)} +} + +// Status represents an RPC status code, message, and details. It is immutable +// and should be created with New, Newf, or FromProto. +type Status struct { + s *spb.Status +} + +// Code returns the status code contained in s. +func (s *Status) Code() codes.Code { + if s == nil || s.s == nil { + return codes.OK + } + return codes.Code(s.s.Code) +} + +// Message returns the message contained in s. 
+func (s *Status) Message() string { + if s == nil || s.s == nil { + return "" + } + return s.s.Message +} + +// Proto returns s's status as an spb.Status proto message. +func (s *Status) Proto() *spb.Status { + if s == nil { + return nil + } + return proto.Clone(s.s).(*spb.Status) +} + +// Err returns an immutable error representing s; returns nil if s.Code() is +// OK. +func (s *Status) Err() error { + if s.Code() == codes.OK { + return nil + } + return (*statusError)(s.s) +} + +// New returns a Status representing c and msg. +func New(c codes.Code, msg string) *Status { + return &Status{s: &spb.Status{Code: int32(c), Message: msg}} +} + +// Newf returns New(c, fmt.Sprintf(format, a...)). +func Newf(c codes.Code, format string, a ...interface{}) *Status { + return New(c, fmt.Sprintf(format, a...)) +} + +// Error returns an error representing c and msg. If c is OK, returns nil. +func Error(c codes.Code, msg string) error { + return New(c, msg).Err() +} + +// Errorf returns Error(c, fmt.Sprintf(format, a...)). +func Errorf(c codes.Code, format string, a ...interface{}) error { + return Error(c, fmt.Sprintf(format, a...)) +} + +// ErrorProto returns an error representing s. If s.Code is OK, returns nil. +func ErrorProto(s *spb.Status) error { + return FromProto(s).Err() +} + +// FromProto returns a Status representing s. +func FromProto(s *spb.Status) *Status { + return &Status{s: proto.Clone(s).(*spb.Status)} +} + +// FromError returns a Status representing err if it was produced from this +// package or has a method `GRPCStatus() *Status`. Otherwise, ok is false and a +// Status is returned with codes.Unknown and the original error message. +func FromError(err error) (s *Status, ok bool) { + if err == nil { + return &Status{s: &spb.Status{Code: int32(codes.OK)}}, true + } + if se, ok := err.(interface { + GRPCStatus() *Status + }); ok { + return se.GRPCStatus(), true + } + return New(codes.Unknown, err.Error()), false +} + +// Convert is a convenience function which removes the need to handle the +// boolean return value from FromError. +func Convert(err error) *Status { + s, _ := FromError(err) + return s +} + +// WithDetails returns a new status with the provided details messages appended to the status. +// If any errors are encountered, it returns nil and the first error encountered. +func (s *Status) WithDetails(details ...proto.Message) (*Status, error) { + if s.Code() == codes.OK { + return nil, errors.New("no error details for status with code OK") + } + // s.Code() != OK implies that s.Proto() != nil. + p := s.Proto() + for _, detail := range details { + any, err := ptypes.MarshalAny(detail) + if err != nil { + return nil, err + } + p.Details = append(p.Details, any) + } + return &Status{s: p}, nil +} + +// Details returns a slice of details messages attached to the status. +// If a detail cannot be decoded, the error is returned in place of the detail. +func (s *Status) Details() []interface{} { + if s == nil || s.s == nil { + return nil + } + details := make([]interface{}, 0, len(s.s.Details)) + for _, any := range s.s.Details { + detail := &ptypes.DynamicAny{} + if err := ptypes.UnmarshalAny(any, detail); err != nil { + details = append(details, err) + continue + } + details = append(details, detail.Message) + } + return details +} + +// Code returns the Code of the error if it is a Status error, codes.OK if err +// is nil, or codes.Unknown otherwise. +func Code(err error) codes.Code { + // Don't use FromError to avoid allocation of OK status. 
+ if err == nil { + return codes.OK + } + if se, ok := err.(interface { + GRPCStatus() *Status + }); ok { + return se.GRPCStatus().Code() + } + return codes.Unknown +} + +// FromContextError converts a context error into a Status. It returns a +// Status with codes.OK if err is nil, or a Status with codes.Unknown if err is +// non-nil and not a context error. +func FromContextError(err error) *Status { + switch err { + case nil: + return New(codes.OK, "") + case context.DeadlineExceeded: + return New(codes.DeadlineExceeded, err.Error()) + case context.Canceled: + return New(codes.Canceled, err.Error()) + default: + return New(codes.Unknown, err.Error()) + } +} diff --git a/vendor/google.golang.org/grpc/stream.go b/vendor/google.golang.org/grpc/stream.go new file mode 100644 index 0000000..6e2bf51 --- /dev/null +++ b/vendor/google.golang.org/grpc/stream.go @@ -0,0 +1,1498 @@ +/* + * + * Copyright 2014 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "context" + "errors" + "io" + "math" + "strconv" + "sync" + "time" + + "golang.org/x/net/trace" + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/encoding" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/balancerload" + "google.golang.org/grpc/internal/binarylog" + "google.golang.org/grpc/internal/channelz" + "google.golang.org/grpc/internal/grpcrand" + "google.golang.org/grpc/internal/transport" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/peer" + "google.golang.org/grpc/stats" + "google.golang.org/grpc/status" +) + +// StreamHandler defines the handler called by gRPC server to complete the +// execution of a streaming RPC. If a StreamHandler returns an error, it +// should be produced by the status package, or else gRPC will use +// codes.Unknown as the status code and err.Error() as the status message +// of the RPC. +type StreamHandler func(srv interface{}, stream ServerStream) error + +// StreamDesc represents a streaming RPC service's method specification. +type StreamDesc struct { + StreamName string + Handler StreamHandler + + // At least one of these is true. + ServerStreams bool + ClientStreams bool +} + +// Stream defines the common interface a client or server stream has to satisfy. +// +// Deprecated: See ClientStream and ServerStream documentation instead. +type Stream interface { + // Deprecated: See ClientStream and ServerStream documentation instead. + Context() context.Context + // Deprecated: See ClientStream and ServerStream documentation instead. + SendMsg(m interface{}) error + // Deprecated: See ClientStream and ServerStream documentation instead. + RecvMsg(m interface{}) error +} + +// ClientStream defines the client-side behavior of a streaming RPC. +// +// All errors returned from ClientStream methods are compatible with the +// status package. 
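// Illustrative sketch (not part of the vendored sources) of the status round
// trip that the package above implements: a handler-style function returns an
// error built with status.Errorf, and the caller inspects it with
// status.FromError / status.Code. The lookup function is a stand-in for a real
// gRPC handler body.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func lookup(id string) (string, error) {
	if id == "" {
		// Error/Errorf return nil when the code is OK, so they can be returned
		// unconditionally from a handler.
		return "", status.Errorf(codes.InvalidArgument, "id must not be empty")
	}
	return "value-for-" + id, nil
}

func main() {
	_, err := lookup("")

	// FromError recovers the *Status; ok is false for errors not produced by
	// this package (those are reported with codes.Unknown).
	if st, ok := status.FromError(err); ok {
		fmt.Println(st.Code(), st.Message())
	}

	// Code is a shortcut when only the code matters.
	fmt.Println(status.Code(err) == codes.InvalidArgument)
}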
+type ClientStream interface { + // Header returns the header metadata received from the server if there + // is any. It blocks if the metadata is not ready to read. + Header() (metadata.MD, error) + // Trailer returns the trailer metadata from the server, if there is any. + // It must only be called after stream.CloseAndRecv has returned, or + // stream.Recv has returned a non-nil error (including io.EOF). + Trailer() metadata.MD + // CloseSend closes the send direction of the stream. It closes the stream + // when non-nil error is met. It is also not safe to call CloseSend + // concurrently with SendMsg. + CloseSend() error + // Context returns the context for this stream. + // + // It should not be called until after Header or RecvMsg has returned. Once + // called, subsequent client-side retries are disabled. + Context() context.Context + // SendMsg is generally called by generated code. On error, SendMsg aborts + // the stream. If the error was generated by the client, the status is + // returned directly; otherwise, io.EOF is returned and the status of + // the stream may be discovered using RecvMsg. + // + // SendMsg blocks until: + // - There is sufficient flow control to schedule m with the transport, or + // - The stream is done, or + // - The stream breaks. + // + // SendMsg does not wait until the message is received by the server. An + // untimely stream closure may result in lost messages. To ensure delivery, + // users should ensure the RPC completed successfully using RecvMsg. + // + // It is safe to have a goroutine calling SendMsg and another goroutine + // calling RecvMsg on the same stream at the same time, but it is not safe + // to call SendMsg on the same stream in different goroutines. It is also + // not safe to call CloseSend concurrently with SendMsg. + SendMsg(m interface{}) error + // RecvMsg blocks until it receives a message into m or the stream is + // done. It returns io.EOF when the stream completes successfully. On + // any other error, the stream is aborted and the error contains the RPC + // status. + // + // It is safe to have a goroutine calling SendMsg and another goroutine + // calling RecvMsg on the same stream at the same time, but it is not + // safe to call RecvMsg on the same stream in different goroutines. + RecvMsg(m interface{}) error +} + +// NewStream creates a new Stream for the client side. This is typically +// called by generated code. ctx is used for the lifetime of the stream. +// +// To ensure resources are not leaked due to the stream returned, one of the following +// actions must be performed: +// +// 1. Call Close on the ClientConn. +// 2. Cancel the context provided. +// 3. Call RecvMsg until a non-nil error is returned. A protobuf-generated +// client-streaming RPC, for instance, might use the helper function +// CloseAndRecv (note that CloseSend does not Recv, therefore is not +// guaranteed to release all resources). +// 4. Receive a non-nil, non-io.EOF error from Header or SendMsg. +// +// If none of the above happen, a goroutine and a context will be leaked, and grpc +// will not call the optionally-configured stats handler with a stats.End message. 
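// Illustrative sketch (not part of the vendored sources) of cleanup rule 3
// above: after the last request has been sent, half-close and keep calling
// RecvMsg until it returns a non-nil error so the stream's goroutine and
// context are released. drainAndClose and newMsg are names invented for this
// example; the generated CloseAndRecv/Recv helpers wrap the same calls.
package streamexample

import (
	"io"

	"google.golang.org/grpc"
)

// drainAndClose half-closes the send side and receives until the stream ends.
// io.EOF means the RPC finished with an OK status; any other error carries the
// RPC status. In both cases gRPC has released the stream's resources.
func drainAndClose(cs grpc.ClientStream, newMsg func() interface{}) error {
	if err := cs.CloseSend(); err != nil {
		return err
	}
	for {
		if err := cs.RecvMsg(newMsg()); err != nil {
			if err == io.EOF {
				return nil
			}
			return err
		}
	}
}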
+func (cc *ClientConn) NewStream(ctx context.Context, desc *StreamDesc, method string, opts ...CallOption) (ClientStream, error) { + // allow interceptor to see all applicable call options, which means those + // configured as defaults from dial option as well as per-call options + opts = combine(cc.dopts.callOptions, opts) + + if cc.dopts.streamInt != nil { + return cc.dopts.streamInt(ctx, desc, cc, method, newClientStream, opts...) + } + return newClientStream(ctx, desc, cc, method, opts...) +} + +// NewClientStream is a wrapper for ClientConn.NewStream. +func NewClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (ClientStream, error) { + return cc.NewStream(ctx, desc, method, opts...) +} + +func newClientStream(ctx context.Context, desc *StreamDesc, cc *ClientConn, method string, opts ...CallOption) (_ ClientStream, err error) { + if channelz.IsOn() { + cc.incrCallsStarted() + defer func() { + if err != nil { + cc.incrCallsFailed() + } + }() + } + c := defaultCallInfo() + // Provide an opportunity for the first RPC to see the first service config + // provided by the resolver. + if err := cc.waitForResolvedAddrs(ctx); err != nil { + return nil, err + } + mc := cc.GetMethodConfig(method) + if mc.WaitForReady != nil { + c.failFast = !*mc.WaitForReady + } + + // Possible context leak: + // The cancel function for the child context we create will only be called + // when RecvMsg returns a non-nil error, if the ClientConn is closed, or if + // an error is generated by SendMsg. + // https://github.com/grpc/grpc-go/issues/1818. + var cancel context.CancelFunc + if mc.Timeout != nil && *mc.Timeout >= 0 { + ctx, cancel = context.WithTimeout(ctx, *mc.Timeout) + } else { + ctx, cancel = context.WithCancel(ctx) + } + defer func() { + if err != nil { + cancel() + } + }() + + for _, o := range opts { + if err := o.before(c); err != nil { + return nil, toRPCErr(err) + } + } + c.maxSendMessageSize = getMaxSize(mc.MaxReqSize, c.maxSendMessageSize, defaultClientMaxSendMessageSize) + c.maxReceiveMessageSize = getMaxSize(mc.MaxRespSize, c.maxReceiveMessageSize, defaultClientMaxReceiveMessageSize) + if err := setCallInfoCodec(c); err != nil { + return nil, err + } + + callHdr := &transport.CallHdr{ + Host: cc.authority, + Method: method, + ContentSubtype: c.contentSubtype, + } + + // Set our outgoing compression according to the UseCompressor CallOption, if + // set. In that case, also find the compressor from the encoding package. + // Otherwise, use the compressor configured by the WithCompressor DialOption, + // if set. 
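// Illustrative sketch (not part of the vendored sources) of the first path the
// comment above describes: importing the gzip package registers a compressor
// with the encoding package, and UseCompressor (here applied connection-wide
// via WithDefaultCallOptions) selects it per call; WithCompressor is the older
// connection-wide alternative. The localhost:50051 target is a placeholder.
package main

import (
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/encoding/gzip" // import side effect registers "gzip"; also exports gzip.Name
)

func main() {
	conn, err := grpc.Dial("localhost:50051",
		grpc.WithInsecure(),
		// Every RPC on this connection asks for gzip compression; the client
		// code then resolves the compressor via encoding.GetCompressor("gzip").
		grpc.WithDefaultCallOptions(grpc.UseCompressor(gzip.Name)),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
}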
+ var cp Compressor + var comp encoding.Compressor + if ct := c.compressorType; ct != "" { + callHdr.SendCompress = ct + if ct != encoding.Identity { + comp = encoding.GetCompressor(ct) + if comp == nil { + return nil, status.Errorf(codes.Internal, "grpc: Compressor is not installed for requested grpc-encoding %q", ct) + } + } + } else if cc.dopts.cp != nil { + callHdr.SendCompress = cc.dopts.cp.Type() + cp = cc.dopts.cp + } + if c.creds != nil { + callHdr.Creds = c.creds + } + var trInfo *traceInfo + if EnableTracing { + trInfo = &traceInfo{ + tr: trace.New("grpc.Sent."+methodFamily(method), method), + firstLine: firstLine{ + client: true, + }, + } + if deadline, ok := ctx.Deadline(); ok { + trInfo.firstLine.deadline = time.Until(deadline) + } + trInfo.tr.LazyLog(&trInfo.firstLine, false) + ctx = trace.NewContext(ctx, trInfo.tr) + } + ctx = newContextWithRPCInfo(ctx, c.failFast) + sh := cc.dopts.copts.StatsHandler + var beginTime time.Time + if sh != nil { + ctx = sh.TagRPC(ctx, &stats.RPCTagInfo{FullMethodName: method, FailFast: c.failFast}) + beginTime = time.Now() + begin := &stats.Begin{ + Client: true, + BeginTime: beginTime, + FailFast: c.failFast, + } + sh.HandleRPC(ctx, begin) + } + + cs := &clientStream{ + callHdr: callHdr, + ctx: ctx, + methodConfig: &mc, + opts: opts, + callInfo: c, + cc: cc, + desc: desc, + codec: c.codec, + cp: cp, + comp: comp, + cancel: cancel, + beginTime: beginTime, + firstAttempt: true, + } + if !cc.dopts.disableRetry { + cs.retryThrottler = cc.retryThrottler.Load().(*retryThrottler) + } + cs.binlog = binarylog.GetMethodLogger(method) + + cs.callInfo.stream = cs + // Only this initial attempt has stats/tracing. + // TODO(dfawley): move to newAttempt when per-attempt stats are implemented. + if err := cs.newAttemptLocked(sh, trInfo); err != nil { + cs.finish(err) + return nil, err + } + + op := func(a *csAttempt) error { return a.newStream() } + if err := cs.withRetry(op, func() { cs.bufferForRetryLocked(0, op) }); err != nil { + cs.finish(err) + return nil, err + } + + if cs.binlog != nil { + md, _ := metadata.FromOutgoingContext(ctx) + logEntry := &binarylog.ClientHeader{ + OnClientSide: true, + Header: md, + MethodName: method, + Authority: cs.cc.authority, + } + if deadline, ok := ctx.Deadline(); ok { + logEntry.Timeout = time.Until(deadline) + if logEntry.Timeout < 0 { + logEntry.Timeout = 0 + } + } + cs.binlog.Log(logEntry) + } + + if desc != unaryStreamDesc { + // Listen on cc and stream contexts to cleanup when the user closes the + // ClientConn or cancels the stream context. In all other cases, an error + // should already be injected into the recv buffer by the transport, which + // the client will eventually receive, and then we will cancel the stream's + // context in clientStream.finish. 
+ go func() { + select { + case <-cc.ctx.Done(): + cs.finish(ErrClientConnClosing) + case <-ctx.Done(): + cs.finish(toRPCErr(ctx.Err())) + } + }() + } + return cs, nil +} + +func (cs *clientStream) newAttemptLocked(sh stats.Handler, trInfo *traceInfo) error { + cs.attempt = &csAttempt{ + cs: cs, + dc: cs.cc.dopts.dc, + statsHandler: sh, + trInfo: trInfo, + } + + if err := cs.ctx.Err(); err != nil { + return toRPCErr(err) + } + t, done, err := cs.cc.getTransport(cs.ctx, cs.callInfo.failFast, cs.callHdr.Method) + if err != nil { + return err + } + if trInfo != nil { + trInfo.firstLine.SetRemoteAddr(t.RemoteAddr()) + } + cs.attempt.t = t + cs.attempt.done = done + return nil +} + +func (a *csAttempt) newStream() error { + cs := a.cs + cs.callHdr.PreviousAttempts = cs.numRetries + s, err := a.t.NewStream(cs.ctx, cs.callHdr) + if err != nil { + return toRPCErr(err) + } + cs.attempt.s = s + cs.attempt.p = &parser{r: s} + return nil +} + +// clientStream implements a client side Stream. +type clientStream struct { + callHdr *transport.CallHdr + opts []CallOption + callInfo *callInfo + cc *ClientConn + desc *StreamDesc + + codec baseCodec + cp Compressor + comp encoding.Compressor + + cancel context.CancelFunc // cancels all attempts + + sentLast bool // sent an end stream + beginTime time.Time + + methodConfig *MethodConfig + + ctx context.Context // the application's context, wrapped by stats/tracing + + retryThrottler *retryThrottler // The throttler active when the RPC began. + + binlog *binarylog.MethodLogger // Binary logger, can be nil. + // serverHeaderBinlogged is a boolean for whether server header has been + // logged. Server header will be logged when the first time one of those + // happens: stream.Header(), stream.Recv(). + // + // It's only read and used by Recv() and Header(), so it doesn't need to be + // synchronized. + serverHeaderBinlogged bool + + mu sync.Mutex + firstAttempt bool // if true, transparent retry is valid + numRetries int // exclusive of transparent retry attempt(s) + numRetriesSincePushback int // retries since pushback; to reset backoff + finished bool // TODO: replace with atomic cmpxchg or sync.Once? + attempt *csAttempt // the active client stream attempt + // TODO(hedging): hedging will have multiple attempts simultaneously. + committed bool // active attempt committed for retry? + buffer []func(a *csAttempt) error // operations to replay on retry + bufferSize int // current size of buffer +} + +// csAttempt implements a single transport stream attempt within a +// clientStream. +type csAttempt struct { + cs *clientStream + t transport.ClientTransport + s *transport.Stream + p *parser + done func(balancer.DoneInfo) + + finished bool + dc Decompressor + decomp encoding.Compressor + decompSet bool + + mu sync.Mutex // guards trInfo.tr + // trInfo may be nil (if EnableTracing is false). + // trInfo.tr is set when created (if EnableTracing is true), + // and cleared when the finish method is called. + trInfo *traceInfo + + statsHandler stats.Handler +} + +func (cs *clientStream) commitAttemptLocked() { + cs.committed = true + cs.buffer = nil +} + +func (cs *clientStream) commitAttempt() { + cs.mu.Lock() + cs.commitAttemptLocked() + cs.mu.Unlock() +} + +// shouldRetry returns nil if the RPC should be retried; otherwise it returns +// the error that should be returned by the operation. 
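// Illustrative sketch (not part of the vendored sources) of how the retry
// policy consulted by the client retry logic here is normally supplied: a
// method config with a retryPolicy, passed as a default service config. This
// assumes a grpc-go release that provides the experimental
// WithDefaultServiceConfig dial option; client retries in releases of this
// vintage were additionally gated behind the GRPC_GO_RETRY=on environment
// variable. The service name and backoff numbers are placeholders.
package main

import (
	"log"

	"google.golang.org/grpc"
)

const retryPolicy = `{
  "methodConfig": [{
    "name": [{ "service": "grpc.testing.TestService" }],
    "retryPolicy": {
      "maxAttempts": 4,
      "initialBackoff": "0.1s",
      "maxBackoff": "1s",
      "backoffMultiplier": 2.0,
      "retryableStatusCodes": ["UNAVAILABLE"]
    }
  }]
}`

func main() {
	// Codes listed in retryableStatusCodes are retried with exponential backoff
	// up to maxAttempts, unless a grpc-retry-pushback-ms trailer tells the
	// client to wait longer or to stop retrying.
	conn, err := grpc.Dial("localhost:50051",
		grpc.WithInsecure(),
		grpc.WithDefaultServiceConfig(retryPolicy),
	)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
}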
+func (cs *clientStream) shouldRetry(err error) error { + if cs.attempt.s == nil && !cs.callInfo.failFast { + // In the event of any error from NewStream (attempt.s == nil), we + // never attempted to write anything to the wire, so we can retry + // indefinitely for non-fail-fast RPCs. + return nil + } + if cs.finished || cs.committed { + // RPC is finished or committed; cannot retry. + return err + } + // Wait for the trailers. + if cs.attempt.s != nil { + <-cs.attempt.s.Done() + } + if cs.firstAttempt && !cs.callInfo.failFast && (cs.attempt.s == nil || cs.attempt.s.Unprocessed()) { + // First attempt, wait-for-ready, stream unprocessed: transparently retry. + cs.firstAttempt = false + return nil + } + cs.firstAttempt = false + if cs.cc.dopts.disableRetry { + return err + } + + pushback := 0 + hasPushback := false + if cs.attempt.s != nil { + if to, toErr := cs.attempt.s.TrailersOnly(); toErr != nil || !to { + return err + } + + // TODO(retry): Move down if the spec changes to not check server pushback + // before considering this a failure for throttling. + sps := cs.attempt.s.Trailer()["grpc-retry-pushback-ms"] + if len(sps) == 1 { + var e error + if pushback, e = strconv.Atoi(sps[0]); e != nil || pushback < 0 { + grpclog.Infof("Server retry pushback specified to abort (%q).", sps[0]) + cs.retryThrottler.throttle() // This counts as a failure for throttling. + return err + } + hasPushback = true + } else if len(sps) > 1 { + grpclog.Warningf("Server retry pushback specified multiple values (%q); not retrying.", sps) + cs.retryThrottler.throttle() // This counts as a failure for throttling. + return err + } + } + + var code codes.Code + if cs.attempt.s != nil { + code = cs.attempt.s.Status().Code() + } else { + code = status.Convert(err).Code() + } + + rp := cs.methodConfig.retryPolicy + if rp == nil || !rp.retryableStatusCodes[code] { + return err + } + + // Note: the ordering here is important; we count this as a failure + // only if the code matched a retryable code. + if cs.retryThrottler.throttle() { + return err + } + if cs.numRetries+1 >= rp.maxAttempts { + return err + } + + var dur time.Duration + if hasPushback { + dur = time.Millisecond * time.Duration(pushback) + cs.numRetriesSincePushback = 0 + } else { + fact := math.Pow(rp.backoffMultiplier, float64(cs.numRetriesSincePushback)) + cur := float64(rp.initialBackoff) * fact + if max := float64(rp.maxBackoff); cur > max { + cur = max + } + dur = time.Duration(grpcrand.Int63n(int64(cur))) + cs.numRetriesSincePushback++ + } + + // TODO(dfawley): we could eagerly fail here if dur puts us past the + // deadline, but unsure if it is worth doing. + t := time.NewTimer(dur) + select { + case <-t.C: + cs.numRetries++ + return nil + case <-cs.ctx.Done(): + t.Stop() + return status.FromContextError(cs.ctx.Err()).Err() + } +} + +// Returns nil if a retry was performed and succeeded; error otherwise. +func (cs *clientStream) retryLocked(lastErr error) error { + for { + cs.attempt.finish(lastErr) + if err := cs.shouldRetry(lastErr); err != nil { + cs.commitAttemptLocked() + return err + } + if err := cs.newAttemptLocked(nil, nil); err != nil { + return err + } + if lastErr = cs.replayBufferLocked(); lastErr == nil { + return nil + } + } +} + +func (cs *clientStream) Context() context.Context { + cs.commitAttempt() + // No need to lock before using attempt, since we know it is committed and + // cannot change. 
+ return cs.attempt.s.Context() +} + +func (cs *clientStream) withRetry(op func(a *csAttempt) error, onSuccess func()) error { + cs.mu.Lock() + for { + if cs.committed { + cs.mu.Unlock() + return op(cs.attempt) + } + a := cs.attempt + cs.mu.Unlock() + err := op(a) + cs.mu.Lock() + if a != cs.attempt { + // We started another attempt already. + continue + } + if err == io.EOF { + <-a.s.Done() + } + if err == nil || (err == io.EOF && a.s.Status().Code() == codes.OK) { + onSuccess() + cs.mu.Unlock() + return err + } + if err := cs.retryLocked(err); err != nil { + cs.mu.Unlock() + return err + } + } +} + +func (cs *clientStream) Header() (metadata.MD, error) { + var m metadata.MD + err := cs.withRetry(func(a *csAttempt) error { + var err error + m, err = a.s.Header() + return toRPCErr(err) + }, cs.commitAttemptLocked) + if err != nil { + cs.finish(err) + return nil, err + } + if cs.binlog != nil && !cs.serverHeaderBinlogged { + // Only log if binary log is on and header has not been logged. + logEntry := &binarylog.ServerHeader{ + OnClientSide: true, + Header: m, + PeerAddr: nil, + } + if peer, ok := peer.FromContext(cs.Context()); ok { + logEntry.PeerAddr = peer.Addr + } + cs.binlog.Log(logEntry) + cs.serverHeaderBinlogged = true + } + return m, err +} + +func (cs *clientStream) Trailer() metadata.MD { + // On RPC failure, we never need to retry, because usage requires that + // RecvMsg() returned a non-nil error before calling this function is valid. + // We would have retried earlier if necessary. + // + // Commit the attempt anyway, just in case users are not following those + // directions -- it will prevent races and should not meaningfully impact + // performance. + cs.commitAttempt() + if cs.attempt.s == nil { + return nil + } + return cs.attempt.s.Trailer() +} + +func (cs *clientStream) replayBufferLocked() error { + a := cs.attempt + for _, f := range cs.buffer { + if err := f(a); err != nil { + return err + } + } + return nil +} + +func (cs *clientStream) bufferForRetryLocked(sz int, op func(a *csAttempt) error) { + // Note: we still will buffer if retry is disabled (for transparent retries). + if cs.committed { + return + } + cs.bufferSize += sz + if cs.bufferSize > cs.callInfo.maxRetryRPCBufferSize { + cs.commitAttemptLocked() + return + } + cs.buffer = append(cs.buffer, op) +} + +func (cs *clientStream) SendMsg(m interface{}) (err error) { + defer func() { + if err != nil && err != io.EOF { + // Call finish on the client stream for errors generated by this SendMsg + // call, as these indicate problems created by this client. (Transport + // errors are converted to an io.EOF error in csAttempt.sendMsg; the real + // error will be returned from RecvMsg eventually in that case, or be + // retried.) + cs.finish(err) + } + }() + if cs.sentLast { + return status.Errorf(codes.Internal, "SendMsg called after CloseSend") + } + if !cs.desc.ClientStreams { + cs.sentLast = true + } + data, err := encode(cs.codec, m) + if err != nil { + return err + } + compData, err := compress(data, cs.cp, cs.comp) + if err != nil { + return err + } + hdr, payload := msgHeader(data, compData) + // TODO(dfawley): should we be checking len(data) instead? + if len(payload) > *cs.callInfo.maxSendMessageSize { + return status.Errorf(codes.ResourceExhausted, "trying to send message larger than max (%d vs. %d)", len(payload), *cs.callInfo.maxSendMessageSize) + } + msgBytes := data // Store the pointer before setting to nil. For binary logging. 
+ op := func(a *csAttempt) error { + err := a.sendMsg(m, hdr, payload, data) + // nil out the message and uncomp when replaying; they are only needed for + // stats which is disabled for subsequent attempts. + m, data = nil, nil + return err + } + err = cs.withRetry(op, func() { cs.bufferForRetryLocked(len(hdr)+len(payload), op) }) + if cs.binlog != nil && err == nil { + cs.binlog.Log(&binarylog.ClientMessage{ + OnClientSide: true, + Message: msgBytes, + }) + } + return +} + +func (cs *clientStream) RecvMsg(m interface{}) error { + if cs.binlog != nil && !cs.serverHeaderBinlogged { + // Call Header() to binary log header if it's not already logged. + cs.Header() + } + var recvInfo *payloadInfo + if cs.binlog != nil { + recvInfo = &payloadInfo{} + } + err := cs.withRetry(func(a *csAttempt) error { + return a.recvMsg(m, recvInfo) + }, cs.commitAttemptLocked) + if cs.binlog != nil && err == nil { + cs.binlog.Log(&binarylog.ServerMessage{ + OnClientSide: true, + Message: recvInfo.uncompressedBytes, + }) + } + if err != nil || !cs.desc.ServerStreams { + // err != nil or non-server-streaming indicates end of stream. + cs.finish(err) + + if cs.binlog != nil { + // finish will not log Trailer. Log Trailer here. + logEntry := &binarylog.ServerTrailer{ + OnClientSide: true, + Trailer: cs.Trailer(), + Err: err, + } + if logEntry.Err == io.EOF { + logEntry.Err = nil + } + if peer, ok := peer.FromContext(cs.Context()); ok { + logEntry.PeerAddr = peer.Addr + } + cs.binlog.Log(logEntry) + } + } + return err +} + +func (cs *clientStream) CloseSend() error { + if cs.sentLast { + // TODO: return an error and finish the stream instead, due to API misuse? + return nil + } + cs.sentLast = true + op := func(a *csAttempt) error { + a.t.Write(a.s, nil, nil, &transport.Options{Last: true}) + // Always return nil; io.EOF is the only error that might make sense + // instead, but there is no need to signal the client to call RecvMsg + // as the only use left for the stream after CloseSend is to call + // RecvMsg. This also matches historical behavior. + return nil + } + cs.withRetry(op, func() { cs.bufferForRetryLocked(0, op) }) + if cs.binlog != nil { + cs.binlog.Log(&binarylog.ClientHalfClose{ + OnClientSide: true, + }) + } + // We never returned an error here for reasons. + return nil +} + +func (cs *clientStream) finish(err error) { + if err == io.EOF { + // Ending a stream with EOF indicates a success. + err = nil + } + cs.mu.Lock() + if cs.finished { + cs.mu.Unlock() + return + } + cs.finished = true + cs.commitAttemptLocked() + cs.mu.Unlock() + // For binary logging. only log cancel in finish (could be caused by RPC ctx + // canceled or ClientConn closed). Trailer will be logged in RecvMsg. + // + // Only one of cancel or trailer needs to be logged. In the cases where + // users don't call RecvMsg, users must have already canceled the RPC. + if cs.binlog != nil && status.Code(err) == codes.Canceled { + cs.binlog.Log(&binarylog.Cancel{ + OnClientSide: true, + }) + } + if err == nil { + cs.retryThrottler.successfulRPC() + } + if channelz.IsOn() { + if err != nil { + cs.cc.incrCallsFailed() + } else { + cs.cc.incrCallsSucceeded() + } + } + if cs.attempt != nil { + cs.attempt.finish(err) + } + // after functions all rely upon having a stream. 
+ if cs.attempt.s != nil { + for _, o := range cs.opts { + o.after(cs.callInfo) + } + } + cs.cancel() +} + +func (a *csAttempt) sendMsg(m interface{}, hdr, payld, data []byte) error { + cs := a.cs + if a.trInfo != nil { + a.mu.Lock() + if a.trInfo.tr != nil { + a.trInfo.tr.LazyLog(&payload{sent: true, msg: m}, true) + } + a.mu.Unlock() + } + if err := a.t.Write(a.s, hdr, payld, &transport.Options{Last: !cs.desc.ClientStreams}); err != nil { + if !cs.desc.ClientStreams { + // For non-client-streaming RPCs, we return nil instead of EOF on error + // because the generated code requires it. finish is not called; RecvMsg() + // will call it with the stream's status independently. + return nil + } + return io.EOF + } + if a.statsHandler != nil { + a.statsHandler.HandleRPC(cs.ctx, outPayload(true, m, data, payld, time.Now())) + } + if channelz.IsOn() { + a.t.IncrMsgSent() + } + return nil +} + +func (a *csAttempt) recvMsg(m interface{}, payInfo *payloadInfo) (err error) { + cs := a.cs + if a.statsHandler != nil && payInfo == nil { + payInfo = &payloadInfo{} + } + + if !a.decompSet { + // Block until we receive headers containing received message encoding. + if ct := a.s.RecvCompress(); ct != "" && ct != encoding.Identity { + if a.dc == nil || a.dc.Type() != ct { + // No configured decompressor, or it does not match the incoming + // message encoding; attempt to find a registered compressor that does. + a.dc = nil + a.decomp = encoding.GetCompressor(ct) + } + } else { + // No compression is used; disable our decompressor. + a.dc = nil + } + // Only initialize this state once per stream. + a.decompSet = true + } + err = recv(a.p, cs.codec, a.s, a.dc, m, *cs.callInfo.maxReceiveMessageSize, payInfo, a.decomp) + if err != nil { + if err == io.EOF { + if statusErr := a.s.Status().Err(); statusErr != nil { + return statusErr + } + return io.EOF // indicates successful end of stream. + } + return toRPCErr(err) + } + if a.trInfo != nil { + a.mu.Lock() + if a.trInfo.tr != nil { + a.trInfo.tr.LazyLog(&payload{sent: false, msg: m}, true) + } + a.mu.Unlock() + } + if a.statsHandler != nil { + a.statsHandler.HandleRPC(cs.ctx, &stats.InPayload{ + Client: true, + RecvTime: time.Now(), + Payload: m, + // TODO truncate large payload. + Data: payInfo.uncompressedBytes, + WireLength: payInfo.wireLength, + Length: len(payInfo.uncompressedBytes), + }) + } + if channelz.IsOn() { + a.t.IncrMsgRecv() + } + if cs.desc.ServerStreams { + // Subsequent messages should be received by subsequent RecvMsg calls. + return nil + } + // Special handling for non-server-stream rpcs. + // This recv expects EOF or errors, so we don't collect inPayload. + err = recv(a.p, cs.codec, a.s, a.dc, m, *cs.callInfo.maxReceiveMessageSize, nil, a.decomp) + if err == nil { + return toRPCErr(errors.New("grpc: client streaming protocol violation: get , want ")) + } + if err == io.EOF { + return a.s.Status().Err() // non-server streaming Recv returns nil on success + } + return toRPCErr(err) +} + +func (a *csAttempt) finish(err error) { + a.mu.Lock() + if a.finished { + a.mu.Unlock() + return + } + a.finished = true + if err == io.EOF { + // Ending a stream with EOF indicates a success. 
+ err = nil + } + var tr metadata.MD + if a.s != nil { + a.t.CloseStream(a.s, err) + tr = a.s.Trailer() + } + + if a.done != nil { + br := false + if a.s != nil { + br = a.s.BytesReceived() + } + a.done(balancer.DoneInfo{ + Err: err, + Trailer: tr, + BytesSent: a.s != nil, + BytesReceived: br, + ServerLoad: balancerload.Parse(tr), + }) + } + if a.statsHandler != nil { + end := &stats.End{ + Client: true, + BeginTime: a.cs.beginTime, + EndTime: time.Now(), + Trailer: tr, + Error: err, + } + a.statsHandler.HandleRPC(a.cs.ctx, end) + } + if a.trInfo != nil && a.trInfo.tr != nil { + if err == nil { + a.trInfo.tr.LazyPrintf("RPC: [OK]") + } else { + a.trInfo.tr.LazyPrintf("RPC: [%v]", err) + a.trInfo.tr.SetError() + } + a.trInfo.tr.Finish() + a.trInfo.tr = nil + } + a.mu.Unlock() +} + +func (ac *addrConn) newClientStream(ctx context.Context, desc *StreamDesc, method string, t transport.ClientTransport, opts ...CallOption) (_ ClientStream, err error) { + ac.mu.Lock() + if ac.transport != t { + ac.mu.Unlock() + return nil, status.Error(codes.Canceled, "the provided transport is no longer valid to use") + } + // transition to CONNECTING state when an attempt starts + if ac.state != connectivity.Connecting { + ac.updateConnectivityState(connectivity.Connecting) + ac.cc.handleSubConnStateChange(ac.acbw, ac.state) + } + ac.mu.Unlock() + + if t == nil { + // TODO: return RPC error here? + return nil, errors.New("transport provided is nil") + } + // defaultCallInfo contains unnecessary info(i.e. failfast, maxRetryRPCBufferSize), so we just initialize an empty struct. + c := &callInfo{} + + for _, o := range opts { + if err := o.before(c); err != nil { + return nil, toRPCErr(err) + } + } + c.maxReceiveMessageSize = getMaxSize(nil, c.maxReceiveMessageSize, defaultClientMaxReceiveMessageSize) + c.maxSendMessageSize = getMaxSize(nil, c.maxSendMessageSize, defaultServerMaxSendMessageSize) + + // Possible context leak: + // The cancel function for the child context we create will only be called + // when RecvMsg returns a non-nil error, if the ClientConn is closed, or if + // an error is generated by SendMsg. + // https://github.com/grpc/grpc-go/issues/1818. + ctx, cancel := context.WithCancel(ctx) + defer func() { + if err != nil { + cancel() + } + }() + + if err := setCallInfoCodec(c); err != nil { + return nil, err + } + + callHdr := &transport.CallHdr{ + Host: ac.cc.authority, + Method: method, + ContentSubtype: c.contentSubtype, + } + + // Set our outgoing compression according to the UseCompressor CallOption, if + // set. In that case, also find the compressor from the encoding package. + // Otherwise, use the compressor configured by the WithCompressor DialOption, + // if set. 
+ var cp Compressor + var comp encoding.Compressor + if ct := c.compressorType; ct != "" { + callHdr.SendCompress = ct + if ct != encoding.Identity { + comp = encoding.GetCompressor(ct) + if comp == nil { + return nil, status.Errorf(codes.Internal, "grpc: Compressor is not installed for requested grpc-encoding %q", ct) + } + } + } else if ac.cc.dopts.cp != nil { + callHdr.SendCompress = ac.cc.dopts.cp.Type() + cp = ac.cc.dopts.cp + } + if c.creds != nil { + callHdr.Creds = c.creds + } + + as := &addrConnStream{ + callHdr: callHdr, + ac: ac, + ctx: ctx, + cancel: cancel, + opts: opts, + callInfo: c, + desc: desc, + codec: c.codec, + cp: cp, + comp: comp, + t: t, + } + + as.callInfo.stream = as + s, err := as.t.NewStream(as.ctx, as.callHdr) + if err != nil { + err = toRPCErr(err) + return nil, err + } + as.s = s + as.p = &parser{r: s} + ac.incrCallsStarted() + if desc != unaryStreamDesc { + // Listen on cc and stream contexts to cleanup when the user closes the + // ClientConn or cancels the stream context. In all other cases, an error + // should already be injected into the recv buffer by the transport, which + // the client will eventually receive, and then we will cancel the stream's + // context in clientStream.finish. + go func() { + select { + case <-ac.ctx.Done(): + as.finish(status.Error(codes.Canceled, "grpc: the SubConn is closing")) + case <-ctx.Done(): + as.finish(toRPCErr(ctx.Err())) + } + }() + } + return as, nil +} + +type addrConnStream struct { + s *transport.Stream + ac *addrConn + callHdr *transport.CallHdr + cancel context.CancelFunc + opts []CallOption + callInfo *callInfo + t transport.ClientTransport + ctx context.Context + sentLast bool + desc *StreamDesc + codec baseCodec + cp Compressor + comp encoding.Compressor + decompSet bool + dc Decompressor + decomp encoding.Compressor + p *parser + mu sync.Mutex + finished bool +} + +func (as *addrConnStream) Header() (metadata.MD, error) { + m, err := as.s.Header() + if err != nil { + as.finish(toRPCErr(err)) + } + return m, err +} + +func (as *addrConnStream) Trailer() metadata.MD { + return as.s.Trailer() +} + +func (as *addrConnStream) CloseSend() error { + if as.sentLast { + // TODO: return an error and finish the stream instead, due to API misuse? + return nil + } + as.sentLast = true + + as.t.Write(as.s, nil, nil, &transport.Options{Last: true}) + // Always return nil; io.EOF is the only error that might make sense + // instead, but there is no need to signal the client to call RecvMsg + // as the only use left for the stream after CloseSend is to call + // RecvMsg. This also matches historical behavior. + return nil +} + +func (as *addrConnStream) Context() context.Context { + return as.s.Context() +} + +func (as *addrConnStream) SendMsg(m interface{}) (err error) { + defer func() { + if err != nil && err != io.EOF { + // Call finish on the client stream for errors generated by this SendMsg + // call, as these indicate problems created by this client. (Transport + // errors are converted to an io.EOF error in csAttempt.sendMsg; the real + // error will be returned from RecvMsg eventually in that case, or be + // retried.) 
+ as.finish(err) + } + }() + if as.sentLast { + return status.Errorf(codes.Internal, "SendMsg called after CloseSend") + } + if !as.desc.ClientStreams { + as.sentLast = true + } + data, err := encode(as.codec, m) + if err != nil { + return err + } + compData, err := compress(data, as.cp, as.comp) + if err != nil { + return err + } + hdr, payld := msgHeader(data, compData) + // TODO(dfawley): should we be checking len(data) instead? + if len(payld) > *as.callInfo.maxSendMessageSize { + return status.Errorf(codes.ResourceExhausted, "trying to send message larger than max (%d vs. %d)", len(payld), *as.callInfo.maxSendMessageSize) + } + + if err := as.t.Write(as.s, hdr, payld, &transport.Options{Last: !as.desc.ClientStreams}); err != nil { + if !as.desc.ClientStreams { + // For non-client-streaming RPCs, we return nil instead of EOF on error + // because the generated code requires it. finish is not called; RecvMsg() + // will call it with the stream's status independently. + return nil + } + return io.EOF + } + + if channelz.IsOn() { + as.t.IncrMsgSent() + } + return nil +} + +func (as *addrConnStream) RecvMsg(m interface{}) (err error) { + defer func() { + if err != nil || !as.desc.ServerStreams { + // err != nil or non-server-streaming indicates end of stream. + as.finish(err) + } + }() + + if !as.decompSet { + // Block until we receive headers containing received message encoding. + if ct := as.s.RecvCompress(); ct != "" && ct != encoding.Identity { + if as.dc == nil || as.dc.Type() != ct { + // No configured decompressor, or it does not match the incoming + // message encoding; attempt to find a registered compressor that does. + as.dc = nil + as.decomp = encoding.GetCompressor(ct) + } + } else { + // No compression is used; disable our decompressor. + as.dc = nil + } + // Only initialize this state once per stream. + as.decompSet = true + } + err = recv(as.p, as.codec, as.s, as.dc, m, *as.callInfo.maxReceiveMessageSize, nil, as.decomp) + if err != nil { + if err == io.EOF { + if statusErr := as.s.Status().Err(); statusErr != nil { + return statusErr + } + return io.EOF // indicates successful end of stream. + } + return toRPCErr(err) + } + + if channelz.IsOn() { + as.t.IncrMsgRecv() + } + if as.desc.ServerStreams { + // Subsequent messages should be received by subsequent RecvMsg calls. + return nil + } + + // Special handling for non-server-stream rpcs. + // This recv expects EOF or errors, so we don't collect inPayload. + err = recv(as.p, as.codec, as.s, as.dc, m, *as.callInfo.maxReceiveMessageSize, nil, as.decomp) + if err == nil { + return toRPCErr(errors.New("grpc: client streaming protocol violation: get , want ")) + } + if err == io.EOF { + return as.s.Status().Err() // non-server streaming Recv returns nil on success + } + return toRPCErr(err) +} + +func (as *addrConnStream) finish(err error) { + as.mu.Lock() + if as.finished { + as.mu.Unlock() + return + } + as.finished = true + if err == io.EOF { + // Ending a stream with EOF indicates a success. + err = nil + } + if as.s != nil { + as.t.CloseStream(as.s, err) + } + + if err != nil { + as.ac.incrCallsFailed() + } else { + as.ac.incrCallsSucceeded() + } + as.cancel() + as.mu.Unlock() +} + +// ServerStream defines the server-side behavior of a streaming RPC. +// +// All errors returned from ServerStream methods are compatible with the +// status package. +type ServerStream interface { + // SetHeader sets the header metadata. It may be called multiple times. + // When call multiple times, all the provided metadata will be merged. 
+ // All the metadata will be sent out when one of the following happens: + // - ServerStream.SendHeader() is called; + // - The first response is sent out; + // - An RPC status is sent out (error or success). + SetHeader(metadata.MD) error + // SendHeader sends the header metadata. + // The provided md and headers set by SetHeader() will be sent. + // It fails if called multiple times. + SendHeader(metadata.MD) error + // SetTrailer sets the trailer metadata which will be sent with the RPC status. + // When called more than once, all the provided metadata will be merged. + SetTrailer(metadata.MD) + // Context returns the context for this stream. + Context() context.Context + // SendMsg sends a message. On error, SendMsg aborts the stream and the + // error is returned directly. + // + // SendMsg blocks until: + // - There is sufficient flow control to schedule m with the transport, or + // - The stream is done, or + // - The stream breaks. + // + // SendMsg does not wait until the message is received by the client. An + // untimely stream closure may result in lost messages. + // + // It is safe to have a goroutine calling SendMsg and another goroutine + // calling RecvMsg on the same stream at the same time, but it is not safe + // to call SendMsg on the same stream in different goroutines. + SendMsg(m interface{}) error + // RecvMsg blocks until it receives a message into m or the stream is + // done. It returns io.EOF when the client has performed a CloseSend. On + // any non-EOF error, the stream is aborted and the error contains the + // RPC status. + // + // It is safe to have a goroutine calling SendMsg and another goroutine + // calling RecvMsg on the same stream at the same time, but it is not + // safe to call RecvMsg on the same stream in different goroutines. + RecvMsg(m interface{}) error +} + +// serverStream implements a server side Stream. +type serverStream struct { + ctx context.Context + t transport.ServerTransport + s *transport.Stream + p *parser + codec baseCodec + + cp Compressor + dc Decompressor + comp encoding.Compressor + decomp encoding.Compressor + + maxReceiveMessageSize int + maxSendMessageSize int + trInfo *traceInfo + + statsHandler stats.Handler + + binlog *binarylog.MethodLogger + // serverHeaderBinlogged indicates whether server header has been logged. It + // will happen when one of the following two happens: stream.SendHeader(), + // stream.Send(). + // + // It's only checked in send and sendHeader, doesn't need to be + // synchronized. + serverHeaderBinlogged bool + + mu sync.Mutex // protects trInfo.tr after the service handler runs. 
+} + +func (ss *serverStream) Context() context.Context { + return ss.ctx +} + +func (ss *serverStream) SetHeader(md metadata.MD) error { + if md.Len() == 0 { + return nil + } + return ss.s.SetHeader(md) +} + +func (ss *serverStream) SendHeader(md metadata.MD) error { + err := ss.t.WriteHeader(ss.s, md) + if ss.binlog != nil && !ss.serverHeaderBinlogged { + h, _ := ss.s.Header() + ss.binlog.Log(&binarylog.ServerHeader{ + Header: h, + }) + ss.serverHeaderBinlogged = true + } + return err +} + +func (ss *serverStream) SetTrailer(md metadata.MD) { + if md.Len() == 0 { + return + } + ss.s.SetTrailer(md) +} + +func (ss *serverStream) SendMsg(m interface{}) (err error) { + defer func() { + if ss.trInfo != nil { + ss.mu.Lock() + if ss.trInfo.tr != nil { + if err == nil { + ss.trInfo.tr.LazyLog(&payload{sent: true, msg: m}, true) + } else { + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.SetError() + } + } + ss.mu.Unlock() + } + if err != nil && err != io.EOF { + st, _ := status.FromError(toRPCErr(err)) + ss.t.WriteStatus(ss.s, st) + // Non-user specified status was sent out. This should be an error + // case (as a server side Cancel maybe). + // + // This is not handled specifically now. User will return a final + // status from the service handler, we will log that error instead. + // This behavior is similar to an interceptor. + } + if channelz.IsOn() && err == nil { + ss.t.IncrMsgSent() + } + }() + data, err := encode(ss.codec, m) + if err != nil { + return err + } + compData, err := compress(data, ss.cp, ss.comp) + if err != nil { + return err + } + hdr, payload := msgHeader(data, compData) + // TODO(dfawley): should we be checking len(data) instead? + if len(payload) > ss.maxSendMessageSize { + return status.Errorf(codes.ResourceExhausted, "trying to send message larger than max (%d vs. %d)", len(payload), ss.maxSendMessageSize) + } + if err := ss.t.Write(ss.s, hdr, payload, &transport.Options{Last: false}); err != nil { + return toRPCErr(err) + } + if ss.binlog != nil { + if !ss.serverHeaderBinlogged { + h, _ := ss.s.Header() + ss.binlog.Log(&binarylog.ServerHeader{ + Header: h, + }) + ss.serverHeaderBinlogged = true + } + ss.binlog.Log(&binarylog.ServerMessage{ + Message: data, + }) + } + if ss.statsHandler != nil { + ss.statsHandler.HandleRPC(ss.s.Context(), outPayload(false, m, data, payload, time.Now())) + } + return nil +} + +func (ss *serverStream) RecvMsg(m interface{}) (err error) { + defer func() { + if ss.trInfo != nil { + ss.mu.Lock() + if ss.trInfo.tr != nil { + if err == nil { + ss.trInfo.tr.LazyLog(&payload{sent: false, msg: m}, true) + } else if err != io.EOF { + ss.trInfo.tr.LazyLog(&fmtStringer{"%v", []interface{}{err}}, true) + ss.trInfo.tr.SetError() + } + } + ss.mu.Unlock() + } + if err != nil && err != io.EOF { + st, _ := status.FromError(toRPCErr(err)) + ss.t.WriteStatus(ss.s, st) + // Non-user specified status was sent out. This should be an error + // case (as a server side Cancel maybe). + // + // This is not handled specifically now. User will return a final + // status from the service handler, we will log that error instead. + // This behavior is similar to an interceptor. 
+ } + if channelz.IsOn() && err == nil { + ss.t.IncrMsgRecv() + } + }() + var payInfo *payloadInfo + if ss.statsHandler != nil || ss.binlog != nil { + payInfo = &payloadInfo{} + } + if err := recv(ss.p, ss.codec, ss.s, ss.dc, m, ss.maxReceiveMessageSize, payInfo, ss.decomp); err != nil { + if err == io.EOF { + if ss.binlog != nil { + ss.binlog.Log(&binarylog.ClientHalfClose{}) + } + return err + } + if err == io.ErrUnexpectedEOF { + err = status.Errorf(codes.Internal, io.ErrUnexpectedEOF.Error()) + } + return toRPCErr(err) + } + if ss.statsHandler != nil { + ss.statsHandler.HandleRPC(ss.s.Context(), &stats.InPayload{ + RecvTime: time.Now(), + Payload: m, + // TODO truncate large payload. + Data: payInfo.uncompressedBytes, + WireLength: payInfo.wireLength, + Length: len(payInfo.uncompressedBytes), + }) + } + if ss.binlog != nil { + ss.binlog.Log(&binarylog.ClientMessage{ + Message: payInfo.uncompressedBytes, + }) + } + return nil +} + +// MethodFromServerStream returns the method string for the input stream. +// The returned string is in the format of "/service/method". +func MethodFromServerStream(stream ServerStream) (string, bool) { + return Method(stream.Context()) +} diff --git a/vendor/google.golang.org/grpc/stress/client/main.go b/vendor/google.golang.org/grpc/stress/client/main.go new file mode 100644 index 0000000..d9115c9 --- /dev/null +++ b/vendor/google.golang.org/grpc/stress/client/main.go @@ -0,0 +1,337 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +//go:generate protoc -I ../grpc_testing --go_out=plugins=grpc:../grpc_testing ../grpc_testing/metrics.proto + +// client starts an interop client to do stress test and a metrics server to report qps. 
+package main + +import ( + "context" + "flag" + "fmt" + "math/rand" + "net" + "strconv" + "strings" + "sync" + "time" + + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/interop" + testpb "google.golang.org/grpc/interop/grpc_testing" + "google.golang.org/grpc/status" + metricspb "google.golang.org/grpc/stress/grpc_testing" + "google.golang.org/grpc/testdata" +) + +var ( + serverAddresses = flag.String("server_addresses", "localhost:8080", "a list of server addresses") + testCases = flag.String("test_cases", "", "a list of test cases along with the relative weights") + testDurationSecs = flag.Int("test_duration_secs", -1, "test duration in seconds") + numChannelsPerServer = flag.Int("num_channels_per_server", 1, "Number of channels (i.e connections) to each server") + numStubsPerChannel = flag.Int("num_stubs_per_channel", 1, "Number of client stubs per each connection to server") + metricsPort = flag.Int("metrics_port", 8081, "The port at which the stress client exposes QPS metrics") + useTLS = flag.Bool("use_tls", false, "Connection uses TLS if true, else plain TCP") + testCA = flag.Bool("use_test_ca", false, "Whether to replace platform root CAs with test CA as the CA root") + tlsServerName = flag.String("server_host_override", "foo.test.google.fr", "The server name use to verify the hostname returned by TLS handshake if it is not empty. Otherwise, --server_host is used.") + caFile = flag.String("ca_file", "", "The file containning the CA root cert file") +) + +// testCaseWithWeight contains the test case type and its weight. +type testCaseWithWeight struct { + name string + weight int +} + +// parseTestCases converts test case string to a list of struct testCaseWithWeight. +func parseTestCases(testCaseString string) []testCaseWithWeight { + testCaseStrings := strings.Split(testCaseString, ",") + testCases := make([]testCaseWithWeight, len(testCaseStrings)) + for i, str := range testCaseStrings { + testCase := strings.Split(str, ":") + if len(testCase) != 2 { + panic(fmt.Sprintf("invalid test case with weight: %s", str)) + } + // Check if test case is supported. + switch testCase[0] { + case + "empty_unary", + "large_unary", + "client_streaming", + "server_streaming", + "ping_pong", + "empty_stream", + "timeout_on_sleeping_server", + "cancel_after_begin", + "cancel_after_first_response", + "status_code_and_message", + "custom_metadata": + default: + panic(fmt.Sprintf("unknown test type: %s", testCase[0])) + } + testCases[i].name = testCase[0] + w, err := strconv.Atoi(testCase[1]) + if err != nil { + panic(fmt.Sprintf("%v", err)) + } + testCases[i].weight = w + } + return testCases +} + +// weightedRandomTestSelector defines a weighted random selector for test case types. +type weightedRandomTestSelector struct { + tests []testCaseWithWeight + totalWeight int +} + +// newWeightedRandomTestSelector constructs a weightedRandomTestSelector with the given list of testCaseWithWeight. 
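// Editorial aside, illustrative only: the -test_cases flag parsed by
// parseTestCases above is a comma-separated list of "name:weight" pairs, so a
// run split 20/10/70 across three interop tests would be launched with flags
// along the lines of (binary name and weights are examples, not taken from
// this patch):
//
//	stress_client -server_addresses=localhost:8080 \
//	    -test_cases=empty_unary:20,large_unary:10,ping_pong:70 \
//	    -test_duration_secs=60
//
// parseTestCases turns that string into three testCaseWithWeight values, and
// the selector constructed below draws tests in proportion to those weights.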
+func newWeightedRandomTestSelector(tests []testCaseWithWeight) *weightedRandomTestSelector { + var totalWeight int + for _, t := range tests { + totalWeight += t.weight + } + rand.Seed(time.Now().UnixNano()) + return &weightedRandomTestSelector{tests, totalWeight} +} + +func (selector weightedRandomTestSelector) getNextTest() string { + random := rand.Intn(selector.totalWeight) + var weightSofar int + for _, test := range selector.tests { + weightSofar += test.weight + if random < weightSofar { + return test.name + } + } + panic("no test case selected by weightedRandomTestSelector") +} + +// gauge stores the qps of one interop client (one stub). +type gauge struct { + mutex sync.RWMutex + val int64 +} + +func (g *gauge) set(v int64) { + g.mutex.Lock() + defer g.mutex.Unlock() + g.val = v +} + +func (g *gauge) get() int64 { + g.mutex.RLock() + defer g.mutex.RUnlock() + return g.val +} + +// server implements metrics server functions. +type server struct { + mutex sync.RWMutex + // gauges is a map from /stress_test/server_/channel_/stub_/qps to its qps gauge. + gauges map[string]*gauge +} + +// newMetricsServer returns a new metrics server. +func newMetricsServer() *server { + return &server{gauges: make(map[string]*gauge)} +} + +// GetAllGauges returns all gauges. +func (s *server) GetAllGauges(in *metricspb.EmptyMessage, stream metricspb.MetricsService_GetAllGaugesServer) error { + s.mutex.RLock() + defer s.mutex.RUnlock() + + for name, gauge := range s.gauges { + if err := stream.Send(&metricspb.GaugeResponse{Name: name, Value: &metricspb.GaugeResponse_LongValue{LongValue: gauge.get()}}); err != nil { + return err + } + } + return nil +} + +// GetGauge returns the gauge for the given name. +func (s *server) GetGauge(ctx context.Context, in *metricspb.GaugeRequest) (*metricspb.GaugeResponse, error) { + s.mutex.RLock() + defer s.mutex.RUnlock() + + if g, ok := s.gauges[in.Name]; ok { + return &metricspb.GaugeResponse{Name: in.Name, Value: &metricspb.GaugeResponse_LongValue{LongValue: g.get()}}, nil + } + return nil, status.Errorf(codes.InvalidArgument, "gauge with name %s not found", in.Name) +} + +// createGauge creates a gauge using the given name in metrics server. +func (s *server) createGauge(name string) *gauge { + s.mutex.Lock() + defer s.mutex.Unlock() + + if _, ok := s.gauges[name]; ok { + // gauge already exists. + panic(fmt.Sprintf("gauge %s already exists", name)) + } + var g gauge + s.gauges[name] = &g + return &g +} + +func startServer(server *server, port int) { + lis, err := net.Listen("tcp", ":"+strconv.Itoa(port)) + if err != nil { + grpclog.Fatalf("failed to listen: %v", err) + } + + s := grpc.NewServer() + metricspb.RegisterMetricsServiceServer(s, server) + s.Serve(lis) + +} + +// performRPCs uses weightedRandomTestSelector to select test case and runs the tests. 
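// Editorial aside, illustrative only: a worked example of getNextTest above.
// With the example weights 20/10/70, totalWeight is 100 and rand.Intn(100)
// yields a value in [0, 100); the running sum weightSofar crosses 20, then
// 30, then 100, so draws 0-19 select the first test, 20-29 the second and
// 30-99 the third -- each test is picked with probability weight/totalWeight.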
+func performRPCs(gauge *gauge, conn *grpc.ClientConn, selector *weightedRandomTestSelector, stop <-chan bool) { + client := testpb.NewTestServiceClient(conn) + var numCalls int64 + startTime := time.Now() + for { + test := selector.getNextTest() + switch test { + case "empty_unary": + interop.DoEmptyUnaryCall(client, grpc.WaitForReady(true)) + case "large_unary": + interop.DoLargeUnaryCall(client, grpc.WaitForReady(true)) + case "client_streaming": + interop.DoClientStreaming(client, grpc.WaitForReady(true)) + case "server_streaming": + interop.DoServerStreaming(client, grpc.WaitForReady(true)) + case "ping_pong": + interop.DoPingPong(client, grpc.WaitForReady(true)) + case "empty_stream": + interop.DoEmptyStream(client, grpc.WaitForReady(true)) + case "timeout_on_sleeping_server": + interop.DoTimeoutOnSleepingServer(client, grpc.WaitForReady(true)) + case "cancel_after_begin": + interop.DoCancelAfterBegin(client, grpc.WaitForReady(true)) + case "cancel_after_first_response": + interop.DoCancelAfterFirstResponse(client, grpc.WaitForReady(true)) + case "status_code_and_message": + interop.DoStatusCodeAndMessage(client, grpc.WaitForReady(true)) + case "custom_metadata": + interop.DoCustomMetadata(client, grpc.WaitForReady(true)) + } + numCalls++ + gauge.set(int64(float64(numCalls) / time.Since(startTime).Seconds())) + + select { + case <-stop: + return + default: + } + } +} + +func logParameterInfo(addresses []string, tests []testCaseWithWeight) { + grpclog.Infof("server_addresses: %s", *serverAddresses) + grpclog.Infof("test_cases: %s", *testCases) + grpclog.Infof("test_duration_secs: %d", *testDurationSecs) + grpclog.Infof("num_channels_per_server: %d", *numChannelsPerServer) + grpclog.Infof("num_stubs_per_channel: %d", *numStubsPerChannel) + grpclog.Infof("metrics_port: %d", *metricsPort) + grpclog.Infof("use_tls: %t", *useTLS) + grpclog.Infof("use_test_ca: %t", *testCA) + grpclog.Infof("server_host_override: %s", *tlsServerName) + + grpclog.Infoln("addresses:") + for i, addr := range addresses { + grpclog.Infof("%d. %s\n", i+1, addr) + } + grpclog.Infoln("tests:") + for i, test := range tests { + grpclog.Infof("%d. %v\n", i+1, test) + } +} + +func newConn(address string, useTLS, testCA bool, tlsServerName string) (*grpc.ClientConn, error) { + var opts []grpc.DialOption + if useTLS { + var sn string + if tlsServerName != "" { + sn = tlsServerName + } + var creds credentials.TransportCredentials + if testCA { + var err error + if *caFile == "" { + *caFile = testdata.Path("ca.pem") + } + creds, err = credentials.NewClientTLSFromFile(*caFile, sn) + if err != nil { + grpclog.Fatalf("Failed to create TLS credentials %v", err) + } + } else { + creds = credentials.NewClientTLSFromCert(nil, sn) + } + opts = append(opts, grpc.WithTransportCredentials(creds)) + } else { + opts = append(opts, grpc.WithInsecure()) + } + return grpc.Dial(address, opts...) 
+} + +func main() { + flag.Parse() + addresses := strings.Split(*serverAddresses, ",") + tests := parseTestCases(*testCases) + logParameterInfo(addresses, tests) + testSelector := newWeightedRandomTestSelector(tests) + metricsServer := newMetricsServer() + + var wg sync.WaitGroup + wg.Add(len(addresses) * *numChannelsPerServer * *numStubsPerChannel) + stop := make(chan bool) + + for serverIndex, address := range addresses { + for connIndex := 0; connIndex < *numChannelsPerServer; connIndex++ { + conn, err := newConn(address, *useTLS, *testCA, *tlsServerName) + if err != nil { + grpclog.Fatalf("Fail to dial: %v", err) + } + defer conn.Close() + for clientIndex := 0; clientIndex < *numStubsPerChannel; clientIndex++ { + name := fmt.Sprintf("/stress_test/server_%d/channel_%d/stub_%d/qps", serverIndex+1, connIndex+1, clientIndex+1) + go func() { + defer wg.Done() + g := metricsServer.createGauge(name) + performRPCs(g, conn, testSelector, stop) + }() + } + + } + } + go startServer(metricsServer, *metricsPort) + if *testDurationSecs > 0 { + time.Sleep(time.Duration(*testDurationSecs) * time.Second) + close(stop) + } + wg.Wait() + grpclog.Infof(" ===== ALL DONE ===== ") + +} diff --git a/vendor/google.golang.org/grpc/stress/grpc_testing/metrics.pb.go b/vendor/google.golang.org/grpc/stress/grpc_testing/metrics.pb.go new file mode 100644 index 0000000..0a8ad44 --- /dev/null +++ b/vendor/google.golang.org/grpc/stress/grpc_testing/metrics.pb.go @@ -0,0 +1,433 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: metrics.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Response message containing the gauge name and value +type GaugeResponse struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Types that are valid to be assigned to Value: + // *GaugeResponse_LongValue + // *GaugeResponse_DoubleValue + // *GaugeResponse_StringValue + Value isGaugeResponse_Value `protobuf_oneof:"value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GaugeResponse) Reset() { *m = GaugeResponse{} } +func (m *GaugeResponse) String() string { return proto.CompactTextString(m) } +func (*GaugeResponse) ProtoMessage() {} +func (*GaugeResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metrics_c9a45afc44ac5637, []int{0} +} +func (m *GaugeResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GaugeResponse.Unmarshal(m, b) +} +func (m *GaugeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GaugeResponse.Marshal(b, m, deterministic) +} +func (dst *GaugeResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GaugeResponse.Merge(dst, src) +} +func (m *GaugeResponse) XXX_Size() int { + return xxx_messageInfo_GaugeResponse.Size(m) +} +func (m *GaugeResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GaugeResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GaugeResponse proto.InternalMessageInfo + +func (m *GaugeResponse) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type isGaugeResponse_Value interface { + isGaugeResponse_Value() +} + +type GaugeResponse_LongValue struct { + LongValue int64 `protobuf:"varint,2,opt,name=long_value,json=longValue,proto3,oneof"` +} + +type GaugeResponse_DoubleValue struct { + DoubleValue float64 `protobuf:"fixed64,3,opt,name=double_value,json=doubleValue,proto3,oneof"` +} + +type GaugeResponse_StringValue struct { + StringValue string `protobuf:"bytes,4,opt,name=string_value,json=stringValue,proto3,oneof"` +} + +func (*GaugeResponse_LongValue) isGaugeResponse_Value() {} + +func (*GaugeResponse_DoubleValue) isGaugeResponse_Value() {} + +func (*GaugeResponse_StringValue) isGaugeResponse_Value() {} + +func (m *GaugeResponse) GetValue() isGaugeResponse_Value { + if m != nil { + return m.Value + } + return nil +} + +func (m *GaugeResponse) GetLongValue() int64 { + if x, ok := m.GetValue().(*GaugeResponse_LongValue); ok { + return x.LongValue + } + return 0 +} + +func (m *GaugeResponse) GetDoubleValue() float64 { + if x, ok := m.GetValue().(*GaugeResponse_DoubleValue); ok { + return x.DoubleValue + } + return 0 +} + +func (m *GaugeResponse) GetStringValue() string { + if x, ok := m.GetValue().(*GaugeResponse_StringValue); ok { + return x.StringValue + } + return "" +} + +// XXX_OneofFuncs is for the internal use of the proto package. 
+func (*GaugeResponse) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _GaugeResponse_OneofMarshaler, _GaugeResponse_OneofUnmarshaler, _GaugeResponse_OneofSizer, []interface{}{ + (*GaugeResponse_LongValue)(nil), + (*GaugeResponse_DoubleValue)(nil), + (*GaugeResponse_StringValue)(nil), + } +} + +func _GaugeResponse_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*GaugeResponse) + // value + switch x := m.Value.(type) { + case *GaugeResponse_LongValue: + b.EncodeVarint(2<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.LongValue)) + case *GaugeResponse_DoubleValue: + b.EncodeVarint(3<<3 | proto.WireFixed64) + b.EncodeFixed64(math.Float64bits(x.DoubleValue)) + case *GaugeResponse_StringValue: + b.EncodeVarint(4<<3 | proto.WireBytes) + b.EncodeStringBytes(x.StringValue) + case nil: + default: + return fmt.Errorf("GaugeResponse.Value has unexpected type %T", x) + } + return nil +} + +func _GaugeResponse_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*GaugeResponse) + switch tag { + case 2: // value.long_value + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.Value = &GaugeResponse_LongValue{int64(x)} + return true, err + case 3: // value.double_value + if wire != proto.WireFixed64 { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeFixed64() + m.Value = &GaugeResponse_DoubleValue{math.Float64frombits(x)} + return true, err + case 4: // value.string_value + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Value = &GaugeResponse_StringValue{x} + return true, err + default: + return false, nil + } +} + +func _GaugeResponse_OneofSizer(msg proto.Message) (n int) { + m := msg.(*GaugeResponse) + // value + switch x := m.Value.(type) { + case *GaugeResponse_LongValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.LongValue)) + case *GaugeResponse_DoubleValue: + n += 1 // tag and wire + n += 8 + case *GaugeResponse_StringValue: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.StringValue))) + n += len(x.StringValue) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +// Request message containing the gauge name +type GaugeRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GaugeRequest) Reset() { *m = GaugeRequest{} } +func (m *GaugeRequest) String() string { return proto.CompactTextString(m) } +func (*GaugeRequest) ProtoMessage() {} +func (*GaugeRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metrics_c9a45afc44ac5637, []int{1} +} +func (m *GaugeRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GaugeRequest.Unmarshal(m, b) +} +func (m *GaugeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GaugeRequest.Marshal(b, m, deterministic) +} +func (dst *GaugeRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GaugeRequest.Merge(dst, src) +} +func (m *GaugeRequest) XXX_Size() int { + return xxx_messageInfo_GaugeRequest.Size(m) +} +func (m *GaugeRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GaugeRequest.DiscardUnknown(m) +} 
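// Editorial aside, illustrative only -- a minimal sketch, not part of the
// generated file, of how a consumer reads the GaugeResponse oneof declared
// above using the generated getters and wrapper types; gaugeToString is a
// hypothetical helper.
func gaugeToString(r *GaugeResponse) string {
	switch v := r.GetValue().(type) {
	case *GaugeResponse_LongValue:
		return fmt.Sprintf("%s=%d", r.GetName(), v.LongValue)
	case *GaugeResponse_DoubleValue:
		return fmt.Sprintf("%s=%g", r.GetName(), v.DoubleValue)
	case *GaugeResponse_StringValue:
		return fmt.Sprintf("%s=%s", r.GetName(), v.StringValue)
	default:
		return r.GetName() // no value set
	}
}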
+ +var xxx_messageInfo_GaugeRequest proto.InternalMessageInfo + +func (m *GaugeRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type EmptyMessage struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EmptyMessage) Reset() { *m = EmptyMessage{} } +func (m *EmptyMessage) String() string { return proto.CompactTextString(m) } +func (*EmptyMessage) ProtoMessage() {} +func (*EmptyMessage) Descriptor() ([]byte, []int) { + return fileDescriptor_metrics_c9a45afc44ac5637, []int{2} +} +func (m *EmptyMessage) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EmptyMessage.Unmarshal(m, b) +} +func (m *EmptyMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EmptyMessage.Marshal(b, m, deterministic) +} +func (dst *EmptyMessage) XXX_Merge(src proto.Message) { + xxx_messageInfo_EmptyMessage.Merge(dst, src) +} +func (m *EmptyMessage) XXX_Size() int { + return xxx_messageInfo_EmptyMessage.Size(m) +} +func (m *EmptyMessage) XXX_DiscardUnknown() { + xxx_messageInfo_EmptyMessage.DiscardUnknown(m) +} + +var xxx_messageInfo_EmptyMessage proto.InternalMessageInfo + +func init() { + proto.RegisterType((*GaugeResponse)(nil), "grpc.testing.GaugeResponse") + proto.RegisterType((*GaugeRequest)(nil), "grpc.testing.GaugeRequest") + proto.RegisterType((*EmptyMessage)(nil), "grpc.testing.EmptyMessage") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetricsServiceClient is the client API for MetricsService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MetricsServiceClient interface { + // Returns the values of all the gauges that are currently being maintained by + // the service + GetAllGauges(ctx context.Context, in *EmptyMessage, opts ...grpc.CallOption) (MetricsService_GetAllGaugesClient, error) + // Returns the value of one gauge + GetGauge(ctx context.Context, in *GaugeRequest, opts ...grpc.CallOption) (*GaugeResponse, error) +} + +type metricsServiceClient struct { + cc *grpc.ClientConn +} + +func NewMetricsServiceClient(cc *grpc.ClientConn) MetricsServiceClient { + return &metricsServiceClient{cc} +} + +func (c *metricsServiceClient) GetAllGauges(ctx context.Context, in *EmptyMessage, opts ...grpc.CallOption) (MetricsService_GetAllGaugesClient, error) { + stream, err := c.cc.NewStream(ctx, &_MetricsService_serviceDesc.Streams[0], "/grpc.testing.MetricsService/GetAllGauges", opts...) 
+ if err != nil { + return nil, err + } + x := &metricsServiceGetAllGaugesClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type MetricsService_GetAllGaugesClient interface { + Recv() (*GaugeResponse, error) + grpc.ClientStream +} + +type metricsServiceGetAllGaugesClient struct { + grpc.ClientStream +} + +func (x *metricsServiceGetAllGaugesClient) Recv() (*GaugeResponse, error) { + m := new(GaugeResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *metricsServiceClient) GetGauge(ctx context.Context, in *GaugeRequest, opts ...grpc.CallOption) (*GaugeResponse, error) { + out := new(GaugeResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.MetricsService/GetGauge", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MetricsServiceServer is the server API for MetricsService service. +type MetricsServiceServer interface { + // Returns the values of all the gauges that are currently being maintained by + // the service + GetAllGauges(*EmptyMessage, MetricsService_GetAllGaugesServer) error + // Returns the value of one gauge + GetGauge(context.Context, *GaugeRequest) (*GaugeResponse, error) +} + +func RegisterMetricsServiceServer(s *grpc.Server, srv MetricsServiceServer) { + s.RegisterService(&_MetricsService_serviceDesc, srv) +} + +func _MetricsService_GetAllGauges_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(EmptyMessage) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(MetricsServiceServer).GetAllGauges(m, &metricsServiceGetAllGaugesServer{stream}) +} + +type MetricsService_GetAllGaugesServer interface { + Send(*GaugeResponse) error + grpc.ServerStream +} + +type metricsServiceGetAllGaugesServer struct { + grpc.ServerStream +} + +func (x *metricsServiceGetAllGaugesServer) Send(m *GaugeResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _MetricsService_GetGauge_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GaugeRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetricsServiceServer).GetGauge(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.MetricsService/GetGauge", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetricsServiceServer).GetGauge(ctx, req.(*GaugeRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetricsService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.MetricsService", + HandlerType: (*MetricsServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetGauge", + Handler: _MetricsService_GetGauge_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetAllGauges", + Handler: _MetricsService_GetAllGauges_Handler, + ServerStreams: true, + }, + }, + Metadata: "metrics.proto", +} + +func init() { proto.RegisterFile("metrics.proto", fileDescriptor_metrics_c9a45afc44ac5637) } + +var fileDescriptor_metrics_c9a45afc44ac5637 = []byte{ + // 256 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x3f, 0x4f, 0xc3, 0x30, + 0x10, 0xc5, 0x6b, 0x5a, 0xfe, 0xf4, 0x70, 0x3b, 0x78, 0xaa, 0xca, 0x40, 0x14, 0x96, 0x4c, 0x11, + 0x82, 0x4f, 0x00, 0x08, 
0xa5, 0x0c, 0x5d, 0x82, 0xc4, 0x8a, 0xd2, 0x70, 0xb2, 0x22, 0x39, 0x71, + 0xf0, 0x5d, 0x2a, 0xf1, 0x49, 0x58, 0xf9, 0xa8, 0xc8, 0x4e, 0x55, 0xa5, 0x08, 0x75, 0xb3, 0x7e, + 0xf7, 0xfc, 0xfc, 0x9e, 0x0f, 0x66, 0x35, 0xb2, 0xab, 0x4a, 0x4a, 0x5b, 0x67, 0xd9, 0x2a, 0xa9, + 0x5d, 0x5b, 0xa6, 0x8c, 0xc4, 0x55, 0xa3, 0xe3, 0x6f, 0x01, 0xb3, 0xac, 0xe8, 0x34, 0xe6, 0x48, + 0xad, 0x6d, 0x08, 0x95, 0x82, 0x49, 0x53, 0xd4, 0xb8, 0x10, 0x91, 0x48, 0xa6, 0x79, 0x38, 0xab, + 0x6b, 0x00, 0x63, 0x1b, 0xfd, 0xbe, 0x2d, 0x4c, 0x87, 0x8b, 0x93, 0x48, 0x24, 0xe3, 0xd5, 0x28, + 0x9f, 0x7a, 0xf6, 0xe6, 0x91, 0xba, 0x01, 0xf9, 0x61, 0xbb, 0x8d, 0xc1, 0x9d, 0x64, 0x1c, 0x89, + 0x44, 0xac, 0x46, 0xf9, 0x65, 0x4f, 0xf7, 0x22, 0x62, 0x57, 0xed, 0x7d, 0x26, 0xfe, 0x05, 0x2f, + 0xea, 0x69, 0x10, 0x3d, 0x9e, 0xc3, 0x69, 0x98, 0xc6, 0x31, 0xc8, 0x5d, 0xb0, 0xcf, 0x0e, 0x89, + 0xff, 0xcb, 0x15, 0xcf, 0x41, 0x3e, 0xd7, 0x2d, 0x7f, 0xad, 0x91, 0xa8, 0xd0, 0x78, 0xf7, 0x23, + 0x60, 0xbe, 0xee, 0xdb, 0xbe, 0xa2, 0xdb, 0x56, 0x25, 0xaa, 0x17, 0x90, 0x19, 0xf2, 0x83, 0x31, + 0xc1, 0x8c, 0xd4, 0x32, 0x1d, 0xf6, 0x4f, 0x87, 0xd7, 0x97, 0x57, 0x87, 0xb3, 0x83, 0x7f, 0xb9, + 0x15, 0xea, 0x09, 0x2e, 0x32, 0xe4, 0x40, 0xff, 0xda, 0x0c, 0x93, 0x1e, 0xb5, 0xd9, 0x9c, 0x85, + 0x2d, 0xdc, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0x5e, 0x7d, 0xb2, 0xc9, 0x96, 0x01, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/stress/metrics_client/main.go b/vendor/google.golang.org/grpc/stress/metrics_client/main.go new file mode 100644 index 0000000..c9a5c8c --- /dev/null +++ b/vendor/google.golang.org/grpc/stress/metrics_client/main.go @@ -0,0 +1,82 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package main + +import ( + "context" + "flag" + "fmt" + "io" + + "google.golang.org/grpc" + "google.golang.org/grpc/grpclog" + metricspb "google.golang.org/grpc/stress/grpc_testing" +) + +var ( + metricsServerAddress = flag.String("metrics_server_address", "", "The metrics server addresses in the fomrat :") + totalOnly = flag.Bool("total_only", false, "If true, this prints only the total value of all gauges") +) + +func printMetrics(client metricspb.MetricsServiceClient, totalOnly bool) { + stream, err := client.GetAllGauges(context.Background(), &metricspb.EmptyMessage{}) + if err != nil { + grpclog.Fatalf("failed to call GetAllGuages: %v", err) + } + + var ( + overallQPS int64 + rpcStatus error + ) + for { + gaugeResponse, err := stream.Recv() + if err != nil { + rpcStatus = err + break + } + if _, ok := gaugeResponse.GetValue().(*metricspb.GaugeResponse_LongValue); !ok { + panic(fmt.Sprintf("gauge %s is not a long value", gaugeResponse.Name)) + } + v := gaugeResponse.GetLongValue() + if !totalOnly { + grpclog.Infof("%s: %d", gaugeResponse.Name, v) + } + overallQPS += v + } + if rpcStatus != io.EOF { + grpclog.Fatalf("failed to finish server streaming: %v", rpcStatus) + } + grpclog.Infof("overall qps: %d", overallQPS) +} + +func main() { + flag.Parse() + if *metricsServerAddress == "" { + grpclog.Fatalf("Metrics server address is empty.") + } + + conn, err := grpc.Dial(*metricsServerAddress, grpc.WithInsecure()) + if err != nil { + grpclog.Fatalf("cannot connect to metrics server: %v", err) + } + defer conn.Close() + + c := metricspb.NewMetricsServiceClient(conn) + printMetrics(c, *totalOnly) +} diff --git a/vendor/google.golang.org/grpc/tap/tap.go b/vendor/google.golang.org/grpc/tap/tap.go new file mode 100644 index 0000000..584360f --- /dev/null +++ b/vendor/google.golang.org/grpc/tap/tap.go @@ -0,0 +1,51 @@ +/* + * + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package tap defines the function handles which are executed on the transport +// layer of gRPC-Go and related information. Everything here is EXPERIMENTAL. +package tap + +import ( + "context" +) + +// Info defines the relevant information needed by the handles. +type Info struct { + // FullMethodName is the string of grpc method (in the format of + // /package.service/method). + FullMethodName string + // TODO: More to be added. +} + +// ServerInHandle defines the function which runs before a new stream is created +// on the server side. If it returns a non-nil error, the stream will not be +// created and a RST_STREAM will be sent back to the client with REFUSED_STREAM. +// The client will receive an RPC error "code = Unavailable, desc = stream +// terminated by RST_STREAM with error code: REFUSED_STREAM". +// +// It's intended to be used in situations where you don't want to waste the +// resources to accept the new stream (e.g. rate-limiting). And the content of +// the error will be ignored and won't be sent back to the client. 
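// Editorial aside, illustrative only -- a minimal sketch, not part of the
// vendored file, of the ServerInHandle type declared just below: a handle
// that refuses one made-up method outright. It assumes an "errors" import,
// and it is installed on a server with the grpc.InTapHandle server option.
func refuseExpensive(ctx context.Context, info *Info) (context.Context, error) {
	// "/example.Service/Expensive" is a hypothetical method name.
	if info.FullMethodName == "/example.Service/Expensive" {
		// A non-nil error makes the transport reject the stream with
		// RST_STREAM(REFUSED_STREAM); the error text never reaches the client.
		return nil, errors.New("refused by tap handle")
	}
	// Returning the (possibly derived) context lets the stream proceed.
	return ctx, nil
}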
For other +// general usages, please use interceptors. +// +// Note that it is executed in the per-connection I/O goroutine(s) instead of +// per-RPC goroutine. Therefore, users should NOT have any +// blocking/time-consuming work in this handle. Otherwise all the RPCs would +// slow down. Also, for the same reason, this handle won't be called +// concurrently by gRPC. +type ServerInHandle func(ctx context.Context, info *Info) (context.Context, error) diff --git a/vendor/google.golang.org/grpc/test/bufconn/bufconn.go b/vendor/google.golang.org/grpc/test/bufconn/bufconn.go new file mode 100644 index 0000000..60ae770 --- /dev/null +++ b/vendor/google.golang.org/grpc/test/bufconn/bufconn.go @@ -0,0 +1,244 @@ +/* + * + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package bufconn provides a net.Conn implemented by a buffer and related +// dialing and listening functionality. +package bufconn + +import ( + "fmt" + "io" + "net" + "sync" + "time" +) + +// Listener implements a net.Listener that creates local, buffered net.Conns +// via its Accept and Dial method. +type Listener struct { + mu sync.Mutex + sz int + ch chan net.Conn + done chan struct{} +} + +var errClosed = fmt.Errorf("closed") + +// Listen returns a Listener that can only be contacted by its own Dialers and +// creates buffered connections between the two. +func Listen(sz int) *Listener { + return &Listener{sz: sz, ch: make(chan net.Conn), done: make(chan struct{})} +} + +// Accept blocks until Dial is called, then returns a net.Conn for the server +// half of the connection. +func (l *Listener) Accept() (net.Conn, error) { + select { + case <-l.done: + return nil, errClosed + case c := <-l.ch: + return c, nil + } +} + +// Close stops the listener. +func (l *Listener) Close() error { + l.mu.Lock() + defer l.mu.Unlock() + select { + case <-l.done: + // Already closed. + break + default: + close(l.done) + } + return nil +} + +// Addr reports the address of the listener. +func (l *Listener) Addr() net.Addr { return addr{} } + +// Dial creates an in-memory full-duplex network connection, unblocks Accept by +// providing it the server half of the connection, and returns the client half +// of the connection. +func (l *Listener) Dial() (net.Conn, error) { + p1, p2 := newPipe(l.sz), newPipe(l.sz) + select { + case <-l.done: + return nil, errClosed + case l.ch <- &conn{p1, p2}: + return &conn{p2, p1}, nil + } +} + +type pipe struct { + mu sync.Mutex + + // buf contains the data in the pipe. It is a ring buffer of fixed capacity, + // with r and w pointing to the offset to read and write, respsectively. + // + // Data is read between [r, w) and written to [w, r), wrapping around the end + // of the slice if necessary. + // + // The buffer is empty if r == len(buf), otherwise if r == w, it is full. + // + // w and r are always in the range [0, cap(buf)) and [0, len(buf)]. 
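// Editorial aside, illustrative only -- a minimal sketch, not part of the
// vendored file, of how tests typically wire a gRPC server and client through
// the Listener defined above. It assumes the caller imports
// "google.golang.org/grpc", "net" and "time"; service registration on srv is
// elided.
func dialBuf() (*grpc.ClientConn, error) {
	lis := bufconn.Listen(1024 * 1024) // each in-memory pipe gets a 1 MiB buffer
	srv := grpc.NewServer()
	go srv.Serve(lis) // Accept hands the server one half of each Dial
	return grpc.Dial("bufnet",
		grpc.WithDialer(func(string, time.Duration) (net.Conn, error) {
			return lis.Dial() // client half of an in-memory connection
		}),
		grpc.WithInsecure(),
	)
}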
+ buf []byte + w, r int + + wwait sync.Cond + rwait sync.Cond + + closed bool + writeClosed bool +} + +func newPipe(sz int) *pipe { + p := &pipe{buf: make([]byte, 0, sz)} + p.wwait.L = &p.mu + p.rwait.L = &p.mu + return p +} + +func (p *pipe) empty() bool { + return p.r == len(p.buf) +} + +func (p *pipe) full() bool { + return p.r < len(p.buf) && p.r == p.w +} + +func (p *pipe) Read(b []byte) (n int, err error) { + p.mu.Lock() + defer p.mu.Unlock() + // Block until p has data. + for { + if p.closed { + return 0, io.ErrClosedPipe + } + if !p.empty() { + break + } + if p.writeClosed { + return 0, io.EOF + } + p.rwait.Wait() + } + wasFull := p.full() + + n = copy(b, p.buf[p.r:len(p.buf)]) + p.r += n + if p.r == cap(p.buf) { + p.r = 0 + p.buf = p.buf[:p.w] + } + + // Signal a blocked writer, if any + if wasFull { + p.wwait.Signal() + } + + return n, nil +} + +func (p *pipe) Write(b []byte) (n int, err error) { + p.mu.Lock() + defer p.mu.Unlock() + if p.closed { + return 0, io.ErrClosedPipe + } + for len(b) > 0 { + // Block until p is not full. + for { + if p.closed || p.writeClosed { + return 0, io.ErrClosedPipe + } + if !p.full() { + break + } + p.wwait.Wait() + } + wasEmpty := p.empty() + + end := cap(p.buf) + if p.w < p.r { + end = p.r + } + x := copy(p.buf[p.w:end], b) + b = b[x:] + n += x + p.w += x + if p.w > len(p.buf) { + p.buf = p.buf[:p.w] + } + if p.w == cap(p.buf) { + p.w = 0 + } + + // Signal a blocked reader, if any. + if wasEmpty { + p.rwait.Signal() + } + } + return n, nil +} + +func (p *pipe) Close() error { + p.mu.Lock() + defer p.mu.Unlock() + p.closed = true + // Signal all blocked readers and writers to return an error. + p.rwait.Broadcast() + p.wwait.Broadcast() + return nil +} + +func (p *pipe) closeWrite() error { + p.mu.Lock() + defer p.mu.Unlock() + p.writeClosed = true + // Signal all blocked readers and writers to return an error. + p.rwait.Broadcast() + p.wwait.Broadcast() + return nil +} + +type conn struct { + io.Reader + io.Writer +} + +func (c *conn) Close() error { + err1 := c.Reader.(*pipe).Close() + err2 := c.Writer.(*pipe).closeWrite() + if err1 != nil { + return err1 + } + return err2 +} + +func (*conn) LocalAddr() net.Addr { return addr{} } +func (*conn) RemoteAddr() net.Addr { return addr{} } +func (c *conn) SetDeadline(t time.Time) error { return fmt.Errorf("unsupported") } +func (c *conn) SetReadDeadline(t time.Time) error { return fmt.Errorf("unsupported") } +func (c *conn) SetWriteDeadline(t time.Time) error { return fmt.Errorf("unsupported") } + +type addr struct{} + +func (addr) Network() string { return "bufconn" } +func (addr) String() string { return "bufconn" } diff --git a/vendor/google.golang.org/grpc/test/codec_perf/perf.pb.go b/vendor/google.golang.org/grpc/test/codec_perf/perf.pb.go new file mode 100644 index 0000000..6bcbc3f --- /dev/null +++ b/vendor/google.golang.org/grpc/test/codec_perf/perf.pb.go @@ -0,0 +1,75 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: codec_perf/perf.proto + +package codec_perf + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Buffer is a message that contains a body of bytes that is used to exercise +// encoding and decoding overheads. +type Buffer struct { + Body []byte `protobuf:"bytes,1,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Buffer) Reset() { *m = Buffer{} } +func (m *Buffer) String() string { return proto.CompactTextString(m) } +func (*Buffer) ProtoMessage() {} +func (*Buffer) Descriptor() ([]byte, []int) { + return fileDescriptor_perf_6cc81a33b24d08e7, []int{0} +} +func (m *Buffer) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Buffer.Unmarshal(m, b) +} +func (m *Buffer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Buffer.Marshal(b, m, deterministic) +} +func (dst *Buffer) XXX_Merge(src proto.Message) { + xxx_messageInfo_Buffer.Merge(dst, src) +} +func (m *Buffer) XXX_Size() int { + return xxx_messageInfo_Buffer.Size(m) +} +func (m *Buffer) XXX_DiscardUnknown() { + xxx_messageInfo_Buffer.DiscardUnknown(m) +} + +var xxx_messageInfo_Buffer proto.InternalMessageInfo + +func (m *Buffer) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +func init() { + proto.RegisterType((*Buffer)(nil), "codec.perf.Buffer") +} + +func init() { proto.RegisterFile("codec_perf/perf.proto", fileDescriptor_perf_6cc81a33b24d08e7) } + +var fileDescriptor_perf_6cc81a33b24d08e7 = []byte{ + // 83 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x12, 0x4d, 0xce, 0x4f, 0x49, + 0x4d, 0x8e, 0x2f, 0x48, 0x2d, 0x4a, 0xd3, 0x07, 0x11, 0x7a, 0x05, 0x45, 0xf9, 0x25, 0xf9, 0x42, + 0x5c, 0x60, 0x61, 0x3d, 0x90, 0x88, 0x92, 0x0c, 0x17, 0x9b, 0x53, 0x69, 0x5a, 0x5a, 0x6a, 0x91, + 0x90, 0x10, 0x17, 0x4b, 0x52, 0x7e, 0x4a, 0xa5, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x4f, 0x10, 0x98, + 0x9d, 0xc4, 0x06, 0xd6, 0x60, 0x0c, 0x08, 0x00, 0x00, 0xff, 0xff, 0xa3, 0x5f, 0x4f, 0x3c, 0x49, + 0x00, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/test/go_vet/vet.go b/vendor/google.golang.org/grpc/test/go_vet/vet.go new file mode 100644 index 0000000..475e8d6 --- /dev/null +++ b/vendor/google.golang.org/grpc/test/go_vet/vet.go @@ -0,0 +1,53 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// vet checks whether files that are supposed to be built on appengine running +// Go 1.10 or earlier import an unsupported package (e.g. "unsafe", "syscall"). 
+package main + +import ( + "fmt" + "go/build" + "os" +) + +func main() { + fail := false + b := build.Default + b.BuildTags = []string{"appengine", "appenginevm"} + argsWithoutProg := os.Args[1:] + for _, dir := range argsWithoutProg { + p, err := b.Import(".", dir, 0) + if _, ok := err.(*build.NoGoError); ok { + continue + } else if err != nil { + fmt.Printf("build.Import failed due to %v\n", err) + fail = true + continue + } + for _, pkg := range p.Imports { + if pkg == "syscall" || pkg == "unsafe" { + fmt.Printf("Package %s/%s importing %s package without appengine build tag is NOT ALLOWED!\n", p.Dir, p.Name, pkg) + fail = true + } + } + } + if fail { + os.Exit(1) + } +} diff --git a/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go b/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go new file mode 100644 index 0000000..871f7cc --- /dev/null +++ b/vendor/google.golang.org/grpc/test/grpc_testing/test.pb.go @@ -0,0 +1,949 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc_testing/test.proto + +package grpc_testing + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// The type of payload that should be returned. +type PayloadType int32 + +const ( + // Compressable text format. + PayloadType_COMPRESSABLE PayloadType = 0 + // Uncompressable binary format. + PayloadType_UNCOMPRESSABLE PayloadType = 1 + // Randomly chosen from all other formats defined in this enum. + PayloadType_RANDOM PayloadType = 2 +) + +var PayloadType_name = map[int32]string{ + 0: "COMPRESSABLE", + 1: "UNCOMPRESSABLE", + 2: "RANDOM", +} +var PayloadType_value = map[string]int32{ + "COMPRESSABLE": 0, + "UNCOMPRESSABLE": 1, + "RANDOM": 2, +} + +func (x PayloadType) String() string { + return proto.EnumName(PayloadType_name, int32(x)) +} +func (PayloadType) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{0} +} + +type Empty struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Empty) Reset() { *m = Empty{} } +func (m *Empty) String() string { return proto.CompactTextString(m) } +func (*Empty) ProtoMessage() {} +func (*Empty) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{0} +} +func (m *Empty) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Empty.Unmarshal(m, b) +} +func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Empty.Marshal(b, m, deterministic) +} +func (dst *Empty) XXX_Merge(src proto.Message) { + xxx_messageInfo_Empty.Merge(dst, src) +} +func (m *Empty) XXX_Size() int { + return xxx_messageInfo_Empty.Size(m) +} +func (m *Empty) XXX_DiscardUnknown() { + xxx_messageInfo_Empty.DiscardUnknown(m) +} + +var xxx_messageInfo_Empty proto.InternalMessageInfo + +// A block of data, to simply increase gRPC message size. +type Payload struct { + // The type of data in body. 
+ Type PayloadType `protobuf:"varint,1,opt,name=type,proto3,enum=grpc.testing.PayloadType" json:"type,omitempty"` + // Primary contents of payload. + Body []byte `protobuf:"bytes,2,opt,name=body,proto3" json:"body,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Payload) Reset() { *m = Payload{} } +func (m *Payload) String() string { return proto.CompactTextString(m) } +func (*Payload) ProtoMessage() {} +func (*Payload) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{1} +} +func (m *Payload) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Payload.Unmarshal(m, b) +} +func (m *Payload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Payload.Marshal(b, m, deterministic) +} +func (dst *Payload) XXX_Merge(src proto.Message) { + xxx_messageInfo_Payload.Merge(dst, src) +} +func (m *Payload) XXX_Size() int { + return xxx_messageInfo_Payload.Size(m) +} +func (m *Payload) XXX_DiscardUnknown() { + xxx_messageInfo_Payload.DiscardUnknown(m) +} + +var xxx_messageInfo_Payload proto.InternalMessageInfo + +func (m *Payload) GetType() PayloadType { + if m != nil { + return m.Type + } + return PayloadType_COMPRESSABLE +} + +func (m *Payload) GetBody() []byte { + if m != nil { + return m.Body + } + return nil +} + +// Unary request. +type SimpleRequest struct { + // Desired payload type in the response from the server. + // If response_type is RANDOM, server randomly chooses one from other formats. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Desired payload size in the response from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + ResponseSize int32 `protobuf:"varint,2,opt,name=response_size,json=responseSize,proto3" json:"response_size,omitempty"` + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + // Whether SimpleResponse should include username. + FillUsername bool `protobuf:"varint,4,opt,name=fill_username,json=fillUsername,proto3" json:"fill_username,omitempty"` + // Whether SimpleResponse should include OAuth scope. 
+ FillOauthScope bool `protobuf:"varint,5,opt,name=fill_oauth_scope,json=fillOauthScope,proto3" json:"fill_oauth_scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleRequest) Reset() { *m = SimpleRequest{} } +func (m *SimpleRequest) String() string { return proto.CompactTextString(m) } +func (*SimpleRequest) ProtoMessage() {} +func (*SimpleRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{2} +} +func (m *SimpleRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleRequest.Unmarshal(m, b) +} +func (m *SimpleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleRequest.Marshal(b, m, deterministic) +} +func (dst *SimpleRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleRequest.Merge(dst, src) +} +func (m *SimpleRequest) XXX_Size() int { + return xxx_messageInfo_SimpleRequest.Size(m) +} +func (m *SimpleRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleRequest proto.InternalMessageInfo + +func (m *SimpleRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *SimpleRequest) GetResponseSize() int32 { + if m != nil { + return m.ResponseSize + } + return 0 +} + +func (m *SimpleRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleRequest) GetFillUsername() bool { + if m != nil { + return m.FillUsername + } + return false +} + +func (m *SimpleRequest) GetFillOauthScope() bool { + if m != nil { + return m.FillOauthScope + } + return false +} + +// Unary response, as configured by the request. +type SimpleResponse struct { + // Payload to increase message size. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + // The user the request came from, for verifying authentication was + // successful when the client expected it. + Username string `protobuf:"bytes,2,opt,name=username,proto3" json:"username,omitempty"` + // OAuth scope. 
+ OauthScope string `protobuf:"bytes,3,opt,name=oauth_scope,json=oauthScope,proto3" json:"oauth_scope,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SimpleResponse) Reset() { *m = SimpleResponse{} } +func (m *SimpleResponse) String() string { return proto.CompactTextString(m) } +func (*SimpleResponse) ProtoMessage() {} +func (*SimpleResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{3} +} +func (m *SimpleResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SimpleResponse.Unmarshal(m, b) +} +func (m *SimpleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SimpleResponse.Marshal(b, m, deterministic) +} +func (dst *SimpleResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SimpleResponse.Merge(dst, src) +} +func (m *SimpleResponse) XXX_Size() int { + return xxx_messageInfo_SimpleResponse.Size(m) +} +func (m *SimpleResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SimpleResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SimpleResponse proto.InternalMessageInfo + +func (m *SimpleResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func (m *SimpleResponse) GetUsername() string { + if m != nil { + return m.Username + } + return "" +} + +func (m *SimpleResponse) GetOauthScope() string { + if m != nil { + return m.OauthScope + } + return "" +} + +// Client-streaming request. +type StreamingInputCallRequest struct { + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallRequest) Reset() { *m = StreamingInputCallRequest{} } +func (m *StreamingInputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallRequest) ProtoMessage() {} +func (*StreamingInputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{4} +} +func (m *StreamingInputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallRequest.Unmarshal(m, b) +} +func (m *StreamingInputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallRequest.Merge(dst, src) +} +func (m *StreamingInputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallRequest.Size(m) +} +func (m *StreamingInputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallRequest proto.InternalMessageInfo + +func (m *StreamingInputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +// Client-streaming response. +type StreamingInputCallResponse struct { + // Aggregated size of payloads received from the client. 
+ AggregatedPayloadSize int32 `protobuf:"varint,1,opt,name=aggregated_payload_size,json=aggregatedPayloadSize,proto3" json:"aggregated_payload_size,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingInputCallResponse) Reset() { *m = StreamingInputCallResponse{} } +func (m *StreamingInputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingInputCallResponse) ProtoMessage() {} +func (*StreamingInputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{5} +} +func (m *StreamingInputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingInputCallResponse.Unmarshal(m, b) +} +func (m *StreamingInputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingInputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingInputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingInputCallResponse.Merge(dst, src) +} +func (m *StreamingInputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingInputCallResponse.Size(m) +} +func (m *StreamingInputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingInputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingInputCallResponse proto.InternalMessageInfo + +func (m *StreamingInputCallResponse) GetAggregatedPayloadSize() int32 { + if m != nil { + return m.AggregatedPayloadSize + } + return 0 +} + +// Configuration for a particular response. +type ResponseParameters struct { + // Desired payload sizes in responses from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + Size int32 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` + // Desired interval between consecutive responses in the response stream in + // microseconds. + IntervalUs int32 `protobuf:"varint,2,opt,name=interval_us,json=intervalUs,proto3" json:"interval_us,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResponseParameters) Reset() { *m = ResponseParameters{} } +func (m *ResponseParameters) String() string { return proto.CompactTextString(m) } +func (*ResponseParameters) ProtoMessage() {} +func (*ResponseParameters) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{6} +} +func (m *ResponseParameters) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResponseParameters.Unmarshal(m, b) +} +func (m *ResponseParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResponseParameters.Marshal(b, m, deterministic) +} +func (dst *ResponseParameters) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResponseParameters.Merge(dst, src) +} +func (m *ResponseParameters) XXX_Size() int { + return xxx_messageInfo_ResponseParameters.Size(m) +} +func (m *ResponseParameters) XXX_DiscardUnknown() { + xxx_messageInfo_ResponseParameters.DiscardUnknown(m) +} + +var xxx_messageInfo_ResponseParameters proto.InternalMessageInfo + +func (m *ResponseParameters) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *ResponseParameters) GetIntervalUs() int32 { + if m != nil { + return m.IntervalUs + } + return 0 +} + +// Server-streaming request. +type StreamingOutputCallRequest struct { + // Desired payload type in the response from the server. 
+ // If response_type is RANDOM, the payload from each response in the stream + // might be of different types. This is to simulate a mixed type of payload + // stream. + ResponseType PayloadType `protobuf:"varint,1,opt,name=response_type,json=responseType,proto3,enum=grpc.testing.PayloadType" json:"response_type,omitempty"` + // Configuration for each expected response message. + ResponseParameters []*ResponseParameters `protobuf:"bytes,2,rep,name=response_parameters,json=responseParameters,proto3" json:"response_parameters,omitempty"` + // Optional input payload sent along with the request. + Payload *Payload `protobuf:"bytes,3,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallRequest) Reset() { *m = StreamingOutputCallRequest{} } +func (m *StreamingOutputCallRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallRequest) ProtoMessage() {} +func (*StreamingOutputCallRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{7} +} +func (m *StreamingOutputCallRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallRequest.Unmarshal(m, b) +} +func (m *StreamingOutputCallRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallRequest.Merge(dst, src) +} +func (m *StreamingOutputCallRequest) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallRequest.Size(m) +} +func (m *StreamingOutputCallRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallRequest proto.InternalMessageInfo + +func (m *StreamingOutputCallRequest) GetResponseType() PayloadType { + if m != nil { + return m.ResponseType + } + return PayloadType_COMPRESSABLE +} + +func (m *StreamingOutputCallRequest) GetResponseParameters() []*ResponseParameters { + if m != nil { + return m.ResponseParameters + } + return nil +} + +func (m *StreamingOutputCallRequest) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +// Server-streaming response, as configured by the request and parameters. +type StreamingOutputCallResponse struct { + // Payload to increase response size. 
+ Payload *Payload `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingOutputCallResponse) Reset() { *m = StreamingOutputCallResponse{} } +func (m *StreamingOutputCallResponse) String() string { return proto.CompactTextString(m) } +func (*StreamingOutputCallResponse) ProtoMessage() {} +func (*StreamingOutputCallResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_test_c9f6c5af4267cb88, []int{8} +} +func (m *StreamingOutputCallResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingOutputCallResponse.Unmarshal(m, b) +} +func (m *StreamingOutputCallResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingOutputCallResponse.Marshal(b, m, deterministic) +} +func (dst *StreamingOutputCallResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StreamingOutputCallResponse.Merge(dst, src) +} +func (m *StreamingOutputCallResponse) XXX_Size() int { + return xxx_messageInfo_StreamingOutputCallResponse.Size(m) +} +func (m *StreamingOutputCallResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingOutputCallResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingOutputCallResponse proto.InternalMessageInfo + +func (m *StreamingOutputCallResponse) GetPayload() *Payload { + if m != nil { + return m.Payload + } + return nil +} + +func init() { + proto.RegisterType((*Empty)(nil), "grpc.testing.Empty") + proto.RegisterType((*Payload)(nil), "grpc.testing.Payload") + proto.RegisterType((*SimpleRequest)(nil), "grpc.testing.SimpleRequest") + proto.RegisterType((*SimpleResponse)(nil), "grpc.testing.SimpleResponse") + proto.RegisterType((*StreamingInputCallRequest)(nil), "grpc.testing.StreamingInputCallRequest") + proto.RegisterType((*StreamingInputCallResponse)(nil), "grpc.testing.StreamingInputCallResponse") + proto.RegisterType((*ResponseParameters)(nil), "grpc.testing.ResponseParameters") + proto.RegisterType((*StreamingOutputCallRequest)(nil), "grpc.testing.StreamingOutputCallRequest") + proto.RegisterType((*StreamingOutputCallResponse)(nil), "grpc.testing.StreamingOutputCallResponse") + proto.RegisterEnum("grpc.testing.PayloadType", PayloadType_name, PayloadType_value) +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TestServiceClient is the client API for TestService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TestServiceClient interface { + // One empty request followed by one empty response. + EmptyCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) + // One request followed by one response. + // The server returns the client payload as-is. + UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) + // One request followed by a sequence of responses (streamed download). + // The server returns the payload with client desired type and sizes. 
+ StreamingOutputCall(ctx context.Context, in *StreamingOutputCallRequest, opts ...grpc.CallOption) (TestService_StreamingOutputCallClient, error) + // A sequence of requests followed by one response (streamed upload). + // The server returns the aggregated size of client payload as the result. + StreamingInputCall(ctx context.Context, opts ...grpc.CallOption) (TestService_StreamingInputCallClient, error) + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) + // A sequence of requests followed by a sequence of responses. + // The server buffers all the client requests and then serves them in order. A + // stream of responses are returned to the client when the server starts with + // first request. + HalfDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_HalfDuplexCallClient, error) +} + +type testServiceClient struct { + cc *grpc.ClientConn +} + +func NewTestServiceClient(cc *grpc.ClientConn) TestServiceClient { + return &testServiceClient{cc} +} + +func (c *testServiceClient) EmptyCall(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/grpc.testing.TestService/EmptyCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testServiceClient) UnaryCall(ctx context.Context, in *SimpleRequest, opts ...grpc.CallOption) (*SimpleResponse, error) { + out := new(SimpleResponse) + err := c.cc.Invoke(ctx, "/grpc.testing.TestService/UnaryCall", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *testServiceClient) StreamingOutputCall(ctx context.Context, in *StreamingOutputCallRequest, opts ...grpc.CallOption) (TestService_StreamingOutputCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[0], "/grpc.testing.TestService/StreamingOutputCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceStreamingOutputCallClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type TestService_StreamingOutputCallClient interface { + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceStreamingOutputCallClient struct { + grpc.ClientStream +} + +func (x *testServiceStreamingOutputCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) StreamingInputCall(ctx context.Context, opts ...grpc.CallOption) (TestService_StreamingInputCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[1], "/grpc.testing.TestService/StreamingInputCall", opts...) 
+ if err != nil { + return nil, err + } + x := &testServiceStreamingInputCallClient{stream} + return x, nil +} + +type TestService_StreamingInputCallClient interface { + Send(*StreamingInputCallRequest) error + CloseAndRecv() (*StreamingInputCallResponse, error) + grpc.ClientStream +} + +type testServiceStreamingInputCallClient struct { + grpc.ClientStream +} + +func (x *testServiceStreamingInputCallClient) Send(m *StreamingInputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceStreamingInputCallClient) CloseAndRecv() (*StreamingInputCallResponse, error) { + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + m := new(StreamingInputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) FullDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_FullDuplexCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[2], "/grpc.testing.TestService/FullDuplexCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceFullDuplexCallClient{stream} + return x, nil +} + +type TestService_FullDuplexCallClient interface { + Send(*StreamingOutputCallRequest) error + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceFullDuplexCallClient struct { + grpc.ClientStream +} + +func (x *testServiceFullDuplexCallClient) Send(m *StreamingOutputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *testServiceClient) HalfDuplexCall(ctx context.Context, opts ...grpc.CallOption) (TestService_HalfDuplexCallClient, error) { + stream, err := c.cc.NewStream(ctx, &_TestService_serviceDesc.Streams[3], "/grpc.testing.TestService/HalfDuplexCall", opts...) + if err != nil { + return nil, err + } + x := &testServiceHalfDuplexCallClient{stream} + return x, nil +} + +type TestService_HalfDuplexCallClient interface { + Send(*StreamingOutputCallRequest) error + Recv() (*StreamingOutputCallResponse, error) + grpc.ClientStream +} + +type testServiceHalfDuplexCallClient struct { + grpc.ClientStream +} + +func (x *testServiceHalfDuplexCallClient) Send(m *StreamingOutputCallRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *testServiceHalfDuplexCallClient) Recv() (*StreamingOutputCallResponse, error) { + m := new(StreamingOutputCallResponse) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// TestServiceServer is the server API for TestService service. +type TestServiceServer interface { + // One empty request followed by one empty response. + EmptyCall(context.Context, *Empty) (*Empty, error) + // One request followed by one response. + // The server returns the client payload as-is. + UnaryCall(context.Context, *SimpleRequest) (*SimpleResponse, error) + // One request followed by a sequence of responses (streamed download). + // The server returns the payload with client desired type and sizes. + StreamingOutputCall(*StreamingOutputCallRequest, TestService_StreamingOutputCallServer) error + // A sequence of requests followed by one response (streamed upload). + // The server returns the aggregated size of client payload as the result. 
+ StreamingInputCall(TestService_StreamingInputCallServer) error + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + FullDuplexCall(TestService_FullDuplexCallServer) error + // A sequence of requests followed by a sequence of responses. + // The server buffers all the client requests and then serves them in order. A + // stream of responses are returned to the client when the server starts with + // first request. + HalfDuplexCall(TestService_HalfDuplexCallServer) error +} + +func RegisterTestServiceServer(s *grpc.Server, srv TestServiceServer) { + s.RegisterService(&_TestService_serviceDesc, srv) +} + +func _TestService_EmptyCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServiceServer).EmptyCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.TestService/EmptyCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServiceServer).EmptyCall(ctx, req.(*Empty)) + } + return interceptor(ctx, in, info, handler) +} + +func _TestService_UnaryCall_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SimpleRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TestServiceServer).UnaryCall(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/grpc.testing.TestService/UnaryCall", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TestServiceServer).UnaryCall(ctx, req.(*SimpleRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TestService_StreamingOutputCall_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(StreamingOutputCallRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TestServiceServer).StreamingOutputCall(m, &testServiceStreamingOutputCallServer{stream}) +} + +type TestService_StreamingOutputCallServer interface { + Send(*StreamingOutputCallResponse) error + grpc.ServerStream +} + +type testServiceStreamingOutputCallServer struct { + grpc.ServerStream +} + +func (x *testServiceStreamingOutputCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func _TestService_StreamingInputCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).StreamingInputCall(&testServiceStreamingInputCallServer{stream}) +} + +type TestService_StreamingInputCallServer interface { + SendAndClose(*StreamingInputCallResponse) error + Recv() (*StreamingInputCallRequest, error) + grpc.ServerStream +} + +type testServiceStreamingInputCallServer struct { + grpc.ServerStream +} + +func (x *testServiceStreamingInputCallServer) SendAndClose(m *StreamingInputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceStreamingInputCallServer) Recv() (*StreamingInputCallRequest, error) { + m := new(StreamingInputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_FullDuplexCall_Handler(srv interface{}, stream 
grpc.ServerStream) error { + return srv.(TestServiceServer).FullDuplexCall(&testServiceFullDuplexCallServer{stream}) +} + +type TestService_FullDuplexCallServer interface { + Send(*StreamingOutputCallResponse) error + Recv() (*StreamingOutputCallRequest, error) + grpc.ServerStream +} + +type testServiceFullDuplexCallServer struct { + grpc.ServerStream +} + +func (x *testServiceFullDuplexCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceFullDuplexCallServer) Recv() (*StreamingOutputCallRequest, error) { + m := new(StreamingOutputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TestService_HalfDuplexCall_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TestServiceServer).HalfDuplexCall(&testServiceHalfDuplexCallServer{stream}) +} + +type TestService_HalfDuplexCallServer interface { + Send(*StreamingOutputCallResponse) error + Recv() (*StreamingOutputCallRequest, error) + grpc.ServerStream +} + +type testServiceHalfDuplexCallServer struct { + grpc.ServerStream +} + +func (x *testServiceHalfDuplexCallServer) Send(m *StreamingOutputCallResponse) error { + return x.ServerStream.SendMsg(m) +} + +func (x *testServiceHalfDuplexCallServer) Recv() (*StreamingOutputCallRequest, error) { + m := new(StreamingOutputCallRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +var _TestService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "grpc.testing.TestService", + HandlerType: (*TestServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "EmptyCall", + Handler: _TestService_EmptyCall_Handler, + }, + { + MethodName: "UnaryCall", + Handler: _TestService_UnaryCall_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "StreamingOutputCall", + Handler: _TestService_StreamingOutputCall_Handler, + ServerStreams: true, + }, + { + StreamName: "StreamingInputCall", + Handler: _TestService_StreamingInputCall_Handler, + ClientStreams: true, + }, + { + StreamName: "FullDuplexCall", + Handler: _TestService_FullDuplexCall_Handler, + ServerStreams: true, + ClientStreams: true, + }, + { + StreamName: "HalfDuplexCall", + Handler: _TestService_HalfDuplexCall_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc_testing/test.proto", +} + +func init() { proto.RegisterFile("grpc_testing/test.proto", fileDescriptor_test_c9f6c5af4267cb88) } + +var fileDescriptor_test_c9f6c5af4267cb88 = []byte{ + // 587 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x55, 0xdb, 0x6e, 0xd3, 0x40, + 0x10, 0x65, 0xdb, 0xf4, 0x36, 0x49, 0xad, 0x68, 0xab, 0xaa, 0xae, 0x8b, 0x84, 0x65, 0x1e, 0x30, + 0x48, 0xa4, 0x28, 0x08, 0x1e, 0x41, 0xa5, 0x17, 0x51, 0x29, 0x4d, 0x82, 0x9d, 0x3c, 0x47, 0xdb, + 0x64, 0x6b, 0x2c, 0x39, 0xf6, 0xb2, 0x5e, 0x57, 0xa4, 0x0f, 0xfc, 0x18, 0x3f, 0xc3, 0x47, 0xf0, + 0x01, 0x68, 0xd7, 0x76, 0xe2, 0x24, 0xae, 0x48, 0x41, 0xf0, 0x14, 0x7b, 0xe6, 0xcc, 0x99, 0x73, + 0x3c, 0xb3, 0x1b, 0x38, 0xf0, 0x38, 0x1b, 0x0e, 0x04, 0x8d, 0x85, 0x1f, 0x7a, 0xc7, 0xf2, 0xb7, + 0xc1, 0x78, 0x24, 0x22, 0x5c, 0x93, 0x89, 0x46, 0x96, 0xb0, 0xb6, 0x60, 0xe3, 0x7c, 0xcc, 0xc4, + 0xc4, 0x6a, 0xc1, 0x56, 0x97, 0x4c, 0x82, 0x88, 0x8c, 0xf0, 0x4b, 0xa8, 0x88, 0x09, 0xa3, 0x3a, + 0x32, 0x91, 0xad, 0x35, 0x0f, 0x1b, 0xc5, 0x82, 0x46, 0x06, 0xea, 0x4d, 0x18, 0x75, 0x14, 0x0c, + 0x63, 0xa8, 0x5c, 0x47, 0xa3, 0x89, 0xbe, 0x66, 0x22, 0xbb, 
0xe6, 0xa8, 0x67, 0xeb, 0x27, 0x82, + 0x5d, 0xd7, 0x1f, 0xb3, 0x80, 0x3a, 0xf4, 0x4b, 0x42, 0x63, 0x81, 0xdf, 0xc1, 0x2e, 0xa7, 0x31, + 0x8b, 0xc2, 0x98, 0x0e, 0x56, 0x63, 0xaf, 0xe5, 0x78, 0xf9, 0x86, 0x9f, 0x16, 0xea, 0x63, 0xff, + 0x8e, 0xaa, 0x76, 0x1b, 0x33, 0x90, 0xeb, 0xdf, 0x51, 0x7c, 0x0c, 0x5b, 0x2c, 0x65, 0xd0, 0xd7, + 0x4d, 0x64, 0x57, 0x9b, 0xfb, 0xa5, 0xf4, 0x4e, 0x8e, 0x92, 0xac, 0x37, 0x7e, 0x10, 0x0c, 0x92, + 0x98, 0xf2, 0x90, 0x8c, 0xa9, 0x5e, 0x31, 0x91, 0xbd, 0xed, 0xd4, 0x64, 0xb0, 0x9f, 0xc5, 0xb0, + 0x0d, 0x75, 0x05, 0x8a, 0x48, 0x22, 0x3e, 0x0f, 0xe2, 0x61, 0xc4, 0xa8, 0xbe, 0xa1, 0x70, 0x9a, + 0x8c, 0x77, 0x64, 0xd8, 0x95, 0x51, 0xeb, 0x1b, 0x68, 0xb9, 0xeb, 0x54, 0x55, 0x51, 0x11, 0x5a, + 0x49, 0x91, 0x01, 0xdb, 0x53, 0x31, 0xd2, 0xe2, 0x8e, 0x33, 0x7d, 0xc7, 0x4f, 0xa0, 0x5a, 0xd4, + 0xb0, 0xae, 0xd2, 0x10, 0xcd, 0xfa, 0xb7, 0xe0, 0xd0, 0x15, 0x9c, 0x92, 0xb1, 0x1f, 0x7a, 0x97, + 0x21, 0x4b, 0xc4, 0x29, 0x09, 0x82, 0x7c, 0x02, 0x0f, 0x95, 0x62, 0xf5, 0xc0, 0x28, 0x63, 0xcb, + 0x9c, 0xbd, 0x85, 0x03, 0xe2, 0x79, 0x9c, 0x7a, 0x44, 0xd0, 0xd1, 0x20, 0xab, 0x49, 0x47, 0x83, + 0xd4, 0x68, 0xf6, 0x67, 0xe9, 0x8c, 0x5a, 0xce, 0xc8, 0xba, 0x04, 0x9c, 0x73, 0x74, 0x09, 0x27, + 0x63, 0x2a, 0x28, 0x8f, 0xe5, 0x12, 0x15, 0x4a, 0xd5, 0xb3, 0xb4, 0xeb, 0x87, 0x82, 0xf2, 0x5b, + 0x22, 0x07, 0x94, 0x0d, 0x1c, 0xf2, 0x50, 0x3f, 0xb6, 0x7e, 0xa0, 0x82, 0xc2, 0x4e, 0x22, 0x16, + 0x0c, 0xff, 0xed, 0xca, 0x7d, 0x82, 0xbd, 0x69, 0x3d, 0x9b, 0x4a, 0xd5, 0xd7, 0xcc, 0x75, 0xbb, + 0xda, 0x34, 0xe7, 0x59, 0x96, 0x2d, 0x39, 0x98, 0x2f, 0xdb, 0x7c, 0xe8, 0x82, 0x5a, 0x6d, 0x38, + 0x2a, 0x75, 0xf8, 0x87, 0xeb, 0xf5, 0xe2, 0x3d, 0x54, 0x0b, 0x86, 0x71, 0x1d, 0x6a, 0xa7, 0x9d, + 0xab, 0xae, 0x73, 0xee, 0xba, 0x27, 0x1f, 0x5a, 0xe7, 0xf5, 0x47, 0x18, 0x83, 0xd6, 0x6f, 0xcf, + 0xc5, 0x10, 0x06, 0xd8, 0x74, 0x4e, 0xda, 0x67, 0x9d, 0xab, 0xfa, 0x5a, 0xf3, 0x7b, 0x05, 0xaa, + 0x3d, 0x1a, 0x0b, 0x97, 0xf2, 0x5b, 0x7f, 0x48, 0xf1, 0x1b, 0xd8, 0x51, 0x17, 0x88, 0x94, 0x85, + 0xf7, 0xe6, 0xbb, 0xab, 0x84, 0x51, 0x16, 0xc4, 0x17, 0xb0, 0xd3, 0x0f, 0x09, 0x4f, 0xcb, 0x8e, + 0xe6, 0x11, 0x73, 0x17, 0x87, 0xf1, 0xb8, 0x3c, 0x99, 0x7d, 0x80, 0x00, 0xf6, 0x4a, 0xbe, 0x0f, + 0xb6, 0x17, 0x8a, 0xee, 0x5d, 0x12, 0xe3, 0xf9, 0x0a, 0xc8, 0xb4, 0xd7, 0x2b, 0x84, 0x7d, 0xc0, + 0xcb, 0x27, 0x02, 0x3f, 0xbb, 0x87, 0x62, 0xf1, 0x04, 0x1a, 0xf6, 0xef, 0x81, 0x69, 0x2b, 0x5b, + 0xb6, 0xd2, 0x2e, 0x92, 0x20, 0x38, 0x4b, 0x58, 0x40, 0xbf, 0xfe, 0x33, 0x4f, 0x36, 0x52, 0xae, + 0xb4, 0x8f, 0x24, 0xb8, 0xf9, 0x0f, 0xad, 0xae, 0x37, 0xd5, 0x7f, 0xd0, 0xeb, 0x5f, 0x01, 0x00, + 0x00, 0xff, 0xff, 0x07, 0xc7, 0x76, 0x69, 0x9e, 0x06, 0x00, 0x00, +} diff --git a/vendor/google.golang.org/grpc/test/race.go b/vendor/google.golang.org/grpc/test/race.go new file mode 100644 index 0000000..acfa0df --- /dev/null +++ b/vendor/google.golang.org/grpc/test/race.go @@ -0,0 +1,24 @@ +// +build race + +/* + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package test + +func init() { + raceMode = true +} diff --git a/vendor/google.golang.org/grpc/test/rawConnWrapper.go b/vendor/google.golang.org/grpc/test/rawConnWrapper.go new file mode 100644 index 0000000..124b10e --- /dev/null +++ b/vendor/google.golang.org/grpc/test/rawConnWrapper.go @@ -0,0 +1,387 @@ +/* + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package test + +import ( + "bytes" + "fmt" + "io" + "net" + "strings" + "sync" + "time" + + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" +) + +type listenerWrapper struct { + net.Listener + mu sync.Mutex + rcw *rawConnWrapper +} + +func listenWithConnControl(network, address string) (net.Listener, error) { + l, err := net.Listen(network, address) + if err != nil { + return nil, err + } + return &listenerWrapper{Listener: l}, nil +} + +// Accept blocks until Dial is called, then returns a net.Conn for the server +// half of the connection. +func (l *listenerWrapper) Accept() (net.Conn, error) { + c, err := l.Listener.Accept() + if err != nil { + return nil, err + } + l.mu.Lock() + l.rcw = newRawConnWrapperFromConn(c) + l.mu.Unlock() + return c, nil +} + +func (l *listenerWrapper) getLastConn() *rawConnWrapper { + l.mu.Lock() + defer l.mu.Unlock() + return l.rcw +} + +type dialerWrapper struct { + c net.Conn + rcw *rawConnWrapper +} + +func (d *dialerWrapper) dialer(target string, t time.Duration) (net.Conn, error) { + c, err := net.DialTimeout("tcp", target, t) + d.c = c + d.rcw = newRawConnWrapperFromConn(c) + return c, err +} + +func (d *dialerWrapper) getRawConnWrapper() *rawConnWrapper { + return d.rcw +} + +type rawConnWrapper struct { + cc io.ReadWriteCloser + fr *http2.Framer + + // writing headers: + headerBuf bytes.Buffer + hpackEnc *hpack.Encoder + + // reading frames: + frc chan http2.Frame + frErrc chan error + readTimer *time.Timer +} + +func newRawConnWrapperFromConn(cc io.ReadWriteCloser) *rawConnWrapper { + rcw := &rawConnWrapper{ + cc: cc, + frc: make(chan http2.Frame, 1), + frErrc: make(chan error, 1), + } + rcw.hpackEnc = hpack.NewEncoder(&rcw.headerBuf) + rcw.fr = http2.NewFramer(cc, cc) + rcw.fr.ReadMetaHeaders = hpack.NewDecoder(4096 /*initialHeaderTableSize*/, nil) + + return rcw +} + +func (rcw *rawConnWrapper) Close() error { + return rcw.cc.Close() +} + +func (rcw *rawConnWrapper) readFrame() (http2.Frame, error) { + go func() { + fr, err := rcw.fr.ReadFrame() + if err != nil { + rcw.frErrc <- err + } else { + rcw.frc <- fr + } + }() + t := time.NewTimer(2 * time.Second) + defer t.Stop() + select { + case f := <-rcw.frc: + return f, nil + case err := <-rcw.frErrc: + return nil, err + case <-t.C: + return nil, fmt.Errorf("timeout waiting for frame") + } +} + +// greet initiates the client's HTTP/2 connection into a state where +// frames may be sent. 
+func (rcw *rawConnWrapper) greet() error { + rcw.writePreface() + rcw.writeInitialSettings() + rcw.wantSettings() + rcw.writeSettingsAck() + for { + f, err := rcw.readFrame() + if err != nil { + return err + } + switch f := f.(type) { + case *http2.WindowUpdateFrame: + // grpc's transport/http2_server sends this + // before the settings ack. The Go http2 + // server uses a setting instead. + case *http2.SettingsFrame: + if f.IsAck() { + return nil + } + return fmt.Errorf("during greet, got non-ACK settings frame") + default: + return fmt.Errorf("during greet, unexpected frame type %T", f) + } + } +} + +func (rcw *rawConnWrapper) writePreface() error { + n, err := rcw.cc.Write([]byte(http2.ClientPreface)) + if err != nil { + return fmt.Errorf("error writing client preface: %v", err) + } + if n != len(http2.ClientPreface) { + return fmt.Errorf("writing client preface, wrote %d bytes; want %d", n, len(http2.ClientPreface)) + } + return nil +} + +func (rcw *rawConnWrapper) writeInitialSettings() error { + if err := rcw.fr.WriteSettings(); err != nil { + return fmt.Errorf("error writing initial SETTINGS frame from client to server: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeSettingsAck() error { + if err := rcw.fr.WriteSettingsAck(); err != nil { + return fmt.Errorf("error writing ACK of server's SETTINGS: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) wantSettings() (*http2.SettingsFrame, error) { + f, err := rcw.readFrame() + if err != nil { + return nil, fmt.Errorf("error while expecting a SETTINGS frame: %v", err) + } + sf, ok := f.(*http2.SettingsFrame) + if !ok { + return nil, fmt.Errorf("got a %T; want *SettingsFrame", f) + } + return sf, nil +} + +func (rcw *rawConnWrapper) wantSettingsAck() error { + f, err := rcw.readFrame() + if err != nil { + return err + } + sf, ok := f.(*http2.SettingsFrame) + if !ok { + return fmt.Errorf("wanting a settings ACK, received a %T", f) + } + if !sf.IsAck() { + return fmt.Errorf("settings Frame didn't have ACK set") + } + return nil +} + +// wait for any activity from the server +func (rcw *rawConnWrapper) wantAnyFrame() (http2.Frame, error) { + f, err := rcw.fr.ReadFrame() + if err != nil { + return nil, err + } + return f, nil +} + +func (rcw *rawConnWrapper) encodeHeaderField(k, v string) error { + err := rcw.hpackEnc.WriteField(hpack.HeaderField{Name: k, Value: v}) + if err != nil { + return fmt.Errorf("HPACK encoding error for %q/%q: %v", k, v, err) + } + return nil +} + +// encodeRawHeader is for usage on both client and server side to construct header based on the input +// key, value pairs. +func (rcw *rawConnWrapper) encodeRawHeader(headers ...string) []byte { + if len(headers)%2 == 1 { + panic("odd number of kv args") + } + + rcw.headerBuf.Reset() + + pseudoCount := map[string]int{} + var keys []string + vals := map[string][]string{} + + for len(headers) > 0 { + k, v := headers[0], headers[1] + headers = headers[2:] + if _, ok := vals[k]; !ok { + keys = append(keys, k) + } + if strings.HasPrefix(k, ":") { + pseudoCount[k]++ + if pseudoCount[k] == 1 { + vals[k] = []string{v} + } else { + // Allows testing of invalid headers w/ dup pseudo fields. + vals[k] = append(vals[k], v) + } + } else { + vals[k] = append(vals[k], v) + } + } + for _, k := range keys { + for _, v := range vals[k] { + rcw.encodeHeaderField(k, v) + } + } + return rcw.headerBuf.Bytes() +} + +// encodeHeader is for usage on client side to write request header. +// +// encodeHeader encodes headers and returns their HPACK bytes. 
headers +// must contain an even number of key/value pairs. There may be +// multiple pairs for keys (e.g. "cookie"). The :method, :path, and +// :scheme headers default to GET, / and https. +func (rcw *rawConnWrapper) encodeHeader(headers ...string) []byte { + if len(headers)%2 == 1 { + panic("odd number of kv args") + } + + rcw.headerBuf.Reset() + + if len(headers) == 0 { + // Fast path, mostly for benchmarks, so test code doesn't pollute + // profiles when we're looking to improve server allocations. + rcw.encodeHeaderField(":method", "GET") + rcw.encodeHeaderField(":path", "/") + rcw.encodeHeaderField(":scheme", "https") + return rcw.headerBuf.Bytes() + } + + if len(headers) == 2 && headers[0] == ":method" { + // Another fast path for benchmarks. + rcw.encodeHeaderField(":method", headers[1]) + rcw.encodeHeaderField(":path", "/") + rcw.encodeHeaderField(":scheme", "https") + return rcw.headerBuf.Bytes() + } + + pseudoCount := map[string]int{} + keys := []string{":method", ":path", ":scheme"} + vals := map[string][]string{ + ":method": {"GET"}, + ":path": {"/"}, + ":scheme": {"https"}, + } + for len(headers) > 0 { + k, v := headers[0], headers[1] + headers = headers[2:] + if _, ok := vals[k]; !ok { + keys = append(keys, k) + } + if strings.HasPrefix(k, ":") { + pseudoCount[k]++ + if pseudoCount[k] == 1 { + vals[k] = []string{v} + } else { + // Allows testing of invalid headers w/ dup pseudo fields. + vals[k] = append(vals[k], v) + } + } else { + vals[k] = append(vals[k], v) + } + } + for _, k := range keys { + for _, v := range vals[k] { + rcw.encodeHeaderField(k, v) + } + } + return rcw.headerBuf.Bytes() +} + +// writeHeadersGRPC is for usage on client side to write request header. +func (rcw *rawConnWrapper) writeHeadersGRPC(streamID uint32, path string) { + rcw.writeHeaders(http2.HeadersFrameParam{ + StreamID: streamID, + BlockFragment: rcw.encodeHeader( + ":method", "POST", + ":path", path, + "content-type", "application/grpc", + "te", "trailers", + ), + EndStream: false, + EndHeaders: true, + }) +} + +func (rcw *rawConnWrapper) writeHeaders(p http2.HeadersFrameParam) error { + if err := rcw.fr.WriteHeaders(p); err != nil { + return fmt.Errorf("error writing HEADERS: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeData(streamID uint32, endStream bool, data []byte) error { + if err := rcw.fr.WriteData(streamID, endStream, data); err != nil { + return fmt.Errorf("error writing DATA: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeRSTStream(streamID uint32, code http2.ErrCode) error { + if err := rcw.fr.WriteRSTStream(streamID, code); err != nil { + return fmt.Errorf("error writing RST_STREAM: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeDataPadded(streamID uint32, endStream bool, data, padding []byte) error { + if err := rcw.fr.WriteDataPadded(streamID, endStream, data, padding); err != nil { + return fmt.Errorf("error writing DATA with padding: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeGoAway(maxStreamID uint32, code http2.ErrCode, debugData []byte) error { + if err := rcw.fr.WriteGoAway(maxStreamID, code, debugData); err != nil { + return fmt.Errorf("error writing GoAway: %v", err) + } + return nil +} + +func (rcw *rawConnWrapper) writeRawFrame(t http2.FrameType, flags http2.Flags, streamID uint32, payload []byte) error { + if err := rcw.fr.WriteRawFrame(t, flags, streamID, payload); err != nil { + return fmt.Errorf("error writing Raw Frame: %v", err) + } + return nil +} diff --git 
a/vendor/google.golang.org/grpc/test/servertester.go b/vendor/google.golang.org/grpc/test/servertester.go new file mode 100644 index 0000000..daeca06 --- /dev/null +++ b/vendor/google.golang.org/grpc/test/servertester.go @@ -0,0 +1,280 @@ +/* + * Copyright 2016 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package test + +import ( + "bytes" + "errors" + "io" + "strings" + "testing" + "time" + + "golang.org/x/net/http2" + "golang.org/x/net/http2/hpack" +) + +// This is a subset of http2's serverTester type. +// +// serverTester wraps a io.ReadWriter (acting like the underlying +// network connection) and provides utility methods to read and write +// http2 frames. +// +// NOTE(bradfitz): this could eventually be exported somewhere. Others +// have asked for it too. For now I'm still experimenting with the +// API and don't feel like maintaining a stable testing API. + +type serverTester struct { + cc io.ReadWriteCloser // client conn + t testing.TB + fr *http2.Framer + + // writing headers: + headerBuf bytes.Buffer + hpackEnc *hpack.Encoder + + // reading frames: + frc chan http2.Frame + frErrc chan error + readTimer *time.Timer +} + +func newServerTesterFromConn(t testing.TB, cc io.ReadWriteCloser) *serverTester { + st := &serverTester{ + t: t, + cc: cc, + frc: make(chan http2.Frame, 1), + frErrc: make(chan error, 1), + } + st.hpackEnc = hpack.NewEncoder(&st.headerBuf) + st.fr = http2.NewFramer(cc, cc) + st.fr.ReadMetaHeaders = hpack.NewDecoder(4096 /*initialHeaderTableSize*/, nil) + + return st +} + +func (st *serverTester) readFrame() (http2.Frame, error) { + go func() { + fr, err := st.fr.ReadFrame() + if err != nil { + st.frErrc <- err + } else { + st.frc <- fr + } + }() + t := time.NewTimer(2 * time.Second) + defer t.Stop() + select { + case f := <-st.frc: + return f, nil + case err := <-st.frErrc: + return nil, err + case <-t.C: + return nil, errors.New("timeout waiting for frame") + } +} + +// greet initiates the client's HTTP/2 connection into a state where +// frames may be sent. +func (st *serverTester) greet() { + st.writePreface() + st.writeInitialSettings() + st.wantSettings() + st.writeSettingsAck() + for { + f, err := st.readFrame() + if err != nil { + st.t.Fatal(err) + } + switch f := f.(type) { + case *http2.WindowUpdateFrame: + // grpc's transport/http2_server sends this + // before the settings ack. The Go http2 + // server uses a setting instead. 
+ case *http2.SettingsFrame: + if f.IsAck() { + return + } + st.t.Fatalf("during greet, got non-ACK settings frame") + default: + st.t.Fatalf("during greet, unexpected frame type %T", f) + } + } +} + +func (st *serverTester) writePreface() { + n, err := st.cc.Write([]byte(http2.ClientPreface)) + if err != nil { + st.t.Fatalf("Error writing client preface: %v", err) + } + if n != len(http2.ClientPreface) { + st.t.Fatalf("Writing client preface, wrote %d bytes; want %d", n, len(http2.ClientPreface)) + } +} + +func (st *serverTester) writeInitialSettings() { + if err := st.fr.WriteSettings(); err != nil { + st.t.Fatalf("Error writing initial SETTINGS frame from client to server: %v", err) + } +} + +func (st *serverTester) writeSettingsAck() { + if err := st.fr.WriteSettingsAck(); err != nil { + st.t.Fatalf("Error writing ACK of server's SETTINGS: %v", err) + } +} + +func (st *serverTester) wantSettings() *http2.SettingsFrame { + f, err := st.readFrame() + if err != nil { + st.t.Fatalf("Error while expecting a SETTINGS frame: %v", err) + } + sf, ok := f.(*http2.SettingsFrame) + if !ok { + st.t.Fatalf("got a %T; want *SettingsFrame", f) + } + return sf +} + +func (st *serverTester) wantSettingsAck() { + f, err := st.readFrame() + if err != nil { + st.t.Fatal(err) + } + sf, ok := f.(*http2.SettingsFrame) + if !ok { + st.t.Fatalf("Wanting a settings ACK, received a %T", f) + } + if !sf.IsAck() { + st.t.Fatal("Settings Frame didn't have ACK set") + } +} + +// wait for any activity from the server +func (st *serverTester) wantAnyFrame() http2.Frame { + f, err := st.fr.ReadFrame() + if err != nil { + st.t.Fatal(err) + } + return f +} + +func (st *serverTester) encodeHeaderField(k, v string) { + err := st.hpackEnc.WriteField(hpack.HeaderField{Name: k, Value: v}) + if err != nil { + st.t.Fatalf("HPACK encoding error for %q/%q: %v", k, v, err) + } +} + +// encodeHeader encodes headers and returns their HPACK bytes. headers +// must contain an even number of key/value pairs. There may be +// multiple pairs for keys (e.g. "cookie"). The :method, :path, and +// :scheme headers default to GET, / and https. +func (st *serverTester) encodeHeader(headers ...string) []byte { + if len(headers)%2 == 1 { + panic("odd number of kv args") + } + + st.headerBuf.Reset() + + if len(headers) == 0 { + // Fast path, mostly for benchmarks, so test code doesn't pollute + // profiles when we're looking to improve server allocations. + st.encodeHeaderField(":method", "GET") + st.encodeHeaderField(":path", "/") + st.encodeHeaderField(":scheme", "https") + return st.headerBuf.Bytes() + } + + if len(headers) == 2 && headers[0] == ":method" { + // Another fast path for benchmarks. + st.encodeHeaderField(":method", headers[1]) + st.encodeHeaderField(":path", "/") + st.encodeHeaderField(":scheme", "https") + return st.headerBuf.Bytes() + } + + pseudoCount := map[string]int{} + keys := []string{":method", ":path", ":scheme"} + vals := map[string][]string{ + ":method": {"GET"}, + ":path": {"/"}, + ":scheme": {"https"}, + } + for len(headers) > 0 { + k, v := headers[0], headers[1] + headers = headers[2:] + if _, ok := vals[k]; !ok { + keys = append(keys, k) + } + if strings.HasPrefix(k, ":") { + pseudoCount[k]++ + if pseudoCount[k] == 1 { + vals[k] = []string{v} + } else { + // Allows testing of invalid headers w/ dup pseudo fields. 
+ vals[k] = append(vals[k], v) + } + } else { + vals[k] = append(vals[k], v) + } + } + for _, k := range keys { + for _, v := range vals[k] { + st.encodeHeaderField(k, v) + } + } + return st.headerBuf.Bytes() +} + +func (st *serverTester) writeHeadersGRPC(streamID uint32, path string) { + st.writeHeaders(http2.HeadersFrameParam{ + StreamID: streamID, + BlockFragment: st.encodeHeader( + ":method", "POST", + ":path", path, + "content-type", "application/grpc", + "te", "trailers", + ), + EndStream: false, + EndHeaders: true, + }) +} + +func (st *serverTester) writeHeaders(p http2.HeadersFrameParam) { + if err := st.fr.WriteHeaders(p); err != nil { + st.t.Fatalf("Error writing HEADERS: %v", err) + } +} + +func (st *serverTester) writeData(streamID uint32, endStream bool, data []byte) { + if err := st.fr.WriteData(streamID, endStream, data); err != nil { + st.t.Fatalf("Error writing DATA: %v", err) + } +} + +func (st *serverTester) writeRSTStream(streamID uint32, code http2.ErrCode) { + if err := st.fr.WriteRSTStream(streamID, code); err != nil { + st.t.Fatalf("Error writing RST_STREAM: %v", err) + } +} + +func (st *serverTester) writeDataPadded(streamID uint32, endStream bool, data, padding []byte) { + if err := st.fr.WriteDataPadded(streamID, endStream, data, padding); err != nil { + st.t.Fatalf("Error writing DATA with padding: %v", err) + } +} diff --git a/vendor/google.golang.org/grpc/test/tools/tools.go b/vendor/google.golang.org/grpc/test/tools/tools.go new file mode 100644 index 0000000..511dc25 --- /dev/null +++ b/vendor/google.golang.org/grpc/test/tools/tools.go @@ -0,0 +1,34 @@ +// +build tools + +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// This package exists to cause `go mod` and `go get` to believe these tools +// are dependencies, even though they are not runtime dependencies of any grpc +// package. This means they will appear in our `go.mod` file, but will not be +// a part of the build. + +package tools + +import ( + _ "github.com/client9/misspell/cmd/misspell" + _ "github.com/golang/protobuf/protoc-gen-go" + _ "golang.org/x/lint/golint" + _ "golang.org/x/tools/cmd/goimports" + _ "honnef.co/go/tools/cmd/staticcheck" +) diff --git a/vendor/google.golang.org/grpc/testdata/testdata.go b/vendor/google.golang.org/grpc/testdata/testdata.go new file mode 100644 index 0000000..c7d2481 --- /dev/null +++ b/vendor/google.golang.org/grpc/testdata/testdata.go @@ -0,0 +1,42 @@ +/* + * Copyright 2017 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package testdata + +import ( + "path/filepath" + "runtime" +) + +// basepath is the root directory of this package. +var basepath string + +func init() { + _, currentFile, _, _ := runtime.Caller(0) + basepath = filepath.Dir(currentFile) +} + +// Path returns the absolute path the given relative file or directory path, +// relative to the google.golang.org/grpc/testdata directory in the user's GOPATH. +// If rel is already absolute, it is returned unmodified. +func Path(rel string) string { + if filepath.IsAbs(rel) { + return rel + } + + return filepath.Join(basepath, rel) +} diff --git a/vendor/google.golang.org/grpc/trace.go b/vendor/google.golang.org/grpc/trace.go new file mode 100644 index 0000000..0a57b99 --- /dev/null +++ b/vendor/google.golang.org/grpc/trace.go @@ -0,0 +1,126 @@ +/* + * + * Copyright 2015 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +import ( + "bytes" + "fmt" + "io" + "net" + "strings" + "sync" + "time" + + "golang.org/x/net/trace" +) + +// EnableTracing controls whether to trace RPCs using the golang.org/x/net/trace package. +// This should only be set before any RPCs are sent or received by this program. +var EnableTracing bool + +// methodFamily returns the trace family for the given method. +// It turns "/pkg.Service/GetFoo" into "pkg.Service". +func methodFamily(m string) string { + m = strings.TrimPrefix(m, "/") // remove leading slash + if i := strings.Index(m, "/"); i >= 0 { + m = m[:i] // remove everything from second slash + } + if i := strings.LastIndex(m, "."); i >= 0 { + m = m[i+1:] // cut down to last dotted component + } + return m +} + +// traceInfo contains tracing information for an RPC. +type traceInfo struct { + tr trace.Trace + firstLine firstLine +} + +// firstLine is the first line of an RPC trace. +// It may be mutated after construction; remoteAddr specifically may change +// during client-side use. +type firstLine struct { + mu sync.Mutex + client bool // whether this is a client (outgoing) RPC + remoteAddr net.Addr + deadline time.Duration // may be zero +} + +func (f *firstLine) SetRemoteAddr(addr net.Addr) { + f.mu.Lock() + f.remoteAddr = addr + f.mu.Unlock() +} + +func (f *firstLine) String() string { + f.mu.Lock() + defer f.mu.Unlock() + + var line bytes.Buffer + io.WriteString(&line, "RPC: ") + if f.client { + io.WriteString(&line, "to") + } else { + io.WriteString(&line, "from") + } + fmt.Fprintf(&line, " %v deadline:", f.remoteAddr) + if f.deadline != 0 { + fmt.Fprint(&line, f.deadline) + } else { + io.WriteString(&line, "none") + } + return line.String() +} + +const truncateSize = 100 + +func truncate(x string, l int) string { + if l > len(x) { + return x + } + return x[:l] +} + +// payload represents an RPC request or response payload. +type payload struct { + sent bool // whether this is an outgoing payload + msg interface{} // e.g. 
a proto.Message + // TODO(dsymonds): add stringifying info to codec, and limit how much we hold here? +} + +func (p payload) String() string { + if p.sent { + return truncate(fmt.Sprintf("sent: %v", p.msg), truncateSize) + } + return truncate(fmt.Sprintf("recv: %v", p.msg), truncateSize) +} + +type fmtStringer struct { + format string + a []interface{} +} + +func (f *fmtStringer) String() string { + return fmt.Sprintf(f.format, f.a...) +} + +type stringer string + +func (s stringer) String() string { return string(s) } diff --git a/vendor/google.golang.org/grpc/version.go b/vendor/google.golang.org/grpc/version.go new file mode 100644 index 0000000..092e088 --- /dev/null +++ b/vendor/google.golang.org/grpc/version.go @@ -0,0 +1,22 @@ +/* + * + * Copyright 2018 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package grpc + +// Version is the current grpc version. +const Version = "1.20.1" diff --git a/vendor/k8s.io/code-generator/go.mod b/vendor/k8s.io/code-generator/go.mod deleted file mode 100644 index bd2ef55..0000000 --- a/vendor/k8s.io/code-generator/go.mod +++ /dev/null @@ -1,21 +0,0 @@ -module k8s.io/code-generator - -require ( - github.com/PuerkitoBio/purell v1.0.0 - github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2 - github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633 - github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1 - github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9 - github.com/go-openapi/spec v0.0.0-20180213232550-1de3e0542de6 - github.com/go-openapi/swag v0.0.0-20170606142751-f3f9494671f9 - github.com/gogo/protobuf v0.0.0-20170330071051-c0656edd0d9e - github.com/golang/glog v0.0.0-20141105023935-44145f04b68c - github.com/mailru/easyjson v0.0.0-20170624190925-2f5df55504eb - github.com/spf13/pflag v1.0.1 - golang.org/x/net v0.0.0-20170809000501-1c05540f6879 - golang.org/x/text v0.0.0-20170810154203-b19bf474d317 - golang.org/x/tools v0.0.0-20170428054726-2382e3994d48 - gopkg.in/yaml.v2 v2.0.0-20170721113624-670d4cfef054 - k8s.io/gengo v0.0.0-20180702041517-fdcf9f9480fd - k8s.io/kube-openapi v0.0.0-20180711000925-0cf8f7e6ed1d -) diff --git a/vendor/k8s.io/code-generator/go.sum b/vendor/k8s.io/code-generator/go.sum deleted file mode 100644 index e0cdb8c..0000000 --- a/vendor/k8s.io/code-generator/go.sum +++ /dev/null @@ -1,21 +0,0 @@ -github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= -github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs= -github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+35s3my2LFTysnkMfxsJBAMHj/DoqoB9knIWoYG/Vk0= -github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= -github.com/go-openapi/spec v0.0.0-20180213232550-1de3e0542de6/go.mod 
h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= -github.com/go-openapi/swag v0.0.0-20170606142751-f3f9494671f9/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= -github.com/gogo/protobuf v0.0.0-20170330071051-c0656edd0d9e/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/golang/glog v0.0.0-20141105023935-44145f04b68c h1:CbdkBQ1/PiAo0FYJhQGwASD8wrgNvTdf01g6+O9tNuA= -github.com/golang/glog v0.0.0-20141105023935-44145f04b68c/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/mailru/easyjson v0.0.0-20170624190925-2f5df55504eb/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/spf13/pflag v1.0.1 h1:aCvUg6QPl3ibpQUxyLkrEkCHtPqYJL4x9AuhqVqFis4= -github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -golang.org/x/net v0.0.0-20170809000501-1c05540f6879/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/text v0.0.0-20170810154203-b19bf474d317/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/tools v0.0.0-20170428054726-2382e3994d48 h1:Al/HKLBwsMBsWhxa71LOWO8MeCbD21L+x5rHb83JHjI= -golang.org/x/tools v0.0.0-20170428054726-2382e3994d48/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -gopkg.in/yaml.v2 v2.0.0-20170721113624-670d4cfef054/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= -k8s.io/gengo v0.0.0-20180702041517-fdcf9f9480fd h1:vrN4VnaCWTfFhsSITN/6ZpiuPw0sc7e59Eo07aQfbEw= -k8s.io/gengo v0.0.0-20180702041517-fdcf9f9480fd/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/kube-openapi v0.0.0-20180711000925-0cf8f7e6ed1d/go.mod h1:BXM9ceUBTj2QnfH2MK1odQs778ajze1RxcmP6S8RVVc=